[med-svn] [galaxy] 01/03: Import Upstream version 16.10

Olivier Sallou <osallou@debian.org>
Sun Jan 15 12:48:26 UTC 2017


This is an automated email from the git hooks/post-receive script.

osallou pushed a commit to branch master
in repository galaxy.

commit f7d0bfeb2c07cb44db468100cc56fc973182afb2
Author: Olivier Sallou <osallou@debian.org>
Date:   Sun Jan 15 12:45:32 2017 +0000

    Import Upstream version 16.10
---
 .ci/check_controller.sh                            |     6 +
 .ci/check_mako.sh                                  |     6 +
 .ci/first_startup.sh                               |    16 +
 .ci/flake8_blacklist.txt                           |     7 +
 .ci/flake8_docstrings_include_list.txt             |     2 +
 .ci/flake8_lint_include_list.txt                   |   579 +
 .ci/flake8_py3_wrapper.sh                          |     5 +
 .ci/flake8_wrapper.sh                              |     8 +
 .ci/flake8_wrapper_docstrings.sh                   |    21 +
 .ci/flake8_wrapper_imports.sh                      |     5 +
 .ci/py3_sources.txt                                |   102 +
 .ci/validate_test_tools.sh                         |    11 +
 .coveragerc                                        |     3 +
 .travis.yml                                        |    36 +
 CITATION                                           |    54 +
 CODE_OF_CONDUCT.md                                 |    99 +
 CONTRIBUTING.md                                    |   120 +
 CONTRIBUTORS.md                                    |   152 +
 LICENSE.txt                                        |   186 +
 Makefile                                           |   215 +
 README.rst                                         |    63 +
 client/.docker-build.sh                            |     4 +
 client/Dockerfile                                  |    13 +
 client/GruntFile.js                                |    28 +
 client/README.md                                   |   123 +
 client/bower.json                                  |    40 +
 client/galaxy/scripts/apps/analysis.js             |   177 +
 client/galaxy/scripts/apps/history-panel.js        |    95 +
 client/galaxy/scripts/apps/login.js                |    44 +
 client/galaxy/scripts/apps/tool-panel.js           |   114 +
 .../scripts/galaxy.interactive_environments.js     |    59 +
 client/galaxy/scripts/galaxy.js                    |   256 +
 client/galaxy/scripts/galaxy.library.js            |   233 +
 client/galaxy/scripts/galaxy.pages.js              |   642 +
 client/galaxy/scripts/i18n.js                      |   183 +
 client/galaxy/scripts/layout/generic-nav-view.js   |    98 +
 client/galaxy/scripts/layout/masthead.js           |    92 +
 client/galaxy/scripts/layout/menu.js               |   419 +
 client/galaxy/scripts/layout/modal.js              |   150 +
 client/galaxy/scripts/layout/page.js               |   170 +
 client/galaxy/scripts/layout/panel.js              |   293 +
 client/galaxy/scripts/layout/scratchbook.js        |   190 +
 client/galaxy/scripts/libs/backbone.js             |  1920 +++
 client/galaxy/scripts/libs/bbi/bigwig.js           |  1190 ++
 .../galaxy/scripts/libs/bbi/jquery-ajax-native.js  |   157 +
 client/galaxy/scripts/libs/bbi/jszlib.js           |  2163 +++
 client/galaxy/scripts/libs/bbi/spans.js            |   257 +
 client/galaxy/scripts/libs/bibtex.js               |  1867 +++
 client/galaxy/scripts/libs/bootstrap-tour.js       |   927 ++
 client/galaxy/scripts/libs/bootstrap.js            |   850 ++
 client/galaxy/scripts/libs/d3.js                   |  9554 ++++++++++++
 client/galaxy/scripts/libs/farbtastic.js           |   516 +
 client/galaxy/scripts/libs/jquery.complexify.js    |   192 +
 client/galaxy/scripts/libs/jquery.sparklines.js    |  3054 ++++
 client/galaxy/scripts/libs/jquery/jqtouch.js       |   883 ++
 client/galaxy/scripts/libs/jquery/jquery-ui.js     |     6 +
 .../scripts/libs/jquery/jquery.autocomplete.js     |  1152 ++
 client/galaxy/scripts/libs/jquery/jquery.cookie.js |    97 +
 .../galaxy/scripts/libs/jquery/jquery.dynatree.js  |  3457 +++++
 .../scripts/libs/jquery/jquery.event.drag.js       |   405 +
 .../scripts/libs/jquery/jquery.event.drop.js       |   302 +
 .../scripts/libs/jquery/jquery.event.hover.js      |    84 +
 client/galaxy/scripts/libs/jquery/jquery.form.js   |  1204 ++
 client/galaxy/scripts/libs/jquery/jquery.js        | 11008 ++++++++++++++
 .../galaxy/scripts/libs/jquery/jquery.migrate.js   |   717 +
 .../scripts/libs/jquery/jquery.mousewheel.js       |   125 +
 client/galaxy/scripts/libs/jquery/jquery.rating.js |   376 +
 .../galaxy/scripts/libs/jquery/jquery.wymeditor.js |  4756 ++++++
 client/galaxy/scripts/libs/jquery/jstorage.js      |   933 ++
 client/galaxy/scripts/libs/jquery/jstree.js        |  6743 +++++++++
 client/galaxy/scripts/libs/jquery/select2.js       |  3507 +++++
 client/galaxy/scripts/libs/raven.js                |  2510 ++++
 client/galaxy/scripts/libs/require.js              |  2142 +++
 client/galaxy/scripts/libs/toastr.js               |   307 +
 client/galaxy/scripts/libs/underscore.js           |  1548 ++
 client/galaxy/scripts/mvc/annotation.js            |    79 +
 client/galaxy/scripts/mvc/base-mvc.js              |   591 +
 .../mvc/base/controlled-fetch-collection.js        |   327 +
 .../galaxy/scripts/mvc/citation/citation-model.js  |    97 +
 .../galaxy/scripts/mvc/citation/citation-view.js   |   188 +
 .../scripts/mvc/collection/collection-li-edit.js   |   124 +
 .../galaxy/scripts/mvc/collection/collection-li.js |   271 +
 .../scripts/mvc/collection/collection-model.js     |   471 +
 .../scripts/mvc/collection/collection-view-edit.js |   176 +
 .../scripts/mvc/collection/collection-view.js      |   255 +
 .../mvc/collection/list-collection-creator.js      |  1062 ++
 .../collection/list-of-pairs-collection-creator.js |  1726 +++
 .../mvc/collection/pair-collection-creator.js      |   253 +
 client/galaxy/scripts/mvc/dataset/data.js          |   682 +
 .../galaxy/scripts/mvc/dataset/dataset-choice.js   |   431 +
 .../galaxy/scripts/mvc/dataset/dataset-li-edit.js  |   432 +
 client/galaxy/scripts/mvc/dataset/dataset-li.js    |   501 +
 client/galaxy/scripts/mvc/dataset/dataset-list.js  |    46 +
 client/galaxy/scripts/mvc/dataset/dataset-model.js |   334 +
 client/galaxy/scripts/mvc/dataset/states.js        |    62 +
 client/galaxy/scripts/mvc/form/form-data.js        |   240 +
 client/galaxy/scripts/mvc/form/form-input.js       |   128 +
 client/galaxy/scripts/mvc/form/form-parameters.js  |   217 +
 client/galaxy/scripts/mvc/form/form-repeat.js      |    96 +
 client/galaxy/scripts/mvc/form/form-section.js     |   175 +
 client/galaxy/scripts/mvc/form/form-view.js        |   145 +
 client/galaxy/scripts/mvc/grid/grid-model.js       |   121 +
 client/galaxy/scripts/mvc/grid/grid-template.js    |   618 +
 client/galaxy/scripts/mvc/grid/grid-view.js        |   675 +
 client/galaxy/scripts/mvc/history/copy-dialog.js   |   202 +
 client/galaxy/scripts/mvc/history/hda-li-edit.js   |    79 +
 client/galaxy/scripts/mvc/history/hda-li.js        |    72 +
 client/galaxy/scripts/mvc/history/hda-model.js     |    28 +
 client/galaxy/scripts/mvc/history/hdca-li-edit.js  |    75 +
 client/galaxy/scripts/mvc/history/hdca-li.js       |   102 +
 client/galaxy/scripts/mvc/history/hdca-model.js    |   131 +
 .../scripts/mvc/history/history-content-model.js   |    90 +
 .../galaxy/scripts/mvc/history/history-contents.js |   478 +
 client/galaxy/scripts/mvc/history/history-model.js |   518 +
 .../scripts/mvc/history/history-preferences.js     |    98 +
 .../scripts/mvc/history/history-structure-view.js  |   583 +
 .../scripts/mvc/history/history-view-annotated.js  |   106 +
 .../mvc/history/history-view-edit-current.js       |   474 +
 .../scripts/mvc/history/history-view-edit.js       |   599 +
 client/galaxy/scripts/mvc/history/history-view.js  |   622 +
 client/galaxy/scripts/mvc/history/job-dag.js       |   326 +
 client/galaxy/scripts/mvc/history/multi-panel.js   |  1074 ++
 client/galaxy/scripts/mvc/history/options-menu.js  |   247 +
 client/galaxy/scripts/mvc/job/job-li.js            |   206 +
 client/galaxy/scripts/mvc/job/job-model.js         |   202 +
 .../scripts/mvc/library/library-dataset-view.js    |  1091 ++
 .../scripts/mvc/library/library-folder-view.js     |   319 +
 .../scripts/mvc/library/library-folderlist-view.js |   401 +
 .../scripts/mvc/library/library-folderrow-view.js  |   381 +
 .../mvc/library/library-foldertoolbar-view.js      |  1440 ++
 .../scripts/mvc/library/library-library-view.js    |   348 +
 .../mvc/library/library-librarylist-view.js        |   267 +
 .../scripts/mvc/library/library-libraryrow-view.js |   278 +
 .../mvc/library/library-librarytoolbar-view.js     |   258 +
 client/galaxy/scripts/mvc/library/library-model.js |   246 +
 client/galaxy/scripts/mvc/list/list-item.js        |   489 +
 client/galaxy/scripts/mvc/list/list-view.js        |  1035 ++
 client/galaxy/scripts/mvc/tag.js                   |   120 +
 client/galaxy/scripts/mvc/tool/tool-form-base.js   |   356 +
 .../galaxy/scripts/mvc/tool/tool-form-composite.js |   511 +
 .../galaxy/scripts/mvc/tool/tool-form-workflow.js  |   240 +
 client/galaxy/scripts/mvc/tool/tool-form.js        |   183 +
 client/galaxy/scripts/mvc/tool/tools.js            |   869 ++
 client/galaxy/scripts/mvc/tours.js                 |   122 +
 client/galaxy/scripts/mvc/ui/error-modal.js        |   135 +
 client/galaxy/scripts/mvc/ui/icon-button.js        |   183 +
 client/galaxy/scripts/mvc/ui/popup-menu.js         |   315 +
 client/galaxy/scripts/mvc/ui/ui-buttons.js         |   304 +
 client/galaxy/scripts/mvc/ui/ui-color-picker.js    |   162 +
 client/galaxy/scripts/mvc/ui/ui-drilldown.js       |    96 +
 client/galaxy/scripts/mvc/ui/ui-frames.js          |   573 +
 client/galaxy/scripts/mvc/ui/ui-list.js            |   157 +
 client/galaxy/scripts/mvc/ui/ui-misc.js            |   168 +
 client/galaxy/scripts/mvc/ui/ui-modal.js           |   189 +
 client/galaxy/scripts/mvc/ui/ui-options.js         |   246 +
 client/galaxy/scripts/mvc/ui/ui-popover.js         |   175 +
 client/galaxy/scripts/mvc/ui/ui-portlet.js         |   198 +
 client/galaxy/scripts/mvc/ui/ui-select-content.js  |   287 +
 client/galaxy/scripts/mvc/ui/ui-select-default.js  |   332 +
 client/galaxy/scripts/mvc/ui/ui-select-ftp.js      |    53 +
 client/galaxy/scripts/mvc/ui/ui-select-library.js  |   127 +
 client/galaxy/scripts/mvc/ui/ui-select.js          |   213 +
 client/galaxy/scripts/mvc/ui/ui-slider.js          |   105 +
 client/galaxy/scripts/mvc/ui/ui-table.js           |   228 +
 client/galaxy/scripts/mvc/ui/ui-tabs.js            |   149 +
 client/galaxy/scripts/mvc/ui/ui-thumbnails.js      |   111 +
 .../scripts/mvc/upload/composite/composite-row.js  |   291 +
 .../scripts/mvc/upload/composite/composite-view.js |   191 +
 .../scripts/mvc/upload/default/default-row.js      |   232 +
 .../scripts/mvc/upload/default/default-view.js     |   358 +
 client/galaxy/scripts/mvc/upload/upload-button.js  |    49 +
 client/galaxy/scripts/mvc/upload/upload-ftp.js     |   153 +
 client/galaxy/scripts/mvc/upload/upload-model.js   |    26 +
 .../galaxy/scripts/mvc/upload/upload-settings.js   |    43 +
 client/galaxy/scripts/mvc/upload/upload-view.js    |   184 +
 client/galaxy/scripts/mvc/user/user-model.js       |   127 +
 client/galaxy/scripts/mvc/user/user-quotameter.js  |   158 +
 .../mvc/visualization/visualization-model.js       |   140 +
 client/galaxy/scripts/mvc/webhooks.js              |    57 +
 .../galaxy/scripts/mvc/workflow/workflow-canvas.js |   245 +
 .../scripts/mvc/workflow/workflow-connector.js     |   131 +
 .../scripts/mvc/workflow/workflow-manager.js       |   486 +
 .../galaxy/scripts/mvc/workflow/workflow-node.js   |   249 +
 .../scripts/mvc/workflow/workflow-terminals.js     |   503 +
 .../scripts/mvc/workflow/workflow-view-data.js     |   132 +
 .../scripts/mvc/workflow/workflow-view-node.js     |   102 +
 .../mvc/workflow/workflow-view-terminals.js        |   238 +
 .../galaxy/scripts/mvc/workflow/workflow-view.js   |   918 ++
 client/galaxy/scripts/nls/ja/locale.js             |   217 +
 client/galaxy/scripts/nls/locale.js                |   222 +
 client/galaxy/scripts/nls/zh/locale.js             |   209 +
 client/galaxy/scripts/onload.js                    |   186 +
 client/galaxy/scripts/polyfills.js                 |    77 +
 client/galaxy/scripts/reports_webapp/run_stats.js  |   654 +
 client/galaxy/scripts/ui/autocom_tagging.js        |   391 +
 client/galaxy/scripts/ui/editable-text.js          |   119 +
 client/galaxy/scripts/ui/fa-icon-button.js         |    48 +
 client/galaxy/scripts/ui/filter-control.js         |   204 +
 client/galaxy/scripts/ui/hoverhighlight.js         |    37 +
 client/galaxy/scripts/ui/loading-indicator.js      |    98 +
 client/galaxy/scripts/ui/mode-button.js            |   191 +
 client/galaxy/scripts/ui/pagination.js             |   226 +
 client/galaxy/scripts/ui/peek-column-selector.js   |   317 +
 client/galaxy/scripts/ui/popupmenu.js              |   158 +
 client/galaxy/scripts/ui/scroll-panel.js           |    88 +
 client/galaxy/scripts/ui/search-input.js           |   153 +
 client/galaxy/scripts/utils/add-logging.js         |    42 +
 client/galaxy/scripts/utils/ajax-queue.js          |   179 +
 client/galaxy/scripts/utils/async-save-text.js     |    89 +
 client/galaxy/scripts/utils/config.js              |   312 +
 client/galaxy/scripts/utils/deferred.js            |    58 +
 client/galaxy/scripts/utils/graph.js               |   638 +
 client/galaxy/scripts/utils/levenshtein.js         |    60 +
 client/galaxy/scripts/utils/localization.js        |    50 +
 client/galaxy/scripts/utils/metrics-logger.js      |   475 +
 client/galaxy/scripts/utils/natural-sort.js        |    30 +
 .../galaxy/scripts/utils/query-string-parsing.js   |    33 +
 client/galaxy/scripts/utils/uploadbox.js           |   303 +
 client/galaxy/scripts/utils/utils.js               |   298 +
 client/galaxy/scripts/viz/bbi-data-manager.js      |    67 +
 client/galaxy/scripts/viz/circster.js              |  1154 ++
 client/galaxy/scripts/viz/phyloviz.js              |  1000 ++
 client/galaxy/scripts/viz/sweepster.js             |   952 ++
 client/galaxy/scripts/viz/trackster.js             |   663 +
 client/galaxy/scripts/viz/trackster/filters.js     |   628 +
 client/galaxy/scripts/viz/trackster/painters.js    |  1619 +++
 client/galaxy/scripts/viz/trackster/slotting.js    |   204 +
 client/galaxy/scripts/viz/trackster/tracks.js      |  4313 ++++++
 client/galaxy/scripts/viz/trackster/util.js        |   137 +
 client/galaxy/scripts/viz/visualization.js         |  1076 ++
 client/galaxy/scripts/viz/viz_views.js             |   100 +
 client/galaxy/style/blue_colors.ini                |    82 +
 client/galaxy/style/less/autocomplete_tagging.less |   148 +
 client/galaxy/style/less/base.less                 |  1771 +++
 client/galaxy/style/less/bootstrap.less            |    68 +
 client/galaxy/style/less/bootstrap/alerts.less     |    67 +
 client/galaxy/style/less/bootstrap/badges.less     |    51 +
 client/galaxy/style/less/bootstrap/bootstrap.less  |    59 +
 .../galaxy/style/less/bootstrap/breadcrumbs.less   |    23 +
 .../galaxy/style/less/bootstrap/button-groups.less |   248 +
 client/galaxy/style/less/bootstrap/buttons.less    |   160 +
 client/galaxy/style/less/bootstrap/carousel.less   |   209 +
 client/galaxy/style/less/bootstrap/close.less      |    33 +
 client/galaxy/style/less/bootstrap/code.less       |    56 +
 .../style/less/bootstrap/component-animations.less |    29 +
 client/galaxy/style/less/bootstrap/dropdowns.less  |   193 +
 client/galaxy/style/less/bootstrap/forms.less      |   362 +
 client/galaxy/style/less/bootstrap/glyphicons.less |   236 +
 client/galaxy/style/less/bootstrap/grid.less       |   346 +
 .../galaxy/style/less/bootstrap/input-groups.less  |   127 +
 client/galaxy/style/less/bootstrap/jumbotron.less  |    40 +
 client/galaxy/style/less/bootstrap/labels.less     |    58 +
 client/galaxy/style/less/bootstrap/list-group.less |    88 +
 client/galaxy/style/less/bootstrap/media.less      |    56 +
 client/galaxy/style/less/bootstrap/mixins.less     |   744 +
 client/galaxy/style/less/bootstrap/modals.less     |   141 +
 client/galaxy/style/less/bootstrap/navbar.less     |   621 +
 client/galaxy/style/less/bootstrap/navs.less       |   248 +
 client/galaxy/style/less/bootstrap/normalize.less  |   396 +
 client/galaxy/style/less/bootstrap/pager.less      |    55 +
 client/galaxy/style/less/bootstrap/pagination.less |    85 +
 client/galaxy/style/less/bootstrap/panels.less     |   148 +
 client/galaxy/style/less/bootstrap/popovers.less   |   133 +
 client/galaxy/style/less/bootstrap/print.less      |   100 +
 .../galaxy/style/less/bootstrap/progress-bars.less |    95 +
 .../style/less/bootstrap/responsive-utilities.less |   220 +
 .../galaxy/style/less/bootstrap/scaffolding.less   |   130 +
 client/galaxy/style/less/bootstrap/tables.less     |   238 +
 client/galaxy/style/less/bootstrap/theme.less      |   241 +
 client/galaxy/style/less/bootstrap/thumbnails.less |    32 +
 client/galaxy/style/less/bootstrap/tooltip.less    |    95 +
 client/galaxy/style/less/bootstrap/type.less       |   242 +
 client/galaxy/style/less/bootstrap/utilities.less  |    42 +
 client/galaxy/style/less/bootstrap/variables.less  |   628 +
 client/galaxy/style/less/bootstrap/wells.less      |    29 +
 client/galaxy/style/less/circster.less             |     7 +
 client/galaxy/style/less/collection.less           |    74 +
 client/galaxy/style/less/dataset.less              |   340 +
 client/galaxy/style/less/embed_item.less           |    90 +
 client/galaxy/style/less/flex.less                 |    73 +
 .../style/less/fontawesome/bordered-pulled.less    |    16 +
 client/galaxy/style/less/fontawesome/core.less     |    10 +
 .../galaxy/style/less/fontawesome/fixed-width.less |     6 +
 .../style/less/fontawesome/font-awesome.less       |    17 +
 client/galaxy/style/less/fontawesome/icons.less    |   552 +
 client/galaxy/style/less/fontawesome/larger.less   |    13 +
 client/galaxy/style/less/fontawesome/list.less     |    19 +
 client/galaxy/style/less/fontawesome/mixins.less   |    25 +
 client/galaxy/style/less/fontawesome/path.less     |    14 +
 .../style/less/fontawesome/rotated-flipped.less    |    20 +
 client/galaxy/style/less/fontawesome/spinning.less |    29 +
 client/galaxy/style/less/fontawesome/stacked.less  |    20 +
 .../galaxy/style/less/fontawesome/variables.less   |   561 +
 client/galaxy/style/less/frame.less                |   154 +
 client/galaxy/style/less/galaxy_bootstrap.less     |    67 +
 .../style/less/galaxy_bootstrap/overrides.less     |    83 +
 .../style/less/galaxy_bootstrap/variables.less     |   698 +
 client/galaxy/style/less/galaxy_variables.less     |    37 +
 client/galaxy/style/less/history.less              |   741 +
 client/galaxy/style/less/iconic_stroke.less        |   461 +
 client/galaxy/style/less/iphone.less               |   438 +
 client/galaxy/style/less/job.less                  |    56 +
 client/galaxy/style/less/jstree.less               |   989 ++
 client/galaxy/style/less/library.less              |   201 +
 client/galaxy/style/less/list-item.less            |   232 +
 client/galaxy/style/less/reports.less              |   237 +
 client/galaxy/style/less/select2.less              |   704 +
 client/galaxy/style/less/theme/blue.less           |    24 +
 client/galaxy/style/less/toastr.less               |   232 +
 client/galaxy/style/less/tour.less                 |     4 +
 client/galaxy/style/less/trackster.less            |   572 +
 client/galaxy/style/less/ui.less                   |   897 ++
 client/galaxy/style/less/ui/dataset-choice.less    |    94 +
 client/galaxy/style/less/ui/error-modal.less       |    34 +
 client/galaxy/style/less/ui/icon-btn.less          |    61 +
 client/galaxy/style/less/ui/pagination.less        |    34 +
 .../style/less/ui/paired-collection-creator.less   |   605 +
 .../galaxy/style/less/ui/peek-column-selector.less |    69 +
 client/galaxy/style/less/ui/search-input.less      |    27 +
 client/galaxy/style/less/upload.less               |   287 +
 client/galaxy/style/source_material/circle.py      |    55 +
 .../galaxy/style/source_material/galaxy_icons.svg  |   345 +
 .../style/source_material/galaxy_spinner.fla       |   Bin 0 -> 30208 bytes
 client/grunt-tasks/install-libs.js                 |    93 +
 client/grunt-tasks/scripts.js                      |   123 +
 client/grunt-tasks/style.js                        |   109 +
 client/grunt-tasks/webpack.js                      |    38 +
 client/package.json                                |    29 +
 .../scripts/mvc/groups/group-detail-view.js        |   146 +
 .../toolshed/scripts/mvc/groups/group-list-view.js |   148 +
 .../scripts/mvc/groups/group-listrow-view.js       |    36 +
 client/toolshed/scripts/mvc/groups/group-model.js  |    22 +
 client/toolshed/scripts/toolshed.groups.js         |    51 +
 client/webpack.config.js                           |    79 +
 config/auth_conf.xml.sample                        |   110 +
 config/data_manager_conf.xml.sample                |     3 +
 config/datatypes_conf.xml.sample                   |   702 +
 config/dependency_resolvers_conf.xml.sample        |    28 +
 config/disposable_email_blacklist.conf.sample      |     9 +
 config/external_service_types_conf.xml.sample      |     5 +
 config/galaxy.ini.sample                           |  1286 ++
 config/job_conf.xml.sample_advanced                |   784 +
 config/job_conf.xml.sample_basic                   |    13 +
 config/job_metrics_conf.xml.sample                 |   124 +
 config/job_resource_params_conf.xml.sample         |     6 +
 config/migrated_tools_conf.xml.sample              |     3 +
 config/object_store_conf.xml.sample                |    57 +
 config/openid_conf.xml.sample                      |     8 +
 .../bam_iobio/config/bam_iobio.ini.sample          |    43 +
 .../bam_iobio/config/bam_iobio.xml                 |    15 +
 .../bam_iobio/static/js/bam_iobio.js               |    39 +
 .../bam_iobio/templates/bam_iobio.mako             |    62 +
 .../common/templates/ie.mako                       |    35 +
 .../interactive_environments.dtd                   |     1 +
 .../jupyter/config/allowed_images.yml.sample       |    14 +
 .../jupyter/config/jupyter.ini.sample              |    42 +
 .../jupyter/config/jupyter.xml                     |    17 +
 .../jupyter/static/js/jupyter.js                   |   123 +
 .../jupyter/templates/jupyter.mako                 |    74 +
 .../neo/config/allowed_images.yml.sample           |    11 +
 .../neo/config/neo.ini.sample                      |    42 +
 .../interactive_environments/neo/config/neo.xml    |    19 +
 .../interactive_environments/neo/static/js/neo.js  |    12 +
 .../neo/templates/neo.mako                         |    43 +
 .../phinch/config/phinch.ini.sample                |    21 +
 .../phinch/config/phinch.xml                       |    15 +
 .../phinch/static/js/phinch.js                     |    51 +
 .../phinch/templates/phinch.mako                   |    46 +
 .../rstudio/config/allowed_images.yml.sample       |    14 +
 .../rstudio/config/rstudio.ini.sample              |    43 +
 .../rstudio/config/rstudio.xml                     |    17 +
 .../rstudio/static/js/crypto/base64.js             |    73 +
 .../rstudio/static/js/crypto/jsbn.js               |   562 +
 .../rstudio/static/js/crypto/prng4.js              |    47 +
 .../rstudio/static/js/crypto/rng.js                |    70 +
 .../rstudio/static/js/crypto/rsa.js                |   114 +
 .../rstudio/static/js/crypto/rstudio.big.js        |   861 ++
 .../rstudio/static/js/crypto/rstudio.min.js        |    24 +
 .../rstudio/static/js/crypto/rstudio/base64.js     |    73 +
 .../rstudio/static/js/crypto/rstudio/jsbn.js       |   562 +
 .../rstudio/static/js/crypto/rstudio/prng4.js      |    47 +
 .../rstudio/static/js/crypto/rstudio/rng.js        |    70 +
 .../rstudio/static/js/crypto/rstudio/rsa.js        |   114 +
 .../rstudio/static/js/crypto/wu/base64.js          |    71 +
 .../rstudio/static/js/crypto/wu/jsbn.js            |   559 +
 .../rstudio/static/js/crypto/wu/prng4.js           |    45 +
 .../rstudio/static/js/crypto/wu/rng.js             |    75 +
 .../rstudio/static/js/crypto/wu/rsa.js             |   112 +
 .../rstudio/static/js/rstudio.js                   |    72 +
 .../rstudio/templates/rstudio.mako                 |    73 +
 config/plugins/tours/core.galaxy_ui.yaml           |   216 +
 config/plugins/tours/core.history.yaml             |   148 +
 config/plugins/tours/core.scratchbook.yaml         |    77 +
 config/plugins/visualizations/README.txt           |    30 +
 .../visualizations/additional_template_paths.xml   |     6 +
 .../visualizations/charts/config/charts.xml        |    17 +
 config/plugins/visualizations/charts/package.json  |    41 +
 .../visualizations/charts/static/client/app.css    |    83 +
 .../visualizations/charts/static/client/app.js     |    68 +
 .../charts/static/client/components/model.js       |    98 +
 .../charts/static/client/components/screenshot.js  |   169 +
 .../charts/static/client/views/description.js      |    41 +
 .../charts/static/client/views/editor.js           |   172 +
 .../charts/static/client/views/groups.js           |   105 +
 .../charts/static/client/views/settings.js         |    47 +
 .../charts/static/client/views/viewer.js           |   137 +
 .../charts/static/client/views/viewport.js         |   100 +
 .../charts/static/repository/build/benfred_venn.js |     2 +
 .../static/repository/build/benfred_venn.js.map    |     1 +
 .../static/repository/build/biojs_drawrnajs.js     |    91 +
 .../static/repository/build/biojs_drawrnajs.js.map |     1 +
 .../charts/static/repository/build/biojs_msa.js    |    21 +
 .../static/repository/build/biojs_msa.js.map       |     1 +
 .../static/repository/build/cytoscape_basic.js     |    37 +
 .../static/repository/build/cytoscape_basic.js.map |     1 +
 .../charts/static/repository/build/jqplot_bar.js   |    10 +
 .../static/repository/build/jqplot_bar.js.map      |     1 +
 .../static/repository/build/jqplot_boxplot.js      |    10 +
 .../static/repository/build/jqplot_boxplot.js.map  |     1 +
 .../repository/build/jqplot_histogram_discrete.js  |    10 +
 .../build/jqplot_histogram_discrete.js.map         |     1 +
 .../charts/static/repository/build/jqplot_line.js  |    10 +
 .../static/repository/build/jqplot_line.js.map     |     1 +
 .../static/repository/build/jqplot_scatter.js      |    10 +
 .../static/repository/build/jqplot_scatter.js.map  |     1 +
 .../charts/static/repository/build/nvd3_bar.js     |     8 +
 .../charts/static/repository/build/nvd3_bar.js.map |     1 +
 .../static/repository/build/nvd3_bar_horizontal.js |     8 +
 .../repository/build/nvd3_bar_horizontal.js.map    |     1 +
 .../build/nvd3_bar_horizontal_stacked.js           |     8 +
 .../build/nvd3_bar_horizontal_stacked.js.map       |     1 +
 .../static/repository/build/nvd3_bar_stacked.js    |     8 +
 .../repository/build/nvd3_bar_stacked.js.map       |     1 +
 .../static/repository/build/nvd3_histogram.js      |     8 +
 .../static/repository/build/nvd3_histogram.js.map  |     1 +
 .../repository/build/nvd3_histogram_discrete.js    |     8 +
 .../build/nvd3_histogram_discrete.js.map           |     1 +
 .../charts/static/repository/build/nvd3_line.js    |     8 +
 .../static/repository/build/nvd3_line.js.map       |     1 +
 .../static/repository/build/nvd3_line_focus.js     |     8 +
 .../static/repository/build/nvd3_line_focus.js.map |     1 +
 .../charts/static/repository/build/nvd3_pie.js     |     8 +
 .../charts/static/repository/build/nvd3_pie.js.map |     1 +
 .../charts/static/repository/build/nvd3_scatter.js |     8 +
 .../static/repository/build/nvd3_scatter.js.map    |     1 +
 .../static/repository/build/nvd3_stackedarea.js    |     8 +
 .../repository/build/nvd3_stackedarea.js.map       |     1 +
 .../repository/build/nvd3_stackedarea_full.js      |     8 +
 .../repository/build/nvd3_stackedarea_full.js.map  |     1 +
 .../repository/build/nvd3_stackedarea_stream.js    |     8 +
 .../build/nvd3_stackedarea_stream.js.map           |     1 +
 .../static/repository/build/others_example.js      |     2 +
 .../static/repository/build/others_example.js.map  |     1 +
 .../static/repository/build/others_heatmap.js      |     2 +
 .../static/repository/build/others_heatmap.js.map  |     1 +
 .../repository/build/others_heatmap_cluster.js     |     2 +
 .../repository/build/others_heatmap_cluster.js.map |     1 +
 .../charts/static/repository/build/pv_viewer.js    |     7 +
 .../static/repository/build/pv_viewer.js.map       |     1 +
 .../charts/static/repository/build/registry.js     |     2 +
 .../charts/static/repository/build/registry.js.map |     1 +
 .../charts/static/repository/build/registry.tmp.js |     1 +
 .../static/repository/plugins/benfred/venn.css     |    14 +
 .../static/repository/plugins/benfred/venn.js      |  1789 +++
 .../static/repository/plugins/biojs/biojs.msa.js   |    21 +
 .../repository/plugins/biojs/drawrnajs/drawrna.js  |    15 +
 .../plugins/biojs/drawrnajs/layouts/layout.js      |    24 +
 .../plugins/biojs/drawrnajs/layouts/naview/base.js |    49 +
 .../biojs/drawrnajs/layouts/naview/connection.js   |   100 +
 .../biojs/drawrnajs/layouts/naview/getnaview.js    |    44 +
 .../plugins/biojs/drawrnajs/layouts/naview/loop.js |    98 +
 .../biojs/drawrnajs/layouts/naview/naview.js       |  1098 ++
 .../biojs/drawrnajs/layouts/naview/radloop.js      |    38 +
 .../biojs/drawrnajs/layouts/naview/region.js       |    38 +
 .../biojs/drawrnajs/layouts/radiate/getradiate.js  |   271 +
 .../plugins/biojs/drawrnajs/models/link.js         |    17 +
 .../plugins/biojs/drawrnajs/models/linkcol.js      |    28 +
 .../plugins/biojs/drawrnajs/models/ncanno.js       |    36 +
 .../plugins/biojs/drawrnajs/models/residue.js      |    19 +
 .../plugins/biojs/drawrnajs/models/residuecol.js   |    21 +
 .../plugins/biojs/drawrnajs/models/structure.js    |   158 +
 .../plugins/biojs/drawrnajs/utils/.Rhistory        |     0
 .../plugins/biojs/drawrnajs/utils/lasso.js         |    34 +
 .../plugins/biojs/drawrnajs/utils/parsedbr.js      |    58 +
 .../plugins/biojs/drawrnajs/utils/spectrum.js      |  2287 +++
 .../plugins/biojs/drawrnajs/utils/style.js         |   235 +
 .../plugins/biojs/drawrnajs/views/annoview.js      |   129 +
 .../plugins/biojs/drawrnajs/views/optspanel.js     |   189 +
 .../plugins/biojs/drawrnajs/views/seqpanel.js      |    78 +
 .../plugins/biojs/drawrnajs/views/vispanel.js      |   218 +
 .../repository/plugins/cytoscape/cytoscape.js      |    62 +
 .../repository/plugins/jqplot/jquery.jqplot.css    |   261 +
 .../repository/plugins/jqplot/jquery.jqplot.js     |     3 +
 .../plugins/jqplot/jquery.jqplot.plugins.js        |   151 +
 .../static/repository/plugins/nvd3/nv.d3.css       |     1 +
 .../charts/static/repository/plugins/nvd3/nv.d3.js | 14364 +++++++++++++++++++
 .../charts/static/repository/plugins/pv/viewer.js  |    13 +
 .../charts/static/repository/registry.json         |     9 +
 .../charts/static/repository/utilities/jobs.js     |   101 +
 .../charts/static/repository/utilities/utils.js    |   123 +
 .../visualizations/benfred/venn/config.js          |    24 +
 .../visualizations/benfred/venn/logo.png           |   Bin 0 -> 10644 bytes
 .../visualizations/benfred/venn/wrapper.js         |    87 +
 .../visualizations/biojs/drawrnajs/config.js       |     9 +
 .../visualizations/biojs/drawrnajs/logo.png        |   Bin 0 -> 53524 bytes
 .../visualizations/biojs/drawrnajs/wrapper.js      |    27 +
 .../repository/visualizations/biojs/msa/config.js  |    14 +
 .../repository/visualizations/biojs/msa/logo.png   |   Bin 0 -> 28113 bytes
 .../repository/visualizations/biojs/msa/wrapper.js |    21 +
 .../visualizations/cytoscape/basic/config.js       |     9 +
 .../visualizations/cytoscape/basic/logo.png        |   Bin 0 -> 89725 bytes
 .../visualizations/cytoscape/basic/wrapper.js      |    56 +
 .../repository/visualizations/jqplot/bar/config.js |    19 +
 .../repository/visualizations/jqplot/bar/logo.png  |   Bin 0 -> 7073 bytes
 .../visualizations/jqplot/bar/wrapper.js           |    22 +
 .../visualizations/jqplot/boxplot/config.js        |    16 +
 .../visualizations/jqplot/boxplot/logo.png         |   Bin 0 -> 6973 bytes
 .../visualizations/jqplot/boxplot/wrapper.js       |   102 +
 .../visualizations/jqplot/common/config.js         |    22 +
 .../visualizations/jqplot/common/plot-config.js    |   110 +
 .../visualizations/jqplot/common/wrapper.js        |    98 +
 .../jqplot/histogram_discrete/config.js            |    14 +
 .../jqplot/histogram_discrete/logo.png             |   Bin 0 -> 5831 bytes
 .../jqplot/histogram_discrete/wrapper.js           |    26 +
 .../visualizations/jqplot/line/config.js           |    19 +
 .../repository/visualizations/jqplot/line/logo.png |   Bin 0 -> 8401 bytes
 .../visualizations/jqplot/line/wrapper.js          |     7 +
 .../visualizations/jqplot/scatter/config.js        |    18 +
 .../visualizations/jqplot/scatter/logo.png         |   Bin 0 -> 5379 bytes
 .../visualizations/jqplot/scatter/wrapper.js       |    18 +
 .../repository/visualizations/nvd3/bar/config.js   |    19 +
 .../repository/visualizations/nvd3/bar/logo.png    |   Bin 0 -> 7073 bytes
 .../repository/visualizations/nvd3/bar/wrapper.js  |     8 +
 .../visualizations/nvd3/bar_horizontal/config.js   |    19 +
 .../visualizations/nvd3/bar_horizontal/logo.png    |   Bin 0 -> 6020 bytes
 .../visualizations/nvd3/bar_horizontal/wrapper.js  |     8 +
 .../nvd3/bar_horizontal_stacked/config.js          |    20 +
 .../nvd3/bar_horizontal_stacked/logo.png           |   Bin 0 -> 5294 bytes
 .../nvd3/bar_horizontal_stacked/wrapper.js         |    11 +
 .../visualizations/nvd3/bar_stacked/config.js      |    20 +
 .../visualizations/nvd3/bar_stacked/logo.png       |   Bin 0 -> 6061 bytes
 .../visualizations/nvd3/bar_stacked/wrapper.js     |    11 +
 .../visualizations/nvd3/common/config.js           |    21 +
 .../visualizations/nvd3/common/wrapper.js          |    75 +
 .../visualizations/nvd3/histogram/config.js        |    27 +
 .../visualizations/nvd3/histogram/logo.png         |   Bin 0 -> 5831 bytes
 .../visualizations/nvd3/histogram/wrapper.js       |    24 +
 .../nvd3/histogram_discrete/config.js              |    17 +
 .../nvd3/histogram_discrete/logo.png               |   Bin 0 -> 5831 bytes
 .../nvd3/histogram_discrete/wrapper.js             |    24 +
 .../repository/visualizations/nvd3/line/config.js  |    20 +
 .../repository/visualizations/nvd3/line/logo.png   |   Bin 0 -> 8401 bytes
 .../repository/visualizations/nvd3/line/wrapper.js |     8 +
 .../visualizations/nvd3/line_focus/config.js       |    20 +
 .../visualizations/nvd3/line_focus/logo.png        |   Bin 0 -> 15003 bytes
 .../visualizations/nvd3/line_focus/wrapper.js      |     8 +
 .../repository/visualizations/nvd3/pie/config.js   |    73 +
 .../repository/visualizations/nvd3/pie/logo.png    |   Bin 0 -> 9448 bytes
 .../repository/visualizations/nvd3/pie/wrapper.js  |    69 +
 .../visualizations/nvd3/scatter/config.js          |    19 +
 .../visualizations/nvd3/scatter/logo.png           |   Bin 0 -> 5379 bytes
 .../visualizations/nvd3/scatter/wrapper.js         |    13 +
 .../visualizations/nvd3/stackedarea/config.js      |    21 +
 .../visualizations/nvd3/stackedarea/logo.png       |   Bin 0 -> 7866 bytes
 .../visualizations/nvd3/stackedarea/wrapper.js     |     8 +
 .../visualizations/nvd3/stackedarea_full/config.js |    21 +
 .../visualizations/nvd3/stackedarea_full/logo.png  |   Bin 0 -> 6378 bytes
 .../nvd3/stackedarea_full/wrapper.js               |    11 +
 .../nvd3/stackedarea_stream/config.js              |    22 +
 .../nvd3/stackedarea_stream/logo.png               |   Bin 0 -> 9571 bytes
 .../nvd3/stackedarea_stream/wrapper.js             |    11 +
 .../visualizations/others/example/config.js        |    16 +
 .../visualizations/others/example/logo.png         |   Bin 0 -> 7438 bytes
 .../visualizations/others/example/wrapper.js       |    39 +
 .../visualizations/others/heatmap/config.js        |    61 +
 .../others/heatmap/heatmap-parameters.js           |    35 +
 .../others/heatmap/heatmap-plugin.js               |   403 +
 .../visualizations/others/heatmap/logo.png         |   Bin 0 -> 14531 bytes
 .../visualizations/others/heatmap/wrapper.js       |    15 +
 .../others/heatmap_cluster/config.js               |     7 +
 .../visualizations/others/heatmap_cluster/logo.png |   Bin 0 -> 11367 bytes
 .../others/heatmap_cluster/wrapper.js              |    34 +
 .../repository/visualizations/pv/viewer/config.js  |   103 +
 .../repository/visualizations/pv/viewer/logo.png   |   Bin 0 -> 47810 bytes
 .../repository/visualizations/pv/viewer/wrapper.js |    38 +
 .../visualizations/utilities/tabular-datasets.js   |   121 +
 .../visualizations/utilities/tabular-form.js       |    79 +
 .../visualizations/utilities/tabular-utilities.js  |   302 +
 .../visualizations/charts/templates/charts.mako    |    67 +
 .../visualizations/charts/webpack.config.js        |    48 +
 .../visualizations/circster/config/circster.xml    |    26 +
 .../visualizations/common/templates/README.txt     |     8 +
 .../common/templates/config_utils.mako             |    46 +
 .../common/templates/script_entry_point.mako       |    10 +
 .../common/templates/visualization_base.mako       |    77 +
 config/plugins/visualizations/csg/config/csg.xml   |    18 +
 .../plugins/visualizations/csg/static/Detector.js  |    79 +
 .../visualizations/csg/static/OrbitControls.js     |   583 +
 .../plugins/visualizations/csg/static/PLYLoader.js |   307 +
 .../plugins/visualizations/csg/static/VTKLoader.js |   188 +
 .../visualizations/csg/static/dat.gui.min.js       |    94 +
 .../plugins/visualizations/csg/static/three.min.js |   835 ++
 .../plugins/visualizations/csg/templates/csg.mako  |   278 +
 config/plugins/visualizations/graphviz/README.md   |    71 +
 .../visualizations/graphviz/config/graphviz.xml    |    25 +
 .../visualizations/graphviz/static/css/style.css   |   197 +
 .../visualizations/graphviz/static/img/15xvbd5.png |   Bin 0 -> 174 bytes
 .../graphviz/static/img/lite-blue-check.png        |   Bin 0 -> 3101 bytes
 .../visualizations/graphviz/static/js/collapse.js  |   215 +
 .../graphviz/static/js/cytoscape.min.js            |    27 +
 .../visualizations/graphviz/static/js/graphVis.js  |   186 +
 .../graphviz/static/js/jquery.qtip.js              |  3451 +++++
 .../graphviz/static/js/tip_centerwindow.js         |   104 +
 .../graphviz/static/js/tip_followscroll.js         |    88 +
 .../graphviz/static/js/toolPanelFunctions.js       |   609 +
 .../graphviz/static/js/wz_tooltip.js               |  1301 ++
 .../graphviz/templates/graphviz.mako               |   173 +
 .../visualizations/phyloviz/config/phyloviz.xml    |    19 +
 .../visualizations/scatterplot/Gruntfile.js        |    46 +
 .../scatterplot/config/scatterplot.xml             |    16 +
 .../visualizations/scatterplot/package.json        |    23 +
 .../scatterplot/src/scatterplot-config-editor.js   |   387 +
 .../scatterplot/src/scatterplot-display.js         |   214 +
 .../scatterplot/src/scatterplot-model.js           |    39 +
 .../visualizations/scatterplot/src/scatterplot.js  |   287 +
 .../scatterplot/static/numeric-column-stats.js     |    89 +
 .../scatterplot/static/scatterplot-edit.js         |     1 +
 .../scatterplot/static/scatterplot.css             |   225 +
 .../scatterplot/static/worker-stats.js             |     5 +
 .../scatterplot/templates/scatterplot.mako         |   113 +
 .../visualizations/sweepster/config/sweepster.xml  |    25 +
 .../visualizations/trackster/config/trackster.xml  |    29 +
 config/plugins/visualizations/visualization.dtd    |   163 +
 .../demo/history_test1/config/history_test1.yml    |     5 +
 .../demo/history_test2/config/history_test2.yml    |     5 +
 .../demo/masthead_test/config/masthead_test.yml    |    10 +
 .../webhooks/demo/phdcomics/config/phdcomics.yaml  |     5 +
 .../webhooks/demo/phdcomics/helper/__init__.py     |    43 +
 .../webhooks/demo/phdcomics/static/script.js       |    55 +
 .../webhooks/demo/phdcomics/static/styles.css      |    66 +
 .../demo/trans_object/config/trans_object.yaml     |    12 +
 .../webhooks/demo/trans_object/helper/__init__.py  |     6 +
 config/plugins/webhooks/demo/xkcd/config/xkcd.yml  |     5 +
 config/plugins/webhooks/demo/xkcd/static/script.js |    56 +
 .../plugins/webhooks/demo/xkcd/static/styles.css   |    66 +
 config/reports.ini.sample                          |    91 +
 config/shed_data_manager_conf.xml.sample           |     3 +
 config/shed_tool_conf.xml.sample                   |     3 +
 config/shed_tool_data_table_conf.xml.sample        |     3 +
 config/tool_conf.xml.main                          |   134 +
 config/tool_conf.xml.sample                        |   122 +
 config/tool_data_table_conf.xml.sample             |   103 +
 config/tool_destinations.yml.sample                |   311 +
 config/tool_shed.ini.sample                        |   181 +
 config/tool_sheds_conf.xml.sample                  |     7 +
 config/workflow_schedulers_conf.xml.sample         |    14 +
 contrib/README                                     |    37 +
 contrib/collect_sge_job_timings.sh                 |   126 +
 contrib/edu.psu.galaxy.GalaxyServer.plist          |    22 +
 contrib/galaxy.debian-init                         |    59 +
 contrib/galaxy.fedora-init                         |   107 +
 contrib/galaxy.solaris-smf.xml                     |    75 +
 contrib/galaxy_config_merger.py                    |    92 +
 contrib/galaxy_reports.fedora-init                 |    95 +
 contrib/galaxy_supervisor.conf                     |    45 +
 contrib/gls.pl                                     |   204 +
 contrib/nagios/README                              |     1 +
 contrib/nagios/check_galaxy                        |    40 +
 contrib/nagios/check_galaxy.py                     |   394 +
 create_db.sh                                       |    12 +
 cron/README.txt                                    |    15 +
 cron/add_manual_builds.py                          |    53 +
 cron/build_chrom_db.py                             |    82 +
 cron/check_galaxy.sh                               |   222 +
 cron/cleanup_datasets.py                           |     6 +
 cron/parse_builds.py                               |    54 +
 cron/parse_builds_3_sites.py                       |    47 +
 cron/updateucsc.sh.sample                          |    90 +
 database/info.txt                                  |     1 +
 display_applications/biom/biom_simple.xml          |     7 +
 display_applications/ensembl/ensembl_bam.xml       |    21 +
 display_applications/ensembl/ensembl_gff.xml       |    69 +
 .../ensembl/ensembl_interval_as_bed.xml            |    69 +
 display_applications/gbrowse/gbrowse_gff.xml       |    29 +
 .../gbrowse/gbrowse_interval_as_bed.xml            |    29 +
 display_applications/gbrowse/gbrowse_wig.xml       |    29 +
 display_applications/igb/bam.xml                   |    13 +
 display_applications/igb/bb.xml                    |    12 +
 display_applications/igb/bed.xml                   |    15 +
 display_applications/igb/bedgraph.xml              |    15 +
 display_applications/igb/bigwig.xml                |    20 +
 display_applications/igb/gtf.xml                   |    15 +
 display_applications/igb/wig.xml                   |    20 +
 display_applications/igv/bam.xml                   |    94 +
 display_applications/igv/bigwig.xml                |    89 +
 display_applications/igv/gff.xml                   |    92 +
 display_applications/igv/interval_as_bed.xml       |    92 +
 display_applications/igv/vcf.xml                   |    94 +
 display_applications/iobio/bam.xml                 |     8 +
 display_applications/iobio/vcf.xml                 |     8 +
 display_applications/rviewer/bed.xml               |    17 +
 display_applications/rviewer/vcf.xml               |    17 +
 display_applications/ucsc/bam.xml                  |    17 +
 display_applications/ucsc/bigbed.xml               |    16 +
 display_applications/ucsc/bigwig.xml               |    16 +
 display_applications/ucsc/interval_as_bed.xml      |    23 +
 display_applications/ucsc/maf_customtrack.xml      |    15 +
 display_applications/ucsc/trackhub.xml             |     6 +
 display_applications/ucsc/vcf.xml                  |    17 +
 doc/Makefile                                       |   185 +
 doc/fix_schema_rst.sh                              |    11 +
 doc/parse_gx_xsd.py                                |   171 +
 doc/patch.py                                       |  1076 ++
 doc/schema_template.md                             |    79 +
 doc/source/_static/style.css                       |     3 +
 doc/source/_templates/layout.html                  |     5 +
 doc/source/admin/chat-0.png                        |   Bin 0 -> 6828 bytes
 doc/source/admin/chat-1.png                        |   Bin 0 -> 8988 bytes
 doc/source/admin/chat-2.png                        |   Bin 0 -> 9068 bytes
 doc/source/admin/chat-3.png                        |   Bin 0 -> 7596 bytes
 doc/source/admin/chat-4.png                        |   Bin 0 -> 2292 bytes
 doc/source/admin/chat.rst                          |   137 +
 doc/source/admin/communication_server.png          |   Bin 0 -> 44707 bytes
 doc/source/admin/communication_server_rooms.png    |   Bin 0 -> 38249 bytes
 doc/source/admin/conda_faq.rst                     |   344 +
 doc/source/admin/dependency_resolvers.rst          |   194 +
 doc/source/admin/framework_dependencies.rst        |   641 +
 doc/source/admin/grt.rst                           |    41 +
 doc/source/admin/images_webhooks/history-menu.png  |   Bin 0 -> 207455 bytes
 doc/source/admin/images_webhooks/masthead.png      |   Bin 0 -> 152891 bytes
 .../images_webhooks/masthead_trans_object.png      |   Bin 0 -> 155709 bytes
 doc/source/admin/images_webhooks/tool.png          |   Bin 0 -> 341938 bytes
 doc/source/admin/images_webhooks/workflow.png      |   Bin 0 -> 251450 bytes
 doc/source/admin/index.rst                         |    27 +
 doc/source/admin/interactive_environments.png      |   Bin 0 -> 252388 bytes
 doc/source/admin/interactive_environments.rst      |   259 +
 doc/source/admin/interactive_environments.svg      |  2536 ++++
 doc/source/admin/mulled_containers.rst             |   128 +
 doc/source/admin/useful_scripts.rst                |    19 +
 doc/source/admin/webhooks.rst                      |   186 +
 doc/source/api/guidelines.rst                      |    70 +
 doc/source/api/quickstart.rst                      |   185 +
 doc/source/api_doc.rst                             |    29 +
 doc/source/conf.py                                 |   294 +
 doc/source/dev/build_a_job_runner.rst              |   350 +
 doc/source/dev/faq.rst                             |    28 +
 doc/source/dev/index.rst                           |    18 +
 doc/source/dev/inherit.png                         |   Bin 0 -> 9341 bytes
 doc/source/dev/interactive_environments.rst        |   479 +
 .../dev/interactive_environments_success.png       |   Bin 0 -> 8685 bytes
 doc/source/dev/queue.png                           |   Bin 0 -> 1447 bytes
 doc/source/dev/queue_b.png                         |   Bin 0 -> 4581 bytes
 doc/source/dev/runner_diag.png                     |   Bin 0 -> 197974 bytes
 doc/source/index.rst                               |    63 +
 doc/source/lib/galaxy.actions.rst                  |    20 +
 doc/source/lib/galaxy.auth.providers.rst           |    44 +
 doc/source/lib/galaxy.auth.rst                     |    15 +
 doc/source/lib/galaxy.dataset_collections.rst      |    67 +
 .../lib/galaxy.dataset_collections.types.rst       |    28 +
 doc/source/lib/galaxy.datatypes.converters.rst     |   258 +
 doc/source/lib/galaxy.datatypes.dataproviders.rst  |    84 +
 .../lib/galaxy.datatypes.display_applications.rst  |    36 +
 doc/source/lib/galaxy.datatypes.rst                |   206 +
 doc/source/lib/galaxy.datatypes.util.rst           |    36 +
 doc/source/lib/galaxy.dependencies.rst             |     8 +
 doc/source/lib/galaxy.eggs.rst                     |     8 +
 doc/source/lib/galaxy.exceptions.rst               |    20 +
 .../galaxy.external_services.result_handlers.rst   |    20 +
 doc/source/lib/galaxy.external_services.rst        |    43 +
 doc/source/lib/galaxy.forms.rst                    |    20 +
 doc/source/lib/galaxy.jobs.actions.rst             |    20 +
 doc/source/lib/galaxy.jobs.deferred.rst            |    36 +
 doc/source/lib/galaxy.jobs.metrics.collectl.rst    |    44 +
 .../lib/galaxy.jobs.metrics.instrumenters.rst      |    60 +
 doc/source/lib/galaxy.jobs.metrics.rst             |    28 +
 doc/source/lib/galaxy.jobs.rst                     |   103 +
 doc/source/lib/galaxy.jobs.runners.rst             |    92 +
 .../lib/galaxy.jobs.runners.state_handlers.rst     |    20 +
 .../lib/galaxy.jobs.runners.util.cli.job.rst       |    36 +
 doc/source/lib/galaxy.jobs.runners.util.cli.rst    |    28 +
 .../lib/galaxy.jobs.runners.util.cli.shell.rst     |    28 +
 doc/source/lib/galaxy.jobs.runners.util.condor.rst |     8 +
 .../lib/galaxy.jobs.runners.util.job_script.rst    |     8 +
 doc/source/lib/galaxy.jobs.runners.util.rst        |    45 +
 doc/source/lib/galaxy.jobs.splitters.rst           |    28 +
 doc/source/lib/galaxy.managers.rst                 |   228 +
 doc/source/lib/galaxy.model.migrate.rst            |    20 +
 doc/source/lib/galaxy.model.orm.rst                |    44 +
 doc/source/lib/galaxy.model.rst                    |    69 +
 .../lib/galaxy.model.tool_shed_install.migrate.rst |    20 +
 doc/source/lib/galaxy.model.tool_shed_install.rst  |    27 +
 doc/source/lib/galaxy.objectstore.rst              |    52 +
 doc/source/lib/galaxy.openid.rst                   |    20 +
 doc/source/lib/galaxy.quota.rst                    |     8 +
 doc/source/lib/galaxy.rst                          |    91 +
 doc/source/lib/galaxy.sample_tracking.rst          |    44 +
 doc/source/lib/galaxy.security.rst                 |    28 +
 doc/source/lib/galaxy.tags.rst                     |     8 +
 doc/source/lib/galaxy.tools.actions.rst            |    60 +
 doc/source/lib/galaxy.tools.data.rst               |     8 +
 doc/source/lib/galaxy.tools.data_manager.rst       |    20 +
 doc/source/lib/galaxy.tools.deps.resolvers.rst     |    68 +
 doc/source/lib/galaxy.tools.deps.rst               |    83 +
 doc/source/lib/galaxy.tools.filters.rst            |     8 +
 doc/source/lib/galaxy.tools.imp_exp.rst            |    28 +
 doc/source/lib/galaxy.tools.linters.rst            |    82 +
 doc/source/lib/galaxy.tools.parameters.rst         |   106 +
 doc/source/lib/galaxy.tools.parser.rst             |    52 +
 doc/source/lib/galaxy.tools.rst                    |   102 +
 doc/source/lib/galaxy.tools.search.rst             |     8 +
 doc/source/lib/galaxy.tools.toolbox.filters.rst    |     8 +
 doc/source/lib/galaxy.tools.toolbox.lineages.rst   |    44 +
 doc/source/lib/galaxy.tools.toolbox.rst            |    60 +
 doc/source/lib/galaxy.tools.util.galaxyops.rst     |     8 +
 doc/source/lib/galaxy.tools.util.rst               |    27 +
 doc/source/lib/galaxy.util.backports.rst           |     7 +
 doc/source/lib/galaxy.util.log.rst                 |    20 +
 doc/source/lib/galaxy.util.pastescript.rst         |    28 +
 doc/source/lib/galaxy.util.rst                     |   309 +
 ...alaxy.visualization.data_providers.phyloviz.rst |    44 +
 .../lib/galaxy.visualization.data_providers.rst    |    51 +
 doc/source/lib/galaxy.visualization.genome.rst     |     8 +
 doc/source/lib/galaxy.visualization.plugins.rst    |    52 +
 doc/source/lib/galaxy.visualization.rst            |    30 +
 doc/source/lib/galaxy.visualization.tracks.rst     |     8 +
 doc/source/lib/galaxy.web.base.controllers.rst     |    20 +
 doc/source/lib/galaxy.web.base.rst                 |    43 +
 doc/source/lib/galaxy.web.framework.helpers.rst    |    20 +
 doc/source/lib/galaxy.web.framework.middleware.rst |    84 +
 doc/source/lib/galaxy.web.framework.rst            |    60 +
 doc/source/lib/galaxy.web.proxy.rst                |    20 +
 doc/source/lib/galaxy.web.rst                      |    54 +
 doc/source/lib/galaxy.web.security.rst             |     8 +
 doc/source/lib/galaxy.webapps.galaxy.api.rst       |   324 +
 .../lib/galaxy.webapps.galaxy.controllers.rst      |   236 +
 doc/source/lib/galaxy.webapps.galaxy.rst           |    28 +
 .../lib/galaxy.webapps.reports.controllers.rst     |    76 +
 doc/source/lib/galaxy.webapps.reports.rst          |    43 +
 doc/source/lib/galaxy.webapps.rst                  |    29 +
 doc/source/lib/galaxy.webapps.tool_shed.api.rst    |    76 +
 .../lib/galaxy.webapps.tool_shed.controllers.rst   |    68 +
 ...laxy.webapps.tool_shed.framework.middleware.rst |    28 +
 .../lib/galaxy.webapps.tool_shed.framework.rst     |    15 +
 .../lib/galaxy.webapps.tool_shed.model.migrate.rst |    20 +
 doc/source/lib/galaxy.webapps.tool_shed.model.rst  |    27 +
 doc/source/lib/galaxy.webapps.tool_shed.rst        |    49 +
 doc/source/lib/galaxy.webapps.tool_shed.search.rst |    28 +
 .../lib/galaxy.webapps.tool_shed.security.rst      |     8 +
 doc/source/lib/galaxy.webapps.tool_shed.util.rst   |    36 +
 doc/source/lib/galaxy.work.rst                     |    20 +
 doc/source/lib/galaxy.workflow.rst                 |    75 +
 doc/source/lib/galaxy.workflow.schedulers.rst      |    20 +
 doc/source/lib/galaxy_ext.metadata.rst             |    20 +
 doc/source/lib/galaxy_ext.rst                      |    15 +
 doc/source/lib/galaxy_utils.rst                    |    15 +
 doc/source/lib/galaxy_utils.sequence.rst           |    52 +
 doc/source/lib/log_tempfile.rst                    |     7 +
 doc/source/lib/mimeparse.rst                       |     7 +
 doc/source/lib/modules.rst                         |    13 +
 doc/source/lib/psyco_full.rst                      |     7 +
 doc/source/lib/tool_shed.capsule.rst               |    20 +
 .../lib/tool_shed.dependencies.repository.rst      |    20 +
 doc/source/lib/tool_shed.dependencies.rst          |    28 +
 doc/source/lib/tool_shed.dependencies.tool.rst     |    20 +
 .../lib/tool_shed.galaxy_install.datatypes.rst     |    20 +
 doc/source/lib/tool_shed.galaxy_install.grids.rst  |    20 +
 .../lib/tool_shed.galaxy_install.metadata.rst      |    20 +
 .../lib/tool_shed.galaxy_install.migrate.rst       |    28 +
 ...shed.galaxy_install.repository_dependencies.rst |    20 +
 doc/source/lib/tool_shed.galaxy_install.rst        |    74 +
 ...hed.galaxy_install.tool_dependencies.recipe.rst |    60 +
 .../tool_shed.galaxy_install.tool_dependencies.rst |    27 +
 doc/source/lib/tool_shed.galaxy_install.tools.rst  |    28 +
 ...tool_shed.galaxy_install.utility_containers.rst |     8 +
 doc/source/lib/tool_shed.grids.rst                 |    52 +
 doc/source/lib/tool_shed.managers.rst              |    28 +
 doc/source/lib/tool_shed.metadata.rst              |    28 +
 doc/source/lib/tool_shed.repository_types.rst      |    60 +
 doc/source/lib/tool_shed.rst                       |    44 +
 doc/source/lib/tool_shed.tools.rst                 |    36 +
 doc/source/lib/tool_shed.util.rst                  |   156 +
 doc/source/lib/tool_shed.utility_containers.rst    |    20 +
 doc/source/project/issues.rst                      |   234 +
 doc/source/project/organization.rst                |   199 +
 doc/source/releases/13.01_announce.rst             |    11 +
 doc/source/releases/13.02_announce.rst             |    11 +
 doc/source/releases/13.04_announce.rst             |    11 +
 doc/source/releases/13.06_announce.rst             |    11 +
 doc/source/releases/13.08_announce.rst             |    11 +
 doc/source/releases/13.11_announce.rst             |    11 +
 doc/source/releases/14.02_announce.rst             |    11 +
 doc/source/releases/14.04_announce.rst             |    11 +
 doc/source/releases/14.06_announce.rst             |    11 +
 doc/source/releases/14.08_announce.rst             |    11 +
 doc/source/releases/14.10_announce.rst             |    11 +
 doc/source/releases/15.01_announce.rst             |    11 +
 doc/source/releases/15.03_announce.rst             |    11 +
 doc/source/releases/15.05.rst                      |   356 +
 doc/source/releases/15.05_announce.rst             |    58 +
 doc/source/releases/15.07.rst                      |   548 +
 doc/source/releases/15.07_announce.rst             |    70 +
 doc/source/releases/15.10.rst                      |   641 +
 doc/source/releases/15.10_announce.rst             |    85 +
 doc/source/releases/16.01.rst                      |   782 +
 doc/source/releases/16.01_announce.rst             |   162 +
 doc/source/releases/16.04.rst                      |   867 ++
 doc/source/releases/16.04_announce.rst             |   120 +
 doc/source/releases/16.07.rst                      |   787 +
 doc/source/releases/16.07_announce.rst             |    59 +
 doc/source/releases/16.10.rst                      |   693 +
 doc/source/releases/16.10_announce.rst             |    84 +
 doc/source/releases/17.01_announce.rst             |    10 +
 doc/source/releases/_header.rst                    |     3 +
 doc/source/releases/_thanks.rst                    |     7 +
 doc/source/releases/index.rst                      |    27 +
 doc/source/releases/older_releases.rst             |    11 +
 .../slideshow/architecture/galaxy_architecture.md  |  1341 ++
 .../architecture/images/backbone-model-view.svg    |   152 +
 .../architecture/images/cluster_support.svg        |     4 +
 .../architecture/images/data_managers.svg          |     4 +
 .../slideshow/architecture/images/docker-chart.png |   Bin 0 -> 185452 bytes
 .../slideshow/architecture/images/family/team.png  |   Bin 0 -> 91085 bytes
 .../architecture/images/galaxy_schema.png          |   Bin 0 -> 2937500 bytes
 doc/source/slideshow/architecture/images/hda.svg   |     4 +
 .../architecture/images/hda_dataset.plantuml.svg   |     1 +
 .../architecture/images/hda_dataset.plantuml.txt   |    23 +
 .../architecture/images/hda_hdca.plantuml.svg      |     1 +
 .../architecture/images/hda_hdca.plantuml.txt      |    29 +
 .../slideshow/architecture/images/jsload.png       |   Bin 0 -> 278367 bytes
 .../slideshow/architecture/images/libraries.svg    |     4 +
 .../architecture/images/library_permissions.svg    |     4 +
 .../architecture/images/objectstore.plantuml.svg   |     1 +
 .../architecture/images/objectstore.plantuml.txt   |    28 +
 .../architecture/images/objectstore_diagram.svg    |     4 +
 .../architecture/images/plantuml_options.txt       |    34 +
 .../slideshow/architecture/images/sequence.json    |     3 +
 .../architecture/images/server_client.plantuml.svg |     1 +
 .../architecture/images/server_client.plantuml.txt |    20 +
 .../images/server_client_old.plantuml.svg          |     1 +
 .../images/server_client_old.plantuml.txt          |    14 +
 .../architecture/images/sqla_arch_small.png        |   Bin 0 -> 42731 bytes
 .../architecture/images/usegalaxy_webservers.svg   |     4 +
 .../slideshow/architecture/images/usegalaxyorg.svg |     4 +
 .../architecture/images/webapp.plantuml.svg        |     1 +
 .../architecture/images/webapp.plantuml.txt        |    25 +
 .../architecture/images/what-is-webpack.svg        |   529 +
 .../architecture/images/workflow_definition.svg    |     4 +
 .../slideshow/architecture/images/workflow_run.svg |     4 +
 .../slideshow/architecture/images/wsgi_app.svg     |     4 +
 .../slideshow/architecture/images/wsgi_request.svg |     4 +
 .../slideshow/architecture/remark-latest.min.js    |    14 +
 doc/source/slideshow/architecture/style.css        |   210 +
 doc/source/ts_api_doc.rst                          |    17 +
 external_service_types/454_life_sciences.xml       |    26 +
 .../applied_biosystems_solid.xml                   |    38 +
 .../pacific_biosciences_smrt_portal.xml            |   134 +
 .../simple_unknown_sequencer.xml                   |    26 +
 extract_dataset_parts.sh                           |     8 +
 lib/galaxy/__init__.py                             |    20 +
 lib/galaxy/actions/__init__.py                     |     0
 lib/galaxy/actions/admin.py                        |   200 +
 lib/galaxy/app.py                                  |   222 +
 lib/galaxy/auth/__init__.py                        |   174 +
 lib/galaxy/auth/providers/__init__.py              |    58 +
 lib/galaxy/auth/providers/alwaysreject.py          |    33 +
 lib/galaxy/auth/providers/ldap_ad.py               |   202 +
 lib/galaxy/auth/providers/localdb.py               |    32 +
 lib/galaxy/auth/providers/pam_auth.py              |   152 +
 lib/galaxy/config.py                               |   982 ++
 lib/galaxy/dataset_collections/__init__.py         |     0
 lib/galaxy/dataset_collections/builder.py          |    90 +
 lib/galaxy/dataset_collections/matching.py         |    85 +
 lib/galaxy/dataset_collections/registry.py         |    24 +
 lib/galaxy/dataset_collections/structure.py        |   134 +
 lib/galaxy/dataset_collections/subcollections.py   |    25 +
 lib/galaxy/dataset_collections/type_description.py |   127 +
 lib/galaxy/dataset_collections/types/__init__.py   |    23 +
 lib/galaxy/dataset_collections/types/list.py       |    20 +
 lib/galaxy/dataset_collections/types/paired.py     |    46 +
 lib/galaxy/datatypes/__init__.py                   |     0
 lib/galaxy/datatypes/assembly.py                   |   235 +
 lib/galaxy/datatypes/binary.py                     |  1438 ++
 lib/galaxy/datatypes/blast.py                      |   311 +
 lib/galaxy/datatypes/checkers.py                   |    25 +
 lib/galaxy/datatypes/chrominfo.py                  |    11 +
 .../datatypes/constructive_solid_geometry.py       |   467 +
 lib/galaxy/datatypes/converters/__init__.py        |     0
 lib/galaxy/datatypes/converters/bam_to_bai.xml     |    16 +
 .../converters/bam_to_bigwig_converter.xml         |    27 +
 .../converters/bcf_bgzip_to_bcf_converter.py       |    22 +
 .../converters/bcf_bgzip_to_bcf_converter.xml      |    14 +
 .../converters/bcf_to_bcf_bgzip_converter.py       |    22 +
 .../converters/bcf_to_bcf_bgzip_converter.xml      |    14 +
 .../bed_gff_or_vcf_to_bigwig_converter.xml         |    34 +
 .../converters/bed_to_bgzip_converter.xml          |    14 +
 .../datatypes/converters/bed_to_fli_converter.xml  |    13 +
 .../datatypes/converters/bed_to_gff_converter.py   |    78 +
 .../datatypes/converters/bed_to_gff_converter.xml  |    13 +
 .../converters/bed_to_interval_index_converter.xml |    14 +
 .../converters/bed_to_tabix_converter.xml          |    15 +
 .../converters/bedgraph_to_array_tree_converter.py |    56 +
 .../bedgraph_to_array_tree_converter.xml           |    14 +
 .../converters/bedgraph_to_bigwig_converter.xml    |    15 +
 lib/galaxy/datatypes/converters/bgzip.py           |    51 +
 .../converters/cml_to_inchi_converter.xml          |    22 +
 .../datatypes/converters/cml_to_mol2_converter.xml |    22 +
 .../datatypes/converters/cml_to_sdf_converter.xml  |    22 +
 .../datatypes/converters/cml_to_smi_converter.xml  |    48 +
 .../converters/encodepeak_to_bgzip_converter.xml   |    19 +
 .../converters/encodepeak_to_tabix_converter.xml   |    20 +
 lib/galaxy/datatypes/converters/fasta_to_2bit.xml  |    17 +
 .../fasta_to_bowtie_base_index_converter.xml       |    22 +
 .../fasta_to_bowtie_color_index_converter.xml      |    23 +
 lib/galaxy/datatypes/converters/fasta_to_len.py    |    53 +
 lib/galaxy/datatypes/converters/fasta_to_len.xml   |    13 +
 .../converters/fasta_to_tabular_converter.py       |    52 +
 .../converters/fasta_to_tabular_converter.xml      |    13 +
 lib/galaxy/datatypes/converters/fastq_to_fqtoc.py  |    50 +
 lib/galaxy/datatypes/converters/fastq_to_fqtoc.xml |    13 +
 .../converters/fastqsolexa_to_fasta_converter.py   |    55 +
 .../converters/fastqsolexa_to_fasta_converter.xml  |    12 +
 .../converters/fastqsolexa_to_qual_converter.py    |    98 +
 .../converters/fastqsolexa_to_qual_converter.xml   |    11 +
 .../datatypes/converters/gff_to_bed_converter.py   |    47 +
 .../datatypes/converters/gff_to_bed_converter.xml  |    13 +
 .../converters/gff_to_bgzip_converter.xml          |    14 +
 .../datatypes/converters/gff_to_fli_converter.xml  |    13 +
 .../converters/gff_to_interval_index_converter.py  |    41 +
 .../converters/gff_to_interval_index_converter.xml |    14 +
 .../converters/gff_to_tabix_converter.xml          |    15 +
 .../converters/inchi_to_cml_converter.xml          |    22 +
 .../converters/inchi_to_mol2_converter.xml         |    22 +
 .../converters/inchi_to_mol_converter.xml          |    22 +
 .../converters/inchi_to_sdf_converter.xml          |    22 +
 .../converters/inchi_to_smi_converter.xml          |    22 +
 .../converters/interval_to_bed12_converter.xml     |    15 +
 .../converters/interval_to_bed6_converter.xml      |    15 +
 .../converters/interval_to_bed_converter.py        |    66 +
 .../converters/interval_to_bed_converter.xml       |    15 +
 .../converters/interval_to_bedstrict_converter.py  |   158 +
 .../converters/interval_to_bedstrict_converter.xml |    15 +
 .../converters/interval_to_bgzip_converter.xml     |    19 +
 .../converters/interval_to_bigwig_converter.xml    |    37 +
 .../datatypes/converters/interval_to_coverage.py   |   151 +
 .../datatypes/converters/interval_to_coverage.xml  |    18 +
 lib/galaxy/datatypes/converters/interval_to_fli.py |   107 +
 .../interval_to_interval_index_converter.py        |    51 +
 .../interval_to_interval_index_converter.xml       |    19 +
 .../converters/interval_to_tabix_converter.py      |    41 +
 .../converters/interval_to_tabix_converter.xml     |    20 +
 .../datatypes/converters/len_to_linecount.xml      |    13 +
 .../datatypes/converters/lped_to_fped_converter.py |   111 +
 .../converters/lped_to_fped_converter.xml          |    15 +
 .../datatypes/converters/lped_to_pbed_converter.py |   113 +
 .../converters/lped_to_pbed_converter.xml          |    16 +
 .../datatypes/converters/maf_to_fasta_converter.py |    35 +
 .../converters/maf_to_fasta_converter.xml          |    15 +
 .../converters/maf_to_interval_converter.py        |    35 +
 .../converters/maf_to_interval_converter.xml       |    15 +
 .../datatypes/converters/mol2_to_cml_converter.xml |    22 +
 .../converters/mol2_to_inchi_converter.xml         |    22 +
 .../datatypes/converters/mol2_to_mol_converter.xml |    22 +
 .../datatypes/converters/mol2_to_sdf_converter.xml |    22 +
 .../datatypes/converters/mol2_to_smi_converter.xml |    22 +
 .../datatypes/converters/mol_to_cml_converter.xml  |    21 +
 .../converters/mol_to_inchi_converter.xml          |    21 +
 .../datatypes/converters/mol_to_mol2_converter.xml |    21 +
 .../datatypes/converters/mol_to_smi_converter.xml  |    21 +
 .../converters/pbed_ldreduced_converter.py         |   115 +
 .../converters/pbed_ldreduced_converter.xml        |    18 +
 .../datatypes/converters/pbed_to_lped_converter.py |    81 +
 .../converters/pbed_to_lped_converter.xml          |    16 +
 .../picard_interval_list_to_bed6_converter.py      |    45 +
 .../picard_interval_list_to_bed6_converter.xml     |    12 +
 .../pileup_to_interval_index_converter.py          |    36 +
 .../pileup_to_interval_index_converter.xml         |    15 +
 .../converters/ref_to_seq_taxonomy_converter.py    |    28 +
 .../converters/ref_to_seq_taxonomy_converter.xml   |    12 +
 lib/galaxy/datatypes/converters/sam_to_bam.py      |   116 +
 lib/galaxy/datatypes/converters/sam_to_bam.xml     |    20 +
 .../converters/sam_to_bigwig_converter.xml         |    25 +
 .../datatypes/converters/sdf_to_cml_converter.xml  |    22 +
 .../converters/sdf_to_inchi_converter.xml          |    22 +
 .../datatypes/converters/sdf_to_mol2_converter.xml |    22 +
 .../datatypes/converters/sdf_to_smi_converter.xml  |    27 +
 .../datatypes/converters/smi_to_cml_converter.xml  |    22 +
 .../converters/smi_to_inchi_converter.xml          |    22 +
 .../datatypes/converters/smi_to_mol2_converter.xml |    22 +
 .../datatypes/converters/smi_to_mol_converter.xml  |    22 +
 .../datatypes/converters/smi_to_sdf_converter.xml  |    22 +
 .../datatypes/converters/smi_to_smi_converter.xml  |    48 +
 .../datatypes/converters/tabular_to_dbnsfp.py      |    35 +
 .../datatypes/converters/tabular_to_dbnsfp.xml     |    12 +
 .../converters/vcf_bgzip_to_tabix_converter.xml    |    14 +
 .../converters/vcf_to_bgzip_converter.xml          |    14 +
 .../converters/vcf_to_interval_index_converter.py  |    35 +
 .../converters/vcf_to_interval_index_converter.xml |    14 +
 .../converters/vcf_to_tabix_converter.xml          |    15 +
 .../datatypes/converters/vcf_to_vcf_bgzip.py       |    25 +
 .../converters/vcf_to_vcf_bgzip_converter.xml      |    14 +
 .../converters/wig_to_bigwig_converter.xml         |    20 +
 .../converters/wiggle_to_array_tree_converter.py   |    30 +
 .../converters/wiggle_to_array_tree_converter.xml  |    14 +
 .../converters/wiggle_to_simple_converter.py       |    49 +
 .../converters/wiggle_to_simple_converter.xml      |    11 +
 lib/galaxy/datatypes/coverage.py                   |    55 +
 lib/galaxy/datatypes/data.py                       |  1018 ++
 lib/galaxy/datatypes/dataproviders/__init__.py     |    31 +
 lib/galaxy/datatypes/dataproviders/base.py         |   317 +
 lib/galaxy/datatypes/dataproviders/chunk.py        |    79 +
 lib/galaxy/datatypes/dataproviders/column.py       |   357 +
 lib/galaxy/datatypes/dataproviders/dataset.py      |   782 +
 lib/galaxy/datatypes/dataproviders/decorators.py   |   152 +
 lib/galaxy/datatypes/dataproviders/exceptions.py   |    34 +
 lib/galaxy/datatypes/dataproviders/external.py     |   163 +
 lib/galaxy/datatypes/dataproviders/hierarchy.py    |   138 +
 lib/galaxy/datatypes/dataproviders/line.py         |   270 +
 .../datatypes/display_applications/__init__.py     |     3 +
 .../datatypes/display_applications/application.py  |   345 +
 .../datatypes/display_applications/parameters.py   |   249 +
 lib/galaxy/datatypes/display_applications/util.py  |    32 +
 lib/galaxy/datatypes/genetics.py                   |   821 ++
 lib/galaxy/datatypes/graph.py                      |   160 +
 lib/galaxy/datatypes/images.py                     |   272 +
 lib/galaxy/datatypes/interval.py                   |  1572 ++
 lib/galaxy/datatypes/metadata.py                   |    46 +
 lib/galaxy/datatypes/molecules.py                  |   820 ++
 lib/galaxy/datatypes/mothur.py                     |   935 ++
 lib/galaxy/datatypes/msa.py                        |   206 +
 lib/galaxy/datatypes/neo4j.py                      |   134 +
 lib/galaxy/datatypes/ngsindex.py                   |    77 +
 lib/galaxy/datatypes/proteomics.py                 |   419 +
 lib/galaxy/datatypes/qualityscore.py               |   132 +
 lib/galaxy/datatypes/registry.py                   |   877 ++
 lib/galaxy/datatypes/sequence.py                   |  1068 ++
 lib/galaxy/datatypes/set_metadata_tool.xml         |    18 +
 lib/galaxy/datatypes/sniff.py                      |   484 +
 lib/galaxy/datatypes/tabular.py                    |  1139 ++
 lib/galaxy/datatypes/test/1.bam                    |   Bin 0 -> 3592 bytes
 lib/galaxy/datatypes/test/1.bed                    |    65 +
 lib/galaxy/datatypes/test/1.fastq                  |     8 +
 lib/galaxy/datatypes/test/1.fastqsanger            |     8 +
 lib/galaxy/datatypes/test/1.fastqsolexa            |     8 +
 lib/galaxy/datatypes/test/1.gg                     |     4 +
 lib/galaxy/datatypes/test/1.sam                    |    97 +
 lib/galaxy/datatypes/test/1.sff                    |   Bin 0 -> 100 bytes
 lib/galaxy/datatypes/test/10col.pileup             |    30 +
 lib/galaxy/datatypes/test/2.fastq                  |     8 +
 lib/galaxy/datatypes/test/2.fastqsanger            |     8 +
 lib/galaxy/datatypes/test/2.fastqsolexa            |     8 +
 lib/galaxy/datatypes/test/3unsorted.bam            |   Bin 0 -> 1666 bytes
 lib/galaxy/datatypes/test/4.bed                    |     1 +
 lib/galaxy/datatypes/test/5e5z.pdb                 |   357 +
 lib/galaxy/datatypes/test/6col.pileup              |    30 +
 lib/galaxy/datatypes/test/NuBBE_1_obabel_3D.pdbqt  |    55 +
 lib/galaxy/datatypes/test/alignment.axt            |    36 +
 lib/galaxy/datatypes/test/alignment.lav            |   178 +
 lib/galaxy/datatypes/test/complete.bed             |     2 +
 lib/galaxy/datatypes/test/drugbank_drugs.cml       |   385 +
 lib/galaxy/datatypes/test/drugbank_drugs.inchi     |     2 +
 lib/galaxy/datatypes/test/drugbank_drugs.mol2      |   354 +
 lib/galaxy/datatypes/test/drugbank_drugs.sdf       |   491 +
 lib/galaxy/datatypes/test/drugbank_drugs.smi       |     2 +
 lib/galaxy/datatypes/test/file.html                |    74 +
 lib/galaxy/datatypes/test/gff_version_3.gff        |   150 +
 lib/galaxy/datatypes/test/interval.interval        |     5 +
 lib/galaxy/datatypes/test/interval1.bed            |    11 +
 lib/galaxy/datatypes/test/issue1818.tabular        |    84 +
 .../test/megablast_xml_parser_test1.blastxml       |  4117 ++++++
 .../test/mothur_datatypetest_false.mothur.axes     |    42 +
 .../test/mothur_datatypetest_false.mothur.filter   |     1 +
 .../test/mothur_datatypetest_false.mothur.freq     |    42 +
 .../mothur_datatypetest_false.mothur.lower.dist    |    84 +
 .../test/mothur_datatypetest_false.mothur.map      |    18 +
 .../test/mothur_datatypetest_false.mothur.oligos   |    12 +
 .../test/mothur_datatypetest_false.mothur.otu      |    37 +
 .../mothur_datatypetest_false.mothur.pair.dist     |    42 +
 .../test/mothur_datatypetest_false.mothur.quan     |    42 +
 .../mothur_datatypetest_false.mothur.ref.taxonomy  |    42 +
 .../test/mothur_datatypetest_false.mothur.sabund   |     5 +
 .../test/mothur_datatypetest_false.mothur.shared   |     3 +
 .../mothur_datatypetest_false.mothur.square.dist   |    42 +
 .../test/mothur_datatypetest_true.mothur.axes      |    42 +
 .../test/mothur_datatypetest_true.mothur.filter    |     1 +
 .../test/mothur_datatypetest_true.mothur.freq      |    42 +
 .../mothur_datatypetest_true.mothur.lower.dist     |    99 +
 .../test/mothur_datatypetest_true.mothur.map       |    18 +
 .../test/mothur_datatypetest_true.mothur.oligos    |    12 +
 .../test/mothur_datatypetest_true.mothur.otu       |    37 +
 .../test/mothur_datatypetest_true.mothur.pair.dist |    42 +
 .../test/mothur_datatypetest_true.mothur.quan      |    42 +
 .../mothur_datatypetest_true.mothur.ref.taxonomy   |    42 +
 .../test/mothur_datatypetest_true.mothur.sabund    |     5 +
 .../test/mothur_datatypetest_true.mothur.shared    |     3 +
 .../mothur_datatypetest_true.mothur.square.dist    |    99 +
 lib/galaxy/datatypes/test/oxli_countgraph.oxlicg   |   Bin 0 -> 24037 bytes
 lib/galaxy/datatypes/test/oxli_graphlabels.oxligl  |   Bin 0 -> 178 bytes
 lib/galaxy/datatypes/test/oxli_nodegraph.oxling    |   Bin 0 -> 28 bytes
 lib/galaxy/datatypes/test/oxli_stoptags.oxlist     |   Bin 0 -> 1178 bytes
 lib/galaxy/datatypes/test/oxli_subset.oxliss       |   Bin 0 -> 2394 bytes
 lib/galaxy/datatypes/test/oxli_tagset.oxlits       |   Bin 0 -> 1078 bytes
 lib/galaxy/datatypes/test/q.fps                    |     7 +
 lib/galaxy/datatypes/test/sequence.csfasta         |    21 +
 lib/galaxy/datatypes/test/sequence.fasta           |     2 +
 lib/galaxy/datatypes/test/sequence.maf             |     8 +
 lib/galaxy/datatypes/test/sequence.qual            |     9 +
 lib/galaxy/datatypes/test/sequence.qual454         |    16 +
 lib/galaxy/datatypes/test/sequence.qualsolid       |    12 +
 .../test/tblastn_four_human_vs_rhodopsin.xml       |   741 +
 lib/galaxy/datatypes/test/temp.txt                 |     2 +
 lib/galaxy/datatypes/test/test.CEL                 |   Bin 0 -> 4000 bytes
 lib/galaxy/datatypes/test/test.gff                 |    35 +
 lib/galaxy/datatypes/test/test.gtf                 |   500 +
 lib/galaxy/datatypes/test/test.idpDB               |   Bin 0 -> 55296 bytes
 lib/galaxy/datatypes/test/test.mz5                 |   Bin 0 -> 173408 bytes
 lib/galaxy/datatypes/test/test_ensembl.tab         |     8 +
 lib/galaxy/datatypes/test/test_space.txt           |    10 +
 lib/galaxy/datatypes/test/test_tab.bed             |     2 +
 lib/galaxy/datatypes/test/test_tab1.tabular        |    32 +
 lib/galaxy/datatypes/test/ucsc.customtrack         |     3 +
 lib/galaxy/datatypes/test/wiggle.wig               |     3 +
 lib/galaxy/datatypes/text.py                       |   544 +
 lib/galaxy/datatypes/tracks.py                     |    85 +
 lib/galaxy/datatypes/triples.py                    |   188 +
 lib/galaxy/datatypes/util/__init__.py              |     3 +
 lib/galaxy/datatypes/util/generic_util.py          |    19 +
 lib/galaxy/datatypes/util/gff_util.py              |   437 +
 lib/galaxy/datatypes/xml.py                        |   156 +
 lib/galaxy/dependencies/__init__.py                |   116 +
 lib/galaxy/dependencies/conda-environment.txt      |    75 +
 .../dependencies/conditional-requirements.txt      |    14 +
 lib/galaxy/dependencies/dev-requirements.txt       |     4 +
 .../dependencies/pinned-hashed-requirements.txt    |   150 +
 lib/galaxy/dependencies/pinned-requirements.txt    |    71 +
 lib/galaxy/dependencies/requirements.txt           |    70 +
 lib/galaxy/eggs/__init__.py                        |     8 +
 lib/galaxy/exceptions/__init__.py                  |   180 +
 lib/galaxy/exceptions/error_codes.json             |   142 +
 lib/galaxy/exceptions/error_codes.py               |    47 +
 lib/galaxy/external_services/__init__.py           |     0
 lib/galaxy/external_services/actions.py            |   193 +
 lib/galaxy/external_services/parameters.py         |    44 +
 .../external_services/result_handlers/__init__.py  |     0
 .../external_services/result_handlers/basic.py     |    68 +
 lib/galaxy/external_services/service.py            |   246 +
 lib/galaxy/forms/__init__.py                       |     0
 lib/galaxy/forms/forms.py                          |   323 +
 lib/galaxy/jobs/__init__.py                        |  2122 +++
 lib/galaxy/jobs/actions/__init__.py                |     4 +
 lib/galaxy/jobs/actions/post.py                    |   384 +
 lib/galaxy/jobs/command_factory.py                 |   246 +
 lib/galaxy/jobs/datasets.py                        |   109 +
 lib/galaxy/jobs/deferred/__init__.py               |   191 +
 lib/galaxy/jobs/deferred/data_transfer.py          |   384 +
 lib/galaxy/jobs/deferred/manual_data_transfer.py   |   107 +
 .../deferred/pacific_biosciences_smrt_portal.py    |   132 +
 lib/galaxy/jobs/dynamic_tool_destination.py        |  1414 ++
 lib/galaxy/jobs/error_level.py                     |    25 +
 lib/galaxy/jobs/handler.py                         |   807 ++
 lib/galaxy/jobs/manager.py                         |    47 +
 lib/galaxy/jobs/mapper.py                          |   245 +
 lib/galaxy/jobs/metrics/__init__.py                |   130 +
 lib/galaxy/jobs/metrics/collectl/__init__.py       |     4 +
 lib/galaxy/jobs/metrics/collectl/cli.py            |   141 +
 lib/galaxy/jobs/metrics/collectl/processes.py      |   256 +
 lib/galaxy/jobs/metrics/collectl/stats.py          |    27 +
 lib/galaxy/jobs/metrics/collectl/subsystems.py     |    81 +
 lib/galaxy/jobs/metrics/formatting.py              |    18 +
 lib/galaxy/jobs/metrics/instrumenters/__init__.py  |    55 +
 lib/galaxy/jobs/metrics/instrumenters/collectl.py  |   219 +
 lib/galaxy/jobs/metrics/instrumenters/core.py      |    88 +
 lib/galaxy/jobs/metrics/instrumenters/cpuinfo.py   |    64 +
 lib/galaxy/jobs/metrics/instrumenters/env.py       |    73 +
 lib/galaxy/jobs/metrics/instrumenters/meminfo.py   |    66 +
 lib/galaxy/jobs/metrics/instrumenters/uname.py     |    35 +
 lib/galaxy/jobs/output_checker.py                  |   166 +
 lib/galaxy/jobs/rule_helper.py                     |   196 +
 lib/galaxy/jobs/rules/__init__.py                  |     0
 lib/galaxy/jobs/runners/__init__.py                |   631 +
 lib/galaxy/jobs/runners/cli.py                     |   225 +
 lib/galaxy/jobs/runners/condor.py                  |   237 +
 lib/galaxy/jobs/runners/drmaa.py                   |   400 +
 lib/galaxy/jobs/runners/godocker.py                |   471 +
 lib/galaxy/jobs/runners/kubernetes.py              |   369 +
 lib/galaxy/jobs/runners/local.py                   |   207 +
 lib/galaxy/jobs/runners/pbs.py                     |   558 +
 lib/galaxy/jobs/runners/pulsar.py                  |   858 ++
 lib/galaxy/jobs/runners/slurm.py                   |   150 +
 lib/galaxy/jobs/runners/state_handler_factory.py   |    23 +
 lib/galaxy/jobs/runners/state_handlers/__init__.py |     0
 lib/galaxy/jobs/runners/state_handlers/resubmit.py |    67 +
 lib/galaxy/jobs/runners/tasks.py                   |   246 +
 lib/galaxy/jobs/runners/util/__init__.py           |    10 +
 lib/galaxy/jobs/runners/util/cli/__init__.py       |    73 +
 lib/galaxy/jobs/runners/util/cli/factory.py        |    22 +
 lib/galaxy/jobs/runners/util/cli/job/__init__.py   |    58 +
 lib/galaxy/jobs/runners/util/cli/job/slurm.py      |    98 +
 .../jobs/runners/util/cli/job/slurm_torque.py      |    29 +
 lib/galaxy/jobs/runners/util/cli/job/torque.py     |   124 +
 lib/galaxy/jobs/runners/util/cli/shell/__init__.py |    19 +
 lib/galaxy/jobs/runners/util/cli/shell/local.py    |    61 +
 lib/galaxy/jobs/runners/util/cli/shell/rsh.py      |    40 +
 lib/galaxy/jobs/runners/util/condor/__init__.py    |   115 +
 lib/galaxy/jobs/runners/util/drmaa/__init__.py     |    56 +
 lib/galaxy/jobs/runners/util/env.py                |    40 +
 lib/galaxy/jobs/runners/util/external.py           |    37 +
 .../util/job_script/CLUSTER_SLOTS_STATEMENT.sh     |    29 +
 .../util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh   |    27 +
 .../jobs/runners/util/job_script/__init__.py       |   149 +
 lib/galaxy/jobs/runners/util/kill.py               |    65 +
 lib/galaxy/jobs/runners/util/retry.py              |   112 +
 lib/galaxy/jobs/runners/util/sudo.py               |    24 +
 lib/galaxy/jobs/splitters/__init__.py              |     0
 lib/galaxy/jobs/splitters/basic.py                 |    26 +
 lib/galaxy/jobs/splitters/multi.py                 |   195 +
 lib/galaxy/jobs/stock_rules.py                     |    25 +
 lib/galaxy/jobs/transfer_manager.py                |   159 +
 lib/galaxy/main.py                                 |     1 +
 lib/galaxy/managers/__init__.py                    |    33 +
 lib/galaxy/managers/annotatable.py                 |   108 +
 lib/galaxy/managers/api_keys.py                    |    26 +
 lib/galaxy/managers/base.py                        |  1149 ++
 lib/galaxy/managers/citations.py                   |   167 +
 lib/galaxy/managers/collections.py                 |   338 +
 lib/galaxy/managers/collections_util.py            |    98 +
 lib/galaxy/managers/configuration.py               |   100 +
 lib/galaxy/managers/containers.py                  |   131 +
 lib/galaxy/managers/context.py                     |   192 +
 lib/galaxy/managers/datasets.py                    |   649 +
 lib/galaxy/managers/deletable.py                   |   116 +
 lib/galaxy/managers/folders.py                     |   288 +
 lib/galaxy/managers/hdas.py                        |   479 +
 lib/galaxy/managers/hdcas.py                       |   261 +
 lib/galaxy/managers/histories.py                   |   388 +
 lib/galaxy/managers/history_contents.py            |   486 +
 lib/galaxy/managers/lddas.py                       |    19 +
 lib/galaxy/managers/libraries.py                   |   258 +
 lib/galaxy/managers/pages.py                       |    74 +
 lib/galaxy/managers/ratable.py                     |   123 +
 lib/galaxy/managers/rbac_secured.py                |   271 +
 lib/galaxy/managers/roles.py                       |    49 +
 lib/galaxy/managers/secured.py                     |   121 +
 lib/galaxy/managers/sharable.py                    |   441 +
 lib/galaxy/managers/taggable.py                    |   122 +
 lib/galaxy/managers/tags.py                        |   319 +
 lib/galaxy/managers/users.py                       |   373 +
 lib/galaxy/managers/visualizations.py              |    77 +
 lib/galaxy/managers/workflows.py                   |   956 ++
 lib/galaxy/model/__init__.py                       |  5212 +++++++
 lib/galaxy/model/base.py                           |    39 +
 lib/galaxy/model/custom_types.py                   |   333 +
 lib/galaxy/model/item_attrs.py                     |   171 +
 lib/galaxy/model/mapping.py                        |  2598 ++++
 lib/galaxy/model/metadata.py                       |   895 ++
 lib/galaxy/model/migrate/__init__.py               |     0
 lib/galaxy/model/migrate/check.py                  |   127 +
 lib/galaxy/model/migrate/migrate.cfg               |    20 +
 .../model/migrate/versions/0001_initial_tables.py  |   201 +
 .../migrate/versions/0002_metadata_file_table.py   |    30 +
 .../versions/0003_security_and_libraries.py        |   770 +
 .../migrate/versions/0004_indexes_and_defaults.py  |    65 +
 .../migrate/versions/0005_cleanup_datasets_fix.py  |   746 +
 .../migrate/versions/0006_change_qual_datatype.py  |    65 +
 .../migrate/versions/0007_sharing_histories.py     |    84 +
 .../model/migrate/versions/0008_galaxy_forms.py    |   206 +
 .../model/migrate/versions/0009_request_table.py   |    62 +
 .../versions/0010_hda_display_at_authz_table.py    |    72 +
 .../migrate/versions/0011_v0010_mysql_index_fix.py |    67 +
 .../model/migrate/versions/0012_user_address.py    |    99 +
 .../0013_change_lib_item_templates_to_forms.py     |   247 +
 lib/galaxy/model/migrate/versions/0014_pages.py    |    74 +
 lib/galaxy/model/migrate/versions/0015_tagging.py  |   119 +
 .../migrate/versions/0016_v0015_mysql_index_fix.py |    56 +
 .../migrate/versions/0017_library_item_indexes.py  |    59 +
 .../versions/0018_ordered_tags_and_page_tags.py    |   119 +
 .../versions/0019_request_library_folder.py        |    89 +
 .../migrate/versions/0020_library_upload_job.py    |   128 +
 .../model/migrate/versions/0021_user_prefs.py      |    49 +
 .../migrate/versions/0022_visualization_tables.py  |    53 +
 .../0023_page_published_and_deleted_columns.py     |    38 +
 .../versions/0024_page_slug_unique_constraint.py   |    43 +
 .../model/migrate/versions/0025_user_info.py       |    69 +
 .../model/migrate/versions/0026_cloud_tables.py    |   159 +
 .../model/migrate/versions/0027_request_events.py  |   100 +
 .../0028_external_metadata_file_override.py        |    50 +
 .../model/migrate/versions/0029_user_actions.py    |    51 +
 .../migrate/versions/0030_history_slug_column.py   |    40 +
 .../versions/0031_community_and_workflow_tags.py   |   178 +
 .../versions/0032_stored_workflow_slug_column.py   |    43 +
 ...3_published_cols_for_histories_and_workflows.py |   104 +
 .../versions/0034_page_user_share_association.py   |    41 +
 ...0035_item_annotations_and_workflow_step_tags.py |   143 +
 ...eted_column_to_library_template_assoc_tables.py |    60 +
 .../model/migrate/versions/0037_samples_library.py |   151 +
 ...able_column_to_library_template_assoc_tables.py |    81 +
 .../0039_add_synopsis_column_to_library_table.py   |    27 +
 .../migrate/versions/0040_page_annotations.py      |    42 +
 .../migrate/versions/0041_workflow_invocation.py   |    50 +
 .../versions/0042_workflow_invocation_fix.py       |    56 +
 ...043_visualization_sharing_tagging_annotating.py |   197 +
 .../0044_add_notify_column_to_request_table.py     |    27 +
 .../0045_request_type_permissions_table.py         |    36 +
 .../migrate/versions/0046_post_job_actions.py      |    49 +
 .../versions/0047_job_table_user_id_column.py      |    84 +
 .../versions/0048_dataset_instance_state_column.py |    65 +
 .../model/migrate/versions/0049_api_keys_table.py  |    41 +
 .../migrate/versions/0050_drop_cloud_tables.py     |   149 +
 .../versions/0051_imported_col_for_jobs_table.py   |    49 +
 .../migrate/versions/0052_sample_dataset_table.py  |    95 +
 .../model/migrate/versions/0053_item_ratings.py    |   134 +
 .../migrate/versions/0054_visualization_dbkey.py   |    56 +
 .../versions/0055_add_pja_assoc_for_jobs.py        |    38 +
 .../migrate/versions/0056_workflow_outputs.py      |    38 +
 .../model/migrate/versions/0057_request_notify.py  |    58 +
 .../migrate/versions/0058_history_import_export.py |    48 +
 .../versions/0059_sample_dataset_file_path.py      |    55 +
 .../versions/0060_history_archive_import.py        |    72 +
 lib/galaxy/model/migrate/versions/0061_tasks.py    |    50 +
 .../migrate/versions/0062_user_openid_table.py     |    58 +
 .../model/migrate/versions/0063_sequencer_table.py |    56 +
 ...64_add_run_and_sample_run_association_tables.py |    67 +
 .../0065_add_name_to_form_fields_and_values.py     |   142 +
 .../0066_deferred_job_and_transfer_job_tables.py   |    69 +
 .../versions/0067_populate_sequencer_table.py      |   297 +
 .../0068_rename_sequencer_to_external_services.py  |   275 +
 .../versions/0069_rename_sequencer_form_type.py    |    32 +
 .../0070_add_info_column_to_deferred_job_table.py  |    36 +
 .../0071_add_history_and_workflow_to_sample.py     |    47 +
 ...pid_and_socket_columns_to_transfer_job_table.py |    40 +
 .../0073_add_ldda_to_implicit_conversion_table.py  |    40 +
 ...4_add_purged_column_to_library_dataset_table.py |    70 +
 .../0075_add_subindex_column_to_run_table.py       |    33 +
 .../0076_fix_form_values_data_corruption.py        |    90 +
 .../0077_create_tool_tag_association_table.py      |    49 +
 .../0078_add_columns_for_disk_usage_accounting.py  |    88 +
 .../versions/0079_input_library_to_job_table.py    |    43 +
 .../model/migrate/versions/0080_quota_tables.py    |   105 +
 .../versions/0081_add_tool_version_to_hda_ldda.py  |    40 +
 .../0082_add_tool_shed_repository_table.py         |    55 +
 .../versions/0083_add_prepare_files_to_task.py     |    64 +
 ...084_add_ldda_id_to_implicit_conversion_table.py |    39 +
 .../model/migrate/versions/0085_add_task_info.py   |    38 +
 .../0086_add_tool_shed_repository_table_columns.py |    82 +
 .../versions/0087_tool_id_guid_map_table.py        |    54 +
 .../0088_add_installed_changeset_revison_column.py |    67 +
 .../versions/0089_add_object_store_id_columns.py   |    40 +
 .../0090_add_tool_shed_repository_table_columns.py |    63 +
 .../versions/0091_add_tool_version_tables.py       |   125 +
 .../versions/0092_add_migrate_tools_table.py       |    52 +
 .../migrate/versions/0093_add_job_params_col.py    |    51 +
 .../migrate/versions/0094_add_job_handler_col.py   |    51 +
 .../model/migrate/versions/0095_hda_subsets.py     |    54 +
 .../model/migrate/versions/0096_openid_provider.py |    47 +
 .../migrate/versions/0097_add_ctx_rev_column.py    |    47 +
 .../versions/0098_genome_index_tool_data_table.py  |    54 +
 .../versions/0099_add_tool_dependency_table.py     |    54 +
 ...0_alter_tool_dependency_table_version_column.py |    53 +
 ...101_drop_installed_changeset_revision_column.py |    44 +
 .../0102_add_tool_dependency_status_columns.py     |    74 +
 ...0103_add_tool_shed_repository_status_columns.py |    71 +
 ...0104_update_genome_downloader_job_parameters.py |    86 +
 .../versions/0105_add_cleanup_event_table.py       |   120 +
 .../migrate/versions/0106_add_missing_indexes.py   |    81 +
 .../versions/0107_add_exit_code_to_job_and_task.py |    68 +
 .../migrate/versions/0108_add_extended_metadata.py |    76 +
 .../0109_add_repository_dependency_tables.py       |    61 +
 .../migrate/versions/0110_add_dataset_uuid.py      |    49 +
 .../migrate/versions/0111_add_job_destinations.py  |    56 +
 ...tion_and_data_manager_job_association_tables.py |    59 +
 .../versions/0113_update_migrate_tools_table.py    |    35 +
 .../0114_update_migrate_tools_table_again.py       |    35 +
 .../versions/0115_longer_user_password_field.py    |    26 +
 ...pdate_available_col_add_tool_shed_status_col.py |    81 +
 .../migrate/versions/0117_add_user_activation.py   |    58 +
 .../versions/0118_add_hda_extended_metadata.py     |    43 +
 .../model/migrate/versions/0119_job_metrics.py     |    99 +
 .../migrate/versions/0120_dataset_collections.py   |   176 +
 .../model/migrate/versions/0121_workflow_uuids.py  |    54 +
 .../migrate/versions/0122_grow_mysql_blobs.py      |    59 +
 .../versions/0123_add_workflow_request_tables.py   |   143 +
 .../migrate/versions/0124_job_state_history.py     |    46 +
 .../versions/0125_workflow_step_tracking.py        |    52 +
 .../model/migrate/versions/0126_password_reset.py  |    39 +
 .../versions/0127_output_collection_adjustments.py |    83 +
 .../model/migrate/versions/0128_session_timeout.py |    47 +
 .../0129_job_external_output_metadata_validity.py  |    47 +
 .../migrate/versions/0130_change_pref_datatype.py  |    27 +
 ...0131_subworkflow_and_input_parameter_modules.py |   135 +
 .../0132_add_lastpasswordchange_to_user.py         |    18 +
 .../versions/0133_add_dependency_column_to_job.py  |    49 +
 lib/galaxy/model/orm/__init__.py                   |     3 +
 lib/galaxy/model/orm/engine_factory.py             |    21 +
 lib/galaxy/model/orm/logging_connection_proxy.py   |    76 +
 lib/galaxy/model/orm/now.py                        |    13 +
 lib/galaxy/model/orm/scripts.py                    |   113 +
 lib/galaxy/model/search.py                         |   670 +
 lib/galaxy/model/tool_shed_install/__init__.py     |   662 +
 lib/galaxy/model/tool_shed_install/mapping.py      |   119 +
 .../model/tool_shed_install/migrate/__init__.py    |     0
 .../model/tool_shed_install/migrate/check.py       |    97 +
 .../model/tool_shed_install/migrate/migrate.cfg    |    20 +
 .../0001_add_tool_shed_repository_table.py         |     1 +
 .../0002_add_tool_shed_repository_table_columns.py |     1 +
 .../versions/0003_tool_id_guid_map_table.py        |     1 +
 .../0004_add_installed_changeset_revison_column.py |     1 +
 .../0005_add_tool_shed_repository_table_columns.py |     1 +
 .../versions/0006_add_tool_version_tables.py       |     1 +
 .../versions/0007_add_migrate_tools_table.py       |     1 +
 .../migrate/versions/0008_add_ctx_rev_column.py    |     1 +
 .../versions/0009_add_tool_dependency_table.py     |     1 +
 ...0_alter_tool_dependency_table_version_column.py |     1 +
 ...011_drop_installed_changeset_revision_column.py |     1 +
 .../0012_add_tool_dependency_status_columns.py     |     1 +
 ...0013_add_tool_shed_repository_status_columns.py |     1 +
 .../0014_add_repository_dependency_tables.py       |     1 +
 .../versions/0015_update_migrate_tools_table.py    |     1 +
 .../0016_update_migrate_tools_table_again.py       |     1 +
 ...pdate_available_col_add_tool_shed_status_col.py |     1 +
 lib/galaxy/model/util.py                           |    33 +
 lib/galaxy/objectstore/__init__.py                 |   791 +
 lib/galaxy/objectstore/azure_blob.py               |   541 +
 lib/galaxy/objectstore/pulsar.py                   |    79 +
 lib/galaxy/objectstore/rods.py                     |   349 +
 lib/galaxy/objectstore/s3.py                       |   638 +
 lib/galaxy/objectstore/s3_multipart_upload.py      |   109 +
 lib/galaxy/openid/__init__.py                      |     3 +
 lib/galaxy/openid/providers.py                     |   145 +
 lib/galaxy/queue_worker.py                         |   197 +
 lib/galaxy/queues.py                               |    41 +
 lib/galaxy/quota/__init__.py                       |   191 +
 lib/galaxy/sample_tracking/__init__.py             |     0
 lib/galaxy/sample_tracking/data_transfer.py        |    58 +
 .../sample_tracking/external_service_types.py      |   122 +
 lib/galaxy/sample_tracking/request_types.py        |    33 +
 lib/galaxy/sample_tracking/sample.py               |    26 +
 lib/galaxy/security/__init__.py                    |  1588 ++
 lib/galaxy/security/passwords.py                   |    82 +
 lib/galaxy/security/validate_user_input.py         |    86 +
 lib/galaxy/tags/__init__.py                        |     3 +
 lib/galaxy/tags/tag_handler.py                     |     0
 lib/galaxy/tools/__init__.py                       |  2516 ++++
 lib/galaxy/tools/actions/__init__.py               |   821 ++
 lib/galaxy/tools/actions/data_manager.py           |    18 +
 lib/galaxy/tools/actions/data_source.py            |    13 +
 lib/galaxy/tools/actions/history_imp_exp.py        |   144 +
 lib/galaxy/tools/actions/metadata.py               |   110 +
 lib/galaxy/tools/actions/model_operations.py       |    67 +
 lib/galaxy/tools/actions/upload.py                 |    38 +
 lib/galaxy/tools/actions/upload_common.py          |   427 +
 lib/galaxy/tools/cwl/__init__.py                   |    18 +
 lib/galaxy/tools/cwl/cwltool_deps.py               |    76 +
 lib/galaxy/tools/cwl/parser.py                     |   624 +
 lib/galaxy/tools/cwl/representation.py             |   177 +
 lib/galaxy/tools/cwl/runtime_actions.py            |    54 +
 lib/galaxy/tools/cwl/schema.py                     |    73 +
 lib/galaxy/tools/data/__init__.py                  |   727 +
 lib/galaxy/tools/data_manager/__init__.py          |     3 +
 lib/galaxy/tools/data_manager/manager.py           |   399 +
 lib/galaxy/tools/deps/__init__.py                  |   223 +
 lib/galaxy/tools/deps/brew_exts.py                 |   556 +
 lib/galaxy/tools/deps/brew_util.py                 |    39 +
 lib/galaxy/tools/deps/commands.py                  |   155 +
 lib/galaxy/tools/deps/conda_compat.py              |   112 +
 lib/galaxy/tools/deps/conda_util.py                |   533 +
 .../tools/deps/container_resolvers/__init__.py     |    50 +
 .../tools/deps/container_resolvers/explicit.py     |    26 +
 .../tools/deps/container_resolvers/mulled.py       |   207 +
 lib/galaxy/tools/deps/containers.py                |   452 +
 lib/galaxy/tools/deps/dependencies.py              |    68 +
 lib/galaxy/tools/deps/docker_util.py               |   215 +
 lib/galaxy/tools/deps/dockerfiles.py               |    75 +
 lib/galaxy/tools/deps/installable.py               |    77 +
 lib/galaxy/tools/deps/mulled/__init__.py           |     0
 lib/galaxy/tools/deps/mulled/_cli.py               |    20 +
 lib/galaxy/tools/deps/mulled/invfile.lua           |    67 +
 lib/galaxy/tools/deps/mulled/mulled_build.py       |   288 +
 .../tools/deps/mulled/mulled_build_channel.py      |   111 +
 lib/galaxy/tools/deps/mulled/mulled_build_files.py |    79 +
 lib/galaxy/tools/deps/mulled/mulled_build_tool.py  |    52 +
 lib/galaxy/tools/deps/mulled/mulled_search.py      |   131 +
 lib/galaxy/tools/deps/mulled/util.py               |   117 +
 lib/galaxy/tools/deps/requirements.py              |   136 +
 lib/galaxy/tools/deps/resolvers/__init__.py        |   128 +
 .../deps/resolvers/brewed_tool_shed_packages.py    |   149 +
 lib/galaxy/tools/deps/resolvers/conda.py           |   257 +
 lib/galaxy/tools/deps/resolvers/galaxy_packages.py |   129 +
 lib/galaxy/tools/deps/resolvers/homebrew.py        |    56 +
 lib/galaxy/tools/deps/resolvers/modules.py         |   183 +
 lib/galaxy/tools/deps/resolvers/resolver_mixins.py |    83 +
 .../tools/deps/resolvers/tool_shed_packages.py     |    66 +
 .../deps/resolvers/unlinked_tool_shed_packages.py  |   197 +
 lib/galaxy/tools/deps/views.py                     |   141 +
 lib/galaxy/tools/errors.py                         |   249 +
 lib/galaxy/tools/evaluation.py                     |   585 +
 lib/galaxy/tools/exception_handling.py             |     6 +
 lib/galaxy/tools/execute.py                        |   207 +
 lib/galaxy/tools/filter_failed_collection.xml      |    45 +
 lib/galaxy/tools/filters/__init__.py               |     5 +
 lib/galaxy/tools/flatten_collection.xml            |    56 +
 lib/galaxy/tools/imp_exp/__init__.py               |   534 +
 .../tools/imp_exp/exp_history_to_archive.xml       |    16 +
 lib/galaxy/tools/imp_exp/export_history.py         |   112 +
 .../tools/imp_exp/imp_history_from_archive.xml     |    14 +
 lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py  |   102 +
 lib/galaxy/tools/lint.py                           |   138 +
 lib/galaxy/tools/lint_util.py                      |     5 +
 lib/galaxy/tools/linters/__init__.py               |     1 +
 lib/galaxy/tools/linters/citations.py              |    32 +
 lib/galaxy/tools/linters/command.py                |    51 +
 lib/galaxy/tools/linters/general.py                |    37 +
 lib/galaxy/tools/linters/help.py                   |    45 +
 lib/galaxy/tools/linters/inputs.py                 |   122 +
 lib/galaxy/tools/linters/outputs.py                |    37 +
 lib/galaxy/tools/linters/stdio.py                  |    60 +
 lib/galaxy/tools/linters/tests.py                  |    36 +
 lib/galaxy/tools/linters/xml_order.py              |    65 +
 lib/galaxy/tools/loader.py                         |    10 +
 lib/galaxy/tools/loader_directory.py               |   279 +
 lib/galaxy/tools/merge_collection.xml              |   279 +
 lib/galaxy/tools/parameters/__init__.py            |   214 +
 lib/galaxy/tools/parameters/basic.py               |  2090 +++
 lib/galaxy/tools/parameters/dataset_matcher.py     |   184 +
 lib/galaxy/tools/parameters/dynamic_options.py     |   654 +
 lib/galaxy/tools/parameters/grouping.py            |   613 +
 lib/galaxy/tools/parameters/history_query.py       |    51 +
 lib/galaxy/tools/parameters/input_translation.py   |   109 +
 lib/galaxy/tools/parameters/meta.py                |   159 +
 lib/galaxy/tools/parameters/output_collect.py      |   526 +
 lib/galaxy/tools/parameters/sanitize.py            |   170 +
 lib/galaxy/tools/parameters/validation.py          |   448 +
 lib/galaxy/tools/parameters/wrapped.py             |   116 +
 lib/galaxy/tools/parameters/wrapped_json.py        |    83 +
 lib/galaxy/tools/parser/__init__.py                |    14 +
 lib/galaxy/tools/parser/cwl.py                     |   162 +
 lib/galaxy/tools/parser/factory.py                 |    74 +
 lib/galaxy/tools/parser/interface.py               |   357 +
 lib/galaxy/tools/parser/output_actions.py          |   663 +
 lib/galaxy/tools/parser/output_collection_def.py   |    75 +
 lib/galaxy/tools/parser/output_objects.py          |   220 +
 lib/galaxy/tools/parser/util.py                    |    38 +
 lib/galaxy/tools/parser/xml.py                     |   950 ++
 lib/galaxy/tools/parser/yaml.py                    |   345 +
 lib/galaxy/tools/search/__init__.py                |   116 +
 lib/galaxy/tools/special_tools.py                  |    13 +
 lib/galaxy/tools/test.py                           |   384 +
 lib/galaxy/tools/toolbox/__init__.py               |    13 +
 lib/galaxy/tools/toolbox/base.py                   |  1081 ++
 lib/galaxy/tools/toolbox/cache.py                  |    27 +
 lib/galaxy/tools/toolbox/filters/__init__.py       |   109 +
 .../tools/toolbox/filters/examples.py.sample       |    87 +
 lib/galaxy/tools/toolbox/integrated_panel.py       |   106 +
 lib/galaxy/tools/toolbox/lineages/__init__.py      |     6 +
 lib/galaxy/tools/toolbox/lineages/factory.py       |    49 +
 lib/galaxy/tools/toolbox/lineages/interface.py     |    48 +
 lib/galaxy/tools/toolbox/lineages/stock.py         |    51 +
 lib/galaxy/tools/toolbox/lineages/tool_shed.py     |    97 +
 lib/galaxy/tools/toolbox/panel.py                  |   170 +
 lib/galaxy/tools/toolbox/parser.py                 |   158 +
 lib/galaxy/tools/toolbox/tags.py                   |    77 +
 lib/galaxy/tools/toolbox/watcher.py                |   232 +
 lib/galaxy/tools/unzip_collection.xml              |    23 +
 lib/galaxy/tools/util/__init__.py                  |     6 +
 lib/galaxy/tools/util/galaxyops/__init__.py        |    41 +
 lib/galaxy/tools/util/maf_utilities.py             |   753 +
 lib/galaxy/tools/verify/__init__.py                |   289 +
 lib/galaxy/tools/verify/asserts/__init__.py        |    78 +
 lib/galaxy/tools/verify/asserts/tabular.py         |    19 +
 lib/galaxy/tools/verify/asserts/text.py            |    34 +
 lib/galaxy/tools/verify/asserts/xml.py             |    89 +
 lib/galaxy/tools/verify/test_data.py               |   121 +
 lib/galaxy/tools/wrappers.py                       |   413 +
 lib/galaxy/tools/xsd/LICENSE                       |    22 +
 lib/galaxy/tools/xsd/README.md                     |    45 +
 lib/galaxy/tools/xsd/galaxy.jxb                    |    22 +
 lib/galaxy/tools/xsd/galaxy.xsd                    |  5125 +++++++
 lib/galaxy/tools/zip_collection.xml                |    26 +
 lib/galaxy/tours/__init__.py                       |    80 +
 lib/galaxy/util/__init__.py                        |  1533 ++
 lib/galaxy/util/aliaspickler.py                    |    30 +
 lib/galaxy/util/backports/__init__.py              |     3 +
 lib/galaxy/util/biostar.py                         |   181 +
 lib/galaxy/util/bunch.py                           |    39 +
 lib/galaxy/util/checkers.py                        |   139 +
 lib/galaxy/util/dbkeys.py                          |    93 +
 lib/galaxy/util/dictifiable.py                     |    62 +
 lib/galaxy/util/docutils_template.txt              |     1 +
 lib/galaxy/util/expressions.py                     |    56 +
 lib/galaxy/util/filelock.py                        |    82 +
 lib/galaxy/util/hash_util.py                       |    50 +
 lib/galaxy/util/heartbeat.py                       |   192 +
 lib/galaxy/util/image_util.py                      |    75 +
 lib/galaxy/util/inflection.py                      |   375 +
 lib/galaxy/util/json.py                            |   169 +
 lib/galaxy/util/jstree.py                          |   162 +
 lib/galaxy/util/lazy_process.py                    |    57 +
 lib/galaxy/util/log/__init__.py                    |     7 +
 lib/galaxy/util/log/fluent_log.py                  |    45 +
 lib/galaxy/util/multi_byte.py                      |    25 +
 lib/galaxy/util/none_like.py                       |    41 +
 lib/galaxy/util/object_wrapper.py                  |   493 +
 lib/galaxy/util/odict.py                           |    94 +
 lib/galaxy/util/pastescript/__init__.py            |     3 +
 lib/galaxy/util/pastescript/loadwsgi.py            |   776 +
 lib/galaxy/util/pastescript/serve.py               |  1072 ++
 lib/galaxy/util/permutations.py                    |   114 +
 lib/galaxy/util/plugin_config.py                   |    79 +
 lib/galaxy/util/postfork.py                        |    43 +
 lib/galaxy/util/properties.py                      |   100 +
 lib/galaxy/util/sanitize_html.py                   |   441 +
 lib/galaxy/util/simplegraph.py                     |   129 +
 lib/galaxy/util/sleeper.py                         |    22 +
 lib/galaxy/util/sockets.py                         |    43 +
 lib/galaxy/util/specs.py                           |    32 +
 lib/galaxy/util/sqlite.py                          |    28 +
 lib/galaxy/util/streamball.py                      |    59 +
 lib/galaxy/util/submodules.py                      |    32 +
 lib/galaxy/util/template.py                        |    16 +
 lib/galaxy/util/topsort.py                         |   210 +
 lib/galaxy/util/ucsc.py                            |    35 +
 lib/galaxy/util/validation.py                      |    38 +
 lib/galaxy/util/xml_macros.py                      |   292 +
 lib/galaxy/version.py                              |     3 +
 lib/galaxy/visualization/__init__.py               |     3 +
 .../visualization/data_providers/__init__.py       |     3 +
 lib/galaxy/visualization/data_providers/basic.py   |   195 +
 lib/galaxy/visualization/data_providers/cigar.py   |   104 +
 lib/galaxy/visualization/data_providers/genome.py  |  1717 +++
 .../data_providers/phyloviz/__init__.py            |    44 +
 .../data_providers/phyloviz/baseparser.py          |   121 +
 .../data_providers/phyloviz/newickparser.py        |   182 +
 .../data_providers/phyloviz/nexusparser.py         |   102 +
 .../data_providers/phyloviz/phyloxmlparser.py      |   128 +
 .../visualization/data_providers/registry.py       |   113 +
 lib/galaxy/visualization/genome/__init__.py        |     3 +
 lib/galaxy/visualization/genomes.py                |   398 +
 lib/galaxy/visualization/plugins/__init__.py       |     0
 lib/galaxy/visualization/plugins/config_parser.py  |   461 +
 lib/galaxy/visualization/plugins/plugin.py         |   342 +
 lib/galaxy/visualization/plugins/registry.py       |   266 +
 .../visualization/plugins/resource_parser.py       |   229 +
 lib/galaxy/visualization/plugins/utils.py          |    75 +
 lib/galaxy/visualization/tracks/__init__.py        |     1 +
 lib/galaxy/web/__init__.py                         |    38 +
 lib/galaxy/web/base/__init__.py                    |     0
 lib/galaxy/web/base/controller.py                  |  2261 +++
 lib/galaxy/web/base/controllers/__init__.py        |     0
 lib/galaxy/web/base/controllers/admin.py           |  1189 ++
 lib/galaxy/web/base/interactive_environments.py    |   411 +
 lib/galaxy/web/base/pluginframework.py             |   580 +
 lib/galaxy/web/buildapp.py                         |     5 +
 lib/galaxy/web/form_builder.py                     |   827 ++
 lib/galaxy/web/formatting.py                       |    32 +
 lib/galaxy/web/framework/__init__.py               |     6 +
 lib/galaxy/web/framework/base.py                   |   478 +
 lib/galaxy/web/framework/decorators.py             |   404 +
 lib/galaxy/web/framework/formbuilder.py            |    86 +
 lib/galaxy/web/framework/helpers/__init__.py       |   113 +
 lib/galaxy/web/framework/helpers/grids.py          |   860 ++
 lib/galaxy/web/framework/middleware/__init__.py    |     3 +
 lib/galaxy/web/framework/middleware/batch.py       |   185 +
 lib/galaxy/web/framework/middleware/error.py       |   498 +
 lib/galaxy/web/framework/middleware/profile.py     |   175 +
 lib/galaxy/web/framework/middleware/remoteuser.py  |   205 +
 lib/galaxy/web/framework/middleware/request_id.py  |    14 +
 lib/galaxy/web/framework/middleware/sentry.py      |    99 +
 lib/galaxy/web/framework/middleware/static.py      |    55 +
 lib/galaxy/web/framework/middleware/statsd.py      |    37 +
 lib/galaxy/web/framework/middleware/translogger.py |   121 +
 .../web/framework/middleware/xforwardedhost.py     |    22 +
 lib/galaxy/web/framework/openid_manager.py         |    75 +
 lib/galaxy/web/framework/webapp.py                 |   943 ++
 lib/galaxy/web/params.py                           |    31 +
 lib/galaxy/web/proxy/__init__.py                   |   263 +
 lib/galaxy/web/proxy/js/Dockerfile                 |    18 +
 lib/galaxy/web/proxy/js/README.md                  |     2 +
 lib/galaxy/web/proxy/js/lib/main.js                |    47 +
 lib/galaxy/web/proxy/js/lib/mapper.js              |    78 +
 lib/galaxy/web/proxy/js/lib/proxy.js               |   175 +
 lib/galaxy/web/proxy/js/package.json               |    19 +
 lib/galaxy/web/security/__init__.py                |   130 +
 lib/galaxy/webapps/__init__.py                     |     3 +
 lib/galaxy/webapps/galaxy/__init__.py              |     0
 lib/galaxy/webapps/galaxy/api/__init__.py          |     0
 lib/galaxy/webapps/galaxy/api/annotations.py       |    91 +
 lib/galaxy/webapps/galaxy/api/authenticate.py      |   105 +
 lib/galaxy/webapps/galaxy/api/configuration.py     |   100 +
 .../webapps/galaxy/api/dataset_collections.py      |    85 +
 lib/galaxy/webapps/galaxy/api/datasets.py          |   382 +
 lib/galaxy/webapps/galaxy/api/datatypes.py         |   132 +
 lib/galaxy/webapps/galaxy/api/extended_metadata.py |    81 +
 lib/galaxy/webapps/galaxy/api/folder_contents.py   |   332 +
 lib/galaxy/webapps/galaxy/api/folders.py           |   288 +
 lib/galaxy/webapps/galaxy/api/forms.py             |    75 +
 lib/galaxy/webapps/galaxy/api/genomes.py           |    77 +
 lib/galaxy/webapps/galaxy/api/group_roles.py       |   125 +
 lib/galaxy/webapps/galaxy/api/group_users.py       |   125 +
 lib/galaxy/webapps/galaxy/api/groups.py            |   129 +
 lib/galaxy/webapps/galaxy/api/histories.py         |   484 +
 lib/galaxy/webapps/galaxy/api/history_contents.py  |   723 +
 lib/galaxy/webapps/galaxy/api/item_tags.py         |    76 +
 lib/galaxy/webapps/galaxy/api/job_files.py         |   166 +
 lib/galaxy/webapps/galaxy/api/jobs.py              |   352 +
 lib/galaxy/webapps/galaxy/api/lda_datasets.py      |   756 +
 lib/galaxy/webapps/galaxy/api/libraries.py         |   377 +
 lib/galaxy/webapps/galaxy/api/library_contents.py  |   444 +
 lib/galaxy/webapps/galaxy/api/metrics.py           |   101 +
 lib/galaxy/webapps/galaxy/api/page_revisions.py    |    90 +
 lib/galaxy/webapps/galaxy/api/pages.py             |   151 +
 lib/galaxy/webapps/galaxy/api/provenance.py        |    98 +
 lib/galaxy/webapps/galaxy/api/quotas.py            |   147 +
 lib/galaxy/webapps/galaxy/api/remote_files.py      |   149 +
 lib/galaxy/webapps/galaxy/api/request_types.py     |   102 +
 lib/galaxy/webapps/galaxy/api/requests.py          |   108 +
 lib/galaxy/webapps/galaxy/api/roles.py             |    93 +
 lib/galaxy/webapps/galaxy/api/samples.py           |   145 +
 lib/galaxy/webapps/galaxy/api/search.py            |    66 +
 lib/galaxy/webapps/galaxy/api/tool_data.py         |   125 +
 lib/galaxy/webapps/galaxy/api/tool_dependencies.py |   183 +
 .../webapps/galaxy/api/tool_shed_repositories.py   |   741 +
 lib/galaxy/webapps/galaxy/api/tools.py             |   632 +
 lib/galaxy/webapps/galaxy/api/tours.py             |    50 +
 lib/galaxy/webapps/galaxy/api/users.py             |   242 +
 lib/galaxy/webapps/galaxy/api/visualizations.py    |   206 +
 lib/galaxy/webapps/galaxy/api/webhooks.py          |    71 +
 lib/galaxy/webapps/galaxy/api/workflows.py         |   633 +
 lib/galaxy/webapps/galaxy/buildapp.py              |   877 ++
 lib/galaxy/webapps/galaxy/controllers/__init__.py  |     3 +
 lib/galaxy/webapps/galaxy/controllers/admin.py     |   905 ++
 .../webapps/galaxy/controllers/admin_toolshed.py   |  2121 +++
 lib/galaxy/webapps/galaxy/controllers/async.py     |   145 +
 lib/galaxy/webapps/galaxy/controllers/biostar.py   |   103 +
 .../webapps/galaxy/controllers/data_manager.py     |   118 +
 lib/galaxy/webapps/galaxy/controllers/dataset.py   |  1174 ++
 lib/galaxy/webapps/galaxy/controllers/error.py     |     7 +
 .../webapps/galaxy/controllers/external_service.py |   369 +
 .../galaxy/controllers/external_services.py        |    27 +
 lib/galaxy/webapps/galaxy/controllers/forms.py     |   676 +
 lib/galaxy/webapps/galaxy/controllers/history.py   |  1364 ++
 lib/galaxy/webapps/galaxy/controllers/library.py   |   140 +
 .../webapps/galaxy/controllers/library_admin.py    |   247 +
 .../webapps/galaxy/controllers/library_common.py   |  2805 ++++
 lib/galaxy/webapps/galaxy/controllers/mobile.py    |    95 +
 lib/galaxy/webapps/galaxy/controllers/page.py      |   824 ++
 .../webapps/galaxy/controllers/request_type.py     |   477 +
 lib/galaxy/webapps/galaxy/controllers/requests.py  |    98 +
 .../webapps/galaxy/controllers/requests_admin.py   |   473 +
 .../webapps/galaxy/controllers/requests_common.py  |  1960 +++
 lib/galaxy/webapps/galaxy/controllers/root.py      |   540 +
 lib/galaxy/webapps/galaxy/controllers/search.py    |    19 +
 lib/galaxy/webapps/galaxy/controllers/tag.py       |   210 +
 .../webapps/galaxy/controllers/tool_runner.py      |   147 +
 lib/galaxy/webapps/galaxy/controllers/user.py      |  1879 +++
 lib/galaxy/webapps/galaxy/controllers/userskeys.py |    79 +
 .../webapps/galaxy/controllers/visualization.py    |  1079 ++
 lib/galaxy/webapps/galaxy/controllers/workflow.py  |  1315 ++
 lib/galaxy/webapps/reports/__init__.py             |     6 +
 lib/galaxy/webapps/reports/app.py                  |    40 +
 lib/galaxy/webapps/reports/buildapp.py             |   151 +
 lib/galaxy/webapps/reports/config.py               |   126 +
 lib/galaxy/webapps/reports/controllers/__init__.py |     1 +
 lib/galaxy/webapps/reports/controllers/history.py  |   192 +
 lib/galaxy/webapps/reports/controllers/home.py     |   123 +
 lib/galaxy/webapps/reports/controllers/jobs.py     |  1128 ++
 lib/galaxy/webapps/reports/controllers/query.py    |    30 +
 lib/galaxy/webapps/reports/controllers/root.py     |     9 +
 .../webapps/reports/controllers/sample_tracking.py |   206 +
 lib/galaxy/webapps/reports/controllers/system.py   |   214 +
 lib/galaxy/webapps/reports/controllers/tools.py    |   365 +
 lib/galaxy/webapps/reports/controllers/users.py    |   216 +
 .../webapps/reports/controllers/workflows.py       |   464 +
 lib/galaxy/webapps/tool_shed/__init__.py           |     6 +
 lib/galaxy/webapps/tool_shed/api/__init__.py       |     0
 lib/galaxy/webapps/tool_shed/api/authenticate.py   |    34 +
 lib/galaxy/webapps/tool_shed/api/categories.py     |   135 +
 lib/galaxy/webapps/tool_shed/api/configuration.py  |    28 +
 lib/galaxy/webapps/tool_shed/api/groups.py         |   149 +
 lib/galaxy/webapps/tool_shed/api/repositories.py   |  1098 ++
 .../webapps/tool_shed/api/repository_revisions.py  |   244 +
 lib/galaxy/webapps/tool_shed/api/tools.py          |   101 +
 lib/galaxy/webapps/tool_shed/api/users.py          |   131 +
 lib/galaxy/webapps/tool_shed/app.py                |    85 +
 lib/galaxy/webapps/tool_shed/buildapp.py           |   313 +
 lib/galaxy/webapps/tool_shed/config.py             |   253 +
 .../webapps/tool_shed/controllers/__init__.py      |     1 +
 lib/galaxy/webapps/tool_shed/controllers/admin.py  |   494 +
 lib/galaxy/webapps/tool_shed/controllers/groups.py |    19 +
 lib/galaxy/webapps/tool_shed/controllers/hg.py     |    36 +
 .../webapps/tool_shed/controllers/repository.py    |  2968 ++++
 .../tool_shed/controllers/repository_review.py     |   613 +
 lib/galaxy/webapps/tool_shed/controllers/upload.py |   408 +
 lib/galaxy/webapps/tool_shed/controllers/user.py   |     3 +
 lib/galaxy/webapps/tool_shed/framework/__init__.py |     3 +
 .../tool_shed/framework/middleware/__init__.py     |     1 +
 .../tool_shed/framework/middleware/remoteuser.py   |   126 +
 lib/galaxy/webapps/tool_shed/model/__init__.py     |   511 +
 lib/galaxy/webapps/tool_shed/model/mapping.py      |   331 +
 .../webapps/tool_shed/model/migrate/__init__.py    |     0
 .../webapps/tool_shed/model/migrate/check.py       |    92 +
 .../webapps/tool_shed/model/migrate/migrate.cfg    |    20 +
 .../model/migrate/versions/0001_initial_tables.py  |   156 +
 .../migrate/versions/0002_add_tool_suite_column.py |    51 +
 .../0003_review_and_review_association_tables.py   |    49 +
 .../migrate/versions/0004_repository_tables.py     |    83 +
 .../versions/0005_drop_tool_related_tables.py      |   195 +
 .../versions/0006_add_email_alerts_column.py       |    48 +
 ...dd_long_description_times_downloaded_columns.py |    64 +
 .../versions/0008_add_repository_metadata_table.py |    53 +
 .../migrate/versions/0009_add_malicious_column.py  |    51 +
 .../versions/0010_add_new_repo_alert_column.py     |    53 +
 .../versions/0011_add_tool_versions_column.py      |    47 +
 .../versions/0012_add_downloadable_column.py       |    49 +
 .../migrate/versions/0013_add_review_tables.py     |   220 +
 .../migrate/versions/0014_add_deprecated_column.py |    51 +
 .../migrate/versions/0015_add_api_keys_table.py    |    49 +
 ...st_tools_functionally_correct_errors_columns.py |    99 +
 ...utility_columns_to_repository_metadata_table.py |   113 +
 .../0018_add_repository_metadata_flag_columns.py   |    92 +
 ...ool_test_table_and_test_install_error_column.py |    83 +
 .../versions/0020_add_repository_type_column.py    |    46 +
 .../versions/0021_change_repository_type_value.py  |    32 +
 .../versions/0022_add_repository_admin_roles.py    |   163 +
 .../0023_add_repository_url_and_hompeage_url.py    |    49 +
 .../model/migrate/versions/0024_password_reset.py  |    37 +
 .../model/migrate/versions/0025_session_timeout.py |    45 +
 lib/galaxy/webapps/tool_shed/search/__init__.py    |     0
 lib/galaxy/webapps/tool_shed/search/repo_search.py |   125 +
 lib/galaxy/webapps/tool_shed/search/tool_search.py |    84 +
 lib/galaxy/webapps/tool_shed/security/__init__.py  |   279 +
 lib/galaxy/webapps/tool_shed/util/__init__.py      |     0
 lib/galaxy/webapps/tool_shed/util/hgweb_config.py  |   103 +
 lib/galaxy/webapps/tool_shed/util/ratings_util.py  |    28 +
 .../webapps/tool_shed/util/shed_statistics.py      |    94 +
 lib/galaxy/webapps/util.py                         |    21 +
 lib/galaxy/webhooks/__init__.py                    |   106 +
 lib/galaxy/work/__init__.py                        |     0
 lib/galaxy/work/context.py                         |    47 +
 lib/galaxy/workflow/__init__.py                    |     0
 lib/galaxy/workflow/extract.py                     |   375 +
 lib/galaxy/workflow/modules.py                     |  1367 ++
 lib/galaxy/workflow/render.py                      |   151 +
 lib/galaxy/workflow/run.py                         |   402 +
 lib/galaxy/workflow/run_request.py                 |   422 +
 lib/galaxy/workflow/schedulers/__init__.py         |    41 +
 lib/galaxy/workflow/schedulers/core.py             |    46 +
 lib/galaxy/workflow/scheduling_manager.py          |   197 +
 lib/galaxy/workflow/steps.py                       |    63 +
 lib/galaxy_ext/__init__.py                         |     4 +
 lib/galaxy_ext/metadata/__init__.py                |     1 +
 lib/galaxy_ext/metadata/set_metadata.py            |   158 +
 lib/galaxy_utils/__init__.py                       |     0
 lib/galaxy_utils/sequence/__init__.py              |     0
 lib/galaxy_utils/sequence/fasta.py                 |   125 +
 lib/galaxy_utils/sequence/fastq.py                 |   838 ++
 lib/galaxy_utils/sequence/sequence.py              |    74 +
 lib/galaxy_utils/sequence/transform.py             |    90 +
 lib/galaxy_utils/sequence/vcf.py                   |   121 +
 lib/log_tempfile.py                                |    31 +
 lib/mimeparse.py                                   |   191 +
 lib/psyco_full.py                                  |     5 +
 lib/tool_shed/__init__.py                          |     0
 lib/tool_shed/capsule/__init__.py                  |     0
 lib/tool_shed/capsule/capsule_manager.py           |   932 ++
 lib/tool_shed/dependencies/__init__.py             |     0
 lib/tool_shed/dependencies/attribute_handlers.py   |   204 +
 lib/tool_shed/dependencies/repository/__init__.py  |     0
 .../dependencies/repository/relation_builder.py    |   488 +
 lib/tool_shed/dependencies/tool/__init__.py        |     0
 .../dependencies/tool/tag_attribute_handler.py     |   203 +
 lib/tool_shed/galaxy_install/__init__.py           |     0
 lib/tool_shed/galaxy_install/datatypes/__init__.py |     0
 .../datatypes/custom_datatype_manager.py           |   214 +
 lib/tool_shed/galaxy_install/dependency_display.py |   619 +
 lib/tool_shed/galaxy_install/grids/__init__.py     |     0
 .../galaxy_install/grids/admin_toolshed_grids.py   |   431 +
 lib/tool_shed/galaxy_install/install_manager.py    |  1038 ++
 .../galaxy_install/installed_repository_manager.py |  1064 ++
 lib/tool_shed/galaxy_install/metadata/__init__.py  |     0
 .../installed_repository_metadata_manager.py       |   189 +
 lib/tool_shed/galaxy_install/migrate/__init__.py   |     0
 lib/tool_shed/galaxy_install/migrate/check.py      |   163 +
 lib/tool_shed/galaxy_install/migrate/common.py     |    72 +
 lib/tool_shed/galaxy_install/migrate/migrate.cfg   |    20 +
 .../galaxy_install/migrate/versions/0001_tools.py  |    12 +
 .../galaxy_install/migrate/versions/0002_tools.py  |    15 +
 .../galaxy_install/migrate/versions/0003_tools.py  |    14 +
 .../galaxy_install/migrate/versions/0004_tools.py  |    16 +
 .../galaxy_install/migrate/versions/0005_tools.py  |    17 +
 .../galaxy_install/migrate/versions/0006_tools.py  |    22 +
 .../galaxy_install/migrate/versions/0007_tools.py  |    19 +
 .../galaxy_install/migrate/versions/0008_tools.py  |   116 +
 .../galaxy_install/migrate/versions/0009_tools.py  |   106 +
 .../galaxy_install/migrate/versions/0010_tools.py  |   112 +
 .../galaxy_install/migrate/versions/0011_tools.py  |    64 +
 .../galaxy_install/migrate/versions/0012_tools.py  |    50 +
 .../galaxy_install/repair_repository_manager.py    |   229 +
 .../repository_dependencies/__init__.py            |     0
 .../repository_dependency_manager.py               |   471 +
 .../galaxy_install/tool_dependencies/__init__.py   |     0
 .../tool_dependencies/env_manager.py               |   146 +
 .../tool_dependencies/recipe/__init__.py           |     0
 .../recipe/asynchronous_reader.py                  |    31 +
 .../tool_dependencies/recipe/env_file_builder.py   |    96 +
 .../recipe/install_environment.py                  |   275 +
 .../tool_dependencies/recipe/recipe_manager.py     |   107 +
 .../tool_dependencies/recipe/step_handler.py       |  1912 +++
 .../tool_dependencies/recipe/tag_handler.py        |   636 +
 .../galaxy_install/tool_migration_manager.py       |   698 +
 lib/tool_shed/galaxy_install/tools/__init__.py     |     0
 lib/tool_shed/galaxy_install/tools/data_manager.py |   161 +
 .../galaxy_install/tools/tool_panel_manager.py     |   453 +
 .../galaxy_install/update_repository_manager.py    |   148 +
 .../galaxy_install/utility_containers/__init__.py  |   144 +
 lib/tool_shed/grids/__init__.py                    |     0
 lib/tool_shed/grids/admin_grids.py                 |   512 +
 .../grids/repository_grid_filter_manager.py        |    49 +
 lib/tool_shed/grids/repository_grids.py            |  1432 ++
 lib/tool_shed/grids/repository_review_grids.py     |   435 +
 lib/tool_shed/grids/util.py                        |   188 +
 lib/tool_shed/managers/__init__.py                 |     0
 lib/tool_shed/managers/groups.py                   |   123 +
 lib/tool_shed/managers/repositories.py             |    66 +
 lib/tool_shed/metadata/__init__.py                 |     0
 lib/tool_shed/metadata/metadata_generator.py       |  1122 ++
 .../metadata/repository_metadata_manager.py        |   999 ++
 lib/tool_shed/repository_registry.py               |   378 +
 lib/tool_shed/repository_types/__init__.py         |     0
 lib/tool_shed/repository_types/metadata.py         |    29 +
 lib/tool_shed/repository_types/registry.py         |    21 +
 .../repository_suite_definition.py                 |    39 +
 .../repository_types/tool_dependency_definition.py |    38 +
 lib/tool_shed/repository_types/unrestricted.py     |    20 +
 lib/tool_shed/repository_types/util.py             |    57 +
 lib/tool_shed/tool_shed_registry.py                |    61 +
 lib/tool_shed/tools/__init__.py                    |     0
 lib/tool_shed/tools/data_table_manager.py          |   176 +
 lib/tool_shed/tools/tool_validator.py              |   337 +
 lib/tool_shed/tools/tool_version_manager.py        |   115 +
 lib/tool_shed/util/__init__.py                     |     0
 lib/tool_shed/util/basic_util.py                   |   150 +
 lib/tool_shed/util/commit_util.py                  |   257 +
 lib/tool_shed/util/common_util.py                  |   350 +
 lib/tool_shed/util/container_util.py               |    71 +
 lib/tool_shed/util/encoding_util.py                |    37 +
 lib/tool_shed/util/hg_util.py                      |   462 +
 lib/tool_shed/util/metadata_util.py                |   369 +
 lib/tool_shed/util/readme_util.py                  |   117 +
 lib/tool_shed/util/repository_content_util.py      |    64 +
 lib/tool_shed/util/repository_util.py              |  1043 ++
 lib/tool_shed/util/review_util.py                  |   131 +
 lib/tool_shed/util/search_util.py                  |   171 +
 lib/tool_shed/util/shed_util_common.py             |   652 +
 lib/tool_shed/util/tool_dependency_util.py         |   391 +
 lib/tool_shed/util/tool_util.py                    |   279 +
 lib/tool_shed/util/web_util.py                     |    22 +
 lib/tool_shed/util/workflow_util.py                |   416 +
 lib/tool_shed/util/xml_util.py                     |   154 +
 lib/tool_shed/utility_containers/__init__.py       |   360 +
 .../utility_container_manager.py                   |   860 ++
 locale/en/LC_MESSAGES/ginga.mo                     |   Bin 0 -> 3873 bytes
 locale/en/LC_MESSAGES/ginga.po                     |   299 +
 locale/en/LC_MESSAGES/tools.mo                     |   Bin 0 -> 1564 bytes
 locale/en/LC_MESSAGES/tools.po                     |    85 +
 locale/ginga.pot                                   |   296 +
 locale/ja/LC_MESSAGES/ginga.mo                     |   Bin 0 -> 8439 bytes
 locale/ja/LC_MESSAGES/ginga.po                     |   518 +
 locale/ja/LC_MESSAGES/tools.mo                     |   Bin 0 -> 1557 bytes
 locale/ja/LC_MESSAGES/tools.po                     |    85 +
 locale/tools.pot                                   |    66 +
 locale/zh/LC_MESSAGES/ginga.mo                     |   Bin 0 -> 9906 bytes
 locale/zh/LC_MESSAGES/ginga.po                     |   603 +
 locale/zh/LC_MESSAGES/tools.mo                     |   Bin 0 -> 498 bytes
 locale/zh/LC_MESSAGES/tools.po                     |    84 +
 manage_db.sh                                       |    17 +
 manage_tools.sh                                    |    12 +
 openid/aol.xml                                     |     4 +
 openid/genomespace.xml                             |    16 +
 openid/google.xml                                  |     4 +
 openid/launchpad.xml                               |     4 +
 openid/yahoo.xml                                   |     4 +
 requirements.txt                                   |     1 +
 rolling_restart.sh                                 |     5 +
 run.sh                                             |   133 +
 run_reports.sh                                     |    50 +
 run_tests.sh                                       |   533 +
 run_tool_shed.sh                                   |    41 +
 scripts/__init__.py                                |     0
 scripts/api/README                                 |   116 +
 scripts/api/common.py                              |   200 +
 scripts/api/copy_hda_to_library_folder.py          |    35 +
 scripts/api/create.py                              |    15 +
 scripts/api/data_manager_example_execute.py        |   105 +
 scripts/api/delete.py                              |    15 +
 scripts/api/display.py                             |    19 +
 scripts/api/example_watch_folder.py                |    90 +
 .../api/filter_failed_datasets_from_collection.py  |    63 +
 scripts/api/form_create_from_xml.py                |    16 +
 scripts/api/history_create_history.py              |    19 +
 scripts/api/history_delete_history.py              |    19 +
 scripts/api/import_library_dataset_to_history.py   |    17 +
 ...orkflows_from_installed_tool_shed_repository.py |    64 +
 scripts/api/install_tool_shed_repositories.py      |    68 +
 scripts/api/library_create_folder.py               |    22 +
 scripts/api/library_create_library.py              |    20 +
 scripts/api/library_upload_dir.py                  |   170 +
 scripts/api/library_upload_from_import_dir.py      |    24 +
 scripts/api/load_data_with_metadata.py             |    83 +
 scripts/api/repair_tool_shed_repository.py         |    60 +
 scripts/api/request_type_create_from_xml.py        |    22 +
 scripts/api/requests_update_state.py               |    15 +
 .../reset_metadata_on_installed_repositories.py    |    27 +
 .../sample_tracking/request_form.xml               |    15 +
 .../sample_tracking/request_type.xml               |     9 +
 .../sample_tracking/sample_form.xml                |    18 +
 scripts/api/sample_dataset_update_status.py        |    21 +
 scripts/api/sample_update_state.py                 |    20 +
 scripts/api/search.py                              |    59 +
 scripts/api/sequencer_configuration_create.py      |    50 +
 scripts/api/update.py                              |    15 +
 scripts/api/upload_to_history.py                   |    54 +
 scripts/api/workflow_delete.py                     |    28 +
 scripts/api/workflow_execute.py                    |    33 +
 scripts/api/workflow_execute_parameters.py         |    55 +
 scripts/api/workflow_import.py                     |    31 +
 scripts/api/workflow_import_from_file_rpark.py     |    32 +
 scripts/auth/pam_auth_helper.py                    |    41 +
 scripts/binary_compatibility.py                    |    75 +
 scripts/bootstrap_history.py                       |   668 +
 scripts/build_toolbox.py                           |   172 +
 scripts/build_universe_config.py                   |    34 +
 scripts/check_eggs.py                              |     1 +
 scripts/check_galaxy.py                            |   415 +
 scripts/check_python.py                            |    29 +
 scripts/cleanup_datasets/admin_cleanup_datasets.py |   277 +
 .../admin_cleanup_deletion_template.txt.sample     |    11 +
 .../admin_cleanup_warning_template.txt.sample      |    11 +
 scripts/cleanup_datasets/cleanup_datasets.py       |   548 +
 scripts/cleanup_datasets/delete_datasets.sh        |     4 +
 .../cleanup_datasets/delete_userless_histories.sh  |     4 +
 scripts/cleanup_datasets/pgcleanup.py              |   781 +
 scripts/cleanup_datasets/populate_uuid.py          |    52 +
 scripts/cleanup_datasets/populate_uuid.sh          |     5 +
 scripts/cleanup_datasets/purge_datasets.sh         |     4 +
 scripts/cleanup_datasets/purge_folders.sh          |     4 +
 scripts/cleanup_datasets/purge_histories.sh        |     4 +
 scripts/cleanup_datasets/purge_libraries.sh        |     4 +
 .../remove_renamed_datasets_from_disk.py           |    49 +
 scripts/cleanup_datasets/rename_purged_datasets.py |    51 +
 scripts/cleanup_datasets/update_dataset_size.py    |    50 +
 scripts/cleanup_datasets/update_metadata.py        |    62 +
 scripts/cleanup_datasets/update_metadata.sh        |     9 +
 scripts/common_startup.sh                          |   153 +
 scripts/communication/communication_server.py      |   212 +
 scripts/communication/template/communication.css   |   180 +
 scripts/communication/template/communication.html  |    58 +
 scripts/communication/template/communication.js    |   759 +
 scripts/create_db.py                               |    40 +
 scripts/data_libraries/build_lucene_index.py       |   110 +
 scripts/data_libraries/build_lucene_index.sh       |     4 +
 scripts/data_libraries/build_whoosh_index.py       |    92 +
 scripts/data_libraries/build_whoosh_index.sh       |     4 +
 scripts/db_shell.py                                |   101 +
 scripts/drmaa_external_killer.py                   |    54 +
 scripts/drmaa_external_runner.py                   |   136 +
 scripts/edam_mapping.py                            |    61 +
 scripts/external_chown_script.py                   |    25 +
 scripts/extract_dataset_part.py                    |    48 +
 scripts/extract_toolbox_sections.py                |   137 +
 scripts/fetch_eggs.py                              |    32 +
 scripts/functional_tests.py                        |   121 +
 scripts/galaxy-main                                |   246 +
 scripts/get_platforms.py                           |    11 +
 scripts/grt.py                                     |   227 +
 scripts/grt.yml.sample                             |     7 +
 scripts/helper.py                                  |    63 +
 scripts/loc_files/create_all_fasta_loc.py          |   303 +
 scripts/manage_db.py                               |    23 +
 scripts/manage_tools.py                            |    37 +
 scripts/metagenomics/convert_title.py              |    40 +
 scripts/metagenomics/process_BLAST_db.sh           |    18 +
 scripts/microbes/BeautifulSoup.py                  |  1808 +++
 scripts/microbes/README.txt                        |    24 +
 scripts/microbes/create_bacteria_loc_file.py       |    73 +
 scripts/microbes/create_bacteria_table.py          |    79 +
 scripts/microbes/create_nib_seq_loc_file.py        |    87 +
 scripts/microbes/get_builds_lengths.py             |    60 +
 scripts/microbes/harvest_bacteria.py               |   255 +
 scripts/microbes/ncbi_to_ucsc.py                   |   141 +
 scripts/microbes/util.py                           |   235 +
 scripts/migrate_tools/0002_tools.sh                |     4 +
 scripts/migrate_tools/0002_tools.xml               |   113 +
 scripts/migrate_tools/0003_tools.sh                |     4 +
 scripts/migrate_tools/0003_tools.xml               |     6 +
 scripts/migrate_tools/0004_tools.sh                |     4 +
 scripts/migrate_tools/0004_tools.xml               |    12 +
 scripts/migrate_tools/0005_tools.sh                |     4 +
 scripts/migrate_tools/0005_tools.xml               |     7 +
 scripts/migrate_tools/0006_tools.sh                |     4 +
 scripts/migrate_tools/0006_tools.xml               |    18 +
 scripts/migrate_tools/0007_tools.sh                |     4 +
 scripts/migrate_tools/0007_tools.xml               |    15 +
 scripts/migrate_tools/0008_tools.sh                |     4 +
 scripts/migrate_tools/0008_tools.xml               |   147 +
 scripts/migrate_tools/0009_tools.sh                |     4 +
 scripts/migrate_tools/0009_tools.xml               |   132 +
 scripts/migrate_tools/0010_tools.sh                |     4 +
 scripts/migrate_tools/0010_tools.xml               |   141 +
 scripts/migrate_tools/0011_tools.sh                |     4 +
 scripts/migrate_tools/0011_tools.xml               |    69 +
 scripts/migrate_tools/0012_tools.sh                |     4 +
 scripts/migrate_tools/0012_tools.xml               |    48 +
 scripts/migrate_tools/migrate_tools.py             |    29 +
 scripts/nosetests.py                               |    11 +
 scripts/others/incorrect_gops_jobs.py              |   109 +
 scripts/others/incorrect_gops_jobs.sh              |     4 +
 scripts/others/incorrect_gops_join_jobs.py         |    99 +
 scripts/others/incorrect_gops_join_jobs.sh         |     4 +
 scripts/paster.py                                  |    26 +
 scripts/patch.sh                                   |    52 +
 scripts/rst2html.py                                |    25 +
 scripts/run_selenium_tests.sh                      |     8 +
 scripts/runtime_stats.py                           |   272 +
 scripts/set_dataset_sizes.py                       |    62 +
 scripts/set_user_disk_usage.py                     |   100 +
 scripts/slideshow/__init__.py                      |     0
 scripts/slideshow/build_slideshow.py               |    28 +
 scripts/slideshow/example.md                       |   243 +
 scripts/slideshow/slideshow_template.html          |    18 +
 scripts/slideshow/style.sample.css                 |    25 +
 scripts/summarize_timings.py                       |    51 +
 scripts/sync_reports_config.py                     |    62 +
 scripts/taxonomy/gi2tax_test.txt                   |   100 +
 scripts/taxonomy/names_test.txt                    |   100 +
 scripts/taxonomy/processTaxonomy.sh                |    18 +
 .../tool_shed/api/add_repository_registry_entry.py |    39 +
 scripts/tool_shed/api/common.py                    |   292 +
 scripts/tool_shed/api/create_categories.py         |    56 +
 scripts/tool_shed/api/create_users.py              |    59 +
 scripts/tool_shed/api/export.py                    |   134 +
 .../api/get_filtered_repository_revisions.py       |    97 +
 scripts/tool_shed/api/import_capsule.py            |    46 +
 .../api/remove_repository_registry_entry.py        |    39 +
 .../api/reset_metadata_on_repositories.py          |    91 +
 .../api/tool_shed_repository_revision_update.py    |    30 +
 .../bootstrap_tool_shed/bootstrap_tool_shed.sh     |   109 +
 .../bootstrap_tool_shed/bootstrap_util.py          |   130 +
 .../create_user_with_api_key.py                    |   139 +
 .../bootstrap_tool_shed/parse_run_sh_args.sh       |    15 +
 .../bootstrap_tool_shed/user_info.xml.sample       |    12 +
 scripts/tool_shed/build_ts_whoosh_index.py         |   222 +
 scripts/tool_shed/check_download_urls.py           |    39 +
 ...for_empty_tool_dependency_installation_paths.py |    70 +
 ...for_empty_tool_dependency_installation_paths.py |   153 +
 .../clean_up_tool_dependency_directory.py          |    32 +
 .../deprecate_repositories_without_metadata.py     |   175 +
 scripts/tool_shed/migrate_tools_to_repositories.py |   337 +
 scripts/tool_shed/migrate_tools_to_repositories.sh |     4 +
 ...ow_tool_dependency_installation_dir_contents.py |    73 +
 scripts/tools/maf/check_loc_file.py                |    56 +
 scripts/tools/re_escape_output.py                  |    34 +
 scripts/transfer.py                                |   300 +
 scripts/update_shed_config_path.py                 |    60 +
 scripts/validate_tools.sh                          |    35 +
 setup.cfg                                          |    15 +
 static/favicon.ico                                 |   Bin 0 -> 15086 bytes
 static/formatHelp.html                             |   665 +
 static/gmaj/docs/cathy.gmaj.png                    |   Bin 0 -> 324 bytes
 static/gmaj/docs/gmaj.css                          |    21 +
 static/gmaj/docs/gmaj_bugs.html                    |   128 +
 static/gmaj/docs/gmaj_help.html                    |   860 ++
 static/gmaj/docs/gmaj_input.html                   |   735 +
 static/gmaj/docs/gmaj_install.html                 |   187 +
 static/gmaj/docs/gmaj_news.html                    |   525 +
 static/gmaj/docs/gmaj_readme.html                  |    85 +
 static/gmaj/docs/hand14.gif                        |   Bin 0 -> 1005 bytes
 static/gmaj/docs/sample.gmaj                       |   239 +
 static/gmaj/gmaj.jar                               |   Bin 0 -> 334262 bytes
 static/images/Armitagep_manhattan.png              |   Bin 0 -> 40256 bytes
 static/images/Armitagep_qqplot.png                 |   Bin 0 -> 12816 bytes
 static/images/add_icon.png                         |   Bin 0 -> 654 bytes
 static/images/add_icon_dark.png                    |   Bin 0 -> 924 bytes
 static/images/aggregate_history1.png               |   Bin 0 -> 71976 bytes
 static/images/aggregate_history2.png               |   Bin 0 -> 11843 bytes
 static/images/bar_chart.png                        |   Bin 0 -> 13071 bytes
 static/images/bed_warn.png                         |   Bin 0 -> 4697 bytes
 static/images/closebox.png                         |   Bin 0 -> 1910 bytes
 static/images/dat_points_table_brows_1.png         |   Bin 0 -> 63427 bytes
 static/images/ddarrowsplit.png                     |   Bin 0 -> 2921 bytes
 static/images/delete.gif                           |   Bin 0 -> 752 bytes
 static/images/delete_tag_icon_gray.png             |   Bin 0 -> 150 bytes
 static/images/delete_tag_icon_white.png            |   Bin 0 -> 140 bytes
 static/images/documents-stack-faded.png            |   Bin 0 -> 587 bytes
 static/images/documents-stack.png                  |   Bin 0 -> 594 bytes
 static/images/dropdownarrow.png                    |   Bin 0 -> 126 bytes
 static/images/dw.gif                               |   Bin 0 -> 917 bytes
 static/images/fetchTax.png                         |   Bin 0 -> 13144 bytes
 static/images/fonts/fontawesome-webfont.eot        |   Bin 0 -> 56006 bytes
 static/images/fonts/fontawesome-webfont.ttf        |   Bin 0 -> 112160 bytes
 static/images/fonts/fontawesome-webfont.woff       |   Bin 0 -> 65452 bytes
 static/images/fonts/iconic_stroke.eot              |   Bin 0 -> 25883 bytes
 static/images/fonts/iconic_stroke.otf              |   Bin 0 -> 40600 bytes
 static/images/fonts/iconic_stroke.svg              |   492 +
 static/images/fonts/iconic_stroke.ttf              |   Bin 0 -> 18856 bytes
 static/images/fugue/application-dock-270-bw.png    |   Bin 0 -> 1288 bytes
 static/images/fugue/application-dock-270.png       |   Bin 0 -> 516 bytes
 static/images/fugue/arrow-000-small-bw.png         |   Bin 0 -> 1041 bytes
 static/images/fugue/arrow-090.png                  |   Bin 0 -> 553 bytes
 static/images/fugue/arrow-circle.png               |   Bin 0 -> 770 bytes
 static/images/fugue/arrow-resize-090-bw.png        |   Bin 0 -> 1123 bytes
 static/images/fugue/arrow-resize-090.png           |   Bin 0 -> 403 bytes
 static/images/fugue/arrow-split-bw.png             |   Bin 0 -> 1389 bytes
 static/images/fugue/arrow-split.png                |   Bin 0 -> 707 bytes
 static/images/fugue/arrow-transition-270-bw.png    |   Bin 0 -> 1334 bytes
 static/images/fugue/arrow-transition-bw.png        |   Bin 0 -> 1314 bytes
 static/images/fugue/asterisk-small-outline.png     |   Bin 0 -> 281 bytes
 static/images/fugue/asterisk-small-yellow.png      |   Bin 0 -> 452 bytes
 static/images/fugue/asterisk-small.png             |   Bin 0 -> 445 bytes
 static/images/fugue/block--plus-bw.png             |   Bin 0 -> 1346 bytes
 static/images/fugue/block--plus.png                |   Bin 0 -> 689 bytes
 static/images/fugue/bookmarks-bw.png               |   Bin 0 -> 1353 bytes
 static/images/fugue/bookmarks.png                  |   Bin 0 -> 596 bytes
 static/images/fugue/bug.png                        |   Bin 0 -> 682 bytes
 static/images/fugue/chart.png                      |   Bin 0 -> 436 bytes
 static/images/fugue/chevron-expand-bw.png          |   Bin 0 -> 1214 bytes
 static/images/fugue/chevron-expand.png             |   Bin 0 -> 490 bytes
 static/images/fugue/chevron.png                    |   Bin 0 -> 493 bytes
 static/images/fugue/control-270.png                |   Bin 0 -> 375 bytes
 static/images/fugue/cross-button.png               |   Bin 0 -> 555 bytes
 static/images/fugue/cross-circle-bw.png            |   Bin 0 -> 1422 bytes
 static/images/fugue/cross-circle.png               |   Bin 0 -> 689 bytes
 static/images/fugue/cross-small-bw.png             |   Bin 0 -> 1053 bytes
 static/images/fugue/cross.png                      |   Bin 0 -> 476 bytes
 static/images/fugue/disk--arrow-bw.png             |   Bin 0 -> 1333 bytes
 static/images/fugue/disk--arrow.png                |   Bin 0 -> 603 bytes
 static/images/fugue/disk.png                       |   Bin 0 -> 475 bytes
 static/images/fugue/exclamation.png                |   Bin 0 -> 613 bytes
 static/images/fugue/external.png                   |   Bin 0 -> 621 bytes
 static/images/fugue/eye.png                        |   Bin 0 -> 536 bytes
 static/images/fugue/gear-bw.png                    |   Bin 0 -> 1424 bytes
 static/images/fugue/gear.png                       |   Bin 0 -> 721 bytes
 static/images/fugue/globe-bw.png                   |   Bin 0 -> 1575 bytes
 static/images/fugue/globe.png                      |   Bin 0 -> 849 bytes
 static/images/fugue/hammer-bw.png                  |   Bin 0 -> 1273 bytes
 static/images/fugue/hammer.png                     |   Bin 0 -> 575 bytes
 static/images/fugue/information-white.png          |   Bin 0 -> 651 bytes
 static/images/fugue/layer-transparent-bw.png       |   Bin 0 -> 1299 bytes
 static/images/fugue/layer-transparent.png          |   Bin 0 -> 566 bytes
 static/images/fugue/layers-stack-bw.png            |   Bin 0 -> 1392 bytes
 static/images/fugue/layers-stack.png               |   Bin 0 -> 664 bytes
 static/images/fugue/magnifier-left.png             |   Bin 0 -> 681 bytes
 static/images/fugue/magnifier-zoom-out.png         |   Bin 0 -> 736 bytes
 static/images/fugue/magnifier-zoom.png             |   Bin 0 -> 758 bytes
 static/images/fugue/navigation.png                 |   Bin 0 -> 776 bytes
 static/images/fugue/pencil-small.png               |   Bin 0 -> 309 bytes
 static/images/fugue/pencil.png                     |   Bin 0 -> 475 bytes
 static/images/fugue/plus-button-bw.png             |   Bin 0 -> 1309 bytes
 static/images/fugue/plus-button.png                |   Bin 0 -> 544 bytes
 static/images/fugue/plus-circle.png                |   Bin 0 -> 674 bytes
 static/images/fugue/sticky-note-text.png           |   Bin 0 -> 520 bytes
 static/images/fugue/tag--plus.png                  |   Bin 0 -> 778 bytes
 static/images/fugue/tag-label.png                  |   Bin 0 -> 714 bytes
 static/images/fugue/tags.png                       |   Bin 0 -> 661 bytes
 static/images/fugue/toggle-bw.png                  |   Bin 0 -> 1200 bytes
 static/images/fugue/toggle-expand-bw.png           |   Bin 0 -> 1269 bytes
 static/images/fugue/toggle-expand.png              |   Bin 0 -> 520 bytes
 static/images/fugue/toggle.png                     |   Bin 0 -> 448 bytes
 static/images/fugue/toolbox-bw.png                 |   Bin 0 -> 1243 bytes
 static/images/fugue/toolbox.png                    |   Bin 0 -> 488 bytes
 static/images/fugue/ui-slider-050-bw.png           |   Bin 0 -> 1155 bytes
 static/images/fugue/ui-slider-050.png              |   Bin 0 -> 454 bytes
 static/images/galaxyIcon_noText.png                |   Bin 0 -> 569 bytes
 static/images/hatch-023858.png                     |   Bin 0 -> 190 bytes
 static/images/hatch-fade-023858.gif                |   Bin 0 -> 147 bytes
 static/images/histogram.png                        |   Bin 0 -> 6648 bytes
 static/images/histogram2.png                       |   Bin 0 -> 8639 bytes
 static/images/history-buttons/delete_icon.png      |   Bin 0 -> 1381 bytes
 static/images/history-buttons/delete_icon_dark.png |   Bin 0 -> 1621 bytes
 static/images/history-buttons/delete_icon_grey.png |   Bin 0 -> 425 bytes
 static/images/history-buttons/eye_icon.png         |   Bin 0 -> 1512 bytes
 static/images/history-buttons/eye_icon_dark.png    |   Bin 0 -> 1631 bytes
 static/images/history-buttons/eye_icon_grey.png    |   Bin 0 -> 396 bytes
 static/images/history-buttons/pencil_icon.png      |   Bin 0 -> 1413 bytes
 static/images/history-buttons/pencil_icon_dark.png |   Bin 0 -> 1571 bytes
 static/images/history-buttons/pencil_icon_grey.png |   Bin 0 -> 405 bytes
 static/images/history-states/data_empty.png        |   Bin 0 -> 497 bytes
 static/images/history-states/data_error.png        |   Bin 0 -> 497 bytes
 static/images/history-states/data_ok.png           |   Bin 0 -> 503 bytes
 static/images/history-states/data_queued.png       |   Bin 0 -> 562 bytes
 static/images/history.gif                          |   Bin 0 -> 813 bytes
 static/images/history_down_arrow.gif               |   Bin 0 -> 204 bytes
 static/images/history_up_arrow.gif                 |   Bin 0 -> 201 bytes
 static/images/icon_error_lrg.gif                   |   Bin 0 -> 1531 bytes
 static/images/icon_error_sml.gif                   |   Bin 0 -> 1010 bytes
 static/images/icon_info_lrg.gif                    |   Bin 0 -> 1383 bytes
 static/images/icon_info_sml.gif                    |   Bin 0 -> 606 bytes
 static/images/icon_success_lrg.gif                 |   Bin 0 -> 1492 bytes
 static/images/icon_success_sml.gif                 |   Bin 0 -> 990 bytes
 static/images/icon_warning_lrg.gif                 |   Bin 0 -> 1491 bytes
 static/images/icon_warning_sml.gif                 |   Bin 0 -> 576 bytes
 static/images/jstree/32px.png                      |   Bin 0 -> 3121 bytes
 static/images/jstree/40px.png                      |   Bin 0 -> 1880 bytes
 static/images/jstree/throbber.gif                  |   Bin 0 -> 1720 bytes
 static/images/kendall.png                          |   Bin 0 -> 1157 bytes
 static/images/light_gray_grid.gif                  |   Bin 0 -> 836 bytes
 static/images/loading_large_white_bg.gif           |   Bin 0 -> 3208 bytes
 static/images/loading_small_white_bg.gif           |   Bin 0 -> 1849 bytes
 static/images/maf_icons/interval2maf.png           |   Bin 0 -> 11787 bytes
 static/images/maf_icons/stitchMaf.png              |   Bin 0 -> 11270 bytes
 static/images/mag_glass.png                        |   Bin 0 -> 706 bytes
 static/images/mutation_visualization_example.png   |   Bin 0 -> 6169 bytes
 static/images/openid-16x16.gif                     |   Bin 0 -> 328 bytes
 static/images/overview_arrows.png                  |   Bin 0 -> 176 bytes
 static/images/pearson.png                          |   Bin 0 -> 1602 bytes
 static/images/resizable.png                        |   Bin 0 -> 119 bytes
 static/images/rgWebLogo3_test.jpg                  |   Bin 0 -> 21641 bytes
 static/images/scatter.png                          |   Bin 0 -> 6975 bytes
 static/images/scatterplot.png                      |   Bin 0 -> 11945 bytes
 static/images/select2-spinner.gif                  |   Bin 0 -> 1849 bytes
 static/images/select2.png                          |   Bin 0 -> 613 bytes
 static/images/select2x2.png                        |   Bin 0 -> 845 bytes
 static/images/silk/add.png                         |   Bin 0 -> 733 bytes
 static/images/silk/book.png                        |   Bin 0 -> 593 bytes
 static/images/silk/book_open.png                   |   Bin 0 -> 622 bytes
 static/images/silk/chart_curve.png                 |   Bin 0 -> 710 bytes
 static/images/silk/folder.png                      |   Bin 0 -> 537 bytes
 static/images/silk/folder_page.png                 |   Bin 0 -> 688 bytes
 static/images/silk/link.png                        |   Bin 0 -> 343 bytes
 static/images/silk/link_break.png                  |   Bin 0 -> 657 bytes
 static/images/silk/page_white.png                  |   Bin 0 -> 294 bytes
 static/images/silk/page_white_compressed.png       |   Bin 0 -> 724 bytes
 static/images/silk/resultset_bottom.png            |   Bin 0 -> 446 bytes
 static/images/silk/resultset_next.png              |   Bin 0 -> 395 bytes
 static/images/silk/resultset_previous.png          |   Bin 0 -> 836 bytes
 static/images/solid_qual.png                       |   Bin 0 -> 63324 bytes
 static/images/spearman.png                         |   Bin 0 -> 894 bytes
 static/images/square_empty.gif                     |   Bin 0 -> 77 bytes
 static/images/square_error.gif                     |   Bin 0 -> 82 bytes
 static/images/square_ok.gif                        |   Bin 0 -> 80 bytes
 static/images/square_queued.gif                    |   Bin 0 -> 79 bytes
 static/images/square_running.gif                   |   Bin 0 -> 180 bytes
 static/images/star.gif                             |   Bin 0 -> 815 bytes
 static/images/thumbtack_icon.png                   |   Bin 0 -> 2957 bytes
 static/images/thumbtack_icon_dark.png              |   Bin 0 -> 3075 bytes
 static/images/tipsy.gif                            |   Bin 0 -> 867 bytes
 static/images/tool_menu_down_arrow.gif             |   Bin 0 -> 308 bytes
 .../lda/first_matrix_generator_example_file.png    |   Bin 0 -> 33141 bytes
 .../lda/second_matrix_generator_example_file.png   |   Bin 0 -> 23673 bytes
 static/images/tracks/block.png                     |   Bin 0 -> 1272 bytes
 static/images/tracks/close_btn.gif                 |   Bin 0 -> 463 bytes
 static/images/tracks/diag_bg.gif                   |   Bin 0 -> 4460 bytes
 static/images/tracks/go_btn.gif                    |   Bin 0 -> 434 bytes
 static/images/tracks/handle-left.gif               |   Bin 0 -> 121 bytes
 static/images/tracks/handle-right.gif              |   Bin 0 -> 124 bytes
 static/images/tracks/pan_left.gif                  |   Bin 0 -> 339 bytes
 static/images/tracks/pan_right.gif                 |   Bin 0 -> 356 bytes
 static/images/tracks/show_history.gif              |   Bin 0 -> 712 bytes
 static/images/tracks/zoom_in.gif                   |   Bin 0 -> 111 bytes
 static/images/tracks/zoom_in_full.gif              |   Bin 0 -> 151 bytes
 static/images/tracks/zoom_out.gif                  |   Bin 0 -> 95 bytes
 static/images/tracks/zoom_out_full.gif             |   Bin 0 -> 120 bytes
 static/images/up.gif                               |   Bin 0 -> 920 bytes
 .../images/visualization/draggable_horizontal.png  |   Bin 0 -> 87 bytes
 static/images/visualization/draggable_vertical.png |   Bin 0 -> 103 bytes
 static/images/visualization/strand_left.png        |   Bin 0 -> 116 bytes
 static/images/visualization/strand_left_inv.png    |   Bin 0 -> 88 bytes
 static/images/visualization/strand_right.png       |   Bin 0 -> 120 bytes
 static/images/visualization/strand_right_inv.png   |   Bin 0 -> 86 bytes
 static/images/yui/rel_interstitial_loading.gif     |   Bin 0 -> 6610 bytes
 static/incompatible-browser.html                   |    33 +
 static/jqtouch/img/backButton.png                  |   Bin 0 -> 816 bytes
 static/jqtouch/img/back_button.png                 |   Bin 0 -> 3756 bytes
 static/jqtouch/img/back_button_clicked.png         |   Bin 0 -> 3741 bytes
 static/jqtouch/img/bg_row.gif                      |   Bin 0 -> 162 bytes
 static/jqtouch/img/bg_row_select.gif               |   Bin 0 -> 264 bytes
 static/jqtouch/img/blueButton.png                  |   Bin 0 -> 517 bytes
 static/jqtouch/img/button.png                      |   Bin 0 -> 3315 bytes
 static/jqtouch/img/button_clicked.png              |   Bin 0 -> 3283 bytes
 static/jqtouch/img/cancel.png                      |   Bin 0 -> 362 bytes
 static/jqtouch/img/chevron.png                     |   Bin 0 -> 259 bytes
 static/jqtouch/img/chevron_select.png              |   Bin 0 -> 308 bytes
 static/jqtouch/img/grayButton.png                  |   Bin 0 -> 943 bytes
 static/jqtouch/img/header.gif                      |   Bin 0 -> 271 bytes
 static/jqtouch/img/listGroup.png                   |   Bin 0 -> 2867 bytes
 static/jqtouch/img/loading.gif                     |   Bin 0 -> 1435 bytes
 static/jqtouch/img/pinstripes.png                  |   Bin 0 -> 117 bytes
 static/jqtouch/img/selection.png                   |   Bin 0 -> 159 bytes
 static/jqtouch/img/thumb.png                       |   Bin 0 -> 2835 bytes
 static/jqtouch/img/toggle.png                      |   Bin 0 -> 2815 bytes
 static/jqtouch/img/toggleOn.png                    |   Bin 0 -> 163 bytes
 static/jqtouch/img/toolButton.png                  |   Bin 0 -> 531 bytes
 static/jqtouch/img/toolbar.gif                     |   Bin 0 -> 269 bytes
 static/jqtouch/img/toolbar.png                     |   Bin 0 -> 171 bytes
 static/jqtouch/img/whiteButton.png                 |   Bin 0 -> 978 bytes
 static/jqtouch/jqtouch.css                         |   436 +
 static/june_2007_style                             |     1 +
 static/laj/docs/java_plugin_help.html              |   132 +
 static/laj/docs/laj_applet_help.html               |   331 +
 static/laj/laj.jar                                 |   Bin 0 -> 149005 bytes
 static/maps/galaxy-app-base.js.map                 |     1 +
 static/maps/galaxy.interactive_environments.js.map |     1 +
 static/maps/galaxy.js.map                          |     1 +
 static/maps/galaxy.library.js.map                  |     1 +
 static/maps/galaxy.menu.js.map                     |     1 +
 static/maps/galaxy.pages.js.map                    |     1 +
 static/maps/i18n.js.map                            |     1 +
 static/maps/layout/generic-nav-view.js.map         |     1 +
 static/maps/layout/masthead.js.map                 |     1 +
 static/maps/layout/menu.js.map                     |     1 +
 static/maps/layout/modal.js.map                    |     1 +
 static/maps/layout/page.js.map                     |     1 +
 static/maps/layout/panel.js.map                    |     1 +
 static/maps/layout/scratchbook.js.map              |     1 +
 static/maps/libs/backbone.js.map                   |     1 +
 static/maps/libs/bbi/bigwig.js.map                 |     1 +
 static/maps/libs/bbi/jquery-ajax-native.js.map     |     1 +
 static/maps/libs/bbi/jszlib.js.map                 |     1 +
 static/maps/libs/bbi/spans.js.map                  |     1 +
 static/maps/libs/bibtex.js.map                     |     1 +
 static/maps/libs/bootstrap-tour.js.map             |     1 +
 static/maps/libs/bootstrap.js.map                  |     1 +
 static/maps/libs/common-libs.js.map                |     1 +
 static/maps/libs/d3.js.map                         |     1 +
 static/maps/libs/farbtastic.js.map                 |     1 +
 static/maps/libs/jquery.complexify.js.map          |     1 +
 static/maps/libs/jquery.sparklines.js.map          |     1 +
 static/maps/libs/jquery/jqtouch.js.map             |     1 +
 static/maps/libs/jquery/jquery-ui.js.map           |     1 +
 static/maps/libs/jquery/jquery.autocomplete.js.map |     1 +
 static/maps/libs/jquery/jquery.cookie.js.map       |     1 +
 static/maps/libs/jquery/jquery.dynatree.js.map     |     1 +
 static/maps/libs/jquery/jquery.event.drag.js.map   |     1 +
 static/maps/libs/jquery/jquery.event.drop.js.map   |     1 +
 static/maps/libs/jquery/jquery.event.hover.js.map  |     1 +
 static/maps/libs/jquery/jquery.form.js.map         |     1 +
 static/maps/libs/jquery/jquery.js.map              |     1 +
 static/maps/libs/jquery/jquery.migrate.js.map      |     1 +
 static/maps/libs/jquery/jquery.mousewheel.js.map   |     1 +
 static/maps/libs/jquery/jquery.rating.js.map       |     1 +
 static/maps/libs/jquery/jquery.wymeditor.js.map    |     1 +
 static/maps/libs/jquery/jstorage.js.map            |     1 +
 static/maps/libs/jquery/jstree.js.map              |     1 +
 static/maps/libs/jquery/select2.js.map             |     1 +
 static/maps/libs/raven.js.map                      |     1 +
 static/maps/libs/require.js.map                    |     1 +
 static/maps/libs/toastr.js.map                     |     1 +
 static/maps/libs/underscore.js.map                 |     1 +
 static/maps/mvc/annotation.js.map                  |     1 +
 static/maps/mvc/base-mvc.js.map                    |     1 +
 .../mvc/base/controlled-fetch-collection.js.map    |     1 +
 static/maps/mvc/citation/citation-model.js.map     |     1 +
 static/maps/mvc/citation/citation-view.js.map      |     1 +
 .../maps/mvc/collection/collection-li-edit.js.map  |     1 +
 static/maps/mvc/collection/collection-li.js.map    |     1 +
 static/maps/mvc/collection/collection-model.js.map |     1 +
 .../mvc/collection/collection-view-edit.js.map     |     1 +
 static/maps/mvc/collection/collection-view.js.map  |     1 +
 .../mvc/collection/list-collection-creator.js.map  |     1 +
 .../list-of-pairs-collection-creator.js.map        |     1 +
 .../mvc/collection/pair-collection-creator.js.map  |     1 +
 .../collection/paired-collection-creator.js.map    |     1 +
 static/maps/mvc/dataset/data.js.map                |     1 +
 static/maps/mvc/dataset/dataset-choice.js.map      |     1 +
 static/maps/mvc/dataset/dataset-li-edit.js.map     |     1 +
 static/maps/mvc/dataset/dataset-li.js.map          |     1 +
 static/maps/mvc/dataset/dataset-list.js.map        |     1 +
 static/maps/mvc/dataset/dataset-model.js.map       |     1 +
 static/maps/mvc/dataset/states.js.map              |     1 +
 static/maps/mvc/form/form-data.js.map              |     1 +
 static/maps/mvc/form/form-input.js.map             |     1 +
 static/maps/mvc/form/form-parameters.js.map        |     1 +
 static/maps/mvc/form/form-repeat.js.map            |     1 +
 static/maps/mvc/form/form-section.js.map           |     1 +
 static/maps/mvc/form/form-view.js.map              |     1 +
 static/maps/mvc/grid/grid-model.js.map             |     1 +
 static/maps/mvc/grid/grid-template.js.map          |     1 +
 static/maps/mvc/grid/grid-view.js.map              |     1 +
 static/maps/mvc/groups/group-detail-view.js.map    |     1 +
 .../maps/mvc/groups/group-groupdetail-view.js.map  |     1 +
 static/maps/mvc/groups/group-grouprow-view.js.map  |     1 +
 static/maps/mvc/groups/group-list-view.js.map      |     1 +
 static/maps/mvc/groups/group-listrow-view.js.map   |     1 +
 static/maps/mvc/groups/group-model.js.map          |     1 +
 static/maps/mvc/groups/group.model.js.map          |     1 +
 static/maps/mvc/history/copy-dialog.js.map         |     1 +
 static/maps/mvc/history/hda-li-edit.js.map         |     1 +
 static/maps/mvc/history/hda-li.js.map              |     1 +
 static/maps/mvc/history/hda-model.js.map           |     1 +
 static/maps/mvc/history/hdca-li-edit.js.map        |     1 +
 static/maps/mvc/history/hdca-li.js.map             |     1 +
 static/maps/mvc/history/hdca-model.js.map          |     1 +
 .../maps/mvc/history/history-content-model.js.map  |     1 +
 static/maps/mvc/history/history-contents.js.map    |     1 +
 static/maps/mvc/history/history-model.js.map       |     1 +
 static/maps/mvc/history/history-preferences.js.map |     1 +
 .../maps/mvc/history/history-structure-view.js.map |     1 +
 .../maps/mvc/history/history-view-annotated.js.map |     1 +
 .../mvc/history/history-view-edit-current.js.map   |     1 +
 static/maps/mvc/history/history-view-edit.js.map   |     1 +
 static/maps/mvc/history/history-view.js.map        |     1 +
 static/maps/mvc/history/job-dag.js.map             |     1 +
 static/maps/mvc/history/multi-panel.js.map         |     1 +
 static/maps/mvc/history/options-menu.js.map        |     1 +
 static/maps/mvc/job/job-li.js.map                  |     1 +
 static/maps/mvc/job/job-model.js.map               |     1 +
 .../maps/mvc/library/library-dataset-view.js.map   |     1 +
 static/maps/mvc/library/library-folder-view.js.map |     1 +
 .../mvc/library/library-folderlist-view.js.map     |     1 +
 .../maps/mvc/library/library-folderrow-view.js.map |     1 +
 .../mvc/library/library-foldertoolbar-view.js.map  |     1 +
 .../maps/mvc/library/library-library-view.js.map   |     1 +
 .../mvc/library/library-librarylist-view.js.map    |     1 +
 .../mvc/library/library-libraryrow-view.js.map     |     1 +
 .../mvc/library/library-librarytoolbar-view.js.map |     1 +
 static/maps/mvc/library/library-model.js.map       |     1 +
 static/maps/mvc/list/list-item.js.map              |     1 +
 static/maps/mvc/list/list-panel.js.map             |     1 +
 static/maps/mvc/list/list-view.js.map              |     1 +
 static/maps/mvc/tag.js.map                         |     1 +
 static/maps/mvc/tool/tool-form-base.js.map         |     1 +
 static/maps/mvc/tool/tool-form-composite.js.map    |     1 +
 static/maps/mvc/tool/tool-form-workflow.js.map     |     1 +
 static/maps/mvc/tool/tool-form.js.map              |     1 +
 static/maps/mvc/tool/tool-template.js.map          |     1 +
 static/maps/mvc/tool/tool-webhooks.js.map          |     1 +
 static/maps/mvc/tool/tools.js.map                  |     1 +
 static/maps/mvc/tours.js.map                       |     1 +
 static/maps/mvc/ui/error-modal.js.map              |     1 +
 static/maps/mvc/ui/icon-button.js.map              |     1 +
 static/maps/mvc/ui/popup-menu.js.map               |     1 +
 static/maps/mvc/ui/ui-buttons.js.map               |     1 +
 static/maps/mvc/ui/ui-color-picker.js.map          |     1 +
 static/maps/mvc/ui/ui-drilldown.js.map             |     1 +
 static/maps/mvc/ui/ui-frames.js.map                |     1 +
 static/maps/mvc/ui/ui-list.js.map                  |     1 +
 static/maps/mvc/ui/ui-misc.js.map                  |     1 +
 static/maps/mvc/ui/ui-modal.js.map                 |     1 +
 static/maps/mvc/ui/ui-options.js.map               |     1 +
 static/maps/mvc/ui/ui-popover.js.map               |     1 +
 static/maps/mvc/ui/ui-portlet.js.map               |     1 +
 static/maps/mvc/ui/ui-select-content.js.map        |     1 +
 static/maps/mvc/ui/ui-select-default.js.map        |     1 +
 static/maps/mvc/ui/ui-select-ftp.js.map            |     1 +
 static/maps/mvc/ui/ui-select-library.js.map        |     1 +
 static/maps/mvc/ui/ui-select.js.map                |     1 +
 static/maps/mvc/ui/ui-slider.js.map                |     1 +
 static/maps/mvc/ui/ui-table.js.map                 |     1 +
 static/maps/mvc/ui/ui-tabs.js.map                  |     1 +
 static/maps/mvc/ui/ui-thumbnails.js.map            |     1 +
 .../maps/mvc/upload/composite/composite-row.js.map |     1 +
 .../mvc/upload/composite/composite-view.js.map     |     1 +
 static/maps/mvc/upload/default/default-row.js.map  |     1 +
 static/maps/mvc/upload/default/default-view.js.map |     1 +
 static/maps/mvc/upload/upload-button.js.map        |     1 +
 static/maps/mvc/upload/upload-ftp.js.map           |     1 +
 static/maps/mvc/upload/upload-model.js.map         |     1 +
 static/maps/mvc/upload/upload-row.js.map           |     1 +
 static/maps/mvc/upload/upload-settings.js.map      |     1 +
 static/maps/mvc/upload/upload-view.js.map          |     1 +
 static/maps/mvc/user/change-password.js.map        |     1 +
 static/maps/mvc/user/extra-information.js.map      |     1 +
 .../maps/mvc/user/manage-user-information.js.map   |     1 +
 static/maps/mvc/user/user-model.js.map             |     1 +
 static/maps/mvc/user/user-quotameter.js.map        |     1 +
 .../mvc/visualization/visualization-model.js.map   |     1 +
 static/maps/mvc/webhooks.js.map                    |     1 +
 static/maps/mvc/workflow/workflow-canvas.js.map    |     1 +
 static/maps/mvc/workflow/workflow-connector.js.map |     1 +
 static/maps/mvc/workflow/workflow-manager.js.map   |     1 +
 static/maps/mvc/workflow/workflow-node.js.map      |     1 +
 static/maps/mvc/workflow/workflow-terminals.js.map |     1 +
 static/maps/mvc/workflow/workflow-view-data.js.map |     1 +
 static/maps/mvc/workflow/workflow-view-node.js.map |     1 +
 .../mvc/workflow/workflow-view-terminals.js.map    |     1 +
 static/maps/mvc/workflow/workflow-view.js.map      |     1 +
 static/maps/mvc/workflow/workflow.js.map           |     1 +
 static/maps/nls/ja/locale.js.map                   |     1 +
 static/maps/nls/locale.js.map                      |     1 +
 static/maps/nls/zh/locale.js.map                   |     1 +
 static/maps/onload.js.map                          |     1 +
 static/maps/polyfills.js.map                       |     1 +
 static/maps/reports_webapp/run_stats.js.map        |     1 +
 .../maps/templates/compiled/panel_section.js.map   |     1 +
 static/maps/templates/compiled/tool_form.js.map    |     1 +
 static/maps/templates/compiled/tool_link.js.map    |     1 +
 static/maps/templates/compiled/tool_search.js.map  |     1 +
 static/maps/toolshed.groups.js.map                 |     1 +
 static/maps/ui/autocom_tagging.js.map              |     1 +
 static/maps/ui/editable-text.js.map                |     1 +
 static/maps/ui/fa-icon-button.js.map               |     1 +
 static/maps/ui/filter-control.js.map               |     1 +
 static/maps/ui/hoverhighlight.js.map               |     1 +
 static/maps/ui/loading-indicator.js.map            |     1 +
 static/maps/ui/mode-button.js.map                  |     1 +
 static/maps/ui/pagination.js.map                   |     1 +
 static/maps/ui/peek-column-selector.js.map         |     1 +
 static/maps/ui/popupmenu.js.map                    |     1 +
 static/maps/ui/scroll-panel.js.map                 |     1 +
 static/maps/ui/search-input.js.map                 |     1 +
 static/maps/utils/add-logging.js.map               |     1 +
 static/maps/utils/ajax-queue.js.map                |     1 +
 static/maps/utils/async-save-text.js.map           |     1 +
 static/maps/utils/config.js.map                    |     1 +
 static/maps/utils/deferred.js.map                  |     1 +
 static/maps/utils/graph.js.map                     |     1 +
 static/maps/utils/levenshtein.js.map               |     1 +
 static/maps/utils/localization.js.map              |     1 +
 static/maps/utils/metrics-logger.js.map            |     1 +
 static/maps/utils/natural-sort.js.map              |     1 +
 static/maps/utils/query-string-parsing.js.map      |     1 +
 static/maps/utils/uploadbox.js.map                 |     1 +
 static/maps/utils/utils.js.map                     |     1 +
 static/maps/viz/bbi-data-manager.js.map            |     1 +
 static/maps/viz/circster.js.map                    |     1 +
 static/maps/viz/phyloviz.js.map                    |     1 +
 static/maps/viz/sweepster.js.map                   |     1 +
 static/maps/viz/trackster.js.map                   |     1 +
 static/maps/viz/trackster/filters.js.map           |     1 +
 static/maps/viz/trackster/painters.js.map          |     1 +
 static/maps/viz/trackster/slotting.js.map          |     1 +
 static/maps/viz/trackster/tracks.js.map            |     1 +
 static/maps/viz/trackster/util.js.map              |     1 +
 static/maps/viz/visualization.js.map               |     1 +
 static/maps/viz/viz_views.js.map                   |     1 +
 static/patmat/findcluster.png                      |   Bin 0 -> 10834 bytes
 static/robots.txt                                  |     3 +
 static/scripts/bundled/analysis.bundled.js         |    13 +
 static/scripts/bundled/analysis.bundled.js.map     |     1 +
 static/scripts/bundled/libs.bundled.js             |    44 +
 static/scripts/bundled/libs.bundled.js.map         |     1 +
 static/scripts/bundled/login.bundled.js            |     2 +
 static/scripts/bundled/login.bundled.js.map        |     1 +
 static/scripts/galaxy.interactive_environments.js  |     2 +
 static/scripts/galaxy.js                           |     2 +
 static/scripts/galaxy.library.js                   |     2 +
 static/scripts/galaxy.menu.js                      |     2 +
 static/scripts/galaxy.pages.js                     |     2 +
 static/scripts/i18n.js                             |     2 +
 static/scripts/layout/generic-nav-view.js          |     2 +
 static/scripts/layout/masthead.js                  |     2 +
 static/scripts/layout/menu.js                      |     2 +
 static/scripts/layout/modal.js                     |     2 +
 static/scripts/layout/page.js                      |     2 +
 static/scripts/layout/panel.js                     |     2 +
 static/scripts/layout/scratchbook.js               |     2 +
 static/scripts/libs/backbone.js                    |     2 +
 static/scripts/libs/bbi/bigwig.js                  |     2 +
 static/scripts/libs/bbi/jquery-ajax-native.js      |     2 +
 static/scripts/libs/bbi/jszlib.js                  |     2 +
 static/scripts/libs/bbi/spans.js                   |     2 +
 static/scripts/libs/bibtex.js                      |     3 +
 static/scripts/libs/bootstrap-tour.js              |     2 +
 static/scripts/libs/bootstrap.js                   |     2 +
 static/scripts/libs/common-libs.js                 |     2 +
 static/scripts/libs/d3.js                          |     7 +
 static/scripts/libs/farbtastic.js                  |     2 +
 static/scripts/libs/jquery.complexify.js           |     2 +
 static/scripts/libs/jquery.sparklines.js           |     3 +
 static/scripts/libs/jquery/jqtouch.js              |     2 +
 static/scripts/libs/jquery/jquery-ui.js            |     4 +
 static/scripts/libs/jquery/jquery.autocomplete.js  |     2 +
 static/scripts/libs/jquery/jquery.cookie.js        |     2 +
 static/scripts/libs/jquery/jquery.dynatree.js      |     3 +
 static/scripts/libs/jquery/jquery.event.drag.js    |     2 +
 static/scripts/libs/jquery/jquery.event.drop.js    |     2 +
 static/scripts/libs/jquery/jquery.event.hover.js   |     2 +
 static/scripts/libs/jquery/jquery.form.js          |     2 +
 static/scripts/libs/jquery/jquery.js               |     5 +
 static/scripts/libs/jquery/jquery.migrate.js       |     2 +
 static/scripts/libs/jquery/jquery.mousewheel.js    |     2 +
 static/scripts/libs/jquery/jquery.rating.js        |     2 +
 static/scripts/libs/jquery/jquery.wymeditor.js     |     4 +
 static/scripts/libs/jquery/jstorage.js             |     2 +
 static/scripts/libs/jquery/jstree.js               |     6 +
 static/scripts/libs/jquery/select2.js              |     4 +
 static/scripts/libs/raven.js                       |     2 +
 static/scripts/libs/require.js                     |     2 +
 static/scripts/libs/toastr.js                      |     2 +
 static/scripts/libs/underscore.js                  |     2 +
 static/scripts/mvc/annotation.js                   |     2 +
 static/scripts/mvc/base-mvc.js                     |     2 +
 .../mvc/base/controlled-fetch-collection.js        |     2 +
 static/scripts/mvc/citation/citation-model.js      |     2 +
 static/scripts/mvc/citation/citation-view.js       |     2 +
 .../scripts/mvc/collection/collection-li-edit.js   |     2 +
 static/scripts/mvc/collection/collection-li.js     |     2 +
 static/scripts/mvc/collection/collection-model.js  |     2 +
 .../scripts/mvc/collection/collection-view-edit.js |     2 +
 static/scripts/mvc/collection/collection-view.js   |     2 +
 .../mvc/collection/list-collection-creator.js      |     2 +
 .../collection/list-of-pairs-collection-creator.js |     2 +
 .../mvc/collection/pair-collection-creator.js      |     2 +
 static/scripts/mvc/dataset/data.js                 |     2 +
 static/scripts/mvc/dataset/dataset-choice.js       |     2 +
 static/scripts/mvc/dataset/dataset-li-edit.js      |     2 +
 static/scripts/mvc/dataset/dataset-li.js           |     2 +
 static/scripts/mvc/dataset/dataset-list.js         |     2 +
 static/scripts/mvc/dataset/dataset-model.js        |     2 +
 static/scripts/mvc/dataset/states.js               |     2 +
 static/scripts/mvc/form/form-data.js               |     2 +
 static/scripts/mvc/form/form-input.js              |     2 +
 static/scripts/mvc/form/form-parameters.js         |     2 +
 static/scripts/mvc/form/form-repeat.js             |     2 +
 static/scripts/mvc/form/form-section.js            |     2 +
 static/scripts/mvc/form/form-view.js               |     2 +
 static/scripts/mvc/grid/grid-model.js              |     2 +
 static/scripts/mvc/grid/grid-template.js           |     2 +
 static/scripts/mvc/grid/grid-view.js               |     2 +
 static/scripts/mvc/history/copy-dialog.js          |     2 +
 static/scripts/mvc/history/hda-li-edit.js          |     2 +
 static/scripts/mvc/history/hda-li.js               |     2 +
 static/scripts/mvc/history/hda-model.js            |     2 +
 static/scripts/mvc/history/hdca-li-edit.js         |     2 +
 static/scripts/mvc/history/hdca-li.js              |     2 +
 static/scripts/mvc/history/hdca-model.js           |     2 +
 .../scripts/mvc/history/history-content-model.js   |     2 +
 static/scripts/mvc/history/history-contents.js     |     2 +
 static/scripts/mvc/history/history-model.js        |     2 +
 static/scripts/mvc/history/history-preferences.js  |     2 +
 .../scripts/mvc/history/history-structure-view.js  |     2 +
 .../scripts/mvc/history/history-view-annotated.js  |     2 +
 .../mvc/history/history-view-edit-current.js       |     2 +
 static/scripts/mvc/history/history-view-edit.js    |     2 +
 static/scripts/mvc/history/history-view.js         |     2 +
 static/scripts/mvc/history/job-dag.js              |     2 +
 static/scripts/mvc/history/multi-panel.js          |     2 +
 static/scripts/mvc/history/options-menu.js         |     2 +
 static/scripts/mvc/job/job-li.js                   |     2 +
 static/scripts/mvc/job/job-model.js                |     2 +
 static/scripts/mvc/library/library-dataset-view.js |     2 +
 static/scripts/mvc/library/library-folder-view.js  |     2 +
 .../scripts/mvc/library/library-folderlist-view.js |     2 +
 .../scripts/mvc/library/library-folderrow-view.js  |     2 +
 .../mvc/library/library-foldertoolbar-view.js      |     3 +
 static/scripts/mvc/library/library-library-view.js |     2 +
 .../mvc/library/library-librarylist-view.js        |     2 +
 .../scripts/mvc/library/library-libraryrow-view.js |     2 +
 .../mvc/library/library-librarytoolbar-view.js     |     2 +
 static/scripts/mvc/library/library-model.js        |     2 +
 static/scripts/mvc/list/list-item.js               |     2 +
 static/scripts/mvc/list/list-view.js               |     2 +
 static/scripts/mvc/tag.js                          |     2 +
 static/scripts/mvc/tool/tool-form-base.js          |     2 +
 static/scripts/mvc/tool/tool-form-composite.js     |     2 +
 static/scripts/mvc/tool/tool-form-workflow.js      |     2 +
 static/scripts/mvc/tool/tool-form.js               |     2 +
 static/scripts/mvc/tool/tool-template.js           |     2 +
 static/scripts/mvc/tool/tool-webhooks.js           |     2 +
 static/scripts/mvc/tool/tools.js                   |     2 +
 static/scripts/mvc/tours.js                        |     2 +
 static/scripts/mvc/ui/error-modal.js               |     2 +
 static/scripts/mvc/ui/icon-button.js               |     2 +
 static/scripts/mvc/ui/popup-menu.js                |     2 +
 static/scripts/mvc/ui/ui-buttons.js                |     2 +
 static/scripts/mvc/ui/ui-color-picker.js           |     2 +
 static/scripts/mvc/ui/ui-drilldown.js              |     2 +
 static/scripts/mvc/ui/ui-frames.js                 |     2 +
 static/scripts/mvc/ui/ui-list.js                   |     2 +
 static/scripts/mvc/ui/ui-misc.js                   |     2 +
 static/scripts/mvc/ui/ui-modal.js                  |     2 +
 static/scripts/mvc/ui/ui-options.js                |     2 +
 static/scripts/mvc/ui/ui-popover.js                |     2 +
 static/scripts/mvc/ui/ui-portlet.js                |     2 +
 static/scripts/mvc/ui/ui-select-content.js         |     2 +
 static/scripts/mvc/ui/ui-select-default.js         |     2 +
 static/scripts/mvc/ui/ui-select-ftp.js             |     2 +
 static/scripts/mvc/ui/ui-select-library.js         |     2 +
 static/scripts/mvc/ui/ui-select.js                 |     2 +
 static/scripts/mvc/ui/ui-slider.js                 |     2 +
 static/scripts/mvc/ui/ui-table.js                  |     2 +
 static/scripts/mvc/ui/ui-tabs.js                   |     2 +
 static/scripts/mvc/ui/ui-thumbnails.js             |     2 +
 .../scripts/mvc/upload/composite/composite-row.js  |     2 +
 .../scripts/mvc/upload/composite/composite-view.js |     2 +
 static/scripts/mvc/upload/default/default-row.js   |     2 +
 static/scripts/mvc/upload/default/default-view.js  |     2 +
 static/scripts/mvc/upload/upload-button.js         |     2 +
 static/scripts/mvc/upload/upload-ftp.js            |     2 +
 static/scripts/mvc/upload/upload-model.js          |     2 +
 static/scripts/mvc/upload/upload-settings.js       |     2 +
 static/scripts/mvc/upload/upload-view.js           |     2 +
 static/scripts/mvc/user/change-password.js         |     2 +
 static/scripts/mvc/user/extra-information.js       |     2 +
 static/scripts/mvc/user/manage-user-information.js |     2 +
 static/scripts/mvc/user/user-model.js              |     2 +
 static/scripts/mvc/user/user-quotameter.js         |     2 +
 .../mvc/visualization/visualization-model.js       |     2 +
 static/scripts/mvc/webhooks.js                     |     2 +
 static/scripts/mvc/workflow/workflow-canvas.js     |     2 +
 static/scripts/mvc/workflow/workflow-connector.js  |     2 +
 static/scripts/mvc/workflow/workflow-manager.js    |     2 +
 static/scripts/mvc/workflow/workflow-node.js       |     2 +
 static/scripts/mvc/workflow/workflow-terminals.js  |     2 +
 static/scripts/mvc/workflow/workflow-view-data.js  |     2 +
 static/scripts/mvc/workflow/workflow-view-node.js  |     2 +
 .../mvc/workflow/workflow-view-terminals.js        |     2 +
 static/scripts/mvc/workflow/workflow-view.js       |     2 +
 static/scripts/nls/ja/locale.js                    |     2 +
 static/scripts/nls/locale.js                       |     2 +
 static/scripts/nls/zh/locale.js                    |     2 +
 static/scripts/onload.js                           |     2 +
 static/scripts/packed                              |     1 +
 static/scripts/polyfills.js                        |     2 +
 static/scripts/reports_webapp/run_stats.js         |     2 +
 static/scripts/ui/autocom_tagging.js               |     2 +
 static/scripts/ui/editable-text.js                 |     2 +
 static/scripts/ui/fa-icon-button.js                |     2 +
 static/scripts/ui/filter-control.js                |     2 +
 static/scripts/ui/hoverhighlight.js                |     2 +
 static/scripts/ui/loading-indicator.js             |     2 +
 static/scripts/ui/mode-button.js                   |     2 +
 static/scripts/ui/pagination.js                    |     2 +
 static/scripts/ui/peek-column-selector.js          |     2 +
 static/scripts/ui/popupmenu.js                     |     2 +
 static/scripts/ui/scroll-panel.js                  |     2 +
 static/scripts/ui/search-input.js                  |     2 +
 static/scripts/utils/add-logging.js                |     2 +
 static/scripts/utils/ajax-queue.js                 |     2 +
 static/scripts/utils/async-save-text.js            |     2 +
 static/scripts/utils/config.js                     |     2 +
 static/scripts/utils/deferred.js                   |     2 +
 static/scripts/utils/graph.js                      |     2 +
 static/scripts/utils/levenshtein.js                |     2 +
 static/scripts/utils/localization.js               |     2 +
 static/scripts/utils/metrics-logger.js             |     2 +
 static/scripts/utils/natural-sort.js               |     2 +
 static/scripts/utils/query-string-parsing.js       |     2 +
 static/scripts/utils/uploadbox.js                  |     2 +
 static/scripts/utils/utils.js                      |     2 +
 static/scripts/viz/bbi-data-manager.js             |     2 +
 static/scripts/viz/circster.js                     |     2 +
 static/scripts/viz/phyloviz.js                     |     2 +
 static/scripts/viz/sweepster.js                    |     2 +
 static/scripts/viz/trackster.js                    |     2 +
 static/scripts/viz/trackster/filters.js            |     2 +
 static/scripts/viz/trackster/painters.js           |     2 +
 static/scripts/viz/trackster/slotting.js           |     2 +
 static/scripts/viz/trackster/tracks.js             |     4 +
 static/scripts/viz/trackster/util.js               |     2 +
 static/scripts/viz/visualization.js                |     2 +
 static/scripts/viz/viz_views.js                    |     2 +
 static/src                                         |     1 +
 static/style/base.css                              |     1 +
 static/style/blue/autocomplete_tagging.css         |     1 +
 static/style/blue/base.css                         |     4 +
 static/style/blue/base_bg.png                      |   Bin 0 -> 98 bytes
 static/style/blue/bootstrap-tour.css               |    73 +
 static/style/blue/button_bar_bg_light.png          |   Bin 0 -> 108 bytes
 static/style/blue/circster.css                     |     1 +
 static/style/blue/data_empty.png                   |   Bin 0 -> 497 bytes
 static/style/blue/data_error.png                   |   Bin 0 -> 497 bytes
 static/style/blue/data_ok.png                      |   Bin 0 -> 503 bytes
 static/style/blue/data_queued.png                  |   Bin 0 -> 562 bytes
 static/style/blue/data_running.gif                 |   Bin 0 -> 2141 bytes
 static/style/blue/data_upload.gif                  |   Bin 0 -> 810 bytes
 static/style/blue/done_message_icon.png            |   Bin 0 -> 670 bytes
 static/style/blue/dynatree_skin/icons-rtl.gif      |   Bin 0 -> 4046 bytes
 static/style/blue/dynatree_skin/icons.gif          |   Bin 0 -> 4041 bytes
 static/style/blue/dynatree_skin/loading.gif        |   Bin 0 -> 570 bytes
 static/style/blue/dynatree_skin/ui.dynatree.css    |   442 +
 static/style/blue/dynatree_skin/vline-rtl.gif      |   Bin 0 -> 842 bytes
 static/style/blue/dynatree_skin/vline.gif          |   Bin 0 -> 844 bytes
 static/style/blue/embed_item.css                   |     1 +
 static/style/blue/error_bg.png                     |   Bin 0 -> 168 bytes
 static/style/blue/error_large.png                  |   Bin 0 -> 1052 bytes
 static/style/blue/error_message_icon.png           |   Bin 0 -> 706 bytes
 static/style/blue/error_small.png                  |   Bin 0 -> 497 bytes
 static/style/blue/footer_title_bg.png              |   Bin 0 -> 224 bytes
 static/style/blue/form_body_bg.png                 |   Bin 0 -> 92 bytes
 static/style/blue/form_title_bg.png                |   Bin 0 -> 207 bytes
 static/style/blue/fugue.png                        |   Bin 0 -> 26463 bytes
 static/style/blue/gray_bg.png                      |   Bin 0 -> 124 bytes
 static/style/blue/hgrad.png                        |   Bin 0 -> 80 bytes
 static/style/blue/hgrad_over.png                   |   Bin 0 -> 78 bytes
 static/style/blue/history-buttons.png              |   Bin 0 -> 2509 bytes
 static/style/blue/history-states.png               |   Bin 0 -> 1321 bytes
 static/style/blue/history.css                      |     0
 static/style/blue/info_large.png                   |   Bin 0 -> 624 bytes
 static/style/blue/info_message_icon.png            |   Bin 0 -> 453 bytes
 static/style/blue/info_small.png                   |   Bin 0 -> 350 bytes
 static/style/blue/iphone.css                       |     1 +
 .../images/ui-bg_flat_0_aaaaaa_40x100.png          |   Bin 0 -> 180 bytes
 .../images/ui-bg_flat_75_ffffff_40x100.png         |   Bin 0 -> 178 bytes
 .../images/ui-bg_glass_55_fbf9ee_1x400.png         |   Bin 0 -> 144 bytes
 .../images/ui-bg_glass_65_ffffff_1x400.png         |   Bin 0 -> 105 bytes
 .../images/ui-bg_glass_75_dadada_1x400.png         |   Bin 0 -> 111 bytes
 .../images/ui-bg_glass_75_e6e6e6_1x400.png         |   Bin 0 -> 110 bytes
 .../images/ui-bg_glass_95_fef1ec_1x400.png         |   Bin 0 -> 119 bytes
 .../ui-bg_highlight-soft_75_cccccc_1x100.png       |   Bin 0 -> 101 bytes
 .../smoothness/images/ui-icons_222222_256x240.png  |   Bin 0 -> 4369 bytes
 .../smoothness/images/ui-icons_2e83ff_256x240.png  |   Bin 0 -> 5355 bytes
 .../smoothness/images/ui-icons_454545_256x240.png  |   Bin 0 -> 4369 bytes
 .../smoothness/images/ui-icons_888888_256x240.png  |   Bin 0 -> 4369 bytes
 .../smoothness/images/ui-icons_cd0a0a_256x240.png  |   Bin 0 -> 4369 bytes
 .../style/blue/jquery-ui/smoothness/jquery-ui.css  |    10 +
 static/style/blue/jquery.rating.css                |    12 +
 static/style/blue/largespinner.gif                 |   Bin 0 -> 3208 bytes
 static/style/blue/layout_callout_top.png           |   Bin 0 -> 268 bytes
 static/style/blue/library.css                      |     1 +
 static/style/blue/masthead.css                     |     1 +
 static/style/blue/masthead_bg.png                  |   Bin 0 -> 84 bytes
 static/style/blue/menu_bg.png                      |   Bin 0 -> 624 bytes
 static/style/blue/ok_bg.png                        |   Bin 0 -> 168 bytes
 static/style/blue/ok_large.png                     |   Bin 0 -> 977 bytes
 static/style/blue/ok_small.png                     |   Bin 0 -> 503 bytes
 static/style/blue/panel_header_bg.png              |   Bin 0 -> 120 bytes
 static/style/blue/popupmenu_callout_top.png        |   Bin 0 -> 268 bytes
 static/style/blue/question-balloon.png             |   Bin 0 -> 722 bytes
 static/style/blue/question-octagon-frame.png       |   Bin 0 -> 777 bytes
 static/style/blue/reports.css                      |     1 +
 static/style/blue/sprite-fugue.png                 |   Bin 0 -> 31536 bytes
 static/style/blue/sprite-history-buttons.png       |   Bin 0 -> 3046 bytes
 static/style/blue/sprite-history-states.png        |   Bin 0 -> 1643 bytes
 static/style/blue/tiny_arrow_left.png              |   Bin 0 -> 101 bytes
 static/style/blue/tiny_arrow_right.png             |   Bin 0 -> 103 bytes
 static/style/blue/trackster.css                    |     1 +
 static/style/blue/wait_large.png                   |   Bin 0 -> 1596 bytes
 static/style/blue/wait_small.png                   |   Bin 0 -> 562 bytes
 static/style/blue/warn_bg.png                      |   Bin 0 -> 169 bytes
 static/style/blue/warn_large.png                   |   Bin 0 -> 682 bytes
 static/style/blue/warn_message_icon.png            |   Bin 0 -> 695 bytes
 static/style/blue/warn_small.png                   |   Bin 0 -> 416 bytes
 static/style/blue/workflow_circle_drag.png         |   Bin 0 -> 219 bytes
 static/style/blue/workflow_circle_green.png        |   Bin 0 -> 274 bytes
 static/style/blue/workflow_circle_open.png         |   Bin 0 -> 266 bytes
 static/style/shared_images/data_running.gif        |   Bin 0 -> 2141 bytes
 static/style/shared_images/error_large.png         |   Bin 0 -> 1052 bytes
 static/style/shared_images/error_small.png         |   Bin 0 -> 497 bytes
 static/style/shared_images/info_large.png          |   Bin 0 -> 624 bytes
 static/style/shared_images/info_small.png          |   Bin 0 -> 350 bytes
 static/style/shared_images/ok_large.png            |   Bin 0 -> 977 bytes
 static/style/shared_images/ok_small.png            |   Bin 0 -> 503 bytes
 static/style/shared_images/wait_large.png          |   Bin 0 -> 1596 bytes
 static/style/shared_images/wait_small.png          |   Bin 0 -> 562 bytes
 static/style/shared_images/warn_large.png          |   Bin 0 -> 682 bytes
 static/style/shared_images/warn_small.png          |   Bin 0 -> 416 bytes
 .../maps/mvc/groups/group-detail-view.js.map       |     1 +
 .../maps/mvc/groups/group-list-view.js.map         |     1 +
 .../maps/mvc/groups/group-listrow-view.js.map      |     1 +
 static/toolshed/maps/mvc/groups/group-model.js.map |     1 +
 static/toolshed/maps/test-file.js.map              |     1 +
 static/toolshed/maps/toolshed.groups.js.map        |     1 +
 .../scripts/mvc/groups/group-detail-view.js        |     2 +
 .../toolshed/scripts/mvc/groups/group-list-view.js |     2 +
 .../scripts/mvc/groups/group-listrow-view.js       |     2 +
 static/toolshed/scripts/mvc/groups/group-model.js  |     2 +
 static/toolshed/scripts/toolshed.groups.js         |     2 +
 static/toolshed/src                                |     1 +
 static/user_disabled.html                          |    28 +
 static/welcome.html.sample                         |    42 +
 static/wymeditor/iframe/default/lbl-blockquote.png |   Bin 0 -> 196 bytes
 static/wymeditor/iframe/default/lbl-h1.png         |   Bin 0 -> 166 bytes
 static/wymeditor/iframe/default/lbl-h2.png         |   Bin 0 -> 172 bytes
 static/wymeditor/iframe/default/lbl-h3.png         |   Bin 0 -> 170 bytes
 static/wymeditor/iframe/default/lbl-h4.png         |   Bin 0 -> 172 bytes
 static/wymeditor/iframe/default/lbl-h5.png         |   Bin 0 -> 172 bytes
 static/wymeditor/iframe/default/lbl-h6.png         |   Bin 0 -> 171 bytes
 static/wymeditor/iframe/default/lbl-p.png          |   Bin 0 -> 3607 bytes
 static/wymeditor/iframe/default/lbl-pre.png        |   Bin 0 -> 177 bytes
 static/wymeditor/iframe/default/wymiframe.css      |    90 +
 static/wymeditor/iframe/default/wymiframe.html     |    26 +
 static/wymeditor/iframe/galaxy/lbl-blockquote.png  |   Bin 0 -> 196 bytes
 static/wymeditor/iframe/galaxy/lbl-h1.png          |   Bin 0 -> 166 bytes
 static/wymeditor/iframe/galaxy/lbl-h2.png          |   Bin 0 -> 172 bytes
 static/wymeditor/iframe/galaxy/lbl-h3.png          |   Bin 0 -> 170 bytes
 static/wymeditor/iframe/galaxy/lbl-h4.png          |   Bin 0 -> 172 bytes
 static/wymeditor/iframe/galaxy/lbl-h5.png          |   Bin 0 -> 172 bytes
 static/wymeditor/iframe/galaxy/lbl-h6.png          |   Bin 0 -> 171 bytes
 static/wymeditor/iframe/galaxy/lbl-p.png           |   Bin 0 -> 3607 bytes
 static/wymeditor/iframe/galaxy/lbl-pre.png         |   Bin 0 -> 177 bytes
 static/wymeditor/iframe/galaxy/wymiframe.css       |    95 +
 static/wymeditor/iframe/galaxy/wymiframe.html      |    27 +
 static/wymeditor/lang/bg.js                        |    45 +
 static/wymeditor/lang/ca.js                        |    45 +
 static/wymeditor/lang/cs.js                        |    45 +
 static/wymeditor/lang/de.js                        |    45 +
 static/wymeditor/lang/en.js                        |    52 +
 static/wymeditor/lang/es.js                        |    45 +
 static/wymeditor/lang/fa.js                        |    46 +
 static/wymeditor/lang/fi.js                        |    44 +
 static/wymeditor/lang/fr.js                        |    45 +
 static/wymeditor/lang/he.js                        |    45 +
 static/wymeditor/lang/hr.js                        |    45 +
 static/wymeditor/lang/hu.js                        |    45 +
 static/wymeditor/lang/it.js                        |    45 +
 static/wymeditor/lang/nb.js                        |    45 +
 static/wymeditor/lang/nl.js                        |    45 +
 static/wymeditor/lang/nn.js                        |    45 +
 static/wymeditor/lang/pl.js                        |    45 +
 static/wymeditor/lang/pt-br.js                     |    45 +
 static/wymeditor/lang/pt.js                        |    45 +
 static/wymeditor/lang/ru.js                        |    45 +
 static/wymeditor/lang/sv.js                        |    45 +
 static/wymeditor/lang/tr.js                        |    45 +
 static/wymeditor/lang/zh_cn.js                     |    47 +
 static/wymeditor/skins/default/icons.png           |   Bin 0 -> 3651 bytes
 static/wymeditor/skins/default/skin.css            |   133 +
 static/wymeditor/skins/default/skin.js             |    40 +
 static/wymeditor/skins/galaxy/icons.png            |   Bin 0 -> 3041 bytes
 static/wymeditor/skins/galaxy/skin.css             |   137 +
 static/wymeditor/skins/galaxy/skin.js              |    35 +
 templates/admin/dataset_security/group/grid.mako   |     1 +
 templates/admin/dataset_security/group/group.mako  |    83 +
 .../admin/dataset_security/group/group_create.mako |   100 +
 .../admin/dataset_security/group/group_rename.mako |    36 +
 templates/admin/dataset_security/role/grid.mako    |     1 +
 templates/admin/dataset_security/role/role.mako    |   117 +
 .../admin/dataset_security/role/role_create.mako   |   104 +
 .../admin/dataset_security/role/role_rename.mako   |    43 +
 templates/admin/external_service/common.mako       |    47 +
 .../external_service/create_external_service.mako  |    28 +
 .../external_service/edit_external_service.mako    |    40 +
 templates/admin/external_service/grid.mako         |     1 +
 .../reload_external_service_types.mako             |    22 +
 .../external_service/view_external_service.mako    |    23 +
 templates/admin/forms/create_form.mako             |    21 +
 templates/admin/forms/edit_form_definition.mako    |   153 +
 templates/admin/forms/grid.mako                    |     1 +
 templates/admin/forms/view_form_definition.mako    |    85 +
 templates/admin/impersonate.mako                   |    58 +
 templates/admin/jobs.mako                          |   209 +
 templates/admin/library/grid.mako                  |     1 +
 templates/admin/library/new_library.mako           |    44 +
 templates/admin/package_tool.mako                  |    56 +
 templates/admin/quota/grid.mako                    |     1 +
 templates/admin/quota/quota.mako                   |    83 +
 templates/admin/quota/quota_create.mako            |   125 +
 templates/admin/quota/quota_edit.mako              |    46 +
 templates/admin/quota/quota_rename.mako            |    52 +
 templates/admin/quota/quota_set_default.mako       |    44 +
 templates/admin/reload_tool.mako                   |    79 +
 templates/admin/request_type/common.mako           |    33 +
 .../admin/request_type/create_request_type.mako    |    65 +
 .../admin/request_type/edit_request_type.mako      |   100 +
 templates/admin/request_type/grid.mako             |     1 +
 .../request_type/request_type_permissions.mako     |    95 +
 .../admin/request_type/view_request_type.mako      |    83 +
 templates/admin/requests/grid.mako                 |     1 +
 templates/admin/requests/reject.mako               |    36 +
 templates/admin/requests/rename_datasets.mako      |    59 +
 templates/admin/requests/sample_datasets_grid.mako |     9 +
 templates/admin/requests/view_sample_dataset.mako  |    79 +
 templates/admin/review_tool_migration_stages.mako  |   123 +
 templates/admin/tool_errors.mako                   |    27 +
 .../tool_shed_repository/browse_category.mako      |    80 +
 .../tool_shed_repository/browse_repository.mako    |    36 +
 .../browse_tool_dependency.mako                    |    66 +
 .../tool_shed_repository/browse_toolsheds.mako     |   918 ++
 templates/admin/tool_shed_repository/common.mako   |   394 +
 .../deactivate_or_uninstall_repository.mako        |   207 +
 templates/admin/tool_shed_repository/grid.mako     |     1 +
 .../initiate_repository_installation.mako          |    71 +
 .../install_tool_dependencies_with_update.mako     |    93 +
 .../tool_shed_repository/manage_repository.mako    |    82 +
 .../manage_repository_tool_dependencies.mako       |   107 +
 .../purge_repository_confirmation.mako             |    74 +
 .../tool_shed_repository/repair_repository.mako    |    80 +
 .../repository_actions_menu.mako                   |    47 +
 .../repository_installation_grid.mako              |     8 +
 .../repository_installation_status.mako            |    30 +
 .../reselect_tool_panel_section.mako               |   101 +
 .../reset_metadata_on_selected_repositories.mako   |    39 +
 .../select_shed_tool_panel_config.mako             |   128 +
 .../select_tool_panel_section.mako                 |   182 +
 .../tool_dependencies_grid.mako                    |     8 +
 .../tool_dependency_installation_status.mako       |    19 +
 .../uninstall_tool_dependencies.mako               |    63 +
 .../tool_shed_repository/view_tool_metadata.mako   |   211 +
 .../admin/tool_shed_repository/view_workflow.mako  |    37 +
 templates/admin/tool_version/grid.mako             |     1 +
 templates/admin/user/grid.mako                     |     1 +
 templates/admin/user/reset_password.mako           |    37 +
 templates/admin/user/user.mako                     |    83 +
 templates/admin/view_data_tables_registry.mako     |    50 +
 templates/admin/view_datatypes_registry.mako       |    57 +
 templates/base.mako                                |   109 +
 templates/base/base_panels.mako                    |   246 +
 templates/common/select_template.mako              |    76 +
 templates/common/template_common.mako              |   200 +
 templates/display_base.mako                        |   295 +
 templates/display_common.mako                      |   154 +
 templates/embed_base.mako                          |    63 +
 templates/export_base.mako                         |   122 +
 templates/form.mako                                |   112 +
 templates/galaxy_client_app.mako                   |   115 +
 templates/grid_base.mako                           |   264 +
 templates/grid_base_async.mako                     |     4 +
 templates/ind_share_base.mako                      |   195 +
 templates/js-app.mako                              |   114 +
 templates/message.mako                             |    62 +
 templates/no_access.mako                           |    15 +
 templates/page_base.mako                           |   199 +
 templates/panels.mako                              |    18 +
 templates/refresh_frames.mako                      |    61 +
 templates/rss.mako                                 |    23 +
 templates/search/index.mako                        |   133 +
 templates/sharing_base.mako                        |   243 +
 templates/show_params.mako                         |   251 +
 templates/slug_editing_js.mako                     |    36 +
 templates/sorting_base.mako                        |    27 +
 templates/spark_base.mako                          |    68 +
 templates/tagging_common.mako                      |   307 +
 templates/tool_shed_rating.mako                    |    33 +
 templates/user/change_password.mako                |    60 +
 templates/user/communication_settings.mako         |    19 +
 templates/user/dbkeys.mako                         |   249 +
 templates/user/edit_address.mako                   |    96 +
 templates/user/index.mako                          |    47 +
 templates/user/info.mako                           |   130 +
 templates/user/login.mako                          |   124 +
 templates/user/logout.mako                         |    53 +
 templates/user/new_address.mako                    |    97 +
 templates/user/openid_associate.mako               |    73 +
 templates/user/openid_manage.mako                  |    11 +
 templates/user/permissions.mako                    |    19 +
 templates/user/register.mako                       |   215 +
 templates/user/reset_password.mako                 |    20 +
 templates/user/toolbox_filters.mako                |    91 +
 templates/user/username.mako                       |    27 +
 templates/webapps/galaxy/admin/center.mako         |   110 +
 templates/webapps/galaxy/admin/index.mako          |   132 +
 .../webapps/galaxy/admin/sanitize_whitelist.mako   |    59 +
 templates/webapps/galaxy/admin/tool_sheds.mako     |    41 +
 templates/webapps/galaxy/admin/toolsheds.mako      |    41 +
 .../galaxy/admin/view_display_applications.mako    |    48 +
 templates/webapps/galaxy/base_panels.mako          |    21 +
 .../webapps/galaxy/biostar/post_redirect.mako      |    26 +
 templates/webapps/galaxy/data_manager/index.mako   |    70 +
 .../galaxy/data_manager/manage_data_manager.mako   |    56 +
 .../galaxy/data_manager/manage_data_table.mako     |    45 +
 .../webapps/galaxy/data_manager/view_job.mako      |    63 +
 templates/webapps/galaxy/dataset/copy_view.mako    |   159 +
 templates/webapps/galaxy/dataset/display.mako      |   154 +
 .../dataset/display_application/display.mako       |    27 +
 .../webapps/galaxy/dataset/edit_attributes.mako    |   206 +
 templates/webapps/galaxy/dataset/embed.mako        |    22 +
 templates/webapps/galaxy/dataset/errors.mako       |   114 +
 templates/webapps/galaxy/dataset/grid.mako         |     1 +
 templates/webapps/galaxy/dataset/item_content.mako |     3 +
 templates/webapps/galaxy/dataset/large_file.mako   |    14 +
 .../webapps/galaxy/dataset/security_common.mako    |   132 +
 .../webapps/galaxy/dataset/tabular_chunked.mako    |    25 +
 .../external_services/generic_jquery_grid.mako     |    59 +
 .../galaxy/external_services/generic_json.mako     |    11 +
 .../galaxy/external_services/json_common.mako      |    28 +
 templates/webapps/galaxy/galaxy.masthead.mako      |    81 +
 templates/webapps/galaxy/galaxy.panels.mako        |   237 +
 templates/webapps/galaxy/history/as_xml.mako       |    16 +
 templates/webapps/galaxy/history/citations.mako    |    35 +
 templates/webapps/galaxy/history/display.mako      |   104 +
 .../webapps/galaxy/history/display_structured.mako |   317 +
 templates/webapps/galaxy/history/embed.mako        |   138 +
 templates/webapps/galaxy/history/grid.mako         |    40 +
 templates/webapps/galaxy/history/grid_js.mako      |    65 +
 templates/webapps/galaxy/history/item_content.mako |     3 +
 templates/webapps/galaxy/history/list_as_xml.mako  |     7 +
 .../webapps/galaxy/history/list_published.mako     |    32 +
 templates/webapps/galaxy/history/permissions.mako  |     9 +
 templates/webapps/galaxy/history/rename.mako       |    38 +
 templates/webapps/galaxy/history/share.mako        |   282 +
 templates/webapps/galaxy/history/shared_grid.mako  |    99 +
 templates/webapps/galaxy/history/structure.mako    |    93 +
 templates/webapps/galaxy/history/view.mako         |   212 +
 .../webapps/galaxy/history/view_multiple.mako      |    59 +
 .../galaxy/library/common/browse_library.mako      |   615 +
 .../galaxy/library/common/browse_library_opt.mako  |   621 +
 .../webapps/galaxy/library/common/common.mako      |   491 +
 .../webapps/galaxy/library/common/folder_info.mako |    61 +
 .../galaxy/library/common/folder_permissions.mako  |    19 +
 .../common/import_datasets_to_histories.mako       |   104 +
 .../galaxy/library/common/ldda_edit_info.mako      |   181 +
 .../webapps/galaxy/library/common/ldda_info.mako   |   311 +
 .../galaxy/library/common/ldda_permissions.mako    |    70 +
 .../library/common/library_dataset_info.mako       |    70 +
 .../common/library_dataset_permissions.mako        |    25 +
 .../common/library_dataset_search_results.mako     |   137 +
 .../galaxy/library/common/library_info.mako        |   116 +
 .../galaxy/library/common/library_item_info.mako   |    15 +
 .../galaxy/library/common/library_permissions.mako |    18 +
 .../galaxy/library/common/move_library_item.mako   |   106 +
 .../webapps/galaxy/library/common/new_folder.mako  |    38 +
 .../webapps/galaxy/library/common/upload.mako      |    36 +
 templates/webapps/galaxy/library/grid.mako         |     1 +
 templates/webapps/galaxy/library/index.mako        |    15 +
 .../webapps/galaxy/mobile/dataset/detail.mako      |    29 +
 templates/webapps/galaxy/mobile/dataset/peek.mako  |    12 +
 templates/webapps/galaxy/mobile/form.mako          |    46 +
 .../webapps/galaxy/mobile/history/detail.mako      |    80 +
 templates/webapps/galaxy/mobile/history/list.mako  |    55 +
 templates/webapps/galaxy/mobile/index.mako         |    52 +
 .../webapps/galaxy/mobile/manage_library.mako      |    73 +
 templates/webapps/galaxy/mobile/settings.mako      |    34 +
 templates/webapps/galaxy/page/create.mako          |    14 +
 templates/webapps/galaxy/page/display.mako         |   158 +
 templates/webapps/galaxy/page/editor.mako          |    64 +
 templates/webapps/galaxy/page/index.mako           |    53 +
 templates/webapps/galaxy/page/list_published.mako  |    33 +
 .../webapps/galaxy/page/select_items_grid.mako     |     3 +
 templates/webapps/galaxy/page/wymiframe.mako       |    27 +
 .../galaxy/requests/common/add_samples.mako        |   128 +
 .../webapps/galaxy/requests/common/common.mako     |   727 +
 .../galaxy/requests/common/create_request.mako     |    60 +
 .../requests/common/edit_basic_request_info.mako   |   102 +
 .../galaxy/requests/common/edit_samples.mako       |   145 +
 .../galaxy/requests/common/find_samples.mako       |   100 +
 .../webapps/galaxy/requests/common/index.mako      |    16 +
 .../common/sample_dataset_transfer_status.mako     |     5 +
 .../galaxy/requests/common/sample_datasets.mako    |     5 +
 .../galaxy/requests/common/sample_state.mako       |     5 +
 .../galaxy/requests/common/view_request.mako       |   174 +
 .../requests/common/view_request_history.mako      |    59 +
 .../galaxy/requests/common/view_sample.mako        |   122 +
 .../requests/common/view_sample_datasets.mako      |    45 +
 .../requests/common/view_sample_history.mako       |    38 +
 .../galaxy/requests/find_samples_index.mako        |    14 +
 templates/webapps/galaxy/requests/grid.mako        |     1 +
 templates/webapps/galaxy/requests/index.mako       |    14 +
 templates/webapps/galaxy/root/redirect.mako        |     5 +
 templates/webapps/galaxy/root/tool_runner.mako     |    42 +
 templates/webapps/galaxy/tracks/add_to_viz.mako    |     3 +
 templates/webapps/galaxy/tracks/add_tracks.mako    |     3 +
 .../tracks/history_datasets_select_grid.mako       |     5 +
 .../webapps/galaxy/tracks/history_select_grid.mako |    84 +
 .../tracks/library_datasets_select_grid.mako       |    13 +
 templates/webapps/galaxy/user/api_keys.mako        |    45 +
 templates/webapps/galaxy/user/list_users.mako      |    38 +
 templates/webapps/galaxy/user/manage_info.mako     |    95 +
 templates/webapps/galaxy/visualization/create.mako |    14 +
 .../webapps/galaxy/visualization/display.mako      |   109 +
 .../galaxy/visualization/display_in_frame.mako     |    58 +
 templates/webapps/galaxy/visualization/embed.mako  |     4 +
 .../galaxy/visualization/embed_in_frame.mako       |    80 +
 templates/webapps/galaxy/visualization/gie.mako    |   157 +
 .../webapps/galaxy/visualization/item_content.mako |     3 +
 templates/webapps/galaxy/visualization/list.mako   |    67 +
 .../galaxy/visualization/list_published.mako       |    35 +
 .../webapps/galaxy/visualization/phyloviz.mako     |   318 +
 .../webapps/galaxy/visualization/sweepster.mako    |   152 +
 .../workflow/build_from_current_history.mako       |   163 +
 .../webapps/galaxy/workflow/configure_menu.mako    |    99 +
 templates/webapps/galaxy/workflow/display.mako     |   132 +
 templates/webapps/galaxy/workflow/editor.mako      |   442 +
 .../galaxy/workflow/editor_generic_form.mako       |    71 +
 .../webapps/galaxy/workflow/editor_tool_form.mako  |    15 +
 templates/webapps/galaxy/workflow/embed.mako       |    24 +
 templates/webapps/galaxy/workflow/import.mako      |    69 +
 .../webapps/galaxy/workflow/item_content.mako      |     3 +
 templates/webapps/galaxy/workflow/list.mako        |   122 +
 .../webapps/galaxy/workflow/list_for_run.mako      |    66 +
 .../webapps/galaxy/workflow/list_published.mako    |    32 +
 .../webapps/galaxy/workflow/myexp_export.mako      |    21 +
 templates/webapps/galaxy/workflow/run.mako         |    10 +
 templates/webapps/galaxy/workflow/sharing.mako     |   320 +
 templates/webapps/reports/base_panels.mako         |    44 +
 templates/webapps/reports/dataset_info.mako        |   125 +
 templates/webapps/reports/grid.mako                |     1 +
 .../reports/history_and_dataset_per_user.mako      |    76 +
 .../webapps/reports/history_and_dataset_type.mako  |    75 +
 templates/webapps/reports/history_per_user.mako    |    68 +
 templates/webapps/reports/index.mako               |   139 +
 templates/webapps/reports/job_info.mako            |    95 +
 .../webapps/reports/jobs_errors_per_tool.mako      |   115 +
 templates/webapps/reports/jobs_per_month_all.mako  |    97 +
 .../webapps/reports/jobs_per_month_in_error.mako   |    93 +
 templates/webapps/reports/jobs_per_tool.mako       |   103 +
 templates/webapps/reports/jobs_per_user.mako       |    97 +
 .../webapps/reports/jobs_specified_month_all.mako  |   106 +
 .../reports/jobs_specified_month_in_error.mako     |   104 +
 templates/webapps/reports/jobs_tool_per_month.mako |    66 +
 templates/webapps/reports/jobs_user_per_month.mako |    70 +
 templates/webapps/reports/registered_users.mako    |    34 +
 .../reports/registered_users_per_month.mako        |    51 +
 .../reports/registered_users_specified_date.mako   |    42 +
 .../reports/registered_users_specified_month.mako  |    50 +
 .../webapps/reports/requests_per_month_all.mako    |    38 +
 templates/webapps/reports/requests_per_user.mako   |    42 +
 .../webapps/reports/requests_user_per_month.mako   |    59 +
 templates/webapps/reports/run_stats.mako           |    52 +
 templates/webapps/reports/system.mako              |   118 +
 templates/webapps/reports/tool_error_messages.mako |    87 +
 templates/webapps/reports/tool_execution_time.mako |    79 +
 .../reports/tool_execution_time_per_month.mako     |    77 +
 templates/webapps/reports/tools_and_job_state.mako |    84 +
 .../reports/tools_and_job_state_per_month.mako     |    46 +
 .../webapps/reports/users_last_access_date.mako    |    78 +
 .../webapps/reports/users_user_disk_usage.mako     |    62 +
 .../webapps/reports/workflows_per_month_all.mako   |    88 +
 templates/webapps/reports/workflows_per_user.mako  |   103 +
 .../webapps/reports/workflows_per_workflow.mako    |    97 +
 .../webapps/reports/workflows_user_per_month.mako  |    67 +
 templates/webapps/tool_shed/admin/center.mako      |    50 +
 templates/webapps/tool_shed/admin/index.mako       |   118 +
 templates/webapps/tool_shed/admin/statistics.mako  |    64 +
 templates/webapps/tool_shed/base_panels.mako       |   178 +
 .../tool_shed/category/create_category.mako        |    34 +
 .../webapps/tool_shed/category/edit_category.mako  |    43 +
 templates/webapps/tool_shed/category/grid.mako     |    13 +
 .../webapps/tool_shed/category/valid_grid.mako     |    13 +
 templates/webapps/tool_shed/common/common.mako     |   135 +
 .../webapps/tool_shed/common/grid_common.mako      |   187 +
 .../tool_shed/common/repository_actions_menu.mako  |   210 +
 .../reset_metadata_on_selected_repositories.mako   |    93 +
 templates/webapps/tool_shed/group/index.mako       |    55 +
 templates/webapps/tool_shed/index.mako             |   241 +
 .../tool_shed/repository/browse_repository.mako    |   100 +
 templates/webapps/tool_shed/repository/common.mako |  1265 ++
 .../tool_shed/repository/contact_owner.mako        |    38 +
 .../tool_shed/repository/create_repository.mako    |    83 +
 .../repository/docker_image_repositories.mako      |    64 +
 .../tool_shed/repository/export_repository.mako    |    47 +
 .../webapps/tool_shed/repository/find_tools.mako   |    61 +
 .../tool_shed/repository/find_workflows.mako       |    50 +
 templates/webapps/tool_shed/repository/grid.mako   |     1 +
 .../tool_shed/repository/import_capsule.mako       |   146 +
 .../repository/import_capsule_results.mako         |    82 +
 .../tool_shed/repository/manage_repository.mako    |   411 +
 .../repository/preview_tools_in_changeset.mako     |    60 +
 .../tool_shed/repository/rate_repository.mako      |   154 +
 .../webapps/tool_shed/repository/tool_form.mako    |   154 +
 templates/webapps/tool_shed/repository/upload.mako |   161 +
 .../tool_shed/repository/upload_capsule.mako       |    60 +
 .../tool_shed/repository/view_changelog.mako       |   117 +
 .../tool_shed/repository/view_changeset.mako       |   172 +
 .../tool_shed/repository/view_repository.mako      |   277 +
 .../tool_shed/repository/view_tool_metadata.mako   |   285 +
 .../tool_shed/repository/view_workflow.mako        |    43 +
 .../tool_shed/repository_review/browse_review.mako |   119 +
 .../repository_review/create_component.mako        |    34 +
 .../repository_review/edit_component.mako          |    37 +
 .../tool_shed/repository_review/edit_review.mako   |   163 +
 .../webapps/tool_shed/repository_review/grid.mako  |     1 +
 .../reviews_of_changeset_revision.mako             |   113 +
 .../repository_review/reviews_of_repository.mako   |    88 +
 .../repository_review/select_previous_review.mako  |    90 +
 templates/webapps/tool_shed/role/role.mako         |   136 +
 .../tool_shed/user/manage_email_alerts.mako        |    54 +
 templates/webapps/tool_shed/user/manage_info.mako  |     9 +
 test-data/1.RData                                  |   Bin 0 -> 59 bytes
 test-data/1.axt                                    |    85 +
 test-data/1.bam                                    |   Bin 0 -> 3592 bytes
 test-data/1.bed                                    |    65 +
 test-data/1.bed.spaces                             |    65 +
 test-data/1.bedgraph                               |    30 +
 test-data/1.bigbed                                 |   Bin 0 -> 154328 bytes
 test-data/1.bigwig                                 |   Bin 0 -> 21218 bytes
 test-data/1.customtrack                            |     3 +
 test-data/1.fasta                                  |     2 +
 test-data/1.fastq                                  |     8 +
 test-data/1.fastqsanger                            |     8 +
 test-data/1.fastqsolexa                            |     8 +
 test-data/1.interval                               |     5 +
 test-data/1.lav                                    |   178 +
 test-data/1.pileup                                 |  1000 ++
 test-data/1.sam                                    |    29 +
 test-data/1.scf                                    |   Bin 0 -> 139484 bytes
 test-data/1.sff                                    |   Bin 0 -> 100 bytes
 test-data/1.tabular                                |     6 +
 test-data/1.txt                                    |    10 +
 test-data/1.wig                                    |     3 +
 test-data/2.bed                                    |    68 +
 test-data/2.fasta                                  |    11 +
 test-data/2.tabular                                |    10 +
 test-data/2gen.fastq                               |     8 +
 test-data/3.bam                                    |   Bin 0 -> 1655 bytes
 test-data/3.bed                                    |    25 +
 test-data/3.maf                                    |    55 +
 test-data/3unsorted.bam                            |   Bin 0 -> 1666 bytes
 test-data/4.bed                                    |     1 +
 test-data/4.bed.bz2                                |   Bin 0 -> 92 bytes
 test-data/4.bed.gz                                 |   Bin 0 -> 80 bytes
 test-data/4.bed.zip                                |   Bin 0 -> 198 bytes
 test-data/454Score.pdf                             |   545 +
 test-data/454Score.png                             |   Bin 0 -> 4392 bytes
 test-data/5.bed                                    |   134 +
 test-data/5.gff                                    |    25 +
 test-data/5.gff3                                   |   150 +
 test-data/6.bed                                    |    10 +
 test-data/7.bed                                    |    29 +
 test-data/8.bed                                    |     1 +
 test-data/9.bed                                    |     1 +
 test-data/GRCm38mm10_chr5_34761740-34912521.fa     |  5421 +++++++
 test-data/a.tab                                    |    15 +
 test-data/a.txt                                    |    15 +
 test-data/asian_chars_1.txt                        |     1 +
 test-data/bam_from_sam.bam                         |   Bin 0 -> 502 bytes
 test-data/bcf_index_metadata_test.bcf              |   Bin 0 -> 12492 bytes
 test-data/bcf_index_metadata_test.txt              |     1 +
 test-data/biom1_metadata_test.txt                  |    11 +
 test-data/cat_wrapper_out1.bed                     |   133 +
 test-data/composite_output_expected_log            |     2 +
 test-data/filter1_in3.sam                          |   100 +
 test-data/filter1_in5.tab                          |     5 +
 test-data/filter1_inbad.bed                        |     6 +
 test-data/filter1_test1.bed                        |     4 +
 test-data/filter1_test2.bed                        |     2 +
 test-data/filter1_test3.sam                        |     6 +
 test-data/filter1_test4.bed                        |     5 +
 test-data/filter1_test5.tab                        |     4 +
 test-data/html_file.txt                            |    74 +
 test-data/input_taxonomy.biom1                     |     1 +
 test-data/library/3.bed                            |    25 +
 test-data/library/4.bed                            |     1 +
 test-data/library/5.bed                            |   134 +
 test-data/neostore.zip                             |   Bin 0 -> 7246 bytes
 test-data/phiX.fasta                               |    79 +
 test-data/qualscores.qual454                       |    49 +
 test-data/qualscores.qualsolid                     |    48 +
 test-data/rgenetics.bed                            |     1 +
 test-data/rgenetics.bim                            |    25 +
 test-data/rgenetics.fam                            |    40 +
 test-data/rgenetics.map                            |    25 +
 test-data/rgenetics.ped                            |    40 +
 test-data/sam_with_header.sam                      |    14 +
 test-data/shrimp_cs_test1.csfasta                  |  5000 +++++++
 test-data/simple_line.txt                          |     1 +
 test-data/simple_line_alternative.txt              |     1 +
 test-data/simple_line_x2.txt                       |     2 +
 test-data/simple_line_x3.txt                       |     3 +
 test-data/simple_line_x5.txt                       |     5 +
 test-data/simple_lines_both.txt                    |     2 +
 test-data/simple_lines_interleaved.txt             |     4 +
 test-data/tinywga.bed                              |     1 +
 test-data/tinywga.bim                              |    25 +
 test-data/tinywga.fam                              |    40 +
 test-data/tinywga.map                              |    25 +
 test-data/tinywga.ped                              |    40 +
 test-data/tinywga.ped.space_to_tab                 |    40 +
 test-data/users/test1@bx.psu.edu/1.fasta           |     2 +
 test-data/users/test3@bx.psu.edu/run1/2.fasta      |    11 +
 test/TESTING.md                                    |     7 +
 test/api/__init__.py                               |     0
 test/api/helpers.py                                |   489 +
 test/api/test_api_batch.py                         |    89 +
 test/api/test_authenticate.py                      |    29 +
 test/api/test_dataset_collections.py               |   131 +
 test/api/test_datasets.py                          |    43 +
 test/api/test_datatypes.py                         |    64 +
 test/api/test_framework.py                         |    27 +
 test/api/test_histories.py                         |   102 +
 test/api/test_history_contents.py                  |   185 +
 test/api/test_history_contents_provenance.py       |    16 +
 test/api/test_jobs.py                              |   212 +
 test/api/test_libraries.py                         |    95 +
 test/api/test_page_revisions.py                    |    36 +
 test/api/test_pages.py                             |   104 +
 test/api/test_search.py                            |    32 +
 test/api/test_tool_data.py                         |    76 +
 test/api/test_tools.py                             |  1254 ++
 test/api/test_tours.py                             |    17 +
 test/api/test_users.py                             |    85 +
 test/api/test_workflow_1.ga                        |    87 +
 test/api/test_workflow_2.ga                        |    92 +
 test/api/test_workflow_batch.ga                    |   145 +
 test/api/test_workflow_extraction.py               |   470 +
 test/api/test_workflow_map_reduce_pause.ga         |   198 +
 test/api/test_workflow_matching_lists.ga           |   117 +
 test/api/test_workflow_missing_tool.ga             |    87 +
 test/api/test_workflow_pause.ga                    |   118 +
 test/api/test_workflow_topoambigouity.ga           |   471 +
 .../test_workflow_topoambigouity_auto_laidout.ga   |   471 +
 test/api/test_workflow_validation_1.ga             |    33 +
 test/api/test_workflow_with_runtime_input.ga       |    66 +
 test/api/test_workflows.py                         |  1705 +++
 test/api/test_workflows_from_yaml.py               |   284 +
 test/api/workflows_format_2/README.txt             |    12 +
 test/api/workflows_format_2/__init__.py            |    11 +
 test/api/workflows_format_2/converter.py           |   518 +
 test/api/workflows_format_2/interface.py           |    75 +
 test/api/workflows_format_2/main.py                |    42 +
 test/base/__init__.py                              |     0
 test/base/api.py                                   |   123 +
 test/base/api_asserts.py                           |    36 +
 test/base/api_util.py                              |    24 +
 test/base/driver_util.py                           |   707 +
 test/base/instrument.py                            |    82 +
 test/base/integration_util.py                      |    63 +
 test/base/interactor.py                            |   496 +
 test/base/nose_util.py                             |    27 +
 test/base/test_db_util.py                          |   224 +
 test/base/test_logging.py                          |    16 +
 test/base/tool_shed_util.py                        |    90 +
 test/base/twilltestcase.py                         |  2332 +++
 test/casperjs/README.txt                           |     4 +
 test/casperjs/anon-history-tests.js                |   116 +
 test/casperjs/api-anon-history-permission-tests.js |   205 +
 test/casperjs/api-anon-history-tests.js            |   120 +
 test/casperjs/api-batch-tests.js                   |    83 +
 test/casperjs/api-configuration-tests.js           |    75 +
 test/casperjs/api-dataset-tests.js                 |    59 +
 test/casperjs/api-hda-tests.js                     |   350 +
 test/casperjs/api-history-permission-tests.js      |   263 +
 test/casperjs/api-history-tests.js                 |   321 +
 test/casperjs/api-tool-tests.js                    |   219 +
 test/casperjs/api-user-tests.js                    |    52 +
 test/casperjs/api-visualizations-tests.js          |   199 +
 test/casperjs/casperjs_runner.py                   |   463 +
 test/casperjs/hda-state-tests.js                   |   397 +
 test/casperjs/history-options-tests.js             |    77 +
 test/casperjs/history-panel-tests.js               |   271 +
 test/casperjs/history-share-tests.js               |   227 +
 test/casperjs/login-tests.js                       |    96 +
 test/casperjs/modules/api.js                       |   906 ++
 test/casperjs/modules/historyoptions.js            |   200 +
 test/casperjs/modules/historypanel.js              |   424 +
 test/casperjs/modules/tools.js                     |   203 +
 test/casperjs/modules/user.js                      |   296 +
 test/casperjs/page-data/selectors.json             |    35 +
 test/casperjs/registration-tests.js                |   137 +
 test/casperjs/server_env.py                        |   143 +
 test/casperjs/spaceghost.js                        |  1295 ++
 test/casperjs/test-data/simple_test.ga             |    96 +
 test/casperjs/upload-tests.js                      |    45 +
 test/casperjs/utils/simple-galaxy.js               |    65 +
 test/docker/README.md                              |    13 +
 test/docker/base/Dockerfile                        |   111 +
 test/docker/base/ansible_vars.yml                  |    48 +
 test/docker/base/provision.yml                     |    16 +
 test/docker/base/run_test_wrapper.sh               |    76 +
 test/docker/base/start_mysql.sh                    |    47 +
 test/functional/__init__.py                        |     3 +
 test/functional/database_contexts.py               |     6 +
 test/functional/test_data_managers.py              |   112 +
 test/functional/test_library_templates.py          |   737 +
 test/functional/test_toolbox.py                    |   316 +
 test/functional/tool-data/data1/entry.txt          |     1 +
 test/functional/tool-data/data1/entry.txt.index    |     1 +
 test/functional/tool-data/data2/entry.txt          |     1 +
 test/functional/tool-data/data2/entry.txt.index    |     1 +
 test/functional/tool-data/fasta_indexes.loc        |     2 +
 .../tool-data/sample_tool_data_tables.xml          |    14 +
 test/functional/tool-data/testalpha.loc            |     2 +
 test/functional/tool-data/testbeta.loc             |     0
 test/functional/tools/README.txt                   |    15 +
 test/functional/tools/bibtex.xml                   |   322 +
 test/functional/tools/boolean_conditional.xml      |    53 +
 test/functional/tools/catDocker.xml                |    28 +
 test/functional/tools/checksum.xml                 |    17 +
 test/functional/tools/cheetah_casting.xml          |    27 +
 .../tools/cheetah_problem_syntax_error.xml         |    17 +
 .../tools/cheetah_problem_unbound_var.xml          |    16 +
 .../tools/cheetah_problem_unbound_var_input.xml    |    19 +
 test/functional/tools/code_file.py                 |     8 +
 test/functional/tools/code_file.xml                |    15 +
 .../collection_creates_dynamic_list_of_pairs.xml   |    71 +
 .../tools/collection_creates_dynamic_nested.xml    |    66 +
 test/functional/tools/collection_creates_list.xml  |    39 +
 .../functional/tools/collection_creates_list_2.xml |    22 +
 .../tools/collection_creates_list_of_pairs.xml     |    50 +
 test/functional/tools/collection_creates_pair.xml  |    37 +
 .../tools/collection_creates_pair_from_type.xml    |    35 +
 test/functional/tools/collection_mixed_param.xml   |    35 +
 test/functional/tools/collection_nested_test.xml   |    51 +
 .../functional/tools/collection_optional_param.xml |    38 +
 test/functional/tools/collection_paired_test.xml   |    27 +
 .../tools/collection_split_on_column.xml           |    30 +
 test/functional/tools/collection_two_paired.xml    |    69 +
 test/functional/tools/collection_type_source.xml   |    62 +
 test/functional/tools/color_param.xml              |    30 +
 test/functional/tools/column_multi_param.xml       |    25 +
 test/functional/tools/column_param.xml             |    23 +
 test/functional/tools/composite.xml                |    20 +
 test/functional/tools/composite_output.xml         |    26 +
 test/functional/tools/composite_output_tests.xml   |    29 +
 test/functional/tools/create_10.xml                |    35 +
 test/functional/tools/data_manager.xml             |    16 +
 test/functional/tools/dbkey_filter_input.xml       |    39 +
 test/functional/tools/dbkey_filter_multi_input.xml |    34 +
 test/functional/tools/dbkey_output_action.xml      |    36 +
 test/functional/tools/detect_errors_aggressive.xml |    51 +
 test/functional/tools/disambiguate_cond.xml        |    96 +
 test/functional/tools/disambiguate_repeats.xml     |    63 +
 test/functional/tools/empty_output.xml             |    12 +
 test/functional/tools/environment_variables.xml    |    34 +
 test/functional/tools/exit_code_from_file.xml      |    13 +
 test/functional/tools/for_workflows/1.bam          |     1 +
 test/functional/tools/for_workflows/cat.xml        |    19 +
 .../tools/for_workflows/cat_collection.xml         |    16 +
 .../tools/for_workflows/cat_interleave.xml         |    18 +
 test/functional/tools/for_workflows/cat_list.xml   |    16 +
 .../for_workflows/create_input_collection.xml      |    40 +
 test/functional/tools/for_workflows/head.xml       |    13 +
 test/functional/tools/for_workflows/mapper.xml     |    16 +
 test/functional/tools/for_workflows/pileup.xml     |    18 +
 test/functional/tools/for_workflows/split.xml      |    33 +
 test/functional/tools/gzipped_inputs.xml           |    19 +
 test/functional/tools/identifier_collection.xml    |    15 +
 test/functional/tools/identifier_multiple.xml      |    15 +
 test/functional/tools/identifier_single.xml        |    13 +
 test/functional/tools/implicit_default_conds.xml   |    49 +
 test/functional/tools/inheritance_simple.xml       |    24 +
 test/functional/tools/inputs_as_json.xml           |   129 +
 test/functional/tools/job_properties.xml           |    62 +
 test/functional/tools/library_data.xml             |    19 +
 test/functional/tools/maxseconds.xml               |    16 +
 test/functional/tools/md5sum.xml                   |    17 +
 test/functional/tools/metadata.xml                 |    29 +
 test/functional/tools/metadata_bam.xml             |    25 +
 test/functional/tools/metadata_bcf.xml             |    16 +
 test/functional/tools/metadata_biom1.xml           |    28 +
 test/functional/tools/metadata_column_names.xml    |    24 +
 test/functional/tools/min_repeat.xml               |    27 +
 test/functional/tools/mulled_example_multi_1.xml   |    18 +
 test/functional/tools/multi_data_optional.xml      |    27 +
 test/functional/tools/multi_data_param.xml         |    52 +
 test/functional/tools/multi_data_repeat.xml        |    13 +
 test/functional/tools/multi_output.xml             |    27 +
 .../tools/multi_output_assign_primary.xml          |    34 +
 test/functional/tools/multi_output_configured.xml  |    67 +
 test/functional/tools/multi_repeats.xml            |    44 +
 test/functional/tools/multi_select.xml             |    45 +
 test/functional/tools/multiple_versions_v01.xml    |    11 +
 test/functional/tools/multiple_versions_v02.xml    |    11 +
 .../tools/output_action_change_format.xml          |    52 +
 test/functional/tools/output_auto_format.xml       |    16 +
 test/functional/tools/output_collection_filter.xml |    52 +
 test/functional/tools/output_filter.xml            |    46 +
 .../functional/tools/output_filter_exception_1.xml |    35 +
 test/functional/tools/output_format.xml            |    72 +
 test/functional/tools/output_format_collection.xml |    35 +
 test/functional/tools/output_order.xml             |    25 +
 test/functional/tools/parallelism.xml              |    20 +
 test/functional/tools/parallelism_optional.xml     |    21 +
 test/functional/tools/paths_as_file.xml            |    37 +
 test/functional/tools/sam_to_bam.xml               |    19 +
 test/functional/tools/sample_data_manager_conf.xml |    16 +
 test/functional/tools/sample_datatypes_conf.xml    |    18 +
 test/functional/tools/samples_tool_conf.xml        |   131 +
 test/functional/tools/section.xml                  |    43 +
 test/functional/tools/simple_constructs.xml        |    77 +
 test/functional/tools/simple_constructs.yml        |   140 +
 test/functional/tools/special_params.xml           |    36 +
 test/functional/tools/strict_shell.xml             |    23 +
 test/functional/tools/strict_shell_default_off.xml |    20 +
 test/functional/tools/tool_directory.xml           |    20 +
 test/functional/tools/tool_provided_metadata_1.xml |    29 +
 test/functional/tools/tool_provided_metadata_2.xml |    38 +
 test/functional/tools/tool_provided_metadata_3.xml |    43 +
 test/functional/tools/top_level_data.xml           |    34 +
 test/functional/tools/unicode_stream.xml           |    41 +
 test/functional/tools/upload.py                    |     1 +
 test/functional/tools/upload.xml                   |     1 +
 test/functional/tools/upload_tool_conf.xml         |     6 +
 test/functional/tools/validation_default.xml       |    34 +
 test/functional/tools/validation_empty_dataset.xml |    17 +
 test/functional/tools/validation_repeat.xml        |    57 +
 test/functional/tools/validation_sanitizer.xml     |    35 +
 test/functional/tools/version_command.py           |     2 +
 .../tools/version_command_interpreter.xml          |    20 +
 test/functional/tools/version_command_plain.xml    |    20 +
 test/functional/tools/version_command_tool_dir.xml |    20 +
 test/functional/workflow.py                        |   187 +
 test/integration/__init__.py                       |     6 +
 test/integration/embedded_pulsar_job_conf.xml      |    21 +
 test/integration/test_pulsar_embedded.py           |    27 +
 test/integration/test_resolvers.py                 |   104 +
 test/manual/__init__.py                            |     0
 test/manual/launch_and_run.sh                      |    78 +
 test/manual/workflow_job_conf.xml                  |    26 +
 test/manual/workflows_scaling.py                   |   231 +
 test/qunit/Gruntfile.js                            |    33 +
 test/qunit/README.txt                              |    33 +
 test/qunit/package.json                            |    11 +
 test/qunit/scripts                                 |     1 +
 test/qunit/test-app.js                             |    40 +
 test/qunit/test-common.js                          |   134 +
 test/qunit/test-data/bootstrapped.js               |    31 +
 test/qunit/test-data/fakeserver.js                 |    11 +
 test/qunit/test-data/job-dag-1.js                  |   534 +
 .../test-data/paired-collection-creator.data.js    |   153 +
 test/qunit/test-libs/qunit-1.23.1.css              |   305 +
 test/qunit/test-libs/qunit-1.23.1.js               |  4334 ++++++
 test/qunit/test-libs/sinon-1.17.3.js               |  6437 +++++++++
 test/qunit/test-libs/sinon-qunit-1.0.0.js          |    62 +
 test/qunit/tests/form_tests.html                   |    11 +
 test/qunit/tests/form_tests.js                     |   104 +
 test/qunit/tests/galaxy-app-base.html              |    10 +
 test/qunit/tests/galaxy-app-base.js                |   110 +
 test/qunit/tests/graph.html                        |    10 +
 test/qunit/tests/graph.js                          |   540 +
 test/qunit/tests/hda-base.html                     |    10 +
 test/qunit/tests/hda-base.js                       |    33 +
 test/qunit/tests/history_contents_model_tests.html |    10 +
 test/qunit/tests/history_contents_model_tests.js   |    44 +
 test/qunit/tests/job-dag.html                      |    10 +
 test/qunit/tests/job-dag.js                        |   261 +
 .../tests/list-of-pairs-collection-creator.html    |    10 +
 .../tests/list-of-pairs-collection-creator.js      |   106 +
 test/qunit/tests/masthead_tests.html               |    11 +
 test/qunit/tests/masthead_tests.js                 |   122 +
 test/qunit/tests/metrics-logger.html               |    10 +
 test/qunit/tests/metrics-logger.js                 |   357 +
 test/qunit/tests/modal_tests.html                  |    10 +
 test/qunit/tests/modal_tests.js                    |    87 +
 test/qunit/tests/page_tests.html                   |    11 +
 test/qunit/tests/page_tests.js                     |    50 +
 test/qunit/tests/popover_tests.html                |    11 +
 test/qunit/tests/popover_tests.js                  |    40 +
 test/qunit/tests/ui_tests.html                     |    11 +
 test/qunit/tests/ui_tests.js                       |   787 +
 test/qunit/tests/upload_dialog_tests.html          |    10 +
 test/qunit/tests/upload_dialog_tests.js            |    49 +
 test/qunit/tests/utils_test.html                   |    11 +
 test/qunit/tests/utils_test.js                     |    20 +
 test/qunit/tests/workflow_editor_tests.html        |    10 +
 test/qunit/tests/workflow_editor_tests.js          |  1137 ++
 test/shed_functional/__init__.py                   |     1 +
 test/shed_functional/base/__init__.py              |     0
 test/shed_functional/base/common.py                |    35 +
 test/shed_functional/base/test_db_util.py          |   231 +
 test/shed_functional/base/twilltestcase.py         |  1530 ++
 test/shed_functional/functional/__init__.py        |     0
 .../test_0000_basic_repository_features.py         |   358 +
 .../test_0010_repository_with_tool_dependencies.py |   165 +
 .../test_0020_basic_repository_dependencies.py     |   102 +
 .../test_0030_repository_dependency_revisions.py   |   163 +
 .../test_0040_repository_circular_dependencies.py  |   116 +
 .../test_0050_circular_dependencies_4_levels.py    |   266 +
 .../functional/test_0060_workflows.py              |   122 +
 .../functional/test_0070_invalid_tool.py           |    67 +
 .../test_0080_advanced_circular_dependencies.py    |    98 +
 .../functional/test_0090_tool_search.py            |   191 +
 .../test_0100_complex_repository_dependencies.py   |   225 +
 ..._0110_invalid_simple_repository_dependencies.py |   143 +
 ...simple_repository_dependency_multiple_owners.py |   141 +
 .../functional/test_0130_datatype_converters.py    |    78 +
 .../functional/test_0140_tool_help_images.py       |    83 +
 .../test_0150_prior_installation_required.py       |   104 +
 ...st_0160_circular_prior_installation_required.py |   156 +
 ...est_0170_complex_prior_installation_required.py |   132 +
 .../functional/test_0300_reset_all_metadata.py     |   603 +
 .../functional/test_0310_hg_api_features.py        |    94 +
 .../test_0400_repository_component_reviews.py      |   578 +
 ...0_repository_component_review_access_control.py |   189 +
 .../test_0420_citable_urls_for_repositories.py     |   229 +
 .../functional/test_0430_browse_utilities.py       |   176 +
 .../test_0440_deleting_dependency_definitions.py   |   357 +
 .../functional/test_0460_upload_to_repository.py   |   481 +
 .../test_0470_tool_dependency_repository_type.py   |   255 +
 .../test_0480_tool_dependency_xml_verification.py  |    71 +
 .../test_0490_export_import_repositories.py        |    90 +
 ...est_0500_export_repository_simple_dependency.py |    89 +
 ...xport_import_repository_complex_dependencies.py |    99 +
 ...est_0520_import_export_circular_dependencies.py |    81 +
 .../test_0530_repository_admin_feature.py          |   158 +
 .../test_0540_get_all_metadata_from_api.py         |   147 +
 .../test_0550_metadata_updated_dependencies.py     |   157 +
 .../test_1000_install_basic_repository.py          |   123 +
 ...10_install_repository_with_tool_dependencies.py |   133 +
 ...tall_repository_with_repository_dependencies.py |   132 +
 ...install_repository_with_dependency_revisions.py |   159 +
 ...stall_repository_basic_circular_dependencies.py |   155 +
 .../test_1050_circular_dependencies_4_levels.py    |   376 +
 .../test_1060_install_repository_with_workflow.py  |   153 +
 .../functional/test_1070_invalid_tool.py           |    87 +
 ...80_advanced_circular_dependency_installation.py |   377 +
 .../test_1090_repository_dependency_handling.py    |   178 +
 ...1100_install_updated_repository_dependencies.py |   116 +
 .../test_1110_install_tool_from_tool_search.py     |   311 +
 ...install_repository_with_complex_dependencies.py |   282 +
 ...epository_with_invalid_repository_dependency.py |   175 +
 ...simple_repository_dependency_multiple_owners.py |   194 +
 .../functional/test_1150_datatype_converters.py    |    88 +
 .../functional/test_1160_tool_help_images.py       |    79 +
 .../test_1170_prior_installation_required.py       |   145 +
 ...st_1180_circular_prior_installation_required.py |   250 +
 ...est_1190_complex_prior_installation_required.py |   176 +
 ...200_uninstall_and_reinstall_basic_repository.py |   129 +
 ..._reinstall_repository_with_tool_dependencies.py |   146 +
 ...tall_repository_with_repository_dependencies.py |   146 +
 ...install_repository_with_dependency_revisions.py |   181 +
 .../functional/test_1300_reset_all_metadata.py     |   459 +
 .../test_1400_review_migration_stages.py           |    41 +
 .../functional/test_1410_update_manager.py         |   124 +
 ...1420_tool_dependency_environment_inheritance.py |   319 +
 .../test_1430_repair_installed_repository.py       |   173 +
 .../functional/test_1440_missing_env_sh_files.py   |   125 +
 .../test_1450_installing_datatypes_sniffers.py     |   177 +
 .../functional/test_1460_data_managers.py          |    92 +
 .../test_1470_updating_installed_repositories.py   |   136 +
 test/shed_functional/functional_tests.py           |   169 +
 .../0460_files/repository_dependencies.xml         |     4 +
 .../0460_files/repository_dependencies_in_root.tar |   Bin 0 -> 2048 bytes
 .../repository_dependencies_in_subfolder.tar       |   Bin 0 -> 2560 bytes
 .../test_data/0460_files/tool_dependencies.xml     |     6 +
 .../0460_files/tool_dependencies_in_root.tar       |   Bin 0 -> 2048 bytes
 .../0460_files/tool_dependencies_in_subfolder.tar  |   Bin 0 -> 2560 bytes
 .../test_data/0480_files/tool_dependencies.xml     |    14 +
 .../0540_files/column_maker/column_maker.tar       |   Bin 0 -> 10240 bytes
 .../column_maker/repository_dependencies.xml       |     4 +
 .../0540_files/convert_chars/convert_chars.tar     |   Bin 0 -> 10240 bytes
 .../0540_files/convert_chars/tool_dependencies.xml |     6 +
 .../0540_files/package_bwa/tool_dependencies.xml   |    13 +
 .../test_data/0550_files/filtering_1.0.tgz         |   Bin 0 -> 3650 bytes
 .../0550_files/package_freebayes_1_0550.tgz        |   Bin 0 -> 452 bytes
 .../0550_files/package_freebayes_2_0550.tgz        |   Bin 0 -> 461 bytes
 .../0550_files/package_samtools_1_0550.tgz         |   Bin 0 -> 407 bytes
 .../0550_files/package_samtools_2_0550.tgz         |   Bin 0 -> 419 bytes
 .../0550_files/temp/tool_dependencies.xml          |    17 +
 .../test_data/1420_files/binary_tarballs/atlas.tar |   Bin 0 -> 2560 bytes
 .../test_data/1420_files/binary_tarballs/boost.tar |   Bin 0 -> 2560 bytes
 .../test_data/1420_files/binary_tarballs/bzlib.tar |   Bin 0 -> 2560 bytes
 .../1420_files/binary_tarballs/lapack.tar          |   Bin 0 -> 2560 bytes
 .../test_data/1420_files/binary_tarballs/numpy.tar |   Bin 0 -> 2560 bytes
 .../test_data/1420_files/binary_tarballs/rdkit.tar |   Bin 0 -> 2560 bytes
 .../package_atlas_3_10_1420/tool_dependencies.xml  |    22 +
 .../package_boost_1_53_1420/tool_dependencies.xml  |    29 +
 .../package_bzlib_1_0_1420/tool_dependencies.xml   |    21 +
 .../package_lapack_3_4_1420/tool_dependencies.xml  |    18 +
 .../package_numpy_1_7_1420/tool_dependencies.xml   |    35 +
 .../tool_dependencies.xml                          |    43 +
 .../complex_dependency/tool_dependencies.xml       |     6 +
 .../dependency_definition/tool_dependencies.xml    |    14 +
 .../data_manager_files/test_data_manager.tar       |   Bin 0 -> 1932 bytes
 .../bed_to_gff_converter/bed_to_gff_converter.tar  |   Bin 0 -> 10240 bytes
 test/shed_functional/test_data/bismark/bismark.tar |   Bin 0 -> 593920 bytes
 .../bismark/bismark_methylation_extractor.xml      |   306 +
 .../test_data/blast/blast_datatypes.tar            |   Bin 0 -> 30720 bytes
 .../test_data/blast/blastxml_to_top_descr.tar      |   Bin 0 -> 20480 bytes
 test/shed_functional/test_data/bwa/bwa_base.tar    |   Bin 0 -> 58880 bytes
 test/shed_functional/test_data/bwa/bwa_color.tar   |   Bin 0 -> 60416 bytes
 .../test_data/bwa/complex/bwa_base.tar             |   Bin 0 -> 57344 bytes
 .../bwa/complex/readme/tool_dependencies.xml       |    16 +
 .../test_data/bwa/complex/tool_dependencies.xml    |    13 +
 .../test_data/column_maker/column_maker.tar        |   Bin 0 -> 10240 bytes
 .../test_data/convert_chars/convert_chars.tar      |   Bin 0 -> 10240 bytes
 .../0470_files/emboss_complex_dependency.tar       |   Bin 0 -> 11776 bytes
 .../emboss/0470_files/tool_dependencies.xml        |     7 +
 .../test_data/emboss/datatypes/datatypes_conf.xml  |   101 +
 test/shed_functional/test_data/emboss/emboss.tar   |   Bin 0 -> 20480 bytes
 .../first_tool_dependency/tool_dependencies.xml    |    48 +
 .../second_tool_dependency/tool_dependencies.xml   |    49 +
 .../first_tool_dependency/tool_dependencies.xml    |    22 +
 .../second_tool_dependency/tool_dependencies.xml   |    23 +
 test/shed_functional/test_data/filtering/README    |     1 +
 .../test_data/filtering/filtering_0000.txt         |     1 +
 .../test_data/filtering/filtering_1.1.0.tar        |   Bin 0 -> 20480 bytes
 .../test_data/filtering/filtering_2.2.0.tar        |   Bin 0 -> 20480 bytes
 .../test_data/filtering/filtering_test_data.tar    |   Bin 0 -> 40960 bytes
 .../shed_functional/test_data/filtering/readme.txt |     2 +
 ...Workflow_for_0060_filter_workflow_repository.ga |    60 +
 .../test_data/freebayes/freebayes.tar              |   Bin 0 -> 51200 bytes
 .../test_data/freebayes/freebayes.xml              |   669 +
 .../tool_dependencies.xml                          |    46 +
 .../tool_dependencies.xml                          |    46 +
 .../test_data/freebayes/sam_fa_indices.loc.sample  |    28 +
 .../freebayes/tool_data_table_conf.xml.sample      |     8 +
 .../test_data/freebayes/tool_dependencies.xml      |    30 +
 .../test_data/htseq_count/htseq_count.tar          |   Bin 0 -> 634880 bytes
 .../package_matplotlib/package_matplotlib_1_2.tar  |   Bin 0 -> 10240 bytes
 .../package_matplotlib/tool_dependencies.xml       |    25 +
 .../test_data/package_numpy/package_numpy_1_7.tar  |   Bin 0 -> 10240 bytes
 .../proteomics_datatypes/proteomics_datatypes.tar  |   Bin 0 -> 20480 bytes
 test/shed_functional/test_data/readme.txt          |     1 +
 .../repository_capsules/0490_filtering.tar.gz      |   Bin 0 -> 13184 bytes
 .../repository_capsules/0500_emboss_5.tar.gz       |   Bin 0 -> 3862 bytes
 .../0510_trans_proteomic_pipeline.tar.gz           |   Bin 0 -> 6334 bytes
 .../repository_capsules/0520_filtering.tar.gz      |   Bin 0 -> 13578 bytes
 test/unit/dataset_collections/__init__.py          |     0
 test/unit/dataset_collections/test_matching.py     |   141 +
 test/unit/datatypes/__init__.py                    |     0
 test/unit/datatypes/dataproviders/__init__.py      |     0
 .../dataproviders/test_base_dataproviders.py       |   380 +
 .../dataproviders/test_line_dataproviders.py       |   300 +
 test/unit/datatypes/test_data.py                   |    11 +
 test/unit/jobs/__init__.py                         |     0
 .../unit/jobs/dynamic_tool_destination/__init__.py |     0
 .../dynamic_tool_destination/data/dest_fail.yml    |     9 +
 .../data/priority_tool_destination.yml             |   125 +
 .../jobs/dynamic_tool_destination/data/test.empty  |     0
 .../jobs/dynamic_tool_destination/data/test.fasta  |    21 +
 .../jobs/dynamic_tool_destination/data/test1.full  |    10 +
 .../jobs/dynamic_tool_destination/data/test3.full  |   115 +
 .../data/test_no_verbose.yml                       |    11 +
 .../data/test_num_input_datasets.yml               |    17 +
 .../dynamic_tool_destination/data/test_users.yml   |    14 +
 .../data/tool_destination.yml                      |    85 +
 .../jobs/dynamic_tool_destination/mockGalaxy.py    |    97 +
 .../test_dynamic_tool_destination.py               |   774 +
 .../unit/jobs/dynamic_tool_destination/ymltests.py |   947 ++
 test/unit/jobs/test_command_factory.py             |   190 +
 test/unit/jobs/test_datasets.py                    |    17 +
 test/unit/jobs/test_job_configuration.py           |   161 +
 test/unit/jobs/test_job_output_checker.py          |    96 +
 test/unit/jobs/test_job_wrapper.py                 |   209 +
 test/unit/jobs/test_mapper.py                      |   178 +
 test/unit/jobs/test_rule_helper.py                 |   196 +
 test/unit/jobs/test_rules/10_site.py               |    54 +
 test/unit/jobs/test_rules/20_instance.py           |     4 +
 test/unit/jobs/test_rules/__init__.py              |     0
 test/unit/jobs/test_runner_local.py                |   173 +
 test/unit/jobs/test_runner_params.py               |    48 +
 test/unit/managers/__init__.py                     |     0
 test/unit/managers/base.py                         |   140 +
 test/unit/managers/test_CollectionManager.py       |   137 +
 test/unit/managers/test_DatasetManager.py          |   449 +
 test/unit/managers/test_HDAManager.py              |   676 +
 test/unit/managers/test_HDCAManager.py             |   118 +
 test/unit/managers/test_HistoryContentsManager.py  |   347 +
 test/unit/managers/test_HistoryManager.py          |   907 ++
 test/unit/managers/test_UserManager.py             |   278 +
 test/unit/shed_unit/__init__.py                    |     4 +
 test/unit/shed_unit/test_fabric_util.py            |    46 +
 test/unit/shed_unit/test_td_common_util.py         |    88 +
 test/unit/shed_unit/test_tool_panel_manager.py     |   206 +
 test/unit/test_galaxy_mapping.py                   |   524 +
 test/unit/test_galaxy_transactions.py              |    74 +
 test/unit/test_lazy_process.py                     |    26 +
 test/unit/test_objectstore.py                      |   247 +
 test/unit/test_routes.py                           |   111 +
 test/unit/test_security_helper.py                  |    73 +
 test/unit/test_sockets.py                          |    11 +
 test/unit/test_sqlite_utils.py                     |    60 +
 test/unit/test_topsort.py                          |    39 +
 test/unit/tools/__init__.py                        |     0
 test/unit/tools/filter_modules/__init__.py         |     0
 test/unit/tools/filter_modules/filtermod.py        |    22 +
 test/unit/tools/test_actions.py                    |   268 +
 test/unit/tools/test_citations.py                  |    56 +
 test/unit/tools/test_collect_primary_datasets.py   |   330 +
 test/unit/tools/test_column_parameters.py          |   106 +
 test/unit/tools/test_conda_resolution.py           |    69 +
 test/unit/tools/test_data_parameters.py            |   211 +
 test/unit/tools/test_dataset_matcher.py            |   169 +
 test/unit/tools/test_evaluation.py                 |   318 +
 test/unit/tools/test_execution.py                  |   220 +
 test/unit/tools/test_history_imp_exp.py            |   168 +
 test/unit/tools/test_parameter_parsing.py          |   357 +
 test/unit/tools/test_parsing.py                    |   446 +
 test/unit/tools/test_select_parameters.py          |   112 +
 .../unit/tools/test_tool_dependency_description.py |    45 +
 test/unit/tools/test_tool_deps.py                  |   387 +
 test/unit/tools/test_tool_external_files.py        |    31 +
 test/unit/tools/test_tool_loader.py                |   272 +
 test/unit/tools/test_tool_panel.py                 |    27 +
 test/unit/tools/test_toolbox.py                    |   448 +
 test/unit/tools/test_toolbox_filters.py            |    84 +
 test/unit/tools/test_watcher.py                    |    88 +
 test/unit/tools/test_wrappers.py                   |   187 +
 test/unit/tools_support.py                         |   210 +
 test/unit/unittest_utils/__init__.py               |     0
 test/unit/unittest_utils/galaxy_mock.py            |   197 +
 test/unit/unittest_utils/tempfilecache.py          |    48 +
 test/unit/unittest_utils/utility.py                |    23 +
 test/unit/visualizations/__init__.py               |     0
 test/unit/visualizations/plugins/__init__.py       |     0
 .../plugins/test_VisualizationPlugin.py            |   202 +
 .../plugins/test_VisualizationsRegistry.py         |   277 +
 test/unit/web/__init__.py                          |     0
 test/unit/web/base/__init__.py                     |     0
 test/unit/web/base/test_HookPluginManager.py       |   250 +
 .../unit/web/base/test_PageServingPluginManager.py |   125 +
 test/unit/web/base/test_PluginManager.py           |    98 +
 test/unit/web/framework/__init__.py                |     0
 test/unit/web/framework/test_webapp.py             |   121 +
 test/unit/workflows/__init__.py                    |     0
 test/unit/workflows/test_extract_summary.py        |   154 +
 test/unit/workflows/test_modules.py                |   358 +
 test/unit/workflows/test_render.py                 |    69 +
 test/unit/workflows/test_run_parameters.py         |   147 +
 test/unit/workflows/test_workflow_progress.py      |   207 +
 test/unit/workflows/workflow_support.py            |   127 +
 tool-data/add_scores.loc.sample                    |    20 +
 tool-data/alignseq.loc.sample                      |    57 +
 tool-data/all_fasta.loc.sample                     |    18 +
 tool-data/bam_iobio.loc.sample                     |     3 +
 tool-data/bfast_indexes.loc.sample                 |    38 +
 tool-data/binned_scores.loc.sample                 |    37 +
 tool-data/biom_simple_display.loc.sample           |     3 +
 tool-data/blastdb.loc.sample                       |    44 +
 tool-data/blastdb_d.loc.sample                     |    57 +
 tool-data/blastdb_p.loc.sample                     |    44 +
 tool-data/codingSnps.loc.sample                    |    23 +
 tool-data/encode_datasets.loc.sample               |    62 +
 tool-data/faseq.loc.sample                         |    26 +
 tool-data/funDo.loc.sample                         |    11 +
 tool-data/liftOver.loc.sample                      |    27 +
 tool-data/maf_index.loc.sample                     |    17 +
 tool-data/maf_pairwise.loc.sample                  |    31 +
 tool-data/microbial_data.loc.sample                |    37 +
 tool-data/mosaik_index.loc.sample                  |    19 +
 tool-data/ngs_sim_fasta.loc.sample                 |    20 +
 tool-data/perm_base_index.loc.sample               |    27 +
 tool-data/perm_color_index.loc.sample              |    28 +
 tool-data/phastOdds.loc.sample                     |    21 +
 tool-data/picard_index.loc.sample                  |    26 +
 tool-data/quality_scores.loc.sample                |    26 +
 tool-data/regions.loc.sample                       |    24 +
 tool-data/sequence_index_base.loc.sample           |    29 +
 tool-data/sequence_index_color.loc.sample          |    29 +
 tool-data/shared/ensembl/ensembl_sites.txt         |     5 +
 .../shared/ensembl/ensembl_sites_data_URL.txt      |     8 +
 tool-data/shared/gbrowse/gbrowse_build_sites.txt   |    24 +
 tool-data/shared/igv/igv_build_sites.txt.sample    |     4 +
 .../shared/rviewer/rviewer_build_sites.txt.sample  |     3 +
 tool-data/shared/ucsc/builds.txt.buildbot          |     1 +
 tool-data/shared/ucsc/builds.txt.sample            |   152 +
 tool-data/shared/ucsc/manual_builds.txt.sample     |   708 +
 tool-data/shared/ucsc/ucsc_build_sites.txt.sample  |     7 +
 tool-data/sift_db.loc.sample                       |    22 +
 tool-data/srma_index.loc.sample                    |    29 +
 tool-data/twobit.loc.sample                        |    26 +
 tool-data/vcf_iobio.loc.sample                     |     3 +
 tool_list.py                                       |    83 +
 tools/data_source/access_libraries.xml             |     8 +
 tools/data_source/bed_convert.xml                  |    14 +
 tools/data_source/biomart.xml                      |    46 +
 tools/data_source/biomart_test.xml                 |    46 +
 tools/data_source/cbi_rice_mart.xml                |    39 +
 tools/data_source/data_source.py                   |   119 +
 tools/data_source/ebi_sra.xml                      |    15 +
 tools/data_source/eupathdb.xml                     |    13 +
 tools/data_source/fetch.py                         |    27 +
 tools/data_source/fly_modencode.xml                |    32 +
 tools/data_source/flymine.xml                      |    35 +
 tools/data_source/flymine_test.xml                 |    31 +
 tools/data_source/genbank.py                       |    44 +
 tools/data_source/genbank.xml                      |    25 +
 tools/data_source/gramene_mart.xml                 |    42 +
 tools/data_source/hapmapmart.xml                   |    46 +
 tools/data_source/hbvar.xml                        |    21 +
 tools/data_source/hbvar_filter.py                  |    81 +
 tools/data_source/import.py                        |    62 +
 tools/data_source/import.xml                       |    27 +
 tools/data_source/metabolicmine.xml                |    13 +
 tools/data_source/microbial_import.py              |    86 +
 tools/data_source/microbial_import.xml             |   114 +
 tools/data_source/microbial_import_code.py         |   158 +
 tools/data_source/modmine.xml                      |    19 +
 tools/data_source/mousemine.xml                    |    35 +
 tools/data_source/ratmine.xml                      |    34 +
 tools/data_source/ucsc_tablebrowser.xml            |    46 +
 tools/data_source/ucsc_tablebrowser_archaea.xml    |    47 +
 tools/data_source/ucsc_tablebrowser_test.xml       |    46 +
 tools/data_source/upload.py                        |   425 +
 tools/data_source/upload.xml                       |   232 +
 tools/data_source/worm_modencode.xml               |    32 +
 tools/data_source/wormbase.xml                     |    27 +
 tools/data_source/wormbase_test.xml                |    27 +
 tools/data_source/yeastmine.xml                    |    20 +
 tools/data_source/zebrafishmine.xml                |    20 +
 tools/evolution/add_scores.py                      |   113 +
 tools/evolution/add_scores.xml                     |   106 +
 tools/evolution/codingSnps.pl                      |   571 +
 tools/evolution/codingSnps.xml                     |   177 +
 tools/evolution/codingSnps_filter.py               |    41 +
 tools/extract/extract_genomic_dna.py               |   316 +
 tools/extract/extract_genomic_dna.xml              |   189 +
 tools/extract/liftOver_wrapper.py                  |    86 +
 tools/extract/liftOver_wrapper.xml                 |   144 +
 tools/filters/CreateInterval.pl                    |    19 +
 tools/filters/CreateInterval.xml                   |    56 +
 tools/filters/axt_to_concat_fasta.py               |    50 +
 tools/filters/axt_to_concat_fasta.xml              |    66 +
 tools/filters/axt_to_fasta.py                      |    52 +
 tools/filters/axt_to_fasta.xml                     |    72 +
 tools/filters/axt_to_lav.py                        |   180 +
 tools/filters/axt_to_lav.xml                       |    97 +
 tools/filters/axt_to_lav_code.py                   |     6 +
 tools/filters/bed2gff.xml                          |    92 +
 tools/filters/bed_to_bigbed.xml                    |    58 +
 tools/filters/bed_to_gff_converter.py              |    78 +
 tools/filters/catWrapper.py                        |    32 +
 tools/filters/catWrapper.xml                       |    79 +
 tools/filters/changeCase.pl                        |    58 +
 tools/filters/changeCase.xml                       |    77 +
 tools/filters/commWrapper.pl                       |    19 +
 tools/filters/commWrapper.xml                      |    38 +
 tools/filters/compare.xml                          |    79 +
 tools/filters/condense_characters.pl               |   105 +
 tools/filters/condense_characters.xml              |    48 +
 tools/filters/convert_characters.pl                |   101 +
 tools/filters/convert_characters.py                |    54 +
 tools/filters/convert_characters.xml               |    77 +
 tools/filters/cutWrapper.pl                        |    87 +
 tools/filters/cutWrapper.xml                       |   211 +
 tools/filters/fileGrep.xml                         |    42 +
 tools/filters/fixedValueColumn.pl                  |    34 +
 tools/filters/fixedValueColumn.xml                 |    61 +
 tools/filters/gff/extract_GFF_Features.py          |    54 +
 tools/filters/gff/extract_GFF_Features.xml         |   114 +
 tools/filters/gff/gff_filter_by_attribute.py       |   306 +
 tools/filters/gff/gff_filter_by_attribute.xml      |    54 +
 tools/filters/gff/gff_filter_by_feature_count.py   |   182 +
 tools/filters/gff/gff_filter_by_feature_count.xml  |    53 +
 .../gff/gtf_filter_by_attribute_values_list.py     |    71 +
 .../gff/gtf_filter_by_attribute_values_list.xml    |    42 +
 tools/filters/gff/sort_gtf.py                      |    20 +
 tools/filters/gff2bed.xml                          |    90 +
 tools/filters/gff_to_bed_converter.py              |   136 +
 tools/filters/grep.py                              |   137 +
 tools/filters/grep.xml                             |    82 +
 tools/filters/gtf2bedgraph.xml                     |    84 +
 tools/filters/gtf_to_bedgraph_converter.py         |    87 +
 tools/filters/headWrapper.pl                       |    19 +
 tools/filters/headWrapper.xml                      |    42 +
 tools/filters/join.py                              |   390 +
 tools/filters/joinWrapper.pl                       |    51 +
 tools/filters/joinWrapper.py                       |    77 +
 tools/filters/joiner.xml                           |   180 +
 tools/filters/joiner2.xml                          |    13 +
 tools/filters/lav_to_bed.py                        |    53 +
 tools/filters/lav_to_bed.xml                       |    68 +
 tools/filters/lav_to_bed_code.py                   |    19 +
 tools/filters/mergeCols.py                         |    43 +
 tools/filters/mergeCols.xml                        |    63 +
 tools/filters/pasteWrapper.pl                      |    35 +
 tools/filters/pasteWrapper.xml                     |    68 +
 tools/filters/random_lines_two_pass.py             |    78 +
 tools/filters/randomlines.py                       |    36 +
 tools/filters/randomlines.xml                      |    66 +
 tools/filters/remove_beginning.pl                  |    33 +
 tools/filters/remove_beginning.xml                 |    42 +
 tools/filters/secure_hash_message_digest.py        |    48 +
 tools/filters/secure_hash_message_digest.xml       |    45 +
 tools/filters/sff_extract.py                       |  1340 ++
 tools/filters/sff_extractor.xml                    |    58 +
 tools/filters/sorter.py                            |    58 +
 tools/filters/sorter.xml                           |   188 +
 tools/filters/tailWrapper.pl                       |    19 +
 tools/filters/tailWrapper.xml                      |    42 +
 tools/filters/trimmer.py                           |   113 +
 tools/filters/trimmer.xml                          |   140 +
 tools/filters/ucsc_gene_bed_to_exon_bed.py         |   139 +
 tools/filters/ucsc_gene_bed_to_exon_bed.xml        |    78 +
 tools/filters/ucsc_gene_bed_to_intron_bed.py       |    85 +
 tools/filters/ucsc_gene_bed_to_intron_bed.xml      |    60 +
 tools/filters/ucsc_gene_table_to_intervals.py      |   117 +
 tools/filters/ucsc_gene_table_to_intervals.xml     |    25 +
 tools/filters/uniq.py                              |   141 +
 tools/filters/uniq.xml                             |   105 +
 tools/filters/wc_gnu.xml                           |    72 +
 tools/filters/wig_to_bigwig.xml                    |    94 +
 tools/filters/wiggle_to_simple.py                  |    48 +
 tools/filters/wiggle_to_simple.xml                 |    88 +
 tools/genomespace/genomespace_exporter.py          |   334 +
 tools/genomespace/genomespace_exporter.xml         |    54 +
 tools/genomespace/genomespace_file_browser.py      |   217 +
 tools/genomespace/genomespace_file_browser_dev.xml |    15 +
 .../genomespace/genomespace_file_browser_prod.xml  |    15 +
 .../genomespace/genomespace_file_browser_test.xml  |    15 +
 tools/genomespace/genomespace_importer.py          |   220 +
 tools/genomespace/genomespace_importer.xml         |    26 +
 tools/maf/genebed_maf_to_fasta.xml                 |    95 +
 tools/maf/interval2maf.py                          |   145 +
 tools/maf/interval2maf.xml                         |   295 +
 tools/maf/interval2maf_pairwise.xml                |    48 +
 tools/maf/interval_maf_to_merged_fasta.py          |   204 +
 tools/maf/interval_maf_to_merged_fasta.xml         |   112 +
 tools/maf/macros.xml                               |    16 +
 tools/maf/maf_by_block_number.py                   |    50 +
 tools/maf/maf_by_block_number.xml                  |    38 +
 tools/maf/maf_filter.py                            |    74 +
 tools/maf/maf_filter.xml                           |   199 +
 tools/maf/maf_limit_size.py                        |    37 +
 tools/maf/maf_limit_size.xml                       |    34 +
 tools/maf/maf_limit_to_species.py                  |    54 +
 tools/maf/maf_limit_to_species.xml                 |    49 +
 tools/maf/maf_reverse_complement.py                |    44 +
 tools/maf/maf_reverse_complement.xml               |    51 +
 tools/maf/maf_split_by_species.py                  |    46 +
 tools/maf/maf_split_by_species.xml                 |   221 +
 tools/maf/maf_stats.py                             |   115 +
 tools/maf/maf_stats.xml                            |   115 +
 tools/maf/maf_thread_for_species.py                |    55 +
 tools/maf/maf_thread_for_species.xml               |    59 +
 tools/maf/maf_to_bed.py                            |    84 +
 tools/maf/maf_to_bed.xml                           |   134 +
 tools/maf/maf_to_bed_code.py                       |    19 +
 tools/maf/maf_to_fasta.xml                         |   197 +
 tools/maf/maf_to_fasta_concat.py                   |    60 +
 tools/maf/maf_to_fasta_multiple_sets.py            |    61 +
 tools/maf/maf_to_interval.py                       |    71 +
 tools/maf/maf_to_interval.xml                      |   131 +
 tools/maf/vcf_to_maf_customtrack.py                |   163 +
 tools/maf/vcf_to_maf_customtrack.xml               |   131 +
 tools/meme/fimo.xml                                |   238 +
 tools/meme/fimo_wrapper.py                         |    78 +
 tools/meme/meme.xml                                |   353 +
 tools/metag_tools/blat_wrapper.py                  |   112 +
 tools/metag_tools/blat_wrapper.xml                 |    99 +
 tools/metag_tools/shrimp_color_wrapper.py          |   116 +
 tools/metag_tools/shrimp_color_wrapper.xml         |   181 +
 tools/metag_tools/shrimp_wrapper.py                |   642 +
 tools/metag_tools/shrimp_wrapper.xml               |   279 +
 .../bwa_solid2fastq_modified.pl                    |    89 +
 tools/next_gen_conversion/fastq_conversions.py     |    45 +
 tools/next_gen_conversion/fastq_conversions.xml    |   133 +
 tools/next_gen_conversion/fastq_gen_conv.py        |   177 +
 tools/next_gen_conversion/fastq_gen_conv.xml       |   106 +
 tools/next_gen_conversion/solid2fastq.py           |   201 +
 tools/next_gen_conversion/solid2fastq.xml          |   154 +
 tools/next_gen_conversion/solid_to_fastq.py        |    74 +
 tools/next_gen_conversion/solid_to_fastq.xml       |   101 +
 tools/ngs_simulation/ngs_simulation.py             |   280 +
 tools/ngs_simulation/ngs_simulation.xml            |   222 +
 tools/phenotype_association/BEAM2_wrapper.sh       |    75 +
 tools/phenotype_association/beam.xml               |   141 +
 tools/phenotype_association/gpass.pl               |    79 +
 tools/phenotype_association/gpass.xml              |   115 +
 tools/phenotype_association/ldtools.xml            |   114 +
 tools/phenotype_association/ldtools_wrapper.sh     |    64 +
 tools/phenotype_association/linkToDavid.pl         |    59 +
 tools/phenotype_association/linkToDavid.xml        |   114 +
 tools/phenotype_association/linkToGProfile.pl      |    89 +
 tools/phenotype_association/linkToGProfile.xml     |    93 +
 tools/phenotype_association/lped_to_geno.pl        |   104 +
 tools/phenotype_association/lps.xml                |   323 +
 tools/phenotype_association/lps_tool_wrapper.sh    |    38 +
 tools/phenotype_association/master2gd_snp.pl       |   221 +
 tools/phenotype_association/master2gd_snp.xml      |    86 +
 tools/phenotype_association/master2pg.pl           |   131 +
 tools/phenotype_association/master2pg.xml          |    66 +
 tools/phenotype_association/mergeSnps.pl           |    57 +
 tools/phenotype_association/pagetag.py             |   313 +
 tools/phenotype_association/pass.xml               |   130 +
 tools/phenotype_association/pass_wrapper.sh        |    13 +
 tools/phenotype_association/senatag.py             |   258 +
 tools/phenotype_association/sift.xml               |   180 +
 .../phenotype_association/sift_variants_wrapper.sh |   184 +
 tools/phenotype_association/vcf2pgSnpMult.pl       |    81 +
 tools/plotting/bar_chart.py                        |   140 +
 tools/plotting/bar_chart.xml                       |    58 +
 tools/plotting/boxplot.xml                         |   111 +
 tools/solid_tools/maq_cs_wrapper.py                |   273 +
 tools/solid_tools/maq_cs_wrapper.xml               |   120 +
 tools/solid_tools/maq_cs_wrapper_code.py           |     4 +
 tools/solid_tools/qualsolid_boxplot_graph.sh       |    94 +
 tools/solid_tools/solid_qual_boxplot.xml           |    40 +
 tools/solid_tools/solid_qual_stats.py              |   140 +
 tools/solid_tools/solid_qual_stats.xml             |    69 +
 tools/sr_assembly/velvetg.xml                      |   301 +
 tools/sr_assembly/velvetg_wrapper.py               |    48 +
 tools/sr_assembly/velveth.xml                      |   129 +
 tools/sr_assembly/velveth_wrapper.py               |    61 +
 tools/sr_mapping/PerM.xml                          |   369 +
 tools/sr_mapping/bfast_wrapper.py                  |   351 +
 tools/sr_mapping/bfast_wrapper.xml                 |   384 +
 tools/sr_mapping/fastq_statistics.xml              |    94 +
 tools/sr_mapping/mosaik.xml                        |   129 +
 tools/sr_mapping/srma_wrapper.py                   |   201 +
 tools/sr_mapping/srma_wrapper.xml                  |   221 +
 .../stats/aggregate_binned_scores_in_intervals.xml |   113 +
 tools/stats/aggregate_scores_in_intervals.py       |   252 +
 tools/stats/filtering.py                           |   263 +
 tools/stats/filtering.xml                          |    90 +
 tools/stats/grouping.py                            |   179 +
 tools/stats/grouping.xml                           |   142 +
 tools/stats/gsummary.py                            |   124 +
 tools/stats/gsummary.xml                           |    82 +
 tools/stats/gsummary.xml.groups                    |    62 +
 tools/stats/r_wrapper.sh                           |    23 +
 tools/visualization/LAJ.py                         |    11 +
 tools/visualization/LAJ.xml                        |    42 +
 tools/visualization/LAJ_code.py                    |    42 +
 tox.ini                                            |    90 +
 4353 files changed, 604283 insertions(+)

diff --git a/.ci/check_controller.sh b/.ci/check_controller.sh
new file mode 100644
index 0000000..73e3931
--- /dev/null
+++ b/.ci/check_controller.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+MAX_LINE_COUNT=19900
+project_dir=`dirname $0`/..
+cd $project_dir
+bash -c "[ `find lib/galaxy/webapps/galaxy/controllers/ -name '*.py' | xargs wc -l | tail -n 1 | awk '{ printf \$1; }'` -lt $MAX_LINE_COUNT ]"
diff --git a/.ci/check_mako.sh b/.ci/check_mako.sh
new file mode 100644
index 0000000..0c0fb77
--- /dev/null
+++ b/.ci/check_mako.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+MAX_MAKO_COUNT=330
+project_dir=`dirname $0`/..
+cd $project_dir
+bash -c "[ `find templates -iname '*.mako'  | wc -l | cut -f1 -d' '` -lt $MAX_MAKO_COUNT ]"
diff --git a/.ci/first_startup.sh b/.ci/first_startup.sh
new file mode 100644
index 0000000..a05a3cf
--- /dev/null
+++ b/.ci/first_startup.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+TRIES=120
+URL=http://localhost:8080
+EXIT_CODE=1
+i=0
+echo "Testing for correct startup:"
+bash run.sh --daemon && \
+    while [ "$i" -le $TRIES ]; do
+        curl "$URL" && EXIT_CODE=0 && break
+        sleep 1
+        i=$((i + 1))
+    done
+kill -9 "$(cat paster.pid)"
+echo "exit code:$EXIT_CODE, showing startup log:"
+cat paster.log
+exit $EXIT_CODE
diff --git a/.ci/flake8_blacklist.txt b/.ci/flake8_blacklist.txt
new file mode 100644
index 0000000..4f2201f
--- /dev/null
+++ b/.ci/flake8_blacklist.txt
@@ -0,0 +1,7 @@
+.venv/
+.tox/
+client/node_modules/
+database/
+doc/source/conf.py
+eggs/
+lib/galaxy/util/jstree.py
diff --git a/.ci/flake8_docstrings_include_list.txt b/.ci/flake8_docstrings_include_list.txt
new file mode 100644
index 0000000..1f5e96f
--- /dev/null
+++ b/.ci/flake8_docstrings_include_list.txt
@@ -0,0 +1,2 @@
+lib/galaxy/jobs/metrics
+lib/galaxy/exceptions
diff --git a/.ci/flake8_lint_include_list.txt b/.ci/flake8_lint_include_list.txt
new file mode 100644
index 0000000..2cbbb80
--- /dev/null
+++ b/.ci/flake8_lint_include_list.txt
@@ -0,0 +1,579 @@
+client/galaxy/style/source_material/circle.py
+contrib/
+cron/add_manual_builds.py
+cron/cleanup_datasets.py
+cron/parse_builds_3_sites.py
+cron/parse_builds.py
+doc/parse_gx_xsd.py
+doc/patch.py
+lib/galaxy/actions/
+lib/galaxy/auth/
+lib/galaxy/config.py
+lib/galaxy/dataset_collections/builder.py
+lib/galaxy/dataset_collections/__init__.py
+lib/galaxy/dataset_collections/structure.py
+lib/galaxy/dataset_collections/subcollections.py
+lib/galaxy/dataset_collections/type_description.py
+lib/galaxy/datatypes/assembly.py
+lib/galaxy/datatypes/binary.py
+lib/galaxy/datatypes/checkers.py
+lib/galaxy/datatypes/constructive_solid_geometry.py
+lib/galaxy/datatypes/converters/bcf_bgzip_to_bcf_converter.py
+lib/galaxy/datatypes/converters/bcf_to_bcf_bgzip_converter.py
+lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py
+lib/galaxy/datatypes/converters/bed_to_gff_converter.py
+lib/galaxy/datatypes/converters/bgzip.py
+lib/galaxy/datatypes/converters/fasta_to_len.py
+lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.py
+lib/galaxy/datatypes/converters/fastqsolexa_to_qual_converter.py
+lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
+lib/galaxy/datatypes/converters/gff_to_bed_converter.py
+lib/galaxy/datatypes/converters/gff_to_interval_index_converter.py
+lib/galaxy/datatypes/converters/__init__.py
+lib/galaxy/datatypes/converters/interval_to_bed_converter.py
+lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py
+lib/galaxy/datatypes/converters/interval_to_coverage.py
+lib/galaxy/datatypes/converters/interval_to_fli.py
+lib/galaxy/datatypes/converters/interval_to_interval_index_converter.py
+lib/galaxy/datatypes/converters/interval_to_tabix_converter.py
+lib/galaxy/datatypes/converters/lped_to_fped_converter.py
+lib/galaxy/datatypes/converters/lped_to_pbed_converter.py
+lib/galaxy/datatypes/converters/maf_to_fasta_converter.py
+lib/galaxy/datatypes/converters/maf_to_interval_converter.py
+lib/galaxy/datatypes/converters/pbed_to_lped_converter.py
+lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
+lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.py
+lib/galaxy/datatypes/converters/tabular_to_dbnsfp.py
+lib/galaxy/datatypes/converters/vcf_to_vcf_bgzip.py
+lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py
+lib/galaxy/datatypes/coverage.py
+lib/galaxy/datatypes/dataproviders/base.py
+lib/galaxy/datatypes/dataproviders/exceptions.py
+lib/galaxy/datatypes/dataproviders/__init__.py
+lib/galaxy/datatypes/data.py
+lib/galaxy/datatypes/display_applications/__init__.py
+lib/galaxy/datatypes/display_applications/util.py
+lib/galaxy/datatypes/images.py
+lib/galaxy/datatypes/__init__.py
+lib/galaxy/datatypes/metadata.py
+lib/galaxy/datatypes/msa.py
+lib/galaxy/datatypes/ngsindex.py
+lib/galaxy/datatypes/proteomics.py
+lib/galaxy/datatypes/sequence.py
+lib/galaxy/datatypes/tabular.py
+lib/galaxy/datatypes/text.py
+lib/galaxy/datatypes/tracks.py
+lib/galaxy/datatypes/util/generic_util.py
+lib/galaxy/datatypes/util/__init__.py
+lib/galaxy/eggs/
+lib/galaxy/exceptions/__init__.py
+lib/galaxy/external_services/__init__.py
+lib/galaxy/external_services/parameters.py
+lib/galaxy/external_services/result_handlers/basic.py
+lib/galaxy/external_services/result_handlers/__init__.py
+lib/galaxy_ext/
+lib/galaxy/forms/__init__.py
+lib/galaxy/jobs/actions/
+lib/galaxy/jobs/command_factory.py
+lib/galaxy/jobs/datasets.py
+lib/galaxy/jobs/deferred/
+lib/galaxy/jobs/error_level.py
+lib/galaxy/jobs/__init__.py
+lib/galaxy/jobs/manager.py
+lib/galaxy/jobs/metrics/
+lib/galaxy/jobs/rules/
+lib/galaxy/jobs/runners/cli.py
+lib/galaxy/jobs/runners/condor.py
+lib/galaxy/jobs/runners/drmaa.py
+lib/galaxy/jobs/runners/godocker.py
+lib/galaxy/jobs/runners/kubernetes.py
+lib/galaxy/jobs/runners/local.py
+lib/galaxy/jobs/runners/pbs.py
+lib/galaxy/jobs/runners/pulsar.py
+lib/galaxy/jobs/runners/slurm.py
+lib/galaxy/jobs/runners/state_handlers/
+lib/galaxy/jobs/runners/tasks.py
+lib/galaxy/jobs/runners/util/cli/factory.py
+lib/galaxy/jobs/runners/util/cli/job/__init__.py
+lib/galaxy/jobs/runners/util/cli/job/slurm_torque.py
+lib/galaxy/jobs/runners/util/cli/job/torque.py
+lib/galaxy/jobs/runners/util/cli/shell/__init__.py
+lib/galaxy/jobs/runners/util/drmaa/
+lib/galaxy/jobs/runners/util/env.py
+lib/galaxy/jobs/runners/util/external.py
+lib/galaxy/jobs/runners/util/__init__.py
+lib/galaxy/jobs/runners/util/job_script/
+lib/galaxy/jobs/splitters/basic.py
+lib/galaxy/jobs/splitters/__init__.py
+lib/galaxy/jobs/stock_rules.py
+lib/galaxy/jobs/transfer_manager.py
+lib/galaxy/main.py
+lib/galaxy/managers/annotatable.py
+lib/galaxy/managers/api_keys.py
+lib/galaxy/managers/base.py
+lib/galaxy/managers/collections_util.py
+lib/galaxy/managers/context.py
+lib/galaxy/managers/deletable.py
+lib/galaxy/managers/__init__.py
+lib/galaxy/managers/lddas.py
+lib/galaxy/managers/libraries.py
+lib/galaxy/managers/secured.py
+lib/galaxy/managers/taggable.py
+lib/galaxy/model/__init__.py
+lib/galaxy/model/item_attrs.py
+lib/galaxy/model/mapping.py
+lib/galaxy/model/metadata.py
+lib/galaxy/model/migrate/__init__.py
+lib/galaxy/model/migrate/versions/0001_initial_tables.py
+lib/galaxy/model/migrate/versions/0002_metadata_file_table.py
+lib/galaxy/model/migrate/versions/0003_security_and_libraries.py
+lib/galaxy/model/migrate/versions/0004_indexes_and_defaults.py
+lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
+lib/galaxy/model/migrate/versions/0007_sharing_histories.py
+lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
+lib/galaxy/model/migrate/versions/0009_request_table.py
+lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
+lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
+lib/galaxy/model/migrate/versions/0012_user_address.py
+lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py
+lib/galaxy/model/migrate/versions/0014_pages.py
+lib/galaxy/model/migrate/versions/0015_tagging.py
+lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py
+lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
+lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
+lib/galaxy/model/migrate/versions/0019_request_library_folder.py
+lib/galaxy/model/migrate/versions/0020_library_upload_job.py
+lib/galaxy/model/migrate/versions/0021_user_prefs.py
+lib/galaxy/model/migrate/versions/0022_visualization_tables.py
+lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py
+lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py
+lib/galaxy/model/migrate/versions/0025_user_info.py
+lib/galaxy/model/migrate/versions/0026_cloud_tables.py
+lib/galaxy/model/migrate/versions/0027_request_events.py
+lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py
+lib/galaxy/model/migrate/versions/0029_user_actions.py
+lib/galaxy/model/migrate/versions/0030_history_slug_column.py
+lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py
+lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py
+lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py
+lib/galaxy/model/migrate/versions/0034_page_user_share_association.py
+lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py
+lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py
+lib/galaxy/model/migrate/versions/0037_samples_library.py
+lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py
+lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py
+lib/galaxy/model/migrate/versions/0040_page_annotations.py
+lib/galaxy/model/migrate/versions/0041_workflow_invocation.py
+lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py
+lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py
+lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py
+lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py
+lib/galaxy/model/migrate/versions/0046_post_job_actions.py
+lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py
+lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py
+lib/galaxy/model/migrate/versions/0049_api_keys_table.py
+lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py
+lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py
+lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py
+lib/galaxy/model/migrate/versions/0053_item_ratings.py
+lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py
+lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py
+lib/galaxy/model/migrate/versions/0056_workflow_outputs.py
+lib/galaxy/model/migrate/versions/0057_request_notify.py
+lib/galaxy/model/migrate/versions/0058_history_import_export.py
+lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py
+lib/galaxy/model/migrate/versions/0060_history_archive_import.py
+lib/galaxy/model/migrate/versions/0061_tasks.py
+lib/galaxy/model/migrate/versions/0062_user_openid_table.py
+lib/galaxy/model/migrate/versions/0063_sequencer_table.py
+lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py
+lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py
+lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py
+lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py
+lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py
+lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py
+lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py
+lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py
+lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py
+lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py
+lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py
+lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py
+lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py
+lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py
+lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py
+lib/galaxy/model/migrate/versions/0080_quota_tables.py
+lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py
+lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py
+lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py
+lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py
+lib/galaxy/model/migrate/versions/0085_add_task_info.py
+lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py
+lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py
+lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py
+lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py
+lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py
+lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py
+lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py
+lib/galaxy/model/migrate/versions/0093_add_job_params_col.py
+lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py
+lib/galaxy/model/migrate/versions/0095_hda_subsets.py
+lib/galaxy/model/migrate/versions/0096_openid_provider.py
+lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py
+lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py
+lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py
+lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py
+lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py
+lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py
+lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py
+lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
+lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py
+lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py
+lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py
+lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
+lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py
+lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py
+lib/galaxy/model/migrate/versions/0111_add_job_destinations.py
+lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py
+lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py
+lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py
+lib/galaxy/model/migrate/versions/0115_longer_user_password_field.py
+lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py
+lib/galaxy/model/migrate/versions/0117_add_user_activation.py
+lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py
+lib/galaxy/model/migrate/versions/0119_job_metrics.py
+lib/galaxy/model/migrate/versions/0120_dataset_collections.py
+lib/galaxy/model/migrate/versions/0121_workflow_uuids.py
+lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py
+lib/galaxy/model/migrate/versions/0124_job_state_history.py
+lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py
+lib/galaxy/model/migrate/versions/0126_password_reset.py
+lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py
+lib/galaxy/model/migrate/versions/0128_session_timeout.py
+lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py
+lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py
+lib/galaxy/model/orm/
+lib/galaxy/model/tool_shed_install/migrate/__init__.py
+lib/galaxy/model/tool_shed_install/migrate/versions/
+lib/galaxy/model/util.py
+lib/galaxy/objectstore/pulsar.py
+lib/galaxy/objectstore/s3_multipart_upload.py
+lib/galaxy/openid/__init__.py
+lib/galaxy/quota/
+lib/galaxy/sample_tracking/data_transfer.py
+lib/galaxy/sample_tracking/__init__.py
+lib/galaxy/sample_tracking/sample.py
+lib/galaxy/security/validate_user_input.py
+lib/galaxy/tags/
+lib/galaxy/tools/actions/__init__.py
+lib/galaxy/tools/actions/metadata.py
+lib/galaxy/tools/cwl/
+lib/galaxy/tools/data_manager/__init__.py
+lib/galaxy/tools/deps/
+lib/galaxy/tools/exception_handling.py
+lib/galaxy/tools/execute.py
+lib/galaxy/tools/filters/
+lib/galaxy/tools/imp_exp/export_history.py
+lib/galaxy/tools/imp_exp/__init__.py
+lib/galaxy/tools/linters/
+lib/galaxy/tools/lint.py
+lib/galaxy/tools/lint_util.py
+lib/galaxy/tools/loader_directory.py
+lib/galaxy/tools/loader.py
+lib/galaxy/tools/parameters/dataset_matcher.py
+lib/galaxy/tools/parameters/history_query.py
+lib/galaxy/tools/parameters/__init__.py
+lib/galaxy/tools/parameters/input_translation.py
+lib/galaxy/tools/parameters/sanitize.py
+lib/galaxy/tools/parameters/validation.py
+lib/galaxy/tools/parameters/wrapped_json.py
+lib/galaxy/tools/parameters/wrapped.py
+lib/galaxy/tools/parser/
+lib/galaxy/tools/special_tools.py
+lib/galaxy/tools/test.py
+lib/galaxy/tools/toolbox/
+lib/galaxy/tools/util/galaxyops/
+lib/galaxy/tools/util/__init__.py
+lib/galaxy/tools/verify/
+lib/galaxy/util/
+lib/galaxy_utils/__init__.py
+lib/galaxy/util/sleeper.py
+lib/galaxy/util/specs.py
+lib/galaxy_utils/sequence/fasta.py
+lib/galaxy_utils/sequence/fastq.py
+lib/galaxy_utils/sequence/__init__.py
+lib/galaxy_utils/sequence/transform.py
+lib/galaxy_utils/sequence/vcf.py
+lib/galaxy/util/template.py
+lib/galaxy/util/ucsc.py
+lib/galaxy/version.py
+lib/galaxy/visualization/data_providers/basic.py
+lib/galaxy/visualization/data_providers/cigar.py
+lib/galaxy/visualization/data_providers/__init__.py
+lib/galaxy/visualization/data_providers/phyloviz/baseparser.py
+lib/galaxy/visualization/genome/
+lib/galaxy/visualization/genomes.py
+lib/galaxy/visualization/__init__.py
+lib/galaxy/visualization/plugins/__init__.py
+lib/galaxy/visualization/plugins/utils.py
+lib/galaxy/visualization/tracks/
+lib/galaxy/web/base/controllers/__init__.py
+lib/galaxy/web/base/__init__.py
+lib/galaxy/web/buildapp.py
+lib/galaxy/web/formatting.py
+lib/galaxy/web/framework/base.py
+lib/galaxy/web/framework/decorators.py
+lib/galaxy/web/framework/helpers/grids.py
+lib/galaxy/web/framework/__init__.py
+lib/galaxy/web/framework/middleware/error.py
+lib/galaxy/web/framework/middleware/__init__.py
+lib/galaxy/web/framework/middleware/request_id.py
+lib/galaxy/web/framework/middleware/sentry.py
+lib/galaxy/web/framework/middleware/static.py
+lib/galaxy/web/framework/middleware/statsd.py
+lib/galaxy/web/framework/middleware/translogger.py
+lib/galaxy/web/framework/middleware/xforwardedhost.py
+lib/galaxy/web/__init__.py
+lib/galaxy/web/params.py
+lib/galaxy/webapps/galaxy/api/genomes.py
+lib/galaxy/webapps/galaxy/api/__init__.py
+lib/galaxy/webapps/galaxy/api/jobs.py
+lib/galaxy/webapps/galaxy/api/lda_datasets.py
+lib/galaxy/webapps/galaxy/api/requests.py
+lib/galaxy/webapps/galaxy/api/roles.py
+lib/galaxy/webapps/galaxy/api/samples.py
+lib/galaxy/webapps/galaxy/api/tools.py
+lib/galaxy/webapps/galaxy/api/tours.py
+lib/galaxy/webapps/galaxy/api/workflows.py
+lib/galaxy/webapps/galaxy/controllers/async.py
+lib/galaxy/webapps/galaxy/controllers/data_manager.py
+lib/galaxy/webapps/galaxy/controllers/error.py
+lib/galaxy/webapps/galaxy/controllers/external_services.py
+lib/galaxy/webapps/galaxy/controllers/history.py
+lib/galaxy/webapps/galaxy/controllers/__init__.py
+lib/galaxy/webapps/galaxy/controllers/mobile.py
+lib/galaxy/webapps/galaxy/controllers/page.py
+lib/galaxy/webapps/galaxy/controllers/requests.py
+lib/galaxy/webapps/galaxy/controllers/search.py
+lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+lib/galaxy/webapps/galaxy/controllers/userskeys.py
+lib/galaxy/webapps/galaxy/__init__.py
+lib/galaxy/webapps/__init__.py
+lib/galaxy/webapps/reports/config.py
+lib/galaxy/webapps/reports/controllers/__init__.py
+lib/galaxy/webapps/reports/controllers/query.py
+lib/galaxy/webapps/reports/__init__.py
+lib/galaxy/webapps/tool_shed/api/__init__.py
+lib/galaxy/webapps/tool_shed/controllers/groups.py
+lib/galaxy/webapps/tool_shed/controllers/__init__.py
+lib/galaxy/webapps/tool_shed/controllers/user.py
+lib/galaxy/webapps/tool_shed/framework/__init__.py
+lib/galaxy/webapps/tool_shed/framework/middleware/__init__.py
+lib/galaxy/webapps/tool_shed/framework/middleware/remoteuser.py
+lib/galaxy/webapps/tool_shed/__init__.py
+lib/galaxy/webapps/tool_shed/model/mapping.py
+lib/galaxy/webapps/tool_shed/model/migrate/
+lib/galaxy/webapps/tool_shed/search/__init__.py
+lib/galaxy/webapps/tool_shed/security/
+lib/galaxy/webapps/tool_shed/util/__init__.py
+lib/galaxy/webapps/tool_shed/util/ratings_util.py
+lib/galaxy/webapps/tool_shed/util/shed_statistics.py
+lib/galaxy/webapps/util.py
+lib/galaxy/workflow/extract.py
+lib/galaxy/workflow/__init__.py
+lib/galaxy/workflow/render.py
+lib/galaxy/workflow/run.py
+lib/galaxy/workflow/schedulers/
+lib/galaxy/workflow/steps.py
+lib/galaxy/work/__init__.py
+lib/mimeparse.py
+lib/psyco_full.py
+lib/tool_shed/
+scripts/api/common.py
+scripts/api/copy_hda_to_library_folder.py
+scripts/api/create.py
+scripts/api/delete.py
+scripts/api/example_watch_folder.py
+scripts/api/filter_failed_datasets_from_collection.py
+scripts/api/form_create_from_xml.py
+scripts/api/history_create_history.py
+scripts/api/history_delete_history.py
+scripts/api/import_library_dataset_to_history.py
+scripts/api/import_workflows_from_installed_tool_shed_repository.py
+scripts/api/install_tool_shed_repositories.py
+scripts/api/library_create_folder.py
+scripts/api/library_create_library.py
+scripts/api/library_upload_dir.py
+scripts/api/library_upload_from_import_dir.py
+scripts/api/load_data_with_metadata.py
+scripts/api/repair_tool_shed_repository.py
+scripts/api/requests_update_state.py
+scripts/api/request_type_create_from_xml.py
+scripts/api/reset_metadata_on_installed_repositories.py
+scripts/api/sample_dataset_update_status.py
+scripts/api/sample_update_state.py
+scripts/api/sequencer_configuration_create.py
+scripts/api/update.py
+scripts/api/upload_to_history.py
+scripts/api/workflow_delete.py
+scripts/api/workflow_execute_parameters.py
+scripts/api/workflow_execute.py
+scripts/api/workflow_import_from_file_rpark.py
+scripts/api/workflow_import.py
+scripts/auth/
+scripts/bootstrap_history.py
+scripts/build_toolbox.py
+scripts/build_universe_config.py
+scripts/check_galaxy.py
+scripts/check_python.py
+scripts/cleanup_datasets/cleanup_datasets.py
+scripts/cleanup_datasets/populate_uuid.py
+scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py
+scripts/cleanup_datasets/rename_purged_datasets.py
+scripts/cleanup_datasets/update_metadata.py
+scripts/create_db.py
+scripts/data_libraries/
+scripts/db_shell.py
+scripts/drmaa_external_killer.py
+scripts/drmaa_external_runner.py
+scripts/external_chown_script.py
+scripts/extract_dataset_part.py
+scripts/extract_toolbox_sections.py
+scripts/functional_tests.py
+scripts/get_platforms.py
+scripts/helper.py
+scripts/__init__.py
+scripts/loc_files/
+scripts/manage_db.py
+scripts/manage_tools.py
+scripts/metagenomics/
+scripts/microbes/create_bacteria_loc_file.py
+scripts/microbes/create_bacteria_table.py
+scripts/microbes/create_nib_seq_loc_file.py
+scripts/microbes/get_builds_lengths.py
+scripts/microbes/ncbi_to_ucsc.py
+scripts/microbes/util.py
+scripts/migrate_tools/
+scripts/nosetests.py
+scripts/others/
+scripts/runtime_stats.py
+scripts/set_dataset_sizes.py
+scripts/set_user_disk_usage.py
+scripts/sync_reports_config.py
+scripts/tool_shed/api/add_repository_registry_entry.py
+scripts/tool_shed/api/common.py
+scripts/tool_shed/api/create_categories.py
+scripts/tool_shed/api/create_users.py
+scripts/tool_shed/api/import_capsule.py
+scripts/tool_shed/api/remove_repository_registry_entry.py
+scripts/tool_shed/api/reset_metadata_on_repositories.py
+scripts/tool_shed/api/tool_shed_repository_revision_update.py
+scripts/tool_shed/build_ts_whoosh_index.py
+scripts/tool_shed/check_download_urls.py
+scripts/tool_shed/check_filesystem_for_empty_tool_dependency_installation_paths.py
+scripts/tool_shed/clean_up_tool_dependency_directory.py
+scripts/tool_shed/migrate_tools_to_repositories.py
+scripts/tool_shed/show_tool_dependency_installation_dir_contents.py
+scripts/tools/re_escape_output.py
+test/api/__init__.py
+test/api/test_dataset_collections.py
+test/api/test_datasets.py
+test/api/test_datatypes.py
+test/api/test_framework.py
+test/api/test_history_contents_provenance.py
+test/api/test_jobs.py
+test/api/test_libraries.py
+test/api/test_page_revisions.py
+test/api/test_search.py
+test/api/test_tool_data.py
+test/api/test_tools.py
+test/api/test_tours.py
+test/api/test_workflow_extraction.py
+test/api/test_workflows_from_yaml.py
+test/api/workflows_format_2/
+test/base/api_asserts.py
+test/base/api.py
+test/base/api_util.py
+test/base/__init__.py
+test/base/instrument.py
+test/base/integration_util.py
+test/base/interactor.py
+test/base/nose_util.py
+test/base/test_logging.py
+test/base/tool_shed_util.py
+test/base/twilltestcase.py
+test/casperjs/
+test/functional/database_contexts.py
+test/functional/__init__.py
+test/functional/test_data_managers.py
+test/functional/test_toolbox.py
+test/functional/tool-data/
+test/functional/tools/
+test/functional/workflow.py
+test/integration/
+test/manual/__init__.py
+test/shed_functional/base/common.py
+test/shed_functional/base/__init__.py
+test/shed_functional/functional/
+test/shed_functional/functional_tests.py
+test/shed_functional/__init__.py
+test/unit/dataset_collections/
+test/unit/datatypes/dataproviders/__init__.py
+test/unit/datatypes/dataproviders/test_line_dataproviders.py
+test/unit/datatypes/__init__.py
+test/unit/datatypes/test_data.py
+test/unit/jobs/dynamic_tool_destination/
+test/unit/jobs/__init__.py
+test/unit/jobs/test_command_factory.py
+test/unit/jobs/test_datasets.py
+test/unit/jobs/test_job_configuration.py
+test/unit/jobs/test_job_output_checker.py
+test/unit/jobs/test_mapper.py
+test/unit/jobs/test_rule_helper.py
+test/unit/jobs/test_rules/
+test/unit/jobs/test_runner_params.py
+test/unit/managers/__init__.py
+test/unit/managers/test_CollectionManager.py
+test/unit/managers/test_DatasetManager.py
+test/unit/managers/test_HDAManager.py
+test/unit/managers/test_HDCAManager.py
+test/unit/managers/test_HistoryContentsManager.py
+test/unit/managers/test_UserManager.py
+test/unit/shed_unit/__init__.py
+test/unit/shed_unit/test_fabric_util.py
+test/unit/shed_unit/test_td_common_util.py
+test/unit/test_galaxy_transactions.py
+test/unit/test_lazy_process.py
+test/unit/test_routes.py
+test/unit/test_security_helper.py
+test/unit/test_sockets.py
+test/unit/test_sqlite_utils.py
+test/unit/test_topsort.py
+test/unit/tools/filter_modules/
+test/unit/tools/__init__.py
+test/unit/tools/test_actions.py
+test/unit/tools/test_citations.py
+test/unit/tools/test_collect_primary_datasets.py
+test/unit/tools/test_conda_resolution.py
+test/unit/tools/test_dataset_matcher.py
+test/unit/tools/test_parameter_parsing.py
+test/unit/tools/test_parsing.py
+test/unit/tools/test_select_parameters.py
+test/unit/tools/test_toolbox_filters.py
+test/unit/tools/test_tool_dependency_description.py
+test/unit/tools/test_tool_deps.py
+test/unit/tools/test_tool_external_files.py
+test/unit/tools/test_tool_loader.py
+test/unit/tools/test_tool_panel.py
+test/unit/tools/test_watcher.py
+test/unit/tools/test_wrappers.py
+test/unit/tools_support.py
+test/unit/unittest_utils/
+test/unit/visualizations/__init__.py
+test/unit/visualizations/plugins/__init__.py
+test/unit/web/base/__init__.py
+test/unit/web/framework/__init__.py
+test/unit/web/__init__.py
+test/unit/workflows/
+test/unit/test_objectstore.py
+tool_list.py
+tools/
diff --git a/.ci/flake8_py3_wrapper.sh b/.ci/flake8_py3_wrapper.sh
new file mode 100755
index 0000000..64419fc
--- /dev/null
+++ b/.ci/flake8_py3_wrapper.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+set -e
+
+flake8 --exclude `paste -sd, .ci/flake8_blacklist.txt` `paste -s .ci/py3_sources.txt`
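
The backtick substitutions lean on `paste`: `-s` serializes a file's lines
into a single line and `-d,` joins them with commas, the separator that
flake8's --exclude option expects. A minimal sketch of the expansion, with
hypothetical blacklist entries:

    $ printf 'dir_a/\ndir_b/\n' > /tmp/blacklist.txt
    $ paste -sd, /tmp/blacklist.txt
    dir_a/,dir_b/

so the wrapper effectively runs `flake8 --exclude dir_a/,dir_b/ <sources>`.
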
diff --git a/.ci/flake8_wrapper.sh b/.ci/flake8_wrapper.sh
new file mode 100755
index 0000000..47395c0
--- /dev/null
+++ b/.ci/flake8_wrapper.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+set -e
+
+flake8 --exclude `paste -sd, .ci/flake8_blacklist.txt` .
+
+# Apply stricter rules for the directories shared with Pulsar
+flake8 --ignore=D --max-line-length=150 lib/galaxy/jobs/runners/util/
diff --git a/.ci/flake8_wrapper_docstrings.sh b/.ci/flake8_wrapper_docstrings.sh
new file mode 100755
index 0000000..4fe943c
--- /dev/null
+++ b/.ci/flake8_wrapper_docstrings.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+set -e
+
+# D100 - Missing docstring in public module.
+# D2XX - Whitespace issues.
+# D3XX - Quoting issues.
+# D401 - First line should be in imperative mood.
+# D403 - First word of the first line should be properly capitalized.
+args="--ignore=D --select=D100,D201,D202,D206,D207,D208,D209,D211,D3,D401,D403"
+
+# If the first argument is --include, lint the modules expected to pass. If
+# the first argument is --exclude, lint all modules the full Galaxy linter lints
+# (this will fail).
+
+if [ "$1" = "--include" ];
+then
+    flake8 $args `paste .ci/flake8_docstrings_include_list.txt`
+else
+    flake8 $args --exclude `paste -sd, .ci/flake8_blacklist.txt` .
+fi
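
Assuming the script is run from the repository root, both modes can be
exercised directly:

    $ ./.ci/flake8_wrapper_docstrings.sh --include   # curated list, expected to pass
    $ ./.ci/flake8_wrapper_docstrings.sh --exclude   # whole tree, expected to fail
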
diff --git a/.ci/flake8_wrapper_imports.sh b/.ci/flake8_wrapper_imports.sh
new file mode 100755
index 0000000..22afed2
--- /dev/null
+++ b/.ci/flake8_wrapper_imports.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+set -e
+
+flake8 `paste .ci/flake8_lint_include_list.txt`
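
Here `paste` is given a single file and no `-s` flag, so it simply copies
the file's lines through; after the shell word-splits the substitution,
the command is equivalent to:

    $ flake8 $(cat .ci/flake8_lint_include_list.txt)
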
diff --git a/.ci/py3_sources.txt b/.ci/py3_sources.txt
new file mode 100644
index 0000000..8643f9f
--- /dev/null
+++ b/.ci/py3_sources.txt
@@ -0,0 +1,102 @@
+client/galaxy/style/source_material/circle.py
+contrib/
+cron/
+doc/parse_gx_xsd.py
+doc/patch.py
+lib/galaxy/actions/
+lib/galaxy/auth/
+lib/galaxy/config.py
+lib/galaxy/dataset_collections/
+lib/galaxy/datatypes/assembly.py
+lib/galaxy/datatypes/binary.py
+lib/galaxy/datatypes/constructive_solid_geometry.py
+lib/galaxy/datatypes/converters/
+lib/galaxy/datatypes/dataproviders/
+lib/galaxy/datatypes/data.py
+lib/galaxy/datatypes/images.py
+lib/galaxy/datatypes/msa.py
+lib/galaxy/datatypes/ngsindex.py
+lib/galaxy/datatypes/proteomics.py
+lib/galaxy/datatypes/sequence.py
+lib/galaxy/datatypes/sniff.py
+lib/galaxy/datatypes/tabular.py
+lib/galaxy/datatypes/tracks.py
+lib/galaxy/dependencies/
+lib/galaxy/eggs/
+lib/galaxy/exceptions/
+lib/galaxy/external_services/
+lib/galaxy/forms/
+lib/galaxy/jobs/
+lib/galaxy/managers/
+lib/galaxy/model/__init__.py
+lib/galaxy/model/item_attrs.py
+lib/galaxy/model/mapping.py
+lib/galaxy/model/metadata.py
+lib/galaxy/model/migrate/
+lib/galaxy/model/orm/now.py
+lib/galaxy/objectstore/
+lib/galaxy/openid/
+lib/galaxy/quota/
+lib/galaxy/sample_tracking/
+lib/galaxy/security/
+lib/galaxy/tags/
+lib/galaxy/tools/cwl/
+lib/galaxy/tools/deps/
+lib/galaxy/tools/exception_handling.py
+lib/galaxy/tools/execute.py
+lib/galaxy/tools/lint.py
+lib/galaxy/tools/lint_util.py
+lib/galaxy/tools/linters/
+lib/galaxy/tools/loader.py
+lib/galaxy/tools/loader_directory.py
+lib/galaxy/tools/parameters/dataset_matcher.py
+lib/galaxy/tools/parameters/__init__.py
+lib/galaxy/tools/parameters/wrapped_json.py
+lib/galaxy/tools/parameters/wrapped.py
+lib/galaxy/tools/parser/
+lib/galaxy/tools/toolbox/
+lib/galaxy/tours/
+lib/galaxy/util/
+lib/galaxy/visualization/
+lib/galaxy/web/buildapp.py
+lib/galaxy/web/framework/__init__.py
+lib/galaxy/web/framework/middleware/error.py
+lib/galaxy/web/framework/middleware/static.py
+lib/galaxy/web/__init__.py
+lib/galaxy/webapps/galaxy/api/tours.py
+lib/galaxy/webapps/galaxy/api/workflows.py
+lib/galaxy/webapps/galaxy/controllers/userskeys.py
+lib/galaxy/webapps/reports/config.py
+lib/galaxy/webapps/reports/__init__.py
+lib/galaxy/webapps/tool_shed/controllers/user.py
+lib/galaxy/webapps/tool_shed/__init__.py
+lib/galaxy/work/
+lib/galaxy/workflow/extract.py
+lib/galaxy/workflow/run.py
+lib/galaxy/workflow/schedulers/core.py
+lib/galaxy_ext/
+lib/galaxy_utils/
+lib/log_tempfile.py
+lib/psyco_full.py
+lib/tool_shed/
+scripts/api/
+scripts/auth/
+scripts/bootstrap_history.py
+scripts/build_toolbox.py
+scripts/check_eggs.py
+scripts/check_galaxy.py
+scripts/check_python.py
+scripts/cleanup_datasets/admin_cleanup_datasets.py
+scripts/cleanup_datasets/cleanup_datasets.py
+scripts/cleanup_datasets/pgcleanup.py
+scripts/cleanup_datasets/populate_uuid.py
+scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py
+scripts/cleanup_datasets/rename_purged_datasets.py
+scripts/cleanup_datasets/update_dataset_size.py
+scripts/cleanup_datasets/update_metadata.py
+scripts/data_libraries/build_whoosh_index.py
+scripts/db_shell.py
+scripts/drmaa_external_runner.py
+test/
+tool_list.py
+tools/
diff --git a/.ci/validate_test_tools.sh b/.ci/validate_test_tools.sh
new file mode 100755
index 0000000..9b43e7a
--- /dev/null
+++ b/.ci/validate_test_tools.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+cd `dirname $0`/..
+
+xsd_path="lib/galaxy/tools/xsd/galaxy.xsd"
+# Lint the XSD
+xmllint --noout "$xsd_path"
+
+test_tools_path='test/functional/tools'
+tool_files_list=$(ls "$test_tools_path"/*xml | grep -v '_conf.xml$')
+sh scripts/validate_tools.sh $tool_files_list
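
The `grep -v` filter keeps the tool wrappers themselves while dropping any
`*_conf.xml` configuration files that live alongside them; with
hypothetical directory contents:

    $ ls test/functional/tools/*xml | grep -v '_conf.xml$'
    test/functional/tools/cat.xml
    test/functional/tools/multi_output.xml
    # samples_tool_conf.xml would be excluded by the filter
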
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..2605044
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,3 @@
+[run]
+branch = True
+include = lib/galaxy/*
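
`coverage` reads .coveragerc from the working directory by default, so no
extra flags are needed; a run might look like this (test target
illustrative):

    $ coverage run scripts/nosetests.py test/unit
    $ coverage report
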
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..889911f
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,36 @@
+language: python
+python: 2.7
+os:
+  - linux
+env:
+  - TOX_ENV=py34-lint
+  - TOX_ENV=py27-lint
+  - TOX_ENV=py27-unit
+  - TOX_ENV=qunit
+  - TOX_ENV=first_startup
+  - TOX_ENV=py27-lint-imports
+  - TOX_ENV=py27-lint-imports-include-list
+  - TOX_ENV=validate-test-tools
+  - TOX_ENV=py27-lint-docstring-include-list
+
+matrix:
+  include:
+  - os: osx
+    env: TOX_ENV=first_startup
+    language: generic
+  - os: osx
+    env: TOX_ENV=py27-unit
+    language: generic
+  allow_failures:
+  - env: TOX_ENV=py27-lint-imports
+
+install:
+  - pip install tox
+  - if [ "$TOX_ENV" == "validate-test-tools" ]; then sudo apt-get install libxml2-utils; fi
+  - if [ "$TOX_ENV" == "qunit" ]; then bash -c 'cd test/qunit && npm install'; fi
+  - if [ "$TOX_ENV" == "first_startup" ]; then bash -c "bash scripts/common_startup.sh && wget -q https://github.com/jmchilton/galaxy-downloads/raw/master/db_gx_rev_0127.sqlite && mv db_gx_rev_0127.sqlite database/universe.sqlite && bash manage_db.sh -c ./config/galaxy.ini.sample upgrade"; fi
+
+script: tox -e $TOX_ENV
+
+notifications:
+  email: false
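
Each matrix entry only selects a tox environment, so any of these checks
can be reproduced locally with the same command Travis runs:

    $ pip install tox
    $ tox -e py27-unit   # or any other TOX_ENV value listed above
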
diff --git a/CITATION b/CITATION
new file mode 100644
index 0000000..1e6092e
--- /dev/null
+++ b/CITATION
@@ -0,0 +1,54 @@
+If you use or extend Galaxy in your published work, please cite each of the
+following publications:
+
+- Goecks, J, Nekrutenko, A, Taylor, J and The Galaxy Team. "Galaxy: a
+  comprehensive approach for supporting accessible, reproducible, and
+  transparent computational research in the life sciences." 
+  Genome Biol. 2010 Aug 25;11(8):R86.
+
+- Blankenberg D, Von Kuster G, Coraor N, Ananda G, Lazarus R, Mangan M,
+  Nekrutenko A, Taylor J. "Galaxy: a web-based genome analysis tool for
+  experimentalists". Current Protocols in Molecular Biology. 
+  2010 Jan; Chapter 19:Unit 19.10.1-21.
+
+- Giardine B, Riemer C, Hardison RC, Burhans R, Elnitski L, Shah P, Zhang Y,
+  Blankenberg D, Albert I, Taylor J, Miller W, Kent WJ, Nekrutenko A. "Galaxy:
+  a platform for interactive large-scale genome analysis." 
+  Genome Research. 2005 Oct; 15(10):1451-5.
+
+See also: http://wiki.galaxyproject.org/CitingGalaxy
+
+
+
+BibTeX format:
+
+@article{goecks2010galaxy,
+  title={Galaxy: a comprehensive approach for supporting accessible, reproducible, and transparent computational research in the life sciences},
+  author={Goecks, Jeremy and Nekrutenko, Anton and Taylor, James and The Galaxy Team},
+  journal={Genome Biol},
+  volume={11},
+  number={8},
+  pages={R86},
+  year={2010}
+}
+
+@article{blankenberg2010galaxy,
+  title={Galaxy: A Web-Based Genome Analysis Tool for Experimentalists},
+  author={Blankenberg, Daniel and Kuster, Gregory Von and Coraor, Nathaniel and Ananda, Guruprasad and Lazarus, Ross and Mangan, Mary and Nekrutenko, Anton and Taylor, James},
+  journal={Current protocols in molecular biology},
+  pages={19--10},
+  year={2010},
+  publisher={John Wiley \& Sons, Inc.}
+}
+
+@article{giardine2005galaxy,
+  title={Galaxy: a platform for interactive large-scale genome analysis},
+  author={Giardine, Belinda and Riemer, Cathy and Hardison, Ross C and Burhans, Richard and Elnitski, Laura and Shah, Prachi and Zhang, Yi and Blankenberg, Daniel and Albert, Istvan and Taylor, James and Miller, Webb C and Kent, W James and Nekrutenko, Anton},
+  journal={Genome research},
+  volume={15},
+  number={10},
+  pages={1451--1455},
+  year={2005},
+  publisher={Cold Spring Harbor Lab}
+}
+
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..26bb100
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,99 @@
+Galaxy Project Code of Conduct
+==============================
+
+This code of conduct outlines our expectations for participants within the
+Galaxy community, as well as steps to reporting unacceptable behavior. We are
+committed to providing a welcoming and inspiring community for all and expect
+our code of conduct to be honored. Anyone who violates this code of conduct may
+be banned from the community.
+
+Our open source community strives to:
+
+* **Be friendly and patient.**
+
+* **Be welcoming**: We strive to be a community that welcomes and
+  supports people of all backgrounds and identities. This includes, but is not
+  limited to members of any race, ethnicity, culture, national origin, colour,
+  immigration status, social and economic class, educational level, sex, sexual
+  orientation, gender identity and expression, age, size, family status,
+  political belief, religion, and mental and physical ability.
+
+* **Be considerate**: Your work will be used by other people, and you in turn
+  will depend on the work of others. Any decision you take will affect users
+  and colleagues, and you should take those consequences into account when
+  making decisions. Remember that we're a world-wide community, so you might
+  not be communicating in someone else's primary language.
+
+* **Be respectful**: Not all of us will agree all the time, but disagreement is
+  no excuse for poor behavior and poor manners. We might all experience some
+  frustration now and then, but we cannot allow that frustration to turn into a
+  personal attack. It’s important to remember that a community where people
+  feel uncomfortable or threatened is not a productive one.
+
+* **Be careful in the words that we choose**: We are a community of
+  professionals, and we conduct ourselves professionally. Be kind to others. Do
+  not insult or put down other participants. Harassment and other exclusionary
+  behavior aren't acceptable. This includes, but is not limited to: Violent
+  threats or language directed against another person, Discriminatory jokes and
+  language, Posting sexually explicit or violent material, Posting (or
+  threatening to post) other people’s personally identifying information
+  (“doxing”), Personal insults, especially those using racist or sexist terms,
+  Unwelcome sexual attention, Advocating for, or encouraging, any of the above
+  behavior, Repeated harassment of others. In general, if someone asks you to
+  stop, then stop.
+
+* **Try to understand why we disagree**: Disagreements, both social and
+  technical, happen all the time. It is important that we resolve disagreements
+  and differing views constructively. Remember that we’re different. Diversity
+  contributes to the strength of our community, which is composed of people
+  from a wide range of backgrounds. Different people have different
+  perspectives on issues. Being unable to understand why someone holds a
+  viewpoint doesn’t mean that they’re wrong. Don’t forget that it is human to
+  err and blaming each other doesn’t get us anywhere. Instead, focus on helping
+  to resolve issues and learning from mistakes.
+
+### Diversity Statement
+
+We encourage everyone to participate and are committed to building a community
+for all. Although we will fail at times, we seek to treat everyone as
+fairly and equally as possible. Whenever a participant has made a mistake, we
+expect them to take responsibility for it. If someone has been harmed or
+offended, it is our responsibility to listen carefully and respectfully, and do
+our best to right the wrong.
+
+Although this list cannot be exhaustive, we explicitly honor diversity in age,
+gender, gender identity or expression, culture, ethnicity, language, national
+origin, political beliefs, profession, race, religion, sexual orientation,
+socioeconomic status, and technical ability. We will not tolerate
+discrimination based on any of the protected characteristics above, including
+participants with disabilities.
+
+### Reporting Issues
+
+If you experience or witness unacceptable behavior, or have any other concerns,
+please report it by contacting Dave Clements (clementsgalaxy at gmail.com). To
+report an issue involving Dave Clements please email James Taylor
+(james at taylorlab.org). All reports will be handled with discretion. In your
+report please include:
+
+- Your contact information.
+
+- Names (real, nicknames, or pseudonyms) of any individuals involved. If there
+  are additional witnesses, please include them as well. Your account of what
+  occurred, and if you believe the incident is ongoing. If there is a publicly
+  available record (e.g. a mailing list archive or a public IRC logger), please
+  include a link.
+
+- Any additional information that may be helpful.
+
+After filing a report, a representative will contact you personally, review the
+incident, follow up with any additional questions, and make a decision as to
+how to respond. If the person who is harassing you is part of the response
+team, they will recuse themselves from handling your incident. If the complaint
+originates from a member of the response team, it will be handled by a
+different member of the response team. We will respect confidentiality requests
+for the purpose of protecting victims of abuse.
+
+### Attribution & Acknowledgements
+
+This code of conduct is based on the Open Code of Conduct from the TODO Group.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..b27d3aa
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,120 @@
+# Contributing
+
+Galaxy welcomes new development!
+This document briefly describes how to contribute to the [core
+galaxy project](https://github.com/galaxyproject/galaxy) -
+also check out our 2013 Galaxy Community
+Conference presentation on the topic
+([video](https://vimeo.com/channels/581875/73486255),
+[presentation](https://wiki.galaxyproject.org/Documents/Presentations/GCC2013?action=AttachFile&do=view&target=BakerContribute.pdf)). For
+information on contributing more broadly to the Galaxy ecosystem and a
+deeper discussion of some of these points - please see the
+[Develop](https://wiki.galaxyproject.org/Develop/) section of the
+[Galaxy Wiki](https://wiki.galaxyproject.org/).
+
+## Before you Begin
+
+If you have an idea for a feature to add or an approach for a bugfix,
+it is best to communicate with Galaxy developers early. The most
+common venues for this are
+[GitHub issues](https://github.com/galaxyproject/galaxy/issues) and the
+[Galaxy and Tool Shed Trello boards](https://wiki.galaxyproject.org/Issues).
+Browse through existing GitHub issues and Trello cards and if one seems related,
+comment on it. We also maintain a [card](https://trello.com/c/eFdPIdIB) with
+links to smaller issues we believe would make the best entry points for new
+developers.
+Galaxy developers are generally available via
+[IRC](https://wiki.galaxyproject.org/GetInvolved#IRC_Channel) and on
+the [development mailing list](http://dev.list.galaxyproject.org/).
+
+## Reporting a new issue
+
+If no existing Galaxy issue/Trello card seems appropriate, a new issue can be
+opened using [this form](https://github.com/galaxyproject/galaxy/issues/new).
+
+## How to Contribute
+
+* All changes to the [core galaxy project](https://github.com/galaxyproject/galaxy)
+  should be made through pull requests to this repository (with just two
+  exceptions outlined below).
+
+* If you are new to Git, the [Try Git](http://try.github.com/) tutorial is a good place to start.
+  More learning resources are listed at https://help.github.com/articles/good-resources-for-learning-git-and-github/ .
+
+* Make sure you have a free [GitHub](https://github.com/) account.
+
+* Fork the [galaxy repository](https://github.com/galaxyproject/galaxy) on
+  GitHub to make your changes.
+  (While many Galaxy instances track
+  [galaxy-dist](https://bitbucket.org/galaxy/galaxy-dist), active development
+  happens in the galaxy GitHub repository and this is where pull requests
+  should be made).
+  To keep your copy up to date with respect to the main repository, you need to
+  frequently [sync your fork](https://help.github.com/articles/syncing-a-fork/):
+  ```
+    $ git remote add upstream https://github.com/galaxyproject/galaxy
+    $ git fetch upstream
+    $ git checkout dev
+    $ git merge upstream/dev
+  ```
+
+* Choose the correct branch to develop your changes against.
+
+  * Additions of new features to the code base should be pushed to the `dev` branch (`git
+    checkout dev`).
+
+  * Most bug fixes to previously released components (things in galaxy-dist)
+    should be made against the most recent `release_XX.XX` branch (`git checkout release_XX.XX`).
+
+  * Serious security problems should not be fixed via pull request - please
+    responsibly disclose these by e-mailing them (with or without patches) to
+    galaxy-committers at lists.galaxyproject.org . The Galaxy core development team will
+    issue patches to public servers before announcing the issue to ensure there
+    is time to patch and highlight these fixes widely. We will provide you
+    credit for the discovery when publicly disclosing the issue.
+
+* If your changes modify code - please ensure the resulting files
+  conform to Galaxy [style
+  guidelines](https://wiki.galaxyproject.org/Develop/BestPractices).
+
+* Galaxy contains hundreds of tests of different types and complexity
+  and running each is difficult and probably not reasonable at this
+  time (someday we will provide a holistic test procedure to make this
+  possible). For now, please just review the [running tests
+  documentation](https://wiki.galaxyproject.org/Admin/RunningTests)
+  and run any that seem relevant. Developers reviewing your pull
+  request will be happy to help guide you to running the most relevant
+  tests as part of the pull request review process and may request the
+  output of these tests. You can run the continuous integration tests locally
+  using `tox`, example: `tox -e py27-lint,py27-unit`.
+
+* Commit and push your changes to your
+  [fork](https://help.github.com/articles/pushing-to-a-remote/).
+
+* Open a [pull
+  request](https://help.github.com/articles/creating-a-pull-request/)
+  with these changes. Your pull request message ideally should include:
+
+   * A description of why the changes should be made.
+
+   * A description of the implementation of the changes.
+
+   * A description of how to test the changes.
+
+* The pull request should pass all the continuous integration tests, which are
+  run automatically by GitHub using services such as Travis CI.
+
+* Your pull request will be handled according to
+  [some rules](doc/source/project/organization.rst#handling-pull-requests).
+
+## A Quick Note about Tools
+
+  For the most part, Galaxy tools should be published to the
+  [Tool Shed](https://wiki.galaxyproject.org/ToolShed) and not in this
+  repository directly. If you are looking to supply fixes for migrated
+  core tools that used to exist in this repository - please check out
+  the [tools-devteam](https://github.com/galaxyproject/tools-devteam)
+  repository on GitHub.
+
+  More information about tool development can be found [on the
+  wiki](https://wiki.galaxyproject.org/Develop).
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
new file mode 100644
index 0000000..ffc1571
--- /dev/null
+++ b/CONTRIBUTORS.md
@@ -0,0 +1,152 @@
+# Contributors
+
+The following individuals have contributed code to Galaxy:
+
+* Enis Afgan <afgane at gmail.com>
+* Istvan Albert <istvan.albert at gmail.com>
+* Renato Alves <alves.rjc at gmail.com> <rjalves at igc.gulbenkian.pt>
+* Guruprasad Ananda <gua110 at bx.psu.edu>
+* Florent Angly <florent.angly at gmail.com>
+* Raj Ayyampalayam <raj76 at uga.edu>
+* Finn Bacall <finn.bacall at cs.man.ac.uk>
+* Dannon Baker <dannon.baker at gmail.com>
+* Christopher Bare <christopherbare at gmail.com>
+* Marius van den Beek <m.vandenbeek at gmail.com>
+* Dan Blanchard <dan.blanchard at gmail.com>
+* Daniel Blankenberg <dan.blankenberg at gmail.com> <dan at bx.psu.edu>
+* James Boocock <sfk2001 at gmail.com>
+* Carlos Borroto <carlos.borroto at gmail.com>
+* Daniel Bouchard <dbouchard at corefacility.ca> <daniel.bouchard at phac-aspc.gc.ca>
+* Dave Bouvier <dave at bx.psu.edu>
+* Adam Brenner <aebrenne at uci.edu>
+* Anthony Bretaudeau <anthony.bretaudeau at rennes.inra.fr> <abretaud at irisa.fr>
+* Christian Y. Brenninkmeijer <christian.brenninkmeijer at manchester.ac.uk>
+* Freek de Bruijn <freek.de.bruijn at nbic.nl>
+* Richard Burhans <burhans at bx.psu.edu>
+* Jennifer Cabral <jencabral at gmail.com>
+* Martin Čech <marten at bx.psu.edu>
+* Ramkrishna Chakrabarty <rc at bx.psu.edu>
+* Brad Chapman <chapmanb at 50mail.com>
+* John Chilton <jmchilton at gmail.com>
+* Saket Choudhary <saketkc at gmail.com>
+* Wen-Yu Chung <wychung at bx.psu.edu>
+* Dave Clements <clements at galaxyproject.org>
+* Peter Cock <p.j.a.cock at googlemail.com> <peter.cock at hutton.ac.uk>
+* Ira Cooke <iracooke at gmail.com>
+* Nate Coraor <nate at bx.psu.edu>
+* Michael Cotterell <mepcotterell at gmail.com>
+* Michael R. Crusoe <crusoe at ucdavis.edu>
+* Gianmauro Cuccuru <gmauro at crs4.it>
+* Frederik Delaere <frederik.delaere at gmail.com>
+* Matthias Desmet <matthias.desmet at ugent.be>
+* Olivia Doppelt <olivia.doppelt at pasteur.fr>
+* Shane Dowling <shane at shanedowling.com>
+* John Duddy <jduddy at illumina.com>
+* Carl Eberhard <carlfeberhard at gmail.com>
+* Mark Einon <mark.einon at gmail.com>
+* Kyle Ellrott <kellrott at gmail.com> <kellrott at soe.ucsc.edu>
+* Eric Enns <eric.enns at gmail.com>
+* fescudie <fescudie at toulouse.inra.fr>
+* Dorine Francheteau <dorine at bx.psu.edu>
+* Jean-Frédéric (@JeanFred on Github)
+* Jaime Frey <jfrey at cs.wisc.edu>
+* Carrie Ganote <cganote at iu.edu>
+* Ryan Golhar <ngsbioinformatics at gmail.com>
+* Jeremy Goecks <jeremy.goecks at emory.edu> <jgoecks at gwu.edu>
+* Nuwan Goonasekera <nuwan.goonasekera at gmail.com>
+* Björn Grüning <bjoern.gruening at gmail.com> <bjoern at gruenings.eu>
+* Aysam Guerler <aysam.guerler at gmail.com>
+* Simon Guest <simon.guest at agresearch.co.nz>
+* Jianbin He <jbhe at bx.psu.edu>
+* Peter van Heusden <pvh at sanbi.ac.za>
+* Morita Hideyuki <h-morita at esm.co.jp>
+* Saskia Hiltemann <zazkia at gmail.com>
+* Rob Hooft <rob.hooft at nbic.nl>
+* Y. Hoogstrate <y.hoogstrate at erasmusmc.nl>
+* Jian-Long Huang <jlh at pyhub.org>
+* Gert Hulselmans <gert.hulselmans at med.kuleuven.be>
+* Jennifer Jackson <jen at bx.psu.edu>
+* Joachim Jacob <joachim.jacob at gmail.com>
+* Jim Johnson <jj at umn.edu> <jj at msi.umn.edu>
+* Radhesh Kamath <radhesh at bx.psu.edu>
+* Jan Kanis <jan.code at jankanis.nl>
+* David King <dcking at bx.psu.edu>
+* Rory Kirchner <roryk at mit.edu>
+* Edward Kirton <eskirton at lbl.gov>
+* Brad Langhorst <langhorst at neb.com>
+* Ross Lazarus <ross.lazarus at gmail.com> <rossl at bx.psu.edu>
+* Gildas Le Corguillé @lecorguille
+* Simone Leo <simone.leo at gmail.com>
+* Kanwei Li <kanwei at gmail.com>
+* Michael Li <michael.li at uwaterloo.ca>
+* Pierre Lindenbaum <plindenbaum at yahoo.fr>
+* Mikael Loaec <mikael.loaec at versailles.inra.fr>
+* Philip Mabon <philipmabon at gmail.com>
+* Remi Marenco <remi.marenco at gmail.com> <remimarenco at gmail.com>
+* Zipho Mashologu <zipho at trustpay.biz>
+* Thomas McGowan <mcgo0092 at msi.umn.edu>
+* Scott McManus <scottmcmanus at emory.edu> <scottmcmanus at gatech.edu>
+* Hunter Moseley <hunter.moseley at louisville.edu>
+* Takao Nakaguchi <takao.nakaguchi at gmail.com>
+* Arjun Nath <arjun at bx.psu.edu>
+* Anton Nekrutenko <anton at bx.psu.edu> <anton at nekrut.org>
+* Eric Paniagua <paniagua.cshl at gmail.com>
+* Richard Park <rpark at bu.edu>
+* Lance Parsons <lparsons at princeton.edu>
+* Chinmay Rao <chinmay at bx.psu.edu>
+* Matt Ralston <mrals89 at gmail.com>
+* Eric Rasche <esr at tamu.edu> <rasche.eric at gmail.com> <rasche.eric at yandex.ru>
+* Andrew Robinson <Andrew.Robinson at latrobe.edu.au>
+* Michael Sauria <crockopotamus at gmail.com>
+* Andrea Sbardellati <andrea.sbardellati at crs4.it>
+* Ian Schenck <ian at bx.psu.edu>
+* Nick Semenkovich <semenko at alum.mit.edu>
+* Matthew Shirley <mdshw5 at gmail.com>
+* Sourav Singh <ssouravsingh12 at gmail.com>
+* Clare Sloggett <sloc at unimelb.edu.au>
+* Eteri Sokhoyan @Sokhoyan
+* Nicola Soranzo <nicola.soranzo at tgac.ac.uk> <nsoranzo at tiscali.it> <soranzo at crs4.it>
+* Roy Storey <kiwiroy at gmail.com>
+* Hanfei Sun <ad9075 at gmail.com>
+* Ilya Sytchev <hackdna at gmail.com>
+* James Taylor <james at jamestaylor.org>
+* Tomithy Too <tomithy.too at gmail.com>
+* David Trudgian <dave at trudgian.net> <david.trudgian at utsouthwestern.edu>
+* Nitesh Turaga <nitesh.turaga at gmail.com>
+* Clayton Turner <clayclay911 at gmail.com>
+* Jesse c j van Dam <jesse.vandam at wur.nl>
+* Marek Vavruša <marek at vavrusa.com>
+* Martijn Vermaat <m.vermaat.hg at lumc.nl>
+* Kelly Vincent <kpvincent at bx.psu.edu>
+* Greg Von Kuster <greg at bx.psu.edu>
+* Pavan Videm <videmp at informatik.uni-freiburg.de>
+* Hiral Vora <hvora1 at uncc.edu>
+* Andrew Warren <anwarren at vbi.vt.edu>
+* Trevor Wennblom <trevor at well.com>
+* Thomas Wollmann <thomas.s.wollmann at gmail.com> <thomas.wollmann at bioquant.uni-heidelberg.de>
+* Jay Young <xiaojay at gmail.com>
+* Yi Zhang <yizhang at bx.psu.edu>
+
+# Institutional sponsors
+
+Galaxy development began at The Pennsylvania State University in 2006.
+In 2009 all contributions to that point were licensed by The
+Pennsylvania State University under the terms of the Academic Free
+License 3.0 (see LICENSE.txt). This license applies to all subsequent
+contributions - including but not limited to development at The
+Pennsylvania State University, Emory University, Johns Hopkins
+University, and George Washington University as part of the following
+NIH and NSF grants:
+
+* NSF DBI 0543285, “Tailoring genomic data to the needs of experimental
+  biologists and educators”
+* NIH R21 HG005133, “A turnkey solution for next generation sequence
+  data analysis”
+* NIH R01 HG004909, “An efficient lightweight environment for biomedical
+  computation”
+* NSF DBI 0850103, “Cyberinfrastructure for accessible and reproducible
+  research in life sciences”
+* NIH RC2 HG005542, “Dynamically scalable accessible analysis for next
+  generation sequence data”
+* NIH U41 HG006620, “Democratization of data analysis in life sciences
+  through Galaxy”
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..9fc19c2
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,186 @@
+Copyright (c) 2005-2016 Galaxy Contributors (see CONTRIBUTORS.md)
+
+Licensed under the Academic Free License version 3.0
+
+ 1) Grant of Copyright License. Licensor grants You a worldwide, royalty-free, 
+    non-exclusive, sublicensable license, for the duration of the copyright, to 
+    do the following:
+
+    a) to reproduce the Original Work in copies, either alone or as part of a 
+       collective work;
+
+    b) to translate, adapt, alter, transform, modify, or arrange the Original 
+       Work, thereby creating derivative works ("Derivative Works") based upon 
+       the Original Work;
+
+    c) to distribute or communicate copies of the Original Work and Derivative 
+       Works to the public, under any license of your choice that does not 
+       contradict the terms and conditions, including Licensor's reserved 
+       rights and remedies, in this Academic Free License;
+
+    d) to perform the Original Work publicly; and
+
+    e) to display the Original Work publicly.
+
+ 2) Grant of Patent License. Licensor grants You a worldwide, royalty-free, 
+    non-exclusive, sublicensable license, under patent claims owned or 
+    controlled by the Licensor that are embodied in the Original Work as 
+    furnished by the Licensor, for the duration of the patents, to make, use, 
+    sell, offer for sale, have made, and import the Original Work and 
+    Derivative Works.
+
+ 3) Grant of Source Code License. The term "Source Code" means the preferred 
+    form of the Original Work for making modifications to it and all available 
+    documentation describing how to modify the Original Work. Licensor agrees 
+    to provide a machine-readable copy of the Source Code of the Original Work 
+    along with each copy of the Original Work that Licensor distributes. 
+    Licensor reserves the right to satisfy this obligation by placing a 
+    machine-readable copy of the Source Code in an information repository 
+    reasonably calculated to permit inexpensive and convenient access by You 
+    for as long as Licensor continues to distribute the Original Work.
+
+ 4) Exclusions From License Grant. Neither the names of Licensor, nor the 
+    names of any contributors to the Original Work, nor any of their 
+    trademarks or service marks, may be used to endorse or promote products 
+    derived from this Original Work without express prior permission of the 
+    Licensor. Except as expressly stated herein, nothing in this License 
+    grants any license to Licensor's trademarks, copyrights, patents, trade 
+    secrets or any other intellectual property. No patent license is granted 
+    to make, use, sell, offer for sale, have made, or import embodiments of 
+    any patent claims other than the licensed claims defined in Section 2. 
+    No license is granted to the trademarks of Licensor even if such marks 
+    are included in the Original Work. Nothing in this License shall be 
+    interpreted to prohibit Licensor from licensing under terms different 
+    from this License any Original Work that Licensor otherwise would have a 
+    right to license.
+
+ 5) External Deployment. The term "External Deployment" means the use, 
+    distribution, or communication of the Original Work or Derivative Works 
+    in any way such that the Original Work or Derivative Works may be used by 
+    anyone other than You, whether those works are distributed or 
+    communicated to those persons or made available as an application 
+    intended for use over a network. As an express condition for the grants 
+    of license hereunder, You must treat any External Deployment by You of 
+    the Original Work or a Derivative Work as a distribution under 
+    section 1(c).
+
+ 6) Attribution Rights. You must retain, in the Source Code of any Derivative 
+    Works that You create, all copyright, patent, or trademark notices from 
+    the Source Code of the Original Work, as well as any notices of licensing 
+    and any descriptive text identified therein as an "Attribution Notice." 
+    You must cause the Source Code for any Derivative Works that You create 
+    to carry a prominent Attribution Notice reasonably calculated to inform 
+    recipients that You have modified the Original Work.
+
+ 7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that 
+    the copyright in and to the Original Work and the patent rights granted 
+    herein by Licensor are owned by the Licensor or are sublicensed to You 
+    under the terms of this License with the permission of the contributor(s) 
+    of those copyrights and patent rights. Except as expressly stated in the 
+    immediately preceding sentence, the Original Work is provided under this 
+    License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or 
+    implied, including, without limitation, the warranties of 
+    non-infringement, merchantability or fitness for a particular purpose. 
+    THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This 
+    DISCLAIMER OF WARRANTY constitutes an essential part of this License. 
+    No license to the Original Work is granted by this License except under 
+    this disclaimer.
+
+ 8) Limitation of Liability. Under no circumstances and under no legal 
+    theory, whether in tort (including negligence), contract, or otherwise, 
+    shall the Licensor be liable to anyone for any indirect, special, 
+    incidental, or consequential damages of any character arising as a result 
+    of this License or the use of the Original Work including, without 
+    limitation, damages for loss of goodwill, work stoppage, computer failure 
+    or malfunction, or any and all other commercial damages or losses. This 
+    limitation of liability shall not apply to the extent applicable law 
+    prohibits such limitation.
+
+ 9) Acceptance and Termination. If, at any time, You expressly assented to 
+    this License, that assent indicates your clear and irrevocable acceptance 
+    of this License and all of its terms and conditions. If You distribute or 
+    communicate copies of the Original Work or a Derivative Work, You must 
+    make a reasonable effort under the circumstances to obtain the express 
+    assent of recipients to the terms of this License. This License 
+    conditions your rights to undertake the activities listed in Section 1, 
+    including your right to create Derivative Works based upon the Original 
+    Work, and doing so without honoring these terms and conditions is 
+    prohibited by copyright law and international treaty. Nothing in this 
+    License is intended to affect copyright exceptions and limitations 
+    (including "fair use" or "fair dealing"). This License shall terminate 
+    immediately and You may no longer exercise any of the rights granted to 
+    You by this License upon your failure to honor the conditions in 
+    Section 1(c).
+
+10) Termination for Patent Action. This License shall terminate 
+    automatically and You may no longer exercise any of the rights granted 
+    to You by this License as of the date You commence an action, including 
+    a cross-claim or counterclaim, against Licensor or any licensee alleging 
+    that the Original Work infringes a patent. This termination provision 
+    shall not apply for an action alleging patent infringement by 
+    combinations of the Original Work with other software or hardware.
+
+11) Jurisdiction, Venue and Governing Law. Any action or suit relating to 
+    this License may be brought only in the courts of a jurisdiction wherein 
+    the Licensor resides or in which Licensor conducts its primary business, 
+    and under the laws of that jurisdiction excluding its conflict-of-law 
+    provisions. The application of the United Nations Convention on 
+    Contracts for the International Sale of Goods is expressly excluded. Any 
+    use of the Original Work outside the scope of this License or after its 
+    termination shall be subject to the requirements and penalties of 
+    copyright or patent law in the appropriate jurisdiction. This section 
+    shall survive the termination of this License.
+
+12) Attorneys' Fees. In any action to enforce the terms of this License or 
+    seeking damages relating thereto, the prevailing party shall be entitled 
+    to recover its costs and expenses, including, without limitation, 
+    reasonable attorneys' fees and costs incurred in connection with such 
+    action, including any appeal of such action. This section shall survive 
+    the termination of this License.
+
+13) Miscellaneous. If any provision of this License is held to be 
+    unenforceable, such provision shall be reformed only to the extent 
+    necessary to make it enforceable.
+
+14) Definition of "You" in This License. "You" throughout this License, 
+    whether in upper or lower case, means an individual or a legal entity 
+    exercising rights under, and complying with all of the terms of, this 
+    License. For legal entities, "You" includes any entity that controls, is 
+    controlled by, or is under common control with you. For purposes of this 
+    definition, "control" means (i) the power, direct or indirect, to cause 
+    the direction or management of such entity, whether by contract or 
+    otherwise, or (ii) ownership of fifty percent (50%) or more of the 
+    outstanding shares, or (iii) beneficial ownership of such entity.
+
+15) Right to Use. You may use the Original Work in all ways not otherwise 
+    restricted or conditioned by this License or by law, and Licensor 
+    promises not to interfere with or be responsible for such uses by You.
+
+16) Modification of This License. This License is Copyright © 2005 Lawrence 
+    Rosen. Permission is granted to copy, distribute, or communicate this 
+    License without modification. Nothing in this License permits You to 
+    modify this License as applied to the Original Work or to Derivative 
+    Works. However, You may modify the text of this License and copy, 
+    distribute or communicate your modified version (the "Modified 
+    License") and apply it to other original works of authorship subject to 
+    the following conditions: (i) You may not indicate in any way that your 
+    Modified License is the "Academic Free License" or "AFL" and you may not 
+    use those names in the name of your Modified License; (ii) You must 
+    replace the notice specified in the first paragraph above with the 
+    notice "Licensed under <insert your license name here>" or with a notice 
+    of your own that is not confusingly similar to the notice in this 
+    License; and (iii) You may not claim that your original works are open 
+    source software unless your Modified License has been approved by Open 
+    Source Initiative (OSI) and You comply with its license review and 
+    certification process.
+
+
+Some icons found in Galaxy are from the Silk Icons set, available under
+the Creative Commons Attribution 2.5 License, from:
+
+http://www.famfamfam.com/lab/icons/silk/
+
+
+Other images and documentation are licensed under the Creative Commons Attribution 3.0 (CC BY 3.0) License. See
+
+http://creativecommons.org/licenses/by/3.0/
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..c7e6e55
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,215 @@
+RELEASE_CURR:=16.01
+RELEASE_CURR_MINOR_NEXT:=$(shell expr `awk '$$1 == "VERSION_MINOR" {print $$NF}' lib/galaxy/version.py | tr -d \" | sed 's/None/0/;s/dev/0/;' ` + 1)
+RELEASE_NEXT:=16.04
+# TODO: This needs to be updated with create_release_rc
+#RELEASE_NEXT_BRANCH:=release_$(RELEASE_NEXT)
+RELEASE_NEXT_BRANCH:=dev
+RELEASE_UPSTREAM:=upstream
+MY_UPSTREAM:=origin
+# Location of virtualenv used for development.
+VENV?=.venv
+# Source virtualenv to execute command (flake8, sphinx, twine, etc...)
+IN_VENV=if [ -f $(VENV)/bin/activate ]; then . $(VENV)/bin/activate; fi;
+PROJECT_URL?=https://github.com/galaxyproject/galaxy
+GRUNT_DOCKER_NAME:=galaxy/client-builder:16.01
+GRUNT_EXEC?=node_modules/grunt-cli/bin/grunt
+DOCS_DIR=doc
+DOC_SOURCE_DIR=$(DOCS_DIR)/source
+SLIDESHOW_DIR=$(DOC_SOURCE_DIR)/slideshow
+OPEN_RESOURCE=bash -c 'open $$0 || xdg-open $$0'
+SLIDESHOW_TO_PDF?=bash -c 'docker run --rm -v `pwd`:/cwd astefanutti/decktape /cwd/$$0 /cwd/`dirname $$0`/`basename -s .html $$0`.pdf'
+
+all: help
+	@echo "This makefile is primarily used for building Galaxy's JS client. A sensible all target is not yet implemented."
+
+# Building docs requires sphinx and utilities be installed (see issue 3166) as well as pandoc.
+# Run following commands to setup the Python portion of these requirements:
+#   $ ./scripts/common_startup.sh
+#   $ . .venv/bin/activate
+#   $ pip install sphinx sphinx_rtd_theme lxml recommonmark
+docs: ## generate Sphinx HTML documentation, including API docs
+	$(IN_VENV) $(MAKE) -C doc clean
+	$(IN_VENV) $(MAKE) -C doc html
+
+docs-slides-ready:
+	test -f plantuml.jar ||  wget http://jaist.dl.sourceforge.net/project/plantuml/plantuml.jar
+	java -jar plantuml.jar -c $(DOC_SOURCE_DIR)/slideshow/architecture/images/plantuml_options.txt -tsvg $(SLIDESHOW_DIR)/architecture/images/ *.plantuml.txt
+	$(IN_VENV) python scripts/slideshow/build_slideshow.py 'Galaxy Architecture' $(SLIDESHOW_DIR)/architecture/galaxy_architecture.md
+
+docs-slides-export: docs-slides-ready
+	$(SLIDESHOW_TO_PDF) $(SLIDESHOW_DIR)/galaxy_architecture/galaxy_architecture.html
+
+_open-docs:
+	$(OPEN_RESOURCE) $(DOCS_DIR)/_build/html/index.html
+
+open-docs: docs _open-docs ## generate Sphinx HTML documentation and open in browser
+
+open-project: ## open project on github
+	$(OPEN_RESOURCE) $(PROJECT_URL)
+
+lint: ## check style using tox and flake8 for Python 2 and Python 3
+	$(IN_VENV) tox -e py27-lint && tox -e py34-lint
+
+release-ensure-upstream: ## Ensure the upstream remote used by release commands is set up
+ifeq ($(shell git remote -v | grep $(RELEASE_UPSTREAM)), )
+	git remote add $(RELEASE_UPSTREAM) git@github.com:galaxyproject/galaxy.git
+else
+	@echo "Remote $(RELEASE_UPSTREAM) already exists."
+endif
+
+release-merge-stable-to-next: release-ensure-upstream ## Merge last release into dev
+	git fetch $(RELEASE_UPSTREAM) && git checkout dev && git merge --ff-only $(RELEASE_UPSTREAM)/dev && git merge $(RELEASE_UPSTREAM)/$(RELEASE_PREVIOUS)
+
+release-push-dev: release-ensure-upstream # Push local dev branch upstream
+	git push $(RELEASE_UPSTREAM) dev
+
+release-issue: ## Create release issue on github
+	$(IN_VENV) python scripts/bootstrap_history.py --create-release-issue $(RELEASE_CURR)
+
+release-check-metadata: ## check github PR metadata for target release
+	$(IN_VENV) python scripts/bootstrap_history.py --check-release $(RELEASE_CURR)
+
+release-check-blocking-issues: ## Check github for release blocking issues
+	$(IN_VENV) python scripts/bootstrap_history.py --check-blocking-issues $(RELEASE_CURR)
+
+release-check-blocking-prs: ## Check github for release blocking PRs
+	$(IN_VENV) python scripts/bootstrap_history.py --check-blocking-prs $(RELEASE_CURR)
+
+release-bootstrap-history: ## bootstrap history for a new release
+	$(IN_VENV) python scripts/bootstrap_history.py --release $(RELEASE_CURR)
+
+npm-deps: ## Install NodeJS dependencies.
+	cd client && npm install
+
+grunt: npm-deps ## Calls out to Grunt to build client
+	cd client && $(GRUNT_EXEC)
+
+style: npm-deps ## Calls the style task of Grunt
+	cd client && $(GRUNT_EXEC) style
+
+client-install-libs: npm-deps ## Fetch updated client dependencies using bower.
+	cd client && $(GRUNT_EXEC) install-libs
+
+client: grunt style ## Rebuild all client-side artifacts
+
+grunt-docker-image: ## Build docker image for running grunt
+	docker build -t ${GRUNT_DOCKER_NAME} client
+
+grunt-docker: grunt-docker-image ## Run grunt inside docker
+	docker run -it -v `pwd`:/data ${GRUNT_DOCKER_NAME}
+
+clean-grunt-docker-image: ## Remove grunt docker image
+	docker rmi ${GRUNT_DOCKER_NAME}
+
+grunt-watch-style: npm-deps ## Execute watching style builder for dev purposes
+	cd client && $(GRUNT_EXEC) watch-style
+
+grunt-watch-develop: npm-deps ## Execute watching grunt builder for dev purposes (unpacked, allows debugger statements)
+	cd client && $(GRUNT_EXEC) watch --develop
+
+webpack-watch: npm-deps ## Execute watching webpack for dev purposes
+	cd client && ./node_modules/webpack/bin/webpack.js --watch
+
+client-develop: grunt-watch-style grunt-watch-develop webpack-watch  ## A useful target for parallel development building.
+	@echo "Remember to rerun 'make client' before committing!"
+
+
+# Release Targets
+release-create-rc: release-ensure-upstream ## Create a release-candidate branch
+	git checkout dev
+	git pull --ff-only $(RELEASE_UPSTREAM) dev
+	git push $(MY_UPSTREAM) dev
+	git checkout -b release_$(RELEASE_CURR)
+	git push $(MY_UPSTREAM) release_$(RELEASE_CURR)
+	git push $(RELEASE_UPSTREAM) release_$(RELEASE_CURR)
+	git checkout -b version-$(RELEASE_CURR)
+	sed -i.bak -e "s/^VERSION_MAJOR = .*/VERSION_MAJOR = \"$(RELEASE_CURR)\"/" lib/galaxy/version.py
+	sed -i.bak -e "s/^VERSION_MINOR = .*/VERSION_MINOR = \"rc1\"/" lib/galaxy/version.py
+	rm -f lib/galaxy/version.py.bak
+	git add lib/galaxy/version.py
+	git commit -m "Update version to $(RELEASE_CURR).rc1"
+	git checkout dev
+
+	git checkout -b version-$(RELEASE_NEXT).dev
+	sed -i.bak -e "s/^VERSION_MAJOR = .*/VERSION_MAJOR = \"$(RELEASE_NEXT)\"/" lib/galaxy/version.py
+	rm -f lib/galaxy/version.py.bak
+	git add lib/galaxy/version.py
+	git commit -m "Update version to $(RELEASE_NEXT).dev"
+
+	-git merge version-$(RELEASE_CURR)
+	git checkout --ours lib/galaxy/version.py
+	git add lib/galaxy/version.py
+	git commit -m "Merge branch 'version-$(RELEASE_CURR)' into version-$(RELEASE_NEXT).dev"
+	git push $(MY_UPSTREAM) version-$(RELEASE_CURR):version-$(RELEASE_CURR)
+	git push $(MY_UPSTREAM) version-$(RELEASE_NEXT).dev:version-$(RELEASE_NEXT).dev
+	git checkout dev
+	git branch -d version-$(RELEASE_CURR)
+	git branch -d version-$(RELEASE_NEXT).dev
+	# TODO: Use hub to automate these PR creations or push directly.
+	@echo "Open a PR from version-$(RELEASE_CURR) of your fork to release_$(RELEASE_CURR)"
+	@echo "Open a PR from version-$(RELEASE_NEXT).dev of your fork to dev"
+
+release-create: release-ensure-upstream ## Create a release branch
+	git checkout master
+	git pull --ff-only $(RELEASE_UPSTREAM) master
+	git push $(MY_UPSTREAM) master
+	git checkout release_$(RELEASE_CURR)
+	git pull --ff-only $(RELEASE_UPSTREAM) release_$(RELEASE_CURR)
+	#git push $(MY_UPSTREAM) release_$(RELEASE_CURR)
+	git checkout dev
+	git pull --ff-only $(RELEASE_UPSTREAM) dev
+	#git push $(MY_UPSTREAM) dev
+	# Test run of merging. If there are conflicts, it will fail here.
+	git merge release_$(RELEASE_CURR)
+	git checkout release_$(RELEASE_CURR)
+	sed -i.bak -e "s/^VERSION_MINOR = .*/VERSION_MINOR = None/" lib/galaxy/version.py
+	rm -f lib/galaxy/version.py.bak
+	git add lib/galaxy/version.py
+	git commit -m "Update version to $(RELEASE_CURR)"
+	git tag -m "Tag version $(RELEASE_CURR)" v$(RELEASE_CURR)
+
+	git checkout dev
+	-git merge release_$(RELEASE_CURR)
+	git checkout --ours lib/galaxy/version.py
+	git add lib/galaxy/version.py
+	git commit -m "Merge branch 'release_$(RELEASE_CURR)' into dev"
+	git checkout master
+	git merge release_$(RELEASE_CURR)
+	git push $(RELEASE_UPSTREAM) release_$(RELEASE_CURR):release_$(RELEASE_CURR)
+	git push $(RELEASE_UPSTREAM) dev:dev
+	git push $(RELEASE_UPSTREAM) master:master
+	git push $(RELEASE_UPSTREAM) --tags
+
+release-create-point: ## Create a point release
+	git pull --ff-only $(RELEASE_UPSTREAM) master
+	git push $(MY_UPSTREAM) master
+	git checkout release_$(RELEASE_CURR)
+	git pull --ff-only $(RELEASE_UPSTREAM) release_$(RELEASE_CURR)
+	#git push $(MY_UPSTREAM) release_$(RELEASE_CURR)
+	git checkout $(RELEASE_NEXT_BRANCH)
+	git pull --ff-only $(RELEASE_UPSTREAM) $(RELEASE_NEXT_BRANCH)
+	#git push $(MY_UPSTREAM) $(RELEASE_NEXT_BRANCH)
+	git merge release_$(RELEASE_CURR)
+	git checkout release_$(RELEASE_CURR)
+	sed -i.bak -e "s/^VERSION_MINOR = .*/VERSION_MINOR = \"$(RELEASE_CURR_MINOR_NEXT)\"/" lib/galaxy/version.py
+	rm -f lib/galaxy/version.py.bak
+	git add lib/galaxy/version.py
+	git commit -m "Update version to $(RELEASE_CURR).$(RELEASE_CURR_MINOR_NEXT)"
+	git tag -m "Tag version $(RELEASE_CURR).$(RELEASE_CURR_MINOR_NEXT)" v$(RELEASE_CURR).$(RELEASE_CURR_MINOR_NEXT)
+	git checkout $(RELEASE_NEXT_BRANCH)
+	-git merge release_$(RELEASE_CURR)
+	git checkout --ours lib/galaxy/version.py
+	git add lib/galaxy/version.py
+	git commit -m "Merge branch 'release_$(RELEASE_CURR)' into $(RELEASE_NEXT_BRANCH)"
+	git checkout master
+	git merge release_$(RELEASE_CURR)
+	#git push origin release_$(RELEASE_CURR):release_$(RELEASE_CURR)
+	#git push origin $(RELEASE_NEXT_BRANCH):release_$(RELEASE_NEXT_BRANCH)
+	#git push origin master:master
+	#git push origin --tags
+	git checkout release_$(RELEASE_CURR)
+
+.PHONY: help
+
+help:
+	@egrep '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..2f869e5
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,63 @@
+.. figure:: https://wiki.galaxyproject.org/Images/GalaxyLogo?action=AttachFile&do=get&target=galaxy_project_logo.jpg
+   :alt: Galaxy Logo
+
+The latest information about Galaxy is available via `https://galaxyproject.org/ <https://galaxyproject.org/>`__
+
+.. image:: https://img.shields.io/badge/questions-galaxy%20biostar-blue.svg
+    :target: https://biostar.usegalaxy.org
+    :alt: Ask a question
+
+.. image:: https://img.shields.io/badge/chat-irc.freenode.net%23galaxyproject-blue.svg
+    :target: https://webchat.freenode.net/?channels=galaxyproject
+    :alt: Chat with us
+
+.. image:: https://img.shields.io/badge/docs-release-green.svg
+    :target: https://docs.galaxyproject.org/en/master/
+    :alt: Release Documentation
+
+.. image:: https://travis-ci.org/galaxyproject/galaxy.svg?branch=dev
+    :target: https://travis-ci.org/galaxyproject/galaxy
+    :alt: Inspect the test results
+
+Galaxy Quickstart
+=================
+
+Galaxy requires Python 2.7. To check your Python version, run:
+
+.. code:: console
+
+    $ python -V
+    Python 2.7.3
+
+Start Galaxy:
+
+.. code:: console
+
+    $ sh run.sh
+
+Once Galaxy completes startup, you should be able to view Galaxy in your
+browser at:
+
+http://localhost:8080
+
+You may wish to make changes from the default configuration. This can be
+done in the ``config/galaxy.ini`` file. Tools can be either installed
+from the Tool Shed or added manually. For details please see the Galaxy
+wiki:
+
+https://wiki.galaxyproject.org/Admin/Tools/AddToolFromToolShedTutorial
+
+Not all dependencies are included for the tools provided in the sample
+``tool_conf.xml``. A full list of external dependencies is available at:
+
+https://wiki.galaxyproject.org/Admin/Tools/ToolDependencies
+
+Issues and Galaxy Development
+=============================
+
+Please see `CONTRIBUTING.md <CONTRIBUTING.md>`_.
+
+Roadmap
+=======
+
+Interested in the next steps for Galaxy? Take a look here: https://github.com/galaxyproject/galaxy/issues/1928
diff --git a/client/.docker-build.sh b/client/.docker-build.sh
new file mode 100755
index 0000000..7452b63
--- /dev/null
+++ b/client/.docker-build.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+cd /data/client && \
+    npm install && \
+    grunt
diff --git a/client/Dockerfile b/client/Dockerfile
new file mode 100644
index 0000000..ad1a9c5
--- /dev/null
+++ b/client/Dockerfile
@@ -0,0 +1,13 @@
+FROM digitallyseamless/nodejs-bower-grunt
+RUN mkdir /gx
+
+COPY package.json /gx/package.json
+
+RUN cd /gx && \
+    npm install -g && \
+    cd / && \
+    rm -rf /gx
+
+WORKDIR /data/client
+ADD ./.docker-build.sh /build.sh
+CMD ["/build.sh"]
diff --git a/client/GruntFile.js b/client/GruntFile.js
new file mode 100644
index 0000000..831d90d
--- /dev/null
+++ b/client/GruntFile.js
@@ -0,0 +1,28 @@
+module.exports = function(grunt) {
+    "use strict";
+
+    var GALAXY_PATHS = {
+            dist        : '../static/scripts',
+            maps        : '../static/maps',
+            // this symlink allows us to serve uncompressed scripts in DEV_PATH for use with sourcemaps
+            srcSymlink  : '../static/src',
+        },
+        TOOLSHED_PATHS = {
+            dist        : '../static/toolshed/scripts',
+            maps        : '../static/toolshed/maps',
+            srcSymlink  : '../static/toolshed/src',
+        };
+
+    grunt.config.set( 'app', 'galaxy' );
+    grunt.config.set( 'paths', GALAXY_PATHS );
+    if( grunt.option( 'app' ) === 'toolshed' ){
+        grunt.config.set( 'app', grunt.option( 'app' ) );
+        grunt.config.set( 'paths', TOOLSHED_PATHS );
+    }
+
+    grunt.loadNpmTasks('grunt-check-modules');
+    // see the sub directory grunt-tasks/ for individual task definitions
+    grunt.loadTasks( 'grunt-tasks' );
+    // note: 'handlebars' *not* 'templates' since handlebars doesn't call uglify
+    grunt.registerTask( 'default', [ 'check-modules', 'uglify', 'webpack' ] );
+};
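
The GruntFile above keeps individual task definitions in the grunt-tasks/ directory and
loads them via grunt.loadTasks. As a hedged sketch only (the task name and output below
are hypothetical, not part of the repository), a file picked up that way follows grunt's
standard contract: export a function that receives the grunt instance.

    // hypothetical grunt-tasks/report-paths.js -- illustrates the loadTasks contract only
    module.exports = function( grunt ){
        "use strict";
        // read the shared config values set in the GruntFile
        var app = grunt.config( 'app' ),
            paths = grunt.config( 'paths' );

        grunt.registerTask( 'report-paths', 'Print the output paths for the current app', function(){
            grunt.log.writeln( app + ' dist: ' + paths.dist );
            grunt.log.writeln( app + ' maps: ' + paths.maps );
        });
    };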
diff --git a/client/README.md b/client/README.md
new file mode 100644
index 0000000..1a55b22
--- /dev/null
+++ b/client/README.md
@@ -0,0 +1,123 @@
+Client Build System
+===================
+
+Builds and moves the client-side scripts necessary for running the Galaxy webapps. There's no need to use this system
+unless you are modifying or developing client-side scripts.
+
+The base dependencies you'll need are Node.js and the Node Package Manager
+(npm).  See nodejs.org for more information.
+
+
+Simple Full Build
+=================
+
+The simplest way to rebuild the entire client to incorporate any local changes
+is to run the 'client' rule in the Galaxy makefile, which is in the repository
+root.  This will also ensure any local node modules are installed.
+
+    make client
+
+
+Detailed Build Instructions
+===========================
+
+Once npm is installed, install the grunt task runner and its command-line interface into your global scope:
+
+    npm install -g grunt grunt-cli
+
+Next, from within this directory, install the local build dependencies:
+
+    cd client
+    npm install
+
+You're now ready to re-build the client scripts after modifying them.
+
+
+Rebuilding
+==========
+
+There are two methods for rebuilding: a complete rebuild and automatic, partial rebuilds while you develop.
+
+A complete rebuild can be done with the following (from the `client` directory):
+
+    grunt
+
+This will:
+
+1. compress the files in client/galaxy/scripts and place them in static/scripts
+2. generate source maps and place them in static/maps
+3. rebuild the webpack-based client apps
+
+
+Rebuilding Scripts Only
+=======================
+
+To re-minify all the individual javascript files:
+
+    grunt scripts
+
+
+Rebuilding Webpack Apps
+=======================
+
+To rebuild the webpack bundles for apps (compressed for production):
+
+    grunt webpack
+
+To rebuild the apps without compression:
+
+    grunt webpack-dev
+
+To rebuild without compression and watch and rebuild when scripts change:
+
+    grunt webpack-watch
+
+
+Changing Styles/CSS
+===================
+
+The CSS and styling used by Galaxy is also controlled from this directory. Galaxy uses LESS, a superset of CSS that
+compiles to CSS, for its styling. LESS files are kept in client/galaxy/style/less. Compiled CSS is in static/style/blue.
+
+Use grunt to recompile the LESS into CSS (from the `client` directory):
+
+    grunt style
+
+
+Grunt watch
+===========
+
+Grunt can also do an automatic, partial rebuild of any files you change *as you develop* by:
+
+1. opening a new terminal session
+2. `cd client`
+3. Watch with:
+    1. `grunt watch` to watch the *scripts/* folder
+    2. `grunt watch-style` to watch the *style/* folder
+
+This starts a new grunt watch process that monitors the files in the corresponding folder for changes, then copies
+and rebuilds them when they change.
+
+You can stop the watch task by pressing `Ctrl+C`. Note: you should also be able to background that task
+if you prefer.
+
+
+Using a Locally Installed Version of Grunt
+==========================================
+
+A non-global version of grunt and the grunt-cli are installed when using 'npm install'. If you'd rather build with that
+version, you'll need to use the full, local path when calling it:
+
+    ./node_modules/.bin/grunt
+    # or
+    ./node_modules/.bin/grunt watch
+
+
+The Toolshed Client Build
+=========================
+
+The commands mentioned above in 'Rebuilding' and 'Grunt watch' can also be applied to toolshed scripts by using the
+`--app=toolshed` option:
+
+    grunt watch --app=toolshed
+    grunt --app=toolshed
diff --git a/client/bower.json b/client/bower.json
new file mode 100644
index 0000000..6f45f82
--- /dev/null
+++ b/client/bower.json
@@ -0,0 +1,40 @@
+{
+  "name": "galaxy-client-libs",
+  "version": "0.0.0",
+  "description": "External client-side libraries used by Galaxy",
+  "keywords": [
+    "galaxy",
+    "galaxyproject"
+  ],
+  "homepage": "usegalaxy.org",
+  "dependencies": {
+    "jquery": "~1.12",
+    "ravenjs": "~3",
+    "underscore": "~1",
+    "backbone": "~1.3",
+    "bootstrap": "~3.3.2",
+    "bootstrap-tour": "~0.10.2",
+    "d3": "~3",
+    "farbtastic": "~2.0.0-alpha.1",
+    "toastr": "~2.1.0",
+    "jQTouch": "git://github.com/senchalabs/jQTouch#~1.0.0",
+    "bib2json": "git://github.com/galaxyproject/bib2json#galaxy",
+    "jquery-form": "~3.46.0",
+    "jquery-autocomplete": "git://github.com/dyve/jquery-autocomplete",
+    "select2": "~3.5.2",
+    "jStorage": "~0.4.12",
+    "jquery.cookie": "~1.4.1",
+    "dynatree": "~1.2.5",
+    "jquery-mousewheel": "~3.1.12",
+    "wymeditor": "~1.0.0-rc.1",
+    "jstree": "~3.0.9",
+    "jquery-ui": "git://github.com/jquery/jquery-ui.git#~1.11.2",
+    "jquery.complexify": "git://github.com/danpalmer/jquery.complexify.js.git#~0.5.1",
+    "threedubmedia.jquery.event": "*",
+    "jquery-migrate": "~1.4",
+    "requirejs": "~2"
+  },
+  "resolutions": {
+    "jquery": "~1.12"
+  }
+}
diff --git a/client/galaxy/scripts/apps/analysis.js b/client/galaxy/scripts/apps/analysis.js
new file mode 100644
index 0000000..b1d445d
--- /dev/null
+++ b/client/galaxy/scripts/apps/analysis.js
@@ -0,0 +1,177 @@
+
+var jQuery = require( 'jquery' ),
+    $ = jQuery,
+    GalaxyApp = require( 'galaxy' ).GalaxyApp,
+    QUERY_STRING = require( 'utils/query-string-parsing' ),
+    PANEL = require( 'layout/panel' ),
+    ToolPanel = require( './tool-panel' ),
+    HistoryPanel = require( './history-panel' ),
+    PAGE = require( 'layout/page' ),
+    ToolForm = require( 'mvc/tool/tool-form' ),
+    Tours = require( 'mvc/tours' );
+
+/** define the 'Analyze Data'/analysis/main/home page for Galaxy
+ *  * has a masthead
+ *  * a left tool menu to allow the user to load tools in the center panel
+ *  * a right history menu that shows the user's current data
+ *  * a center panel
+ *  Both panels (generally) persist while the center panel shows any
+ *  UI needed for the current step of an analysis, like:
+ *      * tool forms to set tool parameters,
+ *      * tables showing the contents of datasets
+ *      * etc.
+ */
+window.app = function app( options, bootstrapped ){
+    window.Galaxy = new GalaxyApp( options, bootstrapped );
+    Galaxy.debug( 'analysis app' );
+    // TODO: use router as App base (combining with Galaxy)
+
+    // .................................................... panels and page
+    var config = options.config,
+        toolPanel = new ToolPanel({
+            el                  : '#left',
+            userIsAnonymous     : Galaxy.user.isAnonymous(),
+            toolbox             : config.toolbox,
+            toolbox_in_panel    : config.toolbox_in_panel,
+            stored_workflow_menu_entries : config.stored_workflow_menu_entries,
+            nginx_upload_path   : config.nginx_upload_path,
+            ftp_upload_site     : config.ftp_upload_site,
+            default_genome      : config.default_genome,
+            default_extension   : config.default_extension,
+        }),
+        centerPanel = new PANEL.CenterPanel({
+            el              : '#center'
+        }),
+        historyPanel = new HistoryPanel({
+            el              : '#right',
+            galaxyRoot      : Galaxy.root,
+            userIsAnonymous : Galaxy.user.isAnonymous(),
+            allow_user_dataset_purge: config.allow_user_dataset_purge,
+        }),
+        analysisPage = new PAGE.PageLayoutView( _.extend( options, {
+            el              : 'body',
+            left            : toolPanel,
+            center          : centerPanel,
+            right           : historyPanel,
+        }));
+
+    // .................................................... decorate the galaxy object
+    // TODO: most of this is becoming unnecessary as we move to apps
+    Galaxy.page = analysisPage;
+    Galaxy.params = Galaxy.config.params;
+
+    // add tool panel to Galaxy object
+    Galaxy.toolPanel = toolPanel.tool_panel;
+    Galaxy.upload = toolPanel.uploadButton;
+
+    Galaxy.currHistoryPanel = historyPanel.historyView;
+    Galaxy.currHistoryPanel.listenToGalaxy( Galaxy );
+
+    // HACK: relocate this app-level display logic
+    Galaxy.app = {
+        display : function( view, target ){
+            // TODO: Remove this line after select2 update
+            $( '.select2-hidden-accessible' ).remove();
+            centerPanel.display( view );
+        },
+    };
+
+    // .................................................... routes
+    /** client-side router for the analysis page */
+    var router = new ( Backbone.Router.extend({
+        // TODO: not many client routes at this point - fill and remove from server.
+        // since we're at root here, this may be the last to be routed entirely on the client.
+        initialize : function( options ){
+            this.options = options;
+        },
+
+        /** override to parse query string into obj and send to each route */
+        execute: function( callback, args, name ){
+            Galaxy.debug( 'router execute:', callback, args, name );
+            var queryObj = QUERY_STRING.parse( args.pop() );
+            args.push( queryObj );
+            if( callback ){
+                callback.apply( this, args );
+            }
+        },
+
+        routes : {
+            '(/)' : 'home',
+            // TODO: remove annoying 'root' from root urls
+            '(/)root*' : 'home',
+            '(/)tours(/)(:tour_id)' : 'show_tours',
+        },
+
+        show_tours : function( tour_id ){
+            if (tour_id){
+                Tours.giveTour(tour_id);
+            }
+            else{
+                centerPanel.display( new Tours.ToursView() );
+            }
+        },
+
+        /** default route: choose what to load into the center panel from the query params */
+        home : function( params ){
+            // TODO: to router, remove Globals
+            // load a tool by id (tool_id) or rerun a previous tool execution (job_id)
+            if( params.tool_id || params.job_id ) {
+                if ( params.tool_id === 'upload1' ) {
+                    Galaxy.upload.show();
+                    this._loadCenterIframe( 'welcome' );
+                } else {
+                    this._loadToolForm( params );
+                }
+            } else {
+                // show the workflow run form
+                if( params.workflow_id ){
+                    this._loadCenterIframe( 'workflow/run?id=' + params.workflow_id );
+                // load the center iframe with controller.action: galaxy.org/?m_c=history&m_a=list -> history/list
+                } else if( params.m_c ){
+                    this._loadCenterIframe( params.m_c + '/' + params.m_a );
+                // otherwise load the default welcome page
+                } else {
+                    this._loadCenterIframe( 'welcome' );
+                }
+            }
+        },
+
+        /** load the center panel with a tool form described by the given params obj */
+        _loadToolForm : function( params ){
+            //TODO: load tool form code async
+            params.id = params.tool_id;
+            centerPanel.display( new ToolForm.View( params ) );
+        },
+
+        /** load the center panel iframe using the given url */
+        _loadCenterIframe : function( url, root ){
+            root = root || Galaxy.root;
+            url = root + url;
+            centerPanel.$( '#galaxy_main' ).prop( 'src', url );
+        },
+
+    }))( options );
+
+    // .................................................... when the page is ready
+    // render and start the router
+    $(function(){
+        analysisPage.render();
+        analysisPage.right.historyView.loadCurrentHistory();
+
+        // use galaxy to listen to history size changes and then re-fetch the user's total size (to update the quota meter)
+        // TODO: we have to do this here (and after every page.render()) because the masthead is re-created on each
+        // page render. It's re-created each time because there is no render function and can't be re-rendered without
+        // re-creating it.
+        Galaxy.listenTo( analysisPage.right.historyView, 'history-size-change', function(){
+            // fetch to update the quota meter adding 'current' for any anon-user's id
+            Galaxy.user.fetch({ url: Galaxy.user.urlRoot() + '/' + ( Galaxy.user.id || 'current' ) });
+        });
+        analysisPage.right.historyView.connectToQuotaMeter( analysisPage.masthead.quotaMeter );
+
+        // start the router - which will call any of the routes above
+        Backbone.history.start({
+            root        : Galaxy.root,
+            pushState   : true,
+        });
+    });
+};
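
The execute() override above is the piece that turns raw query strings into plain objects
before any route handler runs: Backbone passes the query portion as the final route
argument, which is popped, parsed, and pushed back. A minimal standalone sketch of the
same pattern, where parseQueryString is a hypothetical stand-in for
utils/query-string-parsing's parse:

    var Router = Backbone.Router.extend({
        routes : { '(/)' : 'home' },

        // runs for every matched route; rewrites the last argument in place
        execute : function( callback, args, name ){
            // e.g. 'tool_id=upload1' -> { tool_id: 'upload1' }
            args.push( parseQueryString( args.pop() ) );
            if( callback ){ callback.apply( this, args ); }
        },

        home : function( params ){
            // handlers receive an object: params.tool_id, params.workflow_id, ...
        }
    });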
diff --git a/client/galaxy/scripts/apps/history-panel.js b/client/galaxy/scripts/apps/history-panel.js
new file mode 100644
index 0000000..c0b4611
--- /dev/null
+++ b/client/galaxy/scripts/apps/history-panel.js
@@ -0,0 +1,95 @@
+var RightPanel = require( 'layout/panel' ).RightPanel,
+    Ui = require( 'mvc/ui/ui-misc' ),
+    historyOptionsMenu = require( 'mvc/history/options-menu' ),
+    CurrentHistoryView = require( 'mvc/history/history-view-edit-current' ).CurrentHistoryView,
+    _l = require( 'utils/localization' );
+
+/** the right hand panel in the analysis page that shows the current history */
+var HistoryPanel = RightPanel.extend({
+
+    title : _l( 'History' ),
+
+    initialize : function( options ){
+        RightPanel.prototype.initialize.call( this, options );
+        this.options = _.pick( options, 'userIsAnonymous', 'allow_user_dataset_purge', 'galaxyRoot' );
+
+        // view of the current history
+        this.historyView = new CurrentHistoryView({
+            className       : CurrentHistoryView.prototype.className + ' middle',
+            purgeAllowed    : options.allow_user_dataset_purge,
+            linkTarget      : 'galaxy_main'
+        });
+    },
+
+    /** override to change footer selector */
+    $toggleButton : function(){
+        return this.$( '.footer > .panel-collapse' );
+    },
+
+    render : function(){
+        RightPanel.prototype.render.call( this );
+        this.optionsMenu = historyOptionsMenu( this.$( '#history-options-button' ), {
+            anonymous    : this.options.userIsAnonymous,
+            purgeAllowed : this.options.allow_user_dataset_purge,
+            root         : this.options.galaxyRoot
+        });
+        this.$( '> .header .buttons [title]' ).tooltip({ placement: 'bottom' });
+        this.historyView.setElement( this.$( '.history-panel' ) );
+        this.$el.attr( 'class', 'history-right-panel' );
+    },
+
+    /** override to add buttons */
+    _templateHeader: function( data ){
+        var historyUrl = this.options.galaxyRoot + 'history';
+        var multiUrl = this.options.galaxyRoot + 'history/view_multiple';
+        return [
+            '<div class="header">',
+                '<div class="buttons">',
+                    // this button re-fetches the history and contents and re-renders the history panel
+                    '<a id="history-refresh-button" title="', _l( 'Refresh history' ), '" ',
+                       'class="" href="', historyUrl, '"><span class="fa fa-refresh"></span></a>',
+                    // opens a drop down menu with history related functions (like view all, delete, share, etc.)
+                    '<a id="history-options-button" title="', _l( 'History options' ), '" ',
+                       'class="" href="javascript:void(0)"><span class="fa fa-cog"></span></a>',
+                    !this.options.userIsAnonymous?
+                        [ '<a id="history-view-multi-button" title="', _l( 'View all histories' ), '" ',
+                             'class="" href="', multiUrl, '"><span class="fa fa-columns"></span></a>' ].join('') : '',
+                '</div>',
+                '<div class="title">', _.escape( this.title ), '</div>',
+            '</div>',
+        ].join('');
+    },
+
+    /** add history view div */
+    _templateBody : function( data ){
+        return [
+            '<div id="current-history-panel" class="history-panel middle"/>',
+        ].join('');
+    },
+
+    /** override to use simplified selector */
+    _templateFooter: function( data ){
+        return [
+            '<div class="footer">',
+                '<div class="panel-collapse ', _.escape( this.id ), '"/>',
+                '<div class="drag"/>',
+            '</div>',
+        ].join('');
+    },
+
+    events : {
+        'click #history-refresh-button'   : '_clickRefresh',
+        // override to change footer selector
+        'mousedown .footer > .drag'       : '_mousedownDragHandler',
+        'click .footer > .panel-collapse' : 'toggle'
+    },
+
+    _clickRefresh : function( ev ){
+        ev.preventDefault();
+        this.historyView.loadCurrentHistory();
+    },
+
+    toString : function(){ return 'HistoryPanel'; }
+});
+
+module.exports = HistoryPanel;
diff --git a/client/galaxy/scripts/apps/login.js b/client/galaxy/scripts/apps/login.js
new file mode 100644
index 0000000..63342e3
--- /dev/null
+++ b/client/galaxy/scripts/apps/login.js
@@ -0,0 +1,44 @@
+
+var jQuery = require( 'jquery' ),
+    $ = jQuery,
+    GalaxyApp = require( 'galaxy' ).GalaxyApp,
+    PANEL = require( 'layout/panel' ),
+    _l = require( 'utils/localization' ),
+    PAGE = require( 'layout/page' );
+
+window.app = function app( options, bootstrapped ){
+    window.Galaxy = new GalaxyApp( options, bootstrapped );
+    Galaxy.debug( 'login app' );
+    var redirect = encodeURI( options.redirect );
+
+    // TODO: remove iframe for user login (at least) and render login page from here
+    // then remove this redirect
+    if( !options.show_welcome_with_login ){
+        var params = jQuery.param({ use_panels : 'True', redirect : redirect });
+        window.location.href = Galaxy.root + 'user/login?' + params;
+        return;
+    }
+
+    var loginPage = new PAGE.PageLayoutView( _.extend( options, {
+        el      : 'body',
+        center  : new PANEL.CenterPanel({ el : '#center' }),
+        right   : new PANEL.RightPanel({
+            title : _l( 'Login required' ),
+            el : '#right'
+        }),
+    }));
+
+    $(function(){
+        // TODO: incorporate *actual* referrer/redirect info as the original page does
+        var params = jQuery.param({ redirect : redirect }),
+            loginUrl = Galaxy.root + 'user/login?' + params;
+        loginPage.render();
+
+        // welcome page (probably) needs to remain sandboxed
+        loginPage.center.$( '#galaxy_main' ).prop( 'src', options.welcome_url );
+
+        loginPage.right.$( '.unified-panel-body' )
+            .css( 'overflow', 'hidden' )
+            .html( '<iframe src="' + loginUrl + '" frameborder="0" style="width: 100%; height: 100%;"/>' );
+    });
+};
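
Both login URLs above are assembled with jQuery.param, which serializes a plain object
into a URL-encoded query string. A small illustration (the redirect value is made up):

    var params = jQuery.param({ use_panels : 'True', redirect : '/workflow/list' });
    // params === 'use_panels=True&redirect=%2Fworkflow%2Flist'
    window.location.href = Galaxy.root + 'user/login?' + params;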
diff --git a/client/galaxy/scripts/apps/tool-panel.js b/client/galaxy/scripts/apps/tool-panel.js
new file mode 100644
index 0000000..7c4c623
--- /dev/null
+++ b/client/galaxy/scripts/apps/tool-panel.js
@@ -0,0 +1,114 @@
+var LeftPanel = require( 'layout/panel' ).LeftPanel,
+    Tools = require( 'mvc/tool/tools' ),
+    Upload = require( 'mvc/upload/upload-view' ),
+    _l = require( 'utils/localization' );
+
+/* Builds the tool menu panel on the left of the analysis page */
+var ToolPanel = LeftPanel.extend({
+
+    title : _l( 'Tools' ),
+
+    initialize: function( options ){
+        LeftPanel.prototype.initialize.call( this, options );
+        this.log( this + '.initialize:', options );
+
+        /** @type {Object[]} descriptions of user's workflows to be shown in the tool menu */
+        this.stored_workflow_menu_entries = options.stored_workflow_menu_entries || [];
+
+        // create tool search, tool panel, and tool panel view.
+        var tool_search = new Tools.ToolSearch({
+            hidden      : false
+        });
+        var tools = new Tools.ToolCollection( options.toolbox );
+        this.tool_panel = new Tools.ToolPanel({
+            tool_search : tool_search,
+            tools       : tools,
+            layout      : options.toolbox_in_panel
+        });
+        this.tool_panel_view = new Tools.ToolPanelView({ model: this.tool_panel });
+
+        // add upload modal
+        this.uploadButton = new Upload({
+            nginx_upload_path   : options.nginx_upload_path,
+            ftp_upload_site     : options.ftp_upload_site,
+            default_genome      : options.default_genome,
+            default_extension   : options.default_extension,
+        });
+    },
+
+    render : function(){
+        var self = this;
+        LeftPanel.prototype.render.call( self );
+        self.$( '.panel-header-buttons' ).append( self.uploadButton.$el );
+
+        // if there are tools, render panel and display everything
+        if (self.tool_panel.get( 'layout' ).size() > 0) {
+            self.tool_panel_view.render();
+            //TODO: why the hide/show?
+            self.$( '.toolMenu' ).show();
+        }
+        self.$( '.toolMenuContainer' ).prepend( self.tool_panel_view.$el );
+
+        self._renderWorkflowMenu();
+
+        // if a tool link has the minsizehint attribute, handle it here (generally by hiding the tool panel)
+        self.$( 'a[minsizehint]' ).click( function() {
+            if ( parent.handle_minwidth_hint ) {
+                parent.handle_minwidth_hint( $( this ).attr( 'minsizehint' ) );
+            }
+        });
+    },
+
+    /** build the dom for the workflow portion of the tool menu */
+    _renderWorkflowMenu : function(){
+        var self = this;
+        // add internal workflow list
+        self.$( '#internal-workflows' ).append( self._templateTool({
+            title   : _l( 'All workflows' ),
+            href    : 'workflow/list_for_run'
+        }));
+        _.each( self.stored_workflow_menu_entries, function( menu_entry ){
+            self.$( '#internal-workflows' ).append( self._templateTool({
+                title : menu_entry.stored_workflow.name,
+                href  : 'workflow/run?id=' + menu_entry.encoded_stored_workflow_id
+            }));
+        });
+    },
+
+    /** build a link to one tool */
+    _templateTool: function( tool ) {
+        return [
+            '<div class="toolTitle">',
+                // global
+                '<a href="', Galaxy.root, tool.href, '" target="galaxy_main">', tool.title, '</a>',
+            '</div>'
+        ].join('');
+    },
+
+    /** override to include initial menu DOM and workflow section */
+    _templateBody : function(){
+        return [
+            '<div class="unified-panel-body unified-panel-body-background">',
+                '<div class="toolMenuContainer">',
+                    '<div class="toolMenu" style="display: none">',
+                        '<div id="search-no-results" style="display: none; padding-top: 5px">',
+                            '<em><strong>', _l( 'Search did not match any tools.' ), '</strong></em>',
+                        '</div>',
+                    '</div>',
+                    '<div class="toolSectionPad"/>',
+                    '<div class="toolSectionPad"/>',
+                    '<div class="toolSectionTitle" id="title_XXinternalXXworkflow">',
+                        '<span>', _l( 'Workflows' ), '</span>',
+                    '</div>',
+                    '<div id="internal-workflows" class="toolSectionBody">',
+                        '<div class="toolSectionBg"/>',
+                    '</div>',
+                '</div>',
+            '</div>'
+        ].join('');
+    },
+
+    toString : function(){ return 'ToolPanel'; }
+});
+
+module.exports = ToolPanel;
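
_renderWorkflowMenu only reads two fields from each entry in stored_workflow_menu_entries,
so the expected shape can be sketched directly from the code above (the values here are
invented for illustration):

    // hypothetical example of one menu entry as consumed by _renderWorkflowMenu
    var menu_entry = {
        stored_workflow            : { name : 'QC and mapping' },
        encoded_stored_workflow_id : 'f2db41e1fa331b3e'
    };
    // rendered as:
    // <a href="<Galaxy.root>workflow/run?id=f2db41e1fa331b3e" target="galaxy_main">QC and mapping</a>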
diff --git a/client/galaxy/scripts/galaxy.interactive_environments.js b/client/galaxy/scripts/galaxy.interactive_environments.js
new file mode 100644
index 0000000..eb2ed98
--- /dev/null
+++ b/client/galaxy/scripts/galaxy.interactive_environments.js
@@ -0,0 +1,59 @@
+/**
+ * Internal function to remove content from the main area and add the notebook.
+ * Not idempotent
+ */
+function append_notebook(url){
+    clear_main_area();
+    $('#main').append('<iframe frameBorder="0" seamless="seamless" style="width: 100%; height: 100%; overflow:hidden;" scrolling="no" src="'+ url +'"></iframe>'
+    );
+}
+
+function clear_main_area(){
+    $('#spinner').remove();
+    $('#main').children().remove();
+}
+
+function display_spinner(){
+    $('#main').append('<img id="spinner" src="' + galaxy_root + 'static/style/largespinner.gif" style="position:absolute;margin:auto;top:0;left:0;right:0;bottom:0;">');
+}
+
+
+/**
+ * Test availability of a URL, and call a callback when done.
+ * http://stackoverflow.com/q/25390206/347368
+ * @param {String} url: URL to test availability of. Must return a 200 (302->200 is OK).
+ * @param {Function} success_callback: function to call once successfully connected.
+ *
+ */
+function test_ie_availability(url, success_callback){
+    var request_count = 0;
+    display_spinner();
+    interval = setInterval(function(){
+        $.ajax({
+            url: url,
+            xhrFields: {
+                withCredentials: true
+            },
+            type: "GET",
+            timeout: 500,
+            success: function(){
+                console.log("Connected to IE, returning");
+                clearInterval(interval);
+                success_callback();
+            },
+            error: function(jqxhr, status, error){
+                request_count++;
+                console.log("Request " + request_count);
+                if(request_count > 30){
+                    clearInterval(interval);
+                    clear_main_area();
+                    toastr.error(
+                        "Could not connect to IE, contact your administrator",
+                        "Error",
+                        {'closeButton': true, 'timeOut': 20000, 'tapToDismiss': false}
+                    );
+                }
+            }
+        });
+    }, 1000);
+}
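
A typical call site, sketched under the assumption that the notebook URL comes from the
interactive environment's template (the 'ie_proxy/jupyter/' path below is an invented
example): poll until the proxied endpoint answers, then embed it.

    // hedged usage sketch -- the URL is illustrative, not a real Galaxy endpoint
    var notebook_url = galaxy_root + 'ie_proxy/jupyter/';
    test_ie_availability( notebook_url, function(){
        append_notebook( notebook_url );
    });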
diff --git a/client/galaxy/scripts/galaxy.js b/client/galaxy/scripts/galaxy.js
new file mode 100644
index 0000000..9184f70
--- /dev/null
+++ b/client/galaxy/scripts/galaxy.js
@@ -0,0 +1,256 @@
+define([
+    'libs/underscore',
+    'libs/backbone',
+    'mvc/base-mvc',
+    'mvc/user/user-model',
+    'utils/metrics-logger',
+    'utils/add-logging',
+    'utils/localization'
+], function( _, Backbone, BASE_MVC, userModel, metricsLogger, addLogging, localize ){
+
+// TODO: move into a singleton pattern and have dependents import Galaxy
+// ============================================================================
+/** Base galaxy client-side application.
+ *      Initializes:
+ *          logger      : the logger/metrics-logger
+ *          localize    : the string localizer
+ *          config      : the current configuration (any k/v in
+ *              galaxy.ini available from the configuration API)
+ *          user        : the current user (as a mvc/user/user-model)
+ */
+function GalaxyApp( options, bootstrapped ){
+    var self = this;
+    return self._init( options || {}, bootstrapped || {} );
+}
+
+// add logging shortcuts for this object
+addLogging( GalaxyApp, 'GalaxyApp' );
+
+// a debug flag can be set via local storage and made available during script/page loading
+var DEBUGGING_KEY = 'galaxy:debug',
+    NAMESPACE_KEY = DEBUGGING_KEY + ':namespaces',
+    localDebugging = false;
+try {
+    localDebugging = localStorage.getItem( DEBUGGING_KEY ) == 'true';
+} catch( storageErr ){
+    console.log( localize( 'localStorage not available for debug flag retrieval' ) );
+}
+
+/** initialize options and sub-components */
+GalaxyApp.prototype._init = function __init( options, bootstrapped ){
+    var self = this;
+    _.extend( self, Backbone.Events );
+    if( localDebugging ){
+        self.logger = console;
+        console.debug( 'debugging galaxy:', 'options:', options, 'bootstrapped:', bootstrapped );
+    }
+
+    self._processOptions( options );
+    // special case for root
+    self.root = options.root || '/';
+
+    self._initConfig( options.config || {} );
+    self._patchGalaxy( window.Galaxy );
+
+    self._initLogger( self.options.loggerOptions || {} );
+    // at this point, either logging or not and namespaces are enabled - chat it up
+    self.debug( 'GalaxyApp.options: ', self.options );
+    self.debug( 'GalaxyApp.config: ', self.config );
+    self.debug( 'GalaxyApp.logger: ', self.logger );
+
+    self._initLocale();
+    self.debug( 'GalaxyApp.localize: ', self.localize );
+
+    self.config = options.config || {};
+    self.debug( 'GalaxyApp.config: ', self.config );
+
+    self._initUser( options.user || {} );
+    self.debug( 'GalaxyApp.user: ', self.user );
+
+    self._setUpListeners();
+    self.trigger( 'ready', self );
+
+    return self;
+};
+
+/** default options */
+GalaxyApp.prototype.defaultOptions = {
+    /** monkey patch attributes from existing window.Galaxy object? */
+    patchExisting   : true,
+    /** root url of this app */
+    root            : '/'
+};
+
+/** filter to options present in defaultOptions (and default to them) */
+GalaxyApp.prototype._processOptions = function _processOptions( options ){
+    var self = this,
+        defaults = self.defaultOptions;
+
+    self.options = {};
+    for( var k in defaults ){
+        if( defaults.hasOwnProperty( k ) ){
+            self.options[ k ] = ( options.hasOwnProperty( k ) )?( options[ k ] ):( defaults[ k ] );
+        }
+    }
+    return self;
+};
+
+/** parse the config and any extra info derived from it */
+GalaxyApp.prototype._initConfig = function _initConfig( config ){
+    var self = this;
+    self.config = config;
+
+    // give precedence to local debugging for this setting
+    self.config.debug = localDebugging || self.config.debug;
+
+    return self;
+};
+
+/** monkey patch attributes from an existing window.Galaxy object onto this app */
+GalaxyApp.prototype._patchGalaxy = function _patchGalaxy( patchWith ){
+    var self = this;
+    // in case req or plain script tag order has created a prev. version of the Galaxy obj...
+    if( self.options.patchExisting && patchWith ){
+        // self.debug( 'found existing Galaxy object:', patchWith );
+        // ...(for now) monkey patch any added attributes that the previous Galaxy may have had
+        //TODO: move those attributes to more formal assignment in GalaxyApp
+        for( var k in patchWith ){
+            if( patchWith.hasOwnProperty( k ) ){
+                // self.debug( '\t patching in ' + k + ' to Galaxy:', self[ k ] );
+                self[ k ] = patchWith[ k ];
+            }
+        }
+    }
+};
+
+/** set up the metrics logger (utils/metrics-logger) and pass loggerOptions */
+GalaxyApp.prototype._initLogger = function _initLogger( loggerOptions ){
+    var self = this;
+
+    // default to console logging at the debug level if the debug flag is set
+    if( self.config.debug ){
+        loggerOptions.consoleLogger = loggerOptions.consoleLogger || console;
+        loggerOptions.consoleLevel = loggerOptions.consoleLevel || metricsLogger.MetricsLogger.ALL;
+        // load any logging namespaces from localStorage if we can
+        try {
+            loggerOptions.consoleNamespaceWhitelist = localStorage.getItem( NAMESPACE_KEY ).split( ',' );
+        } catch( storageErr ){}
+    }
+
+    self.logger = new metricsLogger.MetricsLogger( loggerOptions );
+    self.emit = {};
+    [ 'log', 'debug', 'info', 'warn', 'error', 'metric' ].map(function( i ) {
+        self.emit[ i ] = function( data ){
+            self.logger.emit( i, arguments[ 0 ], Array.prototype.slice.call( arguments, 1 ) );
+        };
+    });
+
+    if( self.config.debug ){
+        // add this logger to mvc's loggable mixin so that all models can use the logger
+        BASE_MVC.LoggableMixin.logger = self.logger;
+    }
+    return self;
+};
+
+/** add the localize fn to this object and the window namespace (as '_l') */
+GalaxyApp.prototype._initLocale = function _initLocale( options ){
+    var self = this;
+    self.debug( '_initLocale:', options );
+    self.localize = localize;
+    // add to window as global shortened alias
+    // TODO: temporary - remove when can require for plugins
+    window._l = self.localize;
+    return self;
+};
+
+/** set up the current user as a Backbone model (mvc/user/user-model) */
+GalaxyApp.prototype._initUser = function _initUser( userJSON ){
+    var self = this;
+    self.debug( '_initUser:', userJSON );
+    self.user = new userModel.User( userJSON );
+    self.user.logger = self.logger;
+    return self;
+};
+
+/** Set up DOM/jQuery/Backbone event listeners enabled for all pages */
+GalaxyApp.prototype._setUpListeners = function _setUpListeners(){
+    var self = this;
+
+    // hook to jq beforeSend to record the most recent ajax call and cache some data about it
+    /** cached info about the last ajax call made through jQuery */
+    self.lastAjax = {};
+    $( document ).bind( 'ajaxSend', function( ev, xhr, options ){
+        var data = options.data;
+        try {
+            data = JSON.parse( data );
+        } catch( err ){}
+
+        self.lastAjax = {
+            url     : location.href.slice( 0, -1 ) + options.url,
+            data    : data
+        };
+        //TODO:?? we might somehow manage to *retry* ajax using either this hook or Backbone.sync
+    });
+    return self;
+};
+
+/** Turn debugging/console-output on/off by passing boolean. Pass nothing to get current setting. */
+GalaxyApp.prototype.debugging = function _debugging( setting ){
+    var self = this;
+    try {
+        if( setting === undefined ){
+            return localStorage.getItem( DEBUGGING_KEY ) === 'true';
+        }
+        if( setting ){
+            localStorage.setItem( DEBUGGING_KEY, true );
+            return true;
+        }
+
+        localStorage.removeItem( DEBUGGING_KEY );
+        // also remove all namespaces
+        self.debuggingNamespaces( null );
+
+    } catch( storageErr ){
+        console.log( localize( 'localStorage not available for debug flag retrieval' ) );
+    }
+    return false;
+};
+
+/** Add, remove, or clear namespaces from the debugging filters
+ *  Pass no arguments to retrieve the existing namespaces as an array.
+ *  Pass in null to clear all namespaces (all logging messages will show now).
+ *  Pass in an array of strings or single string of the namespaces to filter to.
+ *  Returns the new/current namespaces as an array.
+ */
+GalaxyApp.prototype.debuggingNamespaces = function _debuggingNamespaces( namespaces ){
+    var self = this;
+    try {
+        if( namespaces === undefined ){
+            var csv = localStorage.getItem( NAMESPACE_KEY );
+            return typeof( csv ) === 'string'? csv.split( ',' ) : [];
+        } else if( namespaces === null ) {
+            localStorage.removeItem( NAMESPACE_KEY );
+        } else {
+            localStorage.setItem( NAMESPACE_KEY, namespaces );
+        }
+        var newSettings = self.debuggingNamespaces();
+        if( self.logger ){
+            self.logger.options.consoleNamespaceWhitelist = newSettings;
+        }
+        return newSettings;
+    } catch( storageErr ){
+        console.log( localize( 'localStorage not available for debug namespace retrieval' ) );
+    }
+};
+
+/** string rep */
+GalaxyApp.prototype.toString = function toString(){
+    var userEmail = this.user? ( this.user.get( 'email' ) || '(anonymous)' ) : 'uninitialized';
+    return 'GalaxyApp(' + userEmail + ')';
+};
+
+// ============================================================================
+    return {
+        GalaxyApp : GalaxyApp
+    };
+});
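
Since both the debug flag and the namespace whitelist persist in localStorage, they can be
toggled from the browser console and survive reloads. A short usage sketch (the namespace
names are illustrative):

    Galaxy.debugging( true );                       // sets 'galaxy:debug' in localStorage
    Galaxy.debuggingNamespaces( 'history,tools' );  // only these namespaces will log
    Galaxy.debugging();                             // -> true (query the current setting)
    Galaxy.debugging( false );                      // clears the flag and all namespaces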
diff --git a/client/galaxy/scripts/galaxy.library.js b/client/galaxy/scripts/galaxy.library.js
new file mode 100644
index 0000000..9c5bf63
--- /dev/null
+++ b/client/galaxy/scripts/galaxy.library.js
@@ -0,0 +1,233 @@
+// MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
+// === MAIN GALAXY LIBRARY MODULE ====
+// MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
+
+define([
+  "layout/masthead",
+  "utils/utils",
+  "libs/toastr",
+  "mvc/base-mvc",
+  "mvc/library/library-model",
+  "mvc/library/library-folderlist-view",
+  "mvc/library/library-librarylist-view",
+  "mvc/library/library-librarytoolbar-view",
+  "mvc/library/library-foldertoolbar-view",
+  "mvc/library/library-dataset-view",
+  "mvc/library/library-library-view",
+  "mvc/library/library-folder-view"
+  ],
+  function(mod_masthead,
+    mod_utils,
+    mod_toastr,
+    mod_baseMVC,
+    mod_library_model,
+    mod_folderlist_view,
+    mod_librarylist_view,
+    mod_librarytoolbar_view,
+    mod_foldertoolbar_view,
+    mod_library_dataset_view,
+    mod_library_library_view,
+    mod_library_folder_view
+   ) {
+
+// ============================================================================
+/**
+ * The Data Libraries router. Takes care of triggering routes
+ * and sends users to the proper pieces of the application.
+ */
+var LibraryRouter = Backbone.Router.extend({
+
+  initialize: function() {
+    this.routesHit = 0;
+    // keep count of number of routes handled by the application
+    Backbone.history.on( 'route', function() { this.routesHit++; }, this );
+
+    this.bind( 'route', this.trackPageview );
+  },
+
+  routes: {
+    ""                                                              : "libraries",
+    "page/:show_page"                                               : "libraries_page",
+    "library/:library_id/permissions"                               : "library_permissions",
+    "folders/:folder_id/permissions"                                : "folder_permissions",
+    "folders/:id"                                                   : "folder_content",
+    "folders/:id/page/:show_page"                                   : "folder_page",
+    "folders/:folder_id/datasets/:dataset_id"                       : "dataset_detail",
+    "folders/:folder_id/datasets/:dataset_id/permissions"           : "dataset_permissions",
+    "folders/:folder_id/datasets/:dataset_id/versions/:ldda_id"     : "dataset_version",
+    "folders/:folder_id/download/:format"                           : "download",
+    "folders/:folder_id/import/:source"                             : "import_datasets"
+  },
+
+  /**
+   * If more than one route has been hit, the user did not land on the current
+   * page directly, so we can go back safely. Otherwise go to the home page.
+   * Use replaceState if available so the navigation doesn't create an
+   * extra history entry
+   */
+  back: function() {
+    if( this.routesHit > 1 ) {
+      window.history.back();
+    } else {
+      this.navigate( '#', { trigger:true, replace:true } );
+    }
+  },
+
+  /**
+   * Track every route change as a page view in Google Analytics.
+   */
+  trackPageview: function () {
+    var url = Backbone.history.getFragment();
+    //prepend slash
+    if (!/^\//.test(url) && url != "") {
+      url = "/" + url;
+    }
+    if ( typeof ga !== 'undefined' ) {
+      ga( 'send', 'pageview', Galaxy.root + 'library/list' + url );
+    }
+  }
+});
+
+// ============================================================================
+/** session storage for library preferences */
+var LibraryPrefs = mod_baseMVC.SessionStorageModel.extend({
+    defaults : {
+        with_deleted      : false,
+        sort_order        : 'asc',
+        sort_by           : 'name',
+        library_page_size : 20,
+        folder_page_size  : 15
+    }
+});
+
+// ============================================================================
+/**
+ * Main view of the Galaxy Data Libraries. Stores pointers to other subviews
+ * and defines what router should do on the route triggers.
+ */
+var GalaxyLibrary = Backbone.View.extend({
+
+    libraryToolbarView: null,
+    libraryListView: null,
+    library_router: null,
+    libraryView: null,
+    folderToolbarView: null,
+    folderListView: null,
+    datasetView: null,
+
+    initialize : function(){
+
+      // This should go upstream in the js app once available
+      if ( window.Galaxy.config.ga_code ){
+      (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+            (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+            m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+            })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+            ga('create', window.Galaxy.config.ga_code, 'auto');
+            ga('send', 'pageview');
+      }
+
+        Galaxy.libraries = this;
+
+        this.preferences = new LibraryPrefs( { id: 'global-lib-prefs' } );
+
+        this.library_router = new LibraryRouter();
+
+        this.library_router.on( 'route:libraries', function() {
+            if ( Galaxy.libraries.libraryToolbarView){
+                Galaxy.libraries.libraryToolbarView.$el.unbind('click');
+            }
+            Galaxy.libraries.libraryToolbarView = new mod_librarytoolbar_view.LibraryToolbarView();
+            Galaxy.libraries.libraryListView = new mod_librarylist_view.LibraryListView();
+        });
+
+        this.library_router.on('route:libraries_page', function( show_page ) {
+            if ( Galaxy.libraries.libraryToolbarView === null ){
+                Galaxy.libraries.libraryToolbarView = new mod_librarytoolbar_view.LibraryToolbarView();
+                Galaxy.libraries.libraryListView = new mod_librarylist_view.LibraryListView( { show_page: show_page } );
+            } else {
+                Galaxy.libraries.libraryListView.render( { show_page: show_page } );
+            }
+        });
+
+        this.library_router.on( 'route:folder_content', function( id ) {
+            if (Galaxy.libraries.folderToolbarView){
+                Galaxy.libraries.folderToolbarView.$el.unbind( 'click' );
+            }
+            Galaxy.libraries.folderToolbarView = new mod_foldertoolbar_view.FolderToolbarView( { id: id } );
+            Galaxy.libraries.folderListView = new mod_folderlist_view.FolderListView( { id: id } );
+        });
+
+        this.library_router.on( 'route:folder_page', function( id, show_page ) {
+            if ( Galaxy.libraries.folderToolbarView === null ){
+                Galaxy.libraries.folderToolbarView = new mod_foldertoolbar_view.FolderToolbarView( {id: id} );
+                Galaxy.libraries.folderListView = new mod_folderlist_view.FolderListView( { id: id, show_page: show_page } );
+            } else {
+                Galaxy.libraries.folderListView.render( { id: id, show_page: parseInt( show_page ) } );
+            }
+        });
+
+        this.library_router.on( 'route:download', function( folder_id, format ) {
+            if ( $( '#folder_list_body' ).find( ':checked' ).length === 0 ) {
+                mod_toastr.info( 'You must select at least one dataset to download' );
+                Galaxy.libraries.library_router.navigate( 'folders/' + folder_id, { trigger: true, replace: true } );
+            } else {
+                Galaxy.libraries.folderToolbarView.download( folder_id, format );
+                Galaxy.libraries.library_router.navigate( 'folders/' + folder_id, { trigger: false, replace: true } );
+            }
+        });
+
+        this.library_router.on( 'route:dataset_detail', function(folder_id, dataset_id){
+            if (Galaxy.libraries.datasetView){
+                Galaxy.libraries.datasetView.$el.unbind('click');
+            }
+            Galaxy.libraries.datasetView = new mod_library_dataset_view.LibraryDatasetView({id: dataset_id, show_version: false, show_permissions: false});
+        });
+
+        this.library_router.on( 'route:dataset_version', function(folder_id, dataset_id, ldda_id){
+            if (Galaxy.libraries.datasetView){
+                Galaxy.libraries.datasetView.$el.unbind('click');
+            }
+            Galaxy.libraries.datasetView = new mod_library_dataset_view.LibraryDatasetView({id: dataset_id, ldda_id: ldda_id, show_version: true});
+        });
+
+        this.library_router.on( 'route:dataset_permissions', function(folder_id, dataset_id){
+            if (Galaxy.libraries.datasetView){
+                Galaxy.libraries.datasetView.$el.unbind('click');
+            }
+            Galaxy.libraries.datasetView = new mod_library_dataset_view.LibraryDatasetView({id: dataset_id, show_permissions: true});
+        });
+
+        this.library_router.on( 'route:library_permissions', function(library_id){
+            if (Galaxy.libraries.libraryView){
+                Galaxy.libraries.libraryView.$el.unbind('click');
+            }
+            Galaxy.libraries.libraryView = new mod_library_library_view.LibraryView({id: library_id, show_permissions: true});
+        });
+
+        this.library_router.on( 'route:folder_permissions', function(folder_id){
+            if (Galaxy.libraries.folderView){
+                Galaxy.libraries.folderView.$el.unbind('click');
+            }
+            Galaxy.libraries.folderView = new mod_library_folder_view.FolderView({id: folder_id, show_permissions: true});
+        });
+
+        this.library_router.on( 'route:import_datasets', function( folder_id, source ){
+            if ( Galaxy.libraries.folderToolbarView && Galaxy.libraries.folderListView ){
+                Galaxy.libraries.folderToolbarView.showImportModal( { source:source } );
+            } else {
+                Galaxy.libraries.folderToolbarView = new mod_foldertoolbar_view.FolderToolbarView( { id: folder_id } );
+                Galaxy.libraries.folderListView = new mod_folderlist_view.FolderListView( { id: folder_id } );
+                Galaxy.libraries.folderToolbarView.showImportModal( { source: source } );
+            }
+        });
+
+        Backbone.history.start({pushState: false});
+    }
+});
+
+return {
+    GalaxyApp: GalaxyLibrary
+};
+
+});
diff --git a/client/galaxy/scripts/galaxy.pages.js b/client/galaxy/scripts/galaxy.pages.js
new file mode 100644
index 0000000..63f92b7
--- /dev/null
+++ b/client/galaxy/scripts/galaxy.pages.js
@@ -0,0 +1,642 @@
+
+var CONTROLS =
+{
+    // Item types.
+    ITEM_HISTORY : "item_history",
+    ITEM_DATASET : "item_dataset",
+    ITEM_WORKFLOW : "item_workflow",
+    ITEM_PAGE : "item_page",
+    ITEM_VISUALIZATION : "item_visualization",
+
+    // Link dialogs.
+    DIALOG_HISTORY_LINK : "link_history",
+    DIALOG_DATASET_LINK : "link_dataset",
+    DIALOG_WORKFLOW_LINK : "link_workflow",
+    DIALOG_PAGE_LINK : "link_page",
+    DIALOG_VISUALIZATION_LINK : "link_visualization",
+
+    // Embed dialogs.
+    DIALOG_EMBED_HISTORY : "embed_history",
+    DIALOG_EMBED_DATASET : "embed_dataset",
+    DIALOG_EMBED_WORKFLOW : "embed_workflow",
+    DIALOG_EMBED_PAGE : "embed_page",
+    DIALOG_EMBED_VISUALIZATION : "embed_visualization"
+};
+
+// Initialize Galaxy elements.
+function init_galaxy_elts(wym)
+{
+    // Set up events to make annotation easy.
+    $('.annotation', wym._doc.body).each( function()
+    {
+         $(this).click( function() {
+             // Works in Safari, not in Firefox.
+             var range = wym._doc.createRange();
+             range.selectNodeContents( this );
+             var selection = window.getSelection();
+             selection.removeAllRanges();
+             selection.addRange(range);
+         });
+    });
+
+}
+
+// Based on the dialog type, return a dictionary of information about an item
+function get_item_info( dialog_type )
+{
+    var
+        item_singular,
+        item_plural,
+        item_controller,
+        item_class;
+    switch( dialog_type ) {
+        case( CONTROLS.ITEM_HISTORY ):
+            item_singular = "History";
+            item_plural = "Histories";
+            item_controller = "history";
+            item_class = "History";
+            break;
+        case( CONTROLS.ITEM_DATASET ):
+            item_singular = "Dataset";
+            item_plural = "Datasets";
+            item_controller = "dataset";
+            item_class = "HistoryDatasetAssociation";
+            break;
+        case( CONTROLS.ITEM_WORKFLOW ):
+            item_singular = "Workflow";
+            item_plural = "Workflows";
+            item_controller = "workflow";
+            item_class = "StoredWorkflow";
+            break;
+        case( CONTROLS.ITEM_PAGE ):
+            item_singular = "Page";
+            item_plural = "Pages";
+            item_controller = "page";
+            item_class = "Page";
+            break;
+        case( CONTROLS.ITEM_VISUALIZATION ):
+            item_singular = "Visualization";
+            item_plural = "Visualizations";
+            item_controller = "visualization";
+            item_class = "Visualization";
+            break;
+    }
+
+    // Build ajax URL that lists items for selection.
+    var item_list_action = "list_" + item_plural.toLowerCase() + "_for_selection";
+    var ajax_url = list_objects_url.replace( "LIST_ACTION", item_list_action );
+
+    // Set up and return dict.
+    return {
+        singular : item_singular,
+        plural : item_plural,
+        controller : item_controller,
+        iclass : item_class,
+        list_ajax_url : ajax_url
+    };
+}
+
+// Make an item importable.
+function make_item_importable( item_controller, item_id, item_type )
+{
+    var ajax_url = set_accessible_url.replace( "ITEM_CONTROLLER", item_controller );
+    $.ajax({
+      type: "POST",
+      url: ajax_url,
+      data: { id: item_id, accessible: 'True' },
+      error: function() { alert("Making " + item_type + " accessible failed"); }
+    });
+}
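+
+// Editor's usage sketch for the two helpers above (the encoded id is
+// hypothetical):
+//
+//   var info = get_item_info( CONTROLS.ITEM_WORKFLOW );
+//   // info.singular   -> "Workflow", info.controller -> "workflow",
+//   // info.list_ajax_url -> list_objects_url with LIST_ACTION replaced
+//   //                       by "list_workflows_for_selection"
+//   make_item_importable( info.controller, 'f2db41e1fa331b3e', info.singular );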
+
+// Completely replace WYM's dialog handling
+WYMeditor.editor.prototype.dialog = function( dialogType, dialogFeatures, bodyHtml ) {
+
+    var wym = this;
+    var sStamp = wym.uniqueStamp();
+    var selected = wym.selected();
+
+    // Swap out URL attribute for id/name attribute in link creation to enable anchor creation in page.
+    function set_link_id()
+    {
+        // When "set link id" link clicked, update UI.
+        $('#set_link_id').click( function()
+        {
+            // Set label.
+            $("#link_attribute_label").text("ID/Name");
+
+            // Set input elt class, value.
+            var attribute_input = $(".wym_href");
+            attribute_input.addClass("wym_id").removeClass("wym_href");
+            if (selected)
+                attribute_input.val( $(selected).attr('id') );
+
+            // Remove link.
+            $(this).remove();
+        });
+    }
+
+    // LINK DIALOG
+    if ( dialogType == WYMeditor.DIALOG_LINK ) {
+        if(selected) {
+            $(wym._options.hrefSelector).val($(selected).attr(WYMeditor.HREF));
+            $(wym._options.srcSelector).val($(selected).attr(WYMeditor.SRC));
+            $(wym._options.titleSelector).val($(selected).attr(WYMeditor.TITLE));
+            $(wym._options.altSelector).val($(selected).attr(WYMeditor.ALT));
+        }
+        // Get current URL, title.
+        var curURL, curTitle;
+        if (selected)
+        {
+            curURL = $(selected).attr("href");
+            if (curURL == undefined)
+                curURL = "";
+            curTitle = $(selected).attr("title");
+            if (curTitle == undefined)
+                curTitle = "";
+        }
+        show_modal(
+            "Create Link",
+            "<div><div><label id='link_attribute_label'>URL <span style='float: right; font-size: 90%'><a href='#' id='set_link_id'>Create in-page anchor</a></span></label><br><input type='text' class='wym_href' value='" + curURL + "' size='40' /></div>"
+                + "<div><label>Title</label><br><input type='text' class='wym_title' value='" + curTitle + "' size='40' /></div></div>",
+            {
+                "Make link": function() {
+                    // Get URL, name/title.
+                    var sUrl = $(wym._options.hrefSelector).val() || '',
+                        sId = $(".wym_id").val() || '',
+                        sName = $(wym._options.titleSelector).val() || '';
+
+                    if (sUrl || sId) {
+                        // Create link.
+                        wym._exec(WYMeditor.CREATE_LINK, sStamp);
+
+                        // Set link attributes.
+                        var link = $("a[href=" + sStamp + "]", wym._doc.body);
+                        link.attr(WYMeditor.HREF, sUrl)
+                            .attr(WYMeditor.TITLE, sName)
+                            .attr("id", sId);
+
+                        // If link's text is default (wym-...), change it to the title.
+                        if (link.text().indexOf('wym-') === 0) {
+                            link.text(sName);
+                        }
+                    }
+                    hide_modal();
+                },
+                "Cancel": function() {
+                    hide_modal();
+                }
+            },
+            {},
+            set_link_id
+        );
+    }
+
+    // IMAGE DIALOG
+    if ( dialogType == WYMeditor.DIALOG_IMAGE ) {
+        if(wym._selected_image) {
+            $(wym._options.dialogImageSelector + " " + wym._options.srcSelector)
+              .val($(wym._selected_image).attr(WYMeditor.SRC));
+            $(wym._options.dialogImageSelector + " " + wym._options.titleSelector)
+              .val($(wym._selected_image).attr(WYMeditor.TITLE));
+            $(wym._options.dialogImageSelector + " " + wym._options.altSelector)
+              .val($(wym._selected_image).attr(WYMeditor.ALT));
+        }
+        show_modal(
+            "Image",
+            "<div class='row'>"
+                + "<label>URL</label><br>"
+                + "<input type='text' class='wym_src' value='' size='40' />"
+                + "</div>"
+                + "<div class='row'>"
+                + "<label>Alt text</label><br>"
+                + "<input type='text' class='wym_alt' value='' size='40' />"
+                + "</div>"
+                + "<div class='row'>"
+                + "<label>Title</label><br>"
+                + "<input type='text' class='wym_title' value='' size='40' />"
+                + "</div>",
+            {
+                "Insert": function() {
+                    var sUrl = $(wym._options.srcSelector).val();
+                    if(sUrl.length > 0) {
+                      wym._exec(WYMeditor.INSERT_IMAGE, sStamp);
+                      $("img[src$=" + sStamp + "]", wym._doc.body)
+                          .attr(WYMeditor.SRC, sUrl)
+                          .attr(WYMeditor.TITLE, $(wym._options.titleSelector).val())
+                          .attr(WYMeditor.ALT, $(wym._options.altSelector).val());
+                    }
+                    hide_modal();
+                },
+                "Cancel": function() {
+                    hide_modal();
+                }
+            }
+        );
+        return;
+    }
+
+    // TABLE DIALOG
+    if ( dialogType == WYMeditor.DIALOG_TABLE ) {
+        show_modal(
+            "Table",
+            "<div class='row'>"
+                + "<label>Caption</label><br>"
+                + "<input type='text' class='wym_caption' value='' size='40' />"
+                + "</div>"
+                + "<div class='row'>"
+                + "<label>Summary</label><br>"
+                + "<input type='text' class='wym_summary' value='' size='40' />"
+                + "</div>"
+                + "<div class='row'>"
+                + "<label>Number Of Rows<br></label>"
+                + "<input type='text' class='wym_rows' value='3' size='3' />"
+                + "</div>"
+                + "<div class='row'>"
+                + "<label>Number Of Cols<br></label>"
+                + "<input type='text' class='wym_cols' value='2' size='3' />"
+                + "</div>",
+            {
+                "Insert": function() {
+                    var iRows = $(wym._options.rowsSelector).val();
+                    var iCols = $(wym._options.colsSelector).val();
+
+                    if(iRows > 0 && iCols > 0) {
+                        var table = wym._doc.createElement(WYMeditor.TABLE);
+                        var newRow = null;
+                        var x, y;
+
+                        // create the caption
+                        var sCaption = $(wym._options.captionSelector).val();
+                        var newCaption = table.createCaption();
+                        newCaption.innerHTML = sCaption;
+
+                        // create the rows and cells
+                        for(x = 0; x < iRows; x++) {
+                            newRow = table.insertRow(x);
+                            for(y = 0; y < iCols; y++) { newRow.insertCell(y); }
+                        }
+
+                        // set the summary attribute
+                        $(table).attr('summary',
+                            $(wym._options.summarySelector).val());
+
+                        // append the table after the selected container
+                        var node = $(wym.findUp(wym.container(),
+                            WYMeditor.MAIN_CONTAINERS)).get(0);
+                        if(!node || !node.parentNode) $(wym._doc.body).append(table);
+                        else $(node).after(table);
+                    }
+                    hide_modal();
+                },
+                "Cancel": function() {
+                    hide_modal();
+                }
+            }
+        );
+    }
+
+    // INSERT "GALAXY ITEM" LINK DIALOG
+    if ( dialogType == CONTROLS.DIALOG_HISTORY_LINK || dialogType == CONTROLS.DIALOG_DATASET_LINK ||
+         dialogType == CONTROLS.DIALOG_WORKFLOW_LINK || dialogType == CONTROLS.DIALOG_PAGE_LINK ||
+         dialogType == CONTROLS.DIALOG_VISUALIZATION_LINK ) {
+        // Based on item type, set useful vars.
+        var item_info;
+        switch(dialogType)
+        {
+            case(CONTROLS.DIALOG_HISTORY_LINK):
+                item_info = get_item_info(CONTROLS.ITEM_HISTORY);
+                break;
+            case(CONTROLS.DIALOG_DATASET_LINK):
+                item_info = get_item_info(CONTROLS.ITEM_DATASET);
+                break;
+            case(CONTROLS.DIALOG_WORKFLOW_LINK):
+                item_info = get_item_info(CONTROLS.ITEM_WORKFLOW);
+                break;
+            case(CONTROLS.DIALOG_PAGE_LINK):
+                item_info = get_item_info(CONTROLS.ITEM_PAGE);
+                break;
+            case(CONTROLS.DIALOG_VISUALIZATION_LINK):
+                item_info = get_item_info(CONTROLS.ITEM_VISUALIZATION);
+                break;
+        }
+
+        $.ajax(
+        {
+            url: item_info.list_ajax_url,
+            data: {},
+            error: function() { alert( "Failed to list "  + item_info.plural.toLowerCase() + " for selection"); },
+            success: function(table_html)
+            {
+                show_modal(
+                    "Insert Link to " + item_info.singular,
+                    table_html +
+                    "<div><input id='make-importable' type='checkbox' checked/>" +
+                    "Make the selected " + item_info.plural.toLowerCase() + " accessible so that they can be viewed by everyone.</div>"
+                    ,
+                    {
+                        "Insert": function()
+                        {
+                            // Make selected items accessible (importable) ?
+                            var make_importable = false;
+                            if ( $('#make-importable:checked').val() != null )
+                                make_importable = true;
+
+                            // Insert a link for each checked item.
+                            $('input[name=id]:checked').each(function() {
+                                var item_id = $(this).val();
+
+                                // Make item importable?
+                                if (make_importable)
+                                    make_item_importable(item_info.controller, item_id, item_info.singular);
+
+                                // Insert link(s) to item(s). This is done by getting item info and then manipulating wym.
+                                var url_template = get_name_and_link_url + item_id;
+                                var ajax_url = url_template.replace( "ITEM_CONTROLLER", item_info.controller );
+                                $.getJSON( ajax_url, function( returned_item_info ) {
+                                    // Get link text.
+                                    wym._exec(WYMeditor.CREATE_LINK, sStamp);
+                                    var link_text = $("a[href=" + sStamp + "]", wym._doc.body).text();
+
+                                    // Insert link: need to do different actions depending on link text.
+                                    if (
+                                        link_text == "" // Firefox.
+                                        ||
+                                        link_text == sStamp // Safari
+                                        )
+                                    {
+                                        // User selected no text; create link from scratch and use default text.
+                                        wym.insert("<a href='" + returned_item_info.link + "'>" + item_info.singular + " '" + returned_item_info.name + "'</a>");
+                                    }
+                                    else
+                                    {
+                                        // Link created from selected text; add href and title.
+                                        $("a[href=" + sStamp + "]", wym._doc.body).attr(WYMeditor.HREF, returned_item_info.link).attr(WYMeditor.TITLE, item_info.singular + item_id);
+                                    }
+                                });
+                            });
+
+                            hide_modal();
+                        },
+                        "Cancel": function()
+                        {
+                            hide_modal();
+                        }
+                    }
+                );
+            }
+        });
+    }
+    // EMBED GALAXY OBJECT DIALOGS
+    if ( dialogType == CONTROLS.DIALOG_EMBED_HISTORY || dialogType == CONTROLS.DIALOG_EMBED_DATASET || dialogType == CONTROLS.DIALOG_EMBED_WORKFLOW || dialogType == CONTROLS.DIALOG_EMBED_PAGE || dialogType == CONTROLS.DIALOG_EMBED_VISUALIZATION ) {
+        // Based on item type, set useful vars.
+        var item_info;
+        switch(dialogType)
+        {
+            case(CONTROLS.DIALOG_EMBED_HISTORY):
+                item_info = get_item_info(CONTROLS.ITEM_HISTORY);
+                break;
+            case(CONTROLS.DIALOG_EMBED_DATASET):
+                item_info = get_item_info(CONTROLS.ITEM_DATASET);
+                break;
+            case(CONTROLS.DIALOG_EMBED_WORKFLOW):
+                item_info = get_item_info(CONTROLS.ITEM_WORKFLOW);
+                break;
+            case(CONTROLS.DIALOG_EMBED_PAGE):
+                item_info = get_item_info(CONTROLS.ITEM_PAGE);
+                break;
+            case(CONTROLS.DIALOG_EMBED_VISUALIZATION):
+                item_info = get_item_info(CONTROLS.ITEM_VISUALIZATION);
+                break;
+        }
+
+        $.ajax(
+        {
+            url: item_info.list_ajax_url,
+            data: {},
+            error: function() { alert( "Failed to list "  + item_info.plural.toLowerCase() + " for selection"); },
+            success: function(list_html)
+            {
+                // Can make histories, workflows importable; cannot make datasets importable.
+                if (dialogType == CONTROLS.DIALOG_EMBED_HISTORY || dialogType == CONTROLS.DIALOG_EMBED_WORKFLOW
+                    || dialogType == CONTROLS.DIALOG_EMBED_VISUALIZATION)
+                    list_html = list_html + "<div><input id='make-importable' type='checkbox' checked/>" +
+                                "Make the selected " + item_info.plural.toLowerCase() + " accessible so that they can be viewed by everyone.</div>";
+                show_modal(
+                    "Embed " + item_info.plural,
+                    list_html,
+                    {
+                        "Embed": function()
+                        {
+                            // Make selected items accessible (importable) ?
+                            var make_importable = false;
+                            if ( $('#make-importable:checked').val() != null )
+                                make_importable = true;
+
+                            $('input[name=id]:checked').each(function() {
+                                // Get item ID and name.
+                                var item_id = $(this).val();
+                                // Use ':first' because there are many labels in table; the first one is the item name.
+                                var item_name = $("label[for='" + item_id + "']:first").text();
+
+                                if (make_importable)
+                                    make_item_importable(item_info.controller, item_id, item_info.singular);
+
+                                // Embedded item HTML; item class is embedded in div container classes; this is necessary because the editor strips
+                                // all non-standard attributes when it returns its content (e.g. it will not return an element attribute of the form
+                                // item_class='History').
+                                var item_elt_id = item_info.iclass + "-"  + item_id;
+                                var item_embed_html = [
+                                    "<div id='", item_elt_id, "' class='embedded-item ",
+                                            item_info.singular.toLowerCase(), " placeholder'>",
+                                        "<p class='title'>",
+                                            "Embedded Galaxy ", item_info.singular, " '", item_name, "'",
+                                        "</p>",
+                                        "<p class='content'>",
+                                            "[Do not edit this block; Galaxy will fill it in with the annotated ",
+                                            item_info.singular.toLowerCase(), " when it is displayed.]",
+                                        "</p>",
+                                    "</div>" ].join( '' );
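+
+                                // Editor's note: for illustration, embedding a History with the
+                                // (hypothetical) id 'abc123' and name 'RNA-seq' produces:
+                                //   <div id='History-abc123' class='embedded-item history placeholder'>
+                                //     <p class='title'>Embedded Galaxy History 'RNA-seq'</p>
+                                //     <p class='content'>[Do not edit this block; ...]</p>
+                                //   </div>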
+
+                                // Insert embedded item into document.
+                                wym.insert(item_embed_html);
+
+                            });
+                            hide_modal();
+                        },
+                        "Cancel": function()
+                        {
+                            hide_modal();
+                        }
+                    }
+                );
+            }
+        });
+    }
+};
+
+$(function(){
+    // Generic error handling
+    $(document).ajaxError( function ( e, x ) {
+        // console.log( e, x );
+        var message = x.responseText || x.statusText || "Could not connect to server";
+        show_modal( "Server error", message, { "Ignore error" : hide_modal } );
+        return false;
+    });
+    // Create editor
+    $("[name=page_content]").wymeditor( {
+        skin: 'galaxy',
+        basePath: editor_base_path,
+        iframeBasePath: iframe_base_path,
+        boxHtml:   "<table class='wym_box' width='100%' height='100%'>"
+                    + "<tr><td><div class='wym_area_top'>"
+                    + WYMeditor.TOOLS
+                    + "</div></td></tr>"
+                    + "<tr height='100%'><td>"
+                    + "<div class='wym_area_main' style='height: 100%;'>"
+                    // + WYMeditor.HTML
+                    + WYMeditor.IFRAME
+                    + WYMeditor.STATUS
+                    + "</div>"
+                    + "</td></tr></table>",
+        toolsItems: [
+            {'name': 'Bold', 'title': 'Strong', 'css': 'wym_tools_strong'},
+            {'name': 'Italic', 'title': 'Emphasis', 'css': 'wym_tools_emphasis'},
+            {'name': 'Superscript', 'title': 'Superscript', 'css': 'wym_tools_superscript'},
+            {'name': 'Subscript', 'title': 'Subscript', 'css': 'wym_tools_subscript'},
+            {'name': 'InsertOrderedList', 'title': 'Ordered_List', 'css': 'wym_tools_ordered_list'},
+            {'name': 'InsertUnorderedList', 'title': 'Unordered_List', 'css': 'wym_tools_unordered_list'},
+            {'name': 'Indent', 'title': 'Indent', 'css': 'wym_tools_indent'},
+            {'name': 'Outdent', 'title': 'Outdent', 'css': 'wym_tools_outdent'},
+            {'name': 'Undo', 'title': 'Undo', 'css': 'wym_tools_undo'},
+            {'name': 'Redo', 'title': 'Redo', 'css': 'wym_tools_redo'},
+            {'name': 'CreateLink', 'title': 'Link', 'css': 'wym_tools_link'},
+            {'name': 'Unlink', 'title': 'Unlink', 'css': 'wym_tools_unlink'},
+            {'name': 'InsertImage', 'title': 'Image', 'css': 'wym_tools_image'},
+            {'name': 'InsertTable', 'title': 'Table', 'css': 'wym_tools_table'},
+        ]
+    });
+    // Get the editor object
+    var editor = $.wymeditors(0);
+    var save = function ( callback ) {
+        show_modal( "Saving page", "progress" );
+
+        // Do save.
+        $.ajax( {
+            url: save_url,
+            type: "POST",
+            data: {
+                id: page_id,
+                content: editor.xhtml(),
+                annotations: JSON.stringify(new Object()),
+                // annotations: JSON.stringify(annotations),
+                "_": "true"
+            },
+            success: function() {
+                callback();
+            }
+        });
+    };
+    // Save button
+    $("#save-button").click( function() {
+        save( function() { hide_modal(); } );
+    });
+    // Close button
+    $("#close-button").click(function() {
+        // var new_content = editor.xhtml();
+        // var changed = ( initial_content != new_content );
+        var changed = false;
+        if ( changed ) {
+            var do_close = function() {
+                window.onbeforeunload = undefined;
+                window.document.location = page_list_url;
+            };
+            show_modal( "Close editor",
+                        "There are unsaved changes to your page which will be lost.",
+                        {
+                            "Cancel" : hide_modal,
+                            "Save Changes" : function() {
+                                save( do_close );
+                            }
+                        }, {
+                            "Don't Save": do_close
+                        } );
+        } else {
+            window.document.location = page_list_url;
+        }
+    });
+
+    // Initialize galaxy elements.
+    //init_galaxy_elts(editor);
+
+    //
+    // Containers, Galaxy style
+    //
+    var containers_menu = $("<div class='galaxy-page-editor-button'><a id='insert-galaxy-link' class='action-button popup' href='#'>Paragraph type</a></div>");
+    $(".wym_area_top").append( containers_menu );
+
+    // Add menu options.
+    var items = {};
+    $.each( editor._options.containersItems, function( k, v ) {
+        var tagname = v.name;
+        items[ v.title.replace( '_', ' ' ) ] = function() { editor.container( tagname ); };
+    });
+    make_popupmenu( containers_menu, items);
+
+    //
+    // Create 'Insert Link to Galaxy Object' menu.
+    //
+
+    // Add menu button.
+    var insert_link_menu_button = $("<div><a id='insert-galaxy-link' class='action-button popup' href='#'>Insert Link to Galaxy Object</a></div>").addClass('galaxy-page-editor-button');
+    $(".wym_area_top").append(insert_link_menu_button);
+
+    // Add menu options.
+    make_popupmenu( insert_link_menu_button, {
+        "Insert History Link": function() {
+            editor.dialog(CONTROLS.DIALOG_HISTORY_LINK);
+        },
+        "Insert Dataset Link": function() {
+            editor.dialog(CONTROLS.DIALOG_DATASET_LINK);
+        },
+        "Insert Workflow Link": function() {
+            editor.dialog(CONTROLS.DIALOG_WORKFLOW_LINK);
+        },
+        "Insert Page Link": function() {
+            editor.dialog(CONTROLS.DIALOG_PAGE_LINK);
+        },
+        "Insert Visualization Link": function() {
+            editor.dialog(CONTROLS.DIALOG_VISUALIZATION_LINK);
+        }
+    });
+
+    //
+    // Create 'Embed Galaxy Object' menu.
+    //
+
+    // Add menu button.
+    var embed_object_button = $("<div><a id='embed-galaxy-object' class='action-button popup' href='#'>Embed Galaxy Object</a></div>").addClass('galaxy-page-editor-button');
+    $(".wym_area_top").append(embed_object_button);
+
+    // Add menu options.
+    make_popupmenu( embed_object_button, {
+        "Embed History": function() {
+            editor.dialog(CONTROLS.DIALOG_EMBED_HISTORY);
+        },
+        "Embed Dataset": function() {
+            editor.dialog(CONTROLS.DIALOG_EMBED_DATASET);
+        },
+        "Embed Workflow": function() {
+            editor.dialog(CONTROLS.DIALOG_EMBED_WORKFLOW);
+        },
+        "Embed Visualization": function() {
+            editor.dialog(CONTROLS.DIALOG_EMBED_VISUALIZATION);
+        },
+        //"Embed Page": function() {
+        //    editor.dialog(CONTROLS.DIALOG_EMBED_PAGE);
+        //}
+    });
+});
diff --git a/client/galaxy/scripts/i18n.js b/client/galaxy/scripts/i18n.js
new file mode 100644
index 0000000..9fa0c26
--- /dev/null
+++ b/client/galaxy/scripts/i18n.js
@@ -0,0 +1,183 @@
+/**
+ * @license RequireJS i18n 2.0.4 Copyright (c) 2010-2012, The Dojo Foundation All Rights Reserved.
+ * Available via the MIT or new BSD license.
+ * see: http://github.com/requirejs/i18n for details
+ */
+/*jslint regexp: true */
+/*global require: false, navigator: false, define: false */
+
+/**
+ * This plugin handles i18n! prefixed modules. It does the following:
+ *
+ * 1) A regular module can have a dependency on an i18n bundle, but the regular
+ * module does not want to specify what locale to load. So it just specifies
+ * the top-level bundle, like "i18n!nls/colors".
+ *
+ * This plugin will load the i18n bundle at nls/colors, see that it is a root/master
+ * bundle since it does not have a locale in its name. It will then try to find
+ * the best match locale available in that master bundle, then request all the
+ * locale pieces for that best match locale. For instance, if the locale is "en-us",
+ * then the plugin will ask for the "en-us", "en" and "root" bundles to be loaded
+ * (but only if they are specified on the master bundle).
+ *
+ * Once all the bundles for the locale pieces load, then it mixes in all those
+ * locale pieces into each other, then finally sets the context.defined value
+ * for the nls/colors bundle to be that mixed in locale.
+ *
+ * 2) A regular module specifies a specific locale to load. For instance,
+ * i18n!nls/fr-fr/colors. In this case, the plugin needs to load the master bundle
+ * first, at nls/colors, then figure out what the best match locale is for fr-fr,
+ * since maybe only fr or just root is defined for that locale. Once that best
+ * fit is found, all of its locale pieces need to have their bundles loaded.
+ *
+ * Once all the bundles for the locale pieces load, then it mixes in all those
+ * locale pieces into each other, then finally sets the context.defined value
+ * for the nls/fr-fr/colors bundle to be that mixed in locale.
+ */
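+
+/*
+ * Editor's example, following the conventions described above (the module
+ * names are illustrative):
+ *
+ *   // nls/colors.js -- the master bundle
+ *   define({
+ *       "root": { "red": "red" },
+ *       "fr": true              // a nls/fr/colors.js bundle exists
+ *   });
+ *
+ *   // nls/fr/colors.js -- the locale piece
+ *   define({ "red": "rouge" });
+ *
+ *   // A consumer; for locale "fr" (or "fr-fr") this resolves to "rouge":
+ *   define(["i18n!nls/colors"], function (colors) { return colors.red; });
+ */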
+(function () {
+    'use strict';
+
+    //regexp for reconstructing the master bundle name from parts of the regexp match
+    //nlsRegExp.exec("foo/bar/baz/nls/en-ca/foo") gives:
+    //["foo/bar/baz/nls/en-ca/foo", "foo/bar/baz/nls/", "/", "/", "en-ca", "foo"]
+    //nlsRegExp.exec("foo/bar/baz/nls/foo") gives:
+    //["foo/bar/baz/nls/foo", "foo/bar/baz/nls/", "/", "/", "foo", ""]
+    //so, if match[5] is blank, it means this is the top bundle definition.
+    var nlsRegExp = /(^.*(^|\/)nls(\/|$))([^\/]*)\/?([^\/]*)/;
+
+    //Helper function to avoid repeating code. Lots of arguments in the
+    //desire to stay functional and support RequireJS contexts without having
+    //to know about the RequireJS contexts.
+    function addPart(locale, master, needed, toLoad, prefix, suffix) {
+        if (master[locale]) {
+            needed.push(locale);
+            if (master[locale] === true || master[locale] === 1) {
+                toLoad.push(prefix + locale + '/' + suffix);
+            }
+        }
+    }
+
+    function addIfExists(req, locale, toLoad, prefix, suffix) {
+        var fullName = prefix + locale + '/' + suffix;
+        if (require._fileExists(req.toUrl(fullName + '.js'))) {
+            toLoad.push(fullName);
+        }
+    }
+
+    /**
+     * Simple function to mix in properties from source into target,
+     * but only if target does not already have a property of the same name.
+     * This is not robust in IE for transferring methods that match
+     * Object.prototype names, but the uses of mixin here seem unlikely to
+     * trigger a problem related to that.
+     */
+    function mixin(target, source, force) {
+        var prop;
+        for (prop in source) {
+            if (source.hasOwnProperty(prop) && (!target.hasOwnProperty(prop) || force)) {
+                target[prop] = source[prop];
+            } else if (typeof source[prop] === 'object') {
+                if (!target[prop] && source[prop]) {
+                    target[prop] = {};
+                }
+                mixin(target[prop], source[prop], force);
+            }
+        }
+    }
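+
+    // Editor's example: mixin({ a: 1 }, { a: 2, b: 3 }) leaves a at 1 and
+    // adds b, giving { a: 1, b: 3 }; with force === true, a would become 2.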
+
+    define(['module'], function (module) {
+        var masterConfig = module.config ? module.config() : {};
+
+        return {
+            version: '2.0.4',
+            /**
+             * Called when a dependency needs to be loaded.
+             */
+            load: function (name, req, onLoad, config) {
+                config = config || {};
+
+                if (config.locale) {
+                    masterConfig.locale = config.locale;
+                }
+
+                var masterName,
+                    match = nlsRegExp.exec(name),
+                    prefix = match[1],
+                    locale = match[4],
+                    suffix = match[5],
+                    parts = locale.split("-"),
+                    toLoad = [],
+                    value = {},
+                    i, part, current = "";
+
+                //If match[5] is blank, it means this is the top bundle definition,
+                //so it does not have to be handled. Locale-specific requests
+                //will have a match[4] value but no match[5]
+                if (match[5]) {
+                    //locale-specific bundle
+                    prefix = match[1];
+                    masterName = prefix + suffix;
+                } else {
+                    //Top-level bundle.
+                    masterName = name;
+                    suffix = match[4];
+                    locale = masterConfig.locale;
+                    if (!locale) {
+                        locale = masterConfig.locale =
+                            typeof navigator === "undefined" ? "root" :
+                            (navigator.language ||
+                             navigator.userLanguage || "root").toLowerCase();
+                    }
+                    parts = locale.split("-");
+                }
+
+                if (config.isBuild) {
+                    //Check for existence of all locale possible files and
+                    //require them if exist.
+                    toLoad.push(masterName);
+                    addIfExists(req, "root", toLoad, prefix, suffix);
+                    for (i = 0; i < parts.length; i++) {
+                        part = parts[i];
+                        current += (current ? "-" : "") + part;
+                        addIfExists(req, current, toLoad, prefix, suffix);
+                    }
+
+                    req(toLoad, function () {
+                        onLoad();
+                    });
+                } else {
+                    //First, fetch the master bundle, it knows what locales are available.
+                    req([masterName], function (master) {
+                        //Figure out the best fit
+                        var needed = [],
+                            part;
+
+                        //Always allow for root, then do the rest of the locale parts.
+                        addPart("root", master, needed, toLoad, prefix, suffix);
+                        for (i = 0; i < parts.length; i++) {
+                            part = parts[i];
+                            current += (current ? "-" : "") + part;
+                            addPart(current, master, needed, toLoad, prefix, suffix);
+                        }
+
+                        //Load all the parts missing.
+                        req(toLoad, function () {
+                            var i, partBundle, part;
+                            for (i = needed.length - 1; i > -1 && needed[i]; i--) {
+                                part = needed[i];
+                                partBundle = master[part];
+                                if (partBundle === true || partBundle === 1) {
+                                    partBundle = req(prefix + part + '/' + suffix);
+                                }
+                                mixin(value, partBundle);
+                            }
+
+                            //All done, notify the loader.
+                            onLoad(value);
+                        });
+                    });
+                }
+            }
+        };
+    });
+}());
diff --git a/client/galaxy/scripts/layout/generic-nav-view.js b/client/galaxy/scripts/layout/generic-nav-view.js
new file mode 100644
index 0000000..a4d6396
--- /dev/null
+++ b/client/galaxy/scripts/layout/generic-nav-view.js
@@ -0,0 +1,98 @@
+/** Real-time Communication feature **/
+define(['mvc/ui/ui-modal'], function( Modal ) {
+
+var GenericNavView = Backbone.View.extend({
+
+    initialize: function ( ) {
+        this.modal = null;
+    },
+
+    /** makes the bootstrap modal and the chat iframe inside it */
+    makeModalIframe: function( e ) {
+        // build the modal parameters
+        var host = window.Galaxy.config.communication_server_host,
+            port = window.Galaxy.config.communication_server_port,
+            username = escape( window.Galaxy.user.attributes.username ),
+            persistent_communication_rooms = escape( window.Galaxy.config.persistent_communication_rooms ),
+            query_string = "?username=" + username + "&persistent_communication_rooms=" + persistent_communication_rooms,
+            src = host + ":" + port + query_string,
+            $el_chat_modal_header = null,
+            $el_chat_modal_body = null,
+            iframe_template = '<iframe class="f-iframe fade in communication-iframe" src="' + src + '"> </iframe>',
+            header_template = '<i class="fa fa-comment" aria-hidden="true" title="Communicate with other users"></i>' +
+                              '<i class="fa fa-expand expand-compress-modal" aria-hidden="true" title="Maximize"></i>' +
+                              '<i class="fa fa-times close-modal" aria-hidden="true" title="Close"></i>',
+            frame_height = 350,
+            frame_width = 600,
+            class_names = 'ui-modal chat-modal';
+
+        // delete the chat modal if one is already present, then create a new one
+        if( $( '.chat-modal' ).length > 0 ) {
+            $( '.chat-modal' ).remove();
+        }
+        GenericNavView.modal = new Modal.View({
+            body            : iframe_template,
+            height          : frame_height,
+            width           : frame_width,
+            closing_events  : true,
+            title_separator : false,
+            cls             : class_names
+        });
+
+        // show the modal
+        GenericNavView.modal.show();
+        $el_chat_modal_header = $( '.chat-modal .modal-header' );
+        $el_chat_modal_body = $( '.chat-modal .modal-body' );
+        // adjust the css of the bootstrap modal for chat
+        $el_chat_modal_header.addClass( 'modal-header-body' );
+        $el_chat_modal_body.addClass( 'modal-header-body' );
+        $el_chat_modal_header.find( 'h4' ).remove();
+        $el_chat_modal_header.removeAttr( 'min-height padding border' );
+        $el_chat_modal_header.append( header_template );
+        // click event of the close button for chat
+        $( '.close-modal' ).click( function( e ) {
+            $( '.chat-modal' ).css( 'display', 'none' );
+        });
+        // click event of the expand and compress icon
+        $( '.expand-compress-modal' ).click( function( e ) {
+            if( $( '.expand-compress-modal' ).hasClass( 'fa-expand' ) ) {
+                $( '.chat-modal .modal-dialog' ).width( '1000px' );
+                $( '.chat-modal .modal-body' ).height( '575px' );
+                $( '.expand-compress-modal' ).removeClass( 'fa-expand' ).addClass( 'fa-compress' );
+                $( '.expand-compress-modal' ).attr( 'title', 'Minimize' );
+                $( '.expand-compress-modal' ).css( 'margin-left', '96.2%' );
+            }
+            else {
+                $( '.chat-modal .modal-dialog' ).width( frame_width + 'px' );
+                $( '.chat-modal .modal-body' ).height( frame_height + 'px' );
+                $( '.expand-compress-modal' ).removeClass( 'fa-compress' ).addClass( 'fa-expand' );
+                $( '.expand-compress-modal' ).attr( 'title', 'Maximize' );
+                $( '.expand-compress-modal' ).css( 'margin-left', '93.2%' );
+            }
+        });
+        return this;
+    },
+
+    /** renders the chat icon as a nav item */
+    render: function() {
+        var self = this,
+            navItem = {
+                id      : 'show-chat-online',
+                icon    : 'fa-comment-o',
+                tooltip : 'Chat online',
+                visible : false,
+                onclick : self.makeModalIframe
+            };
+        return navItem;
+    }
+});
+
+return {
+    GenericNavView  : GenericNavView
+};
+
+});
+
diff --git a/client/galaxy/scripts/layout/masthead.js b/client/galaxy/scripts/layout/masthead.js
new file mode 100644
index 0000000..efb0a50
--- /dev/null
+++ b/client/galaxy/scripts/layout/masthead.js
@@ -0,0 +1,92 @@
+define([
+    'utils/utils',
+    'layout/menu',
+    'layout/scratchbook',
+    'mvc/user/user-quotameter'
+], function( Utils, Menu, Scratchbook, QuotaMeter ) {
+
+/** Masthead **/
+var View = Backbone.View.extend({
+    initialize : function( options ) {
+        var self = this;
+        this.options = options;
+        this.setElement( this._template() );
+        this.$navbarBrandLink   = this.$( '.navbar-brand-link' );
+        this.$navbarBrandImage  = this.$( '.navbar-brand-image' );
+        this.$navbarBrandTitle  = this.$( '.navbar-brand-title' );
+        this.$navbarTabs        = this.$( '.navbar-tabs' );
+        this.$quotaMeter        = this.$( '.quota-meter-container' );
+
+        // build tabs
+        this.collection = new Menu.Collection();
+        this.collection.on( 'add', function( model ) {
+            self.$navbarTabs.append( new Menu.Tab( { model : model } ).render().$el );
+        }).on( 'reset', function() {
+            self.$navbarTabs.empty();
+        }).on( 'dispatch', function( callback ) {
+            self.collection.each( function ( m ) { callback( m ) });
+        }).fetch( this.options );
+
+        // scratchbook
+        Galaxy.frame = this.frame = new Scratchbook( { collection: this.collection } );
+
+        // set up the quota meter (And fetch the current user data from trans)
+        // add quota meter to masthead
+        Galaxy.quotaMeter = this.quotaMeter = new QuotaMeter.UserQuotaMeter({
+            model   : Galaxy.user,
+            el      : this.$quotaMeter
+        });
+
+        // route download-link clicks through a hidden iframe; collect the tabs' beforeunload messages if the user attempts to unload the page
+        $( window ).on( 'click', function( e ) {
+            var $download_link = $( e.target ).closest( 'a[download]' );
+            if ( $download_link.length == 1 ) {
+                if( $( 'iframe[id=download]' ).length === 0 ) {
+                    $( 'body' ).append( $( '<iframe/>' ).attr( 'id', 'download' ).hide() );
+                }
+                $( 'iframe[id=download]' ).attr( 'src', $download_link.attr( 'href' ) );
+                e.preventDefault();
+            }
+        }).on( 'beforeunload', function() {
+            var text = '';
+            self.collection.each( function( model ) {
+                var q = model.get( 'onbeforeunload' ) && model.get( 'onbeforeunload' )();
+                q && ( text += q + ' ' );
+            });
+            if ( text !== '' ) {
+                return text;
+            }
+        });
+    },
+
+    render: function() {
+        this.$navbarBrandTitle.html( 'Galaxy ' + ( this.options.brand && '/ ' + this.options.brand || '' ) );
+        this.$navbarBrandLink.attr( 'href', this.options.logo_url );
+        this.$navbarBrandImage.attr( 'src', this.options.logo_src );
+        this.quotaMeter.render();
+        return this;
+    },
+
+    /** body template */
+    _template: function() {
+        return  '<div id="masthead" class="navbar navbar-fixed-top navbar-inverse">' +
+                    '<div class="navbar-header">' +
+                        '<div class="navbar-tabs"/>' +
+                    '</div>' +
+                    '<div class="navbar-brand">' +
+                        '<a class="navbar-brand-link">' +
+                            '<img class="navbar-brand-image"/>' +
+                            '<span class="navbar-brand-title"/>' +
+                        '</a>' +
+                    '</div>' +
+                    '<div class="quota-meter-container"/>' +
+                    '<div class="navbar-icons"/>' +
+                '</div>';
+    }
+});
+
+return {
+    View: View
+};
+
+});
diff --git a/client/galaxy/scripts/layout/menu.js b/client/galaxy/scripts/layout/menu.js
new file mode 100644
index 0000000..2fbe420
--- /dev/null
+++ b/client/galaxy/scripts/layout/menu.js
@@ -0,0 +1,419 @@
+/** Masthead Collection **/
+define(['mvc/tours', 'layout/generic-nav-view', 'mvc/webhooks'], function( Tours, GenericNav, Webhooks ) {
+var Collection = Backbone.Collection.extend({
+    model: Backbone.Model.extend({
+        defaults: {
+            visible         : true,
+            target          : '_parent'
+        }
+    }),
+    fetch: function( options ){
+        options = options || {};
+        this.reset();
+
+        //
+        // Chat server tab
+        //
+        var extendedNavItem = new GenericNav.GenericNavView();
+        this.add(extendedNavItem.render());
+
+        //
+        // Analyze data tab.
+        //
+        this.add({
+            id              : 'analysis',
+            title           : 'Analyze Data',
+            url             : '',
+            tooltip         : 'Analysis home view'
+        });
+
+        //
+        // Workflow tab.
+        //
+        this.add({
+            id              : 'workflow',
+            title           : 'Workflow',
+            url             : 'workflow',
+            tooltip         : 'Chain tools into workflows',
+            disabled        : !Galaxy.user.id
+        });
+
+        //
+        // 'Shared Items' or Libraries tab.
+        //
+        this.add({
+            id              : 'shared',
+            title           : 'Shared Data',
+            url             : 'library/index',
+            tooltip         : 'Access published resources',
+            menu            : [{
+                    title   : 'Data Libraries',
+                    url     : 'library/list'
+                },{
+                    title   : 'Histories',
+                    url     : 'history/list_published'
+                },{
+                    title   : 'Workflows',
+                    url     : 'workflow/list_published'
+                },{
+                    title   : 'Visualizations',
+                    url     : 'visualization/list_published'
+                },{
+                    title   : 'Pages',
+                    url     : 'page/list_published'
+            }]
+        });
+
+        //
+        // Lab menu.
+        //
+        options.user_requests && this.add({
+            id              : 'lab',
+            title           : 'Lab',
+            menu            : [{
+                    title   : 'Sequencing Requests',
+                    url     : 'requests/index'
+                },{
+                    title   : 'Find Samples',
+                    url     : 'requests/find_samples_index'
+                },{
+                    title   : 'Help',
+                    url     : options.lims_doc_url
+            }]
+        });
+
+        //
+        // Visualization tab.
+        //
+        this.add({
+            id              : 'visualization',
+            title           : 'Visualization',
+            url             : 'visualization/list',
+            tooltip         : 'Visualize datasets',
+            disabled        : !Galaxy.user.id,
+            menu            : [{
+                    title   : 'New Track Browser',
+                    url     : 'visualization/trackster',
+                    target  : '_frame'
+                },{
+                    title   : 'Saved Visualizations',
+                    url     : 'visualization/list',
+                    target  : '_frame'
+                },{
+                    title   : 'Interactive Environments',
+                    url     : 'visualization/gie_list',
+                    target  : 'galaxy_main'
+                }
+            ]
+        });
+
+        //
+        // Webhooks
+        //
+        Webhooks.add({
+            url: 'api/webhooks/masthead/all',
+            callback: function(webhooks) {
+                $(document).ready(function() {
+                    $.each(webhooks.models, function(index, model) {
+                        var webhook = model.toJSON();
+                        if (webhook.activate) {
+                            Galaxy.page.masthead.collection.add({
+                                id      : webhook.name,
+                                icon    : webhook.config.icon,
+                                url     : webhook.config.url,
+                                tooltip : webhook.config.tooltip,
+                                onclick : webhook.config.function && new Function(webhook.config.function)
+                            });
+                        }
+                    });
+                });
+            }
+        });
+
+        //
+        // Admin.
+        //
+        Galaxy.user.get( 'is_admin' ) && this.add({
+            id              : 'admin',
+            title           : 'Admin',
+            url             : 'admin',
+            tooltip         : 'Administer this Galaxy',
+            cls             : 'admin-only'
+        });
+
+        //
+        // Help tab.
+        //
+        var helpTab = {
+            id              : 'help',
+            title           : 'Help',
+            tooltip         : 'Support, contact, and community hubs',
+            menu            : [{
+                    title   : 'Support',
+                    url     : options.support_url,
+                    target  : '_blank'
+                },{
+                    title   : 'Search',
+                    url     : options.search_url,
+                    target  : '_blank'
+                },{
+                    title   : 'Mailing Lists',
+                    url     : options.mailing_lists,
+                    target  : '_blank'
+                },{
+                    title   : 'Videos',
+                    url     : options.screencasts_url,
+                    target  : '_blank'
+                },{
+                    title   : 'Wiki',
+                    url     : options.wiki_url,
+                    target  : '_blank'
+                },{
+                    title   : 'How to Cite Galaxy',
+                    url     : options.citation_url,
+                    target  : '_blank'
+                },{
+                    title   : 'Interactive Tours',
+                    url     : 'tours',
+                    onclick : function(){
+                        if (Galaxy.app){
+                            Galaxy.app.display(new Tours.ToursView());
+                        } else {
+                            // Redirect and use clientside routing to go to tour index
+                            window.location = Galaxy.root + "tours";
+                        }
+                    }
+            }]
+        };
+        options.terms_url && helpTab.menu.push({
+            title   : 'Terms and Conditions',
+            url     : options.terms_url,
+            target  : '_blank'
+        });
+        options.biostar_url && helpTab.menu.unshift({
+            title   : 'Ask a question',
+            url     : 'biostar/biostar_question_redirect',
+            target  : '_blank'
+        });
+        options.biostar_url && helpTab.menu.unshift({
+            title   : 'Galaxy Biostar',
+            url     : options.biostar_url_redirect,
+            target  : '_blank'
+        });
+        this.add( helpTab );
+
+        //
+        // User tab.
+        //
+        if ( !Galaxy.user.id ){
+            var userTab = {
+                id              : 'user',
+                title           : 'User',
+                cls             : 'loggedout-only',
+                tooltip         : 'Account registration or login',
+                menu            : [{
+                    title           : 'Login',
+                    url             : 'user/login',
+                    target          : 'galaxy_main',
+                    noscratchbook   : true
+                }]
+            };
+            options.allow_user_creation && userTab.menu.push({
+                title           : 'Register',
+                url             : 'user/create',
+                target          : 'galaxy_main',
+                noscratchbook   : true
+            });
+            this.add( userTab );
+        } else {
+            var userTab = {
+                id              : 'user',
+                title           : 'User',
+                cls             : 'loggedin-only',
+                tooltip         : 'Account preferences and saved data',
+                menu            : [{
+                        title   : 'Logged in as ' + Galaxy.user.get( 'email' )
+                    },{
+                        title   : 'Preferences',
+                        url     : 'user?cntrller=user',
+                        target  : 'galaxy_main'
+                    },{
+                        title   : 'Custom Builds',
+                        url     : 'user/dbkeys',
+                        target  : 'galaxy_main'
+                    },{
+                        title   : 'Logout',
+                        url     : 'user/logout',
+                        target  : '_top',
+                        divider : true
+                    },{
+                        title   : 'Saved Histories',
+                        url     : 'history/list',
+                        target  : 'galaxy_main'
+                    },{
+                        title   : 'Saved Datasets',
+                        url     : 'dataset/list',
+                        target  : 'galaxy_main'
+                    },{
+                        title   : 'Saved Pages',
+                        url     : 'page/list',
+                        target  : '_top'
+                    },{
+                        title   : 'API Keys',
+                        url     : 'user/api_keys?cntrller=user',
+                        target  : 'galaxy_main'
+                }]
+            };
+            options.use_remote_user && userTab.menu.push({
+                title   : 'Public Name',
+                url     : 'user/edit_username?cntrller=user',
+                target  : 'galaxy_main'
+            });
+            this.add( userTab );
+        }
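+        // highlight the tab marked active in the options, if present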
+        var activeView = this.get( options.active_view );
+        activeView && activeView.set( 'active', true );
+        return new jQuery.Deferred().resolve().promise();
+    }
+});
+
+/** Masthead tab **/
+var Tab = Backbone.View.extend({
+    initialize: function ( options ) {
+        this.model = options.model;
+        this.setElement( this._template() );
+        this.$dropdown  = this.$( '.dropdown' );
+        this.$toggle    = this.$( '.dropdown-toggle' );
+        this.$menu      = this.$( '.dropdown-menu' );
+        this.$note      = this.$( '.dropdown-note' );
+        this.listenTo( this.model, 'change', this.render, this );
+    },
+
+    events: {
+        'click .dropdown-toggle' : '_toggleClick'
+    },
+
+    render: function() {
+        var self = this;
+        $( '.tooltip' ).remove();
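+        // toggle via CSS visibility (not display) so a hidden tab keeps its layout slot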
+        this.$el.attr( 'id', this.model.id )
+                .css( { visibility : this.model.get( 'visible' ) && 'visible' || 'hidden' } );
+        this.model.set( 'url', this._formatUrl( this.model.get( 'url' ) ) );
+        this.$note.html( this.model.get( 'note' ) || '' )
+                  .removeClass().addClass( 'dropdown-note' )
+                  .addClass( this.model.get( 'note_cls' ) )
+                  .css( { 'display' : this.model.get( 'show_note' ) && 'block' || 'none' } );
+        this.$toggle.html( this.model.get( 'title' ) || '' )
+                    .removeClass().addClass( 'dropdown-toggle' )
+                    .addClass( this.model.get( 'cls' ) )
+                    .addClass( this.model.get( 'icon' ) && 'dropdown-icon fa ' + this.model.get( 'icon' ) )
+                    .addClass( this.model.get( 'toggle' ) && 'toggle' )
+                    .attr( 'target', this.model.get( 'target' ) )
+                    .attr( 'href', this.model.get( 'url' ) )
+                    .attr( 'title', this.model.get( 'tooltip' ) )
+                    .tooltip( 'destroy' );
+        this.model.get( 'tooltip' ) && this.$toggle.tooltip( { placement: 'bottom' } );
+        this.$dropdown.removeClass().addClass( 'dropdown' )
+                      .addClass( this.model.get( 'disabled' ) && 'disabled' )
+                      .addClass( this.model.get( 'active' ) && 'active' );
+        if ( this.model.get( 'menu' ) && this.model.get( 'show_menu' ) ) {
+            this.$menu.show();
+            $( '#dd-helper' ).show().off().on( 'click',  function() {
+                $( '#dd-helper' ).hide();
+                self.model.set( 'show_menu', false );
+            });
+        } else {
+            self.$menu.hide();
+            $( '#dd-helper' ).hide();
+        }
+        this.$menu.empty().removeClass( 'dropdown-menu' );
+        if ( this.model.get( 'menu' ) ) {
+            _.each( this.model.get( 'menu' ), function( menuItem ) {
+                self.$menu.append( self._buildMenuItem( menuItem ) );
+                menuItem.divider && self.$menu.append( $( '<li/>' ).addClass( 'divider' ) );
+            });
+            self.$menu.addClass( 'dropdown-menu' );
+            self.$toggle.append( $( '<b/>' ).addClass( 'caret' ) );
+        }
+        return this;
+    },
+
+    /** Add new menu item */
+    _buildMenuItem: function ( options ) {
+        var self = this;
+        options = _.defaults( options || {}, {
+            title           : '',
+            url             : '',
+            target          : '_parent',
+            noscratchbook   : false
+        });
+        options.url = self._formatUrl( options.url );
+        return $( '<li/>' ).append(
+            $( '<a/>' ).attr( 'href', options.url )
+                       .attr( 'target', options.target )
+                       .html( options.title )
+                       .on( 'click', function( e ) {
+                            e.preventDefault();
+                            self.model.set( 'show_menu', false );
+                            if (options.onclick){
+                                options.onclick();
+                            } else {
+                                Galaxy.frame.add( options );
+                            }
+                       })
+        );
+    },
+
+    /** Handle click event */
+    _toggleClick: function( e ) {
+        var self = this;
+        var model = this.model;
+        e.preventDefault();
+        $( '.tooltip' ).hide();
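+        // ask the collection to close any other tab's open dropdown first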
+        model.trigger( 'dispatch', function( m ) {
+            model.id !== m.id && m.get( 'menu' ) && m.set( 'show_menu', false );
+        });
+        if ( !model.get( 'disabled' ) ) {
+            if ( !model.get( 'menu' ) ) {
+                model.get( 'onclick' ) ? model.get( 'onclick' )() : Galaxy.frame.add( model.attributes );
+            } else {
+                model.set( 'show_menu', true );
+            }
+        } else {
+            var buildLink = function( label, url ) {
+                return $( '<div/>' ).append( $( '<a/>' ).attr( 'href', Galaxy.root + url ).html( label ) ).html();
+            };
+            this.$toggle.popover && this.$toggle.popover( 'destroy' );
+            this.$toggle.popover({
+                html        : true,
+                placement   : 'bottom',
+                content     : 'Please ' + buildLink( 'login', 'user/login?use_panels=True' ) + ' or ' +
+                                          buildLink( 'register', 'user/create?use_panels=True' ) + ' to use this feature.'
+            }).popover( 'show' );
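+            // auto-dismiss the login/register popover after 5 seconds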
+            setTimeout( function() { self.$toggle.popover( 'destroy' ) }, 5000 );
+        }
+    },
+
+    /** Prefix relative urls with the Galaxy root path */
+    _formatUrl: function( url ) {
+        return typeof url == 'string' && url.indexOf( '//' ) === -1 && url.charAt( 0 ) != '/' ? Galaxy.root + url : url;
+    },
+
+    /** body template */
+    _template: function () {
+        return  '<ul class="nav navbar-nav">' +
+                    '<li class="dropdown">' +
+                        '<a class="dropdown-toggle"/>' +
+                        '<ul class="dropdown-menu"/>' +
+                        '<div class="dropdown-note"/>' +
+                    '</li>' +
+                '</ul>';
+    }
+});
+
+return {
+    Collection  : Collection,
+    Tab         : Tab
+};
+
+});
diff --git a/client/galaxy/scripts/layout/modal.js b/client/galaxy/scripts/layout/modal.js
new file mode 100644
index 0000000..e9c3588
--- /dev/null
+++ b/client/galaxy/scripts/layout/modal.js
@@ -0,0 +1,150 @@
+define([
+    'jquery',
+], function (jQuery){
+
+"use strict";
+// ============================================================================
+//TODO: unify this (the older modal) with ui-modal (the newer version)
+var $ = jQuery;
+
+// Modal dialog boxes
+var Modal = function( options ) {
+    this.$overlay = options.overlay;
+    this.$dialog = options.dialog;
+    this.$header = this.$dialog.find( ".modal-header" );
+    this.$body = this.$dialog.find( ".modal-body" );
+    this.$footer = this.$dialog.find( ".modal-footer" );
+    this.$backdrop = options.backdrop;
+    // Close button
+    this.$header.find( ".close" ).on( "click", $.proxy( this.hide, this ) );
+};
+
+$.extend( Modal.prototype, {
+    setContent: function( options ) {
+        this.$header.hide();
+        // Title
+        if ( options.title ) {
+            this.$header.find( ".title" ).html( options.title );
+            this.$header.show();
+        }
+        if ( options.closeButton ) {
+            this.$header.find( ".close" ).show();
+            this.$header.show();
+        } else {
+            this.$header.find( ".close" ).hide();
+        }
+        // Buttons
+        this.$footer.hide();
+        var $buttons = this.$footer.find( ".buttons" ).html( "" );
+        if ( options.buttons ) {
+            $.each( options.buttons, function( name, value ) {
+                 $buttons.append( $( '<button></button> ' ).text( name ).click( value ) ).append( " " );
+            });
+            this.$footer.show();
+        }
+        var $extraButtons = this.$footer.find( ".extra_buttons" ).html( "" );
+        if ( options.extra_buttons ) {
+            $.each( options.extra_buttons, function( name, value ) {
+                 $extraButtons.append( $( '<button></button>' ).text( name ).click( value ) ).append( " " );
+            });
+            this.$footer.show();
+        }
+        // Body
+        var body = options.body;
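+        // the magic string 'progress' is rendered as an animated bootstrap progress bar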
+        if ( body == "progress" ) {
+            body = $("<div class='progress progress-striped active'><div class='progress-bar' style='width: 100%'></div></div>");
+        }
+        this.$body.html( body );
+    },
+    show: function( options, callback ) {
+        if ( ! this.$dialog.is( ":visible" ) ) {
+            if ( options.backdrop) {
+                this.$backdrop.addClass( "in" );
+            } else {
+                this.$backdrop.removeClass( "in" );
+            }
+            this.$overlay.show();
+            this.$dialog.show();
+            this.$overlay.addClass("in");
+            // Fix min-width so that modal cannot shrink considerably if new content is loaded.
+            this.$body.css( "min-width", this.$body.width() );
+            // Set max-height so that modal does not exceed window size and is in middle of page.
+            // TODO: this could perhaps be handled better using CSS.
+            this.$body.css( "max-height",
+                            $(window).height() -
+                            this.$footer.outerHeight() -
+                            this.$header.outerHeight() -
+                            parseInt( this.$dialog.css( "padding-top" ), 10 ) -
+                            parseInt( this.$dialog.css( "padding-bottom" ), 10 )
+                            );
+        }
+        // Callback on init
+        if ( callback ) {
+            callback();
+        }
+    },
+    hide: function() {
+        var modal = this;
+        modal.$dialog.fadeOut( function() {
+           modal.$overlay.hide();
+           modal.$backdrop.removeClass( "in" );
+           modal.$body.children().remove();
+           // Clear min-width to allow for modal to take size of new body.
+           modal.$body.css( "min-width", undefined );
+       });
+   }
+});
+
+var modal;
+
+$(function(){
+   modal = new Modal( { overlay: $("#top-modal"), dialog: $("#top-modal-dialog"), backdrop: $("#top-modal-backdrop") } );
+});
+
+// Backward compatibility
+function hide_modal() {
+    modal.hide();
+}
+
+function show_modal( title, body, buttons, extra_buttons, init_fn ) {
+    modal.setContent( { title: title, body: body, buttons: buttons, extra_buttons: extra_buttons } );
+    modal.show( { backdrop: true }, init_fn );
+}
+
+function show_message( title, body, buttons, extra_buttons, init_fn ) {
+    modal.setContent( { title: title, body: body, buttons: buttons, extra_buttons: extra_buttons } );
+    modal.show( { backdrop: false }, init_fn  );
+}
+
+function show_in_overlay( options ) {
+    var width = options.width || '600';
+    var height = options.height || '400';
+    var scroll = options.scroll || 'auto';
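+    // clicking the dimmed background closes the overlay and unbinds this handler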
+    $("#overlay-background").bind( "click.overlay", function() {
+        hide_modal();
+        $("#overlay-background").unbind( "click.overlay" );
+    });
+    modal.setContent({
+        closeButton: true,
+        title: " ",
+        body: $(
+            "<div style='margin: -5px;'><iframe style='margin: 0; padding: 0;' src='" + options.url +
+            "' width='" + width +
+            "' height='" + height +
+            "' scrolling='" + scroll +
+            "' frameborder='0'></iframe></div>"
+        )
+    });
+    modal.show( { backdrop: true } );
+}
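+
+// Usage sketch (the exports below are real; the argument values are illustrative):
+//   show_modal( 'Saving', 'progress', { 'Cancel': hide_modal } );
+//   show_in_overlay( { url: 'some/page', width: 700, height: 500 } );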
+
+
+// ============================================================================
+    return {
+        Modal : Modal,
+        hide_modal : hide_modal,
+        show_modal : show_modal,
+        show_message : show_message,
+        show_in_overlay : show_in_overlay,
+    };
+});
diff --git a/client/galaxy/scripts/layout/page.js b/client/galaxy/scripts/layout/page.js
new file mode 100644
index 0000000..c9d4732
--- /dev/null
+++ b/client/galaxy/scripts/layout/page.js
@@ -0,0 +1,170 @@
+define([
+    'layout/masthead',
+    'layout/panel',
+    'mvc/ui/ui-modal',
+    'mvc/base-mvc'
+], function( Masthead, Panel, Modal, BaseMVC ) {
+
+// ============================================================================
+var PageLayoutView = Backbone.View.extend( BaseMVC.LoggableMixin ).extend({
+    _logNamespace : 'layout',
+
+    el : 'body',
+    className : 'full-content',
+
+    _panelIds : [
+        'left', 'center', 'right'
+    ],
+
+    defaultOptions : {
+        message_box_visible     : false,
+        message_box_content     : '',
+        message_box_class       : 'info',
+        show_inactivity_warning : false,
+        inactivity_box_content  : ''
+    },
+
+    initialize : function( options ) {
+        // TODO: remove globals
+        this.log( this + '.initialize:', options );
+        _.extend( this, _.pick( options, this._panelIds ) );
+        this.options = _.defaults( _.omit( options.config, this._panelIds ), this.defaultOptions );
+        Galaxy.modal = this.modal = new Modal.View();
+        this.masthead = new Masthead.View( this.options );
+        this.$el.attr( 'scroll', 'no' );
+        this.$el.html( this._template() );
+        this.$el.append( this.masthead.frame.$el );
+        this.$( '#masthead' ).replaceWith( this.masthead.$el );
+        this.$el.append( this.modal.$el );
+        this.$messagebox = this.$( '#messagebox' );
+        this.$inactivebox = this.$( '#inactivebox' );
+    },
+
+    render : function() {
+        // TODO: Remove this line after select2 update
+        $( '.select2-hidden-accessible' ).remove();
+        this.log( this + '.render:' );
+        this.masthead.render();
+        this.renderMessageBox();
+        this.renderInactivityBox();
+        this.renderPanels();
+        this._checkCommunicationServerOnline();
+        return this;
+    },
+
+    /** Render message box */
+    renderMessageBox : function() {
+        if ( this.options.message_box_visible ){
+            var content = this.options.message_box_content || '';
+            var level = this.options.message_box_class || 'info';
+            this.$el.addClass( 'has-message-box' );
+            this.$messagebox
+                .attr( 'class', 'panel-' + level + '-message' )
+                .html( content )
+                .toggle( !!content )
+                .show();
+        } else {
+            this.$el.removeClass( 'has-message-box' );
+            this.$messagebox.hide();
+        }
+        return this;
+    },
+
+    /** Render inactivity warning */
+    renderInactivityBox : function() {
+        if( this.options.show_inactivity_warning ){
+            var content = this.options.inactivity_box_content || '';
+            var verificationLink = $( '<a/>' ).attr( 'href', Galaxy.root + 'user/resend_verification' ).text( 'Resend verification' );
+            this.$el.addClass( 'has-inactivity-box' );
+            this.$inactivebox
+                .html( content + ' ' )
+                .append( verificationLink )
+                .toggle( !!content )
+                .show();
+        } else {
+            this.$el.removeClass( 'has-inactivity-box' );
+            this.$inactivebox.hide();
+        }
+        return this;
+    },
+
+    /** Render panels */
+    renderPanels : function() {
+        var page = this;
+        this._panelIds.forEach( function( panelId ){
+            if( _.has( page, panelId ) ){
+                page[ panelId ].setElement( '#' + panelId );
+                page[ panelId ].render();
+            }
+        });
+        if( !this.left ){
+            this.center.$el.css( 'left', 0 );
+        }
+        if( !this.right ){
+            this.center.$el.css( 'right', 0 );
+        }
+        return this;
+    },
+
+    /** body template */
+    _template: function() {
+        return [
+            '<div id="everything">',
+                '<div id="background"/>',
+                '<div id="masthead"/>',
+                '<div id="messagebox"/>',
+                '<div id="inactivebox" class="panel-warning-message" />',
+                this.left?   '<div id="left" />' : '',
+                this.center? '<div id="center" class="inbound" />' : '',
+                this.right?  '<div id="right" />' : '',
+            '</div>',
+            '<div id="dd-helper" />',
+        ].join('');
+    },
+
+    /** hide both side panels if previously shown */
+    hideSidePanels : function(){
+        if( this.left ){
+            this.left.hide();
+        }
+        if( this.right ){
+            this.right.hide();
+        }
+    },
+
+    toString : function() { return 'PageLayoutView'; },
+
+    /** Check whether the communication server is online; show the chat icon if so, otherwise hide it */
+    _checkCommunicationServerOnline: function(){
+        var host = window.Galaxy.config.communication_server_host,
+            port = window.Galaxy.config.communication_server_port,
+            $chat_icon_element = $( "#show-chat-online" );
+        /** Check if the user has deactivated communication in their personal settings */
+        if (window.Galaxy.user.attributes.preferences !== undefined && window.Galaxy.user.attributes.preferences.communication_server === '1') {
+            // See if the configured communication server is available
+            $.ajax({
+                url: host + ":" + port,
+            })
+            .done( function( data ) {
+                // enable communication only when a user is logged in
+                if( window.Galaxy.user.id !== null ) {
+                    if( $chat_icon_element.css( "visibility" ) === "hidden" ) {
+                        $chat_icon_element.css( "visibility", "visible" );
+                    }
+                }
+            })
+            .fail( function( data ) {
+                // hide the communication icon if the communication server is not available
+                $chat_icon_element.css( "visibility", "hidden" );
+            });
+        } else {
+            $chat_icon_element.css( "visibility", "hidden" );
+        }
+    },
+});
+
+// ============================================================================
+    return {
+        PageLayoutView: PageLayoutView
+    };
+});
diff --git a/client/galaxy/scripts/layout/panel.js b/client/galaxy/scripts/layout/panel.js
new file mode 100644
index 0000000..52e666e
--- /dev/null
+++ b/client/galaxy/scripts/layout/panel.js
@@ -0,0 +1,293 @@
+define([
+    'jquery',
+    'libs/underscore',
+    'libs/backbone',
+    'mvc/base-mvc',
+], function( jQuery, _, Backbone, BASE_MVC ){
+
+"use strict";
+// ============================================================================
+var $ = jQuery;
+
+var MIN_PANEL_WIDTH = 160,
+    MAX_PANEL_WIDTH = 800;
+
+// ----------------------------------------------------------------------------
+/**
+ *
+ */
+var SidePanel = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : 'layout',
+
+    initialize: function( attributes ){
+        this.log( this + '.initialize:', attributes );
+        this.title = attributes.title || this.title || '';
+
+        this.hidden = false;
+        this.savedSize = null;
+        this.hiddenByTool = false;
+    },
+
+    $center : function(){
+        return this.$el.siblings( '#center' );
+    },
+
+    $toggleButton : function(){
+        return this.$( '.unified-panel-footer > .panel-collapse' );
+    },
+
+    render: function(){
+        this.log( this + '.render:' );
+        this.$el.html( this.template( this.id ) );
+    },
+
+    /** panel dom template. id is 'right' or 'left' */
+    template: function(){
+        return [
+            this._templateHeader(),
+            this._templateBody(),
+            this._templateFooter(),
+        ].join('');
+    },
+
+    /** panel header template */
+    _templateHeader: function( data ){
+        return [
+            '<div class="unified-panel-header" unselectable="on">',
+                '<div class="unified-panel-header-inner">',
+                    '<div class="panel-header-buttons" style="float: right"/>',
+                    '<div class="panel-header-text">', _.escape( this.title ), '</div>',
+                '</div>',
+            '</div>',
+        ].join('');
+    },
+
+    /** panel body template */
+    _templateBody: function( data ){
+        return '<div class="unified-panel-body"/>';
+    },
+
+    /** panel footer template (the collapse button carries the panel id, 'right' or 'left') */
+    _templateFooter: function( data ){
+        return [
+            '<div class="unified-panel-footer">',
+                '<div class="panel-collapse ', _.escape( this.id ), '"/>',
+                '<div class="drag"/>',
+            '</div>',
+        ].join('');
+    },
+
+    // ..............................................................
+    events : {
+        'mousedown .unified-panel-footer > .drag'       : '_mousedownDragHandler',
+        'click .unified-panel-footer > .panel-collapse' : 'toggle'
+    },
+
+    _mousedownDragHandler : function( ev ){
+        var self = this,
+            draggingLeft = this.id === 'left',
+            prevX = ev.pageX;
+
+        function move( e ){
+            var delta = e.pageX - prevX;
+            prevX = e.pageX;
+
+            var oldWidth = self.$el.width(),
+                newWidth = draggingLeft?( oldWidth + delta ):( oldWidth - delta );
+            // Limit range
+            newWidth = Math.min( MAX_PANEL_WIDTH, Math.max( MIN_PANEL_WIDTH, newWidth ) );
+            self.resize( newWidth );
+        }
+
+        // a page-wide overlay that captures mouse move/release events;
+        // without it, dragging would stop firing once the mouse left the drag button area
+        $( '#dd-helper' )
+            .show()
+            .on( 'mousemove', move )
+            .one( 'mouseup', function( e ){
+                $( this ).hide().off( 'mousemove', move );
+            });
+    },
+
+    //TODO: the following three could be simplified I think
+    resize : function( newSize ){
+        this.$el.css( 'width', newSize );
+        // shift the center panel over by newSize pixels on this panel's side ('left' or 'right')
+        this.$center().css( this.id, newSize );
+        return this;
+    },
+
+    show : function(){
+        if( !this.hidden ){ return; }
+        var self = this,
+            animation = {},
+            whichSide = this.id;
+
+        animation[ whichSide ] = 0;
+        self.$el
+            .css( whichSide, -this.savedSize )
+            .show()
+            .animate( animation, "fast", function(){
+                self.resize( self.savedSize );
+            });
+
+        self.hidden = false;
+        self.$toggleButton().removeClass( "hidden" );
+        return self;
+    },
+
+    hide : function(){
+        if( this.hidden ){ return; }
+        var self = this,
+            animation = {},
+            whichSide = this.id;
+
+        self.savedSize = this.$el.width();
+        animation[ whichSide ] = -this.savedSize;
+        this.$el.animate( animation, "fast" );
+        this.$center().css( whichSide, 0 );
+
+        self.hidden = true;
+        self.$toggleButton().addClass( "hidden" );
+        return self;
+    },
+
+    toggle: function( ev ){
+        var self = this;
+        if( self.hidden ){
+            self.show();
+        } else {
+            self.hide();
+        }
+        self.hiddenByTool = false;
+        return self;
+    },
+
+    // ..............................................................
+    //TODO: only used in message.mako?
+    /** auto-collapse the panel when the center area is narrower than the hinted width, restore it when space returns */
+    handle_minwidth_hint: function( hint ){
+        var space = this.$center().width() - ( this.hidden ? this.savedSize : 0 );
+        if( space < hint ){
+            if( !this.hidden ){
+                this.toggle();
+                this.hiddenByTool = true;
+            }
+        } else {
+            if( this.hiddenByTool ){
+                this.toggle();
+                this.hiddenByTool = false;
+            }
+        }
+        return this;
+    },
+
+    /** programmatically 'show' or 'hide' the panel */
+    force_panel : function( op ){
+        if( op == 'show' ){ return this.show(); }
+        if( op == 'hide' ){ return this.hide(); }
+        return this;
+    },
+
+    toString : function(){ return 'SidePanel(' + this.id + ')'; }
+});
+
+// ----------------------------------------------------------------------------
+// TODO: side should be defined by page - not here
+var LeftPanel = SidePanel.extend({
+    id : 'left',
+});
+
+var RightPanel = SidePanel.extend({
+    id : 'right',
+});
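+
+// Usage sketch (illustrative): the page layout constructs these, e.g.
+//   var left = new LeftPanel({ title: 'Tools' }); left.setElement( '#left' ); left.render();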
+
+
+// ----------------------------------------------------------------------------
+/**
+ *
+ */
+var CenterPanel = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : 'layout',
+
+    initialize : function( options ){
+        this.log( this + '.initialize:', options );
+        /** previous view contained in the center panel - cached for removal later */
+        this.prev = null;
+    },
+
+    render : function(){
+        this.log( this + '.render:' );
+        this.$el.html( this.template() );
+        // iframe 'load' does not bubble, so it cannot be delegated via the Backbone events map
+        this.$( '#galaxy_main' ).on( 'load', _.bind( this._iframeChangeHandler, this ) );
+    },
+
+    /** when the galaxy_main iframe navigates, show it and broadcast the new location */
+    _iframeChangeHandler : function( ev ){
+        var iframe = ev.currentTarget;
+        var location = iframe.contentWindow && iframe.contentWindow.location;
+        if( location && location.host ){
+            // show the iframe and hide MVCview div, remove any views in the MVCview div
+            $( iframe ).show();
+            if( this.prev ){
+                this.prev.remove();
+            }
+            this.$( '#center-panel' ).hide();
+            // TODO: move to Galaxy
+            Galaxy.trigger( 'galaxy_main:load', {
+                fullpath: location.pathname + location.search + location.hash,
+                pathname: location.pathname,
+                search  : location.search,
+                hash    : location.hash
+            });
+        }
+    },
+
+    /** display the given Backbone view in the center panel, hiding the iframe (honoring any onbeforeunload handler) */
+    display: function( view ){
+        // we need to display an MVC view: hide the iframe and show the other center panel
+        // first checking for any onbeforeunload handlers on the iframe
+        var contentWindow = this.$( '#galaxy_main' )[ 0 ].contentWindow || {};
+        var message = contentWindow.onbeforeunload && contentWindow.onbeforeunload();
+        if ( !message || confirm( message ) ) {
+            contentWindow.onbeforeunload = undefined;
+            // remove any previous views
+            if( this.prev ){
+                this.prev.remove();
+            }
+            this.prev = view;
+            this.$( '#galaxy_main' ).attr( 'src', 'about:blank' ).hide();
+            this.$( '#center-panel' ).scrollTop( 0 ).append( view.$el ).show();
+            Galaxy.trigger( 'center-panel:load', view );
+
+        } else {
+            if( view ){
+                view.remove();
+            }
+        }
+    },
+
+    template: function(){
+        return [
+            //TODO: remove inline styling
+            '<div style="position: absolute; width: 100%; height: 100%">',
+                '<iframe name="galaxy_main" id="galaxy_main" frameborder="0" ',
+                        'style="position: absolute; width: 100%; height: 100%;"/>',
+                '<div id="center-panel" ',
+                     'style="display: none; position: absolute; width: 100%; height: 100%; padding: 10px; overflow: auto;"/>',
+            '</div>'
+        ].join('');
+    },
+
+    toString : function(){ return 'CenterPanel'; }
+});
+
+
+// ============================================================================
+    return {
+        LeftPanel : LeftPanel,
+        RightPanel : RightPanel,
+        CenterPanel : CenterPanel
+    };
+});
diff --git a/client/galaxy/scripts/layout/scratchbook.js b/client/galaxy/scripts/layout/scratchbook.js
new file mode 100644
index 0000000..faf9da4
--- /dev/null
+++ b/client/galaxy/scripts/layout/scratchbook.js
@@ -0,0 +1,190 @@
+/** The frame manager uses ui-frames to provide the scratchbook masthead icon and functionality **/
+define([ 'mvc/ui/ui-frames' ], function( Frames ) {
+return Backbone.View.extend({
+    initialize : function( options ) {
+        var self = this;
+        options = options || {};
+        this.frames = new Frames.View({ visible : false });
+        this.setElement( this.frames.$el );
+        this.buttonActive = options.collection.add({
+            id              : 'enable-scratchbook',
+            icon            : 'fa-th',
+            tooltip         : 'Enable/Disable Scratchbook',
+            onclick         : function() {
+                self.active = !self.active;
+                self.buttonActive.set({
+                    toggle    : self.active,
+                    show_note : self.active,
+                    note_cls  : self.active && 'fa fa-check'
+                });
+                !self.active && self.frames.hide();
+            },
+            onbeforeunload  : function() {
+                if ( self.frames.length() > 0 ) {
+                    return 'You opened ' + self.frames.length() + ' frame(s) which will be lost.';
+                }
+            }
+        });
+        this.buttonLoad = options.collection.add({
+            id              : 'show-scratchbook',
+            icon            : 'fa-eye',
+            tooltip         : 'Show/Hide Scratchbook',
+            show_note       : true,
+            visible         : false,
+            onclick         : function( e ) {
+                self.frames.visible ? self.frames.hide() : self.frames.show();
+            }
+        });
+        this.frames.on( 'add remove', function() {
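+            // hide the frames view when the last frame is removed and sync the masthead badge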
+            this.visible && this.length() == 0 && this.hide();
+            self.buttonLoad.set( { 'note': this.length(), 'visible': this.length() > 0 } );
+        }).on( 'show hide', function() {
+            self.buttonLoad.set( { 'toggle': this.visible, 'icon': this.visible && 'fa-eye' || 'fa-eye-slash' } );
+        });
+        this.history_cache = {};
+    },
+
+    /** Add a dataset to the frames */
+    addDataset: function( dataset_id ) {
+        var self = this;
+        var current_dataset = null;
+        if ( Galaxy && Galaxy.currHistoryPanel ) {
+            var history_id = Galaxy.currHistoryPanel.collection.historyId;
+            this.history_cache[ history_id ] = { name: Galaxy.currHistoryPanel.model.get( 'name' ), dataset_ids: [] };
+            Galaxy.currHistoryPanel.collection.each( function( model ) {
+                !model.get( 'deleted' ) && model.get( 'visible' ) && self.history_cache[ history_id ].dataset_ids.push( model.get( 'id' ) );
+            });
+        }
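+        // returns the id of the dataset 'offset' positions away in the cached history, if in range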
+        var _findDataset = function( dataset, offset ) {
+            if ( dataset ) {
+                var history_details = self.history_cache[ dataset.get( 'history_id' ) ];
+                if ( history_details && history_details.dataset_ids ) {
+                    var dataset_list = history_details.dataset_ids;
+                    var pos = dataset_list.indexOf( dataset.get( 'id' ) );
+                    if ( pos !== -1 && pos + offset >= 0 && pos + offset < dataset_list.length ) {
+                        return dataset_list[ pos + offset ];
+                    }
+                }
+            }
+        };
+        var _loadDatasetOffset = function( dataset, offset, frame ) {
+            var new_dataset_id = _findDataset( dataset, offset );
+            if ( new_dataset_id ) {
+                self._loadDataset( new_dataset_id, function( new_dataset, config ) {
+                    current_dataset = new_dataset;
+                    frame.model.set( config );
+                });
+            } else {
+                frame.model.trigger( 'change' );
+            }
+        };
+        this._loadDataset( dataset_id, function( dataset, config ) {
+            current_dataset = dataset;
+            self.add( _.extend( { menu: [ { icon     : 'fa fa-chevron-circle-left',
+                                            tooltip  : 'Previous in History',
+                                            onclick  : function( frame ) { _loadDatasetOffset( current_dataset, -1, frame ) },
+                                            disabled : function() { return !_findDataset( current_dataset, -1 ) } },
+                                          { icon     : 'fa fa-chevron-circle-right',
+                                            tooltip  : 'Next in History',
+                                            onclick  : function( frame ) { _loadDatasetOffset( current_dataset, 1, frame ) },
+                                            disabled : function() { return !_findDataset( current_dataset, 1 ) } } ] }, config ) );
+        });
+    },
+
+    _loadDataset: function( dataset_id, callback ) {
+        var self = this;
+        require([ 'mvc/dataset/data' ], function( DATA ) {
+            var dataset = new DATA.Dataset( { id : dataset_id } );
+            $.when( dataset.fetch() ).then( function() {
+                var is_tabular = _.find( [ 'tabular', 'interval' ] , function( data_type ) {
+                    return dataset.get( 'data_type' ).indexOf( data_type ) !== -1;
+                });
+                var title = dataset.get( 'name' );
+                var history_details = self.history_cache[ dataset.get( 'history_id' ) ];
+                if ( history_details ) {
+                    title = history_details.name + ': ' + title;
+                }
+                callback( dataset, is_tabular ? {
+                    title   : title,
+                    url     : null,
+                    content : DATA.createTabularDatasetChunkedView({
+                        model       : new DATA.TabularDataset( dataset.toJSON() ),
+                        embedded    : true,
+                        height      : '100%'
+                    }).$el
+                } : {
+                    title   : title,
+                    url     : Galaxy.root + 'datasets/' + dataset_id + '/display/?preview=True',
+                    content : null
+                } );
+            });
+        });
+    },
+
+    /** Add a trackster visualization to the frames. */
+    addTrackster: function(viz_id) {
+        var self = this;
+        require(['viz/visualization', 'viz/trackster'], function(visualization, trackster) {
+            var viz = new visualization.Visualization({id: viz_id});
+            $.when( viz.fetch() ).then( function() {
+                var ui = new trackster.TracksterUI(Galaxy.root);
+
+                // Construct frame config based on dataset's type.
+                var frame_config = {
+                        title: viz.get('name'),
+                        type: 'other',
+                        content: function(parent_elt) {
+                            // Create view config.
+                            var view_config = {
+                                container: parent_elt,
+                                name: viz.get('title'),
+                                id: viz.id,
+                                // FIXME: this will not work with custom builds b/c the dbkey needs to be encoded.
+                                dbkey: viz.get('dbkey'),
+                                stand_alone: false
+                            },
+                            latest_revision = viz.get('latest_revision'),
+                            drawables = latest_revision.config.view.drawables;
+
+                            // Set up datasets in drawables.
+                            _.each(drawables, function(d) {
+                                d.dataset = {
+                                    hda_ldda: d.hda_ldda,
+                                    id: d.dataset_id
+                                };
+                            });
+                            var view = ui.create_visualization(view_config,
+                                                           latest_revision.config.viewport,
+                                                           latest_revision.config.view.drawables,
+                                                           latest_revision.config.bookmarks,
+                                                           false);
+                        }
+                    };
+                self.add(frame_config);
+            });
+        });
+    },
+
+    /** Add and display a new frame/window based on options. */
+    add: function( options ) {
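+        // route by target: real browser targets open outside the scratchbook; otherwise the url may load as a frame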
+        if ( options.target == '_blank' ) {
+            window.open( options.url );
+        } else if ( options.target == '_top' || options.target == '_parent' || options.target == '_self' ) {
+            window.location = options.url;
+        } else if ( !this.active || options.noscratchbook ) {
+            var $galaxy_main = $( window.parent.document ).find( '#galaxy_main' );
+            if ( options.target == 'galaxy_main' || options.target == 'center' ) {
+                if ( $galaxy_main.length === 0 ) {
+                    window.location = options.url + ( options.url.indexOf( '?' ) == -1 ? '?' : '&' ) + 'use_panels=True';
+                } else {
+                    $galaxy_main.attr( 'src', options.url );
+                }
+            } else {
+                window.location = options.url;
+            }
+        } else {
+            this.frames.add( options );
+        }
+    }
+});
+
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/backbone.js b/client/galaxy/scripts/libs/backbone.js
new file mode 100644
index 0000000..55ccb22
--- /dev/null
+++ b/client/galaxy/scripts/libs/backbone.js
@@ -0,0 +1,1920 @@
+//     Backbone.js 1.3.3
+
+//     (c) 2010-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+//     Backbone may be freely distributed under the MIT license.
+//     For all details and documentation:
+//     http://backbonejs.org
+
+(function(factory) {
+
+  // Establish the root object, `window` (`self`) in the browser, or `global` on the server.
+  // We use `self` instead of `window` for `WebWorker` support.
+  var root = (typeof self == 'object' && self.self === self && self) ||
+            (typeof global == 'object' && global.global === global && global);
+
+  // Set up Backbone appropriately for the environment. Start with AMD.
+  if (typeof define === 'function' && define.amd) {
+    define(['underscore', 'jquery', 'exports'], function(_, $, exports) {
+      // Export global even in AMD case in case this script is loaded with
+      // others that may still expect a global Backbone.
+      root.Backbone = factory(root, exports, _, $);
+    });
+
+  // Next for Node.js or CommonJS. jQuery may not be needed as a module.
+  } else if (typeof exports !== 'undefined') {
+    var _ = require('underscore'), $;
+    try { $ = require('jquery'); } catch (e) {}
+    factory(root, exports, _, $);
+
+  // Finally, as a browser global.
+  } else {
+    root.Backbone = factory(root, {}, root._, (root.jQuery || root.Zepto || root.ender || root.$));
+  }
+
+})(function(root, Backbone, _, $) {
+
+  // Initial Setup
+  // -------------
+
+  // Save the previous value of the `Backbone` variable, so that it can be
+  // restored later on, if `noConflict` is used.
+  var previousBackbone = root.Backbone;
+
+  // Create a local reference to a common array method we'll want to use later.
+  var slice = Array.prototype.slice;
+
+  // Current version of the library. Keep in sync with `package.json`.
+  Backbone.VERSION = '1.3.3';
+
+  // For Backbone's purposes, jQuery, Zepto, Ender, or My Library (kidding) owns
+  // the `$` variable.
+  Backbone.$ = $;
+
+  // Runs Backbone.js in *noConflict* mode, returning the `Backbone` variable
+  // to its previous owner. Returns a reference to this Backbone object.
+  Backbone.noConflict = function() {
+    root.Backbone = previousBackbone;
+    return this;
+  };
+
+  // Turn on `emulateHTTP` to support legacy HTTP servers. Setting this option
+  // will fake `"PATCH"`, `"PUT"` and `"DELETE"` requests via the `_method` parameter and
+  // set a `X-Http-Method-Override` header.
+  Backbone.emulateHTTP = false;
+
+  // Turn on `emulateJSON` to support legacy servers that can't deal with direct
+  // `application/json` requests ... this will encode the body as
+  // `application/x-www-form-urlencoded` instead and will send the model in a
+  // form param named `model`.
+  Backbone.emulateJSON = false;
+
+  // Proxy Backbone class methods to Underscore functions, wrapping the model's
+  // `attributes` object or collection's `models` array behind the scenes.
+  //
+  // collection.filter(function(model) { return model.get('age') > 10 });
+  // collection.each(this.addView);
+  //
+  // `Function#apply` can be slow so we use the method's arg count, if we know it.
+  var addMethod = function(length, method, attribute) {
+    switch (length) {
+      case 1: return function() {
+        return _[method](this[attribute]);
+      };
+      case 2: return function(value) {
+        return _[method](this[attribute], value);
+      };
+      case 3: return function(iteratee, context) {
+        return _[method](this[attribute], cb(iteratee, this), context);
+      };
+      case 4: return function(iteratee, defaultVal, context) {
+        return _[method](this[attribute], cb(iteratee, this), defaultVal, context);
+      };
+      default: return function() {
+        var args = slice.call(arguments);
+        args.unshift(this[attribute]);
+        return _[method].apply(_, args);
+      };
+    }
+  };
+  var addUnderscoreMethods = function(Class, methods, attribute) {
+    _.each(methods, function(length, method) {
+      if (_[method]) Class.prototype[method] = addMethod(length, method, attribute);
+    });
+  };
+
+  // Support `collection.sortBy('attr')` and `collection.findWhere({id: 1})`.
+  var cb = function(iteratee, instance) {
+    if (_.isFunction(iteratee)) return iteratee;
+    if (_.isObject(iteratee) && !instance._isModel(iteratee)) return modelMatcher(iteratee);
+    if (_.isString(iteratee)) return function(model) { return model.get(iteratee); };
+    return iteratee;
+  };
+  var modelMatcher = function(attrs) {
+    var matcher = _.matches(attrs);
+    return function(model) {
+      return matcher(model.attributes);
+    };
+  };
+
+  // Backbone.Events
+  // ---------------
+
+  // A module that can be mixed in to *any object* in order to provide it with
+  // a custom event channel. You may bind a callback to an event with `on` or
+  // remove with `off`; `trigger`-ing an event fires all callbacks in
+  // succession.
+  //
+  //     var object = {};
+  //     _.extend(object, Backbone.Events);
+  //     object.on('expand', function(){ alert('expanded'); });
+  //     object.trigger('expand');
+  //
+  var Events = Backbone.Events = {};
+
+  // Regular expression used to split event strings.
+  var eventSplitter = /\s+/;
+
+  // Iterates over the standard `event, callback` (as well as the fancy multiple
+  // space-separated events `"change blur", callback` and jQuery-style event
+  // maps `{event: callback}`).
+  var eventsApi = function(iteratee, events, name, callback, opts) {
+    var i = 0, names;
+    if (name && typeof name === 'object') {
+      // Handle event maps.
+      if (callback !== void 0 && 'context' in opts && opts.context === void 0) opts.context = callback;
+      for (names = _.keys(name); i < names.length ; i++) {
+        events = eventsApi(iteratee, events, names[i], name[names[i]], opts);
+      }
+    } else if (name && eventSplitter.test(name)) {
+      // Handle space-separated event names by delegating them individually.
+      for (names = name.split(eventSplitter); i < names.length; i++) {
+        events = iteratee(events, names[i], callback, opts);
+      }
+    } else {
+      // Finally, standard events.
+      events = iteratee(events, name, callback, opts);
+    }
+    return events;
+  };
+
+  // Bind an event to a `callback` function. Passing `"all"` will bind
+  // the callback to all events fired.
+  Events.on = function(name, callback, context) {
+    return internalOn(this, name, callback, context);
+  };
+
+  // Guard the `listening` argument from the public API.
+  var internalOn = function(obj, name, callback, context, listening) {
+    obj._events = eventsApi(onApi, obj._events || {}, name, callback, {
+      context: context,
+      ctx: obj,
+      listening: listening
+    });
+
+    if (listening) {
+      var listeners = obj._listeners || (obj._listeners = {});
+      listeners[listening.id] = listening;
+    }
+
+    return obj;
+  };
+
+  // Inversion-of-control versions of `on`. Tell *this* object to listen to
+  // an event in another object... keeping track of what it's listening to
+  // for easier unbinding later.
+  Events.listenTo = function(obj, name, callback) {
+    if (!obj) return this;
+    var id = obj._listenId || (obj._listenId = _.uniqueId('l'));
+    var listeningTo = this._listeningTo || (this._listeningTo = {});
+    var listening = listeningTo[id];
+
+    // This object is not listening to any other events on `obj` yet.
+    // Setup the necessary references to track the listening callbacks.
+    if (!listening) {
+      var thisId = this._listenId || (this._listenId = _.uniqueId('l'));
+      listening = listeningTo[id] = {obj: obj, objId: id, id: thisId, listeningTo: listeningTo, count: 0};
+    }
+
+    // Bind callbacks on obj, and keep track of them on listening.
+    internalOn(obj, name, callback, this, listening);
+    return this;
+  };
+
+  // The reducing API that adds a callback to the `events` object.
+  var onApi = function(events, name, callback, options) {
+    if (callback) {
+      var handlers = events[name] || (events[name] = []);
+      var context = options.context, ctx = options.ctx, listening = options.listening;
+      if (listening) listening.count++;
+
+      handlers.push({callback: callback, context: context, ctx: context || ctx, listening: listening});
+    }
+    return events;
+  };
+
+  // Remove one or many callbacks. If `context` is null, removes all
+  // callbacks with that function. If `callback` is null, removes all
+  // callbacks for the event. If `name` is null, removes all bound
+  // callbacks for all events.
+  Events.off = function(name, callback, context) {
+    if (!this._events) return this;
+    this._events = eventsApi(offApi, this._events, name, callback, {
+      context: context,
+      listeners: this._listeners
+    });
+    return this;
+  };
+
+  // Tell this object to stop listening to either specific events ... or
+  // to every object it's currently listening to.
+  Events.stopListening = function(obj, name, callback) {
+    var listeningTo = this._listeningTo;
+    if (!listeningTo) return this;
+
+    var ids = obj ? [obj._listenId] : _.keys(listeningTo);
+
+    for (var i = 0; i < ids.length; i++) {
+      var listening = listeningTo[ids[i]];
+
+      // If listening doesn't exist, this object is not currently
+      // listening to obj. Break out early.
+      if (!listening) break;
+
+      listening.obj.off(name, callback, this);
+    }
+
+    return this;
+  };
+
+  // The reducing API that removes a callback from the `events` object.
+  var offApi = function(events, name, callback, options) {
+    if (!events) return;
+
+    var i = 0, listening;
+    var context = options.context, listeners = options.listeners;
+
+    // Delete all events listeners and "drop" events.
+    if (!name && !callback && !context) {
+      var ids = _.keys(listeners);
+      for (; i < ids.length; i++) {
+        listening = listeners[ids[i]];
+        delete listeners[listening.id];
+        delete listening.listeningTo[listening.objId];
+      }
+      return;
+    }
+
+    var names = name ? [name] : _.keys(events);
+    for (; i < names.length; i++) {
+      name = names[i];
+      var handlers = events[name];
+
+      // Bail out if there are no events stored.
+      if (!handlers) break;
+
+      // Replace events if there are any remaining.  Otherwise, clean up.
+      var remaining = [];
+      for (var j = 0; j < handlers.length; j++) {
+        var handler = handlers[j];
+        if (
+          callback && callback !== handler.callback &&
+            callback !== handler.callback._callback ||
+              context && context !== handler.context
+        ) {
+          remaining.push(handler);
+        } else {
+          listening = handler.listening;
+          if (listening && --listening.count === 0) {
+            delete listeners[listening.id];
+            delete listening.listeningTo[listening.objId];
+          }
+        }
+      }
+
+      // Update tail event if the list has any events.  Otherwise, clean up.
+      if (remaining.length) {
+        events[name] = remaining;
+      } else {
+        delete events[name];
+      }
+    }
+    return events;
+  };
+
+  // Bind an event to only be triggered a single time. After the first time
+  // the callback is invoked, its listener will be removed. If multiple events
+  // are passed in using the space-separated syntax, the handler will fire
+  // once for each event, not once for a combination of all events.
+  Events.once = function(name, callback, context) {
+    // Map the event into a `{event: once}` object.
+    var events = eventsApi(onceMap, {}, name, callback, _.bind(this.off, this));
+    if (typeof name === 'string' && context == null) callback = void 0;
+    return this.on(events, callback, context);
+  };
+
+  // Inversion-of-control versions of `once`.
+  Events.listenToOnce = function(obj, name, callback) {
+    // Map the event into a `{event: once}` object.
+    var events = eventsApi(onceMap, {}, name, callback, _.bind(this.stopListening, this, obj));
+    return this.listenTo(obj, events);
+  };
+
+  // Reduces the event callbacks into a map of `{event: onceWrapper}`.
+  // `offer` unbinds the `onceWrapper` after it has been called.
+  var onceMap = function(map, name, callback, offer) {
+    if (callback) {
+      var once = map[name] = _.once(function() {
+        offer(name, once);
+        callback.apply(this, arguments);
+      });
+      once._callback = callback;
+    }
+    return map;
+  };
+
+  // Trigger one or many events, firing all bound callbacks. Callbacks are
+  // passed the same arguments as `trigger` is, apart from the event name
+  // (unless you're listening on `"all"`, which will cause your callback to
+  // receive the true name of the event as the first argument).
+  Events.trigger = function(name) {
+    if (!this._events) return this;
+
+    var length = Math.max(0, arguments.length - 1);
+    var args = Array(length);
+    for (var i = 0; i < length; i++) args[i] = arguments[i + 1];
+
+    eventsApi(triggerApi, this._events, name, void 0, args);
+    return this;
+  };
+
+  // Handles triggering the appropriate event callbacks.
+  var triggerApi = function(objEvents, name, callback, args) {
+    if (objEvents) {
+      var events = objEvents[name];
+      var allEvents = objEvents.all;
+      if (events && allEvents) allEvents = allEvents.slice();
+      if (events) triggerEvents(events, args);
+      if (allEvents) triggerEvents(allEvents, [name].concat(args));
+    }
+    return objEvents;
+  };
+
+  // A difficult-to-believe, but optimized internal dispatch function for
+  // triggering events. Tries to keep the usual cases speedy (most internal
+  // Backbone events have 3 arguments).
+  var triggerEvents = function(events, args) {
+    var ev, i = -1, l = events.length, a1 = args[0], a2 = args[1], a3 = args[2];
+    switch (args.length) {
+      case 0: while (++i < l) (ev = events[i]).callback.call(ev.ctx); return;
+      case 1: while (++i < l) (ev = events[i]).callback.call(ev.ctx, a1); return;
+      case 2: while (++i < l) (ev = events[i]).callback.call(ev.ctx, a1, a2); return;
+      case 3: while (++i < l) (ev = events[i]).callback.call(ev.ctx, a1, a2, a3); return;
+      default: while (++i < l) (ev = events[i]).callback.apply(ev.ctx, args); return;
+    }
+  };
+
+  // Aliases for backwards compatibility.
+  Events.bind   = Events.on;
+  Events.unbind = Events.off;
+
+  // Allow the `Backbone` object to serve as a global event bus, for folks who
+  // want global "pubsub" in a convenient place.
+  _.extend(Backbone, Events);
+
+  // Backbone.Model
+  // --------------
+
+  // Backbone **Models** are the basic data object in the framework --
+  // frequently representing a row in a table in a database on your server.
+  // A discrete chunk of data and a bunch of useful, related methods for
+  // performing computations and transformations on that data.
+
+  // Create a new model with the specified attributes. A client id (`cid`)
+  // is automatically generated and assigned for you.
+  var Model = Backbone.Model = function(attributes, options) {
+    var attrs = attributes || {};
+    options || (options = {});
+    this.cid = _.uniqueId(this.cidPrefix);
+    this.attributes = {};
+    if (options.collection) this.collection = options.collection;
+    if (options.parse) attrs = this.parse(attrs, options) || {};
+    var defaults = _.result(this, 'defaults');
+    attrs = _.defaults(_.extend({}, defaults, attrs), defaults);
+    this.set(attrs, options);
+    this.changed = {};
+    this.initialize.apply(this, arguments);
+  };
+
+  // Attach all inheritable methods to the Model prototype.
+  _.extend(Model.prototype, Events, {
+
+    // A hash of attributes whose current and previous value differ.
+    changed: null,
+
+    // The value returned during the last failed validation.
+    validationError: null,
+
+    // The default name for the JSON `id` attribute is `"id"`. MongoDB and
+    // CouchDB users may want to set this to `"_id"`.
+    idAttribute: 'id',
+
+    // The prefix is used to create the client id which is used to identify models locally.
+    // You may want to override this if you're experiencing name clashes with model ids.
+    cidPrefix: 'c',
+
+    // Initialize is an empty function by default. Override it with your own
+    // initialization logic.
+    initialize: function(){},
+
+    // Return a copy of the model's `attributes` object.
+    toJSON: function(options) {
+      return _.clone(this.attributes);
+    },
+
+    // Proxy `Backbone.sync` by default -- but override this if you need
+    // custom syncing semantics for *this* particular model.
+    sync: function() {
+      return Backbone.sync.apply(this, arguments);
+    },
+
+    // Get the value of an attribute.
+    get: function(attr) {
+      return this.attributes[attr];
+    },
+
+    // Get the HTML-escaped value of an attribute.
+    escape: function(attr) {
+      return _.escape(this.get(attr));
+    },
+
+    // Returns `true` if the attribute contains a value that is not null
+    // or undefined.
+    has: function(attr) {
+      return this.get(attr) != null;
+    },
+
+    // Special-cased proxy to underscore's `_.matches` method.
+    matches: function(attrs) {
+      return !!_.iteratee(attrs, this)(this.attributes);
+    },
+
+    // Set a hash of model attributes on the object, firing `"change"`. This is
+    // the core primitive operation of a model, updating the data and notifying
+    // anyone who needs to know about the change in state. The heart of the beast.
+    set: function(key, val, options) {
+      if (key == null) return this;
+
+      // Handle both `"key", value` and `{key: value}` -style arguments.
+      var attrs;
+      if (typeof key === 'object') {
+        attrs = key;
+        options = val;
+      } else {
+        (attrs = {})[key] = val;
+      }
+
+      options || (options = {});
+
+      // Run validation.
+      if (!this._validate(attrs, options)) return false;
+
+      // Extract attributes and options.
+      var unset      = options.unset;
+      var silent     = options.silent;
+      var changes    = [];
+      var changing   = this._changing;
+      this._changing = true;
+
+      if (!changing) {
+        this._previousAttributes = _.clone(this.attributes);
+        this.changed = {};
+      }
+
+      var current = this.attributes;
+      var changed = this.changed;
+      var prev    = this._previousAttributes;
+
+      // For each `set` attribute, update or delete the current value.
+      for (var attr in attrs) {
+        val = attrs[attr];
+        if (!_.isEqual(current[attr], val)) changes.push(attr);
+        if (!_.isEqual(prev[attr], val)) {
+          changed[attr] = val;
+        } else {
+          delete changed[attr];
+        }
+        unset ? delete current[attr] : current[attr] = val;
+      }
+
+      // Update the `id`.
+      if (this.idAttribute in attrs) this.id = this.get(this.idAttribute);
+
+      // Trigger all relevant attribute changes.
+      if (!silent) {
+        if (changes.length) this._pending = options;
+        for (var i = 0; i < changes.length; i++) {
+          this.trigger('change:' + changes[i], this, current[changes[i]], options);
+        }
+      }
+
+      // You might be wondering why there's a `while` loop here. Changes can
+      // be recursively nested within `"change"` events.
+      if (changing) return this;
+      if (!silent) {
+        while (this._pending) {
+          options = this._pending;
+          this._pending = false;
+          this.trigger('change', this, options);
+        }
+      }
+      this._pending = false;
+      this._changing = false;
+      return this;
+    },
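+
+    // An illustrative usage sketch (not part of the upstream source),
+    // showing both calling conventions and the events that result:
+    //
+    //     var note = new Backbone.Model({title: 'Draft'});
+    //     note.on('change:title', function(model, value) { /* ... */ });
+    //     note.set('title', 'Final');              // `"key", value` style
+    //     note.set({title: 'Final', read: true});  // `{key: value}` style
+    //     note.set('read', false, {silent: true}); // no events fired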
+
+    // Remove an attribute from the model, firing `"change"`. `unset` is a noop
+    // if the attribute doesn't exist.
+    unset: function(attr, options) {
+      return this.set(attr, void 0, _.extend({}, options, {unset: true}));
+    },
+
+    // Clear all attributes on the model, firing `"change"`.
+    clear: function(options) {
+      var attrs = {};
+      for (var key in this.attributes) attrs[key] = void 0;
+      return this.set(attrs, _.extend({}, options, {unset: true}));
+    },
+
+    // Determine if the model has changed since the last `"change"` event.
+    // If you specify an attribute name, determine if that attribute has changed.
+    hasChanged: function(attr) {
+      if (attr == null) return !_.isEmpty(this.changed);
+      return _.has(this.changed, attr);
+    },
+
+    // Return an object containing all the attributes that have changed, or
+    // false if there are no changed attributes. Useful for determining what
+    // parts of a view need to be updated and/or what attributes need to be
+    // persisted to the server. Unset attributes will be set to undefined.
+    // You can also pass an attributes object to diff against the model,
+    // determining if there *would be* a change.
+    changedAttributes: function(diff) {
+      if (!diff) return this.hasChanged() ? _.clone(this.changed) : false;
+      var old = this._changing ? this._previousAttributes : this.attributes;
+      var changed = {};
+      for (var attr in diff) {
+        var val = diff[attr];
+        if (_.isEqual(old[attr], val)) continue;
+        changed[attr] = val;
+      }
+      return _.size(changed) ? changed : false;
+    },
+
+    // Get the previous value of an attribute, recorded at the time the last
+    // `"change"` event was fired.
+    previous: function(attr) {
+      if (attr == null || !this._previousAttributes) return null;
+      return this._previousAttributes[attr];
+    },
+
+    // Get all of the attributes of the model at the time of the previous
+    // `"change"` event.
+    previousAttributes: function() {
+      return _.clone(this._previousAttributes);
+    },
+
+    // Fetch the model from the server, merging the response with the model's
+    // local attributes. Any changed attributes will trigger a "change" event.
+    fetch: function(options) {
+      options = _.extend({parse: true}, options);
+      var model = this;
+      var success = options.success;
+      options.success = function(resp) {
+        var serverAttrs = options.parse ? model.parse(resp, options) : resp;
+        if (!model.set(serverAttrs, options)) return false;
+        if (success) success.call(options.context, model, resp, options);
+        model.trigger('sync', model, resp, options);
+      };
+      wrapError(this, options);
+      return this.sync('read', this, options);
+    },
+
+    // Set a hash of model attributes, and sync the model to the server.
+    // If the server returns an attributes hash that differs, the model's
+    // state will be `set` again.
+    save: function(key, val, options) {
+      // Handle both `"key", value` and `{key: value}` -style arguments.
+      var attrs;
+      if (key == null || typeof key === 'object') {
+        attrs = key;
+        options = val;
+      } else {
+        (attrs = {})[key] = val;
+      }
+
+      options = _.extend({validate: true, parse: true}, options);
+      var wait = options.wait;
+
+      // If we're not waiting and attributes exist, save acts as
+      // `set(attr).save(null, opts)` with validation. Otherwise, check if
+      // the model will be valid when the attributes, if any, are set.
+      if (attrs && !wait) {
+        if (!this.set(attrs, options)) return false;
+      } else if (!this._validate(attrs, options)) {
+        return false;
+      }
+
+      // After a successful server-side save, the client is (optionally)
+      // updated with the server-side state.
+      var model = this;
+      var success = options.success;
+      var attributes = this.attributes;
+      options.success = function(resp) {
+        // Ensure attributes are restored during synchronous saves.
+        model.attributes = attributes;
+        var serverAttrs = options.parse ? model.parse(resp, options) : resp;
+        if (wait) serverAttrs = _.extend({}, attrs, serverAttrs);
+        if (serverAttrs && !model.set(serverAttrs, options)) return false;
+        if (success) success.call(options.context, model, resp, options);
+        model.trigger('sync', model, resp, options);
+      };
+      wrapError(this, options);
+
+      // Set temporary attributes if `{wait: true}` to properly find new ids.
+      if (attrs && wait) this.attributes = _.extend({}, attributes, attrs);
+
+      var method = this.isNew() ? 'create' : (options.patch ? 'patch' : 'update');
+      if (method === 'patch' && !options.attrs) options.attrs = attrs;
+      var xhr = this.sync(method, this, options);
+
+      // Restore attributes.
+      this.attributes = attributes;
+
+      return xhr;
+    },
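+
+    // An illustrative usage sketch, assuming a REST backend reachable at the
+    // model's `url()` (the `/notes` endpoint here is hypothetical):
+    //
+    //     var note = new Backbone.Model({id: 7, title: 'Draft'});
+    //     note.urlRoot = '/notes';
+    //     note.save({title: 'Final'}, {wait: true}); // PUT /notes/7; set on success
+    //     note.save({read: true}, {patch: true});    // PATCH /notes/7; sends passed attrs only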
+
+    // Destroy this model on the server if it was already persisted.
+    // Optimistically removes the model from its collection, if it has one.
+    // If `wait: true` is passed, waits for the server to respond before removal.
+    destroy: function(options) {
+      options = options ? _.clone(options) : {};
+      var model = this;
+      var success = options.success;
+      var wait = options.wait;
+
+      var destroy = function() {
+        model.stopListening();
+        model.trigger('destroy', model, model.collection, options);
+      };
+
+      options.success = function(resp) {
+        if (wait) destroy();
+        if (success) success.call(options.context, model, resp, options);
+        if (!model.isNew()) model.trigger('sync', model, resp, options);
+      };
+
+      var xhr = false;
+      if (this.isNew()) {
+        _.defer(options.success);
+      } else {
+        wrapError(this, options);
+        xhr = this.sync('delete', this, options);
+      }
+      if (!wait) destroy();
+      return xhr;
+    },
+
+    // Default URL for the model's representation on the server -- if you're
+    // using Backbone's restful methods, override this to change the endpoint
+    // that will be called.
+    url: function() {
+      var base =
+        _.result(this, 'urlRoot') ||
+        _.result(this.collection, 'url') ||
+        urlError();
+      if (this.isNew()) return base;
+      var id = this.get(this.idAttribute);
+      return base.replace(/[^\/]$/, '$&/') + encodeURIComponent(id);
+    },
+
+    // **parse** converts a response into the hash of attributes to be `set` on
+    // the model. The default implementation is just to pass the response along.
+    parse: function(resp, options) {
+      return resp;
+    },
+
+    // Create a new model with identical attributes to this one.
+    clone: function() {
+      return new this.constructor(this.attributes);
+    },
+
+    // A model is new if it has never been saved to the server, and lacks an id.
+    isNew: function() {
+      return !this.has(this.idAttribute);
+    },
+
+    // Check if the model is currently in a valid state.
+    isValid: function(options) {
+      return this._validate({}, _.extend({}, options, {validate: true}));
+    },
+
+    // Run validation against the next complete set of model attributes,
+    // returning `true` if all is well. Otherwise, fire an `"invalid"` event.
+    _validate: function(attrs, options) {
+      if (!options.validate || !this.validate) return true;
+      attrs = _.extend({}, this.attributes, attrs);
+      var error = this.validationError = this.validate(attrs, options) || null;
+      if (!error) return true;
+      this.trigger('invalid', this, error, _.extend(options, {validationError: error}));
+      return false;
+    }
+
+  });
+
+  // Underscore methods that we want to implement on the Model, mapped to the
+  // number of arguments they take.
+  var modelMethods = {keys: 1, values: 1, pairs: 1, invert: 1, pick: 0,
+      omit: 0, chain: 1, isEmpty: 1};
+
+  // Mix in each Underscore method as a proxy to `Model#attributes`.
+  addUnderscoreMethods(Model, modelMethods, 'attributes');
+
+  // Backbone.Collection
+  // -------------------
+
+  // If models tend to represent a single row of data, a Backbone Collection is
+  // more analogous to a table full of data ... or a small slice or page of that
+  // table, or a collection of rows that belong together for a particular reason
+  // -- all of the messages in this particular folder, all of the documents
+  // belonging to this particular author, and so on. Collections maintain
+  // indexes of their models, both in order, and for lookup by `id`.
+
+  // Create a new **Collection**, perhaps to contain a specific type of `model`.
+  // If a `comparator` is specified, the Collection will maintain
+  // its models in sort order, as they're added and removed.
+  var Collection = Backbone.Collection = function(models, options) {
+    options || (options = {});
+    if (options.model) this.model = options.model;
+    if (options.comparator !== void 0) this.comparator = options.comparator;
+    this._reset();
+    this.initialize.apply(this, arguments);
+    if (models) this.reset(models, _.extend({silent: true}, options));
+  };
+
+  // Default options for `Collection#set`.
+  var setOptions = {add: true, remove: true, merge: true};
+  var addOptions = {add: true, remove: false};
+
+  // Splices `insert` into `array` at index `at`.
+  var splice = function(array, insert, at) {
+    at = Math.min(Math.max(at, 0), array.length);
+    var tail = Array(array.length - at);
+    var length = insert.length;
+    var i;
+    for (i = 0; i < tail.length; i++) tail[i] = array[i + at];
+    for (i = 0; i < length; i++) array[i + at] = insert[i];
+    for (i = 0; i < tail.length; i++) array[i + length + at] = tail[i];
+  };
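+
+  // For example (illustrative): `splice([1, 4, 5], [2, 3], 1)` mutates the
+  // first array into `[1, 2, 3, 4, 5]`.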
+
+  // Define the Collection's inheritable methods.
+  _.extend(Collection.prototype, Events, {
+
+    // The default model for a collection is just a **Backbone.Model**.
+    // This should be overridden in most cases.
+    model: Model,
+
+    // Initialize is an empty function by default. Override it with your own
+    // initialization logic.
+    initialize: function(){},
+
+    // The JSON representation of a Collection is an array of the
+    // models' attributes.
+    toJSON: function(options) {
+      return this.map(function(model) { return model.toJSON(options); });
+    },
+
+    // Proxy `Backbone.sync` by default.
+    sync: function() {
+      return Backbone.sync.apply(this, arguments);
+    },
+
+    // Add a model, or list of models to the set. `models` may be Backbone
+    // Models or raw JavaScript objects to be converted to Models, or any
+    // combination of the two.
+    add: function(models, options) {
+      return this.set(models, _.extend({merge: false}, options, addOptions));
+    },
+
+    // Remove a model, or a list of models from the set.
+    remove: function(models, options) {
+      options = _.extend({}, options);
+      var singular = !_.isArray(models);
+      models = singular ? [models] : models.slice();
+      var removed = this._removeModels(models, options);
+      if (!options.silent && removed.length) {
+        options.changes = {added: [], merged: [], removed: removed};
+        this.trigger('update', this, options);
+      }
+      return singular ? removed[0] : removed;
+    },
+
+    // Update a collection by `set`-ing a new list of models, adding new ones,
+    // removing models that are no longer present, and merging models that
+    // already exist in the collection, as necessary. Similar to **Model#set**,
+    // the core operation for updating the data contained by the collection.
+    set: function(models, options) {
+      if (models == null) return;
+
+      options = _.extend({}, setOptions, options);
+      if (options.parse && !this._isModel(models)) {
+        models = this.parse(models, options) || [];
+      }
+
+      var singular = !_.isArray(models);
+      models = singular ? [models] : models.slice();
+
+      var at = options.at;
+      if (at != null) at = +at;
+      if (at > this.length) at = this.length;
+      if (at < 0) at += this.length + 1;
+
+      var set = [];
+      var toAdd = [];
+      var toMerge = [];
+      var toRemove = [];
+      var modelMap = {};
+
+      var add = options.add;
+      var merge = options.merge;
+      var remove = options.remove;
+
+      var sort = false;
+      var sortable = this.comparator && at == null && options.sort !== false;
+      var sortAttr = _.isString(this.comparator) ? this.comparator : null;
+
+      // Turn bare objects into model references, and prevent invalid models
+      // from being added.
+      var model, i;
+      for (i = 0; i < models.length; i++) {
+        model = models[i];
+
+        // If a duplicate is found, prevent it from being added and
+        // optionally merge it into the existing model.
+        var existing = this.get(model);
+        if (existing) {
+          if (merge && model !== existing) {
+            var attrs = this._isModel(model) ? model.attributes : model;
+            if (options.parse) attrs = existing.parse(attrs, options);
+            existing.set(attrs, options);
+            toMerge.push(existing);
+            if (sortable && !sort) sort = existing.hasChanged(sortAttr);
+          }
+          if (!modelMap[existing.cid]) {
+            modelMap[existing.cid] = true;
+            set.push(existing);
+          }
+          models[i] = existing;
+
+        // If this is a new, valid model, push it to the `toAdd` list.
+        } else if (add) {
+          model = models[i] = this._prepareModel(model, options);
+          if (model) {
+            toAdd.push(model);
+            this._addReference(model, options);
+            modelMap[model.cid] = true;
+            set.push(model);
+          }
+        }
+      }
+
+      // Remove stale models.
+      if (remove) {
+        for (i = 0; i < this.length; i++) {
+          model = this.models[i];
+          if (!modelMap[model.cid]) toRemove.push(model);
+        }
+        if (toRemove.length) this._removeModels(toRemove, options);
+      }
+
+      // See if sorting is needed, update `length` and splice in new models.
+      var orderChanged = false;
+      var replace = !sortable && add && remove;
+      if (set.length && replace) {
+        orderChanged = this.length !== set.length || _.some(this.models, function(m, index) {
+          return m !== set[index];
+        });
+        this.models.length = 0;
+        splice(this.models, set, 0);
+        this.length = this.models.length;
+      } else if (toAdd.length) {
+        if (sortable) sort = true;
+        splice(this.models, toAdd, at == null ? this.length : at);
+        this.length = this.models.length;
+      }
+
+      // Silently sort the collection if appropriate.
+      if (sort) this.sort({silent: true});
+
+      // Unless silenced, it's time to fire all appropriate add/sort/update events.
+      if (!options.silent) {
+        for (i = 0; i < toAdd.length; i++) {
+          if (at != null) options.index = at + i;
+          model = toAdd[i];
+          model.trigger('add', model, this, options);
+        }
+        if (sort || orderChanged) this.trigger('sort', this, options);
+        if (toAdd.length || toRemove.length || toMerge.length) {
+          options.changes = {
+            added: toAdd,
+            removed: toRemove,
+            merged: toMerge
+          };
+          this.trigger('update', this, options);
+        }
+      }
+
+      // Return the added (or merged) model (or models).
+      return singular ? models[0] : models;
+    },
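+
+    // An illustrative usage sketch (not part of the upstream source): with
+    // the default options a single `set` adds, merges and removes at once.
+    //
+    //     var pets = new Backbone.Collection([
+    //       {id: 1, name: 'Rex'},
+    //       {id: 2, name: 'Tom'}
+    //     ]);
+    //     pets.set([{id: 2, name: 'Tommy'}, {id: 3, name: 'Fido'}]);
+    //     // => id 1 removed, id 2 merged, id 3 added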
+
+    // When you have more items than you want to add or remove individually,
+    // you can reset the entire set with a new list of models, without firing
+    // any granular `add` or `remove` events. Fires `reset` when finished.
+    // Useful for bulk operations and optimizations.
+    reset: function(models, options) {
+      options = options ? _.clone(options) : {};
+      for (var i = 0; i < this.models.length; i++) {
+        this._removeReference(this.models[i], options);
+      }
+      options.previousModels = this.models;
+      this._reset();
+      models = this.add(models, _.extend({silent: true}, options));
+      if (!options.silent) this.trigger('reset', this, options);
+      return models;
+    },
+
+    // Add a model to the end of the collection.
+    push: function(model, options) {
+      return this.add(model, _.extend({at: this.length}, options));
+    },
+
+    // Remove a model from the end of the collection.
+    pop: function(options) {
+      var model = this.at(this.length - 1);
+      return this.remove(model, options);
+    },
+
+    // Add a model to the beginning of the collection.
+    unshift: function(model, options) {
+      return this.add(model, _.extend({at: 0}, options));
+    },
+
+    // Remove a model from the beginning of the collection.
+    shift: function(options) {
+      var model = this.at(0);
+      return this.remove(model, options);
+    },
+
+    // Slice out a sub-array of models from the collection.
+    slice: function() {
+      return slice.apply(this.models, arguments);
+    },
+
+    // Get a model from the set by id, cid, model object with id or cid
+    // properties, or an attributes object that is transformed through modelId.
+    get: function(obj) {
+      if (obj == null) return void 0;
+      return this._byId[obj] ||
+        this._byId[this.modelId(obj.attributes || obj)] ||
+        obj.cid && this._byId[obj.cid];
+    },
+
+    // Returns `true` if the model is in the collection.
+    has: function(obj) {
+      return this.get(obj) != null;
+    },
+
+    // Get the model at the given index.
+    at: function(index) {
+      if (index < 0) index += this.length;
+      return this.models[index];
+    },
+
+    // Return models with matching attributes. Useful for simple cases of
+    // `filter`.
+    where: function(attrs, first) {
+      return this[first ? 'find' : 'filter'](attrs);
+    },
+
+    // Return the first model with matching attributes. Useful for simple cases
+    // of `find`.
+    findWhere: function(attrs) {
+      return this.where(attrs, true);
+    },
+
+    // Force the collection to re-sort itself. You don't need to call this under
+    // normal circumstances, as the set will maintain sort order as each item
+    // is added.
+    sort: function(options) {
+      var comparator = this.comparator;
+      if (!comparator) throw new Error('Cannot sort a set without a comparator');
+      options || (options = {});
+
+      var length = comparator.length;
+      if (_.isFunction(comparator)) comparator = _.bind(comparator, this);
+
+      // Run sort based on type of `comparator`.
+      if (length === 1 || _.isString(comparator)) {
+        this.models = this.sortBy(comparator);
+      } else {
+        this.models.sort(comparator);
+      }
+      if (!options.silent) this.trigger('sort', this, options);
+      return this;
+    },
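+
+    // Illustrative comparator forms (a sketch, reusing the `pets` collection
+    // from the example above): a string attribute name or a single-argument
+    // function sorts via `sortBy`; a two-argument function is handed
+    // straight to `Array#sort`.
+    //
+    //     pets.comparator = 'name';
+    //     pets.comparator = function(model) { return model.get('name'); };
+    //     pets.comparator = function(a, b) {
+    //       return a.get('name') < b.get('name') ? -1 : 1;
+    //     };
+    //     pets.sort();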
+
+    // Pluck an attribute from each model in the collection.
+    pluck: function(attr) {
+      return this.map(attr + '');
+    },
+
+    // Fetch the default set of models for this collection, resetting the
+    // collection when they arrive. If `reset: true` is passed, the response
+    // data will be passed through the `reset` method instead of `set`.
+    fetch: function(options) {
+      options = _.extend({parse: true}, options);
+      var success = options.success;
+      var collection = this;
+      options.success = function(resp) {
+        var method = options.reset ? 'reset' : 'set';
+        collection[method](resp, options);
+        if (success) success.call(options.context, collection, resp, options);
+        collection.trigger('sync', collection, resp, options);
+      };
+      wrapError(this, options);
+      return this.sync('read', this, options);
+    },
+
+    // Create a new instance of a model in this collection. Add the model to the
+    // collection immediately, unless `wait: true` is passed, in which case we
+    // wait for the server to agree.
+    create: function(model, options) {
+      options = options ? _.clone(options) : {};
+      var wait = options.wait;
+      model = this._prepareModel(model, options);
+      if (!model) return false;
+      if (!wait) this.add(model, options);
+      var collection = this;
+      var success = options.success;
+      options.success = function(m, resp, callbackOpts) {
+        if (wait) collection.add(m, callbackOpts);
+        if (success) success.call(callbackOpts.context, m, resp, callbackOpts);
+      };
+      model.save(null, options);
+      return model;
+    },
+
+    // **parse** converts a response into a list of models to be added to the
+    // collection. The default implementation is just to pass it through.
+    parse: function(resp, options) {
+      return resp;
+    },
+
+    // Create a new collection with an identical list of models as this one.
+    clone: function() {
+      return new this.constructor(this.models, {
+        model: this.model,
+        comparator: this.comparator
+      });
+    },
+
+    // Define how to uniquely identify models in the collection.
+    modelId: function(attrs) {
+      return attrs[this.model.prototype.idAttribute || 'id'];
+    },
+
+    // Private method to reset all internal state. Called when the collection
+    // is first initialized or reset.
+    _reset: function() {
+      this.length = 0;
+      this.models = [];
+      this._byId  = {};
+    },
+
+    // Prepare a hash of attributes (or other model) to be added to this
+    // collection.
+    _prepareModel: function(attrs, options) {
+      if (this._isModel(attrs)) {
+        if (!attrs.collection) attrs.collection = this;
+        return attrs;
+      }
+      options = options ? _.clone(options) : {};
+      options.collection = this;
+      var model = new this.model(attrs, options);
+      if (!model.validationError) return model;
+      this.trigger('invalid', this, model.validationError, options);
+      return false;
+    },
+
+    // Internal method called by both remove and set.
+    _removeModels: function(models, options) {
+      var removed = [];
+      for (var i = 0; i < models.length; i++) {
+        var model = this.get(models[i]);
+        if (!model) continue;
+
+        var index = this.indexOf(model);
+        this.models.splice(index, 1);
+        this.length--;
+
+        // Remove references before triggering 'remove' event to prevent an
+        // infinite loop. #3693
+        delete this._byId[model.cid];
+        var id = this.modelId(model.attributes);
+        if (id != null) delete this._byId[id];
+
+        if (!options.silent) {
+          options.index = index;
+          model.trigger('remove', model, this, options);
+        }
+
+        removed.push(model);
+        this._removeReference(model, options);
+      }
+      return removed;
+    },
+
+    // Method for checking whether an object should be considered a model for
+    // the purposes of adding to the collection.
+    _isModel: function(model) {
+      return model instanceof Model;
+    },
+
+    // Internal method to create a model's ties to a collection.
+    _addReference: function(model, options) {
+      this._byId[model.cid] = model;
+      var id = this.modelId(model.attributes);
+      if (id != null) this._byId[id] = model;
+      model.on('all', this._onModelEvent, this);
+    },
+
+    // Internal method to sever a model's ties to a collection.
+    _removeReference: function(model, options) {
+      delete this._byId[model.cid];
+      var id = this.modelId(model.attributes);
+      if (id != null) delete this._byId[id];
+      if (this === model.collection) delete model.collection;
+      model.off('all', this._onModelEvent, this);
+    },
+
+    // Internal method called every time a model in the set fires an event.
+    // Sets need to update their indexes when models change ids. All other
+    // events simply proxy through. "add" and "remove" events that originate
+    // in other collections are ignored.
+    _onModelEvent: function(event, model, collection, options) {
+      if (model) {
+        if ((event === 'add' || event === 'remove') && collection !== this) return;
+        if (event === 'destroy') this.remove(model, options);
+        if (event === 'change') {
+          var prevId = this.modelId(model.previousAttributes());
+          var id = this.modelId(model.attributes);
+          if (prevId !== id) {
+            if (prevId != null) delete this._byId[prevId];
+            if (id != null) this._byId[id] = model;
+          }
+        }
+      }
+      this.trigger.apply(this, arguments);
+    }
+
+  });
+
+  // Underscore methods that we want to implement on the Collection.
+  // 90% of the core usefulness of Backbone Collections is actually implemented
+  // right here:
+  var collectionMethods = {forEach: 3, each: 3, map: 3, collect: 3, reduce: 0,
+      foldl: 0, inject: 0, reduceRight: 0, foldr: 0, find: 3, detect: 3, filter: 3,
+      select: 3, reject: 3, every: 3, all: 3, some: 3, any: 3, include: 3, includes: 3,
+      contains: 3, invoke: 0, max: 3, min: 3, toArray: 1, size: 1, first: 3,
+      head: 3, take: 3, initial: 3, rest: 3, tail: 3, drop: 3, last: 3,
+      without: 0, difference: 0, indexOf: 3, shuffle: 1, lastIndexOf: 3,
+      isEmpty: 1, chain: 1, sample: 3, partition: 3, groupBy: 3, countBy: 3,
+      sortBy: 3, indexBy: 3, findIndex: 3, findLastIndex: 3};
+
+  // Mix in each Underscore method as a proxy to `Collection#models`.
+  addUnderscoreMethods(Collection, collectionMethods, 'models');
+
+  // Backbone.View
+  // -------------
+
+  // Backbone Views are almost more convention than they are actual code. A View
+  // is simply a JavaScript object that represents a logical chunk of UI in the
+  // DOM. This might be a single item, an entire list, a sidebar or panel, or
+  // even the surrounding frame which wraps your whole app. Defining a chunk of
+  // UI as a **View** allows you to define your DOM events declaratively, without
+  // having to worry about render order ... and makes it easy for the view to
+  // react to specific changes in the state of your models.
+
+  // Creating a Backbone.View creates its initial element outside of the DOM,
+  // if an existing element is not provided...
+  var View = Backbone.View = function(options) {
+    this.cid = _.uniqueId('view');
+    _.extend(this, _.pick(options, viewOptions));
+    this._ensureElement();
+    this.initialize.apply(this, arguments);
+  };
+
+  // Cached regex to split keys for `delegate`.
+  var delegateEventSplitter = /^(\S+)\s*(.*)$/;
+
+  // List of view options to be set as properties.
+  var viewOptions = ['model', 'collection', 'el', 'id', 'attributes', 'className', 'tagName', 'events'];
+
+  // Set up all inheritable **Backbone.View** properties and methods.
+  _.extend(View.prototype, Events, {
+
+    // The default `tagName` of a View's element is `"div"`.
+    tagName: 'div',
+
+    // jQuery delegate for element lookup, scoped to DOM elements within the
+    // current view. This should be preferred to global lookups where possible.
+    $: function(selector) {
+      return this.$el.find(selector);
+    },
+
+    // Initialize is an empty function by default. Override it with your own
+    // initialization logic.
+    initialize: function(){},
+
+    // **render** is the core function that your view should override, in order
+    // to populate its element (`this.el`), with the appropriate HTML. The
+    // convention is for **render** to always return `this`.
+    render: function() {
+      return this;
+    },
+
+    // Remove this view by taking the element out of the DOM, and removing any
+    // applicable Backbone.Events listeners.
+    remove: function() {
+      this._removeElement();
+      this.stopListening();
+      return this;
+    },
+
+    // Remove this view's element from the document and all event listeners
+    // attached to it. Exposed for subclasses using an alternative DOM
+    // manipulation API.
+    _removeElement: function() {
+      this.$el.remove();
+    },
+
+    // Change the view's element (`this.el` property) and re-delegate the
+    // view's events on the new element.
+    setElement: function(element) {
+      this.undelegateEvents();
+      this._setElement(element);
+      this.delegateEvents();
+      return this;
+    },
+
+    // Creates the `this.el` and `this.$el` references for this view using the
+    // given `el`. `el` can be a CSS selector or an HTML string, a jQuery
+    // context or an element. Subclasses can override this to utilize an
+    // alternative DOM manipulation API and are only required to set the
+    // `this.el` property.
+    _setElement: function(el) {
+      this.$el = el instanceof Backbone.$ ? el : Backbone.$(el);
+      this.el = this.$el[0];
+    },
+
+    // Set callbacks, where `this.events` is a hash of
+    //
+    // *{"event selector": "callback"}*
+    //
+    //     {
+    //       'mousedown .title':  'edit',
+    //       'click .button':     'save',
+    //       'click .open':       function(e) { ... }
+    //     }
+    //
+    // pairs. Callbacks will be bound to the view, with `this` set properly.
+    // Uses event delegation for efficiency.
+    // Omitting the selector binds the event to `this.el`.
+    delegateEvents: function(events) {
+      events || (events = _.result(this, 'events'));
+      if (!events) return this;
+      this.undelegateEvents();
+      for (var key in events) {
+        var method = events[key];
+        if (!_.isFunction(method)) method = this[method];
+        if (!method) continue;
+        var match = key.match(delegateEventSplitter);
+        this.delegate(match[1], match[2], _.bind(method, this));
+      }
+      return this;
+    },
+
+    // Add a single event listener to the view's element (or a child element
+    // using `selector`). This only works for delegate-able events: not `focus`,
+    // `blur`, and not `change`, `submit`, and `reset` in Internet Explorer.
+    delegate: function(eventName, selector, listener) {
+      this.$el.on(eventName + '.delegateEvents' + this.cid, selector, listener);
+      return this;
+    },
+
+    // Clears all callbacks previously bound to the view by `delegateEvents`.
+    // You usually don't need to use this, but may wish to if you have multiple
+    // Backbone views attached to the same DOM element.
+    undelegateEvents: function() {
+      if (this.$el) this.$el.off('.delegateEvents' + this.cid);
+      return this;
+    },
+
+    // A finer-grained `undelegateEvents` for removing a single delegated event.
+    // `selector` and `listener` are both optional.
+    undelegate: function(eventName, selector, listener) {
+      this.$el.off(eventName + '.delegateEvents' + this.cid, selector, listener);
+      return this;
+    },
+
+    // Produces a DOM element to be assigned to your view. Exposed for
+    // subclasses using an alternative DOM manipulation API.
+    _createElement: function(tagName) {
+      return document.createElement(tagName);
+    },
+
+    // Ensure that the View has a DOM element to render into.
+    // If `this.el` is a string, pass it through `$()`, take the first
+    // matching element, and re-assign it to `el`. Otherwise, create
+    // an element from the `id`, `className` and `tagName` properties.
+    _ensureElement: function() {
+      if (!this.el) {
+        var attrs = _.extend({}, _.result(this, 'attributes'));
+        if (this.id) attrs.id = _.result(this, 'id');
+        if (this.className) attrs['class'] = _.result(this, 'className');
+        this.setElement(this._createElement(_.result(this, 'tagName')));
+        this._setAttributes(attrs);
+      } else {
+        this.setElement(_.result(this, 'el'));
+      }
+    },
+
+    // Set attributes from a hash on this view's element.  Exposed for
+    // subclasses using an alternative DOM manipulation API.
+    _setAttributes: function(attributes) {
+      this.$el.attr(attributes);
+    }
+
+  });
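+
+  // An illustrative minimal view (a sketch, not part of the upstream
+  // source), combining `events`, model listeners and `render`:
+  //
+  //     var NoteView = Backbone.View.extend({
+  //       tagName: 'li',
+  //       events: {'click .title': 'open'},
+  //       initialize: function() {
+  //         this.listenTo(this.model, 'change', this.render);
+  //       },
+  //       render: function() {
+  //         this.$el.html('<span class="title">' +
+  //                       this.model.escape('title') + '</span>');
+  //         return this;
+  //       },
+  //       open: function() { /* ... */ }
+  //     });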
+
+  // Backbone.sync
+  // -------------
+
+  // Override this function to change the manner in which Backbone persists
+  // models to the server. You will be passed the type of request, and the
+  // model in question. By default, makes a RESTful Ajax request
+  // to the model's `url()`. Some possible customizations could be:
+  //
+  // * Use `setTimeout` to batch rapid-fire updates into a single request.
+  // * Send up the models as XML instead of JSON.
+  // * Persist models via WebSockets instead of Ajax.
+  //
+  // Turn on `Backbone.emulateHTTP` in order to send `PUT` and `DELETE` requests
+  // as `POST`, with a `_method` parameter containing the true HTTP method,
+  // as well as all requests with the body as `application/x-www-form-urlencoded`
+  // instead of `application/json` with the model in a param named `model`.
+  // Useful when interfacing with server-side languages like **PHP** that make
+  // it difficult to read the body of `PUT` requests.
+  Backbone.sync = function(method, model, options) {
+    var type = methodMap[method];
+
+    // Default options, unless specified.
+    _.defaults(options || (options = {}), {
+      emulateHTTP: Backbone.emulateHTTP,
+      emulateJSON: Backbone.emulateJSON
+    });
+
+    // Default JSON-request options.
+    var params = {type: type, dataType: 'json'};
+
+    // Ensure that we have a URL.
+    if (!options.url) {
+      params.url = _.result(model, 'url') || urlError();
+    }
+
+    // Ensure that we have the appropriate request data.
+    if (options.data == null && model && (method === 'create' || method === 'update' || method === 'patch')) {
+      params.contentType = 'application/json';
+      params.data = JSON.stringify(options.attrs || model.toJSON(options));
+    }
+
+    // For older servers, emulate JSON by encoding the request into an HTML-form.
+    if (options.emulateJSON) {
+      params.contentType = 'application/x-www-form-urlencoded';
+      params.data = params.data ? {model: params.data} : {};
+    }
+
+  // For older servers, emulate HTTP by mimicking the HTTP method with `_method`
+  // and an `X-HTTP-Method-Override` header.
+    if (options.emulateHTTP && (type === 'PUT' || type === 'DELETE' || type === 'PATCH')) {
+      params.type = 'POST';
+      if (options.emulateJSON) params.data._method = type;
+      var beforeSend = options.beforeSend;
+      options.beforeSend = function(xhr) {
+        xhr.setRequestHeader('X-HTTP-Method-Override', type);
+        if (beforeSend) return beforeSend.apply(this, arguments);
+      };
+    }
+
+    // Don't process data on a non-GET request.
+    if (params.type !== 'GET' && !options.emulateJSON) {
+      params.processData = false;
+    }
+
+    // Pass along `textStatus` and `errorThrown` from jQuery.
+    var error = options.error;
+    options.error = function(xhr, textStatus, errorThrown) {
+      options.textStatus = textStatus;
+      options.errorThrown = errorThrown;
+      if (error) error.call(options.context, xhr, textStatus, errorThrown);
+    };
+
+    // Make the request, allowing the user to override any Ajax options.
+    var xhr = options.xhr = Backbone.ajax(_.extend(params, options));
+    model.trigger('request', model, xhr, options);
+    return xhr;
+  };
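+
+  // Illustratively (a sketch; `existingModel` is hypothetical), with both
+  // emulation flags on, saving an existing model issues a form-encoded POST
+  // that carries the true method:
+  //
+  //     Backbone.emulateHTTP = true;   // `_method` param + override header
+  //     Backbone.emulateJSON = true;   // x-www-form-urlencoded body
+  //     existingModel.save();          // POST with _method=PUT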
+
+  // Map from CRUD to HTTP for our default `Backbone.sync` implementation.
+  var methodMap = {
+    'create': 'POST',
+    'update': 'PUT',
+    'patch': 'PATCH',
+    'delete': 'DELETE',
+    'read': 'GET'
+  };
+
+  // Set the default implementation of `Backbone.ajax` to proxy through to `$`.
+  // Override this if you'd like to use a different library.
+  Backbone.ajax = function() {
+    return Backbone.$.ajax.apply(Backbone.$, arguments);
+  };
+
+  // Backbone.Router
+  // ---------------
+
+  // Routers map faux-URLs to actions, and fire events when routes are
+  // matched. Creating a new one sets its `routes` hash, if not set statically.
+  var Router = Backbone.Router = function(options) {
+    options || (options = {});
+    if (options.routes) this.routes = options.routes;
+    this._bindRoutes();
+    this.initialize.apply(this, arguments);
+  };
+
+  // Cached regular expressions for matching named param parts and splatted
+  // parts of route strings.
+  var optionalParam = /\((.*?)\)/g;
+  var namedParam    = /(\(\?)?:\w+/g;
+  var splatParam    = /\*\w+/g;
+  var escapeRegExp  = /[\-{}\[\]+?.,\\\^$|#\s]/g;
+
+  // Set up all inheritable **Backbone.Router** properties and methods.
+  _.extend(Router.prototype, Events, {
+
+    // Initialize is an empty function by default. Override it with your own
+    // initialization logic.
+    initialize: function(){},
+
+    // Manually bind a single named route to a callback. For example:
+    //
+    //     this.route('search/:query/p:num', 'search', function(query, num) {
+    //       ...
+    //     });
+    //
+    route: function(route, name, callback) {
+      if (!_.isRegExp(route)) route = this._routeToRegExp(route);
+      if (_.isFunction(name)) {
+        callback = name;
+        name = '';
+      }
+      if (!callback) callback = this[name];
+      var router = this;
+      Backbone.history.route(route, function(fragment) {
+        var args = router._extractParameters(route, fragment);
+        if (router.execute(callback, args, name) !== false) {
+          router.trigger.apply(router, ['route:' + name].concat(args));
+          router.trigger('route', name, args);
+          Backbone.history.trigger('route', router, name, args);
+        }
+      });
+      return this;
+    },
+
+    // Execute a route handler with the provided parameters.  This is an
+    // excellent place to do pre-route setup or post-route cleanup.
+    execute: function(callback, args, name) {
+      if (callback) callback.apply(this, args);
+    },
+
+    // Simple proxy to `Backbone.history` to save a fragment into the history.
+    navigate: function(fragment, options) {
+      Backbone.history.navigate(fragment, options);
+      return this;
+    },
+
+    // Bind all defined routes to `Backbone.history`. We have to reverse the
+    // order of the routes here to support behavior where the most general
+    // routes can be defined at the bottom of the route map.
+    _bindRoutes: function() {
+      if (!this.routes) return;
+      this.routes = _.result(this, 'routes');
+      var route, routes = _.keys(this.routes);
+      while ((route = routes.pop()) != null) {
+        this.route(route, this.routes[route]);
+      }
+    },
+
+    // Convert a route string into a regular expression, suitable for matching
+    // against the current location hash.
+    _routeToRegExp: function(route) {
+      route = route.replace(escapeRegExp, '\\$&')
+                   .replace(optionalParam, '(?:$1)?')
+                   .replace(namedParam, function(match, optional) {
+                     return optional ? match : '([^/?]+)';
+                   })
+                   .replace(splatParam, '([^?]*?)');
+      return new RegExp('^' + route + '(?:\\?([\\s\\S]*))?$');
+    },
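+
+    // For example (illustrative): `'search/:query/p:num'` compiles to a
+    // regular expression with source
+    // `^search/([^/?]+)/p([^/?]+)(?:\?([\s\S]*))?$`, capturing each named
+    // segment plus the optional query string.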
+
+    // Given a route, and a URL fragment that it matches, return the array of
+    // extracted decoded parameters. Empty or unmatched parameters will be
+    // treated as `null` to normalize cross-browser behavior.
+    _extractParameters: function(route, fragment) {
+      var params = route.exec(fragment).slice(1);
+      return _.map(params, function(param, i) {
+        // Don't decode the search params.
+        if (i === params.length - 1) return param || null;
+        return param ? decodeURIComponent(param) : null;
+      });
+    }
+
+  });
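+
+  // An illustrative router (a sketch, not part of the upstream source),
+  // using the `routes` hash instead of manual `route` calls:
+  //
+  //     var AppRouter = Backbone.Router.extend({
+  //       routes: {
+  //         'help':                'help',    // #help
+  //         'search/:query/p:num': 'search'   // #search/kiwis/p7
+  //       },
+  //       help:   function() { /* ... */ },
+  //       search: function(query, num) { /* ... */ }
+  //     });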
+
+  // Backbone.History
+  // ----------------
+
+  // Handles cross-browser history management, based on either
+  // [pushState](http://diveintohtml5.info/history.html) and real URLs, or
+  // [onhashchange](https://developer.mozilla.org/en-US/docs/DOM/window.onhashchange)
+  // and URL fragments. If the browser supports neither (old IE, natch),
+  // falls back to polling.
+  var History = Backbone.History = function() {
+    this.handlers = [];
+    this.checkUrl = _.bind(this.checkUrl, this);
+
+    // Ensure that `History` can be used outside of the browser.
+    if (typeof window !== 'undefined') {
+      this.location = window.location;
+      this.history = window.history;
+    }
+  };
+
+  // Cached regex for stripping a leading hash/slash and trailing space.
+  var routeStripper = /^[#\/]|\s+$/g;
+
+  // Cached regex for stripping leading and trailing slashes.
+  var rootStripper = /^\/+|\/+$/g;
+
+  // Cached regex for stripping urls of hash.
+  var pathStripper = /#.*$/;
+
+  // Has the history handling already been started?
+  History.started = false;
+
+  // Set up all inheritable **Backbone.History** properties and methods.
+  _.extend(History.prototype, Events, {
+
+    // The default interval to poll for hash changes, if necessary, is
+    // twenty times a second.
+    interval: 50,
+
+    // Are we at the app root?
+    atRoot: function() {
+      var path = this.location.pathname.replace(/[^\/]$/, '$&/');
+      return path === this.root && !this.getSearch();
+    },
+
+    // Does the pathname match the root?
+    matchRoot: function() {
+      var path = this.decodeFragment(this.location.pathname);
+      var rootPath = path.slice(0, this.root.length - 1) + '/';
+      return rootPath === this.root;
+    },
+
+    // Unicode characters in `location.pathname` are percent encoded so they're
+    // decoded for comparison. `%25` should not be decoded since it may be part
+    // of an encoded parameter.
+    decodeFragment: function(fragment) {
+      return decodeURI(fragment.replace(/%25/g, '%2525'));
+    },
+
+    // In IE6, the hash fragment and search params are incorrect if the
+    // fragment contains `?`.
+    getSearch: function() {
+      var match = this.location.href.replace(/#.*/, '').match(/\?.+/);
+      return match ? match[0] : '';
+    },
+
+    // Gets the true hash value. Cannot use location.hash directly due to bug
+    // in Firefox where location.hash will always be decoded.
+    getHash: function(window) {
+      var match = (window || this).location.href.match(/#(.*)$/);
+      return match ? match[1] : '';
+    },
+
+    // Get the pathname and search params, without the root.
+    getPath: function() {
+      var path = this.decodeFragment(
+        this.location.pathname + this.getSearch()
+      ).slice(this.root.length - 1);
+      return path.charAt(0) === '/' ? path.slice(1) : path;
+    },
+
+    // Get the cross-browser normalized URL fragment from the path or hash.
+    getFragment: function(fragment) {
+      if (fragment == null) {
+        if (this._usePushState || !this._wantsHashChange) {
+          fragment = this.getPath();
+        } else {
+          fragment = this.getHash();
+        }
+      }
+      return fragment.replace(routeStripper, '');
+    },
+
+    // Start the hash change handling, returning `true` if the current URL matches
+    // an existing route, and `false` otherwise.
+    start: function(options) {
+      if (History.started) throw new Error('Backbone.history has already been started');
+      History.started = true;
+
+      // Figure out the initial configuration. Do we need an iframe?
+      // Is pushState desired ... is it available?
+      this.options          = _.extend({root: '/'}, this.options, options);
+      this.root             = this.options.root;
+      this._wantsHashChange = this.options.hashChange !== false;
+      this._hasHashChange   = 'onhashchange' in window && (document.documentMode === void 0 || document.documentMode > 7);
+      this._useHashChange   = this._wantsHashChange && this._hasHashChange;
+      this._wantsPushState  = !!this.options.pushState;
+      this._hasPushState    = !!(this.history && this.history.pushState);
+      this._usePushState    = this._wantsPushState && this._hasPushState;
+      this.fragment         = this.getFragment();
+
+      // Normalize root to always include a leading and trailing slash.
+      this.root = ('/' + this.root + '/').replace(rootStripper, '/');
+
+      // Transition from hashChange to pushState or vice versa if both are
+      // requested.
+      if (this._wantsHashChange && this._wantsPushState) {
+
+        // If we've started off with a route from a `pushState`-enabled
+        // browser, but we're currently in a browser that doesn't support it...
+        if (!this._hasPushState && !this.atRoot()) {
+          var rootPath = this.root.slice(0, -1) || '/';
+          this.location.replace(rootPath + '#' + this.getPath());
+          // Return immediately as the browser will redirect to the new URL.
+          return true;
+
+        // Or if we've started out with a hash-based route, but we're currently
+        // in a browser where it could be `pushState`-based instead...
+        } else if (this._hasPushState && this.atRoot()) {
+          this.navigate(this.getHash(), {replace: true});
+        }
+
+      }
+
+      // Proxy an iframe to handle location events if the browser doesn't
+      // support the `hashchange` event, HTML5 history, or the user wants
+      // `hashChange` but not `pushState`.
+      if (!this._hasHashChange && this._wantsHashChange && !this._usePushState) {
+        this.iframe = document.createElement('iframe');
+        this.iframe.src = 'javascript:0';
+        this.iframe.style.display = 'none';
+        this.iframe.tabIndex = -1;
+        var body = document.body;
+        // Using `appendChild` will throw on IE < 9 if the document is not ready.
+        var iWindow = body.insertBefore(this.iframe, body.firstChild).contentWindow;
+        iWindow.document.open();
+        iWindow.document.close();
+        iWindow.location.hash = '#' + this.fragment;
+      }
+
+      // Add a cross-platform `addEventListener` shim for older browsers.
+      var addEventListener = window.addEventListener || function(eventName, listener) {
+        return attachEvent('on' + eventName, listener);
+      };
+
+      // Depending on whether we're using pushState or hashes, and whether
+      // 'onhashchange' is supported, determine how we check the URL state.
+      if (this._usePushState) {
+        addEventListener('popstate', this.checkUrl, false);
+      } else if (this._useHashChange && !this.iframe) {
+        addEventListener('hashchange', this.checkUrl, false);
+      } else if (this._wantsHashChange) {
+        this._checkUrlInterval = setInterval(this.checkUrl, this.interval);
+      }
+
+      if (!this.options.silent) return this.loadUrl();
+    },
+
+    // Disable Backbone.history, perhaps temporarily. Not useful in a real app,
+    // but possibly useful for unit testing Routers.
+    stop: function() {
+      // Add a cross-platform `removeEventListener` shim for older browsers.
+      var removeEventListener = window.removeEventListener || function(eventName, listener) {
+        return detachEvent('on' + eventName, listener);
+      };
+
+      // Remove window listeners.
+      if (this._usePushState) {
+        removeEventListener('popstate', this.checkUrl, false);
+      } else if (this._useHashChange && !this.iframe) {
+        removeEventListener('hashchange', this.checkUrl, false);
+      }
+
+      // Clean up the iframe if necessary.
+      if (this.iframe) {
+        document.body.removeChild(this.iframe);
+        this.iframe = null;
+      }
+
+      // Some environments will throw when clearing an undefined interval.
+      if (this._checkUrlInterval) clearInterval(this._checkUrlInterval);
+      History.started = false;
+    },
+
+    // Add a route to be tested when the fragment changes. Routes added later
+    // may override previous routes.
+    route: function(route, callback) {
+      this.handlers.unshift({route: route, callback: callback});
+    },
+
+    // Checks the current URL to see if it has changed, and if it has,
+    // calls `loadUrl`, normalizing across the hidden iframe.
+    checkUrl: function(e) {
+      var current = this.getFragment();
+
+      // If the user pressed the back button, the iframe's hash will have
+      // changed and we should use that for comparison.
+      if (current === this.fragment && this.iframe) {
+        current = this.getHash(this.iframe.contentWindow);
+      }
+
+      if (current === this.fragment) return false;
+      if (this.iframe) this.navigate(current);
+      this.loadUrl();
+    },
+
+    // Attempt to load the current URL fragment. If a route succeeds with a
+    // match, returns `true`. If no defined routes matches the fragment,
+    // returns `false`.
+    loadUrl: function(fragment) {
+      // If the root doesn't match, no routes can match either.
+      if (!this.matchRoot()) return false;
+      fragment = this.fragment = this.getFragment(fragment);
+      return _.some(this.handlers, function(handler) {
+        if (handler.route.test(fragment)) {
+          handler.callback(fragment);
+          return true;
+        }
+      });
+    },
+
+    // Save a fragment into the hash history, or replace the URL state if the
+    // 'replace' option is passed. You are responsible for properly URL-encoding
+    // the fragment in advance.
+    //
+    // The options object can contain `trigger: true` if you wish to have the
+    // route callback be fired (not usually desirable), or `replace: true`, if
+    // you wish to modify the current URL without adding an entry to the history.
+    navigate: function(fragment, options) {
+      if (!History.started) return false;
+      if (!options || options === true) options = {trigger: !!options};
+
+      // Normalize the fragment.
+      fragment = this.getFragment(fragment || '');
+
+      // Don't include a trailing slash on the root.
+      var rootPath = this.root;
+      if (fragment === '' || fragment.charAt(0) === '?') {
+        rootPath = rootPath.slice(0, -1) || '/';
+      }
+      var url = rootPath + fragment;
+
+      // Strip the hash and decode for matching.
+      fragment = this.decodeFragment(fragment.replace(pathStripper, ''));
+
+      if (this.fragment === fragment) return;
+      this.fragment = fragment;
+
+      // If pushState is available, we use it to set the fragment as a real URL.
+      if (this._usePushState) {
+        this.history[options.replace ? 'replaceState' : 'pushState']({}, document.title, url);
+
+      // If hash changes haven't been explicitly disabled, update the hash
+      // fragment to store history.
+      } else if (this._wantsHashChange) {
+        this._updateHash(this.location, fragment, options.replace);
+        if (this.iframe && fragment !== this.getHash(this.iframe.contentWindow)) {
+          var iWindow = this.iframe.contentWindow;
+
+          // Opening and closing the iframe tricks IE7 and earlier into
+          // pushing a history entry on hash-tag change.  When replace is
+          // true, we don't want this.
+          if (!options.replace) {
+            iWindow.document.open();
+            iWindow.document.close();
+          }
+
+          this._updateHash(iWindow.location, fragment, options.replace);
+        }
+
+      // If you've told us that you explicitly don't want fallback hashchange-
+      // based history, then `navigate` becomes a page refresh.
+      } else {
+        return this.location.assign(url);
+      }
+      if (options.trigger) return this.loadUrl(fragment);
+    },
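+
+    // An illustrative usage sketch: start history once at boot, then route
+    // all subsequent URL changes through `navigate`.
+    //
+    //     Backbone.history.start({pushState: true, root: '/app/'});
+    //     Backbone.history.navigate('help');                          // new entry
+    //     Backbone.history.navigate('help/faq', {trigger: true});     // fire route too
+    //     Backbone.history.navigate('search/kiwis', {replace: true}); // replace entry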
+
+    // Update the hash location, either replacing the current entry, or adding
+    // a new one to the browser history.
+    _updateHash: function(location, fragment, replace) {
+      if (replace) {
+        var href = location.href.replace(/(javascript:|#).*$/, '');
+        location.replace(href + '#' + fragment);
+      } else {
+        // Some browsers require that `hash` contains a leading #.
+        location.hash = '#' + fragment;
+      }
+    }
+
+  });
+
+  // Create the default Backbone.history.
+  Backbone.history = new History;
+
+  // Helpers
+  // -------
+
+  // Helper function to correctly set up the prototype chain for subclasses.
+  // Similar to `goog.inherits`, but uses a hash of prototype properties and
+  // class properties to be extended.
+  var extend = function(protoProps, staticProps) {
+    var parent = this;
+    var child;
+
+    // The constructor function for the new subclass is either defined by you
+    // (the "constructor" property in your `extend` definition), or defaulted
+    // by us to simply call the parent constructor.
+    if (protoProps && _.has(protoProps, 'constructor')) {
+      child = protoProps.constructor;
+    } else {
+      child = function(){ return parent.apply(this, arguments); };
+    }
+
+    // Add static properties to the constructor function, if supplied.
+    _.extend(child, parent, staticProps);
+
+    // Set the prototype chain to inherit from `parent`, without calling
+    // `parent`'s constructor function and add the prototype properties.
+    child.prototype = _.create(parent.prototype, protoProps);
+    child.prototype.constructor = child;
+
+    // Set a convenience property in case the parent's prototype is needed
+    // later.
+    child.__super__ = parent.prototype;
+
+    return child;
+  };
+
+  // Set up inheritance for the model, collection, router, view and history.
+  Model.extend = Collection.extend = Router.extend = View.extend = History.extend = extend;
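+
+  // e.g. (illustrative): create a Model subclass with prototype and static members.
+  //
+  //   var Book = Backbone.Model.extend(
+  //       {idAttribute: 'isbn'},                                         // prototype properties
+  //       {fromJSON: function(s) { return new Book(JSON.parse(s)); }});  // static properties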
+
+  // Throw an error when a URL is needed, and none is supplied.
+  var urlError = function() {
+    throw new Error('A "url" property or function must be specified');
+  };
+
+  // Wrap an optional error callback with a fallback error event.
+  var wrapError = function(model, options) {
+    var error = options.error;
+    options.error = function(resp) {
+      if (error) error.call(options.context, model, resp, options);
+      model.trigger('error', model, resp, options);
+    };
+  };
+
+  return Backbone;
+});
diff --git a/client/galaxy/scripts/libs/bbi/bigwig.js b/client/galaxy/scripts/libs/bbi/bigwig.js
new file mode 100644
index 0000000..81946cb
--- /dev/null
+++ b/client/galaxy/scripts/libs/bbi/bigwig.js
@@ -0,0 +1,1190 @@
+//
+// Author: Jeremy Goecks
+//
+// Modified from:
+//
+// Dalliance Genome Explorer
+// (c) Thomas Down 2006-2010
+//
+// bigwig.js: indexed binary WIG (and BED) files
+//
+
+// Requirements:
+//  * jQuery and the ajax-native plugin for reading binary data; jQuery for promises
+//  * spans for working with genomic intervals
+//  * jszlib for decompression
+define(["libs/bbi/spans", "libs/bbi/jszlib", "libs/bbi/jquery-ajax-native"], function(spans, jszlib) {
+    "use strict";
+
+    // -- Copied from das.js --
+
+    function DASFeature() {
+    }
+
+    function DASGroup(id) {
+        if (id) {
+            this.id = id;
+        }
+    }
+
+    // -- End copy --
+
+    // -- Copied from bin.js --
+
+    function readInt(ba, offset) {
+        return (ba[offset + 3] << 24) | (ba[offset + 2] << 16) | (ba[offset + 1] << 8) | (ba[offset]);
+    }
+
+    // -- End copy --
+
+    // Some globals.
+    var Range = spans.Range;
+    var union = spans.union;
+    var intersection = spans.intersection;
+
+    var jszlib_inflate_buffer = jszlib.inflateBuffer;
+    var arrayCopy = jszlib.arrayCopy;
+
+    var BIG_WIG_MAGIC = 0x888FFC26;
+    var BIG_WIG_MAGIC_BE = 0x26FC8F88;
+    var BIG_BED_MAGIC = 0x8789F2EB;
+    var BIG_BED_MAGIC_BE = 0xEBF28987;
+
+
+    var BIG_WIG_TYPE_GRAPH = 1;
+    var BIG_WIG_TYPE_VSTEP = 2;
+    var BIG_WIG_TYPE_FSTEP = 3;
+
+    var M1 = 256;
+    var M2 = 256*256;
+    var M3 = 256*256*256;
+    var M4 = 256*256*256*256;
+
+    var BED_COLOR_REGEXP = new RegExp("^[0-9]+,[0-9]+,[0-9]+");
+
+    /**
+     * Read binary data from a URL using HTTP Range header. Requires jQuery and ajax-native plugin.
+     */
+    function read(url, start, size) {
+        // Taken from bin.js:
+        // This may be necessary for Safari:
+        //   if ((isSafari || this.opts.salt) && url.indexOf('?') < 0) {
+        //       url = url + '?salt=' + b64_sha1('' + Date.now() + ',' + (++seed));
+        //   }
+
+        var chunkSizeLimit = Math.pow(10, 6); // 1 MB
+        if (size > chunkSizeLimit) {
+            // TODO: raise an error. For now, warn so oversized reads are visible.
+            console.warn('read: chunk of ' + size + ' bytes exceeds the ' + chunkSizeLimit + '-byte limit');
+        }
+
+        // Read data from remote file.
+        return $.ajax({
+            type: 'GET',
+            dataType: 'native',
+            url: url,
+            // 5s timeout so mixed-content errors on Chromium fail fast instead of hanging.
+            timeout: 5000,
+            beforeSend: function(xhrObj) {
+                // (size - 1) because range is inclusive.
+                xhrObj.setRequestHeader("Range", "bytes=" + start + "-" + (start + (size - 1)));
+            },
+            xhrFields: {
+                responseType: 'arraybuffer'
+            }
+        });
+    }
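+
+    // Usage sketch (illustrative URL): read the 512-byte BBI header. The jqXHR
+    // promise resolves with an ArrayBuffer because responseType is 'arraybuffer'.
+    //
+    //   $.when(read('https://example.org/signal.bw', 0, 512)).then(function(buf) {
+    //       var ba = new Uint8Array(buf);
+    //   });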
+
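+    // Read the low five bytes of a 64-bit little-endian offset starting at byte o
+    // (sufficient for files under 1 TB). Multiplication is used because JS bitwise
+    // operators truncate to 32 bits.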
+    function bwg_readOffset(ba, o) {
+        var offset = ba[o] + ba[o+1]*M1 + ba[o+2]*M2 + ba[o+3]*M3 + ba[o+4]*M4;
+        return offset;
+    }
+
+    function BigWig() {
+    }
+
+    /**
+     * Read the chromosome B+ tree header.
+     */
+    BigWig.prototype.readChromTree = function() {
+        var thisB = this;
+        this.chromsToIDs = {};
+        this.idsToChroms = {};
+        this.maxID = 0;
+
+        var udo = this.unzoomedDataOffset;
+        var eb = (udo - this.chromTreeOffset) & 3;
+        udo = udo + 4 - eb;
+
+        // Read and parse the chrom tree, return the promise so that subsequent actions can be taken.
+        return $.when(read(this.url, this.chromTreeOffset, udo - this.chromTreeOffset)).then(function(bpt) {
+            var ba = new Uint8Array(bpt);
+            var sa = new Int16Array(bpt);
+            var la = new Int32Array(bpt);
+            var bptMagic = la[0];
+            var blockSize = la[1];
+            var keySize = la[2];
+            var valSize = la[3];
+            var itemCount = bwg_readOffset(ba, 16);
+            var rootNodeOffset = 32;
+
+            var bptReadNode = function(offset) {
+                var nodeType = ba[offset];
+                var cnt = sa[(offset/2) + 1];
+                offset += 4;
+                for (var n = 0; n < cnt; ++n) {
+                    if (nodeType === 0) {
+                        offset += keySize;
+                        var childOffset = bwg_readOffset(ba, offset);
+                        offset += 8;
+                        childOffset -= thisB.chromTreeOffset;
+                        bptReadNode(childOffset);
+                    } else {
+                        var key = '';
+                        for (var ki = 0; ki < keySize; ++ki) {
+                            var charCode = ba[offset++];
+                            if (charCode !== 0) {
+                                key += String.fromCharCode(charCode);
+                            }
+                        }
+                        var chromId = (ba[offset+3]<<24) | (ba[offset+2]<<16) | (ba[offset+1]<<8) | (ba[offset+0]);
+                        var chromSize = (ba[offset + 7]<<24) | (ba[offset+6]<<16) | (ba[offset+5]<<8) | (ba[offset+4]);
+                        offset += 8;
+
+                        thisB.chromsToIDs[key] = chromId;
+                        if (key.indexOf('chr') === 0) {
+                            thisB.chromsToIDs[key.substr(3)] = chromId;
+                        }
+                        thisB.idsToChroms[chromId] = key;
+                        thisB.maxID = Math.max(thisB.maxID, chromId);
+                    }
+                }
+            };
+            bptReadNode(rootNodeOffset);
+        });
+    };
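+
+    // After readChromTree() resolves, names are keyed both with and without the
+    // 'chr' prefix, e.g. (hypothetical) bwg.chromsToIDs['chr1'] === bwg.chromsToIDs['1'].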
+
+    function BigWigView(bwg, cirTreeOffset, cirTreeLength, isSummary) {
+        this.bwg = bwg;
+        this.cirTreeOffset = cirTreeOffset;
+        this.cirTreeLength = cirTreeLength;
+        this.isSummary = isSummary;
+    }
+
+    BigWigView.prototype.readWigData = function(chrName, min, max) {
+        var chr = this.bwg.chromsToIDs[chrName],
+            rval;
+        if (chr === undefined) {
+            // Not an error because some .bwgs won't have data for all chromosomes.
+            rval = [];
+        } else {
+            rval = this.readWigDataById(chr, min, max);
+        }
+
+        return rval;
+    };
+
+    BigWigView.prototype.readWigDataById = function(chr, min, max) {
+        var thisB = this,
+            promise = $.Deferred();
+
+        // Read the R-tree index header and then read data again.
+        if (!this.cirHeader) {
+            $.when(read(thisB.bwg.url, this.cirTreeOffset, 48)).then(function(result) {
+                thisB.cirHeader = result;
+                var la = new Int32Array(thisB.cirHeader);
+                thisB.cirBlockSize = la[1];
+                $.when(thisB.readWigDataById(chr, min, max)).then(function(result) {
+                    promise.resolve(result);
+                });
+            });
+            return promise;
+        }
+
+        var blocksToFetch = [];
+        var outstanding = 0;
+
+        var beforeBWG = Date.now();
+
+        var filter = function(chromId, fmin, fmax, toks) {
+            return ((chr < 0 || chromId == chr) && fmin <= max && fmax >= min);
+        };
+
+        var cirFobRecur = function(offset, level) {
+            if (thisB.bwg.instrument) {
+                console.log('level=' + level + '; offset=' + offset + '; time=' + (Date.now()|0));
+            }
+
+            outstanding += offset.length;
+
+            if (offset.length == 1 && offset[0] - thisB.cirTreeOffset == 48 && thisB.cachedCirRoot) {
+                cirFobRecur2(thisB.cachedCirRoot, 0, level);
+                --outstanding;
+                if (outstanding === 0) {
+                    $.when(thisB.fetchFeatures(filter, blocksToFetch)).then(function(result) {
+                        promise.resolve(result);
+                    });
+                }
+                return;
+            }
+
+            var maxCirBlockSpan = 4 +  (thisB.cirBlockSize * 32);   // Upper bound on size, based on a completely full leaf node.
+            var spans;
+            for (var i = 0; i < offset.length; ++i) {
+                var blockSpan = new Range(offset[i], offset[i] + maxCirBlockSpan);
+                spans = spans ? union(spans, blockSpan) : blockSpan;
+            }
+
+            var fetchRanges = spans.ranges();
+            for (var r = 0; r < fetchRanges.length; ++r) {
+                var fr = fetchRanges[r];
+                cirFobStartFetch(offset, fr, level);
+            }
+        };
+
+        var cirFobStartFetch = function(offset, fr, level, attempts) {
+            var length = fr.max() - fr.min();
+            $.when(read(thisB.bwg.url, fr.min(), fr.max() - fr.min())).then(function(resultBuffer) {
+                for (var i = 0; i < offset.length; ++i) {
+                    if (fr.contains(offset[i])) {
+                        cirFobRecur2(resultBuffer, offset[i] - fr.min(), level);
+
+                        if (offset[i] - thisB.cirTreeOffset == 48 && offset[i] - fr.min() === 0)
+                            thisB.cachedCirRoot = resultBuffer;
+
+                        --outstanding;
+                        if (outstanding === 0) {
+                            $.when(thisB.fetchFeatures(filter, blocksToFetch)).then(function(result) {
+                                promise.resolve(result);
+                            });
+                        }
+                    }
+                }
+            });
+        };
+
+        var cirFobRecur2 = function(cirBlockData, offset, level) {
+            var ba = new Uint8Array(cirBlockData);
+            var sa = new Int16Array(cirBlockData);
+            var la = new Int32Array(cirBlockData);
+
+            var isLeaf = ba[offset];
+            var cnt = sa[offset/2 + 1];
+            offset += 4;
+
+            if (isLeaf !== 0) {
+                for (var i = 0; i < cnt; ++i) {
+                    var lo = offset/4;
+                    var startChrom = la[lo];
+                    var startBase = la[lo + 1];
+                    var endChrom = la[lo + 2];
+                    var endBase = la[lo + 3];
+                    var blockOffset = bwg_readOffset(ba, offset+16);
+                    var blockSize = bwg_readOffset(ba, offset+24);
+                    if (((chr < 0 || startChrom < chr) || (startChrom == chr && startBase <= max)) &&
+                        ((chr < 0 || endChrom > chr) || (endChrom == chr && endBase >= min))) {
+                        blocksToFetch.push({offset: blockOffset, size: blockSize});
+                    }
+                    offset += 32;
+                }
+            } else {
+                var recurOffsets = [];
+                for (var i = 0; i < cnt; ++i) {
+                    var lo = offset/4;
+                    var startChrom = la[lo];
+                    var startBase = la[lo + 1];
+                    var endChrom = la[lo + 2];
+                    var endBase = la[lo + 3];
+                    var blockOffset = bwg_readOffset(ba, offset+16);
+                    if ((chr < 0 || startChrom < chr || (startChrom == chr && startBase <= max)) &&
+                        (chr < 0 || endChrom > chr || (endChrom == chr && endBase >= min))) {
+                        recurOffsets.push(blockOffset);
+                    }
+                    offset += 24;
+                }
+                if (recurOffsets.length > 0) {
+                    cirFobRecur(recurOffsets, level + 1);
+                }
+            }
+        };
+
+        cirFobRecur([thisB.cirTreeOffset + 48], 1);
+        return promise;
+    };
+
+    /**
+     * Fetch data for a set of blocks. Returns a promise that resolves to fetched data.
+     */
+    BigWigView.prototype.fetchFeatures = function(filter, blocksToFetch) {
+        var thisB = this,
+            promise = $.Deferred();
+
+        blocksToFetch.sort(function(b0, b1) {
+            return (b0.offset|0) - (b1.offset|0);
+        });
+
+        if (blocksToFetch.length === 0) {
+            // Nothing to fetch: resolve now so the return type is always a promise.
+            return promise.resolve([]);
+        } else {
+            var features = [];
+            var createFeature = function(chr, fmin, fmax, opts) {
+                if (!opts) {
+                    opts = {};
+                }
+
+                var f = new DASFeature();
+                f._chromId = chr;
+                f.segment = thisB.bwg.idsToChroms[chr];
+                f.min = fmin;
+                f.max = fmax;
+                f.type = thisB.bwg.type;
+
+                for (var k in opts) {
+                    f[k] = opts[k];
+                }
+
+                features.push(f);
+            };
+
+            // Recursive function to read blocks of data.
+            var tramp = function() {
+                if (blocksToFetch.length === 0) {
+                    var afterBWG = Date.now();
+                    // dlog('BWG fetch took ' + (afterBWG - beforeBWG) + 'ms');
+                    return promise.resolve(features);
+                }
+                else {
+                    var block = blocksToFetch[0];
+                    if (block.data) {
+                        thisB.parseFeatures(block.data, createFeature, filter);
+                        blocksToFetch.splice(0, 1);
+                        tramp();
+                    }
+                    else {
+                        var fetchStart = block.offset;
+                        var fetchSize = block.size;
+                        var bi = 1;
+                        while (bi < blocksToFetch.length && blocksToFetch[bi].offset == (fetchStart + fetchSize)) {
+                            fetchSize += blocksToFetch[bi].size;
+                            ++bi;
+                        }
+
+                        $.when(read(thisB.bwg.url, fetchStart, fetchSize)).then(function(result) {
+                            var offset = 0;
+                            var bi = 0;
+                            while (offset < fetchSize) {
+                                var fb = blocksToFetch[bi];
+
+                                var data;
+                                if (thisB.bwg.uncompressBufSize > 0) {
+                                    data = jszlib_inflate_buffer(result, offset + 2, fb.size - 2);
+                                } else {
+                                    var tmp = new Uint8Array(fb.size);    // FIXME is this really the best we can do?
+                                    arrayCopy(new Uint8Array(result, offset, fb.size), 0, tmp, 0, fb.size);
+                                    data = tmp.buffer;
+                                }
+                                fb.data = data;
+
+                                offset += fb.size;
+                                ++bi;
+                            }
+                            tramp();
+                        });
+                    }
+                }
+            };
+            tramp();
+        }
+
+        return promise;
+    };
+
+    BigWigView.prototype.parseFeatures = function(data, createFeature, filter) {
+        var ba = new Uint8Array(data);
+
+        if (this.isSummary) {
+            var sa = new Int16Array(data);
+            var la = new Int32Array(data);
+            var fa = new Float32Array(data);
+
+            var itemCount = data.byteLength/32;
+            for (var i = 0; i < itemCount; ++i) {
+                var chromId =   la[(i*8)];
+                var start =     la[(i*8)+1];
+                var end =       la[(i*8)+2];
+                var validCnt =  la[(i*8)+3];
+                var minVal    = fa[(i*8)+4];
+                var maxVal    = fa[(i*8)+5];
+                var sumData   = fa[(i*8)+6];
+                var sumSqData = fa[(i*8)+7];
+
+                if (filter(chromId, start + 1, end)) {
+                    var summaryOpts = {type: 'bigwig', score: sumData/validCnt, maxScore: maxVal};
+                    if (this.bwg.type == 'bigbed') {
+                        summaryOpts.type = 'density';
+                    }
+                    createFeature(chromId, start + 1, end, summaryOpts);
+                }
+            }
+        }
+        else if (this.bwg.type == 'bigwig') {
+            var sa = new Int16Array(data);
+            var la = new Int32Array(data);
+            var fa = new Float32Array(data);
+
+            var chromId = la[0];
+            var blockStart = la[1];
+            var blockEnd = la[2];
+            var itemStep = la[3];
+            var itemSpan = la[4];
+            var blockType = ba[20];
+            var itemCount = sa[11];
+
+            if (blockType == BIG_WIG_TYPE_FSTEP) {
+                for (var i = 0; i < itemCount; ++i) {
+                    var score = fa[i + 6];
+                    var fmin = blockStart + (i*itemStep) + 1, fmax = blockStart + (i*itemStep) + itemSpan;
+                    if (filter(chromId, fmin, fmax)) {
+                        createFeature(chromId, fmin, fmax, {score: score});
+                    }
+                }
+            } else if (blockType == BIG_WIG_TYPE_VSTEP) {
+                for (var i = 0; i < itemCount; ++i) {
+                    var start = la[(i*2) + 6] + 1;
+                    var end = start + itemSpan - 1;
+                    var score = fa[(i*2) + 7];
+                    if (filter(chromId, start, end)) {
+                        createFeature(chromId, start, end, {score: score});
+                    }
+                }
+            } else if (blockType == BIG_WIG_TYPE_GRAPH) {
+                for (var i = 0; i < itemCount; ++i) {
+                    var start = la[(i*3) + 6] + 1;
+                    var end   = la[(i*3) + 7];
+                    var score = fa[(i*3) + 8];
+                    if (start > end) {
+                        start = end;
+                    }
+                    if (filter(chromId, start, end)) {
+                        createFeature(chromId, start, end, {score: score});
+                    }
+                }
+            } else {
+                console.log('Currently not handling bwgType=' + blockType);
+            }
+        } else if (this.bwg.type == 'bigbed') {
+            var offset = 0;
+            var dfc = this.bwg.definedFieldCount;
+            var schema = this.bwg.schema;
+
+            while (offset < ba.length) {
+                var chromId = (ba[offset+3]<<24) | (ba[offset+2]<<16) | (ba[offset+1]<<8) | (ba[offset+0]);
+                var start = (ba[offset+7]<<24) | (ba[offset+6]<<16) | (ba[offset+5]<<8) | (ba[offset+4]);
+                var end = (ba[offset+11]<<24) | (ba[offset+10]<<16) | (ba[offset+9]<<8) | (ba[offset+8]);
+                offset += 12;
+                var rest = '';
+                while (true) {
+                    var ch = ba[offset++];
+                    if (ch != 0) {
+                        rest += String.fromCharCode(ch);
+                    } else {
+                        break;
+                    }
+                }
+
+                var featureOpts = {};
+
+                var bedColumns;
+                if (rest.length > 0) {
+                    bedColumns = rest.split('\t');
+                } else {
+                    bedColumns = [];
+                }
+                if (bedColumns.length > 0 && dfc > 3) {
+                    featureOpts.label = bedColumns[0];
+                }
+                if (bedColumns.length > 1 && dfc > 4) {
+                    var score = parseInt(bedColumns[1], 10);
+                    if (!isNaN(score)) {
+                        featureOpts.score = score;
+                    }
+                }
+                if (bedColumns.length > 2 && dfc > 5) {
+                    featureOpts.orientation = bedColumns[2];
+                }
+                if (bedColumns.length > 5 && dfc > 8) {
+                    var color = bedColumns[5];
+                    if (BED_COLOR_REGEXP.test(color)) {
+                        featureOpts.itemRgb = 'rgb(' + color + ')';
+                    }
+                }
+
+                if (bedColumns.length > dfc-3 && schema) {
+                    for (var col = dfc - 3; col < bedColumns.length; ++col) {
+                        featureOpts[schema.fields[col+3].name] = bedColumns[col];
+                    }
+                }
+
+                if (filter(chromId, start + 1, end, bedColumns)) {
+                    if (dfc < 12) {
+                        createFeature(chromId, start + 1, end, featureOpts);
+                    } else {
+                        // TODO: add block starts, sizes, thick start, thick end to feature.
+                        var thickStart = bedColumns[3]|0;
+                        var thickEnd   = bedColumns[4]|0;
+                        var blockCount = bedColumns[6]|0;
+                        var blockSizes = bedColumns[7].split(',');
+                        var blockStarts = bedColumns[8].split(',');
+
+                        if (featureOpts.exonFrames) {
+                            var exonFrames = featureOpts.exonFrames.split(',');
+                            featureOpts.exonFrames = undefined;
+                        }
+
+                        featureOpts.type = 'transcript';
+                        var grp = new DASGroup();
+                        for (var k in featureOpts) {
+                            grp[k] = featureOpts[k];
+                        }
+                        grp.id = bedColumns[0];
+                        grp.segment = this.bwg.idsToChroms[chromId];
+                        grp.min = start + 1;
+                        grp.max = end;
+                        grp.notes = [];
+                        featureOpts.groups = [grp];
+
+                        // Moving towards using bigGenePred model, but will
+                        // still support old Dalliance-style BED12+gene-name for the
+                        // foreseeable future.
+                        if (bedColumns.length > 9) {
+                            var geneId = featureOpts.geneName || bedColumns[9];
+                            var geneName = geneId;
+                            if (bedColumns.length > 10) {
+                                geneName = bedColumns[10];
+                            }
+                            if (featureOpts.geneName2) {
+                                geneName = featureOpts.geneName2;
+                            }
+
+                            var gg = $.extend({}, grp);
+                            gg.id = geneId;
+                            gg.label = geneName;
+                            gg.type = 'gene';
+                            featureOpts.groups.push(gg);
+                        }
+
+                        var spanList = [];
+                        for (var b = 0; b < blockCount; ++b) {
+                            var bmin = (blockStarts[b]|0) + start;
+                            var bmax = bmin + (blockSizes[b]|0);
+                            var span = new Range(bmin, bmax);
+                            spanList.push(span);
+                        }
+                        var spans = union(spanList);
+
+                        var tsList = spans.ranges();
+                        for (var s = 0; s < tsList.length; ++s) {
+                            var ts = tsList[s];
+                            createFeature(chromId, ts.min() + 1, ts.max(), featureOpts);
+                        }
+
+                        if (thickEnd > thickStart) {
+                            // Extend by 3 bases to include the stop codon.
+                            var codingRegion = (featureOpts.orientation == '+') ?
+                                new Range(thickStart, thickEnd + 3) :
+                                new Range(thickStart - 3, thickEnd);
+
+                            var tl = intersection(spans, codingRegion);
+                            if (tl) {
+                                featureOpts.type = 'translation';
+                                var tlList = tl.ranges();
+                                var readingFrame = 0;
+
+                                var tlOffset = 0;
+                                while (tlList[0].min() > tsList[tlOffset].max()) {
+                                    tlOffset++;
+                                }
+
+                                for (var s = 0; s < tlList.length; ++s) {
+                                    // Record reading frame for every exon
+                                    var index = s;
+                                    if (featureOpts.orientation == '-') {
+                                        index = tlList.length - s - 1;
+                                    }
+                                    var ts = tlList[index];
+                                    featureOpts.readframe = readingFrame;
+                                    if (exonFrames) {
+                                        var brf = parseInt(exonFrames[index + tlOffset]);
+                                        if (typeof(brf) === 'number' && brf >= 0 && brf <= 2) {
+                                            featureOpts.readframe = brf;
+                                            featureOpts.readframeExplicit = true;
+                                        }
+                                    }
+                                    var length = ts.max() - ts.min();
+                                    readingFrame = (readingFrame + length) % 3;
+                                    createFeature(chromId, ts.min() + 1, ts.max(), featureOpts);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        } else {
+            throw Error("Don't know what to do with " + this.bwg.type);
+        }
+    };
+
+    //
+    // The helpers below were cut/pasted from Dalliance and still use the old
+    // callback-based fetch API (bwg.data.slice(...).fetch(...)) rather than the
+    // promise-based read() above; they should eventually be rolled back in.
+    //
+
+    BigWigView.prototype.getFirstAdjacent = function(chrName, pos, dir, callback) {
+        var chr = this.bwg.chromsToIDs[chrName];
+        if (chr === undefined) {
+            // Not an error because some .bwgs won't have data for all chromosomes.
+            return callback([]);
+        } else {
+            this.getFirstAdjacentById(chr, pos, dir, callback);
+        }
+    };
+
+    BigWigView.prototype.getFirstAdjacentById = function(chr, pos, dir, callback) {
+        var thisB = this;
+        if (!this.cirHeader) {
+            this.bwg.data.slice(this.cirTreeOffset, 48).fetch(function(result) {
+                thisB.cirHeader = result;
+                var la = new Int32Array(thisB.cirHeader);
+                thisB.cirBlockSize = la[1];
+                thisB.getFirstAdjacentById(chr, pos, dir, callback);
+            });
+            return;
+        }
+
+        var blockToFetch = null;
+        var bestBlockChr = -1;
+        var bestBlockOffset = -1;
+
+        var outstanding = 0;
+
+        var beforeBWG = Date.now();
+
+        var cirFobRecur = function(offset, level) {
+            outstanding += offset.length;
+
+            var maxCirBlockSpan = 4 +  (thisB.cirBlockSize * 32);   // Upper bound on size, based on a completely full leaf node.
+            var spans;
+            for (var i = 0; i < offset.length; ++i) {
+                var blockSpan = new Range(offset[i], offset[i] + maxCirBlockSpan);
+                spans = spans ? union(spans, blockSpan) : blockSpan;
+            }
+
+            var fetchRanges = spans.ranges();
+            for (var r = 0; r < fetchRanges.length; ++r) {
+                var fr = fetchRanges[r];
+                cirFobStartFetch(offset, fr, level);
+            }
+        };
+
+        var cirFobStartFetch = function(offset, fr, level, attempts) {
+            var length = fr.max() - fr.min();
+            thisB.bwg.data.slice(fr.min(), fr.max() - fr.min()).fetch(function(resultBuffer) {
+                for (var i = 0; i < offset.length; ++i) {
+                    if (fr.contains(offset[i])) {
+                        cirFobRecur2(resultBuffer, offset[i] - fr.min(), level);
+                        --outstanding;
+                        if (outstanding == 0) {
+                            if (!blockToFetch) {
+                                if (dir > 0 && (chr != 0 || pos > 0)) {
+                                    return thisB.getFirstAdjacentById(0, 0, dir, callback);
+                                } else if (dir < 0 && (chr != thisB.bwg.maxID || pos < 1000000000)) {
+                                    return thisB.getFirstAdjacentById(thisB.bwg.maxID, 1000000000, dir, callback);
+                                }
+                                return callback([]);
+                            }
+
+                            thisB.fetchFeatures(function(chrx, fmin, fmax, toks) {
+                                return (dir < 0 && (chrx < chr || fmax < pos)) || (dir > 0 && (chrx > chr || fmin > pos));
+                            }, [blockToFetch], function(features) {
+                                var bestFeature = null;
+                                var bestChr = -1;
+                                var bestPos = -1;
+                                for (var fi = 0; fi < features.length; ++fi) {
+                                    var f = features[fi];
+                                    var chrx = f._chromId, fmin = f.min, fmax = f.max;
+                                    if (bestFeature == null || ((dir < 0) && (chrx > bestChr || fmax > bestPos)) || ((dir > 0) && (chrx < bestChr || fmin < bestPos))) {
+                                        bestFeature = f;
+                                        bestPos = (dir < 0) ? fmax : fmin;
+                                        bestChr = chrx;
+                                    }
+                                }
+
+                                if (bestFeature != null) {
+                                    return callback([bestFeature]);
+                                } else {
+                                    return callback([]);
+                                }
+                            });
+                        }
+                    }
+                }
+            });
+        };
+
+        var cirFobRecur2 = function(cirBlockData, offset, level) {
+            var ba = new Uint8Array(cirBlockData);
+            var sa = new Int16Array(cirBlockData);
+            var la = new Int32Array(cirBlockData);
+
+            var isLeaf = ba[offset];
+            var cnt = sa[offset/2 + 1];
+            offset += 4;
+
+            if (isLeaf != 0) {
+                for (var i = 0; i < cnt; ++i) {
+                    var lo = offset/4;
+                    var startChrom = la[lo];
+                    var startBase = la[lo + 1];
+                    var endChrom = la[lo + 2];
+                    var endBase = la[lo + 3];
+                    var blockOffset = bwg_readOffset(ba, offset+16);
+                    var blockSize = bwg_readOffset(ba, offset+24);
+                    if ((dir < 0 && (startChrom < chr || (startChrom == chr && startBase <= pos))) ||
+                        (dir > 0 && (endChrom > chr || (endChrom == chr && endBase >= pos)))) {
+                        if (/_random/.exec(thisB.bwg.idsToChroms[startChrom])) {
+                            // Skip *_random scaffolds.
+                        } else if (blockToFetch == null ||
+                                   ((dir < 0) && (endChrom > bestBlockChr || (endChrom == bestBlockChr && endBase > bestBlockOffset))) ||
+                                   ((dir > 0) && (startChrom < bestBlockChr || (startChrom == bestBlockChr && startBase < bestBlockOffset)))) {
+                            blockToFetch = {offset: blockOffset, size: blockSize};
+                            bestBlockOffset = (dir < 0) ? endBase : startBase;
+                            bestBlockChr = (dir < 0) ? endChrom : startChrom;
+                        }
+                    }
+                    offset += 32;
+                }
+            } else {
+                var bestRecur = -1;
+                var bestPos = -1;
+                var bestChr = -1;
+                for (var i = 0; i < cnt; ++i) {
+                    var lo = offset/4;
+                    var startChrom = la[lo];
+                    var startBase = la[lo + 1];
+                    var endChrom = la[lo + 2];
+                    var endBase = la[lo + 3];
+                    // Read the child node offset with bwg_readOffset; the original
+                    // (la[lo+4]<<32)|la[lo+5] was broken because JS shifts are mod 32.
+                    var blockOffset = bwg_readOffset(ba, offset + 16);
+                    if ((dir < 0 && (startChrom < chr || (startChrom == chr && startBase <= pos)) && (endChrom >= chr)) ||
+                        (dir > 0 && (endChrom > chr || (endChrom == chr && endBase >= pos)) && (startChrom <= chr)))
+                    {
+                        if (bestRecur < 0 || endBase > bestPos) {
+                            bestRecur = blockOffset;
+                            bestPos = (dir < 0) ? endBase : startBase;
+                            bestChr = (dir < 0) ? endChrom : startChrom;
+                        }
+                    }
+                    offset += 24;
+                }
+                if (bestRecur >= 0) {
+                    cirFobRecur([bestRecur], level + 1);
+                }
+            }
+        };
+
+
+        cirFobRecur([thisB.cirTreeOffset + 48], 1);
+    };
+
+    /**
+     * Automatically choose a zoom level and return data from that level.
+     */
+    BigWig.prototype.readWigData = function(chrName, min, max) {
+        // Maximum number of data points to return when reading a bigwig. This is used
+        // to choose the appropriate level. One data point ~= 25-65 bytes.
+        // FIXME: For targeted sequencing, data points returned is going to be much lower,
+        // so this will need to be tuned appropriately.
+        var MAX_DATA_POINTS = 25000;
+
+        var range = max - min,
+            view;
+        // If no zooming needed or available (common in bigbed), use unzoomed view.
+        if (range <= MAX_DATA_POINTS || this.zoomLevels.length === 0) {
+            view = this.getUnzoomedView();
+        }
+        else {
+            // Find a reasonable zoom level. Reduction is the number of bases
+            // represented by each data point at that level.
+            for (var i = 0; i < this.zoomLevels.length; i++) {
+                if (range/this.zoomLevels[i].reduction < MAX_DATA_POINTS) {
+                    view = this.getZoomedView(i);
+                    break;
+                }
+            }
+            // Fall back to the coarsest level if none is coarse enough.
+            if (!view) {
+                view = this.getZoomedView(this.zoomLevels.length - 1);
+            }
+        }
+
+        return view.readWigData(chrName, min, max);
+    };
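+
+    // Worked example (illustrative): for a 10 Mb window, a zoom level with
+    // reduction 1000 (bases per data point) yields 10,000,000 / 1000 = 10,000
+    // points, under MAX_DATA_POINTS, so that level would be selected.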
+
+    BigWig.prototype.getUnzoomedView = function() {
+        if (!this.unzoomedView) {
+            var cirLen = 4000;
+            var nzl = this.zoomLevels[0];
+            if (nzl) {
+                cirLen = this.zoomLevels[0].dataOffset - this.unzoomedIndexOffset;
+            }
+            this.unzoomedView = new BigWigView(this, this.unzoomedIndexOffset, cirLen, false);
+        }
+        return this.unzoomedView;
+    };
+
+    BigWig.prototype.getZoomedView = function(z) {
+        var zh = this.zoomLevels[z];
+        if (!zh.view) {
+            zh.view = new BigWigView(this, zh.indexOffset, /* this.zoomLevels[z + 1].dataOffset - zh.indexOffset */ 4000, true);
+        }
+        return zh.view;
+    };
+
+    /**
+     * Create a BigWig object using a URL to a bigwig/bigbed file. Returns a promise
+     * that resolves to the object when it's available.
+     */
+    function makeBwg(url) {
+        var promise = $.Deferred(),
+            bwg = new BigWig();
+        bwg.url = url;
+
+        // Read and parse bigwig header, including chrom tree.
+        $.when(read(bwg.url, 0, 512)).then(function(result) {
+            if (!result) {
+                return promise.resolve(null, "Couldn't fetch file");
+            }
+
+            var header = result;
+            var ba = new Uint8Array(header);
+            var sa = new Int16Array(header);
+            var la = new Int32Array(header);
+            var magic = ba[0] + (M1 * ba[1]) + (M2 * ba[2]) + (M3 * ba[3]);
+            if (magic == BIG_WIG_MAGIC) {
+                bwg.type = 'bigwig';
+            } else if (magic == BIG_BED_MAGIC) {
+                bwg.type = 'bigbed';
+            } else if (magic == BIG_WIG_MAGIC_BE || magic == BIG_BED_MAGIC_BE) {
+                return promise.resolve(null, "Currently don't support big-endian BBI files");
+            } else {
+                return promise.resolve(null, "Not a supported format, magic=0x" + magic.toString(16));
+            }
+
+            bwg.version = sa[2];             // 4
+            bwg.numZoomLevels = sa[3];       // 6
+            bwg.chromTreeOffset = bwg_readOffset(ba, 8);
+            bwg.unzoomedDataOffset = bwg_readOffset(ba, 16);
+            bwg.unzoomedIndexOffset = bwg_readOffset(ba, 24);
+            bwg.fieldCount = sa[16];         // 32
+            bwg.definedFieldCount = sa[17];  // 34
+            bwg.asOffset = bwg_readOffset(ba, 36);
+            bwg.totalSummaryOffset = bwg_readOffset(ba, 44);
+            bwg.uncompressBufSize = la[13];  // 52
+            bwg.extHeaderOffset = bwg_readOffset(ba, 56);
+
+            bwg.zoomLevels = [];
+            for (var zl = 0; zl < bwg.numZoomLevels; ++zl) {
+                var zlReduction = la[zl*6 + 16];
+                var zlData = bwg_readOffset(ba, zl*24 + 72);
+                var zlIndex = bwg_readOffset(ba, zl*24 + 80);
+                bwg.zoomLevels.push({reduction: zlReduction, dataOffset: zlData, indexOffset: zlIndex});
+            }
+
+            $.when(bwg.readChromTree()).then(function() {
+                bwg.getAutoSQL(function(as) {
+                    bwg.schema = as;
+                    return promise.resolve(bwg);
+                });
+            });
+        });
+
+        return promise;
+    }
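+
+    // Usage sketch (illustrative; the URL and region are hypothetical):
+    //
+    //   makeBwg('https://example.org/signal.bw').then(function(bwg, err) {
+    //       if (!bwg) { console.log(err); return; }
+    //       $.when(bwg.readWigData('chr1', 0, 100000)).then(function(features) {
+    //           // each feature: {segment: 'chr1', min: ..., max: ..., score: ...}
+    //       });
+    //   });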
+
+    BigWig.prototype._tsFetch = function(zoom, chr, min, max, callback) {
+        var bwg = this;
+        if (zoom >= this.zoomLevels.length - 1) {
+            if (!this.topLevelReductionCache) {
+                // readWigDataById returns a promise in this port, so adapt the
+                // original callback style.
+                $.when(this.getZoomedView(this.zoomLevels.length - 1).readWigDataById(-1, 0, 300000000)).then(function(feats) {
+                    bwg.topLevelReductionCache = feats;
+                    return bwg._tsFetch(zoom, chr, min, max, callback);
+                });
+            } else {
+                var f = [];
+                var c = this.topLevelReductionCache;
+                for (var fi = 0; fi < c.length; ++fi) {
+                    if (c[fi]._chromId == chr) {
+                        f.push(c[fi]);
+                    }
+                }
+                return callback(f);
+            }
+        } else {
+            var view;
+            if (zoom < 0) {
+                view = this.getUnzoomedView();
+            } else {
+                view = this.getZoomedView(zoom);
+            }
+            // Adapt the promise-returning readWigDataById to this callback API.
+            return $.when(view.readWigDataById(chr, min, max)).then(callback);
+        }
+    };
+
+    BigWig.prototype.thresholdSearch = function(chrName, referencePoint, dir, threshold, callback) {
+        dir = (dir<0) ? -1 : 1;
+        var bwg = this;
+        var initialChr = this.chromsToIDs[chrName];
+        var candidates = [{chrOrd: 0, chr: initialChr, zoom: bwg.zoomLevels.length - 4, min: 0, max: 300000000, fromRef: true}];
+        for (var i = 1; i <= this.maxID + 1; ++i) {
+            var chrId = (initialChr + (dir*i)) % (this.maxID + 1);
+            if (chrId < 0) {
+                chrId += (this.maxID + 1);
+            }
+            candidates.push({chrOrd: i, chr: chrId, zoom: bwg.zoomLevels.length - 1, min: 0, max: 300000000});
+        }
+
+        function fbThresholdSearchRecur() {
+            if (candidates.length == 0) {
+                return callback(null);
+            }
+            candidates.sort(function(c1, c2) {
+                var d = c1.zoom - c2.zoom;
+                if (d != 0) {
+                    return d;
+                }
+                d = c1.chrOrd - c2.chrOrd;
+                if (d != 0) {
+                    return d;
+                }
+                // Parenthesized: the original 'c1.min - c2.min * dir' applied dir
+                // only to c2.min because of operator precedence.
+                return (c1.min - c2.min) * dir;
+            });
+
+            var candidate = candidates.splice(0, 1)[0];
+            bwg._tsFetch(candidate.zoom, candidate.chr, candidate.min, candidate.max, function(feats) {
+                var rp = dir > 0 ? 0 : 300000000;
+                if (candidate.fromRef) {
+                    rp = referencePoint;
+                }
+
+                for (var fi = 0; fi < feats.length; ++fi) {
+                    var f = feats[fi];
+                    var score;
+                    if (f.maxScore != undefined) {
+                        score = f.maxScore;
+                    } else {
+                        score = f.score;
+                    }
+
+                    if (dir > 0) {
+                        if (score > threshold) {
+                            if (candidate.zoom < 0) {
+                                if (f.min > rp) {
+                                    return callback(f);
+                                }
+                            } else if (f.max > rp) {
+                                candidates.push({chr: candidate.chr, chrOrd: candidate.chrOrd, zoom: candidate.zoom - 2, min: f.min, max: f.max, fromRef: candidate.fromRef});
+                            }
+                        }
+                    } else {
+                        if (score > threshold) {
+                            if (candidate.zoom < 0) {
+                                if (f.max < rp) {
+                                    return callback(f);
+                                }
+                            } else if (f.min < rp) {
+                                candidates.push({chr: candidate.chr, chrOrd: candidate.chrOrd, zoom: candidate.zoom - 2, min: f.min, max: f.max, fromRef: candidate.fromRef});
+                            }
+                        }
+                    }
+                }
+                fbThresholdSearchRecur();
+            });
+        }
+
+        fbThresholdSearchRecur();
+    };
+
+    BigWig.prototype.getAutoSQL = function(callback) {
+        var thisB = this;
+        if (!this.asOffset) {
+            return callback(null);
+        }
+
+        $.when(read(this.url, this.asOffset, 2048)).then(function(result) {
+            var ba = new Uint8Array(result);
+            var s = '';
+            for (var i = 0; i < ba.length; ++i) {
+                if (ba[i] == 0) {
+                    break;
+                }
+                s += String.fromCharCode(ba[i]);
+            }
+
+            /*
+             * Quick'n'dirty attempt to parse the autoSql format.
+             * See: http://www.linuxjournal.com/files/linuxjournal.com/linuxjournal/articles/059/5949/5949l2.html
+             */
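+
+            // Illustrative autoSql input (hypothetical):
+            //
+            //   table bed3 "Minimal BED" (
+            //       string chrom;     "Reference sequence"
+            //       uint chromStart;  "Start position"
+            //       uint chromEnd;    "End position"
+            //   )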
+
+            var header_re = /(\w+)\s+(\w+)\s+("([^"]+)")?\s+\(\s*/;
+            var field_re = /([\w\[\]]+)\s+(\w+)\s*;\s*("([^"]+)")?\s*/g;
+
+            var headerMatch = header_re.exec(s);
+            if (headerMatch) {
+                var as = {
+                    declType: headerMatch[1],
+                    name: headerMatch[2],
+                    comment: headerMatch[4],
+                    fields: []
+                };
+
+                // Advance past the header; the original passed the match object
+                // itself to substring, which coerced to index 0.
+                s = s.substring(headerMatch[0].length);
+                for (var m = field_re.exec(s); m != null; m = field_re.exec(s)) {
+                    as.fields.push({type: m[1], name: m[2], comment: m[4]});
+                }
+
+                return callback(as);
+            }
+        });
+    };
+
+    BigWig.prototype.getExtraIndices = function(callback) {
+        var thisB = this;
+        if (this.version < 4 || this.extHeaderOffset == 0 || this.type != 'bigbed') {
+            return callback(null);
+        } else {
+            this.data.slice(this.extHeaderOffset, 64).fetch(function(result) {
+                if (!result) {
+                    return callback(null, "Couldn't fetch extension header");
+                }
+
+                var ba = new Uint8Array(result);
+                var sa = new Int16Array(result);
+                var la = new Int32Array(result);
+
+                var extHeaderSize = sa[0];
+                var extraIndexCount = sa[1];
+                var extraIndexListOffset = bwg_readOffset(ba, 4);
+
+                if (extraIndexCount == 0) {
+                    return callback(null);
+                }
+
+                // FIXME 20byte records only make sense for single-field indices.
+                // Right now, these seem to be the only things around, but the format
+                // is actually more general.
+                thisB.data.slice(extraIndexListOffset, extraIndexCount * 20).fetch(function(eil) {
+                    if (!eil) {
+                        return callback(null, "Couldn't fetch index info");
+                    }
+
+                    var ba = new Uint8Array(eil);
+                    var sa = new Int16Array(eil);
+                    var la = new Int32Array(eil);
+
+                    var indices = [];
+                    for (var ii = 0; ii < extraIndexCount; ++ii) {
+                        var eiType = sa[ii*10];
+                        var eiFieldCount = sa[ii*10 + 1];
+                        var eiOffset = bwg_readOffset(ba, ii*20 + 4);
+                        var eiField = sa[ii*10 + 8];
+                        var index = new BBIExtraIndex(thisB, eiType, eiFieldCount, eiOffset, eiField);
+                        indices.push(index);
+                    }
+                    callback(indices);
+                });
+            });
+        }
+    };
+
+    function BBIExtraIndex(bbi, type, fieldCount, offset, field) {
+        this.bbi = bbi;
+        this.type = type;
+        this.fieldCount = fieldCount;
+        this.offset = offset;
+        this.field = field;
+    }
+
+    BBIExtraIndex.prototype.lookup = function(name, callback) {
+        var thisB = this;
+
+        this.bbi.data.slice(this.offset, 32).fetch(function(bpt) {
+            var ba = new Uint8Array(bpt);
+            var sa = new Int16Array(bpt);
+            var la = new Int32Array(bpt);
+            var bptMagic = la[0];
+            var blockSize = la[1];
+            var keySize = la[2];
+            var valSize = la[3];
+            var itemCount = bwg_readOffset(ba, 16);
+            var rootNodeOffset = 32;
+
+            function bptReadNode(nodeOffset) {
+                thisB.bbi.data.slice(nodeOffset, 4 + (blockSize * (keySize + valSize))).fetch(function(node) {
+                    var ba = new Uint8Array(node);
+                    var sa = new Uint16Array(node);
+                    var la = new Uint32Array(node);
+
+                    var nodeType = ba[0];
+                    var cnt = sa[1];
+
+                    var offset = 4;
+                    if (nodeType == 0) {
+                        var lastChildOffset = null;
+                        for (var n = 0; n < cnt; ++n) {
+                            var key = '';
+                            for (var ki = 0; ki < keySize; ++ki) {
+                                var charCode = ba[offset++];
+                                if (charCode != 0) {
+                                    key += String.fromCharCode(charCode);
+                                }
+                            }
+
+                            var childOffset = bwg_readOffset(ba, offset);
+                            offset += 8;
+
+                            if (name.localeCompare(key) < 0 && lastChildOffset) {
+                                bptReadNode(lastChildOffset);
+                                return;
+                            }
+                            lastChildOffset = childOffset;
+                        }
+                        bptReadNode(lastChildOffset);
+                    } else {
+                        for (var n = 0; n < cnt; ++n) {
+                            var key = '';
+                            for (var ki = 0; ki < keySize; ++ki) {
+                                var charCode = ba[offset++];
+                                if (charCode != 0) {
+                                    key += String.fromCharCode(charCode);
+                                }
+                            }
+
+                            // Specific for EI case.
+                            if (key == name) {
+                                var start = bwg_readOffset(ba, offset);
+                                var length = readInt(ba, offset + 8);
+
+                                return thisB.bbi.getUnzoomedView().fetchFeatures(
+                                    function(chr, min, max, toks) {
+                                        if (toks && toks.length > thisB.field - 3) {
+                                            return toks[thisB.field - 3] == name;
+                                        }
+                                    },
+                                    [{offset: start, size: length}],
+                                    callback);
+                            }
+                            offset += valSize;
+                        }
+                        return callback([]);
+                    }
+                });
+            }
+
+            bptReadNode(thisB.offset + rootNodeOffset);
+        });
+    };
+
+    return {
+        makeBwg: makeBwg
+    };
+});
diff --git a/client/galaxy/scripts/libs/bbi/jquery-ajax-native.js b/client/galaxy/scripts/libs/bbi/jquery-ajax-native.js
new file mode 100644
index 0000000..5570270
--- /dev/null
+++ b/client/galaxy/scripts/libs/bbi/jquery-ajax-native.js
@@ -0,0 +1,157 @@
+//     jQuery Ajax Native Plugin
+
+//     (c) 2015 Tarik Zakaria Benmerar, Acigna Inc.
+//      jQuery Ajax Native Plugin may be freely distributed under the MIT license.
+(function (root, factory) {
+    if (typeof define === 'function' && define.amd) {
+        // AMD. Register as an anonymous module.
+        define(['jquery'], factory);
+    } else if (typeof exports === 'object') {
+        // Node. Does not work with strict CommonJS, but
+        // only CommonJS-like environments that support module.exports,
+        // like Node.
+        module.exports = factory(require('jquery'));
+    } else {
+        // Browser globals (root is window)
+        factory(root.jQuery);
+    }
+}(this, function ( $ ) {
+    var ajaxSettings = $.ajaxSettings;
+    ajaxSettings.responseFields.native = 'responseNative';
+    ajaxSettings.converters[ '* native' ] = true;
+    var support = {},
+        xhrId = 0,
+        xhrSuccessStatus = {
+            // file protocol always yields status code 0, assume 200
+            0: 200,
+            // Support: IE9
+            // #1450: sometimes IE returns 1223 when it should be 204
+            1223: 204
+        },
+        xhrCallbacks = {},
+        // Use the local $ (the jQuery global may be absent under noConflict/AMD).
+        xhrSupported = $.ajaxSettings.xhr();
+    // Support: IE9
+    // Open requests must be manually aborted on unload (#5280)
+    if ( window.ActiveXObject ) {
+        $( window ).on( "unload", function() {
+            for ( var key in xhrCallbacks ) {
+                xhrCallbacks[ key ]();
+            }
+        });
+    }
+    support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported );
+    support.ajax = xhrSupported = !!xhrSupported;
+
+    // Native Data Type Ajax Transport
+    $.ajaxTransport('native', function ( options ) {
+        var callback;
+        // Cross domain only allowed if supported through XMLHttpRequest
+        if ( support.cors || xhrSupported && !options.crossDomain ) {
+            return {
+                send: function( headers, complete ) {
+                    var i,
+                        xhr = options.xhr(),
+                        id = ++xhrId,
+                        responses = {};
+
+                    xhr.open( options.type, options.url, options.async, options.username, options.password );
+
+                    // Apply custom fields if provided
+                    if ( options.xhrFields ) {
+                        for ( i in options.xhrFields ) {
+                            xhr[ i ] = options.xhrFields[ i ];
+                        }
+                    }
+
+                    // Override mime type if needed
+                    if ( options.mimeType && xhr.overrideMimeType ) {
+                        xhr.overrideMimeType( options.mimeType );
+                    }
+
+                    // X-Requested-With header
+                    // For cross-domain requests, seeing as conditions for a preflight are
+                    // akin to a jigsaw puzzle, we simply never set it to be sure.
+                    // (it can always be set on a per-request basis or even using ajaxSetup)
+                    // For same-domain requests, won't change header if already provided.
+                    if ( !options.crossDomain && !headers["X-Requested-With"] ) {
+                        headers["X-Requested-With"] = "XMLHttpRequest";
+                    }
+
+                    // Set headers
+                    for ( i in headers ) {
+                        xhr.setRequestHeader( i, headers[ i ] );
+                    }
+
+                    // Callback
+                    callback = function( type ) {
+                        return function() {
+                            if ( callback ) {
+                                delete xhrCallbacks[ id ];
+                                callback = xhr.onload = xhr.onerror = null;
+
+                                if ( type === "abort" ) {
+                                    xhr.abort();
+                                } else if ( type === "error" ) {
+                                    complete(
+                                        // file: protocol always yields status 0; see #8605, #14207
+                                        xhr.status,
+                                        xhr.statusText
+                                    );
+                                } else {
+                                    // The native response associated with the responseType
+                                    // Stored in the xhr.response attribute (XHR2 Spec)
+                                    if ( xhr.response ) {
+                                        responses.native = xhr.response;
+                                    }
+
+                                    complete(
+                                        xhrSuccessStatus[ xhr.status ] || xhr.status,
+                                        xhr.statusText,
+                                        responses,
+                                        xhr.getAllResponseHeaders()
+                                    );
+                                }
+                            }
+                        };
+                    };
+
+                    // Listen to events
+                    xhr.onload = callback();
+                    xhr.onerror = callback("error");
+
+                    // Create the abort callback
+                    callback = xhrCallbacks[ id ] = callback("abort");
+
+                    try {
+                        // Do send the request (this may raise an exception)
+                        xhr.send( options.hasContent && options.data || null );
+                    } catch ( e ) {
+                        // #14683: Only rethrow if this hasn't been notified as an error yet
+                        if ( callback ) {
+                            throw e;
+                        }
+                    }
+                },
+
+                abort: function() {
+                    if ( callback ) {
+                        callback();
+                    }
+                }
+            };
+        }
+    });
+
+
+    // $.getNative wrapper
+    $.getNative = function ( url, callback ) {
+        return $.ajax({
+            dataType: 'native',
+            url: url,
+            xhrFields: {
+                responseType: 'arraybuffer'
+            },
+            success: callback
+        });
+    };
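+
+    // Usage sketch (illustrative; '/data/track.bw' is a placeholder URL):
+    //
+    //   $.getNative('/data/track.bw', function (buf) {
+    //       var bytes = new Uint8Array(buf);   // buf is the raw ArrayBuffer
+    //       console.log('fetched ' + bytes.length + ' bytes');
+    //   });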
+}));
diff --git a/client/galaxy/scripts/libs/bbi/jszlib.js b/client/galaxy/scripts/libs/bbi/jszlib.js
new file mode 100644
index 0000000..1274c47
--- /dev/null
+++ b/client/galaxy/scripts/libs/bbi/jszlib.js
@@ -0,0 +1,2163 @@
+/* -*- mode: javascript; c-basic-offset: 4; indent-tabs-mode: nil -*- */
+
+//
+// Javascript ZLib
+// By Thomas Down 2010-2011
+//
+// Based very heavily on portions of jzlib (by ymnk at jcraft.com), who in
+// turn credits Jean-loup Gailly and Mark Adler for the original zlib code.
+//
+// inflate.js: ZLib inflate code
+//
+
+//
+// Shared constants
+//
+
+define([],function() {
+
+var MAX_WBITS=15; // 32K LZ77 window
+var DEF_WBITS=MAX_WBITS;
+var MAX_MEM_LEVEL=9;
+var MANY=1440;
+var BMAX = 15;
+
+// preset dictionary flag in zlib header
+var PRESET_DICT=0x20;
+
+var Z_NO_FLUSH=0;
+var Z_PARTIAL_FLUSH=1;
+var Z_SYNC_FLUSH=2;
+var Z_FULL_FLUSH=3;
+var Z_FINISH=4;
+
+var Z_DEFLATED=8;
+
+var Z_OK=0;
+var Z_STREAM_END=1;
+var Z_NEED_DICT=2;
+var Z_ERRNO=-1;
+var Z_STREAM_ERROR=-2;
+var Z_DATA_ERROR=-3;
+var Z_MEM_ERROR=-4;
+var Z_BUF_ERROR=-5;
+var Z_VERSION_ERROR=-6;
+
+var METHOD=0;   // waiting for method byte
+var FLAG=1;     // waiting for flag byte
+var DICT4=2;    // four dictionary check bytes to go
+var DICT3=3;    // three dictionary check bytes to go
+var DICT2=4;    // two dictionary check bytes to go
+var DICT1=5;    // one dictionary check byte to go
+var DICT0=6;    // waiting for inflateSetDictionary
+var BLOCKS=7;   // decompressing blocks
+var CHECK4=8;   // four check bytes to go
+var CHECK3=9;   // three check bytes to go
+var CHECK2=10;  // two check bytes to go
+var CHECK1=11;  // one check byte to go
+var DONE=12;    // finished check, done
+var BAD=13;     // got an error--stay here
+
+var inflate_mask = [0x00000000, 0x00000001, 0x00000003, 0x00000007, 0x0000000f, 0x0000001f, 0x0000003f, 0x0000007f, 0x000000ff, 0x000001ff, 0x000003ff, 0x000007ff, 0x00000fff, 0x00001fff, 0x00003fff, 0x00007fff, 0x0000ffff];
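+// e.g. (b & inflate_mask[5]) keeps only the low five bits of the bit buffer.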
+
+var IB_TYPE=0;  // get type bits (3, including end bit)
+var IB_LENS=1;  // get lengths for stored
+var IB_STORED=2;// processing stored block
+var IB_TABLE=3; // get table lengths
+var IB_BTREE=4; // get bit lengths tree for a dynamic block
+var IB_DTREE=5; // get length, distance trees for a dynamic block
+var IB_CODES=6; // processing fixed or dynamic block
+var IB_DRY=7;   // output remaining window bytes
+var IB_DONE=8;  // finished last block, done
+var IB_BAD=9;   // got a data error--stuck here
+
+var fixed_bl = 9;
+var fixed_bd = 5;
+
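+// Precomputed decoding tables for the fixed Huffman codes of RFC 1951,
+// section 3.2.6; entries are (operation, bits consumed, value) triples.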
+var fixed_tl = [
+    96,7,256, 0,8,80, 0,8,16, 84,8,115,
+    82,7,31, 0,8,112, 0,8,48, 0,9,192,
+    80,7,10, 0,8,96, 0,8,32, 0,9,160,
+    0,8,0, 0,8,128, 0,8,64, 0,9,224,
+    80,7,6, 0,8,88, 0,8,24, 0,9,144,
+    83,7,59, 0,8,120, 0,8,56, 0,9,208,
+    81,7,17, 0,8,104, 0,8,40, 0,9,176,
+    0,8,8, 0,8,136, 0,8,72, 0,9,240,
+    80,7,4, 0,8,84, 0,8,20, 85,8,227,
+    83,7,43, 0,8,116, 0,8,52, 0,9,200,
+    81,7,13, 0,8,100, 0,8,36, 0,9,168,
+    0,8,4, 0,8,132, 0,8,68, 0,9,232,
+    80,7,8, 0,8,92, 0,8,28, 0,9,152,
+    84,7,83, 0,8,124, 0,8,60, 0,9,216,
+    82,7,23, 0,8,108, 0,8,44, 0,9,184,
+    0,8,12, 0,8,140, 0,8,76, 0,9,248,
+    80,7,3, 0,8,82, 0,8,18, 85,8,163,
+    83,7,35, 0,8,114, 0,8,50, 0,9,196,
+    81,7,11, 0,8,98, 0,8,34, 0,9,164,
+    0,8,2, 0,8,130, 0,8,66, 0,9,228,
+    80,7,7, 0,8,90, 0,8,26, 0,9,148,
+    84,7,67, 0,8,122, 0,8,58, 0,9,212,
+    82,7,19, 0,8,106, 0,8,42, 0,9,180,
+    0,8,10, 0,8,138, 0,8,74, 0,9,244,
+    80,7,5, 0,8,86, 0,8,22, 192,8,0,
+    83,7,51, 0,8,118, 0,8,54, 0,9,204,
+    81,7,15, 0,8,102, 0,8,38, 0,9,172,
+    0,8,6, 0,8,134, 0,8,70, 0,9,236,
+    80,7,9, 0,8,94, 0,8,30, 0,9,156,
+    84,7,99, 0,8,126, 0,8,62, 0,9,220,
+    82,7,27, 0,8,110, 0,8,46, 0,9,188,
+    0,8,14, 0,8,142, 0,8,78, 0,9,252,
+    96,7,256, 0,8,81, 0,8,17, 85,8,131,
+    82,7,31, 0,8,113, 0,8,49, 0,9,194,
+    80,7,10, 0,8,97, 0,8,33, 0,9,162,
+    0,8,1, 0,8,129, 0,8,65, 0,9,226,
+    80,7,6, 0,8,89, 0,8,25, 0,9,146,
+    83,7,59, 0,8,121, 0,8,57, 0,9,210,
+    81,7,17, 0,8,105, 0,8,41, 0,9,178,
+    0,8,9, 0,8,137, 0,8,73, 0,9,242,
+    80,7,4, 0,8,85, 0,8,21, 80,8,258,
+    83,7,43, 0,8,117, 0,8,53, 0,9,202,
+    81,7,13, 0,8,101, 0,8,37, 0,9,170,
+    0,8,5, 0,8,133, 0,8,69, 0,9,234,
+    80,7,8, 0,8,93, 0,8,29, 0,9,154,
+    84,7,83, 0,8,125, 0,8,61, 0,9,218,
+    82,7,23, 0,8,109, 0,8,45, 0,9,186,
+    0,8,13, 0,8,141, 0,8,77, 0,9,250,
+    80,7,3, 0,8,83, 0,8,19, 85,8,195,
+    83,7,35, 0,8,115, 0,8,51, 0,9,198,
+    81,7,11, 0,8,99, 0,8,35, 0,9,166,
+    0,8,3, 0,8,131, 0,8,67, 0,9,230,
+    80,7,7, 0,8,91, 0,8,27, 0,9,150,
+    84,7,67, 0,8,123, 0,8,59, 0,9,214,
+    82,7,19, 0,8,107, 0,8,43, 0,9,182,
+    0,8,11, 0,8,139, 0,8,75, 0,9,246,
+    80,7,5, 0,8,87, 0,8,23, 192,8,0,
+    83,7,51, 0,8,119, 0,8,55, 0,9,206,
+    81,7,15, 0,8,103, 0,8,39, 0,9,174,
+    0,8,7, 0,8,135, 0,8,71, 0,9,238,
+    80,7,9, 0,8,95, 0,8,31, 0,9,158,
+    84,7,99, 0,8,127, 0,8,63, 0,9,222,
+    82,7,27, 0,8,111, 0,8,47, 0,9,190,
+    0,8,15, 0,8,143, 0,8,79, 0,9,254,
+    96,7,256, 0,8,80, 0,8,16, 84,8,115,
+    82,7,31, 0,8,112, 0,8,48, 0,9,193,
+
+    80,7,10, 0,8,96, 0,8,32, 0,9,161,
+    0,8,0, 0,8,128, 0,8,64, 0,9,225,
+    80,7,6, 0,8,88, 0,8,24, 0,9,145,
+    83,7,59, 0,8,120, 0,8,56, 0,9,209,
+    81,7,17, 0,8,104, 0,8,40, 0,9,177,
+    0,8,8, 0,8,136, 0,8,72, 0,9,241,
+    80,7,4, 0,8,84, 0,8,20, 85,8,227,
+    83,7,43, 0,8,116, 0,8,52, 0,9,201,
+    81,7,13, 0,8,100, 0,8,36, 0,9,169,
+    0,8,4, 0,8,132, 0,8,68, 0,9,233,
+    80,7,8, 0,8,92, 0,8,28, 0,9,153,
+    84,7,83, 0,8,124, 0,8,60, 0,9,217,
+    82,7,23, 0,8,108, 0,8,44, 0,9,185,
+    0,8,12, 0,8,140, 0,8,76, 0,9,249,
+    80,7,3, 0,8,82, 0,8,18, 85,8,163,
+    83,7,35, 0,8,114, 0,8,50, 0,9,197,
+    81,7,11, 0,8,98, 0,8,34, 0,9,165,
+    0,8,2, 0,8,130, 0,8,66, 0,9,229,
+    80,7,7, 0,8,90, 0,8,26, 0,9,149,
+    84,7,67, 0,8,122, 0,8,58, 0,9,213,
+    82,7,19, 0,8,106, 0,8,42, 0,9,181,
+    0,8,10, 0,8,138, 0,8,74, 0,9,245,
+    80,7,5, 0,8,86, 0,8,22, 192,8,0,
+    83,7,51, 0,8,118, 0,8,54, 0,9,205,
+    81,7,15, 0,8,102, 0,8,38, 0,9,173,
+    0,8,6, 0,8,134, 0,8,70, 0,9,237,
+    80,7,9, 0,8,94, 0,8,30, 0,9,157,
+    84,7,99, 0,8,126, 0,8,62, 0,9,221,
+    82,7,27, 0,8,110, 0,8,46, 0,9,189,
+    0,8,14, 0,8,142, 0,8,78, 0,9,253,
+    96,7,256, 0,8,81, 0,8,17, 85,8,131,
+    82,7,31, 0,8,113, 0,8,49, 0,9,195,
+    80,7,10, 0,8,97, 0,8,33, 0,9,163,
+    0,8,1, 0,8,129, 0,8,65, 0,9,227,
+    80,7,6, 0,8,89, 0,8,25, 0,9,147,
+    83,7,59, 0,8,121, 0,8,57, 0,9,211,
+    81,7,17, 0,8,105, 0,8,41, 0,9,179,
+    0,8,9, 0,8,137, 0,8,73, 0,9,243,
+    80,7,4, 0,8,85, 0,8,21, 80,8,258,
+    83,7,43, 0,8,117, 0,8,53, 0,9,203,
+    81,7,13, 0,8,101, 0,8,37, 0,9,171,
+    0,8,5, 0,8,133, 0,8,69, 0,9,235,
+    80,7,8, 0,8,93, 0,8,29, 0,9,155,
+    84,7,83, 0,8,125, 0,8,61, 0,9,219,
+    82,7,23, 0,8,109, 0,8,45, 0,9,187,
+    0,8,13, 0,8,141, 0,8,77, 0,9,251,
+    80,7,3, 0,8,83, 0,8,19, 85,8,195,
+    83,7,35, 0,8,115, 0,8,51, 0,9,199,
+    81,7,11, 0,8,99, 0,8,35, 0,9,167,
+    0,8,3, 0,8,131, 0,8,67, 0,9,231,
+    80,7,7, 0,8,91, 0,8,27, 0,9,151,
+    84,7,67, 0,8,123, 0,8,59, 0,9,215,
+    82,7,19, 0,8,107, 0,8,43, 0,9,183,
+    0,8,11, 0,8,139, 0,8,75, 0,9,247,
+    80,7,5, 0,8,87, 0,8,23, 192,8,0,
+    83,7,51, 0,8,119, 0,8,55, 0,9,207,
+    81,7,15, 0,8,103, 0,8,39, 0,9,175,
+    0,8,7, 0,8,135, 0,8,71, 0,9,239,
+    80,7,9, 0,8,95, 0,8,31, 0,9,159,
+    84,7,99, 0,8,127, 0,8,63, 0,9,223,
+    82,7,27, 0,8,111, 0,8,47, 0,9,191,
+    0,8,15, 0,8,143, 0,8,79, 0,9,255
+];
+var fixed_td = [
+    80,5,1, 87,5,257, 83,5,17, 91,5,4097,
+    81,5,5, 89,5,1025, 85,5,65, 93,5,16385,
+    80,5,3, 88,5,513, 84,5,33, 92,5,8193,
+    82,5,9, 90,5,2049, 86,5,129, 192,5,24577,
+    80,5,2, 87,5,385, 83,5,25, 91,5,6145,
+    81,5,7, 89,5,1537, 85,5,97, 93,5,24577,
+    80,5,4, 88,5,769, 84,5,49, 92,5,12289,
+    82,5,13, 90,5,3073, 86,5,193, 192,5,24577
+];
+
+  // Tables for deflate from PKZIP's appnote.txt.
+  var cplens = [ // Copy lengths for literal codes 257..285
+        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
+        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0
+  ];
+
+  // see note #13 above about 258
+  var cplext = [ // Extra bits for literal codes 257..285
+        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
+        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112  // 112==invalid
+  ];
+
+ var cpdist = [ // Copy offsets for distance codes 0..29
+        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
+        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
+        8193, 12289, 16385, 24577
+  ];
+
+  var cpdext = [ // Extra bits for distance codes
+        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
+        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
+        12, 12, 13, 13];
+
+//
+// ZStream.java
+//
+
+function ZStream() {
+}
+
+
+ZStream.prototype.inflateInit = function(w, nowrap) {
+    if (!w) {
+	w = DEF_WBITS;
+    }
+    if (nowrap) {
+	// note: this forces the undocumented nowrap option off before it is
+	// handed down to Inflate
+	nowrap = false;
+    }
+    this.istate = new Inflate();
+    return this.istate.inflateInit(this, nowrap?-w:w);
+}
+
+ZStream.prototype.inflate = function(f) {
+    if(this.istate==null) return Z_STREAM_ERROR;
+    return this.istate.inflate(this, f);
+}
+
+ZStream.prototype.inflateEnd = function(){
+    if(this.istate==null) return Z_STREAM_ERROR;
+    var ret=this.istate.inflateEnd(this);
+    this.istate = null;
+    return ret;
+}
+ZStream.prototype.inflateSync = function(){
+    if(this.istate == null) return Z_STREAM_ERROR;
+    return this.istate.inflateSync(this);
+}
+ZStream.prototype.inflateSetDictionary = function(dictionary, dictLength){
+    if(this.istate == null) return Z_STREAM_ERROR;
+    return this.istate.inflateSetDictionary(this, dictionary, dictLength);
+}
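+
+// Typical decompression loop (a sketch; the buffer names are illustrative):
+//
+//   var z = new ZStream();
+//   z.inflateInit();
+//   z.next_in = compressed; z.next_in_index = 0; z.avail_in = compressed.length;
+//   z.next_out = new Uint8Array(65536); z.next_out_index = 0; z.avail_out = 65536;
+//   var status = z.inflate(Z_NO_FLUSH);   // Z_OK, or Z_STREAM_END when finished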
+
+/*
+
+  public int deflateInit(int level){
+    return deflateInit(level, MAX_WBITS);
+  }
+  public int deflateInit(int level, boolean nowrap){
+    return deflateInit(level, MAX_WBITS, nowrap);
+  }
+  public int deflateInit(int level, int bits){
+    return deflateInit(level, bits, false);
+  }
+  public int deflateInit(int level, int bits, boolean nowrap){
+    dstate=new Deflate();
+    return dstate.deflateInit(this, level, nowrap?-bits:bits);
+  }
+  public int deflate(int flush){
+    if(dstate==null){
+      return Z_STREAM_ERROR;
+    }
+    return dstate.deflate(this, flush);
+  }
+  public int deflateEnd(){
+    if(dstate==null) return Z_STREAM_ERROR;
+    int ret=dstate.deflateEnd();
+    dstate=null;
+    return ret;
+  }
+  public int deflateParams(int level, int strategy){
+    if(dstate==null) return Z_STREAM_ERROR;
+    return dstate.deflateParams(this, level, strategy);
+  }
+  public int deflateSetDictionary (byte[] dictionary, int dictLength){
+    if(dstate == null)
+      return Z_STREAM_ERROR;
+    return dstate.deflateSetDictionary(this, dictionary, dictLength);
+  }
+
+*/
+
+/*
+  // Flush as much pending output as possible. All deflate() output goes
+  // through this function so some applications may wish to modify it
+  // to avoid allocating a large strm->next_out buffer and copying into it.
+  // (See also read_buf()).
+  void flush_pending(){
+    int len=dstate.pending;
+
+    if(len>avail_out) len=avail_out;
+    if(len==0) return;
+
+    if(dstate.pending_buf.length<=dstate.pending_out ||
+       next_out.length<=next_out_index ||
+       dstate.pending_buf.length<(dstate.pending_out+len) ||
+       next_out.length<(next_out_index+len)){
+      System.out.println(dstate.pending_buf.length+", "+dstate.pending_out+
+			 ", "+next_out.length+", "+next_out_index+", "+len);
+      System.out.println("avail_out="+avail_out);
+    }
+
+    System.arraycopy(dstate.pending_buf, dstate.pending_out,
+		     next_out, next_out_index, len);
+
+    next_out_index+=len;
+    dstate.pending_out+=len;
+    total_out+=len;
+    avail_out-=len;
+    dstate.pending-=len;
+    if(dstate.pending==0){
+      dstate.pending_out=0;
+    }
+  }
+
+  // Read a new buffer from the current input stream, update the adler32
+  // and total number of bytes read.  All deflate() input goes through
+  // this function so some applications may wish to modify it to avoid
+  // allocating a large strm->next_in buffer and copying from it.
+  // (See also flush_pending()).
+  int read_buf(byte[] buf, int start, int size) {
+    int len=avail_in;
+
+    if(len>size) len=size;
+    if(len==0) return 0;
+
+    avail_in-=len;
+
+    if(dstate.noheader==0) {
+      adler=_adler.adler32(adler, next_in, next_in_index, len);
+    }
+    System.arraycopy(next_in, next_in_index, buf, start, len);
+    next_in_index  += len;
+    total_in += len;
+    return len;
+  }
+
+  public void free(){
+    next_in=null;
+    next_out=null;
+    msg=null;
+    _adler=null;
+  }
+}
+*/
+
+
+//
+// Inflate.java
+//
+
+function Inflate() {
+    this.was = [0];
+}
+
+Inflate.prototype.inflateReset = function(z) {
+    if(z == null || z.istate == null) return Z_STREAM_ERROR;
+
+    z.total_in = z.total_out = 0;
+    z.msg = null;
+    z.istate.mode = z.istate.nowrap!=0 ? BLOCKS : METHOD;
+    z.istate.blocks.reset(z, null);
+    return Z_OK;
+}
+
+Inflate.prototype.inflateEnd = function(z){
+    if(this.blocks != null)
+      this.blocks.free(z);
+    this.blocks=null;
+    return Z_OK;
+}
+
+Inflate.prototype.inflateInit = function(z, w){
+    z.msg = null;
+    this.blocks = null;
+
+    // handle undocumented nowrap option (no zlib header or check)
+    var nowrap = 0;
+    if(w < 0){
+      w = - w;
+      nowrap = 1;
+    }
+
+    // set window size
+    if(w<8 ||w>15){
+      this.inflateEnd(z);
+      return Z_STREAM_ERROR;
+    }
+    this.wbits=w;
+
+    z.istate.blocks=new InfBlocks(z,
+				  z.istate.nowrap!=0 ? null : this,
+				  1<<w);
+
+    // reset state
+    this.inflateReset(z);
+    return Z_OK;
+  }
+
+Inflate.prototype.inflate = function(z, f){
+    var r, b;
+
+    if(z == null || z.istate == null || z.next_in == null)
+      return Z_STREAM_ERROR;
+    f = f == Z_FINISH ? Z_BUF_ERROR : Z_OK;
+    r = Z_BUF_ERROR;
+    while (true){
+      switch (z.istate.mode){
+      case METHOD:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        if(((z.istate.method = z.next_in[z.next_in_index++])&0xf)!=Z_DEFLATED){
+          z.istate.mode = BAD;
+          z.msg="unknown compression method";
+          z.istate.marker = 5;       // can't try inflateSync
+          break;
+        }
+        if((z.istate.method>>4)+8>z.istate.wbits){
+          z.istate.mode = BAD;
+          z.msg="invalid window size";
+          z.istate.marker = 5;       // can't try inflateSync
+          break;
+        }
+        z.istate.mode=FLAG;
+      case FLAG:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        b = (z.next_in[z.next_in_index++])&0xff;
+
+        if((((z.istate.method << 8)+b) % 31)!=0){
+          z.istate.mode = BAD;
+          z.msg = "incorrect header check";
+          z.istate.marker = 5;       // can't try inflateSync
+          break;
+        }
+
+        if((b&PRESET_DICT)==0){
+          z.istate.mode = BLOCKS;
+          break;
+        }
+        z.istate.mode = DICT4;
+      case DICT4:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        z.istate.need=((z.next_in[z.next_in_index++]&0xff)<<24)&0xff000000;
+        z.istate.mode=DICT3;
+      case DICT3:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        z.istate.need+=((z.next_in[z.next_in_index++]&0xff)<<16)&0xff0000;
+        z.istate.mode=DICT2;
+      case DICT2:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        z.istate.need+=((z.next_in[z.next_in_index++]&0xff)<<8)&0xff00;
+        z.istate.mode=DICT1;
+      case DICT1:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        z.istate.need += (z.next_in[z.next_in_index++]&0xff);
+        z.adler = z.istate.need;
+        z.istate.mode = DICT0;
+        return Z_NEED_DICT;
+      case DICT0:
+        z.istate.mode = BAD;
+        z.msg = "need dictionary";
+        z.istate.marker = 0;       // can try inflateSync
+        return Z_STREAM_ERROR;
+      case BLOCKS:
+
+        r = z.istate.blocks.proc(z, r);
+        if(r == Z_DATA_ERROR){
+          z.istate.mode = BAD;
+          z.istate.marker = 0;     // can try inflateSync
+          break;
+        }
+        if(r == Z_OK){
+          r = f;
+        }
+        if(r != Z_STREAM_END){
+          return r;
+        }
+        r = f;
+        z.istate.blocks.reset(z, z.istate.was);
+        if(z.istate.nowrap!=0){
+          z.istate.mode=DONE;
+          break;
+        }
+        z.istate.mode=CHECK4;
+      case CHECK4:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        z.istate.need=((z.next_in[z.next_in_index++]&0xff)<<24)&0xff000000;
+        z.istate.mode=CHECK3;
+      case CHECK3:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        z.istate.need+=((z.next_in[z.next_in_index++]&0xff)<<16)&0xff0000;
+        z.istate.mode = CHECK2;
+      case CHECK2:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        z.istate.need+=((z.next_in[z.next_in_index++]&0xff)<<8)&0xff00;
+        z.istate.mode = CHECK1;
+      case CHECK1:
+
+        if(z.avail_in==0)return r;r=f;
+
+        z.avail_in--; z.total_in++;
+        z.istate.need+=(z.next_in[z.next_in_index++]&0xff);
+
+        if(((z.istate.was[0])) != ((z.istate.need))){
+          z.istate.mode = BAD;
+          z.msg = "incorrect data check";
+          z.istate.marker = 5;       // can't try inflateSync
+          break;
+        }
+
+        z.istate.mode = DONE;
+      case DONE:
+        return Z_STREAM_END;
+      case BAD:
+        return Z_DATA_ERROR;
+      default:
+        return Z_STREAM_ERROR;
+      }
+    }
+  }
+
+
+Inflate.prototype.inflateSetDictionary = function(z,  dictionary, dictLength) {
+    var index=0;
+    var length = dictLength;
+    if(z==null || z.istate == null|| z.istate.mode != DICT0)
+      return Z_STREAM_ERROR;
+
+    if(adler32(1, dictionary, 0, dictLength)!=z.adler){
+      return Z_DATA_ERROR;
+    }
+
+    z.adler = adler32(0, null, 0, 0);
+
+    if(length >= (1<<z.istate.wbits)){
+      length = (1<<z.istate.wbits)-1;
+      index=dictLength - length;
+    }
+    z.istate.blocks.set_dictionary(dictionary, index, length);
+    z.istate.mode = BLOCKS;
+    return Z_OK;
+  }
+
+//  static private byte[] mark = {(byte)0, (byte)0, (byte)0xff, (byte)0xff};
+var mark = [0, 0, 255, 255];
+
+Inflate.prototype.inflateSync = function(z){
+    var n;       // number of bytes to look at
+    var p;       // pointer to bytes
+    var m;       // number of marker bytes found in a row
+    var r, w;   // temporaries to save total_in and total_out
+
+    // set up
+    if(z == null || z.istate == null)
+      return Z_STREAM_ERROR;
+    if(z.istate.mode != BAD){
+      z.istate.mode = BAD;
+      z.istate.marker = 0;
+    }
+    if((n=z.avail_in)==0)
+      return Z_BUF_ERROR;
+    p=z.next_in_index;
+    m=z.istate.marker;
+
+    // search
+    while (n!=0 && m < 4){
+      if(z.next_in[p] == mark[m]){
+        m++;
+      }
+      else if(z.next_in[p]!=0){
+        m = 0;
+      }
+      else{
+        m = 4 - m;
+      }
+      p++; n--;
+    }
+
+    // restore
+    z.total_in += p-z.next_in_index;
+    z.next_in_index = p;
+    z.avail_in = n;
+    z.istate.marker = m;
+
+    // return no joy or set up to restart on a new block
+    if(m != 4){
+      return Z_DATA_ERROR;
+    }
+    r=z.total_in;  w=z.total_out;
+    this.inflateReset(z);
+    z.total_in=r;  z.total_out = w;
+    z.istate.mode = BLOCKS;
+    return Z_OK;
+}
+
+  // Returns true if inflate is currently at the end of a block generated
+  // by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP
+  // implementation to provide an additional safety check. PPP uses Z_SYNC_FLUSH
+  // but removes the length bytes of the resulting empty stored block. When
+  // decompressing, PPP checks that at the end of input packet, inflate is
+  // waiting for these length bytes.
+Inflate.prototype.inflateSyncPoint = function(z){
+    if(z == null || z.istate == null || z.istate.blocks == null)
+      return Z_STREAM_ERROR;
+    return z.istate.blocks.sync_point();
+}
+
+
+//
+// InfBlocks.java
+//
+
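+// Order in which the code-length code lengths appear in a dynamic block
+// header (RFC 1951, section 3.2.7).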
+var INFBLOCKS_BORDER = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
+
+function InfBlocks(z, checkfn, w) {
+    this.hufts=new Int32Array(MANY*3);
+    this.window=new Uint8Array(w);
+    this.end=w;
+    this.checkfn = checkfn;
+    this.mode = IB_TYPE;
+    this.reset(z, null);
+
+    this.left = 0;            // if STORED, bytes left to copy
+
+    this.table = 0;           // table lengths (14 bits)
+    this.index = 0;           // index into blens (or border)
+    this.blens = null;         // bit lengths of codes
+    this.bb=new Int32Array(1); // bit length tree depth
+    this.tb=new Int32Array(1); // bit length decoding tree
+
+    this.codes = new InfCodes();
+
+    this.last = 0;            // true if this block is the last block
+
+  // mode independent information
+    this.bitk = 0;            // bits in bit buffer
+    this.bitb = 0;            // bit buffer
+    this.read = 0;            // window read pointer
+    this.write = 0;           // window write pointer
+    this.check = 0;          // check on output
+
+    this.inftree=new InfTree();
+}
+
+
+
+
+InfBlocks.prototype.reset = function(z, c){
+    if(c) c[0]=this.check;
+    if(this.mode==IB_CODES){
+      this.codes.free(z);
+    }
+    this.mode=IB_TYPE;
+    this.bitk=0;
+    this.bitb=0;
+    this.read=this.write=0;
+
+    if(this.checkfn)
+      z.adler=this.check=adler32(0, null, 0, 0);
+  }
+
+ InfBlocks.prototype.proc = function(z, r){
+    var t;              // temporary storage
+    var b;              // bit buffer
+    var k;              // bits in bit buffer
+    var p;              // input data pointer
+    var n;              // bytes available there
+    var q;              // output window write pointer
+    var m;              // bytes to end of window or read pointer
+
+    // copy input/output information to locals (UPDATE macro restores)
+    {p=z.next_in_index;n=z.avail_in;b=this.bitb;k=this.bitk;}
+    {q=this.write;m=(q<this.read ? this.read-q-1 : this.end-q);}
+
+    // process input based on current state
+    while(true){
+      switch (this.mode){
+      case IB_TYPE:
+
+	while(k<(3)){
+	  if(n!=0){
+	    r=Z_OK;
+	  }
+	  else{
+	    this.bitb=b; this.bitk=k;
+	    z.avail_in=n;
+	    z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	    this.write=q;
+	    return this.inflate_flush(z,r);
+	  };
+	  n--;
+	  b|=(z.next_in[p++]&0xff)<<k;
+	  k+=8;
+	}
+	t = (b & 7);
+	this.last = t & 1;
+
+	switch (t >>> 1){
+        case 0:                         // stored
+          {b>>>=(3);k-=(3);}
+          t = k & 7;                    // go to byte boundary
+
+          {b>>>=(t);k-=(t);}
+          this.mode = IB_LENS;                  // get length of stored block
+          break;
+        case 1:                         // fixed
+          {
+              var bl=new Int32Array(1);
+	      var bd=new Int32Array(1);
+              var tl=[];
+	      var td=[];
+
+	      inflate_trees_fixed(bl, bd, tl, td, z);
+              this.codes.init(bl[0], bd[0], tl[0], 0, td[0], 0, z);
+          }
+
+          {b>>>=(3);k-=(3);}
+
+          this.mode = IB_CODES;
+          break;
+        case 2:                         // dynamic
+
+          {b>>>=(3);k-=(3);}
+
+          this.mode = IB_TABLE;
+          break;
+        case 3:                         // illegal
+
+          {b>>>=(3);k-=(3);}
+          this.mode = IB_BAD;
+          z.msg = "invalid block type";
+          r = Z_DATA_ERROR;
+
+	  this.bitb=b; this.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  this.write=q;
+	  return this.inflate_flush(z,r);
+	}
+	break;
+      case IB_LENS:
+	while(k<(32)){
+	  if(n!=0){
+	    r=Z_OK;
+	  }
+	  else{
+	    this.bitb=b; this.bitk=k;
+	    z.avail_in=n;
+	    z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	    this.write=q;
+	    return this.inflate_flush(z,r);
+	  };
+	  n--;
+	  b|=(z.next_in[p++]&0xff)<<k;
+	  k+=8;
+	}
+
+	if ((((~b) >>> 16) & 0xffff) != (b & 0xffff)){
+	  this.mode = IB_BAD;
+	  z.msg = "invalid stored block lengths";
+	  r = Z_DATA_ERROR;
+
+	  this.bitb=b; this.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  this.write=q;
+	  return this.inflate_flush(z,r);
+	}
+	this.left = (b & 0xffff);
+	b = k = 0;                       // dump bits
+	this.mode = this.left!=0 ? IB_STORED : (this.last!=0 ? IB_DRY : IB_TYPE);
+	break;
+      case IB_STORED:
+	if (n == 0){
+	  this.bitb=b; this.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  this.write=q;
+	  return this.inflate_flush(z,r);
+	}
+
+	if(m==0){
+	  if(q==this.end&&this.read!=0){
+	    q=0; m=(q<this.read ? this.read-q-1 : this.end-q);
+	  }
+	  if(m==0){
+	    this.write=q;
+	    r=this.inflate_flush(z,r);
+	    q=this.write; m = (q < this.read ? this.read-q-1 : this.end-q);
+	    if(q==this.end && this.read != 0){
+	      q=0; m = (q < this.read ? this.read-q-1 : this.end-q);
+	    }
+	    if(m==0){
+	      this.bitb=b; this.bitk=k;
+	      z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	      this.write=q;
+	      return this.inflate_flush(z,r);
+	    }
+	  }
+	}
+	r=Z_OK;
+
+	t = this.left;
+	if(t>n) t = n;
+	if(t>m) t = m;
+	arrayCopy(z.next_in, p, this.window, q, t);
+	p += t;  n -= t;
+	q += t;  m -= t;
+	if ((this.left -= t) != 0)
+	  break;
+	this.mode = (this.last != 0 ? IB_DRY : IB_TYPE);
+	break;
+      case IB_TABLE:
+
+	while(k<(14)){
+	  if(n!=0){
+	    r=Z_OK;
+	  }
+	  else{
+	    this.bitb=b; this.bitk=k;
+	    z.avail_in=n;
+	    z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	    this.write=q;
+	    return this.inflate_flush(z,r);
+	  };
+	  n--;
+	  b|=(z.next_in[p++]&0xff)<<k;
+	  k+=8;
+	}
+
+	this.table = t = (b & 0x3fff);
+	if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
+	  {
+	    this.mode = IB_BAD;
+	    z.msg = "too many length or distance symbols";
+	    r = Z_DATA_ERROR;
+
+	    this.bitb=b; this.bitk=k;
+	    z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	    this.write=q;
+	    return this.inflate_flush(z,r);
+	  }
+	t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
+	if(this.blens==null || this.blens.length<t){
+	    this.blens=new Int32Array(t);
+	}
+	else{
+	  for(var i=0; i<t; i++){
+              this.blens[i]=0;
+          }
+	}
+
+	{b>>>=(14);k-=(14);}
+
+	this.index = 0;
+	this.mode = IB_BTREE;
+      case IB_BTREE:
+	while (this.index < 4 + (this.table >>> 10)){
+	  while(k<(3)){
+	    if(n!=0){
+	      r=Z_OK;
+	    }
+	    else{
+	      this.bitb=b; this.bitk=k;
+	      z.avail_in=n;
+	      z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	      this.write=q;
+	      return this.inflate_flush(z,r);
+	    };
+	    n--;
+	    b|=(z.next_in[p++]&0xff)<<k;
+	    k+=8;
+	  }
+
+	  this.blens[INFBLOCKS_BORDER[this.index++]] = b&7;
+
+	  {b>>>=(3);k-=(3);}
+	}
+
+	while(this.index < 19){
+	  this.blens[INFBLOCKS_BORDER[this.index++]] = 0;
+	}
+
+	this.bb[0] = 7;
+	t = this.inftree.inflate_trees_bits(this.blens, this.bb, this.tb, this.hufts, z);
+	if (t != Z_OK){
+	  r = t;
+	  if (r == Z_DATA_ERROR){
+	    this.blens=null;
+	    this.mode = IB_BAD;
+	  }
+
+	  this.bitb=b; this.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  this.write=q;
+	  return this.inflate_flush(z,r);
+	}
+
+	this.index = 0;
+	this.mode = IB_DTREE;
+      case IB_DTREE:
+	while (true){
+	  t = this.table;
+	  if(!(this.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))){
+	    break;
+	  }
+
+	  var h; //int[]
+	  var i, j, c;
+
+	  t = this.bb[0];
+
+	  while(k<(t)){
+	    if(n!=0){
+	      r=Z_OK;
+	    }
+	    else{
+	      this.bitb=b; this.bitk=k;
+	      z.avail_in=n;
+	      z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	      this.write=q;
+	      return this.inflate_flush(z,r);
+	    };
+	    n--;
+	    b|=(z.next_in[p++]&0xff)<<k;
+	    k+=8;
+	  }
+
+//	  if (this.tb[0]==-1){
+//            dlog("null...");
+//	  }
+
+	  t=this.hufts[(this.tb[0]+(b & inflate_mask[t]))*3+1];
+	  c=this.hufts[(this.tb[0]+(b & inflate_mask[t]))*3+2];
+
+	  if (c < 16){
+	    b>>>=(t);k-=(t);
+	    this.blens[this.index++] = c;
+	  }
+	  else { // c == 16..18
+	    i = c == 18 ? 7 : c - 14;
+	    j = c == 18 ? 11 : 3;
+
+	    while(k<(t+i)){
+	      if(n!=0){
+		r=Z_OK;
+	      }
+	      else{
+		this.bitb=b; this.bitk=k;
+		z.avail_in=n;
+		z.total_in+=p-z.next_in_index;z.next_in_index=p;
+		this.write=q;
+		return this.inflate_flush(z,r);
+	      };
+	      n--;
+	      b|=(z.next_in[p++]&0xff)<<k;
+	      k+=8;
+	    }
+
+	    b>>>=(t);k-=(t);
+
+	    j += (b & inflate_mask[i]);
+
+	    b>>>=(i);k-=(i);
+
+	    i = this.index;
+	    t = this.table;
+	    if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
+		(c == 16 && i < 1)){
+	      this.blens=null;
+	      this.mode = IB_BAD;
+	      z.msg = "invalid bit length repeat";
+	      r = Z_DATA_ERROR;
+
+	      this.bitb=b; this.bitk=k;
+	      z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	      this.write=q;
+	      return this.inflate_flush(z,r);
+	    }
+
+	    c = c == 16 ? this.blens[i-1] : 0;
+	    do{
+	      this.blens[i++] = c;
+	    }
+	    while (--j!=0);
+	    this.index = i;
+	  }
+	}
+
+	this.tb[0]=-1;
+	{
+	    var bl=new Int32Array(1);
+	    var bd=new Int32Array(1);
+	    var tl=new Int32Array(1);
+	    var td=new Int32Array(1);
+	    bl[0] = 9;         // must be <= 9 for lookahead assumptions
+	    bd[0] = 6;         // must be <= 9 for lookahead assumptions
+
+	    t = this.table;
+	    t = this.inftree.inflate_trees_dynamic(257 + (t & 0x1f),
+					      1 + ((t >> 5) & 0x1f),
+					      this.blens, bl, bd, tl, td, this.hufts, z);
+
+	    if (t != Z_OK){
+	        if (t == Z_DATA_ERROR){
+	            this.blens=null;
+	            this.mode = IB_BAD;
+	        }
+	        r = t;
+
+	        this.bitb=b; this.bitk=k;
+	        z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	        this.write=q;
+	        return this.inflate_flush(z,r);
+	    }
+	    this.codes.init(bl[0], bd[0], this.hufts, tl[0], this.hufts, td[0], z);
+	}
+	this.mode = IB_CODES;
+      case IB_CODES:
+	this.bitb=b; this.bitk=k;
+	z.avail_in=n; z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	this.write=q;
+
+	if ((r = this.codes.proc(this, z, r)) != Z_STREAM_END){
+	  return this.inflate_flush(z, r);
+	}
+	r = Z_OK;
+	this.codes.free(z);
+
+	p=z.next_in_index; n=z.avail_in;b=this.bitb;k=this.bitk;
+	q=this.write;m = (q < this.read ? this.read-q-1 : this.end-q);
+
+	if (this.last==0){
+	  this.mode = IB_TYPE;
+	  break;
+	}
+	this.mode = IB_DRY;
+      case IB_DRY:
+	this.write=q;
+	r = this.inflate_flush(z, r);
+	q=this.write; m = (q < this.read ? this.read-q-1 : this.end-q);
+	if (this.read != this.write){
+	  this.bitb=b; this.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  this.write=q;
+	  return this.inflate_flush(z, r);
+	}
+	this.mode = IB_DONE;
+      case IB_DONE:
+	r = Z_STREAM_END;
+
+	this.bitb=b; this.bitk=k;
+	z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	this.write=q;
+	return this.inflate_flush(z, r);
+      case IB_BAD:
+	r = Z_DATA_ERROR;
+
+	this.bitb=b; this.bitk=k;
+	z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	this.write=q;
+	return this.inflate_flush(z, r);
+
+      default:
+	r = Z_STREAM_ERROR;
+
+	this.bitb=b; this.bitk=k;
+	z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	this.write=q;
+	return this.inflate_flush(z, r);
+      }
+    }
+  }
+
+InfBlocks.prototype.free = function(z){
+    this.reset(z, null);
+    this.window=null;
+    this.hufts=null;
+}
+
+InfBlocks.prototype.set_dictionary = function(d, start, n){
+    arrayCopy(d, start, this.window, 0, n);
+    this.read = this.write = n;
+}
+
+  // Returns true if inflate is currently at the end of a block generated
+  // by Z_SYNC_FLUSH or Z_FULL_FLUSH.
+InfBlocks.prototype.sync_point = function(){
+    return this.mode == IB_LENS;
+}
+
+  // copy as much as possible from the sliding window to the output area
+InfBlocks.prototype.inflate_flush = function(z, r){
+    var n;
+    var p;
+    var q;
+
+    // local copies of source and destination pointers
+    p = z.next_out_index;
+    q = this.read;
+
+    // compute number of bytes to copy as far as end of window
+    n = ((q <= this.write ? this.write : this.end) - q);
+    if (n > z.avail_out) n = z.avail_out;
+    if (n!=0 && r == Z_BUF_ERROR) r = Z_OK;
+
+    // update counters
+    z.avail_out -= n;
+    z.total_out += n;
+
+    // update check information
+    if(this.checkfn != null)
+      z.adler=this.check=adler32(this.check, this.window, q, n);
+
+    // copy as far as end of window
+    arrayCopy(this.window, q, z.next_out, p, n);
+    p += n;
+    q += n;
+
+    // see if more to copy at beginning of window
+    if (q == this.end){
+      // wrap pointers
+      q = 0;
+      if (this.write == this.end)
+        this.write = 0;
+
+      // compute bytes to copy
+      n = this.write - q;
+      if (n > z.avail_out) n = z.avail_out;
+      if (n!=0 && r == Z_BUF_ERROR) r = Z_OK;
+
+      // update counters
+      z.avail_out -= n;
+      z.total_out += n;
+
+      // update check information
+      if(this.checkfn != null)
+	z.adler=this.check=adler32(this.check, this.window, q, n);
+
+      // copy
+      arrayCopy(this.window, q, z.next_out, p, n);
+      p += n;
+      q += n;
+    }
+
+    // update pointers
+    z.next_out_index = p;
+    this.read = q;
+
+    // done
+    return r;
+  }
+
+//
+// InfCodes.java
+//
+
+var IC_START=0;  // x: set up for LEN
+var IC_LEN=1;    // i: get length/literal/eob next
+var IC_LENEXT=2; // i: getting length extra (have base)
+var IC_DIST=3;   // i: get distance next
+var IC_DISTEXT=4;// i: getting distance extra
+var IC_COPY=5;   // o: copying bytes in window, waiting for space
+var IC_LIT=6;    // o: got literal, waiting for output space
+var IC_WASH=7;   // o: got eob, possibly still output waiting
+var IC_END=8;    // x: got eob and all data flushed
+var IC_BADCODE=9;// x: got error
+
+function InfCodes() {
+}
+
+InfCodes.prototype.init = function(bl, bd, tl, tl_index, td, td_index, z) {
+    this.mode=IC_START;
+    this.lbits=bl;
+    this.dbits=bd;
+    this.ltree=tl;
+    this.ltree_index=tl_index;
+    this.dtree = td;
+    this.dtree_index=td_index;
+    this.tree=null;
+}
+
+InfCodes.prototype.proc = function(s, z, r){
+    var j;              // temporary storage
+    var t;              // temporary pointer (int[])
+    var tindex;         // temporary pointer
+    var e;              // extra bits or operation
+    var b=0;            // bit buffer
+    var k=0;            // bits in bit buffer
+    var p=0;            // input data pointer
+    var n;              // bytes available there
+    var q;              // output window write pointer
+    var m;              // bytes to end of window or read pointer
+    var f;              // pointer to copy strings from
+
+    // copy input/output information to locals (UPDATE macro restores)
+    p=z.next_in_index;n=z.avail_in;b=s.bitb;k=s.bitk;
+    q=s.write;m=q<s.read?s.read-q-1:s.end-q;
+
+    // process input and output based on current state
+    while (true){
+      switch (this.mode){
+	// waiting for "i:"=input, "o:"=output, "x:"=nothing
+      case IC_START:         // x: set up for LEN
+	if (m >= 258 && n >= 10){
+
+	  s.bitb=b;s.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  s.write=q;
+	  r = this.inflate_fast(this.lbits, this.dbits,
+			   this.ltree, this.ltree_index,
+			   this.dtree, this.dtree_index,
+			   s, z);
+
+	  p=z.next_in_index;n=z.avail_in;b=s.bitb;k=s.bitk;
+	  q=s.write;m=q<s.read?s.read-q-1:s.end-q;
+
+	  if (r != Z_OK){
+	    this.mode = r == Z_STREAM_END ? IC_WASH : IC_BADCODE;
+	    break;
+	  }
+	}
+	this.need = this.lbits;
+	this.tree = this.ltree;
+	this.tree_index=this.ltree_index;
+
+	this.mode = IC_LEN;
+      case IC_LEN:           // i: get length/literal/eob next
+	j = this.need;
+
+	while(k<(j)){
+	  if(n!=0)r=Z_OK;
+	  else{
+
+	    s.bitb=b;s.bitk=k;
+	    z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	    s.write=q;
+	    return s.inflate_flush(z,r);
+	  }
+	  n--;
+	  b|=(z.next_in[p++]&0xff)<<k;
+	  k+=8;
+	}
+
+	tindex=(this.tree_index+(b&inflate_mask[j]))*3;
+
+	b>>>=(this.tree[tindex+1]);
+	k-=(this.tree[tindex+1]);
+
+	e=this.tree[tindex];
+
+	if(e == 0){               // literal
+	  this.lit = this.tree[tindex+2];
+	  this.mode = IC_LIT;
+	  break;
+	}
+	if((e & 16)!=0 ){          // length
+	  this.get = e & 15;
+	  this.len = this.tree[tindex+2];
+	  this.mode = IC_LENEXT;
+	  break;
+	}
+	if ((e & 64) == 0){        // next table
+	  this.need = e;
+	  this.tree_index = tindex/3 + this.tree[tindex+2];
+	  break;
+	}
+	if ((e & 32)!=0){               // end of block
+	  this.mode = IC_WASH;
+	  break;
+	}
+	this.mode = IC_BADCODE;        // invalid code
+	z.msg = "invalid literal/length code";
+	r = Z_DATA_ERROR;
+
+	s.bitb=b;s.bitk=k;
+	z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	s.write=q;
+	return s.inflate_flush(z,r);
+
+      case IC_LENEXT:        // i: getting length extra (have base)
+	j = this.get;
+
+	while(k<(j)){
+	  if(n!=0)r=Z_OK;
+	  else{
+
+	    s.bitb=b;s.bitk=k;
+	    z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	    s.write=q;
+	    return s.inflate_flush(z,r);
+	  }
+	  n--; b|=(z.next_in[p++]&0xff)<<k;
+	  k+=8;
+	}
+
+	this.len += (b & inflate_mask[j]);
+
+	b>>=j;
+	k-=j;
+
+	this.need = this.dbits;
+	this.tree = this.dtree;
+	this.tree_index = this.dtree_index;
+	this.mode = IC_DIST;
+      case IC_DIST:          // i: get distance next
+	j = this.need;
+
+	while(k<(j)){
+	  if(n!=0)r=Z_OK;
+	  else{
+
+	    s.bitb=b;s.bitk=k;
+	    z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	    s.write=q;
+	    return s.inflate_flush(z,r);
+	  }
+	  n--; b|=(z.next_in[p++]&0xff)<<k;
+	  k+=8;
+	}
+
+	tindex=(this.tree_index+(b & inflate_mask[j]))*3;
+
+	b>>=this.tree[tindex+1];
+	k-=this.tree[tindex+1];
+
+	e = (this.tree[tindex]);
+	if((e & 16)!=0){               // distance
+	  this.get = e & 15;
+	  this.dist = this.tree[tindex+2];
+	  this.mode = IC_DISTEXT;
+	  break;
+	}
+	if ((e & 64) == 0){        // next table
+	  this.need = e;
+	  this.tree_index = tindex/3 + this.tree[tindex+2];
+	  break;
+	}
+	this.mode = IC_BADCODE;        // invalid code
+	z.msg = "invalid distance code";
+	r = Z_DATA_ERROR;
+
+	s.bitb=b;s.bitk=k;
+	z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	s.write=q;
+	return s.inflate_flush(z,r);
+
+      case IC_DISTEXT:       // i: getting distance extra
+	j = this.get;
+
+	while(k<(j)){
+	  if(n!=0)r=Z_OK;
+	  else{
+
+	    s.bitb=b;s.bitk=k;
+	    z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	    s.write=q;
+	    return s.inflate_flush(z,r);
+	  }
+	  n--; b|=(z.next_in[p++]&0xff)<<k;
+	  k+=8;
+	}
+
+	this.dist += (b & inflate_mask[j]);
+
+	b>>=j;
+	k-=j;
+
+	this.mode = IC_COPY;
+      case IC_COPY:          // o: copying bytes in window, waiting for space
+        f = q - this.dist;
+        while(f < 0){     // modulo window size-"while" instead
+          f += s.end;     // of "if" handles invalid distances
+	}
+	while (this.len!=0){
+
+	  if(m==0){
+	    if(q==s.end&&s.read!=0){q=0;m=q<s.read?s.read-q-1:s.end-q;}
+	    if(m==0){
+	      s.write=q; r=s.inflate_flush(z,r);
+	      q=s.write;m=q<s.read?s.read-q-1:s.end-q;
+
+	      if(q==s.end&&s.read!=0){q=0;m=q<s.read?s.read-q-1:s.end-q;}
+
+	      if(m==0){
+		s.bitb=b;s.bitk=k;
+		z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+		s.write=q;
+		return s.inflate_flush(z,r);
+	      }
+	    }
+	  }
+
+	  s.window[q++]=s.window[f++]; m--;
+
+	  if (f == s.end)
+            f = 0;
+	  this.len--;
+	}
+	this.mode = IC_START;
+	break;
+      case IC_LIT:           // o: got literal, waiting for output space
+	if(m==0){
+	  if(q==s.end&&s.read!=0){q=0;m=q<s.read?s.read-q-1:s.end-q;}
+	  if(m==0){
+	    s.write=q; r=s.inflate_flush(z,r);
+	    q=s.write;m=q<s.read?s.read-q-1:s.end-q;
+
+	    if(q==s.end&&s.read!=0){q=0;m=q<s.read?s.read-q-1:s.end-q;}
+	    if(m==0){
+	      s.bitb=b;s.bitk=k;
+	      z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	      s.write=q;
+	      return s.inflate_flush(z,r);
+	    }
+	  }
+	}
+	r=Z_OK;
+
+	s.window[q++]=this.lit; m--;
+
+	this.mode = IC_START;
+	break;
+      case IC_WASH:           // o: got eob, possibly more output
+	if (k > 7){        // return unused byte, if any
+	  k -= 8;
+	  n++;
+	  p--;             // can always return one
+	}
+
+	s.write=q; r=s.inflate_flush(z,r);
+	q=s.write;m=q<s.read?s.read-q-1:s.end-q;
+
+	if (s.read != s.write){
+	  s.bitb=b;s.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  s.write=q;
+	  return s.inflate_flush(z,r);
+	}
+	this.mode = IC_END;
+      case IC_END:
+	r = Z_STREAM_END;
+	s.bitb=b;s.bitk=k;
+	z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	s.write=q;
+	return s.inflate_flush(z,r);
+
+      case IC_BADCODE:       // x: got error
+
+	r = Z_DATA_ERROR;
+
+	s.bitb=b;s.bitk=k;
+	z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	s.write=q;
+	return s.inflate_flush(z,r);
+
+      default:
+	r = Z_STREAM_ERROR;
+
+	s.bitb=b;s.bitk=k;
+	z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	s.write=q;
+	return s.inflate_flush(z,r);
+      }
+    }
+  }
+
+InfCodes.prototype.free = function(z){
+    //  ZFREE(z, c);
+}
+
+  // Called with number of bytes left to write in window at least 258
+  // (the maximum string length) and number of input bytes available
+  // at least ten.  The ten bytes are six bytes for the longest length/
+  // distance pair plus four bytes for overloading the bit buffer.
+
+InfCodes.prototype.inflate_fast = function(bl, bd, tl, tl_index, td, td_index, s, z) {
+    var t;                // temporary pointer
+    var   tp;             // temporary pointer (int[])
+    var tp_index;         // temporary pointer
+    var e;                // extra bits or operation
+    var b;                // bit buffer
+    var k;                // bits in bit buffer
+    var p;                // input data pointer
+    var n;                // bytes available there
+    var q;                // output window write pointer
+    var m;                // bytes to end of window or read pointer
+    var ml;               // mask for literal/length tree
+    var md;               // mask for distance tree
+    var c;                // bytes to copy
+    var d;                // distance back to copy from
+    var r;                // copy source pointer
+
+    var tp_index_t_3;     // (tp_index+t)*3
+
+    // load input, output, bit values
+    p=z.next_in_index;n=z.avail_in;b=s.bitb;k=s.bitk;
+    q=s.write;m=q<s.read?s.read-q-1:s.end-q;
+
+    // initialize masks
+    ml = inflate_mask[bl];
+    md = inflate_mask[bd];
+
+    // do until not enough input or output space for fast loop
+    do {                          // assume called with m >= 258 && n >= 10
+      // get literal/length code
+      while(k<(20)){              // max bits for literal/length code
+	n--;
+	b|=(z.next_in[p++]&0xff)<<k;k+=8;
+      }
+
+      t= b&ml;
+      tp=tl;
+      tp_index=tl_index;
+      tp_index_t_3=(tp_index+t)*3;
+      if ((e = tp[tp_index_t_3]) == 0){
+	b>>=(tp[tp_index_t_3+1]); k-=(tp[tp_index_t_3+1]);
+
+	s.window[q++] = tp[tp_index_t_3+2];
+	m--;
+	continue;
+      }
+      do {
+
+	b>>=(tp[tp_index_t_3+1]); k-=(tp[tp_index_t_3+1]);
+
+	if((e&16)!=0){
+	  e &= 15;
+	  c = tp[tp_index_t_3+2] + (b & inflate_mask[e]);
+
+	  b>>=e; k-=e;
+
+	  // decode distance base of block to copy
+	  while(k<(15)){           // max bits for distance code
+	    n--;
+	    b|=(z.next_in[p++]&0xff)<<k;k+=8;
+	  }
+
+	  t= b&md;
+	  tp=td;
+	  tp_index=td_index;
+          tp_index_t_3=(tp_index+t)*3;
+	  e = tp[tp_index_t_3];
+
+	  do {
+
+	    b>>=(tp[tp_index_t_3+1]); k-=(tp[tp_index_t_3+1]);
+
+	    if((e&16)!=0){
+	      // get extra bits to add to distance base
+	      e &= 15;
+	      while(k<(e)){         // get extra bits (up to 13)
+		n--;
+		b|=(z.next_in[p++]&0xff)<<k;k+=8;
+	      }
+
+	      d = tp[tp_index_t_3+2] + (b&inflate_mask[e]);
+
+	      b>>=(e); k-=(e);
+
+	      // do the copy
+	      m -= c;
+	      if (q >= d){                // offset before dest
+		//  just copy
+		r=q-d;
+		s.window[q++]=s.window[r++]; // minimum count is three,
+		s.window[q++]=s.window[r++]; // so unroll loop a little
+		c-=2;
+	      }
+	      else{                  // else offset after destination
+                r=q-d;
+                do{
+                  r+=s.end;          // force pointer in window
+                }while(r<0);         // covers invalid distances
+		e=s.end-r;
+		if(c>e){             // if source crosses,
+		  c-=e;              // wrapped copy
+		  if(q-r>0 && e>(q-r)){
+		    do{s.window[q++] = s.window[r++];}
+		    while(--e!=0);
+		  }
+		  else{
+		    arrayCopy(s.window, r, s.window, q, e);
+		    q+=e; r+=e; e=0;
+		  }
+		  r = 0;                  // copy rest from start of window
+		}
+
+	      }
+
+	      // copy all or what's left
+              do{s.window[q++] = s.window[r++];}
+		while(--c!=0);
+	      break;
+	    }
+	    else if((e&64)==0){
+	      t+=tp[tp_index_t_3+2];
+	      t+=(b&inflate_mask[e]);
+	      tp_index_t_3=(tp_index+t)*3;
+	      e=tp[tp_index_t_3];
+	    }
+	    else{
+	      z.msg = "invalid distance code";
+
+	      c=z.avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;
+
+	      s.bitb=b;s.bitk=k;
+	      z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	      s.write=q;
+
+	      return Z_DATA_ERROR;
+	    }
+	  }
+	  while(true);
+	  break;
+	}
+
+	if((e&64)==0){
+	  t+=tp[tp_index_t_3+2];
+	  t+=(b&inflate_mask[e]);
+	  tp_index_t_3=(tp_index+t)*3;
+	  if((e=tp[tp_index_t_3])==0){
+
+	    b>>=(tp[tp_index_t_3+1]); k-=(tp[tp_index_t_3+1]);
+
+	    s.window[q++]=tp[tp_index_t_3+2];
+	    m--;
+	    break;
+	  }
+	}
+	else if((e&32)!=0){
+
+	  c=z.avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;
+
+	  s.bitb=b;s.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  s.write=q;
+
+	  return Z_STREAM_END;
+	}
+	else{
+	  z.msg="invalid literal/length code";
+
+	  c=z.avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;
+
+	  s.bitb=b;s.bitk=k;
+	  z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+	  s.write=q;
+
+	  return Z_DATA_ERROR;
+	}
+      }
+      while(true);
+    }
+    while(m>=258 && n>= 10);
+
+    // not enough input or output--restore pointers and return
+    c=z.avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;
+
+    s.bitb=b;s.bitk=k;
+    z.avail_in=n;z.total_in+=p-z.next_in_index;z.next_in_index=p;
+    s.write=q;
+
+    return Z_OK;
+}
+
+//
+// InfTree.java
+//
+
+function InfTree() {
+}
+
+InfTree.prototype.huft_build = function(b, bindex, n, s, d, e, t, m, hp, hn, v) {
+
+    // Given a list of code lengths and a maximum table size, make a set of
+    // tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
+    // if the given code set is incomplete (the tables are still built in this
+    // case), Z_DATA_ERROR if the input is invalid (an over-subscribed set of
+    // lengths), or Z_MEM_ERROR if not enough memory.
+
+    var a;                       // counter for codes of length k
+    var f;                       // i repeats in table every f entries
+    var g;                       // maximum code length
+    var h;                       // table level
+    var i;                       // counter, current code
+    var j;                       // counter
+    var k;                       // number of bits in current code
+    var l;                       // bits per table (returned in m)
+    var mask;                    // (1 << w) - 1, to avoid cc -O bug on HP
+    var p;                       // pointer into c[], b[], or v[]
+    var q;                       // points to current table
+    var w;                       // bits before this table == (l * h)
+    var xp;                      // pointer into x
+    var y;                       // number of dummy codes added
+    var z;                       // number of entries in current table
+
+    // Generate counts for each bit length
+
+    p = 0; i = n;
+    do {
+      this.c[b[bindex+p]]++; p++; i--;   // assume all entries <= BMAX
+    }while(i!=0);
+
+    if(this.c[0] == n){                // null input--all zero length codes
+      t[0] = -1;
+      m[0] = 0;
+      return Z_OK;
+    }
+
+    // Find minimum and maximum length, bound *m by those
+    l = m[0];
+    for (j = 1; j <= BMAX; j++)
+      if(this.c[j]!=0) break;
+    k = j;                        // minimum code length
+    if(l < j){
+      l = j;
+    }
+    for (i = BMAX; i!=0; i--){
+      if(this.c[i]!=0) break;
+    }
+    g = i;                        // maximum code length
+    if(l > i){
+      l = i;
+    }
+    m[0] = l;
+
+    // Adjust last length count to fill out codes, if needed
+    for (y = 1 << j; j < i; j++, y <<= 1){
+      if ((y -= this.c[j]) < 0){
+        return Z_DATA_ERROR;
+      }
+    }
+    if ((y -= this.c[i]) < 0){
+      return Z_DATA_ERROR;
+    }
+    this.c[i] += y;
+
+    // Generate starting offsets into the value table for each length
+    this.x[1] = j = 0;
+    p = 1;  xp = 2;
+    while (--i!=0) {                 // note that i == g from above
+      this.x[xp] = (j += this.c[p]);
+      xp++;
+      p++;
+    }
+
+    // Make a table of values in order of bit lengths
+    i = 0; p = 0;
+    do {
+      if ((j = b[bindex+p]) != 0){
+        this.v[this.x[j]++] = i;
+      }
+      p++;
+    }
+    while (++i < n);
+    n = this.x[g];                     // set n to length of v
+
+    // Generate the Huffman codes and for each, make the table entries
+    this.x[0] = i = 0;                 // first Huffman code is zero
+    p = 0;                        // grab values in bit order
+    h = -1;                       // no tables yet--level -1
+    w = -l;                       // bits decoded == (l * h)
+    this.u[0] = 0;                     // just to keep compilers happy
+    q = 0;                        // ditto
+    z = 0;                        // ditto
+
+    // go through the bit lengths (k already is bits in shortest code)
+    for (; k <= g; k++){
+      a = this.c[k];
+      while (a--!=0){
+	// here i is the Huffman code of length k bits for value *p
+	// make tables up to required level
+        while (k > w + l){
+          h++;
+          w += l;                 // previous table always l bits
+	  // compute minimum size table less than or equal to l bits
+          z = g - w;
+          z = (z > l) ? l : z;        // table size upper limit
+          if((f=1<<(j=k-w))>a+1){     // try a k-w bit table
+                                      // too few codes for k-w bit table
+            f -= a + 1;               // deduct codes from patterns left
+            xp = k;
+            if(j < z){
+              while (++j < z){        // try smaller tables up to z bits
+                if((f <<= 1) <= this.c[++xp])
+                  break;              // enough codes to use up j bits
+                f -= this.c[xp];           // else deduct codes from patterns
+              }
+	    }
+          }
+          z = 1 << j;                 // table entries for j-bit table
+
+	  // allocate new table
+          if (this.hn[0] + z > MANY){       // (note: doesn't matter for fixed)
+            return Z_DATA_ERROR;       // overflow of MANY
+          }
+          this.u[h] = q = /*hp+*/ this.hn[0];   // DEBUG
+          this.hn[0] += z;
+
+	  // connect to last table, if there is one
+	  if(h!=0){
+            this.x[h]=i;           // save pattern for backing up
+            this.r[0]=j;     // bits in this table
+            this.r[1]=l;     // bits to dump before this table
+            j=i>>>(w - l);
+            this.r[2] = (q - this.u[h-1] - j);               // offset to this table
+            arrayCopy(this.r, 0, hp, (this.u[h-1]+j)*3, 3); // connect to last table
+          }
+          else{
+            t[0] = q;               // first table is returned result
+	  }
+        }
+
+	// set up table entry in r
+        this.r[1] = (k - w);
+        if (p >= n){
+          this.r[0] = 128 + 64;      // out of values--invalid code
+	}
+        else if (v[p] < s){
+          this.r[0] = (this.v[p] < 256 ? 0 : 32 + 64);  // 256 is end-of-block
+          this.r[2] = this.v[p++];          // simple code is just the value
+        }
+        else{
+          this.r[0]=(e[this.v[p]-s]+16+64); // non-simple--look up in lists
+          this.r[2]=d[this.v[p++] - s];
+        }
+
+        // fill code-like entries with r
+        f=1<<(k-w);
+        for (j=i>>>w;j<z;j+=f){
+          arrayCopy(this.r, 0, hp, (q+j)*3, 3);
+	}
+
+	// backwards increment the k-bit code i
+        for (j = 1 << (k - 1); (i & j)!=0; j >>>= 1){
+          i ^= j;
+	}
+        i ^= j;
+
+	// backup over finished tables
+        mask = (1 << w) - 1;      // needed on HP, cc -O bug
+        while ((i & mask) != this.x[h]){
+          h--;                    // don't need to update q
+          w -= l;
+          mask = (1 << w) - 1;
+        }
+      }
+    }
+    // Return Z_BUF_ERROR if we were given an incomplete table
+    return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
+}
+
+InfTree.prototype.inflate_trees_bits = function(c, bb, tb, hp, z) {
+    var result;
+    this.initWorkArea(19);
+    this.hn[0]=0;
+    result = this.huft_build(c, 0, 19, 19, null, null, tb, bb, hp, this.hn, this.v);
+
+    if(result == Z_DATA_ERROR){
+      z.msg = "oversubscribed dynamic bit lengths tree";
+    }
+    else if(result == Z_BUF_ERROR || bb[0] == 0){
+      z.msg = "incomplete dynamic bit lengths tree";
+      result = Z_DATA_ERROR;
+    }
+    return result;
+}
+
+InfTree.prototype.inflate_trees_dynamic = function(nl, nd, c, bl, bd, tl, td, hp, z) {
+    var result;
+
+    // build literal/length tree
+    this.initWorkArea(288);
+    this.hn[0]=0;
+    result = this.huft_build(c, 0, nl, 257, cplens, cplext, tl, bl, hp, this.hn, this.v);
+    if (result != Z_OK || bl[0] == 0){
+      if(result == Z_DATA_ERROR){
+        z.msg = "oversubscribed literal/length tree";
+      }
+      else if (result != Z_MEM_ERROR){
+        z.msg = "incomplete literal/length tree";
+        result = Z_DATA_ERROR;
+      }
+      return result;
+    }
+
+    // build distance tree
+    this.initWorkArea(288);
+    result = this.huft_build(c, nl, nd, 0, cpdist, cpdext, td, bd, hp, this.hn, this.v);
+
+    if (result != Z_OK || (bd[0] == 0 && nl > 257)){
+      if (result == Z_DATA_ERROR){
+        z.msg = "oversubscribed distance tree";
+      }
+      else if (result == Z_BUF_ERROR) {
+        z.msg = "incomplete distance tree";
+        result = Z_DATA_ERROR;
+      }
+      else if (result != Z_MEM_ERROR){
+        z.msg = "empty distance tree with lengths";
+        result = Z_DATA_ERROR;
+      }
+      return result;
+    }
+
+    return Z_OK;
+}
+/*
+  static int inflate_trees_fixed(int[] bl,  //literal desired/actual bit depth
+                                 int[] bd,  //distance desired/actual bit depth
+                                 int[][] tl,//literal/length tree result
+                                 int[][] td,//distance tree result
+                                 ZStream z  //for memory allocation
+				 ){
+
+*/
+
+function inflate_trees_fixed(bl, bd, tl, td, z) {
+    bl[0]=fixed_bl;
+    bd[0]=fixed_bd;
+    tl[0]=fixed_tl;
+    td[0]=fixed_td;
+    return Z_OK;
+}
+
+InfTree.prototype.initWorkArea = function(vsize){
+    if(this.hn==null){
+        this.hn=new Int32Array(1);
+        this.v=new Int32Array(vsize);
+        this.c=new Int32Array(BMAX+1);
+        this.r=new Int32Array(3);
+        this.u=new Int32Array(BMAX);
+        this.x=new Int32Array(BMAX+1);
+    }
+    if(this.v.length<vsize){
+        this.v=new Int32Array(vsize);
+    }
+    for(var i=0; i<vsize; i++){this.v[i]=0;}
+    for(var i=0; i<BMAX+1; i++){this.c[i]=0;}
+    for(var i=0; i<3; i++){this.r[i]=0;}
+//  for(int i=0; i<BMAX; i++){u[i]=0;}
+    arrayCopy(this.c, 0, this.u, 0, BMAX);
+//  for(int i=0; i<BMAX+1; i++){x[i]=0;}
+    arrayCopy(this.c, 0, this.x, 0, BMAX+1);
+}
+
+var testArray = new Uint8Array(1);
+var hasSubarray = (typeof testArray.subarray === 'function');
+var hasSlice = false; /* (typeof testArray.slice === 'function'); */ // Chrome slice performance is so dire that we're currently not using it...
+
+function arrayCopy(src, srcOffset, dest, destOffset, count) {
+    if (count == 0) {
+        return;
+    }
+    if (!src) {
+        throw "Undef src";
+    } else if (!dest) {
+        throw "Undef dest";
+    }
+
+    if (srcOffset == 0 && count == src.length) {
+        arrayCopy_fast(src, dest, destOffset);
+    } else if (hasSubarray) {
+        arrayCopy_fast(src.subarray(srcOffset, srcOffset + count), dest, destOffset);
+    } else if (src.BYTES_PER_ELEMENT == 1 && count > 100) {
+        arrayCopy_fast(new Uint8Array(src.buffer, src.byteOffset + srcOffset, count), dest, destOffset);
+    } else {
+        arrayCopy_slow(src, srcOffset, dest, destOffset, count);
+    }
+
+}
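+
+// arrayCopy picks the cheapest strategy available: TypedArray.set for a
+// whole-source copy, a subarray view where the engine supports it, a
+// temporary Uint8Array view for large byte runs, and the element-by-element
+// loop below as the last resort.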
+
+function arrayCopy_slow(src, srcOffset, dest, destOffset, count) {
+    // dlog('_slow call: srcOffset=' + srcOffset + '; destOffset=' + destOffset + '; count=' + count);
+    for (var i = 0; i < count; ++i) {
+        dest[destOffset + i] = src[srcOffset + i];
+    }
+}
+
+function arrayCopy_fast(src, dest, destOffset) {
+    dest.set(src, destOffset);
+}
+
+
+// largest prime smaller than 65536
+var ADLER_BASE = 65521;
+// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
+var ADLER_NMAX = 5552;
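+
+// Worked example: adler32(1, [0x61, 0x62, 0x63], 0, 3) -- "abc" -- gives
+// s1 = 1+97+98+99 = 295 and s2 = 98+196+295 = 589, so the checksum is
+// (589 << 16) | 295 = 0x024D0127, matching zlib's Adler-32 of "abc".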
+
+function adler32(adler, /* byte[] */ buf,  index, len){
+    if(buf == null){ return 1; }
+
+    var s1=adler&0xffff;
+    var s2=(adler>>16)&0xffff;
+    var k;
+
+    while(len > 0) {
+      k=len<ADLER_NMAX?len:ADLER_NMAX;
+      len-=k;
+      while(k>=16){
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        s1+=buf[index++]&0xff; s2+=s1;
+        k-=16;
+      }
+      if(k!=0){
+        do{
+          s1+=buf[index++]&0xff; s2+=s1;
+        }
+        while(--k!=0);
+      }
+      s1%=ADLER_BASE;
+      s2%=ADLER_BASE;
+    }
+    return (s2<<16)|s1;
+}
+
+
+
+function jszlib_inflate_buffer(buffer, start, length, afterUncOffset) {
+    // start and length are optional; compare against undefined so that an
+    // explicit start of 0 combined with a length is still honoured
+    if (start === undefined) {
+        buffer = new Uint8Array(buffer);
+    } else if (length === undefined) {
+        buffer = new Uint8Array(buffer, start, buffer.byteLength - start);
+    } else {
+        buffer = new Uint8Array(buffer, start, length);
+    }
+
+    var z = new ZStream();
+    z.inflateInit(DEF_WBITS, true);
+    z.next_in = buffer;
+    z.next_in_index = 0;
+    z.avail_in = buffer.length;
+
+    var oBlockList = [];
+    var totalSize = 0;
+    while (true) {
+        var obuf = new Uint8Array(32000);
+        z.next_out = obuf;
+        z.next_out_index = 0;
+        z.avail_out = obuf.length;
+        var status = z.inflate(Z_NO_FLUSH);
+        if (status != Z_OK && status != Z_STREAM_END && status != Z_BUF_ERROR) {
+            throw z.msg;
+        }
+        if (z.avail_out != 0) {
+            var newob = new Uint8Array(obuf.length - z.avail_out);
+            arrayCopy(obuf, 0, newob, 0, (obuf.length - z.avail_out));
+            obuf = newob;
+        }
+        oBlockList.push(obuf);
+        totalSize += obuf.length;
+        if (status == Z_STREAM_END || status == Z_BUF_ERROR) {
+            break;
+        }
+    }
+
+    if (afterUncOffset) {
+        afterUncOffset[0] = (start || 0) + z.next_in_index;
+    }
+
+    if (oBlockList.length == 1) {
+        return oBlockList[0].buffer;
+    } else {
+        var out = new Uint8Array(totalSize);
+        var cursor = 0;
+        for (var i = 0; i < oBlockList.length; ++i) {
+            var b = oBlockList[i];
+            arrayCopy(b, 0, out, cursor, b.length);
+            cursor += b.length;
+        }
+        return out.buffer;
+    }
+}
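+
+// Example (hypothetical caller): inflate one raw-deflate region of an
+// ArrayBuffer and learn where the compressed stream ended, which is useful
+// when deflate blocks are concatenated back to back:
+//
+//   var consumed = [];
+//   var plain = jszlib_inflate_buffer(buf, blockStart, blockLength, consumed);
+//   // plain is an ArrayBuffer of uncompressed bytes; consumed[0] is the
+//   // offset just past the deflate data that was actually read.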
+
+return {
+    inflateBuffer: jszlib_inflate_buffer,
+    arrayCopy: arrayCopy
+  };
+});
diff --git a/client/galaxy/scripts/libs/bbi/spans.js b/client/galaxy/scripts/libs/bbi/spans.js
new file mode 100644
index 0000000..7c61b85
--- /dev/null
+++ b/client/galaxy/scripts/libs/bbi/spans.js
@@ -0,0 +1,257 @@
+/* -*- mode: javascript; c-basic-offset: 4; indent-tabs-mode: nil -*- */
+
+//
+// Dalliance Genome Explorer
+// (c) Thomas Down 2006-2010
+//
+// spans.js: JavaScript Intset/Location port.
+//
+
+define([], function() {
+
+"use strict";
+
+
+function Range(min, max)
+{
+    if (typeof(min) != 'number' || typeof(max) != 'number')
+        throw 'Bad range ' + min + ',' + max;
+    this._min = min;
+    this._max = max;
+}
+
+Range.prototype.min = function() {
+    return this._min;
+}
+
+Range.prototype.max = function() {
+    return this._max;
+}
+
+Range.prototype.contains = function(pos) {
+    return pos >= this._min && pos <= this._max;
+}
+
+Range.prototype.isContiguous = function() {
+    return true;
+}
+
+Range.prototype.ranges = function() {
+    return [this];
+}
+
+Range.prototype._pushRanges = function(ranges) {
+    ranges.push(this);
+}
+
+Range.prototype.toString = function() {
+    return '[' + this._min + '-' + this._max + ']';
+}
+
+function _Compound(ranges) {
+    // given: a set of unsorted possibly overlapping ranges
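+    // e.g. [ [5-10], [1-6], [12-15] ] sorts to [ [1-6], [5-10], [12-15] ]
+    // and merges to [ [1-10], [12-15] ]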
+    // sort the input ranges
+    var sorted = ranges.sort(_rangeOrder);
+    // merge overlaps between adjacent ranges
+    var merged = [];
+    var current = sorted.shift();
+    sorted.forEach(function(range) {
+        if (range._min <= current._max) {
+            if (range._max > current._max) {
+                current._max = range._max;
+            }
+        }
+        else {
+            merged.push(current);
+            current = range;
+        }
+    });
+    merged.push(current);
+    this._ranges = merged;
+}
+
+_Compound.prototype.min = function() {
+    return this._ranges[0].min();
+}
+
+_Compound.prototype.max = function() {
+    return this._ranges[this._ranges.length - 1].max();
+}
+
+// returns the index of the first range that is not less than pos
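+// e.g. with ranges [ [1-3], [7-9] ]: lower_bound(2) == 0, lower_bound(5) == 1,
+// and lower_bound(10) == 2 (one past the end)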
+_Compound.prototype.lower_bound = function(pos) {
+    // first check if pos is out of range
+    var r = this.ranges();
+    if (pos > this.max()) return r.length;
+    if (pos < this.min()) return 0;
+    // do a binary search
+    var a=0, b=r.length - 1;
+    while (a <= b) {
+        var m = Math.floor((a+b)/2);
+        if (pos > r[m]._max) {
+            a = m+1;
+        }
+        else if (pos < r[m]._min) {
+            b = m-1;
+        }
+        else {
+            return m;
+        }
+    }
+    return a;
+}
+
+_Compound.prototype.contains = function(pos) {
+    var lb = this.lower_bound(pos);
+    if (lb < this._ranges.length && this._ranges[lb].contains(pos)) {
+        return true;
+    }
+    return false;
+}
+
+_Compound.prototype.insertRange = function(range) {
+    var lb = this.lower_bound(range._min);
+    if (lb === this._ranges.length) { // range follows this
+        this._ranges.push(range);
+        return;
+    }
+
+    var r = this.ranges();
+    if (range._max < r[lb]._min) { // range precedes lb
+        this._ranges.splice(lb,0,range);
+        return;
+    }
+
+    // range overlaps lb (at least)
+    if (r[lb]._min < range._min) range._min = r[lb]._min;
+    var ub = lb+1;
+    while (ub < r.length && r[ub]._min <= range._max) {
+        ub++;
+    }
+    ub--;
+    // ub is the upper bound of the new range
+    if (r[ub]._max > range._max) range._max = r[ub]._max;
+
+    // splice range into this._ranges
+    this._ranges.splice(lb,ub-lb+1,range);
+    return;
+}
+
+_Compound.prototype.isContiguous = function() {
+    // contiguous only when the merged ranges collapse to a single span
+    return this._ranges.length == 1;
+}
+
+_Compound.prototype.ranges = function() {
+    return this._ranges;
+}
+
+_Compound.prototype._pushRanges = function(ranges) {
+    for (var ri = 0; ri < this._ranges.length; ++ri)
+        ranges.push(this._ranges[ri]);
+}
+
+_Compound.prototype.toString = function() {
+    var s = '';
+    for (var r = 0; r < this._ranges.length; ++r) {
+        if (r>0) {
+            s = s + ',';
+        }
+        s = s + this._ranges[r].toString();
+    }
+    return s;
+}
+
+function union(s0, s1) {
+    if (! (s0 instanceof _Compound)) {
+        if (! (s0 instanceof Array))
+            s0 = [s0];
+        s0 = new _Compound(s0);
+    }
+
+    if (s1)
+        s0.insertRange(s1);
+
+    return s0;
+}
+
+function intersection(s0, s1) {
+    var r0 = s0.ranges();
+    var r1 = s1.ranges();
+    var l0 = r0.length, l1 = r1.length;
+    var i0 = 0, i1 = 0;
+    var or = [];
+
+    while (i0 < l0 && i1 < l1) {
+        var c0 = r0[i0], c1 = r1[i1];  // renamed to avoid shadowing the s0/s1 parameters
+        var lapMin = Math.max(c0.min(), c1.min());
+        var lapMax = Math.min(c0.max(), c1.max());
+        if (lapMax >= lapMin) {
+            or.push(new Range(lapMin, lapMax));
+        }
+        if (c0.max() > c1.max()) {
+            ++i1;
+        } else {
+            ++i0;
+        }
+    }
+
+    if (or.length == 0) {
+        return null; // FIXME
+    } else if (or.length == 1) {
+        return or[0];
+    } else {
+        return new _Compound(or);
+    }
+}
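+
+// e.g. intersection(new Range(1, 5), new Range(3, 9)) yields the single
+// Range [3-5]; fully disjoint inputs currently return null (see FIXME above).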
+
+function coverage(s) {
+    var tot = 0;
+    var rl = s.ranges();
+    for (var ri = 0; ri < rl.length; ++ri) {
+        var r = rl[ri];
+        tot += (r.max() - r.min() + 1);
+    }
+    return tot;
+}
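+
+// e.g. coverage(union([new Range(1, 3), new Range(7, 9)])) == 6, since each
+// range covers three positions (bounds are inclusive).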
+
+
+
+function rangeOrder(a, b)
+{
+    if (a.min() < b.min()) {
+        return -1;
+    } else if (a.min() > b.min()) {
+        return 1;
+    } else if (a.max() < b.max()) {
+        return -1;
+    } else if (a.max() > b.max()) {
+        return 1;
+    } else {
+        return 0;
+    }
+}
+
+function _rangeOrder(a, b)
+{
+    if (a._min < b._min) {
+        return -1;
+    } else if (a._min > b._min) {
+        return 1;
+    } else if (a._max < b._max) {
+        return -1;
+    } else if (a._max > b._max) {
+        return 1;
+    } else {
+        return 0;
+    }
+}
+
+return {
+        Range: Range,
+        union: union,
+        intersection: intersection,
+        coverage: coverage,
+        rangeOrder: rangeOrder,
+        _rangeOrder: _rangeOrder
+};
+});
diff --git a/client/galaxy/scripts/libs/bibtex.js b/client/galaxy/scripts/libs/bibtex.js
new file mode 100644
index 0000000..fdaaf03
--- /dev/null
+++ b/client/galaxy/scripts/libs/bibtex.js
@@ -0,0 +1,1867 @@
+/**
+ * Parser.js
+ * Copyright 2012-13 Mayank Lahiri
+ * mlahiri at gmail.com
+ * Released under the BSD License.
+ *
+ * A forgiving BibTeX parser that can:
+ *
+ * (1) operate in streaming or block mode, extracting entries as dictionaries.
+ * (2) convert LaTeX special characters to UTF-8.
+ * (3) best-effort parse malformed entries.
+ * (4) run in a CommonJS environment or a browser, without any dependencies.
+ * (5) be advanced-compiled by Google Closure Compiler.
+ *
+ * Handwritten as a labor of love, not auto-generated from a grammar.
+ *
+ * Modes of usage:
+ *
+ * (1) Synchronous, string
+ *
+ *   var entries = BibtexParser(text);
+ *   console.log(entries);
+ *
+ * (2) Asynchronous, stream
+ *
+ *   var entryCallback = function(entry) { console.log(entry); }
+ *   var parser = new BibtexParser(entryCallback);
+ *   parser.parse(chunk1);
+ *   parser.parse(chunk2);
+ *   ...
+ * 
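+ * In both modes, parse problems are collected rather than thrown: read the
+ * 'errors' array of the synchronous result, or call parser.getErrors() on a
+ * streaming parser. Each error is [line, char in line, char in stream, text].
+ *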
+ * @param {string|function(Object)} arg0 Either a BibTeX string or a callback
+ *                                       function for processing parsed entries.
+ * @constructor
+ */
+function BibtexParser(arg0) {
+  // Determine how this function is to be used
+  if (typeof arg0 == 'string') {
+    // Passed a string, synchronous call without 'new'
+    var tempStorage = {};
+    var entries = [];
+    function accumulator(entry) {
+      entries.push(entry);
+    }
+    var parser = BibtexParser.call(tempStorage, accumulator);
+    parser.parse(arg0);
+    return {
+      'entries':    entries,
+      'errors':     parser.getErrors()
+    }
+  }
+  if (typeof arg0 != 'function') {
+    throw 'Invalid parser construction.';
+  }
+
+  /** @enum {number} */
+  this.STATES_ = {
+    ENTRY_OR_JUNK:    0,
+    OBJECT_TYPE:      1,
+    ENTRY_KEY:        2, 
+    KV_KEY:           3, 
+    EQUALS:           4,
+    KV_VALUE:         5 
+  }
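+  // Typical transitions for '@article{key, title = {T}}':
+  //   ENTRY_OR_JUNK --'@'--> OBJECT_TYPE --'{'--> ENTRY_KEY --','-->
+  //   KV_KEY --> EQUALS --'='--> KV_VALUE --closing '}'--> ENTRY_OR_JUNK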
+  /** @private */ this.DATA_          = {};
+  /** @private */ this.CALLBACK_      = arg0;
+  /** @private */ this.CHAR_          = 0;
+  /** @private */ this.LINE_          = 1;
+  /** @private */ this.CHAR_IN_LINE_  = 0;
+  /** @private */ this.SKIPWS_        = true;
+  /** @private */ this.SKIPCOMMENT_   = true;
+  /** @private */ this.PARSETMP_      = {};
+  /** @private */ this.SKIPTILLEOL_   = false;
+  /** @private */ this.VALBRACES_     = null;
+  /** @private */ this.BRACETYPE_     = null;
+  /** @private */ this.BRACECOUNT_    = 0;
+  /** @private */ this.STATE_         = this.STATES_.ENTRY_OR_JUNK;
+  /** @private */ this.ERRORS_        = [];
+  /** @private */ this.ENTRY_TYPES_   = {
+    'inproceedings'     : 1,
+    'proceedings'       : 2,
+    'article'           : 3,
+    'techreport'        : 4,
+    'misc'              : 5,
+    'mastersthesis'     : 6,
+    'book'              : 7,
+    'phdthesis'         : 8,
+    'incollection'      : 9,
+    'unpublished'       : 10,
+    'inbook'            : 11,
+    'manual'            : 12,
+    'periodical'        : 13,
+    'booklet'           : 14,
+    'masterthesis'      : 15,
+    'conference'        : 16
+    ,'online'           : 998 // Galaxy MOD: Handle @online entries for preprints.
+    ,'data'             : 999 // Galaxy MOD: Handle @data citations coming from figshare.
+  }
+  /** @private */ this.MACROS_        = {
+    'jan'               : 'January',
+    'feb'               : 'February',
+    'mar'               : 'March',
+    'apr'               : 'April',
+    'may'               : 'May',
+    'jun'               : 'June',
+    'jul'               : 'July',
+    'aug'               : 'August',
+    'sep'               : 'September',
+    'oct'               : 'October',
+    'nov'               : 'November',
+    'dec'               : 'December',
+    'Jan'               : 'January',
+    'Feb'               : 'February',
+    'Mar'               : 'March',
+    'Apr'               : 'April',
+    'May'               : 'May',
+    'Jun'               : 'June',
+    'Jul'               : 'July',
+    'Aug'               : 'August',
+    'Sep'               : 'September',
+    'Oct'               : 'October',
+    'Nov'               : 'November',
+    'Dec'               : 'December'
+  }
+
+  /**
+   * Gets an array of all errors encountered during parsing.
+   * Array entries are of the format:
+   *  [ line number, character in line, character in stream, error text ]
+   *
+   * @returns Array<Array>
+   * @public
+   */
+  this.getErrors = function() {
+    return this.ERRORS_;
+  }
+
+  /**
+   * Processes a chunk of data
+   * @public
+   */
+  this.parse = function(chunk) {
+    for (var i = 0; i < chunk.length; i++)
+      this.processCharacter_(chunk[i]);
+  }
+
+  /**
+   * Logs error at current stream position.
+   *
+   * @private
+   */
+  this.error_ = function(text) {
+    this.ERRORS_.push([ this.LINE_, 
+                        this.CHAR_IN_LINE_,
+                        this.CHAR_,
+                        text ])
+  }
+
+  /**
+   * Called after an entire entry has been parsed from the stream.
+   * Performs post-processing and invokes the entry callback pointed to by
+   * this.CALLBACK_. Parsed (but unprocessed) entry data is in this.DATA_.
+   */
+  this.processEntry_ = function() {
+    var data = this.DATA_;
+    if (data.Fields) 
+      for (var f in data.Fields) {
+        var raw = data.Fields[f];
+
+        // Convert LaTeX/BibTeX special characters to their UTF-8 equivalents
+        for (var i = 0; i < this.CHARCONV_.length; i++) {
+          var re = this.CHARCONV_[i][0];
+          var rep = this.CHARCONV_[i][1];
+          raw = raw.replace(re, rep);
+        }
+
+        // Basic substitutions
+        raw = raw.replace(/[\n\r\t]/g, ' ')
+                 .replace(/\s\s+/g, ' ')
+                 .replace(/^\s+|\s+$/g, '')
+
+        // Remove braces and backslashes
+        var len = raw.length;
+        var processed = '';
+        for (var i = 0; i < len; i++) {
+          var c = raw[i];
+          var skip = false;
+          if (c == '\\' && i < len-1) 
+            c = raw[++i];
+          else {
+            if (c == '{' || c == '}')
+              skip = true;
+          }
+          if (!skip)
+            processed += c;
+        }
+        data.Fields[f] = processed
+      }
+
+    if (data.ObjectType == 'string') {
+      for (var f in data.Fields) {  
+        this.MACROS_[f] = data.Fields[f];
+      }
+    } else {
+      // Parsed a new Bibtex entry
+      this.CALLBACK_(data);
+    }
+  }
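+
+  // e.g. an "@string{ acm = {Association for Computing Machinery} }" entry
+  // reaches processEntry_ with ObjectType 'string': its fields are merged
+  // into MACROS_, so a later bare "publisher = acm," expands during parsing.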
+
+
+  /**
+   * Processes next character in the stream, invoking the callback after 
+   * each entry has been found and processed.
+   * 
+   * @private
+   * @param {string} c Next character in input stream
+   */
+  this.processCharacter_ = function(c) {
+    // Housekeeping
+    this.CHAR_++;
+    this.CHAR_IN_LINE_++;
+    if (c == '\n') {
+      this.LINE_++;
+      this.CHAR_IN_LINE_ = 1;
+    }
+
+    // Convenience states for skipping whitespace when needed
+    if (this.SKIPTILLEOL_) {
+      if (c == '\n')
+        this.SKIPTILLEOL_ = false;
+      return;
+    }
+    if (this.SKIPCOMMENT_ && c == '%') {
+      this.SKIPTILLEOL_ = true;
+      return;
+    }
+    if (this.SKIPWS_ && /\s/.test(c))
+      return;
+    this.SKIPWS_ = false;
+    this.SKIPCOMMENT_ = false;
+    this.SKIPTILLEOL_ = false;
+
+    // Main state machine
+    var AnotherIteration = true;
+    while (AnotherIteration) {
+      //console.log(this.LINE_, this.CHAR_IN_LINE_, this.STATE_, c)
+      AnotherIteration = false;
+      switch(this.STATE_) {
+        // -- Scan for an object marker ('@')
+        // -- Reset temporary data structure in case previous entry was garbled
+        case this.STATES_.ENTRY_OR_JUNK:
+          if (c == '@') {
+            // SUCCESS:     Parsed a valid start-of-object marker.
+            // NEXT_STATE:  OBJECT_TYPE
+            this.STATE_ = this.STATES_.OBJECT_TYPE;
+            this.DATA_ = {
+              ObjectType    : ''
+            };
+          }
+          this.BRACETYPE_   = null;
+          this.SKIPWS_      = true;
+          this.SKIPCOMMENT_ = true;
+          break;
+
+        // Start at first non-whitespace character after start-of-object '@'
+        // -- Accept [A-Za-z], break on non-matching character
+        // -- Populate this.DATA_.EntryType and this.DATA_.ObjectType
+        case this.STATES_.OBJECT_TYPE:
+          if (/[A-Za-z]/.test(c)) {
+            this.DATA_.ObjectType += c.toLowerCase();
+            this.SKIPWS_      = true;
+            this.SKIPCOMMENT_ = true;
+          } else {
+            // Break from state and validate object type
+            var ot = this.DATA_.ObjectType;
+            if (ot == 'comment') {
+              this.STATE_ = this.STATES_.ENTRY_OR_JUNK;
+            } else {
+              if (ot == 'string') {
+                this.DATA_.ObjectType = ot;
+                this.DATA_.Fields = {};
+                this.BRACETYPE_ = c;
+                this.BRACECOUNT_ = 1;
+                this.STATE_ = this.STATES_.KV_KEY;
+                this.SKIPWS_      = true;
+                this.SKIPCOMMENT_ = true;
+                this.PARSETMP_ = {
+                  Key:    ''
+                }
+              } else {
+                if (ot == 'preamble') {
+                  this.STATE_ = this.STATES_.ENTRY_OR_JUNK;
+                } else {
+                  if (ot in this.ENTRY_TYPES_) {
+                    // SUCCESS:     Parsed a valid object type.
+                    // NEXT_STATE:  ENTRY_KEY
+                    this.DATA_.ObjectType = 'entry';
+                    this.DATA_.EntryType  = ot;
+                    this.DATA_.EntryKey   = '';
+                    this.STATE_           = this.STATES_.ENTRY_KEY;
+                    AnotherIteration      = true;
+                  } else {
+                    // ERROR:       Unrecognized object type.
+                    // NEXT_STATE:  ENTRY_OR_JUNK
+                    this.error_('Unrecognized object type: "' +
+                                this.DATA_.ObjectType + '"')
+                    this.STATE_ = this.STATES_.ENTRY_OR_JUNK;
+                  }
+                }
+              }
+            }
+          }
+          break;
+
+          // Start at first non-alphabetic character after an entry type
+          // -- Populate this.DATA_.EntryKey
+          case this.STATES_.ENTRY_KEY:
+            if ((c === '{' || c === '(') && this.BRACETYPE_ == null) {
+              this.BRACETYPE_   = c;
+              this.BRACECOUNT_  = 1;
+              this.SKIPWS_      = true;
+              this.SKIPCOMMENT_ = true;
+              break;
+            }
+            if (/[,%\s]/.test(c)) {
+              if (this.DATA_.EntryKey.length < 1) { 
+                // Skip comments and whitespace before entry key
+                this.SKIPWS_      = true;
+                this.SKIPCOMMENT_ = true;
+              } else {
+                if (this.BRACETYPE_ == null) {
+                  // ERROR:       No opening brace for object
+                  // NEXT_STATE:  ENTRY_OR_JUNK
+                  this.error_('No opening brace for object.');
+                  this.STATE_ = this.STATES_.ENTRY_OR_JUNK;
+                } else {
+                  // SUCCESS:     Parsed an entry key
+                  // NEXT_STATE:  KV_KEY
+                  this.SKIPWS_      = true;
+                  this.SKIPCOMMENT_ = true;
+                  AnotherIteration  = true;
+                  this.STATE_       = this.STATES_.KV_KEY;
+                  this.PARSETMP_.Key= '';
+                  this.DATA_.Fields = {};
+                }
+              }
+            } else {
+              this.DATA_.EntryKey += c;
+              this.SKIPWS_        = false;
+              this.SKIPCOMMENT_   = false;
+            }
+            break;
+
+          // Start at first non-whitespace/comment character after entry key.
+          // -- Populate this.PARSETMP_.Key
+          case this.STATES_.KV_KEY:
+            // Test for end of entry
+            if ((c == '}' && this.BRACETYPE_ == '{') ||
+                (c == ')' && this.BRACETYPE_ == '(')) {
+              // SUCCESS:       Parsed an entry, possibly incomplete
+              // NEXT_STATE:    ENTRY_OR_JUNK
+              this.processEntry_();
+              this.SKIPWS_      = true;
+              this.SKIPCOMMENT_ = true;
+              this.STATE_ = this.STATES_.ENTRY_OR_JUNK;
+              break;
+            }
+            if (/[\-A-Za-z:]/.test(c)) {
+              // Add to key
+              this.PARSETMP_.Key  += c;
+              this.SKIPWS_        = false;
+              this.SKIPCOMMENT_   = false;
+            } else {
+              // Either end of key or we haven't encountered start of key
+              if (this.PARSETMP_.Key.length < 1) {
+                // Keep going till we see a key
+                this.SKIPWS_      = true;
+                this.SKIPCOMMENT_ = true;
+              } else {
+                // SUCCESS:       Found full key in K/V pair
+                // NEXT_STATE:    EQUALS
+                this.SKIPWS_      = true;
+                this.SKIPCOMMENT_ = true;
+                this.STATE_       = this.STATES_.EQUALS;
+                AnotherIteration  = true;
+              }
+            }
+            break;
+
+          // Start at first non-alphabetic character after K/V pair key.
+          case this.STATES_.EQUALS:
+            if ((c == '}' && this.BRACETYPE_ == '{') ||
+                (c == ')' && this.BRACETYPE_ == '(')) {
+              // ERROR:         K/V pair with key but no value
+              // NEXT_STATE:    ENTRY_OR_JUNK
+              this.error_('Key-value pair has key "' +
+                          this.PARSETMP_.Key + '", but no value.');
+              this.processEntry_();
+              this.SKIPWS_      = true;
+              this.SKIPCOMMENT_ = true;
+              this.STATE_ = this.STATES_.ENTRY_OR_JUNK;
+              break;
+            }
+            if (c == '=') {
+              // SUCCESS:       Found an equals sign separating key and value
+              // NEXT_STATE:    KV_VALUE
+              this.SKIPWS_          = true;
+              this.SKIPCOMMENT_     = true;
+              this.STATE_           = this.STATES_.KV_VALUE;
+              this.PARSETMP_.Value  = '';
+              this.VALBRACES_       = { '"' : [], '{' : [] };
+            }
+            break;
+
+          // Start at first non-whitespace/comment character after '=' 
+          // -- Populate this.PARSETMP_.Value
+          case this.STATES_.KV_VALUE:
+            var delim             = this.VALBRACES_;
+            var val               = this.PARSETMP_.Value;
+            var doneParsingValue  = false;
+
+            // Test for special characters
+            if (c == '"' || c == '{' || c == '}' || c == ',') {
+              if (c == ',') {
+                // This comma can mean:
+                // (1) just another comma literal
+                // (2) end of a macro reference
+                if (0 === delim['"'].length + delim['{'].length) {
+                  // end of a macro reference
+                  var macro = this.PARSETMP_.Value.trim();
+                  if (macro in this.MACROS_) {
+                    // Successful macro reference
+                    this.PARSETMP_.Value = this.MACROS_[macro];
+                  } else {
+                    // Reference to an undefined macro
+                    this.error_('Reference to an undefined macro: '+macro);
+                  }
+                  doneParsingValue = true;
+                }
+              }
+              if (c == '"') {
+                // This quote can mean:
+                // (1) opening delimiter
+                // (2) closing delimiter
+                // (3) literal, if we have a '{' on the stack
+                if (0 === delim['"'].length + delim['{'].length) {
+                  // opening delimiter
+                  delim['"'].push(this.CHAR_)
+                  this.SKIPWS_        = false;
+                  this.SKIPCOMMENT_   = false;
+                  break;
+                }
+                if (delim['"'].length == 1 && delim['{'].length == 0 &&
+                    (val.length==0 || val[val.length-1] != '\\')) {
+                  // closing delimiter
+                  doneParsingValue = true;
+                } else {
+                  // literal, add to value
+                }
+              }
+              if (c == '{') {
+                // This brace can mean:
+                // (1) opening delimiter
+                // (2) stacked verbatim delimiter
+                if (val.length == 0 || val[val.length-1] != '\\') {
+                  delim['{'].push(this.CHAR_)
+                  this.SKIPWS_        = false;
+                  this.SKIPCOMMENT_   = false;
+                } else {
+                  // literal, add to value
+                }
+              }
+              if (c == '}') {
+                // This brace can mean:
+                // (1) closing delimiter
+                // (2) closing stacked verbatim delimiter
+                // (3) end of object definition if value was a macro
+                if (0 === delim['"'].length + delim['{'].length) {
+                  // end of object definition, after macro
+                  var macro = this.PARSETMP_.Value.trim();
+                  if (macro in this.MACROS_) {
+                    // Successful macro reference
+                    this.PARSETMP_.Value = this.MACROS_[macro];
+                  } else {
+                    // Reference to an undefined macro
+                    this.error_('Reference to an undefined macro: '+macro);
+                  }
+                  AnotherIteration = true;
+                  doneParsingValue = true;
+                } else {
+                  if (val.length == 0 || val[val.length-1] != '\\') {
+                    if (delim['{'].length > 0) {
+                      // pop stack for stacked verbatim delimiter
+                      delim['{'].splice(delim['{'].length-1, 1)
+                      if (0 == delim['{'].length + delim['"'].length) {
+                        // closing delimiter
+                        doneParsingValue = true;
+                      } else {
+                        // end verbatim block
+                      }
+                    }
+                  } else {
+                    // literal, add to value
+                  }
+                }
+              }
+            }
+
+            // If here, then we are either done parsing the value or 
+            // have a literal that should be added to the value.
+            if (doneParsingValue) {
+              // SUCCESS:     value parsed
+              // NEXT_STATE:  KV_KEY
+              this.SKIPWS_        = true;
+              this.SKIPCOMMENT_   = true;
+              this.STATE_         = this.STATES_.KV_KEY;
+              this.DATA_.Fields[this.PARSETMP_.Key] = this.PARSETMP_.Value;
+              this.PARSETMP_      = { Key: '' };
+              this.VALBRACES_     = null;
+            } else {
+              this.PARSETMP_.Value += c;
+            }
+            break;
+      } // end switch (this.STATE_)
+    } // end while(AnotherIteration)
+  } // end function processCharacter 
+
+  /** @private */ this.CHARCONV_ = [
+    [ /\\space /g, '\u0020' ],
+    [ /\\textdollar /g, '\u0024' ],
+    [ /\\textquotesingle /g, '\u0027' ],
+    [ /\\ast /g, '\u002A' ],
+    [ /\\textbackslash /g, '\u005C' ],
+    [ /\\\^\{\}/g, '\u005E' ],
+    [ /\\textasciigrave /g, '\u0060' ],
+    [ /\\lbrace /g, '\u007B' ],
+    [ /\\vert /g, '\u007C' ],
+    [ /\\rbrace /g, '\u007D' ],
+    [ /\\textasciitilde /g, '\u007E' ],
+    [ /\\textexclamdown /g, '\u00A1' ],
+    [ /\\textcent /g, '\u00A2' ],
+    [ /\\textsterling /g, '\u00A3' ],
+    [ /\\textcurrency /g, '\u00A4' ],
+    [ /\\textyen /g, '\u00A5' ],
+    [ /\\textbrokenbar /g, '\u00A6' ],
+    [ /\\textsection /g, '\u00A7' ],
+    [ /\\textasciidieresis /g, '\u00A8' ],
+    [ /\\textcopyright /g, '\u00A9' ],
+    [ /\\textordfeminine /g, '\u00AA' ],
+    [ /\\guillemotleft /g, '\u00AB' ],
+    [ /\\lnot /g, '\u00AC' ],
+    [ /\\textregistered /g, '\u00AE' ],
+    [ /\\textasciimacron /g, '\u00AF' ],
+    [ /\\textdegree /g, '\u00B0' ],
+    [ /\\pm /g, '\u00B1' ],
+    [ /\\textasciiacute /g, '\u00B4' ],
+    [ /\\mathrm\{\\mu\}/g, '\u00B5' ],
+    [ /\\textparagraph /g, '\u00B6' ],
+    [ /\\cdot /g, '\u00B7' ],
+    [ /\\c\{\}/g, '\u00B8' ],
+    [ /\\textordmasculine /g, '\u00BA' ],
+    [ /\\guillemotright /g, '\u00BB' ],
+    [ /\\textonequarter /g, '\u00BC' ],
+    [ /\\textonehalf /g, '\u00BD' ],
+    [ /\\textthreequarters /g, '\u00BE' ],
+    [ /\\textquestiondown /g, '\u00BF' ],
+    [ /\\`\{A\}/g, '\u00C0' ],
+    [ /\\'\{A\}/g, '\u00C1' ],
+    [ /\\\^\{A\}/g, '\u00C2' ],
+    [ /\\~\{A\}/g, '\u00C3' ],
+    [ /\\"\{A\}/g, '\u00C4' ],
+    [ /\\AA /g, '\u00C5' ],
+    [ /\\AE /g, '\u00C6' ],
+    [ /\\c\{C\}/g, '\u00C7' ],
+    [ /\\`\{E\}/g, '\u00C8' ],
+    [ /\\'\{E\}/g, '\u00C9' ],
+    [ /\\\^\{E\}/g, '\u00CA' ],
+    [ /\\"\{E\}/g, '\u00CB' ],
+    [ /\\`\{I\}/g, '\u00CC' ],
+    [ /\\'\{I\}/g, '\u00CD' ],
+    [ /\\\^\{I\}/g, '\u00CE' ],
+    [ /\\"\{I\}/g, '\u00CF' ],
+    [ /\\DH /g, '\u00D0' ],
+    [ /\\~\{N\}/g, '\u00D1' ],
+    [ /\\`\{O\}/g, '\u00D2' ],
+    [ /\\'\{O\}/g, '\u00D3' ],
+    [ /\\\^\{O\}/g, '\u00D4' ],
+    [ /\\~\{O\}/g, '\u00D5' ],
+    [ /\\"\{O\}/g, '\u00D6' ],
+    [ /\\texttimes /g, '\u00D7' ],
+    [ /\\O /g, '\u00D8' ],
+    [ /\\`\{U\}/g, '\u00D9' ],
+    [ /\\'\{U\}/g, '\u00DA' ],
+    [ /\\\^\{U\}/g, '\u00DB' ],
+    [ /\\"\{U\}/g, '\u00DC' ],
+    [ /\\'\{Y\}/g, '\u00DD' ],
+    [ /\\TH /g, '\u00DE' ],
+    [ /\\ss /g, '\u00DF' ],
+    [ /\\`\{a\}/g, '\u00E0' ],
+    [ /\\'\{a\}/g, '\u00E1' ],
+    [ /\\\^\{a\}/g, '\u00E2' ],
+    [ /\\~\{a\}/g, '\u00E3' ],
+    [ /\\"\{a\}/g, '\u00E4' ],
+    [ /\\aa /g, '\u00E5' ],
+    [ /\\ae /g, '\u00E6' ],
+    [ /\\c\{c\}/g, '\u00E7' ],
+    [ /\\`\{e\}/g, '\u00E8' ],
+    [ /\\'\{e\}/g, '\u00E9' ],
+    [ /\\\^\{e\}/g, '\u00EA' ],
+    [ /\\"\{e\}/g, '\u00EB' ],
+    [ /\\`\{\\i\}/g, '\u00EC' ],
+    [ /\\'\{\\i\}/g, '\u00ED' ],
+    [ /\\\^\{\\i\}/g, '\u00EE' ],
+    [ /\\"\{\\i\}/g, '\u00EF' ],
+    [ /\\dh /g, '\u00F0' ],
+    [ /\\~\{n\}/g, '\u00F1' ],
+    [ /\\`\{o\}/g, '\u00F2' ],
+    [ /\\'\{o\}/g, '\u00F3' ],
+    [ /\\\^\{o\}/g, '\u00F4' ],
+    [ /\\~\{o\}/g, '\u00F5' ],
+    [ /\\"\{o\}/g, '\u00F6' ],
+    [ /\\div /g, '\u00F7' ],
+    [ /\\o /g, '\u00F8' ],
+    [ /\\`\{u\}/g, '\u00F9' ],
+    [ /\\'\{u\}/g, '\u00FA' ],
+    [ /\\\^\{u\}/g, '\u00FB' ],
+    [ /\\"\{u\}/g, '\u00FC' ],
+    [ /\\'\{y\}/g, '\u00FD' ],
+    [ /\\th /g, '\u00FE' ],
+    [ /\\"\{y\}/g, '\u00FF' ],
+    [ /\\=\{A\}/g, '\u0100' ],
+    [ /\\=\{a\}/g, '\u0101' ],
+    [ /\\u\{A\}/g, '\u0102' ],
+    [ /\\u\{a\}/g, '\u0103' ],
+    [ /\\k\{A\}/g, '\u0104' ],
+    [ /\\k\{a\}/g, '\u0105' ],
+    [ /\\'\{C\}/g, '\u0106' ],
+    [ /\\'\{c\}/g, '\u0107' ],
+    [ /\\\^\{C\}/g, '\u0108' ],
+    [ /\\\^\{c\}/g, '\u0109' ],
+    [ /\\.\{C\}/g, '\u010A' ],
+    [ /\\.\{c\}/g, '\u010B' ],
+    [ /\\v\{C\}/g, '\u010C' ],
+    [ /\\v\{c\}/g, '\u010D' ],
+    [ /\\v\{D\}/g, '\u010E' ],
+    [ /\\v\{d\}/g, '\u010F' ],
+    [ /\\DJ /g, '\u0110' ],
+    [ /\\dj /g, '\u0111' ],
+    [ /\\=\{E\}/g, '\u0112' ],
+    [ /\\=\{e\}/g, '\u0113' ],
+    [ /\\u\{E\}/g, '\u0114' ],
+    [ /\\u\{e\}/g, '\u0115' ],
+    [ /\\.\{E\}/g, '\u0116' ],
+    [ /\\.\{e\}/g, '\u0117' ],
+    [ /\\k\{E\}/g, '\u0118' ],
+    [ /\\k\{e\}/g, '\u0119' ],
+    [ /\\v\{E\}/g, '\u011A' ],
+    [ /\\v\{e\}/g, '\u011B' ],
+    [ /\\\^\{G\}/g, '\u011C' ],
+    [ /\\\^\{g\}/g, '\u011D' ],
+    [ /\\u\{G\}/g, '\u011E' ],
+    [ /\\u\{g\}/g, '\u011F' ],
+    [ /\\.\{G\}/g, '\u0120' ],
+    [ /\\.\{g\}/g, '\u0121' ],
+    [ /\\c\{G\}/g, '\u0122' ],
+    [ /\\c\{g\}/g, '\u0123' ],
+    [ /\\\^\{H\}/g, '\u0124' ],
+    [ /\\\^\{h\}/g, '\u0125' ],
+    [ /\\Elzxh /g, '\u0127' ],
+    [ /\\~\{I\}/g, '\u0128' ],
+    [ /\\~\{\\i\}/g, '\u0129' ],
+    [ /\\=\{I\}/g, '\u012A' ],
+    [ /\\=\{\\i\}/g, '\u012B' ],
+    [ /\\u\{I\}/g, '\u012C' ],
+    [ /\\u\{\\i\}/g, '\u012D' ],
+    [ /\\k\{I\}/g, '\u012E' ],
+    [ /\\k\{i\}/g, '\u012F' ],
+    [ /\\.\{I\}/g, '\u0130' ],
+    [ /\\i /g, '\u0131' ],
+    [ /\\\^\{J\}/g, '\u0134' ],
+    [ /\\\^\{\\j\}/g, '\u0135' ],
+    [ /\\c\{K\}/g, '\u0136' ],
+    [ /\\c\{k\}/g, '\u0137' ],
+    [ /\\'\{L\}/g, '\u0139' ],
+    [ /\\'\{l\}/g, '\u013A' ],
+    [ /\\c\{L\}/g, '\u013B' ],
+    [ /\\c\{l\}/g, '\u013C' ],
+    [ /\\v\{L\}/g, '\u013D' ],
+    [ /\\v\{l\}/g, '\u013E' ],
+    [ /\\L /g, '\u0141' ],
+    [ /\\l /g, '\u0142' ],
+    [ /\\'\{N\}/g, '\u0143' ],
+    [ /\\'\{n\}/g, '\u0144' ],
+    [ /\\c\{N\}/g, '\u0145' ],
+    [ /\\c\{n\}/g, '\u0146' ],
+    [ /\\v\{N\}/g, '\u0147' ],
+    [ /\\v\{n\}/g, '\u0148' ],
+    [ /\\NG /g, '\u014A' ],
+    [ /\\ng /g, '\u014B' ],
+    [ /\\=\{O\}/g, '\u014C' ],
+    [ /\\=\{o\}/g, '\u014D' ],
+    [ /\\u\{O\}/g, '\u014E' ],
+    [ /\\u\{o\}/g, '\u014F' ],
+    [ /\\H\{O\}/g, '\u0150' ],
+    [ /\\H\{o\}/g, '\u0151' ],
+    [ /\\OE /g, '\u0152' ],
+    [ /\\oe /g, '\u0153' ],
+    [ /\\'\{R\}/g, '\u0154' ],
+    [ /\\'\{r\}/g, '\u0155' ],
+    [ /\\c\{R\}/g, '\u0156' ],
+    [ /\\c\{r\}/g, '\u0157' ],
+    [ /\\v\{R\}/g, '\u0158' ],
+    [ /\\v\{r\}/g, '\u0159' ],
+    [ /\\'\{S\}/g, '\u015A' ],
+    [ /\\'\{s\}/g, '\u015B' ],
+    [ /\\\^\{S\}/g, '\u015C' ],
+    [ /\\\^\{s\}/g, '\u015D' ],
+    [ /\\c\{S\}/g, '\u015E' ],
+    [ /\\c\{s\}/g, '\u015F' ],
+    [ /\\v\{S\}/g, '\u0160' ],
+    [ /\\v\{s\}/g, '\u0161' ],
+    [ /\\c\{T\}/g, '\u0162' ],
+    [ /\\c\{t\}/g, '\u0163' ],
+    [ /\\v\{T\}/g, '\u0164' ],
+    [ /\\v\{t\}/g, '\u0165' ],
+    [ /\\~\{U\}/g, '\u0168' ],
+    [ /\\~\{u\}/g, '\u0169' ],
+    [ /\\=\{U\}/g, '\u016A' ],
+    [ /\\=\{u\}/g, '\u016B' ],
+    [ /\\u\{U\}/g, '\u016C' ],
+    [ /\\u\{u\}/g, '\u016D' ],
+    [ /\\r\{U\}/g, '\u016E' ],
+    [ /\\r\{u\}/g, '\u016F' ],
+    [ /\\H\{U\}/g, '\u0170' ],
+    [ /\\H\{u\}/g, '\u0171' ],
+    [ /\\k\{U\}/g, '\u0172' ],
+    [ /\\k\{u\}/g, '\u0173' ],
+    [ /\\\^\{W\}/g, '\u0174' ],
+    [ /\\\^\{w\}/g, '\u0175' ],
+    [ /\\\^\{Y\}/g, '\u0176' ],
+    [ /\\\^\{y\}/g, '\u0177' ],
+    [ /\\"\{Y\}/g, '\u0178' ],
+    [ /\\'\{Z\}/g, '\u0179' ],
+    [ /\\'\{z\}/g, '\u017A' ],
+    [ /\\.\{Z\}/g, '\u017B' ],
+    [ /\\.\{z\}/g, '\u017C' ],
+    [ /\\v\{Z\}/g, '\u017D' ],
+    [ /\\v\{z\}/g, '\u017E' ],
+    [ /\\texthvlig /g, '\u0195' ],
+    [ /\\textnrleg /g, '\u019E' ],
+    [ /\\eth /g, '\u01AA' ],
+    [ /\\textdoublepipe /g, '\u01C2' ],
+    [ /\\'\{g\}/g, '\u01F5' ],
+    [ /\\Elztrna /g, '\u0250' ],
+    [ /\\Elztrnsa /g, '\u0252' ],
+    [ /\\Elzopeno /g, '\u0254' ],
+    [ /\\Elzrtld /g, '\u0256' ],
+    [ /\\Elzschwa /g, '\u0259' ],
+    [ /\\varepsilon /g, '\u025B' ],
+    [ /\\Elzpgamma /g, '\u0263' ],
+    [ /\\Elzpbgam /g, '\u0264' ],
+    [ /\\Elztrnh /g, '\u0265' ],
+    [ /\\Elzbtdl /g, '\u026C' ],
+    [ /\\Elzrtll /g, '\u026D' ],
+    [ /\\Elztrnm /g, '\u026F' ],
+    [ /\\Elztrnmlr /g, '\u0270' ],
+    [ /\\Elzltlmr /g, '\u0271' ],
+    [ /\\Elzltln /g, '\u0272' ],
+    [ /\\Elzrtln /g, '\u0273' ],
+    [ /\\Elzclomeg /g, '\u0277' ],
+    [ /\\textphi /g, '\u0278' ],
+    [ /\\Elztrnr /g, '\u0279' ],
+    [ /\\Elztrnrl /g, '\u027A' ],
+    [ /\\Elzrttrnr /g, '\u027B' ],
+    [ /\\Elzrl /g, '\u027C' ],
+    [ /\\Elzrtlr /g, '\u027D' ],
+    [ /\\Elzfhr /g, '\u027E' ],
+    [ /\\Elzrtls /g, '\u0282' ],
+    [ /\\Elzesh /g, '\u0283' ],
+    [ /\\Elztrnt /g, '\u0287' ],
+    [ /\\Elzrtlt /g, '\u0288' ],
+    [ /\\Elzpupsil /g, '\u028A' ],
+    [ /\\Elzpscrv /g, '\u028B' ],
+    [ /\\Elzinvv /g, '\u028C' ],
+    [ /\\Elzinvw /g, '\u028D' ],
+    [ /\\Elztrny /g, '\u028E' ],
+    [ /\\Elzrtlz /g, '\u0290' ],
+    [ /\\Elzyogh /g, '\u0292' ],
+    [ /\\Elzglst /g, '\u0294' ],
+    [ /\\Elzreglst /g, '\u0295' ],
+    [ /\\Elzinglst /g, '\u0296' ],
+    [ /\\textturnk /g, '\u029E' ],
+    [ /\\Elzdyogh /g, '\u02A4' ],
+    [ /\\Elztesh /g, '\u02A7' ],
+    [ /\\textasciicaron /g, '\u02C7' ],
+    [ /\\Elzverts /g, '\u02C8' ],
+    [ /\\Elzverti /g, '\u02CC' ],
+    [ /\\Elzlmrk /g, '\u02D0' ],
+    [ /\\Elzhlmrk /g, '\u02D1' ],
+    [ /\\Elzsbrhr /g, '\u02D2' ],
+    [ /\\Elzsblhr /g, '\u02D3' ],
+    [ /\\Elzrais /g, '\u02D4' ],
+    [ /\\Elzlow /g, '\u02D5' ],
+    [ /\\textasciibreve /g, '\u02D8' ],
+    [ /\\textperiodcentered /g, '\u02D9' ],
+    [ /\\r\{\}/g, '\u02DA' ],
+    [ /\\k\{\}/g, '\u02DB' ],
+    [ /\\texttildelow /g, '\u02DC' ],
+    [ /\\H\{\}/g, '\u02DD' ],
+    [ /\\tone\{55\}/g, '\u02E5' ],
+    [ /\\tone\{44\}/g, '\u02E6' ],
+    [ /\\tone\{33\}/g, '\u02E7' ],
+    [ /\\tone\{22\}/g, '\u02E8' ],
+    [ /\\tone\{11\}/g, '\u02E9' ],
+    [ /\\cyrchar\\C/g, '\u030F' ],
+    [ /\\Elzpalh /g, '\u0321' ],
+    [ /\\Elzrh /g, '\u0322' ],
+    [ /\\Elzsbbrg /g, '\u032A' ],
+    [ /\\Elzxl /g, '\u0335' ],
+    [ /\\Elzbar /g, '\u0336' ],
+    [ /\\'\{A\}/g, '\u0386' ],
+    [ /\\'\{E\}/g, '\u0388' ],
+    [ /\\'\{H\}/g, '\u0389' ],
+    [ /\\'\{\}\{I\}/g, '\u038A' ],
+    [ /\\'\{\}O/g, '\u038C' ],
+    [ /\\mathrm\{'Y\}/g, '\u038E' ],
+    [ /\\mathrm\{'\\Omega\}/g, '\u038F' ],
+    [ /\\acute\{\\ddot\{\\iota\}\}/g, '\u0390' ],
+    [ /\\Alpha /g, '\u0391' ],
+    [ /\\Beta /g, '\u0392' ],
+    [ /\\Gamma /g, '\u0393' ],
+    [ /\\Delta /g, '\u0394' ],
+    [ /\\Epsilon /g, '\u0395' ],
+    [ /\\Zeta /g, '\u0396' ],
+    [ /\\Eta /g, '\u0397' ],
+    [ /\\Theta /g, '\u0398' ],
+    [ /\\Iota /g, '\u0399' ],
+    [ /\\Kappa /g, '\u039A' ],
+    [ /\\Lambda /g, '\u039B' ],
+    [ /\\Xi /g, '\u039E' ],
+    [ /\\Pi /g, '\u03A0' ],
+    [ /\\Rho /g, '\u03A1' ],
+    [ /\\Sigma /g, '\u03A3' ],
+    [ /\\Tau /g, '\u03A4' ],
+    [ /\\Upsilon /g, '\u03A5' ],
+    [ /\\Phi /g, '\u03A6' ],
+    [ /\\Chi /g, '\u03A7' ],
+    [ /\\Psi /g, '\u03A8' ],
+    [ /\\Omega /g, '\u03A9' ],
+    [ /\\mathrm\{\\ddot\{I\}\}/g, '\u03AA' ],
+    [ /\\mathrm\{\\ddot\{Y\}\}/g, '\u03AB' ],
+    [ /\\'\{\$\\alpha\$\}/g, '\u03AC' ],
+    [ /\\acute\{\\epsilon\}/g, '\u03AD' ],
+    [ /\\acute\{\\eta\}/g, '\u03AE' ],
+    [ /\\acute\{\\iota\}/g, '\u03AF' ],
+    [ /\\acute\{\\ddot\{\\upsilon\}\}/g, '\u03B0' ],
+    [ /\\alpha /g, '\u03B1' ],
+    [ /\\beta /g, '\u03B2' ],
+    [ /\\gamma /g, '\u03B3' ],
+    [ /\\delta /g, '\u03B4' ],
+    [ /\\epsilon /g, '\u03B5' ],
+    [ /\\zeta /g, '\u03B6' ],
+    [ /\\eta /g, '\u03B7' ],
+    [ /\\texttheta /g, '\u03B8' ],
+    [ /\\iota /g, '\u03B9' ],
+    [ /\\kappa /g, '\u03BA' ],
+    [ /\\lambda /g, '\u03BB' ],
+    [ /\\mu /g, '\u03BC' ],
+    [ /\\nu /g, '\u03BD' ],
+    [ /\\xi /g, '\u03BE' ],
+    [ /\\pi /g, '\u03C0' ],
+    [ /\\rho /g, '\u03C1' ],
+    [ /\\varsigma /g, '\u03C2' ],
+    [ /\\sigma /g, '\u03C3' ],
+    [ /\\tau /g, '\u03C4' ],
+    [ /\\upsilon /g, '\u03C5' ],
+    [ /\\varphi /g, '\u03C6' ],
+    [ /\\chi /g, '\u03C7' ],
+    [ /\\psi /g, '\u03C8' ],
+    [ /\\omega /g, '\u03C9' ],
+    [ /\\ddot\{\\iota\}/g, '\u03CA' ],
+    [ /\\ddot\{\\upsilon\}/g, '\u03CB' ],
+    [ /\\'\{o\}/g, '\u03CC' ],
+    [ /\\acute\{\\upsilon\}/g, '\u03CD' ],
+    [ /\\acute\{\\omega\}/g, '\u03CE' ],
+    [ /\\Pisymbol\{ppi022\}\{87\}/g, '\u03D0' ],
+    [ /\\textvartheta /g, '\u03D1' ],
+    [ /\\Upsilon /g, '\u03D2' ],
+    [ /\\phi /g, '\u03D5' ],
+    [ /\\varpi /g, '\u03D6' ],
+    [ /\\Stigma /g, '\u03DA' ],
+    [ /\\Digamma /g, '\u03DC' ],
+    [ /\\digamma /g, '\u03DD' ],
+    [ /\\Koppa /g, '\u03DE' ],
+    [ /\\Sampi /g, '\u03E0' ],
+    [ /\\varkappa /g, '\u03F0' ],
+    [ /\\varrho /g, '\u03F1' ],
+    [ /\\textTheta /g, '\u03F4' ],
+    [ /\\backepsilon /g, '\u03F6' ],
+    [ /\\cyrchar\\CYRYO /g, '\u0401' ],
+    [ /\\cyrchar\\CYRDJE /g, '\u0402' ],
+    [ /\\cyrchar\{\\'\\CYRG\}/g, '\u0403' ],
+    [ /\\cyrchar\\CYRIE /g, '\u0404' ],
+    [ /\\cyrchar\\CYRDZE /g, '\u0405' ],
+    [ /\\cyrchar\\CYRII /g, '\u0406' ],
+    [ /\\cyrchar\\CYRYI /g, '\u0407' ],
+    [ /\\cyrchar\\CYRJE /g, '\u0408' ],
+    [ /\\cyrchar\\CYRLJE /g, '\u0409' ],
+    [ /\\cyrchar\\CYRNJE /g, '\u040A' ],
+    [ /\\cyrchar\\CYRTSHE /g, '\u040B' ],
+    [ /\\cyrchar\{\\'\\CYRK\}/g, '\u040C' ],
+    [ /\\cyrchar\\CYRUSHRT /g, '\u040E' ],
+    [ /\\cyrchar\\CYRDZHE /g, '\u040F' ],
+    [ /\\cyrchar\\CYRA /g, '\u0410' ],
+    [ /\\cyrchar\\CYRB /g, '\u0411' ],
+    [ /\\cyrchar\\CYRV /g, '\u0412' ],
+    [ /\\cyrchar\\CYRG /g, '\u0413' ],
+    [ /\\cyrchar\\CYRD /g, '\u0414' ],
+    [ /\\cyrchar\\CYRE /g, '\u0415' ],
+    [ /\\cyrchar\\CYRZH /g, '\u0416' ],
+    [ /\\cyrchar\\CYRZ /g, '\u0417' ],
+    [ /\\cyrchar\\CYRI /g, '\u0418' ],
+    [ /\\cyrchar\\CYRISHRT /g, '\u0419' ],
+    [ /\\cyrchar\\CYRK /g, '\u041A' ],
+    [ /\\cyrchar\\CYRL /g, '\u041B' ],
+    [ /\\cyrchar\\CYRM /g, '\u041C' ],
+    [ /\\cyrchar\\CYRN /g, '\u041D' ],
+    [ /\\cyrchar\\CYRO /g, '\u041E' ],
+    [ /\\cyrchar\\CYRP /g, '\u041F' ],
+    [ /\\cyrchar\\CYRR /g, '\u0420' ],
+    [ /\\cyrchar\\CYRS /g, '\u0421' ],
+    [ /\\cyrchar\\CYRT /g, '\u0422' ],
+    [ /\\cyrchar\\CYRU /g, '\u0423' ],
+    [ /\\cyrchar\\CYRF /g, '\u0424' ],
+    [ /\\cyrchar\\CYRH /g, '\u0425' ],
+    [ /\\cyrchar\\CYRC /g, '\u0426' ],
+    [ /\\cyrchar\\CYRCH /g, '\u0427' ],
+    [ /\\cyrchar\\CYRSH /g, '\u0428' ],
+    [ /\\cyrchar\\CYRSHCH /g, '\u0429' ],
+    [ /\\cyrchar\\CYRHRDSN /g, '\u042A' ],
+    [ /\\cyrchar\\CYRERY /g, '\u042B' ],
+    [ /\\cyrchar\\CYRSFTSN /g, '\u042C' ],
+    [ /\\cyrchar\\CYREREV /g, '\u042D' ],
+    [ /\\cyrchar\\CYRYU /g, '\u042E' ],
+    [ /\\cyrchar\\CYRYA /g, '\u042F' ],
+    [ /\\cyrchar\\cyra /g, '\u0430' ],
+    [ /\\cyrchar\\cyrb /g, '\u0431' ],
+    [ /\\cyrchar\\cyrv /g, '\u0432' ],
+    [ /\\cyrchar\\cyrg /g, '\u0433' ],
+    [ /\\cyrchar\\cyrd /g, '\u0434' ],
+    [ /\\cyrchar\\cyre /g, '\u0435' ],
+    [ /\\cyrchar\\cyrzh /g, '\u0436' ],
+    [ /\\cyrchar\\cyrz /g, '\u0437' ],
+    [ /\\cyrchar\\cyri /g, '\u0438' ],
+    [ /\\cyrchar\\cyrishrt /g, '\u0439' ],
+    [ /\\cyrchar\\cyrk /g, '\u043A' ],
+    [ /\\cyrchar\\cyrl /g, '\u043B' ],
+    [ /\\cyrchar\\cyrm /g, '\u043C' ],
+    [ /\\cyrchar\\cyrn /g, '\u043D' ],
+    [ /\\cyrchar\\cyro /g, '\u043E' ],
+    [ /\\cyrchar\\cyrp /g, '\u043F' ],
+    [ /\\cyrchar\\cyrr /g, '\u0440' ],
+    [ /\\cyrchar\\cyrs /g, '\u0441' ],
+    [ /\\cyrchar\\cyrt /g, '\u0442' ],
+    [ /\\cyrchar\\cyru /g, '\u0443' ],
+    [ /\\cyrchar\\cyrf /g, '\u0444' ],
+    [ /\\cyrchar\\cyrh /g, '\u0445' ],
+    [ /\\cyrchar\\cyrc /g, '\u0446' ],
+    [ /\\cyrchar\\cyrch /g, '\u0447' ],
+    [ /\\cyrchar\\cyrsh /g, '\u0448' ],
+    [ /\\cyrchar\\cyrshch /g, '\u0449' ],
+    [ /\\cyrchar\\cyrhrdsn /g, '\u044A' ],
+    [ /\\cyrchar\\cyrery /g, '\u044B' ],
+    [ /\\cyrchar\\cyrsftsn /g, '\u044C' ],
+    [ /\\cyrchar\\cyrerev /g, '\u044D' ],
+    [ /\\cyrchar\\cyryu /g, '\u044E' ],
+    [ /\\cyrchar\\cyrya /g, '\u044F' ],
+    [ /\\cyrchar\\cyryo /g, '\u0451' ],
+    [ /\\cyrchar\\cyrdje /g, '\u0452' ],
+    [ /\\cyrchar\{\\'\\cyrg\}/g, '\u0453' ],
+    [ /\\cyrchar\\cyrie /g, '\u0454' ],
+    [ /\\cyrchar\\cyrdze /g, '\u0455' ],
+    [ /\\cyrchar\\cyrii /g, '\u0456' ],
+    [ /\\cyrchar\\cyryi /g, '\u0457' ],
+    [ /\\cyrchar\\cyrje /g, '\u0458' ],
+    [ /\\cyrchar\\cyrlje /g, '\u0459' ],
+    [ /\\cyrchar\\cyrnje /g, '\u045A' ],
+    [ /\\cyrchar\\cyrtshe /g, '\u045B' ],
+    [ /\\cyrchar\{\\'\\cyrk\}/g, '\u045C' ],
+    [ /\\cyrchar\\cyrushrt /g, '\u045E' ],
+    [ /\\cyrchar\\cyrdzhe /g, '\u045F' ],
+    [ /\\cyrchar\\CYROMEGA /g, '\u0460' ],
+    [ /\\cyrchar\\cyromega /g, '\u0461' ],
+    [ /\\cyrchar\\CYRYAT /g, '\u0462' ],
+    [ /\\cyrchar\\CYRIOTE /g, '\u0464' ],
+    [ /\\cyrchar\\cyriote /g, '\u0465' ],
+    [ /\\cyrchar\\CYRLYUS /g, '\u0466' ],
+    [ /\\cyrchar\\cyrlyus /g, '\u0467' ],
+    [ /\\cyrchar\\CYRIOTLYUS /g, '\u0468' ],
+    [ /\\cyrchar\\cyriotlyus /g, '\u0469' ],
+    [ /\\cyrchar\\CYRBYUS /g, '\u046A' ],
+    [ /\\cyrchar\\CYRIOTBYUS /g, '\u046C' ],
+    [ /\\cyrchar\\cyriotbyus /g, '\u046D' ],
+    [ /\\cyrchar\\CYRKSI /g, '\u046E' ],
+    [ /\\cyrchar\\cyrksi /g, '\u046F' ],
+    [ /\\cyrchar\\CYRPSI /g, '\u0470' ],
+    [ /\\cyrchar\\cyrpsi /g, '\u0471' ],
+    [ /\\cyrchar\\CYRFITA /g, '\u0472' ],
+    [ /\\cyrchar\\CYRIZH /g, '\u0474' ],
+    [ /\\cyrchar\\CYRUK /g, '\u0478' ],
+    [ /\\cyrchar\\cyruk /g, '\u0479' ],
+    [ /\\cyrchar\\CYROMEGARND /g, '\u047A' ],
+    [ /\\cyrchar\\cyromegarnd /g, '\u047B' ],
+    [ /\\cyrchar\\CYROMEGATITLO /g, '\u047C' ],
+    [ /\\cyrchar\\cyromegatitlo /g, '\u047D' ],
+    [ /\\cyrchar\\CYROT /g, '\u047E' ],
+    [ /\\cyrchar\\cyrot /g, '\u047F' ],
+    [ /\\cyrchar\\CYRKOPPA /g, '\u0480' ],
+    [ /\\cyrchar\\cyrkoppa /g, '\u0481' ],
+    [ /\\cyrchar\\cyrthousands /g, '\u0482' ],
+    [ /\\cyrchar\\cyrhundredthousands /g, '\u0488' ],
+    [ /\\cyrchar\\cyrmillions /g, '\u0489' ],
+    [ /\\cyrchar\\CYRSEMISFTSN /g, '\u048C' ],
+    [ /\\cyrchar\\cyrsemisftsn /g, '\u048D' ],
+    [ /\\cyrchar\\CYRRTICK /g, '\u048E' ],
+    [ /\\cyrchar\\cyrrtick /g, '\u048F' ],
+    [ /\\cyrchar\\CYRGUP /g, '\u0490' ],
+    [ /\\cyrchar\\cyrgup /g, '\u0491' ],
+    [ /\\cyrchar\\CYRGHCRS /g, '\u0492' ],
+    [ /\\cyrchar\\cyrghcrs /g, '\u0493' ],
+    [ /\\cyrchar\\CYRGHK /g, '\u0494' ],
+    [ /\\cyrchar\\cyrghk /g, '\u0495' ],
+    [ /\\cyrchar\\CYRZHDSC /g, '\u0496' ],
+    [ /\\cyrchar\\cyrzhdsc /g, '\u0497' ],
+    [ /\\cyrchar\\CYRZDSC /g, '\u0498' ],
+    [ /\\cyrchar\\cyrzdsc /g, '\u0499' ],
+    [ /\\cyrchar\\CYRKDSC /g, '\u049A' ],
+    [ /\\cyrchar\\cyrkdsc /g, '\u049B' ],
+    [ /\\cyrchar\\CYRKVCRS /g, '\u049C' ],
+    [ /\\cyrchar\\cyrkvcrs /g, '\u049D' ],
+    [ /\\cyrchar\\CYRKHCRS /g, '\u049E' ],
+    [ /\\cyrchar\\cyrkhcrs /g, '\u049F' ],
+    [ /\\cyrchar\\CYRKBEAK /g, '\u04A0' ],
+    [ /\\cyrchar\\cyrkbeak /g, '\u04A1' ],
+    [ /\\cyrchar\\CYRNDSC /g, '\u04A2' ],
+    [ /\\cyrchar\\cyrndsc /g, '\u04A3' ],
+    [ /\\cyrchar\\CYRNG /g, '\u04A4' ],
+    [ /\\cyrchar\\cyrng /g, '\u04A5' ],
+    [ /\\cyrchar\\CYRPHK /g, '\u04A6' ],
+    [ /\\cyrchar\\cyrphk /g, '\u04A7' ],
+    [ /\\cyrchar\\CYRABHHA /g, '\u04A8' ],
+    [ /\\cyrchar\\cyrabhha /g, '\u04A9' ],
+    [ /\\cyrchar\\CYRSDSC /g, '\u04AA' ],
+    [ /\\cyrchar\\cyrsdsc /g, '\u04AB' ],
+    [ /\\cyrchar\\CYRTDSC /g, '\u04AC' ],
+    [ /\\cyrchar\\cyrtdsc /g, '\u04AD' ],
+    [ /\\cyrchar\\CYRY /g, '\u04AE' ],
+    [ /\\cyrchar\\cyry /g, '\u04AF' ],
+    [ /\\cyrchar\\CYRYHCRS /g, '\u04B0' ],
+    [ /\\cyrchar\\cyryhcrs /g, '\u04B1' ],
+    [ /\\cyrchar\\CYRHDSC /g, '\u04B2' ],
+    [ /\\cyrchar\\cyrhdsc /g, '\u04B3' ],
+    [ /\\cyrchar\\CYRTETSE /g, '\u04B4' ],
+    [ /\\cyrchar\\cyrtetse /g, '\u04B5' ],
+    [ /\\cyrchar\\CYRCHRDSC /g, '\u04B6' ],
+    [ /\\cyrchar\\cyrchrdsc /g, '\u04B7' ],
+    [ /\\cyrchar\\CYRCHVCRS /g, '\u04B8' ],
+    [ /\\cyrchar\\cyrchvcrs /g, '\u04B9' ],
+    [ /\\cyrchar\\CYRSHHA /g, '\u04BA' ],
+    [ /\\cyrchar\\cyrshha /g, '\u04BB' ],
+    [ /\\cyrchar\\CYRABHCH /g, '\u04BC' ],
+    [ /\\cyrchar\\cyrabhch /g, '\u04BD' ],
+    [ /\\cyrchar\\CYRABHCHDSC /g, '\u04BE' ],
+    [ /\\cyrchar\\cyrabhchdsc /g, '\u04BF' ],
+    [ /\\cyrchar\\CYRpalochka /g, '\u04C0' ],
+    [ /\\cyrchar\\CYRKHK /g, '\u04C3' ],
+    [ /\\cyrchar\\cyrkhk /g, '\u04C4' ],
+    [ /\\cyrchar\\CYRNHK /g, '\u04C7' ],
+    [ /\\cyrchar\\cyrnhk /g, '\u04C8' ],
+    [ /\\cyrchar\\CYRCHLDSC /g, '\u04CB' ],
+    [ /\\cyrchar\\cyrchldsc /g, '\u04CC' ],
+    [ /\\cyrchar\\CYRAE /g, '\u04D4' ],
+    [ /\\cyrchar\\cyrae /g, '\u04D5' ],
+    [ /\\cyrchar\\CYRSCHWA /g, '\u04D8' ],
+    [ /\\cyrchar\\cyrschwa /g, '\u04D9' ],
+    [ /\\cyrchar\\CYRABHDZE /g, '\u04E0' ],
+    [ /\\cyrchar\\cyrabhdze /g, '\u04E1' ],
+    [ /\\cyrchar\\CYROTLD /g, '\u04E8' ],
+    [ /\\cyrchar\\cyrotld /g, '\u04E9' ],
+    [ /\\hspace\{0.6em\}/g, '\u2002' ],
+    [ /\\hspace\{1em\}/g, '\u2003' ],
+    [ /\\hspace\{0.33em\}/g, '\u2004' ],
+    [ /\\hspace\{0.25em\}/g, '\u2005' ],
+    [ /\\hspace\{0.166em\}/g, '\u2006' ],
+    [ /\\hphantom\{0\}/g, '\u2007' ],
+    [ /\\hphantom\{,\}/g, '\u2008' ],
+    [ /\\hspace\{0.167em\}/g, '\u2009' ],
+    [ /\\mkern1mu /g, '\u200A' ],
+    [ /\\textendash /g, '\u2013' ],
+    [ /\\textemdash /g, '\u2014' ],
+    [ /\\rule\{1em\}\{1pt\}/g, '\u2015' ],
+    [ /\\Vert /g, '\u2016' ],
+    [ /\\Elzreapos /g, '\u201B' ],
+    [ /\\textquotedblleft /g, '\u201C' ],
+    [ /\\textquotedblright /g, '\u201D' ],
+    [ /\\textdagger /g, '\u2020' ],
+    [ /\\textdaggerdbl /g, '\u2021' ],
+    [ /\\textbullet /g, '\u2022' ],
+    [ /\\ldots /g, '\u2026' ],
+    [ /\\textperthousand /g, '\u2030' ],
+    [ /\\textpertenthousand /g, '\u2031' ],
+    [ /\\backprime /g, '\u2035' ],
+    [ /\\guilsinglleft /g, '\u2039' ],
+    [ /\\guilsinglright /g, '\u203A' ],
+    [ /\\mkern4mu /g, '\u205F' ],
+    [ /\\nolinebreak /g, '\u2060' ],
+    [ /\\ensuremath\{\\Elzpes\}/g, '\u20A7' ],
+    [ /\\mbox\{\\texteuro\} /g, '\u20AC' ],
+    [ /\\dddot /g, '\u20DB' ],
+    [ /\\ddddot /g, '\u20DC' ],
+    [ /\\mathbb\{C\}/g, '\u2102' ],
+    [ /\\mathscr\{g\}/g, '\u210A' ],
+    [ /\\mathscr\{H\}/g, '\u210B' ],
+    [ /\\mathfrak\{H\}/g, '\u210C' ],
+    [ /\\mathbb\{H\}/g, '\u210D' ],
+    [ /\\hslash /g, '\u210F' ],
+    [ /\\mathscr\{I\}/g, '\u2110' ],
+    [ /\\mathfrak\{I\}/g, '\u2111' ],
+    [ /\\mathscr\{L\}/g, '\u2112' ],
+    [ /\\mathscr\{l\}/g, '\u2113' ],
+    [ /\\mathbb\{N\}/g, '\u2115' ],
+    [ /\\cyrchar\\textnumero /g, '\u2116' ],
+    [ /\\wp /g, '\u2118' ],
+    [ /\\mathbb\{P\}/g, '\u2119' ],
+    [ /\\mathbb\{Q\}/g, '\u211A' ],
+    [ /\\mathscr\{R\}/g, '\u211B' ],
+    [ /\\mathfrak\{R\}/g, '\u211C' ],
+    [ /\\mathbb\{R\}/g, '\u211D' ],
+    [ /\\Elzxrat /g, '\u211E' ],
+    [ /\\texttrademark /g, '\u2122' ],
+    [ /\\mathbb\{Z\}/g, '\u2124' ],
+    [ /\\Omega /g, '\u2126' ],
+    [ /\\mho /g, '\u2127' ],
+    [ /\\mathfrak\{Z\}/g, '\u2128' ],
+    [ /\\ElsevierGlyph\{2129\}/g, '\u2129' ],
+    [ /\\AA /g, '\u212B' ],
+    [ /\\mathscr\{B\}/g, '\u212C' ],
+    [ /\\mathfrak\{C\}/g, '\u212D' ],
+    [ /\\mathscr\{e\}/g, '\u212F' ],
+    [ /\\mathscr\{E\}/g, '\u2130' ],
+    [ /\\mathscr\{F\}/g, '\u2131' ],
+    [ /\\mathscr\{M\}/g, '\u2133' ],
+    [ /\\mathscr\{o\}/g, '\u2134' ],
+    [ /\\aleph /g, '\u2135' ],
+    [ /\\beth /g, '\u2136' ],
+    [ /\\gimel /g, '\u2137' ],
+    [ /\\daleth /g, '\u2138' ],
+    [ /\\textfrac\{1\}\{3\}/g, '\u2153' ],
+    [ /\\textfrac\{2\}\{3\}/g, '\u2154' ],
+    [ /\\textfrac\{1\}\{5\}/g, '\u2155' ],
+    [ /\\textfrac\{2\}\{5\}/g, '\u2156' ],
+    [ /\\textfrac\{3\}\{5\}/g, '\u2157' ],
+    [ /\\textfrac\{4\}\{5\}/g, '\u2158' ],
+    [ /\\textfrac\{1\}\{6\}/g, '\u2159' ],
+    [ /\\textfrac\{5\}\{6\}/g, '\u215A' ],
+    [ /\\textfrac\{1\}\{8\}/g, '\u215B' ],
+    [ /\\textfrac\{3\}\{8\}/g, '\u215C' ],
+    [ /\\textfrac\{5\}\{8\}/g, '\u215D' ],
+    [ /\\textfrac\{7\}\{8\}/g, '\u215E' ],
+    [ /\\leftarrow /g, '\u2190' ],
+    [ /\\uparrow /g, '\u2191' ],
+    [ /\\rightarrow /g, '\u2192' ],
+    [ /\\downarrow /g, '\u2193' ],
+    [ /\\leftrightarrow /g, '\u2194' ],
+    [ /\\updownarrow /g, '\u2195' ],
+    [ /\\nwarrow /g, '\u2196' ],
+    [ /\\nearrow /g, '\u2197' ],
+    [ /\\searrow /g, '\u2198' ],
+    [ /\\swarrow /g, '\u2199' ],
+    [ /\\nleftarrow /g, '\u219A' ],
+    [ /\\nrightarrow /g, '\u219B' ],
+    [ /\\arrowwaveright /g, '\u219C' ],
+    [ /\\arrowwaveright /g, '\u219D' ], // NOTE: duplicate of the pattern above; never matched
+    [ /\\twoheadleftarrow /g, '\u219E' ],
+    [ /\\twoheadrightarrow /g, '\u21A0' ],
+    [ /\\leftarrowtail /g, '\u21A2' ],
+    [ /\\rightarrowtail /g, '\u21A3' ],
+    [ /\\mapsto /g, '\u21A6' ],
+    [ /\\hookleftarrow /g, '\u21A9' ],
+    [ /\\hookrightarrow /g, '\u21AA' ],
+    [ /\\looparrowleft /g, '\u21AB' ],
+    [ /\\looparrowright /g, '\u21AC' ],
+    [ /\\leftrightsquigarrow /g, '\u21AD' ],
+    [ /\\nleftrightarrow /g, '\u21AE' ],
+    [ /\\Lsh /g, '\u21B0' ],
+    [ /\\Rsh /g, '\u21B1' ],
+    [ /\\ElsevierGlyph\{21B3\}/g, '\u21B3' ],
+    [ /\\curvearrowleft /g, '\u21B6' ],
+    [ /\\curvearrowright /g, '\u21B7' ],
+    [ /\\circlearrowleft /g, '\u21BA' ],
+    [ /\\circlearrowright /g, '\u21BB' ],
+    [ /\\leftharpoonup /g, '\u21BC' ],
+    [ /\\leftharpoondown /g, '\u21BD' ],
+    [ /\\upharpoonright /g, '\u21BE' ],
+    [ /\\upharpoonleft /g, '\u21BF' ],
+    [ /\\rightharpoonup /g, '\u21C0' ],
+    [ /\\rightharpoondown /g, '\u21C1' ],
+    [ /\\downharpoonright /g, '\u21C2' ],
+    [ /\\downharpoonleft /g, '\u21C3' ],
+    [ /\\rightleftarrows /g, '\u21C4' ],
+    [ /\\dblarrowupdown /g, '\u21C5' ],
+    [ /\\leftrightarrows /g, '\u21C6' ],
+    [ /\\leftleftarrows /g, '\u21C7' ],
+    [ /\\upuparrows /g, '\u21C8' ],
+    [ /\\rightrightarrows /g, '\u21C9' ],
+    [ /\\downdownarrows /g, '\u21CA' ],
+    [ /\\leftrightharpoons /g, '\u21CB' ],
+    [ /\\rightleftharpoons /g, '\u21CC' ],
+    [ /\\nLeftarrow /g, '\u21CD' ],
+    [ /\\nLeftrightarrow /g, '\u21CE' ],
+    [ /\\nRightarrow /g, '\u21CF' ],
+    [ /\\Leftarrow /g, '\u21D0' ],
+    [ /\\Uparrow /g, '\u21D1' ],
+    [ /\\Rightarrow /g, '\u21D2' ],
+    [ /\\Downarrow /g, '\u21D3' ],
+    [ /\\Leftrightarrow /g, '\u21D4' ],
+    [ /\\Updownarrow /g, '\u21D5' ],
+    [ /\\Lleftarrow /g, '\u21DA' ],
+    [ /\\Rrightarrow /g, '\u21DB' ],
+    [ /\\rightsquigarrow /g, '\u21DD' ],
+    [ /\\DownArrowUpArrow /g, '\u21F5' ],
+    [ /\\forall /g, '\u2200' ],
+    [ /\\complement /g, '\u2201' ],
+    [ /\\partial /g, '\u2202' ],
+    [ /\\exists /g, '\u2203' ],
+    [ /\\nexists /g, '\u2204' ],
+    [ /\\varnothing /g, '\u2205' ],
+    [ /\\nabla /g, '\u2207' ],
+    [ /\\in /g, '\u2208' ],
+    [ /\\not\\in /g, '\u2209' ],
+    [ /\\ni /g, '\u220B' ],
+    [ /\\not\\ni /g, '\u220C' ],
+    [ /\\prod /g, '\u220F' ],
+    [ /\\coprod /g, '\u2210' ],
+    [ /\\sum /g, '\u2211' ],
+    [ /\\mp /g, '\u2213' ],
+    [ /\\dotplus /g, '\u2214' ],
+    [ /\\setminus /g, '\u2216' ],
+    [ /\\circ /g, '\u2218' ],
+    [ /\\bullet /g, '\u2219' ],
+    [ /\\surd /g, '\u221A' ],
+    [ /\\propto /g, '\u221D' ],
+    [ /\\infty /g, '\u221E' ],
+    [ /\\rightangle /g, '\u221F' ],
+    [ /\\angle /g, '\u2220' ],
+    [ /\\measuredangle /g, '\u2221' ],
+    [ /\\sphericalangle /g, '\u2222' ],
+    [ /\\mid /g, '\u2223' ],
+    [ /\\nmid /g, '\u2224' ],
+    [ /\\parallel /g, '\u2225' ],
+    [ /\\nparallel /g, '\u2226' ],
+    [ /\\wedge /g, '\u2227' ],
+    [ /\\vee /g, '\u2228' ],
+    [ /\\cap /g, '\u2229' ],
+    [ /\\cup /g, '\u222A' ],
+    [ /\\int /g, '\u222B' ],
+    [ /\\int\\!\\int /g, '\u222C' ],
+    [ /\\int\\!\\int\\!\\int /g, '\u222D' ],
+    [ /\\oint /g, '\u222E' ],
+    [ /\\surfintegral /g, '\u222F' ],
+    [ /\\volintegral /g, '\u2230' ],
+    [ /\\clwintegral /g, '\u2231' ],
+    [ /\\ElsevierGlyph\{2232\}/g, '\u2232' ],
+    [ /\\ElsevierGlyph\{2233\}/g, '\u2233' ],
+    [ /\\therefore /g, '\u2234' ],
+    [ /\\because /g, '\u2235' ],
+    [ /\\Colon /g, '\u2237' ],
+    [ /\\ElsevierGlyph\{2238\}/g, '\u2238' ],
+    [ /\\mathbin\{\{:\}\\!\\!\{\-\}\\!\\!\{:\}\}/g, '\u223A' ],
+    [ /\\homothetic /g, '\u223B' ],
+    [ /\\sim /g, '\u223C' ],
+    [ /\\backsim /g, '\u223D' ],
+    [ /\\lazysinv /g, '\u223E' ],
+    [ /\\wr /g, '\u2240' ],
+    [ /\\not\\sim /g, '\u2241' ],
+    [ /\\ElsevierGlyph\{2242\}/g, '\u2242' ],
+    [ /\\NotEqualTilde /g, '\u2242-00338' ],  // NOTE: targets of the form '\uXXXX-0YYYY' insert the base character followed by the literal text '-0YYYY'; they do not emit the combining character U+YYYY
+    [ /\\simeq /g, '\u2243' ],
+    [ /\\not\\simeq /g, '\u2244' ],
+    [ /\\cong /g, '\u2245' ],
+    [ /\\approxnotequal /g, '\u2246' ],
+    [ /\\not\\cong /g, '\u2247' ],
+    [ /\\approx /g, '\u2248' ],
+    [ /\\not\\approx /g, '\u2249' ],
+    [ /\\approxeq /g, '\u224A' ],
+    [ /\\tildetrpl /g, '\u224B' ],
+    [ /\\not\\apid /g, '\u224B-00338' ],
+    [ /\\allequal /g, '\u224C' ],
+    [ /\\asymp /g, '\u224D' ],
+    [ /\\Bumpeq /g, '\u224E' ],
+    [ /\\NotHumpDownHump /g, '\u224E-00338' ],
+    [ /\\bumpeq /g, '\u224F' ],
+    [ /\\NotHumpEqual /g, '\u224F-00338' ],
+    [ /\\doteq /g, '\u2250' ],
+    [ /\\not\\doteq/g, '\u2250-00338' ],
+    [ /\\doteqdot /g, '\u2251' ],
+    [ /\\fallingdotseq /g, '\u2252' ],
+    [ /\\risingdotseq /g, '\u2253' ],
+    [ /\\eqcirc /g, '\u2256' ],
+    [ /\\circeq /g, '\u2257' ],
+    [ /\\estimates /g, '\u2259' ],
+    [ /\\ElsevierGlyph\{225A\}/g, '\u225A' ],
+    [ /\\starequal /g, '\u225B' ],
+    [ /\\triangleq /g, '\u225C' ],
+    [ /\\ElsevierGlyph\{225F\}/g, '\u225F' ],
+    [ /\\not =/g, '\u2260' ],
+    [ /\\equiv /g, '\u2261' ],
+    [ /\\not\\equiv /g, '\u2262' ],
+    [ /\\leq /g, '\u2264' ],
+    [ /\\geq /g, '\u2265' ],
+    [ /\\leqq /g, '\u2266' ],
+    [ /\\geqq /g, '\u2267' ],
+    [ /\\lneqq /g, '\u2268' ],
+    [ /\\lvertneqq /g, '\u2268-0FE00' ],
+    [ /\\gneqq /g, '\u2269' ],
+    [ /\\gvertneqq /g, '\u2269-0FE00' ],
+    [ /\\ll /g, '\u226A' ],
+    [ /\\NotLessLess /g, '\u226A-00338' ],
+    [ /\\gg /g, '\u226B' ],
+    [ /\\NotGreaterGreater /g, '\u226B-00338' ],
+    [ /\\between /g, '\u226C' ],
+    [ /\\not\\kern\-0.3em\\times /g, '\u226D' ],
+    [ /\\not</g, '\u226E' ],
+    [ /\\not>/g, '\u226F' ],
+    [ /\\not\\leq /g, '\u2270' ],
+    [ /\\not\\geq /g, '\u2271' ],
+    [ /\\lessequivlnt /g, '\u2272' ],
+    [ /\\greaterequivlnt /g, '\u2273' ],
+    [ /\\ElsevierGlyph\{2274\}/g, '\u2274' ],
+    [ /\\ElsevierGlyph\{2275\}/g, '\u2275' ],
+    [ /\\lessgtr /g, '\u2276' ],
+    [ /\\gtrless /g, '\u2277' ],
+    [ /\\notlessgreater /g, '\u2278' ],
+    [ /\\notgreaterless /g, '\u2279' ],
+    [ /\\prec /g, '\u227A' ],
+    [ /\\succ /g, '\u227B' ],
+    [ /\\preccurlyeq /g, '\u227C' ],
+    [ /\\succcurlyeq /g, '\u227D' ],
+    [ /\\precapprox /g, '\u227E' ],
+    [ /\\NotPrecedesTilde /g, '\u227E-00338' ],
+    [ /\\succapprox /g, '\u227F' ],
+    [ /\\NotSucceedsTilde /g, '\u227F-00338' ],
+    [ /\\not\\prec /g, '\u2280' ],
+    [ /\\not\\succ /g, '\u2281' ],
+    [ /\\subset /g, '\u2282' ],
+    [ /\\supset /g, '\u2283' ],
+    [ /\\not\\subset /g, '\u2284' ],
+    [ /\\not\\supset /g, '\u2285' ],
+    [ /\\subseteq /g, '\u2286' ],
+    [ /\\supseteq /g, '\u2287' ],
+    [ /\\not\\subseteq /g, '\u2288' ],
+    [ /\\not\\supseteq /g, '\u2289' ],
+    [ /\\subsetneq /g, '\u228A' ],
+    [ /\\varsubsetneqq /g, '\u228A-0FE00' ],
+    [ /\\supsetneq /g, '\u228B' ],
+    [ /\\varsupsetneq /g, '\u228B-0FE00' ],
+    [ /\\uplus /g, '\u228E' ],
+    [ /\\sqsubset /g, '\u228F' ],
+    [ /\\NotSquareSubset /g, '\u228F-00338' ],
+    [ /\\sqsupset /g, '\u2290' ],
+    [ /\\NotSquareSuperset /g, '\u2290-00338' ],
+    [ /\\sqsubseteq /g, '\u2291' ],
+    [ /\\sqsupseteq /g, '\u2292' ],
+    [ /\\sqcap /g, '\u2293' ],
+    [ /\\sqcup /g, '\u2294' ],
+    [ /\\oplus /g, '\u2295' ],
+    [ /\\ominus /g, '\u2296' ],
+    [ /\\otimes /g, '\u2297' ],
+    [ /\\oslash /g, '\u2298' ],
+    [ /\\odot /g, '\u2299' ],
+    [ /\\circledcirc /g, '\u229A' ],
+    [ /\\circledast /g, '\u229B' ],
+    [ /\\circleddash /g, '\u229D' ],
+    [ /\\boxplus /g, '\u229E' ],
+    [ /\\boxminus /g, '\u229F' ],
+    [ /\\boxtimes /g, '\u22A0' ],
+    [ /\\boxdot /g, '\u22A1' ],
+    [ /\\vdash /g, '\u22A2' ],
+    [ /\\dashv /g, '\u22A3' ],
+    [ /\\top /g, '\u22A4' ],
+    [ /\\perp /g, '\u22A5' ],
+    [ /\\truestate /g, '\u22A7' ],
+    [ /\\forcesextra /g, '\u22A8' ],
+    [ /\\Vdash /g, '\u22A9' ],
+    [ /\\Vvdash /g, '\u22AA' ],
+    [ /\\VDash /g, '\u22AB' ],
+    [ /\\nvdash /g, '\u22AC' ],
+    [ /\\nvDash /g, '\u22AD' ],
+    [ /\\nVdash /g, '\u22AE' ],
+    [ /\\nVDash /g, '\u22AF' ],
+    [ /\\vartriangleleft /g, '\u22B2' ],
+    [ /\\vartriangleright /g, '\u22B3' ],
+    [ /\\trianglelefteq /g, '\u22B4' ],
+    [ /\\trianglerighteq /g, '\u22B5' ],
+    [ /\\original /g, '\u22B6' ],
+    [ /\\image /g, '\u22B7' ],
+    [ /\\multimap /g, '\u22B8' ],
+    [ /\\hermitconjmatrix /g, '\u22B9' ],
+    [ /\\intercal /g, '\u22BA' ],
+    [ /\\veebar /g, '\u22BB' ],
+    [ /\\rightanglearc /g, '\u22BE' ],
+    [ /\\ElsevierGlyph\{22C0\}/g, '\u22C0' ],
+    [ /\\ElsevierGlyph\{22C1\}/g, '\u22C1' ],
+    [ /\\bigcap /g, '\u22C2' ],
+    [ /\\bigcup /g, '\u22C3' ],
+    [ /\\diamond /g, '\u22C4' ],
+    [ /\\cdot /g, '\u22C5' ],
+    [ /\\star /g, '\u22C6' ],
+    [ /\\divideontimes /g, '\u22C7' ],
+    [ /\\bowtie /g, '\u22C8' ],
+    [ /\\ltimes /g, '\u22C9' ],
+    [ /\\rtimes /g, '\u22CA' ],
+    [ /\\leftthreetimes /g, '\u22CB' ],
+    [ /\\rightthreetimes /g, '\u22CC' ],
+    [ /\\backsimeq /g, '\u22CD' ],
+    [ /\\curlyvee /g, '\u22CE' ],
+    [ /\\curlywedge /g, '\u22CF' ],
+    [ /\\Subset /g, '\u22D0' ],
+    [ /\\Supset /g, '\u22D1' ],
+    [ /\\Cap /g, '\u22D2' ],
+    [ /\\Cup /g, '\u22D3' ],
+    [ /\\pitchfork /g, '\u22D4' ],
+    [ /\\lessdot /g, '\u22D6' ],
+    [ /\\gtrdot /g, '\u22D7' ],
+    [ /\\verymuchless /g, '\u22D8' ],
+    [ /\\verymuchgreater /g, '\u22D9' ],
+    [ /\\lesseqgtr /g, '\u22DA' ],
+    [ /\\gtreqless /g, '\u22DB' ],
+    [ /\\curlyeqprec /g, '\u22DE' ],
+    [ /\\curlyeqsucc /g, '\u22DF' ],
+    [ /\\not\\sqsubseteq /g, '\u22E2' ],
+    [ /\\not\\sqsupseteq /g, '\u22E3' ],
+    [ /\\Elzsqspne /g, '\u22E5' ],
+    [ /\\lnsim /g, '\u22E6' ],
+    [ /\\gnsim /g, '\u22E7' ],
+    [ /\\precedesnotsimilar /g, '\u22E8' ],
+    [ /\\succnsim /g, '\u22E9' ],
+    [ /\\ntriangleleft /g, '\u22EA' ],
+    [ /\\ntriangleright /g, '\u22EB' ],
+    [ /\\ntrianglelefteq /g, '\u22EC' ],
+    [ /\\ntrianglerighteq /g, '\u22ED' ],
+    [ /\\vdots /g, '\u22EE' ],
+    [ /\\cdots /g, '\u22EF' ],
+    [ /\\upslopeellipsis /g, '\u22F0' ],
+    [ /\\downslopeellipsis /g, '\u22F1' ],
+    [ /\\barwedge /g, '\u2305' ],
+    [ /\\perspcorrespond /g, '\u2306' ],
+    [ /\\lceil /g, '\u2308' ],
+    [ /\\rceil /g, '\u2309' ],
+    [ /\\lfloor /g, '\u230A' ],
+    [ /\\rfloor /g, '\u230B' ],
+    [ /\\recorder /g, '\u2315' ],
+    [ /\\mathchar"2208/g, '\u2316' ],
+    [ /\\ulcorner /g, '\u231C' ],
+    [ /\\urcorner /g, '\u231D' ],
+    [ /\\llcorner /g, '\u231E' ],
+    [ /\\lrcorner /g, '\u231F' ],
+    [ /\\frown /g, '\u2322' ],
+    [ /\\smile /g, '\u2323' ],
+    [ /\\langle /g, '\u2329' ],
+    [ /\\rangle /g, '\u232A' ],
+    [ /\\ElsevierGlyph\{E838\}/g, '\u233D' ],
+    [ /\\Elzdlcorn /g, '\u23A3' ],
+    [ /\\lmoustache /g, '\u23B0' ],
+    [ /\\rmoustache /g, '\u23B1' ],
+    [ /\\textvisiblespace /g, '\u2423' ],
+    [ /\\ding\{172\}/g, '\u2460' ],
+    [ /\\ding\{173\}/g, '\u2461' ],
+    [ /\\ding\{174\}/g, '\u2462' ],
+    [ /\\ding\{175\}/g, '\u2463' ],
+    [ /\\ding\{176\}/g, '\u2464' ],
+    [ /\\ding\{177\}/g, '\u2465' ],
+    [ /\\ding\{178\}/g, '\u2466' ],
+    [ /\\ding\{179\}/g, '\u2467' ],
+    [ /\\ding\{180\}/g, '\u2468' ],
+    [ /\\ding\{181\}/g, '\u2469' ],
+    [ /\\circledS /g, '\u24C8' ],
+    [ /\\Elzdshfnc /g, '\u2506' ],
+    [ /\\Elzsqfnw /g, '\u2519' ],
+    [ /\\diagup /g, '\u2571' ],
+    [ /\\ding\{110\}/g, '\u25A0' ],
+    [ /\\square /g, '\u25A1' ],
+    [ /\\blacksquare /g, '\u25AA' ],
+    [ /\\fbox\{~~\}/g, '\u25AD' ],
+    [ /\\Elzvrecto /g, '\u25AF' ],
+    [ /\\ElsevierGlyph\{E381\}/g, '\u25B1' ],
+    [ /\\ding\{115\}/g, '\u25B2' ],
+    [ /\\bigtriangleup /g, '\u25B3' ],
+    [ /\\blacktriangle /g, '\u25B4' ],
+    [ /\\vartriangle /g, '\u25B5' ],
+    [ /\\blacktriangleright /g, '\u25B8' ],
+    [ /\\triangleright /g, '\u25B9' ],
+    [ /\\ding\{116\}/g, '\u25BC' ],
+    [ /\\bigtriangledown /g, '\u25BD' ],
+    [ /\\blacktriangledown /g, '\u25BE' ],
+    [ /\\triangledown /g, '\u25BF' ],
+    [ /\\blacktriangleleft /g, '\u25C2' ],
+    [ /\\triangleleft /g, '\u25C3' ],
+    [ /\\ding\{117\}/g, '\u25C6' ],
+    [ /\\lozenge /g, '\u25CA' ],
+    [ /\\bigcirc /g, '\u25CB' ],
+    [ /\\ding\{108\}/g, '\u25CF' ],
+    [ /\\Elzcirfl /g, '\u25D0' ],
+    [ /\\Elzcirfr /g, '\u25D1' ],
+    [ /\\Elzcirfb /g, '\u25D2' ],
+    [ /\\ding\{119\}/g, '\u25D7' ],
+    [ /\\Elzrvbull /g, '\u25D8' ],
+    [ /\\Elzsqfl /g, '\u25E7' ],
+    [ /\\Elzsqfr /g, '\u25E8' ],
+    [ /\\Elzsqfse /g, '\u25EA' ],
+    [ /\\bigcirc /g, '\u25EF' ],
+    [ /\\ding\{72\}/g, '\u2605' ],
+    [ /\\ding\{73\}/g, '\u2606' ],
+    [ /\\ding\{37\}/g, '\u260E' ],
+    [ /\\ding\{42\}/g, '\u261B' ],
+    [ /\\ding\{43\}/g, '\u261E' ],
+    [ /\\rightmoon /g, '\u263E' ],
+    [ /\\mercury /g, '\u263F' ],
+    [ /\\venus /g, '\u2640' ],
+    [ /\\male /g, '\u2642' ],
+    [ /\\jupiter /g, '\u2643' ],
+    [ /\\saturn /g, '\u2644' ],
+    [ /\\uranus /g, '\u2645' ],
+    [ /\\neptune /g, '\u2646' ],
+    [ /\\pluto /g, '\u2647' ],
+    [ /\\aries /g, '\u2648' ],
+    [ /\\taurus /g, '\u2649' ],
+    [ /\\gemini /g, '\u264A' ],
+    [ /\\cancer /g, '\u264B' ],
+    [ /\\leo /g, '\u264C' ],
+    [ /\\virgo /g, '\u264D' ],
+    [ /\\libra /g, '\u264E' ],
+    [ /\\scorpio /g, '\u264F' ],
+    [ /\\sagittarius /g, '\u2650' ],
+    [ /\\capricornus /g, '\u2651' ],
+    [ /\\aquarius /g, '\u2652' ],
+    [ /\\pisces /g, '\u2653' ],
+    [ /\\ding\{171\}/g, '\u2660' ],
+    [ /\\diamond /g, '\u2662' ],
+    [ /\\ding\{168\}/g, '\u2663' ],
+    [ /\\ding\{170\}/g, '\u2665' ],
+    [ /\\ding\{169\}/g, '\u2666' ],
+    [ /\\quarternote /g, '\u2669' ],
+    [ /\\eighthnote /g, '\u266A' ],
+    [ /\\flat /g, '\u266D' ],
+    [ /\\natural /g, '\u266E' ],
+    [ /\\sharp /g, '\u266F' ],
+    [ /\\ding\{33\}/g, '\u2701' ],
+    [ /\\ding\{34\}/g, '\u2702' ],
+    [ /\\ding\{35\}/g, '\u2703' ],
+    [ /\\ding\{36\}/g, '\u2704' ],
+    [ /\\ding\{38\}/g, '\u2706' ],
+    [ /\\ding\{39\}/g, '\u2707' ],
+    [ /\\ding\{40\}/g, '\u2708' ],
+    [ /\\ding\{41\}/g, '\u2709' ],
+    [ /\\ding\{44\}/g, '\u270C' ],
+    [ /\\ding\{45\}/g, '\u270D' ],
+    [ /\\ding\{46\}/g, '\u270E' ],
+    [ /\\ding\{47\}/g, '\u270F' ],
+    [ /\\ding\{48\}/g, '\u2710' ],
+    [ /\\ding\{49\}/g, '\u2711' ],
+    [ /\\ding\{50\}/g, '\u2712' ],
+    [ /\\ding\{51\}/g, '\u2713' ],
+    [ /\\ding\{52\}/g, '\u2714' ],
+    [ /\\ding\{53\}/g, '\u2715' ],
+    [ /\\ding\{54\}/g, '\u2716' ],
+    [ /\\ding\{55\}/g, '\u2717' ],
+    [ /\\ding\{56\}/g, '\u2718' ],
+    [ /\\ding\{57\}/g, '\u2719' ],
+    [ /\\ding\{58\}/g, '\u271A' ],
+    [ /\\ding\{59\}/g, '\u271B' ],
+    [ /\\ding\{60\}/g, '\u271C' ],
+    [ /\\ding\{61\}/g, '\u271D' ],
+    [ /\\ding\{62\}/g, '\u271E' ],
+    [ /\\ding\{63\}/g, '\u271F' ],
+    [ /\\ding\{64\}/g, '\u2720' ],
+    [ /\\ding\{65\}/g, '\u2721' ],
+    [ /\\ding\{66\}/g, '\u2722' ],
+    [ /\\ding\{67\}/g, '\u2723' ],
+    [ /\\ding\{68\}/g, '\u2724' ],
+    [ /\\ding\{69\}/g, '\u2725' ],
+    [ /\\ding\{70\}/g, '\u2726' ],
+    [ /\\ding\{71\}/g, '\u2727' ],
+    [ /\\ding\{73\}/g, '\u2729' ],
+    [ /\\ding\{74\}/g, '\u272A' ],
+    [ /\\ding\{75\}/g, '\u272B' ],
+    [ /\\ding\{76\}/g, '\u272C' ],
+    [ /\\ding\{77\}/g, '\u272D' ],
+    [ /\\ding\{78\}/g, '\u272E' ],
+    [ /\\ding\{79\}/g, '\u272F' ],
+    [ /\\ding\{80\}/g, '\u2730' ],
+    [ /\\ding\{81\}/g, '\u2731' ],
+    [ /\\ding\{82\}/g, '\u2732' ],
+    [ /\\ding\{83\}/g, '\u2733' ],
+    [ /\\ding\{84\}/g, '\u2734' ],
+    [ /\\ding\{85\}/g, '\u2735' ],
+    [ /\\ding\{86\}/g, '\u2736' ],
+    [ /\\ding\{87\}/g, '\u2737' ],
+    [ /\\ding\{88\}/g, '\u2738' ],
+    [ /\\ding\{89\}/g, '\u2739' ],
+    [ /\\ding\{90\}/g, '\u273A' ],
+    [ /\\ding\{91\}/g, '\u273B' ],
+    [ /\\ding\{92\}/g, '\u273C' ],
+    [ /\\ding\{93\}/g, '\u273D' ],
+    [ /\\ding\{94\}/g, '\u273E' ],
+    [ /\\ding\{95\}/g, '\u273F' ],
+    [ /\\ding\{96\}/g, '\u2740' ],
+    [ /\\ding\{97\}/g, '\u2741' ],
+    [ /\\ding\{98\}/g, '\u2742' ],
+    [ /\\ding\{99\}/g, '\u2743' ],
+    [ /\\ding\{100\}/g, '\u2744' ],
+    [ /\\ding\{101\}/g, '\u2745' ],
+    [ /\\ding\{102\}/g, '\u2746' ],
+    [ /\\ding\{103\}/g, '\u2747' ],
+    [ /\\ding\{104\}/g, '\u2748' ],
+    [ /\\ding\{105\}/g, '\u2749' ],
+    [ /\\ding\{106\}/g, '\u274A' ],
+    [ /\\ding\{107\}/g, '\u274B' ],
+    [ /\\ding\{109\}/g, '\u274D' ],
+    [ /\\ding\{111\}/g, '\u274F' ],
+    [ /\\ding\{112\}/g, '\u2750' ],
+    [ /\\ding\{113\}/g, '\u2751' ],
+    [ /\\ding\{114\}/g, '\u2752' ],
+    [ /\\ding\{118\}/g, '\u2756' ],
+    [ /\\ding\{120\}/g, '\u2758' ],
+    [ /\\ding\{121\}/g, '\u2759' ],
+    [ /\\ding\{122\}/g, '\u275A' ],
+    [ /\\ding\{123\}/g, '\u275B' ],
+    [ /\\ding\{124\}/g, '\u275C' ],
+    [ /\\ding\{125\}/g, '\u275D' ],
+    [ /\\ding\{126\}/g, '\u275E' ],
+    [ /\\ding\{161\}/g, '\u2761' ],
+    [ /\\ding\{162\}/g, '\u2762' ],
+    [ /\\ding\{163\}/g, '\u2763' ],
+    [ /\\ding\{164\}/g, '\u2764' ],
+    [ /\\ding\{165\}/g, '\u2765' ],
+    [ /\\ding\{166\}/g, '\u2766' ],
+    [ /\\ding\{167\}/g, '\u2767' ],
+    [ /\\ding\{182\}/g, '\u2776' ],
+    [ /\\ding\{183\}/g, '\u2777' ],
+    [ /\\ding\{184\}/g, '\u2778' ],
+    [ /\\ding\{185\}/g, '\u2779' ],
+    [ /\\ding\{186\}/g, '\u277A' ],
+    [ /\\ding\{187\}/g, '\u277B' ],
+    [ /\\ding\{188\}/g, '\u277C' ],
+    [ /\\ding\{189\}/g, '\u277D' ],
+    [ /\\ding\{190\}/g, '\u277E' ],
+    [ /\\ding\{191\}/g, '\u277F' ],
+    [ /\\ding\{192\}/g, '\u2780' ],
+    [ /\\ding\{193\}/g, '\u2781' ],
+    [ /\\ding\{194\}/g, '\u2782' ],
+    [ /\\ding\{195\}/g, '\u2783' ],
+    [ /\\ding\{196\}/g, '\u2784' ],
+    [ /\\ding\{197\}/g, '\u2785' ],
+    [ /\\ding\{198\}/g, '\u2786' ],
+    [ /\\ding\{199\}/g, '\u2787' ],
+    [ /\\ding\{200\}/g, '\u2788' ],
+    [ /\\ding\{201\}/g, '\u2789' ],
+    [ /\\ding\{202\}/g, '\u278A' ],
+    [ /\\ding\{203\}/g, '\u278B' ],
+    [ /\\ding\{204\}/g, '\u278C' ],
+    [ /\\ding\{205\}/g, '\u278D' ],
+    [ /\\ding\{206\}/g, '\u278E' ],
+    [ /\\ding\{207\}/g, '\u278F' ],
+    [ /\\ding\{208\}/g, '\u2790' ],
+    [ /\\ding\{209\}/g, '\u2791' ],
+    [ /\\ding\{210\}/g, '\u2792' ],
+    [ /\\ding\{211\}/g, '\u2793' ],
+    [ /\\ding\{212\}/g, '\u2794' ],
+    [ /\\ding\{216\}/g, '\u2798' ],
+    [ /\\ding\{217\}/g, '\u2799' ],
+    [ /\\ding\{218\}/g, '\u279A' ],
+    [ /\\ding\{219\}/g, '\u279B' ],
+    [ /\\ding\{220\}/g, '\u279C' ],
+    [ /\\ding\{221\}/g, '\u279D' ],
+    [ /\\ding\{222\}/g, '\u279E' ],
+    [ /\\ding\{223\}/g, '\u279F' ],
+    [ /\\ding\{224\}/g, '\u27A0' ],
+    [ /\\ding\{225\}/g, '\u27A1' ],
+    [ /\\ding\{226\}/g, '\u27A2' ],
+    [ /\\ding\{227\}/g, '\u27A3' ],
+    [ /\\ding\{228\}/g, '\u27A4' ],
+    [ /\\ding\{229\}/g, '\u27A5' ],
+    [ /\\ding\{230\}/g, '\u27A6' ],
+    [ /\\ding\{231\}/g, '\u27A7' ],
+    [ /\\ding\{232\}/g, '\u27A8' ],
+    [ /\\ding\{233\}/g, '\u27A9' ],
+    [ /\\ding\{234\}/g, '\u27AA' ],
+    [ /\\ding\{235\}/g, '\u27AB' ],
+    [ /\\ding\{236\}/g, '\u27AC' ],
+    [ /\\ding\{237\}/g, '\u27AD' ],
+    [ /\\ding\{238\}/g, '\u27AE' ],
+    [ /\\ding\{239\}/g, '\u27AF' ],
+    [ /\\ding\{241\}/g, '\u27B1' ],
+    [ /\\ding\{242\}/g, '\u27B2' ],
+    [ /\\ding\{243\}/g, '\u27B3' ],
+    [ /\\ding\{244\}/g, '\u27B4' ],
+    [ /\\ding\{245\}/g, '\u27B5' ],
+    [ /\\ding\{246\}/g, '\u27B6' ],
+    [ /\\ding\{247\}/g, '\u27B7' ],
+    [ /\\ding\{248\}/g, '\u27B8' ],
+    [ /\\ding\{249\}/g, '\u27B9' ],
+    [ /\\ding\{250\}/g, '\u27BA' ],
+    [ /\\ding\{251\}/g, '\u27BB' ],
+    [ /\\ding\{252\}/g, '\u27BC' ],
+    [ /\\ding\{253\}/g, '\u27BD' ],
+    [ /\\ding\{254\}/g, '\u27BE' ],
+    [ /\\longleftarrow /g, '\u27F5' ],
+    [ /\\longrightarrow /g, '\u27F6' ],
+    [ /\\longleftrightarrow /g, '\u27F7' ],
+    [ /\\Longleftarrow /g, '\u27F8' ],
+    [ /\\Longrightarrow /g, '\u27F9' ],
+    [ /\\Longleftrightarrow /g, '\u27FA' ],
+    [ /\\longmapsto /g, '\u27FC' ],
+    [ /\\sim\\joinrel\\leadsto/g, '\u27FF' ],
+    [ /\\ElsevierGlyph\{E212\}/g, '\u2905' ],
+    [ /\\UpArrowBar /g, '\u2912' ],
+    [ /\\DownArrowBar /g, '\u2913' ],
+    [ /\\ElsevierGlyph\{E20C\}/g, '\u2923' ],
+    [ /\\ElsevierGlyph\{E20D\}/g, '\u2924' ],
+    [ /\\ElsevierGlyph\{E20B\}/g, '\u2925' ],
+    [ /\\ElsevierGlyph\{E20A\}/g, '\u2926' ],
+    [ /\\ElsevierGlyph\{E211\}/g, '\u2927' ],
+    [ /\\ElsevierGlyph\{E20E\}/g, '\u2928' ],
+    [ /\\ElsevierGlyph\{E20F\}/g, '\u2929' ],
+    [ /\\ElsevierGlyph\{E210\}/g, '\u292A' ],
+    [ /\\ElsevierGlyph\{E21C\}/g, '\u2933' ],
+    [ /\\ElsevierGlyph\{E21D\}/g, '\u2933-00338' ],
+    [ /\\ElsevierGlyph\{E21A\}/g, '\u2936' ],
+    [ /\\ElsevierGlyph\{E219\}/g, '\u2937' ],
+    [ /\\Elolarr /g, '\u2940' ],
+    [ /\\Elorarr /g, '\u2941' ],
+    [ /\\ElzRlarr /g, '\u2942' ],
+    [ /\\ElzrLarr /g, '\u2944' ],
+    [ /\\Elzrarrx /g, '\u2947' ],
+    [ /\\LeftRightVector /g, '\u294E' ],
+    [ /\\RightUpDownVector /g, '\u294F' ],
+    [ /\\DownLeftRightVector /g, '\u2950' ],
+    [ /\\LeftUpDownVector /g, '\u2951' ],
+    [ /\\LeftVectorBar /g, '\u2952' ],
+    [ /\\RightVectorBar /g, '\u2953' ],
+    [ /\\RightUpVectorBar /g, '\u2954' ],
+    [ /\\RightDownVectorBar /g, '\u2955' ],
+    [ /\\DownLeftVectorBar /g, '\u2956' ],
+    [ /\\DownRightVectorBar /g, '\u2957' ],
+    [ /\\LeftUpVectorBar /g, '\u2958' ],
+    [ /\\LeftDownVectorBar /g, '\u2959' ],
+    [ /\\LeftTeeVector /g, '\u295A' ],
+    [ /\\RightTeeVector /g, '\u295B' ],
+    [ /\\RightUpTeeVector /g, '\u295C' ],
+    [ /\\RightDownTeeVector /g, '\u295D' ],
+    [ /\\DownLeftTeeVector /g, '\u295E' ],
+    [ /\\DownRightTeeVector /g, '\u295F' ],
+    [ /\\LeftUpTeeVector /g, '\u2960' ],
+    [ /\\LeftDownTeeVector /g, '\u2961' ],
+    [ /\\UpEquilibrium /g, '\u296E' ],
+    [ /\\ReverseUpEquilibrium /g, '\u296F' ],
+    [ /\\RoundImplies /g, '\u2970' ],
+    [ /\\ElsevierGlyph\{E214\}/g, '\u297C' ],
+    [ /\\ElsevierGlyph\{E215\}/g, '\u297D' ],
+    [ /\\Elztfnc /g, '\u2980' ],
+    [ /\\ElsevierGlyph\{3018\}/g, '\u2985' ],
+    [ /\\Elroang /g, '\u2986' ],
+    [ /\\ElsevierGlyph\{E291\}/g, '\u2994' ],
+    [ /\\Elzddfnc /g, '\u2999' ],
+    [ /\\Angle /g, '\u299C' ],
+    [ /\\Elzlpargt /g, '\u29A0' ],
+    [ /\\ElsevierGlyph\{E260\}/g, '\u29B5' ],
+    [ /\\ElsevierGlyph\{E61B\}/g, '\u29B6' ],
+    [ /\\ElzLap /g, '\u29CA' ],
+    [ /\\Elzdefas /g, '\u29CB' ],
+    [ /\\LeftTriangleBar /g, '\u29CF' ],
+    [ /\\NotLeftTriangleBar /g, '\u29CF-00338' ],
+    [ /\\RightTriangleBar /g, '\u29D0' ],
+    [ /\\NotRightTriangleBar /g, '\u29D0-00338' ],
+    [ /\\ElsevierGlyph\{E372\}/g, '\u29DC' ],
+    [ /\\blacklozenge /g, '\u29EB' ],
+    [ /\\RuleDelayed /g, '\u29F4' ],
+    [ /\\Elxuplus /g, '\u2A04' ],
+    [ /\\ElzThr /g, '\u2A05' ],
+    [ /\\Elxsqcup /g, '\u2A06' ],
+    [ /\\ElzInf /g, '\u2A07' ],
+    [ /\\ElzSup /g, '\u2A08' ],
+    [ /\\ElzCint /g, '\u2A0D' ],
+    [ /\\clockoint /g, '\u2A0F' ],
+    [ /\\ElsevierGlyph\{E395\}/g, '\u2A10' ],
+    [ /\\sqrint /g, '\u2A16' ],
+    [ /\\ElsevierGlyph\{E25A\}/g, '\u2A25' ],
+    [ /\\ElsevierGlyph\{E25B\}/g, '\u2A2A' ],
+    [ /\\ElsevierGlyph\{E25C\}/g, '\u2A2D' ],
+    [ /\\ElsevierGlyph\{E25D\}/g, '\u2A2E' ],
+    [ /\\ElzTimes /g, '\u2A2F' ],
+    [ /\\ElsevierGlyph\{E25E\}/g, '\u2A34' ],
+    [ /\\ElsevierGlyph\{E25E\}/g, '\u2A35' ],  // unreachable: identical pattern to the previous entry
+    [ /\\ElsevierGlyph\{E259\}/g, '\u2A3C' ],
+    [ /\\amalg /g, '\u2A3F' ],
+    [ /\\ElzAnd /g, '\u2A53' ],
+    [ /\\ElzOr /g, '\u2A54' ],
+    [ /\\ElsevierGlyph\{E36E\}/g, '\u2A55' ],
+    [ /\\ElOr /g, '\u2A56' ],
+    [ /\\perspcorrespond /g, '\u2A5E' ],
+    [ /\\Elzminhat /g, '\u2A5F' ],
+    [ /\\ElsevierGlyph\{225A\}/g, '\u2A63' ],
+    [ /\\stackrel\{*\}\{=\}/g, '\u2A6E' ],
+    [ /\\Equal /g, '\u2A75' ],
+    [ /\\leqslant /g, '\u2A7D' ],
+    [ /\\nleqslant /g, '\u2A7D-00338' ],
+    [ /\\geqslant /g, '\u2A7E' ],
+    [ /\\ngeqslant /g, '\u2A7E-00338' ],
+    [ /\\lessapprox /g, '\u2A85' ],
+    [ /\\gtrapprox /g, '\u2A86' ],
+    [ /\\lneq /g, '\u2A87' ],
+    [ /\\gneq /g, '\u2A88' ],
+    [ /\\lnapprox /g, '\u2A89' ],
+    [ /\\gnapprox /g, '\u2A8A' ],
+    [ /\\lesseqqgtr /g, '\u2A8B' ],
+    [ /\\gtreqqless /g, '\u2A8C' ],
+    [ /\\eqslantless /g, '\u2A95' ],
+    [ /\\eqslantgtr /g, '\u2A96' ],
+    [ /\\Pisymbol\{ppi020\}\{117\}/g, '\u2A9D' ],
+    [ /\\Pisymbol\{ppi020\}\{105\}/g, '\u2A9E' ],
+    [ /\\NestedLessLess /g, '\u2AA1' ],
+    [ /\\NotNestedLessLess /g, '\u2AA1-00338' ],
+    [ /\\NestedGreaterGreater /g, '\u2AA2' ],
+    [ /\\NotNestedGreaterGreater /g, '\u2AA2-00338' ],
+    [ /\\preceq /g, '\u2AAF' ],
+    [ /\\not\\preceq /g, '\u2AAF-00338' ],
+    [ /\\succeq /g, '\u2AB0' ],
+    [ /\\not\\succeq /g, '\u2AB0-00338' ],
+    [ /\\precneqq /g, '\u2AB5' ],
+    [ /\\succneqq /g, '\u2AB6' ],
+    [ /\\precapprox /g, '\u2AB7' ],
+    [ /\\succapprox /g, '\u2AB8' ],
+    [ /\\precnapprox /g, '\u2AB9' ],
+    [ /\\succnapprox /g, '\u2ABA' ],
+    [ /\\subseteqq /g, '\u2AC5' ],
+    [ /\\nsubseteqq /g, '\u2AC5-00338' ],
+    [ /\\supseteqq /g, '\u2AC6' ],
+    [ /\\nsupseteqq/g, '\u2AC6-00338' ],
+    [ /\\subsetneqq /g, '\u2ACB' ],
+    [ /\\supsetneqq /g, '\u2ACC' ],
+    [ /\\ElsevierGlyph\{E30D\}/g, '\u2AEB' ],
+    [ /\\Elztdcol /g, '\u2AF6' ],
+    [ /\\ElsevierGlyph\{300A\}/g, '\u300A' ],
+    [ /\\ElsevierGlyph\{300B\}/g, '\u300B' ],
+    [ /\\ElsevierGlyph\{3018\}/g, '\u3018' ],
+    [ /\\ElsevierGlyph\{3019\}/g, '\u3019' ],
+    [ /\\openbracketleft /g, '\u301A' ],
+    [ /\\openbracketright /g, '\u301B' ]
+  ]
+
+  return this;
+} // end function BibtexParser
+
+if (typeof module !== 'undefined' && module.exports) {
+  module.exports = BibtexParser;
+}
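
The table that closes BibtexParser above drives its LaTeX-to-Unicode
conversion: each entry pairs a global regex for a LaTeX command with a
Unicode replacement, and the pairs are applied in order over a field value.
A minimal sketch of how such a table is consumed (the helper name is
illustrative, not part of the file):

    // `mappings` is an array of [RegExp, string] pairs like the table above,
    // applied first-to-last; a later duplicate pattern never fires because
    // the earlier identical regex has already consumed every match.
    function latexToUnicode(value, mappings) {
      for (var i = 0; i < mappings.length; i++) {
        value = value.replace(mappings[i][0], mappings[i][1]);
      }
      return value;
    }

    latexToUnicode('\\leftarrow x \\rightarrow ', mappings);
    // -> '←x →' (each pattern also consumes its trailing space)
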
diff --git a/client/galaxy/scripts/libs/bootstrap-tour.js b/client/galaxy/scripts/libs/bootstrap-tour.js
new file mode 100644
index 0000000..009525a
--- /dev/null
+++ b/client/galaxy/scripts/libs/bootstrap-tour.js
@@ -0,0 +1,927 @@
+/* ========================================================================
+ * bootstrap-tour - v0.10.2
+ * http://bootstraptour.com
+ * ========================================================================
+ * Copyright 2012-2015 Ulrich Sossou
+ *
+ * ========================================================================
+ * Licensed under the MIT License (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ========================================================================
+ */
+
+(function($, window) {
+  var Tour, document;
+  document = window.document;
+  Tour = (function() {
+    function Tour(options) {
+      var storage;
+      try {
+        storage = window.localStorage;
+      } catch (_error) {
+        storage = false;
+      }
+      this._options = $.extend({
+        name: 'tour',
+        steps: [],
+        container: 'body',
+        autoscroll: true,
+        keyboard: true,
+        storage: storage,
+        debug: false,
+        backdrop: false,
+        backdropContainer: 'body',
+        backdropPadding: 0,
+        redirect: true,
+        orphan: false,
+        duration: false,
+        delay: false,
+        basePath: '',
+        template: '<div class="popover" role="tooltip"> <div class="arrow"></div> <h3 class="popover-title"></h3> <div class="popover-content"></div> <div class="popover-navigation"> <div class="btn-group"> <button class="btn btn-sm btn-default" data-role="prev">« Prev</button> <button class="btn btn-sm btn-default" data-role="next">Next »</button> <button class="btn btn-sm btn-default" data-role="pause-resume" data-pause-text="Pause" data-resume-text="Resume">Pause</button>  [...]
+        afterSetState: function(key, value) {},
+        afterGetState: function(key, value) {},
+        afterRemoveState: function(key) {},
+        onStart: function(tour) {},
+        onEnd: function(tour) {},
+        onShow: function(tour) {},
+        onShown: function(tour) {},
+        onHide: function(tour) {},
+        onHidden: function(tour) {},
+        onNext: function(tour) {},
+        onPrev: function(tour) {},
+        onPause: function(tour, duration) {},
+        onResume: function(tour, duration) {},
+        onRedirectError: function(tour) {}
+      }, options);
+      this._force = false;
+      this._inited = false;
+      this._current = null;
+      this.backdrop = {
+        overlay: null,
+        $element: null,
+        $background: null,
+        backgroundShown: false,
+        overlayElementShown: false
+      };
+      this;  // compiled-CoffeeScript artifact (the constructor's trailing expression); a harmless no-op
+    }
+
+    Tour.prototype.addSteps = function(steps) {
+      var step, _i, _len;
+      for (_i = 0, _len = steps.length; _i < _len; _i++) {
+        step = steps[_i];
+        this.addStep(step);
+      }
+      return this;
+    };
+
+    Tour.prototype.addStep = function(step) {
+      this._options.steps.push(step);
+      return this;
+    };
+
+    Tour.prototype.getStep = function(i) {
+      if (this._options.steps[i] != null) {
+        return $.extend({
+          id: "step-" + i,
+          path: '',
+          host: '',
+          placement: 'right',
+          title: '',
+          content: '<p></p>',
+          next: i === this._options.steps.length - 1 ? -1 : i + 1,
+          prev: i - 1,
+          animation: true,
+          container: this._options.container,
+          autoscroll: this._options.autoscroll,
+          backdrop: this._options.backdrop,
+          backdropContainer: this._options.backdropContainer,
+          backdropPadding: this._options.backdropPadding,
+          redirect: this._options.redirect,
+          reflexElement: this._options.steps[i].element,
+          orphan: this._options.orphan,
+          duration: this._options.duration,
+          delay: this._options.delay,
+          template: this._options.template,
+          onShow: this._options.onShow,
+          onShown: this._options.onShown,
+          onHide: this._options.onHide,
+          onHidden: this._options.onHidden,
+          onNext: this._options.onNext,
+          onPrev: this._options.onPrev,
+          onPause: this._options.onPause,
+          onResume: this._options.onResume,
+          onRedirectError: this._options.onRedirectError
+        }, this._options.steps[i]);
+      }
+    };
+
+    Tour.prototype.init = function(force) {
+      this._force = force;
+      if (this.ended()) {
+        this._debug('Tour ended, init prevented.');
+        return this;
+      }
+      this.setCurrentStep();
+      this._initMouseNavigation();
+      this._initKeyboardNavigation();
+      this._onResize((function(_this) {
+        return function() {
+          return _this.showStep(_this._current);
+        };
+      })(this));
+      if (this._current !== null) {
+        this.showStep(this._current);
+      }
+      this._inited = true;
+      return this;
+    };
+
+    Tour.prototype.start = function(force) {
+      var promise;
+      if (force == null) {
+        force = false;
+      }
+      if (!this._inited) {
+        this.init(force);
+      }
+      if (this._current === null) {
+        promise = this._makePromise(this._options.onStart != null ? this._options.onStart(this) : void 0);
+        this._callOnPromiseDone(promise, this.showStep, 0);
+      }
+      return this;
+    };
+
+    Tour.prototype.next = function() {
+      var promise;
+      promise = this.hideStep(this._current);
+      return this._callOnPromiseDone(promise, this._showNextStep);
+    };
+
+    Tour.prototype.prev = function() {
+      var promise;
+      promise = this.hideStep(this._current);
+      return this._callOnPromiseDone(promise, this._showPrevStep);
+    };
+
+    Tour.prototype.goTo = function(i) {
+      var promise;
+      promise = this.hideStep(this._current);
+      return this._callOnPromiseDone(promise, this.showStep, i);
+    };
+
+    Tour.prototype.end = function() {
+      var endHelper, promise;
+      endHelper = (function(_this) {
+        return function(e) {
+          $(document).off("click.tour-" + _this._options.name);
+          $(document).off("keyup.tour-" + _this._options.name);
+          $(window).off("resize.tour-" + _this._options.name);
+          _this._setState('end', 'yes');
+          _this._inited = false;
+          _this._force = false;
+          _this._clearTimer();
+          if (_this._options.onEnd != null) {
+            return _this._options.onEnd(_this);
+          }
+        };
+      })(this);
+      promise = this.hideStep(this._current);
+      return this._callOnPromiseDone(promise, endHelper);
+    };
+
+    Tour.prototype.ended = function() {
+      return !this._force && !!this._getState('end');
+    };
+
+    Tour.prototype.restart = function() {
+      this._removeState('current_step');
+      this._removeState('end');
+      this._removeState('redirect_to');
+      return this.start();
+    };
+
+    Tour.prototype.pause = function() {
+      var step;
+      step = this.getStep(this._current);
+      if (!(step && step.duration)) {
+        return this;
+      }
+      this._paused = true;
+      this._duration -= new Date().getTime() - this._start;
+      window.clearTimeout(this._timer);
+      this._debug("Paused/Stopped step " + (this._current + 1) + " timer (" + this._duration + " remaining).");
+      if (step.onPause != null) {
+        return step.onPause(this, this._duration);
+      }
+    };
+
+    Tour.prototype.resume = function() {
+      var step;
+      step = this.getStep(this._current);
+      if (!(step && step.duration)) {
+        return this;
+      }
+      this._paused = false;
+      this._start = new Date().getTime();
+      this._duration = this._duration || step.duration;
+      this._timer = window.setTimeout((function(_this) {
+        return function() {
+          if (_this._isLast()) {
+            return _this.next();
+          } else {
+            return _this.end();
+          }
+        };
+      })(this), this._duration);
+      this._debug("Started step " + (this._current + 1) + " timer with duration " + this._duration);
+      if ((step.onResume != null) && this._duration !== step.duration) {
+        return step.onResume(this, this._duration);
+      }
+    };
+
+    Tour.prototype.hideStep = function(i) {
+      var hideStepHelper, promise, step;
+      step = this.getStep(i);
+      if (!step) {
+        return;
+      }
+      this._clearTimer();
+      promise = this._makePromise(step.onHide != null ? step.onHide(this, i) : void 0);
+      hideStepHelper = (function(_this) {
+        return function(e) {
+          var $element;
+          $element = $(step.element);
+          if (!($element.data('bs.popover') || $element.data('popover'))) {
+            $element = $('body');
+          }
+          $element.popover('destroy').removeClass("tour-" + _this._options.name + "-element tour-" + _this._options.name + "-" + i + "-element");
+          $element.removeData('bs.popover');
+          if (step.reflex) {
+            $(step.reflexElement).removeClass('tour-step-element-reflex').off("" + (_this._reflexEvent(step.reflex)) + ".tour-" + _this._options.name);
+          }
+          if (step.backdrop) {
+            _this._hideBackdrop();
+          }
+          if (step.onHidden != null) {
+            return step.onHidden(_this);
+          }
+        };
+      })(this);
+      this._callOnPromiseDone(promise, hideStepHelper);
+      return promise;
+    };
+
+    Tour.prototype.showStep = function(i) {
+      var promise, showStepHelper, skipToPrevious, step;
+      if (this.ended()) {
+        this._debug('Tour ended, showStep prevented.');
+        return this;
+      }
+      step = this.getStep(i);
+      if (!step) {
+        return;
+      }
+      skipToPrevious = i < this._current;
+      promise = this._makePromise(step.onShow != null ? step.onShow(this, i) : void 0);
+      showStepHelper = (function(_this) {
+        return function(e) {
+          var path, showPopoverAndOverlay;
+          _this.setCurrentStep(i);
+          path = (function() {
+            switch ({}.toString.call(step.path)) {
+              case '[object Function]':
+                return step.path();
+              case '[object String]':
+                return this._options.basePath + step.path;
+              default:
+                return step.path;
+            }
+          }).call(_this);
+          if (_this._isRedirect(step.host, path, document.location)) {
+            _this._redirect(step, i, path);
+            if (!_this._isJustPathHashDifferent(step.host, path, document.location)) {
+              return;
+            }
+          }
+          if (_this._isOrphan(step)) {
+            if (step.orphan === false) {
+              _this._debug("Skip the orphan step " + (_this._current + 1) + ".\nOrphan option is false and the element does not exist or is hidden.");
+              if (skipToPrevious) {
+                _this._showPrevStep();
+              } else {
+                _this._showNextStep();
+              }
+              return;
+            }
+            _this._debug("Show the orphan step " + (_this._current + 1) + ". Orphans option is true.");
+          }
+          if (step.backdrop) {
+            _this._showBackdrop(step);
+          }
+          showPopoverAndOverlay = function() {
+            if (_this.getCurrentStep() !== i || _this.ended()) {
+              return;
+            }
+            if ((step.element != null) && step.backdrop) {
+              _this._showOverlayElement(step);
+            }
+            _this._showPopover(step, i);
+            if (step.onShown != null) {
+              step.onShown(_this);
+            }
+            return _this._debug("Step " + (_this._current + 1) + " of " + _this._options.steps.length);
+          };
+          if (step.autoscroll) {
+            _this._scrollIntoView(step.element, showPopoverAndOverlay);
+          } else {
+            showPopoverAndOverlay();
+          }
+          if (step.duration) {
+            return _this.resume();
+          }
+        };
+      })(this);
+      if (step.delay) {
+        this._debug("Wait " + step.delay + " milliseconds to show the step " + (this._current + 1));
+        window.setTimeout((function(_this) {
+          return function() {
+            return _this._callOnPromiseDone(promise, showStepHelper);
+          };
+        })(this), step.delay);
+      } else {
+        this._callOnPromiseDone(promise, showStepHelper);
+      }
+      return promise;
+    };
+
+    Tour.prototype.getCurrentStep = function() {
+      return this._current;
+    };
+
+    Tour.prototype.setCurrentStep = function(value) {
+      if (value != null) {
+        this._current = value;
+        this._setState('current_step', value);
+      } else {
+        this._current = this._getState('current_step');
+        this._current = this._current === null ? null : parseInt(this._current, 10);
+      }
+      return this;
+    };
+
+    Tour.prototype.redraw = function() {
+      // pass the step object itself: _showOverlayElement reads step.element,
+      // so handing it the raw element made redraw() a silent no-op
+      return this._showOverlayElement(this.getStep(this.getCurrentStep()), true);
+    };
+
+    Tour.prototype._setState = function(key, value) {
+      var e, keyName;
+      if (this._options.storage) {
+        keyName = "" + this._options.name + "_" + key;
+        try {
+          this._options.storage.setItem(keyName, value);
+        } catch (_error) {
+          e = _error;
+          if (e.code === DOMException.QUOTA_EXCEEDED_ERR) {
+            this._debug('LocalStorage quota exceeded. State storage failed.');
+          }
+        }
+        return this._options.afterSetState(keyName, value);
+      } else {
+        if (this._state == null) {
+          this._state = {};
+        }
+        return this._state[key] = value;
+      }
+    };
+
+    Tour.prototype._removeState = function(key) {
+      var keyName;
+      if (this._options.storage) {
+        keyName = "" + this._options.name + "_" + key;
+        this._options.storage.removeItem(keyName);
+        return this._options.afterRemoveState(keyName);
+      } else {
+        if (this._state != null) {
+          return delete this._state[key];
+        }
+      }
+    };
+
+    Tour.prototype._getState = function(key) {
+      var keyName, value;
+      if (this._options.storage) {
+        keyName = "" + this._options.name + "_" + key;
+        value = this._options.storage.getItem(keyName);
+      } else {
+        if (this._state != null) {
+          value = this._state[key];
+        }
+      }
+      if (value === void 0 || value === 'null') {
+        value = null;
+      }
+      this._options.afterGetState(key, value);
+      return value;
+    };
+
+    Tour.prototype._showNextStep = function() {
+      var promise, showNextStepHelper, step;
+      step = this.getStep(this._current);
+      showNextStepHelper = (function(_this) {
+        return function(e) {
+          return _this.showStep(step.next);
+        };
+      })(this);
+      promise = this._makePromise(step.onNext != null ? step.onNext(this) : void 0);
+      return this._callOnPromiseDone(promise, showNextStepHelper);
+    };
+
+    Tour.prototype._showPrevStep = function() {
+      var promise, showPrevStepHelper, step;
+      step = this.getStep(this._current);
+      showPrevStepHelper = (function(_this) {
+        return function(e) {
+          return _this.showStep(step.prev);
+        };
+      })(this);
+      promise = this._makePromise(step.onPrev != null ? step.onPrev(this) : void 0);
+      return this._callOnPromiseDone(promise, showPrevStepHelper);
+    };
+
+    Tour.prototype._debug = function(text) {
+      if (this._options.debug) {
+        return window.console.log("Bootstrap Tour '" + this._options.name + "' | " + text);
+      }
+    };
+
+    Tour.prototype._isRedirect = function(host, path, location) {
+      var currentPath;
+      if (host !== '') {
+        if (this._isHostDifferent(host, location.href)) {
+          return true;
+        }
+      }
+      currentPath = [location.pathname, location.search, location.hash].join('');
+      return (path != null) && path !== '' && (({}.toString.call(path) === '[object RegExp]' && !path.test(currentPath)) || ({}.toString.call(path) === '[object String]' && this._isPathDifferent(path, currentPath)));
+    };
+
+    Tour.prototype._isHostDifferent = function(host, currentURL) {
+      return this._getProtocol(host) !== this._getProtocol(currentURL) || this._getHost(host) !== this._getHost(currentURL);
+    };
+
+    Tour.prototype._isPathDifferent = function(path, currentPath) {
+      return this._getPath(path) !== this._getPath(currentPath) || !this._equal(this._getQuery(path), this._getQuery(currentPath)) || !this._equal(this._getHash(path), this._getHash(currentPath));
+    };
+
+    Tour.prototype._isJustPathHashDifferent = function(host, path, location) {
+      var currentPath;
+      if (host !== '') {
+        if (this._isHostDifferent(host, location.href)) {
+          return false;
+        }
+      }
+      currentPath = [location.pathname, location.search, location.hash].join('');
+      if ({}.toString.call(path) === '[object String]') {
+        return this._getPath(path) === this._getPath(currentPath) && this._equal(this._getQuery(path), this._getQuery(currentPath)) && !this._equal(this._getHash(path), this._getHash(currentPath));
+      }
+      return false;
+    };
+
+    Tour.prototype._redirect = function(step, i, path) {
+      if ($.isFunction(step.redirect)) {
+        return step.redirect.call(this, path);
+      } else if (step.redirect === true) {
+        this._debug("Redirect to " + step.host + path);
+        if (this._getState('redirect_to') === ("" + i)) {
+          this._debug("Error redirection loop to " + path);
+          this._removeState('redirect_to');
+          if (step.onRedirectError != null) {
+            return step.onRedirectError(this);
+          }
+        } else {
+          this._setState('redirect_to', "" + i);
+          return document.location.href = "" + step.host + path;
+        }
+      }
+    };
+
+    Tour.prototype._isOrphan = function(step) {
+      return (step.element == null) || !$(step.element).length || $(step.element).is(':hidden') && ($(step.element)[0].namespaceURI !== 'http://www.w3.org/2000/svg');
+    };
+
+    Tour.prototype._isLast = function() {
+      // NOTE: despite the name, this returns true while further steps remain
+      // (i.e. the current step is NOT the last); every caller relies on this
+      // inverted sense, so the behavior is consistent even if the name is not
+      return this._current < this._options.steps.length - 1;
+    };
+
+    Tour.prototype._showPopover = function(step, i) {
+      var $element, $tip, isOrphan, options, shouldAddSmart;
+      $(".tour-" + this._options.name).remove();
+      options = $.extend({}, this._options);
+      isOrphan = this._isOrphan(step);
+      step.template = this._template(step, i);
+      if (isOrphan) {
+        step.element = 'body';
+        step.placement = 'top';
+      }
+      $element = $(step.element);
+      $element.addClass("tour-" + this._options.name + "-element tour-" + this._options.name + "-" + i + "-element");
+      if (step.options) {
+        $.extend(options, step.options);
+      }
+      if (step.reflex && !isOrphan) {
+        $(step.reflexElement).addClass('tour-step-element-reflex').off("" + (this._reflexEvent(step.reflex)) + ".tour-" + this._options.name).on("" + (this._reflexEvent(step.reflex)) + ".tour-" + this._options.name, (function(_this) {
+          return function() {
+            if (_this._isLast()) {
+              return _this.next();
+            } else {
+              return _this.end();
+            }
+          };
+        })(this));
+      }
+      shouldAddSmart = step.smartPlacement === true && step.placement.search(/auto/i) === -1;
+      $element.popover({
+        placement: shouldAddSmart ? "auto " + step.placement : step.placement,
+        trigger: 'manual',
+        title: step.title,
+        content: step.content,
+        html: true,
+        animation: step.animation,
+        container: step.container,
+        template: step.template,
+        selector: step.element
+      }).popover('show');
+      $tip = $element.data('bs.popover') ? $element.data('bs.popover').tip() : $element.data('popover').tip();
+      $tip.attr('id', step.id);
+      this._reposition($tip, step);
+      if (isOrphan) {
+        return this._center($tip);
+      }
+    };
+
+    Tour.prototype._template = function(step, i) {
+      var $navigation, $next, $prev, $resume, $template, template;
+      template = step.template;
+      if (this._isOrphan(step) && {}.toString.call(step.orphan) !== '[object Boolean]') {
+        template = step.orphan;
+      }
+      $template = $.isFunction(template) ? $(template(i, step)) : $(template);
+      $navigation = $template.find('.popover-navigation');
+      $prev = $navigation.find('[data-role="prev"]');
+      $next = $navigation.find('[data-role="next"]');
+      $resume = $navigation.find('[data-role="pause-resume"]');
+      if (this._isOrphan(step)) {
+        $template.addClass('orphan');
+      }
+      $template.addClass("tour-" + this._options.name + " tour-" + this._options.name + "-" + i);
+      if (step.reflex) {
+        $template.addClass("tour-" + this._options.name + "-reflex");
+      }
+      if (step.prev < 0) {
+        $prev.addClass('disabled');
+        $prev.prop('disabled', true);
+      }
+      if (step.next < 0) {
+        $next.addClass('disabled');
+        $next.prop('disabled', true);
+      }
+      if (!step.duration) {
+        $resume.remove();
+      }
+      return $template.clone().wrap('<div>').parent().html();
+    };
+
+    Tour.prototype._reflexEvent = function(reflex) {
+      if ({}.toString.call(reflex) === '[object Boolean]') {
+        return 'click';
+      } else {
+        return reflex;
+      }
+    };
+
+    Tour.prototype._reposition = function($tip, step) {
+      var offsetBottom, offsetHeight, offsetRight, offsetWidth, originalLeft, originalTop, tipOffset;
+      offsetWidth = $tip[0].offsetWidth;
+      offsetHeight = $tip[0].offsetHeight;
+      tipOffset = $tip.offset();
+      originalLeft = tipOffset.left;
+      originalTop = tipOffset.top;
+      offsetBottom = $(document).outerHeight() - tipOffset.top - $tip.outerHeight();
+      if (offsetBottom < 0) {
+        tipOffset.top = tipOffset.top + offsetBottom;
+      }
+      offsetRight = $('html').outerWidth() - tipOffset.left - $tip.outerWidth();
+      if (offsetRight < 0) {
+        tipOffset.left = tipOffset.left + offsetRight;
+      }
+      if (tipOffset.top < 0) {
+        tipOffset.top = 0;
+      }
+      if (tipOffset.left < 0) {
+        tipOffset.left = 0;
+      }
+      $tip.offset(tipOffset);
+      if (step.placement === 'bottom' || step.placement === 'top') {
+        if (originalLeft !== tipOffset.left) {
+          return this._replaceArrow($tip, (tipOffset.left - originalLeft) * 2, offsetWidth, 'left');
+        }
+      } else {
+        if (originalTop !== tipOffset.top) {
+          return this._replaceArrow($tip, (tipOffset.top - originalTop) * 2, offsetHeight, 'top');
+        }
+      }
+    };
+
+    Tour.prototype._center = function($tip) {
+      return $tip.css('top', $(window).outerHeight() / 2 - $tip.outerHeight() / 2);
+    };
+
+    Tour.prototype._replaceArrow = function($tip, delta, dimension, position) {
+      return $tip.find('.arrow').css(position, delta ? 50 * (1 - delta / dimension) + '%' : '');
+    };
+
+    Tour.prototype._scrollIntoView = function(element, callback) {
+      var $element, $window, counter, offsetTop, scrollTop, windowHeight;
+      $element = $(element);
+      if (!$element.length) {
+        return callback();
+      }
+      $window = $(window);
+      offsetTop = $element.offset().top;
+      windowHeight = $window.height();
+      scrollTop = Math.max(0, offsetTop - (windowHeight / 2));
+      this._debug("Scroll into view. ScrollTop: " + scrollTop + ". Element offset: " + offsetTop + ". Window height: " + windowHeight + ".");
+      counter = 0;
+      return $('body, html').stop(true, true).animate({
+        scrollTop: Math.ceil(scrollTop)
+      }, (function(_this) {
+        return function() {
+          if (++counter === 2) {
+            callback();
+            return _this._debug("Scroll into view.\nAnimation end element offset: " + ($element.offset().top) + ".\nWindow height: " + ($window.height()) + ".");
+          }
+        };
+      })(this));
+    };
+
+    Tour.prototype._onResize = function(callback, timeout) {
+      return $(window).on("resize.tour-" + this._options.name, function() {
+        clearTimeout(timeout);
+        return timeout = setTimeout(callback, 100);
+      });
+    };
+
+    Tour.prototype._initMouseNavigation = function() {
+      var _this;
+      _this = this;
+      return $(document).off("click.tour-" + this._options.name, ".popover.tour-" + this._options.name + " *[data-role='prev']").off("click.tour-" + this._options.name, ".popover.tour-" + this._options.name + " *[data-role='next']").off("click.tour-" + this._options.name, ".popover.tour-" + this._options.name + " *[data-role='end']").off("click.tour-" + this._options.name, ".popover.tour-" + this._options.name + " *[data-role='pause-resume']").on("click.tour-" + this._options.name, ".pop [...]
+        return function(e) {
+          e.preventDefault();
+          return _this.next();
+        };
+      })(this)).on("click.tour-" + this._options.name, ".popover.tour-" + this._options.name + " *[data-role='prev']", (function(_this) {
+        return function(e) {
+          e.preventDefault();
+          return _this.prev();
+        };
+      })(this)).on("click.tour-" + this._options.name, ".popover.tour-" + this._options.name + " *[data-role='end']", (function(_this) {
+        return function(e) {
+          e.preventDefault();
+          return _this.end();
+        };
+      })(this)).on("click.tour-" + this._options.name, ".popover.tour-" + this._options.name + " *[data-role='pause-resume']", function(e) {
+        var $this;
+        e.preventDefault();
+        $this = $(this);
+        $this.text(_this._paused ? $this.data('pause-text') : $this.data('resume-text'));
+        if (_this._paused) {
+          return _this.resume();
+        } else {
+          return _this.pause();
+        }
+      });
+    };
+
+    Tour.prototype._initKeyboardNavigation = function() {
+      if (!this._options.keyboard) {
+        return;
+      }
+      return $(document).on("keyup.tour-" + this._options.name, (function(_this) {
+        return function(e) {
+          if (!e.which) {
+            return;
+          }
+          switch (e.which) {
+            case 39:
+              e.preventDefault();
+              if (_this._isLast()) {
+                return _this.next();
+              } else {
+                return _this.end();
+              }
+              break;
+            case 37:
+              e.preventDefault();
+              if (_this._current > 0) {
+                return _this.prev();
+              }
+              break;
+            case 27:
+              e.preventDefault();
+              return _this.end();
+          }
+        };
+      })(this));
+    };
+
+    Tour.prototype._makePromise = function(result) {
+      if (result && $.isFunction(result.then)) {
+        return result;
+      } else {
+        return null;
+      }
+    };
+
+    Tour.prototype._callOnPromiseDone = function(promise, cb, arg) {
+      if (promise) {
+        return promise.then((function(_this) {
+          return function(e) {
+            return cb.call(_this, arg);
+          };
+        })(this));
+      } else {
+        return cb.call(this, arg);
+      }
+    };
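+    /* NOTE: the two helpers above let any step callback defer the tour: if
+     * onShow/onHide/onNext/onPrev returns a thenable (e.g. a jQuery
+     * Deferred), the dependent action runs only once it resolves, e.g.
+     *
+     *   onShow: function (tour) { return $.get('/prefetch'); }
+     *
+     * where '/prefetch' is an illustrative URL, not one Galaxy defines.
+     */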
+
+    Tour.prototype._showBackdrop = function(step) {
+      if (this.backdrop.backgroundShown) {
+        return;
+      }
+      // NOTE: this replaces the backdrop state object with a bare jQuery
+      // element and stores the shown-flag on it; _hideBackground expects
+      // exactly this shape, though any $element/$background fields set by
+      // _showOverlayElement on the old object are lost
+      this.backdrop = $('<div>', {
+        "class": 'tour-backdrop'
+      });
+      this.backdrop.backgroundShown = true;
+      return $(step.backdropContainer).append(this.backdrop);
+    };
+
+    Tour.prototype._hideBackdrop = function() {
+      this._hideOverlayElement();
+      return this._hideBackground();
+    };
+
+    Tour.prototype._hideBackground = function() {
+      if (this.backdrop) {
+        this.backdrop.remove();
+        this.backdrop.overlay = null;
+        return this.backdrop.backgroundShown = false;
+      }
+    };
+
+    Tour.prototype._showOverlayElement = function(step, force) {
+      var $element, elementData;
+      $element = $(step.element);
+      if (!$element || $element.length === 0 || this.backdrop.overlayElementShown && !force) {
+        return;
+      }
+      if (!this.backdrop.overlayElementShown) {
+        this.backdrop.$element = $element.addClass('tour-step-backdrop');
+        this.backdrop.$background = $('<div>', {
+          "class": 'tour-step-background'
+        });
+        this.backdrop.$background.appendTo(step.backdropContainer);
+        this.backdrop.overlayElementShown = true;
+      }
+      elementData = {
+        width: $element.innerWidth(),
+        height: $element.innerHeight(),
+        offset: $element.offset()
+      };
+      if (step.backdropPadding) {
+        elementData = this._applyBackdropPadding(step.backdropPadding, elementData);
+      }
+      return this.backdrop.$background.width(elementData.width).height(elementData.height).offset(elementData.offset);
+    };
+
+    Tour.prototype._hideOverlayElement = function() {
+      if (!this.backdrop.overlayElementShown) {
+        return;
+      }
+      this.backdrop.$element.removeClass('tour-step-backdrop');
+      this.backdrop.$background.remove();
+      this.backdrop.$element = null;
+      this.backdrop.$background = null;
+      return this.backdrop.overlayElementShown = false;
+    };
+
+    Tour.prototype._applyBackdropPadding = function(padding, data) {
+      if (typeof padding === 'object') {
+        if (padding.top == null) {
+          padding.top = 0;
+        }
+        if (padding.right == null) {
+          padding.right = 0;
+        }
+        if (padding.bottom == null) {
+          padding.bottom = 0;
+        }
+        if (padding.left == null) {
+          padding.left = 0;
+        }
+        data.offset.top = data.offset.top - padding.top;
+        data.offset.left = data.offset.left - padding.left;
+        data.width = data.width + padding.left + padding.right;
+        data.height = data.height + padding.top + padding.bottom;
+      } else {
+        data.offset.top = data.offset.top - padding;
+        data.offset.left = data.offset.left - padding;
+        data.width = data.width + (padding * 2);
+        data.height = data.height + (padding * 2);
+      }
+      return data;
+    };
+
+    Tour.prototype._clearTimer = function() {
+      window.clearTimeout(this._timer);
+      this._timer = null;
+      return this._duration = null;
+    };
+
+    Tour.prototype._getProtocol = function(url) {
+      url = url.split('://');
+      if (url.length > 1) {
+        return url[0];
+      } else {
+        return 'http';
+      }
+    };
+
+    Tour.prototype._getHost = function(url) {
+      url = url.split('//');
+      url = url.length > 1 ? url[1] : url[0];
+      return url.split('/')[0];
+    };
+
+    Tour.prototype._getPath = function(path) {
+      return path.replace(/\/?$/, '').split('?')[0].split('#')[0];
+    };
+
+    Tour.prototype._getQuery = function(path) {
+      return this._getParams(path, '?');
+    };
+
+    Tour.prototype._getHash = function(path) {
+      return this._getParams(path, '#');
+    };
+
+    Tour.prototype._getParams = function(path, start) {
+      var param, params, paramsObject, _i, _len;
+      params = path.split(start);
+      if (params.length === 1) {
+        return {};
+      }
+      params = params[1].split('&');
+      paramsObject = {};
+      for (_i = 0, _len = params.length; _i < _len; _i++) {
+        param = params[_i];
+        param = param.split('=');
+        paramsObject[param[0]] = param[1] || '';
+      }
+      return paramsObject;
+    };
+
+    Tour.prototype._equal = function(obj1, obj2) {
+      var k, v;
+      if ({}.toString.call(obj1) === '[object Object]' && {}.toString.call(obj2) === '[object Object]') {
+        for (k in obj1) {
+          v = obj1[k];
+          if (obj2[k] !== v) {
+            return false;
+          }
+        }
+        for (k in obj2) {
+          v = obj2[k];
+          if (obj1[k] !== v) {
+            return false;
+          }
+        }
+        return true;
+      }
+      return obj1 === obj2;
+    };
+
+    return Tour;
+
+  })();
+  return window.Tour = Tour;
+})(jQuery, window);
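
For orientation, a minimal usage sketch of the Tour API defined above; the
selectors and copy are placeholders, while the constructor options and step
fields are the ones the source actually reads:

    var tour = new Tour({
      name: 'demo',  // namespaces the tour's events and storage keys
      steps: [
        { element: '#masthead', title: 'Step 1', content: 'Intro', placement: 'bottom' },
        { element: '#history-panel', title: 'Step 2', content: 'Details', placement: 'left' }
      ]
    });
    tour.init();   // restores any saved current step and binds navigation
    tour.start();  // shows the first step unless the tour already ended

Progress persists under keys such as 'demo_current_step', so reloading the
page resumes the tour; tour.restart() clears that state and starts over.
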
diff --git a/client/galaxy/scripts/libs/bootstrap.js b/client/galaxy/scripts/libs/bootstrap.js
new file mode 100644
index 0000000..0ea9898
--- /dev/null
+++ b/client/galaxy/scripts/libs/bootstrap.js
@@ -0,0 +1,850 @@
+/* ========================================================================
+ * Bootstrap: transition.js v3.0.0
+ * http://twbs.github.com/bootstrap/javascript.html#transitions
+ * ========================================================================
+ * Copyright 2013 Twitter, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ======================================================================== */
+
+
++function ($) { "use strict";
+
+  // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
+  // ============================================================
+
+  function transitionEnd() {
+    var el = document.createElement('bootstrap')
+
+    var transEndEventNames = {
+      'WebkitTransition' : 'webkitTransitionEnd'
+    , 'MozTransition'    : 'transitionend'
+    , 'OTransition'      : 'oTransitionEnd otransitionend'
+    , 'transition'       : 'transitionend'
+    }
+
+    for (var name in transEndEventNames) {
+      if (el.style[name] !== undefined) {
+        return { end: transEndEventNames[name] }
+      }
+    }
+  }
+
+  // http://blog.alexmaccaw.com/css-transitions
+  $.fn.emulateTransitionEnd = function (duration) {
+    var called = false, $el = this
+    $(this).one($.support.transition.end, function () { called = true })
+    var callback = function () { if (!called) $($el).trigger($.support.transition.end) }
+    setTimeout(callback, duration)
+    return this
+  }
+
+  $(function () {
+    $.support.transition = transitionEnd()
+  })
+
+}(window.jQuery);
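+
+// Usage sketch (illustrative; not upstream Bootstrap code). After DOM ready,
+// $.support.transition is either falsy or { end: <transition-end event name> },
+// and emulateTransitionEnd fires the handler even if the browser never emits
+// the event ('#panel' and onFaded are assumed names):
+//
+//   var $panel = $('#panel').addClass('fade')
+//   if ($.support.transition) {
+//     $panel.one($.support.transition.end, onFaded).emulateTransitionEnd(150)
+//   } else {
+//     onFaded()
+//   }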
+/* ========================================================================
+ * Bootstrap: tab.js v3.0.0
+ * http://twbs.github.com/bootstrap/javascript.html#tabs
+ * ========================================================================
+ * Copyright 2012 Twitter, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ======================================================================== */
+
+
++function ($) { "use strict";
+
+  // TAB CLASS DEFINITION
+  // ====================
+
+  var Tab = function (element) {
+    this.element = $(element)
+  }
+
+  Tab.prototype.show = function () {
+    var $this    = this.element
+    var $ul      = $this.closest('ul:not(.dropdown-menu)')
+    var selector = $this.attr('data-target')
+
+    if (!selector) {
+      selector = $this.attr('href')
+      selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7
+    }
+
+    if ($this.parent('li').hasClass('active')) return
+
+    var previous = $ul.find('.active:last a')[0]
+    var e        = $.Event('show.bs.tab', {
+      relatedTarget: previous
+    })
+
+    $this.trigger(e)
+
+    if (e.isDefaultPrevented()) return
+
+    var $target = $(selector)
+
+    this.activate($this.parent('li'), $ul)
+    this.activate($target, $target.parent(), function () {
+      $this.trigger({
+        type: 'shown.bs.tab'
+      , relatedTarget: previous
+      })
+    })
+  }
+
+  Tab.prototype.activate = function (element, container, callback) {
+    var $active    = container.find('> .active')
+    var transition = callback
+      && $.support.transition
+      && $active.hasClass('fade')
+
+    function next() {
+      $active
+        .removeClass('active')
+        .find('> .dropdown-menu > .active')
+        .removeClass('active')
+
+      element.addClass('active')
+
+      if (transition) {
+        element[0].offsetWidth // reflow for transition
+        element.addClass('in')
+      } else {
+        element.removeClass('fade')
+      }
+
+      if (element.parent('.dropdown-menu')) {
+        element.closest('li.dropdown').addClass('active')
+      }
+
+      callback && callback()
+    }
+
+    transition ?
+      $active
+        .one($.support.transition.end, next)
+        .emulateTransitionEnd(150) :
+      next()
+
+    $active.removeClass('in')
+  }
+
+
+  // TAB PLUGIN DEFINITION
+  // =====================
+
+  var old = $.fn.tab
+
+  $.fn.tab = function ( option ) {
+    return this.each(function () {
+      var $this = $(this)
+      var data  = $this.data('bs.tab')
+
+      if (!data) $this.data('bs.tab', (data = new Tab(this)))
+      if (typeof option == 'string') data[option]()
+    })
+  }
+
+  $.fn.tab.Constructor = Tab
+
+
+  // TAB NO CONFLICT
+  // ===============
+
+  $.fn.tab.noConflict = function () {
+    $.fn.tab = old
+    return this
+  }
+
+
+  // TAB DATA-API
+  // ============
+
+  $(document).on('click.bs.tab.data-api', '[data-toggle="tab"], [data-toggle="pill"]', function (e) {
+    e.preventDefault()
+    $(this).tab('show')
+  })
+
+}(window.jQuery);
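+
+// Usage sketch (illustrative; not upstream Bootstrap code). Tabs are usually
+// wired through the data API registered above:
+//
+//   <ul class="nav nav-tabs" id="myTab">
+//     <li class="active"><a href="#home" data-toggle="tab">Home</a></li>
+//     <li><a href="#profile" data-toggle="tab">Profile</a></li>
+//   </ul>
+//
+// or activated programmatically ('#myTab' is an assumed id):
+//
+//   $('#myTab a[href="#profile"]').tab('show')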
+/* ========================================================================
+ * Bootstrap: tooltip.js v3.0.0
+ * http://twbs.github.com/bootstrap/javascript.html#tooltip
+ * Inspired by the original jQuery.tipsy by Jason Frame
+ * ========================================================================
+ * Copyright 2012 Twitter, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ======================================================================== */
+
+
++function ($) { "use strict";
+
+  // TOOLTIP PUBLIC CLASS DEFINITION
+  // ===============================
+
+  var Tooltip = function (element, options) {
+    this.type       =
+    this.options    =
+    this.enabled    =
+    this.timeout    =
+    this.hoverState =
+    this.$element   = null
+
+    this.init('tooltip', element, options)
+  }
+
+  Tooltip.DEFAULTS = {
+    animation: true
+  , placement: 'top'
+  , selector: false
+  , template: '<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>'
+  , trigger: 'hover focus'
+  , title: ''
+  , delay: 0
+  , html: false
+  , container: 'body'
+  }
+
+  Tooltip.prototype.init = function (type, element, options) {
+    this.enabled  = true
+    this.type     = type
+    this.$element = $(element)
+    this.options  = this.getOptions(options)
+
+    var triggers = this.options.trigger.split(' ')
+
+    for (var i = triggers.length; i--;) {
+      var trigger = triggers[i]
+
+      if (trigger == 'click') {
+        this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this))
+      } else if (trigger != 'manual') {
+        var eventIn  = trigger == 'hover' ? 'mouseenter' : 'focus'
+        var eventOut = trigger == 'hover' ? 'mouseleave' : 'blur'
+
+        this.$element.on(eventIn  + '.' + this.type, this.options.selector, $.proxy(this.enter, this))
+        this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this))
+      }
+    }
+
+    this.options.selector ?
+      (this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) :
+      this.fixTitle()
+  }
+
+  Tooltip.prototype.getDefaults = function () {
+    return Tooltip.DEFAULTS
+  }
+
+  Tooltip.prototype.getOptions = function (options) {
+    options = $.extend({}, this.getDefaults(), this.$element.data(), options)
+
+    if (options.delay && typeof options.delay == 'number') {
+      options.delay = {
+        show: options.delay
+      , hide: options.delay
+      }
+    }
+
+    return options
+  }
+
+  Tooltip.prototype.getDelegateOptions = function () {
+    var options  = {}
+    var defaults = this.getDefaults()
+
+    this._options && $.each(this._options, function (key, value) {
+      if (defaults[key] != value) options[key] = value
+    })
+
+    return options
+  }
+
+  Tooltip.prototype.enter = function (obj) {
+    var self = obj instanceof this.constructor ?
+      obj : $(obj.currentTarget)[this.type](this.getDelegateOptions()).data('bs.' + this.type)
+
+    clearTimeout(self.timeout)
+
+    self.hoverState = 'in'
+
+    if (!self.options.delay || !self.options.delay.show) return self.show()
+
+    self.timeout = setTimeout(function () {
+      if (self.hoverState == 'in') self.show()
+    }, self.options.delay.show)
+  }
+
+  Tooltip.prototype.leave = function (obj) {
+    var self = obj instanceof this.constructor ?
+      obj : $(obj.currentTarget)[this.type](this.getDelegateOptions()).data('bs.' + this.type)
+
+    clearTimeout(self.timeout)
+
+    self.hoverState = 'out'
+
+    if (!self.options.delay || !self.options.delay.hide) return self.hide()
+
+    self.timeout = setTimeout(function () {
+      if (self.hoverState == 'out') self.hide()
+    }, self.options.delay.hide)
+  }
+
+  Tooltip.prototype.show = function () {
+    var e = $.Event('show.bs.'+ this.type)
+
+    if (this.hasContent() && this.enabled) {
+      this.$element.trigger(e)
+
+      if (e.isDefaultPrevented()) return
+
+      var $tip = this.tip()
+
+      this.setContent()
+
+      if (this.options.animation) $tip.addClass('fade')
+
+      var placement = typeof this.options.placement == 'function' ?
+        this.options.placement.call(this, $tip[0], this.$element[0]) :
+        this.options.placement
+
+      var autoToken = /\s?auto?\s?/i
+      var autoPlace = autoToken.test(placement)
+      if (autoPlace) placement = placement.replace(autoToken, '') || 'top'
+
+      $tip
+        .detach()
+        .css({ top: 0, left: 0, display: 'block' })
+        .addClass(placement)
+
+      this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element)
+
+      var pos          = this.getPosition()
+      var actualWidth  = $tip[0].offsetWidth
+      var actualHeight = $tip[0].offsetHeight
+
+      if (autoPlace) {
+        var $parent = this.$element.parent()
+
+        var orgPlacement = placement
+        var docScroll    = document.documentElement.scrollTop || document.body.scrollTop
+        var parentWidth  = this.options.container == 'body' ? window.innerWidth  : $parent.outerWidth()
+        var parentHeight = this.options.container == 'body' ? window.innerHeight : $parent.outerHeight()
+        var parentLeft   = this.options.container == 'body' ? 0 : $parent.offset().left
+
+        placement = placement == 'bottom' && pos.top   + pos.height  + actualHeight - docScroll > parentHeight  ? 'top'    :
+                    placement == 'top'    && pos.top   - docScroll   - actualHeight < 0                         ? 'bottom' :
+                    placement == 'right'  && pos.right + actualWidth > parentWidth                              ? 'left'   :
+                    placement == 'left'   && pos.left  - actualWidth < parentLeft                               ? 'right'  :
+                    placement
+
+        $tip
+          .removeClass(orgPlacement)
+          .addClass(placement)
+      }
+
+      var calculatedOffset = this.getCalculatedOffset(placement, pos, actualWidth, actualHeight)
+
+      this.applyPlacement(calculatedOffset, placement)
+      this.$element.trigger('shown.bs.' + this.type)
+    }
+  }
+
+  Tooltip.prototype.applyPlacement = function(offset, placement) {
+    var replace
+    var $tip   = this.tip()
+    var width  = $tip[0].offsetWidth
+    var height = $tip[0].offsetHeight
+
+    // manually read margins because getBoundingClientRect includes difference
+    var marginTop = parseInt($tip.css('margin-top'), 10)
+    var marginLeft = parseInt($tip.css('margin-left'), 10)
+
+    // we must check for NaN for ie 8/9
+    if (isNaN(marginTop))  marginTop  = 0
+    if (isNaN(marginLeft)) marginLeft = 0
+
+    offset.top  = offset.top  + marginTop
+    offset.left = offset.left + marginLeft
+
+    $tip
+      .offset(offset)
+      .addClass('in')
+
+    // check to see if placing tip in new offset caused the tip to resize itself
+    var actualWidth  = $tip[0].offsetWidth
+    var actualHeight = $tip[0].offsetHeight
+
+    if (placement == 'top' && actualHeight != height) {
+      replace = true
+      offset.top = offset.top + height - actualHeight
+    }
+
+    if (/bottom|top/.test(placement)) {
+      var delta = 0
+
+      if (offset.left < 0) {
+        delta       = offset.left * -2
+        offset.left = 0
+
+        $tip.offset(offset)
+
+        actualWidth  = $tip[0].offsetWidth
+        actualHeight = $tip[0].offsetHeight
+      }
+
+      this.replaceArrow(delta - width + actualWidth, actualWidth, 'left')
+    } else {
+      this.replaceArrow(actualHeight - height, actualHeight, 'top')
+    }
+
+    if (replace) $tip.offset(offset)
+  }
+
+  Tooltip.prototype.replaceArrow = function(delta, dimension, position) {
+    this.arrow().css(position, delta ? (50 * (1 - delta / dimension) + "%") : '')
+  }
+
+  Tooltip.prototype.setContent = function () {
+    var $tip  = this.tip()
+    var title = this.getTitle()
+
+    $tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
+    $tip.removeClass('fade in top bottom left right')
+  }
+
+  Tooltip.prototype.hide = function () {
+    var that = this
+    var $tip = this.tip()
+    var e    = $.Event('hide.bs.' + this.type)
+
+    function complete() {
+      if (that.hoverState != 'in') $tip.detach()
+    }
+
+    this.$element.trigger(e)
+
+    if (e.isDefaultPrevented()) return
+
+    $tip.removeClass('in')
+
+    $.support.transition && this.$tip.hasClass('fade') ?
+      $tip
+        .one($.support.transition.end, complete)
+        .emulateTransitionEnd(150) :
+      complete()
+
+    this.$element.trigger('hidden.bs.' + this.type)
+
+    return this
+  }
+
+  Tooltip.prototype.fixTitle = function () {
+    var $e = this.$element
+    if ($e.attr('title') || typeof($e.attr('data-original-title')) != 'string') {
+      $e.attr('data-original-title', $e.attr('title') || '').attr('title', '')
+    }
+  }
+
+  Tooltip.prototype.hasContent = function () {
+    return this.getTitle()
+  }
+
+  Tooltip.prototype.getPosition = function () {
+    var el = this.$element[0]
+    return $.extend({}, (typeof el.getBoundingClientRect == 'function') ? el.getBoundingClientRect() : {
+      width: el.offsetWidth
+    , height: el.offsetHeight
+    }, this.$element.offset())
+  }
+
+  Tooltip.prototype.getCalculatedOffset = function (placement, pos, actualWidth, actualHeight) {
+    return placement == 'bottom' ? { top: pos.top + pos.height,   left: pos.left + pos.width / 2 - actualWidth / 2  } :
+           placement == 'top'    ? { top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2  } :
+           placement == 'left'   ? { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth } :
+        /* placement == 'right' */ { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width   }
+  }
+
+  Tooltip.prototype.getTitle = function () {
+    var title
+    var $e = this.$element
+    var o  = this.options
+
+    title = $e.attr('data-original-title')
+      || (typeof o.title == 'function' ? o.title.call($e[0]) :  o.title)
+
+    return title
+  }
+
+  Tooltip.prototype.tip = function () {
+    return this.$tip = this.$tip || $(this.options.template)
+  }
+
+  Tooltip.prototype.arrow = function () {
+    return this.$arrow = this.$arrow || this.tip().find('.tooltip-arrow')
+  }
+
+  Tooltip.prototype.validate = function () {
+    if (!this.$element[0].parentNode) {
+      this.hide()
+      this.$element = null
+      this.options  = null
+    }
+  }
+
+  Tooltip.prototype.enable = function () {
+    this.enabled = true
+  }
+
+  Tooltip.prototype.disable = function () {
+    this.enabled = false
+  }
+
+  Tooltip.prototype.toggleEnabled = function () {
+    this.enabled = !this.enabled
+  }
+
+  Tooltip.prototype.toggle = function (e) {
+    var self = e ? $(e.currentTarget)[this.type](this.getDelegateOptions()).data('bs.' + this.type) : this
+    self.tip().hasClass('in') ? self.leave(self) : self.enter(self)
+  }
+
+  Tooltip.prototype.destroy = function () {
+    this.hide().$element.off('.' + this.type).removeData('bs.' + this.type)
+  }
+
+
+  // TOOLTIP PLUGIN DEFINITION
+  // =========================
+
+  var old = $.fn.tooltip
+
+  $.fn.tooltip = function (option) {
+    return this.each(function () {
+      var $this   = $(this)
+      var data    = $this.data('bs.tooltip')
+      var options = typeof option == 'object' && option
+
+      if (!data) $this.data('bs.tooltip', (data = new Tooltip(this, options)))
+      if (typeof option == 'string') data[option]()
+    })
+  }
+
+  $.fn.tooltip.Constructor = Tooltip
+
+
+  // TOOLTIP NO CONFLICT
+  // ===================
+
+  $.fn.tooltip.noConflict = function () {
+    $.fn.tooltip = old
+    return this
+  }
+
+}(window.jQuery);
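+
+// Usage sketch (illustrative; not upstream Bootstrap code). Tooltips are
+// opt-in: initialize them explicitly, passing overrides for the DEFAULTS
+// above ('#help-icon' is an assumed id):
+//
+//   $('[data-toggle="tooltip"]').tooltip({ placement: 'auto top' })
+//
+//   $('#help-icon').tooltip({ title: 'Click for help' })
+//   $('#help-icon').tooltip('show')   // string options dispatch to methods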
+
+/* ========================================================================
+ * Bootstrap: dropdown.js v3.0.0
+ * http://twbs.github.com/bootstrap/javascript.html#dropdowns
+ * ========================================================================
+ * Copyright 2012 Twitter, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ======================================================================== */
+
+
++function ($) { "use strict";
+
+  // DROPDOWN CLASS DEFINITION
+  // =========================
+
+  var backdrop = '.dropdown-backdrop'
+  var toggle   = '[data-toggle=dropdown]'
+  var Dropdown = function (element) {
+    var $el = $(element).on('click.bs.dropdown', this.toggle)
+  }
+
+  Dropdown.prototype.toggle = function (e) {
+    var $this = $(this)
+
+    if ($this.is('.disabled, :disabled')) return
+
+    var $parent  = getParent($this)
+    var isActive = $parent.hasClass('open')
+
+    clearMenus()
+
+    if (!isActive) {
+      if ('ontouchstart' in document.documentElement && !$parent.closest('.navbar-nav').length) {
+        // if mobile we use a backdrop because click events don't delegate
+        $('<div class="dropdown-backdrop"/>').insertAfter($(this)).on('click', clearMenus)
+      }
+
+      $parent.trigger(e = $.Event('show.bs.dropdown'))
+
+      if (e.isDefaultPrevented()) return
+
+      $parent
+        .toggleClass('open')
+        .trigger('shown.bs.dropdown')
+
+      $this.focus()
+    }
+
+    return false
+  }
+
+  Dropdown.prototype.keydown = function (e) {
+    if (!/(38|40|27)/.test(e.keyCode)) return
+
+    var $this = $(this)
+
+    e.preventDefault()
+    e.stopPropagation()
+
+    if ($this.is('.disabled, :disabled')) return
+
+    var $parent  = getParent($this)
+    var isActive = $parent.hasClass('open')
+
+    if (!isActive || (isActive && e.keyCode == 27)) {
+      if (e.which == 27) $parent.find(toggle).focus()
+      return $this.click()
+    }
+
+    var $items = $('[role=menu] li:not(.divider):visible a', $parent)
+
+    if (!$items.length) return
+
+    var index = $items.index($items.filter(':focus'))
+
+    if (e.keyCode == 38 && index > 0)                 index--                        // up
+    if (e.keyCode == 40 && index < $items.length - 1) index++                        // down
+    if (!~index)                                      index=0
+
+    $items.eq(index).focus()
+  }
+
+  function clearMenus() {
+    $(backdrop).remove()
+    $(toggle).each(function (e) {
+      var $parent = getParent($(this))
+      if (!$parent.hasClass('open')) return
+      $parent.trigger(e = $.Event('hide.bs.dropdown'))
+      if (e.isDefaultPrevented()) return
+      $parent.removeClass('open').trigger('hidden.bs.dropdown')
+    })
+  }
+
+  function getParent($this) {
+    var selector = $this.attr('data-target')
+
+    if (!selector) {
+      selector = $this.attr('href')
+      selector = selector && /#/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7
+    }
+
+    var $parent = selector && $(selector)
+
+    return $parent && $parent.length ? $parent : $this.parent()
+  }
+
+
+  // DROPDOWN PLUGIN DEFINITION
+  // ==========================
+
+  var old = $.fn.dropdown
+
+  $.fn.dropdown = function (option) {
+    return this.each(function () {
+      var $this = $(this)
+      var data  = $this.data('dropdown')
+
+      if (!data) $this.data('dropdown', (data = new Dropdown(this)))
+      if (typeof option == 'string') data[option].call($this)
+    })
+  }
+
+  $.fn.dropdown.Constructor = Dropdown
+
+
+  // DROPDOWN NO CONFLICT
+  // ====================
+
+  $.fn.dropdown.noConflict = function () {
+    $.fn.dropdown = old
+    return this
+  }
+
+
+  // APPLY TO STANDARD DROPDOWN ELEMENTS
+  // ===================================
+
+  $(document)
+    .on('click.bs.dropdown.data-api', clearMenus)
+    .on('click.bs.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() })
+    .on('click.bs.dropdown.data-api'  , toggle, Dropdown.prototype.toggle)
+    .on('keydown.bs.dropdown.data-api', toggle + ', [role=menu]' , Dropdown.prototype.keydown)
+
+}(window.jQuery);
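+
+// Usage sketch (illustrative; not upstream Bootstrap code). Dropdowns are
+// normally driven entirely by the data API registered above:
+//
+//   <div class="dropdown">
+//     <a href="#" data-toggle="dropdown">Menu</a>
+//     <ul class="dropdown-menu" role="menu"> ... </ul>
+//   </div>
+//
+// or toggled programmatically:
+//
+//   $('[data-toggle=dropdown]').dropdown('toggle')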
+
+/* ========================================================================
+ * Bootstrap: popover.js v3.0.3
+ * http://getbootstrap.com/javascript/#popovers
+ * ========================================================================
+ * Copyright 2013 Twitter, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ======================================================================== */
+
+
++function ($) { "use strict";
+
+  // POPOVER PUBLIC CLASS DEFINITION
+  // ===============================
+
+  var Popover = function (element, options) {
+    this.init('popover', element, options)
+  }
+
+  if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js')
+
+  Popover.DEFAULTS = $.extend({} , $.fn.tooltip.Constructor.DEFAULTS, {
+    placement: 'right'
+  , trigger: 'click'
+  , content: ''
+  , template: '<div class="popover"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'
+  })
+
+
+  // NOTE: POPOVER EXTENDS tooltip.js
+  // ================================
+
+  Popover.prototype = $.extend({}, $.fn.tooltip.Constructor.prototype)
+
+  Popover.prototype.constructor = Popover
+
+  Popover.prototype.getDefaults = function () {
+    return Popover.DEFAULTS
+  }
+
+  Popover.prototype.setContent = function () {
+    var $tip    = this.tip()
+    var title   = this.getTitle()
+    var content = this.getContent()
+
+    $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title)
+    $tip.find('.popover-content')[this.options.html ? 'html' : 'text'](content)
+
+    $tip.removeClass('fade top bottom left right in')
+
+    // IE8 doesn't accept hiding via the `:empty` pseudo selector, we have to do
+    // this manually by checking the contents.
+    if (!$tip.find('.popover-title').html()) $tip.find('.popover-title').hide()
+  }
+
+  Popover.prototype.hasContent = function () {
+    return this.getTitle() || this.getContent()
+  }
+
+  Popover.prototype.getContent = function () {
+    var $e = this.$element
+    var o  = this.options
+
+    return $e.attr('data-content')
+      || (typeof o.content == 'function' ?
+            o.content.call($e[0]) :
+            o.content)
+  }
+
+  Popover.prototype.arrow = function () {
+    return this.$arrow = this.$arrow || this.tip().find('.arrow')
+  }
+
+  Popover.prototype.tip = function () {
+    if (!this.$tip) this.$tip = $(this.options.template)
+    return this.$tip
+  }
+
+
+  // POPOVER PLUGIN DEFINITION
+  // =========================
+
+  var old = $.fn.popover
+
+  $.fn.popover = function (option) {
+    return this.each(function () {
+      var $this   = $(this)
+      var data    = $this.data('bs.popover')
+      var options = typeof option == 'object' && option
+
+      if (!data) $this.data('bs.popover', (data = new Popover(this, options)))
+      if (typeof option == 'string') data[option]()
+    })
+  }
+
+  $.fn.popover.Constructor = Popover
+
+
+  // POPOVER NO CONFLICT
+  // ===================
+
+  $.fn.popover.noConflict = function () {
+    $.fn.popover = old
+    return this
+  }
+
+}(jQuery);
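+
+// Usage sketch (illustrative; not upstream Bootstrap code). Popovers extend
+// the tooltip plugin above and default to click triggering ('#save-btn' is
+// an assumed id):
+//
+//   $('#save-btn').popover({
+//     title: 'Saved',
+//     content: 'Your changes were stored.',
+//     placement: 'bottom'
+//   })
+//   $('#save-btn').popover('show')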
diff --git a/client/galaxy/scripts/libs/d3.js b/client/galaxy/scripts/libs/d3.js
new file mode 100644
index 0000000..aded45c
--- /dev/null
+++ b/client/galaxy/scripts/libs/d3.js
@@ -0,0 +1,9554 @@
+!function() {
+  var d3 = {
+    version: "3.5.17"
+  };
+  var d3_arraySlice = [].slice, d3_array = function(list) {
+    return d3_arraySlice.call(list);
+  };
+  var d3_document = this.document;
+  function d3_documentElement(node) {
+    return node && (node.ownerDocument || node.document || node).documentElement;
+  }
+  function d3_window(node) {
+    return node && (node.ownerDocument && node.ownerDocument.defaultView || node.document && node || node.defaultView);
+  }
+  if (d3_document) {
+    try {
+      d3_array(d3_document.documentElement.childNodes)[0].nodeType;
+    } catch (e) {
+      d3_array = function(list) {
+        var i = list.length, array = new Array(i);
+        while (i--) array[i] = list[i];
+        return array;
+      };
+    }
+  }
+  if (!Date.now) Date.now = function() {
+    return +new Date();
+  };
+  if (d3_document) {
+    try {
+      d3_document.createElement("DIV").style.setProperty("opacity", 0, "");
+    } catch (error) {
+      var d3_element_prototype = this.Element.prototype, d3_element_setAttribute = d3_element_prototype.setAttribute, d3_element_setAttributeNS = d3_element_prototype.setAttributeNS, d3_style_prototype = this.CSSStyleDeclaration.prototype, d3_style_setProperty = d3_style_prototype.setProperty;
+      d3_element_prototype.setAttribute = function(name, value) {
+        d3_element_setAttribute.call(this, name, value + "");
+      };
+      d3_element_prototype.setAttributeNS = function(space, local, value) {
+        d3_element_setAttributeNS.call(this, space, local, value + "");
+      };
+      d3_style_prototype.setProperty = function(name, value, priority) {
+        d3_style_setProperty.call(this, name, value + "", priority);
+      };
+    }
+  }
+  d3.ascending = d3_ascending;
+  function d3_ascending(a, b) {
+    return a < b ? -1 : a > b ? 1 : a >= b ? 0 : NaN;
+  }
+  d3.descending = function(a, b) {
+    return b < a ? -1 : b > a ? 1 : b >= a ? 0 : NaN;
+  };
+  d3.min = function(array, f) {
+    var i = -1, n = array.length, a, b;
+    if (arguments.length === 1) {
+      while (++i < n) if ((b = array[i]) != null && b >= b) {
+        a = b;
+        break;
+      }
+      while (++i < n) if ((b = array[i]) != null && a > b) a = b;
+    } else {
+      while (++i < n) if ((b = f.call(array, array[i], i)) != null && b >= b) {
+        a = b;
+        break;
+      }
+      while (++i < n) if ((b = f.call(array, array[i], i)) != null && a > b) a = b;
+    }
+    return a;
+  };
+  d3.max = function(array, f) {
+    var i = -1, n = array.length, a, b;
+    if (arguments.length === 1) {
+      while (++i < n) if ((b = array[i]) != null && b >= b) {
+        a = b;
+        break;
+      }
+      while (++i < n) if ((b = array[i]) != null && b > a) a = b;
+    } else {
+      while (++i < n) if ((b = f.call(array, array[i], i)) != null && b >= b) {
+        a = b;
+        break;
+      }
+      while (++i < n) if ((b = f.call(array, array[i], i)) != null && b > a) a = b;
+    }
+    return a;
+  };
+  d3.extent = function(array, f) {
+    var i = -1, n = array.length, a, b, c;
+    if (arguments.length === 1) {
+      while (++i < n) if ((b = array[i]) != null && b >= b) {
+        a = c = b;
+        break;
+      }
+      while (++i < n) if ((b = array[i]) != null) {
+        if (a > b) a = b;
+        if (c < b) c = b;
+      }
+    } else {
+      while (++i < n) if ((b = f.call(array, array[i], i)) != null && b >= b) {
+        a = c = b;
+        break;
+      }
+      while (++i < n) if ((b = f.call(array, array[i], i)) != null) {
+        if (a > b) a = b;
+        if (c < b) c = b;
+      }
+    }
+    return [ a, c ];
+  };
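+  // Usage sketch (illustrative; not part of upstream d3 3.5.17). min, max and
+  // extent skip null/undefined/NaN values and accept an optional accessor:
+  //
+  //   d3.min([3, 1, 2])                                       // 1
+  //   d3.max([{v: 3}, {v: 9}], function(d) { return d.v; })   // 9
+  //   d3.extent([3, null, 1, 2])                              // [1, 3]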
+  function d3_number(x) {
+    return x === null ? NaN : +x;
+  }
+  function d3_numeric(x) {
+    return !isNaN(x);
+  }
+  d3.sum = function(array, f) {
+    var s = 0, n = array.length, a, i = -1;
+    if (arguments.length === 1) {
+      while (++i < n) if (d3_numeric(a = +array[i])) s += a;
+    } else {
+      while (++i < n) if (d3_numeric(a = +f.call(array, array[i], i))) s += a;
+    }
+    return s;
+  };
+  d3.mean = function(array, f) {
+    var s = 0, n = array.length, a, i = -1, j = n;
+    if (arguments.length === 1) {
+      while (++i < n) if (d3_numeric(a = d3_number(array[i]))) s += a; else --j;
+    } else {
+      while (++i < n) if (d3_numeric(a = d3_number(f.call(array, array[i], i)))) s += a; else --j;
+    }
+    if (j) return s / j;
+  };
+  d3.quantile = function(values, p) {
+    var H = (values.length - 1) * p + 1, h = Math.floor(H), v = +values[h - 1], e = H - h;
+    return e ? v + e * (values[h] - v) : v;
+  };
+  d3.median = function(array, f) {
+    var numbers = [], n = array.length, a, i = -1;
+    if (arguments.length === 1) {
+      while (++i < n) if (d3_numeric(a = d3_number(array[i]))) numbers.push(a);
+    } else {
+      while (++i < n) if (d3_numeric(a = d3_number(f.call(array, array[i], i)))) numbers.push(a);
+    }
+    if (numbers.length) return d3.quantile(numbers.sort(d3_ascending), .5);
+  };
+  d3.variance = function(array, f) {
+    var n = array.length, m = 0, a, d, s = 0, i = -1, j = 0;
+    if (arguments.length === 1) {
+      while (++i < n) {
+        if (d3_numeric(a = d3_number(array[i]))) {
+          d = a - m;
+          m += d / ++j;
+          s += d * (a - m);
+        }
+      }
+    } else {
+      while (++i < n) {
+        if (d3_numeric(a = d3_number(f.call(array, array[i], i)))) {
+          d = a - m;
+          m += d / ++j;
+          s += d * (a - m);
+        }
+      }
+    }
+    if (j > 1) return s / (j - 1);
+  };
+  d3.deviation = function() {
+    var v = d3.variance.apply(this, arguments);
+    return v ? Math.sqrt(v) : v;
+  };
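+  // Usage sketch (illustrative; not part of upstream d3 3.5.17):
+  //
+  //   var data = [2, 4, 4, 4, 5, 5, 7, 9];
+  //   d3.mean(data)        // 5
+  //   d3.median(data)      // 4.5
+  //   d3.variance(data)    // 32 / 7, the sample variance (n - 1 denominator)
+  //   d3.deviation(data)   // Math.sqrt(32 / 7), roughly 2.14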
+  function d3_bisector(compare) {
+    return {
+      left: function(a, x, lo, hi) {
+        if (arguments.length < 3) lo = 0;
+        if (arguments.length < 4) hi = a.length;
+        while (lo < hi) {
+          var mid = lo + hi >>> 1;
+          if (compare(a[mid], x) < 0) lo = mid + 1; else hi = mid;
+        }
+        return lo;
+      },
+      right: function(a, x, lo, hi) {
+        if (arguments.length < 3) lo = 0;
+        if (arguments.length < 4) hi = a.length;
+        while (lo < hi) {
+          var mid = lo + hi >>> 1;
+          if (compare(a[mid], x) > 0) hi = mid; else lo = mid + 1;
+        }
+        return lo;
+      }
+    };
+  }
+  var d3_bisect = d3_bisector(d3_ascending);
+  d3.bisectLeft = d3_bisect.left;
+  d3.bisect = d3.bisectRight = d3_bisect.right;
+  d3.bisector = function(f) {
+    return d3_bisector(f.length === 1 ? function(d, x) {
+      return d3_ascending(f(d), x);
+    } : f);
+  };
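+  // Usage sketch (illustrative; not part of upstream d3 3.5.17). bisect finds
+  // the insertion point that keeps a sorted array sorted; a custom bisector
+  // does the same through an accessor:
+  //
+  //   d3.bisect([1, 3, 5, 7], 4)   // 2
+  //   var bisectDate = d3.bisector(function(d) { return d.date; }).left;
+  //   // bisectDate(sortedRows, someDate) -> index into sortedRows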
+  d3.shuffle = function(array, i0, i1) {
+    if ((m = arguments.length) < 3) {
+      i1 = array.length;
+      if (m < 2) i0 = 0;
+    }
+    var m = i1 - i0, t, i;
+    while (m) {
+      i = Math.random() * m-- | 0;
+      t = array[m + i0], array[m + i0] = array[i + i0], array[i + i0] = t;
+    }
+    return array;
+  };
+  d3.permute = function(array, indexes) {
+    var i = indexes.length, permutes = new Array(i);
+    while (i--) permutes[i] = array[indexes[i]];
+    return permutes;
+  };
+  d3.pairs = function(array) {
+    var i = 0, n = array.length - 1, p0, p1 = array[0], pairs = new Array(n < 0 ? 0 : n);
+    while (i < n) pairs[i] = [ p0 = p1, p1 = array[++i] ];
+    return pairs;
+  };
+  d3.transpose = function(matrix) {
+    if (!(n = matrix.length)) return [];
+    for (var i = -1, m = d3.min(matrix, d3_transposeLength), transpose = new Array(m); ++i < m; ) {
+      for (var j = -1, n, row = transpose[i] = new Array(n); ++j < n; ) {
+        row[j] = matrix[j][i];
+      }
+    }
+    return transpose;
+  };
+  function d3_transposeLength(d) {
+    return d.length;
+  }
+  d3.zip = function() {
+    return d3.transpose(arguments);
+  };
+  d3.keys = function(map) {
+    var keys = [];
+    for (var key in map) keys.push(key);
+    return keys;
+  };
+  d3.values = function(map) {
+    var values = [];
+    for (var key in map) values.push(map[key]);
+    return values;
+  };
+  d3.entries = function(map) {
+    var entries = [];
+    for (var key in map) entries.push({
+      key: key,
+      value: map[key]
+    });
+    return entries;
+  };
+  d3.merge = function(arrays) {
+    var n = arrays.length, m, i = -1, j = 0, merged, array;
+    while (++i < n) j += arrays[i].length;
+    merged = new Array(j);
+    while (--n >= 0) {
+      array = arrays[n];
+      m = array.length;
+      while (--m >= 0) {
+        merged[--j] = array[m];
+      }
+    }
+    return merged;
+  };
+  var abs = Math.abs;
+  d3.range = function(start, stop, step) {
+    if (arguments.length < 3) {
+      step = 1;
+      if (arguments.length < 2) {
+        stop = start;
+        start = 0;
+      }
+    }
+    if ((stop - start) / step === Infinity) throw new Error("infinite range");
+    var range = [], k = d3_range_integerScale(abs(step)), i = -1, j;
+    start *= k, stop *= k, step *= k;
+    if (step < 0) while ((j = start + step * ++i) > stop) range.push(j / k); else while ((j = start + step * ++i) < stop) range.push(j / k);
+    return range;
+  };
+  function d3_range_integerScale(x) {
+    var k = 1;
+    while (x * k % 1) k *= 10;
+    return k;
+  }
+  function d3_class(ctor, properties) {
+    for (var key in properties) {
+      Object.defineProperty(ctor.prototype, key, {
+        value: properties[key],
+        enumerable: false
+      });
+    }
+  }
+  d3.map = function(object, f) {
+    var map = new d3_Map();
+    if (object instanceof d3_Map) {
+      object.forEach(function(key, value) {
+        map.set(key, value);
+      });
+    } else if (Array.isArray(object)) {
+      var i = -1, n = object.length, o;
+      if (arguments.length === 1) while (++i < n) map.set(i, object[i]); else while (++i < n) map.set(f.call(object, o = object[i], i), o);
+    } else {
+      for (var key in object) map.set(key, object[key]);
+    }
+    return map;
+  };
+  function d3_Map() {
+    this._ = Object.create(null);
+  }
+  var d3_map_proto = "__proto__", d3_map_zero = "\x00";
+  d3_class(d3_Map, {
+    has: d3_map_has,
+    get: function(key) {
+      return this._[d3_map_escape(key)];
+    },
+    set: function(key, value) {
+      return this._[d3_map_escape(key)] = value;
+    },
+    remove: d3_map_remove,
+    keys: d3_map_keys,
+    values: function() {
+      var values = [];
+      for (var key in this._) values.push(this._[key]);
+      return values;
+    },
+    entries: function() {
+      var entries = [];
+      for (var key in this._) entries.push({
+        key: d3_map_unescape(key),
+        value: this._[key]
+      });
+      return entries;
+    },
+    size: d3_map_size,
+    empty: d3_map_empty,
+    forEach: function(f) {
+      for (var key in this._) f.call(this, d3_map_unescape(key), this._[key]);
+    }
+  });
+  function d3_map_escape(key) {
+    return (key += "") === d3_map_proto || key[0] === d3_map_zero ? d3_map_zero + key : key;
+  }
+  function d3_map_unescape(key) {
+    return (key += "")[0] === d3_map_zero ? key.slice(1) : key;
+  }
+  function d3_map_has(key) {
+    return d3_map_escape(key) in this._;
+  }
+  function d3_map_remove(key) {
+    return (key = d3_map_escape(key)) in this._ && delete this._[key];
+  }
+  function d3_map_keys() {
+    var keys = [];
+    for (var key in this._) keys.push(d3_map_unescape(key));
+    return keys;
+  }
+  function d3_map_size() {
+    var size = 0;
+    for (var key in this._) ++size;
+    return size;
+  }
+  function d3_map_empty() {
+    for (var key in this._) return false;
+    return true;
+  }
+  d3.nest = function() {
+    var nest = {}, keys = [], sortKeys = [], sortValues, rollup;
+    function map(mapType, array, depth) {
+      if (depth >= keys.length) return rollup ? rollup.call(nest, array) : sortValues ? array.sort(sortValues) : array;
+      var i = -1, n = array.length, key = keys[depth++], keyValue, object, setter, valuesByKey = new d3_Map(), values;
+      while (++i < n) {
+        if (values = valuesByKey.get(keyValue = key(object = array[i]))) {
+          values.push(object);
+        } else {
+          valuesByKey.set(keyValue, [ object ]);
+        }
+      }
+      if (mapType) {
+        object = mapType();
+        setter = function(keyValue, values) {
+          object.set(keyValue, map(mapType, values, depth));
+        };
+      } else {
+        object = {};
+        setter = function(keyValue, values) {
+          object[keyValue] = map(mapType, values, depth);
+        };
+      }
+      valuesByKey.forEach(setter);
+      return object;
+    }
+    function entries(map, depth) {
+      if (depth >= keys.length) return map;
+      var array = [], sortKey = sortKeys[depth++];
+      map.forEach(function(key, keyMap) {
+        array.push({
+          key: key,
+          values: entries(keyMap, depth)
+        });
+      });
+      return sortKey ? array.sort(function(a, b) {
+        return sortKey(a.key, b.key);
+      }) : array;
+    }
+    nest.map = function(array, mapType) {
+      return map(mapType, array, 0);
+    };
+    nest.entries = function(array) {
+      return entries(map(d3.map, array, 0), 0);
+    };
+    nest.key = function(d) {
+      keys.push(d);
+      return nest;
+    };
+    nest.sortKeys = function(order) {
+      sortKeys[keys.length - 1] = order;
+      return nest;
+    };
+    nest.sortValues = function(order) {
+      sortValues = order;
+      return nest;
+    };
+    nest.rollup = function(f) {
+      rollup = f;
+      return nest;
+    };
+    return nest;
+  };
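+  // Usage sketch (illustrative; not part of upstream d3 3.5.17). nest groups
+  // an array by key functions, optionally collapsing each group with rollup:
+  //
+  //   var rows = [{year: 2015, n: 1}, {year: 2016, n: 2}, {year: 2016, n: 3}];
+  //   d3.nest()
+  //       .key(function(d) { return d.year; })
+  //       .rollup(function(leaves) { return leaves.length; })
+  //       .map(rows);   // {2015: 1, 2016: 2}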
+  d3.set = function(array) {
+    var set = new d3_Set();
+    if (array) for (var i = 0, n = array.length; i < n; ++i) set.add(array[i]);
+    return set;
+  };
+  function d3_Set() {
+    this._ = Object.create(null);
+  }
+  d3_class(d3_Set, {
+    has: d3_map_has,
+    add: function(key) {
+      this._[d3_map_escape(key += "")] = true;
+      return key;
+    },
+    remove: d3_map_remove,
+    values: d3_map_keys,
+    size: d3_map_size,
+    empty: d3_map_empty,
+    forEach: function(f) {
+      for (var key in this._) f.call(this, d3_map_unescape(key));
+    }
+  });
+  d3.behavior = {};
+  function d3_identity(d) {
+    return d;
+  }
+  d3.rebind = function(target, source) {
+    var i = 1, n = arguments.length, method;
+    while (++i < n) target[method = arguments[i]] = d3_rebind(target, source, source[method]);
+    return target;
+  };
+  function d3_rebind(target, source, method) {
+    return function() {
+      var value = method.apply(source, arguments);
+      return value === source ? target : value;
+    };
+  }
+  function d3_vendorSymbol(object, name) {
+    if (name in object) return name;
+    name = name.charAt(0).toUpperCase() + name.slice(1);
+    for (var i = 0, n = d3_vendorPrefixes.length; i < n; ++i) {
+      var prefixName = d3_vendorPrefixes[i] + name;
+      if (prefixName in object) return prefixName;
+    }
+  }
+  var d3_vendorPrefixes = [ "webkit", "ms", "moz", "Moz", "o", "O" ];
+  function d3_noop() {}
+  d3.dispatch = function() {
+    var dispatch = new d3_dispatch(), i = -1, n = arguments.length;
+    while (++i < n) dispatch[arguments[i]] = d3_dispatch_event(dispatch);
+    return dispatch;
+  };
+  function d3_dispatch() {}
+  d3_dispatch.prototype.on = function(type, listener) {
+    var i = type.indexOf("."), name = "";
+    if (i >= 0) {
+      name = type.slice(i + 1);
+      type = type.slice(0, i);
+    }
+    if (type) return arguments.length < 2 ? this[type].on(name) : this[type].on(name, listener);
+    if (arguments.length === 2) {
+      if (listener == null) for (type in this) {
+        if (this.hasOwnProperty(type)) this[type].on(name, null);
+      }
+      return this;
+    }
+  };
+  function d3_dispatch_event(dispatch) {
+    var listeners = [], listenerByName = new d3_Map();
+    function event() {
+      var z = listeners, i = -1, n = z.length, l;
+      while (++i < n) if (l = z[i].on) l.apply(this, arguments);
+      return dispatch;
+    }
+    event.on = function(name, listener) {
+      var l = listenerByName.get(name), i;
+      if (arguments.length < 2) return l && l.on;
+      if (l) {
+        l.on = null;
+        listeners = listeners.slice(0, i = listeners.indexOf(l)).concat(listeners.slice(i + 1));
+        listenerByName.remove(name);
+      }
+      if (listener) listeners.push(listenerByName.set(name, {
+        on: listener
+      }));
+      return dispatch;
+    };
+    return event;
+  }
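+  // Usage sketch (illustrative; not part of upstream d3 3.5.17). dispatch
+  // creates one callable per event type; dotted names register namespaced
+  // listeners:
+  //
+  //   var dispatch = d3.dispatch("load", "change");
+  //   dispatch.on("change.logger", function(x) { console.log("changed", x); });
+  //   dispatch.change(42);   // invokes every "change" listener with 42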
+  d3.event = null;
+  function d3_eventPreventDefault() {
+    d3.event.preventDefault();
+  }
+  function d3_eventSource() {
+    var e = d3.event, s;
+    while (s = e.sourceEvent) e = s;
+    return e;
+  }
+  function d3_eventDispatch(target) {
+    var dispatch = new d3_dispatch(), i = 0, n = arguments.length;
+    while (++i < n) dispatch[arguments[i]] = d3_dispatch_event(dispatch);
+    dispatch.of = function(thiz, argumentz) {
+      return function(e1) {
+        try {
+          var e0 = e1.sourceEvent = d3.event;
+          e1.target = target;
+          d3.event = e1;
+          dispatch[e1.type].apply(thiz, argumentz);
+        } finally {
+          d3.event = e0;
+        }
+      };
+    };
+    return dispatch;
+  }
+  d3.requote = function(s) {
+    return s.replace(d3_requote_re, "\\$&");
+  };
+  var d3_requote_re = /[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g;
+  var d3_subclass = {}.__proto__ ? function(object, prototype) {
+    object.__proto__ = prototype;
+  } : function(object, prototype) {
+    for (var property in prototype) object[property] = prototype[property];
+  };
+  function d3_selection(groups) {
+    d3_subclass(groups, d3_selectionPrototype);
+    return groups;
+  }
+  var d3_select = function(s, n) {
+    return n.querySelector(s);
+  }, d3_selectAll = function(s, n) {
+    return n.querySelectorAll(s);
+  }, d3_selectMatches = function(n, s) {
+    var d3_selectMatcher = n.matches || n[d3_vendorSymbol(n, "matchesSelector")];
+    d3_selectMatches = function(n, s) {
+      return d3_selectMatcher.call(n, s);
+    };
+    return d3_selectMatches(n, s);
+  };
+  if (typeof Sizzle === "function") {
+    d3_select = function(s, n) {
+      return Sizzle(s, n)[0] || null;
+    };
+    d3_selectAll = Sizzle;
+    d3_selectMatches = Sizzle.matchesSelector;
+  }
+  d3.selection = function() {
+    return d3.select(d3_document.documentElement);
+  };
+  var d3_selectionPrototype = d3.selection.prototype = [];
+  d3_selectionPrototype.select = function(selector) {
+    var subgroups = [], subgroup, subnode, group, node;
+    selector = d3_selection_selector(selector);
+    for (var j = -1, m = this.length; ++j < m; ) {
+      subgroups.push(subgroup = []);
+      subgroup.parentNode = (group = this[j]).parentNode;
+      for (var i = -1, n = group.length; ++i < n; ) {
+        if (node = group[i]) {
+          subgroup.push(subnode = selector.call(node, node.__data__, i, j));
+          if (subnode && "__data__" in node) subnode.__data__ = node.__data__;
+        } else {
+          subgroup.push(null);
+        }
+      }
+    }
+    return d3_selection(subgroups);
+  };
+  function d3_selection_selector(selector) {
+    return typeof selector === "function" ? selector : function() {
+      return d3_select(selector, this);
+    };
+  }
+  d3_selectionPrototype.selectAll = function(selector) {
+    var subgroups = [], subgroup, node;
+    selector = d3_selection_selectorAll(selector);
+    for (var j = -1, m = this.length; ++j < m; ) {
+      for (var group = this[j], i = -1, n = group.length; ++i < n; ) {
+        if (node = group[i]) {
+          subgroups.push(subgroup = d3_array(selector.call(node, node.__data__, i, j)));
+          subgroup.parentNode = node;
+        }
+      }
+    }
+    return d3_selection(subgroups);
+  };
+  function d3_selection_selectorAll(selector) {
+    return typeof selector === "function" ? selector : function() {
+      return d3_selectAll(selector, this);
+    };
+  }
+  var d3_nsXhtml = "http://www.w3.org/1999/xhtml";
+  var d3_nsPrefix = {
+    svg: "http://www.w3.org/2000/svg",
+    xhtml: d3_nsXhtml,
+    xlink: "http://www.w3.org/1999/xlink",
+    xml: "http://www.w3.org/XML/1998/namespace",
+    xmlns: "http://www.w3.org/2000/xmlns/"
+  };
+  d3.ns = {
+    prefix: d3_nsPrefix,
+    qualify: function(name) {
+      var i = name.indexOf(":"), prefix = name;
+      if (i >= 0 && (prefix = name.slice(0, i)) !== "xmlns") name = name.slice(i + 1);
+      return d3_nsPrefix.hasOwnProperty(prefix) ? {
+        space: d3_nsPrefix[prefix],
+        local: name
+      } : name;
+    }
+  };
+  d3_selectionPrototype.attr = function(name, value) {
+    if (arguments.length < 2) {
+      if (typeof name === "string") {
+        var node = this.node();
+        name = d3.ns.qualify(name);
+        return name.local ? node.getAttributeNS(name.space, name.local) : node.getAttribute(name);
+      }
+      for (value in name) this.each(d3_selection_attr(value, name[value]));
+      return this;
+    }
+    return this.each(d3_selection_attr(name, value));
+  };
+  function d3_selection_attr(name, value) {
+    name = d3.ns.qualify(name);
+    function attrNull() {
+      this.removeAttribute(name);
+    }
+    function attrNullNS() {
+      this.removeAttributeNS(name.space, name.local);
+    }
+    function attrConstant() {
+      this.setAttribute(name, value);
+    }
+    function attrConstantNS() {
+      this.setAttributeNS(name.space, name.local, value);
+    }
+    function attrFunction() {
+      var x = value.apply(this, arguments);
+      if (x == null) this.removeAttribute(name); else this.setAttribute(name, x);
+    }
+    function attrFunctionNS() {
+      var x = value.apply(this, arguments);
+      if (x == null) this.removeAttributeNS(name.space, name.local); else this.setAttributeNS(name.space, name.local, x);
+    }
+    return value == null ? name.local ? attrNullNS : attrNull : typeof value === "function" ? name.local ? attrFunctionNS : attrFunction : name.local ? attrConstantNS : attrConstant;
+  }
+  function d3_collapse(s) {
+    return s.trim().replace(/\s+/g, " ");
+  }
+  d3_selectionPrototype.classed = function(name, value) {
+    if (arguments.length < 2) {
+      if (typeof name === "string") {
+        var node = this.node(), n = (name = d3_selection_classes(name)).length, i = -1;
+        if (value = node.classList) {
+          while (++i < n) if (!value.contains(name[i])) return false;
+        } else {
+          value = node.getAttribute("class");
+          while (++i < n) if (!d3_selection_classedRe(name[i]).test(value)) return false;
+        }
+        return true;
+      }
+      for (value in name) this.each(d3_selection_classed(value, name[value]));
+      return this;
+    }
+    return this.each(d3_selection_classed(name, value));
+  };
+  function d3_selection_classedRe(name) {
+    return new RegExp("(?:^|\\s+)" + d3.requote(name) + "(?:\\s+|$)", "g");
+  }
+  function d3_selection_classes(name) {
+    return (name + "").trim().split(/^|\s+/);
+  }
+  function d3_selection_classed(name, value) {
+    name = d3_selection_classes(name).map(d3_selection_classedName);
+    var n = name.length;
+    function classedConstant() {
+      var i = -1;
+      while (++i < n) name[i](this, value);
+    }
+    function classedFunction() {
+      var i = -1, x = value.apply(this, arguments);
+      while (++i < n) name[i](this, x);
+    }
+    return typeof value === "function" ? classedFunction : classedConstant;
+  }
+  function d3_selection_classedName(name) {
+    var re = d3_selection_classedRe(name);
+    return function(node, value) {
+      if (c = node.classList) return value ? c.add(name) : c.remove(name);
+      var c = node.getAttribute("class") || "";
+      if (value) {
+        re.lastIndex = 0;
+        if (!re.test(c)) node.setAttribute("class", d3_collapse(c + " " + name));
+      } else {
+        node.setAttribute("class", d3_collapse(c.replace(re, " ")));
+      }
+    };
+  }
+  d3_selectionPrototype.style = function(name, value, priority) {
+    var n = arguments.length;
+    if (n < 3) {
+      if (typeof name !== "string") {
+        if (n < 2) value = "";
+        for (priority in name) this.each(d3_selection_style(priority, name[priority], value));
+        return this;
+      }
+      if (n < 2) {
+        var node = this.node();
+        return d3_window(node).getComputedStyle(node, null).getPropertyValue(name);
+      }
+      priority = "";
+    }
+    return this.each(d3_selection_style(name, value, priority));
+  };
+  function d3_selection_style(name, value, priority) {
+    function styleNull() {
+      this.style.removeProperty(name);
+    }
+    function styleConstant() {
+      this.style.setProperty(name, value, priority);
+    }
+    function styleFunction() {
+      var x = value.apply(this, arguments);
+      if (x == null) this.style.removeProperty(name); else this.style.setProperty(name, x, priority);
+    }
+    return value == null ? styleNull : typeof value === "function" ? styleFunction : styleConstant;
+  }
+  d3_selectionPrototype.property = function(name, value) {
+    if (arguments.length < 2) {
+      if (typeof name === "string") return this.node()[name];
+      for (value in name) this.each(d3_selection_property(value, name[value]));
+      return this;
+    }
+    return this.each(d3_selection_property(name, value));
+  };
+  function d3_selection_property(name, value) {
+    function propertyNull() {
+      delete this[name];
+    }
+    function propertyConstant() {
+      this[name] = value;
+    }
+    function propertyFunction() {
+      var x = value.apply(this, arguments);
+      if (x == null) delete this[name]; else this[name] = x;
+    }
+    return value == null ? propertyNull : typeof value === "function" ? propertyFunction : propertyConstant;
+  }
+  d3_selectionPrototype.text = function(value) {
+    return arguments.length ? this.each(typeof value === "function" ? function() {
+      var v = value.apply(this, arguments);
+      this.textContent = v == null ? "" : v;
+    } : value == null ? function() {
+      this.textContent = "";
+    } : function() {
+      this.textContent = value;
+    }) : this.node().textContent;
+  };
+  d3_selectionPrototype.html = function(value) {
+    return arguments.length ? this.each(typeof value === "function" ? function() {
+      var v = value.apply(this, arguments);
+      this.innerHTML = v == null ? "" : v;
+    } : value == null ? function() {
+      this.innerHTML = "";
+    } : function() {
+      this.innerHTML = value;
+    }) : this.node().innerHTML;
+  };
+  d3_selectionPrototype.append = function(name) {
+    name = d3_selection_creator(name);
+    return this.select(function() {
+      return this.appendChild(name.apply(this, arguments));
+    });
+  };
+  function d3_selection_creator(name) {
+    function create() {
+      var document = this.ownerDocument, namespace = this.namespaceURI;
+      return namespace === d3_nsXhtml && document.documentElement.namespaceURI === d3_nsXhtml ? document.createElement(name) : document.createElementNS(namespace, name);
+    }
+    function createNS() {
+      return this.ownerDocument.createElementNS(name.space, name.local);
+    }
+    return typeof name === "function" ? name : (name = d3.ns.qualify(name)).local ? createNS : create;
+  }
+  d3_selectionPrototype.insert = function(name, before) {
+    name = d3_selection_creator(name);
+    before = d3_selection_selector(before);
+    return this.select(function() {
+      return this.insertBefore(name.apply(this, arguments), before.apply(this, arguments) || null);
+    });
+  };
+  d3_selectionPrototype.remove = function() {
+    return this.each(d3_selectionRemove);
+  };
+  function d3_selectionRemove() {
+    var parent = this.parentNode;
+    if (parent) parent.removeChild(this);
+  }
+  d3_selectionPrototype.data = function(value, key) {
+    var i = -1, n = this.length, group, node;
+    if (!arguments.length) {
+      value = new Array(n = (group = this[0]).length);
+      while (++i < n) {
+        if (node = group[i]) {
+          value[i] = node.__data__;
+        }
+      }
+      return value;
+    }
+    function bind(group, groupData) {
+      var i, n = group.length, m = groupData.length, n0 = Math.min(n, m), updateNodes = new Array(m), enterNodes = new Array(m), exitNodes = new Array(n), node, nodeData;
+      if (key) {
+        var nodeByKeyValue = new d3_Map(), keyValues = new Array(n), keyValue;
+        for (i = -1; ++i < n; ) {
+          if (node = group[i]) {
+            if (nodeByKeyValue.has(keyValue = key.call(node, node.__data__, i))) {
+              exitNodes[i] = node;
+            } else {
+              nodeByKeyValue.set(keyValue, node);
+            }
+            keyValues[i] = keyValue;
+          }
+        }
+        for (i = -1; ++i < m; ) {
+          if (!(node = nodeByKeyValue.get(keyValue = key.call(groupData, nodeData = groupData[i], i)))) {
+            enterNodes[i] = d3_selection_dataNode(nodeData);
+          } else if (node !== true) {
+            updateNodes[i] = node;
+            node.__data__ = nodeData;
+          }
+          nodeByKeyValue.set(keyValue, true);
+        }
+        for (i = -1; ++i < n; ) {
+          if (i in keyValues && nodeByKeyValue.get(keyValues[i]) !== true) {
+            exitNodes[i] = group[i];
+          }
+        }
+      } else {
+        for (i = -1; ++i < n0; ) {
+          node = group[i];
+          nodeData = groupData[i];
+          if (node) {
+            node.__data__ = nodeData;
+            updateNodes[i] = node;
+          } else {
+            enterNodes[i] = d3_selection_dataNode(nodeData);
+          }
+        }
+        for (;i < m; ++i) {
+          enterNodes[i] = d3_selection_dataNode(groupData[i]);
+        }
+        for (;i < n; ++i) {
+          exitNodes[i] = group[i];
+        }
+      }
+      enterNodes.update = updateNodes;
+      enterNodes.parentNode = updateNodes.parentNode = exitNodes.parentNode = group.parentNode;
+      enter.push(enterNodes);
+      update.push(updateNodes);
+      exit.push(exitNodes);
+    }
+    var enter = d3_selection_enter([]), update = d3_selection([]), exit = d3_selection([]);
+    if (typeof value === "function") {
+      while (++i < n) {
+        bind(group = this[i], value.call(group, group.parentNode.__data__, i));
+      }
+    } else {
+      while (++i < n) {
+        bind(group = this[i], value);
+      }
+    }
+    update.enter = function() {
+      return enter;
+    };
+    update.exit = function() {
+      return exit;
+    };
+    return update;
+  };
+  function d3_selection_dataNode(data) {
+    return {
+      __data__: data
+    };
+  }
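+  // Note: `data` above is D3's data join. Nodes paired with a datum land in
+  // the update selection, surplus data create placeholder objects (built by
+  // d3_selection_dataNode) in the enter selection, and unmatched nodes fall
+  // into the exit selection. A minimal usage sketch, assuming an existing
+  // <ul> element (names here are illustrative only):
+  //   var li = d3.select("ul").selectAll("li").data([4, 8, 15]);
+  //   li.enter().append("li");            // create nodes for surplus data
+  //   li.text(function(d) { return d; }); // update all bound nodes
+  //   li.exit().remove();                 // drop nodes without data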
+  d3_selectionPrototype.datum = function(value) {
+    return arguments.length ? this.property("__data__", value) : this.property("__data__");
+  };
+  d3_selectionPrototype.filter = function(filter) {
+    var subgroups = [], subgroup, group, node;
+    if (typeof filter !== "function") filter = d3_selection_filter(filter);
+    for (var j = 0, m = this.length; j < m; j++) {
+      subgroups.push(subgroup = []);
+      subgroup.parentNode = (group = this[j]).parentNode;
+      for (var i = 0, n = group.length; i < n; i++) {
+        if ((node = group[i]) && filter.call(node, node.__data__, i, j)) {
+          subgroup.push(node);
+        }
+      }
+    }
+    return d3_selection(subgroups);
+  };
+  function d3_selection_filter(selector) {
+    return function() {
+      return d3_selectMatches(this, selector);
+    };
+  }
+  d3_selectionPrototype.order = function() {
+    for (var j = -1, m = this.length; ++j < m; ) {
+      for (var group = this[j], i = group.length - 1, next = group[i], node; --i >= 0; ) {
+        if (node = group[i]) {
+          if (next && next !== node.nextSibling) next.parentNode.insertBefore(node, next);
+          next = node;
+        }
+      }
+    }
+    return this;
+  };
+  d3_selectionPrototype.sort = function(comparator) {
+    comparator = d3_selection_sortComparator.apply(this, arguments);
+    for (var j = -1, m = this.length; ++j < m; ) this[j].sort(comparator);
+    return this.order();
+  };
+  function d3_selection_sortComparator(comparator) {
+    if (!arguments.length) comparator = d3_ascending;
+    return function(a, b) {
+      return a && b ? comparator(a.__data__, b.__data__) : !a - !b;
+    };
+  }
+  d3_selectionPrototype.each = function(callback) {
+    return d3_selection_each(this, function(node, i, j) {
+      callback.call(node, node.__data__, i, j);
+    });
+  };
+  function d3_selection_each(groups, callback) {
+    for (var j = 0, m = groups.length; j < m; j++) {
+      for (var group = groups[j], i = 0, n = group.length, node; i < n; i++) {
+        if (node = group[i]) callback(node, i, j);
+      }
+    }
+    return groups;
+  }
+  d3_selectionPrototype.call = function(callback) {
+    var args = d3_array(arguments);
+    callback.apply(args[0] = this, args);
+    return this;
+  };
+  d3_selectionPrototype.empty = function() {
+    return !this.node();
+  };
+  d3_selectionPrototype.node = function() {
+    for (var j = 0, m = this.length; j < m; j++) {
+      for (var group = this[j], i = 0, n = group.length; i < n; i++) {
+        var node = group[i];
+        if (node) return node;
+      }
+    }
+    return null;
+  };
+  d3_selectionPrototype.size = function() {
+    var n = 0;
+    d3_selection_each(this, function() {
+      ++n;
+    });
+    return n;
+  };
+  function d3_selection_enter(selection) {
+    d3_subclass(selection, d3_selection_enterPrototype);
+    return selection;
+  }
+  var d3_selection_enterPrototype = [];
+  d3.selection.enter = d3_selection_enter;
+  d3.selection.enter.prototype = d3_selection_enterPrototype;
+  d3_selection_enterPrototype.append = d3_selectionPrototype.append;
+  d3_selection_enterPrototype.empty = d3_selectionPrototype.empty;
+  d3_selection_enterPrototype.node = d3_selectionPrototype.node;
+  d3_selection_enterPrototype.call = d3_selectionPrototype.call;
+  d3_selection_enterPrototype.size = d3_selectionPrototype.size;
+  d3_selection_enterPrototype.select = function(selector) {
+    var subgroups = [], subgroup, subnode, upgroup, group, node;
+    for (var j = -1, m = this.length; ++j < m; ) {
+      upgroup = (group = this[j]).update;
+      subgroups.push(subgroup = []);
+      subgroup.parentNode = group.parentNode;
+      for (var i = -1, n = group.length; ++i < n; ) {
+        if (node = group[i]) {
+          subgroup.push(upgroup[i] = subnode = selector.call(group.parentNode, node.__data__, i, j));
+          subnode.__data__ = node.__data__;
+        } else {
+          subgroup.push(null);
+        }
+      }
+    }
+    return d3_selection(subgroups);
+  };
+  d3_selection_enterPrototype.insert = function(name, before) {
+    if (arguments.length < 2) before = d3_selection_enterInsertBefore(this);
+    return d3_selectionPrototype.insert.call(this, name, before);
+  };
+  function d3_selection_enterInsertBefore(enter) {
+    var i0, j0;
+    return function(d, i, j) {
+      var group = enter[j].update, n = group.length, node;
+      if (j != j0) j0 = j, i0 = 0;
+      if (i >= i0) i0 = i + 1;
+      while (!(node = group[i0]) && ++i0 < n) ;
+      return node;
+    };
+  }
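+  // d3_selection_enterInsertBefore keeps entering nodes in data order: for
+  // each entering node it scans the parallel update group for the next
+  // existing node and inserts before it, so enter().insert(name) does not
+  // simply append at the end of the parent.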
+  d3.select = function(node) {
+    var group;
+    if (typeof node === "string") {
+      group = [ d3_select(node, d3_document) ];
+      group.parentNode = d3_document.documentElement;
+    } else {
+      group = [ node ];
+      group.parentNode = d3_documentElement(node);
+    }
+    return d3_selection([ group ]);
+  };
+  d3.selectAll = function(nodes) {
+    var group;
+    if (typeof nodes === "string") {
+      group = d3_array(d3_selectAll(nodes, d3_document));
+      group.parentNode = d3_document.documentElement;
+    } else {
+      group = d3_array(nodes);
+      group.parentNode = null;
+    }
+    return d3_selection([ group ]);
+  };
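+  // Both entry points wrap DOM nodes in a single-group selection. A minimal
+  // sketch (assumes a document containing <p> elements; illustrative only):
+  //   d3.select("p").style("color", "red");     // first matching element
+  //   d3.selectAll("p").style("color", "blue"); // all matching elements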
+  d3_selectionPrototype.on = function(type, listener, capture) {
+    var n = arguments.length;
+    if (n < 3) {
+      if (typeof type !== "string") {
+        if (n < 2) listener = false;
+        for (capture in type) this.each(d3_selection_on(capture, type[capture], listener));
+        return this;
+      }
+      if (n < 2) return (n = this.node()["__on" + type]) && n._;
+      capture = false;
+    }
+    return this.each(d3_selection_on(type, listener, capture));
+  };
+  function d3_selection_on(type, listener, capture) {
+    var name = "__on" + type, i = type.indexOf("."), wrap = d3_selection_onListener;
+    if (i > 0) type = type.slice(0, i);
+    var filter = d3_selection_onFilters.get(type);
+    if (filter) type = filter, wrap = d3_selection_onFilter;
+    function onRemove() {
+      var l = this[name];
+      if (l) {
+        this.removeEventListener(type, l, l.$);
+        delete this[name];
+      }
+    }
+    function onAdd() {
+      var l = wrap(listener, d3_array(arguments));
+      onRemove.call(this);
+      this.addEventListener(type, this[name] = l, l.$ = capture);
+      l._ = listener;
+    }
+    function removeAll() {
+      var re = new RegExp("^__on([^.]+)" + d3.requote(type) + "$"), match;
+      for (var name in this) {
+        if (match = name.match(re)) {
+          var l = this[name];
+          this.removeEventListener(match[1], l, l.$);
+          delete this[name];
+        }
+      }
+    }
+    return i ? listener ? onAdd : onRemove : listener ? d3_noop : removeAll;
+  }
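+  // Listeners are stored on the node under "__on" plus the full typename
+  // (namespace included), so each (type, namespace) pair holds at most one
+  // listener. A sketch of the namespace forms this supports (the handler
+  // name f is illustrative):
+  //   sel.on("click.log", f);    // add f under namespace "log"
+  //   sel.on("click.log", null); // remove only that listener
+  //   sel.on(".log", null);      // removeAll: every type in namespace "log"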
+  var d3_selection_onFilters = d3.map({
+    mouseenter: "mouseover",
+    mouseleave: "mouseout"
+  });
+  if (d3_document) {
+    d3_selection_onFilters.forEach(function(k) {
+      if ("on" + k in d3_document) d3_selection_onFilters.remove(k);
+    });
+  }
+  function d3_selection_onListener(listener, argumentz) {
+    return function(e) {
+      var o = d3.event;
+      d3.event = e;
+      argumentz[0] = this.__data__;
+      try {
+        listener.apply(this, argumentz);
+      } finally {
+        d3.event = o;
+      }
+    };
+  }
+  function d3_selection_onFilter(listener, argumentz) {
+    var l = d3_selection_onListener(listener, argumentz);
+    return function(e) {
+      var target = this, related = e.relatedTarget;
+      if (!related || related !== target && !(related.compareDocumentPosition(target) & 8)) {
+        l.call(target, e);
+      }
+    };
+  }
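+  // Emulates mouseenter/mouseleave where the browser lacks them: the filter
+  // map rewrites them to mouseover/mouseout, and the wrapper above drops
+  // events whose relatedTarget is the element itself or one of its
+  // descendants (the `& 8` test checks DOCUMENT_POSITION_CONTAINS).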
+  var d3_event_dragSelect, d3_event_dragId = 0;
+  function d3_event_dragSuppress(node) {
+    var name = ".dragsuppress-" + ++d3_event_dragId, click = "click" + name, w = d3.select(d3_window(node)).on("touchmove" + name, d3_eventPreventDefault).on("dragstart" + name, d3_eventPreventDefault).on("selectstart" + name, d3_eventPreventDefault);
+    if (d3_event_dragSelect == null) {
+      d3_event_dragSelect = "onselectstart" in node ? false : d3_vendorSymbol(node.style, "userSelect");
+    }
+    if (d3_event_dragSelect) {
+      var style = d3_documentElement(node).style, select = style[d3_event_dragSelect];
+      style[d3_event_dragSelect] = "none";
+    }
+    return function(suppressClick) {
+      w.on(name, null);
+      if (d3_event_dragSelect) style[d3_event_dragSelect] = select;
+      if (suppressClick) {
+        var off = function() {
+          w.on(click, null);
+        };
+        w.on(click, function() {
+          d3_eventPreventDefault();
+          off();
+        }, true);
+        setTimeout(off, 0);
+      }
+    };
+  }
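+  // During a drag gesture this suppresses native text selection, dragstart
+  // and selectstart on the window; the returned restore function undoes the
+  // style override and, when asked, also swallows the immediately following
+  // click event.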
+  d3.mouse = function(container) {
+    return d3_mousePoint(container, d3_eventSource());
+  };
+  var d3_mouse_bug44083 = this.navigator && /WebKit/.test(this.navigator.userAgent) ? -1 : 0;
+  function d3_mousePoint(container, e) {
+    if (e.changedTouches) e = e.changedTouches[0];
+    var svg = container.ownerSVGElement || container;
+    if (svg.createSVGPoint) {
+      var point = svg.createSVGPoint();
+      if (d3_mouse_bug44083 < 0) {
+        var window = d3_window(container);
+        if (window.scrollX || window.scrollY) {
+          svg = d3.select("body").append("svg").style({
+            position: "absolute",
+            top: 0,
+            left: 0,
+            margin: 0,
+            padding: 0,
+            border: "none"
+          }, "important");
+          var ctm = svg[0][0].getScreenCTM();
+          d3_mouse_bug44083 = !(ctm.f || ctm.e);
+          svg.remove();
+        }
+      }
+      if (d3_mouse_bug44083) point.x = e.pageX, point.y = e.pageY;
+      else point.x = e.clientX, point.y = e.clientY;
+      point = point.matrixTransform(container.getScreenCTM().inverse());
+      return [ point.x, point.y ];
+    }
+    var rect = container.getBoundingClientRect();
+    return [ e.clientX - rect.left - container.clientLeft, e.clientY - rect.top - container.clientTop ];
+  }
+  d3.touch = function(container, touches, identifier) {
+    if (arguments.length < 3) identifier = touches, touches = d3_eventSource().changedTouches;
+    if (touches) for (var i = 0, n = touches.length, touch; i < n; ++i) {
+      if ((touch = touches[i]).identifier === identifier) {
+        return d3_mousePoint(container, touch);
+      }
+    }
+  };
+  d3.behavior.drag = function() {
+    var event = d3_eventDispatch(drag, "drag", "dragstart", "dragend"), origin = null, mousedown = dragstart(d3_noop, d3.mouse, d3_window, "mousemove", "mouseup"), touchstart = dragstart(d3_behavior_dragTouchId, d3.touch, d3_identity, "touchmove", "touchend");
+    function drag() {
+      this.on("mousedown.drag", mousedown).on("touchstart.drag", touchstart);
+    }
+    function dragstart(id, position, subject, move, end) {
+      return function() {
+        var that = this, target = d3.event.target.correspondingElement || d3.event.target, parent = that.parentNode, dispatch = event.of(that, arguments), dragged = 0, dragId = id(), dragName = ".drag" + (dragId == null ? "" : "-" + dragId), dragOffset, dragSubject = d3.select(subject(target)).on(move + dragName, moved).on(end + dragName, ended), dragRestore = d3_event_dragSuppress(target), position0 = position(parent, dragId);
+        if (origin) {
+          dragOffset = origin.apply(that, arguments);
+          dragOffset = [ dragOffset.x - position0[0], dragOffset.y - position0[1] ];
+        } else {
+          dragOffset = [ 0, 0 ];
+        }
+        dispatch({
+          type: "dragstart"
+        });
+        function moved() {
+          var position1 = position(parent, dragId), dx, dy;
+          if (!position1) return;
+          dx = position1[0] - position0[0];
+          dy = position1[1] - position0[1];
+          dragged |= dx | dy;
+          position0 = position1;
+          dispatch({
+            type: "drag",
+            x: position1[0] + dragOffset[0],
+            y: position1[1] + dragOffset[1],
+            dx: dx,
+            dy: dy
+          });
+        }
+        function ended() {
+          if (!position(parent, dragId)) return;
+          dragSubject.on(move + dragName, null).on(end + dragName, null);
+          dragRestore(dragged);
+          dispatch({
+            type: "dragend"
+          });
+        }
+      };
+    }
+    drag.origin = function(x) {
+      if (!arguments.length) return origin;
+      origin = x;
+      return drag;
+    };
+    return d3.rebind(drag, event, "on");
+  };
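+  // A minimal drag-behavior sketch, assuming datums with x/y fields and a
+  // selection of SVG circles named `nodes` (both illustrative, not defined
+  // in this file):
+  //   var drag = d3.behavior.drag()
+  //       .origin(function(d) { return d; })
+  //       .on("drag", function(d) {
+  //         d.x = d3.event.x; d.y = d3.event.y;
+  //         d3.select(this).attr("cx", d.x).attr("cy", d.y);
+  //       });
+  //   nodes.call(drag);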
+  function d3_behavior_dragTouchId() {
+    return d3.event.changedTouches[0].identifier;
+  }
+  d3.touches = function(container, touches) {
+    if (arguments.length < 2) touches = d3_eventSource().touches;
+    return touches ? d3_array(touches).map(function(touch) {
+      var point = d3_mousePoint(container, touch);
+      point.identifier = touch.identifier;
+      return point;
+    }) : [];
+  };
+  var ε = 1e-6, ε2 = ε * ε, π = Math.PI, τ = 2 * π, τε = τ - ε, halfπ = π / 2, d3_radians = π / 180, d3_degrees = 180 / π;
+  function d3_sgn(x) {
+    return x > 0 ? 1 : x < 0 ? -1 : 0;
+  }
+  function d3_cross2d(a, b, c) {
+    return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0]);
+  }
+  function d3_acos(x) {
+    return x > 1 ? 0 : x < -1 ? π : Math.acos(x);
+  }
+  function d3_asin(x) {
+    return x > 1 ? halfπ : x < -1 ? -halfπ : Math.asin(x);
+  }
+  function d3_sinh(x) {
+    return ((x = Math.exp(x)) - 1 / x) / 2;
+  }
+  function d3_cosh(x) {
+    return ((x = Math.exp(x)) + 1 / x) / 2;
+  }
+  function d3_tanh(x) {
+    return ((x = Math.exp(2 * x)) - 1) / (x + 1);
+  }
+  function d3_haversin(x) {
+    return (x = Math.sin(x / 2)) * x;
+  }
+  var ρ = Math.SQRT2, ρ2 = 2, ρ4 = 4;
+  d3.interpolateZoom = function(p0, p1) {
+    var ux0 = p0[0], uy0 = p0[1], w0 = p0[2], ux1 = p1[0], uy1 = p1[1], w1 = p1[2], dx = ux1 - ux0, dy = uy1 - uy0, d2 = dx * dx + dy * dy, i, S;
+    if (d2 < ε2) {
+      S = Math.log(w1 / w0) / ρ;
+      i = function(t) {
+        return [ ux0 + t * dx, uy0 + t * dy, w0 * Math.exp(ρ * t * S) ];
+      };
+    } else {
+      var d1 = Math.sqrt(d2), b0 = (w1 * w1 - w0 * w0 + ρ4 * d2) / (2 * w0 * ρ2 * d1), b1 = (w1 * w1 - w0 * w0 - ρ4 * d2) / (2 * w1 * ρ2 * d1), r0 = Math.log(Math.sqrt(b0 * b0 + 1) - b0), r1 = Math.log(Math.sqrt(b1 * b1 + 1) - b1);
+      S = (r1 - r0) / ρ;
+      i = function(t) {
+        var s = t * S, coshr0 = d3_cosh(r0), u = w0 / (ρ2 * d1) * (coshr0 * d3_tanh(ρ * s + r0) - d3_sinh(r0));
+        return [ ux0 + u * dx, uy0 + u * dy, w0 * coshr0 / d3_cosh(ρ * s + r0) ];
+      };
+    }
+    i.duration = S * 1e3;
+    return i;
+  };
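+  // Smooth zoom interpolation after van Wijk & Nuij, "Smooth and efficient
+  // zooming and panning" (2003). p0 and p1 are [cx, cy, width] views; the
+  // first branch handles the degenerate case of (nearly) pure zooming, and
+  // ρ = √2 balances zooming against panning. i.duration is in milliseconds.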
+  d3.behavior.zoom = function() {
+    var view = {
+      x: 0,
+      y: 0,
+      k: 1
+    }, translate0, center0, center, size = [ 960, 500 ], scaleExtent = d3_behavior_zoomInfinity, duration = 250, zooming = 0, mousedown = "mousedown.zoom", mousemove = "mousemove.zoom", mouseup = "mouseup.zoom", mousewheelTimer, touchstart = "touchstart.zoom", touchtime, event = d3_eventDispatch(zoom, "zoomstart", "zoom", "zoomend"), x0, x1, y0, y1;
+    if (!d3_behavior_zoomWheel) {
+      d3_behavior_zoomWheel = "onwheel" in d3_document ? (d3_behavior_zoomDelta = function() {
+        return -d3.event.deltaY * (d3.event.deltaMode ? 120 : 1);
+      }, "wheel") : "onmousewheel" in d3_document ? (d3_behavior_zoomDelta = function() {
+        return d3.event.wheelDelta;
+      }, "mousewheel") : (d3_behavior_zoomDelta = function() {
+        return -d3.event.detail;
+      }, "MozMousePixelScroll");
+    }
+    function zoom(g) {
+      g.on(mousedown, mousedowned).on(d3_behavior_zoomWheel + ".zoom", mousewheeled).on("dblclick.zoom", dblclicked).on(touchstart, touchstarted);
+    }
+    zoom.event = function(g) {
+      g.each(function() {
+        var dispatch = event.of(this, arguments), view1 = view;
+        if (d3_transitionInheritId) {
+          d3.select(this).transition().each("start.zoom", function() {
+            view = this.__chart__ || {
+              x: 0,
+              y: 0,
+              k: 1
+            };
+            zoomstarted(dispatch);
+          }).tween("zoom:zoom", function() {
+            var dx = size[0], dy = size[1], cx = center0 ? center0[0] : dx / 2, cy = center0 ? center0[1] : dy / 2, i = d3.interpolateZoom([ (cx - view.x) / view.k, (cy - view.y) / view.k, dx / view.k ], [ (cx - view1.x) / view1.k, (cy - view1.y) / view1.k, dx / view1.k ]);
+            return function(t) {
+              var l = i(t), k = dx / l[2];
+              this.__chart__ = view = {
+                x: cx - l[0] * k,
+                y: cy - l[1] * k,
+                k: k
+              };
+              zoomed(dispatch);
+            };
+          }).each("interrupt.zoom", function() {
+            zoomended(dispatch);
+          }).each("end.zoom", function() {
+            zoomended(dispatch);
+          });
+        } else {
+          this.__chart__ = view;
+          zoomstarted(dispatch);
+          zoomed(dispatch);
+          zoomended(dispatch);
+        }
+      });
+    };
+    zoom.translate = function(_) {
+      if (!arguments.length) return [ view.x, view.y ];
+      view = {
+        x: +_[0],
+        y: +_[1],
+        k: view.k
+      };
+      rescale();
+      return zoom;
+    };
+    zoom.scale = function(_) {
+      if (!arguments.length) return view.k;
+      view = {
+        x: view.x,
+        y: view.y,
+        k: null
+      };
+      scaleTo(+_);
+      rescale();
+      return zoom;
+    };
+    zoom.scaleExtent = function(_) {
+      if (!arguments.length) return scaleExtent;
+      scaleExtent = _ == null ? d3_behavior_zoomInfinity : [ +_[0], +_[1] ];
+      return zoom;
+    };
+    zoom.center = function(_) {
+      if (!arguments.length) return center;
+      center = _ && [ +_[0], +_[1] ];
+      return zoom;
+    };
+    zoom.size = function(_) {
+      if (!arguments.length) return size;
+      size = _ && [ +_[0], +_[1] ];
+      return zoom;
+    };
+    zoom.duration = function(_) {
+      if (!arguments.length) return duration;
+      duration = +_;
+      return zoom;
+    };
+    zoom.x = function(z) {
+      if (!arguments.length) return x1;
+      x1 = z;
+      x0 = z.copy();
+      view = {
+        x: 0,
+        y: 0,
+        k: 1
+      };
+      return zoom;
+    };
+    zoom.y = function(z) {
+      if (!arguments.length) return y1;
+      y1 = z;
+      y0 = z.copy();
+      view = {
+        x: 0,
+        y: 0,
+        k: 1
+      };
+      return zoom;
+    };
+    function location(p) {
+      return [ (p[0] - view.x) / view.k, (p[1] - view.y) / view.k ];
+    }
+    function point(l) {
+      return [ l[0] * view.k + view.x, l[1] * view.k + view.y ];
+    }
+    function scaleTo(s) {
+      view.k = Math.max(scaleExtent[0], Math.min(scaleExtent[1], s));
+    }
+    function translateTo(p, l) {
+      l = point(l);
+      view.x += p[0] - l[0];
+      view.y += p[1] - l[1];
+    }
+    function zoomTo(that, p, l, k) {
+      that.__chart__ = {
+        x: view.x,
+        y: view.y,
+        k: view.k
+      };
+      scaleTo(Math.pow(2, k));
+      translateTo(center0 = p, l);
+      that = d3.select(that);
+      if (duration > 0) that = that.transition().duration(duration);
+      that.call(zoom.event);
+    }
+    function rescale() {
+      if (x1) x1.domain(x0.range().map(function(x) {
+        return (x - view.x) / view.k;
+      }).map(x0.invert));
+      if (y1) y1.domain(y0.range().map(function(y) {
+        return (y - view.y) / view.k;
+      }).map(y0.invert));
+    }
+    function zoomstarted(dispatch) {
+      if (!zooming++) dispatch({
+        type: "zoomstart"
+      });
+    }
+    function zoomed(dispatch) {
+      rescale();
+      dispatch({
+        type: "zoom",
+        scale: view.k,
+        translate: [ view.x, view.y ]
+      });
+    }
+    function zoomended(dispatch) {
+      if (!--zooming) dispatch({
+        type: "zoomend"
+      }), center0 = null;
+    }
+    function mousedowned() {
+      var that = this, dispatch = event.of(that, arguments), dragged = 0, subject = d3.select(d3_window(that)).on(mousemove, moved).on(mouseup, ended), location0 = location(d3.mouse(that)), dragRestore = d3_event_dragSuppress(that);
+      d3_selection_interrupt.call(that);
+      zoomstarted(dispatch);
+      function moved() {
+        dragged = 1;
+        translateTo(d3.mouse(that), location0);
+        zoomed(dispatch);
+      }
+      function ended() {
+        subject.on(mousemove, null).on(mouseup, null);
+        dragRestore(dragged);
+        zoomended(dispatch);
+      }
+    }
+    function touchstarted() {
+      var that = this, dispatch = event.of(that, arguments), locations0 = {}, distance0 = 0, scale0, zoomName = ".zoom-" + d3.event.changedTouches[0].identifier, touchmove = "touchmove" + zoomName, touchend = "touchend" + zoomName, targets = [], subject = d3.select(that), dragRestore = d3_event_dragSuppress(that);
+      started();
+      zoomstarted(dispatch);
+      subject.on(mousedown, null).on(touchstart, started);
+      function relocate() {
+        var touches = d3.touches(that);
+        scale0 = view.k;
+        touches.forEach(function(t) {
+          if (t.identifier in locations0) locations0[t.identifier] = location(t);
+        });
+        return touches;
+      }
+      function started() {
+        var target = d3.event.target;
+        d3.select(target).on(touchmove, moved).on(touchend, ended);
+        targets.push(target);
+        var changed = d3.event.changedTouches;
+        for (var i = 0, n = changed.length; i < n; ++i) {
+          locations0[changed[i].identifier] = null;
+        }
+        var touches = relocate(), now = Date.now();
+        if (touches.length === 1) {
+          if (now - touchtime < 500) {
+            var p = touches[0];
+            zoomTo(that, p, locations0[p.identifier], Math.floor(Math.log(view.k) / Math.LN2) + 1);
+            d3_eventPreventDefault();
+          }
+          touchtime = now;
+        } else if (touches.length > 1) {
+          var p = touches[0], q = touches[1], dx = p[0] - q[0], dy = p[1] - q[1];
+          distance0 = dx * dx + dy * dy;
+        }
+      }
+      function moved() {
+        var touches = d3.touches(that), p0, l0, p1, l1;
+        d3_selection_interrupt.call(that);
+        for (var i = 0, n = touches.length; i < n; ++i, l1 = null) {
+          p1 = touches[i];
+          if (l1 = locations0[p1.identifier]) {
+            if (l0) break;
+            p0 = p1, l0 = l1;
+          }
+        }
+        if (l1) {
+          var distance1 = (distance1 = p1[0] - p0[0]) * distance1 + (distance1 = p1[1] - p0[1]) * distance1, scale1 = distance0 && Math.sqrt(distance1 / distance0);
+          p0 = [ (p0[0] + p1[0]) / 2, (p0[1] + p1[1]) / 2 ];
+          l0 = [ (l0[0] + l1[0]) / 2, (l0[1] + l1[1]) / 2 ];
+          scaleTo(scale1 * scale0);
+        }
+        touchtime = null;
+        translateTo(p0, l0);
+        zoomed(dispatch);
+      }
+      function ended() {
+        if (d3.event.touches.length) {
+          var changed = d3.event.changedTouches;
+          for (var i = 0, n = changed.length; i < n; ++i) {
+            delete locations0[changed[i].identifier];
+          }
+          for (var identifier in locations0) {
+            return void relocate();
+          }
+        }
+        d3.selectAll(targets).on(zoomName, null);
+        subject.on(mousedown, mousedowned).on(touchstart, touchstarted);
+        dragRestore();
+        zoomended(dispatch);
+      }
+    }
+    function mousewheeled() {
+      var dispatch = event.of(this, arguments);
+      if (mousewheelTimer) clearTimeout(mousewheelTimer);
+      else d3_selection_interrupt.call(this), translate0 = location(center0 = center || d3.mouse(this)), zoomstarted(dispatch);
+      mousewheelTimer = setTimeout(function() {
+        mousewheelTimer = null;
+        zoomended(dispatch);
+      }, 50);
+      d3_eventPreventDefault();
+      scaleTo(Math.pow(2, d3_behavior_zoomDelta() * .002) * view.k);
+      translateTo(center0, translate0);
+      zoomed(dispatch);
+    }
+    function dblclicked() {
+      var p = d3.mouse(this), k = Math.log(view.k) / Math.LN2;
+      zoomTo(this, p, location(p), d3.event.shiftKey ? Math.ceil(k) - 1 : Math.floor(k) + 1);
+    }
+    return d3.rebind(zoom, event, "on");
+  };
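+  // A minimal zoom-behavior sketch (assumes `svg` and an inner `g`
+  // selection; both names are illustrative):
+  //   var zoom = d3.behavior.zoom()
+  //       .scaleExtent([1, 8])
+  //       .on("zoom", function() {
+  //         g.attr("transform", "translate(" + d3.event.translate +
+  //             ")scale(" + d3.event.scale + ")");
+  //       });
+  //   svg.call(zoom);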
+  var d3_behavior_zoomInfinity = [ 0, Infinity ], d3_behavior_zoomDelta, d3_behavior_zoomWheel;
+  d3.color = d3_color;
+  function d3_color() {}
+  d3_color.prototype.toString = function() {
+    return this.rgb() + "";
+  };
+  d3.hsl = d3_hsl;
+  function d3_hsl(h, s, l) {
+    return this instanceof d3_hsl ? void (this.h = +h, this.s = +s, this.l = +l) : arguments.length < 2 ? h instanceof d3_hsl ? new d3_hsl(h.h, h.s, h.l) : d3_rgb_parse("" + h, d3_rgb_hsl, d3_hsl) : new d3_hsl(h, s, l);
+  }
+  var d3_hslPrototype = d3_hsl.prototype = new d3_color();
+  d3_hslPrototype.brighter = function(k) {
+    k = Math.pow(.7, arguments.length ? k : 1);
+    return new d3_hsl(this.h, this.s, this.l / k);
+  };
+  d3_hslPrototype.darker = function(k) {
+    k = Math.pow(.7, arguments.length ? k : 1);
+    return new d3_hsl(this.h, this.s, k * this.l);
+  };
+  d3_hslPrototype.rgb = function() {
+    return d3_hsl_rgb(this.h, this.s, this.l);
+  };
+  function d3_hsl_rgb(h, s, l) {
+    var m1, m2;
+    h = isNaN(h) ? 0 : (h %= 360) < 0 ? h + 360 : h;
+    s = isNaN(s) ? 0 : s < 0 ? 0 : s > 1 ? 1 : s;
+    l = l < 0 ? 0 : l > 1 ? 1 : l;
+    m2 = l <= .5 ? l * (1 + s) : l + s - l * s;
+    m1 = 2 * l - m2;
+    function v(h) {
+      if (h > 360) h -= 360; else if (h < 0) h += 360;
+      if (h < 60) return m1 + (m2 - m1) * h / 60;
+      if (h < 180) return m2;
+      if (h < 240) return m1 + (m2 - m1) * (240 - h) / 60;
+      return m1;
+    }
+    function vv(h) {
+      return Math.round(v(h) * 255);
+    }
+    return new d3_rgb(vv(h + 120), vv(h), vv(h - 120));
+  }
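+  // Standard CSS3 HSL-to-RGB conversion: m2 is the upper chroma bound, m1
+  // the lower, and v(h) ramps each channel linearly between them, sampled
+  // at hue offsets of +120° (red), 0° (green) and -120° (blue).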
+  d3.hcl = d3_hcl;
+  function d3_hcl(h, c, l) {
+    return this instanceof d3_hcl ? void (this.h = +h, this.c = +c, this.l = +l) : arguments.length < 2 ? h instanceof d3_hcl ? new d3_hcl(h.h, h.c, h.l) : h instanceof d3_lab ? d3_lab_hcl(h.l, h.a, h.b) : d3_lab_hcl((h = d3_rgb_lab((h = d3.rgb(h)).r, h.g, h.b)).l, h.a, h.b) : new d3_hcl(h, c, l);
+  }
+  var d3_hclPrototype = d3_hcl.prototype = new d3_color();
+  d3_hclPrototype.brighter = function(k) {
+    return new d3_hcl(this.h, this.c, Math.min(100, this.l + d3_lab_K * (arguments.length ? k : 1)));
+  };
+  d3_hclPrototype.darker = function(k) {
+    return new d3_hcl(this.h, this.c, Math.max(0, this.l - d3_lab_K * (arguments.length ? k : 1)));
+  };
+  d3_hclPrototype.rgb = function() {
+    return d3_hcl_lab(this.h, this.c, this.l).rgb();
+  };
+  function d3_hcl_lab(h, c, l) {
+    if (isNaN(h)) h = 0;
+    if (isNaN(c)) c = 0;
+    return new d3_lab(l, Math.cos(h *= d3_radians) * c, Math.sin(h) * c);
+  }
+  d3.lab = d3_lab;
+  function d3_lab(l, a, b) {
+    return this instanceof d3_lab ? void (this.l = +l, this.a = +a, this.b = +b) : arguments.length < 2 ? l instanceof d3_lab ? new d3_lab(l.l, l.a, l.b) : l instanceof d3_hcl ? d3_hcl_lab(l.h, l.c, l.l) : d3_rgb_lab((l = d3_rgb(l)).r, l.g, l.b) : new d3_lab(l, a, b);
+  }
+  var d3_lab_K = 18;
+  var d3_lab_X = .95047, d3_lab_Y = 1, d3_lab_Z = 1.08883;
+  var d3_labPrototype = d3_lab.prototype = new d3_color();
+  d3_labPrototype.brighter = function(k) {
+    return new d3_lab(Math.min(100, this.l + d3_lab_K * (arguments.length ? k : 1)), this.a, this.b);
+  };
+  d3_labPrototype.darker = function(k) {
+    return new d3_lab(Math.max(0, this.l - d3_lab_K * (arguments.length ? k : 1)), this.a, this.b);
+  };
+  d3_labPrototype.rgb = function() {
+    return d3_lab_rgb(this.l, this.a, this.b);
+  };
+  function d3_lab_rgb(l, a, b) {
+    var y = (l + 16) / 116, x = y + a / 500, z = y - b / 200;
+    x = d3_lab_xyz(x) * d3_lab_X;
+    y = d3_lab_xyz(y) * d3_lab_Y;
+    z = d3_lab_xyz(z) * d3_lab_Z;
+    return new d3_rgb(d3_xyz_rgb(3.2404542 * x - 1.5371385 * y - .4985314 * z), d3_xyz_rgb(-.969266 * x + 1.8760108 * y + .041556 * z), d3_xyz_rgb(.0556434 * x - .2040259 * y + 1.0572252 * z));
+  }
+  function d3_lab_hcl(l, a, b) {
+    return l > 0 ? new d3_hcl(Math.atan2(b, a) * d3_degrees, Math.sqrt(a * a + b * b), l) : new d3_hcl(NaN, NaN, l);
+  }
+  function d3_lab_xyz(x) {
+    return x > .206893034 ? x * x * x : (x - 4 / 29) / 7.787037;
+  }
+  function d3_xyz_lab(x) {
+    return x > .008856 ? Math.pow(x, 1 / 3) : 7.787037 * x + 4 / 29;
+  }
+  function d3_xyz_rgb(r) {
+    return Math.round(255 * (r <= .00304 ? 12.92 * r : 1.055 * Math.pow(r, 1 / 2.4) - .055));
+  }
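+  // CIELAB support: d3_lab_X/Y/Z are the D65 reference white, the
+  // d3_lab_xyz / d3_xyz_lab pair is the CIE cube-root function and its
+  // inverse with a linear toe near zero, and d3_xyz_rgb / d3_rgb_xyz apply
+  // sRGB gamma companding (a 12.92 linear segment plus a 1/2.4 power curve).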
+  d3.rgb = d3_rgb;
+  function d3_rgb(r, g, b) {
+    return this instanceof d3_rgb ? void (this.r = ~~r, this.g = ~~g, this.b = ~~b) : arguments.length < 2 ? r instanceof d3_rgb ? new d3_rgb(r.r, r.g, r.b) : d3_rgb_parse("" + r, d3_rgb, d3_hsl_rgb) : new d3_rgb(r, g, b);
+  }
+  function d3_rgbNumber(value) {
+    return new d3_rgb(value >> 16, value >> 8 & 255, value & 255);
+  }
+  function d3_rgbString(value) {
+    return d3_rgbNumber(value) + "";
+  }
+  var d3_rgbPrototype = d3_rgb.prototype = new d3_color();
+  d3_rgbPrototype.brighter = function(k) {
+    k = Math.pow(.7, arguments.length ? k : 1);
+    var r = this.r, g = this.g, b = this.b, i = 30;
+    if (!r && !g && !b) return new d3_rgb(i, i, i);
+    if (r && r < i) r = i;
+    if (g && g < i) g = i;
+    if (b && b < i) b = i;
+    return new d3_rgb(Math.min(255, r / k), Math.min(255, g / k), Math.min(255, b / k));
+  };
+  d3_rgbPrototype.darker = function(k) {
+    k = Math.pow(.7, arguments.length ? k : 1);
+    return new d3_rgb(k * this.r, k * this.g, k * this.b);
+  };
+  d3_rgbPrototype.hsl = function() {
+    return d3_rgb_hsl(this.r, this.g, this.b);
+  };
+  d3_rgbPrototype.toString = function() {
+    return "#" + d3_rgb_hex(this.r) + d3_rgb_hex(this.g) + d3_rgb_hex(this.b);
+  };
+  function d3_rgb_hex(v) {
+    return v < 16 ? "0" + Math.max(0, v).toString(16) : Math.min(255, v).toString(16);
+  }
+  function d3_rgb_parse(format, rgb, hsl) {
+    var r = 0, g = 0, b = 0, m1, m2, color;
+    m1 = /([a-z]+)\((.*)\)/.exec(format = format.toLowerCase());
+    if (m1) {
+      m2 = m1[2].split(",");
+      switch (m1[1]) {
+       case "hsl":
+        {
+          return hsl(parseFloat(m2[0]), parseFloat(m2[1]) / 100, parseFloat(m2[2]) / 100);
+        }
+
+       case "rgb":
+        {
+          return rgb(d3_rgb_parseNumber(m2[0]), d3_rgb_parseNumber(m2[1]), d3_rgb_parseNumber(m2[2]));
+        }
+      }
+    }
+    if (color = d3_rgb_names.get(format)) {
+      return rgb(color.r, color.g, color.b);
+    }
+    if (format != null && format.charAt(0) === "#" && !isNaN(color = parseInt(format.slice(1), 16))) {
+      if (format.length === 4) {
+        r = (color & 3840) >> 4;
+        r = r >> 4 | r;
+        g = color & 240;
+        g = g >> 4 | g;
+        b = color & 15;
+        b = b << 4 | b;
+      } else if (format.length === 7) {
+        r = (color & 16711680) >> 16;
+        g = (color & 65280) >> 8;
+        b = color & 255;
+      }
+    }
+    return rgb(r, g, b);
+  }
+  function d3_rgb_hsl(r, g, b) {
+    var min = Math.min(r /= 255, g /= 255, b /= 255), max = Math.max(r, g, b), d = max - min, h, s, l = (max + min) / 2;
+    if (d) {
+      s = l < .5 ? d / (max + min) : d / (2 - max - min);
+      if (r == max) h = (g - b) / d + (g < b ? 6 : 0); else if (g == max) h = (b - r) / d + 2; else h = (r - g) / d + 4;
+      h *= 60;
+    } else {
+      h = NaN;
+      s = l > 0 && l < 1 ? 0 : h;
+    }
+    return new d3_hsl(h, s, l);
+  }
+  function d3_rgb_lab(r, g, b) {
+    r = d3_rgb_xyz(r);
+    g = d3_rgb_xyz(g);
+    b = d3_rgb_xyz(b);
+    var x = d3_xyz_lab((.4124564 * r + .3575761 * g + .1804375 * b) / d3_lab_X), y = d3_xyz_lab((.2126729 * r + .7151522 * g + .072175 * b) / d3_lab_Y), z = d3_xyz_lab((.0193339 * r + .119192 * g + .9503041 * b) / d3_lab_Z);
+    return d3_lab(116 * y - 16, 500 * (x - y), 200 * (y - z));
+  }
+  function d3_rgb_xyz(r) {
+    return (r /= 255) <= .04045 ? r / 12.92 : Math.pow((r + .055) / 1.055, 2.4);
+  }
+  function d3_rgb_parseNumber(c) {
+    var f = parseFloat(c);
+    return c.charAt(c.length - 1) === "%" ? Math.round(f * 2.55) : f;
+  }
+  var d3_rgb_names = d3.map({
+    aliceblue: 15792383,
+    antiquewhite: 16444375,
+    aqua: 65535,
+    aquamarine: 8388564,
+    azure: 15794175,
+    beige: 16119260,
+    bisque: 16770244,
+    black: 0,
+    blanchedalmond: 16772045,
+    blue: 255,
+    blueviolet: 9055202,
+    brown: 10824234,
+    burlywood: 14596231,
+    cadetblue: 6266528,
+    chartreuse: 8388352,
+    chocolate: 13789470,
+    coral: 16744272,
+    cornflowerblue: 6591981,
+    cornsilk: 16775388,
+    crimson: 14423100,
+    cyan: 65535,
+    darkblue: 139,
+    darkcyan: 35723,
+    darkgoldenrod: 12092939,
+    darkgray: 11119017,
+    darkgreen: 25600,
+    darkgrey: 11119017,
+    darkkhaki: 12433259,
+    darkmagenta: 9109643,
+    darkolivegreen: 5597999,
+    darkorange: 16747520,
+    darkorchid: 10040012,
+    darkred: 9109504,
+    darksalmon: 15308410,
+    darkseagreen: 9419919,
+    darkslateblue: 4734347,
+    darkslategray: 3100495,
+    darkslategrey: 3100495,
+    darkturquoise: 52945,
+    darkviolet: 9699539,
+    deeppink: 16716947,
+    deepskyblue: 49151,
+    dimgray: 6908265,
+    dimgrey: 6908265,
+    dodgerblue: 2003199,
+    firebrick: 11674146,
+    floralwhite: 16775920,
+    forestgreen: 2263842,
+    fuchsia: 16711935,
+    gainsboro: 14474460,
+    ghostwhite: 16316671,
+    gold: 16766720,
+    goldenrod: 14329120,
+    gray: 8421504,
+    green: 32768,
+    greenyellow: 11403055,
+    grey: 8421504,
+    honeydew: 15794160,
+    hotpink: 16738740,
+    indianred: 13458524,
+    indigo: 4915330,
+    ivory: 16777200,
+    khaki: 15787660,
+    lavender: 15132410,
+    lavenderblush: 16773365,
+    lawngreen: 8190976,
+    lemonchiffon: 16775885,
+    lightblue: 11393254,
+    lightcoral: 15761536,
+    lightcyan: 14745599,
+    lightgoldenrodyellow: 16448210,
+    lightgray: 13882323,
+    lightgreen: 9498256,
+    lightgrey: 13882323,
+    lightpink: 16758465,
+    lightsalmon: 16752762,
+    lightseagreen: 2142890,
+    lightskyblue: 8900346,
+    lightslategray: 7833753,
+    lightslategrey: 7833753,
+    lightsteelblue: 11584734,
+    lightyellow: 16777184,
+    lime: 65280,
+    limegreen: 3329330,
+    linen: 16445670,
+    magenta: 16711935,
+    maroon: 8388608,
+    mediumaquamarine: 6737322,
+    mediumblue: 205,
+    mediumorchid: 12211667,
+    mediumpurple: 9662683,
+    mediumseagreen: 3978097,
+    mediumslateblue: 8087790,
+    mediumspringgreen: 64154,
+    mediumturquoise: 4772300,
+    mediumvioletred: 13047173,
+    midnightblue: 1644912,
+    mintcream: 16121850,
+    mistyrose: 16770273,
+    moccasin: 16770229,
+    navajowhite: 16768685,
+    navy: 128,
+    oldlace: 16643558,
+    olive: 8421376,
+    olivedrab: 7048739,
+    orange: 16753920,
+    orangered: 16729344,
+    orchid: 14315734,
+    palegoldenrod: 15657130,
+    palegreen: 10025880,
+    paleturquoise: 11529966,
+    palevioletred: 14381203,
+    papayawhip: 16773077,
+    peachpuff: 16767673,
+    peru: 13468991,
+    pink: 16761035,
+    plum: 14524637,
+    powderblue: 11591910,
+    purple: 8388736,
+    rebeccapurple: 6697881,
+    red: 16711680,
+    rosybrown: 12357519,
+    royalblue: 4286945,
+    saddlebrown: 9127187,
+    salmon: 16416882,
+    sandybrown: 16032864,
+    seagreen: 3050327,
+    seashell: 16774638,
+    sienna: 10506797,
+    silver: 12632256,
+    skyblue: 8900331,
+    slateblue: 6970061,
+    slategray: 7372944,
+    slategrey: 7372944,
+    snow: 16775930,
+    springgreen: 65407,
+    steelblue: 4620980,
+    tan: 13808780,
+    teal: 32896,
+    thistle: 14204888,
+    tomato: 16737095,
+    turquoise: 4251856,
+    violet: 15631086,
+    wheat: 16113331,
+    white: 16777215,
+    whitesmoke: 16119285,
+    yellow: 16776960,
+    yellowgreen: 10145074
+  });
+  d3_rgb_names.forEach(function(key, value) {
+    d3_rgb_names.set(key, d3_rgbNumber(value));
+  });
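+  // The table above packs the CSS named colors as 24-bit 0xRRGGBB integers
+  // (e.g. blue: 255 is #0000ff); the forEach converts each entry into a
+  // d3_rgb instance so d3_rgb_parse can return named colors by lookup.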
+  function d3_functor(v) {
+    return typeof v === "function" ? v : function() {
+      return v;
+    };
+  }
+  d3.functor = d3_functor;
+  d3.xhr = d3_xhrType(d3_identity);
+  function d3_xhrType(response) {
+    return function(url, mimeType, callback) {
+      if (arguments.length === 2 && typeof mimeType === "function") callback = mimeType, mimeType = null;
+      return d3_xhr(url, mimeType, response, callback);
+    };
+  }
+  function d3_xhr(url, mimeType, response, callback) {
+    var xhr = {}, dispatch = d3.dispatch("beforesend", "progress", "load", "error"), headers = {}, request = new XMLHttpRequest(), responseType = null;
+    if (this.XDomainRequest && !("withCredentials" in request) && /^(http(s)?:)?\/\//.test(url)) request = new XDomainRequest();
+    "onload" in request ? request.onload = request.onerror = respond : request.onreadystatechange = function() {
+      request.readyState > 3 && respond();
+    };
+    function respond() {
+      var status = request.status, result;
+      if (!status && d3_xhrHasResponse(request) || status >= 200 && status < 300 || status === 304) {
+        try {
+          result = response.call(xhr, request);
+        } catch (e) {
+          dispatch.error.call(xhr, e);
+          return;
+        }
+        dispatch.load.call(xhr, result);
+      } else {
+        dispatch.error.call(xhr, request);
+      }
+    }
+    request.onprogress = function(event) {
+      var o = d3.event;
+      d3.event = event;
+      try {
+        dispatch.progress.call(xhr, request);
+      } finally {
+        d3.event = o;
+      }
+    };
+    xhr.header = function(name, value) {
+      name = (name + "").toLowerCase();
+      if (arguments.length < 2) return headers[name];
+      if (value == null) delete headers[name]; else headers[name] = value + "";
+      return xhr;
+    };
+    xhr.mimeType = function(value) {
+      if (!arguments.length) return mimeType;
+      mimeType = value == null ? null : value + "";
+      return xhr;
+    };
+    xhr.responseType = function(value) {
+      if (!arguments.length) return responseType;
+      responseType = value;
+      return xhr;
+    };
+    xhr.response = function(value) {
+      response = value;
+      return xhr;
+    };
+    [ "get", "post" ].forEach(function(method) {
+      xhr[method] = function() {
+        return xhr.send.apply(xhr, [ method ].concat(d3_array(arguments)));
+      };
+    });
+    xhr.send = function(method, data, callback) {
+      if (arguments.length === 2 && typeof data === "function") callback = data, data = null;
+      request.open(method, url, true);
+      if (mimeType != null && !("accept" in headers)) headers["accept"] = mimeType + ",*/*";
+      if (request.setRequestHeader) for (var name in headers) request.setRequestHeader(name, headers[name]);
+      if (mimeType != null && request.overrideMimeType) request.overrideMimeType(mimeType);
+      if (responseType != null) request.responseType = responseType;
+      if (callback != null) xhr.on("error", callback).on("load", function(request) {
+        callback(null, request);
+      });
+      dispatch.beforesend.call(xhr, request);
+      request.send(data == null ? null : data);
+      return xhr;
+    };
+    xhr.abort = function() {
+      request.abort();
+      return xhr;
+    };
+    d3.rebind(xhr, dispatch, "on");
+    return callback == null ? xhr : xhr.get(d3_xhr_fixCallback(callback));
+  }
+  function d3_xhr_fixCallback(callback) {
+    return callback.length === 1 ? function(error, request) {
+      callback(error == null ? request : null);
+    } : callback;
+  }
+  function d3_xhrHasResponse(request) {
+    var type = request.responseType;
+    return type && type !== "text" ? request.response : request.responseText;
+  }
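+  // A minimal d3.xhr sketch (the URL is illustrative). Callbacks given to
+  // .get/.send receive (error, request); a callback passed straight to
+  // d3.xhr(url, callback) may instead take a single argument, which
+  // d3_xhr_fixCallback adapts above.
+  //   d3.xhr("/example.txt").get(function(error, request) {
+  //     if (!error) console.log(request.responseText);
+  //   });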
+  d3.dsv = function(delimiter, mimeType) {
+    var reFormat = new RegExp('["' + delimiter + "\n]"), delimiterCode = delimiter.charCodeAt(0);
+    function dsv(url, row, callback) {
+      if (arguments.length < 3) callback = row, row = null;
+      var xhr = d3_xhr(url, mimeType, row == null ? response : typedResponse(row), callback);
+      xhr.row = function(_) {
+        return arguments.length ? xhr.response((row = _) == null ? response : typedResponse(_)) : row;
+      };
+      return xhr;
+    }
+    function response(request) {
+      return dsv.parse(request.responseText);
+    }
+    function typedResponse(f) {
+      return function(request) {
+        return dsv.parse(request.responseText, f);
+      };
+    }
+    dsv.parse = function(text, f) {
+      var o;
+      return dsv.parseRows(text, function(row, i) {
+        if (o) return o(row, i - 1);
+        var a = new Function("d", "return {" + row.map(function(name, i) {
+          return JSON.stringify(name) + ": d[" + i + "]";
+        }).join(",") + "}");
+        o = f ? function(row, i) {
+          return f(a(row), i);
+        } : a;
+      });
+    };
+    dsv.parseRows = function(text, f) {
+      var EOL = {}, EOF = {}, rows = [], N = text.length, I = 0, n = 0, t, eol;
+      function token() {
+        if (I >= N) return EOF;
+        if (eol) return eol = false, EOL;
+        var j = I;
+        if (text.charCodeAt(j) === 34) {
+          var i = j;
+          while (i++ < N) {
+            if (text.charCodeAt(i) === 34) {
+              if (text.charCodeAt(i + 1) !== 34) break;
+              ++i;
+            }
+          }
+          I = i + 2;
+          var c = text.charCodeAt(i + 1);
+          if (c === 13) {
+            eol = true;
+            if (text.charCodeAt(i + 2) === 10) ++I;
+          } else if (c === 10) {
+            eol = true;
+          }
+          return text.slice(j + 1, i).replace(/""/g, '"');
+        }
+        while (I < N) {
+          var c = text.charCodeAt(I++), k = 1;
+          if (c === 10) eol = true; else if (c === 13) {
+            eol = true;
+            if (text.charCodeAt(I) === 10) ++I, ++k;
+          } else if (c !== delimiterCode) continue;
+          return text.slice(j, I - k);
+        }
+        return text.slice(j);
+      }
+      while ((t = token()) !== EOF) {
+        var a = [];
+        while (t !== EOL && t !== EOF) {
+          a.push(t);
+          t = token();
+        }
+        if (f && (a = f(a, n++)) == null) continue;
+        rows.push(a);
+      }
+      return rows;
+    };
+    dsv.format = function(rows) {
+      if (Array.isArray(rows[0])) return dsv.formatRows(rows);
+      var fieldSet = new d3_Set(), fields = [];
+      rows.forEach(function(row) {
+        for (var field in row) {
+          if (!fieldSet.has(field)) {
+            fields.push(fieldSet.add(field));
+          }
+        }
+      });
+      return [ fields.map(formatValue).join(delimiter) ].concat(rows.map(function(row) {
+        return fields.map(function(field) {
+          return formatValue(row[field]);
+        }).join(delimiter);
+      })).join("\n");
+    };
+    dsv.formatRows = function(rows) {
+      return rows.map(formatRow).join("\n");
+    };
+    function formatRow(row) {
+      return row.map(formatValue).join(delimiter);
+    }
+    function formatValue(text) {
+      return reFormat.test(text) ? '"' + text.replace(/\"/g, '""') + '"' : text;
+    }
+    return dsv;
+  };
+  d3.csv = d3.dsv(",", "text/csv");
+  d3.tsv = d3.dsv("	", "text/tab-separated-values");
+  var d3_timer_queueHead, d3_timer_queueTail, d3_timer_interval, d3_timer_timeout, d3_timer_frame = this[d3_vendorSymbol(this, "requestAnimationFrame")] || function(callback) {
+    setTimeout(callback, 17);
+  };
+  d3.timer = function() {
+    d3_timer.apply(this, arguments);
+  };
+  function d3_timer(callback, delay, then) {
+    var n = arguments.length;
+    if (n < 2) delay = 0;
+    if (n < 3) then = Date.now();
+    var time = then + delay, timer = {
+      c: callback,
+      t: time,
+      n: null
+    };
+    if (d3_timer_queueTail) d3_timer_queueTail.n = timer; else d3_timer_queueHead = timer;
+    d3_timer_queueTail = timer;
+    if (!d3_timer_interval) {
+      d3_timer_timeout = clearTimeout(d3_timer_timeout);
+      d3_timer_interval = 1;
+      d3_timer_frame(d3_timer_step);
+    }
+    return timer;
+  }
+  function d3_timer_step() {
+    var now = d3_timer_mark(), delay = d3_timer_sweep() - now;
+    if (delay > 24) {
+      if (isFinite(delay)) {
+        clearTimeout(d3_timer_timeout);
+        d3_timer_timeout = setTimeout(d3_timer_step, delay);
+      }
+      d3_timer_interval = 0;
+    } else {
+      d3_timer_interval = 1;
+      d3_timer_frame(d3_timer_step);
+    }
+  }
+  d3.timer.flush = function() {
+    d3_timer_mark();
+    d3_timer_sweep();
+  };
+  function d3_timer_mark() {
+    var now = Date.now(), timer = d3_timer_queueHead;
+    while (timer) {
+      if (now >= timer.t && timer.c(now - timer.t)) timer.c = null;
+      timer = timer.n;
+    }
+    return now;
+  }
+  function d3_timer_sweep() {
+    var t0, t1 = d3_timer_queueHead, time = Infinity;
+    while (t1) {
+      if (t1.c) {
+        if (t1.t < time) time = t1.t;
+        t1 = (t0 = t1).n;
+      } else {
+        t1 = t0 ? t0.n = t1.n : d3_timer_queueHead = t1.n;
+      }
+    }
+    d3_timer_queueTail = t0;
+    return time;
+  }
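+  // Timers live in a singly linked queue: d3_timer_mark runs every due
+  // callback (a callback returning a truthy value marks itself finished),
+  // d3_timer_sweep unlinks finished timers and reports the earliest
+  // remaining deadline, and d3_timer_step reschedules via the animation
+  // frame when that deadline is near (<= 24 ms) and setTimeout otherwise.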
+  function d3_format_precision(x, p) {
+    return p - (x ? Math.ceil(Math.log(x) / Math.LN10) : 1);
+  }
+  d3.round = function(x, n) {
+    return n ? Math.round(x * (n = Math.pow(10, n))) / n : Math.round(x);
+  };
+  var d3_formatPrefixes = [ "y", "z", "a", "f", "p", "n", "µ", "m", "", "k", "M", "G", "T", "P", "E", "Z", "Y" ].map(d3_formatPrefix);
+  d3.formatPrefix = function(value, precision) {
+    var i = 0;
+    if (value = +value) {
+      if (value < 0) value *= -1;
+      if (precision) value = d3.round(value, d3_format_precision(value, precision));
+      i = 1 + Math.floor(1e-12 + Math.log(value) / Math.LN10);
+      i = Math.max(-24, Math.min(24, Math.floor((i - 1) / 3) * 3));
+    }
+    return d3_formatPrefixes[8 + i / 3];
+  };
+  function d3_formatPrefix(d, i) {
+    var k = Math.pow(10, abs(8 - i) * 3);
+    return {
+      scale: i > 8 ? function(d) {
+        return d / k;
+      } : function(d) {
+        return d * k;
+      },
+      symbol: d
+    };
+  }
+  function d3_locale_numberFormat(locale) {
+    var locale_decimal = locale.decimal, locale_thousands = locale.thousands, locale_grouping = locale.grouping, locale_currency = locale.currency, formatGroup = locale_grouping && locale_thousands ? function(value, width) {
+      var i = value.length, t = [], j = 0, g = locale_grouping[0], length = 0;
+      while (i > 0 && g > 0) {
+        if (length + g + 1 > width) g = Math.max(1, width - length);
+        t.push(value.substring(i -= g, i + g));
+        if ((length += g + 1) > width) break;
+        g = locale_grouping[j = (j + 1) % locale_grouping.length];
+      }
+      return t.reverse().join(locale_thousands);
+    } : d3_identity;
+    return function(specifier) {
+      var match = d3_format_re.exec(specifier), fill = match[1] || " ", align = match[2] || ">", sign = match[3] || "-", symbol = match[4] || "", zfill = match[5], width = +match[6], comma = match[7], precision = match[8], type = match[9], scale = 1, prefix = "", suffix = "", integer = false, exponent = true;
+      if (precision) precision = +precision.substring(1);
+      if (zfill || fill === "0" && align === "=") {
+        zfill = fill = "0";
+        align = "=";
+      }
+      switch (type) {
+       case "n":
+        comma = true;
+        type = "g";
+        break;
+
+       case "%":
+        scale = 100;
+        suffix = "%";
+        type = "f";
+        break;
+
+       case "p":
+        scale = 100;
+        suffix = "%";
+        type = "r";
+        break;
+
+       case "b":
+       case "o":
+       case "x":
+       case "X":
+        if (symbol === "#") prefix = "0" + type.toLowerCase();
+
+       case "c":
+        exponent = false;
+
+       case "d":
+        integer = true;
+        precision = 0;
+        break;
+
+       case "s":
+        scale = -1;
+        type = "r";
+        break;
+      }
+      if (symbol === "$") prefix = locale_currency[0], suffix = locale_currency[1];
+      if (type == "r" && !precision) type = "g";
+      if (precision != null) {
+        if (type == "g") precision = Math.max(1, Math.min(21, precision)); else if (type == "e" || type == "f") precision = Math.max(0, Math.min(20, precision));
+      }
+      type = d3_format_types.get(type) || d3_format_typeDefault;
+      var zcomma = zfill && comma;
+      return function(value) {
+        var fullSuffix = suffix;
+        if (integer && value % 1) return "";
+        var negative = value < 0 || value === 0 && 1 / value < 0 ? (value = -value, "-") : sign === "-" ? "" : sign;
+        if (scale < 0) {
+          var unit = d3.formatPrefix(value, precision);
+          value = unit.scale(value);
+          fullSuffix = unit.symbol + suffix;
+        } else {
+          value *= scale;
+        }
+        value = type(value, precision);
+        var i = value.lastIndexOf("."), before, after;
+        if (i < 0) {
+          var j = exponent ? value.lastIndexOf("e") : -1;
+          if (j < 0) before = value, after = ""; else before = value.substring(0, j), after = value.substring(j);
+        } else {
+          before = value.substring(0, i);
+          after = locale_decimal + value.substring(i + 1);
+        }
+        if (!zfill && comma) before = formatGroup(before, Infinity);
+        var length = prefix.length + before.length + after.length + (zcomma ? 0 : negative.length), padding = length < width ? new Array(length = width - length + 1).join(fill) : "";
+        if (zcomma) before = formatGroup(padding + before, padding.length ? width - after.length : Infinity);
+        negative += prefix;
+        value = before + after;
+        return (align === "<" ? negative + value + padding : align === ">" ? padding + negative + value : align === "^" ? padding.substring(0, length >>= 1) + negative + value + padding.substring(length) : negative + (zcomma ? value : padding + value)) + fullSuffix;
+      };
+    };
+  }
+  var d3_format_re = /(?:([^{])?([<>=^]))?([+\- ])?([$#])?(0)?(\d+)?(,)?(\.-?\d+)?([a-z%])?/i;
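+  // The specifier grammar matched above is D3 3.x's
+  //   [[fill]align][sign][symbol][0][width][,][.precision][type]
+  // e.g. d3.format(",.2f")(1234.5678) returns "1,234.57", and
+  // d3.format("s")(1.5e6) returns "1.5M" (SI-prefix type "s").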
+  var d3_format_types = d3.map({
+    b: function(x) {
+      return x.toString(2);
+    },
+    c: function(x) {
+      return String.fromCharCode(x);
+    },
+    o: function(x) {
+      return x.toString(8);
+    },
+    x: function(x) {
+      return x.toString(16);
+    },
+    X: function(x) {
+      return x.toString(16).toUpperCase();
+    },
+    g: function(x, p) {
+      return x.toPrecision(p);
+    },
+    e: function(x, p) {
+      return x.toExponential(p);
+    },
+    f: function(x, p) {
+      return x.toFixed(p);
+    },
+    r: function(x, p) {
+      return (x = d3.round(x, d3_format_precision(x, p))).toFixed(Math.max(0, Math.min(20, d3_format_precision(x * (1 + 1e-15), p))));
+    }
+  });
+  function d3_format_typeDefault(x) {
+    return x + "";
+  }
+  var d3_time = d3.time = {}, d3_date = Date;
+  function d3_date_utc() {
+    this._ = new Date(arguments.length > 1 ? Date.UTC.apply(this, arguments) : arguments[0]);
+  }
+  d3_date_utc.prototype = {
+    getDate: function() {
+      return this._.getUTCDate();
+    },
+    getDay: function() {
+      return this._.getUTCDay();
+    },
+    getFullYear: function() {
+      return this._.getUTCFullYear();
+    },
+    getHours: function() {
+      return this._.getUTCHours();
+    },
+    getMilliseconds: function() {
+      return this._.getUTCMilliseconds();
+    },
+    getMinutes: function() {
+      return this._.getUTCMinutes();
+    },
+    getMonth: function() {
+      return this._.getUTCMonth();
+    },
+    getSeconds: function() {
+      return this._.getUTCSeconds();
+    },
+    getTime: function() {
+      return this._.getTime();
+    },
+    getTimezoneOffset: function() {
+      return 0;
+    },
+    valueOf: function() {
+      return this._.valueOf();
+    },
+    setDate: function() {
+      d3_time_prototype.setUTCDate.apply(this._, arguments);
+    },
+    setDay: function() {
+      d3_time_prototype.setUTCDay.apply(this._, arguments);
+    },
+    setFullYear: function() {
+      d3_time_prototype.setUTCFullYear.apply(this._, arguments);
+    },
+    setHours: function() {
+      d3_time_prototype.setUTCHours.apply(this._, arguments);
+    },
+    setMilliseconds: function() {
+      d3_time_prototype.setUTCMilliseconds.apply(this._, arguments);
+    },
+    setMinutes: function() {
+      d3_time_prototype.setUTCMinutes.apply(this._, arguments);
+    },
+    setMonth: function() {
+      d3_time_prototype.setUTCMonth.apply(this._, arguments);
+    },
+    setSeconds: function() {
+      d3_time_prototype.setUTCSeconds.apply(this._, arguments);
+    },
+    setTime: function() {
+      d3_time_prototype.setTime.apply(this._, arguments);
+    }
+  };
+  var d3_time_prototype = Date.prototype;
+  function d3_time_interval(local, step, number) {
+    function round(date) {
+      var d0 = local(date), d1 = offset(d0, 1);
+      return date - d0 < d1 - date ? d0 : d1;
+    }
+    function ceil(date) {
+      step(date = local(new d3_date(date - 1)), 1);
+      return date;
+    }
+    function offset(date, k) {
+      step(date = new d3_date(+date), k);
+      return date;
+    }
+    function range(t0, t1, dt) {
+      var time = ceil(t0), times = [];
+      if (dt > 1) {
+        while (time < t1) {
+          if (!(number(time) % dt)) times.push(new Date(+time));
+          step(time, 1);
+        }
+      } else {
+        while (time < t1) times.push(new Date(+time)), step(time, 1);
+      }
+      return times;
+    }
+    function range_utc(t0, t1, dt) {
+      try {
+        d3_date = d3_date_utc;
+        var utc = new d3_date_utc();
+        utc._ = t0;
+        return range(utc, t1, dt);
+      } finally {
+        d3_date = Date;
+      }
+    }
+    local.floor = local;
+    local.round = round;
+    local.ceil = ceil;
+    local.offset = offset;
+    local.range = range;
+    var utc = local.utc = d3_time_interval_utc(local);
+    utc.floor = utc;
+    utc.round = d3_time_interval_utc(round);
+    utc.ceil = d3_time_interval_utc(ceil);
+    utc.offset = d3_time_interval_utc(offset);
+    utc.range = range_utc;
+    return local;
+  }
+  function d3_time_interval_utc(method) {
+    return function(date, k) {
+      try {
+        d3_date = d3_date_utc;
+        var utc = new d3_date_utc();
+        utc._ = date;
+        return method(utc, k)._;
+      } finally {
+        d3_date = Date;
+      }
+    };
+  }
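+  // UTC variants reuse the local-time arithmetic: d3_date_utc wraps a real
+  // Date and forwards every getter/setter to its UTC counterpart, and the
+  // wrappers here temporarily rebind d3_date to that shim, restoring the
+  // native Date in the finally clause.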
+  d3_time.year = d3_time_interval(function(date) {
+    date = d3_time.day(date);
+    date.setMonth(0, 1);
+    return date;
+  }, function(date, offset) {
+    date.setFullYear(date.getFullYear() + offset);
+  }, function(date) {
+    return date.getFullYear();
+  });
+  d3_time.years = d3_time.year.range;
+  d3_time.years.utc = d3_time.year.utc.range;
+  d3_time.day = d3_time_interval(function(date) {
+    var day = new d3_date(2e3, 0);
+    day.setFullYear(date.getFullYear(), date.getMonth(), date.getDate());
+    return day;
+  }, function(date, offset) {
+    date.setDate(date.getDate() + offset);
+  }, function(date) {
+    return date.getDate() - 1;
+  });
+  d3_time.days = d3_time.day.range;
+  d3_time.days.utc = d3_time.day.utc.range;
+  d3_time.dayOfYear = function(date) {
+    var year = d3_time.year(date);
+    return Math.floor((date - year - (date.getTimezoneOffset() - year.getTimezoneOffset()) * 6e4) / 864e5);
+  };
+  [ "sunday", "monday", "tuesday", "wednesday", "thursday", "friday", "saturday" ].forEach(function(day, i) {
+    i = 7 - i;
+    var interval = d3_time[day] = d3_time_interval(function(date) {
+      (date = d3_time.day(date)).setDate(date.getDate() - (date.getDay() + i) % 7);
+      return date;
+    }, function(date, offset) {
+      date.setDate(date.getDate() + Math.floor(offset) * 7);
+    }, function(date) {
+      var day = d3_time.year(date).getDay();
+      return Math.floor((d3_time.dayOfYear(date) + (day + i) % 7) / 7) - (day !== i);
+    });
+    d3_time[day + "s"] = interval.range;
+    d3_time[day + "s"].utc = interval.utc.range;
+    d3_time[day + "OfYear"] = function(date) {
+      var day = d3_time.year(date).getDay();
+      return Math.floor((d3_time.dayOfYear(date) + (day + i) % 7) / 7);
+    };
+  });
+  d3_time.week = d3_time.sunday;
+  d3_time.weeks = d3_time.sunday.range;
+  d3_time.weeks.utc = d3_time.sunday.utc.range;
+  d3_time.weekOfYear = d3_time.sundayOfYear;
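+  // strftime-style formatting and parsing: format() expands %-directives via
+  // d3_time_formats, while format.parse() fills a {y,m,d,H,M,S,L,Z} spec with
+  // d3_time_parsers and then resolves day-of-year (%j) and week-number (%U/%W)
+  // forms into a concrete date. Usage sketch (d3 v3 API):
+  //   d3.time.format("%Y-%m-%d").parse("2016-10-01")  // Date, or null on mismatch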
+  function d3_locale_timeFormat(locale) {
+    var locale_dateTime = locale.dateTime, locale_date = locale.date, locale_time = locale.time, locale_periods = locale.periods, locale_days = locale.days, locale_shortDays = locale.shortDays, locale_months = locale.months, locale_shortMonths = locale.shortMonths;
+    function d3_time_format(template) {
+      var n = template.length;
+      function format(date) {
+        var string = [], i = -1, j = 0, c, p, f;
+        while (++i < n) {
+          if (template.charCodeAt(i) === 37) {
+            string.push(template.slice(j, i));
+            if ((p = d3_time_formatPads[c = template.charAt(++i)]) != null) c = template.charAt(++i);
+            if (f = d3_time_formats[c]) c = f(date, p == null ? c === "e" ? " " : "0" : p);
+            string.push(c);
+            j = i + 1;
+          }
+        }
+        string.push(template.slice(j, i));
+        return string.join("");
+      }
+      format.parse = function(string) {
+        var d = {
+          y: 1900,
+          m: 0,
+          d: 1,
+          H: 0,
+          M: 0,
+          S: 0,
+          L: 0,
+          Z: null
+        }, i = d3_time_parse(d, template, string, 0);
+        if (i != string.length) return null;
+        if ("p" in d) d.H = d.H % 12 + d.p * 12;
+        var localZ = d.Z != null && d3_date !== d3_date_utc, date = new (localZ ? d3_date_utc : d3_date)();
+        if ("j" in d) date.setFullYear(d.y, 0, d.j); else if ("W" in d || "U" in d) {
+          if (!("w" in d)) d.w = "W" in d ? 1 : 0;
+          date.setFullYear(d.y, 0, 1);
+          date.setFullYear(d.y, 0, "W" in d ? (d.w + 6) % 7 + d.W * 7 - (date.getDay() + 5) % 7 : d.w + d.U * 7 - (date.getDay() + 6) % 7);
+        } else date.setFullYear(d.y, d.m, d.d);
+        date.setHours(d.H + (d.Z / 100 | 0), d.M + d.Z % 100, d.S, d.L);
+        return localZ ? date._ : date;
+      };
+      format.toString = function() {
+        return template;
+      };
+      return format;
+    }
+    function d3_time_parse(date, template, string, j) {
+      var c, p, t, i = 0, n = template.length, m = string.length;
+      while (i < n) {
+        if (j >= m) return -1;
+        c = template.charCodeAt(i++);
+        if (c === 37) {
+          t = template.charAt(i++);
+          p = d3_time_parsers[t in d3_time_formatPads ? template.charAt(i++) : t];
+          if (!p || (j = p(date, string, j)) < 0) return -1;
+        } else if (c != string.charCodeAt(j++)) {
+          return -1;
+        }
+      }
+      return j;
+    }
+    d3_time_format.utc = function(template) {
+      var local = d3_time_format(template);
+      function format(date) {
+        try {
+          d3_date = d3_date_utc;
+          var utc = new d3_date();
+          utc._ = date;
+          return local(utc);
+        } finally {
+          d3_date = Date;
+        }
+      }
+      format.parse = function(string) {
+        try {
+          d3_date = d3_date_utc;
+          var date = local.parse(string);
+          return date && date._;
+        } finally {
+          d3_date = Date;
+        }
+      };
+      format.toString = local.toString;
+      return format;
+    };
+    d3_time_format.multi = d3_time_format.utc.multi = d3_time_formatMulti;
+    var d3_time_periodLookup = d3.map(), d3_time_dayRe = d3_time_formatRe(locale_days), d3_time_dayLookup = d3_time_formatLookup(locale_days), d3_time_dayAbbrevRe = d3_time_formatRe(locale_shortDays), d3_time_dayAbbrevLookup = d3_time_formatLookup(locale_shortDays), d3_time_monthRe = d3_time_formatRe(locale_months), d3_time_monthLookup = d3_time_formatLookup(locale_months), d3_time_monthAbbrevRe = d3_time_formatRe(locale_shortMonths), d3_time_monthAbbrevLookup = d3_time_formatLookup(locale_shortMonths);
+    locale_periods.forEach(function(p, i) {
+      d3_time_periodLookup.set(p.toLowerCase(), i);
+    });
+    var d3_time_formats = {
+      a: function(d) {
+        return locale_shortDays[d.getDay()];
+      },
+      A: function(d) {
+        return locale_days[d.getDay()];
+      },
+      b: function(d) {
+        return locale_shortMonths[d.getMonth()];
+      },
+      B: function(d) {
+        return locale_months[d.getMonth()];
+      },
+      c: d3_time_format(locale_dateTime),
+      d: function(d, p) {
+        return d3_time_formatPad(d.getDate(), p, 2);
+      },
+      e: function(d, p) {
+        return d3_time_formatPad(d.getDate(), p, 2);
+      },
+      H: function(d, p) {
+        return d3_time_formatPad(d.getHours(), p, 2);
+      },
+      I: function(d, p) {
+        return d3_time_formatPad(d.getHours() % 12 || 12, p, 2);
+      },
+      j: function(d, p) {
+        return d3_time_formatPad(1 + d3_time.dayOfYear(d), p, 3);
+      },
+      L: function(d, p) {
+        return d3_time_formatPad(d.getMilliseconds(), p, 3);
+      },
+      m: function(d, p) {
+        return d3_time_formatPad(d.getMonth() + 1, p, 2);
+      },
+      M: function(d, p) {
+        return d3_time_formatPad(d.getMinutes(), p, 2);
+      },
+      p: function(d) {
+        return locale_periods[+(d.getHours() >= 12)];
+      },
+      S: function(d, p) {
+        return d3_time_formatPad(d.getSeconds(), p, 2);
+      },
+      U: function(d, p) {
+        return d3_time_formatPad(d3_time.sundayOfYear(d), p, 2);
+      },
+      w: function(d) {
+        return d.getDay();
+      },
+      W: function(d, p) {
+        return d3_time_formatPad(d3_time.mondayOfYear(d), p, 2);
+      },
+      x: d3_time_format(locale_date),
+      X: d3_time_format(locale_time),
+      y: function(d, p) {
+        return d3_time_formatPad(d.getFullYear() % 100, p, 2);
+      },
+      Y: function(d, p) {
+        return d3_time_formatPad(d.getFullYear() % 1e4, p, 4);
+      },
+      Z: d3_time_zone,
+      "%": function() {
+        return "%";
+      }
+    };
+    var d3_time_parsers = {
+      a: d3_time_parseWeekdayAbbrev,
+      A: d3_time_parseWeekday,
+      b: d3_time_parseMonthAbbrev,
+      B: d3_time_parseMonth,
+      c: d3_time_parseLocaleFull,
+      d: d3_time_parseDay,
+      e: d3_time_parseDay,
+      H: d3_time_parseHour24,
+      I: d3_time_parseHour24,
+      j: d3_time_parseDayOfYear,
+      L: d3_time_parseMilliseconds,
+      m: d3_time_parseMonthNumber,
+      M: d3_time_parseMinutes,
+      p: d3_time_parseAmPm,
+      S: d3_time_parseSeconds,
+      U: d3_time_parseWeekNumberSunday,
+      w: d3_time_parseWeekdayNumber,
+      W: d3_time_parseWeekNumberMonday,
+      x: d3_time_parseLocaleDate,
+      X: d3_time_parseLocaleTime,
+      y: d3_time_parseYear,
+      Y: d3_time_parseFullYear,
+      Z: d3_time_parseZone,
+      "%": d3_time_parseLiteralPercent
+    };
+    function d3_time_parseWeekdayAbbrev(date, string, i) {
+      d3_time_dayAbbrevRe.lastIndex = 0;
+      var n = d3_time_dayAbbrevRe.exec(string.slice(i));
+      return n ? (date.w = d3_time_dayAbbrevLookup.get(n[0].toLowerCase()), i + n[0].length) : -1;
+    }
+    function d3_time_parseWeekday(date, string, i) {
+      d3_time_dayRe.lastIndex = 0;
+      var n = d3_time_dayRe.exec(string.slice(i));
+      return n ? (date.w = d3_time_dayLookup.get(n[0].toLowerCase()), i + n[0].length) : -1;
+    }
+    function d3_time_parseMonthAbbrev(date, string, i) {
+      d3_time_monthAbbrevRe.lastIndex = 0;
+      var n = d3_time_monthAbbrevRe.exec(string.slice(i));
+      return n ? (date.m = d3_time_monthAbbrevLookup.get(n[0].toLowerCase()), i + n[0].length) : -1;
+    }
+    function d3_time_parseMonth(date, string, i) {
+      d3_time_monthRe.lastIndex = 0;
+      var n = d3_time_monthRe.exec(string.slice(i));
+      return n ? (date.m = d3_time_monthLookup.get(n[0].toLowerCase()), i + n[0].length) : -1;
+    }
+    function d3_time_parseLocaleFull(date, string, i) {
+      return d3_time_parse(date, d3_time_formats.c.toString(), string, i);
+    }
+    function d3_time_parseLocaleDate(date, string, i) {
+      return d3_time_parse(date, d3_time_formats.x.toString(), string, i);
+    }
+    function d3_time_parseLocaleTime(date, string, i) {
+      return d3_time_parse(date, d3_time_formats.X.toString(), string, i);
+    }
+    function d3_time_parseAmPm(date, string, i) {
+      var n = d3_time_periodLookup.get(string.slice(i, i += 2).toLowerCase());
+      return n == null ? -1 : (date.p = n, i);
+    }
+    return d3_time_format;
+  }
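+  // Padding modifiers for directives: "%-d" (none), "%_d" (space), "%0d"
+  // (zero). Zero-padding is the default, except %e which pads with spaces.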
+  var d3_time_formatPads = {
+    "-": "",
+    _: " ",
+    "0": "0"
+  }, d3_time_numberRe = /^\s*\d+/, d3_time_percentRe = /^%/;
+  function d3_time_formatPad(value, fill, width) {
+    var sign = value < 0 ? "-" : "", string = (sign ? -value : value) + "", length = string.length;
+    return sign + (length < width ? new Array(width - length + 1).join(fill) + string : string);
+  }
+  function d3_time_formatRe(names) {
+    return new RegExp("^(?:" + names.map(d3.requote).join("|") + ")", "i");
+  }
+  function d3_time_formatLookup(names) {
+    var map = new d3_Map(), i = -1, n = names.length;
+    while (++i < n) map.set(names[i].toLowerCase(), i);
+    return map;
+  }
+  function d3_time_parseWeekdayNumber(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 1));
+    return n ? (date.w = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseWeekNumberSunday(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i));
+    return n ? (date.U = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseWeekNumberMonday(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i));
+    return n ? (date.W = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseFullYear(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 4));
+    return n ? (date.y = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseYear(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 2));
+    return n ? (date.y = d3_time_expandYear(+n[0]), i + n[0].length) : -1;
+  }
+  function d3_time_parseZone(date, string, i) {
+    return /^[+-]\d{4}$/.test(string = string.slice(i, i + 5)) ? (date.Z = -string, i + 5) : -1;
+  }
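+  // Two-digit years pivot at 68: 69-99 map to 1969-1999, 00-68 to 2000-2068.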
+  function d3_time_expandYear(d) {
+    return d + (d > 68 ? 1900 : 2e3);
+  }
+  function d3_time_parseMonthNumber(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 2));
+    return n ? (date.m = n[0] - 1, i + n[0].length) : -1;
+  }
+  function d3_time_parseDay(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 2));
+    return n ? (date.d = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseDayOfYear(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 3));
+    return n ? (date.j = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseHour24(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 2));
+    return n ? (date.H = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseMinutes(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 2));
+    return n ? (date.M = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseSeconds(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 2));
+    return n ? (date.S = +n[0], i + n[0].length) : -1;
+  }
+  function d3_time_parseMilliseconds(date, string, i) {
+    d3_time_numberRe.lastIndex = 0;
+    var n = d3_time_numberRe.exec(string.slice(i, i + 3));
+    return n ? (date.L = +n[0], i + n[0].length) : -1;
+  }
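+  // Formats the local UTC offset as +HHMM/-HHMM; the sign is flipped because
+  // getTimezoneOffset() reports minutes *behind* UTC (positive west of UTC).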
+  function d3_time_zone(d) {
+    var z = d.getTimezoneOffset(), zs = z > 0 ? "-" : "+", zh = abs(z) / 60 | 0, zm = abs(z) % 60;
+    return zs + d3_time_formatPad(zh, "0", 2) + d3_time_formatPad(zm, "0", 2);
+  }
+  function d3_time_parseLiteralPercent(date, string, i) {
+    d3_time_percentRe.lastIndex = 0;
+    var n = d3_time_percentRe.exec(string.slice(i, i + 1));
+    return n ? i + n[0].length : -1;
+  }
+  function d3_time_formatMulti(formats) {
+    var n = formats.length, i = -1;
+    while (++i < n) formats[i][0] = this(formats[i][0]);
+    return function(date) {
+      var i = 0, f = formats[i];
+      while (!f[1](date)) f = formats[++i];
+      return f[0](date);
+    };
+  }
+  d3.locale = function(locale) {
+    return {
+      numberFormat: d3_locale_numberFormat(locale),
+      timeFormat: d3_locale_timeFormat(locale)
+    };
+  };
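+  // A d3 locale bundles number and time formatting. The default en-US locale
+  // below backs the top-level d3.format; the time side is wired up similarly
+  // elsewhere in this file.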
+  var d3_locale_enUS = d3.locale({
+    decimal: ".",
+    thousands: ",",
+    grouping: [ 3 ],
+    currency: [ "$", "" ],
+    dateTime: "%a %b %e %X %Y",
+    date: "%m/%d/%Y",
+    time: "%H:%M:%S",
+    periods: [ "AM", "PM" ],
+    days: [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ],
+    shortDays: [ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" ],
+    months: [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ],
+    shortMonths: [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ]
+  });
+  d3.format = d3_locale_enUS.numberFormat;
+  d3.geo = {};
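+  // d3_adder accumulates a sum in two floats (s + t) using the exact two-sum
+  // of Knuth/Kahan-style compensated summation, so the tiny spherical-excess
+  // terms added below are not lost to floating-point roundoff.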
+  function d3_adder() {}
+  d3_adder.prototype = {
+    s: 0,
+    t: 0,
+    add: function(y) {
+      d3_adderSum(y, this.t, d3_adderTemp);
+      d3_adderSum(d3_adderTemp.s, this.s, this);
+      if (this.s) this.t += d3_adderTemp.t; else this.s = d3_adderTemp.t;
+    },
+    reset: function() {
+      this.s = this.t = 0;
+    },
+    valueOf: function() {
+      return this.s;
+    }
+  };
+  var d3_adderTemp = new d3_adder();
+  function d3_adderSum(a, b, o) {
+    var x = o.s = a + b, bv = x - a, av = x - bv;
+    o.t = a - av + (b - bv);
+  }
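+  // d3.geo.stream walks any GeoJSON object and replays it on a listener
+  // implementing the stream protocol: point, lineStart/lineEnd,
+  // polygonStart/polygonEnd and sphere. The geo computations below are all
+  // expressed as such listeners.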
+  d3.geo.stream = function(object, listener) {
+    if (object && d3_geo_streamObjectType.hasOwnProperty(object.type)) {
+      d3_geo_streamObjectType[object.type](object, listener);
+    } else {
+      d3_geo_streamGeometry(object, listener);
+    }
+  };
+  function d3_geo_streamGeometry(geometry, listener) {
+    if (geometry && d3_geo_streamGeometryType.hasOwnProperty(geometry.type)) {
+      d3_geo_streamGeometryType[geometry.type](geometry, listener);
+    }
+  }
+  var d3_geo_streamObjectType = {
+    Feature: function(feature, listener) {
+      d3_geo_streamGeometry(feature.geometry, listener);
+    },
+    FeatureCollection: function(object, listener) {
+      var features = object.features, i = -1, n = features.length;
+      while (++i < n) d3_geo_streamGeometry(features[i].geometry, listener);
+    }
+  };
+  var d3_geo_streamGeometryType = {
+    Sphere: function(object, listener) {
+      listener.sphere();
+    },
+    Point: function(object, listener) {
+      object = object.coordinates;
+      listener.point(object[0], object[1], object[2]);
+    },
+    MultiPoint: function(object, listener) {
+      var coordinates = object.coordinates, i = -1, n = coordinates.length;
+      while (++i < n) object = coordinates[i], listener.point(object[0], object[1], object[2]);
+    },
+    LineString: function(object, listener) {
+      d3_geo_streamLine(object.coordinates, listener, 0);
+    },
+    MultiLineString: function(object, listener) {
+      var coordinates = object.coordinates, i = -1, n = coordinates.length;
+      while (++i < n) d3_geo_streamLine(coordinates[i], listener, 0);
+    },
+    Polygon: function(object, listener) {
+      d3_geo_streamPolygon(object.coordinates, listener);
+    },
+    MultiPolygon: function(object, listener) {
+      var coordinates = object.coordinates, i = -1, n = coordinates.length;
+      while (++i < n) d3_geo_streamPolygon(coordinates[i], listener);
+    },
+    GeometryCollection: function(object, listener) {
+      var geometries = object.geometries, i = -1, n = geometries.length;
+      while (++i < n) d3_geo_streamGeometry(geometries[i], listener);
+    }
+  };
+  function d3_geo_streamLine(coordinates, listener, closed) {
+    var i = -1, n = coordinates.length - closed, coordinate;
+    listener.lineStart();
+    while (++i < n) coordinate = coordinates[i], listener.point(coordinate[0], coordinate[1], coordinate[2]);
+    listener.lineEnd();
+  }
+  function d3_geo_streamPolygon(coordinates, listener) {
+    var i = -1, n = coordinates.length;
+    listener.polygonStart();
+    while (++i < n) d3_geo_streamLine(coordinates[i], listener, 1);
+    listener.polygonEnd();
+  }
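+  // Spherical area in steradians: each ring contributes its signed spherical
+  // excess, accumulated with the compensated adder. A ring whose excess comes
+  // out negative is reinterpreted as enclosing the rest of the sphere
+  // (4π + area); the full sphere is 4π.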
+  d3.geo.area = function(object) {
+    d3_geo_areaSum = 0;
+    d3.geo.stream(object, d3_geo_area);
+    return d3_geo_areaSum;
+  };
+  var d3_geo_areaSum, d3_geo_areaRingSum = new d3_adder();
+  var d3_geo_area = {
+    sphere: function() {
+      d3_geo_areaSum += 4 * π;
+    },
+    point: d3_noop,
+    lineStart: d3_noop,
+    lineEnd: d3_noop,
+    polygonStart: function() {
+      d3_geo_areaRingSum.reset();
+      d3_geo_area.lineStart = d3_geo_areaRingStart;
+    },
+    polygonEnd: function() {
+      var area = 2 * d3_geo_areaRingSum;
+      d3_geo_areaSum += area < 0 ? 4 * π + area : area;
+      d3_geo_area.lineStart = d3_geo_area.lineEnd = d3_geo_area.point = d3_noop;
+    }
+  };
+  function d3_geo_areaRingStart() {
+    var λ00, φ00, λ0, cosφ0, sinφ0;
+    d3_geo_area.point = function(λ, φ) {
+      d3_geo_area.point = nextPoint;
+      λ0 = (λ00 = λ) * d3_radians, cosφ0 = Math.cos(φ = (φ00 = φ) * d3_radians / 2 + π / 4), sinφ0 = Math.sin(φ);
+    };
+    function nextPoint(λ, φ) {
+      λ *= d3_radians;
+      φ = φ * d3_radians / 2 + π / 4;
+      var dλ = λ - λ0, sdλ = dλ >= 0 ? 1 : -1, adλ = sdλ * dλ, cosφ = Math.cos(φ), sinφ = Math.sin(φ), k = sinφ0 * sinφ, u = cosφ0 * cosφ + k * Math.cos(adλ), v = k * sdλ * Math.sin(adλ);
+      d3_geo_areaRingSum.add(Math.atan2(v, u));
+      λ0 = λ, cosφ0 = cosφ, sinφ0 = sinφ;
+    }
+    d3_geo_area.lineEnd = function() {
+      nextPoint(λ00, φ00);
+    };
+  }
+  function d3_geo_cartesian(spherical) {
+    var λ = spherical[0], φ = spherical[1], cosφ = Math.cos(φ);
+    return [ cosφ * Math.cos(λ), cosφ * Math.sin(λ), Math.sin(φ) ];
+  }
+  function d3_geo_cartesianDot(a, b) {
+    return a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
+  }
+  function d3_geo_cartesianCross(a, b) {
+    return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0] ];
+  }
+  function d3_geo_cartesianAdd(a, b) {
+    a[0] += b[0];
+    a[1] += b[1];
+    a[2] += b[2];
+  }
+  function d3_geo_cartesianScale(vector, k) {
+    return [ vector[0] * k, vector[1] * k, vector[2] * k ];
+  }
+  function d3_geo_cartesianNormalize(d) {
+    var l = Math.sqrt(d[0] * d[0] + d[1] * d[1] + d[2] * d[2]);
+    d[0] /= l;
+    d[1] /= l;
+    d[2] /= l;
+  }
+  function d3_geo_spherical(cartesian) {
+    return [ Math.atan2(cartesian[1], cartesian[0]), d3_asin(cartesian[2]) ];
+  }
+  function d3_geo_sphericalEqual(a, b) {
+    return abs(a[0] - b[0]) < ε && abs(a[1] - b[1]) < ε;
+  }
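+  // d3.geo.bounds returns [[λ0, φ0], [λ1, φ1]] in degrees. Candidate
+  // longitude ranges are collected per ring, merged, and the pair bracketing
+  // the widest gap is chosen, so boxes crossing the antimeridian stay tight;
+  // rings enclosing a pole force φ to ±90.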
+  d3.geo.bounds = function() {
+    var λ0, φ0, λ1, φ1, λ_, λ__, φ__, p0, dλSum, ranges, range;
+    var bound = {
+      point: point,
+      lineStart: lineStart,
+      lineEnd: lineEnd,
+      polygonStart: function() {
+        bound.point = ringPoint;
+        bound.lineStart = ringStart;
+        bound.lineEnd = ringEnd;
+        dλSum = 0;
+        d3_geo_area.polygonStart();
+      },
+      polygonEnd: function() {
+        d3_geo_area.polygonEnd();
+        bound.point = point;
+        bound.lineStart = lineStart;
+        bound.lineEnd = lineEnd;
+        if (d3_geo_areaRingSum < 0) λ0 = -(λ1 = 180), φ0 = -(φ1 = 90); else if (dλSum > ε) φ1 = 90; else if (dλSum < -ε) φ0 = -90;
+        range[0] = λ0, range[1] = λ1;
+      }
+    };
+    function point(λ, φ) {
+      ranges.push(range = [ λ0 = λ, λ1 = λ ]);
+      if (φ < φ0) φ0 = φ;
+      if (φ > φ1) φ1 = φ;
+    }
+    function linePoint(λ, φ) {
+      var p = d3_geo_cartesian([ λ * d3_radians, φ * d3_radians ]);
+      if (p0) {
+        var normal = d3_geo_cartesianCross(p0, p), equatorial = [ normal[1], -normal[0], 0 ], inflection = d3_geo_cartesianCross(equatorial, normal);
+        d3_geo_cartesianNormalize(inflection);
+        inflection = d3_geo_spherical(inflection);
+        var dλ = λ - λ_, s = dλ > 0 ? 1 : -1, λi = inflection[0] * d3_degrees * s, antimeridian = abs(dλ) > 180;
+        if (antimeridian ^ (s * λ_ < λi && λi < s * λ)) {
+          var φi = inflection[1] * d3_degrees;
+          if (φi > φ1) φ1 = φi;
+        } else if (λi = (λi + 360) % 360 - 180, antimeridian ^ (s * λ_ < λi && λi < s * λ)) {
+          var φi = -inflection[1] * d3_degrees;
+          if (φi < φ0) φ0 = φi;
+        } else {
+          if (φ < φ0) φ0 = φ;
+          if (φ > φ1) φ1 = φ;
+        }
+        if (antimeridian) {
+          if (λ < λ_) {
+            if (angle(λ0, λ) > angle(λ0, λ1)) λ1 = λ;
+          } else {
+            if (angle(λ, λ1) > angle(λ0, λ1)) λ0 = λ;
+          }
+        } else {
+          if (λ1 >= λ0) {
+            if (λ < λ0) λ0 = λ;
+            if (λ > λ1) λ1 = λ;
+          } else {
+            if (λ > λ_) {
+              if (angle(λ0, λ) > angle(λ0, λ1)) λ1 = λ;
+            } else {
+              if (angle(λ, λ1) > angle(λ0, λ1)) λ0 = λ;
+            }
+          }
+        }
+      } else {
+        point(λ, φ);
+      }
+      p0 = p, λ_ = λ;
+    }
+    function lineStart() {
+      bound.point = linePoint;
+    }
+    function lineEnd() {
+      range[0] = λ0, range[1] = λ1;
+      bound.point = point;
+      p0 = null;
+    }
+    function ringPoint(λ, φ) {
+      if (p0) {
+        var dλ = λ - λ_;
+        dλSum += abs(dλ) > 180 ? dλ + (dλ > 0 ? 360 : -360) : dλ;
+      } else λ__ = λ, φ__ = φ;
+      d3_geo_area.point(λ, φ);
+      linePoint(λ, φ);
+    }
+    function ringStart() {
+      d3_geo_area.lineStart();
+    }
+    function ringEnd() {
+      ringPoint(λ__, φ__);
+      d3_geo_area.lineEnd();
+      if (abs(dλSum) > ε) λ0 = -(λ1 = 180);
+      range[0] = λ0, range[1] = λ1;
+      p0 = null;
+    }
+    function angle(λ0, λ1) {
+      return (λ1 -= λ0) < 0 ? λ1 + 360 : λ1;
+    }
+    function compareRanges(a, b) {
+      return a[0] - b[0];
+    }
+    function withinRange(x, range) {
+      return range[0] <= range[1] ? range[0] <= x && x <= range[1] : x < range[0] || range[1] < x;
+    }
+    return function(feature) {
+      φ1 = λ1 = -(λ0 = φ0 = Infinity);
+      ranges = [];
+      d3.geo.stream(feature, bound);
+      var n = ranges.length;
+      if (n) {
+        ranges.sort(compareRanges);
+        for (var i = 1, a = ranges[0], b, merged = [ a ]; i < n; ++i) {
+          b = ranges[i];
+          if (withinRange(b[0], a) || withinRange(b[1], a)) {
+            if (angle(a[0], b[1]) > angle(a[0], a[1])) a[1] = b[1];
+            if (angle(b[0], a[1]) > angle(a[0], a[1])) a[0] = b[0];
+          } else {
+            merged.push(a = b);
+          }
+        }
+        var best = -Infinity, dλ;
+        for (var n = merged.length - 1, i = 0, a = merged[n], b; i <= n; a = b, ++i) {
+          b = merged[i];
+          if ((dλ = angle(a[1], b[0])) > best) best = dλ, λ0 = b[0], λ1 = a[1];
+        }
+      }
+      ranges = range = null;
+      return λ0 === Infinity || φ0 === Infinity ? [ [ NaN, NaN ], [ NaN, NaN ] ] : [ [ λ0, φ0 ], [ λ1, φ1 ] ];
+    };
+  }();
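+  // Spherical centroid: W0/X0..Z0 average raw points, W1/X1..Z1 weight arc
+  // midpoints by great-circle length, and X2..Z2 accumulate area terms; the
+  // highest-dimensional measure that is non-degenerate wins.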
+  d3.geo.centroid = function(object) {
+    d3_geo_centroidW0 = d3_geo_centroidW1 = d3_geo_centroidX0 = d3_geo_centroidY0 = d3_geo_centroidZ0 = d3_geo_centroidX1 = d3_geo_centroidY1 = d3_geo_centroidZ1 = d3_geo_centroidX2 = d3_geo_centroidY2 = d3_geo_centroidZ2 = 0;
+    d3.geo.stream(object, d3_geo_centroid);
+    var x = d3_geo_centroidX2, y = d3_geo_centroidY2, z = d3_geo_centroidZ2, m = x * x + y * y + z * z;
+    if (m < ε2) {
+      x = d3_geo_centroidX1, y = d3_geo_centroidY1, z = d3_geo_centroidZ1;
+      if (d3_geo_centroidW1 < ε) x = d3_geo_centroidX0, y = d3_geo_centroidY0, z = d3_geo_centroidZ0;
+      m = x * x + y * y + z * z;
+      if (m < ε2) return [ NaN, NaN ];
+    }
+    return [ Math.atan2(y, x) * d3_degrees, d3_asin(z / Math.sqrt(m)) * d3_degrees ];
+  };
+  var d3_geo_centroidW0, d3_geo_centroidW1, d3_geo_centroidX0, d3_geo_centroidY0, d3_geo_centroidZ0, d3_geo_centroidX1, d3_geo_centroidY1, d3_geo_centroidZ1, d3_geo_centroidX2, d3_geo_centroidY2, d3_geo_centroidZ2;
+  var d3_geo_centroid = {
+    sphere: d3_noop,
+    point: d3_geo_centroidPoint,
+    lineStart: d3_geo_centroidLineStart,
+    lineEnd: d3_geo_centroidLineEnd,
+    polygonStart: function() {
+      d3_geo_centroid.lineStart = d3_geo_centroidRingStart;
+    },
+    polygonEnd: function() {
+      d3_geo_centroid.lineStart = d3_geo_centroidLineStart;
+    }
+  };
+  function d3_geo_centroidPoint(λ, φ) {
+    λ *= d3_radians;
+    var cosφ = Math.cos(φ *= d3_radians);
+    d3_geo_centroidPointXYZ(cosφ * Math.cos(λ), cosφ * Math.sin(λ), Math.sin(φ));
+  }
+  function d3_geo_centroidPointXYZ(x, y, z) {
+    ++d3_geo_centroidW0;
+    d3_geo_centroidX0 += (x - d3_geo_centroidX0) / d3_geo_centroidW0;
+    d3_geo_centroidY0 += (y - d3_geo_centroidY0) / d3_geo_centroidW0;
+    d3_geo_centroidZ0 += (z - d3_geo_centroidZ0) / d3_geo_centroidW0;
+  }
+  function d3_geo_centroidLineStart() {
+    var x0, y0, z0;
+    d3_geo_centroid.point = function(λ, φ) {
+      λ *= d3_radians;
+      var cosφ = Math.cos(φ *= d3_radians);
+      x0 = cosφ * Math.cos(λ);
+      y0 = cosφ * Math.sin(λ);
+      z0 = Math.sin(φ);
+      d3_geo_centroid.point = nextPoint;
+      d3_geo_centroidPointXYZ(x0, y0, z0);
+    };
+    function nextPoint(λ, φ) {
+      λ *= d3_radians;
+      var cosφ = Math.cos(φ *= d3_radians), x = cosφ * Math.cos(λ), y = cosφ * Math.sin(λ), z = Math.sin(φ), w = Math.atan2(Math.sqrt((w = y0 * z - z0 * y) * w + (w = z0 * x - x0 * z) * w + (w = x0 * y - y0 * x) * w), x0 * x + y0 * y + z0 * z);
+      d3_geo_centroidW1 += w;
+      d3_geo_centroidX1 += w * (x0 + (x0 = x));
+      d3_geo_centroidY1 += w * (y0 + (y0 = y));
+      d3_geo_centroidZ1 += w * (z0 + (z0 = z));
+      d3_geo_centroidPointXYZ(x0, y0, z0);
+    }
+  }
+  function d3_geo_centroidLineEnd() {
+    d3_geo_centroid.point = d3_geo_centroidPoint;
+  }
+  function d3_geo_centroidRingStart() {
+    var λ00, φ00, x0, y0, z0;
+    d3_geo_centroid.point = function(λ, φ) {
+      λ00 = λ, φ00 = φ;
+      d3_geo_centroid.point = nextPoint;
+      λ *= d3_radians;
+      var cosφ = Math.cos(φ *= d3_radians);
+      x0 = cosφ * Math.cos(λ);
+      y0 = cosφ * Math.sin(λ);
+      z0 = Math.sin(φ);
+      d3_geo_centroidPointXYZ(x0, y0, z0);
+    };
+    d3_geo_centroid.lineEnd = function() {
+      nextPoint(λ00, φ00);
+      d3_geo_centroid.lineEnd = d3_geo_centroidLineEnd;
+      d3_geo_centroid.point = d3_geo_centroidPoint;
+    };
+    function nextPoint(λ, φ) {
+      λ *= d3_radians;
+      var cosφ = Math.cos(φ *= d3_radians), x = cosφ * Math.cos(λ), y = cosφ * Math.sin(λ), z = Math.sin(φ), cx = y0 * z - z0 * y, cy = z0 * x - x0 * z, cz = x0 * y - y0 * x, m = Math.sqrt(cx * cx + cy * cy + cz * cz), u = x0 * x + y0 * y + z0 * z, v = m && -d3_acos(u) / m, w = Math.atan2(m, u);
+      d3_geo_centroidX2 += v * cx;
+      d3_geo_centroidY2 += v * cy;
+      d3_geo_centroidZ2 += v * cz;
+      d3_geo_centroidW1 += w;
+      d3_geo_centroidX1 += w * (x0 + (x0 = x));
+      d3_geo_centroidY1 += w * (y0 + (y0 = y));
+      d3_geo_centroidZ1 += w * (z0 + (z0 = z));
+      d3_geo_centroidPointXYZ(x0, y0, z0);
+    }
+  }
+  function d3_geo_compose(a, b) {
+    function compose(x, y) {
+      return x = a(x, y), b(x[0], x[1]);
+    }
+    if (a.invert && b.invert) compose.invert = function(x, y) {
+      return x = b.invert(x, y), x && a.invert(x[0], x[1]);
+    };
+    return compose;
+  }
+  function d3_true() {
+    return true;
+  }
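+  // Rejoins clipped segments into polygons by walking two circular linked
+  // lists of intersections (subject and clip) and alternating between them,
+  // interpolating along the clip edge; in the spirit of Greiner-Hormann
+  // polygon clipping.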
+  function d3_geo_clipPolygon(segments, compare, clipStartInside, interpolate, listener) {
+    var subject = [], clip = [];
+    segments.forEach(function(segment) {
+      if ((n = segment.length - 1) <= 0) return;
+      var n, p0 = segment[0], p1 = segment[n];
+      if (d3_geo_sphericalEqual(p0, p1)) {
+        listener.lineStart();
+        for (var i = 0; i < n; ++i) listener.point((p0 = segment[i])[0], p0[1]);
+        listener.lineEnd();
+        return;
+      }
+      var a = new d3_geo_clipPolygonIntersection(p0, segment, null, true), b = new d3_geo_clipPolygonIntersection(p0, null, a, false);
+      a.o = b;
+      subject.push(a);
+      clip.push(b);
+      a = new d3_geo_clipPolygonIntersection(p1, segment, null, false);
+      b = new d3_geo_clipPolygonIntersection(p1, null, a, true);
+      a.o = b;
+      subject.push(a);
+      clip.push(b);
+    });
+    clip.sort(compare);
+    d3_geo_clipPolygonLinkCircular(subject);
+    d3_geo_clipPolygonLinkCircular(clip);
+    if (!subject.length) return;
+    for (var i = 0, entry = clipStartInside, n = clip.length; i < n; ++i) {
+      clip[i].e = entry = !entry;
+    }
+    var start = subject[0], points, point;
+    while (1) {
+      var current = start, isSubject = true;
+      while (current.v) if ((current = current.n) === start) return;
+      points = current.z;
+      listener.lineStart();
+      do {
+        current.v = current.o.v = true;
+        if (current.e) {
+          if (isSubject) {
+            for (var i = 0, n = points.length; i < n; ++i) listener.point((point = points[i])[0], point[1]);
+          } else {
+            interpolate(current.x, current.n.x, 1, listener);
+          }
+          current = current.n;
+        } else {
+          if (isSubject) {
+            points = current.p.z;
+            for (var i = points.length - 1; i >= 0; --i) listener.point((point = points[i])[0], point[1]);
+          } else {
+            interpolate(current.x, current.p.x, -1, listener);
+          }
+          current = current.p;
+        }
+        current = current.o;
+        points = current.z;
+        isSubject = !isSubject;
+      } while (!current.v);
+      listener.lineEnd();
+    }
+  }
+  function d3_geo_clipPolygonLinkCircular(array) {
+    if (!(n = array.length)) return;
+    var n, i = 0, a = array[0], b;
+    while (++i < n) {
+      a.n = b = array[i];
+      b.p = a;
+      a = b;
+    }
+    a.n = b = array[0];
+    b.p = a;
+  }
+  function d3_geo_clipPolygonIntersection(point, points, other, entry) {
+    this.x = point;
+    this.z = points;
+    this.o = other;
+    this.e = entry;
+    this.v = false;
+    this.n = this.p = null;
+  }
+  function d3_geo_clip(pointVisible, clipLine, interpolate, clipStart) {
+    return function(rotate, listener) {
+      var line = clipLine(listener), rotatedClipStart = rotate.invert(clipStart[0], clipStart[1]);
+      var clip = {
+        point: point,
+        lineStart: lineStart,
+        lineEnd: lineEnd,
+        polygonStart: function() {
+          clip.point = pointRing;
+          clip.lineStart = ringStart;
+          clip.lineEnd = ringEnd;
+          segments = [];
+          polygon = [];
+        },
+        polygonEnd: function() {
+          clip.point = point;
+          clip.lineStart = lineStart;
+          clip.lineEnd = lineEnd;
+          segments = d3.merge(segments);
+          var clipStartInside = d3_geo_pointInPolygon(rotatedClipStart, polygon);
+          if (segments.length) {
+            if (!polygonStarted) listener.polygonStart(), polygonStarted = true;
+            d3_geo_clipPolygon(segments, d3_geo_clipSort, clipStartInside, interpolate, listener);
+          } else if (clipStartInside) {
+            if (!polygonStarted) listener.polygonStart(), polygonStarted = true;
+            listener.lineStart();
+            interpolate(null, null, 1, listener);
+            listener.lineEnd();
+          }
+          if (polygonStarted) listener.polygonEnd(), polygonStarted = false;
+          segments = polygon = null;
+        },
+        sphere: function() {
+          listener.polygonStart();
+          listener.lineStart();
+          interpolate(null, null, 1, listener);
+          listener.lineEnd();
+          listener.polygonEnd();
+        }
+      };
+      function point(λ, φ) {
+        var point = rotate(λ, φ);
+        if (pointVisible(λ = point[0], φ = point[1])) listener.point(λ, φ);
+      }
+      function pointLine(λ, φ) {
+        var point = rotate(λ, φ);
+        line.point(point[0], point[1]);
+      }
+      function lineStart() {
+        clip.point = pointLine;
+        line.lineStart();
+      }
+      function lineEnd() {
+        clip.point = point;
+        line.lineEnd();
+      }
+      var segments;
+      var buffer = d3_geo_clipBufferListener(), ringListener = clipLine(buffer), polygonStarted = false, polygon, ring;
+      function pointRing(λ, φ) {
+        ring.push([ λ, φ ]);
+        var point = rotate(λ, φ);
+        ringListener.point(point[0], point[1]);
+      }
+      function ringStart() {
+        ringListener.lineStart();
+        ring = [];
+      }
+      function ringEnd() {
+        pointRing(ring[0][0], ring[0][1]);
+        ringListener.lineEnd();
+        var clean = ringListener.clean(), ringSegments = buffer.buffer(), segment, n = ringSegments.length;
+        ring.pop();
+        polygon.push(ring);
+        ring = null;
+        if (!n) return;
+        if (clean & 1) {
+          segment = ringSegments[0];
+          var n = segment.length - 1, i = -1, point;
+          if (n > 0) {
+            if (!polygonStarted) listener.polygonStart(), polygonStarted = true;
+            listener.lineStart();
+            while (++i < n) listener.point((point = segment[i])[0], point[1]);
+            listener.lineEnd();
+          }
+          return;
+        }
+        if (n > 1 && clean & 2) ringSegments.push(ringSegments.pop().concat(ringSegments.shift()));
+        segments.push(ringSegments.filter(d3_geo_clipSegmentLength1));
+      }
+      return clip;
+    };
+  }
+  function d3_geo_clipSegmentLength1(segment) {
+    return segment.length > 1;
+  }
+  function d3_geo_clipBufferListener() {
+    var lines = [], line;
+    return {
+      lineStart: function() {
+        lines.push(line = []);
+      },
+      point: function(λ, φ) {
+        line.push([ λ, φ ]);
+      },
+      lineEnd: d3_noop,
+      buffer: function() {
+        var buffer = lines;
+        lines = [];
+        line = null;
+        return buffer;
+      },
+      rejoin: function() {
+        if (lines.length > 1) lines.push(lines.pop().concat(lines.shift()));
+      }
+    };
+  }
+  function d3_geo_clipSort(a, b) {
+    return ((a = a.x)[0] < 0 ? a[1] - halfπ - ε : halfπ - a[1]) - ((b = b.x)[0] < 0 ? b[1] - halfπ - ε : halfπ - b[1]);
+  }
+  var d3_geo_clipAntimeridian = d3_geo_clip(d3_true, d3_geo_clipAntimeridianLine, d3_geo_clipAntimeridianInterpolate, [ -π, -π / 2 ]);
+  function d3_geo_clipAntimeridianLine(listener) {
+    var λ0 = NaN, φ0 = NaN, sλ0 = NaN, clean;
+    return {
+      lineStart: function() {
+        listener.lineStart();
+        clean = 1;
+      },
+      point: function(λ1, φ1) {
+        var sλ1 = λ1 > 0 ? π : -π, dλ = abs(λ1 - λ0);
+        if (abs(dλ - π) < ε) {
+          listener.point(λ0, φ0 = (φ0 + φ1) / 2 > 0 ? halfπ : -halfπ);
+          listener.point(sλ0, φ0);
+          listener.lineEnd();
+          listener.lineStart();
+          listener.point(sλ1, φ0);
+          listener.point(λ1, φ0);
+          clean = 0;
+        } else if (sλ0 !== sλ1 && dλ >= π) {
+          if (abs(λ0 - sλ0) < ε) λ0 -= sλ0 * ε;
+          if (abs(λ1 - sλ1) < ε) λ1 -= sλ1 * ε;
+          φ0 = d3_geo_clipAntimeridianIntersect(λ0, φ0, λ1, φ1);
+          listener.point(sλ0, φ0);
+          listener.lineEnd();
+          listener.lineStart();
+          listener.point(sλ1, φ0);
+          clean = 0;
+        }
+        listener.point(λ0 = λ1, φ0 = φ1);
+        sλ0 = sλ1;
+      },
+      lineEnd: function() {
+        listener.lineEnd();
+        λ0 = φ0 = NaN;
+      },
+      clean: function() {
+        return 2 - clean;
+      }
+    };
+  }
+  function d3_geo_clipAntimeridianIntersect(λ0, φ0, λ1, φ1) {
+    var cosφ0, cosφ1, sinλ0_λ1 = Math.sin(λ0 - λ1);
+    return abs(sinλ0_λ1) > ε ? Math.atan((Math.sin(φ0) * (cosφ1 = Math.cos(φ1)) * Math.sin(λ1) - Math.sin(φ1) * (cosφ0 = Math.cos(φ0)) * Math.sin(λ0)) / (cosφ0 * cosφ1 * sinλ0_λ1)) : (φ0 + φ1) / 2;
+  }
+  function d3_geo_clipAntimeridianInterpolate(from, to, direction, listener) {
+    var φ;
+    if (from == null) {
+      φ = direction * halfπ;
+      listener.point(-π, φ);
+      listener.point(0, φ);
+      listener.point(π, φ);
+      listener.point(π, 0);
+      listener.point(π, -φ);
+      listener.point(0, -φ);
+      listener.point(-π, -φ);
+      listener.point(-π, 0);
+      listener.point(-π, φ);
+    } else if (abs(from[0] - to[0]) > ε) {
+      var s = from[0] < to[0] ? π : -π;
+      φ = direction * s / 2;
+      listener.point(-s, φ);
+      listener.point(0, φ);
+      listener.point(s, φ);
+    } else {
+      listener.point(to[0], to[1]);
+    }
+  }
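+  // Spherical point-in-polygon: counts signed crossings of the test point's
+  // meridian (a winding number) and tracks the traversed polar angle so that
+  // rings enclosing a pole are handled correctly.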
+  function d3_geo_pointInPolygon(point, polygon) {
+    var meridian = point[0], parallel = point[1], meridianNormal = [ Math.sin(meridian), -Math.cos(meridian), 0 ], polarAngle = 0, winding = 0;
+    d3_geo_areaRingSum.reset();
+    for (var i = 0, n = polygon.length; i < n; ++i) {
+      var ring = polygon[i], m = ring.length;
+      if (!m) continue;
+      var point0 = ring[0], λ0 = point0[0], φ0 = point0[1] / 2 + π / 4, sinφ0 = Math.sin(φ0), cosφ0 = Math.cos(φ0), j = 1;
+      while (true) {
+        if (j === m) j = 0;
+        point = ring[j];
+        var λ = point[0], φ = point[1] / 2 + π / 4, sinφ = Math.sin(φ), cosφ = Math.cos(φ), dλ = λ - λ0, sdλ = dλ >= 0 ? 1 : -1, adλ = sdλ * dλ, antimeridian = adλ > π, k = sinφ0 * sinφ;
+        d3_geo_areaRingSum.add(Math.atan2(k * sdλ * Math.sin(adλ), cosφ0 * cosφ + k * Math.cos(adλ)));
+        polarAngle += antimeridian ? dλ + sdλ * τ : dλ;
+        if (antimeridian ^ λ0 >= meridian ^ λ >= meridian) {
+          var arc = d3_geo_cartesianCross(d3_geo_cartesian(point0), d3_geo_cartesian(point));
+          d3_geo_cartesianNormalize(arc);
+          var intersection = d3_geo_cartesianCross(meridianNormal, arc);
+          d3_geo_cartesianNormalize(intersection);
+          var φarc = (antimeridian ^ dλ >= 0 ? -1 : 1) * d3_asin(intersection[2]);
+          if (parallel > φarc || parallel === φarc && (arc[0] || arc[1])) {
+            winding += antimeridian ^ dλ >= 0 ? 1 : -1;
+          }
+        }
+        if (!j++) break;
+        λ0 = λ, sinφ0 = sinφ, cosφ0 = cosφ, point0 = point;
+      }
+    }
+    return (polarAngle < -ε || polarAngle < ε && d3_geo_areaRingSum < -ε) ^ winding & 1;
+  }
+  function d3_geo_clipCircle(radius) {
+    var cr = Math.cos(radius), smallRadius = cr > 0, notHemisphere = abs(cr) > ε, interpolate = d3_geo_circleInterpolate(radius, 6 * d3_radians);
+    return d3_geo_clip(visible, clipLine, interpolate, smallRadius ? [ 0, -radius ] : [ -π, radius - π ]);
+    function visible(λ, φ) {
+      return Math.cos(λ) * Math.cos(φ) > cr;
+    }
+    function clipLine(listener) {
+      var point0, c0, v0, v00, clean;
+      return {
+        lineStart: function() {
+          v00 = v0 = false;
+          clean = 1;
+        },
+        point: function(λ, φ) {
+          var point1 = [ λ, φ ], point2, v = visible(λ, φ), c = smallRadius ? v ? 0 : code(λ, φ) : v ? code(λ + (λ < 0 ? π : -π), φ) : 0;
+          if (!point0 && (v00 = v0 = v)) listener.lineStart();
+          if (v !== v0) {
+            point2 = intersect(point0, point1);
+            if (d3_geo_sphericalEqual(point0, point2) || d3_geo_sphericalEqual(point1, point2)) {
+              point1[0] += ε;
+              point1[1] += ε;
+              v = visible(point1[0], point1[1]);
+            }
+          }
+          if (v !== v0) {
+            clean = 0;
+            if (v) {
+              listener.lineStart();
+              point2 = intersect(point1, point0);
+              listener.point(point2[0], point2[1]);
+            } else {
+              point2 = intersect(point0, point1);
+              listener.point(point2[0], point2[1]);
+              listener.lineEnd();
+            }
+            point0 = point2;
+          } else if (notHemisphere && point0 && smallRadius ^ v) {
+            var t;
+            if (!(c & c0) && (t = intersect(point1, point0, true))) {
+              clean = 0;
+              if (smallRadius) {
+                listener.lineStart();
+                listener.point(t[0][0], t[0][1]);
+                listener.point(t[1][0], t[1][1]);
+                listener.lineEnd();
+              } else {
+                listener.point(t[1][0], t[1][1]);
+                listener.lineEnd();
+                listener.lineStart();
+                listener.point(t[0][0], t[0][1]);
+              }
+            }
+          }
+          if (v && (!point0 || !d3_geo_sphericalEqual(point0, point1))) {
+            listener.point(point1[0], point1[1]);
+          }
+          point0 = point1, v0 = v, c0 = c;
+        },
+        lineEnd: function() {
+          if (v0) listener.lineEnd();
+          point0 = null;
+        },
+        clean: function() {
+          return clean | (v00 && v0) << 1;
+        }
+      };
+    }
+    function intersect(a, b, two) {
+      var pa = d3_geo_cartesian(a), pb = d3_geo_cartesian(b);
+      var n1 = [ 1, 0, 0 ], n2 = d3_geo_cartesianCross(pa, pb), n2n2 = d3_geo_cartesianDot(n2, n2), n1n2 = n2[0], determinant = n2n2 - n1n2 * n1n2;
+      if (!determinant) return !two && a;
+      var c1 = cr * n2n2 / determinant, c2 = -cr * n1n2 / determinant, n1xn2 = d3_geo_cartesianCross(n1, n2), A = d3_geo_cartesianScale(n1, c1), B = d3_geo_cartesianScale(n2, c2);
+      d3_geo_cartesianAdd(A, B);
+      var u = n1xn2, w = d3_geo_cartesianDot(A, u), uu = d3_geo_cartesianDot(u, u), t2 = w * w - uu * (d3_geo_cartesianDot(A, A) - 1);
+      if (t2 < 0) return;
+      var t = Math.sqrt(t2), q = d3_geo_cartesianScale(u, (-w - t) / uu);
+      d3_geo_cartesianAdd(q, A);
+      q = d3_geo_spherical(q);
+      if (!two) return q;
+      var λ0 = a[0], λ1 = b[0], φ0 = a[1], φ1 = b[1], z;
+      if (λ1 < λ0) z = λ0, λ0 = λ1, λ1 = z;
+      var δλ = λ1 - λ0, polar = abs(δλ - π) < ε, meridian = polar || δλ < ε;
+      if (!polar && φ1 < φ0) z = φ0, φ0 = φ1, φ1 = z;
+      if (meridian ? polar ? φ0 + φ1 > 0 ^ q[1] < (abs(q[0] - λ0) < ε ? φ0 : φ1) : φ0 <= q[1] && q[1] <= φ1 : δλ > π ^ (λ0 <= q[0] && q[0] <= λ1)) {
+        var q1 = d3_geo_cartesianScale(u, (-w + t) / uu);
+        d3_geo_cartesianAdd(q1, A);
+        return [ q, d3_geo_spherical(q1) ];
+      }
+    }
+    function code(λ, φ) {
+      var r = smallRadius ? radius : π - radius, code = 0;
+      if (λ < -r) code |= 1; else if (λ > r) code |= 2;
+      if (φ < -r) code |= 4; else if (φ > r) code |= 8;
+      return code;
+    }
+  }
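+  // Liang-Barsky-style parametric line clipping against the box
+  // [x0, x1] x [y0, y1]: the parameters t0/t1 are tightened against each edge
+  // in turn, and the endpoints are replaced only if the segment was cut.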
+  function d3_geom_clipLine(x0, y0, x1, y1) {
+    return function(line) {
+      var a = line.a, b = line.b, ax = a.x, ay = a.y, bx = b.x, by = b.y, t0 = 0, t1 = 1, dx = bx - ax, dy = by - ay, r;
+      r = x0 - ax;
+      if (!dx && r > 0) return;
+      r /= dx;
+      if (dx < 0) {
+        if (r < t0) return;
+        if (r < t1) t1 = r;
+      } else if (dx > 0) {
+        if (r > t1) return;
+        if (r > t0) t0 = r;
+      }
+      r = x1 - ax;
+      if (!dx && r < 0) return;
+      r /= dx;
+      if (dx < 0) {
+        if (r > t1) return;
+        if (r > t0) t0 = r;
+      } else if (dx > 0) {
+        if (r < t0) return;
+        if (r < t1) t1 = r;
+      }
+      r = y0 - ay;
+      if (!dy && r > 0) return;
+      r /= dy;
+      if (dy < 0) {
+        if (r < t0) return;
+        if (r < t1) t1 = r;
+      } else if (dy > 0) {
+        if (r > t1) return;
+        if (r > t0) t0 = r;
+      }
+      r = y1 - ay;
+      if (!dy && r < 0) return;
+      r /= dy;
+      if (dy < 0) {
+        if (r > t1) return;
+        if (r > t0) t0 = r;
+      } else if (dy > 0) {
+        if (r < t0) return;
+        if (r < t1) t1 = r;
+      }
+      if (t0 > 0) line.a = {
+        x: ax + t0 * dx,
+        y: ay + t0 * dy
+      };
+      if (t1 < 1) line.b = {
+        x: ax + t1 * dx,
+        y: ay + t1 * dy
+      };
+      return line;
+    };
+  }
+  var d3_geo_clipExtentMAX = 1e9;
+  d3.geo.clipExtent = function() {
+    var x0, y0, x1, y1, stream, clip, clipExtent = {
+      stream: function(output) {
+        if (stream) stream.valid = false;
+        stream = clip(output);
+        stream.valid = true;
+        return stream;
+      },
+      extent: function(_) {
+        if (!arguments.length) return [ [ x0, y0 ], [ x1, y1 ] ];
+        clip = d3_geo_clipExtent(x0 = +_[0][0], y0 = +_[0][1], x1 = +_[1][0], y1 = +_[1][1]);
+        if (stream) stream.valid = false, stream = null;
+        return clipExtent;
+      }
+    };
+    return clipExtent.extent([ [ 0, 0 ], [ 960, 500 ] ]);
+  };
+  function d3_geo_clipExtent(x0, y0, x1, y1) {
+    return function(listener) {
+      var listener_ = listener, bufferListener = d3_geo_clipBufferListener(), clipLine = d3_geom_clipLine(x0, y0, x1, y1), segments, polygon, ring;
+      var clip = {
+        point: point,
+        lineStart: lineStart,
+        lineEnd: lineEnd,
+        polygonStart: function() {
+          listener = bufferListener;
+          segments = [];
+          polygon = [];
+          clean = true;
+        },
+        polygonEnd: function() {
+          listener = listener_;
+          segments = d3.merge(segments);
+          var clipStartInside = insidePolygon([ x0, y1 ]), inside = clean && clipStartInside, visible = segments.length;
+          if (inside || visible) {
+            listener.polygonStart();
+            if (inside) {
+              listener.lineStart();
+              interpolate(null, null, 1, listener);
+              listener.lineEnd();
+            }
+            if (visible) {
+              d3_geo_clipPolygon(segments, compare, clipStartInside, interpolate, listener);
+            }
+            listener.polygonEnd();
+          }
+          segments = polygon = ring = null;
+        }
+      };
+      function insidePolygon(p) {
+        var wn = 0, n = polygon.length, y = p[1];
+        for (var i = 0; i < n; ++i) {
+          for (var j = 1, v = polygon[i], m = v.length, a = v[0], b; j < m; ++j) {
+            b = v[j];
+            if (a[1] <= y) {
+              if (b[1] > y && d3_cross2d(a, b, p) > 0) ++wn;
+            } else {
+              if (b[1] <= y && d3_cross2d(a, b, p) < 0) --wn;
+            }
+            a = b;
+          }
+        }
+        return wn !== 0;
+      }
+      function interpolate(from, to, direction, listener) {
+        var a = 0, a1 = 0;
+        if (from == null || (a = corner(from, direction)) !== (a1 = corner(to, direction)) || comparePoints(from, to) < 0 ^ direction > 0) {
+          do {
+            listener.point(a === 0 || a === 3 ? x0 : x1, a > 1 ? y1 : y0);
+          } while ((a = (a + direction + 4) % 4) !== a1);
+        } else {
+          listener.point(to[0], to[1]);
+        }
+      }
+      function pointVisible(x, y) {
+        return x0 <= x && x <= x1 && y0 <= y && y <= y1;
+      }
+      function point(x, y) {
+        if (pointVisible(x, y)) listener.point(x, y);
+      }
+      var x__, y__, v__, x_, y_, v_, first, clean;
+      function lineStart() {
+        clip.point = linePoint;
+        if (polygon) polygon.push(ring = []);
+        first = true;
+        v_ = false;
+        x_ = y_ = NaN;
+      }
+      function lineEnd() {
+        if (segments) {
+          linePoint(x__, y__);
+          if (v__ && v_) bufferListener.rejoin();
+          segments.push(bufferListener.buffer());
+        }
+        clip.point = point;
+        if (v_) listener.lineEnd();
+      }
+      function linePoint(x, y) {
+        x = Math.max(-d3_geo_clipExtentMAX, Math.min(d3_geo_clipExtentMAX, x));
+        y = Math.max(-d3_geo_clipExtentMAX, Math.min(d3_geo_clipExtentMAX, y));
+        var v = pointVisible(x, y);
+        if (polygon) ring.push([ x, y ]);
+        if (first) {
+          x__ = x, y__ = y, v__ = v;
+          first = false;
+          if (v) {
+            listener.lineStart();
+            listener.point(x, y);
+          }
+        } else {
+          if (v && v_) listener.point(x, y); else {
+            var l = {
+              a: {
+                x: x_,
+                y: y_
+              },
+              b: {
+                x: x,
+                y: y
+              }
+            };
+            if (clipLine(l)) {
+              if (!v_) {
+                listener.lineStart();
+                listener.point(l.a.x, l.a.y);
+              }
+              listener.point(l.b.x, l.b.y);
+              if (!v) listener.lineEnd();
+              clean = false;
+            } else if (v) {
+              listener.lineStart();
+              listener.point(x, y);
+              clean = false;
+            }
+          }
+        }
+        x_ = x, y_ = y, v_ = v;
+      }
+      return clip;
+    };
+    function corner(p, direction) {
+      return abs(p[0] - x0) < ε ? direction > 0 ? 0 : 3 : abs(p[0] - x1) < ε ? direction > 0 ? 2 : 1 : abs(p[1] - y0) < ε ? direction > 0 ? 1 : 0 : direction > 0 ? 3 : 2;
+    }
+    function compare(a, b) {
+      return comparePoints(a.x, b.x);
+    }
+    function comparePoints(a, b) {
+      var ca = corner(a, 1), cb = corner(b, 1);
+      return ca !== cb ? ca - cb : ca === 0 ? b[1] - a[1] : ca === 1 ? a[0] - b[0] : ca === 2 ? a[1] - b[1] : b[0] - a[0];
+    }
+  }
+  function d3_geo_conic(projectAt) {
+    var φ0 = 0, φ1 = π / 3, m = d3_geo_projectionMutator(projectAt), p = m(φ0, φ1);
+    p.parallels = function(_) {
+      if (!arguments.length) return [ φ0 / π * 180, φ1 / π * 180 ];
+      return m(φ0 = _[0] * π / 180, φ1 = _[1] * π / 180);
+    };
+    return p;
+  }
+  function d3_geo_conicEqualArea(φ0, φ1) {
+    var sinφ0 = Math.sin(φ0), n = (sinφ0 + Math.sin(φ1)) / 2, C = 1 + sinφ0 * (2 * n - sinφ0), ρ0 = Math.sqrt(C) / n;
+    function forward(λ, φ) {
+      var ρ = Math.sqrt(C - 2 * n * Math.sin(φ)) / n;
+      return [ ρ * Math.sin(λ *= n), ρ0 - ρ * Math.cos(λ) ];
+    }
+    forward.invert = function(x, y) {
+      var ρ0_y = ρ0 - y;
+      return [ Math.atan2(x, ρ0_y) / n, d3_asin((C - (x * x + ρ0_y * ρ0_y) * n * n) / (2 * n)) ];
+    };
+    return forward;
+  }
+  (d3.geo.conicEqualArea = function() {
+    return d3_geo_conic(d3_geo_conicEqualArea);
+  }).raw = d3_geo_conicEqualArea;
+  d3.geo.albers = function() {
+    return d3.geo.conicEqualArea().rotate([ 96, 0 ]).center([ -.6, 38.7 ]).parallels([ 29.5, 45.5 ]).scale(1070);
+  };
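+  // Composite projection for the United States: the lower 48 use Albers, with
+  // Alaska (drawn at 0.35x the lower-48 scale) and Hawaii as insets. invert()
+  // picks the component projection from the normalized viewport coordinates
+  // of the inset boxes configured in translate() below.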
+  d3.geo.albersUsa = function() {
+    var lower48 = d3.geo.albers();
+    var alaska = d3.geo.conicEqualArea().rotate([ 154, 0 ]).center([ -2, 58.5 ]).parallels([ 55, 65 ]);
+    var hawaii = d3.geo.conicEqualArea().rotate([ 157, 0 ]).center([ -3, 19.9 ]).parallels([ 8, 18 ]);
+    var point, pointStream = {
+      point: function(x, y) {
+        point = [ x, y ];
+      }
+    }, lower48Point, alaskaPoint, hawaiiPoint;
+    function albersUsa(coordinates) {
+      var x = coordinates[0], y = coordinates[1];
+      point = null;
+      (lower48Point(x, y), point) || (alaskaPoint(x, y), point) || hawaiiPoint(x, y);
+      return point;
+    }
+    albersUsa.invert = function(coordinates) {
+      var k = lower48.scale(), t = lower48.translate(), x = (coordinates[0] - t[0]) / k, y = (coordinates[1] - t[1]) / k;
+      return (y >= .12 && y < .234 && x >= -.425 && x < -.214 ? alaska : y >= .166 && y < .234 && x >= -.214 && x < -.115 ? hawaii : lower48).invert(coordinates);
+    };
+    albersUsa.stream = function(stream) {
+      var lower48Stream = lower48.stream(stream), alaskaStream = alaska.stream(stream), hawaiiStream = hawaii.stream(stream);
+      return {
+        point: function(x, y) {
+          lower48Stream.point(x, y);
+          alaskaStream.point(x, y);
+          hawaiiStream.point(x, y);
+        },
+        sphere: function() {
+          lower48Stream.sphere();
+          alaskaStream.sphere();
+          hawaiiStream.sphere();
+        },
+        lineStart: function() {
+          lower48Stream.lineStart();
+          alaskaStream.lineStart();
+          hawaiiStream.lineStart();
+        },
+        lineEnd: function() {
+          lower48Stream.lineEnd();
+          alaskaStream.lineEnd();
+          hawaiiStream.lineEnd();
+        },
+        polygonStart: function() {
+          lower48Stream.polygonStart();
+          alaskaStream.polygonStart();
+          hawaiiStream.polygonStart();
+        },
+        polygonEnd: function() {
+          lower48Stream.polygonEnd();
+          alaskaStream.polygonEnd();
+          hawaiiStream.polygonEnd();
+        }
+      };
+    };
+    albersUsa.precision = function(_) {
+      if (!arguments.length) return lower48.precision();
+      lower48.precision(_);
+      alaska.precision(_);
+      hawaii.precision(_);
+      return albersUsa;
+    };
+    albersUsa.scale = function(_) {
+      if (!arguments.length) return lower48.scale();
+      lower48.scale(_);
+      alaska.scale(_ * .35);
+      hawaii.scale(_);
+      return albersUsa.translate(lower48.translate());
+    };
+    albersUsa.translate = function(_) {
+      if (!arguments.length) return lower48.translate();
+      var k = lower48.scale(), x = +_[0], y = +_[1];
+      lower48Point = lower48.translate(_).clipExtent([ [ x - .455 * k, y - .238 * k ], [ x + .455 * k, y + .238 * k ] ]).stream(pointStream).point;
+      alaskaPoint = alaska.translate([ x - .307 * k, y + .201 * k ]).clipExtent([ [ x - .425 * k + ε, y + .12 * k + ε ], [ x - .214 * k - ε, y + .234 * k - ε ] ]).stream(pointStream).point;
+      hawaiiPoint = hawaii.translate([ x - .205 * k, y + .212 * k ]).clipExtent([ [ x - .214 * k + ε, y + .166 * k + ε ], [ x - .115 * k - ε, y + .234 * k - ε ] ]).stream(pointStream).point;
+      return albersUsa;
+    };
+    return albersUsa.scale(1070);
+  };
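+  // Planar path area via the shoelace formula: each ring sums y0*x - x0*y,
+  // and polygonEnd adds |sum| / 2.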
+  var d3_geo_pathAreaSum, d3_geo_pathAreaPolygon, d3_geo_pathArea = {
+    point: d3_noop,
+    lineStart: d3_noop,
+    lineEnd: d3_noop,
+    polygonStart: function() {
+      d3_geo_pathAreaPolygon = 0;
+      d3_geo_pathArea.lineStart = d3_geo_pathAreaRingStart;
+    },
+    polygonEnd: function() {
+      d3_geo_pathArea.lineStart = d3_geo_pathArea.lineEnd = d3_geo_pathArea.point = d3_noop;
+      d3_geo_pathAreaSum += abs(d3_geo_pathAreaPolygon / 2);
+    }
+  };
+  function d3_geo_pathAreaRingStart() {
+    var x00, y00, x0, y0;
+    d3_geo_pathArea.point = function(x, y) {
+      d3_geo_pathArea.point = nextPoint;
+      x00 = x0 = x, y00 = y0 = y;
+    };
+    function nextPoint(x, y) {
+      d3_geo_pathAreaPolygon += y0 * x - x0 * y;
+      x0 = x, y0 = y;
+    }
+    d3_geo_pathArea.lineEnd = function() {
+      nextPoint(x00, y00);
+    };
+  }
+  var d3_geo_pathBoundsX0, d3_geo_pathBoundsY0, d3_geo_pathBoundsX1, d3_geo_pathBoundsY1;
+  var d3_geo_pathBounds = {
+    point: d3_geo_pathBoundsPoint,
+    lineStart: d3_noop,
+    lineEnd: d3_noop,
+    polygonStart: d3_noop,
+    polygonEnd: d3_noop
+  };
+  function d3_geo_pathBoundsPoint(x, y) {
+    if (x < d3_geo_pathBoundsX0) d3_geo_pathBoundsX0 = x;
+    if (x > d3_geo_pathBoundsX1) d3_geo_pathBoundsX1 = x;
+    if (y < d3_geo_pathBoundsY0) d3_geo_pathBoundsY0 = y;
+    if (y > d3_geo_pathBoundsY1) d3_geo_pathBoundsY1 = y;
+  }
+  function d3_geo_pathBuffer() {
+    var pointCircle = d3_geo_pathBufferCircle(4.5), buffer = [];
+    var stream = {
+      point: point,
+      lineStart: function() {
+        stream.point = pointLineStart;
+      },
+      lineEnd: lineEnd,
+      polygonStart: function() {
+        stream.lineEnd = lineEndPolygon;
+      },
+      polygonEnd: function() {
+        stream.lineEnd = lineEnd;
+        stream.point = point;
+      },
+      pointRadius: function(_) {
+        pointCircle = d3_geo_pathBufferCircle(_);
+        return stream;
+      },
+      result: function() {
+        if (buffer.length) {
+          var result = buffer.join("");
+          buffer = [];
+          return result;
+        }
+      }
+    };
+    function point(x, y) {
+      buffer.push("M", x, ",", y, pointCircle);
+    }
+    function pointLineStart(x, y) {
+      buffer.push("M", x, ",", y);
+      stream.point = pointLine;
+    }
+    function pointLine(x, y) {
+      buffer.push("L", x, ",", y);
+    }
+    function lineEnd() {
+      stream.point = point;
+    }
+    function lineEndPolygon() {
+      buffer.push("Z");
+    }
+    return stream;
+  }
+  function d3_geo_pathBufferCircle(radius) {
+    return "m0," + radius + "a" + radius + "," + radius + " 0 1,1 0," + -2 * radius + "a" + radius + "," + radius + " 0 1,1 0," + 2 * radius + "z";
+  }
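+  // Centroid measure stream with three weighted accumulators: Z0 counts raw
+  // points, Z1 weights segment midpoints by length, and Z2 weights ring
+  // midpoints by the shoelace area term; path.centroid later returns the
+  // highest-dimensional accumulator that is non-zero.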
+  var d3_geo_pathCentroid = {
+    point: d3_geo_pathCentroidPoint,
+    lineStart: d3_geo_pathCentroidLineStart,
+    lineEnd: d3_geo_pathCentroidLineEnd,
+    polygonStart: function() {
+      d3_geo_pathCentroid.lineStart = d3_geo_pathCentroidRingStart;
+    },
+    polygonEnd: function() {
+      d3_geo_pathCentroid.point = d3_geo_pathCentroidPoint;
+      d3_geo_pathCentroid.lineStart = d3_geo_pathCentroidLineStart;
+      d3_geo_pathCentroid.lineEnd = d3_geo_pathCentroidLineEnd;
+    }
+  };
+  function d3_geo_pathCentroidPoint(x, y) {
+    d3_geo_centroidX0 += x;
+    d3_geo_centroidY0 += y;
+    ++d3_geo_centroidZ0;
+  }
+  function d3_geo_pathCentroidLineStart() {
+    var x0, y0;
+    d3_geo_pathCentroid.point = function(x, y) {
+      d3_geo_pathCentroid.point = nextPoint;
+      d3_geo_pathCentroidPoint(x0 = x, y0 = y);
+    };
+    function nextPoint(x, y) {
+      var dx = x - x0, dy = y - y0, z = Math.sqrt(dx * dx + dy * dy);
+      d3_geo_centroidX1 += z * (x0 + x) / 2;
+      d3_geo_centroidY1 += z * (y0 + y) / 2;
+      d3_geo_centroidZ1 += z;
+      d3_geo_pathCentroidPoint(x0 = x, y0 = y);
+    }
+  }
+  function d3_geo_pathCentroidLineEnd() {
+    d3_geo_pathCentroid.point = d3_geo_pathCentroidPoint;
+  }
+  function d3_geo_pathCentroidRingStart() {
+    var x00, y00, x0, y0;
+    d3_geo_pathCentroid.point = function(x, y) {
+      d3_geo_pathCentroid.point = nextPoint;
+      d3_geo_pathCentroidPoint(x00 = x0 = x, y00 = y0 = y);
+    };
+    function nextPoint(x, y) {
+      var dx = x - x0, dy = y - y0, z = Math.sqrt(dx * dx + dy * dy);
+      d3_geo_centroidX1 += z * (x0 + x) / 2;
+      d3_geo_centroidY1 += z * (y0 + y) / 2;
+      d3_geo_centroidZ1 += z;
+      z = y0 * x - x0 * y;
+      d3_geo_centroidX2 += z * (x0 + x);
+      d3_geo_centroidY2 += z * (y0 + y);
+      d3_geo_centroidZ2 += z * 3;
+      d3_geo_pathCentroidPoint(x0 = x, y0 = y);
+    }
+    d3_geo_pathCentroid.lineEnd = function() {
+      nextPoint(x00, y00);
+    };
+  }
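+  // Render stream that draws directly to a Canvas 2D context rather than
+  // building a string; points become full arcs of radius pointRadius, and
+  // result() is a no-op since drawing happens eagerly.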
+  function d3_geo_pathContext(context) {
+    var pointRadius = 4.5;
+    var stream = {
+      point: point,
+      lineStart: function() {
+        stream.point = pointLineStart;
+      },
+      lineEnd: lineEnd,
+      polygonStart: function() {
+        stream.lineEnd = lineEndPolygon;
+      },
+      polygonEnd: function() {
+        stream.lineEnd = lineEnd;
+        stream.point = point;
+      },
+      pointRadius: function(_) {
+        pointRadius = _;
+        return stream;
+      },
+      result: d3_noop
+    };
+    function point(x, y) {
+      context.moveTo(x + pointRadius, y);
+      context.arc(x, y, pointRadius, 0, τ);
+    }
+    function pointLineStart(x, y) {
+      context.moveTo(x, y);
+      stream.point = pointLine;
+    }
+    function pointLine(x, y) {
+      context.lineTo(x, y);
+    }
+    function lineEnd() {
+      stream.point = point;
+    }
+    function lineEndPolygon() {
+      context.closePath();
+    }
+    return stream;
+  }
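+  // Adaptive resampling of projected lines: segments are bisected recursively
+  // (to maxDepth 16) whenever the projected midpoint strays from the chord by
+  // more than the precision threshold, the parametric midpoint drifts more
+  // than 30% off-center, or the endpoints lie more than 30 degrees apart on
+  // the sphere, keeping great arcs smooth after projection.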
+  function d3_geo_resample(project) {
+    var δ2 = .5, cosMinDistance = Math.cos(30 * d3_radians), maxDepth = 16;
+    function resample(stream) {
+      return (maxDepth ? resampleRecursive : resampleNone)(stream);
+    }
+    function resampleNone(stream) {
+      return d3_geo_transformPoint(stream, function(x, y) {
+        x = project(x, y);
+        stream.point(x[0], x[1]);
+      });
+    }
+    function resampleRecursive(stream) {
+      var λ00, φ00, x00, y00, a00, b00, c00, λ0, x0, y0, a0, b0, c0;
+      var resample = {
+        point: point,
+        lineStart: lineStart,
+        lineEnd: lineEnd,
+        polygonStart: function() {
+          stream.polygonStart();
+          resample.lineStart = ringStart;
+        },
+        polygonEnd: function() {
+          stream.polygonEnd();
+          resample.lineStart = lineStart;
+        }
+      };
+      function point(x, y) {
+        x = project(x, y);
+        stream.point(x[0], x[1]);
+      }
+      function lineStart() {
+        x0 = NaN;
+        resample.point = linePoint;
+        stream.lineStart();
+      }
+      function linePoint(λ, φ) {
+        var c = d3_geo_cartesian([ λ, φ ]), p = project(λ, φ);
+        resampleLineTo(x0, y0, λ0, a0, b0, c0, x0 = p[0], y0 = p[1], λ0 = λ, a0 = c[0], b0 = c[1], c0 = c[2], maxDepth, stream);
+        stream.point(x0, y0);
+      }
+      function lineEnd() {
+        resample.point = point;
+        stream.lineEnd();
+      }
+      function ringStart() {
+        lineStart();
+        resample.point = ringPoint;
+        resample.lineEnd = ringEnd;
+      }
+      function ringPoint(λ, φ) {
+        linePoint(λ00 = λ, φ00 = φ), x00 = x0, y00 = y0, a00 = a0, b00 = b0, c00 = c0;
+        resample.point = linePoint;
+      }
+      function ringEnd() {
+        resampleLineTo(x0, y0, λ0, a0, b0, c0, x00, y00, λ00, a00, b00, c00, maxDepth, stream);
+        resample.lineEnd = lineEnd;
+        lineEnd();
+      }
+      return resample;
+    }
+    function resampleLineTo(x0, y0, λ0, a0, b0, c0, x1, y1, λ1, a1, b1, c1, depth, stream) {
+      var dx = x1 - x0, dy = y1 - y0, d2 = dx * dx + dy * dy;
+      if (d2 > 4 * δ2 && depth--) {
+        var a = a0 + a1, b = b0 + b1, c = c0 + c1, m = Math.sqrt(a * a + b * b + c * c), φ2 = Math.asin(c /= m), λ2 = abs(abs(c) - 1) < ε || abs(λ0 - λ1) < ε ? (λ0 + λ1) / 2 : Math.atan2(b, a), p = project(λ2, φ2), x2 = p[0], y2 = p[1], dx2 = x2 - x0, dy2 = y2 - y0, dz = dy * dx2 - dx * dy2;
+        if (dz * dz / d2 > δ2 || abs((dx * dx2 + dy * dy2) / d2 - .5) > .3 || a0 * a1 + b0 * b1 + c0 * c1 < cosMinDistance) {
+          resampleLineTo(x0, y0, λ0, a0, b0, c0, x2, y2, λ2, a /= m, b /= m, c, depth, stream);
+          stream.point(x2, y2);
+          resampleLineTo(x2, y2, λ2, a, b, c, x1, y1, λ1, a1, b1, c1, depth, stream);
+        }
+      }
+    }
+    resample.precision = function(_) {
+      if (!arguments.length) return Math.sqrt(δ2);
+      maxDepth = (δ2 = _ * _) > 0 && 16;
+      return resample;
+    };
+    return resample;
+  }
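+  // Geographic path generator: with a null context (the default) invoking the
+  // path returns SVG path data; with a Canvas 2D context it issues drawing
+  // calls instead. A minimal usage sketch, assuming an svg selection and a
+  // GeoJSON feature already exist:
+  //   var path = d3.geo.path().projection(d3.geo.mercator());
+  //   svg.append("path").datum(feature).attr("d", path);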
+  d3.geo.path = function() {
+    var pointRadius = 4.5, projection, context, projectStream, contextStream, cacheStream;
+    function path(object) {
+      if (object) {
+        if (typeof pointRadius === "function") contextStream.pointRadius(+pointRadius.apply(this, arguments));
+        if (!cacheStream || !cacheStream.valid) cacheStream = projectStream(contextStream);
+        d3.geo.stream(object, cacheStream);
+      }
+      return contextStream.result();
+    }
+    path.area = function(object) {
+      d3_geo_pathAreaSum = 0;
+      d3.geo.stream(object, projectStream(d3_geo_pathArea));
+      return d3_geo_pathAreaSum;
+    };
+    path.centroid = function(object) {
+      d3_geo_centroidX0 = d3_geo_centroidY0 = d3_geo_centroidZ0 = d3_geo_centroidX1 = d3_geo_centroidY1 = d3_geo_centroidZ1 = d3_geo_centroidX2 = d3_geo_centroidY2 = d3_geo_centroidZ2 = 0;
+      d3.geo.stream(object, projectStream(d3_geo_pathCentroid));
+      return d3_geo_centroidZ2 ? [ d3_geo_centroidX2 / d3_geo_centroidZ2, d3_geo_centroidY2 / d3_geo_centroidZ2 ] : d3_geo_centroidZ1 ? [ d3_geo_centroidX1 / d3_geo_centroidZ1, d3_geo_centroidY1 / d3_geo_centroidZ1 ] : d3_geo_centroidZ0 ? [ d3_geo_centroidX0 / d3_geo_centroidZ0, d3_geo_centroidY0 / d3_geo_centroidZ0 ] : [ NaN, NaN ];
+    };
+    path.bounds = function(object) {
+      d3_geo_pathBoundsX1 = d3_geo_pathBoundsY1 = -(d3_geo_pathBoundsX0 = d3_geo_pathBoundsY0 = Infinity);
+      d3.geo.stream(object, projectStream(d3_geo_pathBounds));
+      return [ [ d3_geo_pathBoundsX0, d3_geo_pathBoundsY0 ], [ d3_geo_pathBoundsX1, d3_geo_pathBoundsY1 ] ];
+    };
+    path.projection = function(_) {
+      if (!arguments.length) return projection;
+      projectStream = (projection = _) ? _.stream || d3_geo_pathProjectStream(_) : d3_identity;
+      return reset();
+    };
+    path.context = function(_) {
+      if (!arguments.length) return context;
+      contextStream = (context = _) == null ? new d3_geo_pathBuffer() : new d3_geo_pathContext(_);
+      if (typeof pointRadius !== "function") contextStream.pointRadius(pointRadius);
+      return reset();
+    };
+    path.pointRadius = function(_) {
+      if (!arguments.length) return pointRadius;
+      pointRadius = typeof _ === "function" ? _ : (contextStream.pointRadius(+_), +_);
+      return path;
+    };
+    function reset() {
+      cacheStream = null;
+      return path;
+    }
+    return path.projection(d3.geo.albersUsa()).context(null);
+  };
+  function d3_geo_pathProjectStream(project) {
+    var resample = d3_geo_resample(function(x, y) {
+      return project([ x * d3_degrees, y * d3_degrees ]);
+    });
+    return function(stream) {
+      return d3_geo_projectionRadians(resample(stream));
+    };
+  }
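+  // d3.geo.transform lifts an object of stream-method overrides into a
+  // projection-like stream wrapper. A minimal sketch of a y-axis flip,
+  // assuming a height variable is in scope:
+  //   var flipY = d3.geo.transform({ point: function(x, y) { this.stream.point(x, height - y); } });
+  //   var path = d3.geo.path().projection(flipY);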
+  d3.geo.transform = function(methods) {
+    return {
+      stream: function(stream) {
+        var transform = new d3_geo_transform(stream);
+        for (var k in methods) transform[k] = methods[k];
+        return transform;
+      }
+    };
+  };
+  function d3_geo_transform(stream) {
+    this.stream = stream;
+  }
+  d3_geo_transform.prototype = {
+    point: function(x, y) {
+      this.stream.point(x, y);
+    },
+    sphere: function() {
+      this.stream.sphere();
+    },
+    lineStart: function() {
+      this.stream.lineStart();
+    },
+    lineEnd: function() {
+      this.stream.lineEnd();
+    },
+    polygonStart: function() {
+      this.stream.polygonStart();
+    },
+    polygonEnd: function() {
+      this.stream.polygonEnd();
+    }
+  };
+  function d3_geo_transformPoint(stream, point) {
+    return {
+      point: point,
+      sphere: function() {
+        stream.sphere();
+      },
+      lineStart: function() {
+        stream.lineStart();
+      },
+      lineEnd: function() {
+        stream.lineEnd();
+      },
+      polygonStart: function() {
+        stream.polygonStart();
+      },
+      polygonEnd: function() {
+        stream.polygonEnd();
+      }
+    };
+  }
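+  // Standard projection factory: projectionMutator closes over a raw
+  // project(λ, φ) function and layers rotation, centering, scaling,
+  // translation, adaptive resampling and antimeridian or small-circle
+  // clipping on top, caching the composed stream until a setter invalidates it.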
+  d3.geo.projection = d3_geo_projection;
+  d3.geo.projectionMutator = d3_geo_projectionMutator;
+  function d3_geo_projection(project) {
+    return d3_geo_projectionMutator(function() {
+      return project;
+    })();
+  }
+  function d3_geo_projectionMutator(projectAt) {
+    var project, rotate, projectRotate, projectResample = d3_geo_resample(function(x, y) {
+      x = project(x, y);
+      return [ x[0] * k + δx, δy - x[1] * k ];
+    }), k = 150, x = 480, y = 250, λ = 0, φ = 0, δλ = 0, δφ = 0, δγ = 0, δx, δy, preclip = d3_geo_clipAntimeridian, postclip = d3_identity, clipAngle = null, clipExtent = null, stream;
+    function projection(point) {
+      point = projectRotate(point[0] * d3_radians, point[1] * d3_radians);
+      return [ point[0] * k + δx, δy - point[1] * k ];
+    }
+    function invert(point) {
+      point = projectRotate.invert((point[0] - δx) / k, (δy - point[1]) / k);
+      return point && [ point[0] * d3_degrees, point[1] * d3_degrees ];
+    }
+    projection.stream = function(output) {
+      if (stream) stream.valid = false;
+      stream = d3_geo_projectionRadians(preclip(rotate, projectResample(postclip(output))));
+      stream.valid = true;
+      return stream;
+    };
+    projection.clipAngle = function(_) {
+      if (!arguments.length) return clipAngle;
+      preclip = _ == null ? (clipAngle = _, d3_geo_clipAntimeridian) : d3_geo_clipCircle((clipAngle = +_) * d3_radians);
+      return invalidate();
+    };
+    projection.clipExtent = function(_) {
+      if (!arguments.length) return clipExtent;
+      clipExtent = _;
+      postclip = _ ? d3_geo_clipExtent(_[0][0], _[0][1], _[1][0], _[1][1]) : d3_identity;
+      return invalidate();
+    };
+    projection.scale = function(_) {
+      if (!arguments.length) return k;
+      k = +_;
+      return reset();
+    };
+    projection.translate = function(_) {
+      if (!arguments.length) return [ x, y ];
+      x = +_[0];
+      y = +_[1];
+      return reset();
+    };
+    projection.center = function(_) {
+      if (!arguments.length) return [ λ * d3_degrees, φ * d3_degrees ];
+      λ = _[0] % 360 * d3_radians;
+      φ = _[1] % 360 * d3_radians;
+      return reset();
+    };
+    projection.rotate = function(_) {
+      if (!arguments.length) return [ δλ * d3_degrees, δφ * d3_degrees, δγ * d3_degrees ];
+      δλ = _[0] % 360 * d3_radians;
+      δφ = _[1] % 360 * d3_radians;
+      δγ = _.length > 2 ? _[2] % 360 * d3_radians : 0;
+      return reset();
+    };
+    d3.rebind(projection, projectResample, "precision");
+    function reset() {
+      projectRotate = d3_geo_compose(rotate = d3_geo_rotation(δλ, δφ, δγ), project);
+      var center = project(λ, φ);
+      δx = x - center[0] * k;
+      δy = y + center[1] * k;
+      return invalidate();
+    }
+    function invalidate() {
+      if (stream) stream.valid = false, stream = null;
+      return projection;
+    }
+    return function() {
+      project = projectAt.apply(this, arguments);
+      projection.invert = project.invert && invert;
+      return reset();
+    };
+  }
+  function d3_geo_projectionRadians(stream) {
+    return d3_geo_transformPoint(stream, function(x, y) {
+      stream.point(x * d3_radians, y * d3_radians);
+    });
+  }
+  function d3_geo_equirectangular(λ, φ) {
+    return [ λ, φ ];
+  }
+  (d3.geo.equirectangular = function() {
+    return d3_geo_projection(d3_geo_equirectangular);
+  }).raw = d3_geo_equirectangular.invert = d3_geo_equirectangular;
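+  // d3.geo.rotation composes up to three Euler-angle rotations [λ, φ, γ] in
+  // degrees and exposes forward and inverse transforms, e.g.
+  //   d3.geo.rotation([ 90, 0 ])([ 0, 0 ]) // => [ 90, 0 ]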
+  d3.geo.rotation = function(rotate) {
+    rotate = d3_geo_rotation(rotate[0] % 360 * d3_radians, rotate[1] * d3_radians, rotate.length > 2 ? rotate[2] * d3_radians : 0);
+    function forward(coordinates) {
+      coordinates = rotate(coordinates[0] * d3_radians, coordinates[1] * d3_radians);
+      return coordinates[0] *= d3_degrees, coordinates[1] *= d3_degrees, coordinates;
+    }
+    forward.invert = function(coordinates) {
+      coordinates = rotate.invert(coordinates[0] * d3_radians, coordinates[1] * d3_radians);
+      return coordinates[0] *= d3_degrees, coordinates[1] *= d3_degrees, coordinates;
+    };
+    return forward;
+  };
+  function d3_geo_identityRotation(λ, φ) {
+    return [ λ > π ? λ - τ : λ < -π ? λ + τ : λ, φ ];
+  }
+  d3_geo_identityRotation.invert = d3_geo_equirectangular;
+  function d3_geo_rotation(δλ, δφ, δγ) {
+    return δλ ? δφ || δγ ? d3_geo_compose(d3_geo_rotationλ(δλ), d3_geo_rotationφγ(δφ, δγ)) : d3_geo_rotationλ(δλ) : δφ || δγ ? d3_geo_rotationφγ(δφ, δγ) : d3_geo_identityRotation;
+  }
+  function d3_geo_forwardRotationλ(δλ) {
+    return function(λ, φ) {
+      return λ += δλ, [ λ > π ? λ - τ : λ < -π ? λ + τ : λ, φ ];
+    };
+  }
+  function d3_geo_rotationλ(δλ) {
+    var rotation = d3_geo_forwardRotationλ(δλ);
+    rotation.invert = d3_geo_forwardRotationλ(-δλ);
+    return rotation;
+  }
+  function d3_geo_rotationφγ(δφ, δγ) {
+    var cosδφ = Math.cos(δφ), sinδφ = Math.sin(δφ), cosδγ = Math.cos(δγ), sinδγ = Math.sin(δγ);
+    function rotation(λ, φ) {
+      var cosφ = Math.cos(φ), x = Math.cos(λ) * cosφ, y = Math.sin(λ) * cosφ, z = Math.sin(φ), k = z * cosδφ + x * sinδφ;
+      return [ Math.atan2(y * cosδγ - k * sinδγ, x * cosδφ - z * sinδφ), d3_asin(k * cosδγ + y * sinδγ) ];
+    }
+    rotation.invert = function(λ, φ) {
+      var cosφ = Math.cos(φ), x = Math.cos(λ) * cosφ, y = Math.sin(λ) * cosφ, z = Math.sin(φ), k = z * cosδγ - y * sinδγ;
+      return [ Math.atan2(y * cosδγ + z * sinδγ, x * cosδφ + k * sinδφ), d3_asin(k * cosδφ - x * sinδφ) ];
+    };
+    return rotation;
+  }
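+  // Small-circle generator: returns a GeoJSON Polygon tracing the circle of
+  // the configured angular radius (90 degrees by default, i.e. a great
+  // circle) around the origin, sampled every `precision` degrees.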
+  d3.geo.circle = function() {
+    var origin = [ 0, 0 ], angle, precision = 6, interpolate;
+    function circle() {
+      var center = typeof origin === "function" ? origin.apply(this, arguments) : origin, rotate = d3_geo_rotation(-center[0] * d3_radians, -center[1] * d3_radians, 0).invert, ring = [];
+      interpolate(null, null, 1, {
+        point: function(x, y) {
+          ring.push(x = rotate(x, y));
+          x[0] *= d3_degrees, x[1] *= d3_degrees;
+        }
+      });
+      return {
+        type: "Polygon",
+        coordinates: [ ring ]
+      };
+    }
+    circle.origin = function(x) {
+      if (!arguments.length) return origin;
+      origin = x;
+      return circle;
+    };
+    circle.angle = function(x) {
+      if (!arguments.length) return angle;
+      interpolate = d3_geo_circleInterpolate((angle = +x) * d3_radians, precision * d3_radians);
+      return circle;
+    };
+    circle.precision = function(_) {
+      if (!arguments.length) return precision;
+      interpolate = d3_geo_circleInterpolate(angle * d3_radians, (precision = +_) * d3_radians);
+      return circle;
+    };
+    return circle.angle(90);
+  };
+  function d3_geo_circleInterpolate(radius, precision) {
+    var cr = Math.cos(radius), sr = Math.sin(radius);
+    return function(from, to, direction, listener) {
+      var step = direction * precision;
+      if (from != null) {
+        from = d3_geo_circleAngle(cr, from);
+        to = d3_geo_circleAngle(cr, to);
+        if (direction > 0 ? from < to : from > to) from += direction * τ;
+      } else {
+        from = radius + direction * τ;
+        to = radius - .5 * step;
+      }
+      for (var point, t = from; direction > 0 ? t > to : t < to; t -= step) {
+        listener.point((point = d3_geo_spherical([ cr, -sr * Math.cos(t), -sr * Math.sin(t) ]))[0], point[1]);
+      }
+    };
+  }
+  function d3_geo_circleAngle(cr, point) {
+    var a = d3_geo_cartesian(point);
+    a[0] -= cr;
+    d3_geo_cartesianNormalize(a);
+    var angle = d3_acos(-a[1]);
+    return ((-a[2] < 0 ? -angle : angle) + 2 * Math.PI - ε) % (2 * Math.PI);
+  }
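+  // Great-arc distance between two [longitude, latitude] pairs, returned as
+  // an angle in radians. Multiply by a sphere radius for a length, e.g.
+  // kilometers assuming Earth's mean radius of roughly 6371 km:
+  //   var km = d3.geo.distance([ -74, 40.7 ], [ 2.35, 48.85 ]) * 6371;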
+  d3.geo.distance = function(a, b) {
+    var Δλ = (b[0] - a[0]) * d3_radians, φ0 = a[1] * d3_radians, φ1 = b[1] * d3_radians, sinΔλ = Math.sin(Δλ), cosΔλ = Math.cos(Δλ), sinφ0 = Math.sin(φ0), cosφ0 = Math.cos(φ0), sinφ1 = Math.sin(φ1), cosφ1 = Math.cos(φ1), t;
+    return Math.atan2(Math.sqrt((t = cosφ1 * sinΔλ) * t + (t = cosφ0 * sinφ1 - sinφ0 * cosφ1 * cosΔλ) * t), sinφ0 * sinφ1 + cosφ0 * cosφ1 * cosΔλ);
+  };
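+  // Graticule generator: a MultiLineString of meridians and parallels with
+  // 10-degree minor and [90, 360]-degree major steps by default, extents
+  // clamped just inside the poles so no degenerate polar lines are produced.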
+  d3.geo.graticule = function() {
+    var x1, x0, X1, X0, y1, y0, Y1, Y0, dx = 10, dy = dx, DX = 90, DY = 360, x, y, X, Y, precision = 2.5;
+    function graticule() {
+      return {
+        type: "MultiLineString",
+        coordinates: lines()
+      };
+    }
+    function lines() {
+      return d3.range(Math.ceil(X0 / DX) * DX, X1, DX).map(X).concat(d3.range(Math.ceil(Y0 / DY) * DY, Y1, DY).map(Y)).concat(d3.range(Math.ceil(x0 / dx) * dx, x1, dx).filter(function(x) {
+        return abs(x % DX) > ε;
+      }).map(x)).concat(d3.range(Math.ceil(y0 / dy) * dy, y1, dy).filter(function(y) {
+        return abs(y % DY) > ε;
+      }).map(y));
+    }
+    graticule.lines = function() {
+      return lines().map(function(coordinates) {
+        return {
+          type: "LineString",
+          coordinates: coordinates
+        };
+      });
+    };
+    graticule.outline = function() {
+      return {
+        type: "Polygon",
+        coordinates: [ X(X0).concat(Y(Y1).slice(1), X(X1).reverse().slice(1), Y(Y0).reverse().slice(1)) ]
+      };
+    };
+    graticule.extent = function(_) {
+      if (!arguments.length) return graticule.minorExtent();
+      return graticule.majorExtent(_).minorExtent(_);
+    };
+    graticule.majorExtent = function(_) {
+      if (!arguments.length) return [ [ X0, Y0 ], [ X1, Y1 ] ];
+      X0 = +_[0][0], X1 = +_[1][0];
+      Y0 = +_[0][1], Y1 = +_[1][1];
+      if (X0 > X1) _ = X0, X0 = X1, X1 = _;
+      if (Y0 > Y1) _ = Y0, Y0 = Y1, Y1 = _;
+      return graticule.precision(precision);
+    };
+    graticule.minorExtent = function(_) {
+      if (!arguments.length) return [ [ x0, y0 ], [ x1, y1 ] ];
+      x0 = +_[0][0], x1 = +_[1][0];
+      y0 = +_[0][1], y1 = +_[1][1];
+      if (x0 > x1) _ = x0, x0 = x1, x1 = _;
+      if (y0 > y1) _ = y0, y0 = y1, y1 = _;
+      return graticule.precision(precision);
+    };
+    graticule.step = function(_) {
+      if (!arguments.length) return graticule.minorStep();
+      return graticule.majorStep(_).minorStep(_);
+    };
+    graticule.majorStep = function(_) {
+      if (!arguments.length) return [ DX, DY ];
+      DX = +_[0], DY = +_[1];
+      return graticule;
+    };
+    graticule.minorStep = function(_) {
+      if (!arguments.length) return [ dx, dy ];
+      dx = +_[0], dy = +_[1];
+      return graticule;
+    };
+    graticule.precision = function(_) {
+      if (!arguments.length) return precision;
+      precision = +_;
+      x = d3_geo_graticuleX(y0, y1, 90);
+      y = d3_geo_graticuleY(x0, x1, precision);
+      X = d3_geo_graticuleX(Y0, Y1, 90);
+      Y = d3_geo_graticuleY(X0, X1, precision);
+      return graticule;
+    };
+    return graticule.majorExtent([ [ -180, -90 + ε ], [ 180, 90 - ε ] ]).minorExtent([ [ -180, -80 - ε ], [ 180, 80 + ε ] ]);
+  };
+  function d3_geo_graticuleX(y0, y1, dy) {
+    var y = d3.range(y0, y1 - ε, dy).concat(y1);
+    return function(x) {
+      return y.map(function(y) {
+        return [ x, y ];
+      });
+    };
+  }
+  function d3_geo_graticuleY(x0, x1, dx) {
+    var x = d3.range(x0, x1 - ε, dx).concat(x1);
+    return function(y) {
+      return x.map(function(x) {
+        return [ x, y ];
+      });
+    };
+  }
+  function d3_source(d) {
+    return d.source;
+  }
+  function d3_target(d) {
+    return d.target;
+  }
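+  // Legacy great-arc generator kept for API compatibility: it emits a plain
+  // two-point LineString (projection-time resampling renders the arc), and
+  // its precision accessor is a stub.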
+  d3.geo.greatArc = function() {
+    var source = d3_source, source_, target = d3_target, target_;
+    function greatArc() {
+      return {
+        type: "LineString",
+        coordinates: [ source_ || source.apply(this, arguments), target_ || target.apply(this, arguments) ]
+      };
+    }
+    greatArc.distance = function() {
+      return d3.geo.distance(source_ || source.apply(this, arguments), target_ || target.apply(this, arguments));
+    };
+    greatArc.source = function(_) {
+      if (!arguments.length) return source;
+      source = _, source_ = typeof _ === "function" ? null : _;
+      return greatArc;
+    };
+    greatArc.target = function(_) {
+      if (!arguments.length) return target;
+      target = _, target_ = typeof _ === "function" ? null : _;
+      return greatArc;
+    };
+    greatArc.precision = function() {
+      return arguments.length ? greatArc : 0;
+    };
+    return greatArc;
+  };
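+  // Spherical linear interpolation along the great arc from source to target;
+  // the returned function maps t in [0, 1] to a [longitude, latitude] pair:
+  //   d3.geo.interpolate([ 0, 0 ], [ 90, 0 ])(.5) // => [ 45, 0 ]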
+  d3.geo.interpolate = function(source, target) {
+    return d3_geo_interpolate(source[0] * d3_radians, source[1] * d3_radians, target[0] * d3_radians, target[1] * d3_radians);
+  };
+  function d3_geo_interpolate(x0, y0, x1, y1) {
+    var cy0 = Math.cos(y0), sy0 = Math.sin(y0), cy1 = Math.cos(y1), sy1 = Math.sin(y1), kx0 = cy0 * Math.cos(x0), ky0 = cy0 * Math.sin(x0), kx1 = cy1 * Math.cos(x1), ky1 = cy1 * Math.sin(x1), d = 2 * Math.asin(Math.sqrt(d3_haversin(y1 - y0) + cy0 * cy1 * d3_haversin(x1 - x0))), k = 1 / Math.sin(d);
+    var interpolate = d ? function(t) {
+      var B = Math.sin(t *= d) * k, A = Math.sin(d - t) * k, x = A * kx0 + B * kx1, y = A * ky0 + B * ky1, z = A * sy0 + B * sy1;
+      return [ Math.atan2(y, x) * d3_degrees, Math.atan2(z, Math.sqrt(x * x + y * y)) * d3_degrees ];
+    } : function() {
+      return [ x0 * d3_degrees, y0 * d3_degrees ];
+    };
+    interpolate.distance = d;
+    return interpolate;
+  }
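+  // Spherical length of line geometry, in radians: segments are accumulated
+  // with the same haversine-style formula used by d3.geo.distance.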
+  d3.geo.length = function(object) {
+    d3_geo_lengthSum = 0;
+    d3.geo.stream(object, d3_geo_length);
+    return d3_geo_lengthSum;
+  };
+  var d3_geo_lengthSum;
+  var d3_geo_length = {
+    sphere: d3_noop,
+    point: d3_noop,
+    lineStart: d3_geo_lengthLineStart,
+    lineEnd: d3_noop,
+    polygonStart: d3_noop,
+    polygonEnd: d3_noop
+  };
+  function d3_geo_lengthLineStart() {
+    var λ0, sinφ0, cosφ0;
+    d3_geo_length.point = function(λ, φ) {
+      λ0 = λ * d3_radians, sinφ0 = Math.sin(φ *= d3_radians), cosφ0 = Math.cos(φ);
+      d3_geo_length.point = nextPoint;
+    };
+    d3_geo_length.lineEnd = function() {
+      d3_geo_length.point = d3_geo_length.lineEnd = d3_noop;
+    };
+    function nextPoint(λ, φ) {
+      var sinφ = Math.sin(φ *= d3_radians), cosφ = Math.cos(φ), t = abs((λ *= d3_radians) - λ0), cosΔλ = Math.cos(t);
+      d3_geo_lengthSum += Math.atan2(Math.sqrt((t = cosφ * Math.sin(t)) * t + (t = cosφ0 * sinφ - sinφ0 * cosφ * cosΔλ) * t), sinφ0 * sinφ + cosφ0 * cosφ * cosΔλ);
+      λ0 = λ, sinφ0 = sinφ, cosφ0 = cosφ;
+    }
+  }
+  function d3_geo_azimuthal(scale, angle) {
+    function azimuthal(λ, φ) {
+      var cosλ = Math.cos(λ), cosφ = Math.cos(φ), k = scale(cosλ * cosφ);
+      return [ k * cosφ * Math.sin(λ), k * Math.sin(φ) ];
+    }
+    azimuthal.invert = function(x, y) {
+      var ρ = Math.sqrt(x * x + y * y), c = angle(ρ), sinc = Math.sin(c), cosc = Math.cos(c);
+      return [ Math.atan2(x * sinc, ρ * cosc), Math.asin(ρ && y * sinc / ρ) ];
+    };
+    return azimuthal;
+  }
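+  // Generic azimuthal projection builder: given a radial scale as a function
+  // of cos(λ)cos(φ) and the matching inverse angle function, it produces a
+  // forward/invert pair; the equal-area, equidistant, gnomonic, orthographic
+  // and stereographic projections below all derive from it.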
+  var d3_geo_azimuthalEqualArea = d3_geo_azimuthal(function(cosλcosφ) {
+    return Math.sqrt(2 / (1 + cosλcosφ));
+  }, function(ρ) {
+    return 2 * Math.asin(ρ / 2);
+  });
+  (d3.geo.azimuthalEqualArea = function() {
+    return d3_geo_projection(d3_geo_azimuthalEqualArea);
+  }).raw = d3_geo_azimuthalEqualArea;
+  var d3_geo_azimuthalEquidistant = d3_geo_azimuthal(function(cosλcosφ) {
+    var c = Math.acos(cosλcosφ);
+    return c && c / Math.sin(c);
+  }, d3_identity);
+  (d3.geo.azimuthalEquidistant = function() {
+    return d3_geo_projection(d3_geo_azimuthalEquidistant);
+  }).raw = d3_geo_azimuthalEquidistant;
+  function d3_geo_conicConformal(φ0, φ1) {
+    var cosφ0 = Math.cos(φ0), t = function(φ) {
+      return Math.tan(π / 4 + φ / 2);
+    }, n = φ0 === φ1 ? Math.sin(φ0) : Math.log(cosφ0 / Math.cos(φ1)) / Math.log(t(φ1) / t(φ0)), F = cosφ0 * Math.pow(t(φ0), n) / n;
+    if (!n) return d3_geo_mercator;
+    function forward(λ, φ) {
+      if (F > 0) {
+        if (φ < -halfπ + ε) φ = -halfπ + ε;
+      } else {
+        if (φ > halfπ - ε) φ = halfπ - ε;
+      }
+      var ρ = F / Math.pow(t(φ), n);
+      return [ ρ * Math.sin(n * λ), F - ρ * Math.cos(n * λ) ];
+    }
+    forward.invert = function(x, y) {
+      var ρ0_y = F - y, ρ = d3_sgn(n) * Math.sqrt(x * x + ρ0_y * ρ0_y);
+      return [ Math.atan2(x, ρ0_y) / n, 2 * Math.atan(Math.pow(F / ρ, 1 / n)) - halfπ ];
+    };
+    return forward;
+  }
+  (d3.geo.conicConformal = function() {
+    return d3_geo_conic(d3_geo_conicConformal);
+  }).raw = d3_geo_conicConformal;
+  function d3_geo_conicEquidistant(φ0, φ1) {
+    var cosφ0 = Math.cos(φ0), n = φ0 === φ1 ? Math.sin(φ0) : (cosφ0 - Math.cos(φ1)) / (φ1 - φ0), G = cosφ0 / n + φ0;
+    if (abs(n) < ε) return d3_geo_equirectangular;
+    function forward(λ, φ) {
+      var ρ = G - φ;
+      return [ ρ * Math.sin(n * λ), G - ρ * Math.cos(n * λ) ];
+    }
+    forward.invert = function(x, y) {
+      var ρ0_y = G - y;
+      return [ Math.atan2(x, ρ0_y) / n, G - d3_sgn(n) * Math.sqrt(x * x + ρ0_y * ρ0_y) ];
+    };
+    return forward;
+  }
+  (d3.geo.conicEquidistant = function() {
+    return d3_geo_conic(d3_geo_conicEquidistant);
+  }).raw = d3_geo_conicEquidistant;
+  var d3_geo_gnomonic = d3_geo_azimuthal(function(cosλcosφ) {
+    return 1 / cosλcosφ;
+  }, Math.atan);
+  (d3.geo.gnomonic = function() {
+    return d3_geo_projection(d3_geo_gnomonic);
+  }).raw = d3_geo_gnomonic;
+  function d3_geo_mercator(λ, φ) {
+    return [ λ, Math.log(Math.tan(π / 4 + φ / 2)) ];
+  }
+  d3_geo_mercator.invert = function(x, y) {
+    return [ x, 2 * Math.atan(Math.exp(y)) - halfπ ];
+  };
+  function d3_geo_mercatorProjection(project) {
+    var m = d3_geo_projection(project), scale = m.scale, translate = m.translate, clipExtent = m.clipExtent, clipAuto;
+    m.scale = function() {
+      var v = scale.apply(m, arguments);
+      return v === m ? clipAuto ? m.clipExtent(null) : m : v;
+    };
+    m.translate = function() {
+      var v = translate.apply(m, arguments);
+      return v === m ? clipAuto ? m.clipExtent(null) : m : v;
+    };
+    m.clipExtent = function(_) {
+      var v = clipExtent.apply(m, arguments);
+      if (v === m) {
+        if (clipAuto = _ == null) {
+          var k = π * scale(), t = translate();
+          clipExtent([ [ t[0] - k, t[1] - k ], [ t[0] + k, t[1] + k ] ]);
+        }
+      } else if (clipAuto) {
+        v = null;
+      }
+      return v;
+    };
+    return m.clipExtent(null);
+  }
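+  // Mercator-family wrapper: the poles project to infinity, so scale() and
+  // translate() maintain an automatic square clip extent of ±π·scale around
+  // the translate point; setting an explicit clipExtent disables it.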
+  (d3.geo.mercator = function() {
+    return d3_geo_mercatorProjection(d3_geo_mercator);
+  }).raw = d3_geo_mercator;
+  var d3_geo_orthographic = d3_geo_azimuthal(function() {
+    return 1;
+  }, Math.asin);
+  (d3.geo.orthographic = function() {
+    return d3_geo_projection(d3_geo_orthographic);
+  }).raw = d3_geo_orthographic;
+  var d3_geo_stereographic = d3_geo_azimuthal(function(cosλcosφ) {
+    return 1 / (1 + cosλcosφ);
+  }, function(ρ) {
+    return 2 * Math.atan(ρ);
+  });
+  (d3.geo.stereographic = function() {
+    return d3_geo_projection(d3_geo_stereographic);
+  }).raw = d3_geo_stereographic;
+  function d3_geo_transverseMercator(λ, φ) {
+    return [ Math.log(Math.tan(π / 4 + φ / 2)), -λ ];
+  }
+  d3_geo_transverseMercator.invert = function(x, y) {
+    return [ -y, 2 * Math.atan(Math.exp(x)) - halfπ ];
+  };
+  (d3.geo.transverseMercator = function() {
+    var projection = d3_geo_mercatorProjection(d3_geo_transverseMercator), center = projection.center, rotate = projection.rotate;
+    projection.center = function(_) {
+      return _ ? center([ -_[1], _[0] ]) : (_ = center(), [ _[1], -_[0] ]);
+    };
+    projection.rotate = function(_) {
+      return _ ? rotate([ _[0], _[1], _.length > 2 ? _[2] + 90 : 90 ]) : (_ = rotate(), [ _[0], _[1], _[2] - 90 ]);
+    };
+    return rotate([ 0, 0, 90 ]);
+  }).raw = d3_geo_transverseMercator;
+  d3.geom = {};
+  function d3_geom_pointX(d) {
+    return d[0];
+  }
+  function d3_geom_pointY(d) {
+    return d[1];
+  }
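+  // Convex hull via Andrew's monotone-chain algorithm: sort once, build the
+  // upper hull, then reuse the same routine on y-negated copies to get the
+  // lower hull; O(n log n) overall, and inputs of fewer than 3 points yield [].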
+  d3.geom.hull = function(vertices) {
+    var x = d3_geom_pointX, y = d3_geom_pointY;
+    if (arguments.length) return hull(vertices);
+    function hull(data) {
+      if (data.length < 3) return [];
+      var fx = d3_functor(x), fy = d3_functor(y), i, n = data.length, points = [], flippedPoints = [];
+      for (i = 0; i < n; i++) {
+        points.push([ +fx.call(this, data[i], i), +fy.call(this, data[i], i), i ]);
+      }
+      points.sort(d3_geom_hullOrder);
+      for (i = 0; i < n; i++) flippedPoints.push([ points[i][0], -points[i][1] ]);
+      var upper = d3_geom_hullUpper(points), lower = d3_geom_hullUpper(flippedPoints);
+      var skipLeft = lower[0] === upper[0], skipRight = lower[lower.length - 1] === upper[upper.length - 1], polygon = [];
+      for (i = upper.length - 1; i >= 0; --i) polygon.push(data[points[upper[i]][2]]);
+      for (i = +skipLeft; i < lower.length - skipRight; ++i) polygon.push(data[points[lower[i]][2]]);
+      return polygon;
+    }
+    hull.x = function(_) {
+      return arguments.length ? (x = _, hull) : x;
+    };
+    hull.y = function(_) {
+      return arguments.length ? (y = _, hull) : y;
+    };
+    return hull;
+  };
+  function d3_geom_hullUpper(points) {
+    var n = points.length, hull = [ 0, 1 ], hs = 2;
+    for (var i = 2; i < n; i++) {
+      while (hs > 1 && d3_cross2d(points[hull[hs - 2]], points[hull[hs - 1]], points[i]) <= 0) --hs;
+      hull[hs++] = i;
+    }
+    return hull.slice(0, hs);
+  }
+  function d3_geom_hullOrder(a, b) {
+    return a[0] - b[0] || a[1] - b[1];
+  }
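+  // Planar polygon helpers on a plain vertex array: area() is the signed
+  // shoelace sum, centroid() the standard area-weighted formula, and clip()
+  // performs Sutherland-Hodgman clipping of the subject polygon against this
+  // (convex) polygon, mutating and returning the subject.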
+  d3.geom.polygon = function(coordinates) {
+    d3_subclass(coordinates, d3_geom_polygonPrototype);
+    return coordinates;
+  };
+  var d3_geom_polygonPrototype = d3.geom.polygon.prototype = [];
+  d3_geom_polygonPrototype.area = function() {
+    var i = -1, n = this.length, a, b = this[n - 1], area = 0;
+    while (++i < n) {
+      a = b;
+      b = this[i];
+      area += a[1] * b[0] - a[0] * b[1];
+    }
+    return area * .5;
+  };
+  d3_geom_polygonPrototype.centroid = function(k) {
+    var i = -1, n = this.length, x = 0, y = 0, a, b = this[n - 1], c;
+    if (!arguments.length) k = -1 / (6 * this.area());
+    while (++i < n) {
+      a = b;
+      b = this[i];
+      c = a[0] * b[1] - b[0] * a[1];
+      x += (a[0] + b[0]) * c;
+      y += (a[1] + b[1]) * c;
+    }
+    return [ x * k, y * k ];
+  };
+  d3_geom_polygonPrototype.clip = function(subject) {
+    var input, closed = d3_geom_polygonClosed(subject), i = -1, n = this.length - d3_geom_polygonClosed(this), j, m, a = this[n - 1], b, c, d;
+    while (++i < n) {
+      input = subject.slice();
+      subject.length = 0;
+      b = this[i];
+      c = input[(m = input.length - closed) - 1];
+      j = -1;
+      while (++j < m) {
+        d = input[j];
+        if (d3_geom_polygonInside(d, a, b)) {
+          if (!d3_geom_polygonInside(c, a, b)) {
+            subject.push(d3_geom_polygonIntersect(c, d, a, b));
+          }
+          subject.push(d);
+        } else if (d3_geom_polygonInside(c, a, b)) {
+          subject.push(d3_geom_polygonIntersect(c, d, a, b));
+        }
+        c = d;
+      }
+      if (closed) subject.push(subject[0]);
+      a = b;
+    }
+    return subject;
+  };
+  function d3_geom_polygonInside(p, a, b) {
+    return (b[0] - a[0]) * (p[1] - a[1]) < (b[1] - a[1]) * (p[0] - a[0]);
+  }
+  function d3_geom_polygonIntersect(c, d, a, b) {
+    var x1 = c[0], x3 = a[0], x21 = d[0] - x1, x43 = b[0] - x3, y1 = c[1], y3 = a[1], y21 = d[1] - y1, y43 = b[1] - y3, ua = (x43 * (y1 - y3) - y43 * (x1 - x3)) / (y43 * x21 - x43 * y21);
+    return [ x1 + ua * x21, y1 + ua * y21 ];
+  }
+  function d3_geom_polygonClosed(coordinates) {
+    var a = coordinates[0], b = coordinates[coordinates.length - 1];
+    return !(a[0] - b[0] || a[1] - b[1]);
+  }
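+  // Voronoi computation via Fortune's sweepline algorithm: the beach line and
+  // the circle-event queue are kept in red-black trees with free-lists
+  // (d3_geom_voronoiBeachPool / CirclePool) to limit allocation, and finished
+  // edges are clipped and cells closed against the bounding box.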
+  var d3_geom_voronoiEdges, d3_geom_voronoiCells, d3_geom_voronoiBeaches, d3_geom_voronoiBeachPool = [], d3_geom_voronoiFirstCircle, d3_geom_voronoiCircles, d3_geom_voronoiCirclePool = [];
+  function d3_geom_voronoiBeach() {
+    d3_geom_voronoiRedBlackNode(this);
+    this.edge = this.site = this.circle = null;
+  }
+  function d3_geom_voronoiCreateBeach(site) {
+    var beach = d3_geom_voronoiBeachPool.pop() || new d3_geom_voronoiBeach();
+    beach.site = site;
+    return beach;
+  }
+  function d3_geom_voronoiDetachBeach(beach) {
+    d3_geom_voronoiDetachCircle(beach);
+    d3_geom_voronoiBeaches.remove(beach);
+    d3_geom_voronoiBeachPool.push(beach);
+    d3_geom_voronoiRedBlackNode(beach);
+  }
+  function d3_geom_voronoiRemoveBeach(beach) {
+    var circle = beach.circle, x = circle.x, y = circle.cy, vertex = {
+      x: x,
+      y: y
+    }, previous = beach.P, next = beach.N, disappearing = [ beach ];
+    d3_geom_voronoiDetachBeach(beach);
+    var lArc = previous;
+    while (lArc.circle && abs(x - lArc.circle.x) < ε && abs(y - lArc.circle.cy) < ε) {
+      previous = lArc.P;
+      disappearing.unshift(lArc);
+      d3_geom_voronoiDetachBeach(lArc);
+      lArc = previous;
+    }
+    disappearing.unshift(lArc);
+    d3_geom_voronoiDetachCircle(lArc);
+    var rArc = next;
+    while (rArc.circle && abs(x - rArc.circle.x) < ε && abs(y - rArc.circle.cy) < ε) {
+      next = rArc.N;
+      disappearing.push(rArc);
+      d3_geom_voronoiDetachBeach(rArc);
+      rArc = next;
+    }
+    disappearing.push(rArc);
+    d3_geom_voronoiDetachCircle(rArc);
+    var nArcs = disappearing.length, iArc;
+    for (iArc = 1; iArc < nArcs; ++iArc) {
+      rArc = disappearing[iArc];
+      lArc = disappearing[iArc - 1];
+      d3_geom_voronoiSetEdgeEnd(rArc.edge, lArc.site, rArc.site, vertex);
+    }
+    lArc = disappearing[0];
+    rArc = disappearing[nArcs - 1];
+    rArc.edge = d3_geom_voronoiCreateEdge(lArc.site, rArc.site, null, vertex);
+    d3_geom_voronoiAttachCircle(lArc);
+    d3_geom_voronoiAttachCircle(rArc);
+  }
+  function d3_geom_voronoiAddBeach(site) {
+    var x = site.x, directrix = site.y, lArc, rArc, dxl, dxr, node = d3_geom_voronoiBeaches._;
+    while (node) {
+      dxl = d3_geom_voronoiLeftBreakPoint(node, directrix) - x;
+      if (dxl > ε) node = node.L; else {
+        dxr = x - d3_geom_voronoiRightBreakPoint(node, directrix);
+        if (dxr > ε) {
+          if (!node.R) {
+            lArc = node;
+            break;
+          }
+          node = node.R;
+        } else {
+          if (dxl > -ε) {
+            lArc = node.P;
+            rArc = node;
+          } else if (dxr > -ε) {
+            lArc = node;
+            rArc = node.N;
+          } else {
+            lArc = rArc = node;
+          }
+          break;
+        }
+      }
+    }
+    var newArc = d3_geom_voronoiCreateBeach(site);
+    d3_geom_voronoiBeaches.insert(lArc, newArc);
+    if (!lArc && !rArc) return;
+    if (lArc === rArc) {
+      d3_geom_voronoiDetachCircle(lArc);
+      rArc = d3_geom_voronoiCreateBeach(lArc.site);
+      d3_geom_voronoiBeaches.insert(newArc, rArc);
+      newArc.edge = rArc.edge = d3_geom_voronoiCreateEdge(lArc.site, newArc.site);
+      d3_geom_voronoiAttachCircle(lArc);
+      d3_geom_voronoiAttachCircle(rArc);
+      return;
+    }
+    if (!rArc) {
+      newArc.edge = d3_geom_voronoiCreateEdge(lArc.site, newArc.site);
+      return;
+    }
+    d3_geom_voronoiDetachCircle(lArc);
+    d3_geom_voronoiDetachCircle(rArc);
+    var lSite = lArc.site, ax = lSite.x, ay = lSite.y, bx = site.x - ax, by = site.y - ay, rSite = rArc.site, cx = rSite.x - ax, cy = rSite.y - ay, d = 2 * (bx * cy - by * cx), hb = bx * bx + by * by, hc = cx * cx + cy * cy, vertex = {
+      x: (cy * hb - by * hc) / d + ax,
+      y: (bx * hc - cx * hb) / d + ay
+    };
+    d3_geom_voronoiSetEdgeEnd(rArc.edge, lSite, rSite, vertex);
+    newArc.edge = d3_geom_voronoiCreateEdge(lSite, site, null, vertex);
+    rArc.edge = d3_geom_voronoiCreateEdge(site, rSite, null, vertex);
+    d3_geom_voronoiAttachCircle(lArc);
+    d3_geom_voronoiAttachCircle(rArc);
+  }
+  function d3_geom_voronoiLeftBreakPoint(arc, directrix) {
+    var site = arc.site, rfocx = site.x, rfocy = site.y, pby2 = rfocy - directrix;
+    if (!pby2) return rfocx;
+    var lArc = arc.P;
+    if (!lArc) return -Infinity;
+    site = lArc.site;
+    var lfocx = site.x, lfocy = site.y, plby2 = lfocy - directrix;
+    if (!plby2) return lfocx;
+    var hl = lfocx - rfocx, aby2 = 1 / pby2 - 1 / plby2, b = hl / plby2;
+    if (aby2) return (-b + Math.sqrt(b * b - 2 * aby2 * (hl * hl / (-2 * plby2) - lfocy + plby2 / 2 + rfocy - pby2 / 2))) / aby2 + rfocx;
+    return (rfocx + lfocx) / 2;
+  }
+  function d3_geom_voronoiRightBreakPoint(arc, directrix) {
+    var rArc = arc.N;
+    if (rArc) return d3_geom_voronoiLeftBreakPoint(rArc, directrix);
+    var site = arc.site;
+    return site.y === directrix ? site.x : Infinity;
+  }
+  function d3_geom_voronoiCell(site) {
+    this.site = site;
+    this.edges = [];
+  }
+  d3_geom_voronoiCell.prototype.prepare = function() {
+    var halfEdges = this.edges, iHalfEdge = halfEdges.length, edge;
+    while (iHalfEdge--) {
+      edge = halfEdges[iHalfEdge].edge;
+      if (!edge.b || !edge.a) halfEdges.splice(iHalfEdge, 1);
+    }
+    halfEdges.sort(d3_geom_voronoiHalfEdgeOrder);
+    return halfEdges.length;
+  };
+  function d3_geom_voronoiCloseCells(extent) {
+    var x0 = extent[0][0], x1 = extent[1][0], y0 = extent[0][1], y1 = extent[1][1], x2, y2, x3, y3, cells = d3_geom_voronoiCells, iCell = cells.length, cell, iHalfEdge, halfEdges, nHalfEdges, start, end;
+    while (iCell--) {
+      cell = cells[iCell];
+      if (!cell || !cell.prepare()) continue;
+      halfEdges = cell.edges;
+      nHalfEdges = halfEdges.length;
+      iHalfEdge = 0;
+      while (iHalfEdge < nHalfEdges) {
+        end = halfEdges[iHalfEdge].end(), x3 = end.x, y3 = end.y;
+        start = halfEdges[++iHalfEdge % nHalfEdges].start(), x2 = start.x, y2 = start.y;
+        if (abs(x3 - x2) > ε || abs(y3 - y2) > ε) {
+          halfEdges.splice(iHalfEdge, 0, new d3_geom_voronoiHalfEdge(d3_geom_voronoiCreateBorderEdge(cell.site, end, abs(x3 - x0) < ε && y1 - y3 > ε ? {
+            x: x0,
+            y: abs(x2 - x0) < ε ? y2 : y1
+          } : abs(y3 - y1) < ε && x1 - x3 > ε ? {
+            x: abs(y2 - y1) < ε ? x2 : x1,
+            y: y1
+          } : abs(x3 - x1) < ε && y3 - y0 > ε ? {
+            x: x1,
+            y: abs(x2 - x1) < ε ? y2 : y0
+          } : abs(y3 - y0) < ε && x3 - x0 > ε ? {
+            x: abs(y2 - y0) < ε ? x2 : x0,
+            y: y0
+          } : null), cell.site, null));
+          ++nHalfEdges;
+        }
+      }
+    }
+  }
+  function d3_geom_voronoiHalfEdgeOrder(a, b) {
+    return b.angle - a.angle;
+  }
+  function d3_geom_voronoiCircle() {
+    d3_geom_voronoiRedBlackNode(this);
+    this.x = this.y = this.arc = this.site = this.cy = null;
+  }
+  function d3_geom_voronoiAttachCircle(arc) {
+    var lArc = arc.P, rArc = arc.N;
+    if (!lArc || !rArc) return;
+    var lSite = lArc.site, cSite = arc.site, rSite = rArc.site;
+    if (lSite === rSite) return;
+    var bx = cSite.x, by = cSite.y, ax = lSite.x - bx, ay = lSite.y - by, cx = rSite.x - bx, cy = rSite.y - by;
+    var d = 2 * (ax * cy - ay * cx);
+    if (d >= -ε2) return;
+    var ha = ax * ax + ay * ay, hc = cx * cx + cy * cy, x = (cy * ha - ay * hc) / d, y = (ax * hc - cx * ha) / d, cy = y + by;
+    var circle = d3_geom_voronoiCirclePool.pop() || new d3_geom_voronoiCircle();
+    circle.arc = arc;
+    circle.site = cSite;
+    circle.x = x + bx;
+    circle.y = cy + Math.sqrt(x * x + y * y);
+    circle.cy = cy;
+    arc.circle = circle;
+    var before = null, node = d3_geom_voronoiCircles._;
+    while (node) {
+      if (circle.y < node.y || circle.y === node.y && circle.x <= node.x) {
+        if (node.L) node = node.L; else {
+          before = node.P;
+          break;
+        }
+      } else {
+        if (node.R) node = node.R; else {
+          before = node;
+          break;
+        }
+      }
+    }
+    d3_geom_voronoiCircles.insert(before, circle);
+    if (!before) d3_geom_voronoiFirstCircle = circle;
+  }
+  function d3_geom_voronoiDetachCircle(arc) {
+    var circle = arc.circle;
+    if (circle) {
+      if (!circle.P) d3_geom_voronoiFirstCircle = circle.N;
+      d3_geom_voronoiCircles.remove(circle);
+      d3_geom_voronoiCirclePool.push(circle);
+      d3_geom_voronoiRedBlackNode(circle);
+      arc.circle = null;
+    }
+  }
+  function d3_geom_voronoiClipEdges(extent) {
+    var edges = d3_geom_voronoiEdges, clip = d3_geom_clipLine(extent[0][0], extent[0][1], extent[1][0], extent[1][1]), i = edges.length, e;
+    while (i--) {
+      e = edges[i];
+      if (!d3_geom_voronoiConnectEdge(e, extent) || !clip(e) || abs(e.a.x - e.b.x) < ε && abs(e.a.y - e.b.y) < ε) {
+        e.a = e.b = null;
+        edges.splice(i, 1);
+      }
+    }
+  }
+  function d3_geom_voronoiConnectEdge(edge, extent) {
+    var vb = edge.b;
+    if (vb) return true;
+    var va = edge.a, x0 = extent[0][0], x1 = extent[1][0], y0 = extent[0][1], y1 = extent[1][1], lSite = edge.l, rSite = edge.r, lx = lSite.x, ly = lSite.y, rx = rSite.x, ry = rSite.y, fx = (lx + rx) / 2, fy = (ly + ry) / 2, fm, fb;
+    if (ry === ly) {
+      if (fx < x0 || fx >= x1) return;
+      if (lx > rx) {
+        if (!va) va = {
+          x: fx,
+          y: y0
+        }; else if (va.y >= y1) return;
+        vb = {
+          x: fx,
+          y: y1
+        };
+      } else {
+        if (!va) va = {
+          x: fx,
+          y: y1
+        }; else if (va.y < y0) return;
+        vb = {
+          x: fx,
+          y: y0
+        };
+      }
+    } else {
+      fm = (lx - rx) / (ry - ly);
+      fb = fy - fm * fx;
+      if (fm < -1 || fm > 1) {
+        if (lx > rx) {
+          if (!va) va = {
+            x: (y0 - fb) / fm,
+            y: y0
+          }; else if (va.y >= y1) return;
+          vb = {
+            x: (y1 - fb) / fm,
+            y: y1
+          };
+        } else {
+          if (!va) va = {
+            x: (y1 - fb) / fm,
+            y: y1
+          }; else if (va.y < y0) return;
+          vb = {
+            x: (y0 - fb) / fm,
+            y: y0
+          };
+        }
+      } else {
+        if (ly < ry) {
+          if (!va) va = {
+            x: x0,
+            y: fm * x0 + fb
+          }; else if (va.x >= x1) return;
+          vb = {
+            x: x1,
+            y: fm * x1 + fb
+          };
+        } else {
+          if (!va) va = {
+            x: x1,
+            y: fm * x1 + fb
+          }; else if (va.x < x0) return;
+          vb = {
+            x: x0,
+            y: fm * x0 + fb
+          };
+        }
+      }
+    }
+    edge.a = va;
+    edge.b = vb;
+    return true;
+  }
+  function d3_geom_voronoiEdge(lSite, rSite) {
+    this.l = lSite;
+    this.r = rSite;
+    this.a = this.b = null;
+  }
+  function d3_geom_voronoiCreateEdge(lSite, rSite, va, vb) {
+    var edge = new d3_geom_voronoiEdge(lSite, rSite);
+    d3_geom_voronoiEdges.push(edge);
+    if (va) d3_geom_voronoiSetEdgeEnd(edge, lSite, rSite, va);
+    if (vb) d3_geom_voronoiSetEdgeEnd(edge, rSite, lSite, vb);
+    d3_geom_voronoiCells[lSite.i].edges.push(new d3_geom_voronoiHalfEdge(edge, lSite, rSite));
+    d3_geom_voronoiCells[rSite.i].edges.push(new d3_geom_voronoiHalfEdge(edge, rSite, lSite));
+    return edge;
+  }
+  function d3_geom_voronoiCreateBorderEdge(lSite, va, vb) {
+    var edge = new d3_geom_voronoiEdge(lSite, null);
+    edge.a = va;
+    edge.b = vb;
+    d3_geom_voronoiEdges.push(edge);
+    return edge;
+  }
+  function d3_geom_voronoiSetEdgeEnd(edge, lSite, rSite, vertex) {
+    if (!edge.a && !edge.b) {
+      edge.a = vertex;
+      edge.l = lSite;
+      edge.r = rSite;
+    } else if (edge.l === rSite) {
+      edge.b = vertex;
+    } else {
+      edge.a = vertex;
+    }
+  }
+  function d3_geom_voronoiHalfEdge(edge, lSite, rSite) {
+    var va = edge.a, vb = edge.b;
+    this.edge = edge;
+    this.site = lSite;
+    this.angle = rSite ? Math.atan2(rSite.y - lSite.y, rSite.x - lSite.x) : edge.l === lSite ? Math.atan2(vb.x - va.x, va.y - vb.y) : Math.atan2(va.x - vb.x, vb.y - va.y);
+  }
+  d3_geom_voronoiHalfEdge.prototype = {
+    start: function() {
+      return this.edge.l === this.site ? this.edge.a : this.edge.b;
+    },
+    end: function() {
+      return this.edge.l === this.site ? this.edge.b : this.edge.a;
+    }
+  };
+  function d3_geom_voronoiRedBlackTree() {
+    this._ = null;
+  }
+  function d3_geom_voronoiRedBlackNode(node) {
+    node.U = node.C = node.L = node.R = node.P = node.N = null;
+  }
+  d3_geom_voronoiRedBlackTree.prototype = {
+    insert: function(after, node) {
+      var parent, grandpa, uncle;
+      if (after) {
+        node.P = after;
+        node.N = after.N;
+        if (after.N) after.N.P = node;
+        after.N = node;
+        if (after.R) {
+          after = after.R;
+          while (after.L) after = after.L;
+          after.L = node;
+        } else {
+          after.R = node;
+        }
+        parent = after;
+      } else if (this._) {
+        after = d3_geom_voronoiRedBlackFirst(this._);
+        node.P = null;
+        node.N = after;
+        after.P = after.L = node;
+        parent = after;
+      } else {
+        node.P = node.N = null;
+        this._ = node;
+        parent = null;
+      }
+      node.L = node.R = null;
+      node.U = parent;
+      node.C = true;
+      after = node;
+      while (parent && parent.C) {
+        grandpa = parent.U;
+        if (parent === grandpa.L) {
+          uncle = grandpa.R;
+          if (uncle && uncle.C) {
+            parent.C = uncle.C = false;
+            grandpa.C = true;
+            after = grandpa;
+          } else {
+            if (after === parent.R) {
+              d3_geom_voronoiRedBlackRotateLeft(this, parent);
+              after = parent;
+              parent = after.U;
+            }
+            parent.C = false;
+            grandpa.C = true;
+            d3_geom_voronoiRedBlackRotateRight(this, grandpa);
+          }
+        } else {
+          uncle = grandpa.L;
+          if (uncle && uncle.C) {
+            parent.C = uncle.C = false;
+            grandpa.C = true;
+            after = grandpa;
+          } else {
+            if (after === parent.L) {
+              d3_geom_voronoiRedBlackRotateRight(this, parent);
+              after = parent;
+              parent = after.U;
+            }
+            parent.C = false;
+            grandpa.C = true;
+            d3_geom_voronoiRedBlackRotateLeft(this, grandpa);
+          }
+        }
+        parent = after.U;
+      }
+      this._.C = false;
+    },
+    remove: function(node) {
+      if (node.N) node.N.P = node.P;
+      if (node.P) node.P.N = node.N;
+      node.N = node.P = null;
+      var parent = node.U, sibling, left = node.L, right = node.R, next, red;
+      if (!left) next = right; else if (!right) next = left; else next = d3_geom_voronoiRedBlackFirst(right);
+      if (parent) {
+        if (parent.L === node) parent.L = next; else parent.R = next;
+      } else {
+        this._ = next;
+      }
+      if (left && right) {
+        red = next.C;
+        next.C = node.C;
+        next.L = left;
+        left.U = next;
+        if (next !== right) {
+          parent = next.U;
+          next.U = node.U;
+          node = next.R;
+          parent.L = node;
+          next.R = right;
+          right.U = next;
+        } else {
+          next.U = parent;
+          parent = next;
+          node = next.R;
+        }
+      } else {
+        red = node.C;
+        node = next;
+      }
+      if (node) node.U = parent;
+      if (red) return;
+      if (node && node.C) {
+        node.C = false;
+        return;
+      }
+      do {
+        if (node === this._) break;
+        if (node === parent.L) {
+          sibling = parent.R;
+          if (sibling.C) {
+            sibling.C = false;
+            parent.C = true;
+            d3_geom_voronoiRedBlackRotateLeft(this, parent);
+            sibling = parent.R;
+          }
+          if (sibling.L && sibling.L.C || sibling.R && sibling.R.C) {
+            if (!sibling.R || !sibling.R.C) {
+              sibling.L.C = false;
+              sibling.C = true;
+              d3_geom_voronoiRedBlackRotateRight(this, sibling);
+              sibling = parent.R;
+            }
+            sibling.C = parent.C;
+            parent.C = sibling.R.C = false;
+            d3_geom_voronoiRedBlackRotateLeft(this, parent);
+            node = this._;
+            break;
+          }
+        } else {
+          sibling = parent.L;
+          if (sibling.C) {
+            sibling.C = false;
+            parent.C = true;
+            d3_geom_voronoiRedBlackRotateRight(this, parent);
+            sibling = parent.L;
+          }
+          if (sibling.L && sibling.L.C || sibling.R && sibling.R.C) {
+            if (!sibling.L || !sibling.L.C) {
+              sibling.R.C = false;
+              sibling.C = true;
+              d3_geom_voronoiRedBlackRotateLeft(this, sibling);
+              sibling = parent.L;
+            }
+            sibling.C = parent.C;
+            parent.C = sibling.L.C = false;
+            d3_geom_voronoiRedBlackRotateRight(this, parent);
+            node = this._;
+            break;
+          }
+        }
+        sibling.C = true;
+        node = parent;
+        parent = parent.U;
+      } while (!node.C);
+      if (node) node.C = false;
+    }
+  };
+  function d3_geom_voronoiRedBlackRotateLeft(tree, node) {
+    var p = node, q = node.R, parent = p.U;
+    if (parent) {
+      if (parent.L === p) parent.L = q; else parent.R = q;
+    } else {
+      tree._ = q;
+    }
+    q.U = parent;
+    p.U = q;
+    p.R = q.L;
+    if (p.R) p.R.U = p;
+    q.L = p;
+  }
+  function d3_geom_voronoiRedBlackRotateRight(tree, node) {
+    var p = node, q = node.L, parent = p.U;
+    if (parent) {
+      if (parent.L === p) parent.L = q; else parent.R = q;
+    } else {
+      tree._ = q;
+    }
+    q.U = parent;
+    p.U = q;
+    p.L = q.R;
+    if (p.L) p.L.U = p;
+    q.R = p;
+  }
+  function d3_geom_voronoiRedBlackFirst(node) {
+    while (node.L) node = node.L;
+    return node;
+  }
+  function d3_geom_voronoi(sites, bbox) {
+    var site = sites.sort(d3_geom_voronoiVertexOrder).pop(), x0, y0, circle;
+    d3_geom_voronoiEdges = [];
+    d3_geom_voronoiCells = new Array(sites.length);
+    d3_geom_voronoiBeaches = new d3_geom_voronoiRedBlackTree();
+    d3_geom_voronoiCircles = new d3_geom_voronoiRedBlackTree();
+    while (true) {
+      circle = d3_geom_voronoiFirstCircle;
+      if (site && (!circle || site.y < circle.y || site.y === circle.y && site.x < circle.x)) {
+        if (site.x !== x0 || site.y !== y0) {
+          d3_geom_voronoiCells[site.i] = new d3_geom_voronoiCell(site);
+          d3_geom_voronoiAddBeach(site);
+          x0 = site.x, y0 = site.y;
+        }
+        site = sites.pop();
+      } else if (circle) {
+        d3_geom_voronoiRemoveBeach(circle.arc);
+      } else {
+        break;
+      }
+    }
+    if (bbox) d3_geom_voronoiClipEdges(bbox), d3_geom_voronoiCloseCells(bbox);
+    var diagram = {
+      cells: d3_geom_voronoiCells,
+      edges: d3_geom_voronoiEdges
+    };
+    d3_geom_voronoiBeaches = d3_geom_voronoiCircles = d3_geom_voronoiEdges = d3_geom_voronoiCells = null;
+    return diagram;
+  }
+  function d3_geom_voronoiVertexOrder(a, b) {
+    return b.y - a.y || b.x - a.x;
+  }
+  d3.geom.voronoi = function(points) {
+    var x = d3_geom_pointX, y = d3_geom_pointY, fx = x, fy = y, clipExtent = d3_geom_voronoiClipExtent;
+    if (points) return voronoi(points);
+    function voronoi(data) {
+      var polygons = new Array(data.length), x0 = clipExtent[0][0], y0 = clipExtent[0][1], x1 = clipExtent[1][0], y1 = clipExtent[1][1];
+      d3_geom_voronoi(sites(data), clipExtent).cells.forEach(function(cell, i) {
+        var edges = cell.edges, site = cell.site, polygon = polygons[i] = edges.length ? edges.map(function(e) {
+          var s = e.start();
+          return [ s.x, s.y ];
+        }) : site.x >= x0 && site.x <= x1 && site.y >= y0 && site.y <= y1 ? [ [ x0, y1 ], [ x1, y1 ], [ x1, y0 ], [ x0, y0 ] ] : [];
+        polygon.point = data[i];
+      });
+      return polygons;
+    }
+    function sites(data) {
+      return data.map(function(d, i) {
+        return {
+          x: Math.round(fx(d, i) / ε) * ε,
+          y: Math.round(fy(d, i) / ε) * ε,
+          i: i
+        };
+      });
+    }
+    voronoi.links = function(data) {
+      return d3_geom_voronoi(sites(data)).edges.filter(function(edge) {
+        return edge.l && edge.r;
+      }).map(function(edge) {
+        return {
+          source: data[edge.l.i],
+          target: data[edge.r.i]
+        };
+      });
+    };
+    voronoi.triangles = function(data) {
+      var triangles = [];
+      d3_geom_voronoi(sites(data)).cells.forEach(function(cell, i) {
+        var site = cell.site, edges = cell.edges.sort(d3_geom_voronoiHalfEdgeOrder), j = -1, m = edges.length, e0, s0, e1 = edges[m - 1].edge, s1 = e1.l === site ? e1.r : e1.l;
+        while (++j < m) {
+          e0 = e1;
+          s0 = s1;
+          e1 = edges[j].edge;
+          s1 = e1.l === site ? e1.r : e1.l;
+          if (i < s0.i && i < s1.i && d3_geom_voronoiTriangleArea(site, s0, s1) < 0) {
+            triangles.push([ data[i], data[s0.i], data[s1.i] ]);
+          }
+        }
+      });
+      return triangles;
+    };
+    voronoi.x = function(_) {
+      return arguments.length ? (fx = d3_functor(x = _), voronoi) : x;
+    };
+    voronoi.y = function(_) {
+      return arguments.length ? (fy = d3_functor(y = _), voronoi) : y;
+    };
+    voronoi.clipExtent = function(_) {
+      if (!arguments.length) return clipExtent === d3_geom_voronoiClipExtent ? null : clipExtent;
+      clipExtent = _ == null ? d3_geom_voronoiClipExtent : _;
+      return voronoi;
+    };
+    voronoi.size = function(_) {
+      if (!arguments.length) return clipExtent === d3_geom_voronoiClipExtent ? null : clipExtent && clipExtent[1];
+      return voronoi.clipExtent(_ && [ [ 0, 0 ], _ ]);
+    };
+    return voronoi;
+  };
+  var d3_geom_voronoiClipExtent = [ [ -1e6, -1e6 ], [ 1e6, 1e6 ] ];
+  function d3_geom_voronoiTriangleArea(a, b, c) {
+    return (a.x - c.x) * (b.y - a.y) - (a.x - b.x) * (c.y - a.y);
+  }
+  d3.geom.delaunay = function(vertices) {
+    return d3.geom.voronoi().triangles(vertices);
+  };
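+  // Usage sketch (illustrative; not part of the upstream d3 build):
+  //   var points = [[200, 150], [760, 300], [400, 400]];
+  //   var voronoi = d3.geom.voronoi().clipExtent([[0, 0], [960, 500]]);
+  //   var cells = voronoi(points);         // one clipped polygon per site; cells[i].point === points[i]
+  //   var links = voronoi.links(points);   // Delaunay edges as {source, target} data pairs
+  //   var tris = d3.geom.delaunay(points); // Delaunay triangles as triples of input points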
+  d3.geom.quadtree = function(points, x1, y1, x2, y2) {
+    var x = d3_geom_pointX, y = d3_geom_pointY, compat;
+    if (compat = arguments.length) {
+      x = d3_geom_quadtreeCompatX;
+      y = d3_geom_quadtreeCompatY;
+      if (compat === 3) {
+        y2 = y1;
+        x2 = x1;
+        y1 = x1 = 0;
+      }
+      return quadtree(points);
+    }
+    function quadtree(data) {
+      var d, fx = d3_functor(x), fy = d3_functor(y), xs, ys, i, n, x1_, y1_, x2_, y2_;
+      if (x1 != null) {
+        x1_ = x1, y1_ = y1, x2_ = x2, y2_ = y2;
+      } else {
+        x2_ = y2_ = -(x1_ = y1_ = Infinity);
+        xs = [], ys = [];
+        n = data.length;
+        if (compat) for (i = 0; i < n; ++i) {
+          d = data[i];
+          if (d.x < x1_) x1_ = d.x;
+          if (d.y < y1_) y1_ = d.y;
+          if (d.x > x2_) x2_ = d.x;
+          if (d.y > y2_) y2_ = d.y;
+          xs.push(d.x);
+          ys.push(d.y);
+        } else for (i = 0; i < n; ++i) {
+          var x_ = +fx(d = data[i], i), y_ = +fy(d, i);
+          if (x_ < x1_) x1_ = x_;
+          if (y_ < y1_) y1_ = y_;
+          if (x_ > x2_) x2_ = x_;
+          if (y_ > y2_) y2_ = y_;
+          xs.push(x_);
+          ys.push(y_);
+        }
+      }
+      var dx = x2_ - x1_, dy = y2_ - y1_;
+      if (dx > dy) y2_ = y1_ + dx; else x2_ = x1_ + dy;
+      function insert(n, d, x, y, x1, y1, x2, y2) {
+        if (isNaN(x) || isNaN(y)) return;
+        if (n.leaf) {
+          var nx = n.x, ny = n.y;
+          if (nx != null) {
+            if (abs(nx - x) + abs(ny - y) < .01) {
+              insertChild(n, d, x, y, x1, y1, x2, y2);
+            } else {
+              var nPoint = n.point;
+              n.x = n.y = n.point = null;
+              insertChild(n, nPoint, nx, ny, x1, y1, x2, y2);
+              insertChild(n, d, x, y, x1, y1, x2, y2);
+            }
+          } else {
+            n.x = x, n.y = y, n.point = d;
+          }
+        } else {
+          insertChild(n, d, x, y, x1, y1, x2, y2);
+        }
+      }
+      function insertChild(n, d, x, y, x1, y1, x2, y2) {
+        var xm = (x1 + x2) * .5, ym = (y1 + y2) * .5, right = x >= xm, below = y >= ym, i = below << 1 | right;
+        n.leaf = false;
+        n = n.nodes[i] || (n.nodes[i] = d3_geom_quadtreeNode());
+        if (right) x1 = xm; else x2 = xm;
+        if (below) y1 = ym; else y2 = ym;
+        insert(n, d, x, y, x1, y1, x2, y2);
+      }
+      var root = d3_geom_quadtreeNode();
+      root.add = function(d) {
+        insert(root, d, +fx(d, ++i), +fy(d, i), x1_, y1_, x2_, y2_);
+      };
+      root.visit = function(f) {
+        d3_geom_quadtreeVisit(f, root, x1_, y1_, x2_, y2_);
+      };
+      root.find = function(point) {
+        return d3_geom_quadtreeFind(root, point[0], point[1], x1_, y1_, x2_, y2_);
+      };
+      i = -1;
+      if (x1 == null) {
+        while (++i < n) {
+          insert(root, data[i], xs[i], ys[i], x1_, y1_, x2_, y2_);
+        }
+        --i;
+      } else data.forEach(root.add);
+      xs = ys = data = d = null;
+      return root;
+    }
+    quadtree.x = function(_) {
+      return arguments.length ? (x = _, quadtree) : x;
+    };
+    quadtree.y = function(_) {
+      return arguments.length ? (y = _, quadtree) : y;
+    };
+    quadtree.extent = function(_) {
+      if (!arguments.length) return x1 == null ? null : [ [ x1, y1 ], [ x2, y2 ] ];
+      if (_ == null) x1 = y1 = x2 = y2 = null; else x1 = +_[0][0], y1 = +_[0][1], x2 = +_[1][0], y2 = +_[1][1];
+      return quadtree;
+    };
+    quadtree.size = function(_) {
+      if (!arguments.length) return x1 == null ? null : [ x2 - x1, y2 - y1 ];
+      if (_ == null) x1 = y1 = x2 = y2 = null; else x1 = y1 = 0, x2 = +_[0], y2 = +_[1];
+      return quadtree;
+    };
+    return quadtree;
+  };
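+  // Usage sketch (illustrative; not part of the upstream d3 build):
+  //   var root = d3.geom.quadtree()([[10, 20], [30, 40], [50, 10]]); // factory form; accessors default to p[0], p[1]
+  //   root.add([25, 25]);  // insert one more point
+  //   root.find([26, 24]); // nearest stored point -> [25, 25]
+  //   root.visit(function(node, x1, y1, x2, y2) {
+  //     return false;      // return true to prune that subtree from the traversal
+  //   });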
+  function d3_geom_quadtreeCompatX(d) {
+    return d.x;
+  }
+  function d3_geom_quadtreeCompatY(d) {
+    return d.y;
+  }
+  function d3_geom_quadtreeNode() {
+    return {
+      leaf: true,
+      nodes: [],
+      point: null,
+      x: null,
+      y: null
+    };
+  }
+  function d3_geom_quadtreeVisit(f, node, x1, y1, x2, y2) {
+    if (!f(node, x1, y1, x2, y2)) {
+      var sx = (x1 + x2) * .5, sy = (y1 + y2) * .5, children = node.nodes;
+      if (children[0]) d3_geom_quadtreeVisit(f, children[0], x1, y1, sx, sy);
+      if (children[1]) d3_geom_quadtreeVisit(f, children[1], sx, y1, x2, sy);
+      if (children[2]) d3_geom_quadtreeVisit(f, children[2], x1, sy, sx, y2);
+      if (children[3]) d3_geom_quadtreeVisit(f, children[3], sx, sy, x2, y2);
+    }
+  }
+  function d3_geom_quadtreeFind(root, x, y, x0, y0, x3, y3) {
+    var minDistance2 = Infinity, closestPoint;
+    (function find(node, x1, y1, x2, y2) {
+      if (x1 > x3 || y1 > y3 || x2 < x0 || y2 < y0) return;
+      var point = node.point;
+      if (point) {
+        var dx = x - node.x, dy = y - node.y, distance2 = dx * dx + dy * dy;
+        if (distance2 < minDistance2) {
+          var distance = Math.sqrt(minDistance2 = distance2);
+          x0 = x - distance, y0 = y - distance;
+          x3 = x + distance, y3 = y + distance;
+          closestPoint = point;
+        }
+      }
+      var children = node.nodes, xm = (x1 + x2) * .5, ym = (y1 + y2) * .5, right = x >= xm, below = y >= ym;
+      for (var i = below << 1 | right, j = i + 4; i < j; ++i) {
+        if (node = children[i & 3]) switch (i & 3) {
+         case 0:
+          find(node, x1, y1, xm, ym);
+          break;
+
+         case 1:
+          find(node, xm, y1, x2, ym);
+          break;
+
+         case 2:
+          find(node, x1, ym, xm, y2);
+          break;
+
+         case 3:
+          find(node, xm, ym, x2, y2);
+          break;
+        }
+      }
+    })(root, x0, y0, x3, y3);
+    return closestPoint;
+  }
+  d3.interpolateRgb = d3_interpolateRgb;
+  function d3_interpolateRgb(a, b) {
+    a = d3.rgb(a);
+    b = d3.rgb(b);
+    var ar = a.r, ag = a.g, ab = a.b, br = b.r - ar, bg = b.g - ag, bb = b.b - ab;
+    return function(t) {
+      return "#" + d3_rgb_hex(Math.round(ar + br * t)) + d3_rgb_hex(Math.round(ag + bg * t)) + d3_rgb_hex(Math.round(ab + bb * t));
+    };
+  }
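+  // Usage sketch (illustrative; not part of the upstream d3 build):
+  //   var color = d3.interpolateRgb("steelblue", "#ff8800");
+  //   color(0);   // "#4682b4"
+  //   color(0.5); // hex string midway between the endpoints, channel by channel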
+  d3.interpolateObject = d3_interpolateObject;
+  function d3_interpolateObject(a, b) {
+    var i = {}, c = {}, k;
+    for (k in a) {
+      if (k in b) {
+        i[k] = d3_interpolate(a[k], b[k]);
+      } else {
+        c[k] = a[k];
+      }
+    }
+    for (k in b) {
+      if (!(k in a)) {
+        c[k] = b[k];
+      }
+    }
+    return function(t) {
+      for (k in i) c[k] = i[k](t);
+      return c;
+    };
+  }
+  d3.interpolateNumber = d3_interpolateNumber;
+  function d3_interpolateNumber(a, b) {
+    a = +a, b = +b;
+    return function(t) {
+      return a * (1 - t) + b * t;
+    };
+  }
+  d3.interpolateString = d3_interpolateString;
+  function d3_interpolateString(a, b) {
+    var bi = d3_interpolate_numberA.lastIndex = d3_interpolate_numberB.lastIndex = 0, am, bm, bs, i = -1, s = [], q = [];
+    a = a + "", b = b + "";
+    while ((am = d3_interpolate_numberA.exec(a)) && (bm = d3_interpolate_numberB.exec(b))) {
+      if ((bs = bm.index) > bi) {
+        bs = b.slice(bi, bs);
+        if (s[i]) s[i] += bs; else s[++i] = bs;
+      }
+      if ((am = am[0]) === (bm = bm[0])) {
+        if (s[i]) s[i] += bm; else s[++i] = bm;
+      } else {
+        s[++i] = null;
+        q.push({
+          i: i,
+          x: d3_interpolateNumber(am, bm)
+        });
+      }
+      bi = d3_interpolate_numberB.lastIndex;
+    }
+    if (bi < b.length) {
+      bs = b.slice(bi);
+      if (s[i]) s[i] += bs; else s[++i] = bs;
+    }
+    return s.length < 2 ? q[0] ? (b = q[0].x, function(t) {
+      return b(t) + "";
+    }) : function() {
+      return b;
+    } : (b = q.length, function(t) {
+      for (var i = 0, o; i < b; ++i) s[(o = q[i]).i] = o.x(t);
+      return s.join("");
+    });
+  }
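+  // Usage sketch (illustrative; not part of the upstream d3 build): numbers embedded
+  // in the strings are interpolated; the surrounding text is kept verbatim.
+  //   d3.interpolateString("0px, 0px", "10px, 20px")(0.5); // "5px, 10px"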
+  var d3_interpolate_numberA = /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g, d3_interpolate_numberB = new RegExp(d3_interpolate_numberA.source, "g");
+  d3.interpolate = d3_interpolate;
+  function d3_interpolate(a, b) {
+    var i = d3.interpolators.length, f;
+    while (--i >= 0 && !(f = d3.interpolators[i](a, b))) ;
+    return f;
+  }
+  d3.interpolators = [ function(a, b) {
+    var t = typeof b;
+    return (t === "string" ? d3_rgb_names.has(b.toLowerCase()) || /^(#|rgb\(|hsl\()/i.test(b) ? d3_interpolateRgb : d3_interpolateString : b instanceof d3_color ? d3_interpolateRgb : Array.isArray(b) ? d3_interpolateArray : t === "object" && isNaN(b) ? d3_interpolateObject : d3_interpolateNumber)(a, b);
+  } ];
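+  // Usage sketch (illustrative; not part of the upstream d3 build): d3.interpolate
+  // walks d3.interpolators from last to first and uses the first match for b's type.
+  //   d3.interpolate(1, 10)(0.2);               // 2.8 (numbers)
+  //   d3.interpolate("red", "blue")(0.5);       // RGB color interpolation
+  //   d3.interpolate([0, 100], [10, 200])(0.5); // [5, 150] (element-wise)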
+  d3.interpolateArray = d3_interpolateArray;
+  function d3_interpolateArray(a, b) {
+    var x = [], c = [], na = a.length, nb = b.length, n0 = Math.min(a.length, b.length), i;
+    for (i = 0; i < n0; ++i) x.push(d3_interpolate(a[i], b[i]));
+    for (;i < na; ++i) c[i] = a[i];
+    for (;i < nb; ++i) c[i] = b[i];
+    return function(t) {
+      for (i = 0; i < n0; ++i) c[i] = x[i](t);
+      return c;
+    };
+  }
+  var d3_ease_default = function() {
+    return d3_identity;
+  };
+  var d3_ease = d3.map({
+    linear: d3_ease_default,
+    poly: d3_ease_poly,
+    quad: function() {
+      return d3_ease_quad;
+    },
+    cubic: function() {
+      return d3_ease_cubic;
+    },
+    sin: function() {
+      return d3_ease_sin;
+    },
+    exp: function() {
+      return d3_ease_exp;
+    },
+    circle: function() {
+      return d3_ease_circle;
+    },
+    elastic: d3_ease_elastic,
+    back: d3_ease_back,
+    bounce: function() {
+      return d3_ease_bounce;
+    }
+  });
+  var d3_ease_mode = d3.map({
+    "in": d3_identity,
+    out: d3_ease_reverse,
+    "in-out": d3_ease_reflect,
+    "out-in": function(f) {
+      return d3_ease_reflect(d3_ease_reverse(f));
+    }
+  });
+  d3.ease = function(name) {
+    var i = name.indexOf("-"), t = i >= 0 ? name.slice(0, i) : name, m = i >= 0 ? name.slice(i + 1) : "in";
+    t = d3_ease.get(t) || d3_ease_default;
+    m = d3_ease_mode.get(m) || d3_identity;
+    return d3_ease_clamp(m(t.apply(null, d3_arraySlice.call(arguments, 1))));
+  };
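+  // Usage sketch (illustrative; not part of the upstream d3 build): names combine a
+  // curve and a mode ("in", "out", "in-out", "out-in"); extra arguments parameterize
+  // the curve, and results are clamped to [0, 1].
+  //   d3.ease("cubic-in-out")(0.25);   // 0.0625
+  //   d3.ease("elastic", 1, 0.3)(0.5); // elastic curve with amplitude 1, period 0.3
+  //   d3.ease("bounce")(0.5);          // mode defaults to "in" when omitted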
+  function d3_ease_clamp(f) {
+    return function(t) {
+      return t <= 0 ? 0 : t >= 1 ? 1 : f(t);
+    };
+  }
+  function d3_ease_reverse(f) {
+    return function(t) {
+      return 1 - f(1 - t);
+    };
+  }
+  function d3_ease_reflect(f) {
+    return function(t) {
+      return .5 * (t < .5 ? f(2 * t) : 2 - f(2 - 2 * t));
+    };
+  }
+  function d3_ease_quad(t) {
+    return t * t;
+  }
+  function d3_ease_cubic(t) {
+    return t * t * t;
+  }
+  function d3_ease_cubicInOut(t) {
+    if (t <= 0) return 0;
+    if (t >= 1) return 1;
+    var t2 = t * t, t3 = t2 * t;
+    return 4 * (t < .5 ? t3 : 3 * (t - t2) + t3 - .75);
+  }
+  function d3_ease_poly(e) {
+    return function(t) {
+      return Math.pow(t, e);
+    };
+  }
+  function d3_ease_sin(t) {
+    return 1 - Math.cos(t * halfπ);
+  }
+  function d3_ease_exp(t) {
+    return Math.pow(2, 10 * (t - 1));
+  }
+  function d3_ease_circle(t) {
+    return 1 - Math.sqrt(1 - t * t);
+  }
+  function d3_ease_elastic(a, p) {
+    var s;
+    if (arguments.length < 2) p = .45;
+    if (arguments.length) s = p / τ * Math.asin(1 / a); else a = 1, s = p / 4;
+    return function(t) {
+      return 1 + a * Math.pow(2, -10 * t) * Math.sin((t - s) * τ / p);
+    };
+  }
+  function d3_ease_back(s) {
+    if (!s) s = 1.70158;
+    return function(t) {
+      return t * t * ((s + 1) * t - s);
+    };
+  }
+  function d3_ease_bounce(t) {
+    return t < 1 / 2.75 ? 7.5625 * t * t : t < 2 / 2.75 ? 7.5625 * (t -= 1.5 / 2.75) * t + .75 : t < 2.5 / 2.75 ? 7.5625 * (t -= 2.25 / 2.75) * t + .9375 : 7.5625 * (t -= 2.625 / 2.75) * t + .984375;
+  }
+  d3.interpolateHcl = d3_interpolateHcl;
+  function d3_interpolateHcl(a, b) {
+    a = d3.hcl(a);
+    b = d3.hcl(b);
+    var ah = a.h, ac = a.c, al = a.l, bh = b.h - ah, bc = b.c - ac, bl = b.l - al;
+    if (isNaN(bc)) bc = 0, ac = isNaN(ac) ? b.c : ac;
+    if (isNaN(bh)) bh = 0, ah = isNaN(ah) ? b.h : ah; else if (bh > 180) bh -= 360; else if (bh < -180) bh += 360;
+    return function(t) {
+      return d3_hcl_lab(ah + bh * t, ac + bc * t, al + bl * t) + "";
+    };
+  }
+  d3.interpolateHsl = d3_interpolateHsl;
+  function d3_interpolateHsl(a, b) {
+    a = d3.hsl(a);
+    b = d3.hsl(b);
+    var ah = a.h, as = a.s, al = a.l, bh = b.h - ah, bs = b.s - as, bl = b.l - al;
+    if (isNaN(bs)) bs = 0, as = isNaN(as) ? b.s : as;
+    if (isNaN(bh)) bh = 0, ah = isNaN(ah) ? b.h : ah; else if (bh > 180) bh -= 360; else if (bh < -180) bh += 360;
+    return function(t) {
+      return d3_hsl_rgb(ah + bh * t, as + bs * t, al + bl * t) + "";
+    };
+  }
+  d3.interpolateLab = d3_interpolateLab;
+  function d3_interpolateLab(a, b) {
+    a = d3.lab(a);
+    b = d3.lab(b);
+    var al = a.l, aa = a.a, ab = a.b, bl = b.l - al, ba = b.a - aa, bb = b.b - ab;
+    return function(t) {
+      return d3_lab_rgb(al + bl * t, aa + ba * t, ab + bb * t) + "";
+    };
+  }
+  d3.interpolateRound = d3_interpolateRound;
+  function d3_interpolateRound(a, b) {
+    b -= a;
+    return function(t) {
+      return Math.round(a + b * t);
+    };
+  }
+  d3.transform = function(string) {
+    var g = d3_document.createElementNS(d3.ns.prefix.svg, "g");
+    return (d3.transform = function(string) {
+      if (string != null) {
+        g.setAttribute("transform", string);
+        var t = g.transform.baseVal.consolidate();
+      }
+      return new d3_transform(t ? t.matrix : d3_transformIdentity);
+    })(string);
+  };
+  function d3_transform(m) {
+    var r0 = [ m.a, m.b ], r1 = [ m.c, m.d ], kx = d3_transformNormalize(r0), kz = d3_transformDot(r0, r1), ky = d3_transformNormalize(d3_transformCombine(r1, r0, -kz)) || 0;
+    if (r0[0] * r1[1] < r1[0] * r0[1]) {
+      r0[0] *= -1;
+      r0[1] *= -1;
+      kx *= -1;
+      kz *= -1;
+    }
+    this.rotate = (kx ? Math.atan2(r0[1], r0[0]) : Math.atan2(-r1[0], r1[1])) * d3_degrees;
+    this.translate = [ m.e, m.f ];
+    this.scale = [ kx, ky ];
+    this.skew = ky ? Math.atan2(kz, ky) * d3_degrees : 0;
+  }
+  d3_transform.prototype.toString = function() {
+    return "translate(" + this.translate + ")rotate(" + this.rotate + ")skewX(" + this.skew + ")scale(" + this.scale + ")";
+  };
+  function d3_transformDot(a, b) {
+    return a[0] * b[0] + a[1] * b[1];
+  }
+  function d3_transformNormalize(a) {
+    var k = Math.sqrt(d3_transformDot(a, a));
+    if (k) {
+      a[0] /= k;
+      a[1] /= k;
+    }
+    return k;
+  }
+  function d3_transformCombine(a, b, k) {
+    a[0] += k * b[0];
+    a[1] += k * b[1];
+    return a;
+  }
+  var d3_transformIdentity = {
+    a: 1,
+    b: 0,
+    c: 0,
+    d: 1,
+    e: 0,
+    f: 0
+  };
+  d3.interpolateTransform = d3_interpolateTransform;
+  function d3_interpolateTransformPop(s) {
+    return s.length ? s.pop() + "," : "";
+  }
+  function d3_interpolateTranslate(ta, tb, s, q) {
+    if (ta[0] !== tb[0] || ta[1] !== tb[1]) {
+      var i = s.push("translate(", null, ",", null, ")");
+      q.push({
+        i: i - 4,
+        x: d3_interpolateNumber(ta[0], tb[0])
+      }, {
+        i: i - 2,
+        x: d3_interpolateNumber(ta[1], tb[1])
+      });
+    } else if (tb[0] || tb[1]) {
+      s.push("translate(" + tb + ")");
+    }
+  }
+  function d3_interpolateRotate(ra, rb, s, q) {
+    if (ra !== rb) {
+      if (ra - rb > 180) rb += 360; else if (rb - ra > 180) ra += 360;
+      q.push({
+        i: s.push(d3_interpolateTransformPop(s) + "rotate(", null, ")") - 2,
+        x: d3_interpolateNumber(ra, rb)
+      });
+    } else if (rb) {
+      s.push(d3_interpolateTransformPop(s) + "rotate(" + rb + ")");
+    }
+  }
+  function d3_interpolateSkew(wa, wb, s, q) {
+    if (wa !== wb) {
+      q.push({
+        i: s.push(d3_interpolateTransformPop(s) + "skewX(", null, ")") - 2,
+        x: d3_interpolateNumber(wa, wb)
+      });
+    } else if (wb) {
+      s.push(d3_interpolateTransformPop(s) + "skewX(" + wb + ")");
+    }
+  }
+  function d3_interpolateScale(ka, kb, s, q) {
+    if (ka[0] !== kb[0] || ka[1] !== kb[1]) {
+      var i = s.push(d3_interpolateTransformPop(s) + "scale(", null, ",", null, ")");
+      q.push({
+        i: i - 4,
+        x: d3_interpolateNumber(ka[0], kb[0])
+      }, {
+        i: i - 2,
+        x: d3_interpolateNumber(ka[1], kb[1])
+      });
+    } else if (kb[0] !== 1 || kb[1] !== 1) {
+      s.push(d3_interpolateTransformPop(s) + "scale(" + kb + ")");
+    }
+  }
+  function d3_interpolateTransform(a, b) {
+    var s = [], q = [];
+    a = d3.transform(a), b = d3.transform(b);
+    d3_interpolateTranslate(a.translate, b.translate, s, q);
+    d3_interpolateRotate(a.rotate, b.rotate, s, q);
+    d3_interpolateSkew(a.skew, b.skew, s, q);
+    d3_interpolateScale(a.scale, b.scale, s, q);
+    a = b = null;
+    return function(t) {
+      var i = -1, n = q.length, o;
+      while (++i < n) s[(o = q[i]).i] = o.x(t);
+      return s.join("");
+    };
+  }
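+  // Usage sketch (illustrative; not part of the upstream d3 build; needs a DOM,
+  // since d3.transform parses via an SVG <g> element):
+  //   var i = d3.interpolateTransform("translate(0,0)", "translate(100,50)rotate(90)");
+  //   i(0.5); // "translate(50,25)rotate(45)"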
+  function d3_uninterpolateNumber(a, b) {
+    b = (b -= a = +a) || 1 / b;
+    return function(x) {
+      return (x - a) / b;
+    };
+  }
+  function d3_uninterpolateClamp(a, b) {
+    b = (b -= a = +a) || 1 / b;
+    return function(x) {
+      return Math.max(0, Math.min(1, (x - a) / b));
+    };
+  }
+  d3.layout = {};
+  d3.layout.bundle = function() {
+    return function(links) {
+      var paths = [], i = -1, n = links.length;
+      while (++i < n) paths.push(d3_layout_bundlePath(links[i]));
+      return paths;
+    };
+  };
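+  // Usage sketch (illustrative; not part of the upstream d3 build): given links whose
+  // source/target are hierarchy nodes (with .parent set), each returned path runs from
+  // the source up to the least common ancestor and back down to the target.
+  //   var paths = d3.layout.bundle()(links); // one array of nodes per input link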
+  function d3_layout_bundlePath(link) {
+    var start = link.source, end = link.target, lca = d3_layout_bundleLeastCommonAncestor(start, end), points = [ start ];
+    while (start !== lca) {
+      start = start.parent;
+      points.push(start);
+    }
+    var k = points.length;
+    while (end !== lca) {
+      points.splice(k, 0, end);
+      end = end.parent;
+    }
+    return points;
+  }
+  function d3_layout_bundleAncestors(node) {
+    var ancestors = [], parent = node.parent;
+    while (parent != null) {
+      ancestors.push(node);
+      node = parent;
+      parent = parent.parent;
+    }
+    ancestors.push(node);
+    return ancestors;
+  }
+  function d3_layout_bundleLeastCommonAncestor(a, b) {
+    if (a === b) return a;
+    var aNodes = d3_layout_bundleAncestors(a), bNodes = d3_layout_bundleAncestors(b), aNode = aNodes.pop(), bNode = bNodes.pop(), sharedNode = null;
+    while (aNode === bNode) {
+      sharedNode = aNode;
+      aNode = aNodes.pop();
+      bNode = bNodes.pop();
+    }
+    return sharedNode;
+  }
+  d3.layout.chord = function() {
+    var chord = {}, chords, groups, matrix, n, padding = 0, sortGroups, sortSubgroups, sortChords;
+    function relayout() {
+      var subgroups = {}, groupSums = [], groupIndex = d3.range(n), subgroupIndex = [], k, x, x0, i, j;
+      chords = [];
+      groups = [];
+      k = 0, i = -1;
+      while (++i < n) {
+        x = 0, j = -1;
+        while (++j < n) {
+          x += matrix[i][j];
+        }
+        groupSums.push(x);
+        subgroupIndex.push(d3.range(n));
+        k += x;
+      }
+      if (sortGroups) {
+        groupIndex.sort(function(a, b) {
+          return sortGroups(groupSums[a], groupSums[b]);
+        });
+      }
+      if (sortSubgroups) {
+        subgroupIndex.forEach(function(d, i) {
+          d.sort(function(a, b) {
+            return sortSubgroups(matrix[i][a], matrix[i][b]);
+          });
+        });
+      }
+      k = (τ - padding * n) / k;
+      x = 0, i = -1;
+      while (++i < n) {
+        x0 = x, j = -1;
+        while (++j < n) {
+          var di = groupIndex[i], dj = subgroupIndex[di][j], v = matrix[di][dj], a0 = x, a1 = x += v * k;
+          subgroups[di + "-" + dj] = {
+            index: di,
+            subindex: dj,
+            startAngle: a0,
+            endAngle: a1,
+            value: v
+          };
+        }
+        groups[di] = {
+          index: di,
+          startAngle: x0,
+          endAngle: x,
+          value: groupSums[di]
+        };
+        x += padding;
+      }
+      i = -1;
+      while (++i < n) {
+        j = i - 1;
+        while (++j < n) {
+          var source = subgroups[i + "-" + j], target = subgroups[j + "-" + i];
+          if (source.value || target.value) {
+            chords.push(source.value < target.value ? {
+              source: target,
+              target: source
+            } : {
+              source: source,
+              target: target
+            });
+          }
+        }
+      }
+      if (sortChords) resort();
+    }
+    function resort() {
+      chords.sort(function(a, b) {
+        return sortChords((a.source.value + a.target.value) / 2, (b.source.value + b.target.value) / 2);
+      });
+    }
+    chord.matrix = function(x) {
+      if (!arguments.length) return matrix;
+      n = (matrix = x) && matrix.length;
+      chords = groups = null;
+      return chord;
+    };
+    chord.padding = function(x) {
+      if (!arguments.length) return padding;
+      padding = x;
+      chords = groups = null;
+      return chord;
+    };
+    chord.sortGroups = function(x) {
+      if (!arguments.length) return sortGroups;
+      sortGroups = x;
+      chords = groups = null;
+      return chord;
+    };
+    chord.sortSubgroups = function(x) {
+      if (!arguments.length) return sortSubgroups;
+      sortSubgroups = x;
+      chords = null;
+      return chord;
+    };
+    chord.sortChords = function(x) {
+      if (!arguments.length) return sortChords;
+      sortChords = x;
+      if (chords) resort();
+      return chord;
+    };
+    chord.chords = function() {
+      if (!chords) relayout();
+      return chords;
+    };
+    chord.groups = function() {
+      if (!groups) relayout();
+      return groups;
+    };
+    return chord;
+  };
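+  // Usage sketch (illustrative; not part of the upstream d3 build):
+  //   var chord = d3.layout.chord().padding(0.05).matrix([[0, 5], [5, 0]]);
+  //   chord.groups(); // one arc per matrix row: {index, startAngle, endAngle, value}
+  //   chord.chords(); // one ribbon per nonzero pair, with source/target subgroups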
+  d3.layout.force = function() {
+    var force = {}, event = d3.dispatch("start", "tick", "end"), timer, size = [ 1, 1 ], drag, alpha, friction = .9, linkDistance = d3_layout_forceLinkDistance, linkStrength = d3_layout_forceLinkStrength, charge = -30, chargeDistance2 = d3_layout_forceChargeDistance2, gravity = .1, theta2 = .64, nodes = [], links = [], distances, strengths, charges;
+    function repulse(node) {
+      return function(quad, x1, _, x2) {
+        if (quad.point !== node) {
+          var dx = quad.cx - node.x, dy = quad.cy - node.y, dw = x2 - x1, dn = dx * dx + dy * dy;
+          if (dw * dw / theta2 < dn) {
+            if (dn < chargeDistance2) {
+              var k = quad.charge / dn;
+              node.px -= dx * k;
+              node.py -= dy * k;
+            }
+            return true;
+          }
+          if (quad.point && dn && dn < chargeDistance2) {
+            var k = quad.pointCharge / dn;
+            node.px -= dx * k;
+            node.py -= dy * k;
+          }
+        }
+        return !quad.charge;
+      };
+    }
+    force.tick = function() {
+      if ((alpha *= .99) < .005) {
+        timer = null;
+        event.end({
+          type: "end",
+          alpha: alpha = 0
+        });
+        return true;
+      }
+      var n = nodes.length, m = links.length, q, i, o, s, t, l, k, x, y;
+      for (i = 0; i < m; ++i) {
+        o = links[i];
+        s = o.source;
+        t = o.target;
+        x = t.x - s.x;
+        y = t.y - s.y;
+        if (l = x * x + y * y) {
+          l = alpha * strengths[i] * ((l = Math.sqrt(l)) - distances[i]) / l;
+          x *= l;
+          y *= l;
+          t.x -= x * (k = s.weight + t.weight ? s.weight / (s.weight + t.weight) : .5);
+          t.y -= y * k;
+          s.x += x * (k = 1 - k);
+          s.y += y * k;
+        }
+      }
+      if (k = alpha * gravity) {
+        x = size[0] / 2;
+        y = size[1] / 2;
+        i = -1;
+        if (k) while (++i < n) {
+          o = nodes[i];
+          o.x += (x - o.x) * k;
+          o.y += (y - o.y) * k;
+        }
+      }
+      if (charge) {
+        d3_layout_forceAccumulate(q = d3.geom.quadtree(nodes), alpha, charges);
+        i = -1;
+        while (++i < n) {
+          if (!(o = nodes[i]).fixed) {
+            q.visit(repulse(o));
+          }
+        }
+      }
+      i = -1;
+      while (++i < n) {
+        o = nodes[i];
+        if (o.fixed) {
+          o.x = o.px;
+          o.y = o.py;
+        } else {
+          o.x -= (o.px - (o.px = o.x)) * friction;
+          o.y -= (o.py - (o.py = o.y)) * friction;
+        }
+      }
+      event.tick({
+        type: "tick",
+        alpha: alpha
+      });
+    };
+    force.nodes = function(x) {
+      if (!arguments.length) return nodes;
+      nodes = x;
+      return force;
+    };
+    force.links = function(x) {
+      if (!arguments.length) return links;
+      links = x;
+      return force;
+    };
+    force.size = function(x) {
+      if (!arguments.length) return size;
+      size = x;
+      return force;
+    };
+    force.linkDistance = function(x) {
+      if (!arguments.length) return linkDistance;
+      linkDistance = typeof x === "function" ? x : +x;
+      return force;
+    };
+    force.distance = force.linkDistance;
+    force.linkStrength = function(x) {
+      if (!arguments.length) return linkStrength;
+      linkStrength = typeof x === "function" ? x : +x;
+      return force;
+    };
+    force.friction = function(x) {
+      if (!arguments.length) return friction;
+      friction = +x;
+      return force;
+    };
+    force.charge = function(x) {
+      if (!arguments.length) return charge;
+      charge = typeof x === "function" ? x : +x;
+      return force;
+    };
+    force.chargeDistance = function(x) {
+      if (!arguments.length) return Math.sqrt(chargeDistance2);
+      chargeDistance2 = x * x;
+      return force;
+    };
+    force.gravity = function(x) {
+      if (!arguments.length) return gravity;
+      gravity = +x;
+      return force;
+    };
+    force.theta = function(x) {
+      if (!arguments.length) return Math.sqrt(theta2);
+      theta2 = x * x;
+      return force;
+    };
+    force.alpha = function(x) {
+      if (!arguments.length) return alpha;
+      x = +x;
+      if (alpha) {
+        if (x > 0) {
+          alpha = x;
+        } else {
+          timer.c = null, timer.t = NaN, timer = null;
+          event.end({
+            type: "end",
+            alpha: alpha = 0
+          });
+        }
+      } else if (x > 0) {
+        event.start({
+          type: "start",
+          alpha: alpha = x
+        });
+        timer = d3_timer(force.tick);
+      }
+      return force;
+    };
+    force.start = function() {
+      var i, n = nodes.length, m = links.length, w = size[0], h = size[1], neighbors, o;
+      for (i = 0; i < n; ++i) {
+        (o = nodes[i]).index = i;
+        o.weight = 0;
+      }
+      for (i = 0; i < m; ++i) {
+        o = links[i];
+        if (typeof o.source == "number") o.source = nodes[o.source];
+        if (typeof o.target == "number") o.target = nodes[o.target];
+        ++o.source.weight;
+        ++o.target.weight;
+      }
+      for (i = 0; i < n; ++i) {
+        o = nodes[i];
+        if (isNaN(o.x)) o.x = position("x", w);
+        if (isNaN(o.y)) o.y = position("y", h);
+        if (isNaN(o.px)) o.px = o.x;
+        if (isNaN(o.py)) o.py = o.y;
+      }
+      distances = [];
+      if (typeof linkDistance === "function") for (i = 0; i < m; ++i) distances[i] = +linkDistance.call(this, links[i], i); else for (i = 0; i < m; ++i) distances[i] = linkDistance;
+      strengths = [];
+      if (typeof linkStrength === "function") for (i = 0; i < m; ++i) strengths[i] = +linkStrength.call(this, links[i], i); else for (i = 0; i < m; ++i) strengths[i] = linkStrength;
+      charges = [];
+      if (typeof charge === "function") for (i = 0; i < n; ++i) charges[i] = +charge.call(this, nodes[i], i); else for (i = 0; i < n; ++i) charges[i] = charge;
+      function position(dimension, size) {
+        if (!neighbors) {
+          neighbors = new Array(n);
+          for (j = 0; j < n; ++j) {
+            neighbors[j] = [];
+          }
+          for (j = 0; j < m; ++j) {
+            var o = links[j];
+            neighbors[o.source.index].push(o.target);
+            neighbors[o.target.index].push(o.source);
+          }
+        }
+        var candidates = neighbors[i], j = -1, l = candidates.length, x;
+        while (++j < l) if (!isNaN(x = candidates[j][dimension])) return x;
+        return Math.random() * size;
+      }
+      return force.resume();
+    };
+    force.resume = function() {
+      return force.alpha(.1);
+    };
+    force.stop = function() {
+      return force.alpha(0);
+    };
+    force.drag = function() {
+      if (!drag) drag = d3.behavior.drag().origin(d3_identity).on("dragstart.force", d3_layout_forceDragstart).on("drag.force", dragmove).on("dragend.force", d3_layout_forceDragend);
+      if (!arguments.length) return drag;
+      this.on("mouseover.force", d3_layout_forceMouseover).on("mouseout.force", d3_layout_forceMouseout).call(drag);
+    };
+    function dragmove(d) {
+      d.px = d3.event.x, d.py = d3.event.y;
+      force.resume();
+    }
+    return d3.rebind(force, event, "on");
+  };
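+  // Usage sketch (illustrative; not part of the upstream d3 build). repulse() above is
+  // a Barnes-Hut approximation over a quadtree rebuilt on every tick; theta2 stores the
+  // squared accuracy parameter (default theta = 0.8).
+  //   var force = d3.layout.force()
+  //       .size([960, 500])
+  //       .nodes([{}, {}, {}])
+  //       .links([ {source: 0, target: 1}, {source: 1, target: 2} ])
+  //       .linkDistance(40)
+  //       .charge(-120)
+  //       .on("tick", function() { /* read node.x / node.y here */ })
+  //       .start(); // initializes weights and positions, then resumes with alpha = 0.1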
+  function d3_layout_forceDragstart(d) {
+    d.fixed |= 2;
+  }
+  function d3_layout_forceDragend(d) {
+    d.fixed &= ~6;
+  }
+  function d3_layout_forceMouseover(d) {
+    d.fixed |= 4;
+    d.px = d.x, d.py = d.y;
+  }
+  function d3_layout_forceMouseout(d) {
+    d.fixed &= ~4;
+  }
+  function d3_layout_forceAccumulate(quad, alpha, charges) {
+    var cx = 0, cy = 0;
+    quad.charge = 0;
+    if (!quad.leaf) {
+      var nodes = quad.nodes, n = nodes.length, i = -1, c;
+      while (++i < n) {
+        c = nodes[i];
+        if (c == null) continue;
+        d3_layout_forceAccumulate(c, alpha, charges);
+        quad.charge += c.charge;
+        cx += c.charge * c.cx;
+        cy += c.charge * c.cy;
+      }
+    }
+    if (quad.point) {
+      if (!quad.leaf) {
+        quad.point.x += Math.random() - .5;
+        quad.point.y += Math.random() - .5;
+      }
+      var k = alpha * charges[quad.point.index];
+      quad.charge += quad.pointCharge = k;
+      cx += k * quad.point.x;
+      cy += k * quad.point.y;
+    }
+    quad.cx = cx / quad.charge;
+    quad.cy = cy / quad.charge;
+  }
+  var d3_layout_forceLinkDistance = 20, d3_layout_forceLinkStrength = 1, d3_layout_forceChargeDistance2 = Infinity;
+  d3.layout.hierarchy = function() {
+    var sort = d3_layout_hierarchySort, children = d3_layout_hierarchyChildren, value = d3_layout_hierarchyValue;
+    function hierarchy(root) {
+      var stack = [ root ], nodes = [], node;
+      root.depth = 0;
+      while ((node = stack.pop()) != null) {
+        nodes.push(node);
+        if ((childs = children.call(hierarchy, node, node.depth)) && (n = childs.length)) {
+          var n, childs, child;
+          while (--n >= 0) {
+            stack.push(child = childs[n]);
+            child.parent = node;
+            child.depth = node.depth + 1;
+          }
+          if (value) node.value = 0;
+          node.children = childs;
+        } else {
+          if (value) node.value = +value.call(hierarchy, node, node.depth) || 0;
+          delete node.children;
+        }
+      }
+      d3_layout_hierarchyVisitAfter(root, function(node) {
+        var childs, parent;
+        if (sort && (childs = node.children)) childs.sort(sort);
+        if (value && (parent = node.parent)) parent.value += node.value;
+      });
+      return nodes;
+    }
+    hierarchy.sort = function(x) {
+      if (!arguments.length) return sort;
+      sort = x;
+      return hierarchy;
+    };
+    hierarchy.children = function(x) {
+      if (!arguments.length) return children;
+      children = x;
+      return hierarchy;
+    };
+    hierarchy.value = function(x) {
+      if (!arguments.length) return value;
+      value = x;
+      return hierarchy;
+    };
+    hierarchy.revalue = function(root) {
+      if (value) {
+        d3_layout_hierarchyVisitBefore(root, function(node) {
+          if (node.children) node.value = 0;
+        });
+        d3_layout_hierarchyVisitAfter(root, function(node) {
+          var parent;
+          if (!node.children) node.value = +value.call(hierarchy, node, node.depth) || 0;
+          if (parent = node.parent) parent.value += node.value;
+        });
+      }
+      return root;
+    };
+    return hierarchy;
+  };
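+  // Usage sketch (illustrative; not part of the upstream d3 build): the defaults read
+  // node.children and node.value and sort siblings by descending value.
+  //   var root = { children: [ { value: 3 }, { value: 1 } ] };
+  //   var nodes = d3.layout.hierarchy()(root); // flat node list; fills depth, parent, value
+  //   root.value; // 4 -- internal values are summed up from the leaves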
+  function d3_layout_hierarchyRebind(object, hierarchy) {
+    d3.rebind(object, hierarchy, "sort", "children", "value");
+    object.nodes = object;
+    object.links = d3_layout_hierarchyLinks;
+    return object;
+  }
+  function d3_layout_hierarchyVisitBefore(node, callback) {
+    var nodes = [ node ];
+    while ((node = nodes.pop()) != null) {
+      callback(node);
+      if ((children = node.children) && (n = children.length)) {
+        var n, children;
+        while (--n >= 0) nodes.push(children[n]);
+      }
+    }
+  }
+  function d3_layout_hierarchyVisitAfter(node, callback) {
+    var nodes = [ node ], nodes2 = [];
+    while ((node = nodes.pop()) != null) {
+      nodes2.push(node);
+      if ((children = node.children) && (n = children.length)) {
+        var i = -1, n, children;
+        while (++i < n) nodes.push(children[i]);
+      }
+    }
+    while ((node = nodes2.pop()) != null) {
+      callback(node);
+    }
+  }
+  function d3_layout_hierarchyChildren(d) {
+    return d.children;
+  }
+  function d3_layout_hierarchyValue(d) {
+    return d.value;
+  }
+  function d3_layout_hierarchySort(a, b) {
+    return b.value - a.value;
+  }
+  function d3_layout_hierarchyLinks(nodes) {
+    return d3.merge(nodes.map(function(parent) {
+      return (parent.children || []).map(function(child) {
+        return {
+          source: parent,
+          target: child
+        };
+      });
+    }));
+  }
+  d3.layout.partition = function() {
+    var hierarchy = d3.layout.hierarchy(), size = [ 1, 1 ];
+    function position(node, x, dx, dy) {
+      var children = node.children;
+      node.x = x;
+      node.y = node.depth * dy;
+      node.dx = dx;
+      node.dy = dy;
+      if (children && (n = children.length)) {
+        var i = -1, n, c, d;
+        dx = node.value ? dx / node.value : 0;
+        while (++i < n) {
+          position(c = children[i], x, d = c.value * dx, dy);
+          x += d;
+        }
+      }
+    }
+    function depth(node) {
+      var children = node.children, d = 0;
+      if (children && (n = children.length)) {
+        var i = -1, n;
+        while (++i < n) d = Math.max(d, depth(children[i]));
+      }
+      return 1 + d;
+    }
+    function partition(d, i) {
+      var nodes = hierarchy.call(this, d, i);
+      position(nodes[0], 0, size[0], size[1] / depth(nodes[0]));
+      return nodes;
+    }
+    partition.size = function(x) {
+      if (!arguments.length) return size;
+      size = x;
+      return partition;
+    };
+    return d3_layout_hierarchyRebind(partition, hierarchy);
+  };
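+  // Usage sketch (illustrative; not part of the upstream d3 build; root as in the
+  // d3.layout.hierarchy sketch above):
+  //   var nodes = d3.layout.partition().size([360, 200])(root);
+  //   // each node gets x, y, dx, dy -- cells for an icicle or sunburst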
+  d3.layout.pie = function() {
+    var value = Number, sort = d3_layout_pieSortByValue, startAngle = 0, endAngle = τ, padAngle = 0;
+    function pie(data) {
+      var n = data.length, values = data.map(function(d, i) {
+        return +value.call(pie, d, i);
+      }), a = +(typeof startAngle === "function" ? startAngle.apply(this, arguments) : startAngle), da = (typeof endAngle === "function" ? endAngle.apply(this, arguments) : endAngle) - a, p = Math.min(Math.abs(da) / n, +(typeof padAngle === "function" ? padAngle.apply(this, arguments) : padAngle)), pa = p * (da < 0 ? -1 : 1), sum = d3.sum(values), k = sum ? (da - n * pa) / sum : 0, index = d3.range(n), arcs = [], v;
+      if (sort != null) index.sort(sort === d3_layout_pieSortByValue ? function(i, j) {
+        return values[j] - values[i];
+      } : function(i, j) {
+        return sort(data[i], data[j]);
+      });
+      index.forEach(function(i) {
+        arcs[i] = {
+          data: data[i],
+          value: v = values[i],
+          startAngle: a,
+          endAngle: a += v * k + pa,
+          padAngle: p
+        };
+      });
+      return arcs;
+    }
+    pie.value = function(_) {
+      if (!arguments.length) return value;
+      value = _;
+      return pie;
+    };
+    pie.sort = function(_) {
+      if (!arguments.length) return sort;
+      sort = _;
+      return pie;
+    };
+    pie.startAngle = function(_) {
+      if (!arguments.length) return startAngle;
+      startAngle = _;
+      return pie;
+    };
+    pie.endAngle = function(_) {
+      if (!arguments.length) return endAngle;
+      endAngle = _;
+      return pie;
+    };
+    pie.padAngle = function(_) {
+      if (!arguments.length) return padAngle;
+      padAngle = _;
+      return pie;
+    };
+    return pie;
+  };
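+  // Usage sketch (illustrative; not part of the upstream d3 build):
+  //   var arcs = d3.layout.pie().padAngle(0.01)([4, 2, 6]);
+  //   // arcs[i] is {data, value, startAngle, endAngle, padAngle}; output order matches
+  //   // the input, but angles are assigned in descending-value order by default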
+  var d3_layout_pieSortByValue = {};
+  d3.layout.stack = function() {
+    var values = d3_identity, order = d3_layout_stackOrderDefault, offset = d3_layout_stackOffsetZero, out = d3_layout_stackOut, x = d3_layout_stackX, y = d3_layout_stackY;
+    function stack(data, index) {
+      if (!(n = data.length)) return data;
+      var series = data.map(function(d, i) {
+        return values.call(stack, d, i);
+      });
+      var points = series.map(function(d) {
+        return d.map(function(v, i) {
+          return [ x.call(stack, v, i), y.call(stack, v, i) ];
+        });
+      });
+      var orders = order.call(stack, points, index);
+      series = d3.permute(series, orders);
+      points = d3.permute(points, orders);
+      var offsets = offset.call(stack, points, index);
+      var m = series[0].length, n, i, j, o;
+      for (j = 0; j < m; ++j) {
+        out.call(stack, series[0][j], o = offsets[j], points[0][j][1]);
+        for (i = 1; i < n; ++i) {
+          out.call(stack, series[i][j], o += points[i - 1][j][1], points[i][j][1]);
+        }
+      }
+      return data;
+    }
+    stack.values = function(x) {
+      if (!arguments.length) return values;
+      values = x;
+      return stack;
+    };
+    stack.order = function(x) {
+      if (!arguments.length) return order;
+      order = typeof x === "function" ? x : d3_layout_stackOrders.get(x) || d3_layout_stackOrderDefault;
+      return stack;
+    };
+    stack.offset = function(x) {
+      if (!arguments.length) return offset;
+      offset = typeof x === "function" ? x : d3_layout_stackOffsets.get(x) || d3_layout_stackOffsetZero;
+      return stack;
+    };
+    stack.x = function(z) {
+      if (!arguments.length) return x;
+      x = z;
+      return stack;
+    };
+    stack.y = function(z) {
+      if (!arguments.length) return y;
+      y = z;
+      return stack;
+    };
+    stack.out = function(z) {
+      if (!arguments.length) return out;
+      out = z;
+      return stack;
+    };
+    return stack;
+  };
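+  // Usage sketch (illustrative; not part of the upstream d3 build): layers are arrays
+  // of points with x and y; the offset writes each point's baseline into y0.
+  //   var layers = d3.layout.stack()([
+  //     [ {x: 0, y: 2}, {x: 1, y: 3} ],
+  //     [ {x: 0, y: 1}, {x: 1, y: 4} ]
+  //   ]);
+  //   layers[1][0].y0; // 2 -- the second layer sits on top of the first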
+  function d3_layout_stackX(d) {
+    return d.x;
+  }
+  function d3_layout_stackY(d) {
+    return d.y;
+  }
+  function d3_layout_stackOut(d, y0, y) {
+    d.y0 = y0;
+    d.y = y;
+  }
+  var d3_layout_stackOrders = d3.map({
+    "inside-out": function(data) {
+      var n = data.length, i, j, max = data.map(d3_layout_stackMaxIndex), sums = data.map(d3_layout_stackReduceSum), index = d3.range(n).sort(function(a, b) {
+        return max[a] - max[b];
+      }), top = 0, bottom = 0, tops = [], bottoms = [];
+      for (i = 0; i < n; ++i) {
+        j = index[i];
+        if (top < bottom) {
+          top += sums[j];
+          tops.push(j);
+        } else {
+          bottom += sums[j];
+          bottoms.push(j);
+        }
+      }
+      return bottoms.reverse().concat(tops);
+    },
+    reverse: function(data) {
+      return d3.range(data.length).reverse();
+    },
+    "default": d3_layout_stackOrderDefault
+  });
+  var d3_layout_stackOffsets = d3.map({
+    silhouette: function(data) {
+      var n = data.length, m = data[0].length, sums = [], max = 0, i, j, o, y0 = [];
+      for (j = 0; j < m; ++j) {
+        for (i = 0, o = 0; i < n; i++) o += data[i][j][1];
+        if (o > max) max = o;
+        sums.push(o);
+      }
+      for (j = 0; j < m; ++j) {
+        y0[j] = (max - sums[j]) / 2;
+      }
+      return y0;
+    },
+    wiggle: function(data) {
+      var n = data.length, x = data[0], m = x.length, i, j, k, s1, s2, s3, dx, o, o0, y0 = [];
+      y0[0] = o = o0 = 0;
+      for (j = 1; j < m; ++j) {
+        for (i = 0, s1 = 0; i < n; ++i) s1 += data[i][j][1];
+        for (i = 0, s2 = 0, dx = x[j][0] - x[j - 1][0]; i < n; ++i) {
+          for (k = 0, s3 = (data[i][j][1] - data[i][j - 1][1]) / (2 * dx); k < i; ++k) {
+            s3 += (data[k][j][1] - data[k][j - 1][1]) / dx;
+          }
+          s2 += s3 * data[i][j][1];
+        }
+        y0[j] = o -= s1 ? s2 / s1 * dx : 0;
+        if (o < o0) o0 = o;
+      }
+      for (j = 0; j < m; ++j) y0[j] -= o0;
+      return y0;
+    },
+    expand: function(data) {
+      var n = data.length, m = data[0].length, k = 1 / n, i, j, o, y0 = [];
+      for (j = 0; j < m; ++j) {
+        for (i = 0, o = 0; i < n; i++) o += data[i][j][1];
+        if (o) for (i = 0; i < n; i++) data[i][j][1] /= o; else for (i = 0; i < n; i++) data[i][j][1] = k;
+      }
+      for (j = 0; j < m; ++j) y0[j] = 0;
+      return y0;
+    },
+    zero: d3_layout_stackOffsetZero
+  });
+  function d3_layout_stackOrderDefault(data) {
+    return d3.range(data.length);
+  }
+  function d3_layout_stackOffsetZero(data) {
+    var j = -1, m = data[0].length, y0 = [];
+    while (++j < m) y0[j] = 0;
+    return y0;
+  }
+  function d3_layout_stackMaxIndex(array) {
+    var i = 1, j = 0, v = array[0][1], k, n = array.length;
+    for (;i < n; ++i) {
+      if ((k = array[i][1]) > v) {
+        j = i;
+        v = k;
+      }
+    }
+    return j;
+  }
+  function d3_layout_stackReduceSum(d) {
+    return d.reduce(d3_layout_stackSum, 0);
+  }
+  function d3_layout_stackSum(p, d) {
+    return p + d[1];
+  }
+  d3.layout.histogram = function() {
+    var frequency = true, valuer = Number, ranger = d3_layout_histogramRange, binner = d3_layout_histogramBinSturges;
+    function histogram(data, i) {
+      var bins = [], values = data.map(valuer, this), range = ranger.call(this, values, i), thresholds = binner.call(this, range, values, i), bin, i = -1, n = values.length, m = thresholds.length - 1, k = frequency ? 1 : 1 / n, x;
+      while (++i < m) {
+        bin = bins[i] = [];
+        bin.dx = thresholds[i + 1] - (bin.x = thresholds[i]);
+        bin.y = 0;
+      }
+      if (m > 0) {
+        i = -1;
+        while (++i < n) {
+          x = values[i];
+          if (x >= range[0] && x <= range[1]) {
+            bin = bins[d3.bisect(thresholds, x, 1, m) - 1];
+            bin.y += k;
+            bin.push(data[i]);
+          }
+        }
+      }
+      return bins;
+    }
+    histogram.value = function(x) {
+      if (!arguments.length) return valuer;
+      valuer = x;
+      return histogram;
+    };
+    histogram.range = function(x) {
+      if (!arguments.length) return ranger;
+      ranger = d3_functor(x);
+      return histogram;
+    };
+    histogram.bins = function(x) {
+      if (!arguments.length) return binner;
+      binner = typeof x === "number" ? function(range) {
+        return d3_layout_histogramBinFixed(range, x);
+      } : d3_functor(x);
+      return histogram;
+    };
+    histogram.frequency = function(x) {
+      if (!arguments.length) return frequency;
+      frequency = !!x;
+      return histogram;
+    };
+    return histogram;
+  };
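+  // Usage sketch (illustrative; not part of the upstream d3 build):
+  //   var bins = d3.layout.histogram().bins(4)([1, 2, 2, 3, 8, 9]);
+  //   // each bin is an array of the values it holds, plus x (lower bound), dx (width)
+  //   // and y (count, or a fraction when frequency(false) is set)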
+  function d3_layout_histogramBinSturges(range, values) {
+    return d3_layout_histogramBinFixed(range, Math.ceil(Math.log(values.length) / Math.LN2 + 1));
+  }
+  function d3_layout_histogramBinFixed(range, n) {
+    var x = -1, b = +range[0], m = (range[1] - b) / n, f = [];
+    while (++x <= n) f[x] = m * x + b;
+    return f;
+  }
+  function d3_layout_histogramRange(values) {
+    return [ d3.min(values), d3.max(values) ];
+  }
+  d3.layout.pack = function() {
+    var hierarchy = d3.layout.hierarchy().sort(d3_layout_packSort), padding = 0, size = [ 1, 1 ], radius;
+    function pack(d, i) {
+      var nodes = hierarchy.call(this, d, i), root = nodes[0], w = size[0], h = size[1], r = radius == null ? Math.sqrt : typeof radius === "function" ? radius : function() {
+        return radius;
+      };
+      root.x = root.y = 0;
+      d3_layout_hierarchyVisitAfter(root, function(d) {
+        d.r = +r(d.value);
+      });
+      d3_layout_hierarchyVisitAfter(root, d3_layout_packSiblings);
+      if (padding) {
+        var dr = padding * (radius ? 1 : Math.max(2 * root.r / w, 2 * root.r / h)) / 2;
+        d3_layout_hierarchyVisitAfter(root, function(d) {
+          d.r += dr;
+        });
+        d3_layout_hierarchyVisitAfter(root, d3_layout_packSiblings);
+        d3_layout_hierarchyVisitAfter(root, function(d) {
+          d.r -= dr;
+        });
+      }
+      d3_layout_packTransform(root, w / 2, h / 2, radius ? 1 : 1 / Math.max(2 * root.r / w, 2 * root.r / h));
+      return nodes;
+    }
+    pack.size = function(_) {
+      if (!arguments.length) return size;
+      size = _;
+      return pack;
+    };
+    pack.radius = function(_) {
+      if (!arguments.length) return radius;
+      radius = _ == null || typeof _ === "function" ? _ : +_;
+      return pack;
+    };
+    pack.padding = function(_) {
+      if (!arguments.length) return padding;
+      padding = +_;
+      return pack;
+    };
+    return d3_layout_hierarchyRebind(pack, hierarchy);
+  };
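+  // Usage sketch (illustrative; not part of the upstream d3 build; root as in the
+  // d3.layout.hierarchy sketch above):
+  //   var nodes = d3.layout.pack().size([500, 500]).padding(2)(root);
+  //   // circle packing: each node gets x, y and a radius r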
+  function d3_layout_packSort(a, b) {
+    return a.value - b.value;
+  }
+  function d3_layout_packInsert(a, b) {
+    var c = a._pack_next;
+    a._pack_next = b;
+    b._pack_prev = a;
+    b._pack_next = c;
+    c._pack_prev = b;
+  }
+  function d3_layout_packSplice(a, b) {
+    a._pack_next = b;
+    b._pack_prev = a;
+  }
+  function d3_layout_packIntersects(a, b) {
+    var dx = b.x - a.x, dy = b.y - a.y, dr = a.r + b.r;
+    return .999 * dr * dr > dx * dx + dy * dy;
+  }
+  function d3_layout_packSiblings(node) {
+    if (!(nodes = node.children) || !(n = nodes.length)) return;
+    var nodes, xMin = Infinity, xMax = -Infinity, yMin = Infinity, yMax = -Infinity, a, b, c, i, j, k, n;
+    function bound(node) {
+      xMin = Math.min(node.x - node.r, xMin);
+      xMax = Math.max(node.x + node.r, xMax);
+      yMin = Math.min(node.y - node.r, yMin);
+      yMax = Math.max(node.y + node.r, yMax);
+    }
+    nodes.forEach(d3_layout_packLink);
+    a = nodes[0];
+    a.x = -a.r;
+    a.y = 0;
+    bound(a);
+    if (n > 1) {
+      b = nodes[1];
+      b.x = b.r;
+      b.y = 0;
+      bound(b);
+      if (n > 2) {
+        c = nodes[2];
+        d3_layout_packPlace(a, b, c);
+        bound(c);
+        d3_layout_packInsert(a, c);
+        a._pack_prev = c;
+        d3_layout_packInsert(c, b);
+        b = a._pack_next;
+        for (i = 3; i < n; i++) {
+          d3_layout_packPlace(a, b, c = nodes[i]);
+          var isect = 0, s1 = 1, s2 = 1;
+          for (j = b._pack_next; j !== b; j = j._pack_next, s1++) {
+            if (d3_layout_packIntersects(j, c)) {
+              isect = 1;
+              break;
+            }
+          }
+          if (isect == 1) {
+            for (k = a._pack_prev; k !== j._pack_prev; k = k._pack_prev, s2++) {
+              if (d3_layout_packIntersects(k, c)) {
+                break;
+              }
+            }
+          }
+          if (isect) {
+            if (s1 < s2 || s1 == s2 && b.r < a.r) d3_layout_packSplice(a, b = j); else d3_layout_packSplice(a = k, b);
+            i--;
+          } else {
+            d3_layout_packInsert(a, c);
+            b = c;
+            bound(c);
+          }
+        }
+      }
+    }
+    var cx = (xMin + xMax) / 2, cy = (yMin + yMax) / 2, cr = 0;
+    for (i = 0; i < n; i++) {
+      c = nodes[i];
+      c.x -= cx;
+      c.y -= cy;
+      cr = Math.max(cr, c.r + Math.sqrt(c.x * c.x + c.y * c.y));
+    }
+    node.r = cr;
+    nodes.forEach(d3_layout_packUnlink);
+  }
+  function d3_layout_packLink(node) {
+    node._pack_next = node._pack_prev = node;
+  }
+  function d3_layout_packUnlink(node) {
+    delete node._pack_next;
+    delete node._pack_prev;
+  }
+  function d3_layout_packTransform(node, x, y, k) {
+    var children = node.children;
+    node.x = x += k * node.x;
+    node.y = y += k * node.y;
+    node.r *= k;
+    if (children) {
+      var i = -1, n = children.length;
+      while (++i < n) d3_layout_packTransform(children[i], x, y, k);
+    }
+  }
+  function d3_layout_packPlace(a, b, c) {
+    var db = a.r + c.r, dx = b.x - a.x, dy = b.y - a.y;
+    if (db && (dx || dy)) {
+      var da = b.r + c.r, dc = dx * dx + dy * dy;
+      da *= da;
+      db *= db;
+      var x = .5 + (db - da) / (2 * dc), y = Math.sqrt(Math.max(0, 2 * da * (db + dc) - (db -= dc) * db - da * da)) / (2 * dc);
+      c.x = a.x + x * dx + y * dy;
+      c.y = a.y + x * dy - y * dx;
+    } else {
+      c.x = a.x + db;
+      c.y = a.y;
+    }
+  }
+  d3.layout.tree = function() {
+    var hierarchy = d3.layout.hierarchy().sort(null).value(null), separation = d3_layout_treeSeparation, size = [ 1, 1 ], nodeSize = null;
+    function tree(d, i) {
+      var nodes = hierarchy.call(this, d, i), root0 = nodes[0], root1 = wrapTree(root0);
+      d3_layout_hierarchyVisitAfter(root1, firstWalk), root1.parent.m = -root1.z;
+      d3_layout_hierarchyVisitBefore(root1, secondWalk);
+      if (nodeSize) d3_layout_hierarchyVisitBefore(root0, sizeNode); else {
+        var left = root0, right = root0, bottom = root0;
+        d3_layout_hierarchyVisitBefore(root0, function(node) {
+          if (node.x < left.x) left = node;
+          if (node.x > right.x) right = node;
+          if (node.depth > bottom.depth) bottom = node;
+        });
+        var tx = separation(left, right) / 2 - left.x, kx = size[0] / (right.x + separation(right, left) / 2 + tx), ky = size[1] / (bottom.depth || 1);
+        d3_layout_hierarchyVisitBefore(root0, function(node) {
+          node.x = (node.x + tx) * kx;
+          node.y = node.depth * ky;
+        });
+      }
+      return nodes;
+    }
+    function wrapTree(root0) {
+      var root1 = {
+        A: null,
+        children: [ root0 ]
+      }, queue = [ root1 ], node1;
+      while ((node1 = queue.pop()) != null) {
+        for (var children = node1.children, child, i = 0, n = children.length; i < n; ++i) {
+          queue.push((children[i] = child = {
+            _: children[i],
+            parent: node1,
+            children: (child = children[i].children) && child.slice() || [],
+            A: null,
+            a: null,
+            z: 0,
+            m: 0,
+            c: 0,
+            s: 0,
+            t: null,
+            i: i
+          }).a = child);
+        }
+      }
+      return root1.children[0];
+    }
+    function firstWalk(v) {
+      var children = v.children, siblings = v.parent.children, w = v.i ? siblings[v.i - 1] : null;
+      if (children.length) {
+        d3_layout_treeShift(v);
+        var midpoint = (children[0].z + children[children.length - 1].z) / 2;
+        if (w) {
+          v.z = w.z + separation(v._, w._);
+          v.m = v.z - midpoint;
+        } else {
+          v.z = midpoint;
+        }
+      } else if (w) {
+        v.z = w.z + separation(v._, w._);
+      }
+      v.parent.A = apportion(v, w, v.parent.A || siblings[0]);
+    }
+    function secondWalk(v) {
+      v._.x = v.z + v.parent.m;
+      v.m += v.parent.m;
+    }
+    function apportion(v, w, ancestor) {
+      if (w) {
+        var vip = v, vop = v, vim = w, vom = vip.parent.children[0], sip = vip.m, sop = vop.m, sim = vim.m, som = vom.m, shift;
+        while (vim = d3_layout_treeRight(vim), vip = d3_layout_treeLeft(vip), vim && vip) {
+          vom = d3_layout_treeLeft(vom);
+          vop = d3_layout_treeRight(vop);
+          vop.a = v;
+          shift = vim.z + sim - vip.z - sip + separation(vim._, vip._);
+          if (shift > 0) {
+            d3_layout_treeMove(d3_layout_treeAncestor(vim, v, ancestor), v, shift);
+            sip += shift;
+            sop += shift;
+          }
+          sim += vim.m;
+          sip += vip.m;
+          som += vom.m;
+          sop += vop.m;
+        }
+        if (vim && !d3_layout_treeRight(vop)) {
+          vop.t = vim;
+          vop.m += sim - sop;
+        }
+        if (vip && !d3_layout_treeLeft(vom)) {
+          vom.t = vip;
+          vom.m += sip - som;
+          ancestor = v;
+        }
+      }
+      return ancestor;
+    }
+    function sizeNode(node) {
+      node.x *= size[0];
+      node.y = node.depth * size[1];
+    }
+    tree.separation = function(x) {
+      if (!arguments.length) return separation;
+      separation = x;
+      return tree;
+    };
+    tree.size = function(x) {
+      if (!arguments.length) return nodeSize ? null : size;
+      nodeSize = (size = x) == null ? sizeNode : null;
+      return tree;
+    };
+    tree.nodeSize = function(x) {
+      if (!arguments.length) return nodeSize ? size : null;
+      nodeSize = (size = x) == null ? null : sizeNode;
+      return tree;
+    };
+    return d3_layout_hierarchyRebind(tree, hierarchy);
+  };
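+  // Usage sketch (illustrative; not part of the upstream d3 build). firstWalk and
+  // apportion above follow the Buchheim-Junger-Leipert linear-time "tidy tree" layout.
+  //   var tree = d3.layout.tree().size([400, 300]);
+  //   var nodes = tree(root);        // x/y per node, root at y = 0 (root as in the hierarchy sketch)
+  //   var links = tree.links(nodes); // parent-child pairs for drawing edges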
+  function d3_layout_treeSeparation(a, b) {
+    return a.parent == b.parent ? 1 : 2;
+  }
+  function d3_layout_treeLeft(v) {
+    var children = v.children;
+    return children.length ? children[0] : v.t;
+  }
+  function d3_layout_treeRight(v) {
+    var children = v.children, n;
+    return (n = children.length) ? children[n - 1] : v.t;
+  }
+  function d3_layout_treeMove(wm, wp, shift) {
+    var change = shift / (wp.i - wm.i);
+    wp.c -= change;
+    wp.s += shift;
+    wm.c += change;
+    wp.z += shift;
+    wp.m += shift;
+  }
+  function d3_layout_treeShift(v) {
+    var shift = 0, change = 0, children = v.children, i = children.length, w;
+    while (--i >= 0) {
+      w = children[i];
+      w.z += shift;
+      w.m += shift;
+      shift += w.s + (change += w.c);
+    }
+  }
+  function d3_layout_treeAncestor(vim, v, ancestor) {
+    return vim.a.parent === v.parent ? vim.a : ancestor;
+  }
+  d3.layout.cluster = function() {
+    var hierarchy = d3.layout.hierarchy().sort(null).value(null), separation = d3_layout_treeSeparation, size = [ 1, 1 ], nodeSize = false;
+    function cluster(d, i) {
+      var nodes = hierarchy.call(this, d, i), root = nodes[0], previousNode, x = 0;
+      d3_layout_hierarchyVisitAfter(root, function(node) {
+        var children = node.children;
+        if (children && children.length) {
+          node.x = d3_layout_clusterX(children);
+          node.y = d3_layout_clusterY(children);
+        } else {
+          node.x = previousNode ? x += separation(node, previousNode) : 0;
+          node.y = 0;
+          previousNode = node;
+        }
+      });
+      var left = d3_layout_clusterLeft(root), right = d3_layout_clusterRight(root), x0 = left.x - separation(left, right) / 2, x1 = right.x + separation(right, left) / 2;
+      d3_layout_hierarchyVisitAfter(root, nodeSize ? function(node) {
+        node.x = (node.x - root.x) * size[0];
+        node.y = (root.y - node.y) * size[1];
+      } : function(node) {
+        node.x = (node.x - x0) / (x1 - x0) * size[0];
+        node.y = (1 - (root.y ? node.y / root.y : 1)) * size[1];
+      });
+      return nodes;
+    }
+    cluster.separation = function(x) {
+      if (!arguments.length) return separation;
+      separation = x;
+      return cluster;
+    };
+    cluster.size = function(x) {
+      if (!arguments.length) return nodeSize ? null : size;
+      nodeSize = (size = x) == null;
+      return cluster;
+    };
+    cluster.nodeSize = function(x) {
+      if (!arguments.length) return nodeSize ? size : null;
+      nodeSize = (size = x) != null;
+      return cluster;
+    };
+    return d3_layout_hierarchyRebind(cluster, hierarchy);
+  };
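+  // Usage sketch (illustrative; not part of the upstream d3 build): like d3.layout.tree,
+  // but all leaves end up at the same depth (a dendrogram).
+  //   var cluster = d3.layout.cluster().size([360, 200]);
+  //   var nodes = cluster(root);
+  //   var links = cluster.links(nodes); // parent-child pairs, as with tree.links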
+  function d3_layout_clusterY(children) {
+    return 1 + d3.max(children, function(child) {
+      return child.y;
+    });
+  }
+  function d3_layout_clusterX(children) {
+    return children.reduce(function(x, child) {
+      return x + child.x;
+    }, 0) / children.length;
+  }
+  function d3_layout_clusterLeft(node) {
+    var children = node.children;
+    return children && children.length ? d3_layout_clusterLeft(children[0]) : node;
+  }
+  function d3_layout_clusterRight(node) {
+    var children = node.children, n;
+    return children && (n = children.length) ? d3_layout_clusterRight(children[n - 1]) : node;
+  }
+  d3.layout.treemap = function() {
+    var hierarchy = d3.layout.hierarchy(), round = Math.round, size = [ 1, 1 ], padding = null, pad = d3_layout_treemapPadNull, sticky = false, stickies, mode = "squarify", ratio = .5 * (1 + Math.sqrt(5));
+    function scale(children, k) {
+      var i = -1, n = children.length, child, area;
+      while (++i < n) {
+        area = (child = children[i]).value * (k < 0 ? 0 : k);
+        child.area = isNaN(area) || area <= 0 ? 0 : area;
+      }
+    }
+    function squarify(node) {
+      var children = node.children;
+      if (children && children.length) {
+        var rect = pad(node), row = [], remaining = children.slice(), child, best = Infinity, score, u = mode === "slice" ? rect.dx : mode === "dice" ? rect.dy : mode === "slice-dice" ? node.depth & 1 ? rect.dy : rect.dx : Math.min(rect.dx, rect.dy), n;
+        scale(remaining, rect.dx * rect.dy / node.value);
+        row.area = 0;
+        while ((n = remaining.length) > 0) {
+          row.push(child = remaining[n - 1]);
+          row.area += child.area;
+          if (mode !== "squarify" || (score = worst(row, u)) <= best) {
+            remaining.pop();
+            best = score;
+          } else {
+            row.area -= row.pop().area;
+            position(row, u, rect, false);
+            u = Math.min(rect.dx, rect.dy);
+            row.length = row.area = 0;
+            best = Infinity;
+          }
+        }
+        if (row.length) {
+          position(row, u, rect, true);
+          row.length = row.area = 0;
+        }
+        children.forEach(squarify);
+      }
+    }
+    function stickify(node) {
+      var children = node.children;
+      if (children && children.length) {
+        var rect = pad(node), remaining = children.slice(), child, row = [];
+        scale(remaining, rect.dx * rect.dy / node.value);
+        row.area = 0;
+        while (child = remaining.pop()) {
+          row.push(child);
+          row.area += child.area;
+          if (child.z != null) {
+            position(row, child.z ? rect.dx : rect.dy, rect, !remaining.length);
+            row.length = row.area = 0;
+          }
+        }
+        children.forEach(stickify);
+      }
+    }
+    function worst(row, u) {
+      var s = row.area, r, rmax = 0, rmin = Infinity, i = -1, n = row.length;
+      while (++i < n) {
+        if (!(r = row[i].area)) continue;
+        if (r < rmin) rmin = r;
+        if (r > rmax) rmax = r;
+      }
+      s *= s;
+      u *= u;
+      return s ? Math.max(u * rmax * ratio / s, s / (u * rmin * ratio)) : Infinity;
+    }
+    function position(row, u, rect, flush) {
+      var i = -1, n = row.length, x = rect.x, y = rect.y, v = u ? round(row.area / u) : 0, o;
+      if (u == rect.dx) {
+        if (flush || v > rect.dy) v = rect.dy;
+        while (++i < n) {
+          o = row[i];
+          o.x = x;
+          o.y = y;
+          o.dy = v;
+          x += o.dx = Math.min(rect.x + rect.dx - x, v ? round(o.area / v) : 0);
+        }
+        o.z = true;
+        o.dx += rect.x + rect.dx - x;
+        rect.y += v;
+        rect.dy -= v;
+      } else {
+        if (flush || v > rect.dx) v = rect.dx;
+        while (++i < n) {
+          o = row[i];
+          o.x = x;
+          o.y = y;
+          o.dx = v;
+          y += o.dy = Math.min(rect.y + rect.dy - y, v ? round(o.area / v) : 0);
+        }
+        o.z = false;
+        o.dy += rect.y + rect.dy - y;
+        rect.x += v;
+        rect.dx -= v;
+      }
+    }
+    function treemap(d) {
+      var nodes = stickies || hierarchy(d), root = nodes[0];
+      root.x = root.y = 0;
+      if (root.value) root.dx = size[0], root.dy = size[1]; else root.dx = root.dy = 0;
+      if (stickies) hierarchy.revalue(root);
+      scale([ root ], root.dx * root.dy / root.value);
+      (stickies ? stickify : squarify)(root);
+      if (sticky) stickies = nodes;
+      return nodes;
+    }
+    treemap.size = function(x) {
+      if (!arguments.length) return size;
+      size = x;
+      return treemap;
+    };
+    treemap.padding = function(x) {
+      if (!arguments.length) return padding;
+      function padFunction(node) {
+        var p = x.call(treemap, node, node.depth);
+        return p == null ? d3_layout_treemapPadNull(node) : d3_layout_treemapPad(node, typeof p === "number" ? [ p, p, p, p ] : p);
+      }
+      function padConstant(node) {
+        return d3_layout_treemapPad(node, x);
+      }
+      var type;
+      pad = (padding = x) == null ? d3_layout_treemapPadNull : (type = typeof x) === "function" ? padFunction : type === "number" ? (x = [ x, x, x, x ], 
+      padConstant) : padConstant;
+      return treemap;
+    };
+    treemap.round = function(x) {
+      if (!arguments.length) return round != Number;
+      round = x ? Math.round : Number;
+      return treemap;
+    };
+    treemap.sticky = function(x) {
+      if (!arguments.length) return sticky;
+      sticky = x;
+      stickies = null;
+      return treemap;
+    };
+    treemap.ratio = function(x) {
+      if (!arguments.length) return ratio;
+      ratio = x;
+      return treemap;
+    };
+    treemap.mode = function(x) {
+      if (!arguments.length) return mode;
+      mode = x + "";
+      return treemap;
+    };
+    return d3_layout_hierarchyRebind(treemap, hierarchy);
+  };
+  function d3_layout_treemapPadNull(node) {
+    return {
+      x: node.x,
+      y: node.y,
+      dx: node.dx,
+      dy: node.dy
+    };
+  }
+  function d3_layout_treemapPad(node, padding) {
+    var x = node.x + padding[3], y = node.y + padding[0], dx = node.dx - padding[1] - padding[3], dy = node.dy - padding[0] - padding[2];
+    if (dx < 0) {
+      x += dx / 2;
+      dx = 0;
+    }
+    if (dy < 0) {
+      y += dy / 2;
+      dy = 0;
+    }
+    return {
+      x: x,
+      y: y,
+      dx: dx,
+      dy: dy
+    };
+  }
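+  // d3.random: factories for random-number generators. normal(µ, σ) uses the
+  // Marsaglia polar variant of the Box-Muller transform (the rejection loop
+  // below), logNormal exponentiates a normal variate, irwinHall(m) sums m
+  // uniform variates, and bates(m) is that sum divided by m. Usage sketch:
+  //   var rnd = d3.random.normal(0, 1);
+  //   rnd(); // one sample from N(0, 1)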
+  d3.random = {
+    normal: function(µ, σ) {
+      var n = arguments.length;
+      if (n < 2) σ = 1;
+      if (n < 1) µ = 0;
+      return function() {
+        var x, y, r;
+        do {
+          x = Math.random() * 2 - 1;
+          y = Math.random() * 2 - 1;
+          r = x * x + y * y;
+        } while (!r || r > 1);
+        return µ + σ * x * Math.sqrt(-2 * Math.log(r) / r);
+      };
+    },
+    logNormal: function() {
+      var random = d3.random.normal.apply(d3, arguments);
+      return function() {
+        return Math.exp(random());
+      };
+    },
+    bates: function(m) {
+      var random = d3.random.irwinHall(m);
+      return function() {
+        return random() / m;
+      };
+    },
+    irwinHall: function(m) {
+      return function() {
+        for (var s = 0, j = 0; j < m; j++) s += Math.random();
+        return s;
+      };
+    }
+  };
+  d3.scale = {};
+  function d3_scaleExtent(domain) {
+    var start = domain[0], stop = domain[domain.length - 1];
+    return start < stop ? [ start, stop ] : [ stop, start ];
+  }
+  function d3_scaleRange(scale) {
+    return scale.rangeExtent ? scale.rangeExtent() : d3_scaleExtent(scale.range());
+  }
+  function d3_scale_bilinear(domain, range, uninterpolate, interpolate) {
+    var u = uninterpolate(domain[0], domain[1]), i = interpolate(range[0], range[1]);
+    return function(x) {
+      return i(u(x));
+    };
+  }
+  function d3_scale_nice(domain, nice) {
+    var i0 = 0, i1 = domain.length - 1, x0 = domain[i0], x1 = domain[i1], dx;
+    if (x1 < x0) {
+      dx = i0, i0 = i1, i1 = dx;
+      dx = x0, x0 = x1, x1 = dx;
+    }
+    domain[i0] = nice.floor(x0);
+    domain[i1] = nice.ceil(x1);
+    return domain;
+  }
+  function d3_scale_niceStep(step) {
+    return step ? {
+      floor: function(x) {
+        return Math.floor(x / step) * step;
+      },
+      ceil: function(x) {
+        return Math.ceil(x / step) * step;
+      }
+    } : d3_scale_niceIdentity;
+  }
+  var d3_scale_niceIdentity = {
+    floor: d3_identity,
+    ceil: d3_identity
+  };
+  function d3_scale_polylinear(domain, range, uninterpolate, interpolate) {
+    var u = [], i = [], j = 0, k = Math.min(domain.length, range.length) - 1;
+    if (domain[k] < domain[0]) {
+      domain = domain.slice().reverse();
+      range = range.slice().reverse();
+    }
+    while (++j <= k) {
+      u.push(uninterpolate(domain[j - 1], domain[j]));
+      i.push(interpolate(range[j - 1], range[j]));
+    }
+    return function(x) {
+      var j = d3.bisect(domain, x, 1, k) - 1;
+      return i[j](u[j](x));
+    };
+  }
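+  // d3.scale.linear maps a numeric domain to a range by linear interpolation;
+  // d3_scale_bilinear covers the two-point case, d3_scale_polylinear the
+  // piecewise general case. Usage sketch:
+  //   var x = d3.scale.linear().domain([0, 10]).range([0, 100]);
+  //   x(5);         // 50
+  //   x.invert(50); // 5
+  //   x.ticks(5);   // [0, 2, 4, 6, 8, 10]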
+  d3.scale.linear = function() {
+    return d3_scale_linear([ 0, 1 ], [ 0, 1 ], d3_interpolate, false);
+  };
+  function d3_scale_linear(domain, range, interpolate, clamp) {
+    var output, input;
+    function rescale() {
+      var linear = Math.min(domain.length, range.length) > 2 ? d3_scale_polylinear : d3_scale_bilinear, uninterpolate = clamp ? d3_uninterpolateClamp : d3_uninterpolateNumber;
+      output = linear(domain, range, uninterpolate, interpolate);
+      input = linear(range, domain, uninterpolate, d3_interpolate);
+      return scale;
+    }
+    function scale(x) {
+      return output(x);
+    }
+    scale.invert = function(y) {
+      return input(y);
+    };
+    scale.domain = function(x) {
+      if (!arguments.length) return domain;
+      domain = x.map(Number);
+      return rescale();
+    };
+    scale.range = function(x) {
+      if (!arguments.length) return range;
+      range = x;
+      return rescale();
+    };
+    scale.rangeRound = function(x) {
+      return scale.range(x).interpolate(d3_interpolateRound);
+    };
+    scale.clamp = function(x) {
+      if (!arguments.length) return clamp;
+      clamp = x;
+      return rescale();
+    };
+    scale.interpolate = function(x) {
+      if (!arguments.length) return interpolate;
+      interpolate = x;
+      return rescale();
+    };
+    scale.ticks = function(m) {
+      return d3_scale_linearTicks(domain, m);
+    };
+    scale.tickFormat = function(m, format) {
+      return d3_scale_linearTickFormat(domain, m, format);
+    };
+    scale.nice = function(m) {
+      d3_scale_linearNice(domain, m);
+      return rescale();
+    };
+    scale.copy = function() {
+      return d3_scale_linear(domain, range, interpolate, clamp);
+    };
+    return rescale();
+  }
+  function d3_scale_linearRebind(scale, linear) {
+    return d3.rebind(scale, linear, "range", "rangeRound", "interpolate", "clamp");
+  }
+  function d3_scale_linearNice(domain, m) {
+    d3_scale_nice(domain, d3_scale_niceStep(d3_scale_linearTickRange(domain, m)[2]));
+    d3_scale_nice(domain, d3_scale_niceStep(d3_scale_linearTickRange(domain, m)[2]));
+    return domain;
+  }
+  function d3_scale_linearTickRange(domain, m) {
+    if (m == null) m = 10;
+    var extent = d3_scaleExtent(domain), span = extent[1] - extent[0], step = Math.pow(10, Math.floor(Math.log(span / m) / Math.LN10)), err = m / span * step;
+    if (err <= .15) step *= 10; else if (err <= .35) step *= 5; else if (err <= .75) step *= 2;
+    extent[0] = Math.ceil(extent[0] / step) * step;
+    extent[1] = Math.floor(extent[1] / step) * step + step * .5;
+    extent[2] = step;
+    return extent;
+  }
+  function d3_scale_linearTicks(domain, m) {
+    return d3.range.apply(d3, d3_scale_linearTickRange(domain, m));
+  }
+  function d3_scale_linearTickFormat(domain, m, format) {
+    var range = d3_scale_linearTickRange(domain, m);
+    if (format) {
+      var match = d3_format_re.exec(format);
+      match.shift();
+      if (match[8] === "s") {
+        var prefix = d3.formatPrefix(Math.max(abs(range[0]), abs(range[1])));
+        if (!match[7]) match[7] = "." + d3_scale_linearPrecision(prefix.scale(range[2]));
+        match[8] = "f";
+        format = d3.format(match.join(""));
+        return function(d) {
+          return format(prefix.scale(d)) + prefix.symbol;
+        };
+      }
+      if (!match[7]) match[7] = "." + d3_scale_linearFormatPrecision(match[8], range);
+      format = match.join("");
+    } else {
+      format = ",." + d3_scale_linearPrecision(range[2]) + "f";
+    }
+    return d3.format(format);
+  }
+  var d3_scale_linearFormatSignificant = {
+    s: 1,
+    g: 1,
+    p: 1,
+    r: 1,
+    e: 1
+  };
+  function d3_scale_linearPrecision(value) {
+    return -Math.floor(Math.log(value) / Math.LN10 + .01);
+  }
+  function d3_scale_linearFormatPrecision(type, range) {
+    var p = d3_scale_linearPrecision(range[2]);
+    return type in d3_scale_linearFormatSignificant ? Math.abs(p - d3_scale_linearPrecision(Math.max(abs(range[0]), abs(range[1])))) + +(type !== "e") : p - (type === "%") * 2;
+  }
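+  // d3.scale.log composes a linear scale with log/pow transforms; the base
+  // defaults to 10 and the sign of the first domain value selects the positive
+  // or negative branch. ticks() yields the 1..(base-1) multiples of each power
+  // of the base inside the domain. Usage sketch:
+  //   var y = d3.scale.log().domain([1, 1000]).range([0, 300]);
+  //   y(10); // 100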
+  d3.scale.log = function() {
+    return d3_scale_log(d3.scale.linear().domain([ 0, 1 ]), 10, true, [ 1, 10 ]);
+  };
+  function d3_scale_log(linear, base, positive, domain) {
+    function log(x) {
+      return (positive ? Math.log(x < 0 ? 0 : x) : -Math.log(x > 0 ? 0 : -x)) / Math.log(base);
+    }
+    function pow(x) {
+      return positive ? Math.pow(base, x) : -Math.pow(base, -x);
+    }
+    function scale(x) {
+      return linear(log(x));
+    }
+    scale.invert = function(x) {
+      return pow(linear.invert(x));
+    };
+    scale.domain = function(x) {
+      if (!arguments.length) return domain;
+      positive = x[0] >= 0;
+      linear.domain((domain = x.map(Number)).map(log));
+      return scale;
+    };
+    scale.base = function(_) {
+      if (!arguments.length) return base;
+      base = +_;
+      linear.domain(domain.map(log));
+      return scale;
+    };
+    scale.nice = function() {
+      var niced = d3_scale_nice(domain.map(log), positive ? Math : d3_scale_logNiceNegative);
+      linear.domain(niced);
+      domain = niced.map(pow);
+      return scale;
+    };
+    scale.ticks = function() {
+      var extent = d3_scaleExtent(domain), ticks = [], u = extent[0], v = extent[1], i = Math.floor(log(u)), j = Math.ceil(log(v)), n = base % 1 ? 2 : base;
+      if (isFinite(j - i)) {
+        if (positive) {
+          for (;i < j; i++) for (var k = 1; k < n; k++) ticks.push(pow(i) * k);
+          ticks.push(pow(i));
+        } else {
+          ticks.push(pow(i));
+          for (;i++ < j; ) for (var k = n - 1; k > 0; k--) ticks.push(pow(i) * k);
+        }
+        for (i = 0; ticks[i] < u; i++) {}
+        for (j = ticks.length; ticks[j - 1] > v; j--) {}
+        ticks = ticks.slice(i, j);
+      }
+      return ticks;
+    };
+    scale.tickFormat = function(n, format) {
+      if (!arguments.length) return d3_scale_logFormat;
+      if (arguments.length < 2) format = d3_scale_logFormat; else if (typeof format !== "function") format = d3.format(format);
+      var k = Math.max(1, base * n / scale.ticks().length);
+      return function(d) {
+        var i = d / pow(Math.round(log(d)));
+        if (i * base < base - .5) i *= base;
+        return i <= k ? format(d) : "";
+      };
+    };
+    scale.copy = function() {
+      return d3_scale_log(linear.copy(), base, positive, domain);
+    };
+    return d3_scale_linearRebind(scale, linear);
+  }
+  var d3_scale_logFormat = d3.format(".0e"), d3_scale_logNiceNegative = {
+    floor: function(x) {
+      return -Math.ceil(-x);
+    },
+    ceil: function(x) {
+      return -Math.floor(-x);
+    }
+  };
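+  // d3.scale.pow is the same construction with a sign-preserving power
+  // transform (d3_scale_powPow); d3.scale.sqrt below is pow with exponent .5:
+  //   d3.scale.sqrt().domain([0, 100]).range([0, 10])(25); // 5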
+  d3.scale.pow = function() {
+    return d3_scale_pow(d3.scale.linear(), 1, [ 0, 1 ]);
+  };
+  function d3_scale_pow(linear, exponent, domain) {
+    var powp = d3_scale_powPow(exponent), powb = d3_scale_powPow(1 / exponent);
+    function scale(x) {
+      return linear(powp(x));
+    }
+    scale.invert = function(x) {
+      return powb(linear.invert(x));
+    };
+    scale.domain = function(x) {
+      if (!arguments.length) return domain;
+      linear.domain((domain = x.map(Number)).map(powp));
+      return scale;
+    };
+    scale.ticks = function(m) {
+      return d3_scale_linearTicks(domain, m);
+    };
+    scale.tickFormat = function(m, format) {
+      return d3_scale_linearTickFormat(domain, m, format);
+    };
+    scale.nice = function(m) {
+      return scale.domain(d3_scale_linearNice(domain, m));
+    };
+    scale.exponent = function(x) {
+      if (!arguments.length) return exponent;
+      powp = d3_scale_powPow(exponent = x);
+      powb = d3_scale_powPow(1 / exponent);
+      linear.domain(domain.map(powp));
+      return scale;
+    };
+    scale.copy = function() {
+      return d3_scale_pow(linear.copy(), exponent, domain);
+    };
+    return d3_scale_linearRebind(scale, linear);
+  }
+  function d3_scale_powPow(e) {
+    return function(x) {
+      return x < 0 ? -Math.pow(-x, e) : Math.pow(x, e);
+    };
+  }
+  d3.scale.sqrt = function() {
+    return d3.scale.pow().exponent(.5);
+  };
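+  // d3.scale.ordinal maps discrete values to discrete range values; with an
+  // explicit range, unseen domain values are appended on first lookup.
+  // rangeBands/rangeRoundBands compute evenly spaced bands (bar charts),
+  // rangePoints/rangeRoundPoints evenly spaced points. Usage sketch:
+  //   var x = d3.scale.ordinal().domain(["a", "b", "c"]).rangeBands([0, 120]);
+  //   x("b");        // 40
+  //   x.rangeBand(); // 40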
+  d3.scale.ordinal = function() {
+    return d3_scale_ordinal([], {
+      t: "range",
+      a: [ [] ]
+    });
+  };
+  function d3_scale_ordinal(domain, ranger) {
+    var index, range, rangeBand;
+    function scale(x) {
+      return range[((index.get(x) || (ranger.t === "range" ? index.set(x, domain.push(x)) : NaN)) - 1) % range.length];
+    }
+    function steps(start, step) {
+      return d3.range(domain.length).map(function(i) {
+        return start + step * i;
+      });
+    }
+    scale.domain = function(x) {
+      if (!arguments.length) return domain;
+      domain = [];
+      index = new d3_Map();
+      var i = -1, n = x.length, xi;
+      while (++i < n) if (!index.has(xi = x[i])) index.set(xi, domain.push(xi));
+      return scale[ranger.t].apply(scale, ranger.a);
+    };
+    scale.range = function(x) {
+      if (!arguments.length) return range;
+      range = x;
+      rangeBand = 0;
+      ranger = {
+        t: "range",
+        a: arguments
+      };
+      return scale;
+    };
+    scale.rangePoints = function(x, padding) {
+      if (arguments.length < 2) padding = 0;
+      var start = x[0], stop = x[1], step = domain.length < 2 ? (start = (start + stop) / 2, 
+      0) : (stop - start) / (domain.length - 1 + padding);
+      range = steps(start + step * padding / 2, step);
+      rangeBand = 0;
+      ranger = {
+        t: "rangePoints",
+        a: arguments
+      };
+      return scale;
+    };
+    scale.rangeRoundPoints = function(x, padding) {
+      if (arguments.length < 2) padding = 0;
+      var start = x[0], stop = x[1], step = domain.length < 2 ? (start = stop = Math.round((start + stop) / 2), 
+      0) : (stop - start) / (domain.length - 1 + padding) | 0;
+      range = steps(start + Math.round(step * padding / 2 + (stop - start - (domain.length - 1 + padding) * step) / 2), step);
+      rangeBand = 0;
+      ranger = {
+        t: "rangeRoundPoints",
+        a: arguments
+      };
+      return scale;
+    };
+    scale.rangeBands = function(x, padding, outerPadding) {
+      if (arguments.length < 2) padding = 0;
+      if (arguments.length < 3) outerPadding = padding;
+      var reverse = x[1] < x[0], start = x[reverse - 0], stop = x[1 - reverse], step = (stop - start) / (domain.length - padding + 2 * outerPadding);
+      range = steps(start + step * outerPadding, step);
+      if (reverse) range.reverse();
+      rangeBand = step * (1 - padding);
+      ranger = {
+        t: "rangeBands",
+        a: arguments
+      };
+      return scale;
+    };
+    scale.rangeRoundBands = function(x, padding, outerPadding) {
+      if (arguments.length < 2) padding = 0;
+      if (arguments.length < 3) outerPadding = padding;
+      var reverse = x[1] < x[0], start = x[reverse - 0], stop = x[1 - reverse], step = Math.floor((stop - start) / (domain.length - padding + 2 * outerPadding));
+      range = steps(start + Math.round((stop - start - (domain.length - padding) * step) / 2), step);
+      if (reverse) range.reverse();
+      rangeBand = Math.round(step * (1 - padding));
+      ranger = {
+        t: "rangeRoundBands",
+        a: arguments
+      };
+      return scale;
+    };
+    scale.rangeBand = function() {
+      return rangeBand;
+    };
+    scale.rangeExtent = function() {
+      return d3_scaleExtent(ranger.a[0]);
+    };
+    scale.copy = function() {
+      return d3_scale_ordinal(domain, ranger);
+    };
+    return scale.domain(domain);
+  }
+  d3.scale.category10 = function() {
+    return d3.scale.ordinal().range(d3_category10);
+  };
+  d3.scale.category20 = function() {
+    return d3.scale.ordinal().range(d3_category20);
+  };
+  d3.scale.category20b = function() {
+    return d3.scale.ordinal().range(d3_category20b);
+  };
+  d3.scale.category20c = function() {
+    return d3.scale.ordinal().range(d3_category20c);
+  };
+  var d3_category10 = [ 2062260, 16744206, 2924588, 14034728, 9725885, 9197131, 14907330, 8355711, 12369186, 1556175 ].map(d3_rgbString);
+  var d3_category20 = [ 2062260, 11454440, 16744206, 16759672, 2924588, 10018698, 14034728, 16750742, 9725885, 12955861, 9197131, 12885140, 14907330, 16234194, 8355711, 13092807, 12369186, 14408589, 1556175, 10410725 ].map(d3_rgbString);
+  var d3_category20b = [ 3750777, 5395619, 7040719, 10264286, 6519097, 9216594, 11915115, 13556636, 9202993, 12426809, 15186514, 15190932, 8666169, 11356490, 14049643, 15177372, 8077683, 10834324, 13528509, 14589654 ].map(d3_rgbString);
+  var d3_category20c = [ 3244733, 7057110, 10406625, 13032431, 15095053, 16616764, 16625259, 16634018, 3253076, 7652470, 10607003, 13101504, 7695281, 10394312, 12369372, 14342891, 6513507, 9868950, 12434877, 14277081 ].map(d3_rgbString);
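+  // The quantile, quantize and threshold scales below all map a continuous
+  // domain to a discrete range: quantile cuts a sorted sample at quantile
+  // boundaries, quantize cuts [x0, x1] into range.length uniform segments, and
+  // threshold uses explicit cut points. Usage sketch:
+  //   d3.scale.quantize().domain([0, 10]).range(["lo", "mid", "hi"])(7); // "hi"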
+  d3.scale.quantile = function() {
+    return d3_scale_quantile([], []);
+  };
+  function d3_scale_quantile(domain, range) {
+    var thresholds;
+    function rescale() {
+      var k = 0, q = range.length;
+      thresholds = [];
+      while (++k < q) thresholds[k - 1] = d3.quantile(domain, k / q);
+      return scale;
+    }
+    function scale(x) {
+      if (!isNaN(x = +x)) return range[d3.bisect(thresholds, x)];
+    }
+    scale.domain = function(x) {
+      if (!arguments.length) return domain;
+      domain = x.map(d3_number).filter(d3_numeric).sort(d3_ascending);
+      return rescale();
+    };
+    scale.range = function(x) {
+      if (!arguments.length) return range;
+      range = x;
+      return rescale();
+    };
+    scale.quantiles = function() {
+      return thresholds;
+    };
+    scale.invertExtent = function(y) {
+      y = range.indexOf(y);
+      return y < 0 ? [ NaN, NaN ] : [ y > 0 ? thresholds[y - 1] : domain[0], y < thresholds.length ? thresholds[y] : domain[domain.length - 1] ];
+    };
+    scale.copy = function() {
+      return d3_scale_quantile(domain, range);
+    };
+    return rescale();
+  }
+  d3.scale.quantize = function() {
+    return d3_scale_quantize(0, 1, [ 0, 1 ]);
+  };
+  function d3_scale_quantize(x0, x1, range) {
+    var kx, i;
+    function scale(x) {
+      return range[Math.max(0, Math.min(i, Math.floor(kx * (x - x0))))];
+    }
+    function rescale() {
+      kx = range.length / (x1 - x0);
+      i = range.length - 1;
+      return scale;
+    }
+    scale.domain = function(x) {
+      if (!arguments.length) return [ x0, x1 ];
+      x0 = +x[0];
+      x1 = +x[x.length - 1];
+      return rescale();
+    };
+    scale.range = function(x) {
+      if (!arguments.length) return range;
+      range = x;
+      return rescale();
+    };
+    scale.invertExtent = function(y) {
+      y = range.indexOf(y);
+      y = y < 0 ? NaN : y / kx + x0;
+      return [ y, y + 1 / kx ];
+    };
+    scale.copy = function() {
+      return d3_scale_quantize(x0, x1, range);
+    };
+    return rescale();
+  }
+  d3.scale.threshold = function() {
+    return d3_scale_threshold([ .5 ], [ 0, 1 ]);
+  };
+  function d3_scale_threshold(domain, range) {
+    function scale(x) {
+      if (x <= x) return range[d3.bisect(domain, x)];
+    }
+    scale.domain = function(_) {
+      if (!arguments.length) return domain;
+      domain = _;
+      return scale;
+    };
+    scale.range = function(_) {
+      if (!arguments.length) return range;
+      range = _;
+      return scale;
+    };
+    scale.invertExtent = function(y) {
+      y = range.indexOf(y);
+      return [ domain[y - 1], domain[y] ];
+    };
+    scale.copy = function() {
+      return d3_scale_threshold(domain, range);
+    };
+    return scale;
+  }
+  d3.scale.identity = function() {
+    return d3_scale_identity([ 0, 1 ]);
+  };
+  function d3_scale_identity(domain) {
+    function identity(x) {
+      return +x;
+    }
+    identity.invert = identity;
+    identity.domain = identity.range = function(x) {
+      if (!arguments.length) return domain;
+      domain = x.map(identity);
+      return identity;
+    };
+    identity.ticks = function(m) {
+      return d3_scale_linearTicks(domain, m);
+    };
+    identity.tickFormat = function(m, format) {
+      return d3_scale_linearTickFormat(domain, m, format);
+    };
+    identity.copy = function() {
+      return d3_scale_identity(domain);
+    };
+    return identity;
+  }
+  d3.svg = {};
+  function d3_zero() {
+    return 0;
+  }
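+  // d3.svg.arc generates SVG path data for annular sectors (pie and donut
+  // slices). Angles are in radians, clockwise from 12 o'clock; padAngle and
+  // cornerRadius produce the padded/rounded variants handled below. Usage
+  // sketch:
+  //   var arc = d3.svg.arc().innerRadius(40).outerRadius(100);
+  //   arc({ startAngle: 0, endAngle: Math.PI / 2 }); // "M..." path string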
+  d3.svg.arc = function() {
+    var innerRadius = d3_svg_arcInnerRadius, outerRadius = d3_svg_arcOuterRadius, cornerRadius = d3_zero, padRadius = d3_svg_arcAuto, startAngle = d3_svg_arcStartAngle, endAngle = d3_svg_arcEndAngle, padAngle = d3_svg_arcPadAngle;
+    function arc() {
+      var r0 = Math.max(0, +innerRadius.apply(this, arguments)), r1 = Math.max(0, +outerRadius.apply(this, arguments)), a0 = startAngle.apply(this, arguments) - halfπ, a1 = endAngle.apply(this, arguments) - halfπ, da = Math.abs(a1 - a0), cw = a0 > a1 ? 0 : 1;
+      if (r1 < r0) rc = r1, r1 = r0, r0 = rc;
+      if (da >= τε) return circleSegment(r1, cw) + (r0 ? circleSegment(r0, 1 - cw) : "") + "Z";
+      var rc, cr, rp, ap, p0 = 0, p1 = 0, x0, y0, x1, y1, x2, y2, x3, y3, path = [];
+      if (ap = (+padAngle.apply(this, arguments) || 0) / 2) {
+        rp = padRadius === d3_svg_arcAuto ? Math.sqrt(r0 * r0 + r1 * r1) : +padRadius.apply(this, arguments);
+        if (!cw) p1 *= -1;
+        if (r1) p1 = d3_asin(rp / r1 * Math.sin(ap));
+        if (r0) p0 = d3_asin(rp / r0 * Math.sin(ap));
+      }
+      if (r1) {
+        x0 = r1 * Math.cos(a0 + p1);
+        y0 = r1 * Math.sin(a0 + p1);
+        x1 = r1 * Math.cos(a1 - p1);
+        y1 = r1 * Math.sin(a1 - p1);
+        var l1 = Math.abs(a1 - a0 - 2 * p1) <= π ? 0 : 1;
+        if (p1 && d3_svg_arcSweep(x0, y0, x1, y1) === cw ^ l1) {
+          var h1 = (a0 + a1) / 2;
+          x0 = r1 * Math.cos(h1);
+          y0 = r1 * Math.sin(h1);
+          x1 = y1 = null;
+        }
+      } else {
+        x0 = y0 = 0;
+      }
+      if (r0) {
+        x2 = r0 * Math.cos(a1 - p0);
+        y2 = r0 * Math.sin(a1 - p0);
+        x3 = r0 * Math.cos(a0 + p0);
+        y3 = r0 * Math.sin(a0 + p0);
+        var l0 = Math.abs(a0 - a1 + 2 * p0) <= π ? 0 : 1;
+        if (p0 && d3_svg_arcSweep(x2, y2, x3, y3) === 1 - cw ^ l0) {
+          var h0 = (a0 + a1) / 2;
+          x2 = r0 * Math.cos(h0);
+          y2 = r0 * Math.sin(h0);
+          x3 = y3 = null;
+        }
+      } else {
+        x2 = y2 = 0;
+      }
+      if (da > ε && (rc = Math.min(Math.abs(r1 - r0) / 2, +cornerRadius.apply(this, arguments))) > .001) {
+        cr = r0 < r1 ^ cw ? 0 : 1;
+        var rc1 = rc, rc0 = rc;
+        if (da < π) {
+          var oc = x3 == null ? [ x2, y2 ] : x1 == null ? [ x0, y0 ] : d3_geom_polygonIntersect([ x0, y0 ], [ x3, y3 ], [ x1, y1 ], [ x2, y2 ]), ax = x0 - oc[0], ay = y0 - oc[1], bx = x1 - oc[0], by = y1 - oc[1], kc = 1 / Math.sin(Math.acos((ax * bx + ay * by) / (Math.sqrt(ax * ax + ay * ay) * Math.sqrt(bx * bx + by * by))) / 2), lc = Math.sqrt(oc[0] * oc[0] + oc[1] * oc[1]);
+          rc0 = Math.min(rc, (r0 - lc) / (kc - 1));
+          rc1 = Math.min(rc, (r1 - lc) / (kc + 1));
+        }
+        if (x1 != null) {
+          var t30 = d3_svg_arcCornerTangents(x3 == null ? [ x2, y2 ] : [ x3, y3 ], [ x0, y0 ], r1, rc1, cw), t12 = d3_svg_arcCornerTangents([ x1, y1 ], [ x2, y2 ], r1, rc1, cw);
+          if (rc === rc1) {
+            path.push("M", t30[0], "A", rc1, ",", rc1, " 0 0,", cr, " ", t30[1], "A", r1, ",", r1, " 0 ", 1 - cw ^ d3_svg_arcSweep(t30[1][0], t30[1][1], t12[1][0], t12[1][1]), ",", cw, " ", t12[1], "A", rc1, ",", rc1, " 0 0,", cr, " ", t12[0]);
+          } else {
+            path.push("M", t30[0], "A", rc1, ",", rc1, " 0 1,", cr, " ", t12[0]);
+          }
+        } else {
+          path.push("M", x0, ",", y0);
+        }
+        if (x3 != null) {
+          var t03 = d3_svg_arcCornerTangents([ x0, y0 ], [ x3, y3 ], r0, -rc0, cw), t21 = d3_svg_arcCornerTangents([ x2, y2 ], x1 == null ? [ x0, y0 ] : [ x1, y1 ], r0, -rc0, cw);
+          if (rc === rc0) {
+            path.push("L", t21[0], "A", rc0, ",", rc0, " 0 0,", cr, " ", t21[1], "A", r0, ",", r0, " 0 ", cw ^ d3_svg_arcSweep(t21[1][0], t21[1][1], t03[1][0], t03[1][1]), ",", 1 - cw, " ", t03[1], "A", rc0, ",", rc0, " 0 0,", cr, " ", t03[0]);
+          } else {
+            path.push("L", t21[0], "A", rc0, ",", rc0, " 0 0,", cr, " ", t03[0]);
+          }
+        } else {
+          path.push("L", x2, ",", y2);
+        }
+      } else {
+        path.push("M", x0, ",", y0);
+        if (x1 != null) path.push("A", r1, ",", r1, " 0 ", l1, ",", cw, " ", x1, ",", y1);
+        path.push("L", x2, ",", y2);
+        if (x3 != null) path.push("A", r0, ",", r0, " 0 ", l0, ",", 1 - cw, " ", x3, ",", y3);
+      }
+      path.push("Z");
+      return path.join("");
+    }
+    function circleSegment(r1, cw) {
+      return "M0," + r1 + "A" + r1 + "," + r1 + " 0 1," + cw + " 0," + -r1 + "A" + r1 + "," + r1 + " 0 1," + cw + " 0," + r1;
+    }
+    arc.innerRadius = function(v) {
+      if (!arguments.length) return innerRadius;
+      innerRadius = d3_functor(v);
+      return arc;
+    };
+    arc.outerRadius = function(v) {
+      if (!arguments.length) return outerRadius;
+      outerRadius = d3_functor(v);
+      return arc;
+    };
+    arc.cornerRadius = function(v) {
+      if (!arguments.length) return cornerRadius;
+      cornerRadius = d3_functor(v);
+      return arc;
+    };
+    arc.padRadius = function(v) {
+      if (!arguments.length) return padRadius;
+      padRadius = v == d3_svg_arcAuto ? d3_svg_arcAuto : d3_functor(v);
+      return arc;
+    };
+    arc.startAngle = function(v) {
+      if (!arguments.length) return startAngle;
+      startAngle = d3_functor(v);
+      return arc;
+    };
+    arc.endAngle = function(v) {
+      if (!arguments.length) return endAngle;
+      endAngle = d3_functor(v);
+      return arc;
+    };
+    arc.padAngle = function(v) {
+      if (!arguments.length) return padAngle;
+      padAngle = d3_functor(v);
+      return arc;
+    };
+    arc.centroid = function() {
+      var r = (+innerRadius.apply(this, arguments) + +outerRadius.apply(this, arguments)) / 2, a = (+startAngle.apply(this, arguments) + +endAngle.apply(this, arguments)) / 2 - halfπ;
+      return [ Math.cos(a) * r, Math.sin(a) * r ];
+    };
+    return arc;
+  };
+  var d3_svg_arcAuto = "auto";
+  function d3_svg_arcInnerRadius(d) {
+    return d.innerRadius;
+  }
+  function d3_svg_arcOuterRadius(d) {
+    return d.outerRadius;
+  }
+  function d3_svg_arcStartAngle(d) {
+    return d.startAngle;
+  }
+  function d3_svg_arcEndAngle(d) {
+    return d.endAngle;
+  }
+  function d3_svg_arcPadAngle(d) {
+    return d && d.padAngle;
+  }
+  function d3_svg_arcSweep(x0, y0, x1, y1) {
+    return (x0 - x1) * y0 - (y0 - y1) * x0 > 0 ? 0 : 1;
+  }
+  function d3_svg_arcCornerTangents(p0, p1, r1, rc, cw) {
+    var x01 = p0[0] - p1[0], y01 = p0[1] - p1[1], lo = (cw ? rc : -rc) / Math.sqrt(x01 * x01 + y01 * y01), ox = lo * y01, oy = -lo * x01, x1 = p0[0] + ox, y1 = p0[1] + oy, x2 = p1[0] + ox, y2 = p1[1] + oy, x3 = (x1 + x2) / 2, y3 = (y1 + y2) / 2, dx = x2 - x1, dy = y2 - y1, d2 = dx * dx + dy * dy, r = r1 - rc, D = x1 * y2 - x2 * y1, d = (dy < 0 ? -1 : 1) * Math.sqrt(Math.max(0, r * r * d2 - D * D)), cx0 = (D * dy - dx * d) / d2, cy0 = (-D * dx - dy * d) / d2, cx1 = (D * dy + dx * d) / d2, cy1 = (-D * dx + dy * d) / d2, dx0 = cx0 - x3, dy0 = cy0 - y3, dx1 = cx1 - x3, dy1 = cy1 - y3;
+    if (dx0 * dx0 + dy0 * dy0 > dx1 * dx1 + dy1 * dy1) cx0 = cx1, cy0 = cy1;
+    return [ [ cx0 - ox, cy0 - oy ], [ cx0 * r1 / r, cy0 * r1 / r ] ];
+  }
+  function d3_svg_line(projection) {
+    var x = d3_geom_pointX, y = d3_geom_pointY, defined = d3_true, interpolate = d3_svg_lineLinear, interpolateKey = interpolate.key, tension = .7;
+    function line(data) {
+      var segments = [], points = [], i = -1, n = data.length, d, fx = d3_functor(x), fy = d3_functor(y);
+      function segment() {
+        segments.push("M", interpolate(projection(points), tension));
+      }
+      while (++i < n) {
+        if (defined.call(this, d = data[i], i)) {
+          points.push([ +fx.call(this, d, i), +fy.call(this, d, i) ]);
+        } else if (points.length) {
+          segment();
+          points = [];
+        }
+      }
+      if (points.length) segment();
+      return segments.length ? segments.join("") : null;
+    }
+    line.x = function(_) {
+      if (!arguments.length) return x;
+      x = _;
+      return line;
+    };
+    line.y = function(_) {
+      if (!arguments.length) return y;
+      y = _;
+      return line;
+    };
+    line.defined = function(_) {
+      if (!arguments.length) return defined;
+      defined = _;
+      return line;
+    };
+    line.interpolate = function(_) {
+      if (!arguments.length) return interpolateKey;
+      if (typeof _ === "function") interpolateKey = interpolate = _; else interpolateKey = (interpolate = d3_svg_lineInterpolators.get(_) || d3_svg_lineLinear).key;
+      return line;
+    };
+    line.tension = function(_) {
+      if (!arguments.length) return tension;
+      tension = _;
+      return line;
+    };
+    return line;
+  }
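+  // d3.svg.line turns an array of points into SVG path data, with pluggable
+  // x/y accessors, a defined() predicate that breaks the path into segments,
+  // and the interpolation modes registered just below. Usage sketch:
+  //   var line = d3.svg.line().interpolate("basis");
+  //   line([[0, 0], [50, 80], [100, 20]]); // path data string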
+  d3.svg.line = function() {
+    return d3_svg_line(d3_identity);
+  };
+  var d3_svg_lineInterpolators = d3.map({
+    linear: d3_svg_lineLinear,
+    "linear-closed": d3_svg_lineLinearClosed,
+    step: d3_svg_lineStep,
+    "step-before": d3_svg_lineStepBefore,
+    "step-after": d3_svg_lineStepAfter,
+    basis: d3_svg_lineBasis,
+    "basis-open": d3_svg_lineBasisOpen,
+    "basis-closed": d3_svg_lineBasisClosed,
+    bundle: d3_svg_lineBundle,
+    cardinal: d3_svg_lineCardinal,
+    "cardinal-open": d3_svg_lineCardinalOpen,
+    "cardinal-closed": d3_svg_lineCardinalClosed,
+    monotone: d3_svg_lineMonotone
+  });
+  d3_svg_lineInterpolators.forEach(function(key, value) {
+    value.key = key;
+    value.closed = /-closed$/.test(key);
+  });
+  function d3_svg_lineLinear(points) {
+    return points.length > 1 ? points.join("L") : points + "Z";
+  }
+  function d3_svg_lineLinearClosed(points) {
+    return points.join("L") + "Z";
+  }
+  function d3_svg_lineStep(points) {
+    var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ];
+    while (++i < n) path.push("H", (p[0] + (p = points[i])[0]) / 2, "V", p[1]);
+    if (n > 1) path.push("H", p[0]);
+    return path.join("");
+  }
+  function d3_svg_lineStepBefore(points) {
+    var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ];
+    while (++i < n) path.push("V", (p = points[i])[1], "H", p[0]);
+    return path.join("");
+  }
+  function d3_svg_lineStepAfter(points) {
+    var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ];
+    while (++i < n) path.push("H", (p = points[i])[0], "V", p[1]);
+    return path.join("");
+  }
+  function d3_svg_lineCardinalOpen(points, tension) {
+    return points.length < 4 ? d3_svg_lineLinear(points) : points[1] + d3_svg_lineHermite(points.slice(1, -1), d3_svg_lineCardinalTangents(points, tension));
+  }
+  function d3_svg_lineCardinalClosed(points, tension) {
+    return points.length < 3 ? d3_svg_lineLinearClosed(points) : points[0] + d3_svg_lineHermite((points.push(points[0]), 
+    points), d3_svg_lineCardinalTangents([ points[points.length - 2] ].concat(points, [ points[1] ]), tension));
+  }
+  function d3_svg_lineCardinal(points, tension) {
+    return points.length < 3 ? d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite(points, d3_svg_lineCardinalTangents(points, tension));
+  }
+  function d3_svg_lineHermite(points, tangents) {
+    if (tangents.length < 1 || points.length != tangents.length && points.length != tangents.length + 2) {
+      return d3_svg_lineLinear(points);
+    }
+    var quad = points.length != tangents.length, path = "", p0 = points[0], p = points[1], t0 = tangents[0], t = t0, pi = 1;
+    if (quad) {
+      path += "Q" + (p[0] - t0[0] * 2 / 3) + "," + (p[1] - t0[1] * 2 / 3) + "," + p[0] + "," + p[1];
+      p0 = points[1];
+      pi = 2;
+    }
+    if (tangents.length > 1) {
+      t = tangents[1];
+      p = points[pi];
+      pi++;
+      path += "C" + (p0[0] + t0[0]) + "," + (p0[1] + t0[1]) + "," + (p[0] - t[0]) + "," + (p[1] - t[1]) + "," + p[0] + "," + p[1];
+      for (var i = 2; i < tangents.length; i++, pi++) {
+        p = points[pi];
+        t = tangents[i];
+        path += "S" + (p[0] - t[0]) + "," + (p[1] - t[1]) + "," + p[0] + "," + p[1];
+      }
+    }
+    if (quad) {
+      var lp = points[pi];
+      path += "Q" + (p[0] + t[0] * 2 / 3) + "," + (p[1] + t[1] * 2 / 3) + "," + lp[0] + "," + lp[1];
+    }
+    return path;
+  }
+  function d3_svg_lineCardinalTangents(points, tension) {
+    var tangents = [], a = (1 - tension) / 2, p0, p1 = points[0], p2 = points[1], i = 1, n = points.length;
+    while (++i < n) {
+      p0 = p1;
+      p1 = p2;
+      p2 = points[i];
+      tangents.push([ a * (p2[0] - p0[0]), a * (p2[1] - p0[1]) ]);
+    }
+    return tangents;
+  }
+  function d3_svg_lineBasis(points) {
+    if (points.length < 3) return d3_svg_lineLinear(points);
+    var i = 1, n = points.length, pi = points[0], x0 = pi[0], y0 = pi[1], px = [ x0, x0, x0, (pi = points[1])[0] ], py = [ y0, y0, y0, pi[1] ], path = [ x0, ",", y0, "L", d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, py) ];
+    points.push(points[n - 1]);
+    while (++i <= n) {
+      pi = points[i];
+      px.shift();
+      px.push(pi[0]);
+      py.shift();
+      py.push(pi[1]);
+      d3_svg_lineBasisBezier(path, px, py);
+    }
+    points.pop();
+    path.push("L", pi);
+    return path.join("");
+  }
+  function d3_svg_lineBasisOpen(points) {
+    if (points.length < 4) return d3_svg_lineLinear(points);
+    var path = [], i = -1, n = points.length, pi, px = [ 0 ], py = [ 0 ];
+    while (++i < 3) {
+      pi = points[i];
+      px.push(pi[0]);
+      py.push(pi[1]);
+    }
+    path.push(d3_svg_lineDot4(d3_svg_lineBasisBezier3, px) + "," + d3_svg_lineDot4(d3_svg_lineBasisBezier3, py));
+    --i;
+    while (++i < n) {
+      pi = points[i];
+      px.shift();
+      px.push(pi[0]);
+      py.shift();
+      py.push(pi[1]);
+      d3_svg_lineBasisBezier(path, px, py);
+    }
+    return path.join("");
+  }
+  function d3_svg_lineBasisClosed(points) {
+    var path, i = -1, n = points.length, m = n + 4, pi, px = [], py = [];
+    while (++i < 4) {
+      pi = points[i % n];
+      px.push(pi[0]);
+      py.push(pi[1]);
+    }
+    path = [ d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, py) ];
+    --i;
+    while (++i < m) {
+      pi = points[i % n];
+      px.shift();
+      px.push(pi[0]);
+      py.shift();
+      py.push(pi[1]);
+      d3_svg_lineBasisBezier(path, px, py);
+    }
+    return path.join("");
+  }
+  function d3_svg_lineBundle(points, tension) {
+    var n = points.length - 1;
+    if (n) {
+      var x0 = points[0][0], y0 = points[0][1], dx = points[n][0] - x0, dy = points[n][1] - y0, i = -1, p, t;
+      while (++i <= n) {
+        p = points[i];
+        t = i / n;
+        p[0] = tension * p[0] + (1 - tension) * (x0 + t * dx);
+        p[1] = tension * p[1] + (1 - tension) * (y0 + t * dy);
+      }
+    }
+    return d3_svg_lineBasis(points);
+  }
+  function d3_svg_lineDot4(a, b) {
+    return a[0] * b[0] + a[1] * b[1] + a[2] * b[2] + a[3] * b[3];
+  }
+  var d3_svg_lineBasisBezier1 = [ 0, 2 / 3, 1 / 3, 0 ], d3_svg_lineBasisBezier2 = [ 0, 1 / 3, 2 / 3, 0 ], d3_svg_lineBasisBezier3 = [ 0, 1 / 6, 2 / 3, 1 / 6 ];
+  function d3_svg_lineBasisBezier(path, x, y) {
+    path.push("C", d3_svg_lineDot4(d3_svg_lineBasisBezier1, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier1, y), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, y), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, y));
+  }
+  function d3_svg_lineSlope(p0, p1) {
+    return (p1[1] - p0[1]) / (p1[0] - p0[0]);
+  }
+  function d3_svg_lineFiniteDifferences(points) {
+    var i = 0, j = points.length - 1, m = [], p0 = points[0], p1 = points[1], d = m[0] = d3_svg_lineSlope(p0, p1);
+    while (++i < j) {
+      m[i] = (d + (d = d3_svg_lineSlope(p0 = p1, p1 = points[i + 1]))) / 2;
+    }
+    m[i] = d;
+    return m;
+  }
+  function d3_svg_lineMonotoneTangents(points) {
+    var tangents = [], d, a, b, s, m = d3_svg_lineFiniteDifferences(points), i = -1, j = points.length - 1;
+    while (++i < j) {
+      d = d3_svg_lineSlope(points[i], points[i + 1]);
+      if (abs(d) < ε) {
+        m[i] = m[i + 1] = 0;
+      } else {
+        a = m[i] / d;
+        b = m[i + 1] / d;
+        s = a * a + b * b;
+        if (s > 9) {
+          s = d * 3 / Math.sqrt(s);
+          m[i] = s * a;
+          m[i + 1] = s * b;
+        }
+      }
+    }
+    i = -1;
+    while (++i <= j) {
+      s = (points[Math.min(j, i + 1)][0] - points[Math.max(0, i - 1)][0]) / (6 * (1 + m[i] * m[i]));
+      tangents.push([ s || 0, m[i] * s || 0 ]);
+    }
+    return tangents;
+  }
+  function d3_svg_lineMonotone(points) {
+    return points.length < 3 ? d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite(points, d3_svg_lineMonotoneTangents(points));
+  }
+  d3.svg.line.radial = function() {
+    var line = d3_svg_line(d3_svg_lineRadial);
+    line.radius = line.x, delete line.x;
+    line.angle = line.y, delete line.y;
+    return line;
+  };
+  function d3_svg_lineRadial(points) {
+    var point, i = -1, n = points.length, r, a;
+    while (++i < n) {
+      point = points[i];
+      r = point[0];
+      a = point[1] - halfπ;
+      point[0] = r * Math.cos(a);
+      point[1] = r * Math.sin(a);
+    }
+    return points;
+  }
+  function d3_svg_area(projection) {
+    var x0 = d3_geom_pointX, x1 = d3_geom_pointX, y0 = 0, y1 = d3_geom_pointY, defined = d3_true, interpolate = d3_svg_lineLinear, interpolateKey = interpolate.key, interpolateReverse = interpolate, L = "L", tension = .7;
+    function area(data) {
+      var segments = [], points0 = [], points1 = [], i = -1, n = data.length, d, fx0 = d3_functor(x0), fy0 = d3_functor(y0), fx1 = x0 === x1 ? function() {
+        return x;
+      } : d3_functor(x1), fy1 = y0 === y1 ? function() {
+        return y;
+      } : d3_functor(y1), x, y;
+      function segment() {
+        segments.push("M", interpolate(projection(points1), tension), L, interpolateReverse(projection(points0.reverse()), tension), "Z");
+      }
+      while (++i < n) {
+        if (defined.call(this, d = data[i], i)) {
+          points0.push([ x = +fx0.call(this, d, i), y = +fy0.call(this, d, i) ]);
+          points1.push([ +fx1.call(this, d, i), +fy1.call(this, d, i) ]);
+        } else if (points0.length) {
+          segment();
+          points0 = [];
+          points1 = [];
+        }
+      }
+      if (points0.length) segment();
+      return segments.length ? segments.join("") : null;
+    }
+    area.x = function(_) {
+      if (!arguments.length) return x1;
+      x0 = x1 = _;
+      return area;
+    };
+    area.x0 = function(_) {
+      if (!arguments.length) return x0;
+      x0 = _;
+      return area;
+    };
+    area.x1 = function(_) {
+      if (!arguments.length) return x1;
+      x1 = _;
+      return area;
+    };
+    area.y = function(_) {
+      if (!arguments.length) return y1;
+      y0 = y1 = _;
+      return area;
+    };
+    area.y0 = function(_) {
+      if (!arguments.length) return y0;
+      y0 = _;
+      return area;
+    };
+    area.y1 = function(_) {
+      if (!arguments.length) return y1;
+      y1 = _;
+      return area;
+    };
+    area.defined = function(_) {
+      if (!arguments.length) return defined;
+      defined = _;
+      return area;
+    };
+    area.interpolate = function(_) {
+      if (!arguments.length) return interpolateKey;
+      if (typeof _ === "function") interpolateKey = interpolate = _; else interpolateKey = (interpolate = d3_svg_lineInterpolators.get(_) || d3_svg_lineLinear).key;
+      interpolateReverse = interpolate.reverse || interpolate;
+      L = interpolate.closed ? "M" : "L";
+      return area;
+    };
+    area.tension = function(_) {
+      if (!arguments.length) return tension;
+      tension = _;
+      return area;
+    };
+    return area;
+  }
+  d3_svg_lineStepBefore.reverse = d3_svg_lineStepAfter;
+  d3_svg_lineStepAfter.reverse = d3_svg_lineStepBefore;
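+  // d3.svg.area is the filled counterpart of d3.svg.line: it traces the
+  // (x1, y1) topline forward, then the (x0, y0) baseline in reverse, and
+  // closes the shape; the .reverse links above let step interpolators flip
+  // direction on the return path. Usage sketch:
+  //   var area = d3.svg.area().x(function(d) { return d.x; })
+  //       .y0(200).y1(function(d) { return d.y; });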
+  d3.svg.area = function() {
+    return d3_svg_area(d3_identity);
+  };
+  d3.svg.area.radial = function() {
+    var area = d3_svg_area(d3_svg_lineRadial);
+    area.radius = area.x, delete area.x;
+    area.innerRadius = area.x0, delete area.x0;
+    area.outerRadius = area.x1, delete area.x1;
+    area.angle = area.y, delete area.y;
+    area.startAngle = area.y0, delete area.y0;
+    area.endAngle = area.y1, delete area.y1;
+    return area;
+  };
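+  // d3.svg.chord draws the ribbon joining two arc subgroups (as produced by
+  // d3.layout.chord): an arc along the source group, a quadratic curve through
+  // the origin to the target, an arc along the target, and a curve back.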
+  d3.svg.chord = function() {
+    var source = d3_source, target = d3_target, radius = d3_svg_chordRadius, startAngle = d3_svg_arcStartAngle, endAngle = d3_svg_arcEndAngle;
+    function chord(d, i) {
+      var s = subgroup(this, source, d, i), t = subgroup(this, target, d, i);
+      return "M" + s.p0 + arc(s.r, s.p1, s.a1 - s.a0) + (equals(s, t) ? curve(s.r, s.p1, s.r, s.p0) : curve(s.r, s.p1, t.r, t.p0) + arc(t.r, t.p1, t.a1 - t.a0) + curve(t.r, t.p1, s.r, s.p0)) + "Z";
+    }
+    function subgroup(self, f, d, i) {
+      var subgroup = f.call(self, d, i), r = radius.call(self, subgroup, i), a0 = startAngle.call(self, subgroup, i) - halfπ, a1 = endAngle.call(self, subgroup, i) - halfπ;
+      return {
+        r: r,
+        a0: a0,
+        a1: a1,
+        p0: [ r * Math.cos(a0), r * Math.sin(a0) ],
+        p1: [ r * Math.cos(a1), r * Math.sin(a1) ]
+      };
+    }
+    function equals(a, b) {
+      return a.a0 == b.a0 && a.a1 == b.a1;
+    }
+    function arc(r, p, a) {
+      return "A" + r + "," + r + " 0 " + +(a > π) + ",1 " + p;
+    }
+    function curve(r0, p0, r1, p1) {
+      return "Q 0,0 " + p1;
+    }
+    chord.radius = function(v) {
+      if (!arguments.length) return radius;
+      radius = d3_functor(v);
+      return chord;
+    };
+    chord.source = function(v) {
+      if (!arguments.length) return source;
+      source = d3_functor(v);
+      return chord;
+    };
+    chord.target = function(v) {
+      if (!arguments.length) return target;
+      target = d3_functor(v);
+      return chord;
+    };
+    chord.startAngle = function(v) {
+      if (!arguments.length) return startAngle;
+      startAngle = d3_functor(v);
+      return chord;
+    };
+    chord.endAngle = function(v) {
+      if (!arguments.length) return endAngle;
+      endAngle = d3_functor(v);
+      return chord;
+    };
+    return chord;
+  };
+  function d3_svg_chordRadius(d) {
+    return d.radius;
+  }
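+  // d3.svg.diagonal draws a cubic Bezier "elbow" from source to target,
+  // bending at the vertical midpoint; the radial variant reinterprets the
+  // projected coordinates as (radius, angle). Usage sketch:
+  //   var diagonal = d3.svg.diagonal();
+  //   diagonal({ source: { x: 0, y: 0 }, target: { x: 100, y: 80 } });
+  //   // "M0,0C0,40 100,40 100,80"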
+  d3.svg.diagonal = function() {
+    var source = d3_source, target = d3_target, projection = d3_svg_diagonalProjection;
+    function diagonal(d, i) {
+      var p0 = source.call(this, d, i), p3 = target.call(this, d, i), m = (p0.y + p3.y) / 2, p = [ p0, {
+        x: p0.x,
+        y: m
+      }, {
+        x: p3.x,
+        y: m
+      }, p3 ];
+      p = p.map(projection);
+      return "M" + p[0] + "C" + p[1] + " " + p[2] + " " + p[3];
+    }
+    diagonal.source = function(x) {
+      if (!arguments.length) return source;
+      source = d3_functor(x);
+      return diagonal;
+    };
+    diagonal.target = function(x) {
+      if (!arguments.length) return target;
+      target = d3_functor(x);
+      return diagonal;
+    };
+    diagonal.projection = function(x) {
+      if (!arguments.length) return projection;
+      projection = x;
+      return diagonal;
+    };
+    return diagonal;
+  };
+  function d3_svg_diagonalProjection(d) {
+    return [ d.x, d.y ];
+  }
+  d3.svg.diagonal.radial = function() {
+    var diagonal = d3.svg.diagonal(), projection = d3_svg_diagonalProjection, projection_ = diagonal.projection;
+    diagonal.projection = function(x) {
+      return arguments.length ? projection_(d3_svg_diagonalRadialProjection(projection = x)) : projection;
+    };
+    return diagonal;
+  };
+  function d3_svg_diagonalRadialProjection(projection) {
+    return function() {
+      var d = projection.apply(this, arguments), r = d[0], a = d[1] - halfπ;
+      return [ r * Math.cos(a), r * Math.sin(a) ];
+    };
+  }
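+  // d3.svg.symbol generates small glyph paths (the types listed in
+  // d3.svg.symbolTypes), sized by area in square pixels. Usage sketch
+  // (`svg` is an illustrative selection):
+  //   var symbol = d3.svg.symbol().type("triangle-up").size(64);
+  //   svg.append("path").attr("d", symbol);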
+  d3.svg.symbol = function() {
+    var type = d3_svg_symbolType, size = d3_svg_symbolSize;
+    function symbol(d, i) {
+      return (d3_svg_symbols.get(type.call(this, d, i)) || d3_svg_symbolCircle)(size.call(this, d, i));
+    }
+    symbol.type = function(x) {
+      if (!arguments.length) return type;
+      type = d3_functor(x);
+      return symbol;
+    };
+    symbol.size = function(x) {
+      if (!arguments.length) return size;
+      size = d3_functor(x);
+      return symbol;
+    };
+    return symbol;
+  };
+  function d3_svg_symbolSize() {
+    return 64;
+  }
+  function d3_svg_symbolType() {
+    return "circle";
+  }
+  function d3_svg_symbolCircle(size) {
+    var r = Math.sqrt(size / π);
+    return "M0," + r + "A" + r + "," + r + " 0 1,1 0," + -r + "A" + r + "," + r + " 0 1,1 0," + r + "Z";
+  }
+  var d3_svg_symbols = d3.map({
+    circle: d3_svg_symbolCircle,
+    cross: function(size) {
+      var r = Math.sqrt(size / 5) / 2;
+      return "M" + -3 * r + "," + -r + "H" + -r + "V" + -3 * r + "H" + r + "V" + -r + "H" + 3 * r + "V" + r + "H" + r + "V" + 3 * r + "H" + -r + "V" + r + "H" + -3 * r + "Z";
+    },
+    diamond: function(size) {
+      var ry = Math.sqrt(size / (2 * d3_svg_symbolTan30)), rx = ry * d3_svg_symbolTan30;
+      return "M0," + -ry + "L" + rx + ",0" + " 0," + ry + " " + -rx + ",0" + "Z";
+    },
+    square: function(size) {
+      var r = Math.sqrt(size) / 2;
+      return "M" + -r + "," + -r + "L" + r + "," + -r + " " + r + "," + r + " " + -r + "," + r + "Z";
+    },
+    "triangle-down": function(size) {
+      var rx = Math.sqrt(size / d3_svg_symbolSqrt3), ry = rx * d3_svg_symbolSqrt3 / 2;
+      return "M0," + ry + "L" + rx + "," + -ry + " " + -rx + "," + -ry + "Z";
+    },
+    "triangle-up": function(size) {
+      var rx = Math.sqrt(size / d3_svg_symbolSqrt3), ry = rx * d3_svg_symbolSqrt3 / 2;
+      return "M0," + -ry + "L" + rx + "," + ry + " " + -rx + "," + ry + "Z";
+    }
+  });
+  d3.svg.symbolTypes = d3_svg_symbols.keys();
+  var d3_svg_symbolSqrt3 = Math.sqrt(3), d3_svg_symbolTan30 = Math.tan(30 * d3_radians);
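+  // The remainder wires transitions into selections: selection.transition()
+  // schedules per-node state (time, ease, delay, duration) under a
+  // namespace/id, and the d3_transitionPrototype methods below mirror the
+  // selection API (select, selectAll, filter, attr, style, text, ...) but
+  // register interpolating tweens instead of setting values directly.
+  // Usage sketch:
+  //   d3.select("rect").transition().duration(500).attr("width", 300);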
+  d3_selectionPrototype.transition = function(name) {
+    var id = d3_transitionInheritId || ++d3_transitionId, ns = d3_transitionNamespace(name), subgroups = [], subgroup, node, transition = d3_transitionInherit || {
+      time: Date.now(),
+      ease: d3_ease_cubicInOut,
+      delay: 0,
+      duration: 250
+    };
+    for (var j = -1, m = this.length; ++j < m; ) {
+      subgroups.push(subgroup = []);
+      for (var group = this[j], i = -1, n = group.length; ++i < n; ) {
+        if (node = group[i]) d3_transitionNode(node, i, ns, id, transition);
+        subgroup.push(node);
+      }
+    }
+    return d3_transition(subgroups, ns, id);
+  };
+  d3_selectionPrototype.interrupt = function(name) {
+    return this.each(name == null ? d3_selection_interrupt : d3_selection_interruptNS(d3_transitionNamespace(name)));
+  };
+  var d3_selection_interrupt = d3_selection_interruptNS(d3_transitionNamespace());
+  function d3_selection_interruptNS(ns) {
+    return function() {
+      var lock, activeId, active;
+      if ((lock = this[ns]) && (active = lock[activeId = lock.active])) {
+        active.timer.c = null;
+        active.timer.t = NaN;
+        if (--lock.count) delete lock[activeId]; else delete this[ns];
+        lock.active += .5;
+        active.event && active.event.interrupt.call(this, this.__data__, active.index);
+      }
+    };
+  }
+  function d3_transition(groups, ns, id) {
+    d3_subclass(groups, d3_transitionPrototype);
+    groups.namespace = ns;
+    groups.id = id;
+    return groups;
+  }
+  var d3_transitionPrototype = [], d3_transitionId = 0, d3_transitionInheritId, d3_transitionInherit;
+  d3_transitionPrototype.call = d3_selectionPrototype.call;
+  d3_transitionPrototype.empty = d3_selectionPrototype.empty;
+  d3_transitionPrototype.node = d3_selectionPrototype.node;
+  d3_transitionPrototype.size = d3_selectionPrototype.size;
+  d3.transition = function(selection, name) {
+    return selection && selection.transition ? d3_transitionInheritId ? selection.transition(name) : selection : d3.selection().transition(selection);
+  };
+  d3.transition.prototype = d3_transitionPrototype;
+  d3_transitionPrototype.select = function(selector) {
+    var id = this.id, ns = this.namespace, subgroups = [], subgroup, subnode, node;
+    selector = d3_selection_selector(selector);
+    for (var j = -1, m = this.length; ++j < m; ) {
+      subgroups.push(subgroup = []);
+      for (var group = this[j], i = -1, n = group.length; ++i < n; ) {
+        if ((node = group[i]) && (subnode = selector.call(node, node.__data__, i, j))) {
+          if ("__data__" in node) subnode.__data__ = node.__data__;
+          d3_transitionNode(subnode, i, ns, id, node[ns][id]);
+          subgroup.push(subnode);
+        } else {
+          subgroup.push(null);
+        }
+      }
+    }
+    return d3_transition(subgroups, ns, id);
+  };
+  d3_transitionPrototype.selectAll = function(selector) {
+    var id = this.id, ns = this.namespace, subgroups = [], subgroup, subnodes, node, subnode, transition;
+    selector = d3_selection_selectorAll(selector);
+    for (var j = -1, m = this.length; ++j < m; ) {
+      for (var group = this[j], i = -1, n = group.length; ++i < n; ) {
+        if (node = group[i]) {
+          transition = node[ns][id];
+          subnodes = selector.call(node, node.__data__, i, j);
+          subgroups.push(subgroup = []);
+          for (var k = -1, o = subnodes.length; ++k < o; ) {
+            if (subnode = subnodes[k]) d3_transitionNode(subnode, k, ns, id, transition);
+            subgroup.push(subnode);
+          }
+        }
+      }
+    }
+    return d3_transition(subgroups, ns, id);
+  };
+  d3_transitionPrototype.filter = function(filter) {
+    var subgroups = [], subgroup, group, node;
+    if (typeof filter !== "function") filter = d3_selection_filter(filter);
+    for (var j = 0, m = this.length; j < m; j++) {
+      subgroups.push(subgroup = []);
+      for (var group = this[j], i = 0, n = group.length; i < n; i++) {
+        if ((node = group[i]) && filter.call(node, node.__data__, i, j)) {
+          subgroup.push(node);
+        }
+      }
+    }
+    return d3_transition(subgroups, this.namespace, this.id);
+  };
+  d3_transitionPrototype.tween = function(name, tween) {
+    var id = this.id, ns = this.namespace;
+    if (arguments.length < 2) return this.node()[ns][id].tween.get(name);
+    return d3_selection_each(this, tween == null ? function(node) {
+      node[ns][id].tween.remove(name);
+    } : function(node) {
+      node[ns][id].tween.set(name, tween);
+    });
+  };
+  function d3_transition_tween(groups, name, value, tween) {
+    var id = groups.id, ns = groups.namespace;
+    return d3_selection_each(groups, typeof value === "function" ? function(node, i, j) {
+      node[ns][id].tween.set(name, tween(value.call(node, node.__data__, i, j)));
+    } : (value = tween(value), function(node) {
+      node[ns][id].tween.set(name, value);
+    }));
+  }
+  d3_transitionPrototype.attr = function(nameNS, value) {
+    if (arguments.length < 2) {
+      for (value in nameNS) this.attr(value, nameNS[value]);
+      return this;
+    }
+    var interpolate = nameNS == "transform" ? d3_interpolateTransform : d3_interpolate, name = d3.ns.qualify(nameNS);
+    function attrNull() {
+      this.removeAttribute(name);
+    }
+    function attrNullNS() {
+      this.removeAttributeNS(name.space, name.local);
+    }
+    function attrTween(b) {
+      return b == null ? attrNull : (b += "", function() {
+        var a = this.getAttribute(name), i;
+        return a !== b && (i = interpolate(a, b), function(t) {
+          this.setAttribute(name, i(t));
+        });
+      });
+    }
+    function attrTweenNS(b) {
+      return b == null ? attrNullNS : (b += "", function() {
+        var a = this.getAttributeNS(name.space, name.local), i;
+        return a !== b && (i = interpolate(a, b), function(t) {
+          this.setAttributeNS(name.space, name.local, i(t));
+        });
+      });
+    }
+    return d3_transition_tween(this, "attr." + nameNS, value, name.local ? attrTweenNS : attrTween);
+  };
+  d3_transitionPrototype.attrTween = function(nameNS, tween) {
+    var name = d3.ns.qualify(nameNS);
+    function attrTween(d, i) {
+      var f = tween.call(this, d, i, this.getAttribute(name));
+      return f && function(t) {
+        this.setAttribute(name, f(t));
+      };
+    }
+    function attrTweenNS(d, i) {
+      var f = tween.call(this, d, i, this.getAttributeNS(name.space, name.local));
+      return f && function(t) {
+        this.setAttributeNS(name.space, name.local, f(t));
+      };
+    }
+    return this.tween("attr." + nameNS, name.local ? attrTweenNS : attrTween);
+  };
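+  // Illustrative sketch: attrTween hands the factory the datum, index and the
+  // attribute's current value, and expects an interpolator back.
+  //   d3.select("circle").transition().attrTween("r", function(d, i, a) {
+  //     return d3.interpolate(a, 40);
+  //   });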
+  d3_transitionPrototype.style = function(name, value, priority) {
+    var n = arguments.length;
+    if (n < 3) {
+      if (typeof name !== "string") {
+        if (n < 2) value = "";
+        for (priority in name) this.style(priority, name[priority], value);
+        return this;
+      }
+      priority = "";
+    }
+    function styleNull() {
+      this.style.removeProperty(name);
+    }
+    function styleString(b) {
+      return b == null ? styleNull : (b += "", function() {
+        var a = d3_window(this).getComputedStyle(this, null).getPropertyValue(name), i;
+        return a !== b && (i = d3_interpolate(a, b), function(t) {
+          this.style.setProperty(name, i(t), priority);
+        });
+      });
+    }
+    return d3_transition_tween(this, "style." + name, value, styleString);
+  };
+  d3_transitionPrototype.styleTween = function(name, tween, priority) {
+    if (arguments.length < 3) priority = "";
+    function styleTween(d, i) {
+      var f = tween.call(this, d, i, d3_window(this).getComputedStyle(this, null).getPropertyValue(name));
+      return f && function(t) {
+        this.style.setProperty(name, f(t), priority);
+      };
+    }
+    return this.tween("style." + name, styleTween);
+  };
+  d3_transitionPrototype.text = function(value) {
+    return d3_transition_tween(this, "text", value, d3_transition_text);
+  };
+  function d3_transition_text(b) {
+    if (b == null) b = "";
+    return function() {
+      this.textContent = b;
+    };
+  }
+  d3_transitionPrototype.remove = function() {
+    var ns = this.namespace;
+    return this.each("end.transition", function() {
+      var p;
+      if (this[ns].count < 2 && (p = this.parentNode)) p.removeChild(this);
+    });
+  };
+  d3_transitionPrototype.ease = function(value) {
+    var id = this.id, ns = this.namespace;
+    if (arguments.length < 1) return this.node()[ns][id].ease;
+    if (typeof value !== "function") value = d3.ease.apply(d3, arguments);
+    return d3_selection_each(this, function(node) {
+      node[ns][id].ease = value;
+    });
+  };
+  d3_transitionPrototype.delay = function(value) {
+    var id = this.id, ns = this.namespace;
+    if (arguments.length < 1) return this.node()[ns][id].delay;
+    return d3_selection_each(this, typeof value === "function" ? function(node, i, j) {
+      node[ns][id].delay = +value.call(node, node.__data__, i, j);
+    } : (value = +value, function(node) {
+      node[ns][id].delay = value;
+    }));
+  };
+  d3_transitionPrototype.duration = function(value) {
+    var id = this.id, ns = this.namespace;
+    if (arguments.length < 1) return this.node()[ns][id].duration;
+    return d3_selection_each(this, typeof value === "function" ? function(node, i, j) {
+      node[ns][id].duration = Math.max(1, value.call(node, node.__data__, i, j));
+    } : (value = Math.max(1, value), function(node) {
+      node[ns][id].duration = value;
+    }));
+  };
+  d3_transitionPrototype.each = function(type, listener) {
+    var id = this.id, ns = this.namespace;
+    if (arguments.length < 2) {
+      var inherit = d3_transitionInherit, inheritId = d3_transitionInheritId;
+      try {
+        d3_transitionInheritId = id;
+        d3_selection_each(this, function(node, i, j) {
+          d3_transitionInherit = node[ns][id];
+          type.call(node, node.__data__, i, j);
+        });
+      } finally {
+        d3_transitionInherit = inherit;
+        d3_transitionInheritId = inheritId;
+      }
+    } else {
+      d3_selection_each(this, function(node) {
+        var transition = node[ns][id];
+        (transition.event || (transition.event = d3.dispatch("start", "end", "interrupt"))).on(type, listener);
+      });
+    }
+    return this;
+  };
+  d3_transitionPrototype.transition = function() {
+    var id0 = this.id, id1 = ++d3_transitionId, ns = this.namespace, subgroups = [], subgroup, group, node, transition;
+    for (var j = 0, m = this.length; j < m; j++) {
+      subgroups.push(subgroup = []);
+      for (var group = this[j], i = 0, n = group.length; i < n; i++) {
+        if (node = group[i]) {
+          transition = node[ns][id0];
+          d3_transitionNode(node, i, ns, id1, {
+            time: transition.time,
+            ease: transition.ease,
+            delay: transition.delay + transition.duration,
+            duration: transition.duration
+          });
+        }
+        subgroup.push(node);
+      }
+    }
+    return d3_transition(subgroups, ns, id1);
+  };
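+  // Illustrative sketch: calling .transition() on a transition queues a follow-on
+  // transition that starts when the first ends (delay + duration, as above).
+  //   d3.select("rect").transition().duration(500).attr("x", 100)
+  //     .transition().attr("y", 50);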
+  function d3_transitionNamespace(name) {
+    return name == null ? "__transition__" : "__transition_" + name + "__";
+  }
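+  // Each node stores its transition state under node[ns][id]; the shared lock object
+  // counts scheduled transitions (count) and records the running one (active), which
+  // lets a newly started transition interrupt and cancel older ones below.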
+  function d3_transitionNode(node, i, ns, id, inherit) {
+    var lock = node[ns] || (node[ns] = {
+      active: 0,
+      count: 0
+    }), transition = lock[id], time, timer, duration, ease, tweens;
+    function schedule(elapsed) {
+      var delay = transition.delay;
+      timer.t = delay + time;
+      if (delay <= elapsed) return start(elapsed - delay);
+      timer.c = start;
+    }
+    function start(elapsed) {
+      var activeId = lock.active, active = lock[activeId];
+      if (active) {
+        active.timer.c = null;
+        active.timer.t = NaN;
+        --lock.count;
+        delete lock[activeId];
+        active.event && active.event.interrupt.call(node, node.__data__, active.index);
+      }
+      for (var cancelId in lock) {
+        if (+cancelId < id) {
+          var cancel = lock[cancelId];
+          cancel.timer.c = null;
+          cancel.timer.t = NaN;
+          --lock.count;
+          delete lock[cancelId];
+        }
+      }
+      timer.c = tick;
+      d3_timer(function() {
+        if (timer.c && tick(elapsed || 1)) {
+          timer.c = null;
+          timer.t = NaN;
+        }
+        return 1;
+      }, 0, time);
+      lock.active = id;
+      transition.event && transition.event.start.call(node, node.__data__, i);
+      tweens = [];
+      transition.tween.forEach(function(key, value) {
+        if (value = value.call(node, node.__data__, i)) {
+          tweens.push(value);
+        }
+      });
+      ease = transition.ease;
+      duration = transition.duration;
+    }
+    function tick(elapsed) {
+      var t = elapsed / duration, e = ease(t), n = tweens.length;
+      while (n > 0) {
+        tweens[--n].call(node, e);
+      }
+      if (t >= 1) {
+        transition.event && transition.event.end.call(node, node.__data__, i);
+        if (--lock.count) delete lock[id]; else delete node[ns];
+        return 1;
+      }
+    }
+    if (!transition) {
+      time = inherit.time;
+      timer = d3_timer(schedule, 0, time);
+      transition = lock[id] = {
+        tween: new d3_Map(),
+        time: time,
+        timer: timer,
+        delay: inherit.delay,
+        duration: inherit.duration,
+        ease: inherit.ease,
+        index: i
+      };
+      inherit = null;
+      ++lock.count;
+    }
+  }
+  d3.svg.axis = function() {
+    var scale = d3.scale.linear(), orient = d3_svg_axisDefaultOrient, innerTickSize = 6, outerTickSize = 6, tickPadding = 3, tickArguments_ = [ 10 ], tickValues = null, tickFormat_;
+    function axis(g) {
+      g.each(function() {
+        var g = d3.select(this);
+        var scale0 = this.__chart__ || scale, scale1 = this.__chart__ = scale.copy();
+        var ticks = tickValues == null ? scale1.ticks ? scale1.ticks.apply(scale1, tickArguments_) : scale1.domain() : tickValues, tickFormat = tickFormat_ == null ? scale1.tickFormat ? scale1.tickFormat.apply(scale1, tickArguments_) : d3_identity : tickFormat_, tick = g.selectAll(".tick").data(ticks, scale1), tickEnter = tick.enter().insert("g", ".domain").attr("class", "tick").style("opacity", ε), tickExit = d3.transition(tick.exit()).style("opacity", ε).remove(), tickUpdate = d3.transition(tick.order()).style("opacity", 1), tickSpacing = Math.max(innerTickSize, 0) + tickPadding, tickTransform;
+        var range = d3_scaleRange(scale1), path = g.selectAll(".domain").data([ 0 ]), pathUpdate = (path.enter().append("path").attr("class", "domain"), 
+        d3.transition(path));
+        tickEnter.append("line");
+        tickEnter.append("text");
+        var lineEnter = tickEnter.select("line"), lineUpdate = tickUpdate.select("line"), text = tick.select("text").text(tickFormat), textEnter = tickEnter.select("text"), textUpdate = tickUpdate.select("text"), sign = orient === "top" || orient === "left" ? -1 : 1, x1, x2, y1, y2;
+        if (orient === "bottom" || orient === "top") {
+          tickTransform = d3_svg_axisX, x1 = "x", y1 = "y", x2 = "x2", y2 = "y2";
+          text.attr("dy", sign < 0 ? "0em" : ".71em").style("text-anchor", "middle");
+          pathUpdate.attr("d", "M" + range[0] + "," + sign * outerTickSize + "V0H" + range[1] + "V" + sign * outerTickSize);
+        } else {
+          tickTransform = d3_svg_axisY, x1 = "y", y1 = "x", x2 = "y2", y2 = "x2";
+          text.attr("dy", ".32em").style("text-anchor", sign < 0 ? "end" : "start");
+          pathUpdate.attr("d", "M" + sign * outerTickSize + "," + range[0] + "H0V" + range[1] + "H" + sign * outerTickSize);
+        }
+        lineEnter.attr(y2, sign * innerTickSize);
+        textEnter.attr(y1, sign * tickSpacing);
+        lineUpdate.attr(x2, 0).attr(y2, sign * innerTickSize);
+        textUpdate.attr(x1, 0).attr(y1, sign * tickSpacing);
+        if (scale1.rangeBand) {
+          var x = scale1, dx = x.rangeBand() / 2;
+          scale0 = scale1 = function(d) {
+            return x(d) + dx;
+          };
+        } else if (scale0.rangeBand) {
+          scale0 = scale1;
+        } else {
+          tickExit.call(tickTransform, scale1, scale0);
+        }
+        tickEnter.call(tickTransform, scale0, scale1);
+        tickUpdate.call(tickTransform, scale1, scale1);
+      });
+    }
+    axis.scale = function(x) {
+      if (!arguments.length) return scale;
+      scale = x;
+      return axis;
+    };
+    axis.orient = function(x) {
+      if (!arguments.length) return orient;
+      orient = x in d3_svg_axisOrients ? x + "" : d3_svg_axisDefaultOrient;
+      return axis;
+    };
+    axis.ticks = function() {
+      if (!arguments.length) return tickArguments_;
+      tickArguments_ = d3_array(arguments);
+      return axis;
+    };
+    axis.tickValues = function(x) {
+      if (!arguments.length) return tickValues;
+      tickValues = x;
+      return axis;
+    };
+    axis.tickFormat = function(x) {
+      if (!arguments.length) return tickFormat_;
+      tickFormat_ = x;
+      return axis;
+    };
+    axis.tickSize = function(x) {
+      var n = arguments.length;
+      if (!n) return innerTickSize;
+      innerTickSize = +x;
+      outerTickSize = +arguments[n - 1];
+      return axis;
+    };
+    axis.innerTickSize = function(x) {
+      if (!arguments.length) return innerTickSize;
+      innerTickSize = +x;
+      return axis;
+    };
+    axis.outerTickSize = function(x) {
+      if (!arguments.length) return outerTickSize;
+      outerTickSize = +x;
+      return axis;
+    };
+    axis.tickPadding = function(x) {
+      if (!arguments.length) return tickPadding;
+      tickPadding = +x;
+      return axis;
+    };
+    axis.tickSubdivide = function() {
+      return arguments.length && axis;
+    };
+    return axis;
+  };
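+  // Typical usage, a minimal sketch with illustrative names:
+  //   var x = d3.scale.linear().domain([0, 10]).range([0, 400]);
+  //   var xAxis = d3.svg.axis().scale(x).orient("bottom").ticks(5);
+  //   svg.append("g").attr("class", "x axis").call(xAxis);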
+  var d3_svg_axisDefaultOrient = "bottom", d3_svg_axisOrients = {
+    top: 1,
+    right: 1,
+    bottom: 1,
+    left: 1
+  };
+  function d3_svg_axisX(selection, x0, x1) {
+    selection.attr("transform", function(d) {
+      var v0 = x0(d);
+      return "translate(" + (isFinite(v0) ? v0 : x1(d)) + ",0)";
+    });
+  }
+  function d3_svg_axisY(selection, y0, y1) {
+    selection.attr("transform", function(d) {
+      var v0 = y0(d);
+      return "translate(0," + (isFinite(v0) ? v0 : y1(d)) + ")";
+    });
+  }
+  d3.svg.brush = function() {
+    var event = d3_eventDispatch(brush, "brushstart", "brush", "brushend"), x = null, y = null, xExtent = [ 0, 0 ], yExtent = [ 0, 0 ], xExtentDomain, yExtentDomain, xClamp = true, yClamp = true, resizes = d3_svg_brushResizes[0];
+    function brush(g) {
+      g.each(function() {
+        var g = d3.select(this).style("pointer-events", "all").style("-webkit-tap-highlight-color", "rgba(0,0,0,0)").on("mousedown.brush", brushstart).on("touchstart.brush", brushstart);
+        var background = g.selectAll(".background").data([ 0 ]);
+        background.enter().append("rect").attr("class", "background").style("visibility", "hidden").style("cursor", "crosshair");
+        g.selectAll(".extent").data([ 0 ]).enter().append("rect").attr("class", "extent").style("cursor", "move");
+        var resize = g.selectAll(".resize").data(resizes, d3_identity);
+        resize.exit().remove();
+        resize.enter().append("g").attr("class", function(d) {
+          return "resize " + d;
+        }).style("cursor", function(d) {
+          return d3_svg_brushCursor[d];
+        }).append("rect").attr("x", function(d) {
+          return /[ew]$/.test(d) ? -3 : null;
+        }).attr("y", function(d) {
+          return /^[ns]/.test(d) ? -3 : null;
+        }).attr("width", 6).attr("height", 6).style("visibility", "hidden");
+        resize.style("display", brush.empty() ? "none" : null);
+        var gUpdate = d3.transition(g), backgroundUpdate = d3.transition(background), range;
+        if (x) {
+          range = d3_scaleRange(x);
+          backgroundUpdate.attr("x", range[0]).attr("width", range[1] - range[0]);
+          redrawX(gUpdate);
+        }
+        if (y) {
+          range = d3_scaleRange(y);
+          backgroundUpdate.attr("y", range[0]).attr("height", range[1] - range[0]);
+          redrawY(gUpdate);
+        }
+        redraw(gUpdate);
+      });
+    }
+    brush.event = function(g) {
+      g.each(function() {
+        var event_ = event.of(this, arguments), extent1 = {
+          x: xExtent,
+          y: yExtent,
+          i: xExtentDomain,
+          j: yExtentDomain
+        }, extent0 = this.__chart__ || extent1;
+        this.__chart__ = extent1;
+        if (d3_transitionInheritId) {
+          d3.select(this).transition().each("start.brush", function() {
+            xExtentDomain = extent0.i;
+            yExtentDomain = extent0.j;
+            xExtent = extent0.x;
+            yExtent = extent0.y;
+            event_({
+              type: "brushstart"
+            });
+          }).tween("brush:brush", function() {
+            var xi = d3_interpolateArray(xExtent, extent1.x), yi = d3_interpolateArray(yExtent, extent1.y);
+            xExtentDomain = yExtentDomain = null;
+            return function(t) {
+              xExtent = extent1.x = xi(t);
+              yExtent = extent1.y = yi(t);
+              event_({
+                type: "brush",
+                mode: "resize"
+              });
+            };
+          }).each("end.brush", function() {
+            xExtentDomain = extent1.i;
+            yExtentDomain = extent1.j;
+            event_({
+              type: "brush",
+              mode: "resize"
+            });
+            event_({
+              type: "brushend"
+            });
+          });
+        } else {
+          event_({
+            type: "brushstart"
+          });
+          event_({
+            type: "brush",
+            mode: "resize"
+          });
+          event_({
+            type: "brushend"
+          });
+        }
+      });
+    };
+    function redraw(g) {
+      g.selectAll(".resize").attr("transform", function(d) {
+        return "translate(" + xExtent[+/e$/.test(d)] + "," + yExtent[+/^s/.test(d)] + ")";
+      });
+    }
+    function redrawX(g) {
+      g.select(".extent").attr("x", xExtent[0]);
+      g.selectAll(".extent,.n>rect,.s>rect").attr("width", xExtent[1] - xExtent[0]);
+    }
+    function redrawY(g) {
+      g.select(".extent").attr("y", yExtent[0]);
+      g.selectAll(".extent,.e>rect,.w>rect").attr("height", yExtent[1] - yExtent[0]);
+    }
+    function brushstart() {
+      var target = this, eventTarget = d3.select(d3.event.target), event_ = event.of(target, arguments), g = d3.select(target), resizing = eventTarget.datum(), resizingX = !/^(n|s)$/.test(resizing) && x, resizingY = !/^(e|w)$/.test(resizing) && y, dragging = eventTarget.classed("extent"), dragRestore = d3_event_dragSuppress(target), center, origin = d3.mouse(target), offset;
+      var w = d3.select(d3_window(target)).on("keydown.brush", keydown).on("keyup.brush", keyup);
+      if (d3.event.changedTouches) {
+        w.on("touchmove.brush", brushmove).on("touchend.brush", brushend);
+      } else {
+        w.on("mousemove.brush", brushmove).on("mouseup.brush", brushend);
+      }
+      g.interrupt().selectAll("*").interrupt();
+      if (dragging) {
+        origin[0] = xExtent[0] - origin[0];
+        origin[1] = yExtent[0] - origin[1];
+      } else if (resizing) {
+        var ex = +/w$/.test(resizing), ey = +/^n/.test(resizing);
+        offset = [ xExtent[1 - ex] - origin[0], yExtent[1 - ey] - origin[1] ];
+        origin[0] = xExtent[ex];
+        origin[1] = yExtent[ey];
+      } else if (d3.event.altKey) center = origin.slice();
+      g.style("pointer-events", "none").selectAll(".resize").style("display", null);
+      d3.select("body").style("cursor", eventTarget.style("cursor"));
+      event_({
+        type: "brushstart"
+      });
+      brushmove();
+      function keydown() {
+        if (d3.event.keyCode == 32) {
+          if (!dragging) {
+            center = null;
+            origin[0] -= xExtent[1];
+            origin[1] -= yExtent[1];
+            dragging = 2;
+          }
+          d3_eventPreventDefault();
+        }
+      }
+      function keyup() {
+        if (d3.event.keyCode == 32 && dragging == 2) {
+          origin[0] += xExtent[1];
+          origin[1] += yExtent[1];
+          dragging = 0;
+          d3_eventPreventDefault();
+        }
+      }
+      function brushmove() {
+        var point = d3.mouse(target), moved = false;
+        if (offset) {
+          point[0] += offset[0];
+          point[1] += offset[1];
+        }
+        if (!dragging) {
+          if (d3.event.altKey) {
+            if (!center) center = [ (xExtent[0] + xExtent[1]) / 2, (yExtent[0] + yExtent[1]) / 2 ];
+            origin[0] = xExtent[+(point[0] < center[0])];
+            origin[1] = yExtent[+(point[1] < center[1])];
+          } else center = null;
+        }
+        if (resizingX && move1(point, x, 0)) {
+          redrawX(g);
+          moved = true;
+        }
+        if (resizingY && move1(point, y, 1)) {
+          redrawY(g);
+          moved = true;
+        }
+        if (moved) {
+          redraw(g);
+          event_({
+            type: "brush",
+            mode: dragging ? "move" : "resize"
+          });
+        }
+      }
+      function move1(point, scale, i) {
+        var range = d3_scaleRange(scale), r0 = range[0], r1 = range[1], position = origin[i], extent = i ? yExtent : xExtent, size = extent[1] - extent[0], min, max;
+        if (dragging) {
+          r0 -= position;
+          r1 -= size + position;
+        }
+        min = (i ? yClamp : xClamp) ? Math.max(r0, Math.min(r1, point[i])) : point[i];
+        if (dragging) {
+          max = (min += position) + size;
+        } else {
+          if (center) position = Math.max(r0, Math.min(r1, 2 * center[i] - min));
+          if (position < min) {
+            max = min;
+            min = position;
+          } else {
+            max = position;
+          }
+        }
+        if (extent[0] != min || extent[1] != max) {
+          if (i) yExtentDomain = null; else xExtentDomain = null;
+          extent[0] = min;
+          extent[1] = max;
+          return true;
+        }
+      }
+      function brushend() {
+        brushmove();
+        g.style("pointer-events", "all").selectAll(".resize").style("display", brush.empty() ? "none" : null);
+        d3.select("body").style("cursor", null);
+        w.on("mousemove.brush", null).on("mouseup.brush", null).on("touchmove.brush", null).on("touchend.brush", null).on("keydown.brush", null).on("keyup.brush", null);
+        dragRestore();
+        event_({
+          type: "brushend"
+        });
+      }
+    }
+    brush.x = function(z) {
+      if (!arguments.length) return x;
+      x = z;
+      resizes = d3_svg_brushResizes[!x << 1 | !y];
+      return brush;
+    };
+    brush.y = function(z) {
+      if (!arguments.length) return y;
+      y = z;
+      resizes = d3_svg_brushResizes[!x << 1 | !y];
+      return brush;
+    };
+    brush.clamp = function(z) {
+      if (!arguments.length) return x && y ? [ xClamp, yClamp ] : x ? xClamp : y ? yClamp : null;
+      if (x && y) xClamp = !!z[0], yClamp = !!z[1]; else if (x) xClamp = !!z; else if (y) yClamp = !!z;
+      return brush;
+    };
+    brush.extent = function(z) {
+      var x0, x1, y0, y1, t;
+      if (!arguments.length) {
+        if (x) {
+          if (xExtentDomain) {
+            x0 = xExtentDomain[0], x1 = xExtentDomain[1];
+          } else {
+            x0 = xExtent[0], x1 = xExtent[1];
+            if (x.invert) x0 = x.invert(x0), x1 = x.invert(x1);
+            if (x1 < x0) t = x0, x0 = x1, x1 = t;
+          }
+        }
+        if (y) {
+          if (yExtentDomain) {
+            y0 = yExtentDomain[0], y1 = yExtentDomain[1];
+          } else {
+            y0 = yExtent[0], y1 = yExtent[1];
+            if (y.invert) y0 = y.invert(y0), y1 = y.invert(y1);
+            if (y1 < y0) t = y0, y0 = y1, y1 = t;
+          }
+        }
+        return x && y ? [ [ x0, y0 ], [ x1, y1 ] ] : x ? [ x0, x1 ] : y && [ y0, y1 ];
+      }
+      if (x) {
+        x0 = z[0], x1 = z[1];
+        if (y) x0 = x0[0], x1 = x1[0];
+        xExtentDomain = [ x0, x1 ];
+        if (x.invert) x0 = x(x0), x1 = x(x1);
+        if (x1 < x0) t = x0, x0 = x1, x1 = t;
+        if (x0 != xExtent[0] || x1 != xExtent[1]) xExtent = [ x0, x1 ];
+      }
+      if (y) {
+        y0 = z[0], y1 = z[1];
+        if (x) y0 = y0[1], y1 = y1[1];
+        yExtentDomain = [ y0, y1 ];
+        if (y.invert) y0 = y(y0), y1 = y(y1);
+        if (y1 < y0) t = y0, y0 = y1, y1 = t;
+        if (y0 != yExtent[0] || y1 != yExtent[1]) yExtent = [ y0, y1 ];
+      }
+      return brush;
+    };
+    brush.clear = function() {
+      if (!brush.empty()) {
+        xExtent = [ 0, 0 ], yExtent = [ 0, 0 ];
+        xExtentDomain = yExtentDomain = null;
+      }
+      return brush;
+    };
+    brush.empty = function() {
+      return !!x && xExtent[0] == xExtent[1] || !!y && yExtent[0] == yExtent[1];
+    };
+    return d3.rebind(brush, event, "on");
+  };
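+  // Minimal sketch of a one-dimensional brush (names illustrative):
+  //   var brush = d3.svg.brush().x(x).on("brush", function() {
+  //     console.log(brush.extent());
+  //   });
+  //   svg.append("g").attr("class", "brush").call(brush);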
+  var d3_svg_brushCursor = {
+    n: "ns-resize",
+    e: "ew-resize",
+    s: "ns-resize",
+    w: "ew-resize",
+    nw: "nwse-resize",
+    ne: "nesw-resize",
+    se: "nwse-resize",
+    sw: "nesw-resize"
+  };
+  var d3_svg_brushResizes = [ [ "n", "e", "s", "w", "nw", "ne", "se", "sw" ], [ "e", "w" ], [ "n", "s" ], [] ];
+  var d3_time_format = d3_time.format = d3_locale_enUS.timeFormat;
+  var d3_time_formatUtc = d3_time_format.utc;
+  var d3_time_formatIso = d3_time_formatUtc("%Y-%m-%dT%H:%M:%S.%LZ");
+  d3_time_format.iso = Date.prototype.toISOString && +new Date("2000-01-01T00:00:00.000Z") ? d3_time_formatIsoNative : d3_time_formatIso;
+  function d3_time_formatIsoNative(date) {
+    return date.toISOString();
+  }
+  d3_time_formatIsoNative.parse = function(string) {
+    var date = new Date(string);
+    return isNaN(date) ? null : date;
+  };
+  d3_time_formatIsoNative.toString = d3_time_formatIso.toString;
+  d3_time.second = d3_time_interval(function(date) {
+    return new d3_date(Math.floor(date / 1e3) * 1e3);
+  }, function(date, offset) {
+    date.setTime(date.getTime() + Math.floor(offset) * 1e3);
+  }, function(date) {
+    return date.getSeconds();
+  });
+  d3_time.seconds = d3_time.second.range;
+  d3_time.seconds.utc = d3_time.second.utc.range;
+  d3_time.minute = d3_time_interval(function(date) {
+    return new d3_date(Math.floor(date / 6e4) * 6e4);
+  }, function(date, offset) {
+    date.setTime(date.getTime() + Math.floor(offset) * 6e4);
+  }, function(date) {
+    return date.getMinutes();
+  });
+  d3_time.minutes = d3_time.minute.range;
+  d3_time.minutes.utc = d3_time.minute.utc.range;
+  d3_time.hour = d3_time_interval(function(date) {
+    var timezone = date.getTimezoneOffset() / 60;
+    return new d3_date((Math.floor(date / 36e5 - timezone) + timezone) * 36e5);
+  }, function(date, offset) {
+    date.setTime(date.getTime() + Math.floor(offset) * 36e5);
+  }, function(date) {
+    return date.getHours();
+  });
+  d3_time.hours = d3_time.hour.range;
+  d3_time.hours.utc = d3_time.hour.utc.range;
+  d3_time.month = d3_time_interval(function(date) {
+    date = d3_time.day(date);
+    date.setDate(1);
+    return date;
+  }, function(date, offset) {
+    date.setMonth(date.getMonth() + offset);
+  }, function(date) {
+    return date.getMonth();
+  });
+  d3_time.months = d3_time.month.range;
+  d3_time.months.utc = d3_time.month.utc.range;
+  function d3_time_scale(linear, methods, format) {
+    function scale(x) {
+      return linear(x);
+    }
+    scale.invert = function(x) {
+      return d3_time_scaleDate(linear.invert(x));
+    };
+    scale.domain = function(x) {
+      if (!arguments.length) return linear.domain().map(d3_time_scaleDate);
+      linear.domain(x);
+      return scale;
+    };
+    function tickMethod(extent, count) {
+      var span = extent[1] - extent[0], target = span / count, i = d3.bisect(d3_time_scaleSteps, target);
+      return i == d3_time_scaleSteps.length ? [ methods.year, d3_scale_linearTickRange(extent.map(function(d) {
+        return d / 31536e6;
+      }), count)[2] ] : !i ? [ d3_time_scaleMilliseconds, d3_scale_linearTickRange(extent, count)[2] ] : methods[target / d3_time_scaleSteps[i - 1] < d3_time_scaleSteps[i] / target ? i - 1 : i];
+    }
+    scale.nice = function(interval, skip) {
+      var domain = scale.domain(), extent = d3_scaleExtent(domain), method = interval == null ? tickMethod(extent, 10) : typeof interval === "number" && tickMethod(extent, interval);
+      if (method) interval = method[0], skip = method[1];
+      function skipped(date) {
+        return !isNaN(date) && !interval.range(date, d3_time_scaleDate(+date + 1), skip).length;
+      }
+      return scale.domain(d3_scale_nice(domain, skip > 1 ? {
+        floor: function(date) {
+          while (skipped(date = interval.floor(date))) date = d3_time_scaleDate(date - 1);
+          return date;
+        },
+        ceil: function(date) {
+          while (skipped(date = interval.ceil(date))) date = d3_time_scaleDate(+date + 1);
+          return date;
+        }
+      } : interval));
+    };
+    scale.ticks = function(interval, skip) {
+      var extent = d3_scaleExtent(scale.domain()), method = interval == null ? tickMethod(extent, 10) : typeof interval === "number" ? tickMethod(extent, interval) : !interval.range && [ {
+        range: interval
+      }, skip ];
+      if (method) interval = method[0], skip = method[1];
+      return interval.range(extent[0], d3_time_scaleDate(+extent[1] + 1), skip < 1 ? 1 : skip);
+    };
+    scale.tickFormat = function() {
+      return format;
+    };
+    scale.copy = function() {
+      return d3_time_scale(linear.copy(), methods, format);
+    };
+    return d3_scale_linearRebind(scale, linear);
+  }
+  function d3_time_scaleDate(t) {
+    return new Date(t);
+  }
+  var d3_time_scaleSteps = [ 1e3, 5e3, 15e3, 3e4, 6e4, 3e5, 9e5, 18e5, 36e5, 108e5, 216e5, 432e5, 864e5, 1728e5, 6048e5, 2592e6, 7776e6, 31536e6 ];
+  var d3_time_scaleLocalMethods = [ [ d3_time.second, 1 ], [ d3_time.second, 5 ], [ d3_time.second, 15 ], [ d3_time.second, 30 ], [ d3_time.minute, 1 ], [ d3_time.minute, 5 ], [ d3_time.minute, 15 ], [ d3_time.minute, 30 ], [ d3_time.hour, 1 ], [ d3_time.hour, 3 ], [ d3_time.hour, 6 ], [ d3_time.hour, 12 ], [ d3_time.day, 1 ], [ d3_time.day, 2 ], [ d3_time.week, 1 ], [ d3_time.month, 1 ], [ d3_time.month, 3 ], [ d3_time.year, 1 ] ];
+  var d3_time_scaleLocalFormat = d3_time_format.multi([ [ ".%L", function(d) {
+    return d.getMilliseconds();
+  } ], [ ":%S", function(d) {
+    return d.getSeconds();
+  } ], [ "%I:%M", function(d) {
+    return d.getMinutes();
+  } ], [ "%I %p", function(d) {
+    return d.getHours();
+  } ], [ "%a %d", function(d) {
+    return d.getDay() && d.getDate() != 1;
+  } ], [ "%b %d", function(d) {
+    return d.getDate() != 1;
+  } ], [ "%B", function(d) {
+    return d.getMonth();
+  } ], [ "%Y", d3_true ] ]);
+  var d3_time_scaleMilliseconds = {
+    range: function(start, stop, step) {
+      return d3.range(Math.ceil(start / step) * step, +stop, step).map(d3_time_scaleDate);
+    },
+    floor: d3_identity,
+    ceil: d3_identity
+  };
+  d3_time_scaleLocalMethods.year = d3_time.year;
+  d3_time.scale = function() {
+    return d3_time_scale(d3.scale.linear(), d3_time_scaleLocalMethods, d3_time_scaleLocalFormat);
+  };
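+  // Sketch: a local-time scale behaves like a linear scale over Date objects.
+  //   var t = d3.time.scale()
+  //     .domain([new Date(2016, 0, 1), new Date(2016, 11, 31)])
+  //     .range([0, 800]);
+  //   t.ticks(d3.time.month, 1); // one tick per month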
+  var d3_time_scaleUtcMethods = d3_time_scaleLocalMethods.map(function(m) {
+    return [ m[0].utc, m[1] ];
+  });
+  var d3_time_scaleUtcFormat = d3_time_formatUtc.multi([ [ ".%L", function(d) {
+    return d.getUTCMilliseconds();
+  } ], [ ":%S", function(d) {
+    return d.getUTCSeconds();
+  } ], [ "%I:%M", function(d) {
+    return d.getUTCMinutes();
+  } ], [ "%I %p", function(d) {
+    return d.getUTCHours();
+  } ], [ "%a %d", function(d) {
+    return d.getUTCDay() && d.getUTCDate() != 1;
+  } ], [ "%b %d", function(d) {
+    return d.getUTCDate() != 1;
+  } ], [ "%B", function(d) {
+    return d.getUTCMonth();
+  } ], [ "%Y", d3_true ] ]);
+  d3_time_scaleUtcMethods.year = d3_time.year.utc;
+  d3_time.scale.utc = function() {
+    return d3_time_scale(d3.scale.linear(), d3_time_scaleUtcMethods, d3_time_scaleUtcFormat);
+  };
+  d3.text = d3_xhrType(function(request) {
+    return request.responseText;
+  });
+  d3.json = function(url, callback) {
+    return d3_xhr(url, "application/json", d3_json, callback);
+  };
+  function d3_json(request) {
+    return JSON.parse(request.responseText);
+  }
+  d3.html = function(url, callback) {
+    return d3_xhr(url, "text/html", d3_html, callback);
+  };
+  function d3_html(request) {
+    var range = d3_document.createRange();
+    range.selectNode(d3_document.body);
+    return range.createContextualFragment(request.responseText);
+  }
+  d3.xml = d3_xhrType(function(request) {
+    return request.responseXML;
+  });
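+  // Illustrative usage (d3 v3 XHR helpers pass (error, data) to the callback;
+  // the URL here is hypothetical):
+  //   d3.json("data.json", function(error, data) { if (!error) console.log(data); });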
+  if (typeof define === "function" && define.amd) this.d3 = d3, define(d3); else if (typeof module === "object" && module.exports) module.exports = d3; else this.d3 = d3;
+}();
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/farbtastic.js b/client/galaxy/scripts/libs/farbtastic.js
new file mode 100644
index 0000000..01e1173
--- /dev/null
+++ b/client/galaxy/scripts/libs/farbtastic.js
@@ -0,0 +1,516 @@
+// Farbtastic 2.0.0-alpha.1
+(function ($) {
+
+var __debug = false;
+
+$.fn.farbtastic = function (options) {
+  $.farbtastic(this, options);
+  return this;
+};
+
+$.farbtastic = function (container, options) {
+  var container = $(container)[0];
+  return container.farbtastic || (container.farbtastic = new $._farbtastic(container, options));
+}
+
+$._farbtastic = function (container, options) {
+  var fb = this;
+
+  /////////////////////////////////////////////////////
+
+  /**
+   * Link to the given element(s) or callback.
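+   * e.g. fb.linkTo('#color-field') to sync with an input, or
+   * fb.linkTo(function (color) { ... }) for a callback (selector illustrative).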
+   */
+  fb.linkTo = function (callback) {
+    // Unbind previous nodes
+    if (typeof fb.callback == 'object') {
+      $(fb.callback).unbind('keyup', fb.updateValue);
+    }
+
+    // Reset color
+    fb.color = null;
+
+    // Bind callback or elements
+    if (typeof callback == 'function') {
+      fb.callback = callback;
+    }
+    else if (typeof callback == 'object' || typeof callback == 'string') {
+      fb.callback = $(callback);
+      fb.callback.bind('keyup', fb.updateValue);
+      if (fb.callback[0].value) {
+        fb.setColor(fb.callback[0].value);
+      }
+    }
+    return this;
+  }
+  fb.updateValue = function (event) {
+    if (this.value && this.value != fb.color) {
+      fb.setColor(this.value);
+    }
+  }
+
+  /**
+   * Change color with HTML syntax #123456
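+   * e.g. fb.setColor('#336699'), or the short form '#369'.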
+   */
+  fb.setColor = function (color) {
+    var unpack = fb.unpack(color);
+    if (fb.color != color && unpack) {
+      fb.color = color;
+      fb.rgb = unpack;
+      fb.hsl = fb.RGBToHSL(fb.rgb);
+      fb.updateDisplay();
+    }
+    return this;
+  }
+
+  /**
+   * Change color with HSL triplet [0..1, 0..1, 0..1]
+   */
+  fb.setHSL = function (hsl) {
+    fb.hsl = hsl;
+    fb.rgb = fb.HSLToRGB(hsl);
+    fb.color = fb.pack(fb.rgb);
+    fb.updateDisplay();
+    return this;
+  }
+
+  /////////////////////////////////////////////////////
+
+  /**
+   * Initialize the color picker widget.
+   */
+  fb.initWidget = function () {
+
+    // Insert markup and size accordingly.
+    var dim = {
+      width: options.width,
+      height: options.width
+    };
+    $(container)
+      .html(
+        '<div class="farbtastic" style="position: relative">' +
+          '<div class="farbtastic-solid"></div>' +
+          '<canvas class="farbtastic-mask"></canvas>' +
+          '<canvas class="farbtastic-overlay"></canvas>' +
+        '</div>'
+      )
+      .find('*').attr(dim).css(dim).end()
+      .find('div>*').css('position', 'absolute');
+
+    // IE Fix: Recreate canvas elements with doc.createElement and excanvas.
+    ($.browser.msie || false) && $('canvas', container).each(function () {
+      // Fetch info.
+      var attr = { 'class': $(this).attr('class'), style: this.getAttribute('style') },
+          e = document.createElement('canvas');
+      // Replace element.
+      $(this).before($(e).attr(attr)).remove();
+      // Init with explorerCanvas.
+      G_vmlCanvasManager && G_vmlCanvasManager.initElement(e);
+      // Set explorerCanvas elements dimensions and absolute positioning.
+      $(e).attr(dim).css(dim).css('position', 'absolute')
+        .find('*').attr(dim).css(dim);
+    });
+
+    // Determine layout
+    fb.radius = (options.width - options.wheelWidth) / 2 - 1;
+    fb.square = Math.floor((fb.radius - options.wheelWidth / 2) * 0.7) - 1;
+    fb.mid = Math.floor(options.width / 2);
+    fb.markerSize = options.wheelWidth * 0.3;
+    fb.solidFill = $('.farbtastic-solid', container).css({
+      width: fb.square * 2 - 1,
+      height: fb.square * 2 - 1,
+      left: fb.mid - fb.square,
+      top: fb.mid - fb.square
+    });
+
+    // Set up drawing context.
+    fb.cnvMask = $('.farbtastic-mask', container);
+    fb.ctxMask = fb.cnvMask[0].getContext('2d');
+    fb.cnvOverlay = $('.farbtastic-overlay', container);
+    fb.ctxOverlay = fb.cnvOverlay[0].getContext('2d');
+    fb.ctxMask.translate(fb.mid, fb.mid);
+    fb.ctxOverlay.translate(fb.mid, fb.mid);
+
+    // Draw widget base layers.
+    fb.drawCircle();
+    fb.drawMask();
+  }
+
+  /**
+   * Draw the color wheel.
+   */
+  fb.drawCircle = function () {
+    var tm = +(new Date());
+    // Draw a hue circle with a bunch of gradient-stroked beziers.
+    // Have to use beziers, as gradient-stroked arcs don't work.
+    var n = 24,
+        r = fb.radius,
+        w = options.wheelWidth,
+        nudge = 8 / r / n * Math.PI, // Fudge factor for seams.
+        m = fb.ctxMask,
+        angle1 = 0, color1, d1;
+    m.save();
+    m.lineWidth = w / r;
+    m.scale(r, r);
+    // Each segment goes from angle1 to angle2.
+    for (var i = 0; i <= n; ++i) {
+      var d2 = i / n,
+          angle2 = d2 * Math.PI * 2,
+          // Endpoints
+          x1 = Math.sin(angle1), y1 = -Math.cos(angle1),
+          x2 = Math.sin(angle2), y2 = -Math.cos(angle2),
+          // Midpoint chosen so that the endpoints are tangent to the circle.
+          am = (angle1 + angle2) / 2,
+          tan = 1 / Math.cos((angle2 - angle1) / 2),
+          xm = Math.sin(am) * tan, ym = -Math.cos(am) * tan,
+          // New color
+          color2 = fb.pack(fb.HSLToRGB([d2, 1, 0.5]));
+      if (i > 0) {
+        if ($.browser.msie || false) {
+          // IE's gradient calculations mess up the colors. Correct along the diagonals.
+          var corr = (1 + Math.min(Math.abs(Math.tan(angle1)), Math.abs(Math.tan(Math.PI / 2 - angle1)))) / n;
+          color1 = fb.pack(fb.HSLToRGB([d1 - 0.15 * corr, 1, 0.5]));
+          color2 = fb.pack(fb.HSLToRGB([d2 + 0.15 * corr, 1, 0.5]));
+          // Create gradient fill between the endpoints.
+          var grad = m.createLinearGradient(x1, y1, x2, y2);
+          grad.addColorStop(0, color1);
+          grad.addColorStop(1, color2);
+          m.fillStyle = grad;
+          // Draw quadratic curve segment as a fill.
+          var r1 = (r + w / 2) / r, r2 = (r - w / 2) / r; // inner/outer radius.
+          m.beginPath();
+          m.moveTo(x1 * r1, y1 * r1);
+          m.quadraticCurveTo(xm * r1, ym * r1, x2 * r1, y2 * r1);
+          m.lineTo(x2 * r2, y2 * r2);
+          m.quadraticCurveTo(xm * r2, ym * r2, x1 * r2, y1 * r2);
+          m.fill();
+        }
+        else {
+          // Create gradient fill between the endpoints.
+          var grad = m.createLinearGradient(x1, y1, x2, y2);
+          grad.addColorStop(0, color1);
+          grad.addColorStop(1, color2);
+          m.strokeStyle = grad;
+          // Draw quadratic curve segment.
+          m.beginPath();
+          m.moveTo(x1, y1);
+          m.quadraticCurveTo(xm, ym, x2, y2);
+          m.stroke();
+        }
+      }
+      // Prevent seams where curves join.
+      angle1 = angle2 - nudge; color1 = color2; d1 = d2;
+    }
+    m.restore();
+    __debug && $('body').append('<div>drawCircle '+ (+(new Date()) - tm) +'ms');
+  };
+
+  /**
+   * Draw the saturation/luminance mask.
+   */
+  fb.drawMask = function () {
+    var tm = +(new Date());
+
+    // Iterate over sat/lum space and calculate appropriate mask pixel values.
+    var size = fb.square * 2, sq = fb.square;
+    function calculateMask(sizex, sizey, outputPixel) {
+      var isx = 1 / sizex, isy = 1 / sizey;
+      for (var y = 0; y <= sizey; ++y) {
+        var l = 1 - y * isy;
+        for (var x = 0; x <= sizex; ++x) {
+          var s = 1 - x * isx;
+          // From sat/lum to alpha and color (grayscale)
+          var a = 1 - 2 * Math.min(l * s, (1 - l) * s);
+          var c = (a > 0) ? ((2 * l - 1 + a) * .5 / a) : 0;
+          outputPixel(x, y, c, a);
+        }
+      }
+    }
+
+    // Method #1: direct pixel access (new Canvas).
+    if (fb.ctxMask.getImageData) {
+      // Create half-resolution buffer.
+      var sz = Math.floor(size / 2);
+      var buffer = document.createElement('canvas');
+      buffer.width = buffer.height = sz + 1;
+      var ctx = buffer.getContext('2d');
+      var frame = ctx.getImageData(0, 0, sz + 1, sz + 1);
+
+      var i = 0;
+      calculateMask(sz, sz, function (x, y, c, a) {
+        frame.data[i++] = frame.data[i++] = frame.data[i++] = c * 255;
+        frame.data[i++] = a * 255;
+      });
+
+      ctx.putImageData(frame, 0, 0);
+      fb.ctxMask.drawImage(buffer, 0, 0, sz + 1, sz + 1, -sq, -sq, sq * 2, sq * 2);
+    }
+    // Method #2: drawing commands (old Canvas).
+    else if (!($.browser.msie || false)) {
+      // Render directly at half-resolution
+      var sz = Math.floor(size / 2);
+      calculateMask(sz, sz, function (x, y, c, a) {
+        c = Math.round(c * 255);
+        fb.ctxMask.fillStyle = 'rgba(' + c + ', ' + c + ', ' + c + ', ' + a +')';
+        fb.ctxMask.fillRect(x * 2 - sq - 1, y * 2 - sq - 1, 2, 2);
+      });
+    }
+    // Method #3: vertical DXImageTransform gradient strips (IE).
+    else {
+      var cache_last, cache, w = 6; // Each strip is 6 pixels wide.
+      var sizex = Math.floor(size / w);
+      // 6 vertical pieces of gradient per strip.
+      calculateMask(sizex, 6, function (x, y, c, a) {
+        if (x == 0) {
+          cache_last = cache;
+          cache = [];
+        }
+        c = Math.round(c * 255);
+        a = Math.round(a * 255);
+        // We can only start outputting gradients once we have two rows of pixels.
+        if (y > 0) {
+          var c_last = cache_last[x][0],
+              a_last = cache_last[x][1],
+              color1 = fb.packDX(c_last, a_last),
+              color2 = fb.packDX(c, a),
+              y1 = Math.round(fb.mid + ((y - 1) * .333 - 1) * sq),
+              y2 = Math.round(fb.mid + (y * .333 - 1) * sq);
+          $('<div>').css({
+            position: 'absolute',
+            filter: "progid:DXImageTransform.Microsoft.Gradient(StartColorStr="+ color1 +", EndColorStr="+ color2 +", GradientType=0)",
+            top: y1,
+            height: y2 - y1,
+            // Avoid right-edge sticking out.
+            left: fb.mid + (x * w - sq - 1),
+            width: w - (x == sizex ? Math.round(w / 2) : 0)
+          }).appendTo(fb.cnvMask);
+        }
+        cache.push([c, a]);
+      });
+    }
+    __debug && $('body').append('<div>drawMask '+ (+(new Date()) - tm) +'ms');
+  }
+
+  /**
+   * Draw the selection markers.
+   */
+  fb.drawMarkers = function () {
+    // Determine marker dimensions
+    var sz = options.width, lw = Math.ceil(fb.markerSize / 4), r = fb.markerSize - lw + 1;
+    var angle = fb.hsl[0] * 6.28,
+        x1 =  Math.sin(angle) * fb.radius,
+        y1 = -Math.cos(angle) * fb.radius,
+        x2 = 2 * fb.square * (.5 - fb.hsl[1]),
+        y2 = 2 * fb.square * (.5 - fb.hsl[2]),
+        c1 = fb.invert ? '#fff' : '#000',
+        c2 = fb.invert ? '#000' : '#fff';
+    var circles = [
+      { x: x1, y: y1, r: r,             c: '#000', lw: lw + 1 },
+      { x: x1, y: y1, r: fb.markerSize, c: '#fff', lw: lw },
+      { x: x2, y: y2, r: r,             c: c2,     lw: lw + 1 },
+      { x: x2, y: y2, r: fb.markerSize, c: c1,     lw: lw },
+    ];
+
+    // Update the overlay canvas.
+    fb.ctxOverlay.clearRect(-fb.mid, -fb.mid, sz, sz);
+    for (var i = 0; i < circles.length; i++) {
+      var c = circles[i];
+      fb.ctxOverlay.lineWidth = c.lw;
+      fb.ctxOverlay.strokeStyle = c.c;
+      fb.ctxOverlay.beginPath();
+      fb.ctxOverlay.arc(c.x, c.y, c.r, 0, Math.PI * 2, true);
+      fb.ctxOverlay.stroke();
+    }
+  }
+
+  /**
+   * Update the markers and styles
+   */
+  fb.updateDisplay = function () {
+    // Determine whether labels/markers should invert.
+    fb.invert = (fb.rgb[0] * 0.3 + fb.rgb[1] * .59 + fb.rgb[2] * .11) <= 0.6;
+
+    // Update the solid background fill.
+    fb.solidFill.css('backgroundColor', fb.pack(fb.HSLToRGB([fb.hsl[0], 1, 0.5])));
+
+    // Draw markers
+    fb.drawMarkers();
+
+    // Linked elements or callback
+    if (typeof fb.callback == 'object') {
+      // Set background/foreground color
+      $(fb.callback).css({
+        backgroundColor: fb.color,
+        color: fb.invert ? '#fff' : '#000'
+      });
+
+      // Change linked value
+      $(fb.callback).each(function() {
+        if ((typeof this.value == 'string') && this.value != fb.color) {
+          this.value = fb.color;
+        }
+      }).change();
+    }
+    else if (typeof fb.callback == 'function') {
+      fb.callback.call(fb, fb.color);
+    }
+  }
+
+  /**
+   * Helper for returning coordinates relative to the center.
+   */
+  fb.widgetCoords = function (event) {
+    return {
+      x: event.pageX - fb.offset.left - fb.mid,
+      y: event.pageY - fb.offset.top - fb.mid
+    };
+  }
+
+  /**
+   * Mousedown handler
+   */
+  fb.mousedown = function (event) {
+    // Capture mouse
+    if (!$._farbtastic.dragging) {
+      $(document).bind('mousemove', fb.mousemove).bind('mouseup', fb.mouseup);
+      $._farbtastic.dragging = true;
+    }
+
+    // Update the stored offset for the widget.
+    fb.offset = $(container).offset();
+
+    // Check which area is being dragged
+    var pos = fb.widgetCoords(event);
+    fb.circleDrag = Math.max(Math.abs(pos.x), Math.abs(pos.y)) > (fb.square + 2);
+
+    // Process
+    fb.mousemove(event);
+    return false;
+  }
+
+  /**
+   * Mousemove handler
+   */
+  fb.mousemove = function (event) {
+    // Get coordinates relative to color picker center
+    var pos = fb.widgetCoords(event);
+
+    // Set new HSL parameters
+    if (fb.circleDrag) {
+      var hue = Math.atan2(pos.x, -pos.y) / 6.28;
+      fb.setHSL([(hue + 1) % 1, fb.hsl[1], fb.hsl[2]]);
+    }
+    else {
+      var sat = Math.max(0, Math.min(1, -(pos.x / fb.square / 2) + .5));
+      var lum = Math.max(0, Math.min(1, -(pos.y / fb.square / 2) + .5));
+      fb.setHSL([fb.hsl[0], sat, lum]);
+    }
+    return false;
+  }
+
+  /**
+   * Mouseup handler
+   */
+  fb.mouseup = function () {
+    // Uncapture mouse
+    $(document).unbind('mousemove', fb.mousemove);
+    $(document).unbind('mouseup', fb.mouseup);
+    $._farbtastic.dragging = false;
+  }
+
+  /* Various color utility functions */
+  fb.dec2hex = function (x) {
+    return (x < 16 ? '0' : '') + x.toString(16);
+  }
+
+  fb.packDX = function (c, a) {
+    return '#' + fb.dec2hex(a) + fb.dec2hex(c) + fb.dec2hex(c) + fb.dec2hex(c);
+  };
+
+  fb.pack = function (rgb) {
+    var r = Math.round(rgb[0] * 255);
+    var g = Math.round(rgb[1] * 255);
+    var b = Math.round(rgb[2] * 255);
+    return '#' + fb.dec2hex(r) + fb.dec2hex(g) + fb.dec2hex(b);
+  };
+
+  fb.unpack = function (color) {
+    if (color.length == 7) {
+      var x = function (i) {
+        return parseInt(color.substring(i, i + 2), 16) / 255;
+      };
+      return [ x(1), x(3), x(5) ];
+    }
+    else if (color.length == 4) {
+      var x = function (i) {
+        return parseInt(color.substring(i, i + 1), 16) / 15;
+      };
+      return [ x(1), x(2), x(3) ];
+    }
+  };
+
+  fb.HSLToRGB = function (hsl) {
+    var m1, m2, r, g, b;
+    var h = hsl[0], s = hsl[1], l = hsl[2];
+    m2 = (l <= 0.5) ? l * (s + 1) : l + s - l * s;
+    m1 = l * 2 - m2;
+    return [
+      this.hueToRGB(m1, m2, h + 0.33333),
+      this.hueToRGB(m1, m2, h),
+      this.hueToRGB(m1, m2, h - 0.33333)
+    ];
+  };
+
+  fb.hueToRGB = function (m1, m2, h) {
+    h = (h + 1) % 1;
+    if (h * 6 < 1) return m1 + (m2 - m1) * h * 6;
+    if (h * 2 < 1) return m2;
+    if (h * 3 < 2) return m1 + (m2 - m1) * (0.66666 - h) * 6;
+    return m1;
+  };
+
+  fb.RGBToHSL = function (rgb) {
+    var r = rgb[0], g = rgb[1], b = rgb[2],
+        min = Math.min(r, g, b),
+        max = Math.max(r, g, b),
+        delta = max - min,
+        h = 0,
+        s = 0,
+        l = (min + max) / 2;
+    if (l > 0 && l < 1) {
+      s = delta / (l < 0.5 ? (2 * l) : (2 - 2 * l));
+    }
+    if (delta > 0) {
+      if (max == r && max != g) h += (g - b) / delta;
+      if (max == g && max != b) h += (2 + (b - r) / delta);
+      if (max == b && max != r) h += (4 + (r - g) / delta);
+      h /= 6;
+    }
+    return [h, s, l];
+  };
+
+  // Parse options.
+  if (!options.callback) {
+    options = { callback: options };
+  }
+  options = $.extend({
+    width: 300,
+    wheelWidth: (options.width || 300) / 10,
+    callback: null
+  }, options);
+
+  // Initialize.
+  fb.initWidget();
+
+  // Install mousedown handler (the others are set on the document on-demand)
+  $('canvas.farbtastic-overlay', container).mousedown(fb.mousedown);
+
+  // Set linked elements/callback
+  if (options.callback) {
+    fb.linkTo(options.callback);
+  }
+  // Set to gray.
+  if (!fb.color) fb.setColor('#808080');
+}
+
+})(jQuery);
diff --git a/client/galaxy/scripts/libs/jquery.complexify.js b/client/galaxy/scripts/libs/jquery.complexify.js
new file mode 100644
index 0000000..9d09a8d
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery.complexify.js
@@ -0,0 +1,192 @@
+/*
+  http://github.com/danpalmer/jquery.complexify.js
+
+  This code is distributed under the WTFPL v2.
+*/
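+// Typical usage, a minimal sketch (element id illustrative):
+//   $('#password').complexify({ minimumChars: 8 }, function (valid, complexity) {
+//     // valid: boolean; complexity: percentage in [0, 100]
+//   });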
+(function ($) {
+
+  $.fn.extend({
+    complexify: function(options, callback) {
+
+      var MIN_COMPLEXITY = 49; // 12 chars with Upper, Lower and Number
+      var MAX_COMPLEXITY = 120; //  25 chars, all charsets
+      var CHARSETS = [
+        // Commonly Used
+        ////////////////////
+        [0x0020, 0x0020], // Space
+        [0x0030, 0x0039], // Numbers
+        [0x0041, 0x005A], // Uppercase
+        [0x0061, 0x007A], // Lowercase
+        [0x0021, 0x002F], // Punctuation
+        [0x003A, 0x0040], // Punctuation
+        [0x005B, 0x0060], // Punctuation
+        [0x007B, 0x007E], // Punctuation
+        // Everything Else
+        ////////////////////
+        [0x0080, 0x00FF], // Latin-1 Supplement
+        [0x0100, 0x017F], // Latin Extended-A
+        [0x0180, 0x024F], // Latin Extended-B
+        [0x0250, 0x02AF], // IPA Extensions
+        [0x02B0, 0x02FF], // Spacing Modifier Letters
+        [0x0300, 0x036F], // Combining Diacritical Marks
+        [0x0370, 0x03FF], // Greek
+        [0x0400, 0x04FF], // Cyrillic
+        [0x0530, 0x058F], // Armenian
+        [0x0590, 0x05FF], // Hebrew
+        [0x0600, 0x06FF], // Arabic
+        [0x0700, 0x074F], // Syriac
+        [0x0780, 0x07BF], // Thaana
+        [0x0900, 0x097F], // Devanagari
+        [0x0980, 0x09FF], // Bengali
+        [0x0A00, 0x0A7F], // Gurmukhi
+        [0x0A80, 0x0AFF], // Gujarati
+        [0x0B00, 0x0B7F], // Oriya
+        [0x0B80, 0x0BFF], // Tamil
+        [0x0C00, 0x0C7F], // Telugu
+        [0x0C80, 0x0CFF], // Kannada
+        [0x0D00, 0x0D7F], // Malayalam
+        [0x0D80, 0x0DFF], // Sinhala
+        [0x0E00, 0x0E7F], // Thai
+        [0x0E80, 0x0EFF], // Lao
+        [0x0F00, 0x0FFF], // Tibetan
+        [0x1000, 0x109F], // Myanmar
+        [0x10A0, 0x10FF], // Georgian
+        [0x1100, 0x11FF], // Hangul Jamo
+        [0x1200, 0x137F], // Ethiopic
+        [0x13A0, 0x13FF], // Cherokee
+        [0x1400, 0x167F], // Unified Canadian Aboriginal Syllabics
+        [0x1680, 0x169F], // Ogham
+        [0x16A0, 0x16FF], // Runic
+        [0x1780, 0x17FF], // Khmer
+        [0x1800, 0x18AF], // Mongolian
+        [0x1E00, 0x1EFF], // Latin Extended Additional
+        [0x1F00, 0x1FFF], // Greek Extended
+        [0x2000, 0x206F], // General Punctuation
+        [0x2070, 0x209F], // Superscripts and Subscripts
+        [0x20A0, 0x20CF], // Currency Symbols
+        [0x20D0, 0x20FF], // Combining Marks for Symbols
+        [0x2100, 0x214F], // Letterlike Symbols
+        [0x2150, 0x218F], // Number Forms
+        [0x2190, 0x21FF], // Arrows
+        [0x2200, 0x22FF], // Mathematical Operators
+        [0x2300, 0x23FF], // Miscellaneous Technical
+        [0x2400, 0x243F], // Control Pictures
+        [0x2440, 0x245F], // Optical Character Recognition
+        [0x2460, 0x24FF], // Enclosed Alphanumerics
+        [0x2500, 0x257F], // Box Drawing
+        [0x2580, 0x259F], // Block Elements
+        [0x25A0, 0x25FF], // Geometric Shapes
+        [0x2600, 0x26FF], // Miscellaneous Symbols
+        [0x2700, 0x27BF], // Dingbats
+        [0x2800, 0x28FF], // Braille Patterns
+        [0x2E80, 0x2EFF], // CJK Radicals Supplement
+        [0x2F00, 0x2FDF], // Kangxi Radicals
+        [0x2FF0, 0x2FFF], // Ideographic Description Characters
+        [0x3000, 0x303F], // CJK Symbols and Punctuation
+        [0x3040, 0x309F], // Hiragana
+        [0x30A0, 0x30FF], // Katakana
+        [0x3100, 0x312F], // Bopomofo
+        [0x3130, 0x318F], // Hangul Compatibility Jamo
+        [0x3190, 0x319F], // Kanbun
+        [0x31A0, 0x31BF], // Bopomofo Extended
+        [0x3200, 0x32FF], // Enclosed CJK Letters and Months
+        [0x3300, 0x33FF], // CJK Compatibility
+        [0x3400, 0x4DB5], // CJK Unified Ideographs Extension A
+        [0x4E00, 0x9FFF], // CJK Unified Ideographs
+        [0xA000, 0xA48F], // Yi Syllables
+        [0xA490, 0xA4CF], // Yi Radicals
+        [0xAC00, 0xD7A3], // Hangul Syllables
+        [0xD800, 0xDB7F], // High Surrogates
+        [0xDB80, 0xDBFF], // High Private Use Surrogates
+        [0xDC00, 0xDFFF], // Low Surrogates
+        [0xE000, 0xF8FF], // Private Use
+        [0xF900, 0xFAFF], // CJK Compatibility Ideographs
+        [0xFB00, 0xFB4F], // Alphabetic Presentation Forms
+        [0xFB50, 0xFDFF], // Arabic Presentation Forms-A
+        [0xFE20, 0xFE2F], // Combining Half Marks
+        [0xFE30, 0xFE4F], // CJK Compatibility Forms
+        [0xFE50, 0xFE6F], // Small Form Variants
+        [0xFE70, 0xFEFE], // Arabic Presentation Forms-B
+        [0xFEFF, 0xFEFF], // Specials
+        [0xFF00, 0xFFEF], // Halfwidth and Fullwidth Forms
+        [0xFFF0, 0xFFFD]  // Specials
+      ];
+
+      var defaults = {
+        minimumChars: 8,
+        strengthScaleFactor: 1,
+        bannedPasswords: window.COMPLEXIFY_BANLIST || [],
+        banMode: 'strict' // (strict|loose)
+      };
+
+      if($.isFunction(options) && !callback) {
+        callback = options;
+        options = {};
+      }
+
+      options = $.extend(defaults, options);
+
+      function additionalComplexityForCharset(str, charset) {
+        for (var i = str.length - 1; i >= 0; i--) {
+          if (charset[0] <= str.charCodeAt(i) && str.charCodeAt(i) <= charset[1]) {
+            return charset[1] - charset[0] + 1;
+          }
+        }
+        return 0;
+      }
+
+      function inBanlist(str) {
+        if (options.banMode === 'strict') {
+          for (var i = 0; i < options.bannedPasswords.length; i++) {
+            if (str.toLowerCase().indexOf(options.bannedPasswords[i].toLowerCase()) !== -1) {
+                return true;
+            }
+          }
+          return false;
+        } else {
+          return $.inArray(str, options.bannedPasswords) > -1 ? true : false;
+        }
+      }
+
+      function evaluateSecurity() {
+        var password = $(this).val();
+        var complexity = 0, valid = false;
+
+        // Reset complexity to 0 when banned password is found
+        if (!inBanlist(password)) {
+
+          // Add character complexity
+          for (var i = CHARSETS.length - 1; i >= 0; i--) {
+            complexity += additionalComplexityForCharset(password, CHARSETS[i]);
+          }
+
+        } else {
+          complexity = 1;
+        }
+
+        // Use natural log to produce linear scale
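+        // log(complexity ^ length) = length * log(complexity), so the score
+        // grows linearly with password length for a fixed charset mix.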
+        complexity = Math.log(Math.pow(complexity, password.length)) * (1/options.strengthScaleFactor);
+
+        valid = (complexity > MIN_COMPLEXITY && password.length >= options.minimumChars);
+
+        // Scale to percentage, so it can be used for a progress bar
+        complexity = (complexity / MAX_COMPLEXITY) * 100;
+        complexity = (complexity > 100) ? 100 : complexity;
+
+        callback.call(this, valid, complexity);
+      }
+
+      this.each(function () {
+        if ($(this).val()) {
+          evaluateSecurity.apply(this);
+        }
+      });
+
+      return this.each(function () {
+        $(this).bind('keyup focus input propertychange mouseup', evaluateSecurity);
+      });
+
+    }
+  });
+
+})(jQuery);
diff --git a/client/galaxy/scripts/libs/jquery.sparklines.js b/client/galaxy/scripts/libs/jquery.sparklines.js
new file mode 100644
index 0000000..43b24c0
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery.sparklines.js
@@ -0,0 +1,3054 @@
+/**
+*
+* jquery.sparkline.js
+*
+* v2.1.2
+* (c) Splunk, Inc
+* Contact: Gareth Watts (gareth at splunk.com)
+* http://omnipotent.net/jquery.sparkline/
+*
+* Generates inline sparkline charts from data supplied either to the method
+* or inline in HTML
+*
+* Compatible with Internet Explorer 6.0+ and modern browsers equipped with the canvas tag
+* (Firefox 2.0+, Safari, Opera, etc)
+*
+* License: New BSD License
+*
+* Copyright (c) 2012, Splunk Inc.
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without modification,
+* are permitted provided that the following conditions are met:
+*
+*     * Redistributions of source code must retain the above copyright notice,
+*       this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above copyright notice,
+*       this list of conditions and the following disclaimer in the documentation
+*       and/or other materials provided with the distribution.
+*     * Neither the name of Splunk Inc nor the names of its contributors may
+*       be used to endorse or promote products derived from this software without
+*       specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
+* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+* SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
+* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*
+* Usage:
+*  $(selector).sparkline(values, options)
+*
+* If values is undefined or set to 'html' then the data values are read from the specified tag:
+*   <p>Sparkline: <span class="sparkline">1,4,6,6,8,5,3,5</span></p>
+*   $('.sparkline').sparkline();
+* There must be no spaces in the enclosed data set
+*
+* Otherwise values must be an array of numbers or null values
+*    <p>Sparkline: <span id="sparkline1">This text replaced if the browser is compatible</span></p>
+*    $('#sparkline1').sparkline([1,4,6,6,8,5,3,5])
+*    $('#sparkline2').sparkline([1,4,6,null,null,5,3,5])
+*
+* Values can also be specified in an HTML comment, or as a values attribute:
+*    <p>Sparkline: <span class="sparkline"><!--1,4,6,6,8,5,3,5 --></span></p>
+*    <p>Sparkline: <span class="sparkline" values="1,4,6,6,8,5,3,5"></span></p>
+*    $('.sparkline').sparkline();
+*
+* For line charts, x values can also be specified:
+*   <p>Sparkline: <span class="sparkline">1:1,2.7:4,3.4:6,5:6,6:8,8.7:5,9:3,10:5</span></p>
+*    $('#sparkline1').sparkline([ [1,1], [2.7,4], [3.4,6], [5,6], [6,8], [8.7,5], [9,3], [10,5] ])
+*
+* By default, options should be passed in as the second argument to the sparkline function:
+*   $('.sparkline').sparkline([1,2,3,4], {type: 'bar'})
+*
+* Options can also be set by passing them on the tag itself.  This feature is disabled by default though
+* as there's a slight performance overhead:
+*   $('.sparkline').sparkline([1,2,3,4], {enableTagOptions: true})
+*   <p>Sparkline: <span class="sparkline" sparkType="bar" sparkBarColor="red">loading</span></p>
+* Prefix all options supplied as tag attributes with "spark" (configurable by setting tagOptionsPrefix)
+*
+* Supported options:
+*   lineColor - Color of the line used for the chart
+*   fillColor - Color used to fill in the chart - Set to '' or false for a transparent chart
+*   width - Width of the chart - Defaults to 3 times the number of values in pixels
+*   height - Height of the chart - Defaults to the height of the containing element
+*   chartRangeMin - Specify the minimum value to use for the Y range of the chart - Defaults to the minimum value supplied
+*   chartRangeMax - Specify the maximum value to use for the Y range of the chart - Defaults to the maximum value supplied
+*   chartRangeClip - Clip out of range values to the max/min specified by chartRangeMin and chartRangeMax
+*   chartRangeMinX - Specify the minimum value to use for the X range of the chart - Defaults to the minimum value supplied
+*   chartRangeMaxX - Specify the maximum value to use for the X range of the chart - Defaults to the maximum value supplied
+*   composite - If true then don't erase any existing chart attached to the tag, but draw
+*           another chart over the top - Note that width and height are ignored if an
+*           existing chart is detected.
+*   tagValuesAttribute - Name of tag attribute to check for data values - Defaults to 'values'
+*   enableTagOptions - Whether to check tags for sparkline options
+*   tagOptionsPrefix - Prefix used for options supplied as tag attributes - Defaults to 'spark'
+*   disableHiddenCheck - If set to true, then the plugin will assume that charts will never be drawn into a
+*           hidden dom element, avoiding a browser reflow
+*   disableInteraction - If set to true then all mouseover/click interaction behaviour will be disabled,
+*       making the plugin perform much like it did in 1.x
+*   disableTooltips - If set to true then tooltips will be disabled - Defaults to false (tooltips enabled)
+*   disableHighlight - If set to true then highlighting of selected chart elements on mouseover will be disabled
+*       defaults to false (highlights enabled)
+*   highlightLighten - Factor to lighten/darken highlighted chart values by - Defaults to 1.4 for a 40% increase
+*   tooltipContainer - Specify which DOM element the tooltip should be rendered into - defaults to document.body
+*   tooltipClassname - Optional CSS classname to apply to tooltips - If not specified then a default style will be applied
+*   tooltipOffsetX - How many pixels away from the mouse pointer to render the tooltip on the X axis
+*   tooltipOffsetY - How many pixels away from the mouse pointer to render the tooltip on the Y axis
+*   tooltipFormatter  - Optional callback that allows you to override the HTML displayed in the tooltip
+*       callback is given arguments of (sparkline, options, fields)
+*   tooltipChartTitle - If specified then the tooltip uses the string specified by this setting as a title
+*   tooltipFormat - A format string or SPFormat object  (or an array thereof for multiple entries)
+*       to control the format of the tooltip
+*   tooltipPrefix - A string to prepend to each field displayed in a tooltip
+*   tooltipSuffix - A string to append to each field displayed in a tooltip
+*   tooltipSkipNull - If true then null values will not have a tooltip displayed (defaults to true)
+*   tooltipValueLookups - An object or range map to map field values to tooltip strings
+*       (eg. to map -1 to "Lost", 0 to "Draw", and 1 to "Win")
+*   numberFormatter - Optional callback for formatting numbers in tooltips
+*   numberDigitGroupSep - Character to use for group separator in numbers "1,234" - Defaults to ","
+*   numberDecimalMark - Character to use for the decimal point when formatting numbers - Defaults to "."
+*   numberDigitGroupCount - Number of digits between group separator - Defaults to 3
+*
+* There are 7 types of sparkline, selected by supplying a "type" option of 'line' (default),
+* 'bar', 'tristate', 'bullet', 'discrete', 'pie' or 'box'
+*    line - Line chart.  Options:
+*       spotColor - Set to '' to not end each line in a circular spot
+*       minSpotColor - If set, color of spot at minimum value
+*       maxSpotColor - If set, color of spot at maximum value
+*       spotRadius - Radius in pixels
+*       lineWidth - Width of line in pixels
+*       normalRangeMin
+*       normalRangeMax - If set, draws a filled horizontal bar between these two values marking the "normal"
+*                      or expected range of values
+*       normalRangeColor - Color to use for the above bar
+*       drawNormalOnTop - Draw the normal range above the chart fill color if true
+*       defaultPixelsPerValue - Defaults to 3 pixels of width for each value in the chart
+*       highlightSpotColor - The color to use for drawing a highlight spot on mouseover - Set to null to disable
+*       highlightLineColor - The color to use for drawing a highlight line on mouseover - Set to null to disable
+*       valueSpots - Specify which points to draw spots on, and in which color.  Accepts a range map
+*
+*   bar - Bar chart.  Options:
+*       barColor - Color of bars for positive values
+*       negBarColor - Color of bars for negative values
+*       zeroColor - Color of bars with zero values
+*       nullColor - Color of bars with null values - Defaults to omitting the bar entirely
+*       barWidth - Width of bars in pixels
+*       colorMap - Optional mapping of values to colors to override the *BarColor values above
+*                  can be an Array of values to control the color of individual bars or a range map
+*                  to specify colors for individual ranges of values
+*       barSpacing - Gap between bars in pixels
+*       zeroAxis - Centers the y-axis around zero if true
+*
+*   tristate - Charts values of win (>0), lose (<0) or draw (=0)
+*       posBarColor - Color of win values
+*       negBarColor - Color of lose values
+*       zeroBarColor - Color of draw values
+*       barWidth - Width of bars in pixels
+*       barSpacing - Gap between bars in pixels
+*       colorMap - Optional mapping of values to colors to override the *BarColor values above
+*                  can be an Array of values to control the color of individual bars or a range map
+*                  to specify colors for individual ranges of values
+*
+*   discrete - Options:
+*       lineHeight - Height of each line in pixels - Defaults to 30% of the graph height
+*       thresholdValue - Values less than this value will be drawn using thresholdColor instead of lineColor
+*       thresholdColor
+*
+*   bullet - Values for bullet graphs must be in the order: target, performance, range1, range2, range3, ...
+*       options:
+*       targetColor - The color of the vertical target marker
+*       targetWidth - The width of the target marker in pixels
+*       performanceColor - The color of the performance measure horizontal bar
+*       rangeColors - Colors to use for each qualitative range background color
+*
+*   pie - Pie chart. Options:
+*       sliceColors - An array of colors to use for pie slices
+*       offset - Angle in degrees to offset the first slice - Try -90 or +90
+*       borderWidth - Width of border to draw around the pie chart, in pixels - Defaults to 0 (no border)
+*       borderColor - Color to use for the pie chart border - Defaults to #000
+*
+*   box - Box plot. Options:
+*       raw - Set to true to supply pre-computed plot points as values
+*             values should be: low_outlier, low_whisker, q1, median, q3, high_whisker, high_outlier
+*             When set to false you can supply any number of values and the box plot will
+*             be computed for you.  Default is false.
+*       showOutliers - Set to true (default) to display outliers as circles
+*       outlierIQR - Interquartile range used to determine outliers.  Default 1.5
+*       boxLineColor - Outline color of the box
+*       boxFillColor - Fill color for the box
+*       whiskerColor - Line color used for whiskers
+*       outlierLineColor - Outline color of outlier circles
+*       outlierFillColor - Fill color of the outlier circles
+*       spotRadius - Radius of outlier circles
+*       medianColor - Line color of the median line
+*       target - Draw a target cross hair at the supplied value (default undefined)
+*
+*
+*
+*   Examples:
+*   $('#sparkline1').sparkline(myvalues, { lineColor: '#f00', fillColor: false });
+*   $('.barsparks').sparkline('html', { type:'bar', height:'40px', barWidth:5 });
+*   $('#tristate').sparkline([1,1,-1,1,0,0,-1], { type:'tristate' });
+*   $('#discrete').sparkline([1,3,4,5,5,3,4,5], { type:'discrete' });
+*   $('#bullet').sparkline([10,12,12,9,7], { type:'bullet' });
+*   $('#pie').sparkline([1,1,2], { type:'pie' });
+*/
+
+/*jslint regexp: true, browser: true, jquery: true, white: true, nomen: false, plusplus: false, maxerr: 500, indent: 4 */
+
+(function(document, Math, undefined) { // performance/minified-size optimization
+(function(factory) {
+    if(typeof define === 'function' && define.amd) {
+        define(['jquery'], factory);
+    } else if (jQuery && !jQuery.fn.sparkline) {
+        factory(jQuery);
+    }
+}
+(function($) {
+    'use strict';
+
+    var UNSET_OPTION = {},
+        getDefaults, createClass, SPFormat, clipval, quartile, normalizeValue, normalizeValues,
+        remove, isNumber, all, sum, addCSS, ensureArray, formatNumber, RangeMap,
+        MouseHandler, Tooltip, barHighlightMixin,
+        line, bar, tristate, discrete, bullet, pie, box, defaultStyles, initStyles,
+        VShape, VCanvas_base, VCanvas_canvas, VCanvas_vml, pending, shapeCount = 0;
+
+    /**
+     * Default configuration settings
+     */
+    getDefaults = function () {
+        return {
+            // Settings common to most/all chart types
+            common: {
+                type: 'line',
+                lineColor: '#00f',
+                fillColor: '#cdf',
+                defaultPixelsPerValue: 3,
+                width: 'auto',
+                height: 'auto',
+                composite: false,
+                tagValuesAttribute: 'values',
+                tagOptionsPrefix: 'spark',
+                enableTagOptions: false,
+                enableHighlight: true,
+                highlightLighten: 1.4,
+                tooltipSkipNull: true,
+                tooltipPrefix: '',
+                tooltipSuffix: '',
+                disableHiddenCheck: false,
+                numberFormatter: false,
+                numberDigitGroupCount: 3,
+                numberDigitGroupSep: ',',
+                numberDecimalMark: '.',
+                disableTooltips: false,
+                disableInteraction: false
+            },
+            // Defaults for line charts
+            line: {
+                spotColor: '#f80',
+                highlightSpotColor: '#5f5',
+                highlightLineColor: '#f22',
+                spotRadius: 1.5,
+                minSpotColor: '#f80',
+                maxSpotColor: '#f80',
+                lineWidth: 1,
+                normalRangeMin: undefined,
+                normalRangeMax: undefined,
+                normalRangeColor: '#ccc',
+                drawNormalOnTop: false,
+                chartRangeMin: undefined,
+                chartRangeMax: undefined,
+                chartRangeMinX: undefined,
+                chartRangeMaxX: undefined,
+                tooltipFormat: new SPFormat('<span style="color: {{color}}">●</span> {{prefix}}{{y}}{{suffix}}')
+            },
+            // Defaults for bar charts
+            bar: {
+                barColor: '#3366cc',
+                negBarColor: '#f44',
+                stackedBarColor: ['#3366cc', '#dc3912', '#ff9900', '#109618', '#66aa00',
+                    '#dd4477', '#0099c6', '#990099'],
+                zeroColor: undefined,
+                nullColor: undefined,
+                zeroAxis: true,
+                barWidth: 4,
+                barSpacing: 1,
+                chartRangeMax: undefined,
+                chartRangeMin: undefined,
+                chartRangeClip: false,
+                colorMap: undefined,
+                tooltipFormat: new SPFormat('<span style="color: {{color}}">●</span> {{prefix}}{{value}}{{suffix}}')
+            },
+            // Defaults for tristate charts
+            tristate: {
+                barWidth: 4,
+                barSpacing: 1,
+                posBarColor: '#6f6',
+                negBarColor: '#f44',
+                zeroBarColor: '#999',
+                colorMap: {},
+                tooltipFormat: new SPFormat('<span style="color: {{color}}">●</span> {{value:map}}'),
+                tooltipValueLookups: { map: { '-1': 'Loss', '0': 'Draw', '1': 'Win' } }
+            },
+            // Defaults for discrete charts
+            discrete: {
+                lineHeight: 'auto',
+                thresholdColor: undefined,
+                thresholdValue: 0,
+                chartRangeMax: undefined,
+                chartRangeMin: undefined,
+                chartRangeClip: false,
+                tooltipFormat: new SPFormat('{{prefix}}{{value}}{{suffix}}')
+            },
+            // Defaults for bullet charts
+            bullet: {
+                targetColor: '#f33',
+                targetWidth: 3, // width of the target bar in pixels
+                performanceColor: '#33f',
+                rangeColors: ['#d3dafe', '#a8b6ff', '#7f94ff'],
+                base: undefined, // set this to a number to change the base start number
+                tooltipFormat: new SPFormat('{{fieldkey:fields}} - {{value}}'),
+                tooltipValueLookups: { fields: {r: 'Range', p: 'Performance', t: 'Target'} }
+            },
+            // Defaults for pie charts
+            pie: {
+                offset: 0,
+                sliceColors: ['#3366cc', '#dc3912', '#ff9900', '#109618', '#66aa00',
+                    '#dd4477', '#0099c6', '#990099'],
+                borderWidth: 0,
+                borderColor: '#000',
+                tooltipFormat: new SPFormat('<span style="color: {{color}}">●</span> {{value}} ({{percent.1}}%)')
+            },
+            // Defaults for box plots
+            box: {
+                raw: false,
+                boxLineColor: '#000',
+                boxFillColor: '#cdf',
+                whiskerColor: '#000',
+                outlierLineColor: '#333',
+                outlierFillColor: '#fff',
+                medianColor: '#f00',
+                showOutliers: true,
+                outlierIQR: 1.5,
+                spotRadius: 1.5,
+                target: undefined,
+                targetColor: '#4a2',
+                chartRangeMax: undefined,
+                chartRangeMin: undefined,
+                tooltipFormat: new SPFormat('{{field:fields}}: {{value}}'),
+                tooltipFormatFieldlistKey: 'field',
+                tooltipValueLookups: { fields: { lq: 'Lower Quartile', med: 'Median',
+                    uq: 'Upper Quartile', lo: 'Left Outlier', ro: 'Right Outlier',
+                    lw: 'Left Whisker', rw: 'Right Whisker'} }
+            }
+        };
+    };
+
+    // You can have tooltips use a css class other than jqstooltip by specifying tooltipClassname
+    defaultStyles = '.jqstooltip { ' +
+            'position: absolute;' +
+            'left: 0px;' +
+            'top: 0px;' +
+            'visibility: hidden;' +
+            'background: rgb(0, 0, 0) transparent;' +
+            'background-color: rgba(0,0,0,0.6);' +
+            'filter:progid:DXImageTransform.Microsoft.gradient(startColorstr=#99000000, endColorstr=#99000000);' +
+            '-ms-filter: "progid:DXImageTransform.Microsoft.gradient(startColorstr=#99000000, endColorstr=#99000000)";' +
+            'color: white;' +
+            'font: 10px arial, sans-serif;' +
+            'text-align: left;' +
+            'white-space: nowrap;' +
+            'padding: 5px;' +
+            'border: 1px solid white;' +
+            'z-index: 10000;' +
+            '}' +
+            '.jqsfield { ' +
+            'color: white;' +
+            'font: 10px arial, sans-serif;' +
+            'text-align: left;' +
+            '}';
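+
+    // A minimal sketch of swapping in a custom tooltip class (the 'mytip'
+    // name is hypothetical): the class given via tooltipClassname replaces
+    // .jqstooltip on the tooltip element, so it should generally supply its
+    // own absolute positioning and z-index.
+    //
+    //   $('.sparkline').sparkline('html', { tooltipClassname: 'mytip' });
+    //   // .mytip { position: absolute; z-index: 10000;
+    //   //          background: #036; color: #fff; padding: 4px; }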
+
+    /**
+     * Utilities
+     */
+
+    createClass = function (/* [baseclass, [mixin, ...]], definition */) {
+        var Class, args;
+        Class = function () {
+            this.init.apply(this, arguments);
+        };
+        if (arguments.length > 1) {
+            if (arguments[0]) {
+                Class.prototype = $.extend(new arguments[0](), arguments[arguments.length - 1]);
+                Class._super = arguments[0].prototype;
+            } else {
+                Class.prototype = arguments[arguments.length - 1];
+            }
+            if (arguments.length > 2) {
+                args = Array.prototype.slice.call(arguments, 1, -1);
+                args.unshift(Class.prototype);
+                $.extend.apply($, args);
+            }
+        } else {
+            Class.prototype = arguments[0];
+        }
+        Class.prototype.cls = Class;
+        return Class;
+    };
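+
+    // A sketch of createClass in use (Base and Sub are made-up names): the
+    // first argument is an optional base class, any middle arguments are
+    // mixins, and the last is the prototype definition; _super exposes the
+    // base prototype for chained init calls.
+    //
+    //   var Base = createClass({ init: function () {},
+    //                            hello: function () { return 'hi'; } });
+    //   var Sub = createClass(Base, {
+    //       init: function () { Sub._super.init.call(this); }
+    //   });
+    //   new Sub().hello();  // 'hi', inherited from Base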
+
+    /**
+     * Wraps a format string for tooltips
+     * {{x}}
+     * {{x.2}}
+     * {{x:months}}
+     */
+    $.SPFormatClass = SPFormat = createClass({
+        fre: /\{\{([\w.]+?)(:(.+?))?\}\}/g,
+        precre: /(\w+)\.(\d+)/,
+
+        init: function (format, fclass) {
+            this.format = format;
+            this.fclass = fclass;
+        },
+
+        render: function (fieldset, lookups, options) {
+            var self = this,
+                fields = fieldset,
+                match, token, lookupkey, fieldvalue, prec;
+            return this.format.replace(this.fre, function () {
+                var lookup;
+                token = arguments[1];
+                lookupkey = arguments[3];
+                match = self.precre.exec(token);
+                if (match) {
+                    prec = match[2];
+                    token = match[1];
+                } else {
+                    prec = false;
+                }
+                fieldvalue = fields[token];
+                if (fieldvalue === undefined) {
+                    return '';
+                }
+                if (lookupkey && lookups && lookups[lookupkey]) {
+                    lookup = lookups[lookupkey];
+                    if (lookup.get) { // RangeMap
+                        return lookups[lookupkey].get(fieldvalue) || fieldvalue;
+                    } else {
+                        return lookups[lookupkey][fieldvalue] || fieldvalue;
+                    }
+                }
+                if (isNumber(fieldvalue)) {
+                    if (options.get('numberFormatter')) {
+                        fieldvalue = options.get('numberFormatter')(fieldvalue);
+                    } else {
+                        fieldvalue = formatNumber(fieldvalue, prec,
+                            options.get('numberDigitGroupCount'),
+                            options.get('numberDigitGroupSep'),
+                            options.get('numberDecimalMark'));
+                    }
+                }
+                return fieldvalue;
+            });
+        }
+    });
+
+    // convenience method to avoid needing the new operator
+    $.spformat = function(format, fclass) {
+        return new SPFormat(format, fclass);
+    };
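+
+    // A sketch of rendering a format string by hand (the field values and
+    // the 'names' lookup are invented; options stands for any object
+    // exposing the options.get interface, e.g. a $.fn.sparkline.options
+    // instance). This mirrors what the tooltip code does with tooltipFormat
+    // and tooltipValueLookups:
+    //
+    //   var fmt = $.spformat('{{name:names}}: {{y.2}}');
+    //   fmt.render({ name: 1, y: 3.14159 },
+    //              { names: { '1': 'CPU' } },
+    //              options);  // 'CPU: 3.14'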
+
+    clipval = function (val, min, max) {
+        if (val < min) {
+            return min;
+        }
+        if (val > max) {
+            return max;
+        }
+        return val;
+    };
+
+    quartile = function (values, q) {
+        var vl;
+        if (q === 2) {
+            vl = Math.floor(values.length / 2);
+            return values.length % 2 ? values[vl] : (values[vl-1] + values[vl]) / 2;
+        } else {
+            if (values.length % 2 ) { // odd
+                vl = (values.length * q + q) / 4;
+                return vl % 1 ? (values[Math.floor(vl)] + values[Math.floor(vl) - 1]) / 2 : values[vl-1];
+            } else { //even
+                vl = (values.length * q + 2) / 4;
+                return vl % 1 ? (values[Math.floor(vl)] + values[Math.floor(vl) - 1]) / 2 : values[vl-1];
+            }
+        }
+    };
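+
+    // Worked example (values must already be sorted ascending): for
+    // [1, 2, 3, 4, 5], quartile(values, 2) picks the middle element, 3,
+    // while quartile(values, 1) lands between the first two elements and
+    // returns (1 + 2) / 2 = 1.5.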
+
+    normalizeValue = function (val) {
+        var nf;
+        switch (val) {
+            case 'undefined':
+                val = undefined;
+                break;
+            case 'null':
+                val = null;
+                break;
+            case 'true':
+                val = true;
+                break;
+            case 'false':
+                val = false;
+                break;
+            default:
+                nf = parseFloat(val);
+                if (val == nf) {
+                    val = nf;
+                }
+        }
+        return val;
+    };
+
+    normalizeValues = function (vals) {
+        var i, result = [];
+        for (i = vals.length; i--;) {
+            result[i] = normalizeValue(vals[i]);
+        }
+        return result;
+    };
+
+    remove = function (vals, filter) {
+        var i, vl, result = [];
+        for (i = 0, vl = vals.length; i < vl; i++) {
+            if (vals[i] !== filter) {
+                result.push(vals[i]);
+            }
+        }
+        return result;
+    };
+
+    isNumber = function (num) {
+        return !isNaN(parseFloat(num)) && isFinite(num);
+    };
+
+    formatNumber = function (num, prec, groupsize, groupsep, decsep) {
+        var p, i;
+        num = (prec === false ? parseFloat(num).toString() : num.toFixed(prec)).split('');
+        p = (p = $.inArray('.', num)) < 0 ? num.length : p;
+        if (p < num.length) {
+            num[p] = decsep;
+        }
+        for (i = p - groupsize; i > 0; i -= groupsize) {
+            num.splice(i, 0, groupsep);
+        }
+        return num.join('');
+    };
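+
+    // Worked example: formatNumber(1234567.891, 2, 3, ',', '.') fixes the
+    // precision to "1234567.89", swaps in the decimal mark, then inserts a
+    // group separator every 3 digits leftwards from the decimal point,
+    // yielding "1,234,567.89".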
+
+    // determine if all values of an array match a value
+    // returns true if the array is empty
+    all = function (val, arr, ignoreNull) {
+        var i;
+        for (i = arr.length; i--; ) {
+            if (ignoreNull && arr[i] === null) continue;
+            if (arr[i] !== val) {
+                return false;
+            }
+        }
+        return true;
+    };
+
+    // sums the numeric values in an array, ignoring other values
+    sum = function (vals) {
+        var total = 0, i;
+        for (i = vals.length; i--;) {
+            total += typeof vals[i] === 'number' ? vals[i] : 0;
+        }
+        return total;
+    };
+
+    ensureArray = function (val) {
+        return $.isArray(val) ? val : [val];
+    };
+
+    // http://paulirish.com/2008/bookmarklet-inject-new-css-rules/
+    addCSS = function(css) {
+        var tag;
+        //if ('\v' == 'v') /* ie only */ {
+        if (document.createStyleSheet) {
+            document.createStyleSheet().cssText = css;
+        } else {
+            tag = document.createElement('style');
+            tag.type = 'text/css';
+            document.getElementsByTagName('head')[0].appendChild(tag);
+            tag[(typeof document.body.style.WebkitAppearance == 'string') /* webkit only */ ? 'innerText' : 'innerHTML'] = css;
+        }
+    };
+
+    // Provide a cross-browser interface to a few simple drawing primitives
+    $.fn.simpledraw = function (width, height, useExisting, interact) {
+        var target, mhandler;
+        if (useExisting && (target = this.data('_jqs_vcanvas'))) {
+            return target;
+        }
+
+        if ($.fn.sparkline.canvas === false) {
+            // We've already determined that neither Canvas nor VML are available
+            return false;
+
+        } else if ($.fn.sparkline.canvas === undefined) {
+            // No function defined yet -- need to see if we support Canvas or VML
+            var el = document.createElement('canvas');
+            if (!!(el.getContext && el.getContext('2d'))) {
+                // Canvas is available
+                $.fn.sparkline.canvas = function(width, height, target, interact) {
+                    return new VCanvas_canvas(width, height, target, interact);
+                };
+            } else if (document.namespaces && !document.namespaces.v) {
+                // VML is available
+                document.namespaces.add('v', 'urn:schemas-microsoft-com:vml', '#default#VML');
+                $.fn.sparkline.canvas = function(width, height, target, interact) {
+                    return new VCanvas_vml(width, height, target);
+                };
+            } else {
+                // Neither Canvas nor VML are available
+                $.fn.sparkline.canvas = false;
+                return false;
+            }
+        }
+
+        if (width === undefined) {
+            width = $(this).innerWidth();
+        }
+        if (height === undefined) {
+            height = $(this).innerHeight();
+        }
+
+        target = $.fn.sparkline.canvas(width, height, this, interact);
+
+        mhandler = $(this).data('_jqs_mhandler');
+        if (mhandler) {
+            mhandler.registerCanvas(target);
+        }
+        return target;
+    };
+
+    $.fn.cleardraw = function () {
+        var target = this.data('_jqs_vcanvas');
+        if (target) {
+            target.reset();
+        }
+    };
+
+    $.RangeMapClass = RangeMap = createClass({
+        init: function (map) {
+            var key, range, rangelist = [];
+            for (key in map) {
+                if (map.hasOwnProperty(key) && typeof key === 'string' && key.indexOf(':') > -1) {
+                    range = key.split(':');
+                    range[0] = range[0].length === 0 ? -Infinity : parseFloat(range[0]);
+                    range[1] = range[1].length === 0 ? Infinity : parseFloat(range[1]);
+                    range[2] = map[key];
+                    rangelist.push(range);
+                }
+            }
+            this.map = map;
+            this.rangelist = rangelist.length ? rangelist : false;
+        },
+
+        get: function (value) {
+            var rangelist = this.rangelist,
+                i, range, result;
+            if ((result = this.map[value]) !== undefined) {
+                return result;
+            }
+            if (rangelist) {
+                for (i = rangelist.length; i--;) {
+                    range = rangelist[i];
+                    if (range[0] <= value && range[1] >= value) {
+                        return range[2];
+                    }
+                }
+            }
+            return undefined;
+        }
+    });
+
+    // Convenience function
+    $.range_map = function(map) {
+        return new RangeMap(map);
+    };
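+
+    // A sketch of a range map as accepted by colorMap, valueSpots and
+    // tooltipValueLookups (the colors are arbitrary): keys containing ':'
+    // define ranges, an omitted side means +/- Infinity, and exact
+    // (non-range) keys are consulted first.
+    //
+    //   var map = $.range_map({ ':0': '#f00', '0:50': '#fa0', '50:': '#0f0' });
+    //   map.get(25);  // '#fa0'
+    //   map.get(-3);  // '#f00'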
+
+    MouseHandler = createClass({
+        init: function (el, options) {
+            var $el = $(el);
+            this.$el = $el;
+            this.options = options;
+            this.currentPageX = 0;
+            this.currentPageY = 0;
+            this.el = el;
+            this.splist = [];
+            this.tooltip = null;
+            this.over = false;
+            this.displayTooltips = !options.get('disableTooltips');
+            this.highlightEnabled = !options.get('disableHighlight');
+        },
+
+        registerSparkline: function (sp) {
+            this.splist.push(sp);
+            if (this.over) {
+                this.updateDisplay();
+            }
+        },
+
+        registerCanvas: function (canvas) {
+            var $canvas = $(canvas.canvas);
+            this.canvas = canvas;
+            this.$canvas = $canvas;
+            $canvas.mouseenter($.proxy(this.mouseenter, this));
+            $canvas.mouseleave($.proxy(this.mouseleave, this));
+            $canvas.click($.proxy(this.mouseclick, this));
+        },
+
+        reset: function (removeTooltip) {
+            this.splist = [];
+            if (this.tooltip && removeTooltip) {
+                this.tooltip.remove();
+                this.tooltip = undefined;
+            }
+        },
+
+        mouseclick: function (e) {
+            var clickEvent = $.Event('sparklineClick');
+            clickEvent.originalEvent = e;
+            clickEvent.sparklines = this.splist;
+            this.$el.trigger(clickEvent);
+        },
+
+        mouseenter: function (e) {
+            $(document.body).unbind('mousemove.jqs');
+            $(document.body).bind('mousemove.jqs', $.proxy(this.mousemove, this));
+            this.over = true;
+            this.currentPageX = e.pageX;
+            this.currentPageY = e.pageY;
+            this.currentEl = e.target;
+            if (!this.tooltip && this.displayTooltips) {
+                this.tooltip = new Tooltip(this.options);
+                this.tooltip.updatePosition(e.pageX, e.pageY);
+            }
+            this.updateDisplay();
+        },
+
+        mouseleave: function () {
+            $(document.body).unbind('mousemove.jqs');
+            var splist = this.splist,
+                 spcount = splist.length,
+                 needsRefresh = false,
+                 sp, i;
+            this.over = false;
+            this.currentEl = null;
+
+            if (this.tooltip) {
+                this.tooltip.remove();
+                this.tooltip = null;
+            }
+
+            for (i = 0; i < spcount; i++) {
+                sp = splist[i];
+                if (sp.clearRegionHighlight()) {
+                    needsRefresh = true;
+                }
+            }
+
+            if (needsRefresh) {
+                this.canvas.render();
+            }
+        },
+
+        mousemove: function (e) {
+            this.currentPageX = e.pageX;
+            this.currentPageY = e.pageY;
+            this.currentEl = e.target;
+            if (this.tooltip) {
+                this.tooltip.updatePosition(e.pageX, e.pageY);
+            }
+            this.updateDisplay();
+        },
+
+        updateDisplay: function () {
+            var splist = this.splist,
+                 spcount = splist.length,
+                 needsRefresh = false,
+                 offset = this.$canvas.offset(),
+                 localX = this.currentPageX - offset.left,
+                 localY = this.currentPageY - offset.top,
+                 tooltiphtml, sp, i, result, changeEvent;
+            if (!this.over) {
+                return;
+            }
+            for (i = 0; i < spcount; i++) {
+                sp = splist[i];
+                result = sp.setRegionHighlight(this.currentEl, localX, localY);
+                if (result) {
+                    needsRefresh = true;
+                }
+            }
+            if (needsRefresh) {
+                changeEvent = $.Event('sparklineRegionChange');
+                changeEvent.sparklines = this.splist;
+                this.$el.trigger(changeEvent);
+                if (this.tooltip) {
+                    tooltiphtml = '';
+                    for (i = 0; i < spcount; i++) {
+                        sp = splist[i];
+                        tooltiphtml += sp.getCurrentRegionTooltip();
+                    }
+                    this.tooltip.setContent(tooltiphtml);
+                }
+                if (this.highlightEnabled) {
+                    this.canvas.render();
+                }
+            }
+            if (result === null) {
+                this.mouseleave();
+            }
+        }
+    });
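+
+    // A sketch of consuming the events MouseHandler fires (the selector and
+    // handler body are illustrative): both 'sparklineClick' and
+    // 'sparklineRegionChange' carry the affected sparklines, and each
+    // sparkline's getCurrentRegionFields() describes the value under the
+    // pointer (the exact fields depend on the chart type).
+    //
+    //   $('.sparkline').bind('sparklineRegionChange', function (ev) {
+    //       var region = ev.sparklines[0].getCurrentRegionFields();
+    //       // e.g. update an external readout from region.x / region.y
+    //   });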
+
+
+    Tooltip = createClass({
+        sizeStyle: 'position: static !important;' +
+            'display: block !important;' +
+            'visibility: hidden !important;' +
+            'float: left !important;',
+
+        init: function (options) {
+            var tooltipClassname = options.get('tooltipClassname', 'jqstooltip'),
+                sizetipStyle = this.sizeStyle,
+                offset;
+            this.container = options.get('tooltipContainer') || document.body;
+            this.tooltipOffsetX = options.get('tooltipOffsetX', 10);
+            this.tooltipOffsetY = options.get('tooltipOffsetY', 12);
+            // remove any previous lingering tooltip
+            $('#jqssizetip').remove();
+            $('#jqstooltip').remove();
+            this.sizetip = $('<div/>', {
+                id: 'jqssizetip',
+                style: sizetipStyle,
+                'class': tooltipClassname
+            });
+            this.tooltip = $('<div/>', {
+                id: 'jqstooltip',
+                'class': tooltipClassname
+            }).appendTo(this.container);
+            // account for the container's location
+            offset = this.tooltip.offset();
+            this.offsetLeft = offset.left;
+            this.offsetTop = offset.top;
+            this.hidden = true;
+            $(window).unbind('resize.jqs scroll.jqs');
+            $(window).bind('resize.jqs scroll.jqs', $.proxy(this.updateWindowDims, this));
+            this.updateWindowDims();
+        },
+
+        updateWindowDims: function () {
+            this.scrollTop = $(window).scrollTop();
+            this.scrollLeft = $(window).scrollLeft();
+            this.scrollRight = this.scrollLeft + $(window).width();
+            this.updatePosition();
+        },
+
+        getSize: function (content) {
+            this.sizetip.html(content).appendTo(this.container);
+            this.width = this.sizetip.width() + 1;
+            this.height = this.sizetip.height();
+            this.sizetip.remove();
+        },
+
+        setContent: function (content) {
+            if (!content) {
+                this.tooltip.css('visibility', 'hidden');
+                this.hidden = true;
+                return;
+            }
+            this.getSize(content);
+            this.tooltip.html(content)
+                .css({
+                    'width': this.width,
+                    'height': this.height,
+                    'visibility': 'visible'
+                });
+            if (this.hidden) {
+                this.hidden = false;
+                this.updatePosition();
+            }
+        },
+
+        updatePosition: function (x, y) {
+            if (x === undefined) {
+                if (this.mousex === undefined) {
+                    return;
+                }
+                x = this.mousex - this.offsetLeft;
+                y = this.mousey - this.offsetTop;
+
+            } else {
+                this.mousex = x = x - this.offsetLeft;
+                this.mousey = y = y - this.offsetTop;
+            }
+            if (!this.height || !this.width || this.hidden) {
+                return;
+            }
+
+            y -= this.height + this.tooltipOffsetY;
+            x += this.tooltipOffsetX;
+
+            if (y < this.scrollTop) {
+                y = this.scrollTop;
+            }
+            if (x < this.scrollLeft) {
+                x = this.scrollLeft;
+            } else if (x + this.width > this.scrollRight) {
+                x = this.scrollRight - this.width;
+            }
+
+            this.tooltip.css({
+                'left': x,
+                'top': y
+            });
+        },
+
+        remove: function () {
+            this.tooltip.remove();
+            this.sizetip.remove();
+            this.sizetip = this.tooltip = undefined;
+            $(window).unbind('resize.jqs scroll.jqs');
+        }
+    });
+
+    initStyles = function() {
+        addCSS(defaultStyles);
+    };
+
+    $(initStyles);
+
+    pending = [];
+    $.fn.sparkline = function (userValues, userOptions) {
+        return this.each(function () {
+            var options = new $.fn.sparkline.options(this, userOptions),
+                 $this = $(this),
+                 render, i;
+            render = function () {
+                var values, width, height, tmp, mhandler, sp, vals;
+                if (userValues === 'html' || userValues === undefined) {
+                    vals = this.getAttribute(options.get('tagValuesAttribute'));
+                    if (vals === undefined || vals === null) {
+                        vals = $this.html();
+                    }
+                    values = vals.replace(/(^\s*<!--)|(-->\s*$)|\s+/g, '').split(',');
+                } else {
+                    values = userValues;
+                }
+
+                width = options.get('width') === 'auto' ? values.length * options.get('defaultPixelsPerValue') : options.get('width');
+                if (options.get('height') === 'auto') {
+                    if (!options.get('composite') || !$.data(this, '_jqs_vcanvas')) {
+                        // must be a better way to get the line height
+                        tmp = document.createElement('span');
+                        tmp.innerHTML = 'a';
+                        $this.html(tmp);
+                        height = $(tmp).innerHeight() || $(tmp).height();
+                        $(tmp).remove();
+                        tmp = null;
+                    }
+                } else {
+                    height = options.get('height');
+                }
+
+                if (!options.get('disableInteraction')) {
+                    mhandler = $.data(this, '_jqs_mhandler');
+                    if (!mhandler) {
+                        mhandler = new MouseHandler(this, options);
+                        $.data(this, '_jqs_mhandler', mhandler);
+                    } else if (!options.get('composite')) {
+                        mhandler.reset();
+                    }
+                } else {
+                    mhandler = false;
+                }
+
+                if (options.get('composite') && !$.data(this, '_jqs_vcanvas')) {
+                    if (!$.data(this, '_jqs_errnotify')) {
+                        alert('Attempted to attach a composite sparkline to an element with no existing sparkline');
+                        $.data(this, '_jqs_errnotify', true);
+                    }
+                    return;
+                }
+
+                sp = new $.fn.sparkline[options.get('type')](this, values, options, width, height);
+
+                sp.render();
+
+                if (mhandler) {
+                    mhandler.registerSparkline(sp);
+                }
+            };
+            if (($(this).html() && !options.get('disableHiddenCheck') && $(this).is(':hidden')) || !$(this).parents('body').length) {
+                if (!options.get('composite') && $.data(this, '_jqs_pending')) {
+                    // remove any existing references to the element
+                    for (i = pending.length; i; i--) {
+                        if (pending[i - 1][0] == this) {
+                            pending.splice(i - 1, 1);
+                        }
+                    }
+                }
+                pending.push([this, render]);
+                $.data(this, '_jqs_pending', true);
+            } else {
+                render.call(this);
+            }
+        });
+    };
+
+    $.fn.sparkline.defaults = getDefaults();
+
+
+    $.sparkline_display_visible = function () {
+        var el, i, pl;
+        var done = [];
+        for (i = 0, pl = pending.length; i < pl; i++) {
+            el = pending[i][0];
+            if ($(el).is(':visible') && !$(el).parents().is(':hidden')) {
+                pending[i][1].call(el);
+                $.data(pending[i][0], '_jqs_pending', false);
+                done.push(i);
+            } else if (!$(el).closest('html').length && !$.data(el, '_jqs_pending')) {
+                // The element was inserted into and then removed from the DOM:
+                // removal clears its jQuery data, so the '_jqs_pending' flag is
+                // gone. (Had it never been inserted at all, the flag set when
+                // it was queued would still be present.)
+                $.data(pending[i][0], '_jqs_pending', false);
+                done.push(i);
+            }
+        }
+        for (i = done.length; i; i--) {
+            pending.splice(done[i - 1], 1);
+        }
+    };
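+
+    // A sketch of the intended use of $.sparkline_display_visible (the tab
+    // markup is hypothetical): charts requested while their container was
+    // hidden sit in the 'pending' queue, and this call renders any that
+    // have since become visible.
+    //
+    //   $('#stats-tab').show();
+    //   $.sparkline_display_visible();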
+
+
+    /**
+     * User option handler
+     */
+    $.fn.sparkline.options = createClass({
+        init: function (tag, userOptions) {
+            var extendedOptions, defaults, base, tagOptionType;
+            this.userOptions = userOptions = userOptions || {};
+            this.tag = tag;
+            this.tagValCache = {};
+            defaults = $.fn.sparkline.defaults;
+            base = defaults.common;
+            this.tagOptionsPrefix = userOptions.enableTagOptions && (userOptions.tagOptionsPrefix || base.tagOptionsPrefix);
+
+            tagOptionType = this.getTagSetting('type');
+            if (tagOptionType === UNSET_OPTION) {
+                extendedOptions = defaults[userOptions.type || base.type];
+            } else {
+                extendedOptions = defaults[tagOptionType];
+            }
+            this.mergedOptions = $.extend({}, base, extendedOptions, userOptions);
+        },
+
+
+        getTagSetting: function (key) {
+            var prefix = this.tagOptionsPrefix,
+                val, i, pairs, keyval;
+            if (prefix === false || prefix === undefined) {
+                return UNSET_OPTION;
+            }
+            if (this.tagValCache.hasOwnProperty(key)) {
+                val = this.tagValCache[key];
+            } else {
+                val = this.tag.getAttribute(prefix + key);
+                if (val === undefined || val === null) {
+                    val = UNSET_OPTION;
+                } else if (val.substr(0, 1) === '[') {
+                    val = val.substr(1, val.length - 2).split(',');
+                    for (i = val.length; i--;) {
+                        val[i] = normalizeValue(val[i].replace(/(^\s*)|(\s*$)/g, ''));
+                    }
+                } else if (val.substr(0, 1) === '{') {
+                    pairs = val.substr(1, val.length - 2).split(',');
+                    val = {};
+                    for (i = pairs.length; i--;) {
+                        keyval = pairs[i].split(':', 2);
+                        val[keyval[0].replace(/(^\s*)|(\s*$)/g, '')] = normalizeValue(keyval[1].replace(/(^\s*)|(\s*$)/g, ''));
+                    }
+                } else {
+                    val = normalizeValue(val);
+                }
+                this.tagValCache[key] = val;
+            }
+            return val;
+        },
+
+        get: function (key, defaultval) {
+            var tagOption = this.getTagSetting(key),
+                result;
+            if (tagOption !== UNSET_OPTION) {
+                return tagOption;
+            }
+            return (result = this.mergedOptions[key]) === undefined ? defaultval : result;
+        }
+    });
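+
+    // A sketch of the tag-option parsing implemented by getTagSetting above
+    // (the attribute values are illustrative): with enableTagOptions set,
+    // attributes named <prefix><Option> are read, and values wrapped in []
+    // or {} are parsed into arrays and object maps respectively.
+    //
+    //   <span class="sparkline" sparkType="bar"
+    //         sparkColorMap="[#f00, #0f0, #00f]">1,2,3</span>
+    //   $('.sparkline').sparkline('html', { enableTagOptions: true });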
+
+
+    $.fn.sparkline._base = createClass({
+        disabled: false,
+
+        init: function (el, values, options, width, height) {
+            this.el = el;
+            this.$el = $(el);
+            this.values = values;
+            this.options = options;
+            this.width = width;
+            this.height = height;
+            this.currentRegion = undefined;
+        },
+
+        /**
+         * Setup the canvas
+         */
+        initTarget: function () {
+            var interactive = !this.options.get('disableInteraction');
+            if (!(this.target = this.$el.simpledraw(this.width, this.height, this.options.get('composite'), interactive))) {
+                this.disabled = true;
+            } else {
+                this.canvasWidth = this.target.pixelWidth;
+                this.canvasHeight = this.target.pixelHeight;
+            }
+        },
+
+        /**
+         * Actually render the chart to the canvas
+         */
+        render: function () {
+            if (this.disabled) {
+                this.el.innerHTML = '';
+                return false;
+            }
+            return true;
+        },
+
+        /**
+         * Return a region id for a given x/y co-ordinate
+         */
+        getRegion: function (el, x, y) {
+        },
+
+        /**
+         * Highlight an item based on the moused-over x,y co-ordinate
+         */
+        setRegionHighlight: function (el, x, y) {
+            var currentRegion = this.currentRegion,
+                highlightEnabled = !this.options.get('disableHighlight'),
+                newRegion;
+            if (x > this.canvasWidth || y > this.canvasHeight || x < 0 || y < 0) {
+                return null;
+            }
+            newRegion = this.getRegion(el, x, y);
+            if (currentRegion !== newRegion) {
+                if (currentRegion !== undefined && highlightEnabled) {
+                    this.removeHighlight();
+                }
+                this.currentRegion = newRegion;
+                if (newRegion !== undefined && highlightEnabled) {
+                    this.renderHighlight();
+                }
+                return true;
+            }
+            return false;
+        },
+
+        /**
+         * Reset any currently highlighted item
+         */
+        clearRegionHighlight: function () {
+            if (this.currentRegion !== undefined) {
+                this.removeHighlight();
+                this.currentRegion = undefined;
+                return true;
+            }
+            return false;
+        },
+
+        renderHighlight: function () {
+            this.changeHighlight(true);
+        },
+
+        removeHighlight: function () {
+            this.changeHighlight(false);
+        },
+
+        changeHighlight: function (highlight)  {},
+
+        /**
+         * Fetch the HTML to display as a tooltip
+         */
+        getCurrentRegionTooltip: function () {
+            var options = this.options,
+                header = '',
+                entries = [],
+                fields, formats, formatlen, fclass, text, i,
+                showFields, showFieldsKey, newFields, fv,
+                formatter, format, fieldlen, j;
+            if (this.currentRegion === undefined) {
+                return '';
+            }
+            fields = this.getCurrentRegionFields();
+            formatter = options.get('tooltipFormatter');
+            if (formatter) {
+                return formatter(this, options, fields);
+            }
+            if (options.get('tooltipChartTitle')) {
+                header += '<div class="jqs jqstitle">' + options.get('tooltipChartTitle') + '</div>\n';
+            }
+            formats = this.options.get('tooltipFormat');
+            if (!formats) {
+                return '';
+            }
+            if (!$.isArray(formats)) {
+                formats = [formats];
+            }
+            if (!$.isArray(fields)) {
+                fields = [fields];
+            }
+            showFields = this.options.get('tooltipFormatFieldlist');
+            showFieldsKey = this.options.get('tooltipFormatFieldlistKey');
+            if (showFields && showFieldsKey) {
+                // user-selected ordering of fields
+                newFields = [];
+                for (i = fields.length; i--;) {
+                    fv = fields[i][showFieldsKey];
+                    if ((j = $.inArray(fv, showFields)) != -1) {
+                        newFields[j] = fields[i];
+                    }
+                }
+                fields = newFields;
+            }
+            formatlen = formats.length;
+            fieldlen = fields.length;
+            for (i = 0; i < formatlen; i++) {
+                format = formats[i];
+                if (typeof format === 'string') {
+                    format = new SPFormat(format);
+                }
+                fclass = format.fclass || 'jqsfield';
+                for (j = 0; j < fieldlen; j++) {
+                    if (!fields[j].isNull || !options.get('tooltipSkipNull')) {
+                        $.extend(fields[j], {
+                            prefix: options.get('tooltipPrefix'),
+                            suffix: options.get('tooltipSuffix')
+                        });
+                        text = format.render(fields[j], options.get('tooltipValueLookups'), options);
+                        entries.push('<div class="' + fclass + '">' + text + '</div>');
+                    }
+                }
+            }
+            if (entries.length) {
+                return header + entries.join('\n');
+            }
+            return '';
+        },
+
+        getCurrentRegionFields: function () {},
+
+        calcHighlightColor: function (color, options) {
+            var highlightColor = options.get('highlightColor'),
+                lighten = options.get('highlightLighten'),
+                parse, mult, rgbnew, i;
+            if (highlightColor) {
+                return highlightColor;
+            }
+            if (lighten) {
+                // extract RGB values
+                parse = /^#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(color) || /^#([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})$/i.exec(color);
+                if (parse) {
+                    rgbnew = [];
+                    mult = color.length === 4 ? 16 : 1;
+                    for (i = 0; i < 3; i++) {
+                        rgbnew[i] = clipval(Math.round(parseInt(parse[i + 1], 16) * mult * lighten), 0, 255);
+                    }
+                    return 'rgb(' + rgbnew.join(',') + ')';
+                }
+
+            }
+            return color;
+        }
+
+    });
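+
+    // Worked example of calcHighlightColor above with the default
+    // highlightLighten of 1.4 and no explicit highlightColor: '#f80'
+    // expands each hex digit (mult = 16) and scales it, clipping to 0-255,
+    // so r = min(round(15 * 16 * 1.4), 255) = 255, g = round(8 * 16 * 1.4)
+    // = 179, b = 0, giving 'rgb(255,179,0)'.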
+
+    barHighlightMixin = {
+        changeHighlight: function (highlight) {
+            var currentRegion = this.currentRegion,
+                target = this.target,
+                shapeids = this.regionShapes[currentRegion],
+                newShapes;
+            // will be null if the region value was null
+            if (shapeids) {
+                newShapes = this.renderRegion(currentRegion, highlight);
+                if ($.isArray(newShapes) || $.isArray(shapeids)) {
+                    target.replaceWithShapes(shapeids, newShapes);
+                    this.regionShapes[currentRegion] = $.map(newShapes, function (newShape) {
+                        return newShape.id;
+                    });
+                } else {
+                    target.replaceWithShape(shapeids, newShapes);
+                    this.regionShapes[currentRegion] = newShapes.id;
+                }
+            }
+        },
+
+        render: function () {
+            var values = this.values,
+                target = this.target,
+                regionShapes = this.regionShapes,
+                shapes, ids, i, j;
+
+            if (!this.cls._super.render.call(this)) {
+                return;
+            }
+            for (i = values.length; i--;) {
+                shapes = this.renderRegion(i);
+                if (shapes) {
+                    if ($.isArray(shapes)) {
+                        ids = [];
+                        for (j = shapes.length; j--;) {
+                            shapes[j].append();
+                            ids.push(shapes[j].id);
+                        }
+                        regionShapes[i] = ids;
+                    } else {
+                        shapes.append();
+                        regionShapes[i] = shapes.id; // store just the shapeid
+                    }
+                } else {
+                    // null value
+                    regionShapes[i] = null;
+                }
+            }
+            target.render();
+        }
+    };
+
+    /**
+     * Line charts
+     */
+    $.fn.sparkline.line = line = createClass($.fn.sparkline._base, {
+        type: 'line',
+
+        init: function (el, values, options, width, height) {
+            line._super.init.call(this, el, values, options, width, height);
+            this.vertices = [];
+            this.regionMap = [];
+            this.xvalues = [];
+            this.yvalues = [];
+            this.yminmax = [];
+            this.highlightSpotId = null;
+            this.highlightLineId = null;
+            this.lastShapeId = null;
+            this.initTarget();
+        },
+
+        getRegion: function (el, x, y) {
+            var i,
+                regionMap = this.regionMap; // maps regions to value positions
+            for (i = regionMap.length; i--;) {
+                if (regionMap[i] !== null && x >= regionMap[i][0] && x <= regionMap[i][1]) {
+                    return regionMap[i][2];
+                }
+            }
+            return undefined;
+        },
+
+        getCurrentRegionFields: function () {
+            var currentRegion = this.currentRegion;
+            return {
+                isNull: this.yvalues[currentRegion] === null,
+                x: this.xvalues[currentRegion],
+                y: this.yvalues[currentRegion],
+                color: this.options.get('lineColor'),
+                fillColor: this.options.get('fillColor'),
+                offset: currentRegion
+            };
+        },
+
+        renderHighlight: function () {
+            var currentRegion = this.currentRegion,
+                target = this.target,
+                vertex = this.vertices[currentRegion],
+                options = this.options,
+                spotRadius = options.get('spotRadius'),
+                highlightSpotColor = options.get('highlightSpotColor'),
+                highlightLineColor = options.get('highlightLineColor'),
+                highlightSpot, highlightLine;
+
+            if (!vertex) {
+                return;
+            }
+            if (spotRadius && highlightSpotColor) {
+                highlightSpot = target.drawCircle(vertex[0], vertex[1],
+                    spotRadius, undefined, highlightSpotColor);
+                this.highlightSpotId = highlightSpot.id;
+                target.insertAfterShape(this.lastShapeId, highlightSpot);
+            }
+            if (highlightLineColor) {
+                highlightLine = target.drawLine(vertex[0], this.canvasTop, vertex[0],
+                    this.canvasTop + this.canvasHeight, highlightLineColor);
+                this.highlightLineId = highlightLine.id;
+                target.insertAfterShape(this.lastShapeId, highlightLine);
+            }
+        },
+
+        removeHighlight: function () {
+            var target = this.target;
+            if (this.highlightSpotId) {
+                target.removeShapeId(this.highlightSpotId);
+                this.highlightSpotId = null;
+            }
+            if (this.highlightLineId) {
+                target.removeShapeId(this.highlightLineId);
+                this.highlightLineId = null;
+            }
+        },
+
+        scanValues: function () {
+            var values = this.values,
+                valcount = values.length,
+                xvalues = this.xvalues,
+                yvalues = this.yvalues,
+                yminmax = this.yminmax,
+                i, val, isStr, isArray, sp;
+            for (i = 0; i < valcount; i++) {
+                val = values[i];
+                isStr = typeof(values[i]) === 'string';
+                isArray = typeof(values[i]) === 'object' && values[i] instanceof Array;
+                sp = isStr && values[i].split(':');
+                if (isStr && sp.length === 2) { // x:y
+                    xvalues.push(Number(sp[0]));
+                    yvalues.push(Number(sp[1]));
+                    yminmax.push(Number(sp[1]));
+                } else if (isArray) {
+                    xvalues.push(val[0]);
+                    yvalues.push(val[1]);
+                    yminmax.push(val[1]);
+                } else {
+                    xvalues.push(i);
+                    if (values[i] === null || values[i] === 'null') {
+                        yvalues.push(null);
+                    } else {
+                        yvalues.push(Number(val));
+                        yminmax.push(Number(val));
+                    }
+                }
+            }
+            if (this.options.get('xvalues')) {
+                xvalues = this.options.get('xvalues');
+            }
+
+            this.maxy = this.maxyorg = Math.max.apply(Math, yminmax);
+            this.miny = this.minyorg = Math.min.apply(Math, yminmax);
+
+            this.maxx = Math.max.apply(Math, xvalues);
+            this.minx = Math.min.apply(Math, xvalues);
+
+            this.xvalues = xvalues;
+            this.yvalues = yvalues;
+            this.yminmax = yminmax;
+        },
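+
+        /*
+         * A minimal sketch of the three value formats scanValues() accepts
+         * (the selector and data below are illustrative assumptions, not
+         * part of the library):
+         *
+         *     $('.spark').sparkline([1, 4, 2]);                // plain numbers; x is the index
+         *     $('.spark').sparkline(['1:4', '2:2', '5:7']);    // 'x:y' strings
+         *     $('.spark').sparkline([[1, 4], [2, 2], [5, 7]]); // [x, y] arrays
+         *
+         * A null value (or the string 'null') produces a gap in the line.
+         */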
+
+        processRangeOptions: function () {
+            var options = this.options,
+                normalRangeMin = options.get('normalRangeMin'),
+                normalRangeMax = options.get('normalRangeMax');
+
+            if (normalRangeMin !== undefined) {
+                if (normalRangeMin < this.miny) {
+                    this.miny = normalRangeMin;
+                }
+                if (normalRangeMax > this.maxy) {
+                    this.maxy = normalRangeMax;
+                }
+            }
+            if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < this.miny)) {
+                this.miny = options.get('chartRangeMin');
+            }
+            if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > this.maxy)) {
+                this.maxy = options.get('chartRangeMax');
+            }
+            if (options.get('chartRangeMinX') !== undefined && (options.get('chartRangeClipX') || options.get('chartRangeMinX') < this.minx)) {
+                this.minx = options.get('chartRangeMinX');
+            }
+            if (options.get('chartRangeMaxX') !== undefined && (options.get('chartRangeClipX') || options.get('chartRangeMaxX') > this.maxx)) {
+                this.maxx = options.get('chartRangeMaxX');
+            }
+        },
+
+        drawNormalRange: function (canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey) {
+            var normalRangeMin = this.options.get('normalRangeMin'),
+                normalRangeMax = this.options.get('normalRangeMax'),
+                ytop = canvasTop + Math.round(canvasHeight - (canvasHeight * ((normalRangeMax - this.miny) / rangey))),
+                height = Math.round((canvasHeight * (normalRangeMax - normalRangeMin)) / rangey);
+            this.target.drawRect(canvasLeft, ytop, canvasWidth, height, undefined, this.options.get('normalRangeColor')).append();
+        },
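+
+        /*
+         * A hedged example of the options read by drawNormalRange(); the
+         * option names come from this file, the values are illustrative:
+         *
+         *     $('.spark').sparkline(values, {
+         *         type: 'line',
+         *         normalRangeMin: 10,       // lower bound of the shaded band
+         *         normalRangeMax: 20,       // upper bound of the shaded band
+         *         normalRangeColor: '#ccc',
+         *         drawNormalOnTop: false    // band under the line (see render())
+         *     });
+         */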
+
+        render: function () {
+            var options = this.options,
+                target = this.target,
+                canvasWidth = this.canvasWidth,
+                canvasHeight = this.canvasHeight,
+                vertices = this.vertices,
+                spotRadius = options.get('spotRadius'),
+                regionMap = this.regionMap,
+                rangex, rangey, yvallast,
+                canvasTop, canvasLeft,
+                vertex, path, paths, x, y, xnext, xpos, xposnext,
+                last, next, yvalcount, lineShapes, fillShapes, plen,
+                valueSpots, hlSpotsEnabled, color, xvalues, yvalues, i;
+
+            if (!line._super.render.call(this)) {
+                return;
+            }
+
+            this.scanValues();
+            this.processRangeOptions();
+
+            xvalues = this.xvalues;
+            yvalues = this.yvalues;
+
+            if (!this.yminmax.length || this.yvalues.length < 2) {
+                // empty or all null values
+                return;
+            }
+
+            canvasTop = canvasLeft = 0;
+
+            rangex = this.maxx - this.minx === 0 ? 1 : this.maxx - this.minx;
+            rangey = this.maxy - this.miny === 0 ? 1 : this.maxy - this.miny;
+            yvallast = this.yvalues.length - 1;
+
+            if (spotRadius && (canvasWidth < (spotRadius * 4) || canvasHeight < (spotRadius * 4))) {
+                spotRadius = 0;
+            }
+            if (spotRadius) {
+                // adjust the canvas size as required so that spots will fit
+                hlSpotsEnabled = options.get('highlightSpotColor') && !options.get('disableInteraction');
+                if (hlSpotsEnabled || options.get('minSpotColor') || (options.get('spotColor') && yvalues[yvallast] === this.miny)) {
+                    canvasHeight -= Math.ceil(spotRadius);
+                }
+                if (hlSpotsEnabled || options.get('maxSpotColor') || (options.get('spotColor') && yvalues[yvallast] === this.maxy)) {
+                    canvasHeight -= Math.ceil(spotRadius);
+                    canvasTop += Math.ceil(spotRadius);
+                }
+                if (hlSpotsEnabled ||
+                     ((options.get('minSpotColor') || options.get('maxSpotColor')) && (yvalues[0] === this.miny || yvalues[0] === this.maxy))) {
+                    canvasLeft += Math.ceil(spotRadius);
+                    canvasWidth -= Math.ceil(spotRadius);
+                }
+                if (hlSpotsEnabled || options.get('spotColor') ||
+                    ((options.get('minSpotColor') || options.get('maxSpotColor')) &&
+                        (yvalues[yvallast] === this.miny || yvalues[yvallast] === this.maxy))) {
+                    canvasWidth -= Math.ceil(spotRadius);
+                }
+            }
+
+            canvasHeight--;
+
+            if (options.get('normalRangeMin') !== undefined && !options.get('drawNormalOnTop')) {
+                this.drawNormalRange(canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey);
+            }
+
+            path = [];
+            paths = [path];
+            last = next = null;
+            yvalcount = yvalues.length;
+            for (i = 0; i < yvalcount; i++) {
+                x = xvalues[i];
+                xnext = xvalues[i + 1];
+                y = yvalues[i];
+                xpos = canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex));
+                xposnext = i < yvalcount - 1 ? canvasLeft + Math.round((xnext - this.minx) * (canvasWidth / rangex)) : canvasWidth;
+                next = xpos + ((xposnext - xpos) / 2);
+                regionMap[i] = [last || 0, next, i];
+                last = next;
+                if (y === null) {
+                    if (i) {
+                        if (yvalues[i - 1] !== null) {
+                            path = [];
+                            paths.push(path);
+                        }
+                        vertices.push(null);
+                    }
+                } else {
+                    if (y < this.miny) {
+                        y = this.miny;
+                    }
+                    if (y > this.maxy) {
+                        y = this.maxy;
+                    }
+                    if (!path.length) {
+                        // previous value was null
+                        path.push([xpos, canvasTop + canvasHeight]);
+                    }
+                    vertex = [xpos, canvasTop + Math.round(canvasHeight - (canvasHeight * ((y - this.miny) / rangey)))];
+                    path.push(vertex);
+                    vertices.push(vertex);
+                }
+            }
+
+            lineShapes = [];
+            fillShapes = [];
+            plen = paths.length;
+            for (i = 0; i < plen; i++) {
+                path = paths[i];
+                if (path.length) {
+                    if (options.get('fillColor')) {
+                        path.push([path[path.length - 1][0], (canvasTop + canvasHeight)]);
+                        fillShapes.push(path.slice(0));
+                        path.pop();
+                    }
+                    // a path with a single point is displayed as a vertical line,
+                    // so path[0] (pushed at the canvas bottom for the fill) is kept as is
+                    if (path.length > 2) {
+                        // otherwise snap the first point to the first real y value
+                        path[0] = [path[0][0], path[1][1]];
+                    }
+                    lineShapes.push(path);
+                }
+            }
+
+            // draw the fill first, then optionally the normal range, then the line on top of that
+            plen = fillShapes.length;
+            for (i = 0; i < plen; i++) {
+                target.drawShape(fillShapes[i],
+                    options.get('fillColor'), options.get('fillColor')).append();
+            }
+
+            if (options.get('normalRangeMin') !== undefined && options.get('drawNormalOnTop')) {
+                this.drawNormalRange(canvasLeft, canvasTop, canvasHeight, canvasWidth, rangey);
+            }
+
+            plen = lineShapes.length;
+            for (i = 0; i < plen; i++) {
+                target.drawShape(lineShapes[i], options.get('lineColor'), undefined,
+                    options.get('lineWidth')).append();
+            }
+
+            if (spotRadius && options.get('valueSpots')) {
+                valueSpots = options.get('valueSpots');
+                if (valueSpots.get === undefined) {
+                    valueSpots = new RangeMap(valueSpots);
+                }
+                for (i = 0; i < yvalcount; i++) {
+                    color = valueSpots.get(yvalues[i]);
+                    if (color) {
+                        target.drawCircle(canvasLeft + Math.round((xvalues[i] - this.minx) * (canvasWidth / rangex)),
+                            canvasTop + Math.round(canvasHeight - (canvasHeight * ((yvalues[i] - this.miny) / rangey))),
+                            spotRadius, undefined,
+                            color).append();
+                    }
+                }
+            }
+            if (spotRadius && options.get('spotColor') && yvalues[yvallast] !== null) {
+                target.drawCircle(canvasLeft + Math.round((xvalues[xvalues.length - 1] - this.minx) * (canvasWidth / rangex)),
+                    canvasTop + Math.round(canvasHeight - (canvasHeight * ((yvalues[yvallast] - this.miny) / rangey))),
+                    spotRadius, undefined,
+                    options.get('spotColor')).append();
+            }
+            if (this.maxy !== this.minyorg) {
+                if (spotRadius && options.get('minSpotColor')) {
+                    x = xvalues[$.inArray(this.minyorg, yvalues)];
+                    target.drawCircle(canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex)),
+                        canvasTop + Math.round(canvasHeight - (canvasHeight * ((this.minyorg - this.miny) / rangey))),
+                        spotRadius, undefined,
+                        options.get('minSpotColor')).append();
+                }
+                if (spotRadius && options.get('maxSpotColor')) {
+                    x = xvalues[$.inArray(this.maxyorg, yvalues)];
+                    target.drawCircle(canvasLeft + Math.round((x - this.minx) * (canvasWidth / rangex)),
+                        canvasTop + Math.round(canvasHeight - (canvasHeight * ((this.maxyorg - this.miny) / rangey))),
+                        spotRadius, undefined,
+                        options.get('maxSpotColor')).append();
+                }
+            }
+
+            this.lastShapeId = target.getLastShapeId();
+            this.canvasTop = canvasTop;
+            target.render();
+        }
+    });
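+
+    /*
+     * A minimal usage sketch for the line chart above (selector and data
+     * are illustrative assumptions):
+     *
+     *     $('.spark-line').sparkline([5, 6, 7, 9, 9, 5, 3, 2, 2, 4, 6, 7], {
+     *         type: 'line',
+     *         lineColor: '#00f',
+     *         fillColor: '#cdf',
+     *         spotRadius: 1.5
+     *     });
+     */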
+
+    /**
+     * Bar charts
+     */
+    $.fn.sparkline.bar = bar = createClass($.fn.sparkline._base, barHighlightMixin, {
+        type: 'bar',
+
+        init: function (el, values, options, width, height) {
+            var barWidth = parseInt(options.get('barWidth'), 10),
+                barSpacing = parseInt(options.get('barSpacing'), 10),
+                chartRangeMin = options.get('chartRangeMin'),
+                chartRangeMax = options.get('chartRangeMax'),
+                chartRangeClip = options.get('chartRangeClip'),
+                stackMin = Infinity,
+                stackMax = -Infinity,
+                isStackString, groupMin, groupMax, stackRanges,
+                numValues, i, vlen, range, zeroAxis, xaxisOffset, min, max, clipMin, clipMax,
+                stacked, vlist, j, slen, svals, val, yoffset, yMaxCalc, canvasHeightEf;
+            bar._super.init.call(this, el, values, options, width, height);
+
+            // scan values to determine whether to stack bars
+            for (i = 0, vlen = values.length; i < vlen; i++) {
+                val = values[i];
+                isStackString = typeof(val) === 'string' && val.indexOf(':') > -1;
+                if (isStackString || $.isArray(val)) {
+                    stacked = true;
+                    if (isStackString) {
+                        val = values[i] = normalizeValues(val.split(':'));
+                    }
+                    val = remove(val, null); // min/max will treat null as zero
+                    groupMin = Math.min.apply(Math, val);
+                    groupMax = Math.max.apply(Math, val);
+                    if (groupMin < stackMin) {
+                        stackMin = groupMin;
+                    }
+                    if (groupMax > stackMax) {
+                        stackMax = groupMax;
+                    }
+                }
+            }
+
+            this.stacked = stacked;
+            this.regionShapes = {};
+            this.barWidth = barWidth;
+            this.barSpacing = barSpacing;
+            this.totalBarWidth = barWidth + barSpacing;
+            this.width = width = (values.length * barWidth) + ((values.length - 1) * barSpacing);
+
+            this.initTarget();
+
+            if (chartRangeClip) {
+                clipMin = chartRangeMin === undefined ? -Infinity : chartRangeMin;
+                clipMax = chartRangeMax === undefined ? Infinity : chartRangeMax;
+            }
+
+            numValues = [];
+            stackRanges = stacked ? [] : numValues;
+            var stackTotals = [];
+            var stackRangesNeg = [];
+            for (i = 0, vlen = values.length; i < vlen; i++) {
+                if (stacked) {
+                    vlist = values[i];
+                    values[i] = svals = [];
+                    stackTotals[i] = 0;
+                    stackRanges[i] = stackRangesNeg[i] = 0;
+                    for (j = 0, slen = vlist.length; j < slen; j++) {
+                        val = svals[j] = chartRangeClip ? clipval(vlist[j], clipMin, clipMax) : vlist[j];
+                        if (val !== null) {
+                            if (val > 0) {
+                                stackTotals[i] += val;
+                            }
+                            if (stackMin < 0 && stackMax > 0) {
+                                if (val < 0) {
+                                    stackRangesNeg[i] += Math.abs(val);
+                                } else {
+                                    stackRanges[i] += val;
+                                }
+                            } else {
+                                stackRanges[i] += Math.abs(val - (val < 0 ? stackMax : stackMin));
+                            }
+                            numValues.push(val);
+                        }
+                    }
+                } else {
+                    val = chartRangeClip ? clipval(values[i], clipMin, clipMax) : values[i];
+                    val = values[i] = normalizeValue(val);
+                    if (val !== null) {
+                        numValues.push(val);
+                    }
+                }
+            }
+            this.max = max = Math.max.apply(Math, numValues);
+            this.min = min = Math.min.apply(Math, numValues);
+            this.stackMax = stackMax = stacked ? Math.max.apply(Math, stackTotals) : max;
+            this.stackMin = stackMin = stacked ? Math.min.apply(Math, numValues) : min;
+
+            if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < min)) {
+                min = options.get('chartRangeMin');
+            }
+            if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > max)) {
+                max = options.get('chartRangeMax');
+            }
+
+            this.zeroAxis = zeroAxis = options.get('zeroAxis', true);
+            if (min <= 0 && max >= 0 && zeroAxis) {
+                xaxisOffset = 0;
+            } else if (zeroAxis === false) {
+                xaxisOffset = min;
+            } else if (min > 0) {
+                xaxisOffset = min;
+            } else {
+                xaxisOffset = max;
+            }
+            this.xaxisOffset = xaxisOffset;
+
+            range = stacked ? (Math.max.apply(Math, stackRanges) + Math.max.apply(Math, stackRangesNeg)) : max - min;
+
+            // zero/min values are plotted as a single-pixel line and all other values
+            // get an extra pixel, so reduce the effective canvas size to suit
+            this.canvasHeightEf = (zeroAxis && min < 0) ? this.canvasHeight - 2 : this.canvasHeight - 1;
+
+            if (min < xaxisOffset) {
+                yMaxCalc = (stacked && max >= 0) ? stackMax : max;
+                yoffset = (yMaxCalc - xaxisOffset) / range * this.canvasHeight;
+                if (yoffset !== Math.ceil(yoffset)) {
+                    this.canvasHeightEf -= 2;
+                    yoffset = Math.ceil(yoffset);
+                }
+            } else {
+                yoffset = this.canvasHeight;
+            }
+            this.yoffset = yoffset;
+
+            if ($.isArray(options.get('colorMap'))) {
+                this.colorMapByIndex = options.get('colorMap');
+                this.colorMapByValue = null;
+            } else {
+                this.colorMapByIndex = null;
+                this.colorMapByValue = options.get('colorMap');
+                if (this.colorMapByValue && this.colorMapByValue.get === undefined) {
+                    this.colorMapByValue = new RangeMap(this.colorMapByValue);
+                }
+            }
+
+            this.range = range;
+        },
+
+        getRegion: function (el, x, y) {
+            var result = Math.floor(x / this.totalBarWidth);
+            return (result < 0 || result >= this.values.length) ? undefined : result;
+        },
+
+        getCurrentRegionFields: function () {
+            var currentRegion = this.currentRegion,
+                values = ensureArray(this.values[currentRegion]),
+                result = [],
+                value, i;
+            for (i = values.length; i--;) {
+                value = values[i];
+                result.push({
+                    isNull: value === null,
+                    value: value,
+                    color: this.calcColor(i, value, currentRegion),
+                    offset: currentRegion
+                });
+            }
+            return result;
+        },
+
+        calcColor: function (stacknum, value, valuenum) {
+            var colorMapByIndex = this.colorMapByIndex,
+                colorMapByValue = this.colorMapByValue,
+                options = this.options,
+                color, newColor;
+            if (this.stacked) {
+                color = options.get('stackedBarColor');
+            } else {
+                color = (value < 0) ? options.get('negBarColor') : options.get('barColor');
+            }
+            if (value === 0 && options.get('zeroColor') !== undefined) {
+                color = options.get('zeroColor');
+            }
+            if (colorMapByValue && (newColor = colorMapByValue.get(value))) {
+                color = newColor;
+            } else if (colorMapByIndex && colorMapByIndex.length > valuenum) {
+                color = colorMapByIndex[valuenum];
+            }
+            return $.isArray(color) ? color[stacknum % color.length] : color;
+        },
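+
+        /*
+         * A sketch of the two colorMap forms calcColor() understands
+         * (colors and ranges are illustrative; the range keys assume the
+         * RangeMap syntax used elsewhere in this file):
+         *
+         *     { colorMap: ['#f00', '#0f0', '#00f'] }        // by bar index
+         *     { colorMap: { ':0': '#f00', '0:': '#0f0' } }  // by value range
+         */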
+
+        /**
+         * Render bar(s) for a region
+         */
+        renderRegion: function (valuenum, highlight) {
+            var vals = this.values[valuenum],
+                options = this.options,
+                xaxisOffset = this.xaxisOffset,
+                result = [],
+                range = this.range,
+                stacked = this.stacked,
+                target = this.target,
+                x = valuenum * this.totalBarWidth,
+                canvasHeightEf = this.canvasHeightEf,
+                yoffset = this.yoffset,
+                y, height, color, isNull, yoffsetNeg, i, valcount, val, minPlotted, allMin;
+
+            vals = $.isArray(vals) ? vals : [vals];
+            valcount = vals.length;
+            val = vals[0];
+            isNull = all(null, vals);
+            allMin = all(xaxisOffset, vals, true);
+
+            if (isNull) {
+                if (options.get('nullColor')) {
+                    color = highlight ? this.calcHighlightColor(options.get('nullColor'), options) : options.get('nullColor');
+                    y = (yoffset > 0) ? yoffset - 1 : yoffset;
+                    return target.drawRect(x, y, this.barWidth - 1, 0, color, color);
+                } else {
+                    return undefined;
+                }
+            }
+            yoffsetNeg = yoffset;
+            for (i = 0; i < valcount; i++) {
+                val = vals[i];
+
+                if (stacked && val === xaxisOffset) {
+                    if (!allMin || minPlotted) {
+                        continue;
+                    }
+                    minPlotted = true;
+                }
+
+                if (range > 0) {
+                    height = Math.floor(canvasHeightEf * ((Math.abs(val - xaxisOffset) / range))) + 1;
+                } else {
+                    height = 1;
+                }
+                if (val < xaxisOffset || (val === xaxisOffset && yoffset === 0)) {
+                    y = yoffsetNeg;
+                    yoffsetNeg += height;
+                } else {
+                    y = yoffset - height;
+                    yoffset -= height;
+                }
+                color = this.calcColor(i, val, valuenum);
+                if (highlight) {
+                    color = this.calcHighlightColor(color, options);
+                }
+                result.push(target.drawRect(x, y, this.barWidth - 1, height - 1, color, color));
+            }
+            if (result.length === 1) {
+                return result[0];
+            }
+            return result;
+        }
+    });
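+
+    /*
+     * Usage sketches for the bar chart above (selector and data are
+     * illustrative). Arrays or 'a:b' strings per entry trigger the
+     * stacked mode detected in init():
+     *
+     *     $('.spark-bar').sparkline([1, 4, -3, 5], { type: 'bar' });
+     *     $('.spark-bar').sparkline(['1:2', '3:1', '2:2'], {
+     *         type: 'bar',
+     *         stackedBarColor: ['#f00', '#00f']
+     *     });
+     */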
+
+    /**
+     * Tristate charts
+     */
+    $.fn.sparkline.tristate = tristate = createClass($.fn.sparkline._base, barHighlightMixin, {
+        type: 'tristate',
+
+        init: function (el, values, options, width, height) {
+            var barWidth = parseInt(options.get('barWidth'), 10),
+                barSpacing = parseInt(options.get('barSpacing'), 10);
+            tristate._super.init.call(this, el, values, options, width, height);
+
+            this.regionShapes = {};
+            this.barWidth = barWidth;
+            this.barSpacing = barSpacing;
+            this.totalBarWidth = barWidth + barSpacing;
+            this.values = $.map(values, Number);
+            this.width = width = (values.length * barWidth) + ((values.length - 1) * barSpacing);
+
+            if ($.isArray(options.get('colorMap'))) {
+                this.colorMapByIndex = options.get('colorMap');
+                this.colorMapByValue = null;
+            } else {
+                this.colorMapByIndex = null;
+                this.colorMapByValue = options.get('colorMap');
+                if (this.colorMapByValue && this.colorMapByValue.get === undefined) {
+                    this.colorMapByValue = new RangeMap(this.colorMapByValue);
+                }
+            }
+            this.initTarget();
+        },
+
+        getRegion: function (el, x, y) {
+            return Math.floor(x / this.totalBarWidth);
+        },
+
+        getCurrentRegionFields: function () {
+            var currentRegion = this.currentRegion;
+            return {
+                isNull: this.values[currentRegion] === undefined,
+                value: this.values[currentRegion],
+                color: this.calcColor(this.values[currentRegion], currentRegion),
+                offset: currentRegion
+            };
+        },
+
+        calcColor: function (value, valuenum) {
+            var values = this.values,
+                options = this.options,
+                colorMapByIndex = this.colorMapByIndex,
+                colorMapByValue = this.colorMapByValue,
+                color, newColor;
+
+            if (colorMapByValue && (newColor = colorMapByValue.get(value))) {
+                color = newColor;
+            } else if (colorMapByIndex && colorMapByIndex.length > valuenum) {
+                color = colorMapByIndex[valuenum];
+            } else if (values[valuenum] < 0) {
+                color = options.get('negBarColor');
+            } else if (values[valuenum] > 0) {
+                color = options.get('posBarColor');
+            } else {
+                color = options.get('zeroBarColor');
+            }
+            return color;
+        },
+
+        renderRegion: function (valuenum, highlight) {
+            var values = this.values,
+                options = this.options,
+                target = this.target,
+                canvasHeight, height, halfHeight,
+                x, y, color;
+
+            canvasHeight = target.pixelHeight;
+            halfHeight = Math.round(canvasHeight / 2);
+
+            x = valuenum * this.totalBarWidth;
+            if (values[valuenum] < 0) {
+                y = halfHeight;
+                height = halfHeight - 1;
+            } else if (values[valuenum] > 0) {
+                y = 0;
+                height = halfHeight - 1;
+            } else {
+                y = halfHeight - 1;
+                height = 2;
+            }
+            color = this.calcColor(values[valuenum], valuenum);
+            if (color === null) {
+                return;
+            }
+            if (highlight) {
+                color = this.calcHighlightColor(color, options);
+            }
+            return target.drawRect(x, y, this.barWidth - 1, height - 1, color, color);
+        }
+    });
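+
+    /*
+     * A minimal sketch of the tristate chart above, which draws positive,
+     * zero and negative values as up, level and down bars (data is
+     * illustrative):
+     *
+     *     $('.spark-tri').sparkline([1, 1, -1, 1, 0, 0, -1], {
+     *         type: 'tristate',
+     *         posBarColor: '#6f6',
+     *         negBarColor: '#f66',
+     *         zeroBarColor: '#999'
+     *     });
+     */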
+
+    /**
+     * Discrete charts
+     */
+    $.fn.sparkline.discrete = discrete = createClass($.fn.sparkline._base, barHighlightMixin, {
+        type: 'discrete',
+
+        init: function (el, values, options, width, height) {
+            discrete._super.init.call(this, el, values, options, width, height);
+
+            this.regionShapes = {};
+            this.values = values = $.map(values, Number);
+            this.min = Math.min.apply(Math, values);
+            this.max = Math.max.apply(Math, values);
+            this.range = this.max - this.min;
+            this.width = width = options.get('width') === 'auto' ? values.length * 2 : this.width;
+            this.interval = Math.floor(width / values.length);
+            this.itemWidth = width / values.length;
+            if (options.get('chartRangeMin') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMin') < this.min)) {
+                this.min = options.get('chartRangeMin');
+            }
+            if (options.get('chartRangeMax') !== undefined && (options.get('chartRangeClip') || options.get('chartRangeMax') > this.max)) {
+                this.max = options.get('chartRangeMax');
+            }
+            this.initTarget();
+            if (this.target) {
+                this.lineHeight = options.get('lineHeight') === 'auto' ? Math.round(this.canvasHeight * 0.3) : options.get('lineHeight');
+            }
+        },
+
+        getRegion: function (el, x, y) {
+            return Math.floor(x / this.itemWidth);
+        },
+
+        getCurrentRegionFields: function () {
+            var currentRegion = this.currentRegion;
+            return {
+                isNull: this.values[currentRegion] === undefined,
+                value: this.values[currentRegion],
+                offset: currentRegion
+            };
+        },
+
+        renderRegion: function (valuenum, highlight) {
+            var values = this.values,
+                options = this.options,
+                min = this.min,
+                max = this.max,
+                range = this.range,
+                interval = this.interval,
+                target = this.target,
+                canvasHeight = this.canvasHeight,
+                lineHeight = this.lineHeight,
+                pheight = canvasHeight - lineHeight,
+                ytop, val, color, x;
+
+            val = clipval(values[valuenum], min, max);
+            x = valuenum * interval;
+            ytop = Math.round(pheight - pheight * ((val - min) / range));
+            color = (options.get('thresholdColor') && val < options.get('thresholdValue')) ? options.get('thresholdColor') : options.get('lineColor');
+            if (highlight) {
+                color = this.calcHighlightColor(color, options);
+            }
+            return target.drawLine(x, ytop, x, ytop + lineHeight, color);
+        }
+    });
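+
+    /*
+     * A usage sketch for the discrete chart above; values below
+     * thresholdValue are drawn in thresholdColor (data is illustrative):
+     *
+     *     $('.spark-disc').sparkline([4, 6, 7, 7, 4, 3, 2, 1, 4], {
+     *         type: 'discrete',
+     *         thresholdValue: 4,
+     *         thresholdColor: '#f00'
+     *     });
+     */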
+
+    /**
+     * Bullet charts
+     */
+    $.fn.sparkline.bullet = bullet = createClass($.fn.sparkline._base, {
+        type: 'bullet',
+
+        init: function (el, values, options, width, height) {
+            var min, max, vals;
+            bullet._super.init.call(this, el, values, options, width, height);
+
+            // values: target, performance, range1, range2, range3
+            this.values = values = normalizeValues(values);
+            // target or performance could be null
+            vals = values.slice();
+            vals[0] = vals[0] === null ? vals[2] : vals[0];
+            vals[1] = vals[1] === null ? vals[2] : vals[1];
+            min = Math.min.apply(Math, vals);
+            max = Math.max.apply(Math, vals);
+            if (options.get('base') === undefined) {
+                min = min < 0 ? min : 0;
+            } else {
+                min = options.get('base');
+            }
+            this.min = min;
+            this.max = max;
+            this.range = max - min;
+            this.shapes = {};
+            this.valueShapes = {};
+            this.regiondata = {};
+            this.width = width = options.get('width') === 'auto' ? '4.0em' : width;
+            this.target = this.$el.simpledraw(width, height, options.get('composite'));
+            if (!values.length) {
+                this.disabled = true;
+            }
+            this.initTarget();
+        },
+
+        getRegion: function (el, x, y) {
+            var shapeid = this.target.getShapeAt(el, x, y);
+            return (shapeid !== undefined && this.shapes[shapeid] !== undefined) ? this.shapes[shapeid] : undefined;
+        },
+
+        getCurrentRegionFields: function () {
+            var currentRegion = this.currentRegion;
+            return {
+                fieldkey: currentRegion.substr(0, 1),
+                value: this.values[currentRegion.substr(1)],
+                region: currentRegion
+            };
+        },
+
+        changeHighlight: function (highlight) {
+            var currentRegion = this.currentRegion,
+                shapeid = this.valueShapes[currentRegion],
+                shape;
+            delete this.shapes[shapeid];
+            switch (currentRegion.substr(0, 1)) {
+                case 'r':
+                    shape = this.renderRange(currentRegion.substr(1), highlight);
+                    break;
+                case 'p':
+                    shape = this.renderPerformance(highlight);
+                    break;
+                case 't':
+                    shape = this.renderTarget(highlight);
+                    break;
+            }
+            this.valueShapes[currentRegion] = shape.id;
+            this.shapes[shape.id] = currentRegion;
+            this.target.replaceWithShape(shapeid, shape);
+        },
+
+        renderRange: function (rn, highlight) {
+            var rangeval = this.values[rn],
+                rangewidth = Math.round(this.canvasWidth * ((rangeval - this.min) / this.range)),
+                color = this.options.get('rangeColors')[rn - 2];
+            if (highlight) {
+                color = this.calcHighlightColor(color, this.options);
+            }
+            return this.target.drawRect(0, 0, rangewidth - 1, this.canvasHeight - 1, color, color);
+        },
+
+        renderPerformance: function (highlight) {
+            var perfval = this.values[1],
+                perfwidth = Math.round(this.canvasWidth * ((perfval - this.min) / this.range)),
+                color = this.options.get('performanceColor');
+            if (highlight) {
+                color = this.calcHighlightColor(color, this.options);
+            }
+            return this.target.drawRect(0, Math.round(this.canvasHeight * 0.3), perfwidth - 1,
+                Math.round(this.canvasHeight * 0.4) - 1, color, color);
+        },
+
+        renderTarget: function (highlight) {
+            var targetval = this.values[0],
+                x = Math.round(this.canvasWidth * ((targetval - this.min) / this.range) - (this.options.get('targetWidth') / 2)),
+                targettop = Math.round(this.canvasHeight * 0.10),
+                targetheight = this.canvasHeight - (targettop * 2),
+                color = this.options.get('targetColor');
+            if (highlight) {
+                color = this.calcHighlightColor(color, this.options);
+            }
+            return this.target.drawRect(x, targettop, this.options.get('targetWidth') - 1, targetheight - 1, color, color);
+        },
+
+        render: function () {
+            var vlen = this.values.length,
+                target = this.target,
+                i, shape;
+            if (!bullet._super.render.call(this)) {
+                return;
+            }
+            for (i = 2; i < vlen; i++) {
+                shape = this.renderRange(i).append();
+                this.shapes[shape.id] = 'r' + i;
+                this.valueShapes['r' + i] = shape.id;
+            }
+            if (this.values[1] !== null) {
+                shape = this.renderPerformance().append();
+                this.shapes[shape.id] = 'p1';
+                this.valueShapes.p1 = shape.id;
+            }
+            if (this.values[0] !== null) {
+                shape = this.renderTarget().append();
+                this.shapes[shape.id] = 't0';
+                this.valueShapes.t0 = shape.id;
+            }
+            target.render();
+        }
+    });
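+
+    /*
+     * A sketch of the bullet chart above. Per init(), the value order is
+     * [target, performance, range1, range2, range3] (numbers are
+     * illustrative):
+     *
+     *     $('.spark-bullet').sparkline([10, 12, 12, 9, 7], {
+     *         type: 'bullet',
+     *         targetColor: '#f33',
+     *         performanceColor: '#33f'
+     *     });
+     */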
+
+    /**
+     * Pie charts
+     */
+    $.fn.sparkline.pie = pie = createClass($.fn.sparkline._base, {
+        type: 'pie',
+
+        init: function (el, values, options, width, height) {
+            var total = 0, i;
+
+            pie._super.init.call(this, el, values, options, width, height);
+
+            this.shapes = {}; // map shape ids to value offsets
+            this.valueShapes = {}; // maps value offsets to shape ids
+            this.values = values = $.map(values, Number);
+
+            if (options.get('width') === 'auto') {
+                this.width = this.height;
+            }
+
+            if (values.length > 0) {
+                for (i = values.length; i--;) {
+                    total += values[i];
+                }
+            }
+            this.total = total;
+            this.initTarget();
+            this.radius = Math.floor(Math.min(this.canvasWidth, this.canvasHeight) / 2);
+        },
+
+        getRegion: function (el, x, y) {
+            var shapeid = this.target.getShapeAt(el, x, y);
+            return (shapeid !== undefined && this.shapes[shapeid] !== undefined) ? this.shapes[shapeid] : undefined;
+        },
+
+        getCurrentRegionFields: function () {
+            var currentRegion = this.currentRegion;
+            return {
+                isNull: this.values[currentRegion] === undefined,
+                value: this.values[currentRegion],
+                percent: this.values[currentRegion] / this.total * 100,
+                color: this.options.get('sliceColors')[currentRegion % this.options.get('sliceColors').length],
+                offset: currentRegion
+            };
+        },
+
+        changeHighlight: function (highlight) {
+            var currentRegion = this.currentRegion,
+                 newslice = this.renderSlice(currentRegion, highlight),
+                 shapeid = this.valueShapes[currentRegion];
+            delete this.shapes[shapeid];
+            this.target.replaceWithShape(shapeid, newslice);
+            this.valueShapes[currentRegion] = newslice.id;
+            this.shapes[newslice.id] = currentRegion;
+        },
+
+        renderSlice: function (valuenum, highlight) {
+            var target = this.target,
+                options = this.options,
+                radius = this.radius,
+                borderWidth = options.get('borderWidth'),
+                offset = options.get('offset'),
+                circle = 2 * Math.PI,
+                values = this.values,
+                total = this.total,
+                next = offset ? circle * (offset / 360) : 0,
+                start, end, i, vlen, color;
+
+            vlen = values.length;
+            for (i = 0; i < vlen; i++) {
+                start = next;
+                end = next;
+                if (total > 0) {  // avoid divide by zero
+                    end = next + (circle * (values[i] / total));
+                }
+                if (valuenum === i) {
+                    color = options.get('sliceColors')[i % options.get('sliceColors').length];
+                    if (highlight) {
+                        color = this.calcHighlightColor(color, options);
+                    }
+
+                    return target.drawPieSlice(radius, radius, radius - borderWidth, start, end, undefined, color);
+                }
+                next = end;
+            }
+        },
+
+        render: function () {
+            var target = this.target,
+                values = this.values,
+                options = this.options,
+                radius = this.radius,
+                borderWidth = options.get('borderWidth'),
+                shape, i;
+
+            if (!pie._super.render.call(this)) {
+                return;
+            }
+            if (borderWidth) {
+                target.drawCircle(radius, radius, Math.floor(radius - (borderWidth / 2)),
+                    options.get('borderColor'), undefined, borderWidth).append();
+            }
+            for (i = values.length; i--;) {
+                if (values[i]) { // don't render zero values
+                    shape = this.renderSlice(i).append();
+                    this.valueShapes[i] = shape.id; // store just the shapeid
+                    this.shapes[shape.id] = i;
+                }
+            }
+            target.render();
+        }
+    });
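+
+    /*
+     * A usage sketch for the pie chart above; slice colors cycle through
+     * sliceColors and offset rotates the first slice by that many degrees
+     * (data is illustrative):
+     *
+     *     $('.spark-pie').sparkline([1, 1, 2], {
+     *         type: 'pie',
+     *         sliceColors: ['#3a3', '#a33', '#33a'],
+     *         offset: 90
+     *     });
+     */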
+
+    /**
+     * Box plots
+     */
+    $.fn.sparkline.box = box = createClass($.fn.sparkline._base, {
+        type: 'box',
+
+        init: function (el, values, options, width, height) {
+            box._super.init.call(this, el, values, options, width, height);
+            this.values = $.map(values, Number);
+            this.width = options.get('width') === 'auto' ? '4.0em' : width;
+            this.initTarget();
+            if (!this.values.length) {
+            this.disabled = true;
+            }
+        },
+
+        /**
+         * Simulate a single region
+         */
+        getRegion: function () {
+            return 1;
+        },
+
+        getCurrentRegionFields: function () {
+            var result = [
+                { field: 'lq', value: this.quartiles[0] },
+                { field: 'med', value: this.quartiles[1] },
+                { field: 'uq', value: this.quartiles[2] }
+            ];
+            if (this.loutlier !== undefined) {
+                result.push({ field: 'lo', value: this.loutlier});
+            }
+            if (this.routlier !== undefined) {
+                result.push({ field: 'ro', value: this.routlier});
+            }
+            if (this.lwhisker !== undefined) {
+                result.push({ field: 'lw', value: this.lwhisker});
+            }
+            if (this.rwhisker !== undefined) {
+                result.push({ field: 'rw', value: this.rwhisker});
+            }
+            return result;
+        },
+
+        render: function () {
+            var target = this.target,
+                values = this.values,
+                vlen = values.length,
+                options = this.options,
+                canvasWidth = this.canvasWidth,
+                canvasHeight = this.canvasHeight,
+                minValue = options.get('chartRangeMin') === undefined ? Math.min.apply(Math, values) : options.get('chartRangeMin'),
+                maxValue = options.get('chartRangeMax') === undefined ? Math.max.apply(Math, values) : options.get('chartRangeMax'),
+                canvasLeft = 0,
+                lwhisker, loutlier, iqr, q1, q2, q3, rwhisker, routlier, i,
+                size, unitSize;
+
+            if (!box._super.render.call(this)) {
+                return;
+            }
+
+            if (options.get('raw')) {
+                if (options.get('showOutliers') && values.length > 5) {
+                    loutlier = values[0];
+                    lwhisker = values[1];
+                    q1 = values[2];
+                    q2 = values[3];
+                    q3 = values[4];
+                    rwhisker = values[5];
+                    routlier = values[6];
+                } else {
+                    lwhisker = values[0];
+                    q1 = values[1];
+                    q2 = values[2];
+                    q3 = values[3];
+                    rwhisker = values[4];
+                }
+            } else {
+                values.sort(function (a, b) { return a - b; });
+                q1 = quartile(values, 1);
+                q2 = quartile(values, 2);
+                q3 = quartile(values, 3);
+                iqr = q3 - q1;
+                if (options.get('showOutliers')) {
+                    lwhisker = rwhisker = undefined;
+                    for (i = 0; i < vlen; i++) {
+                        if (lwhisker === undefined && values[i] > q1 - (iqr * options.get('outlierIQR'))) {
+                            lwhisker = values[i];
+                        }
+                        if (values[i] < q3 + (iqr * options.get('outlierIQR'))) {
+                            rwhisker = values[i];
+                        }
+                    }
+                    loutlier = values[0];
+                    routlier = values[vlen - 1];
+                } else {
+                    lwhisker = values[0];
+                    rwhisker = values[vlen - 1];
+                }
+            }
+            this.quartiles = [q1, q2, q3];
+            this.lwhisker = lwhisker;
+            this.rwhisker = rwhisker;
+            this.loutlier = loutlier;
+            this.routlier = routlier;
+
+            unitSize = canvasWidth / (maxValue - minValue + 1);
+            if (options.get('showOutliers')) {
+                canvasLeft = Math.ceil(options.get('spotRadius'));
+                canvasWidth -= 2 * Math.ceil(options.get('spotRadius'));
+                unitSize = canvasWidth / (maxValue - minValue + 1);
+                if (loutlier < lwhisker) {
+                    target.drawCircle((loutlier - minValue) * unitSize + canvasLeft,
+                        canvasHeight / 2,
+                        options.get('spotRadius'),
+                        options.get('outlierLineColor'),
+                        options.get('outlierFillColor')).append();
+                }
+                if (routlier > rwhisker) {
+                    target.drawCircle((routlier - minValue) * unitSize + canvasLeft,
+                        canvasHeight / 2,
+                        options.get('spotRadius'),
+                        options.get('outlierLineColor'),
+                        options.get('outlierFillColor')).append();
+                }
+            }
+
+            // box
+            target.drawRect(
+                Math.round((q1 - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight * 0.1),
+                Math.round((q3 - q1) * unitSize),
+                Math.round(canvasHeight * 0.8),
+                options.get('boxLineColor'),
+                options.get('boxFillColor')).append();
+            // left whisker
+            target.drawLine(
+                Math.round((lwhisker - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight / 2),
+                Math.round((q1 - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight / 2),
+                options.get('lineColor')).append();
+            target.drawLine(
+                Math.round((lwhisker - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight / 4),
+                Math.round((lwhisker - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight - canvasHeight / 4),
+                options.get('whiskerColor')).append();
+            // right whisker
+            target.drawLine(Math.round((rwhisker - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight / 2),
+                Math.round((q3 - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight / 2),
+                options.get('lineColor')).append();
+            target.drawLine(
+                Math.round((rwhisker - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight / 4),
+                Math.round((rwhisker - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight - canvasHeight / 4),
+                options.get('whiskerColor')).append();
+            // median line
+            target.drawLine(
+                Math.round((q2 - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight * 0.1),
+                Math.round((q2 - minValue) * unitSize + canvasLeft),
+                Math.round(canvasHeight * 0.9),
+                options.get('medianColor')).append();
+            if (options.get('target')) {
+                size = Math.ceil(options.get('spotRadius'));
+                target.drawLine(
+                    Math.round((options.get('target') - minValue) * unitSize + canvasLeft),
+                    Math.round((canvasHeight / 2) - size),
+                    Math.round((options.get('target') - minValue) * unitSize + canvasLeft),
+                    Math.round((canvasHeight / 2) + size),
+                    options.get('targetColor')).append();
+                target.drawLine(
+                    Math.round((options.get('target') - minValue) * unitSize + canvasLeft - size),
+                    Math.round(canvasHeight / 2),
+                    Math.round((options.get('target') - minValue) * unitSize + canvasLeft + size),
+                    Math.round(canvasHeight / 2),
+                    options.get('targetColor')).append();
+            }
+            target.render();
+        }
+    });
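+
+    /*
+     * Usage sketches for the box plot above (data is illustrative). By
+     * default the quartiles are computed from the raw data; with raw: true
+     * the values are taken as precomputed statistics in the order read by
+     * render():
+     *
+     *     $('.spark-box').sparkline([4, 27, 34, 52, 54, 59, 61, 68, 78, 82, 91], {
+     *         type: 'box'
+     *     });
+     *     // raw with outliers: [loutlier, lwhisker, q1, q2, q3, rwhisker, routlier]
+     *     $('.spark-box').sparkline([1, 3, 5, 7, 9, 11, 15], {
+     *         type: 'box', raw: true, showOutliers: true
+     *     });
+     */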
+
+    // Set up a very simple "virtual canvas" to make drawing the few shapes we need easier
+    // This is accessible as $(foo).simpledraw()
+
+    VShape = createClass({
+        init: function (target, id, type, args) {
+            this.target = target;
+            this.id = id;
+            this.type = type;
+            this.args = args;
+        },
+        append: function () {
+            this.target.appendShape(this);
+            return this;
+        }
+    });
+
+    VCanvas_base = createClass({
+        _pxregex: /(\d+)(px)?\s*$/i,
+
+        init: function (width, height, target) {
+            if (!width) {
+                return;
+            }
+            this.width = width;
+            this.height = height;
+            this.target = target;
+            this.lastShapeId = null;
+            if (target[0]) {
+                target = target[0];
+            }
+            $.data(target, '_jqs_vcanvas', this);
+        },
+
+        drawLine: function (x1, y1, x2, y2, lineColor, lineWidth) {
+            return this.drawShape([[x1, y1], [x2, y2]], lineColor, lineWidth);
+        },
+
+        drawShape: function (path, lineColor, fillColor, lineWidth) {
+            return this._genShape('Shape', [path, lineColor, fillColor, lineWidth]);
+        },
+
+        drawCircle: function (x, y, radius, lineColor, fillColor, lineWidth) {
+            return this._genShape('Circle', [x, y, radius, lineColor, fillColor, lineWidth]);
+        },
+
+        drawPieSlice: function (x, y, radius, startAngle, endAngle, lineColor, fillColor) {
+            return this._genShape('PieSlice', [x, y, radius, startAngle, endAngle, lineColor, fillColor]);
+        },
+
+        drawRect: function (x, y, width, height, lineColor, fillColor) {
+            return this._genShape('Rect', [x, y, width, height, lineColor, fillColor]);
+        },
+
+        getElement: function () {
+            return this.canvas;
+        },
+
+        /**
+         * Return the most recently inserted shape id
+         */
+        getLastShapeId: function () {
+            return this.lastShapeId;
+        },
+
+        /**
+         * Clear and reset the canvas
+         */
+        reset: function () {
+            alert('reset not implemented');
+        },
+
+        _insert: function (el, target) {
+            $(target).html(el);
+        },
+
+        /**
+         * Calculate the pixel dimensions of the canvas
+         */
+        _calculatePixelDims: function (width, height, canvas) {
+            // XXX This should probably be a configurable option
+            var match;
+            match = this._pxregex.exec(height);
+            if (match) {
+                this.pixelHeight = match[1];
+            } else {
+                this.pixelHeight = $(canvas).height();
+            }
+            match = this._pxregex.exec(width);
+            if (match) {
+                this.pixelWidth = match[1];
+            } else {
+                this.pixelWidth = $(canvas).width();
+            }
+        },
+
+        /**
+         * Generate a shape object and id for later rendering
+         */
+        _genShape: function (shapetype, shapeargs) {
+            var id = shapeCount++;
+            shapeargs.unshift(id);
+            return new VShape(this, id, shapetype, shapeargs);
+        },
+
+        /**
+         * Add a shape to the end of the render queue
+         */
+        appendShape: function (shape) {
+            alert('appendShape not implemented');
+        },
+
+        /**
+         * Replace one shape with another
+         */
+        replaceWithShape: function (shapeid, shape) {
+            alert('replaceWithShape not implemented');
+        },
+
+        /**
+         * Insert one shape after another in the render queue
+         */
+        insertAfterShape: function (shapeid, shape) {
+            alert('insertAfterShape not implemented');
+        },
+
+        /**
+         * Remove a shape from the queue
+         */
+        removeShapeId: function (shapeid) {
+            alert('removeShapeId not implemented');
+        },
+
+        /**
+         * Find a shape at the specified x/y co-ordinates
+         */
+        getShapeAt: function (el, x, y) {
+            alert('getShapeAt not implemented');
+        },
+
+        /**
+         * Render all queued shapes onto the canvas
+         */
+        render: function () {
+            alert('render not implemented');
+        }
+    });
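+
+    /*
+     * The base class above is a deferred render queue: the draw*() methods
+     * only build VShape objects, and nothing is painted until append()
+     * queues a shape and render() replays the queue. The pattern, as used
+     * throughout this file:
+     *
+     *     var shape = target.drawRect(0, 0, 10, 10, '#000', '#ccc'); // not yet drawn
+     *     shape.append();   // queue the shape
+     *     target.render();  // paint everything queued so far
+     */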
+
+    VCanvas_canvas = createClass(VCanvas_base, {
+        init: function (width, height, target, interact) {
+            VCanvas_canvas._super.init.call(this, width, height, target);
+            this.canvas = document.createElement('canvas');
+            if (target[0]) {
+                target = target[0];
+            }
+            $.data(target, '_jqs_vcanvas', this);
+            $(this.canvas).css({ display: 'inline-block', width: width, height: height, verticalAlign: 'top' });
+            this._insert(this.canvas, target);
+            this._calculatePixelDims(width, height, this.canvas);
+            this.canvas.width = this.pixelWidth;
+            this.canvas.height = this.pixelHeight;
+            this.interact = interact;
+            this.shapes = {};
+            this.shapeseq = [];
+            this.currentTargetShapeId = undefined;
+            $(this.canvas).css({width: this.pixelWidth, height: this.pixelHeight});
+        },
+
+        _getContext: function (lineColor, fillColor, lineWidth) {
+            var context = this.canvas.getContext('2d');
+            if (lineColor !== undefined) {
+                context.strokeStyle = lineColor;
+            }
+            context.lineWidth = lineWidth === undefined ? 1 : lineWidth;
+            if (fillColor !== undefined) {
+                context.fillStyle = fillColor;
+            }
+            return context;
+        },
+
+        reset: function () {
+            var context = this._getContext();
+            context.clearRect(0, 0, this.pixelWidth, this.pixelHeight);
+            this.shapes = {};
+            this.shapeseq = [];
+            this.currentTargetShapeId = undefined;
+        },
+
+        _drawShape: function (shapeid, path, lineColor, fillColor, lineWidth) {
+            var context = this._getContext(lineColor, fillColor, lineWidth),
+                i, plen;
+            context.beginPath();
+            context.moveTo(path[0][0] + 0.5, path[0][1] + 0.5);
+            for (i = 1, plen = path.length; i < plen; i++) {
+                context.lineTo(path[i][0] + 0.5, path[i][1] + 0.5); // the 0.5 offset gives us crisp pixel-width lines
+            }
+            if (lineColor !== undefined) {
+                context.stroke();
+            }
+            if (fillColor !== undefined) {
+                context.fill();
+            }
+            if (this.targetX !== undefined && this.targetY !== undefined &&
+                context.isPointInPath(this.targetX, this.targetY)) {
+                this.currentTargetShapeId = shapeid;
+            }
+        },
+
+        _drawCircle: function (shapeid, x, y, radius, lineColor, fillColor, lineWidth) {
+            var context = this._getContext(lineColor, fillColor, lineWidth);
+            context.beginPath();
+            context.arc(x, y, radius, 0, 2 * Math.PI, false);
+            if (this.targetX !== undefined && this.targetY !== undefined &&
+                context.isPointInPath(this.targetX, this.targetY)) {
+                this.currentTargetShapeId = shapeid;
+            }
+            if (lineColor !== undefined) {
+                context.stroke();
+            }
+            if (fillColor !== undefined) {
+                context.fill();
+            }
+        },
+
+        _drawPieSlice: function (shapeid, x, y, radius, startAngle, endAngle, lineColor, fillColor) {
+            var context = this._getContext(lineColor, fillColor);
+            context.beginPath();
+            context.moveTo(x, y);
+            context.arc(x, y, radius, startAngle, endAngle, false);
+            context.lineTo(x, y);
+            context.closePath();
+            if (lineColor !== undefined) {
+                context.stroke();
+            }
+            if (fillColor) {
+                context.fill();
+            }
+            if (this.targetX !== undefined && this.targetY !== undefined &&
+                context.isPointInPath(this.targetX, this.targetY)) {
+                this.currentTargetShapeId = shapeid;
+            }
+        },
+
+        _drawRect: function (shapeid, x, y, width, height, lineColor, fillColor) {
+            return this._drawShape(shapeid, [[x, y], [x + width, y], [x + width, y + height], [x, y + height], [x, y]], lineColor, fillColor);
+        },
+
+        appendShape: function (shape) {
+            this.shapes[shape.id] = shape;
+            this.shapeseq.push(shape.id);
+            this.lastShapeId = shape.id;
+            return shape.id;
+        },
+
+        replaceWithShape: function (shapeid, shape) {
+            var shapeseq = this.shapeseq,
+                i;
+            this.shapes[shape.id] = shape;
+            for (i = shapeseq.length; i--;) {
+                if (shapeseq[i] == shapeid) {
+                    shapeseq[i] = shape.id;
+                }
+            }
+            delete this.shapes[shapeid];
+        },
+
+        replaceWithShapes: function (shapeids, shapes) {
+            var shapeseq = this.shapeseq,
+                shapemap = {},
+                sid, i, first;
+
+            for (i = shapeids.length; i--;) {
+                shapemap[shapeids[i]] = true;
+            }
+            for (i = shapeseq.length; i--;) {
+                sid = shapeseq[i];
+                if (shapemap[sid]) {
+                    shapeseq.splice(i, 1);
+                    delete this.shapes[sid];
+                    first = i;
+                }
+            }
+            for (i = shapes.length; i--;) {
+                shapeseq.splice(first, 0, shapes[i].id);
+                this.shapes[shapes[i].id] = shapes[i];
+            }
+
+        },
+
+        insertAfterShape: function (shapeid, shape) {
+            var shapeseq = this.shapeseq,
+                i;
+            for (i = shapeseq.length; i--;) {
+                if (shapeseq[i] === shapeid) {
+                    shapeseq.splice(i + 1, 0, shape.id);
+                    this.shapes[shape.id] = shape;
+                    return;
+                }
+            }
+        },
+
+        removeShapeId: function (shapeid) {
+            var shapeseq = this.shapeseq,
+                i;
+            for (i = shapeseq.length; i--;) {
+                if (shapeseq[i] === shapeid) {
+                    shapeseq.splice(i, 1);
+                    break;
+                }
+            }
+            delete this.shapes[shapeid];
+        },
+
+        getShapeAt: function (el, x, y) {
+            this.targetX = x;
+            this.targetY = y;
+            this.render();
+            return this.currentTargetShapeId;
+        },
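+
+        // Hit-testing note (editorial, inferred from this file, not upstream
+        // commentary): getShapeAt re-renders with targetX/targetY set, and each
+        // _draw* method flags the shape whose path contains that point via
+        // context.isPointInPath, leaving the answer in currentTargetShapeId.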
+
+        render: function () {
+            var shapeseq = this.shapeseq,
+                shapes = this.shapes,
+                shapeCount = shapeseq.length,
+                context = this._getContext(),
+                shapeid, shape, i;
+            context.clearRect(0, 0, this.pixelWidth, this.pixelHeight);
+            for (i = 0; i < shapeCount; i++) {
+                shapeid = shapeseq[i];
+                shape = shapes[shapeid];
+                this['_draw' + shape.type].apply(this, shape.args);
+            }
+            if (!this.interact) {
+                // not interactive so no need to keep the shapes array
+                this.shapes = {};
+                this.shapeseq = [];
+            }
+        }
+
+    });
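+
+    // Shape-record sketch (illustrative, inferred from the code above): render()
+    // dispatches on the shape's `type` string, so a queued record looks like
+    //     { id: 7, type: 'Rect', args: [7, 0, 0, 10, 5, '#f00', '#fee'] }
+    // where args mirrors _drawRect's (shapeid, x, y, width, height, lineColor,
+    // fillColor) parameters and therefore repeats the id as its first element.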
+
+    VCanvas_vml = createClass(VCanvas_base, {
+        init: function (width, height, target) {
+            var groupel;
+            VCanvas_vml._super.init.call(this, width, height, target);
+            if (target[0]) {
+                target = target[0];
+            }
+            $.data(target, '_jqs_vcanvas', this);
+            this.canvas = document.createElement('span');
+            $(this.canvas).css({ display: 'inline-block', position: 'relative', overflow: 'hidden', width: width, height: height, margin: '0px', padding: '0px', verticalAlign: 'top'});
+            this._insert(this.canvas, target);
+            this._calculatePixelDims(width, height, this.canvas);
+            this.canvas.width = this.pixelWidth;
+            this.canvas.height = this.pixelHeight;
+            groupel = '<v:group coordorigin="0 0" coordsize="' + this.pixelWidth + ' ' + this.pixelHeight + '"' +
+                    ' style="position:absolute;top:0;left:0;width:' + this.pixelWidth + 'px;height:' + this.pixelHeight + 'px;"></v:group>';
+            this.canvas.insertAdjacentHTML('beforeEnd', groupel);
+            this.group = $(this.canvas).children()[0];
+            this.rendered = false;
+            this.prerender = '';
+        },
+
+        _drawShape: function (shapeid, path, lineColor, fillColor, lineWidth) {
+            var vpath = [],
+                initial, stroke, fill, closed, vel, plen, i;
+            for (i = 0, plen = path.length; i < plen; i++) {
+                vpath[i] = '' + (path[i][0]) + ',' + (path[i][1]);
+            }
+            initial = vpath.splice(0, 1);
+            lineWidth = lineWidth === undefined ? 1 : lineWidth;
+            stroke = lineColor === undefined ? ' stroked="false" ' : ' strokeWeight="' + lineWidth + 'px" strokeColor="' + lineColor + '" ';
+            fill = fillColor === undefined ? ' filled="false"' : ' fillColor="' + fillColor + '" filled="true" ';
+            closed = vpath[0] === vpath[vpath.length - 1] ? 'x ' : '';
+            vel = '<v:shape coordorigin="0 0" coordsize="' + this.pixelWidth + ' ' + this.pixelHeight + '" ' +
+                 ' id="jqsshape' + shapeid + '" ' +
+                 stroke +
+                 fill +
+                ' style="position:absolute;left:0px;top:0px;height:' + this.pixelHeight + 'px;width:' + this.pixelWidth + 'px;padding:0px;margin:0px;" ' +
+                ' path="m ' + initial + ' l ' + vpath.join(', ') + ' ' + closed + 'e">' +
+                ' </v:shape>';
+            return vel;
+        },
+
+        _drawCircle: function (shapeid, x, y, radius, lineColor, fillColor, lineWidth) {
+            var stroke, fill, vel;
+            x -= radius;
+            y -= radius;
+            stroke = lineColor === undefined ? ' stroked="false" ' : ' strokeWeight="' + lineWidth + 'px" strokeColor="' + lineColor + '" ';
+            fill = fillColor === undefined ? ' filled="false"' : ' fillColor="' + fillColor + '" filled="true" ';
+            vel = '<v:oval ' +
+                 ' id="jqsshape' + shapeid + '" ' +
+                stroke +
+                fill +
+                ' style="position:absolute;top:' + y + 'px; left:' + x + 'px; width:' + (radius * 2) + 'px; height:' + (radius * 2) + 'px"></v:oval>';
+            return vel;
+
+        },
+
+        _drawPieSlice: function (shapeid, x, y, radius, startAngle, endAngle, lineColor, fillColor) {
+            var vpath, startx, starty, endx, endy, stroke, fill, vel;
+            if (startAngle === endAngle) {
+                return '';  // VML seems to have a problem when the start angle equals the end angle.
+            }
+            if ((endAngle - startAngle) === (2 * Math.PI)) {
+                startAngle = 0.0;  // VML seems to have a problem when drawing a full circle that doesn't start at 0
+                endAngle = (2 * Math.PI);
+            }
+
+            startx = x + Math.round(Math.cos(startAngle) * radius);
+            starty = y + Math.round(Math.sin(startAngle) * radius);
+            endx = x + Math.round(Math.cos(endAngle) * radius);
+            endy = y + Math.round(Math.sin(endAngle) * radius);
+
+            if (startx === endx && starty === endy) {
+                if ((endAngle - startAngle) < Math.PI) {
+                    // Prevent very small slices from being mistaken for a whole pie
+                    return '';
+                }
+                // essentially going to be the entire circle, so ignore startAngle
+                startx = endx = x + radius;
+                starty = endy = y;
+            }
+
+            if (startx === endx && starty === endy && (endAngle - startAngle) < Math.PI) {
+                return '';
+            }
+
+            vpath = [x - radius, y - radius, x + radius, y + radius, startx, starty, endx, endy];
+            stroke = lineColor === undefined ? ' stroked="false" ' : ' strokeWeight="1px" strokeColor="' + lineColor + '" ';
+            fill = fillColor === undefined ? ' filled="false"' : ' fillColor="' + fillColor + '" filled="true" ';
+            vel = '<v:shape coordorigin="0 0" coordsize="' + this.pixelWidth + ' ' + this.pixelHeight + '" ' +
+                 ' id="jqsshape' + shapeid + '" ' +
+                 stroke +
+                 fill +
+                ' style="position:absolute;left:0px;top:0px;height:' + this.pixelHeight + 'px;width:' + this.pixelWidth + 'px;padding:0px;margin:0px;" ' +
+                ' path="m ' + x + ',' + y + ' wa ' + vpath.join(', ') + ' x e">' +
+                ' </v:shape>';
+            return vel;
+        },
+
+        _drawRect: function (shapeid, x, y, width, height, lineColor, fillColor) {
+            return this._drawShape(shapeid, [[x, y], [x, y + height], [x + width, y + height], [x + width, y], [x, y]], lineColor, fillColor);
+        },
+
+        reset: function () {
+            this.group.innerHTML = '';
+        },
+
+        appendShape: function (shape) {
+            var vel = this['_draw' + shape.type].apply(this, shape.args);
+            if (this.rendered) {
+                this.group.insertAdjacentHTML('beforeEnd', vel);
+            } else {
+                this.prerender += vel;
+            }
+            this.lastShapeId = shape.id;
+            return shape.id;
+        },
+
+        replaceWithShape: function (shapeid, shape) {
+            var existing = $('#jqsshape' + shapeid),
+                vel = this['_draw' + shape.type].apply(this, shape.args);
+            existing[0].outerHTML = vel;
+        },
+
+        replaceWithShapes: function (shapeids, shapes) {
+            // replace the first shapeid with all the new shapes, then remove the remaining old shapes
+            var existing = $('#jqsshape' + shapeids[0]),
+                replace = '',
+                slen = shapes.length,
+                i;
+            for (i = 0; i < slen; i++) {
+                replace += this['_draw' + shapes[i].type].apply(this, shapes[i].args);
+            }
+            existing[0].outerHTML = replace;
+            for (i = 1; i < shapeids.length; i++) {
+                $('#jqsshape' + shapeids[i]).remove();
+            }
+        },
+
+        insertAfterShape: function (shapeid, shape) {
+            var existing = $('#jqsshape' + shapeid),
+                 vel = this['_draw' + shape.type].apply(this, shape.args);
+            existing[0].insertAdjacentHTML('afterEnd', vel);
+        },
+
+        removeShapeId: function (shapeid) {
+            var existing = $('#jqsshape' + shapeid);
+            this.group.removeChild(existing[0]);
+        },
+
+        getShapeAt: function (el, x, y) {
+            var shapeid = el.id.substr(8);
+            return shapeid;
+        },
+
+        render: function () {
+            if (!this.rendered) {
+                // batch the initial render into a single repaint
+                this.group.innerHTML = this.prerender;
+                this.rendered = true;
+            }
+        }
+    });
+
+}))}(document, Math));
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/jquery/jqtouch.js b/client/galaxy/scripts/libs/jquery/jqtouch.js
new file mode 100644
index 0000000..0f5426d
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jqtouch.js
@@ -0,0 +1,883 @@
+(function() {
+
+    $.jQTouch = function(options) {
+        // Initialize internal jQT variables
+        var $body,
+            $head=$('head'),
+            history=[],
+            newPageCount=0,
+            jQTSettings={},
+            $currentPage='',
+            orientation='portrait',
+            touchSelectors=[],
+            publicObj={},
+            tapBuffer=100, // High click delay = ~350, quickest animation (slide) = 250
+            extensions=$.jQTouch.prototype.extensions,
+            extTapHandlers=$.jQTouch.prototype.tapHandlers,
+            tapHandlers=[],
+            animations=[],
+            hairExtensions='',
+            defaults = {
+                addGlossToIcon: true,
+                backSelector: '.back, .cancel, .goback',
+                cacheGetRequests: true,
+                debug: true,
+                defaultAnimation: 'slideleft',
+                fixedViewport: true,
+                formSelector: 'form',
+                fullScreen: true,
+                fullScreenClass: 'fullscreen',
+                icon: null,
+                icon4: null, // available in iOS 4.2 and later
+                preloadImages: false,
+                starter: $(document).ready,
+                startupScreen: null,
+                statusBar: 'default', // other options: black-translucent, black
+                submitSelector: '.submit',
+                touchSelector: 'a, .touch',
+                trackScrollPositions: true,
+                updateHash: true,
+                useAnimations: true,
+                useFastTouch: true,
+                useTouchScroll: true,
+                animations: [ // highest to lowest priority
+                    {name:'cubeleft', selector:'.cubeleft, .cube', is3d: true},
+                    {name:'cuberight', selector:'.cuberight', is3d: true},
+                    {name:'dissolve', selector:'.dissolve'},
+                    {name:'fade', selector:'.fade'},
+                    {name:'flipleft', selector:'.flipleft, .flip', is3d: true},
+                    {name:'flipright', selector:'.flipright', is3d: true},
+                    {name:'pop', selector:'.pop', is3d: true},
+                    {name:'swapleft', selector:'.swapleft, .swap', is3d: true},
+                    {name:'swapright', selector:'.swapright', is3d: true},
+                    {name:'slidedown', selector:'.slidedown'},
+                    {name:'slideright', selector:'.slideright'},
+                    {name:'slideup', selector:'.slideup'},
+                    {name:'slideleft', selector:'.slideleft, .slide, #jqt > * > ul li a'}
+                ]
+            }; // end defaults
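+
+        // Illustrative usage (assumed calling code, not part of this file):
+        //     var jQT = $.jQTouch({ icon: 'icon.png', statusBar: 'black' });
+        // Any key in the `defaults` object above can be overridden via `options`.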
+
+        function warn(message) {
+            if (window.console !== undefined && jQTSettings.debug === true) {
+                console.warn(message);
+            }
+        }
+
+        function addAnimation(animation) {
+            if (typeof(animation.selector) === 'string' && typeof(animation.name) === 'string') {
+                animations.push(animation);
+            }
+        }
+
+        function addTapHandler(tapHandler) {
+            if (typeof(tapHandler.name) === 'string' && typeof(tapHandler.isSupported) === 'function' && typeof(tapHandler.fn) === 'function') {
+                tapHandlers.push(tapHandler);
+            }
+        }
+
+        function addPageToHistory(page, animation) {
+            history.unshift({
+                page: page,
+                animation: animation,
+                hash: '#' + page.attr('id'),
+                id: page.attr('id')
+            });
+        }
+
+        // Unfortunately, we cannot assume the 'tap' event
+        // is being used for links, forms, etc.
+        function clickHandler(e) {
+            // Figure out whether to prevent default
+            var $el = $(e.target);
+
+            // Find the nearest tappable ancestor
+            if (!$el.is(touchSelectors.join(', '))) {
+                $el = $(e.target).closest(touchSelectors.join(', '));
+            }
+
+            // Prevent default if we found an internal link
+            // (relative or absolute)
+            if ($el && $el.attr('href') && !$el.isExternalLink()) {
+                warn('Need to prevent default click behavior.');
+                e.preventDefault();
+            } else {
+                warn('No need to prevent default click behavior.');
+            }
+
+            // Trigger a tap event if touchstart is not on the job
+            if ($.support.touch) {
+                warn('Not converting click to a tap event because touch handler is on the job.');
+            } else {
+                warn('Converting click event to a tap event because touch handlers are not present or off.');
+                $(e.target).trigger('tap', e);
+            }
+        }
+
+        function doNavigation(fromPage, toPage, animation, goingBack) {
+
+            goingBack = goingBack ? goingBack : false;
+
+            // Error check for target page
+            if (toPage === undefined || toPage.length === 0) {
+                $.fn.unselect();
+                warn('Target element is missing.');
+                return false;
+            }
+
+            // Error check for fromPage === toPage
+            if (toPage.hasClass('current')) {
+                $.fn.unselect();
+                warn('You are already on the page you are trying to navigate to.');
+                return false;
+            }
+
+            // Collapse the keyboard
+            $(':focus').trigger('blur');
+
+            fromPage.trigger('pageAnimationStart', { direction: 'out', back: goingBack });
+            toPage.trigger('pageAnimationStart', { direction: 'in', back: goingBack });
+
+            if ($.support.animationEvents && animation && jQTSettings.useAnimations) {
+                // Fail over to 2d animation if need be
+                if (!$.support.transform3d && animation.is3d) {
+                    warn('Did not detect support for 3d animations, falling back to ' + jQTSettings.defaultAnimation + '.');
+                    animation.name = jQTSettings.defaultAnimation;
+                }
+
+                // Reverse animation if need be
+                var finalAnimationName = animation.name,
+                    is3d = animation.is3d ? 'animating3d' : '';
+
+                if (goingBack) {
+                    finalAnimationName = finalAnimationName.replace(/left|right|up|down|in|out/, reverseAnimation);
+                }
+
+                warn('finalAnimationName is ' + finalAnimationName + '.');
+
+                // Bind internal 'cleanup' callback
+                fromPage.bind('webkitAnimationEnd', navigationEndHandler);
+
+                // Trigger animations
+                $body.addClass('animating ' + is3d);
+
+                var lastScroll = window.pageYOffset;
+
+                // Position the incoming page so the toolbar is at the top of
+                // the viewport regardless of the scroll position on the outgoing page
+                if (jQTSettings.trackScrollPositions === true) {
+                    toPage.css('top', window.pageYOffset - (toPage.data('lastScroll') || 0));
+                }
+
+                toPage.addClass(finalAnimationName + ' in current');
+                fromPage.removeClass('current').addClass(finalAnimationName + ' out inmotion');
+
+                if (jQTSettings.trackScrollPositions === true) {
+                    fromPage.data('lastScroll', lastScroll);
+                    $('.scroll', fromPage).each(function() {
+                        $(this).data('lastScroll', this.scrollTop);
+                    });
+                }
+            } else {
+                toPage.addClass('current in');
+                fromPage.removeClass('current');
+                navigationEndHandler();
+            }
+
+            // Housekeeping
+            $currentPage = toPage;
+            if (goingBack) {
+                history.shift();
+            } else {
+                addPageToHistory($currentPage, animation);
+            }
+            setHash($currentPage.attr('id'));
+
+            // Private navigationEnd callback
+            function navigationEndHandler(event) {
+                var bufferTime = tapBuffer;
+
+                if ($.support.animationEvents && animation && jQTSettings.useAnimations) {
+                    fromPage.unbind('webkitAnimationEnd', navigationEndHandler);
+                    fromPage.removeClass(finalAnimationName + ' out inmotion');
+                    if (finalAnimationName) {
+                        toPage.removeClass(finalAnimationName);
+                    }
+                    $body.removeClass('animating animating3d');
+                    if (jQTSettings.trackScrollPositions === true) {
+                        toPage.css('top', -toPage.data('lastScroll'));
+
+                        // Have to make sure the scroll/style resets
+                        // are outside the flow of this function.
+                        setTimeout(function() {
+                            toPage.css('top', 0);
+                            window.scroll(0, toPage.data('lastScroll'));
+                            $('.scroll', toPage).each(function() {
+                                this.scrollTop = - $(this).data('lastScroll');
+                            });
+                        }, 0);
+                    }
+                } else {
+                    fromPage.removeClass(finalAnimationName + ' out inmotion');
+                    if (finalAnimationName) {
+                        toPage.removeClass(finalAnimationName);
+                    }
+                    bufferTime += 260;
+                }
+
+                // 'in' class is intentionally delayed,
+                // as it is our ghost click hack
+                setTimeout(function() {
+                    toPage.removeClass('in');
+                    window.scroll(0,0);
+                }, bufferTime);
+
+                fromPage.unselect();
+
+                // Trigger custom events
+                toPage.trigger('pageAnimationEnd', {
+                    direction:'in',
+                    animation: animation,
+                    back: goingBack
+                });
+                fromPage.trigger('pageAnimationEnd', {
+                    direction:'out',
+                    animation: animation,
+                    back: goingBack
+                });
+            }
+
+            return true;
+        }
+
+        function reverseAnimation(animation) {
+            var opposites={
+                'up' : 'down',
+                'down' : 'up',
+                'left' : 'right',
+                'right' : 'left',
+                'in' : 'out',
+                'out' : 'in'
+            };
+
+            return opposites[animation] || animation;
+        }
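+
+        // For illustration: reverseAnimation('left') === 'right', while an
+        // unknown token such as 'dissolve' passes through unchanged.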
+
+        function getOrientation() {
+            return orientation;
+        }
+
+        function goBack() {
+            // Error checking
+            if (history.length < 1) {
+                warn('History is empty.');
+            }
+
+            if (history.length === 1) {
+                warn('You are on the first panel.');
+                window.history.go(-1);
+            }
+
+            var from = history[0],
+                to = history[1];
+
+            if (doNavigation(from.page, to.page, from.animation, true)) {
+                return publicObj;
+            } else {
+                warn('Could not go back.');
+                return false;
+            }
+        }
+
+        function goTo(toPage, animation) {
+            var fromPage = history[0].page;
+
+            if (typeof animation === 'string') {
+                for (var i=0, max=animations.length; i < max; i++) {
+                    if (animations[i].name === animation) {
+                        animation = animations[i];
+                        break;
+                    }
+                }
+            }
+
+            if (typeof toPage === 'string') {
+                var nextPage = $(toPage);
+
+                if (nextPage.length < 1) {
+                    showPageByHref(toPage, {
+                        animation: animation
+                    });
+                    return;
+                } else {
+                    toPage = nextPage;
+                }
+            }
+            if (doNavigation(fromPage, toPage, animation)) {
+                return publicObj;
+            } else {
+                warn('Could not animate pages.');
+                return false;
+            }
+        }
+
+        function hashChangeHandler(e) {
+            if (location.hash === history[0].hash) {
+                warn('We are on the right panel.');
+                return true;
+            } else if (location.hash === '') {
+                goBack();
+                return true;
+            } else if (history[1] && location.hash === history[1].hash) {
+                goBack();
+                return true;
+            } else {
+                // Lastly, just try going to the ID...
+                warn('Could not find ID in history, just forwarding to DOM element.');
+                goTo($(location.hash), jQTSettings.defaultAnimation);
+            }
+        }
+
+        function initHairExtensions(options) {
+            // Preload images
+            if (jQTSettings.preloadImages) {
+                for (var i = jQTSettings.preloadImages.length - 1; i >= 0; i--) {
+                    (new Image()).src = jQTSettings.preloadImages[i];
+                }
+            }
+
+            // Set appropriate icon
+            // (retina display available in iOS 4.2 and later.)
+            var precomposed = (jQTSettings.addGlossToIcon) ? '' : '-precomposed';
+            if (jQTSettings.icon) {
+                hairExtensions += '<link rel="apple-touch-icon' + precomposed + '" href="' + jQTSettings.icon + '" />';
+            }
+            if (jQTSettings.icon4) {
+                hairExtensions += '<link rel="apple-touch-icon' + precomposed + '" sizes="114x114" href="' + jQTSettings.icon4 + '" />';
+            }
+            // Set startup screen
+            if (jQTSettings.startupScreen) {
+                hairExtensions += '<link rel="apple-touch-startup-image" href="' + jQTSettings.startupScreen + '" />';
+            }
+
+            // Set viewport
+            if (jQTSettings.fixedViewport) {
+                hairExtensions += '<meta name="viewport" content="initial-scale=1.0, maximum-scale=1.0, user-scalable=0"/>';
+            }
+
+            // Set full-screen
+            if (jQTSettings.fullScreen) {
+                hairExtensions += '<meta name="apple-mobile-web-app-capable" content="yes" />';
+                if (jQTSettings.statusBar) {
+                    hairExtensions += '<meta name="apple-mobile-web-app-status-bar-style" content="' + jQTSettings.statusBar + '" />';
+                }
+            }
+
+            // Attach hair extensions
+            if (hairExtensions) {
+                $head.prepend(hairExtensions);
+            }
+        }
+
+        function initFXExtensions() {
+            // Define public jQuery functions
+            $.fn.isExternalLink = function() {
+                var $el = $(this);
+                return ($el.attr('target') === '_blank' || $el.attr('rel') === 'external' || $el.is('a[href^="http://maps.google.com"], a[href^="mailto:"], a[href^="tel:"], a[href^="javascript:"], a[href*="youtube.com/v"], a[href*="youtube.com/watch"]'));
+            };
+            $.fn.makeActive = function() {
+                return $(this).addClass('active');
+            };
+            $.fn.unselect = function(obj) {
+                if (obj) {
+                    obj.removeClass('active');
+                } else {
+                    $('.active').removeClass('active');
+                }
+            };
+        }
+
+        function getAnimation(el) {
+            var animation;
+
+            for (var i=0, max=animations.length; i < max; i++) {
+                if (el.is(animations[i].selector)) {
+                    animation = animations[i];
+                    break;
+                }
+            }
+
+            if (!animation) {
+                warn('Animation could not be found. Using ' + jQTSettings.defaultAnimation + '.');
+                animation = jQTSettings.defaultAnimation;
+            }
+            return animation;
+        }
+
+        function insertPages(nodes, animation) {
+
+            var targetPage = null;
+
+            // Create the element via document.createElement directly
+            // instead of relying on $(nodes), to work around:
+            // https://github.com/madrobby/zepto/issues/312
+            var div = document.createElement('div');
+            div.innerHTML = nodes;
+
+            $(div).children().each(function(index, node) {
+                var $node = $(this);
+                if (!$node.attr('id')) {
+                    $node.attr('id', 'page-' + (++newPageCount));
+                }
+
+                // Remove any existing instance
+                $('#' + $node.attr('id')).remove();
+
+                $body.append($node);
+                $body.trigger('pageInserted', {page: $node});
+
+                if ($node.hasClass('current') || !targetPage) {
+                    targetPage = $node;
+                }
+            });
+            if (targetPage !== null) {
+                goTo(targetPage, animation);
+                return targetPage;
+            } else {
+                return false;
+            }
+        }
+
+        function orientationChangeHandler() {
+            scrollTo(0,0);
+            orientation = Math.abs(window.orientation) === 90 ? 'landscape' : 'portrait';
+            $body.removeClass('portrait landscape').addClass(orientation).trigger('turn', {orientation: orientation});
+        }
+
+        function setHash(hash) {
+            // Sanitize
+            if (jQTSettings.updateHash) {
+                location.hash = '#' + hash.replace(/^#/, '');
+            }
+        }
+
+        // Document ready stuff
+        function start() {
+            // Store some properties in a support object
+            if (!$.support) $.support = {};
+            $.support.animationEvents = (typeof window.WebKitAnimationEvent !== 'undefined');
+            $.support.touch = (typeof window.TouchEvent !== 'undefined') && (window.navigator.userAgent.indexOf('Mobile') > -1) && jQTSettings.useFastTouch;
+            $.support.transform3d = supportForTransform3d();
+            $.support.ios5 = supportIOS5();
+
+            if (!$.support.touch) {
+                warn('This device does not support touch interaction, or it has been deactivated by the developer. Some features might be unavailable.');
+            }
+            if (!$.support.transform3d) {
+                warn('This device does not support 3d animation. 2d animations will be used instead.');
+            }
+
+            // Add extensions
+            for (var i=0, max=extensions.length; i < max; i++) {
+                var fn = extensions[i];
+                if ($.isFunction(fn)) {
+                    $.extend(publicObj, fn(publicObj));
+                }
+            }
+
+            // Add extensions tapHandlers
+            for (var j=0, maxTapHandlers=extTapHandlers.length; j < maxTapHandlers; j++) {
+                addTapHandler(extTapHandlers[j]);
+            }
+            // Add default tapHandlers
+            addDefaultTapHandlers();
+
+            // Add animations
+            for (var k=0, maxAnimations=defaults.animations.length; k < maxAnimations; k++) {
+                var animation = defaults.animations[k];
+                if (jQTSettings[animation.name + 'Selector'] !== undefined) {
+                    animation.selector = jQTSettings[animation.name + 'Selector'];
+                }
+                addAnimation(animation);
+            }
+
+            // Create an array of stuff that needs touch event handling
+            touchSelectors.push(jQTSettings.touchSelector);
+            touchSelectors.push(jQTSettings.backSelector);
+            touchSelectors.push(jQTSettings.submitSelector);
+            $(touchSelectors.join(', ')).css('-webkit-touch-callout', 'none');
+
+            // Make sure we have a jqt element
+            $body = $('#jqt');
+            var anatomyLessons = [];
+
+            if ($body.length === 0) {
+                warn('Could not find an element with the id "jqt", so the body id has been set to "jqt". If you are having any problems, wrapping your panels in a div with the id "jqt" might help.');
+                $body = $(document.body).attr('id', 'jqt');
+            }
+
+            // Add some specific css if need be
+            if ($.support.transform3d) {
+                anatomyLessons.push('supports3d');
+            }
+
+            if (jQTSettings.useTouchScroll) {
+                if ($.support.ios5) {
+                    anatomyLessons.push('touchscroll');
+                } else {
+                    anatomyLessons.push('autoscroll');
+                }
+            }
+
+            if (jQTSettings.fullScreenClass && window.navigator.standalone === true) {
+                anatomyLessons.push(jQTSettings.fullScreenClass, jQTSettings.statusBar);
+            }
+
+            // Bind events
+            $body
+                .addClass(anatomyLessons.join(' '))
+                .bind('click', clickHandler)
+                .bind('orientationchange', orientationChangeHandler)
+                .bind('submit', submitHandler)
+                .bind('tap', tapHandler)
+                .bind($.support.touch ? 'touchstart' : 'mousedown', touchStartHandler)
+                .trigger('orientationchange');
+
+            $(window).bind('hashchange', hashChangeHandler);
+
+            var startHash = location.hash;
+
+            // Determine what the initial view should be
+            if ($('#jqt > .current').length === 0) {
+                $currentPage = $('#jqt > *:first-child').addClass('current');
+            } else {
+                $currentPage = $('#jqt > .current');
+            }
+
+            setHash($currentPage.attr('id'));
+            addPageToHistory($currentPage);
+
+            if ($(startHash).length === 1) {
+                goTo(startHash);
+            }
+        }
+
+        function showPageByHref(href, options) {
+
+            var defaults = {
+                data: null,
+                method: 'GET',
+                animation: null,
+                callback: null,
+                $referrer: null
+            };
+
+            var settings = $.extend({}, defaults, options);
+
+            if (href !== '#') {
+                $.ajax({
+                    url: href,
+                    data: settings.data,
+                    type: settings.method,
+                    success: function (data) {
+                        var firstPage = insertPages(data, settings.animation);
+                        if (firstPage) {
+                            if (settings.method === 'GET' && jQTSettings.cacheGetRequests === true && settings.$referrer) {
+                                settings.$referrer.attr('href', '#' + firstPage.attr('id'));
+                            }
+                            if (settings.callback) {
+                                settings.callback(true);
+                            }
+                        }
+                    },
+                    error: function (data) {
+                        if (settings.$referrer) {
+                            settings.$referrer.unselect();
+                        }
+                        if (settings.callback) {
+                            settings.callback(false);
+                        }
+                    }
+                });
+            } else if (settings.$referrer) {
+                settings.$referrer.unselect();
+            }
+        }
+
+        function submitHandler(e, callback) {
+
+            $(':focus').trigger('blur');
+
+            e.preventDefault();
+
+            var $form = (typeof(e)==='string') ? $(e).eq(0) : (e.target ? $(e.target) : $(e));
+
+            if ($form.length && $form.is(jQTSettings.formSelector) && $form.attr('action')) {
+                showPageByHref($form.attr('action'), {
+                    data: $form.serialize(),
+                    method: $form.attr('method') || 'POST',
+                    animation: getAnimation($form),
+                    callback: callback
+                });
+                return false;
+            }
+            return true;
+        }
+
+        function submitParentForm($el) {
+
+            var $form = $el.closest('form');
+            if ($form.length === 0) {
+                warn('No parent form found.');
+            } else {
+                warn('About to submit parent form.');
+                $form.trigger('submit');
+                return false;
+            }
+            return true;
+        }
+
+        function supportForTransform3d() {
+
+            var head, body, style, div, result;
+
+            head = document.getElementsByTagName('head')[0];
+            body = document.body;
+
+            style = document.createElement('style');
+            style.textContent = '@media (transform-3d),(-o-transform-3d),(-moz-transform-3d),(-webkit-transform-3d){#jqt-3dtest{height:3px}}';
+
+            div = document.createElement('div');
+            div.id = 'jqt-3dtest';
+
+            // Add to the page
+            head.appendChild(style);
+            body.appendChild(div);
+
+            // Check the result
+            result = div.offsetHeight === 3;
+
+            // Clean up
+            style.parentNode.removeChild(style);
+            div.parentNode.removeChild(div);
+
+            // Pass back result
+            warn('Support for 3d transforms: ' + result + '.');
+            return result;
+        }
+
+        function supportIOS5() {
+            var support = false;
+            var REGEX_IOS_VERSION = /OS (\d+)(_\d+)* like Mac OS X/i;
+
+            var agentString = window.navigator.userAgent;
+            if (REGEX_IOS_VERSION.test(agentString)) {
+                support = (REGEX_IOS_VERSION.exec(agentString)[1] >= 5);
+            }
+            return support;
+        }
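+
+        // Worked example (illustrative): for a user agent containing
+        // "OS 5_1 like Mac OS X", exec(agentString)[1] is the string '5',
+        // and '5' >= 5 coerces to true.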
+
+        function touchStartHandler(e) {
+
+            var $el = $(e.target),
+                selectors = touchSelectors.join(', ');
+
+            // Find the nearest tappable ancestor
+            if (!$el.is(selectors)) {
+                $el = $el.closest(selectors);
+            }
+
+            // Make sure we have a tappable element
+            if ($el.length && $el.attr('href')) {
+                $el.addClass('active');
+            }
+
+            // Remove our active class if we move
+            $el.on($.support.touch ? 'touchmove' : 'mousemove', function() {
+                $el.removeClass('active');
+            });
+
+            $el.on('touchend', function() {
+                $el.unbind('touchmove mousemove');
+            });
+        }
+
+        function tapHandler(e) {
+
+            if (e.isDefaultPrevented()) {
+                return true;
+            }
+
+            // Grab the target element
+            var $el = $(e.target);
+
+            // Find the nearest tappable ancestor
+            if (!$el.is(touchSelectors.join(', '))) {
+                $el = $el.closest(touchSelectors.join(', '));
+            }
+
+            // Make sure we have a tappable element
+            if (!$el.length || !$el.attr('href')) {
+                warn('Could not find a link related to tapped element.');
+                return true;
+            }
+
+            // Init some vars
+            var target = $el.attr('target'),
+                hash = $el.prop('hash'),
+                href = $el.attr('href');
+
+            var params = {
+                e: e,
+                $el: $el,
+                target: target,
+                hash: hash,
+                href: href,
+                jQTSettings: jQTSettings
+            };
+
+            // Loop thru all handlers
+            for (var i=0, len=tapHandlers.length; i<len; i++) {
+                var handler = tapHandlers[i];
+                var supported = handler.isSupported(e, params);
+                if (supported) {
+                    var flag = handler.fn(e, params);
+                    return flag;
+                }
+            }
+        }
+
+        function addDefaultTapHandlers() {
+            addTapHandler({
+                name: 'external-link',
+                isSupported: function(e, params) {
+                    return params.$el.isExternalLink();
+                },
+                fn: function(e, params) {
+                    params.$el.unselect();
+                    return true;
+                }
+            });
+            addTapHandler({
+                name: 'back-selector',
+                isSupported: function(e, params) {
+                    return params.$el.is(params.jQTSettings.backSelector);
+                },
+                fn: function(e, params) {
+                    // User clicked or tapped a back button
+                    goBack(params.hash);
+                }
+            });
+            addTapHandler({
+                name: 'submit-selector',
+                isSupported: function(e, params) {
+                    return params.$el.is(params.jQTSettings.submitSelector);
+                },
+                fn: function(e, params) {
+                    // User clicked or tapped a submit element
+                    submitParentForm(params.$el);
+                }
+            });
+            addTapHandler({
+                name: 'webapp',
+                isSupported: function(e, params) {
+                    return params.target === '_webapp';
+                },
+                fn: function(e, params) {
+                    // User clicked or tapped an internal link, fullscreen mode
+                    window.location = params.href;
+                    return false;
+                }
+            });
+            addTapHandler({
+                name: 'no-op',
+                isSupported: function(e, params) {
+                    return params.href === '#';
+                },
+                fn: function(e, params) {
+                    // Allow tap on item with no href
+                    params.$el.unselect();
+                    return true;
+                }
+            });
+            addTapHandler({
+                name: 'standard',
+                isSupported: function(e, params) {
+                    return params.hash && params.hash !== '#';
+                },
+                fn: function(e, params) {
+                    var animation = getAnimation(params.$el);
+                    // Internal href
+                    params.$el.addClass('active');
+                    goTo(
+                        $(params.hash).data('referrer', params.$el),
+                        animation,
+                        params.$el.hasClass('reverse')
+                    );
+                    return false;
+                }
+            });
+            addTapHandler({
+                name: 'external',
+                isSupported: function(e, params) {
+                    return true;
+                },
+                fn: function(e, params) {
+                    var animation = getAnimation(params.$el);
+
+                    // External href
+                    params.$el.addClass('loading active');
+                    showPageByHref(params.$el.attr('href'), {
+                        animation: animation,
+                        callback: function() {
+                            params.$el.removeClass('loading');
+                            setTimeout($.fn.unselect, 250, params.$el);
+                        },
+                        $referrer: params.$el
+                    });
+                    return false;
+                }
+            });
+        }
+
+        // Get the party started
+        jQTSettings = $.extend({}, defaults, options);
+
+        initHairExtensions(options);
+
+        initFXExtensions();
+
+        // Expose public methods and properties
+        publicObj = {
+            addAnimation: addAnimation,
+            animations: animations,
+            getOrientation: getOrientation,
+            goBack: goBack,
+            insertPages: insertPages,
+            goTo: goTo,
+            history: history,
+            settings: jQTSettings,
+            submitForm: submitHandler
+        };
+
+        // must be called after the publicObj assignment to prevent timing problems with extension loading.
+        jQTSettings.starter(start);
+
+        return publicObj;
+    };
+
+    $.jQTouch.prototype.extensions = [];
+    $.jQTouch.prototype.tapHandlers = [];
+
+    // Extensions directly manipulate the jQTouch object,
+    // before it's initialized
+    $.jQTouch.addExtension = function(extension) {
+        $.jQTouch.prototype.extensions.push(extension);
+    };
+
+    // Experimental tap handlers that can bypass
+    // default jQTouch tap handling
+    $.jQTouch.addTapHandler = function(extension) {
+        $.jQTouch.prototype.tapHandlers.push(extension);
+    };
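+
+    // Hypothetical extension sketch (names invented for illustration): a module
+    // registered this way has its return value merged into the public object:
+    //     $.jQTouch.addExtension(function (jQT) {
+    //         return { ping: function () { return 'pong'; } };
+    //     });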
+
+})(); // Double closure, ALL THE WAY ACROSS THE SKY
diff --git a/client/galaxy/scripts/libs/jquery/jquery-ui.js b/client/galaxy/scripts/libs/jquery/jquery-ui.js
new file mode 100755
index 0000000..6100a95
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery-ui.js
@@ -0,0 +1,6 @@
+/*! jQuery UI - v1.9.1 - 2012-10-29
+* http://jqueryui.com
+* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, jquery.ui.position.js, jquery.ui.autocomplete.js, jquery.ui.button.js, jquery.ui.menu.js, jquery.ui.slider.js
+* Copyright (c) 2012 jQuery Foundation and other contributors Licensed MIT */
+
+(function(e,t){function i(t,n){var r,i,o,u=t.nodeName.toLowerCase();return"area"===u?(r=t.parentNode,i=r.name,!t.href||!i||r.nodeName.toLowerCase()!=="map"?!1:(o=e("img[usemap=#"+i+"]")[0],!!o&&s(o))):(/input|select|textarea|button|object/.test(u)?!t.disabled:"a"===u?t.href||n:n)&&s(t)}function s(t){return e.expr.filters.visible(t)&&!e(t).parents().andSelf().filter(function(){return e.css(this,"visibility")==="hidden"}).length}var n=0,r=/^ui-id-\d+$/;e.ui=e.ui||{};if(e.ui.version)return; [...]
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/jquery/jquery.autocomplete.js b/client/galaxy/scripts/libs/jquery/jquery.autocomplete.js
new file mode 100644
index 0000000..7be89e4
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.autocomplete.js
@@ -0,0 +1,1152 @@
+/**
+ * @fileOverview jquery-autocomplete, the jQuery Autocompleter
+ * @author <a href="mailto:dylan at dyve.net">Dylan Verheul</a>
+ * @version 2.4.4
+ * @requires jQuery 1.6+
+ * @license MIT | GPL | Apache 2.0, see LICENSE.txt
+ * @see https://github.com/dyve/jquery-autocomplete
+ */
+(function($) {
+    "use strict";
+
+    /**
+     * jQuery autocomplete plugin
+     * @param {object|string} options
+     * @returns {object} jQuery object
+     */
+    $.fn.autocomplete = function(options) {
+        var url;
+        if (arguments.length > 1) {
+            url = options;
+            options = arguments[1];
+            options.url = url;
+        } else if (typeof options === 'string') {
+            url = options;
+            options = { url: url };
+        }
+        var opts = $.extend({}, $.fn.autocomplete.defaults, options);
+        return this.each(function() {
+            var $this = $(this);
+            $this.data('autocompleter', new $.Autocompleter(
+                $this,
+                $.meta ? $.extend({}, opts, $this.data()) : opts
+            ));
+        });
+    };
+
+    /**
+     * Store default options
+     * @type {object}
+     */
+    $.fn.autocomplete.defaults = {
+        inputClass: 'acInput',
+        loadingClass: 'acLoading',
+        resultsClass: 'acResults',
+        selectClass: 'acSelect',
+        queryParamName: 'q',
+        extraParams: {},
+        remoteDataType: false,
+        lineSeparator: '\n',
+        cellSeparator: '|',
+        minChars: 2,
+        maxItemsToShow: 10,
+        delay: 400,
+        useCache: true,
+        maxCacheLength: 10,
+        matchSubset: true,
+        matchCase: false,
+        matchInside: true,
+        mustMatch: false,
+        selectFirst: false,
+        selectOnly: false,
+        showResult: null,
+        preventDefaultReturn: 1,
+        preventDefaultTab: 0,
+        autoFill: false,
+        filterResults: true,
+        filter: true,
+        sortResults: true,
+        sortFunction: null,
+        onItemSelect: null,
+        onNoMatch: null,
+        onFinish: null,
+        matchStringConverter: null,
+        beforeUseConverter: null,
+        autoWidth: 'min-width',
+        useDelimiter: false,
+        delimiterChar: ',',
+        delimiterKeyCode: 188,
+        processData: null,
+        onError: null,
+        enabled: true
+    };
+
+    /**
+     * Sanitize result
+     * @param {Object} result
+     * @returns {Object} object with members value (String) and data (Object)
+     * @private
+     */
+    var sanitizeResult = function(result) {
+        var value, data;
+        var type = typeof result;
+        if (type === 'string') {
+            value = result;
+            data = {};
+        } else if ($.isArray(result)) {
+            value = result[0];
+            data = result.slice(1);
+        } else if (type === 'object') {
+            value = result.value;
+            data = result.data;
+        }
+        value = String(value);
+        if (typeof data !== 'object') {
+            data = {};
+        }
+        return {
+            value: value,
+            data: data
+        };
+    };
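+
+    // Accepted shapes, for illustration: 'foo' -> {value: 'foo', data: {}};
+    // ['foo', 1, 2] -> {value: 'foo', data: [1, 2]}; and {value: v, data: d}
+    // objects pass through, with value always coerced to a String.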
+
+    /**
+     * Sanitize integer
+     * @param {mixed} value
+     * @param {Number} stdValue Fallback used when value is invalid or below options.min
+     * @param {Object} [options]
+     * @returns {Number} integer
+     * @private
+     */
+    var sanitizeInteger = function(value, stdValue, options) {
+        var num = parseInt(value, 10);
+        options = options || {};
+        if (isNaN(num) || (options.min && num < options.min)) {
+            num = stdValue;
+        }
+        return num;
+    };
+
+    /**
+     * Create partial url for a name/value pair
+     */
+    var makeUrlParam = function(name, value) {
+        return [name, encodeURIComponent(value)].join('=');
+    };
+
+    /**
+     * Build a URL
+     * @param {string} url Base URL
+     * @param {object} [params] Dictionary of parameters
+     */
+    var makeUrl = function(url, params) {
+        var urlAppend = [];
+        $.each(params, function(index, value) {
+            urlAppend.push(makeUrlParam(index, value));
+        });
+        if (urlAppend.length) {
+            url += url.indexOf('?') === -1 ? '?' : '&';
+            url += urlAppend.join('&');
+        }
+        return url;
+    };
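+
+    // Example (illustrative): makeUrl('/search', { q: 'foo bar' }) returns
+    // '/search?q=foo%20bar'; further parameters are appended with '&'.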
+
+    /**
+     * Default sort filter
+     * @param {object} a
+     * @param {object} b
+     * @param {boolean} matchCase
+     * @returns {number}
+     */
+    var sortValueAlpha = function(a, b, matchCase) {
+        a = String(a.value);
+        b = String(b.value);
+        if (!matchCase) {
+            a = a.toLowerCase();
+            b = b.toLowerCase();
+        }
+        if (a > b) {
+            return 1;
+        }
+        if (a < b) {
+            return -1;
+        }
+        return 0;
+    };
+
+    /**
+     * Parse data received in text format
+     * @param {string} text Plain text input
+     * @param {string} lineSeparator String that separates lines
+     * @param {string} cellSeparator String that separates cells
+     * @returns {array} Array of autocomplete data objects
+     */
+    var plainTextParser = function(text, lineSeparator, cellSeparator) {
+        var results = [];
+        var i, j, data, line, value, lines;
+        // Be nice, normalize CRLF linebreaks before splitting on lineSeparator
+        // (the /g regex is needed: a string pattern would only replace the first match)
+        lines = String(text).replace(/\r\n/g, '\n').split(lineSeparator);
+        for (i = 0; i < lines.length; i++) {
+            line = lines[i].split(cellSeparator);
+            data = [];
+            for (j = 0; j < line.length; j++) {
+                data.push(decodeURIComponent(line[j]));
+            }
+            value = data.shift();
+            results.push({ value: value, data: data });
+        }
+        return results;
+    };
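+
+    // Worked example (illustrative), using the default separators:
+    //     plainTextParser('apple|fruit\npea|veg', '\n', '|')
+    // yields [{value: 'apple', data: ['fruit']}, {value: 'pea', data: ['veg']}].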
+
+    /**
+     * Autocompleter class
+     * @param {object} $elem jQuery object with one input tag
+     * @param {object} options Settings
+     * @constructor
+     */
+    $.Autocompleter = function($elem, options) {
+
+        /**
+         * Assert parameters
+         */
+        if (!$elem || !($elem instanceof $) || $elem.length !== 1 || ($elem.get(0).tagName.toUpperCase() !== 'INPUT' && $elem.get(0).tagName.toUpperCase() !== 'TEXTAREA')) {
+            throw new Error('Invalid parameter for jquery.Autocompleter, jQuery object with one element with INPUT or TEXTAREA tag expected.');
+        }
+
+        /**
+         * @constant Link to this instance
+         * @type object
+         * @private
+         */
+        var self = this;
+
+        /**
+         * @property {object} Options for this instance
+         * @public
+         */
+        this.options = options;
+
+        /**
+         * @property {object} Cached data for this instance
+         * @private
+         */
+        this.cacheData_ = {};
+
+        /**
+         * @property {number} Number of cached data items
+         * @private
+         */
+        this.cacheLength_ = 0;
+
+        /**
+         * @property {string} Class name to mark selected item
+         * @private
+         */
+        this.selectClass_ = 'jquery-autocomplete-selected-item';
+
+        /**
+         * @property {number} Handler to activation timeout
+         * @private
+         */
+        this.keyTimeout_ = null;
+
+        /**
+         * @property {number} Handler to finish timeout
+         * @private
+         */
+        this.finishTimeout_ = null;
+
+        /**
+         * @property {number} Last key pressed in the input field (store for behavior)
+         * @private
+         */
+        this.lastKeyPressed_ = null;
+
+        /**
+         * @property {string} Last value processed by the autocompleter
+         * @private
+         */
+        this.lastProcessedValue_ = null;
+
+        /**
+         * @property {string} Last value selected by the user
+         * @private
+         */
+        this.lastSelectedValue_ = null;
+
+        /**
+         * @property {boolean} Is this autocompleter active (showing results)?
+         * @see showResults
+         * @private
+         */
+        this.active_ = false;
+
+        /**
+         * @property {boolean} Is this autocompleter allowed to finish on blur?
+         * @private
+         */
+        this.finishOnBlur_ = true;
+
+        /**
+         * Sanitize options
+         */
+        this.options.minChars = sanitizeInteger(this.options.minChars, $.fn.autocomplete.defaults.minChars, { min: 0 });
+        this.options.maxItemsToShow = sanitizeInteger(this.options.maxItemsToShow, $.fn.autocomplete.defaults.maxItemsToShow, { min: 0 });
+        this.options.maxCacheLength = sanitizeInteger(this.options.maxCacheLength, $.fn.autocomplete.defaults.maxCacheLength, { min: 1 });
+        this.options.delay = sanitizeInteger(this.options.delay, $.fn.autocomplete.defaults.delay, { min: 0 });
+        if (this.options.preventDefaultReturn != 2) {
+            this.options.preventDefaultReturn = this.options.preventDefaultReturn ? 1 : 0;
+        }
+        if (this.options.preventDefaultTab != 2) {
+            this.options.preventDefaultTab = this.options.preventDefaultTab ? 1 : 0;
+        }
+
+        /**
+         * Init DOM elements repository
+         */
+        this.dom = {};
+
+        /**
+         * Store the input element we're attached to in the repository
+         */
+        this.dom.$elem = $elem;
+
+        /**
+         * Switch off the native autocomplete and add the input class
+         */
+        this.dom.$elem.attr('autocomplete', 'off').addClass(this.options.inputClass);
+
+        /**
+         * Create DOM element to hold results, and force absolute position
+         */
+        this.dom.$results = $('<div></div>').hide().addClass(this.options.resultsClass).css({
+            position: 'absolute'
+        });
+        $('body').append(this.dom.$results);
+
+        /**
+         * Attach keyboard monitoring to $elem
+         */
+        $elem.keydown(function(e) {
+            self.lastKeyPressed_ = e.keyCode;
+            switch(self.lastKeyPressed_) {
+
+                case self.options.delimiterKeyCode: // comma = 188
+                    if (self.options.useDelimiter && self.active_) {
+                        self.selectCurrent();
+                    }
+                    break;
+
+                // ignore navigational & special keys
+                case 35: // end
+                case 36: // home
+                case 16: // shift
+                case 17: // ctrl
+                case 18: // alt
+                case 37: // left
+                case 39: // right
+                    break;
+
+                case 38: // up
+                    e.preventDefault();
+                    if (self.active_) {
+                        self.focusPrev();
+                    } else {
+                        self.activate();
+                    }
+                    return false;
+
+                case 40: // down
+                    e.preventDefault();
+                    if (self.active_) {
+                        self.focusNext();
+                    } else {
+                        self.activate();
+                    }
+                    return false;
+
+                case 9: // tab
+                    if (self.active_) {
+                        self.selectCurrent();
+                        if (self.options.preventDefaultTab) {
+                            e.preventDefault();
+                            return false;
+                        }
+                    }
+                    if (self.options.preventDefaultTab === 2) {
+                        e.preventDefault();
+                        return false;
+                    }
+                    break;
+
+                case 13: // return
+                    if (self.active_) {
+                        self.selectCurrent();
+                        if (self.options.preventDefaultReturn) {
+                            e.preventDefault();
+                            return false;
+                        }
+                    }
+                    if (self.options.preventDefaultReturn === 2) {
+                        e.preventDefault();
+                        return false;
+                    }
+                    break;
+
+                case 27: // escape
+                    if (self.active_) {
+                        e.preventDefault();
+                        self.deactivate(true);
+                        return false;
+                    }
+                    break;
+
+                default:
+                    self.activate();
+
+            }
+        });
+
+        /**
+         * Attach paste event listener because a paste may occur much later than keydown, or even without a keydown at all
+         */
+        $elem.on('paste', function() {
+            self.activate();
+        });
+
+        /**
+         * Finish on blur event
+         * Use a timeout because instant blur gives race conditions
+         */
+        var onBlurFunction = function() {
+            self.deactivate(true);
+        };
+        $elem.blur(function() {
+            if (self.finishOnBlur_) {
+                self.finishTimeout_ = setTimeout(onBlurFunction, 200);
+            }
+        });
+        /**
+         * Catch a race condition on form submit
+         */
+        $elem.parents('form').on('submit', onBlurFunction);
+
+    };
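+
+    /**
+     * Usage sketch (illustrative only; assumes the $.fn.autocomplete wrapper
+     * defined elsewhere in this file attaches an $.Autocompleter to the element):
+     *
+     *   $('#city').autocomplete({
+     *       url: '/suggest',        // remote endpoint, queried via makeUrl()
+     *       minChars: 2,            // sanitized above against the plugin defaults
+     *       useCache: true,         // enables cacheRead()/cacheWrite() below
+     *       onItemSelect: function(item) { console.log(item.value, item.data); }
+     *   });
+     */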
+
+    /**
+     * Position output DOM elements
+     * @private
+     */
+    $.Autocompleter.prototype.position = function() {
+        var offset = this.dom.$elem.offset();
+        var height = this.dom.$results.outerHeight();
+        var totalHeight = $(window).outerHeight();
+        var inputBottom = offset.top + this.dom.$elem.outerHeight();
+        var bottomIfDown = inputBottom + height;
+        // Set autocomplete results at the bottom of input
+        var position = {top: inputBottom, left: offset.left};
+        if (bottomIfDown > totalHeight) {
+            // Try to set autocomplete results at the top of input
+            var topIfUp = offset.top - height;
+            if (topIfUp >= 0) {
+                position.top = topIfUp;
+            }
+        }
+        this.dom.$results.css(position);
+    };
+
+    /**
+     * Read from cache
+     * @private
+     */
+    $.Autocompleter.prototype.cacheRead = function(filter) {
+        var filterLength, searchLength, search, maxPos, pos;
+        if (this.options.useCache) {
+            filter = String(filter);
+            filterLength = filter.length;
+            if (this.options.matchSubset) {
+                searchLength = 1;
+            } else {
+                searchLength = filterLength;
+            }
+            while (searchLength <= filterLength) {
+                if (this.options.matchInside) {
+                    maxPos = filterLength - searchLength;
+                } else {
+                    maxPos = 0;
+                }
+                pos = 0;
+                while (pos <= maxPos) {
+                    // Probe the cache for the substring of the filter starting at pos
+                    search = filter.substr(pos, searchLength);
+                    if (this.cacheData_[search] !== undefined) {
+                        return this.cacheData_[search];
+                    }
+                    pos++;
+                }
+                searchLength++;
+            }
+        }
+        return false;
+    };
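+
+    /**
+     * Example (illustrative): with matchSubset enabled, an entry cached for the
+     * filter "ga" is found again when the user types "gal", because the loop
+     * above probes cached keys that are substrings of the current filter,
+     * shortest first; the broader cached result list is then narrowed again by
+     * filterResults().
+     */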
+
+    /**
+     * Write to cache
+     * @private
+     */
+    $.Autocompleter.prototype.cacheWrite = function(filter, data) {
+        if (this.options.useCache) {
+            if (this.cacheLength_ >= this.options.maxCacheLength) {
+                this.cacheFlush();
+            }
+            filter = String(filter);
+            // Grow the entry counter only for keys not yet in the cache
+            if (this.cacheData_[filter] === undefined) {
+                this.cacheLength_++;
+            }
+            this.cacheData_[filter] = data;
+            return this.cacheData_[filter];
+        }
+        return false;
+    };
+
+    /**
+     * Flush cache
+     * @public
+     */
+    $.Autocompleter.prototype.cacheFlush = function() {
+        this.cacheData_ = {};
+        this.cacheLength_ = 0;
+    };
+
+    /**
+     * Call hook
+     * Note that all called hooks are passed the autocompleter object
+     * @param {string} hook
+     * @param data
+     * @returns Result of called hook, false if hook is undefined
+     */
+    $.Autocompleter.prototype.callHook = function(hook, data) {
+        var f = this.options[hook];
+        if (f && $.isFunction(f)) {
+            return f(data, this);
+        }
+        return false;
+    };
+
+    /**
+     * Set timeout to activate autocompleter
+     */
+    $.Autocompleter.prototype.activate = function() {
+        if (!this.options.enabled) return;
+        var self = this;
+        if (this.keyTimeout_) {
+            clearTimeout(this.keyTimeout_);
+        }
+        this.keyTimeout_ = setTimeout(function() {
+            self.activateNow();
+        }, this.options.delay);
+    };
+
+    /**
+     * Activate autocompleter immediately
+     */
+    $.Autocompleter.prototype.activateNow = function() {
+        var value = this.beforeUseConverter(this.dom.$elem.val());
+        if (value !== this.lastProcessedValue_ && value !== this.lastSelectedValue_) {
+            this.fetchData(value);
+        }
+    };
+
+    /**
+     * Get autocomplete data for a given value
+     * @param {string} value Value to base autocompletion on
+     * @private
+     */
+    $.Autocompleter.prototype.fetchData = function(value) {
+        var self = this;
+        var processResults = function(results, filter) {
+            if (self.options.processData) {
+                results = self.options.processData(results);
+            }
+            self.showResults(self.filterResults(results, filter), filter);
+        };
+        this.lastProcessedValue_ = value;
+        if (value.length < this.options.minChars) {
+            processResults([], value);
+        } else if (this.options.data) {
+            processResults(this.options.data, value);
+        } else {
+            this.fetchRemoteData(value, function(remoteData) {
+                processResults(remoteData, value);
+            });
+        }
+    };
+
+    /**
+     * Get remote autocomplete data for a given value
+     * @param {string} filter The filter to base remote data on
+     * @param {function} callback The function to call after data retrieval
+     * @private
+     */
+    $.Autocompleter.prototype.fetchRemoteData = function(filter, callback) {
+        var data = this.cacheRead(filter);
+        if (data) {
+            callback(data);
+        } else {
+            var self = this;
+            var dataType = self.options.remoteDataType === 'json' ? 'json' : 'text';
+            var ajaxCallback = function(data) {
+                var parsed = false;
+                if (data !== false) {
+                    parsed = self.parseRemoteData(data);
+                    self.cacheWrite(filter, parsed);
+                }
+                self.dom.$elem.removeClass(self.options.loadingClass);
+                callback(parsed);
+            };
+            this.dom.$elem.addClass(this.options.loadingClass);
+            $.ajax({
+                url: this.makeUrl(filter),
+                success: ajaxCallback,
+                error: function(jqXHR, textStatus, errorThrown) {
+                    if($.isFunction(self.options.onError)) {
+                        self.options.onError(jqXHR, textStatus, errorThrown);
+                    } else {
+                      ajaxCallback(false);
+                    }
+                },
+                dataType: dataType
+            });
+        }
+    };
+
+    /**
+     * Create or update an extra parameter for the remote request
+     * @param {string} name Parameter name
+     * @param {string} value Parameter value
+     * @public
+     */
+    $.Autocompleter.prototype.setExtraParam = function(name, value) {
+        var index = $.trim(String(name));
+        if (index) {
+            if (!this.options.extraParams) {
+                this.options.extraParams = {};
+            }
+            if (this.options.extraParams[index] !== value) {
+                this.options.extraParams[index] = value;
+                this.cacheFlush();
+            }
+        }
+
+        return this;
+    };
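+
+    /**
+     * Usage sketch (illustrative; parameter names are examples, the constructor
+     * signature ($elem, options) and the default queryParamName 'q' are assumed):
+     *
+     *   var ac = new $.Autocompleter($('#city'), { url: '/suggest' });
+     *   ac.setExtraParam('country', 'NL'); // changing the value flushes the cache
+     *   // subsequent requests look like: /suggest?country=NL&q=<filter>
+     */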
+
+    /**
+     * Build the url for a remote request
+     * If options.queryParamName === false, append query to url instead of using a GET parameter
+     * @param {string} param The value parameter to pass to the backend
+     * @returns {string} The finished url with parameters
+     */
+    $.Autocompleter.prototype.makeUrl = function(param) {
+        var self = this;
+        var url = this.options.url;
+        var params = $.extend({}, this.options.extraParams);
+
+        if (this.options.queryParamName === false) {
+            url += encodeURIComponent(param);
+        } else {
+            params[this.options.queryParamName] = param;
+        }
+
+        return makeUrl(url, params);
+    };
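+
+    /**
+     * Example (illustrative; exact parameter order depends on the makeUrl()
+     * helper defined earlier in this file, and the default queryParamName 'q'
+     * is assumed):
+     *   url '/suggest', extraParams {limit: 10}, param 'ga'
+     *     -> '/suggest?limit=10&q=ga'
+     *   With queryParamName === false the query is appended to the url itself:
+     *   url '/suggest/', param 'ga' -> '/suggest/ga?limit=10'
+     */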
+
+    /**
+     * Parse data received from server
+     * @param remoteData Data received from remote server
+     * @returns {array} Parsed data
+     */
+    $.Autocompleter.prototype.parseRemoteData = function(remoteData) {
+        var remoteDataType;
+        var data = remoteData;
+        if (this.options.remoteDataType === 'json') {
+            remoteDataType = typeof(remoteData);
+            switch (remoteDataType) {
+                case 'object':
+                    data = remoteData;
+                    break;
+                case 'string':
+                    data = $.parseJSON(remoteData);
+                    break;
+                default:
+                    throw new Error("Unexpected remote data type: " + remoteDataType);
+            }
+            return data;
+        }
+        return plainTextParser(data, this.options.lineSeparator, this.options.cellSeparator);
+    };
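+
+    /**
+     * Example (illustrative, assuming '\n' as lineSeparator and '|' as
+     * cellSeparator): a plain-text response such as
+     *   "Ghent|BE\nGalway|IE"
+     * is split into one result per line and into cells by cellSeparator via
+     * plainTextParser() defined earlier in this file; typically the first cell
+     * becomes the result value.
+     */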
+
+    /**
+     * Default filter for results
+     * @param {Object} result
+     * @param {String} filter
+     * @returns {boolean} Include this result
+     * @private
+     */
+    $.Autocompleter.prototype.defaultFilter = function(result, filter) {
+        if (!result.value) {
+            return false;
+        }
+        if (this.options.filterResults) {
+            var pattern = this.matchStringConverter(filter);
+            var testValue = this.matchStringConverter(result.value);
+            if (!this.options.matchCase) {
+                pattern = pattern.toLowerCase();
+                testValue = testValue.toLowerCase();
+            }
+            var patternIndex = testValue.indexOf(pattern);
+            if (this.options.matchInside) {
+                return patternIndex > -1;
+            } else {
+                return patternIndex === 0;
+            }
+        }
+        return true;
+    };
+
+    /**
+     * Filter result
+     * @param {Object} result
+     * @param {String} filter
+     * @returns {boolean} Include this result
+     * @private
+     */
+    $.Autocompleter.prototype.filterResult = function(result, filter) {
+        // No filter
+        if (this.options.filter === false) {
+            return true;
+        }
+        // Custom filter
+        if ($.isFunction(this.options.filter)) {
+            return this.options.filter(result, filter);
+        }
+        // Default filter
+        return this.defaultFilter(result, filter);
+    };
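+
+    /**
+     * Example (illustrative) of a custom filter supplied via the options; it is
+     * invoked above as options.filter(result, filter):
+
+     *   $('#city').autocomplete({
+     *       data: cities,
+     *       filter: function(result, filter) {
+     *           // accept only case-sensitive prefix matches
+     *           return result.value.indexOf(filter) === 0;
+     *       }
+     *   });
+     */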
+
+    /**
+     * Filter results
+     * @param results
+     * @param filter
+     */
+    $.Autocompleter.prototype.filterResults = function(results, filter) {
+        var filtered = [];
+        var i, result;
+
+        for (i = 0; i < results.length; i++) {
+            result = sanitizeResult(results[i]);
+            if (this.filterResult(result, filter)) {
+                filtered.push(result);
+            }
+        }
+        if (this.options.sortResults) {
+            filtered = this.sortResults(filtered, filter);
+        }
+        if (this.options.maxItemsToShow > 0 && this.options.maxItemsToShow < filtered.length) {
+            filtered.length = this.options.maxItemsToShow;
+        }
+        return filtered;
+    };
+
+    /**
+     * Sort results
+     * @param results
+     * @param filter
+     */
+    $.Autocompleter.prototype.sortResults = function(results, filter) {
+        var self = this;
+        var sortFunction = this.options.sortFunction;
+        if (!$.isFunction(sortFunction)) {
+            sortFunction = function(a, b, f) {
+                return sortValueAlpha(a, b, self.options.matchCase);
+            };
+        }
+        results.sort(function(a, b) {
+            return sortFunction(a, b, filter, self.options);
+        });
+        return results;
+    };
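+
+    /**
+     * Example (illustrative) of a custom comparator supplied via
+     * options.sortFunction; it is invoked above as sortFunction(a, b, filter, options):
+     *
+     *   sortFunction: function(a, b, filter) {
+     *       // rank results whose value starts with the filter first
+     *       var af = a.value.indexOf(filter) === 0 ? 0 : 1;
+     *       var bf = b.value.indexOf(filter) === 0 ? 0 : 1;
+     *       return (af - bf) || a.value.localeCompare(b.value);
+     *   }
+     */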
+
+    /**
+     * Convert string before matching
+     * @param {String} s String to convert
+     * @param a Extra argument passed through to the configured converter
+     * @param b Extra argument passed through to the configured converter
+     */
+    $.Autocompleter.prototype.matchStringConverter = function(s, a, b) {
+        var converter = this.options.matchStringConverter;
+        if ($.isFunction(converter)) {
+            s = converter(s, a, b);
+        }
+        return s;
+    };
+
+    /**
+     * Convert string before use
+     * @param {String} s
+     */
+    $.Autocompleter.prototype.beforeUseConverter = function(s) {
+        s = this.getValue(s);
+        var converter = this.options.beforeUseConverter;
+        if ($.isFunction(converter)) {
+            s = converter(s);
+        }
+        return s;
+    };
+
+    /**
+     * Enable finish on blur event
+     */
+    $.Autocompleter.prototype.enableFinishOnBlur = function() {
+        this.finishOnBlur_ = true;
+    };
+
+    /**
+     * Disable finish on blur event
+     */
+    $.Autocompleter.prototype.disableFinishOnBlur = function() {
+        this.finishOnBlur_ = false;
+    };
+
+    /**
+     * Create a results item (LI element) from a result
+     * @param result
+     */
+    $.Autocompleter.prototype.createItemFromResult = function(result) {
+        var self = this;
+        var $li = $('<li/>');
+        $li.html(this.showResult(result.value, result.data));
+        $li.data({value: result.value, data: result.data})
+            .click(function() {
+                self.selectItem($li);
+            })
+            .mousedown(self.disableFinishOnBlur)
+            .mouseup(self.enableFinishOnBlur)
+        ;
+        return $li;
+    };
+
+    /**
+     * Get all items from the results list
+     */
+    $.Autocompleter.prototype.getItems = function() {
+        return $('>ul>li', this.dom.$results);
+    };
+
+    /**
+     * Show all results
+     * @param results
+     * @param filter
+     */
+    $.Autocompleter.prototype.showResults = function(results, filter) {
+        var numResults = results.length;
+        var self = this;
+        var $ul = $('<ul></ul>');
+        var i, result, $li, autoWidth, first = false, $first = false;
+
+        if (numResults) {
+            for (i = 0; i < numResults; i++) {
+                result = results[i];
+                $li = this.createItemFromResult(result);
+                $ul.append($li);
+                if (first === false) {
+                    first = String(result.value);
+                    $first = $li;
+                    $li.addClass(this.options.firstItemClass);
+                }
+                if (i === numResults - 1) {
+                    $li.addClass(this.options.lastItemClass);
+                }
+            }
+
+            this.dom.$results.html($ul).show();
+
+            // Always recalculate position since window size or
+            // input element location may have changed.
+            this.position();
+            if (this.options.autoWidth) {
+                autoWidth = this.dom.$elem.outerWidth() - this.dom.$results.outerWidth() + this.dom.$results.width();
+                this.dom.$results.css(this.options.autoWidth, autoWidth);
+            }
+            this.getItems().hover(
+                function() { self.focusItem(this); },
+                function() { /* void */ }
+            );
+            if (this.autoFill(first, filter) || this.options.selectFirst || (this.options.selectOnly && numResults === 1)) {
+                this.focusItem($first);
+            }
+            this.active_ = true;
+        } else {
+            this.hideResults();
+            this.active_ = false;
+        }
+    };
+
+    $.Autocompleter.prototype.showResult = function(value, data) {
+        if ($.isFunction(this.options.showResult)) {
+            return this.options.showResult(value, data);
+        } else {
+            return $('<p></p>').text(value).html();
+        }
+    };
+
+    $.Autocompleter.prototype.autoFill = function(value, filter) {
+        var lcValue, lcFilter, valueLength, filterLength;
+        if (this.options.autoFill && this.lastKeyPressed_ !== 8) {
+            lcValue = String(value).toLowerCase();
+            lcFilter = String(filter).toLowerCase();
+            valueLength = value.length;
+            filterLength = filter.length;
+            if (lcValue.substr(0, filterLength) === lcFilter) {
+                var d = this.getDelimiterOffsets();
+                var pad = d.start ? ' ' : ''; // if there is a preceding delimiter
+                this.setValue( pad + value );
+                var start = filterLength + d.start + pad.length;
+                var end = valueLength + d.start + pad.length;
+                this.selectRange(start, end);
+                return true;
+            }
+        }
+        return false;
+    };
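+
+    /**
+     * Example (illustrative): typing "ga" with autoFill enabled and "Galaxy" as
+     * the first result sets the field to "Galaxy" and selects the trailing
+     * "laxy", so continued typing overwrites the completed part; after a
+     * backspace (lastKeyPressed_ === 8) the fill is skipped, as guarded above.
+     */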
+
+    $.Autocompleter.prototype.focusNext = function() {
+        this.focusMove(+1);
+    };
+
+    $.Autocompleter.prototype.focusPrev = function() {
+        this.focusMove(-1);
+    };
+
+    $.Autocompleter.prototype.focusMove = function(modifier) {
+        var $items = this.getItems();
+        modifier = sanitizeInteger(modifier, 0);
+        if (modifier) {
+            for (var i = 0; i < $items.length; i++) {
+                if ($($items[i]).hasClass(this.selectClass_)) {
+                    this.focusItem(i + modifier);
+                    return;
+                }
+            }
+        }
+        this.focusItem(0);
+    };
+
+    $.Autocompleter.prototype.focusItem = function(item) {
+        var $item, $items = this.getItems();
+        if ($items.length) {
+            $items.removeClass(this.selectClass_).removeClass(this.options.selectClass);
+            if (typeof item === 'number') {
+                if (item < 0) {
+                    item = 0;
+                } else if (item >= $items.length) {
+                    item = $items.length - 1;
+                }
+                $item = $($items[item]);
+            } else {
+                $item = $(item);
+            }
+            if ($item) {
+                $item.addClass(this.selectClass_).addClass(this.options.selectClass);
+            }
+        }
+    };
+
+    $.Autocompleter.prototype.selectCurrent = function() {
+        var $item = $('li.' + this.selectClass_, this.dom.$results);
+        if ($item.length === 1) {
+            this.selectItem($item);
+        } else {
+            this.deactivate(false);
+        }
+    };
+
+    $.Autocompleter.prototype.selectItem = function($li) {
+        var value = $li.data('value');
+        var data = $li.data('data');
+        var displayValue = this.displayValue(value, data);
+        var processedDisplayValue = this.beforeUseConverter(displayValue);
+        this.lastProcessedValue_ = processedDisplayValue;
+        this.lastSelectedValue_ = processedDisplayValue;
+        var d = this.getDelimiterOffsets();
+        var delimiter = this.options.delimiterChar;
+        var elem = this.dom.$elem;
+        var extraCaretPos = 0;
+        if ( this.options.useDelimiter ) {
+            // if there is a preceding delimiter, add a space after the delimiter
+            if ( elem.val().substring(d.start-1, d.start) == delimiter && delimiter != ' ' ) {
+                displayValue = ' ' + displayValue;
+            }
+            // if there is not already a delimiter trailing this value, add it
+            if ( elem.val().substring(d.end, d.end+1) != delimiter && this.lastKeyPressed_ != this.options.delimiterKeyCode ) {
+                displayValue = displayValue + delimiter;
+            } else {
+                // move the cursor after the existing trailing delimiter
+                extraCaretPos = 1;
+            }
+        }
+        this.setValue(displayValue);
+        this.setCaret(d.start + displayValue.length + extraCaretPos);
+        this.callHook('onItemSelect', { value: value, data: data });
+        this.deactivate(true);
+        elem.focus();
+    };
+
+    $.Autocompleter.prototype.displayValue = function(value, data) {
+        if ($.isFunction(this.options.displayValue)) {
+            return this.options.displayValue(value, data);
+        }
+        return value;
+    };
+
+    $.Autocompleter.prototype.hideResults = function() {
+        this.dom.$results.hide();
+    };
+
+    $.Autocompleter.prototype.deactivate = function(finish) {
+        if (this.finishTimeout_) {
+            clearTimeout(this.finishTimeout_);
+        }
+        if (this.keyTimeout_) {
+            clearTimeout(this.keyTimeout_);
+        }
+        if (finish) {
+            if (this.lastProcessedValue_ !== this.lastSelectedValue_) {
+                if (this.options.mustMatch) {
+                    this.setValue('');
+                }
+                this.callHook('onNoMatch');
+            }
+            if (this.active_) {
+                this.callHook('onFinish');
+            }
+            this.lastKeyPressed_ = null;
+            this.lastProcessedValue_ = null;
+            this.lastSelectedValue_ = null;
+            this.active_ = false;
+        }
+        this.hideResults();
+    };
+
+    $.Autocompleter.prototype.selectRange = function(start, end) {
+        var input = this.dom.$elem.get(0);
+        if (input.setSelectionRange) {
+            input.focus();
+            input.setSelectionRange(start, end);
+        } else if (input.createTextRange) {
+            var range = input.createTextRange();
+            range.collapse(true);
+            range.moveEnd('character', end);
+            range.moveStart('character', start);
+            range.select();
+        }
+    };
+
+    /**
+     * Move caret to position
+     * @param {Number} pos
+     */
+    $.Autocompleter.prototype.setCaret = function(pos) {
+        this.selectRange(pos, pos);
+    };
+
+    /**
+     * Get caret position
+     */
+    $.Autocompleter.prototype.getCaret = function() {
+        var $elem = this.dom.$elem;
+        var elem = $elem[0];
+        var val, selection, range, start, end, stored_range;
+        if (elem.createTextRange) { // IE
+            selection = document.selection;
+            if (elem.tagName.toLowerCase() != 'textarea') {
+                val = $elem.val();
+                range = selection.createRange().duplicate();
+                range.moveEnd('character', val.length);
+                if (range.text === '') {
+                    start = val.length;
+                } else {
+                    start = val.lastIndexOf(range.text);
+                }
+                range = selection.createRange().duplicate();
+                range.moveStart('character', -val.length);
+                end = range.text.length;
+            } else {
+                range = selection.createRange();
+                stored_range = range.duplicate();
+                stored_range.moveToElementText(elem);
+                stored_range.setEndPoint('EndToEnd', range);
+                start = stored_range.text.length - range.text.length;
+                end = start + range.text.length;
+            }
+        } else {
+            start = $elem[0].selectionStart;
+            end = $elem[0].selectionEnd;
+        }
+        return {
+            start: start,
+            end: end
+        };
+    };
+
+    /**
+     * Set the value that is currently being autocompleted
+     * @param {String} value
+     */
+    $.Autocompleter.prototype.setValue = function(value) {
+        if ( this.options.useDelimiter ) {
+            // set the substring between the current delimiters
+            var val = this.dom.$elem.val();
+            var d = this.getDelimiterOffsets();
+            var preVal = val.substring(0, d.start);
+            var postVal = val.substring(d.end);
+            value = preVal + value + postVal;
+        }
+        this.dom.$elem.val(value);
+    };
+
+    /**
+     * Get the value currently being autocompleted
+     * @param {String} value
+     */
+    $.Autocompleter.prototype.getValue = function(value) {
+        if ( this.options.useDelimiter ) {
+            var d = this.getDelimiterOffsets();
+            return value.substring(d.start, d.end).trim();
+        } else {
+            return value;
+        }
+    };
+
+    /**
+     * Get the offsets of the value currently being autocompleted
+     */
+    $.Autocompleter.prototype.getDelimiterOffsets = function() {
+        var val = this.dom.$elem.val();
+        if ( this.options.useDelimiter ) {
+            var preCaretVal = val.substring(0, this.getCaret().start);
+            var start = preCaretVal.lastIndexOf(this.options.delimiterChar) + 1;
+            var postCaretVal = val.substring(this.getCaret().start);
+            var end = postCaretVal.indexOf(this.options.delimiterChar);
+            if ( end == -1 ) end = val.length;
+            end += this.getCaret().start;
+        } else {
+            start = 0;
+            end = val.length;
+        }
+        return {
+            start: start,
+            end: end
+        };
+    };
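+
+    /**
+     * Worked example (illustrative): with useDelimiter enabled, delimiterChar ','
+     * and the field value "red,green,blue", a caret inside "green" (say at
+     * position 6) yields {start: 4, end: 9}, so getValue() and setValue()
+     * operate on "green" only.
+     */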
+
+})(jQuery);
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/jquery/jquery.cookie.js b/client/galaxy/scripts/libs/jquery/jquery.cookie.js
new file mode 100644
index 0000000..a80bfa2
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.cookie.js
@@ -0,0 +1,97 @@
+/**
+ * Cookie plugin
+ *
+ * Copyright (c) 2006 Klaus Hartl (stilbuero.de)
+ * Dual licensed under the MIT and GPL licenses:
+ * http://www.opensource.org/licenses/mit-license.php
+ * http://www.gnu.org/licenses/gpl.html
+ *
+ */
+
+/**
+ * Create a cookie with the given name and value and other optional parameters.
+ *
+ * @example $.cookie('the_cookie', 'the_value');
+ * @desc Set the value of a cookie.
+ * @example $.cookie('the_cookie', 'the_value', { expires: 7, path: '/', domain: 'jquery.com', secure: true });
+ * @desc Create a cookie with all available options.
+ * @example $.cookie('the_cookie', 'the_value');
+ * @desc Create a session cookie.
+ * @example $.cookie('the_cookie', null);
+ * @desc Delete a cookie by passing null as value. Keep in mind that you have to use the same path and domain
+ *       used when the cookie was set.
+ *
+ * @param String name The name of the cookie.
+ * @param String value The value of the cookie.
+ * @param Object options An object literal containing key/value pairs to provide optional cookie attributes.
+ * @option Number|Date expires Either an integer specifying the expiration date from now on in days or a Date object.
+ *                             If a negative value is specified (e.g. a date in the past), the cookie will be deleted.
+ *                             If set to null or omitted, the cookie will be a session cookie and will not be retained
+ *                             when the browser exits.
+ * @option String path The value of the path attribute of the cookie (default: path of page that created the cookie).
+ * @option String domain The value of the domain attribute of the cookie (default: domain of page that created the cookie).
+ * @option Boolean secure If true, the secure attribute of the cookie will be set and the cookie transmission will
+ *                        require a secure protocol (like HTTPS).
+ * @type undefined
+ *
+ * @name $.cookie
+ * @cat Plugins/Cookie
+ * @author Klaus Hartl/klaus.hartl at stilbuero.de
+ */
+
+/**
+ * Get the value of a cookie with the given name.
+ *
+ * @example $.cookie('the_cookie');
+ * @desc Get the value of a cookie.
+ *
+ * @param String name The name of the cookie.
+ * @return The value of the cookie.
+ * @type String
+ *
+ * @name $.cookie
+ * @cat Plugins/Cookie
+ * @author Klaus Hartl/klaus.hartl at stilbuero.de
+ */
+jQuery.cookie = function(name, value, options) {
+    if (typeof value != 'undefined') { // name and value given, set cookie
+        options = options || {};
+        if (value === null) {
+            value = '';
+            options = $.extend({}, options); // clone the object; it would be unexpected if the caller's expires property were changed
+            options.expires = -1;
+        }
+        var expires = '';
+        if (options.expires && (typeof options.expires == 'number' || options.expires.toUTCString)) {
+            var date;
+            if (typeof options.expires == 'number') {
+                date = new Date();
+                date.setTime(date.getTime() + (options.expires * 24 * 60 * 60 * 1000));
+            } else {
+                date = options.expires;
+            }
+            expires = '; expires=' + date.toUTCString(); // use expires attribute, max-age is not supported by IE
+        }
+        // NOTE Needed to parenthesize options.path and options.domain
+        // in the following expressions, otherwise they evaluate to undefined
+        // in the packed version for some reason...
+        var path = options.path ? '; path=' + (options.path) : '';
+        var domain = options.domain ? '; domain=' + (options.domain) : '';
+        var secure = options.secure ? '; secure' : '';
+        document.cookie = [name, '=', encodeURIComponent(value), expires, path, domain, secure].join('');
+    } else { // only name given, get cookie
+        var cookieValue = null;
+        if (document.cookie && document.cookie != '') {
+            var cookies = document.cookie.split(';');
+            for (var i = 0; i < cookies.length; i++) {
+                var cookie = jQuery.trim(cookies[i]);
+                // Does this cookie string begin with the name we want?
+                if (cookie.substring(0, name.length + 1) == (name + '=')) {
+                    cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
+                    break;
+                }
+            }
+        }
+        return cookieValue;
+    }
+};
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/jquery/jquery.dynatree.js b/client/galaxy/scripts/libs/jquery/jquery.dynatree.js
new file mode 100755
index 0000000..e45bedf
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.dynatree.js
@@ -0,0 +1,3457 @@
+/*! ****************************************************************************
+	jquery.dynatree.js
+	Dynamic tree view control, with support for lazy loading of branches.
+
+	Copyright (c) 2006-2014, Martin Wendt (http://wwWendt.de)
+	Dual licensed under the MIT or GPL Version 2 licenses.
+	http://code.google.com/p/dynatree/wiki/LicenseInfo
+
+	A current version and some documentation are available at
+		http://dynatree.googlecode.com/
+
+	@version: 1.2.8
+	@date:    2015-07-04T16:44
+
+	@depends: jquery.js
+	@depends: jquery.ui.core.js
+	@depends: jquery.cookie.js
+*******************************************************************************/
+
+/* jsHint options*/
+// Note: We currently allow eval() to parse the 'data' attributes, when initializing from HTML.
+// TODO: pass jsHint with the options given in grunt.js only.
+//       The following should not be required:
+/*global alert */
+/*jshint nomen:false, smarttabs:true, eqeqeq:false, evil:true, regexp:false */
+
+/*************************************************************************
+ *  Debug functions
+ */
+
+var _canLog = true;
+
+function _log(mode, msg) {
+	/**
+	 * Usage: logMsg("%o was toggled", this);
+	 */
+	if( !_canLog ){
+		return;
+	}
+	// Remove first argument
+	var args = Array.prototype.slice.apply(arguments, [1]);
+	// Prepend timestamp
+	var dt = new Date();
+	var tag = dt.getHours() + ":" + dt.getMinutes() + ":" +
+				dt.getSeconds() + "." + dt.getMilliseconds();
+	args[0] = tag + " - " + args[0];
+
+	try {
+		switch( mode ) {
+		case "info":
+			window.console.info.apply(window.console, args);
+			break;
+		case "warn":
+			window.console.warn.apply(window.console, args);
+			break;
+		default:
+			window.console.log.apply(window.console, args);
+			break;
+		}
+	} catch(e) {
+		if( !window.console ){
+			_canLog = false; // Permanently disable when the browser does not support logging
+		}else if(e.number === -2146827850){
+			// fix for IE8, where window.console.log() exists, but does not support .apply()
+			window.console.log(args.join(", "));
+		}
+	}
+}
+
+
+function logMsg(msg) {
+	Array.prototype.unshift.apply(arguments, ["debug"]);
+	_log.apply(this, arguments);
+}
+
+
+// Forward declaration
+var getDynaTreePersistData = null;
+
+
+
+/*************************************************************************
+ *  Constants
+ */
+var DTNodeStatus_Error   = -1;
+var DTNodeStatus_Loading = 1;
+var DTNodeStatus_Ok      = 0;
+
+
+// Start of local namespace
+(function($) {
+
+/*************************************************************************
+ *  Common tool functions.
+ */
+
+var Class = {
+	create: function() {
+		return function() {
+			this.initialize.apply(this, arguments);
+		};
+	}
+};
+
+// Tool function to get dtnode from the event target:
+function getDtNodeFromElement(el) {
+	alert("getDtNodeFromElement is deprecated");
+	return $.ui.dynatree.getNode(el);
+/*
+	var iMax = 5;
+	while( el && iMax-- ) {
+		if(el.dtnode) { return el.dtnode; }
+		el = el.parentNode;
+	}
+	return null;
+*/
+}
+
+function noop() {
+}
+
+
+/* Convert number to string and prepend +/-; return empty string for 0.*/
+function offsetString(n){
+	return n === 0 ? "" : (( n > 0 ) ? ("+" + n) : ("" + n));
+}
+
+
+/* Check browser version, since $.browser was removed in jQuery 1.9 */
+function _checkBrowser(){
+	var matched, browser;
+	function uaMatch( ua ) {
+		ua = ua.toLowerCase();
+		var match = /(chrome)[ \/]([\w.]+)/.exec( ua ) ||
+			 /(webkit)[ \/]([\w.]+)/.exec( ua ) ||
+			 /(opera)(?:.*version|)[ \/]([\w.]+)/.exec( ua ) ||
+			 /(msie) ([\w.]+)/.exec( ua ) ||
+			 ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec( ua ) ||
+			 [];
+		return {
+			browser: match[ 1 ] || "",
+			version: match[ 2 ] || "0"
+		};
+	}
+	matched = uaMatch( navigator.userAgent );
+	browser = {};
+	if ( matched.browser ) {
+		browser[ matched.browser ] = true;
+		browser.version = matched.version;
+	}
+	if ( browser.chrome ) {
+		browser.webkit = true;
+	} else if ( browser.webkit ) {
+		browser.safari = true;
+	}
+	return browser;
+}
+
+
+/** Compare two dotted version strings (like '10.2.3').
+ * @returns {Integer} 0: v1 == v2, -1: v1 < v2, 1: v1 > v2
+ */
+function versionCompare(v1, v2) {
+	var v1parts = ("" + v1).split("."),
+		v2parts = ("" + v2).split("."),
+		minLength = Math.min(v1parts.length, v2parts.length),
+		p1, p2, i;
+	// Compare tuple pair-by-pair.
+	for(i = 0; i < minLength; i++) {
+		// Convert to integer if possible, because "8" > "10".
+		p1 = parseInt(v1parts[i], 10);
+		p2 = parseInt(v2parts[i], 10);
+		if (isNaN(p1)){ p1 = v1parts[i]; }
+		if (isNaN(p2)){ p2 = v2parts[i]; }
+		if (p1 == p2) {
+			continue;
+		}else if (p1 > p2) {
+			return 1;
+		}else if (p1 < p2) {
+			return -1;
+		}
+		// one operand is NaN
+		return NaN;
+	}
+	// The longer tuple is always considered 'greater'
+	if (v1parts.length === v2parts.length) {
+		return 0;
+	}
+	return (v1parts.length < v2parts.length) ? -1 : 1;
+}
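+
+/** Example (illustrative): versionCompare("1.10", "1.9") returns 1 because the
+ * components are compared numerically (10 > 9) rather than as strings; this is
+ * what makes the $.ui.version check below reliable for jQuery UI >= 1.9.
+ */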
+
+
+//var BROWSER = jQuery.browser || _checkBrowser();
+var BROWSER = _checkBrowser(); // issue 440
+var jquerySupports = {
+	// http://jqueryui.com/upgrade-guide/1.9/#deprecated-offset-option-merged-into-my-and-at
+	positionMyOfs: versionCompare($.ui.version, "1.9") >= 0  //isVersionAtLeast($.ui.version, 1, 9)
+	};
+
+
+/*************************************************************************
+ *  Class DynaTreeNode
+ */
+var DynaTreeNode = Class.create();
+
+DynaTreeNode.prototype = {
+	initialize: function(parent, tree, data) {
+		/**
+		 * @constructor
+		 */
+		this.parent = parent;
+		this.tree = tree;
+		if ( typeof data === "string" ){
+			data = { title: data };
+		}
+//      if( !data.key ){
+		if( data.key == null ){ // test for null OR undefined (issue 420)
+			data.key = "_" + tree._nodeCount++;
+		}else{
+			data.key = "" + data.key; // issue 371
+		}
+		this.data = $.extend({}, $.ui.dynatree.nodedatadefaults, data);
+		this.li = null; // not yet created
+		this.span = null; // not yet created
+		this.ul = null; // not yet created
+		this.childList = null; // no subnodes yet
+		this._isLoading = false; // Lazy content is being loaded
+		this.hasSubSel = false;
+		this.bExpanded = false;
+		this.bSelected = false;
+
+	},
+
+	toString: function() {
+		return "DynaTreeNode<" + this.data.key + ">: '" + this.data.title + "'";
+	},
+
+	toDict: function(recursive, callback) {
+		var node,
+			dict = $.extend({}, this.data);
+		dict.activate = ( this.tree.activeNode === this );
+		dict.focus = ( this.tree.focusNode === this );
+		dict.expand = this.bExpanded;
+		dict.select = this.bSelected;
+		if( callback ){
+			callback(dict);
+		}
+		if( recursive && this.childList ) {
+			dict.children = [];
+			for(var i=0, l=this.childList.length; i<l; i++ ){
+				node = this.childList[i];
+				if( !node.isStatusNode() ){
+					dict.children.push(node.toDict(true, callback));
+				}
+			}
+		} else {
+			delete dict.children;
+		}
+		return dict;
+	},
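+
+	/**
+	 * Example (illustrative): node.toDict(true) returns something like
+	 *   { key: "k1", title: "Node 1", activate: false, focus: false,
+	 *     expand: true, select: false, children: [ ... ] }
+	 * i.e. the node's data extended with its current runtime state.
+	 */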
+
+	fromDict: function(dict) {
+		/**
+		 * Update node data. If dict contains 'children', then also replace
+		 * the whole subtree.
+		 */
+		var children = dict.children;
+		if(children === undefined){
+			this.data = $.extend(this.data, dict);
+			this.render();
+			return;
+		}
+		dict = $.extend({}, dict);
+		dict.children = undefined;
+		this.data = $.extend(this.data, dict);
+		this.removeChildren();
+		this.addChild(children);
+	},
+
+	_getInnerHtml: function() {
+		var tree = this.tree,
+			opts = tree.options,
+			cache = tree.cache,
+			level = this.getLevel(),
+			data = this.data,
+			res = "",
+			imageSrc;
+		// connector (expanded, expandable or simple)
+		if( level < opts.minExpandLevel ) {
+			if(level > 1){
+				res += cache.tagConnector;
+			}
+			// .. else (i.e. for root level) skip expander/connector altogether
+		} else if( this.hasChildren() !== false ) {
+			res += cache.tagExpander;
+		} else {
+			res += cache.tagConnector;
+		}
+		// Checkbox mode
+		if( opts.checkbox && data.hideCheckbox !== true && !data.isStatusNode ) {
+			res += cache.tagCheckbox;
+		}
+		// folder or doctype icon
+		if ( data.icon ) {
+			if (data.icon.charAt(0) === "/"){
+				imageSrc = data.icon;
+			}else{
+				imageSrc = opts.imagePath + data.icon;
+			}
+			res += "<img src='" + imageSrc + "' alt='' />";
+		} else if ( data.icon === false ) {
+			// icon == false means 'no icon'
+//          noop(); // keep JSLint happy
+		} else if ( data.iconClass ) {
+			res +=  "<span class='" + " " + data.iconClass +  "'></span>";
+		} else {
+			// icon == null means 'default icon'
+			res += cache.tagNodeIcon;
+		}
+		// node title
+		var nodeTitle = "";
+		if ( opts.onCustomRender ){
+			nodeTitle = opts.onCustomRender.call(tree, this) || "";
+		}
+		if(!nodeTitle){
+			var tooltip = data.tooltip ? ' title="' + data.tooltip.replace(/\"/g, '&quot;') + '"' : '',
+				href = data.href || "#";
+			if( opts.noLink || data.noLink ) {
+				nodeTitle = '<span style="display:inline-block;" class="' + opts.classNames.title + '"' + tooltip + '>' + data.title + '</span>';
+//              this.tree.logDebug("nodeTitle: " + nodeTitle);
+			} else {
+				nodeTitle = '<a href="' + href + '" class="' + opts.classNames.title + '"' + tooltip + '>' + data.title + '</a>';
+			}
+		}
+		res += nodeTitle;
+		return res;
+	},
+
+
+	_fixOrder: function() {
+		/**
+		 * Make sure, that <li> order matches childList order.
+		 */
+		var cl = this.childList;
+		if( !cl || !this.ul ){
+			return;
+		}
+		var childLI = this.ul.firstChild;
+		for(var i=0, l=cl.length-1; i<l; i++) {
+			var childNode1 = cl[i];
+			var childNode2 = childLI.dtnode;
+			if( childNode1 !== childNode2 ) {
+				this.tree.logDebug("_fixOrder: mismatch at index " + i + ": " + childNode1 + " != " + childNode2);
+				this.ul.insertBefore(childNode1.li, childNode2.li);
+			} else {
+				childLI = childLI.nextSibling;
+			}
+		}
+	},
+
+
+	render: function(useEffects, includeInvisible) {
+		/**
+		 * Create <li><span>..</span> .. </li> tags for this node.
+		 *
+		 * <li id='KEY' dtnode=NODE> // This li contains the node's span and the list of child li's.
+		 *   <span class='title'>S S S A</span> // Span contains graphic spans and title <a> tag
+		 *   <ul> // only present, when node has children
+		 *       <li id='KEY' dtnode=NODE>child1</li>
+		 *       <li id='KEY' dtnode=NODE>child2</li>
+		 *   </ul>
+		 * </li>
+		 */
+//      this.tree.logDebug("%s.render(%s)", this, useEffects);
+		// ---
+		var tree = this.tree,
+			parent = this.parent,
+			data = this.data,
+			opts = tree.options,
+			cn = opts.classNames,
+			isLastSib = this.isLastSibling(),
+			firstTime = false;
+
+		if( !parent && !this.ul ) {
+			// Root node has only a <ul>
+			this.li = this.span = null;
+			this.ul = document.createElement("ul");
+			if( opts.minExpandLevel > 1 ){
+				this.ul.className = cn.container + " " + cn.noConnector;
+			}else{
+				this.ul.className = cn.container;
+			}
+		} else if( parent ) {
+			// Create <li><span /> </li>
+			if( ! this.li ) {
+				firstTime = true;
+				this.li = document.createElement("li");
+				this.li.dtnode = this;
+				if( data.key && opts.generateIds ){
+					this.li.id = opts.idPrefix + data.key;
+				}
+				this.span = document.createElement("span");
+				this.span.className = cn.title;
+				this.li.appendChild(this.span);
+
+				if( !parent.ul ) {
+					// This is the parent's first child: create UL tag
+					// (Hidden, because it will only be shown when the parent node is expanded)
+					parent.ul = document.createElement("ul");
+					parent.ul.style.display = "none";
+					parent.li.appendChild(parent.ul);
+//                  if( opts.minExpandLevel > this.getLevel() ){
+//                      parent.ul.className = cn.noConnector;
+//                  }
+				}
+				// set node connector images, links and text
+//              this.span.innerHTML = this._getInnerHtml();
+
+				parent.ul.appendChild(this.li);
+			}
+			// set node connector images, links and text
+			this.span.innerHTML = this._getInnerHtml();
+			// Set classes for current status
+			var cnList = [];
+			cnList.push(cn.node);
+			if( data.isFolder ){
+				cnList.push(cn.folder);
+			}
+			if( this.bExpanded ){
+				cnList.push(cn.expanded);
+			}
+			if( this.hasChildren() !== false ){
+				cnList.push(cn.hasChildren);
+			}
+			if( data.isLazy && this.childList === null ){
+				cnList.push(cn.lazy);
+			}
+			if( isLastSib ){
+				cnList.push(cn.lastsib);
+			}
+			if( this.bSelected ){
+				cnList.push(cn.selected);
+			}
+			if( this.hasSubSel ){
+				cnList.push(cn.partsel);
+			}
+			if( tree.activeNode === this ){
+				cnList.push(cn.active);
+			}
+			if( data.addClass ){
+				cnList.push(data.addClass);
+			}
+			// IE6 doesn't correctly evaluate multiple class names,
+			// so we create combined class names that can be used in the CSS
+			cnList.push(cn.combinedExpanderPrefix
+					+ (this.bExpanded ? "e" : "c")
+					+ (data.isLazy && this.childList === null ? "d" : "")
+					+ (isLastSib ? "l" : "")
+					);
+			cnList.push(cn.combinedIconPrefix
+					+ (this.bExpanded ? "e" : "c")
+					+ (data.isFolder ? "f" : "")
+					);
+			this.span.className = cnList.join(" ");
+
+			// TODO: we should not set this in the <span> tag also, if we set it here:
+			// TODO: if we set this here, we should not also set it in the <span> tag:
+
+			// Allow tweaking, binding, after node was created for the first time
+			if(firstTime && opts.onCreate){
+				opts.onCreate.call(tree, this, this.span);
+			}
+			// Hide children, if node is collapsed
+//          this.ul.style.display = ( this.bExpanded || !parent ) ? "" : "none";
+			// Allow tweaking after node state was rendered
+			if(opts.onRender){
+				opts.onRender.call(tree, this, this.span);
+			}
+		}
+		// Visit child nodes
+		if( (this.bExpanded || includeInvisible === true) && this.childList ) {
+			for(var i=0, l=this.childList.length; i<l; i++) {
+				this.childList[i].render(false, includeInvisible);
+			}
+			// Make sure the tag order matches the child array
+			this._fixOrder();
+		}
+		// Hide children, if node is collapsed
+		if( this.ul ) {
+			var isHidden = (this.ul.style.display === "none");
+			var isExpanded = !!this.bExpanded;
+//          logMsg("isHidden:%s", isHidden);
+			if( useEffects && opts.fx && (isHidden === isExpanded) ) {
+				var duration = opts.fx.duration || 200;
+				$(this.ul).animate(opts.fx, duration);
+			} else {
+				this.ul.style.display = ( this.bExpanded || !parent ) ? "" : "none";
+			}
+		}
+	},
+	/** Return '/id1/id2/id3'. */
+	getKeyPath: function(excludeSelf) {
+		var path = [],
+			sep = this.tree.options.keyPathSeparator;
+
+		this.visitParents(function(node){
+			if(node.parent){
+				path.unshift(node.data.key);
+			}
+		}, !excludeSelf);
+		return sep + path.join(sep);
+	},
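+
+	/**
+	 * Example (illustrative, assuming the default "/" keyPathSeparator): for a
+	 * node with ancestor keys "a" and "b" and own key "c", getKeyPath() returns
+	 * "/a/b/c"; getKeyPath(true) excludes the node itself and returns "/a/b".
+	 */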
+
+	getParent: function() {
+		return this.parent;
+	},
+
+	getChildren: function() {
+		if(this.hasChildren() === undefined){
+			return undefined; // Lazy node: unloaded, currently loading, or load error
+		}
+		return this.childList;
+	},
+
+	/** Check if node has children (returns undefined, if not sure). */
+	hasChildren: function() {
+		if(this.data.isLazy){
+			if(this.childList === null || this.childList === undefined){
+				// Not yet loaded
+				return undefined;
+			}else if(this.childList.length === 0){
+				// Loaded, but response was empty
+				return false;
+			}else if(this.childList.length === 1 && this.childList[0].isStatusNode()){
+				// Currently loading or load error
+				return undefined;
+			}
+			return true;
+		}
+		return !!this.childList;
+	},
+
+	isFirstSibling: function() {
+		var p = this.parent;
+		return !p || p.childList[0] === this;
+	},
+
+	isLastSibling: function() {
+		var p = this.parent;
+		return !p || p.childList[p.childList.length-1] === this;
+	},
+
+	isLoading: function() {
+		return !!this._isLoading;
+	},
+
+	getPrevSibling: function() {
+		if( !this.parent ){
+			return null;
+		}
+		var ac = this.parent.childList;
+		for(var i=1, l=ac.length; i<l; i++){ // start with 1, so prev(first) = null
+			if( ac[i] === this ){
+				return ac[i-1];
+			}
+		}
+		return null;
+	},
+
+	getNextSibling: function() {
+		if( !this.parent ){
+			return null;
+		}
+		var ac = this.parent.childList;
+		for(var i=0, l=ac.length-1; i<l; i++){ // up to length-2, so next(last) = null
+			if( ac[i] === this ){
+				return ac[i+1];
+			}
+		}
+		return null;
+	},
+
+	isStatusNode: function() {
+		return (this.data.isStatusNode === true);
+	},
+
+	isChildOf: function(otherNode) {
+		return (this.parent && this.parent === otherNode);
+	},
+
+	isDescendantOf: function(otherNode) {
+		if(!otherNode){
+			return false;
+		}
+		var p = this.parent;
+		while( p ) {
+			if( p === otherNode ){
+				return true;
+			}
+			p = p.parent;
+		}
+		return false;
+	},
+
+	countChildren: function() {
+		var cl = this.childList;
+		if( !cl ){
+			return 0;
+		}
+		var n = cl.length;
+		for(var i=0, l=n; i<l; i++){
+			var child = cl[i];
+			n += child.countChildren();
+		}
+		return n;
+	},
+
+	/**Sort child list by title.
+	 * cmd: optional compare function.
+	 * deep: optional: pass true to sort all descendant nodes.
+	 */
+	sortChildren: function(cmp, deep) {
+		var cl = this.childList;
+		if( !cl ){
+			return;
+		}
+		cmp = cmp || function(a, b) {
+//          return a.data.title === b.data.title ? 0 : a.data.title > b.data.title ? 1 : -1;
+			var x = a.data.title.toLowerCase(),
+				y = b.data.title.toLowerCase();
+			return x === y ? 0 : x > y ? 1 : -1;
+			};
+		cl.sort(cmp);
+		if( deep ){
+			for(var i=0, l=cl.length; i<l; i++){
+				if( cl[i].childList ){
+					cl[i].sortChildren(cmp, "$norender$");
+				}
+			}
+		}
+		if( deep !== "$norender$" ){
+			this.render();
+		}
+	},
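+
+	/**
+	 * Example (illustrative): sort folders before plain nodes, then by title,
+	 * recursively:
+	 *
+	 *   node.sortChildren(function(a, b) {
+	 *       var d = (b.data.isFolder ? 1 : 0) - (a.data.isFolder ? 1 : 0);
+	 *       return d !== 0 ? d : a.data.title.localeCompare(b.data.title);
+	 *   }, true);
+	 */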
+
+	_setStatusNode: function(data) {
+		// Create, modify or remove the status child node (pass 'null', to remove it).
+		var firstChild = ( this.childList ? this.childList[0] : null );
+		if( !data ) {
+			if ( firstChild && firstChild.isStatusNode()) {
+				try{
+					// I've seen exceptions here with loadKeyPath...
+					if(this.ul){
+						this.ul.removeChild(firstChild.li);
+						firstChild.li = null; // avoid leaks (issue 215)
+					}
+				}catch(e){}
+				if( this.childList.length === 1 ){
+					this.childList = [];
+				}else{
+					this.childList.shift();
+				}
+			}
+		} else if ( firstChild ) {
+			data.isStatusNode = true;
+			data.key = "_statusNode";
+			firstChild.data = data;
+			firstChild.render();
+		} else {
+			data.isStatusNode = true;
+			data.key = "_statusNode";
+			firstChild = this.addChild(data);
+		}
+	},
+
+	setLazyNodeStatus: function(lts, opts) {
+		var tooltip = (opts && opts.tooltip) ? opts.tooltip : null,
+			info = (opts && opts.info) ? " (" + opts.info + ")" : "";
+		switch( lts ) {
+			case DTNodeStatus_Ok:
+				this._setStatusNode(null);
+				$(this.span).removeClass(this.tree.options.classNames.nodeLoading);
+				this._isLoading = false;
+//              this.render();
+				if( this.tree.options.autoFocus ) {
+					if( this === this.tree.tnRoot && this.childList && this.childList.length > 0) {
+						// special case: using ajaxInit
+						this.childList[0].focus();
+					} else {
+						this.focus();
+					}
+				}
+				break;
+			case DTNodeStatus_Loading:
+				this._isLoading = true;
+				$(this.span).addClass(this.tree.options.classNames.nodeLoading);
+				// The root is hidden, so we set a temporary status child
+				if(!this.parent){
+					this._setStatusNode({
+						title: this.tree.options.strings.loading + info,
+						tooltip: tooltip,
+						addClass: this.tree.options.classNames.nodeWait
+					});
+				}
+				break;
+			case DTNodeStatus_Error:
+				this._isLoading = false;
+//              $(this.span).addClass(this.tree.options.classNames.nodeError);
+				this._setStatusNode({
+					title: this.tree.options.strings.loadError + info,
+					tooltip: tooltip,
+					addClass: this.tree.options.classNames.nodeError
+				});
+				break;
+			default:
+				throw "Bad LazyNodeStatus: '" + lts + "'.";
+		}
+	},
+
+	_parentList: function(includeRoot, includeSelf) {
+		var l = [];
+		var dtn = includeSelf ? this : this.parent;
+		while( dtn ) {
+			if( includeRoot || dtn.parent ){
+				l.unshift(dtn);
+			}
+			dtn = dtn.parent;
+		}
+		return l;
+	},
+	getLevel: function() {
+		/**
+		 * Return node depth. 0: System root node, 1: visible top-level node.
+		 */
+		var level = 0;
+		var dtn = this.parent;
+		while( dtn ) {
+			level++;
+			dtn = dtn.parent;
+		}
+		return level;
+	},
+
+	_getTypeForOuterNodeEvent: function(event) {
+		/** Return the inner node span (title, checkbox or expander) if
+		 *  event.target points to the outer span.
+		 *  This function should fix issue #93:
+		 *  FF2 ignores empty spans, when generating events (returning the parent instead).
+		 */
+		var cns = this.tree.options.classNames;
+		var target = event.target;
+		// Only process clicks on an outer node span (probably due to a FF2 event handling bug)
+		if( target.className.indexOf(cns.node) < 0 ) {
+			return null;
+		}
+		// Event coordinates, relative to outer node span:
+		var eventX = event.pageX - target.offsetLeft;
+		var eventY = event.pageY - target.offsetTop;
+
+		for(var i=0, l=target.childNodes.length; i<l; i++) {
+			var cn = target.childNodes[i];
+			var x = cn.offsetLeft - target.offsetLeft;
+			var y = cn.offsetTop - target.offsetTop;
+			var nx = cn.clientWidth, ny = cn.clientHeight;
+//          alert (cn.className + ": " + x + ", " + y + ", s:" + nx + ", " + ny);
+			if( eventX >= x && eventX <= (x+nx) && eventY >= y && eventY <= (y+ny) ) {
+//              alert("HIT "+ cn.className);
+				if( cn.className==cns.title ){
+					return "title";
+				}else if( cn.className==cns.expander ){
+					return "expander";
+				}else if( cn.className==cns.checkbox || cn.className==cns.radio ){
+					return "checkbox";
+				}else if( cn.className==cns.nodeIcon ){
+					return "icon";
+				}
+			}
+		}
+		return "prefix";
+	},
+
+	getEventTargetType: function(event) {
+		// Return the part of a node, that a click event occurred on.
+		// Note: there is no check, if the event was fired on THIS node.
+		var tcn = event && event.target ? event.target.className : "",
+			cns = this.tree.options.classNames;
+
+		if( tcn.indexOf(cns.title) >= 0 ){
+			return "title";
+		}else if( tcn.indexOf(cns.expander) >= 0 ){
+			return "expander";
+		}else if( tcn.indexOf(cns.checkbox) >= 0 || tcn.indexOf(cns.radio) >= 0 ){
+			return "checkbox";
+		}else if( tcn.indexOf(cns.nodeIcon) >= 0 ){
+			return "icon";
+		}else if( tcn.indexOf(cns.empty) >= 0 || tcn.indexOf(cns.vline) >= 0 || tcn.indexOf(cns.connector) >= 0 ){
+			return "prefix";
+		}else if( tcn.indexOf(cns.node) >= 0 ){
+			// FIX issue #93
+			return this._getTypeForOuterNodeEvent(event);
+		}
+		return null;
+	},
+
+	isVisible: function() {
+		// Return true, if all parents are expanded.
+		var parents = this._parentList(true, false);
+		for(var i=0, l=parents.length; i<l; i++){
+			if( ! parents[i].bExpanded ){ return false; }
+		}
+		return true;
+	},
+
+	makeVisible: function() {
+		// Make sure, all parents are expanded
+		var parents = this._parentList(true, false);
+		for(var i=0, l=parents.length; i<l; i++){
+			parents[i]._expand(true);
+		}
+	},
+
+	focus: function() {
+		// TODO: check, if we already have focus
+//      this.tree.logDebug("dtnode.focus(): %o", this);
+		this.makeVisible();
+		try {
+			$(this.span).find(">a").focus();
+		} catch(e) { }
+	},
+
+	isFocused: function() {
+		return (this.tree.tnFocused === this);
+	},
+
+	_activate: function(flag, fireEvents) {
+		// (De)Activate - but not focus - this node.
+		this.tree.logDebug("dtnode._activate(%o, fireEvents=%o) - %o", flag, fireEvents, this);
+		var opts = this.tree.options;
+		if( this.data.isStatusNode ){
+			return;
+		}
+		if( flag ) {
+			if ( fireEvents && opts.onQueryActivate && opts.onQueryActivate.call(this.tree, flag, this) === false ){
+				return; // Callback returned false
+			}
+			// Activate
+			if( this.tree.activeNode ) {
+				if( this.tree.activeNode === this ){
+					return;
+				}
+				this.tree.activeNode.deactivate();
+			}
+			if( opts.activeVisible ){
+				this.makeVisible();
+			}
+			this.tree.activeNode = this;
+			if( opts.persist ){
+				$.cookie(opts.cookieId + "-active", this.data.key, opts.cookie);
+			}
+			this.tree.persistence.activeKey = this.data.key;
+			$(this.span).addClass(opts.classNames.active);
+			if ( fireEvents && opts.onActivate ){
+				opts.onActivate.call(this.tree, this);
+			}
+		} else {
+			// Deactivate
+			if( this.tree.activeNode === this ) {
+				if ( opts.onQueryActivate && opts.onQueryActivate.call(this.tree, false, this) === false ){
+					return; // Callback returned false
+				}
+				$(this.span).removeClass(opts.classNames.active);
+				if( opts.persist ) {
+				// Note: we don't pass null, but ''. So the cookie is not deleted.
+				// If we passed null, we would also have to pass a COPY of opts, because $.cookie would override opts.expires (issue 84)
+					$.cookie(opts.cookieId + "-active", "", opts.cookie);
+				}
+				this.tree.persistence.activeKey = null;
+				this.tree.activeNode = null;
+				if ( fireEvents && opts.onDeactivate ){
+					opts.onDeactivate.call(this.tree, this);
+				}
+			}
+		}
+	},
+
+	activate: function() {
+		// Activate - but not focus - this node.
+//      this.tree.logDebug("dtnode.activate(): %o", this);
+		this._activate(true, true);
+	},
+
+	activateSilently: function() {
+		this._activate(true, false);
+	},
+
+	deactivate: function() {
+//      this.tree.logDebug("dtnode.deactivate(): %o", this);
+		this._activate(false, true);
+	},
+
+	isActive: function() {
+		return (this.tree.activeNode === this);
+	},
+
+	_userActivate: function() {
+		// Handle user click / [space] / [enter], according to clickFolderMode.
+		var activate = true;
+		var expand = false;
+		if ( this.data.isFolder ) {
+			switch( this.tree.options.clickFolderMode ) {
+			case 2:
+				activate = false;
+				expand = true;
+				break;
+			case 3:
+				activate = expand = true;
+				break;
+			}
+		}
+		if( this.parent === null ) {
+			expand = false;
+		}
+		if( expand ) {
+			this.toggleExpand();
+			this.focus();
+		}
+		if( activate ) {
+			this.activate();
+		}
+	},
+
+	_setSubSel: function(hasSubSel) {
+		if( hasSubSel ) {
+			this.hasSubSel = true;
+			$(this.span).addClass(this.tree.options.classNames.partsel);
+		} else {
+			this.hasSubSel = false;
+			$(this.span).removeClass(this.tree.options.classNames.partsel);
+		}
+	},
+	/**
+	 * Fix the selection and partsel status of parent nodes, according to the
+	 * current status of the end nodes.
+	 */
+	_updatePartSelectionState: function() {
+//      alert("_updatePartSelectionState " + this);
+//      this.tree.logDebug("_updatePartSelectionState() - %o", this);
+		var sel;
+		// Return `true` or `false` for end nodes and remove part-sel flag
+		if( ! this.hasChildren() ){
+			sel = (this.bSelected && !this.data.unselectable && !this.data.isStatusNode);
+			this._setSubSel(false);
+			return sel;
+		}
+		// Return `true`, `false`, or `undefined` for parent nodes
+		var i, l,
+			cl = this.childList,
+			allSelected = true,
+			allDeselected = true;
+		for(i=0, l=cl.length; i<l;  i++) {
+			var n = cl[i],
+				s = n._updatePartSelectionState();
+			if( s !== false){
+				allDeselected = false;
+			}
+			if( s !== true){
+				allSelected = false;
+			}
+		}
+		if( allSelected ){
+			sel = true;
+		} else if ( allDeselected ){
+			sel = false;
+		} else {
+			sel = undefined;
+		}
+		this._setSubSel(sel === undefined);
+		this.bSelected = (sel === true);
+		return sel;
+	},
+
+	/**
+	 * Fix the selection status after this node was (de)selected in multi-hier mode.
+	 * This includes (de)selecting all children.
+	 */
+	_fixSelectionState: function() {
+//      alert("_fixSelectionState " + this);
+//      this.tree.logDebug("_fixSelectionState(%s) - %o", this.bSelected, this);
+		var p, i, l;
+		if( this.bSelected ) {
+			// Select all children
+			this.visit(function(node){
+				node.parent._setSubSel(true);
+				if(!node.data.unselectable){
+					node._select(true, false, false);
+				}
+			});
+			// Select parents, if all children are selected
+			p = this.parent;
+			while( p ) {
+				p._setSubSel(true);
+				var allChildsSelected = true;
+				for(i=0, l=p.childList.length; i<l;  i++) {
+					var n = p.childList[i];
+					if( !n.bSelected && !n.data.isStatusNode && !n.data.unselectable) {
+					// issue 305 proposes this:
+//                  if( !n.bSelected && !n.data.isStatusNode ) {
+						allChildsSelected = false;
+						break;
+					}
+				}
+				if( allChildsSelected ){
+					p._select(true, false, false);
+				}
+				p = p.parent;
+			}
+		} else {
+			// Deselect all children
+			this._setSubSel(false);
+			this.visit(function(node){
+				node._setSubSel(false);
+				node._select(false, false, false);
+			});
+			// Deselect parents, and recalc hasSubSel
+			p = this.parent;
+			while( p ) {
+				p._select(false, false, false);
+				var isPartSel = false;
+				for(i=0, l=p.childList.length; i<l;  i++) {
+					if( p.childList[i].bSelected || p.childList[i].hasSubSel ) {
+						isPartSel = true;
+						break;
+					}
+				}
+				p._setSubSel(isPartSel);
+				p = p.parent;
+			}
+		}
+	},
+
+	_select: function(sel, fireEvents, deep) {
+		// Select - but not focus - this node.
+//      this.tree.logDebug("dtnode._select(%o) - %o", sel, this);
+		var opts = this.tree.options;
+		if( this.data.isStatusNode ){
+			return;
+		}
+		//
+		if( this.bSelected === sel ) {
+//          this.tree.logDebug("dtnode._select(%o) IGNORED - %o", sel, this);
+			return;
+		}
+		// Allow event listener to abort selection
+		if ( fireEvents && opts.onQuerySelect && opts.onQuerySelect.call(this.tree, sel, this) === false ){
+			return; // Callback returned false
+		}
+		// Force single-selection
+		if( opts.selectMode==1 && sel ) {
+			this.tree.visit(function(node){
+				if( node.bSelected ) {
+					// Deselect; assuming that in selectMode:1 there is at most one other selected node
+					node._select(false, false, false);
+					return false;
+				}
+			});
+		}
+
+		this.bSelected = sel;
+//        this.tree._changeNodeList("select", this, sel);
+
+		if( sel ) {
+			if( opts.persist ){
+				this.tree.persistence.addSelect(this.data.key);
+			}
+			$(this.span).addClass(opts.classNames.selected);
+
+			if( deep && opts.selectMode === 3 ){
+				this._fixSelectionState();
+			}
+			if ( fireEvents && opts.onSelect ){
+				opts.onSelect.call(this.tree, true, this);
+			}
+		} else {
+			if( opts.persist ){
+				this.tree.persistence.clearSelect(this.data.key);
+			}
+			$(this.span).removeClass(opts.classNames.selected);
+
+			if( deep && opts.selectMode === 3 ){
+				this._fixSelectionState();
+			}
+			if ( fireEvents && opts.onSelect ){
+				opts.onSelect.call(this.tree, false, this);
+			}
+		}
+	},
+
+	select: function(sel) {
+		// Select - but not focus - this node.
+//      this.tree.logDebug("dtnode.select(%o) - %o", sel, this);
+		if( this.data.unselectable ){
+			return this.bSelected;
+		}
+		return this._select(sel!==false, true, true);
+	},
+
+	toggleSelect: function() {
+//      this.tree.logDebug("dtnode.toggleSelect() - %o", this);
+		return this.select(!this.bSelected);
+	},
+
+	isSelected: function() {
+		return this.bSelected;
+	},
+
+	isLazy: function() {
+		return !!this.data.isLazy;
+	},
+
+	_loadContent: function() {
+		try {
+			var opts = this.tree.options;
+			this.tree.logDebug("_loadContent: start - %o", this);
+			this.setLazyNodeStatus(DTNodeStatus_Loading);
+			if( true === opts.onLazyRead.call(this.tree, this) ) {
+				// If the function returns 'true', we assume that the loading is done:
+				this.setLazyNodeStatus(DTNodeStatus_Ok);
+				// Otherwise (i.e. if the loading was started as an asynchronous process)
+				// the onLazyRead(dtnode) handler is expected to call dtnode.setLazyNodeStatus(DTNodeStatus_Ok/_Error) when done.
+				this.tree.logDebug("_loadContent: succeeded - %o", this);
+			}
+		} catch(e) {
+			this.tree.logWarning("_loadContent: failed - %o", e);
+			this.setLazyNodeStatus(DTNodeStatus_Error, {tooltip: ""+e});
+		}
+	},
+
+	_expand: function(bExpand, forceSync) {
+		if( this.bExpanded === bExpand ) {
+			this.tree.logDebug("dtnode._expand(%o) IGNORED - %o", bExpand, this);
+			return;
+		}
+		this.tree.logDebug("dtnode._expand(%o) - %o", bExpand, this);
+		var opts = this.tree.options;
+		if( !bExpand && this.getLevel() < opts.minExpandLevel ) {
+			this.tree.logDebug("dtnode._expand(%o) prevented collapse - %o", bExpand, this);
+			return;
+		}
+		if ( opts.onQueryExpand && opts.onQueryExpand.call(this.tree, bExpand, this) === false ){
+			return; // Callback returned false
+		}
+		this.bExpanded = bExpand;
+
+		// Persist expand state
+		if( opts.persist ) {
+			if( bExpand ){
+				this.tree.persistence.addExpand(this.data.key);
+			}else{
+				this.tree.persistence.clearExpand(this.data.key);
+			}
+		}
+		// Do not apply animations in init phase, or before lazy-loading
+		var allowEffects = !(this.data.isLazy && this.childList === null)
+			&& !this._isLoading
+			&& !forceSync;
+		this.render(allowEffects);
+
+		// Auto-collapse mode: collapse all siblings
+		if( this.bExpanded && this.parent && opts.autoCollapse ) {
+			var parents = this._parentList(false, true);
+			for(var i=0, l=parents.length; i<l; i++){
+				parents[i].collapseSiblings();
+			}
+		}
+		// If the currently active node is now hidden, deactivate it
+		if( opts.activeVisible && this.tree.activeNode && ! this.tree.activeNode.isVisible() ) {
+			this.tree.activeNode.deactivate();
+		}
+		// Expanding a lazy node: set 'loading...' and call callback
+		if( bExpand && this.data.isLazy && this.childList === null && !this._isLoading ) {
+			this._loadContent();
+			return;
+		}
+		if ( opts.onExpand ){
+			opts.onExpand.call(this.tree, bExpand, this);
+		}
+	},
+
+	isExpanded: function() {
+		return this.bExpanded;
+	},
+
+	expand: function(flag) {
+		flag = (flag !== false);
+		if( !this.childList && !this.data.isLazy && flag ){
+			return; // Prevent expanding empty nodes
+		} else if( this.parent === null && !flag ){
+			return; // Prevent collapsing the root
+		}
+		this._expand(flag);
+	},
+
+	scheduleAction: function(mode, ms) {
+		/** Schedule an activity for delayed execution (cancelling any pending request).
+		 *  scheduleAction('cancel') will just cancel a pending request.
+		 */
+		if( this.tree.timer ) {
+			clearTimeout(this.tree.timer);
+			this.tree.logDebug("clearTimeout(%o)", this.tree.timer);
+		}
+		var self = this; // required for closures
+		switch (mode) {
+		case "cancel":
+			// Nothing to do: the pending timer was already cleared above
+			break;
+		case "expand":
+			this.tree.timer = setTimeout(function(){
+				self.tree.logDebug("setTimeout: trigger expand");
+				self.expand(true);
+			}, ms);
+			break;
+		case "activate":
+			this.tree.timer = setTimeout(function(){
+				self.tree.logDebug("setTimeout: trigger activate");
+				self.activate();
+			}, ms);
+			break;
+		default:
+			throw "Invalid mode " + mode;
+		}
+		this.tree.logDebug("setTimeout(%s, %s): %s", mode, ms, this.tree.timer);
+	},
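+
+	/* Usage sketch (illustrative only, not part of the original source):
+	 * delay-expand a folder, e.g. while hovering during drag'n'drop, and
+	 * cancel the pending timer when the cursor leaves again:
+	 *
+	 *     node.scheduleAction("expand", 1000); // expand after 1000 ms
+	 *     node.scheduleAction("cancel");       // abort the pending expand
+	 */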
+
+	toggleExpand: function() {
+		this.expand(!this.bExpanded);
+	},
+
+	collapseSiblings: function() {
+		if( this.parent === null ){
+			return;
+		}
+		var ac = this.parent.childList;
+		for (var i=0, l=ac.length; i<l; i++) {
+			if ( ac[i] !== this && ac[i].bExpanded ){
+				ac[i]._expand(false);
+			}
+		}
+	},
+
+	_onClick: function(event) {
+//      this.tree.logDebug("dtnode.onClick(" + event.type + "): dtnode:" + this + ", button:" + event.button + ", which: " + event.which);
+		var targetType = this.getEventTargetType(event);
+		if( targetType === "expander" ) {
+			// Clicking the expander icon always expands/collapses
+			this.toggleExpand();
+			this.focus(); // issue 95
+		} else if( targetType === "checkbox" ) {
+			// Clicking the checkbox always (de)selects
+			this.toggleSelect();
+			this.focus(); // issue 95
+		} else {
+			this._userActivate();
+			var aTag = this.span.getElementsByTagName("a");
+			if(aTag[0]){
+				// issue 154, 313
+				if(!(BROWSER.msie && parseInt(BROWSER.version, 10) < 9)){
+					aTag[0].focus();
+				}
+			}else{
+				// 'noLink' option was set
+				return true;
+			}
+		}
+		// Make sure that clicks stop, otherwise <a href='#'> jumps to the top
+		event.preventDefault();
+	},
+
+	_onDblClick: function(event) {
+//      this.tree.logDebug("dtnode.onDblClick(" + event.type + "): dtnode:" + this + ", button:" + event.button + ", which: " + event.which);
+	},
+
+	_onKeydown: function(event) {
+//      this.tree.logDebug("dtnode.onKeydown(" + event.type + "): dtnode:" + this + ", charCode:" + event.charCode + ", keyCode: " + event.keyCode + ", which: " + event.which);
+		var handled = true,
+			sib;
+//      alert("keyDown" + event.which);
+
+		switch( event.which ) {
+			// charCodes:
+//          case 43: // '+'
+			case 107: // '+'
+			case 187: // '+' @ Chrome, Safari
+				if( !this.bExpanded ){ this.toggleExpand(); }
+				break;
+//          case 45: // '-'
+			case 109: // '-'
+			case 189: // '-' @ Chrome, Safari
+				if( this.bExpanded ){ this.toggleExpand(); }
+				break;
+			//~ case 42: // '*'
+				//~ break;
+			//~ case 47: // '/'
+				//~ break;
+			// case 13: // <enter>
+				// <enter> on a focused <a> tag seems to generate a click-event.
+				// this._userActivate();
+				// break;
+			case 32: // <space>
+				this._userActivate();
+				break;
+			case 8: // <backspace>
+				if( this.parent ){
+					this.parent.focus();
+				}
+				break;
+			case 37: // <left>
+				if( this.bExpanded ) {
+					this.toggleExpand();
+					this.focus();
+//              } else if( this.parent && (this.tree.options.rootVisible || this.parent.parent) ) {
+				} else if( this.parent && this.parent.parent ) {
+					this.parent.focus();
+				}
+				break;
+			case 39: // <right>
+				if( !this.bExpanded && (this.childList || this.data.isLazy) ) {
+					this.toggleExpand();
+					this.focus();
+				} else if( this.childList ) {
+					this.childList[0].focus();
+				}
+				break;
+			case 38: // <up>
+				sib = this.getPrevSibling();
+				while( sib && sib.bExpanded && sib.childList ){
+					sib = sib.childList[sib.childList.length-1];
+				}
+//              if( !sib && this.parent && (this.tree.options.rootVisible || this.parent.parent) )
+				if( !sib && this.parent && this.parent.parent ){
+					sib = this.parent;
+				}
+				if( sib ){
+					sib.focus();
+				}
+				break;
+			case 40: // <down>
+				if( this.bExpanded && this.childList ) {
+					sib = this.childList[0];
+				} else {
+					var parents = this._parentList(false, true);
+					for(var i=parents.length-1; i>=0; i--) {
+						sib = parents[i].getNextSibling();
+						if( sib ){ break; }
+					}
+				}
+				if( sib ){
+					sib.focus();
+				}
+				break;
+			default:
+				handled = false;
+		}
+		// Return false, if handled, to prevent default processing
+//      return !handled;
+		if(handled){
+			event.preventDefault();
+		}
+	},
+
+	_onKeypress: function(event) {
+		// onKeypress is only hooked to allow user callbacks.
+		// We don't process it, because IE and Safari don't fire keypress for cursor keys.
+//      this.tree.logDebug("dtnode.onKeypress(" + event.type + "): dtnode:" + this + ", charCode:" + event.charCode + ", keyCode: " + event.keyCode + ", which: " + event.which);
+	},
+
+	_onFocus: function(event) {
+		// Handles blur and focus events.
+//      this.tree.logDebug("dtnode._onFocus(%o): %o", event, this);
+		var opts = this.tree.options;
+		if ( event.type == "blur" || event.type == "focusout" ) {
+			if ( opts.onBlur ){
+				opts.onBlur.call(this.tree, this);
+			}
+			if( this.tree.tnFocused ){
+				$(this.tree.tnFocused.span).removeClass(opts.classNames.focused);
+			}
+			this.tree.tnFocused = null;
+			if( opts.persist ){
+				$.cookie(opts.cookieId + "-focus", "", opts.cookie);
+			}
+		} else if ( event.type=="focus" || event.type=="focusin") {
+			// Fix: sometimes the blur event is not generated
+			if( this.tree.tnFocused && this.tree.tnFocused !== this ) {
+				this.tree.logDebug("dtnode.onFocus: out of sync: curFocus: %o", this.tree.tnFocused);
+				$(this.tree.tnFocused.span).removeClass(opts.classNames.focused);
+			}
+			this.tree.tnFocused = this;
+			if ( opts.onFocus ){
+				opts.onFocus.call(this.tree, this);
+			}
+			$(this.tree.tnFocused.span).addClass(opts.classNames.focused);
+			if( opts.persist ){
+				$.cookie(opts.cookieId + "-focus", this.data.key, opts.cookie);
+			}
+		}
+		// TODO: return anything?
+//      return false;
+	},
+
+	visit: function(fn, includeSelf) {
+		// Call fn(node) for all child nodes. Stop iteration if fn() returns false.
+		var res = true;
+		if( includeSelf === true ) {
+			res = fn(this);
+			if( res === false || res === "skip" ){
+				return res;
+			}
+		}
+		if(this.childList){
+			for(var i=0, l=this.childList.length; i<l; i++){
+				res = this.childList[i].visit(fn, true);
+				if( res === false ){
+					break;
+				}
+			}
+		}
+		return res;
+	},
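+
+	/* Usage sketch (illustrative, not part of the original source): collect
+	 * the keys of all selected descendants; returning false from fn() would
+	 * abort the traversal early:
+	 *
+	 *     var keys = [];
+	 *     node.visit(function(n){ if( n.bSelected ){ keys.push(n.data.key); } });
+	 */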
+
+	visitParents: function(fn, includeSelf) {
+		// Visit parent nodes (bottom up)
+		if(includeSelf && fn(this) === false){
+			return false;
+		}
+		var p = this.parent;
+		while( p ) {
+			if(fn(p) === false){
+				return false;
+			}
+			p = p.parent;
+		}
+		return true;
+	},
+
+	remove: function() {
+		// Remove this node
+//      this.tree.logDebug ("%s.remove()", this);
+		if ( this === this.tree.root ){
+			throw "Cannot remove system root";
+		}
+		return this.parent.removeChild(this);
+	},
+
+	removeChild: function(tn) {
+		// Remove tn from list of direct children.
+		var ac = this.childList;
+		if( ac.length === 1 ) {
+			if( tn !== ac[0] ){
+				throw "removeChild: invalid child";
+			}
+			return this.removeChildren();
+		}
+		if( tn === this.tree.activeNode ){
+			tn.deactivate();
+		}
+		if( this.tree.options.persist ) {
+			if( tn.bSelected ){
+				this.tree.persistence.clearSelect(tn.data.key);
+			}
+			if ( tn.bExpanded ){
+				this.tree.persistence.clearExpand(tn.data.key);
+			}
+		}
+		tn.removeChildren(true);
+		if(this.ul && tn.li){
+//          $("li", $(this.ul)).remove(); // issue 399
+			this.ul.removeChild(tn.li); // issue 402
+		}
+		for(var i=0, l=ac.length; i<l; i++) {
+			if( ac[i] === tn ) {
+				this.childList.splice(i, 1);
+//              delete tn;  // JSLint complained
+				break;
+			}
+		}
+	},
+
+	removeChildren: function(isRecursiveCall, retainPersistence) {
+		// Remove all child nodes (more efficiently than recursive remove())
+		this.tree.logDebug("%s.removeChildren(%o)", this, isRecursiveCall);
+		var tree = this.tree;
+		var ac = this.childList;
+		if( ac ) {
+			for(var i=0, l=ac.length; i<l; i++) {
+				var tn = ac[i];
+				if ( tn === tree.activeNode && !retainPersistence ){
+					tn.deactivate();
+				}
+				if( this.tree.options.persist && !retainPersistence ) {
+					if( tn.bSelected ){
+						this.tree.persistence.clearSelect(tn.data.key);
+					}
+					if ( tn.bExpanded ){
+						this.tree.persistence.clearExpand(tn.data.key);
+					}
+				}
+				tn.removeChildren(true, retainPersistence);
+				if(this.ul && tn.li){
+//                  this.ul.removeChild(tn.li);
+					$("li", $(this.ul)).remove(); // issue 231
+				}
+//              delete tn;  JSLint complained
+			}
+			// Set to 'null' which is interpreted as 'not yet loaded' for lazy
+			// nodes
+			this.childList = null;
+		}
+		if( ! isRecursiveCall ) {
+//          this._expand(false);
+//          this.isRead = false;
+			this._isLoading = false;
+			this.render();
+		}
+	},
+
+	setTitle: function(title) {
+		this.fromDict({title: title});
+	},
+
+	reload: function(force) {
+		throw "Use reloadChildren() instead";
+	},
+
+	reloadChildren: function(callback) {
+		// Reload lazy content (expansion state is maintained).
+		if( this.parent === null ){
+			throw "Use tree.reload() instead";
+		}else if( ! this.data.isLazy ){
+			throw "node.reloadChildren() requires lazy nodes.";
+		}
+		// appendAjax triggers 'nodeLoaded' event.
+		// We listen to this, if a callback was passed to reloadChildren
+		if(callback){
+			var self = this;
+			var eventType = "nodeLoaded.dynatree." + this.tree.$tree.attr("id")
+				+ "." + this.data.key;
+			this.tree.$tree.bind(eventType, function(e, node, isOk){
+				self.tree.$tree.unbind(eventType);
+				self.tree.logDebug("loaded %o, %o, %o", e, node, isOk);
+				if(node !== self){
+					throw "got invalid load event";
+				}
+				callback.call(self.tree, node, isOk);
+			});
+		}
+		// The expansion state is maintained
+		this.removeChildren();
+		this._loadContent();
+//      if( this.bExpanded ) {
+//          // Remove children first, to prevent effects being applied
+//          this.removeChildren();
+//          // then force re-expand to trigger lazy loading
+////            this.expand(false);
+////            this.expand(true);
+//          this._loadContent();
+//      } else {
+//          this.removeChildren();
+//          this._loadContent();
+//      }
+	},
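+
+	/* Usage sketch (illustrative only): reload a lazy node and react when
+	 * loading has finished; the callback receives the node and an isOk flag:
+	 *
+	 *     node.reloadChildren(function(node, isOk){
+	 *         if( !isOk ){ alert("Reload of " + node.data.title + " failed"); }
+	 *     });
+	 */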
+
+	/**
+	 * Make sure the node with a given key path is available in the tree.
+	 */
+	_loadKeyPath: function(keyPath, callback) {
+		var tree = this.tree;
+		tree.logDebug("%s._loadKeyPath(%s)", this, keyPath);
+		if(keyPath === ""){
+			throw "Key path must not be empty";
+		}
+		var segList = keyPath.split(tree.options.keyPathSeparator);
+		if(segList[0] === ""){
+			throw "Key path must be relative (don't start with '/')";
+		}
+		var seg = segList.shift();
+		if(this.childList){
+			for(var i=0, l=this.childList.length; i < l; i++){
+				var child = this.childList[i];
+				if( child.data.key === seg ){
+					if(segList.length === 0) {
+						// Found the end node
+						callback.call(tree, child, "ok");
+
+					}else if(child.data.isLazy && (child.childList === null || child.childList === undefined)){
+						tree.logDebug("%s._loadKeyPath(%s) -> reloading %s...", this, keyPath, child);
+						var self = this;
+						// Note: this line gives a JSLint warning (Don't make functions within a loop)
+						/*jshint loopfunc:true */
+						child.reloadChildren(function(node, isOk){
+							// After loading, look for direct child with that key
+							if(isOk){
+								tree.logDebug("%s._loadKeyPath(%s) -> reloaded %s.", node, keyPath, node);
+								callback.call(tree, child, "loaded");
+								node._loadKeyPath(segList.join(tree.options.keyPathSeparator), callback);
+							}else{
+								tree.logWarning("%s._loadKeyPath(%s) -> reloadChildren() failed.", self, keyPath);
+								callback.call(tree, child, "error");
+							}
+						});
+						// We can ignore it, since the function will only be executed once and the loop is exited afterwards.
+						// See also http://stackoverflow.com/questions/3037598/how-to-get-around-the-jslint-error-dont-make-functions-within-a-loop
+					} else {
+						callback.call(tree, child, "loaded");
+						// Look for direct child with that key
+						child._loadKeyPath(segList.join(tree.options.keyPathSeparator), callback);
+					}
+					return;
+				}
+			}
+		}
+		// Could not find key
+		// Callback params: child: undefined, the segment, isEndNode (segList.length === 0)
+		callback.call(tree, undefined, "notfound", seg, segList.length === 0);
+		tree.logWarning("Node not found: " + seg);
+		return;
+	},
+
+	resetLazy: function() {
+		// Discard lazy content.
+		if( this.parent === null ){
+			throw "Use tree.reload() instead";
+		}else if( ! this.data.isLazy ){
+			throw "node.resetLazy() requires lazy nodes.";
+		}
+		this.expand(false);
+		this.removeChildren();
+	},
+
+	_addChildNode: function(dtnode, beforeNode) {
+		/**
+		 * Internal function to add a single DynaTreeNode as a child.
+		 */
+		var tree = this.tree,
+			opts = tree.options,
+			pers = tree.persistence;
+
+//      tree.logDebug("%s._addChildNode(%o)", this, dtnode);
+
+		// --- Update and fix dtnode attributes if necessary
+		dtnode.parent = this;
+//      if( beforeNode && (beforeNode.parent !== this || beforeNode === dtnode ) )
+//          throw "<beforeNode> must be another child of <this>";
+
+		// --- Add dtnode as a child
+		if ( this.childList === null ) {
+			this.childList = [];
+		} else if( ! beforeNode ) {
+			// Fix 'lastsib'
+			if(this.childList.length > 0) {
+				$(this.childList[this.childList.length-1].span).removeClass(opts.classNames.lastsib);
+			}
+		}
+		if( beforeNode ) {
+			var iBefore = $.inArray(beforeNode, this.childList);
+			if( iBefore < 0 ){
+				throw "<beforeNode> must be a child of <this>";
+			}
+			this.childList.splice(iBefore, 0, dtnode);
+		} else {
+			// Append node
+			this.childList.push(dtnode);
+		}
+
+		// --- Handle persistence
+		// Initial status is read from cookies, if persistence is active and
+		// cookies are already present.
+		// Otherwise the status is read from the data attributes and then persisted.
+		var isInitializing = tree.isInitializing();
+		if( opts.persist && pers.cookiesFound && isInitializing ) {
+			// Init status from cookies
+//          tree.logDebug("init from cookie, pa=%o, dk=%o", pers.activeKey, dtnode.data.key);
+			if( pers.activeKey === dtnode.data.key ){
+				tree.activeNode = dtnode;
+			}
+			if( pers.focusedKey === dtnode.data.key ){
+				tree.focusNode = dtnode;
+			}
+			dtnode.bExpanded = ($.inArray(dtnode.data.key, pers.expandedKeyList) >= 0);
+			dtnode.bSelected = ($.inArray(dtnode.data.key, pers.selectedKeyList) >= 0);
+//          tree.logDebug("    key=%o, bSelected=%o", dtnode.data.key, dtnode.bSelected);
+		} else {
+			// Init status from data (Note: we write the cookies after the init phase)
+//          tree.logDebug("init from data");
+			if( dtnode.data.activate ) {
+				tree.activeNode = dtnode;
+				if( opts.persist ){
+					pers.activeKey = dtnode.data.key;
+				}
+			}
+			if( dtnode.data.focus ) {
+				tree.focusNode = dtnode;
+				if( opts.persist ){
+					pers.focusedKey = dtnode.data.key;
+				}
+			}
+			dtnode.bExpanded = ( dtnode.data.expand === true ); // Collapsed by default
+			if( dtnode.bExpanded && opts.persist ){
+				pers.addExpand(dtnode.data.key);
+			}
+			dtnode.bSelected = ( dtnode.data.select === true ); // Deselected by default
+/*
+			Doesn't work, because pers.selectedKeyList may be null
+			if( dtnode.bSelected && opts.selectMode==1
+				&& pers.selectedKeyList && pers.selectedKeyList.length>0 ) {
+				tree.logWarning("Ignored multi-selection in single-mode for %o", dtnode);
+				dtnode.bSelected = false; // Fixing bad input data (multi selection for mode:1)
+			}
+*/
+			if( dtnode.bSelected && opts.persist ){
+				pers.addSelect(dtnode.data.key);
+			}
+		}
+
+		// Always expand, if it's below minExpandLevel
+//      tree.logDebug ("%s._addChildNode(%o), l=%o", this, dtnode, dtnode.getLevel());
+		if ( opts.minExpandLevel >= dtnode.getLevel() ) {
+//          tree.logDebug ("Force expand for %o", dtnode);
+			this.bExpanded = true;
+		}
+
+		// In multi-hier mode, update the parents selection state
+		// issue #82: only if not initializing, because the children may not exist yet
+//      if( !dtnode.data.isStatusNode && opts.selectMode==3 && !isInitializing )
+//          dtnode._fixSelectionState();
+
+		// In multi-hier mode, update the parents selection state
+		if( dtnode.bSelected && opts.selectMode==3 ) {
+			var p = this;
+			while( p ) {
+				if( !p.hasSubSel ){
+					p._setSubSel(true);
+				}
+				p = p.parent;
+			}
+		}
+		// render this node and the new child
+		if ( tree.bEnableUpdate ){
+			this.render();
+		}
+		return dtnode;
+	},
+
+	addChild: function(obj, beforeNode) {
+		/**
+		 * Add a node object as child.
+		 *
+		 * This should be the only place where a DynaTreeNode is constructed!
+		 * (Except for the root node creation in the tree constructor)
+		 *
+		 * @param obj A JS object (may be recursive) or an array of those.
+		 * @param {DynaTreeNode} beforeNode (optional) sibling node.
+		 *
+		 * Data format: array of node objects, with optional 'children' attributes.
+		 * [
+		 *  { title: "t1", isFolder: true, ... },
+		 *  { title: "t2", isFolder: true, ...,
+		 *      children: [
+		 *          {title: "t2.1", ..},
+		 *          {..}
+		 *          ]
+		 *  }
+		 * ]
+		 * A simple object is also accepted instead of an array.
+		 *
+		 */
+//      this.tree.logDebug("%s.addChild(%o, %o)", this, obj, beforeNode);
+		if(typeof(obj) == "string"){
+			throw "Invalid data type for " + obj;
+		}else if( !obj || obj.length === 0 ){ // Passed null or undefined or empty array
+			return;
+		}else if( obj instanceof DynaTreeNode ){
+			return this._addChildNode(obj, beforeNode);
+		}
+
+		if( !obj.length ){ // Passed a single data object
+			obj = [ obj ];
+		}
+		var prevFlag = this.tree.enableUpdate(false);
+
+		var tnFirst = null;
+		for (var i=0, l=obj.length; i<l; i++) {
+			var data = obj[i];
+			var dtnode = this._addChildNode(new DynaTreeNode(this, this.tree, data), beforeNode);
+			if( !tnFirst ){
+				tnFirst = dtnode;
+			}
+			// Add child nodes recursively
+			if( data.children ){
+				dtnode.addChild(data.children, null);
+			}
+		}
+		this.tree.enableUpdate(prevFlag);
+		return tnFirst;
+	},
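+
+	/* Usage sketch (illustrative only): append a folder with two children,
+	 * using the nested data format described in the docstring above:
+	 *
+	 *     node.addChild({
+	 *         title: "Folder", isFolder: true,
+	 *         children: [ {title: "Doc 1"}, {title: "Doc 2"} ]
+	 *     });
+	 */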
+
+	append: function(obj) {
+		this.tree.logWarning("node.append() is deprecated (use node.addChild() instead).");
+		return this.addChild(obj, null);
+	},
+
+	appendAjax: function(ajaxOptions) {
+		var self = this;
+		this.removeChildren(false, true);
+		this.setLazyNodeStatus(DTNodeStatus_Loading);
+		// Debug feature: force a delay, to simulate slow loading...
+		if(ajaxOptions.debugLazyDelay){
+			var ms = ajaxOptions.debugLazyDelay;
+			ajaxOptions.debugLazyDelay = 0;
+			this.tree.logInfo("appendAjax: waiting for debugLazyDelay " + ms);
+			setTimeout(function(){self.appendAjax(ajaxOptions);}, ms);
+			return;
+		}
+		// Ajax option inheritance: $.ajaxSetup < $.ui.dynatree.prototype.options.ajaxDefaults < tree.options.ajaxDefaults < ajaxOptions
+		var orgSuccess = ajaxOptions.success,
+			orgError = ajaxOptions.error,
+			eventType = "nodeLoaded.dynatree." + this.tree.$tree.attr("id") + "." + this.data.key;
+		var options = $.extend({}, this.tree.options.ajaxDefaults, ajaxOptions, {
+			success: function(data, textStatus, jqXHR){
+				// <this> is the request options
+//              self.tree.logDebug("appendAjax().success");
+				var prevPhase = self.tree.phase,
+					options = self.tree.options; // #473
+
+				self.tree.phase = "init";
+				// postProcess is similar to the standard dataFilter hook,
+				// but it is also called for JSONP
+				if( options.postProcess ){
+					data = options.postProcess.call(this, data, this.dataType);
+				}
+				// Process ASPX WebMethod JSON object inside "d" property
+				// http://code.google.com/p/dynatree/issues/detail?id=202
+				else if (data && data.hasOwnProperty("d")) {
+				   data = (typeof data.d) == "string" ? $.parseJSON(data.d) : data.d;
+				}
+				if(!$.isArray(data) || data.length !== 0){
+					self.addChild(data, null);
+				}
+				self.tree.phase = "postInit";
+				if( orgSuccess ){
+					orgSuccess.call(options, self, data, textStatus);
+				}
+				self.tree.logDebug("trigger " + eventType);
+				self.tree.$tree.trigger(eventType, [self, true]);
+				self.tree.phase = prevPhase;
+				// This should be the last command, so node._isLoading is true
+				// while the callbacks run
+				self.setLazyNodeStatus(DTNodeStatus_Ok);
+				if($.isArray(data) && data.length === 0){
+					// Set to [] which is interpreted as 'no children' for lazy
+					// nodes
+					self.childList = [];
+					self.render();
+				}
+				},
+			error: function(jqXHR, textStatus, errorThrown){
+				// <this> is the request options
+				self.tree.logWarning("appendAjax failed:", textStatus, ":\n", jqXHR, "\n", errorThrown);
+				if( orgError ){
+					orgError.call(options, self, jqXHR, textStatus, errorThrown);
+				}
+				self.tree.$tree.trigger(eventType, [self, false]);
+				self.setLazyNodeStatus(DTNodeStatus_Error, {info: textStatus, tooltip: "" + errorThrown});
+				}
+		});
+		$.ajax(options);
+	},
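+
+	/* Usage sketch (illustrative; the URL and request parameter are made up):
+	 * appendAjax() is typically called from the onLazyRead handler, to load
+	 * the children of a lazy node on first expansion:
+	 *
+	 *     onLazyRead: function(node){
+	 *         node.appendAjax({ url: "/getChildren",
+	 *                           data: { key: node.data.key } });
+	 *     }
+	 */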
+
+	move: function(targetNode, mode) {
+		/**Move this node to targetNode.
+		 *  mode 'child': append this node as last child of targetNode.
+		 *                This is the default. To be compatible with the D'n'd
+		 *                hitMode, we also accept 'over'.
+		 *  mode 'before': add this node as sibling before targetNode.
+		 *  mode 'after': add this node as sibling after targetNode.
+		 */
+		var pos;
+		if(this === targetNode){
+			return;
+		}
+		if( !this.parent  ){
+			throw "Cannot move system root";
+		}
+		if(mode === undefined || mode == "over"){
+			mode = "child";
+		}
+		var prevParent = this.parent;
+		var targetParent = (mode === "child") ? targetNode : targetNode.parent;
+		if( targetParent.isDescendantOf(this) ){
+			throw "Cannot move a node to its own descendant";
+		}
+		// Unlink this node from current parent
+		if( this.parent.childList.length == 1 ) {
+			this.parent.childList = this.parent.data.isLazy ? [] : null;
+			this.parent.bExpanded = false;
+		} else {
+			pos = $.inArray(this, this.parent.childList);
+			if( pos < 0 ){
+				throw "Internal error";
+			}
+			this.parent.childList.splice(pos, 1);
+		}
+		// Remove from source DOM parent
+		if(this.parent.ul && this.li){
+			this.parent.ul.removeChild(this.li);
+		}
+
+		// Insert this node to target parent's child list
+		this.parent = targetParent;
+		if( targetParent.hasChildren() ) {
+			switch(mode) {
+			case "child":
+				// Append to existing target children
+				targetParent.childList.push(this);
+				break;
+			case "before":
+				// Insert this node before target node
+				pos = $.inArray(targetNode, targetParent.childList);
+				if( pos < 0 ){
+					throw "Internal error";
+				}
+				targetParent.childList.splice(pos, 0, this);
+				break;
+			case "after":
+				// Insert this node after target node
+				pos = $.inArray(targetNode, targetParent.childList);
+				if( pos < 0 ){
+					throw "Internal error";
+				}
+				targetParent.childList.splice(pos+1, 0, this);
+				break;
+			default:
+				throw "Invalid mode " + mode;
+			}
+		} else {
+			targetParent.childList = [ this ];
+		}
+		// Parent has no <ul> tag yet:
+		if( !targetParent.ul ) {
+			// This is the parent's first child: create UL tag
+			// (Hidden, because it will be shown when the parent is expanded)
+			targetParent.ul = document.createElement("ul");
+			targetParent.ul.style.display = "none";
+			if( targetParent.li ){
+				targetParent.li.appendChild(targetParent.ul);
+			}
+		}
+		// Issue 319: Add to target DOM parent (only if the node was already rendered, i.e. expanded)
+		if(this.li){
+			targetParent.ul.appendChild(this.li);
+		}
+
+		if( this.tree !== targetNode.tree ) {
+			// Fix node.tree for all source nodes
+			this.visit(function(node){
+				node.tree = targetNode.tree;
+			}, true);
+			throw "Not yet implemented.";
+		}
+		// TODO: fix selection state
+		// TODO: fix active state
+		if( !prevParent.isDescendantOf(targetParent)) {
+			prevParent.render();
+		}
+		if( !targetParent.isDescendantOf(prevParent) ) {
+			targetParent.render();
+		}
+//      this.tree.redraw();
+/*
+		var tree = this.tree;
+		var opts = tree.options;
+		var pers = tree.persistence;
+
+
+		// Always expand, if it's below minExpandLevel
+//      tree.logDebug ("%s._addChildNode(%o), l=%o", this, dtnode, dtnode.getLevel());
+		if ( opts.minExpandLevel >= dtnode.getLevel() ) {
+//          tree.logDebug ("Force expand for %o", dtnode);
+			this.bExpanded = true;
+		}
+
+		// In multi-hier mode, update the parents selection state
+		// issue #82: only if not initializing, because the children may not exist yet
+//      if( !dtnode.data.isStatusNode && opts.selectMode==3 && !isInitializing )
+//          dtnode._fixSelectionState();
+
+		// In multi-hier mode, update the parents selection state
+		if( dtnode.bSelected && opts.selectMode==3 ) {
+			var p = this;
+			while( p ) {
+				if( !p.hasSubSel )
+					p._setSubSel(true);
+				p = p.parent;
+			}
+		}
+		// render this node and the new child
+		if ( tree.bEnableUpdate )
+			this.render();
+
+		return dtnode;
+
+*/
+	},
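+
+	/* Usage sketch (illustrative only): the three supported modes, relative
+	 * to a target node in the same tree:
+	 *
+	 *     node.move(targetNode);           // append as last child (default)
+	 *     node.move(targetNode, "before"); // insert as previous sibling
+	 *     node.move(targetNode, "after");  // insert as next sibling
+	 */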
+
+	// --- end of class
+	lastentry: undefined
+};
+
+/*************************************************************************
+ * class DynaTreeStatus
+ */
+
+var DynaTreeStatus = Class.create();
+
+
+DynaTreeStatus._getTreePersistData = function(cookieId, cookieOpts) {
+	// Static member: Return persistence information from cookies
+	var ts = new DynaTreeStatus(cookieId, cookieOpts);
+	ts.read();
+	return ts.toDict();
+};
+// Make available in global scope
+getDynaTreePersistData = DynaTreeStatus._getTreePersistData; // TODO: deprecated
+
+
+DynaTreeStatus.prototype = {
+	// Constructor
+	initialize: function(cookieId, cookieOpts) {
+//      this._log("DynaTreeStatus: initialize");
+		if( cookieId === undefined ){
+			cookieId = $.ui.dynatree.prototype.options.cookieId;
+		}
+		cookieOpts = $.extend({}, $.ui.dynatree.prototype.options.cookie, cookieOpts);
+
+		this.cookieId = cookieId;
+		this.cookieOpts = cookieOpts;
+		this.cookiesFound = undefined;
+		this.activeKey = null;
+		this.focusedKey = null;
+		this.expandedKeyList = null;
+		this.selectedKeyList = null;
+	},
+	// member functions
+	_log: function(msg) {
+		//  this.logDebug("_changeNodeList(%o): nodeList:%o, idx:%o", mode, nodeList, idx);
+		Array.prototype.unshift.apply(arguments, ["debug"]);
+		_log.apply(this, arguments);
+	},
+	read: function() {
+//      this._log("DynaTreeStatus: read");
+		// Read or init cookies.
+		this.cookiesFound = false;
+
+		var cookie = $.cookie(this.cookieId + "-active");
+		this.activeKey = cookie || "";
+		if( cookie ){
+			this.cookiesFound = true;
+		}
+		cookie = $.cookie(this.cookieId + "-focus");
+		this.focusedKey = cookie || "";
+		if( cookie ){
+			this.cookiesFound = true;
+		}
+		cookie = $.cookie(this.cookieId + "-expand");
+		this.expandedKeyList = cookie ? cookie.split(",") : [];
+		if( cookie ){
+			this.cookiesFound = true;
+		}
+		cookie = $.cookie(this.cookieId + "-select");
+		this.selectedKeyList = cookie ? cookie.split(",") : [];
+		if( cookie ){
+			this.cookiesFound = true;
+		}
+	},
+	write: function() {
+//      this._log("DynaTreeStatus: write");
+		$.cookie(this.cookieId + "-active", ( this.activeKey === null ) ? "" : this.activeKey, this.cookieOpts);
+		$.cookie(this.cookieId + "-focus", ( this.focusedKey === null ) ? "" : this.focusedKey, this.cookieOpts);
+		$.cookie(this.cookieId + "-expand", ( this.expandedKeyList === null ) ? "" : this.expandedKeyList.join(","), this.cookieOpts);
+		$.cookie(this.cookieId + "-select", ( this.selectedKeyList === null ) ? "" : this.selectedKeyList.join(","), this.cookieOpts);
+	},
+	addExpand: function(key) {
+//      this._log("addExpand(%o)", key);
+		if( $.inArray(key, this.expandedKeyList) < 0 ) {
+			this.expandedKeyList.push(key);
+			$.cookie(this.cookieId + "-expand", this.expandedKeyList.join(","), this.cookieOpts);
+		}
+	},
+	clearExpand: function(key) {
+//      this._log("clearExpand(%o)", key);
+		var idx = $.inArray(key, this.expandedKeyList);
+		if( idx >= 0 ) {
+			this.expandedKeyList.splice(idx, 1);
+			$.cookie(this.cookieId + "-expand", this.expandedKeyList.join(","), this.cookieOpts);
+		}
+	},
+	addSelect: function(key) {
+//      this._log("addSelect(%o)", key);
+		if( $.inArray(key, this.selectedKeyList) < 0 ) {
+			this.selectedKeyList.push(key);
+			$.cookie(this.cookieId + "-select", this.selectedKeyList.join(","), this.cookieOpts);
+		}
+	},
+	clearSelect: function(key) {
+//      this._log("clearSelect(%o)", key);
+		var idx = $.inArray(key, this.selectedKeyList);
+		if( idx >= 0 ) {
+			this.selectedKeyList.splice(idx, 1);
+			$.cookie(this.cookieId + "-select", this.selectedKeyList.join(","), this.cookieOpts);
+		}
+	},
+	isReloading: function() {
+		return this.cookiesFound === true;
+	},
+	toDict: function() {
+		return {
+			cookiesFound: this.cookiesFound,
+			activeKey: this.activeKey,
+			focusedKey: this.focusedKey,
+			expandedKeyList: this.expandedKeyList,
+			selectedKeyList: this.selectedKeyList
+		};
+	},
+	// --- end of class
+	lastentry: undefined
+};
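+
+/* Persistence sketch (illustrative; the cookie prefix comes from options.cookieId,
+ * which defaults to "dynatree"): after write(), four cookies exist, e.g.
+ *
+ *     dynatree-active = "k3"
+ *     dynatree-focus  = "k3"
+ *     dynatree-expand = "k1,k2"
+ *     dynatree-select = "k2,k4"
+ */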
+
+
+/*************************************************************************
+ * class DynaTree
+ */
+
+var DynaTree = Class.create();
+
+// --- Static members ----------------------------------------------------------
+
+DynaTree.version = "@@Version";
+
+//--- Class members ------------------------------------------------------------
+
+DynaTree.prototype = {
+	// Constructor
+	initialize: function($widget) {
+		// instance members
+		this.phase = "init";
+		this.$widget = $widget;
+		this.options = $widget.options;
+		this.$tree = $widget.element;
+		this.timer = null;
+		// find container element
+		this.divTree = this.$tree.get(0);
+
+		_initDragAndDrop(this);
+	},
+
+	// member functions
+
+	_load: function(callback) {
+		var $widget = this.$widget;
+		var opts = this.options,
+			self = this;
+		this.bEnableUpdate = true;
+		this._nodeCount = 1;
+		this.activeNode = null;
+		this.focusNode = null;
+
+		// Some deprecation warnings to help with migration
+		if( opts.rootVisible !== undefined ){
+			this.logWarning("Option 'rootVisible' is no longer supported.");
+		}
+		if( opts.minExpandLevel < 1 ) {
+			this.logWarning("Option 'minExpandLevel' must be >= 1.");
+			opts.minExpandLevel = 1;
+		}
+//      _log("warn", "jQuery.support.boxModel " + jQuery.support.boxModel);
+
+		// If an 'options.classNames' dictionary was passed, still use defaults
+		// for undefined classes:
+		if( opts.classNames !== $.ui.dynatree.prototype.options.classNames ) {
+			opts.classNames = $.extend({}, $.ui.dynatree.prototype.options.classNames, opts.classNames);
+		}
+		if( opts.ajaxDefaults !== $.ui.dynatree.prototype.options.ajaxDefaults ) {
+			opts.ajaxDefaults = $.extend({}, $.ui.dynatree.prototype.options.ajaxDefaults, opts.ajaxDefaults);
+		}
+		if( opts.dnd !== $.ui.dynatree.prototype.options.dnd ) {
+			opts.dnd = $.extend({}, $.ui.dynatree.prototype.options.dnd, opts.dnd);
+		}
+		// Guess skin path, if not specified
+		if(!opts.imagePath) {
+			$("script").each( function () {
+				var _rexDtLibName = /.*dynatree[^\/]*\.js$/i;
+				if( this.src.search(_rexDtLibName) >= 0 ) {
+					if( this.src.indexOf("/")>=0 ){ // issue #47
+						opts.imagePath = this.src.slice(0, this.src.lastIndexOf("/")) + "/skin/";
+					}else{
+						opts.imagePath = "skin/";
+					}
+					self.logDebug("Guessing imagePath from '%s': '%s'", this.src, opts.imagePath);
+					return false; // first match
+				}
+			});
+		}
+
+		this.persistence = new DynaTreeStatus(opts.cookieId, opts.cookie);
+		if( opts.persist ) {
+			if( !$.cookie ){
+				_log("warn", "Please include jquery.cookie.js to use persistence.");
+			}
+			this.persistence.read();
+		}
+		this.logDebug("DynaTree.persistence: %o", this.persistence.toDict());
+
+		// Cached tag strings
+		this.cache = {
+			tagEmpty: "<span class='" + opts.classNames.empty + "'></span>",
+			tagVline: "<span class='" + opts.classNames.vline + "'></span>",
+			tagExpander: "<span class='" + opts.classNames.expander + "'></span>",
+			tagConnector: "<span class='" + opts.classNames.connector + "'></span>",
+			tagNodeIcon: "<span class='" + opts.classNames.nodeIcon + "'></span>",
+			tagCheckbox: "<span class='" + opts.classNames.checkbox + "'></span>",
+			lastentry: undefined
+		};
+
+		// Clear container, in case it contained some 'waiting' or 'error' text
+		// for clients that don't support JS.
+		// We don't do this, however, if we try to load from an embedded UL element.
+		if( opts.children || (opts.initAjax && opts.initAjax.url) || opts.initId ){
+			$(this.divTree).empty();
+		}
+		var $ulInitialize = this.$tree.find(">ul:first").hide();
+
+		// Create the root element
+		this.tnRoot = new DynaTreeNode(null, this, {});
+		this.tnRoot.bExpanded = true;
+		this.tnRoot.render();
+		this.divTree.appendChild(this.tnRoot.ul);
+
+		var root = this.tnRoot,
+			isReloading = ( opts.persist && this.persistence.isReloading() ),
+			isLazy = false,
+			prevFlag = this.enableUpdate(false);
+
+		this.logDebug("Dynatree._load(): read tree structure...");
+
+		// Init tree structure
+		if( opts.children ) {
+			// Read structure from node array
+			root.addChild(opts.children);
+
+		} else if( opts.initAjax && opts.initAjax.url ) {
+			// Init tree from AJAX request
+			isLazy = true;
+			root.data.isLazy = true;
+			this._reloadAjax(callback);
+
+		} else if( opts.initId ) {
+			// Init tree from another UL element
+			this._createFromTag(root, $("#"+opts.initId));
+
+		} else {
+			// Init tree from the first UL element inside the container <div>
+//          var $ul = this.$tree.find(">ul:first").hide();
+			this._createFromTag(root, $ulInitialize);
+			$ulInitialize.remove();
+		}
+
+		this._checkConsistency();
+		// Fix part-sel flags
+		if(!isLazy && opts.selectMode == 3){
+			root._updatePartSelectionState();
+		}
+		// Render html markup
+		this.logDebug("Dynatree._load(): render nodes...");
+		this.enableUpdate(prevFlag);
+
+		// bind event handlers
+		this.logDebug("Dynatree._load(): bind events...");
+		this.$widget.bind();
+
+		// --- Post-load processing
+		this.logDebug("Dynatree._load(): postInit...");
+		this.phase = "postInit";
+
+		// In persist mode, make sure that cookies are written, even if they are empty
+		if( opts.persist ) {
+			this.persistence.write();
+		}
+		// Set focus, if possible (this will also fire an event and write a cookie)
+		if( this.focusNode && this.focusNode.isVisible() ) {
+			this.logDebug("Focus on init: %o", this.focusNode);
+			this.focusNode.focus();
+		}
+		if( !isLazy ) {
+			if( opts.onPostInit ) {
+				opts.onPostInit.call(this, isReloading, false);
+			}
+			if( callback ){
+				callback.call(this, "ok");
+			}
+		}
+		this.phase = "idle";
+	},
+
+	_reloadAjax: function(callback) {
+		// Reload
+		var opts = this.options;
+		if( ! opts.initAjax || ! opts.initAjax.url ){
+			throw "tree.reload() requires 'initAjax' mode.";
+		}
+		var pers = this.persistence;
+		var ajaxOpts = $.extend({}, opts.initAjax);
+		// Append cookie info to the request
+//      this.logDebug("reloadAjax: key=%o, an.key:%o", pers.activeKey, this.activeNode?this.activeNode.data.key:"?");
+		if( ajaxOpts.addActiveKey ){
+			ajaxOpts.data.activeKey = pers.activeKey;
+		}
+		if( ajaxOpts.addFocusedKey ){
+			ajaxOpts.data.focusedKey = pers.focusedKey;
+		}
+		if( ajaxOpts.addExpandedKeyList ){
+			ajaxOpts.data.expandedKeyList = pers.expandedKeyList.join(",");
+		}
+		if( ajaxOpts.addSelectedKeyList ){
+			ajaxOpts.data.selectedKeyList = pers.selectedKeyList.join(",");
+		}
+		// Set up onPostInit callback to be called when Ajax returns
+		if( ajaxOpts.success ){
+			this.logWarning("initAjax: success callback is ignored; use onPostInit instead.");
+		}
+		if( ajaxOpts.error ){
+			this.logWarning("initAjax: error callback is ignored; use onPostInit instead.");
+		}
+		var isReloading = pers.isReloading();
+		ajaxOpts.success = function(dtnode, data, textStatus) {
+			if(opts.selectMode == 3){
+				dtnode.tree.tnRoot._updatePartSelectionState();
+			}
+			if(opts.onPostInit){
+				opts.onPostInit.call(dtnode.tree, isReloading, false);
+			}
+			if(callback){
+				callback.call(dtnode.tree, "ok");
+			}
+		};
+		ajaxOpts.error = function(dtnode, XMLHttpRequest, textStatus, errorThrown) {
+			if(opts.onPostInit){
+				opts.onPostInit.call(dtnode.tree, isReloading, true, XMLHttpRequest, textStatus, errorThrown);
+			}
+			if(callback){
+				callback.call(dtnode.tree, "error", XMLHttpRequest, textStatus, errorThrown);
+			}
+		};
+//      }
+		this.logDebug("Dynatree._init(): send Ajax request...");
+		this.tnRoot.appendAjax(ajaxOpts);
+	},
+
+	toString: function() {
+		return "Dynatree '" + this.$tree.attr("id") + "'";
+	},
+
+	toDict: function(includeRoot) {
+		var dict = this.tnRoot.toDict(true);
+		return includeRoot ? dict : dict.children;
+	},
+
+	serializeArray: function(stopOnParents) {
+		// Return a JavaScript array of objects, ready to be encoded as a JSON
+		// string for selected nodes
+		var nodeList = this.getSelectedNodes(stopOnParents),
+			name = this.$tree.attr("name") || this.$tree.attr("id"),
+			arr = [];
+		for(var i=0, l=nodeList.length; i<l; i++){
+			arr.push({name: name, value: nodeList[i].data.key});
+		}
+		return arr;
+	},
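+
+	/* Usage sketch (illustrative only): with a tree element <div id="tree">
+	 * and two selected nodes with keys "k1" and "k2", this returns
+	 *     [{name: "tree", value: "k1"}, {name: "tree", value: "k2"}],
+	 * ready for jQuery.param() or form submission. */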
+
+	getPersistData: function() {
+		return this.persistence.toDict();
+	},
+
+	logDebug: function(msg) {
+		if( this.options.debugLevel >= 2 ) {
+			Array.prototype.unshift.apply(arguments, ["debug"]);
+			_log.apply(this, arguments);
+		}
+	},
+
+	logInfo: function(msg) {
+		if( this.options.debugLevel >= 1 ) {
+			Array.prototype.unshift.apply(arguments, ["info"]);
+			_log.apply(this, arguments);
+		}
+	},
+
+	logWarning: function(msg) {
+		Array.prototype.unshift.apply(arguments, ["warn"]);
+		_log.apply(this, arguments);
+	},
+
+	isInitializing: function() {
+		return ( this.phase=="init" || this.phase=="postInit" );
+	},
+	isReloading: function() {
+		return ( this.phase=="init" || this.phase=="postInit" ) && this.options.persist && this.persistence.cookiesFound;
+	},
+	isUserEvent: function() {
+		return ( this.phase=="userEvent" );
+	},
+
+	redraw: function() {
+//      this.logDebug("dynatree.redraw()...");
+		this.tnRoot.render(false, false);
+//      this.logDebug("dynatree.redraw() done.");
+	},
+	renderInvisibleNodes: function() {
+		this.tnRoot.render(false, true);
+	},
+	reload: function(callback) {
+		this._load(callback);
+	},
+
+	getRoot: function() {
+		return this.tnRoot;
+	},
+
+	enable: function() {
+		this.$widget.enable();
+	},
+
+	disable: function() {
+		this.$widget.disable();
+	},
+
+	getNodeByKey: function(key) {
+		// Search the DOM by element ID (assuming this is faster than traversing all nodes).
+		// $("#...") has problems if the key contains '.', so we use getElementById()
+		var el = document.getElementById(this.options.idPrefix + key);
+		if( el ){
+			return el.dtnode ? el.dtnode : null;
+		}
+		// Not found in the DOM, but still may be in an unrendered part of tree
+		var match = null;
+		this.visit(function(node){
+//          window.console.log("%s", node);
+			if(node.data.key === key) {
+				match = node;
+				return false;
+			}
+		}, true);
+		return match;
+	},
+
+	getActiveNode: function() {
+		return this.activeNode;
+	},
+
+	reactivate: function(setFocus) {
+		// Re-fire onQueryActivate and onActivate events.
+		var node = this.activeNode;
+//      this.logDebug("reactivate %o", node);
+		if( node ) {
+			this.activeNode = null; // Force re-activating
+			node.activate();
+			if( setFocus ){
+				node.focus();
+			}
+		}
+	},
+
+	getSelectedNodes: function(stopOnParents) {
+		var nodeList = [];
+		this.tnRoot.visit(function(node){
+			if( node.bSelected ) {
+				nodeList.push(node);
+				if( stopOnParents === true ){
+					return "skip"; // stop processing this branch
+				}
+			}
+		});
+		return nodeList;
+	},
+
+	activateKey: function(key) {
+		var dtnode = (key === null) ? null : this.getNodeByKey(key);
+		if( !dtnode ) {
+			if( this.activeNode ){
+				this.activeNode.deactivate();
+			}
+			this.activeNode = null;
+			return null;
+		}
+		dtnode.focus();
+		dtnode.activate();
+		return dtnode;
+	},
+
+	loadKeyPath: function(keyPath, callback) {
+		var segList = keyPath.split(this.options.keyPathSeparator);
+		// Remove leading '/'
+		if(segList[0] === ""){
+			segList.shift();
+		}
+		// Remove leading system root key
+		if(segList[0] == this.tnRoot.data.key){
+			this.logDebug("Removed leading root key.");
+			segList.shift();
+		}
+		keyPath = segList.join(this.options.keyPathSeparator);
+		return this.tnRoot._loadKeyPath(keyPath, callback);
+	},
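+
+	/* Usage sketch (illustrative; the key path is made up): expand the tree
+	 * along a key path and activate the end node once it is loaded. The
+	 * callback status is one of "loaded", "ok", "error", or "notfound":
+	 *
+	 *     tree.loadKeyPath("/folder1/folder2/doc13", function(node, status){
+	 *         if( status === "ok" ){ node.activate(); }
+	 *     });
+	 */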
+
+	selectKey: function(key, select) {
+		var dtnode = this.getNodeByKey(key);
+		if( !dtnode ){
+			return null;
+		}
+		dtnode.select(select);
+		return dtnode;
+	},
+
+	enableUpdate: function(bEnable) {
+		if ( this.bEnableUpdate==bEnable ){
+			return bEnable;
+		}
+		this.bEnableUpdate = bEnable;
+		if ( bEnable ){
+			this.redraw();
+		}
+		return !bEnable; // return previous value
+	},
+
+	count: function() {
+		return this.tnRoot.countChildren();
+	},
+
+	visit: function(fn, includeRoot) {
+		return this.tnRoot.visit(fn, includeRoot);
+	},
+
+	_createFromTag: function(parentTreeNode, $ulParent) {
+		// Convert a <UL>...</UL> list into children of the parent tree node.
+		var self = this;
+/*
+TODO: better?
+		this.$lis = $("li:has(a[href])", this.element);
+		this.$tabs = this.$lis.map(function() { return $("a", this)[0]; });
+ */
+		$ulParent.find(">li").each(function() {
+			var $li = $(this),
+				$liSpan = $li.find(">span:first"),
+				$liA = $li.find(">a:first"),
+				title,
+				href = null,
+				target = null,
+				tooltip;
+			if( $liSpan.length ) {
+				// If a <li><span> tag is specified, use it literally.
+				title = $liSpan.html();
+			} else if( $liA.length ) {
+				title = $liA.html();
+				href = $liA.attr("href");
+				target = $liA.attr("target");
+				tooltip = $liA.attr("title");
+			} else {
+				// If only a <li> tag is specified, use the trimmed string up to
+				// the next child <ul> tag.
+				title = $li.html();
+				var iPos = title.search(/<ul/i);
+				if( iPos >= 0 ){
+					title = $.trim(title.substring(0, iPos));
+				}else{
+					title = $.trim(title);
+				}
+//              self.logDebug("%o", title);
+			}
+			// Parse node options from ID, title and class attributes
+			var data = {
+				title: title,
+				tooltip: tooltip,
+				isFolder: $li.hasClass("folder"),
+				isLazy: $li.hasClass("lazy"),
+				expand: $li.hasClass("expanded"),
+				select: $li.hasClass("selected"),
+				activate: $li.hasClass("active"),
+				focus: $li.hasClass("focused"),
+				noLink: $li.hasClass("noLink")
+			};
+			if( href ){
+				data.href = href;
+				data.target = target;
+			}
+			if( $li.attr("title") ){
+				data.tooltip = $li.attr("title"); // overrides <a title='...'>
+			}
+			if( $li.attr("id") ){
+				data.key = "" + $li.attr("id");
+			}
+			// If a data attribute is present, evaluate as a JavaScript object
+			if( $li.attr("data") ) {
+				var dataAttr = $.trim($li.attr("data"));
+				if( dataAttr ) {
+					if( dataAttr.charAt(0) != "{" ){
+						dataAttr = "{" + dataAttr + "}";
+					}
+					try {
+						$.extend(data, eval("(" + dataAttr + ")"));
+					} catch(e) {
+						throw ("Error parsing node data: " + e + "\ndata:\n'" + dataAttr + "'");
+					}
+				}
+			}
+			var childNode = parentTreeNode.addChild(data);
+			// Recursive reading of child nodes, if LI tag contains an UL tag
+			var $ul = $li.find(">ul:first");
+			if( $ul.length ) {
+				self._createFromTag(childNode, $ul); // must use 'self', because 'this' is the each() context
+			}
+		});
+	},
+
+	_checkConsistency: function() {
+//      this.logDebug("tree._checkConsistency() NOT IMPLEMENTED - %o", this);
+	},
+
+	_setDndStatus: function(sourceNode, targetNode, helper, hitMode, accept) {
+		// hitMode: 'after', 'before', 'over', 'out', 'start', 'stop'
+		var $source = sourceNode ? $(sourceNode.span) : null,
+			$target = $(targetNode.span),
+			posOpts,
+			markerOffsetX = 0,
+			markerAt = "center";
+
+		if( !this.$dndMarker ) {
+			this.$dndMarker = $("<div id='dynatree-drop-marker'></div>")
+				.hide()
+				.css({"z-index": 1000})
+				.prependTo($(this.divTree).parent());
+
+//          logMsg("Creating marker: %o", this.$dndMarker);
+		}
+/*
+		if(hitMode === "start"){
+		}
+		if(hitMode === "stop"){
+//          sourceNode.removeClass("dynatree-drop-target");
+		}
+*/
+		if(hitMode === "after" || hitMode === "before" || hitMode === "over"){
+//          $source && $source.addClass("dynatree-drag-source");
+//          $target.addClass("dynatree-drop-target");
+
+			switch(hitMode){
+			case "before":
+				this.$dndMarker.removeClass("dynatree-drop-after dynatree-drop-over");
+				this.$dndMarker.addClass("dynatree-drop-before");
+				markerAt = "top";
+				break;
+			case "after":
+				this.$dndMarker.removeClass("dynatree-drop-before dynatree-drop-over");
+				this.$dndMarker.addClass("dynatree-drop-after");
+				markerAt = "bottom";
+				break;
+			default:
+				this.$dndMarker.removeClass("dynatree-drop-after dynatree-drop-before");
+				this.$dndMarker.addClass("dynatree-drop-over");
+				$target.addClass("dynatree-drop-target");
+				markerOffsetX = 8;
+			}
+//          logMsg("Creating marker: %o", this.$dndMarker);
+//          logMsg("    $target.offset=%o", $target);
+//          logMsg("    pos/$target.offset=%o", pos);
+//          logMsg("    $target.position=%o", $target.position());
+//          logMsg("    $target.offsetParent=%o, ot:%o", $target.offsetParent(), $target.offsetParent().offset());
+//          logMsg("    $(this.divTree).offset=%o", $(this.divTree).offset());
+//          logMsg("    $(this.divTree).parent=%o", $(this.divTree).parent());
+//          var pos = $target.offset();
+//          var parentPos = $target.offsetParent().offset();
+//          var bodyPos = $target.offsetParent().offset();
+
+			if( jquerySupports.positionMyOfs ){
+				posOpts = {
+					my: "left" + offsetString(markerOffsetX) + " center",
+					at: "left " + markerAt,
+					of: $target
+				};
+			} else {
+				posOpts = {
+					my: "left center",
+					at: "left " + markerAt,
+					of: $target,
+					offset: "" + markerOffsetX + " 0"
+				};
+			}
+			this.$dndMarker
+				.show()
+				.position(posOpts);
+
+//          helper.addClass("dynatree-drop-hover");
+		} else {
+//          $source && $source.removeClass("dynatree-drag-source");
+			$target.removeClass("dynatree-drop-target");
+			this.$dndMarker.hide();
+//          helper.removeClass("dynatree-drop-hover");
+		}
+		if(hitMode === "after"){
+			$target.addClass("dynatree-drop-after");
+		} else {
+			$target.removeClass("dynatree-drop-after");
+		}
+		if(hitMode === "before"){
+			$target.addClass("dynatree-drop-before");
+		} else {
+			$target.removeClass("dynatree-drop-before");
+		}
+		if(accept === true){
+			if($source){
+				$source.addClass("dynatree-drop-accept");
+			}
+			$target.addClass("dynatree-drop-accept");
+			helper.addClass("dynatree-drop-accept");
+		}else{
+			if($source){
+				$source.removeClass("dynatree-drop-accept");
+			}
+			$target.removeClass("dynatree-drop-accept");
+			helper.removeClass("dynatree-drop-accept");
+		}
+		if(accept === false){
+			if($source){
+				$source.addClass("dynatree-drop-reject");
+			}
+			$target.addClass("dynatree-drop-reject");
+			helper.addClass("dynatree-drop-reject");
+		}else{
+			if($source){
+				$source.removeClass("dynatree-drop-reject");
+			}
+			$target.removeClass("dynatree-drop-reject");
+			helper.removeClass("dynatree-drop-reject");
+		}
+	},
+
+	_onDragEvent: function(eventName, node, otherNode, event, ui, draggable) {
+		/**
+		 * Handles drag'n'drop functionality.
+		 *
+		 * A standard jQuery drag-and-drop process may generate these calls:
+		 *
+		 * draggable helper():
+		 *     _onDragEvent("helper", sourceNode, null, event, null, null);
+		 * start:
+		 *     _onDragEvent("start", sourceNode, null, event, ui, draggable);
+		 * drag:
+		 *     _onDragEvent("leave", prevTargetNode, sourceNode, event, ui, draggable);
+		 *     _onDragEvent("over", targetNode, sourceNode, event, ui, draggable);
+		 *     _onDragEvent("enter", targetNode, sourceNode, event, ui, draggable);
+		 * stop:
+		 *     _onDragEvent("drop", targetNode, sourceNode, event, ui, draggable);
+		 *     _onDragEvent("leave", targetNode, sourceNode, event, ui, draggable);
+		 *     _onDragEvent("stop", sourceNode, null, event, ui, draggable);
+		 */
+		var hitMode, enterResponse, r,
+			dnd = this.options.dnd,
+			res = null,
+			nodeTag = $(node.span);
+
+		switch (eventName) {
+		case "helper":
+			// Only the event and node arguments are available
+			var $helper = $("<div class='dynatree-drag-helper'><span class='dynatree-drag-helper-img' /></div>")
+					// .append($(event.target).closest(".dynatree-title").clone());
+					.append(nodeTag.find(".dynatree-title").clone());
+			// issue 244: helper should be child of scrollParent
+			$("ul.dynatree-container", node.tree.divTree).append($helper);
+//          $(node.tree.divTree).append($helper);
+			// Attach node reference to helper object
+			$helper.data("dtSourceNode", node);
+			res = $helper;
+			break;
+		case "start":
+			if(node.isStatusNode()) {
+				res = false;
+			} else if(dnd.onDragStart) {
+				res = dnd.onDragStart(node);
+			}
+			if(res === false) {
+				this.logDebug("tree.onDragStart() cancelled");
+				//draggable._clear();
+				// NOTE: the return value seems to be ignored (drag is not canceled, when false is returned)
+				ui.helper.trigger("mouseup");
+				ui.helper.hide();
+			} else {
+				nodeTag.addClass("dynatree-drag-source");
+			}
+			break;
+		case "enter":
+			r = dnd.onDragEnter ? dnd.onDragEnter(node, otherNode, ui, draggable) : null;
+			if(!r){
+				// convert null, undefined, false to false
+				res = false;
+			}else if ( $.isArray(r) ) {
+				res = {
+					over: ($.inArray("over", r) >= 0),
+					before: ($.inArray("before", r) >= 0),
+					after: ($.inArray("after", r) >= 0)
+				};
+			}else{
+				res = {
+					over: ((r === true) || (r === "over")),
+					before: ((r === true) || (r === "before")),
+					after: ((r === true) || (r === "after"))
+				};
+			}
+			ui.helper.data("enterResponse", res);
+//            this.logDebug("helper.enterResponse: %o", res);
+			break;
+		case "over":
+			enterResponse = ui.helper.data("enterResponse");
+			hitMode = null;
+			if(enterResponse === false){
+				// Don't call onDragOver if onEnter returned false.
+				// issue 332
+//              break;
+			} else if(typeof enterResponse === "string") {
+				// Use hitMode from onEnter if provided.
+				hitMode = enterResponse;
+			} else {
+				// Calculate hitMode from relative cursor position.
+				var nodeOfs = nodeTag.offset();
+				var relPos = { x: event.pageX - nodeOfs.left,
+							   y: event.pageY - nodeOfs.top };
+				var relPos2 = { x: relPos.x / nodeTag.width(),
+								y: relPos.y / nodeTag.height() };
+
+				if( enterResponse.after && relPos2.y > 0.75 ){
+					hitMode = "after";
+				} else if(!enterResponse.over && enterResponse.after && relPos2.y > 0.5 ){
+					hitMode = "after";
+				} else if(enterResponse.before && relPos2.y <= 0.25) {
+					hitMode = "before";
+				} else if(!enterResponse.over && enterResponse.before && relPos2.y <= 0.5) {
+					hitMode = "before";
+				} else if(enterResponse.over) {
+					hitMode = "over";
+				}
+				// Prevent no-ops like 'before source node'
+				// TODO: these are no-ops when moving nodes, but not in copy mode
+				if( dnd.preventVoidMoves ){
+					if(node === otherNode){
+						hitMode = null;
+					}else if(hitMode === "before" && otherNode && node === otherNode.getNextSibling()){
+						hitMode = null;
+					}else if(hitMode === "after" && otherNode && node === otherNode.getPrevSibling()){
+						hitMode = null;
+					}else if(hitMode === "over" && otherNode
+							&& otherNode.parent === node && otherNode.isLastSibling() ){
+						hitMode = null;
+					}
+				}
+//              this.logDebug("hitMode: %s - %s - %s", hitMode, (node.parent === otherNode), node.isLastSibling());
+				ui.helper.data("hitMode", hitMode);
+			}
+			// Auto-expand node (only when 'over' the node, not 'before', or 'after')
+			if(hitMode === "over"
+				&& dnd.autoExpandMS && node.hasChildren() !== false && !node.bExpanded) {
+				node.scheduleAction("expand", dnd.autoExpandMS);
+			}
+			if(hitMode && dnd.onDragOver){
+				res = dnd.onDragOver(node, otherNode, hitMode, ui, draggable);
+				if(res === "over" || res === "before" || res === "after") {
+					hitMode = res;
+				}
+			}
+			// issue 332
+//          this._setDndStatus(otherNode, node, ui.helper, hitMode, res!==false);
+			this._setDndStatus(otherNode, node, ui.helper, hitMode, res!==false && hitMode !== null);
+			break;
+		case "drop":
+			// issue 286: don't trigger onDrop, if DnD status is 'reject'
+			var isForbidden = ui.helper.hasClass("dynatree-drop-reject");
+			hitMode = ui.helper.data("hitMode");
+			if(hitMode && dnd.onDrop && !isForbidden){
+				dnd.onDrop(node, otherNode, hitMode, ui, draggable);
+			}
+			break;
+		case "leave":
+			// Cancel pending expand request
+			node.scheduleAction("cancel");
+			ui.helper.data("enterResponse", null);
+			ui.helper.data("hitMode", null);
+			this._setDndStatus(otherNode, node, ui.helper, "out", undefined);
+			if(dnd.onDragLeave){
+				dnd.onDragLeave(node, otherNode, ui, draggable);
+			}
+			break;
+		case "stop":
+			nodeTag.removeClass("dynatree-drag-source");
+			if(dnd.onDragStop){
+				dnd.onDragStop(node);
+			}
+			break;
+		default:
+			throw "Unsupported drag event: " + eventName;
+		}
+		return res;
+	},
+
+	cancelDrag: function() {
+		 var dd = $.ui.ddmanager.current;
+		 if(dd){
+			 dd.cancel();
+		 }
+	},
+
+	// --- end of class
+	lastentry: undefined
+};
+
+/*************************************************************************
+ * Widget $(..).dynatree
+ */
+
+$.widget("ui.dynatree", {
+/*
+	init: function() {
+		// ui.core 1.6 renamed init() to _init(): this stub assures backward compatibility
+		_log("warn", "ui.dynatree.init() was called; you should upgrade to jquery.ui.core.js v1.8 or higher.");
+		return this._init();
+	},
+ */
+	_init: function() {
+//      if( parseFloat($.ui.version) < 1.8 ) {
+		if(versionCompare($.ui.version, "1.8") < 0){
+			// jquery.ui.core 1.8 renamed _init() to _create(): this stub assures backward compatibility
+			if(this.options.debugLevel >= 0){
+				_log("warn", "ui.dynatree._init() was called; you should upgrade to jquery.ui.core.js v1.8 or higher.");
+			}
+			return this._create();
+		}
+		// jquery.ui.core 1.8 still uses _init() to perform "default functionality"
+		if(this.options.debugLevel >= 2){
+			_log("debug", "ui.dynatree._init() was called; no current default functionality.");
+		}
+	},
+
+	_create: function() {
+		var opts = this.options;
+		if(opts.debugLevel >= 1){
+			logMsg("Dynatree._create(): version='%s', debugLevel=%o.", $.ui.dynatree.version, this.options.debugLevel);
+		}
+		// The widget framework supplies this.element and this.options.
+		this.options.event += ".dynatree"; // namespace event
+
+		var divTree = this.element.get(0);
+/*      // Clear container, in case it contained some 'waiting' or 'error' text
+		// for clients that don't support JS
+		if( opts.children || (opts.initAjax && opts.initAjax.url) || opts.initId )
+			$(divTree).empty();
+*/
+		// Create the DynaTree object
+		this.tree = new DynaTree(this);
+		this.tree._load();
+		this.tree.logDebug("Dynatree._init(): done.");
+	},
+
+	bind: function() {
+		// Prevent duplicate binding
+		this.unbind();
+
+		var eventNames = "click.dynatree dblclick.dynatree";
+		if( this.options.keyboard ){
+			// Note: leading ' '!
+			eventNames += " keypress.dynatree keydown.dynatree";
+		}
+		this.element.bind(eventNames, function(event){
+			var dtnode = $.ui.dynatree.getNode(event.target);
+			if( !dtnode ){
+				return true;  // Allow bubbling of other events
+			}
+			var tree = dtnode.tree;
+			var o = tree.options;
+			tree.logDebug("event(%s): dtnode: %s", event.type, dtnode);
+			var prevPhase = tree.phase;
+			tree.phase = "userEvent";
+			try {
+				switch(event.type) {
+				case "click":
+					return ( o.onClick && o.onClick.call(tree, dtnode, event)===false ) ? false : dtnode._onClick(event);
+				case "dblclick":
+					return ( o.onDblClick && o.onDblClick.call(tree, dtnode, event)===false ) ? false : dtnode._onDblClick(event);
+				case "keydown":
+					return ( o.onKeydown && o.onKeydown.call(tree, dtnode, event)===false ) ? false : dtnode._onKeydown(event);
+				case "keypress":
+					return ( o.onKeypress && o.onKeypress.call(tree, dtnode, event)===false ) ? false : dtnode._onKeypress(event);
+				}
+			} catch(e) {
+				var _ = null; // issue 117
+				tree.logWarning("bind(%o): dtnode: %o, error: %o", event, dtnode, e);
+			} finally {
+				tree.phase = prevPhase;
+			}
+		});
+
+		// focus/blur don't bubble, i.e. are not delegated to parent <div> tags,
+		// so we use the addEventListener capturing phase.
+		// See http://www.howtocreate.co.uk/tutorials/javascript/domevents
+		function __focusHandler(event) {
+			// Handles blur and focus.
+			// Fix event for IE:
+			// doesn't pass JSLint:
+//          event = arguments[0] = $.event.fix( event || window.event );
+			// what jQuery does:
+//          var args = jQuery.makeArray( arguments );
+//          event = args[0] = jQuery.event.fix( event || window.event );
+			event = $.event.fix( event || window.event );
+			var dtnode = $.ui.dynatree.getNode(event.target);
+			return dtnode ? dtnode._onFocus(event) : false;
+		}
+		var div = this.tree.divTree;
+
+		if( div.addEventListener ) {
+			div.addEventListener("focus", __focusHandler, true);
+			div.addEventListener("blur", __focusHandler, true);
+		} else {
+			div.onfocusin = div.onfocusout = __focusHandler;
+		}
+		// EVENTS
+		// disable click if event is configured to something else
+//      if (!(/^click/).test(o.event))
+//          this.$tabs.bind("click.tabs", function() { return false; });
+
+	},
+
+	unbind: function() {
+		this.element.unbind(".dynatree");
+	},
+
+/* TODO: we could handle option changes during runtime here (maybe to re-render, ...)
+	setData: function(key, value) {
+		this.tree.logDebug("dynatree.setData('" + key + "', '" + value + "')");
+	},
+*/
+	enable: function() {
+		this.bind();
+		// Call default enable(): remove -disabled from css:
+		$.Widget.prototype.enable.apply(this, arguments);
+	},
+
+	disable: function() {
+		this.unbind();
+		// Call default disable(): add -disabled to css:
+		$.Widget.prototype.disable.apply(this, arguments);
+	},
+
+	// --- getter methods (i.e. NOT returning a reference to $)
+	getTree: function() {
+		return this.tree;
+	},
+
+	getRoot: function() {
+		return this.tree.getRoot();
+	},
+
+	getActiveNode: function() {
+		return this.tree.getActiveNode();
+	},
+
+	getSelectedNodes: function() {
+		return this.tree.getSelectedNodes();
+	},
+
+	// ------------------------------------------------------------------------
+	lastentry: undefined
+});
+
+
+// The following methods return a value (thus breaking the jQuery call chain):
+if(versionCompare($.ui.version, "1.8") < 0){
+	$.ui.dynatree.getter = "getTree getRoot getActiveNode getSelectedNodes";
+}
+
+/*******************************************************************************
+ * Tools in ui.dynatree namespace
+ */
+$.extend($.ui.dynatree, {
+	/** @type {String} */
+	version: "1.2.8",
+	/** @type {String} */
+	buildType: "release",
+	/** Expose class object as $.ui.dynatree._DynaTreeClass */
+	_DynaTreeClass: DynaTree,
+	/** Expose class object as $.ui.dynatree._DynaTreeNodeClass */
+	_DynaTreeNodeClass: DynaTreeNode,
+	/**
+	 * Return a DynaTreeNode object for a given DOM element
+	 */
+	getNode: function(el) {
+		if(el instanceof DynaTreeNode){
+			return el; // el already was a DynaTreeNode
+		}
+		if(el.selector !== undefined){
+			el = el[0]; // el was a jQuery object: use the DOM element
+		}
+		// TODO: for some reason $el.parents("[dtnode]") does not work (jQuery 1.6.1)
+		// maybe, because dtnode is a property, not an attribute
+		while( el ) {
+			if(el.dtnode) {
+				return el.dtnode;
+			}
+			el = el.parentNode;
+		}
+		return null;
+	},
+	/**Return persistence information from cookies.*/
+	getPersistData: DynaTreeStatus._getTreePersistData
+});
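+
+// A minimal usage sketch (illustrative "#tree" selector): resolve the
+// DynaTreeNode behind an arbitrary DOM event target with getNode().
+//
+//     $("#tree").click(function(event){
+//         var node = $.ui.dynatree.getNode(event.target);
+//         if( node ){
+//             alert("Clicked: " + node);
+//         }
+//     });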
+
+
+
+/*******************************************************************************
+ * Plugin default options:
+ */
+$.ui.dynatree.prototype.options = {
+	title: "Dynatree", // Tree's name (only used for debug output)
+	minExpandLevel: 1, // 1: root node is not collapsible
+	imagePath: null, // Path to a folder containing icons. Defaults to 'skin/' subdirectory.
+	children: null, // Init tree structure from this object array.
+	initId: null, // Init tree structure from a <ul> element with this ID.
+	initAjax: null, // Ajax options used to initialize the tree structure.
+	autoFocus: true, // Set focus to first child, when expanding or lazy-loading.
+	keyboard: true, // Support keyboard navigation.
+	persist: false, // Persist expand-status to a cookie
+	autoCollapse: false, // Automatically collapse all siblings, when a node is expanded.
+	clickFolderMode: 3, // 1:activate, 2:expand, 3:activate and expand
+	activeVisible: true, // Make sure, active nodes are visible (expanded).
+	checkbox: false, // Show checkboxes.
+	selectMode: 2, // 1:single, 2:multi, 3:multi-hier
+	fx: null, // Animations, e.g. null or { height: "toggle", duration: 200 }
+	noLink: false, // Use <span> instead of <a> tags for all nodes
+	// Low level event handlers: onEvent(dtnode, event): return false, to stop default processing
+	onClick: null, // null: generate focus, expand, activate, select events.
+	onDblClick: null, // (No default actions.)
+	onKeydown: null, // null: generate keyboard navigation (focus, expand, activate).
+	onKeypress: null, // (No default actions.)
+	onFocus: null, // null: set focus to node.
+	onBlur: null, // null: remove focus from node.
+
+	// Pre-event handlers onQueryEvent(flag, dtnode): return false, to stop processing
+	onQueryActivate: null, // Callback(flag, dtnode) before a node is (de)activated.
+	onQuerySelect: null, // Callback(flag, dtnode) before a node is (de)selected.
+	onQueryExpand: null, // Callback(flag, dtnode) before a node is expanded/collapsed.
+
+	// High level event handlers
+	onPostInit: null, // Callback(isReloading, isError) when tree was (re)loaded.
+	onActivate: null, // Callback(dtnode) when a node is activated.
+	onDeactivate: null, // Callback(dtnode) when a node is deactivated.
+	onSelect: null, // Callback(flag, dtnode) when a node is (de)selected.
+	onExpand: null, // Callback(flag, dtnode) when a node is expanded/collapsed.
+	onLazyRead: null, // Callback(dtnode) when a lazy node is expanded for the first time.
+	onCustomRender: null, // Callback(dtnode) before a node is rendered. Return an HTML string to override.
+	onCreate: null, // Callback(dtnode, nodeSpan) after a node was rendered for the first time.
+	onRender: null, // Callback(dtnode, nodeSpan) after a node was rendered.
+				// postProcess is similar to the standard dataFilter hook,
+				// but it is also called for JSONP
+	postProcess: null, // Callback(data, dataType) before an Ajax result is passed to dynatree
+
+	// Drag'n'drop support
+	dnd: {
+		// Make tree nodes draggable:
+		onDragStart: null, // Callback(sourceNode), return true, to enable dnd
+		onDragStop: null, // Callback(sourceNode)
+//      helper: null,
+		revert: false, // true: slide helper back to source if drop is rejected
+		// Make tree nodes accept draggables
+		autoExpandMS: 1000, // Expand nodes after n milliseconds of hovering.
+		preventVoidMoves: true, // Prevent dropping nodes 'before self', etc.
+		onDragEnter: null, // Callback(targetNode, sourceNode, ui, draggable)
+		onDragOver: null, // Callback(targetNode, sourceNode, hitMode, ui, draggable)
+		onDrop: null, // Callback(targetNode, sourceNode, hitMode, ui, draggable)
+		onDragLeave: null // Callback(targetNode, sourceNode, ui, draggable)
+	},
+	ajaxDefaults: { // Used by initAjax option
+		cache: false, // false: Append random '_' argument to the request url to prevent caching.
+		timeout: 0, // >0: Make sure we get an ajax error for invalid URLs
+		dataType: "json" // Expect json format and pass json object to callbacks.
+	},
+	strings: {
+		loading: "Loading…",
+		loadError: "Load error!"
+	},
+	generateIds: false, // Generate id attributes like <span id='dynatree-id-KEY'>
+	idPrefix: "dynatree-id-", // Used to generate node ids like <span id="dynatree-id-<key>">.
+	keyPathSeparator: "/", // Used by node.getKeyPath() and tree.loadKeyPath().
+//    cookieId: "dynatree-cookie", // Choose a more unique name, to allow multiple trees.
+	cookieId: "dynatree", // Choose a more unique name, to allow multiple trees.
+	cookie: {
+		expires: null //7, // Days or Date; null: session cookie
+//      path: "/", // Defaults to current page
+//      domain: "jquery.com",
+//      secure: true
+	},
+	// Class names used, when rendering the HTML markup.
+	// Note:
+	// These settings only apply on initialisation.
+	// If only single entries are passed for options.classNames, all other
+	// values are still set to default.
+	classNames: {
+		container: "dynatree-container",
+		node: "dynatree-node",
+		folder: "dynatree-folder",
+//      document: "dynatree-document",
+
+		empty: "dynatree-empty",
+		vline: "dynatree-vline",
+		expander: "dynatree-expander",
+		connector: "dynatree-connector",
+		checkbox: "dynatree-checkbox",
+		radio: "dynatree-radio",
+		nodeIcon: "dynatree-icon",
+		title: "dynatree-title",
+		noConnector: "dynatree-no-connector",
+
+		nodeError: "dynatree-statusnode-error",
+		nodeWait: "dynatree-statusnode-wait",
+		hidden: "dynatree-hidden",
+		combinedExpanderPrefix: "dynatree-exp-",
+		combinedIconPrefix: "dynatree-ico-",
+		nodeLoading: "dynatree-loading",
+//      disabled: "dynatree-disabled",
+		hasChildren: "dynatree-has-children",
+		active: "dynatree-active",
+		selected: "dynatree-selected",
+		expanded: "dynatree-expanded",
+		lazy: "dynatree-lazy",
+		focused: "dynatree-focused",
+		partsel: "dynatree-partsel",
+		lastsib: "dynatree-lastsib"
+	},
+	debugLevel: 1, // 0:quiet, 1:normal, 2:debug
+
+	// ------------------------------------------------------------------------
+	lastentry: undefined
+};
+//
+if(versionCompare($.ui.version, "1.8") < 0){
+	$.ui.dynatree.defaults = $.ui.dynatree.prototype.options;
+}
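+
+// A minimal initialization sketch (illustrative selector and titles; every
+// option name below comes from the defaults above):
+//
+//     $("#tree").dynatree({
+//         checkbox: true,
+//         selectMode: 2,
+//         children: [
+//             { title: "Folder", isFolder: true, key: "folder1",
+//               children: [ { title: "Sub-item", key: "item1" } ] }
+//         ],
+//         onActivate: function(dtnode) {
+//             alert("Activated: " + dtnode.data.title);
+//         }
+//     });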
+
+/*******************************************************************************
+ * Reserved data attributes for a tree node.
+ */
+$.ui.dynatree.nodedatadefaults = {
+	title: null, // (required) Displayed name of the node (html is allowed here)
+	key: null, // May be used with activate(), select(), find(), ...
+	isFolder: false, // Use a folder icon. Also the node is expandable but not selectable.
+	isLazy: false, // Call onLazyRead(), when the node is expanded for the first time to allow for delayed creation of children.
+	tooltip: null, // Show this popup text.
+	href: null, // Added to the generated <a> tag.
+	icon: null, // Use a custom image (filename relative to tree.options.imagePath). 'null' for default icon, 'false' for no icon.
+	addClass: null, // Class name added to the node's span tag.
+	noLink: false, // Use <span> instead of <a> tag for this node
+	activate: false, // Initial active status.
+	focus: false, // Initial focused status.
+	expand: false, // Initial expanded status.
+	select: false, // Initial selected status.
+	hideCheckbox: false, // Suppress checkbox display for this node.
+	unselectable: false, // Prevent selection.
+//  disabled: false,
+	// The following attributes are only valid if passed to some functions:
+	children: null, // Array of child nodes.
+	// NOTE: we can also add custom attributes here.
+	// This may then also be used in the onActivate(), onSelect() or onLazyTree() callbacks.
+	// ------------------------------------------------------------------------
+	lastentry: undefined
+};
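+
+// A sketch of a node definition using the reserved attributes above
+// (illustrative values; 'rootNode' stands for any DynaTreeNode):
+//
+//     rootNode.addChild({
+//         title: "Report.pdf",
+//         key: "doc-1",
+//         isLazy: true,
+//         tooltip: "Loads children via onLazyRead when first expanded"
+//     });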
+
+/*******************************************************************************
+ * Drag and drop support
+ */
+function _initDragAndDrop(tree) {
+	var dnd = tree.options.dnd || null;
+	// Register 'connectToDynatree' option with ui.draggable
+	if(dnd && (dnd.onDragStart || dnd.onDrop)) {
+		_registerDnd();
+	}
+	// Attach ui.draggable to this Dynatree instance
+	if(dnd && dnd.onDragStart ) {
+		tree.$tree.draggable({
+			addClasses: false,
+			appendTo: "body",
+			containment: false,
+			delay: 0,
+			distance: 4,
+//            revert: false,
+			// slide back, when dropping over non-target
+			revert: dnd.revert !== true ? false : function(dropped){
+				// This is called by ui-draggable._mouseStop() when a drag stops.
+				// Return `true` to let the helper slide back.
+				logMsg("draggable.revert(), dropped=", dropped);
+				if(typeof dropped === "boolean"){
+					// dropped == true, when dropped over a simple, valid droppable target.
+					// false, when dropped outside a drop target.
+					return !dropped;
+				}
+				// Drop comes from another tree. Default behavior is to assume
+				// a valid drop, since we are over a drop-target.
+				// Therefore we have to make an extra check, if the target node
+				// was rejected by a Dynatree callback.
+				var helper = $.ui.ddmanager && $.ui.ddmanager.current && $.ui.ddmanager.current.helper;
+				var isRejected = helper && helper.hasClass("dynatree-drop-reject");
+				return isRejected;
+				},
+			scroll: true, // issue 244: enable scrolling (if ul.dynatree-container)
+			scrollSpeed: 7,
+			scrollSensitivity: 10,
+			// Delegate draggable.start, drag, and stop events to our handler
+			connectToDynatree: true,
+			// Let source tree create the helper element
+			helper: function(event) {
+				var sourceNode = $.ui.dynatree.getNode(event.target);
+				if(!sourceNode){ // issue 211
+					return "<div></div>";
+				}
+				return sourceNode.tree._onDragEvent("helper", sourceNode, null, event, null, null);
+			},
+			start: function(event, ui) {
+				// See issues 211, 268, 278
+//              var sourceNode = $.ui.dynatree.getNode(event.target);
+				var sourceNode = ui.helper.data("dtSourceNode");
+				return !!sourceNode; // Abort dragging if no Node could be found
+			}
+		});
+	}
+	// Attach ui.droppable to this Dynatree instance
+	if(dnd && dnd.onDrop) {
+		tree.$tree.droppable({
+			addClasses: false,
+			tolerance: "pointer",
+//            tolerance: "intersect",
+			greedy: false
+		});
+	}
+}
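+
+// A drag'n'drop wiring sketch (illustrative; assumes DynaTreeNode.move()
+// as in the upstream Dynatree samples; hitMode is 'over', 'before' or 'after'):
+//
+//     $("#tree").dynatree({
+//         dnd: {
+//             onDragStart: function(node) { return true; },
+//             onDragEnter: function(node, sourceNode) {
+//                 return ["before", "after", "over"];
+//             },
+//             onDrop: function(node, sourceNode, hitMode) {
+//                 sourceNode.move(node, hitMode);
+//             }
+//         }
+//     });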
+
+//--- Extend ui.draggable event handling --------------------------------------
+var didRegisterDnd = false;
+var _registerDnd = function() {
+	if(didRegisterDnd){
+		return;
+	}
+	// Register proxy-functions for draggable.start/drag/stop
+	$.ui.plugin.add("draggable", "connectToDynatree", {
+		start: function(event, ui) {
+			// issue 386
+			var draggable = $(this).data("ui-draggable") || $(this).data("draggable"),
+				sourceNode = ui.helper.data("dtSourceNode") || null;
+//          logMsg("draggable-connectToDynatree.start, %s", sourceNode);
+//          logMsg("    this: %o", this);
+//          logMsg("    event: %o", event);
+//          logMsg("    draggable: %o", draggable);
+//          logMsg("    ui: %o", ui);
+
+			if(sourceNode) {
+				// Adjust helper offset, so cursor is slightly outside top/left corner
+//              draggable.offset.click.top -= event.target.offsetTop;
+//              draggable.offset.click.left -= event.target.offsetLeft;
+				draggable.offset.click.top = -2;
+				draggable.offset.click.left = + 16;
+//              logMsg("    draggable2: %o", draggable);
+//              logMsg("    draggable.offset.click FIXED: %s/%s", draggable.offset.click.left, draggable.offset.click.top);
+				// Trigger onDragStart event
+				// TODO: when called as connectTo..., the return value is ignored(?)
+				return sourceNode.tree._onDragEvent("start", sourceNode, null, event, ui, draggable);
+			}
+		},
+		drag: function(event, ui) {
+			// issue 386
+			var draggable = $(this).data("ui-draggable") || $(this).data("draggable"),
+				sourceNode = ui.helper.data("dtSourceNode") || null,
+				prevTargetNode = ui.helper.data("dtTargetNode") || null,
+				targetNode = $.ui.dynatree.getNode(event.target);
+//          logMsg("$.ui.dynatree.getNode(%o): %s", event.target, targetNode);
+//          logMsg("connectToDynatree.drag: helper: %o", ui.helper[0]);
+			if(event.target && !targetNode){
+				// We got a drag event, but the targetNode could not be found
+				// at the event location. This may happen,
+				// 1. if the mouse jumped over the drag helper,
+				// 2. or if a non-dynatree element is dragged
+				// We ignore it:
+				var isHelper = $(event.target).closest("div.dynatree-drag-helper,#dynatree-drop-marker").length > 0;
+				if(isHelper){
+//                  logMsg("Drag event over helper: ignored.");
+					return;
+				}
+			}
+//          logMsg("draggable-connectToDynatree.drag: targetNode(from event): %s, dtTargetNode: %s", targetNode, ui.helper.data("dtTargetNode"));
+			ui.helper.data("dtTargetNode", targetNode);
+			// Leaving a tree node
+			if(prevTargetNode && prevTargetNode !== targetNode ) {
+				prevTargetNode.tree._onDragEvent("leave", prevTargetNode, sourceNode, event, ui, draggable);
+			}
+			if(targetNode){
+				if(!targetNode.tree.options.dnd.onDrop) {
+					// not enabled as drop target
+				} else if(targetNode === prevTargetNode) {
+					// Moving over same node
+					targetNode.tree._onDragEvent("over", targetNode, sourceNode, event, ui, draggable);
+				}else{
+					// Entering this node first time
+					targetNode.tree._onDragEvent("enter", targetNode, sourceNode, event, ui, draggable);
+				}
+			}
+			// else go ahead with standard event handling
+		},
+		stop: function(event, ui) {
+			// issue 386
+			var draggable = $(this).data("ui-draggable") || $(this).data("draggable"),
+				sourceNode = ui.helper.data("dtSourceNode") || null,
+				targetNode = ui.helper.data("dtTargetNode") || null,
+//              mouseDownEvent = draggable._mouseDownEvent,
+				eventType = event.type,
+				dropped = (eventType == "mouseup" && event.which == 1);
+			logMsg("draggable-connectToDynatree.stop: targetNode(from event): %s, dtTargetNode: %s", targetNode, ui.helper.data("dtTargetNode"));
+//          logMsg("draggable-connectToDynatree.stop, %s", sourceNode);
+//          logMsg("    type: %o, downEvent: %o, upEvent: %o", eventType, mouseDownEvent, event);
+//          logMsg("    targetNode: %o", targetNode);
+			if(!dropped){
+				logMsg("Drag was cancelled");
+			}
+			if(targetNode) {
+				if(dropped){
+					targetNode.tree._onDragEvent("drop", targetNode, sourceNode, event, ui, draggable);
+				}
+				targetNode.tree._onDragEvent("leave", targetNode, sourceNode, event, ui, draggable);
+			}
+			if(sourceNode){
+				sourceNode.tree._onDragEvent("stop", sourceNode, null, event, ui, draggable);
+			}
+		}
+	});
+	didRegisterDnd = true;
+};
+
+// ---------------------------------------------------------------------------
+}(jQuery));
diff --git a/client/galaxy/scripts/libs/jquery/jquery.event.drag.js b/client/galaxy/scripts/libs/jquery/jquery.event.drag.js
new file mode 100644
index 0000000..8648e94
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.event.drag.js
@@ -0,0 +1,405 @@
+/*! 
+ * jquery.event.drag - v 2.2
+ * Copyright (c) 2010 Three Dub Media - http://threedubmedia.com
+ * Open Source MIT License - http://threedubmedia.com/code/license
+ */
+// Created: 2008-06-04 
+// Updated: 2012-05-21
+// REQUIRES: jquery 1.7.x
+
+;(function( $ ){
+
+// add the jquery instance method
+$.fn.drag = function( str, arg, opts ){
+	// figure out the event type
+	var type = typeof str == "string" ? str : "",
+	// figure out the event handler...
+	fn = $.isFunction( str ) ? str : $.isFunction( arg ) ? arg : null;
+	// fix the event type
+	if ( type.indexOf("drag") !== 0 ) 
+		type = "drag"+ type;
+	// were options passed
+	opts = ( str == fn ? arg : opts ) || {};
+	// trigger or bind event handler
+	return fn ? this.bind( type, opts, fn ) : this.trigger( type );
+};
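+
+// Usage sketch (illustrative "#box" selector): dd.offsetX/offsetY are
+// computed in drag.properties() below (original position + drag delta).
+//
+//     $("#box").drag(function( ev, dd ){
+//         $( this ).css({ top: dd.offsetY, left: dd.offsetX });
+//     });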
+
+// local refs (increase compression)
+var $event = $.event, 
+$special = $event.special,
+// configure the drag special event 
+drag = $special.drag = {
+	
+	// these are the default settings
+	defaults: {
+		which: 1, // mouse button pressed to start drag sequence
+		distance: 0, // distance dragged before dragstart
+		not: ':input', // selector to suppress dragging on target elements
+		handle: null, // selector to match handle target elements
+		relative: false, // true to use "position", false to use "offset"
+		drop: true, // false to suppress drop events, true or selector to allow
+		click: false // false to suppress click events after dragend (no proxy)
+	},
+	
+	// the key name for stored drag data
+	datakey: "dragdata",
+	
+	// prevent bubbling for better performance
+	noBubble: true,
+	
+	// count bound related events
+	add: function( obj ){ 
+		// read the interaction data
+		var data = $.data( this, drag.datakey ),
+		// read any passed options 
+		opts = obj.data || {};
+		// count another related event
+		data.related += 1;
+		// extend data options bound with this event
+		// don't iterate "opts" in case it is a node 
+		$.each( drag.defaults, function( key, def ){
+			if ( opts[ key ] !== undefined )
+				data[ key ] = opts[ key ];
+		});
+	},
+	
+	// forget unbound related events
+	remove: function(){
+		$.data( this, drag.datakey ).related -= 1;
+	},
+	
+	// configure interaction, capture settings
+	setup: function(){
+		// check for related events
+		if ( $.data( this, drag.datakey ) ) 
+			return;
+		// initialize the drag data with copied defaults
+		var data = $.extend({ related:0 }, drag.defaults );
+		// store the interaction data
+		$.data( this, drag.datakey, data );
+		// bind the mousedown event, which starts drag interactions
+		$event.add( this, "touchstart mousedown", drag.init, data );
+		// prevent image dragging in IE...
+		if ( this.attachEvent ) 
+			this.attachEvent("ondragstart", drag.dontstart ); 
+	},
+	
+	// destroy configured interaction
+	teardown: function(){
+		var data = $.data( this, drag.datakey ) || {};
+		// check for related events
+		if ( data.related ) 
+			return;
+		// remove the stored data
+		$.removeData( this, drag.datakey );
+		// remove the mousedown event
+		$event.remove( this, "touchstart mousedown", drag.init );
+		// enable text selection
+		drag.textselect( true ); 
+		// un-prevent image dragging in IE...
+		if ( this.detachEvent ) 
+			this.detachEvent("ondragstart", drag.dontstart ); 
+	},
+		
+	// initialize the interaction
+	init: function( event ){ 
+		// sorry, only one touch at a time
+		if ( drag.touched ) 
+			return;
+		// the drag/drop interaction data
+		var dd = event.data, results;
+		// check the which directive
+		if ( event.which != 0 && dd.which > 0 && event.which != dd.which ) 
+			return; 
+		// check for suppressed selector
+		if ( $( event.target ).is( dd.not ) ) 
+			return;
+		// check for handle selector
+		if ( dd.handle && !$( event.target ).closest( dd.handle, event.currentTarget ).length ) 
+			return;
+
+		drag.touched = event.type == 'touchstart' ? this : null;
+		dd.propagates = 1;
+		dd.mousedown = this;
+		dd.interactions = [ drag.interaction( this, dd ) ];
+		dd.target = event.target;
+		dd.pageX = event.pageX;
+		dd.pageY = event.pageY;
+		dd.dragging = null;
+		// handle draginit event... 
+		results = drag.hijack( event, "draginit", dd );
+		// early cancel
+		if ( !dd.propagates )
+			return;
+		// flatten the result set
+		results = drag.flatten( results );
+		// insert new interaction elements
+		if ( results && results.length ){
+			dd.interactions = [];
+			$.each( results, function(){
+				dd.interactions.push( drag.interaction( this, dd ) );
+			});
+		}
+		// remember how many interactions are propagating
+		dd.propagates = dd.interactions.length;
+		// locate and init the drop targets
+		if ( dd.drop !== false && $special.drop ) 
+			$special.drop.handler( event, dd );
+		// disable text selection
+		drag.textselect( false ); 
+		// bind additional events...
+		if ( drag.touched )
+			$event.add( drag.touched, "touchmove touchend", drag.handler, dd );
+		else 
+			$event.add( document, "mousemove mouseup", drag.handler, dd );
+		// helps prevent text selection or scrolling
+		if ( !drag.touched || dd.live )
+			return false;
+	},	
+	
+	// returns an interaction object
+	interaction: function( elem, dd ){
+		var offset = $( elem )[ dd.relative ? "position" : "offset" ]() || { top:0, left:0 };
+		return {
+			drag: elem, 
+			callback: new drag.callback(), 
+			droppable: [],
+			offset: offset
+		};
+	},
+	
+	// handle drag-related DOM events
+	handler: function( event ){ 
+		// read the data before hijacking anything
+		var dd = event.data;	
+		// handle various events
+		switch ( event.type ){
+			// mousemove, check distance, start dragging
+			case !dd.dragging && 'touchmove': 
+				event.preventDefault();
+			case !dd.dragging && 'mousemove':
+				//  drag tolerance, x² + y² = distance²
+				if ( Math.pow(  event.pageX-dd.pageX, 2 ) + Math.pow(  event.pageY-dd.pageY, 2 ) < Math.pow( dd.distance, 2 ) ) 
+					break; // distance tolerance not reached
+				event.target = dd.target; // force target from "mousedown" event (fix distance issue)
+				drag.hijack( event, "dragstart", dd ); // trigger "dragstart"
+				if ( dd.propagates ) // "dragstart" not rejected
+					dd.dragging = true; // activate interaction
+			// mousemove, dragging
+			case 'touchmove':
+				event.preventDefault();
+			case 'mousemove':
+				if ( dd.dragging ){
+					// trigger "drag"		
+					drag.hijack( event, "drag", dd );
+					if ( dd.propagates ){
+						// manage drop events
+						if ( dd.drop !== false && $special.drop )
+							$special.drop.handler( event, dd ); // "dropstart", "dropend"							
+						break; // "drag" not rejected, stop		
+					}
+					event.type = "mouseup"; // helps "drop" handler behave
+				}
+			// mouseup, stop dragging
+			case 'touchend': 
+			case 'mouseup': 
+			default:
+				if ( drag.touched )
+					$event.remove( drag.touched, "touchmove touchend", drag.handler ); // remove touch events
+				else 
+					$event.remove( document, "mousemove mouseup", drag.handler ); // remove page events	
+				if ( dd.dragging ){
+					if ( dd.drop !== false && $special.drop )
+						$special.drop.handler( event, dd ); // "drop"
+					drag.hijack( event, "dragend", dd ); // trigger "dragend"	
+				}
+				else {
+					drag.hijack( event, "dragclickonly", dd ); // trigger "dragclickonly"	
+				}
+				drag.textselect( true ); // enable text selection
+				// if suppressing click events...
+				if ( dd.click === false && dd.dragging )
+					$.data( dd.mousedown, "suppress.click", new Date().getTime() + 5 );
+				dd.dragging = drag.touched = false; // deactivate element	
+				break;
+		}
+	},
+		
+	// re-use event object for custom events
+	hijack: function( event, type, dd, x, elem ){
+		// not configured
+		if ( !dd ) 
+			return;
+		// remember the original event and type
+		var orig = { event:event.originalEvent, type:event.type },
+	// is the event drag related or drop related?
+		mode = type.indexOf("drop") ? "drag" : "drop",
+		// iteration vars
+		result, i = x || 0, ia, $elems, callback,
+		len = !isNaN( x ) ? x : dd.interactions.length;
+		// modify the event type
+		event.type = type;
+		// remove the original event
+		event.originalEvent = null;
+		// initialize the results
+		dd.results = [];
+		// handle each interacted element
+		do if ( ia = dd.interactions[ i ] ){
+			// validate the interaction
+			if ( type !== "dragend" && ia.cancelled )
+				continue;
+			// set the dragdrop properties on the event object
+			callback = drag.properties( event, dd, ia );
+			// prepare for more results
+			ia.results = [];
+			// handle each element
+			$( elem || ia[ mode ] || dd.droppable ).each(function( p, subject ){
+				// identify drag or drop targets individually
+				callback.target = subject;
+				// force propagation of the custom event
+				event.isPropagationStopped = function(){ return false; };
+				// handle the event	
+				result = subject ? $event.dispatch.call( subject, event, callback ) : null;
+				// stop the drag interaction for this element
+				if ( result === false ){
+					if ( mode == "drag" ){
+						ia.cancelled = true;
+						dd.propagates -= 1;
+					}
+					if ( type == "drop" ){
+						ia[ mode ][p] = null;
+					}
+				}
+				// assign any dropinit elements
+				else if ( type == "dropinit" )
+					ia.droppable.push( drag.element( result ) || subject );
+				// accept a returned proxy element 
+				if ( type == "dragstart" )
+					ia.proxy = $( drag.element( result ) || ia.drag )[0];
+				// remember this result	
+				ia.results.push( result );
+				// forget the event result, for recycling
+				delete event.result;
+				// break on cancelled handler
+				if ( type !== "dropinit" )
+					return result;
+			});	
+			// flatten the results	
+			dd.results[ i ] = drag.flatten( ia.results );	
+			// accept a set of valid drop targets
+			if ( type == "dropinit" )
+				ia.droppable = drag.flatten( ia.droppable );
+			// locate drop targets
+			if ( type == "dragstart" && !ia.cancelled )
+				callback.update(); 
+		}
+		while ( ++i < len )
+		// restore the original event & type
+		event.type = orig.type;
+		event.originalEvent = orig.event;
+		// return all handler results
+		return drag.flatten( dd.results );
+	},
+		
+	// extend the callback object with drag/drop properties...
+	properties: function( event, dd, ia ){		
+		var obj = ia.callback;
+		// elements
+		obj.drag = ia.drag;
+		obj.proxy = ia.proxy || ia.drag;
+		// starting mouse position
+		obj.startX = dd.pageX;
+		obj.startY = dd.pageY;
+		// current distance dragged
+		obj.deltaX = event.pageX - dd.pageX;
+		obj.deltaY = event.pageY - dd.pageY;
+		// original element position
+		obj.originalX = ia.offset.left;
+		obj.originalY = ia.offset.top;
+		// adjusted element position
+		obj.offsetX = obj.originalX + obj.deltaX; 
+		obj.offsetY = obj.originalY + obj.deltaY;
+		// assign the drop targets information
+		obj.drop = drag.flatten( ( ia.drop || [] ).slice() );
+		obj.available = drag.flatten( ( ia.droppable || [] ).slice() );
+		return obj;	
+	},
+	
+	// determine if the argument is an element or jquery instance
+	element: function( arg ){
+		if ( arg && ( arg.jquery || arg.nodeType == 1 ) )
+			return arg;
+	},
+	
+	// flatten nested jquery objects and arrays into a single dimension array
+	flatten: function( arr ){
+		return $.map( arr, function( member ){
+			return member && member.jquery ? $.makeArray( member ) : 
+				member && member.length ? drag.flatten( member ) : member;
+		});
+	},
+	
+	// toggles text selection attributes ON (true) or OFF (false)
+	textselect: function( bool ){ 
+		$( document )[ bool ? "unbind" : "bind" ]("selectstart", drag.dontstart )
+			.css("MozUserSelect", bool ? "" : "none" );
+		// .attr("unselectable", bool ? "off" : "on" )
+		document.unselectable = bool ? "off" : "on"; 
+	},
+	
+	// suppress "selectstart" and "ondragstart" events
+	dontstart: function(){ 
+		return false; 
+	},
+	
+	// a callback instance constructor
+	callback: function(){}
+	
+};
+
+// callback methods
+drag.callback.prototype = {
+	update: function(){
+		if ( $special.drop && this.available.length )
+			$.each( this.available, function( i ){
+				$special.drop.locate( this, i );
+			});
+	}
+};
+
+// patch $.event.$dispatch to allow suppressing clicks
+var $dispatch = $event.dispatch;
+$event.dispatch = function( event ){
+	if ( $.data( this, "suppress."+ event.type ) - new Date().getTime() > 0 ){
+		$.removeData( this, "suppress."+ event.type );
+		return;
+	}
+	return $dispatch.apply( this, arguments );
+};
+
+// event fix hooks for touch events...
+var touchHooks = 
+$event.fixHooks.touchstart = 
+$event.fixHooks.touchmove = 
+$event.fixHooks.touchend =
+$event.fixHooks.touchcancel = {
+	props: "clientX clientY pageX pageY screenX screenY".split( " " ),
+	filter: function( event, orig ) {
+		if ( orig ){
+			var touched = ( orig.touches && orig.touches[0] )
+				|| ( orig.changedTouches && orig.changedTouches[0] )
+				|| null; 
+			// iOS webkit: touchstart, touchmove, touchend
+			if ( touched ) 
+				$.each( touchHooks.props, function( i, prop ){
+					event[ prop ] = touched[ prop ];
+				});
+		}
+		return event;
+	}
+};
+
+// share the same special event configuration with related events...
+$special.draginit = $special.dragstart = $special.dragend = drag;
+
+})( jQuery );
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/jquery/jquery.event.drop.js b/client/galaxy/scripts/libs/jquery/jquery.event.drop.js
new file mode 100644
index 0000000..7599ef9
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.event.drop.js
@@ -0,0 +1,302 @@
+/*! 
+ * jquery.event.drop - v 2.2
+ * Copyright (c) 2010 Three Dub Media - http://threedubmedia.com
+ * Open Source MIT License - http://threedubmedia.com/code/license
+ */
+// Created: 2008-06-04 
+// Updated: 2012-05-21
+// REQUIRES: jquery 1.7.x, event.drag 2.2
+
+;(function($){ // secure $ jQuery alias
+
+// Events: drop, dropstart, dropend
+
+// add the jquery instance method
+$.fn.drop = function( str, arg, opts ){
+	// figure out the event type
+	var type = typeof str == "string" ? str : "",
+	// figure out the event handler...
+	fn = $.isFunction( str ) ? str : $.isFunction( arg ) ? arg : null;
+	// fix the event type
+	if ( type.indexOf("drop") !== 0 ) 
+		type = "drop"+ type;
+	// were options passed
+	opts = ( str == fn ? arg : opts ) || {};
+	// trigger or bind event handler
+	return fn ? this.bind( type, opts, fn ) : this.trigger( type );
+};
+
+// DROP MANAGEMENT UTILITY
+// returns filtered drop target elements, caches their positions
+$.drop = function( opts ){ 
+	opts = opts || {};
+	// safely set new options...
+	drop.multi = opts.multi === true ? Infinity : 
+		opts.multi === false ? 1 : !isNaN( opts.multi ) ? opts.multi : drop.multi;
+	drop.delay = opts.delay || drop.delay;
+	drop.tolerance = $.isFunction( opts.tolerance ) ? opts.tolerance : 
+		opts.tolerance === null ? null : drop.tolerance;
+	drop.mode = opts.mode || drop.mode || 'intersect';
+};
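+
+// Usage sketch (illustrative ".target" selector): "start"/"end" are
+// prefixed to "dropstart"/"dropend" by $.fn.drop above; mode names are
+// defined in drop.modes below.
+//
+//     $(".target").drop("start", function(){ $(this).addClass("active"); })
+//                 .drop(function( ev, dd ){ /* element was dropped here */ })
+//                 .drop("end", function(){ $(this).removeClass("active"); });
+//     $.drop({ mode: "intersect", multi: true });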
+
+// local refs (increase compression)
+var $event = $.event, 
+$special = $event.special,
+// configure the drop special event
+drop = $.event.special.drop = {
+
+	// these are the default settings
+	multi: 1, // allow multiple drop winners per dragged element
+	delay: 20, // async timeout delay
+	mode: 'overlap', // drop tolerance mode
+		
+	// internal cache
+	targets: [], 
+	
+	// the key name for stored drop data
+	datakey: "dropdata",
+		
+	// prevent bubbling for better performance
+	noBubble: true,
+	
+	// count bound related events
+	add: function( obj ){ 
+		// read the interaction data
+		var data = $.data( this, drop.datakey );
+		// count another related event
+		data.related += 1;
+	},
+	
+	// forget unbound related events
+	remove: function(){
+		$.data( this, drop.datakey ).related -= 1;
+	},
+	
+	// configure the interactions
+	setup: function(){
+		// check for related events
+		if ( $.data( this, drop.datakey ) ) 
+			return;
+		// initialize the drop element data
+		var data = { 
+			related: 0,
+			active: [],
+			anyactive: 0,
+			winner: 0,
+			location: {}
+		};
+		// store the drop data on the element
+		$.data( this, drop.datakey, data );
+		// store the drop target in internal cache
+		drop.targets.push( this );
+	},
+	
+	// destroy the configured interaction
+	teardown: function(){ 
+		var data = $.data( this, drop.datakey ) || {};
+		// check for related events
+		if ( data.related ) 
+			return;
+		// remove the stored data
+		$.removeData( this, drop.datakey );
+		// reference the targeted element
+		var element = this;
+		// remove from the internal cache
+		drop.targets = $.grep( drop.targets, function( target ){ 
+			return ( target !== element ); 
+		});
+	},
+	
+	// shared event handler
+	handler: function( event, dd ){ 
+		// local vars
+		var results, $targets;
+		// make sure the right data is available
+		if ( !dd ) 
+			return;
+		// handle various events
+		switch ( event.type ){
+			// draginit, from $.event.special.drag
+			case 'mousedown': // DROPINIT >>
+			case 'touchstart': // DROPINIT >>
+				// collect and assign the drop targets
+				$targets =  $( drop.targets );
+				if ( typeof dd.drop == "string" )
+					$targets = $targets.filter( dd.drop );
+				// reset drop data winner properties
+				$targets.each(function(){
+					var data = $.data( this, drop.datakey );
+					data.active = [];
+					data.anyactive = 0;
+					data.winner = 0;
+				});
+				// set available target elements
+				dd.droppable = $targets;
+				// activate drop targets for the initial element being dragged
+				$special.drag.hijack( event, "dropinit", dd ); 
+				break;
+			// drag, from $.event.special.drag
+			case 'mousemove': // TOLERATE >>
+			case 'touchmove': // TOLERATE >>
+				drop.event = event; // store the mousemove event
+				if ( !drop.timer )
+					// monitor drop targets
+					drop.tolerate( dd ); 
+				break;
+			// dragend, from $.event.special.drag
+			case 'mouseup': // DROP >> DROPEND >>
+			case 'touchend': // DROP >> DROPEND >>
+				drop.timer = clearTimeout( drop.timer ); // delete timer	
+				if ( dd.propagates ){
+					$special.drag.hijack( event, "drop", dd ); 
+					$special.drag.hijack( event, "dropend", dd ); 
+				}
+				break;
+				
+		}
+	},
+		
+	// returns the location positions of an element
+	locate: function( elem, index ){ 
+		var data = $.data( elem, drop.datakey ),
+		$elem = $( elem ), 
+		posi = $elem.offset() || {}, 
+		height = $elem.outerHeight(), 
+		width = $elem.outerWidth(),
+		location = { 
+			elem: elem, 
+			width: width, 
+			height: height,
+			top: posi.top, 
+			left: posi.left, 
+			right: posi.left + width, 
+			bottom: posi.top + height
+		};
+		// drag elements might not have dropdata
+		if ( data ){
+			data.location = location;
+			data.index = index;
+			data.elem = elem;
+		}
+		return location;
+	},
+	
+	// test the location positions of an element against another OR an X,Y coord
+	contains: function( target, test ){ // target { location } contains test [x,y] or { location }
+		return ( ( test[0] || test.left ) >= target.left && ( test[0] || test.right ) <= target.right
+			&& ( test[1] || test.top ) >= target.top && ( test[1] || test.bottom ) <= target.bottom ); 
+	},
+	
+	// stored tolerance modes
+	modes: { // fn scope: "$.event.special.drop" object 
+		// target with mouse wins, else target with most overlap wins
+		'intersect': function( event, proxy, target ){
+			return this.contains( target, [ event.pageX, event.pageY ] ) ? // check cursor
+				1e9 : this.modes.overlap.apply( this, arguments ); // check overlap
+		},
+		// target with most overlap wins	
+		'overlap': function( event, proxy, target ){
+			// calculate the area of overlap...
+			return Math.max( 0, Math.min( target.bottom, proxy.bottom ) - Math.max( target.top, proxy.top ) )
+				* Math.max( 0, Math.min( target.right, proxy.right ) - Math.max( target.left, proxy.left ) );
+		},
+		// proxy is completely contained within target bounds	
+		'fit': function( event, proxy, target ){
+			return this.contains( target, proxy ) ? 1 : 0;
+		},
+		// center of the proxy is contained within target bounds	
+		'middle': function( event, proxy, target ){
+			return this.contains( target, [ proxy.left + proxy.width * .5, proxy.top + proxy.height * .5 ] ) ? 1 : 0;
+		}
+	},	
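+
+	// A custom tolerance sketch: tolerate() below invokes it as
+	// tolerance.call( drop, event, proxy, target ), so 'this' is this
+	// drop object and contains() is available; return a numeric score:
+	//
+	//     $.drop({ tolerance: function( event, proxy, target ){
+	//         return this.contains( target, [ event.pageX, event.pageY ] ) ? 1 : 0;
+	//     }});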
+	
+	// sort drop target cache by winner (desc), then index (asc)
+	sort: function( a, b ){
+		return ( b.winner - a.winner ) || ( a.index - b.index );
+	},
+		
+	// async, recursive tolerance execution
+	tolerate: function( dd ){		
+		// declare local refs
+		var i, drp, drg, data, arr, len, elem,
+		// interaction iteration variables
+		x = 0, ia, end = dd.interactions.length,
+		// determine the mouse coords
+		xy = [ drop.event.pageX, drop.event.pageY ],
+		// custom or stored tolerance fn
+		tolerance = drop.tolerance || drop.modes[ drop.mode ];
+		// go through each passed interaction...
+		do if ( ia = dd.interactions[x] ){
+			// check valid interaction
+			if ( !ia )
+				return; 
+			// initialize or clear the drop data
+			ia.drop = [];
+			// holds the drop elements
+			arr = []; 
+			len = ia.droppable.length;
+			// determine the proxy location, if needed
+			if ( tolerance )
+				drg = drop.locate( ia.proxy ); 
+			// reset the loop
+			i = 0;
+			// loop each stored drop target
+			do if ( elem = ia.droppable[i] ){ 
+				data = $.data( elem, drop.datakey );
+				drp = data.location;
+				if ( !drp ) continue;
+				// find a winner: tolerance function is defined, call it
+				data.winner = tolerance ? tolerance.call( drop, drop.event, drg, drp ) 
+					// mouse position is always the fallback
+					: drop.contains( drp, xy ) ? 1 : 0; 
+				arr.push( data );	
+			} while ( ++i < len ); // loop 
+			// sort the drop targets
+			arr.sort( drop.sort );			
+			// reset the loop
+			i = 0;
+			// loop through all of the targets again
+			do if ( data = arr[ i ] ){
+				// winners...
+				if ( data.winner && ia.drop.length < drop.multi ){
+					// new winner... dropstart
+					if ( !data.active[x] && !data.anyactive ){
+						// check to make sure that this is not prevented
+						if ( $special.drag.hijack( drop.event, "dropstart", dd, x, data.elem )[0] !== false ){ 	
+							data.active[x] = 1;
+							data.anyactive += 1;
+						}
+						// if false, it is not a winner
+						else
+							data.winner = 0;
+					}
+					// if it is still a winner
+					if ( data.winner )
+						ia.drop.push( data.elem );
+				}
+				// losers... 
+				else if ( data.active[x] && data.anyactive == 1 ){
+					// former winner... dropend
+					$special.drag.hijack( drop.event, "dropend", dd, x, data.elem ); 
+					data.active[x] = 0;
+					data.anyactive -= 1;
+				}
+			} while ( ++i < len ); // loop 		
+		} while ( ++x < end ) // loop
+		// check if the mouse is still moving or is idle
+		if ( drop.last && xy[0] == drop.last.pageX && xy[1] == drop.last.pageY ) 
+			delete drop.timer; // idle, don't recurse
+		else  // recurse
+			drop.timer = setTimeout(function(){ 
+				drop.tolerate( dd ); 
+			}, drop.delay );
+		// remember event, to compare idleness
+		drop.last = drop.event; 
+	}
+	
+};
+
+// share the same special event configuration with related events...
+$special.dropinit = $special.dropstart = $special.dropend = drop;
+
+})(jQuery); // confine scope	
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/jquery/jquery.event.hover.js b/client/galaxy/scripts/libs/jquery/jquery.event.hover.js
new file mode 100644
index 0000000..c607990
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.event.hover.js
@@ -0,0 +1,84 @@
+;(function($){ // secure $ jQuery alias
+/*******************************************************************************************/	
+// jquery.event.hover.js 
+// Copyright (c) 2008, Three Dub Media (http://threedubmedia.com)
+// Licensed under the MIT License (MIT-LICENSE.txt)
+// http://www.opensource.org/licenses/mit-license.php
+//
+// JQuery 1.9+ compatible version
+//
+// Optional settings :
+// $.event.special.hover.delay = 100;
+// Defines the delay (msec) while mouse is inside the element before checking the speed
+// $.event.special.hover.speed = 100;
+// Defines the maximum speed (px/sec) the mouse may be moving to trigger the hover event
+/*******************************************************************************************/
+
+// save the old jquery "hover" method
+$.fn._hover = $.fn.hover;
+
+// jquery method 
+$.fn.hover = function( fn1, fn2, fn3 ) {
+	if ( fn3 ) this.bind('hoverstart', fn1 ); // 3 args
+	if ( fn2 ) this.bind('hoverend', fn3 ? fn3 : fn2 ); // 2+ args
+	return !fn1 ? this.trigger('hover') // 0 args 
+		: this.bind('hover', fn3 ? fn2 : fn1 ); // 1+ args
+	};	
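+
+// Usage sketch: with three handlers, $.fn.hover above binds them to
+// "hoverstart", "hover", and "hoverend" respectively (illustrative selector):
+//
+//     $(".menu").hover(
+//         function( ev ){ /* hoverstart: mouse entered */ },
+//         function( ev ){ /* hover: speed dropped below the threshold */ },
+//         function( ev ){ /* hoverend: mouse left */ });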
+
+// special event configuration
+var hover = $.event.special.hover = {
+	delay: 100, // milliseconds
+	speed: 100, // pixels per second
+	setup: function( data ){
+		data = $.extend({ speed: hover.speed, delay: hover.delay, hovered:0 }, data||{} );
+		$.event.add( this, "mouseenter mouseleave", hoverHandler, data );
+		},
+	teardown: function(){
+		$.event.remove( this, "mouseenter mouseleave", hoverHandler );
+		}
+	};
+
+// shared event handler
+function hoverHandler( event ){
+	var data = event.data || event;
+	switch ( event.type ){
+		case 'mouseenter': // mouseover
+			data.dist2 = 0; // init mouse distance²
+			data.event = event; // store the event
+			event.type = "hoverstart"; // hijack event
+            if($.event.dispatch.call(this, event) !== false) {
+            	data.elem = this; // ref to the current element
+				$.event.add( this, "mousemove", hoverHandler, data ); // track the mouse
+				data.timer = setTimeout( compare, data.delay ); // start async compare
+				}
+			break;
+		case 'mousemove': // track the event, mouse distance² = x² + y²
+			data.dist2 += Math.pow( event.pageX-data.event.pageX, 2 ) 
+				+ Math.pow( event.pageY-data.event.pageY, 2 ); 
+			data.event = event; // store current event
+			break;
+		case 'mouseleave': // mouseout
+			clearTimeout( data.timer ); // uncompare
+			if ( data.hovered ){ 
+				event.type = "hoverend"; // hijack event
+                $.event.dispatch.call(this, event); // handle "hoverend"
+				data.hovered--; // reset flag
+				}
+			else $.event.remove( data.elem, "mousemove", hoverHandler ); // untrack
+			break;
+		default: // timeout compare // distance² = x² + y² = ( speed * time )²
+			if ( data.dist2 <= Math.pow( data.speed*( data.delay/1e3 ), 2 ) ){ // speed acceptable
+				$.event.remove( data.elem, "mousemove", hoverHandler ); // untrack
+				data.event.type = "hover"; // hijack event
+                if($.event.dispatch.call(data.elem, data.event) !== false) // handle "hover"
+					data.hovered++; // flag for "hoverend"
+				}
+			else data.timer = setTimeout( compare, data.delay ); // async recurse
+			data.dist2 = 0; // reset distance² for next compare
+			break;
+		}
+	function compare(){ hoverHandler( data ); }; // timeout/recursive function
+	};
+	
+/*******************************************************************************************/
+})(jQuery); // confine scope
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/jquery/jquery.form.js b/client/galaxy/scripts/libs/jquery/jquery.form.js
new file mode 100644
index 0000000..69dc982
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.form.js
@@ -0,0 +1,1204 @@
+/*!
+ * jQuery Form Plugin
+ * version: 3.45.0-2013.10.17
+ * Requires jQuery v1.5 or later
+ * Copyright (c) 2013 M. Alsup
+ * Examples and documentation at: http://malsup.com/jquery/form/
+ * Project repository: https://github.com/malsup/form
+ * Dual licensed under the MIT and GPL licenses.
+ * https://github.com/malsup/form#copyright-and-license
+ */
+/*global ActiveXObject */
+;(function($) {
+"use strict";
+
+/*
+    Usage Note:
+    -----------
+    Do not use both ajaxSubmit and ajaxForm on the same form.  These
+    functions are mutually exclusive.  Use ajaxSubmit if you want
+    to bind your own submit handler to the form.  For example,
+
+    $(document).ready(function() {
+        $('#myForm').on('submit', function(e) {
+            e.preventDefault(); // <-- important
+            $(this).ajaxSubmit({
+                target: '#output'
+            });
+        });
+    });
+
+    Use ajaxForm when you want the plugin to manage all the event binding
+    for you.  For example,
+
+    $(document).ready(function() {
+        $('#myForm').ajaxForm({
+            target: '#output'
+        });
+    });
+
+    You can also use ajaxForm with delegation (requires jQuery v1.7+), so the
+    form does not have to exist when you invoke ajaxForm:
+
+    $('#myForm').ajaxForm({
+        delegation: true,
+        target: '#output'
+    });
+
+    When using ajaxForm, the ajaxSubmit function will be invoked for you
+    at the appropriate time.
+*/
+
+/**
+ * Feature detection
+ */
+var feature = {};
+feature.fileapi = $("<input type='file'/>").get(0).files !== undefined;
+feature.formdata = window.FormData !== undefined;
+
+var hasProp = !!$.fn.prop;
+
+// attr2 uses prop when it can but checks the return type for
+// an expected string.  this accounts for the case where a form 
+// contains inputs with names like "action" or "method"; in those
+// cases "prop" returns the element
+$.fn.attr2 = function() {
+    if ( ! hasProp )
+        return this.attr.apply(this, arguments);
+    var val = this.prop.apply(this, arguments);
+    if ( ( val && val.jquery ) || typeof val === 'string' )
+        return val;
+    return this.attr.apply(this, arguments);
+};
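+
+/* Illustrative sketch of the quirk attr2 guards against (the markup is
+   hypothetical). An input named "action" shadows the form's DOM property:
+
+   <form action="/save"><input name="action" value="x"/></form>
+
+   $('form').prop('action');  // -> the <input> element, not a string
+   $('form').attr2('action'); // -> "/save", because attr2 falls back to attr()
+*/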
+
+/**
+ * ajaxSubmit() provides a mechanism for immediately submitting
+ * an HTML form using AJAX.
+ */
+$.fn.ajaxSubmit = function(options) {
+    /*jshint scripturl:true */
+
+    // fast fail if nothing selected (http://dev.jquery.com/ticket/2752)
+    if (!this.length) {
+        log('ajaxSubmit: skipping submit process - no element selected');
+        return this;
+    }
+
+    var method, action, url, $form = this;
+
+    if (typeof options == 'function') {
+        options = { success: options };
+    }
+    else if ( options === undefined ) {
+        options = {};
+    }
+
+    method = options.type || this.attr2('method');
+    action = options.url  || this.attr2('action');
+
+    url = (typeof action === 'string') ? $.trim(action) : '';
+    url = url || window.location.href || '';
+    if (url) {
+        // clean url (don't include hash value)
+        url = (url.match(/^([^#]+)/)||[])[1];
+    }
+
+    options = $.extend(true, {
+        url:  url,
+        success: $.ajaxSettings.success,
+        type: method || $.ajaxSettings.type,
+        iframeSrc: /^https/i.test(window.location.href || '') ? 'javascript:false' : 'about:blank'
+    }, options);
+
+    // hook for manipulating the form data before it is extracted;
+    // convenient for use with rich editors like tinyMCE or FCKEditor
+    var veto = {};
+    this.trigger('form-pre-serialize', [this, options, veto]);
+    if (veto.veto) {
+        log('ajaxSubmit: submit vetoed via form-pre-serialize trigger');
+        return this;
+    }
+
+    // provide opportunity to alter form data before it is serialized
+    if (options.beforeSerialize && options.beforeSerialize(this, options) === false) {
+        log('ajaxSubmit: submit aborted via beforeSerialize callback');
+        return this;
+    }
+
+    var traditional = options.traditional;
+    if ( traditional === undefined ) {
+        traditional = $.ajaxSettings.traditional;
+    }
+
+    var elements = [];
+    var qx, a = this.formToArray(options.semantic, elements);
+    if (options.data) {
+        options.extraData = options.data;
+        qx = $.param(options.data, traditional);
+    }
+
+    // give pre-submit callback an opportunity to abort the submit
+    if (options.beforeSubmit && options.beforeSubmit(a, this, options) === false) {
+        log('ajaxSubmit: submit aborted via beforeSubmit callback');
+        return this;
+    }
+
+    // fire vetoable 'validate' event
+    this.trigger('form-submit-validate', [a, this, options, veto]);
+    if (veto.veto) {
+        log('ajaxSubmit: submit vetoed via form-submit-validate trigger');
+        return this;
+    }
+
+    var q = $.param(a, traditional);
+    if (qx) {
+        q = ( q ? (q + '&' + qx) : qx );
+    }
+    if (options.type.toUpperCase() == 'GET') {
+        options.url += (options.url.indexOf('?') >= 0 ? '&' : '?') + q;
+        options.data = null;  // data is null for 'get'
+    }
+    else {
+        options.data = q; // data is the query string for 'post'
+    }
+
+    var callbacks = [];
+    if (options.resetForm) {
+        callbacks.push(function() { $form.resetForm(); });
+    }
+    if (options.clearForm) {
+        callbacks.push(function() { $form.clearForm(options.includeHidden); });
+    }
+
+    // perform a load on the target only if dataType is not provided
+    if (!options.dataType && options.target) {
+        var oldSuccess = options.success || function(){};
+        callbacks.push(function(data) {
+            var fn = options.replaceTarget ? 'replaceWith' : 'html';
+            $(options.target)[fn](data).each(oldSuccess, arguments);
+        });
+    }
+    else if (options.success) {
+        callbacks.push(options.success);
+    }
+
+    options.success = function(data, status, xhr) { // jQuery 1.4+ passes xhr as 3rd arg
+        var context = options.context || this;  // jQuery 1.4+ supports scope context
+        for (var i=0, max=callbacks.length; i < max; i++) {
+            callbacks[i].apply(context, [data, status, xhr || $form, $form]);
+        }
+    };
+
+    if (options.error) {
+        var oldError = options.error;
+        options.error = function(xhr, status, error) {
+            var context = options.context || this;
+            oldError.apply(context, [xhr, status, error, $form]);
+        };
+    }
+
+    if (options.complete) {
+        var oldComplete = options.complete;
+        options.complete = function(xhr, status) {
+            var context = options.context || this;
+            oldComplete.apply(context, [xhr, status, $form]);
+        };
+    }
+
+    // are there files to upload?
+
+    // [value] (issue #113), also see comment:
+    // https://github.com/malsup/form/commit/588306aedba1de01388032d5f42a60159eea9228#commitcomment-2180219
+    var fileInputs = $('input[type=file]:enabled', this).filter(function() { return $(this).val() !== ''; });
+
+    var hasFileInputs = fileInputs.length > 0;
+    var mp = 'multipart/form-data';
+    var multipart = ($form.attr('enctype') == mp || $form.attr('encoding') == mp);
+
+    var fileAPI = feature.fileapi && feature.formdata;
+    log("fileAPI :" + fileAPI);
+    var shouldUseFrame = (hasFileInputs || multipart) && !fileAPI;
+
+    var jqxhr;
+
+    // options.iframe allows user to force iframe mode
+    // 06-NOV-09: now defaulting to iframe mode if file input is detected
+    if (options.iframe !== false && (options.iframe || shouldUseFrame)) {
+        // hack to fix Safari hang (thanks to Tim Molendijk for this)
+        // see:  http://groups.google.com/group/jquery-dev/browse_thread/thread/36395b7ab510dd5d
+        if (options.closeKeepAlive) {
+            $.get(options.closeKeepAlive, function() {
+                jqxhr = fileUploadIframe(a);
+            });
+        }
+        else {
+            jqxhr = fileUploadIframe(a);
+        }
+    }
+    else if ((hasFileInputs || multipart) && fileAPI) {
+        jqxhr = fileUploadXhr(a);
+    }
+    else {
+        jqxhr = $.ajax(options);
+    }
+
+    $form.removeData('jqxhr').data('jqxhr', jqxhr);
+
+    // clear element array
+    for (var k=0; k < elements.length; k++)
+        elements[k] = null;
+
+    // fire 'notify' event
+    this.trigger('form-submit-notify', [this, options]);
+    return this;
+
+    // utility fn for deep serialization
+    function deepSerialize(extraData){
+        var serialized = $.param(extraData, options.traditional).split('&');
+        var len = serialized.length;
+        var result = [];
+        var i, part;
+        for (i=0; i < len; i++) {
+            // #252; undo param space replacement
+            serialized[i] = serialized[i].replace(/\+/g,' ');
+            part = serialized[i].split('=');
+            // #278; use array instead of object storage, favoring array serializations
+            result.push([decodeURIComponent(part[0]), decodeURIComponent(part[1])]);
+        }
+        return result;
+    }
+
+    // XMLHttpRequest Level 2 file uploads (big hat tip to francois2metz)
+    function fileUploadXhr(a) {
+        var formdata = new FormData();
+
+        for (var i=0; i < a.length; i++) {
+            formdata.append(a[i].name, a[i].value);
+        }
+
+        if (options.extraData) {
+            var serializedData = deepSerialize(options.extraData);
+            for (i=0; i < serializedData.length; i++)
+                if (serializedData[i])
+                    formdata.append(serializedData[i][0], serializedData[i][1]);
+        }
+
+        options.data = null;
+
+        var s = $.extend(true, {}, $.ajaxSettings, options, {
+            contentType: false,
+            processData: false,
+            cache: false,
+            type: method || 'POST'
+        });
+
+        if (options.uploadProgress) {
+            // workaround because jqXHR does not expose upload property
+            s.xhr = function() {
+                var xhr = $.ajaxSettings.xhr();
+                if (xhr.upload) {
+                    xhr.upload.addEventListener('progress', function(event) {
+                        var percent = 0;
+                        var position = event.loaded || event.position; /*event.position is deprecated*/
+                        var total = event.total;
+                        if (event.lengthComputable) {
+                            percent = Math.ceil(position / total * 100);
+                        }
+                        options.uploadProgress(event, position, total, percent);
+                    }, false);
+                }
+                return xhr;
+            };
+        }
+
+        s.data = null;
+        var beforeSend = s.beforeSend;
+        s.beforeSend = function(xhr, o) {
+            //Send FormData() provided by user
+            if (options.formData)
+                o.data = options.formData;
+            else
+                o.data = formdata;
+            if(beforeSend)
+                beforeSend.call(this, xhr, o);
+        };
+        return $.ajax(s);
+    }
+
+    // private function for handling file uploads (hat tip to YAHOO!)
+    function fileUploadIframe(a) {
+        var form = $form[0], el, i, s, g, id, $io, io, xhr, sub, n, timedOut, timeoutHandle;
+        var deferred = $.Deferred();
+
+        // #341
+        deferred.abort = function(status) {
+            xhr.abort(status);
+        };
+
+        if (a) {
+            // ensure that every serialized input is still enabled
+            for (i=0; i < elements.length; i++) {
+                el = $(elements[i]);
+                if ( hasProp )
+                    el.prop('disabled', false);
+                else
+                    el.removeAttr('disabled');
+            }
+        }
+
+        s = $.extend(true, {}, $.ajaxSettings, options);
+        s.context = s.context || s;
+        id = 'jqFormIO' + (new Date().getTime());
+        if (s.iframeTarget) {
+            $io = $(s.iframeTarget);
+            n = $io.attr2('name');
+            if (!n)
+                 $io.attr2('name', id);
+            else
+                id = n;
+        }
+        else {
+            $io = $('<iframe name="' + id + '" src="'+ s.iframeSrc +'" />');
+            $io.css({ position: 'absolute', top: '-1000px', left: '-1000px' });
+        }
+        io = $io[0];
+
+
+        xhr = { // mock object
+            aborted: 0,
+            responseText: null,
+            responseXML: null,
+            status: 0,
+            statusText: 'n/a',
+            getAllResponseHeaders: function() {},
+            getResponseHeader: function() {},
+            setRequestHeader: function() {},
+            abort: function(status) {
+                var e = (status === 'timeout' ? 'timeout' : 'aborted');
+                log('aborting upload... ' + e);
+                this.aborted = 1;
+
+                try { // #214, #257
+                    if (io.contentWindow.document.execCommand) {
+                        io.contentWindow.document.execCommand('Stop');
+                    }
+                }
+                catch(ignore) {}
+
+                $io.attr('src', s.iframeSrc); // abort op in progress
+                xhr.error = e;
+                if (s.error)
+                    s.error.call(s.context, xhr, e, status);
+                if (g)
+                    $.event.trigger("ajaxError", [xhr, s, e]);
+                if (s.complete)
+                    s.complete.call(s.context, xhr, e);
+            }
+        };
+
+        g = s.global;
+        // trigger ajax global events so that activity/block indicators work like normal
+        if (g && 0 === $.active++) {
+            $.event.trigger("ajaxStart");
+        }
+        if (g) {
+            $.event.trigger("ajaxSend", [xhr, s]);
+        }
+
+        if (s.beforeSend && s.beforeSend.call(s.context, xhr, s) === false) {
+            if (s.global) {
+                $.active--;
+            }
+            deferred.reject();
+            return deferred;
+        }
+        if (xhr.aborted) {
+            deferred.reject();
+            return deferred;
+        }
+
+        // add submitting element to data if we know it
+        sub = form.clk;
+        if (sub) {
+            n = sub.name;
+            if (n && !sub.disabled) {
+                s.extraData = s.extraData || {};
+                s.extraData[n] = sub.value;
+                if (sub.type == "image") {
+                    s.extraData[n+'.x'] = form.clk_x;
+                    s.extraData[n+'.y'] = form.clk_y;
+                }
+            }
+        }
+
+        var CLIENT_TIMEOUT_ABORT = 1;
+        var SERVER_ABORT = 2;
+                
+        function getDoc(frame) {
+            /* In IE8 under SSL, contentWindow and contentDocument do not
+             * carry the protocol property; frame.document is the only valid
+             * response document there, since the protocol is known on it but
+             * not on the other two objects.
+             * "Same origin policy" http://en.wikipedia.org/wiki/Same_origin_policy
+             */
+            
+            var doc = null;
+            
+            // IE8 cascading access check
+            try {
+                if (frame.contentWindow) {
+                    doc = frame.contentWindow.document;
+                }
+            } catch(err) {
+                // IE8 access denied under ssl & missing protocol
+                log('cannot get iframe.contentWindow document: ' + err);
+            }
+
+            if (doc) { // successful getting content
+                return doc;
+            }
+
+            try { // simply checking may throw in ie8 under ssl or mismatched protocol
+                doc = frame.contentDocument ? frame.contentDocument : frame.document;
+            } catch(err) {
+                // last attempt
+                log('cannot get iframe.contentDocument: ' + err);
+                doc = frame.document;
+            }
+            return doc;
+        }
+
+        // Rails CSRF hack (thanks to Yvan Barthelemy)
+        var csrf_token = $('meta[name=csrf-token]').attr('content');
+        var csrf_param = $('meta[name=csrf-param]').attr('content');
+        if (csrf_param && csrf_token) {
+            s.extraData = s.extraData || {};
+            s.extraData[csrf_param] = csrf_token;
+        }
+
+        // take a breath so that pending repaints get some cpu time before the upload starts
+        function doSubmit() {
+            // make sure form attrs are set
+            var t = $form.attr2('target'), a = $form.attr2('action');
+
+            // update form attrs in IE friendly way
+            form.setAttribute('target',id);
+            if (!method || /post/i.test(method) ) {
+                form.setAttribute('method', 'POST');
+            }
+            if (a != s.url) {
+                form.setAttribute('action', s.url);
+            }
+
+            // ie borks in some cases when setting encoding
+            if (! s.skipEncodingOverride && (!method || /post/i.test(method))) {
+                $form.attr({
+                    encoding: 'multipart/form-data',
+                    enctype:  'multipart/form-data'
+                });
+            }
+
+            // support timeout
+            if (s.timeout) {
+                timeoutHandle = setTimeout(function() { timedOut = true; cb(CLIENT_TIMEOUT_ABORT); }, s.timeout);
+            }
+
+            // look for server aborts
+            function checkState() {
+                try {
+                    var state = getDoc(io).readyState;
+                    log('state = ' + state);
+                    if (state && state.toLowerCase() == 'uninitialized')
+                        setTimeout(checkState,50);
+                }
+                catch(e) {
+                    log('Server abort: ', e, ' (', e.name, ')');
+                    cb(SERVER_ABORT);
+                    if (timeoutHandle)
+                        clearTimeout(timeoutHandle);
+                    timeoutHandle = undefined;
+                }
+            }
+
+            // add "extra" data to form if provided in options
+            var extraInputs = [];
+            try {
+                if (s.extraData) {
+                    for (var n in s.extraData) {
+                        if (s.extraData.hasOwnProperty(n)) {
+                           // if using the $.param format that allows for multiple values with the same name
+                           if($.isPlainObject(s.extraData[n]) && s.extraData[n].hasOwnProperty('name') && s.extraData[n].hasOwnProperty('value')) {
+                               extraInputs.push(
+                               $('<input type="hidden" name="'+s.extraData[n].name+'">').val(s.extraData[n].value)
+                                   .appendTo(form)[0]);
+                           } else {
+                               extraInputs.push(
+                               $('<input type="hidden" name="'+n+'">').val(s.extraData[n])
+                                   .appendTo(form)[0]);
+                           }
+                        }
+                    }
+                }
+
+                if (!s.iframeTarget) {
+                    // add iframe to doc and submit the form
+                    $io.appendTo('body');
+                }
+                if (io.attachEvent)
+                    io.attachEvent('onload', cb);
+                else
+                    io.addEventListener('load', cb, false);
+                setTimeout(checkState,15);
+
+                try {
+                    form.submit();
+                } catch(err) {
+                    // just in case form has element with name/id of 'submit'
+                    var submitFn = document.createElement('form').submit;
+                    submitFn.apply(form);
+                }
+            }
+            finally {
+                // reset attrs and remove "extra" input elements
+                form.setAttribute('action',a);
+                if(t) {
+                    form.setAttribute('target', t);
+                } else {
+                    $form.removeAttr('target');
+                }
+                $(extraInputs).remove();
+            }
+        }
+
+        if (s.forceSync) {
+            doSubmit();
+        }
+        else {
+            setTimeout(doSubmit, 10); // this lets dom updates render
+        }
+
+        var data, doc, domCheckCount = 50, callbackProcessed;
+
+        function cb(e) {
+            if (xhr.aborted || callbackProcessed) {
+                return;
+            }
+            
+            doc = getDoc(io);
+            if(!doc) {
+                log('cannot access response document');
+                e = SERVER_ABORT;
+            }
+            if (e === CLIENT_TIMEOUT_ABORT && xhr) {
+                xhr.abort('timeout');
+                deferred.reject(xhr, 'timeout');
+                return;
+            }
+            else if (e == SERVER_ABORT && xhr) {
+                xhr.abort('server abort');
+                deferred.reject(xhr, 'error', 'server abort');
+                return;
+            }
+
+            if (!doc || doc.location.href == s.iframeSrc) {
+                // response not received yet
+                if (!timedOut)
+                    return;
+            }
+            if (io.detachEvent)
+                io.detachEvent('onload', cb);
+            else
+                io.removeEventListener('load', cb, false);
+
+            var status = 'success', errMsg;
+            try {
+                if (timedOut) {
+                    throw 'timeout';
+                }
+
+                var isXml = s.dataType == 'xml' || doc.XMLDocument || $.isXMLDoc(doc);
+                log('isXml='+isXml);
+                if (!isXml && window.opera && (doc.body === null || !doc.body.innerHTML)) {
+                    if (--domCheckCount) {
+                        // in some browsers (Opera) the iframe DOM is not always traversable when
+                        // the onload callback fires, so we loop a bit to accommodate
+                        log('requeuing onLoad callback, DOM not available');
+                        setTimeout(cb, 250);
+                        return;
+                    }
+                    // let this fall through because server response could be an empty document
+                    //log('Could not access iframe DOM after multiple tries.');
+                    //throw 'DOMException: not available';
+                }
+
+                //log('response detected');
+                var docRoot = doc.body ? doc.body : doc.documentElement;
+                xhr.responseText = docRoot ? docRoot.innerHTML : null;
+                xhr.responseXML = doc.XMLDocument ? doc.XMLDocument : doc;
+                if (isXml)
+                    s.dataType = 'xml';
+                xhr.getResponseHeader = function(header){
+                    var headers = {'content-type': s.dataType};
+                    return headers[header.toLowerCase()];
+                };
+                // support for XHR 'status' & 'statusText' emulation :
+                if (docRoot) {
+                    xhr.status = Number( docRoot.getAttribute('status') ) || xhr.status;
+                    xhr.statusText = docRoot.getAttribute('statusText') || xhr.statusText;
+                }
+
+                var dt = (s.dataType || '').toLowerCase();
+                var scr = /(json|script|text)/.test(dt);
+                if (scr || s.textarea) {
+                    // see if user embedded response in textarea
+                    var ta = doc.getElementsByTagName('textarea')[0];
+                    if (ta) {
+                        xhr.responseText = ta.value;
+                        // support for XHR 'status' & 'statusText' emulation :
+                        xhr.status = Number( ta.getAttribute('status') ) || xhr.status;
+                        xhr.statusText = ta.getAttribute('statusText') || xhr.statusText;
+                    }
+                    else if (scr) {
+                        // account for browsers injecting pre around json response
+                        var pre = doc.getElementsByTagName('pre')[0];
+                        var b = doc.getElementsByTagName('body')[0];
+                        if (pre) {
+                            xhr.responseText = pre.textContent ? pre.textContent : pre.innerText;
+                        }
+                        else if (b) {
+                            xhr.responseText = b.textContent ? b.textContent : b.innerText;
+                        }
+                    }
+                }
+                else if (dt == 'xml' && !xhr.responseXML && xhr.responseText) {
+                    xhr.responseXML = toXml(xhr.responseText);
+                }
+
+                try {
+                    data = httpData(xhr, dt, s);
+                }
+                catch (err) {
+                    status = 'parsererror';
+                    xhr.error = errMsg = (err || status);
+                }
+            }
+            catch (err) {
+                log('error caught: ',err);
+                status = 'error';
+                xhr.error = errMsg = (err || status);
+            }
+
+            if (xhr.aborted) {
+                log('upload aborted');
+                status = null;
+            }
+
+            if (xhr.status) { // we've set xhr.status
+                status = (xhr.status >= 200 && xhr.status < 300 || xhr.status === 304) ? 'success' : 'error';
+            }
+
+            // ordering of these callbacks/triggers is odd, but that's how $.ajax does it
+            if (status === 'success') {
+                if (s.success)
+                    s.success.call(s.context, data, 'success', xhr);
+                deferred.resolve(xhr.responseText, 'success', xhr);
+                if (g)
+                    $.event.trigger("ajaxSuccess", [xhr, s]);
+            }
+            else if (status) {
+                if (errMsg === undefined)
+                    errMsg = xhr.statusText;
+                if (s.error)
+                    s.error.call(s.context, xhr, status, errMsg);
+                deferred.reject(xhr, 'error', errMsg);
+                if (g)
+                    $.event.trigger("ajaxError", [xhr, s, errMsg]);
+            }
+
+            if (g)
+                $.event.trigger("ajaxComplete", [xhr, s]);
+
+            if (g && ! --$.active) {
+                $.event.trigger("ajaxStop");
+            }
+
+            if (s.complete)
+                s.complete.call(s.context, xhr, status);
+
+            callbackProcessed = true;
+            if (s.timeout)
+                clearTimeout(timeoutHandle);
+
+            // clean up
+            setTimeout(function() {
+                if (!s.iframeTarget)
+                    $io.remove();
+                else  //adding else to clean up existing iframe response.
+                    $io.attr('src', s.iframeSrc);
+                xhr.responseXML = null;
+            }, 100);
+        }
+
+        var toXml = $.parseXML || function(s, doc) { // use parseXML if available (jQuery 1.5+)
+            if (window.ActiveXObject) {
+                doc = new ActiveXObject('Microsoft.XMLDOM');
+                doc.async = 'false';
+                doc.loadXML(s);
+            }
+            else {
+                doc = (new DOMParser()).parseFromString(s, 'text/xml');
+            }
+            return (doc && doc.documentElement && doc.documentElement.nodeName != 'parsererror') ? doc : null;
+        };
+        var parseJSON = $.parseJSON || function(s) {
+            /*jslint evil:true */
+            return window['eval']('(' + s + ')');
+        };
+
+        var httpData = function( xhr, type, s ) { // mostly lifted from jq1.4.4
+
+            var ct = xhr.getResponseHeader('content-type') || '',
+                xml = type === 'xml' || !type && ct.indexOf('xml') >= 0,
+                data = xml ? xhr.responseXML : xhr.responseText;
+
+            if (xml && data.documentElement.nodeName === 'parsererror') {
+                if ($.error)
+                    $.error('parsererror');
+            }
+            if (s && s.dataFilter) {
+                data = s.dataFilter(data, type);
+            }
+            if (typeof data === 'string') {
+                if (type === 'json' || !type && ct.indexOf('json') >= 0) {
+                    data = parseJSON(data);
+                } else if (type === "script" || !type && ct.indexOf("javascript") >= 0) {
+                    $.globalEval(data);
+                }
+            }
+            return data;
+        };
+
+        return deferred;
+    }
+};
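+
+/* Illustrative options sketch (the selector and handler bodies are
+   hypothetical; see the Usage Note at the top of this file for the
+   canonical ajaxSubmit/ajaxForm patterns):
+
+   $('#myForm').ajaxSubmit({
+       beforeSubmit: function(arr, $form, options) {
+           return arr.length > 0;   // returning false aborts the submit
+       },
+       clearForm: true,             // runs $form.clearForm() on success
+       success: function(data, status, xhr) { console.log(status); }
+   });
+*/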
+
+/**
+ * ajaxForm() provides a mechanism for fully automating form submission.
+ *
+ * The advantages of using this method instead of ajaxSubmit() are:
+ *
+ * 1: This method will include coordinates for <input type="image" /> elements (if the element
+ *    is used to submit the form).
+ * 2. This method will include the submit element's name/value data (for the element that was
+ *    used to submit the form).
+ * 3. This method binds the submit() method to the form for you.
+ *
+ * The options argument for ajaxForm works exactly as it does for ajaxSubmit.  ajaxForm merely
+ * passes the options argument along after properly binding events for submit elements and
+ * the form itself.
+ */
+$.fn.ajaxForm = function(options) {
+    options = options || {};
+    options.delegation = options.delegation && $.isFunction($.fn.on);
+
+    // in jQuery 1.3+ we can fix mistakes with the ready state
+    if (!options.delegation && this.length === 0) {
+        var o = { s: this.selector, c: this.context };
+        if (!$.isReady && o.s) {
+            log('DOM not ready, queuing ajaxForm');
+            $(function() {
+                $(o.s,o.c).ajaxForm(options);
+            });
+            return this;
+        }
+        // is your DOM ready?  http://docs.jquery.com/Tutorials:Introducing_$(document).ready()
+        log('terminating; zero elements found by selector' + ($.isReady ? '' : ' (DOM not ready)'));
+        return this;
+    }
+
+    if ( options.delegation ) {
+        $(document)
+            .off('submit.form-plugin', this.selector, doAjaxSubmit)
+            .off('click.form-plugin', this.selector, captureSubmittingElement)
+            .on('submit.form-plugin', this.selector, options, doAjaxSubmit)
+            .on('click.form-plugin', this.selector, options, captureSubmittingElement);
+        return this;
+    }
+
+    return this.ajaxFormUnbind()
+        .bind('submit.form-plugin', options, doAjaxSubmit)
+        .bind('click.form-plugin', options, captureSubmittingElement);
+};
+
+// private event handlers
+function doAjaxSubmit(e) {
+    /*jshint validthis:true */
+    var options = e.data;
+    if (!e.isDefaultPrevented()) { // if event has been canceled, don't proceed
+        e.preventDefault();
+        $(e.target).ajaxSubmit(options); // #365
+    }
+}
+
+function captureSubmittingElement(e) {
+    /*jshint validthis:true */
+    var target = e.target;
+    var $el = $(target);
+    if (!($el.is("[type=submit],[type=image]"))) {
+        // is this a child element of the submit el?  (ex: a span within a button)
+        var t = $el.closest('[type=submit]');
+        if (t.length === 0) {
+            return;
+        }
+        target = t[0];
+    }
+    var form = this;
+    form.clk = target;
+    if (target.type == 'image') {
+        if (e.offsetX !== undefined) {
+            form.clk_x = e.offsetX;
+            form.clk_y = e.offsetY;
+        } else if (typeof $.fn.offset == 'function') {
+            var offset = $el.offset();
+            form.clk_x = e.pageX - offset.left;
+            form.clk_y = e.pageY - offset.top;
+        } else {
+            form.clk_x = e.pageX - target.offsetLeft;
+            form.clk_y = e.pageY - target.offsetTop;
+        }
+    }
+    // clear form vars
+    setTimeout(function() { form.clk = form.clk_x = form.clk_y = null; }, 100);
+}
+
+
+// ajaxFormUnbind unbinds the event handlers that were bound by ajaxForm
+$.fn.ajaxFormUnbind = function() {
+    return this.unbind('submit.form-plugin click.form-plugin');
+};
+
+/**
+ * formToArray() gathers form element data into an array of objects that can
+ * be passed to any of the following ajax functions: $.get, $.post, or load.
+ * Each object in the array has both a 'name' and 'value' property.  An example of
+ * an array for a simple login form might be:
+ *
+ * [ { name: 'username', value: 'jresig' }, { name: 'password', value: 'secret' } ]
+ *
+ * It is this array that is passed to pre-submit callback functions provided to the
+ * ajaxSubmit() and ajaxForm() methods.
+ */
+$.fn.formToArray = function(semantic, elements) {
+    var a = [];
+    if (this.length === 0) {
+        return a;
+    }
+
+    var form = this[0];
+    var els = semantic ? form.getElementsByTagName('*') : form.elements;
+    if (!els) {
+        return a;
+    }
+
+    var i,j,n,v,el,max,jmax;
+    for(i=0, max=els.length; i < max; i++) {
+        el = els[i];
+        n = el.name;
+        if (!n || el.disabled) {
+            continue;
+        }
+
+        if (semantic && form.clk && el.type == "image") {
+            // handle image inputs on the fly when semantic == true
+            if(form.clk == el) {
+                a.push({name: n, value: $(el).val(), type: el.type });
+                a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
+            }
+            continue;
+        }
+
+        v = $.fieldValue(el, true);
+        if (v && v.constructor == Array) {
+            if (elements)
+                elements.push(el);
+            for(j=0, jmax=v.length; j < jmax; j++) {
+                a.push({name: n, value: v[j]});
+            }
+        }
+        else if (feature.fileapi && el.type == 'file') {
+            if (elements)
+                elements.push(el);
+            var files = el.files;
+            if (files.length) {
+                for (j=0; j < files.length; j++) {
+                    a.push({name: n, value: files[j], type: el.type});
+                }
+            }
+            else {
+                // #180
+                a.push({ name: n, value: '', type: el.type });
+            }
+        }
+        else if (v !== null && typeof v != 'undefined') {
+            if (elements)
+                elements.push(el);
+            a.push({name: n, value: v, type: el.type, required: el.required});
+        }
+    }
+
+    if (!semantic && form.clk) {
+        // input type=='image' are not found in elements array! handle it here
+        var $input = $(form.clk), input = $input[0];
+        n = input.name;
+        if (n && !input.disabled && input.type == 'image') {
+            a.push({name: n, value: $input.val()});
+            a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
+        }
+    }
+    return a;
+};
+
+/**
+ * Serializes form data into a 'submittable' string. This method will return a string
+ * in the format: name1=value1&name2=value2
+ */
+$.fn.formSerialize = function(semantic) {
+    //hand off to jQuery.param for proper encoding
+    return $.param(this.formToArray(semantic));
+};
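+
+/* Illustrative sketch ('#loginForm' is hypothetical), reusing the login form
+   from the formToArray() comment above:
+
+   $('#loginForm').formSerialize(); // -> "username=jresig&password=secret"
+*/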
+
+/**
+ * Serializes all field elements in the jQuery object into a query string.
+ * This method will return a string in the format: name1=value1&name2=value2
+ */
+$.fn.fieldSerialize = function(successful) {
+    var a = [];
+    this.each(function() {
+        var n = this.name;
+        if (!n) {
+            return;
+        }
+        var v = $.fieldValue(this, successful);
+        if (v && v.constructor == Array) {
+            for (var i=0,max=v.length; i < max; i++) {
+                a.push({name: n, value: v[i]});
+            }
+        }
+        else if (v !== null && typeof v != 'undefined') {
+            a.push({name: this.name, value: v});
+        }
+    });
+    //hand off to jQuery.param for proper encoding
+    return $.param(a);
+};
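+
+/* Illustrative sketch ('#myForm' is hypothetical), using the example form
+   from the fieldValue() comment below: serialize only the text inputs,
+   skipping unsuccessful fields such as unchecked boxes:
+
+   $('#myForm input[type=text]').fieldSerialize(); // -> "A=foo&A=bar"
+*/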
+
+/**
+ * Returns the value(s) of the element in the matched set.  For example, consider the following form:
+ *
+ *  <form><fieldset>
+ *      <input name="A" type="text" />
+ *      <input name="A" type="text" />
+ *      <input name="B" type="checkbox" value="B1" />
+ *      <input name="B" type="checkbox" value="B2"/>
+ *      <input name="C" type="radio" value="C1" />
+ *      <input name="C" type="radio" value="C2" />
+ *  </fieldset></form>
+ *
+ *  var v = $('input[type=text]').fieldValue();
+ *  // if no values are entered into the text inputs
+ *  v == ['','']
+ *  // if values entered into the text inputs are 'foo' and 'bar'
+ *  v == ['foo','bar']
+ *
+ *  var v = $('input[type=checkbox]').fieldValue();
+ *  // if neither checkbox is checked
+ *  v === undefined
+ *  // if both checkboxes are checked
+ *  v == ['B1', 'B2']
+ *
+ *  var v = $('input[type=radio]').fieldValue();
+ *  // if neither radio is checked
+ *  v === undefined
+ *  // if first radio is checked
+ *  v == ['C1']
+ *
+ * The successful argument controls whether or not the field element must be 'successful'
+ * (per http://www.w3.org/TR/html4/interact/forms.html#successful-controls).
+ * The default value of the successful argument is true.  If this value is false the value(s)
+ * for each element is returned.
+ *
+ * Note: This method *always* returns an array.  If no valid value can be determined the
+ *    array will be empty, otherwise it will contain one or more values.
+ */
+$.fn.fieldValue = function(successful) {
+    for (var val=[], i=0, max=this.length; i < max; i++) {
+        var el = this[i];
+        var v = $.fieldValue(el, successful);
+        if (v === null || typeof v == 'undefined' || (v.constructor == Array && !v.length)) {
+            continue;
+        }
+        if (v.constructor == Array)
+            $.merge(val, v);
+        else
+            val.push(v);
+    }
+    return val;
+};
+
+/**
+ * Returns the value of the field element.
+ */
+$.fieldValue = function(el, successful) {
+    var n = el.name, t = el.type, tag = el.tagName.toLowerCase();
+    if (successful === undefined) {
+        successful = true;
+    }
+
+    if (successful && (!n || el.disabled || t == 'reset' || t == 'button' ||
+        (t == 'checkbox' || t == 'radio') && !el.checked ||
+        (t == 'submit' || t == 'image') && el.form && el.form.clk != el ||
+        tag == 'select' && el.selectedIndex == -1)) {
+            return null;
+    }
+
+    if (tag == 'select') {
+        var index = el.selectedIndex;
+        if (index < 0) {
+            return null;
+        }
+        var a = [], ops = el.options;
+        var one = (t == 'select-one');
+        var max = (one ? index+1 : ops.length);
+        for(var i=(one ? index : 0); i < max; i++) {
+            var op = ops[i];
+            if (op.selected) {
+                var v = op.value;
+                if (!v) { // extra pain for IE...
+                    v = (op.attributes && op.attributes['value'] && !(op.attributes['value'].specified)) ? op.text : op.value;
+                }
+                if (one) {
+                    return v;
+                }
+                a.push(v);
+            }
+        }
+        return a;
+    }
+    return $(el).val();
+};
+
+/**
+ * Clears the form data.  Takes the following actions on the form's input fields:
+ *  - input text fields will have their 'value' property set to the empty string
+ *  - select elements will have their 'selectedIndex' property set to -1
+ *  - checkbox and radio inputs will have their 'checked' property set to false
+ *  - inputs of type submit, button, reset, and hidden will *not* be affected
+ *  - button elements will *not* be affected
+ */
+$.fn.clearForm = function(includeHidden) {
+    return this.each(function() {
+        $('input,select,textarea', this).clearFields(includeHidden);
+    });
+};
+
+/**
+ * Clears the selected form elements.
+ */
+$.fn.clearFields = $.fn.clearInputs = function(includeHidden) {
+    var re = /^(?:color|date|datetime|email|month|number|password|range|search|tel|text|time|url|week)$/i; // 'hidden' is not in this list
+    return this.each(function() {
+        var t = this.type, tag = this.tagName.toLowerCase();
+        if (re.test(t) || tag == 'textarea') {
+            this.value = '';
+        }
+        else if (t == 'checkbox' || t == 'radio') {
+            this.checked = false;
+        }
+        else if (tag == 'select') {
+            this.selectedIndex = -1;
+        }
+		else if (t == "file") {
+			if (/MSIE/.test(navigator.userAgent)) {
+				$(this).replaceWith($(this).clone(true));
+			} else {
+				$(this).val('');
+			}
+		}
+        else if (includeHidden) {
+            // includeHidden can be the value true, or it can be a selector string
+            // indicating a special test; for example:
+            //  $('#myForm').clearForm('.special:hidden')
+            // the above would clean hidden inputs that have the class of 'special'
+            if ( (includeHidden === true && /hidden/.test(t)) ||
+                 (typeof includeHidden == 'string' && $(this).is(includeHidden)) )
+                this.value = '';
+        }
+    });
+};
+
+/**
+ * Resets the form data.  Causes all form elements to be reset to their original value.
+ */
+$.fn.resetForm = function() {
+    return this.each(function() {
+        // guard against an input with the name of 'reset'
+        // note that IE reports the reset function as an 'object'
+        if (typeof this.reset == 'function' || (typeof this.reset == 'object' && !this.reset.nodeType)) {
+            this.reset();
+        }
+    });
+};
+
+/**
+ * Enables or disables any matching elements.
+ */
+$.fn.enable = function(b) {
+    if (b === undefined) {
+        b = true;
+    }
+    return this.each(function() {
+        this.disabled = !b;
+    });
+};
+
+/**
+ * Checks/unchecks any matching checkboxes or radio buttons and
+ * selects/deselects any matching option elements.
+ */
+$.fn.selected = function(select) {
+    if (select === undefined) {
+        select = true;
+    }
+    return this.each(function() {
+        var t = this.type;
+        if (t == 'checkbox' || t == 'radio') {
+            this.checked = select;
+        }
+        else if (this.tagName.toLowerCase() == 'option') {
+            var $sel = $(this).parent('select');
+            if (select && $sel[0] && $sel[0].type == 'select-one') {
+                // deselect all other options
+                $sel.find('option').selected(false);
+            }
+            this.selected = select;
+        }
+    });
+};
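+
+/* Illustrative sketch (selectors hypothetical):
+
+   $('#myForm input[type=checkbox]').selected(true); // check every checkbox
+   $('#colors option[value=red]').selected();        // select one option; siblings
+                                                     // of a select-one are deselected
+*/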
+
+// expose debug var
+$.fn.ajaxSubmit.debug = false;
+
+// helper fn for console logging
+function log() {
+    if (!$.fn.ajaxSubmit.debug)
+        return;
+    var msg = '[jquery.form] ' + Array.prototype.join.call(arguments,'');
+    if (window.console && window.console.log) {
+        window.console.log(msg);
+    }
+    else if (window.opera && window.opera.postError) {
+        window.opera.postError(msg);
+    }
+}
+
+})( (typeof(jQuery) != 'undefined') ? jQuery : window.Zepto );
diff --git a/client/galaxy/scripts/libs/jquery/jquery.js b/client/galaxy/scripts/libs/jquery/jquery.js
new file mode 100644
index 0000000..7fc60fc
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.js
@@ -0,0 +1,11008 @@
+/*!
+ * jQuery JavaScript Library v1.12.4
+ * http://jquery.com/
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ *
+ * Copyright jQuery Foundation and other contributors
+ * Released under the MIT license
+ * http://jquery.org/license
+ *
+ * Date: 2016-05-20T17:17Z
+ */
+
+(function( global, factory ) {
+
+	if ( typeof module === "object" && typeof module.exports === "object" ) {
+		// For CommonJS and CommonJS-like environments where a proper `window`
+		// is present, execute the factory and get jQuery.
+		// For environments that do not have a `window` with a `document`
+		// (such as Node.js), expose a factory as module.exports.
+		// This accentuates the need for the creation of a real `window`.
+		// e.g. var jQuery = require("jquery")(window);
+		// See ticket #14549 for more info.
+		module.exports = global.document ?
+			factory( global, true ) :
+			function( w ) {
+				if ( !w.document ) {
+					throw new Error( "jQuery requires a window with a document" );
+				}
+				return factory( w );
+			};
+	} else {
+		factory( global );
+	}
+
+// Pass this if window is not defined yet
+}(typeof window !== "undefined" ? window : this, function( window, noGlobal ) {
+
+// Support: Firefox 18+
+// Can't be in strict mode, several libs including ASP.NET trace
+// the stack via arguments.caller.callee and Firefox dies if
+// you try to trace through "use strict" call chains. (#13335)
+//"use strict";
+var deletedIds = [];
+
+var document = window.document;
+
+var slice = deletedIds.slice;
+
+var concat = deletedIds.concat;
+
+var push = deletedIds.push;
+
+var indexOf = deletedIds.indexOf;
+
+var class2type = {};
+
+var toString = class2type.toString;
+
+var hasOwn = class2type.hasOwnProperty;
+
+var support = {};
+
+
+
+var
+	version = "1.12.4",
+
+	// Define a local copy of jQuery
+	jQuery = function( selector, context ) {
+
+		// The jQuery object is actually just the init constructor 'enhanced'
+		// Need init if jQuery is called (just allow error to be thrown if not included)
+		return new jQuery.fn.init( selector, context );
+	},
+
+	// Support: Android<4.1, IE<9
+	// Make sure we trim BOM and NBSP
+	rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,
+
+	// Matches dashed string for camelizing
+	rmsPrefix = /^-ms-/,
+	rdashAlpha = /-([\da-z])/gi,
+
+	// Used by jQuery.camelCase as callback to replace()
+	fcamelCase = function( all, letter ) {
+		return letter.toUpperCase();
+	};
+
+jQuery.fn = jQuery.prototype = {
+
+	// The current version of jQuery being used
+	jquery: version,
+
+	constructor: jQuery,
+
+	// Start with an empty selector
+	selector: "",
+
+	// The default length of a jQuery object is 0
+	length: 0,
+
+	toArray: function() {
+		return slice.call( this );
+	},
+
+	// Get the Nth element in the matched element set OR
+	// Get the whole matched element set as a clean array
+	get: function( num ) {
+		return num != null ?
+
+			// Return just the one element from the set
+			( num < 0 ? this[ num + this.length ] : this[ num ] ) :
+
+			// Return all the elements in a clean array
+			slice.call( this );
+	},
+
+	// Take an array of elements and push it onto the stack
+	// (returning the new matched element set)
+	pushStack: function( elems ) {
+
+		// Build a new jQuery matched element set
+		var ret = jQuery.merge( this.constructor(), elems );
+
+		// Add the old object onto the stack (as a reference)
+		ret.prevObject = this;
+		ret.context = this.context;
+
+		// Return the newly-formed element set
+		return ret;
+	},
+
+	// Execute a callback for every element in the matched set.
+	each: function( callback ) {
+		return jQuery.each( this, callback );
+	},
+
+	map: function( callback ) {
+		return this.pushStack( jQuery.map( this, function( elem, i ) {
+			return callback.call( elem, i, elem );
+		} ) );
+	},
+
+	slice: function() {
+		return this.pushStack( slice.apply( this, arguments ) );
+	},
+
+	first: function() {
+		return this.eq( 0 );
+	},
+
+	last: function() {
+		return this.eq( -1 );
+	},
+
+	eq: function( i ) {
+		var len = this.length,
+			j = +i + ( i < 0 ? len : 0 );
+		return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] );
+	},
+
+	end: function() {
+		return this.prevObject || this.constructor();
+	},
+
+	// For internal use only.
+	// Behaves like an Array's method, not like a jQuery method.
+	push: push,
+	sort: deletedIds.sort,
+	splice: deletedIds.splice
+};
+
+jQuery.extend = jQuery.fn.extend = function() {
+	var src, copyIsArray, copy, name, options, clone,
+		target = arguments[ 0 ] || {},
+		i = 1,
+		length = arguments.length,
+		deep = false;
+
+	// Handle a deep copy situation
+	if ( typeof target === "boolean" ) {
+		deep = target;
+
+		// skip the boolean and the target
+		target = arguments[ i ] || {};
+		i++;
+	}
+
+	// Handle case when target is a string or something (possible in deep copy)
+	if ( typeof target !== "object" && !jQuery.isFunction( target ) ) {
+		target = {};
+	}
+
+	// extend jQuery itself if only one argument is passed
+	if ( i === length ) {
+		target = this;
+		i--;
+	}
+
+	for ( ; i < length; i++ ) {
+
+		// Only deal with non-null/undefined values
+		if ( ( options = arguments[ i ] ) != null ) {
+
+			// Extend the base object
+			for ( name in options ) {
+				src = target[ name ];
+				copy = options[ name ];
+
+				// Prevent never-ending loop
+				if ( target === copy ) {
+					continue;
+				}
+
+				// Recurse if we're merging plain objects or arrays
+				if ( deep && copy && ( jQuery.isPlainObject( copy ) ||
+					( copyIsArray = jQuery.isArray( copy ) ) ) ) {
+
+					if ( copyIsArray ) {
+						copyIsArray = false;
+						clone = src && jQuery.isArray( src ) ? src : [];
+
+					} else {
+						clone = src && jQuery.isPlainObject( src ) ? src : {};
+					}
+
+					// Never move original objects, clone them
+					target[ name ] = jQuery.extend( deep, clone, copy );
+
+				// Don't bring in undefined values
+				} else if ( copy !== undefined ) {
+					target[ name ] = copy;
+				}
+			}
+		}
+	}
+
+	// Return the modified object
+	return target;
+};
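+
+/* Illustrative sketch of shallow vs. deep copying (objects hypothetical):
+
+   var src = { point: { x: 1 } };
+   jQuery.extend({}, src).point === src.point;       // true:  shallow, shared reference
+   jQuery.extend(true, {}, src).point === src.point; // false: deep, nested object cloned
+*/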
+
+jQuery.extend( {
+
+	// Unique for each copy of jQuery on the page
+	expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ),
+
+	// Assume jQuery is ready without the ready module
+	isReady: true,
+
+	error: function( msg ) {
+		throw new Error( msg );
+	},
+
+	noop: function() {},
+
+	// See test/unit/core.js for details concerning isFunction.
+	// Since version 1.3, DOM methods and functions like alert
+	// aren't supported. They return false on IE (#2968).
+	isFunction: function( obj ) {
+		return jQuery.type( obj ) === "function";
+	},
+
+	isArray: Array.isArray || function( obj ) {
+		return jQuery.type( obj ) === "array";
+	},
+
+	isWindow: function( obj ) {
+		/* jshint eqeqeq: false */
+		return obj != null && obj == obj.window;
+	},
+
+	isNumeric: function( obj ) {
+
+		// parseFloat NaNs numeric-cast false positives (null|true|false|"")
+		// ...but misinterprets leading-number strings, particularly hex literals ("0x...")
+		// subtraction forces infinities to NaN
+		// adding 1 corrects loss of precision from parseFloat (#15100)
+		var realStringObj = obj && obj.toString();
+		return !jQuery.isArray( obj ) && ( realStringObj - parseFloat( realStringObj ) + 1 ) >= 0;
+	},
+
+	isEmptyObject: function( obj ) {
+		var name;
+		for ( name in obj ) {
+			return false;
+		}
+		return true;
+	},
+
+	isPlainObject: function( obj ) {
+		var key;
+
+		// Must be an Object.
+		// Because of IE, we also have to check the presence of the constructor property.
+		// Make sure that DOM nodes and window objects don't pass through, as well
+		if ( !obj || jQuery.type( obj ) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) {
+			return false;
+		}
+
+		try {
+
+			// Not own constructor property must be Object
+			if ( obj.constructor &&
+				!hasOwn.call( obj, "constructor" ) &&
+				!hasOwn.call( obj.constructor.prototype, "isPrototypeOf" ) ) {
+				return false;
+			}
+		} catch ( e ) {
+
+			// IE8,9 Will throw exceptions on certain host objects #9897
+			return false;
+		}
+
+		// Support: IE<9
+		// Handle iteration over inherited properties before own properties.
+		if ( !support.ownFirst ) {
+			for ( key in obj ) {
+				return hasOwn.call( obj, key );
+			}
+		}
+
+		// Own properties are enumerated firstly, so to speed up,
+		// if last one is own, then all properties are own.
+		for ( key in obj ) {}
+
+		return key === undefined || hasOwn.call( obj, key );
+	},
+
+	type: function( obj ) {
+		if ( obj == null ) {
+			return obj + "";
+		}
+		return typeof obj === "object" || typeof obj === "function" ?
+			class2type[ toString.call( obj ) ] || "object" :
+			typeof obj;
+	},
+
+	// Workarounds based on findings by Jim Driscoll
+	// http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context
+	globalEval: function( data ) {
+		if ( data && jQuery.trim( data ) ) {
+
+			// We use execScript on Internet Explorer
+			// We use an anonymous function so that context is window
+			// rather than jQuery in Firefox
+			( window.execScript || function( data ) {
+				window[ "eval" ].call( window, data ); // jscs:ignore requireDotNotation
+			} )( data );
+		}
+	},
+
+	// Convert dashed to camelCase; used by the css and data modules
+	// Microsoft forgot to hump their vendor prefix (#9572)
+	camelCase: function( string ) {
+		return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase );
+	},
+
+	nodeName: function( elem, name ) {
+		return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase();
+	},
+
+	each: function( obj, callback ) {
+		var length, i = 0;
+
+		if ( isArrayLike( obj ) ) {
+			length = obj.length;
+			for ( ; i < length; i++ ) {
+				if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) {
+					break;
+				}
+			}
+		} else {
+			for ( i in obj ) {
+				if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) {
+					break;
+				}
+			}
+		}
+
+		return obj;
+	},
+
+	// Support: Android<4.1, IE<9
+	trim: function( text ) {
+		return text == null ?
+			"" :
+			( text + "" ).replace( rtrim, "" );
+	},
+
+	// results is for internal usage only
+	makeArray: function( arr, results ) {
+		var ret = results || [];
+
+		if ( arr != null ) {
+			if ( isArrayLike( Object( arr ) ) ) {
+				jQuery.merge( ret,
+					typeof arr === "string" ?
+					[ arr ] : arr
+				);
+			} else {
+				push.call( ret, arr );
+			}
+		}
+
+		return ret;
+	},
+
+	inArray: function( elem, arr, i ) {
+		var len;
+
+		if ( arr ) {
+			if ( indexOf ) {
+				return indexOf.call( arr, elem, i );
+			}
+
+			len = arr.length;
+			i = i ? i < 0 ? Math.max( 0, len + i ) : i : 0;
+
+			for ( ; i < len; i++ ) {
+
+				// Skip accessing in sparse arrays
+				if ( i in arr && arr[ i ] === elem ) {
+					return i;
+				}
+			}
+		}
+
+		return -1;
+	},
+
+	merge: function( first, second ) {
+		var len = +second.length,
+			j = 0,
+			i = first.length;
+
+		while ( j < len ) {
+			first[ i++ ] = second[ j++ ];
+		}
+
+		// Support: IE<9
+		// Workaround casting of .length to NaN on otherwise arraylike objects (e.g., NodeLists)
+		if ( len !== len ) {
+			while ( second[ j ] !== undefined ) {
+				first[ i++ ] = second[ j++ ];
+			}
+		}
+
+		first.length = i;
+
+		return first;
+	},
+
+	grep: function( elems, callback, invert ) {
+		var callbackInverse,
+			matches = [],
+			i = 0,
+			length = elems.length,
+			callbackExpect = !invert;
+
+		// Go through the array, only saving the items
+		// that pass the validator function
+		for ( ; i < length; i++ ) {
+			callbackInverse = !callback( elems[ i ], i );
+			if ( callbackInverse !== callbackExpect ) {
+				matches.push( elems[ i ] );
+			}
+		}
+
+		return matches;
+	},
+
+	// arg is for internal usage only
+	map: function( elems, callback, arg ) {
+		var length, value,
+			i = 0,
+			ret = [];
+
+		// Go through the array, translating each of the items to their new values
+		if ( isArrayLike( elems ) ) {
+			length = elems.length;
+			for ( ; i < length; i++ ) {
+				value = callback( elems[ i ], i, arg );
+
+				if ( value != null ) {
+					ret.push( value );
+				}
+			}
+
+		// Go through every key on the object,
+		} else {
+			for ( i in elems ) {
+				value = callback( elems[ i ], i, arg );
+
+				if ( value != null ) {
+					ret.push( value );
+				}
+			}
+		}
+
+		// Flatten any nested arrays
+		return concat.apply( [], ret );
+	},
+
+	// A global GUID counter for objects
+	guid: 1,
+
+	// Bind a function to a context, optionally partially applying any
+	// arguments.
+	proxy: function( fn, context ) {
+		var args, proxy, tmp;
+
+		if ( typeof context === "string" ) {
+			tmp = fn[ context ];
+			context = fn;
+			fn = tmp;
+		}
+
+		// Quick check to determine if target is callable, in the spec
+		// this throws a TypeError, but we will just return undefined.
+		if ( !jQuery.isFunction( fn ) ) {
+			return undefined;
+		}
+
+		// Simulated bind
+		args = slice.call( arguments, 2 );
+		proxy = function() {
+			return fn.apply( context || this, args.concat( slice.call( arguments ) ) );
+		};
+
+		// Set the guid of unique handler to the same of original handler, so it can be removed
+		proxy.guid = fn.guid = fn.guid || jQuery.guid++;
+
+		return proxy;
+	},
+
+	now: function() {
+		return +( new Date() );
+	},
+
+	// jQuery.support is not used in Core but other projects attach their
+	// properties to it so it needs to exist.
+	support: support
+} );
+
+// JSHint would error on this code due to the Symbol not being defined in ES5.
+// Defining this global in .jshintrc would create a danger of using the global
+// unguarded in another place; it seems safer to just disable JSHint for these
+// three lines.
+/* jshint ignore: start */
+if ( typeof Symbol === "function" ) {
+	jQuery.fn[ Symbol.iterator ] = deletedIds[ Symbol.iterator ];
+}
+/* jshint ignore: end */
+
+// Populate the class2type map
+jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ),
+function( i, name ) {
+	class2type[ "[object " + name + "]" ] = name.toLowerCase();
+} );
+
+function isArrayLike( obj ) {
+
+	// Support: iOS 8.2 (not reproducible in simulator)
+	// `in` check used to prevent JIT error (gh-2145)
+	// hasOwn isn't used here due to false negatives
+	// regarding NodeList length in IE
+	var length = !!obj && "length" in obj && obj.length,
+		type = jQuery.type( obj );
+
+	if ( type === "function" || jQuery.isWindow( obj ) ) {
+		return false;
+	}
+
+	return type === "array" || length === 0 ||
+		typeof length === "number" && length > 0 && ( length - 1 ) in obj;
+}
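+
+// Illustrative note (not part of the source): isArrayLike accepts arrays and
+// objects with a coherent numeric length (NodeLists, arguments, jQuery sets)
+// while rejecting functions, window, and plain objects:
+//   isArrayLike( [ 1, 2 ] );              // true
+//   isArrayLike( { length: 2, 1: "x" } ); // true (index length - 1 exists)
+//   isArrayLike( { length: 2 } );         // false (index 1 is missing)
+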
+var Sizzle =
+/*!
+ * Sizzle CSS Selector Engine v2.2.1
+ * http://sizzlejs.com/
+ *
+ * Copyright jQuery Foundation and other contributors
+ * Released under the MIT license
+ * http://jquery.org/license
+ *
+ * Date: 2015-10-17
+ */
+(function( window ) {
+
+var i,
+	support,
+	Expr,
+	getText,
+	isXML,
+	tokenize,
+	compile,
+	select,
+	outermostContext,
+	sortInput,
+	hasDuplicate,
+
+	// Local document vars
+	setDocument,
+	document,
+	docElem,
+	documentIsHTML,
+	rbuggyQSA,
+	rbuggyMatches,
+	matches,
+	contains,
+
+	// Instance-specific data
+	expando = "sizzle" + 1 * new Date(),
+	preferredDoc = window.document,
+	dirruns = 0,
+	done = 0,
+	classCache = createCache(),
+	tokenCache = createCache(),
+	compilerCache = createCache(),
+	sortOrder = function( a, b ) {
+		if ( a === b ) {
+			hasDuplicate = true;
+		}
+		return 0;
+	},
+
+	// General-purpose constants
+	MAX_NEGATIVE = 1 << 31,
+
+	// Instance methods
+	hasOwn = ({}).hasOwnProperty,
+	arr = [],
+	pop = arr.pop,
+	push_native = arr.push,
+	push = arr.push,
+	slice = arr.slice,
+	// Use a stripped-down indexOf as it's faster than native
+	// http://jsperf.com/thor-indexof-vs-for/5
+	indexOf = function( list, elem ) {
+		var i = 0,
+			len = list.length;
+		for ( ; i < len; i++ ) {
+			if ( list[i] === elem ) {
+				return i;
+			}
+		}
+		return -1;
+	},
+
+	booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",
+
+	// Regular expressions
+
+	// http://www.w3.org/TR/css3-selectors/#whitespace
+	whitespace = "[\\x20\\t\\r\\n\\f]",
+
+	// http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier
+	identifier = "(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",
+
+	// Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors
+	attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace +
+		// Operator (capture 2)
+		"*([*^$|!~]?=)" + whitespace +
+		// "Attribute values must be CSS identifiers [capture 5] or strings [capture 3 or capture 4]"
+		"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" + identifier + "))|)" + whitespace +
+		"*\\]",
+
+	pseudos = ":(" + identifier + ")(?:\\((" +
+		// To reduce the number of selectors needing tokenize in the preFilter, prefer arguments:
+		// 1. quoted (capture 3; capture 4 or capture 5)
+		"('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" +
+		// 2. simple (capture 6)
+		"((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" +
+		// 3. anything else (capture 2)
+		".*" +
+		")\\)|)",
+
+	// Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter
+	rwhitespace = new RegExp( whitespace + "+", "g" ),
+	rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ),
+
+	rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ),
+	rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + "*" ),
+
+	rattributeQuotes = new RegExp( "=" + whitespace + "*([^\\]'\"]*?)" + whitespace + "*\\]", "g" ),
+
+	rpseudo = new RegExp( pseudos ),
+	ridentifier = new RegExp( "^" + identifier + "$" ),
+
+	matchExpr = {
+		"ID": new RegExp( "^#(" + identifier + ")" ),
+		"CLASS": new RegExp( "^\\.(" + identifier + ")" ),
+		"TAG": new RegExp( "^(" + identifier + "|[*])" ),
+		"ATTR": new RegExp( "^" + attributes ),
+		"PSEUDO": new RegExp( "^" + pseudos ),
+		"CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + whitespace +
+			"*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + whitespace +
+			"*(\\d+)|))" + whitespace + "*\\)|)", "i" ),
+		"bool": new RegExp( "^(?:" + booleans + ")$", "i" ),
+		// For use in libraries implementing .is()
+		// We use this for POS matching in `select`
+		"needsContext": new RegExp( "^" + whitespace + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" +
+			whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" )
+	},
+
+	rinputs = /^(?:input|select|textarea|button)$/i,
+	rheader = /^h\d$/i,
+
+	rnative = /^[^{]+\{\s*\[native \w/,
+
+	// Easily-parseable/retrievable ID or TAG or CLASS selectors
+	rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,
+
+	rsibling = /[+~]/,
+	rescape = /'|\\/g,
+
+	// CSS escapes http://www.w3.org/TR/CSS21/syndata.html#escaped-characters
+	runescape = new RegExp( "\\\\([\\da-f]{1,6}" + whitespace + "?|(" + whitespace + ")|.)", "ig" ),
+	funescape = function( _, escaped, escapedWhitespace ) {
+		var high = "0x" + escaped - 0x10000;
+		// NaN means non-codepoint
+		// Support: Firefox<24
+		// Workaround erroneous numeric interpretation of +"0x"
+		return high !== high || escapedWhitespace ?
+			escaped :
+			high < 0 ?
+				// BMP codepoint
+				String.fromCharCode( high + 0x10000 ) :
+				// Supplemental Plane codepoint (surrogate pair)
+				String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 );
+	},
+
+	// Used for iframes
+	// See setDocument()
+	// Removing the function wrapper causes a "Permission Denied"
+	// error in IE
+	unloadHandler = function() {
+		setDocument();
+	};
+
+// Optimize for push.apply( _, NodeList )
+try {
+	push.apply(
+		(arr = slice.call( preferredDoc.childNodes )),
+		preferredDoc.childNodes
+	);
+	// Support: Android<4.0
+	// Detect silently failing push.apply
+	arr[ preferredDoc.childNodes.length ].nodeType;
+} catch ( e ) {
+	push = { apply: arr.length ?
+
+		// Leverage slice if possible
+		function( target, els ) {
+			push_native.apply( target, slice.call(els) );
+		} :
+
+		// Support: IE<9
+		// Otherwise append directly
+		function( target, els ) {
+			var j = target.length,
+				i = 0;
+			// Can't trust NodeList.length
+			while ( (target[j++] = els[i++]) ) {}
+			target.length = j - 1;
+		}
+	};
+}
+
+function Sizzle( selector, context, results, seed ) {
+	var m, i, elem, nid, nidselect, match, groups, newSelector,
+		newContext = context && context.ownerDocument,
+
+		// nodeType defaults to 9, since context defaults to document
+		nodeType = context ? context.nodeType : 9;
+
+	results = results || [];
+
+	// Return early from calls with invalid selector or context
+	if ( typeof selector !== "string" || !selector ||
+		nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) {
+
+		return results;
+	}
+
+	// Try to shortcut find operations (as opposed to filters) in HTML documents
+	if ( !seed ) {
+
+		if ( ( context ? context.ownerDocument || context : preferredDoc ) !== document ) {
+			setDocument( context );
+		}
+		context = context || document;
+
+		if ( documentIsHTML ) {
+
+			// If the selector is sufficiently simple, try using a "get*By*" DOM method
+			// (excepting DocumentFragment context, where the methods don't exist)
+			if ( nodeType !== 11 && (match = rquickExpr.exec( selector )) ) {
+
+				// ID selector
+				if ( (m = match[1]) ) {
+
+					// Document context
+					if ( nodeType === 9 ) {
+						if ( (elem = context.getElementById( m )) ) {
+
+							// Support: IE, Opera, Webkit
+							// TODO: identify versions
+							// getElementById can match elements by name instead of ID
+							if ( elem.id === m ) {
+								results.push( elem );
+								return results;
+							}
+						} else {
+							return results;
+						}
+
+					// Element context
+					} else {
+
+						// Support: IE, Opera, Webkit
+						// TODO: identify versions
+						// getElementById can match elements by name instead of ID
+						if ( newContext && (elem = newContext.getElementById( m )) &&
+							contains( context, elem ) &&
+							elem.id === m ) {
+
+							results.push( elem );
+							return results;
+						}
+					}
+
+				// Type selector
+				} else if ( match[2] ) {
+					push.apply( results, context.getElementsByTagName( selector ) );
+					return results;
+
+				// Class selector
+				} else if ( (m = match[3]) && support.getElementsByClassName &&
+					context.getElementsByClassName ) {
+
+					push.apply( results, context.getElementsByClassName( m ) );
+					return results;
+				}
+			}
+
+			// Take advantage of querySelectorAll
+			if ( support.qsa &&
+				!compilerCache[ selector + " " ] &&
+				(!rbuggyQSA || !rbuggyQSA.test( selector )) ) {
+
+				if ( nodeType !== 1 ) {
+					newContext = context;
+					newSelector = selector;
+
+				// qSA looks outside Element context, which is not what we want
+				// Thanks to Andrew Dupont for this workaround technique
+				// Support: IE <=8
+				// Exclude object elements
+				} else if ( context.nodeName.toLowerCase() !== "object" ) {
+
+					// Capture the context ID, setting it first if necessary
+					if ( (nid = context.getAttribute( "id" )) ) {
+						nid = nid.replace( rescape, "\\$&" );
+					} else {
+						context.setAttribute( "id", (nid = expando) );
+					}
+
+					// Prefix every selector in the list
+					groups = tokenize( selector );
+					i = groups.length;
+					nidselect = ridentifier.test( nid ) ? "#" + nid : "[id='" + nid + "']";
+					while ( i-- ) {
+						groups[i] = nidselect + " " + toSelector( groups[i] );
+					}
+					newSelector = groups.join( "," );
+
+					// Expand context for sibling selectors
+					newContext = rsibling.test( selector ) && testContext( context.parentNode ) ||
+						context;
+				}
+
+				if ( newSelector ) {
+					try {
+						push.apply( results,
+							newContext.querySelectorAll( newSelector )
+						);
+						return results;
+					} catch ( qsaError ) {
+					} finally {
+						if ( nid === expando ) {
+							context.removeAttribute( "id" );
+						}
+					}
+				}
+			}
+		}
+	}
+
+	// All others
+	return select( selector.replace( rtrim, "$1" ), context, results, seed );
+}
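+
+// Illustrative usage sketch (not part of the source): the entry point appends
+// matches to `results` and returns it; a `seed` array instead restricts
+// matching to the supplied candidates (the pattern Sizzle.matches uses below):
+//   Sizzle( "div.note", document );           // find all matching elements
+//   Sizzle( ".note", null, null, [ elem ] );  // keep `elem` only if it matches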
+
+/**
+ * Create key-value caches of limited size
+ * @returns {function(string, object)} Returns the stored Object data after saving it on itself
+ *	under the (space-suffixed) string key, deleting the oldest entry once the cache grows
+ *	beyond Expr.cacheLength
+ */
+function createCache() {
+	var keys = [];
+
+	function cache( key, value ) {
+		// Use (key + " ") to avoid collision with native prototype properties (see Issue #157)
+		if ( keys.push( key + " " ) > Expr.cacheLength ) {
+			// Only keep the most recent entries
+			delete cache[ keys.shift() ];
+		}
+		return (cache[ key + " " ] = value);
+	}
+	return cache;
+}
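+
+// Illustrative usage sketch (not part of the source): the returned function is
+// both the cache and its setter; keys gain a trailing space so they can never
+// collide with properties inherited from Object.prototype:
+//   var cache = createCache();
+//   cache( "div", data ); // stores and returns `data`
+//   cache[ "div " ];      // reads it back (note the space-suffixed key)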
+
+/**
+ * Mark a function for special use by Sizzle
+ * @param {Function} fn The function to mark
+ */
+function markFunction( fn ) {
+	fn[ expando ] = true;
+	return fn;
+}
+
+/**
+ * Support testing using an element
+ * @param {Function} fn Passed the created div and expects a boolean result
+ */
+function assert( fn ) {
+	var div = document.createElement("div");
+
+	try {
+		return !!fn( div );
+	} catch (e) {
+		return false;
+	} finally {
+		// Remove from its parent by default
+		if ( div.parentNode ) {
+			div.parentNode.removeChild( div );
+		}
+		// release memory in IE
+		div = null;
+	}
+}
+
+/**
+ * Adds the same handler for all of the specified attrs
+ * @param {String} attrs Pipe-separated list of attributes
+ * @param {Function} handler The method that will be applied
+ */
+function addHandle( attrs, handler ) {
+	var arr = attrs.split("|"),
+		i = arr.length;
+
+	while ( i-- ) {
+		Expr.attrHandle[ arr[i] ] = handler;
+	}
+}
+
+/**
+ * Checks document order of two siblings
+ * @param {Element} a
+ * @param {Element} b
+ * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b
+ */
+function siblingCheck( a, b ) {
+	var cur = b && a,
+		diff = cur && a.nodeType === 1 && b.nodeType === 1 &&
+			( ~b.sourceIndex || MAX_NEGATIVE ) -
+			( ~a.sourceIndex || MAX_NEGATIVE );
+
+	// Use IE sourceIndex if available on both nodes
+	if ( diff ) {
+		return diff;
+	}
+
+	// Check if b follows a
+	if ( cur ) {
+		while ( (cur = cur.nextSibling) ) {
+			if ( cur === b ) {
+				return -1;
+			}
+		}
+	}
+
+	return a ? 1 : -1;
+}
+
+/**
+ * Returns a function to use in pseudos for input types
+ * @param {String} type
+ */
+function createInputPseudo( type ) {
+	return function( elem ) {
+		var name = elem.nodeName.toLowerCase();
+		return name === "input" && elem.type === type;
+	};
+}
+
+/**
+ * Returns a function to use in pseudos for buttons
+ * @param {String} type
+ */
+function createButtonPseudo( type ) {
+	return function( elem ) {
+		var name = elem.nodeName.toLowerCase();
+		return (name === "input" || name === "button") && elem.type === type;
+	};
+}
+
+/**
+ * Returns a function to use in pseudos for positionals
+ * @param {Function} fn
+ */
+function createPositionalPseudo( fn ) {
+	return markFunction(function( argument ) {
+		argument = +argument;
+		return markFunction(function( seed, matches ) {
+			var j,
+				matchIndexes = fn( [], seed.length, argument ),
+				i = matchIndexes.length;
+
+			// Match elements found at the specified indexes
+			while ( i-- ) {
+				if ( seed[ (j = matchIndexes[i]) ] ) {
+					seed[j] = !(matches[j] = seed[j]);
+				}
+			}
+		});
+	});
+}
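+
+// Illustrative note (not part of the source): positional pseudos are built in
+// two curried steps; the outer call receives the argument (e.g. "1" in ":eq(1)")
+// and the inner function flips matching seed entries into the matches set.
+// The ":eq" definition further below is exactly this shape:
+//   createPositionalPseudo(function( matchIndexes, length, argument ) {
+//       return [ argument < 0 ? argument + length : argument ];
+//   });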
+
+/**
+ * Checks a node for validity as a Sizzle context
+ * @param {Element|Object=} context
+ * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value
+ */
+function testContext( context ) {
+	return context && typeof context.getElementsByTagName !== "undefined" && context;
+}
+
+// Expose support vars for convenience
+support = Sizzle.support = {};
+
+/**
+ * Detects XML nodes
+ * @param {Element|Object} elem An element or a document
+ * @returns {Boolean} True iff elem is a non-HTML XML node
+ */
+isXML = Sizzle.isXML = function( elem ) {
+	// documentElement is verified for cases where it doesn't yet exist
+	// (such as loading iframes in IE - #4833)
+	var documentElement = elem && (elem.ownerDocument || elem).documentElement;
+	return documentElement ? documentElement.nodeName !== "HTML" : false;
+};
+
+/**
+ * Sets document-related variables once based on the current document
+ * @param {Element|Object} [node] An element or document object to use to set the document
+ * @returns {Object} Returns the current document
+ */
+setDocument = Sizzle.setDocument = function( node ) {
+	var hasCompare, parent,
+		doc = node ? node.ownerDocument || node : preferredDoc;
+
+	// Return early if doc is invalid or already selected
+	if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) {
+		return document;
+	}
+
+	// Update global variables
+	document = doc;
+	docElem = document.documentElement;
+	documentIsHTML = !isXML( document );
+
+	// Support: IE 9-11, Edge
+	// Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936)
+	if ( (parent = document.defaultView) && parent.top !== parent ) {
+		// Support: IE 11
+		if ( parent.addEventListener ) {
+			parent.addEventListener( "unload", unloadHandler, false );
+
+		// Support: IE 9 - 10 only
+		} else if ( parent.attachEvent ) {
+			parent.attachEvent( "onunload", unloadHandler );
+		}
+	}
+
+	/* Attributes
+	---------------------------------------------------------------------- */
+
+	// Support: IE<8
+	// Verify that getAttribute really returns attributes and not properties
+	// (excepting IE8 booleans)
+	support.attributes = assert(function( div ) {
+		div.className = "i";
+		return !div.getAttribute("className");
+	});
+
+	/* getElement(s)By*
+	---------------------------------------------------------------------- */
+
+	// Check if getElementsByTagName("*") returns only elements
+	support.getElementsByTagName = assert(function( div ) {
+		div.appendChild( document.createComment("") );
+		return !div.getElementsByTagName("*").length;
+	});
+
+	// Support: IE<9
+	support.getElementsByClassName = rnative.test( document.getElementsByClassName );
+
+	// Support: IE<10
+	// Check if getElementById returns elements by name
+	// The broken getElementById methods don't pick up programmatically-set names,
+	// so use a roundabout getElementsByName test
+	support.getById = assert(function( div ) {
+		docElem.appendChild( div ).id = expando;
+		return !document.getElementsByName || !document.getElementsByName( expando ).length;
+	});
+
+	// ID find and filter
+	if ( support.getById ) {
+		Expr.find["ID"] = function( id, context ) {
+			if ( typeof context.getElementById !== "undefined" && documentIsHTML ) {
+				var m = context.getElementById( id );
+				return m ? [ m ] : [];
+			}
+		};
+		Expr.filter["ID"] = function( id ) {
+			var attrId = id.replace( runescape, funescape );
+			return function( elem ) {
+				return elem.getAttribute("id") === attrId;
+			};
+		};
+	} else {
+		// Support: IE6/7
+		// getElementById is not reliable as a find shortcut
+		delete Expr.find["ID"];
+
+		Expr.filter["ID"] =  function( id ) {
+			var attrId = id.replace( runescape, funescape );
+			return function( elem ) {
+				var node = typeof elem.getAttributeNode !== "undefined" &&
+					elem.getAttributeNode("id");
+				return node && node.value === attrId;
+			};
+		};
+	}
+
+	// Tag
+	Expr.find["TAG"] = support.getElementsByTagName ?
+		function( tag, context ) {
+			if ( typeof context.getElementsByTagName !== "undefined" ) {
+				return context.getElementsByTagName( tag );
+
+			// DocumentFragment nodes don't have gEBTN
+			} else if ( support.qsa ) {
+				return context.querySelectorAll( tag );
+			}
+		} :
+
+		function( tag, context ) {
+			var elem,
+				tmp = [],
+				i = 0,
+				// By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too
+				results = context.getElementsByTagName( tag );
+
+			// Filter out possible comments
+			if ( tag === "*" ) {
+				while ( (elem = results[i++]) ) {
+					if ( elem.nodeType === 1 ) {
+						tmp.push( elem );
+					}
+				}
+
+				return tmp;
+			}
+			return results;
+		};
+
+	// Class
+	Expr.find["CLASS"] = support.getElementsByClassName && function( className, context ) {
+		if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) {
+			return context.getElementsByClassName( className );
+		}
+	};
+
+	/* QSA/matchesSelector
+	---------------------------------------------------------------------- */
+
+	// QSA and matchesSelector support
+
+	// matchesSelector(:active) reports false when true (IE9/Opera 11.5)
+	rbuggyMatches = [];
+
+	// qSA(:focus) reports false when true (Chrome 21)
+	// We allow this because of a bug in IE8/9 that throws an error
+	// whenever `document.activeElement` is accessed on an iframe
+	// So, we allow :focus to pass through QSA all the time to avoid the IE error
+	// See http://bugs.jquery.com/ticket/13378
+	rbuggyQSA = [];
+
+	if ( (support.qsa = rnative.test( document.querySelectorAll )) ) {
+		// Build QSA regex
+		// Regex strategy adopted from Diego Perini
+		assert(function( div ) {
+			// Select is set to empty string on purpose
+			// This is to test IE's treatment of not explicitly
+			// setting a boolean content attribute,
+			// since its presence should be enough
+			// http://bugs.jquery.com/ticket/12359
+			docElem.appendChild( div ).innerHTML = "<a id='" + expando + "'></a>" +
+				"<select id='" + expando + "-\r\\' msallowcapture=''>" +
+				"<option selected=''></option></select>";
+
+			// Support: IE8, Opera 11-12.16
+			// Nothing should be selected when empty strings follow ^= or $= or *=
+			// The test attribute must be unknown in Opera but "safe" for WinRT
+			// http://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section
+			if ( div.querySelectorAll("[msallowcapture^='']").length ) {
+				rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" );
+			}
+
+			// Support: IE8
+			// Boolean attributes and "value" are not treated correctly
+			if ( !div.querySelectorAll("[selected]").length ) {
+				rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" );
+			}
+
+			// Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+
+			if ( !div.querySelectorAll( "[id~=" + expando + "-]" ).length ) {
+				rbuggyQSA.push("~=");
+			}
+
+			// Webkit/Opera - :checked should return selected option elements
+			// http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
+			// IE8 throws error here and will not see later tests
+			if ( !div.querySelectorAll(":checked").length ) {
+				rbuggyQSA.push(":checked");
+			}
+
+			// Support: Safari 8+, iOS 8+
+			// https://bugs.webkit.org/show_bug.cgi?id=136851
+			// In-page `selector#id sibling-combinator selector` fails
+			if ( !div.querySelectorAll( "a#" + expando + "+*" ).length ) {
+				rbuggyQSA.push(".#.+[+~]");
+			}
+		});
+
+		assert(function( div ) {
+			// Support: Windows 8 Native Apps
+			// The type and name attributes are restricted during .innerHTML assignment
+			var input = document.createElement("input");
+			input.setAttribute( "type", "hidden" );
+			div.appendChild( input ).setAttribute( "name", "D" );
+
+			// Support: IE8
+			// Enforce case-sensitivity of name attribute
+			if ( div.querySelectorAll("[name=d]").length ) {
+				rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" );
+			}
+
+			// FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled)
+			// IE8 throws error here and will not see later tests
+			if ( !div.querySelectorAll(":enabled").length ) {
+				rbuggyQSA.push( ":enabled", ":disabled" );
+			}
+
+			// Opera 10-11 does not throw on post-comma invalid pseudos
+			div.querySelectorAll("*,:x");
+			rbuggyQSA.push(",.*:");
+		});
+	}
+
+	if ( (support.matchesSelector = rnative.test( (matches = docElem.matches ||
+		docElem.webkitMatchesSelector ||
+		docElem.mozMatchesSelector ||
+		docElem.oMatchesSelector ||
+		docElem.msMatchesSelector) )) ) {
+
+		assert(function( div ) {
+			// Check to see if it's possible to do matchesSelector
+			// on a disconnected node (IE 9)
+			support.disconnectedMatch = matches.call( div, "div" );
+
+			// This should fail with an exception
+			// Gecko does not error, returns false instead
+			matches.call( div, "[s!='']:x" );
+			rbuggyMatches.push( "!=", pseudos );
+		});
+	}
+
+	rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join("|") );
+	rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join("|") );
+
+	/* Contains
+	---------------------------------------------------------------------- */
+	hasCompare = rnative.test( docElem.compareDocumentPosition );
+
+	// Element contains another
+	// Purposefully self-exclusive
+	// As in, an element does not contain itself
+	contains = hasCompare || rnative.test( docElem.contains ) ?
+		function( a, b ) {
+			var adown = a.nodeType === 9 ? a.documentElement : a,
+				bup = b && b.parentNode;
+			return a === bup || !!( bup && bup.nodeType === 1 && (
+				adown.contains ?
+					adown.contains( bup ) :
+					a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16
+			));
+		} :
+		function( a, b ) {
+			if ( b ) {
+				while ( (b = b.parentNode) ) {
+					if ( b === a ) {
+						return true;
+					}
+				}
+			}
+			return false;
+		};
+
+	/* Sorting
+	---------------------------------------------------------------------- */
+
+	// Document order sorting
+	sortOrder = hasCompare ?
+	function( a, b ) {
+
+		// Flag for duplicate removal
+		if ( a === b ) {
+			hasDuplicate = true;
+			return 0;
+		}
+
+		// Sort on method existence if only one input has compareDocumentPosition
+		var compare = !a.compareDocumentPosition - !b.compareDocumentPosition;
+		if ( compare ) {
+			return compare;
+		}
+
+		// Calculate position if both inputs belong to the same document
+		compare = ( a.ownerDocument || a ) === ( b.ownerDocument || b ) ?
+			a.compareDocumentPosition( b ) :
+
+			// Otherwise we know they are disconnected
+			1;
+
+		// Disconnected nodes
+		if ( compare & 1 ||
+			(!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) {
+
+			// Choose the first element that is related to our preferred document
+			if ( a === document || a.ownerDocument === preferredDoc && contains(preferredDoc, a) ) {
+				return -1;
+			}
+			if ( b === document || b.ownerDocument === preferredDoc && contains(preferredDoc, b) ) {
+				return 1;
+			}
+
+			// Maintain original order
+			return sortInput ?
+				( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) :
+				0;
+		}
+
+		return compare & 4 ? -1 : 1;
+	} :
+	function( a, b ) {
+		// Exit early if the nodes are identical
+		if ( a === b ) {
+			hasDuplicate = true;
+			return 0;
+		}
+
+		var cur,
+			i = 0,
+			aup = a.parentNode,
+			bup = b.parentNode,
+			ap = [ a ],
+			bp = [ b ];
+
+		// Parentless nodes are either documents or disconnected
+		if ( !aup || !bup ) {
+			return a === document ? -1 :
+				b === document ? 1 :
+				aup ? -1 :
+				bup ? 1 :
+				sortInput ?
+				( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) :
+				0;
+
+		// If the nodes are siblings, we can do a quick check
+		} else if ( aup === bup ) {
+			return siblingCheck( a, b );
+		}
+
+		// Otherwise we need full lists of their ancestors for comparison
+		cur = a;
+		while ( (cur = cur.parentNode) ) {
+			ap.unshift( cur );
+		}
+		cur = b;
+		while ( (cur = cur.parentNode) ) {
+			bp.unshift( cur );
+		}
+
+		// Walk down the tree looking for a discrepancy
+		while ( ap[i] === bp[i] ) {
+			i++;
+		}
+
+		return i ?
+			// Do a sibling check if the nodes have a common ancestor
+			siblingCheck( ap[i], bp[i] ) :
+
+			// Otherwise nodes in our document sort first
+			ap[i] === preferredDoc ? -1 :
+			bp[i] === preferredDoc ? 1 :
+			0;
+	};
+
+	return document;
+};
+
+Sizzle.matches = function( expr, elements ) {
+	return Sizzle( expr, null, null, elements );
+};
+
+Sizzle.matchesSelector = function( elem, expr ) {
+	// Set document vars if needed
+	if ( ( elem.ownerDocument || elem ) !== document ) {
+		setDocument( elem );
+	}
+
+	// Make sure that attribute selectors are quoted
+	expr = expr.replace( rattributeQuotes, "='$1']" );
+
+	if ( support.matchesSelector && documentIsHTML &&
+		!compilerCache[ expr + " " ] &&
+		( !rbuggyMatches || !rbuggyMatches.test( expr ) ) &&
+		( !rbuggyQSA     || !rbuggyQSA.test( expr ) ) ) {
+
+		try {
+			var ret = matches.call( elem, expr );
+
+			// IE 9's matchesSelector returns false on disconnected nodes
+			if ( ret || support.disconnectedMatch ||
+					// As well, disconnected nodes are said to be in a document
+					// fragment in IE 9
+					elem.document && elem.document.nodeType !== 11 ) {
+				return ret;
+			}
+		} catch (e) {}
+	}
+
+	return Sizzle( expr, document, null, [ elem ] ).length > 0;
+};
+
+Sizzle.contains = function( context, elem ) {
+	// Set document vars if needed
+	if ( ( context.ownerDocument || context ) !== document ) {
+		setDocument( context );
+	}
+	return contains( context, elem );
+};
+
+Sizzle.attr = function( elem, name ) {
+	// Set document vars if needed
+	if ( ( elem.ownerDocument || elem ) !== document ) {
+		setDocument( elem );
+	}
+
+	var fn = Expr.attrHandle[ name.toLowerCase() ],
+		// Don't get fooled by Object.prototype properties (jQuery #13807)
+		val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ?
+			fn( elem, name, !documentIsHTML ) :
+			undefined;
+
+	return val !== undefined ?
+		val :
+		support.attributes || !documentIsHTML ?
+			elem.getAttribute( name ) :
+			(val = elem.getAttributeNode(name)) && val.specified ?
+				val.value :
+				null;
+};
+
+Sizzle.error = function( msg ) {
+	throw new Error( "Syntax error, unrecognized expression: " + msg );
+};
+
+/**
+ * Document sorting and removing duplicates
+ * @param {ArrayLike} results
+ */
+Sizzle.uniqueSort = function( results ) {
+	var elem,
+		duplicates = [],
+		j = 0,
+		i = 0;
+
+	// Unless we *know* we can detect duplicates, assume their presence
+	hasDuplicate = !support.detectDuplicates;
+	sortInput = !support.sortStable && results.slice( 0 );
+	results.sort( sortOrder );
+
+	if ( hasDuplicate ) {
+		while ( (elem = results[i++]) ) {
+			if ( elem === results[ i ] ) {
+				j = duplicates.push( i );
+			}
+		}
+		while ( j-- ) {
+			results.splice( duplicates[ j ], 1 );
+		}
+	}
+
+	// Clear input after sorting to release objects
+	// See https://github.com/jquery/sizzle/pull/225
+	sortInput = null;
+
+	return results;
+};
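+
+// Illustrative usage sketch (not part of the source): uniqueSort mutates and
+// returns its argument, ordering elements by document position and removing
+// the duplicates that the sort brings together:
+//   Sizzle.uniqueSort( [ b, a, a ] ); // => [ a, b ] for elements a before b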
+
+/**
+ * Utility function for retrieving the text value of an array of DOM nodes
+ * @param {Array|Element} elem
+ */
+getText = Sizzle.getText = function( elem ) {
+	var node,
+		ret = "",
+		i = 0,
+		nodeType = elem.nodeType;
+
+	if ( !nodeType ) {
+		// If no nodeType, this is expected to be an array
+		while ( (node = elem[i++]) ) {
+			// Do not traverse comment nodes
+			ret += getText( node );
+		}
+	} else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) {
+		// Use textContent for elements
+		// innerText usage removed for consistency of new lines (jQuery #11153)
+		if ( typeof elem.textContent === "string" ) {
+			return elem.textContent;
+		} else {
+			// Traverse its children
+			for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {
+				ret += getText( elem );
+			}
+		}
+	} else if ( nodeType === 3 || nodeType === 4 ) {
+		return elem.nodeValue;
+	}
+	// Do not include comment or processing instruction nodes
+
+	return ret;
+};
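+
+// Illustrative usage sketch (not part of the source): given
+// <p id="p">Hello <b>world</b><!-- hidden --></p>, comments are skipped:
+//   Sizzle.getText( document.getElementById( "p" ) ); // "Hello world"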
+
+Expr = Sizzle.selectors = {
+
+	// Can be adjusted by the user
+	cacheLength: 50,
+
+	createPseudo: markFunction,
+
+	match: matchExpr,
+
+	attrHandle: {},
+
+	find: {},
+
+	relative: {
+		">": { dir: "parentNode", first: true },
+		" ": { dir: "parentNode" },
+		"+": { dir: "previousSibling", first: true },
+		"~": { dir: "previousSibling" }
+	},
+
+	preFilter: {
+		"ATTR": function( match ) {
+			match[1] = match[1].replace( runescape, funescape );
+
+			// Move the given value to match[3] whether quoted or unquoted
+			match[3] = ( match[3] || match[4] || match[5] || "" ).replace( runescape, funescape );
+
+			if ( match[2] === "~=" ) {
+				match[3] = " " + match[3] + " ";
+			}
+
+			return match.slice( 0, 4 );
+		},
+
+		"CHILD": function( match ) {
+			/* matches from matchExpr["CHILD"]
+				1 type (only|nth|...)
+				2 what (child|of-type)
+				3 argument (even|odd|\d*|\d*n([+-]\d+)?|...)
+				4 xn-component of xn+y argument ([+-]?\d*n|)
+				5 sign of xn-component
+				6 x of xn-component
+				7 sign of y-component
+				8 y of y-component
+			*/
+			match[1] = match[1].toLowerCase();
+
+			if ( match[1].slice( 0, 3 ) === "nth" ) {
+				// nth-* requires argument
+				if ( !match[3] ) {
+					Sizzle.error( match[0] );
+				}
+
+				// numeric x and y parameters for Expr.filter.CHILD
+				// remember that false/true cast respectively to 0/1
+				match[4] = +( match[4] ? match[5] + (match[6] || 1) : 2 * ( match[3] === "even" || match[3] === "odd" ) );
+				match[5] = +( ( match[7] + match[8] ) || match[3] === "odd" );
+
+			// other types prohibit arguments
+			} else if ( match[3] ) {
+				Sizzle.error( match[0] );
+			}
+
+			return match;
+		},
+
+		"PSEUDO": function( match ) {
+			var excess,
+				unquoted = !match[6] && match[2];
+
+			if ( matchExpr["CHILD"].test( match[0] ) ) {
+				return null;
+			}
+
+			// Accept quoted arguments as-is
+			if ( match[3] ) {
+				match[2] = match[4] || match[5] || "";
+
+			// Strip excess characters from unquoted arguments
+			} else if ( unquoted && rpseudo.test( unquoted ) &&
+				// Get excess from tokenize (recursively)
+				(excess = tokenize( unquoted, true )) &&
+				// advance to the next closing parenthesis
+				(excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) {
+
+				// excess is a negative index
+				match[0] = match[0].slice( 0, excess );
+				match[2] = unquoted.slice( 0, excess );
+			}
+
+			// Return only captures needed by the pseudo filter method (type and argument)
+			return match.slice( 0, 3 );
+		}
+	},
+
+	filter: {
+
+		"TAG": function( nodeNameSelector ) {
+			var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase();
+			return nodeNameSelector === "*" ?
+				function() { return true; } :
+				function( elem ) {
+					return elem.nodeName && elem.nodeName.toLowerCase() === nodeName;
+				};
+		},
+
+		"CLASS": function( className ) {
+			var pattern = classCache[ className + " " ];
+
+			return pattern ||
+				(pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) &&
+				classCache( className, function( elem ) {
+					return pattern.test( typeof elem.className === "string" && elem.className || typeof elem.getAttribute !== "undefined" && elem.getAttribute("class") || "" );
+				});
+		},
+
+		"ATTR": function( name, operator, check ) {
+			return function( elem ) {
+				var result = Sizzle.attr( elem, name );
+
+				if ( result == null ) {
+					return operator === "!=";
+				}
+				if ( !operator ) {
+					return true;
+				}
+
+				result += "";
+
+				return operator === "=" ? result === check :
+					operator === "!=" ? result !== check :
+					operator === "^=" ? check && result.indexOf( check ) === 0 :
+					operator === "*=" ? check && result.indexOf( check ) > -1 :
+					operator === "$=" ? check && result.slice( -check.length ) === check :
+					operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 :
+					operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" :
+					false;
+			};
+		},
+
+		"CHILD": function( type, what, argument, first, last ) {
+			var simple = type.slice( 0, 3 ) !== "nth",
+				forward = type.slice( -4 ) !== "last",
+				ofType = what === "of-type";
+
+			return first === 1 && last === 0 ?
+
+				// Shortcut for :nth-*(n)
+				function( elem ) {
+					return !!elem.parentNode;
+				} :
+
+				function( elem, context, xml ) {
+					var cache, uniqueCache, outerCache, node, nodeIndex, start,
+						dir = simple !== forward ? "nextSibling" : "previousSibling",
+						parent = elem.parentNode,
+						name = ofType && elem.nodeName.toLowerCase(),
+						useCache = !xml && !ofType,
+						diff = false;
+
+					if ( parent ) {
+
+						// :(first|last|only)-(child|of-type)
+						if ( simple ) {
+							while ( dir ) {
+								node = elem;
+								while ( (node = node[ dir ]) ) {
+									if ( ofType ?
+										node.nodeName.toLowerCase() === name :
+										node.nodeType === 1 ) {
+
+										return false;
+									}
+								}
+								// Reverse direction for :only-* (if we haven't yet done so)
+								start = dir = type === "only" && !start && "nextSibling";
+							}
+							return true;
+						}
+
+						start = [ forward ? parent.firstChild : parent.lastChild ];
+
+						// non-xml :nth-child(...) stores cache data on `parent`
+						if ( forward && useCache ) {
+
+							// Seek `elem` from a previously-cached index
+
+							// ...in a gzip-friendly way
+							node = parent;
+							outerCache = node[ expando ] || (node[ expando ] = {});
+
+							// Support: IE <9 only
+							// Defend against cloned attroperties (jQuery gh-1709)
+							uniqueCache = outerCache[ node.uniqueID ] ||
+								(outerCache[ node.uniqueID ] = {});
+
+							cache = uniqueCache[ type ] || [];
+							nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ];
+							diff = nodeIndex && cache[ 2 ];
+							node = nodeIndex && parent.childNodes[ nodeIndex ];
+
+							while ( (node = ++nodeIndex && node && node[ dir ] ||
+
+								// Fallback to seeking `elem` from the start
+								(diff = nodeIndex = 0) || start.pop()) ) {
+
+								// When found, cache indexes on `parent` and break
+								if ( node.nodeType === 1 && ++diff && node === elem ) {
+									uniqueCache[ type ] = [ dirruns, nodeIndex, diff ];
+									break;
+								}
+							}
+
+						} else {
+							// Use previously-cached element index if available
+							if ( useCache ) {
+								// ...in a gzip-friendly way
+								node = elem;
+								outerCache = node[ expando ] || (node[ expando ] = {});
+
+								// Support: IE <9 only
+								// Defend against cloned attroperties (jQuery gh-1709)
+								uniqueCache = outerCache[ node.uniqueID ] ||
+									(outerCache[ node.uniqueID ] = {});
+
+								cache = uniqueCache[ type ] || [];
+								nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ];
+								diff = nodeIndex;
+							}
+
+							// xml :nth-child(...)
+							// or :nth-last-child(...) or :nth(-last)?-of-type(...)
+							if ( diff === false ) {
+								// Use the same loop as above to seek `elem` from the start
+								while ( (node = ++nodeIndex && node && node[ dir ] ||
+									(diff = nodeIndex = 0) || start.pop()) ) {
+
+									if ( ( ofType ?
+										node.nodeName.toLowerCase() === name :
+										node.nodeType === 1 ) &&
+										++diff ) {
+
+										// Cache the index of each encountered element
+										if ( useCache ) {
+											outerCache = node[ expando ] || (node[ expando ] = {});
+
+											// Support: IE <9 only
+											// Defend against cloned attroperties (jQuery gh-1709)
+											uniqueCache = outerCache[ node.uniqueID ] ||
+												(outerCache[ node.uniqueID ] = {});
+
+											uniqueCache[ type ] = [ dirruns, diff ];
+										}
+
+										if ( node === elem ) {
+											break;
+										}
+									}
+								}
+							}
+						}
+
+						// Incorporate the offset, then check against cycle size
+						diff -= last;
+						return diff === first || ( diff % first === 0 && diff / first >= 0 );
+					}
+				};
+		},
+
+		"PSEUDO": function( pseudo, argument ) {
+			// pseudo-class names are case-insensitive
+			// http://www.w3.org/TR/selectors/#pseudo-classes
+			// Prioritize by case sensitivity in case custom pseudos are added with uppercase letters
+			// Remember that setFilters inherits from pseudos
+			var args,
+				fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] ||
+					Sizzle.error( "unsupported pseudo: " + pseudo );
+
+			// The user may use createPseudo to indicate that
+			// arguments are needed to create the filter function
+			// just as Sizzle does
+			if ( fn[ expando ] ) {
+				return fn( argument );
+			}
+
+			// But maintain support for old signatures
+			if ( fn.length > 1 ) {
+				args = [ pseudo, pseudo, "", argument ];
+				return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ?
+					markFunction(function( seed, matches ) {
+						var idx,
+							matched = fn( seed, argument ),
+							i = matched.length;
+						while ( i-- ) {
+							idx = indexOf( seed, matched[i] );
+							seed[ idx ] = !( matches[ idx ] = matched[i] );
+						}
+					}) :
+					function( elem ) {
+						return fn( elem, 0, args );
+					};
+			}
+
+			return fn;
+		}
+	},
+
+	pseudos: {
+		// Potentially complex pseudos
+		"not": markFunction(function( selector ) {
+			// Trim the selector passed to compile
+			// to avoid treating leading and trailing
+			// spaces as combinators
+			var input = [],
+				results = [],
+				matcher = compile( selector.replace( rtrim, "$1" ) );
+
+			return matcher[ expando ] ?
+				markFunction(function( seed, matches, context, xml ) {
+					var elem,
+						unmatched = matcher( seed, null, xml, [] ),
+						i = seed.length;
+
+					// Match elements unmatched by `matcher`
+					while ( i-- ) {
+						if ( (elem = unmatched[i]) ) {
+							seed[i] = !(matches[i] = elem);
+						}
+					}
+				}) :
+				function( elem, context, xml ) {
+					input[0] = elem;
+					matcher( input, null, xml, results );
+					// Don't keep the element (issue #299)
+					input[0] = null;
+					return !results.pop();
+				};
+		}),
+
+		"has": markFunction(function( selector ) {
+			return function( elem ) {
+				return Sizzle( selector, elem ).length > 0;
+			};
+		}),
+
+		"contains": markFunction(function( text ) {
+			text = text.replace( runescape, funescape );
+			return function( elem ) {
+				return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1;
+			};
+		}),
+
+		// "Whether an element is represented by a :lang() selector
+		// is based solely on the element's language value
+		// being equal to the identifier C,
+		// or beginning with the identifier C immediately followed by "-".
+		// The matching of C against the element's language value is performed case-insensitively.
+		// The identifier C does not have to be a valid language name."
+		// http://www.w3.org/TR/selectors/#lang-pseudo
+		"lang": markFunction( function( lang ) {
+			// lang value must be a valid identifier
+			if ( !ridentifier.test(lang || "") ) {
+				Sizzle.error( "unsupported lang: " + lang );
+			}
+			lang = lang.replace( runescape, funescape ).toLowerCase();
+			return function( elem ) {
+				var elemLang;
+				do {
+					if ( (elemLang = documentIsHTML ?
+						elem.lang :
+						elem.getAttribute("xml:lang") || elem.getAttribute("lang")) ) {
+
+						elemLang = elemLang.toLowerCase();
+						return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0;
+					}
+				} while ( (elem = elem.parentNode) && elem.nodeType === 1 );
+				return false;
+			};
+		}),
+
+		// Miscellaneous
+		"target": function( elem ) {
+			var hash = window.location && window.location.hash;
+			return hash && hash.slice( 1 ) === elem.id;
+		},
+
+		"root": function( elem ) {
+			return elem === docElem;
+		},
+
+		"focus": function( elem ) {
+			return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex);
+		},
+
+		// Boolean properties
+		"enabled": function( elem ) {
+			return elem.disabled === false;
+		},
+
+		"disabled": function( elem ) {
+			return elem.disabled === true;
+		},
+
+		"checked": function( elem ) {
+			// In CSS3, :checked should return both checked and selected elements
+			// http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
+			var nodeName = elem.nodeName.toLowerCase();
+			return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected);
+		},
+
+		"selected": function( elem ) {
+			// Accessing this property makes selected-by-default
+			// options in Safari work properly
+			if ( elem.parentNode ) {
+				elem.parentNode.selectedIndex;
+			}
+
+			return elem.selected === true;
+		},
+
+		// Contents
+		"empty": function( elem ) {
+			// http://www.w3.org/TR/selectors/#empty-pseudo
+			// :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5),
+			//   but not by others (comment: 8; processing instruction: 7; etc.)
+			// nodeType < 6 works because attributes (2) do not appear as children
+			for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {
+				if ( elem.nodeType < 6 ) {
+					return false;
+				}
+			}
+			return true;
+		},
+
+		"parent": function( elem ) {
+			return !Expr.pseudos["empty"]( elem );
+		},
+
+		// Element/input types
+		"header": function( elem ) {
+			return rheader.test( elem.nodeName );
+		},
+
+		"input": function( elem ) {
+			return rinputs.test( elem.nodeName );
+		},
+
+		"button": function( elem ) {
+			var name = elem.nodeName.toLowerCase();
+			return name === "input" && elem.type === "button" || name === "button";
+		},
+
+		"text": function( elem ) {
+			var attr;
+			return elem.nodeName.toLowerCase() === "input" &&
+				elem.type === "text" &&
+
+				// Support: IE<8
+				// New HTML5 attribute values (e.g., "search") appear with elem.type === "text"
+				( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === "text" );
+		},
+
+		// Position-in-collection
+		"first": createPositionalPseudo(function() {
+			return [ 0 ];
+		}),
+
+		"last": createPositionalPseudo(function( matchIndexes, length ) {
+			return [ length - 1 ];
+		}),
+
+		"eq": createPositionalPseudo(function( matchIndexes, length, argument ) {
+			return [ argument < 0 ? argument + length : argument ];
+		}),
+
+		"even": createPositionalPseudo(function( matchIndexes, length ) {
+			var i = 0;
+			for ( ; i < length; i += 2 ) {
+				matchIndexes.push( i );
+			}
+			return matchIndexes;
+		}),
+
+		"odd": createPositionalPseudo(function( matchIndexes, length ) {
+			var i = 1;
+			for ( ; i < length; i += 2 ) {
+				matchIndexes.push( i );
+			}
+			return matchIndexes;
+		}),
+
+		"lt": createPositionalPseudo(function( matchIndexes, length, argument ) {
+			var i = argument < 0 ? argument + length : argument;
+			for ( ; --i >= 0; ) {
+				matchIndexes.push( i );
+			}
+			return matchIndexes;
+		}),
+
+		"gt": createPositionalPseudo(function( matchIndexes, length, argument ) {
+			var i = argument < 0 ? argument + length : argument;
+			for ( ; ++i < length; ) {
+				matchIndexes.push( i );
+			}
+			return matchIndexes;
+		})
+	}
+};
+
+Expr.pseudos["nth"] = Expr.pseudos["eq"];
+
+// Add button/input type pseudos
+for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) {
+	Expr.pseudos[ i ] = createInputPseudo( i );
+}
+for ( i in { submit: true, reset: true } ) {
+	Expr.pseudos[ i ] = createButtonPseudo( i );
+}
+
+// Easy API for creating new setFilters
+function setFilters() {}
+setFilters.prototype = Expr.filters = Expr.pseudos;
+Expr.setFilters = new setFilters();
+
+tokenize = Sizzle.tokenize = function( selector, parseOnly ) {
+	var matched, match, tokens, type,
+		soFar, groups, preFilters,
+		cached = tokenCache[ selector + " " ];
+
+	if ( cached ) {
+		return parseOnly ? 0 : cached.slice( 0 );
+	}
+
+	soFar = selector;
+	groups = [];
+	preFilters = Expr.preFilter;
+
+	while ( soFar ) {
+
+		// Comma and first run
+		if ( !matched || (match = rcomma.exec( soFar )) ) {
+			if ( match ) {
+				// Don't consume trailing commas as valid
+				soFar = soFar.slice( match[0].length ) || soFar;
+			}
+			groups.push( (tokens = []) );
+		}
+
+		matched = false;
+
+		// Combinators
+		if ( (match = rcombinators.exec( soFar )) ) {
+			matched = match.shift();
+			tokens.push({
+				value: matched,
+				// Cast descendant combinators to space
+				type: match[0].replace( rtrim, " " )
+			});
+			soFar = soFar.slice( matched.length );
+		}
+
+		// Filters
+		for ( type in Expr.filter ) {
+			if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] ||
+				(match = preFilters[ type ]( match ))) ) {
+				matched = match.shift();
+				tokens.push({
+					value: matched,
+					type: type,
+					matches: match
+				});
+				soFar = soFar.slice( matched.length );
+			}
+		}
+
+		if ( !matched ) {
+			break;
+		}
+	}
+
+	// Return the length of the invalid excess
+	// if we're just parsing
+	// Otherwise, throw an error or return tokens
+	return parseOnly ?
+		soFar.length :
+		soFar ?
+			Sizzle.error( selector ) :
+			// Cache the tokens
+			tokenCache( selector, groups ).slice( 0 );
+};
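+
+// Illustrative note (not part of the source): tokenize yields one token group
+// per comma-separated selector; "div > p.note" becomes a single group roughly
+// of the form below (filter tokens also carry their regex captures in `matches`):
+//   [ { type: "TAG", value: "div" },
+//     { type: ">", value: " > " },
+//     { type: "TAG", value: "p" },
+//     { type: "CLASS", value: ".note" } ]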
+
+function toSelector( tokens ) {
+	var i = 0,
+		len = tokens.length,
+		selector = "";
+	for ( ; i < len; i++ ) {
+		selector += tokens[i].value;
+	}
+	return selector;
+}
+
+function addCombinator( matcher, combinator, base ) {
+	var dir = combinator.dir,
+		checkNonElements = base && dir === "parentNode",
+		doneName = done++;
+
+	return combinator.first ?
+		// Check against closest ancestor/preceding element
+		function( elem, context, xml ) {
+			while ( (elem = elem[ dir ]) ) {
+				if ( elem.nodeType === 1 || checkNonElements ) {
+					return matcher( elem, context, xml );
+				}
+			}
+		} :
+
+		// Check against all ancestor/preceding elements
+		function( elem, context, xml ) {
+			var oldCache, uniqueCache, outerCache,
+				newCache = [ dirruns, doneName ];
+
+			// We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching
+			if ( xml ) {
+				while ( (elem = elem[ dir ]) ) {
+					if ( elem.nodeType === 1 || checkNonElements ) {
+						if ( matcher( elem, context, xml ) ) {
+							return true;
+						}
+					}
+				}
+			} else {
+				while ( (elem = elem[ dir ]) ) {
+					if ( elem.nodeType === 1 || checkNonElements ) {
+						outerCache = elem[ expando ] || (elem[ expando ] = {});
+
+						// Support: IE <9 only
+						// Defend against cloned attroperties (jQuery gh-1709)
+						uniqueCache = outerCache[ elem.uniqueID ] || (outerCache[ elem.uniqueID ] = {});
+
+						if ( (oldCache = uniqueCache[ dir ]) &&
+							oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) {
+
+							// Assign to newCache so results back-propagate to previous elements
+							return (newCache[ 2 ] = oldCache[ 2 ]);
+						} else {
+							// Reuse newCache so results back-propagate to previous elements
+							uniqueCache[ dir ] = newCache;
+
+							// A match means we're done; a fail means we have to keep checking
+							if ( (newCache[ 2 ] = matcher( elem, context, xml )) ) {
+								return true;
+							}
+						}
+					}
+				}
+			}
+		};
+}
+
+function elementMatcher( matchers ) {
+	return matchers.length > 1 ?
+		function( elem, context, xml ) {
+			var i = matchers.length;
+			while ( i-- ) {
+				if ( !matchers[i]( elem, context, xml ) ) {
+					return false;
+				}
+			}
+			return true;
+		} :
+		matchers[0];
+}
+
+function multipleContexts( selector, contexts, results ) {
+	var i = 0,
+		len = contexts.length;
+	for ( ; i < len; i++ ) {
+		Sizzle( selector, contexts[i], results );
+	}
+	return results;
+}
+
+function condense( unmatched, map, filter, context, xml ) {
+	var elem,
+		newUnmatched = [],
+		i = 0,
+		len = unmatched.length,
+		mapped = map != null;
+
+	for ( ; i < len; i++ ) {
+		if ( (elem = unmatched[i]) ) {
+			if ( !filter || filter( elem, context, xml ) ) {
+				newUnmatched.push( elem );
+				if ( mapped ) {
+					map.push( i );
+				}
+			}
+		}
+	}
+
+	return newUnmatched;
+}
+
+function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) {
+	if ( postFilter && !postFilter[ expando ] ) {
+		postFilter = setMatcher( postFilter );
+	}
+	if ( postFinder && !postFinder[ expando ] ) {
+		postFinder = setMatcher( postFinder, postSelector );
+	}
+	return markFunction(function( seed, results, context, xml ) {
+		var temp, i, elem,
+			preMap = [],
+			postMap = [],
+			preexisting = results.length,
+
+			// Get initial elements from seed or context
+			elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ),
+
+			// Prefilter to get matcher input, preserving a map for seed-results synchronization
+			matcherIn = preFilter && ( seed || !selector ) ?
+				condense( elems, preMap, preFilter, context, xml ) :
+				elems,
+
+			matcherOut = matcher ?
+				// If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results,
+				postFinder || ( seed ? preFilter : preexisting || postFilter ) ?
+
+					// ...intermediate processing is necessary
+					[] :
+
+					// ...otherwise use results directly
+					results :
+				matcherIn;
+
+		// Find primary matches
+		if ( matcher ) {
+			matcher( matcherIn, matcherOut, context, xml );
+		}
+
+		// Apply postFilter
+		if ( postFilter ) {
+			temp = condense( matcherOut, postMap );
+			postFilter( temp, [], context, xml );
+
+			// Un-match failing elements by moving them back to matcherIn
+			i = temp.length;
+			while ( i-- ) {
+				if ( (elem = temp[i]) ) {
+					matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem);
+				}
+			}
+		}
+
+		if ( seed ) {
+			if ( postFinder || preFilter ) {
+				if ( postFinder ) {
+					// Get the final matcherOut by condensing this intermediate into postFinder contexts
+					temp = [];
+					i = matcherOut.length;
+					while ( i-- ) {
+						if ( (elem = matcherOut[i]) ) {
+							// Restore matcherIn since elem is not yet a final match
+							temp.push( (matcherIn[i] = elem) );
+						}
+					}
+					postFinder( null, (matcherOut = []), temp, xml );
+				}
+
+				// Move matched elements from seed to results to keep them synchronized
+				i = matcherOut.length;
+				while ( i-- ) {
+					if ( (elem = matcherOut[i]) &&
+						(temp = postFinder ? indexOf( seed, elem ) : preMap[i]) > -1 ) {
+
+						seed[temp] = !(results[temp] = elem);
+					}
+				}
+			}
+
+		// Add elements to results, through postFinder if defined
+		} else {
+			matcherOut = condense(
+				matcherOut === results ?
+					matcherOut.splice( preexisting, matcherOut.length ) :
+					matcherOut
+			);
+			if ( postFinder ) {
+				postFinder( null, results, matcherOut, xml );
+			} else {
+				push.apply( results, matcherOut );
+			}
+		}
+	});
+}
+
+function matcherFromTokens( tokens ) {
+	var checkContext, matcher, j,
+		len = tokens.length,
+		leadingRelative = Expr.relative[ tokens[0].type ],
+		implicitRelative = leadingRelative || Expr.relative[" "],
+		i = leadingRelative ? 1 : 0,
+
+		// The foundational matcher ensures that elements are reachable from top-level context(s)
+		matchContext = addCombinator( function( elem ) {
+			return elem === checkContext;
+		}, implicitRelative, true ),
+		matchAnyContext = addCombinator( function( elem ) {
+			return indexOf( checkContext, elem ) > -1;
+		}, implicitRelative, true ),
+		matchers = [ function( elem, context, xml ) {
+			var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || (
+				(checkContext = context).nodeType ?
+					matchContext( elem, context, xml ) :
+					matchAnyContext( elem, context, xml ) );
+			// Avoid hanging onto element (issue #299)
+			checkContext = null;
+			return ret;
+		} ];
+
+	for ( ; i < len; i++ ) {
+		if ( (matcher = Expr.relative[ tokens[i].type ]) ) {
+			matchers = [ addCombinator(elementMatcher( matchers ), matcher) ];
+		} else {
+			matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches );
+
+			// Return special upon seeing a positional matcher
+			if ( matcher[ expando ] ) {
+				// Find the next relative operator (if any) for proper handling
+				j = ++i;
+				for ( ; j < len; j++ ) {
+					if ( Expr.relative[ tokens[j].type ] ) {
+						break;
+					}
+				}
+				return setMatcher(
+					i > 1 && elementMatcher( matchers ),
+					i > 1 && toSelector(
+						// If the preceding token was a descendant combinator, insert an implicit any-element `*`
+						tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? "*" : "" })
+					).replace( rtrim, "$1" ),
+					matcher,
+					i < j && matcherFromTokens( tokens.slice( i, j ) ),
+					j < len && matcherFromTokens( (tokens = tokens.slice( j )) ),
+					j < len && toSelector( tokens )
+				);
+			}
+			matchers.push( matcher );
+		}
+	}
+
+	return elementMatcher( matchers );
+}
+
+function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
+	var bySet = setMatchers.length > 0,
+		byElement = elementMatchers.length > 0,
+		superMatcher = function( seed, context, xml, results, outermost ) {
+			var elem, j, matcher,
+				matchedCount = 0,
+				i = "0",
+				unmatched = seed && [],
+				setMatched = [],
+				contextBackup = outermostContext,
+				// We must always have either seed elements or outermost context
+				elems = seed || byElement && Expr.find["TAG"]( "*", outermost ),
+				// Use integer dirruns iff this is the outermost matcher
+				dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.random() || 0.1),
+				len = elems.length;
+
+			if ( outermost ) {
+				outermostContext = context === document || context || outermost;
+			}
+
+			// Add elements passing elementMatchers directly to results
+			// Support: IE<9, Safari
+			// Tolerate NodeList properties (IE: "length"; Safari: <number>) matching elements by id
+			for ( ; i !== len && (elem = elems[i]) != null; i++ ) {
+				if ( byElement && elem ) {
+					j = 0;
+					if ( !context && elem.ownerDocument !== document ) {
+						setDocument( elem );
+						xml = !documentIsHTML;
+					}
+					while ( (matcher = elementMatchers[j++]) ) {
+						if ( matcher( elem, context || document, xml) ) {
+							results.push( elem );
+							break;
+						}
+					}
+					if ( outermost ) {
+						dirruns = dirrunsUnique;
+					}
+				}
+
+				// Track unmatched elements for set filters
+				if ( bySet ) {
+					// They will have gone through all possible matchers
+					if ( (elem = !matcher && elem) ) {
+						matchedCount--;
+					}
+
+					// Lengthen the array for every element, matched or not
+					if ( seed ) {
+						unmatched.push( elem );
+					}
+				}
+			}
+
+			// `i` is now the count of elements visited above, and adding it to `matchedCount`
+			// makes the latter nonnegative.
+			matchedCount += i;
+
+			// Apply set filters to unmatched elements
+			// NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount`
+			// equals `i`), unless we didn't visit _any_ elements in the above loop because we have
+			// no element matchers and no seed.
+			// Incrementing an initially-string "0" `i` allows `i` to remain a string only in that
+			// case, which will result in a "00" `matchedCount` that differs from `i` but is also
+			// numerically zero.
+			if ( bySet && i !== matchedCount ) {
+				j = 0;
+				while ( (matcher = setMatchers[j++]) ) {
+					matcher( unmatched, setMatched, context, xml );
+				}
+
+				if ( seed ) {
+					// Reintegrate element matches to eliminate the need for sorting
+					if ( matchedCount > 0 ) {
+						while ( i-- ) {
+							if ( !(unmatched[i] || setMatched[i]) ) {
+								setMatched[i] = pop.call( results );
+							}
+						}
+					}
+
+					// Discard index placeholder values to get only actual matches
+					setMatched = condense( setMatched );
+				}
+
+				// Add matches to results
+				push.apply( results, setMatched );
+
+				// Without a seed, set matches that follow multiple successful matchers must be re-sorted into document order
+				if ( outermost && !seed && setMatched.length > 0 &&
+					( matchedCount + setMatchers.length ) > 1 ) {
+
+					Sizzle.uniqueSort( results );
+				}
+			}
+
+			// Override manipulation of globals by nested matchers
+			if ( outermost ) {
+				dirruns = dirrunsUnique;
+				outermostContext = contextBackup;
+			}
+
+			return unmatched;
+		};
+
+	return bySet ?
+		markFunction( superMatcher ) :
+		superMatcher;
+}
+
+compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) {
+	var i,
+		setMatchers = [],
+		elementMatchers = [],
+		cached = compilerCache[ selector + " " ];
+
+	if ( !cached ) {
+		// Generate a function of recursive functions that can be used to check each element
+		if ( !match ) {
+			match = tokenize( selector );
+		}
+		i = match.length;
+		while ( i-- ) {
+			cached = matcherFromTokens( match[i] );
+			if ( cached[ expando ] ) {
+				setMatchers.push( cached );
+			} else {
+				elementMatchers.push( cached );
+			}
+		}
+
+		// Cache the compiled function
+		cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) );
+
+		// Save selector and tokenization
+		cached.selector = selector;
+	}
+	return cached;
+};
+
+/**
+ * A low-level selection function that works with Sizzle's compiled
+ *  selector functions
+ * @param {String|Function} selector A selector or a pre-compiled
+ *  selector function built with Sizzle.compile
+ * @param {Element} context
+ * @param {Array} [results]
+ * @param {Array} [seed] A set of elements to match against
+ */
+select = Sizzle.select = function( selector, context, results, seed ) {
+	var i, tokens, token, type, find,
+		compiled = typeof selector === "function" && selector,
+		match = !seed && tokenize( (selector = compiled.selector || selector) );
+
+	results = results || [];
+
+	// Try to minimize operations if there is only one selector in the list and no seed
+	// (the latter of which guarantees us context)
+	if ( match.length === 1 ) {
+
+		// Reduce context if the leading compound selector is an ID
+		tokens = match[0] = match[0].slice( 0 );
+		if ( tokens.length > 2 && (token = tokens[0]).type === "ID" &&
+				support.getById && context.nodeType === 9 && documentIsHTML &&
+				Expr.relative[ tokens[1].type ] ) {
+
+			context = ( Expr.find["ID"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0];
+			if ( !context ) {
+				return results;
+
+			// Precompiled matchers will still verify ancestry, so step up a level
+			} else if ( compiled ) {
+				context = context.parentNode;
+			}
+
+			selector = selector.slice( tokens.shift().value.length );
+		}
+
+		// Fetch a seed set for right-to-left matching
+		i = matchExpr["needsContext"].test( selector ) ? 0 : tokens.length;
+		while ( i-- ) {
+			token = tokens[i];
+
+			// Abort if we hit a combinator
+			if ( Expr.relative[ (type = token.type) ] ) {
+				break;
+			}
+			if ( (find = Expr.find[ type ]) ) {
+				// Search, expanding context for leading sibling combinators
+				if ( (seed = find(
+					token.matches[0].replace( runescape, funescape ),
+					rsibling.test( tokens[0].type ) && testContext( context.parentNode ) || context
+				)) ) {
+
+					// If seed is empty or no tokens remain, we can return early
+					tokens.splice( i, 1 );
+					selector = seed.length && toSelector( tokens );
+					if ( !selector ) {
+						push.apply( results, seed );
+						return results;
+					}
+
+					break;
+				}
+			}
+		}
+	}
+
+	// Compile and execute a filtering function if one is not provided
+	// Provide `match` to avoid retokenization if we modified the selector above
+	( compiled || compile( selector, match ) )(
+		seed,
+		context,
+		!documentIsHTML,
+		results,
+		!context || rsibling.test( selector ) && testContext( context.parentNode ) || context
+	);
+	return results;
+};
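+
+// Illustrative sketch (commented out, not executed): compile() caches compiled
+// matchers by selector string, and select() accepts either a raw selector or a
+// precompiled function, skipping retokenization for the latter. The selector
+// below is an arbitrary example:
+//
+//     var compiled = Sizzle.compile( "div.item > a" );
+//     var anchors = Sizzle.select( compiled, document, [] );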
+
+// One-time assignments
+
+// Sort stability
+support.sortStable = expando.split("").sort( sortOrder ).join("") === expando;
+
+// Support: Chrome 14-35+
+// Always assume duplicates if they aren't passed to the comparison function
+support.detectDuplicates = !!hasDuplicate;
+
+// Initialize against the default document
+setDocument();
+
+// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27)
+// Detached nodes confoundingly follow *each other*
+support.sortDetached = assert(function( div1 ) {
+	// Should return 1, but returns 4 (following)
+	return div1.compareDocumentPosition( document.createElement("div") ) & 1;
+});
+
+// Support: IE<8
+// Prevent attribute/property "interpolation"
+// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
+if ( !assert(function( div ) {
+	div.innerHTML = "<a href='#'></a>";
+	return div.firstChild.getAttribute("href") === "#";
+}) ) {
+	addHandle( "type|href|height|width", function( elem, name, isXML ) {
+		if ( !isXML ) {
+			return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 );
+		}
+	});
+}
+
+// Support: IE<9
+// Use defaultValue in place of getAttribute("value")
+if ( !support.attributes || !assert(function( div ) {
+	div.innerHTML = "<input/>";
+	div.firstChild.setAttribute( "value", "" );
+	return div.firstChild.getAttribute( "value" ) === "";
+}) ) {
+	addHandle( "value", function( elem, name, isXML ) {
+		if ( !isXML && elem.nodeName.toLowerCase() === "input" ) {
+			return elem.defaultValue;
+		}
+	});
+}
+
+// Support: IE<9
+// Use getAttributeNode to fetch booleans when getAttribute lies
+if ( !assert(function( div ) {
+	return div.getAttribute("disabled") == null;
+}) ) {
+	addHandle( booleans, function( elem, name, isXML ) {
+		var val;
+		if ( !isXML ) {
+			return elem[ name ] === true ? name.toLowerCase() :
+					(val = elem.getAttributeNode( name )) && val.specified ?
+					val.value :
+				null;
+		}
+	});
+}
+
+return Sizzle;
+
+})( window );
+
+
+
+jQuery.find = Sizzle;
+jQuery.expr = Sizzle.selectors;
+jQuery.expr[ ":" ] = jQuery.expr.pseudos;
+jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort;
+jQuery.text = Sizzle.getText;
+jQuery.isXMLDoc = Sizzle.isXML;
+jQuery.contains = Sizzle.contains;
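+
+// Illustrative sketch: the aliases above expose Sizzle's utilities on jQuery,
+// e.g. containment checks and document-order de-duplication (here a and b are
+// hypothetical elements already attached to the document, a preceding b):
+//
+//     jQuery.contains( document.documentElement, document.body ); // true
+//     jQuery.uniqueSort( [ b, a, a ] );                           // [ a, b ]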
+
+
+
+var dir = function( elem, dir, until ) {
+	var matched = [],
+		truncate = until !== undefined;
+
+	while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) {
+		if ( elem.nodeType === 1 ) {
+			if ( truncate && jQuery( elem ).is( until ) ) {
+				break;
+			}
+			matched.push( elem );
+		}
+	}
+	return matched;
+};
+
+
+var siblings = function( n, elem ) {
+	var matched = [];
+
+	for ( ; n; n = n.nextSibling ) {
+		if ( n.nodeType === 1 && n !== elem ) {
+			matched.push( n );
+		}
+	}
+
+	return matched;
+};
+
+
+var rneedsContext = jQuery.expr.match.needsContext;
+
+var rsingleTag = ( /^<([\w-]+)\s*\/?>(?:<\/\1>|)$/ );
+
+
+
+var risSimple = /^.[^:#\[\.,]*$/;
+
+// Implement the identical functionality for filter and not
+function winnow( elements, qualifier, not ) {
+	if ( jQuery.isFunction( qualifier ) ) {
+		return jQuery.grep( elements, function( elem, i ) {
+			/* jshint -W018 */
+			return !!qualifier.call( elem, i, elem ) !== not;
+		} );
+
+	}
+
+	if ( qualifier.nodeType ) {
+		return jQuery.grep( elements, function( elem ) {
+			return ( elem === qualifier ) !== not;
+		} );
+
+	}
+
+	if ( typeof qualifier === "string" ) {
+		if ( risSimple.test( qualifier ) ) {
+			return jQuery.filter( qualifier, elements, not );
+		}
+
+		qualifier = jQuery.filter( qualifier, elements );
+	}
+
+	return jQuery.grep( elements, function( elem ) {
+		return ( jQuery.inArray( elem, qualifier ) > -1 ) !== not;
+	} );
+}
+
+jQuery.filter = function( expr, elems, not ) {
+	var elem = elems[ 0 ];
+
+	if ( not ) {
+		expr = ":not(" + expr + ")";
+	}
+
+	return elems.length === 1 && elem.nodeType === 1 ?
+		jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : [] :
+		jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) {
+			return elem.nodeType === 1;
+		} ) );
+};
+
+jQuery.fn.extend( {
+	find: function( selector ) {
+		var i,
+			ret = [],
+			self = this,
+			len = self.length;
+
+		if ( typeof selector !== "string" ) {
+			return this.pushStack( jQuery( selector ).filter( function() {
+				for ( i = 0; i < len; i++ ) {
+					if ( jQuery.contains( self[ i ], this ) ) {
+						return true;
+					}
+				}
+			} ) );
+		}
+
+		for ( i = 0; i < len; i++ ) {
+			jQuery.find( selector, self[ i ], ret );
+		}
+
+		// Needed because $( selector, context ) becomes $( context ).find( selector )
+		ret = this.pushStack( len > 1 ? jQuery.unique( ret ) : ret );
+		ret.selector = this.selector ? this.selector + " " + selector : selector;
+		return ret;
+	},
+	filter: function( selector ) {
+		return this.pushStack( winnow( this, selector || [], false ) );
+	},
+	not: function( selector ) {
+		return this.pushStack( winnow( this, selector || [], true ) );
+	},
+	is: function( selector ) {
+		return !!winnow(
+			this,
+
+			// If this is a positional/relative selector, check membership in the returned set
+			// so $("p:first").is("p:last") won't return true for a doc with two "p".
+			typeof selector === "string" && rneedsContext.test( selector ) ?
+				jQuery( selector ) :
+				selector || [],
+			false
+		).length;
+	}
+} );
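+
+// Illustrative sketch: winnow() gives .filter()/.not() symmetric semantics, and
+// .is() routes positional selectors through jQuery( selector ) as noted above
+// ("#a" is an arbitrary example selector):
+//
+//     jQuery( "p" ).filter( "#a" ); // keep only matches of "#a"
+//     jQuery( "p" ).not( "#a" );    // drop matches of "#a"
+//     jQuery( "p" ).is( "p:last" ); // membership test in jQuery("p:last"), not a per-element match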
+
+
+// Initialize a jQuery object
+
+
+// A central reference to the root jQuery(document)
+var rootjQuery,
+
+	// A simple way to check for HTML strings
+	// Prioritize #id over <tag> to avoid XSS via location.hash (#9521)
+	// Strict HTML recognition (#11290: must start with <)
+	rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,
+
+	init = jQuery.fn.init = function( selector, context, root ) {
+		var match, elem;
+
+		// HANDLE: $(""), $(null), $(undefined), $(false)
+		if ( !selector ) {
+			return this;
+		}
+
+		// init accepts an alternate rootjQuery
+		// so migrate can support jQuery.sub (gh-2101)
+		root = root || rootjQuery;
+
+		// Handle HTML strings
+		if ( typeof selector === "string" ) {
+			if ( selector.charAt( 0 ) === "<" &&
+				selector.charAt( selector.length - 1 ) === ">" &&
+				selector.length >= 3 ) {
+
+				// Assume that strings that start and end with <> are HTML and skip the regex check
+				match = [ null, selector, null ];
+
+			} else {
+				match = rquickExpr.exec( selector );
+			}
+
+			// Match html or make sure no context is specified for #id
+			if ( match && ( match[ 1 ] || !context ) ) {
+
+				// HANDLE: $(html) -> $(array)
+				if ( match[ 1 ] ) {
+					context = context instanceof jQuery ? context[ 0 ] : context;
+
+					// scripts is true for back-compat
+					// Intentionally let the error be thrown if parseHTML is not present
+					jQuery.merge( this, jQuery.parseHTML(
+						match[ 1 ],
+						context && context.nodeType ? context.ownerDocument || context : document,
+						true
+					) );
+
+					// HANDLE: $(html, props)
+					if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) {
+						for ( match in context ) {
+
+							// Properties of context are called as methods if possible
+							if ( jQuery.isFunction( this[ match ] ) ) {
+								this[ match ]( context[ match ] );
+
+							// ...and otherwise set as attributes
+							} else {
+								this.attr( match, context[ match ] );
+							}
+						}
+					}
+
+					return this;
+
+				// HANDLE: $(#id)
+				} else {
+					elem = document.getElementById( match[ 2 ] );
+
+					// Check parentNode to catch when Blackberry 4.6 returns
+					// nodes that are no longer in the document #6963
+					if ( elem && elem.parentNode ) {
+
+						// Handle the case where IE and Opera return items
+						// by name instead of ID
+						if ( elem.id !== match[ 2 ] ) {
+							return rootjQuery.find( selector );
+						}
+
+						// Otherwise, we inject the element directly into the jQuery object
+						this.length = 1;
+						this[ 0 ] = elem;
+					}
+
+					this.context = document;
+					this.selector = selector;
+					return this;
+				}
+
+			// HANDLE: $(expr, $(...))
+			} else if ( !context || context.jquery ) {
+				return ( context || root ).find( selector );
+
+			// HANDLE: $(expr, context)
+		// (which is just equivalent to: $(context).find(expr))
+			} else {
+				return this.constructor( context ).find( selector );
+			}
+
+		// HANDLE: $(DOMElement)
+		} else if ( selector.nodeType ) {
+			this.context = this[ 0 ] = selector;
+			this.length = 1;
+			return this;
+
+		// HANDLE: $(function)
+		// Shortcut for document ready
+		} else if ( jQuery.isFunction( selector ) ) {
+			return typeof root.ready !== "undefined" ?
+				root.ready( selector ) :
+
+				// Execute immediately if ready is not present
+				selector( jQuery );
+		}
+
+		if ( selector.selector !== undefined ) {
+			this.selector = selector.selector;
+			this.context = selector.context;
+		}
+
+		return jQuery.makeArray( selector, this );
+	};
+
+// Give the init function the jQuery prototype for later instantiation
+init.prototype = jQuery.fn;
+
+// Initialize central reference
+rootjQuery = jQuery( document );
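+
+// Illustrative sketch of the dispatch in init above (argument shapes only;
+// "#main", context, and the handlers are hypothetical):
+//
+//     jQuery( "<div/>" );                // HTML string -> parseHTML path
+//     jQuery( "<div/>", { id: "x" } );   // $(html, props): methods or attrs applied
+//     jQuery( "#main" );                 // fast getElementById path
+//     jQuery( ".item", context );        // same as jQuery( context ).find( ".item" )
+//     jQuery( function( $ ) {} );        // document-ready shortcut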
+
+
+var rparentsprev = /^(?:parents|prev(?:Until|All))/,
+
+	// methods guaranteed to produce a unique set when starting from a unique set
+	guaranteedUnique = {
+		children: true,
+		contents: true,
+		next: true,
+		prev: true
+	};
+
+jQuery.fn.extend( {
+	has: function( target ) {
+		var i,
+			targets = jQuery( target, this ),
+			len = targets.length;
+
+		return this.filter( function() {
+			for ( i = 0; i < len; i++ ) {
+				if ( jQuery.contains( this, targets[ i ] ) ) {
+					return true;
+				}
+			}
+		} );
+	},
+
+	closest: function( selectors, context ) {
+		var cur,
+			i = 0,
+			l = this.length,
+			matched = [],
+			pos = rneedsContext.test( selectors ) || typeof selectors !== "string" ?
+				jQuery( selectors, context || this.context ) :
+				0;
+
+		for ( ; i < l; i++ ) {
+			for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) {
+
+				// Always skip document fragments
+				if ( cur.nodeType < 11 && ( pos ?
+					pos.index( cur ) > -1 :
+
+					// Don't pass non-elements to Sizzle
+					cur.nodeType === 1 &&
+						jQuery.find.matchesSelector( cur, selectors ) ) ) {
+
+					matched.push( cur );
+					break;
+				}
+			}
+		}
+
+		return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched );
+	},
+
+	// Determine the position of an element within
+	// the matched set of elements
+	index: function( elem ) {
+
+		// No argument, return index in parent
+		if ( !elem ) {
+			return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1;
+		}
+
+		// index in selector
+		if ( typeof elem === "string" ) {
+			return jQuery.inArray( this[ 0 ], jQuery( elem ) );
+		}
+
+		// Locate the position of the desired element
+		return jQuery.inArray(
+
+			// If it receives a jQuery object, the first element is used
+			elem.jquery ? elem[ 0 ] : elem, this );
+	},
+
+	add: function( selector, context ) {
+		return this.pushStack(
+			jQuery.uniqueSort(
+				jQuery.merge( this.get(), jQuery( selector, context ) )
+			)
+		);
+	},
+
+	addBack: function( selector ) {
+		return this.add( selector == null ?
+			this.prevObject : this.prevObject.filter( selector )
+		);
+	}
+} );
+
+function sibling( cur, dir ) {
+	do {
+		cur = cur[ dir ];
+	} while ( cur && cur.nodeType !== 1 );
+
+	return cur;
+}
+
+jQuery.each( {
+	parent: function( elem ) {
+		var parent = elem.parentNode;
+		return parent && parent.nodeType !== 11 ? parent : null;
+	},
+	parents: function( elem ) {
+		return dir( elem, "parentNode" );
+	},
+	parentsUntil: function( elem, i, until ) {
+		return dir( elem, "parentNode", until );
+	},
+	next: function( elem ) {
+		return sibling( elem, "nextSibling" );
+	},
+	prev: function( elem ) {
+		return sibling( elem, "previousSibling" );
+	},
+	nextAll: function( elem ) {
+		return dir( elem, "nextSibling" );
+	},
+	prevAll: function( elem ) {
+		return dir( elem, "previousSibling" );
+	},
+	nextUntil: function( elem, i, until ) {
+		return dir( elem, "nextSibling", until );
+	},
+	prevUntil: function( elem, i, until ) {
+		return dir( elem, "previousSibling", until );
+	},
+	siblings: function( elem ) {
+		return siblings( ( elem.parentNode || {} ).firstChild, elem );
+	},
+	children: function( elem ) {
+		return siblings( elem.firstChild );
+	},
+	contents: function( elem ) {
+		return jQuery.nodeName( elem, "iframe" ) ?
+			elem.contentDocument || elem.contentWindow.document :
+			jQuery.merge( [], elem.childNodes );
+	}
+}, function( name, fn ) {
+	jQuery.fn[ name ] = function( until, selector ) {
+		var ret = jQuery.map( this, fn, until );
+
+		if ( name.slice( -5 ) !== "Until" ) {
+			selector = until;
+		}
+
+		if ( selector && typeof selector === "string" ) {
+			ret = jQuery.filter( selector, ret );
+		}
+
+		if ( this.length > 1 ) {
+
+			// Remove duplicates
+			if ( !guaranteedUnique[ name ] ) {
+				ret = jQuery.uniqueSort( ret );
+			}
+
+			// Reverse order for parents* and prev-derivatives
+			if ( rparentsprev.test( name ) ) {
+				ret = ret.reverse();
+			}
+		}
+
+		return this.pushStack( ret );
+	};
+} );
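+
+// Illustrative sketch: the generated methods wrap dir()/sibling()/siblings(),
+// filter by an optional selector, and reverse parents*/prev* results back into
+// document order (selectors below are arbitrary examples):
+//
+//     jQuery( "li.active" ).nextAll( "li" );        // following <li> siblings
+//     jQuery( "span" ).parentsUntil( "ul", "div" ); // <div> ancestors below the nearest <ul>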
+var rnotwhite = ( /\S+/g );
+
+
+
+// Convert String-formatted options into Object-formatted ones
+function createOptions( options ) {
+	var object = {};
+	jQuery.each( options.match( rnotwhite ) || [], function( _, flag ) {
+		object[ flag ] = true;
+	} );
+	return object;
+}
+
+/*
+ * Create a callback list using the following parameters:
+ *
+ *	options: an optional list of space-separated options that will change how
+ *			the callback list behaves or a more traditional option object
+ *
+ * By default a callback list will act like an event callback list and can be
+ * "fired" multiple times.
+ *
+ * Possible options:
+ *
+ *	once:			will ensure the callback list can only be fired once (like a Deferred)
+ *
+ *	memory:			will keep track of previous values and will call any callback added
+ *					after the list has been fired right away with the latest "memorized"
+ *					values (like a Deferred)
+ *
+ *	unique:			will ensure a callback can only be added once (no duplicate in the list)
+ *
+ *	stopOnFalse:	interrupt the call sequence when a callback returns false
+ *
+ */
+jQuery.Callbacks = function( options ) {
+
+	// Convert options from String-formatted to Object-formatted if needed
+	// (we check in cache first)
+	options = typeof options === "string" ?
+		createOptions( options ) :
+		jQuery.extend( {}, options );
+
+	var // Flag to know if list is currently firing
+		firing,
+
+		// Last fire value for non-forgettable lists
+		memory,
+
+		// Flag to know if list was already fired
+		fired,
+
+		// Flag to prevent firing
+		locked,
+
+		// Actual callback list
+		list = [],
+
+		// Queue of execution data for repeatable lists
+		queue = [],
+
+		// Index of currently firing callback (modified by add/remove as needed)
+		firingIndex = -1,
+
+		// Fire callbacks
+		fire = function() {
+
+			// Enforce single-firing
+			locked = options.once;
+
+			// Execute callbacks for all pending executions,
+			// respecting firingIndex overrides and runtime changes
+			fired = firing = true;
+			for ( ; queue.length; firingIndex = -1 ) {
+				memory = queue.shift();
+				while ( ++firingIndex < list.length ) {
+
+					// Run callback and check for early termination
+					if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false &&
+						options.stopOnFalse ) {
+
+						// Jump to end and forget the data so .add doesn't re-fire
+						firingIndex = list.length;
+						memory = false;
+					}
+				}
+			}
+
+			// Forget the data if we're done with it
+			if ( !options.memory ) {
+				memory = false;
+			}
+
+			firing = false;
+
+			// Clean up if we're done firing for good
+			if ( locked ) {
+
+				// Keep an empty list if we have data for future add calls
+				if ( memory ) {
+					list = [];
+
+				// Otherwise, this object is spent
+				} else {
+					list = "";
+				}
+			}
+		},
+
+		// Actual Callbacks object
+		self = {
+
+			// Add a callback or a collection of callbacks to the list
+			add: function() {
+				if ( list ) {
+
+					// If we have memory from a past run, we should fire after adding
+					if ( memory && !firing ) {
+						firingIndex = list.length - 1;
+						queue.push( memory );
+					}
+
+					( function add( args ) {
+						jQuery.each( args, function( _, arg ) {
+							if ( jQuery.isFunction( arg ) ) {
+								if ( !options.unique || !self.has( arg ) ) {
+									list.push( arg );
+								}
+							} else if ( arg && arg.length && jQuery.type( arg ) !== "string" ) {
+
+								// Inspect recursively
+								add( arg );
+							}
+						} );
+					} )( arguments );
+
+					if ( memory && !firing ) {
+						fire();
+					}
+				}
+				return this;
+			},
+
+			// Remove a callback from the list
+			remove: function() {
+				jQuery.each( arguments, function( _, arg ) {
+					var index;
+					while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) {
+						list.splice( index, 1 );
+
+						// Handle firing indexes
+						if ( index <= firingIndex ) {
+							firingIndex--;
+						}
+					}
+				} );
+				return this;
+			},
+
+			// Check if a given callback is in the list.
+			// If no argument is given, return whether or not list has callbacks attached.
+			has: function( fn ) {
+				return fn ?
+					jQuery.inArray( fn, list ) > -1 :
+					list.length > 0;
+			},
+
+			// Remove all callbacks from the list
+			empty: function() {
+				if ( list ) {
+					list = [];
+				}
+				return this;
+			},
+
+			// Disable .fire and .add
+			// Abort any current/pending executions
+			// Clear all callbacks and values
+			disable: function() {
+				locked = queue = [];
+				list = memory = "";
+				return this;
+			},
+			disabled: function() {
+				return !list;
+			},
+
+			// Disable .fire
+			// Also disable .add unless we have memory (since it would have no effect)
+			// Abort any pending executions
+			lock: function() {
+				locked = true;
+				if ( !memory ) {
+					self.disable();
+				}
+				return this;
+			},
+			locked: function() {
+				return !!locked;
+			},
+
+			// Call all callbacks with the given context and arguments
+			fireWith: function( context, args ) {
+				if ( !locked ) {
+					args = args || [];
+					args = [ context, args.slice ? args.slice() : args ];
+					queue.push( args );
+					if ( !firing ) {
+						fire();
+					}
+				}
+				return this;
+			},
+
+			// Call all the callbacks with the given arguments
+			fire: function() {
+				self.fireWith( this, arguments );
+				return this;
+			},
+
+			// To know if the callbacks have already been called at least once
+			fired: function() {
+				return !!fired;
+			}
+		};
+
+	return self;
+};
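+
+// Illustrative sketch of the option flags on a fresh list:
+//
+//     var cb = jQuery.Callbacks( "once memory" );
+//     cb.add( function( v ) { console.log( "first", v ); } );
+//     cb.fire( 1 );                                           // logs "first 1"
+//     cb.add( function( v ) { console.log( "late", v ); } );  // "memory": replays 1 immediately
+//     cb.fire( 2 );                                           // ignored: "once" locked the list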
+
+
+jQuery.extend( {
+
+	Deferred: function( func ) {
+		var tuples = [
+
+				// action, add listener, listener list, final state
+				[ "resolve", "done", jQuery.Callbacks( "once memory" ), "resolved" ],
+				[ "reject", "fail", jQuery.Callbacks( "once memory" ), "rejected" ],
+				[ "notify", "progress", jQuery.Callbacks( "memory" ) ]
+			],
+			state = "pending",
+			promise = {
+				state: function() {
+					return state;
+				},
+				always: function() {
+					deferred.done( arguments ).fail( arguments );
+					return this;
+				},
+				then: function( /* fnDone, fnFail, fnProgress */ ) {
+					var fns = arguments;
+					return jQuery.Deferred( function( newDefer ) {
+						jQuery.each( tuples, function( i, tuple ) {
+							var fn = jQuery.isFunction( fns[ i ] ) && fns[ i ];
+
+							// deferred[ done | fail | progress ] for forwarding actions to newDefer
+							deferred[ tuple[ 1 ] ]( function() {
+								var returned = fn && fn.apply( this, arguments );
+								if ( returned && jQuery.isFunction( returned.promise ) ) {
+									returned.promise()
+										.progress( newDefer.notify )
+										.done( newDefer.resolve )
+										.fail( newDefer.reject );
+								} else {
+									newDefer[ tuple[ 0 ] + "With" ](
+										this === promise ? newDefer.promise() : this,
+										fn ? [ returned ] : arguments
+									);
+								}
+							} );
+						} );
+						fns = null;
+					} ).promise();
+				},
+
+				// Get a promise for this deferred
+				// If obj is provided, the promise aspect is added to the object
+				promise: function( obj ) {
+					return obj != null ? jQuery.extend( obj, promise ) : promise;
+				}
+			},
+			deferred = {};
+
+		// Keep pipe for back-compat
+		promise.pipe = promise.then;
+
+		// Add list-specific methods
+		jQuery.each( tuples, function( i, tuple ) {
+			var list = tuple[ 2 ],
+				stateString = tuple[ 3 ];
+
+			// promise[ done | fail | progress ] = list.add
+			promise[ tuple[ 1 ] ] = list.add;
+
+			// Handle state
+			if ( stateString ) {
+				list.add( function() {
+
+					// state = [ resolved | rejected ]
+					state = stateString;
+
+				// [ reject_list | resolve_list ].disable; progress_list.lock
+				}, tuples[ i ^ 1 ][ 2 ].disable, tuples[ 2 ][ 2 ].lock );
+			}
+
+			// deferred[ resolve | reject | notify ]
+			deferred[ tuple[ 0 ] ] = function() {
+				deferred[ tuple[ 0 ] + "With" ]( this === deferred ? promise : this, arguments );
+				return this;
+			};
+			deferred[ tuple[ 0 ] + "With" ] = list.fireWith;
+		} );
+
+		// Make the deferred a promise
+		promise.promise( deferred );
+
+		// Call given func if any
+		if ( func ) {
+			func.call( deferred, deferred );
+		}
+
+		// All done!
+		return deferred;
+	},
+
+	// Deferred helper
+	when: function( subordinate /* , ..., subordinateN */ ) {
+		var i = 0,
+			resolveValues = slice.call( arguments ),
+			length = resolveValues.length,
+
+			// the count of uncompleted subordinates
+			remaining = length !== 1 ||
+				( subordinate && jQuery.isFunction( subordinate.promise ) ) ? length : 0,
+
+			// the master Deferred.
+			// If resolveValues consist of only a single Deferred, just use that.
+			deferred = remaining === 1 ? subordinate : jQuery.Deferred(),
+
+			// Update function for both resolve and progress values
+			updateFunc = function( i, contexts, values ) {
+				return function( value ) {
+					contexts[ i ] = this;
+					values[ i ] = arguments.length > 1 ? slice.call( arguments ) : value;
+					if ( values === progressValues ) {
+						deferred.notifyWith( contexts, values );
+
+					} else if ( !( --remaining ) ) {
+						deferred.resolveWith( contexts, values );
+					}
+				};
+			},
+
+			progressValues, progressContexts, resolveContexts;
+
+		// add listeners to Deferred subordinates; treat others as resolved
+		if ( length > 1 ) {
+			progressValues = new Array( length );
+			progressContexts = new Array( length );
+			resolveContexts = new Array( length );
+			for ( ; i < length; i++ ) {
+				if ( resolveValues[ i ] && jQuery.isFunction( resolveValues[ i ].promise ) ) {
+					resolveValues[ i ].promise()
+						.progress( updateFunc( i, progressContexts, progressValues ) )
+						.done( updateFunc( i, resolveContexts, resolveValues ) )
+						.fail( deferred.reject );
+				} else {
+					--remaining;
+				}
+			}
+		}
+
+		// if we're not waiting on anything, resolve the master
+		if ( !remaining ) {
+			deferred.resolveWith( resolveContexts, resolveValues );
+		}
+
+		return deferred.promise();
+	}
+} );
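+
+// Illustrative sketch: when() resolves its master Deferred only after every
+// thenable subordinate resolves, collecting per-subordinate values in order:
+//
+//     var d1 = jQuery.Deferred(),
+//         d2 = jQuery.Deferred();
+//     jQuery.when( d1, d2 ).done( function( a, b ) {
+//         // a === "one", b === "two"
+//     } );
+//     d1.resolve( "one" );
+//     d2.resolve( "two" );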
+
+
+// The deferred used on DOM ready
+var readyList;
+
+jQuery.fn.ready = function( fn ) {
+
+	// Add the callback
+	jQuery.ready.promise().done( fn );
+
+	return this;
+};
+
+jQuery.extend( {
+
+	// Is the DOM ready to be used? Set to true once it occurs.
+	isReady: false,
+
+	// A counter to track how many items to wait for before
+	// the ready event fires. See #6781
+	readyWait: 1,
+
+	// Hold (or release) the ready event
+	holdReady: function( hold ) {
+		if ( hold ) {
+			jQuery.readyWait++;
+		} else {
+			jQuery.ready( true );
+		}
+	},
+
+	// Handle when the DOM is ready
+	ready: function( wait ) {
+
+		// Abort if there are pending holds or we're already ready
+		if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) {
+			return;
+		}
+
+		// Remember that the DOM is ready
+		jQuery.isReady = true;
+
+		// If a normal DOM Ready event fired, decrement, and wait if need be
+		if ( wait !== true && --jQuery.readyWait > 0 ) {
+			return;
+		}
+
+		// If there are functions bound, execute them
+		readyList.resolveWith( document, [ jQuery ] );
+
+		// Trigger any bound ready events
+		if ( jQuery.fn.triggerHandler ) {
+			jQuery( document ).triggerHandler( "ready" );
+			jQuery( document ).off( "ready" );
+		}
+	}
+} );
+
+/**
+ * Clean-up method for dom ready events
+ */
+function detach() {
+	if ( document.addEventListener ) {
+		document.removeEventListener( "DOMContentLoaded", completed );
+		window.removeEventListener( "load", completed );
+
+	} else {
+		document.detachEvent( "onreadystatechange", completed );
+		window.detachEvent( "onload", completed );
+	}
+}
+
+/**
+ * The ready event handler and self cleanup method
+ */
+function completed() {
+
+	// readyState === "complete" is good enough for us to call the dom ready in oldIE
+	if ( document.addEventListener ||
+		window.event.type === "load" ||
+		document.readyState === "complete" ) {
+
+		detach();
+		jQuery.ready();
+	}
+}
+
+jQuery.ready.promise = function( obj ) {
+	if ( !readyList ) {
+
+		readyList = jQuery.Deferred();
+
+		// Catch cases where $(document).ready() is called
+		// after the browser event has already occurred.
+		// Support: IE6-10
+		// Older IE sometimes signals "interactive" too soon
+		if ( document.readyState === "complete" ||
+			( document.readyState !== "loading" && !document.documentElement.doScroll ) ) {
+
+			// Handle it asynchronously to allow scripts the opportunity to delay ready
+			window.setTimeout( jQuery.ready );
+
+		// Standards-based browsers support DOMContentLoaded
+		} else if ( document.addEventListener ) {
+
+			// Use the handy event callback
+			document.addEventListener( "DOMContentLoaded", completed );
+
+			// A fallback to window.onload that will always work
+			window.addEventListener( "load", completed );
+
+		// If IE event model is used
+		} else {
+
+			// Ensure firing before onload, maybe late but safe also for iframes
+			document.attachEvent( "onreadystatechange", completed );
+
+			// A fallback to window.onload that will always work
+			window.attachEvent( "onload", completed );
+
+			// If IE and not a frame
+			// continually check to see if the document is ready
+			var top = false;
+
+			try {
+				top = window.frameElement == null && document.documentElement;
+			} catch ( e ) {}
+
+			if ( top && top.doScroll ) {
+				( function doScrollCheck() {
+					if ( !jQuery.isReady ) {
+
+						try {
+
+							// Use the trick by Diego Perini
+							// http://javascript.nwbox.com/IEContentLoaded/
+							top.doScroll( "left" );
+						} catch ( e ) {
+							return window.setTimeout( doScrollCheck, 50 );
+						}
+
+						// detach all dom ready events
+						detach();
+
+						// and execute any waiting functions
+						jQuery.ready();
+					}
+				} )();
+			}
+		}
+	}
+	return readyList.promise( obj );
+};
+
+// Kick off the DOM ready check even if the user does not
+jQuery.ready.promise();
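+
+// Illustrative sketch: the same promise backs jQuery.fn.ready, so late
+// subscribers run immediately once the DOM is ready:
+//
+//     jQuery.ready.promise().done( function() { /* DOM is usable here */ } );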
+
+
+
+
+// Support: IE<9
+// Iteration over object's inherited properties before its own
+var i;
+for ( i in jQuery( support ) ) {
+	break;
+}
+support.ownFirst = i === "0";
+
+// Note: most support tests are defined in their respective modules.
+// false until the test is run
+support.inlineBlockNeedsLayout = false;
+
+// Execute ASAP in case we need to set body.style.zoom
+jQuery( function() {
+
+	// Minified: var a,b,c,d
+	var val, div, body, container;
+
+	body = document.getElementsByTagName( "body" )[ 0 ];
+	if ( !body || !body.style ) {
+
+		// Return for frameset docs that don't have a body
+		return;
+	}
+
+	// Setup
+	div = document.createElement( "div" );
+	container = document.createElement( "div" );
+	container.style.cssText = "position:absolute;border:0;width:0;height:0;top:0;left:-9999px";
+	body.appendChild( container ).appendChild( div );
+
+	if ( typeof div.style.zoom !== "undefined" ) {
+
+		// Support: IE<8
+		// Check if natively block-level elements act like inline-block
+		// elements when setting their display to 'inline' and giving
+		// them layout
+		div.style.cssText = "display:inline;margin:0;border:0;padding:1px;width:1px;zoom:1";
+
+		support.inlineBlockNeedsLayout = val = div.offsetWidth === 3;
+		if ( val ) {
+
+			// Prevent IE 6 from affecting layout for positioned elements #11048
+			// Prevent IE from shrinking the body in IE 7 mode #12869
+			// Support: IE<8
+			body.style.zoom = 1;
+		}
+	}
+
+	body.removeChild( container );
+} );
+
+
+( function() {
+	var div = document.createElement( "div" );
+
+	// Support: IE<9
+	support.deleteExpando = true;
+	try {
+		delete div.test;
+	} catch ( e ) {
+		support.deleteExpando = false;
+	}
+
+	// Null elements to avoid leaks in IE.
+	div = null;
+} )();
+var acceptData = function( elem ) {
+	var noData = jQuery.noData[ ( elem.nodeName + " " ).toLowerCase() ],
+		nodeType = +elem.nodeType || 1;
+
+	// Do not set data on non-element DOM nodes because it will not be cleared (#8335).
+	return nodeType !== 1 && nodeType !== 9 ?
+		false :
+
+		// Nodes accept data unless otherwise specified; rejection can be conditional
+		!noData || noData !== true && elem.getAttribute( "classid" ) === noData;
+};
+
+
+
+
+var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,
+	rmultiDash = /([A-Z])/g;
+
+function dataAttr( elem, key, data ) {
+
+	// If nothing was found internally, try to fetch any
+	// data from the HTML5 data-* attribute
+	if ( data === undefined && elem.nodeType === 1 ) {
+
+		var name = "data-" + key.replace( rmultiDash, "-$1" ).toLowerCase();
+
+		data = elem.getAttribute( name );
+
+		if ( typeof data === "string" ) {
+			try {
+				data = data === "true" ? true :
+					data === "false" ? false :
+					data === "null" ? null :
+
+					// Only convert to a number if it doesn't change the string
+					+data + "" === data ? +data :
+					rbrace.test( data ) ? jQuery.parseJSON( data ) :
+					data;
+			} catch ( e ) {}
+
+			// Make sure we set the data so it isn't changed later
+			jQuery.data( elem, key, data );
+
+		} else {
+			data = undefined;
+		}
+	}
+
+	return data;
+}
+
+// checks a cache object for emptiness
+function isEmptyDataObject( obj ) {
+	var name;
+	for ( name in obj ) {
+
+		// if the public data object is empty, the private is still empty
+		if ( name === "data" && jQuery.isEmptyObject( obj[ name ] ) ) {
+			continue;
+		}
+		if ( name !== "toJSON" ) {
+			return false;
+		}
+	}
+
+	return true;
+}
+
+function internalData( elem, name, data, pvt /* Internal Use Only */ ) {
+	if ( !acceptData( elem ) ) {
+		return;
+	}
+
+	var ret, thisCache,
+		internalKey = jQuery.expando,
+
+		// We have to handle DOM nodes and JS objects differently because IE6-7
+		// can't GC object references properly across the DOM-JS boundary
+		isNode = elem.nodeType,
+
+		// Only DOM nodes need the global jQuery cache; JS object data is
+		// attached directly to the object so GC can occur automatically
+		cache = isNode ? jQuery.cache : elem,
+
+		// Only defining an ID for JS objects if its cache already exists allows
+		// the code to shortcut on the same path as a DOM node with no cache
+		id = isNode ? elem[ internalKey ] : elem[ internalKey ] && internalKey;
+
+	// Avoid doing any more work than we need to when trying to get data on an
+	// object that has no data at all
+	if ( ( !id || !cache[ id ] || ( !pvt && !cache[ id ].data ) ) &&
+		data === undefined && typeof name === "string" ) {
+		return;
+	}
+
+	if ( !id ) {
+
+		// Only DOM nodes need a new unique ID for each element since their data
+		// ends up in the global cache
+		if ( isNode ) {
+			id = elem[ internalKey ] = deletedIds.pop() || jQuery.guid++;
+		} else {
+			id = internalKey;
+		}
+	}
+
+	if ( !cache[ id ] ) {
+
+		// Avoid exposing jQuery metadata on plain JS objects when the object
+		// is serialized using JSON.stringify
+		cache[ id ] = isNode ? {} : { toJSON: jQuery.noop };
+	}
+
+	// An object can be passed to jQuery.data instead of a key/value pair; this gets
+	// shallow copied over onto the existing cache
+	if ( typeof name === "object" || typeof name === "function" ) {
+		if ( pvt ) {
+			cache[ id ] = jQuery.extend( cache[ id ], name );
+		} else {
+			cache[ id ].data = jQuery.extend( cache[ id ].data, name );
+		}
+	}
+
+	thisCache = cache[ id ];
+
+	// jQuery data() is stored in a separate object inside the object's internal data
+	// cache in order to avoid key collisions between internal data and user-defined
+	// data.
+	if ( !pvt ) {
+		if ( !thisCache.data ) {
+			thisCache.data = {};
+		}
+
+		thisCache = thisCache.data;
+	}
+
+	if ( data !== undefined ) {
+		thisCache[ jQuery.camelCase( name ) ] = data;
+	}
+
+	// Check for both converted-to-camel and non-converted data property names
+	// If a data property was specified
+	if ( typeof name === "string" ) {
+
+		// First, try to find as-is property data
+		ret = thisCache[ name ];
+
+		// Test for null|undefined property data
+		if ( ret == null ) {
+
+			// Try to find the camelCased property
+			ret = thisCache[ jQuery.camelCase( name ) ];
+		}
+	} else {
+		ret = thisCache;
+	}
+
+	return ret;
+}
+
+function internalRemoveData( elem, name, pvt ) {
+	if ( !acceptData( elem ) ) {
+		return;
+	}
+
+	var thisCache, i,
+		isNode = elem.nodeType,
+
+		// See jQuery.data for more information
+		cache = isNode ? jQuery.cache : elem,
+		id = isNode ? elem[ jQuery.expando ] : jQuery.expando;
+
+	// If there is already no cache entry for this object, there is no
+	// purpose in continuing
+	if ( !cache[ id ] ) {
+		return;
+	}
+
+	if ( name ) {
+
+		thisCache = pvt ? cache[ id ] : cache[ id ].data;
+
+		if ( thisCache ) {
+
+			// Support array or space separated string names for data keys
+			if ( !jQuery.isArray( name ) ) {
+
+				// try the string as a key before any manipulation
+				if ( name in thisCache ) {
+					name = [ name ];
+				} else {
+
+					// split the camel cased version by spaces unless a key with the spaces exists
+					name = jQuery.camelCase( name );
+					if ( name in thisCache ) {
+						name = [ name ];
+					} else {
+						name = name.split( " " );
+					}
+				}
+			} else {
+
+				// If "name" is an array of keys...
+				// When data is initially created, via ("key", "val") signature,
+				// keys will be converted to camelCase.
+				// Since there is no way to tell _how_ a key was added, remove
+				// both plain key and camelCase key. #12786
+				// This will only penalize the array argument path.
+				name = name.concat( jQuery.map( name, jQuery.camelCase ) );
+			}
+
+			i = name.length;
+			while ( i-- ) {
+				delete thisCache[ name[ i ] ];
+			}
+
+			// If there is no data left in the cache, we want to continue
+			// and let the cache object itself get destroyed
+			if ( pvt ? !isEmptyDataObject( thisCache ) : !jQuery.isEmptyObject( thisCache ) ) {
+				return;
+			}
+		}
+	}
+
+	// See jQuery.data for more information
+	if ( !pvt ) {
+		delete cache[ id ].data;
+
+		// Don't destroy the parent cache unless the internal data object
+		// had been the only thing left in it
+		if ( !isEmptyDataObject( cache[ id ] ) ) {
+			return;
+		}
+	}
+
+	// Destroy the cache
+	if ( isNode ) {
+		jQuery.cleanData( [ elem ], true );
+
+	// Use delete when supported for expandos or `cache` is not a window per isWindow (#10080)
+	/* jshint eqeqeq: false */
+	} else if ( support.deleteExpando || cache != cache.window ) {
+		/* jshint eqeqeq: true */
+		delete cache[ id ];
+
+	// When all else fails, undefined
+	} else {
+		cache[ id ] = undefined;
+	}
+}
+
+jQuery.extend( {
+	cache: {},
+
+	// The following elements (space-suffixed to avoid Object.prototype collisions)
+	// throw uncatchable exceptions if you attempt to set expando properties
+	noData: {
+		"applet ": true,
+		"embed ": true,
+
+		// ...but Flash objects (which have this classid) *can* handle expandos
+		"object ": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"
+	},
+
+	hasData: function( elem ) {
+		elem = elem.nodeType ? jQuery.cache[ elem[ jQuery.expando ] ] : elem[ jQuery.expando ];
+		return !!elem && !isEmptyDataObject( elem );
+	},
+
+	data: function( elem, name, data ) {
+		return internalData( elem, name, data );
+	},
+
+	removeData: function( elem, name ) {
+		return internalRemoveData( elem, name );
+	},
+
+	// For internal use only.
+	_data: function( elem, name, data ) {
+		return internalData( elem, name, data, true );
+	},
+
+	_removeData: function( elem, name ) {
+		return internalRemoveData( elem, name, true );
+	}
+} );
+
+jQuery.fn.extend( {
+	data: function( key, value ) {
+		var i, name, data,
+			elem = this[ 0 ],
+			attrs = elem && elem.attributes;
+
+		// Special expectations of .data basically thwart jQuery.access,
+		// so implement the relevant behavior ourselves
+
+		// Gets all values
+		if ( key === undefined ) {
+			if ( this.length ) {
+				data = jQuery.data( elem );
+
+				if ( elem.nodeType === 1 && !jQuery._data( elem, "parsedAttrs" ) ) {
+					i = attrs.length;
+					while ( i-- ) {
+
+						// Support: IE11+
+						// The attrs elements can be null (#14894)
+						if ( attrs[ i ] ) {
+							name = attrs[ i ].name;
+							if ( name.indexOf( "data-" ) === 0 ) {
+								name = jQuery.camelCase( name.slice( 5 ) );
+								dataAttr( elem, name, data[ name ] );
+							}
+						}
+					}
+					jQuery._data( elem, "parsedAttrs", true );
+				}
+			}
+
+			return data;
+		}
+
+		// Sets multiple values
+		if ( typeof key === "object" ) {
+			return this.each( function() {
+				jQuery.data( this, key );
+			} );
+		}
+
+		return arguments.length > 1 ?
+
+			// Sets one value
+			this.each( function() {
+				jQuery.data( this, key, value );
+			} ) :
+
+			// Gets one value
+			// Try to fetch any internally stored data first
+			elem ? dataAttr( elem, key, jQuery.data( elem, key ) ) : undefined;
+	},
+
+	removeData: function( key ) {
+		return this.each( function() {
+			jQuery.removeData( this, key );
+		} );
+	}
+} );
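+
+// Illustrative sketch of dataAttr() coercion for HTML5 data-* attributes,
+// given hypothetical markup <div id="cfg" data-count="3" data-opts='{"deep":true}'>:
+//
+//     jQuery( "#cfg" ).data( "count" ); // 3, a number (the +data round-trip check passes)
+//     jQuery( "#cfg" ).data( "opts" );  // { deep: true }, via rbrace -> jQuery.parseJSON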
+
+
+jQuery.extend( {
+	queue: function( elem, type, data ) {
+		var queue;
+
+		if ( elem ) {
+			type = ( type || "fx" ) + "queue";
+			queue = jQuery._data( elem, type );
+
+			// Speed up dequeue by getting out quickly if this is just a lookup
+			if ( data ) {
+				if ( !queue || jQuery.isArray( data ) ) {
+					queue = jQuery._data( elem, type, jQuery.makeArray( data ) );
+				} else {
+					queue.push( data );
+				}
+			}
+			return queue || [];
+		}
+	},
+
+	dequeue: function( elem, type ) {
+		type = type || "fx";
+
+		var queue = jQuery.queue( elem, type ),
+			startLength = queue.length,
+			fn = queue.shift(),
+			hooks = jQuery._queueHooks( elem, type ),
+			next = function() {
+				jQuery.dequeue( elem, type );
+			};
+
+		// If the fx queue is dequeued, always remove the progress sentinel
+		if ( fn === "inprogress" ) {
+			fn = queue.shift();
+			startLength--;
+		}
+
+		if ( fn ) {
+
+			// Add a progress sentinel to prevent the fx queue from being
+			// automatically dequeued
+			if ( type === "fx" ) {
+				queue.unshift( "inprogress" );
+			}
+
+			// clear up the last queue stop function
+			delete hooks.stop;
+			fn.call( elem, next, hooks );
+		}
+
+		if ( !startLength && hooks ) {
+			hooks.empty.fire();
+		}
+	},
+
+	// not intended for public consumption - generates a queueHooks object,
+	// or returns the current one
+	_queueHooks: function( elem, type ) {
+		var key = type + "queueHooks";
+		return jQuery._data( elem, key ) || jQuery._data( elem, key, {
+			empty: jQuery.Callbacks( "once memory" ).add( function() {
+				jQuery._removeData( elem, type + "queue" );
+				jQuery._removeData( elem, key );
+			} )
+		} );
+	}
+} );
+
+jQuery.fn.extend( {
+	queue: function( type, data ) {
+		var setter = 2;
+
+		if ( typeof type !== "string" ) {
+			data = type;
+			type = "fx";
+			setter--;
+		}
+
+		if ( arguments.length < setter ) {
+			return jQuery.queue( this[ 0 ], type );
+		}
+
+		return data === undefined ?
+			this :
+			this.each( function() {
+				var queue = jQuery.queue( this, type, data );
+
+				// ensure hooks exist for this queue
+				jQuery._queueHooks( this, type );
+
+				if ( type === "fx" && queue[ 0 ] !== "inprogress" ) {
+					jQuery.dequeue( this, type );
+				}
+			} );
+	},
+	dequeue: function( type ) {
+		return this.each( function() {
+			jQuery.dequeue( this, type );
+		} );
+	},
+	clearQueue: function( type ) {
+		return this.queue( type || "fx", [] );
+	},
+
+	// Get a promise resolved when queues of a certain type
+	// are emptied (fx is the type by default)
+	promise: function( type, obj ) {
+		var tmp,
+			count = 1,
+			defer = jQuery.Deferred(),
+			elements = this,
+			i = this.length,
+			resolve = function() {
+				if ( !( --count ) ) {
+					defer.resolveWith( elements, [ elements ] );
+				}
+			};
+
+		if ( typeof type !== "string" ) {
+			obj = type;
+			type = undefined;
+		}
+		type = type || "fx";
+
+		while ( i-- ) {
+			tmp = jQuery._data( elements[ i ], type + "queueHooks" );
+			if ( tmp && tmp.empty ) {
+				count++;
+				tmp.empty.add( resolve );
+			}
+		}
+		resolve();
+		return defer.promise( obj );
+	}
+} );
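+
+// Illustrative sketch: custom queue steps hand off via next(), and .promise()
+// resolves once the type's queueHooks "empty" list has fired ("#box" is a
+// hypothetical element):
+//
+//     var $box = jQuery( "#box" );
+//     $box.queue( "fx", function( next ) {
+//         // ...do work...
+//         next(); // dequeue the following "fx" item
+//     } );
+//     $box.promise( "fx" ).done( function() { /* queue drained */ } );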
+
+
+( function() {
+	var shrinkWrapBlocksVal;
+
+	support.shrinkWrapBlocks = function() {
+		if ( shrinkWrapBlocksVal != null ) {
+			return shrinkWrapBlocksVal;
+		}
+
+		// Will be changed later if needed.
+		shrinkWrapBlocksVal = false;
+
+		// Minified: var b,c,d
+		var div, body, container;
+
+		body = document.getElementsByTagName( "body" )[ 0 ];
+		if ( !body || !body.style ) {
+
+			// Test fired too early or in an unsupported environment, exit.
+			return;
+		}
+
+		// Setup
+		div = document.createElement( "div" );
+		container = document.createElement( "div" );
+		container.style.cssText = "position:absolute;border:0;width:0;height:0;top:0;left:-9999px";
+		body.appendChild( container ).appendChild( div );
+
+		// Support: IE6
+		// Check if elements with layout shrink-wrap their children
+		if ( typeof div.style.zoom !== "undefined" ) {
+
+			// Reset CSS: box-sizing; display; margin; border
+			div.style.cssText =
+
+				// Support: Firefox<29, Android 2.3
+				// Vendor-prefix box-sizing
+				"-webkit-box-sizing:content-box;-moz-box-sizing:content-box;" +
+				"box-sizing:content-box;display:block;margin:0;border:0;" +
+				"padding:1px;width:1px;zoom:1";
+			div.appendChild( document.createElement( "div" ) ).style.width = "5px";
+			shrinkWrapBlocksVal = div.offsetWidth !== 3;
+		}
+
+		body.removeChild( container );
+
+		return shrinkWrapBlocksVal;
+	};
+
+} )();
+var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source;
+
+var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" );
+
+
+var cssExpand = [ "Top", "Right", "Bottom", "Left" ];
+
+var isHidden = function( elem, el ) {
+
+		// isHidden might be called from jQuery#filter function;
+		// in that case, element will be second argument
+		elem = el || elem;
+		return jQuery.css( elem, "display" ) === "none" ||
+			!jQuery.contains( elem.ownerDocument, elem );
+	};
+
+
+
+function adjustCSS( elem, prop, valueParts, tween ) {
+	var adjusted,
+		scale = 1,
+		maxIterations = 20,
+		currentValue = tween ?
+			function() { return tween.cur(); } :
+			function() { return jQuery.css( elem, prop, "" ); },
+		initial = currentValue(),
+		unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? "" : "px" ),
+
+		// Starting value computation is required for potential unit mismatches
+		initialInUnit = ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) &&
+			rcssNum.exec( jQuery.css( elem, prop ) );
+
+	if ( initialInUnit && initialInUnit[ 3 ] !== unit ) {
+
+		// Trust units reported by jQuery.css
+		unit = unit || initialInUnit[ 3 ];
+
+		// Make sure we update the tween properties later on
+		valueParts = valueParts || [];
+
+		// Iteratively approximate from a nonzero starting point
+		initialInUnit = +initial || 1;
+
+		do {
+
+			// If previous iteration zeroed out, double until we get *something*.
+			// Use string for doubling so we don't accidentally see scale as unchanged below
+			scale = scale || ".5";
+
+			// Adjust and apply
+			initialInUnit = initialInUnit / scale;
+			jQuery.style( elem, prop, initialInUnit + unit );
+
+		// Update scale, tolerating zero or NaN from tween.cur()
+		// Break the loop if scale is unchanged or perfect, or if we've just had enough.
+		} while (
+			scale !== ( scale = currentValue() / initial ) && scale !== 1 && --maxIterations
+		);
+	}
+
+	if ( valueParts ) {
+		initialInUnit = +initialInUnit || +initial || 0;
+
+		// Apply relative offset (+=/-=) if specified
+		adjusted = valueParts[ 1 ] ?
+			initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] :
+			+valueParts[ 2 ];
+		if ( tween ) {
+			tween.unit = unit;
+			tween.start = initialInUnit;
+			tween.end = adjusted;
+		}
+	}
+	return adjusted;
+}
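+
+// Illustrative sketch: for a relative value in a unit the browser doesn't
+// report (e.g. "+=2em" over a px-based computed style), adjustCSS probes the
+// element until the em/px scale converges, then returns start + 2 in ems
+// (elem is a hypothetical styled element):
+//
+//     var parts = rcssNum.exec( "+=2em" );          // [ "+=2em", "+", "2", "em" ]
+//     var end = adjustCSS( elem, "width", parts );  // numeric end value, in em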
+
+
+// Multifunctional method to get and set values of a collection
+// The value/s can optionally be executed if it's a function
+var access = function( elems, fn, key, value, chainable, emptyGet, raw ) {
+	var i = 0,
+		length = elems.length,
+		bulk = key == null;
+
+	// Sets many values
+	if ( jQuery.type( key ) === "object" ) {
+		chainable = true;
+		for ( i in key ) {
+			access( elems, fn, i, key[ i ], true, emptyGet, raw );
+		}
+
+	// Sets one value
+	} else if ( value !== undefined ) {
+		chainable = true;
+
+		if ( !jQuery.isFunction( value ) ) {
+			raw = true;
+		}
+
+		if ( bulk ) {
+
+			// Bulk operations run against the entire set
+			if ( raw ) {
+				fn.call( elems, value );
+				fn = null;
+
+			// ...except when executing function values
+			} else {
+				bulk = fn;
+				fn = function( elem, key, value ) {
+					return bulk.call( jQuery( elem ), value );
+				};
+			}
+		}
+
+		if ( fn ) {
+			for ( ; i < length; i++ ) {
+				fn(
+					elems[ i ],
+					key,
+					raw ? value : value.call( elems[ i ], i, fn( elems[ i ], key ) )
+				);
+			}
+		}
+	}
+
+	return chainable ?
+		elems :
+
+		// Gets
+		bulk ?
+			fn.call( elems ) :
+			length ? fn( elems[ 0 ], key ) : emptyGet;
+};
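+
+// Illustrative sketch of the delegation pattern access() enables; real
+// collection methods follow this shape (demoProp is hypothetical):
+//
+//     jQuery.fn.demoProp = function( name, value ) {
+//         return access( this, function( elem, name, value ) {
+//             if ( value === undefined ) {
+//                 return elem[ name ];  // getter branch
+//             }
+//             elem[ name ] = value;     // setter branch
+//         }, name, value, arguments.length > 1 );
+//     };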
+var rcheckableType = ( /^(?:checkbox|radio)$/i );
+
+var rtagName = ( /<([\w:-]+)/ );
+
+var rscriptType = ( /^$|\/(?:java|ecma)script/i );
+
+var rleadingWhitespace = ( /^\s+/ );
+
+var nodeNames = "abbr|article|aside|audio|bdi|canvas|data|datalist|" +
+		"details|dialog|figcaption|figure|footer|header|hgroup|main|" +
+		"mark|meter|nav|output|picture|progress|section|summary|template|time|video";
+
+
+
+function createSafeFragment( document ) {
+	var list = nodeNames.split( "|" ),
+		safeFrag = document.createDocumentFragment();
+
+	if ( safeFrag.createElement ) {
+		while ( list.length ) {
+			safeFrag.createElement(
+				list.pop()
+			);
+		}
+	}
+	return safeFrag;
+}
+
+
+( function() {
+	var div = document.createElement( "div" ),
+		fragment = document.createDocumentFragment(),
+		input = document.createElement( "input" );
+
+	// Setup
+	div.innerHTML = "  <link/><table></table><a href='/a'>a</a><input type='checkbox'/>";
+
+	// IE strips leading whitespace when .innerHTML is used
+	support.leadingWhitespace = div.firstChild.nodeType === 3;
+
+	// Make sure that tbody elements aren't automatically inserted
+	// IE will insert them into empty tables
+	support.tbody = !div.getElementsByTagName( "tbody" ).length;
+
+	// Make sure that link elements get serialized correctly by innerHTML
+	// This requires a wrapper element in IE
+	support.htmlSerialize = !!div.getElementsByTagName( "link" ).length;
+
+	// Makes sure cloning an html5 element does not cause problems
+	// Where outerHTML is undefined, this still works
+	support.html5Clone =
+		document.createElement( "nav" ).cloneNode( true ).outerHTML !== "<:nav></:nav>";
+
+	// Check if a disconnected checkbox will retain its checked
+	// value of true after appended to the DOM (IE6/7)
+	input.type = "checkbox";
+	input.checked = true;
+	fragment.appendChild( input );
+	support.appendChecked = input.checked;
+
+	// Make sure textarea (and checkbox) defaultValue is properly cloned
+	// Support: IE6-IE11+
+	div.innerHTML = "<textarea>x</textarea>";
+	support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue;
+
+	// #11217 - WebKit loses check when the name is after the checked attribute
+	fragment.appendChild( div );
+
+	// Support: Windows Web Apps (WWA)
+	// `name` and `type` must use .setAttribute for WWA (#14901)
+	input = document.createElement( "input" );
+	input.setAttribute( "type", "radio" );
+	input.setAttribute( "checked", "checked" );
+	input.setAttribute( "name", "t" );
+
+	div.appendChild( input );
+
+	// Support: Safari 5.1, iOS 5.1, Android 4.x, Android 2.3
+	// old WebKit doesn't clone checked state correctly in fragments
+	support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked;
+
+	// Support: IE<9
+	// Cloned elements keep attachEvent handlers, we use addEventListener on IE9+
+	support.noCloneEvent = !!div.addEventListener;
+
+	// Support: IE<9
+	// Since attributes and properties are the same in IE,
+	// cleanData must set properties to undefined rather than use removeAttribute
+	div[ jQuery.expando ] = 1;
+	support.attributes = !div.getAttribute( jQuery.expando );
+} )();
+
+
+// We have to close these tags to support XHTML (#13200)
+var wrapMap = {
+	option: [ 1, "<select multiple='multiple'>", "</select>" ],
+	legend: [ 1, "<fieldset>", "</fieldset>" ],
+	area: [ 1, "<map>", "</map>" ],
+
+	// Support: IE8
+	param: [ 1, "<object>", "</object>" ],
+	thead: [ 1, "<table>", "</table>" ],
+	tr: [ 2, "<table><tbody>", "</tbody></table>" ],
+	col: [ 2, "<table><tbody></tbody><colgroup>", "</colgroup></table>" ],
+	td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ],
+
+	// IE6-8 can't serialize link, script, style, or any html5 (NoScope) tags,
+	// unless wrapped in a div with non-breaking characters in front of it.
+	_default: support.htmlSerialize ? [ 0, "", "" ] : [ 1, "X<div>", "</div>" ]
+};
+
+// Support: IE8-IE9
+wrapMap.optgroup = wrapMap.option;
+
+wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead;
+wrapMap.th = wrapMap.td;
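+
+// Illustrative sketch: wrapMap drives buildFragment() below. A bare "<tr>"
+// cannot be assigned via innerHTML on a <div>, so it is deserialized as
+// "<table><tbody>" + html + "</tbody></table>" and then unwrapped wrap[0] (= 2)
+// levels to reach the row:
+//
+//     jQuery( "<tr><td>cell</td></tr>" ); // parses via the "tr" entry above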
+
+
+function getAll( context, tag ) {
+	var elems, elem,
+		i = 0,
+		found = typeof context.getElementsByTagName !== "undefined" ?
+			context.getElementsByTagName( tag || "*" ) :
+			typeof context.querySelectorAll !== "undefined" ?
+				context.querySelectorAll( tag || "*" ) :
+				undefined;
+
+	if ( !found ) {
+		for ( found = [], elems = context.childNodes || context;
+			( elem = elems[ i ] ) != null;
+			i++
+		) {
+			if ( !tag || jQuery.nodeName( elem, tag ) ) {
+				found.push( elem );
+			} else {
+				jQuery.merge( found, getAll( elem, tag ) );
+			}
+		}
+	}
+
+	return tag === undefined || tag && jQuery.nodeName( context, tag ) ?
+		jQuery.merge( [ context ], found ) :
+		found;
+}
+
+
+// Mark scripts as having already been evaluated
+function setGlobalEval( elems, refElements ) {
+	var elem,
+		i = 0;
+	for ( ; ( elem = elems[ i ] ) != null; i++ ) {
+		jQuery._data(
+			elem,
+			"globalEval",
+			!refElements || jQuery._data( refElements[ i ], "globalEval" )
+		);
+	}
+}
+
+
+var rhtml = /<|&#?\w+;/,
+	rtbody = /<tbody/i;
+
+function fixDefaultChecked( elem ) {
+	if ( rcheckableType.test( elem.type ) ) {
+		elem.defaultChecked = elem.checked;
+	}
+}
+
+function buildFragment( elems, context, scripts, selection, ignored ) {
+	var j, elem, contains,
+		tmp, tag, tbody, wrap,
+		l = elems.length,
+
+		// Ensure a safe fragment
+		safe = createSafeFragment( context ),
+
+		nodes = [],
+		i = 0;
+
+	for ( ; i < l; i++ ) {
+		elem = elems[ i ];
+
+		if ( elem || elem === 0 ) {
+
+			// Add nodes directly
+			if ( jQuery.type( elem ) === "object" ) {
+				jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem );
+
+			// Convert non-html into a text node
+			} else if ( !rhtml.test( elem ) ) {
+				nodes.push( context.createTextNode( elem ) );
+
+			// Convert html into DOM nodes
+			} else {
+				tmp = tmp || safe.appendChild( context.createElement( "div" ) );
+
+				// Deserialize a standard representation
+				tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase();
+				wrap = wrapMap[ tag ] || wrapMap._default;
+
+				tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ];
+
+				// Descend through wrappers to the right content
+				j = wrap[ 0 ];
+				while ( j-- ) {
+					tmp = tmp.lastChild;
+				}
+
+				// Manually add leading whitespace removed by IE
+				if ( !support.leadingWhitespace && rleadingWhitespace.test( elem ) ) {
+					nodes.push( context.createTextNode( rleadingWhitespace.exec( elem )[ 0 ] ) );
+				}
+
+				// Remove IE's autoinserted <tbody> from table fragments
+				if ( !support.tbody ) {
+
+					// String was a <table>, *may* have spurious <tbody>
+					elem = tag === "table" && !rtbody.test( elem ) ?
+						tmp.firstChild :
+
+						// String was a bare <thead> or <tfoot>
+						wrap[ 1 ] === "<table>" && !rtbody.test( elem ) ?
+							tmp :
+							0;
+
+					j = elem && elem.childNodes.length;
+					while ( j-- ) {
+						if ( jQuery.nodeName( ( tbody = elem.childNodes[ j ] ), "tbody" ) &&
+							!tbody.childNodes.length ) {
+
+							elem.removeChild( tbody );
+						}
+					}
+				}
+
+				jQuery.merge( nodes, tmp.childNodes );
+
+				// Fix #12392 for WebKit and IE > 9
+				tmp.textContent = "";
+
+				// Fix #12392 for oldIE
+				while ( tmp.firstChild ) {
+					tmp.removeChild( tmp.firstChild );
+				}
+
+				// Remember the top-level container for proper cleanup
+				tmp = safe.lastChild;
+			}
+		}
+	}
+
+	// Fix #11356: Clear elements from fragment
+	if ( tmp ) {
+		safe.removeChild( tmp );
+	}
+
+	// Reset defaultChecked for any radios and checkboxes
+	// about to be appended to the DOM in IE 6/7 (#8060)
+	if ( !support.appendChecked ) {
+		jQuery.grep( getAll( nodes, "input" ), fixDefaultChecked );
+	}
+
+	i = 0;
+	while ( ( elem = nodes[ i++ ] ) ) {
+
+		// Skip elements already in the context collection (trac-4087)
+		if ( selection && jQuery.inArray( elem, selection ) > -1 ) {
+			if ( ignored ) {
+				ignored.push( elem );
+			}
+
+			continue;
+		}
+
+		contains = jQuery.contains( elem.ownerDocument, elem );
+
+		// Append to fragment
+		tmp = getAll( safe.appendChild( elem ), "script" );
+
+		// Preserve script evaluation history
+		if ( contains ) {
+			setGlobalEval( tmp );
+		}
+
+		// Capture executables
+		if ( scripts ) {
+			j = 0;
+			while ( ( elem = tmp[ j++ ] ) ) {
+				if ( rscriptType.test( elem.type || "" ) ) {
+					scripts.push( elem );
+				}
+			}
+		}
+	}
+
+	tmp = null;
+
+	return safe;
+}
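+
+// Rough sketch of the result (buildFragment is internal; callers normally
+// reach it through jQuery( html ) or the domManip() helper further down):
+//     var frag = buildFragment( [ "<tr><td>x</td></tr>" ], document );
+//     frag.childNodes[ 0 ].nodeName; // "TR" - the table wrappers are gone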
+
+
+( function() {
+	var i, eventName,
+		div = document.createElement( "div" );
+
+	// Support: IE<9 (lacks submit/change bubbling), Firefox (lacks focusin/focusout events)
+	for ( i in { submit: true, change: true, focusin: true } ) {
+		eventName = "on" + i;
+
+		if ( !( support[ i ] = eventName in window ) ) {
+
+			// Beware of CSP restrictions (https://developer.mozilla.org/en/Security/CSP)
+			div.setAttribute( eventName, "t" );
+			support[ i ] = div.attributes[ eventName ].expando === false;
+		}
+	}
+
+	// Null elements to avoid leaks in IE.
+	div = null;
+} )();
+
+
+var rformElems = /^(?:input|select|textarea)$/i,
+	rkeyEvent = /^key/,
+	rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/,
+	rfocusMorph = /^(?:focusinfocus|focusoutblur)$/,
+	rtypenamespace = /^([^.]*)(?:\.(.+)|)/;
+
+function returnTrue() {
+	return true;
+}
+
+function returnFalse() {
+	return false;
+}
+
+// Support: IE9
+// See #13393 for more info
+function safeActiveElement() {
+	try {
+		return document.activeElement;
+	} catch ( err ) { }
+}
+
+function on( elem, types, selector, data, fn, one ) {
+	var origFn, type;
+
+	// Types can be a map of types/handlers
+	if ( typeof types === "object" ) {
+
+		// ( types-Object, selector, data )
+		if ( typeof selector !== "string" ) {
+
+			// ( types-Object, data )
+			data = data || selector;
+			selector = undefined;
+		}
+		for ( type in types ) {
+			on( elem, type, selector, data, types[ type ], one );
+		}
+		return elem;
+	}
+
+	if ( data == null && fn == null ) {
+
+		// ( types, fn )
+		fn = selector;
+		data = selector = undefined;
+	} else if ( fn == null ) {
+		if ( typeof selector === "string" ) {
+
+			// ( types, selector, fn )
+			fn = data;
+			data = undefined;
+		} else {
+
+			// ( types, data, fn )
+			fn = data;
+			data = selector;
+			selector = undefined;
+		}
+	}
+	if ( fn === false ) {
+		fn = returnFalse;
+	} else if ( !fn ) {
+		return elem;
+	}
+
+	if ( one === 1 ) {
+		origFn = fn;
+		fn = function( event ) {
+
+			// Can use an empty set, since event contains the info
+			jQuery().off( event );
+			return origFn.apply( this, arguments );
+		};
+
+		// Use same guid so caller can remove using origFn
+		fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ );
+	}
+	return elem.each( function() {
+		jQuery.event.add( this, types, fn, data, selector );
+	} );
+}
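+
+// The argument shuffling above is what lets the public .on() accept all of:
+//     elem.on( "click", fn );                          // ( types, fn )
+//     elem.on( "click", ".child", fn );                // ( types, selector, fn )
+//     elem.on( "click", { answer: 42 }, fn );          // ( types, data, fn )
+//     elem.on( { click: fn, keyup: fn2 }, ".child" );  // types-object form
+// (elem, fn and fn2 are placeholders for a jQuery set and its handlers.)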
+
+/*
+ * Helper functions for managing events -- not part of the public interface.
+ * Props to Dean Edwards' addEvent library for many of the ideas.
+ */
+jQuery.event = {
+
+	global: {},
+
+	add: function( elem, types, handler, data, selector ) {
+		var tmp, events, t, handleObjIn,
+			special, eventHandle, handleObj,
+			handlers, type, namespaces, origType,
+			elemData = jQuery._data( elem );
+
+		// Don't attach events to noData or text/comment nodes (but allow plain objects)
+		if ( !elemData ) {
+			return;
+		}
+
+		// Caller can pass in an object of custom data in lieu of the handler
+		if ( handler.handler ) {
+			handleObjIn = handler;
+			handler = handleObjIn.handler;
+			selector = handleObjIn.selector;
+		}
+
+		// Make sure that the handler has a unique ID, used to find/remove it later
+		if ( !handler.guid ) {
+			handler.guid = jQuery.guid++;
+		}
+
+		// Init the element's event structure and main handler, if this is the first
+		if ( !( events = elemData.events ) ) {
+			events = elemData.events = {};
+		}
+		if ( !( eventHandle = elemData.handle ) ) {
+			eventHandle = elemData.handle = function( e ) {
+
+				// Discard the second event of a jQuery.event.trigger() and
+				// when an event is called after a page has unloaded
+				return typeof jQuery !== "undefined" &&
+					( !e || jQuery.event.triggered !== e.type ) ?
+					jQuery.event.dispatch.apply( eventHandle.elem, arguments ) :
+					undefined;
+			};
+
+			// Add elem as a property of the handle fn to prevent a memory leak
+			// with IE non-native events
+			eventHandle.elem = elem;
+		}
+
+		// Handle multiple events separated by a space
+		types = ( types || "" ).match( rnotwhite ) || [ "" ];
+		t = types.length;
+		while ( t-- ) {
+			tmp = rtypenamespace.exec( types[ t ] ) || [];
+			type = origType = tmp[ 1 ];
+			namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort();
+
+			// There *must* be a type, no attaching namespace-only handlers
+			if ( !type ) {
+				continue;
+			}
+
+			// If event changes its type, use the special event handlers for the changed type
+			special = jQuery.event.special[ type ] || {};
+
+			// If selector defined, determine special event api type, otherwise given type
+			type = ( selector ? special.delegateType : special.bindType ) || type;
+
+			// Update special based on newly reset type
+			special = jQuery.event.special[ type ] || {};
+
+			// handleObj is passed to all event handlers
+			handleObj = jQuery.extend( {
+				type: type,
+				origType: origType,
+				data: data,
+				handler: handler,
+				guid: handler.guid,
+				selector: selector,
+				needsContext: selector && jQuery.expr.match.needsContext.test( selector ),
+				namespace: namespaces.join( "." )
+			}, handleObjIn );
+
+			// Init the event handler queue if we're the first
+			if ( !( handlers = events[ type ] ) ) {
+				handlers = events[ type ] = [];
+				handlers.delegateCount = 0;
+
+				// Only use addEventListener/attachEvent if the special events handler returns false
+				if ( !special.setup ||
+					special.setup.call( elem, data, namespaces, eventHandle ) === false ) {
+
+					// Bind the global event handler to the element
+					if ( elem.addEventListener ) {
+						elem.addEventListener( type, eventHandle, false );
+
+					} else if ( elem.attachEvent ) {
+						elem.attachEvent( "on" + type, eventHandle );
+					}
+				}
+			}
+
+			if ( special.add ) {
+				special.add.call( elem, handleObj );
+
+				if ( !handleObj.handler.guid ) {
+					handleObj.handler.guid = handler.guid;
+				}
+			}
+
+			// Add to the element's handler list, delegates in front
+			if ( selector ) {
+				handlers.splice( handlers.delegateCount++, 0, handleObj );
+			} else {
+				handlers.push( handleObj );
+			}
+
+			// Keep track of which events have ever been used, for event optimization
+			jQuery.event.global[ type ] = true;
+		}
+
+		// Nullify elem to prevent memory leaks in IE
+		elem = null;
+	},
+
+	// Detach an event or set of events from an element
+	remove: function( elem, types, handler, selector, mappedTypes ) {
+		var j, handleObj, tmp,
+			origCount, t, events,
+			special, handlers, type,
+			namespaces, origType,
+			elemData = jQuery.hasData( elem ) && jQuery._data( elem );
+
+		if ( !elemData || !( events = elemData.events ) ) {
+			return;
+		}
+
+		// Once for each type.namespace in types; type may be omitted
+		types = ( types || "" ).match( rnotwhite ) || [ "" ];
+		t = types.length;
+		while ( t-- ) {
+			tmp = rtypenamespace.exec( types[ t ] ) || [];
+			type = origType = tmp[ 1 ];
+			namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort();
+
+			// Unbind all events (on this namespace, if provided) for the element
+			if ( !type ) {
+				for ( type in events ) {
+					jQuery.event.remove( elem, type + types[ t ], handler, selector, true );
+				}
+				continue;
+			}
+
+			special = jQuery.event.special[ type ] || {};
+			type = ( selector ? special.delegateType : special.bindType ) || type;
+			handlers = events[ type ] || [];
+			tmp = tmp[ 2 ] &&
+				new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" );
+
+			// Remove matching events
+			origCount = j = handlers.length;
+			while ( j-- ) {
+				handleObj = handlers[ j ];
+
+				if ( ( mappedTypes || origType === handleObj.origType ) &&
+					( !handler || handler.guid === handleObj.guid ) &&
+					( !tmp || tmp.test( handleObj.namespace ) ) &&
+					( !selector || selector === handleObj.selector ||
+						selector === "**" && handleObj.selector ) ) {
+					handlers.splice( j, 1 );
+
+					if ( handleObj.selector ) {
+						handlers.delegateCount--;
+					}
+					if ( special.remove ) {
+						special.remove.call( elem, handleObj );
+					}
+				}
+			}
+
+			// Remove generic event handler if we removed something and no more handlers exist
+			// (avoids potential for endless recursion during removal of special event handlers)
+			if ( origCount && !handlers.length ) {
+				if ( !special.teardown ||
+					special.teardown.call( elem, namespaces, elemData.handle ) === false ) {
+
+					jQuery.removeEvent( elem, type, elemData.handle );
+				}
+
+				delete events[ type ];
+			}
+		}
+
+		// Remove the expando if it's no longer used
+		if ( jQuery.isEmptyObject( events ) ) {
+			delete elemData.handle;
+
+			// removeData also checks for emptiness and clears the expando if empty
+			// so use it instead of delete
+			jQuery._removeData( elem, "events" );
+		}
+	},
+
+	trigger: function( event, data, elem, onlyHandlers ) {
+		var handle, ontype, cur,
+			bubbleType, special, tmp, i,
+			eventPath = [ elem || document ],
+			type = hasOwn.call( event, "type" ) ? event.type : event,
+			namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : [];
+
+		cur = tmp = elem = elem || document;
+
+		// Don't do events on text and comment nodes
+		if ( elem.nodeType === 3 || elem.nodeType === 8 ) {
+			return;
+		}
+
+		// focus/blur morphs to focusin/out; ensure we're not firing them right now
+		if ( rfocusMorph.test( type + jQuery.event.triggered ) ) {
+			return;
+		}
+
+		if ( type.indexOf( "." ) > -1 ) {
+
+			// Namespaced trigger; create a regexp to match event type in handle()
+			namespaces = type.split( "." );
+			type = namespaces.shift();
+			namespaces.sort();
+		}
+		ontype = type.indexOf( ":" ) < 0 && "on" + type;
+
+		// Caller can pass in a jQuery.Event object, Object, or just an event type string
+		event = event[ jQuery.expando ] ?
+			event :
+			new jQuery.Event( type, typeof event === "object" && event );
+
+		// Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true)
+		event.isTrigger = onlyHandlers ? 2 : 3;
+		event.namespace = namespaces.join( "." );
+		event.rnamespace = event.namespace ?
+			new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) :
+			null;
+
+		// Clean up the event in case it is being reused
+		event.result = undefined;
+		if ( !event.target ) {
+			event.target = elem;
+		}
+
+		// Clone any incoming data and prepend the event, creating the handler arg list
+		data = data == null ?
+			[ event ] :
+			jQuery.makeArray( data, [ event ] );
+
+		// Allow special events to draw outside the lines
+		special = jQuery.event.special[ type ] || {};
+		if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) {
+			return;
+		}
+
+		// Determine event propagation path in advance, per W3C events spec (#9951)
+		// Bubble up to document, then to window; watch for a global ownerDocument var (#9724)
+		if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) {
+
+			bubbleType = special.delegateType || type;
+			if ( !rfocusMorph.test( bubbleType + type ) ) {
+				cur = cur.parentNode;
+			}
+			for ( ; cur; cur = cur.parentNode ) {
+				eventPath.push( cur );
+				tmp = cur;
+			}
+
+			// Only add window if we got to document (e.g., not plain obj or detached DOM)
+			if ( tmp === ( elem.ownerDocument || document ) ) {
+				eventPath.push( tmp.defaultView || tmp.parentWindow || window );
+			}
+		}
+
+		// Fire handlers on the event path
+		i = 0;
+		while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) {
+
+			event.type = i > 1 ?
+				bubbleType :
+				special.bindType || type;
+
+			// jQuery handler
+			handle = ( jQuery._data( cur, "events" ) || {} )[ event.type ] &&
+				jQuery._data( cur, "handle" );
+
+			if ( handle ) {
+				handle.apply( cur, data );
+			}
+
+			// Native handler
+			handle = ontype && cur[ ontype ];
+			if ( handle && handle.apply && acceptData( cur ) ) {
+				event.result = handle.apply( cur, data );
+				if ( event.result === false ) {
+					event.preventDefault();
+				}
+			}
+		}
+		event.type = type;
+
+		// If nobody prevented the default action, do it now
+		if ( !onlyHandlers && !event.isDefaultPrevented() ) {
+
+			if (
+				( !special._default ||
+				 special._default.apply( eventPath.pop(), data ) === false
+				) && acceptData( elem )
+			) {
+
+				// Call a native DOM method on the target with the same name as the event.
+				// Can't use an .isFunction() check here because IE6/7 fails that test.
+				// Don't do default actions on window, that's where global variables be (#6170)
+				if ( ontype && elem[ type ] && !jQuery.isWindow( elem ) ) {
+
+					// Don't re-trigger an onFOO event when we call its FOO() method
+					tmp = elem[ ontype ];
+
+					if ( tmp ) {
+						elem[ ontype ] = null;
+					}
+
+					// Prevent re-triggering of the same event, since we already bubbled it above
+					jQuery.event.triggered = type;
+					try {
+						elem[ type ]();
+					} catch ( e ) {
+
+						// IE<9 dies on focus/blur to hidden element (#1486,#12518)
+						// only reproducible on winXP IE8 native, not IE9 in IE8 mode
+					}
+					jQuery.event.triggered = undefined;
+
+					if ( tmp ) {
+						elem[ ontype ] = tmp;
+					}
+				}
+			}
+		}
+
+		return event.result;
+	},
+
+	dispatch: function( event ) {
+
+		// Make a writable jQuery.Event from the native event object
+		event = jQuery.event.fix( event );
+
+		var i, j, ret, matched, handleObj,
+			handlerQueue = [],
+			args = slice.call( arguments ),
+			handlers = ( jQuery._data( this, "events" ) || {} )[ event.type ] || [],
+			special = jQuery.event.special[ event.type ] || {};
+
+		// Use the fix-ed jQuery.Event rather than the (read-only) native event
+		args[ 0 ] = event;
+		event.delegateTarget = this;
+
+		// Call the preDispatch hook for the mapped type, and let it bail if desired
+		if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) {
+			return;
+		}
+
+		// Determine handlers
+		handlerQueue = jQuery.event.handlers.call( this, event, handlers );
+
+		// Run delegates first; they may want to stop propagation beneath us
+		i = 0;
+		while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) {
+			event.currentTarget = matched.elem;
+
+			j = 0;
+			while ( ( handleObj = matched.handlers[ j++ ] ) &&
+				!event.isImmediatePropagationStopped() ) {
+
+				// Triggered event must either 1) have no namespace, or 2) have namespace(s)
+				// a subset or equal to those in the bound event (both can have no namespace).
+				if ( !event.rnamespace || event.rnamespace.test( handleObj.namespace ) ) {
+
+					event.handleObj = handleObj;
+					event.data = handleObj.data;
+
+					ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle ||
+						handleObj.handler ).apply( matched.elem, args );
+
+					if ( ret !== undefined ) {
+						if ( ( event.result = ret ) === false ) {
+							event.preventDefault();
+							event.stopPropagation();
+						}
+					}
+				}
+			}
+		}
+
+		// Call the postDispatch hook for the mapped type
+		if ( special.postDispatch ) {
+			special.postDispatch.call( this, event );
+		}
+
+		return event.result;
+	},
+
+	handlers: function( event, handlers ) {
+		var i, matches, sel, handleObj,
+			handlerQueue = [],
+			delegateCount = handlers.delegateCount,
+			cur = event.target;
+
+		// Support (at least): Chrome, IE9
+		// Find delegate handlers
+		// Black-hole SVG <use> instance trees (#13180)
+		//
+		// Support: Firefox<=42+
+		// Avoid non-left-click in FF but don't block IE radio events (#3861, gh-2343)
+		if ( delegateCount && cur.nodeType &&
+			( event.type !== "click" || isNaN( event.button ) || event.button < 1 ) ) {
+
+			/* jshint eqeqeq: false */
+			for ( ; cur != this; cur = cur.parentNode || this ) {
+				/* jshint eqeqeq: true */
+
+				// Don't check non-elements (#13208)
+				// Don't process clicks on disabled elements (#6911, #8165, #11382, #11764)
+				if ( cur.nodeType === 1 && ( cur.disabled !== true || event.type !== "click" ) ) {
+					matches = [];
+					for ( i = 0; i < delegateCount; i++ ) {
+						handleObj = handlers[ i ];
+
+						// Don't conflict with Object.prototype properties (#13203)
+						sel = handleObj.selector + " ";
+
+						if ( matches[ sel ] === undefined ) {
+							matches[ sel ] = handleObj.needsContext ?
+								jQuery( sel, this ).index( cur ) > -1 :
+								jQuery.find( sel, this, null, [ cur ] ).length;
+						}
+						if ( matches[ sel ] ) {
+							matches.push( handleObj );
+						}
+					}
+					if ( matches.length ) {
+						handlerQueue.push( { elem: cur, handlers: matches } );
+					}
+				}
+			}
+		}
+
+		// Add the remaining (directly-bound) handlers
+		if ( delegateCount < handlers.length ) {
+			handlerQueue.push( { elem: this, handlers: handlers.slice( delegateCount ) } );
+		}
+
+		return handlerQueue;
+	},
+
+	fix: function( event ) {
+		if ( event[ jQuery.expando ] ) {
+			return event;
+		}
+
+		// Create a writable copy of the event object and normalize some properties
+		var i, prop, copy,
+			type = event.type,
+			originalEvent = event,
+			fixHook = this.fixHooks[ type ];
+
+		if ( !fixHook ) {
+			this.fixHooks[ type ] = fixHook =
+				rmouseEvent.test( type ) ? this.mouseHooks :
+				rkeyEvent.test( type ) ? this.keyHooks :
+				{};
+		}
+		copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props;
+
+		event = new jQuery.Event( originalEvent );
+
+		i = copy.length;
+		while ( i-- ) {
+			prop = copy[ i ];
+			event[ prop ] = originalEvent[ prop ];
+		}
+
+		// Support: IE<9
+		// Fix target property (#1925)
+		if ( !event.target ) {
+			event.target = originalEvent.srcElement || document;
+		}
+
+		// Support: Safari 6-8+
+		// Target should not be a text node (#504, #13143)
+		if ( event.target.nodeType === 3 ) {
+			event.target = event.target.parentNode;
+		}
+
+		// Support: IE<9
+		// For mouse/key events, metaKey==false if it's undefined (#3368, #11328)
+		event.metaKey = !!event.metaKey;
+
+		return fixHook.filter ? fixHook.filter( event, originalEvent ) : event;
+	},
+
+	// Includes some event props shared by KeyEvent and MouseEvent
+	props: ( "altKey bubbles cancelable ctrlKey currentTarget detail eventPhase " +
+		"metaKey relatedTarget shiftKey target timeStamp view which" ).split( " " ),
+
+	fixHooks: {},
+
+	keyHooks: {
+		props: "char charCode key keyCode".split( " " ),
+		filter: function( event, original ) {
+
+			// Add which for key events
+			if ( event.which == null ) {
+				event.which = original.charCode != null ? original.charCode : original.keyCode;
+			}
+
+			return event;
+		}
+	},
+
+	mouseHooks: {
+		props: ( "button buttons clientX clientY fromElement offsetX offsetY " +
+			"pageX pageY screenX screenY toElement" ).split( " " ),
+		filter: function( event, original ) {
+			var body, eventDoc, doc,
+				button = original.button,
+				fromElement = original.fromElement;
+
+			// Calculate pageX/Y if missing and clientX/Y available
+			if ( event.pageX == null && original.clientX != null ) {
+				eventDoc = event.target.ownerDocument || document;
+				doc = eventDoc.documentElement;
+				body = eventDoc.body;
+
+				event.pageX = original.clientX +
+					( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) -
+					( doc && doc.clientLeft || body && body.clientLeft || 0 );
+				event.pageY = original.clientY +
+					( doc && doc.scrollTop  || body && body.scrollTop  || 0 ) -
+					( doc && doc.clientTop  || body && body.clientTop  || 0 );
+			}
+
+			// Add relatedTarget, if necessary
+			if ( !event.relatedTarget && fromElement ) {
+				event.relatedTarget = fromElement === event.target ?
+					original.toElement :
+					fromElement;
+			}
+
+			// Add which for click: 1 === left; 2 === middle; 3 === right
+			// Note: button is not normalized, so don't use it
+			if ( !event.which && button !== undefined ) {
+				event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 2 : 0 ) ) );
+			}
+
+			return event;
+		}
+	},
+
+	special: {
+		load: {
+
+			// Prevent triggered image.load events from bubbling to window.load
+			noBubble: true
+		},
+		focus: {
+
+			// Fire native event if possible so blur/focus sequence is correct
+			trigger: function() {
+				if ( this !== safeActiveElement() && this.focus ) {
+					try {
+						this.focus();
+						return false;
+					} catch ( e ) {
+
+						// Support: IE<9
+						// If we error on focus to hidden element (#1486, #12518),
+						// let .trigger() run the handlers
+					}
+				}
+			},
+			delegateType: "focusin"
+		},
+		blur: {
+			trigger: function() {
+				if ( this === safeActiveElement() && this.blur ) {
+					this.blur();
+					return false;
+				}
+			},
+			delegateType: "focusout"
+		},
+		click: {
+
+			// For checkbox, fire native event so checked state will be right
+			trigger: function() {
+				if ( jQuery.nodeName( this, "input" ) && this.type === "checkbox" && this.click ) {
+					this.click();
+					return false;
+				}
+			},
+
+			// For cross-browser consistency, don't fire native .click() on links
+			_default: function( event ) {
+				return jQuery.nodeName( event.target, "a" );
+			}
+		},
+
+		beforeunload: {
+			postDispatch: function( event ) {
+
+				// Support: Firefox 20+
+				// Firefox doesn't alert if the returnValue field is not set.
+				if ( event.result !== undefined && event.originalEvent ) {
+					event.originalEvent.returnValue = event.result;
+				}
+			}
+		}
+	},
+
+	// Piggyback on a donor event to simulate a different one
+	simulate: function( type, elem, event ) {
+		var e = jQuery.extend(
+			new jQuery.Event(),
+			event,
+			{
+				type: type,
+				isSimulated: true
+
+				// Previously, `originalEvent: {}` was set here, so a
+				// stopPropagation call would never reach the donor event: our
+				// own jQuery.event.stopPropagation function checked for the
+				// existence of an originalEvent.stopPropagation method, so the
+				// call was consequently a no-op.
+				//
+				// The guard for simulated events now lives in the
+				// jQuery.event.stopPropagation function itself, since
+				// `originalEvent` should point to the true originating event,
+				// for consistency with other events and for more focused logic.
+			}
+		);
+
+		jQuery.event.trigger( e, null, elem );
+
+		if ( e.isDefaultPrevented() ) {
+			event.preventDefault();
+		}
+	}
+};
+
+jQuery.removeEvent = document.removeEventListener ?
+	function( elem, type, handle ) {
+
+		// This "if" is needed for plain objects
+		if ( elem.removeEventListener ) {
+			elem.removeEventListener( type, handle );
+		}
+	} :
+	function( elem, type, handle ) {
+		var name = "on" + type;
+
+		if ( elem.detachEvent ) {
+
+			// #8545, #7054, preventing memory leaks for custom events in IE6-8
+			// detachEvent needed property on element, by name of that event,
+			// to properly expose it to GC
+			if ( typeof elem[ name ] === "undefined" ) {
+				elem[ name ] = null;
+			}
+
+			elem.detachEvent( name, handle );
+		}
+	};
+
+jQuery.Event = function( src, props ) {
+
+	// Allow instantiation without the 'new' keyword
+	if ( !( this instanceof jQuery.Event ) ) {
+		return new jQuery.Event( src, props );
+	}
+
+	// Event object
+	if ( src && src.type ) {
+		this.originalEvent = src;
+		this.type = src.type;
+
+		// Events bubbling up the document may have been marked as prevented
+		// by a handler lower down the tree; reflect the correct value.
+		this.isDefaultPrevented = src.defaultPrevented ||
+				src.defaultPrevented === undefined &&
+
+				// Support: IE < 9, Android < 4.0
+				src.returnValue === false ?
+			returnTrue :
+			returnFalse;
+
+	// Event type
+	} else {
+		this.type = src;
+	}
+
+	// Put explicitly provided properties onto the event object
+	if ( props ) {
+		jQuery.extend( this, props );
+	}
+
+	// Create a timestamp if incoming event doesn't have one
+	this.timeStamp = src && src.timeStamp || jQuery.now();
+
+	// Mark it as fixed
+	this[ jQuery.expando ] = true;
+};
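+
+// jQuery.Event can be constructed directly to build synthetic events, e.g.:
+//     var e = jQuery.Event( "keydown", { which: 13 } );
+//     jQuery( "input" ).first().trigger( e );
+// The instanceof guard above makes the `new` keyword optional.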
+
+// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding
+// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html
+jQuery.Event.prototype = {
+	constructor: jQuery.Event,
+	isDefaultPrevented: returnFalse,
+	isPropagationStopped: returnFalse,
+	isImmediatePropagationStopped: returnFalse,
+
+	preventDefault: function() {
+		var e = this.originalEvent;
+
+		this.isDefaultPrevented = returnTrue;
+		if ( !e ) {
+			return;
+		}
+
+		// If preventDefault exists, run it on the original event
+		if ( e.preventDefault ) {
+			e.preventDefault();
+
+		// Support: IE
+		// Otherwise set the returnValue property of the original event to false
+		} else {
+			e.returnValue = false;
+		}
+	},
+	stopPropagation: function() {
+		var e = this.originalEvent;
+
+		this.isPropagationStopped = returnTrue;
+
+		if ( !e || this.isSimulated ) {
+			return;
+		}
+
+		// If stopPropagation exists, run it on the original event
+		if ( e.stopPropagation ) {
+			e.stopPropagation();
+		}
+
+		// Support: IE
+		// Set the cancelBubble property of the original event to true
+		e.cancelBubble = true;
+	},
+	stopImmediatePropagation: function() {
+		var e = this.originalEvent;
+
+		this.isImmediatePropagationStopped = returnTrue;
+
+		if ( e && e.stopImmediatePropagation ) {
+			e.stopImmediatePropagation();
+		}
+
+		this.stopPropagation();
+	}
+};
+
+// Create mouseenter/leave events using mouseover/out and event-time checks
+// so that event delegation works in jQuery.
+// Do the same for pointerenter/pointerleave and pointerover/pointerout
+//
+// Support: Safari 7 only
+// Safari sends mouseenter too often; see:
+// https://code.google.com/p/chromium/issues/detail?id=470258
+// for the description of the bug (it existed in older Chrome versions as well).
+jQuery.each( {
+	mouseenter: "mouseover",
+	mouseleave: "mouseout",
+	pointerenter: "pointerover",
+	pointerleave: "pointerout"
+}, function( orig, fix ) {
+	jQuery.event.special[ orig ] = {
+		delegateType: fix,
+		bindType: fix,
+
+		handle: function( event ) {
+			var ret,
+				target = this,
+				related = event.relatedTarget,
+				handleObj = event.handleObj;
+
+			// For mouseenter/leave call the handler if related is outside the target.
+			// NB: No relatedTarget if the mouse left/entered the browser window
+			if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) {
+				event.type = handleObj.origType;
+				ret = handleObj.handler.apply( this, arguments );
+				event.type = fix;
+			}
+			return ret;
+		}
+	};
+} );
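+
+// This mapping is what makes delegated mouseenter/mouseleave work, e.g.:
+//     jQuery( "ul" ).on( "mouseenter", "li", handler );
+// actually binds a bubbling "mouseover" listener on the <ul>, and the
+// handle() wrapper above only invokes handler when relatedTarget lies
+// outside the matched <li>. (handler is a placeholder.)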
+
+// IE submit delegation
+if ( !support.submit ) {
+
+	jQuery.event.special.submit = {
+		setup: function() {
+
+			// Only need this for delegated form submit events
+			if ( jQuery.nodeName( this, "form" ) ) {
+				return false;
+			}
+
+			// Lazy-add a submit handler when a descendant form may potentially be submitted
+			jQuery.event.add( this, "click._submit keypress._submit", function( e ) {
+
+				// Node name check avoids a VML-related crash in IE (#9807)
+				var elem = e.target,
+					form = jQuery.nodeName( elem, "input" ) || jQuery.nodeName( elem, "button" ) ?
+
+						// Support: IE <=8
+						// We use jQuery.prop instead of elem.form
+						// to allow fixing the IE8 delegated submit issue (gh-2332)
+						// by 3rd party polyfills/workarounds.
+						jQuery.prop( elem, "form" ) :
+						undefined;
+
+				if ( form && !jQuery._data( form, "submit" ) ) {
+					jQuery.event.add( form, "submit._submit", function( event ) {
+						event._submitBubble = true;
+					} );
+					jQuery._data( form, "submit", true );
+				}
+			} );
+
+			// return undefined since we don't need an event listener
+		},
+
+		postDispatch: function( event ) {
+
+			// If form was submitted by the user, bubble the event up the tree
+			if ( event._submitBubble ) {
+				delete event._submitBubble;
+				if ( this.parentNode && !event.isTrigger ) {
+					jQuery.event.simulate( "submit", this.parentNode, event );
+				}
+			}
+		},
+
+		teardown: function() {
+
+			// Only need this for delegated form submit events
+			if ( jQuery.nodeName( this, "form" ) ) {
+				return false;
+			}
+
+			// Remove delegated handlers; cleanData eventually reaps submit handlers attached above
+			jQuery.event.remove( this, "._submit" );
+		}
+	};
+}
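+
+// With this shim in place, delegated submit behaves in oldIE as it does
+// elsewhere, e.g.:
+//     jQuery( document ).on( "submit", "form", handler ); // handler: placeholder
+// The click/keypress listener lazily attaches a real submit handler to any
+// form that might fire, and postDispatch then re-fires the event up the tree.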
+
+// IE change delegation and checkbox/radio fix
+if ( !support.change ) {
+
+	jQuery.event.special.change = {
+
+		setup: function() {
+
+			if ( rformElems.test( this.nodeName ) ) {
+
+				// IE doesn't fire change on a check/radio until blur; trigger it on click
+				// after a propertychange. Eat the blur-change in special.change.handle.
+				// This still fires onchange a second time for check/radio after blur.
+				if ( this.type === "checkbox" || this.type === "radio" ) {
+					jQuery.event.add( this, "propertychange._change", function( event ) {
+						if ( event.originalEvent.propertyName === "checked" ) {
+							this._justChanged = true;
+						}
+					} );
+					jQuery.event.add( this, "click._change", function( event ) {
+						if ( this._justChanged && !event.isTrigger ) {
+							this._justChanged = false;
+						}
+
+						// Allow triggered, simulated change events (#11500)
+						jQuery.event.simulate( "change", this, event );
+					} );
+				}
+				return false;
+			}
+
+			// Delegated event; lazy-add a change handler on descendant inputs
+			jQuery.event.add( this, "beforeactivate._change", function( e ) {
+				var elem = e.target;
+
+				if ( rformElems.test( elem.nodeName ) && !jQuery._data( elem, "change" ) ) {
+					jQuery.event.add( elem, "change._change", function( event ) {
+						if ( this.parentNode && !event.isSimulated && !event.isTrigger ) {
+							jQuery.event.simulate( "change", this.parentNode, event );
+						}
+					} );
+					jQuery._data( elem, "change", true );
+				}
+			} );
+		},
+
+		handle: function( event ) {
+			var elem = event.target;
+
+			// Swallow native change events from checkbox/radio, we already triggered them above
+			if ( this !== elem || event.isSimulated || event.isTrigger ||
+				( elem.type !== "radio" && elem.type !== "checkbox" ) ) {
+
+				return event.handleObj.handler.apply( this, arguments );
+			}
+		},
+
+		teardown: function() {
+			jQuery.event.remove( this, "._change" );
+
+			return !rformElems.test( this.nodeName );
+		}
+	};
+}
+
+// Support: Firefox
+// Firefox doesn't have focusin/focusout events
+// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787
+//
+// Support: Chrome, Safari
+// focusin/focusout events fire after the focus & blur events,
+// which is a spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order
+// Related ticket - https://code.google.com/p/chromium/issues/detail?id=449857
+if ( !support.focusin ) {
+	jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) {
+
+		// Attach a single capturing handler on the document while someone wants focusin/focusout
+		var handler = function( event ) {
+			jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) );
+		};
+
+		jQuery.event.special[ fix ] = {
+			setup: function() {
+				var doc = this.ownerDocument || this,
+					attaches = jQuery._data( doc, fix );
+
+				if ( !attaches ) {
+					doc.addEventListener( orig, handler, true );
+				}
+				jQuery._data( doc, fix, ( attaches || 0 ) + 1 );
+			},
+			teardown: function() {
+				var doc = this.ownerDocument || this,
+					attaches = jQuery._data( doc, fix ) - 1;
+
+				if ( !attaches ) {
+					doc.removeEventListener( orig, handler, true );
+					jQuery._removeData( doc, fix );
+				} else {
+					jQuery._data( doc, fix, attaches );
+				}
+			}
+		};
+	} );
+}
+
+jQuery.fn.extend( {
+
+	on: function( types, selector, data, fn ) {
+		return on( this, types, selector, data, fn );
+	},
+	one: function( types, selector, data, fn ) {
+		return on( this, types, selector, data, fn, 1 );
+	},
+	off: function( types, selector, fn ) {
+		var handleObj, type;
+		if ( types && types.preventDefault && types.handleObj ) {
+
+			// ( event )  dispatched jQuery.Event
+			handleObj = types.handleObj;
+			jQuery( types.delegateTarget ).off(
+				handleObj.namespace ?
+					handleObj.origType + "." + handleObj.namespace :
+					handleObj.origType,
+				handleObj.selector,
+				handleObj.handler
+			);
+			return this;
+		}
+		if ( typeof types === "object" ) {
+
+			// ( types-object [, selector] )
+			for ( type in types ) {
+				this.off( type, selector, types[ type ] );
+			}
+			return this;
+		}
+		if ( selector === false || typeof selector === "function" ) {
+
+			// ( types [, fn] )
+			fn = selector;
+			selector = undefined;
+		}
+		if ( fn === false ) {
+			fn = returnFalse;
+		}
+		return this.each( function() {
+			jQuery.event.remove( this, types, fn, selector );
+		} );
+	},
+
+	trigger: function( type, data ) {
+		return this.each( function() {
+			jQuery.event.trigger( type, data, this );
+		} );
+	},
+	triggerHandler: function( type, data ) {
+		var elem = this[ 0 ];
+		if ( elem ) {
+			return jQuery.event.trigger( type, data, elem, true );
+		}
+	}
+} );
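+
+// Removal patterns routed through .off() above include:
+//     elem.off( "click.myPlugin" );  // by namespace ("myPlugin": placeholder)
+//     elem.off( "click", "**" );     // delegated click handlers only
+//     elem.off( event );             // from inside a handler, via handleObj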
+
+
+var rinlinejQuery = / jQuery\d+="(?:null|\d+)"/g,
+	rnoshimcache = new RegExp( "<(?:" + nodeNames + ")[\\s/>]", "i" ),
+	rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:-]+)[^>]*)\/>/gi,
+
+	// Support: IE 10-11, Edge 10240+
+	// In IE/Edge using regex groups here causes severe slowdowns.
+	// See https://connect.microsoft.com/IE/feedback/details/1736512/
+	rnoInnerhtml = /<script|<style|<link/i,
+
+	// checked="checked" or checked
+	rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i,
+	rscriptTypeMasked = /^true\/(.*)/,
+	rcleanScript = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,
+	safeFragment = createSafeFragment( document ),
+	fragmentDiv = safeFragment.appendChild( document.createElement( "div" ) );
+
+// Support: IE<8
+// Manipulating tables requires a tbody
+function manipulationTarget( elem, content ) {
+	return jQuery.nodeName( elem, "table" ) &&
+		jQuery.nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ?
+
+		elem.getElementsByTagName( "tbody" )[ 0 ] ||
+			elem.appendChild( elem.ownerDocument.createElement( "tbody" ) ) :
+		elem;
+}
+
+// Replace/restore the type attribute of script elements for safe DOM manipulation
+function disableScript( elem ) {
+	elem.type = ( jQuery.find.attr( elem, "type" ) !== null ) + "/" + elem.type;
+	return elem;
+}
+function restoreScript( elem ) {
+	var match = rscriptTypeMasked.exec( elem.type );
+	if ( match ) {
+		elem.type = match[ 1 ];
+	} else {
+		elem.removeAttribute( "type" );
+	}
+	return elem;
+}
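+
+// The masking is purely textual: a script with type="text/javascript"
+// carries type="true/text/javascript" while parked in the fragment, and
+// restoreScript() strips the "true/" prefix again (or removes the
+// attribute entirely when there was none, i.e. the mask reads "false/...").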
+
+function cloneCopyEvent( src, dest ) {
+	if ( dest.nodeType !== 1 || !jQuery.hasData( src ) ) {
+		return;
+	}
+
+	var type, i, l,
+		oldData = jQuery._data( src ),
+		curData = jQuery._data( dest, oldData ),
+		events = oldData.events;
+
+	if ( events ) {
+		delete curData.handle;
+		curData.events = {};
+
+		for ( type in events ) {
+			for ( i = 0, l = events[ type ].length; i < l; i++ ) {
+				jQuery.event.add( dest, type, events[ type ][ i ] );
+			}
+		}
+	}
+
+	// make the cloned public data object a copy from the original
+	if ( curData.data ) {
+		curData.data = jQuery.extend( {}, curData.data );
+	}
+}
+
+function fixCloneNodeIssues( src, dest ) {
+	var nodeName, e, data;
+
+	// We do not need to do anything for non-Elements
+	if ( dest.nodeType !== 1 ) {
+		return;
+	}
+
+	nodeName = dest.nodeName.toLowerCase();
+
+	// IE6-8 copies events bound via attachEvent when using cloneNode.
+	if ( !support.noCloneEvent && dest[ jQuery.expando ] ) {
+		data = jQuery._data( dest );
+
+		for ( e in data.events ) {
+			jQuery.removeEvent( dest, e, data.handle );
+		}
+
+		// Event data gets referenced instead of copied if the expando gets copied too
+		dest.removeAttribute( jQuery.expando );
+	}
+
+	// IE blanks contents when cloning scripts, and tries to evaluate newly-set text
+	if ( nodeName === "script" && dest.text !== src.text ) {
+		disableScript( dest ).text = src.text;
+		restoreScript( dest );
+
+	// IE6-10 improperly clones children of object elements using classid.
+	// IE10 throws NoModificationAllowedError if parent is null, #12132.
+	} else if ( nodeName === "object" ) {
+		if ( dest.parentNode ) {
+			dest.outerHTML = src.outerHTML;
+		}
+
+		// This path appears unavoidable for IE9. When cloning an object
+		// element in IE9, the outerHTML strategy above is not sufficient.
+		// If the src has innerHTML and the destination does not,
+		// copy the src.innerHTML into the dest.innerHTML. #10324
+		if ( support.html5Clone && ( src.innerHTML && !jQuery.trim( dest.innerHTML ) ) ) {
+			dest.innerHTML = src.innerHTML;
+		}
+
+	} else if ( nodeName === "input" && rcheckableType.test( src.type ) ) {
+
+		// IE6-8 fails to persist the checked state of a cloned checkbox
+		// or radio button. Worse, IE6-7 fail to give the cloned element
+		// a checked appearance if the defaultChecked value isn't also set
+
+		dest.defaultChecked = dest.checked = src.checked;
+
+		// IE6-7 get confused and end up setting the value of a cloned
+		// checkbox/radio button to an empty string instead of "on"
+		if ( dest.value !== src.value ) {
+			dest.value = src.value;
+		}
+
+	// IE6-8 fails to return the selected option to the default selected
+	// state when cloning options
+	} else if ( nodeName === "option" ) {
+		dest.defaultSelected = dest.selected = src.defaultSelected;
+
+	// IE6-8 fails to set the defaultValue to the correct value when
+	// cloning other types of input fields
+	} else if ( nodeName === "input" || nodeName === "textarea" ) {
+		dest.defaultValue = src.defaultValue;
+	}
+}
+
+function domManip( collection, args, callback, ignored ) {
+
+	// Flatten any nested arrays
+	args = concat.apply( [], args );
+
+	var first, node, hasScripts,
+		scripts, doc, fragment,
+		i = 0,
+		l = collection.length,
+		iNoClone = l - 1,
+		value = args[ 0 ],
+		isFunction = jQuery.isFunction( value );
+
+	// We can't cloneNode fragments that contain checked, in WebKit
+	if ( isFunction ||
+			( l > 1 && typeof value === "string" &&
+				!support.checkClone && rchecked.test( value ) ) ) {
+		return collection.each( function( index ) {
+			var self = collection.eq( index );
+			if ( isFunction ) {
+				args[ 0 ] = value.call( this, index, self.html() );
+			}
+			domManip( self, args, callback, ignored );
+		} );
+	}
+
+	if ( l ) {
+		fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored );
+		first = fragment.firstChild;
+
+		if ( fragment.childNodes.length === 1 ) {
+			fragment = first;
+		}
+
+		// Require either new content or an interest in ignored elements to invoke the callback
+		if ( first || ignored ) {
+			scripts = jQuery.map( getAll( fragment, "script" ), disableScript );
+			hasScripts = scripts.length;
+
+			// Use the original fragment for the last item
+			// instead of the first because it can end up
+			// being emptied incorrectly in certain situations (#8070).
+			for ( ; i < l; i++ ) {
+				node = fragment;
+
+				if ( i !== iNoClone ) {
+					node = jQuery.clone( node, true, true );
+
+					// Keep references to cloned scripts for later restoration
+					if ( hasScripts ) {
+
+						// Support: Android<4.1, PhantomJS<2
+						// push.apply(_, arraylike) throws on ancient WebKit
+						jQuery.merge( scripts, getAll( node, "script" ) );
+					}
+				}
+
+				callback.call( collection[ i ], node, i );
+			}
+
+			if ( hasScripts ) {
+				doc = scripts[ scripts.length - 1 ].ownerDocument;
+
+				// Reenable scripts
+				jQuery.map( scripts, restoreScript );
+
+				// Evaluate executable scripts on first document insertion
+				for ( i = 0; i < hasScripts; i++ ) {
+					node = scripts[ i ];
+					if ( rscriptType.test( node.type || "" ) &&
+						!jQuery._data( node, "globalEval" ) &&
+						jQuery.contains( doc, node ) ) {
+
+						if ( node.src ) {
+
+							// Optional AJAX dependency, but won't run scripts if not present
+							if ( jQuery._evalUrl ) {
+								jQuery._evalUrl( node.src );
+							}
+						} else {
+							jQuery.globalEval(
+								( node.text || node.textContent || node.innerHTML || "" )
+									.replace( rcleanScript, "" )
+							);
+						}
+					}
+				}
+			}
+
+			// Fix #11809: Avoid leaking memory
+			fragment = first = null;
+		}
+	}
+
+	return collection;
+}
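+
+// The isFunction branch at the top of domManip() is what powers the
+// callback form of the setters built on it, e.g.:
+//     jQuery( "p" ).append( function( i, oldHtml ) {
+//         return "<b>" + i + "</b>";
+//     } );
+// where each element in the set receives its own invocation.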
+
+function remove( elem, selector, keepData ) {
+	var node,
+		elems = selector ? jQuery.filter( selector, elem ) : elem,
+		i = 0;
+
+	for ( ; ( node = elems[ i ] ) != null; i++ ) {
+
+		if ( !keepData && node.nodeType === 1 ) {
+			jQuery.cleanData( getAll( node ) );
+		}
+
+		if ( node.parentNode ) {
+			if ( keepData && jQuery.contains( node.ownerDocument, node ) ) {
+				setGlobalEval( getAll( node, "script" ) );
+			}
+			node.parentNode.removeChild( node );
+		}
+	}
+
+	return elem;
+}
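+
+// The keepData flag is the only difference between the two public methods
+// defined below:
+//     jQuery( "#box" ).remove();  // drops nodes and their data/handlers
+//     jQuery( "#box" ).detach();  // drops nodes, keeps data for re-insertion
+// (#box is a placeholder selector.)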
+
+jQuery.extend( {
+	htmlPrefilter: function( html ) {
+		return html.replace( rxhtmlTag, "<$1></$2>" );
+	},
+
+	clone: function( elem, dataAndEvents, deepDataAndEvents ) {
+		var destElements, node, clone, i, srcElements,
+			inPage = jQuery.contains( elem.ownerDocument, elem );
+
+		if ( support.html5Clone || jQuery.isXMLDoc( elem ) ||
+			!rnoshimcache.test( "<" + elem.nodeName + ">" ) ) {
+
+			clone = elem.cloneNode( true );
+
+		// IE<=8 does not properly clone detached, unknown element nodes
+		} else {
+			fragmentDiv.innerHTML = elem.outerHTML;
+			fragmentDiv.removeChild( clone = fragmentDiv.firstChild );
+		}
+
+		if ( ( !support.noCloneEvent || !support.noCloneChecked ) &&
+				( elem.nodeType === 1 || elem.nodeType === 11 ) && !jQuery.isXMLDoc( elem ) ) {
+
+			// We eschew Sizzle here for performance reasons: http://jsperf.com/getall-vs-sizzle/2
+			destElements = getAll( clone );
+			srcElements = getAll( elem );
+
+			// Fix all IE cloning issues
+			for ( i = 0; ( node = srcElements[ i ] ) != null; ++i ) {
+
+				// Ensure that the destination node is not null; Fixes #9587
+				if ( destElements[ i ] ) {
+					fixCloneNodeIssues( node, destElements[ i ] );
+				}
+			}
+		}
+
+		// Copy the events from the original to the clone
+		if ( dataAndEvents ) {
+			if ( deepDataAndEvents ) {
+				srcElements = srcElements || getAll( elem );
+				destElements = destElements || getAll( clone );
+
+				for ( i = 0; ( node = srcElements[ i ] ) != null; i++ ) {
+					cloneCopyEvent( node, destElements[ i ] );
+				}
+			} else {
+				cloneCopyEvent( elem, clone );
+			}
+		}
+
+		// Preserve script evaluation history
+		destElements = getAll( clone, "script" );
+		if ( destElements.length > 0 ) {
+			setGlobalEval( destElements, !inPage && getAll( elem, "script" ) );
+		}
+
+		destElements = srcElements = node = null;
+
+		// Return the cloned set
+		return clone;
+	},
+
+	cleanData: function( elems, /* internal */ forceAcceptData ) {
+		var elem, type, id, data,
+			i = 0,
+			internalKey = jQuery.expando,
+			cache = jQuery.cache,
+			attributes = support.attributes,
+			special = jQuery.event.special;
+
+		for ( ; ( elem = elems[ i ] ) != null; i++ ) {
+			if ( forceAcceptData || acceptData( elem ) ) {
+
+				id = elem[ internalKey ];
+				data = id && cache[ id ];
+
+				if ( data ) {
+					if ( data.events ) {
+						for ( type in data.events ) {
+							if ( special[ type ] ) {
+								jQuery.event.remove( elem, type );
+
+							// This is a shortcut to avoid jQuery.event.remove's overhead
+							} else {
+								jQuery.removeEvent( elem, type, data.handle );
+							}
+						}
+					}
+
+					// Remove cache only if it was not already removed by jQuery.event.remove
+					if ( cache[ id ] ) {
+
+						delete cache[ id ];
+
+						// Support: IE<9
+						// IE does not allow us to delete expando properties from nodes
+						// IE creates expando attributes along with the property
+						// IE does not have a removeAttribute function on Document nodes
+						if ( !attributes && typeof elem.removeAttribute !== "undefined" ) {
+							elem.removeAttribute( internalKey );
+
+						// Webkit & Blink performance suffers when deleting properties
+						// from DOM nodes, so set to undefined instead
+						// https://code.google.com/p/chromium/issues/detail?id=378607
+						} else {
+							elem[ internalKey ] = undefined;
+						}
+
+						deletedIds.push( id );
+					}
+				}
+			}
+		}
+	}
+} );
+
+jQuery.fn.extend( {
+
+	// Keep domManip exposed until 3.0 (gh-2225)
+	domManip: domManip,
+
+	detach: function( selector ) {
+		return remove( this, selector, true );
+	},
+
+	remove: function( selector ) {
+		return remove( this, selector );
+	},
+
+	text: function( value ) {
+		return access( this, function( value ) {
+			return value === undefined ?
+				jQuery.text( this ) :
+				this.empty().append(
+					( this[ 0 ] && this[ 0 ].ownerDocument || document ).createTextNode( value )
+				);
+		}, null, value, arguments.length );
+	},
+
+	append: function() {
+		return domManip( this, arguments, function( elem ) {
+			if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
+				var target = manipulationTarget( this, elem );
+				target.appendChild( elem );
+			}
+		} );
+	},
+
+	prepend: function() {
+		return domManip( this, arguments, function( elem ) {
+			if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
+				var target = manipulationTarget( this, elem );
+				target.insertBefore( elem, target.firstChild );
+			}
+		} );
+	},
+
+	before: function() {
+		return domManip( this, arguments, function( elem ) {
+			if ( this.parentNode ) {
+				this.parentNode.insertBefore( elem, this );
+			}
+		} );
+	},
+
+	after: function() {
+		return domManip( this, arguments, function( elem ) {
+			if ( this.parentNode ) {
+				this.parentNode.insertBefore( elem, this.nextSibling );
+			}
+		} );
+	},
+
+	empty: function() {
+		var elem,
+			i = 0;
+
+		for ( ; ( elem = this[ i ] ) != null; i++ ) {
+
+			// Remove element nodes and prevent memory leaks
+			if ( elem.nodeType === 1 ) {
+				jQuery.cleanData( getAll( elem, false ) );
+			}
+
+			// Remove any remaining nodes
+			while ( elem.firstChild ) {
+				elem.removeChild( elem.firstChild );
+			}
+
+			// If this is a select, ensure that it displays empty (#12336)
+			// Support: IE<9
+			if ( elem.options && jQuery.nodeName( elem, "select" ) ) {
+				elem.options.length = 0;
+			}
+		}
+
+		return this;
+	},
+
+	clone: function( dataAndEvents, deepDataAndEvents ) {
+		dataAndEvents = dataAndEvents == null ? false : dataAndEvents;
+		deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents;
+
+		return this.map( function() {
+			return jQuery.clone( this, dataAndEvents, deepDataAndEvents );
+		} );
+	},
+
+	html: function( value ) {
+		return access( this, function( value ) {
+			var elem = this[ 0 ] || {},
+				i = 0,
+				l = this.length;
+
+			if ( value === undefined ) {
+				return elem.nodeType === 1 ?
+					elem.innerHTML.replace( rinlinejQuery, "" ) :
+					undefined;
+			}
+
+			// See if we can take a shortcut and just use innerHTML
+			if ( typeof value === "string" && !rnoInnerhtml.test( value ) &&
+				( support.htmlSerialize || !rnoshimcache.test( value )  ) &&
+				( support.leadingWhitespace || !rleadingWhitespace.test( value ) ) &&
+				!wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) {
+
+				value = jQuery.htmlPrefilter( value );
+
+				try {
+					for ( ; i < l; i++ ) {
+
+						// Remove element nodes and prevent memory leaks
+						elem = this[ i ] || {};
+						if ( elem.nodeType === 1 ) {
+							jQuery.cleanData( getAll( elem, false ) );
+							elem.innerHTML = value;
+						}
+					}
+
+					elem = 0;
+
+				// If using innerHTML throws an exception, use the fallback method
+				} catch ( e ) {}
+			}
+
+			if ( elem ) {
+				this.empty().append( value );
+			}
+		}, null, value, arguments.length );
+	},
+
+	replaceWith: function() {
+		var ignored = [];
+
+		// Make the changes, replacing each non-ignored context element with the new content
+		return domManip( this, arguments, function( elem ) {
+			var parent = this.parentNode;
+
+			if ( jQuery.inArray( this, ignored ) < 0 ) {
+				jQuery.cleanData( getAll( this ) );
+				if ( parent ) {
+					parent.replaceChild( elem, this );
+				}
+			}
+
+		// Force callback invocation
+		}, ignored );
+	}
+} );
+
+jQuery.each( {
+	appendTo: "append",
+	prependTo: "prepend",
+	insertBefore: "before",
+	insertAfter: "after",
+	replaceAll: "replaceWith"
+}, function( name, original ) {
+	jQuery.fn[ name ] = function( selector ) {
+		var elems,
+			i = 0,
+			ret = [],
+			insert = jQuery( selector ),
+			last = insert.length - 1;
+
+		for ( ; i <= last; i++ ) {
+			elems = i === last ? this : this.clone( true );
+			jQuery( insert[ i ] )[ original ]( elems );
+
+			// Modern browsers can apply jQuery collections as arrays, but oldIE needs a .get()
+			push.apply( ret, elems.get() );
+		}
+
+		return this.pushStack( ret );
+	};
+} );
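+
+// Each generated method is the target/content reversal of its original:
+//     jQuery( "<li/>" ).appendTo( "ul" );
+// inserts the same nodes as
+//     jQuery( "ul" ).append( "<li/>" );
+// but returns the inserted (per-target cloned) elements instead of "ul".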
+
+
+var iframe,
+	elemdisplay = {
+
+		// Support: Firefox
+		// We have to pre-define these values for FF (#10227)
+		HTML: "block",
+		BODY: "block"
+	};
+
+/**
+ * Retrieve the actual display of an element
+ * @param {String} name nodeName of the element
+ * @param {Object} doc Document object
+ */
+
+// Called only from within defaultDisplay
+function actualDisplay( name, doc ) {
+	var elem = jQuery( doc.createElement( name ) ).appendTo( doc.body ),
+
+		display = jQuery.css( elem[ 0 ], "display" );
+
+	// We don't have any data stored on the element,
+	// so use "detach" method as fast way to get rid of the element
+	elem.detach();
+
+	return display;
+}
+
+/**
+ * Try to determine the default display value of an element
+ * @param {String} nodeName
+ */
+function defaultDisplay( nodeName ) {
+	var doc = document,
+		display = elemdisplay[ nodeName ];
+
+	if ( !display ) {
+		display = actualDisplay( nodeName, doc );
+
+		// If the simple way fails, read from inside an iframe
+		if ( display === "none" || !display ) {
+
+			// Use the already-created iframe if possible
+			iframe = ( iframe || jQuery( "<iframe frameborder='0' width='0' height='0'/>" ) )
+				.appendTo( doc.documentElement );
+
+			// Always write a new HTML skeleton so Webkit and Firefox don't choke on reuse
+			doc = ( iframe[ 0 ].contentWindow || iframe[ 0 ].contentDocument ).document;
+
+			// Support: IE
+			doc.write();
+			doc.close();
+
+			display = actualDisplay( nodeName, doc );
+			iframe.detach();
+		}
+
+		// Store the correct default display
+		elemdisplay[ nodeName ] = display;
+	}
+
+	return display;
+}
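+
+// Used by .show() to restore elements hidden via display:none, e.g.:
+//     defaultDisplay( "div" );   // "block"
+//     defaultDisplay( "span" );  // "inline"
+// The iframe fallback covers documents whose own stylesheets force a
+// nodeName's display to "none".
+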
+var rmargin = ( /^margin/ );
+
+var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" );
+
+var swap = function( elem, options, callback, args ) {
+	var ret, name,
+		old = {};
+
+	// Remember the old values, and insert the new ones
+	for ( name in options ) {
+		old[ name ] = elem.style[ name ];
+		elem.style[ name ] = options[ name ];
+	}
+
+	ret = callback.apply( elem, args || [] );
+
+	// Revert the old values
+	for ( name in options ) {
+		elem.style[ name ] = old[ name ];
+	}
+
+	return ret;
+};
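+
+// swap() is used to take measurements under temporary styles, e.g. reading
+// the width of a display:none element (elem is a placeholder DOM element;
+// the style object mirrors jQuery's usual cssShow set):
+//     var w = swap( elem, {
+//         position: "absolute", visibility: "hidden", display: "block"
+//     }, function() {
+//         return elem.offsetWidth;
+//     } );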
+
+
+var documentElement = document.documentElement;
+
+
+
+( function() {
+	var pixelPositionVal, pixelMarginRightVal, boxSizingReliableVal,
+		reliableHiddenOffsetsVal, reliableMarginRightVal, reliableMarginLeftVal,
+		container = document.createElement( "div" ),
+		div = document.createElement( "div" );
+
+	// Finish early in limited (non-browser) environments
+	if ( !div.style ) {
+		return;
+	}
+
+	div.style.cssText = "float:left;opacity:.5";
+
+	// Support: IE<9
+	// Make sure that element opacity exists (as opposed to filter)
+	support.opacity = div.style.opacity === "0.5";
+
+	// Verify style float existence
+	// (IE uses styleFloat instead of cssFloat)
+	support.cssFloat = !!div.style.cssFloat;
+
+	div.style.backgroundClip = "content-box";
+	div.cloneNode( true ).style.backgroundClip = "";
+	support.clearCloneStyle = div.style.backgroundClip === "content-box";
+
+	container = document.createElement( "div" );
+	container.style.cssText = "border:0;width:8px;height:0;top:0;left:-9999px;" +
+		"padding:0;margin-top:1px;position:absolute";
+	div.innerHTML = "";
+	container.appendChild( div );
+
+	// Support: Firefox<29, Android 2.3
+	// Vendor-prefix box-sizing
+	support.boxSizing = div.style.boxSizing === "" || div.style.MozBoxSizing === "" ||
+		div.style.WebkitBoxSizing === "";
+
+	jQuery.extend( support, {
+		reliableHiddenOffsets: function() {
+			if ( pixelPositionVal == null ) {
+				computeStyleTests();
+			}
+			return reliableHiddenOffsetsVal;
+		},
+
+		boxSizingReliable: function() {
+
+			// We're checking for pixelPositionVal here instead of boxSizingReliableVal
+			// since that compresses better and they're computed together anyway.
+			if ( pixelPositionVal == null ) {
+				computeStyleTests();
+			}
+			return boxSizingReliableVal;
+		},
+
+		pixelMarginRight: function() {
+
+			// Support: Android 4.0-4.3
+			if ( pixelPositionVal == null ) {
+				computeStyleTests();
+			}
+			return pixelMarginRightVal;
+		},
+
+		pixelPosition: function() {
+			if ( pixelPositionVal == null ) {
+				computeStyleTests();
+			}
+			return pixelPositionVal;
+		},
+
+		reliableMarginRight: function() {
+
+			// Support: Android 2.3
+			if ( pixelPositionVal == null ) {
+				computeStyleTests();
+			}
+			return reliableMarginRightVal;
+		},
+
+		reliableMarginLeft: function() {
+
+			// Support: IE <=8 only, Android 4.0 - 4.3 only, Firefox <=3 - 37
+			if ( pixelPositionVal == null ) {
+				computeStyleTests();
+			}
+			return reliableMarginLeftVal;
+		}
+	} );
+
+	function computeStyleTests() {
+		var contents, divStyle,
+			documentElement = document.documentElement;
+
+		// Setup
+		documentElement.appendChild( container );
+
+		div.style.cssText =
+
+			// Support: Android 2.3
+			// Vendor-prefix box-sizing
+			"-webkit-box-sizing:border-box;box-sizing:border-box;" +
+			"position:relative;display:block;" +
+			"margin:auto;border:1px;padding:1px;" +
+			"top:1%;width:50%";
+
+		// Support: IE<9
+		// Assume reasonable values in the absence of getComputedStyle
+		pixelPositionVal = boxSizingReliableVal = reliableMarginLeftVal = false;
+		pixelMarginRightVal = reliableMarginRightVal = true;
+
+		// Check for getComputedStyle so that this code is not run in IE<9.
+		if ( window.getComputedStyle ) {
+			divStyle = window.getComputedStyle( div );
+			pixelPositionVal = ( divStyle || {} ).top !== "1%";
+			reliableMarginLeftVal = ( divStyle || {} ).marginLeft === "2px";
+			boxSizingReliableVal = ( divStyle || { width: "4px" } ).width === "4px";
+
+			// Support: Android 4.0 - 4.3 only
+			// Some styles come back with percentage values, even though they shouldn't
+			div.style.marginRight = "50%";
+			pixelMarginRightVal = ( divStyle || { marginRight: "4px" } ).marginRight === "4px";
+
+			// Support: Android 2.3 only
+			// Div with explicit width and no margin-right incorrectly
+			// gets computed margin-right based on width of container (#3333)
+			// WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right
+			contents = div.appendChild( document.createElement( "div" ) );
+
+			// Reset CSS: box-sizing; display; margin; border; padding
+			contents.style.cssText = div.style.cssText =
+
+				// Support: Android 2.3
+				// Vendor-prefix box-sizing
+				"-webkit-box-sizing:content-box;-moz-box-sizing:content-box;" +
+				"box-sizing:content-box;display:block;margin:0;border:0;padding:0";
+			contents.style.marginRight = contents.style.width = "0";
+			div.style.width = "1px";
+
+			reliableMarginRightVal =
+				!parseFloat( ( window.getComputedStyle( contents ) || {} ).marginRight );
+
+			div.removeChild( contents );
+		}
+
+		// Support: IE6-8
+		// First check that getClientRects works as expected
+		// Check if table cells still have offsetWidth/Height when they are set
+		// to display:none and there are still other visible table cells in a
+		// table row; if so, offsetWidth/Height are not reliable for use when
+		// determining if an element has been hidden directly using
+		// display:none (it is still safe to use offsets if a parent element is
+		// hidden; don safety goggles and see bug #4512 for more information).
+		div.style.display = "none";
+		reliableHiddenOffsetsVal = div.getClientRects().length === 0;
+		if ( reliableHiddenOffsetsVal ) {
+			div.style.display = "";
+			div.innerHTML = "<table><tr><td></td><td>t</td></tr></table>";
+			div.childNodes[ 0 ].style.borderCollapse = "separate";
+			contents = div.getElementsByTagName( "td" );
+			contents[ 0 ].style.cssText = "margin:0;border:0;padding:0;display:none";
+			reliableHiddenOffsetsVal = contents[ 0 ].offsetHeight === 0;
+			if ( reliableHiddenOffsetsVal ) {
+				contents[ 0 ].style.display = "";
+				contents[ 1 ].style.display = "none";
+				reliableHiddenOffsetsVal = contents[ 0 ].offsetHeight === 0;
+			}
+		}
+
+		// Teardown
+		documentElement.removeChild( container );
+	}
+
+} )();
+
+
+var getStyles, curCSS,
+	rposition = /^(top|right|bottom|left)$/;
+
+if ( window.getComputedStyle ) {
+	getStyles = function( elem ) {
+
+		// Support: IE<=11+, Firefox<=30+ (#15098, #14150)
+		// IE throws on elements created in popups
+		// FF meanwhile throws on frame elements through "defaultView.getComputedStyle"
+		var view = elem.ownerDocument.defaultView;
+
+		if ( !view || !view.opener ) {
+			view = window;
+		}
+
+		return view.getComputedStyle( elem );
+	};
+
+	curCSS = function( elem, name, computed ) {
+		var width, minWidth, maxWidth, ret,
+			style = elem.style;
+
+		computed = computed || getStyles( elem );
+
+		// getPropertyValue is only needed for .css('filter') in IE9, see #12537
+		ret = computed ? computed.getPropertyValue( name ) || computed[ name ] : undefined;
+
+		// Support: Opera 12.1x only
+		// Fall back to style even without computed
+		// computed is undefined for elems on document fragments
+		if ( ( ret === "" || ret === undefined ) && !jQuery.contains( elem.ownerDocument, elem ) ) {
+			ret = jQuery.style( elem, name );
+		}
+
+		if ( computed ) {
+
+			// A tribute to the "awesome hack by Dean Edwards"
+			// Chrome < 17 and Safari 5.0 use "computed value"
+			// instead of "used value" for margin-right
+			// Safari 5.1.7 (at least) returns percentage for a larger set of values,
+			// but width seems to be reliably pixels
+			// this is against the CSSOM draft spec:
+			// http://dev.w3.org/csswg/cssom/#resolved-values
+			if ( !support.pixelMarginRight() && rnumnonpx.test( ret ) && rmargin.test( name ) ) {
+
+				// Remember the original values
+				width = style.width;
+				minWidth = style.minWidth;
+				maxWidth = style.maxWidth;
+
+				// Put in the new values to get a computed value out
+				style.minWidth = style.maxWidth = style.width = ret;
+				ret = computed.width;
+
+				// Revert the changed values
+				style.width = width;
+				style.minWidth = minWidth;
+				style.maxWidth = maxWidth;
+			}
+		}
+
+		// Support: IE
+		// IE returns zIndex value as an integer.
+		return ret === undefined ?
+			ret :
+			ret + "";
+	};
+} else if ( documentElement.currentStyle ) {
+	getStyles = function( elem ) {
+		return elem.currentStyle;
+	};
+
+	curCSS = function( elem, name, computed ) {
+		var left, rs, rsLeft, ret,
+			style = elem.style;
+
+		computed = computed || getStyles( elem );
+		ret = computed ? computed[ name ] : undefined;
+
+		// Avoid setting ret to empty string here
+		// so we don't default to auto
+		if ( ret == null && style && style[ name ] ) {
+			ret = style[ name ];
+		}
+
+		// From the awesome hack by Dean Edwards
+		// http://erik.eae.net/archives/2007/07/27/18.54.15/#comment-102291
+
+		// If we're not dealing with a regular pixel number
+		// but a number that has a weird ending, we need to convert it to pixels
+		// but not position css attributes, as those are
+		// proportional to the parent element instead
+		// and we can't measure the parent instead because it
+		// might trigger a "stacking dolls" problem
+		if ( rnumnonpx.test( ret ) && !rposition.test( name ) ) {
+
+			// Remember the original values
+			left = style.left;
+			rs = elem.runtimeStyle;
+			rsLeft = rs && rs.left;
+
+			// Put in the new values to get a computed value out
+			if ( rsLeft ) {
+				rs.left = elem.currentStyle.left;
+			}
+			style.left = name === "fontSize" ? "1em" : ret;
+			ret = style.pixelLeft + "px";
+
+			// Revert the changed values
+			style.left = left;
+			if ( rsLeft ) {
+				rs.left = rsLeft;
+			}
+		}
+
+		// Support: IE
+		// IE returns zIndex value as an integer.
+		return ret === undefined ?
+			ret :
+			ret + "" || "auto";
+	};
+}
+
+
+
+
+function addGetHookIf( conditionFn, hookFn ) {
+
+	// Define the hook; we'll check on the first run whether it's really needed.
+	return {
+		get: function() {
+			if ( conditionFn() ) {
+
+				// Hook not needed (or it's not possible to use it due
+				// to missing dependency), remove it.
+				delete this.get;
+				return;
+			}
+
+			// Hook needed; redefine it so that the support test is not executed again.
+			return ( this.get = hookFn ).apply( this, arguments );
+		}
+	};
+}
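+
+// For illustration, the lazy-hook pattern addGetHookIf enables; supportTest
+// and correctedGetter below are hypothetical placeholders:
+//
+//   jQuery.cssHooks.someProp = addGetHookIf(
+//       supportTest,      // returns true when the native value is reliable
+//       correctedGetter   // installed permanently on the first failing check
+//   );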
+
+
+var
+
+	ralpha = /alpha\([^)]*\)/i,
+	ropacity = /opacity\s*=\s*([^)]*)/i,
+
+	// swappable if display is none or starts with table except
+	// "table", "table-cell", or "table-caption"
+	// see here for display values:
+	// https://developer.mozilla.org/en-US/docs/CSS/display
+	rdisplayswap = /^(none|table(?!-c[ea]).+)/,
+	rnumsplit = new RegExp( "^(" + pnum + ")(.*)$", "i" ),
+
+	cssShow = { position: "absolute", visibility: "hidden", display: "block" },
+	cssNormalTransform = {
+		letterSpacing: "0",
+		fontWeight: "400"
+	},
+
+	cssPrefixes = [ "Webkit", "O", "Moz", "ms" ],
+	emptyStyle = document.createElement( "div" ).style;
+
+
+// return a css property mapped to a potentially vendor prefixed property
+function vendorPropName( name ) {
+
+	// shortcut for names that are not vendor prefixed
+	if ( name in emptyStyle ) {
+		return name;
+	}
+
+	// check for vendor prefixed names
+	var capName = name.charAt( 0 ).toUpperCase() + name.slice( 1 ),
+		i = cssPrefixes.length;
+
+	while ( i-- ) {
+		name = cssPrefixes[ i ] + capName;
+		if ( name in emptyStyle ) {
+			return name;
+		}
+	}
+}
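+
+// For illustration: vendorPropName( "userSelect" ) returns "userSelect" when
+// the browser supports it unprefixed; otherwise the prefixes are tried in
+// reverse order ("ms", "Moz", "O", "Webkit"), and undefined is returned if
+// no variant exists on emptyStyle.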
+
+function showHide( elements, show ) {
+	var display, elem, hidden,
+		values = [],
+		index = 0,
+		length = elements.length;
+
+	for ( ; index < length; index++ ) {
+		elem = elements[ index ];
+		if ( !elem.style ) {
+			continue;
+		}
+
+		values[ index ] = jQuery._data( elem, "olddisplay" );
+		display = elem.style.display;
+		if ( show ) {
+
+			// Reset the inline display of this element to learn if it is
+			// being hidden by cascaded rules or not
+			if ( !values[ index ] && display === "none" ) {
+				elem.style.display = "";
+			}
+
+			// Set elements which have been overridden with display: none
+			// in a stylesheet to whatever the default browser style is
+			// for such an element
+			if ( elem.style.display === "" && isHidden( elem ) ) {
+				values[ index ] =
+					jQuery._data( elem, "olddisplay", defaultDisplay( elem.nodeName ) );
+			}
+		} else {
+			hidden = isHidden( elem );
+
+			if ( display && display !== "none" || !hidden ) {
+				jQuery._data(
+					elem,
+					"olddisplay",
+					hidden ? display : jQuery.css( elem, "display" )
+				);
+			}
+		}
+	}
+
+	// Set the display of most of the elements in a second loop
+	// to avoid the constant reflow
+	for ( index = 0; index < length; index++ ) {
+		elem = elements[ index ];
+		if ( !elem.style ) {
+			continue;
+		}
+		if ( !show || elem.style.display === "none" || elem.style.display === "" ) {
+			elem.style.display = show ? values[ index ] || "" : "none";
+		}
+	}
+
+	return elements;
+}
+
+function setPositiveNumber( elem, value, subtract ) {
+	var matches = rnumsplit.exec( value );
+	return matches ?
+
+		// Guard against undefined "subtract", e.g., when used as in cssHooks
+		Math.max( 0, matches[ 1 ] - ( subtract || 0 ) ) + ( matches[ 2 ] || "px" ) :
+		value;
+}
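+
+// For illustration: setPositiveNumber( elem, "20.5px", 3 ) yields "17.5px";
+// the result is clamped at 0, and non-numeric values such as "auto" pass
+// through unchanged.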
+
+function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) {
+	var i = extra === ( isBorderBox ? "border" : "content" ) ?
+
+		// If we already have the right measurement, avoid augmentation
+		4 :
+
+		// Otherwise initialize for horizontal or vertical properties
+		name === "width" ? 1 : 0,
+
+		val = 0;
+
+	for ( ; i < 4; i += 2 ) {
+
+		// both box models exclude margin, so add it if we want it
+		if ( extra === "margin" ) {
+			val += jQuery.css( elem, extra + cssExpand[ i ], true, styles );
+		}
+
+		if ( isBorderBox ) {
+
+			// border-box includes padding, so remove it if we want content
+			if ( extra === "content" ) {
+				val -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles );
+			}
+
+			// at this point, extra is neither border nor margin, so remove border
+			if ( extra !== "margin" ) {
+				val -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles );
+			}
+		} else {
+
+			// at this point, extra isn't content, so add padding
+			val += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles );
+
+			// at this point, extra is neither content nor padding, so add border
+			if ( extra !== "padding" ) {
+				val += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles );
+			}
+		}
+	}
+
+	return val;
+}
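+
+// For illustration of the arithmetic above: for a content-box element,
+// requesting the "border" measurement adds the padding and border widths on
+// the relevant axis to the content size; for a border-box element,
+// requesting "content" subtracts them from the offset size instead.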
+
+function getWidthOrHeight( elem, name, extra ) {
+
+	// Start with offset property, which is equivalent to the border-box value
+	var valueIsBorderBox = true,
+		val = name === "width" ? elem.offsetWidth : elem.offsetHeight,
+		styles = getStyles( elem ),
+		isBorderBox = support.boxSizing &&
+			jQuery.css( elem, "boxSizing", false, styles ) === "border-box";
+
+	// some non-html elements return undefined for offsetWidth, so check for null/undefined
+	// svg - https://bugzilla.mozilla.org/show_bug.cgi?id=649285
+	// MathML - https://bugzilla.mozilla.org/show_bug.cgi?id=491668
+	if ( val <= 0 || val == null ) {
+
+		// Fall back to computed then uncomputed css if necessary
+		val = curCSS( elem, name, styles );
+		if ( val < 0 || val == null ) {
+			val = elem.style[ name ];
+		}
+
+		// Computed unit is not pixels. Stop here and return.
+		if ( rnumnonpx.test( val ) ) {
+			return val;
+		}
+
+		// We need the check for style in case a browser that returns unreliable
+		// values for getComputedStyle silently falls back to the reliable elem.style
+		valueIsBorderBox = isBorderBox &&
+			( support.boxSizingReliable() || val === elem.style[ name ] );
+
+		// Normalize "", auto, and prepare for extra
+		val = parseFloat( val ) || 0;
+	}
+
+	// use the active box-sizing model to add/subtract irrelevant styles
+	return ( val +
+		augmentWidthOrHeight(
+			elem,
+			name,
+			extra || ( isBorderBox ? "border" : "content" ),
+			valueIsBorderBox,
+			styles
+		)
+	) + "px";
+}
+
+jQuery.extend( {
+
+	// Add in style property hooks for overriding the default
+	// behavior of getting and setting a style property
+	cssHooks: {
+		opacity: {
+			get: function( elem, computed ) {
+				if ( computed ) {
+
+					// We should always get a number back from opacity
+					var ret = curCSS( elem, "opacity" );
+					return ret === "" ? "1" : ret;
+				}
+			}
+		}
+	},
+
+	// Don't automatically add "px" to these possibly-unitless properties
+	cssNumber: {
+		"animationIterationCount": true,
+		"columnCount": true,
+		"fillOpacity": true,
+		"flexGrow": true,
+		"flexShrink": true,
+		"fontWeight": true,
+		"lineHeight": true,
+		"opacity": true,
+		"order": true,
+		"orphans": true,
+		"widows": true,
+		"zIndex": true,
+		"zoom": true
+	},
+
+	// Add in properties whose names you wish to fix before
+	// setting or getting the value
+	cssProps: {
+
+		// normalize float css property
+		"float": support.cssFloat ? "cssFloat" : "styleFloat"
+	},
+
+	// Get and set the style property on a DOM Node
+	style: function( elem, name, value, extra ) {
+
+		// Don't set styles on text and comment nodes
+		if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) {
+			return;
+		}
+
+		// Make sure that we're working with the right name
+		var ret, type, hooks,
+			origName = jQuery.camelCase( name ),
+			style = elem.style;
+
+		name = jQuery.cssProps[ origName ] ||
+			( jQuery.cssProps[ origName ] = vendorPropName( origName ) || origName );
+
+		// gets hook for the prefixed version
+		// followed by the unprefixed version
+		hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ];
+
+		// Check if we're setting a value
+		if ( value !== undefined ) {
+			type = typeof value;
+
+			// Convert "+=" or "-=" to relative numbers (#7345)
+			if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) {
+				value = adjustCSS( elem, name, ret );
+
+				// Fixes bug #9237
+				type = "number";
+			}
+
+			// Make sure that null and NaN values aren't set. See: #7116
+			if ( value == null || value !== value ) {
+				return;
+			}
+
+			// If a number was passed in, add the unit (except for certain CSS properties)
+			if ( type === "number" ) {
+				value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" );
+			}
+
+			// Fixes #8908; it could be done more correctly by specifying setters
+			// in cssHooks, but that would mean defining eight identical functions
+			// (one for every problematic property)
+			if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) {
+				style[ name ] = "inherit";
+			}
+
+			// If a hook was provided, use that value, otherwise just set the specified value
+			if ( !hooks || !( "set" in hooks ) ||
+				( value = hooks.set( elem, value, extra ) ) !== undefined ) {
+
+				// Support: IE
+				// Swallow errors from 'invalid' CSS values (#5509)
+				try {
+					style[ name ] = value;
+				} catch ( e ) {}
+			}
+
+		} else {
+
+			// If a hook was provided get the non-computed value from there
+			if ( hooks && "get" in hooks &&
+				( ret = hooks.get( elem, false, extra ) ) !== undefined ) {
+
+				return ret;
+			}
+
+			// Otherwise just get the value from the style object
+			return style[ name ];
+		}
+	},
+
+	css: function( elem, name, extra, styles ) {
+		var num, val, hooks,
+			origName = jQuery.camelCase( name );
+
+		// Make sure that we're working with the right name
+		name = jQuery.cssProps[ origName ] ||
+			( jQuery.cssProps[ origName ] = vendorPropName( origName ) || origName );
+
+		// gets hook for the prefixed version
+		// followed by the unprefixed version
+		hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ];
+
+		// If a hook was provided get the computed value from there
+		if ( hooks && "get" in hooks ) {
+			val = hooks.get( elem, true, extra );
+		}
+
+		// Otherwise, if a way to get the computed value exists, use that
+		if ( val === undefined ) {
+			val = curCSS( elem, name, styles );
+		}
+
+		// Convert "normal" to computed value
+		if ( val === "normal" && name in cssNormalTransform ) {
+			val = cssNormalTransform[ name ];
+		}
+
+		// Return, converting to number if forced or a qualifier was provided and val looks numeric
+		if ( extra === "" || extra ) {
+			num = parseFloat( val );
+			return extra === true || isFinite( num ) ? num || 0 : val;
+		}
+		return val;
+	}
+} );
+
+jQuery.each( [ "height", "width" ], function( i, name ) {
+	jQuery.cssHooks[ name ] = {
+		get: function( elem, computed, extra ) {
+			if ( computed ) {
+
+				// certain elements can have dimension info if we invisibly show them;
+				// however, they must have a current display style that would benefit from this
+				return rdisplayswap.test( jQuery.css( elem, "display" ) ) &&
+					elem.offsetWidth === 0 ?
+						swap( elem, cssShow, function() {
+							return getWidthOrHeight( elem, name, extra );
+						} ) :
+						getWidthOrHeight( elem, name, extra );
+			}
+		},
+
+		set: function( elem, value, extra ) {
+			var styles = extra && getStyles( elem );
+			return setPositiveNumber( elem, value, extra ?
+				augmentWidthOrHeight(
+					elem,
+					name,
+					extra,
+					support.boxSizing &&
+						jQuery.css( elem, "boxSizing", false, styles ) === "border-box",
+					styles
+				) : 0
+			);
+		}
+	};
+} );
+
+if ( !support.opacity ) {
+	jQuery.cssHooks.opacity = {
+		get: function( elem, computed ) {
+
+			// IE uses filters for opacity
+			return ropacity.test( ( computed && elem.currentStyle ?
+				elem.currentStyle.filter :
+				elem.style.filter ) || "" ) ?
+					( 0.01 * parseFloat( RegExp.$1 ) ) + "" :
+					computed ? "1" : "";
+		},
+
+		set: function( elem, value ) {
+			var style = elem.style,
+				currentStyle = elem.currentStyle,
+				opacity = jQuery.isNumeric( value ) ? "alpha(opacity=" + value * 100 + ")" : "",
+				filter = currentStyle && currentStyle.filter || style.filter || "";
+
+			// IE has trouble with opacity if it does not have layout
+			// Force it by setting the zoom level
+			style.zoom = 1;
+
+			// if setting opacity to 1, and no other filters exist -
+			// attempt to remove filter attribute #6652
+			// if value === "", then remove inline opacity #12685
+			if ( ( value >= 1 || value === "" ) &&
+					jQuery.trim( filter.replace( ralpha, "" ) ) === "" &&
+					style.removeAttribute ) {
+
+				// Setting style.filter to null, "" & " " still leave "filter:" in the cssText
+				// if "filter:" is present at all, clearType is disabled, we want to avoid this
+				// style.removeAttribute is IE Only, but so apparently is this code path...
+				style.removeAttribute( "filter" );
+
+				// if there is no filter style applied in a css rule
+				// or unset inline opacity, we are done
+				if ( value === "" || currentStyle && !currentStyle.filter ) {
+					return;
+				}
+			}
+
+			// otherwise, set new filter values
+			style.filter = ralpha.test( filter ) ?
+				filter.replace( ralpha, opacity ) :
+				filter + " " + opacity;
+		}
+	};
+}
+
+jQuery.cssHooks.marginRight = addGetHookIf( support.reliableMarginRight,
+	function( elem, computed ) {
+		if ( computed ) {
+			return swap( elem, { "display": "inline-block" },
+				curCSS, [ elem, "marginRight" ] );
+		}
+	}
+);
+
+jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft,
+	function( elem, computed ) {
+		if ( computed ) {
+			return (
+				parseFloat( curCSS( elem, "marginLeft" ) ) ||
+
+				// Support: IE<=11+
+				// Running getBoundingClientRect on a disconnected node in IE throws an error
+				// Support: IE8 only
+				// getClientRects() errors on disconnected elems
+				( jQuery.contains( elem.ownerDocument, elem ) ?
+					elem.getBoundingClientRect().left -
+						swap( elem, { marginLeft: 0 }, function() {
+							return elem.getBoundingClientRect().left;
+						} ) :
+					0
+				)
+			) + "px";
+		}
+	}
+);
+
+// These hooks are used by animate to expand properties
+jQuery.each( {
+	margin: "",
+	padding: "",
+	border: "Width"
+}, function( prefix, suffix ) {
+	jQuery.cssHooks[ prefix + suffix ] = {
+		expand: function( value ) {
+			var i = 0,
+				expanded = {},
+
+				// assumes a single number if not a string
+				parts = typeof value === "string" ? value.split( " " ) : [ value ];
+
+			for ( ; i < 4; i++ ) {
+				expanded[ prefix + cssExpand[ i ] + suffix ] =
+					parts[ i ] || parts[ i - 2 ] || parts[ 0 ];
+			}
+
+			return expanded;
+		}
+	};
+
+	if ( !rmargin.test( prefix ) ) {
+		jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber;
+	}
+} );
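+
+// For illustration: the expand hooks mirror CSS shorthand order
+// (Top, Right, Bottom, Left), so jQuery.cssHooks.margin.expand( "1px 2px" )
+// produces { marginTop: "1px", marginRight: "2px",
+//            marginBottom: "1px", marginLeft: "2px" }.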
+
+jQuery.fn.extend( {
+	css: function( name, value ) {
+		return access( this, function( elem, name, value ) {
+			var styles, len,
+				map = {},
+				i = 0;
+
+			if ( jQuery.isArray( name ) ) {
+				styles = getStyles( elem );
+				len = name.length;
+
+				for ( ; i < len; i++ ) {
+					map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles );
+				}
+
+				return map;
+			}
+
+			return value !== undefined ?
+				jQuery.style( elem, name, value ) :
+				jQuery.css( elem, name );
+		}, name, value, arguments.length > 1 );
+	},
+	show: function() {
+		return showHide( this, true );
+	},
+	hide: function() {
+		return showHide( this );
+	},
+	toggle: function( state ) {
+		if ( typeof state === "boolean" ) {
+			return state ? this.show() : this.hide();
+		}
+
+		return this.each( function() {
+			if ( isHidden( this ) ) {
+				jQuery( this ).show();
+			} else {
+				jQuery( this ).hide();
+			}
+		} );
+	}
+} );
+
+
+function Tween( elem, options, prop, end, easing ) {
+	return new Tween.prototype.init( elem, options, prop, end, easing );
+}
+jQuery.Tween = Tween;
+
+Tween.prototype = {
+	constructor: Tween,
+	init: function( elem, options, prop, end, easing, unit ) {
+		this.elem = elem;
+		this.prop = prop;
+		this.easing = easing || jQuery.easing._default;
+		this.options = options;
+		this.start = this.now = this.cur();
+		this.end = end;
+		this.unit = unit || ( jQuery.cssNumber[ prop ] ? "" : "px" );
+	},
+	cur: function() {
+		var hooks = Tween.propHooks[ this.prop ];
+
+		return hooks && hooks.get ?
+			hooks.get( this ) :
+			Tween.propHooks._default.get( this );
+	},
+	run: function( percent ) {
+		var eased,
+			hooks = Tween.propHooks[ this.prop ];
+
+		if ( this.options.duration ) {
+			this.pos = eased = jQuery.easing[ this.easing ](
+				percent, this.options.duration * percent, 0, 1, this.options.duration
+			);
+		} else {
+			this.pos = eased = percent;
+		}
+		this.now = ( this.end - this.start ) * eased + this.start;
+
+		if ( this.options.step ) {
+			this.options.step.call( this.elem, this.now, this );
+		}
+
+		if ( hooks && hooks.set ) {
+			hooks.set( this );
+		} else {
+			Tween.propHooks._default.set( this );
+		}
+		return this;
+	}
+};
+
+Tween.prototype.init.prototype = Tween.prototype;
+
+Tween.propHooks = {
+	_default: {
+		get: function( tween ) {
+			var result;
+
+			// Use a property on the element directly when it is not a DOM element,
+			// or when there is no matching style property that exists.
+			if ( tween.elem.nodeType !== 1 ||
+				tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) {
+				return tween.elem[ tween.prop ];
+			}
+
+			// Passing an empty string as a 3rd parameter to .css will automatically
+			// attempt a parseFloat and fall back to a string if the parse fails,
+			// so simple values such as "10px" are parsed to a float
+			// while complex values such as "rotate(1rad)" are returned as-is.
+			result = jQuery.css( tween.elem, tween.prop, "" );
+
+			// Empty strings, null, undefined and "auto" are converted to 0.
+			return !result || result === "auto" ? 0 : result;
+		},
+		set: function( tween ) {
+
+			// Use the step hook for back compat, the cssHook if it's there, .style
+			// if it's available, and plain properties otherwise
+			if ( jQuery.fx.step[ tween.prop ] ) {
+				jQuery.fx.step[ tween.prop ]( tween );
+			} else if ( tween.elem.nodeType === 1 &&
+				( tween.elem.style[ jQuery.cssProps[ tween.prop ] ] != null ||
+					jQuery.cssHooks[ tween.prop ] ) ) {
+				jQuery.style( tween.elem, tween.prop, tween.now + tween.unit );
+			} else {
+				tween.elem[ tween.prop ] = tween.now;
+			}
+		}
+	}
+};
+
+// Support: IE <=9
+// Panic based approach to setting things on disconnected nodes
+
+Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = {
+	set: function( tween ) {
+		if ( tween.elem.nodeType && tween.elem.parentNode ) {
+			tween.elem[ tween.prop ] = tween.now;
+		}
+	}
+};
+
+jQuery.easing = {
+	linear: function( p ) {
+		return p;
+	},
+	swing: function( p ) {
+		return 0.5 - Math.cos( p * Math.PI ) / 2;
+	},
+	_default: "swing"
+};
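+
+// For illustration: swing maps progress through half a cosine wave, so
+// swing( 0 ) === 0, swing( 0.5 ) === 0.5 and swing( 1 ) === 1, easing in
+// and out of the animation rather than moving linearly.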
+
+jQuery.fx = Tween.prototype.init;
+
+// Back Compat <1.8 extension point
+jQuery.fx.step = {};
+
+
+
+
+var
+	fxNow, timerId,
+	rfxtypes = /^(?:toggle|show|hide)$/,
+	rrun = /queueHooks$/;
+
+// Animations created synchronously will run synchronously
+function createFxNow() {
+	window.setTimeout( function() {
+		fxNow = undefined;
+	} );
+	return ( fxNow = jQuery.now() );
+}
+
+// Generate parameters to create a standard animation
+function genFx( type, includeWidth ) {
+	var which,
+		attrs = { height: type },
+		i = 0;
+
+	// if we include width, step value is 1 to do all cssExpand values,
+	// if we don't include width, step value is 2 to skip over Left and Right
+	includeWidth = includeWidth ? 1 : 0;
+	for ( ; i < 4 ; i += 2 - includeWidth ) {
+		which = cssExpand[ i ];
+		attrs[ "margin" + which ] = attrs[ "padding" + which ] = type;
+	}
+
+	if ( includeWidth ) {
+		attrs.opacity = attrs.width = type;
+	}
+
+	return attrs;
+}
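+
+// For illustration: genFx( "hide", true ) marks height, width, opacity and
+// all four margin/padding sides as "hide", while genFx( "hide" ) touches
+// only the vertical (Top/Bottom) sides plus height, as used by slideUp.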
+
+function createTween( value, prop, animation ) {
+	var tween,
+		collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ),
+		index = 0,
+		length = collection.length;
+	for ( ; index < length; index++ ) {
+		if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) {
+
+			// we're done with this property
+			return tween;
+		}
+	}
+}
+
+function defaultPrefilter( elem, props, opts ) {
+	/* jshint validthis: true */
+	var prop, value, toggle, tween, hooks, oldfire, display, checkDisplay,
+		anim = this,
+		orig = {},
+		style = elem.style,
+		hidden = elem.nodeType && isHidden( elem ),
+		dataShow = jQuery._data( elem, "fxshow" );
+
+	// handle queue: false promises
+	if ( !opts.queue ) {
+		hooks = jQuery._queueHooks( elem, "fx" );
+		if ( hooks.unqueued == null ) {
+			hooks.unqueued = 0;
+			oldfire = hooks.empty.fire;
+			hooks.empty.fire = function() {
+				if ( !hooks.unqueued ) {
+					oldfire();
+				}
+			};
+		}
+		hooks.unqueued++;
+
+		anim.always( function() {
+
+			// doing this makes sure that the complete handler will be called
+			// before this completes
+			anim.always( function() {
+				hooks.unqueued--;
+				if ( !jQuery.queue( elem, "fx" ).length ) {
+					hooks.empty.fire();
+				}
+			} );
+		} );
+	}
+
+	// height/width overflow pass
+	if ( elem.nodeType === 1 && ( "height" in props || "width" in props ) ) {
+
+		// Make sure that nothing sneaks out
+		// Record all 3 overflow attributes because IE does not
+		// change the overflow attribute when overflowX and
+		// overflowY are set to the same value
+		opts.overflow = [ style.overflow, style.overflowX, style.overflowY ];
+
+		// Set display property to inline-block for height/width
+		// animations on inline elements that are having width/height animated
+		display = jQuery.css( elem, "display" );
+
+		// Test default display if display is currently "none"
+		checkDisplay = display === "none" ?
+			jQuery._data( elem, "olddisplay" ) || defaultDisplay( elem.nodeName ) : display;
+
+		if ( checkDisplay === "inline" && jQuery.css( elem, "float" ) === "none" ) {
+
+			// inline-level elements accept inline-block;
+			// block-level elements need to be inline with layout
+			if ( !support.inlineBlockNeedsLayout || defaultDisplay( elem.nodeName ) === "inline" ) {
+				style.display = "inline-block";
+			} else {
+				style.zoom = 1;
+			}
+		}
+	}
+
+	if ( opts.overflow ) {
+		style.overflow = "hidden";
+		if ( !support.shrinkWrapBlocks() ) {
+			anim.always( function() {
+				style.overflow = opts.overflow[ 0 ];
+				style.overflowX = opts.overflow[ 1 ];
+				style.overflowY = opts.overflow[ 2 ];
+			} );
+		}
+	}
+
+	// show/hide pass
+	for ( prop in props ) {
+		value = props[ prop ];
+		if ( rfxtypes.exec( value ) ) {
+			delete props[ prop ];
+			toggle = toggle || value === "toggle";
+			if ( value === ( hidden ? "hide" : "show" ) ) {
+
+				// If there is dataShow left over from a stopped hide or show
+				// and we are going to proceed with show, we should pretend to be hidden
+				if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) {
+					hidden = true;
+				} else {
+					continue;
+				}
+			}
+			orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop );
+
+		// Any non-fx value stops us from restoring the original display value
+		} else {
+			display = undefined;
+		}
+	}
+
+	if ( !jQuery.isEmptyObject( orig ) ) {
+		if ( dataShow ) {
+			if ( "hidden" in dataShow ) {
+				hidden = dataShow.hidden;
+			}
+		} else {
+			dataShow = jQuery._data( elem, "fxshow", {} );
+		}
+
+		// Store state if it's a toggle - enables .stop().toggle() to "reverse"
+		if ( toggle ) {
+			dataShow.hidden = !hidden;
+		}
+		if ( hidden ) {
+			jQuery( elem ).show();
+		} else {
+			anim.done( function() {
+				jQuery( elem ).hide();
+			} );
+		}
+		anim.done( function() {
+			var prop;
+			jQuery._removeData( elem, "fxshow" );
+			for ( prop in orig ) {
+				jQuery.style( elem, prop, orig[ prop ] );
+			}
+		} );
+		for ( prop in orig ) {
+			tween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim );
+
+			if ( !( prop in dataShow ) ) {
+				dataShow[ prop ] = tween.start;
+				if ( hidden ) {
+					tween.end = tween.start;
+					tween.start = prop === "width" || prop === "height" ? 1 : 0;
+				}
+			}
+		}
+
+	// If this is a noop like .hide().hide(), restore an overwritten display value
+	} else if ( ( display === "none" ? defaultDisplay( elem.nodeName ) : display ) === "inline" ) {
+		style.display = display;
+	}
+}
+
+function propFilter( props, specialEasing ) {
+	var index, name, easing, value, hooks;
+
+	// camelCase, specialEasing and expand cssHook pass
+	for ( index in props ) {
+		name = jQuery.camelCase( index );
+		easing = specialEasing[ name ];
+		value = props[ index ];
+		if ( jQuery.isArray( value ) ) {
+			easing = value[ 1 ];
+			value = props[ index ] = value[ 0 ];
+		}
+
+		if ( index !== name ) {
+			props[ name ] = value;
+			delete props[ index ];
+		}
+
+		hooks = jQuery.cssHooks[ name ];
+		if ( hooks && "expand" in hooks ) {
+			value = hooks.expand( value );
+			delete props[ name ];
+
+			// Not quite $.extend; this won't overwrite keys already present.
+			// Also, reusing 'index' from above because we have the correct "name"
+			for ( index in value ) {
+				if ( !( index in props ) ) {
+					props[ index ] = value[ index ];
+					specialEasing[ index ] = easing;
+				}
+			}
+		} else {
+			specialEasing[ name ] = easing;
+		}
+	}
+}
+
+function Animation( elem, properties, options ) {
+	var result,
+		stopped,
+		index = 0,
+		length = Animation.prefilters.length,
+		deferred = jQuery.Deferred().always( function() {
+
+			// don't match elem in the :animated selector
+			delete tick.elem;
+		} ),
+		tick = function() {
+			if ( stopped ) {
+				return false;
+			}
+			var currentTime = fxNow || createFxNow(),
+				remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ),
+
+				// Support: Android 2.3
+				// Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497)
+				temp = remaining / animation.duration || 0,
+				percent = 1 - temp,
+				index = 0,
+				length = animation.tweens.length;
+
+			for ( ; index < length ; index++ ) {
+				animation.tweens[ index ].run( percent );
+			}
+
+			deferred.notifyWith( elem, [ animation, percent, remaining ] );
+
+			if ( percent < 1 && length ) {
+				return remaining;
+			} else {
+				deferred.resolveWith( elem, [ animation ] );
+				return false;
+			}
+		},
+		animation = deferred.promise( {
+			elem: elem,
+			props: jQuery.extend( {}, properties ),
+			opts: jQuery.extend( true, {
+				specialEasing: {},
+				easing: jQuery.easing._default
+			}, options ),
+			originalProperties: properties,
+			originalOptions: options,
+			startTime: fxNow || createFxNow(),
+			duration: options.duration,
+			tweens: [],
+			createTween: function( prop, end ) {
+				var tween = jQuery.Tween( elem, animation.opts, prop, end,
+						animation.opts.specialEasing[ prop ] || animation.opts.easing );
+				animation.tweens.push( tween );
+				return tween;
+			},
+			stop: function( gotoEnd ) {
+				var index = 0,
+
+					// if we are going to the end, we want to run all the tweens;
+					// otherwise we skip this part
+					length = gotoEnd ? animation.tweens.length : 0;
+				if ( stopped ) {
+					return this;
+				}
+				stopped = true;
+				for ( ; index < length ; index++ ) {
+					animation.tweens[ index ].run( 1 );
+				}
+
+				// resolve when we played the last frame
+				// otherwise, reject
+				if ( gotoEnd ) {
+					deferred.notifyWith( elem, [ animation, 1, 0 ] );
+					deferred.resolveWith( elem, [ animation, gotoEnd ] );
+				} else {
+					deferred.rejectWith( elem, [ animation, gotoEnd ] );
+				}
+				return this;
+			}
+		} ),
+		props = animation.props;
+
+	propFilter( props, animation.opts.specialEasing );
+
+	for ( ; index < length ; index++ ) {
+		result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts );
+		if ( result ) {
+			if ( jQuery.isFunction( result.stop ) ) {
+				jQuery._queueHooks( animation.elem, animation.opts.queue ).stop =
+					jQuery.proxy( result.stop, result );
+			}
+			return result;
+		}
+	}
+
+	jQuery.map( props, createTween, animation );
+
+	if ( jQuery.isFunction( animation.opts.start ) ) {
+		animation.opts.start.call( elem, animation );
+	}
+
+	jQuery.fx.timer(
+		jQuery.extend( tick, {
+			elem: elem,
+			anim: animation,
+			queue: animation.opts.queue
+		} )
+	);
+
+	// attach callbacks from options
+	return animation.progress( animation.opts.progress )
+		.done( animation.opts.done, animation.opts.complete )
+		.fail( animation.opts.fail )
+		.always( animation.opts.always );
+}
+
+jQuery.Animation = jQuery.extend( Animation, {
+
+	tweeners: {
+		"*": [ function( prop, value ) {
+			var tween = this.createTween( prop, value );
+			adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween );
+			return tween;
+		} ]
+	},
+
+	tweener: function( props, callback ) {
+		if ( jQuery.isFunction( props ) ) {
+			callback = props;
+			props = [ "*" ];
+		} else {
+			props = props.match( rnotwhite );
+		}
+
+		var prop,
+			index = 0,
+			length = props.length;
+
+		for ( ; index < length ; index++ ) {
+			prop = props[ index ];
+			Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || [];
+			Animation.tweeners[ prop ].unshift( callback );
+		}
+	},
+
+	prefilters: [ defaultPrefilter ],
+
+	prefilter: function( callback, prepend ) {
+		if ( prepend ) {
+			Animation.prefilters.unshift( callback );
+		} else {
+			Animation.prefilters.push( callback );
+		}
+	}
+} );
+
+jQuery.speed = function( speed, easing, fn ) {
+	var opt = speed && typeof speed === "object" ? jQuery.extend( {}, speed ) : {
+		complete: fn || !fn && easing ||
+			jQuery.isFunction( speed ) && speed,
+		duration: speed,
+		easing: fn && easing || easing && !jQuery.isFunction( easing ) && easing
+	};
+
+	opt.duration = jQuery.fx.off ? 0 : typeof opt.duration === "number" ? opt.duration :
+		opt.duration in jQuery.fx.speeds ?
+			jQuery.fx.speeds[ opt.duration ] : jQuery.fx.speeds._default;
+
+	// normalize opt.queue - true/undefined/null -> "fx"
+	if ( opt.queue == null || opt.queue === true ) {
+		opt.queue = "fx";
+	}
+
+	// Queueing
+	opt.old = opt.complete;
+
+	opt.complete = function() {
+		if ( jQuery.isFunction( opt.old ) ) {
+			opt.old.call( this );
+		}
+
+		if ( opt.queue ) {
+			jQuery.dequeue( this, opt.queue );
+		}
+	};
+
+	return opt;
+};
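+
+// For illustration: jQuery.speed( "slow" ) resolves the duration through
+// jQuery.fx.speeds (so "slow" becomes 600ms), defaults opt.queue to "fx",
+// and wraps opt.complete so the next queued item is dequeued when the
+// animation finishes.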
+
+jQuery.fn.extend( {
+	fadeTo: function( speed, to, easing, callback ) {
+
+		// show any hidden elements after setting opacity to 0
+		return this.filter( isHidden ).css( "opacity", 0 ).show()
+
+			// animate to the value specified
+			.end().animate( { opacity: to }, speed, easing, callback );
+	},
+	animate: function( prop, speed, easing, callback ) {
+		var empty = jQuery.isEmptyObject( prop ),
+			optall = jQuery.speed( speed, easing, callback ),
+			doAnimation = function() {
+
+				// Operate on a copy of prop so per-property easing won't be lost
+				var anim = Animation( this, jQuery.extend( {}, prop ), optall );
+
+				// Empty animations, or finishing resolves immediately
+				if ( empty || jQuery._data( this, "finish" ) ) {
+					anim.stop( true );
+				}
+			};
+			doAnimation.finish = doAnimation;
+
+		return empty || optall.queue === false ?
+			this.each( doAnimation ) :
+			this.queue( optall.queue, doAnimation );
+	},
+	stop: function( type, clearQueue, gotoEnd ) {
+		var stopQueue = function( hooks ) {
+			var stop = hooks.stop;
+			delete hooks.stop;
+			stop( gotoEnd );
+		};
+
+		if ( typeof type !== "string" ) {
+			gotoEnd = clearQueue;
+			clearQueue = type;
+			type = undefined;
+		}
+		if ( clearQueue && type !== false ) {
+			this.queue( type || "fx", [] );
+		}
+
+		return this.each( function() {
+			var dequeue = true,
+				index = type != null && type + "queueHooks",
+				timers = jQuery.timers,
+				data = jQuery._data( this );
+
+			if ( index ) {
+				if ( data[ index ] && data[ index ].stop ) {
+					stopQueue( data[ index ] );
+				}
+			} else {
+				for ( index in data ) {
+					if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) {
+						stopQueue( data[ index ] );
+					}
+				}
+			}
+
+			for ( index = timers.length; index--; ) {
+				if ( timers[ index ].elem === this &&
+					( type == null || timers[ index ].queue === type ) ) {
+
+					timers[ index ].anim.stop( gotoEnd );
+					dequeue = false;
+					timers.splice( index, 1 );
+				}
+			}
+
+			// Start the next item in the queue if the last step wasn't forced;
+			// timers currently will call their complete callbacks, which will dequeue
+			// but only if they were gotoEnd
+			if ( dequeue || !gotoEnd ) {
+				jQuery.dequeue( this, type );
+			}
+		} );
+	},
+	finish: function( type ) {
+		if ( type !== false ) {
+			type = type || "fx";
+		}
+		return this.each( function() {
+			var index,
+				data = jQuery._data( this ),
+				queue = data[ type + "queue" ],
+				hooks = data[ type + "queueHooks" ],
+				timers = jQuery.timers,
+				length = queue ? queue.length : 0;
+
+			// enable finishing flag on private data
+			data.finish = true;
+
+			// empty the queue first
+			jQuery.queue( this, type, [] );
+
+			if ( hooks && hooks.stop ) {
+				hooks.stop.call( this, true );
+			}
+
+			// look for any active animations, and finish them
+			for ( index = timers.length; index--; ) {
+				if ( timers[ index ].elem === this && timers[ index ].queue === type ) {
+					timers[ index ].anim.stop( true );
+					timers.splice( index, 1 );
+				}
+			}
+
+			// look for any animations in the old queue and finish them
+			for ( index = 0; index < length; index++ ) {
+				if ( queue[ index ] && queue[ index ].finish ) {
+					queue[ index ].finish.call( this );
+				}
+			}
+
+			// turn off finishing flag
+			delete data.finish;
+		} );
+	}
+} );
+
+jQuery.each( [ "toggle", "show", "hide" ], function( i, name ) {
+	var cssFn = jQuery.fn[ name ];
+	jQuery.fn[ name ] = function( speed, easing, callback ) {
+		return speed == null || typeof speed === "boolean" ?
+			cssFn.apply( this, arguments ) :
+			this.animate( genFx( name, true ), speed, easing, callback );
+	};
+} );
+
+// Generate shortcuts for custom animations
+jQuery.each( {
+	slideDown: genFx( "show" ),
+	slideUp: genFx( "hide" ),
+	slideToggle: genFx( "toggle" ),
+	fadeIn: { opacity: "show" },
+	fadeOut: { opacity: "hide" },
+	fadeToggle: { opacity: "toggle" }
+}, function( name, props ) {
+	jQuery.fn[ name ] = function( speed, easing, callback ) {
+		return this.animate( props, speed, easing, callback );
+	};
+} );
+
+jQuery.timers = [];
+jQuery.fx.tick = function() {
+	var timer,
+		timers = jQuery.timers,
+		i = 0;
+
+	fxNow = jQuery.now();
+
+	for ( ; i < timers.length; i++ ) {
+		timer = timers[ i ];
+
+		// Checks the timer has not already been removed
+		if ( !timer() && timers[ i ] === timer ) {
+			timers.splice( i--, 1 );
+		}
+	}
+
+	if ( !timers.length ) {
+		jQuery.fx.stop();
+	}
+	fxNow = undefined;
+};
+
+jQuery.fx.timer = function( timer ) {
+	jQuery.timers.push( timer );
+	if ( timer() ) {
+		jQuery.fx.start();
+	} else {
+		jQuery.timers.pop();
+	}
+};
+
+jQuery.fx.interval = 13;
+
+jQuery.fx.start = function() {
+	if ( !timerId ) {
+		timerId = window.setInterval( jQuery.fx.tick, jQuery.fx.interval );
+	}
+};
+
+jQuery.fx.stop = function() {
+	window.clearInterval( timerId );
+	timerId = null;
+};
+
+jQuery.fx.speeds = {
+	slow: 600,
+	fast: 200,
+
+	// Default speed
+	_default: 400
+};
+
+
+// Based off of the plugin by Clint Helfers, with permission.
+// http://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/
+jQuery.fn.delay = function( time, type ) {
+	time = jQuery.fx ? jQuery.fx.speeds[ time ] || time : time;
+	type = type || "fx";
+
+	return this.queue( type, function( next, hooks ) {
+		var timeout = window.setTimeout( next, time );
+		hooks.stop = function() {
+			window.clearTimeout( timeout );
+		};
+	} );
+};
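+
+// For illustration (hypothetical element): jQuery( "#msg" ).delay( 800 ).fadeIn()
+// queues an 800ms pause on the "fx" queue before the fade runs; the stored
+// hooks.stop above lets .stop()/.finish() clear the pending timeout.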
+
+
+( function() {
+	var a,
+		input = document.createElement( "input" ),
+		div = document.createElement( "div" ),
+		select = document.createElement( "select" ),
+		opt = select.appendChild( document.createElement( "option" ) );
+
+	// Setup
+	div = document.createElement( "div" );
+	div.setAttribute( "className", "t" );
+	div.innerHTML = "  <link/><table></table><a href='/a'>a</a><input type='checkbox'/>";
+
+	// Support: Windows Web Apps (WWA)
+	// `type` must use .setAttribute for WWA (#14901)
+	input.setAttribute( "type", "checkbox" );
+	div.appendChild( input );
+
+	a = div.getElementsByTagName( "a" )[ 0 ];
+
+	// First batch of tests.
+	a.style.cssText = "top:1px";
+
+	// Test setAttribute on camelCase class.
+	// If it works, we need attrFixes when doing get/setAttribute (ie6/7)
+	support.getSetAttribute = div.className !== "t";
+
+	// Get the style information from getAttribute
+	// (IE uses .cssText instead)
+	support.style = /top/.test( a.getAttribute( "style" ) );
+
+	// Make sure that URLs aren't manipulated
+	// (IE normalizes it by default)
+	support.hrefNormalized = a.getAttribute( "href" ) === "/a";
+
+	// Check the default checkbox/radio value ("" on WebKit; "on" elsewhere)
+	support.checkOn = !!input.value;
+
+	// Make sure that a selected-by-default option has a working selected property.
+	// (WebKit defaults to false instead of true, IE too, if it's in an optgroup)
+	support.optSelected = opt.selected;
+
+	// Tests for enctype support on a form (#6743)
+	support.enctype = !!document.createElement( "form" ).enctype;
+
+	// Make sure that the options inside disabled selects aren't marked as disabled
+	// (WebKit marks them as disabled)
+	select.disabled = true;
+	support.optDisabled = !opt.disabled;
+
+	// Support: IE8 only
+	// Check if we can trust getAttribute("value")
+	input = document.createElement( "input" );
+	input.setAttribute( "value", "" );
+	support.input = input.getAttribute( "value" ) === "";
+
+	// Check if an input maintains its value after becoming a radio
+	input.value = "t";
+	input.setAttribute( "type", "radio" );
+	support.radioValue = input.value === "t";
+} )();
+
+
+var rreturn = /\r/g,
+	rspaces = /[\x20\t\r\n\f]+/g;
+
+jQuery.fn.extend( {
+	val: function( value ) {
+		var hooks, ret, isFunction,
+			elem = this[ 0 ];
+
+		if ( !arguments.length ) {
+			if ( elem ) {
+				hooks = jQuery.valHooks[ elem.type ] ||
+					jQuery.valHooks[ elem.nodeName.toLowerCase() ];
+
+				if (
+					hooks &&
+					"get" in hooks &&
+					( ret = hooks.get( elem, "value" ) ) !== undefined
+				) {
+					return ret;
+				}
+
+				ret = elem.value;
+
+				return typeof ret === "string" ?
+
+					// handle most common string cases
+					ret.replace( rreturn, "" ) :
+
+					// handle cases where value is null/undef or number
+					ret == null ? "" : ret;
+			}
+
+			return;
+		}
+
+		isFunction = jQuery.isFunction( value );
+
+		return this.each( function( i ) {
+			var val;
+
+			if ( this.nodeType !== 1 ) {
+				return;
+			}
+
+			if ( isFunction ) {
+				val = value.call( this, i, jQuery( this ).val() );
+			} else {
+				val = value;
+			}
+
+			// Treat null/undefined as ""; convert numbers to string
+			if ( val == null ) {
+				val = "";
+			} else if ( typeof val === "number" ) {
+				val += "";
+			} else if ( jQuery.isArray( val ) ) {
+				val = jQuery.map( val, function( value ) {
+					return value == null ? "" : value + "";
+				} );
+			}
+
+			hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ];
+
+			// If set returns undefined, fall back to normal setting
+			if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) {
+				this.value = val;
+			}
+		} );
+	}
+} );
+
+jQuery.extend( {
+	valHooks: {
+		option: {
+			get: function( elem ) {
+				var val = jQuery.find.attr( elem, "value" );
+				return val != null ?
+					val :
+
+					// Support: IE10-11+
+					// option.text throws exceptions (#14686, #14858)
+					// Strip and collapse whitespace
+					// https://html.spec.whatwg.org/#strip-and-collapse-whitespace
+					jQuery.trim( jQuery.text( elem ) ).replace( rspaces, " " );
+			}
+		},
+		select: {
+			get: function( elem ) {
+				var value, option,
+					options = elem.options,
+					index = elem.selectedIndex,
+					one = elem.type === "select-one" || index < 0,
+					values = one ? null : [],
+					max = one ? index + 1 : options.length,
+					i = index < 0 ?
+						max :
+						one ? index : 0;
+
+				// Loop through all the selected options
+				for ( ; i < max; i++ ) {
+					option = options[ i ];
+
+					// oldIE doesn't update selected after form reset (#2551)
+					if ( ( option.selected || i === index ) &&
+
+							// Don't return options that are disabled or in a disabled optgroup
+							( support.optDisabled ?
+								!option.disabled :
+								option.getAttribute( "disabled" ) === null ) &&
+							( !option.parentNode.disabled ||
+								!jQuery.nodeName( option.parentNode, "optgroup" ) ) ) {
+
+						// Get the specific value for the option
+						value = jQuery( option ).val();
+
+						// We don't need an array for single selects
+						if ( one ) {
+							return value;
+						}
+
+						// Multi-Selects return an array
+						values.push( value );
+					}
+				}
+
+				return values;
+			},
+
+			set: function( elem, value ) {
+				var optionSet, option,
+					options = elem.options,
+					values = jQuery.makeArray( value ),
+					i = options.length;
+
+				while ( i-- ) {
+					option = options[ i ];
+
+					if ( jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 ) {
+
+						// Support: IE6
+						// When a new option element is added to the select box, we need
+						// to force a reflow of the newly added node in order to work
+						// around the delayed initialization of its properties
+						try {
+							option.selected = optionSet = true;
+
+						} catch ( _ ) {
+
+							// Will be executed only in IE6
+							option.scrollHeight;
+						}
+
+					} else {
+						option.selected = false;
+					}
+				}
+
+				// Force browsers to behave consistently when non-matching value is set
+				if ( !optionSet ) {
+					elem.selectedIndex = -1;
+				}
+
+				return options;
+			}
+		}
+	}
+} );
+
+// Radios and checkboxes getter/setter
+jQuery.each( [ "radio", "checkbox" ], function() {
+	jQuery.valHooks[ this ] = {
+		set: function( elem, value ) {
+			if ( jQuery.isArray( value ) ) {
+				return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 );
+			}
+		}
+	};
+	if ( !support.checkOn ) {
+		jQuery.valHooks[ this ].get = function( elem ) {
+			return elem.getAttribute( "value" ) === null ? "on" : elem.value;
+		};
+	}
+} );
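+
+// For illustration: with the hook above, jQuery( input ).val( [ "a", "b" ] )
+// checks a checkbox or radio only when its own value appears in the array,
+// which is what lets .val() drive whole checkbox/radio groups at once.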
+
+
+
+
+var nodeHook, boolHook,
+	attrHandle = jQuery.expr.attrHandle,
+	ruseDefault = /^(?:checked|selected)$/i,
+	getSetAttribute = support.getSetAttribute,
+	getSetInput = support.input;
+
+jQuery.fn.extend( {
+	attr: function( name, value ) {
+		return access( this, jQuery.attr, name, value, arguments.length > 1 );
+	},
+
+	removeAttr: function( name ) {
+		return this.each( function() {
+			jQuery.removeAttr( this, name );
+		} );
+	}
+} );
+
+jQuery.extend( {
+	attr: function( elem, name, value ) {
+		var ret, hooks,
+			nType = elem.nodeType;
+
+		// Don't get/set attributes on text, comment and attribute nodes
+		if ( nType === 3 || nType === 8 || nType === 2 ) {
+			return;
+		}
+
+		// Fallback to prop when attributes are not supported
+		if ( typeof elem.getAttribute === "undefined" ) {
+			return jQuery.prop( elem, name, value );
+		}
+
+		// All attributes are lowercase
+		// Grab necessary hook if one is defined
+		if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) {
+			name = name.toLowerCase();
+			hooks = jQuery.attrHooks[ name ] ||
+				( jQuery.expr.match.bool.test( name ) ? boolHook : nodeHook );
+		}
+
+		if ( value !== undefined ) {
+			if ( value === null ) {
+				jQuery.removeAttr( elem, name );
+				return;
+			}
+
+			if ( hooks && "set" in hooks &&
+				( ret = hooks.set( elem, value, name ) ) !== undefined ) {
+				return ret;
+			}
+
+			elem.setAttribute( name, value + "" );
+			return value;
+		}
+
+		if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) {
+			return ret;
+		}
+
+		ret = jQuery.find.attr( elem, name );
+
+		// Non-existent attributes return null, we normalize to undefined
+		return ret == null ? undefined : ret;
+	},
+
+	attrHooks: {
+		type: {
+			set: function( elem, value ) {
+				if ( !support.radioValue && value === "radio" &&
+					jQuery.nodeName( elem, "input" ) ) {
+
+					// Setting the type on a radio button after the value resets the value in IE8-9
+					// Reset value to default in case type is set after value during creation
+					var val = elem.value;
+					elem.setAttribute( "type", value );
+					if ( val ) {
+						elem.value = val;
+					}
+					return value;
+				}
+			}
+		}
+	},
+
+	removeAttr: function( elem, value ) {
+		var name, propName,
+			i = 0,
+			attrNames = value && value.match( rnotwhite );
+
+		if ( attrNames && elem.nodeType === 1 ) {
+			while ( ( name = attrNames[ i++ ] ) ) {
+				propName = jQuery.propFix[ name ] || name;
+
+				// Boolean attributes get special treatment (#10870)
+				if ( jQuery.expr.match.bool.test( name ) ) {
+
+					// Set corresponding property to false
+					if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) {
+						elem[ propName ] = false;
+
+					// Support: IE<9
+					// Also clear defaultChecked/defaultSelected (if appropriate)
+					} else {
+						elem[ jQuery.camelCase( "default-" + name ) ] =
+							elem[ propName ] = false;
+					}
+
+				// See #9699 for explanation of this approach (setting first, then removal)
+				} else {
+					jQuery.attr( elem, name, "" );
+				}
+
+				elem.removeAttribute( getSetAttribute ? name : propName );
+			}
+		}
+	}
+} );
+
+// Hooks for boolean attributes
+boolHook = {
+	set: function( elem, value, name ) {
+		if ( value === false ) {
+
+			// Remove boolean attributes when set to false
+			jQuery.removeAttr( elem, name );
+		} else if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) {
+
+			// IE<8 needs the *property* name
+			elem.setAttribute( !getSetAttribute && jQuery.propFix[ name ] || name, name );
+
+		} else {
+
+			// Support: IE<9
+			// Use defaultChecked and defaultSelected for oldIE
+			elem[ jQuery.camelCase( "default-" + name ) ] = elem[ name ] = true;
+		}
+		return name;
+	}
+};
+
+jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( i, name ) {
+	var getter = attrHandle[ name ] || jQuery.find.attr;
+
+	if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) {
+		attrHandle[ name ] = function( elem, name, isXML ) {
+			var ret, handle;
+			if ( !isXML ) {
+
+				// Avoid an infinite loop by temporarily removing this function from the getter
+				handle = attrHandle[ name ];
+				attrHandle[ name ] = ret;
+				ret = getter( elem, name, isXML ) != null ?
+					name.toLowerCase() :
+					null;
+				attrHandle[ name ] = handle;
+			}
+			return ret;
+		};
+	} else {
+		attrHandle[ name ] = function( elem, name, isXML ) {
+			if ( !isXML ) {
+				return elem[ jQuery.camelCase( "default-" + name ) ] ?
+					name.toLowerCase() :
+					null;
+			}
+		};
+	}
+} );
+
+// fix oldIE attroperties
+if ( !getSetInput || !getSetAttribute ) {
+	jQuery.attrHooks.value = {
+		set: function( elem, value, name ) {
+			if ( jQuery.nodeName( elem, "input" ) ) {
+
+				// Does not return so that setAttribute is also used
+				elem.defaultValue = value;
+			} else {
+
+				// Use nodeHook if defined (#1954); otherwise setAttribute is fine
+				return nodeHook && nodeHook.set( elem, value, name );
+			}
+		}
+	};
+}
+
+// IE6/7 do not support getting/setting some attributes with get/setAttribute
+if ( !getSetAttribute ) {
+
+	// Use this for any attribute in IE6/7
+	// This fixes almost every IE6/7 issue
+	nodeHook = {
+		set: function( elem, value, name ) {
+
+			// Set the existing or create a new attribute node
+			var ret = elem.getAttributeNode( name );
+			if ( !ret ) {
+				elem.setAttributeNode(
+					( ret = elem.ownerDocument.createAttribute( name ) )
+				);
+			}
+
+			ret.value = value += "";
+
+			// Break association with cloned elements by also using setAttribute (#9646)
+			if ( name === "value" || value === elem.getAttribute( name ) ) {
+				return value;
+			}
+		}
+	};
+
+	// Some attributes are constructed with empty-string values when not defined
+	attrHandle.id = attrHandle.name = attrHandle.coords =
+		function( elem, name, isXML ) {
+			var ret;
+			if ( !isXML ) {
+				return ( ret = elem.getAttributeNode( name ) ) && ret.value !== "" ?
+					ret.value :
+					null;
+			}
+		};
+
+	// Fixing value retrieval on a button requires this module
+	jQuery.valHooks.button = {
+		get: function( elem, name ) {
+			var ret = elem.getAttributeNode( name );
+			if ( ret && ret.specified ) {
+				return ret.value;
+			}
+		},
+		set: nodeHook.set
+	};
+
+	// Set contenteditable to false on removals(#10429)
+	// Setting to empty string throws an error as an invalid value
+	jQuery.attrHooks.contenteditable = {
+		set: function( elem, value, name ) {
+			nodeHook.set( elem, value === "" ? false : value, name );
+		}
+	};
+
+	// Set width and height to auto instead of 0 on empty string( Bug #8150 )
+	// This is for removals
+	jQuery.each( [ "width", "height" ], function( i, name ) {
+		jQuery.attrHooks[ name ] = {
+			set: function( elem, value ) {
+				if ( value === "" ) {
+					elem.setAttribute( name, "auto" );
+					return value;
+				}
+			}
+		};
+	} );
+}
+
+if ( !support.style ) {
+	jQuery.attrHooks.style = {
+		get: function( elem ) {
+
+			// Return undefined in the case of empty string
+			// Note: IE uppercases css property names, but if we were to .toLowerCase()
+			// .cssText, that would destroy case sensitivity in URL's, like in "background"
+			return elem.style.cssText || undefined;
+		},
+		set: function( elem, value ) {
+			return ( elem.style.cssText = value + "" );
+		}
+	};
+}
+
+
+
+
+var rfocusable = /^(?:input|select|textarea|button|object)$/i,
+	rclickable = /^(?:a|area)$/i;
+
+jQuery.fn.extend( {
+	prop: function( name, value ) {
+		return access( this, jQuery.prop, name, value, arguments.length > 1 );
+	},
+
+	removeProp: function( name ) {
+		name = jQuery.propFix[ name ] || name;
+		return this.each( function() {
+
+			// try/catch handles cases where IE balks (such as removing a property on window)
+			try {
+				this[ name ] = undefined;
+				delete this[ name ];
+			} catch ( e ) {}
+		} );
+	}
+} );
+
+jQuery.extend( {
+	prop: function( elem, name, value ) {
+		var ret, hooks,
+			nType = elem.nodeType;
+
+		// Don't get/set properties on text, comment and attribute nodes
+		if ( nType === 3 || nType === 8 || nType === 2 ) {
+			return;
+		}
+
+		if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) {
+
+			// Fix name and attach hooks
+			name = jQuery.propFix[ name ] || name;
+			hooks = jQuery.propHooks[ name ];
+		}
+
+		if ( value !== undefined ) {
+			if ( hooks && "set" in hooks &&
+				( ret = hooks.set( elem, value, name ) ) !== undefined ) {
+				return ret;
+			}
+
+			return ( elem[ name ] = value );
+		}
+
+		if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) {
+			return ret;
+		}
+
+		return elem[ name ];
+	},
+
+	propHooks: {
+		tabIndex: {
+			get: function( elem ) {
+
+				// elem.tabIndex doesn't always return the
+				// correct value when it hasn't been explicitly set
+				// http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/
+				// Use proper attribute retrieval (#12072)
+				var tabindex = jQuery.find.attr( elem, "tabindex" );
+
+				return tabindex ?
+					parseInt( tabindex, 10 ) :
+					rfocusable.test( elem.nodeName ) ||
+						rclickable.test( elem.nodeName ) && elem.href ?
+							0 :
+							-1;
+			}
+		}
+	},
+
+	propFix: {
+		"for": "htmlFor",
+		"class": "className"
+	}
+} );
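+
+// Usage sketch for .prop()/.removeProp() above (the "#check" id is assumed):
+//
+//     jQuery( "#check" ).prop( "checked", true );   // set the live DOM property
+//     jQuery( "#check" ).prop( "tabindex" );        // propFix maps this to tabIndex
+//     jQuery( "#check" ).removeProp( "myFlag" );    // delete a custom (non-native) property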
+
+// Some attributes require a special call on IE
+// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
+if ( !support.hrefNormalized ) {
+
+	// href/src property should get the full normalized URL (#10299/#12915)
+	jQuery.each( [ "href", "src" ], function( i, name ) {
+		jQuery.propHooks[ name ] = {
+			get: function( elem ) {
+				return elem.getAttribute( name, 4 );
+			}
+		};
+	} );
+}
+
+// Support: Safari, IE9+
+// Accessing the selectedIndex property
+// forces the browser to respect setting selected
+// on the option
+// The getter ensures a default option is selected
+// when in an optgroup
+if ( !support.optSelected ) {
+	jQuery.propHooks.selected = {
+		get: function( elem ) {
+			var parent = elem.parentNode;
+
+			if ( parent ) {
+				parent.selectedIndex;
+
+				// Make sure that it also works with optgroups, see #5701
+				if ( parent.parentNode ) {
+					parent.parentNode.selectedIndex;
+				}
+			}
+			return null;
+		},
+		set: function( elem ) {
+			var parent = elem.parentNode;
+			if ( parent ) {
+				parent.selectedIndex;
+
+				if ( parent.parentNode ) {
+					parent.parentNode.selectedIndex;
+				}
+			}
+		}
+	};
+}
+
+jQuery.each( [
+	"tabIndex",
+	"readOnly",
+	"maxLength",
+	"cellSpacing",
+	"cellPadding",
+	"rowSpan",
+	"colSpan",
+	"useMap",
+	"frameBorder",
+	"contentEditable"
+], function() {
+	jQuery.propFix[ this.toLowerCase() ] = this;
+} );
+
+// IE6/7 call the enctype property "encoding"
+if ( !support.enctype ) {
+	jQuery.propFix.enctype = "encoding";
+}
+
+
+
+
+var rclass = /[\t\r\n\f]/g;
+
+function getClass( elem ) {
+	return jQuery.attr( elem, "class" ) || "";
+}
+
+jQuery.fn.extend( {
+	addClass: function( value ) {
+		var classes, elem, cur, curValue, clazz, j, finalValue,
+			i = 0;
+
+		if ( jQuery.isFunction( value ) ) {
+			return this.each( function( j ) {
+				jQuery( this ).addClass( value.call( this, j, getClass( this ) ) );
+			} );
+		}
+
+		if ( typeof value === "string" && value ) {
+			classes = value.match( rnotwhite ) || [];
+
+			while ( ( elem = this[ i++ ] ) ) {
+				curValue = getClass( elem );
+				cur = elem.nodeType === 1 &&
+					( " " + curValue + " " ).replace( rclass, " " );
+
+				if ( cur ) {
+					j = 0;
+					while ( ( clazz = classes[ j++ ] ) ) {
+						if ( cur.indexOf( " " + clazz + " " ) < 0 ) {
+							cur += clazz + " ";
+						}
+					}
+
+					// only assign if different to avoid unneeded rendering.
+					finalValue = jQuery.trim( cur );
+					if ( curValue !== finalValue ) {
+						jQuery.attr( elem, "class", finalValue );
+					}
+				}
+			}
+		}
+
+		return this;
+	},
+
+	removeClass: function( value ) {
+		var classes, elem, cur, curValue, clazz, j, finalValue,
+			i = 0;
+
+		if ( jQuery.isFunction( value ) ) {
+			return this.each( function( j ) {
+				jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) );
+			} );
+		}
+
+		if ( !arguments.length ) {
+			return this.attr( "class", "" );
+		}
+
+		if ( typeof value === "string" && value ) {
+			classes = value.match( rnotwhite ) || [];
+
+			while ( ( elem = this[ i++ ] ) ) {
+				curValue = getClass( elem );
+
+				// This expression is here for better compressibility (see addClass)
+				cur = elem.nodeType === 1 &&
+					( " " + curValue + " " ).replace( rclass, " " );
+
+				if ( cur ) {
+					j = 0;
+					while ( ( clazz = classes[ j++ ] ) ) {
+
+						// Remove *all* instances
+						while ( cur.indexOf( " " + clazz + " " ) > -1 ) {
+							cur = cur.replace( " " + clazz + " ", " " );
+						}
+					}
+
+					// Only assign if different to avoid unneeded rendering.
+					finalValue = jQuery.trim( cur );
+					if ( curValue !== finalValue ) {
+						jQuery.attr( elem, "class", finalValue );
+					}
+				}
+			}
+		}
+
+		return this;
+	},
+
+	toggleClass: function( value, stateVal ) {
+		var type = typeof value;
+
+		if ( typeof stateVal === "boolean" && type === "string" ) {
+			return stateVal ? this.addClass( value ) : this.removeClass( value );
+		}
+
+		if ( jQuery.isFunction( value ) ) {
+			return this.each( function( i ) {
+				jQuery( this ).toggleClass(
+					value.call( this, i, getClass( this ), stateVal ),
+					stateVal
+				);
+			} );
+		}
+
+		return this.each( function() {
+			var className, i, self, classNames;
+
+			if ( type === "string" ) {
+
+				// Toggle individual class names
+				i = 0;
+				self = jQuery( this );
+				classNames = value.match( rnotwhite ) || [];
+
+				while ( ( className = classNames[ i++ ] ) ) {
+
+					// Check each className given, space separated list
+					if ( self.hasClass( className ) ) {
+						self.removeClass( className );
+					} else {
+						self.addClass( className );
+					}
+				}
+
+			// Toggle whole class name
+			} else if ( value === undefined || type === "boolean" ) {
+				className = getClass( this );
+				if ( className ) {
+
+					// store className if set
+					jQuery._data( this, "__className__", className );
+				}
+
+				// If the element has a class name or if we're passed "false",
+				// then remove the whole classname (if there was one, the above saved it).
+				// Otherwise bring back whatever was previously saved (if anything),
+				// falling back to the empty string if nothing was stored.
+				jQuery.attr( this, "class",
+					className || value === false ?
+					"" :
+					jQuery._data( this, "__className__" ) || ""
+				);
+			}
+		} );
+	},
+
+	hasClass: function( selector ) {
+		var className, elem,
+			i = 0;
+
+		className = " " + selector + " ";
+		while ( ( elem = this[ i++ ] ) ) {
+			if ( elem.nodeType === 1 &&
+				( " " + getClass( elem ) + " " ).replace( rclass, " " )
+					.indexOf( className ) > -1
+			) {
+				return true;
+			}
+		}
+
+		return false;
+	}
+} );
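+
+// Usage sketch for the class helpers above (the ".item" selector is assumed):
+//
+//     jQuery( ".item" ).addClass( "active highlight" );   // space-separated list
+//     jQuery( ".item" ).removeClass( "highlight" );
+//     jQuery( ".item" ).toggleClass( "active", isOn );    // second argument forces the state
+//     jQuery( ".item" ).hasClass( "active" );             // => boolean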
+
+
+
+
+// Return jQuery for attributes-only inclusion
+
+
+jQuery.each( ( "blur focus focusin focusout load resize scroll unload click dblclick " +
+	"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " +
+	"change select submit keydown keypress keyup error contextmenu" ).split( " " ),
+	function( i, name ) {
+
+	// Handle event binding
+	jQuery.fn[ name ] = function( data, fn ) {
+		return arguments.length > 0 ?
+			this.on( name, null, data, fn ) :
+			this.trigger( name );
+	};
+} );
+
+jQuery.fn.extend( {
+	hover: function( fnOver, fnOut ) {
+		return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver );
+	}
+} );
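+
+// Usage sketch for the event shorthands above ("#btn" and the handlers are assumed):
+//
+//     jQuery( "#btn" ).click( onClick );            // same as .on( "click", null, onClick )
+//     jQuery( "#btn" ).click();                     // with no arguments: triggers the event
+//     jQuery( "#btn" ).hover( onEnter, onLeave );   // mouseenter + mouseleave pair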
+
+
+var location = window.location;
+
+var nonce = jQuery.now();
+
+var rquery = ( /\?/ );
+
+
+
+var rvalidtokens = /(,)|(\[|{)|(}|])|"(?:[^"\\\r\n]|\\["\\\/bfnrt]|\\u[\da-fA-F]{4})*"\s*:?|true|false|null|-?(?!0\d)\d+(?:\.\d+|)(?:[eE][+-]?\d+|)/g;
+
+jQuery.parseJSON = function( data ) {
+
+	// Attempt to parse using the native JSON parser first
+	if ( window.JSON && window.JSON.parse ) {
+
+		// Support: Android 2.3
+		// Workaround failure to string-cast null input
+		return window.JSON.parse( data + "" );
+	}
+
+	var requireNonComma,
+		depth = null,
+		str = jQuery.trim( data + "" );
+
+	// Guard against invalid (and possibly dangerous) input by ensuring that nothing remains
+	// after removing valid tokens
+	return str && !jQuery.trim( str.replace( rvalidtokens, function( token, comma, open, close ) {
+
+		// Force termination if we see a misplaced comma
+		if ( requireNonComma && comma ) {
+			depth = 0;
+		}
+
+		// Perform no more replacements after returning to outermost depth
+		if ( depth === 0 ) {
+			return token;
+		}
+
+		// Commas must not follow "[", "{", or ","
+		requireNonComma = open || comma;
+
+		// Determine new depth
+		// array/object open ("[" or "{"): depth += true - false (increment)
+		// array/object close ("]" or "}"): depth += false - true (decrement)
+		// other cases ("," or primitive): depth += true - true (numeric cast)
+		depth += !close - !open;
+
+		// Remove this token
+		return "";
+	} ) ) ?
+		( Function( "return " + str ) )() :
+		jQuery.error( "Invalid JSON: " + data );
+};
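+
+// Usage sketch:
+//
+//     jQuery.parseJSON( '{"a":1}' );   // => { a: 1 } (native JSON.parse when available)
+//     jQuery.parseJSON( "{bad}" );     // throws "Invalid JSON: {bad}"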
+
+
+// Cross-browser xml parsing
+jQuery.parseXML = function( data ) {
+	var xml, tmp;
+	if ( !data || typeof data !== "string" ) {
+		return null;
+	}
+	try {
+		if ( window.DOMParser ) { // Standard
+			tmp = new window.DOMParser();
+			xml = tmp.parseFromString( data, "text/xml" );
+		} else { // IE
+			xml = new window.ActiveXObject( "Microsoft.XMLDOM" );
+			xml.async = "false";
+			xml.loadXML( data );
+		}
+	} catch ( e ) {
+		xml = undefined;
+	}
+	if ( !xml || !xml.documentElement || xml.getElementsByTagName( "parsererror" ).length ) {
+		jQuery.error( "Invalid XML: " + data );
+	}
+	return xml;
+};
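+
+// Usage sketch:
+//
+//     var doc = jQuery.parseXML( "<root><item id='1'/></root>" );
+//     jQuery( doc ).find( "item" ).attr( "id" );   // => "1"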
+
+
+var
+	rhash = /#.*$/,
+	rts = /([?&])_=[^&]*/,
+
+	// IE leaves an \r character at EOL
+	rheaders = /^(.*?):[ \t]*([^\r\n]*)\r?$/mg,
+
+	// #7653, #8125, #8152: local protocol detection
+	rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/,
+	rnoContent = /^(?:GET|HEAD)$/,
+	rprotocol = /^\/\//,
+	rurl = /^([\w.+-]+:)(?:\/\/(?:[^\/?#]*@|)([^\/?#:]*)(?::(\d+)|)|)/,
+
+	/* Prefilters
+	 * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example)
+	 * 2) These are called:
+	 *    - BEFORE asking for a transport
+	 *    - AFTER param serialization (s.data is a string if s.processData is true)
+	 * 3) key is the dataType
+	 * 4) the catchall symbol "*" can be used
+	 * 5) execution will start with transport dataType and THEN continue down to "*" if needed
+	 */
+	prefilters = {},
+
+	/* Transports bindings
+	 * 1) key is the dataType
+	 * 2) the catchall symbol "*" can be used
+	 * 3) selection will start with transport dataType and THEN go to "*" if needed
+	 */
+	transports = {},
+
+	// Avoid comment-prolog char sequence (#10098); must appease lint and evade compression
+	allTypes = "*/".concat( "*" ),
+
+	// Document location
+	ajaxLocation = location.href,
+
+	// Segment location into parts
+	ajaxLocParts = rurl.exec( ajaxLocation.toLowerCase() ) || [];
+
+// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport
+function addToPrefiltersOrTransports( structure ) {
+
+	// dataTypeExpression is optional and defaults to "*"
+	return function( dataTypeExpression, func ) {
+
+		if ( typeof dataTypeExpression !== "string" ) {
+			func = dataTypeExpression;
+			dataTypeExpression = "*";
+		}
+
+		var dataType,
+			i = 0,
+			dataTypes = dataTypeExpression.toLowerCase().match( rnotwhite ) || [];
+
+		if ( jQuery.isFunction( func ) ) {
+
+			// For each dataType in the dataTypeExpression
+			while ( ( dataType = dataTypes[ i++ ] ) ) {
+
+				// Prepend if requested
+				if ( dataType.charAt( 0 ) === "+" ) {
+					dataType = dataType.slice( 1 ) || "*";
+					( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func );
+
+				// Otherwise append
+				} else {
+					( structure[ dataType ] = structure[ dataType ] || [] ).push( func );
+				}
+			}
+		}
+	};
+}
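+
+// Usage sketch for the public wrappers built from this factory
+// (jQuery.ajaxPrefilter / jQuery.ajaxTransport, attached below):
+//
+//     jQuery.ajaxPrefilter( "json", function( options ) {
+//         options.cache = false;   // runs before any "json" request picks a transport
+//     } );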
+
+// Base inspection function for prefilters and transports
+function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) {
+
+	var inspected = {},
+		seekingTransport = ( structure === transports );
+
+	function inspect( dataType ) {
+		var selected;
+		inspected[ dataType ] = true;
+		jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) {
+			var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR );
+			if ( typeof dataTypeOrTransport === "string" &&
+				!seekingTransport && !inspected[ dataTypeOrTransport ] ) {
+
+				options.dataTypes.unshift( dataTypeOrTransport );
+				inspect( dataTypeOrTransport );
+				return false;
+			} else if ( seekingTransport ) {
+				return !( selected = dataTypeOrTransport );
+			}
+		} );
+		return selected;
+	}
+
+	return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" );
+}
+
+// A special extend for ajax options
+// that takes "flat" options (not to be deep extended)
+// Fixes #9887
+function ajaxExtend( target, src ) {
+	var deep, key,
+		flatOptions = jQuery.ajaxSettings.flatOptions || {};
+
+	for ( key in src ) {
+		if ( src[ key ] !== undefined ) {
+			( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ];
+		}
+	}
+	if ( deep ) {
+		jQuery.extend( true, target, deep );
+	}
+
+	return target;
+}
+
+/* Handles responses to an ajax request:
+ * - finds the right dataType (mediates between content-type and expected dataType)
+ * - returns the corresponding response
+ */
+function ajaxHandleResponses( s, jqXHR, responses ) {
+	var firstDataType, ct, finalDataType, type,
+		contents = s.contents,
+		dataTypes = s.dataTypes;
+
+	// Remove auto dataType and get content-type in the process
+	while ( dataTypes[ 0 ] === "*" ) {
+		dataTypes.shift();
+		if ( ct === undefined ) {
+			ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" );
+		}
+	}
+
+	// Check if we're dealing with a known content-type
+	if ( ct ) {
+		for ( type in contents ) {
+			if ( contents[ type ] && contents[ type ].test( ct ) ) {
+				dataTypes.unshift( type );
+				break;
+			}
+		}
+	}
+
+	// Check to see if we have a response for the expected dataType
+	if ( dataTypes[ 0 ] in responses ) {
+		finalDataType = dataTypes[ 0 ];
+	} else {
+
+		// Try convertible dataTypes
+		for ( type in responses ) {
+			if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) {
+				finalDataType = type;
+				break;
+			}
+			if ( !firstDataType ) {
+				firstDataType = type;
+			}
+		}
+
+		// Or just use first one
+		finalDataType = finalDataType || firstDataType;
+	}
+
+	// If we found a dataType
+	// We add the dataType to the list if needed
+	// and return the corresponding response
+	if ( finalDataType ) {
+		if ( finalDataType !== dataTypes[ 0 ] ) {
+			dataTypes.unshift( finalDataType );
+		}
+		return responses[ finalDataType ];
+	}
+}
+
+/* Chain conversions given the request and the original response
+ * Also sets the responseXXX fields on the jqXHR instance
+ */
+function ajaxConvert( s, response, jqXHR, isSuccess ) {
+	var conv2, current, conv, tmp, prev,
+		converters = {},
+
+		// Work with a copy of dataTypes in case we need to modify it for conversion
+		dataTypes = s.dataTypes.slice();
+
+	// Create converters map with lowercased keys
+	if ( dataTypes[ 1 ] ) {
+		for ( conv in s.converters ) {
+			converters[ conv.toLowerCase() ] = s.converters[ conv ];
+		}
+	}
+
+	current = dataTypes.shift();
+
+	// Convert to each sequential dataType
+	while ( current ) {
+
+		if ( s.responseFields[ current ] ) {
+			jqXHR[ s.responseFields[ current ] ] = response;
+		}
+
+		// Apply the dataFilter if provided
+		if ( !prev && isSuccess && s.dataFilter ) {
+			response = s.dataFilter( response, s.dataType );
+		}
+
+		prev = current;
+		current = dataTypes.shift();
+
+		if ( current ) {
+
+			// There's only work to do if current dataType is non-auto
+			if ( current === "*" ) {
+
+				current = prev;
+
+			// Convert response if prev dataType is non-auto and differs from current
+			} else if ( prev !== "*" && prev !== current ) {
+
+				// Seek a direct converter
+				conv = converters[ prev + " " + current ] || converters[ "* " + current ];
+
+				// If none found, seek a pair
+				if ( !conv ) {
+					for ( conv2 in converters ) {
+
+						// If conv2 outputs current
+						tmp = conv2.split( " " );
+						if ( tmp[ 1 ] === current ) {
+
+							// If prev can be converted to accepted input
+							conv = converters[ prev + " " + tmp[ 0 ] ] ||
+								converters[ "* " + tmp[ 0 ] ];
+							if ( conv ) {
+
+								// Condense equivalence converters
+								if ( conv === true ) {
+									conv = converters[ conv2 ];
+
+								// Otherwise, insert the intermediate dataType
+								} else if ( converters[ conv2 ] !== true ) {
+									current = tmp[ 0 ];
+									dataTypes.unshift( tmp[ 1 ] );
+								}
+								break;
+							}
+						}
+					}
+				}
+
+				// Apply converter (if not an equivalence)
+				if ( conv !== true ) {
+
+					// Unless errors are allowed to bubble, catch and return them
+					if ( conv && s[ "throws" ] ) { // jscs:ignore requireDotNotation
+						response = conv( response );
+					} else {
+						try {
+							response = conv( response );
+						} catch ( e ) {
+							return {
+								state: "parsererror",
+								error: conv ? e : "No conversion from " + prev + " to " + current
+							};
+						}
+					}
+				}
+			}
+		}
+	}
+
+	return { state: "success", data: response };
+}
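+
+// Usage sketch: registering a converter lets ajaxConvert chain to a custom
+// dataType automatically (the "mycsv" name and the URL are assumed):
+//
+//     jQuery.ajaxSetup( {
+//         converters: {
+//             "text mycsv": function( text ) { return text.split( "," ); }
+//         }
+//     } );
+//     jQuery.ajax( { url: "/data.csv", dataType: "mycsv" } );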
+
+jQuery.extend( {
+
+	// Counter for holding the number of active queries
+	active: 0,
+
+	// Last-Modified header cache for next request
+	lastModified: {},
+	etag: {},
+
+	ajaxSettings: {
+		url: ajaxLocation,
+		type: "GET",
+		isLocal: rlocalProtocol.test( ajaxLocParts[ 1 ] ),
+		global: true,
+		processData: true,
+		async: true,
+		contentType: "application/x-www-form-urlencoded; charset=UTF-8",
+		/*
+		timeout: 0,
+		data: null,
+		dataType: null,
+		username: null,
+		password: null,
+		cache: null,
+		throws: false,
+		traditional: false,
+		headers: {},
+		*/
+
+		accepts: {
+			"*": allTypes,
+			text: "text/plain",
+			html: "text/html",
+			xml: "application/xml, text/xml",
+			json: "application/json, text/javascript"
+		},
+
+		contents: {
+			xml: /\bxml\b/,
+			html: /\bhtml/,
+			json: /\bjson\b/
+		},
+
+		responseFields: {
+			xml: "responseXML",
+			text: "responseText",
+			json: "responseJSON"
+		},
+
+		// Data converters
+		// Keys separate source (or catchall "*") and destination types with a single space
+		converters: {
+
+			// Convert anything to text
+			"* text": String,
+
+			// Text to html (true = no transformation)
+			"text html": true,
+
+			// Evaluate text as a json expression
+			"text json": jQuery.parseJSON,
+
+			// Parse text as xml
+			"text xml": jQuery.parseXML
+		},
+
+		// For options that shouldn't be deep extended:
+		// you can add your own custom options here if
+		// and when you create one that shouldn't be
+		// deep extended (see ajaxExtend)
+		flatOptions: {
+			url: true,
+			context: true
+		}
+	},
+
+	// Creates a full-fledged settings object into target
+	// with both ajaxSettings and settings fields.
+	// If target is omitted, writes into ajaxSettings.
+	ajaxSetup: function( target, settings ) {
+		return settings ?
+
+			// Building a settings object
+			ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) :
+
+			// Extending ajaxSettings
+			ajaxExtend( jQuery.ajaxSettings, target );
+	},
+
+	ajaxPrefilter: addToPrefiltersOrTransports( prefilters ),
+	ajaxTransport: addToPrefiltersOrTransports( transports ),
+
+	// Main method
+	ajax: function( url, options ) {
+
+		// If url is an object, simulate pre-1.5 signature
+		if ( typeof url === "object" ) {
+			options = url;
+			url = undefined;
+		}
+
+		// Force options to be an object
+		options = options || {};
+
+		var
+
+			// Cross-domain detection vars
+			parts,
+
+			// Loop variable
+			i,
+
+			// URL without anti-cache param
+			cacheURL,
+
+			// Response headers as string
+			responseHeadersString,
+
+			// timeout handle
+			timeoutTimer,
+
+			// To know if global events are to be dispatched
+			fireGlobals,
+
+			transport,
+
+			// Response headers
+			responseHeaders,
+
+			// Create the final options object
+			s = jQuery.ajaxSetup( {}, options ),
+
+			// Callbacks context
+			callbackContext = s.context || s,
+
+			// Context for global events is callbackContext if it is a DOM node or jQuery collection
+			globalEventContext = s.context &&
+				( callbackContext.nodeType || callbackContext.jquery ) ?
+					jQuery( callbackContext ) :
+					jQuery.event,
+
+			// Deferreds
+			deferred = jQuery.Deferred(),
+			completeDeferred = jQuery.Callbacks( "once memory" ),
+
+			// Status-dependent callbacks
+			statusCode = s.statusCode || {},
+
+			// Headers (they are sent all at once)
+			requestHeaders = {},
+			requestHeadersNames = {},
+
+			// The jqXHR state
+			state = 0,
+
+			// Default abort message
+			strAbort = "canceled",
+
+			// Fake xhr
+			jqXHR = {
+				readyState: 0,
+
+				// Builds headers hashtable if needed
+				getResponseHeader: function( key ) {
+					var match;
+					if ( state === 2 ) {
+						if ( !responseHeaders ) {
+							responseHeaders = {};
+							while ( ( match = rheaders.exec( responseHeadersString ) ) ) {
+								responseHeaders[ match[ 1 ].toLowerCase() ] = match[ 2 ];
+							}
+						}
+						match = responseHeaders[ key.toLowerCase() ];
+					}
+					return match == null ? null : match;
+				},
+
+				// Raw string
+				getAllResponseHeaders: function() {
+					return state === 2 ? responseHeadersString : null;
+				},
+
+				// Caches the header
+				setRequestHeader: function( name, value ) {
+					var lname = name.toLowerCase();
+					if ( !state ) {
+						name = requestHeadersNames[ lname ] = requestHeadersNames[ lname ] || name;
+						requestHeaders[ name ] = value;
+					}
+					return this;
+				},
+
+				// Overrides response content-type header
+				overrideMimeType: function( type ) {
+					if ( !state ) {
+						s.mimeType = type;
+					}
+					return this;
+				},
+
+				// Status-dependent callbacks
+				statusCode: function( map ) {
+					var code;
+					if ( map ) {
+						if ( state < 2 ) {
+							for ( code in map ) {
+
+								// Lazy-add the new callback in a way that preserves old ones
+								statusCode[ code ] = [ statusCode[ code ], map[ code ] ];
+							}
+						} else {
+
+							// Execute the appropriate callbacks
+							jqXHR.always( map[ jqXHR.status ] );
+						}
+					}
+					return this;
+				},
+
+				// Cancel the request
+				abort: function( statusText ) {
+					var finalText = statusText || strAbort;
+					if ( transport ) {
+						transport.abort( finalText );
+					}
+					done( 0, finalText );
+					return this;
+				}
+			};
+
+		// Attach deferreds
+		deferred.promise( jqXHR ).complete = completeDeferred.add;
+		jqXHR.success = jqXHR.done;
+		jqXHR.error = jqXHR.fail;
+
+		// Remove hash character (#7531: and string promotion)
+		// Add protocol if not provided (#5866: IE7 issue with protocol-less urls)
+		// Handle falsy url in the settings object (#10093: consistency with old signature)
+		// We also use the url parameter if available
+		s.url = ( ( url || s.url || ajaxLocation ) + "" )
+			.replace( rhash, "" )
+			.replace( rprotocol, ajaxLocParts[ 1 ] + "//" );
+
+		// Alias method option to type as per ticket #12004
+		s.type = options.method || options.type || s.method || s.type;
+
+		// Extract dataTypes list
+		s.dataTypes = jQuery.trim( s.dataType || "*" ).toLowerCase().match( rnotwhite ) || [ "" ];
+
+		// A cross-domain request is in order when we have a protocol:host:port mismatch
+		if ( s.crossDomain == null ) {
+			parts = rurl.exec( s.url.toLowerCase() );
+			s.crossDomain = !!( parts &&
+				( parts[ 1 ] !== ajaxLocParts[ 1 ] || parts[ 2 ] !== ajaxLocParts[ 2 ] ||
+					( parts[ 3 ] || ( parts[ 1 ] === "http:" ? "80" : "443" ) ) !==
+						( ajaxLocParts[ 3 ] || ( ajaxLocParts[ 1 ] === "http:" ? "80" : "443" ) ) )
+			);
+		}
+
+		// Convert data if not already a string
+		if ( s.data && s.processData && typeof s.data !== "string" ) {
+			s.data = jQuery.param( s.data, s.traditional );
+		}
+
+		// Apply prefilters
+		inspectPrefiltersOrTransports( prefilters, s, options, jqXHR );
+
+		// If request was aborted inside a prefilter, stop there
+		if ( state === 2 ) {
+			return jqXHR;
+		}
+
+		// We can fire global events as of now if asked to
+		// Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118)
+		fireGlobals = jQuery.event && s.global;
+
+		// Watch for a new set of requests
+		if ( fireGlobals && jQuery.active++ === 0 ) {
+			jQuery.event.trigger( "ajaxStart" );
+		}
+
+		// Uppercase the type
+		s.type = s.type.toUpperCase();
+
+		// Determine if request has content
+		s.hasContent = !rnoContent.test( s.type );
+
+		// Save the URL in case we're toying with the If-Modified-Since
+		// and/or If-None-Match header later on
+		cacheURL = s.url;
+
+		// More options handling for requests with no content
+		if ( !s.hasContent ) {
+
+			// If data is available, append data to url
+			if ( s.data ) {
+				cacheURL = ( s.url += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data );
+
+				// #9682: remove data so that it's not used in an eventual retry
+				delete s.data;
+			}
+
+			// Add anti-cache in url if needed
+			if ( s.cache === false ) {
+				s.url = rts.test( cacheURL ) ?
+
+					// If there is already a '_' parameter, set its value
+					cacheURL.replace( rts, "$1_=" + nonce++ ) :
+
+					// Otherwise add one to the end
+					cacheURL + ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + nonce++;
+			}
+		}
+
+		// Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.
+		if ( s.ifModified ) {
+			if ( jQuery.lastModified[ cacheURL ] ) {
+				jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] );
+			}
+			if ( jQuery.etag[ cacheURL ] ) {
+				jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] );
+			}
+		}
+
+		// Set the correct header, if data is being sent
+		if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) {
+			jqXHR.setRequestHeader( "Content-Type", s.contentType );
+		}
+
+		// Set the Accepts header for the server, depending on the dataType
+		jqXHR.setRequestHeader(
+			"Accept",
+			s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ?
+				s.accepts[ s.dataTypes[ 0 ] ] +
+					( s.dataTypes[ 0 ] !== "*" ? ", " + allTypes + "; q=0.01" : "" ) :
+				s.accepts[ "*" ]
+		);
+
+		// Check for headers option
+		for ( i in s.headers ) {
+			jqXHR.setRequestHeader( i, s.headers[ i ] );
+		}
+
+		// Allow custom headers/mimetypes and early abort
+		if ( s.beforeSend &&
+			( s.beforeSend.call( callbackContext, jqXHR, s ) === false || state === 2 ) ) {
+
+			// Abort if not done already and return
+			return jqXHR.abort();
+		}
+
+		// aborting is no longer a cancellation
+		strAbort = "abort";
+
+		// Install callbacks on deferreds
+		for ( i in { success: 1, error: 1, complete: 1 } ) {
+			jqXHR[ i ]( s[ i ] );
+		}
+
+		// Get transport
+		transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR );
+
+		// If no transport, we auto-abort
+		if ( !transport ) {
+			done( -1, "No Transport" );
+		} else {
+			jqXHR.readyState = 1;
+
+			// Send global event
+			if ( fireGlobals ) {
+				globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] );
+			}
+
+			// If request was aborted inside ajaxSend, stop there
+			if ( state === 2 ) {
+				return jqXHR;
+			}
+
+			// Timeout
+			if ( s.async && s.timeout > 0 ) {
+				timeoutTimer = window.setTimeout( function() {
+					jqXHR.abort( "timeout" );
+				}, s.timeout );
+			}
+
+			try {
+				state = 1;
+				transport.send( requestHeaders, done );
+			} catch ( e ) {
+
+				// Propagate exception as error if not done
+				if ( state < 2 ) {
+					done( -1, e );
+
+				// Simply rethrow otherwise
+				} else {
+					throw e;
+				}
+			}
+		}
+
+		// Callback for when everything is done
+		function done( status, nativeStatusText, responses, headers ) {
+			var isSuccess, success, error, response, modified,
+				statusText = nativeStatusText;
+
+			// Called once
+			if ( state === 2 ) {
+				return;
+			}
+
+			// State is "done" now
+			state = 2;
+
+			// Clear timeout if it exists
+			if ( timeoutTimer ) {
+				window.clearTimeout( timeoutTimer );
+			}
+
+			// Dereference transport for early garbage collection
+			// (no matter how long the jqXHR object will be used)
+			transport = undefined;
+
+			// Cache response headers
+			responseHeadersString = headers || "";
+
+			// Set readyState
+			jqXHR.readyState = status > 0 ? 4 : 0;
+
+			// Determine if successful
+			isSuccess = status >= 200 && status < 300 || status === 304;
+
+			// Get response data
+			if ( responses ) {
+				response = ajaxHandleResponses( s, jqXHR, responses );
+			}
+
+			// Convert no matter what (that way responseXXX fields are always set)
+			response = ajaxConvert( s, response, jqXHR, isSuccess );
+
+			// If successful, handle type chaining
+			if ( isSuccess ) {
+
+				// Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.
+				if ( s.ifModified ) {
+					modified = jqXHR.getResponseHeader( "Last-Modified" );
+					if ( modified ) {
+						jQuery.lastModified[ cacheURL ] = modified;
+					}
+					modified = jqXHR.getResponseHeader( "etag" );
+					if ( modified ) {
+						jQuery.etag[ cacheURL ] = modified;
+					}
+				}
+
+				// if no content
+				if ( status === 204 || s.type === "HEAD" ) {
+					statusText = "nocontent";
+
+				// if not modified
+				} else if ( status === 304 ) {
+					statusText = "notmodified";
+
+				// If we have data, let's convert it
+				} else {
+					statusText = response.state;
+					success = response.data;
+					error = response.error;
+					isSuccess = !error;
+				}
+			} else {
+
+				// We extract error from statusText
+				// then normalize statusText and status for non-aborts
+				error = statusText;
+				if ( status || !statusText ) {
+					statusText = "error";
+					if ( status < 0 ) {
+						status = 0;
+					}
+				}
+			}
+
+			// Set data for the fake xhr object
+			jqXHR.status = status;
+			jqXHR.statusText = ( nativeStatusText || statusText ) + "";
+
+			// Success/Error
+			if ( isSuccess ) {
+				deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] );
+			} else {
+				deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] );
+			}
+
+			// Status-dependent callbacks
+			jqXHR.statusCode( statusCode );
+			statusCode = undefined;
+
+			if ( fireGlobals ) {
+				globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError",
+					[ jqXHR, s, isSuccess ? success : error ] );
+			}
+
+			// Complete
+			completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] );
+
+			if ( fireGlobals ) {
+				globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] );
+
+				// Handle the global AJAX counter
+				if ( !( --jQuery.active ) ) {
+					jQuery.event.trigger( "ajaxStop" );
+				}
+			}
+		}
+
+		return jqXHR;
+	},
+
+	getJSON: function( url, data, callback ) {
+		return jQuery.get( url, data, callback, "json" );
+	},
+
+	getScript: function( url, callback ) {
+		return jQuery.get( url, undefined, callback, "script" );
+	}
+} );
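+
+// Usage sketch for jQuery.ajax (URL, data and handlers are assumed):
+//
+//     jQuery.ajax( {
+//         url: "/api/items",
+//         type: "POST",
+//         data: { name: "test" },
+//         dataType: "json"
+//     } )
+//     .done( function( data, statusText, jqXHR ) { /* success */ } )
+//     .fail( function( jqXHR, statusText, error ) { /* error */ } );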
+
+jQuery.each( [ "get", "post" ], function( i, method ) {
+	jQuery[ method ] = function( url, data, callback, type ) {
+
+		// shift arguments if data argument was omitted
+		if ( jQuery.isFunction( data ) ) {
+			type = type || callback;
+			callback = data;
+			data = undefined;
+		}
+
+		// The url can be an options object (which then must have .url)
+		return jQuery.ajax( jQuery.extend( {
+			url: url,
+			type: method,
+			dataType: type,
+			data: data,
+			success: callback
+		}, jQuery.isPlainObject( url ) && url ) );
+	};
+} );
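+
+// Usage sketch for the shorthands above (URLs and callback are assumed):
+//
+//     jQuery.get( "/api/items", { page: 2 }, onDone, "json" );
+//     jQuery.post( "/api/items", { name: "test" } );   // same as jQuery.ajax with type "post"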
+
+
+jQuery._evalUrl = function( url ) {
+	return jQuery.ajax( {
+		url: url,
+
+		// Make this explicit, since user can override this through ajaxSetup (#11264)
+		type: "GET",
+		dataType: "script",
+		cache: true,
+		async: false,
+		global: false,
+		"throws": true
+	} );
+};
+
+
+jQuery.fn.extend( {
+	wrapAll: function( html ) {
+		if ( jQuery.isFunction( html ) ) {
+			return this.each( function( i ) {
+				jQuery( this ).wrapAll( html.call( this, i ) );
+			} );
+		}
+
+		if ( this[ 0 ] ) {
+
+			// The elements to wrap the target around
+			var wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true );
+
+			if ( this[ 0 ].parentNode ) {
+				wrap.insertBefore( this[ 0 ] );
+			}
+
+			wrap.map( function() {
+				var elem = this;
+
+				while ( elem.firstChild && elem.firstChild.nodeType === 1 ) {
+					elem = elem.firstChild;
+				}
+
+				return elem;
+			} ).append( this );
+		}
+
+		return this;
+	},
+
+	wrapInner: function( html ) {
+		if ( jQuery.isFunction( html ) ) {
+			return this.each( function( i ) {
+				jQuery( this ).wrapInner( html.call( this, i ) );
+			} );
+		}
+
+		return this.each( function() {
+			var self = jQuery( this ),
+				contents = self.contents();
+
+			if ( contents.length ) {
+				contents.wrapAll( html );
+
+			} else {
+				self.append( html );
+			}
+		} );
+	},
+
+	wrap: function( html ) {
+		var isFunction = jQuery.isFunction( html );
+
+		return this.each( function( i ) {
+			jQuery( this ).wrapAll( isFunction ? html.call( this, i ) : html );
+		} );
+	},
+
+	unwrap: function() {
+		return this.parent().each( function() {
+			if ( !jQuery.nodeName( this, "body" ) ) {
+				jQuery( this ).replaceWith( this.childNodes );
+			}
+		} ).end();
+	}
+} );
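+
+// Usage sketch for the wrapping helpers above (".item" is assumed):
+//
+//     jQuery( ".item" ).wrap( "<div class='box'></div>" );      // one wrapper per element
+//     jQuery( ".item" ).wrapAll( "<div class='box'></div>" );   // one wrapper around all of them
+//     jQuery( ".item" ).wrapInner( "<span></span>" );           // wrap each element's contents
+//     jQuery( ".item" ).unwrap();                               // remove the parent again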
+
+
+function getDisplay( elem ) {
+	return elem.style && elem.style.display || jQuery.css( elem, "display" );
+}
+
+function filterHidden( elem ) {
+
+	// Disconnected elements are considered hidden
+	if ( !jQuery.contains( elem.ownerDocument || document, elem ) ) {
+		return true;
+	}
+	while ( elem && elem.nodeType === 1 ) {
+		if ( getDisplay( elem ) === "none" || elem.type === "hidden" ) {
+			return true;
+		}
+		elem = elem.parentNode;
+	}
+	return false;
+}
+
+jQuery.expr.filters.hidden = function( elem ) {
+
+	// Support: Opera <= 12.12
+	// Opera reports offsetWidths and offsetHeights less than zero on some elements
+	return support.reliableHiddenOffsets() ?
+		( elem.offsetWidth <= 0 && elem.offsetHeight <= 0 &&
+			!elem.getClientRects().length ) :
+			filterHidden( elem );
+};
+
+jQuery.expr.filters.visible = function( elem ) {
+	return !jQuery.expr.filters.hidden( elem );
+};
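+
+// Usage sketch: these filters back the :hidden/:visible selectors, e.g.
+//
+//     jQuery( ".item:visible" ).length;
+//     jQuery( "input:hidden" );   // matches display:none and type="hidden" inputs alike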
+
+
+
+
+var r20 = /%20/g,
+	rbracket = /\[\]$/,
+	rCRLF = /\r?\n/g,
+	rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i,
+	rsubmittable = /^(?:input|select|textarea|keygen)/i;
+
+function buildParams( prefix, obj, traditional, add ) {
+	var name;
+
+	if ( jQuery.isArray( obj ) ) {
+
+		// Serialize array item.
+		jQuery.each( obj, function( i, v ) {
+			if ( traditional || rbracket.test( prefix ) ) {
+
+				// Treat each array item as a scalar.
+				add( prefix, v );
+
+			} else {
+
+				// Item is non-scalar (array or object), encode its numeric index.
+				buildParams(
+					prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]",
+					v,
+					traditional,
+					add
+				);
+			}
+		} );
+
+	} else if ( !traditional && jQuery.type( obj ) === "object" ) {
+
+		// Serialize object item.
+		for ( name in obj ) {
+			buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add );
+		}
+
+	} else {
+
+		// Serialize scalar item.
+		add( prefix, obj );
+	}
+}
+
+// Serialize an array of form elements or a set of
+// key/values into a query string
+jQuery.param = function( a, traditional ) {
+	var prefix,
+		s = [],
+		add = function( key, value ) {
+
+			// If value is a function, invoke it and return its value
+			value = jQuery.isFunction( value ) ? value() : ( value == null ? "" : value );
+			s[ s.length ] = encodeURIComponent( key ) + "=" + encodeURIComponent( value );
+		};
+
+	// Set traditional to true for jQuery <= 1.3.2 behavior.
+	if ( traditional === undefined ) {
+		traditional = jQuery.ajaxSettings && jQuery.ajaxSettings.traditional;
+	}
+
+	// If an array was passed in, assume that it is an array of form elements.
+	if ( jQuery.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) {
+
+		// Serialize the form elements
+		jQuery.each( a, function() {
+			add( this.name, this.value );
+		} );
+
+	} else {
+
+		// If traditional, encode the "old" way (the way 1.3.2 or older
+		// did it), otherwise encode params recursively.
+		for ( prefix in a ) {
+			buildParams( prefix, a[ prefix ], traditional, add );
+		}
+	}
+
+	// Return the resulting serialization
+	return s.join( "&" ).replace( r20, "+" );
+};
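+
+// Usage sketch:
+//
+//     jQuery.param( { a: [ 1, 2 ], b: { c: 3 } } );
+//     // => "a%5B%5D=1&a%5B%5D=2&b%5Bc%5D=3"   (i.e. a[]=1&a[]=2&b[c]=3)
+//     jQuery.param( { a: [ 1, 2 ] }, true );
+//     // => "a=1&a=2"                          (traditional: no brackets, no recursion)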
+
+jQuery.fn.extend( {
+	serialize: function() {
+		return jQuery.param( this.serializeArray() );
+	},
+	serializeArray: function() {
+		return this.map( function() {
+
+			// Can add propHook for "elements" to filter or add form elements
+			var elements = jQuery.prop( this, "elements" );
+			return elements ? jQuery.makeArray( elements ) : this;
+		} )
+		.filter( function() {
+			var type = this.type;
+
+			// Use .is(":disabled") so that fieldset[disabled] works
+			return this.name && !jQuery( this ).is( ":disabled" ) &&
+				rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) &&
+				( this.checked || !rcheckableType.test( type ) );
+		} )
+		.map( function( i, elem ) {
+			var val = jQuery( this ).val();
+
+			return val == null ?
+				null :
+				jQuery.isArray( val ) ?
+					jQuery.map( val, function( val ) {
+						return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) };
+					} ) :
+					{ name: elem.name, value: val.replace( rCRLF, "\r\n" ) };
+		} ).get();
+	}
+} );
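+
+// Usage sketch ("#form" is assumed; output depends on its successful controls):
+//
+//     jQuery( "#form" ).serialize();        // e.g. "name=test&color=blue"
+//     jQuery( "#form" ).serializeArray();   // e.g. [ { name: "name", value: "test" }, ... ]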
+
+
+// Create the request object
+// (This is still attached to ajaxSettings for backward compatibility)
+jQuery.ajaxSettings.xhr = window.ActiveXObject !== undefined ?
+
+	// Support: IE6-IE8
+	function() {
+
+		// XHR cannot access local files, always use ActiveX for that case
+		if ( this.isLocal ) {
+			return createActiveXHR();
+		}
+
+		// Support: IE 9-11
+		// IE seems to error on cross-domain PATCH requests when ActiveX XHR
+		// is used. In IE 9+ always use the native XHR.
+		// Note: this condition won't catch Edge as it doesn't define
+		// document.documentMode but it also doesn't support ActiveX so it won't
+		// reach this code.
+		if ( document.documentMode > 8 ) {
+			return createStandardXHR();
+		}
+
+		// Support: IE<9
+		// oldIE XHR does not support non-RFC2616 methods (#13240)
+		// See http://msdn.microsoft.com/en-us/library/ie/ms536648(v=vs.85).aspx
+		// and http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9
+		// Note that this checks for six methods instead of eight,
+		// since IE also does not support "trace" and "connect"
+		return /^(get|post|head|put|delete|options)$/i.test( this.type ) &&
+			createStandardXHR() || createActiveXHR();
+	} :
+
+	// For all other browsers, use the standard XMLHttpRequest object
+	createStandardXHR;
+
+var xhrId = 0,
+	xhrCallbacks = {},
+	xhrSupported = jQuery.ajaxSettings.xhr();
+
+// Support: IE<10
+// Open requests must be manually aborted on unload (#5280)
+// See https://support.microsoft.com/kb/2856746 for more info
+if ( window.attachEvent ) {
+	window.attachEvent( "onunload", function() {
+		for ( var key in xhrCallbacks ) {
+			xhrCallbacks[ key ]( undefined, true );
+		}
+	} );
+}
+
+// Determine support properties
+support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported );
+xhrSupported = support.ajax = !!xhrSupported;
+
+// Create transport if the browser can provide an xhr
+if ( xhrSupported ) {
+
+	jQuery.ajaxTransport( function( options ) {
+
+		// Cross domain only allowed if supported through XMLHttpRequest
+		if ( !options.crossDomain || support.cors ) {
+
+			var callback;
+
+			return {
+				send: function( headers, complete ) {
+					var i,
+						xhr = options.xhr(),
+						id = ++xhrId;
+
+					// Open the socket
+					xhr.open(
+						options.type,
+						options.url,
+						options.async,
+						options.username,
+						options.password
+					);
+
+					// Apply custom fields if provided
+					if ( options.xhrFields ) {
+						for ( i in options.xhrFields ) {
+							xhr[ i ] = options.xhrFields[ i ];
+						}
+					}
+
+					// Override mime type if needed
+					if ( options.mimeType && xhr.overrideMimeType ) {
+						xhr.overrideMimeType( options.mimeType );
+					}
+
+					// X-Requested-With header
+					// For cross-domain requests, seeing as conditions for a preflight are
+					// akin to a jigsaw puzzle, we simply never set it to be sure.
+					// (it can always be set on a per-request basis or even using ajaxSetup)
+					// For same-domain requests, won't change header if already provided.
+					if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) {
+						headers[ "X-Requested-With" ] = "XMLHttpRequest";
+					}
+
+					// Set headers
+					for ( i in headers ) {
+
+						// Support: IE<9
+						// IE's ActiveXObject throws a 'Type Mismatch' exception when setting
+						// request header to a null-value.
+						//
+						// To keep consistent with other XHR implementations, cast the value
+						// to string and ignore `undefined`.
+						if ( headers[ i ] !== undefined ) {
+							xhr.setRequestHeader( i, headers[ i ] + "" );
+						}
+					}
+
+					// Do send the request
+					// This may raise an exception which is actually
+					// handled in jQuery.ajax (so no try/catch here)
+					xhr.send( ( options.hasContent && options.data ) || null );
+
+					// Listener
+					callback = function( _, isAbort ) {
+						var status, statusText, responses;
+
+						// Was never called and is aborted or complete
+						if ( callback && ( isAbort || xhr.readyState === 4 ) ) {
+
+							// Clean up
+							delete xhrCallbacks[ id ];
+							callback = undefined;
+							xhr.onreadystatechange = jQuery.noop;
+
+							// Abort manually if needed
+							if ( isAbort ) {
+								if ( xhr.readyState !== 4 ) {
+									xhr.abort();
+								}
+							} else {
+								responses = {};
+								status = xhr.status;
+
+								// Support: IE<10
+								// Accessing binary-data responseText throws an exception
+								// (#11426)
+								if ( typeof xhr.responseText === "string" ) {
+									responses.text = xhr.responseText;
+								}
+
+								// Firefox throws an exception when accessing
+								// statusText for faulty cross-domain requests
+								try {
+									statusText = xhr.statusText;
+								} catch ( e ) {
+
+									// We normalize with Webkit giving an empty statusText
+									statusText = "";
+								}
+
+								// Filter status for non-standard behaviors
+
+								// If the request is local and we have data: assume a success
+								// (success with no data won't get notified, that's the best we
+								// can do given current implementations)
+								if ( !status && options.isLocal && !options.crossDomain ) {
+									status = responses.text ? 200 : 404;
+
+								// IE - #1450: sometimes returns 1223 when it should be 204
+								} else if ( status === 1223 ) {
+									status = 204;
+								}
+							}
+						}
+
+						// Call complete if needed
+						if ( responses ) {
+							complete( status, statusText, responses, xhr.getAllResponseHeaders() );
+						}
+					};
+
+					// Decide how to fire the listener; the request itself
+					// was already sent above, so there is nothing left to throw here
+					if ( !options.async ) {
+
+						// If we're in sync mode we fire the callback
+						callback();
+					} else if ( xhr.readyState === 4 ) {
+
+						// (IE6 & IE7) if it's in cache and has been
+						// retrieved directly we need to fire the callback
+						window.setTimeout( callback );
+					} else {
+
+						// Register the callback on the list of active xhr
+						// callbacks so the unload handler can abort it
+						xhr.onreadystatechange = xhrCallbacks[ id ] = callback;
+					}
+				},
+
+				abort: function() {
+					if ( callback ) {
+						callback( undefined, true );
+					}
+				}
+			};
+		}
+	} );
+}
+
+// Functions to create xhrs
+function createStandardXHR() {
+	try {
+		return new window.XMLHttpRequest();
+	} catch ( e ) {}
+}
+
+function createActiveXHR() {
+	try {
+		return new window.ActiveXObject( "Microsoft.XMLHTTP" );
+	} catch ( e ) {}
+}
+
+
+
+
+// Install script dataType
+jQuery.ajaxSetup( {
+	accepts: {
+		script: "text/javascript, application/javascript, " +
+			"application/ecmascript, application/x-ecmascript"
+	},
+	contents: {
+		script: /\b(?:java|ecma)script\b/
+	},
+	converters: {
+		"text script": function( text ) {
+			jQuery.globalEval( text );
+			return text;
+		}
+	}
+} );
+
+// Handle cache's special case and global
+jQuery.ajaxPrefilter( "script", function( s ) {
+	if ( s.cache === undefined ) {
+		s.cache = false;
+	}
+	if ( s.crossDomain ) {
+		s.type = "GET";
+		s.global = false;
+	}
+} );
+
+// Bind script tag hack transport
+jQuery.ajaxTransport( "script", function( s ) {
+
+	// This transport only deals with cross domain requests
+	if ( s.crossDomain ) {
+
+		var script,
+			head = document.head || jQuery( "head" )[ 0 ] || document.documentElement;
+
+		return {
+
+			send: function( _, callback ) {
+
+				script = document.createElement( "script" );
+
+				script.async = true;
+
+				if ( s.scriptCharset ) {
+					script.charset = s.scriptCharset;
+				}
+
+				script.src = s.url;
+
+				// Attach handlers for all browsers
+				script.onload = script.onreadystatechange = function( _, isAbort ) {
+
+					if ( isAbort || !script.readyState || /loaded|complete/.test( script.readyState ) ) {
+
+						// Handle memory leak in IE
+						script.onload = script.onreadystatechange = null;
+
+						// Remove the script
+						if ( script.parentNode ) {
+							script.parentNode.removeChild( script );
+						}
+
+						// Dereference the script
+						script = null;
+
+						// Callback if not abort
+						if ( !isAbort ) {
+							callback( 200, "success" );
+						}
+					}
+				};
+
+				// Circumvent IE6 bugs with base elements (#2709 and #4378) by prepending
+				// Use native DOM manipulation to avoid our domManip AJAX trickery
+				head.insertBefore( script, head.firstChild );
+			},
+
+			abort: function() {
+				if ( script ) {
+					script.onload( undefined, true );
+				}
+			}
+		};
+	}
+} );
+
+
+
+
+var oldCallbacks = [],
+	rjsonp = /(=)\?(?=&|$)|\?\?/;
+
+// Default jsonp settings
+jQuery.ajaxSetup( {
+	jsonp: "callback",
+	jsonpCallback: function() {
+		var callback = oldCallbacks.pop() || ( jQuery.expando + "_" + ( nonce++ ) );
+		this[ callback ] = true;
+		return callback;
+	}
+} );
+
+// Detect, normalize options and install callbacks for jsonp requests
+jQuery.ajaxPrefilter( "json jsonp", function( s, originalSettings, jqXHR ) {
+
+	var callbackName, overwritten, responseContainer,
+		jsonProp = s.jsonp !== false && ( rjsonp.test( s.url ) ?
+			"url" :
+			typeof s.data === "string" &&
+				( s.contentType || "" )
+					.indexOf( "application/x-www-form-urlencoded" ) === 0 &&
+				rjsonp.test( s.data ) && "data"
+		);
+
+	// Handle iff the expected data type is "jsonp" or we have a parameter to set
+	if ( jsonProp || s.dataTypes[ 0 ] === "jsonp" ) {
+
+		// Get callback name, remembering preexisting value associated with it
+		callbackName = s.jsonpCallback = jQuery.isFunction( s.jsonpCallback ) ?
+			s.jsonpCallback() :
+			s.jsonpCallback;
+
+		// Insert callback into url or form data
+		if ( jsonProp ) {
+			s[ jsonProp ] = s[ jsonProp ].replace( rjsonp, "$1" + callbackName );
+		} else if ( s.jsonp !== false ) {
+			s.url += ( rquery.test( s.url ) ? "&" : "?" ) + s.jsonp + "=" + callbackName;
+		}
+
+		// Use data converter to retrieve json after script execution
+		s.converters[ "script json" ] = function() {
+			if ( !responseContainer ) {
+				jQuery.error( callbackName + " was not called" );
+			}
+			return responseContainer[ 0 ];
+		};
+
+		// force json dataType
+		s.dataTypes[ 0 ] = "json";
+
+		// Install callback
+		overwritten = window[ callbackName ];
+		window[ callbackName ] = function() {
+			responseContainer = arguments;
+		};
+
+		// Clean-up function (fires after converters)
+		jqXHR.always( function() {
+
+			// If previous value didn't exist - remove it
+			if ( overwritten === undefined ) {
+				jQuery( window ).removeProp( callbackName );
+
+			// Otherwise restore preexisting value
+			} else {
+				window[ callbackName ] = overwritten;
+			}
+
+			// Save back as free
+			if ( s[ callbackName ] ) {
+
+				// make sure that re-using the options doesn't mess things up
+				s.jsonpCallback = originalSettings.jsonpCallback;
+
+				// save the callback name for future use
+				oldCallbacks.push( callbackName );
+			}
+
+			// Call if it was a function and we have a response
+			if ( responseContainer && jQuery.isFunction( overwritten ) ) {
+				overwritten( responseContainer[ 0 ] );
+			}
+
+			responseContainer = overwritten = undefined;
+		} );
+
+		// Delegate to script
+		return "script";
+	}
+} );
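+
+// Usage sketch for a JSONP request (the URL is assumed; the window-level
+// callback is generated, installed and cleaned up automatically):
+//
+//     jQuery.ajax( {
+//         url: "http://example.com/api",
+//         dataType: "jsonp"
+//     } ).done( function( data ) { /* parsed JSON */ } );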
+
+
+
+
+// data: string of html
+// context (optional): If specified, the fragment will be created in this context,
+// defaults to document
+// keepScripts (optional): If true, will include scripts passed in the html string
+jQuery.parseHTML = function( data, context, keepScripts ) {
+	if ( !data || typeof data !== "string" ) {
+		return null;
+	}
+	if ( typeof context === "boolean" ) {
+		keepScripts = context;
+		context = false;
+	}
+	context = context || document;
+
+	var parsed = rsingleTag.exec( data ),
+		scripts = !keepScripts && [];
+
+	// Single tag
+	if ( parsed ) {
+		return [ context.createElement( parsed[ 1 ] ) ];
+	}
+
+	parsed = buildFragment( [ data ], context, scripts );
+
+	if ( scripts && scripts.length ) {
+		jQuery( scripts ).remove();
+	}
+
+	return jQuery.merge( [], parsed.childNodes );
+};
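+
+// Usage sketch (the second html string is assumed):
+//
+//     jQuery.parseHTML( "<p>one</p><p>two</p>" );   // => [ p, p ]; scripts are stripped
+//     jQuery.parseHTML( html, document, true );     // keepScripts: script tags survive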
+
+
+// Keep a copy of the old load method
+var _load = jQuery.fn.load;
+
+/**
+ * Load a url into a page
+ */
+jQuery.fn.load = function( url, params, callback ) {
+	if ( typeof url !== "string" && _load ) {
+		return _load.apply( this, arguments );
+	}
+
+	var selector, type, response,
+		self = this,
+		off = url.indexOf( " " );
+
+	if ( off > -1 ) {
+		selector = jQuery.trim( url.slice( off, url.length ) );
+		url = url.slice( 0, off );
+	}
+
+	// If it's a function
+	if ( jQuery.isFunction( params ) ) {
+
+		// We assume that it's the callback
+		callback = params;
+		params = undefined;
+
+	// Otherwise, build a param string
+	} else if ( params && typeof params === "object" ) {
+		type = "POST";
+	}
+
+	// If we have elements to modify, make the request
+	if ( self.length > 0 ) {
+		jQuery.ajax( {
+			url: url,
+
+			// If "type" variable is undefined, then "GET" method will be used.
+			// Make value of this field explicit since
+			// user can override it through ajaxSetup method
+			type: type || "GET",
+			dataType: "html",
+			data: params
+		} ).done( function( responseText ) {
+
+			// Save response for use in complete callback
+			response = arguments;
+
+			self.html( selector ?
+
+				// If a selector was specified, locate the right elements in a dummy div
+				// Exclude scripts to avoid IE 'Permission Denied' errors
+				jQuery( "<div>" ).append( jQuery.parseHTML( responseText ) ).find( selector ) :
+
+				// Otherwise use the full result
+				responseText );
+
+		// If the request succeeds, this function gets "data", "status", "jqXHR"
+		// but they are ignored because response was set above.
+		// If it fails, this function gets "jqXHR", "status", "error"
+		} ).always( callback && function( jqXHR, status ) {
+			self.each( function() {
+				callback.apply( this, response || [ jqXHR.responseText, status, jqXHR ] );
+			} );
+		} );
+	}
+
+	return this;
+};
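+
+// Usage sketch ("#panel", the URLs and the callback are assumed):
+//
+//     jQuery( "#panel" ).load( "/snippet.html" );             // GET, inject the full response
+//     jQuery( "#panel" ).load( "/page.html #content" );       // inject only the matched part
+//     jQuery( "#panel" ).load( "/save", { id: 7 }, onDone );  // object data switches to POST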
+
+
+
+
+// Attach a bunch of functions for handling common AJAX events
+jQuery.each( [
+	"ajaxStart",
+	"ajaxStop",
+	"ajaxComplete",
+	"ajaxError",
+	"ajaxSuccess",
+	"ajaxSend"
+], function( i, type ) {
+	jQuery.fn[ type ] = function( fn ) {
+		return this.on( type, fn );
+	};
+} );
+
+
+
+
+jQuery.expr.filters.animated = function( elem ) {
+	return jQuery.grep( jQuery.timers, function( fn ) {
+		return elem === fn.elem;
+	} ).length;
+};
+
+
+
+
+
+/**
+ * Gets a window from an element
+ */
+function getWindow( elem ) {
+	return jQuery.isWindow( elem ) ?
+		elem :
+		elem.nodeType === 9 ?
+			elem.defaultView || elem.parentWindow :
+			false;
+}
+
+jQuery.offset = {
+	setOffset: function( elem, options, i ) {
+		var curPosition, curLeft, curCSSTop, curTop, curOffset, curCSSLeft, calculatePosition,
+			position = jQuery.css( elem, "position" ),
+			curElem = jQuery( elem ),
+			props = {};
+
+		// Set position first, in case top/left are set even on a static elem
+		if ( position === "static" ) {
+			elem.style.position = "relative";
+		}
+
+		curOffset = curElem.offset();
+		curCSSTop = jQuery.css( elem, "top" );
+		curCSSLeft = jQuery.css( elem, "left" );
+		calculatePosition = ( position === "absolute" || position === "fixed" ) &&
+			jQuery.inArray( "auto", [ curCSSTop, curCSSLeft ] ) > -1;
+
+		// need to be able to calculate position if either top or left
+		// is auto and position is either absolute or fixed
+		if ( calculatePosition ) {
+			curPosition = curElem.position();
+			curTop = curPosition.top;
+			curLeft = curPosition.left;
+		} else {
+			curTop = parseFloat( curCSSTop ) || 0;
+			curLeft = parseFloat( curCSSLeft ) || 0;
+		}
+
+		if ( jQuery.isFunction( options ) ) {
+
+			// Use jQuery.extend here to allow modification of coordinates argument (gh-1848)
+			options = options.call( elem, i, jQuery.extend( {}, curOffset ) );
+		}
+
+		if ( options.top != null ) {
+			props.top = ( options.top - curOffset.top ) + curTop;
+		}
+		if ( options.left != null ) {
+			props.left = ( options.left - curOffset.left ) + curLeft;
+		}
+
+		if ( "using" in options ) {
+			options.using.call( elem, props );
+		} else {
+			curElem.css( props );
+		}
+	}
+};
+
+jQuery.fn.extend( {
+	offset: function( options ) {
+		if ( arguments.length ) {
+			return options === undefined ?
+				this :
+				this.each( function( i ) {
+					jQuery.offset.setOffset( this, options, i );
+				} );
+		}
+
+		var docElem, win,
+			box = { top: 0, left: 0 },
+			elem = this[ 0 ],
+			doc = elem && elem.ownerDocument;
+
+		if ( !doc ) {
+			return;
+		}
+
+		docElem = doc.documentElement;
+
+		// Make sure it's not a disconnected DOM node
+		if ( !jQuery.contains( docElem, elem ) ) {
+			return box;
+		}
+
+		// If we don't have gBCR, just use 0,0 rather than error
+		// BlackBerry 5, iOS 3 (original iPhone)
+		if ( typeof elem.getBoundingClientRect !== "undefined" ) {
+			box = elem.getBoundingClientRect();
+		}
+		win = getWindow( doc );
+		return {
+			top: box.top  + ( win.pageYOffset || docElem.scrollTop )  - ( docElem.clientTop  || 0 ),
+			left: box.left + ( win.pageXOffset || docElem.scrollLeft ) - ( docElem.clientLeft || 0 )
+		};
+	},
+
+	position: function() {
+		if ( !this[ 0 ] ) {
+			return;
+		}
+
+		var offsetParent, offset,
+			parentOffset = { top: 0, left: 0 },
+			elem = this[ 0 ];
+
+		// Fixed elements are offset from the window (parentOffset = { top: 0, left: 0 }),
+		// because the window is their only offset parent
+		if ( jQuery.css( elem, "position" ) === "fixed" ) {
+
+			// we assume that getBoundingClientRect is available when computed position is fixed
+			offset = elem.getBoundingClientRect();
+		} else {
+
+			// Get *real* offsetParent
+			offsetParent = this.offsetParent();
+
+			// Get correct offsets
+			offset = this.offset();
+			if ( !jQuery.nodeName( offsetParent[ 0 ], "html" ) ) {
+				parentOffset = offsetParent.offset();
+			}
+
+			// Add offsetParent borders
+			parentOffset.top  += jQuery.css( offsetParent[ 0 ], "borderTopWidth", true );
+			parentOffset.left += jQuery.css( offsetParent[ 0 ], "borderLeftWidth", true );
+		}
+
+		// Subtract parent offsets and element margins
+		// Note: when an element has margin: auto, its offsetLeft and marginLeft
+		// are the same in Safari, causing offset.left to incorrectly be 0
+		return {
+			top:  offset.top  - parentOffset.top - jQuery.css( elem, "marginTop", true ),
+			left: offset.left - parentOffset.left - jQuery.css( elem, "marginLeft", true )
+		};
+	},
+
+	offsetParent: function() {
+		return this.map( function() {
+			var offsetParent = this.offsetParent;
+
+			while ( offsetParent && ( !jQuery.nodeName( offsetParent, "html" ) &&
+				jQuery.css( offsetParent, "position" ) === "static" ) ) {
+				offsetParent = offsetParent.offsetParent;
+			}
+			return offsetParent || documentElement;
+		} );
+	}
+} );
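+
+// A minimal usage sketch (selector and values are illustrative, not from this
+// codebase): .offset() reads/writes document-relative coordinates, while
+// .position() is read-only and relative to the offset parent found above.
+//
+//     var $el = jQuery( ".panel" ).first();
+//     $el.offset();                         // e.g. { top: 120, left: 40 }
+//     $el.position();                       // offset minus parent offset, borders, margins
+//     $el.offset( { top: 10, left: 10 } );  // repositions via setOffset above
+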
+
+// Create scrollLeft and scrollTop methods
+jQuery.each( { scrollLeft: "pageXOffset", scrollTop: "pageYOffset" }, function( method, prop ) {
+	var top = /Y/.test( prop );
+
+	jQuery.fn[ method ] = function( val ) {
+		return access( this, function( elem, method, val ) {
+			var win = getWindow( elem );
+
+			if ( val === undefined ) {
+				return win ? ( prop in win ) ? win[ prop ] :
+					win.document.documentElement[ method ] :
+					elem[ method ];
+			}
+
+			if ( win ) {
+				win.scrollTo(
+					!top ? val : jQuery( win ).scrollLeft(),
+					top ? val : jQuery( win ).scrollTop()
+				);
+
+			} else {
+				elem[ method ] = val;
+			}
+		}, method, val, arguments.length, null );
+	};
+} );
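+
+// Usage sketch: for windows/documents the value maps to pageX/YOffset, for
+// ordinary elements to their scrollLeft/scrollTop properties.
+//
+//     jQuery( window ).scrollTop();       // read current vertical scroll
+//     jQuery( window ).scrollTop( 0 );    // scroll back to the top
+//     jQuery( ".log" ).scrollLeft( 50 );  // selector is illustrative
+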
+
+// Support: Safari<7-8+, Chrome<37-44+
+// Add the top/left cssHooks using jQuery.fn.position
+// Webkit bug: https://bugs.webkit.org/show_bug.cgi?id=29084
+// getComputedStyle returns percent when specified for top/left/bottom/right
+// rather than make the css module depend on the offset module, we just check for it here
+jQuery.each( [ "top", "left" ], function( i, prop ) {
+	jQuery.cssHooks[ prop ] = addGetHookIf( support.pixelPosition,
+		function( elem, computed ) {
+			if ( computed ) {
+				computed = curCSS( elem, prop );
+
+				// if curCSS returns percentage, fallback to offset
+				return rnumnonpx.test( computed ) ?
+					jQuery( elem ).position()[ prop ] + "px" :
+					computed;
+			}
+		}
+	);
+} );
+
+
+// Create innerHeight, innerWidth, height, width, outerHeight and outerWidth methods
+jQuery.each( { Height: "height", Width: "width" }, function( name, type ) {
+	jQuery.each( { padding: "inner" + name, content: type, "": "outer" + name },
+	function( defaultExtra, funcName ) {
+
+		// margin is only for outerHeight, outerWidth
+		jQuery.fn[ funcName ] = function( margin, value ) {
+			var chainable = arguments.length && ( defaultExtra || typeof margin !== "boolean" ),
+				extra = defaultExtra || ( margin === true || value === true ? "margin" : "border" );
+
+			return access( this, function( elem, type, value ) {
+				var doc;
+
+				if ( jQuery.isWindow( elem ) ) {
+
+					// As of 5/8/2012 this will yield incorrect results for Mobile Safari, but there
+					// isn't a whole lot we can do. See pull request at this URL for discussion:
+					// https://github.com/jquery/jquery/pull/764
+					return elem.document.documentElement[ "client" + name ];
+				}
+
+				// Get document width or height
+				if ( elem.nodeType === 9 ) {
+					doc = elem.documentElement;
+
+					// Either scroll[Width/Height] or offset[Width/Height] or client[Width/Height],
+					// whichever is greatest
+					// unfortunately, this causes bug #3838 in IE6/8 only,
+					// but there is currently no good, small way to fix it.
+					return Math.max(
+						elem.body[ "scroll" + name ], doc[ "scroll" + name ],
+						elem.body[ "offset" + name ], doc[ "offset" + name ],
+						doc[ "client" + name ]
+					);
+				}
+
+				return value === undefined ?
+
+					// Get width or height on the element, requesting but not forcing parseFloat
+					jQuery.css( elem, type, extra ) :
+
+					// Set width or height on the element
+					jQuery.style( elem, type, value, extra );
+			}, type, chainable ? margin : undefined, chainable, null );
+		};
+	} );
+} );
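+
+// Usage sketch of the generated box-model accessors (selector illustrative):
+//
+//     var $box = jQuery( ".box" );
+//     $box.width();                 // content box only
+//     $box.innerWidth();            // content + padding
+//     $box.outerWidth();            // content + padding + border
+//     $box.outerWidth( true );      // content + padding + border + margin
+//     jQuery( window ).height();    // viewport height via clientHeight
+//     jQuery( document ).height();  // largest of scroll/offset/client height
+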
+
+
+jQuery.fn.extend( {
+
+	bind: function( types, data, fn ) {
+		return this.on( types, null, data, fn );
+	},
+	unbind: function( types, fn ) {
+		return this.off( types, null, fn );
+	},
+
+	delegate: function( selector, types, data, fn ) {
+		return this.on( types, selector, data, fn );
+	},
+	undelegate: function( selector, types, fn ) {
+
+		// ( namespace ) or ( selector, types [, fn] )
+		return arguments.length === 1 ?
+			this.off( selector, "**" ) :
+			this.off( types, selector || "**", fn );
+	}
+} );
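+
+// These are thin aliases over .on()/.off(); each pair below is equivalent
+// (selectors illustrative):
+//
+//     $el.bind( "click", fn );             // $el.on( "click", fn );
+//     $list.delegate( "li", "click", fn ); // $list.on( "click", "li", fn );
+//     $list.undelegate( "li", "click" );   // $list.off( "click", "li" );
+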
+
+// The number of elements contained in the matched element set
+jQuery.fn.size = function() {
+	return this.length;
+};
+
+jQuery.fn.andSelf = jQuery.fn.addBack;
+
+
+
+
+// Register as a named AMD module, since jQuery can be concatenated with other
+// files that may use define, but not via a proper concatenation script that
+// understands anonymous AMD modules. A named AMD is safest and most robust
+// way to register. Lowercase jquery is used because AMD module names are
+// derived from file names, and jQuery is normally delivered in a lowercase
+// file name. Do this after creating the global so that if an AMD module wants
+// to call noConflict to hide this version of jQuery, it will work.
+
+// Note that for maximum portability, libraries that are not jQuery should
+// declare themselves as anonymous modules, and avoid setting a global if an
+// AMD loader is present. jQuery is a special case. For more information, see
+// https://github.com/jrburke/requirejs/wiki/Updating-existing-libraries#wiki-anon
+
+if ( typeof define === "function" && define.amd ) {
+	define( "jquery", [], function() {
+		return jQuery;
+	} );
+}
+
+
+
+var
+
+	// Map over jQuery in case of overwrite
+	_jQuery = window.jQuery,
+
+	// Map over the $ in case of overwrite
+	_$ = window.$;
+
+jQuery.noConflict = function( deep ) {
+	if ( window.$ === jQuery ) {
+		window.$ = _$;
+	}
+
+	if ( deep && window.jQuery === jQuery ) {
+		window.jQuery = _jQuery;
+	}
+
+	return jQuery;
+};
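+
+// Usage sketch: hand back the global aliases while keeping a local handle.
+//
+//     var jq = jQuery.noConflict();   // window.$ reverts to its prior value
+//     jq( ".item" ).hide();           // selector is illustrative
+//     // jQuery.noConflict( true ) would also release window.jQuery
+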
+
+// Expose jQuery and $ identifiers, even in
+// AMD (#7102#comment:10, https://github.com/jquery/jquery/pull/557)
+// and CommonJS for browser emulators (#13566)
+if ( !noGlobal ) {
+	window.jQuery = window.$ = jQuery;
+}
+
+return jQuery;
+}));
diff --git a/client/galaxy/scripts/libs/jquery/jquery.migrate.js b/client/galaxy/scripts/libs/jquery/jquery.migrate.js
new file mode 100644
index 0000000..e3538e9
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.migrate.js
@@ -0,0 +1,717 @@
+/*!
+ * jQuery Migrate - v1.4.0 - 2016-02-26
+ * Copyright jQuery Foundation and other contributors
+ */
+(function( jQuery, window, undefined ) {
+// See http://bugs.jquery.com/ticket/13335
+// "use strict";
+
+
+jQuery.migrateVersion = "1.4.0";
+
+
+var warnedAbout = {};
+
+// List of warnings already given; public read only
+jQuery.migrateWarnings = [];
+
+// Set to true to prevent console output; migrateWarnings still maintained
+// jQuery.migrateMute = false;
+
+// Show a message on the console so devs know we're active
+if ( window.console && window.console.log ) {
+	window.console.log( "JQMIGRATE: Migrate is installed" +
+		( jQuery.migrateMute ? "" : " with logging active" ) +
+		", version " + jQuery.migrateVersion );
+}
+
+// Set to false to disable traces that appear with warnings
+if ( jQuery.migrateTrace === undefined ) {
+	jQuery.migrateTrace = true;
+}
+
+// Forget any warnings we've already given; public
+jQuery.migrateReset = function() {
+	warnedAbout = {};
+	jQuery.migrateWarnings.length = 0;
+};
+
+function migrateWarn( msg ) {
+	var console = window.console;
+	if ( !warnedAbout[ msg ] ) {
+		warnedAbout[ msg ] = true;
+		jQuery.migrateWarnings.push( msg );
+		if ( console && console.warn && !jQuery.migrateMute ) {
+			console.warn( "JQMIGRATE: " + msg );
+			if ( jQuery.migrateTrace && console.trace ) {
+				console.trace();
+			}
+		}
+	}
+}
+
+function migrateWarnProp( obj, prop, value, msg ) {
+	if ( Object.defineProperty ) {
+		// On ES5 browsers (non-oldIE), warn if the code tries to get prop;
+		// allow property to be overwritten in case some other plugin wants it
+		try {
+			Object.defineProperty( obj, prop, {
+				configurable: true,
+				enumerable: true,
+				get: function() {
+					migrateWarn( msg );
+					return value;
+				},
+				set: function( newValue ) {
+					migrateWarn( msg );
+					value = newValue;
+				}
+			});
+			return;
+		} catch( err ) {
+			// IE8 supports Object.defineProperty only on DOM nodes, so we can't warn there
+		}
+	}
+
+	// Non-ES5 (or broken) browser; just set the property
+	jQuery._definePropertyBroken = true;
+	obj[ prop ] = value;
+}
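+
+// Sketch of the effect ("demoProp" is hypothetical, for illustration only):
+// the first get/set of a shimmed property warns, later accesses are silent
+// because migrateWarn() deduplicates by message.
+//
+//     migrateWarnProp( jQuery, "demoProp", 42, "jQuery.demoProp is deprecated" );
+//     jQuery.demoProp;   // logs "JQMIGRATE: jQuery.demoProp is deprecated"
+//     jQuery.demoProp;   // silent
+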
+
+if ( document.compatMode === "BackCompat" ) {
+	// jQuery has never supported or tested Quirks Mode
+	migrateWarn( "jQuery is not compatible with Quirks Mode" );
+}
+
+
+var attrFn = jQuery( "<input/>", { size: 1 } ).attr("size") && jQuery.attrFn,
+	oldAttr = jQuery.attr,
+	valueAttrGet = jQuery.attrHooks.value && jQuery.attrHooks.value.get ||
+		function() { return null; },
+	valueAttrSet = jQuery.attrHooks.value && jQuery.attrHooks.value.set ||
+		function() { return undefined; },
+	rnoType = /^(?:input|button)$/i,
+	rnoAttrNodeType = /^[238]$/,
+	rboolean = /^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,
+	ruseDefault = /^(?:checked|selected)$/i;
+
+// jQuery.attrFn
+migrateWarnProp( jQuery, "attrFn", attrFn || {}, "jQuery.attrFn is deprecated" );
+
+jQuery.attr = function( elem, name, value, pass ) {
+	var lowerName = name.toLowerCase(),
+		nType = elem && elem.nodeType;
+
+	if ( pass ) {
+		// Since pass is used internally, we only warn for new jQuery
+		// versions where there isn't a pass arg in the formal params
+		if ( oldAttr.length < 4 ) {
+			migrateWarn("jQuery.fn.attr( props, pass ) is deprecated");
+		}
+		if ( elem && !rnoAttrNodeType.test( nType ) &&
+			(attrFn ? name in attrFn : jQuery.isFunction(jQuery.fn[name])) ) {
+			return jQuery( elem )[ name ]( value );
+		}
+	}
+
+	// Warn if user tries to set `type`, since it breaks on IE 6/7/8; by checking
+	// for disconnected elements we don't warn on $( "<button>", { type: "button" } ).
+	if ( name === "type" && value !== undefined && rnoType.test( elem.nodeName ) && elem.parentNode ) {
+		migrateWarn("Can't change the 'type' of an input or button in IE 6/7/8");
+	}
+
+	// Restore boolHook for boolean property/attribute synchronization
+	if ( !jQuery.attrHooks[ lowerName ] && rboolean.test( lowerName ) ) {
+		jQuery.attrHooks[ lowerName ] = {
+			get: function( elem, name ) {
+				// Align boolean attributes with corresponding properties
+				// Fall back to attribute presence where some booleans are not supported
+				var attrNode,
+					property = jQuery.prop( elem, name );
+				return property === true || typeof property !== "boolean" &&
+					( attrNode = elem.getAttributeNode(name) ) && attrNode.nodeValue !== false ?
+
+					name.toLowerCase() :
+					undefined;
+			},
+			set: function( elem, value, name ) {
+				var propName;
+				if ( value === false ) {
+					// Remove boolean attributes when set to false
+					jQuery.removeAttr( elem, name );
+				} else {
+					// value is true, since we know at this point that its type is boolean and not false
+					// Set boolean attributes to the same name and set the DOM property
+					propName = jQuery.propFix[ name ] || name;
+					if ( propName in elem ) {
+						// Only set the IDL specifically if it already exists on the element
+						elem[ propName ] = true;
+					}
+
+					elem.setAttribute( name, name.toLowerCase() );
+				}
+				return name;
+			}
+		};
+
+		// Warn only for attributes that can remain distinct from their properties post-1.9
+		if ( ruseDefault.test( lowerName ) ) {
+			migrateWarn( "jQuery.fn.attr('" + lowerName + "') might use property instead of attribute" );
+		}
+	}
+
+	return oldAttr.call( jQuery, elem, name, value );
+};
+
+// attrHooks: value
+jQuery.attrHooks.value = {
+	get: function( elem, name ) {
+		var nodeName = ( elem.nodeName || "" ).toLowerCase();
+		if ( nodeName === "button" ) {
+			return valueAttrGet.apply( this, arguments );
+		}
+		if ( nodeName !== "input" && nodeName !== "option" ) {
+			migrateWarn("jQuery.fn.attr('value') no longer gets properties");
+		}
+		return name in elem ?
+			elem.value :
+			null;
+	},
+	set: function( elem, value ) {
+		var nodeName = ( elem.nodeName || "" ).toLowerCase();
+		if ( nodeName === "button" ) {
+			return valueAttrSet.apply( this, arguments );
+		}
+		if ( nodeName !== "input" && nodeName !== "option" ) {
+			migrateWarn("jQuery.fn.attr('value', val) no longer sets properties");
+		}
+		// Does not return so that setAttribute is also used
+		elem.value = value;
+	}
+};
+
+
+var matched, browser,
+	oldInit = jQuery.fn.init,
+	oldParseJSON = jQuery.parseJSON,
+	rspaceAngle = /^\s*</,
+	rattrHash = /\[\s*\w+\s*[~|^$*]?=\s*(?![\s'"])[^#\]]*#/,
+	// Note: XSS check is done below after string is trimmed
+	rquickExpr = /^([^<]*)(<[\w\W]+>)([^>]*)$/;
+
+// $(html) "looks like html" rule change
+jQuery.fn.init = function( selector, context, rootjQuery ) {
+	var match, ret;
+
+	if ( selector && typeof selector === "string" && !jQuery.isPlainObject( context ) &&
+			(match = rquickExpr.exec( jQuery.trim( selector ) )) && match[ 0 ] ) {
+		// This is an HTML string according to the "old" rules; is it still?
+		if ( !rspaceAngle.test( selector ) ) {
+			migrateWarn("$(html) HTML strings must start with '<' character");
+		}
+		if ( match[ 3 ] ) {
+			migrateWarn("$(html) HTML text after last tag is ignored");
+		}
+
+		// Consistently reject any HTML-like string starting with a hash (#9521)
+		// Note that this may break jQuery 1.6.x code that otherwise would work.
+		if ( match[ 0 ].charAt( 0 ) === "#" ) {
+			migrateWarn("HTML string cannot start with a '#' character");
+			jQuery.error("JQMIGRATE: Invalid selector string (XSS)");
+		}
+		// Now process using loose rules; let pre-1.8 play too
+		if ( context && context.context ) {
+			// jQuery object as context; parseHTML expects a DOM object
+			context = context.context;
+		}
+		if ( jQuery.parseHTML ) {
+			return oldInit.call( this,
+					jQuery.parseHTML( match[ 2 ], context && context.ownerDocument ||
+						context || document, true ), context, rootjQuery );
+		}
+	}
+
+	if ( selector === "#" ) {
+
+		// jQuery( "#" ) is a bogus ID selector, but it returned an empty set before jQuery 3.0
+		migrateWarn( "jQuery( '#' ) is not a valid selector" );
+		selector = [];
+
+	} else if ( rattrHash.test( selector ) ) {
+
+		// The nonstandard and undocumented unquoted-hash was removed in jQuery 1.12.0
+		// Note that this doesn't actually fix the selector due to potential false positives
+		migrateWarn( "Attribute selectors with '#' must be quoted: '" + selector + "'" );
+	}
+
+	ret = oldInit.apply( this, arguments );
+
+	// Fill in selector and context properties so .live() works
+	if ( selector && selector.selector !== undefined ) {
+		// A jQuery object, copy its properties
+		ret.selector = selector.selector;
+		ret.context = selector.context;
+
+	} else {
+		ret.selector = typeof selector === "string" ? selector : "";
+		if ( selector ) {
+			ret.context = selector.nodeType ? selector : context || document;
+		}
+	}
+
+	return ret;
+};
+jQuery.fn.init.prototype = jQuery.fn;
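+
+// Illustrative strings this wrapper flags (all hypothetical input):
+//
+//     jQuery( "text <b>x</b>" );   // warns: HTML must start with '<'
+//     jQuery( "<b>x</b> tail" );   // warns: text after last tag is ignored
+//     jQuery( "#<img src=x>" );    // warns, then throws the XSS error
+//     jQuery( "a[href=#top]" );    // warns: unquoted '#' in attribute selector
+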
+
+// Let $.parseJSON(falsy_value) return null
+jQuery.parseJSON = function( json ) {
+	if ( !json ) {
+		migrateWarn("jQuery.parseJSON requires a valid JSON string");
+		return null;
+	}
+	return oldParseJSON.apply( this, arguments );
+};
+
+jQuery.uaMatch = function( ua ) {
+	ua = ua.toLowerCase();
+
+	var match = /(chrome)[ \/]([\w.]+)/.exec( ua ) ||
+		/(webkit)[ \/]([\w.]+)/.exec( ua ) ||
+		/(opera)(?:.*version|)[ \/]([\w.]+)/.exec( ua ) ||
+		/(msie) ([\w.]+)/.exec( ua ) ||
+		ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec( ua ) ||
+		[];
+
+	return {
+		browser: match[ 1 ] || "",
+		version: match[ 2 ] || "0"
+	};
+};
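+
+// Usage sketch (truncated UA string is illustrative):
+//
+//     jQuery.uaMatch( "Mozilla/5.0 ... Chrome/45.0.2454.85 Safari/537.36" );
+//     // => { browser: "chrome", version: "45.0.2454.85" }
+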
+
+// Don't clobber any existing jQuery.browser in case it's different
+if ( !jQuery.browser ) {
+	matched = jQuery.uaMatch( navigator.userAgent );
+	browser = {};
+
+	if ( matched.browser ) {
+		browser[ matched.browser ] = true;
+		browser.version = matched.version;
+	}
+
+	// Chrome is Webkit, but Webkit is also Safari.
+	if ( browser.chrome ) {
+		browser.webkit = true;
+	} else if ( browser.webkit ) {
+		browser.safari = true;
+	}
+
+	jQuery.browser = browser;
+}
+
+// Warn if the code tries to get jQuery.browser
+migrateWarnProp( jQuery, "browser", jQuery.browser, "jQuery.browser is deprecated" );
+
+// jQuery.boxModel deprecated in 1.3, jQuery.support.boxModel deprecated in 1.7
+jQuery.boxModel = jQuery.support.boxModel = (document.compatMode === "CSS1Compat");
+migrateWarnProp( jQuery, "boxModel", jQuery.boxModel, "jQuery.boxModel is deprecated" );
+migrateWarnProp( jQuery.support, "boxModel", jQuery.support.boxModel, "jQuery.support.boxModel is deprecated" );
+
+jQuery.sub = function() {
+	function jQuerySub( selector, context ) {
+		return new jQuerySub.fn.init( selector, context );
+	}
+	jQuery.extend( true, jQuerySub, this );
+	jQuerySub.superclass = this;
+	jQuerySub.fn = jQuerySub.prototype = this();
+	jQuerySub.fn.constructor = jQuerySub;
+	jQuerySub.sub = this.sub;
+	jQuerySub.fn.init = function init( selector, context ) {
+		var instance = jQuery.fn.init.call( this, selector, context, rootjQuerySub );
+		return instance instanceof jQuerySub ?
+			instance :
+			jQuerySub( instance );
+	};
+	jQuerySub.fn.init.prototype = jQuerySub.fn;
+	var rootjQuerySub = jQuerySub(document);
+	migrateWarn( "jQuery.sub() is deprecated" );
+	return jQuerySub;
+};
+
+// The number of elements contained in the matched element set
+jQuery.fn.size = function() {
+	migrateWarn( "jQuery.fn.size() is deprecated; use the .length property" );
+	return this.length;
+};
+
+
+var internalSwapCall = false;
+
+// If this version of jQuery has .swap(), don't false-alarm on internal uses
+if ( jQuery.swap ) {
+	jQuery.each( [ "height", "width", "reliableMarginRight" ], function( _, name ) {
+		var oldHook = jQuery.cssHooks[ name ] && jQuery.cssHooks[ name ].get;
+
+		if ( oldHook ) {
+			jQuery.cssHooks[ name ].get = function() {
+				var ret;
+
+				internalSwapCall = true;
+				ret = oldHook.apply( this, arguments );
+				internalSwapCall = false;
+				return ret;
+			};
+		}
+	});
+}
+
+jQuery.swap = function( elem, options, callback, args ) {
+	var ret, name,
+		old = {};
+
+	if ( !internalSwapCall ) {
+		migrateWarn( "jQuery.swap() is undocumented and deprecated" );
+	}
+
+	// Remember the old values, and insert the new ones
+	for ( name in options ) {
+		old[ name ] = elem.style[ name ];
+		elem.style[ name ] = options[ name ];
+	}
+
+	ret = callback.apply( elem, args || [] );
+
+	// Revert the old values
+	for ( name in options ) {
+		elem.style[ name ] = old[ name ];
+	}
+
+	return ret;
+};
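+
+// Sketch of what swap() is for: measure an element while temporary styles are
+// applied, then restore the originals (styles here are illustrative; direct
+// calls from user code trigger the deprecation warning above).
+//
+//     var height = jQuery.swap( elem,
+//         { display: "block", visibility: "hidden" },
+//         function() { return this.offsetHeight; } );
+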
+
+
+// Ensure that $.ajax gets the new parseJSON defined in core.js
+jQuery.ajaxSetup({
+	converters: {
+		"text json": jQuery.parseJSON
+	}
+});
+
+
+var oldFnData = jQuery.fn.data;
+
+jQuery.fn.data = function( name ) {
+	var ret, evt,
+		elem = this[0];
+
+	// Handles 1.7 which has this behavior and 1.8 which doesn't
+	if ( elem && name === "events" && arguments.length === 1 ) {
+		ret = jQuery.data( elem, name );
+		evt = jQuery._data( elem, name );
+		if ( ( ret === undefined || ret === evt ) && evt !== undefined ) {
+			migrateWarn("Use of jQuery.fn.data('events') is deprecated");
+			return evt;
+		}
+	}
+	return oldFnData.apply( this, arguments );
+};
+
+
+var rscriptType = /\/(java|ecma)script/i;
+
+// Since jQuery.clean is used internally on older versions, we only shim if it's missing
+if ( !jQuery.clean ) {
+	jQuery.clean = function( elems, context, fragment, scripts ) {
+		// Set context per 1.8 logic
+		context = context || document;
+		context = !context.nodeType && context[0] || context;
+		context = context.ownerDocument || context;
+
+		migrateWarn("jQuery.clean() is deprecated");
+
+		var i, elem, handleScript, jsTags,
+			ret = [];
+
+		jQuery.merge( ret, jQuery.buildFragment( elems, context ).childNodes );
+
+		// Complex logic lifted directly from jQuery 1.8
+		if ( fragment ) {
+			// Special handling of each script element
+			handleScript = function( elem ) {
+				// Check if we consider it executable
+				if ( !elem.type || rscriptType.test( elem.type ) ) {
+					// Detach the script and store it in the scripts array (if provided) or the fragment
+					// Return truthy to indicate that it has been handled
+					return scripts ?
+						scripts.push( elem.parentNode ? elem.parentNode.removeChild( elem ) : elem ) :
+						fragment.appendChild( elem );
+				}
+			};
+
+			for ( i = 0; (elem = ret[i]) != null; i++ ) {
+				// Check if we're done after handling an executable script
+				if ( !( jQuery.nodeName( elem, "script" ) && handleScript( elem ) ) ) {
+					// Append to fragment and handle embedded scripts
+					fragment.appendChild( elem );
+					if ( typeof elem.getElementsByTagName !== "undefined" ) {
+						// handleScript alters the DOM, so use jQuery.merge to ensure snapshot iteration
+						jsTags = jQuery.grep( jQuery.merge( [], elem.getElementsByTagName("script") ), handleScript );
+
+						// Splice the scripts into ret after their former ancestor and advance our index beyond them
+						ret.splice.apply( ret, [i + 1, 0].concat( jsTags ) );
+						i += jsTags.length;
+					}
+				}
+			}
+		}
+
+		return ret;
+	};
+}
+
+var eventAdd = jQuery.event.add,
+	eventRemove = jQuery.event.remove,
+	eventTrigger = jQuery.event.trigger,
+	oldToggle = jQuery.fn.toggle,
+	oldLive = jQuery.fn.live,
+	oldDie = jQuery.fn.die,
+	oldLoad = jQuery.fn.load,
+	ajaxEvents = "ajaxStart|ajaxStop|ajaxSend|ajaxComplete|ajaxError|ajaxSuccess",
+	rajaxEvent = new RegExp( "\\b(?:" + ajaxEvents + ")\\b" ),
+	rhoverHack = /(?:^|\s)hover(\.\S+|)\b/,
+	hoverHack = function( events ) {
+		if ( typeof( events ) !== "string" || jQuery.event.special.hover ) {
+			return events;
+		}
+		if ( rhoverHack.test( events ) ) {
+			migrateWarn("'hover' pseudo-event is deprecated, use 'mouseenter mouseleave'");
+		}
+		return events && events.replace( rhoverHack, "mouseenter$1 mouseleave$1" );
+	};
+
+// Event props removed in 1.9, put them back if needed; no practical way to warn them
+if ( jQuery.event.props && jQuery.event.props[ 0 ] !== "attrChange" ) {
+	jQuery.event.props.unshift( "attrChange", "attrName", "relatedNode", "srcElement" );
+}
+
+// Undocumented jQuery.event.handle was "deprecated" in jQuery 1.7
+if ( jQuery.event.dispatch ) {
+	migrateWarnProp( jQuery.event, "handle", jQuery.event.dispatch, "jQuery.event.handle is undocumented and deprecated" );
+}
+
+// Support for 'hover' pseudo-event and ajax event warnings
+jQuery.event.add = function( elem, types, handler, data, selector ){
+	if ( elem !== document && rajaxEvent.test( types ) ) {
+		migrateWarn( "AJAX events should be attached to document: " + types );
+	}
+	eventAdd.call( this, elem, hoverHack( types || "" ), handler, data, selector );
+};
+jQuery.event.remove = function( elem, types, handler, selector, mappedTypes ){
+	eventRemove.call( this, elem, hoverHack( types ) || "", handler, selector, mappedTypes );
+};
+
+jQuery.each( [ "load", "unload", "error" ], function( _, name ) {
+
+	jQuery.fn[ name ] = function() {
+		var args = Array.prototype.slice.call( arguments, 0 );
+
+		// If this is an ajax load() the first arg should be the string URL;
+		// technically this could also be the "Anything" arg of the event .load()
+		// which just goes to show why this dumb signature has been deprecated!
+		// jQuery custom builds that exclude the Ajax module justifiably die here.
+		if ( name === "load" && typeof args[ 0 ] === "string" ) {
+			return oldLoad.apply( this, args );
+		}
+
+		migrateWarn( "jQuery.fn." + name + "() is deprecated" );
+
+		args.splice( 0, 0, name );
+		if ( arguments.length ) {
+			return this.bind.apply( this, args );
+		}
+
+		// Use .triggerHandler here because:
+		// - load and unload events don't need to bubble, only applied to window or image
+		// - error event should not bubble to window, although it does pre-1.7
+		// See http://bugs.jquery.com/ticket/11820
+		this.triggerHandler.apply( this, args );
+		return this;
+	};
+
+});
+
+jQuery.fn.toggle = function( fn, fn2 ) {
+
+	// Don't mess with animation or css toggles
+	if ( !jQuery.isFunction( fn ) || !jQuery.isFunction( fn2 ) ) {
+		return oldToggle.apply( this, arguments );
+	}
+	migrateWarn("jQuery.fn.toggle(handler, handler...) is deprecated");
+
+	// Save reference to arguments for access in closure
+	var args = arguments,
+		guid = fn.guid || jQuery.guid++,
+		i = 0,
+		toggler = function( event ) {
+			// Figure out which function to execute
+			var lastToggle = ( jQuery._data( this, "lastToggle" + fn.guid ) || 0 ) % i;
+			jQuery._data( this, "lastToggle" + fn.guid, lastToggle + 1 );
+
+			// Make sure that clicks stop
+			event.preventDefault();
+
+			// and execute the function
+			return args[ lastToggle ].apply( this, arguments ) || false;
+		};
+
+	// link all the functions, so any of them can unbind this click handler
+	toggler.guid = guid;
+	while ( i < args.length ) {
+		args[ i++ ].guid = guid;
+	}
+
+	return this.click( toggler );
+};
+
+jQuery.fn.live = function( types, data, fn ) {
+	migrateWarn("jQuery.fn.live() is deprecated");
+	if ( oldLive ) {
+		return oldLive.apply( this, arguments );
+	}
+	jQuery( this.context ).on( types, this.selector, data, fn );
+	return this;
+};
+
+jQuery.fn.die = function( types, fn ) {
+	migrateWarn("jQuery.fn.die() is deprecated");
+	if ( oldDie ) {
+		return oldDie.apply( this, arguments );
+	}
+	jQuery( this.context ).off( types, this.selector || "**", fn );
+	return this;
+};
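+
+// Migration sketch: the delegated equivalents of the deprecated pair
+// (selector illustrative):
+//
+//     $( "a.quiet" ).live( "click", fn );          // deprecated form
+//     $( document ).on( "click", "a.quiet", fn );  // replacement
+//     $( "a.quiet" ).die( "click", fn );           // deprecated form
+//     $( document ).off( "click", "a.quiet", fn ); // replacement
+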
+
+// Turn global events into document-triggered events
+jQuery.event.trigger = function( event, data, elem, onlyHandlers  ){
+	if ( !elem && !rajaxEvent.test( event ) ) {
+		migrateWarn( "Global events are undocumented and deprecated" );
+	}
+	return eventTrigger.call( this,  event, data, elem || document, onlyHandlers  );
+};
+jQuery.each( ajaxEvents.split("|"),
+	function( _, name ) {
+		jQuery.event.special[ name ] = {
+			setup: function() {
+				var elem = this;
+
+				// The document needs no shimming; must be !== for oldIE
+				if ( elem !== document ) {
+					jQuery.event.add( document, name + "." + jQuery.guid, function() {
+						jQuery.event.trigger( name, Array.prototype.slice.call( arguments, 1 ), elem, true );
+					});
+					jQuery._data( this, name, jQuery.guid++ );
+				}
+				return false;
+			},
+			teardown: function() {
+				if ( this !== document ) {
+					jQuery.event.remove( document, name + "." + jQuery._data( this, name ) );
+				}
+				return false;
+			}
+		};
+	}
+);
+
+jQuery.event.special.ready = {
+	setup: function() {
+		if ( this === document ) {
+			migrateWarn( "'ready' event is deprecated" );
+		}
+	}
+};
+
+var oldSelf = jQuery.fn.andSelf || jQuery.fn.addBack,
+	oldFind = jQuery.fn.find;
+
+jQuery.fn.andSelf = function() {
+	migrateWarn("jQuery.fn.andSelf() replaced by jQuery.fn.addBack()");
+	return oldSelf.apply( this, arguments );
+};
+
+jQuery.fn.find = function( selector ) {
+	var ret = oldFind.apply( this, arguments );
+	ret.context = this.context;
+	ret.selector = this.selector ? this.selector + " " + selector : selector;
+	return ret;
+};
+
+
+// jQuery 1.6 did not support Callbacks, do not warn there
+if ( jQuery.Callbacks ) {
+
+	var oldDeferred = jQuery.Deferred,
+		tuples = [
+			// action, add listener, callbacks, .then handlers, final state
+			[ "resolve", "done", jQuery.Callbacks("once memory"),
+				jQuery.Callbacks("once memory"), "resolved" ],
+			[ "reject", "fail", jQuery.Callbacks("once memory"),
+				jQuery.Callbacks("once memory"), "rejected" ],
+			[ "notify", "progress", jQuery.Callbacks("memory"),
+				jQuery.Callbacks("memory") ]
+		];
+
+	jQuery.Deferred = function( func ) {
+		var deferred = oldDeferred(),
+			promise = deferred.promise();
+
+		deferred.pipe = promise.pipe = function( /* fnDone, fnFail, fnProgress */ ) {
+			var fns = arguments;
+
+			migrateWarn( "deferred.pipe() is deprecated" );
+
+			return jQuery.Deferred(function( newDefer ) {
+				jQuery.each( tuples, function( i, tuple ) {
+					var fn = jQuery.isFunction( fns[ i ] ) && fns[ i ];
+					// deferred.done(function() { bind to newDefer or newDefer.resolve })
+					// deferred.fail(function() { bind to newDefer or newDefer.reject })
+					// deferred.progress(function() { bind to newDefer or newDefer.notify })
+					deferred[ tuple[1] ](function() {
+						var returned = fn && fn.apply( this, arguments );
+						if ( returned && jQuery.isFunction( returned.promise ) ) {
+							returned.promise()
+								.done( newDefer.resolve )
+								.fail( newDefer.reject )
+								.progress( newDefer.notify );
+						} else {
+							newDefer[ tuple[ 0 ] + "With" ](
+								this === promise ? newDefer.promise() : this,
+								fn ? [ returned ] : arguments
+							);
+						}
+					});
+				});
+				fns = null;
+			}).promise();
+
+		};
+
+		deferred.isResolved = function() {
+			migrateWarn( "deferred.isResolved is deprecated" );
+			return deferred.state() === "resolved";
+		};
+
+		deferred.isRejected = function() {
+			migrateWarn( "deferred.isRejected is deprecated" );
+			return deferred.state() === "rejected";
+		};
+
+		if ( func ) {
+			func.call( deferred, deferred );
+		}
+
+		return deferred;
+	};
+
+}
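+
+// Migration sketch: pipe() forwards/chains like then() on jQuery 1.8+, so the
+// two lines below are equivalent (values illustrative):
+//
+//     dfd.pipe( function( v ) { return v * 2; } );  // deprecated, warns
+//     dfd.then( function( v ) { return v * 2; } );  // preferred spelling
+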
+
+})( jQuery, window );
diff --git a/client/galaxy/scripts/libs/jquery/jquery.mousewheel.js b/client/galaxy/scripts/libs/jquery/jquery.mousewheel.js
new file mode 100644
index 0000000..86cc455
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.mousewheel.js
@@ -0,0 +1,125 @@
+/*! Copyright (c) 2013 Brandon Aaron (http://brandonaaron.net)
+ * Licensed under the MIT License (LICENSE.txt).
+ *
+ * Thanks to: http://adomas.org/javascript-mouse-wheel/ for some pointers.
+ * Thanks to: Mathias Bank(http://www.mathias-bank.de) for a scope bug fix.
+ * Thanks to: Seamus Leahy for adding deltaX and deltaY
+ *
+ * Version: 3.1.3
+ *
+ * Requires: 1.2.2+
+ */
+
+(function (factory) {
+    // GALAXY HACK 
+    // (JG): Galaxy's mixing of a global jQuery and require modules doesn't work with 
+    // the logic below. Instead, do the right thing for this configuration without any checks.
+    factory(jQuery);
+    // END HACK
+
+    /*
+    if ( typeof define === 'function' && define.amd ) {
+        // AMD. Register as an anonymous module.
+        define(['jquery'], factory);
+    } else if (typeof exports === 'object') {
+        // Node/CommonJS style for Browserify
+        module.exports = factory;
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+    */
+}(function ($) {
+
+    var toFix = ['wheel', 'mousewheel', 'DOMMouseScroll', 'MozMousePixelScroll'];
+    // Note: DOM event names are case-sensitive; Gecko's legacy event is
+    // 'DOMMouseScroll' ('DomMouseScroll' never matches, so it is corrected here).
+    var toBind = 'onwheel' in document || document.documentMode >= 9 ? ['wheel'] : ['mousewheel', 'DOMMouseScroll', 'MozMousePixelScroll'];
+    var lowestDelta, lowestDeltaXY;
+
+    if ( $.event.fixHooks ) {
+        for ( var i = toFix.length; i; ) {
+            $.event.fixHooks[ toFix[--i] ] = $.event.mouseHooks;
+        }
+    }
+
+    $.event.special.mousewheel = {
+        setup: function() {
+            if ( this.addEventListener ) {
+                for ( var i = toBind.length; i; ) {
+                    this.addEventListener( toBind[--i], handler, false );
+                }
+            } else {
+                this.onmousewheel = handler;
+            }
+        },
+
+        teardown: function() {
+            if ( this.removeEventListener ) {
+                for ( var i = toBind.length; i; ) {
+                    this.removeEventListener( toBind[--i], handler, false );
+                }
+            } else {
+                this.onmousewheel = null;
+            }
+        }
+    };
+
+    $.fn.extend({
+        mousewheel: function(fn) {
+            return fn ? this.bind("mousewheel", fn) : this.trigger("mousewheel");
+        },
+
+        unmousewheel: function(fn) {
+            return this.unbind("mousewheel", fn);
+        }
+    });
+
+
+    function handler(event) {
+        var orgEvent = event || window.event,
+            args = [].slice.call(arguments, 1),
+            delta = 0,
+            deltaX = 0,
+            deltaY = 0,
+            absDelta = 0,
+            absDeltaXY = 0,
+            fn;
+        event = $.event.fix(orgEvent);
+        event.type = "mousewheel";
+
+        // Old school scrollwheel delta
+        if ( orgEvent.wheelDelta ) { delta = orgEvent.wheelDelta; }
+        if ( orgEvent.detail )     { delta = orgEvent.detail * -1; }
+
+        // New school wheel delta (wheel event)
+        if ( orgEvent.deltaY ) {
+            deltaY = orgEvent.deltaY * -1;
+            delta  = deltaY;
+        }
+        if ( orgEvent.deltaX ) {
+            deltaX = orgEvent.deltaX;
+            delta  = deltaX * -1;
+        }
+
+        // Webkit
+        if ( orgEvent.wheelDeltaY !== undefined ) { deltaY = orgEvent.wheelDeltaY; }
+        if ( orgEvent.wheelDeltaX !== undefined ) { deltaX = orgEvent.wheelDeltaX * -1; }
+
+        // Look for lowest delta to normalize the delta values
+        absDelta = Math.abs(delta);
+        if ( !lowestDelta || absDelta < lowestDelta ) { lowestDelta = absDelta; }
+        absDeltaXY = Math.max(Math.abs(deltaY), Math.abs(deltaX));
+        if ( !lowestDeltaXY || absDeltaXY < lowestDeltaXY ) { lowestDeltaXY = absDeltaXY; }
+
+        // Get a whole value for the deltas
+        fn = delta > 0 ? 'floor' : 'ceil';
+        delta  = Math[fn](delta / lowestDelta);
+        deltaX = Math[fn](deltaX / lowestDeltaXY);
+        deltaY = Math[fn](deltaY / lowestDeltaXY);
+
+        // Add event and delta to the front of the arguments
+        args.unshift(event, delta, deltaX, deltaY);
+
+        return ($.event.dispatch || $.event.handle).apply(this, args);
+    }
+
+}));
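+
+// Usage sketch (selector illustrative): handlers receive the normalized
+// whole-number deltas computed in handler() above.
+//
+//     $('.scroller').mousewheel(function(event, delta, deltaX, deltaY) {
+//         console.log(delta);      // positive = up/away, negative = down/toward
+//         event.preventDefault();
+//     });
+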
diff --git a/client/galaxy/scripts/libs/jquery/jquery.rating.js b/client/galaxy/scripts/libs/jquery/jquery.rating.js
new file mode 100644
index 0000000..77a440c
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.rating.js
@@ -0,0 +1,376 @@
+/*
+ ### jQuery Star Rating Plugin v4.11 - 2013-03-14 ###
+ * Home: http://www.fyneworks.com/jquery/star-rating/
+ * Code: http://code.google.com/p/jquery-star-rating-plugin/
+ *
+	* Licensed under http://en.wikipedia.org/wiki/MIT_License
+ ###
+*/
+
+/*# AVOID COLLISIONS #*/
+;if(window.jQuery) (function($){
+/*# AVOID COLLISIONS #*/
+	
+	// IE6 Background Image Fix
+	if (!$.support.opacity && !$.support.style) try { document.execCommand("BackgroundImageCache", false, true); } catch(e) {}
+	// Thanks to http://www.visualjquery.com/rating/rating_redux.html
+	
+	// plugin initialization
+	$.fn.rating = function(options){
+		if(this.length==0) return this; // quick fail
+		
+		// Handle API methods
+		if(typeof arguments[0]=='string'){
+			// Perform API methods on individual elements
+			if(this.length>1){
+				var args = arguments;
+				return this.each(function(){
+					$.fn.rating.apply($(this), args);
+				});
+			};
+			// Invoke API method handler
+			$.fn.rating[arguments[0]].apply(this, $.makeArray(arguments).slice(1) || []);
+			// Quick exit...
+			return this;
+		};
+		
+		// Initialize options for this call
+		var options = $.extend(
+			{}/* new object */,
+			$.fn.rating.options/* default options */,
+			options || {} /* just-in-time options */
+		);
+		
+		// Allow multiple controls with the same name by making each call unique
+		$.fn.rating.calls++;
+		
+		// loop through each matched element
+		this
+		 .not('.star-rating-applied')
+			.addClass('star-rating-applied')
+		.each(function(){
+			
+			// Load control parameters / find context / etc
+			var control, input = $(this);
+			var eid = (this.name || 'unnamed-rating').replace(/\[|\]/g, '_').replace(/^\_+|\_+$/g,'');
+			var context = $(this.form || document.body);
+			
+			// FIX: http://code.google.com/p/jquery-star-rating-plugin/issues/detail?id=23
+			var raters = context.data('rating');
+			if(!raters || raters.call!=$.fn.rating.calls) raters = { count:0, call:$.fn.rating.calls };
+			var rater = raters[eid] || context.data('rating'+eid);
+			
+			// if rater is available, verify that the control still exists
+			if(rater) control = rater.data('rating');
+			
+			if(rater && control)//{// save a byte!
+				// add star to control if rater is available and the same control still exists
+				control.count++;
+				
+			//}// save a byte!
+			else{
+				// create new control if first star or control element was removed/replaced
+				
+				// Initialize options for this rater
+				control = $.extend(
+					{}/* new object */,
+					options || {} /* current call options */,
+					($.metadata? input.metadata(): ($.meta?input.data():null)) || {}, /* metadata options */
+					{ count:0, stars: [], inputs: [] }
+				);
+				
+				// increment number of rating controls
+				control.serial = raters.count++;
+				
+				// create rating element
+				rater = $('<span class="star-rating-control"/>');
+				input.before(rater);
+				
+				// Mark element for initialization (once all stars are ready)
+				rater.addClass('rating-to-be-drawn');
+				
+				// Accept readOnly setting from 'disabled' property
+				if(input.attr('disabled') || input.hasClass('disabled')) control.readOnly = true;
+				
+				// Accept required setting from class property (class='required')
+				if(input.hasClass('required')) control.required = true;
+				
+				// Create 'cancel' button
+				rater.append(
+					control.cancel = $('<div class="rating-cancel"><a title="' + control.cancel + '">' + control.cancelValue + '</a></div>')
+					.on('mouseover',function(){
+						$(this).rating('drain');
+						$(this).addClass('star-rating-hover');
+						//$(this).rating('focus');
+					})
+					.on('mouseout',function(){
+						$(this).rating('draw');
+						$(this).removeClass('star-rating-hover');
+						//$(this).rating('blur');
+					})
+					.on('click',function(){
+					 $(this).rating('select');
+					})
+					.data('rating', control)
+				);
+				
+			}; // first element of group
+			
+			// insert rating star (thanks Jan Fanslau rev125 for blind support https://code.google.com/p/jquery-star-rating-plugin/issues/detail?id=125)
+			var star = $('<div role="text" aria-label="'+ this.title +'" class="star-rating rater-'+ control.serial +'"><a title="' + (this.title || this.value) + '">' + this.value + '</a></div>');
+			rater.append(star);
+			
+			// inherit attributes from input element
+			if(this.id) star.attr('id', this.id);
+			if(this.className) star.addClass(this.className);
+			
+			// Half-stars?
+			if(control.half) control.split = 2;
+			
+			// Prepare division control
+			if(typeof control.split=='number' && control.split>0){
+				var stw = ($.fn.width ? star.width() : 0) || control.starWidth;
+				var spi = (control.count % control.split), spw = Math.floor(stw/control.split);
+				star
+				// restrict star's width and hide overflow (already in CSS)
+				.width(spw)
+				// move the star left by using a negative margin
+				// this is work-around to IE's stupid box model (position:relative doesn't work)
+				.find('a').css({ 'margin-left':'-'+ (spi*spw) +'px' })
+			};
+			
+			// readOnly?
+			if(control.readOnly)//{ //save a byte!
+				// Mark star as readOnly so user can customize display
+				star.addClass('star-rating-readonly');
+			//}  //save a byte!
+			else//{ //save a byte!
+			 // Enable hover css effects
+				star.addClass('star-rating-live')
+				 // Attach mouse events
+					.on('mouseover',function(){
+						$(this).rating('fill');
+						$(this).rating('focus');
+					})
+					.on('mouseout',function(){
+						$(this).rating('draw');
+						$(this).rating('blur');
+					})
+					.on('click',function(){
+						$(this).rating('select');
+					})
+				;
+			//}; //save a byte!
+			
+			// set current selection
+			if(this.checked)	control.current = star;
+			
+			// set current select for links
+			if(this.nodeName=="A"){
+				if($(this).hasClass('selected'))
+					control.current = star;
+			};
+			
+			// hide input element
+			input.hide();
+			
+			// backward compatibility, form element to plugin
+			input.on('change.rating',function(event){
+				if(event.selfTriggered) return false;
+				$(this).rating('select');
+			});
+			
+			// attach reference to star to input element and vice-versa
+			star.data('rating.input', input.data('rating.star', star));
+			
+			// store control information in form (or body when form not available)
+			control.stars[control.stars.length] = star[0];
+			control.inputs[control.inputs.length] = input[0];
+			control.rater = raters[eid] = rater;
+			control.context = context;
+			
+			input.data('rating', control);
+			rater.data('rating', control);
+			star.data('rating', control);
+			context.data('rating', raters);
+			context.data('rating'+eid, rater); // required for ajax forms
+  }); // each element
+		
+		// Initialize ratings (first draw)
+		$('.rating-to-be-drawn').rating('draw').removeClass('rating-to-be-drawn');
+		
+		return this; // don't break the chain...
+	};
+	
+	/*--------------------------------------------------------*/
+	
+	/*
+		### Core functionality and API ###
+	*/
+	$.extend($.fn.rating, {
+		// Used to append a unique serial number to internal control ID
+		// each time the plugin is invoked so same name controls can co-exist
+		calls: 0,
+		
+		focus: function(){
+			var control = this.data('rating'); if(!control) return this;
+			if(!control.focus) return this; // quick fail if not required
+			// find data for event
+			var input = $(this).data('rating.input') || $( this.tagName=='INPUT' ? this : null );
+			// focus handler, as requested by focusdigital.co.uk
+			if(control.focus) control.focus.apply(input[0], [input.val(), $('a', input.data('rating.star'))[0]]);
+		}, // $.fn.rating.focus
+		
+		blur: function(){
+			var control = this.data('rating'); if(!control) return this;
+			if(!control.blur) return this; // quick fail if not required
+			// find data for event
+			var input = $(this).data('rating.input') || $( this.tagName=='INPUT' ? this : null );
+			// blur handler, as requested by focusdigital.co.uk
+			if(control.blur) control.blur.apply(input[0], [input.val(), $('a', input.data('rating.star'))[0]]);
+		}, // $.fn.rating.blur
+		
+		fill: function(){ // fill to the current mouse position.
+			var control = this.data('rating'); if(!control) return this;
+			// do not execute when control is in read-only mode
+			if(control.readOnly) return;
+			// Reset all stars and highlight them up to this element
+			this.rating('drain');
+			this.prevAll().addBack().filter('.rater-'+ control.serial).addClass('star-rating-hover');
+		},// $.fn.rating.fill
+		
+		drain: function() { // drain all the stars.
+			var control = this.data('rating'); if(!control) return this;
+			// do not execute when control is in read-only mode
+			if(control.readOnly) return;
+			// Reset all stars
+			control.rater.children().filter('.rater-'+ control.serial).removeClass('star-rating-on').removeClass('star-rating-hover');
+		},// $.fn.rating.drain
+		
+		draw: function(){ // set value and stars to reflect current selection
+			var control = this.data('rating'); if(!control) return this;
+			// Clear all stars
+			this.rating('drain');
+			// Set control value
+			var current = $( control.current );//? control.current.data('rating.input') : null );
+			var starson = current.length ? current.prevAll().addBack().filter('.rater-'+ control.serial) : null;
+			if(starson)	starson.addClass('star-rating-on');
+			// Show/hide 'cancel' button
+			control.cancel[control.readOnly || control.required?'hide':'show']();
+			// Add/remove read-only classes to remove hand pointer
+			this.siblings()[control.readOnly?'addClass':'removeClass']('star-rating-readonly');
+		},// $.fn.rating.draw
+		
+		
+		
+		
+		
+		select: function(value,wantCallBack){ // select a value
+			var control = this.data('rating'); if(!control) return this;
+			// do not execute when control is in read-only mode
+			if(control.readOnly) return;
+			// clear selection
+			control.current = null;
+			// programmatically (based on user input)
+			if(typeof value!='undefined' || this.length>1){
+				// select by index (0 based)
+				if(typeof value=='number')
+					return $(control.stars[value]).rating('select',undefined,wantCallBack);
+				// select by literal value (must be passed as a string)
+				if(typeof value=='string'){
+					//return
+					$.each(control.stars, function(){
+						//console.log($(this).data('rating.input'), $(this).data('rating.input').val(), value, $(this).data('rating.input').val()==value?'BINGO!':'');
+						if($(this).data('rating.input').val()==value) $(this).rating('select',undefined,wantCallBack);
+					});
+					// don't break the chain
+					return this;
+				};
+			}
+			else{
+				control.current = this[0].tagName=='INPUT' ?
+				 this.data('rating.star') :
+					(this.is('.rater-'+ control.serial) ? this : null);
+			};
+			// Update rating control state
+			this.data('rating', control);
+			// Update display
+			this.rating('draw');
+			// find current input and its siblings
+			var current = $( control.current ? control.current.data('rating.input') : null );
+			var lastipt = $( control.inputs ).filter(':checked');
+			var deadipt = $( control.inputs ).not(current);
+			// check and uncheck elements as required
+			deadipt.prop('checked',false);//.removeAttr('checked');
+			current.prop('checked',true);//.attr('checked','checked');
+			// trigger change on current or last selected input
+			$(current.length? current : lastipt ).trigger({ type:'change', selfTriggered:true });
+			// click callback, as requested here: http://plugins.jquery.com/node/1655
+			if((wantCallBack || wantCallBack == undefined) && control.callback) control.callback.apply(current[0], [current.val(), $('a', control.current)[0]]);// callback event
+			// don't break the chain
+			return this;
+		},// $.fn.rating.select
+		
+		
+		
+		
+		
+		readOnly: function(toggle, disable){ // make the control read-only (still submits value)
+			var control = this.data('rating'); if(!control) return this;
+			// set read-only status
+			control.readOnly = toggle || toggle==undefined ? true : false;
+			// enable/disable control value submission
+			if(disable) $(control.inputs).attr("disabled", "disabled");
+			else $(control.inputs).removeAttr("disabled");
+			// Update rating control state
+			this.data('rating', control);
+			// Update display
+			this.rating('draw');
+		},// $.fn.rating.readOnly
+		
+		disable: function(){ // make read-only and never submit value
+			this.rating('readOnly', true, true);
+		},// $.fn.rating.disable
+		
+		enable: function(){ // make read/write and submit value
+			this.rating('readOnly', false, false);
+		}// $.fn.rating.select
+		
+ });
+	
+	/*--------------------------------------------------------*/
+	
+	/*
+		### Default Settings ###
+		eg.: You can override default control like this:
+		$.fn.rating.options.cancel = 'Clear';
+	*/
+	$.fn.rating.options = { //$.extend($.fn.rating, { options: {
+			cancel: 'Cancel Rating',   // advisory title for the 'cancel' link
+			cancelValue: '',           // value to submit when user click the 'cancel' link
+			split: 0,                  // split the star into how many parts?
+			
+			// Width of star image in case the plugin can't work it out. This can happen if
+			// the jQuery.dimensions plugin is not available OR the image is hidden at installation
+			starWidth: 16//,
+			
+			//NB.: These don't need to be pre-defined (can be undefined/null) so let's save some code!
+			//half:     false,         // just a shortcut to control.split = 2
+			//required: false,         // disables the 'cancel' button so user can only select one of the specified values
+			//readOnly: false,         // disable rating plugin interaction / values cannot be changed
+			//focus:    function(){},  // executed when stars are focused
+			//blur:     function(){},  // executed when stars are blurred
+			//callback: function(){},  // executed when a star is clicked
+ }; //} });
+	
+	/*--------------------------------------------------------*/
+	
+	
+	// auto-initialize plugin
+	$(function(){
+		$('input[type=radio].star').rating();
+	});
+	
+	
+/*# AVOID COLLISIONS #*/
+})(jQuery);
+/*# AVOID COLLISIONS #*/
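+
+// Usage sketch: radio inputs with class "star" are auto-initialized above;
+// manual use and the string API look like this (markup and values illustrative):
+//
+//     <input type="radio" class="star" name="rate" value="1"/>  (... up to value="5")
+//
+//     $('input[type=radio].star').rating({ callback: function(value){ /* ... */ } });
+//     $('.star').rating('select', 2);        // select by 0-based star index
+//     $('.star').rating('readOnly', true);   // lock the control
+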
diff --git a/client/galaxy/scripts/libs/jquery/jquery.wymeditor.js b/client/galaxy/scripts/libs/jquery/jquery.wymeditor.js
new file mode 100644
index 0000000..5fb0e4a
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jquery.wymeditor.js
@@ -0,0 +1,4756 @@
+/**
+ * @version 0.5-rc1
+ *
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ *
+ * File: jquery.wymeditor.js
+ *
+ *        Main JS file with core classes and functions.
+ *        See the documentation for more info.
+ *
+ * About: authors
+ *
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+ *        Volker Mische (vmx a-t gmx dotde)
+ *        Scott Lewis (lewiscot a-t gmail dotcom)
+ *        Bermi Ferrer (wymeditor a-t bermi dotorg)
+ *        Daniel Reszka (d.reszka a-t wymeditor dotorg)
+ *        Jonatan Lundin (jonatan.lundin _at_ gmail.com)
+ */
+
+/*
+   Namespace: WYMeditor
+   Global WYMeditor namespace.
+*/
+if(!WYMeditor) var WYMeditor = {};
+
+//Wrap the Firebug console in WYMeditor.console
+(function() {
+    if ( !window.console || !console.firebug ) {
+        var names = ["log", "debug", "info", "warn", "error", "assert", "dir", "dirxml",
+        "group", "groupEnd", "time", "timeEnd", "count", "trace", "profile", "profileEnd"];
+
+        WYMeditor.console = {};
+        for (var i = 0; i < names.length; ++i)
+            WYMeditor.console[names[i]] = function() {}
+
+    } else WYMeditor.console = window.console;
+})();
+
+jQuery.extend(WYMeditor, {
+
+/*
+    Constants: Global WYMeditor constants.
+
+    VERSION             - Defines WYMeditor version.
+    INSTANCES           - An array of loaded WYMeditor.editor instances.
+    STRINGS             - An array of loaded WYMeditor language pairs/values.
+    SKINS               - An array of loaded WYMeditor skins.
+    NAME                - The "name" attribute.
+    INDEX               - A string replaced by the instance index.
+    WYM_INDEX           - A string used to get/set the instance index.
+    BASE_PATH           - A string replaced by WYMeditor's base path.
+    SKIN_PATH           - A string replaced by WYMeditor's skin path.
+    WYM_PATH            - A string replaced by WYMeditor's main JS file path.
+    SKINS_DEFAULT_PATH  - The skins default base path.
+    SKINS_DEFAULT_CSS   - The skins default CSS file.
+    LANG_DEFAULT_PATH   - The language files default path.
+    IFRAME_BASE_PATH    - A string replaced by the designmode iframe's base path.
+    IFRAME_DEFAULT      - The iframe's default base path.
+    JQUERY_PATH         - A string replaced by the computed jQuery path.
+    DIRECTION           - A string replaced by the text direction (rtl or ltr).
+    LOGO                - A string replaced by WYMeditor logo.
+    TOOLS               - A string replaced by the toolbar's HTML.
+    TOOLS_ITEMS         - A string replaced by the toolbar items.
+    TOOL_NAME           - A string replaced by a toolbar item's name.
+    TOOL_TITLE          - A string replaced by a toolbar item's title.
+    TOOL_CLASS          - A string replaced by a toolbar item's class.
+    CLASSES             - A string replaced by the classes panel's HTML.
+    CLASSES_ITEMS       - A string replaced by the classes items.
+    CLASS_NAME          - A string replaced by a class item's name.
+    CLASS_TITLE         - A string replaced by a class item's title.
+    CONTAINERS          - A string replaced by the containers panel's HTML.
+    CONTAINERS_ITEMS    - A string replaced by the containers items.
+    CONTAINER_NAME      - A string replaced by a container item's name.
+    CONTAINER_TITLE     - A string replaced by a container item's title.
+    CONTAINER_CLASS     - A string replaced by a container item's class.
+    HTML                - A string replaced by the HTML view panel's HTML.
+    IFRAME              - A string replaced by the designmode iframe.
+    STATUS              - A string replaced by the status panel's HTML.
+    DIALOG_TITLE        - A string replaced by a dialog's title.
+    DIALOG_BODY         - A string replaced by a dialog's HTML body.
+    BODY                - The BODY element.
+    STRING              - The "string" type.
+    BODY,DIV,P,
+    H1,H2,H3,H4,H5,H6,
+    PRE,BLOCKQUOTE,
+    A,BR,IMG,
+    TABLE,TD,TH,
+    UL,OL,LI            - HTML elements string representation.
+    CLASS,HREF,SRC,
+    TITLE,ALT           - HTML attributes string representation.
+    DIALOG_LINK         - A link dialog type.
+    DIALOG_IMAGE        - An image dialog type.
+    DIALOG_TABLE        - A table dialog type.
+    DIALOG_PASTE        - A 'Paste from Word' dialog type.
+    BOLD                - Command: (un)set selection to <strong>.
+    ITALIC              - Command: (un)set selection to <em>.
+    CREATE_LINK         - Command: open the link dialog or (un)set link.
+    INSERT_IMAGE        - Command: open the image dialog or insert an image.
+    INSERT_TABLE        - Command: open the table dialog.
+    PASTE               - Command: open the paste dialog.
+    INDENT              - Command: nest a list item.
+    OUTDENT             - Command: unnest a list item.
+    TOGGLE_HTML         - Command: display/hide the HTML view.
+    FORMAT_BLOCK        - Command: set a block element to another type.
+    PREVIEW             - Command: open the preview dialog.
+    UNLINK              - Command: unset a link.
+    INSERT_UNORDEREDLIST- Command: insert an unordered list.
+    INSERT_ORDEREDLIST  - Command: insert an ordered list.
+    MAIN_CONTAINERS     - An array of the main HTML containers used in WYMeditor.
+    BLOCKS              - An array of the HTML block elements.
+    KEY                 - Standard key codes.
+    NODE                - Node types.
+
+*/
+
+    VERSION             : "0.5-rc1",
+    INSTANCES           : [],
+    STRINGS             : [],
+    SKINS               : [],
+    NAME                : "name",
+    INDEX               : "{Wym_Index}",
+    WYM_INDEX           : "wym_index",
+    BASE_PATH           : "{Wym_Base_Path}",
+    CSS_PATH            : "{Wym_Css_Path}",
+    WYM_PATH            : "{Wym_Wym_Path}",
+    SKINS_DEFAULT_PATH  : "skins/",
+    SKINS_DEFAULT_CSS   : "skin.css",
+    SKINS_DEFAULT_JS    : "skin.js",
+    LANG_DEFAULT_PATH   : "lang/",
+    IFRAME_BASE_PATH    : "{Wym_Iframe_Base_Path}",
+    IFRAME_DEFAULT      : "iframe/default/",
+    JQUERY_PATH         : "{Wym_Jquery_Path}",
+    DIRECTION           : "{Wym_Direction}",
+    LOGO                : "{Wym_Logo}",
+    TOOLS               : "{Wym_Tools}",
+    TOOLS_ITEMS         : "{Wym_Tools_Items}",
+    TOOL_NAME           : "{Wym_Tool_Name}",
+    TOOL_TITLE          : "{Wym_Tool_Title}",
+    TOOL_CLASS          : "{Wym_Tool_Class}",
+    CLASSES             : "{Wym_Classes}",
+    CLASSES_ITEMS       : "{Wym_Classes_Items}",
+    CLASS_NAME          : "{Wym_Class_Name}",
+    CLASS_TITLE         : "{Wym_Class_Title}",
+    CONTAINERS          : "{Wym_Containers}",
+    CONTAINERS_ITEMS    : "{Wym_Containers_Items}",
+    CONTAINER_NAME      : "{Wym_Container_Name}",
+    CONTAINER_TITLE     : "{Wym_Containers_Title}",
+    CONTAINER_CLASS     : "{Wym_Container_Class}",
+    HTML                : "{Wym_Html}",
+    IFRAME              : "{Wym_Iframe}",
+    STATUS              : "{Wym_Status}",
+    DIALOG_TITLE        : "{Wym_Dialog_Title}",
+    DIALOG_BODY         : "{Wym_Dialog_Body}",
+    STRING              : "string",
+    BODY                : "body",
+    DIV                 : "div",
+    P                   : "p",
+    H1                  : "h1",
+    H2                  : "h2",
+    H3                  : "h3",
+    H4                  : "h4",
+    H5                  : "h5",
+    H6                  : "h6",
+    PRE                 : "pre",
+    BLOCKQUOTE          : "blockquote",
+    A                   : "a",
+    BR                  : "br",
+    IMG                 : "img",
+    TABLE               : "table",
+    TD                  : "td",
+    TH                  : "th",
+    UL                  : "ul",
+    OL                  : "ol",
+    LI                  : "li",
+    CLASS               : "class",
+    HREF                : "href",
+    SRC                 : "src",
+    TITLE               : "title",
+    ALT                 : "alt",
+    DIALOG_LINK         : "Link",
+    DIALOG_IMAGE        : "Image",
+    DIALOG_TABLE        : "Table",
+    DIALOG_PASTE        : "Paste_From_Word",
+    BOLD                : "Bold",
+    ITALIC              : "Italic",
+    CREATE_LINK         : "CreateLink",
+    INSERT_IMAGE        : "InsertImage",
+    INSERT_TABLE        : "InsertTable",
+    INSERT_HTML         : "InsertHTML",
+    PASTE               : "Paste",
+    INDENT              : "Indent",
+    OUTDENT             : "Outdent",
+    TOGGLE_HTML         : "ToggleHtml",
+    FORMAT_BLOCK        : "FormatBlock",
+    PREVIEW             : "Preview",
+    UNLINK              : "Unlink",
+    INSERT_UNORDEREDLIST: "InsertUnorderedList",
+    INSERT_ORDEREDLIST  : "InsertOrderedList",
+
+    MAIN_CONTAINERS : ["p", "h1", "h2", "h3", "h4", "h5", "h6", "pre", "blockquote"],
+
+    BLOCKS : ["address", "blockquote", "div", "dl",
+        "fieldset", "form", "h1", "h2", "h3", "h4", "h5", "h6", "hr",
+        "noscript", "ol", "p", "pre", "table", "ul", "dd", "dt",
+        "li", "tbody", "td", "tfoot", "th", "thead", "tr"],
+
+    KEY : {
+      BACKSPACE: 8,
+      ENTER: 13,
+      END: 35,
+      HOME: 36,
+      LEFT: 37,
+      UP: 38,
+      RIGHT: 39,
+      DOWN: 40,
+      CURSOR: [37, 38, 39, 40],
+      DELETE: 46
+    },
+
+    NODE : {
+      ELEMENT: 1,
+      ATTRIBUTE: 2,
+      TEXT: 3
+    },
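+
+    /*
+        Illustrative note (not from the original docs): KEY and NODE are plain
+        lookup tables, used e.g. as
+
+            if(evt.keyCode == WYMeditor.KEY.ENTER) { ... }
+            if(node.nodeType == WYMeditor.NODE.TEXT) { ... }
+    */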
+
+    /*
+        Class: WYMeditor.editor
+        WYMeditor editor main class, instantiated for each editor occurrence.
+    */
+
+    editor : function(elem, options) {
+
+        /*
+            Constructor: WYMeditor.editor
+
+            Initializes main values (index, elements, paths, ...)
+            and calls WYMeditor.editor.init, which initializes the editor.
+
+            Parameters:
+
+                elem - The HTML element to be replaced by the editor.
+                options - The hash of options.
+
+            Returns:
+
+                Nothing.
+
+            See Also:
+
+                <WYMeditor.editor.init>
+        */
+
+        //store the instance in the INSTANCES array and store the index
+        this._index = WYMeditor.INSTANCES.push(this) - 1;
+        //store the element replaced by the editor
+        this._element = elem;
+        //store the options
+        this._options = options;
+        //store the element's inner value
+        this._html = jQuery(elem).val();
+
+        //store the HTML option, if any
+        if(this._options.html) this._html = this._options.html;
+        //get or compute the base path (where the main JS file is located)
+        this._options.basePath = this._options.basePath
+        || this.computeBasePath();
+        //get or set the skin path (where the skin files are located)
+        this._options.skinPath = this._options.skinPath
+        || this._options.basePath + WYMeditor.SKINS_DEFAULT_PATH
+           + this._options.skin + '/';
+        //get or compute the main JS file location
+        this._options.wymPath = this._options.wymPath
+        || this.computeWymPath();
+        //get or set the language files path
+        this._options.langPath = this._options.langPath
+        || this._options.basePath + WYMeditor.LANG_DEFAULT_PATH;
+        //get or set the designmode iframe's base path
+        this._options.iframeBasePath = this._options.iframeBasePath
+        || this._options.basePath + WYMeditor.IFRAME_DEFAULT;
+        //get or compute the jQuery JS file location
+        this._options.jQueryPath = this._options.jQueryPath
+        || this.computeJqueryPath();
+
+        //initialize the editor instance
+        this.init();
+
+    }
+
+});
+
+/********** JQUERY **********/
+
+/**
+ * Replace an HTML element by WYMeditor
+ *
+ * @example jQuery(".wymeditor").wymeditor(
+ *        {
+ *          lang: "en"
+ *        }
+ *      );
+ * @desc Replaces each matched element (typically a textarea) by a WYMeditor instance
+ *
+ * @name WYMeditor
+ * @description WYMeditor is a web-based WYSIWYM XHTML editor
+ * @param Hash options A hash of options (see the defaults below)
+ * @option String skin Name of the skin to load (default: "default")
+ * @option String lang Language of the interface (default: "en")
+ *
+ * @type jQuery
+ * @cat Plugins/WYMeditor
+ * @author Jean-Francois Hovinne
+ */
+jQuery.fn.wymeditor = function(options) {
+
+  options = jQuery.extend({
+
+    html:       "",
+    
+    basePath:   false,
+    
+    skinPath:    false,
+    
+    wymPath:    false,
+    
+    iframeBasePath: false,
+    
+    jQueryPath: false,
+    
+    styles: false,
+    
+    stylesheet: false,
+    
+    skin:       "default",
+    initSkin:   true,
+    loadSkin:   true,
+
+    lang:       "en",
+
+    direction:  "ltr",
+
+    boxHtml:   "<div class='wym_box'>"
+              + "<div class='wym_area_top'>" 
+              + WYMeditor.TOOLS
+              + "</div>"
+              + "<div class='wym_area_left'></div>"
+              + "<div class='wym_area_right'>"
+              + WYMeditor.CONTAINERS
+              + WYMeditor.CLASSES
+              + "</div>"
+              + "<div class='wym_area_main'>"
+              + WYMeditor.HTML
+              + WYMeditor.IFRAME
+              + WYMeditor.STATUS
+              + "</div>"
+              + "<div class='wym_area_bottom'>"
+              + WYMeditor.LOGO
+              + "</div>"
+              + "</div>",
+
+    logoHtml:  "<a class='wym_wymeditor_link' "
+              + "href='http://www.wymeditor.org/'>WYMeditor</a>",
+
+    iframeHtml:"<div class='wym_iframe wym_section'>"
+              + "<iframe "
+              + "src='"
+              + "/page/get_editor_iframe' "
+              //+ WYMeditor.IFRAME_BASE_PATH
+              //+ "wymiframe.html' "
+              + "onload='this.contentWindow.parent.WYMeditor.INSTANCES["
+              + WYMeditor.INDEX + "].initIframe(this)'"
+              + "></iframe>"
+              + "</div>",
+              
+    editorStyles: [],
+
+    toolsHtml: "<div class='wym_tools wym_section'>"
+              + "<h2>{Tools}</h2>"
+              + "<ul>"
+              + WYMeditor.TOOLS_ITEMS
+              + "</ul>"
+              + "</div>",
+              
+    toolsItemHtml:   "<li class='"
+                        + WYMeditor.TOOL_CLASS
+                        + "'><a href='#' name='"
+                        + WYMeditor.TOOL_NAME
+                        + "' title='"
+                        + WYMeditor.TOOL_TITLE
+                        + "'>"
+                        + WYMeditor.TOOL_TITLE
+                        + "</a></li>",
+
+    toolsItems: [
+        {'name': 'Bold', 'title': 'Strong', 'css': 'wym_tools_strong'}, 
+        {'name': 'Italic', 'title': 'Emphasis', 'css': 'wym_tools_emphasis'},
+        {'name': 'Superscript', 'title': 'Superscript',
+            'css': 'wym_tools_superscript'},
+        {'name': 'Subscript', 'title': 'Subscript',
+            'css': 'wym_tools_subscript'},
+        {'name': 'InsertOrderedList', 'title': 'Ordered_List',
+            'css': 'wym_tools_ordered_list'},
+        {'name': 'InsertUnorderedList', 'title': 'Unordered_List',
+            'css': 'wym_tools_unordered_list'},
+        {'name': 'Indent', 'title': 'Indent', 'css': 'wym_tools_indent'},
+        {'name': 'Outdent', 'title': 'Outdent', 'css': 'wym_tools_outdent'},
+        {'name': 'Undo', 'title': 'Undo', 'css': 'wym_tools_undo'},
+        {'name': 'Redo', 'title': 'Redo', 'css': 'wym_tools_redo'},
+        {'name': 'CreateLink', 'title': 'Link', 'css': 'wym_tools_link'},
+        {'name': 'Unlink', 'title': 'Unlink', 'css': 'wym_tools_unlink'},
+        {'name': 'InsertImage', 'title': 'Image', 'css': 'wym_tools_image'},
+        {'name': 'InsertTable', 'title': 'Table', 'css': 'wym_tools_table'},
+        {'name': 'Paste', 'title': 'Paste_From_Word',
+            'css': 'wym_tools_paste'},
+        {'name': 'ToggleHtml', 'title': 'HTML', 'css': 'wym_tools_html'},
+        {'name': 'Preview', 'title': 'Preview', 'css': 'wym_tools_preview'}
+    ],
+
+    containersHtml:    "<div class='wym_containers wym_section'>"
+                        + "<h2>{Containers}</h2>"
+                        + "<ul>"
+                        + WYMeditor.CONTAINERS_ITEMS
+                        + "</ul>"
+                        + "</div>",
+                        
+    containersItemHtml:"<li class='"
+                        + WYMeditor.CONTAINER_CLASS
+                        + "'>"
+                        + "<a href='#' name='"
+                        + WYMeditor.CONTAINER_NAME
+                        + "'>"
+                        + WYMeditor.CONTAINER_TITLE
+                        + "</a></li>",
+                        
+    containersItems: [
+        {'name': 'P', 'title': 'Paragraph', 'css': 'wym_containers_p'},
+        {'name': 'H1', 'title': 'Heading_1', 'css': 'wym_containers_h1'},
+        {'name': 'H2', 'title': 'Heading_2', 'css': 'wym_containers_h2'},
+        {'name': 'H3', 'title': 'Heading_3', 'css': 'wym_containers_h3'},
+        {'name': 'H4', 'title': 'Heading_4', 'css': 'wym_containers_h4'},
+        {'name': 'H5', 'title': 'Heading_5', 'css': 'wym_containers_h5'},
+        {'name': 'H6', 'title': 'Heading_6', 'css': 'wym_containers_h6'},
+        {'name': 'PRE', 'title': 'Preformatted', 'css': 'wym_containers_pre'},
+        {'name': 'BLOCKQUOTE', 'title': 'Blockquote',
+            'css': 'wym_containers_blockquote'},
+        {'name': 'TH', 'title': 'Table_Header', 'css': 'wym_containers_th'}
+    ],
+
+    classesHtml:       "<div class='wym_classes wym_section'>"
+                        + "<h2>{Classes}</h2><ul>"
+                        + WYMeditor.CLASSES_ITEMS
+                        + "</ul></div>",
+
+    classesItemHtml:   "<li><a href='#' name='"
+                        + WYMeditor.CLASS_NAME
+                        + "'>"
+                        + WYMeditor.CLASS_TITLE
+                        + "</a></li>",
+
+    classesItems:      [],
+
+    statusHtml:        "<div class='wym_status wym_section'>"
+                        + "<h2>{Status}</h2>"
+                        + "</div>",
+
+    htmlHtml:          "<div class='wym_html wym_section'>"
+                        + "<h2>{Source_Code}</h2>"
+                        + "<textarea class='wym_html_val'></textarea>"
+                        + "</div>",
+
+    boxSelector:       ".wym_box",
+    toolsSelector:     ".wym_tools",
+    toolsListSelector: " ul",
+    containersSelector:".wym_containers",
+    classesSelector:   ".wym_classes",
+    htmlSelector:      ".wym_html",
+    iframeSelector:    ".wym_iframe iframe",
+    iframeBodySelector:".wym_iframe",
+    statusSelector:    ".wym_status",
+    toolSelector:      ".wym_tools a",
+    containerSelector: ".wym_containers a",
+    classSelector:     ".wym_classes a",
+    htmlValSelector:   ".wym_html_val",
+    
+    hrefSelector:      ".wym_href",
+    srcSelector:       ".wym_src",
+    titleSelector:     ".wym_title",
+    altSelector:       ".wym_alt",
+    textSelector:      ".wym_text",
+    
+    rowsSelector:      ".wym_rows",
+    colsSelector:      ".wym_cols",
+    captionSelector:   ".wym_caption",
+    summarySelector:   ".wym_summary",
+    
+    submitSelector:    ".wym_submit",
+    cancelSelector:    ".wym_cancel",
+    previewSelector:   "",
+    
+    dialogTypeSelector:    ".wym_dialog_type",
+    dialogLinkSelector:    ".wym_dialog_link",
+    dialogImageSelector:   ".wym_dialog_image",
+    dialogTableSelector:   ".wym_dialog_table",
+    dialogPasteSelector:   ".wym_dialog_paste",
+    dialogPreviewSelector: ".wym_dialog_preview",
+    
+    updateSelector:    ".wymupdate",
+    updateEvent:       "click",
+    
+    dialogFeatures:    "menubar=no,titlebar=no,toolbar=no,resizable=no"
+                      + ",width=560,height=300,top=0,left=0",
+    dialogFeaturesPreview: "menubar=no,titlebar=no,toolbar=no,resizable=no"
+                      + ",scrollbars=yes,width=560,height=300,top=0,left=0",
+
+    dialogHtml:      "<!DOCTYPE html PUBLIC '-//W3C//DTD XHTML 1.0 Strict//EN'"
+                      + " 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd'>"
+                      + "<html dir='"
+                      + WYMeditor.DIRECTION
+                      + "'><head>"
+                      + "<link rel='stylesheet' type='text/css' media='screen'"
+                      + " href='"
+                      + WYMeditor.CSS_PATH
+                      + "' />"
+                      + "<title>"
+                      + WYMeditor.DIALOG_TITLE
+                      + "</title>"
+                      + "<script type='text/javascript'"
+                      + " src='"
+                      + WYMeditor.JQUERY_PATH
+                      + "'></script>"
+                      + "<script type='text/javascript'"
+                      + " src='"
+                      + WYMeditor.WYM_PATH
+                      + "'></script>"
+                      + "</head>"
+                      + WYMeditor.DIALOG_BODY
+                      + "</html>",
+                      
+    dialogLinkHtml:  "<body class='wym_dialog wym_dialog_link'"
+               + " onload='WYMeditor.INIT_DIALOG(" + WYMeditor.INDEX + ")'"
+               + ">"
+               + "<form>"
+               + "<fieldset>"
+               + "<input type='hidden' class='wym_dialog_type' value='"
+               + WYMeditor.DIALOG_LINK
+               + "' />"
+               + "<legend>{Link}</legend>"
+               + "<div class='row'>"
+               + "<label>{URL}</label>"
+               + "<input type='text' class='wym_href' value='' size='40' />"
+               + "</div>"
+               + "<div class='row'>"
+               + "<label>{Title}</label>"
+               + "<input type='text' class='wym_title' value='' size='40' />"
+               + "</div>"
+               + "<div class='row row-indent'>"
+               + "<input class='wym_submit' type='button'"
+               + " value='{Submit}' />"
+               + "<input class='wym_cancel' type='button'"
+               + "value='{Cancel}' />"
+               + "</div>"
+               + "</fieldset>"
+               + "</form>"
+               + "</body>",
+    
+    dialogImageHtml:  "<body class='wym_dialog wym_dialog_image'"
+               + " onload='WYMeditor.INIT_DIALOG(" + WYMeditor.INDEX + ")'"
+               + ">"
+               + "<form>"
+               + "<fieldset>"
+               + "<input type='hidden' class='wym_dialog_type' value='"
+               + WYMeditor.DIALOG_IMAGE
+               + "' />"
+               + "<legend>{Image}</legend>"
+               + "<div class='row'>"
+               + "<label>{URL}</label>"
+               + "<input type='text' class='wym_src' value='' size='40' />"
+               + "</div>"
+               + "<div class='row'>"
+               + "<label>{Alternative_Text}</label>"
+               + "<input type='text' class='wym_alt' value='' size='40' />"
+               + "</div>"
+               + "<div class='row'>"
+               + "<label>{Title}</label>"
+               + "<input type='text' class='wym_title' value='' size='40' />"
+               + "</div>"
+               + "<div class='row row-indent'>"
+               + "<input class='wym_submit' type='button'"
+               + " value='{Submit}' />"
+               + "<input class='wym_cancel' type='button'"
+               + "value='{Cancel}' />"
+               + "</div>"
+               + "</fieldset>"
+               + "</form>"
+               + "</body>",
+    
+    dialogTableHtml:  "<body class='wym_dialog wym_dialog_table'"
+               + " onload='WYMeditor.INIT_DIALOG(" + WYMeditor.INDEX + ")'"
+               + ">"
+               + "<form>"
+               + "<fieldset>"
+               + "<input type='hidden' class='wym_dialog_type' value='"
+               + WYMeditor.DIALOG_TABLE
+               + "' />"
+               + "<legend>{Table}</legend>"
+               + "<div class='row'>"
+               + "<label>{Caption}</label>"
+               + "<input type='text' class='wym_caption' value='' size='40' />"
+               + "</div>"
+               + "<div class='row'>"
+               + "<label>{Summary}</label>"
+               + "<input type='text' class='wym_summary' value='' size='40' />"
+               + "</div>"
+               + "<div class='row'>"
+               + "<label>{Number_Of_Rows}</label>"
+               + "<input type='text' class='wym_rows' value='3' size='3' />"
+               + "</div>"
+               + "<div class='row'>"
+               + "<label>{Number_Of_Cols}</label>"
+               + "<input type='text' class='wym_cols' value='2' size='3' />"
+               + "</div>"
+               + "<div class='row row-indent'>"
+               + "<input class='wym_submit' type='button'"
+               + " value='{Submit}' />"
+               + "<input class='wym_cancel' type='button'"
+               + "value='{Cancel}' />"
+               + "</div>"
+               + "</fieldset>"
+               + "</form>"
+               + "</body>",
+
+    dialogPasteHtml:  "<body class='wym_dialog wym_dialog_paste'"
+               + " onload='WYMeditor.INIT_DIALOG(" + WYMeditor.INDEX + ")'"
+               + ">"
+               + "<form>"
+               + "<input type='hidden' class='wym_dialog_type' value='"
+               + WYMeditor.DIALOG_PASTE
+               + "' />"
+               + "<fieldset>"
+               + "<legend>{Paste_From_Word}</legend>"
+               + "<div class='row'>"
+               + "<textarea class='wym_text' rows='10' cols='50'></textarea>"
+               + "</div>"
+               + "<div class='row'>"
+               + "<input class='wym_submit' type='button'"
+               + " value='{Submit}' />"
+               + "<input class='wym_cancel' type='button'"
+               + "value='{Cancel}' />"
+               + "</div>"
+               + "</fieldset>"
+               + "</form>"
+               + "</body>",
+
+    dialogPreviewHtml: "<body class='wym_dialog wym_dialog_preview'"
+                      + " onload='WYMeditor.INIT_DIALOG(" + WYMeditor.INDEX + ")'"
+                      + "></body>",
+                      
+    dialogStyles: [],
+
+    stringDelimiterLeft: "{",
+    stringDelimiterRight:"}",
+    
+    preInit: null,
+    preBind: null,
+    postInit: null,
+    
+    preInitDialog: null,
+    postInitDialog: null
+
+  }, options);
+
+  return this.each(function() {
+
+    new WYMeditor.editor(jQuery(this),options);
+  });
+};
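+
+/* Usage sketch (illustrative, not part of the original docs; option values
+ * shown are the defaults listed above):
+ *
+ *   jQuery(".wymeditor").wymeditor({
+ *     skin: "default",
+ *     lang: "en"
+ *   });
+ */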
+
+/* @name wymeditors
+ * @description Returns the WYMeditor instance based on its index
+ */
+jQuery.extend({
+  wymeditors: function(i) {
+    return (WYMeditor.INSTANCES[i]);
+  }
+});
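+
+/* Example (illustrative): retrieve an existing instance by creation index:
+ *
+ *   var wym = jQuery.wymeditors(0);  // first editor created on the page
+ */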
+
+
+/********** WYMeditor **********/
+
+/* @name WYMeditor
+ * @description WYMeditor class
+ */
+
+/* @name init
+ * @description Initializes a WYMeditor instance
+ */
+WYMeditor.editor.prototype.init = function() {
+
+  //load subclass - browser specific
+  //unsupported browsers: do nothing
+  if (jQuery.browser.msie) {
+    var WymClass = new WYMeditor.WymClassExplorer(this);
+  }
+  else if (jQuery.browser.mozilla) {
+    var WymClass = new WYMeditor.WymClassMozilla(this);
+  }
+  else if (jQuery.browser.opera) {
+    var WymClass = new WYMeditor.WymClassOpera(this);
+  }
+  // Galaxy HACK: add Chrome to browser detection; this is fixed in later versions of WYMEditor.
+  else if (jQuery.browser.safari || jQuery.browser.chrome) {
+    var WymClass = new WYMeditor.WymClassSafari(this);
+  }
+  
+  if(WymClass) {
+  
+      if(jQuery.isFunction(this._options.preInit)) this._options.preInit(this);
+
+      var SaxListener = new WYMeditor.XhtmlSaxListener();
+      jQuery.extend(SaxListener, WymClass);
+      this.parser = new WYMeditor.XhtmlParser(SaxListener);
+      
+      if(this._options.styles || this._options.stylesheet){
+        this.configureEditorUsingRawCss();
+      }
+      
+      this.helper = new WYMeditor.XmlHelper();
+      
+      //extend the Wymeditor object
+      //don't use jQuery.extend since 1.1.4
+      //jQuery.extend(this, WymClass);
+      for (var prop in WymClass) { this[prop] = WymClass[prop]; }
+
+      //load wymbox
+      this._box = jQuery(this._element).hide().after(this._options.boxHtml).next().addClass('wym_box_' + this._index);
+
+      //store the instance index in wymbox and element replaced by editor instance
+      //but keep it compatible with jQuery < 1.2.3, see #122
+      if( jQuery.isFunction( jQuery.fn.data ) ) {
+        jQuery.data(this._box.get(0), WYMeditor.WYM_INDEX, this._index);
+        jQuery.data(this._element.get(0), WYMeditor.WYM_INDEX, this._index);
+      }
+      
+      var h = WYMeditor.Helper;
+
+      //construct the iframe
+      var iframeHtml = this._options.iframeHtml;
+      iframeHtml = h.replaceAll(iframeHtml, WYMeditor.INDEX, this._index);
+      iframeHtml = h.replaceAll(iframeHtml, WYMeditor.IFRAME_BASE_PATH, this._options.iframeBasePath);
+      
+      //construct wymbox
+      var boxHtml = jQuery(this._box).html();
+      
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.LOGO, this._options.logoHtml);
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.TOOLS, this._options.toolsHtml);
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.CONTAINERS,this._options.containersHtml);
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.CLASSES, this._options.classesHtml);
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.HTML, this._options.htmlHtml);
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.IFRAME, iframeHtml);
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.STATUS, this._options.statusHtml);
+      
+      //construct tools list
+      var aTools = eval(this._options.toolsItems);
+      var sTools = "";
+
+      for(var i = 0; i < aTools.length; i++) {
+        var oTool = aTools[i];
+        //only render tools that define both a name and a title
+        if(oTool.name && oTool.title) {
+          var sTool = this._options.toolsItemHtml;
+          sTool = h.replaceAll(sTool, WYMeditor.TOOL_NAME, oTool.name);
+          sTool = h.replaceAll(sTool, WYMeditor.TOOL_TITLE, this._options.stringDelimiterLeft
+            + oTool.title
+            + this._options.stringDelimiterRight);
+          sTool = h.replaceAll(sTool, WYMeditor.TOOL_CLASS, oTool.css);
+          sTools += sTool;
+        }
+      }
+
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.TOOLS_ITEMS, sTools);
+      
+      //construct classes list
+      var aClasses = eval(this._options.classesItems);
+      var sClasses = "";
+
+      for(var i = 0; i < aClasses.length; i++) {
+        var oClass = aClasses[i];
+        //only render classes that define both a name and a title
+        if(oClass.name && oClass.title) {
+          var sClass = this._options.classesItemHtml;
+          sClass = h.replaceAll(sClass, WYMeditor.CLASS_NAME, oClass.name);
+          sClass = h.replaceAll(sClass, WYMeditor.CLASS_TITLE, oClass.title);
+          sClasses += sClass;
+        }
+      }
+
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.CLASSES_ITEMS, sClasses);
+      
+      //construct containers list
+      var aContainers = eval(this._options.containersItems);
+      var sContainers = "";
+
+      for(var i = 0; i < aContainers.length; i++) {
+        var oContainer = aContainers[i];
+        //only render containers that define both a name and a title
+        if(oContainer.name && oContainer.title) {
+          var sContainer = this._options.containersItemHtml;
+          sContainer = h.replaceAll(sContainer, WYMeditor.CONTAINER_NAME, oContainer.name);
+          sContainer = h.replaceAll(sContainer, WYMeditor.CONTAINER_TITLE,
+              this._options.stringDelimiterLeft
+            + oContainer.title
+            + this._options.stringDelimiterRight);
+          sContainer = h.replaceAll(sContainer, WYMeditor.CONTAINER_CLASS, oContainer.css);
+          sContainers += sContainer;
+        }
+      }
+
+      boxHtml = h.replaceAll(boxHtml, WYMeditor.CONTAINERS_ITEMS, sContainers);
+
+      //l10n
+      boxHtml = this.replaceStrings(boxHtml);
+      
+      //load html in wymbox
+      jQuery(this._box).html(boxHtml);
+      
+      //hide the html value
+      jQuery(this._box).find(this._options.htmlSelector).hide();
+      
+      //enable the skin
+      this.loadSkin();
+      
+    }
+};
+
+WYMeditor.editor.prototype.bindEvents = function() {
+
+  //copy the instance
+  var wym = this;
+  
+  //handle click event on tools buttons
+  jQuery(this._box).find(this._options.toolSelector).click(function() {
+    wym._iframe.contentWindow.focus(); //See #154
+    wym.exec(jQuery(this).attr(WYMeditor.NAME));    
+    return(false);
+  });
+  
+  //handle click event on containers buttons
+  jQuery(this._box).find(this._options.containerSelector).click(function() {
+    wym.container(jQuery(this).attr(WYMeditor.NAME));
+    return(false);
+  });
+  
+  //handle keyup event on html value: set the editor value
+  //handle focus/blur events to check if the element has focus, see #147
+  jQuery(this._box).find(this._options.htmlValSelector)
+    .keyup(function() { jQuery(wym._doc.body).html(jQuery(this).val());})
+    .focus(function() { jQuery(this).toggleClass('hasfocus'); })
+    .blur(function() { jQuery(this).toggleClass('hasfocus'); });
+
+  //handle click event on classes buttons
+  jQuery(this._box).find(this._options.classSelector).click(function() {
+  
+    var aClasses = eval(wym._options.classesItems);
+    var sName = jQuery(this).attr(WYMeditor.NAME);
+    
+    var oClass = WYMeditor.Helper.findByName(aClasses, sName);
+    
+    if(oClass) {
+      var jqexpr = oClass.expr;
+      wym.toggleClass(sName, jqexpr);
+    }
+    wym._iframe.contentWindow.focus(); //See #154
+    return(false);
+  });
+  
+  //handle event on update element
+  jQuery(this._options.updateSelector)
+    .bind(this._options.updateEvent, function() {
+      wym.update();
+  });
+};
+
+WYMeditor.editor.prototype.ready = function() {
+  return(this._doc != null);
+};
+
+
+/********** METHODS **********/
+
+/* @name box
+ * @description Returns the WYMeditor container
+ */
+WYMeditor.editor.prototype.box = function() {
+  return(this._box);
+};
+
+/* @name html
+ * @description Get/Set the html value
+ */
+WYMeditor.editor.prototype.html = function(html) {
+
+  if(typeof html === 'string') jQuery(this._doc.body).html(html);
+  else return(jQuery(this._doc.body).html());
+};
+
+/* @name xhtml
+ * @description Cleans up the HTML
+ */
+WYMeditor.editor.prototype.xhtml = function() {
+    return this.parser.parse(this.html());
+};
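+
+/* Example (illustrative), assuming wym = jQuery.wymeditors(0):
+ *
+ *   wym.html("<p>Hello</p>");  // set the editable body
+ *   wym.html();                // # => "<p>Hello</p>"
+ *   wym.xhtml();               // # => the same markup, cleaned by the parser
+ */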
+
+/* @name exec
+ * @description Executes a button command
+ */
+WYMeditor.editor.prototype.exec = function(cmd) {
+  
+  //base function for execCommand
+  //open a dialog or exec
+  switch(cmd) {
+    case WYMeditor.CREATE_LINK:
+      var container = this.container();
+      if(container || this._selected_image) this.dialog(WYMeditor.DIALOG_LINK);
+    break;
+    
+    case WYMeditor.INSERT_IMAGE:
+      this.dialog(WYMeditor.DIALOG_IMAGE);
+    break;
+    
+    case WYMeditor.INSERT_TABLE:
+      this.dialog(WYMeditor.DIALOG_TABLE);
+    break;
+    
+    case WYMeditor.PASTE:
+      this.dialog(WYMeditor.DIALOG_PASTE);
+    break;
+    
+    case WYMeditor.TOGGLE_HTML:
+      this.update();
+      this.toggleHtml();
+
+      //partially fixes #121 when the user manually inserts an image
+      if(!jQuery(this._box).find(this._options.htmlSelector).is(':visible'))
+        this.listen();
+    break;
+    
+    case WYMeditor.PREVIEW:
+      this.dialog(WYMeditor.PREVIEW, this._options.dialogFeaturesPreview);
+    break;
+    
+    default:
+      this._exec(cmd);
+    break;
+  }
+};
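+
+/* Example (illustrative): exec() is what the toolbar buttons call, and it can
+ * be invoked directly with any command constant defined above:
+ *
+ *   wym.exec(WYMeditor.BOLD);         // (un)set <strong> on the selection
+ *   wym.exec(WYMeditor.TOGGLE_HTML);  // show/hide the source view
+ */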
+
+/* @name container
+ * @description Get/Set the selected container
+ */
+WYMeditor.editor.prototype.container = function(sType) {
+
+  if(sType) {
+  
+    var container = null;
+    
+    if(sType.toLowerCase() == WYMeditor.TH) {
+    
+      container = this.container();
+      
+      //find the TD or TH container
+      switch(container.tagName.toLowerCase()) {
+      
+        case WYMeditor.TD: case WYMeditor.TH:
+          break;
+        default:
+          var aTypes = [WYMeditor.TD, WYMeditor.TH];
+          container = this.findUp(this.container(), aTypes);
+          break;
+      }
+      
+      //if it exists, switch
+      if(container!=null) {
+      
+        sType = (container.tagName.toLowerCase() == WYMeditor.TD)? WYMeditor.TH: WYMeditor.TD;
+        this.switchTo(container,sType);
+        this.update();
+      }
+    } else {
+  
+      //set the container type
+      var aTypes = [WYMeditor.P, WYMeditor.H1, WYMeditor.H2, WYMeditor.H3, WYMeditor.H4,
+        WYMeditor.H5, WYMeditor.H6, WYMeditor.PRE, WYMeditor.BLOCKQUOTE];
+      container = this.findUp(this.container(), aTypes);
+      
+      if(container) {
+  
+        var newNode = null;
+  
+        //blockquotes must contain a block level element
+        if(sType.toLowerCase() == WYMeditor.BLOCKQUOTE) {
+        
+          var blockquote = this.findUp(this.container(), WYMeditor.BLOCKQUOTE);
+          
+          if(blockquote == null) {
+          
+            newNode = this._doc.createElement(sType);
+            container.parentNode.insertBefore(newNode,container);
+            newNode.appendChild(container);
+            this.setFocusToNode(newNode.firstChild);
+            
+          } else {
+          
+            var nodes = blockquote.childNodes;
+            var lgt = nodes.length;
+            var firstNode = null;
+            
+            if(lgt > 0) firstNode = nodes.item(0);
+            for(var x=0; x<lgt; x++) {
+              blockquote.parentNode.insertBefore(nodes.item(0),blockquote);
+            }
+            blockquote.parentNode.removeChild(blockquote);
+            if(firstNode) this.setFocusToNode(firstNode);
+          }
+        }
+        
+        else this.switchTo(container,sType);
+      
+        this.update();
+      }
+    }
+  }
+  else return(this.selected());
+};
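+
+/* Example (illustrative): the same names the containers panel passes in can be
+ * used directly:
+ *
+ *   wym.container(WYMeditor.H2);  // switch the selected block to <h2>
+ *   wym.container();              // no argument: return the selected node
+ */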
+
+/* @name toggleClass
+ * @description Toggles class on selected element, or one of its parents
+ */
+WYMeditor.editor.prototype.toggleClass = function(sClass, jqexpr) {
+
+  var container = (this._selected_image
+                    ? this._selected_image
+                    : jQuery(this.selected()));
+  container = jQuery(container).parentsOrSelf(jqexpr);
+  jQuery(container).toggleClass(sClass);
+
+  if(!jQuery(container).attr(WYMeditor.CLASS)) jQuery(container).removeAttr(this._class);
+
+};
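+
+/* Example (illustrative): the second argument is a jQuery expression (the
+ * 'expr' field of a classesItems entry) selecting which ancestor to toggle:
+ *
+ *   wym.toggleClass("important", "p");  // toggle class="important" on the enclosing <p>
+ */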
+
+/* @name findUp
+ * @description Returns the first parent or self container, based on its type
+ */
+WYMeditor.editor.prototype.findUp = function(node, filter) {
+
+  //filter is a string or an array of strings
+
+  if(node) {
+
+      var tagname = node.tagName.toLowerCase();
+      
+      if(typeof(filter) == WYMeditor.STRING) {
+    
+        while(tagname != filter && tagname != WYMeditor.BODY) {
+        
+          node = node.parentNode;
+          tagname = node.tagName.toLowerCase();
+        }
+      
+      } else {
+      
+        var bFound = false;
+        
+        while(!bFound && tagname != WYMeditor.BODY) {
+          for(var i = 0; i < filter.length; i++) {
+            if(tagname == filter[i]) {
+              bFound = true;
+              break;
+            }
+          }
+          if(!bFound) {
+            node = node.parentNode;
+            tagname = node.tagName.toLowerCase();
+          }
+        }
+      }
+      
+      if(tagname != WYMeditor.BODY) return(node);
+      else return(null);
+      
+  } else return(null);
+};
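+
+/* Example (illustrative):
+ *
+ *   wym.findUp(node, WYMeditor.LI);               // nearest <li> ancestor-or-self, or null
+ *   wym.findUp(node, WYMeditor.MAIN_CONTAINERS);  // nearest main container
+ */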
+
+/* @name switchTo
+ * @description Switch the node's type
+ */
+WYMeditor.editor.prototype.switchTo = function(node,sType) {
+
+  var newNode = this._doc.createElement(sType);
+  var html = jQuery(node).html();
+  node.parentNode.replaceChild(newNode,node);
+  jQuery(newNode).html(html);
+  this.setFocusToNode(newNode);
+};
+
+WYMeditor.editor.prototype.replaceStrings = function(sVal) {
+  //check if the language file has already been loaded
+  //if not, get it via a synchronous ajax call
+  if(!WYMeditor.STRINGS[this._options.lang]) {
+    try {
+      eval(jQuery.ajax({url:this._options.langPath
+        + this._options.lang + '.js', async:false}).responseText);
+    } catch(e) {
+        WYMeditor.console.error("WYMeditor: error while parsing language file.");
+        return sVal;
+    }
+  }
+
+  //replace all the strings in sVal and return it
+  for (var key in WYMeditor.STRINGS[this._options.lang]) {
+    sVal = WYMeditor.Helper.replaceAll(sVal, this._options.stringDelimiterLeft + key 
+    + this._options.stringDelimiterRight,
+    WYMeditor.STRINGS[this._options.lang][key]);
+  };
+  return(sVal);
+};
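+
+/* Example (illustrative): each "{Key}" placeholder is replaced with the string
+ * for the current language, loaded from langPath on first use:
+ *
+ *   wym.replaceStrings("<h2>{Tools}</h2>");  // # => "<h2>Tools</h2>" with the "en" strings
+ */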
+
+WYMeditor.editor.prototype.encloseString = function(sVal) {
+
+  return(this._options.stringDelimiterLeft
+    + sVal
+    + this._options.stringDelimiterRight);
+};
+
+/* @name status
+ * @description Prints a status message
+ */
+WYMeditor.editor.prototype.status = function(sMessage) {
+
+  //print status message
+  jQuery(this._box).find(this._options.statusSelector).html(sMessage);
+};
+
+/* @name update
+ * @description Updates the element and textarea values
+ */
+WYMeditor.editor.prototype.update = function() {
+
+  var html = this.xhtml();
+  jQuery(this._element).val(html);
+  jQuery(this._box).find(this._options.htmlValSelector).not('.hasfocus').val(html); //#147
+};
+
+/* @name dialog
+ * @description Opens a dialog box
+ */
+WYMeditor.editor.prototype.dialog = function( dialogType, dialogFeatures, bodyHtml ) {
+  var features = dialogFeatures || this._wym._options.dialogFeatures;
+  var wDialog = window.open('', 'dialog', features);
+
+  if(wDialog) {
+
+    var sBodyHtml = "";
+    
+    switch( dialogType ) {
+
+      case(WYMeditor.DIALOG_LINK):
+        sBodyHtml = this._options.dialogLinkHtml;
+      break;
+      case(WYMeditor.DIALOG_IMAGE):
+        sBodyHtml = this._options.dialogImageHtml;
+      break;
+      case(WYMeditor.DIALOG_TABLE):
+        sBodyHtml = this._options.dialogTableHtml;
+      break;
+      case(WYMeditor.DIALOG_PASTE):
+        sBodyHtml = this._options.dialogPasteHtml;
+      break;
+      case(WYMeditor.PREVIEW):
+        sBodyHtml = this._options.dialogPreviewHtml;
+      break;
+      default:
+        sBodyHtml = bodyHtml;
+    }
+    var h = WYMeditor.Helper;
+
+    //construct the dialog
+    var dialogHtml = this._options.dialogHtml;
+    dialogHtml = h.replaceAll(dialogHtml, WYMeditor.BASE_PATH, this._options.basePath);
+    dialogHtml = h.replaceAll(dialogHtml, WYMeditor.DIRECTION, this._options.direction);
+    dialogHtml = h.replaceAll(dialogHtml, WYMeditor.CSS_PATH, this._options.skinPath + WYMeditor.SKINS_DEFAULT_CSS);
+    dialogHtml = h.replaceAll(dialogHtml, WYMeditor.WYM_PATH, this._options.wymPath);
+    dialogHtml = h.replaceAll(dialogHtml, WYMeditor.JQUERY_PATH, this._options.jQueryPath);
+    dialogHtml = h.replaceAll(dialogHtml, WYMeditor.DIALOG_TITLE, this.encloseString( dialogType ));
+    dialogHtml = h.replaceAll(dialogHtml, WYMeditor.DIALOG_BODY, sBodyHtml);
+    dialogHtml = h.replaceAll(dialogHtml, WYMeditor.INDEX, this._index);
+      
+    dialogHtml = this.replaceStrings(dialogHtml);
+    var doc = wDialog.document;
+    doc.write(dialogHtml);
+    doc.close();
+  }
+};
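+
+/* Example (illustrative): dialogs open in a popup window; the type selects one
+ * of the dialog*Html templates defined in the options:
+ *
+ *   wym.dialog(WYMeditor.DIALOG_LINK);
+ *   wym.dialog(WYMeditor.PREVIEW, wym._options.dialogFeaturesPreview);
+ */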
+
+/* @name toggleHtml
+ * @description Shows/Hides the HTML
+ */
+WYMeditor.editor.prototype.toggleHtml = function() {
+  jQuery(this._box).find(this._options.htmlSelector).toggle();
+};
+
+WYMeditor.editor.prototype.uniqueStamp = function() {
+  var now = new Date();
+  return("wym-" + now.getTime());
+};
+
+WYMeditor.editor.prototype.paste = function(sData) {
+
+  var sTmp;
+  var container = this.selected();
+
+  //split the data, using double newlines as the separator
+  var aP = sData.split(this._newLine + this._newLine);
+  var rExp = new RegExp(this._newLine, "g");
+
+  //add a P for each item
+  if(container && container.tagName.toLowerCase() != WYMeditor.BODY) {
+    for(var x = aP.length - 1; x >= 0; x--) {
+        sTmp = aP[x];
+        //simple newlines are replaced by a break
+        sTmp = sTmp.replace(rExp, "<br />");
+        jQuery(container).after("<p>" + sTmp + "</p>");
+    }
+  } else {
+    for(var x = 0; x < aP.length; x++) {
+        sTmp = aP[x];
+        //simple newlines are replaced by a break
+        sTmp = sTmp.replace(rExp, "<br />");
+        jQuery(this._doc.body).append("<p>" + sTmp + "</p>");
+    }
+  
+  }
+};
+
+WYMeditor.editor.prototype.insert = function(html) {
+    // Do we have a selection?
+    if (this._iframe.contentWindow.getSelection().focusNode != null) {
+        // Overwrite selection with provided html
+        this._exec( WYMeditor.INSERT_HTML, html);
+    } else {
+        // Fall back to the internal paste function if there's no selection
+        this.paste(html);
+    }
+};
+
+WYMeditor.editor.prototype.wrap = function(left, right) {
+    // Do we have a selection?
+    if (this._iframe.contentWindow.getSelection().focusNode != null) {
+        // Wrap selection with provided html
+        this._exec( WYMeditor.INSERT_HTML, left + this._iframe.contentWindow.getSelection().toString() + right);
+    }
+};
+
+WYMeditor.editor.prototype.unwrap = function() {
+    // Do we have a selection?
+    if (this._iframe.contentWindow.getSelection().focusNode != null) {
+        // Unwrap selection
+        this._exec( WYMeditor.INSERT_HTML, this._iframe.contentWindow.getSelection().toString() );
+    }
+};
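+
+/* Example (illustrative): the three helpers above operate on the current
+ * iframe selection:
+ *
+ *   wym.insert("<strong>new</strong>");  // replace the selection (or paste if none)
+ *   wym.wrap("<em>", "</em>");           // wrap the selected text
+ *   wym.unwrap();                        // strip markup from the selected text
+ */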
+
+WYMeditor.editor.prototype.addCssRules = function(doc, aCss) {
+  var styles = doc.styleSheets[0];
+  if(styles) {
+    for(var i = 0; i < aCss.length; i++) {
+      var oCss = aCss[i];
+      if(oCss.name && oCss.css) this.addCssRule(styles, oCss);
+    }
+  }
+};
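+
+/* Example (illustrative): aCss entries are {name, css} objects, the same
+ * format as the editorStyles/dialogStyles options:
+ *
+ *   wym.addCssRules(wym._doc, [
+ *     {name: "p.hidden", css: "display: none"}
+ *   ]);
+ */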
+
+/********** CONFIGURATION **********/
+
+WYMeditor.editor.prototype.computeBasePath = function() {
+  return jQuery(jQuery.grep(jQuery('script'), function(s){
+    return (s.src && s.src.match(/jquery\.wymeditor(\.pack|\.min|\.packed)?\.js(\?.*)?$/ ))
+  })).attr('src').replace(/jquery\.wymeditor(\.pack|\.min|\.packed)?\.js(\?.*)?$/, '');
+};
+
+WYMeditor.editor.prototype.computeWymPath = function() {
+  return jQuery(jQuery.grep(jQuery('script'), function(s){
+    return (s.src && s.src.match(/jquery\.wymeditor(\.pack|\.min|\.packed)?\.js(\?.*)?$/ ))
+  })).attr('src');
+};
+
+WYMeditor.editor.prototype.computeJqueryPath = function() {
+  return jQuery(jQuery.grep(jQuery('script'), function(s){
+    return (s.src && s.src.match(/jquery(-(.*)){0,1}(\.pack|\.min|\.packed)?\.js(\?.*)?$/ ))
+  })).attr('src');
+};
+
+WYMeditor.editor.prototype.computeCssPath = function() {
+  return jQuery(jQuery.grep(jQuery('link'), function(s){
+   return (s.href && s.href.match(/wymeditor\/skins\/(.*)screen\.css(\?.*)?$/ ))
+  })).attr('href');
+};
+
+WYMeditor.editor.prototype.configureEditorUsingRawCss = function() {
+
+  var CssParser = new WYMeditor.WymCssParser();
+  if(this._options.stylesheet){
+    CssParser.parse(jQuery.ajax({url: this._options.stylesheet,async:false}).responseText);
+  }else{
+    CssParser.parse(this._options.styles, false);
+  }
+
+  if(this._options.classesItems.length == 0) {
+    this._options.classesItems = CssParser.css_settings.classesItems;
+  }
+  if(this._options.editorStyles.length == 0) {
+    this._options.editorStyles = CssParser.css_settings.editorStyles;
+  }
+  if(this._options.dialogStyles.length == 0) {
+    this._options.dialogStyles = CssParser.css_settings.dialogStyles;
+  }
+};
+
+/********** EVENTS **********/
+
+WYMeditor.editor.prototype.listen = function() {
+
+  //don't use jQuery.find() on the iframe body
+  //because of MSIE + jQuery + expando issue (#JQ1143)
+  //jQuery(this._doc.body).find("*").bind("mouseup", this.mouseup);
+  
+  jQuery(this._doc.body).bind("mousedown", this.mousedown);
+  var images = this._doc.body.getElementsByTagName("img");
+  for(var i=0; i < images.length; i++) {
+    jQuery(images[i]).bind("mousedown", this.mousedown);
+  }
+};
+
+WYMeditor.editor.prototype.mousedown = function(evt) {
+  
+  var wym = WYMeditor.INSTANCES[this.ownerDocument.title];
+  wym._selected_image = (this.tagName.toLowerCase() == WYMeditor.IMG) ? this : null;
+  evt.stopPropagation();
+};
+
+/********** SKINS **********/
+
+/*
+ * Function: WYMeditor.loadCss
+ *      Loads a stylesheet in the document.
+ *
+ * Parameters:
+ *      href - The CSS path.
+ */
+WYMeditor.loadCss = function(href) {
+    
+    var link = document.createElement('link');
+    link.rel = 'stylesheet';
+    link.href = href;
+
+    var head = jQuery('head').get(0);
+    head.appendChild(link);
+};
+
+/*
+ *  Function: WYMeditor.editor.loadSkin
+ *      Loads the skin CSS and initialization script (if needed).
+ */
+WYMeditor.editor.prototype.loadSkin = function() {
+
+    //does the user want to automatically load the CSS (default: yes)?
+    //we also test if it hasn't been already loaded by another instance
+    //see below for a better (second) test
+    if(this._options.loadSkin && !WYMeditor.SKINS[this._options.skin]) {
+
+        //check if it hasn't been already loaded
+        //so we don't load it more than once
+        //(we check the existing <link> elements)
+
+        var found = false;
+        var rExp = new RegExp(this._options.skin
+             + '\/' + WYMeditor.SKINS_DEFAULT_CSS + '$');
+
+        jQuery('link').each( function() {
+            if(this.href.match(rExp)) found = true;
+        });
+
+        //load it, using the skin path
+        if(!found) WYMeditor.loadCss( this._options.skinPath
+            + WYMeditor.SKINS_DEFAULT_CSS );
+    }
+
+    //put the classname (ex. wym_skin_default) on wym_box
+    jQuery(this._box).addClass( "wym_skin_" + this._options.skin );
+
+    //does the user want to use some JS to initialize the skin (default: yes)?
+    //also check if it hasn't already been loaded by another instance
+    if(this._options.initSkin && !WYMeditor.SKINS[this._options.skin]) {
+
+        eval(jQuery.ajax({url:this._options.skinPath
+            + WYMeditor.SKINS_DEFAULT_JS, async:false}).responseText);
+    }
+
+    //init the skin, if needed
+    if(WYMeditor.SKINS[this._options.skin]
+    && WYMeditor.SKINS[this._options.skin].init)
+       WYMeditor.SKINS[this._options.skin].init(this);
+
+};
+
+
+/********** DIALOGS **********/
+
+WYMeditor.INIT_DIALOG = function(index) {
+
+  var wym = window.opener.WYMeditor.INSTANCES[index];
+  var doc = window.document;
+  var selected = wym.selected();
+  var dialogType = jQuery(wym._options.dialogTypeSelector).val();
+  var sStamp = wym.uniqueStamp();
+
+  switch(dialogType) {
+
+  case WYMeditor.DIALOG_LINK:
+    //ensure that we select the link to populate the fields
+    if(selected && selected.tagName && selected.tagName.toLowerCase() != WYMeditor.A)
+      selected = jQuery(selected).parentsOrSelf(WYMeditor.A);
+
+    //fix MSIE selection if link image has been clicked
+    if(!selected && wym._selected_image)
+      selected = jQuery(wym._selected_image).parentsOrSelf(WYMeditor.A);
+  break;
+
+  }
+
+  //pre-init functions
+  if(jQuery.isFunction(wym._options.preInitDialog))
+    wym._options.preInitDialog(wym,window);
+
+  //add css rules from options
+  var styles = doc.styleSheets[0];
+  var aCss = eval(wym._options.dialogStyles);
+
+  wym.addCssRules(doc, aCss);
+
+  //auto populate fields if selected container (e.g. A)
+  if(selected) {
+    jQuery(wym._options.hrefSelector).val(jQuery(selected).attr(WYMeditor.HREF));
+    jQuery(wym._options.srcSelector).val(jQuery(selected).attr(WYMeditor.SRC));
+    jQuery(wym._options.titleSelector).val(jQuery(selected).attr(WYMeditor.TITLE));
+    jQuery(wym._options.altSelector).val(jQuery(selected).attr(WYMeditor.ALT));
+  }
+
+  //auto populate image fields if selected image
+  if(wym._selected_image) {
+    jQuery(wym._options.dialogImageSelector + " " + wym._options.srcSelector)
+      .val(jQuery(wym._selected_image).attr(WYMeditor.SRC));
+    jQuery(wym._options.dialogImageSelector + " " + wym._options.titleSelector)
+      .val(jQuery(wym._selected_image).attr(WYMeditor.TITLE));
+    jQuery(wym._options.dialogImageSelector + " " + wym._options.altSelector)
+      .val(jQuery(wym._selected_image).attr(WYMeditor.ALT));
+  }
+
+  jQuery(wym._options.dialogLinkSelector + " "
+    + wym._options.submitSelector).click(function() {
+
+      var sUrl = jQuery(wym._options.hrefSelector).val();
+      if(sUrl.length > 0) {
+
+        wym._exec(WYMeditor.CREATE_LINK, sStamp);
+
+        jQuery("a[href=" + sStamp + "]", wym._doc.body)
+            .attr(WYMeditor.HREF, sUrl)
+            .attr(WYMeditor.TITLE, jQuery(wym._options.titleSelector).val());
+
+      }
+      window.close();
+  });
+
+  jQuery(wym._options.dialogImageSelector + " "
+    + wym._options.submitSelector).click(function() {
+
+      var sUrl = jQuery(wym._options.srcSelector).val();
+      if(sUrl.length > 0) {
+
+        wym._exec(WYMeditor.INSERT_IMAGE, sStamp);
+
+        jQuery("img[src$=" + sStamp + "]", wym._doc.body)
+            .attr(WYMeditor.SRC, sUrl)
+            .attr(WYMeditor.TITLE, jQuery(wym._options.titleSelector).val())
+            .attr(WYMeditor.ALT, jQuery(wym._options.altSelector).val());
+      }
+      window.close();
+  });
+
+  jQuery(wym._options.dialogTableSelector + " "
+    + wym._options.submitSelector).click(function() {
+
+      var iRows = jQuery(wym._options.rowsSelector).val();
+      var iCols = jQuery(wym._options.colsSelector).val();
+
+      if(iRows > 0 && iCols > 0) {
+
+        var table = wym._doc.createElement(WYMeditor.TABLE);
+        var newRow = null;
+        var newCol = null;
+
+        var sCaption = jQuery(wym._options.captionSelector).val();
+
+        //we create the caption
+        var newCaption = table.createCaption();
+        newCaption.innerHTML = sCaption;
+
+        //we create the rows and cells
+        for(var x = 0; x < iRows; x++) {
+          newRow = table.insertRow(x);
+          for(var y = 0; y < iCols; y++) { newRow.insertCell(y); }
+        }
+
+        //set the summary attr
+        jQuery(table).attr('summary',
+            jQuery(wym._options.summarySelector).val());
+
+        //append the table after the selected container
+        var node = jQuery(wym.findUp(wym.container(),
+          WYMeditor.MAIN_CONTAINERS)).get(0);
+        if(!node || !node.parentNode) jQuery(wym._doc.body).append(table);
+        else jQuery(node).after(table);
+      }
+      window.close();
+  });
+
+  jQuery(wym._options.dialogPasteSelector + " "
+    + wym._options.submitSelector).click(function() {
+
+      var sText = jQuery(wym._options.textSelector).val();
+      wym.paste(sText);
+      window.close();
+  });
+
+  jQuery(wym._options.dialogPreviewSelector + " "
+    + wym._options.previewSelector)
+    .html(wym.xhtml());
+
+  //cancel button
+  jQuery(wym._options.cancelSelector).mousedown(function() {
+    window.close();
+  });
+
+  //pre-init functions
+  if(jQuery.isFunction(wym._options.postInitDialog))
+    wym._options.postInitDialog(wym,window);
+
+};
+
+/********** XHTML LEXER/PARSER **********/
+
+/*
+* @name xml
+* @description Use these methods to generate XML and XHTML compliant tags and
+* escape tag attributes correctly
+* @author Bermi Ferrer - http://bermi.org
+* @author David Heinemeier Hansson http://loudthinking.com
+*/
+WYMeditor.XmlHelper = function()
+{
+  this._entitiesDiv = document.createElement('div');
+  return this;
+};
+
+
+/*
+* @name tag
+* @description
+* Returns an empty HTML tag of type *name* which by default is XHTML
+* compliant. Setting *open* to true will create an open tag compatible
+* with HTML 4.0 and below. Add HTML attributes by passing an attributes
+* array to *options*. For attributes with no value (like disabled and
+* readonly), give them a value of true in the *options* array.
+*
+* Examples:
+*
+*   this.tag('br')
+*    # => <br />
+*   this.tag ('br', false, true)
+*    # => <br>
+*   this.tag ('input', jQuery({type:'text',disabled:true }) )
+*    # => <input type="text" disabled="disabled" />
+*/
+WYMeditor.XmlHelper.prototype.tag = function(name, options, open)
+{
+  options = options || false;
+  open = open || false;
+  return '<'+name+(options ? this.tagOptions(options) : '')+(open ? '>' : ' />');
+};
+
+/*
+* @name contentTag
+* @description
+* Returns a XML block tag of type *name* surrounding the *content*. Add
+* XML attributes by passing an attributes array to *options*. For attributes
+* with no value (like disabled and readonly), give them a value of true in
+* the *options* array. You can use symbols or strings for the attribute names.
+*
+*   this.contentTag ('p', 'Hello world!' )
+*    # => <p>Hello world!</p>
+*   this.contentTag('div', this.contentTag('p', "Hello world!"), jQuery({class : "strong"}))
+*    # => <div class="strong"><p>Hello world!</p></div>
+*   this.contentTag("select", options, jQuery({multiple : true}))
+*    # => <select multiple="multiple">...options...</select>
+*/
+WYMeditor.XmlHelper.prototype.contentTag = function(name, content, options)
+{
+  options = options || false;
+  return '<'+name+(options ? this.tagOptions(options) : '')+'>'+content+'</'+name+'>';
+};
+
+/*
+* @name cdataSection
+* @description
+* Returns a CDATA section for the given +content+.  CDATA sections
+* are used to escape blocks of text containing characters which would
+* otherwise be recognized as markup. CDATA sections begin with the string
+* <tt><![CDATA[</tt> and end with (and may not contain) the string
+* <tt>]]></tt>.
+*/
+WYMeditor.XmlHelper.prototype.cdataSection = function(content)
+{
+  return '<![CDATA['+content+']]>';
+};
+
+
+/*
+* @name escapeOnce
+* @description
+* Returns the escaped +xml+ without affecting existing escaped entities.
+*
+*  this.escapeOnce( "1 > 2 &amp; 3")
+*    # => "1 &gt; 2 &amp; 3"
+*/
+WYMeditor.XmlHelper.prototype.escapeOnce = function(xml)
+{
+  return this._fixDoubleEscape(this.escapeEntities(xml));
+};
+
+/*
+* @name _fixDoubleEscape
+* @description
+* Fix double-escaped entities, such as &amp;amp;, &amp;#123;, etc.
+*/
+WYMeditor.XmlHelper.prototype._fixDoubleEscape = function(escaped)
+{
+  return escaped.replace(/&([a-z]+|(#\d+));/ig, "&$1;");
+};
+
+/*
+* @name tagOptions
+* @description
+* Takes an array like the one generated by Tag.parseAttributes
+*  [["src", "http://www.editam.com/?a=b&c=d&f=g"], ["title", "Editam, <Simplified> CMS"]]
+* or an object like {src:"http://www.editam.com/?a=b&c=d&f=g", title:"Editam, <Simplified> CMS"}
+* and returns a string properly escaped like
+* ' src = "http://www.editam.com/?a=b&c=d&f=g" title = "Editam, <Simplified> CMS"'
+* which is valid for strict XHTML
+*/
+WYMeditor.XmlHelper.prototype.tagOptions = function(options)
+{
+  var xml = this;
+  xml._formated_options = '';
+
+  for (var key in options) {
+    var value = options[key];
+    if(typeof value != 'function' && value.length > 0) {
+
+      if(parseInt(key) == key && typeof value == 'object'){
+        key = value.shift();
+        value = value.pop();
+      }
+      if(key != '' && value != ''){
+        xml._formated_options += ' '+key+'="'+xml.escapeOnce(value)+'"';
+      }
+    }
+  }
+  return xml._formated_options;
+};
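+
+/* Example (illustrative):
+ *
+ *   new WYMeditor.XmlHelper().tagOptions({src: "image.png", alt: "An image"})
+ *    # => ' src="image.png" alt="An image"'
+ */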
+
+/*
+* @name escapeEntities
+* @description
+* Escapes the XML/HTML entities <, >, & and ". If the second parameter is set
+* to false it will not escape ". If set to true it will also escape '.
+*/
+WYMeditor.XmlHelper.prototype.escapeEntities = function(string, escape_quotes)
+{
+  this._entitiesDiv.innerHTML = string;
+  this._entitiesDiv.textContent = string;
+  var result = this._entitiesDiv.innerHTML;
+  if(typeof escape_quotes == 'undefined'){
+    if(escape_quotes != false) result = result.replace('"', '&quot;');
+    if(escape_quotes == true)  result = result.replace("'", '&#039;');
+  }
+  return result;
+};
+
+/*
+* Parses a string containing tag attributes and values and returns an array formatted like
+*  [["src", "http://www.editam.com"], ["title", "Editam, Simplified CMS"]]
+*/
+WYMeditor.XmlHelper.prototype.parseAttributes = function(tag_attributes)
+{
+  // Use a compounded regex to match single quoted, double quoted and unquoted attribute pairs
+  var result = [];
+  var matches = tag_attributes.split(/((=\s*")(")("))|((=\s*\')(\')(\'))|((=\s*[^>\s]*))/g);
+  if(matches.toString() != tag_attributes){
+    for (var k in matches) {
+      var v = matches[k];
+      if(typeof v != 'function' && v.length != 0){
+        var re = new RegExp('(\\w+)\\s*'+v);
+        var match = tag_attributes.match(re);
+        if(match){
+          var value = v.replace(/^[\s=]+/, "");
+          var delimiter = value.charAt(0);
+          delimiter = delimiter == '"' ? '"' : (delimiter=="'"?"'":'');
+          if(delimiter != ''){
+            value = delimiter == '"' ? value.replace(/^"|"+$/g, '') :  value.replace(/^'|'+$/g, '');
+          }
+          tag_attributes = tag_attributes.replace(match[0],'');
+          result.push([match[1] , value]);
+        }
+      }
+    }
+  }
+  return result;
+};
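+
+/* Example (illustrative), per the format documented above:
+ *
+ *   helper.parseAttributes('src="http://www.editam.com" title="Editam, Simplified CMS"')
+ *    # => [["src", "http://www.editam.com"], ["title", "Editam, Simplified CMS"]]
+ */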
+
+/**
+* XhtmlValidator for validating tag attributes
+*
+* @author Bermi Ferrer - http://bermi.org
+*/
+WYMeditor.XhtmlValidator = {
+  "_attributes":
+  {
+    "core":
+    {
+      "except":[
+      "base",
+      "head",
+      "html",
+      "meta",
+      "param",
+      "script",
+      "style",
+      "title"
+      ],
+      "attributes":[
+      "class",
+      "id",
+      "style",
+      "title",
+      "accesskey",
+      "tabindex"
+      ]
+    },
+    "language":
+    {
+      "except":[
+      "base",
+      "br",
+      "hr",
+      "iframe",
+      "param",
+      "script"
+      ],
+      "attributes":
+      {
+        "dir":[
+        "ltr",
+        "rtl"
+        ],
+        "0":"lang",
+        "1":"xml:lang"
+      }
+    },
+    "keyboard":
+    {
+      "attributes":
+      {
+        "accesskey":/^(\w){1}$/,
+        "tabindex":/^(\d)+$/
+      }
+    }
+  },
+  "_events":
+  {
+    "window":
+    {
+      "only":[
+      "body"
+      ],
+      "attributes":[
+      "onload",
+      "onunload"
+      ]
+    },
+    "form":
+    {
+      "only":[
+      "form",
+      "input",
+      "textarea",
+      "select",
+      "a",
+      "label",
+      "button"
+      ],
+      "attributes":[
+      "onchange",
+      "onsubmit",
+      "onreset",
+      "onselect",
+      "onblur",
+      "onfocus"
+      ]
+    },
+    "keyboard":
+    {
+      "except":[
+      "base",
+      "bdo",
+      "br",
+      "frame",
+      "frameset",
+      "head",
+      "html",
+      "iframe",
+      "meta",
+      "param",
+      "script",
+      "style",
+      "title"
+      ],
+      "attributes":[
+      "onkeydown",
+      "onkeypress",
+      "onkeyup"
+      ]
+    },
+    "mouse":
+    {
+      "except":[
+      "base",
+      "bdo",
+      "br",
+      "head",
+      "html",
+      "meta",
+      "param",
+      "script",
+      "style",
+      "title"
+      ],
+      "attributes":[
+      "onclick",
+      "ondblclick",
+      "onmousedown",
+      "onmousemove",
+      "onmouseover",
+      "onmouseout",
+      "onmouseup"
+      ]
+    }
+  },
+  "_tags":
+  {
+    "a":
+    {
+      "attributes":
+      {
+        "0":"charset",
+        "1":"coords",
+        "2":"href",
+        "3":"hreflang",
+        "4":"name",
+        "rel":/^(alternate|designates|stylesheet|start|next|prev|contents|index|glossary|copyright|chapter|section|subsection|appendix|help|bookmark| |shortcut|icon)+$/,
+        "rev":/^(alternate|designates|stylesheet|start|next|prev|contents|index|glossary|copyright|chapter|section|subsection|appendix|help|bookmark| |shortcut|icon)+$/,
+        "shape":/^(rect|rectangle|circ|circle|poly|polygon)$/,
+        "5":"type"
+      }
+    },
+    "0":"abbr",
+    "1":"acronym",
+    "2":"address",
+    "area":
+    {
+      "attributes":
+      {
+        "0":"alt",
+        "1":"coords",
+        "2":"href",
+        "nohref":/^(true|false)$/,
+        "shape":/^(rect|rectangle|circ|circle|poly|polygon)$/
+      },
+      "required":[
+      "alt"
+      ]
+    },
+    "3":"b",
+    "base":
+    {
+      "attributes":[
+      "href"
+      ],
+      "required":[
+      "href"
+      ]
+    },
+    "bdo":
+    {
+      "attributes":
+      {
+        "dir":/^(ltr|rtl)$/
+      },
+      "required":[
+      "dir"
+      ]
+    },
+    "4":"big",
+    "blockquote":
+    {
+      "attributes":[
+      "cite"
+      ]
+    },
+    "5":"body",
+    "6":"br",
+    "button":
+    {
+      "attributes":
+      {
+        "disabled":/^(disabled)$/,
+        "type":/^(button|reset|submit)$/,
+        "0":"value"
+      },
+      "inside":"form"
+    },
+    "7":"caption",
+    "8":"cite",
+    "9":"code",
+    "col":
+    {
+      "attributes":
+      {
+        "align":/^(right|left|center|justify)$/,
+        "0":"char",
+        "1":"charoff",
+        "span":/^(\d)+$/,
+        "valign":/^(top|middle|bottom|baseline)$/,
+        "2":"width"
+      },
+      "inside":"colgroup"
+    },
+    "colgroup":
+    {
+      "attributes":
+      {
+        "align":/^(right|left|center|justify)$/,
+        "0":"char",
+        "1":"charoff",
+        "span":/^(\d)+$/,
+        "valign":/^(top|middle|bottom|baseline)$/,
+        "2":"width"
+      }
+    },
+    "10":"dd",
+    "del":
+    {
+      "attributes":
+      {
+        "0":"cite",
+        "datetime":/^([0-9]){8}/
+      }
+    },
+    "11":"div",
+    "12":"dfn",
+    "13":"dl",
+    "14":"dt",
+    "15":"em",
+    "fieldset":
+    {
+      "inside":"form"
+    },
+    "form":
+    {
+      "attributes":
+      {
+        "0":"action",
+        "1":"accept",
+        "2":"accept-charset",
+        "3":"enctype",
+        "method":/^(get|post)$/
+      },
+      "required":[
+      "action"
+      ]
+    },
+    "head":
+    {
+      "attributes":[
+      "profile"
+      ]
+    },
+    "16":"h1",
+    "17":"h2",
+    "18":"h3",
+    "19":"h4",
+    "20":"h5",
+    "21":"h6",
+    "22":"hr",
+    "html":
+    {
+      "attributes":[
+      "xmlns"
+      ]
+    },
+    "23":"i",
+    "img":
+    {
+      "attributes":[
+      "alt",
+      "src",
+      "height",
+      "ismap",
+      "longdesc",
+      "usemap",
+      "width"
+      ],
+      "required":[
+      "alt",
+      "src"
+      ]
+    },
+    "input":
+    {
+      "attributes":
+      {
+        "0":"accept",
+        "1":"alt",
+        "checked":/^(checked)$/,
+        "disabled":/^(disabled)$/,
+        "maxlength":/^(\d)+$/,
+        "2":"name",
+        "readonly":/^(readonly)$/,
+        "size":/^(\d)+$/,
+        "3":"src",
+        "type":/^(button|checkbox|file|hidden|image|password|radio|reset|submit|text)$/,
+        "4":"value"
+      },
+      "inside":"form"
+    },
+    "ins":
+    {
+      "attributes":
+      {
+        "0":"cite",
+        "datetime":/^([0-9]){8}/
+      }
+    },
+    "24":"kbd",
+    "label":
+    {
+      "attributes":[
+      "for"
+      ],
+      "inside":"form"
+    },
+    "25":"legend",
+    "26":"li",
+    "link":
+    {
+      "attributes":
+      {
+        "0":"charset",
+        "1":"href",
+        "2":"hreflang",
+        "media":/^(all|braille|print|projection|screen|speech|,|;| )+$/i,
+        //next comment line required by Opera!
+        /*"rel":/^(alternate|appendix|bookmark|chapter|contents|copyright|glossary|help|home|index|next|prev|section|start|stylesheet|subsection| |shortcut|icon)+$/i,*/
+        "rel":/^(alternate|appendix|bookmark|chapter|contents|copyright|glossary|help|home|index|next|prev|section|start|stylesheet|subsection| |shortcut|icon)+$/i,
+        "rev":/^(alternate|appendix|bookmark|chapter|contents|copyright|glossary|help|home|index|next|prev|section|start|stylesheet|subsection| |shortcut|icon)+$/i,
+        "3":"type"
+      },
+      "inside":"head"
+    },
+    "map":
+    {
+      "attributes":[
+      "id",
+      "name"
+      ],
+      "required":[
+      "id"
+      ]
+    },
+    "meta":
+    {
+      "attributes":
+      {
+        "0":"content",
+        "http-equiv":/^(content\-type|expires|refresh|set\-cookie)$/i,
+        "1":"name",
+        "2":"scheme"
+      },
+      "required":[
+      "content"
+      ]
+    },
+    "27":"noscript",
+    "object":
+    {
+      "attributes":[
+      "archive",
+      "classid",
+      "codebase",
+      "codetype",
+      "data",
+      "declare",
+      "height",
+      "name",
+      "standby",
+      "type",
+      "usemap",
+      "width"
+      ]
+    },
+    "28":"ol",
+    "optgroup":
+    {
+      "attributes":
+      {
+        "0":"label",
+        "disabled": /^(disabled)$/
+      },
+      "required":[
+      "label"
+      ]
+    },
+    "option":
+    {
+      "attributes":
+      {
+        "0":"label",
+        "disabled":/^(disabled)$/,
+        "selected":/^(selected)$/,
+        "1":"value"
+      },
+      "inside":"select"
+    },
+    "29":"p",
+    "param":
+    {
+      "attributes":
+      {
+        "0":"type",
+        "valuetype":/^(data|ref|object)$/,
+        "1":"valuetype",
+        "2":"value"
+      },
+      "required":[
+      "name"
+      ]
+    },
+    "30":"pre",
+    "q":
+    {
+      "attributes":[
+      "cite"
+      ]
+    },
+    "31":"samp",
+    "script":
+    {
+      "attributes":
+      {
+        "type":/^(text\/ecmascript|text\/javascript|text\/jscript|text\/vbscript|text\/vbs|text\/xml)$/,
+        "0":"charset",
+        "defer":/^(defer)$/,
+        "1":"src"
+      },
+      "required":[
+      "type"
+      ]
+    },
+    "select":
+    {
+      "attributes":
+      {
+        "disabled":/^(disabled)$/,
+        "multiple":/^(multiple)$/,
+        "0":"name",
+        "1":"size"
+      },
+      "inside":"form"
+    },
+    "32":"small",
+    "33":"span",
+    "34":"strong",
+    "style":
+    {
+      "attributes":
+      {
+        "0":"type",
+        "media":/^(screen|tty|tv|projection|handheld|print|braille|aural|all)$/
+      },
+      "required":[
+      "type"
+      ]
+    },
+    "35":"sub",
+    "36":"sup",
+    "table":
+    {
+      "attributes":
+      {
+        "0":"border",
+        "1":"cellpadding",
+        "2":"cellspacing",
+        "frame":/^(void|above|below|hsides|lhs|rhs|vsides|box|border)$/,
+        "rules":/^(none|groups|rows|cols|all)$/,
+        "3":"summary",
+        "4":"width"
+      }
+    },
+    "tbody":
+    {
+      "attributes":
+      {
+        "align":/^(right|left|center|justify)$/,
+        "0":"char",
+        "1":"charoff",
+        "valign":/^(top|middle|bottom|baseline)$/
+      }
+    },
+    "td":
+    {
+      "attributes":
+      {
+        "0":"abbr",
+        "align":/^(left|right|center|justify|char)$/,
+        "1":"axis",
+        "2":"char",
+        "3":"charoff",
+        "colspan":/^(\d)+$/,
+        "4":"headers",
+        "rowspan":/^(\d)+$/,
+        "scope":/^(col|colgroup|row|rowgroup)$/,
+        "valign":/^(top|middle|bottom|baseline)$/
+      }
+    },
+    "textarea":
+    {
+      "attributes":[
+      "cols",
+      "rows",
+      "disabled",
+      "name",
+      "readonly"
+      ],
+      "required":[
+      "cols",
+      "rows"
+      ],
+      "inside":"form"
+    },
+    "tfoot":
+    {
+      "attributes":
+      {
+        "align":/^(right|left|center|justify)$/,
+        "0":"char",
+        "1":"charoff",
+        "valign":/^(top|middle|bottom)$/,
+        "2":"baseline"
+      }
+    },
+    "th":
+    {
+      "attributes":
+      {
+        "0":"abbr",
+        "align":/^(left|right|center|justify|char)$/,
+        "1":"axis",
+        "2":"char",
+        "3":"charoff",
+        "colspan":/^(\d)+$/,
+        "4":"headers",
+        "rowspan":/^(\d)+$/,
+        "scope":/^(col|colgroup|row|rowgroup)$/,
+        "valign":/^(top|middle|bottom|baseline)$/
+      }
+    },
+    "thead":
+    {
+      "attributes":
+      {
+        "align":/^(right|left|center|justify)$/,
+        "0":"char",
+        "1":"charoff",
+        "valign":/^(top|middle|bottom|baseline)$/
+      }
+    },
+    "37":"title",
+    "tr":
+    {
+      "attributes":
+      {
+        "align":/^(right|left|center|justify|char)$/,
+        "0":"char",
+        "1":"charoff",
+        "valign":/^(top|middle|bottom|baseline)$/
+      }
+    },
+    "38":"tt",
+    "39":"ul",
+    "40":"var"
+  },
+
+  // Temporarily skipped attributes
+  skiped_attributes : [],
+  skiped_attribute_values : [],
+
+  getValidTagAttributes: function(tag, attributes)
+  {
+    var valid_attributes = {};
+    var possible_attributes = this.getPossibleTagAttributes(tag);
+    for(var attribute in attributes) {
+      var value = attributes[attribute];
+      var h = WYMeditor.Helper;
+      if(!h.contains(this.skiped_attributes, attribute) && !h.contains(this.skiped_attribute_values, value)){
+        if (typeof value != 'function' && h.contains(possible_attributes, attribute)) {
+          if (this.doesAttributeNeedsValidation(tag, attribute)) {
+            if(this.validateAttribute(tag, attribute, value)){
+              valid_attributes[attribute] = value;
+            }
+          }else{
+            valid_attributes[attribute] = value;
+          }
+        }
+      }
+    }
+    return valid_attributes;
+  },
+  getUniqueAttributesAndEventsForTag : function(tag)
+  {
+    var result = [];
+
+    if (this._tags[tag] && this._tags[tag]['attributes']) {
+      for (var k in this._tags[tag]['attributes']) {
+        result.push(parseInt(k) == k ? this._tags[tag]['attributes'][k] : k);
+      }
+    }
+    return result;
+  },
+  getDefaultAttributesAndEventsForTags : function()
+  {
+    var result = [];
+    for (var key in this._events){
+      result.push(this._events[key]);
+    }
+    for (var key in this._attributes){
+      result.push(this._attributes[key]);
+    }
+    return result;
+  },
+  isValidTag : function(tag)
+  {
+    if(this._tags[tag]){
+      return true;
+    }
+    for(var key in this._tags){
+      if(this._tags[key] == tag){
+        return true;
+      }
+    }
+    return false;
+  },
+  getDefaultAttributesAndEventsForTag : function(tag)
+  {
+    var default_attributes = [];
+    if (this.isValidTag(tag)) {
+      var default_attributes_and_events = this.getDefaultAttributesAndEventsForTags();
+
+      for(var key in default_attributes_and_events) {
+        var defaults = default_attributes_and_events[key];
+        if(typeof defaults == 'object'){
+          var h = WYMeditor.Helper;
+          if ((defaults['except'] && h.contains(defaults['except'], tag)) || (defaults['only'] && !h.contains(defaults['only'], tag))) {
+            continue;
+          }
+
+          var tag_defaults = defaults['attributes'] ? defaults['attributes'] : defaults['events'];
+          for(var k in tag_defaults) {
+            default_attributes.push(typeof tag_defaults[k] != 'string' ? k : tag_defaults[k]);
+          }
+        }
+      }
+    }
+    return default_attributes;
+  },
+  doesAttributeNeedsValidation: function(tag, attribute)
+  {
+    return this._tags[tag] && ((this._tags[tag]['attributes'] && this._tags[tag]['attributes'][attribute]) || (this._tags[tag]['required'] &&
+     WYMeditor.Helper.contains(this._tags[tag]['required'], attribute)));
+  },
+  validateAttribute : function(tag, attribute, value)
+  {
+    if ( this._tags[tag] &&
+      (this._tags[tag]['attributes'] && this._tags[tag]['attributes'][attribute] && value.length > 0 && !value.match(this._tags[tag]['attributes'][attribute])) || // invalid format
+      (this._tags[tag] && this._tags[tag]['required'] && WYMeditor.Helper.contains(this._tags[tag]['required'], attribute) && value.length == 0) // required attribute
+    ) {
+      return false;
+    }
+    return typeof this._tags[tag] != 'undefined';
+  },
+  getPossibleTagAttributes : function(tag)
+  {
+    if (!this._possible_tag_attributes) {
+      this._possible_tag_attributes = {};
+    }
+    if (!this._possible_tag_attributes[tag]) {
+      this._possible_tag_attributes[tag] = this.getUniqueAttributesAndEventsForTag(tag).concat(this.getDefaultAttributesAndEventsForTag(tag));
+    }
+    return this._possible_tag_attributes[tag];
+  }
+};
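+
+// Usage sketch (illustrative, not part of the upstream source): the validator
+// keeps known attributes, checks pattern-constrained ones, and drops the rest.
+// var attrs = WYMeditor.XhtmlValidator.getValidTagAttributes(
+//   'a', {'href':'index.html', 'shape':'circle', 'foo':'bar'});
+// // → {'href':'index.html', 'shape':'circle'}: 'shape' matches its regex,
+// // 'href' needs no validation, and the unknown 'foo' is discarded.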
+
+
+/**
+*    Compounded regular expression. Any of
+*    the contained patterns could match and
+*    when one does, its label is returned.
+*
+*    Constructor. Starts with no patterns.
+*    @param boolean case    True for case sensitive, false
+*                            for insensitive.
+*    @access public
+*    @author Marcus Baker (http://lastcraft.com)
+*    @author Bermi Ferrer (http://bermi.org)
+*/
+WYMeditor.ParallelRegex = function(case_sensitive)
+{
+  this._case = case_sensitive;
+  this._patterns = [];
+  this._labels = [];
+  this._regex = null;
+  return this;
+};
+
+
+/**
+*    Adds a pattern with an optional label.
+*    @param string pattern      Perl style regex, but ( and )
+*                                lose the usual meaning.
+*    @param string label        Label of regex to be returned
+*                                on a match.
+*    @access public
+*/
+WYMeditor.ParallelRegex.prototype.addPattern = function(pattern, label)
+{
+  label = label || true;
+  var count = this._patterns.length;
+  this._patterns[count] = pattern;
+  this._labels[count] = label;
+  this._regex = null;
+};
+
+/**
+*    Attempts to match all patterns at once against
+*    a string.
+*    @param string subject      String to match against.
+*
+*    @return boolean             True on success.
+*    @return string match         First matched portion of
+*                                subject.
+*    @access public
+*/
+WYMeditor.ParallelRegex.prototype.match = function(subject)
+{
+  if (this._patterns.length == 0) {
+    return [false, ''];
+  }
+  var matches = subject.match(this._getCompoundedRegex());
+
+  if(!matches){
+    return [false, ''];
+  }
+  var match = matches[0];
+  for (var i = 1; i < matches.length; i++) {
+    if (matches[i]) {
+      return [this._labels[i-1], match];
+    }
+  }
+  return [true, matches[0]];
+};
+
+/**
+*    Compounds the patterns into a single
+*    regular expression separated with the
+*    "or" operator. Caches the regex.
+*    Will automatically escape (, ) and / tokens.
+*    @param array patterns    List of patterns in order.
+*    @access private
+*/
+WYMeditor.ParallelRegex.prototype._getCompoundedRegex = function()
+{
+  if (this._regex == null) {
+    for (var i = 0, count = this._patterns.length; i < count; i++) {
+      this._patterns[i] = '(' + this._untokenizeRegex(this._tokenizeRegex(this._patterns[i]).replace(/([\/\(\)])/g,'\\$1')) + ')';
+    }
+    this._regex = new RegExp(this._patterns.join("|") ,this._getPerlMatchingFlags());
+  }
+  return this._regex;
+};
+
+/**
+* Escape lookahead/lookbehind blocks
+*/
+WYMeditor.ParallelRegex.prototype._tokenizeRegex = function(regex)
+{
+  return regex.
+  replace(/\(\?(i|m|s|x|U)\)/,     '~~~~~~Tk1\$1~~~~~~').
+  replace(/\(\?(\-[i|m|s|x|U])\)/, '~~~~~~Tk2\$1~~~~~~').
+  replace(/\(\?\=(.*)\)/,          '~~~~~~Tk3\$1~~~~~~').
+  replace(/\(\?\!(.*)\)/,          '~~~~~~Tk4\$1~~~~~~').
+  replace(/\(\?\<\=(.*)\)/,        '~~~~~~Tk5\$1~~~~~~').
+  replace(/\(\?\<\!(.*)\)/,        '~~~~~~Tk6\$1~~~~~~').
+  replace(/\(\?\:(.*)\)/,          '~~~~~~Tk7\$1~~~~~~');
+};
+
+/**
+* Unescape lookahead/lookbehind blocks
+*/
+WYMeditor.ParallelRegex.prototype._untokenizeRegex = function(regex)
+{
+  return regex.
+  replace(/~~~~~~Tk1(.{1})~~~~~~/,    "(?\$1)").
+  replace(/~~~~~~Tk2(.{2})~~~~~~/,    "(?\$1)").
+  replace(/~~~~~~Tk3(.*)~~~~~~/,      "(?=\$1)").
+  replace(/~~~~~~Tk4(.*)~~~~~~/,      "(?!\$1)").
+  replace(/~~~~~~Tk5(.*)~~~~~~/,      "(?<=\$1)").
+  replace(/~~~~~~Tk6(.*)~~~~~~/,      "(?<!\$1)").
+  replace(/~~~~~~Tk7(.*)~~~~~~/,      "(?:\$1)");
+};
+
+
+/**
+*    Accessor for perl regex mode flags to use.
+*    @return string       Perl regex flags.
+*    @access private
+*/
+WYMeditor.ParallelRegex.prototype._getPerlMatchingFlags = function()
+{
+  return (this._case ? "m" : "mi");
+};
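+
+// Usage sketch (illustrative, not part of the upstream source):
+// var regex = new WYMeditor.ParallelRegex(false); // case insensitive
+// regex.addPattern("<!--", 'Comment');
+// regex.addPattern("[0-9]+", 'Number');
+// regex.match("abc 123");   // → ['Number', '123']: label plus matched text
+// regex.match("xyz");       // → [false, '']: no pattern matched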
+
+
+
+/**
+*    States for a stack machine.
+*
+*    Constructor. Starts in named state.
+*    @param string start        Starting state name.
+*    @access public
+*    @author Marcus Baker (http://lastcraft.com)
+*    @author Bermi Ferrer (http://bermi.org)
+*/
+WYMeditor.StateStack = function(start)
+{
+  this._stack = [start];
+  return this;
+};
+
+/**
+*    Accessor for current state.
+*    @return string       State.
+*    @access public
+*/
+WYMeditor.StateStack.prototype.getCurrent = function()
+{
+  return this._stack[this._stack.length - 1];
+};
+
+/**
+*    Adds a state to the stack and sets it
+*    to be the current state.
+*    @param string state        New state.
+*    @access public
+*/
+WYMeditor.StateStack.prototype.enter = function(state)
+{
+  this._stack.push(state);
+};
+
+/**
+*    Leaves the current state and reverts
+*    to the previous one.
+*    @return boolean    False if we drop off
+*                       the bottom of the list.
+*    @access public
+*/
+WYMeditor.StateStack.prototype.leave = function()
+{
+  if (this._stack.length == 1) {
+    return false;
+  }
+  this._stack.pop();
+  return true;
+};
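+
+// Usage sketch (illustrative): parsing modes nest like a stack.
+// var modes = new WYMeditor.StateStack('Text');
+// modes.enter('Comment');   // current mode is now 'Comment'
+// modes.getCurrent();       // → 'Comment'
+// modes.leave();            // → true, back to 'Text'
+// modes.leave();            // → false: the start state is never popped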
+
+
+// GLOBALS
+WYMeditor.LEXER_ENTER = 1;
+WYMeditor.LEXER_MATCHED = 2;
+WYMeditor.LEXER_UNMATCHED = 3;
+WYMeditor.LEXER_EXIT = 4;
+WYMeditor.LEXER_SPECIAL = 5;
+
+
+/**
+*    Accepts text and breaks it into tokens.
+*    Some optimisation to make sure the
+*    content is only scanned by the regex
+*    engine once. Lexer modes must not start
+*    with leading underscores.
+*
+*    Sets up the lexer in case insensitive matching
+*    by default.
+*    @param Parser parser  Handling strategy by reference.
+*    @param string start            Starting handler.
+*    @param boolean case            True for case sensitive.
+*    @access public
+*    @author Marcus Baker (http://lastcraft.com)
+*    @author Bermi Ferrer (http://bermi.org)
+*/
+WYMeditor.Lexer = function(parser, start, case_sensitive)
+{
+  start = start || 'accept';
+  this._case = case_sensitive || false;
+  this._regexes = {};
+  this._parser = parser;
+  this._mode = new WYMeditor.StateStack(start);
+  this._mode_handlers = {};
+  this._mode_handlers[start] = start;
+  return this;
+};
+
+/**
+*    Adds a token search pattern for a particular
+*    parsing mode. The pattern does not change the
+*    current mode.
+*    @param string pattern      Perl style regex, but ( and )
+*                                lose the usual meaning.
+*    @param string mode         Should only apply this
+*                                pattern when dealing with
+*                                this type of input.
+*    @access public
+*/
+WYMeditor.Lexer.prototype.addPattern = function(pattern, mode)
+{
+  var mode = mode || "accept";
+  if (typeof this._regexes[mode] == 'undefined') {
+    this._regexes[mode] = new WYMeditor.ParallelRegex(this._case);
+  }
+  this._regexes[mode].addPattern(pattern);
+  if (typeof this._mode_handlers[mode] == 'undefined') {
+    this._mode_handlers[mode] = mode;
+  }
+};
+
+/**
+*    Adds a pattern that will enter a new parsing
+*    mode. Useful for entering parenthesis, strings,
+*    tags, etc.
+*    @param string pattern      Perl style regex, but ( and )
+*                                lose the usual meaning.
+*    @param string mode         Should only apply this
+*                                pattern when dealing with
+*                                this type of input.
+*    @param string new_mode     Change parsing to this new
+*                                nested mode.
+*    @access public
+*/
+WYMeditor.Lexer.prototype.addEntryPattern = function(pattern, mode, new_mode)
+{
+  if (typeof this._regexes[mode] == 'undefined') {
+    this._regexes[mode] = new WYMeditor.ParallelRegex(this._case);
+  }
+  this._regexes[mode].addPattern(pattern, new_mode);
+  if (typeof this._mode_handlers[new_mode] == 'undefined') {
+    this._mode_handlers[new_mode] = new_mode;
+  }
+};
+
+/**
+*    Adds a pattern that will exit the current mode
+*    and re-enter the previous one.
+*    @param string pattern      Perl style regex, but ( and )
+*                                lose the usual meaning.
+*    @param string mode         Mode to leave.
+*    @access public
+*/
+WYMeditor.Lexer.prototype.addExitPattern = function(pattern, mode)
+{
+  if (typeof this._regexes[mode] == 'undefined') {
+    this._regexes[mode] = new WYMeditor.ParallelRegex(this._case);
+  }
+  this._regexes[mode].addPattern(pattern, "__exit");
+  if (typeof this._mode_handlers[mode] == 'undefined') {
+    this._mode_handlers[mode] = mode;
+  }
+};
+
+/**
+*    Adds a pattern that has a special mode. Acts as an entry
+*    and exit pattern in one go, effectively calling a special
+*    parser handler for this token only.
+*    @param string pattern      Perl style regex, but ( and )
+*                                lose the usual meaning.
+*    @param string mode         Should only apply this
+*                                pattern when dealing with
+*                                this type of input.
+*    @param string special      Use this mode for this one token.
+*    @access public
+*/
+WYMeditor.Lexer.prototype.addSpecialPattern =  function(pattern, mode, special)
+{
+  if (typeof this._regexes[mode] == 'undefined') {
+    this._regexes[mode] = new WYMeditor.ParallelRegex(this._case);
+  }
+  this._regexes[mode].addPattern(pattern, '_'+special);
+  if (typeof this._mode_handlers[special] == 'undefined') {
+    this._mode_handlers[special] = special;
+  }
+};
+
+/**
+*    Adds a mapping from a mode to another handler.
+*    @param string mode        Mode to be remapped.
+*    @param string handler     New target handler.
+*    @access public
+*/
+WYMeditor.Lexer.prototype.mapHandler = function(mode, handler)
+{
+  this._mode_handlers[mode] = handler;
+};
+
+/**
+*    Splits the page text into tokens. Will fail
+*    if the handlers report an error or if no
+*    content is consumed. If successful then each
+*    unparsed and parsed token invokes a call to the
+*    held listener.
+*    @param string raw        Raw HTML text.
+*    @return boolean           True on success, else false.
+*    @access public
+*/
+WYMeditor.Lexer.prototype.parse = function(raw)
+{
+  if (typeof this._parser == 'undefined') {
+    return false;
+  }
+
+  var length = raw.length;
+  var parsed;
+  while (typeof (parsed = this._reduce(raw)) == 'object') {
+    var raw = parsed[0];
+    var unmatched = parsed[1];
+    var matched = parsed[2];
+    var mode = parsed[3];
+
+    if (! this._dispatchTokens(unmatched, matched, mode)) {
+      return false;
+    }
+
+    if (raw == '') {
+      return true;
+    }
+    if (raw.length == length) {
+      return false;
+    }
+    length = raw.length;
+  }
+  if (! parsed ) {
+    return false;
+  }
+
+  return this._invokeParser(raw, WYMeditor.LEXER_UNMATCHED);
+};
+
+/**
+*    Sends the matched token and any leading unmatched
+*    text to the parser changing the lexer to a new
+*    mode if one is listed.
+*    @param string unmatched    Unmatched leading portion.
+*    @param string matched      Actual token match.
+*    @param string mode         Mode after match. A boolean
+*                                false mode causes no change.
+*    @return boolean             False if there was any error
+*                                from the parser.
+*    @access private
+*/
+WYMeditor.Lexer.prototype._dispatchTokens = function(unmatched, matched, mode)
+{
+  mode = mode || false;
+
+  if (! this._invokeParser(unmatched, WYMeditor.LEXER_UNMATCHED)) {
+    return false;
+  }
+
+  if (typeof mode == 'boolean') {
+    return this._invokeParser(matched, WYMeditor.LEXER_MATCHED);
+  }
+  if (this._isModeEnd(mode)) {
+    if (! this._invokeParser(matched, WYMeditor.LEXER_EXIT)) {
+      return false;
+    }
+    return this._mode.leave();
+  }
+  if (this._isSpecialMode(mode)) {
+    this._mode.enter(this._decodeSpecial(mode));
+    if (! this._invokeParser(matched, WYMeditor.LEXER_SPECIAL)) {
+      return false;
+    }
+    return this._mode.leave();
+  }
+  this._mode.enter(mode);
+
+  return this._invokeParser(matched, WYMeditor.LEXER_ENTER);
+};
+
+/**
+*    Tests to see if the new mode is actually to leave
+*    the current mode and pop an item from the matching
+*    mode stack.
+*    @param string mode    Mode to test.
+*    @return boolean        True if this is the exit mode.
+*    @access private
+*/
+WYMeditor.Lexer.prototype._isModeEnd = function(mode)
+{
+  return (mode === "__exit");
+};
+
+/**
+*    Tests to see if the mode is one where this mode
+*    is entered for this token only and automatically
+*    left immediately afterwards.
+*    @param string mode    Mode to test.
+*    @return boolean        True if this is the exit mode.
+*    @access private
+*/
+WYMeditor.Lexer.prototype._isSpecialMode = function(mode)
+{
+  return (mode.substring(0,1) == "_");
+};
+
+/**
+*    Strips the magic underscore marking single token
+*    modes.
+*    @param string mode    Mode to decode.
+*    @return string         Underlying mode name.
+*    @access private
+*/
+WYMeditor.Lexer.prototype._decodeSpecial = function(mode)
+{
+  return mode.substring(1);
+};
+
+/**
+*    Calls the parser method named after the current
+*    mode. Empty content will be ignored. The lexer
+*    has a parser handler for each mode in the lexer.
+*    @param string content        Text parsed.
+*    @param boolean is_match      Token is recognised rather
+*                                  than unparsed data.
+*    @access private
+*/
+WYMeditor.Lexer.prototype._invokeParser = function(content, is_match)
+{
+
+  if (!/ +/.test(content) && ((content === '') || (content == false))) {
+    return true;
+  }
+  var current = this._mode.getCurrent();
+  var handler = this._mode_handlers[current];
+  var result;
+  eval('result = this._parser.' + handler + '(content, is_match);');
+  return result;
+};
+
+/**
+*    Tries to match a chunk of text and if successful
+*    removes the recognised chunk and any leading
+*    unparsed data. Empty strings will not be matched.
+*    @param string raw         The subject to parse. This is the
+*                               content that will be eaten.
+*    @return array/boolean      Three item list of unparsed
+*                               content followed by the
+*                               recognised token and finally the
+*                               action the parser is to take.
+*                               True if no match, false if there
+*                               is a parsing error.
+*    @access private
+*/
+WYMeditor.Lexer.prototype._reduce = function(raw)
+{
+  var matched = this._regexes[this._mode.getCurrent()].match(raw);
+  var match = matched[1];
+  var action = matched[0];
+  if (action) {
+    var unparsed_character_count = raw.indexOf(match);
+    var unparsed = raw.substr(0, unparsed_character_count);
+    raw = raw.substring(unparsed_character_count + match.length);
+    return [raw, unparsed, match, action];
+  }
+  return true;
+};
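+
+// Usage sketch (illustrative, not part of the upstream source): the parser
+// object supplies one handler method per mode; the lexer invokes it for both
+// matched and unmatched chunks of input.
+// var parser = {
+//   accept: function(content, state) { return true; } // consume everything
+// };
+// var lexer = new WYMeditor.Lexer(parser, 'accept');
+// lexer.addPattern("[a-z]+");
+// lexer.parse("one two");   // → true; parser.accept is called per chunk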
+
+
+
+/**
+* These are the rules for breaking the XHTML code into events
+* handled by the provided parser.
+*
+*    @author Marcus Baker (http://lastcraft.com)
+*    @author Bermi Ferrer (http://bermi.org)
+*/
+WYMeditor.XhtmlLexer = function(parser)
+{
+  jQuery.extend(this, new WYMeditor.Lexer(parser, 'Text'));
+
+  this.mapHandler('Text', 'Text');
+
+  this.addTokens();
+
+  this.init();
+
+  return this;
+};
+
+
+WYMeditor.XhtmlLexer.prototype.init = function()
+{
+};
+
+WYMeditor.XhtmlLexer.prototype.addTokens = function()
+{
+  this.addCommentTokens('Text');
+  this.addScriptTokens('Text');
+  this.addCssTokens('Text');
+  this.addTagTokens('Text');
+};
+
+WYMeditor.XhtmlLexer.prototype.addCommentTokens = function(scope)
+{
+  this.addEntryPattern("<!--", scope, 'Comment');
+  this.addExitPattern("-->", 'Comment');
+};
+
+WYMeditor.XhtmlLexer.prototype.addScriptTokens = function(scope)
+{
+  this.addEntryPattern("<script", scope, 'Script');
+  this.addExitPattern("</script>", 'Script');
+};
+
+WYMeditor.XhtmlLexer.prototype.addCssTokens = function(scope)
+{
+  this.addEntryPattern("<style", scope, 'Css');
+  this.addExitPattern("</style>", 'Css');
+};
+
+WYMeditor.XhtmlLexer.prototype.addTagTokens = function(scope)
+{
+  this.addSpecialPattern("<\\s*[a-z0-9:\-]+\\s*>", scope, 'OpeningTag');
+  this.addEntryPattern("<[a-z0-9:\-]+"+'[\\\/ \\\>]+', scope, 'OpeningTag');
+  this.addInTagDeclarationTokens('OpeningTag');
+
+  this.addSpecialPattern("</\\s*[a-z0-9:\-]+\\s*>", scope, 'ClosingTag');
+
+};
+
+WYMeditor.XhtmlLexer.prototype.addInTagDeclarationTokens = function(scope)
+{
+  this.addSpecialPattern('\\s+', scope, 'Ignore');
+
+  this.addAttributeTokens(scope);
+
+  this.addExitPattern('/>', scope);
+  this.addExitPattern('>', scope);
+
+};
+
+WYMeditor.XhtmlLexer.prototype.addAttributeTokens = function(scope)
+{
+  this.addSpecialPattern("\\s*[a-z-_0-9]*:?[a-z-_0-9]+\\s*(?=\=)\\s*", scope, 'TagAttributes');
+
+  this.addEntryPattern('=\\s*"', scope, 'DoubleQuotedAttribute');
+  this.addPattern("\\\\\"", 'DoubleQuotedAttribute');
+  this.addExitPattern('"', 'DoubleQuotedAttribute');
+
+  this.addEntryPattern("=\\s*'", scope, 'SingleQuotedAttribute');
+  this.addPattern("\\\\'", 'SingleQuotedAttribute');
+  this.addExitPattern("'", 'SingleQuotedAttribute');
+
+  this.addSpecialPattern('=\\s*[^>\\s]*', scope, 'UnquotedAttribute');
+};
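+
+// Illustrative walkthrough (not in the upstream source): lexing
+// '<a href="#">x</a>' drives these mode transitions:
+//   enter OpeningTag at '<a ', special TagAttributes at 'href',
+//   enter/exit DoubleQuotedAttribute around '#', exit OpeningTag at '>',
+//   unmatched Text for 'x', special ClosingTag at '</a>'.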
+
+
+
+/**
+* XHTML Parser.
+*
+* This XHTML parser triggers the events available on the
+* current SaxListener.
+*
+*    @author Bermi Ferrer (http://bermi.org)
+*/
+WYMeditor.XhtmlParser = function(Listener, mode)
+{
+  var mode = mode || 'Text';
+  this._Lexer = new WYMeditor.XhtmlLexer(this);
+  this._Listener = Listener;
+  this._mode = mode;
+  this._matches = [];
+  this._last_match = '';
+  this._current_match = '';
+
+  return this;
+};
+
+WYMeditor.XhtmlParser.prototype.parse = function(raw)
+{
+  this._Lexer.parse(this.beforeParsing(raw));
+  return this.afterParsing(this._Listener.getResult());
+};
+
+WYMeditor.XhtmlParser.prototype.beforeParsing = function(raw)
+{
+  if(raw.match(/class="MsoNormal"/) || raw.match(/ns = "urn:schemas-microsoft-com/)){
+    // Useful for cleaning up content pasted from other sources (MS Word)
+    this._Listener.avoidStylingTagsAndAttributes();
+  }
+  return this._Listener.beforeParsing(raw);
+};
+
+WYMeditor.XhtmlParser.prototype.afterParsing = function(parsed)
+{
+  if(this._Listener._avoiding_tags_implicitly){
+    this._Listener.allowStylingTagsAndAttributes();
+  }
+  return this._Listener.afterParsing(parsed);
+};
+
+
+WYMeditor.XhtmlParser.prototype.Ignore = function(match, state)
+{
+  return true;
+};
+
+WYMeditor.XhtmlParser.prototype.Text = function(text)
+{
+  this._Listener.addContent(text);
+  return true;
+};
+
+WYMeditor.XhtmlParser.prototype.Comment = function(match, status)
+{
+  return this._addNonTagBlock(match, status, 'addComment');
+};
+
+WYMeditor.XhtmlParser.prototype.Script = function(match, status)
+{
+  return this._addNonTagBlock(match, status, 'addScript');
+};
+
+WYMeditor.XhtmlParser.prototype.Css = function(match, status)
+{
+  return this._addNonTagBlock(match, status, 'addCss');
+};
+
+WYMeditor.XhtmlParser.prototype._addNonTagBlock = function(match, state, type)
+{
+  switch (state){
+    case WYMeditor.LEXER_ENTER:
+    this._non_tag = match;
+    break;
+    case WYMeditor.LEXER_UNMATCHED:
+    this._non_tag += match;
+    break;
+    case WYMeditor.LEXER_EXIT:
+    switch(type) {
+      case 'addComment':
+      this._Listener.addComment(this._non_tag+match);
+      break;
+      case 'addScript':
+      this._Listener.addScript(this._non_tag+match);
+      break;
+      case 'addCss':
+      this._Listener.addCss(this._non_tag+match);
+      break;
+    }
+  }
+  return true;
+};
+
+WYMeditor.XhtmlParser.prototype.OpeningTag = function(match, state)
+{
+  switch (state){
+    case WYMeditor.LEXER_ENTER:
+    this._tag = this.normalizeTag(match);
+    this._tag_attributes = {};
+    break;
+    case WYMeditor.LEXER_SPECIAL:
+    this._callOpenTagListener(this.normalizeTag(match));
+    break;
+    case WYMeditor.LEXER_EXIT:
+    this._callOpenTagListener(this._tag, this._tag_attributes);
+  }
+  return true;
+};
+
+WYMeditor.XhtmlParser.prototype.ClosingTag = function(match, state)
+{
+  this._callCloseTagListener(this.normalizeTag(match));
+  return true;
+};
+
+WYMeditor.XhtmlParser.prototype._callOpenTagListener = function(tag, attributes)
+{
+  var  attributes = attributes || {};
+  this.autoCloseUnclosedBeforeNewOpening(tag);
+
+  if(this._Listener.isBlockTag(tag)){
+    this._Listener._tag_stack.push(tag);
+    this._Listener.fixNestingBeforeOpeningBlockTag(tag, attributes);
+    this._Listener.openBlockTag(tag, attributes);
+    this._increaseOpenTagCounter(tag);
+  }else if(this._Listener.isInlineTag(tag)){
+    this._Listener.inlineTag(tag, attributes);
+  }else{
+    this._Listener.openUnknownTag(tag, attributes);
+    this._increaseOpenTagCounter(tag);
+  }
+  this._Listener.last_tag = tag;
+  this._Listener.last_tag_opened = true;
+  this._Listener.last_tag_attributes = attributes;
+};
+
+WYMeditor.XhtmlParser.prototype._callCloseTagListener = function(tag)
+{
+  if(this._decreaseOpenTagCounter(tag)){
+    this.autoCloseUnclosedBeforeTagClosing(tag);
+
+    if(this._Listener.isBlockTag(tag)){
+      var expected_tag = this._Listener._tag_stack.pop();
+      if(expected_tag == false){
+        return;
+      }else if(expected_tag != tag){
+        tag = expected_tag;
+      }
+      this._Listener.closeBlockTag(tag);
+    }else{
+      this._Listener.closeUnknownTag(tag);
+    }
+  }else{
+    this._Listener.closeUnopenedTag(tag);
+  }
+  this._Listener.last_tag = tag;
+  this._Listener.last_tag_opened = false;
+};
+
+WYMeditor.XhtmlParser.prototype._increaseOpenTagCounter = function(tag)
+{
+  this._Listener._open_tags[tag] = this._Listener._open_tags[tag] || 0;
+  this._Listener._open_tags[tag]++;
+};
+
+WYMeditor.XhtmlParser.prototype._decreaseOpenTagCounter = function(tag)
+{
+  if(this._Listener._open_tags[tag]){
+    this._Listener._open_tags[tag]--;
+    if(this._Listener._open_tags[tag] == 0){
+      this._Listener._open_tags[tag] = undefined;
+    }
+    return true;
+  }
+  return false;
+};
+
+WYMeditor.XhtmlParser.prototype.autoCloseUnclosedBeforeNewOpening = function(new_tag)
+{
+  this._autoCloseUnclosed(new_tag, false);
+};
+
+WYMeditor.XhtmlParser.prototype.autoCloseUnclosedBeforeTagClosing = function(tag)
+{
+  this._autoCloseUnclosed(tag, true);
+};
+
+WYMeditor.XhtmlParser.prototype._autoCloseUnclosed = function(new_tag, closing)
+{
+  var closing = closing || false;
+  if(this._Listener._open_tags){
+    for (var tag in this._Listener._open_tags) {
+      var counter = this._Listener._open_tags[tag];
+      if(counter > 0 && this._Listener.shouldCloseTagAutomatically(tag, new_tag, closing)){
+        this._callCloseTagListener(tag, true);
+      }
+    }
+  }
+};
+
+WYMeditor.XhtmlParser.prototype.getTagReplacements = function()
+{
+  return this._Listener.getTagReplacements();
+};
+
+WYMeditor.XhtmlParser.prototype.normalizeTag = function(tag)
+{
+  tag = tag.replace(/^([\s<\/>]*)|([\s<\/>]*)$/gm,'').toLowerCase();
+  var tags = this._Listener.getTagReplacements();
+  if(tags[tag]){
+    return tags[tag];
+  }
+  return tag;
+};
+
+WYMeditor.XhtmlParser.prototype.TagAttributes = function(match, state)
+{
+  if(WYMeditor.LEXER_SPECIAL == state){
+    this._current_attribute = match;
+  }
+  return true;
+};
+
+WYMeditor.XhtmlParser.prototype.DoubleQuotedAttribute = function(match, state)
+{
+  if(WYMeditor.LEXER_UNMATCHED == state){
+    this._tag_attributes[this._current_attribute] = match;
+  }
+  return true;
+};
+
+WYMeditor.XhtmlParser.prototype.SingleQuotedAttribute = function(match, state)
+{
+  if(WYMeditor.LEXER_UNMATCHED == state){
+    this._tag_attributes[this._current_attribute] = match;
+  }
+  return true;
+};
+
+WYMeditor.XhtmlParser.prototype.UnquotedAttribute = function(match, state)
+{
+  this._tag_attributes[this._current_attribute] = match.replace(/^=/,'');
+  return true;
+};
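+
+// Usage sketch (illustrative): tag names are normalized and mapped through
+// the listener's replacement table; 'stub' here is a hypothetical minimal
+// listener, not part of the upstream source.
+// var stub = {getTagReplacements: function() {return {'b':'strong', 'i':'em'};}};
+// var parser = new WYMeditor.XhtmlParser(stub);
+// parser.normalizeTag('</B>');   // → 'strong'
+// parser.normalizeTag('<em>');   // → 'em'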
+
+
+
+/**
+* XHTML SAX listener.
+*
+*    @author Bermi Ferrer (http://bermi.org)
+*/
+WYMeditor.XhtmlSaxListener = function()
+{
+  this.output = '';
+  this.helper = new WYMeditor.XmlHelper();
+  this._open_tags = {};
+  this.validator = WYMeditor.XhtmlValidator;
+  this._tag_stack = [];
+  this.avoided_tags = [];
+
+  this.entities = {
+    '&nbsp;':'&#160;','&iexcl;':'&#161;','&cent;':'&#162;',
+    '&pound;':'&#163;','&curren;':'&#164;','&yen;':'&#165;',
+    '&brvbar;':'&#166;','&sect;':'&#167;','&uml;':'&#168;',
+    '&copy;':'&#169;','&ordf;':'&#170;','&laquo;':'&#171;',
+    '&not;':'&#172;','&shy;':'&#173;','&reg;':'&#174;',
+    '&macr;':'&#175;','&deg;':'&#176;','&plusmn;':'&#177;',
+    '&sup2;':'&#178;','&sup3;':'&#179;','&acute;':'&#180;',
+    '&micro;':'&#181;','&para;':'&#182;','&middot;':'&#183;',
+    '&cedil;':'&#184;','&sup1;':'&#185;','&ordm;':'&#186;',
+    '&raquo;':'&#187;','&frac14;':'&#188;','&frac12;':'&#189;',
+    '&frac34;':'&#190;','&iquest;':'&#191;','&Agrave;':'&#192;',
+    '&Aacute;':'&#193;','&Acirc;':'&#194;','&Atilde;':'&#195;',
+    '&Auml;':'&#196;','&Aring;':'&#197;','&AElig;':'&#198;',
+    '&Ccedil;':'&#199;','&Egrave;':'&#200;','&Eacute;':'&#201;',
+    '&Ecirc;':'&#202;','&Euml;':'&#203;','&Igrave;':'&#204;',
+    '&Iacute;':'&#205;','&Icirc;':'&#206;','&Iuml;':'&#207;',
+    '&ETH;':'&#208;','&Ntilde;':'&#209;','&Ograve;':'&#210;',
+    '&Oacute;':'&#211;','&Ocirc;':'&#212;','&Otilde;':'&#213;',
+    '&Ouml;':'&#214;','&times;':'&#215;','&Oslash;':'&#216;',
+    '&Ugrave;':'&#217;','&Uacute;':'&#218;','&Ucirc;':'&#219;',
+    '&Uuml;':'&#220;','&Yacute;':'&#221;','&THORN;':'&#222;',
+    '&szlig;':'&#223;','&agrave;':'&#224;','&aacute;':'&#225;',
+    '&acirc;':'&#226;','&atilde;':'&#227;','&auml;':'&#228;',
+    '&aring;':'&#229;','&aelig;':'&#230;','&ccedil;':'&#231;',
+    '&egrave;':'&#232;','&eacute;':'&#233;','&ecirc;':'&#234;',
+    '&euml;':'&#235;','&igrave;':'&#236;','&iacute;':'&#237;',
+    '&icirc;':'&#238;','&iuml;':'&#239;','&eth;':'&#240;',
+    '&ntilde;':'&#241;','&ograve;':'&#242;','&oacute;':'&#243;',
+    '&ocirc;':'&#244;','&otilde;':'&#245;','&ouml;':'&#246;',
+    '&divide;':'&#247;','&oslash;':'&#248;','&ugrave;':'&#249;',
+    '&uacute;':'&#250;','&ucirc;':'&#251;','&uuml;':'&#252;',
+    '&yacute;':'&#253;','&thorn;':'&#254;','&yuml;':'&#255;',
+    '&OElig;':'&#338;','&oelig;':'&#339;','&Scaron;':'&#352;',
+    '&scaron;':'&#353;','&Yuml;':'&#376;','&fnof;':'&#402;',
+    '&circ;':'&#710;','&tilde;':'&#732;','&Alpha;':'&#913;',
+    '&Beta;':'&#914;','&Gamma;':'&#915;','&Delta;':'&#916;',
+    '&Epsilon;':'&#917;','&Zeta;':'&#918;','&Eta;':'&#919;',
+    '&Theta;':'&#920;','&Iota;':'&#921;','&Kappa;':'&#922;',
+    '&Lambda;':'&#923;','&Mu;':'&#924;','&Nu;':'&#925;',
+    '&Xi;':'&#926;','&Omicron;':'&#927;','&Pi;':'&#928;',
+    '&Rho;':'&#929;','&Sigma;':'&#931;','&Tau;':'&#932;',
+    '&Upsilon;':'&#933;','&Phi;':'&#934;','&Chi;':'&#935;',
+    '&Psi;':'&#936;','&Omega;':'&#937;','&alpha;':'&#945;',
+    '&beta;':'&#946;','&gamma;':'&#947;','&delta;':'&#948;',
+    '&epsilon;':'&#949;','&zeta;':'&#950;','&eta;':'&#951;',
+    '&theta;':'&#952;','&iota;':'&#953;','&kappa;':'&#954;',
+    '&lambda;':'&#955;','&mu;':'&#956;','&nu;':'&#957;',
+    '&xi;':'&#958;','&omicron;':'&#959;','&pi;':'&#960;',
+    '&rho;':'&#961;','&sigmaf;':'&#962;','&sigma;':'&#963;',
+    '&tau;':'&#964;','&upsilon;':'&#965;','&phi;':'&#966;',
+    '&chi;':'&#967;','&psi;':'&#968;','&omega;':'&#969;',
+    '&thetasym;':'&#977;','&upsih;':'&#978;','&piv;':'&#982;',
+    '&ensp;':'&#8194;','&emsp;':'&#8195;','&thinsp;':'&#8201;',
+    '&zwnj;':'&#8204;','&zwj;':'&#8205;','&lrm;':'&#8206;',
+    '&rlm;':'&#8207;','&ndash;':'&#8211;','&mdash;':'&#8212;',
+    '&lsquo;':'&#8216;','&rsquo;':'&#8217;','&sbquo;':'&#8218;',
+    '&ldquo;':'&#8220;','&rdquo;':'&#8221;','&bdquo;':'&#8222;',
+    '&dagger;':'&#8224;','&Dagger;':'&#8225;','&bull;':'&#8226;',
+    '&hellip;':'&#8230;','&permil;':'&#8240;','&prime;':'&#8242;',
+    '&Prime;':'&#8243;','&lsaquo;':'&#8249;','&rsaquo;':'&#8250;',
+    '&oline;':'&#8254;','&frasl;':'&#8260;','&euro;':'&#8364;',
+    '&image;':'&#8465;','&weierp;':'&#8472;','&real;':'&#8476;',
+    '&trade;':'&#8482;','&alefsym;':'&#8501;','&larr;':'&#8592;',
+    '&uarr;':'&#8593;','&rarr;':'&#8594;','&darr;':'&#8595;',
+    '&harr;':'&#8596;','&crarr;':'&#8629;','&lArr;':'&#8656;',
+    '&uArr;':'&#8657;','&rArr;':'&#8658;','&dArr;':'&#8659;',
+    '&hArr;':'&#8660;','&forall;':'&#8704;','&part;':'&#8706;',
+    '&exist;':'&#8707;','&empty;':'&#8709;','&nabla;':'&#8711;',
+    '&isin;':'&#8712;','&notin;':'&#8713;','&ni;':'&#8715;',
+    '&prod;':'&#8719;','&sum;':'&#8721;','&minus;':'&#8722;',
+    '&lowast;':'&#8727;','&radic;':'&#8730;','&prop;':'&#8733;',
+    '&infin;':'&#8734;','&ang;':'&#8736;','&and;':'&#8743;',
+    '&or;':'&#8744;','&cap;':'&#8745;','&cup;':'&#8746;',
+    '&int;':'&#8747;','&there4;':'&#8756;','&sim;':'&#8764;',
+    '&cong;':'&#8773;','&asymp;':'&#8776;','&ne;':'&#8800;',
+    '&equiv;':'&#8801;','&le;':'&#8804;','&ge;':'&#8805;',
+    '&sub;':'&#8834;','&sup;':'&#8835;','&nsub;':'&#8836;',
+    '&sube;':'&#8838;','&supe;':'&#8839;','&oplus;':'&#8853;',
+    '&otimes;':'&#8855;','&perp;':'&#8869;','&sdot;':'&#8901;',
+    '&lceil;':'&#8968;','&rceil;':'&#8969;','&lfloor;':'&#8970;',
+    '&rfloor;':'&#8971;','&lang;':'&#9001;','&rang;':'&#9002;',
+    '&loz;':'&#9674;','&spades;':'&#9824;','&clubs;':'&#9827;',
+    '&hearts;':'&#9829;','&diams;':'&#9830;'};
+
+    this.block_tags = ["a", "abbr", "acronym", "address", "area", "b",
+    "base", "bdo", "big", "blockquote", "body", "button",
+    "caption", "cite", "code", "col", "colgroup", "dd", "del", "div",
+    "dfn", "dl", "dt", "em", "fieldset", "form", "head", "h1", "h2",
+    "h3", "h4", "h5", "h6", "html", "i", "ins",
+    "kbd", "label", "legend", "li", "map", "noscript",
+    "object", "ol", "optgroup", "option", "p", "param", "pre", "q",
+    "samp", "script", "select", "small", "span", "strong", "style",
+    "sub", "sup", "table", "tbody", "td", "textarea", "tfoot", "th",
+    "thead", "title", "tr", "tt", "ul", "var", "extends"];
+
+
+    this.inline_tags = ["br", "hr", "img", "input"];
+
+    return this;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.shouldCloseTagAutomatically = function(tag, now_on_tag, closing)
+{
+  var closing = closing || false;
+  if(tag == 'td'){
+    if((closing && now_on_tag == 'tr') || (!closing && now_on_tag == 'td')){
+      return true;
+    }
+  }
+  if(tag == 'option'){
+    if((closing && now_on_tag == 'select') || (!closing && now_on_tag == 'option')){
+      return true;
+    }
+  }
+  return false;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.beforeParsing = function(raw)
+{
+  this.output = '';
+  return raw;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.afterParsing = function(xhtml)
+{
+  xhtml = this.replaceNamedEntities(xhtml);
+  xhtml = this.joinRepeatedEntities(xhtml);
+  xhtml = this.removeEmptyTags(xhtml);
+  xhtml = this.removeBrInPre(xhtml);
+  return xhtml;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.replaceNamedEntities = function(xhtml)
+{
+  for (var entity in this.entities) {
+    xhtml = xhtml.replace(new RegExp(entity, 'g'), this.entities[entity]);
+  }
+  return xhtml;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.joinRepeatedEntities = function(xhtml)
+{
+  var tags = 'em|strong|sub|sup|acronym|pre|del|address';
+  return xhtml.replace(new RegExp('<\/('+tags+')><\\1>' ,''),'').
+  replace(new RegExp('(\\s*<('+tags+')>\\s*){2}(.*)(\\s*<\\/\\2>\\s*){2}' ,''),'<\$2>\$3</\$2>');
+};
+
+// keep empty paragraphs
+// from: http://forum.wymeditor.org/forum/viewtopic.php?f=2&t=711#p2430
+//WYMeditor.XhtmlSaxListener.prototype.removeEmptyTags = function(xhtml)
+//{
+//  return xhtml.replace(new RegExp('<('+this.block_tags.join("|").replace(/\|td/,'')
+//	.replace(/\|th/, '')+')>(<br \/>|&#160;|&nbsp;|\\s)*<\/\\1>' ,'g'),'');
+//};
+WYMeditor.XhtmlSaxListener.prototype.removeEmptyTags = function(xhtml)
+{
+  return xhtml.replace(new RegExp('<('+this.block_tags.join("|").replace(/\|td/,'').replace(/\|p/,'')
+	.replace(/\|th/, '')+')>(<br \/>|&#160;|&nbsp;|\\s)*<\/\\1>' ,'g'),'');
+};
+
+WYMeditor.XhtmlSaxListener.prototype.removeBrInPre = function(xhtml)
+{
+  var matches = xhtml.match(new RegExp('<pre[^>]*>(.*?)<\/pre>','gmi'));
+  if(matches) {
+    for(var i=0; i<matches.length; i++) {
+      xhtml = xhtml.replace(matches[i], matches[i].replace(new RegExp('<br \/>', 'g'), String.fromCharCode(13,10)));
+    }
+  }
+  return xhtml;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.getResult = function()
+{
+  return this.output;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.getTagReplacements = function()
+{
+  return {'b':'strong', 'i':'em'};
+};
+
+WYMeditor.XhtmlSaxListener.prototype.addContent = function(text)
+{
+  this.output += text;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.addComment = function(text)
+{
+  if(!this.remove_comments){ // emit only when comment removal is off, matching addScript/addCss
+    this.output += text;
+  }
+};
+
+WYMeditor.XhtmlSaxListener.prototype.addScript = function(text)
+{
+  if(!this.remove_scripts){
+    this.output += text;
+  }
+};
+
+WYMeditor.XhtmlSaxListener.prototype.addCss = function(text)
+{
+  if(!this.remove_embeded_styles){
+    this.output += text;
+  }
+};
+
+WYMeditor.XhtmlSaxListener.prototype.openBlockTag = function(tag, attributes)
+{
+  this.output += this.helper.tag(tag, this.validator.getValidTagAttributes(tag, attributes), true);
+};
+
+WYMeditor.XhtmlSaxListener.prototype.inlineTag = function(tag, attributes)
+{
+  this.output += this.helper.tag(tag, this.validator.getValidTagAttributes(tag, attributes));
+};
+
+WYMeditor.XhtmlSaxListener.prototype.openUnknownTag = function(tag, attributes)
+{
+  //this.output += this.helper.tag(tag, attributes, true);
+};
+
+WYMeditor.XhtmlSaxListener.prototype.closeBlockTag = function(tag)
+{
+  this.output = this.output.replace(/<br \/>$/, '')+this._getClosingTagContent('before', tag)+"</"+tag+">"+this._getClosingTagContent('after', tag);
+};
+
+WYMeditor.XhtmlSaxListener.prototype.closeUnknownTag = function(tag)
+{
+  //this.output += "</"+tag+">";
+};
+
+WYMeditor.XhtmlSaxListener.prototype.closeUnopenedTag = function(tag)
+{
+  this.output += "</"+tag+">";
+};
+
+WYMeditor.XhtmlSaxListener.prototype.avoidStylingTagsAndAttributes = function()
+{
+  this.avoided_tags = ['div','span'];
+  this.validator.skiped_attributes = ['style'];
+  this.validator.skiped_attribute_values = ['MsoNormal','main1']; // MS Word attributes for class
+  this._avoiding_tags_implicitly = true;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.allowStylingTagsAndAttributes = function()
+{
+  this.avoided_tags = [];
+  this.validator.skiped_attributes = [];
+  this.validator.skiped_attribute_values = [];
+  this._avoiding_tags_implicitly = false;
+};
+
+WYMeditor.XhtmlSaxListener.prototype.isBlockTag = function(tag)
+{
+  return !WYMeditor.Helper.contains(this.avoided_tags, tag) && WYMeditor.Helper.contains(this.block_tags, tag);
+};
+
+WYMeditor.XhtmlSaxListener.prototype.isInlineTag = function(tag)
+{
+  return !WYMeditor.Helper.contains(this.avoided_tags, tag) && WYMeditor.Helper.contains(this.inline_tags, tag);
+};
+
+WYMeditor.XhtmlSaxListener.prototype.insertContentAfterClosingTag = function(tag, content)
+{
+  this._insertContentWhenClosingTag('after', tag, content);
+};
+
+WYMeditor.XhtmlSaxListener.prototype.insertContentBeforeClosingTag = function(tag, content)
+{
+  this._insertContentWhenClosingTag('before', tag, content);
+};
+
+WYMeditor.XhtmlSaxListener.prototype.fixNestingBeforeOpeningBlockTag = function(tag, attributes)
+{
+    if(tag != 'li' && (tag == 'ul' || tag == 'ol') && this.last_tag && !this.last_tag_opened && this.last_tag == 'li'){
+      this.output = this.output.replace(/<\/li>$/, '');
+      this.insertContentAfterClosingTag(tag, '</li>');
+    }
+};
+
+WYMeditor.XhtmlSaxListener.prototype._insertContentWhenClosingTag = function(position, tag, content)
+{
+  if(!this['_insert_'+position+'_closing']){
+    this['_insert_'+position+'_closing'] = [];
+  }
+  if(!this['_insert_'+position+'_closing'][tag]){
+    this['_insert_'+position+'_closing'][tag] = [];
+  }
+  this['_insert_'+position+'_closing'][tag].push(content);
+};
+
+WYMeditor.XhtmlSaxListener.prototype._getClosingTagContent = function(position, tag)
+{
+  if( this['_insert_'+position+'_closing'] &&
+      this['_insert_'+position+'_closing'][tag] &&
+      this['_insert_'+position+'_closing'][tag].length > 0){
+        return this['_insert_'+position+'_closing'][tag].pop();
+  }
+  return '';
+};
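+
+// End-to-end sketch (illustrative, assuming the surrounding WYMeditor helpers
+// such as XmlHelper are loaded): parse dirty markup into filtered XHTML.
+// var listener = new WYMeditor.XhtmlSaxListener();
+// var parser = new WYMeditor.XhtmlParser(listener);
+// parser.parse('<B>bold<br></B>');
+// // → roughly '<strong>bold</strong>': 'b' is mapped to 'strong', the tag is
+// // re-emitted with validated attributes, and a trailing <br /> is dropped
+// // by closeBlockTag before the closing tag is written.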
+
+
+/********** CSS PARSER **********/
+
+
+WYMeditor.WymCssLexer = function(parser, only_wym_blocks)
+{
+  var only_wym_blocks = (typeof only_wym_blocks == 'undefined' ? true : only_wym_blocks);
+
+  jQuery.extend(this, new WYMeditor.Lexer(parser, (only_wym_blocks?'Ignore':'WymCss')));
+
+  this.mapHandler('WymCss', 'Ignore');
+
+  if(only_wym_blocks == true){
+    this.addEntryPattern("/\\\x2a[<\\s]*WYMeditor[>\\s]*\\\x2a/", 'Ignore', 'WymCss');
+    this.addExitPattern("/\\\x2a[<\/\\s]*WYMeditor[>\\s]*\\\x2a/", 'WymCss');
+  }
+
+  this.addSpecialPattern("[\\sa-z1-6]*\\\x2e[a-z-_0-9]+", 'WymCss', 'WymCssStyleDeclaration');
+
+  this.addEntryPattern("/\\\x2a", 'WymCss', 'WymCssComment');
+  this.addExitPattern("\\\x2a/", 'WymCssComment');
+
+  this.addEntryPattern("\x7b", 'WymCss', 'WymCssStyle');
+  this.addExitPattern("\x7d", 'WymCssStyle');
+
+  this.addEntryPattern("/\\\x2a", 'WymCssStyle', 'WymCssFeedbackStyle');
+  this.addExitPattern("\\\x2a/", 'WymCssFeedbackStyle');
+
+  return this;
+};
+
+WYMeditor.WymCssParser = function()
+{
+  this._in_style = false;
+  this._has_title = false;
+  this.only_wym_blocks = true;
+  this.css_settings = {'classesItems':[], 'editorStyles':[], 'dialogStyles':[]};
+  return this;
+};
+
+WYMeditor.WymCssParser.prototype.parse = function(raw, only_wym_blocks)
+{
+  var only_wym_blocks = (typeof only_wym_blocks == 'undefined' ? this.only_wym_blocks : only_wym_blocks);
+  this._Lexer = new WYMeditor.WymCssLexer(this, only_wym_blocks);
+  this._Lexer.parse(raw);
+};
+
+WYMeditor.WymCssParser.prototype.Ignore = function(match, state)
+{
+  return true;
+};
+
+WYMeditor.WymCssParser.prototype.WymCssComment = function(text, status)
+{
+  if(text.match(/end[a-z0-9\s]*wym[a-z0-9\s]*/mi)){
+    return false;
+  }
+  if(status == WYMeditor.LEXER_UNMATCHED){
+    if(!this._in_style){
+      this._has_title = true;
+      this._current_item = {'title':WYMeditor.Helper.trim(text)};
+    }else{
+      if(this._current_item[this._current_element]){
+        if(!this._current_item[this._current_element].expressions){
+          this._current_item[this._current_element].expressions = [text];
+        }else{
+          this._current_item[this._current_element].expressions.push(text);
+        }
+      }
+    }
+    this._in_style = true;
+  }
+  return true;
+};
+
+WYMeditor.WymCssParser.prototype.WymCssStyle = function(match, status)
+{
+  if(status == WYMeditor.LEXER_UNMATCHED){
+    match = WYMeditor.Helper.trim(match);
+    if(match != ''){
+      this._current_item[this._current_element].style = match;
+    }
+  }else if (status == WYMeditor.LEXER_EXIT){
+    this._in_style = false;
+    this._has_title = false;
+    this.addStyleSetting(this._current_item);
+  }
+  return true;
+};
+
+WYMeditor.WymCssParser.prototype.WymCssFeedbackStyle = function(match, status)
+{
+  if(status == WYMeditor.LEXER_UNMATCHED){
+    this._current_item[this._current_element].feedback_style = match.replace(/^([\s\/\*]*)|([\s\/\*]*)$/gm,'');
+  }
+  return true;
+};
+
+WYMeditor.WymCssParser.prototype.WymCssStyleDeclaration = function(match)
+{
+  match = match.replace(/^([\s\.]*)|([\s\.*]*)$/gm, '');
+
+  var tag = '';
+  if(match.indexOf('.') > 0){
+    var parts = match.split('.');
+    this._current_element = parts[1];
+    var tag = parts[0];
+  }else{
+    this._current_element = match;
+  }
+
+  if(!this._has_title){
+    this._current_item = {'title':(!tag?'':tag.toUpperCase()+': ')+this._current_element};
+    this._has_title = true;
+  }
+
+  if(!this._current_item[this._current_element]){
+    this._current_item[this._current_element] = {'name':this._current_element};
+  }
+  if(tag){
+    if(!this._current_item[this._current_element].tags){
+      this._current_item[this._current_element].tags = [tag];
+    }else{
+      this._current_item[this._current_element].tags.push(tag);
+    }
+  }
+  return true;
+};
+
+WYMeditor.WymCssParser.prototype.addStyleSetting = function(style_details)
+{
+  for (var name in style_details){
+    var details = style_details[name];
+    if(typeof details == 'object' && name != 'title'){
+
+      this.css_settings.classesItems.push({
+        'name': WYMeditor.Helper.trim(details.name),
+        'title': style_details.title,
+        'expr' : WYMeditor.Helper.trim((details.expressions||details.tags).join(', '))
+      });
+      if(details.feedback_style){
+        this.css_settings.editorStyles.push({
+          'name': '.'+ WYMeditor.Helper.trim(details.name),
+          'css': details.feedback_style
+        });
+      }
+      if(details.style){
+        this.css_settings.dialogStyles.push({
+          'name': '.'+ WYMeditor.Helper.trim(details.name),
+          'css': details.style
+        });
+      }
+    }
+  }
+};
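+
+// Usage sketch (illustrative): extract class definitions from a WYMeditor
+// CSS block.
+// var cssParser = new WYMeditor.WymCssParser();
+// cssParser.parse('/* WYMeditor */ p.important { color: red; }', true);
+// // cssParser.css_settings.classesItems →
+// //   [{name:'important', title:'P: important', expr:'p'}]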
+
+/********** HELPERS **********/
+
+// Returns true if it is a text node containing only whitespace
+jQuery.fn.isPhantomNode = function() {
+  if (this[0].nodeType == 3)
+    return !(/[^\t\n\r ]/.test(this[0].data));
+
+  return false;
+};
+
+WYMeditor.isPhantomNode = function(n) {
+  if (n.nodeType == 3)
+    return !(/[^\t\n\r ]/.test(n.data));
+
+  return false;
+};
+
+WYMeditor.isPhantomString = function(str) {
+    return !(/[^\t\n\r ]/.test(str));
+};
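+
+// Usage sketch (illustrative):
+// WYMeditor.isPhantomString('  \t\n');   // → true: whitespace only
+// WYMeditor.isPhantomString(' x ');      // → false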
+
+// Returns the Parents or the node itself
+// jqexpr = a jQuery expression
+jQuery.fn.parentsOrSelf = function(jqexpr) {
+  var n = this;
+
+  if (n[0].nodeType == 3)
+    n = n.parents().slice(0,1);
+
+//  if (n.is(jqexpr)) // XXX should work, but doesn't (probably a jQuery bug)
+  if (n.filter(jqexpr).size() == 1)
+    return n;
+  else
+    return n.parents(jqexpr).slice(0,1);
+};
+
+// String & array helpers
+
+WYMeditor.Helper = {
+
+    //replace all instances of 'old' by 'rep' in 'str' string
+    replaceAll: function(str, old, rep) {
+        var rExp = new RegExp(old, "g");
+        return(str.replace(rExp, rep));
+    },
+
+    //insert 'inserted' at position 'pos' in 'str' string
+    insertAt: function(str, inserted, pos) {
+        return(str.substr(0,pos) + inserted + str.substring(pos));
+    },
+
+    //trim 'str' string
+    trim: function(str) {
+        return str.replace(/^(\s*)|(\s*)$/gm,'');
+    },
+
+    //return true if 'arr' array contains 'elem', or false
+    contains: function(arr, elem) {
+        for (var i = 0; i < arr.length; i++) {
+            if (arr[i] === elem) return true;
+        }
+        return false;
+    },
+
+    //return 'item' position in 'arr' array, or -1
+    indexOf: function(arr, item) {
+        var ret=-1;
+        for(var i = 0; i < arr.length; i++) {
+            if (arr[i] == item) {
+                ret = i;
+                break;
+            }
+        }
+	    return(ret);
+    },
+
+    //return 'item' object in 'arr' array, checking its 'name' property, or null
+    findByName: function(arr, name) {
+        for(var i = 0; i < arr.length; i++) {
+            var item = arr[i];
+            if(item.name == name) return(item);
+        }
+        return(null);
+    }
+};
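+
+// Usage sketch (illustrative):
+// WYMeditor.Helper.replaceAll('a-b-c', '-', '+');      // → 'a+b+c'
+// WYMeditor.Helper.insertAt('ac', 'b', 1);             // → 'abc'
+// WYMeditor.Helper.trim('  x  ');                      // → 'x'
+// WYMeditor.Helper.contains(['a', 'b'], 'b');          // → true
+// WYMeditor.Helper.indexOf(['a', 'b'], 'b');           // → 1
+// WYMeditor.Helper.findByName([{name:'x'}], 'x');      // → {name:'x'}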
+
+
+/*
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ *
+ * File Name:
+ *        jquery.wymeditor.explorer.js
+ *        MSIE specific class and functions.
+ *        See the documentation for more info.
+ *
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+ *        Bermi Ferrer (wymeditor a-t bermi dotorg)
+ *        Frédéric Palluel-Lafleur (fpalluel a-t gmail dotcom)
+ *        Jonatan Lundin (jonatan.lundin _at_ gmail.com)
+ */
+
+WYMeditor.WymClassExplorer = function(wym) {
+
+    this._wym = wym;
+    this._class = "className";
+    this._newLine = "\r\n";
+
+};
+
+WYMeditor.WymClassExplorer.prototype.initIframe = function(iframe) {
+
+    //This function is executed twice, though it is called once!
+    //But MSIE needs that, otherwise designMode won't work.
+    //Weird.
+    
+    this._iframe = iframe;
+    this._doc = iframe.contentWindow.document;
+    
+    //add css rules from options
+    var styles = this._doc.styleSheets[0];
+    var aCss = eval(this._options.editorStyles);
+
+    this.addCssRules(this._doc, aCss);
+
+    this._doc.title = this._wym._index;
+
+    //set the text direction
+    jQuery('html', this._doc).attr('dir', this._options.direction);
+    
+    //init html value
+    jQuery(this._doc.body).html(this._wym._html);
+    
+    //handle events
+    var wym = this;
+    
+    this._doc.body.onfocus = function()
+      {wym._doc.designMode = "on"; wym._doc = iframe.contentWindow.document;};
+    this._doc.onbeforedeactivate = function() {wym.saveCaret();};
+    this._doc.onkeyup = function() {
+      wym.saveCaret();
+      wym.keyup();
+    };
+    this._doc.onclick = function() {wym.saveCaret();};
+    
+    this._doc.body.onbeforepaste = function() {
+      wym._iframe.contentWindow.event.returnValue = false;
+    };
+    
+    this._doc.body.onpaste = function() {
+      wym._iframe.contentWindow.event.returnValue = false;
+      wym.paste(window.clipboardData.getData("Text"));
+    };
+    
+    //callback can't be executed twice, so we check
+    if(this._initialized) {
+      
+      //pre-bind functions
+      if(jQuery.isFunction(this._options.preBind)) this._options.preBind(this);
+      
+      //bind external events
+      this._wym.bindEvents();
+      
+      //post-init functions
+      if(jQuery.isFunction(this._options.postInit)) this._options.postInit(this);
+      
+      //add event listeners to doc elements, e.g. images
+      this.listen();
+    }
+    
+    this._initialized = true;
+    
+    //init designMode
+    this._doc.designMode="on";
+    try{
+        // (bermi's note) noticed when running unit tests on IE6.
+        // Is this really needed? It triggers a nonexistent property on IE6.
+        this._doc = iframe.contentWindow.document; 
+    }catch(e){}
+};
+
+WYMeditor.WymClassExplorer.prototype._exec = function(cmd,param) {
+
+    switch(cmd) {
+    
+    case WYMeditor.INDENT: case WYMeditor.OUTDENT:
+    
+        var container = this.findUp(this.container(), WYMeditor.LI);
+        if(container) {
+            var ancestor = container.parentNode.parentNode;
+            if(container.parentNode.childNodes.length>1
+              || ancestor.tagName.toLowerCase() == WYMeditor.OL
+              || ancestor.tagName.toLowerCase() == WYMeditor.UL)
+              this._doc.execCommand(cmd);
+        }
+    break;
+    default:
+        if(param) this._doc.execCommand(cmd,false,param);
+        else this._doc.execCommand(cmd);
+    break;
+	}
+    
+    this.listen();
+};
+
+WYMeditor.WymClassExplorer.prototype.selected = function() {
+
+    var caretPos = this._iframe.contentWindow.document.caretPos;
+        if(caretPos!=null) {
+            if(caretPos.parentElement!=undefined)
+              return(caretPos.parentElement());
+        }
+};
+
+WYMeditor.WymClassExplorer.prototype.saveCaret = function() {
+
+    this._doc.caretPos = this._doc.selection.createRange();
+};
+
+WYMeditor.WymClassExplorer.prototype.addCssRule = function(styles, oCss) {
+
+    styles.addRule(oCss.name, oCss.css);
+};
+
+WYMeditor.WymClassExplorer.prototype.insert = function(html) {
+
+    // Get the current selection
+    var range = this._doc.selection.createRange();
+
+    // Check if the current selection is inside the editor
+    if ( jQuery(range.parentElement()).parents( this._options.iframeBodySelector ).is('*') ) {
+        try {
+            // Overwrite selection with provided html
+            range.pasteHTML(html);
+        } catch (e) { }
+    } else {
+        // Fall back to the internal paste function if there's no selection
+        this.paste(html);
+    }
+};
+
+WYMeditor.WymClassExplorer.prototype.wrap = function(left, right) {
+
+    // Get the current selection
+    var range = this._doc.selection.createRange();
+
+    // Check if the current selection is inside the editor
+    if ( jQuery(range.parentElement()).parents( this._options.iframeBodySelector ).is('*') ) {
+        try {
+            // Overwrite selection with provided html
+            range.pasteHTML(left + range.text + right);
+        } catch (e) { }
+    }
+};
+
+WYMeditor.WymClassExplorer.prototype.unwrap = function() {
+
+    // Get the current selection
+    var range = this._doc.selection.createRange();
+
+    // Check if the current selection is inside the editor
+    if ( jQuery(range.parentElement()).parents( this._options.iframeBodySelector ).is('*') ) {
+        try {
+            // Unwrap selection
+            var text = range.text;
+            this._exec( 'Cut' );
+            range.pasteHTML( text );
+        } catch (e) { }
+    }
+};
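+
+// Illustrative usage sketch (not part of the original WYMeditor source): insert(), wrap()
+// and unwrap() all act on the current IE text range. Assuming "wym" is an initialized
+// editor instance, a toolbar handler might call:
+//
+//   wym.insert('<img src="logo.png" alt="logo"/>'); // replace the selection with HTML
+//   wym.wrap('<strong>', '</strong>');              // wrap the selected text
+//   wym.unwrap();                                   // strip the wrapping tags again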
+
+//keyup handler
+WYMeditor.WymClassExplorer.prototype.keyup = function() {
+  this._selected_image = null;
+};
+
+WYMeditor.WymClassExplorer.prototype.setFocusToNode = function(node) {
+    var range = this._doc.selection.createRange();
+    range.moveToElementText(node);
+    range.collapse(false);
+    range.move('character',-1);
+    range.select();
+    node.focus();
+};
+
+/*
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ *
+ * File Name:
+ *        jquery.wymeditor.mozilla.js
+ *        Gecko specific class and functions.
+ *        See the documentation for more info.
+ *
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+ *        Volker Mische (vmx a-t gmx dotde)
+ *        Bermi Ferrer (wymeditor a-t bermi dotorg)
+ *        Frédéric Palluel-Lafleur (fpalluel a-t gmail dotcom)
+ */
+
+WYMeditor.WymClassMozilla = function(wym) {
+
+    this._wym = wym;
+    this._class = "class";
+    this._newLine = "\n";
+};
+
+WYMeditor.WymClassMozilla.prototype.initIframe = function(iframe) {
+
+    this._iframe = iframe;
+    this._doc = iframe.contentDocument;
+    
+    //add css rules from options
+    
+    var styles = this._doc.styleSheets[0];    
+    var aCss = eval(this._options.editorStyles);
+    
+    this.addCssRules(this._doc, aCss);
+
+    this._doc.title = this._wym._index;
+
+    //set the text direction
+    jQuery('html', this._doc).attr('dir', this._options.direction);
+    
+    //init html value
+    this.html(this._wym._html);
+    
+    //init designMode
+    this.enableDesignMode();
+    
+    //pre-bind functions
+    if(jQuery.isFunction(this._options.preBind)) this._options.preBind(this);
+    
+    //bind external events
+    this._wym.bindEvents();
+    
+    //bind editor keydown events
+    jQuery(this._doc).bind("keydown", this.keydown);
+    
+    //bind editor keyup events
+    jQuery(this._doc).bind("keyup", this.keyup);
+    
+    //bind editor focus events (used to reset designmode - Gecko bug)
+    jQuery(this._doc).bind("focus", this.enableDesignMode);
+    
+    //post-init functions
+    if(jQuery.isFunction(this._options.postInit)) this._options.postInit(this);
+    
+    //add event listeners to doc elements, e.g. images
+    this.listen();
+};
+
+/* @name html
+ * @description Get/Set the html value
+ */
+WYMeditor.WymClassMozilla.prototype.html = function(html) {
+
+  if(typeof html === 'string') {
+  
+    //disable designMode
+    try { this._doc.designMode = "off"; } catch(e) { }
+    
+    //replace em by i and strong by bold
+    //(designMode issue)
+    html = html.replace(/<em(\b[^>]*)>/gi, "<i$1>")
+      .replace(/<\/em>/gi, "</i>")
+      .replace(/<strong(\b[^>]*)>/gi, "<b$1>")
+      .replace(/<\/strong>/gi, "</b>");
+
+    //update the html body
+    jQuery(this._doc.body).html(html);
+    
+    //re-init designMode
+    this.enableDesignMode();
+  }
+  else return(jQuery(this._doc.body).html());
+};
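+
+// Illustrative note (added for clarity): html() is a combined getter/setter. Assuming
+// "wym" is an initialized editor instance:
+//
+//   wym.html('<p>Hello</p>'); // setter: replaces the editor body
+//   var markup = wym.html();  // getter: returns the current body markup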
+
+WYMeditor.WymClassMozilla.prototype._exec = function(cmd,param) {
+
+    if(!this.selected()) return(false);
+
+    switch(cmd) {
+    
+    case WYMeditor.INDENT: case WYMeditor.OUTDENT:
+    
+        var focusNode = this.selected();    
+        var sel = this._iframe.contentWindow.getSelection();
+        var anchorNode = sel.anchorNode;
+        if(anchorNode.nodeName == "#text") anchorNode = anchorNode.parentNode;
+        
+        focusNode = this.findUp(focusNode, WYMeditor.BLOCKS);
+        anchorNode = this.findUp(anchorNode, WYMeditor.BLOCKS);
+        
+        if(focusNode && focusNode == anchorNode
+          && focusNode.tagName.toLowerCase() == WYMeditor.LI) {
+
+            var ancestor = focusNode.parentNode.parentNode;
+
+            if(focusNode.parentNode.childNodes.length>1
+              || ancestor.tagName.toLowerCase() == WYMeditor.OL
+              || ancestor.tagName.toLowerCase() == WYMeditor.UL)
+                this._doc.execCommand(cmd,'',null);
+        }
+
+    break;
+    
+    default:
+
+        if(param) this._doc.execCommand(cmd,'',param);
+        else this._doc.execCommand(cmd,'',null);
+    }
+    
+    //set to P if parent = BODY
+    var container = this.selected();
+    if(container && container.tagName.toLowerCase() == WYMeditor.BODY)
+        this._exec(WYMeditor.FORMAT_BLOCK, WYMeditor.P);
+    
+    //add event handlers on doc elements
+
+    this.listen();
+};
+
+/* @name selected
+ * @description Returns the selected container
+ */
+WYMeditor.WymClassMozilla.prototype.selected = function() {
+
+    var sel = this._iframe.contentWindow.getSelection();
+    var node = sel.focusNode;
+    if(node) {
+        if(node.nodeName == "#text") return(node.parentNode);
+        else return(node);
+    } else return(null);
+};
+
+WYMeditor.WymClassMozilla.prototype.addCssRule = function(styles, oCss) {
+
+    styles.insertRule(oCss.name + " {" + oCss.css + "}",
+        styles.cssRules.length);
+};
+
+
+//keydown handler, mainly used for keyboard shortcuts
+WYMeditor.WymClassMozilla.prototype.keydown = function(evt) {
+  
+  //'this' is the doc
+  var wym = WYMeditor.INSTANCES[this.title];
+  var container = null;  
+
+  if( evt.keyCode === WYMeditor.KEY.ENTER ){
+    container = wym.selected();
+    if( container ){
+      var $container = $( container ),
+          $parent = $container.parent(),
+          $embedded = $container.parents( '.embedded-item' );
+      if( $embedded.size() ){
+        // if the cursor is in the title, add a paragraph before the embedded item
+        var $newNode = $( '<p/>' );
+        if( $container.hasClass( 'title' ) ){
+          $embedded.before( $newNode );
+        // if the cursor is in the content, add one after it
+        } else if( $container.hasClass( 'content' ) ){
+          $embedded.after( $newNode );
+        }
+        wym.setFocusToNode( $newNode.get(0) );
+        // prevent the default - WYMeditor would otherwise chop the text and split it into paragraphs
+        return false;
+      }
+    }
+  }
+
+  if(evt.ctrlKey){
+    if(evt.keyCode == 66){
+      //CTRL+b => STRONG
+      wym._exec(WYMeditor.BOLD);
+      return false;
+    }
+    if(evt.keyCode == 73){
+      //CTRL+i => EMPHASIS
+      wym._exec(WYMeditor.ITALIC);
+      return false;
+    }
+  }
+
+  else if(evt.keyCode == 13) {
+    if(!evt.shiftKey){
+      //fix PRE bug #73
+      container = wym.selected();
+      if(container && container.tagName.toLowerCase() == WYMeditor.PRE) {
+        evt.preventDefault();
+        wym.insert('<p></p>');
+      }
+    }
+  }
+};
+
+//keyup handler, mainly used for cleanups
+WYMeditor.WymClassMozilla.prototype.keyup = function(evt) {
+
+  //'this' is the doc
+  var wym = WYMeditor.INSTANCES[this.title];
+
+  wym._selected_image = null;
+  var container = null;
+
+  if(evt.keyCode == 13 && !evt.shiftKey) {
+
+    //RETURN key
+    //cleanup <br><br> between paragraphs
+    jQuery(wym._doc.body).children(WYMeditor.BR).remove();
+  }
+
+  else if(evt.keyCode != 8
+       && evt.keyCode != 17
+       && evt.keyCode != 46
+       && evt.keyCode != 224
+       && !evt.metaKey
+       && !evt.ctrlKey) {
+
+    //NOT BACKSPACE, NOT DELETE, NOT CTRL, NOT COMMAND
+    //text nodes replaced by P
+
+    container = wym.selected();
+    var name = container.tagName.toLowerCase();
+
+    //fix forbidden main containers
+    if(
+      name == "strong" ||
+      name == "b" ||
+      name == "em" ||
+      name == "i" ||
+      name == "sub" ||
+      name == "sup" ||
+      name == "a"
+
+    ) name = container.parentNode.tagName.toLowerCase();
+
+    if(name == WYMeditor.BODY) wym._exec(WYMeditor.FORMAT_BLOCK, WYMeditor.P);
+  }
+};
+
+WYMeditor.WymClassMozilla.prototype.enableDesignMode = function() {
+    if(this.designMode == "off") {
+      try {
+        this.designMode = "on";
+        this.execCommand("styleWithCSS", '', false);
+      } catch(e) { }
+    }
+};
+
+WYMeditor.WymClassMozilla.prototype.setFocusToNode = function(node) {
+    var range = document.createRange();
+    range.selectNode(node);
+    var selected = this._iframe.contentWindow.getSelection();
+    selected.addRange(range);
+    selected.collapse(node, node.childNodes.length);
+    this._iframe.contentWindow.focus();
+};
+
+WYMeditor.WymClassMozilla.prototype.openBlockTag = function(tag, attributes)
+{
+  attributes = this.validator.getValidTagAttributes(tag, attributes);
+
+  // Handle Mozilla styled spans
+  if(tag == 'span' && attributes.style){
+    var new_tag = this.getTagForStyle(attributes.style);
+    if(new_tag){
+      this._tag_stack.pop();
+      tag = new_tag;
+      this._tag_stack.push(new_tag);
+      attributes.style = '';
+    }else{
+      return;
+    }
+  }
+
+  this.output += this.helper.tag(tag, attributes, true);
+};
+
+WYMeditor.WymClassMozilla.prototype.getTagForStyle = function(style) {
+
+  if(/bold/.test(style)) return 'strong';
+  if(/italic/.test(style)) return 'em';
+  if(/sub/.test(style)) return 'sub';
+  if(/super/.test(style)) return 'sup';
+  return false;
+};
+
+/*
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ *
+ * File Name:
+ *        jquery.wymeditor.opera.js
+ *        Opera specific class and functions.
+ *        See the documentation for more info.
+ *
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+ */
+
+WYMeditor.WymClassOpera = function(wym) {
+
+    this._wym = wym;
+    this._class = "class";
+    this._newLine = "\r\n";
+};
+
+WYMeditor.WymClassOpera.prototype.initIframe = function(iframe) {
+
+    this._iframe = iframe;
+    this._doc = iframe.contentWindow.document;
+
+    //add css rules from options
+    var styles = this._doc.styleSheets[0];
+    var aCss = eval(this._options.editorStyles);
+
+    this.addCssRules(this._doc, aCss);
+
+    this._doc.title = this._wym._index;
+
+    //set the text direction
+    jQuery('html', this._doc).attr('dir', this._options.direction);
+
+    //init designMode
+    this._doc.designMode = "on";
+
+    //init html value
+    this.html(this._wym._html);
+
+    //pre-bind functions
+    if(jQuery.isFunction(this._options.preBind)) this._options.preBind(this);
+
+    //bind external events
+    this._wym.bindEvents();
+
+    //bind editor keydown events
+    jQuery(this._doc).bind("keydown", this.keydown);
+
+    //bind editor events
+    jQuery(this._doc).bind("keyup", this.keyup);
+
+    //post-init functions
+    if(jQuery.isFunction(this._options.postInit)) this._options.postInit(this);
+
+    //add event listeners to doc elements, e.g. images
+    this.listen();
+};
+
+WYMeditor.WymClassOpera.prototype._exec = function(cmd,param) {
+
+    if(param) this._doc.execCommand(cmd,false,param);
+    else this._doc.execCommand(cmd);
+
+    this.listen();
+};
+
+WYMeditor.WymClassOpera.prototype.selected = function() {
+
+    var sel = this._iframe.contentWindow.getSelection();
+    var node = sel.focusNode;
+    if(node) {
+        if(node.nodeName == "#text") return(node.parentNode);
+        else return(node);
+    } else return(null);
+};
+
+WYMeditor.WymClassOpera.prototype.addCssRule = function(styles, oCss) {
+
+    styles.insertRule(oCss.name + " {" + oCss.css + "}",
+        styles.cssRules.length);
+};
+
+//keydown handler
+WYMeditor.WymClassOpera.prototype.keydown = function(evt) {
+
+  //'this' is the doc
+  var wym = WYMeditor.INSTANCES[this.title];
+  var sel = wym._iframe.contentWindow.getSelection();
+  var startNode = sel.getRangeAt(0).startContainer;
+
+  //Ensure there is a P container when the caret is not inside one
+  if(!jQuery(startNode).parentsOrSelf(
+                WYMeditor.MAIN_CONTAINERS.join(","))[0]
+      && !jQuery(startNode).parentsOrSelf('li')[0]
+      && evt.keyCode != WYMeditor.KEY.ENTER
+      && evt.keyCode != WYMeditor.KEY.LEFT
+      && evt.keyCode != WYMeditor.KEY.UP
+      && evt.keyCode != WYMeditor.KEY.RIGHT
+      && evt.keyCode != WYMeditor.KEY.DOWN
+      && evt.keyCode != WYMeditor.KEY.BACKSPACE
+      && evt.keyCode != WYMeditor.KEY.DELETE)
+      wym._exec(WYMeditor.FORMAT_BLOCK, WYMeditor.P);
+
+};
+
+//keyup handler
+WYMeditor.WymClassOpera.prototype.keyup = function(evt) {
+
+  //'this' is the doc
+  var wym = WYMeditor.INSTANCES[this.title];
+  wym._selected_image = null;
+};
+
+// TODO: implement me
+WYMeditor.WymClassOpera.prototype.setFocusToNode = function(node) {
+
+};
+
+/*
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ *
+ * File Name:
+ *        jquery.wymeditor.safari.js
+ *        Safari specific class and functions.
+ *        See the documentation for more info.
+ *
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+ *        Scott Lewis (lewiscot a-t gmail dotcom)
+ */
+
+WYMeditor.WymClassSafari = function(wym) {
+
+    this._wym = wym;
+    this._class = "class";
+    this._newLine = "\n";
+};
+
+WYMeditor.WymClassSafari.prototype.initIframe = function(iframe) {
+
+    this._iframe = iframe;
+    this._doc = iframe.contentDocument;
+
+    //add css rules from options
+
+    var styles = this._doc.styleSheets[0];
+    var aCss = eval(this._options.editorStyles);
+
+    this.addCssRules(this._doc, aCss);
+
+    this._doc.title = this._wym._index;
+
+    //set the text direction
+    jQuery('html', this._doc).attr('dir', this._options.direction);
+
+    //init designMode
+    this._doc.designMode = "on";
+
+    //init html value
+    this.html(this._wym._html);
+
+    //pre-bind functions
+    if(jQuery.isFunction(this._options.preBind)) this._options.preBind(this);
+
+    //bind external events
+    this._wym.bindEvents();
+
+    //bind editor keydown events
+    jQuery(this._doc).bind("keydown", this.keydown);
+
+    //bind editor keyup events
+    jQuery(this._doc).bind("keyup", this.keyup);
+
+    //post-init functions
+    if(jQuery.isFunction(this._options.postInit)) this._options.postInit(this);
+
+    //add event listeners to doc elements, e.g. images
+    this.listen();
+};
+
+WYMeditor.WymClassSafari.prototype._exec = function(cmd,param) {
+
+    if(!this.selected()) return(false);
+
+    switch(cmd) {
+
+    case WYMeditor.INDENT: case WYMeditor.OUTDENT:
+
+        var focusNode = this.selected();
+        var sel = this._iframe.contentWindow.getSelection();
+        var anchorNode = sel.anchorNode;
+        if(anchorNode.nodeName == "#text") anchorNode = anchorNode.parentNode;
+
+        focusNode = this.findUp(focusNode, WYMeditor.BLOCKS);
+        anchorNode = this.findUp(anchorNode, WYMeditor.BLOCKS);
+
+        if(focusNode && focusNode == anchorNode
+          && focusNode.tagName.toLowerCase() == WYMeditor.LI) {
+
+            var ancestor = focusNode.parentNode.parentNode;
+
+            if(focusNode.parentNode.childNodes.length>1
+              || ancestor.tagName.toLowerCase() == WYMeditor.OL
+              || ancestor.tagName.toLowerCase() == WYMeditor.UL)
+                this._doc.execCommand(cmd,'',null);
+        }
+
+    break;
+
+    case WYMeditor.INSERT_ORDEREDLIST: case WYMeditor.INSERT_UNORDEREDLIST:
+
+        this._doc.execCommand(cmd,'',null);
+
+        //Safari creates lists in e.g. paragraphs.
+        //Find the container, and remove it.
+        var focusNode = this.selected();
+        var container = this.findUp(focusNode, WYMeditor.MAIN_CONTAINERS);
+        if(container) jQuery(container).replaceWith(jQuery(container).html());
+
+    break;
+
+    default:
+
+        if(param) this._doc.execCommand(cmd,'',param);
+        else this._doc.execCommand(cmd,'',null);
+    }
+
+    //set to P if parent = BODY
+    var container = this.selected();
+    if(container && container.tagName.toLowerCase() == WYMeditor.BODY)
+        this._exec(WYMeditor.FORMAT_BLOCK, WYMeditor.P);
+
+    //add event handlers on doc elements
+    this.listen();
+};
+
+/* @name selected
+ * @description Returns the selected container
+ */
+WYMeditor.WymClassSafari.prototype.selected = function() {
+
+    var sel = this._iframe.contentWindow.getSelection();
+    var node = sel.focusNode;
+    if(node) {
+        if(node.nodeName == "#text") return(node.parentNode);
+        else return(node);
+    } else return(null);
+};
+
+WYMeditor.WymClassSafari.prototype.addCssRule = function(styles, oCss) {
+
+    styles.insertRule(oCss.name + " {" + oCss.css + "}",
+        styles.cssRules.length);
+};
+
+
+//keydown handler, mainly used for keyboard shortcuts
+WYMeditor.WymClassSafari.prototype.keydown = function(evt) {
+
+  //'this' is the doc
+  var wym = WYMeditor.INSTANCES[this.title];
+
+  if( evt.keyCode === WYMeditor.KEY.ENTER ){
+    var container = wym.selected();
+    if( container ){
+      var $container = $( container ),
+          $parent = $container.parent(),
+          $embedded = $container.parents( '.embedded-item' );
+      if( $embedded.size() ){
+        // if the cursor is in the title, add a paragraph before the embedded item
+        var $newNode = $( '<p/>' );
+        if( $container.hasClass( 'title' ) ){
+          $embedded.before( $newNode );
+        // if the cursor is in the content, add one after it
+        } else if( $container.hasClass( 'content' ) ){
+          $embedded.after( $newNode );
+        }
+        wym.setFocusToNode( $newNode.get(0) );
+        // prevent the default - WYMeditor would otherwise chop the text and split it into paragraphs
+        return false;
+      }
+    }
+  }
+  
+  if(evt.ctrlKey){
+    if(evt.keyCode == 66){
+      //CTRL+b => STRONG
+      wym._exec(WYMeditor.BOLD);
+      return false;
+    }
+    if(evt.keyCode == 73){
+      //CTRL+i => EMPHASIS
+      wym._exec(WYMeditor.ITALIC);
+      return false;
+    }
+  }
+};
+
+//keyup handler, mainly used for cleanups
+WYMeditor.WymClassSafari.prototype.keyup = function(evt) {
+
+  //'this' is the doc
+  var wym = WYMeditor.INSTANCES[this.title];
+  
+  wym._selected_image = null;
+  var container = null;
+
+  if(evt.keyCode == 13 && !evt.shiftKey) {
+  
+    //RETURN key
+    //cleanup <br><br> between paragraphs
+    jQuery(wym._doc.body).children(WYMeditor.BR).remove();
+    
+    //fix PRE bug #73
+    container = wym.selected();
+    if(container && container.tagName.toLowerCase() == WYMeditor.PRE)
+        wym._exec(WYMeditor.FORMAT_BLOCK, WYMeditor.P); //create P after PRE
+  }
+
+  //fix #112
+  if(evt.keyCode == 13 && evt.shiftKey) {
+    wym._exec('InsertLineBreak');
+  }
+  
+  if(evt.keyCode != 8
+       && evt.keyCode != 17
+       && evt.keyCode != 46
+       && evt.keyCode != 224
+       && !evt.metaKey
+       && !evt.ctrlKey) {
+      
+    //NOT BACKSPACE, NOT DELETE, NOT CTRL, NOT COMMAND
+    //text nodes replaced by P
+    
+    container = wym.selected();
+    var name = container.tagName.toLowerCase();
+
+    //fix forbidden main containers
+    if(
+      name == "strong" ||
+      name == "b" ||
+      name == "em" ||
+      name == "i" ||
+      name == "sub" ||
+      name == "sup" ||
+      name == "a" ||
+      name == "span" //fix #110
+
+    ) name = container.parentNode.tagName.toLowerCase();
+
+    if(name == WYMeditor.BODY || name == WYMeditor.DIV) wym._exec(WYMeditor.FORMAT_BLOCK, WYMeditor.P); //fix #110 for DIV
+  }
+};
+
+WYMeditor.WymClassSafari.prototype.setFocusToNode = function(node) {
+    var range = this._iframe.contentDocument.createRange();
+    range.selectNode(node);
+    var selected = this._iframe.contentWindow.getSelection();
+    selected.addRange(range);
+    selected.collapse(node, node.childNodes.length);
+    this._iframe.contentWindow.focus();
+};
+
+WYMeditor.WymClassSafari.prototype.openBlockTag = function(tag, attributes)
+{
+  attributes = this.validator.getValidTagAttributes(tag, attributes);
+
+  // Handle Safari styled spans
+  if(tag == 'span' && attributes.style) {
+    var new_tag = this.getTagForStyle(attributes.style);
+    if(new_tag){
+      this._tag_stack.pop();
+      tag = new_tag;
+      this._tag_stack.push(new_tag);
+      attributes.style = '';
+      
+      //should fix #125 - also removed the xhtml() override
+      if(typeof attributes['class'] == 'string')
+        attributes['class'] = attributes['class'].replace(/apple-style-span/gi, '');
+    
+    } else {
+      return;
+    }
+  }
+  
+  this.output += this.helper.tag(tag, attributes, true);
+};
+
+WYMeditor.WymClassSafari.prototype.getTagForStyle = function(style) {
+
+  if(/bold/.test(style)) return 'strong';
+  if(/italic/.test(style)) return 'em';
+  if(/sub/.test(style)) return 'sub';
+  if(/super/.test(style)) return 'sup';
+  return false;
+};
diff --git a/client/galaxy/scripts/libs/jquery/jstorage.js b/client/galaxy/scripts/libs/jquery/jstorage.js
new file mode 100644
index 0000000..b8b4ee0
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jstorage.js
@@ -0,0 +1,933 @@
+/*
+ * ----------------------------- JSTORAGE -------------------------------------
+ * Simple local storage wrapper to save data on the browser side, supporting
+ * all major browsers - IE6+, Firefox2+, Safari4+, Chrome4+ and Opera 10.5+
+ *
+ * Copyright (c) 2010 - 2012 Andris Reinman, andris.reinman at gmail.com
+ * Project homepage: www.jstorage.info
+ *
+ * Licensed under MIT-style license:
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+ (function(){
+    var
+        /* jStorage version */
+        JSTORAGE_VERSION = "0.4.4",
+
+        /* detect a dollar object or create one if not found */
+        $ = window.jQuery || window.$ || (window.$ = {}),
+
+        /* check for a JSON handling support */
+        JSON = {
+            parse:
+                window.JSON && (window.JSON.parse || window.JSON.decode) ||
+                String.prototype.evalJSON && function(str){return String(str).evalJSON();} ||
+                $.parseJSON ||
+                $.evalJSON,
+            stringify:
+                Object.toJSON ||
+                window.JSON && (window.JSON.stringify || window.JSON.encode) ||
+                $.toJSON
+        };
+
+    // Break if no JSON support was found
+    if(!('parse' in JSON) || !('stringify' in JSON)){
+        throw new Error("No JSON support found, include //cdnjs.cloudflare.com/ajax/libs/json2/20110223/json2.js to page");
+    }
+
+    var
+        /* This is the object, that holds the cached values */
+        _storage = {__jstorage_meta:{CRC32:{}}},
+
+        /* Actual browser storage (localStorage or globalStorage['domain']) */
+        _storage_service = {jStorage:"{}"},
+
+        /* DOM element for older IE versions, holds userData behavior */
+        _storage_elm = null,
+
+        /* How much space does the storage take */
+        _storage_size = 0,
+
+        /* which backend is currently used */
+        _backend = false,
+
+        /* onchange observers */
+        _observers = {},
+
+        /* timeout to wait after onchange event */
+        _observer_timeout = false,
+
+        /* last update time */
+        _observer_update = 0,
+
+        /* pubsub observers */
+        _pubsub_observers = {},
+
+        /* skip published items older than current timestamp */
+        _pubsub_last = +new Date(),
+
+        /* Next check for TTL */
+        _ttl_timeout,
+
+        /**
+         * XML encoding and decoding as XML nodes can't be JSON'ized
+         * XML nodes are encoded and decoded if the node is the value to be saved
+         * but not if it's a property of another object.
+         * E.g.:
+         *   $.jStorage.set("key", xmlNode);        // IS OK
+         *   $.jStorage.set("key", {xml: xmlNode}); // NOT OK
+         */
+        _XMLService = {
+
+            /**
+             * Validates a XML node to be XML
+             * based on jQuery.isXML function
+             */
+            isXML: function(elm){
+                var documentElement = (elm ? elm.ownerDocument || elm : 0).documentElement;
+                return documentElement ? documentElement.nodeName !== "HTML" : false;
+            },
+
+            /**
+             * Encodes an XML node to a string,
+             * based on http://www.mercurytide.co.uk/news/article/issues-when-working-ajax/
+             */
+            encode: function(xmlNode) {
+                if(!this.isXML(xmlNode)){
+                    return false;
+                }
+                try{ // Mozilla, Webkit, Opera
+                    return new XMLSerializer().serializeToString(xmlNode);
+                }catch(E1) {
+                    try {  // IE
+                        return xmlNode.xml;
+                    }catch(E2){}
+                }
+                return false;
+            },
+
+            /**
+             * Decodes an XML node from a string,
+             * loosely based on http://outwestmedia.com/jquery-plugins/xmldom/
+             */
+            decode: function(xmlString){
+                var dom_parser = ("DOMParser" in window && (new DOMParser()).parseFromString) ||
+                        (window.ActiveXObject && function(_xmlString) {
+                    var xml_doc = new ActiveXObject('Microsoft.XMLDOM');
+                    xml_doc.async = 'false';
+                    xml_doc.loadXML(_xmlString);
+                    return xml_doc;
+                }),
+                resultXML;
+                if(!dom_parser){
+                    return false;
+                }
+                resultXML = dom_parser.call("DOMParser" in window && (new DOMParser()) || window, xmlString, 'text/xml');
+                return this.isXML(resultXML)?resultXML:false;
+            }
+        };
+
+
+    ////////////////////////// PRIVATE METHODS ////////////////////////
+
+    /**
+     * Initialization function. Detects if the browser supports DOM Storage
+     * or userData behavior and behaves accordingly.
+     */
+    function _init(){
+        /* Check if browser supports localStorage */
+        var localStorageReallyWorks = false;
+        if("localStorage" in window){
+            try {
+                window.localStorage.setItem('_tmptest', 'tmpval');
+                localStorageReallyWorks = true;
+                window.localStorage.removeItem('_tmptest');
+            } catch(BogusQuotaExceededErrorOnIos5) {
+                // Thanks be to iOS5 Private Browsing mode which throws
+                // QUOTA_EXCEEDED_ERR (DOM Exception 22).
+            }
+        }
+
+        if(localStorageReallyWorks){
+            try {
+                if(window.localStorage) {
+                    _storage_service = window.localStorage;
+                    _backend = "localStorage";
+                    _observer_update = _storage_service.jStorage_update;
+                }
+            } catch(E3) {/* Firefox fails when touching localStorage and cookies are disabled */}
+        }
+        /* Check if browser supports globalStorage */
+        else if("globalStorage" in window){
+            try {
+                if(window.globalStorage) {
+                    if(window.location.hostname == 'localhost'){
+                        _storage_service = window.globalStorage['localhost.localdomain'];
+                    }
+                    else{
+                        _storage_service = window.globalStorage[window.location.hostname];
+                    }
+                    _backend = "globalStorage";
+                    _observer_update = _storage_service.jStorage_update;
+                }
+            } catch(E4) {/* Firefox fails when touching localStorage and cookies are disabled */}
+        }
+        /* Check if browser supports userData behavior */
+        else {
+            _storage_elm = document.createElement('link');
+            if(_storage_elm.addBehavior){
+
+                /* Use a DOM element to act as userData storage */
+                _storage_elm.style.behavior = 'url(#default#userData)';
+
+                /* userData element needs to be inserted into the DOM! */
+                document.getElementsByTagName('head')[0].appendChild(_storage_elm);
+
+                try{
+                    _storage_elm.load("jStorage");
+                }catch(E){
+                    // try to reset cache
+                    _storage_elm.setAttribute("jStorage", "{}");
+                    _storage_elm.save("jStorage");
+                    _storage_elm.load("jStorage");
+                }
+
+                var data = "{}";
+                try{
+                    data = _storage_elm.getAttribute("jStorage");
+                }catch(E5){}
+
+                try{
+                    _observer_update = _storage_elm.getAttribute("jStorage_update");
+                }catch(E6){}
+
+                _storage_service.jStorage = data;
+                _backend = "userDataBehavior";
+            }else{
+                _storage_elm = null;
+                return;
+            }
+        }
+
+        // Load data from storage
+        _load_storage();
+
+        // remove dead keys
+        _handleTTL();
+
+        // start listening for changes
+        _setupObserver();
+
+        // initialize publish-subscribe service
+        _handlePubSub();
+
+        // handle cached navigation
+        if("addEventListener" in window){
+            window.addEventListener("pageshow", function(event){
+                if(event.persisted){
+                    _storageObserver();
+                }
+            }, false);
+        }
+    }
+
+    /**
+     * Reload data from storage when needed
+     */
+    function _reloadData(){
+        var data = "{}";
+
+        if(_backend == "userDataBehavior"){
+            _storage_elm.load("jStorage");
+
+            try{
+                data = _storage_elm.getAttribute("jStorage");
+            }catch(E5){}
+
+            try{
+                _observer_update = _storage_elm.getAttribute("jStorage_update");
+            }catch(E6){}
+
+            _storage_service.jStorage = data;
+        }
+
+        _load_storage();
+
+        // remove dead keys
+        _handleTTL();
+
+        _handlePubSub();
+    }
+
+    /**
+     * Sets up a storage change observer
+     */
+    function _setupObserver(){
+        if(_backend == "localStorage" || _backend == "globalStorage"){
+            if("addEventListener" in window){
+                window.addEventListener("storage", _storageObserver, false);
+            }else{
+                document.attachEvent("onstorage", _storageObserver);
+            }
+        }else if(_backend == "userDataBehavior"){
+            setInterval(_storageObserver, 1000);
+        }
+    }
+
+    /**
+     * Fired on any kind of data change, needs to check if anything has
+     * really been changed
+     */
+    function _storageObserver(){
+        var updateTime;
+        // debounce change notifications with a short timeout
+        clearTimeout(_observer_timeout);
+        _observer_timeout = setTimeout(function(){
+
+            if(_backend == "localStorage" || _backend == "globalStorage"){
+                updateTime = _storage_service.jStorage_update;
+            }else if(_backend == "userDataBehavior"){
+                _storage_elm.load("jStorage");
+                try{
+                    updateTime = _storage_elm.getAttribute("jStorage_update");
+                }catch(E5){}
+            }
+
+            if(updateTime && updateTime != _observer_update){
+                _observer_update = updateTime;
+                _checkUpdatedKeys();
+            }
+
+        }, 25);
+    }
+
+    /**
+     * Reloads the data and checks if any keys are changed
+     */
+    function _checkUpdatedKeys(){
+        var oldCrc32List = JSON.parse(JSON.stringify(_storage.__jstorage_meta.CRC32)),
+            newCrc32List;
+
+        _reloadData();
+        newCrc32List = JSON.parse(JSON.stringify(_storage.__jstorage_meta.CRC32));
+
+        var key,
+            updated = [],
+            removed = [];
+
+        for(key in oldCrc32List){
+            if(oldCrc32List.hasOwnProperty(key)){
+                if(!newCrc32List[key]){
+                    removed.push(key);
+                    continue;
+                }
+                if(oldCrc32List[key] != newCrc32List[key] && String(oldCrc32List[key]).substr(0,2) == "2."){
+                    updated.push(key);
+                }
+            }
+        }
+
+        for(key in newCrc32List){
+            if(newCrc32List.hasOwnProperty(key)){
+                if(!oldCrc32List[key]){
+                    updated.push(key);
+                }
+            }
+        }
+
+        _fireObservers(updated, "updated");
+        _fireObservers(removed, "deleted");
+    }
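+
+    // Illustrative note (added for clarity): CRC32 entries written by this version are
+    // prefixed with "2." (see set() below), so the "2." check above only compares
+    // fingerprints produced in the same format.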
+
+    /**
+     * Fires observers for updated keys
+     *
+     * @param {Array|String} keys Array of key names or a key
+     * @param {String} action What happened with the value (updated, deleted, flushed)
+     */
+    function _fireObservers(keys, action){
+        keys = [].concat(keys || []);
+        if(action == "flushed"){
+            keys = [];
+            for(var key in _observers){
+                if(_observers.hasOwnProperty(key)){
+                    keys.push(key);
+                }
+            }
+            action = "deleted";
+        }
+        for(var i=0, len = keys.length; i<len; i++){
+            if(_observers[keys[i]]){
+                for(var j=0, jlen = _observers[keys[i]].length; j<jlen; j++){
+                    _observers[keys[i]][j](keys[i], action);
+                }
+            }
+            if(_observers["*"]){
+                for(var j=0, jlen = _observers["*"].length; j<jlen; j++){
+                    _observers["*"][j](keys[i], action);
+                }
+            }
+        }
+    }
+
+    /**
+     * Publishes key change to listeners
+     */
+    function _publishChange(){
+        var updateTime = (+new Date()).toString();
+
+        if(_backend == "localStorage" || _backend == "globalStorage"){
+            try {
+                _storage_service.jStorage_update = updateTime;
+            } catch (E8) {
+                // safari private mode has been enabled after the jStorage initialization
+                _backend = false;
+            }
+        }else if(_backend == "userDataBehavior"){
+            _storage_elm.setAttribute("jStorage_update", updateTime);
+            _storage_elm.save("jStorage");
+        }
+
+        _storageObserver();
+    }
+
+    /**
+     * Loads the data from the storage based on the supported mechanism
+     */
+    function _load_storage(){
+        /* if jStorage string is retrieved, then decode it */
+        if(_storage_service.jStorage){
+            try{
+                _storage = JSON.parse(String(_storage_service.jStorage));
+            }catch(E6){_storage_service.jStorage = "{}";}
+        }else{
+            _storage_service.jStorage = "{}";
+        }
+        _storage_size = _storage_service.jStorage?String(_storage_service.jStorage).length:0;
+
+        if(!_storage.__jstorage_meta){
+            _storage.__jstorage_meta = {};
+        }
+        if(!_storage.__jstorage_meta.CRC32){
+            _storage.__jstorage_meta.CRC32 = {};
+        }
+    }
+
+    /**
+     * This function provides the "save" mechanism to store the jStorage object
+     */
+    function _save(){
+        _dropOldEvents(); // remove expired events
+        try{
+            _storage_service.jStorage = JSON.stringify(_storage);
+            // If userData is used as the storage engine, additionally write the data back to the behavior element
+            if(_storage_elm) {
+                _storage_elm.setAttribute("jStorage",_storage_service.jStorage);
+                _storage_elm.save("jStorage");
+            }
+            _storage_size = _storage_service.jStorage?String(_storage_service.jStorage).length:0;
+        }catch(E7){/* probably cache is full, nothing is saved this way*/}
+    }
+
+    /**
+     * Checks that a key is set and is a string or a number
+     *
+     * @param {String} key Key name
+     */
+    function _checkKey(key){
+        if(!key || (typeof key != "string" && typeof key != "number")){
+            throw new TypeError('Key name must be string or numeric');
+        }
+        if(key == "__jstorage_meta"){
+            throw new TypeError('Reserved key name');
+        }
+        return true;
+    }
+
+    /**
+     * Removes expired keys
+     */
+    function _handleTTL(){
+        var curtime, i, TTL, CRC32, nextExpire = Infinity, changed = false, deleted = [];
+
+        clearTimeout(_ttl_timeout);
+
+        if(!_storage.__jstorage_meta || typeof _storage.__jstorage_meta.TTL != "object"){
+            // nothing to do here
+            return;
+        }
+
+        curtime = +new Date();
+        TTL = _storage.__jstorage_meta.TTL;
+
+        CRC32 = _storage.__jstorage_meta.CRC32;
+        for(i in TTL){
+            if(TTL.hasOwnProperty(i)){
+                if(TTL[i] <= curtime){
+                    delete TTL[i];
+                    delete CRC32[i];
+                    delete _storage[i];
+                    changed = true;
+                    deleted.push(i);
+                }else if(TTL[i] < nextExpire){
+                    nextExpire = TTL[i];
+                }
+            }
+        }
+
+        // set next check
+        if(nextExpire != Infinity){
+            _ttl_timeout = setTimeout(_handleTTL, nextExpire - curtime);
+        }
+
+        // save changes
+        if(changed){
+            _save();
+            _publishChange();
+            _fireObservers(deleted, "deleted");
+        }
+    }
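+
+    // Illustrative note (added for clarity): TTL metadata holds absolute expiry
+    // timestamps, e.g. _storage.__jstorage_meta.TTL = { "user": 1484480000000 },
+    // so a key is dropped once (+new Date()) passes its recorded value.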
+
+    /**
+     * Checks if there's any events on hold to be fired to listeners
+     */
+    function _handlePubSub(){
+        var i, len;
+        if(!_storage.__jstorage_meta.PubSub){
+            return;
+        }
+        var pubelm,
+            _pubsubCurrent = _pubsub_last;
+
+        for(i=len=_storage.__jstorage_meta.PubSub.length-1; i>=0; i--){
+            pubelm = _storage.__jstorage_meta.PubSub[i];
+            if(pubelm[0] > _pubsub_last){
+                _pubsubCurrent = pubelm[0];
+                _fireSubscribers(pubelm[1], pubelm[2]);
+            }
+        }
+
+        _pubsub_last = _pubsubCurrent;
+    }
+
+    /**
+     * Fires all subscriber listeners for a pubsub channel
+     *
+     * @param {String} channel Channel name
+     * @param {Mixed} payload Payload data to deliver
+     */
+    function _fireSubscribers(channel, payload){
+        if(_pubsub_observers[channel]){
+            for(var i=0, len = _pubsub_observers[channel].length; i<len; i++){
+                // send immutable data that can't be modified by listeners
+                _pubsub_observers[channel][i](channel, JSON.parse(JSON.stringify(payload)));
+            }
+        }
+    }
+
+    /**
+     * Remove old events from the publish stream (at least 2sec old)
+     */
+    function _dropOldEvents(){
+        if(!_storage.__jstorage_meta.PubSub){
+            return;
+        }
+
+        var retire = +new Date() - 2000;
+
+        for(var i=0, len = _storage.__jstorage_meta.PubSub.length; i<len; i++){
+            if(_storage.__jstorage_meta.PubSub[i][0] <= retire){
+                // deleteCount is needed for IE6
+                _storage.__jstorage_meta.PubSub.splice(i, _storage.__jstorage_meta.PubSub.length - i);
+                break;
+            }
+        }
+
+        if(!_storage.__jstorage_meta.PubSub.length){
+            delete _storage.__jstorage_meta.PubSub;
+        }
+
+    }
+
+    /**
+     * Publish payload to a channel
+     *
+     * @param {String} channel Channel name
+     * @param {Mixed} payload Payload to send to the subscribers
+     */
+    function _publish(channel, payload){
+        if(!_storage.__jstorage_meta){
+            _storage.__jstorage_meta = {};
+        }
+        if(!_storage.__jstorage_meta.PubSub){
+            _storage.__jstorage_meta.PubSub = [];
+        }
+
+        _storage.__jstorage_meta.PubSub.unshift([+new Date, channel, payload]);
+
+        _save();
+        _publishChange();
+    }
+
+
+    /**
+     * JS Implementation of MurmurHash2
+     *
+     *  SOURCE: https://github.com/garycourt/murmurhash-js (MIT licensed)
+     *
+     * @author <a href="mailto:gary.court at gmail.com">Gary Court</a>
+     * @see http://github.com/garycourt/murmurhash-js
+     * @author <a href="mailto:aappleby at gmail.com">Austin Appleby</a>
+     * @see http://sites.google.com/site/murmurhash/
+     *
+     * @param {string} str ASCII only
+     * @param {number} seed Positive integer only
+     * @return {number} 32-bit positive integer hash
+     */
+
+    function murmurhash2_32_gc(str, seed) {
+        var
+            l = str.length,
+            h = seed ^ l,
+            i = 0,
+            k;
+
+        while (l >= 4) {
+            k =
+                ((str.charCodeAt(i) & 0xff)) |
+                ((str.charCodeAt(++i) & 0xff) << 8) |
+                ((str.charCodeAt(++i) & 0xff) << 16) |
+                ((str.charCodeAt(++i) & 0xff) << 24);
+
+            k = (((k & 0xffff) * 0x5bd1e995) + ((((k >>> 16) * 0x5bd1e995) & 0xffff) << 16));
+            k ^= k >>> 24;
+            k = (((k & 0xffff) * 0x5bd1e995) + ((((k >>> 16) * 0x5bd1e995) & 0xffff) << 16));
+
+            h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16)) ^ k;
+
+            l -= 4;
+            ++i;
+        }
+
+        switch (l) {
+            case 3: h ^= (str.charCodeAt(i + 2) & 0xff) << 16;
+            case 2: h ^= (str.charCodeAt(i + 1) & 0xff) << 8;
+            case 1: h ^= (str.charCodeAt(i) & 0xff);
+                h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16));
+        }
+
+        h ^= h >>> 13;
+        h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16));
+        h ^= h >>> 15;
+
+        return h >>> 0;
+    }
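+
+    // Illustrative usage sketch (not part of the original jStorage source): the hash is
+    // deterministic - the same string and seed always yield the same 32-bit unsigned
+    // integer. jStorage uses it to fingerprint stored values, e.g.:
+    //
+    //   var crc = "2." + murmurhash2_32_gc(JSON.stringify({a: 1}), 0x9747b28c);
+    //   // comparing two such fingerprints detects whether a key changed in another tab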
+
+    ////////////////////////// PUBLIC INTERFACE /////////////////////////
+
+    $.jStorage = {
+        /* Version number */
+        version: JSTORAGE_VERSION,
+
+        /**
+         * Sets a key's value.
+         *
+         * @param {String} key Key to set. If this value is not set or not
+         *              a string an exception is raised.
+         * @param {Mixed} value Value to set. This can be any value that is JSON
+         *              compatible (Numbers, Strings, Objects etc.).
+         * @param {Object} [options] - possible options to use
+         * @param {Number} [options.TTL] - optional TTL value
+         * @return {Mixed} the used value
+         */
+        set: function(key, value, options){
+            _checkKey(key);
+
+            options = options || {};
+
+            // undefined values are deleted automatically
+            if(typeof value == "undefined"){
+                this.deleteKey(key);
+                return value;
+            }
+
+            if(_XMLService.isXML(value)){
+                value = {_is_xml:true,xml:_XMLService.encode(value)};
+            }else if(typeof value == "function"){
+                return undefined; // functions can't be saved!
+            }else if(value && typeof value == "object"){
+                // clone the object before saving to _storage tree
+                value = JSON.parse(JSON.stringify(value));
+            }
+
+            _storage[key] = value;
+
+            _storage.__jstorage_meta.CRC32[key] = "2." + murmurhash2_32_gc(JSON.stringify(value), 0x9747b28c);
+
+            this.setTTL(key, options.TTL || 0); // also handles saving and _publishChange
+
+            _fireObservers(key, "updated");
+            return value;
+        },
+
+        /**
+         * Looks up a key in cache
+         *
+         * @param {String} key - Key to look up.
+         * @param {mixed} def - Default value to return, if key didn't exist.
+         * @return {Mixed} the key value, default value or null
+         */
+        get: function(key, def){
+            _checkKey(key);
+            if(key in _storage){
+                if(_storage[key] && typeof _storage[key] == "object" && _storage[key]._is_xml) {
+                    return _XMLService.decode(_storage[key].xml);
+                }else{
+                    return _storage[key];
+                }
+            }
+            return typeof(def) == 'undefined' ? null : def;
+        },
+
+        /**
+         * Deletes a key from cache.
+         *
+         * @param {String} key - Key to delete.
+         * @return {Boolean} true if key existed or false if it didn't
+         */
+        deleteKey: function(key){
+            _checkKey(key);
+            if(key in _storage){
+                delete _storage[key];
+                // remove from TTL list
+                if(typeof _storage.__jstorage_meta.TTL == "object" &&
+                  key in _storage.__jstorage_meta.TTL){
+                    delete _storage.__jstorage_meta.TTL[key];
+                }
+
+                delete _storage.__jstorage_meta.CRC32[key];
+
+                _save();
+                _publishChange();
+                _fireObservers(key, "deleted");
+                return true;
+            }
+            return false;
+        },
+
+        /**
+         * Sets a TTL for a key, or remove it if ttl value is 0 or below
+         *
+         * @param {String} key - key to set the TTL for
+         * @param {Number} ttl - TTL timeout in milliseconds
+         * @return {Boolean} true if key existed or false if it didn't
+         */
+        setTTL: function(key, ttl){
+            var curtime = +new Date();
+            _checkKey(key);
+            ttl = Number(ttl) || 0;
+            if(key in _storage){
+
+                if(!_storage.__jstorage_meta.TTL){
+                    _storage.__jstorage_meta.TTL = {};
+                }
+
+                // Set TTL value for the key
+                if(ttl>0){
+                    _storage.__jstorage_meta.TTL[key] = curtime + ttl;
+                }else{
+                    delete _storage.__jstorage_meta.TTL[key];
+                }
+
+                _save();
+
+                _handleTTL();
+
+                _publishChange();
+                return true;
+            }
+            return false;
+        },
+
+        /**
+         * Gets remaining TTL (in milliseconds) for a key or 0 when no TTL has been set
+         *
+         * @param {String} key Key to check
+         * @return {Number} Remaining TTL in milliseconds
+         */
+        getTTL: function(key){
+            var curtime = +new Date(), ttl;
+            _checkKey(key);
+            if(key in _storage && _storage.__jstorage_meta.TTL && _storage.__jstorage_meta.TTL[key]){
+                ttl = _storage.__jstorage_meta.TTL[key] - curtime;
+                return ttl || 0;
+            }
+            return 0;
+        },
+
+        /**
+         * Deletes everything in cache.
+         *
+         * @return {Boolean} Always true
+         */
+        flush: function(){
+            _storage = {__jstorage_meta:{CRC32:{}}};
+            _save();
+            _publishChange();
+            _fireObservers(null, "flushed");
+            return true;
+        },
+
+        /**
+         * Returns a read-only copy of _storage
+         *
+         * @return {Object} Read-only copy of _storage
+        */
+        storageObj: function(){
+            function F() {}
+            F.prototype = _storage;
+            return new F();
+        },
+
+        /**
+         * Returns an index of all used keys as an array
+         * ['key1', 'key2',..'keyN']
+         *
+         * @return {Array} Used keys
+        */
+        index: function(){
+            var index = [], i;
+            for(i in _storage){
+                if(_storage.hasOwnProperty(i) && i != "__jstorage_meta"){
+                    index.push(i);
+                }
+            }
+            return index;
+        },
+
+        /**
+         * How much space does the storage take?
+         *
+         * @return {Number} Storage size in chars (not the same as in bytes,
+         *                  since some chars may take several bytes)
+         */
+        storageSize: function(){
+            return _storage_size;
+        },
+
+        /**
+         * Which backend is currently in use?
+         *
+         * @return {String} Backend name
+         */
+        currentBackend: function(){
+            return _backend;
+        },
+
+        /**
+         * Test if storage is available
+         *
+         * @return {Boolean} True if storage can be used
+         */
+        storageAvailable: function(){
+            return !!_backend;
+        },
+
+        /**
+         * Register change listeners
+         *
+         * @param {String} key Key name
+         * @param {Function} callback Function to run when the key changes
+         */
+        listenKeyChange: function(key, callback){
+            _checkKey(key);
+            if(!_observers[key]){
+                _observers[key] = [];
+            }
+            _observers[key].push(callback);
+        },
+
+        /**
+         * Remove change listeners
+         *
+         * @param {String} key Key name to unregister listeners against
+         * @param {Function} [callback] If set, unregister only this callback; if not set, unregister all listeners for the key
+         */
+        stopListening: function(key, callback){
+            _checkKey(key);
+
+            if(!_observers[key]){
+                return;
+            }
+
+            if(!callback){
+                delete _observers[key];
+                return;
+            }
+
+            for(var i = _observers[key].length - 1; i>=0; i--){
+                if(_observers[key][i] == callback){
+                    _observers[key].splice(i,1);
+                }
+            }
+        },
+
+        /**
+         * Subscribe to a Publish/Subscribe event stream
+         *
+         * @param {String} channel Channel name
+         * @param {Function} callback Function to run when something is published to the channel
+         */
+        subscribe: function(channel, callback){
+            channel = (channel || "").toString();
+            if(!channel){
+                throw new TypeError('Channel not defined');
+            }
+            if(!_pubsub_observers[channel]){
+                _pubsub_observers[channel] = [];
+            }
+            _pubsub_observers[channel].push(callback);
+        },
+
+        /**
+         * Publish data to an event stream
+         *
+         * @param {String} channel Channel name
+         * @param {Mixed} payload Payload to deliver
+         */
+        publish: function(channel, payload){
+            channel = (channel || "").toString();
+            if(!channel){
+                throw new TypeError('Channel not defined');
+            }
+
+            _publish(channel, payload);
+        },
+
+        /**
+         * Reloads the data from browser storage
+         */
+        reInit: function(){
+            _reloadData();
+        }
+    };
+
+    // Initialize jStorage
+    _init();
+
+})();
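+
+// Illustrative usage sketch (not part of the original jStorage source), assuming jQuery
+// and this file are already loaded on the page:
+//
+//   $.jStorage.set('user', {name: 'anon'}, {TTL: 60000}); // expires after 60 seconds
+//   var user = $.jStorage.get('user', null);              // null if missing or expired
+//   $.jStorage.listenKeyChange('user', function(key, action){
+//       console.log(key, action); // action is "updated" or "deleted"
+//   });
+//   $.jStorage.publish('chat', {msg: 'hi'});              // cross-tab publish/subscribe
+//   $.jStorage.deleteKey('user');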
diff --git a/client/galaxy/scripts/libs/jquery/jstree.js b/client/galaxy/scripts/libs/jquery/jstree.js
new file mode 100755
index 0000000..c6199f3
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/jstree.js
@@ -0,0 +1,6743 @@
+/*globals jQuery, define, exports, require, window, document */
+(function (factory) {
+	"use strict";
+	if (typeof define === 'function' && define.amd) {
+		define(['jquery'], factory);
+	}
+	else if(typeof exports === 'object') {
+		factory(require('jquery'));
+	}
+	else {
+		factory(jQuery);
+	}
+}(function ($, undefined) {
+	"use strict";
+/*!
+ * jsTree 3.0.3
+ * http://jstree.com/
+ *
+ * Copyright (c) 2014 Ivan Bozhanov (http://vakata.com)
+ *
+ * Licensed same as jquery - under the terms of the MIT License
+ *   http://www.opensource.org/licenses/mit-license.php
+ */
+/*!
+ * if using jslint please allow for the jQuery global and use following options: 
+ * jslint: browser: true, ass: true, bitwise: true, continue: true, nomen: true, plusplus: true, regexp: true, unparam: true, todo: true, white: true
+ */
+
+	// prevent another load? maybe there is a better way?
+	if($.jstree) {
+		return;
+	}
+
+	/**
+	 * ### jsTree core functionality
+	 */
+
+	// internal variables
+	var instance_counter = 0,
+		ccp_node = false,
+		ccp_mode = false,
+		ccp_inst = false,
+		themes_loaded = [],
+		src = $('script:last').attr('src'),
+		_d = document, _node = _d.createElement('LI'), _temp1, _temp2;
+
+	_node.setAttribute('role', 'treeitem');
+	_temp1 = _d.createElement('I');
+	_temp1.className = 'jstree-icon jstree-ocl';
+	_node.appendChild(_temp1);
+	_temp1 = _d.createElement('A');
+	_temp1.className = 'jstree-anchor';
+	_temp1.setAttribute('href','#');
+	_temp2 = _d.createElement('I');
+	_temp2.className = 'jstree-icon jstree-themeicon';
+	_temp1.appendChild(_temp2);
+	_node.appendChild(_temp1);
+	_temp1 = _temp2 = null;
+
+
+	/**
+	 * holds all jstree related functions and variables, including the actual class and methods to create, access and manipulate instances.
+	 * @name $.jstree
+	 */
+	$.jstree = {
+		/** 
+		 * specifies the jstree version in use
+		 * @name $.jstree.version
+		 */
+		version : '3.0.3',
+		/**
+		 * holds all the default options used when creating new instances
+		 * @name $.jstree.defaults
+		 */
+		defaults : {
+			/**
+			 * configure which plugins will be active on an instance. Should be an array of strings, where each element is a plugin name. The default is `[]`
+			 * @name $.jstree.defaults.plugins
+			 */
+			plugins : []
+		},
+		/**
+		 * stores all loaded jstree plugins (used internally)
+		 * @name $.jstree.plugins
+		 */
+		plugins : {},
+		path : src && src.indexOf('/') !== -1 ? src.replace(/\/[^\/]+$/,'') : '',
+		idregex : /[\\:&!^|()\[\]<>@*'+~#";.,=\- \/${}%]/g
+	};
+	/**
+	 * creates a jstree instance
+	 * @name $.jstree.create(el [, options])
+	 * @param {DOMElement|jQuery|String} el the element to create the instance on, can be jQuery extended or a selector
+	 * @param {Object} options options for this instance (extends `$.jstree.defaults`)
+	 * @return {jsTree} the new instance
+	 */
+	$.jstree.create = function (el, options) {
+		var tmp = new $.jstree.core(++instance_counter),
+			opt = options;
+		options = $.extend(true, {}, $.jstree.defaults, options);
+		if(opt && opt.plugins) {
+			options.plugins = opt.plugins;
+		}
+		$.each(options.plugins, function (i, k) {
+			if(i !== 'core') {
+				tmp = tmp.plugin(k, options[k]);
+			}
+		});
+		tmp.init(el, options);
+		return tmp;
+	};
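+	/*
+	 * A minimal usage sketch for `$.jstree.create` (the '#tree' selector and the
+	 * option values below are illustrative only):
+	 *
+	 *	var inst = $.jstree.create('#tree', {
+	 *		'plugins' : [],
+	 *		'core' : { 'multiple' : false }
+	 *	});
+	 *	inst.get_container(); // the jQuery extended container element
+	 */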
+	/**
+	 * the jstree class constructor, used only internally
+	 * @private
+	 * @name $.jstree.core(id)
+	 * @param {Number} id this instance's index
+	 */
+	$.jstree.core = function (id) {
+		this._id = id;
+		this._cnt = 0;
+		this._wrk = null;
+		this._data = {
+			core : {
+				themes : {
+					name : false,
+					dots : false,
+					icons : false
+				},
+				selected : [],
+				last_error : {},
+				working : false,
+				worker_queue : []
+			}
+		};
+	};
+	/**
+	 * get a reference to an existing instance
+	 *
+	 * __Examples__
+	 *
+	 *	// provided a container with an ID of "tree", and a nested node with an ID of "branch"
+	 *	// all of these will return the same instance
+	 *	$.jstree.reference('tree');
+	 *	$.jstree.reference('#tree');
+	 *	$.jstree.reference($('#tree'));
+	 *	$.jstree.reference(document.getElementById('tree'));
+	 *	$.jstree.reference('branch');
+	 *	$.jstree.reference('#branch');
+	 *	$.jstree.reference($('#branch'));
+	 *	$.jstree.reference(document.getElementById('branch'));
+	 *
+	 * @name $.jstree.reference(needle)
+	 * @param {DOMElement|jQuery|String} needle
+	 * @return {jsTree|null} the instance or `null` if not found
+	 */
+	$.jstree.reference = function (needle) {
+		var tmp = null,
+			obj = null;
+		if(needle && needle.id) { needle = needle.id; }
+
+		if(!obj || !obj.length) {
+			try { obj = $(needle); } catch (ignore) { }
+		}
+		if(!obj || !obj.length) {
+			try { obj = $('#' + needle.replace($.jstree.idregex,'\\$&')); } catch (ignore) { }
+		}
+		if(obj && obj.length && (obj = obj.closest('.jstree')).length && (obj = obj.data('jstree'))) {
+			tmp = obj;
+		}
+		else {
+			$('.jstree').each(function () {
+				var inst = $(this).data('jstree');
+				if(inst && inst._model.data[needle]) {
+					tmp = inst;
+					return false;
+				}
+			});
+		}
+		return tmp;
+	};
+	/**
+	 * Create an instance, get an instance or invoke a command on an instance.
+	 * 
+	 * If there is no instance associated with the current node a new one is created and `arg` is used to extend `$.jstree.defaults` for this new instance. There would be no return value (chaining is not broken).
+	 * 
+	 * If there is an existing instance and `arg` is a string the command specified by `arg` is executed on the instance, with any additional arguments passed to the function. If the function returns a value it will be returned (chaining could break depending on function).
+	 * 
+	 * If there is an existing instance and `arg` is not a string the instance itself is returned (similar to `$.jstree.reference`).
+	 * 
+	 * In any other case - nothing is returned and chaining is not broken.
+	 *
+	 * __Examples__
+	 *
+	 *	$('#tree1').jstree(); // creates an instance
+	 *	$('#tree2').jstree({ plugins : [] }); // create an instance with some options
+	 *	$('#tree1').jstree('open_node', '#branch_1'); // call a method on an existing instance, passing additional arguments
+	 *	$('#tree2').jstree(); // get an existing instance (or create an instance)
+	 *	$('#tree2').jstree(true); // get an existing instance (will not create new instance)
+	 *	$('#branch_1').jstree().select_node('#branch_1'); // get an instance (using a nested element and call a method)
+	 *
+	 * @name $().jstree([arg])
+	 * @param {String|Object} arg
+	 * @return {Mixed}
+	 */
+	$.fn.jstree = function (arg) {
+		// check for string argument
+		var is_method	= (typeof arg === 'string'),
+			args		= Array.prototype.slice.call(arguments, 1),
+			result		= null;
+		this.each(function () {
+			// get the instance (if there is one) and method (if it exists)
+			var instance = $.jstree.reference(this),
+				method = is_method && instance ? instance[arg] : null;
+			// if calling a method, and method is available - execute on the instance
+			result = is_method && method ?
+				method.apply(instance, args) :
+				null;
+			// if there is no instance and no method is being called - create one
+			if(!instance && !is_method && (arg === undefined || $.isPlainObject(arg))) {
+				$(this).data('jstree', new $.jstree.create(this, arg));
+			}
+			// if there is an instance and no method is called - return the instance
+			if( (instance && !is_method) || arg === true ) {
+				result = instance || false;
+			}
+			// if there was a method call which returned a result - break and return the value
+			if(result !== null && result !== undefined) {
+				return false;
+			}
+		});
+		// if there was a method call with a valid return value - return that, otherwise continue the chain
+		return result !== null && result !== undefined ?
+			result : this;
+	};
+	/**
+	 * used to find elements containing an instance
+	 *
+	 * __Examples__
+	 *
+	 *	$('div:jstree').each(function () {
+	 *		$(this).jstree('destroy');
+	 *	});
+	 *
+	 * @name $(':jstree')
+	 * @return {jQuery}
+	 */
+	$.expr[':'].jstree = $.expr.createPseudo(function(search) {
+		return function(a) {
+			return $(a).hasClass('jstree') &&
+				$(a).data('jstree') !== undefined;
+		};
+	});
+
+	/**
+	 * stores all defaults for the core
+	 * @name $.jstree.defaults.core
+	 */
+	$.jstree.defaults.core = {
+		/**
+		 * data configuration
+		 * 
+		 * If left as `false` the HTML inside the jstree container element is used to populate the tree (that should be an unordered list with list items).
+		 *
+		 * You can also pass in an HTML string or a JSON array here.
+		 * 
+		 * It is possible to pass in a standard jQuery-like AJAX config and jstree will automatically determine if the response is JSON or HTML and use that to populate the tree. 
+		 * In addition to the standard jQuery ajax options, you can supply functions for `data` and `url`; these functions are run in the current instance's scope, receive a param indicating which node is being loaded, and their return values are used.
+		 * 
+		 * The last option is to specify a function; it will receive the node being loaded as its first argument and, as its second, a callback function which should be invoked with the result.
+		 *
+		 * __Examples__
+		 *
+		 *	// AJAX
+		 *	$('#tree').jstree({
+		 *		'core' : {
+		 *			'data' : {
+		 *				'url' : '/get/children/',
+		 *				'data' : function (node) {
+		 *					return { 'id' : node.id };
+		 *				}
+		 *			}
+		 *		}
+		 *	});
+		 *
+		 *	// direct data
+		 *	$('#tree').jstree({
+		 *		'core' : {
+		 *			'data' : [
+		 *				'Simple root node',
+		 *				{
+		 *					'id' : 'node_2',
+		 *					'text' : 'Root node with options',
+		 *					'state' : { 'opened' : true, 'selected' : true },
+		 *					'children' : [ { 'text' : 'Child 1' }, 'Child 2']
+		 *				}
+		 *			]
+		 *		}
+		 *	});
+		 *	
+		 *	// function
+		 *	$('#tree').jstree({
+		 *		'core' : {
+		 *			'data' : function (obj, callback) {
+		 *				callback.call(this, ['Root 1', 'Root 2']);
+		 *			}
+		 *		}
+		 *	});
+		 * 
+		 * @name $.jstree.defaults.core.data
+		 */
+		data			: false,
+		/**
+		 * configure the various strings used throughout the tree
+		 *
+		 * You can use an object where the key is the string you need to replace and the value is your replacement.
+		 * Another option is to specify a function which will be called with an argument of the needed string and should return the replacement.
+		 * If left as `false` no replacement is made.
+		 *
+		 * __Examples__
+		 *
+		 *	$('#tree').jstree({
+		 *		'core' : {
+		 *			'strings' : {
+		 *				'Loading ...' : 'Please wait ...'
+		 *			}
+		 *		}
+		 *	});
+		 *
+		 * @name $.jstree.defaults.core.strings
+		 */
+		strings			: false,
+		/**
+		 * determines what happens when a user tries to modify the structure of the tree.
+		 * If left as `false` all operations like create, rename, delete, move or copy are prevented.
+		 * You can set this to `true` to allow all interactions or use a function to have better control.
+		 *
+		 * __Examples__
+		 *
+		 *	$('#tree').jstree({
+		 *		'core' : {
+		 *			'check_callback' : function (operation, node, node_parent, node_position, more) {
+		 *				// operation can be 'create_node', 'rename_node', 'delete_node', 'move_node' or 'copy_node'
+		 *				// in case of 'rename_node' node_position is filled with the new node name
+		 *				return operation === 'rename_node' ? true : false;
+		 *			}
+		 *		}
+		 *	});
+		 * 
+		 * @name $.jstree.defaults.core.check_callback
+		 */
+		check_callback	: false,
+		/**
+		 * a callback called with a single object parameter in the instance's scope when something goes wrong (operation prevented, ajax failed, etc)
+		 * @name $.jstree.defaults.core.error
+		 */
+		error			: $.noop,
+		/**
+		 * the open / close animation duration in milliseconds - set this to `false` to disable the animation (default is `200`)
+		 * @name $.jstree.defaults.core.animation
+		 */
+		animation		: 200,
+		/**
+		 * a boolean indicating if multiple nodes can be selected
+		 * @name $.jstree.defaults.core.multiple
+		 */
+		multiple		: true,
+		/**
+		 * theme configuration object
+		 * @name $.jstree.defaults.core.themes
+		 */
+		themes			: {
+			/**
+			 * the name of the theme to use (if left as `false` the default theme is used)
+			 * @name $.jstree.defaults.core.themes.name
+			 */
+			name			: false,
+			/**
+			 * the URL of the theme's CSS file; leave this as `false` if you have manually included the theme CSS (recommended). You can also set this to `true`, which will try to autoload the theme.
+			 * @name $.jstree.defaults.core.themes.url
+			 */
+			url				: false,
+			/**
+			 * the location of all jstree themes - only used if `url` is set to `true`
+			 * @name $.jstree.defaults.core.themes.dir
+			 */
+			dir				: false,
+			/**
+			 * a boolean indicating if connecting dots are shown
+			 * @name $.jstree.defaults.core.themes.dots
+			 */
+			dots			: true,
+			/**
+			 * a boolean indicating if node icons are shown
+			 * @name $.jstree.defaults.core.themes.icons
+			 */
+			icons			: true,
+			/**
+			 * a boolean indicating if the tree background is striped
+			 * @name $.jstree.defaults.core.themes.stripes
+			 */
+			stripes			: false,
+			/**
+			 * a string (or boolean `false`) specifying the theme variant to use (if the theme supports variants)
+			 * @name $.jstree.defaults.core.themes.variant
+			 */
+			variant			: false,
+			/**
+			 * a boolean specifying if a responsive version of the theme should kick in on smaller screens (if the theme supports it). Defaults to `false`.
+			 * @name $.jstree.defaults.core.themes.responsive
+			 */
+			responsive		: false
+		},
+		/**
+		 * if left as `true` all parents of all selected nodes will be opened once the tree loads (so that all selected nodes are visible to the user)
+		 * @name $.jstree.defaults.core.expand_selected_onload
+		 */
+		expand_selected_onload : true,
+		/**
+		 * if left as `true` web workers will be used to parse incoming JSON data where possible, so that the UI will not be blocked by large requests. Workers are however about 30% slower. Defaults to `true`
+		 * @name $.jstree.defaults.core.worker
+		 */
+		worker : true,
+		/**
+		 * Force node text to plain text (and escape HTML). Defaults to `false`
+		 * @name $.jstree.defaults.core.force_text
+		 */
+		force_text : false
+	};
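+	/*
+	 * A combined configuration sketch using several of the core defaults documented
+	 * above ('#tree' is an illustrative container id):
+	 *
+	 *	$('#tree').jstree({
+	 *		'core' : {
+	 *			'animation' : false,            // disable the open / close animation
+	 *			'multiple' : false,             // allow only single selection
+	 *			'check_callback' : true,        // allow all structure modifications
+	 *			'themes' : { 'stripes' : true } // striped tree background
+	 *		}
+	 *	});
+	 */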
+	$.jstree.core.prototype = {
+		/**
+		 * used to decorate an instance with a plugin. Used internally.
+		 * @private
+		 * @name plugin(deco [, opts])
+		 * @param  {String} deco the plugin to decorate with
+		 * @param  {Object} opts options for the plugin
+		 * @return {jsTree}
+		 */
+		plugin : function (deco, opts) {
+			var Child = $.jstree.plugins[deco];
+			if(Child) {
+				this._data[deco] = {};
+				Child.prototype = this;
+				return new Child(opts, this);
+			}
+			return this;
+		},
+		/**
+		 * initializes the instance - sets up the model, binds events and loads the root node. Used internally.
+		 * @private
+		 * @name init(el, options)
+		 * @param {DOMElement|jQuery|String} el the element we are transforming
+		 * @param {Object} options options for this instance
+		 * @trigger init.jstree, loading.jstree, loaded.jstree, ready.jstree, changed.jstree
+		 */
+		init : function (el, options) {
+			this._model = {
+				data : {
+					'#' : {
+						id : '#',
+						parent : null,
+						parents : [],
+						children : [],
+						children_d : [],
+						state : { loaded : false }
+					}
+				},
+				changed : [],
+				force_full_redraw : false,
+				redraw_timeout : false,
+				default_state : {
+					loaded : true,
+					opened : false,
+					selected : false,
+					disabled : false
+				}
+			};
+
+			this.element = $(el).addClass('jstree jstree-' + this._id);
+			this.settings = options;
+			this.element.bind("destroyed", $.proxy(this.teardown, this));
+
+			this._data.core.ready = false;
+			this._data.core.loaded = false;
+			this._data.core.rtl = (this.element.css("direction") === "rtl");
+			this.element[this._data.core.rtl ? 'addClass' : 'removeClass']("jstree-rtl");
+			this.element.attr('role','tree');
+
+			this.bind();
+			/**
+			 * triggered after all events are bound
+			 * @event
+			 * @name init.jstree
+			 */
+			this.trigger("init");
+
+			this._data.core.original_container_html = this.element.find(" > ul > li").clone(true);
+			this._data.core.original_container_html
+				.find("li").addBack()
+				.contents().filter(function() {
+					return this.nodeType === 3 && (!this.nodeValue || /^\s+$/.test(this.nodeValue));
+				})
+				.remove();
+			this.element.html("<"+"ul class='jstree-container-ul jstree-children'><"+"li class='jstree-initial-node jstree-loading jstree-leaf jstree-last'><i class='jstree-icon jstree-ocl'></i><"+"a class='jstree-anchor' href='#'><i class='jstree-icon jstree-themeicon-hidden'></i>" + this.get_string("Loading ...") + "</a></li></ul>");
+			this._data.core.li_height = this.get_container_ul().children("li:eq(0)").height() || 24;
+			/**
+			 * triggered after the loading text is shown and before loading starts
+			 * @event
+			 * @name loading.jstree
+			 */
+			this.trigger("loading");
+			this.load_node('#');
+		},
+		/**
+		 * destroy an instance
+		 * @name destroy()
+		 * @param  {Boolean} keep_html if not set to `true` the container will be emptied, otherwise the current DOM elements will be kept intact
+		 */
+		destroy : function (keep_html) {
+			if(!keep_html) { this.element.empty(); }
+			this.element.unbind("destroyed", this.teardown);
+			this.teardown();
+		},
+		/**
+		 * part of the destroying of an instance. Used internally.
+		 * @private
+		 * @name teardown()
+		 */
+		teardown : function () {
+			this.unbind();
+			this.element
+				.removeClass('jstree')
+				.removeData('jstree')
+				.find("[class^='jstree']")
+					.addBack()
+					.attr("class", function () { return this.className.replace(/jstree[^ ]*|$/ig,''); });
+			this.element = null;
+		},
+		/**
+		 * bind all events. Used internally.
+		 * @private
+		 * @name bind()
+		 */
+		bind : function () {
+			this.element
+				.on("dblclick.jstree", function () {
+						if(document.selection && document.selection.empty) {
+							document.selection.empty();
+						}
+						else {
+							if(window.getSelection) {
+								var sel = window.getSelection();
+								try {
+									sel.removeAllRanges();
+									sel.collapse();
+								} catch (ignore) { }
+							}
+						}
+					})
+				.on("click.jstree", ".jstree-ocl", $.proxy(function (e) {
+						this.toggle_node(e.target);
+					}, this))
+				.on("click.jstree", ".jstree-anchor", $.proxy(function (e) {
+						e.preventDefault();
+						$(e.currentTarget).focus();
+						this.activate_node(e.currentTarget, e);
+					}, this))
+				.on('keydown.jstree', '.jstree-anchor', $.proxy(function (e) {
+						if(e.target.tagName === "INPUT") { return true; }
+						var o = null;
+						switch(e.which) {
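+							// enter / space - forward as a click on the anchor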
+							case 13:
+							case 32:
+								e.type = "click";
+								$(e.currentTarget).trigger(e);
+								break;
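+							// left - close an open node, otherwise focus the previous visible node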
+							case 37:
+								e.preventDefault();
+								if(this.is_open(e.currentTarget)) {
+									this.close_node(e.currentTarget);
+								}
+								else {
+									o = this.get_prev_dom(e.currentTarget);
+									if(o && o.length) { o.children('.jstree-anchor').focus(); }
+								}
+								break;
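+							// up - focus the previous visible node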
+							case 38:
+								e.preventDefault();
+								o = this.get_prev_dom(e.currentTarget);
+								if(o && o.length) { o.children('.jstree-anchor').focus(); }
+								break;
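+							// right - open a closed node, otherwise focus the next visible node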
+							case 39:
+								e.preventDefault();
+								if(this.is_closed(e.currentTarget)) {
+									this.open_node(e.currentTarget, function (o) { this.get_node(o, true).children('.jstree-anchor').focus(); });
+								}
+								else {
+									o = this.get_next_dom(e.currentTarget);
+									if(o && o.length) { o.children('.jstree-anchor').focus(); }
+								}
+								break;
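+							// down - focus the next visible node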
+							case 40:
+								e.preventDefault();
+								o = this.get_next_dom(e.currentTarget);
+								if(o && o.length) { o.children('.jstree-anchor').focus(); }
+								break;
+							// delete
+							case 46:
+								e.preventDefault();
+								o = this.get_node(e.currentTarget);
+								if(o && o.id && o.id !== '#') {
+									o = this.is_selected(o) ? this.get_selected() : o;
+									// this.delete_node(o);
+								}
+								break;
+							// f2
+							case 113:
+								e.preventDefault();
+								o = this.get_node(e.currentTarget);
+								/*!
+								if(o && o.id && o.id !== '#') {
+									// this.edit(o);
+								}
+								*/
+								break;
+							default:
+								// console.log(e.which);
+								break;
+						}
+					}, this))
+				.on("load_node.jstree", $.proxy(function (e, data) {
+						if(data.status) {
+							if(data.node.id === '#' && !this._data.core.loaded) {
+								this._data.core.loaded = true;
+								/**
+								 * triggered after the root node is loaded for the first time
+								 * @event
+								 * @name loaded.jstree
+								 */
+								this.trigger("loaded");
+							}
+							if(!this._data.core.ready && !this.get_container_ul().find('.jstree-loading:eq(0)').length) {
+								this._data.core.ready = true;
+								if(this._data.core.selected.length) {
+									if(this.settings.core.expand_selected_onload) {
+										var tmp = [], i, j;
+										for(i = 0, j = this._data.core.selected.length; i < j; i++) {
+											tmp = tmp.concat(this._model.data[this._data.core.selected[i]].parents);
+										}
+										tmp = $.vakata.array_unique(tmp);
+										for(i = 0, j = tmp.length; i < j; i++) {
+											this.open_node(tmp[i], false, 0);
+										}
+									}
+									this.trigger('changed', { 'action' : 'ready', 'selected' : this._data.core.selected });
+								}
+								/**
+								 * triggered after all nodes are finished loading
+								 * @event
+								 * @name ready.jstree
+								 */
+								setTimeout($.proxy(function () { this.trigger("ready"); }, this), 0);
+							}
+						}
+					}, this))
+				// THEME RELATED
+				.on("init.jstree", $.proxy(function () {
+						var s = this.settings.core.themes;
+						this._data.core.themes.dots			= s.dots;
+						this._data.core.themes.stripes		= s.stripes;
+						this._data.core.themes.icons		= s.icons;
+						this.set_theme(s.name || "default", s.url);
+						this.set_theme_variant(s.variant);
+					}, this))
+				.on("loading.jstree", $.proxy(function () {
+						this[ this._data.core.themes.dots ? "show_dots" : "hide_dots" ]();
+						this[ this._data.core.themes.icons ? "show_icons" : "hide_icons" ]();
+						this[ this._data.core.themes.stripes ? "show_stripes" : "hide_stripes" ]();
+					}, this))
+				.on('blur.jstree', '.jstree-anchor', $.proxy(function (e) {
+						$(e.currentTarget).filter('.jstree-hovered').mouseleave();
+					}, this))
+				.on('focus.jstree', '.jstree-anchor', $.proxy(function (e) {
+						this.element.find('.jstree-hovered').not(e.currentTarget).mouseleave();
+						$(e.currentTarget).mouseenter();
+					}, this))
+				.on('mouseenter.jstree', '.jstree-anchor', $.proxy(function (e) {
+						this.hover_node(e.currentTarget);
+					}, this))
+				.on('mouseleave.jstree', '.jstree-anchor', $.proxy(function (e) {
+						this.dehover_node(e.currentTarget);
+					}, this));
+		},
+		/**
+		 * part of the destroying of an instance. Used internally.
+		 * @private
+		 * @name unbind()
+		 */
+		unbind : function () {
+			this.element.off('.jstree');
+			$(document).off('.jstree-' + this._id);
+		},
+		/**
+		 * trigger an event. Used internally.
+		 * @private
+		 * @name trigger(ev [, data])
+		 * @param  {String} ev the name of the event to trigger
+		 * @param  {Object} data additional data to pass with the event
+		 */
+		trigger : function (ev, data) {
+			if(!data) {
+				data = {};
+			}
+			data.instance = this;
+			this.element.triggerHandler(ev.replace('.jstree','') + '.jstree', data);
+		},
+		/**
+		 * returns the jQuery extended instance container
+		 * @name get_container()
+		 * @return {jQuery}
+		 */
+		get_container : function () {
+			return this.element;
+		},
+		/**
+		 * returns the jQuery extended main UL node inside the instance container. Used internally.
+		 * @private
+		 * @name get_container_ul()
+		 * @return {jQuery}
+		 */
+		get_container_ul : function () {
+			return this.element.children(".jstree-children:eq(0)");
+		},
+		/**
+		 * gets string replacements (localization). Used internally.
+		 * @private
+		 * @name get_string(key)
+		 * @param  {String} key
+		 * @return {String}
+		 */
+		get_string : function (key) {
+			var a = this.settings.core.strings;
+			if($.isFunction(a)) { return a.call(this, key); }
+			if(a && a[key]) { return a[key]; }
+			return key;
+		},
+		/**
+		 * gets the first child of a DOM node. Used internally.
+		 * @private
+		 * @name _firstChild(dom)
+		 * @param  {DOMElement} dom
+		 * @return {DOMElement}
+		 */
+		_firstChild : function (dom) {
+			dom = dom ? dom.firstChild : null;
+			while(dom !== null && dom.nodeType !== 1) {
+				dom = dom.nextSibling;
+			}
+			return dom;
+		},
+		/**
+		 * gets the next sibling of a DOM node. Used internally.
+		 * @private
+		 * @name _nextSibling(dom)
+		 * @param  {DOMElement} dom
+		 * @return {DOMElement}
+		 */
+		_nextSibling : function (dom) {
+			dom = dom ? dom.nextSibling : null;
+			while(dom !== null && dom.nodeType !== 1) {
+				dom = dom.nextSibling;
+			}
+			return dom;
+		},
+		/**
+		 * gets the previous sibling of a DOM node. Used internally.
+		 * @private
+		 * @name _previousSibling(dom)
+		 * @param  {DOMElement} dom
+		 * @return {DOMElement}
+		 */
+		_previousSibling : function (dom) {
+			dom = dom ? dom.previousSibling : null;
+			while(dom !== null && dom.nodeType !== 1) {
+				dom = dom.previousSibling;
+			}
+			return dom;
+		},
+		/**
+		 * get the JSON representation of a node (or the actual jQuery extended DOM node) by using any input (child DOM element, ID string, selector, etc)
+		 * @name get_node(obj [, as_dom])
+		 * @param  {mixed} obj
+		 * @param  {Boolean} as_dom
+		 * @return {Object|jQuery}
+		 */
+		get_node : function (obj, as_dom) {
+			if(obj && obj.id) {
+				obj = obj.id;
+			}
+			var dom;
+			try {
+				if(this._model.data[obj]) {
+					obj = this._model.data[obj];
+				}
+				else if(((dom = $(obj, this.element)).length || (dom = $('#' + obj.replace($.jstree.idregex,'\\$&'), this.element)).length) && this._model.data[dom.closest('.jstree-node').attr('id')]) {
+					obj = this._model.data[dom.closest('.jstree-node').attr('id')];
+				}
+				else if((dom = $(obj, this.element)).length && dom.hasClass('jstree')) {
+					obj = this._model.data['#'];
+				}
+				else {
+					return false;
+				}
+
+				if(as_dom) {
+					obj = obj.id === '#' ? this.element : $('#' + obj.id.replace($.jstree.idregex,'\\$&'), this.element);
+				}
+				return obj;
+			} catch (ex) { return false; }
+		},
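+		/*
+		 * A usage sketch for `get_node` (assumes `inst` is an existing instance, e.g.
+		 * obtained via `$.jstree.reference`, and 'branch' is an illustrative node id):
+		 *
+		 *	var node = inst.get_node('branch');       // JSON representation from the model
+		 *	var dom  = inst.get_node('branch', true); // jQuery extended DOM node
+		 */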
+		/**
+		 * get the path to a node, either consisting of node texts, or of node IDs, optionally glued together (otherwise an array)
+		 * @name get_path(obj [, glue, ids])
+		 * @param  {mixed} obj the node
+		 * @param  {String} glue if you want the path as a string - pass the glue here (for example '/'); if a falsy value is supplied, an array is returned
+		 * @param  {Boolean} ids if set to `true` the path is built using node IDs, otherwise node text is used
+		 * @return {mixed}
+		 */
+		get_path : function (obj, glue, ids) {
+			obj = obj.parents ? obj : this.get_node(obj);
+			if(!obj || obj.id === '#' || !obj.parents) {
+				return false;
+			}
+			var i, j, p = [];
+			p.push(ids ? obj.id : obj.text);
+			for(i = 0, j = obj.parents.length; i < j; i++) {
+				p.push(ids ? obj.parents[i] : this.get_text(obj.parents[i]));
+			}
+			p = p.reverse().slice(1);
+			return glue ? p.join(glue) : p;
+		},
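+		/*
+		 * A usage sketch for `get_path` - illustrative ids, assuming a node with text
+		 * 'Child' nested under a node with text 'Root':
+		 *
+		 *	inst.get_path('child');            // [ 'Root', 'Child' ]
+		 *	inst.get_path('child', '/');       // 'Root/Child'
+		 *	inst.get_path('child', '/', true); // the same path built from node IDs
+		 */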
+		/**
+		 * get the next visible node that is below the `obj` node. If `strict` is set to `true` only sibling nodes are returned.
+		 * @name get_next_dom(obj [, strict])
+		 * @param  {mixed} obj
+		 * @param  {Boolean} strict
+		 * @return {jQuery}
+		 */
+		get_next_dom : function (obj, strict) {
+			var tmp;
+			obj = this.get_node(obj, true);
+			if(obj[0] === this.element[0]) {
+				tmp = this._firstChild(this.get_container_ul()[0]);
+				return tmp ? $(tmp) : false;
+			}
+			if(!obj || !obj.length) {
+				return false;
+			}
+			if(strict) {
+				tmp = this._nextSibling(obj[0]);
+				return tmp ? $(tmp) : false;
+			}
+			if(obj.hasClass("jstree-open")) {
+				tmp = this._firstChild(obj.children('.jstree-children')[0]);
+				return tmp ? $(tmp) : false;
+			}
+			if((tmp = this._nextSibling(obj[0])) !== null) {
+				return $(tmp);
+			}
+			return obj.parentsUntil(".jstree",".jstree-node").next(".jstree-node").eq(0);
+		},
+		/**
+		 * get the previous visible node that is above the `obj` node. If `strict` is set to `true` only sibling nodes are returned.
+		 * @name get_prev_dom(obj [, strict])
+		 * @param  {mixed} obj
+		 * @param  {Boolean} strict
+		 * @return {jQuery}
+		 */
+		get_prev_dom : function (obj, strict) {
+			var tmp;
+			obj = this.get_node(obj, true);
+			if(obj[0] === this.element[0]) {
+				tmp = this.get_container_ul()[0].lastChild;
+				return tmp ? $(tmp) : false;
+			}
+			if(!obj || !obj.length) {
+				return false;
+			}
+			if(strict) {
+				tmp = this._previousSibling(obj[0]);
+				return tmp ? $(tmp) : false;
+			}
+			if((tmp = this._previousSibling(obj[0])) !== null) {
+				obj = $(tmp);
+				while(obj.hasClass("jstree-open")) {
+					obj = obj.children(".jstree-children:eq(0)").children(".jstree-node:last");
+				}
+				return obj;
+			}
+			tmp = obj[0].parentNode.parentNode;
+			return tmp && tmp.className && tmp.className.indexOf('jstree-node') !== -1 ? $(tmp) : false;
+		},
+		/**
+		 * get the parent ID of a node
+		 * @name get_parent(obj)
+		 * @param  {mixed} obj
+		 * @return {String}
+		 */
+		get_parent : function (obj) {
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			return obj.parent;
+		},
+		/**
+		 * get a jQuery collection of all the children of a node (node must be rendered)
+		 * @name get_children_dom(obj)
+		 * @param  {mixed} obj
+		 * @return {jQuery}
+		 */
+		get_children_dom : function (obj) {
+			obj = this.get_node(obj, true);
+			if(obj[0] === this.element[0]) {
+				return this.get_container_ul().children(".jstree-node");
+			}
+			if(!obj || !obj.length) {
+				return false;
+			}
+			return obj.children(".jstree-children").children(".jstree-node");
+		},
+		/**
+		 * checks if a node has children (a node that is not yet loaded is also considered a parent)
+		 * @name is_parent(obj)
+		 * @param  {mixed} obj
+		 * @return {Boolean}
+		 */
+		is_parent : function (obj) {
+			obj = this.get_node(obj);
+			return obj && (obj.state.loaded === false || obj.children.length > 0);
+		},
+		/**
+		 * checks if a node is loaded (its children are available)
+		 * @name is_loaded(obj)
+		 * @param  {mixed} obj
+		 * @return {Boolean}
+		 */
+		is_loaded : function (obj) {
+			obj = this.get_node(obj);
+			return obj && obj.state.loaded;
+		},
+		/**
+		 * check if a node is currently loading (fetching children)
+		 * @name is_loading(obj)
+		 * @param  {mixed} obj
+		 * @return {Boolean}
+		 */
+		is_loading : function (obj) {
+			obj = this.get_node(obj);
+			return obj && obj.state && obj.state.loading;
+		},
+		/**
+		 * check if a node is opened
+		 * @name is_open(obj)
+		 * @param  {mixed} obj
+		 * @return {Boolean}
+		 */
+		is_open : function (obj) {
+			obj = this.get_node(obj);
+			return obj && obj.state.opened;
+		},
+		/**
+		 * check if a node is in a closed state
+		 * @name is_closed(obj)
+		 * @param  {mixed} obj
+		 * @return {Boolean}
+		 */
+		is_closed : function (obj) {
+			obj = this.get_node(obj);
+			return obj && this.is_parent(obj) && !obj.state.opened;
+		},
+		/**
+		 * check if a node has no children
+		 * @name is_leaf(obj)
+		 * @param  {mixed} obj
+		 * @return {Boolean}
+		 */
+		is_leaf : function (obj) {
+			return !this.is_parent(obj);
+		},
+		/**
+		 * loads a node (fetches its children using the `core.data` setting). Multiple nodes can be passed by using an array.
+		 * @name load_node(obj [, callback])
+		 * @param  {mixed} obj
+		 * @param  {function} callback a function to be executed once loading is complete, the function is executed in the instance's scope and receives two arguments - the node and a boolean status
+		 * @return {Boolean}
+		 * @trigger load_node.jstree
+		 */
+		load_node : function (obj, callback) {
+			var k, l, i, j, c;
+			if($.isArray(obj)) {
+				this._load_nodes(obj.slice(), callback);
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj) {
+				if(callback) { callback.call(this, obj, false); }
+				return false;
+			}
+			// if(obj.state.loading) { } // the node is already loading - just wait for it to load and invoke callback? but if called implicitly it should be loaded again?
+			if(obj.state.loaded) {
+				obj.state.loaded = false;
+				for(k = 0, l = obj.children_d.length; k < l; k++) {
+					for(i = 0, j = obj.parents.length; i < j; i++) {
+						this._model.data[obj.parents[i]].children_d = $.vakata.array_remove_item(this._model.data[obj.parents[i]].children_d, obj.children_d[k]);
+					}
+					if(this._model.data[obj.children_d[k]].state.selected) {
+						c = true;
+						this._data.core.selected = $.vakata.array_remove_item(this._data.core.selected, obj.children_d[k]);
+					}
+					delete this._model.data[obj.children_d[k]];
+				}
+				obj.children = [];
+				obj.children_d = [];
+				if(c) {
+					this.trigger('changed', { 'action' : 'load_node', 'node' : obj, 'selected' : this._data.core.selected });
+				}
+			}
+			obj.state.loading = true;
+			this.get_node(obj, true).addClass("jstree-loading");
+			this._load_node(obj, $.proxy(function (status) {
+				obj = this._model.data[obj.id];
+				obj.state.loading = false;
+				obj.state.loaded = status;
+				var dom = this.get_node(obj, true);
+				if(obj.state.loaded && !obj.children.length && dom && dom.length && !dom.hasClass('jstree-leaf')) {
+					dom.removeClass('jstree-closed jstree-open').addClass('jstree-leaf');
+				}
+				dom.removeClass("jstree-loading");
+				/**
+				 * triggered after a node is loaded
+				 * @event
+				 * @name load_node.jstree
+				 * @param {Object} node the node that was loading
+				 * @param {Boolean} status was the node loaded successfully
+				 */
+				this.trigger('load_node', { "node" : obj, "status" : status });
+				if(callback) {
+					callback.call(this, obj, status);
+				}
+			}, this));
+			return true;
+		},
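+		/*
+		 * A usage sketch for `load_node` ('branch' is an illustrative node id; the
+		 * callback runs in the instance's scope):
+		 *
+		 *	inst.load_node('branch', function (node, status) {
+		 *		if(status) { this.open_node(node); }
+		 *	});
+		 */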
+		/**
+		 * load an array of nodes (will also load unavailable nodes as soon as they appear in the structure). Used internally.
+		 * @private
+		 * @name _load_nodes(nodes [, callback])
+		 * @param  {array} nodes
+		 * @param  {function} callback a function to be executed once loading is complete, the function is executed in the instance's scope and receives one argument - the array passed to _load_nodes
+		 */
+		_load_nodes : function (nodes, callback, is_callback) {
+			var r = true,
+				c = function () { this._load_nodes(nodes, callback, true); },
+				m = this._model.data, i, j;
+			for(i = 0, j = nodes.length; i < j; i++) {
+				if(m[nodes[i]] && (!m[nodes[i]].state.loaded || !is_callback)) {
+					if(!this.is_loading(nodes[i])) {
+						this.load_node(nodes[i], c);
+					}
+					r = false;
+				}
+			}
+			if(r) {
+				if(callback && !callback.done) {
+					callback.call(this, nodes);
+					callback.done = true;
+				}
+			}
+		},
+		/**
+		 * handles the actual loading of a node. Used only internally.
+		 * @private
+		 * @name _load_node(obj [, callback])
+		 * @param  {mixed} obj
+		 * @param  {function} callback a function to be executed once loading is complete, the function is executed in the instance's scope and receives one argument - a boolean status
+		 * @return {Boolean}
+		 */
+		_load_node : function (obj, callback) {
+			var s = this.settings.core.data, t;
+			// use original HTML
+			if(!s) {
+				if(obj.id === '#') {
+					return this._append_html_data(obj, this._data.core.original_container_html.clone(true), function (status) {
+						callback.call(this, status);
+					});
+				}
+				else {
+					return callback.call(this, false);
+				}
+				// return callback.call(this, obj.id === '#' ? this._append_html_data(obj, this._data.core.original_container_html.clone(true)) : false);
+			}
+			if($.isFunction(s)) {
+				return s.call(this, obj, $.proxy(function (d) {
+					if(d === false) {
+						callback.call(this, false);
+					}
+					this[typeof d === 'string' ? '_append_html_data' : '_append_json_data'](obj, typeof d === 'string' ? $(d) : d, function (status) {
+						callback.call(this, status);
+					});
+					// return d === false ? callback.call(this, false) : callback.call(this, this[typeof d === 'string' ? '_append_html_data' : '_append_json_data'](obj, typeof d === 'string' ? $(d) : d));
+				}, this));
+			}
+			if(typeof s === 'object') {
+				if(s.url) {
+					s = $.extend(true, {}, s);
+					if($.isFunction(s.url)) {
+						s.url = s.url.call(this, obj);
+					}
+					if($.isFunction(s.data)) {
+						s.data = s.data.call(this, obj);
+					}
+					return $.ajax(s)
+						.done($.proxy(function (d,t,x) {
+								var type = x.getResponseHeader('Content-Type');
+								if(type.indexOf('json') !== -1 || typeof d === "object") {
+									return this._append_json_data(obj, d, function (status) { callback.call(this, status); });
+									//return callback.call(this, this._append_json_data(obj, d));
+								}
+								if(type.indexOf('html') !== -1 || typeof d === "string") {
+									return this._append_html_data(obj, $(d), function (status) { callback.call(this, status); });
+									// return callback.call(this, this._append_html_data(obj, $(d)));
+								}
+								this._data.core.last_error = { 'error' : 'ajax', 'plugin' : 'core', 'id' : 'core_04', 'reason' : 'Could not load node', 'data' : JSON.stringify({ 'id' : obj.id, 'xhr' : x }) };
+								this.settings.core.error.call(this, this._data.core.last_error);
+								return callback.call(this, false);
+							}, this))
+						.fail($.proxy(function (f) {
+								callback.call(this, false);
+								this._data.core.last_error = { 'error' : 'ajax', 'plugin' : 'core', 'id' : 'core_04', 'reason' : 'Could not load node', 'data' : JSON.stringify({ 'id' : obj.id, 'xhr' : f }) };
+								this.settings.core.error.call(this, this._data.core.last_error);
+							}, this));
+				}
+				t = ($.isArray(s) || $.isPlainObject(s)) ? JSON.parse(JSON.stringify(s)) : s;
+				if(obj.id === '#') {
+					return this._append_json_data(obj, t, function (status) {
+						callback.call(this, status);
+					});
+				}
+				else {
+					this._data.core.last_error = { 'error' : 'nodata', 'plugin' : 'core', 'id' : 'core_05', 'reason' : 'Could not load node', 'data' : JSON.stringify({ 'id' : obj.id }) };
+					this.settings.core.error.call(this, this._data.core.last_error);
+					return callback.call(this, false);
+				}
+				//return callback.call(this, (obj.id === "#" ? this._append_json_data(obj, t) : false) );
+			}
+			if(typeof s === 'string') {
+				if(obj.id === '#') {
+					return this._append_html_data(obj, $(s), function (status) {
+						callback.call(this, status);
+					});
+				}
+				else {
+					this._data.core.last_error = { 'error' : 'nodata', 'plugin' : 'core', 'id' : 'core_06', 'reason' : 'Could not load node', 'data' : JSON.stringify({ 'id' : obj.id }) };
+					this.settings.core.error.call(this, this._data.core.last_error);
+					return callback.call(this, false);
+				}
+				//return callback.call(this, (obj.id === "#" ? this._append_html_data(obj, $(s)) : false) );
+			}
+			return callback.call(this, false);
+		},
+		/**
+		 * adds a node to the list of nodes to redraw. Used only internally.
+		 * @private
+		 * @name _node_changed(obj)
+		 * @param  {mixed} obj
+		 */
+		_node_changed : function (obj) {
+			obj = this.get_node(obj);
+			if(obj) {
+				this._model.changed.push(obj.id);
+			}
+		},
+		/**
+		 * appends HTML content to the tree. Used internally.
+		 * @private
+		 * @name _append_html_data(dom, data, cb)
+		 * @param  {mixed} dom the node to append to
+		 * @param  {jQuery} data the HTML content to parse and append
+		 * @param  {function} cb a callback invoked with a boolean status once the data is appended
+		 * @trigger model.jstree, changed.jstree
+		 */
+		_append_html_data : function (dom, data, cb) {
+			dom = this.get_node(dom);
+			dom.children = [];
+			dom.children_d = [];
+			var dat = data.is('ul') ? data.children() : data,
+				par = dom.id,
+				chd = [],
+				dpc = [],
+				m = this._model.data,
+				p = m[par],
+				s = this._data.core.selected.length,
+				tmp, i, j;
+			dat.each($.proxy(function (i, v) {
+				tmp = this._parse_model_from_html($(v), par, p.parents.concat());
+				if(tmp) {
+					chd.push(tmp);
+					dpc.push(tmp);
+					if(m[tmp].children_d.length) {
+						dpc = dpc.concat(m[tmp].children_d);
+					}
+				}
+			}, this));
+			p.children = chd;
+			p.children_d = dpc;
+			for(i = 0, j = p.parents.length; i < j; i++) {
+				m[p.parents[i]].children_d = m[p.parents[i]].children_d.concat(dpc);
+			}
+			/**
+			 * triggered when new data is inserted to the tree model
+			 * @event
+			 * @name model.jstree
+			 * @param {Array} nodes an array of node IDs
+			 * @param {String} parent the parent ID of the nodes
+			 */
+			this.trigger('model', { "nodes" : dpc, 'parent' : par });
+			if(par !== '#') {
+				this._node_changed(par);
+				this.redraw();
+			}
+			else {
+				this.get_container_ul().children('.jstree-initial-node').remove();
+				this.redraw(true);
+			}
+			if(this._data.core.selected.length !== s) {
+				this.trigger('changed', { 'action' : 'model', 'selected' : this._data.core.selected });
+			}
+			cb.call(this, true);
+		},
+		/**
+		 * appends JSON content to the tree. Used internally.
+		 * @private
+		 * @name _append_json_data(dom, data, cb)
+		 * @param  {mixed} dom the node to append to
+		 * @param  {Object} data the JSON object to parse and append
+		 * @param  {function} cb a callback invoked with a boolean status once the data is appended
+		 * @trigger model.jstree, changed.jstree
+		 */
+		_append_json_data : function (dom, data, cb) {
+			dom = this.get_node(dom);
+			dom.children = [];
+			dom.children_d = [];
+			// unwrap .NET-style responses where the actual payload is nested in a "d" property
+			if(data.d) {
+				data = data.d;
+				if(typeof data === "string") {
+					data = JSON.parse(data);
+				}
+			}
+			if(!$.isArray(data)) { data = [data]; }
+			var w = null,
+				args = {
+					'df'	: this._model.default_state,
+					'dat'	: data,
+					'par'	: dom.id,
+					'm'		: this._model.data,
+					't_id'	: this._id,
+					't_cnt'	: this._cnt,
+					'sel'	: this._data.core.selected
+				},
+				func = function (data, undefined) {
+					if(data.data) { data = data.data; }
+					var dat = data.dat,
+						par = data.par,
+						chd = [],
+						dpc = [],
+						add = [],
+						df = data.df,
+						t_id = data.t_id,
+						t_cnt = data.t_cnt,
+						m = data.m,
+						p = m[par],
+						sel = data.sel,
+						tmp, i, j, rslt,
+						parse_flat = function (d, p, ps) {
+							if(!ps) { ps = []; }
+							else { ps = ps.concat(); }
+							if(p) { ps.unshift(p); }
+							var tid = d.id.toString(),
+								i, j, c, e,
+								tmp = {
+									id			: tid,
+									text		: d.text || '',
+									icon		: d.icon !== undefined ? d.icon : true,
+									parent		: p,
+									parents		: ps,
+									children	: d.children || [],
+									children_d	: d.children_d || [],
+									data		: d.data,
+									state		: { },
+									li_attr		: { id : false },
+									a_attr		: { href : '#' },
+									original	: false
+								};
+							for(i in df) {
+								if(df.hasOwnProperty(i)) {
+									tmp.state[i] = df[i];
+								}
+							}
+							if(d && d.data && d.data.jstree && d.data.jstree.icon) {
+								tmp.icon = d.data.jstree.icon;
+							}
+							if(d && d.data) {
+								tmp.data = d.data;
+								if(d.data.jstree) {
+									for(i in d.data.jstree) {
+										if(d.data.jstree.hasOwnProperty(i)) {
+											tmp.state[i] = d.data.jstree[i];
+										}
+									}
+								}
+							}
+							if(d && typeof d.state === 'object') {
+								for (i in d.state) {
+									if(d.state.hasOwnProperty(i)) {
+										tmp.state[i] = d.state[i];
+									}
+								}
+							}
+							if(d && typeof d.li_attr === 'object') {
+								for (i in d.li_attr) {
+									if(d.li_attr.hasOwnProperty(i)) {
+										tmp.li_attr[i] = d.li_attr[i];
+									}
+								}
+							}
+							if(!tmp.li_attr.id) {
+								tmp.li_attr.id = tid;
+							}
+							if(d && typeof d.a_attr === 'object') {
+								for (i in d.a_attr) {
+									if(d.a_attr.hasOwnProperty(i)) {
+										tmp.a_attr[i] = d.a_attr[i];
+									}
+								}
+							}
+							if(d && d.children && d.children === true) {
+								tmp.state.loaded = false;
+								tmp.children = [];
+								tmp.children_d = [];
+							}
+							m[tmp.id] = tmp;
+							for(i = 0, j = tmp.children.length; i < j; i++) {
+								c = parse_flat(m[tmp.children[i]], tmp.id, ps);
+								e = m[c];
+								tmp.children_d.push(c);
+								if(e.children_d.length) {
+									tmp.children_d = tmp.children_d.concat(e.children_d);
+								}
+							}
+							delete d.data;
+							delete d.children;
+							m[tmp.id].original = d;
+							if(tmp.state.selected) {
+								add.push(tmp.id);
+							}
+							return tmp.id;
+						},
+						parse_nest = function (d, p, ps) {
+							if(!ps) { ps = []; }
+							else { ps = ps.concat(); }
+							if(p) { ps.unshift(p); }
+							var tid = false, i, j, c, e, tmp;
+							do {
+								tid = 'j' + t_id + '_' + (++t_cnt);
+							} while(m[tid]);
+
+							tmp = {
+								id			: false,
+								text		: typeof d === 'string' ? d : '',
+								icon		: typeof d === 'object' && d.icon !== undefined ? d.icon : true,
+								parent		: p,
+								parents		: ps,
+								children	: [],
+								children_d	: [],
+								data		: null,
+								state		: { },
+								li_attr		: { id : false },
+								a_attr		: { href : '#' },
+								original	: false
+							};
+							for(i in df) {
+								if(df.hasOwnProperty(i)) {
+									tmp.state[i] = df[i];
+								}
+							}
+							if(d && d.id) { tmp.id = d.id.toString(); }
+							if(d && d.text) { tmp.text = d.text; }
+							if(d && d.data && d.data.jstree && d.data.jstree.icon) {
+								tmp.icon = d.data.jstree.icon;
+							}
+							if(d && d.data) {
+								tmp.data = d.data;
+								if(d.data.jstree) {
+									for(i in d.data.jstree) {
+										if(d.data.jstree.hasOwnProperty(i)) {
+											tmp.state[i] = d.data.jstree[i];
+										}
+									}
+								}
+							}
+							if(d && typeof d.state === 'object') {
+								for (i in d.state) {
+									if(d.state.hasOwnProperty(i)) {
+										tmp.state[i] = d.state[i];
+									}
+								}
+							}
+							if(d && typeof d.li_attr === 'object') {
+								for (i in d.li_attr) {
+									if(d.li_attr.hasOwnProperty(i)) {
+										tmp.li_attr[i] = d.li_attr[i];
+									}
+								}
+							}
+							if(tmp.li_attr.id && !tmp.id) {
+								tmp.id = tmp.li_attr.id.toString();
+							}
+							if(!tmp.id) {
+								tmp.id = tid;
+							}
+							if(!tmp.li_attr.id) {
+								tmp.li_attr.id = tmp.id;
+							}
+							if(d && typeof d.a_attr === 'object') {
+								for (i in d.a_attr) {
+									if(d.a_attr.hasOwnProperty(i)) {
+										tmp.a_attr[i] = d.a_attr[i];
+									}
+								}
+							}
+							if(d && d.children && d.children.length) {
+								for(i = 0, j = d.children.length; i < j; i++) {
+									c = parse_nest(d.children[i], tmp.id, ps);
+									e = m[c];
+									tmp.children.push(c);
+									if(e.children_d.length) {
+										tmp.children_d = tmp.children_d.concat(e.children_d);
+									}
+								}
+								tmp.children_d = tmp.children_d.concat(tmp.children);
+							}
+							if(d && d.children && d.children === true) {
+								tmp.state.loaded = false;
+								tmp.children = [];
+								tmp.children_d = [];
+							}
+							delete d.data;
+							delete d.children;
+							tmp.original = d;
+							m[tmp.id] = tmp;
+							if(tmp.state.selected) {
+								add.push(tmp.id);
+							}
+							return tmp.id;
+						};
+
+					if(dat.length && dat[0].id !== undefined && dat[0].parent !== undefined) {
+						// Flat JSON support (for easy import from DB):
+						// 1) convert to object (foreach)
+						for(i = 0, j = dat.length; i < j; i++) {
+							if(!dat[i].children) {
+								dat[i].children = [];
+							}
+							m[dat[i].id.toString()] = dat[i];
+						}
+						// 2) populate children (foreach)
+						for(i = 0, j = dat.length; i < j; i++) {
+							m[dat[i].parent.toString()].children.push(dat[i].id.toString());
+							// populate parent.children_d
+							p.children_d.push(dat[i].id.toString());
+						}
+						// 3) normalize && populate parents and children_d with recursion
+						for(i = 0, j = p.children.length; i < j; i++) {
+							tmp = parse_flat(m[p.children[i]], par, p.parents.concat());
+							dpc.push(tmp);
+							if(m[tmp].children_d.length) {
+								dpc = dpc.concat(m[tmp].children_d);
+							}
+						}
+						for(i = 0, j = p.parents.length; i < j; i++) {
+							m[p.parents[i]].children_d = m[p.parents[i]].children_d.concat(dpc);
+						}
+						// ?) three_state selection - p.state.selected && t - (if three_state foreach(dat => ch) -> foreach(parents) if(parent.selected) child.selected = true;
+						rslt = {
+							'cnt' : t_cnt,
+							'mod' : m,
+							'sel' : sel,
+							'par' : par,
+							'dpc' : dpc,
+							'add' : add
+						};
+					}
+					else {
+						for(i = 0, j = dat.length; i < j; i++) {
+							tmp = parse_nest(dat[i], par, p.parents.concat());
+							if(tmp) {
+								chd.push(tmp);
+								dpc.push(tmp);
+								if(m[tmp].children_d.length) {
+									dpc = dpc.concat(m[tmp].children_d);
+								}
+							}
+						}
+						p.children = chd;
+						p.children_d = dpc;
+						for(i = 0, j = p.parents.length; i < j; i++) {
+							m[p.parents[i]].children_d = m[p.parents[i]].children_d.concat(dpc);
+						}
+						rslt = {
+							'cnt' : t_cnt,
+							'mod' : m,
+							'sel' : sel,
+							'par' : par,
+							'dpc' : dpc,
+							'add' : add
+						};
+					}
+					return rslt;
+				},
+				rslt = function (rslt, worker) {
+					this._cnt = rslt.cnt;
+					this._model.data = rslt.mod; // breaks the reference in load_node - careful
+
+					if(worker) {
+						var i, j, a = rslt.add, r = rslt.sel, s = this._data.core.selected.slice(), m = this._model.data;
+						// if selection was changed while calculating in worker
+						if(r.length !== s.length || $.vakata.array_unique(r.concat(s)).length !== r.length) {
+							// deselect nodes that are no longer selected
+							for(i = 0, j = r.length; i < j; i++) {
+								if($.inArray(r[i], a) === -1 && $.inArray(r[i], s) === -1) {
+									m[r[i]].state.selected = false;
+								}
+							}
+							// select nodes that were selected in the meantime
+							for(i = 0, j = s.length; i < j; i++) {
+								if($.inArray(s[i], r) === -1) {
+									m[s[i]].state.selected = true;
+								}
+							}
+						}
+					}
+					if(rslt.add.length) {
+						this._data.core.selected = this._data.core.selected.concat(rslt.add);
+					}
+
+					this.trigger('model', { "nodes" : rslt.dpc, 'parent' : rslt.par });
+
+					if(rslt.par !== '#') {
+						this._node_changed(rslt.par);
+						this.redraw();
+					}
+					else {
+						// this.get_container_ul().children('.jstree-initial-node').remove();
+						this.redraw(true);
+					}
+					if(rslt.add.length) {
+						this.trigger('changed', { 'action' : 'model', 'selected' : this._data.core.selected });
+					}
+					cb.call(this, true);
+				};
+			if(this.settings.core.worker && window.Blob && window.URL && window.Worker) {
+				try {
+					if(this._wrk === null) {
+						this._wrk = window.URL.createObjectURL(
+							new window.Blob(
+								['self.onmessage = ' + func.toString().replace(/return ([^;}]+)[\s;}]+$/, 'postMessage($1);}')],
+								{type:"text/javascript"}
+							)
+						);
+					}
+					w = new window.Worker(this._wrk);
+					w.onmessage = $.proxy(function (e) {
+						rslt.call(this, e.data, true);
+						this._data.core.working = false;
+						if(this._data.core.worker_queue.length) {
+							this._append_json_data.apply(this, this._data.core.worker_queue.shift());
+						}
+					}, this);
+					if(!this._data.core.working) {
+						this._data.core.working = true;
+						w.postMessage(args);
+					}
+					else {
+						this._data.core.worker_queue.push([dom, data, cb]);
+					}
+				}
+				catch(e) {
+					rslt.call(this, func(args), false);
+				}
+			}
+			else {
+				rslt.call(this, func(args), false);
+			}
+		},
+		/**
+		 * parses a node from a jQuery object and appends it to the in memory tree model. Used internally.
+		 * @private
+		 * @name _parse_model_from_html(d [, p, ps])
+		 * @param  {jQuery} d the jQuery object to parse
+		 * @param  {String} p the parent ID
+		 * @param  {Array} ps list of all parents
+		 * @return {String} the ID of the object added to the model
+		 */
+		_parse_model_from_html : function (d, p, ps) {
+			if(!ps) { ps = []; }
+			else { ps = [].concat(ps); }
+			if(p) { ps.unshift(p); }
+			var c, e, m = this._model.data,
+				data = {
+					id			: false,
+					text		: false,
+					icon		: true,
+					parent		: p,
+					parents		: ps,
+					children	: [],
+					children_d	: [],
+					data		: null,
+					state		: { },
+					li_attr		: { id : false },
+					a_attr		: { href : '#' },
+					original	: false
+				}, i, tmp, tid;
+			for(i in this._model.default_state) {
+				if(this._model.default_state.hasOwnProperty(i)) {
+					data.state[i] = this._model.default_state[i];
+				}
+			}
+			tmp = $.vakata.attributes(d, true);
+			$.each(tmp, function (i, v) {
+				v = $.trim(v);
+				if(!v.length) { return true; }
+				data.li_attr[i] = v;
+				if(i === 'id') {
+					data.id = v.toString();
+				}
+			});
+			tmp = d.children('a').eq(0);
+			if(tmp.length) {
+				tmp = $.vakata.attributes(tmp, true);
+				$.each(tmp, function (i, v) {
+					v = $.trim(v);
+					if(v.length) {
+						data.a_attr[i] = v;
+					}
+				});
+			}
+			tmp = d.children("a:eq(0)").length ? d.children("a:eq(0)").clone() : d.clone();
+			tmp.children("ins, i, ul").remove();
+			tmp = tmp.html();
+			tmp = $('<div />').html(tmp);
+			data.text = this.settings.core.force_text ? tmp.text() : tmp.html();
+			tmp = d.data();
+			data.data = tmp ? $.extend(true, {}, tmp) : null;
+			data.state.opened = d.hasClass('jstree-open');
+			data.state.selected = d.children('a').hasClass('jstree-clicked');
+			data.state.disabled = d.children('a').hasClass('jstree-disabled');
+			if(data.data && data.data.jstree) {
+				for(i in data.data.jstree) {
+					if(data.data.jstree.hasOwnProperty(i)) {
+						data.state[i] = data.data.jstree[i];
+					}
+				}
+			}
+			tmp = d.children("a").children(".jstree-themeicon");
+			if(tmp.length) {
+				data.icon = tmp.hasClass('jstree-themeicon-hidden') ? false : tmp.attr('rel');
+			}
+			if(data.state.icon) {
+				data.icon = data.state.icon;
+			}
+			tmp = d.children("ul").children("li");
+			do {
+				tid = 'j' + this._id + '_' + (++this._cnt);
+			} while(m[tid]);
+			data.id = data.li_attr.id ? data.li_attr.id.toString() : tid;
+			if(tmp.length) {
+				tmp.each($.proxy(function (i, v) {
+					c = this._parse_model_from_html($(v), data.id, ps);
+					e = this._model.data[c];
+					data.children.push(c);
+					if(e.children_d.length) {
+						data.children_d = data.children_d.concat(e.children_d);
+					}
+				}, this));
+				data.children_d = data.children_d.concat(data.children);
+			}
+			else {
+				if(d.hasClass('jstree-closed')) {
+					data.state.loaded = false;
+				}
+			}
+			if(data.li_attr['class']) {
+				data.li_attr['class'] = data.li_attr['class'].replace('jstree-closed','').replace('jstree-open','');
+			}
+			if(data.a_attr['class']) {
+				data.a_attr['class'] = data.a_attr['class'].replace('jstree-clicked','').replace('jstree-disabled','');
+			}
+			m[data.id] = data;
+			if(data.state.selected) {
+				this._data.core.selected.push(data.id);
+			}
+			return data.id;
+		},
+		/**
+		 * parses a node from a JSON object (used when dealing with flat data, which has no nesting of children, but has id and parent properties) and appends it to the in memory tree model. Used internally.
+		 * @private
+		 * @name _parse_model_from_flat_json(d [, p, ps])
+		 * @param  {Object} d the JSON object to parse
+		 * @param  {String} p the parent ID
+		 * @param  {Array} ps list of all parents
+		 * @return {String} the ID of the object added to the model
+		 */
+		_parse_model_from_flat_json : function (d, p, ps) {
+			if(!ps) { ps = []; }
+			else { ps = ps.concat(); }
+			if(p) { ps.unshift(p); }
+			var tid = d.id.toString(),
+				m = this._model.data,
+				df = this._model.default_state,
+				i, j, c, e,
+				tmp = {
+					id			: tid,
+					text		: d.text || '',
+					icon		: d.icon !== undefined ? d.icon : true,
+					parent		: p,
+					parents		: ps,
+					children	: d.children || [],
+					children_d	: d.children_d || [],
+					data		: d.data,
+					state		: { },
+					li_attr		: { id : false },
+					a_attr		: { href : '#' },
+					original	: false
+				};
+			for(i in df) {
+				if(df.hasOwnProperty(i)) {
+					tmp.state[i] = df[i];
+				}
+			}
+			if(d && d.data && d.data.jstree && d.data.jstree.icon) {
+				tmp.icon = d.data.jstree.icon;
+			}
+			if(d && d.data) {
+				tmp.data = d.data;
+				if(d.data.jstree) {
+					for(i in d.data.jstree) {
+						if(d.data.jstree.hasOwnProperty(i)) {
+							tmp.state[i] = d.data.jstree[i];
+						}
+					}
+				}
+			}
+			if(d && typeof d.state === 'object') {
+				for (i in d.state) {
+					if(d.state.hasOwnProperty(i)) {
+						tmp.state[i] = d.state[i];
+					}
+				}
+			}
+			if(d && typeof d.li_attr === 'object') {
+				for (i in d.li_attr) {
+					if(d.li_attr.hasOwnProperty(i)) {
+						tmp.li_attr[i] = d.li_attr[i];
+					}
+				}
+			}
+			if(!tmp.li_attr.id) {
+				tmp.li_attr.id = tid;
+			}
+			if(d && typeof d.a_attr === 'object') {
+				for (i in d.a_attr) {
+					if(d.a_attr.hasOwnProperty(i)) {
+						tmp.a_attr[i] = d.a_attr[i];
+					}
+				}
+			}
+			if(d && d.children && d.children === true) {
+				tmp.state.loaded = false;
+				tmp.children = [];
+				tmp.children_d = [];
+			}
+			m[tmp.id] = tmp;
+			for(i = 0, j = tmp.children.length; i < j; i++) {
+				c = this._parse_model_from_flat_json(m[tmp.children[i]], tmp.id, ps);
+				e = m[c];
+				tmp.children_d.push(c);
+				if(e.children_d.length) {
+					tmp.children_d = tmp.children_d.concat(e.children_d);
+				}
+			}
+			delete d.data;
+			delete d.children;
+			m[tmp.id].original = d;
+			if(tmp.state.selected) {
+				this._data.core.selected.push(tmp.id);
+			}
+			return tmp.id;
+		},
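+		/*
+		 * A sketch of the flat data format handled here - every item carries its own
+		 * `id` and `parent` (ids and texts below are illustrative; a parent of '#'
+		 * attaches the item at the root level):
+		 *
+		 *	[
+		 *		{ 'id' : 'n1', 'parent' : '#', 'text' : 'Root' },
+		 *		{ 'id' : 'n2', 'parent' : 'n1', 'text' : 'Child' }
+		 *	]
+		 */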
+		/**
+		 * parses a node from a JSON object and appends it to the in memory tree model. Used internally.
+		 * @private
+		 * @name _parse_model_from_json(d [, p, ps])
+		 * @param  {Object} d the JSON object to parse
+		 * @param  {String} p the parent ID
+		 * @param  {Array} ps list of all parents
+		 * @return {String} the ID of the object added to the model
+		 */
+		_parse_model_from_json : function (d, p, ps) {
+			if(!ps) { ps = []; }
+			else { ps = ps.concat(); }
+			if(p) { ps.unshift(p); }
+			var tid = false, i, j, c, e, m = this._model.data, df = this._model.default_state, tmp;
+			do {
+				tid = 'j' + this._id + '_' + (++this._cnt);
+			} while(m[tid]);
+
+			tmp = {
+				id			: false,
+				text		: typeof d === 'string' ? d : '',
+				icon		: typeof d === 'object' && d.icon !== undefined ? d.icon : true,
+				parent		: p,
+				parents		: ps,
+				children	: [],
+				children_d	: [],
+				data		: null,
+				state		: { },
+				li_attr		: { id : false },
+				a_attr		: { href : '#' },
+				original	: false
+			};
+			for(i in df) {
+				if(df.hasOwnProperty(i)) {
+					tmp.state[i] = df[i];
+				}
+			}
+			if(d && d.id) { tmp.id = d.id.toString(); }
+			if(d && d.text) { tmp.text = d.text; }
+			if(d && d.data && d.data.jstree && d.data.jstree.icon) {
+				tmp.icon = d.data.jstree.icon;
+			}
+			if(d && d.data) {
+				tmp.data = d.data;
+				if(d.data.jstree) {
+					for(i in d.data.jstree) {
+						if(d.data.jstree.hasOwnProperty(i)) {
+							tmp.state[i] = d.data.jstree[i];
+						}
+					}
+				}
+			}
+			if(d && typeof d.state === 'object') {
+				for (i in d.state) {
+					if(d.state.hasOwnProperty(i)) {
+						tmp.state[i] = d.state[i];
+					}
+				}
+			}
+			if(d && typeof d.li_attr === 'object') {
+				for (i in d.li_attr) {
+					if(d.li_attr.hasOwnProperty(i)) {
+						tmp.li_attr[i] = d.li_attr[i];
+					}
+				}
+			}
+			if(tmp.li_attr.id && !tmp.id) {
+				tmp.id = tmp.li_attr.id.toString();
+			}
+			if(!tmp.id) {
+				tmp.id = tid;
+			}
+			if(!tmp.li_attr.id) {
+				tmp.li_attr.id = tmp.id;
+			}
+			if(d && typeof d.a_attr === 'object') {
+				for (i in d.a_attr) {
+					if(d.a_attr.hasOwnProperty(i)) {
+						tmp.a_attr[i] = d.a_attr[i];
+					}
+				}
+			}
+			if(d && d.children && d.children.length) {
+				for(i = 0, j = d.children.length; i < j; i++) {
+					c = this._parse_model_from_json(d.children[i], tmp.id, ps);
+					e = m[c];
+					tmp.children.push(c);
+					if(e.children_d.length) {
+						tmp.children_d = tmp.children_d.concat(e.children_d);
+					}
+				}
+				tmp.children_d = tmp.children_d.concat(tmp.children);
+			}
+			if(d && d.children && d.children === true) {
+				tmp.state.loaded = false;
+				tmp.children = [];
+				tmp.children_d = [];
+			}
+			delete d.data;
+			delete d.children;
+			tmp.original = d;
+			m[tmp.id] = tmp;
+			if(tmp.state.selected) {
+				this._data.core.selected.push(tmp.id);
+			}
+			return tmp.id;
+		},
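+		/*
+		 * Illustrative sketch (not part of the upstream source): the two data
+		 * shapes handled by the parsers above. Names such as 'root' and
+		 * 'child_1' are hypothetical.
+		 *
+		 *     // nested format, consumed by _parse_model_from_json:
+		 *     var nested = { id : 'root', text : 'Root', state : { opened : true },
+		 *                    children : [ { id : 'child_1', text : 'Child' } ] };
+		 *     // flat format - no nesting, each node names its parent instead:
+		 *     var flat = [ { id : 'root', parent : '#', text : 'Root' },
+		 *                  { id : 'child_1', parent : 'root', text : 'Child' } ];
+		 */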
+		/**
+		 * redraws all nodes that need to be redrawn. Used internally.
+		 * @private
+		 * @name _redraw()
+		 * @trigger redraw.jstree
+		 */
+		_redraw : function () {
+			var nodes = this._model.force_full_redraw ? this._model.data['#'].children.concat([]) : this._model.changed.concat([]),
+				f = document.createElement('UL'), tmp, i, j;
+			for(i = 0, j = nodes.length; i < j; i++) {
+				tmp = this.redraw_node(nodes[i], true, this._model.force_full_redraw);
+				if(tmp && this._model.force_full_redraw) {
+					f.appendChild(tmp);
+				}
+			}
+			if(this._model.force_full_redraw) {
+				f.className = this.get_container_ul()[0].className;
+				this.element.empty().append(f);
+				//this.get_container_ul()[0].appendChild(f);
+			}
+			this._model.force_full_redraw = false;
+			this._model.changed = [];
+			/**
+			 * triggered after nodes are redrawn
+			 * @event
+			 * @name redraw.jstree
+			 * @param {array} nodes the redrawn nodes
+			 */
+			this.trigger('redraw', { "nodes" : nodes });
+		},
+		/**
+		 * redraws all nodes that need to be redrawn or, optionally, the whole tree
+		 * @name redraw([full])
+		 * @param {Boolean} full if set to `true` all nodes are redrawn.
+		 */
+		redraw : function (full) {
+			if(full) {
+				this._model.force_full_redraw = true;
+			}
+			//if(this._model.redraw_timeout) {
+			//	clearTimeout(this._model.redraw_timeout);
+			//}
+			//this._model.redraw_timeout = setTimeout($.proxy(this._redraw, this),0);
+			this._redraw();
+		},
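+		/*
+		 * Usage sketch (illustrative, not part of the upstream source; '#tree'
+		 * is a hypothetical container ID):
+		 *
+		 *     var inst = $.jstree.reference('#tree');
+		 *     inst.redraw();     // repaint only the nodes marked as changed
+		 *     inst.redraw(true); // force a repaint of the whole tree
+		 */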
+		/**
+		 * redraws a single node. Used internally.
+		 * @private
+		 * @name redraw_node(node, deep, is_callback)
+		 * @param {mixed} node the node to redraw
+		 * @param {Boolean} deep should child nodes be redrawn too
+		 * @param {Boolean} is_callback is this a recursion call
+		 */
+		redraw_node : function (node, deep, is_callback) {
+			var obj = this.get_node(node),
+				par = false,
+				ind = false,
+				old = false,
+				i = false,
+				j = false,
+				k = false,
+				c = '',
+				d = document,
+				m = this._model.data,
+				f = false,
+				s = false,
+				tmp = null;
+			if(!obj) { return false; }
+			if(obj.id === '#') {  return this.redraw(true); }
+			deep = deep || obj.children.length === 0;
+			node = !document.querySelector ? document.getElementById(obj.id) : this.element[0].querySelector('#' + ("0123456789".indexOf(obj.id[0]) !== -1 ? '\\3' + obj.id[0] + ' ' + obj.id.substr(1).replace($.jstree.idregex,'\\$&') : obj.id.replace($.jstree.idregex,'\\$&')) ); //, this.element);
+			if(!node) {
+				deep = true;
+				//node = d.createElement('LI');
+				if(!is_callback) {
+					par = obj.parent !== '#' ? $('#' + obj.parent.replace($.jstree.idregex,'\\$&'), this.element)[0] : null;
+					if(par !== null && (!par || !m[obj.parent].state.opened)) {
+						return false;
+					}
+					ind = $.inArray(obj.id, par === null ? m['#'].children : m[obj.parent].children);
+				}
+			}
+			else {
+				node = $(node);
+				if(!is_callback) {
+					par = node.parent().parent()[0];
+					if(par === this.element[0]) {
+						par = null;
+					}
+					ind = node.index();
+				}
+				// m[obj.id].data = node.data(); // use only node's data, no need to touch jquery storage
+				if(!deep && obj.children.length && !node.children('.jstree-children').length) {
+					deep = true;
+				}
+				if(!deep) {
+					old = node.children('.jstree-children')[0];
+				}
+				s = node.attr('aria-selected');
+				f = node.children('.jstree-anchor')[0] === document.activeElement;
+				node.remove();
+				//node = d.createElement('LI');
+				//node = node[0];
+			}
+			node = _node.cloneNode(true);
+			// node is DOM, deep is boolean
+
+			c = 'jstree-node ';
+			for(i in obj.li_attr) {
+				if(obj.li_attr.hasOwnProperty(i)) {
+					if(i === 'id') { continue; }
+					if(i !== 'class') {
+						node.setAttribute(i, obj.li_attr[i]);
+					}
+					else {
+						c += obj.li_attr[i];
+					}
+				}
+			}
+			if(s && s !== "false") {
+				node.setAttribute('aria-selected', true);
+			}
+			if(obj.state.loaded && !obj.children.length) {
+				c += ' jstree-leaf';
+			}
+			else {
+				c += obj.state.opened && obj.state.loaded ? ' jstree-open' : ' jstree-closed';
+				node.setAttribute('aria-expanded', (obj.state.opened && obj.state.loaded) );
+			}
+			if(obj.parent !== null && m[obj.parent].children[m[obj.parent].children.length - 1] === obj.id) {
+				c += ' jstree-last';
+			}
+			node.id = obj.id;
+			node.className = c;
+			c = ( obj.state.selected ? ' jstree-clicked' : '') + ( obj.state.disabled ? ' jstree-disabled' : '');
+			for(j in obj.a_attr) {
+				if(obj.a_attr.hasOwnProperty(j)) {
+					if(j === 'href' && obj.a_attr[j] === '#') { continue; }
+					if(j !== 'class') {
+						node.childNodes[1].setAttribute(j, obj.a_attr[j]);
+					}
+					else {
+						c += ' ' + obj.a_attr[j];
+					}
+				}
+			}
+			if(c.length) {
+				node.childNodes[1].className = 'jstree-anchor ' + c;
+			}
+			if((obj.icon && obj.icon !== true) || obj.icon === false) {
+				if(obj.icon === false) {
+					node.childNodes[1].childNodes[0].className += ' jstree-themeicon-hidden';
+				}
+				else if(obj.icon.indexOf('/') === -1 && obj.icon.indexOf('.') === -1) {
+					node.childNodes[1].childNodes[0].className += ' ' + obj.icon + ' jstree-themeicon-custom';
+				}
+				else {
+					node.childNodes[1].childNodes[0].style.backgroundImage = 'url('+obj.icon+')';
+					node.childNodes[1].childNodes[0].style.backgroundPosition = 'center center';
+					node.childNodes[1].childNodes[0].style.backgroundSize = 'auto';
+					node.childNodes[1].childNodes[0].className += ' jstree-themeicon-custom';
+				}
+			}
+
+			if(this.settings.core.force_text) {
+				node.childNodes[1].appendChild(d.createTextNode(obj.text));
+			}
+			else {
+				node.childNodes[1].innerHTML += obj.text;
+			}
+
+			if(deep && obj.children.length && obj.state.opened && obj.state.loaded) {
+				k = d.createElement('UL');
+				k.setAttribute('role', 'group');
+				k.className = 'jstree-children';
+				for(i = 0, j = obj.children.length; i < j; i++) {
+					k.appendChild(this.redraw_node(obj.children[i], deep, true));
+				}
+				node.appendChild(k);
+			}
+			if(old) {
+				node.appendChild(old);
+			}
+			if(!is_callback) {
+				// append back using par / ind
+				if(!par) {
+					par = this.element[0];
+				}
+				for(i = 0, j = par.childNodes.length; i < j; i++) {
+					if(par.childNodes[i] && par.childNodes[i].className && par.childNodes[i].className.indexOf('jstree-children') !== -1) {
+						tmp = par.childNodes[i];
+						break;
+					}
+				}
+				if(!tmp) {
+					tmp = d.createElement('UL');
+					tmp.setAttribute('role', 'group');
+					tmp.className = 'jstree-children';
+					par.appendChild(tmp);
+				}
+				par = tmp;
+
+				if(ind < par.childNodes.length) {
+					par.insertBefore(node, par.childNodes[ind]);
+				}
+				else {
+					par.appendChild(node);
+				}
+				if(f) {
+					node.childNodes[1].focus();
+				}
+			}
+			if(obj.state.opened && !obj.state.loaded) {
+				obj.state.opened = false;
+				setTimeout($.proxy(function () {
+					this.open_node(obj.id, false, 0);
+				}, this), 0);
+			}
+			return node;
+		},
+		/**
+		 * opens a node, revealing its children. If the node is not loaded it will be loaded and opened once ready.
+		 * @name open_node(obj [, callback, animation])
+		 * @param {mixed} obj the node to open
+		 * @param {Function} callback a function to execute once the node is opened
+		 * @param {Number} animation the animation duration in milliseconds when opening the node (overrides the `core.animation` setting). Use `false` for no animation.
+		 * @trigger open_node.jstree, after_open.jstree, before_open.jstree
+		 */
+		open_node : function (obj, callback, animation) {
+			var t1, t2, d, t;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.open_node(obj[t1], callback, animation);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			animation = animation === undefined ? this.settings.core.animation : animation;
+			if(!this.is_closed(obj)) {
+				if(callback) {
+					callback.call(this, obj, false);
+				}
+				return false;
+			}
+			if(!this.is_loaded(obj)) {
+				if(this.is_loading(obj)) {
+					return setTimeout($.proxy(function () {
+						this.open_node(obj, callback, animation);
+					}, this), 500);
+				}
+				this.load_node(obj, function (o, ok) {
+					return ok ? this.open_node(o, callback, animation) : (callback ? callback.call(this, o, false) : false);
+				});
+			}
+			else {
+				d = this.get_node(obj, true);
+				t = this;
+				if(d.length) {
+					if(obj.children.length && !this._firstChild(d.children('.jstree-children')[0])) {
+						obj.state.opened = true;
+						this.redraw_node(obj, true);
+						d = this.get_node(obj, true);
+					}
+					if(!animation) {
+						this.trigger('before_open', { "node" : obj });
+						d[0].className = d[0].className.replace('jstree-closed', 'jstree-open');
+						d[0].setAttribute("aria-expanded", true);
+					}
+					else {
+						this.trigger('before_open', { "node" : obj });
+						d
+							.children(".jstree-children").css("display","none").end()
+							.removeClass("jstree-closed").addClass("jstree-open").attr("aria-expanded", true)
+							.children(".jstree-children").stop(true, true)
+								.slideDown(animation, function () {
+									this.style.display = "";
+									t.trigger("after_open", { "node" : obj });
+								});
+					}
+				}
+				obj.state.opened = true;
+				if(callback) {
+					callback.call(this, obj, true);
+				}
+				if(!d.length) {
+					/**
+					 * triggered when a node is about to be opened (if the node is supposed to be in the DOM, it will be, but it won't be visible yet)
+					 * @event
+					 * @name before_open.jstree
+					 * @param {Object} node the opened node
+					 */
+					this.trigger('before_open', { "node" : obj });
+				}
+				/**
+				 * triggered when a node is opened (if there is an animation it will not be completed yet)
+				 * @event
+				 * @name open_node.jstree
+				 * @param {Object} node the opened node
+				 */
+				this.trigger('open_node', { "node" : obj });
+				if(!animation || !d.length) {
+					/**
+					 * triggered when a node is opened and the animation is complete
+					 * @event
+					 * @name after_open.jstree
+					 * @param {Object} node the opened node
+					 */
+					this.trigger("after_open", { "node" : obj });
+				}
+			}
+		},
+		/**
+		 * opens every parent of a node (node should be loaded)
+		 * @name _open_to(obj)
+		 * @param {mixed} obj the node to reveal
+		 * @private
+		 */
+		_open_to : function (obj) {
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			var i, j, p = obj.parents;
+			for(i = 0, j = p.length; i < j; i+=1) {
+				if(p[i] !== '#') { // compare the parent ID, not the loop index, so the invisible root is skipped
+					this.open_node(p[i], false, 0);
+				}
+			}
+			}
+			return $('#' + obj.id.replace($.jstree.idregex,'\\$&'), this.element);
+		},
+		/**
+		 * closes a node, hiding its children
+		 * @name close_node(obj [, animation])
+		 * @param {mixed} obj the node to close
+		 * @param {Number} animation the animation duration in milliseconds when closing the node (overrides the `core.animation` setting). Use `false` for no animation.
+		 * @trigger close_node.jstree, after_close.jstree
+		 */
+		close_node : function (obj, animation) {
+			var t1, t2, t, d;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.close_node(obj[t1], animation);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			if(this.is_closed(obj)) {
+				return false;
+			}
+			animation = animation === undefined ? this.settings.core.animation : animation;
+			t = this;
+			d = this.get_node(obj, true);
+			if(d.length) {
+				if(!animation) {
+					d[0].className = d[0].className.replace('jstree-open', 'jstree-closed');
+					d.attr("aria-expanded", false).children('.jstree-children').remove();
+				}
+				else {
+					d
+						.children(".jstree-children").attr("style","display:block !important").end()
+						.removeClass("jstree-open").addClass("jstree-closed").attr("aria-expanded", false)
+						.children(".jstree-children").stop(true, true).slideUp(animation, function () {
+							this.style.display = "";
+							d.children('.jstree-children').remove();
+							t.trigger("after_close", { "node" : obj });
+						});
+				}
+			}
+			obj.state.opened = false;
+			/**
+			 * triggered when a node is closed (if there is an animation it will not be complete yet)
+			 * @event
+			 * @name close_node.jstree
+			 * @param {Object} node the closed node
+			 */
+			this.trigger('close_node',{ "node" : obj });
+			if(!animation || !d.length) {
+				/**
+				 * triggered when a node is closed and the animation is complete
+				 * @event
+				 * @name after_close.jstree
+				 * @param {Object} node the closed node
+				 */
+				this.trigger("after_close", { "node" : obj });
+			}
+		},
+		/**
+		 * toggles a node - closing it if it is open, opening it if it is closed
+		 * @name toggle_node(obj)
+		 * @param {mixed} obj the node to toggle
+		 */
+		toggle_node : function (obj) {
+			var t1, t2;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.toggle_node(obj[t1]);
+				}
+				return true;
+			}
+			if(this.is_closed(obj)) {
+				return this.open_node(obj);
+			}
+			if(this.is_open(obj)) {
+				return this.close_node(obj);
+			}
+		},
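+		/*
+		 * Usage sketch for the three calls above (illustrative; node IDs and
+		 * the '#tree' container are hypothetical):
+		 *
+		 *     var inst = $.jstree.reference('#tree');
+		 *     inst.open_node('node_1', function (node, ok) {
+		 *         // invoked once the node is loaded (if needed) and opened
+		 *     }, 200);                      // 200ms slide, false disables animation
+		 *     inst.close_node('node_1', 0); // close immediately, no animation
+		 *     inst.toggle_node('node_1');   // open if closed, close if open
+		 */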
+		/**
+		 * opens all nodes within a node (or the tree), revealing their children. If the node is not loaded it will be loaded and opened once ready.
+		 * @name open_all([obj, animation, original_obj])
+		 * @param {mixed} obj the node to open recursively, omit to open all nodes in the tree
+		 * @param {Number} animation the animation duration in milliseconds when opening the nodes, the default is no animation
+		 * @param {jQuery} original_obj reference to the node that started the process (internal use)
+		 * @trigger open_all.jstree
+		 */
+		open_all : function (obj, animation, original_obj) {
+			if(!obj) { obj = '#'; }
+			obj = this.get_node(obj);
+			if(!obj) { return false; }
+			var dom = obj.id === '#' ? this.get_container_ul() : this.get_node(obj, true), i, j, _this;
+			if(!dom.length) {
+				for(i = 0, j = obj.children_d.length; i < j; i++) {
+					if(this.is_closed(this._model.data[obj.children_d[i]])) {
+						this._model.data[obj.children_d[i]].state.opened = true;
+					}
+				}
+				return this.trigger('open_all', { "node" : obj });
+			}
+			original_obj = original_obj || dom;
+			_this = this;
+			dom = this.is_closed(obj) ? dom.find('.jstree-closed').addBack() : dom.find('.jstree-closed');
+			dom.each(function () {
+				_this.open_node(
+					this,
+					function(node, status) { if(status && this.is_parent(node)) { this.open_all(node, animation, original_obj); } },
+					animation || 0
+				);
+			});
+			if(original_obj.find('.jstree-closed').length === 0) {
+				/**
+				 * triggered when an `open_all` call completes
+				 * @event
+				 * @name open_all.jstree
+				 * @param {Object} node the opened node
+				 */
+				this.trigger('open_all', { "node" : this.get_node(original_obj) });
+			}
+		},
+		/**
+		 * closes all nodes within a node (or the tree), hiding their children
+		 * @name close_all([obj, animation])
+		 * @param {mixed} obj the node to close recursively, omit to close all nodes in the tree
+		 * @param {Number} animation the animation duration in milliseconds when closing the nodes, the default is no animation
+		 * @trigger close_all.jstree
+		 */
+		close_all : function (obj, animation) {
+			if(!obj) { obj = '#'; }
+			obj = this.get_node(obj);
+			if(!obj) { return false; }
+			var dom = obj.id === '#' ? this.get_container_ul() : this.get_node(obj, true),
+				_this = this, i, j;
+			if(!dom.length) {
+				for(i = 0, j = obj.children_d.length; i < j; i++) {
+					this._model.data[obj.children_d[i]].state.opened = false;
+				}
+				return this.trigger('close_all', { "node" : obj });
+			}
+			dom = this.is_open(obj) ? dom.find('.jstree-open').addBack() : dom.find('.jstree-open');
+			$(dom.get().reverse()).each(function () { _this.close_node(this, animation || 0); });
+			/**
+			 * triggered when a `close_all` call completes
+			 * @event
+			 * @name close_all.jstree
+			 * @param {Object} node the closed node
+			 */
+			this.trigger('close_all', { "node" : obj });
+		},
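+		/*
+		 * Usage sketch (illustrative): expanding or collapsing a whole subtree.
+		 *
+		 *     var inst = $.jstree.reference('#tree'); // hypothetical container
+		 *     inst.open_all();           // open every node, no animation
+		 *     inst.open_all('node_1');   // open only the subtree under 'node_1'
+		 *     inst.close_all(null, 100); // collapse the whole tree, 100ms slide
+		 */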
+		/**
+		 * checks if a node is disabled (not selectable)
+		 * @name is_disabled(obj)
+		 * @param  {mixed} obj
+		 * @return {Boolean}
+		 */
+		is_disabled : function (obj) {
+			obj = this.get_node(obj);
+			return obj && obj.state && obj.state.disabled;
+		},
+		/**
+		 * enables a node - so that it can be selected
+		 * @name enable_node(obj)
+		 * @param {mixed} obj the node to enable
+		 * @trigger enable_node.jstree
+		 */
+		enable_node : function (obj) {
+			var t1, t2;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.enable_node(obj[t1]);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			obj.state.disabled = false;
+			this.get_node(obj,true).children('.jstree-anchor').removeClass('jstree-disabled');
+			/**
+			 * triggered when a node is enabled
+			 * @event
+			 * @name enable_node.jstree
+			 * @param {Object} node the enabled node
+			 */
+			this.trigger('enable_node', { 'node' : obj });
+		},
+		/**
+		 * disables a node - so that it can not be selected
+		 * @name disable_node(obj)
+		 * @param {mixed} obj the node to disable
+		 * @trigger disable_node.jstree
+		 */
+		disable_node : function (obj) {
+			var t1, t2;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.disable_node(obj[t1]);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			obj.state.disabled = true;
+			this.get_node(obj,true).children('.jstree-anchor').addClass('jstree-disabled');
+			/**
+			 * triggered when a node is disabled
+			 * @event
+			 * @name disable_node.jstree
+			 * @param {Object} node the disabled node
+			 */
+			this.trigger('disable_node', { 'node' : obj });
+		},
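+		/*
+		 * Usage sketch (illustrative; IDs are hypothetical):
+		 *
+		 *     var inst = $.jstree.reference('#tree');
+		 *     inst.disable_node('node_1');    // grey out and block selection
+		 *     if(inst.is_disabled('node_1')) {
+		 *         inst.enable_node('node_1'); // make it selectable again
+		 *     }
+		 */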
+		/**
+		 * called when a node is selected by the user. Used internally.
+		 * @private
+		 * @name activate_node(obj, e)
+		 * @param {mixed} obj the node
+		 * @param {Object} e the related event
+		 * @trigger activate_node.jstree
+		 */
+		activate_node : function (obj, e) {
+			if(this.is_disabled(obj)) {
+				return false;
+			}
+
+			// ensure last_clicked is still in the DOM and refresh it (it may have been moved); make sure it is still selected - if not, fall back to the last selected node
+			this._data.core.last_clicked = this._data.core.last_clicked && this._data.core.last_clicked.id !== undefined ? this.get_node(this._data.core.last_clicked.id) : null;
+			if(this._data.core.last_clicked && !this._data.core.last_clicked.state.selected) { this._data.core.last_clicked = null; }
+			if(!this._data.core.last_clicked && this._data.core.selected.length) { this._data.core.last_clicked = this.get_node(this._data.core.selected[this._data.core.selected.length - 1]); }
+
+			if(!this.settings.core.multiple || (!e.metaKey && !e.ctrlKey && !e.shiftKey) || (e.shiftKey && (!this._data.core.last_clicked || !this.get_parent(obj) || this.get_parent(obj) !== this._data.core.last_clicked.parent ) )) {
+				if(!this.settings.core.multiple && (e.metaKey || e.ctrlKey || e.shiftKey) && this.is_selected(obj)) {
+					this.deselect_node(obj, false, e);
+				}
+				else {
+					this.deselect_all(true);
+					this.select_node(obj, false, false, e);
+					this._data.core.last_clicked = this.get_node(obj);
+				}
+			}
+			else {
+				if(e.shiftKey) {
+					var o = this.get_node(obj).id,
+						l = this._data.core.last_clicked.id,
+						p = this.get_node(this._data.core.last_clicked.parent).children,
+						c = false,
+						i, j;
+					for(i = 0, j = p.length; i < j; i += 1) {
+						// separate IFs work when o and l are the same
+						if(p[i] === o) {
+							c = !c;
+						}
+						if(p[i] === l) {
+							c = !c;
+						}
+						if(c || p[i] === o || p[i] === l) {
+							this.select_node(p[i], false, false, e);
+						}
+						else {
+							this.deselect_node(p[i], false, e);
+						}
+					}
+				}
+				else {
+					if(!this.is_selected(obj)) {
+						this.select_node(obj, false, false, e);
+					}
+					else {
+						this.deselect_node(obj, false, e);
+					}
+				}
+			}
+			/**
+			 * triggered when a node is clicked or interacted with by the user
+			 * @event
+			 * @name activate_node.jstree
+			 * @param {Object} node
+			 */
+			this.trigger('activate_node', { 'node' : this.get_node(obj) });
+		},
+		/**
+		 * applies the hover state on a node, called when a node is hovered by the user. Used internally.
+		 * @private
+		 * @name hover_node(obj)
+		 * @param {mixed} obj
+		 * @trigger hover_node.jstree
+		 */
+		hover_node : function (obj) {
+			obj = this.get_node(obj, true);
+			if(!obj || !obj.length || obj.children('.jstree-hovered').length) {
+				return false;
+			}
+			var o = this.element.find('.jstree-hovered'), t = this.element;
+			if(o && o.length) { this.dehover_node(o); }
+
+			obj.children('.jstree-anchor').addClass('jstree-hovered');
+			/**
+			 * triggered when a node is hovered
+			 * @event
+			 * @name hover_node.jstree
+			 * @param {Object} node
+			 */
+			this.trigger('hover_node', { 'node' : this.get_node(obj) });
+			setTimeout(function () { t.attr('aria-activedescendant', obj[0].id); obj.attr('aria-selected', true); }, 0);
+		},
+		/**
+		 * removes the hover state from a node. Called when a node is no longer hovered by the user. Used internally.
+		 * @private
+		 * @name dehover_node(obj)
+		 * @param {mixed} obj
+		 * @trigger dehover_node.jstree
+		 */
+		dehover_node : function (obj) {
+			obj = this.get_node(obj, true);
+			if(!obj || !obj.length || !obj.children('.jstree-hovered').length) {
+				return false;
+			}
+			obj.attr('aria-selected', false).children('.jstree-anchor').removeClass('jstree-hovered');
+			/**
+			 * triggered when a node is no longer hovered
+			 * @event
+			 * @name dehover_node.jstree
+			 * @param {Object} node
+			 */
+			this.trigger('dehover_node', { 'node' : this.get_node(obj) });
+		},
+		/**
+		 * select a node
+		 * @name select_node(obj [, supress_event, prevent_open])
+		 * @param {mixed} obj an array can be used to select multiple nodes
+		 * @param {Boolean} supress_event if set to `true` the `changed.jstree` event won't be triggered
+		 * @param {Boolean} prevent_open if set to `true` parents of the selected node won't be opened
+		 * @trigger select_node.jstree, changed.jstree
+		 */
+		select_node : function (obj, supress_event, prevent_open, e) {
+			var dom, t1, t2, th;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.select_node(obj[t1], supress_event, prevent_open, e);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			dom = this.get_node(obj, true);
+			if(!obj.state.selected) {
+				obj.state.selected = true;
+				this._data.core.selected.push(obj.id);
+				if(!prevent_open) {
+					dom = this._open_to(obj);
+				}
+				if(dom && dom.length) {
+					dom.children('.jstree-anchor').addClass('jstree-clicked');
+				}
+				/**
+				 * triggered when a node is selected
+				 * @event
+				 * @name select_node.jstree
+				 * @param {Object} node
+				 * @param {Array} selected the current selection
+				 * @param {Object} event the event (if any) that triggered this select_node
+				 */
+				this.trigger('select_node', { 'node' : obj, 'selected' : this._data.core.selected, 'event' : e });
+				if(!supress_event) {
+					/**
+					 * triggered when selection changes
+					 * @event
+					 * @name changed.jstree
+					 * @param {Object} node
+					 * @param {Object} action the action that caused the selection to change
+					 * @param {Array} selected the current selection
+					 * @param {Object} event the event (if any) that triggered this changed event
+					 */
+					this.trigger('changed', { 'action' : 'select_node', 'node' : obj, 'selected' : this._data.core.selected, 'event' : e });
+				}
+			}
+		},
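+		/*
+		 * Usage sketch (illustrative): selecting a node programmatically and
+		 * reacting to the resulting `changed.jstree` event; '#tree' is a
+		 * hypothetical container.
+		 *
+		 *     $('#tree').on('changed.jstree', function (e, data) {
+		 *         console.log(data.action, data.selected); // e.g. "select_node", [ids]
+		 *     });
+		 *     $.jstree.reference('#tree').select_node('node_1');
+		 */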
+		/**
+		 * deselect a node
+		 * @name deselect_node(obj [, supress_event])
+		 * @param {mixed} obj an array can be used to deselect multiple nodes
+		 * @param {Boolean} supress_event if set to `true` the `changed.jstree` event won't be triggered
+		 * @trigger deselect_node.jstree, changed.jstree
+		 */
+		deselect_node : function (obj, supress_event, e) {
+			var t1, t2, dom;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.deselect_node(obj[t1], supress_event, e);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			dom = this.get_node(obj, true);
+			if(obj.state.selected) {
+				obj.state.selected = false;
+				this._data.core.selected = $.vakata.array_remove_item(this._data.core.selected, obj.id);
+				if(dom.length) {
+					dom.children('.jstree-anchor').removeClass('jstree-clicked');
+				}
+				/**
+				 * triggered when a node is deselected
+				 * @event
+				 * @name deselect_node.jstree
+				 * @param {Object} node
+				 * @param {Array} selected the current selection
+				 * @param {Object} event the event (if any) that triggered this deselect_node
+				 */
+				this.trigger('deselect_node', { 'node' : obj, 'selected' : this._data.core.selected, 'event' : e });
+				if(!supress_event) {
+					this.trigger('changed', { 'action' : 'deselect_node', 'node' : obj, 'selected' : this._data.core.selected, 'event' : e });
+				}
+			}
+		},
+		/**
+		 * select all nodes in the tree
+		 * @name select_all([supress_event])
+		 * @param {Boolean} supress_event if set to `true` the `changed.jstree` event won't be triggered
+		 * @trigger select_all.jstree, changed.jstree
+		 */
+		select_all : function (supress_event) {
+			var tmp = this._data.core.selected.concat([]), i, j;
+			this._data.core.selected = this._model.data['#'].children_d.concat();
+			for(i = 0, j = this._data.core.selected.length; i < j; i++) {
+				if(this._model.data[this._data.core.selected[i]]) {
+					this._model.data[this._data.core.selected[i]].state.selected = true;
+				}
+			}
+			this.redraw(true);
+			/**
+			 * triggered when all nodes are selected
+			 * @event
+			 * @name select_all.jstree
+			 * @param {Array} selected the current selection
+			 */
+			this.trigger('select_all', { 'selected' : this._data.core.selected });
+			if(!supress_event) {
+				this.trigger('changed', { 'action' : 'select_all', 'selected' : this._data.core.selected, 'old_selection' : tmp });
+			}
+		},
+		/**
+		 * deselect all selected nodes
+		 * @name deselect_all([supress_event])
+		 * @param {Boolean} supress_event if set to `true` the `changed.jstree` event won't be triggered
+		 * @trigger deselect_all.jstree, changed.jstree
+		 */
+		deselect_all : function (supress_event) {
+			var tmp = this._data.core.selected.concat([]), i, j;
+			for(i = 0, j = this._data.core.selected.length; i < j; i++) {
+				if(this._model.data[this._data.core.selected[i]]) {
+					this._model.data[this._data.core.selected[i]].state.selected = false;
+				}
+			}
+			this._data.core.selected = [];
+			this.element.find('.jstree-clicked').removeClass('jstree-clicked');
+			/**
+			 * triggered when all nodes are deselected
+			 * @event
+			 * @name deselect_all.jstree
+			 * @param {Object} node the previous selection
+			 * @param {Array} selected the current selection
+			 */
+			this.trigger('deselect_all', { 'selected' : this._data.core.selected, 'node' : tmp });
+			if(!supress_event) {
+				this.trigger('changed', { 'action' : 'deselect_all', 'selected' : this._data.core.selected, 'old_selection' : tmp });
+			}
+		},
+		/**
+		 * checks if a node is selected
+		 * @name is_selected(obj)
+		 * @param  {mixed}  obj
+		 * @return {Boolean}
+		 */
+		is_selected : function (obj) {
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			return obj.state.selected;
+		},
+		/**
+		 * get an array of all selected nodes
+		 * @name get_selected([full])
+		 * @param  {mixed}  full if set to `true` the returned array will consist of the full node objects, otherwise - only IDs will be returned
+		 * @return {Array}
+		 */
+		get_selected : function (full) {
+			return full ? $.map(this._data.core.selected, $.proxy(function (i) { return this.get_node(i); }, this)) : this._data.core.selected.slice();
+		},
+		/**
+		 * get an array of all top level selected nodes (ignoring children of selected nodes)
+		 * @name get_top_selected([full])
+		 * @param  {mixed}  full if set to `true` the returned array will consist of the full node objects, otherwise - only IDs will be returned
+		 * @return {Array}
+		 */
+		get_top_selected : function (full) {
+			var tmp = this.get_selected(true),
+				obj = {}, i, j, k, l;
+			for(i = 0, j = tmp.length; i < j; i++) {
+				obj[tmp[i].id] = tmp[i];
+			}
+			for(i = 0, j = tmp.length; i < j; i++) {
+				for(k = 0, l = tmp[i].children_d.length; k < l; k++) {
+					if(obj[tmp[i].children_d[k]]) {
+						delete obj[tmp[i].children_d[k]];
+					}
+				}
+			}
+			tmp = [];
+			for(i in obj) {
+				if(obj.hasOwnProperty(i)) {
+					tmp.push(i);
+				}
+			}
+			return full ? $.map(tmp, $.proxy(function (i) { return this.get_node(i); }, this)) : tmp;
+		},
+		/**
+		 * get an array of all bottom level selected nodes (ignoring selected parents)
+		 * @name get_bottom_selected([full])
+		 * @param  {mixed}  full if set to `true` the returned array will consist of the full node objects, otherwise - only IDs will be returned
+		 * @return {Array}
+		 */
+		get_bottom_selected : function (full) {
+			var tmp = this.get_selected(true),
+				obj = [], i, j;
+			for(i = 0, j = tmp.length; i < j; i++) {
+				if(!tmp[i].children.length) {
+					obj.push(tmp[i].id);
+				}
+			}
+			return full ? $.map(obj, $.proxy(function (i) { return this.get_node(i); }, this)) : obj;
+		},
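+		/*
+		 * Usage sketch for the selection getters above (illustrative; the IDs
+		 * shown are hypothetical):
+		 *
+		 *     var inst = $.jstree.reference('#tree');
+		 *     inst.get_selected();        // [ 'a', 'a_child' ] - IDs only
+		 *     inst.get_selected(true);    // full node objects instead of IDs
+		 *     inst.get_top_selected();    // [ 'a' ] - drops selected descendants
+		 *     inst.get_bottom_selected(); // leaves only, ignores selected parents
+		 */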
+		/**
+		 * gets the current state of the tree so that it can be restored later with `set_state(state)`. Used internally.
+		 * @name get_state()
+		 * @private
+		 * @return {Object}
+		 */
+		get_state : function () {
+			var state	= {
+				'core' : {
+					'open' : [],
+					'scroll' : {
+						'left' : this.element.scrollLeft(),
+						'top' : this.element.scrollTop()
+					},
+					/*!
+					'themes' : {
+						'name' : this.get_theme(),
+						'icons' : this._data.core.themes.icons,
+						'dots' : this._data.core.themes.dots
+					},
+					*/
+					'selected' : []
+				}
+			}, i;
+			for(i in this._model.data) {
+				if(this._model.data.hasOwnProperty(i)) {
+					if(i !== '#') {
+						if(this._model.data[i].state.opened) {
+							state.core.open.push(i);
+						}
+						if(this._model.data[i].state.selected) {
+							state.core.selected.push(i);
+						}
+					}
+				}
+			}
+			return state;
+		},
+		/**
+		 * sets the state of the tree. Used internally.
+		 * @name set_state(state [, callback])
+		 * @private
+		 * @param {Object} state the state to restore
+		 * @param {Function} callback an optional function to execute once the state is restored.
+		 * @trigger set_state.jstree
+		 */
+		set_state : function (state, callback) {
+			if(state) {
+				if(state.core) {
+					var res, n, t, _this;
+					if(state.core.open) {
+						if(!$.isArray(state.core.open)) {
+							delete state.core.open;
+							this.set_state(state, callback);
+							return false;
+						}
+						res = true;
+						n = false;
+						t = this;
+						$.each(state.core.open.concat([]), function (i, v) {
+							n = t.get_node(v);
+							if(n) {
+								if(t.is_loaded(v)) {
+									if(t.is_closed(v)) {
+										t.open_node(v, false, 0);
+									}
+									if(state && state.core && state.core.open) {
+										$.vakata.array_remove_item(state.core.open, v);
+									}
+								}
+								else {
+									if(!t.is_loading(v)) {
+										t.open_node(v, $.proxy(function (o, s) {
+											if(!s && state && state.core && state.core.open) {
+												$.vakata.array_remove_item(state.core.open, o.id);
+											}
+											this.set_state(state, callback);
+										}, t), 0);
+									}
+									// there will be some async activity - so wait for it
+									res = false;
+								}
+							}
+						});
+						if(res) {
+							delete state.core.open;
+							this.set_state(state, callback);
+						}
+						return false;
+					}
+					if(state.core.scroll) {
+						if(state.core.scroll && state.core.scroll.left !== undefined) {
+							this.element.scrollLeft(state.core.scroll.left);
+						}
+						if(state.core.scroll && state.core.scroll.top !== undefined) {
+							this.element.scrollTop(state.core.scroll.top);
+						}
+						delete state.core.scroll;
+						this.set_state(state, callback);
+						return false;
+					}
+					/*!
+					if(state.core.themes) {
+						if(state.core.themes.name) {
+							this.set_theme(state.core.themes.name);
+						}
+						if(typeof state.core.themes.dots !== 'undefined') {
+							this[ state.core.themes.dots ? "show_dots" : "hide_dots" ]();
+						}
+						if(typeof state.core.themes.icons !== 'undefined') {
+							this[ state.core.themes.icons ? "show_icons" : "hide_icons" ]();
+						}
+						delete state.core.themes;
+						delete state.core.open;
+						this.set_state(state, callback);
+						return false;
+					}
+					*/
+					if(state.core.selected) {
+						_this = this;
+						this.deselect_all();
+						$.each(state.core.selected, function (i, v) {
+							_this.select_node(v);
+						});
+						delete state.core.selected;
+						this.set_state(state, callback);
+						return false;
+					}
+					if($.isEmptyObject(state.core)) {
+						delete state.core;
+						this.set_state(state, callback);
+						return false;
+					}
+				}
+				if($.isEmptyObject(state)) {
+					state = null;
+					if(callback) { callback.call(this); }
+					/**
+					 * triggered when a `set_state` call completes
+					 * @event
+					 * @name set_state.jstree
+					 */
+					this.trigger('set_state');
+					return false;
+				}
+				return true;
+			}
+			return false;
+		},
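+		/*
+		 * Usage sketch (illustrative): persisting open/selected/scroll state
+		 * across a page reload, here via a hypothetical localStorage key.
+		 * set_state consumes the object it is given, so pass a copy.
+		 *
+		 *     var inst = $.jstree.reference('#tree');
+		 *     localStorage.setItem('tree_state', JSON.stringify(inst.get_state()));
+		 *     // ... later ...
+		 *     inst.set_state(JSON.parse(localStorage.getItem('tree_state')),
+		 *         function () { console.log('state restored'); });
+		 */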
+		/**
+		 * refreshes the tree - all nodes are reloaded with calls to `load_node`.
+		 * @name refresh()
+		 * @param {Boolean} skip_loading an option to skip showing the loading indicator
+		 * @param {Mixed} forget_state if set to `true` the state will not be reapplied; if set to a function (receiving the current state as argument) the result of that function will be used as the state
+		 * @trigger refresh.jstree
+		 */
+		refresh : function (skip_loading, forget_state) {
+			this._data.core.state = forget_state === true ? {} : this.get_state();
+			if(forget_state && $.isFunction(forget_state)) { this._data.core.state = forget_state.call(this, this._data.core.state); }
+			this._cnt = 0;
+			this._model.data = {
+				'#' : {
+					id : '#',
+					parent : null,
+					parents : [],
+					children : [],
+					children_d : [],
+					state : { loaded : false }
+				}
+			};
+			var c = this.get_container_ul()[0].className;
+			if(!skip_loading) {
+				this.element.html("<"+"ul class='"+c+"'><"+"li class='jstree-initial-node jstree-loading jstree-leaf jstree-last'><i class='jstree-icon jstree-ocl'></i><"+"a class='jstree-anchor' href='#'><i class='jstree-icon jstree-themeicon-hidden'></i>" + this.get_string("Loading ...") + "</a></li></ul>");
+			}
+			this.load_node('#', function (o, s) {
+				if(s) {
+					this.get_container_ul()[0].className = c;
+					this.set_state($.extend(true, {}, this._data.core.state), function () {
+						/**
+						 * triggered when a `refresh` call completes
+						 * @event
+						 * @name refresh.jstree
+						 */
+						this.trigger('refresh');
+					});
+				}
+				this._data.core.state = null;
+			});
+		},
+		/**
+		 * refreshes a node in the tree (reloads its children); all opened nodes inside that node are reloaded with calls to `load_node`.
+		 * @name refresh_node(obj)
+		 * @param  {mixed} obj the node
+		 * @trigger refresh_node.jstree
+		 */
+		refresh_node : function (obj) {
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+			var opened = [], to_load = [], s = this._data.core.selected.concat([]);
+			to_load.push(obj.id);
+			if(obj.state.opened === true) { opened.push(obj.id); }
+			this.get_node(obj, true).find('.jstree-open').each(function() { opened.push(this.id); });
+			this._load_nodes(to_load, $.proxy(function (nodes) {
+				this.open_node(opened, false, 0);
+				this.select_node(this._data.core.selected);
+				/**
+				 * triggered when a node is refreshed
+				 * @event
+				 * @name refresh_node.jstree
+				 * @param {Object} node the refreshed node
+				 * @param {Array} nodes an array of the IDs of the nodes that were reloaded
+				 */
+				this.trigger('refresh_node', { 'node' : obj, 'nodes' : nodes });
+			}, this));
+		},
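+		/*
+		 * Usage sketch (illustrative):
+		 *
+		 *     var inst = $.jstree.reference('#tree'); // hypothetical container
+		 *     inst.refresh();              // reload the whole tree, reapply state
+		 *     inst.refresh(false, true);   // reload and forget open/selected state
+		 *     inst.refresh_node('node_1'); // reload just one node's children
+		 */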
+		/**
+		 * set (change) the ID of a node
+		 * @name set_id(obj, id)
+		 * @param  {mixed} obj the node
+		 * @param  {String} id the new ID
+		 * @return {Boolean}
+		 */
+		set_id : function (obj, id) {
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+			var i, j, m = this._model.data;
+			id = id.toString();
+			// update parents (replace current ID with new one in children and children_d)
+			m[obj.parent].children[$.inArray(obj.id, m[obj.parent].children)] = id;
+			for(i = 0, j = obj.parents.length; i < j; i++) {
+				m[obj.parents[i]].children_d[$.inArray(obj.id, m[obj.parents[i]].children_d)] = id;
+			}
+			// update children (replace current ID with new one in parent and parents)
+			for(i = 0, j = obj.children.length; i < j; i++) {
+				m[obj.children[i]].parent = id;
+			}
+			for(i = 0, j = obj.children_d.length; i < j; i++) {
+				m[obj.children_d[i]].parents[$.inArray(obj.id, m[obj.children_d[i]].parents)] = id;
+			}
+			i = $.inArray(obj.id, this._data.core.selected);
+			if(i !== -1) { this._data.core.selected[i] = id; }
+			// update model and obj itself (obj.id, this._model.data[KEY])
+			i = this.get_node(obj.id, true);
+			if(i) {
+				i.attr('id', id);
+			}
+			delete m[obj.id];
+			obj.id = id;
+			m[id] = obj;
+			return true;
+		},
+		/**
+		 * get the text value of a node
+		 * @name get_text(obj)
+		 * @param  {mixed} obj the node
+		 * @return {String}
+		 */
+		get_text : function (obj) {
+			obj = this.get_node(obj);
+			return (!obj || obj.id === '#') ? false : obj.text;
+		},
+		/**
+		 * set the text value of a node. Used internally, please use `rename_node(obj, val)`.
+		 * @private
+		 * @name set_text(obj, val)
+		 * @param  {mixed} obj the node, you can pass an array to set the text on multiple nodes
+		 * @param  {String} val the new text value
+		 * @return {Boolean}
+		 * @trigger set_text.jstree
+		 */
+		set_text : function (obj, val) {
+			var t1, t2;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.set_text(obj[t1], val);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+			obj.text = val;
+			if(this.get_node(obj, true).length) {
+				this.redraw_node(obj.id);
+			}
+			/**
+			 * triggered when a node text value is changed
+			 * @event
+			 * @name set_text.jstree
+			 * @param {Object} obj
+			 * @param {String} text the new value
+			 */
+			this.trigger('set_text',{ "obj" : obj, "text" : val });
+			return true;
+		},
+		/**
+		 * gets a JSON representation of a node (or the whole tree)
+		 * @name get_json([obj, options])
+		 * @param  {mixed} obj
+		 * @param  {Object} options
+		 * @param  {Boolean} options.no_state do not return state information
+		 * @param  {Boolean} options.no_id do not return ID
+		 * @param  {Boolean} options.no_children do not include children
+		 * @param  {Boolean} options.no_data do not include node data
+		 * @param  {Boolean} options.flat return flat JSON instead of nested
+		 * @return {Object}
+		 */
+		get_json : function (obj, options, flat) {
+			obj = this.get_node(obj || '#');
+			if(!obj) { return false; }
+			if(options && options.flat && !flat) { flat = []; }
+			var tmp = {
+				'id' : obj.id,
+				'text' : obj.text,
+				'icon' : this.get_icon(obj),
+				'li_attr' : obj.li_attr,
+				'a_attr' : obj.a_attr,
+				'state' : {},
+				'data' : options && options.no_data ? false : obj.data
+				//( this.get_node(obj, true).length ? this.get_node(obj, true).data() : obj.data ),
+			}, i, j;
+			if(options && options.flat) {
+				tmp.parent = obj.parent;
+			}
+			else {
+				tmp.children = [];
+			}
+			if(!options || !options.no_state) {
+				for(i in obj.state) {
+					if(obj.state.hasOwnProperty(i)) {
+						tmp.state[i] = obj.state[i];
+					}
+				}
+			}
+			if(options && options.no_id) {
+				delete tmp.id;
+				if(tmp.li_attr && tmp.li_attr.id) {
+					delete tmp.li_attr.id;
+				}
+			}
+			if(options && options.flat && obj.id !== '#') {
+				flat.push(tmp);
+			}
+			if(!options || !options.no_children) {
+				for(i = 0, j = obj.children.length; i < j; i++) {
+					if(options && options.flat) {
+						this.get_json(obj.children[i], options, flat);
+					}
+					else {
+						tmp.children.push(this.get_json(obj.children[i], options));
+					}
+				}
+			}
+			return options && options.flat ? flat : (obj.id === '#' ? tmp.children : tmp);
+		},
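+		/*
+		 * Usage sketch (illustrative): exporting the tree, e.g. to feed it back
+		 * into another instance via core.data.
+		 *
+		 *     var inst = $.jstree.reference('#tree'); // hypothetical container
+		 *     var nested = inst.get_json();                     // nested node array
+		 *     var flat = inst.get_json('#', { 'flat' : true }); // flat array
+		 *     var bare = inst.get_json('#', { 'no_state' : true, 'no_id' : true });
+		 */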
+		/**
+		 * create a new node (do not confuse with load_node)
+		 * @name create_node([par, node, pos, callback, is_loaded])
+		 * @param  {mixed}   par       the parent node (to create a root node use either "#" (string) or `null`)
+		 * @param  {mixed}   node      the data for the new node (a valid JSON object, or a simple string with the name)
+		 * @param  {mixed}   pos       the index at which to insert the node, "first" and "last" are also supported, default is "last"
+		 * @param  {Function} callback a function to be called once the node is created
+		 * @param  {Boolean} is_loaded internal argument indicating if the parent node was successfully loaded
+		 * @return {String}            the ID of the newly created node
+		 * @trigger model.jstree, create_node.jstree
+		 */
+		create_node : function (par, node, pos, callback, is_loaded) {
+			if(par === null) { par = "#"; }
+			par = this.get_node(par);
+			if(!par) { return false; }
+			pos = pos === undefined ? "last" : pos;
+			if(!pos.toString().match(/^(before|after)$/) && !is_loaded && !this.is_loaded(par)) {
+				return this.load_node(par, function () { this.create_node(par, node, pos, callback, true); });
+			}
+			if(!node) { node = { "text" : this.get_string('New node') }; }
+			if(node.text === undefined) { node.text = this.get_string('New node'); }
+			var tmp, dpc, i, j;
+
+			if(par.id === '#') {
+				if(pos === "before") { pos = "first"; }
+				if(pos === "after") { pos = "last"; }
+			}
+			switch(pos) {
+				case "before":
+					tmp = this.get_node(par.parent);
+					pos = $.inArray(par.id, tmp.children);
+					par = tmp;
+					break;
+				case "after" :
+					tmp = this.get_node(par.parent);
+					pos = $.inArray(par.id, tmp.children) + 1;
+					par = tmp;
+					break;
+				case "inside":
+				case "first":
+					pos = 0;
+					break;
+				case "last":
+					pos = par.children.length;
+					break;
+				default:
+					if(!pos) { pos = 0; }
+					break;
+			}
+			if(pos > par.children.length) { pos = par.children.length; }
+			if(!node.id) { node.id = true; }
+			if(!this.check("create_node", node, par, pos)) {
+				this.settings.core.error.call(this, this._data.core.last_error);
+				return false;
+			}
+			if(node.id === true) { delete node.id; }
+			node = this._parse_model_from_json(node, par.id, par.parents.concat());
+			if(!node) { return false; }
+			tmp = this.get_node(node);
+			dpc = [];
+			dpc.push(node);
+			dpc = dpc.concat(tmp.children_d);
+			this.trigger('model', { "nodes" : dpc, "parent" : par.id });
+
+			par.children_d = par.children_d.concat(dpc);
+			for(i = 0, j = par.parents.length; i < j; i++) {
+				this._model.data[par.parents[i]].children_d = this._model.data[par.parents[i]].children_d.concat(dpc);
+			}
+			node = tmp;
+			tmp = [];
+			for(i = 0, j = par.children.length; i < j; i++) {
+				tmp[i >= pos ? i+1 : i] = par.children[i];
+			}
+			tmp[pos] = node.id;
+			par.children = tmp;
+
+			this.redraw_node(par, true);
+			if(callback) { callback.call(this, this.get_node(node)); }
+			/**
+			 * triggered when a node is created
+			 * @event
+			 * @name create_node.jstree
+			 * @param {Object} node
+			 * @param {String} parent the parent's ID
+			 * @param {Number} position the position of the new node among the parent's children
+			 */
+			this.trigger('create_node', { "node" : this.get_node(node), "parent" : par.id, "position" : pos });
+			return node.id;
+		},
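+		/*
+		 * Usage sketch (illustrative; IDs are hypothetical). Creation is vetoed
+		 * when core.check_callback forbids it - see check() further down.
+		 *
+		 *     var inst = $.jstree.reference('#tree');
+		 *     var id = inst.create_node('parent_1', { text : 'New child' }, 'last',
+		 *         function (node) { console.log('created', node.id); });
+		 *     if(id === false) { console.log(inst.last_error()); }
+		 */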
+		/**
+		 * set the text value of a node
+		 * @name rename_node(obj, val)
+		 * @param  {mixed} obj the node, you can pass an array to rename multiple nodes to the same name
+		 * @param  {String} val the new text value
+		 * @return {Boolean}
+		 * @trigger rename_node.jstree
+		 */
+		rename_node : function (obj, val) {
+			var t1, t2, old;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.rename_node(obj[t1], val);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+			old = obj.text;
+			if(!this.check("rename_node", obj, this.get_parent(obj), val)) {
+				this.settings.core.error.call(this, this._data.core.last_error);
+				return false;
+			}
+			this.set_text(obj, val); // .apply(this, Array.prototype.slice.call(arguments))
+			/**
+			 * triggered when a node is renamed
+			 * @event
+			 * @name rename_node.jstree
+			 * @param {Object} node
+			 * @param {String} text the new value
+			 * @param {String} old the old value
+			 */
+			this.trigger('rename_node', { "node" : obj, "text" : val, "old" : old });
+			return true;
+		},
+		/**
+		 * remove a node
+		 * @name delete_node(obj)
+		 * @param  {mixed} obj the node, you can pass an array to delete multiple nodes
+		 * @return {Boolean}
+		 * @trigger delete_node.jstree, changed.jstree
+		 */
+		delete_node : function (obj) {
+			var t1, t2, par, pos, tmp, i, j, k, l, c;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.delete_node(obj[t1]);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+			par = this.get_node(obj.parent);
+			pos = $.inArray(obj.id, par.children);
+			c = false;
+			if(!this.check("delete_node", obj, par, pos)) {
+				this.settings.core.error.call(this, this._data.core.last_error);
+				return false;
+			}
+			if(pos !== -1) {
+				par.children = $.vakata.array_remove(par.children, pos);
+			}
+			tmp = obj.children_d.concat([]);
+			tmp.push(obj.id);
+			for(k = 0, l = tmp.length; k < l; k++) {
+				for(i = 0, j = obj.parents.length; i < j; i++) {
+					pos = $.inArray(tmp[k], this._model.data[obj.parents[i]].children_d);
+					if(pos !== -1) {
+						this._model.data[obj.parents[i]].children_d = $.vakata.array_remove(this._model.data[obj.parents[i]].children_d, pos);
+					}
+				}
+				if(this._model.data[tmp[k]].state.selected) {
+					c = true;
+					pos = $.inArray(tmp[k], this._data.core.selected);
+					if(pos !== -1) {
+						this._data.core.selected = $.vakata.array_remove(this._data.core.selected, pos);
+					}
+				}
+			}
+			/**
+			 * triggered when a node is deleted
+			 * @event
+			 * @name delete_node.jstree
+			 * @param {Object} node
+			 * @param {String} parent the parent's ID
+			 */
+			this.trigger('delete_node', { "node" : obj, "parent" : par.id });
+			if(c) {
+				this.trigger('changed', { 'action' : 'delete_node', 'node' : obj, 'selected' : this._data.core.selected, 'parent' : par.id });
+			}
+			for(k = 0, l = tmp.length; k < l; k++) {
+				delete this._model.data[tmp[k]];
+			}
+			this.redraw_node(par, true);
+			return true;
+		},
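+		/*
+		 * Usage sketch for rename_node / delete_node above (illustrative):
+		 *
+		 *     var inst = $.jstree.reference('#tree'); // hypothetical container
+		 *     inst.rename_node('node_1', 'New name');
+		 *     inst.delete_node([ 'node_2', 'node_3' ]); // arrays work here too
+		 *     // both return false (and set last_error) when check() vetoes them
+		 */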
+		/**
+		 * check if an operation is permitted on the tree. Used internally.
+		 * @private
+		 * @name check(chk, obj, par, pos)
+		 * @param  {String} chk the operation to check, can be "create_node", "rename_node", "delete_node", "copy_node" or "move_node"
+		 * @param  {mixed} obj the node
+		 * @param  {mixed} par the parent
+		 * @param  {mixed} pos the position to insert at, or if "rename_node" - the new name
+		 * @param  {mixed} more various additional information; for example, if a "move_node" operation is triggered by DND this will be the hovered node
+		 * @return {Boolean}
+		 */
+		check : function (chk, obj, par, pos, more) {
+			obj = obj && obj.id ? obj : this.get_node(obj);
+			par = par && par.id ? par : this.get_node(par);
+			var tmp = chk.match(/^move_node|copy_node|create_node$/i) ? par : obj,
+				chc = this.settings.core.check_callback;
+			if(chk === "move_node" || chk === "copy_node") {
+				if((!more || !more.is_multi) && (obj.id === par.id || $.inArray(obj.id, par.children) === pos || $.inArray(par.id, obj.children_d) !== -1)) {
+					this._data.core.last_error = { 'error' : 'check', 'plugin' : 'core', 'id' : 'core_01', 'reason' : 'Moving parent inside child', 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+					return false;
+				}
+			}
+			if(tmp && tmp.data) { tmp = tmp.data; }
+			if(tmp && tmp.functions && (tmp.functions[chk] === false || tmp.functions[chk] === true)) {
+				if(tmp.functions[chk] === false) {
+					this._data.core.last_error = { 'error' : 'check', 'plugin' : 'core', 'id' : 'core_02', 'reason' : 'Node data prevents function: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+				}
+				return tmp.functions[chk];
+			}
+			if(chc === false || ($.isFunction(chc) && chc.call(this, chk, obj, par, pos, more) === false) || (chc && chc[chk] === false)) {
+				this._data.core.last_error = { 'error' : 'check', 'plugin' : 'core', 'id' : 'core_03', 'reason' : 'User config for core.check_callback prevents function: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+				return false;
+			}
+			return true;
+		},
+		/**
+		 * get the last error
+		 * @name last_error()
+		 * @return {Object}
+		 */
+		last_error : function () {
+			return this._data.core.last_error;
+		},
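+		/*
+		 * Configuration sketch (illustrative): check() consults
+		 * core.check_callback, so tree modifications are vetoed unless the
+		 * instance is configured to allow them, e.g.:
+		 *
+		 *     $('#tree').jstree({ 'core' : {
+		 *         'check_callback' : function (op, node, par, pos, more) {
+		 *             return op !== 'delete_node'; // allow everything but deletion
+		 *         }
+		 *     }});
+		 *     // check_callback : true would allow all operations unconditionally
+		 */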
+		/**
+		 * move a node to a new parent
+		 * @name move_node(obj, par [, pos, callback, is_loaded])
+		 * @param  {mixed} obj the node to move, pass an array to move multiple nodes
+		 * @param  {mixed} par the new parent
+		 * @param  {mixed} pos the position to insert at (besides integer values, "first" and "last" are supported, as well as "before" and "after"), defaults to integer `0`
+		 * @param  {function} callback a function to call once the move is completed, receives 3 arguments - the node, the new parent and the position
+		 * @param  {Boolean} is_loaded internal parameter indicating if the parent node has been loaded
+		 * @trigger move_node.jstree
+		 */
+		move_node : function (obj, par, pos, callback, is_loaded) {
+			var t1, t2, old_par, old_pos, new_par, old_ins, is_multi, dpc, tmp, i, j, k, l, p;
+
+			par = this.get_node(par);
+			pos = pos === undefined ? 0 : pos;
+			if(!par) { return false; }
+			if(!pos.toString().match(/^(before|after)$/) && !is_loaded && !this.is_loaded(par)) {
+				return this.load_node(par, function () { this.move_node(obj, par, pos, callback, true); });
+			}
+
+			if($.isArray(obj)) {
+				obj = obj.reverse().slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.move_node(obj[t1], par, pos, callback, is_loaded);
+				}
+				return true;
+			}
+			obj = obj && obj.id ? obj : this.get_node(obj);
+
+			if(!obj || obj.id === '#') { return false; }
+
+			old_par = (obj.parent || '#').toString();
+			new_par = (!pos.toString().match(/^(before|after)$/) || par.id === '#') ? par : this.get_node(par.parent);
+			old_ins = obj.instance ? obj.instance : (this._model.data[obj.id] ? this : $.jstree.reference(obj.id));
+			is_multi = !old_ins || !old_ins._id || (this._id !== old_ins._id);
+			old_pos = old_ins && old_ins._id && old_par && old_ins._model.data[old_par] && old_ins._model.data[old_par].children ? $.inArray(obj.id, old_ins._model.data[old_par].children) : -1;
+			if(is_multi) {
+				if(this.copy_node(obj, par, pos, callback, is_loaded)) {
+					if(old_ins) { old_ins.delete_node(obj); }
+					return true;
+				}
+				return false;
+			}
+			//var m = this._model.data;
+			if(new_par.id === '#') {
+				if(pos === "before") { pos = "first"; }
+				if(pos === "after") { pos = "last"; }
+			}
+			switch(pos) {
+				case "before":
+					pos = $.inArray(par.id, new_par.children);
+					break;
+				case "after" :
+					pos = $.inArray(par.id, new_par.children) + 1;
+					break;
+				case "inside":
+				case "first":
+					pos = 0;
+					break;
+				case "last":
+					pos = new_par.children.length;
+					break;
+				default:
+					if(!pos) { pos = 0; }
+					break;
+			}
+			if(pos > new_par.children.length) { pos = new_par.children.length; }
+			if(!this.check("move_node", obj, new_par, pos, { 'core' : true, 'is_multi' : (old_ins && old_ins._id && old_ins._id !== this._id), 'is_foreign' : (!old_ins || !old_ins._id) })) {
+				this.settings.core.error.call(this, this._data.core.last_error);
+				return false;
+			}
+			if(obj.parent === new_par.id) {
+				dpc = new_par.children.concat();
+				tmp = $.inArray(obj.id, dpc);
+				if(tmp !== -1) {
+					dpc = $.vakata.array_remove(dpc, tmp);
+					if(pos > tmp) { pos--; }
+				}
+				tmp = [];
+				for(i = 0, j = dpc.length; i < j; i++) {
+					tmp[i >= pos ? i+1 : i] = dpc[i];
+				}
+				tmp[pos] = obj.id;
+				new_par.children = tmp;
+				this._node_changed(new_par.id);
+				this.redraw(new_par.id === '#');
+			}
+			else {
+				// clean old parent and up
+				tmp = obj.children_d.concat();
+				tmp.push(obj.id);
+				for(i = 0, j = obj.parents.length; i < j; i++) {
+					dpc = [];
+					p = old_ins._model.data[obj.parents[i]].children_d;
+					for(k = 0, l = p.length; k < l; k++) {
+						if($.inArray(p[k], tmp) === -1) {
+							dpc.push(p[k]);
+						}
+					}
+					old_ins._model.data[obj.parents[i]].children_d = dpc;
+				}
+				old_ins._model.data[old_par].children = $.vakata.array_remove_item(old_ins._model.data[old_par].children, obj.id);
+
+				// insert into new parent and up
+				for(i = 0, j = new_par.parents.length; i < j; i++) {
+					this._model.data[new_par.parents[i]].children_d = this._model.data[new_par.parents[i]].children_d.concat(tmp);
+				}
+				dpc = [];
+				for(i = 0, j = new_par.children.length; i < j; i++) {
+					dpc[i >= pos ? i+1 : i] = new_par.children[i];
+				}
+				dpc[pos] = obj.id;
+				new_par.children = dpc;
+				new_par.children_d.push(obj.id);
+				new_par.children_d = new_par.children_d.concat(obj.children_d);
+
+				// update object
+				obj.parent = new_par.id;
+				tmp = new_par.parents.concat();
+				tmp.unshift(new_par.id);
+				p = obj.parents.length;
+				obj.parents = tmp;
+
+				// update object children
+				tmp = tmp.concat();
+				for(i = 0, j = obj.children_d.length; i < j; i++) {
+					this._model.data[obj.children_d[i]].parents = this._model.data[obj.children_d[i]].parents.slice(0,p*-1);
+					Array.prototype.push.apply(this._model.data[obj.children_d[i]].parents, tmp);
+				}
+
+				this._node_changed(old_par);
+				this._node_changed(new_par.id);
+				this.redraw(old_par === '#' || new_par.id === '#');
+			}
+			if(callback) { callback.call(this, obj, new_par, pos); }
+			/**
+			 * triggered when a node is moved
+			 * @event
+			 * @name move_node.jstree
+			 * @param {Object} node
+			 * @param {String} parent the parent's ID
+			 * @param {Number} position the position of the node among the parent's children
+			 * @param {String} old_parent the old parent of the node
+			 * @param {Number} old_position the old position of the node
+			 * @param {Boolean} is_multi do the node and new parent belong to different instances
+			 * @param {jsTree} old_instance the instance the node came from
+			 * @param {jsTree} new_instance the instance of the new parent
+			 */
+			this.trigger('move_node', { "node" : obj, "parent" : new_par.id, "position" : pos, "old_parent" : old_par, "old_position" : old_pos, 'is_multi' : (old_ins && old_ins._id && old_ins._id !== this._id), 'is_foreign' : (!old_ins || !old_ins._id), 'old_instance' : old_ins, 'new_instance' : this });
+			return true;
+		},
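+		// Example (hypothetical `#tree` container and node IDs): move "child_1"
+		// under "parent_2" as its last child and inspect the final position:
+		//   $.jstree.reference('#tree').move_node('child_1', 'parent_2', 'last',
+		//     function (node, parent, pos) { console.log(node.id, parent.id, pos); });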
+		/**
+		 * copy a node to a new parent
+		 * @name copy_node(obj, par [, pos, callback, is_loaded])
+		 * @param  {mixed} obj the node to copy, pass an array to copy multiple nodes
+		 * @param  {mixed} par the new parent
+		 * @param  {mixed} pos the position to insert at (besides integer values, "first" and "last" are supported, as well as "before" and "after"), defaults to integer `0`
+		 * @param  {function} callback a function to call once the copy is completed, receives 3 arguments - the node, the new parent and the position
+		 * @param  {Boolean} is_loaded internal parameter indicating if the parent node has been loaded
+		 * @trigger model.jstree copy_node.jstree
+		 */
+		copy_node : function (obj, par, pos, callback, is_loaded) {
+			var t1, t2, dpc, tmp, i, j, node, old_par, new_par, old_ins, is_multi;
+
+			par = this.get_node(par);
+			pos = pos === undefined ? 0 : pos;
+			if(!par) { return false; }
+			if(!pos.toString().match(/^(before|after)$/) && !is_loaded && !this.is_loaded(par)) {
+				return this.load_node(par, function () { this.copy_node(obj, par, pos, callback, true); });
+			}
+
+			if($.isArray(obj)) {
+				obj = obj.reverse().slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.copy_node(obj[t1], par, pos, callback, is_loaded);
+				}
+				return true;
+			}
+			obj = obj && obj.id ? obj : this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+
+			old_par = (obj.parent || '#').toString();
+			new_par = (!pos.toString().match(/^(before|after)$/) || par.id === '#') ? par : this.get_node(par.parent);
+			old_ins = obj.instance ? obj.instance : (this._model.data[obj.id] ? this : $.jstree.reference(obj.id));
+			is_multi = !old_ins || !old_ins._id || (this._id !== old_ins._id);
+			if(new_par.id === '#') {
+				if(pos === "before") { pos = "first"; }
+				if(pos === "after") { pos = "last"; }
+			}
+			switch(pos) {
+				case "before":
+					pos = $.inArray(par.id, new_par.children);
+					break;
+				case "after" :
+					pos = $.inArray(par.id, new_par.children) + 1;
+					break;
+				case "inside":
+				case "first":
+					pos = 0;
+					break;
+				case "last":
+					pos = new_par.children.length;
+					break;
+				default:
+					if(!pos) { pos = 0; }
+					break;
+			}
+			if(pos > new_par.children.length) { pos = new_par.children.length; }
+			if(!this.check("copy_node", obj, new_par, pos, { 'core' : true, 'is_multi' : (old_ins && old_ins._id && old_ins._id !== this._id), 'is_foreign' : (!old_ins || !old_ins._id) })) {
+				this.settings.core.error.call(this, this._data.core.last_error);
+				return false;
+			}
+			node = old_ins ? old_ins.get_json(obj, { no_id : true, no_data : true, no_state : true }) : obj;
+			if(!node) { return false; }
+			if(node.id === true) { delete node.id; }
+			node = this._parse_model_from_json(node, new_par.id, new_par.parents.concat());
+			if(!node) { return false; }
+			tmp = this.get_node(node);
+			if(obj && obj.state && obj.state.loaded === false) { tmp.state.loaded = false; }
+			dpc = [];
+			dpc.push(node);
+			dpc = dpc.concat(tmp.children_d);
+			this.trigger('model', { "nodes" : dpc, "parent" : new_par.id });
+
+			// insert into new parent and up
+			for(i = 0, j = new_par.parents.length; i < j; i++) {
+				this._model.data[new_par.parents[i]].children_d = this._model.data[new_par.parents[i]].children_d.concat(dpc);
+			}
+			dpc = [];
+			for(i = 0, j = new_par.children.length; i < j; i++) {
+				dpc[i >= pos ? i+1 : i] = new_par.children[i];
+			}
+			dpc[pos] = tmp.id;
+			new_par.children = dpc;
+			new_par.children_d.push(tmp.id);
+			new_par.children_d = new_par.children_d.concat(tmp.children_d);
+
+			this._node_changed(new_par.id);
+			this.redraw(new_par.id === '#');
+			if(callback) { callback.call(this, tmp, new_par, pos); }
+			/**
+			 * triggered when a node is copied
+			 * @event
+			 * @name copy_node.jstree
+			 * @param {Object} node the copied node
+			 * @param {Object} original the original node
+			 * @param {String} parent the parent's ID
+			 * @param {Number} position the position of the node among the parent's children
+			 * @param {String} old_parent the old parent of the node
+			 * @param {Number} old_position the position of the original node
+			 * @param {Boolean} is_multi do the node and new parent belong to different instances
+			 * @param {jsTree} old_instance the instance the node came from
+			 * @param {jsTree} new_instance the instance of the new parent
+			 */
+			this.trigger('copy_node', { "node" : tmp, "original" : obj, "parent" : new_par.id, "position" : pos, "old_parent" : old_par, "old_position" : old_ins && old_ins._id && old_par && old_ins._model.data[old_par] && old_ins._model.data[old_par].children ? $.inArray(obj.id, old_ins._model.data[old_par].children) : -1,'is_multi' : (old_ins && old_ins._id && old_ins._id !== this._id), 'is_foreign' : (!old_ins || !old_ins._id), 'old_instance' : old_ins, 'new_instance' : this });
+			return tmp.id;
+		},
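+		// Example (hypothetical IDs): copy "child_1" into "parent_2" - unlike
+		// move_node, copy_node returns the ID of the newly created node:
+		//   var new_id = $.jstree.reference('#tree').copy_node('child_1', 'parent_2', 'first');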
+		/**
+		 * cut a node (a later call to `paste(obj)` would move the node)
+		 * @name cut(obj)
+		 * @param  {mixed} obj multiple objects can be passed using an array
+		 * @trigger cut.jstree
+		 */
+		cut : function (obj) {
+			if(!obj) { obj = this._data.core.selected.concat(); }
+			if(!$.isArray(obj)) { obj = [obj]; }
+			if(!obj.length) { return false; }
+			var tmp = [], o, t1, t2;
+			for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+				o = this.get_node(obj[t1]);
+				if(o && o.id && o.id !== '#') { tmp.push(o); }
+			}
+			if(!tmp.length) { return false; }
+			ccp_node = tmp;
+			ccp_inst = this;
+			ccp_mode = 'move_node';
+			/**
+			 * triggered when nodes are added to the buffer for moving
+			 * @event
+			 * @name cut.jstree
+			 * @param {Array} node
+			 */
+			this.trigger('cut', { "node" : obj });
+		},
+		/**
+		 * copy a node (a later call to `paste(obj)` would copy the node)
+		 * @name copy(obj)
+		 * @param  {mixed} obj multiple objects can be passed using an array
+		 * @trigger copy.jstree
+		 */
+		copy : function (obj) {
+			if(!obj) { obj = this._data.core.selected.concat(); }
+			if(!$.isArray(obj)) { obj = [obj]; }
+			if(!obj.length) { return false; }
+			var tmp = [], o, t1, t2;
+			for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+				o = this.get_node(obj[t1]);
+				if(o && o.id && o.id !== '#') { tmp.push(o); }
+			}
+			if(!tmp.length) { return false; }
+			ccp_node = tmp;
+			ccp_inst = this;
+			ccp_mode = 'copy_node';
+			/**
+			 * triggered when nodes are added to the buffer for copying
+			 * @event
+			 * @name copy.jstree
+			 * @param {Array} node
+			 */
+			this.trigger('copy', { "node" : obj });
+		},
+		/**
+		 * get the current buffer (any nodes that are waiting for a paste operation)
+		 * @name get_buffer()
+		 * @return {Object} an object consisting of `mode` ("copy_node" or "move_node"), `node` (an array of objects) and `inst` (the instance)
+		 */
+		get_buffer : function () {
+			return { 'mode' : ccp_mode, 'node' : ccp_node, 'inst' : ccp_inst };
+		},
+		/**
+		 * check if there is something in the buffer to paste
+		 * @name can_paste()
+		 * @return {Boolean}
+		 */
+		can_paste : function () {
+			return ccp_mode !== false && ccp_node !== false; // && ccp_inst._model.data[ccp_node];
+		},
+		/**
+		 * copy or move the previously cut or copied nodes to a new parent
+		 * @name paste(obj [, pos])
+		 * @param  {mixed} obj the new parent
+		 * @param  {mixed} pos the position to insert at (besides integer values, "first" and "last" are supported), defaults to integer `0`
+		 * @trigger paste.jstree
+		 */
+		paste : function (obj, pos) {
+			obj = this.get_node(obj);
+			if(!obj || !ccp_mode || !ccp_mode.match(/^(copy_node|move_node)$/) || !ccp_node) { return false; }
+			if(this[ccp_mode](ccp_node, obj, pos)) {
+				/**
+				 * triggered when paste is invoked
+				 * @event
+				 * @name paste.jstree
+				 * @param {String} parent the ID of the receiving node
+				 * @param {Array} node the nodes in the buffer
+				 * @param {String} mode the performed operation - "copy_node" or "move_node"
+				 */
+				this.trigger('paste', { "parent" : obj.id, "node" : ccp_node, "mode" : ccp_mode });
+			}
+			ccp_node = false;
+			ccp_mode = false;
+			ccp_inst = false;
+		},
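+		// Example (hypothetical IDs): the usual buffer workflow - cut (or copy)
+		// nodes, then paste them into a target parent once one is chosen:
+		//   var inst = $.jstree.reference('#tree');
+		//   inst.cut('child_1');
+		//   if(inst.can_paste()) { inst.paste('parent_2', 'last'); }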
+		/**
+		 * put a node in edit mode (input field to rename the node)
+		 * @name edit(obj [, default_text])
+		 * @param  {mixed} obj
+		 * @param  {String} default_text the text to populate the input with (if omitted the node text value is used)
+		 */
+		edit : function (obj, default_text) {
+			obj = this.get_node(obj);
+			if(!obj) { return false; }
+			if(this.settings.core.check_callback === false) {
+				this._data.core.last_error = { 'error' : 'check', 'plugin' : 'core', 'id' : 'core_07', 'reason' : 'Could not edit node because of check_callback' };
+				this.settings.core.error.call(this, this._data.core.last_error);
+				return false;
+			}
+			default_text = typeof default_text === 'string' ? default_text : obj.text;
+			this.set_text(obj, "");
+			obj = this._open_to(obj);
+
+			var rtl = this._data.core.rtl,
+				w  = this.element.width(),
+				a  = obj.children('.jstree-anchor'),
+				s  = $('<span>'),
+				/*!
+				oi = obj.children("i:visible"),
+				ai = a.children("i:visible"),
+				w1 = oi.width() * oi.length,
+				w2 = ai.width() * ai.length,
+				*/
+				t  = default_text,
+				h1 = $("<"+"div />", { css : { "position" : "absolute", "top" : "-200px", "left" : (rtl ? "0px" : "-1000px"), "visibility" : "hidden" } }).appendTo("body"),
+				h2 = $("<"+"input />", {
+						"value" : t,
+						"class" : "jstree-rename-input",
+						// "size" : t.length,
+						"css" : {
+							"padding" : "0",
+							"border" : "1px solid silver",
+							"box-sizing" : "border-box",
+							"display" : "inline-block",
+							"height" : (this._data.core.li_height) + "px",
+							"lineHeight" : (this._data.core.li_height) + "px",
+							"width" : "150px" // will be set a bit further down
+						},
+						"blur" : $.proxy(function () {
+							var i = s.children(".jstree-rename-input"),
+								v = i.val();
+							if(v === "") { v = t; }
+							h1.remove();
+							s.replaceWith(a);
+							s.remove();
+							this.set_text(obj, t);
+							if(this.rename_node(obj, $('<div></div>').text(v)[this.settings.core.force_text ? 'text' : 'html']()) === false) {
+								this.set_text(obj, t); // move this up? and fix #483
+							}
+						}, this),
+						"keydown" : function (event) {
+							var key = event.which;
+							if(key === 27) {
+								this.value = t;
+							}
+							if(key === 27 || key === 13 || key === 37 || key === 38 || key === 39 || key === 40 || key === 32) {
+								event.stopImmediatePropagation();
+							}
+							if(key === 27 || key === 13) {
+								event.preventDefault();
+								this.blur();
+							}
+						},
+						"click" : function (e) { e.stopImmediatePropagation(); },
+						"mousedown" : function (e) { e.stopImmediatePropagation(); },
+						"keyup" : function (event) {
+							h2.width(Math.min(h1.text("pW" + this.value).width(),w));
+						},
+						"keypress" : function(event) {
+							if(event.which === 13) { return false; }
+						}
+					}),
+				fn = {
+						fontFamily		: a.css('fontFamily')		|| '',
+						fontSize		: a.css('fontSize')			|| '',
+						fontWeight		: a.css('fontWeight')		|| '',
+						fontStyle		: a.css('fontStyle')		|| '',
+						fontStretch		: a.css('fontStretch')		|| '',
+						fontVariant		: a.css('fontVariant')		|| '',
+						letterSpacing	: a.css('letterSpacing')	|| '',
+						wordSpacing		: a.css('wordSpacing')		|| ''
+				};
+			s.attr('class', a.attr('class')).append(a.contents().clone()).append(h2);
+			a.replaceWith(s);
+			h1.css(fn);
+			h2.css(fn).width(Math.min(h1.text("pW" + h2[0].value).width(),w))[0].select();
+		},
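+		// Example (hypothetical ID): open the inline rename input for a node,
+		// pre-filled with custom text instead of the node's current text:
+		//   $.jstree.reference('#tree').edit('child_1', 'New name');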
+
+
+		/**
+		 * changes the theme
+		 * @name set_theme(theme_name [, theme_url])
+		 * @param {String} theme_name the name of the new theme to apply
+		 * @param {mixed} theme_url  the location of the CSS file for this theme. Omit or set to `false` if you manually included the file. Set to `true` to autoload from the `core.themes.dir` directory.
+		 * @trigger set_theme.jstree
+		 */
+		set_theme : function (theme_name, theme_url) {
+			if(!theme_name) { return false; }
+			if(theme_url === true) {
+				var dir = this.settings.core.themes.dir;
+				if(!dir) { dir = $.jstree.path + '/themes'; }
+				theme_url = dir + '/' + theme_name + '/style.css';
+			}
+			if(theme_url && $.inArray(theme_url, themes_loaded) === -1) {
+				$('head').append('<'+'link rel="stylesheet" href="' + theme_url + '" type="text/css" />');
+				themes_loaded.push(theme_url);
+			}
+			if(this._data.core.themes.name) {
+				this.element.removeClass('jstree-' + this._data.core.themes.name);
+			}
+			this._data.core.themes.name = theme_name;
+			this.element.addClass('jstree-' + theme_name);
+			this.element[this.settings.core.themes.responsive ? 'addClass' : 'removeClass' ]('jstree-' + theme_name + '-responsive');
+			/**
+			 * triggered when a theme is set
+			 * @event
+			 * @name set_theme.jstree
+			 * @param {String} theme the new theme
+			 */
+			this.trigger('set_theme', { 'theme' : theme_name });
+		},
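+		// Example (hypothetical theme name): switch to a theme called "proton";
+		// passing `true` autoloads its stylesheet from `core.themes.dir`:
+		//   $.jstree.reference('#tree').set_theme('proton', true);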
+		/**
+		 * gets the name of the currently applied theme
+		 * @name get_theme()
+		 * @return {String}
+		 */
+		get_theme : function () { return this._data.core.themes.name; },
+		/**
+		 * changes the theme variant (if the theme has variants)
+		 * @name set_theme_variant(variant_name)
+		 * @param {String|Boolean} variant_name the variant to apply (if `false` is used the current variant is removed)
+		 */
+		set_theme_variant : function (variant_name) {
+			if(this._data.core.themes.variant) {
+				this.element.removeClass('jstree-' + this._data.core.themes.name + '-' + this._data.core.themes.variant);
+			}
+			this._data.core.themes.variant = variant_name;
+			if(variant_name) {
+				this.element.addClass('jstree-' + this._data.core.themes.name + '-' + this._data.core.themes.variant);
+			}
+		},
+		/**
+		 * gets the name of the currently applied theme variant
+		 * @name get_theme_variant()
+		 * @return {String}
+		 */
+		get_theme_variant : function () { return this._data.core.themes.variant; },
+		/**
+		 * shows a striped background on the container (if the theme supports it)
+		 * @name show_stripes()
+		 */
+		show_stripes : function () { this._data.core.themes.stripes = true; this.get_container_ul().addClass("jstree-striped"); },
+		/**
+		 * hides the striped background on the container
+		 * @name hide_stripes()
+		 */
+		hide_stripes : function () { this._data.core.themes.stripes = false; this.get_container_ul().removeClass("jstree-striped"); },
+		/**
+		 * toggles the striped background on the container
+		 * @name toggle_stripes()
+		 */
+		toggle_stripes : function () { if(this._data.core.themes.stripes) { this.hide_stripes(); } else { this.show_stripes(); } },
+		/**
+		 * shows the connecting dots (if the theme supports it)
+		 * @name show_dots()
+		 */
+		show_dots : function () { this._data.core.themes.dots = true; this.get_container_ul().removeClass("jstree-no-dots"); },
+		/**
+		 * hides the connecting dots
+		 * @name hide_dots()
+		 */
+		hide_dots : function () { this._data.core.themes.dots = false; this.get_container_ul().addClass("jstree-no-dots"); },
+		/**
+		 * toggles the connecting dots
+		 * @name toggle_dots()
+		 */
+		toggle_dots : function () { if(this._data.core.themes.dots) { this.hide_dots(); } else { this.show_dots(); } },
+		/**
+		 * show the node icons
+		 * @name show_icons()
+		 */
+		show_icons : function () { this._data.core.themes.icons = true; this.get_container_ul().removeClass("jstree-no-icons"); },
+		/**
+		 * hide the node icons
+		 * @name hide_icons()
+		 */
+		hide_icons : function () { this._data.core.themes.icons = false; this.get_container_ul().addClass("jstree-no-icons"); },
+		/**
+		 * toggle the node icons
+		 * @name toggle_icons()
+		 */
+		toggle_icons : function () { if(this._data.core.themes.icons) { this.hide_icons(); } else { this.show_icons(); } },
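+		// Example: the visual toggles above can be combined freely at runtime
+		// (provided the active theme supports stripes and dots):
+		//   var inst = $.jstree.reference('#tree');
+		//   inst.show_stripes(); inst.hide_dots(); inst.toggle_icons();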
+		/**
+		 * set the node icon for a node
+		 * @name set_icon(obj, icon)
+		 * @param {mixed} obj
+		 * @param {String} icon the new icon - can be a path to an icon or a className; if using an image that is in the current directory use a `./` prefix, otherwise it will be detected as a class
+		 */
+		set_icon : function (obj, icon) {
+			var t1, t2, dom, old;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.set_icon(obj[t1], icon);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+			old = obj.icon;
+			obj.icon = icon;
+			dom = this.get_node(obj, true).children(".jstree-anchor").children(".jstree-themeicon");
+			if(icon === false) {
+				this.hide_icon(obj);
+			}
+			else if(icon === true) {
+				dom.removeClass('jstree-themeicon-custom ' + old).css("background","").removeAttr("rel");
+			}
+			else if(icon.indexOf("/") === -1 && icon.indexOf(".") === -1) {
+				dom.removeClass(old).css("background","");
+				dom.addClass(icon + ' jstree-themeicon-custom').attr("rel",icon);
+			}
+			else {
+				dom.removeClass(old).css("background","");
+				dom.addClass('jstree-themeicon-custom').css("background", "url('" + icon + "') center center no-repeat").attr("rel",icon);
+			}
+			return true;
+		},
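+		// Example (hypothetical values): the icon is treated as a className
+		// unless it contains "/" or ".", in which case it is used as an image URL:
+		//   var inst = $.jstree.reference('#tree');
+		//   inst.set_icon('child_1', 'glyphicon glyphicon-leaf'); // class-based icon
+		//   inst.set_icon('child_2', './images/leaf.png');        // image-based icon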
+		/**
+		 * get the node icon for a node
+		 * @name get_icon(obj)
+		 * @param {mixed} obj
+		 * @return {String}
+		 */
+		get_icon : function (obj) {
+			obj = this.get_node(obj);
+			return (!obj || obj.id === '#') ? false : obj.icon;
+		},
+		/**
+		 * hide the icon on an individual node
+		 * @name hide_icon(obj)
+		 * @param {mixed} obj
+		 */
+		hide_icon : function (obj) {
+			var t1, t2;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.hide_icon(obj[t1]);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj === '#') { return false; }
+			obj.icon = false;
+			this.get_node(obj, true).children(".jstree-anchor").children(".jstree-themeicon").addClass('jstree-themeicon-hidden');
+			return true;
+		},
+		/**
+		 * show the icon on an individual node
+		 * @name show_icon(obj)
+		 * @param {mixed} obj
+		 */
+		show_icon : function (obj) {
+			var t1, t2, dom;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.show_icon(obj[t1]);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj === '#') { return false; }
+			dom = this.get_node(obj, true);
+			obj.icon = dom.length ? dom.children(".jstree-anchor").children(".jstree-themeicon").attr('rel') : true;
+			if(!obj.icon) { obj.icon = true; }
+			dom.children(".jstree-anchor").children(".jstree-themeicon").removeClass('jstree-themeicon-hidden');
+			return true;
+		}
+	};
+
+	// helpers
+	$.vakata = {};
+	// collect attributes
+	$.vakata.attributes = function(node, with_values) {
+		node = $(node)[0];
+		var attr = with_values ? {} : [];
+		if(node && node.attributes) {
+			$.each(node.attributes, function (i, v) {
+				if($.inArray(v.nodeName.toLowerCase(),['style','contenteditable','hasfocus','tabindex']) !== -1) { return; }
+				if(v.nodeValue !== null && $.trim(v.nodeValue) !== '') {
+					if(with_values) { attr[v.nodeName] = v.nodeValue; }
+					else { attr.push(v.nodeName); }
+				}
+			});
+		}
+		return attr;
+	};
+	$.vakata.array_unique = function(array) {
+		var a = [], i, j, l;
+		for(i = 0, l = array.length; i < l; i++) {
+			for(j = 0; j <= i; j++) {
+				if(array[i] === array[j]) {
+					break;
+				}
+			}
+			if(j === i) { a.push(array[i]); }
+		}
+		return a;
+	};
+	// remove item from array
+	$.vakata.array_remove = function(array, from, to) {
+		var rest = array.slice((to || from) + 1 || array.length);
+		array.length = from < 0 ? array.length + from : from;
+		array.push.apply(array, rest);
+		return array;
+	};
+	// remove item from array
+	$.vakata.array_remove_item = function(array, item) {
+		var tmp = $.inArray(item, array);
+		return tmp !== -1 ? $.vakata.array_remove(array, tmp) : array;
+	};
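+	// Example: array_remove removes by index (mutating the array), while
+	// array_remove_item removes by value:
+	//   $.vakata.array_remove(['a', 'b', 'c'], 1);        // -> ['a', 'c']
+	//   $.vakata.array_remove_item(['a', 'b', 'c'], 'b'); // -> ['a', 'c']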
+
+/**
+ * ### Checkbox plugin
+ *
+ * This plugin renders checkbox icons in front of each node, making multiple selection much easier.
+ * It also supports tri-state behavior, meaning that if only some of a node's children are checked, the node itself will be rendered as undetermined and that state will be propagated up.
+ */
+
+	var _i = document.createElement('I');
+	_i.className = 'jstree-icon jstree-checkbox';
+	/**
+	 * stores all defaults for the checkbox plugin
+	 * @name $.jstree.defaults.checkbox
+	 * @plugin checkbox
+	 */
+	$.jstree.defaults.checkbox = {
+		/**
+		 * a boolean indicating if checkboxes should be visible (can be changed at a later time using `show_checkboxes()` and `hide_checkboxes()`). Defaults to `true`.
+		 * @name $.jstree.defaults.checkbox.visible
+		 * @plugin checkbox
+		 */
+		visible				: true,
+		/**
+		 * a boolean indicating if checkboxes should cascade down and have an undetermined state. Defaults to `true`.
+		 * @name $.jstree.defaults.checkbox.three_state
+		 * @plugin checkbox
+		 */
+		three_state			: true,
+		/**
+		 * a boolean indicating if clicking anywhere on the node should act as clicking on the checkbox. Defaults to `true`.
+		 * @name $.jstree.defaults.checkbox.whole_node
+		 * @plugin checkbox
+		 */
+		whole_node			: true,
+		/**
+		 * a boolean indicating if the selected style of a node should be kept, or removed. Defaults to `true`.
+		 * @name $.jstree.defaults.checkbox.keep_selected_style
+		 * @plugin checkbox
+		 */
+		keep_selected_style	: true,
+		/**
+		 * This setting controls how cascading and undetermined nodes are applied.
+		 * If 'up' is in the string, cascading up is enabled; if 'down' is in the string, cascading down is enabled; if 'undetermined' is in the string, undetermined nodes will be used.
+		 * If `three_state` is set to `true` this setting is automatically set to 'up+down+undetermined'. Defaults to ''.
+		 * @name $.jstree.defaults.checkbox.cascade
+		 * @plugin checkbox
+		 */
+		cascade				: '',
+		/**
+		 * This setting controls if checkboxes are bound to the general tree selection or to an internal array maintained by the checkbox plugin. Defaults to `true`; only set to `false` if you know exactly what you are doing.
+		 * @name $.jstree.defaults.checkbox.tie_selection
+		 * @plugin checkbox
+		 */
+		tie_selection		: true
+	};
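+	// Example (hypothetical container): enable the plugin and override a few of
+	// the defaults documented above:
+	//   $('#tree').jstree({
+	//     'plugins'  : ['checkbox'],
+	//     'checkbox' : { 'three_state' : false, 'cascade' : 'down', 'whole_node' : false }
+	//   });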
+	$.jstree.plugins.checkbox = function (options, parent) {
+		this.bind = function () {
+			parent.bind.call(this);
+			this._data.checkbox.uto = false;
+			this._data.checkbox.selected = [];
+			if(this.settings.checkbox.three_state) {
+				this.settings.checkbox.cascade = 'up+down+undetermined';
+			}
+			this.element
+				.on("init.jstree", $.proxy(function () {
+						this._data.checkbox.visible = this.settings.checkbox.visible;
+						if(!this.settings.checkbox.keep_selected_style) {
+							this.element.addClass('jstree-checkbox-no-clicked');
+						}
+						if(this.settings.checkbox.tie_selection) {
+							this.element.addClass('jstree-checkbox-selection');
+						}
+					}, this))
+				.on("loading.jstree", $.proxy(function () {
+						this[ this._data.checkbox.visible ? 'show_checkboxes' : 'hide_checkboxes' ]();
+					}, this));
+			if(this.settings.checkbox.cascade.indexOf('undetermined') !== -1) {
+				this.element
+					.on('changed.jstree uncheck_node.jstree check_node.jstree uncheck_all.jstree check_all.jstree move_node.jstree copy_node.jstree redraw.jstree open_node.jstree', $.proxy(function () {
+							// only if undetermined is in setting
+							if(this._data.checkbox.uto) { clearTimeout(this._data.checkbox.uto); }
+							this._data.checkbox.uto = setTimeout($.proxy(this._undetermined, this), 50);
+						}, this));
+			}
+			if(!this.settings.checkbox.tie_selection) {
+				this.element
+					.on('model.jstree', $.proxy(function (e, data) {
+						var m = this._model.data,
+							p = m[data.parent],
+							dpc = data.nodes,
+							i, j;
+						for(i = 0, j = dpc.length; i < j; i++) {
+							m[dpc[i]].state.checked = (m[dpc[i]].original && m[dpc[i]].original.state && m[dpc[i]].original.state.checked);
+							if(m[dpc[i]].state.checked) {
+								this._data.checkbox.selected.push(dpc[i]);
+							}
+						}
+					}, this));
+			}
+			if(this.settings.checkbox.cascade.indexOf('up') !== -1 || this.settings.checkbox.cascade.indexOf('down') !== -1) {
+				this.element
+					.on('model.jstree', $.proxy(function (e, data) {
+							var m = this._model.data,
+								p = m[data.parent],
+								dpc = data.nodes,
+								chd = [],
+								c, i, j, k, l, tmp, s = this.settings.checkbox.cascade, t = this.settings.checkbox.tie_selection;
+
+							if(s.indexOf('down') !== -1) {
+								// apply down
+								if(p.state[ t ? 'selected' : 'checked' ]) {
+									for(i = 0, j = dpc.length; i < j; i++) {
+										m[dpc[i]].state[ t ? 'selected' : 'checked' ] = true;
+									}
+									this._data[ t ? 'core' : 'checkbox' ].selected = this._data[ t ? 'core' : 'checkbox' ].selected.concat(dpc);
+								}
+								else {
+									for(i = 0, j = dpc.length; i < j; i++) {
+										if(m[dpc[i]].state[ t ? 'selected' : 'checked' ]) {
+											for(k = 0, l = m[dpc[i]].children_d.length; k < l; k++) {
+												m[m[dpc[i]].children_d[k]].state[ t ? 'selected' : 'checked' ] = true;
+											}
+											this._data[ t ? 'core' : 'checkbox' ].selected = this._data[ t ? 'core' : 'checkbox' ].selected.concat(m[dpc[i]].children_d);
+										}
+									}
+								}
+							}
+
+							if(s.indexOf('up') !== -1) {
+								// apply up
+								for(i = 0, j = p.children_d.length; i < j; i++) {
+									if(!m[p.children_d[i]].children.length) {
+										chd.push(m[p.children_d[i]].parent);
+									}
+								}
+								chd = $.vakata.array_unique(chd);
+								for(k = 0, l = chd.length; k < l; k++) {
+									p = m[chd[k]];
+									while(p && p.id !== '#') {
+										c = 0;
+										for(i = 0, j = p.children.length; i < j; i++) {
+											c += m[p.children[i]].state[ t ? 'selected' : 'checked' ];
+										}
+										if(c === j) {
+											p.state[ t ? 'selected' : 'checked' ] = true;
+											this._data[ t ? 'core' : 'checkbox' ].selected.push(p.id);
+											tmp = this.get_node(p, true);
+											if(tmp && tmp.length) {
+												tmp.children('.jstree-anchor').addClass( t ? 'jstree-clicked' : 'jstree-checked');
+											}
+										}
+										else {
+											break;
+										}
+										p = this.get_node(p.parent);
+									}
+								}
+							}
+
+							this._data[ t ? 'core' : 'checkbox' ].selected = $.vakata.array_unique(this._data[ t ? 'core' : 'checkbox' ].selected);
+						}, this))
+					.on(this.settings.checkbox.tie_selection ? 'select_node.jstree' : 'check_node.jstree', $.proxy(function (e, data) {
+							var obj = data.node,
+								m = this._model.data,
+								par = this.get_node(obj.parent),
+								dom = this.get_node(obj, true),
+								i, j, c, tmp, s = this.settings.checkbox.cascade, t = this.settings.checkbox.tie_selection;
+
+							// apply down
+							if(s.indexOf('down') !== -1) {
+								this._data[ t ? 'core' : 'checkbox' ].selected = $.vakata.array_unique(this._data[ t ? 'core' : 'checkbox' ].selected.concat(obj.children_d));
+								for(i = 0, j = obj.children_d.length; i < j; i++) {
+									tmp = m[obj.children_d[i]];
+									tmp.state[ t ? 'selected' : 'checked' ] = true;
+									if(tmp && tmp.original && tmp.original.state && tmp.original.state.undetermined) {
+										tmp.original.state.undetermined = false;
+									}
+								}
+							}
+
+							// apply up
+							if(s.indexOf('up') !== -1) {
+								while(par && par.id !== '#') {
+									c = 0;
+									for(i = 0, j = par.children.length; i < j; i++) {
+										c += m[par.children[i]].state[ t ? 'selected' : 'checked' ];
+									}
+									if(c === j) {
+										par.state[ t ? 'selected' : 'checked' ] = true;
+										this._data[ t ? 'core' : 'checkbox' ].selected.push(par.id);
+										tmp = this.get_node(par, true);
+										if(tmp && tmp.length) {
+											tmp.children('.jstree-anchor').addClass(t ? 'jstree-clicked' : 'jstree-checked');
+										}
+									}
+									else {
+										break;
+									}
+									par = this.get_node(par.parent);
+								}
+							}
+
+							// apply down (process .children separately?)
+							if(s.indexOf('down') !== -1 && dom.length) {
+								dom.find('.jstree-anchor').addClass(t ? 'jstree-clicked' : 'jstree-checked');
+							}
+						}, this))
+					.on(this.settings.checkbox.tie_selection ? 'deselect_all.jstree' : 'uncheck_all.jstree', $.proxy(function (e, data) {
+							var obj = this.get_node('#'),
+								m = this._model.data,
+								i, j, tmp;
+							for(i = 0, j = obj.children_d.length; i < j; i++) {
+								tmp = m[obj.children_d[i]];
+								if(tmp && tmp.original && tmp.original.state && tmp.original.state.undetermined) {
+									tmp.original.state.undetermined = false;
+								}
+							}
+						}, this))
+					.on(this.settings.checkbox.tie_selection ? 'deselect_node.jstree' : 'uncheck_node.jstree', $.proxy(function (e, data) {
+							var obj = data.node,
+								dom = this.get_node(obj, true),
+								i, j, tmp, s = this.settings.checkbox.cascade, t = this.settings.checkbox.tie_selection;
+							if(obj && obj.original && obj.original.state && obj.original.state.undetermined) {
+								obj.original.state.undetermined = false;
+							}
+
+							// apply down
+							if(s.indexOf('down') !== -1) {
+								for(i = 0, j = obj.children_d.length; i < j; i++) {
+									tmp = this._model.data[obj.children_d[i]];
+									tmp.state[ t ? 'selected' : 'checked' ] = false;
+									if(tmp && tmp.original && tmp.original.state && tmp.original.state.undetermined) {
+										tmp.original.state.undetermined = false;
+									}
+								}
+							}
+
+							// apply up
+							if(s.indexOf('up') !== -1) {
+								for(i = 0, j = obj.parents.length; i < j; i++) {
+									tmp = this._model.data[obj.parents[i]];
+									tmp.state[ t ? 'selected' : 'checked' ] = false;
+									if(tmp && tmp.original && tmp.original.state && tmp.original.state.undetermined) {
+										tmp.original.state.undetermined = false;
+									}
+									tmp = this.get_node(obj.parents[i], true);
+									if(tmp && tmp.length) {
+										tmp.children('.jstree-anchor').removeClass(t ? 'jstree-clicked' : 'jstree-checked');
+									}
+								}
+							}
+							tmp = [];
+							for(i = 0, j = this._data[ t ? 'core' : 'checkbox' ].selected.length; i < j; i++) {
+								// apply down + apply up
+								if(
+									(s.indexOf('down') === -1 || $.inArray(this._data[ t ? 'core' : 'checkbox' ].selected[i], obj.children_d) === -1) &&
+									(s.indexOf('up') === -1 || $.inArray(this._data[ t ? 'core' : 'checkbox' ].selected[i], obj.parents) === -1)
+								) {
+									tmp.push(this._data[ t ? 'core' : 'checkbox' ].selected[i]);
+								}
+							}
+							this._data[ t ? 'core' : 'checkbox' ].selected = $.vakata.array_unique(tmp);
+
+							// apply down (process .children separately?)
+							if(s.indexOf('down') !== -1 && dom.length) {
+								dom.find('.jstree-anchor').removeClass(t ? 'jstree-clicked' : 'jstree-checked');
+							}
+						}, this));
+			}
+			if(this.settings.checkbox.cascade.indexOf('up') !== -1) {
+				this.element
+					.on('delete_node.jstree', $.proxy(function (e, data) {
+							// apply up (whole handler)
+							var p = this.get_node(data.parent),
+								m = this._model.data,
+								i, j, c, tmp, t = this.settings.checkbox.tie_selection;
+							while(p && p.id !== '#') {
+								c = 0;
+								for(i = 0, j = p.children.length; i < j; i++) {
+									c += m[p.children[i]].state[ t ? 'selected' : 'checked' ];
+								}
+								if(c === j) {
+									p.state[ t ? 'selected' : 'checked' ] = true;
+									this._data[ t ? 'core' : 'checkbox' ].selected.push(p.id);
+									tmp = this.get_node(p, true);
+									if(tmp && tmp.length) {
+										tmp.children('.jstree-anchor').addClass(t ? 'jstree-clicked' : 'jstree-checked');
+									}
+								}
+								else {
+									break;
+								}
+								p = this.get_node(p.parent);
+							}
+						}, this))
+					.on('move_node.jstree', $.proxy(function (e, data) {
+							// apply up (whole handler)
+							var is_multi = data.is_multi,
+								old_par = data.old_parent,
+								new_par = this.get_node(data.parent),
+								m = this._model.data,
+								p, c, i, j, tmp, t = this.settings.checkbox.tie_selection;
+							if(!is_multi) {
+								p = this.get_node(old_par);
+								while(p && p.id !== '#') {
+									c = 0;
+									for(i = 0, j = p.children.length; i < j; i++) {
+										c += m[p.children[i]].state[ t ? 'selected' : 'checked' ];
+									}
+									if(c === j) {
+										p.state[ t ? 'selected' : 'checked' ] = true;
+										this._data[ t ? 'core' : 'checkbox' ].selected.push(p.id);
+										tmp = this.get_node(p, true);
+										if(tmp && tmp.length) {
+											tmp.children('.jstree-anchor').addClass(t ? 'jstree-clicked' : 'jstree-checked');
+										}
+									}
+									else {
+										break;
+									}
+									p = this.get_node(p.parent);
+								}
+							}
+							p = new_par;
+							while(p && p.id !== '#') {
+								c = 0;
+								for(i = 0, j = p.children.length; i < j; i++) {
+									c += m[p.children[i]].state[ t ? 'selected' : 'checked' ];
+								}
+								if(c === j) {
+									if(!p.state[ t ? 'selected' : 'checked' ]) {
+										p.state[ t ? 'selected' : 'checked' ] = true;
+										this._data[ t ? 'core' : 'checkbox' ].selected.push(p.id);
+										tmp = this.get_node(p, true);
+										if(tmp && tmp.length) {
+											tmp.children('.jstree-anchor').addClass(t ? 'jstree-clicked' : 'jstree-checked');
+										}
+									}
+								}
+								else {
+									if(p.state[ t ? 'selected' : 'checked' ]) {
+										p.state[ t ? 'selected' : 'checked' ] = false;
+										this._data[ t ? 'core' : 'checkbox' ].selected = $.vakata.array_remove_item(this._data[ t ? 'core' : 'checkbox' ].selected, p.id);
+										tmp = this.get_node(p, true);
+										if(tmp && tmp.length) {
+											tmp.children('.jstree-anchor').removeClass(t ? 'jstree-clicked' : 'jstree-checked');
+										}
+									}
+									else {
+										break;
+									}
+								}
+								p = this.get_node(p.parent);
+							}
+						}, this));
+			}
+		};
+		/**
+		 * set the undetermined state where and if necessary. Used internally.
+		 * @private
+		 * @name _undetermined()
+		 * @plugin checkbox
+		 */
+		this._undetermined = function () {
+			var i, j, m = this._model.data, t = this.settings.checkbox.tie_selection, s = this._data[ t ? 'core' : 'checkbox' ].selected, p = [], tt = this;
+			for(i = 0, j = s.length; i < j; i++) {
+				if(m[s[i]] && m[s[i]].parents) {
+					p = p.concat(m[s[i]].parents);
+				}
+			}
+			// attempt for server side undetermined state
+			this.element.find('.jstree-closed').not(':has(.jstree-children)')
+				.each(function () {
+					var tmp = tt.get_node(this), tmp2;
+					if(!tmp.state.loaded) {
+						if(tmp.original && tmp.original.state && tmp.original.state.undetermined && tmp.original.state.undetermined === true) {
+							p.push(tmp.id);
+							p = p.concat(tmp.parents);
+						}
+					}
+					else {
+						for(i = 0, j = tmp.children_d.length; i < j; i++) {
+							tmp2 = m[tmp.children_d[i]];
+							if(!tmp2.state.loaded && tmp2.original && tmp2.original.state && tmp2.original.state.undetermined && tmp2.original.state.undetermined === true) {
+								p.push(tmp2.id);
+								p = p.concat(tmp2.parents);
+							}
+						}
+					}
+				});
+			p = $.vakata.array_unique(p);
+			p = $.vakata.array_remove_item(p,'#');
+
+			this.element.find('.jstree-undetermined').removeClass('jstree-undetermined');
+			for(i = 0, j = p.length; i < j; i++) {
+				if(!m[p[i]].state[ t ? 'selected' : 'checked' ]) {
+					s = this.get_node(p[i], true);
+					if(s && s.length) {
+						s.children('.jstree-anchor').children('.jstree-checkbox').addClass('jstree-undetermined');
+					}
+				}
+			}
+		};
+		this.redraw_node = function(obj, deep, is_callback) {
+			obj = parent.redraw_node.call(this, obj, deep, is_callback);
+			if(obj) {
+				var i, j, tmp = null;
+				for(i = 0, j = obj.childNodes.length; i < j; i++) {
+					if(obj.childNodes[i] && obj.childNodes[i].className && obj.childNodes[i].className.indexOf("jstree-anchor") !== -1) {
+						tmp = obj.childNodes[i];
+						break;
+					}
+				}
+				if(tmp) {
+					if(!this.settings.checkbox.tie_selection && this._model.data[obj.id].state.checked) { tmp.className += ' jstree-checked'; }
+					tmp.insertBefore(_i.cloneNode(false), tmp.childNodes[0]);
+				}
+			}
+			if(!is_callback && this.settings.checkbox.cascade.indexOf('undetermined') !== -1) {
+				if(this._data.checkbox.uto) { clearTimeout(this._data.checkbox.uto); }
+				this._data.checkbox.uto = setTimeout($.proxy(this._undetermined, this), 50);
+			}
+			return obj;
+		};
+		/**
+		 * show the node checkbox icons
+		 * @name show_checkboxes()
+		 * @plugin checkbox
+		 */
+		this.show_checkboxes = function () { this._data.core.themes.checkboxes = true; this.get_container_ul().removeClass("jstree-no-checkboxes"); };
+		/**
+		 * hide the node checkbox icons
+		 * @name hide_checkboxes()
+		 * @plugin checkbox
+		 */
+		this.hide_checkboxes = function () { this._data.core.themes.checkboxes = false; this.get_container_ul().addClass("jstree-no-checkboxes"); };
+		/**
+		 * toggle the node checkbox icons
+		 * @name toggle_checkboxes()
+		 * @plugin checkbox
+		 */
+		this.toggle_checkboxes = function () { if(this._data.core.themes.checkboxes) { this.hide_checkboxes(); } else { this.show_checkboxes(); } };
+		/**
+		 * checks if a node is in an undetermined state
+		 * @name is_undetermined(obj)
+		 * @param  {mixed} obj
+		 * @return {Boolean}
+		 */
+		this.is_undetermined = function (obj) {
+			obj = this.get_node(obj);
+			var s = this.settings.checkbox.cascade, i, j, t = this.settings.checkbox.tie_selection, d = this._data[ t ? 'core' : 'checkbox' ].selected, m = this._model.data;
+			if(!obj || obj.state[ t ? 'selected' : 'checked' ] === true || s.indexOf('undetermined') === -1 || (s.indexOf('down') === -1 && s.indexOf('up') === -1)) {
+				return false;
+			}
+			if(!obj.state.loaded && obj.original.state.undetermined === true) {
+				return true;
+			}
+			for(i = 0, j = obj.children_d.length; i < j; i++) {
+				if($.inArray(obj.children_d[i], d) !== -1 || (!m[obj.children_d[i]].state.loaded && m[obj.children_d[i]].original.state.undetermined)) {
+					return true;
+				}
+			}
+			return false;
+		};
+
+		this.activate_node = function (obj, e) {
+			if(this.settings.checkbox.tie_selection && (this.settings.checkbox.whole_node || $(e.target).hasClass('jstree-checkbox'))) {
+				e.ctrlKey = true;
+			}
+			if(this.settings.checkbox.tie_selection || (!this.settings.checkbox.whole_node && !$(e.target).hasClass('jstree-checkbox'))) {
+				return parent.activate_node.call(this, obj, e);
+			}
+			if(this.is_checked(obj)) {
+				this.uncheck_node(obj, e);
+			}
+			else {
+				this.check_node(obj, e);
+			}
+			this.trigger('activate_node', { 'node' : this.get_node(obj) });
+		};
+
+		/**
+		 * check a node (only if tie_selection in checkbox settings is false, otherwise select_node will be called internally)
+		 * @name check_node(obj)
+		 * @param {mixed} obj an array can be used to check multiple nodes
+		 * @trigger check_node.jstree
+		 * @plugin checkbox
+		 */
+		this.check_node = function (obj, e) {
+			if(this.settings.checkbox.tie_selection) { return this.select_node(obj, false, true, e); }
+			var dom, t1, t2, th;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.check_node(obj[t1], e);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			dom = this.get_node(obj, true);
+			if(!obj.state.checked) {
+				obj.state.checked = true;
+				this._data.checkbox.selected.push(obj.id);
+				if(dom && dom.length) {
+					dom.children('.jstree-anchor').addClass('jstree-checked');
+				}
+				/**
+				 * triggered when a node is checked (only if tie_selection in checkbox settings is false)
+				 * @event
+				 * @name check_node.jstree
+				 * @param {Object} node
+				 * @param {Array} selected the current selection
+				 * @param {Object} event the event (if any) that triggered this check_node
+				 * @plugin checkbox
+				 */
+				this.trigger('check_node', { 'node' : obj, 'selected' : this._data.checkbox.selected, 'event' : e });
+			}
+		};
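+		// Example (hypothetical ID): with `tie_selection` set to `false`,
+		// checking is tracked by the plugin independently of node selection:
+		//   $.jstree.reference('#tree').check_node('child_1');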
+		/**
+		 * uncheck a node (only if tie_selection in checkbox settings is false, otherwise deselect_node will be called internally)
+		 * @name uncheck_node(obj)
+		 * @param {mixed} obj an array can be used to uncheck multiple nodes
+		 * @trigger uncheck_node.jstree
+		 * @plugin checkbox
+		 */
+		this.uncheck_node = function (obj, e) {
+			if(this.settings.checkbox.tie_selection) { return this.deselect_node(obj, false, e); }
+			var t1, t2, dom;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.uncheck_node(obj[t1], e);
+				}
+				return true;
+			}
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') {
+				return false;
+			}
+			dom = this.get_node(obj, true);
+			if(obj.state.checked) {
+				obj.state.checked = false;
+				this._data.checkbox.selected = $.vakata.array_remove_item(this._data.checkbox.selected, obj.id);
+				if(dom.length) {
+					dom.children('.jstree-anchor').removeClass('jstree-checked');
+				}
+				/**
+				 * triggered when a node is unchecked (only if tie_selection in checkbox settings is false)
+				 * @event
+				 * @name uncheck_node.jstree
+				 * @param {Object} node
+				 * @param {Array} selected the current selection
+				 * @param {Object} event the event (if any) that triggered this uncheck_node
+				 * @plugin checkbox
+				 */
+				this.trigger('uncheck_node', { 'node' : obj, 'selected' : this._data.checkbox.selected, 'event' : e });
+			}
+		};
+		/**
+		 * checks all nodes in the tree (only if tie_selection in checkbox settings is false, otherwise select_all will be called internally)
+		 * @name check_all()
+		 * @trigger check_all.jstree, changed.jstree
+		 * @plugin checkbox
+		 */
+		this.check_all = function () {
+			if(this.settings.checkbox.tie_selection) { return this.select_all(); }
+			var tmp = this._data.checkbox.selected.concat([]), i, j;
+			this._data.checkbox.selected = this._model.data['#'].children_d.concat();
+			for(i = 0, j = this._data.checkbox.selected.length; i < j; i++) {
+				if(this._model.data[this._data.checkbox.selected[i]]) {
+					this._model.data[this._data.checkbox.selected[i]].state.checked = true;
+				}
+			}
+			this.redraw(true);
+			/**
+			 * triggered when all nodes are checked (only if tie_selection in checkbox settings is false)
+			 * @event
+			 * @name check_all.jstree
+			 * @param {Array} selected the current selection
+			 * @plugin checkbox
+			 */
+			this.trigger('check_all', { 'selected' : this._data.checkbox.selected });
+		};
+		/**
+		 * uncheck all checked nodes (only if tie_selection in checkbox settings is false, otherwise deselect_all will be called internally)
+		 * @name uncheck_all()
+		 * @trigger uncheck_all.jstree
+		 * @plugin checkbox
+		 */
+		this.uncheck_all = function () {
+			if(this.settings.checkbox.tie_selection) { return this.deselect_all(); }
+			var tmp = this._data.checkbox.selected.concat([]), i, j;
+			for(i = 0, j = this._data.checkbox.selected.length; i < j; i++) {
+				if(this._model.data[this._data.checkbox.selected[i]]) {
+					this._model.data[this._data.checkbox.selected[i]].state.checked = false;
+				}
+			}
+			this._data.checkbox.selected = [];
+			this.element.find('.jstree-checked').removeClass('jstree-checked');
+			/**
+			 * triggered when all nodes are unchecked (only if tie_selection in checkbox settings is false)
+			 * @event
+			 * @name uncheck_all.jstree
+			 * @param {Object} node the previous selection
+			 * @param {Array} selected the current selection
+			 * @plugin checkbox
+			 */
+			this.trigger('uncheck_all', { 'selected' : this._data.checkbox.selected, 'node' : tmp });
+		};
+		/**
+		 * checks if a node is checked (if tie_selection is on in the settings this function will return the same as is_selected)
+		 * @name is_checked(obj)
+		 * @param  {mixed}  obj
+		 * @return {Boolean}
+		 * @plugin checkbox
+		 */
+		this.is_checked = function (obj) {
+			if(this.settings.checkbox.tie_selection) { return this.is_selected(obj); }
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+			return obj.state.checked;
+		};
+		/**
+		 * get an array of all checked nodes (if tie_selection is on in the settings this function will return the same as get_selected)
+		 * @name get_checked([full])
+		 * @param  {mixed}  full if set to `true` the returned array will consist of the full node objects, otherwise - only IDs will be returned
+		 * @return {Array}
+		 * @plugin checkbox
+		 */
+		this.get_checked = function (full) {
+			if(this.settings.checkbox.tie_selection) { return this.get_selected(full); }
+			return full ? $.map(this._data.checkbox.selected, $.proxy(function (i) { return this.get_node(i); }, this)) : this._data.checkbox.selected;
+		};
+		/**
+		 * get an array of all top level checked nodes (ignoring children of checked nodes) (if tie_selection is on in the settings this function will return the same as get_top_selected)
+		 * @name get_top_checked([full])
+		 * @param  {mixed}  full if set to `true` the returned array will consist of the full node objects, otherwise - only IDs will be returned
+		 * @return {Array}
+		 * @plugin checkbox
+		 */
+		this.get_top_checked = function (full) {
+			if(this.settings.checkbox.tie_selection) { return this.get_top_selected(full); }
+			var tmp = this.get_checked(true),
+				obj = {}, i, j, k, l;
+			for(i = 0, j = tmp.length; i < j; i++) {
+				obj[tmp[i].id] = tmp[i];
+			}
+			for(i = 0, j = tmp.length; i < j; i++) {
+				for(k = 0, l = tmp[i].children_d.length; k < l; k++) {
+					if(obj[tmp[i].children_d[k]]) {
+						delete obj[tmp[i].children_d[k]];
+					}
+				}
+			}
+			tmp = [];
+			for(i in obj) {
+				if(obj.hasOwnProperty(i)) {
+					tmp.push(i);
+				}
+			}
+			return full ? $.map(tmp, $.proxy(function (i) { return this.get_node(i); }, this)) : tmp;
+		};
+		/**
+		 * get an array of all bottom level checked nodes (ignoring selected parents) (if tie_selection is on in the settings this function will return the same as get_bottom_selected)
+		 * @name get_bottom_checked([full])
+		 * @param  {mixed}  full if set to `true` the returned array will consist of the full node objects, otherwise - only IDs will be returned
+		 * @return {Array}
+		 * @plugin checkbox
+		 */
+		this.get_bottom_checked = function (full) {
+			if(this.settings.checkbox.tie_selection) { return this.get_bottom_selected(full); }
+			var tmp = this.get_checked(true),
+				obj = [], i, j;
+			for(i = 0, j = tmp.length; i < j; i++) {
+				if(!tmp[i].children.length) {
+					obj.push(tmp[i].id);
+				}
+			}
+			return full ? $.map(obj, $.proxy(function (i) { return this.get_node(i); }, this)) : obj;
+		};
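+		// Example: the three getters differ only in which part of the checked
+		// hierarchy they report:
+		//   var inst = $.jstree.reference('#tree');
+		//   inst.get_checked();        // all checked node IDs
+		//   inst.get_top_checked();    // checked IDs not contained in another checked node
+		//   inst.get_bottom_checked(); // checked IDs that have no children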
+	};
+
+	// include the checkbox plugin by default
+	// $.jstree.defaults.plugins.push("checkbox");
+
+/**
+ * ### Contextmenu plugin
+ *
+ * Shows a context menu when a node is right-clicked.
+ */
+// TODO: move logic outside of function + check multiple move
+
+	/**
+	 * stores all defaults for the contextmenu plugin
+	 * @name $.jstree.defaults.contextmenu
+	 * @plugin contextmenu
+	 */
+	$.jstree.defaults.contextmenu = {
+		/**
+		 * a boolean indicating if the node should be selected when the context menu is invoked on it. Defaults to `true`.
+		 * @name $.jstree.defaults.contextmenu.select_node
+		 * @plugin contextmenu
+		 */
+		select_node : true,
+		/**
+		 * a boolean indicating if the menu should be shown aligned with the node. Defaults to `true`; if `false`, the mouse coordinates are used.
+		 * @name $.jstree.defaults.contextmenu.show_at_node
+		 * @plugin contextmenu
+		 */
+		show_at_node : true,
+		/**
+		 * an object of actions, or a function that accepts a node and a callback function and calls the callback function with an object of actions available for that node (the function may also simply return the items object directly).
+		 * 
+		 * Each action consists of a key (a unique name) and a value which is an object with the following properties (only label and action are required):
+		 * 
+		 * * `separator_before` - a boolean indicating if there should be a separator before this item
+		 * * `separator_after` - a boolean indicating if there should be a separator after this item
+		 * * `_disabled` - a boolean indicating if this action should be disabled
+		 * * `label` - a string - the name of the action (could be a function returning a string)
+		 * * `action` - a function to be executed if this item is chosen
+		 * * `icon` - a string, can be a path to an icon or a className; if using an image that is in the current directory use a `./` prefix, otherwise it will be detected as a class
+		 * * `shortcut` - keyCode which will trigger the action if the menu is open (for example `113` for rename, which equals F2)
+		 * * `shortcut_label` - shortcut label (like for example `F2` for rename)
+		 * 
+		 * @name $.jstree.defaults.contextmenu.items
+		 * @plugin contextmenu
+		 */
+		items : function (o, cb) { // Could be an object directly
+			return {
+				"create" : {
+					"separator_before"	: false,
+					"separator_after"	: true,
+					"_disabled"			: false, //(this.check("create_node", data.reference, {}, "last")),
+					"label"				: "Create",
+					"action"			: function (data) {
+						var inst = $.jstree.reference(data.reference),
+							obj = inst.get_node(data.reference);
+						inst.create_node(obj, {}, "last", function (new_node) {
+							setTimeout(function () { inst.edit(new_node); },0);
+						});
+					}
+				},
+				"rename" : {
+					"separator_before"	: false,
+					"separator_after"	: false,
+					"_disabled"			: false, //(this.check("rename_node", data.reference, this.get_parent(data.reference), "")),
+					"label"				: "Rename",
+					/*
+					"shortcut"			: 113,
+					"shortcut_label"	: 'F2',
+					"icon"				: "glyphicon glyphicon-leaf",
+					*/
+					"action"			: function (data) {
+						var inst = $.jstree.reference(data.reference),
+							obj = inst.get_node(data.reference);
+						inst.edit(obj);
+					}
+				},
+				"remove" : {
+					"separator_before"	: false,
+					"icon"				: false,
+					"separator_after"	: false,
+					"_disabled"			: false, //(this.check("delete_node", data.reference, this.get_parent(data.reference), "")),
+					"label"				: "Delete",
+					"action"			: function (data) {
+						var inst = $.jstree.reference(data.reference),
+							obj = inst.get_node(data.reference);
+						if(inst.is_selected(obj)) {
+							inst.delete_node(inst.get_selected());
+						}
+						else {
+							inst.delete_node(obj);
+						}
+					}
+				},
+				"ccp" : {
+					"separator_before"	: true,
+					"icon"				: false,
+					"separator_after"	: false,
+					"label"				: "Edit",
+					"action"			: false,
+					"submenu" : {
+						"cut" : {
+							"separator_before"	: false,
+							"separator_after"	: false,
+							"label"				: "Cut",
+							"action"			: function (data) {
+								var inst = $.jstree.reference(data.reference),
+									obj = inst.get_node(data.reference);
+								if(inst.is_selected(obj)) {
+									inst.cut(inst.get_selected());
+								}
+								else {
+									inst.cut(obj);
+								}
+							}
+						},
+						"copy" : {
+							"separator_before"	: false,
+							"icon"				: false,
+							"separator_after"	: false,
+							"label"				: "Copy",
+							"action"			: function (data) {
+								var inst = $.jstree.reference(data.reference),
+									obj = inst.get_node(data.reference);
+								if(inst.is_selected(obj)) {
+									inst.copy(inst.get_selected());
+								}
+								else {
+									inst.copy(obj);
+								}
+							}
+						},
+						"paste" : {
+							"separator_before"	: false,
+							"icon"				: false,
+							"_disabled"			: function (data) {
+								return !$.jstree.reference(data.reference).can_paste();
+							},
+							"separator_after"	: false,
+							"label"				: "Paste",
+							"action"			: function (data) {
+								var inst = $.jstree.reference(data.reference),
+									obj = inst.get_node(data.reference);
+								inst.paste(obj);
+							}
+						}
+					}
+				}
+			};
+		}
+	};
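+	// Example (hypothetical container): replace the default menu with a single
+	// custom entry - the `items` function receives the right-clicked node:
+	//   $('#tree').jstree({
+	//     'plugins'     : ['contextmenu'],
+	//     'contextmenu' : {
+	//       'items' : function (node) {
+	//         return {
+	//           'open' : {
+	//             'label'  : 'Open',
+	//             'action' : function (data) {
+	//               var inst = $.jstree.reference(data.reference);
+	//               inst.open_node(inst.get_node(data.reference));
+	//             }
+	//           }
+	//         };
+	//       }
+	//     }
+	//   });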
+
+	$.jstree.plugins.contextmenu = function (options, parent) {
+		this.bind = function () {
+			parent.bind.call(this);
+
+			var last_ts = 0;
+			this.element
+				.on("contextmenu.jstree", ".jstree-anchor", $.proxy(function (e) {
+						e.preventDefault();
+						last_ts = e.ctrlKey ? e.timeStamp : 0;
+						if(!this.is_loading(e.currentTarget)) {
+							this.show_contextmenu(e.currentTarget, e.pageX, e.pageY, e);
+						}
+					}, this))
+				.on("click.jstree", ".jstree-anchor", $.proxy(function (e) {
+						if(this._data.contextmenu.visible && (!last_ts || e.timeStamp - last_ts > 250)) { // work around safari & macOS ctrl+click
+							$.vakata.context.hide();
+						}
+					}, this));
+			/*
+			if(!('oncontextmenu' in document.body) && ('ontouchstart' in document.body)) {
+				var el = null, tm = null;
+				this.element
+					.on("touchstart", ".jstree-anchor", function (e) {
+						el = e.currentTarget;
+						tm = +new Date();
+						$(document).one("touchend", function (e) {
+							e.target = document.elementFromPoint(e.originalEvent.targetTouches[0].pageX - window.pageXOffset, e.originalEvent.targetTouches[0].pageY - window.pageYOffset);
+							e.currentTarget = e.target;
+							tm = ((+(new Date())) - tm);
+							if(e.target === el && tm > 600 && tm < 1000) {
+								e.preventDefault();
+								$(el).trigger('contextmenu', e);
+							}
+							el = null;
+							tm = null;
+						});
+					});
+			}
+			*/
+			$(document).on("context_hide.vakata", $.proxy(function () { this._data.contextmenu.visible = false; }, this));
+		};
+		this.teardown = function () {
+			if(this._data.contextmenu.visible) {
+				$.vakata.context.hide();
+			}
+			parent.teardown.call(this);
+		};
+
+		/**
+		 * prepare and show the context menu for a node
+		 * @name show_contextmenu(obj [, x, y, e])
+		 * @param {mixed} obj the node
+		 * @param {Number} x the x-coordinate relative to the document to show the menu at
+		 * @param {Number} y the y-coordinate relative to the document to show the menu at
+		 * @param {Object} e the event, if available, that triggered the contextmenu
+		 * @plugin contextmenu
+		 * @trigger show_contextmenu.jstree
+		 */
+		this.show_contextmenu = function (obj, x, y, e) {
+			obj = this.get_node(obj);
+			if(!obj || obj.id === '#') { return false; }
+			var s = this.settings.contextmenu,
+				d = this.get_node(obj, true),
+				a = d.children(".jstree-anchor"),
+				o = false,
+				i = false;
+			if(s.show_at_node || x === undefined || y === undefined) {
+				o = a.offset();
+				x = o.left;
+				y = o.top + this._data.core.li_height;
+			}
+			if(this.settings.contextmenu.select_node && !this.is_selected(obj)) {
+				this.activate_node(obj, e);
+			}
+
+			i = s.items;
+			if($.isFunction(i)) {
+				i = i.call(this, obj, $.proxy(function (i) {
+					this._show_contextmenu(obj, x, y, i);
+				}, this));
+			}
+			if($.isPlainObject(i)) {
+				this._show_contextmenu(obj, x, y, i);
+			}
+		};
+		/**
+		 * show the prepared context menu for a node
+		 * @name _show_contextmenu(obj, x, y, i)
+		 * @param {mixed} obj the node
+		 * @param {Number} x the x-coordinate relative to the document to show the menu at
+		 * @param {Number} y the y-coordinate relative to the document to show the menu at
+		 * @param {Object} i the object of items to show
+		 * @plugin contextmenu
+		 * @trigger show_contextmenu.jstree
+		 * @private
+		 */
+		this._show_contextmenu = function (obj, x, y, i) {
+			var d = this.get_node(obj, true),
+				a = d.children(".jstree-anchor");
+			$(document).one("context_show.vakata", $.proxy(function (e, data) {
+				var cls = 'jstree-contextmenu jstree-' + this.get_theme() + '-contextmenu';
+				$(data.element).addClass(cls);
+			}, this));
+			this._data.contextmenu.visible = true;
+			$.vakata.context.show(a, { 'x' : x, 'y' : y }, i);
+			/**
+			 * triggered when the contextmenu is shown for a node
+			 * @event
+			 * @name show_contextmenu.jstree
+			 * @param {Object} node the node
+			 * @param {Number} x the x-coordinate of the menu relative to the document
+			 * @param {Number} y the y-coordinate of the menu relative to the document
+			 * @plugin contextmenu
+			 */
+			this.trigger('show_contextmenu', { "node" : obj, "x" : x, "y" : y });
+		};
+	};
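+
+	// Illustrative usage (not part of jsTree itself): a minimal sketch of invoking the
+	// method above programmatically; '#tree' and 'node_1' are assumed names.
+	/*
+	var inst = $('#tree').jstree(true);
+	inst.show_contextmenu('node_1');          // menu appears next to the node's anchor
+	inst.show_contextmenu('node_1', 100, 50); // explicit coordinates are honored when
+	                                          // settings.contextmenu.show_at_node is false
+	*/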
+
+	// contextmenu helper
+	(function ($) {
+		var right_to_left = false,
+			vakata_context = {
+				element		: false,
+				reference	: false,
+				position_x	: 0,
+				position_y	: 0,
+				items		: [],
+				html		: "",
+				is_visible	: false
+			};
+
+		$.vakata.context = {
+			settings : {
+				hide_onmouseleave	: 0,
+				icons				: true
+			},
+			_trigger : function (event_name) {
+				$(document).triggerHandler("context_" + event_name + ".vakata", {
+					"reference"	: vakata_context.reference,
+					"element"	: vakata_context.element,
+					"position"	: {
+						"x" : vakata_context.position_x,
+						"y" : vakata_context.position_y
+					}
+				});
+			},
+			_execute : function (i) {
+				i = vakata_context.items[i];
+				return i && (!i._disabled || ($.isFunction(i._disabled) && !i._disabled({ "item" : i, "reference" : vakata_context.reference, "element" : vakata_context.element }))) && i.action ? i.action.call(null, {
+							"item"		: i,
+							"reference"	: vakata_context.reference,
+							"element"	: vakata_context.element,
+							"position"	: {
+								"x" : vakata_context.position_x,
+								"y" : vakata_context.position_y
+							}
+						}) : false;
+			},
+			_parse : function (o, is_callback) {
+				if(!o) { return false; }
+				if(!is_callback) {
+					vakata_context.html		= "";
+					vakata_context.items	= [];
+				}
+				var str = "",
+					sep = false,
+					tmp;
+
+				if(is_callback) { str += "<"+"ul>"; }
+				$.each(o, function (i, val) {
+					if(!val) { return true; }
+					vakata_context.items.push(val);
+					if(!sep && val.separator_before) {
+						str += "<"+"li class='vakata-context-separator'><"+"a href='#' " + ($.vakata.context.settings.icons ? '' : 'style="margin-left:0px;"') + "> <"+"/a><"+"/li>";
+					}
+					sep = false;
+					str += "<"+"li class='" + (val._class || "") + (val._disabled === true || ($.isFunction(val._disabled) && val._disabled({ "item" : val, "reference" : vakata_context.reference, "element" : vakata_context.element })) ? " vakata-contextmenu-disabled " : "") + "' "+(val.shortcut?" data-shortcut='"+val.shortcut+"' ":'')+">";
+					str += "<"+"a href='#' rel='" + (vakata_context.items.length - 1) + "'>";
+					if($.vakata.context.settings.icons) {
+						str += "<"+"i ";
+						if(val.icon) {
+							if(val.icon.indexOf("/") !== -1 || val.icon.indexOf(".") !== -1) { str += " style='background:url(\"" + val.icon + "\") center center no-repeat' "; }
+							else { str += " class='" + val.icon + "' "; }
+						}
+						str += "><"+"/i><"+"span class='vakata-contextmenu-sep'> <"+"/span>";
+					}
+					str += ($.isFunction(val.label) ? val.label({ "item" : i, "reference" : vakata_context.reference, "element" : vakata_context.element }) : val.label) + (val.shortcut?' <span class="vakata-contextmenu-shortcut vakata-contextmenu-shortcut-'+val.shortcut+'">'+ (val.shortcut_label || '') +'</span>':'') + "<"+"/a>";
+					if(val.submenu) {
+						tmp = $.vakata.context._parse(val.submenu, true);
+						if(tmp) { str += tmp; }
+					}
+					str += "<"+"/li>";
+					if(val.separator_after) {
+						str += "<"+"li class='vakata-context-separator'><"+"a href='#' " + ($.vakata.context.settings.icons ? '' : 'style="margin-left:0px;"') + "> <"+"/a><"+"/li>";
+						sep = true;
+					}
+				});
+				str  = str.replace(/<li class\='vakata-context-separator'\><\/li\>$/,"");
+				if(is_callback) { str += "</ul>"; }
+				/**
+				 * triggered on the document when the contextmenu is parsed (HTML is built)
+				 * @event
+				 * @plugin contextmenu
+				 * @name context_parse.vakata
+				 * @param {jQuery} reference the element that was right clicked
+				 * @param {jQuery} element the DOM element of the menu itself
+				 * @param {Object} position the x & y coordinates of the menu
+				 */
+				if(!is_callback) { vakata_context.html = str; $.vakata.context._trigger("parse"); }
+				return str.length > 10 ? str : false;
+			},
+			_show_submenu : function (o) {
+				o = $(o);
+				if(!o.length || !o.children("ul").length) { return; }
+				var e = o.children("ul"),
+					x = o.offset().left + o.outerWidth(),
+					y = o.offset().top,
+					w = e.width(),
+					h = e.height(),
+					dw = $(window).width() + $(window).scrollLeft(),
+					dh = $(window).height() + $(window).scrollTop();
+				// a check could be saved here - whether one of the classes is already applied further up
+				if(right_to_left) {
+					o[x - (w + 10 + o.outerWidth()) < 0 ? "addClass" : "removeClass"]("vakata-context-left");
+				}
+				else {
+					o[x + w + 10 > dw ? "addClass" : "removeClass"]("vakata-context-right");
+				}
+				if(y + h + 10 > dh) {
+					e.css("bottom","-1px");
+				}
+				e.show();
+			},
+			show : function (reference, position, data) {
+				var o, e, x, y, w, h, dw, dh, cond = true;
+				if(vakata_context.element && vakata_context.element.length) {
+					vakata_context.element.width('');
+				}
+				switch(cond) {
+					case (!position && !reference):
+						return false;
+					case (!!position && !!reference):
+						vakata_context.reference	= reference;
+						vakata_context.position_x	= position.x;
+						vakata_context.position_y	= position.y;
+						break;
+					case (!position && !!reference):
+						vakata_context.reference	= reference;
+						o = reference.offset();
+						vakata_context.position_x	= o.left + reference.outerHeight();
+						vakata_context.position_y	= o.top;
+						break;
+					case (!!position && !reference):
+						vakata_context.position_x	= position.x;
+						vakata_context.position_y	= position.y;
+						break;
+				}
+				if(!!reference && !data && $(reference).data('vakata_contextmenu')) {
+					data = $(reference).data('vakata_contextmenu');
+				}
+				if($.vakata.context._parse(data)) {
+					vakata_context.element.html(vakata_context.html);
+				}
+				if(vakata_context.items.length) {
+					e = vakata_context.element;
+					x = vakata_context.position_x;
+					y = vakata_context.position_y;
+					w = e.width();
+					h = e.height();
+					dw = $(window).width() + $(window).scrollLeft();
+					dh = $(window).height() + $(window).scrollTop();
+					if(right_to_left) {
+						x -= e.outerWidth();
+						if(x < $(window).scrollLeft() + 20) {
+							x = $(window).scrollLeft() + 20;
+						}
+					}
+					if(x + w + 20 > dw) {
+						x = dw - (w + 20);
+					}
+					if(y + h + 20 > dh) {
+						y = dh - (h + 20);
+					}
+
+					vakata_context.element
+						.css({ "left" : x, "top" : y })
+						.show()
+						.find('a:eq(0)').focus().parent().addClass("vakata-context-hover");
+					vakata_context.is_visible = true;
+					/**
+					 * triggered on the document when the contextmenu is shown
+					 * @event
+					 * @plugin contextmenu
+					 * @name context_show.vakata
+					 * @param {jQuery} reference the element that was right clicked
+					 * @param {jQuery} element the DOM element of the menu itself
+					 * @param {Object} position the x & y coordinates of the menu
+					 */
+					$.vakata.context._trigger("show");
+				}
+			},
+			hide : function () {
+				if(vakata_context.is_visible) {
+					vakata_context.element.hide().find("ul").hide().end().find(':focus').blur();
+					vakata_context.is_visible = false;
+					/**
+					 * triggered on the document when the contextmenu is hidden
+					 * @event
+					 * @plugin contextmenu
+					 * @name context_hide.vakata
+					 * @param {jQuery} reference the element that was right clicked
+					 * @param {jQuery} element the DOM element of the menu itself
+					 * @param {Object} position the x & y coordinates of the menu
+					 */
+					$.vakata.context._trigger("hide");
+				}
+			}
+		};
+		$(function () {
+			right_to_left = $("body").css("direction") === "rtl";
+			var to = false;
+
+			vakata_context.element = $("<ul class='vakata-context'></ul>");
+			vakata_context.element
+				.on("mouseenter", "li", function (e) {
+					e.stopImmediatePropagation();
+
+					if($.contains(this, e.relatedTarget)) {
+					// removed because of the delegated mouseleave below
+						// $(this).find(".vakata-context-hover").removeClass("vakata-context-hover");
+						return;
+					}
+
+					if(to) { clearTimeout(to); }
+					vakata_context.element.find(".vakata-context-hover").removeClass("vakata-context-hover").end();
+
+					$(this)
+						.siblings().find("ul").hide().end().end()
+						.parentsUntil(".vakata-context", "li").addBack().addClass("vakata-context-hover");
+					$.vakata.context._show_submenu(this);
+				})
+				// experimental - does this add too much overhead?
+				.on("mouseleave", "li", function (e) {
+					if($.contains(this, e.relatedTarget)) { return; }
+					$(this).find(".vakata-context-hover").addBack().removeClass("vakata-context-hover");
+				})
+				.on("mouseleave", function (e) {
+					$(this).find(".vakata-context-hover").removeClass("vakata-context-hover");
+					if($.vakata.context.settings.hide_onmouseleave) {
+						to = setTimeout(
+							(function (t) {
+								return function () { $.vakata.context.hide(); };
+							}(this)), $.vakata.context.settings.hide_onmouseleave);
+					}
+				})
+				.on("click", "a", function (e) {
+					e.preventDefault();
+				//})
+				//.on("mouseup", "a", function (e) {
+					if(!$(this).blur().parent().hasClass("vakata-context-disabled") && $.vakata.context._execute($(this).attr("rel")) !== false) {
+						$.vakata.context.hide();
+					}
+				})
+				.on('keydown', 'a', function (e) {
+						var o = null;
+						switch(e.which) {
+							case 13:
+							case 32:
+								e.type = "mouseup";
+								e.preventDefault();
+								$(e.currentTarget).trigger(e);
+								break;
+							case 37:
+								if(vakata_context.is_visible) {
+									vakata_context.element.find(".vakata-context-hover").last().parents("li:eq(0)").find("ul").hide().find(".vakata-context-hover").removeClass("vakata-context-hover").end().end().children('a').focus();
+									e.stopImmediatePropagation();
+									e.preventDefault();
+								}
+								break;
+							case 38:
+								if(vakata_context.is_visible) {
+									o = vakata_context.element.find("ul:visible").addBack().last().children(".vakata-context-hover").removeClass("vakata-context-hover").prevAll("li:not(.vakata-context-separator)").first();
+									if(!o.length) { o = vakata_context.element.find("ul:visible").addBack().last().children("li:not(.vakata-context-separator)").last(); }
+									o.addClass("vakata-context-hover").children('a').focus();
+									e.stopImmediatePropagation();
+									e.preventDefault();
+								}
+								break;
+							case 39:
+								if(vakata_context.is_visible) {
+									vakata_context.element.find(".vakata-context-hover").last().children("ul").show().children("li:not(.vakata-context-separator)").removeClass("vakata-context-hover").first().addClass("vakata-context-hover").children('a').focus();
+									e.stopImmediatePropagation();
+									e.preventDefault();
+								}
+								break;
+							case 40:
+								if(vakata_context.is_visible) {
+									o = vakata_context.element.find("ul:visible").addBack().last().children(".vakata-context-hover").removeClass("vakata-context-hover").nextAll("li:not(.vakata-context-separator)").first();
+									if(!o.length) { o = vakata_context.element.find("ul:visible").addBack().last().children("li:not(.vakata-context-separator)").first(); }
+									o.addClass("vakata-context-hover").children('a').focus();
+									e.stopImmediatePropagation();
+									e.preventDefault();
+								}
+								break;
+							case 27:
+								$.vakata.context.hide();
+								e.preventDefault();
+								break;
+							default:
+								//console.log(e.which);
+								break;
+						}
+					})
+				.on('keydown', function (e) {
+					e.preventDefault();
+					var a = vakata_context.element.find('.vakata-contextmenu-shortcut-' + e.which).parent();
+					if(a.parent().not('.vakata-context-disabled')) {
+						a.mouseup();
+					}
+				})
+				.appendTo("body");
+
+			$(document)
+				.on("mousedown", function (e) {
+					if(vakata_context.is_visible && !$.contains(vakata_context.element[0], e.target)) { $.vakata.context.hide(); }
+				})
+				.on("context_show.vakata", function (e, data) {
+					vakata_context.element.find("li:has(ul)").children("a").addClass("vakata-context-parent");
+					if(right_to_left) {
+						vakata_context.element.addClass("vakata-context-rtl").css("direction", "rtl");
+					}
+					// also apply an RTL class?
+					vakata_context.element.find("ul").hide().end();
+				});
+		});
+	}($));
+	// $.jstree.defaults.plugins.push("contextmenu");
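+
+	// Illustrative usage (not part of jsTree itself): a minimal sketch of enabling the
+	// contextmenu plugin with a custom items function; '#tree' and the 'hello' item are
+	// assumed names, and the item object uses the same shape as the defaults above.
+	/*
+	$('#tree').jstree({
+		'plugins' : ['contextmenu'],
+		'contextmenu' : {
+			'items' : function (node) {
+				return {
+					'hello' : {
+						'label'  : 'Say hello',
+						'action' : function (data) {
+							var inst = $.jstree.reference(data.reference);
+							alert('Hello, ' + inst.get_text(data.reference));
+						}
+					}
+				};
+			}
+		}
+	});
+	*/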
+
+/**
+ * ### Drag'n'drop plugin
+ *
+ * Enables dragging and dropping of nodes in the tree, resulting in move or copy operations.
+ */
+
+	/**
+	 * stores all defaults for the drag'n'drop plugin
+	 * @name $.jstree.defaults.dnd
+	 * @plugin dnd
+	 */
+	$.jstree.defaults.dnd = {
+		/**
+		 * a boolean indicating if a copy should be possible while dragging (by pressing the meta key or Ctrl). Defaults to `true`.
+		 * @name $.jstree.defaults.dnd.copy
+		 * @plugin dnd
+		 */
+		copy : true,
+		/**
+		 * a number indicating how long (in milliseconds) a node should remain hovered while dragging before it is opened. Defaults to `500`.
+		 * @name $.jstree.defaults.dnd.open_timeout
+		 * @plugin dnd
+		 */
+		open_timeout : 500,
+		/**
+		 * a function invoked each time a node is about to be dragged; it is executed in the tree's scope and receives the nodes about to be dragged as an argument (array) - return `false` to prevent dragging
+		 * @name $.jstree.defaults.dnd.is_draggable
+		 * @plugin dnd
+		 */
+		is_draggable : true,
+		/**
+		 * a boolean indicating if checks should constantly be made while the user is dragging the node (as opposed to checking only on drop), default is `true`
+		 * @name $.jstree.defaults.dnd.check_while_dragging
+		 * @plugin dnd
+		 */
+		check_while_dragging : true,
+		/**
+		 * a boolean indicating if nodes from this tree should only be copied with dnd (as opposed to moved), default is `false`
+		 * @name $.jstree.defaults.dnd.always_copy
+		 * @plugin dnd
+		 */
+		always_copy : false,
+		/**
+		 * when dropping a node "inside", this setting indicates the position the node should go to - it can be an integer or a string: "first" (same as 0) or "last", default is `0`
+		 * @name $.jstree.defaults.dnd.inside_pos
+		 * @plugin dnd
+		 */
+		inside_pos : 0
+	};
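+
+	// Illustrative usage (not part of jsTree itself): a minimal sketch overriding the dnd
+	// defaults above; '#tree' is an assumed selector, and core.check_callback must allow
+	// move_node / copy_node for drops to actually succeed.
+	/*
+	$('#tree').jstree({
+		'core'    : { 'check_callback' : true },   // permit move_node / copy_node
+		'plugins' : ['dnd'],
+		'dnd'     : {
+			'copy'         : false,   // never copy, always move
+			'open_timeout' : 300,     // open hovered closed nodes a bit faster
+			'inside_pos'   : 'last'   // drops "inside" go to the end of the children
+		}
+	});
+	*/
+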
+	// TODO: now check works by checking for each node individually, how about max_children, unique, etc?
+	// TODO: drop somewhere else - maybe demo only?
+	$.jstree.plugins.dnd = function (options, parent) {
+		this.bind = function () {
+			parent.bind.call(this);
+
+			this.element
+				.on('mousedown.jstree touchstart.jstree', '.jstree-anchor', $.proxy(function (e) {
+					var obj = this.get_node(e.target),
+						mlt = this.is_selected(obj) ? this.get_selected().length : 1;
+					if(obj && obj.id && obj.id !== "#" && (e.which === 1 || e.type === "touchstart") &&
+						(this.settings.dnd.is_draggable === true || ($.isFunction(this.settings.dnd.is_draggable) && this.settings.dnd.is_draggable.call(this, (mlt > 1 ? this.get_selected(true) : [obj]))))
+					) {
+						this.element.trigger('mousedown.jstree');
+						return $.vakata.dnd.start(e, { 'jstree' : true, 'origin' : this, 'obj' : this.get_node(obj,true), 'nodes' : mlt > 1 ? this.get_selected() : [obj.id] }, '<div id="jstree-dnd" class="jstree-' + this.get_theme() + ( this.settings.core.themes.responsive ? ' jstree-dnd-responsive' : '' ) + '"><i class="jstree-icon jstree-er"></i>' + (mlt > 1 ? mlt + ' ' + this.get_string('nodes') : this.get_text(e.currentTarget, true)) + '<ins class="jstree-copy" style="display:none;">+</ins></div>');
+					}
+				}, this));
+		};
+	};
+
+	$(function() {
+		// bind only once for all instances
+		var lastmv = false,
+			laster = false,
+			opento = false,
+			marker = $('<div id="jstree-marker"> </div>').hide().appendTo('body');
+
+		$(document)
+			.bind('dnd_start.vakata', function (e, data) {
+				lastmv = false;
+			})
+			.bind('dnd_move.vakata', function (e, data) {
+				if(opento) { clearTimeout(opento); }
+				if(!data || !data.data || !data.data.jstree) { return; }
+
+				// if we are hovering the marker image do nothing (can happen on "inside" drags)
+				if(data.event.target.id && data.event.target.id === 'jstree-marker') {
+					return;
+				}
+
+				var ins = $.jstree.reference(data.event.target),
+					ref = false,
+					off = false,
+					rel = false,
+					l, t, h, p, i, o, ok, t1, t2, op, ps, pr, ip, tm;
+				// if we are over an instance
+				if(ins && ins._data && ins._data.dnd) {
+					marker.attr('class', 'jstree-' + ins.get_theme() + ( ins.settings.core.themes.responsive ? ' jstree-dnd-responsive' : '' ));
+					data.helper
+						.children().attr('class', 'jstree-' + ins.get_theme() + ( ins.settings.core.themes.responsive ? ' jstree-dnd-responsive' : '' ))
+						.find('.jstree-copy:eq(0)')[ data.data.origin && (data.data.origin.settings.dnd.always_copy || (data.data.origin.settings.dnd.copy && (data.event.metaKey || data.event.ctrlKey))) ? 'show' : 'hide' ]();
+
+
+					// if we are hovering the container itself, add a new root node
+					if( (data.event.target === ins.element[0] || data.event.target === ins.get_container_ul()[0]) && ins.get_container_ul().children().length === 0) {
+						ok = true;
+						for(t1 = 0, t2 = data.data.nodes.length; t1 < t2; t1++) {
+							ok = ok && ins.check( (data.data.origin && (data.data.origin.settings.dnd.always_copy || (data.data.origin.settings.dnd.copy && (data.event.metaKey || data.event.ctrlKey)) ) ? "copy_node" : "move_node"), (data.data.origin && data.data.origin !== ins ? data.data.origin.get_node(data.data.nodes[t1]) : data.data.nodes[t1]), '#', 'last', { 'dnd' : true, 'ref' : ins.get_node('#'), 'pos' : 'i', 'is_multi' : (data.data.origin && data.data.origin !== ins), 'is_foreign' : (!data.data.origin) });
+							if(!ok) { break; }
+						}
+						if(ok) {
+							lastmv = { 'ins' : ins, 'par' : '#', 'pos' : 'last' };
+							marker.hide();
+							data.helper.find('.jstree-icon:eq(0)').removeClass('jstree-er').addClass('jstree-ok');
+							return;
+						}
+					}
+					else {
+						// if we are hovering a tree node
+						ref = $(data.event.target).closest('.jstree-anchor');
+						if(ref && ref.length && ref.parent().is('.jstree-closed, .jstree-open, .jstree-leaf')) {
+							off = ref.offset();
+							rel = data.event.pageY - off.top;
+							h = ref.height();
+							if(rel < h / 3) {
+								o = ['b', 'i', 'a'];
+							}
+							else if(rel > h - h / 3) {
+								o = ['a', 'i', 'b'];
+							}
+							else {
+								o = rel > h / 2 ? ['i', 'a', 'b'] : ['i', 'b', 'a'];
+							}
+							$.each(o, function (j, v) {
+								switch(v) {
+									case 'b':
+										l = off.left - 6;
+										t = off.top;
+										p = ins.get_parent(ref);
+										i = ref.parent().index();
+										break;
+									case 'i':
+										ip = ins.settings.dnd.inside_pos;
+										tm = ins.get_node(ref.parent());
+										l = off.left - 2;
+										t = off.top + h / 2 + 1;
+										p = tm.id;
+										i = ip === 'first' ? 0 : (ip === 'last' ? tm.children.length : Math.min(ip, tm.children.length));
+										break;
+									case 'a':
+										l = off.left - 6;
+										t = off.top + h;
+										p = ins.get_parent(ref);
+										i = ref.parent().index() + 1;
+										break;
+								}
+								/*!
+								// TODO: moving inside, but the node is not yet loaded?
+								// the check will work anyway, as when moving the node will be loaded first and checked again
+								if(v === 'i' && !ins.is_loaded(p)) { }
+								*/
+								ok = true;
+								for(t1 = 0, t2 = data.data.nodes.length; t1 < t2; t1++) {
+									op = data.data.origin && (data.data.origin.settings.dnd.always_copy || (data.data.origin.settings.dnd.copy && (data.event.metaKey || data.event.ctrlKey))) ? "copy_node" : "move_node";
+									ps = i;
+									if(op === "move_node" && v === 'a' && (data.data.origin && data.data.origin === ins) && p === ins.get_parent(data.data.nodes[t1])) {
+										pr = ins.get_node(p);
+										if(ps > $.inArray(data.data.nodes[t1], pr.children)) {
+											ps -= 1;
+										}
+									}
+									ok = ok && ( (ins && ins.settings && ins.settings.dnd && ins.settings.dnd.check_while_dragging === false) || ins.check(op, (data.data.origin && data.data.origin !== ins ? data.data.origin.get_node(data.data.nodes[t1]) : data.data.nodes[t1]), p, ps, { 'dnd' : true, 'ref' : ins.get_node(ref.parent()), 'pos' : v, 'is_multi' : (data.data.origin && data.data.origin !== ins), 'is_foreign' : (!data.data.origin) }) );
+									if(!ok) {
+										if(ins && ins.last_error) { laster = ins.last_error(); }
+										break;
+									}
+								}
+								if(ok) {
+									if(v === 'i' && ref.parent().is('.jstree-closed') && ins.settings.dnd.open_timeout) {
+										opento = setTimeout((function (x, z) { return function () { x.open_node(z); }; }(ins, ref)), ins.settings.dnd.open_timeout);
+									}
+									lastmv = { 'ins' : ins, 'par' : p, 'pos' : v === 'i' && ip === 'last' && i === 0 && !ins.is_loaded(tm) ? 'last' : i };
+									marker.css({ 'left' : l + 'px', 'top' : t + 'px' }).show();
+									data.helper.find('.jstree-icon:eq(0)').removeClass('jstree-er').addClass('jstree-ok');
+									laster = {};
+									o = true;
+									return false;
+								}
+							});
+							if(o === true) { return; }
+						}
+					}
+				}
+				lastmv = false;
+				data.helper.find('.jstree-icon').removeClass('jstree-ok').addClass('jstree-er');
+				marker.hide();
+			})
+			.bind('dnd_scroll.vakata', function (e, data) {
+				if(!data || !data.data || !data.data.jstree) { return; }
+				marker.hide();
+				lastmv = false;
+				data.helper.find('.jstree-icon:eq(0)').removeClass('jstree-ok').addClass('jstree-er');
+			})
+			.bind('dnd_stop.vakata', function (e, data) {
+				if(opento) { clearTimeout(opento); }
+				if(!data || !data.data || !data.data.jstree) { return; }
+				marker.hide();
+				var i, j, nodes = [];
+				if(lastmv) {
+					for(i = 0, j = data.data.nodes.length; i < j; i++) {
+						nodes[i] = data.data.origin ? data.data.origin.get_node(data.data.nodes[i]) : data.data.nodes[i];
+						if(data.data.origin) {
+							nodes[i].instance = data.data.origin;
+						}
+					}
+					lastmv.ins[ data.data.origin && (data.data.origin.settings.dnd.always_copy || (data.data.origin.settings.dnd.copy && (data.event.metaKey || data.event.ctrlKey))) ? 'copy_node' : 'move_node' ](nodes, lastmv.par, lastmv.pos);
+				}
+				else {
+					i = $(data.event.target).closest('.jstree');
+					if(i.length && laster && laster.error && laster.error === 'check') {
+						i = i.jstree(true);
+						if(i) {
+							i.settings.core.error.call(this, laster);
+						}
+					}
+				}
+			})
+			.bind('keyup keydown', function (e, data) {
+				data = $.vakata.dnd._get();
+				if(data && data.data && data.data.jstree) {
+					data.helper.find('.jstree-copy:eq(0)')[ data.data.origin && (data.data.origin.settings.dnd.always_copy || (data.data.origin.settings.dnd.copy && (e.metaKey || e.ctrlKey))) ? 'show' : 'hide' ]();
+				}
+			});
+	});
+
+	// helpers
+	(function ($) {
+		// private variable
+		var vakata_dnd = {
+			element	: false,
+			target	: false,
+			is_down	: false,
+			is_drag	: false,
+			helper	: false,
+			helper_w: 0,
+			data	: false,
+			init_x	: 0,
+			init_y	: 0,
+			scroll_l: 0,
+			scroll_t: 0,
+			scroll_e: false,
+			scroll_i: false,
+			is_touch: false
+		};
+		$.vakata.dnd = {
+			settings : {
+				scroll_speed		: 10,
+				scroll_proximity	: 20,
+				helper_left			: 5,
+				helper_top			: 10,
+				threshold			: 5,
+				threshold_touch		: 50
+			},
+			_trigger : function (event_name, e) {
+				var data = $.vakata.dnd._get();
+				data.event = e;
+				$(document).triggerHandler("dnd_" + event_name + ".vakata", data);
+			},
+			_get : function () {
+				return {
+					"data"		: vakata_dnd.data,
+					"element"	: vakata_dnd.element,
+					"helper"	: vakata_dnd.helper
+				};
+			},
+			_clean : function () {
+				if(vakata_dnd.helper) { vakata_dnd.helper.remove(); }
+				if(vakata_dnd.scroll_i) { clearInterval(vakata_dnd.scroll_i); vakata_dnd.scroll_i = false; }
+				vakata_dnd = {
+					element	: false,
+					target	: false,
+					is_down	: false,
+					is_drag	: false,
+					helper	: false,
+					helper_w: 0,
+					data	: false,
+					init_x	: 0,
+					init_y	: 0,
+					scroll_l: 0,
+					scroll_t: 0,
+					scroll_e: false,
+					scroll_i: false,
+					is_touch: false
+				};
+				$(document).off("mousemove touchmove", $.vakata.dnd.drag);
+				$(document).off("mouseup touchend", $.vakata.dnd.stop);
+			},
+			_scroll : function (init_only) {
+				if(!vakata_dnd.scroll_e || (!vakata_dnd.scroll_l && !vakata_dnd.scroll_t)) {
+					if(vakata_dnd.scroll_i) { clearInterval(vakata_dnd.scroll_i); vakata_dnd.scroll_i = false; }
+					return false;
+				}
+				if(!vakata_dnd.scroll_i) {
+					vakata_dnd.scroll_i = setInterval($.vakata.dnd._scroll, 100);
+					return false;
+				}
+				if(init_only === true) { return false; }
+
+				var i = vakata_dnd.scroll_e.scrollTop(),
+					j = vakata_dnd.scroll_e.scrollLeft();
+				vakata_dnd.scroll_e.scrollTop(i + vakata_dnd.scroll_t * $.vakata.dnd.settings.scroll_speed);
+				vakata_dnd.scroll_e.scrollLeft(j + vakata_dnd.scroll_l * $.vakata.dnd.settings.scroll_speed);
+				if(i !== vakata_dnd.scroll_e.scrollTop() || j !== vakata_dnd.scroll_e.scrollLeft()) {
+					/**
+					 * triggered on the document when a drag causes an element to scroll
+					 * @event
+					 * @plugin dnd
+					 * @name dnd_scroll.vakata
+					 * @param {Mixed} data any data supplied with the call to $.vakata.dnd.start
+					 * @param {DOM} element the DOM element being dragged
+					 * @param {jQuery} helper the helper shown next to the mouse
+					 * @param {jQuery} event the element that is scrolling
+					 */
+					$.vakata.dnd._trigger("scroll", vakata_dnd.scroll_e);
+				}
+			},
+			start : function (e, data, html) {
+				if(e.type === "touchstart" && e.originalEvent && e.originalEvent.changedTouches && e.originalEvent.changedTouches[0]) {
+					e.pageX = e.originalEvent.changedTouches[0].pageX;
+					e.pageY = e.originalEvent.changedTouches[0].pageY;
+					e.target = document.elementFromPoint(e.originalEvent.changedTouches[0].pageX - window.pageXOffset, e.originalEvent.changedTouches[0].pageY - window.pageYOffset);
+				}
+				if(vakata_dnd.is_drag) { $.vakata.dnd.stop({}); }
+				try {
+					e.currentTarget.unselectable = "on";
+					e.currentTarget.onselectstart = function() { return false; };
+					if(e.currentTarget.style) { e.currentTarget.style.MozUserSelect = "none"; }
+				} catch(ignore) { }
+				vakata_dnd.init_x	= e.pageX;
+				vakata_dnd.init_y	= e.pageY;
+				vakata_dnd.data		= data;
+				vakata_dnd.is_down	= true;
+				vakata_dnd.element	= e.currentTarget;
+				vakata_dnd.target	= e.target;
+				vakata_dnd.is_touch	= e.type === "touchstart";
+				if(html !== false) {
+					vakata_dnd.helper = $("<div id='vakata-dnd'></div>").html(html).css({
+						"display"		: "block",
+						"margin"		: "0",
+						"padding"		: "0",
+						"position"		: "absolute",
+						"top"			: "-2000px",
+						"lineHeight"	: "16px",
+						"zIndex"		: "10000"
+					});
+				}
+				$(document).bind("mousemove touchmove", $.vakata.dnd.drag);
+				$(document).bind("mouseup touchend", $.vakata.dnd.stop);
+				return false;
+			},
+			drag : function (e) {
+				if(e.type === "touchmove" && e.originalEvent && e.originalEvent.changedTouches && e.originalEvent.changedTouches[0]) {
+					e.pageX = e.originalEvent.changedTouches[0].pageX;
+					e.pageY = e.originalEvent.changedTouches[0].pageY;
+					e.target = document.elementFromPoint(e.originalEvent.changedTouches[0].pageX - window.pageXOffset, e.originalEvent.changedTouches[0].pageY - window.pageYOffset);
+				}
+				if(!vakata_dnd.is_down) { return; }
+				if(!vakata_dnd.is_drag) {
+					if(
+						Math.abs(e.pageX - vakata_dnd.init_x) > (vakata_dnd.is_touch ? $.vakata.dnd.settings.threshold_touch : $.vakata.dnd.settings.threshold) ||
+						Math.abs(e.pageY - vakata_dnd.init_y) > (vakata_dnd.is_touch ? $.vakata.dnd.settings.threshold_touch : $.vakata.dnd.settings.threshold)
+					) {
+						if(vakata_dnd.helper) {
+							vakata_dnd.helper.appendTo("body");
+							vakata_dnd.helper_w = vakata_dnd.helper.outerWidth();
+						}
+						vakata_dnd.is_drag = true;
+						/**
+						 * triggered on the document when a drag starts
+						 * @event
+						 * @plugin dnd
+						 * @name dnd_start.vakata
+						 * @param {Mixed} data any data supplied with the call to $.vakata.dnd.start
+						 * @param {DOM} element the DOM element being dragged
+						 * @param {jQuery} helper the helper shown next to the mouse
+						 * @param {Object} event the event that caused the start (probably mousemove)
+						 */
+						$.vakata.dnd._trigger("start", e);
+					}
+					else { return; }
+				}
+
+				var d  = false, w  = false,
+					dh = false, wh = false,
+					dw = false, ww = false,
+					dt = false, dl = false,
+					ht = false, hl = false;
+
+				vakata_dnd.scroll_t = 0;
+				vakata_dnd.scroll_l = 0;
+				vakata_dnd.scroll_e = false;
+				$($(e.target).parentsUntil("body").addBack().get().reverse())
+					.filter(function () {
+						return	(/^auto|scroll$/).test($(this).css("overflow")) &&
+								(this.scrollHeight > this.offsetHeight || this.scrollWidth > this.offsetWidth);
+					})
+					.each(function () {
+						var t = $(this), o = t.offset();
+						if(this.scrollHeight > this.offsetHeight) {
+							if(o.top + t.height() - e.pageY < $.vakata.dnd.settings.scroll_proximity)	{ vakata_dnd.scroll_t = 1; }
+							if(e.pageY - o.top < $.vakata.dnd.settings.scroll_proximity)				{ vakata_dnd.scroll_t = -1; }
+						}
+						if(this.scrollWidth > this.offsetWidth) {
+							if(o.left + t.width() - e.pageX < $.vakata.dnd.settings.scroll_proximity)	{ vakata_dnd.scroll_l = 1; }
+							if(e.pageX - o.left < $.vakata.dnd.settings.scroll_proximity)				{ vakata_dnd.scroll_l = -1; }
+						}
+						if(vakata_dnd.scroll_t || vakata_dnd.scroll_l) {
+							vakata_dnd.scroll_e = $(this);
+							return false;
+						}
+					});
+
+				if(!vakata_dnd.scroll_e) {
+					d  = $(document); w = $(window);
+					dh = d.height(); wh = w.height();
+					dw = d.width(); ww = w.width();
+					dt = d.scrollTop(); dl = d.scrollLeft();
+					if(dh > wh && e.pageY - dt < $.vakata.dnd.settings.scroll_proximity)		{ vakata_dnd.scroll_t = -1;  }
+					if(dh > wh && wh - (e.pageY - dt) < $.vakata.dnd.settings.scroll_proximity)	{ vakata_dnd.scroll_t = 1; }
+					if(dw > ww && e.pageX - dl < $.vakata.dnd.settings.scroll_proximity)		{ vakata_dnd.scroll_l = -1; }
+					if(dw > ww && ww - (e.pageX - dl) < $.vakata.dnd.settings.scroll_proximity)	{ vakata_dnd.scroll_l = 1; }
+					if(vakata_dnd.scroll_t || vakata_dnd.scroll_l) {
+						vakata_dnd.scroll_e = d;
+					}
+				}
+				if(vakata_dnd.scroll_e) { $.vakata.dnd._scroll(true); }
+
+				if(vakata_dnd.helper) {
+					ht = parseInt(e.pageY + $.vakata.dnd.settings.helper_top, 10);
+					hl = parseInt(e.pageX + $.vakata.dnd.settings.helper_left, 10);
+					if(dh && ht + 25 > dh) { ht = dh - 50; }
+					if(dw && hl + vakata_dnd.helper_w > dw) { hl = dw - (vakata_dnd.helper_w + 2); }
+					vakata_dnd.helper.css({
+						left	: hl + "px",
+						top		: ht + "px"
+					});
+				}
+				/**
+				 * triggered on the document when a drag is in progress
+				 * @event
+				 * @plugin dnd
+				 * @name dnd_move.vakata
+				 * @param {Mixed} data any data supplied with the call to $.vakata.dnd.start
+				 * @param {DOM} element the DOM element being dragged
+				 * @param {jQuery} helper the helper shown next to the mouse
+				 * @param {Object} event the event that caused this to trigger (most likely mousemove)
+				 */
+				$.vakata.dnd._trigger("move", e);
+				return false;
+			},
+			stop : function (e) {
+				if(e.type === "touchend" && e.originalEvent && e.originalEvent.changedTouches && e.originalEvent.changedTouches[0]) {
+					e.pageX = e.originalEvent.changedTouches[0].pageX;
+					e.pageY = e.originalEvent.changedTouches[0].pageY;
+					e.target = document.elementFromPoint(e.originalEvent.changedTouches[0].pageX - window.pageXOffset, e.originalEvent.changedTouches[0].pageY - window.pageYOffset);
+				}
+				if(vakata_dnd.is_drag) {
+					/**
+					 * triggered on the document when a drag stops (the dragged element is dropped)
+					 * @event
+					 * @plugin dnd
+					 * @name dnd_stop.vakata
+					 * @param {Mixed} data any data supplied with the call to $.vakata.dnd.start
+					 * @param {DOM} element the DOM element being dragged
+					 * @param {jQuery} helper the helper shown next to the mouse
+					 * @param {Object} event the event that caused the stop
+					 */
+					$.vakata.dnd._trigger("stop", e);
+				}
+				else {
+					if(e.type === "touchend" && e.target === vakata_dnd.target) {
+						var to = setTimeout(function () { $(e.target).click(); }, 100);
+						$(e.target).one('click', function() { if(to) { clearTimeout(to); } });
+					}
+				}
+				$.vakata.dnd._clean();
+				return false;
+			}
+		};
+	}($));
+
+	// include the dnd plugin by default
+	// $.jstree.defaults.plugins.push("dnd");
+
+
+/**
+ * ### Search plugin
+ *
+ * Adds search functionality to jsTree.
+ */
+
+	/**
+	 * stores all defaults for the search plugin
+	 * @name $.jstree.defaults.search
+	 * @plugin search
+	 */
+	$.jstree.defaults.search = {
+		/**
+		 * a jQuery-like AJAX config, which jstree uses if a server should be queried for results. 
+		 * 
+		 * A `str` (which is the search string) parameter will be added with the request. The expected result is a JSON array with nodes that need to be opened so that matching nodes will be revealed.
+		 * Leave this setting as `false` to not query the server. You can also set this to a function, which will be invoked in the instance's scope and receive 2 parameters - the search string and the callback to call with the array of nodes to load.
+		 * @name $.jstree.defaults.search.ajax
+		 * @plugin search
+		 */
+		ajax : false,
+		/**
+		 * Indicates if the search should be fuzzy or not (should `chnd3` match `child node 3`). Default is `false`.
+		 * @name $.jstree.defaults.search.fuzzy
+		 * @plugin search
+		 */
+		fuzzy : false,
+		/**
+		 * Indicates if the search should be case sensitive. Default is `false`.
+		 * @name $.jstree.defaults.search.case_sensitive
+		 * @plugin search
+		 */
+		case_sensitive : false,
+		/**
+		 * Indicates if the tree should be filtered to show only matching nodes (keep in mind this can be heavy on large trees in old browsers). Default is `false`.
+		 * @name $.jstree.defaults.search.show_only_matches
+		 * @plugin search
+		 */
+		show_only_matches : false,
+		/**
+		 * Indicates if all nodes opened to reveal the search result should be closed when the search is cleared or a new search is performed. Default is `true`.
+		 * @name $.jstree.defaults.search.close_opened_onclear
+		 * @plugin search
+		 */
+		close_opened_onclear : true,
+		/**
+		 * Indicates if only leaf nodes should be included in search results. Default is `false`.
+		 * @name $.jstree.defaults.search.search_leaves_only
+		 * @plugin search
+		 */
+		search_leaves_only : false,
+		/**
+		 * If set to a function it will be called in the instance's scope with two arguments - the search string and the node (where node will be every node in the structure, so use with caution).
+		 * If the function returns a truthy value the node will be considered a match (it might not be displayed if search_leaves_only is set to true and the node is not a leaf). Default is `false`.
+		 * @name $.jstree.defaults.search.search_callback
+		 * @plugin search
+		 */
+		search_callback : false
+	};
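+
+	// Illustrative usage (not part of jsTree itself): a sketch of the search settings above;
+	// '#tree' and the '/search_parents' URL are assumptions for the ajax form described in
+	// the comments (the server receives ?str=... and returns an array of node IDs to open).
+	/*
+	$('#tree').jstree({
+		'plugins' : ['search'],
+		'search'  : {
+			'show_only_matches' : true,
+			'case_sensitive'    : false,
+			'ajax'              : { 'url' : '/search_parents' }
+		}
+	});
+	*/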
+
+	$.jstree.plugins.search = function (options, parent) {
+		this.bind = function () {
+			parent.bind.call(this);
+
+			this._data.search.str = "";
+			this._data.search.dom = $();
+			this._data.search.res = [];
+			this._data.search.opn = [];
+
+			this.element.on('before_open.jstree', $.proxy(function (e, data) {
+				var i, j, f, r = this._data.search.res, s = [], o = $();
+				if(r && r.length) {
+					this._data.search.dom = $(this.element[0].querySelectorAll('#' + $.map(r, function (v) { return "0123456789".indexOf(v[0]) !== -1 ? '\\3' + v[0] + ' ' + v.substr(1).replace($.jstree.idregex,'\\$&') : v.replace($.jstree.idregex,'\\$&'); }).join(', #')));
+					this._data.search.dom.children(".jstree-anchor").addClass('jstree-search');
+					if(this.settings.search.show_only_matches && this._data.search.res.length) {
+						for(i = 0, j = r.length; i < j; i++) {
+							s = s.concat(this.get_node(r[i]).parents);
+						}
+						s = $.vakata.array_remove_item($.vakata.array_unique(s),'#');
+						o = s.length ? $(this.element[0].querySelectorAll('#' + $.map(s, function (v) { return "0123456789".indexOf(v[0]) !== -1 ? '\\3' + v[0] + ' ' + v.substr(1).replace($.jstree.idregex,'\\$&') : v.replace($.jstree.idregex,'\\$&'); }).join(', #'))) : $();
+
+						this.element.find(".jstree-node").hide().filter('.jstree-last').filter(function() { return this.nextSibling; }).removeClass('jstree-last');
+						o = o.add(this._data.search.dom);
+						o.parentsUntil(".jstree").addBack().show()
+							.filter(".jstree-children").each(function () { $(this).children(".jstree-node:visible").eq(-1).addClass("jstree-last"); });
+					}
+				}
+			}, this));
+			if(this.settings.search.show_only_matches) {
+				this.element
+					.on("search.jstree", function (e, data) {
+						if(data.nodes.length) {
+							$(this).find(".jstree-node").hide().filter('.jstree-last').filter(function() { return this.nextSibling; }).removeClass('jstree-last');
+							data.nodes.parentsUntil(".jstree").addBack().show()
+								.filter(".jstree-children").each(function () { $(this).children(".jstree-node:visible").eq(-1).addClass("jstree-last"); });
+						}
+					})
+					.on("clear_search.jstree", function (e, data) {
+						if(data.nodes.length) {
+							$(this).find(".jstree-node").css("display","").filter('.jstree-last').filter(function() { return this.nextSibling; }).removeClass('jstree-last');
+						}
+					});
+			}
+		};
+		/**
+		 * used to search the tree nodes for a given string
+		 * @name search(str [, skip_async])
+		 * @param {String} str the search string
+		 * @param {Boolean} skip_async if set to true the server will not be queried even if configured
+		 * @plugin search
+		 * @trigger search.jstree
+		 */
+		this.search = function (str, skip_async) {
+			if(str === false || $.trim(str.toString()) === "") {
+				return this.clear_search();
+			}
+			str = str.toString();
+			var s = this.settings.search,
+				a = s.ajax ? s.ajax : false,
+				f = null,
+				r = [],
+				p = [], i, j;
+			if(this._data.search.res.length) {
+				this.clear_search();
+			}
+			if(!skip_async && a !== false) {
+				if($.isFunction(a)) {
+					return a.call(this, str, $.proxy(function (d) {
+							if(d && d.d) { d = d.d; }
+							this._load_nodes(!$.isArray(d) ? [] : $.vakata.array_unique(d), function () {
+								this.search(str, true);
+							}, true);
+						}, this));
+				}
+				else {
+					a = $.extend({}, a);
+					if(!a.data) { a.data = {}; }
+					a.data.str = str;
+					return $.ajax(a)
+						.fail($.proxy(function () {
+							this._data.core.last_error = { 'error' : 'ajax', 'plugin' : 'search', 'id' : 'search_01', 'reason' : 'Could not load search parents', 'data' : JSON.stringify(a) };
+							this.settings.core.error.call(this, this._data.core.last_error);
+						}, this))
+						.done($.proxy(function (d) {
+							if(d && d.d) { d = d.d; }
+							this._load_nodes(!$.isArray(d) ? [] : $.vakata.array_unique(d), function () {
+								this.search(str, true);
+							}, true);
+						}, this));
+				}
+			}
+			this._data.search.str = str;
+			this._data.search.dom = $();
+			this._data.search.res = [];
+			this._data.search.opn = [];
+
+			f = new $.vakata.search(str, true, { caseSensitive : s.case_sensitive, fuzzy : s.fuzzy });
+
+			$.each(this._model.data, function (i, v) {
+				if(v.text && ( (s.search_callback && s.search_callback.call(this, str, v)) || (!s.search_callback && f.search(v.text).isMatch) ) && (!s.search_leaves_only || (v.state.loaded && v.children.length === 0)) ) {
+					r.push(i);
+					p = p.concat(v.parents);
+				}
+			});
+			if(r.length) {
+				p = $.vakata.array_unique(p);
+				this._search_open(p);
+				this._data.search.dom = $(this.element[0].querySelectorAll('#' + $.map(r, function (v) { return "0123456789".indexOf(v[0]) !== -1 ? '\\3' + v[0] + ' ' + v.substr(1).replace($.jstree.idregex,'\\$&') : v.replace($.jstree.idregex,'\\$&'); }).join(', #')));
+				this._data.search.res = r;
+				this._data.search.dom.children(".jstree-anchor").addClass('jstree-search');
+			}
+			/**
+			 * triggered after search is complete
+			 * @event
+			 * @name search.jstree
+			 * @param {jQuery} nodes a jQuery collection of matching nodes
+			 * @param {String} str the search string
+			 * @param {Array} res a collection of objects representing the matching nodes
+			 * @plugin search
+			 */
+			this.trigger('search', { nodes : this._data.search.dom, str : str, res : this._data.search.res });
+		};
+		/**
+		 * used to clear the last search (removes classes and shows all nodes if filtering is on)
+		 * @name clear_search()
+		 * @plugin search
+		 * @trigger clear_search.jstree
+		 */
+		this.clear_search = function () {
+			this._data.search.dom.children(".jstree-anchor").removeClass("jstree-search");
+			if(this.settings.search.close_opened_onclear) {
+				this.close_node(this._data.search.opn, 0);
+			}
+			/**
+			 * triggered after the search is cleared
+			 * @event
+			 * @name clear_search.jstree
+			 * @param {jQuery} nodes a jQuery collection of matching nodes (the result from the last search)
+			 * @param {String} str the search string (the last search string)
+			 * @param {Array} res a collection of objects representing the matching nodes (the result from the last search)
+			 * @plugin search
+			 */
+			this.trigger('clear_search', { 'nodes' : this._data.search.dom, str : this._data.search.str, res : this._data.search.res });
+			this._data.search.str = "";
+			this._data.search.res = [];
+			this._data.search.opn = [];
+			this._data.search.dom = $();
+		};
+		/**
+		 * opens nodes that need to be opened to reveal the search results. Used only internally.
+		 * @private
+		 * @name _search_open(d)
+		 * @param {Array} d an array of node IDs
+		 * @plugin search
+		 */
+		this._search_open = function (d) {
+			var t = this;
+			$.each(d.concat([]), function (i, v) {
+				if(v === "#") { return true; }
+				try { v = $('#' + v.replace($.jstree.idregex,'\\$&'), t.element); } catch(ignore) { }
+				if(v && v.length) {
+					if(t.is_closed(v)) {
+						t._data.search.opn.push(v[0].id);
+						t.open_node(v, function () { t._search_open(d); }, 0);
+					}
+				}
+			});
+		};
+	};
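+
+	// Illustrative usage (not part of jsTree itself): a sketch of driving the search() and
+	// clear_search() methods above from an input field; '#tree' and '#q' are assumed selectors.
+	/*
+	$('#q').on('keyup', function () {
+		var v = $(this).val();
+		if(v) { $('#tree').jstree(true).search(v); }
+		else  { $('#tree').jstree(true).clear_search(); }
+	});
+	*/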
+
+	// helpers
+	(function ($) {
+		// from http://kiro.me/projects/fuse.html
+		$.vakata.search = function(pattern, txt, options) {
+			options = options || {};
+			if(options.fuzzy !== false) {
+				options.fuzzy = true;
+			}
+			pattern = options.caseSensitive ? pattern : pattern.toLowerCase();
+			var MATCH_LOCATION	= options.location || 0,
+				MATCH_DISTANCE	= options.distance || 100,
+				MATCH_THRESHOLD	= options.threshold || 0.6,
+				patternLen = pattern.length,
+				matchmask, pattern_alphabet, match_bitapScore, search;
+			if(patternLen > 32) {
+				options.fuzzy = false;
+			}
+			if(options.fuzzy) {
+				matchmask = 1 << (patternLen - 1);
+				pattern_alphabet = (function () {
+					var mask = {},
+						i = 0;
+					for (i = 0; i < patternLen; i++) {
+						mask[pattern.charAt(i)] = 0;
+					}
+					for (i = 0; i < patternLen; i++) {
+						mask[pattern.charAt(i)] |= 1 << (patternLen - i - 1);
+					}
+					return mask;
+				}());
+				match_bitapScore = function (e, x) {
+					var accuracy = e / patternLen,
+						proximity = Math.abs(MATCH_LOCATION - x);
+					if(!MATCH_DISTANCE) {
+						return proximity ? 1.0 : accuracy;
+					}
+					return accuracy + (proximity / MATCH_DISTANCE);
+				};
+			}
+			search = function (text) {
+				text = options.caseSensitive ? text : text.toLowerCase();
+				if(pattern === text || text.indexOf(pattern) !== -1) {
+					return {
+						isMatch: true,
+						score: 0
+					};
+				}
+				if(!options.fuzzy) {
+					return {
+						isMatch: false,
+						score: 1
+					};
+				}
+				var i, j,
+					textLen = text.length,
+					scoreThreshold = MATCH_THRESHOLD,
+					bestLoc = text.indexOf(pattern, MATCH_LOCATION),
+					binMin, binMid,
+					binMax = patternLen + textLen,
+					lastRd, start, finish, rd, charMatch,
+					score = 1,
+					locations = [];
+				if (bestLoc !== -1) {
+					scoreThreshold = Math.min(match_bitapScore(0, bestLoc), scoreThreshold);
+					bestLoc = text.lastIndexOf(pattern, MATCH_LOCATION + patternLen);
+					if (bestLoc !== -1) {
+						scoreThreshold = Math.min(match_bitapScore(0, bestLoc), scoreThreshold);
+					}
+				}
+				bestLoc = -1;
+				for (i = 0; i < patternLen; i++) {
+					binMin = 0;
+					binMid = binMax;
+					while (binMin < binMid) {
+						if (match_bitapScore(i, MATCH_LOCATION + binMid) <= scoreThreshold) {
+							binMin = binMid;
+						} else {
+							binMax = binMid;
+						}
+						binMid = Math.floor((binMax - binMin) / 2 + binMin);
+					}
+					binMax = binMid;
+					start = Math.max(1, MATCH_LOCATION - binMid + 1);
+					finish = Math.min(MATCH_LOCATION + binMid, textLen) + patternLen;
+					rd = new Array(finish + 2);
+					rd[finish + 1] = (1 << i) - 1;
+					for (j = finish; j >= start; j--) {
+						charMatch = pattern_alphabet[text.charAt(j - 1)];
+						if (i === 0) {
+							rd[j] = ((rd[j + 1] << 1) | 1) & charMatch;
+						} else {
+							rd[j] = ((rd[j + 1] << 1) | 1) & charMatch | (((lastRd[j + 1] | lastRd[j]) << 1) | 1) | lastRd[j + 1];
+						}
+						if (rd[j] & matchmask) {
+							score = match_bitapScore(i, j - 1);
+							if (score <= scoreThreshold) {
+								scoreThreshold = score;
+								bestLoc = j - 1;
+								locations.push(bestLoc);
+								if (bestLoc > MATCH_LOCATION) {
+									start = Math.max(1, 2 * MATCH_LOCATION - bestLoc);
+								} else {
+									break;
+								}
+							}
+						}
+					}
+					if (match_bitapScore(i + 1, MATCH_LOCATION) > scoreThreshold) {
+						break;
+					}
+					lastRd = rd;
+				}
+				return {
+					isMatch: bestLoc >= 0,
+					score: score
+				};
+			};
+			return txt === true ? { 'search' : search } : search(txt);
+		};
+	}($));
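+
+	// Illustrative usage (not part of jsTree itself): the helper above can be used standalone;
+	// per its last line, passing `true` as the second argument returns a reusable matcher.
+	/*
+	var matcher = $.vakata.search('chnd3', true, { fuzzy : true });
+	matcher.search('child node 3').isMatch;     // true - fuzzy bitap match
+	$.vakata.search('foo', 'food', {}).isMatch; // true - plain substring match
+	*/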
+
+	// include the search plugin by default
+	// $.jstree.defaults.plugins.push("search");
+
+/**
+ * ### Sort plugin
+ *
+ * Automatically sorts all siblings in the tree according to a sorting function.
+ */
+
+	/**
+	 * the settings function used to sort the nodes.
+	 * It is executed in the tree's context, accepts two nodes as arguments and should return `1` or `-1`.
+	 * @name $.jstree.defaults.sort
+	 * @plugin sort
+	 */
+	$.jstree.defaults.sort = function (a, b) {
+		//return this.get_type(a) === this.get_type(b) ? (this.get_text(a) > this.get_text(b) ? 1 : -1) : this.get_type(a) >= this.get_type(b);
+		return this.get_text(a) > this.get_text(b) ? 1 : -1;
+	};
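+
+	// Illustrative usage (not part of jsTree itself): a sketch of supplying a custom sort
+	// function; '#tree' is an assumed selector and 'folder' is a hypothetical type name
+	// (get_type comes from the types plugin), used here to sort folders before other nodes.
+	/*
+	$('#tree').jstree({
+		'plugins' : ['sort', 'types'],
+		'sort'    : function (a, b) {
+			var ta = this.get_type(a), tb = this.get_type(b);
+			if(ta !== tb) { return ta === 'folder' ? -1 : 1; }
+			return this.get_text(a) > this.get_text(b) ? 1 : -1;
+		}
+	});
+	*/
+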
+	$.jstree.plugins.sort = function (options, parent) {
+		this.bind = function () {
+			parent.bind.call(this);
+			this.element
+				.on("model.jstree", $.proxy(function (e, data) {
+						this.sort(data.parent, true);
+					}, this))
+				.on("rename_node.jstree create_node.jstree", $.proxy(function (e, data) {
+						this.sort(data.parent || data.node.parent, false);
+						this.redraw_node(data.parent || data.node.parent, true);
+					}, this))
+				.on("move_node.jstree copy_node.jstree", $.proxy(function (e, data) {
+						this.sort(data.parent, false);
+						this.redraw_node(data.parent, true);
+					}, this));
+		};
+		/**
+		 * used to sort a node's children
+		 * @private
+		 * @name sort(obj [, deep])
+		 * @param  {mixed} obj the node
+		 * @param {Boolean} deep if set to `true` nodes are sorted recursively.
+		 * @plugin sort
+		 */
+		this.sort = function (obj, deep) {
+			var i, j;
+			obj = this.get_node(obj);
+			if(obj && obj.children && obj.children.length) {
+				obj.children.sort($.proxy(this.settings.sort, this));
+				if(deep) {
+					for(i = 0, j = obj.children_d.length; i < j; i++) {
+						this.sort(obj.children_d[i], false);
+					}
+				}
+			}
+		};
+	};
+
+	// include the sort plugin by default
+	// $.jstree.defaults.plugins.push("sort");
+
+/**
+ * ### State plugin
+ *
+ * Saves the state of the tree (selected nodes, opened nodes) on the user's computer using localStorage
+ */
+
+	var to = false;
+	/**
+	 * stores all defaults for the state plugin
+	 * @name $.jstree.defaults.state
+	 * @plugin state
+	 */
+	$.jstree.defaults.state = {
+		/**
+		 * A string for the key to use when saving the current tree (change if using multiple trees in your project). Defaults to `jstree`.
+		 * @name $.jstree.defaults.state.key
+		 * @plugin state
+		 */
+		key		: 'jstree',
+		/**
+		 * A space separated list of events that trigger a state save. Defaults to `changed.jstree open_node.jstree close_node.jstree`.
+		 * @name $.jstree.defaults.state.events
+		 * @plugin state
+		 */
+		events	: 'changed.jstree open_node.jstree close_node.jstree',
+		/**
+		 * Time in milliseconds after which the state will expire. Defaults to `false`, meaning the state never expires.
+		 * @name $.jstree.defaults.state.ttl
+		 * @plugin state
+		 */
+		ttl		: false,
+		/**
+		 * A function that will be executed prior to restoring state with one argument - the state object. Can be used to clear unwanted parts of the state.
+		 * @name $.jstree.defaults.state.filter
+		 * @plugin state
+		 */
+		filter	: false
+	};
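+
+	// Illustrative usage (not part of jsTree itself): a sketch of the state settings above,
+	// keyed per tree and expiring after a day; '#tree' is an assumed selector, and the filter
+	// assumes the saved state keeps the selection under state.core.selected.
+	/*
+	$('#tree').jstree({
+		'plugins' : ['state'],
+		'state'   : {
+			'key'    : 'my-tree',
+			'ttl'    : 24 * 60 * 60 * 1000,  // one day in milliseconds
+			'filter' : function (state) { delete state.core.selected; return state; }
+		}
+	});
+	*/
+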
+	$.jstree.plugins.state = function (options, parent) {
+		this.bind = function () {
+			parent.bind.call(this);
+			var bind = $.proxy(function () {
+				this.element.on(this.settings.state.events, $.proxy(function () {
+					if(to) { clearTimeout(to); }
+					to = setTimeout($.proxy(function () { this.save_state(); }, this), 100);
+				}, this));
+			}, this);
+			this.element
+				.on("ready.jstree", $.proxy(function (e, data) {
+						this.element.one("restore_state.jstree", bind);
+						if(!this.restore_state()) { bind(); }
+					}, this));
+		};
+		/**
+		 * save the state
+		 * @name save_state()
+		 * @plugin state
+		 */
+		this.save_state = function () {
+			var st = { 'state' : this.get_state(), 'ttl' : this.settings.state.ttl, 'sec' : +(new Date()) };
+			$.vakata.storage.set(this.settings.state.key, JSON.stringify(st));
+		};
+		/**
+		 * restore the state from the user's computer
+		 * @name restore_state()
+		 * @plugin state
+		 */
+		this.restore_state = function () {
+			var k = $.vakata.storage.get(this.settings.state.key);
+			if(!!k) { try { k = JSON.parse(k); } catch(ex) { return false; } }
+			if(!!k && k.ttl && k.sec && +(new Date()) - k.sec > k.ttl) { return false; }
+			if(!!k && k.state) { k = k.state; }
+			if(!!k && $.isFunction(this.settings.state.filter)) { k = this.settings.state.filter.call(this, k); }
+			if(!!k) {
+				this.element.one("set_state.jstree", function (e, data) { data.instance.trigger('restore_state', { 'state' : $.extend(true, {}, k) }); });
+				this.set_state(k);
+				return true;
+			}
+			return false;
+		};
+		/**
+		 * clear the state on the user's computer
+		 * @name clear_state()
+		 * @plugin state
+		 */
+		this.clear_state = function () {
+			return $.vakata.storage.del(this.settings.state.key);
+		};
+	};
+
+	(function ($, undefined) {
+		$.vakata.storage = {
+			// simply specifying the functions in FF throws an error
+			set : function (key, val) { return window.localStorage.setItem(key, val); },
+			get : function (key) { return window.localStorage.getItem(key); },
+			del : function (key) { return window.localStorage.removeItem(key); }
+		};
+	}($));
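+
+	// Illustrative usage (not part of jsTree itself): the wrapper above is a thin facade over
+	// window.localStorage, so values round-trip as strings.
+	/*
+	$.vakata.storage.set('k', JSON.stringify({ a : 1 }));
+	JSON.parse($.vakata.storage.get('k')).a; // 1
+	$.vakata.storage.del('k');
+	*/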
+
+	// include the state plugin by default
+	// $.jstree.defaults.plugins.push("state");
+
+/**
+ * ### Types plugin
+ *
+ * Makes it possible to add predefined types for groups of nodes, so that nesting rules and icons can easily be controlled for each group.
+ */
+
+	/**
+	 * An object storing all types as key value pairs, where the key is the type name and the value is an object that could contain the following keys (all optional).
+	 * 
+	 * * `max_children` the maximum number of immediate children this node type can have. Do not specify or set to `-1` for unlimited.
+	 * * `max_depth` the maximum nesting depth for this node type. A value of `1` would mean that the node can have children, but no grandchildren. Do not specify or set to `-1` for unlimited.
+	 * * `valid_children` an array of node type strings that nodes of this type can have as children. Do not specify or set to `-1` for no limits.
+	 * * `icon` a string - can be a path to an icon or a className; if using an image that is in the current directory, use a `./` prefix, otherwise it will be detected as a class. Omit to use the default icon from your theme.
+	 *
+	 * There are two predefined types:
+	 * 
+	 * * `#` represents the root of the tree, for example `max_children` would control the maximum number of root nodes.
+	 * * `default` represents the default node - any settings here will be applied to all nodes that do not have a type specified.
+	 * 
+	 * @name $.jstree.defaults.types
+	 * @plugin types
+	 */
+	$.jstree.defaults.types = {
+		'#' : {},
+		'default' : {}
+	};
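+
+	// Illustrative usage (not part of jsTree itself): a sketch of the types settings above;
+	// '#tree' is an assumed selector, and 'folder' / 'file' are assumed type names supplied
+	// via each node's "type" key (or data.jstree.type, as the model handler below shows).
+	/*
+	$('#tree').jstree({
+		'plugins' : ['types'],
+		'types'   : {
+			'#'      : { 'max_children' : 1 },                   // a single root node
+			'folder' : { 'valid_children' : ['folder', 'file'] },
+			'file'   : { 'valid_children' : [], 'icon' : './file.png' }
+		}
+	});
+	*/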
+
+	$.jstree.plugins.types = function (options, parent) {
+		this.init = function (el, options) {
+			var i, j;
+			if(options && options.types && options.types['default']) {
+				for(i in options.types) {
+					if(i !== "default" && i !== "#" && options.types.hasOwnProperty(i)) {
+						for(j in options.types['default']) {
+							if(options.types['default'].hasOwnProperty(j) && options.types[i][j] === undefined) {
+								options.types[i][j] = options.types['default'][j];
+							}
+						}
+					}
+				}
+			}
+			parent.init.call(this, el, options);
+			this._model.data['#'].type = '#';
+		};
+		this.refresh = function (skip_loading, forget_state) {
+			parent.refresh.call(this, skip_loading, forget_state);
+			this._model.data['#'].type = '#';
+		};
+		this.bind = function () {
+			this.element
+				.on('model.jstree', $.proxy(function (e, data) {
+						var m = this._model.data,
+							dpc = data.nodes,
+							t = this.settings.types,
+							i, j, c = 'default';
+						for(i = 0, j = dpc.length; i < j; i++) {
+							c = 'default';
+							if(m[dpc[i]].original && m[dpc[i]].original.type && t[m[dpc[i]].original.type]) {
+								c = m[dpc[i]].original.type;
+							}
+							if(m[dpc[i]].data && m[dpc[i]].data.jstree && m[dpc[i]].data.jstree.type && t[m[dpc[i]].data.jstree.type]) {
+								c = m[dpc[i]].data.jstree.type;
+							}
+							m[dpc[i]].type = c;
+							if(m[dpc[i]].icon === true && t[c].icon !== undefined) {
+								m[dpc[i]].icon = t[c].icon;
+							}
+						}
+						m['#'].type = '#';
+					}, this));
+			parent.bind.call(this);
+		};
+		this.get_json = function (obj, options, flat) {
+			var i, j,
+				m = this._model.data,
+				opt = options ? $.extend(true, {}, options, {no_id:false}) : {},
+				tmp = parent.get_json.call(this, obj, opt, flat);
+			if(tmp === false) { return false; }
+			if($.isArray(tmp)) {
+				for(i = 0, j = tmp.length; i < j; i++) {
+					tmp[i].type = tmp[i].id && m[tmp[i].id] && m[tmp[i].id].type ? m[tmp[i].id].type : "default";
+					if(options && options.no_id) {
+						delete tmp[i].id;
+						if(tmp[i].li_attr && tmp[i].li_attr.id) {
+							delete tmp[i].li_attr.id;
+						}
+					}
+				}
+			}
+			else {
+				tmp.type = tmp.id && m[tmp.id] && m[tmp.id].type ? m[tmp.id].type : "default";
+				if(options && options.no_id) {
+					tmp = this._delete_ids(tmp);
+				}
+			}
+			return tmp;
+		};
+		this._delete_ids = function (tmp) {
+			if($.isArray(tmp)) {
+				for(var i = 0, j = tmp.length; i < j; i++) {
+					tmp[i] = this._delete_ids(tmp[i]);
+				}
+				return tmp;
+			}
+			delete tmp.id;
+			if(tmp.li_attr && tmp.li_attr.id) {
+				delete tmp.li_attr.id;
+			}
+			if(tmp.children && $.isArray(tmp.children)) {
+				tmp.children = this._delete_ids(tmp.children);
+			}
+			return tmp;
+		};
+		this.check = function (chk, obj, par, pos, more) {
+			if(parent.check.call(this, chk, obj, par, pos, more) === false) { return false; }
+			obj = obj && obj.id ? obj : this.get_node(obj);
+			par = par && par.id ? par : this.get_node(par);
+			var m = obj && obj.id ? $.jstree.reference(obj.id) : null, tmp, d, i, j;
+			m = m && m._model && m._model.data ? m._model.data : null;
+			switch(chk) {
+				case "create_node":
+				case "move_node":
+				case "copy_node":
+					if(chk !== 'move_node' || $.inArray(obj.id, par.children) === -1) {
+						tmp = this.get_rules(par);
+						if(tmp.max_children !== undefined && tmp.max_children !== -1 && tmp.max_children === par.children.length) {
+							this._data.core.last_error = { 'error' : 'check', 'plugin' : 'types', 'id' : 'types_01', 'reason' : 'max_children prevents function: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+							return false;
+						}
+						if(tmp.valid_children !== undefined && tmp.valid_children !== -1 && $.inArray(obj.type, tmp.valid_children) === -1) {
+							this._data.core.last_error = { 'error' : 'check', 'plugin' : 'types', 'id' : 'types_02', 'reason' : 'valid_children prevents function: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+							return false;
+						}
+						if(m && obj.children_d && obj.parents) {
+							d = 0;
+							for(i = 0, j = obj.children_d.length; i < j; i++) {
+								d = Math.max(d, m[obj.children_d[i]].parents.length);
+							}
+							d = d - obj.parents.length + 1;
+						}
+						if(d <= 0 || d === undefined) { d = 1; }
+						do {
+							if(tmp.max_depth !== undefined && tmp.max_depth !== -1 && tmp.max_depth < d) {
+								this._data.core.last_error = { 'error' : 'check', 'plugin' : 'types', 'id' : 'types_03', 'reason' : 'max_depth prevents function: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+								return false;
+							}
+							par = this.get_node(par.parent);
+							tmp = this.get_rules(par);
+							d++;
+						} while(par);
+					}
+					break;
+			}
+			return true;
+		};
+		/**
+		 * used to retrieve the type settings object for a node
+		 * @name get_rules(obj)
+		 * @param {mixed} obj the node to find the rules for
+		 * @return {Object}
+		 * @plugin types
+		 */
+		this.get_rules = function (obj) {
+			obj = this.get_node(obj);
+			if(!obj) { return false; }
+			var tmp = this.get_type(obj, true);
+			if(tmp.max_depth === undefined) { tmp.max_depth = -1; }
+			if(tmp.max_children === undefined) { tmp.max_children = -1; }
+			if(tmp.valid_children === undefined) { tmp.valid_children = -1; }
+			return tmp;
+		};
+		/**
+		 * used to retrieve the type string or settings object for a node
+		 * @name get_type(obj [, rules])
+		 * @param {mixed} obj the node to find the rules for
+		 * @param {Boolean} rules if set to `true` instead of a string the settings object will be returned
+		 * @return {String|Object}
+		 * @plugin types
+		 */
+		this.get_type = function (obj, rules) {
+			obj = this.get_node(obj);
+			return (!obj) ? false : ( rules ? $.extend({ 'type' : obj.type }, this.settings.types[obj.type]) : obj.type);
+		};
+		/**
+		 * used to change a node's type
+		 * @name set_type(obj, type)
+		 * @param {mixed} obj the node to change
+		 * @param {String} type the new type
+		 * @plugin types
+		 */
+		this.set_type = function (obj, type) {
+			var t, t1, t2, old_type, old_icon;
+			if($.isArray(obj)) {
+				obj = obj.slice();
+				for(t1 = 0, t2 = obj.length; t1 < t2; t1++) {
+					this.set_type(obj[t1], type);
+				}
+				return true;
+			}
+			t = this.settings.types;
+			obj = this.get_node(obj);
+			if(!t[type] || !obj) { return false; }
+			old_type = obj.type;
+			old_icon = this.get_icon(obj);
+			obj.type = type;
+			if(old_icon === true || (t[old_type] && t[old_type].icon && old_icon === t[old_type].icon)) {
+				this.set_icon(obj, t[type].icon !== undefined ? t[type].icon : true);
+			}
+			return true;
+		};
+	};
+	// include the types plugin by default
+	// $.jstree.defaults.plugins.push("types");
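+
+	// Illustrative calls against an existing instance (sketch; the selector and
+	// the node id are placeholders):
+	//
+	//   var tree = $('#tree').jstree(true);
+	//   tree.get_type('j1_2');            // -> e.g. 'file'
+	//   tree.set_type('j1_2', 'folder');  // swaps the type (and the icon, if it was the type's)
+	//   tree.get_rules('j1_2');           // -> { type : 'folder', max_depth : -1, ... }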
+
+/**
+ * ### Unique plugin
+ *
+ * Enforces that no nodes with the same name can coexist as siblings.
+ */
+
+	/**
+	 * stores all defaults for the unique plugin
+	 * @name $.jstree.defaults.unique
+	 * @plugin unique
+	 */
+	$.jstree.defaults.unique = {
+		/**
+		 * Indicates if the comparison should be case sensitive. Default is `false`.
+		 * @name $.jstree.defaults.unique.case_sensitive
+		 * @plugin unique
+		 */
+		case_sensitive : false,
+		/**
+		 * A callback executed in the instance's scope when a new node is created and the name is already taken; the two arguments are the conflicting name and the counter. The default will produce results like `New node (2)`.
+		 * @name $.jstree.defaults.unique.duplicate
+		 * @plugin unique
+		 */
+		duplicate : function (name, counter) {
+			return name + ' (' + counter + ')';
+		}
+	};
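+
+	// A minimal configuration sketch (not part of upstream jsTree; '#tree' is
+	// illustrative). A custom duplicate callback yields names like `New node - copy 2`:
+	//
+	//   $('#tree').jstree({
+	//     'unique' : {
+	//       'case_sensitive' : true,
+	//       'duplicate' : function (name, counter) { return name + ' - copy ' + counter; }
+	//     },
+	//     'plugins' : [ 'unique' ]
+	//   });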
+
+	$.jstree.plugins.unique = function (options, parent) {
+		this.check = function (chk, obj, par, pos, more) {
+			if(parent.check.call(this, chk, obj, par, pos, more) === false) { return false; }
+			obj = obj && obj.id ? obj : this.get_node(obj);
+			par = par && par.id ? par : this.get_node(par);
+			if(!par || !par.children) { return true; }
+			var n = chk === "rename_node" ? pos : obj.text,
+				c = [],
+				s = this.settings.unique.case_sensitive,
+				m = this._model.data, i, j;
+			for(i = 0, j = par.children.length; i < j; i++) {
+				c.push(s ? m[par.children[i]].text : m[par.children[i]].text.toLowerCase());
+			}
+			if(!s) { n = n.toLowerCase(); }
+			switch(chk) {
+				case "delete_node":
+					return true;
+				case "rename_node":
+					i = ($.inArray(n, c) === -1 || (obj.text && obj.text[ s ? 'toString' : 'toLowerCase']() === n));
+					if(!i) {
+						this._data.core.last_error = { 'error' : 'check', 'plugin' : 'unique', 'id' : 'unique_01', 'reason' : 'Child with name ' + n + ' already exists. Preventing: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+					}
+					return i;
+				case "create_node":
+					i = ($.inArray(n, c) === -1);
+					if(!i) {
+						this._data.core.last_error = { 'error' : 'check', 'plugin' : 'unique', 'id' : 'unique_04', 'reason' : 'Child with name ' + n + ' already exists. Preventing: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+					}
+					return i;
+				case "copy_node":
+					i = ($.inArray(n, c) === -1);
+					if(!i) {
+						this._data.core.last_error = { 'error' : 'check', 'plugin' : 'unique', 'id' : 'unique_02', 'reason' : 'Child with name ' + n + ' already exists. Preventing: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+					}
+					return i;
+				case "move_node":
+					i = (obj.parent === par.id || $.inArray(n, c) === -1);
+					if(!i) {
+						this._data.core.last_error = { 'error' : 'check', 'plugin' : 'unique', 'id' : 'unique_03', 'reason' : 'Child with name ' + n + ' already exists. Preventing: ' + chk, 'data' : JSON.stringify({ 'chk' : chk, 'pos' : pos, 'obj' : obj && obj.id ? obj.id : false, 'par' : par && par.id ? par.id : false }) };
+					}
+					return i;
+			}
+			return true;
+		};
+		this.create_node = function (par, node, pos, callback, is_loaded) {
+			if(!node || node.text === undefined) {
+				if(par === null) {
+					par = "#";
+				}
+				par = this.get_node(par);
+				if(!par) {
+					return parent.create_node.call(this, par, node, pos, callback, is_loaded);
+				}
+				pos = pos === undefined ? "last" : pos;
+				if(!pos.toString().match(/^(before|after)$/) && !is_loaded && !this.is_loaded(par)) {
+					return parent.create_node.call(this, par, node, pos, callback, is_loaded);
+				}
+				if(!node) { node = {}; }
+				var tmp, n, dpc, i, j, m = this._model.data, s = this.settings.unique.case_sensitive, cb = this.settings.unique.duplicate;
+				n = tmp = this.get_string('New node');
+				dpc = [];
+				for(i = 0, j = par.children.length; i < j; i++) {
+					dpc.push(s ? m[par.children[i]].text : m[par.children[i]].text.toLowerCase());
+				}
+				i = 1;
+				while($.inArray(s ? n : n.toLowerCase(), dpc) !== -1) {
+					n = cb.call(this, tmp, (++i)).toString();
+				}
+				node.text = n;
+			}
+			return parent.create_node.call(this, par, node, pos, callback, is_loaded);
+		};
+	};
+
+	// include the unique plugin by default
+	// $.jstree.defaults.plugins.push("unique");
+
+
+/**
+ * ### Wholerow plugin
+ *
+ * Makes each node appear block level, which makes selection easier. May cause a slowdown for large trees in old browsers.
+ */
+
+	var div = document.createElement('DIV');
+	div.setAttribute('unselectable','on');
+	div.className = 'jstree-wholerow';
+	div.innerHTML = '&#160;';
+	$.jstree.plugins.wholerow = function (options, parent) {
+		this.bind = function () {
+			parent.bind.call(this);
+
+			this.element
+				.on('ready.jstree set_state.jstree', $.proxy(function () {
+						this.hide_dots();
+					}, this))
+				.on("init.jstree loading.jstree ready.jstree", $.proxy(function () {
+						//div.style.height = this._data.core.li_height + 'px';
+						this.get_container_ul().addClass('jstree-wholerow-ul');
+					}, this))
+				.on("deselect_all.jstree", $.proxy(function (e, data) {
+						this.element.find('.jstree-wholerow-clicked').removeClass('jstree-wholerow-clicked');
+					}, this))
+				.on("changed.jstree", $.proxy(function (e, data) {
+						this.element.find('.jstree-wholerow-clicked').removeClass('jstree-wholerow-clicked');
+						var tmp = false, i, j;
+						for(i = 0, j = data.selected.length; i < j; i++) {
+							tmp = this.get_node(data.selected[i], true);
+							if(tmp && tmp.length) {
+								tmp.children('.jstree-wholerow').addClass('jstree-wholerow-clicked');
+							}
+						}
+					}, this))
+				.on("open_node.jstree", $.proxy(function (e, data) {
+						this.get_node(data.node, true).find('.jstree-clicked').parent().children('.jstree-wholerow').addClass('jstree-wholerow-clicked');
+					}, this))
+				.on("hover_node.jstree dehover_node.jstree", $.proxy(function (e, data) {
+						this.get_node(data.node, true).children('.jstree-wholerow')[e.type === "hover_node"?"addClass":"removeClass"]('jstree-wholerow-hovered');
+					}, this))
+				.on("contextmenu.jstree", ".jstree-wholerow", $.proxy(function (e) {
+						e.preventDefault();
+						var tmp = $.Event('contextmenu', { metaKey : e.metaKey, ctrlKey : e.ctrlKey, altKey : e.altKey, shiftKey : e.shiftKey, pageX : e.pageX, pageY : e.pageY });
+						$(e.currentTarget).closest(".jstree-node").children(".jstree-anchor:eq(0)").trigger(tmp);
+					}, this))
+				.on("click.jstree", ".jstree-wholerow", function (e) {
+						e.stopImmediatePropagation();
+						var tmp = $.Event('click', { metaKey : e.metaKey, ctrlKey : e.ctrlKey, altKey : e.altKey, shiftKey : e.shiftKey });
+						$(e.currentTarget).closest(".jstree-node").children(".jstree-anchor:eq(0)").trigger(tmp).focus();
+					})
+				.on("click.jstree", ".jstree-leaf > .jstree-ocl", $.proxy(function (e) {
+						e.stopImmediatePropagation();
+						var tmp = $.Event('click', { metaKey : e.metaKey, ctrlKey : e.ctrlKey, altKey : e.altKey, shiftKey : e.shiftKey });
+						$(e.currentTarget).closest(".jstree-node").children(".jstree-anchor:eq(0)").trigger(tmp).focus();
+					}, this))
+				.on("mouseover.jstree", ".jstree-wholerow, .jstree-icon", $.proxy(function (e) {
+						e.stopImmediatePropagation();
+						this.hover_node(e.currentTarget);
+						return false;
+					}, this))
+				.on("mouseleave.jstree", ".jstree-node", $.proxy(function (e) {
+						this.dehover_node(e.currentTarget);
+					}, this));
+		};
+		this.teardown = function () {
+			if(this.settings.wholerow) {
+				this.element.find(".jstree-wholerow").remove();
+			}
+			parent.teardown.call(this);
+		};
+		this.redraw_node = function(obj, deep, callback) {
+			obj = parent.redraw_node.call(this, obj, deep, callback);
+			if(obj) {
+				var tmp = div.cloneNode(true);
+				//tmp.style.height = this._data.core.li_height + 'px';
+				if($.inArray(obj.id, this._data.core.selected) !== -1) { tmp.className += ' jstree-wholerow-clicked'; }
+				obj.insertBefore(tmp, obj.childNodes[0]);
+			}
+			return obj;
+		};
+	};
+	// include the wholerow plugin by default
+	// $.jstree.defaults.plugins.push("wholerow");
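+
+	// Enabling the plugin needs no settings of its own (sketch; '#tree' is illustrative):
+	//
+	//   $('#tree').jstree({ 'plugins' : [ 'wholerow' ] });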
+
+}));
diff --git a/client/galaxy/scripts/libs/jquery/select2.js b/client/galaxy/scripts/libs/jquery/select2.js
new file mode 100755
index 0000000..1d31ce8
--- /dev/null
+++ b/client/galaxy/scripts/libs/jquery/select2.js
@@ -0,0 +1,3507 @@
+/*
+Copyright 2012 Igor Vaynberg
+
+Version: 3.5.1 Timestamp: Tue Jul 22 18:58:56 EDT 2014
+
+This software is licensed under the Apache License, Version 2.0 (the "Apache License") or the GNU
+General Public License version 2 (the "GPL License"). You may choose either license to govern your
+use of this software only upon the condition that you accept all of the terms of either the Apache
+License or the GPL License.
+
+You may obtain a copy of the Apache License and the GPL License at:
+
+    http://www.apache.org/licenses/LICENSE-2.0
+    http://www.gnu.org/licenses/gpl-2.0.html
+
+Unless required by applicable law or agreed to in writing, software distributed under the
+Apache License or the GPL License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied. See the Apache License and the GPL License for
+the specific language governing permissions and limitations under the Apache License and the GPL License.
+*/
+(function ($) {
+    if(typeof $.fn.each2 == "undefined") {
+        $.extend($.fn, {
+            /*
+            * 4-10 times faster .each replacement
+            * use it carefully, as it overrides the jQuery context of the element on each iteration
+            */
+            each2 : function (c) {
+                var j = $([0]), i = -1, l = this.length;
+                while (
+                    ++i < l
+                    && (j.context = j[0] = this[i])
+                    && c.call(j[0], i, j) !== false //"this"=DOM, i=index, j=jQuery object
+                );
+                return this;
+            }
+        });
+    }
+})(jQuery);
+
+(function ($, undefined) {
+    "use strict";
+    /*global document, window, jQuery, console */
+
+    if (window.Select2 !== undefined) {
+        return;
+    }
+
+    var KEY, AbstractSelect2, SingleSelect2, MultiSelect2, nextUid, sizer,
+        lastMousePosition={x:0,y:0}, $document, scrollBarDimensions,
+
+    KEY = {
+        TAB: 9,
+        ENTER: 13,
+        ESC: 27,
+        SPACE: 32,
+        LEFT: 37,
+        UP: 38,
+        RIGHT: 39,
+        DOWN: 40,
+        SHIFT: 16,
+        CTRL: 17,
+        ALT: 18,
+        PAGE_UP: 33,
+        PAGE_DOWN: 34,
+        HOME: 36,
+        END: 35,
+        BACKSPACE: 8,
+        DELETE: 46,
+        isArrow: function (k) {
+            k = k.which ? k.which : k;
+            switch (k) {
+            case KEY.LEFT:
+            case KEY.RIGHT:
+            case KEY.UP:
+            case KEY.DOWN:
+                return true;
+            }
+            return false;
+        },
+        isControl: function (e) {
+            var k = e.which;
+            switch (k) {
+            case KEY.SHIFT:
+            case KEY.CTRL:
+            case KEY.ALT:
+                return true;
+            }
+
+            if (e.metaKey) return true;
+
+            return false;
+        },
+        isFunctionKey: function (k) {
+            k = k.which ? k.which : k;
+            return k >= 112 && k <= 123;
+        }
+    },
+    MEASURE_SCROLLBAR_TEMPLATE = "<div class='select2-measure-scrollbar'></div>",
+
+    DIACRITICS = {"\u24B6":"A","\uFF21":"A","\u00C0":"A","\u00C1":"A","\u00C2":"A","\u1EA6":"A","\u1EA4":"A","\u1EAA":"A","\u1EA8":"A","\u00C3":"A","\u0100":"A","\u0102":"A","\u1EB0":"A","\u1EAE":"A","\u1EB4":"A","\u1EB2":"A","\u0226":"A","\u01E0":"A","\u00C4":"A","\u01DE":"A","\u1EA2":"A","\u00C5":"A","\u01FA":"A","\u01CD":"A","\u0200":"A","\u0202":"A","\u1EA0":"A","\u1EAC":"A","\u1EB6":"A","\u1E00":"A","\u0104":"A","\u023A":"A","\u2C6F":"A","\uA732":"AA","\u00C6":"AE","\u01FC":"AE","\u [...]
+
+    $document = $(document);
+
+    nextUid=(function() { var counter=1; return function() { return counter++; }; }());
+
+
+    function reinsertElement(element) {
+        var placeholder = $(document.createTextNode(''));
+
+        element.before(placeholder);
+        placeholder.before(element);
+        placeholder.remove();
+    }
+
+    function stripDiacritics(str) {
+        // Used 'uni range + named function' from http://jsperf.com/diacritics/18
+        function match(a) {
+            return DIACRITICS[a] || a;
+        }
+
+        return str.replace(/[^\u0000-\u007E]/g, match);
+    }
+
+    function indexOf(value, array) {
+        var i = 0, l = array.length;
+        for (; i < l; i = i + 1) {
+            if (equal(value, array[i])) return i;
+        }
+        return -1;
+    }
+
+    function measureScrollbar () {
+        var $template = $( MEASURE_SCROLLBAR_TEMPLATE );
+        $template.appendTo('body');
+
+        var dim = {
+            width: $template.width() - $template[0].clientWidth,
+            height: $template.height() - $template[0].clientHeight
+        };
+        $template.remove();
+
+        return dim;
+    }
+
+    /**
+     * Compares equality of a and b
+     * @param a
+     * @param b
+     */
+    function equal(a, b) {
+        if (a === b) return true;
+        if (a === undefined || b === undefined) return false;
+        if (a === null || b === null) return false;
+        // Check whether 'a' or 'b' is a string (primitive or object).
+        // Concatenating an empty string (+'') converts its operand to a string primitive.
+        if (a.constructor === String) return a+'' === b+''; // a+'' - in case 'a' is a String object
+        if (b.constructor === String) return b+'' === a+''; // b+'' - in case 'b' is a String object
+        return false;
+    }
+
+    /**
+     * Splits the string into an array of values, trimming each value. An empty array is returned for nulls or empty
+     * strings.
+     * @param string
+     * @param separator
+     */
+    function splitVal(string, separator) {
+        var val, i, l;
+        if (string === null || string.length < 1) return [];
+        val = string.split(separator);
+        for (i = 0, l = val.length; i < l; i = i + 1) val[i] = $.trim(val[i]);
+        return val;
+    }
+
+    function getSideBorderPadding(element) {
+        return element.outerWidth(false) - element.width();
+    }
+
+    function installKeyUpChangeEvent(element) {
+        var key="keyup-change-value";
+        element.on("keydown", function () {
+            if ($.data(element, key) === undefined) {
+                $.data(element, key, element.val());
+            }
+        });
+        element.on("keyup", function () {
+            var val= $.data(element, key);
+            if (val !== undefined && element.val() !== val) {
+                $.removeData(element, key);
+                element.trigger("keyup-change");
+            }
+        });
+    }
+
+
+    /**
+     * Filters mouse events so an event is fired only if the mouse actually moved.
+     *
+     * Filters out mouse events that occur when the mouse is stationary but
+     * the elements under the pointer are scrolled.
+     */
+    function installFilteredMouseMove(element) {
+        element.on("mousemove", function (e) {
+            var lastpos = lastMousePosition;
+            if (lastpos === undefined || lastpos.x !== e.pageX || lastpos.y !== e.pageY) {
+                $(e.target).trigger("mousemove-filtered", e);
+            }
+        });
+    }
+
+    /**
+     * Debounces a function. Returns a wrapper that defers calling the original fn until quietMillis milliseconds
+     * have elapsed without another invocation.
+     *
+     * @param quietMillis number of milliseconds to wait before invoking fn
+     * @param fn function to be debounced
+     * @param ctx object to be used as this reference within fn
+     * @return debounced version of fn
+     */
+    function debounce(quietMillis, fn, ctx) {
+        ctx = ctx || undefined;
+        var timeout;
+        return function () {
+            var args = arguments;
+            window.clearTimeout(timeout);
+            timeout = window.setTimeout(function() {
+                fn.apply(ctx, args);
+            }, quietMillis);
+        };
+    }
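+
+    // Illustrative use (sketch): during a burst of calls only the last one runs,
+    // roughly quietMillis after the burst ends.
+    //
+    //   var log = debounce(300, function (msg) { console.log(msg); });
+    //   log('a'); log('b'); log('c');   // only 'c' is logged, ~300ms later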
+
+    function installDebouncedScroll(threshold, element) {
+        var notify = debounce(threshold, function (e) { element.trigger("scroll-debounced", e);});
+        element.on("scroll", function (e) {
+            if (indexOf(e.target, element.get()) >= 0) notify(e);
+        });
+    }
+
+    function focus($el) {
+        if ($el[0] === document.activeElement) return;
+
+        /* set the focus in a 0 timeout - that way the focus is set after the processing
+            of the current event has finished - which seems like the only reliable way
+            to set focus */
+        window.setTimeout(function() {
+            var el=$el[0], pos=$el.val().length, range;
+
+            $el.focus();
+
+            /* make sure el received focus so we do not error out when trying to manipulate the caret.
+                sometimes modals or other listeners may steal it after it's set */
+            var isVisible = (el.offsetWidth > 0 || el.offsetHeight > 0);
+            if (isVisible && el === document.activeElement) {
+
+                /* after the focus is set move the caret to the end, necessary when we val()
+                    just before setting focus */
+                if(el.setSelectionRange)
+                {
+                    el.setSelectionRange(pos, pos);
+                }
+                else if (el.createTextRange) {
+                    range = el.createTextRange();
+                    range.collapse(false);
+                    range.select();
+                }
+            }
+        }, 0);
+    }
+
+    function getCursorInfo(el) {
+        el = $(el)[0];
+        var offset = 0;
+        var length = 0;
+        if ('selectionStart' in el) {
+            offset = el.selectionStart;
+            length = el.selectionEnd - offset;
+        } else if ('selection' in document) {
+            el.focus();
+            var sel = document.selection.createRange();
+            length = document.selection.createRange().text.length;
+            sel.moveStart('character', -el.value.length);
+            offset = sel.text.length - length;
+        }
+        return { offset: offset, length: length };
+    }
+
+    function killEvent(event) {
+        event.preventDefault();
+        event.stopPropagation();
+    }
+    function killEventImmediately(event) {
+        event.preventDefault();
+        event.stopImmediatePropagation();
+    }
+
+    function measureTextWidth(e) {
+        if (!sizer){
+            var style = e[0].currentStyle || window.getComputedStyle(e[0], null);
+            sizer = $(document.createElement("div")).css({
+                position: "absolute",
+                left: "-10000px",
+                top: "-10000px",
+                display: "none",
+                fontSize: style.fontSize,
+                fontFamily: style.fontFamily,
+                fontStyle: style.fontStyle,
+                fontWeight: style.fontWeight,
+                letterSpacing: style.letterSpacing,
+                textTransform: style.textTransform,
+                whiteSpace: "nowrap"
+            });
+            sizer.attr("class","select2-sizer");
+            $("body").append(sizer);
+        }
+        sizer.text(e.val());
+        return sizer.width();
+    }
+
+    function syncCssClasses(dest, src, adapter) {
+        var classes, replacements = [], adapted;
+
+        classes = $.trim(dest.attr("class"));
+
+        if (classes) {
+            classes = '' + classes; // for IE which returns object
+
+            $(classes.split(/\s+/)).each2(function() {
+                if (this.indexOf("select2-") === 0) {
+                    replacements.push(this);
+                }
+            });
+        }
+
+        classes = $.trim(src.attr("class"));
+
+        if (classes) {
+            classes = '' + classes; // for IE which returns object
+
+            $(classes.split(/\s+/)).each2(function() {
+                if (this.indexOf("select2-") !== 0) {
+                    adapted = adapter(this);
+
+                    if (adapted) {
+                        replacements.push(adapted);
+                    }
+                }
+            });
+        }
+
+        dest.attr("class", replacements.join(" "));
+    }
+
+
+    function markMatch(text, term, markup, escapeMarkup) {
+        var match=stripDiacritics(text.toUpperCase()).indexOf(stripDiacritics(term.toUpperCase())),
+            tl=term.length;
+
+        if (match<0) {
+            markup.push(escapeMarkup(text));
+            return;
+        }
+
+        markup.push(escapeMarkup(text.substring(0, match)));
+        markup.push("<span class='select2-match'>");
+        markup.push(escapeMarkup(text.substring(match, match + tl)));
+        markup.push("</span>");
+        markup.push(escapeMarkup(text.substring(match + tl, text.length)));
+    }
+
+    function defaultEscapeMarkup(markup) {
+        var replace_map = {
+            '\\': '&#92;',
+            '&': '&amp;',
+            '<': '&lt;',
+            '>': '&gt;',
+            '"': '&quot;',
+            "'": '&#39;',
+            "/": '&#47;'
+        };
+
+        return String(markup).replace(/[&<>"'\/\\]/g, function (match) {
+            return replace_map[match];
+        });
+    }
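+
+    // For example (illustrative):
+    //   defaultEscapeMarkup('<b>R&D</b>') === '&lt;b&gt;R&amp;D&lt;&#47;b&gt;'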
+
+    /**
+     * Produces an ajax-based query function
+     *
+     * @param options object containing configuration parameters
+     * @param options.params parameter map for the transport ajax call, can contain such options as cache, jsonpCallback, etc. see $.ajax
+     * @param options.transport function that will be used to execute the ajax request. must be compatible with parameters supported by $.ajax
+     * @param options.url url for the data
+     * @param options.data a function(searchTerm, pageNumber, context) that should return an object containing query string parameters for the above url.
+     * @param options.dataType request data type: ajax, jsonp, other datatypes supported by jQuery's $.ajax function or the transport function if specified
+     * @param options.quietMillis (optional) milliseconds to wait before making the ajaxRequest, helps debounce the ajax function if invoked too often
+     * @param options.results a function(remoteData, pageNumber, query) that converts data returned from the remote request to the format expected by Select2.
+     *      The expected format is an object containing the following keys:
+     *      results array of objects that will be used as choices
+     *      more (optional) boolean indicating whether there are more results available
+     *      Example: {results:[{id:1, text:'Red'},{id:2, text:'Blue'}], more:true}
+     */
+    function ajax(options) {
+        var timeout, // current scheduled but not yet executed request
+            handler = null,
+            quietMillis = options.quietMillis || 100,
+            ajaxUrl = options.url,
+            self = this;
+
+        return function (query) {
+            window.clearTimeout(timeout);
+            timeout = window.setTimeout(function () {
+                var data = options.data, // ajax data function
+                    url = ajaxUrl, // ajax url string or function
+                    transport = options.transport || $.fn.select2.ajaxDefaults.transport,
+                    // deprecated - to be removed in 4.0  - use params instead
+                    deprecated = {
+                        type: options.type || 'GET', // set type of request (GET or POST)
+                        cache: options.cache || false,
+                        jsonpCallback: options.jsonpCallback||undefined,
+                        dataType: options.dataType||"json"
+                    },
+                    params = $.extend({}, $.fn.select2.ajaxDefaults.params, deprecated);
+
+                data = data ? data.call(self, query.term, query.page, query.context) : null;
+                url = (typeof url === 'function') ? url.call(self, query.term, query.page, query.context) : url;
+
+                if (handler && typeof handler.abort === "function") { handler.abort(); }
+
+                if (options.params) {
+                    if ($.isFunction(options.params)) {
+                        $.extend(params, options.params.call(self));
+                    } else {
+                        $.extend(params, options.params);
+                    }
+                }
+
+                $.extend(params, {
+                    url: url,
+                    dataType: options.dataType,
+                    data: data,
+                    success: function (data) {
+                        // TODO - replace query.page with query so users have access to term, page, etc.
+                        // added query as third parameter to keep backwards compatibility
+                        var results = options.results(data, query.page, query);
+                        query.callback(results);
+                    },
+                    error: function(jqXHR, textStatus, errorThrown){
+                        var results = {
+                            hasError: true,
+                            jqXHR: jqXHR,
+                            textStatus: textStatus,
+                            errorThrown: errorThrown,
+                        };
+
+                        query.callback(results);
+                    }
+                });
+                handler = transport.call(self, params);
+            }, quietMillis);
+        };
+    }
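+
+    // A minimal usage sketch (not part of upstream Select2; the selector, URL
+    // and response fields are placeholders). In 3.x, ajax mode attaches to an
+    // <input type="hidden"> rather than a <select> (see prepareOpts below):
+    //
+    //   $('#remote').select2({
+    //     ajax: {
+    //       url: '/search',
+    //       dataType: 'json',
+    //       quietMillis: 250,
+    //       data: function (term, page) { return { q: term, page: page }; },
+    //       results: function (data, page) {
+    //         return { results: data.items, more: data.has_more };
+    //       }
+    //     }
+    //   });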
+
+    /**
+     * Produces a query function that works with a local array
+     *
+     * @param options object containing configuration parameters. The options parameter can either be an array or an
+     * object.
+     *
+     * If the array form is used it is assumed that it contains objects with 'id' and 'text' keys.
+     *
+     * If the object form is used it is assumed that it contains 'results' and 'text' keys. The 'results' key should
+     * contain an array of objects that will be used as choices. These objects must contain at least an 'id' key. The
+     * 'text' key can either be a String, in which case each element in the 'results' array is expected to have a key
+     * whose name is the value of 'text', which will be used to match choices. Alternatively, 'text' can be a
+     * function(item) that extracts the text.
+     */
+    function local(options) {
+        var data = options, // data elements
+            dataText,
+            tmp,
+            text = function (item) { return ""+item.text; }; // function used to retrieve the text portion of a data item that is matched against the search
+
+        if ($.isArray(data)) {
+            tmp = data;
+            data = { results: tmp };
+        }
+
+        if ($.isFunction(data) === false) {
+            tmp = data;
+            data = function() { return tmp; };
+        }
+
+        var dataItem = data();
+        if (dataItem.text) {
+            text = dataItem.text;
+            // if text is not a function we assume it to be a key name
+            if (!$.isFunction(text)) {
+                dataText = dataItem.text; // we need to store this in a separate variable because in the next step data gets reset and data.text is no longer available
+                text = function (item) { return item[dataText]; };
+            }
+        }
+
+        return function (query) {
+            var t = query.term, filtered = { results: [] }, process;
+            if (t === "") {
+                query.callback(data());
+                return;
+            }
+
+            process = function(datum, collection) {
+                var group, attr;
+                datum = datum[0];
+                if (datum.children) {
+                    group = {};
+                    for (attr in datum) {
+                        if (datum.hasOwnProperty(attr)) group[attr]=datum[attr];
+                    }
+                    group.children=[];
+                    $(datum.children).each2(function(i, childDatum) { process(childDatum, group.children); });
+                    if (group.children.length || query.matcher(t, text(group), datum)) {
+                        collection.push(group);
+                    }
+                } else {
+                    if (query.matcher(t, text(datum), datum)) {
+                        collection.push(datum);
+                    }
+                }
+            };
+
+            $(data().results).each2(function(i, datum) { process(datum, filtered.results); });
+            query.callback(filtered);
+        };
+    }
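+
+    // A minimal usage sketch (not part of upstream Select2; '#colors' is a
+    // placeholder) showing both accepted forms of the `data` option:
+    //
+    //   $('#colors').select2({
+    //     data: [ { id: 1, text: 'Red' }, { id: 2, text: 'Blue' } ]
+    //   });
+    //
+    //   $('#colors').select2({
+    //     data: { results: [ { id: 1, name: 'Red' } ], text: 'name' }
+    //   });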
+
+    /**
+     * Produces a query function that works with a predefined list of tag choices
+     *
+     * @param data an array of tags (plain strings or {id, text} objects), or a function returning such an array
+     */
+    function tags(data) {
+        var isFunc = $.isFunction(data);
+        return function (query) {
+            var t = query.term, filtered = {results: []};
+            var result = isFunc ? data(query) : data;
+            if ($.isArray(result)) {
+                $(result).each(function () {
+                    var isObject = this.text !== undefined,
+                        text = isObject ? this.text : this;
+                    if (t === "" || query.matcher(t, text)) {
+                        filtered.results.push(isObject ? this : {id: this, text: this});
+                    }
+                });
+                query.callback(filtered);
+            }
+        };
+    }
+
+    /**
+     * Checks if the formatter function should be used.
+     *
+     * Throws an error if it is not a function. Returns true if it should be used,
+     * false if no formatting should be performed.
+     *
+     * @param formatter
+     */
+    function checkFormatter(formatter, formatterName) {
+        if ($.isFunction(formatter)) return true;
+        if (!formatter) return false;
+        if (typeof(formatter) === 'string') return true;
+        throw new Error(formatterName +" must be a string, function, or falsy value");
+    }
+
+  /**
+   * Returns a given value
+   * If given a function, returns its output
+   *
+   * @param val string|function
+   * @param context value of "this" to be passed to function
+   * @returns {*}
+   */
+    function evaluate(val, context) {
+        if ($.isFunction(val)) {
+            var args = Array.prototype.slice.call(arguments, 2);
+            return val.apply(context, args);
+        }
+        return val;
+    }
+
+    function countResults(results) {
+        var count = 0;
+        $.each(results, function(i, item) {
+            if (item.children) {
+                count += countResults(item.children);
+            } else {
+                count++;
+            }
+        });
+        return count;
+    }
+
+    /**
+     * Default tokenizer. This function breaks the input on a substring match of any string from the
+     * opts.tokenSeparators array and uses opts.createSearchChoice to create the choice object. Both of those
+     * two options have to be defined in order for the tokenizer to work.
+     *
+     * @param input text user has typed so far or pasted into the search field
+     * @param selection currently selected choices
+     * @param selectCallback function(choice) callback to add the choice to selection
+     * @param opts select2's opts
+     * @return undefined/null to leave the current input unchanged, or a string to change the input to the returned value
+     */
+    function defaultTokenizer(input, selection, selectCallback, opts) {
+        var original = input, // store the original so we can compare and know if we need to tell the search to update its text
+            dupe = false, // check for whether a token we extracted represents a duplicate selected choice
+            token, // token
+            index, // position at which the separator was found
+            i, l, // looping variables
+            separator; // the matched separator
+
+        if (!opts.createSearchChoice || !opts.tokenSeparators || opts.tokenSeparators.length < 1) return undefined;
+
+        while (true) {
+            index = -1;
+
+            for (i = 0, l = opts.tokenSeparators.length; i < l; i++) {
+                separator = opts.tokenSeparators[i];
+                index = input.indexOf(separator);
+                if (index >= 0) break;
+            }
+
+            if (index < 0) break; // did not find any token separator in the input string, bail
+
+            token = input.substring(0, index);
+            input = input.substring(index + separator.length);
+
+            if (token.length > 0) {
+                token = opts.createSearchChoice.call(this, token, selection);
+                if (token !== undefined && token !== null && opts.id(token) !== undefined && opts.id(token) !== null) {
+                    dupe = false;
+                    for (i = 0, l = selection.length; i < l; i++) {
+                        if (equal(opts.id(token), opts.id(selection[i]))) {
+                            dupe = true; break;
+                        }
+                    }
+
+                    if (!dupe) selectCallback(token);
+                }
+            }
+        }
+
+        if (original!==input) return input;
+    }
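+
+    // A minimal usage sketch (not part of upstream Select2; '#tags' is a
+    // placeholder): comma- or space-separated input is split into tag choices.
+    // With `tags` set, createSearchChoice gets a sensible default, so it does
+    // not need to be listed explicitly:
+    //
+    //   $('#tags').select2({
+    //     tags: [ 'red', 'green', 'blue' ],
+    //     tokenSeparators: [ ',', ' ' ]
+    //   });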
+
+    function cleanupJQueryElements() {
+        var self = this;
+
+        $.each(arguments, function (i, element) {
+            self[element].remove();
+            self[element] = null;
+        });
+    }
+
+    /**
+     * Creates a new class
+     *
+     * @param superClass
+     * @param methods
+     */
+    function clazz(SuperClass, methods) {
+        var constructor = function () {};
+        constructor.prototype = new SuperClass;
+        constructor.prototype.constructor = constructor;
+        constructor.prototype.parent = SuperClass.prototype;
+        constructor.prototype = $.extend(constructor.prototype, methods);
+        return constructor;
+    }
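+
+    // Illustrative use (sketch): subclasses reach superclass methods through
+    // the `parent` prototype reference, mirroring how the classes below call
+    // this.parent.<method>.apply(this, arguments).
+    //
+    //   var Base  = clazz(Object, { greet: function () { return 'hi'; } });
+    //   var Child = clazz(Base,   { greet: function () { return this.parent.greet.call(this) + '!'; } });
+    //   new Child().greet();   // -> 'hi!'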
+
+    AbstractSelect2 = clazz(Object, {
+
+        // abstract
+        bind: function (func) {
+            var self = this;
+            return function () {
+                func.apply(self, arguments);
+            };
+        },
+
+        // abstract
+        init: function (opts) {
+            var results, search, resultsSelector = ".select2-results";
+
+            // prepare options
+            this.opts = opts = this.prepareOpts(opts);
+
+            this.id=opts.id;
+
+            // destroy if called on an existing component
+            if (opts.element.data("select2") !== undefined &&
+                opts.element.data("select2") !== null) {
+                opts.element.data("select2").destroy();
+            }
+
+            this.container = this.createContainer();
+
+            this.liveRegion = $("<span>", {
+                    role: "status",
+                    "aria-live": "polite"
+                })
+                .addClass("select2-hidden-accessible")
+                .appendTo(document.body);
+
+            this.containerId="s2id_"+(opts.element.attr("id") || "autogen"+nextUid());
+            this.containerEventName= this.containerId
+                .replace(/([.])/g, '_')
+                .replace(/([;&,\-\.\+\*\~':"\!\^#$%@\[\]\(\)=>\|])/g, '\\$1');
+            this.container.attr("id", this.containerId);
+
+            this.container.attr("title", opts.element.attr("title"));
+
+            this.body = $("body");
+
+            syncCssClasses(this.container, this.opts.element, this.opts.adaptContainerCssClass);
+
+            this.container.attr("style", opts.element.attr("style"));
+            this.container.css(evaluate(opts.containerCss, this.opts.element));
+            this.container.addClass(evaluate(opts.containerCssClass, this.opts.element));
+
+            this.elementTabIndex = this.opts.element.attr("tabindex");
+
+            // swap container for the element
+            this.opts.element
+                .data("select2", this)
+                .attr("tabindex", "-1")
+                .before(this.container)
+                .on("click.select2", killEvent); // do not leak click events
+
+            this.container.data("select2", this);
+
+            this.dropdown = this.container.find(".select2-drop");
+
+            syncCssClasses(this.dropdown, this.opts.element, this.opts.adaptDropdownCssClass);
+
+            this.dropdown.addClass(evaluate(opts.dropdownCssClass, this.opts.element));
+            this.dropdown.data("select2", this);
+            this.dropdown.on("click", killEvent);
+
+            this.results = results = this.container.find(resultsSelector);
+            this.search = search = this.container.find("input.select2-input");
+
+            this.queryCount = 0;
+            this.resultsPage = 0;
+            this.context = null;
+
+            // initialize the container
+            this.initContainer();
+
+            this.container.on("click", killEvent);
+
+            installFilteredMouseMove(this.results);
+
+            this.dropdown.on("mousemove-filtered", resultsSelector, this.bind(this.highlightUnderEvent));
+            this.dropdown.on("touchstart touchmove touchend", resultsSelector, this.bind(function (event) {
+                this._touchEvent = true;
+                this.highlightUnderEvent(event);
+            }));
+            this.dropdown.on("touchmove", resultsSelector, this.bind(this.touchMoved));
+            this.dropdown.on("touchstart touchend", resultsSelector, this.bind(this.clearTouchMoved));
+
+            // Waiting for a click event on touch devices to select option and hide dropdown;
+            // otherwise click will be triggered on an underlying element
+            this.dropdown.on('click', this.bind(function (event) {
+                if (this._touchEvent) {
+                    this._touchEvent = false;
+                    this.selectHighlighted();
+                }
+            }));
+
+            installDebouncedScroll(80, this.results);
+            this.dropdown.on("scroll-debounced", resultsSelector, this.bind(this.loadMoreIfNeeded));
+
+            // do not propagate change event from the search field out of the component
+            $(this.container).on("change", ".select2-input", function(e) {e.stopPropagation();});
+            $(this.dropdown).on("change", ".select2-input", function(e) {e.stopPropagation();});
+
+            // if jquery.mousewheel plugin is installed we can prevent out-of-bounds scrolling of results via mousewheel
+            if ($.fn.mousewheel) {
+                results.mousewheel(function (e, delta, deltaX, deltaY) {
+                    var top = results.scrollTop();
+                    if (deltaY > 0 && top - deltaY <= 0) {
+                        results.scrollTop(0);
+                        killEvent(e);
+                    } else if (deltaY < 0 && results.get(0).scrollHeight - results.scrollTop() + deltaY <= results.height()) {
+                        results.scrollTop(results.get(0).scrollHeight - results.height());
+                        killEvent(e);
+                    }
+                });
+            }
+
+            installKeyUpChangeEvent(search);
+            search.on("keyup-change input paste", this.bind(this.updateResults));
+            search.on("focus", function () { search.addClass("select2-focused"); });
+            search.on("blur", function () { search.removeClass("select2-focused");});
+
+            this.dropdown.on("mouseup", resultsSelector, this.bind(function (e) {
+                if ($(e.target).closest(".select2-result-selectable").length > 0) {
+                    this.highlightUnderEvent(e);
+                    this.selectHighlighted(e);
+                }
+            }));
+
+            // trap all mouse events from leaving the dropdown. sometimes there may be a modal that is listening
+            // for mouse events outside of itself so it can close itself. since the dropdown is now outside the select2's
+            // dom it will trigger the popup close, which is not what we want
+            // focusin can cause focus wars between modals and select2 since the dropdown is outside the modal.
+            this.dropdown.on("click mouseup mousedown touchstart touchend focusin", function (e) { e.stopPropagation(); });
+
+            this.nextSearchTerm = undefined;
+
+            if ($.isFunction(this.opts.initSelection)) {
+                // initialize selection based on the current value of the source element
+                this.initSelection();
+
+                // if the user has provided a function that can set selection based on the value of the source element
+                // we monitor the change event on the element and trigger it, allowing for two way synchronization
+                this.monitorSource();
+            }
+
+            if (opts.maximumInputLength !== null) {
+                this.search.attr("maxlength", opts.maximumInputLength);
+            }
+
+            var disabled = opts.element.prop("disabled");
+            if (disabled === undefined) disabled = false;
+            this.enable(!disabled);
+
+            var readonly = opts.element.prop("readonly");
+            if (readonly === undefined) readonly = false;
+            this.readonly(readonly);
+
+            // Calculate size of scrollbar
+            scrollBarDimensions = scrollBarDimensions || measureScrollbar();
+
+            this.autofocus = opts.element.prop("autofocus");
+            opts.element.prop("autofocus", false);
+            if (this.autofocus) this.focus();
+
+            this.search.attr("placeholder", opts.searchInputPlaceholder);
+        },
+
+        // abstract
+        destroy: function () {
+            var element=this.opts.element, select2 = element.data("select2"), self = this;
+
+            this.close();
+
+            if (element.length && element[0].detachEvent) {
+                element.each(function () {
+                    this.detachEvent("onpropertychange", self._sync);
+                });
+            }
+            if (this.propertyObserver) {
+                this.propertyObserver.disconnect();
+                this.propertyObserver = null;
+            }
+            this._sync = null;
+
+            if (select2 !== undefined) {
+                select2.container.remove();
+                select2.liveRegion.remove();
+                select2.dropdown.remove();
+                element
+                    .removeClass("select2-offscreen")
+                    .removeData("select2")
+                    .off(".select2")
+                    .prop("autofocus", this.autofocus || false);
+                if (this.elementTabIndex) {
+                    element.attr({tabindex: this.elementTabIndex});
+                } else {
+                    element.removeAttr("tabindex");
+                }
+                element.show();
+            }
+
+            cleanupJQueryElements.call(this,
+                "container",
+                "liveRegion",
+                "dropdown",
+                "results",
+                "search"
+            );
+        },
+
+        // abstract
+        optionToData: function(element) {
+            if (element.is("option")) {
+                return {
+                    id:element.prop("value"),
+                    text:element.text(),
+                    element: element.get(),
+                    css: element.attr("class"),
+                    disabled: element.prop("disabled"),
+                    locked: equal(element.attr("locked"), "locked") || equal(element.data("locked"), true)
+                };
+            } else if (element.is("optgroup")) {
+                return {
+                    text:element.attr("label"),
+                    children:[],
+                    element: element.get(),
+                    css: element.attr("class")
+                };
+            }
+        },
+
+        // abstract
+        prepareOpts: function (opts) {
+            var element, select, idKey, ajaxUrl, self = this;
+
+            element = opts.element;
+
+            if (element.get(0).tagName.toLowerCase() === "select") {
+                this.select = select = opts.element;
+            }
+
+            if (select) {
+                // these options are not allowed when attached to a select because they are picked up off the element itself
+                $.each(["id", "multiple", "ajax", "query", "createSearchChoice", "initSelection", "data", "tags"], function () {
+                    if (this in opts) {
+                        throw new Error("Option '" + this + "' is not allowed for Select2 when attached to a <select> element.");
+                    }
+                });
+            }
+
+            opts = $.extend({}, {
+                populateResults: function(container, results, query) {
+                    var populate, id=this.opts.id, liveRegion=this.liveRegion;
+
+                    populate=function(results, container, depth) {
+
+                        var i, l, result, selectable, disabled, compound, node, label, innerContainer, formatted;
+
+                        results = opts.sortResults(results, container, query);
+
+                        // collect the created nodes for bulk append
+                        var nodes = [];
+                        for (i = 0, l = results.length; i < l; i = i + 1) {
+
+                            result=results[i];
+
+                            disabled = (result.disabled === true);
+                            selectable = (!disabled) && (id(result) !== undefined);
+
+                            compound=result.children && result.children.length > 0;
+
+                            node=$("<li></li>");
+                            node.addClass("select2-results-dept-"+depth);
+                            node.addClass("select2-result");
+                            node.addClass(selectable ? "select2-result-selectable" : "select2-result-unselectable");
+                            if (disabled) { node.addClass("select2-disabled"); }
+                            if (compound) { node.addClass("select2-result-with-children"); }
+                            node.addClass(self.opts.formatResultCssClass(result));
+                            node.attr("role", "presentation");
+
+                            label=$(document.createElement("div"));
+                            label.addClass("select2-result-label");
+                            label.attr("id", "select2-result-label-" + nextUid());
+                            label.attr("role", "option");
+
+                            formatted=opts.formatResult(result, label, query, self.opts.escapeMarkup);
+                            if (formatted!==undefined) {
+                                label.html(formatted);
+                                node.append(label);
+                            }
+
+
+                            if (compound) {
+
+                                innerContainer=$("<ul></ul>");
+                                innerContainer.addClass("select2-result-sub");
+                                populate(result.children, innerContainer, depth+1);
+                                node.append(innerContainer);
+                            }
+
+                            node.data("select2-data", result);
+                            nodes.push(node[0]);
+                        }
+
+                        // bulk append the created nodes
+                        container.append(nodes);
+                        liveRegion.text(opts.formatMatches(results.length));
+                    };
+
+                    populate(results, container, 0);
+                }
+            }, $.fn.select2.defaults, opts);
+
+            if (typeof(opts.id) !== "function") {
+                idKey = opts.id;
+                opts.id = function (e) { return e[idKey]; };
+            }
+
+            if ($.isArray(opts.element.data("select2Tags"))) {
+                if ("tags" in opts) {
+                    throw "tags specified as both an attribute 'data-select2-tags' and in options of Select2 " + opts.element.attr("id");
+                }
+                opts.tags=opts.element.data("select2Tags");
+            }
+
+            if (select) {
+                opts.query = this.bind(function (query) {
+                    var data = { results: [], more: false },
+                        term = query.term,
+                        children, placeholderOption, process;
+
+                    process=function(element, collection) {
+                        var group;
+                        if (element.is("option")) {
+                            if (query.matcher(term, element.text(), element)) {
+                                collection.push(self.optionToData(element));
+                            }
+                        } else if (element.is("optgroup")) {
+                            group=self.optionToData(element);
+                            element.children().each2(function(i, elm) { process(elm, group.children); });
+                            if (group.children.length>0) {
+                                collection.push(group);
+                            }
+                        }
+                    };
+
+                    children=element.children();
+
+                    // ignore the placeholder option if there is one
+                    if (this.getPlaceholder() !== undefined && children.length > 0) {
+                        placeholderOption = this.getPlaceholderOption();
+                        if (placeholderOption) {
+                            children=children.not(placeholderOption);
+                        }
+                    }
+
+                    children.each2(function(i, elm) { process(elm, data.results); });
+
+                    query.callback(data);
+                });
+                // this is needed because inside val() we construct choices from options and their id is hardcoded
+                opts.id=function(e) { return e.id; };
+            } else {
+                if (!("query" in opts)) {
+
+                    if ("ajax" in opts) {
+                        ajaxUrl = opts.element.data("ajax-url");
+                        if (ajaxUrl && ajaxUrl.length > 0) {
+                            opts.ajax.url = ajaxUrl;
+                        }
+                        opts.query = ajax.call(opts.element, opts.ajax);
+                    } else if ("data" in opts) {
+                        opts.query = local(opts.data);
+                    } else if ("tags" in opts) {
+                        opts.query = tags(opts.tags);
+                        if (opts.createSearchChoice === undefined) {
+                            opts.createSearchChoice = function (term) { return {id: $.trim(term), text: $.trim(term)}; };
+                        }
+                        if (opts.initSelection === undefined) {
+                            opts.initSelection = function (element, callback) {
+                                var data = [];
+                                $(splitVal(element.val(), opts.separator)).each(function () {
+                                    var obj = { id: this, text: this },
+                                        tags = opts.tags;
+                                    if ($.isFunction(tags)) tags=tags();
+                                    $(tags).each(function() { if (equal(this.id, obj.id)) { obj = this; return false; } });
+                                    data.push(obj);
+                                });
+
+                                callback(data);
+                            };
+                        }
+                    }
+                }
+            }
+            if (typeof(opts.query) !== "function") {
+                throw "query function not defined for Select2 " + opts.element.attr("id");
+            }
+
+            if (opts.createSearchChoicePosition === 'top') {
+                opts.createSearchChoicePosition = function(list, item) { list.unshift(item); };
+            }
+            else if (opts.createSearchChoicePosition === 'bottom') {
+                opts.createSearchChoicePosition = function(list, item) { list.push(item); };
+            }
+            else if (typeof(opts.createSearchChoicePosition) !== "function")  {
+                throw "invalid createSearchChoicePosition option must be 'top', 'bottom' or a custom function";
+            }
+
+            return opts;
+        },
+
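+        // Editor's note (illustrative, not upstream code): prepareOpts wires
+        // opts.query from whichever data source the caller configured, e.g.:
+        //   $("#e").select2({ data: [{ id: 1, text: "one" }] }); // -> local()
+        //   $("#e").select2({ tags: ["red", "green"] });         // -> tags()
+        //   $("#e").select2({ ajax: { url: "/search" } });       // -> ajax()
+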
+        /**
+         * Monitor the original element for changes and update select2 accordingly
+         */
+        // abstract
+        monitorSource: function () {
+            var el = this.opts.element, observer, self = this;
+
+            el.on("change.select2", this.bind(function (e) {
+                if (this.opts.element.data("select2-change-triggered") !== true) {
+                    this.initSelection();
+                }
+            }));
+
+            this._sync = this.bind(function () {
+
+                // sync enabled state
+                var disabled = el.prop("disabled");
+                if (disabled === undefined) disabled = false;
+                this.enable(!disabled);
+
+                var readonly = el.prop("readonly");
+                if (readonly === undefined) readonly = false;
+                this.readonly(readonly);
+
+                syncCssClasses(this.container, this.opts.element, this.opts.adaptContainerCssClass);
+                this.container.addClass(evaluate(this.opts.containerCssClass, this.opts.element));
+
+                syncCssClasses(this.dropdown, this.opts.element, this.opts.adaptDropdownCssClass);
+                this.dropdown.addClass(evaluate(this.opts.dropdownCssClass, this.opts.element));
+
+            });
+
+            // IE8-10 (IE9/10 won't fire propertychange via attachEvent)
+            if (el.length && el[0].attachEvent) {
+                el.each(function() {
+                    this.attachEvent("onpropertychange", self._sync);
+                });
+            }
+
+            // safari, chrome, firefox, IE11
+            observer = window.MutationObserver || window.WebKitMutationObserver || window.MozMutationObserver;
+            if (observer !== undefined) {
+                if (this.propertyObserver) { delete this.propertyObserver; this.propertyObserver = null; }
+                this.propertyObserver = new observer(function (mutations) {
+                    $.each(mutations, self._sync);
+                });
+                this.propertyObserver.observe(el.get(0), { attributes:true, subtree:false });
+            }
+        },
+
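+        // Editor's note (illustrative, not upstream code): because monitorSource
+        // watches the source element, toggling it directly is reflected in the
+        // widget, e.g. $("#e").prop("disabled", true) disables the container via
+        // the propertychange/MutationObserver hooks above.
+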
+        // abstract
+        triggerSelect: function(data) {
+            var evt = $.Event("select2-selecting", { val: this.id(data), object: data, choice: data });
+            this.opts.element.trigger(evt);
+            return !evt.isDefaultPrevented();
+        },
+
+        /**
+         * Triggers the change event on the source element
+         */
+        // abstract
+        triggerChange: function (details) {
+
+            details = details || {};
+            details= $.extend({}, details, { type: "change", val: this.val() });
+            // prevents recursive triggering
+            this.opts.element.data("select2-change-triggered", true);
+            this.opts.element.trigger(details);
+            this.opts.element.data("select2-change-triggered", false);
+
+            // some validation frameworks ignore the change event and listen instead to keyup, click for selects
+            // so here we trigger the click event manually
+            this.opts.element.click();
+
+            // ValidationEngine ignores the change event and listens instead to blur
+            // so here we trigger the blur event manually if so desired
+            if (this.opts.blurOnChange)
+                this.opts.element.blur();
+        },
+
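+        // Editor's note (illustrative, not upstream code): the synthetic change
+        // event carries extra detail, so consumers can do e.g.:
+        //   $("#e").on("change", function (e) {
+        //       console.log(e.val, e.added, e.removed);
+        //   });
+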
+        // abstract
+        isInterfaceEnabled: function () {
+            return this.enabledInterface === true;
+        },
+
+        // abstract
+        enableInterface: function() {
+            var enabled = this._enabled && !this._readonly,
+                disabled = !enabled;
+
+            if (enabled === this.enabledInterface) return false;
+
+            this.container.toggleClass("select2-container-disabled", disabled);
+            this.close();
+            this.enabledInterface = enabled;
+
+            return true;
+        },
+
+        // abstract
+        enable: function(enabled) {
+            if (enabled === undefined) enabled = true;
+            if (this._enabled === enabled) return;
+            this._enabled = enabled;
+
+            this.opts.element.prop("disabled", !enabled);
+            this.enableInterface();
+        },
+
+        // abstract
+        disable: function() {
+            this.enable(false);
+        },
+
+        // abstract
+        readonly: function(enabled) {
+            if (enabled === undefined) enabled = false;
+            if (this._readonly === enabled) return;
+            this._readonly = enabled;
+
+            this.opts.element.prop("readonly", enabled);
+            this.enableInterface();
+        },
+
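+        // Editor's note (illustrative, not upstream code): enable/disable/readonly
+        // are also reachable through the plugin facade, e.g.:
+        //   $("#e").select2("enable", false);  // same as select2("disable")
+        //   $("#e").select2("readonly", true);
+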
+        // abstract
+        opened: function () {
+            return (this.container) ? this.container.hasClass("select2-dropdown-open") : false;
+        },
+
+        // abstract
+        positionDropdown: function() {
+            var $dropdown = this.dropdown,
+                offset = this.container.offset(),
+                height = this.container.outerHeight(false),
+                width = this.container.outerWidth(false),
+                dropHeight = $dropdown.outerHeight(false),
+                $window = $(window),
+                windowWidth = $window.width(),
+                windowHeight = $window.height(),
+                viewPortRight = $window.scrollLeft() + windowWidth,
+                viewportBottom = $window.scrollTop() + windowHeight,
+                dropTop = offset.top + height,
+                dropLeft = offset.left,
+                enoughRoomBelow = dropTop + dropHeight <= viewportBottom,
+                enoughRoomAbove = (offset.top - dropHeight) >= $window.scrollTop(),
+                dropWidth = $dropdown.outerWidth(false),
+                enoughRoomOnRight = dropLeft + dropWidth <= viewPortRight,
+                aboveNow = $dropdown.hasClass("select2-drop-above"),
+                bodyOffset,
+                above,
+                changeDirection,
+                css,
+                resultsListNode;
+
+            // always prefer the current above/below alignment, unless there is not enough room
+            if (aboveNow) {
+                above = true;
+                if (!enoughRoomAbove && enoughRoomBelow) {
+                    changeDirection = true;
+                    above = false;
+                }
+            } else {
+                above = false;
+                if (!enoughRoomBelow && enoughRoomAbove) {
+                    changeDirection = true;
+                    above = true;
+                }
+            }
+
+            // if we are changing direction we need to get positions while the dropdown is hidden
+            if (changeDirection) {
+                $dropdown.hide();
+                offset = this.container.offset();
+                height = this.container.outerHeight(false);
+                width = this.container.outerWidth(false);
+                dropHeight = $dropdown.outerHeight(false);
+                viewPortRight = $window.scrollLeft() + windowWidth;
+                viewportBottom = $window.scrollTop() + windowHeight;
+                dropTop = offset.top + height;
+                dropLeft = offset.left;
+                dropWidth = $dropdown.outerWidth(false);
+                enoughRoomOnRight = dropLeft + dropWidth <= viewPortRight;
+                $dropdown.show();
+
+                // fix so the cursor does not move to the left within the search-textbox in IE
+                this.focusSearch();
+            }
+
+            if (this.opts.dropdownAutoWidth) {
+                resultsListNode = $('.select2-results', $dropdown)[0];
+                $dropdown.addClass('select2-drop-auto-width');
+                $dropdown.css('width', '');
+                // Add scrollbar width to dropdown if vertical scrollbar is present
+                dropWidth = $dropdown.outerWidth(false) + (resultsListNode.scrollHeight === resultsListNode.clientHeight ? 0 : scrollBarDimensions.width);
+                width = dropWidth = Math.max(width, dropWidth);
+                dropHeight = $dropdown.outerHeight(false);
+                enoughRoomOnRight = dropLeft + dropWidth <= viewPortRight;
+            }
+            else {
+                this.container.removeClass('select2-drop-auto-width');
+            }
+
+            // fix positioning when body has an offset and is not position: static
+            if (this.body.css('position') !== 'static') {
+                bodyOffset = this.body.offset();
+                dropTop -= bodyOffset.top;
+                dropLeft -= bodyOffset.left;
+            }
+
+            if (!enoughRoomOnRight) {
+                dropLeft = offset.left + this.container.outerWidth(false) - dropWidth;
+            }
+
+            css =  {
+                left: dropLeft,
+                width: width
+            };
+
+            if (above) {
+                css.top = offset.top - dropHeight;
+                css.bottom = 'auto';
+                this.container.addClass("select2-drop-above");
+                $dropdown.addClass("select2-drop-above");
+            }
+            else {
+                css.top = dropTop;
+                css.bottom = 'auto';
+                this.container.removeClass("select2-drop-above");
+                $dropdown.removeClass("select2-drop-above");
+            }
+            css = $.extend(css, evaluate(this.opts.dropdownCss, this.opts.element));
+
+            $dropdown.css(css);
+        },
+
+        // abstract
+        shouldOpen: function() {
+            var event;
+
+            if (this.opened()) return false;
+
+            if (this._enabled === false || this._readonly === true) return false;
+
+            event = $.Event("select2-opening");
+            this.opts.element.trigger(event);
+            return !event.isDefaultPrevented();
+        },
+
+        // abstract
+        clearDropdownAlignmentPreference: function() {
+            // clear the classes used to figure out the preference of where the dropdown should be opened
+            this.container.removeClass("select2-drop-above");
+            this.dropdown.removeClass("select2-drop-above");
+        },
+
+        /**
+         * Opens the dropdown
+         *
+         * @return {Boolean} whether or not dropdown was opened. This method will return false if, for example,
+         * the dropdown is already open, or if the 'select2-opening' event listener on the element called preventDefault().
+         */
+        // abstract
+        open: function () {
+
+            if (!this.shouldOpen()) return false;
+
+            this.opening();
+
+            // Only bind the document mousemove when the dropdown is visible
+            $document.on("mousemove.select2Event", function (e) {
+                lastMousePosition.x = e.pageX;
+                lastMousePosition.y = e.pageY;
+            });
+
+            return true;
+        },
+
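+        // Editor's note (illustrative, not upstream code): opening can be vetoed
+        // from the "select2-opening" event fired by shouldOpen() above, e.g.:
+        //   $("#e").on("select2-opening", function (e) { e.preventDefault(); });
+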
+        /**
+         * Performs the opening of the dropdown
+         */
+        // abstract
+        opening: function() {
+            var cid = this.containerEventName,
+                scroll = "scroll." + cid,
+                resize = "resize."+cid,
+                orient = "orientationchange."+cid,
+                mask;
+
+            this.container.addClass("select2-dropdown-open").addClass("select2-container-active");
+
+            this.clearDropdownAlignmentPreference();
+
+            if (this.dropdown[0] !== this.body.children().last()[0]) {
+                this.dropdown.detach().appendTo(this.body);
+            }
+
+            // create the dropdown mask if it doesn't already exist
+            mask = $("#select2-drop-mask");
+            if (mask.length == 0) {
+                mask = $(document.createElement("div"));
+                mask.attr("id","select2-drop-mask").attr("class","select2-drop-mask");
+                mask.hide();
+                mask.appendTo(this.body);
+                mask.on("mousedown touchstart click", function (e) {
+                    // Prevent IE from generating a click event on the body
+                    reinsertElement(mask);
+
+                    var dropdown = $("#select2-drop"), self;
+                    if (dropdown.length > 0) {
+                        self=dropdown.data("select2");
+                        if (self.opts.selectOnBlur) {
+                            self.selectHighlighted({noFocus: true});
+                        }
+                        self.close();
+                        e.preventDefault();
+                        e.stopPropagation();
+                    }
+                });
+            }
+
+            // ensure the mask is always right before the dropdown
+            if (this.dropdown.prev()[0] !== mask[0]) {
+                this.dropdown.before(mask);
+            }
+
+            // move the global id to the correct dropdown
+            $("#select2-drop").removeAttr("id");
+            this.dropdown.attr("id", "select2-drop");
+
+            // show the elements
+            mask.show();
+
+            this.positionDropdown();
+            this.dropdown.show();
+            this.positionDropdown();
+
+            this.dropdown.addClass("select2-drop-active");
+
+            // attach listeners to events that can change the position of the container and thus require
+            // the position of the dropdown to be updated as well so it does not come unglued from the container
+            var that = this;
+            this.container.parents().add(window).each(function () {
+                $(this).on(resize+" "+scroll+" "+orient, function (e) {
+                    if (that.opened()) that.positionDropdown();
+                });
+            });
+        },
+
+        // abstract
+        close: function () {
+            if (!this.opened()) return;
+
+            var cid = this.containerEventName,
+                scroll = "scroll." + cid,
+                resize = "resize."+cid,
+                orient = "orientationchange."+cid;
+
+            // unbind event listeners
+            this.container.parents().add(window).each(function () { $(this).off(scroll).off(resize).off(orient); });
+
+            this.clearDropdownAlignmentPreference();
+
+            $("#select2-drop-mask").hide();
+            this.dropdown.removeAttr("id"); // only the active dropdown has the select2-drop id
+            this.dropdown.hide();
+            this.container.removeClass("select2-dropdown-open").removeClass("select2-container-active");
+            this.results.empty();
+
+            // Now that the dropdown is closed, unbind the global document mousemove event
+            $document.off("mousemove.select2Event");
+
+            this.clearSearch();
+            this.search.removeClass("select2-active");
+            this.opts.element.trigger($.Event("select2-close"));
+        },
+
+        /**
+         * Opens the control, sets the input value, and updates the results.
+         */
+        // abstract
+        externalSearch: function (term) {
+            this.open();
+            this.search.val(term);
+            this.updateResults(false);
+        },
+
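+        // Editor's note (illustrative, not upstream code): externalSearch backs
+        // the public "search" method, e.g. $("#e").select2("search", "foo")
+        // opens the dropdown and runs the query for "foo".
+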
+        // abstract
+        clearSearch: function () {
+
+        },
+
+        // abstract
+        getMaximumSelectionSize: function() {
+            return evaluate(this.opts.maximumSelectionSize, this.opts.element);
+        },
+
+        // abstract
+        ensureHighlightVisible: function () {
+            var results = this.results, children, index, child, hb, rb, y, more, topOffset;
+
+            index = this.highlight();
+
+            if (index < 0) return;
+
+            if (index == 0) {
+
+                // if the first element is highlighted scroll all the way to the top,
+                // that way any unselectable headers above it will also be scrolled
+                // into view
+
+                results.scrollTop(0);
+                return;
+            }
+
+            children = this.findHighlightableChoices().find('.select2-result-label');
+
+            child = $(children[index]);
+
+            topOffset = (child.offset() || {}).top || 0;
+
+            hb = topOffset + child.outerHeight(true);
+
+            // if this is the last child, let's also make sure select2-more-results is visible
+            if (index === children.length - 1) {
+                more = results.find("li.select2-more-results");
+                if (more.length > 0) {
+                    hb = more.offset().top + more.outerHeight(true);
+                }
+            }
+
+            rb = results.offset().top + results.outerHeight(true);
+            if (hb > rb) {
+                results.scrollTop(results.scrollTop() + (hb - rb));
+            }
+            y = topOffset - results.offset().top;
+
+            // make sure the top of the element is visible
+            if (y < 0 && child.css('display') != 'none' ) {
+                results.scrollTop(results.scrollTop() + y); // y is negative
+            }
+        },
+
+        // abstract
+        findHighlightableChoices: function() {
+            return this.results.find(".select2-result-selectable:not(.select2-disabled):not(.select2-selected)");
+        },
+
+        // abstract
+        moveHighlight: function (delta) {
+            var choices = this.findHighlightableChoices(),
+                index = this.highlight();
+
+            while (index > -1 && index < choices.length) {
+                index += delta;
+                var choice = $(choices[index]);
+                if (choice.hasClass("select2-result-selectable") && !choice.hasClass("select2-disabled") && !choice.hasClass("select2-selected")) {
+                    this.highlight(index);
+                    break;
+                }
+            }
+        },
+
+        // abstract
+        highlight: function (index) {
+            var choices = this.findHighlightableChoices(),
+                choice,
+                data;
+
+            if (arguments.length === 0) {
+                return indexOf(choices.filter(".select2-highlighted")[0], choices.get());
+            }
+
+            if (index >= choices.length) index = choices.length - 1;
+            if (index < 0) index = 0;
+
+            this.removeHighlight();
+
+            choice = $(choices[index]);
+            choice.addClass("select2-highlighted");
+
+            // ensure assistive technology can determine the active choice
+            this.search.attr("aria-activedescendant", choice.find(".select2-result-label").attr("id"));
+
+            this.ensureHighlightVisible();
+
+            this.liveRegion.text(choice.text());
+
+            data = choice.data("select2-data");
+            if (data) {
+                this.opts.element.trigger({ type: "select2-highlight", val: this.id(data), choice: data });
+            }
+        },
+
+        removeHighlight: function() {
+            this.results.find(".select2-highlighted").removeClass("select2-highlighted");
+        },
+
+        touchMoved: function() {
+            this._touchMoved = true;
+        },
+
+        clearTouchMoved: function() {
+            this._touchMoved = false;
+        },
+
+        // abstract
+        countSelectableResults: function() {
+            return this.findHighlightableChoices().length;
+        },
+
+        // abstract
+        highlightUnderEvent: function (event) {
+            var el = $(event.target).closest(".select2-result-selectable");
+            if (el.length > 0 && !el.is(".select2-highlighted")) {
+                var choices = this.findHighlightableChoices();
+                this.highlight(choices.index(el));
+            } else if (el.length == 0) {
+                // if we are over an unselectable item remove all highlights
+                this.removeHighlight();
+            }
+        },
+
+        // abstract
+        loadMoreIfNeeded: function () {
+            var results = this.results,
+                more = results.find("li.select2-more-results"),
+                below, // pixels the element is below the scroll fold, below==0 is when the element is starting to be visible
+                page = this.resultsPage + 1,
+                self=this,
+                term=this.search.val(),
+                context=this.context;
+
+            if (more.length === 0) return;
+            below = more.offset().top - results.offset().top - results.height();
+
+            if (below <= this.opts.loadMorePadding) {
+                more.addClass("select2-active");
+                this.opts.query({
+                        element: this.opts.element,
+                        term: term,
+                        page: page,
+                        context: context,
+                        matcher: this.opts.matcher,
+                        callback: this.bind(function (data) {
+
+                    // ignore a response if the select2 has been closed before it was received
+                    if (!self.opened()) return;
+
+                    self.opts.populateResults.call(this, results, data.results, {term: term, page: page, context:context});
+                    self.postprocessResults(data, false, false);
+
+                    if (data.more===true) {
+                        more.detach().appendTo(results).text(evaluate(self.opts.formatLoadMore, self.opts.element, page+1));
+                        window.setTimeout(function() { self.loadMoreIfNeeded(); }, 10);
+                    } else {
+                        more.remove();
+                    }
+                    self.positionDropdown();
+                    self.resultsPage = page;
+                    self.context = data.context;
+                    this.opts.element.trigger({ type: "select2-loaded", items: data });
+                })});
+            }
+        },
+
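+        // Editor's note (illustrative, not upstream code): paging kicks in when
+        // the query callback reports more results; with the ajax helper that
+        // means a results() mapper shaped like:
+        //   ajax: {
+        //       url: "/search",
+        //       data: function (term, page) { return { q: term, page: page }; },
+        //       results: function (data, page) {
+        //           return { results: data.items, more: data.hasMore };
+        //       }
+        //   }
+        // which keeps the "load more" row alive until more === false.
+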
+        /**
+         * Default tokenizer function which does nothing
+         */
+        tokenize: function() {
+
+        },
+
+        /**
+         * @param initial whether or not this is the call to this method right after the dropdown has been opened
+         */
+        // abstract
+        updateResults: function (initial) {
+            var search = this.search,
+                results = this.results,
+                opts = this.opts,
+                data,
+                self = this,
+                input,
+                term = search.val(),
+                lastTerm = $.data(this.container, "select2-last-term"),
+                // sequence number used to drop out-of-order responses
+                queryNumber;
+
+            // prevent duplicate queries against the same term
+            if (initial !== true && lastTerm && equal(term, lastTerm)) return;
+
+            $.data(this.container, "select2-last-term", term);
+
+            // if the search is currently hidden we do not alter the results
+            if (initial !== true && (this.showSearchInput === false || !this.opened())) {
+                return;
+            }
+
+            function postRender() {
+                search.removeClass("select2-active");
+                self.positionDropdown();
+                if (results.find('.select2-no-results,.select2-selection-limit,.select2-searching').length) {
+                    self.liveRegion.text(results.text());
+                }
+                else {
+                    self.liveRegion.text(self.opts.formatMatches(results.find('.select2-result-selectable').length));
+                }
+            }
+
+            function render(html) {
+                results.html(html);
+                postRender();
+            }
+
+            queryNumber = ++this.queryCount;
+
+            var maxSelSize = this.getMaximumSelectionSize();
+            if (maxSelSize >= 1) {
+                data = this.data();
+                if ($.isArray(data) && data.length >= maxSelSize && checkFormatter(opts.formatSelectionTooBig, "formatSelectionTooBig")) {
+                    render("<li class='select2-selection-limit'>" + evaluate(opts.formatSelectionTooBig, opts.element, maxSelSize) + "</li>");
+                    return;
+                }
+            }
+
+            if (search.val().length < opts.minimumInputLength) {
+                if (checkFormatter(opts.formatInputTooShort, "formatInputTooShort")) {
+                    render("<li class='select2-no-results'>" + evaluate(opts.formatInputTooShort, opts.element, search.val(), opts.minimumInputLength) + "</li>");
+                } else {
+                    render("");
+                }
+                if (initial && this.showSearch) this.showSearch(true);
+                return;
+            }
+
+            if (opts.maximumInputLength && search.val().length > opts.maximumInputLength) {
+                if (checkFormatter(opts.formatInputTooLong, "formatInputTooLong")) {
+                    render("<li class='select2-no-results'>" + evaluate(opts.formatInputTooLong, opts.element, search.val(), opts.maximumInputLength) + "</li>");
+                } else {
+                    render("");
+                }
+                return;
+            }
+
+            if (opts.formatSearching && this.findHighlightableChoices().length === 0) {
+                render("<li class='select2-searching'>" + evaluate(opts.formatSearching, opts.element) + "</li>");
+            }
+
+            search.addClass("select2-active");
+
+            this.removeHighlight();
+
+            // give the tokenizer a chance to pre-process the input
+            input = this.tokenize();
+            if (input != null) {
+                search.val(input);
+            }
+
+            this.resultsPage = 1;
+
+            opts.query({
+                    element: opts.element,
+                    term: search.val(),
+                    page: this.resultsPage,
+                    context: null,
+                    matcher: opts.matcher,
+                    callback: this.bind(function (data) {
+                var def; // default choice
+
+                // ignore old responses
+                if (queryNumber != this.queryCount) {
+                    return;
+                }
+
+                // ignore a response if the select2 has been closed before it was received
+                if (!this.opened()) {
+                    this.search.removeClass("select2-active");
+                    return;
+                }
+
+                // handle ajax error
+                if (data.hasError !== undefined && checkFormatter(opts.formatAjaxError, "formatAjaxError")) {
+                    render("<li class='select2-ajax-error'>" + evaluate(opts.formatAjaxError, opts.element, data.jqXHR, data.textStatus, data.errorThrown) + "</li>");
+                    return;
+                }
+
+                // save context, if any
+                this.context = (data.context===undefined) ? null : data.context;
+                // create a default choice and prepend it to the list
+                if (this.opts.createSearchChoice && search.val() !== "") {
+                    def = this.opts.createSearchChoice.call(self, search.val(), data.results);
+                    if (def !== undefined && def !== null && self.id(def) !== undefined && self.id(def) !== null) {
+                        if ($(data.results).filter(
+                            function () {
+                                return equal(self.id(this), self.id(def));
+                            }).length === 0) {
+                            this.opts.createSearchChoicePosition(data.results, def);
+                        }
+                    }
+                }
+
+                if (data.results.length === 0 && checkFormatter(opts.formatNoMatches, "formatNoMatches")) {
+                    render("<li class='select2-no-results'>" + evaluate(opts.formatNoMatches, opts.element, search.val()) + "</li>");
+                    return;
+                }
+
+                results.empty();
+                self.opts.populateResults.call(this, results, data.results, {term: search.val(), page: this.resultsPage, context:null});
+
+                if (data.more === true && checkFormatter(opts.formatLoadMore, "formatLoadMore")) {
+                    results.append("<li class='select2-more-results'>" + opts.escapeMarkup(evaluate(opts.formatLoadMore, opts.element, this.resultsPage)) + "</li>");
+                    window.setTimeout(function() { self.loadMoreIfNeeded(); }, 10);
+                }
+
+                this.postprocessResults(data, initial);
+
+                postRender();
+
+                this.opts.element.trigger({ type: "select2-loaded", items: data });
+            })});
+        },
+
+        // abstract
+        cancel: function () {
+            this.close();
+        },
+
+        // abstract
+        blur: function () {
+            // if selectOnBlur == true, select the currently highlighted option
+            if (this.opts.selectOnBlur)
+                this.selectHighlighted({noFocus: true});
+
+            this.close();
+            this.container.removeClass("select2-container-active");
+            // equivalent to .is(':focus'), which is available in jquery >= 1.6
+            if (this.search[0] === document.activeElement) { this.search.blur(); }
+            this.clearSearch();
+            this.selection.find(".select2-search-choice-focus").removeClass("select2-search-choice-focus");
+        },
+
+        // abstract
+        focusSearch: function () {
+            focus(this.search);
+        },
+
+        // abstract
+        selectHighlighted: function (options) {
+            if (this._touchMoved) {
+                this.clearTouchMoved();
+                return;
+            }
+            var index=this.highlight(),
+                highlighted=this.results.find(".select2-highlighted"),
+                data = highlighted.closest('.select2-result').data("select2-data");
+
+            if (data) {
+                this.highlight(index);
+                this.onSelect(data, options);
+            } else if (options && options.noFocus) {
+                this.close();
+            }
+        },
+
+        // abstract
+        getPlaceholder: function () {
+            var placeholderOption;
+            return this.opts.element.attr("placeholder") ||
+                this.opts.element.attr("data-placeholder") || // jquery 1.4 compat
+                this.opts.element.data("placeholder") ||
+                this.opts.placeholder ||
+                ((placeholderOption = this.getPlaceholderOption()) !== undefined ? placeholderOption.text() : undefined);
+        },
+
+        // abstract
+        getPlaceholderOption: function() {
+            if (this.select) {
+                var firstOption = this.select.children('option').first();
+                if (this.opts.placeholderOption !== undefined) {
+                    // Determine the placeholder option based on the specified placeholderOption setting
+                    return (this.opts.placeholderOption === "first" && firstOption) ||
+                           (typeof this.opts.placeholderOption === "function" && this.opts.placeholderOption(this.select));
+                } else if ($.trim(firstOption.text()) === "" && firstOption.val() === "") {
+                    // No explicit placeholder option specified, use the first if it's blank
+                    return firstOption;
+                }
+            }
+        },
+
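+        // Editor's note (illustrative, not upstream code): on a <select>, the
+        // placeholder needs a blank first option (or an explicit
+        // placeholderOption), e.g.:
+        //   <select id="e"><option></option><option value="1">one</option></select>
+        //   $("#e").select2({ placeholder: "Pick one", allowClear: true });
+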
+        /**
+         * Get the desired width for the container element.  This is
+         * derived first from option `width` passed to select2, then
+         * the inline 'style' on the original element, and finally
+         * falls back to the jQuery calculated element width.
+         */
+        // abstract
+        initContainerWidth: function () {
+            function resolveContainerWidth() {
+                var style, attrs, matches, i, l, attr;
+
+                if (this.opts.width === "off") {
+                    return null;
+                } else if (this.opts.width === "element"){
+                    return this.opts.element.outerWidth(false) === 0 ? 'auto' : this.opts.element.outerWidth(false) + 'px';
+                } else if (this.opts.width === "copy" || this.opts.width === "resolve") {
+                    // check if there is inline style on the element that contains width
+                    style = this.opts.element.attr('style');
+                    if (style !== undefined) {
+                        attrs = style.split(';');
+                        for (i = 0, l = attrs.length; i < l; i = i + 1) {
+                            attr = attrs[i].replace(/\s/g, '');
+                            matches = attr.match(/^width:(([-+]?([0-9]*\.)?[0-9]+)(px|em|ex|%|in|cm|mm|pt|pc))/i);
+                            if (matches !== null && matches.length >= 1)
+                                return matches[1];
+                        }
+                    }
+
+                    if (this.opts.width === "resolve") {
+                        // next check if css('width') can resolve a width that is percent based, this is sometimes possible
+                        // when attached to input type=hidden or elements hidden via css
+                        style = this.opts.element.css('width');
+                        if (style.indexOf("%") > 0) return style;
+
+                        // finally, fallback on the calculated width of the element
+                        return (this.opts.element.outerWidth(false) === 0 ? 'auto' : this.opts.element.outerWidth(false) + 'px');
+                    }
+
+                    return null;
+                } else if ($.isFunction(this.opts.width)) {
+                    return this.opts.width();
+                } else {
+                    return this.opts.width;
+                }
+            }
+
+            var width = resolveContainerWidth.call(this);
+            if (width !== null) {
+                this.container.css("width", width);
+            }
+        }
+    });
+
+    SingleSelect2 = clazz(AbstractSelect2, {
+
+        // single
+
+        createContainer: function () {
+            var container = $(document.createElement("div")).attr({
+                "class": "select2-container"
+            }).html([
+                "<a href='javascript:void(0)' class='select2-choice' tabindex='-1'>",
+                "   <span class='select2-chosen'> </span><abbr class='select2-search-choice-close'></abbr>",
+                "   <span class='select2-arrow' role='presentation'><b role='presentation'></b></span>",
+                "</a>",
+                "<label for='' class='select2-offscreen'></label>",
+                "<input class='select2-focusser select2-offscreen' type='text' aria-haspopup='true' role='button' />",
+                "<div class='select2-drop select2-display-none'>",
+                "   <div class='select2-search'>",
+                "       <label for='' class='select2-offscreen'></label>",
+                "       <input type='text' autocomplete='off' autocorrect='off' autocapitalize='off' spellcheck='false' class='select2-input' role='combobox' aria-expanded='true'",
+                "       aria-autocomplete='list' />",
+                "   </div>",
+                "   <ul class='select2-results' role='listbox'>",
+                "   </ul>",
+                "</div>"].join(""));
+            return container;
+        },
+
+        // single
+        enableInterface: function() {
+            if (this.parent.enableInterface.apply(this, arguments)) {
+                this.focusser.prop("disabled", !this.isInterfaceEnabled());
+            }
+        },
+
+        // single
+        opening: function () {
+            var el, range, len;
+
+            if (this.opts.minimumResultsForSearch >= 0) {
+                this.showSearch(true);
+            }
+
+            this.parent.opening.apply(this, arguments);
+
+            if (this.showSearchInput !== false) {
+                // IE appends focusser.val() at the end of the field :/ so we manually insert it at the beginning using a range
+                // all other browsers handle this just fine
+
+                this.search.val(this.focusser.val());
+            }
+            if (this.opts.shouldFocusInput(this)) {
+                this.search.focus();
+                // move the cursor to the end after focussing, otherwise it will be at the beginning and
+                // new text will appear *before* focusser.val()
+                el = this.search.get(0);
+                if (el.createTextRange) {
+                    range = el.createTextRange();
+                    range.collapse(false);
+                    range.select();
+                } else if (el.setSelectionRange) {
+                    len = this.search.val().length;
+                    el.setSelectionRange(len, len);
+                }
+            }
+
+            // initializes search's value with nextSearchTerm (if defined by user)
+            // ignore nextSearchTerm if the dropdown is opened by the user pressing a letter
+            if (this.search.val() === "") {
+                if (this.nextSearchTerm != undefined) {
+                    this.search.val(this.nextSearchTerm);
+                    this.search.select();
+                }
+            }
+
+            this.focusser.prop("disabled", true).val("");
+            this.updateResults(true);
+            this.opts.element.trigger($.Event("select2-open"));
+        },
+
+        // single
+        close: function () {
+            if (!this.opened()) return;
+            this.parent.close.apply(this, arguments);
+
+            this.focusser.prop("disabled", false);
+
+            if (this.opts.shouldFocusInput(this)) {
+                this.focusser.focus();
+            }
+        },
+
+        // single
+        focus: function () {
+            if (this.opened()) {
+                this.close();
+            } else {
+                this.focusser.prop("disabled", false);
+                if (this.opts.shouldFocusInput(this)) {
+                    this.focusser.focus();
+                }
+            }
+        },
+
+        // single
+        isFocused: function () {
+            return this.container.hasClass("select2-container-active");
+        },
+
+        // single
+        cancel: function () {
+            this.parent.cancel.apply(this, arguments);
+            this.focusser.prop("disabled", false);
+
+            if (this.opts.shouldFocusInput(this)) {
+                this.focusser.focus();
+            }
+        },
+
+        // single
+        destroy: function() {
+            $("label[for='" + this.focusser.attr('id') + "']")
+                .attr('for', this.opts.element.attr("id"));
+            this.parent.destroy.apply(this, arguments);
+
+            cleanupJQueryElements.call(this,
+                "selection",
+                "focusser"
+            );
+        },
+
+        // single
+        initContainer: function () {
+
+            var selection,
+                container = this.container,
+                dropdown = this.dropdown,
+                idSuffix = nextUid(),
+                elementLabel;
+
+            if (this.opts.minimumResultsForSearch < 0) {
+                this.showSearch(false);
+            } else {
+                this.showSearch(true);
+            }
+
+            this.selection = selection = container.find(".select2-choice");
+
+            this.focusser = container.find(".select2-focusser");
+
+            // add aria associations
+            selection.find(".select2-chosen").attr("id", "select2-chosen-"+idSuffix);
+            this.focusser.attr("aria-labelledby", "select2-chosen-"+idSuffix);
+            this.results.attr("id", "select2-results-"+idSuffix);
+            this.search.attr("aria-owns", "select2-results-"+idSuffix);
+
+            // rewrite labels from original element to focusser
+            this.focusser.attr("id", "s2id_autogen"+idSuffix);
+
+            elementLabel = $("label[for='" + this.opts.element.attr("id") + "']");
+
+            this.focusser.prev()
+                .text(elementLabel.text())
+                .attr('for', this.focusser.attr('id'));
+
+            // Ensure the original element retains an accessible name
+            var originalTitle = this.opts.element.attr("title");
+            this.opts.element.attr("title", (originalTitle || elementLabel.text()));
+
+            this.focusser.attr("tabindex", this.elementTabIndex);
+
+            // write label for search field using the label from the focusser element
+            this.search.attr("id", this.focusser.attr('id') + '_search');
+
+            this.search.prev()
+                .text($("label[for='" + this.focusser.attr('id') + "']").text())
+                .attr('for', this.search.attr('id'));
+
+            this.search.on("keydown", this.bind(function (e) {
+                if (!this.isInterfaceEnabled()) return;
+
+                // ignore keyCode 229 (input method editor is processing key input)
+                if (229 == e.keyCode) return;
+
+                if (e.which === KEY.PAGE_UP || e.which === KEY.PAGE_DOWN) {
+                    // prevent the page from scrolling
+                    killEvent(e);
+                    return;
+                }
+
+                switch (e.which) {
+                    case KEY.UP:
+                    case KEY.DOWN:
+                        this.moveHighlight((e.which === KEY.UP) ? -1 : 1);
+                        killEvent(e);
+                        return;
+                    case KEY.ENTER:
+                        this.selectHighlighted();
+                        killEvent(e);
+                        return;
+                    case KEY.TAB:
+                        this.selectHighlighted({noFocus: true});
+                        return;
+                    case KEY.ESC:
+                        this.cancel(e);
+                        killEvent(e);
+                        return;
+                }
+            }));
+
+            this.search.on("blur", this.bind(function(e) {
+                // a workaround for chrome to keep the search field focussed when the scroll bar is used to scroll the dropdown.
+                // without this the search field loses focus which is annoying
+                if (document.activeElement === this.body.get(0)) {
+                    window.setTimeout(this.bind(function() {
+                        if (this.opened()) {
+                            this.search.focus();
+                        }
+                    }), 0);
+                }
+            }));
+
+            this.focusser.on("keydown", this.bind(function (e) {
+                if (!this.isInterfaceEnabled()) return;
+
+                if (e.which === KEY.TAB || KEY.isControl(e) || KEY.isFunctionKey(e) || e.which === KEY.ESC) {
+                    return;
+                }
+
+                if (this.opts.openOnEnter === false && e.which === KEY.ENTER) {
+                    killEvent(e);
+                    return;
+                }
+
+                if (e.which == KEY.DOWN || e.which == KEY.UP
+                    || (e.which == KEY.ENTER && this.opts.openOnEnter)) {
+
+                    if (e.altKey || e.ctrlKey || e.shiftKey || e.metaKey) return;
+
+                    this.open();
+                    killEvent(e);
+                    return;
+                }
+
+                if (e.which == KEY.DELETE || e.which == KEY.BACKSPACE) {
+                    if (this.opts.allowClear) {
+                        this.clear();
+                    }
+                    killEvent(e);
+                    return;
+                }
+            }));
+
+            installKeyUpChangeEvent(this.focusser);
+            this.focusser.on("keyup-change input", this.bind(function(e) {
+                if (this.opts.minimumResultsForSearch >= 0) {
+                    e.stopPropagation();
+                    if (this.opened()) return;
+                    this.open();
+                }
+            }));
+
+            selection.on("mousedown touchstart", "abbr", this.bind(function (e) {
+                if (!this.isInterfaceEnabled()) return;
+                this.clear();
+                killEventImmediately(e);
+                this.close();
+                this.selection.focus();
+            }));
+
+            selection.on("mousedown touchstart", this.bind(function (e) {
+                // Prevent IE from generating a click event on the body
+                reinsertElement(selection);
+
+                if (!this.container.hasClass("select2-container-active")) {
+                    this.opts.element.trigger($.Event("select2-focus"));
+                }
+
+                if (this.opened()) {
+                    this.close();
+                } else if (this.isInterfaceEnabled()) {
+                    this.open();
+                }
+
+                killEvent(e);
+            }));
+
+            dropdown.on("mousedown touchstart", this.bind(function() {
+                if (this.opts.shouldFocusInput(this)) {
+                    this.search.focus();
+                }
+            }));
+
+            selection.on("focus", this.bind(function(e) {
+                killEvent(e);
+            }));
+
+            this.focusser.on("focus", this.bind(function(){
+                if (!this.container.hasClass("select2-container-active")) {
+                    this.opts.element.trigger($.Event("select2-focus"));
+                }
+                this.container.addClass("select2-container-active");
+            })).on("blur", this.bind(function() {
+                if (!this.opened()) {
+                    this.container.removeClass("select2-container-active");
+                    this.opts.element.trigger($.Event("select2-blur"));
+                }
+            }));
+            this.search.on("focus", this.bind(function(){
+                if (!this.container.hasClass("select2-container-active")) {
+                    this.opts.element.trigger($.Event("select2-focus"));
+                }
+                this.container.addClass("select2-container-active");
+            }));
+
+            this.initContainerWidth();
+            this.opts.element.addClass("select2-offscreen");
+            this.setPlaceholder();
+
+        },
+
+        // single
+        clear: function(triggerChange) {
+            var data=this.selection.data("select2-data");
+            if (data) { // guard against queued quick consecutive clicks
+                var evt = $.Event("select2-clearing");
+                this.opts.element.trigger(evt);
+                if (evt.isDefaultPrevented()) {
+                    return;
+                }
+                var placeholderOption = this.getPlaceholderOption();
+                this.opts.element.val(placeholderOption ? placeholderOption.val() : "");
+                this.selection.find(".select2-chosen").empty();
+                this.selection.removeData("select2-data");
+                this.setPlaceholder();
+
+                if (triggerChange !== false){
+                    this.opts.element.trigger({ type: "select2-removed", val: this.id(data), choice: data });
+                    this.triggerChange({removed:data});
+                }
+            }
+        },
+
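+        // Editor's note (illustrative, not upstream code): when select2 is bound
+        // to a hidden input, clear() is also reached through val() with a falsy,
+        // non-zero value, e.g. $("#e").select2("val", "", true) clears the
+        // selection and fires change.
+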
+        /**
+         * Sets selection based on source element's value
+         */
+        // single
+        initSelection: function () {
+            var selected;
+            if (this.isPlaceholderOptionSelected()) {
+                this.updateSelection(null);
+                this.close();
+                this.setPlaceholder();
+            } else {
+                var self = this;
+                this.opts.initSelection.call(null, this.opts.element, function(selected){
+                    if (selected !== undefined && selected !== null) {
+                        self.updateSelection(selected);
+                        self.close();
+                        self.setPlaceholder();
+                        self.nextSearchTerm = self.opts.nextSearchTerm(selected, self.search.val());
+                    }
+                });
+            }
+        },
+
+        isPlaceholderOptionSelected: function() {
+            var placeholderOption;
+            if (this.getPlaceholder() === undefined) return false; // no placeholder specified so no option should be considered
+            return ((placeholderOption = this.getPlaceholderOption()) !== undefined && placeholderOption.prop("selected"))
+                || (this.opts.element.val() === "")
+                || (this.opts.element.val() === undefined)
+                || (this.opts.element.val() === null);
+        },
+
+        // single
+        prepareOpts: function () {
+            var opts = this.parent.prepareOpts.apply(this, arguments),
+                self=this;
+
+            if (opts.element.get(0).tagName.toLowerCase() === "select") {
+                // install the selection initializer
+                opts.initSelection = function (element, callback) {
+                    var selected = element.find("option").filter(function() { return this.selected && !this.disabled });
+                    // a single select box always has a value, no need to null check 'selected'
+                    callback(self.optionToData(selected));
+                };
+            } else if ("data" in opts) {
+                // install default initSelection when applied to hidden input and data is local
+                opts.initSelection = opts.initSelection || function (element, callback) {
+                    var id = element.val();
+                    // search in data by id, storing the actual matching item
+                    var match = null;
+                    opts.query({
+                        matcher: function(term, text, el){
+                            var is_match = equal(id, opts.id(el));
+                            if (is_match) {
+                                match = el;
+                            }
+                            return is_match;
+                        },
+                        callback: !$.isFunction(callback) ? $.noop : function() {
+                            callback(match);
+                        }
+                    });
+                };
+            }
+
+            return opts;
+        },
+
+        // single
+        getPlaceholder: function() {
+            // if a placeholder is specified on a single select without a valid placeholder option, ignore it
+            if (this.select) {
+                if (this.getPlaceholderOption() === undefined) {
+                    return undefined;
+                }
+            }
+
+            return this.parent.getPlaceholder.apply(this, arguments);
+        },
+
+        // single
+        setPlaceholder: function () {
+            var placeholder = this.getPlaceholder();
+
+            if (this.isPlaceholderOptionSelected() && placeholder !== undefined) {
+
+                // check for a placeholder option if attached to a select
+                if (this.select && this.getPlaceholderOption() === undefined) return;
+
+                this.selection.find(".select2-chosen").html(this.opts.escapeMarkup(placeholder));
+
+                this.selection.addClass("select2-default");
+
+                this.container.removeClass("select2-allowclear");
+            }
+        },
+
+        // single
+        postprocessResults: function (data, initial, noHighlightUpdate) {
+            var selected = 0, self = this, showSearchInput = true;
+
+            // find the selected element in the result list
+
+            this.findHighlightableChoices().each2(function (i, elm) {
+                if (equal(self.id(elm.data("select2-data")), self.opts.element.val())) {
+                    selected = i;
+                    return false;
+                }
+            });
+
+            // and highlight it
+            if (noHighlightUpdate !== false) {
+                if (initial === true && selected >= 0) {
+                    this.highlight(selected);
+                } else {
+                    this.highlight(0);
+                }
+            }
+
+            // on the first batch of results, show the search box only if there are at least minimumResultsForSearch of them
+
+            if (initial === true) {
+                var min = this.opts.minimumResultsForSearch;
+                if (min >= 0) {
+                    this.showSearch(countResults(data.results) >= min);
+                }
+            }
+        },
+
+        // single
+        showSearch: function(showSearchInput) {
+            if (this.showSearchInput === showSearchInput) return;
+
+            this.showSearchInput = showSearchInput;
+
+            this.dropdown.find(".select2-search").toggleClass("select2-search-hidden", !showSearchInput);
+            this.dropdown.find(".select2-search").toggleClass("select2-offscreen", !showSearchInput);
+            //add "select2-with-searchbox" to the container if search box is shown
+            $(this.dropdown, this.container).toggleClass("select2-with-searchbox", showSearchInput);
+        },
+
+        // single
+        onSelect: function (data, options) {
+
+            if (!this.triggerSelect(data)) { return; }
+
+            var old = this.opts.element.val(),
+                oldData = this.data();
+
+            this.opts.element.val(this.id(data));
+            this.updateSelection(data);
+
+            this.opts.element.trigger({ type: "select2-selected", val: this.id(data), choice: data });
+
+            this.nextSearchTerm = this.opts.nextSearchTerm(data, this.search.val());
+            this.close();
+
+            if ((!options || !options.noFocus) && this.opts.shouldFocusInput(this)) {
+                this.focusser.focus();
+            }
+
+            if (!equal(old, this.id(data))) {
+                this.triggerChange({ added: data, removed: oldData });
+            }
+        },
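+
+        // Consumers can observe selections via the event triggered above
+        // (sketch, hypothetical element id):
+        //
+        //     $("#example").on("select2-selected", function (e) {
+        //         console.log("picked", e.val, e.choice);
+        //     });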
+
+        // single
+        updateSelection: function (data) {
+
+            var container=this.selection.find(".select2-chosen"), formatted, cssClass;
+
+            this.selection.data("select2-data", data);
+
+            container.empty();
+            if (data !== null) {
+                formatted=this.opts.formatSelection(data, container, this.opts.escapeMarkup);
+            }
+            if (formatted !== undefined) {
+                container.append(formatted);
+            }
+            cssClass=this.opts.formatSelectionCssClass(data, container);
+            if (cssClass !== undefined) {
+                container.addClass(cssClass);
+            }
+
+            this.selection.removeClass("select2-default");
+
+            if (this.opts.allowClear && this.getPlaceholder() !== undefined) {
+                this.container.addClass("select2-allowclear");
+            }
+        },
+
+        // single
+        val: function () {
+            var val,
+                triggerChange = false,
+                data = null,
+                self = this,
+                oldData = this.data();
+
+            if (arguments.length === 0) {
+                return this.opts.element.val();
+            }
+
+            val = arguments[0];
+
+            if (arguments.length > 1) {
+                triggerChange = arguments[1];
+            }
+
+            if (this.select) {
+                this.select
+                    .val(val)
+                    .find("option").filter(function() { return this.selected }).each2(function (i, elm) {
+                        data = self.optionToData(elm);
+                        return false;
+                    });
+                this.updateSelection(data);
+                this.setPlaceholder();
+                if (triggerChange) {
+                    this.triggerChange({added: data, removed:oldData});
+                }
+            } else {
+                // val is an id. !val is true for [undefined,null,'',0] - 0 is legal
+                if (!val && val !== 0) {
+                    this.clear(triggerChange);
+                    return;
+                }
+                if (this.opts.initSelection === undefined) {
+                    throw new Error("cannot call val() if initSelection() is not defined");
+                }
+                this.opts.element.val(val);
+                this.opts.initSelection(this.opts.element, function(data){
+                    self.opts.element.val(!data ? "" : self.id(data));
+                    self.updateSelection(data);
+                    self.setPlaceholder();
+                    if (triggerChange) {
+                        self.triggerChange({added: data, removed:oldData});
+                    }
+                });
+            }
+        },
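+
+        // Usage sketch (hypothetical id/data): on a hidden input, val()
+        // resolves the id through initSelection; prepareOpts installs a
+        // default one when local data is supplied:
+        //
+        //     $("#state").select2({
+        //         data: [{ id: "CA", text: "California" }]
+        //     });
+        //     $("#state").select2("val", "CA"); // selects California
+        //     $("#state").select2("val");       // returns "CA"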
+
+        // single
+        clearSearch: function () {
+            this.search.val("");
+            this.focusser.val("");
+        },
+
+        // single
+        data: function(value) {
+            var data,
+                triggerChange = false;
+
+            if (arguments.length === 0) {
+                data = this.selection.data("select2-data");
+                if (data == undefined) data = null;
+                return data;
+            } else {
+                if (arguments.length > 1) {
+                    triggerChange = arguments[1];
+                }
+                if (!value) {
+                    this.clear(triggerChange);
+                } else {
+                    data = this.data();
+                    this.opts.element.val(!value ? "" : this.id(value));
+                    this.updateSelection(value);
+                    if (triggerChange) {
+                        this.triggerChange({added: value, removed:data});
+                    }
+                }
+            }
+        }
+    });
+
+    MultiSelect2 = clazz(AbstractSelect2, {
+
+        // multi
+        createContainer: function () {
+            var container = $(document.createElement("div")).attr({
+                "class": "select2-container select2-container-multi"
+            }).html([
+                "<ul class='select2-choices'>",
+                "  <li class='select2-search-field'>",
+                "    <label for='' class='select2-offscreen'></label>",
+                "    <input type='text' autocomplete='off' autocorrect='off' autocapitalize='off' spellcheck='false' class='select2-input'>",
+                "  </li>",
+                "</ul>",
+                "<div class='select2-drop select2-drop-multi select2-display-none'>",
+                "   <ul class='select2-results'>",
+                "   </ul>",
+                "</div>"].join(""));
+            return container;
+        },
+
+        // multi
+        prepareOpts: function () {
+            var opts = this.parent.prepareOpts.apply(this, arguments),
+                self=this;
+
+            // TODO validate placeholder is a string if specified
+
+            if (opts.element.get(0).tagName.toLowerCase() === "select") {
+                // install the selection initializer
+                opts.initSelection = function (element, callback) {
+
+                    var data = [];
+
+                    element.find("option").filter(function() { return this.selected && !this.disabled }).each2(function (i, elm) {
+                        data.push(self.optionToData(elm));
+                    });
+                    callback(data);
+                };
+            } else if ("data" in opts) {
+                // install default initSelection when applied to hidden input and data is local
+                opts.initSelection = opts.initSelection || function (element, callback) {
+                    var ids = splitVal(element.val(), opts.separator);
+                    //search in data by array of ids, storing matching items in a list
+                    var matches = [];
+                    opts.query({
+                        matcher: function(term, text, el){
+                            var is_match = $.grep(ids, function(id) {
+                                return equal(id, opts.id(el));
+                            }).length;
+                            if (is_match) {
+                                matches.push(el);
+                            }
+                            return is_match;
+                        },
+                        callback: !$.isFunction(callback) ? $.noop : function() {
+                            // reorder matches based on the order they appear in the ids array because right now
+                            // they are in the order in which they appear in data array
+                            var ordered = [];
+                            for (var i = 0; i < ids.length; i++) {
+                                var id = ids[i];
+                                for (var j = 0; j < matches.length; j++) {
+                                    var match = matches[j];
+                                    if (equal(id, opts.id(match))) {
+                                        ordered.push(match);
+                                        matches.splice(j, 1);
+                                        break;
+                                    }
+                                }
+                            }
+                            callback(ordered);
+                        }
+                    });
+                };
+            }
+
+            return opts;
+        },
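+
+        // Usage sketch (hypothetical id/data): for a multi-value hidden
+        // input, ids in the value are matched against local data and
+        // re-ordered to follow the input's order, not the data order:
+        //
+        //     <input type="hidden" id="colors" value="2,1" />
+        //     $("#colors").select2({
+        //         multiple: true,
+        //         data: [{ id: 1, text: "red" }, { id: 2, text: "blue" }]
+        //     });
+        //     // renders the selection as: blue, red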
+
+        // multi
+        selectChoice: function (choice) {
+
+            var selected = this.container.find(".select2-search-choice-focus");
+            if (selected.length && choice && choice[0] == selected[0]) {
+                // this choice is already focused; nothing to do
+            } else {
+                if (selected.length) {
+                    this.opts.element.trigger("choice-deselected", selected);
+                }
+                selected.removeClass("select2-search-choice-focus");
+                if (choice && choice.length) {
+                    this.close();
+                    choice.addClass("select2-search-choice-focus");
+                    this.opts.element.trigger("choice-selected", choice);
+                }
+            }
+        },
+
+        // multi
+        destroy: function() {
+            $("label[for='" + this.search.attr('id') + "']")
+                .attr('for', this.opts.element.attr("id"));
+            this.parent.destroy.apply(this, arguments);
+
+            cleanupJQueryElements.call(this,
+                "searchContainer",
+                "selection"
+            );
+        },
+
+        // multi
+        initContainer: function () {
+
+            var selector = ".select2-choices", selection;
+
+            this.searchContainer = this.container.find(".select2-search-field");
+            this.selection = selection = this.container.find(selector);
+
+            var _this = this;
+            this.selection.on("click", ".select2-search-choice:not(.select2-locked)", function (e) {
+                //killEvent(e);
+                _this.search[0].focus();
+                _this.selectChoice($(this));
+            });
+
+            // rewrite labels from original element to focusser
+            this.search.attr("id", "s2id_autogen"+nextUid());
+
+            this.search.prev()
+                .text($("label[for='" + this.opts.element.attr("id") + "']").text())
+                .attr('for', this.search.attr('id'));
+
+            this.search.on("input paste", this.bind(function() {
+                if (this.search.attr('placeholder') && this.search.val().length == 0) return;
+                if (!this.isInterfaceEnabled()) return;
+                if (!this.opened()) {
+                    this.open();
+                }
+            }));
+
+            this.search.attr("tabindex", this.elementTabIndex);
+
+            this.keydowns = 0;
+            this.search.on("keydown", this.bind(function (e) {
+                if (!this.isInterfaceEnabled()) return;
+
+                ++this.keydowns;
+                var selected = selection.find(".select2-search-choice-focus");
+                var prev = selected.prev(".select2-search-choice:not(.select2-locked)");
+                var next = selected.next(".select2-search-choice:not(.select2-locked)");
+                var pos = getCursorInfo(this.search);
+
+                if (selected.length &&
+                    (e.which == KEY.LEFT || e.which == KEY.RIGHT || e.which == KEY.BACKSPACE || e.which == KEY.DELETE || e.which == KEY.ENTER)) {
+                    var selectedChoice = selected;
+                    if (e.which == KEY.LEFT && prev.length) {
+                        selectedChoice = prev;
+                    }
+                    else if (e.which == KEY.RIGHT) {
+                        selectedChoice = next.length ? next : null;
+                    }
+                    else if (e.which === KEY.BACKSPACE) {
+                        if (this.unselect(selected.first())) {
+                            this.search.width(10);
+                            selectedChoice = prev.length ? prev : next;
+                        }
+                    } else if (e.which == KEY.DELETE) {
+                        if (this.unselect(selected.first())) {
+                            this.search.width(10);
+                            selectedChoice = next.length ? next : null;
+                        }
+                    } else if (e.which == KEY.ENTER) {
+                        selectedChoice = null;
+                    }
+
+                    this.selectChoice(selectedChoice);
+                    killEvent(e);
+                    if (!selectedChoice || !selectedChoice.length) {
+                        this.open();
+                    }
+                    return;
+                } else if (((e.which === KEY.BACKSPACE && this.keydowns == 1)
+                    || e.which == KEY.LEFT) && (pos.offset == 0 && !pos.length)) {
+
+                    this.selectChoice(selection.find(".select2-search-choice:not(.select2-locked)").last());
+                    killEvent(e);
+                    return;
+                } else {
+                    this.selectChoice(null);
+                }
+
+                if (this.opened()) {
+                    switch (e.which) {
+                    case KEY.UP:
+                    case KEY.DOWN:
+                        this.moveHighlight((e.which === KEY.UP) ? -1 : 1);
+                        killEvent(e);
+                        return;
+                    case KEY.ENTER:
+                        this.selectHighlighted();
+                        killEvent(e);
+                        return;
+                    case KEY.TAB:
+                        this.selectHighlighted({noFocus:true});
+                        this.close();
+                        return;
+                    case KEY.ESC:
+                        this.cancel(e);
+                        killEvent(e);
+                        return;
+                    }
+                }
+
+                if (e.which === KEY.TAB || KEY.isControl(e) || KEY.isFunctionKey(e)
+                 || e.which === KEY.BACKSPACE || e.which === KEY.ESC) {
+                    return;
+                }
+
+                if (e.which === KEY.ENTER) {
+                    if (this.opts.openOnEnter === false) {
+                        return;
+                    } else if (e.altKey || e.ctrlKey || e.shiftKey || e.metaKey) {
+                        return;
+                    }
+                }
+
+                this.open();
+
+                if (e.which === KEY.PAGE_UP || e.which === KEY.PAGE_DOWN) {
+                    // prevent the page from scrolling
+                    killEvent(e);
+                }
+
+                if (e.which === KEY.ENTER) {
+                    // prevent form from being submitted
+                    killEvent(e);
+                }
+
+            }));
+
+            this.search.on("keyup", this.bind(function (e) {
+                this.keydowns = 0;
+                this.resizeSearch();
+            })
+            );
+
+            this.search.on("blur", this.bind(function(e) {
+                this.container.removeClass("select2-container-active");
+                this.search.removeClass("select2-focused");
+                this.selectChoice(null);
+                if (!this.opened()) this.clearSearch();
+                e.stopImmediatePropagation();
+                this.opts.element.trigger($.Event("select2-blur"));
+            }));
+
+            this.container.on("click", selector, this.bind(function (e) {
+                if (!this.isInterfaceEnabled()) return;
+                if ($(e.target).closest(".select2-search-choice").length > 0) {
+                    // clicked inside a select2 search choice, do not open
+                    return;
+                }
+                this.selectChoice(null);
+                this.clearPlaceholder();
+                if (!this.container.hasClass("select2-container-active")) {
+                    this.opts.element.trigger($.Event("select2-focus"));
+                }
+                this.open();
+                this.focusSearch();
+                e.preventDefault();
+            }));
+
+            this.container.on("focus", selector, this.bind(function () {
+                if (!this.isInterfaceEnabled()) return;
+                if (!this.container.hasClass("select2-container-active")) {
+                    this.opts.element.trigger($.Event("select2-focus"));
+                }
+                this.container.addClass("select2-container-active");
+                this.dropdown.addClass("select2-drop-active");
+                this.clearPlaceholder();
+            }));
+
+            this.initContainerWidth();
+            this.opts.element.addClass("select2-offscreen");
+
+            // set the placeholder if necessary
+            this.clearSearch();
+        },
+
+        // multi
+        enableInterface: function() {
+            if (this.parent.enableInterface.apply(this, arguments)) {
+                this.search.prop("disabled", !this.isInterfaceEnabled());
+            }
+        },
+
+        // multi
+        initSelection: function () {
+            var data;
+            if (this.opts.element.val() === "" && this.opts.element.text() === "") {
+                this.updateSelection([]);
+                this.close();
+                // set the placeholder if necessary
+                this.clearSearch();
+            }
+            if (this.select || this.opts.element.val() !== "") {
+                var self = this;
+                this.opts.initSelection.call(null, this.opts.element, function(data){
+                    if (data !== undefined && data !== null) {
+                        self.updateSelection(data);
+                        self.close();
+                        // set the placeholder if necessary
+                        self.clearSearch();
+                    }
+                });
+            }
+        },
+
+        // multi
+        clearSearch: function () {
+            var placeholder = this.getPlaceholder(),
+                maxWidth = this.getMaxSearchWidth();
+
+            if (placeholder !== undefined  && this.getVal().length === 0 && this.search.hasClass("select2-focused") === false) {
+                this.search.val(placeholder).addClass("select2-default");
+                // stretch the search box to the full width of the container so that as much of the placeholder as possible is visible
+                // we could call this.resizeSearch(), but we do not because that requires a sizer and we do not want to create one so early because of a firefox bug, see #944
+                this.search.width(maxWidth > 0 ? maxWidth : this.container.css("width"));
+            } else {
+                this.search.val("").width(10);
+            }
+        },
+
+        // multi
+        clearPlaceholder: function () {
+            if (this.search.hasClass("select2-default")) {
+                this.search.val("").removeClass("select2-default");
+            }
+        },
+
+        // multi
+        opening: function () {
+            this.clearPlaceholder(); // should be done before super so placeholder is not used to search
+            this.resizeSearch();
+
+            this.parent.opening.apply(this, arguments);
+
+            this.focusSearch();
+
+            // initializes search's value with nextSearchTerm (if defined by user)
+            // ignore nextSearchTerm if the dropdown is opened by the user pressing a letter
+            if(this.search.val() === "") {
+                if(this.nextSearchTerm != undefined){
+                    this.search.val(this.nextSearchTerm);
+                    this.search.select();
+                }
+            }
+
+            this.updateResults(true);
+            if (this.opts.shouldFocusInput(this)) {
+                this.search.focus();
+            }
+            this.opts.element.trigger($.Event("select2-open"));
+        },
+
+        // multi
+        close: function () {
+            if (!this.opened()) return;
+            this.parent.close.apply(this, arguments);
+        },
+
+        // multi
+        focus: function () {
+            this.close();
+            this.search.focus();
+        },
+
+        // multi
+        isFocused: function () {
+            return this.search.hasClass("select2-focused");
+        },
+
+        // multi
+        updateSelection: function (data) {
+            var ids = [], filtered = [], self = this;
+
+            // filter out duplicates
+            $(data).each(function () {
+                if (indexOf(self.id(this), ids) < 0) {
+                    ids.push(self.id(this));
+                    filtered.push(this);
+                }
+            });
+            data = filtered;
+
+            this.selection.find(".select2-search-choice").remove();
+            $(data).each(function () {
+                self.addSelectedChoice(this);
+            });
+            self.postprocessResults();
+        },
+
+        // multi
+        tokenize: function() {
+            var input = this.search.val();
+            input = this.opts.tokenizer.call(this, input, this.data(), this.bind(this.onSelect), this.opts);
+            if (input != null) { // loose != null also rules out undefined
+                this.search.val(input);
+                if (input.length > 0) {
+                    this.open();
+                }
+            }
+        },
+
+        // multi
+        onSelect: function (data, options) {
+
+            if (!this.triggerSelect(data) || data.text === "") { return; }
+
+            this.addSelectedChoice(data);
+
+            this.opts.element.trigger({ type: "selected", val: this.id(data), choice: data });
+
+            // keep track of the search's value before it gets cleared
+            this.nextSearchTerm = this.opts.nextSearchTerm(data, this.search.val());
+            this.moveHighlight(1);
+            if (this.select || !this.opts.closeOnSelect) this.postprocessResults(data, false, this.opts.closeOnSelect===true);
+
+            if (this.opts.closeOnSelect) {
+                this.clearSearch();
+                this.updateResults();
+                this.close();
+                this.search.width(10);
+            } else {
+                if (this.countSelectableResults()>0) {
+                    this.search.width(10);
+                    this.resizeSearch();
+                    if (this.getMaximumSelectionSize() > 0 && this.val().length >= this.getMaximumSelectionSize()) {
+                        // if we reached max selection size repaint the results so choices
+                        // are replaced with the max selection reached message
+                        this.updateResults(true);
+                    } else {
+                        // initializes search's value with nextSearchTerm and update search result
+                        if(this.nextSearchTerm != undefined){
+                            this.search.val(this.nextSearchTerm);
+                            this.updateResults();
+                            this.search.select();
+                        }
+                    }
+                    this.positionDropdown();
+                } else {
+                    // if nothing left to select close
+                    this.close();
+                    this.search.width(10);
+                }
+            }
+
+            // since it's not possible to select an element that has already been
+            // added, we do not need to check if this is a new element before firing change
+            this.triggerChange({ added: data });
+
+            if (!options || !options.noFocus)
+                this.focusSearch();
+        },
+
+        // multi
+        cancel: function () {
+            this.close();
+            this.focusSearch();
+        },
+
+        addSelectedChoice: function (data) {
+            var enableChoice = !data.locked,
+                enabledItem = $(
+                    "<li class='select2-search-choice'>" +
+                    "    <div></div>" +
+                    "    <a href='#' class='select2-search-choice-close' tabindex='-1'></a>" +
+                    "</li>"),
+                disabledItem = $(
+                    "<li class='select2-search-choice select2-locked'>" +
+                    "<div></div>" +
+                    "</li>");
+            var choice = enableChoice ? enabledItem : disabledItem,
+                id = this.id(data),
+                val = this.getVal(),
+                formatted,
+                cssClass;
+
+            formatted=this.opts.formatSelection(data, choice.find("div"), this.opts.escapeMarkup);
+            if (formatted != undefined) {
+                choice.find("div").replaceWith("<div>"+formatted+"</div>");
+            }
+            cssClass=this.opts.formatSelectionCssClass(data, choice.find("div"));
+            if (cssClass != undefined) {
+                choice.addClass(cssClass);
+            }
+
+            if(enableChoice){
+              choice.find(".select2-search-choice-close")
+                  .on("mousedown", killEvent)
+                  .on("click dblclick", this.bind(function (e) {
+                  if (!this.isInterfaceEnabled()) return;
+
+                  this.unselect($(e.target));
+                  this.selection.find(".select2-search-choice-focus").removeClass("select2-search-choice-focus");
+                  killEvent(e);
+                  this.close();
+                  this.focusSearch();
+              })).on("focus", this.bind(function () {
+                  if (!this.isInterfaceEnabled()) return;
+                  this.container.addClass("select2-container-active");
+                  this.dropdown.addClass("select2-drop-active");
+              }));
+            }
+
+            choice.data("select2-data", data);
+            choice.insertBefore(this.searchContainer);
+
+            val.push(id);
+            this.setVal(val);
+        },
+
+        // multi
+        unselect: function (selected) {
+            var val = this.getVal(),
+                data,
+                index;
+            selected = selected.closest(".select2-search-choice");
+
+            if (selected.length === 0) {
+                throw "Invalid argument: " + selected + ". Must be .select2-search-choice";
+            }
+
+            data = selected.data("select2-data");
+
+            if (!data) {
+                // prevent a race condition: when the 'x' is clicked rapidly and repeatedly, the event can be
+                // queued and invoked on an element that has already been removed
+                return;
+            }
+
+            var evt = $.Event("select2-removing");
+            evt.val = this.id(data);
+            evt.choice = data;
+            this.opts.element.trigger(evt);
+
+            if (evt.isDefaultPrevented()) {
+                return false;
+            }
+
+            while((index = indexOf(this.id(data), val)) >= 0) {
+                val.splice(index, 1);
+                this.setVal(val);
+                if (this.select) this.postprocessResults();
+            }
+
+            selected.remove();
+
+            this.opts.element.trigger({ type: "select2-removed", val: this.id(data), choice: data });
+            this.triggerChange({ removed: data });
+
+            return true;
+        },
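+
+        // Removal can be vetoed from outside via the event above (sketch,
+        // hypothetical element id and value):
+        //
+        //     $("#example").on("select2-removing", function (e) {
+        //         if (e.val === "locked-id") { e.preventDefault(); }
+        //     });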
+
+        // multi
+        postprocessResults: function (data, initial, noHighlightUpdate) {
+            var val = this.getVal(),
+                choices = this.results.find(".select2-result"),
+                compound = this.results.find(".select2-result-with-children"),
+                self = this;
+
+            choices.each2(function (i, choice) {
+                var id = self.id(choice.data("select2-data"));
+                if (indexOf(id, val) >= 0) {
+                    choice.addClass("select2-selected");
+                    // mark all children of the selected parent as selected
+                    choice.find(".select2-result-selectable").addClass("select2-selected");
+                }
+            });
+
+            compound.each2(function(i, choice) {
+                // hide an optgroup if it doesn't have any selectable children
+                if (!choice.is('.select2-result-selectable')
+                    && choice.find(".select2-result-selectable:not(.select2-selected)").length === 0) {
+                    choice.addClass("select2-selected");
+                }
+            });
+
+            if (this.highlight() == -1 && noHighlightUpdate !== false){
+                self.highlight(0);
+            }
+
+            // if all results are chosen, render formatNoMatches
+            if (!this.opts.createSearchChoice && choices.filter('.select2-result:not(.select2-selected)').length === 0) {
+                if (!data || (data && !data.more && this.results.find(".select2-no-results").length === 0)) {
+                    if (checkFormatter(self.opts.formatNoMatches, "formatNoMatches")) {
+                        this.results.append("<li class='select2-no-results'>" + evaluate(self.opts.formatNoMatches, self.opts.element, self.search.val()) + "</li>");
+                    }
+                }
+            }
+
+        },
+
+        // multi
+        getMaxSearchWidth: function() {
+            return this.selection.width() - getSideBorderPadding(this.search);
+        },
+
+        // multi
+        resizeSearch: function () {
+            var minimumWidth, left, maxWidth, containerLeft, searchWidth,
+                sideBorderPadding = getSideBorderPadding(this.search);
+
+            minimumWidth = measureTextWidth(this.search) + 10;
+
+            left = this.search.offset().left;
+
+            maxWidth = this.selection.width();
+            containerLeft = this.selection.offset().left;
+
+            searchWidth = maxWidth - (left - containerLeft) - sideBorderPadding;
+
+            if (searchWidth < minimumWidth) {
+                searchWidth = maxWidth - sideBorderPadding;
+            }
+
+            if (searchWidth < 40) {
+                searchWidth = maxWidth - sideBorderPadding;
+            }
+
+            if (searchWidth <= 0) {
+              searchWidth = minimumWidth;
+            }
+
+            this.search.width(Math.floor(searchWidth));
+        },
+
+        // multi
+        getVal: function () {
+            var val;
+            if (this.select) {
+                val = this.select.val();
+                return val === null ? [] : val;
+            } else {
+                val = this.opts.element.val();
+                return splitVal(val, this.opts.separator);
+            }
+        },
+
+        // multi
+        setVal: function (val) {
+            var unique;
+            if (this.select) {
+                this.select.val(val);
+            } else {
+                unique = [];
+                // filter out duplicates
+                $(val).each(function () {
+                    if (indexOf(this, unique) < 0) unique.push(this);
+                });
+                this.opts.element.val(unique.length === 0 ? "" : unique.join(this.opts.separator));
+            }
+        },
+
+        // multi
+        buildChangeDetails: function (old, current) {
+            // work on copies so the caller's arrays are not mutated
+            current = current.slice(0);
+            old = old.slice(0);
+
+            // remove intersection from each array
+            for (var i = 0; i < current.length; i++) {
+                for (var j = 0; j < old.length; j++) {
+                    if (equal(this.opts.id(current[i]), this.opts.id(old[j]))) {
+                        current.splice(i, 1);
+                        if (i > 0) {
+                            i--;
+                        }
+                        old.splice(j, 1);
+                        j--;
+                    }
+                }
+            }
+
+            return {added: current, removed: old};
+        },
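+
+        // Worked example: old = [{id: 1}, {id: 2}], current = [{id: 2}, {id: 3}]
+        // -> the shared {id: 2} is removed from both copies, yielding
+        // { added: [{id: 3}], removed: [{id: 1}] }.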
+
+
+        // multi
+        val: function (val, triggerChange) {
+            var oldData, self=this;
+
+            if (arguments.length === 0) {
+                return this.getVal();
+            }
+
+            oldData=this.data();
+            if (!oldData.length) oldData=[];
+
+            // val is an id. !val is true for [undefined,null,'',0] - 0 is legal
+            if (!val && val !== 0) {
+                this.opts.element.val("");
+                this.updateSelection([]);
+                this.clearSearch();
+                if (triggerChange) {
+                    this.triggerChange({added: this.data(), removed: oldData});
+                }
+                return;
+            }
+
+            // val is a list of ids
+            this.setVal(val);
+
+            if (this.select) {
+                this.opts.initSelection(this.select, this.bind(this.updateSelection));
+                if (triggerChange) {
+                    this.triggerChange(this.buildChangeDetails(oldData, this.data()));
+                }
+            } else {
+                if (this.opts.initSelection === undefined) {
+                    throw new Error("val() cannot be called if initSelection() is not defined");
+                }
+
+                this.opts.initSelection(this.opts.element, function(data){
+                    var ids=$.map(data, self.id);
+                    self.setVal(ids);
+                    self.updateSelection(data);
+                    self.clearSearch();
+                    if (triggerChange) {
+                        self.triggerChange(self.buildChangeDetails(oldData, self.data()));
+                    }
+                });
+            }
+            this.clearSearch();
+        },
+
+        // multi
+        onSortStart: function() {
+            if (this.select) {
+                throw new Error("Sorting of elements is not supported when attached to <select>. Attach to <input type='hidden'/> instead.");
+            }
+
+            // collapse search field into 0 width so its container can be collapsed as well
+            this.search.width(0);
+            // hide the container
+            this.searchContainer.hide();
+        },
+
+        // multi
+        onSortEnd:function() {
+
+            var val=[], self=this;
+
+            // show search and move it to the end of the list
+            this.searchContainer.show();
+            // make sure the search container is the last item in the list
+            this.searchContainer.appendTo(this.searchContainer.parent());
+            // since we collapsed the width in dragStarted, we resize it here
+            this.resizeSearch();
+
+            // update selection
+            this.selection.find(".select2-search-choice").each(function() {
+                val.push(self.opts.id($(this).data("select2-data")));
+            });
+            this.setVal(val);
+            this.triggerChange();
+        },
+
+        // multi
+        data: function(values, triggerChange) {
+            var self=this, ids, old;
+            if (arguments.length === 0) {
+                 return this.selection
+                     .children(".select2-search-choice")
+                     .map(function() { return $(this).data("select2-data"); })
+                     .get();
+            } else {
+                old = this.data();
+                if (!values) { values = []; }
+                ids = $.map(values, function(e) { return self.opts.id(e); });
+                this.setVal(ids);
+                this.updateSelection(values);
+                this.clearSearch();
+                if (triggerChange) {
+                    this.triggerChange(this.buildChangeDetails(old, this.data()));
+                }
+            }
+        }
+    });
+
+    $.fn.select2 = function () {
+
+        var args = Array.prototype.slice.call(arguments, 0),
+            opts,
+            select2,
+            method, value, multiple,
+            allowedMethods = ["val", "destroy", "opened", "open", "close", "focus", "isFocused", "container", "dropdown", "onSortStart", "onSortEnd", "enable", "disable", "readonly", "positionDropdown", "data", "search"],
+            valueMethods = ["opened", "isFocused", "container", "dropdown"],
+            propertyMethods = ["val", "data"],
+            methodsMap = { search: "externalSearch" };
+
+        this.each(function () {
+            if (args.length === 0 || typeof(args[0]) === "object") {
+                opts = args.length === 0 ? {} : $.extend({}, args[0]);
+                opts.element = $(this);
+
+                if (opts.element.get(0).tagName.toLowerCase() === "select") {
+                    multiple = opts.element.prop("multiple");
+                } else {
+                    multiple = opts.multiple || false;
+                    if ("tags" in opts) {opts.multiple = multiple = true;}
+                }
+
+                select2 = multiple ? new window.Select2["class"].multi() : new window.Select2["class"].single();
+                select2.init(opts);
+            } else if (typeof(args[0]) === "string") {
+
+                if (indexOf(args[0], allowedMethods) < 0) {
+                    throw "Unknown method: " + args[0];
+                }
+
+                value = undefined;
+                select2 = $(this).data("select2");
+                if (select2 === undefined) return;
+
+                method=args[0];
+
+                if (method === "container") {
+                    value = select2.container;
+                } else if (method === "dropdown") {
+                    value = select2.dropdown;
+                } else {
+                    if (methodsMap[method]) method = methodsMap[method];
+
+                    value = select2[method].apply(select2, args.slice(1));
+                }
+                if (indexOf(args[0], valueMethods) >= 0
+                    || (indexOf(args[0], propertyMethods) >= 0 && args.length == 1)) {
+                    return false; // abort the iteration, ready to return first matched value
+                }
+            } else {
+                throw "Invalid arguments to select2 plugin: " + args;
+            }
+        });
+        return (value === undefined) ? this : value;
+    };
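+
+    // Usage sketch (hypothetical element ids):
+    //
+    //     $("#e1").select2();                            // initialize with defaults
+    //     $("#e1").select2({ placeholder: "Pick one" }); // or with options
+    //     $("#e1").select2("val");                       // getter
+    //     $("#e1").select2("val", "CA");                 // setter
+    //     $("#e1").select2("open");
+    //     $("#e1").select2("destroy");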
+
+    // plugin defaults, accessible to users
+    $.fn.select2.defaults = {
+        width: "copy",
+        loadMorePadding: 0,
+        closeOnSelect: true,
+        openOnEnter: true,
+        containerCss: {},
+        dropdownCss: {},
+        containerCssClass: "",
+        dropdownCssClass: "",
+        formatResult: function(result, container, query, escapeMarkup) {
+            var markup=[];
+            markMatch(result.text, query.term, markup, escapeMarkup);
+            return markup.join("");
+        },
+        formatSelection: function (data, container, escapeMarkup) {
+            return data ? escapeMarkup(data.text) : undefined;
+        },
+        sortResults: function (results, container, query) {
+            return results;
+        },
+        formatResultCssClass: function(data) {return data.css;},
+        formatSelectionCssClass: function(data, container) {return undefined;},
+        minimumResultsForSearch: 0,
+        minimumInputLength: 0,
+        maximumInputLength: null,
+        maximumSelectionSize: 0,
+        id: function (e) { return e == undefined ? null : e.id; },
+        matcher: function(term, text) {
+            return stripDiacritics(''+text).toUpperCase().indexOf(stripDiacritics(''+term).toUpperCase()) >= 0;
+        },
+        separator: ",",
+        tokenSeparators: [],
+        tokenizer: defaultTokenizer,
+        escapeMarkup: defaultEscapeMarkup,
+        blurOnChange: false,
+        selectOnBlur: false,
+        adaptContainerCssClass: function(c) { return c; },
+        adaptDropdownCssClass: function(c) { return null; },
+        nextSearchTerm: function(selectedObject, currentSearchTerm) { return undefined; },
+        searchInputPlaceholder: '',
+        createSearchChoicePosition: 'top',
+        shouldFocusInput: function (instance) {
+            // Attempt to detect touch devices
+            var supportsTouchEvents = (('ontouchstart' in window) ||
+                                       (navigator.msMaxTouchPoints > 0));
+
+            // Only devices which support touch events should be special cased
+            if (!supportsTouchEvents) {
+                return true;
+            }
+
+            // Never focus the input if search is disabled
+            if (instance.opts.minimumResultsForSearch < 0) {
+                return false;
+            }
+
+            return true;
+        }
+    };
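+
+    // These defaults can be overridden globally before any widget is created
+    // (sketch):
+    //
+    //     $.fn.select2.defaults.width = "resolve";
+    //     $.fn.select2.defaults.minimumResultsForSearch = 10;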
+
+    $.fn.select2.locales = [];
+
+    $.fn.select2.locales['en'] = {
+         formatMatches: function (matches) { if (matches === 1) { return "One result is available, press enter to select it."; } return matches + " results are available, use up and down arrow keys to navigate."; },
+         formatNoMatches: function () { return "No matches found"; },
+         formatAjaxError: function (jqXHR, textStatus, errorThrown) { return "Loading failed"; },
+         formatInputTooShort: function (input, min) { var n = min - input.length; return "Please enter " + n + " or more character" + (n == 1 ? "" : "s"); },
+         formatInputTooLong: function (input, max) { var n = input.length - max; return "Please delete " + n + " character" + (n == 1 ? "" : "s"); },
+         formatSelectionTooBig: function (limit) { return "You can only select " + limit + " item" + (limit == 1 ? "" : "s"); },
+         formatLoadMore: function (pageNumber) { return "Loading more results…"; },
+         formatSearching: function () { return "Searching…"; }
+    };
+
+    $.extend($.fn.select2.defaults, $.fn.select2.locales['en']);
+
+    $.fn.select2.ajaxDefaults = {
+        transport: $.ajax,
+        params: {
+            type: "GET",
+            cache: false,
+            dataType: "json"
+        }
+    };
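+
+    // Remote-data sketch (hypothetical endpoint returning { items: [...] },
+    // where each item carries the id and text the defaults above expect):
+    //
+    //     $("#remote").select2({
+    //         ajax: {
+    //             url: "/search",
+    //             data: function (term, page) { return { q: term, page: page }; },
+    //             results: function (data, page) { return { results: data.items }; }
+    //         }
+    //     });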
+
+    // exports
+    window.Select2 = {
+        query: {
+            ajax: ajax,
+            local: local,
+            tags: tags
+        }, util: {
+            debounce: debounce,
+            markMatch: markMatch,
+            escapeMarkup: defaultEscapeMarkup,
+            stripDiacritics: stripDiacritics
+        }, "class": {
+            "abstract": AbstractSelect2,
+            "single": SingleSelect2,
+            "multi": MultiSelect2
+        }
+    };
+
+}(jQuery));
diff --git a/client/galaxy/scripts/libs/raven.js b/client/galaxy/scripts/libs/raven.js
new file mode 100644
index 0000000..71bcad5
--- /dev/null
+++ b/client/galaxy/scripts/libs/raven.js
@@ -0,0 +1,2510 @@
+/*! Raven.js 3.3.0 (74c6c03) | github.com/getsentry/raven-js */
+
+/*
+ * Includes TraceKit
+ * https://github.com/getsentry/TraceKit
+ *
+ * Copyright 2016 Matt Robenolt and other contributors
+ * Released under the BSD license
+ * https://github.com/getsentry/raven-js/blob/master/LICENSE
+ *
+ */
+
+(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Raven = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)r [...]
+exports = module.exports = stringify
+exports.getSerialize = serializer
+
+function stringify(obj, replacer, spaces, cycleReplacer) {
+  return JSON.stringify(obj, serializer(replacer, cycleReplacer), spaces)
+}
+
+function serializer(replacer, cycleReplacer) {
+  var stack = [], keys = []
+
+  if (cycleReplacer == null) cycleReplacer = function(key, value) {
+    if (stack[0] === value) return "[Circular ~]"
+    return "[Circular ~." + keys.slice(0, stack.indexOf(value)).join(".") + "]"
+  }
+
+  return function(key, value) {
+    if (stack.length > 0) {
+      var thisPos = stack.indexOf(this)
+      ~thisPos ? stack.splice(thisPos + 1) : stack.push(this)
+      ~thisPos ? keys.splice(thisPos, Infinity, key) : keys.push(key)
+      if (~stack.indexOf(value)) value = cycleReplacer.call(this, key, value)
+    }
+    else stack.push(value)
+
+    return replacer == null ? value : replacer.call(this, key, value)
+  }
+}
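+
+// Example of the cycle handling above (sketch):
+//
+//     var a = { name: "a" }; a.self = a;
+//     stringify(a); // '{"name":"a","self":"[Circular ~]"}'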
+
+},{}],2:[function(_dereq_,module,exports){
+'use strict';
+
+function RavenConfigError(message) {
+    this.name = 'RavenConfigError';
+    this.message = message;
+}
+RavenConfigError.prototype = new Error();
+RavenConfigError.prototype.constructor = RavenConfigError;
+
+module.exports = RavenConfigError;
+
+},{}],3:[function(_dereq_,module,exports){
+'use strict';
+
+var wrapMethod = function(console, level, callback) {
+    var originalConsoleLevel = console[level];
+    var originalConsole = console;
+
+    if (!(level in console)) {
+        return;
+    }
+
+    var sentryLevel = level === 'warn'
+        ? 'warning'
+        : level;
+
+    console[level] = function () {
+        var args = [].slice.call(arguments);
+
+        var msg = '' + args.join(' ');
+        var data = {level: sentryLevel, logger: 'console', extra: {'arguments': args}};
+        callback && callback(msg, data);
+
+        // this fails for some browsers. :(
+        if (originalConsoleLevel) {
+            // IE9 doesn't allow calling apply on console functions directly
+            // See: https://stackoverflow.com/questions/5472938/does-ie9-support-console-log-and-is-it-a-real-function#answer-5473193
+            Function.prototype.apply.call(
+                originalConsoleLevel,
+                originalConsole,
+                args
+            );
+        }
+    };
+};
+
+module.exports = {
+    wrapMethod: wrapMethod
+};
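+
+// Sketch: route console.warn through a callback while still writing to the
+// original console (assumes a browser console object):
+//
+//     wrapMethod(window.console, 'warn', function (msg, data) {
+//         // msg is the joined arguments; data.level is 'warning'
+//     });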
+
+},{}],4:[function(_dereq_,module,exports){
+/*global XDomainRequest:false*/
+'use strict';
+
+var TraceKit = _dereq_(7);
+var RavenConfigError = _dereq_(2);
+var utils = _dereq_(6);
+var stringify = _dereq_(1);
+
+var isFunction = utils.isFunction;
+var isUndefined = utils.isUndefined;
+var isError = utils.isError;
+var isEmptyObject = utils.isEmptyObject;
+var hasKey = utils.hasKey;
+var joinRegExp = utils.joinRegExp;
+var each = utils.each;
+var objectMerge = utils.objectMerge;
+var truncate = utils.truncate;
+var urlencode = utils.urlencode;
+var uuid4 = utils.uuid4;
+var htmlTreeAsString = utils.htmlTreeAsString;
+var parseUrl = utils.parseUrl;
+var isString = utils.isString;
+
+var wrapConsoleMethod = _dereq_(3).wrapMethod;
+
+var dsnKeys = 'source protocol user pass host port path'.split(' '),
+    dsnPattern = /^(?:(\w+):)?\/\/(?:(\w+)(:\w+)?@)?([\w\.-]+)(?::(\d+))?(\/.*)/;
+
+function now() {
+    return +new Date();
+}
+
+// First, check for JSON support
+// If there is no JSON, we no-op the core features of Raven
+// since JSON is required to encode the payload
+function Raven() {
+    this._hasJSON = !!(typeof JSON === 'object' && JSON.stringify);
+    // Raven can run in contexts where there's no document (react-native)
+    this._hasDocument = typeof document !== 'undefined';
+    this._lastCapturedException = null;
+    this._lastEventId = null;
+    this._globalServer = null;
+    this._globalKey = null;
+    this._globalProject = null;
+    this._globalContext = {};
+    this._globalOptions = {
+        logger: 'javascript',
+        ignoreErrors: [],
+        ignoreUrls: [],
+        whitelistUrls: [],
+        includePaths: [],
+        crossOrigin: 'anonymous',
+        collectWindowErrors: true,
+        maxMessageLength: 0,
+        stackTraceLimit: 50
+    };
+    this._ignoreOnError = 0;
+    this._isRavenInstalled = false;
+    this._originalErrorStackTraceLimit = Error.stackTraceLimit;
+    // capture references to window.console *and* all its methods first
+    // before the console plugin has a chance to monkey patch
+    this._originalConsole = window.console || {};
+    this._originalConsoleMethods = {};
+    this._plugins = [];
+    this._startTime = now();
+    this._wrappedBuiltIns = [];
+    this._breadcrumbs = [];
+    this._breadcrumbLimit = 20;
+    this._lastCapturedEvent = null;
+    this._keypressTimeout;
+    this._location = window.location;
+    this._lastHref = this._location && this._location.href;
+
+    for (var method in this._originalConsole) {  // eslint-disable-line guard-for-in
+      this._originalConsoleMethods[method] = this._originalConsole[method];
+    }
+}
+
+/*
+ * The core Raven singleton
+ *
+ * @this {Raven}
+ */
+
+Raven.prototype = {
+    // Hardcode version string so that raven source can be loaded directly via
+    // webpack (using a build step causes webpack #1617). Grunt verifies that
+    // this value matches package.json during build.
+    //   See: https://github.com/getsentry/raven-js/issues/465
+    VERSION: '3.3.0',
+
+    debug: false,
+
+    TraceKit: TraceKit, // alias to TraceKit
+
+    /*
+     * Configure Raven with a DSN and extra options
+     *
+     * @param {string} dsn The public Sentry DSN
+     * @param {object} options Optional set of global options [optional]
+     * @return {Raven}
+     */
+    config: function(dsn, options) {
+        var self = this;
+
+        if (this._globalServer) {
+            this._logDebug('error', 'Error: Raven has already been configured');
+            return this;
+        }
+        if (!dsn) return this;
+
+        // merge in options
+        if (options) {
+            each(options, function(key, value){
+                // tags and extra are special and need to be put into context
+                if (key === 'tags' || key === 'extra') {
+                    self._globalContext[key] = value;
+                } else {
+                    self._globalOptions[key] = value;
+                }
+            });
+        }
+
+        var uri = this._parseDSN(dsn),
+            lastSlash = uri.path.lastIndexOf('/'),
+            path = uri.path.substr(1, lastSlash);
+
+        this._dsn = dsn;
+
+        // "Script error." is hard coded into browsers for errors that it can't read.
+        // this is the result of a script being pulled in from an external domain and CORS.
+        this._globalOptions.ignoreErrors.push(/^Script error\.?$/);
+        this._globalOptions.ignoreErrors.push(/^Javascript error: Script error\.? on line 0$/);
+
+        // join regexp rules into one big rule
+        this._globalOptions.ignoreErrors = joinRegExp(this._globalOptions.ignoreErrors);
+        this._globalOptions.ignoreUrls = this._globalOptions.ignoreUrls.length ? joinRegExp(this._globalOptions.ignoreUrls) : false;
+        this._globalOptions.whitelistUrls = this._globalOptions.whitelistUrls.length ? joinRegExp(this._globalOptions.whitelistUrls) : false;
+        this._globalOptions.includePaths = joinRegExp(this._globalOptions.includePaths);
+
+        this._globalKey = uri.user;
+        this._globalSecret = uri.pass && uri.pass.substr(1);
+        this._globalProject = uri.path.substr(lastSlash + 1);
+
+        this._globalServer = this._getGlobalServer(uri);
+
+        this._globalEndpoint = this._globalServer +
+            '/' + path + 'api/' + this._globalProject + '/store/';
+
+        TraceKit.collectWindowErrors = !!this._globalOptions.collectWindowErrors;
+
+        // return for chaining
+        return this;
+    },
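+
+    // Typical bootstrap (sketch; placeholder DSN, not a real key):
+    //
+    //     Raven.config('https://<public-key>@sentry.example.com/1').install();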
+
+    /*
+     * Installs a global window.onerror error handler
+     * to capture and report uncaught exceptions.
+     * At this point, install() is required to be called due
+     * to the way TraceKit is set up.
+     *
+     * @return {Raven}
+     */
+    install: function() {
+        var self = this;
+        if (this.isSetup() && !this._isRavenInstalled) {
+            TraceKit.report.subscribe(function () {
+                self._handleOnErrorStackInfo.apply(self, arguments);
+            });
+            this._wrapBuiltIns();
+
+            // Install all of the plugins
+            this._drainPlugins();
+
+            this._isRavenInstalled = true;
+        }
+
+        Error.stackTraceLimit = this._globalOptions.stackTraceLimit;
+        return this;
+    },
+
+    /*
+     * Wrap code within a context, executed immediately, so Raven
+     * can capture errors reliably across domains.
+     *
+     * @param {object} options A specific set of options for this context [optional]
+     * @param {function} func The callback to be immediately executed within the context
+     * @param {array} args An array of arguments to be called with the callback [optional]
+     */
+    context: function(options, func, args) {
+        if (isFunction(options)) {
+            args = func || [];
+            func = options;
+            options = undefined;
+        }
+
+        return this.wrap(options, func).apply(this, args);
+    },
+
+    /*
+     * Wrap code within a context and returns back a new function to be executed
+     *
+     * @param {object} options A specific set of options for this context [optional]
+     * @param {function} func The function to be wrapped in a new context
+     * @param {function} _before A function to call before the try/catch wrapper [optional, private]
+     * @return {function} The newly wrapped functions with a context
+     */
+    wrap: function(options, func, _before) {
+        var self = this;
+        // 1 argument has been passed, and it's not a function
+        // so just return it
+        if (isUndefined(func) && !isFunction(options)) {
+            return options;
+        }
+
+        // options is optional
+        if (isFunction(options)) {
+            func = options;
+            options = undefined;
+        }
+
+        // At this point, we've passed along 2 arguments, and the second one
+        // is not a function either, so we'll just return the second argument.
+        if (!isFunction(func)) {
+            return func;
+        }
+
+        // We don't wanna wrap it twice!
+        try {
+            if (func.__raven__) {
+                return func;
+            }
+        } catch (e) {
+            // Just accessing the __raven__ prop in some Selenium environments
+            // can cause a "Permission denied" exception (see raven-js#495).
+            // Bail on wrapping and return the function as-is (defers to window.onerror).
+            return func;
+        }
+
+        // If this has already been wrapped in the past, return that
+        if (func.__raven_wrapper__) {
+            return func.__raven_wrapper__;
+        }
+        }
+
+        function wrapped() {
+            var args = [], i = arguments.length,
+                deep = !options || options && options.deep !== false;
+
+            if (_before && isFunction(_before)) {
+                _before.apply(this, arguments);
+            }
+
+            // Recursively wrap all of a function's arguments that are
+            // functions themselves.
+            while(i--) args[i] = deep ? self.wrap(options, arguments[i]) : arguments[i];
+
+            try {
+                return func.apply(this, args);
+            } catch(e) {
+                self._ignoreNextOnError();
+                self.captureException(e, options);
+                throw e;
+            }
+        }
+
+        // copy over properties of the old function
+        for (var property in func) {
+            if (hasKey(func, property)) {
+                wrapped[property] = func[property];
+            }
+        }
+        wrapped.prototype = func.prototype;
+
+        func.__raven_wrapper__ = wrapped;
+        // Signal that this function has been wrapped already
+        // for both debugging and to prevent it from being wrapped twice
+        wrapped.__raven__ = true;
+        wrapped.__inner__ = func;
+
+        return wrapped;
+    },
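+
+    // Sketch: wrap a callback so a thrown error is reported and re-raised
+    // (mightThrow is a hypothetical function):
+    //
+    //     setTimeout(Raven.wrap(function () {
+    //         mightThrow();
+    //     }), 1000);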
+
+    /*
+     * Uninstalls the global error handler.
+     *
+     * @return {Raven}
+     */
+    uninstall: function() {
+        TraceKit.report.uninstall();
+
+        this._restoreBuiltIns();
+
+        Error.stackTraceLimit = this._originalErrorStackTraceLimit;
+        this._isRavenInstalled = false;
+
+        return this;
+    },
+
+    /*
+     * Manually capture an exception and send it over to Sentry
+     *
+     * @param {error} ex An exception to be logged
+     * @param {object} options A specific set of options for this error [optional]
+     * @return {Raven}
+     */
+    captureException: function(ex, options) {
+        // If something other than an Error is passed through, recall as a message instead
+        if (!isError(ex)) return this.captureMessage(ex, options);
+
+        // Store the raw exception object for potential debugging and introspection
+        this._lastCapturedException = ex;
+
+        // TraceKit.report will re-raise any exception passed to it,
+        // which means you have to wrap it in try/catch. Instead, we
+        // can wrap it here and only re-raise if TraceKit.report
+        // raises an exception different from the one we asked to
+        // report on.
+        try {
+            var stack = TraceKit.computeStackTrace(ex);
+            this._handleStackInfo(stack, options);
+        } catch(ex1) {
+            if(ex !== ex1) {
+                throw ex1;
+            }
+        }
+
+        return this;
+    },
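+
+    // Sketch (doSomething is a hypothetical function):
+    //
+    //     try {
+    //         doSomething();
+    //     } catch (e) {
+    //         Raven.captureException(e, { tags: { section: 'checkout' } });
+    //     }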
+
+    /*
+     * Manually send a message to Sentry
+     *
+     * @param {string} msg A plain message to be captured in Sentry
+     * @param {object} options A specific set of options for this message [optional]
+     * @return {Raven}
+     */
+    captureMessage: function(msg, options) {
+        // config() automagically converts ignoreErrors from a list to a RegExp so we need to test for an
+        // early call; we'll error on the side of logging anything called before configuration since it's
+        // probably something you should see:
+        if (!!this._globalOptions.ignoreErrors.test && this._globalOptions.ignoreErrors.test(msg)) {
+            return;
+        }
+
+        // Fire away!
+        this._send(
+            objectMerge({
+                message: msg + ''  // Make sure it's actually a string
+            }, options)
+        );
+
+        return this;
+    },
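+
+    // Minimal sketch; the level shown is an illustrative option, not a requirement.
+    //
+    //   Raven.captureMessage('upload finished with warnings', {level: 'warning'});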
+
+    captureBreadcrumb: function (obj) {
+        var crumb = objectMerge({
+            timestamp: now() / 1000
+        }, obj);
+
+        this._breadcrumbs.push(crumb);
+        if (this._breadcrumbs.length > this._breadcrumbLimit) {
+            this._breadcrumbs.shift();
+        }
+    },
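+
+    // Sketch of a manual breadcrumb; the keys mirror what the automatic
+    // instrumentation elsewhere in this file records, but the values are made up.
+    //
+    //   Raven.captureBreadcrumb({
+    //       category: 'auth',
+    //       message: 'user logged in',
+    //       data: {method: 'password'}
+    //   });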
+
+    addPlugin: function(plugin /*arg1, arg2, ... argN*/) {
+        var pluginArgs = Array.prototype.slice.call(arguments, 1);
+
+        this._plugins.push([plugin, pluginArgs]);
+        if (this._isRavenInstalled) {
+            this._drainPlugins();
+        }
+
+        return this;
+    },
+
+    /*
+     * Set/clear a user to be sent along with the payload.
+     *
+     * @param {object} user An object representing user data [optional]
+     * @return {Raven}
+     */
+    setUserContext: function(user) {
+        // Intentionally do not merge here since that's an unexpected behavior.
+        this._globalContext.user = user;
+
+        return this;
+    },
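+
+    // Usage sketch (placeholder values): attaches user info to subsequent events;
+    // calling it with no argument clears the user, since the value is replaced,
+    // not merged.
+    //
+    //   Raven.setUserContext({id: '42', username: 'jdoe'});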
+
+    /*
+     * Merge extra attributes to be sent along with the payload.
+     *
+     * @param {object} extra An object representing extra data [optional]
+     * @return {Raven}
+     */
+    setExtraContext: function(extra) {
+        this._mergeContext('extra', extra);
+
+        return this;
+    },
+
+    /*
+     * Merge tags to be sent along with the payload.
+     *
+     * @param {object} tags An object representing tags [optional]
+     * @return {Raven}
+     */
+    setTagsContext: function(tags) {
+        this._mergeContext('tags', tags);
+
+        return this;
+    },
+
+    /*
+     * Clear all of the context.
+     *
+     * @return {Raven}
+     */
+    clearContext: function() {
+        this._globalContext = {};
+
+        return this;
+    },
+
+    /*
+     * Get a copy of the current context. This cannot be mutated.
+     *
+     * @return {object} copy of context
+     */
+    getContext: function() {
+        // JSON round-trip produces a deep copy, so callers can't mutate our state
+        return JSON.parse(stringify(this._globalContext));
+    },
+
+
+    /*
+     * Set environment of application
+     *
+     * @param {string} environment Typically something like 'production'.
+     * @return {Raven}
+     */
+    setEnvironment: function(environment) {
+        this._globalOptions.environment = environment;
+
+        return this;
+    },
+
+    /*
+     * Set release version of application
+     *
+     * @param {string} release Typically something like a git SHA to identify version
+     * @return {Raven}
+     */
+    setRelease: function(release) {
+        this._globalOptions.release = release;
+
+        return this;
+    },
+
+    /*
+     * Set the dataCallback option
+     *
+     * @param {function} callback The callback to run which allows the
+     *                            data blob to be mutated before sending
+     * @return {Raven}
+     */
+    setDataCallback: function(callback) {
+        var original = this._globalOptions.dataCallback;
+        this._globalOptions.dataCallback = isFunction(callback)
+          ? function (data) { return callback(data, original); }
+          : callback;
+
+        return this;
+    },
+
+    /*
+     * Set the shouldSendCallback option
+     *
+     * @param {function} callback The callback to run which allows
+     *                            introspecting the blob before sending
+     * @return {Raven}
+     */
+    setShouldSendCallback: function(callback) {
+        var original = this._globalOptions.shouldSendCallback;
+        this._globalOptions.shouldSendCallback = isFunction(callback)
+            ? function (data) { return callback(data, original); }
+            : callback;
+
+        return this;
+    },
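+
+    // Sketch showing how the previous callback is threaded through as the second
+    // argument so filters compose; the sampling rate here is purely illustrative.
+    //
+    //   Raven.setShouldSendCallback(function (data, original) {
+    //       if (Math.random() >= 0.1) return false;      // drop ~90% of events
+    //       return original ? original(data) : true;     // defer to any prior filter
+    //   });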
+
+    /**
+     * Override the default HTTP transport mechanism that transmits data
+     * to the Sentry server.
+     *
+     * @param {function} transport Function invoked instead of the default
+     *                             `makeRequest` handler.
+     *
+     * @return {Raven}
+     */
+    setTransport: function(transport) {
+        this._globalOptions.transport = transport;
+
+        return this;
+    },
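+
+    // Hedged sketch of a custom transport; the fields shown on opts (url, auth,
+    // data, onSuccess, onError) are the ones _makeRequest below receives, and
+    // `myQueue` is a hypothetical stand-in for an offline buffer.
+    //
+    //   Raven.setTransport(function (opts) {
+    //       myQueue.push({url: opts.url, auth: opts.auth, data: opts.data});
+    //       opts.onSuccess();
+    //   });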
+
+    /*
+     * Get the latest raw exception that was captured by Raven.
+     *
+     * @return {error}
+     */
+    lastException: function() {
+        return this._lastCapturedException;
+    },
+
+    /*
+     * Get the last event id
+     *
+     * @return {string}
+     */
+    lastEventId: function() {
+        return this._lastEventId;
+    },
+
+    /*
+     * Determine if Raven is set up and ready to go.
+     *
+     * @return {boolean}
+     */
+    isSetup: function() {
+        if (!this._hasJSON) return false;  // needs JSON support
+        if (!this._globalServer) {
+            if (!this.ravenNotConfiguredError) {
+              this.ravenNotConfiguredError = true;
+              this._logDebug('error', 'Error: Raven has not been configured.');
+            }
+            return false;
+        }
+        return true;
+    },
+
+    afterLoad: function () {
+        // TODO: remove window dependence?
+
+        // Attempt to initialize Raven on load
+        var RavenConfig = window.RavenConfig;
+        if (RavenConfig) {
+            this.config(RavenConfig.dsn, RavenConfig.config).install();
+        }
+    },
+
+    showReportDialog: function (options) {
+        if (!window.document) // doesn't work without a document (React Native)
+            return;
+
+        options = options || {};
+
+        var lastEventId = options.eventId || this.lastEventId();
+        if (!lastEventId) {
+            throw new RavenConfigError('Missing eventId');
+        }
+
+        var dsn = options.dsn || this._dsn;
+        if (!dsn) {
+            throw new RavenConfigError('Missing DSN');
+        }
+
+        var encode = encodeURIComponent;
+        var qs = '';
+        qs += '?eventId=' + encode(lastEventId);
+        qs += '&dsn=' + encode(dsn);
+
+        var user = options.user || this._globalContext.user;
+        if (user) {
+            if (user.name)  qs += '&name=' + encode(user.name);
+            if (user.email) qs += '&email=' + encode(user.email);
+        }
+
+        var globalServer = this._getGlobalServer(this._parseDSN(dsn));
+
+        var script = document.createElement('script');
+        script.async = true;
+        script.src = globalServer + '/api/embed/error-page/' + qs;
+        (document.head || document.body).appendChild(script);
+    },
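+
+    // Usage sketch: once an event has been captured (so lastEventId() is set)
+    // and a DSN configured, this injects the feedback-dialog script; `err` is
+    // an assumed, previously caught Error.
+    //
+    //   Raven.captureException(err);
+    //   Raven.showReportDialog(); // builds the query string from eventId + dsn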
+
+    /**** Private functions ****/
+    _ignoreNextOnError: function () {
+        var self = this;
+        this._ignoreOnError += 1;
+        setTimeout(function () {
+            // onerror should trigger before setTimeout
+            self._ignoreOnError -= 1;
+        });
+    },
+
+    _triggerEvent: function(eventType, options) {
+        // NOTE: `event` is a native browser thing, so let's avoid conflicting with it
+        var evt, key;
+
+        if (!this._hasDocument)
+            return;
+
+        options = options || {};
+
+        eventType = 'raven' + eventType.substr(0,1).toUpperCase() + eventType.substr(1);
+
+        if (document.createEvent) {
+            evt = document.createEvent('HTMLEvents');
+            evt.initEvent(eventType, true, true);
+        } else {
+            evt = document.createEventObject();
+            evt.eventType = eventType;
+        }
+
+        for (key in options) if (hasKey(options, key)) {
+            evt[key] = options[key];
+        }
+
+        if (document.createEvent) {
+            // IE9 if standards
+            document.dispatchEvent(evt);
+        } else {
+            // IE8 regardless of Quirks or Standards
+            // IE9 if quirks
+            try {
+                document.fireEvent('on' + evt.eventType.toLowerCase(), evt);
+            } catch(e) {
+                // Do nothing
+            }
+        }
+    },
+
+    /**
+     * Wraps addEventListener to capture UI breadcrumbs
+     * @param evtName the event name (e.g. "click")
+     * @returns {Function}
+     * @private
+     */
+    _breadcrumbEventHandler: function(evtName) {
+        var self = this;
+        return function (evt) {
+            // reset keypress timeout; e.g. triggering a 'click' after
+            // a 'keypress' will reset the keypress debounce so that a new
+            // set of keypresses can be recorded
+            self._keypressTimeout = null;
+
+            // It's possible this handler might trigger multiple times for the same
+            // event (e.g. event propagation through node ancestors). Ignore if we've
+            // already captured the event.
+            if (self._lastCapturedEvent === evt)
+                return;
+
+            self._lastCapturedEvent = evt;
+            var elem = evt.target;
+
+            var target;
+
+            // try/catch htmlTreeAsString because it's particularly complicated, and
+            // just accessing the DOM incorrectly can throw an exception in some circumstances.
+            try {
+                target = htmlTreeAsString(elem);
+            } catch (e) {
+                target = '<unknown>';
+            }
+
+            self.captureBreadcrumb({
+                category: 'ui.' + evtName, // e.g. ui.click, ui.input
+                message: target
+            });
+        };
+    },
+
+    /**
+     * Wraps addEventListener to capture keypress UI events
+     * @returns {Function}
+     * @private
+     */
+    _keypressEventHandler: function() {
+        var self = this,
+            debounceDuration = 1000; // milliseconds
+
+        // TODO: if somehow user switches keypress target before
+        //       debounce timeout is triggered, we will only capture
+        //       a single breadcrumb from the FIRST target (acceptable?)
+
+        return function (evt) {
+            var target = evt.target,
+                tagName = target && target.tagName;
+
+            // only consider keypress events on actual input elements
+            // this will disregard keypresses targeting body (e.g. tabbing
+            // through elements, hotkeys, etc)
+            if (!tagName || tagName !== 'INPUT' && tagName !== 'TEXTAREA')
+                return;
+
+            // record first keypress in a series, but ignore subsequent
+            // keypresses until debounce clears
+            var timeout = self._keypressTimeout;
+            if (!timeout) {
+                self._breadcrumbEventHandler('input')(evt);
+            }
+            clearTimeout(timeout);
+            self._keypressTimeout = setTimeout(function () {
+               self._keypressTimeout = null;
+            }, debounceDuration);
+        };
+    },
+
+    /**
+     * Captures a breadcrumb of type "navigation", normalizing input URLs
+     * @param from the originating URL
+     * @param to the target URL
+     * @private
+     */
+    _captureUrlChange: function(from, to) {
+        var parsedLoc = parseUrl(this._location.href);
+        var parsedTo = parseUrl(to);
+        var parsedFrom = parseUrl(from);
+
+        // because onpopstate only tells you the "new" (to) value of location.href, and
+        // not the previous (from) value, we need to track the value of the current URL
+        // state ourselves
+        this._lastHref = to;
+
+        // Use only the path component of the URL if the URL matches the current
+        // document (almost all the time when using pushState)
+        if (parsedLoc.protocol === parsedTo.protocol && parsedLoc.host === parsedTo.host)
+            to = parsedTo.relative;
+        if (parsedLoc.protocol === parsedFrom.protocol && parsedLoc.host === parsedFrom.host)
+            from = parsedFrom.relative;
+
+        this.captureBreadcrumb({
+            category: 'navigation',
+            data: {
+                to: to,
+                from: from
+            }
+        });
+    },
+
+    /**
+     * Instrument timers, event targets, XHR, history, and console so that
+     * errors are captured and breadcrumbs recorded
+     */
+    _wrapBuiltIns: function() {
+        var self = this;
+
+        function fill(obj, name, replacement, noUndo) {
+            var orig = obj[name];
+            obj[name] = replacement(orig);
+            if (!noUndo) {
+                self._wrappedBuiltIns.push([obj, name, orig]);
+            }
+        }
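+
+        // fill() is the monkey-patching helper used throughout this method: each
+        // (object, name, original) triple is remembered in _wrappedBuiltIns so
+        // _restoreBuiltIns() can undo the patch, except when noUndo is set
+        // (e.g. for the per-instance XHR handlers below).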
+
+        function wrapTimeFn(orig) {
+            return function (fn, t) { // preserve arity
+                // Make a copy of the arguments to prevent deoptimization
+                // https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#32-leaking-arguments
+                var args = new Array(arguments.length);
+                for(var i = 0; i < args.length; ++i) {
+                    args[i] = arguments[i];
+                }
+                var originalCallback = args[0];
+                if (isFunction(originalCallback)) {
+                    args[0] = self.wrap(originalCallback);
+                }
+
+                // IE < 9 doesn't support .call/.apply on setInterval/setTimeout, but it
+                // also supports only two arguments and doesn't care what `this` is, so we
+                // can just call the original function directly.
+                if (orig.apply) {
+                    return orig.apply(this, args);
+                } else {
+                    return orig(args[0], args[1]);
+                }
+            };
+        }
+
+        function wrapEventTarget(global) {
+            var proto = window[global] && window[global].prototype;
+            if (proto && proto.hasOwnProperty && proto.hasOwnProperty('addEventListener')) {
+                fill(proto, 'addEventListener', function(orig) {
+                    return function (evtName, fn, capture, secure) { // preserve arity
+                        try {
+                            if (fn && fn.handleEvent) {
+                                fn.handleEvent = self.wrap(fn.handleEvent);
+                            }
+                        } catch (err) {
+                            // can sometimes get 'Permission denied to access property "handleEvent"'
+                        }
+
+                        // TODO: more than just click
+                        var before;
+                        if (global === 'EventTarget' || global === 'Node') {
+                            if (evtName === 'click'){
+                                before = self._breadcrumbEventHandler(evtName);
+                            } else if (evtName === 'keypress') {
+                                before = self._keypressEventHandler();
+                            }
+                        }
+                        return orig.call(this, evtName, self.wrap(fn, undefined, before), capture, secure);
+                    };
+                });
+                fill(proto, 'removeEventListener', function (orig) {
+                    return function (evt, fn, capture, secure) {
+                        fn = fn && (fn.__raven_wrapper__ ? fn.__raven_wrapper__  : fn);
+                        return orig.call(this, evt, fn, capture, secure);
+                    };
+                });
+            }
+        }
+
+        function wrapProp(prop, xhr) {
+            if (prop in xhr && isFunction(xhr[prop])) {
+                fill(xhr, prop, function (orig) {
+                    return self.wrap(orig);
+                }, true /* noUndo */); // don't track filled methods on XHR instances
+            }
+        }
+
+        fill(window, 'setTimeout', wrapTimeFn);
+        fill(window, 'setInterval', wrapTimeFn);
+        if (window.requestAnimationFrame) {
+            fill(window, 'requestAnimationFrame', function (orig) {
+                return function (cb) {
+                    return orig(self.wrap(cb));
+                };
+            });
+        }
+
+        // Capture breadcrumbs from any click that is unhandled / bubbled up all the way
+        // to the document. Do this before we instrument addEventListener.
+        if (this._hasDocument) {
+            if (document.addEventListener) {
+                document.addEventListener('click', self._breadcrumbEventHandler('click'), false);
+                document.addEventListener('keypress', self._keypressEventHandler(), false);
+            }
+            else {
+                // IE8 Compatibility
+                document.attachEvent('onclick', self._breadcrumbEventHandler('click'));
+                document.attachEvent('onkeypress', self._keypressEventHandler());
+            }
+        }
+
+        // event targets borrowed from bugsnag-js:
+        // https://github.com/bugsnag/bugsnag-js/blob/master/src/bugsnag.js#L666
+        var eventTargets = ['EventTarget', 'Window', 'Node', 'ApplicationCache', 'AudioTrackList', 'ChannelMergerNode', 'CryptoOperation', 'EventSource', 'FileReader', 'HTMLUnknownElement', 'IDBDatabase', 'IDBRequest', 'IDBTransaction', 'KeyOperation', 'MediaController', 'MessagePort', 'ModalWindow', 'Notification', 'SVGElementInstance', 'Screen', 'TextTrack', 'TextTrackCue', 'TextTrackList', 'WebSocket', 'WebSocketWorker', 'Worker', 'XMLHttpRequest', 'XMLHttpRequestEventTarget', 'XMLHttpRequestUpload'];
+        for (var i = 0; i < eventTargets.length; i++) {
+            wrapEventTarget(eventTargets[i]);
+        }
+
+        if ('XMLHttpRequest' in window) {
+            var xhrproto = XMLHttpRequest.prototype;
+            fill(xhrproto, 'open', function(origOpen) {
+                return function (method, url) { // preserve arity
+
+                    // if Sentry key appears in URL, don't capture
+                    if (isString(url) && url.indexOf(self._globalKey) === -1) {
+                        this.__raven_xhr = {
+                            method: method,
+                            url: url,
+                            status_code: null
+                        };
+                    }
+
+                    return origOpen.apply(this, arguments);
+                };
+            });
+
+            fill(xhrproto, 'send', function(origSend) {
+                return function (data) { // preserve arity
+                    var xhr = this;
+
+                    function onreadystatechangeHandler() {
+                        if (xhr.__raven_xhr && (xhr.readyState === 1 || xhr.readyState === 4)) {
+                            try {
+                                // reading xhr.status on some platforms throws
+                                // an exception
+                                xhr.__raven_xhr.status_code = xhr.status;
+                            } catch (e) { /* do nothing */ }
+                            self.captureBreadcrumb({
+                                type: 'http',
+                                category: 'xhr',
+                                data: xhr.__raven_xhr
+                            });
+                        }
+                    }
+
+                    var props = ['onload', 'onerror', 'onprogress'];
+                    for (var j = 0; j < props.length; j++) {
+                        wrapProp(props[j], xhr);
+                    }
+
+                    if ('onreadystatechange' in xhr && isFunction(xhr.onreadystatechange)) {
+                        fill(xhr, 'onreadystatechange', function (orig) {
+                            return self.wrap(orig, undefined, onreadystatechangeHandler);
+                        }, true /* noUndo */);
+                    } else {
+                        // if onreadystatechange wasn't actually set by the page on this xhr, we
+                        // are free to set our own and capture the breadcrumb
+                        xhr.onreadystatechange = onreadystatechangeHandler;
+                    }
+
+                    return origSend.apply(this, arguments);
+                };
+            });
+        }
+
+        // record navigation (URL) changes
+        // NOTE: in Chrome App environment, touching history.pushState, *even inside
+        //       a try/catch block*, will cause Chrome to output an error to console.error
+        // borrowed from: https://github.com/angular/angular.js/pull/13945/files
+        var chrome = window.chrome;
+        var isChromePackagedApp = chrome && chrome.app && chrome.app.runtime;
+        var hasPushState = !isChromePackagedApp && window.history && history.pushState;
+        if (hasPushState) {
+            // TODO: remove onpopstate handler on uninstall()
+            var oldOnPopState = window.onpopstate;
+            window.onpopstate = function () {
+                var currentHref = self._location.href;
+                self._captureUrlChange(self._lastHref, currentHref);
+
+                if (oldOnPopState) {
+                    return oldOnPopState.apply(this, arguments);
+                }
+            };
+
+            fill(history, 'pushState', function (origPushState) {
+                // note history.pushState.length is 0; intentionally not declaring
+                // params to preserve 0 arity
+                return function(/* state, title, url */) {
+                    var url = arguments.length > 2 ? arguments[2] : undefined;
+
+                    // url argument is optional
+                    if (url) {
+                        // coerce to string (this is what pushState does)
+                        self._captureUrlChange(self._lastHref, url + '');
+                    }
+
+                    return origPushState.apply(this, arguments);
+                };
+            });
+        }
+
+        // console
+        var consoleMethodCallback = function (msg, data) {
+            self.captureBreadcrumb({
+                message: msg,
+                level: data.level,
+                category: 'console'
+            });
+        };
+
+        if ('console' in window && console.log) {
+            each(['debug', 'info', 'warn', 'error', 'log'], function (_, level) {
+                wrapConsoleMethod(console, level, consoleMethodCallback);
+            });
+        }
+
+        var $ = window.jQuery || window.$;
+        if ($ && $.fn && $.fn.ready) {
+            fill($.fn, 'ready', function (orig) {
+                return function (fn) {
+                    return orig.call(this, self.wrap(fn));
+                };
+            });
+        }
+    },
+
+    _restoreBuiltIns: function () {
+        // restore any wrapped builtins
+        var builtin;
+        while (this._wrappedBuiltIns.length) {
+            builtin = this._wrappedBuiltIns.shift();
+
+            var obj = builtin[0],
+              name = builtin[1],
+              orig = builtin[2];
+
+            obj[name] = orig;
+        }
+    },
+
+    _drainPlugins: function() {
+        var self = this;
+
+        // FIX ME TODO
+        each(this._plugins, function(_, plugin) {
+            var installer = plugin[0];
+            var args = plugin[1];
+            installer.apply(self, [self].concat(args));
+        });
+    },
+
+    _parseDSN: function(str) {
+        var m = dsnPattern.exec(str),
+            dsn = {},
+            i = 7;
+
+        try {
+            while (i--) dsn[dsnKeys[i]] = m[i] || '';
+        } catch(e) {
+            throw new RavenConfigError('Invalid DSN: ' + str);
+        }
+
+        if (dsn.pass && !this._globalOptions.allowSecretKey) {
+            throw new RavenConfigError('Do not specify your secret key in the DSN. See: http://bit.ly/raven-secret-key');
+        }
+
+        return dsn;
+    },
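+
+    // Illustrative only (made-up DSN): a string like 'https://abc123@sentry.example.com/1'
+    // is split by dsnPattern into pieces (protocol, user, host, port, path, ...) keyed
+    // by dsnKeys, both of which are defined earlier in this file.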
+
+    _getGlobalServer: function(uri) {
+        // assemble the endpoint from the uri pieces
+        var globalServer = '//' + uri.host +
+            (uri.port ? ':' + uri.port : '');
+
+        if (uri.protocol) {
+            globalServer = uri.protocol + ':' + globalServer;
+        }
+        return globalServer;
+    },
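+
+    // Worked example (hypothetical values): {protocol: 'https', host: 'sentry.example.com',
+    // port: ''} yields 'https://sentry.example.com'; with no protocol the result stays
+    // protocol-relative, e.g. '//sentry.example.com:9000'.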
+
+    _handleOnErrorStackInfo: function() {
+        // if we are intentionally ignoring errors via onerror, bail out
+        if (!this._ignoreOnError) {
+            this._handleStackInfo.apply(this, arguments);
+        }
+    },
+
+    _handleStackInfo: function(stackInfo, options) {
+        var self = this;
+        var frames = [];
+
+        if (stackInfo.stack && stackInfo.stack.length) {
+            each(stackInfo.stack, function(i, stack) {
+                var frame = self._normalizeFrame(stack);
+                if (frame) {
+                    frames.push(frame);
+                }
+            });
+        }
+
+        this._triggerEvent('handle', {
+            stackInfo: stackInfo,
+            options: options
+        });
+
+        this._processException(
+            stackInfo.name,
+            stackInfo.message,
+            stackInfo.url,
+            stackInfo.lineno,
+            frames.slice(0, this._globalOptions.stackTraceLimit),
+            options
+        );
+    },
+
+    _normalizeFrame: function(frame) {
+        if (!frame.url) return;
+
+        // normalize the frames data
+        var normalized = {
+            filename:   frame.url,
+            lineno:     frame.line,
+            colno:      frame.column,
+            'function': frame.func || '?'
+        };
+
+        normalized.in_app = !( // determine if an exception came from outside of our app
+            // first we check the global includePaths list.
+            !!this._globalOptions.includePaths.test && !this._globalOptions.includePaths.test(normalized.filename) ||
+            // Next, check whether the function name is Raven or TraceKit
+            /(Raven|TraceKit)\./.test(normalized['function']) ||
+            // finally, we do a last ditch effort and check for raven.min.js
+            /raven\.(min\.)?js$/.test(normalized.filename)
+        );
+
+        return normalized;
+    },
+
+    _processException: function(type, message, fileurl, lineno, frames, options) {
+        var stacktrace;
+
+        if (!!this._globalOptions.ignoreErrors.test && this._globalOptions.ignoreErrors.test(message)) return;
+
+        message += '';
+
+        if (frames && frames.length) {
+            fileurl = frames[0].filename || fileurl;
+            // Sentry expects frames oldest to newest
+            // and JS sends them as newest to oldest
+            frames.reverse();
+            stacktrace = {frames: frames};
+        } else if (fileurl) {
+            stacktrace = {
+                frames: [{
+                    filename: fileurl,
+                    lineno: lineno,
+                    in_app: true
+                }]
+            };
+        }
+
+        if (!!this._globalOptions.ignoreUrls.test && this._globalOptions.ignoreUrls.test(fileurl)) return;
+        if (!!this._globalOptions.whitelistUrls.test && !this._globalOptions.whitelistUrls.test(fileurl)) return;
+
+        var data = objectMerge({
+            // sentry.interfaces.Exception
+            exception: {
+                values: [{
+                    type: type,
+                    value: message,
+                    stacktrace: stacktrace
+                }]
+            },
+            culprit: fileurl
+        }, options);
+
+        // Fire away!
+        this._send(data);
+    },
+
+    _trimPacket: function(data) {
+        // For now, we only want to truncate the two different messages
+        // but this could/should be expanded to just trim everything
+        var max = this._globalOptions.maxMessageLength;
+        if (data.message) {
+            data.message = truncate(data.message, max);
+        }
+        if (data.exception) {
+            var exception = data.exception.values[0];
+            exception.value = truncate(exception.value, max);
+        }
+
+        return data;
+    },
+
+    _getHttpData: function() {
+        if (!this._hasDocument || !document.location || !document.location.href) {
+            return;
+        }
+
+        var httpData = {
+            headers: {
+                'User-Agent': navigator.userAgent
+            }
+        };
+
+        httpData.url = document.location.href;
+
+        if (document.referrer) {
+            httpData.headers.Referer = document.referrer;
+        }
+
+        return httpData;
+    },
+
+
+    _send: function(data) {
+        var self = this;
+
+        var globalOptions = this._globalOptions;
+
+        var baseData = {
+            project: this._globalProject,
+            logger: globalOptions.logger,
+            platform: 'javascript'
+        }, httpData = this._getHttpData();
+
+        if (httpData) {
+            baseData.request = httpData;
+        }
+
+        data = objectMerge(baseData, data);
+
+        // Merge in the tags and extra separately since objectMerge doesn't handle a deep merge
+        data.tags = objectMerge(objectMerge({}, this._globalContext.tags), data.tags);
+        data.extra = objectMerge(objectMerge({}, this._globalContext.extra), data.extra);
+
+        // Send along our own collected metadata with extra
+        data.extra['session:duration'] = now() - this._startTime;
+
+        if (this._breadcrumbs && this._breadcrumbs.length > 0) {
+            // intentionally make shallow copy so that additions
+            // to breadcrumbs aren't accidentally sent in this request
+            data.breadcrumbs = {
+                values: [].slice.call(this._breadcrumbs, 0)
+            };
+        }
+
+        // If there are no tags, strip the key from the payload altogether
+        // (extra is never empty here, since session:duration was just added)
+        if (isEmptyObject(data.tags)) delete data.tags;
+
+        if (this._globalContext.user) {
+            // sentry.interfaces.User
+            data.user = this._globalContext.user;
+        }
+
+        // Include the environment if it's defined in globalOptions
+        if (globalOptions.environment) data.environment = globalOptions.environment;
+
+        // Include the release if it's defined in globalOptions
+        if (globalOptions.release) data.release = globalOptions.release;
+
+        // Include server_name if it's defined in globalOptions
+        if (globalOptions.serverName) data.server_name = globalOptions.serverName;
+
+        if (isFunction(globalOptions.dataCallback)) {
+            data = globalOptions.dataCallback(data) || data;
+        }
+
+        // A dataCallback may have emptied the payload entirely; nothing to send
+        if (!data || isEmptyObject(data)) {
+            return;
+        }
+
+        // Check if the request should be filtered or not
+        if (isFunction(globalOptions.shouldSendCallback) && !globalOptions.shouldSendCallback(data)) {
+            return;
+        }
+
+        // Send along an event_id if not explicitly passed.
+        // This event_id can be used to reference the error within Sentry itself.
+        // Set lastEventId after we know the error should actually be sent
+        this._lastEventId = data.event_id || (data.event_id = uuid4());
+
+        // Try and clean up the packet before sending by truncating long values
+        data = this._trimPacket(data);
+
+        this._logDebug('debug', 'Raven about to send:', data);
+
+        if (!this.isSetup()) return;
+
+        var auth = {
+            sentry_version: '7',
+            sentry_client: 'raven-js/' + this.VERSION,
+            sentry_key: this._globalKey
+        };
+        if (this._globalSecret) {
+            auth.sentry_secret = this._globalSecret;
+        }
+
+        var exception = data.exception && data.exception.values[0];
+        this.captureBreadcrumb({
+            category: 'sentry',
+            message: exception
+                ? (exception.type ? exception.type + ': ' : '') + exception.value
+                : data.message,
+            event_id: data.event_id,
+            level: data.level || 'error' // presume error unless specified
+        });
+
+        var url = this._globalEndpoint;
+        (globalOptions.transport || this._makeRequest).call(this, {
+            url: url,
+            auth: auth,
+            data: data,
+            options: globalOptions,
+            onSuccess: function success() {
+                self._triggerEvent('success', {
+                    data: data,
+                    src: url
+                });
+            },
+            onError: function failure() {
+                self._triggerEvent('failure', {
+                    data: data,
+                    src: url
+                });
+            }
+        });
+    },
+
+    _makeRequest: function(opts) {
+        var request = new XMLHttpRequest();
+
+        // if browser doesn't support CORS (e.g. IE7), we are out of luck
+        var hasCORS =
+            'withCredentials' in request ||
+            typeof XDomainRequest !== 'undefined';
+
+        if (!hasCORS) return;
+
+        var url = opts.url;
+        function handler() {
+            if (request.status === 200) {
+                if (opts.onSuccess) {
+                    opts.onSuccess();
+                }
+            } else if (opts.onError) {
+                opts.onError();
+            }
+        }
+
+        if ('withCredentials' in request) {
+            request.onreadystatechange = function () {
+                if (request.readyState !== 4) {
+                    return;
+                }
+                handler();
+            };
+        } else {
+            request = new XDomainRequest();
+            // XDomainRequest cannot go http -> https (or vice versa),
+            // so always use a protocol-relative URL
+            url = url.replace(/^https?:/, '');
+
+            // onreadystatechange not supported by XDomainRequest
+            request.onload = handler;
+        }
+
+        // NOTE: auth is intentionally sent as part of query string (NOT as custom
+        //       HTTP header) so as to avoid preflight CORS requests
+        request.open('POST', url + '?' + urlencode(opts.auth));
+        request.send(stringify(opts.data));
+    },
+
+    _logDebug: function(level) {
+        if (this._originalConsoleMethods[level] && this.debug) {
+            // In IE<10 console methods do not have their own 'apply' method
+            Function.prototype.apply.call(
+                this._originalConsoleMethods[level],
+                this._originalConsole,
+                [].slice.call(arguments, 1)
+            );
+        }
+    },
+
+    _mergeContext: function(key, context) {
+        if (isUndefined(context)) {
+            delete this._globalContext[key];
+        } else {
+            this._globalContext[key] = objectMerge(this._globalContext[key] || {}, context);
+        }
+    }
+};
+
+// Deprecations
+Raven.prototype.setUser = Raven.prototype.setUserContext;
+Raven.prototype.setReleaseContext = Raven.prototype.setRelease;
+
+module.exports = Raven;
+
+},{"1":1,"2":2,"3":3,"6":6,"7":7}],5:[function(_dereq_,module,exports){
+/**
+ * Enforces a single instance of the Raven client, and the
+ * main entry point for Raven. If you are a consumer of the
+ * Raven library, you SHOULD load this file (vs raven.js).
+ **/
+
+'use strict';
+
+var RavenConstructor = _dereq_(4);
+
+var _Raven = window.Raven;
+
+var Raven = new RavenConstructor();
+
+/*
+ * Allow multiple versions of Raven to be installed.
+ * Strip Raven from the global context and return the instance.
+ *
+ * @return {Raven}
+ */
+Raven.noConflict = function () {
+    window.Raven = _Raven;
+    return Raven;
+};
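+
+// Sketch: `var MyRaven = Raven.noConflict();` restores whatever window.Raven held
+// before this script loaded and hands back this singleton for local use.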
+
+Raven.afterLoad();
+
+module.exports = Raven;
+
+},{"4":4}],6:[function(_dereq_,module,exports){
+/*eslint no-extra-parens:0*/
+'use strict';
+
+var objectPrototype = Object.prototype;
+
+function isUndefined(what) {
+    return what === void 0;
+}
+
+function isFunction(what) {
+    return typeof what === 'function';
+}
+
+function isString(what) {
+    return objectPrototype.toString.call(what) === '[object String]';
+}
+
+function isObject(what) {
+    return typeof what === 'object' && what !== null;
+}
+
+function isEmptyObject(what) {
+    for (var _ in what) return false;  // eslint-disable-line guard-for-in, no-unused-vars
+    return true;
+}
+
+// Sorta yanked from https://github.com/joyent/node/blob/aa3b4b4/lib/util.js#L560
+// with some tiny modifications
+function isError(what) {
+    var toString = objectPrototype.toString.call(what);
+    return isObject(what) &&
+        toString === '[object Error]' ||
+        toString === '[object Exception]' || // Firefox NS_ERROR_FAILURE Exceptions
+        what instanceof Error;
+}
+
+function each(obj, callback) {
+    var i, j;
+
+    if (isUndefined(obj.length)) {
+        for (i in obj) {
+            if (hasKey(obj, i)) {
+                callback.call(null, i, obj[i]);
+            }
+        }
+    } else {
+        j = obj.length;
+        if (j) {
+            for (i = 0; i < j; i++) {
+                callback.call(null, i, obj[i]);
+            }
+        }
+    }
+}
+
+function objectMerge(obj1, obj2) {
+    if (!obj2) {
+        return obj1;
+    }
+    each(obj2, function(key, value){
+        obj1[key] = value;
+    });
+    return obj1;
+}
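+
+// Note: objectMerge mutates and returns obj1, and the merge is shallow; e.g.
+// objectMerge({a: 1}, {b: 2}) yields {a: 1, b: 2}, but nested objects are replaced.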
+
+function truncate(str, max) {
+    return !max || str.length <= max ? str : str.substr(0, max) + '\u2026';
+}
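+
+// Examples: truncate('abcdef', 4) === 'abcd\u2026' (ellipsis appended), while
+// truncate('abc', 4) === 'abc' and truncate('abc', 0) === 'abc' (0 disables trimming).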
+
+/**
+ * hasKey, a better form of hasOwnProperty
+ * Example: hasKey(MainHostObject, property) === true/false
+ *
+ * @param {Object} object The host object to check
+ * @param {string} key The property name to check for
+ */
+function hasKey(object, key) {
+    return objectPrototype.hasOwnProperty.call(object, key);
+}
+
+function joinRegExp(patterns) {
+    // Combine an array of regular expressions and strings into one large regexp
+    // Be mad.
+    var sources = [],
+        i = 0, len = patterns.length,
+        pattern;
+
+    for (; i < len; i++) {
+        pattern = patterns[i];
+        if (isString(pattern)) {
+            // If it's a string, we need to escape it
+            // Taken from: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions
+            sources.push(pattern.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1'));
+        } else if (pattern && pattern.source) {
+            // If it's a regexp already, we want to extract the source
+            sources.push(pattern.source);
+        }
+        // Intentionally skip other cases
+    }
+    return new RegExp(sources.join('|'), 'i');
+}
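+
+// Illustrative behavior: strings are regex-escaped, regexps contribute their source,
+// and everything is OR-ed into one case-insensitive pattern, e.g.
+//   joinRegExp(['fb_xd_fragment', /^Exact$/])  // => /fb_xd_fragment|^Exact$/i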
+
+function urlencode(o) {
+    var pairs = [];
+    each(o, function(key, value) {
+        pairs.push(encodeURIComponent(key) + '=' + encodeURIComponent(value));
+    });
+    return pairs.join('&');
+}
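+
+// Example: urlencode({a: '1', b: 'x y'}) === 'a=1&b=x%20y'.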
+
+// borrowed from https://tools.ietf.org/html/rfc3986#appendix-B
+// intentionally using a regex rather than the <a/> href parsing trick because in
+// React Native and other environments the DOM might not be available
+function parseUrl(url) {
+    var match = url.match(/^(([^:\/?#]+):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?$/);
+    if (!match) return {};
+
+    // coerce undefined values to empty string so we don't get 'undefined'
+    var query = match[6] || '';
+    var fragment = match[8] || '';
+    return {
+        protocol: match[2],
+        host: match[4],
+        path: match[5],
+        relative: match[5] + query + fragment // everything minus origin
+    };
+}
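+
+// Worked example (made-up URL): parseUrl('https://host.example/p/q?x=1#frag') returns
+// {protocol: 'https', host: 'host.example', path: '/p/q', relative: '/p/q?x=1#frag'}.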
+function uuid4() {
+    var crypto = window.crypto || window.msCrypto;
+
+    if (!isUndefined(crypto) && crypto.getRandomValues) {
+        // Use window.crypto API if available
+        var arr = new Uint16Array(8);
+        crypto.getRandomValues(arr);
+
+        // set 4 in byte 7
+        arr[3] = arr[3] & 0xFFF | 0x4000;
+        // set 2 most significant bits of byte 9 to '10'
+        arr[4] = arr[4] & 0x3FFF | 0x8000;
+
+        var pad = function(num) {
+            var v = num.toString(16);
+            while (v.length < 4) {
+                v = '0' + v;
+            }
+            return v;
+        };
+
+        return pad(arr[0]) + pad(arr[1]) + pad(arr[2]) + pad(arr[3]) + pad(arr[4]) +
+        pad(arr[5]) + pad(arr[6]) + pad(arr[7]);
+    } else {
+        // http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/2117523#2117523
+        return 'xxxxxxxxxxxx4xxxyxxxxxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
+            var r = Math.random()*16|0,
+                v = c === 'x' ? r : r&0x3|0x8;
+            return v.toString(16);
+        });
+    }
+}
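+
+// Note on the bit fiddling above: arr[3] has its high nibble forced to 4 (the UUID
+// version) and arr[4] its top two bits to '10' (the RFC 4122 variant); the result is
+// 32 hex chars with no dashes, unlike the canonical 8-4-4-4-12 form.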
+
+/**
+ * Given a child DOM element, returns a query-selector statement describing that
+ * and its ancestors
+ * e.g. [HTMLElement] => body > div > input#foo.btn[name=baz]
+ * @param elem
+ * @returns {string}
+ */
+function htmlTreeAsString(elem) {
+    var MAX_TRAVERSE_HEIGHT = 5,
+        MAX_OUTPUT_LEN = 80,
+        out = [],
+        height = 0,
+        len = 0,
+        separator = ' > ',
+        sepLength = separator.length,
+        nextStr;
+
+    while (elem && height++ < MAX_TRAVERSE_HEIGHT) {
+
+        nextStr = htmlElementAsString(elem);
+        // bail out if
+        // - nextStr is the 'html' element
+        // - the length of the string that would be created exceeds MAX_OUTPUT_LEN
+        //   (ignore this limit if we are on the first iteration)
+        if (nextStr === 'html' || height > 1 && len + (out.length * sepLength) + nextStr.length >= MAX_OUTPUT_LEN) {
+            break;
+        }
+
+        out.push(nextStr);
+
+        len += nextStr.length;
+        elem = elem.parentNode;
+    }
+
+    return out.reverse().join(separator);
+}
+
+/**
+ * Returns a simple, query-selector representation of a DOM element
+ * e.g. [HTMLElement] => input#foo.btn[name=baz]
+ * @param elem
+ * @returns {string}
+ */
+function htmlElementAsString(elem) {
+    var out = [],
+        className,
+        classes,
+        key,
+        attr,
+        i;
+
+    if (!elem || !elem.tagName) {
+        return '';
+    }
+
+    out.push(elem.tagName.toLowerCase());
+    if (elem.id) {
+        out.push('#' + elem.id);
+    }
+
+    className = elem.className;
+    if (className && isString(className)) {
+        classes = className.split(' ');
+        for (i = 0; i < classes.length; i++) {
+            out.push('.' + classes[i]);
+        }
+    }
+    var attrWhitelist = ['type', 'name', 'title', 'alt'];
+    for (i = 0; i < attrWhitelist.length; i++) {
+        key = attrWhitelist[i];
+        attr = elem.getAttribute(key);
+        if (attr) {
+            out.push('[' + key + '="' + attr + '"]');
+        }
+    }
+    return out.join('');
+}
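+
+// Illustrative input/output (hypothetical element): an <input id="foo"
+// class="btn btn-sm" name="baz"> serializes to 'input#foo.btn.btn-sm[name="baz"]'.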
+
+module.exports = {
+    isUndefined: isUndefined,
+    isFunction: isFunction,
+    isString: isString,
+    isObject: isObject,
+    isEmptyObject: isEmptyObject,
+    isError: isError,
+    each: each,
+    objectMerge: objectMerge,
+    truncate: truncate,
+    hasKey: hasKey,
+    joinRegExp: joinRegExp,
+    urlencode: urlencode,
+    uuid4: uuid4,
+    htmlTreeAsString: htmlTreeAsString,
+    htmlElementAsString: htmlElementAsString,
+    parseUrl: parseUrl
+};
+
+},{}],7:[function(_dereq_,module,exports){
+'use strict';
+
+var utils = _dereq_(6);
+
+var hasKey = utils.hasKey;
+var isString = utils.isString;
+var isUndefined = utils.isUndefined;
+
+/*
+ TraceKit - Cross browser stack traces - github.com/occ/TraceKit
+ MIT license
+*/
+
+var TraceKit = {
+    collectWindowErrors: true,
+    debug: false
+};
+
+// global reference to slice
+var _slice = [].slice;
+var UNKNOWN_FUNCTION = '?';
+
+// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error#Error_types
+var ERROR_TYPES_RE = /^(?:Uncaught (?:exception: )?)?((?:Eval|Internal|Range|Reference|Syntax|Type|URI)Error): ?(.*)$/;
+
+function getLocationHref() {
+    if (typeof document === 'undefined')
+        return '';
+
+    return document.location.href;
+}
+
+/**
+ * TraceKit.report: cross-browser processing of unhandled exceptions
+ *
+ * Syntax:
+ *   TraceKit.report.subscribe(function(stackInfo) { ... })
+ *   TraceKit.report.unsubscribe(function(stackInfo) { ... })
+ *   TraceKit.report(exception)
+ *   try { ...code... } catch(ex) { TraceKit.report(ex); }
+ *
+ * Supports:
+ *   - Firefox: full stack trace with line numbers, plus column number
+ *              on top frame; column number is not guaranteed
+ *   - Opera:   full stack trace with line and column numbers
+ *   - Chrome:  full stack trace with line and column numbers
+ *   - Safari:  line and column number for the top frame only; some frames
+ *              may be missing, and column number is not guaranteed
+ *   - IE:      line and column number for the top frame only; some frames
+ *              may be missing, and column number is not guaranteed
+ *
+ * In theory, TraceKit should work on all of the following versions:
+ *   - IE5.5+ (only 8.0 tested)
+ *   - Firefox 0.9+ (only 3.5+ tested)
+ *   - Opera 7+ (only 10.50 tested; versions 9 and earlier may require
+ *     Exceptions Have Stacktrace to be enabled in opera:config)
+ *   - Safari 3+ (only 4+ tested)
+ *   - Chrome 1+ (only 5+ tested)
+ *   - Konqueror 3.5+ (untested)
+ *
+ * Requires TraceKit.computeStackTrace.
+ *
+ * Tries to catch all unhandled exceptions and report them to the
+ * subscribed handlers. Please note that TraceKit.report will rethrow the
+ * exception. This is REQUIRED in order to get a useful stack trace in IE.
+ * If the exception does not reach the top of the browser, you will only
+ * get a stack trace from the point where TraceKit.report was called.
+ *
+ * Handlers receive a stackInfo object as described in the
+ * TraceKit.computeStackTrace docs.
+ */
+TraceKit.report = (function reportModuleWrapper() {
+    var handlers = [],
+        lastArgs = null,
+        lastException = null,
+        lastExceptionStack = null;
+
+    /**
+     * Add a crash handler.
+     * @param {Function} handler
+     */
+    function subscribe(handler) {
+        installGlobalHandler();
+        handlers.push(handler);
+    }
+
+    /**
+     * Remove a crash handler.
+     * @param {Function} handler
+     */
+    function unsubscribe(handler) {
+        for (var i = handlers.length - 1; i >= 0; --i) {
+            if (handlers[i] === handler) {
+                handlers.splice(i, 1);
+            }
+        }
+    }
+
+    /**
+     * Remove all crash handlers.
+     */
+    function unsubscribeAll() {
+        uninstallGlobalHandler();
+        handlers = [];
+    }
+
+    /**
+     * Dispatch stack information to all handlers.
+     * @param {Object.<string, *>} stack
+     */
+    function notifyHandlers(stack, isWindowError) {
+        var exception = null;
+        if (isWindowError && !TraceKit.collectWindowErrors) {
+          return;
+        }
+        for (var i in handlers) {
+            if (hasKey(handlers, i)) {
+                try {
+                    handlers[i].apply(null, [stack].concat(_slice.call(arguments, 2)));
+                } catch (inner) {
+                    exception = inner;
+                }
+            }
+        }
+
+        if (exception) {
+            throw exception;
+        }
+    }
+
+    var _oldOnerrorHandler, _onErrorHandlerInstalled;
+
+    /**
+     * Ensures all global unhandled exceptions are recorded.
+     * Supported by Gecko and IE.
+     * @param {string} message Error message.
+     * @param {string} url URL of script that generated the exception.
+     * @param {(number|string)} lineNo The line number at which the error
+     * occurred.
+     * @param {?(number|string)} colNo The column number at which the error
+     * occurred.
+     * @param {?Error} ex The actual Error object.
+     */
+    function traceKitWindowOnError(message, url, lineNo, colNo, ex) {
+        var stack = null;
+
+        if (lastExceptionStack) {
+            TraceKit.computeStackTrace.augmentStackTraceWithInitialElement(lastExceptionStack, url, lineNo, message);
+            processLastException();
+        } else if (ex) {
+            // New chrome and blink send along a real error object
+            // Let's just report that like a normal error.
+            // See: https://mikewest.org/2013/08/debugging-runtime-errors-with-window-onerror
+            stack = TraceKit.computeStackTrace(ex);
+            notifyHandlers(stack, true);
+        } else {
+            var location = {
+                'url': url,
+                'line': lineNo,
+                'column': colNo
+            };
+
+            var name = undefined;
+            var msg = message; // must be new var or will modify original `arguments`
+            var groups;
+            if (isString(message)) {
+                groups = message.match(ERROR_TYPES_RE);
+                if (groups) {
+                    name = groups[1];
+                    msg = groups[2];
+                }
+            }
+
+            location.func = UNKNOWN_FUNCTION;
+
+            stack = {
+                'name': name,
+                'message': msg,
+                'url': getLocationHref(),
+                'stack': [location]
+            };
+            notifyHandlers(stack, true);
+        }
+
+        if (_oldOnerrorHandler) {
+            return _oldOnerrorHandler.apply(this, arguments);
+        }
+
+        return false;
+    }
+
+    function installGlobalHandler() {
+        if (_onErrorHandlerInstalled) {
+            return;
+        }
+        _oldOnerrorHandler = window.onerror;
+        window.onerror = traceKitWindowOnError;
+        _onErrorHandlerInstalled = true;
+    }
+
+    function uninstallGlobalHandler() {
+        if (!_onErrorHandlerInstalled) {
+            return;
+        }
+        window.onerror = _oldOnerrorHandler;
+        _onErrorHandlerInstalled = false;
+        _oldOnerrorHandler = undefined;
+    }
+
+    function processLastException() {
+        var _lastExceptionStack = lastExceptionStack,
+            _lastArgs = lastArgs;
+        lastArgs = null;
+        lastExceptionStack = null;
+        lastException = null;
+        notifyHandlers.apply(null, [_lastExceptionStack, false].concat(_lastArgs));
+    }
+
+    /**
+     * Reports an unhandled Error to TraceKit.
+     * @param {Error} ex
+     * @param {?boolean} rethrow If false, do not re-throw the exception.
+     * Only used for window.onerror to not cause an infinite loop of
+     * rethrowing.
+     */
+    function report(ex, rethrow) {
+        var args = _slice.call(arguments, 1);
+        if (lastExceptionStack) {
+            if (lastException === ex) {
+                return; // already caught by an inner catch block, ignore
+            } else {
+              processLastException();
+            }
+        }
+
+        var stack = TraceKit.computeStackTrace(ex);
+        lastExceptionStack = stack;
+        lastException = ex;
+        lastArgs = args;
+
+        // If the stack trace is incomplete, wait 2 seconds for
+        // slow IE to see if onerror occurs or not before reporting
+        // this exception; otherwise, we will end up with an incomplete
+        // stack trace
+        window.setTimeout(function () {
+            if (lastException === ex) {
+                processLastException();
+            }
+        }, (stack.incomplete ? 2000 : 0));
+
+        if (rethrow !== false) {
+            throw ex; // re-throw to propagate to the top level (and cause window.onerror)
+        }
+    }
+
+    report.subscribe = subscribe;
+    report.unsubscribe = unsubscribe;
+    report.uninstall = unsubscribeAll;
+    return report;
+}());
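+
+// Hedged usage sketch mirroring the doc comment above; the handler body and
+// `undefinedFn` are assumptions. Note that report() re-throws the exception.
+//
+//   TraceKit.report.subscribe(function (stackInfo) {
+//       console.log(stackInfo.message, stackInfo.stack.length + ' frames');
+//   });
+//   try { undefinedFn(); } catch (ex) { TraceKit.report(ex); }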
+
+/**
+ * TraceKit.computeStackTrace: cross-browser stack traces in JavaScript
+ *
+ * Syntax:
+ *   s = TraceKit.computeStackTrace(exception) // consider using TraceKit.report instead (see below)
+ * Returns:
+ *   s.name              - exception name
+ *   s.message           - exception message
+ *   s.stack[i].url      - JavaScript or HTML file URL
+ *   s.stack[i].func     - function name, or empty for anonymous functions (if guessing did not work)
+ *   s.stack[i].args     - arguments passed to the function, if known
+ *   s.stack[i].line     - line number, if known
+ *   s.stack[i].column   - column number, if known
+ *
+ * Supports:
+ *   - Firefox:  full stack trace with line numbers and unreliable column
+ *               number on top frame
+ *   - Opera 10: full stack trace with line and column numbers
+ *   - Opera 9-: full stack trace with line numbers
+ *   - Chrome:   full stack trace with line and column numbers
+ *   - Safari:   line and column number for the topmost stacktrace element
+ *               only
+ *   - IE:       no line numbers whatsoever
+ *
+ * Tries to guess names of anonymous functions by looking for assignments
+ * in the source code. In IE and Safari, we have to guess source file names
+ * by searching for function bodies inside all page scripts. This will not
+ * work for scripts that are loaded cross-domain.
+ * Here be dragons: some function names may be guessed incorrectly, and
+ * duplicate functions may be mismatched.
+ *
+ * TraceKit.computeStackTrace should only be used for tracing purposes.
+ * Logging of unhandled exceptions should be done with TraceKit.report,
+ * which builds on top of TraceKit.computeStackTrace and provides better
+ * IE support by utilizing the window.onerror event to retrieve information
+ * about the top of the stack.
+ *
+ * Note: In IE and Safari, no stack trace is recorded on the Error object,
+ * so computeStackTrace instead walks its *own* chain of callers.
+ * This means that:
+ *  * in Safari, some methods may be missing from the stack trace;
+ *  * in IE, the topmost function in the stack trace will always be the
+ *    caller of computeStackTrace.
+ *
+ * This is okay for tracing (because you are likely to be calling
+ * computeStackTrace from the function you want to be the topmost element
+ * of the stack trace anyway), but not okay for logging unhandled
+ * exceptions (because your catch block will likely be far away from the
+ * inner function that actually caused the exception).
+ *
+ */
+TraceKit.computeStackTrace = (function computeStackTraceWrapper() {
+    /**
+     * Escapes special characters, except for whitespace, in a string to be
+     * used inside a regular expression as a string literal.
+     * @param {string} text The string.
+     * @return {string} The escaped string literal.
+     */
+    function escapeRegExp(text) {
+        return text.replace(/[\-\[\]{}()*+?.,\\\^$|#]/g, '\\$&');
+    }
+
+    /**
+     * Escapes special characters in a string to be used inside a regular
+     * expression as a string literal. Also ensures that HTML entities will
+     * be matched the same as their literal friends.
+     * @param {string} body The string.
+     * @return {string} The escaped string.
+     */
+    function escapeCodeAsRegExpForMatchingInsideHTML(body) {
+        return escapeRegExp(body).replace('<', '(?:<|&lt;)').replace('>', '(?:>|&gt;)').replace('&', '(?:&|&amp;)').replace('"', '(?:"|&quot;)').replace(/\s+/g, '\\s+');
+    }
+
+    // Contents of Exception in various browsers.
+    //
+    // SAFARI:
+    // ex.message = Can't find variable: qq
+    // ex.line = 59
+    // ex.sourceId = 580238192
+    // ex.sourceURL = http://...
+    // ex.expressionBeginOffset = 96
+    // ex.expressionCaretOffset = 98
+    // ex.expressionEndOffset = 98
+    // ex.name = ReferenceError
+    //
+    // FIREFOX:
+    // ex.message = qq is not defined
+    // ex.fileName = http://...
+    // ex.lineNumber = 59
+    // ex.columnNumber = 69
+    // ex.stack = ...stack trace... (see the example below)
+    // ex.name = ReferenceError
+    //
+    // CHROME:
+    // ex.message = qq is not defined
+    // ex.name = ReferenceError
+    // ex.type = not_defined
+    // ex.arguments = ['aa']
+    // ex.stack = ...stack trace...
+    //
+    // INTERNET EXPLORER:
+    // ex.message = ...
+    // ex.name = ReferenceError
+    //
+    // OPERA:
+    // ex.message = ...message... (see the example below)
+    // ex.name = ReferenceError
+    // ex.opera#sourceloc = 11  (pretty much useless, duplicates the info in ex.message)
+    // ex.stacktrace = n/a; see 'opera:config#UserPrefs|Exceptions Have Stacktrace'
+
+    /**
+     * Computes stack trace information from the stack property.
+     * Chrome and Gecko use this property.
+     * @param {Error} ex
+     * @return {?Object.<string, *>} Stack trace information.
+     */
+    function computeStackTraceFromStackProp(ex) {
+        if (isUndefined(ex.stack) || !ex.stack) return;
+
+        var chrome = /^\s*at (.*?) ?\(((?:file|https?|blob|chrome-extension|native|eval|<anonymous>).*?)(?::(\d+))?(?::(\d+))?\)?\s*$/i,
+            gecko = /^\s*(.*?)(?:\((.*?)\))?(?:^|@)((?:file|https?|blob|chrome|\[native).*?)(?::(\d+))?(?::(\d+))?\s*$/i,
+            winjs = /^\s*at (?:((?:\[object object\])?.+) )?\(?((?:file|ms-appx|https?|blob):.*?):(\d+)(?::(\d+))?\)?\s*$/i,
+            lines = ex.stack.split('\n'),
+            stack = [],
+            parts,
+            element,
+            reference = /^(.*) is undefined$/.exec(ex.message);
+
+        for (var i = 0, j = lines.length; i < j; ++i) {
+            if ((parts = chrome.exec(lines[i]))) {
+                var isNative = parts[2] && parts[2].indexOf('native') !== -1;
+                element = {
+                    'url': !isNative ? parts[2] : null,
+                    'func': parts[1] || UNKNOWN_FUNCTION,
+                    'args': isNative ? [parts[2]] : [],
+                    'line': parts[3] ? +parts[3] : null,
+                    'column': parts[4] ? +parts[4] : null
+                };
+            } else if ((parts = winjs.exec(lines[i]))) {
+                element = {
+                    'url': parts[2],
+                    'func': parts[1] || UNKNOWN_FUNCTION,
+                    'args': [],
+                    'line': +parts[3],
+                    'column': parts[4] ? +parts[4] : null
+                };
+            } else if ((parts = gecko.exec(lines[i]))) {
+                element = {
+                    'url': parts[3],
+                    'func': parts[1] || UNKNOWN_FUNCTION,
+                    'args': parts[2] ? parts[2].split(',') : [],
+                    'line': parts[4] ? +parts[4] : null,
+                    'column': parts[5] ? +parts[5] : null
+                };
+            } else {
+                continue;
+            }
+
+            if (!element.func && element.line) {
+                element.func = UNKNOWN_FUNCTION;
+            }
+
+            stack.push(element);
+        }
+
+        if (!stack.length) {
+            return null;
+        }
+
+        if (!stack[0].column && !isUndefined(ex.columnNumber)) {
+            // Firefox exposes the column of the top frame via ex.columnNumber.
+            // Note that Firefox's column number is 0-based while everything
+            // else here expects 1-based, so add 1.
+            stack[0].column = ex.columnNumber + 1;
+        }
+
+        return {
+            'name': ex.name,
+            'message': ex.message,
+            'url': getLocationHref(),
+            'stack': stack
+        };
+    }
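+
+    /*
+     * Illustrative sketch (editor's example, not part of TraceKit): a
+     * Chrome-style ex.stack such as
+     *
+     *   TypeError: qq is not defined
+     *       at foo (http://example.com/app.js:10:5)
+     *
+     * parses to a single frame (the message line matches no parser and is
+     * skipped):
+     *
+     *   { url: 'http://example.com/app.js', func: 'foo', args: [],
+     *     line: 10, column: 5 }
+     */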
+
+    /**
+     * Computes stack trace information from the stacktrace property.
+     * Opera 10 uses this property.
+     * @param {Error} ex
+     * @return {?Object.<string, *>} Stack trace information.
+     */
+    function computeStackTraceFromStacktraceProp(ex) {
+        // Access and store the stacktrace property before doing ANYTHING
+        // else to it because Opera is not very good at providing it
+        // reliably in other circumstances.
+        var stacktrace = ex.stacktrace;
+        if (isUndefined(ex.stacktrace) || !ex.stacktrace) return;
+
+        var opera10Regex = / line (\d+).*script (?:in )?(\S+)(?:: in function (\S+))?$/i,
+            opera11Regex = / line (\d+), column (\d+)\s*(?:in (?:<anonymous function: ([^>]+)>|([^\)]+))\((.*)\))? in (.*):\s*$/i,
+            lines = stacktrace.split('\n'),
+            stack = [],
+            parts;
+
+        for (var line = 0; line < lines.length; line += 2) {
+            var element = null;
+            if ((parts = opera10Regex.exec(lines[line]))) {
+                element = {
+                    'url': parts[2],
+                    'line': +parts[1],
+                    'column': null,
+                    'func': parts[3],
+                    'args': []
+                };
+            } else if ((parts = opera11Regex.exec(lines[line]))) {
+                element = {
+                    'url': parts[6],
+                    'line': +parts[1],
+                    'column': +parts[2],
+                    'func': parts[3] || parts[4],
+                    'args': parts[5] ? parts[5].split(',') : []
+                };
+            }
+
+            if (element) {
+                if (!element.func && element.line) {
+                    element.func = UNKNOWN_FUNCTION;
+                }
+
+                stack.push(element);
+            }
+        }
+
+        if (!stack.length) {
+            return null;
+        }
+
+        return {
+            'name': ex.name,
+            'message': ex.message,
+            'url': getLocationHref(),
+            'stack': stack
+        };
+    }
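+
+    /*
+     * Illustrative sketch (editor's example, not part of TraceKit): Opera 11
+     * interleaves each location line with the source text of that line, which
+     * is why the loop above steps by 2. A location line roughly of the form
+     *
+     *   Error thrown at line 42, column 12 in foo(a, b) in http://example.com/app.js:
+     *
+     * parses to:
+     *
+     *   { url: 'http://example.com/app.js', line: 42, column: 12,
+     *     func: 'foo', args: ['a', ' b'] }
+     */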
+
+    /**
+     * NOT TESTED.
+     * Computes stack trace information from an error message that includes
+     * the stack trace.
+     * Opera 9 and earlier use this method if the option to show stack
+     * traces is turned on in opera:config.
+     * @param {Error} ex
+     * @return {?Object.<string, *>} Stack information.
+     */
+    function computeStackTraceFromOperaMultiLineMessage(ex) {
+        // Opera includes a stack trace into the exception message. An example is:
+        //
+        // Statement on line 3: Undefined variable: undefinedFunc
+        // Backtrace:
+        //   Line 3 of linked script file://localhost/Users/andreyvit/Projects/TraceKit/javascript-client/sample.js: In function zzz
+        //         undefinedFunc(a);
+        //   Line 7 of inline#1 script in file://localhost/Users/andreyvit/Projects/TraceKit/javascript-client/sample.html: In function yyy
+        //           zzz(x, y, z);
+        //   Line 3 of inline#1 script in file://localhost/Users/andreyvit/Projects/TraceKit/javascript-client/sample.html: In function xxx
+        //           yyy(a, a, a);
+        //   Line 1 of function script
+        //     try { xxx('hi'); return false; } catch(ex) { TraceKit.report(ex); }
+        //   ...
+
+        var lines = ex.message.split('\n');
+        if (lines.length < 4) {
+            return null;
+        }
+
+        var lineRE1 = /^\s*Line (\d+) of linked script ((?:file|https?|blob)\S+)(?:: in function (\S+))?\s*$/i,
+            lineRE2 = /^\s*Line (\d+) of inline#(\d+) script in ((?:file|https?|blob)\S+)(?:: in function (\S+))?\s*$/i,
+            lineRE3 = /^\s*Line (\d+) of function script\s*$/i,
+            stack = [],
+            scripts = document.getElementsByTagName('script'),
+            parts;
+
+        for (var line = 2; line < lines.length; line += 2) {
+            var item = null;
+            if ((parts = lineRE1.exec(lines[line]))) {
+                item = {
+                    'url': parts[2],
+                    'func': parts[3],
+                    'args': [],
+                    'line': +parts[1],
+                    'column': null
+                };
+            } else if ((parts = lineRE2.exec(lines[line]))) {
+                item = {
+                    'url': parts[3],
+                    'func': parts[4],
+                    'args': [],
+                    'line': +parts[1],
+                    'column': null // TODO: Check to see if inline#1 (+parts[2]) points to the script number or column number.
+                };
+                var relativeLine = (+parts[1]); // relative to the start of the <SCRIPT> block
+            } else if ((parts = lineRE3.exec(lines[line]))) {
+                var url = window.location.href.replace(/#.*$/, '');
+                item = {
+                    'url': url,
+                    'func': '',
+                    'args': [],
+                    'line': +parts[1], // coerce to a number, matching the other parsers
+                    'column': null
+                };
+            }
+
+            if (item) {
+                if (!item.func) {
+                    item.func = UNKNOWN_FUNCTION;
+                }
+
+                stack.push(item);
+            }
+        }
+
+        if (!stack.length) {
+            return null; // could not parse multiline exception message as Opera stack trace
+        }
+
+        return {
+            'name': ex.name,
+            'message': lines[0],
+            'url': getLocationHref(),
+            'stack': stack
+        };
+    }
+
+    /**
+     * Adds information about the first frame to incomplete stack traces.
+     * Safari and IE require this to get complete data on the first frame.
+     * @param {Object.<string, *>} stackInfo Stack trace information from
+     * one of the compute* methods.
+     * @param {string} url The URL of the script that caused an error.
+     * @param {(number|string)} lineNo The line number of the script that
+     * caused an error.
+     * @param {string=} message The error generated by the browser, which
+     * hopefully contains the name of the object that caused the error.
+     * @return {boolean} Whether or not the stack information was
+     * augmented.
+     */
+    function augmentStackTraceWithInitialElement(stackInfo, url, lineNo, message) {
+        var initial = {
+            'url': url,
+            'line': lineNo
+        };
+
+        if (initial.url && initial.line) {
+            stackInfo.incomplete = false;
+
+            if (!initial.func) {
+                initial.func = UNKNOWN_FUNCTION;
+            }
+
+            if (stackInfo.stack.length > 0) {
+                if (stackInfo.stack[0].url === initial.url) {
+                    if (stackInfo.stack[0].line === initial.line) {
+                        return false; // already in stack trace
+                    } else if (!stackInfo.stack[0].line && stackInfo.stack[0].func === initial.func) {
+                        stackInfo.stack[0].line = initial.line;
+                        return false;
+                    }
+                }
+            }
+
+            stackInfo.stack.unshift(initial);
+            stackInfo.partial = true;
+            return true;
+        } else {
+            stackInfo.incomplete = true;
+        }
+
+        return false;
+    }
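+
+    /*
+     * Illustrative sketch (editor's example, not part of TraceKit), assuming
+     * UNKNOWN_FUNCTION is defined earlier in this file as '?':
+     *
+     *   var info = { stack: [] };
+     *   augmentStackTraceWithInitialElement(info, 'http://example.com/app.js', 10);
+     *   // => true; info.stack[0] is { url: 'http://example.com/app.js',
+     *   //    line: 10, func: '?' } and info.partial === true
+     */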
+
+    /**
+     * Computes stack trace information by walking the arguments.caller
+     * chain at the time the exception occurred. This will cause earlier
+     * frames to be missed but is the only way to get any stack trace in
+     * Safari and IE. The top frame is restored by
+     * {@link augmentStackTraceWithInitialElement}.
+     * @param {Error} ex
+     * @return {?Object.<string, *>} Stack trace information.
+     */
+    function computeStackTraceByWalkingCallerChain(ex, depth) {
+        var functionName = /function\s+([_$a-zA-Z\xA0-\uFFFF][_$a-zA-Z0-9\xA0-\uFFFF]*)?\s*\(/i,
+            stack = [],
+            funcs = {},
+            recursion = false,
+            parts,
+            item,
+            source;
+
+        for (var curr = computeStackTraceByWalkingCallerChain.caller; curr && !recursion; curr = curr.caller) {
+            if (curr === computeStackTrace || curr === TraceKit.report) {
+                // console.log('skipping internal function');
+                continue;
+            }
+
+            item = {
+                'url': null,
+                'func': UNKNOWN_FUNCTION,
+                'line': null,
+                'column': null
+            };
+
+            if (curr.name) {
+                item.func = curr.name;
+            } else if ((parts = functionName.exec(curr.toString()))) {
+                item.func = parts[1];
+            }
+
+            if (typeof item.func === 'undefined') {
+                try {
+                    item.func = parts.input.substring(0, parts.input.indexOf('{'));
+                } catch (e) { }
+            }
+
+            if (funcs['' + curr]) {
+                recursion = true;
+            } else {
+                funcs['' + curr] = true;
+            }
+
+            stack.push(item);
+        }
+
+        if (depth) {
+            // console.log('depth is ' + depth);
+            // console.log('stack is ' + stack.length);
+            stack.splice(0, depth);
+        }
+
+        var result = {
+            'name': ex.name,
+            'message': ex.message,
+            'url': getLocationHref(),
+            'stack': stack
+        };
+        augmentStackTraceWithInitialElement(result, ex.sourceURL || ex.fileName, ex.line || ex.lineNumber, ex.message || ex.description);
+        return result;
+    }
+
+    /**
+     * Computes a stack trace for an exception.
+     * @param {Error} ex
+     * @param {(string|number)=} depth
+     */
+    function computeStackTrace(ex, depth) {
+        var stack = null;
+        depth = (depth == null ? 0 : +depth);
+
+        try {
+            // This must be tried first because Opera 10 *destroys*
+            // its stacktrace property if you try to access the stack
+            // property first!!
+            stack = computeStackTraceFromStacktraceProp(ex);
+            if (stack) {
+                return stack;
+            }
+        } catch (e) {
+            if (TraceKit.debug) {
+                throw e;
+            }
+        }
+
+        try {
+            stack = computeStackTraceFromStackProp(ex);
+            if (stack) {
+                return stack;
+            }
+        } catch (e) {
+            if (TraceKit.debug) {
+                throw e;
+            }
+        }
+
+        try {
+            stack = computeStackTraceFromOperaMultiLineMessage(ex);
+            if (stack) {
+                return stack;
+            }
+        } catch (e) {
+            if (TraceKit.debug) {
+                throw e;
+            }
+        }
+
+        try {
+            stack = computeStackTraceByWalkingCallerChain(ex, depth + 1);
+            if (stack) {
+                return stack;
+            }
+        } catch (e) {
+            if (TraceKit.debug) {
+                throw e;
+            }
+        }
+
+        return {
+            'name': ex.name,
+            'message': ex.message,
+            'url': getLocationHref()
+        };
+    }
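+
+    /*
+     * Typical usage (editor's sketch, not part of TraceKit): call it from a
+     * catch block to get a normalized trace regardless of browser.
+     *
+     *   try {
+     *       undefinedFunc();
+     *   } catch (ex) {
+     *       var info = TraceKit.computeStackTrace(ex);
+     *       // info => { name: ..., message: ..., url: ..., stack: [frames] }
+     *   }
+     */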
+
+    computeStackTrace.augmentStackTraceWithInitialElement = augmentStackTraceWithInitialElement;
+    computeStackTrace.computeStackTraceFromStackProp = computeStackTraceFromStackProp;
+
+    return computeStackTrace;
+}());
+
+module.exports = TraceKit;
+
+},{"6":6}]},{},[5])(5)
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/libs/require.js b/client/galaxy/scripts/libs/require.js
new file mode 100644
index 0000000..23ddb4e
--- /dev/null
+++ b/client/galaxy/scripts/libs/require.js
@@ -0,0 +1,2142 @@
+/** vim: et:ts=4:sw=4:sts=4
+ * @license RequireJS 2.2.0 Copyright jQuery Foundation and other contributors.
+ * Released under MIT license, http://github.com/requirejs/requirejs/LICENSE
+ */
+//Not using strict: uneven strict support in browsers, #392, and causes
+//problems with requirejs.exec()/transpiler plugins that may not be strict.
+/*jslint regexp: true, nomen: true, sloppy: true */
+/*global window, navigator, document, importScripts, setTimeout, opera */
+
+var requirejs, require, define;
+(function (global) {
+    var req, s, head, baseElement, dataMain, src,
+        interactiveScript, currentlyAddingScript, mainScript, subPath,
+        version = '2.2.0',
+        commentRegExp = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg,
+        cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g,
+        jsSuffixRegExp = /\.js$/,
+        currDirRegExp = /^\.\//,
+        op = Object.prototype,
+        ostring = op.toString,
+        hasOwn = op.hasOwnProperty,
+        isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document),
+        isWebWorker = !isBrowser && typeof importScripts !== 'undefined',
+        //PS3 indicates loaded and complete, but need to wait for complete
+        //specifically. Sequence is 'loading', 'loaded', execution,
+        // then 'complete'. The UA check is unfortunate, but not sure how
+        //to feature test w/o causing perf issues.
+        readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ?
+                      /^complete$/ : /^(complete|loaded)$/,
+        defContextName = '_',
+        //Oh the tragedy, detecting opera. See the usage of isOpera for reason.
+        isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]',
+        contexts = {},
+        cfg = {},
+        globalDefQueue = [],
+        useInteractive = false;
+
+    //Could match something like ')//comment', do not lose the prefix to comment.
+    function commentReplace(match, multi, multiText, singlePrefix) {
+        return singlePrefix || '';
+    }
+
+    function isFunction(it) {
+        return ostring.call(it) === '[object Function]';
+    }
+
+    function isArray(it) {
+        return ostring.call(it) === '[object Array]';
+    }
+
+    /**
+     * Helper function for iterating over an array. If the func returns
+     * a true value, it will break out of the loop.
+     */
+    function each(ary, func) {
+        if (ary) {
+            var i;
+            for (i = 0; i < ary.length; i += 1) {
+                if (ary[i] && func(ary[i], i, ary)) {
+                    break;
+                }
+            }
+        }
+    }
+
+    /**
+     * Helper function for iterating over an array backwards. If the func
+     * returns a true value, it will break out of the loop.
+     */
+    function eachReverse(ary, func) {
+        if (ary) {
+            var i;
+            for (i = ary.length - 1; i > -1; i -= 1) {
+                if (ary[i] && func(ary[i], i, ary)) {
+                    break;
+                }
+            }
+        }
+    }
+
+    function hasProp(obj, prop) {
+        return hasOwn.call(obj, prop);
+    }
+
+    function getOwn(obj, prop) {
+        return hasProp(obj, prop) && obj[prop];
+    }
+
+    /**
+     * Cycles over properties in an object and calls a function for each
+     * property value. If the function returns a truthy value, then the
+     * iteration is stopped.
+     */
+    function eachProp(obj, func) {
+        var prop;
+        for (prop in obj) {
+            if (hasProp(obj, prop)) {
+                if (func(obj[prop], prop)) {
+                    break;
+                }
+            }
+        }
+    }
+
+    /**
+     * Simple function to mix in properties from source into target,
+     * but only if target does not already have a property of the same name.
+     */
+    function mixin(target, source, force, deepStringMixin) {
+        if (source) {
+            eachProp(source, function (value, prop) {
+                if (force || !hasProp(target, prop)) {
+                    if (deepStringMixin && typeof value === 'object' && value &&
+                        !isArray(value) && !isFunction(value) &&
+                        !(value instanceof RegExp)) {
+
+                        if (!target[prop]) {
+                            target[prop] = {};
+                        }
+                        mixin(target[prop], value, force, deepStringMixin);
+                    } else {
+                        target[prop] = value;
+                    }
+                }
+            });
+        }
+        return target;
+    }
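+
+    /*
+     * Illustrative sketch (editor's example, not part of RequireJS):
+     *
+     *   mixin({a: 1}, {a: 2, b: 3});        // => {a: 1, b: 3}
+     *   mixin({a: 1}, {a: 2, b: 3}, true);  // => {a: 2, b: 3}
+     */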
+
+    //Similar to Function.prototype.bind, but the 'this' object is specified
+    //first, since it is easier to read/figure out what 'this' will be.
+    function bind(obj, fn) {
+        return function () {
+            return fn.apply(obj, arguments);
+        };
+    }
+
+    function scripts() {
+        return document.getElementsByTagName('script');
+    }
+
+    function defaultOnError(err) {
+        throw err;
+    }
+
+    //Allow getting a global that is expressed in
+    //dot notation, like 'a.b.c'.
+    function getGlobal(value) {
+        if (!value) {
+            return value;
+        }
+        var g = global;
+        each(value.split('.'), function (part) {
+            g = g[part];
+        });
+        return g;
+    }
+
+    /**
+     * Constructs an error with a pointer to a URL with more information.
+     * @param {String} id the error ID that maps to an ID on a web page.
+     * @param {String} msg human readable error message.
+     * @param {Error} [err] the original error, if there is one.
+     * @param {Array} [requireModules] the module IDs involved, if any.
+     *
+     * @returns {Error}
+     */
+    function makeError(id, msg, err, requireModules) {
+        var e = new Error(msg + '\nhttp://requirejs.org/docs/errors.html#' + id);
+        e.requireType = id;
+        e.requireModules = requireModules;
+        if (err) {
+            e.originalError = err;
+        }
+        return e;
+    }
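+
+    /*
+     * Illustrative sketch (editor's example, not part of RequireJS):
+     *
+     *   var e = makeError('timeout', 'Load timeout for modules: foo', null, ['foo']);
+     *   // e.message ends with a link to requirejs.org/docs/errors.html#timeout,
+     *   // e.requireType === 'timeout', e.requireModules === ['foo']
+     */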
+
+    if (typeof define !== 'undefined') {
+        //If a define is already in play via another AMD loader,
+        //do not overwrite.
+        return;
+    }
+
+    if (typeof requirejs !== 'undefined') {
+        if (isFunction(requirejs)) {
+            //Do not overwrite an existing requirejs instance.
+            return;
+        }
+        cfg = requirejs;
+        requirejs = undefined;
+    }
+
+    //Allow for a require config object
+    if (typeof require !== 'undefined' && !isFunction(require)) {
+        //assume it is a config object.
+        cfg = require;
+        require = undefined;
+    }
+
+    function newContext(contextName) {
+        var inCheckLoaded, Module, context, handlers,
+            checkLoadedTimeoutId,
+            config = {
+                //Defaults. Do not set a default for map
+                //config to speed up normalize(), which
+                //will run faster if there is no default.
+                waitSeconds: 7,
+                baseUrl: './',
+                paths: {},
+                bundles: {},
+                pkgs: {},
+                shim: {},
+                config: {}
+            },
+            registry = {},
+            //registry of just enabled modules, to speed
+            //cycle breaking code when lots of modules
+            //are registered, but not activated.
+            enabledRegistry = {},
+            undefEvents = {},
+            defQueue = [],
+            defined = {},
+            urlFetched = {},
+            bundlesMap = {},
+            requireCounter = 1,
+            unnormalizedCounter = 1;
+
+        /**
+         * Trims the . and .. from an array of path segments.
+         * It will keep a leading path segment if a .. will become
+         * the first path segment, to help with module name lookups,
+         * which act like paths but can be remapped. The end result is that
+         * all paths that use this function should look normalized.
+         * NOTE: this method MODIFIES the input array.
+         * @param {Array} ary the array of path segments.
+         */
+        function trimDots(ary) {
+            var i, part;
+            for (i = 0; i < ary.length; i++) {
+                part = ary[i];
+                if (part === '.') {
+                    ary.splice(i, 1);
+                    i -= 1;
+                } else if (part === '..') {
+                    // If at the start, or the previous value is still ..,
+                    // keep them so that the result may still work when
+                    // converted to a path, even though as an ID it is less
+                    // than ideal. In larger point releases, it may be better
+                    // to just kick out an error.
+                    if (i === 0 || (i === 1 && ary[2] === '..') || ary[i - 1] === '..') {
+                        continue;
+                    } else if (i > 0) {
+                        ary.splice(i - 1, 2);
+                        i -= 2;
+                    }
+                }
+            }
+        }
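+
+        /*
+         * Illustrative sketch (editor's example, not part of RequireJS):
+         *
+         *   var a = ['a', 'b', '..', 'c'];  trimDots(a);  // a => ['a', 'c']
+         *   var b = ['..', 'a'];            trimDots(b);  // b => ['..', 'a'] (leading .. kept)
+         */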
+
+        /**
+         * Given a relative module name, like ./something, normalize it to
+         * a real name that can be mapped to a path.
+         * @param {String} name the relative name
+         * @param {String} baseName a real name that the name arg is relative
+         * to.
+         * @param {Boolean} applyMap apply the map config to the value. Should
+         * only be done if this normalization is for a dependency ID.
+         * @returns {String} normalized name
+         */
+        function normalize(name, baseName, applyMap) {
+            var pkgMain, mapValue, nameParts, i, j, nameSegment, lastIndex,
+                foundMap, foundI, foundStarMap, starI, normalizedBaseParts,
+                baseParts = (baseName && baseName.split('/')),
+                map = config.map,
+                starMap = map && map['*'];
+
+            //Adjust any relative paths.
+            if (name) {
+                name = name.split('/');
+                lastIndex = name.length - 1;
+
+                // If wanting node ID compatibility, strip .js from end
+                // of IDs. Have to do this here, and not in nameToUrl
+                // because node allows either .js or non .js to map
+                // to same file.
+                if (config.nodeIdCompat && jsSuffixRegExp.test(name[lastIndex])) {
+                    name[lastIndex] = name[lastIndex].replace(jsSuffixRegExp, '');
+                }
+
+                // Starts with a '.' so need the baseName
+                if (name[0].charAt(0) === '.' && baseParts) {
+                    //Convert baseName to array, and lop off the last part,
+                    //so that . matches that 'directory' and not name of the baseName's
+                    //module. For instance, baseName of 'one/two/three', maps to
+                    //'one/two/three.js', but we want the directory, 'one/two' for
+                    //this normalization.
+                    normalizedBaseParts = baseParts.slice(0, baseParts.length - 1);
+                    name = normalizedBaseParts.concat(name);
+                }
+
+                trimDots(name);
+                name = name.join('/');
+            }
+
+            //Apply map config if available.
+            if (applyMap && map && (baseParts || starMap)) {
+                nameParts = name.split('/');
+
+                outerLoop: for (i = nameParts.length; i > 0; i -= 1) {
+                    nameSegment = nameParts.slice(0, i).join('/');
+
+                    if (baseParts) {
+                        //Find the longest baseName segment match in the config.
+                        //So, do joins on the biggest to smallest lengths of baseParts.
+                        for (j = baseParts.length; j > 0; j -= 1) {
+                            mapValue = getOwn(map, baseParts.slice(0, j).join('/'));
+
+                            //baseName segment has config, find if it has one for
+                            //this name.
+                            if (mapValue) {
+                                mapValue = getOwn(mapValue, nameSegment);
+                                if (mapValue) {
+                                    //Match, update name to the new value.
+                                    foundMap = mapValue;
+                                    foundI = i;
+                                    break outerLoop;
+                                }
+                            }
+                        }
+                    }
+
+                    //Check for a star map match, but just hold on to it,
+                    //if there is a shorter segment match later in a matching
+                    //config, then favor over this star map.
+                    if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) {
+                        foundStarMap = getOwn(starMap, nameSegment);
+                        starI = i;
+                    }
+                }
+
+                if (!foundMap && foundStarMap) {
+                    foundMap = foundStarMap;
+                    foundI = starI;
+                }
+
+                if (foundMap) {
+                    nameParts.splice(0, foundI, foundMap);
+                    name = nameParts.join('/');
+                }
+            }
+
+            // If the name points to a package's name, use
+            // the package main instead.
+            pkgMain = getOwn(config.pkgs, name);
+
+            return pkgMain ? pkgMain : name;
+        }
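+
+        /*
+         * Illustrative sketch (editor's example, not part of RequireJS):
+         * with no map config in play,
+         *
+         *   normalize('./c', 'a/b', false)   // => 'a/c'
+         *   normalize('../d', 'a/b', false)  // => 'd'
+         */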
+
+        function removeScript(name) {
+            if (isBrowser) {
+                each(scripts(), function (scriptNode) {
+                    if (scriptNode.getAttribute('data-requiremodule') === name &&
+                            scriptNode.getAttribute('data-requirecontext') === context.contextName) {
+                        scriptNode.parentNode.removeChild(scriptNode);
+                        return true;
+                    }
+                });
+            }
+        }
+
+        function hasPathFallback(id) {
+            var pathConfig = getOwn(config.paths, id);
+            if (pathConfig && isArray(pathConfig) && pathConfig.length > 1) {
+                //Pop off the first array value, since it failed, and
+                //retry
+                pathConfig.shift();
+                context.require.undef(id);
+
+                //Custom require that does not do map translation, since
+                //ID is "absolute", already mapped/resolved.
+                context.makeRequire(null, {
+                    skipMap: true
+                })([id]);
+
+                return true;
+            }
+        }
+
+        //Turns a plugin!resource to [plugin, resource]
+        //with the plugin being undefined if the name
+        //did not have a plugin prefix.
+        function splitPrefix(name) {
+            var prefix,
+                index = name ? name.indexOf('!') : -1;
+            if (index > -1) {
+                prefix = name.substring(0, index);
+                name = name.substring(index + 1, name.length);
+            }
+            return [prefix, name];
+        }
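+
+        /*
+         * Illustrative sketch (editor's example, not part of RequireJS):
+         *
+         *   splitPrefix('text!tmpl.html')  // => ['text', 'tmpl.html']
+         *   splitPrefix('jquery')          // => [undefined, 'jquery']
+         */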
+
+        /**
+         * Creates a module mapping that includes plugin prefix, module
+         * name, and path. If parentModuleMap is provided it will
+         * also normalize the name via require.normalize()
+         *
+         * @param {String} name the module name
+         * @param {String} [parentModuleMap] parent module map
+         * for the module name, used to resolve relative names.
+         * @param {Boolean} isNormalized is the ID already normalized.
+         * This is true if this call is done for a define() module ID.
+         * @param {Boolean} applyMap apply the map config to the ID.
+         * Should only be true if this map is for a dependency.
+         *
+         * @returns {Object}
+         */
+        function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) {
+            var url, pluginModule, suffix, nameParts,
+                prefix = null,
+                parentName = parentModuleMap ? parentModuleMap.name : null,
+                originalName = name,
+                isDefine = true,
+                normalizedName = '';
+
+            //If no name, then it means it is a require call, generate an
+            //internal name.
+            if (!name) {
+                isDefine = false;
+                name = '_@r' + (requireCounter += 1);
+            }
+
+            nameParts = splitPrefix(name);
+            prefix = nameParts[0];
+            name = nameParts[1];
+
+            if (prefix) {
+                prefix = normalize(prefix, parentName, applyMap);
+                pluginModule = getOwn(defined, prefix);
+            }
+
+            //Account for relative paths if there is a base name.
+            if (name) {
+                if (prefix) {
+                    if (pluginModule && pluginModule.normalize) {
+                        //Plugin is loaded, use its normalize method.
+                        normalizedName = pluginModule.normalize(name, function (name) {
+                            return normalize(name, parentName, applyMap);
+                        });
+                    } else {
+                        // If there are nested plugin references, do not try
+                        // to normalize, as it will not normalize correctly.
+                        // This places a restriction on resourceIds; the
+                        // longer-term solution is to defer all normalization
+                        // until plugins are loaded, to allow for async
+                        // loading of a loader plugin. For now this fixes the
+                        // common uses. Details in #1131.
+                        normalizedName = name.indexOf('!') === -1 ?
+                                         normalize(name, parentName, applyMap) :
+                                         name;
+                    }
+                } else {
+                    //A regular module.
+                    normalizedName = normalize(name, parentName, applyMap);
+
+                    //Normalized name may be a plugin ID due to map config
+                    //application in normalize. The map config values must
+                    //already be normalized, so do not need to redo that part.
+                    nameParts = splitPrefix(normalizedName);
+                    prefix = nameParts[0];
+                    normalizedName = nameParts[1];
+                    isNormalized = true;
+
+                    url = context.nameToUrl(normalizedName);
+                }
+            }
+
+            //If the id is a plugin id and it cannot yet be determined whether
+            //it needs normalization, stamp it with a unique ID so two matching
+            //relative ids that may conflict can be kept separate.
+            suffix = prefix && !pluginModule && !isNormalized ?
+                     '_unnormalized' + (unnormalizedCounter += 1) :
+                     '';
+
+            return {
+                prefix: prefix,
+                name: normalizedName,
+                parentMap: parentModuleMap,
+                unnormalized: !!suffix,
+                url: url,
+                originalName: originalName,
+                isDefine: isDefine,
+                id: (prefix ?
+                        prefix + '!' + normalizedName :
+                        normalizedName) + suffix
+            };
+        }
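+
+        /*
+         * Illustrative sketch (editor's example, not part of RequireJS): for
+         * an already loaded 'text' plugin, makeModuleMap('text!tmpl.html',
+         * parentMap) yields an object shaped like
+         *
+         *   { prefix: 'text', name: 'tmpl.html', parentMap: parentMap,
+         *     unnormalized: false, url: undefined,
+         *     originalName: 'text!tmpl.html', isDefine: true,
+         *     id: 'text!tmpl.html' }
+         */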
+
+        function getModule(depMap) {
+            var id = depMap.id,
+                mod = getOwn(registry, id);
+
+            if (!mod) {
+                mod = registry[id] = new context.Module(depMap);
+            }
+
+            return mod;
+        }
+
+        function on(depMap, name, fn) {
+            var id = depMap.id,
+                mod = getOwn(registry, id);
+
+            if (hasProp(defined, id) &&
+                    (!mod || mod.defineEmitComplete)) {
+                if (name === 'defined') {
+                    fn(defined[id]);
+                }
+            } else {
+                mod = getModule(depMap);
+                if (mod.error && name === 'error') {
+                    fn(mod.error);
+                } else {
+                    mod.on(name, fn);
+                }
+            }
+        }
+
+        function onError(err, errback) {
+            var ids = err.requireModules,
+                notified = false;
+
+            if (errback) {
+                errback(err);
+            } else {
+                each(ids, function (id) {
+                    var mod = getOwn(registry, id);
+                    if (mod) {
+                        //Set error on module, so it skips timeout checks.
+                        mod.error = err;
+                        if (mod.events.error) {
+                            notified = true;
+                            mod.emit('error', err);
+                        }
+                    }
+                });
+
+                if (!notified) {
+                    req.onError(err);
+                }
+            }
+        }
+
+        /**
+         * Internal method to transfer globalQueue items to this context's
+         * defQueue.
+         */
+        function takeGlobalQueue() {
+            //Push all the globalDefQueue items into the context's defQueue
+            if (globalDefQueue.length) {
+                each(globalDefQueue, function(queueItem) {
+                    var id = queueItem[0];
+                    if (typeof id === 'string') {
+                        context.defQueueMap[id] = true;
+                    }
+                    defQueue.push(queueItem);
+                });
+                globalDefQueue = [];
+            }
+        }
+
+        handlers = {
+            'require': function (mod) {
+                if (mod.require) {
+                    return mod.require;
+                } else {
+                    return (mod.require = context.makeRequire(mod.map));
+                }
+            },
+            'exports': function (mod) {
+                mod.usingExports = true;
+                if (mod.map.isDefine) {
+                    if (mod.exports) {
+                        return (defined[mod.map.id] = mod.exports);
+                    } else {
+                        return (mod.exports = defined[mod.map.id] = {});
+                    }
+                }
+            },
+            'module': function (mod) {
+                if (mod.module) {
+                    return mod.module;
+                } else {
+                    return (mod.module = {
+                        id: mod.map.id,
+                        uri: mod.map.url,
+                        config: function () {
+                            return getOwn(config.config, mod.map.id) || {};
+                        },
+                        exports: mod.exports || (mod.exports = {})
+                    });
+                }
+            }
+        };
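+
+        /*
+         * These handlers back the three special dependency names. A module
+         * can request them positionally in the CommonJS style (editor's
+         * sketch, not part of this file):
+         *
+         *   define(['require', 'exports', 'module'], function (require, exports, module) {
+         *       exports.answer = 42;
+         *   });
+         */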
+
+        function cleanRegistry(id) {
+            //Clean up machinery used for waiting modules.
+            delete registry[id];
+            delete enabledRegistry[id];
+        }
+
+        function breakCycle(mod, traced, processed) {
+            var id = mod.map.id;
+
+            if (mod.error) {
+                mod.emit('error', mod.error);
+            } else {
+                traced[id] = true;
+                each(mod.depMaps, function (depMap, i) {
+                    var depId = depMap.id,
+                        dep = getOwn(registry, depId);
+
+                    //Only force things that have not completed
+                    //being defined, so still in the registry,
+                    //and only if it has not been matched up
+                    //in the module already.
+                    if (dep && !mod.depMatched[i] && !processed[depId]) {
+                        if (getOwn(traced, depId)) {
+                            mod.defineDep(i, defined[depId]);
+                            mod.check(); //pass false?
+                        } else {
+                            breakCycle(dep, traced, processed);
+                        }
+                    }
+                });
+                processed[id] = true;
+            }
+        }
+
+        function checkLoaded() {
+            var err, usingPathFallback,
+                waitInterval = config.waitSeconds * 1000,
+                //It is possible to disable the wait interval by using waitSeconds of 0.
+                expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(),
+                noLoads = [],
+                reqCalls = [],
+                stillLoading = false,
+                needCycleCheck = true;
+
+            //Do not bother if this call was a result of a cycle break.
+            if (inCheckLoaded) {
+                return;
+            }
+
+            inCheckLoaded = true;
+
+            //Figure out the state of all the modules.
+            eachProp(enabledRegistry, function (mod) {
+                var map = mod.map,
+                    modId = map.id;
+
+                //Skip things that are not enabled or in error state.
+                if (!mod.enabled) {
+                    return;
+                }
+
+                if (!map.isDefine) {
+                    reqCalls.push(mod);
+                }
+
+                if (!mod.error) {
+                    //If the module should be executed, and it has not
+                    //been inited and time is up, remember it.
+                    if (!mod.inited && expired) {
+                        if (hasPathFallback(modId)) {
+                            usingPathFallback = true;
+                            stillLoading = true;
+                        } else {
+                            noLoads.push(modId);
+                            removeScript(modId);
+                        }
+                    } else if (!mod.inited && mod.fetched && map.isDefine) {
+                        stillLoading = true;
+                        if (!map.prefix) {
+                            //No reason to keep looking for unfinished
+                            //loading. If the only stillLoading is a
+                            //plugin resource though, keep going,
+                            //because it may be that a plugin resource
+                            //is waiting on a non-plugin cycle.
+                            return (needCycleCheck = false);
+                        }
+                    }
+                }
+            });
+
+            if (expired && noLoads.length) {
+                //If wait time expired, throw error of unloaded modules.
+                err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads);
+                err.contextName = context.contextName;
+                return onError(err);
+            }
+
+            //Not expired, check for a cycle.
+            if (needCycleCheck) {
+                each(reqCalls, function (mod) {
+                    breakCycle(mod, {}, {});
+                });
+            }
+
+            //If still waiting on loads, and the waiting load is something
+            //other than a plugin resource, or there are still outstanding
+            //scripts, then just try back later.
+            if ((!expired || usingPathFallback) && stillLoading) {
+                //Something is still waiting to load. Wait for it, but only
+                //if a timeout is not already in effect.
+                if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) {
+                    checkLoadedTimeoutId = setTimeout(function () {
+                        checkLoadedTimeoutId = 0;
+                        checkLoaded();
+                    }, 50);
+                }
+            }
+
+            inCheckLoaded = false;
+        }
+
+        Module = function (map) {
+            this.events = getOwn(undefEvents, map.id) || {};
+            this.map = map;
+            this.shim = getOwn(config.shim, map.id);
+            this.depExports = [];
+            this.depMaps = [];
+            this.depMatched = [];
+            this.pluginMaps = {};
+            this.depCount = 0;
+
+            /* Set lazily as needed:
+               this.exports, this.factory,
+               this.enabled, this.fetched
+            */
+        };
+
+        Module.prototype = {
+            init: function (depMaps, factory, errback, options) {
+                options = options || {};
+
+                //Do not do more inits if already done. Can happen if there
+                //are multiple define calls for the same module. That is not
+                //a normal, common case, but it is also not unexpected.
+                if (this.inited) {
+                    return;
+                }
+
+                this.factory = factory;
+
+                if (errback) {
+                    //Register for errors on this module.
+                    this.on('error', errback);
+                } else if (this.events.error) {
+                    //If no errback already, but there are error listeners
+                    //on this module, set up an errback to pass to the deps.
+                    errback = bind(this, function (err) {
+                        this.emit('error', err);
+                    });
+                }
+
+                //Do a copy of the dependency array, so that
+                //source inputs are not modified. For example
+                //"shim" deps are passed in here directly, and
+                //doing a direct modification of the depMaps array
+                //would affect that config.
+                this.depMaps = depMaps && depMaps.slice(0);
+
+                this.errback = errback;
+
+                //Indicate this module has been initialized
+                this.inited = true;
+
+                this.ignore = options.ignore;
+
+                //Could have option to init this module in enabled mode,
+                //or could have been previously marked as enabled. However,
+                //the dependencies are not known until init is called. So
+                //if enabled previously, now trigger dependencies as enabled.
+                if (options.enabled || this.enabled) {
+                    //Enable this module and dependencies.
+                    //Will call this.check()
+                    this.enable();
+                } else {
+                    this.check();
+                }
+            },
+
+            defineDep: function (i, depExports) {
+                //Because of cycles, defined callback for a given
+                //export can be called more than once.
+                if (!this.depMatched[i]) {
+                    this.depMatched[i] = true;
+                    this.depCount -= 1;
+                    this.depExports[i] = depExports;
+                }
+            },
+
+            fetch: function () {
+                if (this.fetched) {
+                    return;
+                }
+                this.fetched = true;
+
+                context.startTime = (new Date()).getTime();
+
+                var map = this.map;
+
+                //If the manager is for a plugin managed resource,
+                //ask the plugin to load it now.
+                if (this.shim) {
+                    context.makeRequire(this.map, {
+                        enableBuildCallback: true
+                    })(this.shim.deps || [], bind(this, function () {
+                        return map.prefix ? this.callPlugin() : this.load();
+                    }));
+                } else {
+                    //Regular dependency.
+                    return map.prefix ? this.callPlugin() : this.load();
+                }
+            },
+
+            load: function () {
+                var url = this.map.url;
+
+                //Regular dependency.
+                if (!urlFetched[url]) {
+                    urlFetched[url] = true;
+                    context.load(this.map.id, url);
+                }
+            },
+
+            /**
+             * Checks if the module is ready to define itself, and if so,
+             * define it.
+             */
+            check: function () {
+                if (!this.enabled || this.enabling) {
+                    return;
+                }
+
+                var err, cjsModule,
+                    id = this.map.id,
+                    depExports = this.depExports,
+                    exports = this.exports,
+                    factory = this.factory;
+
+                if (!this.inited) {
+                    // Only fetch if not already in the defQueue.
+                    if (!hasProp(context.defQueueMap, id)) {
+                        this.fetch();
+                    }
+                } else if (this.error) {
+                    this.emit('error', this.error);
+                } else if (!this.defining) {
+                    //The factory could trigger another require call
+                    //that would result in checking this module to
+                    //define itself again. If already in the process
+                    //of doing that, skip this work.
+                    this.defining = true;
+
+                    if (this.depCount < 1 && !this.defined) {
+                        if (isFunction(factory)) {
+                            //If there is an error listener, favor passing
+                            //to that instead of throwing an error. However,
+                            //only do it for define()'d modules. require
+                            //errbacks should not be called for failures in
+                            //their callbacks (#699). However if a global
+                            //onError is set, use that.
+                            if ((this.events.error && this.map.isDefine) ||
+                                req.onError !== defaultOnError) {
+                                try {
+                                    exports = context.execCb(id, factory, depExports, exports);
+                                } catch (e) {
+                                    err = e;
+                                }
+                            } else {
+                                exports = context.execCb(id, factory, depExports, exports);
+                            }
+
+                            // Favor return value over exports. If node/cjs in play,
+                            // then will not have a return value anyway. Favor
+                            // module.exports assignment over exports object.
+                            if (this.map.isDefine && exports === undefined) {
+                                cjsModule = this.module;
+                                if (cjsModule) {
+                                    exports = cjsModule.exports;
+                                } else if (this.usingExports) {
+                                    //exports already set the defined value.
+                                    exports = this.exports;
+                                }
+                            }
+
+                            if (err) {
+                                err.requireMap = this.map;
+                                err.requireModules = this.map.isDefine ? [this.map.id] : null;
+                                err.requireType = this.map.isDefine ? 'define' : 'require';
+                                return onError((this.error = err));
+                            }
+
+                        } else {
+                            //Just a literal value
+                            exports = factory;
+                        }
+
+                        this.exports = exports;
+
+                        if (this.map.isDefine && !this.ignore) {
+                            defined[id] = exports;
+
+                            if (req.onResourceLoad) {
+                                var resLoadMaps = [];
+                                each(this.depMaps, function (depMap) {
+                                    resLoadMaps.push(depMap.normalizedMap || depMap);
+                                });
+                                req.onResourceLoad(context, this.map, resLoadMaps);
+                            }
+                        }
+
+                        //Clean up
+                        cleanRegistry(id);
+
+                        this.defined = true;
+                    }
+
+                    //Finished the define stage. Allow calling check again
+                    //to allow define notifications below in the case of a
+                    //cycle.
+                    this.defining = false;
+
+                    if (this.defined && !this.defineEmitted) {
+                        this.defineEmitted = true;
+                        this.emit('defined', this.exports);
+                        this.defineEmitComplete = true;
+                    }
+
+                }
+            },
+
+            callPlugin: function () {
+                var map = this.map,
+                    id = map.id,
+                    //Map already normalized the prefix.
+                    pluginMap = makeModuleMap(map.prefix);
+
+                //Mark this as a dependency for this plugin, so it
+                //can be traced for cycles.
+                this.depMaps.push(pluginMap);
+
+                on(pluginMap, 'defined', bind(this, function (plugin) {
+                    var load, normalizedMap, normalizedMod,
+                        bundleId = getOwn(bundlesMap, this.map.id),
+                        name = this.map.name,
+                        parentName = this.map.parentMap ? this.map.parentMap.name : null,
+                        localRequire = context.makeRequire(map.parentMap, {
+                            enableBuildCallback: true
+                        });
+
+                    //If current map is not normalized, wait for that
+                    //normalized name to load instead of continuing.
+                    if (this.map.unnormalized) {
+                        //Normalize the ID if the plugin allows it.
+                        if (plugin.normalize) {
+                            name = plugin.normalize(name, function (name) {
+                                return normalize(name, parentName, true);
+                            }) || '';
+                        }
+
+                        //prefix and name should already be normalized, no need
+                        //for applying map config again either.
+                        normalizedMap = makeModuleMap(map.prefix + '!' + name,
+                                                      this.map.parentMap);
+                        on(normalizedMap,
+                            'defined', bind(this, function (value) {
+                                this.map.normalizedMap = normalizedMap;
+                                this.init([], function () { return value; }, null, {
+                                    enabled: true,
+                                    ignore: true
+                                });
+                            }));
+
+                        normalizedMod = getOwn(registry, normalizedMap.id);
+                        if (normalizedMod) {
+                            //Mark this as a dependency for this plugin, so it
+                            //can be traced for cycles.
+                            this.depMaps.push(normalizedMap);
+
+                            if (this.events.error) {
+                                normalizedMod.on('error', bind(this, function (err) {
+                                    this.emit('error', err);
+                                }));
+                            }
+                            normalizedMod.enable();
+                        }
+
+                        return;
+                    }
+
+                    //If a paths config, then just load that file instead to
+                    //resolve the plugin, as it is built into that paths layer.
+                    if (bundleId) {
+                        this.map.url = context.nameToUrl(bundleId);
+                        this.load();
+                        return;
+                    }
+
+                    load = bind(this, function (value) {
+                        this.init([], function () { return value; }, null, {
+                            enabled: true
+                        });
+                    });
+
+                    load.error = bind(this, function (err) {
+                        this.inited = true;
+                        this.error = err;
+                        err.requireModules = [id];
+
+                        //Remove temp unnormalized modules for this module,
+                        //since they will never be resolved otherwise now.
+                        eachProp(registry, function (mod) {
+                            if (mod.map.id.indexOf(id + '_unnormalized') === 0) {
+                                cleanRegistry(mod.map.id);
+                            }
+                        });
+
+                        onError(err);
+                    });
+
+                    //Allow plugins to load other code without having to know the
+                    //context or how to 'complete' the load.
+                    load.fromText = bind(this, function (text, textAlt) {
+                        /*jslint evil: true */
+                        var moduleName = map.name,
+                            moduleMap = makeModuleMap(moduleName),
+                            hasInteractive = useInteractive;
+
+                        //As of 2.1.0, support just passing the text, to reinforce
+                        //fromText only being called once per resource. Still
+                        //support old style of passing moduleName but discard
+                        //that moduleName in favor of the internal ref.
+                        if (textAlt) {
+                            text = textAlt;
+                        }
+
+                        //Turn off interactive script matching for IE for any define
+                        //calls in the text, then turn it back on at the end.
+                        if (hasInteractive) {
+                            useInteractive = false;
+                        }
+
+                        //Prime the system by creating a module instance for
+                        //it.
+                        getModule(moduleMap);
+
+                        //Transfer any config to this other module.
+                        if (hasProp(config.config, id)) {
+                            config.config[moduleName] = config.config[id];
+                        }
+
+                        try {
+                            req.exec(text);
+                        } catch (e) {
+                            return onError(makeError('fromtexteval',
+                                             'fromText eval for ' + id +
+                                            ' failed: ' + e,
+                                             e,
+                                             [id]));
+                        }
+
+                        if (hasInteractive) {
+                            useInteractive = true;
+                        }
+
+                        //Mark this as a dependency for the plugin
+                        //resource
+                        this.depMaps.push(moduleMap);
+
+                        //Support anonymous modules.
+                        context.completeLoad(moduleName);
+
+                        //Bind the value of that module to the value for this
+                        //resource ID.
+                        localRequire([moduleName], load);
+                    });
+
+                    //Use parentName here since the plugin's name is not reliable,
+                    //could be some weird string with no path that actually wants to
+                    //reference the parentName's path.
+                    plugin.load(map.name, localRequire, load, config);
+                }));
+
+                context.enable(pluginMap, this);
+                this.pluginMaps[pluginMap.id] = pluginMap;
+            },
+
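+            //Illustrative sketch (not part of this file): callPlugin above
+            //drives loader plugins through exactly this interface --
+            //plugin.load(name, parentRequire, onload, config), where onload
+            //also carries .error and .fromText. The plugin id 'upper' below
+            //is hypothetical.
+            //
+            //    define('upper', function () {
+            //        return {
+            //            load: function (name, parentRequire, onload) {
+            //                parentRequire([name], function (value) {
+            //                    onload(String(value).toUpperCase());
+            //                });
+            //            }
+            //        };
+            //    });
+            //
+            //A require(['upper!greeting'], cb) call would then resolve
+            //'greeting', uppercase it, and pass the result to cb.
+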
+            enable: function () {
+                enabledRegistry[this.map.id] = this;
+                this.enabled = true;
+
+                //Set flag mentioning that the module is enabling,
+                //so that immediate calls to the defined callbacks
+                //for dependencies do not trigger inadvertent load
+                //with the depCount still being zero.
+                this.enabling = true;
+
+                //Enable each dependency
+                each(this.depMaps, bind(this, function (depMap, i) {
+                    var id, mod, handler;
+
+                    if (typeof depMap === 'string') {
+                        //Dependency needs to be converted to a depMap
+                        //and wired up to this module.
+                        depMap = makeModuleMap(depMap,
+                                               (this.map.isDefine ? this.map : this.map.parentMap),
+                                               false,
+                                               !this.skipMap);
+                        this.depMaps[i] = depMap;
+
+                        handler = getOwn(handlers, depMap.id);
+
+                        if (handler) {
+                            this.depExports[i] = handler(this);
+                            return;
+                        }
+
+                        this.depCount += 1;
+
+                        on(depMap, 'defined', bind(this, function (depExports) {
+                            if (this.undefed) {
+                                return;
+                            }
+                            this.defineDep(i, depExports);
+                            this.check();
+                        }));
+
+                        if (this.errback) {
+                            on(depMap, 'error', bind(this, this.errback));
+                        } else if (this.events.error) {
+                            // No direct errback on this module, but something
+                            // else is listening for errors, so be sure to
+                            // propagate the error correctly.
+                            on(depMap, 'error', bind(this, function(err) {
+                                this.emit('error', err);
+                            }));
+                        }
+                    }
+
+                    id = depMap.id;
+                    mod = registry[id];
+
+                    //Skip special modules like 'require', 'exports', 'module'
+                    //Also, don't call enable if it is already enabled,
+                    //important in circular dependency cases.
+                    if (!hasProp(handlers, id) && mod && !mod.enabled) {
+                        context.enable(depMap, this);
+                    }
+                }));
+
+                //Enable each plugin that is used in
+                //a dependency
+                eachProp(this.pluginMaps, bind(this, function (pluginMap) {
+                    var mod = getOwn(registry, pluginMap.id);
+                    if (mod && !mod.enabled) {
+                        context.enable(pluginMap, this);
+                    }
+                }));
+
+                this.enabling = false;
+
+                this.check();
+            },
+
+            on: function (name, cb) {
+                var cbs = this.events[name];
+                if (!cbs) {
+                    cbs = this.events[name] = [];
+                }
+                cbs.push(cb);
+            },
+
+            emit: function (name, evt) {
+                each(this.events[name], function (cb) {
+                    cb(evt);
+                });
+                if (name === 'error') {
+                    //Now that the error handler was triggered, remove
+                    //the listeners, since this broken Module instance
+                    //can stay around for a while in the registry.
+                    delete this.events[name];
+                }
+            }
+        };
+
+        function callGetModule(args) {
+            //Skip modules already defined.
+            if (!hasProp(defined, args[0])) {
+                getModule(makeModuleMap(args[0], null, true)).init(args[1], args[2]);
+            }
+        }
+
+        function removeListener(node, func, name, ieName) {
+            //Favor detachEvent because of IE9
+            //issue, see attachEvent/addEventListener comment elsewhere
+            //in this file.
+            if (node.detachEvent && !isOpera) {
+                //Probably IE. If not it will throw an error, which will be
+                //useful to know.
+                if (ieName) {
+                    node.detachEvent(ieName, func);
+                }
+            } else {
+                node.removeEventListener(name, func, false);
+            }
+        }
+
+        /**
+         * Given an event from a script node, get the requirejs info from it,
+         * and then remove the event listeners on the node.
+         * @param {Event} evt
+         * @returns {Object}
+         */
+        function getScriptData(evt) {
+            //Using currentTarget instead of target for Firefox 2.0's sake. Not
+            //all old browsers will be supported, but this one was easy enough
+            //to support and still makes sense.
+            var node = evt.currentTarget || evt.srcElement;
+
+            //Remove the listeners once here.
+            removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange');
+            removeListener(node, context.onScriptError, 'error');
+
+            return {
+                node: node,
+                id: node && node.getAttribute('data-requiremodule')
+            };
+        }
+
+        function intakeDefines() {
+            var args;
+
+            //Any defined modules in the global queue, intake them now.
+            takeGlobalQueue();
+
+            //Make sure any remaining defQueue items get properly processed.
+            while (defQueue.length) {
+                args = defQueue.shift();
+                if (args[0] === null) {
+                    return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' +
+                        args[args.length - 1]));
+                } else {
+                    //args are id, deps, factory. Should be normalized by the
+                    //define() function.
+                    callGetModule(args);
+                }
+            }
+            context.defQueueMap = {};
+        }
+
+        context = {
+            config: config,
+            contextName: contextName,
+            registry: registry,
+            defined: defined,
+            urlFetched: urlFetched,
+            defQueue: defQueue,
+            defQueueMap: {},
+            Module: Module,
+            makeModuleMap: makeModuleMap,
+            nextTick: req.nextTick,
+            onError: onError,
+
+            /**
+             * Set a configuration for the context.
+             * @param {Object} cfg config object to integrate.
+             */
+            configure: function (cfg) {
+                //Make sure the baseUrl ends in a slash.
+                if (cfg.baseUrl) {
+                    if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') {
+                        cfg.baseUrl += '/';
+                    }
+                }
+
+                // Convert old style urlArgs string to a function.
+                if (typeof cfg.urlArgs === 'string') {
+                    var urlArgs = cfg.urlArgs;
+                    cfg.urlArgs = function(id, url) {
+                        return (url.indexOf('?') === -1 ? '?' : '&') + urlArgs;
+                    };
+                }
+
+                //Save off the paths since they require special processing,
+                //they are additive.
+                var shim = config.shim,
+                    objs = {
+                        paths: true,
+                        bundles: true,
+                        config: true,
+                        map: true
+                    };
+
+                eachProp(cfg, function (value, prop) {
+                    if (objs[prop]) {
+                        if (!config[prop]) {
+                            config[prop] = {};
+                        }
+                        mixin(config[prop], value, true, true);
+                    } else {
+                        config[prop] = value;
+                    }
+                });
+
+                //Reverse map the bundles
+                if (cfg.bundles) {
+                    eachProp(cfg.bundles, function (value, prop) {
+                        each(value, function (v) {
+                            if (v !== prop) {
+                                bundlesMap[v] = prop;
+                            }
+                        });
+                    });
+                }
+
+                //Merge shim
+                if (cfg.shim) {
+                    eachProp(cfg.shim, function (value, id) {
+                        //Normalize the structure
+                        if (isArray(value)) {
+                            value = {
+                                deps: value
+                            };
+                        }
+                        if ((value.exports || value.init) && !value.exportsFn) {
+                            value.exportsFn = context.makeShimExports(value);
+                        }
+                        shim[id] = value;
+                    });
+                    config.shim = shim;
+                }
+
+                //Adjust packages if necessary.
+                if (cfg.packages) {
+                    each(cfg.packages, function (pkgObj) {
+                        var location, name;
+
+                        pkgObj = typeof pkgObj === 'string' ? {name: pkgObj} : pkgObj;
+
+                        name = pkgObj.name;
+                        location = pkgObj.location;
+                        if (location) {
+                            config.paths[name] = pkgObj.location;
+                        }
+
+                        //Save pointer to main module ID for pkg name.
+                        //Remove leading dot in main, so main paths are normalized,
+                        //and remove any trailing .js, since different package
+                        //envs have different conventions: some use a module name,
+                        //some use a file name.
+                        config.pkgs[name] = pkgObj.name + '/' + (pkgObj.main || 'main')
+                                     .replace(currDirRegExp, '')
+                                     .replace(jsSuffixRegExp, '');
+                    });
+                }
+
+                //If there are any "waiting to execute" modules in the registry,
+                //update the maps for them, since their info, like URLs to load,
+                //may have changed.
+                eachProp(registry, function (mod, id) {
+                    //Skip modules that already have init called, since it is
+                    //too late to modify them; also ignore unnormalized ones,
+                    //since they are transient.
+                    if (!mod.inited && !mod.map.unnormalized) {
+                        mod.map = makeModuleMap(id, null, true);
+                    }
+                });
+
+                //If a deps array or a config callback is specified, then call
+                //require with those args. This is useful when require is defined as a
+                //config object before require.js is loaded.
+                if (cfg.deps || cfg.callback) {
+                    context.require(cfg.deps || [], cfg.callback);
+                }
+            },
+
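+            //Illustrative configuration for the merge rules above (module
+            //names hypothetical): paths, bundles, config and map entries are
+            //merged additively across configure() calls, while other keys
+            //are replaced wholesale.
+            //
+            //    require.config({
+            //        baseUrl: 'js',                 //trailing slash added above
+            //        paths: { jquery: 'libs/jquery' },
+            //        shim: {
+            //            legacy: {
+            //                deps: ['jquery'],
+            //                exports: 'Legacy'      //wrapped via makeShimExports
+            //            }
+            //        }
+            //    });
+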
+            makeShimExports: function (value) {
+                function fn() {
+                    var ret;
+                    if (value.init) {
+                        ret = value.init.apply(global, arguments);
+                    }
+                    return ret || (value.exports && getGlobal(value.exports));
+                }
+                return fn;
+            },
+
+            makeRequire: function (relMap, options) {
+                options = options || {};
+
+                function localRequire(deps, callback, errback) {
+                    var id, map, requireMod;
+
+                    if (options.enableBuildCallback && callback && isFunction(callback)) {
+                        callback.__requireJsBuild = true;
+                    }
+
+                    if (typeof deps === 'string') {
+                        if (isFunction(callback)) {
+                            //Invalid call
+                            return onError(makeError('requireargs', 'Invalid require call'), errback);
+                        }
+
+                        //If require|exports|module are requested, get the
+                        //value for them from the special handlers. Caveat:
+                        //this only works while module is being defined.
+                        if (relMap && hasProp(handlers, deps)) {
+                            return handlers[deps](registry[relMap.id]);
+                        }
+
+                        //Synchronous access to one module. If require.get is
+                        //available (as in the Node adapter), prefer that.
+                        if (req.get) {
+                            return req.get(context, deps, relMap, localRequire);
+                        }
+
+                        //Normalize module name, if it contains . or ..
+                        map = makeModuleMap(deps, relMap, false, true);
+                        id = map.id;
+
+                        if (!hasProp(defined, id)) {
+                            return onError(makeError('notloaded', 'Module name "' +
+                                        id +
+                                        '" has not been loaded yet for context: ' +
+                                        contextName +
+                                        (relMap ? '' : '. Use require([])')));
+                        }
+                        return defined[id];
+                    }
+
+                    //Grab defines waiting in the global queue.
+                    intakeDefines();
+
+                    //Mark all the dependencies as needing to be loaded.
+                    context.nextTick(function () {
+                        //Some defines could have been added since the
+                        //require call, collect them.
+                        intakeDefines();
+
+                        requireMod = getModule(makeModuleMap(null, relMap));
+
+                        //Store if map config should be applied to this require
+                        //call for dependencies.
+                        requireMod.skipMap = options.skipMap;
+
+                        requireMod.init(deps, callback, errback, {
+                            enabled: true
+                        });
+
+                        checkLoaded();
+                    });
+
+                    return localRequire;
+                }
+
+                mixin(localRequire, {
+                    isBrowser: isBrowser,
+
+                    /**
+                     * Converts a module name + .extension into a URL path.
+                     * *Requires* the use of a module name. It does not support using
+                     * plain URLs like nameToUrl.
+                     */
+                    toUrl: function (moduleNamePlusExt) {
+                        var ext,
+                            index = moduleNamePlusExt.lastIndexOf('.'),
+                            segment = moduleNamePlusExt.split('/')[0],
+                            isRelative = segment === '.' || segment === '..';
+
+                        //Have a file extension alias, and it is not the
+                        //dots from a relative path.
+                        if (index !== -1 && (!isRelative || index > 1)) {
+                            ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length);
+                            moduleNamePlusExt = moduleNamePlusExt.substring(0, index);
+                        }
+
+                        return context.nameToUrl(normalize(moduleNamePlusExt,
+                                                relMap && relMap.id, true), ext,  true);
+                    },
+
+                    defined: function (id) {
+                        return hasProp(defined, makeModuleMap(id, relMap, false, true).id);
+                    },
+
+                    specified: function (id) {
+                        id = makeModuleMap(id, relMap, false, true).id;
+                        return hasProp(defined, id) || hasProp(registry, id);
+                    }
+                });
+
+                //Only allow undef on top level require calls
+                if (!relMap) {
+                    localRequire.undef = function (id) {
+                        //Bind any waiting define() calls to this context,
+                        //fix for #408
+                        takeGlobalQueue();
+
+                        var map = makeModuleMap(id, relMap, true),
+                            mod = getOwn(registry, id);
+
+                        mod.undefed = true;
+                        removeScript(id);
+
+                        delete defined[id];
+                        delete urlFetched[map.url];
+                        delete undefEvents[id];
+
+                        //Clean queued defines too. Go backwards
+                        //in array so that the splices do not
+                        //mess up the iteration.
+                        eachReverse(defQueue, function(args, i) {
+                            if (args[0] === id) {
+                                defQueue.splice(i, 1);
+                            }
+                        });
+                        delete context.defQueueMap[id];
+
+                        if (mod) {
+                            //Hold on to listeners in case the
+                            //module will be attempted to be reloaded
+                            //using a different config.
+                            if (mod.events.defined) {
+                                undefEvents[id] = mod.events;
+                            }
+
+                            cleanRegistry(id);
+                        }
+                    };
+                }
+
+                return localRequire;
+            },
+
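+            //Illustrative call shapes for the localRequire built above
+            //(module ids hypothetical). The synchronous string form only
+            //works for modules that are already defined; otherwise the
+            //'notloaded' error above is raised.
+            //
+            //    require(['app/model'], function (model) { /* async */ });
+            //    var model = require('app/model'); //sync, must be loaded
+            //    require.undef('app/model');       //top-level require only
+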
+            /**
+             * Called to enable a module if it is still in the registry
+             * awaiting enablement. A second arg, parent, the parent module,
+             * is passed in for context, when this method is overridden by
+             * the optimizer. Not shown here to keep code compact.
+             */
+            enable: function (depMap) {
+                var mod = getOwn(registry, depMap.id);
+                if (mod) {
+                    getModule(depMap).enable();
+                }
+            },
+
+            /**
+             * Internal method used by environment adapters to complete a load event.
+             * A load event could be a script load or just a load pass from a synchronous
+             * load call.
+             * @param {String} moduleName the name of the module to potentially complete.
+             */
+            completeLoad: function (moduleName) {
+                var found, args, mod,
+                    shim = getOwn(config.shim, moduleName) || {},
+                    shExports = shim.exports;
+
+                takeGlobalQueue();
+
+                while (defQueue.length) {
+                    args = defQueue.shift();
+                    if (args[0] === null) {
+                        args[0] = moduleName;
+                        //If already found an anonymous module and bound it
+                        //to this name, then this is some other anon module
+                        //waiting for its completeLoad to fire.
+                        if (found) {
+                            break;
+                        }
+                        found = true;
+                    } else if (args[0] === moduleName) {
+                        //Found matching define call for this script!
+                        found = true;
+                    }
+
+                    callGetModule(args);
+                }
+                context.defQueueMap = {};
+
+                //Do this after the cycle of callGetModule in case the result
+                //of those calls/init calls changes the registry.
+                mod = getOwn(registry, moduleName);
+
+                if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) {
+                    if (config.enforceDefine && (!shExports || !getGlobal(shExports))) {
+                        if (hasPathFallback(moduleName)) {
+                            return;
+                        } else {
+                            return onError(makeError('nodefine',
+                                             'No define call for ' + moduleName,
+                                             null,
+                                             [moduleName]));
+                        }
+                    } else {
+                        //A script that does not call define(), so just simulate
+                        //the call for it.
+                        callGetModule([moduleName, (shim.deps || []), shim.exportsFn]);
+                    }
+                }
+
+                checkLoaded();
+            },
+
+            /**
+             * Converts a module name to a file path. Supports cases where
+             * moduleName may actually be just a URL.
+             * Note that it **does not** call normalize on the moduleName,
+             * it is assumed to have already been normalized. This is an
+             * internal API, not a public one. Use toUrl for the public API.
+             */
+            nameToUrl: function (moduleName, ext, skipExt) {
+                var paths, syms, i, parentModule, url,
+                    parentPath, bundleId,
+                    pkgMain = getOwn(config.pkgs, moduleName);
+
+                if (pkgMain) {
+                    moduleName = pkgMain;
+                }
+
+                bundleId = getOwn(bundlesMap, moduleName);
+
+                if (bundleId) {
+                    return context.nameToUrl(bundleId, ext, skipExt);
+                }
+
+                //If a colon is in the URL, it indicates a protocol is used and it is just
+                //a URL to a file, or if it starts with a slash, contains a query arg (i.e. ?)
+                //or ends with .js, then assume the user meant a URL and not a module id.
+                //The slash is important for protocol-less URLs as well as full paths.
+                if (req.jsExtRegExp.test(moduleName)) {
+                    //Just a plain path, not module name lookup, so just return it.
+                    //Add extension if one was passed in. This is a bit wonky: only non-.js things
+                    //pass an extension, so this method probably needs to be reworked.
+                    url = moduleName + (ext || '');
+                } else {
+                    //A module that needs to be converted to a path.
+                    paths = config.paths;
+
+                    syms = moduleName.split('/');
+                    //For each module name segment, see if there is a path
+                    //registered for it. Start with most specific name
+                    //and work up from it.
+                    for (i = syms.length; i > 0; i -= 1) {
+                        parentModule = syms.slice(0, i).join('/');
+
+                        parentPath = getOwn(paths, parentModule);
+                        if (parentPath) {
+                            //If an array, it means there are a few choices;
+                            //choose the one that is desired.
+                            if (isArray(parentPath)) {
+                                parentPath = parentPath[0];
+                            }
+                            syms.splice(0, i, parentPath);
+                            break;
+                        }
+                    }
+
+                    //Join the path parts together, then figure out if baseUrl is needed.
+                    url = syms.join('/');
+                    url += (ext || (/^data\:|^blob\:|\?/.test(url) || skipExt ? '' : '.js'));
+                    url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url;
+                }
+
+                return config.urlArgs && !/^blob\:/.test(url) ?
+                       url + config.urlArgs(moduleName, url) : url;
+            },
+
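+            //Worked example of the resolution above (names hypothetical):
+            //with baseUrl 'js/' and paths {'app': 'lib/app'}, the id
+            //'app/view' matches the 'app' prefix, giving 'js/lib/app/view.js'.
+            //Ids that already look like paths ('./x.js', '/x.js',
+            //'http://...') match jsExtRegExp and skip the paths lookup.
+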
+            //Delegates to req.load. Broken out as a separate function to
+            //allow overriding in the optimizer.
+            load: function (id, url) {
+                req.load(context, id, url);
+            },
+
+            /**
+             * Executes a module callback function. Broken out as a separate function
+             * solely to allow the build system to sequence the files in the built
+             * layer in the right sequence.
+             *
+             * @private
+             */
+            execCb: function (name, callback, args, exports) {
+                return callback.apply(exports, args);
+            },
+
+            /**
+             * callback for script loads, used to check status of loading.
+             *
+             * @param {Event} evt the event from the browser for the script
+             * that was loaded.
+             */
+            onScriptLoad: function (evt) {
+                //Using currentTarget instead of target for Firefox 2.0's sake. Not
+                //all old browsers will be supported, but this one was easy enough
+                //to support and still makes sense.
+                if (evt.type === 'load' ||
+                        (readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) {
+                    //Reset interactive script so a script node is not held onto for
+                    //too long.
+                    interactiveScript = null;
+
+                    //Pull out the name of the module and the context.
+                    var data = getScriptData(evt);
+                    context.completeLoad(data.id);
+                }
+            },
+
+            /**
+             * Callback for script errors.
+             */
+            onScriptError: function (evt) {
+                var data = getScriptData(evt);
+                if (!hasPathFallback(data.id)) {
+                    var parents = [];
+                    eachProp(registry, function(value, key) {
+                        if (key.indexOf('_@r') !== 0) {
+                            each(value.depMaps, function(depMap) {
+                                if (depMap.id === data.id) {
+                                    parents.push(key);
+                                    return true;
+                                }
+                            });
+                        }
+                    });
+                    return onError(makeError('scripterror', 'Script error for "' + data.id +
+                                             (parents.length ?
+                                             '", needed by: ' + parents.join(', ') :
+                                             '"'), evt, [data.id]));
+                }
+            }
+        };
+
+        context.require = context.makeRequire();
+        return context;
+    }
+
+    /**
+     * Main entry point.
+     *
+     * If the only argument to require is a string, then the module that
+     * is represented by that string is fetched for the appropriate context.
+     *
+     * If the first argument is an array, then it will be treated as an array
+     * of dependency string names to fetch. An optional function callback can
+     * be specified to execute when all of those dependencies are available.
+     *
+     * Make a local req variable to help Caja compliance (it assumes things
+     * on a require that are not standardized), and to give a short
+     * name for minification/local scope use.
+     */
+    req = requirejs = function (deps, callback, errback, optional) {
+
+        //Find the right context, use default
+        var context, config,
+            contextName = defContextName;
+
+        // Determine if we have a config object in the call.
+        if (!isArray(deps) && typeof deps !== 'string') {
+            // deps is a config object
+            config = deps;
+            if (isArray(callback)) {
+                // Adjust args if there are dependencies
+                deps = callback;
+                callback = errback;
+                errback = optional;
+            } else {
+                deps = [];
+            }
+        }
+
+        if (config && config.context) {
+            contextName = config.context;
+        }
+
+        context = getOwn(contexts, contextName);
+        if (!context) {
+            context = contexts[contextName] = req.s.newContext(contextName);
+        }
+
+        if (config) {
+            context.configure(config);
+        }
+
+        return context.require(deps, callback, errback);
+    };
+
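+    //Illustrative call shapes for the entry point above (ids hypothetical):
+    //
+    //    requirejs(['a', 'b'], function (a, b) { /* ... */ }); //deps + callback
+    //    requirejs({ context: 'admin' }, ['a'], cb);           //config first
+    //    requirejs({ baseUrl: 'js' });                         //config only
+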
+    /**
+     * Support require.config() to make it easier to cooperate with other
+     * AMD loaders on globally agreed names.
+     */
+    req.config = function (config) {
+        return req(config);
+    };
+
+    /**
+     * Execute something after the current tick
+     * of the event loop. Override for other envs
+     * that have a better solution than setTimeout.
+     * @param  {Function} fn function to execute later.
+     */
+    req.nextTick = typeof setTimeout !== 'undefined' ? function (fn) {
+        setTimeout(fn, 4);
+    } : function (fn) { fn(); };
+
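+    //An environment with a real async primitive can override this; under
+    //Node, for example, one might use (an assumption, not done here):
+    //
+    //    req.nextTick = process.nextTick;
+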
+    /**
+     * Export require as a global, but only if it does not already exist.
+     */
+    if (!require) {
+        require = req;
+    }
+
+    req.version = version;
+
+    //Used to filter out dependencies that are already paths.
+    req.jsExtRegExp = /^\/|:|\?|\.js$/;
+    req.isBrowser = isBrowser;
+    s = req.s = {
+        contexts: contexts,
+        newContext: newContext
+    };
+
+    //Create default context.
+    req({});
+
+    //Exports some context-sensitive methods on global require.
+    each([
+        'toUrl',
+        'undef',
+        'defined',
+        'specified'
+    ], function (prop) {
+        //Reference from contexts instead of early binding to default context,
+        //so that during builds, the latest instance of the default context
+        //with its config gets used.
+        req[prop] = function () {
+            var ctx = contexts[defContextName];
+            return ctx.require[prop].apply(ctx, arguments);
+        };
+    });
+
+    if (isBrowser) {
+        head = s.head = document.getElementsByTagName('head')[0];
+        //If BASE tag is in play, using appendChild is a problem for IE6.
+        //When that browser dies, this can be removed. Details in this jQuery bug:
+        //http://dev.jquery.com/ticket/2709
+        baseElement = document.getElementsByTagName('base')[0];
+        if (baseElement) {
+            head = s.head = baseElement.parentNode;
+        }
+    }
+
+    /**
+     * Any errors that require explicitly generates will be passed to this
+     * function. Intercept/override it if you want custom error handling.
+     * @param {Error} err the error object.
+     */
+    req.onError = defaultOnError;
+
+    /**
+     * Creates the node for the load command. Only used in browser envs.
+     */
+    req.createNode = function (config, moduleName, url) {
+        var node = config.xhtml ?
+                document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script') :
+                document.createElement('script');
+        node.type = config.scriptType || 'text/javascript';
+        node.charset = 'utf-8';
+        node.async = true;
+        return node;
+    };
+
+    /**
+     * Does the request to load a module for the browser case.
+     * Make this a separate function to allow other environments
+     * to override it.
+     *
+     * @param {Object} context the require context to find state.
+     * @param {String} moduleName the name of the module.
+     * @param {Object} url the URL to the module.
+     */
+    req.load = function (context, moduleName, url) {
+        var config = (context && context.config) || {},
+            node;
+        if (isBrowser) {
+            //In the browser so use a script tag
+            node = req.createNode(config, moduleName, url);
+
+            node.setAttribute('data-requirecontext', context.contextName);
+            node.setAttribute('data-requiremodule', moduleName);
+
+            //Set up load listener. Test attachEvent first because IE9 has
+            //a subtle issue in its addEventListener and script onload firings
+            //that do not match the behavior of all other browsers with
+            //addEventListener support, which fire the onload event for a
+            //script right after the script execution. See:
+            //https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution
+            //UNFORTUNATELY Opera implements attachEvent but does not follow the
+            //script execution mode.
+            if (node.attachEvent &&
+                    //Check if node.attachEvent is artificially added by custom script or
+                    //natively supported by browser
+                    //read https://github.com/requirejs/requirejs/issues/187
+                    //if we can NOT find [native code] then it must NOT be natively supported.
+                    //in IE8, node.attachEvent does not have toString()
+                    //Note the test for "[native code" with no closing brace, see:
+                    //https://github.com/requirejs/requirejs/issues/273
+                    !(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) &&
+                    !isOpera) {
+                //Probably IE. IE (at least 6-8) do not fire
+                //script onload right after executing the script, so
+                //we cannot tie the anonymous define call to a name.
+                //However, IE reports the script as being in 'interactive'
+                //readyState at the time of the define call.
+                useInteractive = true;
+
+                node.attachEvent('onreadystatechange', context.onScriptLoad);
+                //It would be great to add an error handler here to catch
+                //404s in IE9+. However, onreadystatechange will fire before
+                //the error handler, so that does not help. If addEventListener
+                //is used, then IE will fire error before load, but we cannot
+                //use that pathway given the connect.microsoft.com issue
+                //mentioned above about not doing the 'script execute,
+                //then fire the script load event listener before execute
+                //next script' that other browsers do.
+                //Best hope: IE10 fixes the issues,
+                //and then destroys all installs of IE 6-9.
+                //node.attachEvent('onerror', context.onScriptError);
+            } else {
+                node.addEventListener('load', context.onScriptLoad, false);
+                node.addEventListener('error', context.onScriptError, false);
+            }
+            node.src = url;
+
+            //Calling onNodeCreated after all properties on the node have been
+            //set, but before it is placed in the DOM.
+            if (config.onNodeCreated) {
+                config.onNodeCreated(node, config, moduleName, url);
+            }
+
+            //For some cache cases in IE 6-8, the script executes before the end
+            //of the appendChild execution, so to tie an anonymous define
+            //call to the module name (which is stored on the node), hold on
+            //to a reference to this node, but clear after the DOM insertion.
+            currentlyAddingScript = node;
+            if (baseElement) {
+                head.insertBefore(node, baseElement);
+            } else {
+                head.appendChild(node);
+            }
+            currentlyAddingScript = null;
+
+            return node;
+        } else if (isWebWorker) {
+            try {
+                //In a web worker, use importScripts. This is not a very
+                //efficient use of importScripts, importScripts will block until
+                //its script is downloaded and evaluated. However, if web workers
+                //are in play, the expectation is that a build has been done so
+                //that only one script needs to be loaded anyway. This may need
+                //to be reevaluated if other use cases become common.
+
+                // Post a task to the event loop to work around a bug in WebKit
+                // where the worker gets garbage-collected after calling
+                // importScripts(): https://webkit.org/b/153317
+                setTimeout(function() {}, 0);
+                importScripts(url);
+
+                //Account for anonymous modules
+                context.completeLoad(moduleName);
+            } catch (e) {
+                context.onError(makeError('importscripts',
+                                'importScripts failed for ' +
+                                    moduleName + ' at ' + url,
+                                e,
+                                [moduleName]));
+            }
+        }
+    };
+
+    function getInteractiveScript() {
+        if (interactiveScript && interactiveScript.readyState === 'interactive') {
+            return interactiveScript;
+        }
+
+        eachReverse(scripts(), function (script) {
+            if (script.readyState === 'interactive') {
+                return (interactiveScript = script);
+            }
+        });
+        return interactiveScript;
+    }
+
+    //Look for a data-main script attribute, which could also adjust the baseUrl.
+    if (isBrowser && !cfg.skipDataMain) {
+        //Figure out baseUrl. Get it from the script tag with require.js in it.
+        eachReverse(scripts(), function (script) {
+            //Set the 'head' where we can append children by
+            //using the script's parent.
+            if (!head) {
+                head = script.parentNode;
+            }
+
+            //Look for a data-main attribute to set main script for the page
+            //to load. If it is there, the path to data main becomes the
+            //baseUrl, if it is not already set.
+            dataMain = script.getAttribute('data-main');
+            if (dataMain) {
+                //Preserve dataMain in case it is a path (i.e. contains '?')
+                mainScript = dataMain;
+
+                //Set final baseUrl if there is not already an explicit one,
+                //but only do so if the data-main value is not a loader plugin
+                //module ID.
+                if (!cfg.baseUrl && mainScript.indexOf('!') === -1) {
+                    //Pull off the directory of data-main for use as the
+                    //baseUrl.
+                    src = mainScript.split('/');
+                    mainScript = src.pop();
+                    subPath = src.length ? src.join('/')  + '/' : './';
+
+                    cfg.baseUrl = subPath;
+                }
+
+                //Strip off any trailing .js since mainScript is now
+                //like a module name.
+                mainScript = mainScript.replace(jsSuffixRegExp, '');
+
+                //If mainScript is still a path, fall back to dataMain
+                if (req.jsExtRegExp.test(mainScript)) {
+                    mainScript = dataMain;
+                }
+
+                //Put the data-main script in the files to load.
+                cfg.deps = cfg.deps ? cfg.deps.concat(mainScript) : [mainScript];
+
+                return true;
+            }
+        });
+    }
+
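+    //Illustrative markup for the data-main handling above (paths
+    //hypothetical): the tag below makes 'js/' the baseUrl and queues
+    //'main' as the first module to load.
+    //
+    //    <script data-main="js/main" src="require.js"></script>
+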
+    /**
+     * The function that handles definitions of modules. Differs from
+     * require() in that a string for the module should be the first argument,
+     * and the function to execute after dependencies are loaded should
+     * return a value to define the module corresponding to the first argument's
+     * name.
+     */
+    define = function (name, deps, callback) {
+        var node, context;
+
+        //Allow for anonymous modules
+        if (typeof name !== 'string') {
+            //Adjust args appropriately
+            callback = deps;
+            deps = name;
+            name = null;
+        }
+
+        //This module may not have dependencies
+        if (!isArray(deps)) {
+            callback = deps;
+            deps = null;
+        }
+
+        //If no name, and callback is a function, then figure out if it is a
+        //CommonJS thing with dependencies.
+        if (!deps && isFunction(callback)) {
+            deps = [];
+            //Remove comments from the callback string,
+            //look for require calls, and pull them into the dependencies,
+            //but only if there are function args.
+            if (callback.length) {
+                callback
+                    .toString()
+                    .replace(commentRegExp, commentReplace)
+                    .replace(cjsRequireRegExp, function (match, dep) {
+                        deps.push(dep);
+                    });
+
+                //May be a CommonJS thing even without require calls, but still
+                //could use exports, and module. Avoid doing exports and module
+                //work though if it just needs require.
+                //REQUIRES the function to expect the CommonJS variables in the
+                //order listed below.
+                deps = (callback.length === 1 ? ['require'] : ['require', 'exports', 'module']).concat(deps);
+            }
+        }
+
+        //If in IE 6-8 and hit an anonymous define() call, do the interactive
+        //work.
+        if (useInteractive) {
+            node = currentlyAddingScript || getInteractiveScript();
+            if (node) {
+                if (!name) {
+                    name = node.getAttribute('data-requiremodule');
+                }
+                context = contexts[node.getAttribute('data-requirecontext')];
+            }
+        }
+
+        //Always save off evaluating the def call until the script onload handler.
+        //This allows multiple modules to be in a file without prematurely
+        //tracing dependencies, and allows for anonymous module support,
+        //where the module name is not known until the script onload event
+        //occurs. If no context, use the global queue, and get it processed
+        //in the onscript load callback.
+        if (context) {
+            context.defQueue.push([name, deps, callback]);
+            context.defQueueMap[name] = true;
+        } else {
+            globalDefQueue.push([name, deps, callback]);
+        }
+    };
+
+    define.amd = {
+        jQuery: true
+    };
+
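+    //Illustrative define() shapes accepted above (ids hypothetical):
+    //
+    //    define('named', ['dep'], function (dep) { return {}; });
+    //    define(['dep'], function (dep) { return {}; }); //anonymous
+    //    define(function (require, exports, module) {    //CommonJS sugar;
+    //        var dep = require('dep');                   //deps are scanned
+    //        return {};                                  //from the source
+    //    });
+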
+    /**
+     * Executes the text. Normally just uses eval, but can be modified
+     * to use a better, environment-specific call. Only used for transpiling
+     * loader plugins, not for plain JS modules.
+     * @param {String} text the text to execute/evaluate.
+     */
+    req.exec = function (text) {
+        /*jslint evil: true */
+        return eval(text);
+    };
+
+    //Set up with config info.
+    req(cfg);
+}(this));
diff --git a/client/galaxy/scripts/libs/toastr.js b/client/galaxy/scripts/libs/toastr.js
new file mode 100644
index 0000000..ff2046b
--- /dev/null
+++ b/client/galaxy/scripts/libs/toastr.js
@@ -0,0 +1,307 @@
+/*
+ * Toastr
+ * Version 2.0.1
+ * Copyright 2012 John Papa and Hans Fjällemark.  
+ * All Rights Reserved.  
+ * Use, reproduction, distribution, and modification of this code is subject to the terms and 
+ * conditions of the MIT license, available at http://www.opensource.org/licenses/mit-license.php
+ *
+ * Author: John Papa and Hans Fjällemark
+ * Project: https://github.com/CodeSeven/toastr
+ */
+; (function (define) {
+	define([], function () {
+		var $ = jQuery;
+		return (function () {
+			var version = '2.0.1';
+			var $container;
+			var listener;
+			var toastId = 0;
+			var toastType = {
+				error: 'error',
+				info: 'info',
+				success: 'success',
+				warning: 'warning'
+			};
+
+			var toastr = {
+				clear: clear,
+				error: error,
+				getContainer: getContainer,
+				info: info,
+				options: {},
+				subscribe: subscribe,
+				success: success,
+				version: version,
+				warning: warning
+			};
+
+			return toastr;
+
+			//#region Accessible Methods
+			function error(message, title, optionsOverride) {
+				return notify({
+					type: toastType.error,
+					iconClass: getOptions().iconClasses.error,
+					message: message,
+					optionsOverride: optionsOverride,
+					title: title
+				});
+			}
+
+			function info(message, title, optionsOverride) {
+				return notify({
+					type: toastType.info,
+					iconClass: getOptions().iconClasses.info,
+					message: message,
+					optionsOverride: optionsOverride,
+					title: title
+				});
+			}
+
+			function subscribe(callback) {
+				listener = callback;
+			}
+
+			function success(message, title, optionsOverride) {
+				return notify({
+					type: toastType.success,
+					iconClass: getOptions().iconClasses.success,
+					message: message,
+					optionsOverride: optionsOverride,
+					title: title
+				});
+			}
+
+			function warning(message, title, optionsOverride) {
+				return notify({
+					type: toastType.warning,
+					iconClass: getOptions().iconClasses.warning,
+					message: message,
+					optionsOverride: optionsOverride,
+					title: title
+				});
+			}
+
+			function clear($toastElement) {
+				var options = getOptions();
+				if (!$container) { getContainer(options); }
+				if ($toastElement && $(':focus', $toastElement).length === 0) {
+					$toastElement[options.hideMethod]({
+						duration: options.hideDuration,
+						easing: options.hideEasing,
+						complete: function () { removeToast($toastElement); }
+					});
+					return;
+				}
+				if ($container.children().length) {
+					$container[options.hideMethod]({
+						duration: options.hideDuration,
+						easing: options.hideEasing,
+						complete: function () { $container.remove(); }
+					});
+				}
+			}
+			//#endregion
+
+			//#region Internal Methods
+
+			function getDefaults() {
+				return {
+					tapToDismiss: true,
+					toastClass: 'toast',
+					containerId: 'toast-container',
+					debug: false,
+
+					showMethod: 'fadeIn', //fadeIn, slideDown, and show are built into jQuery
+					showDuration: 300,
+					showEasing: 'swing', //swing and linear are built into jQuery
+					onShown: undefined,
+					hideMethod: 'fadeOut',
+					hideDuration: 1000,
+					hideEasing: 'swing',
+					onHidden: undefined,
+
+					extendedTimeOut: 1000,
+					iconClasses: {
+						error: 'toast-error',
+						info: 'toast-info',
+						success: 'toast-success',
+						warning: 'toast-warning'
+					},
+					iconClass: 'toast-info',
+					positionClass: 'toast-top-right',
+					timeOut: 5000, // Set timeOut and extendedTimeout to 0 to make it sticky
+					titleClass: 'toast-title',
+					messageClass: 'toast-message',
+					target: 'body',
+					closeHtml: '<button>×</button>',
+					newestOnTop: true
+				};
+			}
+
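+			//Illustrative usage of the API above (text hypothetical): any key
+			//from getDefaults() can be overridden globally via toastr.options
+			//or per call via the third argument.
+			//
+			//    toastr.options.positionClass = 'toast-bottom-right';
+			//    toastr.success('Saved', 'Done');
+			//    toastr.error('Failed', 'Oops', { timeOut: 0, extendedTimeOut: 0 }); //sticky
+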
+			function publish(args) {
+				if (!listener) {
+					return;
+				}
+				listener(args);
+			}
+
+			function notify(map) {
+				var
+					options = getOptions(),
+					iconClass = map.iconClass || options.iconClass;
+
+				if (typeof (map.optionsOverride) !== 'undefined') {
+					options = $.extend(options, map.optionsOverride);
+					iconClass = map.optionsOverride.iconClass || iconClass;
+				}
+
+				toastId++;
+
+				$container = getContainer(options);
+				var
+					intervalId = null,
+					$toastElement = $('<div/>'),
+					$titleElement = $('<div/>'),
+					$messageElement = $('<div/>'),
+					$closeElement = $(options.closeHtml),
+					response = {
+						toastId: toastId,
+						state: 'visible',
+						startTime: new Date(),
+						options: options,
+						map: map
+					};
+
+				if (map.iconClass) {
+					$toastElement.addClass(options.toastClass).addClass(iconClass);
+				}
+
+				if (map.title) {
+					$titleElement.append(map.title).addClass(options.titleClass);
+					$toastElement.append($titleElement);
+				}
+
+				if (map.message) {
+					$messageElement.append(map.message).addClass(options.messageClass);
+					$toastElement.append($messageElement);
+				}
+
+				if (options.closeButton) {
+					$closeElement.addClass('toast-close-button');
+					$toastElement.prepend($closeElement);
+				}
+
+				$toastElement.hide();
+				if (options.newestOnTop) {
+					$container.prepend($toastElement);
+				} else {
+					$container.append($toastElement);
+				}
+
+
+				$toastElement[options.showMethod](
+					{ duration: options.showDuration, easing: options.showEasing, complete: options.onShown }
+				);
+				if (options.timeOut > 0) {
+					intervalId = setTimeout(hideToast, options.timeOut);
+				}
+
+				$toastElement.hover(stickAround, delayedhideToast);
+				if (!options.onclick && options.tapToDismiss) {
+					$toastElement.click(hideToast);
+				}
+				if (options.closeButton && $closeElement) {
+					$closeElement.click(function (event) {
+						event.stopPropagation();
+						hideToast(true);
+					});
+				}
+
+				if (options.onclick) {
+					$toastElement.click(function () {
+						options.onclick();
+						hideToast();
+					});
+				}
+
+				publish(response);
+
+				if (options.debug && console) {
+					console.log(response);
+				}
+
+				return $toastElement;
+
+				function hideToast(override) {
+					if ($(':focus', $toastElement).length && !override) {
+						return;
+					}
+					return $toastElement[options.hideMethod]({
+						duration: options.hideDuration,
+						easing: options.hideEasing,
+						complete: function () {
+							removeToast($toastElement);
+							if (options.onHidden) {
+								options.onHidden();
+							}
+							response.state = 'hidden';
+							response.endTime = new Date();
+							publish(response);
+						}
+					});
+				}
+
+				function delayedhideToast() {
+					if (options.timeOut > 0 || options.extendedTimeOut > 0) {
+						intervalId = setTimeout(hideToast, options.extendedTimeOut);
+					}
+				}
+
+				function stickAround() {
+					clearTimeout(intervalId);
+					$toastElement.stop(true, true)[options.showMethod](
+						{ duration: options.showDuration, easing: options.showEasing }
+					);
+				}
+			}
+			function getContainer(options) {
+				if (!options) { options = getOptions(); }
+				$container = $('#' + options.containerId);
+				if ($container.length) {
+					return $container;
+				}
+				$container = $('<div/>')
+					.attr('id', options.containerId)
+					.addClass(options.positionClass);
+				$container.appendTo($(options.target));
+				return $container;
+			}
+
+			function getOptions() {
+				return $.extend({}, getDefaults(), toastr.options);
+			}
+
+			function removeToast($toastElement) {
+				if (!$container) { $container = getContainer(); }
+				if ($toastElement.is(':visible')) {
+					return;
+				}
+				$toastElement.remove();
+				$toastElement = null;
+				if ($container.children().length === 0) {
+					$container.remove();
+				}
+			}
+			//#endregion
+
+		})();
+	});
+}(typeof define === 'function' && define.amd ? define : function (deps, factory) {
+	if (typeof module !== 'undefined' && module.exports) { //Node
+		module.exports = factory(require(deps[0]));
+	} else {
+		window['toastr'] = factory(window['jQuery']);
+	}
+}));
diff --git a/client/galaxy/scripts/libs/underscore.js b/client/galaxy/scripts/libs/underscore.js
new file mode 100644
index 0000000..b29332f
--- /dev/null
+++ b/client/galaxy/scripts/libs/underscore.js
@@ -0,0 +1,1548 @@
+//     Underscore.js 1.8.3
+//     http://underscorejs.org
+//     (c) 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+//     Underscore may be freely distributed under the MIT license.
+
+(function() {
+
+  // Baseline setup
+  // --------------
+
+  // Establish the root object, `window` in the browser, or `exports` on the server.
+  var root = this;
+
+  // Save the previous value of the `_` variable.
+  var previousUnderscore = root._;
+
+  // Save bytes in the minified (but not gzipped) version:
+  var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype;
+
+  // Create quick reference variables for speed access to core prototypes.
+  var
+    push             = ArrayProto.push,
+    slice            = ArrayProto.slice,
+    toString         = ObjProto.toString,
+    hasOwnProperty   = ObjProto.hasOwnProperty;
+
+  // All **ECMAScript 5** native function implementations that we hope to use
+  // are declared here.
+  var
+    nativeIsArray      = Array.isArray,
+    nativeKeys         = Object.keys,
+    nativeBind         = FuncProto.bind,
+    nativeCreate       = Object.create;
+
+  // Naked function reference for surrogate-prototype-swapping.
+  var Ctor = function(){};
+
+  // Create a safe reference to the Underscore object for use below.
+  var _ = function(obj) {
+    if (obj instanceof _) return obj;
+    if (!(this instanceof _)) return new _(obj);
+    this._wrapped = obj;
+  };
+
+  // Export the Underscore object for **Node.js**, with
+  // backwards-compatibility for the old `require()` API. If we're in
+  // the browser, add `_` as a global object.
+  if (typeof exports !== 'undefined') {
+    if (typeof module !== 'undefined' && module.exports) {
+      exports = module.exports = _;
+    }
+    exports._ = _;
+  } else {
+    root._ = _;
+  }
+
+  // Current version.
+  _.VERSION = '1.8.3';
+
+  // Internal function that returns an efficient (for current engines) version
+  // of the passed-in callback, to be repeatedly applied in other Underscore
+  // functions.
+  var optimizeCb = function(func, context, argCount) {
+    if (context === void 0) return func;
+    switch (argCount == null ? 3 : argCount) {
+      case 1: return function(value) {
+        return func.call(context, value);
+      };
+      case 2: return function(value, other) {
+        return func.call(context, value, other);
+      };
+      case 3: return function(value, index, collection) {
+        return func.call(context, value, index, collection);
+      };
+      case 4: return function(accumulator, value, index, collection) {
+        return func.call(context, accumulator, value, index, collection);
+      };
+    }
+    return function() {
+      return func.apply(context, arguments);
+    };
+  };
+
+  // A mostly-internal function to generate callbacks that can be applied
+  // to each element in a collection, returning the desired result — either
+  // identity, an arbitrary callback, a property matcher, or a property accessor.
+  var cb = function(value, context, argCount) {
+    if (value == null) return _.identity;
+    if (_.isFunction(value)) return optimizeCb(value, context, argCount);
+    if (_.isObject(value)) return _.matcher(value);
+    return _.property(value);
+  };
+  _.iteratee = function(value, context) {
+    return cb(value, context, Infinity);
+  };
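+
+  // Example (illustrative; `stooges` is a hypothetical array of objects):
+  // collection functions accept the same callback shorthands via `cb`.
+  //   _.map(stooges, 'name');          // property accessor
+  //   _.filter(stooges, { age: 50 });  // property matcher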
+
+  // An internal function for creating assigner functions.
+  var createAssigner = function(keysFunc, undefinedOnly) {
+    return function(obj) {
+      var length = arguments.length;
+      if (length < 2 || obj == null) return obj;
+      for (var index = 1; index < length; index++) {
+        var source = arguments[index],
+            keys = keysFunc(source),
+            l = keys.length;
+        for (var i = 0; i < l; i++) {
+          var key = keys[i];
+          if (!undefinedOnly || obj[key] === void 0) obj[key] = source[key];
+        }
+      }
+      return obj;
+    };
+  };
+
+  // An internal function for creating a new object that inherits from another.
+  var baseCreate = function(prototype) {
+    if (!_.isObject(prototype)) return {};
+    if (nativeCreate) return nativeCreate(prototype);
+    Ctor.prototype = prototype;
+    var result = new Ctor;
+    Ctor.prototype = null;
+    return result;
+  };
+
+  var property = function(key) {
+    return function(obj) {
+      return obj == null ? void 0 : obj[key];
+    };
+  };
+
+  // Helper for collection methods to determine whether a collection
+  // should be iterated as an array or as an object
+  // Related: http://people.mozilla.org/~jorendorff/es6-draft.html#sec-tolength
+  // Avoids a very nasty iOS 8 JIT bug on ARM-64. #2094
+  var MAX_ARRAY_INDEX = Math.pow(2, 53) - 1;
+  var getLength = property('length');
+  var isArrayLike = function(collection) {
+    var length = getLength(collection);
+    return typeof length == 'number' && length >= 0 && length <= MAX_ARRAY_INDEX;
+  };
+
+  // Collection Functions
+  // --------------------
+
+  // The cornerstone, an `each` implementation, aka `forEach`.
+  // Handles raw objects in addition to array-likes. Treats all
+  // sparse array-likes as if they were dense.
+  _.each = _.forEach = function(obj, iteratee, context) {
+    iteratee = optimizeCb(iteratee, context);
+    var i, length;
+    if (isArrayLike(obj)) {
+      for (i = 0, length = obj.length; i < length; i++) {
+        iteratee(obj[i], i, obj);
+      }
+    } else {
+      var keys = _.keys(obj);
+      for (i = 0, length = keys.length; i < length; i++) {
+        iteratee(obj[keys[i]], keys[i], obj);
+      }
+    }
+    return obj;
+  };
+
+  // Return the results of applying the iteratee to each element.
+  _.map = _.collect = function(obj, iteratee, context) {
+    iteratee = cb(iteratee, context);
+    var keys = !isArrayLike(obj) && _.keys(obj),
+        length = (keys || obj).length,
+        results = Array(length);
+    for (var index = 0; index < length; index++) {
+      var currentKey = keys ? keys[index] : index;
+      results[index] = iteratee(obj[currentKey], currentKey, obj);
+    }
+    return results;
+  };
+
+  // Create a reducing function iterating left or right.
+  function createReduce(dir) {
+    // Optimized iterator function as using arguments.length
+    // in the main function will deoptimize it, see #1991.
+    function iterator(obj, iteratee, memo, keys, index, length) {
+      for (; index >= 0 && index < length; index += dir) {
+        var currentKey = keys ? keys[index] : index;
+        memo = iteratee(memo, obj[currentKey], currentKey, obj);
+      }
+      return memo;
+    }
+
+    return function(obj, iteratee, memo, context) {
+      iteratee = optimizeCb(iteratee, context, 4);
+      var keys = !isArrayLike(obj) && _.keys(obj),
+          length = (keys || obj).length,
+          index = dir > 0 ? 0 : length - 1;
+      // Determine the initial value if none is provided.
+      if (arguments.length < 3) {
+        memo = obj[keys ? keys[index] : index];
+        index += dir;
+      }
+      return iterator(obj, iteratee, memo, keys, index, length);
+    };
+  }
+
+  // **Reduce** builds up a single result from a list of values, aka `inject`,
+  // or `foldl`.
+  _.reduce = _.foldl = _.inject = createReduce(1);
+
+  // The right-associative version of reduce, also known as `foldr`.
+  _.reduceRight = _.foldr = createReduce(-1);
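+
+  // Example (illustrative): fold a list into one value, seeding memo with 0.
+  //   _.reduce([1, 2, 3], function(memo, num) { return memo + num; }, 0); // => 6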
+
+  // Return the first value which passes a truth test. Aliased as `detect`.
+  _.find = _.detect = function(obj, predicate, context) {
+    var key;
+    if (isArrayLike(obj)) {
+      key = _.findIndex(obj, predicate, context);
+    } else {
+      key = _.findKey(obj, predicate, context);
+    }
+    if (key !== void 0 && key !== -1) return obj[key];
+  };
+
+  // Return all the elements that pass a truth test.
+  // Aliased as `select`.
+  _.filter = _.select = function(obj, predicate, context) {
+    var results = [];
+    predicate = cb(predicate, context);
+    _.each(obj, function(value, index, list) {
+      if (predicate(value, index, list)) results.push(value);
+    });
+    return results;
+  };
+
+  // Return all the elements for which a truth test fails.
+  _.reject = function(obj, predicate, context) {
+    return _.filter(obj, _.negate(cb(predicate)), context);
+  };
+
+  // Determine whether all of the elements match a truth test.
+  // Aliased as `all`.
+  _.every = _.all = function(obj, predicate, context) {
+    predicate = cb(predicate, context);
+    var keys = !isArrayLike(obj) && _.keys(obj),
+        length = (keys || obj).length;
+    for (var index = 0; index < length; index++) {
+      var currentKey = keys ? keys[index] : index;
+      if (!predicate(obj[currentKey], currentKey, obj)) return false;
+    }
+    return true;
+  };
+
+  // Determine if at least one element in the object matches a truth test.
+  // Aliased as `any`.
+  _.some = _.any = function(obj, predicate, context) {
+    predicate = cb(predicate, context);
+    var keys = !isArrayLike(obj) && _.keys(obj),
+        length = (keys || obj).length;
+    for (var index = 0; index < length; index++) {
+      var currentKey = keys ? keys[index] : index;
+      if (predicate(obj[currentKey], currentKey, obj)) return true;
+    }
+    return false;
+  };
+
+  // Determine if the array or object contains a given item (using `===`).
+  // Aliased as `includes` and `include`.
+  _.contains = _.includes = _.include = function(obj, item, fromIndex, guard) {
+    if (!isArrayLike(obj)) obj = _.values(obj);
+    if (typeof fromIndex != 'number' || guard) fromIndex = 0;
+    return _.indexOf(obj, item, fromIndex) >= 0;
+  };
+
+  // Invoke a method (with arguments) on every item in a collection.
+  _.invoke = function(obj, method) {
+    var args = slice.call(arguments, 2);
+    var isFunc = _.isFunction(method);
+    return _.map(obj, function(value) {
+      var func = isFunc ? method : value[method];
+      return func == null ? func : func.apply(value, args);
+    });
+  };
+
+  // Convenience version of a common use case of `map`: fetching a property.
+  _.pluck = function(obj, key) {
+    return _.map(obj, _.property(key));
+  };
+
+  // Convenience version of a common use case of `filter`: selecting only objects
+  // containing specific `key:value` pairs.
+  _.where = function(obj, attrs) {
+    return _.filter(obj, _.matcher(attrs));
+  };
+
+  // Convenience version of a common use case of `find`: getting the first object
+  // containing specific `key:value` pairs.
+  _.findWhere = function(obj, attrs) {
+    return _.find(obj, _.matcher(attrs));
+  };
+
+  // Return the maximum element (or element-based computation).
+  _.max = function(obj, iteratee, context) {
+    var result = -Infinity, lastComputed = -Infinity,
+        value, computed;
+    if (iteratee == null && obj != null) {
+      obj = isArrayLike(obj) ? obj : _.values(obj);
+      for (var i = 0, length = obj.length; i < length; i++) {
+        value = obj[i];
+        if (value > result) {
+          result = value;
+        }
+      }
+    } else {
+      iteratee = cb(iteratee, context);
+      _.each(obj, function(value, index, list) {
+        computed = iteratee(value, index, list);
+        if (computed > lastComputed || computed === -Infinity && result === -Infinity) {
+          result = value;
+          lastComputed = computed;
+        }
+      });
+    }
+    return result;
+  };
+
+  // Return the minimum element (or element-based computation).
+  _.min = function(obj, iteratee, context) {
+    var result = Infinity, lastComputed = Infinity,
+        value, computed;
+    if (iteratee == null && obj != null) {
+      obj = isArrayLike(obj) ? obj : _.values(obj);
+      for (var i = 0, length = obj.length; i < length; i++) {
+        value = obj[i];
+        if (value < result) {
+          result = value;
+        }
+      }
+    } else {
+      iteratee = cb(iteratee, context);
+      _.each(obj, function(value, index, list) {
+        computed = iteratee(value, index, list);
+        if (computed < lastComputed || computed === Infinity && result === Infinity) {
+          result = value;
+          lastComputed = computed;
+        }
+      });
+    }
+    return result;
+  };
+
+  // Shuffle a collection, using the modern version of the
+  // [Fisher-Yates shuffle](http://en.wikipedia.org/wiki/Fisher–Yates_shuffle).
+  _.shuffle = function(obj) {
+    var set = isArrayLike(obj) ? obj : _.values(obj);
+    var length = set.length;
+    var shuffled = Array(length);
+    for (var index = 0, rand; index < length; index++) {
+      rand = _.random(0, index);
+      if (rand !== index) shuffled[index] = shuffled[rand];
+      shuffled[rand] = set[index];
+    }
+    return shuffled;
+  };
+
+  // Sample **n** random values from a collection.
+  // If **n** is not specified, returns a single random element.
+  // The internal `guard` argument allows it to work with `map`.
+  _.sample = function(obj, n, guard) {
+    if (n == null || guard) {
+      if (!isArrayLike(obj)) obj = _.values(obj);
+      return obj[_.random(obj.length - 1)];
+    }
+    return _.shuffle(obj).slice(0, Math.max(0, n));
+  };
+
+  // Sort the object's values by a criterion produced by an iteratee.
+  _.sortBy = function(obj, iteratee, context) {
+    iteratee = cb(iteratee, context);
+    return _.pluck(_.map(obj, function(value, index, list) {
+      return {
+        value: value,
+        index: index,
+        criteria: iteratee(value, index, list)
+      };
+    }).sort(function(left, right) {
+      var a = left.criteria;
+      var b = right.criteria;
+      if (a !== b) {
+        if (a > b || a === void 0) return 1;
+        if (a < b || b === void 0) return -1;
+      }
+      return left.index - right.index;
+    }), 'value');
+  };
+
+  // An internal function used for aggregate "group by" operations.
+  var group = function(behavior) {
+    return function(obj, iteratee, context) {
+      var result = {};
+      iteratee = cb(iteratee, context);
+      _.each(obj, function(value, index) {
+        var key = iteratee(value, index, obj);
+        behavior(result, value, key);
+      });
+      return result;
+    };
+  };
+
+  // Groups the object's values by a criterion. Pass either a string attribute
+  // to group by, or a function that returns the criterion.
+  _.groupBy = group(function(result, value, key) {
+    if (_.has(result, key)) result[key].push(value); else result[key] = [value];
+  });
+
+  // Indexes the object's values by a criterion, similar to `groupBy`, but for
+  // when you know that your index values will be unique.
+  _.indexBy = group(function(result, value, key) {
+    result[key] = value;
+  });
+
+  // Counts instances of an object that group by a certain criterion. Pass
+  // either a string attribute to count by, or a function that returns the
+  // criterion.
+  _.countBy = group(function(result, value, key) {
+    if (_.has(result, key)) result[key]++; else result[key] = 1;
+  });
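+
+  // Example (illustrative): group or count by a computed key.
+  //   _.groupBy([1.3, 2.1, 2.4], Math.floor); // => {1: [1.3], 2: [2.1, 2.4]}
+  //   _.countBy([1, 2, 3, 4, 5], function(num) {
+  //     return num % 2 == 0 ? 'even' : 'odd';
+  //   }); // => {odd: 3, even: 2}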
+
+  // Safely create a real, live array from anything iterable.
+  _.toArray = function(obj) {
+    if (!obj) return [];
+    if (_.isArray(obj)) return slice.call(obj);
+    if (isArrayLike(obj)) return _.map(obj, _.identity);
+    return _.values(obj);
+  };
+
+  // Return the number of elements in an object.
+  _.size = function(obj) {
+    if (obj == null) return 0;
+    return isArrayLike(obj) ? obj.length : _.keys(obj).length;
+  };
+
+  // Split a collection into two arrays: one whose elements all satisfy the given
+  // predicate, and one whose elements all do not satisfy the predicate.
+  _.partition = function(obj, predicate, context) {
+    predicate = cb(predicate, context);
+    var pass = [], fail = [];
+    _.each(obj, function(value, key, obj) {
+      (predicate(value, key, obj) ? pass : fail).push(value);
+    });
+    return [pass, fail];
+  };
+
+  // Array Functions
+  // ---------------
+
+  // Get the first element of an array. Passing **n** will return the first N
+  // values in the array. Aliased as `head` and `take`. The **guard** check
+  // allows it to work with `_.map`.
+  _.first = _.head = _.take = function(array, n, guard) {
+    if (array == null) return void 0;
+    if (n == null || guard) return array[0];
+    return _.initial(array, array.length - n);
+  };
+
+  // Returns everything but the last entry of the array. Especially useful on
+  // the arguments object. Passing **n** will return all the values in
+  // the array, excluding the last N.
+  _.initial = function(array, n, guard) {
+    return slice.call(array, 0, Math.max(0, array.length - (n == null || guard ? 1 : n)));
+  };
+
+  // Get the last element of an array. Passing **n** will return the last N
+  // values in the array.
+  _.last = function(array, n, guard) {
+    if (array == null) return void 0;
+    if (n == null || guard) return array[array.length - 1];
+    return _.rest(array, Math.max(0, array.length - n));
+  };
+
+  // Returns everything but the first entry of the array. Aliased as `tail` and `drop`.
+  // Especially useful on the arguments object. Passing an **n** will return
+  // the rest N values in the array.
+  _.rest = _.tail = _.drop = function(array, n, guard) {
+    return slice.call(array, n == null || guard ? 1 : n);
+  };
+
+  // Trim out all falsy values from an array.
+  _.compact = function(array) {
+    return _.filter(array, _.identity);
+  };
+
+  // Internal implementation of a recursive `flatten` function.
+  var flatten = function(input, shallow, strict, startIndex) {
+    var output = [], idx = 0;
+    for (var i = startIndex || 0, length = getLength(input); i < length; i++) {
+      var value = input[i];
+      if (isArrayLike(value) && (_.isArray(value) || _.isArguments(value))) {
+        //flatten current level of array or arguments object
+        if (!shallow) value = flatten(value, shallow, strict);
+        var j = 0, len = value.length;
+        output.length += len;
+        while (j < len) {
+          output[idx++] = value[j++];
+        }
+      } else if (!strict) {
+        output[idx++] = value;
+      }
+    }
+    return output;
+  };
+
+  // Flatten out an array, either recursively (by default), or just one level.
+  _.flatten = function(array, shallow) {
+    return flatten(array, shallow, false);
+  };
+
+  // Return a version of the array that does not contain the specified value(s).
+  _.without = function(array) {
+    return _.difference(array, slice.call(arguments, 1));
+  };
+
+  // Produce a duplicate-free version of the array. If the array has already
+  // been sorted, you have the option of using a faster algorithm.
+  // Aliased as `unique`.
+  _.uniq = _.unique = function(array, isSorted, iteratee, context) {
+    if (!_.isBoolean(isSorted)) {
+      context = iteratee;
+      iteratee = isSorted;
+      isSorted = false;
+    }
+    if (iteratee != null) iteratee = cb(iteratee, context);
+    var result = [];
+    var seen = [];
+    for (var i = 0, length = getLength(array); i < length; i++) {
+      var value = array[i],
+          computed = iteratee ? iteratee(value, i, array) : value;
+      if (isSorted) {
+        if (!i || seen !== computed) result.push(value);
+        seen = computed;
+      } else if (iteratee) {
+        if (!_.contains(seen, computed)) {
+          seen.push(computed);
+          result.push(value);
+        }
+      } else if (!_.contains(result, value)) {
+        result.push(value);
+      }
+    }
+    return result;
+  };
+
+  // Produce an array that contains the union: each distinct element from all of
+  // the passed-in arrays.
+  _.union = function() {
+    return _.uniq(flatten(arguments, true, true));
+  };
+
+  // Produce an array that contains every item shared between all the
+  // passed-in arrays.
+  _.intersection = function(array) {
+    var result = [];
+    var argsLength = arguments.length;
+    for (var i = 0, length = getLength(array); i < length; i++) {
+      var item = array[i];
+      if (_.contains(result, item)) continue;
+      for (var j = 1; j < argsLength; j++) {
+        if (!_.contains(arguments[j], item)) break;
+      }
+      if (j === argsLength) result.push(item);
+    }
+    return result;
+  };
+
+  // Take the difference between one array and a number of other arrays.
+  // Only the elements present in just the first array will remain.
+  _.difference = function(array) {
+    var rest = flatten(arguments, true, true, 1);
+    return _.filter(array, function(value){
+      return !_.contains(rest, value);
+    });
+  };
+
+  // Zip together multiple lists into a single array -- elements that share
+  // an index go together.
+  _.zip = function() {
+    return _.unzip(arguments);
+  };
+
+  // Complement of _.zip. Unzip accepts an array of arrays and groups
+  // each array's elements on shared indices
+  _.unzip = function(array) {
+    var length = array && _.max(array, getLength).length || 0;
+    var result = Array(length);
+
+    for (var index = 0; index < length; index++) {
+      result[index] = _.pluck(array, index);
+    }
+    return result;
+  };
+
+  // Converts lists into objects. Pass either a single array of `[key, value]`
+  // pairs, or two parallel arrays of the same length -- one of keys, and one of
+  // the corresponding values.
+  _.object = function(list, values) {
+    var result = {};
+    for (var i = 0, length = getLength(list); i < length; i++) {
+      if (values) {
+        result[list[i]] = values[i];
+      } else {
+        result[list[i][0]] = list[i][1];
+      }
+    }
+    return result;
+  };
+
+  // Generator function to create the findIndex and findLastIndex functions
+  function createPredicateIndexFinder(dir) {
+    return function(array, predicate, context) {
+      predicate = cb(predicate, context);
+      var length = getLength(array);
+      var index = dir > 0 ? 0 : length - 1;
+      for (; index >= 0 && index < length; index += dir) {
+        if (predicate(array[index], index, array)) return index;
+      }
+      return -1;
+    };
+  }
+
+  // Returns the first index on an array-like that passes a predicate test
+  _.findIndex = createPredicateIndexFinder(1);
+  _.findLastIndex = createPredicateIndexFinder(-1);
+
+  // Use a comparator function to figure out the smallest index at which
+  // an object should be inserted so as to maintain order. Uses binary search.
+  _.sortedIndex = function(array, obj, iteratee, context) {
+    iteratee = cb(iteratee, context, 1);
+    var value = iteratee(obj);
+    var low = 0, high = getLength(array);
+    while (low < high) {
+      var mid = Math.floor((low + high) / 2);
+      if (iteratee(array[mid]) < value) low = mid + 1; else high = mid;
+    }
+    return low;
+  };
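+
+  // Example (illustrative): the insertion point that keeps the array sorted.
+  //   _.sortedIndex([10, 20, 30, 40, 50], 35); // => 3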
+
+  // Generator function to create the indexOf and lastIndexOf functions
+  function createIndexFinder(dir, predicateFind, sortedIndex) {
+    return function(array, item, idx) {
+      var i = 0, length = getLength(array);
+      if (typeof idx == 'number') {
+        if (dir > 0) {
+            i = idx >= 0 ? idx : Math.max(idx + length, i);
+        } else {
+            length = idx >= 0 ? Math.min(idx + 1, length) : idx + length + 1;
+        }
+      } else if (sortedIndex && idx && length) {
+        idx = sortedIndex(array, item);
+        return array[idx] === item ? idx : -1;
+      }
+      if (item !== item) {
+        idx = predicateFind(slice.call(array, i, length), _.isNaN);
+        return idx >= 0 ? idx + i : -1;
+      }
+      for (idx = dir > 0 ? i : length - 1; idx >= 0 && idx < length; idx += dir) {
+        if (array[idx] === item) return idx;
+      }
+      return -1;
+    };
+  }
+
+  // Return the position of the first occurrence of an item in an array,
+  // or -1 if the item is not included in the array.
+  // If the array is large and already in sort order, pass `true`
+  // for **isSorted** to use binary search.
+  _.indexOf = createIndexFinder(1, _.findIndex, _.sortedIndex);
+  _.lastIndexOf = createIndexFinder(-1, _.findLastIndex);
+
+  // Generate an integer Array containing an arithmetic progression. A port of
+  // the native Python `range()` function. See
+  // [the Python documentation](http://docs.python.org/library/functions.html#range).
+  _.range = function(start, stop, step) {
+    if (stop == null) {
+      stop = start || 0;
+      start = 0;
+    }
+    step = step || 1;
+
+    var length = Math.max(Math.ceil((stop - start) / step), 0);
+    var range = Array(length);
+
+    for (var idx = 0; idx < length; idx++, start += step) {
+      range[idx] = start;
+    }
+
+    return range;
+  };
+
+  // Function (ahem) Functions
+  // ------------------
+
+  // Determines whether to execute a function as a constructor
+  // or a normal function with the provided arguments
+  var executeBound = function(sourceFunc, boundFunc, context, callingContext, args) {
+    if (!(callingContext instanceof boundFunc)) return sourceFunc.apply(context, args);
+    var self = baseCreate(sourceFunc.prototype);
+    var result = sourceFunc.apply(self, args);
+    if (_.isObject(result)) return result;
+    return self;
+  };
+
+  // Create a function bound to a given object (assigning `this`, and arguments,
+  // optionally). Delegates to **ECMAScript 5**'s native `Function.bind` if
+  // available.
+  _.bind = function(func, context) {
+    if (nativeBind && func.bind === nativeBind) return nativeBind.apply(func, slice.call(arguments, 1));
+    if (!_.isFunction(func)) throw new TypeError('Bind must be called on a function');
+    var args = slice.call(arguments, 2);
+    var bound = function() {
+      return executeBound(func, bound, context, this, args.concat(slice.call(arguments)));
+    };
+    return bound;
+  };
+
+  // Partially apply a function by creating a version that has had some of its
+  // arguments pre-filled, without changing its dynamic `this` context. _ acts
+  // as a placeholder, allowing any combination of arguments to be pre-filled.
+  _.partial = function(func) {
+    var boundArgs = slice.call(arguments, 1);
+    var bound = function() {
+      var position = 0, length = boundArgs.length;
+      var args = Array(length);
+      for (var i = 0; i < length; i++) {
+        args[i] = boundArgs[i] === _ ? arguments[position++] : boundArgs[i];
+      }
+      while (position < arguments.length) args.push(arguments[position++]);
+      return executeBound(func, bound, this, this, args);
+    };
+    return bound;
+  };
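+
+  // Example (illustrative): `_` as a placeholder leaves the first argument open.
+  //   var subtract = function(a, b) { return b - a; };
+  //   var subFrom20 = _.partial(subtract, _, 20);
+  //   subFrom20(5); // => 15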
+
+  // Bind a number of an object's methods to that object. Remaining arguments
+  // are the method names to be bound. Useful for ensuring that all callbacks
+  // defined on an object belong to it.
+  _.bindAll = function(obj) {
+    var i, length = arguments.length, key;
+    if (length <= 1) throw new Error('bindAll must be passed function names');
+    for (i = 1; i < length; i++) {
+      key = arguments[i];
+      obj[key] = _.bind(obj[key], obj);
+    }
+    return obj;
+  };
+
+  // Memoize an expensive function by storing its results.
+  _.memoize = function(func, hasher) {
+    var memoize = function(key) {
+      var cache = memoize.cache;
+      var address = '' + (hasher ? hasher.apply(this, arguments) : key);
+      if (!_.has(cache, address)) cache[address] = func.apply(this, arguments);
+      return cache[address];
+    };
+    memoize.cache = {};
+    return memoize;
+  };
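+
+  // Example (illustrative): memoized Fibonacci caches each subproblem.
+  //   var fibonacci = _.memoize(function(n) {
+  //     return n < 2 ? n : fibonacci(n - 1) + fibonacci(n - 2);
+  //   });
+  //   fibonacci(30); // fast; intermediate results are cached by argument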
+
+  // Delays a function for the given number of milliseconds, and then calls
+  // it with the arguments supplied.
+  _.delay = function(func, wait) {
+    var args = slice.call(arguments, 2);
+    return setTimeout(function(){
+      return func.apply(null, args);
+    }, wait);
+  };
+
+  // Defers a function, scheduling it to run after the current call stack has
+  // cleared.
+  _.defer = _.partial(_.delay, _, 1);
+
+  // Returns a function, that, when invoked, will only be triggered at most once
+  // during a given window of time. Normally, the throttled function will run
+  // as much as it can, without ever going more than once per `wait` duration;
+  // but if you'd like to disable the execution on the leading edge, pass
+  // `{leading: false}`. To disable execution on the trailing edge, ditto.
+  _.throttle = function(func, wait, options) {
+    var context, args, result;
+    var timeout = null;
+    var previous = 0;
+    if (!options) options = {};
+    var later = function() {
+      previous = options.leading === false ? 0 : _.now();
+      timeout = null;
+      result = func.apply(context, args);
+      if (!timeout) context = args = null;
+    };
+    return function() {
+      var now = _.now();
+      if (!previous && options.leading === false) previous = now;
+      var remaining = wait - (now - previous);
+      context = this;
+      args = arguments;
+      if (remaining <= 0 || remaining > wait) {
+        if (timeout) {
+          clearTimeout(timeout);
+          timeout = null;
+        }
+        previous = now;
+        result = func.apply(context, args);
+        if (!timeout) context = args = null;
+      } else if (!timeout && options.trailing !== false) {
+        timeout = setTimeout(later, remaining);
+      }
+      return result;
+    };
+  };
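+
+  // Example (illustrative; assumes jQuery and a hypothetical updatePosition
+  // handler): fire at most once every 100ms while scrolling.
+  //   var throttled = _.throttle(updatePosition, 100);
+  //   $(window).scroll(throttled);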
+
+  // Returns a function, that, as long as it continues to be invoked, will not
+  // be triggered. The function will be called after it stops being called for
+  // N milliseconds. If `immediate` is passed, trigger the function on the
+  // leading edge, instead of the trailing.
+  _.debounce = function(func, wait, immediate) {
+    var timeout, args, context, timestamp, result;
+
+    var later = function() {
+      var last = _.now() - timestamp;
+
+      if (last < wait && last >= 0) {
+        timeout = setTimeout(later, wait - last);
+      } else {
+        timeout = null;
+        if (!immediate) {
+          result = func.apply(context, args);
+          if (!timeout) context = args = null;
+        }
+      }
+    };
+
+    return function() {
+      context = this;
+      args = arguments;
+      timestamp = _.now();
+      var callNow = immediate && !timeout;
+      if (!timeout) timeout = setTimeout(later, wait);
+      if (callNow) {
+        result = func.apply(context, args);
+        context = args = null;
+      }
+
+      return result;
+    };
+  };
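+
+  // Example (illustrative; assumes jQuery and a hypothetical calculateLayout
+  // handler): run only after resizing has stopped for 300ms.
+  //   var lazyLayout = _.debounce(calculateLayout, 300);
+  //   $(window).resize(lazyLayout);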
+
+  // Returns the first function passed as an argument to the second,
+  // allowing you to adjust arguments, run code before and after, and
+  // conditionally execute the original function.
+  _.wrap = function(func, wrapper) {
+    return _.partial(wrapper, func);
+  };
+
+  // Returns a negated version of the passed-in predicate.
+  _.negate = function(predicate) {
+    return function() {
+      return !predicate.apply(this, arguments);
+    };
+  };
+
+  // Returns a function that is the composition of a list of functions, each
+  // consuming the return value of the function that follows.
+  _.compose = function() {
+    var args = arguments;
+    var start = args.length - 1;
+    return function() {
+      var i = start;
+      var result = args[start].apply(this, arguments);
+      while (i--) result = args[i].call(this, result);
+      return result;
+    };
+  };
+
+  // Returns a function that will only be executed on and after the Nth call.
+  _.after = function(times, func) {
+    return function() {
+      if (--times < 1) {
+        return func.apply(this, arguments);
+      }
+    };
+  };
+
+  // Returns a function that will only be executed up to (but not including) the Nth call.
+  _.before = function(times, func) {
+    var memo;
+    return function() {
+      if (--times > 0) {
+        memo = func.apply(this, arguments);
+      }
+      if (times <= 1) func = null;
+      return memo;
+    };
+  };
+
+  // Returns a function that will be executed at most one time, no matter how
+  // often you call it. Useful for lazy initialization.
+  _.once = _.partial(_.before, 2);
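+
+  // Example (illustrative; `createApplication` is a hypothetical function):
+  //   var initialize = _.once(createApplication);
+  //   initialize(); // runs createApplication
+  //   initialize(); // no-op; returns the first call's result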
+
+  // Object Functions
+  // ----------------
+
+  // Keys in IE < 9 that won't be iterated by `for key in ...` and thus missed.
+  var hasEnumBug = !{toString: null}.propertyIsEnumerable('toString');
+  var nonEnumerableProps = ['valueOf', 'isPrototypeOf', 'toString',
+                      'propertyIsEnumerable', 'hasOwnProperty', 'toLocaleString'];
+
+  function collectNonEnumProps(obj, keys) {
+    var nonEnumIdx = nonEnumerableProps.length;
+    var constructor = obj.constructor;
+    var proto = (_.isFunction(constructor) && constructor.prototype) || ObjProto;
+
+    // Constructor is a special case.
+    var prop = 'constructor';
+    if (_.has(obj, prop) && !_.contains(keys, prop)) keys.push(prop);
+
+    while (nonEnumIdx--) {
+      prop = nonEnumerableProps[nonEnumIdx];
+      if (prop in obj && obj[prop] !== proto[prop] && !_.contains(keys, prop)) {
+        keys.push(prop);
+      }
+    }
+  }
+
+  // Retrieve the names of an object's own properties.
+  // Delegates to **ECMAScript 5**'s native `Object.keys`
+  _.keys = function(obj) {
+    if (!_.isObject(obj)) return [];
+    if (nativeKeys) return nativeKeys(obj);
+    var keys = [];
+    for (var key in obj) if (_.has(obj, key)) keys.push(key);
+    // Ahem, IE < 9.
+    if (hasEnumBug) collectNonEnumProps(obj, keys);
+    return keys;
+  };
+
+  // Retrieve all the property names of an object.
+  _.allKeys = function(obj) {
+    if (!_.isObject(obj)) return [];
+    var keys = [];
+    for (var key in obj) keys.push(key);
+    // Ahem, IE < 9.
+    if (hasEnumBug) collectNonEnumProps(obj, keys);
+    return keys;
+  };
+
+  // Retrieve the values of an object's properties.
+  _.values = function(obj) {
+    var keys = _.keys(obj);
+    var length = keys.length;
+    var values = Array(length);
+    for (var i = 0; i < length; i++) {
+      values[i] = obj[keys[i]];
+    }
+    return values;
+  };
+
+  // Returns the results of applying the iteratee to each element of the object
+  // In contrast to _.map it returns an object
+  _.mapObject = function(obj, iteratee, context) {
+    iteratee = cb(iteratee, context);
+    var keys = _.keys(obj),
+        length = keys.length,
+        results = {},
+        currentKey;
+    for (var index = 0; index < length; index++) {
+      currentKey = keys[index];
+      results[currentKey] = iteratee(obj[currentKey], currentKey, obj);
+    }
+    return results;
+  };
+
+  // Convert an object into a list of `[key, value]` pairs.
+  _.pairs = function(obj) {
+    var keys = _.keys(obj);
+    var length = keys.length;
+    var pairs = Array(length);
+    for (var i = 0; i < length; i++) {
+      pairs[i] = [keys[i], obj[keys[i]]];
+    }
+    return pairs;
+  };
+
+  // Invert the keys and values of an object. The values must be serializable.
+  _.invert = function(obj) {
+    var result = {};
+    var keys = _.keys(obj);
+    for (var i = 0, length = keys.length; i < length; i++) {
+      result[obj[keys[i]]] = keys[i];
+    }
+    return result;
+  };
+
+  // Return a sorted list of the function names available on the object.
+  // Aliased as `methods`
+  _.functions = _.methods = function(obj) {
+    var names = [];
+    for (var key in obj) {
+      if (_.isFunction(obj[key])) names.push(key);
+    }
+    return names.sort();
+  };
+
+  // Extend a given object with all the properties in passed-in object(s).
+  _.extend = createAssigner(_.allKeys);
+
+  // Assigns a given object with all the own properties in the passed-in object(s)
+  // (https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object/assign)
+  _.extendOwn = _.assign = createAssigner(_.keys);
+
+  // Returns the first key on an object that passes a predicate test
+  _.findKey = function(obj, predicate, context) {
+    predicate = cb(predicate, context);
+    var keys = _.keys(obj), key;
+    for (var i = 0, length = keys.length; i < length; i++) {
+      key = keys[i];
+      if (predicate(obj[key], key, obj)) return key;
+    }
+  };
+
+  // Return a copy of the object only containing the whitelisted properties.
+  _.pick = function(object, oiteratee, context) {
+    var result = {}, obj = object, iteratee, keys;
+    if (obj == null) return result;
+    if (_.isFunction(oiteratee)) {
+      keys = _.allKeys(obj);
+      iteratee = optimizeCb(oiteratee, context);
+    } else {
+      keys = flatten(arguments, false, false, 1);
+      iteratee = function(value, key, obj) { return key in obj; };
+      obj = Object(obj);
+    }
+    for (var i = 0, length = keys.length; i < length; i++) {
+      var key = keys[i];
+      var value = obj[key];
+      if (iteratee(value, key, obj)) result[key] = value;
+    }
+    return result;
+  };
+
+  // Return a copy of the object without the blacklisted properties.
+  _.omit = function(obj, iteratee, context) {
+    if (_.isFunction(iteratee)) {
+      iteratee = _.negate(iteratee);
+    } else {
+      var keys = _.map(flatten(arguments, false, false, 1), String);
+      iteratee = function(value, key) {
+        return !_.contains(keys, key);
+      };
+    }
+    return _.pick(obj, iteratee, context);
+  };
+
+  // Fill in a given object with default properties.
+  _.defaults = createAssigner(_.allKeys, true);
+
+  // Creates an object that inherits from the given prototype object.
+  // If additional properties are provided then they will be added to the
+  // created object.
+  _.create = function(prototype, props) {
+    var result = baseCreate(prototype);
+    if (props) _.extendOwn(result, props);
+    return result;
+  };
+
+  // Create a (shallow-cloned) duplicate of an object.
+  _.clone = function(obj) {
+    if (!_.isObject(obj)) return obj;
+    return _.isArray(obj) ? obj.slice() : _.extend({}, obj);
+  };
+
+  // Invokes interceptor with the obj, and then returns obj.
+  // The primary purpose of this method is to "tap into" a method chain, in
+  // order to perform operations on intermediate results within the chain.
+  _.tap = function(obj, interceptor) {
+    interceptor(obj);
+    return obj;
+  };
+
+  // Returns whether an object has a given set of `key:value` pairs.
+  _.isMatch = function(object, attrs) {
+    var keys = _.keys(attrs), length = keys.length;
+    if (object == null) return !length;
+    var obj = Object(object);
+    for (var i = 0; i < length; i++) {
+      var key = keys[i];
+      if (attrs[key] !== obj[key] || !(key in obj)) return false;
+    }
+    return true;
+  };
+
+
+  // Internal recursive comparison function for `isEqual`.
+  var eq = function(a, b, aStack, bStack) {
+    // Identical objects are equal. `0 === -0`, but they aren't identical.
+    // See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal).
+    if (a === b) return a !== 0 || 1 / a === 1 / b;
+    // A strict comparison is necessary because `null == undefined`.
+    if (a == null || b == null) return a === b;
+    // Unwrap any wrapped objects.
+    if (a instanceof _) a = a._wrapped;
+    if (b instanceof _) b = b._wrapped;
+    // Compare `[[Class]]` names.
+    var className = toString.call(a);
+    if (className !== toString.call(b)) return false;
+    switch (className) {
+      // Strings, numbers, regular expressions, dates, and booleans are compared by value.
+      case '[object RegExp]':
+      // RegExps are coerced to strings for comparison (Note: '' + /a/i === '/a/i')
+      case '[object String]':
+        // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is
+        // equivalent to `new String("5")`.
+        return '' + a === '' + b;
+      case '[object Number]':
+        // `NaN`s are equivalent, but non-reflexive.
+        // Object(NaN) is equivalent to NaN
+        if (+a !== +a) return +b !== +b;
+        // An `egal` comparison is performed for other numeric values.
+        return +a === 0 ? 1 / +a === 1 / b : +a === +b;
+      case '[object Date]':
+      case '[object Boolean]':
+        // Coerce dates and booleans to numeric primitive values. Dates are compared by their
+        // millisecond representations. Note that invalid dates with millisecond representations
+        // of `NaN` are not equivalent.
+        return +a === +b;
+    }
+
+    var areArrays = className === '[object Array]';
+    if (!areArrays) {
+      if (typeof a != 'object' || typeof b != 'object') return false;
+
+      // Objects with different constructors are not equivalent, but `Object`s or `Array`s
+      // from different frames are.
+      var aCtor = a.constructor, bCtor = b.constructor;
+      if (aCtor !== bCtor && !(_.isFunction(aCtor) && aCtor instanceof aCtor &&
+                               _.isFunction(bCtor) && bCtor instanceof bCtor)
+                          && ('constructor' in a && 'constructor' in b)) {
+        return false;
+      }
+    }
+    // Assume equality for cyclic structures. The algorithm for detecting cyclic
+    // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`.
+
+    // Initializing stack of traversed objects.
+    // It's done here since we only need them for objects and arrays comparison.
+    aStack = aStack || [];
+    bStack = bStack || [];
+    var length = aStack.length;
+    while (length--) {
+      // Linear search. Performance is inversely proportional to the number of
+      // unique nested structures.
+      if (aStack[length] === a) return bStack[length] === b;
+    }
+
+    // Add the first object to the stack of traversed objects.
+    aStack.push(a);
+    bStack.push(b);
+
+    // Recursively compare objects and arrays.
+    if (areArrays) {
+      // Compare array lengths to determine if a deep comparison is necessary.
+      length = a.length;
+      if (length !== b.length) return false;
+      // Deep compare the contents, ignoring non-numeric properties.
+      while (length--) {
+        if (!eq(a[length], b[length], aStack, bStack)) return false;
+      }
+    } else {
+      // Deep compare objects.
+      var keys = _.keys(a), key;
+      length = keys.length;
+      // Ensure that both objects contain the same number of properties before comparing deep equality.
+      if (_.keys(b).length !== length) return false;
+      while (length--) {
+        // Deep compare each member
+        key = keys[length];
+        if (!(_.has(b, key) && eq(a[key], b[key], aStack, bStack))) return false;
+      }
+    }
+    // Remove the first object from the stack of traversed objects.
+    aStack.pop();
+    bStack.pop();
+    return true;
+  };
+
+  // Perform a deep comparison to check if two objects are equal.
+  _.isEqual = function(a, b) {
+    return eq(a, b);
+  };
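+
+  // Example (illustrative): structural equality, where `===` would be false.
+  //   _.isEqual({ name: 'moe', luckyNumbers: [13, 27] },
+  //             { name: 'moe', luckyNumbers: [13, 27] }); // => true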
+
+  // Is a given array, string, or object empty?
+  // An "empty" object has no enumerable own-properties.
+  _.isEmpty = function(obj) {
+    if (obj == null) return true;
+    if (isArrayLike(obj) && (_.isArray(obj) || _.isString(obj) || _.isArguments(obj))) return obj.length === 0;
+    return _.keys(obj).length === 0;
+  };
+
+  // Is a given value a DOM element?
+  _.isElement = function(obj) {
+    return !!(obj && obj.nodeType === 1);
+  };
+
+  // Is a given value an array?
+  // Delegates to ECMA5's native Array.isArray
+  _.isArray = nativeIsArray || function(obj) {
+    return toString.call(obj) === '[object Array]';
+  };
+
+  // Is a given variable an object?
+  _.isObject = function(obj) {
+    var type = typeof obj;
+    return type === 'function' || type === 'object' && !!obj;
+  };
+
+  // Add some isType methods: isArguments, isFunction, isString, isNumber, isDate, isRegExp, isError.
+  _.each(['Arguments', 'Function', 'String', 'Number', 'Date', 'RegExp', 'Error'], function(name) {
+    _['is' + name] = function(obj) {
+      return toString.call(obj) === '[object ' + name + ']';
+    };
+  });
+
+  // Define a fallback version of the method in browsers (ahem, IE < 9), where
+  // there isn't any inspectable "Arguments" type.
+  if (!_.isArguments(arguments)) {
+    _.isArguments = function(obj) {
+      return _.has(obj, 'callee');
+    };
+  }
+
+  // Optimize `isFunction` if appropriate. Work around some typeof bugs in old v8,
+  // IE 11 (#1621), and in Safari 8 (#1929).
+  if (typeof /./ != 'function' && typeof Int8Array != 'object') {
+    _.isFunction = function(obj) {
+      return typeof obj == 'function' || false;
+    };
+  }
+
+  // Is a given object a finite number?
+  _.isFinite = function(obj) {
+    return isFinite(obj) && !isNaN(parseFloat(obj));
+  };
+
+  // Is the given value `NaN`? (NaN is the only number which does not equal itself).
+  _.isNaN = function(obj) {
+    return _.isNumber(obj) && obj !== +obj;
+  };
+
+  // Is a given value a boolean?
+  _.isBoolean = function(obj) {
+    return obj === true || obj === false || toString.call(obj) === '[object Boolean]';
+  };
+
+  // Is a given value equal to null?
+  _.isNull = function(obj) {
+    return obj === null;
+  };
+
+  // Is a given variable undefined?
+  _.isUndefined = function(obj) {
+    return obj === void 0;
+  };
+
+  // Shortcut function for checking if an object has a given property directly
+  // on itself (in other words, not on a prototype).
+  _.has = function(obj, key) {
+    return obj != null && hasOwnProperty.call(obj, key);
+  };
+
+  // Utility Functions
+  // -----------------
+
+  // Run Underscore.js in *noConflict* mode, returning the `_` variable to its
+  // previous owner. Returns a reference to the Underscore object.
+  _.noConflict = function() {
+    root._ = previousUnderscore;
+    return this;
+  };
+
+  // Keep the identity function around for default iteratees.
+  _.identity = function(value) {
+    return value;
+  };
+
+  // Predicate-generating functions. Often useful outside of Underscore.
+  _.constant = function(value) {
+    return function() {
+      return value;
+    };
+  };
+
+  _.noop = function(){};
+
+  _.property = property;
+
+  // Generates a function for a given object that returns a given property.
+  _.propertyOf = function(obj) {
+    return obj == null ? function(){} : function(key) {
+      return obj[key];
+    };
+  };
+
+  // Returns a predicate for checking whether an object has a given set of
+  // `key:value` pairs.
+  _.matcher = _.matches = function(attrs) {
+    attrs = _.extendOwn({}, attrs);
+    return function(obj) {
+      return _.isMatch(obj, attrs);
+    };
+  };
+
+  // Run a function **n** times.
+  _.times = function(n, iteratee, context) {
+    var accum = Array(Math.max(0, n));
+    iteratee = optimizeCb(iteratee, context, 1);
+    for (var i = 0; i < n; i++) accum[i] = iteratee(i);
+    return accum;
+  };
+
+  // Return a random integer between min and max (inclusive).
+  _.random = function(min, max) {
+    if (max == null) {
+      max = min;
+      min = 0;
+    }
+    return min + Math.floor(Math.random() * (max - min + 1));
+  };
+
+  // A (possibly faster) way to get the current timestamp as an integer.
+  _.now = Date.now || function() {
+    return new Date().getTime();
+  };
+
+  // List of HTML entities for escaping.
+  var escapeMap = {
+    '&': '&amp;',
+    '<': '&lt;',
+    '>': '&gt;',
+    '"': '&quot;',
+    "'": '&#x27;',
+    '`': '&#x60;'
+  };
+  var unescapeMap = _.invert(escapeMap);
+
+  // Functions for escaping and unescaping strings to/from HTML interpolation.
+  var createEscaper = function(map) {
+    var escaper = function(match) {
+      return map[match];
+    };
+    // Regexes for identifying a key that needs to be escaped
+    var source = '(?:' + _.keys(map).join('|') + ')';
+    var testRegexp = RegExp(source);
+    var replaceRegexp = RegExp(source, 'g');
+    return function(string) {
+      string = string == null ? '' : '' + string;
+      return testRegexp.test(string) ? string.replace(replaceRegexp, escaper) : string;
+    };
+  };
+  _.escape = createEscaper(escapeMap);
+  _.unescape = createEscaper(unescapeMap);
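+
+  // Example (illustrative): round-tripping HTML-significant characters.
+  //   _.escape('Curly, Larry & Moe');        // => 'Curly, Larry &amp; Moe'
+  //   _.unescape('Curly, Larry &amp; Moe');  // => 'Curly, Larry & Moe'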
+
+  // If the value of the named `property` is a function then invoke it with the
+  // `object` as context; otherwise, return it.
+  _.result = function(object, property, fallback) {
+    var value = object == null ? void 0 : object[property];
+    if (value === void 0) {
+      value = fallback;
+    }
+    return _.isFunction(value) ? value.call(object) : value;
+  };
+
+  // Generate a unique integer id (unique within the entire client session).
+  // Useful for temporary DOM ids.
+  var idCounter = 0;
+  _.uniqueId = function(prefix) {
+    var id = ++idCounter + '';
+    return prefix ? prefix + id : id;
+  };
+
+  // By default, Underscore uses ERB-style template delimiters, change the
+  // following template settings to use alternative delimiters.
+  _.templateSettings = {
+    evaluate    : /<%([\s\S]+?)%>/g,
+    interpolate : /<%=([\s\S]+?)%>/g,
+    escape      : /<%-([\s\S]+?)%>/g
+  };
+
+  // When customizing `templateSettings`, if you don't want to define an
+  // interpolation, evaluation or escaping regex, we need one that is
+  // guaranteed not to match.
+  var noMatch = /(.)^/;
+
+  // Certain characters need to be escaped so that they can be put into a
+  // string literal.
+  var escapes = {
+    "'":      "'",
+    '\\':     '\\',
+    '\r':     'r',
+    '\n':     'n',
+    '\u2028': 'u2028',
+    '\u2029': 'u2029'
+  };
+
+  var escaper = /\\|'|\r|\n|\u2028|\u2029/g;
+
+  var escapeChar = function(match) {
+    return '\\' + escapes[match];
+  };
+
+  // JavaScript micro-templating, similar to John Resig's implementation.
+  // Underscore templating handles arbitrary delimiters, preserves whitespace,
+  // and correctly escapes quotes within interpolated code.
+  // NB: `oldSettings` only exists for backwards compatibility.
+  _.template = function(text, settings, oldSettings) {
+    if (!settings && oldSettings) settings = oldSettings;
+    settings = _.defaults({}, settings, _.templateSettings);
+
+    // Combine delimiters into one regular expression via alternation.
+    var matcher = RegExp([
+      (settings.escape || noMatch).source,
+      (settings.interpolate || noMatch).source,
+      (settings.evaluate || noMatch).source
+    ].join('|') + '|$', 'g');
+
+    // Compile the template source, escaping string literals appropriately.
+    var index = 0;
+    var source = "__p+='";
+    text.replace(matcher, function(match, escape, interpolate, evaluate, offset) {
+      source += text.slice(index, offset).replace(escaper, escapeChar);
+      index = offset + match.length;
+
+      if (escape) {
+        source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'";
+      } else if (interpolate) {
+        source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'";
+      } else if (evaluate) {
+        source += "';\n" + evaluate + "\n__p+='";
+      }
+
+      // Adobe VMs need the match returned to produce the correct offset.
+      return match;
+    });
+    source += "';\n";
+
+    // If a variable is not specified, place data values in local scope.
+    if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n';
+
+    source = "var __t,__p='',__j=Array.prototype.join," +
+      "print=function(){__p+=__j.call(arguments,'');};\n" +
+      source + 'return __p;\n';
+
+    try {
+      var render = new Function(settings.variable || 'obj', '_', source);
+    } catch (e) {
+      e.source = source;
+      throw e;
+    }
+
+    var template = function(data) {
+      return render.call(this, data, _);
+    };
+
+    // Provide the compiled source as a convenience for precompilation.
+    var argument = settings.variable || 'obj';
+    template.source = 'function(' + argument + '){\n' + source + '}';
+
+    return template;
+  };
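+
+  // Example (illustrative): compile once, then render with data; `<%- %>`
+  // HTML-escapes the interpolated value.
+  //   var compiled = _.template('hello: <%= name %>');
+  //   compiled({ name: 'moe' }); // => 'hello: moe'
+  //   _.template('<b><%- value %></b>')({ value: '<script>' }); // => '<b>&lt;script&gt;</b>'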
+
+  // Add a "chain" function. Start chaining a wrapped Underscore object.
+  _.chain = function(obj) {
+    var instance = _(obj);
+    instance._chain = true;
+    return instance;
+  };
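+
+  // Example (illustrative): chain wrapped results until value() unwraps them.
+  //   _.chain([1, 2, 3])
+  //     .map(function(n) { return n * 2; })
+  //     .filter(function(n) { return n > 2; })
+  //     .value(); // => [4, 6]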
+
+  // OOP
+  // ---------------
+  // If Underscore is called as a function, it returns a wrapped object that
+  // can be used OO-style. This wrapper holds altered versions of all the
+  // underscore functions. Wrapped objects may be chained.
+
+  // Helper function to continue chaining intermediate results.
+  var result = function(instance, obj) {
+    return instance._chain ? _(obj).chain() : obj;
+  };
+
+  // Add your own custom functions to the Underscore object.
+  _.mixin = function(obj) {
+    _.each(_.functions(obj), function(name) {
+      var func = _[name] = obj[name];
+      _.prototype[name] = function() {
+        var args = [this._wrapped];
+        push.apply(args, arguments);
+        return result(this, func.apply(_, args));
+      };
+    });
+  };
+
+  // Add all of the Underscore functions to the wrapper object.
+  _.mixin(_);
+
+  // Add all mutator Array functions to the wrapper.
+  _.each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) {
+    var method = ArrayProto[name];
+    _.prototype[name] = function() {
+      var obj = this._wrapped;
+      method.apply(obj, arguments);
+      if ((name === 'shift' || name === 'splice') && obj.length === 0) delete obj[0];
+      return result(this, obj);
+    };
+  });
+
+  // Add all accessor Array functions to the wrapper.
+  _.each(['concat', 'join', 'slice'], function(name) {
+    var method = ArrayProto[name];
+    _.prototype[name] = function() {
+      return result(this, method.apply(this._wrapped, arguments));
+    };
+  });
+
+  // Extracts the result from a wrapped and chained object.
+  _.prototype.value = function() {
+    return this._wrapped;
+  };
+
+  // Provide unwrapping proxy for some methods used in engine operations
+  // such as arithmetic and JSON stringification.
+  _.prototype.valueOf = _.prototype.toJSON = _.prototype.value;
+
+  _.prototype.toString = function() {
+    return '' + this._wrapped;
+  };
+
+  // AMD registration happens at the end for compatibility with AMD loaders
+  // that may not enforce next-turn semantics on modules. Even though general
+  // practice for AMD registration is to be anonymous, underscore registers
+  // as a named module because, like jQuery, it is a base library that is
+  // popular enough to be bundled in a third party lib, but not be part of
+  // an AMD load request. Those cases could generate an error when an
+  // anonymous define() is called outside of a loader request.
+  if (typeof define === 'function' && define.amd) {
+    define('underscore', [], function() {
+      return _;
+    });
+  }
+}.call(this));
diff --git a/client/galaxy/scripts/mvc/annotation.js b/client/galaxy/scripts/mvc/annotation.js
new file mode 100644
index 0000000..49e2b98
--- /dev/null
+++ b/client/galaxy/scripts/mvc/annotation.js
@@ -0,0 +1,79 @@
+define([
+    "mvc/base-mvc",
+    "utils/localization",
+    "ui/editable-text",
+], function( baseMVC, _l ){
+// =============================================================================
+/** A view on any model that has an 'annotation' attribute
+ */
+var AnnotationEditor = Backbone.View
+        .extend( baseMVC.LoggableMixin )
+        .extend( baseMVC.HiddenUntilActivatedViewMixin ).extend({
+
+    tagName     : 'div',
+    className   : 'annotation-display',
+
+    /** Set up listeners, parse options */
+    initialize : function( options ){
+        options = options || {};
+        this.tooltipConfig = options.tooltipConfig || { placement: 'bottom' };
+        //console.debug( this, options );
+        // listen to the model only for changes to its annotation
+        this.listenTo( this.model, 'change:annotation', function(){
+            this.render();
+        });
+        this.hiddenUntilActivated( options.$activator, options );
+    },
+
+    /** Build the DOM elements, make the annotation text editable, and set up behaviors */
+    render : function(){
+        var view = this;
+        this.$el.html( this._template() );
+
+        //TODO: handle empties better
+        this.$annotation().make_text_editable({
+            use_textarea: true,
+            on_finish: function( newAnnotation ){
+                view.$annotation().text( newAnnotation );
+                view.model.save({ annotation: newAnnotation }, { silent: true })
+                    .fail( function(){
+                        view.$annotation().text( view.model.previous( 'annotation' ) );
+                    });
+            }
+        });
+        return this;
+    },
+
+    /** @returns {String} the html text used to build the view's DOM */
+    _template : function(){
+        var annotation = this.model.get( 'annotation' );
+        return [
+            //TODO: make prompt optional
+            '<label class="prompt">', _l( 'Annotation' ), '</label>',
+            // display the current annotation as escaped text
+            '<div class="annotation">',
+                _.escape( annotation ),
+            '</div>'
+        ].join( '' );
+    },
+
+    /** @returns {jQuery} the main element for this view */
+    $annotation : function(){
+        return this.$el.find( '.annotation' );
+    },
+
+    /** shut down event listeners and remove this view's DOM */
+    remove : function(){
+        this.$annotation().off();
+        this.stopListening( this.model );
+        Backbone.View.prototype.remove.call( this );
+    },
+
+    /** string rep */
+    toString : function(){ return [ 'AnnotationEditor(', this.model + '', ')' ].join(''); }
+});
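+
+// Example usage (illustrative; assumes a saved Backbone model carrying an
+// 'annotation' attribute and an existing activator element; names below are
+// hypothetical):
+//   var editor = new AnnotationEditor({
+//       model      : someModel,
+//       $activator : $( '.annotation-toggle' )
+//   });
+//   $( 'body' ).append( editor.render().$el );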
+// =============================================================================
+return {
+    AnnotationEditor : AnnotationEditor
+};
+});
diff --git a/client/galaxy/scripts/mvc/base-mvc.js b/client/galaxy/scripts/mvc/base-mvc.js
new file mode 100644
index 0000000..950f90f
--- /dev/null
+++ b/client/galaxy/scripts/mvc/base-mvc.js
@@ -0,0 +1,591 @@
+define([
+    'libs/underscore',
+    'libs/backbone',
+    'utils/add-logging',
+    'utils/localization'
+], function( _, Backbone, addLogging, _l ){
+'use strict';
+
+//==============================================================================
+/** @class Mixin to add logging capabilities to an object.
+ *      Designed to allow switching an object's log output off/on at one central
+ *      statement. Can be used with plain browser console (or something more
+ *      complex like an AJAX logger).
+ *  <br />NOTE: currently only uses the console.debug log function
+ *  (as opposed to debug, error, warn, etc.)
+ *  @name LoggableMixin
+ *
+ *  @example
+ *  // Add to your models/views at the definition using chaining:
+ *      var MyModel = Backbone.Model.extend( LoggableMixin ).extend({ // ... });
+ *
+ *  // or - more explicitly AFTER the definition:
+ *      var MyModel = Backbone.Model.extend({
+ *          logger  : console
+ *          // ...
+ *          this.log( '$#%& it! - broken already...' );
+ *      })
+ *      _.extend( MyModel.prototype, LoggableMixin )
+ *
+ */
+var LoggableMixin =  /** @lends LoggableMixin# */{
+    // replace null with console (if available) to see all logs for a particular view/model
+    /** The logging object whose log function will be used to output
+     *      messages. Null will suppress all logging. Commonly set to console.
+     */
+    logger        : null,
+    /** @type {String} a namespace for filtering/focusing log output */
+    _logNamespace : '.',
+
+};
+addLogging( LoggableMixin );
+
+
+//==============================================================================
+/** Backbone model that syncs to the browser's sessionStorage API.
+ *      This all largely happens behind the scenes and no special calls are required.
+ */
+var SessionStorageModel = Backbone.Model.extend({
+    initialize : function( initialAttrs ){
+        // check for sessionStorage and error if no id is provided
+        this._checkEnabledSessionStorage();
+        if( !initialAttrs.id ){
+            throw new Error( 'SessionStorageModel requires an id in the initial attributes' );
+        }
+        this.id = initialAttrs.id;
+
+        // load existing from storage (if any), clear any attrs set by bbone before init is called,
+        //  layer initial over existing and defaults, and save
+        var existing = ( !this.isNew() )?( this._read( this ) ):( {} );
+        this.clear({ silent: true });
+        this.save( _.extend( {}, this.defaults, existing, initialAttrs ), { silent: true });
+
+        // save on any change to it immediately
+        this.on( 'change', function(){
+            this.save();
+        });
+    },
+
+    _checkEnabledSessionStorage : function(){
+        try {
+            return window.sessionStorage.length >= 0;
+        } catch( err ){
+            alert( 'Please enable cookies in your browser for this Galaxy site' );
+            return false;
+        }
+    },
+
+    /** override of bbone sync to save to sessionStorage rather than REST
+     *      bbone options (success, error, etc.) should still apply
+     */
+    sync : function( method, model, options ){
+        if( !options.silent ){
+            model.trigger( 'request', model, {}, options );
+        }
+        var returned = {};
+        switch( method ){
+            case 'create'   : returned = this._create( model ); break;
+            case 'read'     : returned = this._read( model );   break;
+            case 'update'   : returned = this._update( model ); break;
+            case 'delete'   : returned = this._delete( model ); break;
+        }
+        if( returned !== undefined && returned !== null ){
+            if( options.success ){ options.success(); }
+        } else {
+            if( options.error ){ options.error(); }
+        }
+        return returned;
+    },
+
+    /** set storage to the stringified item */
+    _create : function( model ){
+        try {
+            var json = model.toJSON(),
+                set = sessionStorage.setItem( model.id, JSON.stringify( json ) );
+            return ( set === null )?( set ):( json );
+        // DOMException is thrown in Safari if in private browsing mode and sessionStorage is attempted:
+        // http://stackoverflow.com/questions/14555347
+        // TODO: this could probably use a more general soln - like detecting priv. mode + safari => non-ajaxing Model
+        } catch( err ){
+            if( !( ( err instanceof DOMException ) && ( navigator.userAgent.indexOf("Safari") > -1 ) ) ){
+                throw err;
+            }
+        }
+        return null;
+    },
+
+    /** read and parse json from storage */
+    _read : function( model ){
+        return JSON.parse( sessionStorage.getItem( model.id ) );
+    },
+
+    /** set storage to the item (alias to create) */
+    _update : function( model ){
+        return model._create( model );
+    },
+
+    /** remove the item from storage */
+    _delete : function( model ){
+        return sessionStorage.removeItem( model.id );
+    },
+
+    /** T/F whether sessionStorage contains the model's id (data is present) */
+    isNew : function(){
+        return !sessionStorage.hasOwnProperty( this.id );
+    },
+
+    _log : function(){
+        return JSON.stringify( this.toJSON(), null, '  ' );
+    },
+    toString : function(){
+        return 'SessionStorageModel(' + this.id + ')';
+    }
+
+});
+(function(){
+    SessionStorageModel.prototype = _.omit( SessionStorageModel.prototype, 'url', 'urlRoot' );
+}());
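+
+// A minimal usage sketch; the id and 'show_deleted' attribute below are
+// hypothetical, not upstream defaults:
+//     var prefs = new SessionStorageModel({ id: 'history-panel-prefs', show_deleted: false });
+//     prefs.set( 'show_deleted', true ); // any change saves to sessionStorage immediately
+//     // a new instance with the same id re-reads the stored attributes:
+//     new SessionStorageModel({ id: 'history-panel-prefs' }).get( 'show_deleted' ); // true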
+
+
+//==============================================================================
+/** Function that allows mixing hashes into bbone MVC while showing the mixins first
+ *      (before the more local class overrides/hash).
+ *      Basically, a simple reversal of param order on _.defaults() - to show mixins at the top of the definition.
+ *  @example:
+ *      var NewModel = Something.extend( mixin( MyMixinA, MyMixinB, { ... myVars : ... }) );
+ *
+ *  NOTE: this does not combine any hashes (like events, etc.) and you're expected to handle that
+ */
+function mixin( mixinHash1, /* mixinHash2, etc: ... variadic */ propsHash ){
+    var args = Array.prototype.slice.call( arguments, 0 ),
+        lastArg = args.pop();
+    args.unshift( lastArg );
+    return _.defaults.apply( _, args );
+}
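+
+// For example (MixinA and the local hash below are hypothetical), the local
+// hash still wins because _.defaults only fills in missing keys:
+//     var MixinA = { fxSpeed : 'fast', isHidden : function(){ return false; } };
+//     var MyView = Backbone.View.extend( mixin( MixinA, {
+//         fxSpeed : 'slow'
+//     }));
+//     // MyView.prototype.fxSpeed === 'slow'; isHidden comes from MixinA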
+
+
+//==============================================================================
+/** A mixin for models that allows true/false matching against their attributes - useful when
+ *      searching or filtering collections of models.
+ * @example:
+ *      see hda-model for searchAttribute and searchAliases definition examples.
+ *      see history-contents.matches for how collections are filtered
+ *      and see readonly-history-view.searchHdas for how user input is connected to the filtering
+ */
+var SearchableModelMixin = {
+
+    /** what attributes of an HDA will be used in a text search */
+    searchAttributes : [
+        // override
+    ],
+
+    /** our attr keys often don't match the labels we display to the user - so, when using
+     *      attribute specifiers ('name="bler"') in a term, allow passing in aliases for the
+     *      following attr keys.
+     */
+    searchAliases : {
+        // override
+    },
+
+    /** search the attribute with key attrKey for the string searchFor; T/F if found */
+    searchAttribute : function( attrKey, searchFor ){
+        var attrVal = this.get( attrKey );
+        //this.debug( 'searchAttribute', attrKey, attrVal, searchFor );
+        // bail if empty searchFor or unsearchable values
+        if( !searchFor
+        ||  ( attrVal === undefined || attrVal === null ) ){
+            return false;
+        }
+        // pass to sep. fn for deep search of array attributes
+        if( _.isArray( attrVal ) ){ return this._searchArrayAttribute( attrVal, searchFor ); }
+        return ( attrVal.toString().toLowerCase().indexOf( searchFor.toLowerCase() ) !== -1 );
+    },
+
+    /** deep(er) search for array attributes; T/F if found */
+    _searchArrayAttribute : function( array, searchFor ){
+        //this.debug( '_searchArrayAttribute', array, searchFor );
+        searchFor = searchFor.toLowerCase();
+        //precondition: searchFor has already been validated as non-empty string
+        //precondition: assumes only 1 level array
+        //TODO: could possibly break up searchFor more (CSV...)
+        return _.any( array, function( elem ){
+            return ( elem.toString().toLowerCase().indexOf( searchFor ) !== -1 );
+        });
+    },
+
+    /** search all searchAttributes for the string searchFor,
+     *      returning a list of keys of attributes that contain searchFor
+     */
+    search : function( searchFor ){
+        var model = this;
+        return _.filter( this.searchAttributes, function( key ){
+            return model.searchAttribute( key, searchFor );
+        });
+    },
+
+    /** alias of search, but returns a boolean; accepts attribute specifiers where
+     *      the attributes searched can be narrowed to a single attribute using
+     *      the form: matches( 'genome_build=hg19' )
+     *      (the attribute keys allowed can also be aliases to the true attribute key;
+     *          see searchAliases above)
+     *  @param {String} term   plain text or ATTR_SPECIFIER sep. key=val pair
+     *  @returns {Boolean} was term found in (any) attribute(s)
+     */
+    matches : function( term ){
+        var ATTR_SPECIFIER = '=',
+            split = term.split( ATTR_SPECIFIER );
+        // attribute is specified - search only that
+        if( split.length >= 2 ){
+            var attrKey = split[0];
+            attrKey = this.searchAliases[ attrKey ] || attrKey;
+            return this.searchAttribute( attrKey, split[1] );
+        }
+        // no attribute is specified - search all attributes in searchAttributes
+        return !!this.search( term ).length;
+    },
+
+    /** an implicit AND search for all terms; IOW, a model must match all terms given
+     *      where terms is a whitespace separated value string.
+     *      e.g. given terms of: 'blah bler database=hg19'
+     *          an HDA would have to have attributes containing blah AND bler AND a genome_build == hg19
+     *      To include whitespace in terms: wrap the term in double quotations (name="blah bler").
+     */
+    matchesAll : function( terms ){
+        var model = this;
+        // break the terms up by whitespace and filter out the empty strings
+        terms = terms.match( /(".*"|\w*=".*"|\S*)/g ).filter( function( s ){ return !!s; });
+        return _.all( terms, function( term ){
+            term = term.replace( /"/g, '' );
+            return model.matches( term );
+        });
+    }
+};
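+
+// A minimal sketch of mixing this into a model; the attributes and alias are
+// illustrative only (see hda-model for the real definitions):
+//     var Searchable = Backbone.Model.extend( mixin( SearchableModelMixin, {
+//         searchAttributes : [ 'name', 'genome_build' ],
+//         searchAliases    : { database : 'genome_build' }
+//     }));
+//     var m = new Searchable({ name : 'blah bler', genome_build : 'hg19' });
+//     m.matches( 'database=hg19' );         // true - alias resolves to genome_build
+//     m.matchesAll( 'bler database=hg19' ); // true - implicit AND over both terms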
+
+
+//==============================================================================
+/** A view that renders hidden and shows when some activator is clicked.
+ *      options:
+ *          showFn: the effect used to show/hide the View (defaults to jq.toggle)
+ *          $elementShown: some jqObject (defaults to this.$el) to be shown/hidden
+ *          onshowFirstTime: fn called the first time the view is shown
+ *          onshow: fn called every time the view is shown
+ *          onhide: fn called every time the view is hidden
+ *      events:
+ *          hiddenUntilActivated:shown (the view is passed as an arg)
+ *          hiddenUntilActivated:hidden (the view is passed as an arg)
+ *      instance vars:
+ *          view.hidden {boolean} is the view in the hidden state
+ */
+var HiddenUntilActivatedViewMixin = /** @lends hiddenUntilActivatedMixin# */{
+//TODO: since this is a mixin, consider moving toggle, hidden into HUAVOptions
+
+    /** call this in your initialize to set up the mixin
+     *  @param {jQuery} $activator the 'button' that's clicked to show/hide the view
+     *  @param {Object} options hash with mixin options
+     */
+    hiddenUntilActivated : function( $activator, options ){
+        // call this in your view's initialize fn
+        options = options || {};
+//TODO: flesh out options - show them all here
+        this.HUAVOptions = {
+            $elementShown   : this.$el,
+            showFn          : jQuery.prototype.toggle,
+            showSpeed       : 'fast'
+        };
+        _.extend( this.HUAVOptions, options || {});
+        /** has this been shown already (and onshowFirstTime called)? */
+        this.HUAVOptions.hasBeenShown = this.HUAVOptions.$elementShown.is( ':visible' );
+        this.hidden = this.isHidden();
+
+        if( $activator ){
+            var mixin = this;
+            $activator.on( 'click', function( ev ){
+                mixin.toggle( mixin.HUAVOptions.showSpeed );
+            });
+        }
+    },
+
+//TODO:?? remove? use .hidden?
+    /** returns T/F if the view is hidden */
+    isHidden : function(){
+        return ( this.HUAVOptions.$elementShown.is( ':hidden' ) );
+    },
+
+    /** toggle the hidden state, show/hide $elementShown, call onshow/hide, trigger events */
+    toggle : function(){
+//TODO: more specific name - toggle is too general
+        // can be called manually as well with normal toggle arguments
+        //TODO: better as a callback (when the show/hide is actually done)
+        // show
+        if( this.hidden ){
+            // fire the optional fns on the first/each showing - good for render()
+            if( !this.HUAVOptions.hasBeenShown ){
+                if( _.isFunction( this.HUAVOptions.onshowFirstTime ) ){
+                    this.HUAVOptions.hasBeenShown = true;
+                    this.HUAVOptions.onshowFirstTime.call( this );
+                }
+            }
+            if( _.isFunction( this.HUAVOptions.onshow ) ){
+                this.HUAVOptions.onshow.call( this );
+                this.trigger( 'hiddenUntilActivated:shown', this );
+            }
+            this.hidden = false;
+
+        // hide
+        } else {
+            if( _.isFunction( this.HUAVOptions.onhide ) ){
+                this.HUAVOptions.onhide.call( this );
+                this.trigger( 'hiddenUntilActivated:hidden', this );
+            }
+            this.hidden = true;
+        }
+        return this.HUAVOptions.showFn.apply( this.HUAVOptions.$elementShown, arguments );
+    }
+};
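+
+// A minimal sketch; options.$activator is whatever element toggles the view:
+//     var FoldoutView = Backbone.View.extend( mixin( HiddenUntilActivatedViewMixin, {
+//         initialize : function( options ){
+//             // clicks on options.$activator now toggle this.$el, firing the
+//             // onshowFirstTime/onshow/onhide callbacks and the bbone events
+//             this.hiddenUntilActivated( options.$activator, {
+//                 onshowFirstTime : this.render
+//             });
+//         }
+//     }));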
+
+
+//==============================================================================
+/** Mixin for views that can be dragged and dropped
+ *      Allows for the drag behavior to be turned on/off, setting/removing jQuery event
+ *          handlers each time.
+ *      dataTransfer data is set to the JSON string of the view's model.toJSON
+ *      Override '$dragHandle' to define the draggable DOM sub-element.
+ */
+var DraggableViewMixin = {
+
+    /** set up instance vars to track whether this view is currently draggable */
+    initialize : function( attributes ){
+        /** is this view currently draggable? */
+        this.draggable  = attributes.draggable || false;
+    },
+
+    /** what part of the view's DOM triggers the dragging */
+    $dragHandle : function(){
+//TODO: make abstract/general - move this to listItem
+        // override to the element you want to be your view's handle
+        return this.$( '.title-bar' );
+    },
+
+    /** toggle whether this view is draggable */
+    toggleDraggable : function(){
+        if( this.draggable ){
+            this.draggableOff();
+        } else {
+            this.draggableOn();
+        }
+    },
+
+    /** allow the view to be dragged, set up event handlers */
+    draggableOn : function(){
+        this.draggable = true;
+        this.dragStartHandler = _.bind( this._dragStartHandler, this );
+        this.dragEndHandler   = _.bind( this._dragEndHandler,   this );
+
+        var handle = this.$dragHandle().attr( 'draggable', true ).get(0);
+        handle.addEventListener( 'dragstart', this.dragStartHandler, false );
+        handle.addEventListener( 'dragend',   this.dragEndHandler,   false );
+    },
+
+    /** turn off view dragging and remove event listeners */
+    draggableOff : function(){
+        this.draggable = false;
+        var handle = this.$dragHandle().attr( 'draggable', false ).get(0);
+        handle.removeEventListener( 'dragstart', this.dragStartHandler, false );
+        handle.removeEventListener( 'dragend',   this.dragEndHandler,   false );
+    },
+
+    /** sets the dataTransfer data to the model's toJSON
+     *  @fires draggable:dragstart (bbone event) which is passed the event and this view
+     */
+    _dragStartHandler : function( event ){
+        event.dataTransfer.effectAllowed = 'move';
+        //ASSUMES: this.model
+        //TODO: all except IE: should be 'application/json', IE: must be 'text'
+        event.dataTransfer.setData( 'text', JSON.stringify( this.model.toJSON() ) );
+        this.trigger( 'draggable:dragstart', event, this );
+        return false;
+    },
+
+    /** handle the dragend
+     *  @fires draggable:dragend (bbone event) which is passed the event and this view
+     */
+    _dragEndHandler : function( event ){
+        this.trigger( 'draggable:dragend', event, this );
+        return false;
+    }
+};
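+
+// A minimal sketch; assumes the view's template renders a '.title-bar' to act
+// as the default drag handle, and that a model is passed in ('someModel' is a
+// placeholder):
+//     var DraggableItem = Backbone.View.extend( mixin( DraggableViewMixin, {} ) );
+//     var item = new DraggableItem({ model : someModel, draggable : true });
+//     item.render();       // hypothetical render producing the '.title-bar'
+//     item.draggableOn();  // dragstart now sets dataTransfer to the model's JSON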
+
+
+//==============================================================================
+/** Mixin that allows a view to be selected (gen. from a list).
+ *      Selection controls ($selector) may be hidden/shown/toggled.
+ *          The bbone event 'selectable' is fired when the controls are shown/hidden (passed T/F).
+ *      Default rendering is a font-awesome checkbox.
+ *      Default selector is '.selector' within the view's $el.
+ *      The bbone events 'selected' and 'de-selected' are fired when the $selector is clicked.
+ *          Both events are passed the view and the (jQuery) event.
+ */
+var SelectableViewMixin = {
+
+    /** Set up instance state vars for whether the selector is shown and whether the view has been selected */
+    initialize : function( attributes ){
+        /** is the view currently in selection mode? */
+        this.selectable = attributes.selectable || false;
+        /** is the view currently selected? */
+        this.selected   = attributes.selected || false;
+    },
+
+    /** $el sub-element where the selector is rendered and what can be clicked to select. */
+    $selector : function(){
+        return this.$( '.selector' );
+    },
+
+    /** How the selector is rendered - defaults to font-awesome checkbox */
+    _renderSelected : function(){
+        // override
+        this.$selector().find( 'span' )
+            .toggleClass( 'fa-check-square-o', this.selected ).toggleClass( 'fa-square-o', !this.selected );
+    },
+
+    /** Toggle whether the selector is shown */
+    toggleSelector : function(){
+//TODO: use this.selectable
+        if( !this.$selector().is( ':visible' ) ){
+            this.showSelector();
+        } else {
+            this.hideSelector();
+        }
+    },
+
+    /** Display the selector control.
+     *  @param {Number} speed a jQuery fx speed
+     *  @fires: selectable which is passed true (IOW, the selector is shown) and the view
+     */
+    showSelector : function( speed ){
+        speed = speed !== undefined? speed : this.fxSpeed;
+        // make sure selected state is represented properly
+        this.selectable = true;
+        this.trigger( 'selectable', true, this );
+        this._renderSelected();
+        if( speed ){
+            this.$selector().show( speed );
+        } else {
+            this.$selector().show();
+        }
+    },
+
+    /** remove the selector control
+     *  @param {Number} speed a jQuery fx speed
+     *  @fires: selectable which is passed false (IOW, the selector is not shown) and the view
+     */
+    hideSelector : function( speed ){
+        speed = speed !== undefined? speed : this.fxSpeed;
+        // reverse the process from showSelector
+        this.selectable = false;
+        this.trigger( 'selectable', false, this );
+        if( speed ){
+            this.$selector().hide( speed );
+        } else {
+            this.$selector().hide();
+        }
+    },
+
+    /** Toggle whether the view is selected */
+    toggleSelect : function( event ){
+        if( this.selected ){
+            this.deselect( event );
+        } else {
+            this.select( event );
+        }
+    },
+
+    /** Select this view and re-render the selector control to show it
+     *  @param {Event} event a jQuery event that caused the selection
+     *  @fires: selected which is passed the view and the DOM event that triggered it (optionally)
+     */
+    select : function( event ){
+        // switch icon, set selected, and trigger event
+        if( !this.selected ){
+            this.trigger( 'selected', this, event );
+            this.selected = true;
+            this._renderSelected();
+        }
+        return false;
+    },
+
+    /** De-select this view and re-render the selector control to reflect it
+     *  @param {Event} event a jQuery event that caused the de-selection
+     *  @fires: de-selected which is passed the view and the DOM event that triggered it (optionally)
+     */
+    deselect : function( event ){
+        // switch icon, set selected, and trigger event
+        if( this.selected ){
+            this.trigger( 'de-selected', this, event );
+            this.selected = false;
+            this._renderSelected();
+        }
+        return false;
+    }
+};
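+
+// A minimal sketch; assumes the view's template renders a '.selector' element
+// containing a font-awesome <span> for the checkbox:
+//     var SelectableItem = Backbone.View.extend( mixin( SelectableViewMixin, {
+//         events : { 'click .selector' : 'toggleSelect' }
+//     }));
+//     var item = new SelectableItem({ selectable : true });
+//     item.on( 'selected', function( view, ev ){ /* add view to a selection */ });
+//     item.showSelector(); // fires 'selectable' with true and renders the checkbox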
+
+
+//==============================================================================
+/** Return an underscore template fn from an array of strings.
+ *  @param {String[]} template      the template strings to compile into the underscore template fn
+ *  @param {String} jsonNamespace   an optional namespace for the json data passed in (defaults to 'model')
+ *  @returns {Function} the (wrapped) underscore template fn
+ *      The returned function accepts ( json, view ): the data to render under
+ *      jsonNamespace and, optionally, the view using the template.
+ *
+ *  The template strings can access:
+ *      the json/model hash under the jsonNamespace above ("<%- model.myAttr %>"),
+ *      _l: the localizer function,
+ *      view (if passed): ostensibly, the view using the template (handy for view instance vars).
+ *      Because they're namespaced, undefined attributes will not throw an error.
+ *
+ *  @example:
+ *      templateBler : BASE_MVC.wrapTemplate([
+ *          '<div class="myclass <%- mynamespace.modelClass %>">',
+ *              '<span><% print( _l( mynamespace.message ) ); %>:<%= view.status %></span>',
+ *          '</div>'
+ *      ], 'mynamespace' )
+ *
+ *  Meant to be called in a View's definition in order to compile only once.
+ */
+function wrapTemplate( template, jsonNamespace ){
+    jsonNamespace = jsonNamespace || 'model';
+    var templateFn = _.template( template.join( '' ) );
+    return function( json, view ){
+        var templateVars = { view : view || {}, _l : _l };
+        templateVars[ jsonNamespace ] = json || {};
+        return templateFn( templateVars );
+    };
+}
+
+
+//==============================================================================
+/** Return a comparator function for sorted Collections */
+function buildComparator( attribute_name, options ){
+    options = options || {};
+    var ascending = options.ascending? 1 : -1;
+    return function __comparator( a, b ){
+        a = a.get( attribute_name );
+        b = b.get( attribute_name );
+        return ( a < b? -1 : ( a > b? 1 : 0 ) ) * ascending;
+    };
+}
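+
+// For example, a collection kept sorted by a (hypothetical) 'name' attribute:
+//     var SortedByName = Backbone.Collection.extend({
+//         comparator : buildComparator( 'name', { ascending : true })
+//     });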
+
+
+//==============================================================================
+    return {
+        LoggableMixin                   : LoggableMixin,
+        SessionStorageModel             : SessionStorageModel,
+        mixin                           : mixin,
+        SearchableModelMixin            : SearchableModelMixin,
+        HiddenUntilActivatedViewMixin   : HiddenUntilActivatedViewMixin,
+        DraggableViewMixin              : DraggableViewMixin,
+        SelectableViewMixin             : SelectableViewMixin,
+        wrapTemplate                    : wrapTemplate,
+        buildComparator                 : buildComparator,
+    };
+});
diff --git a/client/galaxy/scripts/mvc/base/controlled-fetch-collection.js b/client/galaxy/scripts/mvc/base/controlled-fetch-collection.js
new file mode 100644
index 0000000..3d3d0fe
--- /dev/null
+++ b/client/galaxy/scripts/mvc/base/controlled-fetch-collection.js
@@ -0,0 +1,327 @@
+define([
+    'libs/underscore',
+    'libs/backbone',
+    'mvc/base-mvc',
+], function( _, Backbone, BASE_MVC ){
+'use strict';
+
+//=============================================================================
+/**
+ * A Collection that can be limited/offset/re-ordered/filtered.
+ * @type {Backbone.Collection}
+ */
+var ControlledFetchCollection = Backbone.Collection.extend({
+
+    /** call setOrder on initialization to build the comparator based on options */
+    initialize : function( models, options ){
+        Backbone.Collection.prototype.initialize.call( this, models, options );
+        this.setOrder( options.order || this.order, { silent: true });
+    },
+
+    /** set up to track order changes and re-sort when changed */
+    _setUpListeners : function(){
+        return this.on({
+            'changed-order' : this.sort
+        });
+    },
+
+    /** override to build the ajax parameters from this collection's order and
+     *  filters plus any limit/offset/etc. passed in options
+     */
+    fetch : function( options ){
+        options = this._buildFetchOptions( options );
+        // console.log( 'fetch options:', options );
+        return Backbone.Collection.prototype.fetch.call( this, options );
+    },
+
+    /** build ajax data/parameters from options */
+    _buildFetchOptions : function( options ){
+        // note: we normally want options passed in to override the defaults built here
+        // so most of these fns will generate defaults
+        options = _.clone( options ) || {};
+        var self = this;
+
+        // jquery ajax option; allows multiple q/qv for filters (instead of 'q[]')
+        options.traditional = true;
+
+        // options.data
+        // we keep limit, offset, etc. in options *as well as move it into data* because:
+        // - it makes fetch calling convenient to add it to a single options map (instead of as mult. args)
+        // - it allows the std. event handlers (for fetch, etc.) to have access
+        //   to the pagination options too
+        //      (i.e. this.on( 'sync', function( options ){ if( options.limit ){ ... } }))
+        // however, when we send to xhr/jquery we copy them to data also so that they become API query params
+        options.data = options.data || self._buildFetchData( options );
+        // console.log( 'data:', options.data );
+
+        // options.data.filters --> options.data.q, options.data.qv
+        var filters = this._buildFetchFilters( options );
+        // console.log( 'filters:', filters );
+        if( !_.isEmpty( filters ) ){
+            _.extend( options.data, this._fetchFiltersToAjaxData( filters ) );
+        }
+        // console.log( 'data:', options.data );
+        return options;
+    },
+
+    /** Build the dictionary to send to fetch's XHR as data */
+    _buildFetchData : function( options ){
+        var defaults = {};
+        if( this.order ){ defaults.order = this.order; }
+        return _.defaults( _.pick( options, this._fetchParams ), defaults );
+    },
+
+    /** These attribute keys are valid params to fetch/API-index */
+    _fetchParams : [
+        /** model dependent string to control the order of models returned */
+        'order',
+        /** limit the number of models returned from a fetch */
+        'limit',
+        /** skip this number of models when fetching */
+        'offset',
+        /** what series of attributes to return (model dependent) */
+        'view',
+        /** individual keys to return for the models (see api/histories.index) */
+        'keys'
+    ],
+
+    /** add any needed filters here based on collection state */
+    _buildFetchFilters : function( options ){
+        // override
+        return _.clone( options.filters || {} );
+    },
+
+    /** Convert dictionary filters to qqv style arrays */
+    _fetchFiltersToAjaxData : function( filters ){
+        // return as a map so ajax.data can extend from it
+        var filterMap = {
+            q  : [],
+            qv : []
+        };
+        _.each( filters, function( v, k ){
+            // don't send if filter value is empty
+            if( v === undefined || v === '' ){ return; }
+            // json to python
+            if( v === true ){ v = 'True'; }
+            if( v === false ){ v = 'False'; }
+            if( v === null ){ v = 'None'; }
+            // map to k/v arrays (q/qv)
+            filterMap.q.push( k );
+            filterMap.qv.push( v );
+        });
+        return filterMap;
+    },
+
+    /** override to reset allFetched flag to false */
+    reset : function( models, options ){
+        this.allFetched = false;
+        return Backbone.Collection.prototype.reset.call( this, models, options );
+    },
+
+    // ........................................................................ order
+    order : null,
+
+    /** @type {Object} map of collection available sorting orders containing comparator fns */
+    comparators : {
+        'update_time'       : BASE_MVC.buildComparator( 'update_time', { ascending: false }),
+        'update_time-asc'   : BASE_MVC.buildComparator( 'update_time', { ascending: true }),
+        'create_time'       : BASE_MVC.buildComparator( 'create_time', { ascending: false }),
+        'create_time-asc'   : BASE_MVC.buildComparator( 'create_time', { ascending: true }),
+    },
+
+    /** set the order and comparator for this collection, triggering a re-sort
+     *  @event 'changed-order' passed the fetch/sort options hash
+     */
+    setOrder : function( order, options ){
+        options = options || {};
+        var collection = this;
+        var comparator = collection.comparators[ order ];
+        if( _.isUndefined( comparator ) ){ throw new Error( 'unknown order: ' + order ); }
+        // if( _.isUndefined( comparator ) ){ return; }
+        if( comparator === collection.comparator ){ return; }
+
+        var oldOrder = collection.order;
+        collection.order = order;
+        collection.comparator = comparator;
+
+        if( !options.silent ){
+            collection.trigger( 'changed-order', options );
+        }
+        return collection;
+    },
+
+});
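+
+// A minimal fetch sketch; the url and the 'deleted' filter are hypothetical:
+//     var Datasets = ControlledFetchCollection.extend({
+//         url : Galaxy.root + 'api/datasets'
+//     });
+//     var datasets = new Datasets([], { order : 'update_time' });
+//     // ajax data: { limit: 50, offset: 100, order: 'update_time',
+//     //              q: ['deleted'], qv: ['False'] }
+//     datasets.fetch({ limit : 50, offset : 100, filters : { deleted : false } });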
+
+
+//=============================================================================
+/**
+ * A ControlledFetchCollection that pages through its models, fetching
+ * fixed-size pages (limitPerPage) via limit/offset.
+ */
+var PaginatedCollection = ControlledFetchCollection.extend({
+
+    /** @type {Number} limit used for each page's fetch */
+    limitPerPage       : 500,
+
+    initialize : function( models, options ){
+        ControlledFetchCollection.prototype.initialize.call( this, models, options );
+        this.currentPage = options.currentPage || 0;
+    },
+
+    getTotalItemCount : function(){
+        return this.length;
+    },
+
+    shouldPaginate : function(){
+        return this.getTotalItemCount() >= this.limitPerPage;
+    },
+
+    getLastPage : function(){
+        return Math.floor( this.getTotalItemCount() / this.limitPerPage );
+    },
+
+    getPageCount : function(){
+        return this.getLastPage() + 1;
+    },
+
+    getPageLimitOffset : function( pageNum ){
+        pageNum = this.constrainPageNum( pageNum );
+        return {
+            limit : this.limitPerPage,
+            offset: pageNum * this.limitPerPage
+        };
+    },
+
+    constrainPageNum : function( pageNum ){
+        return Math.max( 0, Math.min( pageNum, this.getLastPage() ));
+    },
+
+    /** fetch a specific (constrained) page of data */
+    fetchPage : function( pageNum, options ){
+        var self = this;
+        pageNum = self.constrainPageNum( pageNum );
+        self.currentPage = pageNum;
+        options = _.defaults( options || {}, self.getPageLimitOffset( pageNum ) );
+
+        self.trigger( 'fetching-more' );
+        return self.fetch( options )
+            .always( function(){
+                self.trigger( 'fetching-more-done' );
+            });
+    },
+
+    fetchCurrentPage : function( options ){
+        return this.fetchPage( this.currentPage, options );
+    },
+
+    fetchPrevPage : function( options ){
+        return this.fetchPage( this.currentPage - 1, options );
+    },
+
+    fetchNextPage : function( options ){
+        return this.fetchPage( this.currentPage + 1, options );
+    },
+});
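+
+// A minimal paging sketch; assumes the collection already holds 1200 models
+// (getTotalItemCount defaults to this.length):
+//     var pages = new PaginatedCollection( models, { order : 'create_time' });
+//     pages.getLastPage();            // 2
+//     pages.getPageLimitOffset( 2 );  // { limit: 500, offset: 1000 }
+//     pages.fetchPage( 2 );           // fetches that window, sets currentPage to 2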
+
+
+//=============================================================================
+/**
+ * A Collection that will load more elements without resetting.
+ */
+var InfinitelyScrollingCollection = ControlledFetchCollection.extend({
+
+    /** @type {Number} limit used for the first fetch (or a reset) */
+    limitOnFirstFetch   : null,
+    /** @type {Number} limit used for each subsequent fetch */
+    limitPerFetch       : 100,
+
+    initialize : function( models, options ){
+        ControlledFetchCollection.prototype.initialize.call( this, models, options );
+        /** @type {Integer} number of contents to return from the first fetch */
+        this.limitOnFirstFetch = options.limitOnFirstFetch || this.limitOnFirstFetch;
+        /** @type {Integer} limit for every fetch after the first */
+        this.limitPerFetch = options.limitPerFetch || this.limitPerFetch;
+        /** @type {Boolean} are all contents fetched? */
+        this.allFetched = false;
+        /** @type {Integer} what was the offset of the last content returned */
+        this.lastFetched = options.lastFetched || 0;
+    },
+
+    /** build ajax data/parameters from options */
+    _buildFetchOptions : function( options ){
+        // options (options for backbone.fetch and jquery.ajax generally)
+        // backbone option; false here to make fetching an additive process
+        options.remove = options.remove || false;
+        return ControlledFetchCollection.prototype._buildFetchOptions.call( this, options );
+    },
+
+    /** fetch the first 'page' of data */
+    fetchFirst : function( options ){
+        // console.log( 'ControlledFetchCollection.fetchFirst:', options );
+        options = options? _.clone( options ) : {};
+        this.allFetched = false;
+        this.lastFetched = 0;
+        return this.fetchMore( _.defaults( options, {
+            reset : true,
+            limit : this.limitOnFirstFetch,
+        }));
+    },
+
+    /** fetch the next page of data */
+    fetchMore : function( options ){
+        // console.log( 'ControlledFetchCollection.fetchMore:', options );
+        options = _.clone( options || {} );
+        var collection = this;
+
+        // console.log( 'fetchMore, options.reset:', options.reset );
+        if( ( !options.reset && collection.allFetched ) ){
+            return jQuery.when();
+        }
+
+        // TODO: this fails in the edge case where
+        //  the first fetch offset === limit (limit 4, offset 4, collection.length 4)
+        options.offset = options.reset? 0 : ( options.offset || collection.lastFetched );
+        var limit = options.limit = options.limit || collection.limitPerFetch || null;
+        // console.log( 'fetchMore, limit:', limit, 'offset:', options.offset );
+
+        collection.trigger( 'fetching-more' );
+        return collection.fetch( options )
+            .always( function(){
+                collection.trigger( 'fetching-more-done' );
+            })
+            // maintain allFetched flag and trigger if all were fetched this time
+            .done( function _postFetchMore( fetchedData ){
+                var numFetched = _.isArray( fetchedData )? fetchedData.length : 0;
+                collection.lastFetched += numFetched;
+                // console.log( 'fetchMore, lastFetched:', collection.lastFetched );
+                // anything less than a full page means we got all there is to get
+                if( !limit || numFetched < limit ){
+                    collection.allFetched = true;
+                    collection.trigger( 'all-fetched', this );
+                }
+            }
+        );
+    },
+
+    /** fetch the entire collection */
+    fetchAll : function( options ){
+        // whitelist options so limit/offset/filters can't be passed
+        // (use vanilla fetch instead)
+        options = options || {};
+        var self = this;
+        options = _.pick( options, 'silent' );
+        options.filters = {};
+        return self.fetch( options ).done( function( fetchData ){
+            self.allFetched = true;
+            self.trigger( 'all-fetched', self );
+        });
+    },
+});
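+
+// A minimal infinite-scroll sketch; 'contents' stands in for an instance of a
+// hypothetical subclass with a real url:
+//     contents.fetchFirst({ limit : 100 })
+//         .done( function(){
+//             // later fetches append (options.remove is false) until a short
+//             // page comes back - then 'all-fetched' fires and further calls
+//             // resolve immediately
+//             contents.fetchMore();
+//         });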
+
+
+//==============================================================================
+    return {
+        ControlledFetchCollection     : ControlledFetchCollection,
+        PaginatedCollection           : PaginatedCollection,
+        InfinitelyScrollingCollection : InfinitelyScrollingCollection,
+    };
+});
diff --git a/client/galaxy/scripts/mvc/citation/citation-model.js b/client/galaxy/scripts/mvc/citation/citation-model.js
new file mode 100644
index 0000000..9643b0d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/citation/citation-model.js
@@ -0,0 +1,97 @@
+define([
+    "libs/bibtex",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( parseBibtex, baseMVC, _l ){
+/* global Backbone, _ */
+// we use amd here to require, but bibtex uses a global or commonjs pattern.
+// webpack will load via commonjs and plain requirejs will load as global. Check both
+parseBibtex = parseBibtex || window.BibtexParser;
+
+var logNamespace = 'citation';
+//==============================================================================
+/** @class model for tool citations.
+ *  @name Citation
+ *  @augments Backbone.Model
+ */
+var Citation = Backbone.Model.extend( baseMVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    defaults : {
+        content: ''
+    },
+
+    initialize: function() {
+        var parsed;
+        try {
+            // TODO: to model.parse/.validate
+            parsed = parseBibtex( this.attributes.content );
+        } catch( err ){
+            return;
+        }
+        // bibtex returns successfully parsed in .entries and any parsing errors in .errors
+        if( parsed.errors.length ){
+            // the gen. form of these errors seems to be [ line, col, char, error message ]
+            var errors = parsed.errors.reduce( function( all, current ){ return all + '; ' + current; });
+            // throw new Error( 'Error parsing bibtex: ' + errors );
+            this.log( 'Error parsing bibtex: ' + errors );
+        }
+
+        this._fields = {};
+        this.entry = _.first( parsed.entries );
+        if( this.entry ){
+            var rawFields = this.entry.Fields;
+            for( var key in rawFields ){
+                var value = rawFields[ key ];
+                var lowerKey = key.toLowerCase();
+                this._fields[ lowerKey ] = value;
+            }
+        }
+    },
+    entryType: function() {
+        return this.entry? this.entry.EntryType : undefined;
+    },
+    fields: function() {
+        return this._fields;
+    }
+});
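+
+// A minimal parsing sketch with an abbreviated, hypothetical BibTeX string
+// (assumes the parser strips the braces from field values):
+//     var citation = new Citation({
+//         content : '@article{goecks2010, title={Galaxy}, year={2010}}'
+//     });
+//     citation.entryType();    // 'article'
+//     citation.fields().year;  // '2010' - field keys are lowercased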
+
+
+//==============================================================================
+/** @class Backbone collection of citations.
+ */
+var BaseCitationCollection = Backbone.Collection.extend( baseMVC.LoggableMixin ).extend( {
+    _logNamespace : logNamespace,
+
+    /** root api url */
+    urlRoot : Galaxy.root + 'api',
+    partial : true, // Assume some tools in history/workflow may not be properly annotated yet.
+    model : Citation,
+} );
+
+var HistoryCitationCollection = BaseCitationCollection.extend( {
+    /** complete api url */
+    url : function() {
+        return this.urlRoot + '/histories/' + this.history_id + '/citations';
+    }
+} );
+
+var ToolCitationCollection = BaseCitationCollection.extend( {
+    /** complete api url */
+    url : function() {
+        return this.urlRoot + '/tools/' + this.tool_id + '/citations';
+    },
+    partial : false, // If a tool has citations, assume they are complete.
+} );
+
+
+//==============================================================================
+
+return {
+    Citation : Citation,
+    HistoryCitationCollection  : HistoryCitationCollection,
+    ToolCitationCollection: ToolCitationCollection
+};
+
+
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/citation/citation-view.js b/client/galaxy/scripts/mvc/citation/citation-view.js
new file mode 100644
index 0000000..5fb3600
--- /dev/null
+++ b/client/galaxy/scripts/mvc/citation/citation-view.js
@@ -0,0 +1,188 @@
+define([
+    "mvc/base-mvc",
+    "mvc/citation/citation-model",
+    "utils/localization"
+], function( baseMVC, citationModel, _l ){
+
+var CitationView = Backbone.View.extend({
+    tagName: 'div',
+    className: 'citations',
+    render: function() {
+        this.$el.append( "<p>" + this.formattedReference() + "</p>" );
+        return this;
+    },
+    formattedReference: function() {
+        var model = this.model;
+        var entryType = model.entryType();
+        var fields = model.fields();
+
+        var ref = "";
+        // Code inspired by...
+        // https://github.com/vkaravir/bib-publication-list/blob/master/src/bib-publication-list.js
+        var authorsAndYear = this._asSentence( (fields.author ? fields.author : "") + (fields.year ? (" (" + fields.year + ")") : "") ) + " ";
+        var title = fields.title || "";
+        var pages = fields.pages ? ("pp. " + fields.pages) : "";
+        var address = fields.address;
+        if( entryType == "article" ) {
+            var volume = (fields.volume ? fields.volume : "") +
+                         (fields.number ? ( " (" + fields.number + ")" ) : "") +
+                         (pages ? ", " + pages : "");
+            ref = authorsAndYear + this._asSentence(title) +
+                    (fields.journal ? ("In <em>" + fields.journal + ", ") : "") +
+                    this._asSentence(volume) + 
+                    this._asSentence(fields.address) +
+                    "<\/em>";
+        } else if( entryType == "inproceedings" || entryType == "proceedings" ) {
+            ref = authorsAndYear + 
+                    this._asSentence(title) + 
+                    (fields.booktitle ? ("In <em>" + fields.booktitle + ", ") : "") +
+                    (pages ? pages : "") +
+                    (address ? ", " + address : "") + 
+                    ".<\/em>";
+        } else if( entryType == "mastersthesis" || entryType == "phdthesis" ) {
+            ref = authorsAndYear + this._asSentence(title) +
+                    (fields.howpublished ? fields.howpublished + ". " : "") +
+                    (fields.note ? fields.note + "." : "");
+        } else if( entryType == "techreport" ) {
+            ref = authorsAndYear + this._asSentence(title) +
+                    this._asSentence(fields.institution) +
+                    this._asSentence(fields.number) +
+                    this._asSentence(fields.type);
+        } else if( entryType == "book" || entryType == "inbook" || entryType == "incollection" ) {
+            ref = authorsAndYear + " " + this._formatBookInfo(fields);
+        } else {
+            ref = authorsAndYear + " " + this._asSentence(title) +
+                    this._asSentence(fields.howpublished) +
+                    this._asSentence(fields.note);
+        }
+        var doiUrl = "";
+        if( fields.doi ) {
+            doiUrl = 'http://dx.doi.org/' + fields.doi;
+            ref += '[<a href="' + doiUrl + '" target="_blank">doi:' + fields.doi + "</a>]";
+        }
+        var url = fields.url || doiUrl;
+        if( url ) {
+            ref += '[<a href="' + url + '" target="_blank">Link</a>]';
+        }
+        return ref;
+    },
+    _formatBookInfo: function(fields) {
+        var info = "";
+        if( fields.chapter ) {
+            info += fields.chapter + " in ";
+        }
+        if( fields.title ) {
+            info += "<em>" + fields.title + "<\/em>";
+        }
+        if( fields.editor ) {
+            info += ", Edited by " + fields.editor + ", ";
+        }
+        if( fields.publisher) {
+            info += ", " + fields.publisher;
+        }
+        if( fields.pages ) {
+            info += ", pp. " + fields.pages + "";
+        }
+        if( fields.series ) {
+            info += ", <em>" + fields.series + "<\/em>";
+        }
+        if( fields.volume ) {
+            info += ", Vol." + fields.volume;
+        }
+        if( fields.issn ) {
+            info += ", ISSN: " + fields.issn;
+        }
+        return info + ".";
+    },
+    _asSentence: function(str) {
+        return (str && str.trim()) ? str + ". " : "";
+    }
+});
+
+var CitationListView = Backbone.View.extend({
+    el: '#citations',
+    /**
+     * Set up view.
+     */
+    initialize: function() {
+        this.listenTo( this.collection, 'add', this.renderCitation );
+    },
+
+    events: {
+        'click .citations-to-bibtex': 'showBibtex',
+        'click .citations-to-formatted': 'showFormatted'
+    },
+
+    renderCitation: function( citation ) {
+        var citationView = new CitationView( { model: citation } );
+        this.$(".citations-formatted").append( citationView.render().el );
+        var rawTextarea = this.$(".citations-bibtex-text");
+        rawTextarea.val( rawTextarea.val() + "\r\n" + citation.attributes.content );
+    },
+
+    render: function() {
+        this.$el.html(this.citationsElement());
+        this.collection.each(function( item ){
+            this.renderCitation( item );
+        }, this);
+        this.showFormatted();
+    },
+
+    showBibtex: function() {
+        this.$(".citations-to-formatted").show();
+        this.$(".citations-to-bibtex").hide();
+        this.$(".citations-bibtex").show();
+        this.$(".citations-formatted").hide();
+        this.$(".citations-bibtex-text").select();
+    },
+
+    showFormatted: function() {
+        this.$(".citations-to-formatted").hide();
+        this.$(".citations-to-bibtex").show();
+        this.$(".citations-bibtex").hide();
+        this.$(".citations-formatted").show();
+    },
+
+    partialWarningElement: function() {
+        if( this.collection.partial ) {
+            return [
+                '<div style="padding:5px 10px">',
+                '<b>Warning: This is an experimental feature.</b> Most Galaxy tools will not annotate',
+                ' citations explicitly at this time. When writing up your analysis, please manually',
+                ' review your histories and find all references',
+                ' that should be cited in order to completely describe your work. Also, please remember to',
+                ' <a href="https://wiki.galaxyproject.org/CitingGalaxy">cite Galaxy</a>.',
+                '</div>',
+            ].join('');
+        } else {
+            return '';
+        }
+    },
+
+    citationsElement: function() {
+        return [
+            '<div class="toolForm">',
+                '<div class="toolFormTitle">',
+                    _l("Citations"),
+                    ' <button type="button" class="btn btn-xs citations-to-bibtex" title="Show all in BibTeX format."><i class="fa fa-pencil-square-o"></i> Show BibTeX</button>',
+                    ' <button type="button" class="btn btn-xs citations-to-formatted" title="Return to formatted citation list."><i class="fa fa-times"></i> Hide BibTeX</button>',
+                '</div>',
+                '<div class="toolFormBody" style="padding:5px 10px">',
+                this.partialWarningElement(),
+                '<span class="citations-formatted" style="word-wrap: break-word;"></span>',
+                '</div>',
+                '<div class="citations-bibtex toolFormBody" style="padding:5px 10px">',
+                '<textarea style="width: 100%; height: 500px;" class="citations-bibtex-text"></textarea>',
+                '</div>',
+            '</div>'
+        ].join( '' );
+    }
+});
+
+//==============================================================================
+return {
+    CitationView : CitationView,
+    CitationListView  : CitationListView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/collection/collection-li-edit.js b/client/galaxy/scripts/mvc/collection/collection-li-edit.js
new file mode 100644
index 0000000..bd4491b
--- /dev/null
+++ b/client/galaxy/scripts/mvc/collection/collection-li-edit.js
@@ -0,0 +1,124 @@
+define([
+    "mvc/collection/collection-li",
+    "mvc/dataset/dataset-li-edit",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( DC_LI, DATASET_LI_EDIT, BASE_MVC, _l ){
+
+'use strict';
+//==============================================================================
+var DCListItemView = DC_LI.DCListItemView;
+/** @class Edit view for DatasetCollection.
+ */
+var DCListItemEdit = DCListItemView.extend(
+/** @lends DCListItemEdit.prototype */{
+
+    /** set up (currently a pass-through to the base view's initialize) */
+    initialize : function( attributes ){
+        DCListItemView.prototype.initialize.call( this, attributes );
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'DCListItemEdit(' + modelString + ')';
+    }
+});
+
+
+//==============================================================================
+var DCEListItemView = DC_LI.DCEListItemView;
+/** @class Editable view for DatasetCollectionElement.
+ */
+var DCEListItemEdit = DCEListItemView.extend(
+/** @lends DCEListItemEdit.prototype */{
+//TODO: this might be expendable - compacted with HDAListItemView
+
+    /** set up */
+    initialize  : function( attributes ){
+        DCEListItemView.prototype.initialize.call( this, attributes );
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'DCEListItemEdit(' + modelString + ')';
+    }
+});
+
+
+//==============================================================================
+// NOTE: this does not inherit from DatasetDCEListItemView as you would expect
+//TODO: but should - if we can find something simpler than using diamond
+/** @class Editable view for a DatasetCollectionElement that is also an DatasetAssociation
+ *      (a dataset contained in a dataset collection).
+ */
+var DatasetDCEListItemEdit = DATASET_LI_EDIT.DatasetListItemEdit.extend(
+/** @lends DatasetDCEListItemEdit.prototype */{
+
+    /** set up */
+    initialize  : function( attributes ){
+        DATASET_LI_EDIT.DatasetListItemEdit.prototype.initialize.call( this, attributes );
+    },
+
+    // NOTE: this does not inherit from DatasetDCEListItemView - so we duplicate this here
+    //TODO: fix
+    /** In this override, only get details if in the ready state.
+     *  Note: fetch with no 'change' event triggering to prevent automatic rendering.
+     */
+    _fetchModelDetails : function(){
+        var view = this;
+        if( view.model.inReadyState() && !view.model.hasDetails() ){
+            return view.model.fetch({ silent: true });
+        }
+        return jQuery.when();
+    },
+
+    /** Override to remove delete button */
+    _renderDeleteButton : function(){
+        return null;
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'DatasetDCEListItemEdit(' + modelString + ')';
+    }
+});
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+DatasetDCEListItemEdit.prototype.templates = (function(){
+
+    return _.extend( {}, DATASET_LI_EDIT.DatasetListItemEdit.prototype.templates, {
+        titleBar : DC_LI.DatasetDCEListItemView.prototype.templates.titleBar
+    });
+}());
+
+
+//==============================================================================
+/** @class Editable view for a DatasetCollectionElement that is also a DatasetCollection
+ *      (a nested DC).
+ */
+var NestedDCDCEListItemEdit = DC_LI.NestedDCDCEListItemView.extend(
+/** @lends NestedDCDCEListItemEdit.prototype */{
+
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'NestedDCDCEListItemEdit(' + modelString + ')';
+    }
+});
+
+
+//==============================================================================
+    return {
+        DCListItemEdit          : DCListItemEdit,
+        DCEListItemEdit         : DCEListItemEdit,
+        DatasetDCEListItemEdit  : DatasetDCEListItemEdit,
+        NestedDCDCEListItemEdit : NestedDCDCEListItemEdit
+    };
+});
diff --git a/client/galaxy/scripts/mvc/collection/collection-li.js b/client/galaxy/scripts/mvc/collection/collection-li.js
new file mode 100644
index 0000000..0708e5d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/collection/collection-li.js
@@ -0,0 +1,271 @@
+define([
+    "mvc/list/list-item",
+    "mvc/dataset/dataset-li",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( LIST_ITEM, DATASET_LI, BASE_MVC, _l ){
+
+'use strict';
+//==============================================================================
+var FoldoutListItemView = LIST_ITEM.FoldoutListItemView,
+    ListItemView = LIST_ITEM.ListItemView;
+/** @class Read only view for DatasetCollection.
+ */
+var DCListItemView = FoldoutListItemView.extend(
+/** @lends DCListItemView.prototype */{
+
+    className   : FoldoutListItemView.prototype.className + " dataset-collection",
+    id          : function(){
+        return [ 'dataset_collection', this.model.get( 'id' ) ].join( '-' );
+    },
+
+    /** override to add linkTarget */
+    initialize : function( attributes ){
+        this.linkTarget = attributes.linkTarget || '_blank';
+        this.hasUser = attributes.hasUser;
+        FoldoutListItemView.prototype.initialize.call( this, attributes );
+    },
+
+    /** event listeners */
+    _setUpListeners : function(){
+        FoldoutListItemView.prototype._setUpListeners.call( this );
+        this.listenTo( this.model, 'change', function( model, options ){
+            // if the model has changed deletion status render it entirely
+            if( _.has( model.changed, 'deleted' ) ){
+                this.render();
+
+            // if the model has been decorated after the fact with the element count,
+            // render the subtitle where the count is displayed
+            } else if( _.has( model.changed, 'element_count' ) ){
+                this.$( '> .title-bar .subtitle' ).replaceWith( this._renderSubtitle() );
+            }
+        });
+    },
+
+    // ......................................................................... rendering
+    /** render a subtitle to show the user what sort of collection this is */
+    _renderSubtitle : function(){
+        return $( this.templates.subtitle( this.model.toJSON(), this ) );
+    },
+
+    // ......................................................................... foldout
+    /** override to add linktarget to sub-panel */
+    _getFoldoutPanelOptions : function(){
+        var options = FoldoutListItemView.prototype._getFoldoutPanelOptions.call( this );
+        return _.extend( options, {
+            linkTarget  : this.linkTarget,
+            hasUser     : this.hasUser
+        });
+    },
+
+    /** override to not catch sub-panel selectors */
+    $selector : function(){
+        return this.$( '> .selector' );
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'DCListItemView(' + modelString + ')';
+    }
+});
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+DCListItemView.prototype.templates = (function(){
+
+    var warnings = _.extend( {}, FoldoutListItemView.prototype.templates.warnings, {
+        error : BASE_MVC.wrapTemplate([
+            // error during index fetch - show error on dataset
+            '<% if( model.error ){ %>',
+                '<div class="errormessagesmall">',
+                    _l( 'There was an error getting the data for this collection' ), ': <%- model.error %>',
+                '</div>',
+            '<% } %>'
+        ]),
+        purged : BASE_MVC.wrapTemplate([
+            '<% if( model.purged ){ %>',
+                '<div class="purged-msg warningmessagesmall">',
+                    _l( 'This collection has been deleted and removed from disk' ),
+                '</div>',
+            '<% } %>'
+        ]),
+        deleted : BASE_MVC.wrapTemplate([
+            // deleted not purged
+            '<% if( model.deleted && !model.purged ){ %>',
+                '<div class="deleted-msg warningmessagesmall">',
+                    _l( 'This collection has been deleted' ),
+                '</div>',
+            '<% } %>'
+        ])
+    });
+
+    // use element identifier
+    var titleBarTemplate = BASE_MVC.wrapTemplate([
+        '<div class="title-bar clear" tabindex="0">',
+            '<div class="title">',
+                '<span class="name"><%- collection.element_identifier || collection.name %></span>',
+            '</div>',
+            '<div class="subtitle"></div>',
+        '</div>'
+    ], 'collection' );
+
+    // use element identifier
+    var subtitleTemplate = BASE_MVC.wrapTemplate([
+        '<div class="subtitle">',
+            '<% var countText = collection.element_count? ( collection.element_count + " " ) : ""; %>',
+            '<%        if( collection.collection_type === "list" ){ %>',
+                _l( 'a list of <%- countText %>datasets' ),
+            '<% } else if( collection.collection_type === "paired" ){ %>',
+                _l( 'a pair of datasets' ),
+            '<% } else if( collection.collection_type === "list:paired" ){ %>',
+                _l( 'a list of <%- countText %>dataset pairs' ),
+            '<% } else if( collection.collection_type === "list:list" ){ %>',
+                _l( 'a list of <%- countText %>dataset lists' ),
+            '<% } %>',
+        '</div>'
+    ], 'collection' );
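+
+    // For illustration: a 'list' collection with element_count 3 renders the subtitle
+    // "a list of 3 datasets", while a 'paired' collection always reads "a pair of datasets"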
+
+    return _.extend( {}, FoldoutListItemView.prototype.templates, {
+        warnings    : warnings,
+        titleBar    : titleBarTemplate,
+        subtitle    : subtitleTemplate
+    });
+}());
+
+
+//==============================================================================
+/** @class Read only view for DatasetCollectionElement.
+ */
+var DCEListItemView = ListItemView.extend(
+/** @lends DCEListItemView.prototype */{
+
+    /** add the DCE class to the list item */
+    className   : ListItemView.prototype.className + " dataset-collection-element",
+
+    /** set up */
+    initialize  : function( attributes ){
+        if( attributes.logger ){ this.logger = this.model.logger = attributes.logger; }
+        this.log( 'DCEListItemView.initialize:', attributes );
+        ListItemView.prototype.initialize.call( this, attributes );
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'DCEListItemView(' + modelString + ')';
+    }
+});
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+DCEListItemView.prototype.templates = (function(){
+
+    // use the element identifier here - since that will persist and the user will need it
+    var titleBarTemplate = BASE_MVC.wrapTemplate([
+        '<div class="title-bar clear" tabindex="0">',
+            '<div class="title">',
+                '<span class="name"><%- element.element_identifier %></span>',
+            '</div>',
+            '<div class="subtitle"></div>',
+        '</div>'
+    ], 'element' );
+
+    return _.extend( {}, ListItemView.prototype.templates, {
+        titleBar : titleBarTemplate
+    });
+}());
+
+
+//==============================================================================
+/** @class Read only view for a DatasetCollectionElement that is also a DatasetAssociation
+ *      (a dataset contained in a dataset collection).
+ */
+var DatasetDCEListItemView = DATASET_LI.DatasetListItemView.extend(
+/** @lends DatasetDCEListItemView.prototype */{
+
+    className   : DATASET_LI.DatasetListItemView.prototype.className + " dataset-collection-element",
+
+    /** set up */
+    initialize  : function( attributes ){
+        if( attributes.logger ){ this.logger = this.model.logger = attributes.logger; }
+        this.log( 'DatasetDCEListItemView.initialize:', attributes );
+        DATASET_LI.DatasetListItemView.prototype.initialize.call( this, attributes );
+    },
+
+    /** In this override, only get details if in the ready state.
+     *  Note: fetch with no 'change' event triggering to prevent automatic rendering.
+     */
+    _fetchModelDetails : function(){
+        var view = this;
+        if( view.model.inReadyState() && !view.model.hasDetails() ){
+            return view.model.fetch({ silent: true });
+        }
+        return jQuery.when();
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'DatasetDCEListItemView(' + modelString + ')';
+    }
+});
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+DatasetDCEListItemView.prototype.templates = (function(){
+
+    // use the element identifier here and not the dataset name
+    //TODO:?? can we steal the DCE titlebar?
+    var titleBarTemplate = BASE_MVC.wrapTemplate([
+        '<div class="title-bar clear" tabindex="0">',
+            '<span class="state-icon"></span>',
+            '<div class="title">',
+                '<span class="name"><%- element.element_identifier %></span>',
+            '</div>',
+        '</div>'
+    ], 'element' );
+
+    return _.extend( {}, DATASET_LI.DatasetListItemView.prototype.templates, {
+        titleBar : titleBarTemplate
+    });
+}());
+
+
+//==============================================================================
+/** @class Read only view for a DatasetCollectionElement that is also a DatasetCollection
+ *      (a nested DC).
+ */
+var NestedDCDCEListItemView = DCListItemView.extend(
+/** @lends NestedDCDCEListItemView.prototype */{
+
+    className   : DCListItemView.prototype.className + " dataset-collection-element",
+
+    /** In this override, add the state as a class for use with state-based CSS */
+    _swapNewRender : function( $newRender ){
+        DCListItemView.prototype._swapNewRender.call( this, $newRender );
+        var state = this.model.get( 'state' ) || 'ok';
+        this.$el.addClass( 'state-' + state );
+        return this.$el;
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'NestedDCDCEListItemView(' + modelString + ')';
+    }
+});
+
+
+//==============================================================================
+    return {
+        DCListItemView          : DCListItemView,
+        DCEListItemView         : DCEListItemView,
+        DatasetDCEListItemView  : DatasetDCEListItemView,
+        NestedDCDCEListItemView : NestedDCDCEListItemView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/collection/collection-model.js b/client/galaxy/scripts/mvc/collection/collection-model.js
new file mode 100644
index 0000000..e3fbe81
--- /dev/null
+++ b/client/galaxy/scripts/mvc/collection/collection-model.js
@@ -0,0 +1,471 @@
+define([
+    "mvc/dataset/dataset-model",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( DATASET_MODEL, BASE_MVC, _l ){
+'use strict';
+
+//==============================================================================
+/*
+Notes:
+
+Terminology:
+    DatasetCollection/DC : a container of datasets or nested DatasetCollections
+    Element/DatasetCollectionElement/DCE : an item contained in a DatasetCollection
+    HistoryDatasetCollectionAssociation/HDCA: a DatasetCollection contained in a history
+
+
+This all seems too complex unfortunately:
+
+- Terminology collision between DatasetCollections (DCs) and Backbone Collections.
+- In the DatasetCollections API JSON, DC Elements use a 'Has A' structure to *contain*
+    either a dataset or a nested DC. This would make the hierarchy much taller. I've
+    decided to merge the contained JSON with the DC element json - making the 'has a'
+    relation into an 'is a' relation. This seems simpler to me and allowed a lot of
+    DRY in both models and views, but may make tracking or tracing within these models
+    more difficult (since DatasetCollectionElements are now *also* DatasetAssociations
+    or DatasetCollections (nested)). This also violates the rule of thumb about
+    favoring aggregation over inheritance.
+- Currently, there are three DatasetCollection subclasses: List, Pair, and ListPaired.
+    Each of these should: a) be usable on its own, b) be usable in the context of
+    nesting within a collection model (at least in the case of ListPaired), and
+    c) be usable within the context of other container models (like History or
+    LibraryFolder, etc.). I've tried to separate/extract classes in order to
+    handle those three situations, but it's proven difficult to do in a simple,
+    readable manner.
+- Ideally, histories and libraries would inherit from the same server models as
+    dataset collections do since they are (in essence) dataset collections themselves -
+    making the whole nested structure simpler. This would be a large, error-prone
+    refactoring and migration.
+
+Many of the classes and the hierarchy are meant as extension points so, while the
+relations and flow may be difficult to understand initially, they'll allow us to
+handle the growth or flux of dataset collections in the future (w/o actually implementing
+any YAGNI).
+
+*/
+//_________________________________________________________________________________________________ ELEMENTS
+/** @class mixin for Dataset collection elements.
+ *      When collection elements are passed from the API, the underlying element is
+ *          in a sub-object 'object' (IOW, a DCE representing an HDA will have HDA json in element.object).
+ *      This mixin uses the constructor and parse methods to merge that JSON with the DCE attributes
+ *          effectively changing a DCE from a container to a subclass (has a --> is a).
+ */
+var DatasetCollectionElementMixin = {
+
+    /** default attributes used by elements in a dataset collection */
+    defaults : {
+        model_class         : 'DatasetCollectionElement',
+        element_identifier  : null,
+        element_index       : null,
+        element_type        : null
+    },
+
+    /** merge the attributes of the sub-object 'object' into this model */
+    _mergeObject : function( attributes ){
+        // if we don't preserve and correct ids here, the element id becomes the object id,
+        // collisions occur in backbone's _byId, and only one model per id is retained
+        _.extend( attributes, attributes.object, { element_id: attributes.id });
+        delete attributes.object;
+        return attributes;
+    },
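+
+    // For illustration (ids and values are hypothetical): the API returns a DCE like
+    //     { id: 'dce0', element_identifier: 'fwd', element_type: 'hda',
+    //       object: { id: 'hda1', name: 'reads.fastq', state: 'ok' } }
+    // which _mergeObject flattens to
+    //     { element_id: 'dce0', element_identifier: 'fwd', element_type: 'hda',
+    //       id: 'hda1', name: 'reads.fastq', state: 'ok' }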
+
+    /** override to merge this.object into this */
+    constructor : function( attributes, options ){
+        // console.debug( '\t DatasetCollectionElement.constructor:', attributes, options );
+        attributes = this._mergeObject( attributes );
+        this.idAttribute = 'element_id';
+        Backbone.Model.apply( this, arguments );
+    },
+
+    /** when the model is fetched, merge this.object into this */
+    parse : function( response, options ){
+        var attributes = response;
+        attributes = this._mergeObject( attributes );
+        return attributes;
+    }
+};
+
+/** @class Concrete class of Generic DatasetCollectionElement */
+var DatasetCollectionElement = Backbone.Model
+    .extend( BASE_MVC.LoggableMixin )
+    .extend( DatasetCollectionElementMixin )
+    .extend({ _logNamespace : 'collections' });
+
+
+//==============================================================================
+/** @class Base/Abstract Backbone collection for Generic DCEs. */
+var DCECollection = Backbone.Collection.extend( BASE_MVC.LoggableMixin ).extend(
+/** @lends DCECollection.prototype */{
+    _logNamespace : 'collections',
+
+    model: DatasetCollectionElement,
+
+    /** String representation. */
+    toString : function(){
+         return ([ 'DatasetCollectionElementCollection(', this.length, ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone model for a dataset collection element that is a dataset (HDA).
+ */
+var DatasetDCE = DATASET_MODEL.DatasetAssociation.extend( BASE_MVC.mixin( DatasetCollectionElementMixin,
+/** @lends DatasetDCE.prototype */{
+
+    /** url fn */
+    url : function(){
+        // won't always be an hda
+        if( !this.has( 'history_id' ) ){
+            console.warn( 'no endpoint for non-hdas within a collection yet' );
+            // (a little silly since this api endpoint *also* points at hdas)
+            return Galaxy.root + 'api/datasets';
+        }
+        return Galaxy.root + 'api/histories/' + this.get( 'history_id' ) + '/contents/' + this.get( 'id' );
+    },
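+
+    // e.g. (hypothetical ids) an element with history_id 'h123' and id 'd456' resolves to
+    //  Galaxy.root + 'api/histories/h123/contents/d456'; without a history_id it falls back
+    //  to Galaxy.root + 'api/datasets'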
+
+    defaults : _.extend( {},
+        DATASET_MODEL.DatasetAssociation.prototype.defaults,
+        DatasetCollectionElementMixin.defaults
+    ),
+
+    // because all objects have constructors (as this hashmap would even if this next line wasn't present)
+    //  the constructor in DatasetCollectionElementMixin won't be attached by BASE_MVC.mixin to this model
+    //  - re-apply manually for now
+    /** call the mixin constructor */
+    constructor : function( attributes, options ){
+        this.debug( '\t DatasetDCE.constructor:', attributes, options );
+        //DATASET_MODEL.DatasetAssociation.prototype.constructor.call( this, attributes, options );
+        DatasetCollectionElementMixin.constructor.call( this, attributes, options );
+    },
+
+    /** Does this model already contain detailed data (as opposed to just summary level data)? */
+    hasDetails : function(){
+        return this.elements && this.elements.length;
+    },
+
+    /** String representation. */
+    toString : function(){
+        var objStr = this.get( 'element_identifier' );
+        return ([ 'DatasetDCE(', objStr, ')' ].join( '' ));
+    }
+}));
+
+
+//==============================================================================
+/** @class DCECollection of DatasetDCE's (a list of datasets, a pair of datasets).
+ */
+var DatasetDCECollection = DCECollection.extend(
+/** @lends DatasetDCECollection.prototype */{
+    model: DatasetDCE,
+
+    /** String representation. */
+    toString : function(){
+         return ([ 'DatasetDCECollection(', this.length, ')' ].join( '' ));
+    }
+});
+
+
+//_________________________________________________________________________________________________ COLLECTIONS
+/** @class Backbone model for Dataset Collections.
+ *      The DC API returns an array of JSON objects under the attribute elements.
+ *      This model:
+ *          - removes that array/attribute ('elements') from the model,
+ *          - creates a bbone collection (of the class defined in the 'collectionClass' attribute),
+ *          - passes that json onto the bbone collection
+ *          - caches the bbone collection in this.elements
+ */
+var DatasetCollection = Backbone.Model
+        .extend( BASE_MVC.LoggableMixin )
+        .extend( BASE_MVC.SearchableModelMixin )
+        .extend(/** @lends DatasetCollection.prototype */{
+    _logNamespace : 'collections',
+
+    /** default attributes for a model */
+    defaults : {
+        /* 'list', 'paired', or 'list:paired' */
+        collection_type     : null,
+        //??
+        deleted             : false
+    },
+
+    /** Which class to use for elements */
+    collectionClass : DCECollection,
+
+    /** set up: create elements instance var and (on changes to elements) update them  */
+    initialize : function( model, options ){
+        this.debug( this + '(DatasetCollection).initialize:', model, options, this );
+        this.elements = this._createElementsModel();
+        this.on( 'change:elements', function(){
+            this.log( 'change:elements' );
+            //TODO: prob. better to update the collection instead of re-creating it
+            this.elements = this._createElementsModel();
+        });
+    },
+
+    /** move elements model attribute to full collection */
+    _createElementsModel : function(){
+        this.debug( this + '._createElementsModel', this.collectionClass, this.get( 'elements' ), this.elements );
+        //TODO: same patterns as DatasetCollectionElement _createObjectModel - refactor to BASE_MVC.hasSubModel?
+        var elements = this.get( 'elements' ) || [];
+        this.unset( 'elements', { silent: true });
+        this.elements = new this.collectionClass( elements );
+        //this.debug( 'collectionClass:', this.collectionClass + '', this.elements );
+        return this.elements;
+    },
+
+    // ........................................................................ common queries
+    /** pass the elements back within the model json when this is serialized */
+    toJSON : function(){
+        var json = Backbone.Model.prototype.toJSON.call( this );
+        if( this.elements ){
+            json.elements = this.elements.toJSON();
+        }
+        return json;
+    },
+
+    /** Is this collection in a 'ready' state, i.e. is no processing (for the collection)
+     *  left to do on the server?
+     */
+    inReadyState : function(){
+        var populated = this.get( 'populated' );
+        return ( this.isDeletedOrPurged() || populated );
+    },
+
+    //TODO:?? the following are the same interface as DatasetAssociation - can we combine?
+    /** Does the DC contain any elements yet? Is a fetch() required? */
+    hasDetails : function(){
+        return this.elements.length !== 0;
+    },
+
+    /** Given the filters, what models in this.elements would be returned? */
+    getVisibleContents : function( filters ){
+        // filters unused for now
+        return this.elements;
+    },
+
+    // ........................................................................ ajax
+    /** override to use actual Dates objects for create/update times */
+    parse : function( response, options ){
+        var parsed = Backbone.Model.prototype.parse.call( this, response, options );
+        if( parsed.create_time ){
+            parsed.create_time = new Date( parsed.create_time );
+        }
+        if( parsed.update_time ){
+            parsed.update_time = new Date( parsed.update_time );
+        }
+        return parsed;
+    },
+
+    /** save this collection, _Mark_ing it as deleted (just a flag) */
+    'delete' : function( options ){
+        if( this.get( 'deleted' ) ){ return jQuery.when(); }
+        return this.save( { deleted: true }, options );
+    },
+    /** save this collection, _Mark_ing it as undeleted */
+    undelete : function( options ){
+        if( !this.get( 'deleted' ) || this.get( 'purged' ) ){ return jQuery.when(); }
+        return this.save( { deleted: false }, options );
+    },
+
+    /** Is this collection deleted or purged? */
+    isDeletedOrPurged : function(){
+        return ( this.get( 'deleted' ) || this.get( 'purged' ) );
+    },
+
+    // ........................................................................ searchable
+    /** searchable attributes for collections */
+    searchAttributes : [
+        'name'
+    ],
+
+    // ........................................................................ misc
+    /** String representation */
+    toString : function(){
+        var idAndName = [ this.get( 'id' ), this.get( 'name' ) || this.get( 'element_identifier' ) ];
+        return 'DatasetCollection(' + ( idAndName.join(',') ) + ')';
+    }
+});
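+
+// A minimal usage sketch (attribute values are hypothetical). Constructing a model from
+// API-style JSON moves 'elements' out of the attributes and into a cached bbone collection:
+//     var dc = new DatasetCollection({ id: 'abc', name: 'my list', elements: [ /* DCE json */ ] });
+//     dc.get( 'elements' );   //=> undefined - unset by _createElementsModel
+//     dc.elements;            //=> a DCECollection wrapping the element json
+//     dc.toJSON().elements;   //=> the elements serialized back into the model json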
+
+
+//==============================================================================
+/** Model for a DatasetCollection containing datasets (non-nested).
+ */
+var ListDatasetCollection = DatasetCollection.extend(
+/** @lends ListDatasetCollection.prototype */{
+
+    /** override since we know the collection will only contain datasets */
+    collectionClass : DatasetDCECollection,
+
+    /** String representation. */
+    toString : function(){ return 'List' + DatasetCollection.prototype.toString.call( this ); }
+});
+
+
+//==============================================================================
+/** Model for a DatasetCollection containing fwd/rev datasets (a list of 2).
+ */
+var PairDatasetCollection = ListDatasetCollection.extend(
+/** @lends PairDatasetCollection.prototype */{
+
+    /** String representation. */
+    toString : function(){ return 'Pair' + DatasetCollection.prototype.toString.call( this ); }
+});
+
+
+//_________________________________________________________________________________________________ NESTED COLLECTIONS
+// this is where things get weird, man. Weird.
+//TODO: it might be possible to compact all the following...I think.
+//==============================================================================
+/** @class Backbone model for a Generic DatasetCollectionElement that is also a DatasetCollection
+ *      (a nested collection). Currently only list:paired.
+ */
+var NestedDCDCE = DatasetCollection.extend( BASE_MVC.mixin( DatasetCollectionElementMixin,
+/** @lends NestedDCDCE.prototype */{
+
+    // because all objects have constructors (as this hashmap would even if this next line wasn't present)
+    //  the constructor in DatasetCollectionElementMixin won't be attached by BASE_MVC.mixin to this model
+    //  - re-apply it manually for now
+    /** call the mixin constructor */
+    constructor : function( attributes, options ){
+        this.debug( '\t NestedDCDCE.constructor:', attributes, options );
+        DatasetCollectionElementMixin.constructor.call( this, attributes, options );
+    },
+
+    /** String representation. */
+    toString : function(){
+        var objStr = ( this.object )?( '' + this.object ):( this.get( 'element_identifier' ) );
+        return ([ 'NestedDCDCE(', objStr, ')' ].join( '' ));
+    }
+}));
+
+
+//==============================================================================
+/** @class Backbone collection containing Generic NestedDCDCE's (nested dataset collections).
+ */
+var NestedDCDCECollection = DCECollection.extend(
+/** @lends NestedDCDCECollection.prototype */{
+
+    /** This is a collection of nested collections */
+    model: NestedDCDCE,
+
+    /** String representation. */
+    toString : function(){
+         return ([ 'NestedDCDCECollection(', this.length, ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone model for a paired dataset collection within a list:paired dataset collection.
+ */
+var NestedPairDCDCE = PairDatasetCollection.extend( BASE_MVC.mixin( DatasetCollectionElementMixin,
+/** @lends NestedPairDCDCE.prototype */{
+//TODO:?? possibly rename to NestedDatasetCollection?
+
+    // because all objects have constructors (as this hashmap would even if this next line wasn't present)
+    //  the constructor in DatasetCollectionElementMixin won't be attached by BASE_MVC.mixin to this model
+    //  - re-apply it manually for now
+    /** This is both a collection and a collection element - call the constructor */
+    constructor : function( attributes, options ){
+        this.debug( '\t NestedPairDCDCE.constructor:', attributes, options );
+        //DatasetCollection.constructor.call( this, attributes, options );
+        DatasetCollectionElementMixin.constructor.call( this, attributes, options );
+    },
+
+    /** String representation. */
+    toString : function(){
+        var objStr = ( this.object )?( '' + this.object ):( this.get( 'element_identifier' ) );
+        return ([ 'NestedPairDCDCE(', objStr, ')' ].join( '' ));
+    }
+}));
+
+
+//==============================================================================
+/** @class Backbone collection for a backbone collection containing paired dataset collections.
+ */
+var NestedPairDCDCECollection = NestedDCDCECollection.extend(
+/** @lends NestedPairDCDCECollection.prototype */{
+
+    /** We know this collection is composed of only nested pair collections */
+    model: NestedPairDCDCE,
+
+    /** String representation. */
+    toString : function(){
+         return ([ 'NestedPairDCDCECollection(', this.length, ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone Model for a DatasetCollection (list) that contains DatasetCollections (pairs).
+ */
+var ListPairedDatasetCollection = DatasetCollection.extend(
+/** @lends ListPairedDatasetCollection.prototype */{
+
+    /** a list:paired collection contains nested pair collections */
+    collectionClass : NestedPairDCDCECollection,
+
+    /** String representation. */
+    toString : function(){
+         return ([ 'ListPairedDatasetCollection(', this.get( 'name' ), ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone model for a list dataset collection within a list:list dataset collection. */
+var NestedListDCDCE = ListDatasetCollection.extend( BASE_MVC.mixin( DatasetCollectionElementMixin,
+/** @lends NestedListDCDCE.prototype */{
+
+    /** This is both a collection and a collection element - call the constructor */
+    constructor : function( attributes, options ){
+        this.debug( '\t NestedListDCDCE.constructor:', attributes, options );
+        DatasetCollectionElementMixin.constructor.call( this, attributes, options );
+    },
+
+    /** String representation. */
+    toString : function(){
+        var objStr = ( this.object )?( '' + this.object ):( this.get( 'element_identifier' ) );
+        return ([ 'NestedListDCDCE(', objStr, ')' ].join( '' ));
+    }
+}));
+
+
+//==============================================================================
+/** @class Backbone collection containing list dataset collections. */
+var NestedListDCDCECollection = NestedDCDCECollection.extend({
+
+    /** We know this collection is composed of only nested list collections */
+    model: NestedListDCDCE,
+
+    /** String representation. */
+    toString : function(){
+        return ([ 'NestedListDCDCECollection(', this.length, ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone Model for a DatasetCollection (list) that contains other lists. */
+var ListOfListsDatasetCollection = DatasetCollection.extend({
+
+    /** a list:list collection contains nested list collections */
+    collectionClass : NestedListDCDCECollection,
+
+    /** String representation. */
+    toString : function(){
+        return ([ 'ListOfListsDatasetCollection(', this.get( 'name' ), ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+    return {
+        ListDatasetCollection       : ListDatasetCollection,
+        PairDatasetCollection       : PairDatasetCollection,
+        ListPairedDatasetCollection : ListPairedDatasetCollection,
+        ListOfListsDatasetCollection: ListOfListsDatasetCollection
+    };
+});
diff --git a/client/galaxy/scripts/mvc/collection/collection-view-edit.js b/client/galaxy/scripts/mvc/collection/collection-view-edit.js
new file mode 100644
index 0000000..d2e7fdc
--- /dev/null
+++ b/client/galaxy/scripts/mvc/collection/collection-view-edit.js
@@ -0,0 +1,176 @@
+define([
+    "mvc/collection/collection-view",
+    "mvc/collection/collection-model",
+    "mvc/collection/collection-li-edit",
+    "mvc/base-mvc",
+    "utils/localization",
+    "ui/editable-text",
+], function( DC_VIEW, DC_MODEL, DC_EDIT, BASE_MVC, _l ){
+
+'use strict';
+/* =============================================================================
+TODO:
+
+============================================================================= */
+/** @class editable View/Controller for a dataset collection.
+ */
+var _super = DC_VIEW.CollectionView;
+var CollectionViewEdit = _super.extend(
+/** @lends CollectionViewEdit.prototype */{
+    //MODEL is either a DatasetCollection (or subclass) or a DatasetCollectionElement (list of pairs)
+
+    /** logger used to record this.log messages, commonly set to console */
+    //logger              : console,
+
+    /** sub view class used for datasets */
+    DatasetDCEViewClass : DC_EDIT.DatasetDCEListItemEdit,
+    /** sub view class used for nested collections */
+    NestedDCDCEViewClass: DC_EDIT.NestedDCDCEListItemEdit,
+
+    // ......................................................................... SET UP
+    /** Set up the view, set up storage, bind listeners to HistoryContents events
+     *  @param {Object} attributes optional settings for the panel
+     */
+    initialize : function( attributes ){
+        _super.prototype.initialize.call( this, attributes );
+    },
+
+    /** In this override, make the collection name editable
+     */
+    _setUpBehaviors : function( $where ){
+        $where = $where || this.$el;
+        _super.prototype._setUpBehaviors.call( this, $where );
+        if( !this.model ){ return; }
+
+        // anon users shouldn't have access to any of the following
+        if( !Galaxy.user || Galaxy.user.isAnonymous() ){
+            return;
+        }
+
+        //TODO: extract
+        var panel = this,
+            nameSelector = '> .controls .name';
+        $where.find( nameSelector )
+            .attr( 'title', _l( 'Click to rename collection' ) )
+            .tooltip({ placement: 'bottom' })
+            .make_text_editable({
+                on_finish: function( newName ){
+                    var previousName = panel.model.get( 'name' );
+                    if( newName && newName !== previousName ){
+                        panel.$el.find( nameSelector ).text( newName );
+                        panel.model.save({ name: newName })
+                            .fail( function(){
+                                panel.$el.find( nameSelector ).text( panel.model.previous( 'name' ) );
+                            });
+                    } else {
+                        panel.$el.find( nameSelector ).text( previousName );
+                    }
+                }
+            });
+    },
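+
+    // The rename flow above: make_text_editable swaps the name for an input; on_finish
+    // optimistically sets the new text, saves { name: newName } to the server, and reverts
+    // to the previous name if the save fails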
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'CollectionViewEdit(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+// =============================================================================
+/** @class editable View/Controller for a list dataset collection. */
+var ListCollectionViewEdit = CollectionViewEdit.extend(
+/** @lends ListCollectionViewEdit.prototype */{
+
+    //TODO: not strictly needed - due to switch in CollectionView._getContentClass
+    /** sub view class used for datasets */
+    DatasetDCEViewClass : DC_EDIT.DatasetDCEListItemEdit,
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'ListCollectionViewEdit(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+// =============================================================================
+/** @class editable View/Controller for a pair dataset collection. */
+var PairCollectionViewEdit = ListCollectionViewEdit.extend(
+/** @lends PairCollectionViewEdit.prototype */{
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'PairCollectionViewEdit(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+// =============================================================================
+/** @class Editable (roughly, since these collections are immutable)
+ *  View/Controller for a nested pair dataset collection.
+ */
+var NestedPairCollectionViewEdit = PairCollectionViewEdit.extend(
+/** @lends NestedPairCollectionViewEdit.prototype */{
+
+    /** Override to remove the editable text from the name/identifier - these collections are considered immutable */
+    _setUpBehaviors : function( $where ){
+        _super.prototype._setUpBehaviors.call( this, $where );
+    },
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'NestedPairCollectionViewEdit(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+// =============================================================================
+/** @class editable View/Controller for a list of pairs dataset collection. */
+var ListOfPairsCollectionViewEdit = CollectionViewEdit.extend(
+/** @lends ListOfPairsCollectionViewEdit.prototype */{
+
+    //TODO: not strictly needed - due to switch in CollectionView._getContentClass
+    /** sub view class used for nested collections */
+    NestedDCDCEViewClass : DC_EDIT.NestedDCDCEListItemEdit.extend({
+        foldoutPanelClass : NestedPairCollectionViewEdit
+    }),
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'ListOfPairsCollectionViewEdit(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+// =============================================================================
+/** @class editable View/Controller for a list of lists dataset collection. */
+var ListOfListsCollectionViewEdit = CollectionViewEdit.extend(
+/** @lends ListOfListsCollectionViewEdit.prototype */{
+
+    //TODO: not strictly needed - due to switch in CollectionView._getContentClass
+    /** sub view class used for nested collections */
+    NestedDCDCEViewClass : DC_EDIT.NestedDCDCEListItemEdit.extend({
+        foldoutPanelClass : NestedPairCollectionViewEdit
+    }),
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'ListOfListsCollectionViewEdit(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+//==============================================================================
+    return {
+        CollectionViewEdit              : CollectionViewEdit,
+        ListCollectionViewEdit          : ListCollectionViewEdit,
+        PairCollectionViewEdit          : PairCollectionViewEdit,
+        ListOfPairsCollectionViewEdit   : ListOfPairsCollectionViewEdit,
+        ListOfListsCollectionViewEdit   : ListOfListsCollectionViewEdit
+    };
+});
diff --git a/client/galaxy/scripts/mvc/collection/collection-view.js b/client/galaxy/scripts/mvc/collection/collection-view.js
new file mode 100644
index 0000000..336967a
--- /dev/null
+++ b/client/galaxy/scripts/mvc/collection/collection-view.js
@@ -0,0 +1,255 @@
+define([
+    "mvc/list/list-view",
+    "mvc/collection/collection-model",
+    "mvc/collection/collection-li",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( LIST_VIEW, DC_MODEL, DC_LI, BASE_MVC, _l ){
+
+'use strict';
+
+var logNamespace = 'collections';
+/* =============================================================================
+TODO:
+
+============================================================================= */
+/** @class non-editable, read-only View/Controller for a dataset collection.
+ */
+var _super = LIST_VIEW.ModelListPanel;
+var CollectionView = _super.extend(
+/** @lends CollectionView.prototype */{
+    //MODEL is either a DatasetCollection (or subclass) or a DatasetCollectionElement (list of pairs)
+    _logNamespace : logNamespace,
+
+    className           : _super.prototype.className + ' dataset-collection-panel',
+
+    /** sub view class used for datasets */
+    DatasetDCEViewClass : DC_LI.DatasetDCEListItemView,
+    /** sub view class used for nested collections */
+    NestedDCDCEViewClass: DC_LI.NestedDCDCEListItemView,
+    /** key of attribute in model to assign to this.collection */
+    modelCollectionKey  : 'elements',
+
+    // ......................................................................... SET UP
+    /** Set up the view, set up storage, bind listeners to HistoryContents events
+     *  @param {Object} attributes optional settings for the panel
+     */
+    initialize : function( attributes ){
+        _super.prototype.initialize.call( this, attributes );
+        this.linkTarget = attributes.linkTarget || '_blank';
+
+        this.hasUser = attributes.hasUser;
+        /** A stack of panels that currently cover or hide this panel */
+        this.panelStack = [];
+        /** The text of the link to go back to the panel containing this one */
+        this.parentName = attributes.parentName;
+        /** foldout or drilldown */
+        this.foldoutStyle = attributes.foldoutStyle || 'foldout';
+    },
+
+    _queueNewRender : function( $newRender, speed ) {
+        speed = ( speed === undefined )?( this.fxSpeed ):( speed );
+        var panel = this;
+        panel.log( '_queueNewRender:', $newRender, speed );
+
+        // TODO: jQuery 1.12 doesn't change display when the elem has display: flex;
+        // this causes display: block for those elems after the use of show/hide animations.
+        // Animations are removed from this view for now, until fixed.
+        panel._swapNewRender( $newRender );
+        panel.trigger( 'rendered', panel );
+    },
+
+    // ------------------------------------------------------------------------ sub-views
+    /** In this override, use model.getVisibleContents */
+    _filterCollection : function(){
+        //TODO: should *not* be model.getVisibleContents - visibility is not model related
+        return this.model.getVisibleContents();
+    },
+
+    /** override to return proper view class based on element_type */
+    _getItemViewClass : function( model ){
+        //this.debug( this + '._getItemViewClass:', model );
+        //TODO: subclasses use DCEViewClass - but are currently unused - decide
+        switch( model.get( 'element_type' ) ){
+            case 'hda':
+                return this.DatasetDCEViewClass;
+            case 'dataset_collection':
+                return this.NestedDCDCEViewClass;
+        }
+        throw new TypeError( 'Unknown element type: ' + model.get( 'element_type' ) );
+    },
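+
+    // i.e. 'hda' elements render with DatasetDCEViewClass, 'dataset_collection' elements
+    // with NestedDCDCEViewClass, and any other element_type is a programming error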
+
+    /** override to add link target and anon */
+    _getItemViewOptions : function( model ){
+        var options = _super.prototype._getItemViewOptions.call( this, model );
+        return _.extend( options, {
+            linkTarget      : this.linkTarget,
+            hasUser         : this.hasUser,
+            //TODO: could move to only nested: list:paired
+            foldoutStyle    : this.foldoutStyle
+        });
+    },
+
+    // ------------------------------------------------------------------------ collection sub-views
+    /** In this override, add/remove expanded/collapsed model ids to/from web storage */
+    _setUpItemViewListeners : function( view ){
+        var panel = this;
+        _super.prototype._setUpItemViewListeners.call( panel, view );
+
+        // use pub-sub to: handle drilldown expansion and collapse
+        panel.listenTo( view, {
+            'expanded:drilldown': function( v, drilldown ){
+                this._expandDrilldownPanel( drilldown );
+            },
+            'collapsed:drilldown': function( v, drilldown ){
+                this._collapseDrilldownPanel( drilldown );
+            }
+        });
+        return this;
+    },
+
+    /** Handle drill down by hiding this panel's list and controls and showing the sub-panel */
+    _expandDrilldownPanel : function( drilldown ){
+        this.panelStack.push( drilldown );
+        // hide this panel's controls and list, set the name for back navigation, and attach to the $el
+        this.$( '> .controls' ).add( this.$list() ).hide();
+        drilldown.parentName = this.model.get( 'name' );
+        this.$el.append( drilldown.render().$el );
+    },
+
+    /** Handle drilldown close by freeing the panel and re-rendering this panel */
+    _collapseDrilldownPanel : function( drilldown ){
+        this.panelStack.pop();
+        this.render();
+    },
+
+    // ------------------------------------------------------------------------ panel events
+    /** event map */
+    events : {
+        'click .navigation .back' : 'close'
+    },
+
+    /** close/remove this collection panel */
+    close : function( event ){
+        this.remove();
+        this.trigger( 'close' );
+    },
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'CollectionView(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+//------------------------------------------------------------------------------ TEMPLATES
+CollectionView.prototype.templates = (function(){
+
+    var controlsTemplate = BASE_MVC.wrapTemplate([
+        '<div class="controls">',
+            '<div class="navigation">',
+                '<a class="back" href="javascript:void(0)">',
+                    '<span class="fa fa-icon fa-angle-left"></span>',
+                    _l( 'Back to ' ), '<%- view.parentName %>',
+                '</a>',
+            '</div>',
+
+            '<div class="title">',
+                '<div class="name"><%- collection.name || collection.element_identifier %></div>',
+                '<div class="subtitle">',
+                    '<% if( collection.collection_type === "list" ){ %>',
+                        _l( 'a list of datasets' ),
+                    '<% } else if( collection.collection_type === "paired" ){ %>',
+                        _l( 'a pair of datasets' ),
+                    '<% } else if( collection.collection_type === "list:paired" ){ %>',
+                        _l( 'a list of paired datasets' ),
+                    '<% } else if( collection.collection_type === "list:list" ){ %>',
+                        _l( 'a list of dataset lists' ),
+                    '<% } %>',
+                '</div>',
+            '</div>',
+        '</div>'
+    ], 'collection' );
+
+    return _.extend( _.clone( _super.prototype.templates ), {
+        controls : controlsTemplate
+    });
+}());
+
+
+
+// =============================================================================
+/** @class non-editable, read-only View/Controller for a list dataset collection. */
+var ListCollectionView = CollectionView.extend(
+/** @lends ListCollectionView.prototype */{
+
+    //TODO: not strictly needed - due to switch in CollectionView._getContentClass
+    /** sub view class used for datasets */
+    DatasetDCEViewClass : DC_LI.DatasetDCEListItemView,
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'ListCollectionView(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+// =============================================================================
+/** @class non-editable, read-only View/Controller for a pair dataset collection. */
+var PairCollectionView = ListCollectionView.extend(
+/** @lends PairCollectionView.prototype */{
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'PairCollectionView(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+// =============================================================================
+/** @class non-editable, read-only View/Controller for a list of pairs dataset collection. */
+var ListOfPairsCollectionView = CollectionView.extend(
+/** @lends ListOfPairsCollectionView.prototype */{
+
+    //TODO: not strictly needed - due to switch in CollectionView._getContentClass
+    /** sub view class used for nested collections */
+    NestedDCDCEViewClass : DC_LI.NestedDCDCEListItemView.extend({
+        foldoutPanelClass : PairCollectionView
+    }),
+
+    // ........................................................................ misc
+    /** string rep */
+    toString    : function(){
+        return 'ListOfPairsCollectionView(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+// =============================================================================
+/** @class non-editable, read-only View/Controller for a list of lists dataset collection. */
+var ListOfListsCollectionView = CollectionView.extend({
+
+    /** sub view class used for nested collections */
+    NestedDCDCEViewClass : DC_LI.NestedDCDCEListItemView.extend({
+        foldoutPanelClass : PairCollectionView
+    }),
+
+    /** string rep */
+    toString    : function(){
+        return 'ListOfListsCollectionView(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+//==============================================================================
+    return {
+        CollectionView              : CollectionView,
+        ListCollectionView          : ListCollectionView,
+        PairCollectionView          : PairCollectionView,
+        ListOfPairsCollectionView   : ListOfPairsCollectionView,
+        ListOfListsCollectionView   : ListOfListsCollectionView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/collection/list-collection-creator.js b/client/galaxy/scripts/mvc/collection/list-collection-creator.js
new file mode 100644
index 0000000..5d7b757
--- /dev/null
+++ b/client/galaxy/scripts/mvc/collection/list-collection-creator.js
@@ -0,0 +1,1062 @@
+
+define([
+    "mvc/history/hdca-model",
+    "mvc/dataset/states",
+    "mvc/base-mvc",
+    "mvc/ui/ui-modal",
+    "utils/natural-sort",
+    "utils/localization",
+    "ui/hoverhighlight"
+], function( HDCA, STATES, BASE_MVC, UI_MODAL, naturalSort, _l ){
+
+'use strict';
+
+var logNamespace = 'collections';
+/*==============================================================================
+TODO:
+    use proper Element model and not just json
+    straighten out createFn, collection.createHDCA
+    possibly stop using modals for this
+    It would be neat to do a drag and drop
+
+==============================================================================*/
+/** A view for both DatasetDCEs and NestedDCDCEs
+ *  (things that implement collection-model:DatasetCollectionElementMixin)
+ */
+var DatasetCollectionElementView = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+//TODO: use proper class (DatasetDCE or NestedDCDCE (or the union of both))
+    tagName     : 'li',
+    className   : 'collection-element',
+
+    initialize : function( attributes ){
+        this.element = attributes.element || {};
+        this.selected = attributes.selected || false;
+    },
+
+    render : function(){
+        this.$el
+            .attr( 'data-element-id', this.element.id )
+            .attr( 'draggable', true )
+            .html( this.template({ element: this.element }) );
+        if( this.selected ){
+            this.$el.addClass( 'selected' );
+        }
+        return this;
+    },
+
+    //TODO: lots of unused space in the element - possibly load details and display them horiz.
+    template : _.template([
+        '<a class="name" title="', _l( 'Click to rename' ), '" href="javascript:void(0)">',
+            '<%- element.name %>',
+        '</a>',
+        '<button class="discard btn btn-sm" title="', _l( 'Remove this dataset from the list' ), '">',
+            _l( 'Discard' ),
+        '</button>',
+    ].join('')),
+
+    /** select this element and pub */
+    select : function( toggle ){
+        this.$el.toggleClass( 'selected', toggle );
+        this.trigger( 'select', {
+            source   : this,
+            selected : this.$el.hasClass( 'selected' )
+        });
+    },
+
+    /** animate the removal of this element and pub */
+    discard : function(){
+        var view = this,
+            parentWidth = this.$el.parent().width();
+        this.$el.animate({ 'margin-right' : parentWidth }, 'fast', function(){
+            view.trigger( 'discard', {
+                source : view
+            });
+            view.destroy();
+        });
+    },
+
+    /** remove the DOM and any listeners */
+    destroy : function(){
+        this.off();
+        this.$el.remove();
+    },
+
+    events : {
+        'click'         : '_click',
+        'click .name'   : '_clickName',
+        'click .discard': '_clickDiscard',
+
+        'dragstart'     : '_dragstart',
+        'dragend'       : '_dragend',
+        'dragover'      : '_sendToParent',
+        'drop'          : '_sendToParent'
+    },
+
+    /** select when the li is clicked */
+    _click : function( ev ){
+        ev.stopPropagation();
+        this.select( ev );
+    },
+
+    /** rename a pair when the name is clicked */
+    _clickName : function( ev ){
+        ev.stopPropagation();
+        ev.preventDefault();
+        var promptString = [ _l( 'Enter a new name for the element' ), ':\n(',
+                             _l( 'Note that changing the name here will not rename the dataset' ), ')' ].join( '' ),
+            response = prompt( promptString, this.element.name );
+        if( response ){
+            this.element.name = response;
+            this.render();
+        }
+        //TODO: cancelling with ESC leads to closure of the creator...
+    },
+
+    /** discard when the discard button is clicked */
+    _clickDiscard : function( ev ){
+        ev.stopPropagation();
+        this.discard();
+    },
+
+    /** dragging pairs for re-ordering */
+    _dragstart : function( ev ){
+        if( ev.originalEvent ){ ev = ev.originalEvent; }
+        ev.dataTransfer.effectAllowed = 'move';
+        ev.dataTransfer.setData( 'text/plain', JSON.stringify( this.element ) );
+
+        this.$el.addClass( 'dragging' );
+        this.$el.parent().trigger( 'collection-element.dragstart', [ this ] );
+    },
+
+    /** dragging for re-ordering */
+    _dragend : function( ev ){
+        this.$el.removeClass( 'dragging' );
+        this.$el.parent().trigger( 'collection-element.dragend', [ this ] );
+    },
+
+    /** manually bubble up an event to the parent/container */
+    _sendToParent : function( ev ){
+        this.$el.parent().trigger( ev );
+    },
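+
+    // dragover and drop are forwarded untouched so the list container (which owns element
+    // ordering) can handle them in one place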
+
+    /** string rep */
+    toString : function(){
+        return 'DatasetCollectionElementView()';
+    }
+});
+
+
+// ============================================================================
+/** An interface for building collections.
+ */
+var ListCollectionCreator = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    /** the class used to display individual elements */
+    elementViewClass : DatasetCollectionElementView,
+    /** the class this creator will create and save */
+    collectionClass  : HDCA.HistoryListDatasetCollection,
+    className        : 'list-collection-creator collection-creator flex-row-container',
+
+    /** minimum number of valid elements to start with in order to build a collection of this type */
+    minElements      : 1,
+
+    defaultAttributes : {
+//TODO: remove - use new collectionClass().save()
+        /** takes elements and creates the proper collection - returns a promise */
+        creationFn : function(){ throw new TypeError( 'no creation fn for creator' ); },
+        /** fn to call when the collection is created (scoped to this) */
+        oncreate   : function(){},
+        /** fn to call when the cancel button is clicked (scoped to this) - if falsy, no btn is displayed */
+        oncancel   : function(){},
+        /** distance from list edge to begin autoscrolling list */
+        autoscrollDist  : 24,
+        /** Color passed to hoverhighlight */
+        highlightClr    : 'rgba( 64, 255, 255, 1.0 )'
+    },
+
+    /** set up initial options, instance vars, behaviors */
+    initialize : function( attributes ){
+        this.metric( 'ListCollectionCreator.initialize', attributes );
+        var creator = this;
+        _.each( this.defaultAttributes, function( value, key ){
+            value = attributes[ key ] || value;
+            creator[ key ] = value;
+        });
+
+        /** unordered, original list - cache to allow reversal */
+        creator.initialElements = attributes.elements || [];
+
+        this._instanceSetUp();
+        this._elementsSetUp();
+        this._setUpBehaviors();
+    },
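+
+    // A hedged usage sketch (element json and callback bodies are hypothetical; per
+    // defaultAttributes, creationFn takes the valid elements and must return a promise):
+    //     var creator = new ListCollectionCreator({
+    //         elements   : [{ id: 'hda1', name: 'reads', history_content_type: 'dataset', state: 'ok' }],
+    //         creationFn : function( elements, name ){ /* create the HDCA */ return jQuery.when(); },
+    //         oncreate   : function(){ /* e.g. close the containing modal */ }
+    //     });
+    //     creator.render();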
+
+    /** set up instance vars */
+    _instanceSetUp : function(){
+        /** Ids of elements that have been selected by the user - to preserve over renders */
+        this.selectedIds = {};
+        /** DOM elements currently being dragged */
+        this.$dragging = null;
+        /** Used for blocking UI events during ajax/operations (don't post twice) */
+        this.blocking = false;
+    },
+
+    // ------------------------------------------------------------------------ process raw list
+    /** set up main data */
+    _elementsSetUp : function(){
+        //this.debug( '-- _dataSetUp' );
+        /** a list of invalid elements and the reasons they aren't valid */
+        this.invalidElements = [];
+//TODO: handle fundamental problem of syncing DOM, views, and list here
+        /** data for list in progress */
+        this.workingElements = [];
+        /** views for workingElements */
+        this.elementViews = [];
+
+        // copy initial list, sort, add ids if needed
+        this.workingElements = this.initialElements.slice( 0 );
+        this._ensureElementIds();
+        this._validateElements();
+        this._mangleDuplicateNames();
+        this._sortElements();
+    },
+
+    /** add ids to dataset objs in initial list if none */
+    _ensureElementIds : function(){
+        this.workingElements.forEach( function( element ){
+            if( !element.hasOwnProperty( 'id' ) ){
+                element.id = _.uniqueId();
+            }
+        });
+        return this.workingElements;
+    },
+
+    /** separate working list into valid and invalid elements for this collection */
+    _validateElements : function(){
+        var creator = this;
+        creator.invalidElements = [];
+
+        this.workingElements = this.workingElements.filter( function( element ){
+            var problem = creator._isElementInvalid( element );
+            if( problem ){
+                creator.invalidElements.push({
+                    element : element,
+                    text    : problem
+                });
+            }
+            return !problem;
+        });
+        return this.workingElements;
+    },
+
+    /** describe what is wrong with a particular element if anything */
+    _isElementInvalid : function( element ){
+        if( element.history_content_type !== 'dataset' ){
+            return _l( "is not a dataset" );
+        }
+        if( element.state !== STATES.OK ){
+            if( _.contains( STATES.NOT_READY_STATES, element.state ) ){
+                return _l( "hasn't finished running yet" );
+            }
+            return _l( "has errored, is paused, or is not accessible" );
+        }
+        if( element.deleted || element.purged ){
+            return _l( "has been deleted or purged" );
+        }
+        return null;
+    },
+
+    /** mangle duplicate names using a mac-like '(counter)' addition to any duplicates */
+    _mangleDuplicateNames : function(){
+        var SAFETY = 900,
+            counter = 1,
+            existingNames = {};
+        this.workingElements.forEach( function( element ){
+            var currName = element.name;
+            while( existingNames.hasOwnProperty( currName ) ){
+                currName = element.name + ' (' + counter + ')';
+                counter += 1;
+                if( counter >= SAFETY ){
+                    throw new Error( 'Safety hit in while loop - that\'s impressive' );
+                }
+            }
+            element.name = currName;
+            existingNames[ element.name ] = true;
+        });
+    },
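+
+    // e.g. three elements all named 'reads' come out as 'reads', 'reads (1)', 'reads (2)'
+    // (the counter is shared across all duplicates rather than reset per name)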
+
+    /** sort a list of elements */
+    _sortElements : function( list ){
+        // // currently only natural sort by name
+        // this.workingElements.sort( function( a, b ){ return naturalSort( a.name, b.name ); });
+        // return this.workingElements;
+    },
+
+    // ------------------------------------------------------------------------ rendering
+    // templates : ListCollectionCreator.templates,
+    /** render the entire interface */
+    render : function( speed, callback ){
+        //this.debug( '-- _render' );
+        if( this.workingElements.length < this.minElements ){
+            return this._renderInvalid( speed, callback );
+        }
+
+        this.$el.empty().html( this.templates.main() );
+        this._renderHeader( speed );
+        this._renderMiddle( speed );
+        this._renderFooter( speed );
+        this._addPluginComponents();
+        this.$( '.collection-name' ).focus();
+        this.trigger( 'rendered', this );
+        return this;
+    },
+
+
+    /** render a simplified interface aimed at telling the user why they can't move forward */
+    _renderInvalid : function( speed, callback ){
+        //this.debug( '-- _render' );
+        this.$el.empty().html( this.templates.invalidInitial({
+            problems: this.invalidElements,
+            elements: this.workingElements,
+        }));
+        if( typeof this.oncancel === 'function' ){
+            this.$( '.cancel-create.btn' ).show();
+        }
+        this.trigger( 'rendered', this );
+        return this;
+    },
+
+    /** render the header section */
+    _renderHeader : function( speed, callback ){
+        var $header = this.$( '.header' ).empty().html( this.templates.header() )
+            .find( '.help-content' ).prepend( $( this.templates.helpContent() ) );
+        //TODO: should only show once despite calling _renderHeader again
+        if( this.invalidElements.length ){
+            this._invalidElementsAlert();
+        }
+        return $header;
+    },
+
+    /** render the middle including the elements */
+    _renderMiddle : function( speed, callback ){
+        var $middle = this.$( '.middle' ).empty().html( this.templates.middle() );
+        this._renderList( speed );
+        return $middle;
+    },
+
+    /** render the footer, completion controls, and cancel controls */
+    _renderFooter : function( speed, callback ){
+        var $footer = this.$( '.footer' ).empty().html( this.templates.footer() );
+        if( typeof this.oncancel === 'function' ){
+            this.$( '.cancel-create.btn' ).show();
+        }
+        return $footer;
+    },
+
+    /** add any jQuery/bootstrap/custom plugins to elements rendered */
+    _addPluginComponents : function(){
+        this.$( '.help-content i' ).hoverhighlight( '.collection-creator', this.highlightClr );
+    },
+
+    /** build and show an alert describing any elements that could not be included due to problems */
+    _invalidElementsAlert : function(){
+        this._showAlert( this.templates.invalidElements({ problems: this.invalidElements }), 'alert-warning' );
+    },
+
+    /** add a validation warning to the DOM element(s) given in 'what' (or, if 'clear' is truthy, remove existing warnings) */
+    _validationWarning : function( what, clear ){
+        var VALIDATION_CLASS = 'validation-warning';
+        if( what === 'name' ){
+            what = this.$( '.collection-name' ).add( this.$( '.collection-name-prompt' ) );
+            this.$( '.collection-name' ).focus().select();
+        }
+        if( clear ){
+            what = what || this.$( '.' + VALIDATION_CLASS );
+            what.removeClass( VALIDATION_CLASS );
+        } else {
+            what.addClass( VALIDATION_CLASS );
+        }
+    },
+
+    _disableNameAndCreate : function( disable ){
+        disable = !_.isUndefined( disable )? disable : true;
+        if( disable ){
+            this.$( '.collection-name' ).prop( 'disabled', true );
+            this.$( '.create-collection' ).toggleClass( 'disabled', true );
+        // } else {
+        //     this.$( '.collection-name' ).prop( 'disabled', false );
+        //     this.$( '.create-collection' ).removeClass( 'disable' );
+        }
+    },
+
+    // ------------------------------------------------------------------------ rendering elements
+    /** convenience accessor for the main list display DOM */
+    $list : function(){
+        return this.$( '.collection-elements' );
+    },
+
+    /** show or hide the clear selected control based on the num of selected elements */
+    _renderClearSelected : function(){
+        if( _.size( this.selectedIds ) ){
+            this.$( '.collection-elements-controls > .clear-selected' ).show();
+        } else {
+            this.$( '.collection-elements-controls > .clear-selected' ).hide();
+        }
+    },
+
+    /** render the elements in order (or a warning if no elements found) */
+    _renderList : function( speed, callback ){
+        //this.debug( '-- _renderList' );
+        var creator = this,
+            $tmp = jQuery( '<div/>' ),
+            $list = creator.$list();
+
+        _.each( this.elementViews, function( view ){
+            view.destroy();
+            creator.removeElementView( view );
+        });
+
+        // if( !this.workingElements.length ){
+        //     this._renderNoValidElements();
+        //     return;
+        // }
+
+        creator.workingElements.forEach( function( element ){
+            var elementView = creator._createElementView( element );
+            $tmp.append( elementView.$el );
+        });
+
+        creator._renderClearSelected();
+        $list.empty().append( $tmp.children() );
+        _.invoke( creator.elementViews, 'render' );
+
+        // parseInt: css( 'max-height' ) returns a string such as '400px', which
+        // would otherwise make the numeric comparison always false
+        if( $list.height() > parseInt( $list.css( 'max-height' ), 10 ) ){
+            $list.css( 'border-width', '1px 0px 1px 0px' );
+        } else {
+            $list.css( 'border-width', '0px' );
+        }
+    },
+
+    /** create an element view, cache in elementViews, set up listeners, and return */
+    _createElementView : function( element ){
+        var elementView = new this.elementViewClass({
+//TODO: use non-generic class or not all
+            // model : COLLECTION.DatasetDCE( element )
+            element : element,
+            selected: _.has( this.selectedIds, element.id )
+        });
+        this.elementViews.push( elementView );
+        this._listenToElementView( elementView );
+        return elementView;
+    },
+
+    /** listen to any element events */
+    _listenToElementView : function( view ){
+        var creator = this;
+        creator.listenTo( view, {
+            select : function( data ){
+                var element = data.source.element;
+                if( data.selected ){
+                    creator.selectedIds[ element.id ] = true;
+                } else {
+                    delete creator.selectedIds[ element.id ];
+                }
+                creator.trigger( 'elements:select', data );
+            },
+            discard : function( data ){
+                creator.trigger( 'elements:discard', data );
+            }
+        });
+    },
+
+    /** add a new element view based on the json in element */
+    addElementView : function( element ){
+//TODO: workingElements is sorted, add element at the appropriate index
+        // add element, sort elements, find element index
+        // var view = this._createElementView( element );
+        // return view;
+    },
+
+    /** stop listening to view and remove from caches */
+    removeElementView : function( view ){
+        delete this.selectedIds[ view.element.id ];
+        this._renderClearSelected();
+
+        this.elementViews = _.without( this.elementViews, view );
+        this.stopListening( view );
+    },
+
+    /** render a message in the list that no elements remain to create a collection */
+    _renderNoElementsLeft : function(){
+        this._disableNameAndCreate( true );
+        this.$( '.collection-elements' ).append( this.templates.noElementsLeft() );
+    },
+
+    // /** render a message in the list that no valid elements were found to create a collection */
+    // _renderNoValidElements : function(){
+    //     this._disableNameAndCreate( true );
+    //     this.$( '.collection-elements' ).append( this.templates.noValidElements() );
+    // },
+
+    // ------------------------------------------------------------------------ API
+    /** convert element into JSON compatible with the collections API */
+    _elementToJSON : function( element ){
+        // return element.toJSON();
+        return element;
+    },
+
+    /** create the collection via the API
+     *  @returns {jqXHR|undefined} the jQuery ajax request, or undefined when no valid elements remain
+     */
+    createList : function( name ){
+        if( !this.workingElements.length ){
+            var message = _l( 'No valid elements for final list' ) + '. ';
+            message += '<a class="cancel-create" href="javascript:void(0);">' + _l( 'Cancel' ) + '</a> ';
+            message += _l( 'or' );
+            message += ' <a class="reset" href="javascript:void(0);">' + _l( 'start over' ) + '</a>.';
+            this._showAlert( message );
+            return;
+        }
+
+        var creator = this,
+            elements = this.workingElements.map( function( element ){
+                return creator._elementToJSON( element );
+            });
+
+        creator.blocking = true;
+        return creator.creationFn( elements, name )
+            .always( function(){
+                creator.blocking = false;
+            })
+            .fail( function( xhr, status, message ){
+                creator.trigger( 'error', {
+                    xhr     : xhr,
+                    status  : status,
+                    message : _l( 'An error occurred while creating this collection' )
+                });
+            })
+            .done( function( response, message, xhr ){
+                creator.trigger( 'collection:created', response, message, xhr );
+                creator.metric( 'collection:created', response );
+                if( typeof creator.oncreate === 'function' ){
+                    creator.oncreate.call( this, response, message, xhr );
+                }
+            });
+    },
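+
+    // Hypothetical usage sketch (illustrative name and handlers; createList
+    // returns the jqXHR only when valid elements remain - see the guard above):
+    //   creator.createList( 'My list' )
+    //       .done( function( response ){ console.log( 'created', response ); })
+    //       .fail( function( xhr, status ){ console.log( 'failed:', status ); });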
+
+    // ------------------------------------------------------------------------ events
+    /** set up event handlers on self */
+    _setUpBehaviors : function(){
+        this.on( 'error', this._errorHandler );
+
+        this.once( 'rendered', function(){
+            this.trigger( 'rendered:initial', this );
+        });
+
+        this.on( 'elements:select', function( data ){
+            this._renderClearSelected();
+        });
+
+        this.on( 'elements:discard', function( data ){
+            var element = data.source.element;
+            this.removeElementView( data.source );
+
+            this.workingElements = _.without( this.workingElements, element );
+            if( !this.workingElements.length ){
+                this._renderNoElementsLeft();
+            }
+        });
+
+        //this.on( 'all', function(){
+        //    this.info( arguments );
+        //});
+        return this;
+    },
+
+    /** handle errors with feedback and details to the user (if available) */
+    _errorHandler : function( data ){
+        this.error( data );
+
+        var creator = this,
+            content = data.message || _l( 'An error occurred' );
+        if( data.xhr ){
+            var xhr = data.xhr,
+                message = data.message;
+            if( xhr.readyState === 0 && xhr.status === 0 ){
+                content += ': ' + _l( 'Galaxy could not be reached and may be updating.' ) +
+                    _l( ' Try again in a few minutes.' );
+            } else if( xhr.responseJSON ){
+                content += ':<br /><pre>' + JSON.stringify( xhr.responseJSON ) + '</pre>';
+            } else {
+                content += ': ' + message;
+            }
+        }
+        creator._showAlert( content, 'alert-danger' );
+    },
+
+    events : {
+        // header
+        'click .more-help'              : '_clickMoreHelp',
+        'click .less-help'              : '_clickLessHelp',
+        'click .main-help'              : '_toggleHelp',
+        'click .header .alert button'   : '_hideAlert',
+
+        'click .reset'                  : 'reset',
+        'click .clear-selected'         : 'clearSelectedElements',
+
+        // elements - selection
+        'click .collection-elements'    : 'clearSelectedElements',
+
+        // elements - drop target
+        // 'dragenter .collection-elements': '_dragenterElements',
+        // 'dragleave .collection-elements': '_dragleaveElements',
+        'dragover .collection-elements' : '_dragoverElements',
+        'drop .collection-elements'     : '_dropElements',
+
+        // these bubble up from the elements as custom events
+        'collection-element.dragstart .collection-elements' : '_elementDragstart',
+        'collection-element.dragend   .collection-elements' : '_elementDragend',
+
+        // footer
+        'change .collection-name'       : '_changeName',
+        'keydown .collection-name'      : '_nameCheckForEnter',
+        'click .cancel-create'          : function( ev ){
+            if( typeof this.oncancel === 'function' ){
+                this.oncancel.call( this );
+            }
+        },
+        'click .create-collection'      : '_clickCreate'
+    },
+
+    // ........................................................................ header
+    /** expand help */
+    _clickMoreHelp : function( ev ){
+        ev.stopPropagation();
+        this.$( '.main-help' ).addClass( 'expanded' );
+        this.$( '.more-help' ).hide();
+    },
+    /** collapse help */
+    _clickLessHelp : function( ev ){
+        ev.stopPropagation();
+        this.$( '.main-help' ).removeClass( 'expanded' );
+        this.$( '.more-help' ).show();
+    },
+    /** toggle help */
+    _toggleHelp : function( ev ){
+        ev.stopPropagation();
+        this.$( '.main-help' ).toggleClass( 'expanded' );
+        this.$( '.more-help' ).toggle();
+    },
+
+    /** show an alert on the top of the interface containing message (alertClass is bootstrap's alert-*) */
+    _showAlert : function( message, alertClass ){
+        alertClass = alertClass || 'alert-danger';
+        this.$( '.main-help' ).hide();
+        this.$( '.header .alert' )
+            .attr( 'class', 'alert alert-dismissable' ).addClass( alertClass ).show()
+            .find( '.alert-message' ).html( message );
+    },
+    /** hide the alerts at the top */
+    _hideAlert : function( message ){
+        this.$( '.main-help' ).show();
+        this.$( '.header .alert' ).hide();
+    },
+
+    // ........................................................................ elements
+    /** reset all data to the initial state */
+    reset : function(){
+        this._instanceSetUp();
+        this._elementsSetUp();
+        this.render();
+    },
+
+    /** deselect all elements */
+    clearSelectedElements : function( ev ){
+        this.$( '.collection-elements .collection-element' ).removeClass( 'selected' );
+        this.$( '.collection-elements-controls > .clear-selected' ).hide();
+    },
+
+    //_dragenterElements : function( ev ){
+    //    //this.debug( '_dragenterElements:', ev );
+    //},
+//TODO: if selected are dragged out of the list area - remove the placeholder - because the drop won't work anyway
+    // _dragleaveElements : function( ev ){
+    //    //this.debug( '_dragleaveElements:', ev );
+    // },
+
+    /** track the mouse drag over the list adding a placeholder to show where the drop would occur */
+    _dragoverElements : function( ev ){
+        //this.debug( '_dragoverElements:', ev );
+        ev.preventDefault();
+
+        var $list = this.$list();
+        this._checkForAutoscroll( $list, ev.originalEvent.clientY );
+        var $nearest = this._getNearestElement( ev.originalEvent.clientY );
+
+        //TODO: no need to re-create - move instead
+        this.$( '.element-drop-placeholder' ).remove();
+        var $placeholder = $( '<div class="element-drop-placeholder"></div>' );
+        if( !$nearest.length ){
+            $list.append( $placeholder );
+        } else {
+            $nearest.before( $placeholder );
+        }
+    },
+
+    /** If the mouse is near enough to the list's top or bottom, scroll the list */
+    _checkForAutoscroll : function( $element, y ){
+        var AUTOSCROLL_SPEED = 2,
+            offset = $element.offset(),
+            scrollTop = $element.scrollTop(),
+            upperDist = y - offset.top,
+            lowerDist = ( offset.top + $element.outerHeight() ) - y;
+        if( upperDist >= 0 && upperDist < this.autoscrollDist ){
+            $element.scrollTop( scrollTop - AUTOSCROLL_SPEED );
+        } else if( lowerDist >= 0 && lowerDist < this.autoscrollDist ){
+            $element.scrollTop( scrollTop + AUTOSCROLL_SPEED );
+        }
+    },
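+
+    // Worked example (illustrative numbers): with the list's offset().top at 100
+    // and the default autoscrollDist of 24, a drag at clientY 110 gives an
+    // upperDist of 10 < 24, so the list scrolls toward the top by
+    // AUTOSCROLL_SPEED (2px) on each dragover event.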
+
+    /** get the nearest element based on the mouse's Y coordinate.
+     *  If the y is at the end of the list, return an empty jQuery object.
+     */
+    _getNearestElement : function( y ){
+        var WIGGLE = 4,
+            lis = this.$( '.collection-elements li.collection-element' ).toArray();
+        for( var i=0; i<lis.length; i++ ){
+            var $li = $( lis[i] ),
+                top = $li.offset().top,
+                halfHeight = Math.floor( $li.outerHeight() / 2 ) + WIGGLE;
+            if( top + halfHeight > y && top - halfHeight < y ){
+                return $li;
+            }
+        }
+        return $();
+    },
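+
+    // e.g. (illustrative) for a 32px-tall element whose offset().top is 100,
+    // halfHeight is floor(32/2) + 4 = 20, so any y in (80, 120) selects it;
+    // a y outside every element's band returns the empty jQuery set.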
+
+    /** drop (dragged/selected elements) onto the list, re-ordering the internal list */
+    _dropElements : function( ev ){
+        if( ev.originalEvent ){ ev = ev.originalEvent; }
+        // both required for firefox
+        ev.preventDefault();
+        ev.dataTransfer.dropEffect = 'move';
+
+        // insert before the nearest element or after the last.
+        var $nearest = this._getNearestElement( ev.clientY );
+        if( $nearest.length ){
+            this.$dragging.insertBefore( $nearest );
+        } else {
+            // no nearest before - insert after last element
+            this.$dragging.insertAfter( this.$( '.collection-elements .collection-element' ).last() );
+        }
+        // resync the creator's list based on the new DOM order
+        this._syncOrderToDom();
+        return false;
+    },
+
+    /** resync the creator's list of elements based on the DOM order */
+    _syncOrderToDom : function(){
+        var creator = this,
+            newElements = [];
+        //TODO: doesn't seem wise to use the dom to store these - can't we sync another way?
+        this.$( '.collection-elements .collection-element' ).each( function(){
+            var id = $( this ).attr( 'data-element-id' ),
+                element = _.findWhere( creator.workingElements, { id: id });
+            if( element ){
+                newElements.push( element );
+            } else {
+                console.error( 'missing element: ', id );
+            }
+        });
+        this.workingElements = newElements;
+        this._renderList();
+    },
+
+    /** drag communication with element sub-views: dragstart */
+    _elementDragstart : function( ev, element ){
+        // auto select the element causing the event and move all selected
+        element.select( true );
+        this.$dragging = this.$( '.collection-elements .collection-element.selected' );
+    },
+
+    /** drag communication with element sub-views: dragend - remove the placeholder */
+    _elementDragend : function( ev, element ){
+        $( '.element-drop-placeholder' ).remove();
+        this.$dragging = null;
+    },
+
+    // ........................................................................ footer
+    /** handle a collection name change */
+    _changeName : function( ev ){
+        this._validationWarning( 'name', !!this._getName() );
+    },
+
+    /** check for enter key press when in the collection name and submit */
+    _nameCheckForEnter : function( ev ){
+        if( ev.keyCode === 13 && !this.blocking ){
+            this._clickCreate();
+        }
+    },
+
+    /** get the current collection name */
+    _getName : function(){
+        return _.escape( this.$( '.collection-name' ).val() );
+    },
+
+    /** attempt to create the current collection */
+    _clickCreate : function( ev ){
+        var name = this._getName();
+        if( !name ){
+            this._validationWarning( 'name' );
+        } else if( !this.blocking ){
+            this.createList( name );
+        }
+    },
+
+    // ------------------------------------------------------------------------ templates
+    //TODO: move to require text plugin and load these as text
+    //TODO: underscore currently unnecc. bc no vars are used
+    //TODO: better way of localizing text-nodes in long strings
+    /** underscore template fns attached to class */
+    templates : {
+        /** the skeleton */
+        main : _.template([
+            '<div class="header flex-row no-flex"></div>',
+            '<div class="middle flex-row flex-row-container"></div>',
+            '<div class="footer flex-row no-flex"></div>'
+        ].join('')),
+
+        /** the header (not including help text) */
+        header : _.template([
+            '<div class="main-help well clear">',
+                '<a class="more-help" href="javascript:void(0);">', _l( 'More help' ), '</a>',
+                '<div class="help-content">',
+                    '<a class="less-help" href="javascript:void(0);">', _l( 'Less' ), '</a>',
+                '</div>',
+            '</div>',
+            '<div class="alert alert-dismissable">',
+                '<button type="button" class="close" data-dismiss="alert" ',
+                    'title="', _l( 'Close and show more help' ), '" aria-hidden="true">×</button>',
+                '<span class="alert-message"></span>',
+            '</div>',
+        ].join('')),
+
+        /** the middle: element list */
+        middle : _.template([
+            '<div class="collection-elements-controls">',
+                '<a class="reset" href="javascript:void(0);" ',
+                    'title="', _l( 'Undo all reordering and discards' ), '">',
+                    _l( 'Start over' ),
+                '</a>',
+                '<a class="clear-selected" href="javascript:void(0);" ',
+                    'title="', _l( 'De-select all selected datasets' ), '">',
+                    _l( 'Clear selected' ),
+                '</a>',
+            '</div>',
+            '<div class="collection-elements scroll-container flex-row">',
+            '</div>'
+        ].join('')),
+
+        /** creation and cancel controls */
+        footer : _.template([
+            '<div class="attributes clear">',
+                '<div class="clear">',
+                    '<input class="collection-name form-control pull-right" ',
+                        'placeholder="', _l( 'Enter a name for your new collection' ), '" />',
+                    '<div class="collection-name-prompt pull-right">', _l( 'Name' ), ':</div>',
+                '</div>',
+            '</div>',
+
+            '<div class="actions clear vertically-spaced">',
+                '<div class="other-options pull-left">',
+                    '<button class="cancel-create btn" tabindex="-1">', _l( 'Cancel' ), '</button>',
+                    '<div class="create-other btn-group dropup">',
+                        '<button class="btn btn-default dropdown-toggle" data-toggle="dropdown">',
+                              _l( 'Create a different kind of collection' ),
+                              ' <span class="caret"></span>',
+                        '</button>',
+                        '<ul class="dropdown-menu" role="menu">',
+                              '<li><a href="#">', _l( 'Create a <i>single</i> pair' ), '</a></li>',
+                              '<li><a href="#">', _l( 'Create a list of <i>unpaired</i> datasets' ), '</a></li>',
+                        '</ul>',
+                    '</div>',
+                '</div>',
+
+                '<div class="main-options pull-right">',
+                    '<button class="create-collection btn btn-primary">', _l( 'Create list' ), '</button>',
+                '</div>',
+            '</div>'
+        ].join('')),
+
+        /** help content */
+        helpContent : _.template([
+            '<p>', _l([
+                'Collections of datasets are permanent, ordered lists of datasets that can be passed to tools and ',
+                'workflows in order to have analyses done on each member of the entire group. This interface allows ',
+                'you to create a collection and re-order the final collection.'
+            ].join( '' )), '</p>',
+            '<ul>',
+                '<li>', _l([
+                    'Rename elements in the list by clicking on ',
+                    '<i data-target=".collection-element .name">the existing name</i>.'
+                ].join( '' )), '</li>',
+                '<li>', _l([
+                    'Discard elements from the final created list by clicking on the ',
+                    '<i data-target=".collection-element .discard">"Discard"</i> button.'
+                ].join( '' )), '</li>',
+                '<li>', _l([
+                    'Reorder the list by clicking and dragging elements. Select multiple elements by clicking on ',
+                    '<i data-target=".collection-element">them</i> and you can then move those selected by dragging the ',
+                    'entire group. Deselect them by clicking them again or by clicking the ',
+                    '<i data-target=".clear-selected">"Clear selected"</i> link.'
+                ].join( '' )), '</li>',
+                '<li>', _l([
+                    'Click the <i data-target=".reset">"Start over"</i> link to begin again as if you had just opened ',
+                    'the interface.'
+                ].join( '' )), '</li>',
+                '<li>', _l([
+                    'Click the <i data-target=".cancel-create">"Cancel"</i> button to exit the interface.'
+                ].join( '' )), '</li>',
+            '</ul><br />',
+            '<p>', _l([
+                'Once your collection is complete, enter a <i data-target=".collection-name">name</i> and ',
+                'click <i data-target=".create-collection">"Create list"</i>.'
+            ].join( '' )), '</p>'
+        ].join('')),
+
+        /** alert listing the selections that could not be included due to problems */
+        invalidElements : _.template([
+            _l( 'The following selections could not be included due to problems:' ),
+            '<ul><% _.each( problems, function( problem ){ %>',
+                '<li><b><%- problem.element.name %></b>: <%- problem.text %></li>',
+            '<% }); %></ul>'
+        ].join('')),
+
+        /** shown in list when all elements are discarded */
+        noElementsLeft : _.template([
+            '<li class="no-elements-left-message">',
+                _l( 'No elements left! ' ),
+                _l( 'Would you like to ' ), '<a class="reset" href="javascript:void(0)">', _l( 'start over' ), '</a>?',
+            '</li>'
+        ].join('')),
+
+        /** a simplified page communicating what went wrong and why the user needs to reselect something else */
+        invalidInitial : _.template([
+            '<div class="header flex-row no-flex">',
+                '<div class="alert alert-warning" style="display: block">',
+                    '<span class="alert-message">',
+                        '<% if( _.size( problems ) ){ %>',
+                            _l( 'The following selections could not be included due to problems' ), ':',
+                            '<ul><% _.each( problems, function( problem ){ %>',
+                                '<li><b><%- problem.element.name %></b>: <%- problem.text %></li>',
+                            '<% }); %></ul>',
+                        '<% } else if( _.size( elements ) < 1 ){ %>',
+                            _l( 'No datasets were selected' ), '.',
+                        '<% } %>',
+                        '<br />',
+                        _l( 'At least one element is needed for the collection' ), '. ',
+                        _l( 'You may need to ' ),
+                        '<a class="cancel-create" href="javascript:void(0)">', _l( 'cancel' ), '</a> ',
+                        _l( 'and reselect new elements' ), '.',
+                    '</span>',
+                '</div>',
+            '</div>',
+            '<div class="footer flex-row no-flex">',
+                '<div class="actions clear vertically-spaced">',
+                    '<div class="other-options pull-left">',
+                        '<button class="cancel-create btn" tabindex="-1">', _l( 'Cancel' ), '</button>',
+                        // _l( 'Create a different kind of collection' ),
+                    '</div>',
+                '</div>',
+            '</div>'
+        ].join('')),
+    },
+
+    // ------------------------------------------------------------------------ misc
+    /** string rep */
+    toString : function(){ return 'ListCollectionCreator'; }
+});
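+
+// Illustrative, non-executed sketch of standalone use. The option names mirror
+// how they are consumed above; the element JSON shape shown here (history
+// contents-like) is an assumption for the example:
+//   var creator = new ListCollectionCreator({
+//       elements   : [{ id: '1', name: 'a.fq', history_content_type: 'dataset', state: 'ok' }],
+//       creationFn : function( elements, name ){ return jQuery.Deferred().resolve(); },
+//       oncreate   : function( response ){ console.log( 'created', response ); }
+//   });
+//   $( 'body' ).append( creator.render().$el );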
+
+
+
+//=============================================================================
+/** Create a modal and load its body with the given CreatorClass creator type
+ *  @returns {Deferred} resolved when creator has built a collection.
+ */
+var collectionCreatorModal = function _collectionCreatorModal( elements, options, CreatorClass ){
+
+    var deferred = jQuery.Deferred(),
+        modal = Galaxy.modal || ( new UI_MODAL.View() ),
+        creator;
+
+    options = _.defaults( options || {}, {
+        elements    : elements,
+        oncancel    : function(){
+            modal.hide();
+            deferred.reject( 'cancelled' );
+        },
+        oncreate    : function( creator, response ){
+            modal.hide();
+            deferred.resolve( response );
+        }
+    });
+
+    creator = new CreatorClass( options );
+    modal.show({
+        title   : options.title || _l( 'Create a collection' ),
+        body    : creator.$el,
+        width   : '80%',
+        height  : '100%',
+        closing_events: true
+    });
+    creator.render();
+    window._collectionCreator = creator;
+
+    //TODO: remove modal header
+    return deferred;
+};
+
+/** List collection flavor of collectionCreatorModal. */
+var listCollectionCreatorModal = function _listCollectionCreatorModal( elements, options ){
+    options = options || {};
+    options.title = _l( 'Create a collection from a list of datasets' );
+    return collectionCreatorModal( elements, options, ListCollectionCreator );
+};
+
+
+//==============================================================================
+/** Use a modal to create a list collection, then add it to the given history contents.
+ *  @returns {Deferred} resolved when the collection is added to the history.
+ */
+function createListCollection( contents ){
+    var elements = contents.toJSON(),
+        promise = listCollectionCreatorModal( elements, {
+            creationFn : function( elements, name ){
+                elements = elements.map( function( element ){
+                    return {
+                        id      : element.id,
+                        name    : element.name,
+                        //TODO: this allows for list:list even if the filter above does not - reconcile
+                        src     : ( element.history_content_type === 'dataset'? 'hda' : 'hdca' )
+                    };
+                });
+                return contents.createHDCA( elements, 'list', name );
+            }
+        });
+    return promise;
+}
+
+//==============================================================================
+    return {
+        DatasetCollectionElementView: DatasetCollectionElementView,
+        ListCollectionCreator       : ListCollectionCreator,
+
+        collectionCreatorModal      : collectionCreatorModal,
+        listCollectionCreatorModal  : listCollectionCreatorModal,
+        createListCollection        : createListCollection
+    };
+});
diff --git a/client/galaxy/scripts/mvc/collection/list-of-pairs-collection-creator.js b/client/galaxy/scripts/mvc/collection/list-of-pairs-collection-creator.js
new file mode 100644
index 0000000..23eca8a
--- /dev/null
+++ b/client/galaxy/scripts/mvc/collection/list-of-pairs-collection-creator.js
@@ -0,0 +1,1726 @@
+define([
+    "utils/levenshtein",
+    "utils/natural-sort",
+    "mvc/collection/list-collection-creator",
+    "mvc/base-mvc",
+    "utils/localization",
+    "ui/hoverhighlight"
+], function( levenshteinDistance, naturalSort, LIST_COLLECTION_CREATOR, baseMVC, _l ){
+
+'use strict';
+
+var logNamespace = 'collections';
+/* ============================================================================
+TODO:
+
+
+PROGRAMMATICALLY:
+currPanel.once( 'rendered', function(){
+    currPanel.showSelectors();
+    currPanel.selectAll();
+    _.last( currPanel.actionsPopup.options ).func();
+});
+
+============================================================================ */
+/** A view for paired datasets in the collections creator.
+ */
+var PairView = Backbone.View.extend( baseMVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    tagName     : 'li',
+    className   : 'dataset paired',
+
+    initialize : function( attributes ){
+        this.pair = attributes.pair || {};
+    },
+
+    template : _.template([
+        '<span class="forward-dataset-name flex-column"><%- pair.forward.name %></span>',
+        '<span class="pair-name-column flex-column">',
+            '<span class="pair-name"><%- pair.name %></span>',
+        '</span>',
+        '<span class="reverse-dataset-name flex-column"><%- pair.reverse.name %></span>'
+    ].join('')),
+
+    render : function(){
+        this.$el
+            .attr( 'draggable', true )
+            .data( 'pair', this.pair )
+            .html( this.template({ pair: this.pair }) )
+            .addClass( 'flex-column-container' );
+        return this;
+    },
+
+    events : {
+        'dragstart'         : '_dragstart',
+        'dragend'           : '_dragend',
+        'dragover'          : '_sendToParent',
+        'drop'              : '_sendToParent'
+    },
+
+    /** dragging pairs for re-ordering */
+    _dragstart : function( ev ){
+        ev.currentTarget.style.opacity = '0.4';
+        if( ev.originalEvent ){ ev = ev.originalEvent; }
+
+        ev.dataTransfer.effectAllowed = 'move';
+        ev.dataTransfer.setData( 'text/plain', JSON.stringify( this.pair ) );
+
+        this.$el.parent().trigger( 'pair.dragstart', [ this ] );
+    },
+
+    /** dragging pairs for re-ordering */
+    _dragend : function( ev ){
+        ev.currentTarget.style.opacity = '1.0';
+        this.$el.parent().trigger( 'pair.dragend', [ this ] );
+    },
+
+    /** manually bubble up an event to the parent/container */
+    _sendToParent : function( ev ){
+        this.$el.parent().trigger( ev );
+    },
+
+    /** string rep */
+    toString : function(){
+        return 'PairView(' + this.pair.name + ')';
+    }
+});
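+
+// Illustrative sketch: the creator below renders one PairView per pair
+// (fwd/rev being dataset JSON objects with at least a 'name'), e.g.
+//   var view = new PairView({ pair: { name: 'mate', forward: fwd, reverse: rev } });
+//   $( '.paired-columns .column-datasets' ).append( view.render().$el );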
+
+
+// ============================================================================
+/** returns an autopair function that uses the provided options.match function */
+function autoPairFnBuilder( options ){
+    options = options || {};
+    options.createPair = options.createPair || function _defaultCreatePair( params ){
+        params = params || {};
+        var a = params.listA.splice( params.indexA, 1 )[0],
+            b = params.listB.splice( params.indexB, 1 )[0],
+            aInBIndex = params.listB.indexOf( a ),
+            bInAIndex = params.listA.indexOf( b );
+        if( aInBIndex !== -1 ){ params.listB.splice( aInBIndex, 1 ); }
+        if( bInAIndex !== -1 ){ params.listA.splice( bInAIndex, 1 ); }
+        return this._pair( a, b, { silent: true });
+    };
+    // compile these here outside of the loop
+    var _regexps = [];
+    function getRegExps(){
+        if( !_regexps.length ){
+            _regexps = [
+                new RegExp( this.filters[0] ),
+                new RegExp( this.filters[1] )
+            ];
+        }
+        return _regexps;
+    }
+    // mangle params as needed
+    options.preprocessMatch = options.preprocessMatch || function _defaultPreprocessMatch( params ){
+        var regexps = getRegExps.call( this );
+        return _.extend( params, {
+            matchTo     : params.matchTo.name.replace( regexps[0], '' ),
+            possible    : params.possible.name.replace( regexps[1], '' )
+        });
+    };
+
+    return function _strategy( params ){
+        this.debug( 'autopair _strategy ---------------------------' );
+        params = params || {};
+        var listA = params.listA,
+            listB = params.listB,
+            indexA = 0, indexB,
+            bestMatch = {
+                score : 0.0,
+                index : null
+            },
+            paired = [];
+        //console.debug( 'params:', JSON.stringify( params, null, '  ' ) );
+        this.debug( 'starting list lens:', listA.length, listB.length );
+        this.debug( 'bestMatch (starting):', JSON.stringify( bestMatch, null, '  ' ) );
+
+        while( indexA < listA.length ){
+            var matchTo = listA[ indexA ];
+            bestMatch.score = 0.0;
+
+            for( indexB=0; indexB<listB.length; indexB++ ){
+                var possible = listB[ indexB ];
+                this.debug( indexA + ':' + matchTo.name );
+                this.debug( indexB + ':' + possible.name );
+
+                // no matching with self
+                if( listA[ indexA ] !== listB[ indexB ] ){
+                    bestMatch = options.match.call( this, options.preprocessMatch.call( this, {
+                        matchTo : matchTo,
+                        possible: possible,
+                        index   : indexB,
+                        bestMatch : bestMatch
+                    }));
+                    this.debug( 'bestMatch:', JSON.stringify( bestMatch, null, '  ' ) );
+                    if( bestMatch.score === 1.0 ){
+                        this.debug( 'breaking early due to perfect match' );
+                        break;
+                    }
+                }
+            }
+            var scoreThreshold = options.scoreThreshold.call( this );
+            this.debug( 'scoreThreshold:', scoreThreshold );
+            this.debug( 'bestMatch.score:', bestMatch.score );
+
+            if( bestMatch.score >= scoreThreshold ){
+                //console.debug( 'autoPairFnBuilder.strategy', listA[ indexA ].name, listB[ bestMatch.index ].name );
+                paired.push( options.createPair.call( this, {
+                    listA   : listA,
+                    indexA  : indexA,
+                    listB   : listB,
+                    indexB  : bestMatch.index
+                }));
+                //console.debug( 'list lens now:', listA.length, listB.length );
+            } else {
+                indexA += 1;
+            }
+            if( !listA.length || !listB.length ){
+                return paired;
+            }
+        }
+        this.debug( 'paired:', JSON.stringify( paired, null, '  ' ) );
+        this.debug( 'autopair _strategy ---------------------------' );
+        return paired;
+    };
+}
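+
+// Illustrative sketch: a strategy built here is meant to be mixed into the
+// creator below (it relies on this.filters and this.debug). A minimal
+// exact-match builder call (essentially autopairSimple further down) would be:
+//   var exactMatch = autoPairFnBuilder({
+//       scoreThreshold : function(){ return 1.0; },
+//       match : function( params ){
+//           return params.matchTo === params.possible
+//               ? { index: params.index, score: 1.0 }
+//               : params.bestMatch;
+//       }
+//   });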
+
+
+// ============================================================================
+/** An interface for building collections of paired datasets.
+ */
+var PairedCollectionCreator = Backbone.View.extend( baseMVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    className: 'list-of-pairs-collection-creator collection-creator flex-row-container',
+
+    /** set up initial options, instance vars, behaviors, and autopair (if set to do so) */
+    initialize : function( attributes ){
+        this.metric( 'PairedCollectionCreator.initialize', attributes );
+        //this.debug( '-- PairedCollectionCreator:', attributes );
+
+        attributes = _.defaults( attributes, {
+            datasets            : [],
+            filters             : this.DEFAULT_FILTERS,
+            automaticallyPair   : true,
+            strategy            : 'lcs',
+            matchPercentage     : 0.9,
+            twoPassAutopairing  : true
+        });
+
+        /** unordered, original list */
+        this.initialList = attributes.datasets;
+
+        /** is this from a history? if so, what's its id? */
+        this.historyId = attributes.historyId;
+
+        /** which filters should be used initially? (String[2] or name in commonFilters) */
+        this.filters = this.commonFilters[ attributes.filters ] || this.commonFilters[ this.DEFAULT_FILTERS ];
+        if( _.isArray( attributes.filters ) ){
+            this.filters = attributes.filters;
+        }
+
+        /** try to auto pair the unpaired datasets on load? */
+        this.automaticallyPair = attributes.automaticallyPair;
+
+        /** what method to use for auto pairing (will be passed aggression level) */
+        this.strategy = this.strategies[ attributes.strategy ] || this.strategies[ this.DEFAULT_STRATEGY ];
+        if( _.isFunction( attributes.strategy ) ){
+            this.strategy = attributes.strategy;
+        }
+
+        /** minimum similarity score (0.0-1.0) required to autopair two datasets */
+        this.matchPercentage = attributes.matchPercentage;
+
+        /** try to autopair using simple first, then this.strategy on the remainder */
+        this.twoPassAutopairing = attributes.twoPassAutopairing;
+
+        /** remove file extensions (\.*) from created pair names? */
+        this.removeExtensions = true;
+        //this.removeExtensions = false;
+
+        /** fn to call when the cancel button is clicked (scoped to this) - if falsy, no btn is displayed */
+        this.oncancel = attributes.oncancel;
+        /** fn to call when the collection is created (scoped to this) */
+        this.oncreate = attributes.oncreate;
+
+        /** distance from the list's top/bottom (in px) within which dragging triggers autoscroll */
+        this.autoscrollDist = attributes.autoscrollDist || 24;
+
+        /** is the unpaired panel shown? */
+        this.unpairedPanelHidden = false;
+        /** is the paired panel shown? */
+        this.pairedPanelHidden = false;
+
+        /** DOM elements currently being dragged */
+        this.$dragging = null;
+
+        /** Used for blocking UI events during ajax/operations (don't post twice) */
+        this.blocking = false;
+
+        this._setUpBehaviors();
+        this._dataSetUp();
+    },
+
+    /** map of common filter pairs by name */
+    commonFilters : {
+        illumina        : [ '_1', '_2' ],
+        Rs              : [ '_R1', '_R2' ]
+    },
+    /** which commonFilter to use by default */
+    DEFAULT_FILTERS : 'illumina',
+
+    /** map of name->fn for autopairing */
+    strategies : {
+        'simple'        : 'autopairSimple',
+        'lcs'           : 'autopairLCS',
+        'levenshtein'   : 'autopairLevenshtein'
+    },
+    /** default autopair strategy name */
+    DEFAULT_STRATEGY : 'lcs',
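+
+    // Illustrative option sketches (assuming a 'datasets' array as above):
+    // filters may be a commonFilters key or an explicit pattern pair, and
+    // strategy a strategies key, e.g.
+    //   new PairedCollectionCreator({ datasets: datasets, filters: 'Rs' })
+    //   new PairedCollectionCreator({ datasets: datasets, filters: [ '_fwd', '_rev' ], strategy: 'levenshtein' })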
+
+    // ------------------------------------------------------------------------ process raw list
+    /** set up main data: cache initialList, sort, and autopair */
+    _dataSetUp : function(){
+        //this.debug( '-- _dataSetUp' );
+
+        this.paired = [];
+        this.unpaired = [];
+
+        this.selectedIds = [];
+
+        // sort initial list, add ids if needed, and save new working copy to unpaired
+        this._sortInitialList();
+        this._ensureIds();
+        this.unpaired = this.initialList.slice( 0 );
+
+        if( this.automaticallyPair ){
+            this.autoPair();
+            this.once( 'rendered:initial', function(){
+                this.trigger( 'autopair' );
+            });
+        }
+    },
+
+    /** sort initial list */
+    _sortInitialList : function(){
+        //this.debug( '-- _sortInitialList' );
+        this._sortDatasetList( this.initialList );
+    },
+
+    /** sort a list of datasets */
+    _sortDatasetList : function( list ){
+        // currently only natural sort by name
+        list.sort( function( a, b ){ return naturalSort( a.name, b.name ); });
+        return list;
+    },
+
+    /** add ids to dataset objs in initial list if none */
+    _ensureIds : function(){
+        this.initialList.forEach( function( dataset ){
+            if( !dataset.hasOwnProperty( 'id' ) ){
+                dataset.id = _.uniqueId();
+            }
+        });
+        return this.initialList;
+    },
+
+    /** split initial list into two lists, those that pass forward filters & those passing reverse */
+    _splitByFilters : function(){
+        var regexFilters = this.filters.map( function( stringFilter ){
+                return new RegExp( stringFilter );
+            }),
+            split = [ [], [] ];
+
+        function _filter( unpaired, filter ){
+            return filter.test( unpaired.name );
+            //return unpaired.name.indexOf( filter ) >= 0;
+        }
+        this.unpaired.forEach( function _filterEach( unpaired ){
+            // 90% of the time this seems to work, but:
+            //TODO: this treats *all* strings as regex which may confuse people - possibly check for // surrounding?
+            //  would need explanation in help as well
+            regexFilters.forEach( function( filter, i ){
+                if( _filter( unpaired, filter ) ){
+                    split[i].push( unpaired );
+                }
+            });
+        });
+        return split;
+    },
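+
+    // e.g. with the default illumina filters [ '_1', '_2' ] and unpaired names
+    // [ 'a_1.fq', 'a_2.fq', 'notes.txt' ], split becomes [ [ a_1.fq ], [ a_2.fq ] ];
+    // 'notes.txt' matches neither pattern and so appears in neither column.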
+
+    /** add a dataset to the unpaired list in its proper order */
+    _addToUnpaired : function( dataset ){
+        // currently, unpaired is natural sorted by name, use binary search to find insertion point
+        var binSearchSortedIndex = function( low, hi ){
+            if( low === hi ){ return low; }
+
+            var mid = Math.floor( ( hi - low ) / 2 ) + low,
+                compared = naturalSort( dataset.name, this.unpaired[ mid ].name );
+
+            if( compared < 0 ){
+                return binSearchSortedIndex( low, mid );
+            } else if( compared > 0 ){
+                return binSearchSortedIndex( mid + 1, hi );
+            }
+            // walk the equal to find the last
+            while( this.unpaired[ mid ] && this.unpaired[ mid ].name === dataset.name ){ mid++; }
+            return mid;
+
+        }.bind( this );
+
+        this.unpaired.splice( binSearchSortedIndex( 0, this.unpaired.length ), 0, dataset );
+    },
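+
+    // e.g. inserting a dataset named 'c' into names [ 'a', 'b', 'd' ]: the search
+    // narrows (0,3) -> (2,3) -> (2,2) and returns 2, so 'c' is spliced in before 'd'.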
+
+    // ------------------------------------------------------------------------ auto pairing
+    /** two passes to automatically create pairs:
+     *  first autopairSimple (exact matching), then the fn named in strategy
+     */
+    autoPair : function( strategy ){
+        // split first using exact matching
+        var split = this._splitByFilters(),
+            paired = [];
+        if( this.twoPassAutopairing ){
+            paired = this.autopairSimple({
+                listA : split[0],
+                listB : split[1]
+            });
+            split = this._splitByFilters();
+        }
+
+        // uncomment to see printlns while running tests
+        //this.debug = function(){ console.log.apply( console, arguments ); };
+
+        // then try the remainder with something less strict
+        strategy = strategy || this.strategy;
+        paired = paired.concat( this[ strategy ].call( this, {
+            listA : split[0],
+            listB : split[1]
+        }));
+        return paired;
+    },
+
+    /** autopair by exact match */
+    autopairSimple : autoPairFnBuilder({
+        scoreThreshold: function(){ return 1.0; },
+        match : function _match( params ){
+            params = params || {};
+            if( params.matchTo === params.possible ){
+                return {
+                    index: params.index,
+                    score: 1.0
+                };
+            }
+            return params.bestMatch;
+        }
+    }),
+
+    /** autopair by levenshtein edit distance scoring */
+    autopairLevenshtein : autoPairFnBuilder({
+        scoreThreshold: function(){ return this.matchPercentage; },
+        match : function _matches( params ){
+            params = params || {};
+            var distance = levenshteinDistance( params.matchTo, params.possible ),
+                score = 1.0 - ( distance / ( Math.max( params.matchTo.length, params.possible.length ) ) );
+            if( score > params.bestMatch.score ){
+                return {
+                    index: params.index,
+                    score: score
+                };
+            }
+            return params.bestMatch;
+        }
+    }),
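+
+    // Worked example: 'sampleA.fq' vs 'sampleB.fq' have an edit distance of 1
+    // over a max length of 10, scoring 1.0 - 1/10 = 0.9 - just meeting the
+    // default matchPercentage of 0.9.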
+
+    /** autopair by longest common substrings scoring */
+    autopairLCS : autoPairFnBuilder({
+        scoreThreshold: function(){ return this.matchPercentage; },
+        match : function _matches( params ){
+            params = params || {};
+            var match = this._naiveStartingAndEndingLCS( params.matchTo, params.possible ).length,
+                score = match / ( Math.max( params.matchTo.length, params.possible.length ) );
+            if( score > params.bestMatch.score ){
+                return {
+                    index: params.index,
+                    score: score
+                };
+            }
+            return params.bestMatch;
+        }
+    }),
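+
+    // Worked example: after filter stripping, 'mate_1.fq' and 'mate_2.fq' both
+    // become 'mate.fq'; the starting/ending LCS covers all 7 characters, so the
+    // score is 7/7 = 1.0 and the pair is accepted.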
+
+    /** return the concatenated longest common prefix and suffix of two strings */
+    _naiveStartingAndEndingLCS : function( s1, s2 ){
+        var fwdLCS = '',
+            revLCS = '',
+            i = 0, j = 0;
+        while( i < s1.length && i < s2.length ){
+            if( s1[ i ] !== s2[ i ] ){
+                break;
+            }
+            fwdLCS += s1[ i ];
+            i += 1;
+        }
+        if( i === s1.length ){ return s1; }
+        if( i === s2.length ){ return s2; }
+
+        i = ( s1.length - 1 );
+        j = ( s2.length - 1 );
+        while( i >= 0 && j >= 0 ){
+            if( s1[ i ] !== s2[ j ] ){
+                break;
+            }
+            revLCS = [ s1[ i ], revLCS ].join( '' );
+            i -= 1;
+            j -= 1;
+        }
+        return fwdLCS + revLCS;
+    },
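+
+    // e.g. _naiveStartingAndEndingLCS( 'mate_1.fq', 'mate_2.fq' ) walks 'mate_'
+    // from the front and '.fq' from the back, returning 'mate_.fq'.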
+
+    // ------------------------------------------------------------------------ pairing / unpairing
+    /** create a pair from fwd and rev, removing them from unpaired, and placing the new pair in paired */
+    _pair : function( fwd, rev, options ){
+        options = options || {};
+        this.debug( '_pair:', fwd, rev );
+        var pair = this._createPair( fwd, rev, options.name );
+        this.paired.push( pair );
+        this.unpaired = _.without( this.unpaired, fwd, rev );
+        if( !options.silent ){
+            this.trigger( 'pair:new', pair );
+        }
+        return pair;
+    },
+
+    /** create a pair Object from fwd and rev, adding the name attribute (will guess if not given) */
+    _createPair : function( fwd, rev, name ){
+        // ensure existence and don't pair something with itself
+        if( !( fwd && rev ) || ( fwd === rev ) ){
+            throw new Error( 'Bad pairing: ' + [ JSON.stringify( fwd ), JSON.stringify( rev ) ] );
+        }
+        name = name || this._guessNameForPair( fwd, rev );
+        return { forward : fwd, name : name, reverse : rev };
+    },
+
+    /** try to find a good pair name for the given fwd and rev datasets */
+    _guessNameForPair : function( fwd, rev, removeExtensions ){
+        removeExtensions = ( removeExtensions !== undefined )?( removeExtensions ):( this.removeExtensions );
+        var fwdName = fwd.name,
+            revName = rev.name,
+            lcs = this._naiveStartingAndEndingLCS(
+                fwdName.replace( new RegExp( this.filters[0] ), '' ),
+                revName.replace( new RegExp( this.filters[1] ), '' )
+            );
+        if( removeExtensions ){
+            var lastDotIndex = lcs.lastIndexOf( '.' );
+            if( lastDotIndex > 0 ){
+                var extension = lcs.slice( lastDotIndex, lcs.length );
+                lcs = lcs.replace( extension, '' );
+                fwdName = fwdName.replace( extension, '' );
+                revName = revName.replace( extension, '' );
+            }
+        }
+        return lcs || ( fwdName + ' & ' + revName );
+    },
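+
+    // e.g. with filters [ '_1', '_2' ], 'mate_1.fq' and 'mate_2.fq' both strip to
+    // 'mate.fq', whose LCS is itself; removeExtensions then trims '.fq', so the
+    // guessed pair name is 'mate'.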
+
+    /** unpair a pair, removing it from paired, and adding the fwd,rev datasets back into unpaired */
+    _unpair : function( pair, options ){
+        options = options || {};
+        if( !pair ){
+            throw new Error( 'Bad pair: ' + JSON.stringify( pair ) );
+        }
+        this.paired = _.without( this.paired, pair );
+        this._addToUnpaired( pair.forward );
+        this._addToUnpaired( pair.reverse );
+
+        if( !options.silent ){
+            this.trigger( 'pair:unpair', [ pair ] );
+        }
+        return pair;
+    },
+
+    /** unpair all paired datasets */
+    unpairAll : function(){
+        var pairs = [];
+        while( this.paired.length ){
+            pairs.push( this._unpair( this.paired[ 0 ], { silent: true }) );
+        }
+        this.trigger( 'pair:unpair', pairs );
+    },
+
+    // ------------------------------------------------------------------------ API
+    /** convert a pair into JSON compatible with the collections API */
+    _pairToJSON : function( pair, src ){
+        src = src || 'hda';
+        //TODO: consider making this the pair structure when created instead
+        return {
+            collection_type : 'paired',
+            src             : 'new_collection',
+            name            : pair.name,
+            element_identifiers : [{
+                name    : 'forward',
+                id      : pair.forward.id,
+                src     : src
+            }, {
+                name    : 'reverse',
+                id      : pair.reverse.id,
+                src     : src
+            }]
+        };
+    },
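+
+    // e.g. (illustrative ids) a pair { name: 'mate', forward: { id: 'f1' }, reverse: { id: 'r2' } }
+    // serializes to:
+    //   { collection_type: 'paired', src: 'new_collection', name: 'mate',
+    //     element_identifiers: [ { name: 'forward', id: 'f1', src: 'hda' },
+    //                            { name: 'reverse', id: 'r2', src: 'hda' } ] }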
+
+    /** create the collection via the API
+     *  @returns {jqXHR} the jQuery ajax request
+     */
+    createList : function( name ){
+        var creator = this,
+            url = Galaxy.root + 'api/histories/' + this.historyId + '/contents/dataset_collections';
+
+        //TODO: use ListPairedCollection.create()
+        var ajaxData = {
+            type            : 'dataset_collection',
+            collection_type : 'list:paired',
+            name            : _.escape( name || creator.$( '.collection-name' ).val() ),
+            element_identifiers : creator.paired.map( function( pair ){
+                return creator._pairToJSON( pair );
+            })
+
+        };
+        //this.debug( JSON.stringify( ajaxData ) );
+        creator.blocking = true;
+        return jQuery.ajax( url, {
+            type        : 'POST',
+            contentType : 'application/json',
+            dataType    : 'json',
+            data        : JSON.stringify( ajaxData )
+        })
+        .always( function(){
+            creator.blocking = false;
+        })
+        .fail( function( xhr, status, message ){
+            creator._ajaxErrHandler( xhr, status, message );
+        })
+        .done( function( response, message, xhr ){
+            //this.info( 'ok', response, message, xhr );
+            creator.trigger( 'collection:created', response, message, xhr );
+            creator.metric( 'collection:created', response );
+            if( typeof creator.oncreate === 'function' ){
+                creator.oncreate.call( this, response, message, xhr );
+            }
+        });
+    },
+
+    /** handle ajax errors with feedback and details to the user (if available) */
+    _ajaxErrHandler : function( xhr, status, message ){
+        this.error( xhr, status, message );
+        var content = _l( 'An error occurred while creating this collection' );
+        if( xhr ){
+            if( xhr.readyState === 0 && xhr.status === 0 ){
+                content += ': ' + _l( 'Galaxy could not be reached and may be updating.' )
+                    + _l( ' Try again in a few minutes.' );
+            } else if( xhr.responseJSON ){
+                content += '<br /><pre>' + JSON.stringify( xhr.responseJSON ) + '</pre>';
+            } else {
+                content += ': ' + message;
+            }
+        }
+        this._showAlert( content, 'alert-danger' );
+    },
+
+    // ------------------------------------------------------------------------ rendering
+    /** render the entire interface */
+    render : function( speed, callback ){
+        //this.debug( '-- _render' );
+        this.$el.empty().html( PairedCollectionCreator.templates.main() );
+        this._renderHeader( speed );
+        this._renderMiddle( speed );
+        this._renderFooter( speed );
+        this._addPluginComponents();
+        this.trigger( 'rendered', this );
+        return this;
+    },
+
+    /** render the header section */
+    _renderHeader : function( speed, callback ){
+        //this.debug( '-- _renderHeader' );
+        var $header = this.$( '.header' ).empty().html( PairedCollectionCreator.templates.header() )
+            .find( '.help-content' ).prepend( $( PairedCollectionCreator.templates.helpContent() ) );
+
+        this._renderFilters();
+        return $header;
+    },
+    /** fill the filter inputs with the filter values */
+    _renderFilters : function(){
+        return    this.$( '.forward-column .column-header input' ).val( this.filters[0] )
+            .add( this.$( '.reverse-column .column-header input' ).val( this.filters[1] ) );
+    },
+
+    /** render the middle including unpaired and paired sections (which may be hidden) */
+    _renderMiddle : function( speed, callback ){
+        var $middle = this.$( '.middle' ).empty().html( PairedCollectionCreator.templates.middle() );
+
+        // (re-) hide the un/paired panels based on instance vars
+        if( this.unpairedPanelHidden ){
+            this.$( '.unpaired-columns' ).hide();
+        } else if( this.pairedPanelHidden ){
+            this.$( '.paired-columns' ).hide();
+        }
+
+        this._renderUnpaired();
+        this._renderPaired();
+        return $middle;
+    },
+    /** render the unpaired section, showing datasets according to the filters, and update the unpaired counts */
+    _renderUnpaired : function( speed, callback ){
+        //this.debug( '-- _renderUnpaired' );
+        var creator = this,
+            $fwd, $rev, $prd = [],
+            split = this._splitByFilters();
+        // update unpaired counts
+        this.$( '.forward-column .title' )
+            .text([ split[0].length, _l( 'unpaired forward' ) ].join( ' ' ));
+        this.$( '.forward-column .unpaired-info' )
+            .text( this._renderUnpairedDisplayStr( this.unpaired.length - split[0].length ) );
+        this.$( '.reverse-column .title' )
+            .text([ split[1].length, _l( 'unpaired reverse' ) ].join( ' ' ));
+        this.$( '.reverse-column .unpaired-info' )
+            .text( this._renderUnpairedDisplayStr( this.unpaired.length - split[1].length ) );
+
+        this.$( '.unpaired-columns .column-datasets' ).empty();
+
+        // show/hide the auto pair button if any unpaired are left
+        this.$( '.autopair-link' ).toggle( this.unpaired.length !== 0 );
+        if( this.unpaired.length === 0 ){
+            this._renderUnpairedEmpty();
+            return;
+        }
+
+        // create the dataset dom arrays
+        $rev = split[1].map( function( dataset, i ){
+            // if there'll be a fwd dataset across the way, add a button to pair the row
+            if( ( split[0][ i ] !== undefined )
+            &&  ( split[0][ i ] !== dataset ) ){
+                $prd.push( creator._renderPairButton() );
+            }
+            return creator._renderUnpairedDataset( dataset );
+        });
+        $fwd = split[0].map( function( dataset ){
+            return creator._renderUnpairedDataset( dataset );
+        });
+
+        if( !$fwd.length && !$rev.length ){
+            this._renderUnpairedNotShown();
+            return;
+        }
+        // add to the appropriate columns
+        //TODO: not the best way to render - consider rendering the entire unpaired-columns section in a fragment
+        //  and swapping that out
+        this.$( '.unpaired-columns .forward-column .column-datasets' ).append( $fwd )
+            .add( this.$( '.unpaired-columns .paired-column .column-datasets' ).append( $prd ) )
+            .add( this.$( '.unpaired-columns .reverse-column .column-datasets' ).append( $rev ) );
+        this._adjUnpairedOnScrollbar();
+    },
+    /** return a string to display the count of filtered out datasets */
+    _renderUnpairedDisplayStr : function( numFiltered ){
+        return [ '(', numFiltered, ' ', _l( 'filtered out' ), ')' ].join('');
+    },
+    /** return an unattached jQuery DOM element to represent an unpaired dataset */
+    _renderUnpairedDataset : function( dataset ){
+        //TODO: to underscore template
+        return $( '<li/>')
+            .attr( 'id', 'dataset-' + dataset.id )
+            .addClass( 'dataset unpaired' )
+            .attr( 'draggable', true )
+            .addClass( dataset.selected? 'selected': '' )
+            .append( $( '<span/>' ).addClass( 'dataset-name' ).text( dataset.name ) )
+            //??
+            .data( 'dataset', dataset );
+    },
+    /** render the button that may go between unpaired datasets, allowing the user to pair a row */
+    _renderPairButton : function(){
+        //TODO: *not* a dataset - don't pretend like it is
+        return $( '<li/>').addClass( 'dataset unpaired' )
+            .append( $( '<span/>' ).addClass( 'dataset-name' ).text( _l( 'Pair these datasets' ) ) );
+    },
+    /** render a message for when no unpaired datasets remain */
+    _renderUnpairedEmpty : function(){
+        //this.debug( '-- renderUnpairedEmpty' );
+        var $msg = $( '<div class="empty-message"></div>' )
+            .text( '(' + _l( 'no remaining unpaired datasets' ) + ')' );
+        this.$( '.unpaired-columns .paired-column .column-datasets' ).empty().prepend( $msg );
+        return $msg;
+    },
+    /** a message to display when no unpaired can be shown with the current filters */
+    _renderUnpairedNotShown : function(){
+        //this.debug( '-- _renderUnpairedNotShown' );
+        var $msg = $( '<div class="empty-message"></div>' )
+            .text( '(' + _l( 'no datasets were found matching the current filters' ) + ')' );
+        this.$( '.unpaired-columns .paired-column .column-datasets' ).empty().prepend( $msg );
+        return $msg;
+    },
+    /** try to detect whether the unpaired section has a scrollbar and adjust the left column to better center all columns */
+    _adjUnpairedOnScrollbar : function(){
+        var $unpairedColumns = this.$( '.unpaired-columns' ).last(),
+            $firstDataset = this.$( '.unpaired-columns .reverse-column .dataset' ).first();
+        if( !$firstDataset.length ){ return; }
+        var ucRight = $unpairedColumns.offset().left + $unpairedColumns.outerWidth(),
+            dsRight = $firstDataset.offset().left + $firstDataset.outerWidth(),
+            rightDiff = Math.floor( ucRight ) - Math.floor( dsRight );
+        //this.debug( 'rightDiff:', ucRight, '-', dsRight, '=', rightDiff );
+        this.$( '.unpaired-columns .forward-column' )
+            .css( 'margin-left', ( rightDiff > 0 )? rightDiff: 0 );
+    },
+
+    /** render the paired section and update counts of paired datasets */
+    _renderPaired : function( speed, callback ){
+        //this.debug( '-- _renderPaired' );
+        this.$( '.paired-column-title .title' ).text([ this.paired.length, _l( 'paired' ) ].join( ' ' ) );
+        // show/hide the unpair all link
+        this.$( '.unpair-all-link' ).toggle( this.paired.length !== 0 );
+        if( this.paired.length === 0 ){
+            this._renderPairedEmpty();
+            return;
+            //TODO: would be best to return here (the $columns)
+        } else {
+            // show/hide 'remove extensions link' when any paired and they seem to have extensions
+            this.$( '.remove-extensions-link' ).show();
+        }
+
+        this.$( '.paired-columns .column-datasets' ).empty();
+        var creator = this;
+        this.paired.forEach( function( pair, i ){
+            //TODO: cache these?
+            var pairView = new PairView({ pair: pair });
+            creator.$( '.paired-columns .column-datasets' )
+                .append( pairView.render().$el )
+                .append([
+                    '<button class="unpair-btn">',
+                        '<span class="fa fa-unlink" title="', _l( 'Unpair' ), '"></span>',
+                    '</button>'
+                ].join( '' ));
+        });
+    },
+    /** render a message for when no pairs exist yet */
+    _renderPairedEmpty : function(){
+        var $msg = $( '<div class="empty-message"></div>' )
+            .text( '(' + _l( 'no paired datasets yet' ) + ')' );
+        this.$( '.paired-columns .column-datasets' ).empty().prepend( $msg );
+        return $msg;
+    },
+
+    /** render the footer, completion controls, and cancel controls */
+    _renderFooter : function( speed, callback ){
+        var $footer = this.$( '.footer' ).empty().html( PairedCollectionCreator.templates.footer() );
+        this.$( '.remove-extensions' ).prop( 'checked', this.removeExtensions );
+        if( typeof this.oncancel === 'function' ){
+            this.$( '.cancel-create.btn' ).show();
+        }
+        return $footer;
+    },
+
+    /** add any jQuery/bootstrap/custom plugins to the rendered elements */
+    _addPluginComponents : function(){
+        this._chooseFiltersPopover( '.choose-filters-link' );
+        this.$( '.help-content i' ).hoverhighlight( '.collection-creator', 'rgba( 64, 255, 255, 1.0 )' );
+    },
+
+    /** build a filter selection popover allowing selection of common filter pairs */
+    _chooseFiltersPopover : function( selector ){
+        function filterChoice( val1, val2 ){
+            return [
+                '<button class="filter-choice btn" ',
+                        'data-forward="', val1, '" data-reverse="', val2, '">',
+                    _l( 'Forward' ), ': ', val1, ', ',
+                    _l( 'Reverse' ), ': ', val2,
+                '</button>'
+            ].join('');
+        }
+        var $popoverContent = $( _.template([
+            '<div class="choose-filters">',
+                '<div class="help">',
+                    _l( 'Choose from the following filters to change which unpaired reads are shown in the display' ),
+                ':</div>',
+                _.values( this.commonFilters ).map( function( filterSet ){
+                    return filterChoice( filterSet[0], filterSet[1] );
+                }).join( '' ),
+            '</div>'
+        ].join(''))({}));
+
+        return this.$( selector ).popover({
+            container   : '.collection-creator',
+            placement   : 'bottom',
+            html        : true,
+            //animation   : false,
+            content     : $popoverContent
+        });
+    },
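+
+    // Editor's note (assumption, for illustration): `this.commonFilters`, defined
+    // elsewhere, is taken to be a map of preset filter pairs keyed by name, e.g.:
+    //   commonFilters : {
+    //       illumina : [ '_1', '_2' ],
+    //       Rs       : [ '_R1', '_R2' ]
+    //   }
+    // Each two-element array becomes one "Forward: x, Reverse: y" button above.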
+
+    /** add a validation warning to the given element(s); if clear is truthy, remove the warning instead */
+    _validationWarning : function( what, clear ){
+        var VALIDATION_CLASS = 'validation-warning';
+        if( what === 'name' ){
+            what = this.$( '.collection-name' ).add( this.$( '.collection-name-prompt' ) );
+            this.$( '.collection-name' ).focus().select();
+        }
+        if( clear ){
+            what = what || this.$( '.' + VALIDATION_CLASS );
+            what.removeClass( VALIDATION_CLASS );
+        } else {
+            what.addClass( VALIDATION_CLASS );
+        }
+    },
+
+    // ------------------------------------------------------------------------ events
+    /** set up event handlers on self */
+    _setUpBehaviors : function(){
+        this.once( 'rendered', function(){
+            this.trigger( 'rendered:initial', this );
+        });
+
+        this.on( 'pair:new', function(){
+            //TODO: ideally only re-render the columns (or even elements) involved
+            this._renderUnpaired();
+            this._renderPaired();
+
+            // scroll to bottom where new pairs are added
+            //TODO: this doesn't seem to work - innerHeight sticks at 133...
+            //  may have to do with improper flex columns
+            //var $pairedView = this.$( '.paired-columns' );
+            //$pairedView.scrollTop( $pairedView.innerHeight() );
+            //this.debug( $pairedView.height() )
+            this.$( '.paired-columns' ).scrollTop( 8000000 );
+        });
+        this.on( 'pair:unpair', function( pairs ){
+            //TODO: ideally only re-render the columns (or even elements) involved
+            this._renderUnpaired();
+            this._renderPaired();
+            this.splitView();
+        });
+
+        this.on( 'filter-change', function(){
+            this.filters = [
+                this.$( '.forward-unpaired-filter input' ).val(),
+                this.$( '.reverse-unpaired-filter input' ).val()
+            ];
+            this.metric( 'filter-change', this.filters );
+            this._renderFilters();
+            this._renderUnpaired();
+        });
+
+        this.on( 'autopair', function(){
+            this._renderUnpaired();
+            this._renderPaired();
+
+            var message, msgClass = null;
+            if( this.paired.length ){
+                msgClass = 'alert-success';
+                message = this.paired.length + ' ' + _l( 'pairs created' );
+                if( !this.unpaired.length ){
+                    message += ': ' + _l( 'all datasets have been successfully paired' );
+                    this.hideUnpaired();
+                    this.$( '.collection-name' ).focus();
+                }
+            } else {
+                message = _l([
+                    'Could not automatically create any pairs from the given dataset names.',
+                    'You may want to choose or enter different filters and try auto-pairing again.',
+                    'Close this message using the X on the right to view more help.'
+                ].join( ' ' ));
+            }
+            this._showAlert( message, msgClass );
+        });
+
+        //this.on( 'all', function(){
+        //    this.info( arguments );
+        //});
+        return this;
+    },
+
+    events : {
+        // header
+        'click .more-help'                          : '_clickMoreHelp',
+        'click .less-help'                          : '_clickLessHelp',
+        'click .header .alert button'               : '_hideAlert',
+        'click .forward-column .column-title'       : '_clickShowOnlyUnpaired',
+        'click .reverse-column .column-title'       : '_clickShowOnlyUnpaired',
+        'click .unpair-all-link'                    : '_clickUnpairAll',
+        //TODO: this seems kinda backasswards - re-sending jq event as a backbone event, can we listen directly?
+        'change .forward-unpaired-filter input'     : function( ev ){ this.trigger( 'filter-change' ); },
+        'focus .forward-unpaired-filter input'      : function( ev ){ $( ev.currentTarget ).select(); },
+        'click .autopair-link'                      : '_clickAutopair',
+        'click .choose-filters .filter-choice'      : '_clickFilterChoice',
+        'click .clear-filters-link'                 : '_clearFilters',
+        'change .reverse-unpaired-filter input'     : function( ev ){ this.trigger( 'filter-change' ); },
+        'focus .reverse-unpaired-filter input'      : function( ev ){ $( ev.currentTarget ).select(); },
+        // unpaired
+        'click .forward-column .dataset.unpaired'   : '_clickUnpairedDataset',
+        'click .reverse-column .dataset.unpaired'   : '_clickUnpairedDataset',
+        'click .paired-column .dataset.unpaired'    : '_clickPairRow',
+        'click .unpaired-columns'                   : 'clearSelectedUnpaired',
+        'mousedown .unpaired-columns .dataset'      : '_mousedownUnpaired',
+        // divider
+        'click .paired-column-title'                : '_clickShowOnlyPaired',
+        'mousedown .flexible-partition-drag'        : '_startPartitionDrag',
+        // paired
+        'click .paired-columns .dataset.paired'     : 'selectPair',
+        'click .paired-columns'                     : 'clearSelectedPaired',
+        'click .paired-columns .pair-name'          : '_clickPairName',
+        'click .unpair-btn'                         : '_clickUnpair',
+        // paired - drop target
+        //'dragenter .paired-columns'                 : '_dragenterPairedColumns',
+        //'dragleave .paired-columns .column-datasets': '_dragleavePairedColumns',
+        'dragover .paired-columns .column-datasets' : '_dragoverPairedColumns',
+        'drop .paired-columns .column-datasets'     : '_dropPairedColumns',
+
+        'pair.dragstart .paired-columns .column-datasets' : '_pairDragstart',
+        'pair.dragend   .paired-columns .column-datasets' : '_pairDragend',
+
+        // footer
+        'change .remove-extensions'                 : function( ev ){ this.toggleExtensions(); },
+        'change .collection-name'                   : '_changeName',
+        'keydown .collection-name'                  : '_nameCheckForEnter',
+        'click .cancel-create'                      : function( ev ){
+            if( typeof this.oncancel === 'function' ){
+                this.oncancel.call( this );
+            }
+        },
+        'click .create-collection'                  : '_clickCreate'
+    },
+
+    // ........................................................................ header
+    /** expand help */
+    _clickMoreHelp : function( ev ){
+        this.$( '.main-help' ).addClass( 'expanded' );
+        this.$( '.more-help' ).hide();
+    },
+    /** collapse help */
+    _clickLessHelp : function( ev ){
+        this.$( '.main-help' ).removeClass( 'expanded' );
+        this.$( '.more-help' ).show();
+    },
+
+    /** show an alert at the top of the interface containing message (alertClass is bootstrap's alert-*) */
+    _showAlert : function( message, alertClass ){
+        alertClass = alertClass || 'alert-danger';
+        this.$( '.main-help' ).hide();
+        this.$( '.header .alert' ).attr( 'class', 'alert alert-dismissable' ).addClass( alertClass ).show()
+            .find( '.alert-message' ).html( message );
+    },
+    /** hide the alerts at the top */
+    _hideAlert : function( message ){
+        this.$( '.main-help' ).show();
+        this.$( '.header .alert' ).hide();
+    },
+
+    /** toggle between showing only unpaired and split view */
+    _clickShowOnlyUnpaired : function( ev ){
+        //this.debug( 'click unpaired', ev.currentTarget );
+        if( this.$( '.paired-columns' ).is( ':visible' ) ){
+            this.hidePaired();
+        } else {
+            this.splitView();
+        }
+    },
+    /** toggle between showing only paired and split view */
+    _clickShowOnlyPaired : function( ev ){
+        //this.debug( 'click paired' );
+        if( this.$( '.unpaired-columns' ).is( ':visible' ) ){
+            this.hideUnpaired();
+        } else {
+            this.splitView();
+        }
+    },
+
+    /** hide unpaired, show paired */
+    hideUnpaired : function( speed, callback ){
+        this.unpairedPanelHidden = true;
+        this.pairedPanelHidden = false;
+        this._renderMiddle( speed, callback );
+    },
+    /** hide paired, show unpaired */
+    hidePaired : function( speed, callback ){
+        this.unpairedPanelHidden = false;
+        this.pairedPanelHidden = true;
+        this._renderMiddle( speed, callback );
+    },
+    /** show both paired and unpaired (splitting evenly) */
+    splitView : function( speed, callback ){
+        this.unpairedPanelHidden = this.pairedPanelHidden = false;
+        this._renderMiddle( speed, callback );
+        return this;
+    },
+
+    /** unpair all currently paired datasets */
+    _clickUnpairAll : function( ev ){
+        this.metric( 'unpairAll' );
+        this.unpairAll();
+    },
+
+    /** attempt to autopair */
+    _clickAutopair : function( ev ){
+        var paired = this.autoPair();
+        this.metric( 'autopair', paired.length, this.unpaired.length );
+        this.trigger( 'autopair' );
+    },
+
+    /** set the filters based on the data attributes of the button click target */
+    _clickFilterChoice : function( ev ){
+        var $selected = $( ev.currentTarget );
+        this.$( '.forward-unpaired-filter input' ).val( $selected.data( 'forward' ) );
+        this.$( '.reverse-unpaired-filter input' ).val( $selected.data( 'reverse' ) );
+        this._hideChooseFilters();
+        this.trigger( 'filter-change' );
+    },
+
+    /** hide the choose filters popover */
+    _hideChooseFilters : function(){
+        //TODO: update bootstrap and remove the following hack
+        //  see also: https://github.com/twbs/bootstrap/issues/10260
+        this.$( '.choose-filters-link' ).popover( 'hide' );
+        this.$( '.popover' ).css( 'display', 'none' );
+    },
+
+    /** clear both filters */
+    _clearFilters : function( ev ){
+        this.$( '.forward-unpaired-filter input' ).val( '' );
+        this.$( '.reverse-unpaired-filter input' ).val( '' );
+        this.trigger( 'filter-change' );
+    },
+
+    // ........................................................................ unpaired
+    /** select an unpaired dataset */
+    _clickUnpairedDataset : function( ev ){
+        ev.stopPropagation();
+        return this.toggleSelectUnpaired( $( ev.currentTarget ) );
+    },
+
+    /** Toggle the selection of an unpaired dataset representation.
+     *  @param [jQuery] $dataset        the unpaired dataset dom rep to select
+     *  @param [Boolean] options.force  if defined, force selection based on T/F; otherwise, toggle
+     */
+    toggleSelectUnpaired : function( $dataset, options ){
+        options = options || {};
+        var dataset = $dataset.data( 'dataset' ),
+            select = options.force !== undefined? options.force: !$dataset.hasClass( 'selected' );
+        //this.debug( id, options.force, $dataset, dataset );
+        if( !$dataset.length || dataset === undefined ){ return $dataset; }
+
+        if( select ){
+            $dataset.addClass( 'selected' );
+            if( !options.waitToPair ){
+                this.pairAllSelected();
+            }
+
+        } else {
+            $dataset.removeClass( 'selected' );
+            //delete dataset.selected;
+        }
+        return $dataset;
+    },
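+
+    // Editor's sketch of hypothetical usage (the jQuery objects are illustrative):
+    //   // select forward and reverse without pairing immediately, then pair the batch
+    //   creator.toggleSelectUnpaired( $fwdDataset, { force: true, waitToPair: true });
+    //   creator.toggleSelectUnpaired( $revDataset, { force: true, waitToPair: true });
+    //   creator.pairAllSelected();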
+
+    /** pair all the currently selected unpaired datasets */
+    pairAllSelected : function( options ){
+        options = options || {};
+        var creator = this,
+            fwds = [],
+            revs = [],
+            pairs = [];
+        creator.$( '.unpaired-columns .forward-column .dataset.selected' ).each( function(){
+            fwds.push( $( this ).data( 'dataset' ) );
+        });
+        creator.$( '.unpaired-columns .reverse-column .dataset.selected' ).each( function(){
+            revs.push( $( this ).data( 'dataset' ) );
+        });
+        fwds.length = revs.length = Math.min( fwds.length, revs.length );
+        //this.debug( fwds );
+        //this.debug( revs );
+        fwds.forEach( function( fwd, i ){
+            try {
+                pairs.push( creator._pair( fwd, revs[i], { silent: true }) );
+
+            } catch( err ){
+                //TODO: preserve selected state of those that couldn't be paired
+                //TODO: warn that some could not be paired
+                creator.error( err );
+            }
+        });
+        if( pairs.length && !options.silent ){
+            this.trigger( 'pair:new', pairs );
+        }
+        return pairs;
+    },
+
+    /** clear the selection on all unpaired datasets */
+    clearSelectedUnpaired : function(){
+        this.$( '.unpaired-columns .dataset.selected' ).removeClass( 'selected' );
+    },
+
+    /** when holding down the shift key on a click, 'paint' the moused over datasets as selected */
+    _mousedownUnpaired : function( ev ){
+        if( ev.shiftKey ){
+            var creator = this,
+                $startTarget = $( ev.target ).addClass( 'selected' ),
+                moveListener = function( ev ){
+                    creator.$( ev.target ).filter( '.dataset' ).addClass( 'selected' );
+                };
+            $startTarget.parent().on( 'mousemove', moveListener );
+
+            // on any mouseup, stop listening to the move and try to pair any selected
+            $( document ).one( 'mouseup', function( ev ){
+                $startTarget.parent().off( 'mousemove', moveListener );
+                creator.pairAllSelected();
+            });
+        }
+    },
+
+    /** attempt to pair two datasets directly across from one another */
+    _clickPairRow : function( ev ){
+        //if( !ev.currentTarget ){ return true; }
+        var rowIndex = $( ev.currentTarget ).index(),
+            fwd = $( '.unpaired-columns .forward-column .dataset' ).eq( rowIndex ).data( 'dataset' ),
+            rev = $( '.unpaired-columns .reverse-column .dataset' ).eq( rowIndex ).data( 'dataset' );
+        //this.debug( 'row:', rowIndex, fwd, rev );
+        this._pair( fwd, rev );
+    },
+
+    // ........................................................................ divider/partition
+    /** start dragging the visible divider/partition between unpaired and paired panes */
+    _startPartitionDrag : function( ev ){
+        var creator = this,
+            startingY = ev.pageY;
+        //this.debug( 'partition drag START:', ev );
+        $( 'body' ).css( 'cursor', 'ns-resize' );
+        creator.$( '.flexible-partition-drag' ).css( 'color', 'black' );
+
+        function endDrag( ev ){
+            //creator.debug( 'partition drag STOP:', ev );
+            // doing this by an added class didn't really work well - kept flashing still
+            creator.$( '.flexible-partition-drag' ).css( 'color', '' );
+            $( 'body' ).css( 'cursor', '' ).unbind( 'mousemove', trackMouse );
+        }
+        function trackMouse( ev ){
+            var offset = ev.pageY - startingY;
+            //creator.debug( 'partition:', startingY, offset );
+            if( !creator.adjPartition( offset ) ){
+                //creator.debug( 'mouseup triggered' );
+                $( 'body' ).trigger( 'mouseup' );
+            }
+            creator._adjUnpairedOnScrollbar();
+            startingY += offset;
+        }
+        $( 'body' ).mousemove( trackMouse );
+        $( 'body' ).one( 'mouseup', endDrag );
+    },
+
+    /** adjust the partition up/down by +/-adj pixels */
+    adjPartition : function( adj ){
+        var $unpaired = this.$( '.unpaired-columns' ),
+            $paired = this.$( '.paired-columns' ),
+            unpairedHi = parseInt( $unpaired.css( 'height' ), 10 ),
+            pairedHi = parseInt( $paired.css( 'height' ), 10 );
+        //this.debug( adj, 'hi\'s:', unpairedHi, pairedHi, unpairedHi + adj, pairedHi - adj );
+
+        unpairedHi = Math.max( 10, unpairedHi + adj );
+        pairedHi = pairedHi - adj;
+
+        var movingUpwards = adj < 0;
+        // when the divider gets close to the top - lock into hiding the unpaired section
+        if( movingUpwards ){
+            if( this.unpairedPanelHidden ){
+                return false;
+            } else if( unpairedHi <= 10 ){
+                this.hideUnpaired();
+                return false;
+            }
+        } else {
+            if( this.unpairedPanelHidden ){
+                $unpaired.show();
+                this.unpairedPanelHidden = false;
+            }
+        }
+
+        // when the divider gets close to the bottom - lock into hiding the paired section
+        if( !movingUpwards ){
+            if( this.pairedPanelHidden ){
+                return false;
+            } else if( pairedHi <= 15 ){
+                this.hidePaired();
+                return false;
+            }
+
+        } else {
+            if( this.pairedPanelHidden ){
+                $paired.show();
+                this.pairedPanelHidden = false;
+            }
+        }
+
+        $unpaired.css({
+            height  : unpairedHi + 'px',
+            flex    : '0 0 auto'
+        });
+        return true;
+    },
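+
+    // Editor's note: e.g. adjPartition( -20 ) moves the divider up 20px, shrinking the
+    // unpaired pane; a false return tells the drag handler to stop the drag because a
+    // pane has locked into its hidden state.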
+
+    // ........................................................................ paired
+    /** select a pair when clicked */
+    selectPair : function( ev ){
+        ev.stopPropagation();
+        $( ev.currentTarget ).toggleClass( 'selected' );
+    },
+
+    /** deselect all pairs */
+    clearSelectedPaired : function( ev ){
+        this.$( '.paired-columns .dataset.selected' ).removeClass( 'selected' );
+    },
+
+    /** rename a pair when the pair name is clicked */
+    _clickPairName : function( ev ){
+        ev.stopPropagation();
+        var $name = $( ev.currentTarget ),
+            $pair = $name.parent().parent(),
+            index = $pair.index( '.dataset.paired' ),
+            pair = this.paired[ index ],
+            response = prompt( 'Enter a new name for the pair:', pair.name );
+        if( response ){
+            pair.name = response;
+            // set a flag (which won't be passed in json creation) for manual naming so we don't overwrite these
+            //  when adding/removing extensions
+            //hackish
+            pair.customizedName = true;
+            $name.text( pair.name );
+        }
+    },
+
+    /** unpair this pair */
+    _clickUnpair : function( ev ){
+        //if( !ev.currentTarget ){ return true; }
+        var pairIndex = Math.floor( $( ev.currentTarget ).index( '.unpair-btn' ) );
+        //this.debug( 'pair:', pairIndex );
+        this._unpair( this.paired[ pairIndex ] );
+    },
+
+    // ........................................................................ paired - drag and drop re-ordering
+    //_dragenterPairedColumns : function( ev ){
+    //    this.debug( '_dragenterPairedColumns:', ev );
+    //},
+    //_dragleavePairedColumns : function( ev ){
+    //    //this.debug( '_dragleavePairedColumns:', ev );
+    //},
+    /** track the mouse drag over the paired list adding a placeholder to show where the drop would occur */
+    _dragoverPairedColumns : function( ev ){
+        //this.debug( '_dragoverPairedColumns:', ev );
+        ev.preventDefault();
+
+        var $list = this.$( '.paired-columns .column-datasets' );
+        this._checkForAutoscroll( $list, ev.originalEvent.clientY );
+        //this.debug( ev.originalEvent.clientX, ev.originalEvent.clientY );
+        var $nearest = this._getNearestPairedDatasetLi( ev.originalEvent.clientY );
+
+        $( '.element-drop-placeholder' ).remove();
+        var $placeholder = $( '<div class="element-drop-placeholder"></div>' );
+        if( !$nearest.length ){
+            $list.append( $placeholder );
+        } else {
+            $nearest.before( $placeholder );
+        }
+    },
+
+    /** If the mouse is near enough to the list's top or bottom, scroll the list */
+    _checkForAutoscroll : function( $element, y ){
+        var AUTOSCROLL_SPEED = 2;
+        var offset = $element.offset(),
+            scrollTop = $element.scrollTop(),
+            upperDist = y - offset.top,
+            lowerDist = ( offset.top + $element.outerHeight() ) - y;
+        //this.debug( '_checkForAutoscroll:', scrollTop, upperDist, lowerDist );
+        if( upperDist >= 0 && upperDist < this.autoscrollDist ){
+            $element.scrollTop( scrollTop - AUTOSCROLL_SPEED );
+        } else if( lowerDist >= 0 && lowerDist < this.autoscrollDist ){
+            $element.scrollTop( scrollTop + AUTOSCROLL_SPEED );
+        }
+    },
+
+    /** get the nearest *previous* paired dataset PairView based on the mouse's Y coordinate.
+     *      If the y is at the end of the list, return an empty jQuery object.
+     */
+    _getNearestPairedDatasetLi : function( y ){
+        var WIGGLE = 4,
+            lis = this.$( '.paired-columns .column-datasets li' ).toArray();
+        for( var i=0; i<lis.length; i++ ){
+            var $li = $( lis[i] ),
+                top = $li.offset().top,
+                halfHeight = Math.floor( $li.outerHeight() / 2 ) + WIGGLE;
+            if( top + halfHeight > y && top - halfHeight < y ){
+                //this.debug( y, top + halfHeight, top - halfHeight )
+                return $li;
+            }
+        }
+        return $();
+    },
+    /** drop (dragged/selected PairViews) onto the list, re-ordering both the DOM and the internal array of pairs */
+    _dropPairedColumns : function( ev ){
+        // both required for firefox
+        ev.preventDefault();
+        ev.dataTransfer.dropEffect = 'move';
+
+        var $nearest = this._getNearestPairedDatasetLi( ev.originalEvent.clientY );
+        if( $nearest.length ){
+            this.$dragging.insertBefore( $nearest );
+
+        } else {
+            // no nearest before - insert after last element (unpair button)
+            this.$dragging.insertAfter( this.$( '.paired-columns .unpair-btn' ).last() );
+        }
+        // resync the creator's list of paired based on the new DOM order
+        this._syncPairsToDom();
+        return false;
+    },
+    /** resync the creator's list of paired based on the DOM order of pairs */
+    _syncPairsToDom : function(){
+        var newPaired = [];
+        //TODO: doesn't seem wise to use the dom to store these - can't we sync another way?
+        this.$( '.paired-columns .dataset.paired' ).each( function(){
+            newPaired.push( $( this ).data( 'pair' ) );
+        });
+        //this.debug( newPaired );
+        this.paired = newPaired;
+        this._renderPaired();
+    },
+    /** drag communication with pair sub-views: dragstart */
+    _pairDragstart : function( ev, pair ){
+        //this.debug( '_pairDragstart', ev, pair )
+        // auto select the pair causing the event and move all selected
+        pair.$el.addClass( 'selected' );
+        var $selected = this.$( '.paired-columns .dataset.selected' );
+        this.$dragging = $selected;
+    },
+    /** drag communication with pair sub-views: dragend - remove the placeholder */
+    _pairDragend : function( ev, pair ){
+        //this.debug( '_pairDragend', ev, pair )
+        $( '.element-drop-placeholder' ).remove();
+        this.$dragging = null;
+    },
+
+    // ........................................................................ footer
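+    /** toggle stripping file extensions from pair names (custom names are preserved) and re-render */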
+    toggleExtensions : function( force ){
+        var creator = this;
+        creator.removeExtensions = ( force !== undefined )?( force ):( !creator.removeExtensions );
+
+        _.each( creator.paired, function( pair ){
+            // don't overwrite custom names
+            if( pair.customizedName ){ return; }
+            pair.name = creator._guessNameForPair( pair.forward, pair.reverse );
+        });
+
+        creator._renderPaired();
+        creator._renderFooter();
+    },
+
+    /** handle a collection name change */
+    _changeName : function( ev ){
+        this._validationWarning( 'name', !!this._getName() );
+    },
+
+    /** check for enter key press when in the collection name and submit */
+    _nameCheckForEnter : function( ev ){
+        if( ev.keyCode === 13 && !this.blocking ){
+            this._clickCreate();
+        }
+    },
+
+    /** get the current collection name */
+    _getName : function(){
+        return _.escape( this.$( '.collection-name' ).val() );
+    },
+
+    /** attempt to create the current collection */
+    _clickCreate : function( ev ){
+        var name = this._getName();
+        if( !name ){
+            this._validationWarning( 'name' );
+        } else if( !this.blocking ){
+            this.createList();
+        }
+    },
+
+    // ------------------------------------------------------------------------ misc
+    /** debug a dataset list */
+    _printList : function( list ){
+        var creator = this;
+        _.each( list, function( e ){
+            if( list === creator.paired ){
+                creator._printPair( e );
+            } else {
+                //creator.debug( e );
+            }
+        });
+    },
+
+    /** print a pair Object */
+    _printPair : function( pair ){
+        this.debug( pair.forward.name, pair.reverse.name, ': ->', pair.name );
+    },
+
+    /** string rep */
+    toString : function(){ return 'PairedCollectionCreator'; }
+});
+
+
+//TODO: move to require text plugin and load these as text
+//TODO: underscore templating is currently unnecessary because no variables are used
+//TODO: better way of localizing text-nodes in long strings
+/** underscore template fns attached to class */
+PairedCollectionCreator.templates = PairedCollectionCreator.templates || {
+
+    /** the skeleton */
+    main : _.template([
+        '<div class="header flex-row no-flex"></div>',
+        '<div class="middle flex-row flex-row-container"></div>',
+        '<div class="footer flex-row no-flex">'
+    ].join('')),
+
+    /** the header (not including help text) */
+    header : _.template([
+        '<div class="main-help well clear">',
+            '<a class="more-help" href="javascript:void(0);">', _l( 'More help' ), '</a>',
+            '<div class="help-content">',
+                '<a class="less-help" href="javascript:void(0);">', _l( 'Less' ), '</a>',
+            '</div>',
+        '</div>',
+        '<div class="alert alert-dismissable">',
+            '<button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button>',
+            '<span class="alert-message"></span>',
+        '</div>',
+
+        '<div class="column-headers vertically-spaced flex-column-container">',
+            '<div class="forward-column flex-column column">',
+                '<div class="column-header">',
+                    '<div class="column-title">',
+                        '<span class="title">', _l( 'Unpaired forward' ), '</span>',
+                        '<span class="title-info unpaired-info"></span>',
+                    '</div>',
+                    '<div class="unpaired-filter forward-unpaired-filter pull-left">',
+                        '<input class="search-query" placeholder="', _l( 'Filter this list' ), '" />',
+                    '</div>',
+                '</div>',
+            '</div>',
+            '<div class="paired-column flex-column no-flex column">',
+                '<div class="column-header">',
+                    '<a class="choose-filters-link" href="javascript:void(0)">',
+                        _l( 'Choose filters' ),
+                    '</a>',
+                    '<a class="clear-filters-link" href="javascript:void(0);">',
+                        _l( 'Clear filters' ),
+                    '</a><br />',
+                    '<a class="autopair-link" href="javascript:void(0);">',
+                        _l( 'Auto-pair' ),
+                    '</a>',
+                '</div>',
+            '</div>',
+            '<div class="reverse-column flex-column column">',
+                '<div class="column-header">',
+                    '<div class="column-title">',
+                        '<span class="title">', _l( 'Unpaired reverse' ), '</span>',
+                        '<span class="title-info unpaired-info"></span>',
+                    '</div>',
+                    '<div class="unpaired-filter reverse-unpaired-filter pull-left">',
+                        '<input class="search-query" placeholder="', _l( 'Filter this list' ), '" />',
+                    '</div>',
+                '</div>',
+            '</div>',
+        '</div>'
+    ].join('')),
+
+    /** the middle: unpaired, divider, and paired */
+    middle : _.template([
+        // contains two flex rows (rows that fill available space) and a divider between them
+        '<div class="unpaired-columns flex-column-container scroll-container flex-row">',
+            '<div class="forward-column flex-column column">',
+                '<ol class="column-datasets"></ol>',
+            '</div>',
+            '<div class="paired-column flex-column no-flex column">',
+                '<ol class="column-datasets"></ol>',
+            '</div>',
+            '<div class="reverse-column flex-column column">',
+                '<ol class="column-datasets"></ol>',
+            '</div>',
+        '</div>',
+        '<div class="flexible-partition">',
+            '<div class="flexible-partition-drag" title="', _l( 'Drag to change' ), '"></div>',
+            '<div class="column-header">',
+                '<div class="column-title paired-column-title">',
+                    '<span class="title"></span>',
+                '</div>',
+                '<a class="unpair-all-link" href="javascript:void(0);">',
+                    _l( 'Unpair all' ),
+                '</a>',
+            '</div>',
+        '</div>',
+        '<div class="paired-columns flex-column-container scroll-container flex-row">',
+            '<ol class="column-datasets"></ol>',
+        '</div>'
+    ].join('')),
+
+    /** creation and cancel controls */
+    footer : _.template([
+        '<div class="attributes clear">',
+            '<div class="clear">',
+                '<label class="remove-extensions-prompt pull-right">',
+                    _l( 'Remove file extensions from pair names' ), '?',
+                    '<input class="remove-extensions pull-right" type="checkbox" />',
+                '</label>',
+            '</div>',
+            '<div class="clear">',
+                '<input class="collection-name form-control pull-right" ',
+                    'placeholder="', _l( 'Enter a name for your new list' ), '" />',
+                '<div class="collection-name-prompt pull-right">', _l( 'Name' ), ':</div>',
+            '</div>',
+        '</div>',
+
+        '<div class="actions clear vertically-spaced">',
+            '<div class="other-options pull-left">',
+                '<button class="cancel-create btn" tabindex="-1">', _l( 'Cancel' ), '</button>',
+                '<div class="create-other btn-group dropup">',
+                    '<button class="btn btn-default dropdown-toggle" data-toggle="dropdown">',
+                          _l( 'Create a different kind of collection' ),
+                          ' <span class="caret"></span>',
+                    '</button>',
+                    '<ul class="dropdown-menu" role="menu">',
+                          '<li><a href="#">', _l( 'Create a <i>single</i> pair' ), '</a></li>',
+                          '<li><a href="#">', _l( 'Create a list of <i>unpaired</i> datasets' ), '</a></li>',
+                    '</ul>',
+                '</div>',
+            '</div>',
+
+            '<div class="main-options pull-right">',
+                '<button class="create-collection btn btn-primary">', _l( 'Create list' ), '</button>',
+            '</div>',
+        '</div>'
+    ].join('')),
+
+    /** help content */
+    helpContent : _.template([
+        '<p>', _l([
+            'Collections of paired datasets are ordered lists of dataset pairs (often forward and reverse reads). ',
+            'These collections can be passed to tools and workflows in order to have analyses done on each member of ',
+            'the entire group. This interface allows you to create a collection, choose which datasets are paired, ',
+            'and re-order the final collection.'
+        ].join( '' )), '</p>',
+        '<p>', _l([
+            'Unpaired datasets are shown in the <i data-target=".unpaired-columns">unpaired section</i> ',
+            '(hover over the underlined words to highlight below). ',
+            'Paired datasets are shown in the <i data-target=".paired-columns">paired section</i>.',
+            '<ul>To pair datasets, you can:',
+                '<li>Click a dataset in the ',
+                    '<i data-target=".unpaired-columns .forward-column .column-datasets,',
+                                    '.unpaired-columns .forward-column">forward column</i> ',
+                    'to select it then click a dataset in the ',
+                    '<i data-target=".unpaired-columns .reverse-column .column-datasets,',
+                                    '.unpaired-columns .reverse-column">reverse column</i>.',
+                '</li>',
+                '<li>Click one of the "Pair these datasets" buttons in the ',
+                    '<i data-target=".unpaired-columns .paired-column .column-datasets,',
+                                    '.unpaired-columns .paired-column">middle column</i> ',
+                    'to pair the datasets in a particular row.',
+                '</li>',
+                '<li>Click <i data-target=".autopair-link">"Auto-pair"</i> ',
+                    'to have your datasets automatically paired based on name.',
+                '</li>',
+            '</ul>'
+        ].join( '' )), '</p>',
+        '<p>', _l([
+            '<ul>You can filter what is shown in the unpaired sections by:',
+                '<li>Entering partial dataset names in either the ',
+                    '<i data-target=".forward-unpaired-filter input">forward filter</i> or ',
+                    '<i data-target=".reverse-unpaired-filter input">reverse filter</i>.',
+                '</li>',
+                '<li>Choosing from a list of preset filters by clicking the ',
+                    '<i data-target=".choose-filters-link">"Choose filters" link</i>.',
+                '</li>',
+                '<li>Entering regular expressions to match dataset names. See: ',
+                    '<a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions"',
+                        ' target="_blank">MDN\'s JavaScript Regular Expression Tutorial</a>. ',
+                    'Note: forward slashes (/) are not needed.',
+                '</li>',
+                '<li>Clearing the filters by clicking the ',
+                    '<i data-target=".clear-filters-link">"Clear filters" link</i>.',
+                '</li>',
+            '</ul>'
+        ].join( '' )), '</p>',
+        '<p>', _l([
+            'To unpair individual dataset pairs, click the ',
+                '<i data-target=".unpair-btn">unpair buttons ( <span class="fa fa-unlink"></span> )</i>. ',
+            'Click the <i data-target=".unpair-all-link">"Unpair all" link</i> to unpair all pairs.'
+        ].join( '' )), '</p>',
+        '<p>', _l([
+            'You can include or remove the file extensions (e.g. ".fastq") from your pair names by toggling the ',
+                '<i data-target=".remove-extensions-prompt">"Remove file extensions from pair names?"</i> control.'
+        ].join( '' )), '</p>',
+        '<p>', _l([
+            'Once your collection is complete, enter a <i data-target=".collection-name">name</i> and ',
+            'click <i data-target=".create-collection">"Create list"</i>. ',
+            '(Note: you do not have to pair all unpaired datasets to finish.)'
+        ].join( '' )), '</p>'
+    ].join(''))
+};
+
+
+//=============================================================================
+/** a modal version of the paired collection creator */
+var pairedCollectionCreatorModal = function _pairedCollectionCreatorModal( datasets, options ){
+
+    var deferred = jQuery.Deferred(),
+        creator;
+
+    options = _.defaults( options || {}, {
+        datasets    : datasets,
+        oncancel    : function(){
+            Galaxy.modal.hide();
+            deferred.reject( 'cancelled' );
+        },
+        oncreate    : function( creator, response ){
+            Galaxy.modal.hide();
+            deferred.resolve( response );
+        }
+    });
+
+    if( !window.Galaxy || !Galaxy.modal ){
+        throw new Error( 'Galaxy or Galaxy.modal not found' );
+    }
+
+    creator = new PairedCollectionCreator( options );
+    Galaxy.modal.show({
+        title   : 'Create a collection of paired datasets',
+        body    : creator.$el,
+        width   : '80%',
+        height  : '800px',
+        closing_events: true
+    });
+    creator.render();
+    window.creator = creator;
+
+    //TODO: remove modal header
+    return deferred;
+};
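+
+// Editor's sketch of hypothetical usage (dataset array and handlers illustrative):
+//   pairedCollectionCreatorModal( datasetJSONArray, { historyId : 'abc123' })
+//       .done( function( response ){ console.log( 'created:', response ); })
+//       .fail( function( reason ){ console.log( 'cancelled or failed:', reason ); });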
+
+
+//=============================================================================
+function createListOfPairsCollection( collection ){
+    var elements = collection.toJSON();
+//TODO: validate elements
+    return pairedCollectionCreatorModal( elements, {
+        historyId : collection.historyId
+    });
+}
+
+
+//=============================================================================
+    return {
+        PairedCollectionCreator : PairedCollectionCreator,
+        pairedCollectionCreatorModal : pairedCollectionCreatorModal,
+        createListOfPairsCollection : createListOfPairsCollection
+    };
+});
diff --git a/client/galaxy/scripts/mvc/collection/pair-collection-creator.js b/client/galaxy/scripts/mvc/collection/pair-collection-creator.js
new file mode 100644
index 0000000..b6b3127
--- /dev/null
+++ b/client/galaxy/scripts/mvc/collection/pair-collection-creator.js
@@ -0,0 +1,253 @@
+define([
+    "mvc/collection/list-collection-creator",
+    "mvc/history/hdca-model",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( LIST_CREATOR, HDCA, BASE_MVC, _l ){
+
+'use strict';
+
+var logNamespace = 'collections';
+/*==============================================================================
+TODO:
+    the paired creator doesn't really mesh with the list creator as parent
+        it may be better to make an abstract super class for both
+    composites may inherit from this (or vice versa)
+    PairedDatasetCollectionElementView doesn't make a lot of sense
+
+==============================================================================*/
+/** A list-item view for a single dataset element (forward or reverse) within a pair being created. */
+var PairedDatasetCollectionElementView = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+//TODO: use proper class (DatasetDCE or NestedDCDCE (or the union of both))
+    tagName     : 'li',
+    className   : 'collection-element',
+
+    initialize : function( attributes ){
+        this.element = attributes.element || {};
+        this.identifier = attributes.identifier;
+    },
+
+    render : function(){
+        this.$el
+            .attr( 'data-element-id', this.element.id )
+            .html( this.template({ identifier: this.identifier, element: this.element }) );
+        return this;
+    },
+
+    //TODO: lots of unused space in the element - possibly load details and display them horizontally.
+    template : _.template([
+        '<span class="identifier"><%- identifier %></span>',
+        '<span class="name"><%- element.name %></span>',
+    ].join('')),
+
+    /** remove the DOM and any listeners */
+    destroy : function(){
+        this.off();
+        this.$el.remove();
+    },
+
+    /** string rep */
+    toString : function(){
+        return 'DatasetCollectionElementView()';
+    }
+});
+
+
+// ============================================================================
+var _super = LIST_CREATOR.ListCollectionCreator;
+
+/** An interface for building a paired collection from exactly two datasets.
+ */
+var PairCollectionCreator = _super.extend({
+
+    /** the class used to display individual elements */
+    elementViewClass : PairedDatasetCollectionElementView,
+    /** the class this creator will create and save */
+    collectionClass : HDCA.HistoryPairDatasetCollection,
+    className : 'pair-collection-creator collection-creator flex-row-container',
+
+    /** override to no-op */
+    _mangleDuplicateNames : function(){},
+
+    // TODO: this whole pattern sucks. There needs to be two classes of problem area:
+    //      bad initial choices and
+    //      when the user has painted themselves into a corner during creation/use-of-the-creator
+    /** render the entire interface */
+    render : function( speed, callback ){
+        if( this.workingElements.length === 2 ){
+            return _super.prototype.render.call( this, speed, callback );
+        }
+        return this._renderInvalid( speed, callback );
+    },
+
+    // ------------------------------------------------------------------------ rendering elements
+    /** render forward/reverse */
+    _renderList : function( speed, callback ){
+        //this.debug( '-- _renderList' );
+        //precondition: there are two valid elements in workingElements
+        var creator = this,
+            $tmp = jQuery( '<div/>' ),
+            $list = creator.$list();
+
+        // lose the original views, create the new ones, append all at once, then call their renders
+        _.each( this.elementViews, function( view ){
+            view.destroy();
+            creator.removeElementView( view );
+        });
+        $tmp.append( creator._createForwardElementView().$el );
+        $tmp.append( creator._createReverseElementView().$el );
+        $list.empty().append( $tmp.children() );
+        _.invoke( creator.elementViews, 'render' );
+    },
+
+    /** create the forward element view */
+    _createForwardElementView : function(){
+        return this._createElementView( this.workingElements[0], { identifier: 'forward' } );
+    },
+
+    /** create the reverse element view */
+    _createReverseElementView : function(){
+        return this._createElementView( this.workingElements[1], { identifier: 'reverse' } );
+    },
+
+    /** create an element view, cache in elementViews, and return */
+    _createElementView : function( element, options ){
+        var elementView = new this.elementViewClass( _.extend( options, {
+            element : element,
+        }));
+        this.elementViews.push( elementView );
+        return elementView;
+    },
+
+    /** swap the forward, reverse elements and re-render */
+    swap : function(){
+        this.workingElements = [
+            this.workingElements[1],
+            this.workingElements[0],
+        ];
+        this._renderList();
+    },
+
+    events : _.extend( _.clone( _super.prototype.events ), {
+        'click .swap' : 'swap',
+    }),
+
+    // ------------------------------------------------------------------------ templates
+    //TODO: move to require text plugin and load these as text
+    //TODO: underscore templating is currently unnecessary because no variables are used
+    //TODO: better way of localizing text-nodes in long strings
+    /** underscore template fns attached to class */
+    templates : _.extend( _.clone( _super.prototype.templates ), {
+        /** the middle: element list */
+        middle : _.template([
+            '<div class="collection-elements-controls">',
+                '<a class="swap" href="javascript:void(0);" title="', _l( 'Swap forward and reverse datasets' ), '">',
+                    _l( 'Swap' ),
+                '</a>',
+            '</div>',
+            '<div class="collection-elements scroll-container flex-row">',
+            '</div>'
+        ].join('')),
+
+        /** help content */
+        helpContent : _.template([
+            '<p>', _l([
+                'Pair collections are permanent collections containing two datasets: one forward and one reverse. ',
+                'Often these are forward and reverse reads. The pair collections can be passed to tools and ',
+                'workflows in order to have analyses done on both datasets. This interface allows ',
+                'you to create a pair, name it, and swap which is forward and which reverse.'
+            ].join( '' )), '</p>',
+            '<ul>',
+                '<li>', _l([
+                    'Click the <i data-target=".swap">"Swap"</i> link to make your forward dataset the reverse ',
+                    'and the reverse dataset forward.'
+                ].join( '' )), '</li>',
+                '<li>', _l([
+                    'Click the <i data-target=".cancel-create">"Cancel"</i> button to exit the interface.'
+                ].join( '' )), '</li>',
+            '</ul><br />',
+            '<p>', _l([
+                'Once your collection is complete, enter a <i data-target=".collection-name">name</i> and ',
+                'click <i data-target=".create-collection">"Create list"</i>.'
+            ].join( '' )), '</p>'
+        ].join('')),
+
+        /** a simplified page communicating what went wrong and why the user needs to reselect something else */
+        invalidInitial : _.template([
+            '<div class="header flex-row no-flex">',
+                '<div class="alert alert-warning" style="display: block">',
+                    '<span class="alert-message">',
+                        '<% if( _.size( problems ) ){ %>',
+                            _l( 'The following selections could not be included due to problems' ),
+                            '<ul><% _.each( problems, function( problem ){ %>',
+                                '<li><b><%- problem.element.name %></b>: <%- problem.text %></li>',
+                            '<% }); %></ul>',
+                        '<% } else if( _.size( elements ) === 0 ){ %>',
+                            _l( 'No datasets were selected' ), '.',
+                        '<% } else if( _.size( elements ) === 1 ){ %>',
+                            _l( 'Only one dataset was selected' ), ': <%- elements[0].name %>',
+                        '<% } else if( _.size( elements ) > 2 ){ %>',
+                            _l( 'Too many datasets were selected' ),
+                            ': <%- _.pluck( elements, "name" ).join( ", ") %>',
+                        '<% } %>',
+                        '<br />',
+                        _l( 'Two (and only two) elements are needed for the pair' ), '. ',
+                        _l( 'You may need to ' ),
+                        '<a class="cancel-create" href="javascript:void(0)">', _l( 'cancel' ), '</a> ',
+                        _l( 'and reselect new elements' ), '.',
+                    '</span>',
+                '</div>',
+            '</div>',
+            '<div class="footer flex-row no-flex">',
+                '<div class="actions clear vertically-spaced">',
+                    '<div class="other-options pull-left">',
+                        '<button class="cancel-create btn" tabindex="-1">', _l( 'Cancel' ), '</button>',
+                        // _l( 'Create a different kind of collection' ),
+                    '</div>',
+                '</div>',
+            '</div>'
+        ].join('')),
+    }),
+
+    // ------------------------------------------------------------------------ misc
+    /** string rep */
+    toString : function(){ return 'PairCollectionCreator'; }
+});
+
+
+//==============================================================================
+/** Pair collection flavor of collectionCreatorModal. */
+var pairCollectionCreatorModal = function _pairCollectionCreatorModal( elements, options ){
+    options = options || {};
+    options.title = _l( 'Create a collection from a pair of datasets' );
+    return LIST_CREATOR.collectionCreatorModal( elements, options, PairCollectionCreator );
+};
+
+
+//==============================================================================
+/** Use a modal to create a pair collection, then add it to the given history contents.
+ *  @returns {Deferred} resolved when the collection is added to the history.
+ */
+function createPairCollection( contents ){
+    var elements = contents.toJSON(),
+        promise = pairCollectionCreatorModal( elements, {
+            creationFn : function( elements, name ){
+                elements = [
+                    { name: "forward", src: "hda", id: elements[0].id },
+                    { name: "reverse", src: "hda", id: elements[1].id }
+                ];
+                return contents.createHDCA( elements, 'paired', name );
+            }
+        });
+    return promise;
+}
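+
+/* A minimal usage sketch (assuming `selectedContents` is a backbone collection
+ * of exactly two datasets exposing `toJSON` and `createHDCA` as above):
+ * @example
+ *     createPairCollection( selectedContents )
+ *         .done( function(){ console.log( 'pair added to the history' ); })
+ *         .fail( function(){ console.log( 'selection cancelled or failed' ); });
+ */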
+
+//==============================================================================
+    return {
+        PairCollectionCreator       : PairCollectionCreator,
+        pairCollectionCreatorModal  : pairCollectionCreatorModal,
+        createPairCollection        : createPairCollection,
+    };
+});
diff --git a/client/galaxy/scripts/mvc/dataset/data.js b/client/galaxy/scripts/mvc/dataset/data.js
new file mode 100644
index 0000000..22b7b6e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/dataset/data.js
@@ -0,0 +1,682 @@
+// Additional dependencies: jQuery, underscore.
+define(['mvc/ui/ui-modal', 'mvc/ui/ui-frames', 'mvc/ui/icon-button'], function(Modal, Frames, mod_icon_btn) {
+
+/**
+ * Dataset metadata.
+ */
+var DatasetMetadata = Backbone.Model.extend({});
+
+/**
+ * A dataset. In Galaxy, datasets are associated with a history, so
+ * this object is also known as a HistoryDatasetAssociation.
+ */
+var Dataset = Backbone.Model.extend({
+    defaults: {
+        id: '',
+        type: '',
+        name: '',
+        hda_ldda: 'hda',
+        metadata: null
+    },
+
+    initialize: function() {
+        // Metadata can be passed in as a model or a set of attributes; if it's
+        // already a model, there's no need to set metadata.
+        if (!this.get('metadata')) {
+            this._set_metadata();
+        }
+
+        // Update metadata on change.
+        this.on('change', this._set_metadata, this);
+    },
+
+    _set_metadata: function() {
+        var metadata = new DatasetMetadata();
+
+        // Move metadata from dataset attributes to metadata object.
+        _.each(_.keys(this.attributes), function(k) {
+            if (k.indexOf('metadata_') === 0) {
+                // Found metadata.
+                var new_key = k.split('metadata_')[1];
+                metadata.set(new_key, this.attributes[k]);
+                delete this.attributes[k];
+            }
+        }, this);
+
+        // Because this is an internal change, silence it.
+        this.set('metadata', metadata, { 'silent': true });
+    },
+
+    /**
+     * Returns dataset metadata for a given attribute.
+     */
+    get_metadata: function(attribute) {
+        return this.attributes.metadata.get(attribute);
+    },
+
+    urlRoot: Galaxy.root + "api/datasets"
+});
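+
+/* A minimal usage sketch for the model (the id is a placeholder; note how
+ * 'metadata_*' attributes are folded into the nested DatasetMetadata model):
+ * @example
+ *     var dataset = new Dataset({ id: 'abc123', metadata_columns: 3 });
+ *     dataset.get_metadata( 'columns' );  // => 3
+ *     dataset.fetch();                    // GET <Galaxy.root>api/datasets/abc123
+ */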
+
+/**
+ * A tabular dataset. This object extends dataset to provide incremental chunked data.
+ */
+var TabularDataset = Dataset.extend({
+    defaults: _.extend({}, Dataset.prototype.defaults, {
+        chunk_url: null,
+        first_data_chunk: null,
+        offset: 0,
+        at_eof: false
+    }),
+
+    initialize: function(options) {
+        Dataset.prototype.initialize.call(this);
+
+        // If the first data chunk was bootstrapped, continue fetching from its offset.
+        if (this.attributes.first_data_chunk){
+            this.attributes.offset = this.attributes.first_data_chunk.offset;
+        }
+        this.attributes.chunk_url = Galaxy.root + 'dataset/display?dataset_id=' + this.id;
+        this.attributes.url_viz = Galaxy.root + 'visualization';
+    },
+
+    /**
+     * Returns a jQuery Deferred that resolves to the next data chunk, or null (immediately, not a Deferred) if already at EOF.
+     */
+    get_next_chunk: function() {
+        // If already at end of file, do nothing.
+        if (this.attributes.at_eof) {
+            return null;
+        }
+
+        // Get next chunk.
+        var self = this,
+            next_chunk = $.Deferred();
+        $.getJSON(this.attributes.chunk_url, {
+            offset: self.attributes.offset
+        }).done(function(chunk) {
+            var rval;
+            if (chunk.ck_data !== '') {
+                // Found chunk.
+                rval = chunk;
+                self.attributes.offset = chunk.offset;
+            }
+            else {
+                // At EOF.
+                self.attributes.at_eof = true;
+                rval = null;
+            }
+            next_chunk.resolve(rval);
+        });
+
+        return next_chunk;
+    }
+});
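+
+/* A minimal sketch of reading a tabular dataset chunk by chunk (`readAll` is a
+ * hypothetical helper; it stops when get_next_chunk() returns null at EOF):
+ * @example
+ *     function readAll( tabular ){
+ *         $.when( tabular.get_next_chunk() ).then( function( chunk ){
+ *             if( chunk ){
+ *                 console.log( chunk.ck_data );
+ *                 readAll( tabular );
+ *             }
+ *         });
+ *     }
+ *     readAll( new TabularDataset({ id: 'abc123' }) );
+ */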
+
+var DatasetCollection = Backbone.Collection.extend({
+    model: Dataset
+});
+
+/**
+ * Provides a base for table-based, dynamic view of a tabular dataset.
+ * Do not instantiate directly; use either TopLevelTabularDatasetChunkedView
+ * or EmbeddedTabularDatasetChunkedView.
+ */
+var TabularDatasetChunkedView = Backbone.View.extend({
+
+    /**
+     * Initialize view and, importantly, set a scroll element.
+     */
+    initialize: function(options) {
+        // Row count for rendering.
+        this.row_count = 0;
+        this.loading_chunk = false;
+
+        // load trackster button
+        new TabularButtonTracksterView({
+            model   : options.model,
+            $el     : this.$el
+        });
+    },
+
+    expand_to_container: function(){
+        if (this.$el.height() < this.scroll_elt.height()){
+            this.attempt_to_fetch();
+        }
+    },
+
+    attempt_to_fetch: function(){
+        var self = this;
+        if ( !this.loading_chunk && this.scrolled_to_bottom() ) {
+            this.loading_chunk = true;
+            this.loading_indicator.show();
+            $.when(self.model.get_next_chunk()).then(function(result) {
+                if (result) {
+                    self._renderChunk(result);
+                    self.loading_chunk = false;
+                }
+                self.loading_indicator.hide();
+                self.expand_to_container();
+            });
+        }
+    },
+
+    render: function() {
+        // Add loading indicator.
+        this.loading_indicator = $('<div/>').attr('id', 'loading_indicator');
+        this.$el.append(this.loading_indicator);
+
+        // Add data table and header.
+        var data_table = $('<table/>').attr({
+            id: 'content_table',
+            cellpadding: 0
+        });
+        this.$el.append(data_table);
+        var column_names = this.model.get_metadata('column_names'),
+            header_container = $('<thead/>').appendTo(data_table),
+            header_row = $('<tr/>').appendTo(header_container);
+        if (column_names) {
+            header_row.append('<th>' + column_names.join('</th><th>') + '</th>');
+        } else {
+            for (var j = 1; j <= this.model.get_metadata('columns'); j++) {
+                header_row.append('<th>' + j + '</th>');
+            }
+        }
+
+        // Render first chunk.
+        var self = this,
+            first_chunk = this.model.get('first_data_chunk');
+        if (first_chunk) {
+            // First chunk is bootstrapped, so render now.
+            this._renderChunk(first_chunk);
+        }
+        else {
+            // No bootstrapping, so get first chunk and then render.
+            $.when(self.model.get_next_chunk()).then(function(result) {
+                self._renderChunk(result);
+            });
+        }
+
+        // -- Show new chunks during scrolling. --
+
+        // Set up chunk loading when scrolling using the scrolling element.
+        this.scroll_elt.scroll(function(){
+            self.attempt_to_fetch();
+        });
+    },
+
+    /**
+     * Returns true if user has scrolled to the bottom of the view.
+     */
+    scrolled_to_bottom: function() {
+        return false;
+    },
+
+    // -- Helper functions. --
+
+    _renderCell: function(cell_contents, index, colspan) {
+        var $cell = $('<td>').text(cell_contents);
+        var column_types = this.model.get_metadata('column_types');
+        if (colspan !== undefined) {
+            $cell.attr('colspan', colspan).addClass('stringalign');
+        } else if (column_types) {
+            if (index < column_types.length) {
+                if (column_types[index] === 'str' || column_types[index] === 'list') {
+                    /* Left align all str columns, right align the rest */
+                    $cell.addClass('stringalign');
+                }
+            }
+        }
+        return $cell;
+    },
+
+    _renderRow: function(line) {
+        // Check length of cells to ensure this is a complete row.
+        var cells = line.split('\t'),
+            row = $('<tr>'),
+            num_columns = this.model.get_metadata('columns');
+
+        if (this.row_count % 2 !== 0) {
+            row.addClass('dark_row');
+        }
+
+        if (cells.length === num_columns) {
+            _.each(cells, function(cell_contents, index) {
+                row.append(this._renderCell(cell_contents, index));
+            }, this);
+        }
+        else if (cells.length > num_columns) {
+            // SAM file or like format with optional metadata included.
+            _.each(cells.slice(0, num_columns - 1), function(cell_contents, index) {
+                row.append(this._renderCell(cell_contents, index));
+            }, this);
+            row.append(this._renderCell(cells.slice(num_columns - 1).join('\t'), num_columns - 1));
+        }
+        else if (cells.length === 1){
+            // Comment line, just return the one cell.
+            row.append(this._renderCell(line, 0, num_columns));
+        }
+        else {
+            // cells.length is greater than one, but less than num_columns.  Render cells and pad tds.
+            // Possibly a SAM file or like format with optional metadata missing.
+            // Could also be a tabular file with a line with missing columns.
+            _.each(cells, function(cell_contents, index) {
+                row.append(this._renderCell(cell_contents, index));
+            }, this);
+            _.each(_.range(num_columns - cells.length), function(){
+                row.append($('<td>'));
+            });
+        }
+
+        this.row_count++;
+        return row;
+    },
+
+    _renderChunk: function(chunk) {
+        var data_table = this.$el.find('table');
+        _.each(chunk.ck_data.split('\n'), function(line, index) {
+            if (line !== ''){
+                data_table.append(this._renderRow(line));
+            }
+        }, this);
+    }
+});
+
+/**
+ * Tabular view that is placed at the top level of the page. Scrolling occurs
+ * via top-level elements outside of the view.
+ */
+var TopLevelTabularDatasetChunkedView = TabularDatasetChunkedView.extend({
+
+    initialize: function(options) {
+        TabularDatasetChunkedView.prototype.initialize.call(this, options);
+
+        // Scrolling happens in top-level elements.
+        var scroll_elt = _.find(this.$el.parents(), function(p) {
+            return $(p).css('overflow') === 'auto';
+        });
+
+        // If no scrolling element found, use window.
+        if (!scroll_elt) { scroll_elt = window; }
+
+        // Wrap scrolling element for easy access.
+        this.scroll_elt = $(scroll_elt);
+    },
+
+    /**
+     * Returns true if user has scrolled to the bottom of the view.
+     */
+    scrolled_to_bottom: function() {
+        return (this.$el.height() - this.scroll_elt.scrollTop() - this.scroll_elt.height() <= 0);
+    }
+
+});
+
+/**
+ * Tabular view that is embedded in a page. Scrolling occurs in the view's el.
+ */
+var EmbeddedTabularDatasetChunkedView = TabularDatasetChunkedView.extend({
+
+    initialize: function(options) {
+        TabularDatasetChunkedView.prototype.initialize.call(this, options);
+
+        // Because view is embedded, set up div to do scrolling.
+        this.scroll_elt = this.$el.css({
+            position: 'relative',
+            overflow: 'scroll',
+            height: options.height || '500px'
+        });
+    },
+
+    /**
+     * Returns true if user has scrolled to the bottom of the view.
+     */
+    scrolled_to_bottom: function() {
+        return this.$el.scrollTop() + this.$el.innerHeight() >= this.el.scrollHeight;
+    }
+
+});
+
+// button for trackster visualization
+var TabularButtonTracksterView = Backbone.View.extend({
+
+    // gene region columns
+    col: {
+        chrom   : null,
+        start   : null,
+        end     : null
+    },
+
+    // url for trackster
+    url_viz     : null,
+
+    // dataset id
+    dataset_id  : null,
+
+    // database key
+    genome_build: null,
+
+    // data type
+    file_ext   : null,
+
+    // backbone initialize
+    initialize: function (options) {
+        // check if environment is available
+        var Galaxy = parent.Galaxy;
+
+        // link galaxy modal or create one
+        if (Galaxy && Galaxy.modal) {
+            this.modal = Galaxy.modal;
+        }
+
+        // link galaxy frames
+        if (Galaxy && Galaxy.frame) {
+            this.frame = Galaxy.frame;
+        }
+
+        // check
+        if (!this.modal || !this.frame) {
+            return;
+        }
+
+        // model/metadata
+        var model       = options.model;
+        var metadata    = model.get('metadata');
+
+        // check for datatype
+        if (!model.get('file_ext')) {
+            return;
+        }
+
+        // get data type
+        this.file_ext = model.get('file_ext');
+
+        // check for bed-file format
+        if (this.file_ext == 'bed') {
+            // verify that metadata exists
+            if (metadata.get('chromCol') && metadata.get('startCol') && metadata.get('endCol')) {
+                // read in columns
+                this.col.chrom   = metadata.get('chromCol') - 1;
+                this.col.start   = metadata.get('startCol') - 1;
+                this.col.end     = metadata.get('endCol') - 1;
+            } else {
+                console.log('TabularButtonTrackster : Bed-file metadata incomplete.');
+                return;
+            }
+        }
+
+        // check for vcf-file format
+        if (this.file_ext == 'vcf') {
+            // return the index of the first entry in array matching str, or -1
+            var search = function (str, array) {
+                for (var j = 0; j < array.length; j++) {
+                    if (array[j].match(str)) { return j; }
+                }
+                return -1;
+            };
+
+            // load
+            this.col.chrom = search('Chrom', metadata.get('column_names'));
+            this.col.start = search('Pos', metadata.get('column_names'));
+            this.col.end   = null;
+
+            // verify that metadata exists
+            if (this.col.chrom == -1 || this.col.start == -1) {
+                console.log('TabularButtonTrackster : VCF-file metadata incomplete.');
+                return;
+            }
+        }
+
+        // check
+        if(this.col.chrom === undefined) {
+            return;
+        }
+
+        // get dataset id
+        if (model.id) {
+            this.dataset_id = model.id;
+        } else {
+            console.log('TabularButtonTrackster : Dataset identification is missing.');
+            return;
+        }
+
+        // get url
+        if (model.get('url_viz')) {
+            this.url_viz = model.get('url_viz');
+        } else {
+            console.log('TabularButtonTrackster : Url for visualization controller is missing.');
+            return;
+        }
+
+        // get genome_build / database key
+        if (model.get('genome_build')) {
+            this.genome_build = model.get('genome_build');
+        }
+
+        // create the icon
+        var btn_viz = new mod_icon_btn.IconButtonView({
+            model : new mod_icon_btn.IconButton({
+                title       : 'Visualize',
+                icon_class  : 'chart_curve',
+                id          : 'btn_viz'
+            })
+        });
+
+        // set element
+        this.setElement(options.$el);
+
+        // add to element
+        this.$el.append(btn_viz.render().$el);
+
+        // hide the button
+        this.hide();
+    },
+
+    // backbone events
+    events: {
+        'mouseover tr'  : 'show',
+        'mouseleave'    : 'hide'
+    },
+
+    // show button
+    show: function (e) {
+        // true if n parses as a finite number
+        var is_numeric = function (n) {
+            return !isNaN(parseFloat(n)) && isFinite(n);
+        };
+
+        // check
+        if(this.col.chrom === null)
+            return;
+
+        // get selected data line
+        var row = $(e.target).parent();
+
+        // verify that location has been found
+        var chrom = row.children().eq(this.col.chrom).html();
+        var start = row.children().eq(this.col.start).html();
+
+        // end is optional
+        var end = this.col.end ? row.children().eq(this.col.end).html() : start;
+
+        // double check location
+        if (!chrom.match("^#") && chrom !== "" && is_numeric(start)) {
+
+            // get target gene region
+            var btn_viz_pars = {
+                dataset_id  : this.dataset_id,
+                gene_region : chrom + ":" + start + "-" + end
+            };
+
+            // get button position
+            var offset  = row.offset();
+            var left    = offset.left - 10;
+            var top     = offset.top - $(window).scrollTop() + 3;
+
+            // update css
+            $('#btn_viz').css({'position': 'fixed', 'top': top + 'px', 'left': left + 'px'});
+            $('#btn_viz').off('click');
+            $('#btn_viz').click(this.create_trackster_action(this.url_viz, btn_viz_pars, this.genome_build));
+
+            // show the button
+            $('#btn_viz').show();
+        } else {
+            // hide the button
+            $('#btn_viz').hide();
+        }
+    },
+
+    // hide button
+    hide: function () {
+        this.$el.find('#btn_viz').hide();
+    },
+
+    // create action
+    create_trackster_action : function (vis_url, dataset_params, dbkey) {
+        // link this
+        var self = this;
+
+        // create function
+        return function() {
+            var listTracksParams = {};
+            if (dbkey) {
+                listTracksParams[ 'f-dbkey' ] = dbkey;
+            }
+            $.ajax({
+                url: vis_url + '/list_tracks?' + $.param( listTracksParams ),
+                dataType: 'html',
+                error: function() {
+                    // show error message
+                    self.modal.show({
+                        title   : 'Something went wrong!',
+                        body    : 'Unfortunately we could not add this dataset to the track browser. Please try again or contact us.',
+                        buttons : {
+                            'Cancel': function(){
+                                self.modal.hide();
+                            }
+                        }
+                    });
+                },
+                success: function(table_html) {
+                    self.modal.show({
+                        title   : 'View Data in a New or Saved Visualization',
+                        buttons :{
+                            'Cancel': function(){
+                                self.modal.hide();
+                            },
+                            'View in saved visualization': function(){
+                                // show modal with saved visualizations
+                                self.modal.show(
+                                {
+                                    title   : 'Add Data to Saved Visualization',
+                                    body    : table_html,
+                                    buttons : {
+                                        'Cancel': function(){
+                                            self.modal.hide();
+                                        },
+                                        'Add to visualization': function(){
+                                            // hide
+                                            self.modal.hide();
+
+                                            // search selected fields
+                                            self.modal.$el.find('input[name=id]:checked').each(function(){
+                                                // get visualization id
+                                                var vis_id = $(this).val();
+                                                dataset_params.id = vis_id;
+
+                                                // add widget
+                                                self.frame.add({
+                                                    title    : 'Trackster',
+                                                    type     : 'url',
+                                                    content  : vis_url + '/trackster?' + $.param(dataset_params)
+                                                });
+                                            });
+                                        }
+                                    }
+                                });
+                            },
+                            'View in new visualization': function(){
+                                // hide
+                                self.modal.hide();
+
+                                // add widget
+                                self.frame.add({
+                                    title    : 'Trackster',
+                                    type     : 'url',
+                                    content  : vis_url + '/trackster?' + $.param(dataset_params)
+                                });
+                            }
+                        }
+                    });
+                }
+            });
+            return false;
+        };
+    }
+});
+
+// -- Utility functions. --
+
+/**
+ * Create a model, attach it to a view, render the view, and append it to a parent element.
+ */
+var createModelAndView = function(model, view, model_config, parent_elt) {
+    // Create model, view.
+    var a_view = new view({
+        model: new model(model_config)
+    });
+
+    // Render view and add to parent element.
+    a_view.render();
+    if (parent_elt) {
+        parent_elt.append(a_view.$el);
+    }
+
+    return a_view;
+};
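+
+/* A minimal usage sketch (MyModel/MyView are hypothetical stand-ins for any
+ * compatible Backbone model/view pair; config and container are placeholders):
+ * @example
+ *     var shown = createModelAndView( MyModel, MyView,
+ *                                     { id: 'abc123' }, $( '#container' ) );
+ */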
+
+/**
+ * Create a tabular dataset chunked view (and requisite tabular dataset model)
+ * and append it to parent_elt.
+ */
+var createTabularDatasetChunkedView = function(options) {
+    // If no model, create and set model from dataset config.
+    if (!options.model) {
+        options.model = new TabularDataset(options.dataset_config);
+    }
+
+    var parent_elt = options.parent_elt;
+    var embedded = options.embedded;
+
+    // Clean up options so that only needed options are passed to view.
+    delete options.embedded;
+    delete options.parent_elt;
+    delete options.dataset_config;
+
+    // Create and set up view.
+    var view = (embedded ? new EmbeddedTabularDatasetChunkedView(options) :
+                           new TopLevelTabularDatasetChunkedView(options));
+    view.render();
+
+    if (parent_elt) {
+        parent_elt.append(view.$el);
+        // If we're sticking this in another element, once it's appended check
+        // to make sure we've filled enough space.
+        // Without this, the scroll elements don't work.
+        view.expand_to_container();
+    }
+
+    return view;
+};
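+
+/* A minimal usage sketch (the dataset_config values are placeholders and a
+ * reachable Galaxy chunk endpoint is assumed for fetching data):
+ * @example
+ *     createTabularDatasetChunkedView({
+ *         dataset_config : { id: 'abc123', metadata_columns: 2 },
+ *         embedded       : true,
+ *         height         : '400px',
+ *         parent_elt     : $( 'body' )
+ *     });
+ */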
+
+return {
+    Dataset: Dataset,
+    TabularDataset: TabularDataset,
+    DatasetCollection: DatasetCollection,
+    TabularDatasetChunkedView: TabularDatasetChunkedView,
+    createTabularDatasetChunkedView: createTabularDatasetChunkedView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/dataset/dataset-choice.js b/client/galaxy/scripts/mvc/dataset/dataset-choice.js
new file mode 100644
index 0000000..14a1347
--- /dev/null
+++ b/client/galaxy/scripts/mvc/dataset/dataset-choice.js
@@ -0,0 +1,431 @@
+define([
+    'mvc/dataset/dataset-model',
+    'mvc/dataset/dataset-list',
+    'mvc/ui/ui-modal',
+    'mvc/base-mvc',
+    'utils/localization'
+], function( DATASET, DATASET_LIST, MODAL, BASE_MVC, _l ){
+'use strict';
+
+var logNamespace = 'dataset';
+/* ============================================================================
+TODO:
+    does this really work with mixed contents?
+    Single dataset choice: allow none?
+    tooltips rendered *behind* modal
+    collection selector
+    better handling when no results returned from filterDatasetJSON
+    pass optional subtitle from choice display to modal
+    onfirstclick
+    drop target
+    return modal with promise?
+
+    auto showing the modal may not be best
+
+============================================================================ */
+/** Filters an array of dataset plain JSON objs.
+ */
+function _filterDatasetJSON( datasetJSON, where, datasetsOnly ){
+//TODO: replace with _.matches (underscore 1.6.0)
+    function matches( obj, toMatch ){
+        for( var key in toMatch ){
+            if( toMatch.hasOwnProperty( key ) ){
+                if( obj[ key ] !== toMatch[ key ] ){
+                    return false;
+                }
+            }
+        }
+        return true;
+    }
+
+    return datasetJSON.filter( function( json ){
+        return ( !json.deleted && json.visible )
+            && ( !datasetsOnly || json.collection_type === undefined )
+            && ( matches( json, where ) );
+    });
+}
+
+// ============================================================================
+/** Given an array of plain JSON objs rep. datasets, show a modal allowing a choice
+ *      of one or more of those datasets.
+ *
+ *  Pass:
+ *      an array of plain JSON objects representing allowed dataset choices
+ *      a map of options (see below)
+ *
+ *  Options:
+ *      datasetsOnly:   T: display only datasets, F: datasets + dataset collections
+ *      where:          a map of attributes available choices *must have* (defaults to { state: 'ok' })
+ *      multiselect:    T: user can select more than one, F: only one
+ *      selected:       array of dataset ids to make them selected by default
+ *
+ *  @example:
+ *      var datasetJSON = // from ajax or bootstrap
+ *      // returns a jQuery promise (that 'fail's only if no datasets are found matching 'where' below)
+ *      var choice = new DatasetChoiceModal( datasetJSON, {
+ *          datasetsOnly    : false,
+ *          where           : { state: 'ok', file_ext: 'bed', ... },
+ *          multiselect     : true,
+ *          selected        : [ 'df7a1f0c02a5b08e', 'abcdef0123456789' ]
+ *
+ *      }).done( function( json ){
+ *          if( json ){
+ *              console.debug( json );
+ *              // returned choice will always be an array (single or multi)
+ *              // [ <selected dataset JSON 1>, <selected dataset JSON 2>, ... ]
+ *              // ... do stuff
+ *          } else {
+ *              // json will === null if the user cancelled selection
+ *              console.debug( 'cancelled' );
+ *          }
+ *      });
+ */
+var DatasetChoiceModal = function( datasetJSON, options ){
+
+    // option defaults
+    options = _.defaults( options || {}, {
+        // show datasets or datasets and collections
+        datasetsOnly    : true,
+        // map of attributes to filter datasetJSON by
+        where           : { state: 'ok' },
+        // select more than one dataset?
+        multiselect     : false,
+        // any dataset ids that will display as already selected
+        selected        : []
+    });
+    // default title should depend on multiselect
+    options.title = options.title ||
+        ( options.multiselect? _l( 'Choose datasets:' ): _l( 'Choose a dataset:' ) );
+
+    var modal, list, buttons,
+        promise = jQuery.Deferred(),
+        filterFn = options.filter || _filterDatasetJSON;
+
+    // filter the given datasets and if none left return a rejected promise for use with fail()
+    datasetJSON = filterFn( datasetJSON, options.where, options.datasetsOnly );
+    if( !datasetJSON.length ){
+        return promise.reject( 'No matches found' );
+    }
+
+    // resolve the returned promise with the json of the selected datasets
+    function resolveWithSelected(){
+        promise.resolve( list.getSelectedModels().map( function( model ){
+            return model.toJSON();
+        }));
+    }
+    // if multiselect - add a button for the user to complete the changes
+    if( options.multiselect ){
+        buttons = {};
+        buttons[ _l( 'Ok' ) ] = resolveWithSelected;
+    }
+
+    // create a full-height modal that's cancellable, remove unneeded elements and styles
+    modal = new MODAL.View({
+        height              : 'auto',
+        buttons             : buttons,
+        closing_events      : true,
+        closing_callback    : function(){ promise.resolve( null ); },
+        body                : [
+                '<div class="list-panel"></div>'
+            ].join('')
+    });
+    modal.$( '.modal-header' ).remove();
+    modal.$( '.modal-footer' ).css( 'margin-top', '0px' );
+
+    // attach a dataset list (of the filtered datasets) to that modal that's selectable
+    list = new DATASET_LIST.DatasetList({
+        title       : options.title,
+        subtitle    : options.subtitle || _l([
+//TODO: as option
+                'Click the checkboxes on the right to select datasets. ',
+                'Click the dataset names to see their details. '
+            ].join('')),
+        el          : modal.$body.find( '.list-panel' ),
+        selecting   : true,
+        selected    : options.selected,
+        collection  : new DATASET.DatasetAssociationCollection( datasetJSON )
+    });
+
+    // when the list is rendered, show the modal (also add a specifying class for css)
+    list.once( 'rendered:initial', function(){
+        modal.show();
+        modal.$el.addClass( 'dataset-choice-modal' );
+    });
+    if( !options.multiselect ){
+        // if single select, remove the all/none list actions from the panel
+        list.on( 'rendered', function(){
+            list.$( '.list-actions' ).hide();
+        });
+        // if single select, immediately resolve on a single selection
+        list.on( 'view:selected', function( view ){
+            promise.resolve([ view.model.toJSON() ]);
+        });
+    }
+    list.render( 0 );
+
+    // return the promise, and on any resolution close the modal
+    return promise.always( function(){
+        modal.hide();
+    });
+};
+
+
+// ============================================================================
+/** Activator for single dataset selection modal and display of the selected dataset.
+ *      The activator/display will show as a single div and, when a dataset is selected,
+ *      show the name and details of the selected dataset.
+ *
+ *      When clicked the div will generate a DatasetChoiceModal of the available choices.
+ *
+ *  Options:
+ *      datasetJSON:    array of plain json objects representing allowed choices
+ *      datasetsOnly:   T: only show datasets in the allowed choices, F: datasets + collections
+ *      where:          map of attributes to filter datasetJSON by (e.g. { file_ext: 'bed' })
+ *      label:          the label/prompt displayed
+ *      selected:       array of dataset ids that will show as already selected in the control
+ *
+ *  @example:
+ *      var choice1 = new DATASET_CHOICE.DatasetChoice({
+ *          datasetJSON : datasetJSON,
+ *          label       : 'Input dataset',
+ *          selected    : [ 'df7a1f0c02a5b08e' ]
+ *      });
+ *      $( 'body' ).append( choice1.render().$el )
+ *
+ *  Listen to the DatasetChoice to react to changes in the user's choice/selection:
+ *  @example:
+ *      choice1.on( 'selected', function( chooser, selectionJSONArray ){
+ *          // ... do stuff with new selections
+ *      });
+ */
+var DatasetChoice = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    className : 'dataset-choice',
+
+    /** set up defaults, options, and listeners */
+    initialize : function( attributes ){
+        this.debug( this + '(DatasetChoice).initialize:', attributes );
+
+        this.label = attributes.label !== undefined? _l( attributes.label ) : '';
+        this.where = attributes.where;
+        this.datasetsOnly = attributes.datasetsOnly !== undefined? attributes.datasetsOnly: true;
+
+        this.datasetJSON = attributes.datasetJSON || [];
+        this.selected = attributes.selected || [];
+
+        this._setUpListeners();
+    },
+
+    /** add any (bbone) listeners */
+    _setUpListeners : function(){
+        //this.on( 'all', function(){
+        //    this.log( this + '', arguments );
+        //});
+    },
+
+    /** render the view */
+    render : function(){
+        var json = this.toJSON();
+        this.$el.html( this._template( json ) );
+        this.$( '.selected' ).replaceWith( this._renderSelected( json ) );
+        return this;
+    },
+
+    /** return plain html for the overall control */
+    _template : function( json ){
+        return _.template([
+            '<label>',
+                '<span class="prompt"><%- label %></span>',
+                '<div class="selected"></div>',
+            '</label>'
+        ].join(''))( json );
+    },
+
+    /** return jQ DOM for the selected dataset (only one) */
+    _renderSelected : function( json ){
+        if( json.selected.length ){
+//TODO: break out?
+            return $( _.template([
+                '<div class="selected">',
+                    '<span class="title"><%- selected.hid %>: <%- selected.name %></span>',
+                    '<span class="subtitle">',
+                        '<i><%- selected.misc_blurb %></i>',
+                        '<i>', _l( 'format' ) + ': ', '<%- selected.file_ext %></i>',
+                        '<i><%- selected.misc_info %></i>',
+                    '</span>',
+                '</div>'
+            ].join( '' ), { variable : 'selected' })( json.selected[0] ));
+        }
+        return $([
+            '<span class="none-selected-msg">(',
+                _l( 'click to select a dataset' ),
+            ')</span>'
+        ].join( '' ));
+    },
+
+//TODO:?? why not just pass in view?
+    /** return a plain JSON object with both the view and dataset attributes */
+    toJSON : function(){
+        var chooser = this;
+        return {
+            label       : chooser.label,
+            datasets    : chooser.datasetJSON,
+            selected    : _.compact( _.map( chooser.selected, function( id ){
+                return _.findWhere( chooser.datasetJSON, { id: id });
+            }))
+        };
+    },
+
+    /** event map: when to open the modal */
+    events : {
+        // the whole thing functions as a button
+        'click' : 'chooseWithModal'
+    },
+
+//TODO:?? modal to prop of this?
+//TODO:?? should be able to handle 'none selectable' on initialize
+    /** open the modal and handle the promise representing the user's choice
+     *  @fires 'selected' when the user selects dataset(s) - passed full json of the selected datasets
+     *  @fires 'cancelled' when the user clicks away/closes the modal (no selection made) - passed this
+     *  @fires 'error' if the modal has no selectable datasets based on this.where - passed this and other args
+     */
+    chooseWithModal : function(){
+        var chooser = this;
+
+        return this._createModal()
+            .done( function( json ){
+                if( json ){
+                    chooser.selected = _.pluck( json, 'id' );
+                    chooser.trigger( 'selected', chooser, json );
+                    chooser.render();
+
+                } else {
+                    chooser.trigger( 'cancelled', chooser );
+                }
+            })
+
+            .fail( function(){
+                chooser.trigger( 'error', chooser, arguments );
+            });
+    },
+
+    /** create and return the modal to use for choosing */
+    _createModal : function(){
+        return new DatasetChoiceModal( this.datasetJSON, this._getModalOptions() );
+    },
+
+    /** return a plain JSON containing the options to pass to the modal */
+    _getModalOptions : function(){
+        return {
+            title           : this.label,
+            multiselect     : false,
+            selected        : this.selected,
+            where           : this.where,
+            datasetsOnly    : this.datasetsOnly
+        };
+    },
+
+    // ------------------------------------------------------------------------ misc
+    /** string rep */
+    toString : function(){
+        return 'DatasetChoice(' + this.selected + ')';
+    }
+});
+
+
+// ============================================================================
+/** Activator for multiple dataset selection modal and display of the selected datasets.
+ *      The activator/display will show as a table of all choices.
+ *
+ *  See DatasetChoice (above) for example usage.
+ *
+ *  Additional options:
+ *      showHeaders:    T: show headers for selected dataset attributes in the display table
+ *      cells:          map of attribute keys -> human-readable/localized column headers
+ *          (e.g. { file_ext: _l( 'Format' ) }) - defaults are listed below
+ */
+var MultiDatasetChoice = DatasetChoice.extend({
+
+    className : DatasetChoice.prototype.className + ' multi',
+
+    /** default (dataset attribute key -> table header text) map of what cells to display in the table */
+    cells : {
+        hid             : _l( 'History #' ),
+        name            : _l( 'Name' ),
+        misc_blurb      : _l( 'Summary' ),
+        file_ext        : _l( 'Format' ),
+        genome_build    : _l( 'Genome' ),
+        tags            : _l( 'Tags' ),
+        annotation      : _l( 'Annotation' )
+    },
+
+    /** in this override, add the showHeaders and cells options */
+    initialize : function( attributes ){
+        this.showHeaders = attributes.showHeaders !== undefined? attributes.showHeaders : true;
+        this.cells = attributes.cells || this.cells;
+        DatasetChoice.prototype.initialize.call( this, attributes );
+    },
+
+    /** in this override, display the selected datasets as a table with optional headers */
+    _renderSelected : function( json ){
+        if( json.selected.length ){
+            return $( _.template([
+                '<table class="selected">',
+                    '<% if( json.showHeaders ){ %>',
+                        '<thead><tr>',
+                            '<% _.map( json.cells, function( val, key ){ %>',
+                                '<th><%- val %></th>',
+                            '<% }); %>',
+                        '</tr></thead>',
+                    '<% } %>',
+                    '<tbody>',
+                        '<% _.map( json.selected, function( selected ){ %>',
+                            '<tr>',
+                                '<% _.map( json.cells, function( val, key ){ %>',
+                                    '<td class="cell-<%- key %>"><%- selected[ key ] %></td>',
+                                '<% }) %>',
+                            '</tr>',
+                        '<% }); %>',
+                    '</tbody>',
+                '</table>'
+            ].join( '' ), { variable: 'json' })( json ));
+        }
+        return $([
+            '<span class="none-selected-msg">(',
+                _l( 'click to select a dataset' ),
+            ')</span>'
+        ].join( '' ));
+    },
+
+    /** in this override, send the showHeaders and cells options as well */
+    toJSON : function(){
+        return _.extend( DatasetChoice.prototype.toJSON.call( this ), {
+            showHeaders : this.showHeaders,
+            cells       : this.cells
+        });
+    },
+
+    /** in this override, set multiselect to true */
+    _getModalOptions : function(){
+        return _.extend( DatasetChoice.prototype._getModalOptions.call( this ), {
+            multiselect : true
+        });
+    },
+
+    // ------------------------------------------------------------------------ misc
+    /** string rep */
+    toString : function(){
+        return 'MultiDatasetChoice(' + this.selected + ')';
+    }
+});
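+
+/* A minimal usage sketch (datasetJSON as in the DatasetChoice example above;
+ * the cells override shown here is illustrative):
+ * @example
+ *     var multi = new MultiDatasetChoice({
+ *         datasetJSON : datasetJSON,
+ *         label       : 'Input datasets',
+ *         cells       : { hid: _l( 'History #' ), name: _l( 'Name' ) }
+ *     });
+ *     $( 'body' ).append( multi.render().$el );
+ *     multi.on( 'selected', function( chooser, selectionJSONArray ){ ... });
+ */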
+
+
+// ============================================================================
+    return {
+        DatasetChoiceModal      : DatasetChoiceModal,
+        DatasetChoice           : DatasetChoice,
+        MultiDatasetChoice      : MultiDatasetChoice
+    };
+});
diff --git a/client/galaxy/scripts/mvc/dataset/dataset-li-edit.js b/client/galaxy/scripts/mvc/dataset/dataset-li-edit.js
new file mode 100644
index 0000000..ea097fc
--- /dev/null
+++ b/client/galaxy/scripts/mvc/dataset/dataset-li-edit.js
@@ -0,0 +1,432 @@
+define([
+    "mvc/dataset/states",
+    "mvc/dataset/dataset-li",
+    "mvc/tag",
+    "mvc/annotation",
+    "ui/fa-icon-button",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( STATES, DATASET_LI, TAGS, ANNOTATIONS, faIconButton, BASE_MVC, _l ){
+
+'use strict';
+//==============================================================================
+var _super = DATASET_LI.DatasetListItemView;
+/** @class Editing view for DatasetAssociation.
+ */
+var DatasetListItemEdit = _super.extend(
+/** @lends DatasetListItemEdit.prototype */{
+
+    /** set up: options */
+    initialize  : function( attributes ){
+        _super.prototype.initialize.call( this, attributes );
+        this.hasUser = attributes.hasUser;
+
+        /** allow user purge of dataset files? */
+        this.purgeAllowed = attributes.purgeAllowed || false;
+
+        //TODO: move to HiddenUntilActivatedViewMixin
+        /** should the tags editor be shown or hidden initially? */
+        this.tagsEditorShown        = attributes.tagsEditorShown || false;
+        /** should the annotation editor be shown or hidden initially? */
+        this.annotationEditorShown  = attributes.annotationEditorShown || false;
+    },
+
+    // ......................................................................... titlebar actions
+    /** In this override, add the other two primary actions: edit and delete */
+    _renderPrimaryActions : function(){
+        var actions = _super.prototype._renderPrimaryActions.call( this );
+        if( this.model.get( 'state' ) === STATES.NOT_VIEWABLE ){
+            return actions;
+        }
+        // render the display, edit attr and delete icon-buttons
+        return actions.concat([
+            this._renderEditButton(),
+            this._renderDeleteButton()
+        ]);
+    },
+
+    //TODO: move titleButtons into state renderers, remove state checks in the buttons
+
+    /** Render icon-button to edit the attributes (format, permissions, etc.) this dataset. */
+    _renderEditButton : function(){
+        // don't show edit while uploading or when inaccessible
+        // DO show if in error (ala previous history panel)
+        if( ( this.model.get( 'state' ) === STATES.DISCARDED )
+        ||  ( !this.model.get( 'accessible' ) ) ){
+            return null;
+        }
+
+        var purged = this.model.get( 'purged' ),
+            deleted = this.model.get( 'deleted' ),
+            editBtnData = {
+                title       : _l( 'Edit attributes' ),
+                href        : this.model.urls.edit,
+                target      : this.linkTarget,
+                faIcon      : 'fa-pencil',
+                classes     : 'edit-btn'
+            };
+
+        // disable if purged or deleted and explain why in the tooltip
+        if( deleted || purged ){
+            editBtnData.disabled = true;
+            if( purged ){
+                editBtnData.title = _l( 'Cannot edit attributes of datasets removed from disk' );
+            } else if( deleted ){
+                editBtnData.title = _l( 'Undelete dataset to edit attributes' );
+            }
+
+        // disable if still uploading or new
+        } else if( _.contains( [ STATES.UPLOAD, STATES.NEW ], this.model.get( 'state' ) ) ){
+            editBtnData.disabled = true;
+            editBtnData.title = _l( 'This dataset is not yet editable' );
+        }
+        return faIconButton( editBtnData );
+    },
+
+    /** Render icon-button to delete this hda. */
+    _renderDeleteButton : function(){
+        // don't show delete if...
+        if( ( !this.model.get( 'accessible' ) ) ){
+            return null;
+        }
+
+        var self = this,
+            deletedAlready = this.model.isDeletedOrPurged();
+        return faIconButton({
+                title       : !deletedAlready? _l( 'Delete' ) : _l( 'Dataset is already deleted' ),
+                disabled    : deletedAlready,
+                faIcon      : 'fa-times',
+                classes     : 'delete-btn',
+                onclick     : function() {
+                    // tooltips can be left behind in the DOM (mouseout is never triggered on deletion)
+                    self.$el.find( '.icon-btn.delete-btn' ).trigger( 'mouseout' );
+                    self.model[ 'delete' ]();
+                }
+        });
+    },
+
+    // ......................................................................... details
+    /** In this override, add tags and annotations controls, make the ? dbkey a link to editing page */
+    _renderDetails : function(){
+        //TODO: generalize to be allow different details for each state
+        var $details = _super.prototype._renderDetails.call( this ),
+            state = this.model.get( 'state' );
+
+        if( !this.model.isDeletedOrPurged() && _.contains([ STATES.OK, STATES.FAILED_METADATA ], state ) ){
+            this._renderTags( $details );
+            this._renderAnnotation( $details );
+            this._makeDbkeyEditLink( $details );
+        }
+
+        this._setUpBehaviors( $details );
+        return $details;
+    },
+
+    /**************************************************************************
+     * Render help button to show tool help text without rerunning the tool.
+     * Issue #2100
+     */
+    _renderToolHelpButton : function() {
+        var datasetID = this.model.attributes.dataset_id;
+        var jobID = this.model.attributes.creating_job;
+        var self = this;
+
+        var parseToolBuild = function(data) {
+            var helpString = '<div id="thdiv-' + datasetID + '" class="toolhelp">';
+            if (data.name && data.help){
+                helpString += '<strong>Tool help for ' + data.name + '</strong><hr/>';
+                helpString += data.help;
+            } else {
+                helpString += '<strong>Tool help is unavailable for this dataset.</strong><hr/>';
+            }
+            helpString += '</div>';
+            self.$el.find( '.details' ).append($.parseHTML(helpString));
+        };
+        var parseToolID = function(data) {
+            $.ajax({
+                url: Galaxy.root + 'api/tools/' + data.tool_id + '/build'
+            }).done(function(data){
+                parseToolBuild(data);
+            }).fail(function(){
+                parseToolBuild({});
+            });
+        };
+        if (Galaxy.user.id === null){
+            return null;
+        }
+        return faIconButton({
+            title: 'Tool Help',
+            classes: 'icon-btn',
+            href: '#',
+            faIcon: 'fa-question',
+            onclick: function() {
+                var divString = 'thdiv-' + datasetID;
+                if (self.$el.find(".toolhelp").length > 0){
+                    self.$el.find(".toolhelp").toggle();
+                } else {
+                    $.ajax({
+                        url: Galaxy.root + 'api/jobs/' + jobID
+                    }).done(function(data){
+                        parseToolID(data);
+                    }).fail(function(){
+                        console.log('Failed to recover job information from the Galaxy API for job id "' + jobID + '".');
+                    });
+                }
+            }
+        });
+    },
+    //*************************************************************************
+
+    /** Add less commonly used actions in the details section based on state */
+    _renderSecondaryActions : function(){
+        var actions = _super.prototype._renderSecondaryActions.call( this );
+        switch( this.model.get( 'state' ) ){
+            case STATES.UPLOAD:
+            case STATES.NOT_VIEWABLE:
+                return actions;
+            case STATES.ERROR:
+                // error button comes first
+                actions.unshift( this._renderErrButton() );
+                return actions.concat([ this._renderRerunButton(), this._renderToolHelpButton() ]);
+            case STATES.OK:
+            case STATES.FAILED_METADATA:
+                return actions.concat([ this._renderRerunButton(), this._renderVisualizationsButton(), this._renderToolHelpButton() ]);
+        }
+        return actions.concat([ this._renderRerunButton(), this._renderToolHelpButton() ]);
+    },
+
+    /** Render icon-button to report an error on this dataset to the galaxy admin. */
+    _renderErrButton : function(){
+        return faIconButton({
+            title       : _l( 'View or report this error' ),
+            href        : this.model.urls.report_error,
+            classes     : 'report-error-btn',
+            target      : this.linkTarget,
+            faIcon      : 'fa-bug'
+        });
+    },
+
+    /** Render icon-button to re-run the job that created this dataset. */
+    _renderRerunButton : function(){
+        var creating_job = this.model.get( 'creating_job' );
+        if( this.model.get( 'rerunnable' ) ){
+            return faIconButton({
+                title       : _l( 'Run this job again' ),
+                href        : this.model.urls.rerun,
+                classes     : 'rerun-btn',
+                target      : this.linkTarget,
+                faIcon      : 'fa-refresh',
+                onclick     : function( ev ) {
+                    ev.preventDefault();
+                    // create webpack split point in order to load the tool form async
+                    // TODO: split not working (tool loads fine)
+                    require([ 'mvc/tool/tool-form' ], function( ToolForm ){
+                        var form = new ToolForm.View({ 'job_id' : creating_job });
+                        form.deferred.execute( function(){
+                            Galaxy.app.display( form );
+                        });
+                    });
+                }
+            });
+        }
+    },
+
+    /** Render an icon-button or popupmenu of links based on the applicable visualizations */
+    _renderVisualizationsButton : function(){
+        //TODO: someday - lazyload visualizations
+        var visualizations = this.model.get( 'visualizations' );
+        if( ( this.model.isDeletedOrPurged() )
+        ||  ( !this.hasUser )
+        ||  ( !this.model.hasData() )
+        ||  ( _.isEmpty( visualizations ) ) ){
+            return null;
+        }
+        if( !_.isObject( visualizations[0] ) ){
+            this.warn( 'Visualizations have been switched off' );
+            return null;
+        }
+
+        var $visualizations = $( this.templates.visualizations( visualizations, this ) );
+        //HACK: need to re-write those directed at galaxy_main with linkTarget
+        $visualizations.find( '[target="galaxy_main"]').attr( 'target', this.linkTarget );
+        // use addBack here to include the root $visualizations elem (for the case of 1 visualization)
+        this._addScratchBookFn( $visualizations.find( '.visualization-link' ).addBack( '.visualization-link' ) );
+        return $visualizations;
+    },
+
+    /** add scratchbook functionality to visualization links */
+    _addScratchBookFn : function( $links ){
+        var li = this;
+        $links.click( function( ev ){
+            if( Galaxy.frame && Galaxy.frame.active ){
+                Galaxy.frame.add({
+                    title       : 'Visualization',
+                    url         : $( this ).attr( 'href' )
+                });
+                ev.preventDefault();
+                ev.stopPropagation();
+            }
+        });
+    },
+
+    //TODO: if possible move these to readonly view - but display the owner's tags/annotation (no edit)
+    /** Render the tags list/control */
+    _renderTags : function( $where ){
+        if( !this.hasUser ){ return; }
+        var view = this;
+        this.tagsEditor = new TAGS.TagsEditor({
+            model           : this.model,
+            el              : $where.find( '.tags-display' ),
+            onshowFirstTime : function(){ this.render(); },
+            // persist state on the hda view (and not the editor) since these are currently re-created each time
+            onshow          : function(){ view.tagsEditorShown = true; },
+            onhide          : function(){ view.tagsEditorShown = false; },
+            $activator      : faIconButton({
+                title   : _l( 'Edit dataset tags' ),
+                classes : 'tag-btn',
+                faIcon  : 'fa-tags'
+            }).appendTo( $where.find( '.actions .right' ) )
+        });
+        if( this.tagsEditorShown ){ this.tagsEditor.toggle( true ); }
+    },
+
+    /** Render the annotation display/control */
+    _renderAnnotation : function( $where ){
+        if( !this.hasUser ){ return; }
+        var view = this;
+        this.annotationEditor = new ANNOTATIONS.AnnotationEditor({
+            model           : this.model,
+            el              : $where.find( '.annotation-display' ),
+            onshowFirstTime : function(){ this.render(); },
+            // persist state on the hda view (and not the editor) since these are currently re-created each time
+            onshow          : function(){ view.annotationEditorShown = true; },
+            onhide          : function(){ view.annotationEditorShown = false; },
+            $activator      : faIconButton({
+                title   : _l( 'Edit dataset annotation' ),
+                classes : 'annotate-btn',
+                faIcon  : 'fa-comment'
+            }).appendTo( $where.find( '.actions .right' ) )
+        });
+        if( this.annotationEditorShown ){ this.annotationEditor.toggle( true ); }
+    },
+
+    /** If the format/dbkey/genome_build isn't set, make the display a link to the edit page */
+    _makeDbkeyEditLink : function( $details ){
+        // make the dbkey a link to editing
+        if( this.model.get( 'metadata_dbkey' ) === '?'
+        &&  !this.model.isDeletedOrPurged() ){
+            var editableDbkey = $( '<a class="value">?</a>' )
+                .attr( 'href', this.model.urls.edit )
+                .attr( 'target', this.linkTarget );
+            $details.find( '.dbkey .value' ).replaceWith( editableDbkey );
+        }
+    },
+
+    // ......................................................................... events
+    /** event map */
+    events : _.extend( _.clone( _super.prototype.events ), {
+        'click .undelete-link'  : '_clickUndeleteLink',
+        'click .purge-link'     : '_clickPurgeLink',
+
+        'click .edit-btn'       : function( ev ){ this.trigger( 'edit', this, ev ); },
+        'click .delete-btn'     : function( ev ){ this.trigger( 'delete', this, ev ); },
+        'click .rerun-btn'      : function( ev ){ this.trigger( 'rerun', this, ev ); },
+        'click .report-error-btn' : function( ev ){ this.trigger( 'report-err', this, ev ); },
+        'click .visualization-btn' : function( ev ){ this.trigger( 'visualize', this, ev ); },
+        'click .dbkey a'        : function( ev ){ this.trigger( 'edit', this, ev ); }
+    }),
+
+    /** listener for item undelete (in the messages section) */
+    _clickUndeleteLink : function( ev ){
+        this.model.undelete();
+        return false;
+    },
+
+    /** listener for item purge (in the messages section) */
+    _clickPurgeLink : function( ev ){
+        if( confirm( _l( 'This will permanently remove the data in your dataset. Are you sure?' ) ) ){
+            this.model.purge();
+        }
+        return false;
+    },
+
+    // ......................................................................... misc
+    /** string rep */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'HDAEditView(' + modelString + ')';
+    }
+});
+
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+DatasetListItemEdit.prototype.templates = (function(){
+
+    var warnings = _.extend( {}, _super.prototype.templates.warnings, {
+        failed_metadata : BASE_MVC.wrapTemplate([
+            // in this override, provide a link to the edit page
+            '<% if( dataset.state === "failed_metadata" ){ %>',
+                '<div class="failed_metadata-warning warningmessagesmall">',
+                    _l( 'An error occurred setting the metadata for this dataset' ),
+                    '<br /><a href="<%- dataset.urls.edit %>" target="<%- view.linkTarget %>">',
+                        _l( 'Set it manually or retry auto-detection' ),
+                    '</a>',
+                '</div>',
+            '<% } %>'
+        ], 'dataset' ),
+
+        deleted : BASE_MVC.wrapTemplate([
+            // in this override, provide links to undelete or purge the dataset
+            '<% if( dataset.deleted && !dataset.purged ){ %>',
+                // deleted not purged
+                '<div class="deleted-msg warningmessagesmall">',
+                    _l( 'This dataset has been deleted' ),
+                    '<br /><a class="undelete-link" href="javascript:void(0);">', _l( 'Undelete it' ), '</a>',
+                    '<% if( view.purgeAllowed ){ %>',
+                        '<br /><a class="purge-link" href="javascript:void(0);">',
+                            _l( 'Permanently remove it from disk' ),
+                        '</a>',
+                    '<% } %>',
+                '</div>',
+            '<% } %>'
+        ], 'dataset' )
+    });
+
+    var visualizationsTemplate = BASE_MVC.wrapTemplate([
+        '<% if( visualizations.length === 1 ){ %>',
+            '<a class="visualization-link icon-btn" href="<%- visualizations[0].href %>"',
+                    ' target="<%- visualizations[0].target %>" title="', _l( 'Visualize in' ),
+                    ' <%- visualizations[0].html %>">',
+                '<span class="fa fa-bar-chart-o"></span>',
+            '</a>',
+
+        '<% } else { %>',
+            '<div class="visualizations-dropdown dropdown icon-btn">',
+                '<a data-toggle="dropdown" title="', _l( 'Visualize' ), '">',
+                    '<span class="fa fa-bar-chart-o"></span>',
+                '</a>',
+                '<ul class="dropdown-menu" role="menu">',
+                    '<% _.each( visualizations, function( visualization ){ %>',
+                        '<li><a class="visualization-link" href="<%- visualization.href %>"',
+                                ' target="<%- visualization.target %>">',
+                            '<%- visualization.html %>',
+                        '</a></li>',
+                    '<% }); %>',
+                '</ul>',
+            '</div>',
+        '<% } %>'
+    ], 'visualizations' );
+
+    return _.extend( {}, _super.prototype.templates, {
+        warnings : warnings,
+        visualizations : visualizationsTemplate
+    });
+}());
+
+
+//==============================================================================
+    return {
+        DatasetListItemEdit : DatasetListItemEdit
+    };
+});
diff --git a/client/galaxy/scripts/mvc/dataset/dataset-li.js b/client/galaxy/scripts/mvc/dataset/dataset-li.js
new file mode 100644
index 0000000..55828ad
--- /dev/null
+++ b/client/galaxy/scripts/mvc/dataset/dataset-li.js
@@ -0,0 +1,501 @@
+define([
+    "mvc/list/list-item",
+    "mvc/dataset/states",
+    "ui/fa-icon-button",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( LIST_ITEM, STATES, faIconButton, BASE_MVC, _l ){
+'use strict';
+
+var logNamespace = 'dataset';
+/*==============================================================================
+TODO:
+    straighten out state rendering and templates used
+    inaccessible/STATES.NOT_VIEWABLE is a special case
+    simplify button rendering
+
+==============================================================================*/
+var _super = LIST_ITEM.ListItemView;
+/** @class Read-only list view for either LDDAs, HDAs, or HDADCEs.
+ *      Roughly, any DatasetInstance (and not a raw Dataset).
+ */
+var DatasetListItemView = _super.extend(
+/** @lends DatasetListItemView.prototype */{
+    _logNamespace : logNamespace,
+
+    className   : _super.prototype.className + " dataset",
+    //TODO:?? doesn't exactly match an hda's type_id
+    id          : function(){
+        return [ 'dataset', this.model.get( 'id' ) ].join( '-' );
+    },
+
+    /** Set up: instance vars, options, and event handlers */
+    initialize : function( attributes ){
+        if( attributes.logger ){ this.logger = this.model.logger = attributes.logger; }
+        this.log( this + '.initialize:', attributes );
+        _super.prototype.initialize.call( this, attributes );
+
+        /** where should pages from links be displayed? (default to new tab/window) */
+        this.linkTarget = attributes.linkTarget || '_blank';
+    },
+
+    /** event listeners */
+    _setUpListeners : function(){
+        _super.prototype._setUpListeners.call( this );
+        var self = this;
+
+        // re-rendering on any model changes
+        return self.listenTo( self.model, {
+            'change': function( model, options ){
+                // if the model moved into the ready state and is expanded without details, fetch those details now
+                if( self.model.changedAttributes().state
+                &&  self.model.inReadyState()
+                &&  self.expanded
+                && !self.model.hasDetails() ){
+                    // normally, will render automatically (due to fetch -> change),
+                    // but! setting_metadata sometimes doesn't cause any other changes besides state
+                    // so, not rendering causes it to seem frozen in setting_metadata state
+                    self.model.fetch({ silent : true })
+                        .done( function(){ self.render(); });
+
+                } else {
+                    self.render();
+                }
+            }
+        });
+    },
+
+    // ......................................................................... expandable
+    /** In this override, only fetch details if the dataset is in a ready state and doesn't have them yet.
+     *  Note: fetch with no 'change' event triggering to prevent automatic rendering.
+     */
+    _fetchModelDetails : function(){
+        var view = this;
+        if( view.model.inReadyState() && !view.model.hasDetails() ){
+            return view.model.fetch({ silent: true });
+        }
+        return jQuery.when();
+    },
+
+    // ......................................................................... removal
+    /** Remove this view's html from the DOM and remove all event listeners.
+     *  @param {Number or String} speed jq effect speed
+     *  @param {Function} callback      an optional function called when removal is done (scoped to this view)
+     */
+    remove : function( speed, callback ){
+        var view = this;
+        speed = speed || this.fxSpeed;
+        this.$el.fadeOut( speed, function(){
+            Backbone.View.prototype.remove.call( view );
+            if( callback ){ callback.call( view ); }
+        });
+    },
+
+    // ......................................................................... rendering
+    /* TODO:
+        dataset states are the primary issue making dataset rendering complex
+            each state should have its own way of displaying/its own set of details
+            often with different actions that can be applied
+        throw in deleted/purged/visible and things get complicated easily
+        I've considered (a couple of times) - creating a view for each state
+            - but recreating the view during an update...seems wrong
+    */
+    /** In this override, add the dataset state as a class for use with state-based CSS */
+    _swapNewRender : function( $newRender ){
+        _super.prototype._swapNewRender.call( this, $newRender );
+        if( this.model.has( 'state' ) ){
+            this.$el.addClass( 'state-' + this.model.get( 'state' ) );
+        }
+        return this.$el;
+    },
+
+    // ................................................................................ titlebar
+    /** In this override, add the dataset display button. */
+    _renderPrimaryActions : function(){
+        // render just the display for read-only
+        return [ this._renderDisplayButton() ];
+    },
+
+    /** Render icon-button to display dataset data */
+    _renderDisplayButton : function(){
+        // don't show display if not viewable or not accessible
+        var state = this.model.get( 'state' );
+        if( ( state === STATES.NOT_VIEWABLE )
+        ||  ( state === STATES.DISCARDED )
+        ||  ( !this.model.get( 'accessible' ) ) ){
+            return null;
+        }
+
+        var displayBtnData = {
+            target      : this.linkTarget,
+            classes     : 'display-btn'
+        };
+
+        // show a disabled display if the data's been purged
+        if( this.model.get( 'purged' ) ){
+            displayBtnData.disabled = true;
+            displayBtnData.title = _l( 'Cannot display datasets removed from disk' );
+
+        // disable if still uploading
+        } else if( state === STATES.UPLOAD ){
+            displayBtnData.disabled = true;
+            displayBtnData.title = _l( 'This dataset must finish uploading before it can be viewed' );
+
+        // disable if still new
+        } else if( state === STATES.NEW ){
+            displayBtnData.disabled = true;
+            displayBtnData.title = _l( 'This dataset is not yet viewable' );
+
+        } else {
+            displayBtnData.title = _l( 'View data' );
+
+            // default link for dataset
+            displayBtnData.href  = this.model.urls.display;
+
+            // add frame manager option onclick event
+            var self = this;
+            displayBtnData.onclick = function( ev ){
+                if (Galaxy.frame && Galaxy.frame.active) {
+                    // Add dataset to frames.
+                    Galaxy.frame.addDataset(self.model.get('id'));
+                    ev.preventDefault();
+                }
+            };
+        }
+        displayBtnData.faIcon = 'fa-eye';
+        return faIconButton( displayBtnData );
+    },
+
+    // ......................................................................... rendering details
+    /** Render the enclosing div of the hda body and, if expanded, the html in the body
+     *  @returns {jQuery} rendered DOM
+     */
+    _renderDetails : function(){
+        //TODO: generalize to allow different details for each state
+
+        // no access - render nothing but a message
+        if( this.model.get( 'state' ) === STATES.NOT_VIEWABLE ){
+            return $( this.templates.noAccess( this.model.toJSON(), this ) );
+        }
+
+        var $details = _super.prototype._renderDetails.call( this );
+        $details.find( '.actions .left' ).empty().append( this._renderSecondaryActions() );
+        $details.find( '.summary' ).html( this._renderSummary() )
+            .prepend( this._renderDetailMessages() );
+        $details.find( '.display-applications' ).html( this._renderDisplayApplications() );
+
+        this._setUpBehaviors( $details );
+        return $details;
+    },
+
+    /** Defer to the appropriate summary rendering fn based on state */
+    _renderSummary : function(){
+        var json = this.model.toJSON(),
+            summaryRenderFn = this.templates.summaries[ json.state ];
+        summaryRenderFn = summaryRenderFn || this.templates.summaries.unknown;
+        return summaryRenderFn( json, this );
+    },
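+    /* e.g. (illustrative): a model in state 'ok' renders templates.summaries.ok,
+     *  while an unrecognized state falls back to templates.summaries.unknown */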
+
+    /** Render messages to be displayed only when the details are shown */
+    _renderDetailMessages : function(){
+        var view = this,
+            $warnings = $( '<div class="detail-messages"></div>' ),
+            json = view.model.toJSON();
+        //TODO:! unordered (map)
+        _.each( view.templates.detailMessages, function( templateFn ){
+            $warnings.append( $( templateFn( json, view ) ) );
+        });
+        return $warnings;
+    },
+
+    /** Render the external display application links */
+    _renderDisplayApplications : function(){
+        if( this.model.isDeletedOrPurged() ){ return ''; }
+        // render both old and new display apps using the same template
+        return [
+            this.templates.displayApplications( this.model.get( 'display_apps' ), this ),
+            this.templates.displayApplications( this.model.get( 'display_types' ), this )
+        ].join( '' );
+    },
+
+    // ......................................................................... secondary/details actions
+    /** A series of links/buttons for less commonly used actions: re-run, info, etc. */
+    _renderSecondaryActions : function(){
+        this.debug( '_renderSecondaryActions' );
+        switch( this.model.get( 'state' ) ){
+            case STATES.NOT_VIEWABLE:
+                return [];
+            case STATES.OK:
+            case STATES.FAILED_METADATA:
+            case STATES.ERROR:
+                return [ this._renderDownloadButton(), this._renderShowParamsButton() ];
+        }
+        return [ this._renderShowParamsButton() ];
+    },
+
+    /** Render icon-button to show the input and output (stdout/err) for the job that created this dataset.
+     *  @returns {jQuery} rendered DOM
+     */
+    _renderShowParamsButton : function(){
+        // gen. safe to show in all cases
+        return faIconButton({
+            title       : _l( 'View details' ),
+            classes     : 'params-btn',
+            href        : this.model.urls.show_params,
+            target      : this.linkTarget,
+            faIcon      : 'fa-info-circle',
+            onclick     : function( ev ) {
+                if ( Galaxy.frame && Galaxy.frame.active ) {
+                    Galaxy.frame.add( { title: 'Dataset details', url: this.href } );
+                    ev.preventDefault();
+                    ev.stopPropagation();
+                }
+            }
+        });
+    },
+
+
+    /** Render icon-button/popupmenu to download the data (and/or the associated meta files (bai, etc.)) for this dataset.
+     *  @returns {jQuery} rendered DOM
+     */
+    _renderDownloadButton : function(){
+        // don't show anything if the data's been purged
+        if( this.model.get( 'purged' ) || !this.model.hasData() ){ return null; }
+
+        // return either: a popupmenu with links to download assoc. meta files (if there are meta files)
+        //  or a single download icon-button (if there are no meta files)
+        if( !_.isEmpty( this.model.get( 'meta_files' ) ) ){
+            return this._renderMetaFileDownloadButton();
+        }
+
+        return $([
+            '<a class="download-btn icon-btn" ',
+                'href="', this.model.urls.download, '" title="' + _l( 'Download' ) + '" download>',
+                '<span class="fa fa-floppy-o"></span>',
+            '</a>'
+        ].join( '' ));
+    },
+
+    /** Render the download button which opens a dropdown with links to download assoc. meta files (indices, etc.) */
+    _renderMetaFileDownloadButton : function(){
+        var urls = this.model.urls;
+        return $([
+            '<div class="metafile-dropdown dropdown">',
+                '<a class="download-btn icon-btn" href="javascript:void(0)" data-toggle="dropdown"',
+                    ' title="' + _l( 'Download' ) + '">',
+                    '<span class="fa fa-floppy-o"></span>',
+                '</a>',
+                '<ul class="dropdown-menu" role="menu" aria-labelledby="dLabel">',
+                    '<li><a href="' + urls.download + '" download>', _l( 'Download dataset' ), '</a></li>',
+                    _.map( this.model.get( 'meta_files' ), function( meta_file ){
+                        return [
+                            '<li><a href="', urls.meta_download + meta_file.file_type, '">',
+                                _l( 'Download' ), ' ', meta_file.file_type,
+                            '</a></li>'
+                        ].join( '' );
+                    }).join( '\n' ),
+                '</ul>',
+            '</div>'
+        ].join( '\n' ));
+    },
+
+    // ......................................................................... misc
+    events : _.extend( _.clone( _super.prototype.events ), {
+        'click .display-btn'    : function( ev ){ this.trigger( 'display', this, ev ); },
+        'click .params-btn'     : function( ev ){ this.trigger( 'params', this, ev ); },
+        'click .download-btn'   : function( ev ){ this.trigger( 'download', this, ev ); }
+    }),
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'DatasetListItemView(' + modelString + ')';
+    }
+});
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+DatasetListItemView.prototype.templates = (function(){
+//TODO: move to require text! plugin
+
+    var warnings = _.extend( {}, _super.prototype.templates.warnings, {
+        failed_metadata : BASE_MVC.wrapTemplate([
+            // failed metadata is rendered as a warning on an otherwise ok dataset view
+            '<% if( model.state === "failed_metadata" ){ %>',
+                '<div class="warningmessagesmall">',
+                    _l( 'An error occurred setting the metadata for this dataset' ),
+                '</div>',
+            '<% } %>'
+        ]),
+        error : BASE_MVC.wrapTemplate([
+            // error during index fetch - show error on dataset
+            '<% if( model.error ){ %>',
+                '<div class="errormessagesmall">',
+                    _l( 'There was an error getting the data for this dataset' ), ': <%- model.error %>',
+                '</div>',
+            '<% } %>'
+        ]),
+        purged : BASE_MVC.wrapTemplate([
+            '<% if( model.purged ){ %>',
+                '<div class="purged-msg warningmessagesmall">',
+                    _l( 'This dataset has been deleted and removed from disk' ),
+                '</div>',
+            '<% } %>'
+        ]),
+        deleted : BASE_MVC.wrapTemplate([
+            // deleted not purged
+            '<% if( model.deleted && !model.purged ){ %>',
+                '<div class="deleted-msg warningmessagesmall">',
+                    _l( 'This dataset has been deleted' ),
+                '</div>',
+            '<% } %>'
+        ])
+
+        //NOTE: hidden warning is only needed for HDAs
+    });
+
+    var detailsTemplate = BASE_MVC.wrapTemplate([
+        '<div class="details">',
+            '<div class="summary"></div>',
+
+            '<div class="actions clear">',
+                '<div class="left"></div>',
+                '<div class="right"></div>',
+            '</div>',
+
+            // do not display tags, annotation, display apps, or peek when deleted
+            '<% if( !dataset.deleted && !dataset.purged ){ %>',
+                '<div class="tags-display"></div>',
+                '<div class="annotation-display"></div>',
+
+                '<div class="display-applications"></div>',
+
+                '<% if( dataset.peek ){ %>',
+                    '<pre class="dataset-peek"><%= dataset.peek %></pre>',
+                '<% } %>',
+            '<% } %>',
+        '</div>'
+    ], 'dataset' );
+
+    var noAccessTemplate = BASE_MVC.wrapTemplate([
+        '<div class="details">',
+            '<div class="summary">',
+                _l( 'You do not have permission to view this dataset' ),
+            '</div>',
+        '</div>'
+    ], 'dataset' );
+
+//TODO: still toooooooooooooo complex - rework
+    var summaryTemplates = {};
+    summaryTemplates[ STATES.OK ] = summaryTemplates[ STATES.FAILED_METADATA ] = BASE_MVC.wrapTemplate([
+        '<% if( dataset.misc_blurb ){ %>',
+            '<div class="blurb">',
+                '<span class="value"><%- dataset.misc_blurb %></span>',
+            '</div>',
+        '<% } %>',
+
+        '<% if( dataset.file_ext ){ %>',
+            '<div class="datatype">',
+                '<label class="prompt">', _l( 'format' ), '</label>',
+                '<span class="value"><%- dataset.file_ext %></span>',
+            '</div>',
+        '<% } %>',
+
+        '<% if( dataset.metadata_dbkey ){ %>',
+            '<div class="dbkey">',
+                '<label class="prompt">', _l( 'database' ), '</label>',
+                '<span class="value">',
+                    '<%- dataset.metadata_dbkey %>',
+                '</span>',
+            '</div>',
+        '<% } %>',
+
+        '<% if( dataset.misc_info ){ %>',
+            '<div class="info">',
+                '<span class="value"><%- dataset.misc_info %></span>',
+            '</div>',
+        '<% } %>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.NEW ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'This is a new dataset and not all of its data are available yet' ), '</div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.NOT_VIEWABLE ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'You do not have permission to view this dataset' ), '</div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.DISCARDED ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'The job creating this dataset was cancelled before completion' ), '</div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.QUEUED ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'This job is waiting to run' ), '</div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.RUNNING ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'This job is currently running' ), '</div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.UPLOAD ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'This dataset is currently uploading' ), '</div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.SETTING_METADATA ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'Metadata is being auto-detected' ), '</div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.PAUSED ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'This job is paused. Use the "Resume Paused Jobs" in the history menu to resume' ), '</div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.ERROR ] = BASE_MVC.wrapTemplate([
+        '<% if( !dataset.purged ){ %>',
+            '<div><%- dataset.misc_blurb %></div>',
+        '<% } %>',
+        '<span class="help-text">', _l( 'An error occurred with this dataset' ), ':</span>',
+        '<div class="job-error-text"><%- dataset.misc_info %></div>'
+    ], 'dataset' );
+    summaryTemplates[ STATES.EMPTY ] = BASE_MVC.wrapTemplate([
+        '<div>', _l( 'No data' ), ': <i><%- dataset.misc_blurb %></i></div>'
+    ], 'dataset' );
+    summaryTemplates.unknown = BASE_MVC.wrapTemplate([
+        '<div>Error: unknown dataset state: "<%- dataset.state %>"</div>'
+    ], 'dataset' );
+
+    // messages to be displayed only within the details section ('below the fold')
+    var detailMessageTemplates = {
+        resubmitted : BASE_MVC.wrapTemplate([
+            // the job producing this dataset was resubmitted
+            '<% if( model.resubmitted ){ %>',
+                '<div class="resubmitted-msg infomessagesmall">',
+                    _l( 'The job creating this dataset has been resubmitted' ),
+                '</div>',
+            '<% } %>'
+        ])
+    };
+
+    // this is applied to both old and new style display apps
+    var displayApplicationsTemplate = BASE_MVC.wrapTemplate([
+        '<% _.each( apps, function( app ){ %>',
+            '<div class="display-application">',
+                '<span class="display-application-location"><%- app.label %></span> ',
+                '<span class="display-application-links">',
+                    '<% _.each( app.links, function( link ){ %>',
+                        '<a target="<%- link.target %>" href="<%- link.href %>">',
+                            '<% print( _l( link.text ) ); %>',
+                        '</a> ',
+                    '<% }); %>',
+                '</span>',
+            '</div>',
+        '<% }); %>'
+    ], 'apps' );
+
+    return _.extend( {}, _super.prototype.templates, {
+        warnings    : warnings,
+        details     : detailsTemplate,
+        noAccess    : noAccessTemplate,
+        summaries   : summaryTemplates,
+        detailMessages      : detailMessageTemplates,
+        displayApplications : displayApplicationsTemplate
+    });
+}());
+
+
+// ============================================================================
+    return {
+        DatasetListItemView : DatasetListItemView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/dataset/dataset-list.js b/client/galaxy/scripts/mvc/dataset/dataset-list.js
new file mode 100644
index 0000000..7f00f1c
--- /dev/null
+++ b/client/galaxy/scripts/mvc/dataset/dataset-list.js
@@ -0,0 +1,46 @@
+define([
+    "mvc/list/list-view",
+    "mvc/dataset/dataset-li",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( LIST_VIEW, DATASET_LI, BASE_MVC, _l ){
+'use strict';
+
+var logNamespace = 'dataset';
+/* =============================================================================
+TODO:
+
+============================================================================= */
+var _super = LIST_VIEW.ListPanel;
+/** @class Non-editable, read-only View/Controller for a list of datasets.
+ */
+var DatasetList = _super.extend(
+/** @lends DatasetList.prototype */{
+    _logNamespace : logNamespace,
+
+    /** class to use for constructing the sub-views */
+    viewClass       : DATASET_LI.DatasetListItemView,
+    className       : _super.prototype.className + ' dataset-list',
+
+    /** message to show when no hdas match the search terms */
+    noneFoundMsg    : _l( 'No matching datasets found' ),
+
+    // ......................................................................... SET UP
+    /** Set up the view (delegates to the ListPanel initialize)
+     *  @param {Object} attributes optional settings for the panel
+     */
+    initialize : function( attributes ){
+        _super.prototype.initialize.call( this, attributes );
+    },
+
+    /** Return a string rep of the list */
+    toString : function(){
+        return 'DatasetList(' + this.collection + ')';
+    }
+});
+
+//==============================================================================
+    return {
+        DatasetList : DatasetList
+    };
+});
diff --git a/client/galaxy/scripts/mvc/dataset/dataset-model.js b/client/galaxy/scripts/mvc/dataset/dataset-model.js
new file mode 100644
index 0000000..32f1f32
--- /dev/null
+++ b/client/galaxy/scripts/mvc/dataset/dataset-model.js
@@ -0,0 +1,334 @@
+define([
+    "mvc/dataset/states",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( STATES, BASE_MVC, _l ){
+'use strict';
+
+var logNamespace = 'dataset';
+//==============================================================================
+var searchableMixin = BASE_MVC.SearchableModelMixin;
+/** @class base model for any DatasetAssociation (HDAs, LDDAs, DatasetCollectionDAs).
+ *      No knowledge of what type (HDA/LDDA/DCDA) should be needed here.
+ *  The DA's are made searchable (by attribute) by mixing in SearchableModelMixin.
+ */
+var DatasetAssociation = Backbone.Model
+        .extend( BASE_MVC.LoggableMixin )
+        .extend( BASE_MVC.mixin( searchableMixin, /** @lends DatasetAssociation.prototype */{
+    _logNamespace : logNamespace,
+
+    /** default attributes for a model */
+    defaults : {
+        state               : STATES.NEW,
+        deleted             : false,
+        purged              : false,
+        name                : '(unnamed dataset)',
+        accessible          : true,
+        // sniffed datatype (sam, tabular, bed, etc.)
+        data_type           : '',
+        file_ext            : '',
+        file_size           : 0,
+
+        // array of associated file types (eg. [ 'bam_index', ... ])
+        meta_files          : [],
+
+        misc_blurb          : '',
+        misc_info           : '',
+
+        tags                : []
+        // do NOT provide a default for annotation: the default would be sent on 'save',
+        //  which is incorrect when the model is only partially fetched (annotations are not in summary data)
+        //annotation          : ''
+    },
+
+    /** instance vars and listeners */
+    initialize : function( attributes, options ){
+        this.debug( this + '(Dataset).initialize', attributes, options );
+
+        //!! this state is not in trans.app.model.Dataset.states - set it here -
+        if( !this.get( 'accessible' ) ){
+            this.set( 'state', STATES.NOT_VIEWABLE );
+        }
+
+        /** Datasets rely/use some web controllers - have the model generate those URLs on startup */
+        this.urls = this._generateUrls();
+
+        this._setUpListeners();
+    },
+
+    /** returns misc. web urls for rendering things like re-run, display, etc. */
+    _generateUrls : function(){
+        var id = this.get( 'id' );
+        if( !id ){ return {}; }
+        var urls = {
+            'purge'         : 'datasets/' + id + '/purge_async',
+            'display'       : 'datasets/' + id + '/display/?preview=True',
+            'edit'          : 'datasets/' + id + '/edit',
+            'download'      : 'datasets/' + id + '/display?to_ext=' + this.get( 'file_ext' ),
+            'report_error'  : 'dataset/errors?id=' + id,
+            'rerun'         : 'tool_runner/rerun?id=' + id,
+            'show_params'   : 'datasets/' + id + '/show_params',
+            'visualization' : 'visualization',
+            'meta_download' : 'dataset/get_metadata_file?hda_id=' + id + '&metadata_name='
+        };
+        _.each( urls, function( value, key ){
+            urls[ key ] = Galaxy.root + value;
+        });
+        this.urls = urls;
+        return urls;
+    },
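+    /* Illustrative sketch (not part of the upstream source): for a model with
+     *  id 'abc123' and file_ext 'bed', and Galaxy.root === '/', this yields e.g.:
+     *      urls.display  === '/datasets/abc123/display/?preview=True'
+     *      urls.download === '/datasets/abc123/display?to_ext=bed'
+     *      urls.purge    === '/datasets/abc123/purge_async'
+     */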
+
+    /** set up any event listeners
+     *  event: state:ready  fired when this DA moves into/is already in a ready state
+     */
+    _setUpListeners : function(){
+        // if the state has changed and the new state is a ready state, fire an event
+        this.on( 'change:state', function( currModel, newState ){
+            this.log( this + ' has changed state:', currModel, newState );
+            if( this.inReadyState() ){
+                this.trigger( 'state:ready', currModel, newState, this.previous( 'state' ) );
+            }
+        });
+        // the download url (currently) relies on having a correct file extension
+        this.on( 'change:id change:file_ext', function( currModel ){
+            this._generateUrls();
+        });
+    },
+
+    // ........................................................................ common queries
+    /** override to add urls */
+    toJSON : function(){
+        var json = Backbone.Model.prototype.toJSON.call( this );
+        //console.warn( 'returning json?' );
+        //return json;
+        return _.extend( json, {
+            urls : this.urls
+        });
+    },
+
+    /** Is this dataset deleted or purged? */
+    isDeletedOrPurged : function(){
+        return ( this.get( 'deleted' ) || this.get( 'purged' ) );
+    },
+
+    /** Is this dataset in a 'ready' state, i.e. one where no further
+     *      processing is left to do on the server?
+     */
+    inReadyState : function(){
+        var ready = _.contains( STATES.READY_STATES, this.get( 'state' ) );
+        return ( this.isDeletedOrPurged() || ready );
+    },
+
+    /** Does this model already contain detailed data (as opposed to just summary level data)? */
+    hasDetails : function(){
+        // if it's inaccessible assume it has everything it needs
+        if( !this.get( 'accessible' ) ){ return true; }
+        return this.has( 'annotation' );
+    },
+
+    /** Convenience function to match dataset.has_data. */
+    hasData : function(){
+        return ( this.get( 'file_size' ) > 0 );
+    },
+
+    // ........................................................................ ajax
+    fetch : function( options ){
+        var dataset = this;
+        return Backbone.Model.prototype.fetch.call( this, options )
+            .always( function(){
+                dataset._generateUrls();
+            });
+    },
+
+    /** override to use actual Date objects for create/update times */
+    parse : function( response, options ){
+        var parsed = Backbone.Model.prototype.parse.call( this, response, options );
+        if( parsed.create_time ){
+            parsed.create_time = new Date( parsed.create_time );
+        }
+        if( parsed.update_time ){
+            parsed.update_time = new Date( parsed.update_time );
+        }
+        return parsed;
+    },
+
+    /** override to wait by default */
+    save : function( attrs, options ){
+        options = options || {};
+        options.wait = _.isUndefined( options.wait ) ? true : options.wait;
+        return Backbone.Model.prototype.save.call( this, attrs, options );
+    },
+
+    //NOTE: subclasses of DA's will need to implement url and urlRoot in order to have these work properly
+    /** save this dataset, _Mark_ing it as deleted (just a flag) */
+    'delete' : function( options ){
+        if( this.get( 'deleted' ) ){ return jQuery.when(); }
+        return this.save( { deleted: true }, options );
+    },
+    /** save this dataset, _Mark_ing it as undeleted */
+    undelete : function( options ){
+        if( !this.get( 'deleted' ) || this.get( 'purged' ) ){ return jQuery.when(); }
+        return this.save( { deleted: false }, options );
+    },
+
+    /** remove the file behind this dataset from the filesystem (if permitted) */
+    purge : function _purge( options ){
+        //TODO: use, override model.destroy, HDA.delete({ purge: true })
+        if( this.get( 'purged' ) ){ return jQuery.when(); }
+        options = options || {};
+        options.url = this.urls.purge;
+
+        //TODO: ideally this would be a DELETE call to the api
+        //  using purge async for now
+        var hda = this,
+            xhr = jQuery.ajax( options );
+        xhr.done( function( message, status, responseObj ){
+            hda.set({ deleted: true, purged: true });
+        });
+        xhr.fail( function( xhr, status, message ){
+            // Exception messages are buried within the returned error page
+            //  (e.g. '...not allowed in this Galaxy instance.') - unbury and re-add to the xhr
+            var error = _l( "Unable to purge dataset" );
+            var messageBuriedInUnfortunatelyFormattedError = ( 'Removal of datasets by users '
+                + 'is not allowed in this Galaxy instance' );
+            if( xhr.responseJSON && xhr.responseJSON.error ){
+                error = xhr.responseJSON.error;
+            } else if( xhr.responseText.indexOf( messageBuriedInUnfortunatelyFormattedError ) !== -1 ){
+                error = messageBuriedInUnfortunatelyFormattedError;
+            }
+            xhr.responseText = error;
+            hda.trigger( 'error', hda, xhr, options, _l( error ), { error: error } );
+        });
+        return xhr;
+    },
+
+    // ........................................................................ searching
+    /** what attributes of an HDA will be used in a text search */
+    searchAttributes : [
+        'name', 'file_ext', 'genome_build', 'misc_blurb', 'misc_info', 'annotation', 'tags'
+    ],
+
+    /** our attr keys often don't match the labels we display to the user - so, when using
+     *      attribute specifiers ('name="bler"') in a term, allow passing in aliases for the
+     *      following attr keys.
+     */
+    searchAliases : {
+        title       : 'name',
+        format      : 'file_ext',
+        database    : 'genome_build',
+        blurb       : 'misc_blurb',
+        description : 'misc_blurb',
+        info        : 'misc_info',
+        tag         : 'tags'
+    },
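+    /* Example (illustrative only): with the aliases above, a search term such as
+     *  'format="bed"' is treated as 'file_ext="bed"', and 'database="hg19"' as
+     *  'genome_build="hg19"'.
+     */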
+
+    // ........................................................................ misc
+    /** String representation */
+    toString : function(){
+        var nameAndId = this.get( 'id' ) || '';
+        if( this.get( 'name' ) ){
+            nameAndId = '"' + this.get( 'name' ) + '",' + nameAndId;
+        }
+        return 'Dataset(' + nameAndId + ')';
+    }
+}));
+
+
+//==============================================================================
+/** @class Backbone collection for dataset associations.
+ */
+var DatasetAssociationCollection = Backbone.Collection.extend( BASE_MVC.LoggableMixin ).extend(
+/** @lends HistoryContents.prototype */{
+    _logNamespace : logNamespace,
+
+    model : DatasetAssociation,
+
+    /** root api url */
+    urlRoot : Galaxy.root + 'api/datasets',
+
+    /** url fn */
+    url : function(){
+        return this.urlRoot;
+    },
+
+    // ........................................................................ common queries
+    /** Get the ids of every item in this collection
+     *  @returns array of encoded ids
+     */
+    ids : function(){
+        return this.map( function( item ){ return item.get('id'); });
+    },
+
+    /** Get contents that are not ready
+     *  @returns array of content models
+     */
+    notReady : function(){
+        return this.filter( function( content ){
+            return !content.inReadyState();
+        });
+    },
+
+    /** return true only if every dataset has details */
+    haveDetails : function(){
+        return this.all( function( dataset ){ return dataset.hasDetails(); });
+    },
+
+    // ........................................................................ ajax
+    /** using a queue, perform ajaxFn on each of the models in this collection */
+    ajaxQueue : function( ajaxFn, options ){
+        var deferred = jQuery.Deferred(),
+            startingLength = this.length,
+            responses = [];
+
+        if( !startingLength ){
+            deferred.resolve([]);
+            return deferred;
+        }
+
+        // use reverse order (stylistic choice)
+        var ajaxFns = this.chain().reverse().map( function( dataset, i ){
+            return function(){
+                var xhr = ajaxFn.call( dataset, options );
+                // if successful, notify using the deferred to allow tracking progress
+                xhr.done( function( response ){
+                    deferred.notify({ curr: i, total: startingLength, response: response, model: dataset });
+                });
+                // (regardless of previous error or success) if not last ajax call, shift and call the next
+                //  if last fn, resolve deferred
+                xhr.always( function( response ){
+                    responses.push( response );
+                    if( ajaxFns.length ){
+                        ajaxFns.shift()();
+                    } else {
+                        deferred.resolve( responses );
+                    }
+                });
+            };
+        }).value();
+        // start the queue
+        ajaxFns.shift()();
+
+        return deferred;
+    },
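+    /* Usage sketch (illustrative, not part of the upstream source): delete each
+     *  dataset in the collection, one request at a time, tracking progress:
+     *      collection.ajaxQueue( function( options ){ return this[ 'delete' ]( options ); })
+     *          .progress( function( p ){ console.log( ( p.curr + 1 ) + ' of ' + p.total ); })
+     *          .done( function( responses ){ console.log( 'queue complete', responses ); });
+     */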
+
+    // ........................................................................ sorting/filtering
+    /** return a new collection of datasets whose attributes contain the substring matchesWhat */
+    matches : function( matchesWhat ){
+        return this.filter( function( dataset ){
+            return dataset.matches( matchesWhat );
+        });
+    },
+
+    /** String representation. */
+    toString : function(){
+         return ([ 'DatasetAssociationCollection(', this.length, ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+    return {
+        DatasetAssociation              : DatasetAssociation,
+        DatasetAssociationCollection    : DatasetAssociationCollection
+    };
+});
diff --git a/client/galaxy/scripts/mvc/dataset/states.js b/client/galaxy/scripts/mvc/dataset/states.js
new file mode 100644
index 0000000..2d38b00
--- /dev/null
+++ b/client/galaxy/scripts/mvc/dataset/states.js
@@ -0,0 +1,62 @@
+define([
+], function(){
+
+'use strict';
+//==============================================================================
+/** Map of possible HDA/collection/job states to their string equivalents.
+ *      A port of galaxy.model.Dataset.states.
+ */
+var STATES = {
+    // NOT ready states
+    /** is uploading and not ready */
+    UPLOAD              : 'upload',
+    /** the job that will produce the dataset is queued in the runner */
+    QUEUED              : 'queued',
+    /** the job that will produce the dataset is running */
+    RUNNING             : 'running',
+    /** metadata for the dataset is being discovered/set */
+    SETTING_METADATA    : 'setting_metadata',
+
+    // ready states
+    /** was created without a tool */
+    NEW                 : 'new',
+    /** has no data */
+    EMPTY               : 'empty',
+    /** has successfully completed running */
+    OK                  : 'ok',
+
+    /** the job that will produce the dataset is paused */
+    PAUSED              : 'paused',
+    /** metadata discovery/setting failed or errored (but otherwise ok) */
+    FAILED_METADATA     : 'failed_metadata',
+//TODO: not in trans.app.model.Dataset.states - is in database
+    /** not accessible to the current user (i.e. due to permissions) */
+    NOT_VIEWABLE        : 'noPermission',
+    /** deleted while uploading */
+    DISCARDED           : 'discarded',
+    /** the tool producing this dataset failed */
+    ERROR               : 'error'
+};
+
+STATES.READY_STATES = [
+    STATES.OK,
+    STATES.EMPTY,
+    STATES.PAUSED,
+    STATES.FAILED_METADATA,
+    STATES.NOT_VIEWABLE,
+    STATES.DISCARDED,
+    STATES.ERROR
+];
+
+STATES.NOT_READY_STATES = [
+    STATES.UPLOAD,
+    STATES.QUEUED,
+    STATES.RUNNING,
+    STATES.SETTING_METADATA,
+    STATES.NEW
+];
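+/* Example (illustrative only): readiness is checked by membership in these lists, e.g.
+ *  _.contains( STATES.READY_STATES, STATES.OK )      // => true
+ *  _.contains( STATES.READY_STATES, STATES.RUNNING ) // => false
+ * (this is how DatasetAssociation.inReadyState uses them, with _ being Underscore)
+ */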
+
+
+//==============================================================================
+    return STATES;
+});
diff --git a/client/galaxy/scripts/mvc/form/form-data.js b/client/galaxy/scripts/mvc/form/form-data.js
new file mode 100644
index 0000000..826636c
--- /dev/null
+++ b/client/galaxy/scripts/mvc/form/form-data.js
@@ -0,0 +1,240 @@
+/* This class maps the form DOM to an API-compatible JavaScript dictionary. */
+define([ 'utils/utils' ], function( Utils ) {
+    var Manager = Backbone.Model.extend({
+        initialize: function( app ) {
+            this.app = app;
+        },
+
+        /** Creates a checksum of the current form values (row ids, field values, collapsed flags). */
+        checksum: function() {
+            var sum = '';
+            var self = this;
+            this.app.section.$el.find( '.section-row' ).each( function() {
+                var id = $(this).attr( 'id' );
+                var field = self.app.field_list[ id ];
+                if ( field ) {
+                    sum += id + ':' + JSON.stringify( field.value && field.value() ) + ':' + field.collapsed + ';';
+                }
+            });
+            return sum;
+        },
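+        /* Illustrative sketch (not part of the upstream source): two rows with
+         *  ids 'a' and 'b' whose fields return 1 and 'x' would produce
+         *  'a:1:false;b:"x":false;' (assuming both rows are expanded) - any change
+         *  to a value, to row order, or to a collapsed flag alters the checksum.
+         */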
+
+        /** Convert dom into a dictionary of flat id/value pairs used e.g. on job submission. */
+        create: function() {
+            var self = this;
+
+            // get raw dictionary from dom
+            var dict = {};
+            this._iterate( this.app.section.$el, dict );
+
+            // helper: register the flat id, record the value in the result dictionary, and tag the element with a tour id
+            var result_dict = {};
+            this.flat_dict = {};
+            function add( flat_id, input_id, input_value ) {
+                self.flat_dict[ flat_id ] = input_id;
+                result_dict[ flat_id ] = input_value;
+                self.app.element_list[ input_id ] && self.app.element_list[ input_id ].$el.attr( 'tour_id', flat_id );
+            }
+            // converter between raw dictionary and job dictionary
+            function convert( identifier, head ) {
+                for ( var index in head ) {
+                    var node = head[ index ];
+                    if ( node.input ) {
+                        var input = node.input;
+                        var flat_id = identifier;
+                        if ( identifier != '' ) {
+                            flat_id += '|';
+                        }
+                        flat_id += input.name;
+                        switch ( input.type ) {
+                            case 'repeat':
+                                var section_label = 'section-';
+                                var block_indices = [];
+                                var block_prefix = null;
+                                for ( var block_label in node ) {
+                                    var pos = block_label.indexOf( section_label );
+                                    if ( pos != -1 ) {
+                                        pos += section_label.length;
+                                        block_indices.push( parseInt( block_label.substr( pos ) ));
+                                        if ( !block_prefix ) {
+                                            block_prefix = block_label.substr( 0, pos );
+                                        }
+                                    }
+                                }
+                                block_indices.sort( function( a, b ) { return a - b; });
+                                // use a distinct counter - re-declaring 'index' would clobber the enclosing for-in variable
+                                var blockIndex = 0;
+                                for ( var i in block_indices ) {
+                                    convert( flat_id + '_' + blockIndex++, node[ block_prefix + block_indices[ i ] ]);
+                                }
+                                break;
+                            case 'conditional':
+                                var value = self.app.field_list[ input.id ].value();
+                                add( flat_id + '|' + input.test_param.name, input.id, value );
+                                var selectedCase = matchCase( input, value );
+                                if ( selectedCase != -1 ) {
+                                    convert( flat_id, head[ input.id + '-section-' + selectedCase ] );
+                                }
+                                break;
+                            case 'section':
+                                convert( !input.flat && flat_id || '', node );
+                                break;
+                            default:
+                                var field = self.app.field_list[ input.id ];
+                                if ( field && field.value ) {
+                                    var value = field.value();
+                                    if ( input.ignore === undefined || input.ignore != value ) {
+                                        if ( field.collapsed && input.collapsible_value ) {
+                                            value = input.collapsible_value;
+                                        }
+                                        add( flat_id, input.id, value );
+                                        if ( input.payload ) {
+                                            for ( var p_id in input.payload ) {
+                                                add( p_id, input.id, input.payload[ p_id ] );
+                                            }
+                                        }
+                                    }
+                                }
+                        }
+                    }
+                }
+            }
+            convert( '', dict );
+            return result_dict;
+        },
+
+        /** Looks up the input element id registered for a flat id
+         * @param{string} flat_id - Flat input id to be looked up.
+         */
+        match: function ( flat_id ) {
+            return this.flat_dict && this.flat_dict[ flat_id ];
+        },
+
+        /** Match conditional values to selected cases
+        */
+        matchCase: function( input, value ) {
+            return matchCase( input, value );
+        },
+
+        /** Matches a new tool model to the current input elements e.g. used to update dynamic options
+        */
+        matchModel: function( model, callback ) {
+            var self = this;
+            visitInputs( model.inputs, function( input, name ) {
+                self.flat_dict[ name ] && callback ( input, self.flat_dict[ name ] );
+            });
+        },
+
+        /** Matches identifier from api response to input elements e.g. used to display validation errors
+        */
+        matchResponse: function( response ) {
+            var result = {};
+            var self = this;
+            function search ( id, head ) {
+                if ( typeof head === 'string' ) {
+                    var input_id = self.flat_dict[ id ];
+                    input_id && ( result[ input_id ] = head );
+                } else {
+                    for ( var i in head ) {
+                        var new_id = i;
+                        if ( id !== '' ) {
+                            var separator = '|';
+                            if ( head instanceof Array ) {
+                                separator = '_';
+                            }
+                            new_id = id + separator + new_id;
+                        }
+                        search ( new_id, head[ i ] );
+                    }
+                }
+            }
+            search( '', response );
+            return result;
+        },
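+        /* Example (illustrative only): a response of { cond: { param: 'msg' } }
+         *  is flattened to the id 'cond|param'; if that flat id was registered
+         *  during create(), the result maps the matching input element id to
+         *  'msg'. Members of arrays are joined with '_' instead of '|'.
+         */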
+
+        /** Map dom tree to dictionary tree with input elements.
+        */
+        _iterate: function( parent, dict ) {
+            var self = this;
+            var children = $( parent ).children();
+            children.each( function() {
+                var child = this;
+                var id = $( child ).attr( 'id' );
+                if ( $( child ).hasClass( 'section-row' ) ) {
+                    var input = self.app.input_list[ id ];
+                    dict[ id ] = ( input && { input : input } ) || {};
+                    self._iterate( child, dict[ id ] );
+                } else {
+                    self._iterate( child, dict );
+                }
+            });
+        }
+    });
+
+    /** Match conditional values to selected cases
+     * @param{dict}   input     - Definition of conditional input parameter
+     * @param{string} value     - Current value of the test parameter
+     */
+    var matchCase = function( input, value ) {
+        if ( input.test_param.type == 'boolean' ) {
+            if ( value == 'true' ) {
+                value = input.test_param.truevalue || 'true';
+            } else {
+                value = input.test_param.falsevalue || 'false';
+            }
+        }
+        for ( var i in input.cases ) {
+            if ( input.cases[ i ].value == value ) {
+                return i;
+            }
+        }
+        return -1;
+    };
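+    /* Usage sketch (illustrative, not part of the upstream source):
+     *  var input = { test_param : { type: 'boolean', truevalue: 'yes' },
+     *                cases      : [ { value: 'yes' }, { value: 'false' } ] };
+     *  matchCase( input, 'true' );  // => '0' ('true' is mapped to truevalue 'yes')
+     *  matchCase( input, 'false' ); // => '1' (indices are returned as for-in keys)
+     */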
+
+    /** Visits tool inputs
+     * @param{dict}   inputs    - Nested dictionary of input elements
+     * @param{function} callback - Called with each input node, its flat name, and the current context
+     */
+    var visitInputs = function( inputs, callback, prefix, context ) {
+        context = $.extend( true, {}, context );
+        _.each( inputs, function ( input ) {
+            if ( input && input.type && input.name ) {
+                context[ input.name ] = input;
+            }
+        });
+        for ( var key in inputs ) {
+            var node = inputs[ key ];
+            node.name = node.name || key;
+            var name = prefix ? prefix + '|' + node.name : node.name;
+            switch ( node.type ) {
+                case 'repeat':
+                    _.each( node.cache, function( cache, j ) {
+                        visitInputs( cache, callback, name + '_' + j, context );
+                    });
+                    break;
+                case 'conditional':
+                    if ( node.test_param ) {
+                        callback( node.test_param, name + '|' + node.test_param.name, context );
+                        var selectedCase = matchCase( node, node.test_param.value );
+                        if ( selectedCase != -1 ) {
+                            visitInputs( node.cases[ selectedCase ].inputs, callback, name, context );
+                        } else {
+                            Galaxy.emit.debug( 'form-data::visitInputs() - Invalid case for ' + name + '.' );
+                        }
+                    } else {
+                        Galaxy.emit.debug( 'form-data::visitInputs() - Conditional test parameter missing for ' + name  + '.' );
+                    }
+                    break;
+                case 'section':
+                    visitInputs( node.inputs, callback, name, context );
+                    break;
+                default:
+                    callback( node, name, context );
+            }
+        }
+    };
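+    /* Usage sketch (illustrative, not part of the upstream source): collect the
+     *  flat names of all leaf parameters of a (hypothetical) tool model:
+     *      var names = [];
+     *      visitInputs( model.inputs, function( input, name ){ names.push( name ); });
+     *      // e.g. names === [ 'input1', 'cond|test', 'cond|param', 'repeat_0|file' ]
+     */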
+
+    return {
+        Manager         : Manager,
+        visitInputs     : visitInputs
+    };
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/form/form-input.js b/client/galaxy/scripts/mvc/form/form-input.js
new file mode 100644
index 0000000..efaa3b1
--- /dev/null
+++ b/client/galaxy/scripts/mvc/form/form-input.js
@@ -0,0 +1,128 @@
+/**
+    This class creates a form input element wrapper
+*/
+define([], function() {
+    return Backbone.View.extend({
+        initialize: function( app, options ) {
+            this.app = app;
+            this.app_options = app.options || {};
+            this.field = options && options.field || new Backbone.View();
+            this.model = options && options.model || new Backbone.Model({
+                text_enable     : this.app_options.text_enable   || 'Enable',
+                text_disable    : this.app_options.text_disable  || 'Disable',
+                cls_enable      : this.app_options.cls_enable    || 'fa fa-caret-square-o-down',
+                cls_disable     : this.app_options.cls_disable   || 'fa fa-caret-square-o-up'
+            }).set( options );
+
+            // set element and link components
+            this.setElement( this._template() );
+            this.$field             = this.$( '.ui-form-field' );
+            this.$info              = this.$( '.ui-form-info' );
+            this.$preview           = this.$( '.ui-form-preview' );
+            this.$collapsible       = this.$( '.ui-form-collapsible' );
+            this.$collapsible_text  = this.$( '.ui-form-collapsible-text' );
+            this.$collapsible_icon  = this.$( '.ui-form-collapsible-icon' );
+            this.$title             = this.$( '.ui-form-title' );
+            this.$title_text        = this.$( '.ui-form-title-text' );
+            this.$error_text        = this.$( '.ui-form-error-text' );
+            this.$error             = this.$( '.ui-form-error' );
+            this.$backdrop          = this.$( '.ui-form-backdrop' );
+
+            // add field element
+            this.$field.prepend( this.field.$el );
+
+            // decide whether to expand or collapse fields
+            var collapsible_value = this.model.get( 'collapsible_value' );
+            this.field.collapsed = collapsible_value !== undefined && JSON.stringify( this.model.get( 'value' ) ) == JSON.stringify( collapsible_value );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+
+            // add click handler
+            var self = this;
+            this.$collapsible.on( 'click', function() {
+                self.field.collapsed = !self.field.collapsed;
+                app.trigger && app.trigger( 'change' );
+                self.render();
+            });
+        },
+
+        /** Set backdrop for input element
+        */
+        backdrop: function() {
+            this.model.set( 'backdrop', true );
+        },
+
+        /** Set error text
+        */
+        error: function( text ) {
+            this.model.set( 'error_text', text );
+        },
+
+        /** Reset this view
+        */
+        reset: function() {
+            this.model.set( 'error_text', null );
+        },
+
+        render: function() {
+            // render help
+            $( '.tooltip' ).hide();
+            var help_text = this.model.get( 'help', '' );
+            var help_argument = this.model.get( 'argument' );
+            if ( help_argument && help_text.indexOf( '(' + help_argument + ')' ) == -1 ) {
+                help_text += ' (' + help_argument + ')';
+            }
+            this.$info.html( help_text );
+            // render visibility
+            this.$el[ this.model.get( 'hidden' ) ? 'hide' : 'show' ]();
+            // render preview view for collapsed fields
+            this.$preview[ ( this.field.collapsed && this.model.get( 'collapsible_preview' ) || this.model.get( 'disabled' ) ) ? 'show' : 'hide' ]()
+                         .html( _.escape( this.model.get( 'text_value' ) ) );
+            // render error messages
+            var error_text = this.model.get( 'error_text' );
+            this.$error[ error_text ? 'show' : 'hide' ]();
+            this.$el[ error_text ? 'addClass' : 'removeClass' ]( 'ui-error' );
+            this.$error_text.html( error_text );
+            // render backdrop
+            this.$backdrop[ this.model.get( 'backdrop' ) ? 'show' : 'hide' ]();
+            // render input field
+            this.field.collapsed || this.model.get( 'disabled' ) ? this.$field.hide() : this.$field.show();
+            // render input field color and style
+            this.field.model && this.field.model.set( { 'color': this.model.get( 'color' ), 'style': this.model.get( 'style' ) } );
+            // render collapsible options
+            if ( !this.model.get( 'disabled' ) && this.model.get( 'collapsible_value' ) !== undefined ) {
+                var collapsible_state = this.field.collapsed ? 'enable' : 'disable';
+                this.$title_text.hide();
+                this.$collapsible.show();
+                this.$collapsible_text.text( this.model.get( 'label' ) );
+                this.$collapsible_icon.removeClass().addClass( 'icon' )
+                                      .addClass( this.model.get( 'cls_' +  collapsible_state ) )
+                                      .attr( 'data-original-title', this.model.get( 'text_' + collapsible_state ) )
+                                      .tooltip( { placement: 'bottom' } );
+            } else {
+                this.$title_text.show().text( this.model.get( 'label' ) );
+                this.$collapsible.hide();
+            }
+        },
+
+        _template: function() {
+            return  $( '<div/>' ).addClass( 'ui-form-element' )
+                                 .append( $( '<div/>' ).addClass( 'ui-form-error ui-error' )
+                                    .append( $( '<span/>' ).addClass( 'fa fa-arrow-down' ) )
+                                    .append( $( '<span/>' ).addClass( 'ui-form-error-text' ) )
+                                 )
+                                 .append( $( '<div/>' ).addClass( 'ui-form-title' )
+                                    .append( $( '<div/>' ).addClass( 'ui-form-collapsible' )
+                                        .append( $( '<i/>' ).addClass( 'ui-form-collapsible-icon' ) )
+                                        .append( $( '<span/>' ).addClass( 'ui-form-collapsible-text' ) )
+                                    )
+                                    .append( $( '<span/>' ).addClass( 'ui-form-title-text' ) )
+                                 )
+                                 .append( $( '<div/>' ).addClass( 'ui-form-field' )
+                                    .append( $( '<span/>' ).addClass( 'ui-form-info' ) )
+                                    .append( $( '<div/>' ).addClass( 'ui-form-backdrop' ) )
+                                 )
+                                 .append( $( '<div/>' ).addClass( 'ui-form-preview' ) );
+        }
+    });
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/form/form-parameters.js b/client/galaxy/scripts/mvc/form/form-parameters.js
new file mode 100644
index 0000000..b03f194
--- /dev/null
+++ b/client/galaxy/scripts/mvc/form/form-parameters.js
@@ -0,0 +1,217 @@
+/**
+    This class creates input elements. New input parameter types should be added to the types dictionary.
+*/
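+/**
+    Hedged sketch of extending the types dictionary (the 'upload' type and
+    `_fieldUpload` are hypothetical names used for illustration only):
+
+        define( [ 'mvc/form/form-parameters', 'mvc/ui/ui-misc' ], function( Parameters, Ui ) {
+            var parameters = new Parameters();
+            parameters.types[ 'upload' ] = '_fieldUpload';
+            parameters._fieldUpload = function( input_def ) {
+                // must return a field view exposing value(), as create() expects
+                return new Ui.Input( { id: 'field-' + input_def.id } );
+            };
+            var field = parameters.create( { id: 'x', type: 'upload', value: null } );
+        });
+*/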
+define(['utils/utils',
+        'mvc/ui/ui-misc',
+        'mvc/ui/ui-select-content',
+        'mvc/ui/ui-select-library',
+        'mvc/ui/ui-select-ftp',
+        'mvc/ui/ui-color-picker'],
+    function( Utils, Ui, SelectContent, SelectLibrary, SelectFtp, ColorPicker ) {
+
+    // create parameter field factory
+    return Backbone.Model.extend({
+        /** Available parameter types */
+        types: {
+            'text'              : '_fieldText',
+            'select'            : '_fieldSelect',
+            'data_column'       : '_fieldSelect',
+            'genomebuild'       : '_fieldSelect',
+            'data'              : '_fieldData',
+            'data_collection'   : '_fieldData',
+            'integer'           : '_fieldSlider',
+            'float'             : '_fieldSlider',
+            'boolean'           : '_fieldBoolean',
+            'drill_down'        : '_fieldDrilldown',
+            'color'             : '_fieldColor',
+            'hidden'            : '_fieldHidden',
+            'hidden_data'       : '_fieldHidden',
+            'baseurl'           : '_fieldHidden',
+            'library_data'      : '_fieldLibrary',
+            'ftpfile'           : '_fieldFtp'
+        },
+
+        /** Returns an input field for a given field type */
+        create: function( input_def ) {
+            var fieldClass = this.types[ input_def.type ];
+            var field = typeof( this[ fieldClass ] ) === 'function' ? this[ fieldClass ].call( this, input_def ) : null;
+            if ( !field ) {
+                field = input_def.options ? this._fieldSelect( input_def ) : this._fieldText( input_def );
+                Galaxy.emit.debug('form-parameters::create()', 'Auto matched field type (' + input_def.type + ').');
+            }
+            input_def.value === undefined && ( input_def.value = null );
+            field.value( input_def.value );
+            return field;
+        },
+
+        /** Data input field */
+        _fieldData: function( input_def ) {
+            return new SelectContent.View({
+                id          : 'field-' + input_def.id,
+                extensions  : input_def.extensions,
+                optional    : input_def.optional,
+                multiple    : input_def.multiple,
+                type        : input_def.type,
+                flavor      : input_def.flavor,
+                data        : input_def.options,
+                onchange    : input_def.onchange
+            });
+        },
+
+        /** Select/Checkbox/Radio options field */
+        _fieldSelect: function ( input_def ) {
+            // show text field e.g. in workflow editor
+            if( input_def.is_workflow ) {
+                return this._fieldText( input_def );
+            }
+
+            // customize properties
+            if ( input_def.type == 'data_column' ) {
+                input_def.error_text = 'Missing columns in referenced dataset.';
+            }
+
+            // identify available options
+            var data = input_def.data;
+            if( !data ) {
+                data = [];
+                _.each( input_def.options, function( option ) {
+                    data.push( { label: option[ 0 ], value: option[ 1 ] } );
+                });
+            }
+
+            // identify display type
+            var SelectClass = Ui.Select;
+            switch ( input_def.display ) {
+                case 'checkboxes':
+                    SelectClass = Ui.Checkbox;
+                    break;
+                case 'radio':
+                    SelectClass = Ui.Radio;
+                    break;
+                case 'radiobutton':
+                    SelectClass = Ui.RadioButton;
+                    break;
+            }
+
+            // create select field
+            return new SelectClass.View({
+                id          : 'field-' + input_def.id,
+                data        : data,
+                error_text  : input_def.error_text || 'No options available',
+                multiple    : input_def.multiple,
+                optional    : input_def.optional,
+                onchange    : input_def.onchange,
+                searchable  : input_def.flavor !== 'workflow'
+            });
+        },
+
+        /** Drill down options field */
+        _fieldDrilldown: function ( input_def ) {
+            // show text field e.g. in workflow editor
+            if( input_def.is_workflow ) {
+                return this._fieldText( input_def );
+            }
+
+            // create drill down field
+            return new Ui.Drilldown.View({
+                id          : 'field-' + input_def.id,
+                data        : input_def.options,
+                display     : input_def.display,
+                optional    : input_def.optional,
+                onchange    : input_def.onchange
+            });
+        },
+
+        /** Text input field */
+        _fieldText: function( input_def ) {
+            // field replaces e.g. a select field
+            if ( input_def.options && input_def.data ) {
+                input_def.area = input_def.multiple;
+                if ( Utils.isEmpty( input_def.value ) ) {
+                    input_def.value = null;
+                } else {
+                    if ( $.isArray( input_def.value ) ) {
+                        var str_value = '';
+                        for ( var i in input_def.value ) {
+                            str_value += String( input_def.value[ i ] );
+                            if ( !input_def.multiple ) {
+                                break;
+                            }
+                            str_value += '\n';
+                        }
+                        input_def.value = str_value;
+                    }
+                }
+            }
+            // create input element
+            return new Ui.Input({
+                id          : 'field-' + input_def.id,
+                area        : input_def.area,
+                placeholder : input_def.placeholder,
+                onchange    : input_def.onchange
+            });
+        },
+
+        /** Slider field */
+        _fieldSlider: function( input_def ) {
+            return new Ui.Slider.View({
+                id          : 'field-' + input_def.id,
+                precise     : input_def.type == 'float',
+                is_workflow : input_def.is_workflow,
+                min         : input_def.min,
+                max         : input_def.max,
+                onchange    : input_def.onchange
+            });
+        },
+
+        /** Hidden field */
+        _fieldHidden: function( input_def ) {
+            return new Ui.Hidden({
+                id          : 'field-' + input_def.id,
+                info        : input_def.info
+            });
+        },
+
+        /** Boolean field */
+        _fieldBoolean: function( input_def ) {
+            return new Ui.RadioButton.View({
+                id          : 'field-' + input_def.id,
+                data        : [ { label : 'Yes', value : 'true'  },
+                                { label : 'No',  value : 'false' }],
+                onchange    : input_def.onchange
+            });
+        },
+
+        /** Color picker field */
+        _fieldColor: function( input_def ) {
+            return new ColorPicker({
+                id          : 'field-' + input_def.id,
+                onchange    : input_def.onchange
+            });
+        },
+
+        /** Library dataset field */
+        _fieldLibrary: function( input_def ) {
+            return new SelectLibrary.View({
+                id          : 'field-' + input_def.id,
+                optional    : input_def.optional,
+                multiple    : input_def.multiple,
+                onchange    : input_def.onchange
+            });
+        },
+
+        /** FTP file field */
+        _fieldFtp: function( input_def ) {
+            return new SelectFtp.View({
+                id          : 'field-' + input_def.id,
+                optional    : input_def.optional,
+                multiple    : input_def.multiple,
+                onchange    : input_def.onchange
+            });
+        }
+    });
+
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/form/form-repeat.js b/client/galaxy/scripts/mvc/form/form-repeat.js
new file mode 100644
index 0000000..5736270
--- /dev/null
+++ b/client/galaxy/scripts/mvc/form/form-repeat.js
@@ -0,0 +1,96 @@
+/** This class creates a ui component which enables the dynamic creation of portlets */
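+/** Illustrative usage sketch (assumes this module loaded as `Repeat` and
+    utils/utils as `Utils`; the block content is made up):
+
+        var repeat = new Repeat.View({
+            title : 'Sample',
+            min   : 1,
+            max   : 3,
+            onnew : function() {
+                var id = Utils.uid();
+                repeat.add({
+                    id    : id,
+                    $el   : $( '<div/>' ).text( 'block content' ),
+                    ondel : function() { repeat.del( id ) }
+                });
+            }
+        });
+*/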
+define( [ 'utils/utils', 'mvc/ui/ui-portlet', 'mvc/ui/ui-misc' ],
+function( Utils, Portlet, Ui ) {
+    var View = Backbone.View.extend({
+        initialize: function( options ) {
+            this.list = {};
+            this.options = Utils.merge( options, {
+                title       : 'Repeat',
+                empty_text  : 'Not available.',
+                max         : null,
+                min         : null
+            });
+            this.button_new = new Ui.ButtonIcon({
+                icon    : 'fa-plus',
+                title   : 'Insert ' + this.options.title,
+                tooltip : 'Add new ' + this.options.title + ' block',
+                floating: 'clear',
+                cls     : 'ui-button-icon form-repeat-add',
+                onclick : function() { options.onnew && options.onnew() }
+            });
+            this.setElement( $( '<div/>' ).append( this.$list = $( '<div/>' ) )
+                                          .append( $( '<div/>' ).append( this.button_new.$el ) ) );
+        },
+
+        /** Number of repeat blocks */
+        size: function() {
+            return _.size( this.list );
+        },
+
+        /** Add new repeat block */
+        add: function( options ) {
+            if ( !options.id || this.list[ options.id ] ) {
+                Galaxy.emit.debug( 'form-repeat::add()', 'Duplicate or invalid repeat block id.' );
+                return;
+            }
+            var button_delete = new Ui.ButtonIcon({
+                icon    : 'fa-trash-o',
+                tooltip : 'Delete this repeat block',
+                cls     : 'ui-button-icon-plain form-repeat-delete',
+                onclick : function() { options.ondel && options.ondel() }
+            });
+            var portlet = new Portlet.View({
+                id              : options.id,
+                title           : 'placeholder',
+                cls             : options.cls || 'ui-portlet-repeat',
+                operations      : { button_delete: button_delete }
+            });
+            portlet.append( options.$el );
+            portlet.$el.addClass( 'section-row' ).hide();
+            this.list[ options.id ] = portlet;
+            this.$list.append( portlet.$el.fadeIn( 'fast' ) );
+            this.options.max > 0 && this.size() >= this.options.max && this.button_new.disable();
+            this._refresh();
+        },
+
+        /** Delete repeat block */
+        del: function( id ) {
+            if ( !this.list[ id ] ) {
+                Galaxy.emit.debug( 'form-repeat::del()', 'Invalid repeat block id.' );
+                return;
+            }
+            this.$list.find( '#' + id ).remove();
+            delete this.list[ id ];
+            this.button_new.enable();
+            this._refresh();
+        },
+
+        /** Remove all */
+        delAll: function() {
+            for( var id in this.list ) {
+                this.del( id );
+            }
+        },
+
+        /** Hides add/del options */
+        hideOptions: function() {
+            this.button_new.$el.hide();
+            _.each( this.list, function( portlet ) { portlet.hideOperation( 'button_delete' ) } );
+            _.isEmpty( this.list ) && this.$el.append( $( '<div/>' ).addClass( 'ui-form-info' ).html( this.options.empty_text ) );
+        },
+
+        /** Refresh view */
+        _refresh: function() {
+            var index = 0;
+            for ( var id in this.list ) {
+                var portlet = this.list[ id ];
+                portlet.title( ++index + ': ' + this.options.title );
+                portlet[ this.size() > this.options.min ? 'showOperation' : 'hideOperation' ]( 'button_delete' );
+            }
+        }
+    });
+
+    return {
+        View : View
+    };
+});
diff --git a/client/galaxy/scripts/mvc/form/form-section.js b/client/galaxy/scripts/mvc/form/form-section.js
new file mode 100644
index 0000000..60ab946
--- /dev/null
+++ b/client/galaxy/scripts/mvc/form/form-section.js
@@ -0,0 +1,175 @@
+/**
+    This class creates a form section and populates it with input elements. It also handles repeat blocks and conditionals by recursively creating new subsections.
+*/
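+/**
+    Illustrative sketch of the input dictionaries this view consumes (types
+    follow the switch in add() below; the stand-in `app` object is an
+    assumption mimicking what form-view provides):
+
+        var app = { options: {}, input_list: {}, field_list: {},
+                    element_list: {}, trigger: function() {} };
+        var section = new View( app, { inputs: [
+            { type: 'text',   name: 'title', label: 'Title' },
+            { type: 'repeat', title: 'Sample', min: 1, inputs: [
+                { type: 'integer', name: 'count', min: 0, max: 10 }
+            ] }
+        ] });
+*/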
+define([ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-portlet', 'mvc/form/form-repeat', 'mvc/form/form-input', 'mvc/form/form-parameters' ],
+function( Utils, Ui, Portlet, Repeat, InputElement, Parameters ) {
+    var View = Backbone.View.extend({
+        initialize: function( app, options ) {
+            this.app = app;
+            this.inputs = options.inputs;
+            this.parameters = new Parameters();
+            this.setElement( $( '<div/>' ) );
+            this.render();
+        },
+
+        /** Render section view */
+        render: function() {
+            var self = this;
+            this.$el.empty();
+            _.each( this.inputs, function( input ) { self.add( input ) } );
+        },
+
+        /** Add a new input element */
+        add: function( input ) {
+            var input_def = jQuery.extend( true, {}, input );
+            input_def.id = input.id = Utils.uid();
+            this.app.input_list[ input_def.id ] = input_def;
+            switch( input_def.type ) {
+                case 'conditional':
+                    this._addConditional( input_def );
+                    break;
+                case 'repeat':
+                    this._addRepeat( input_def );
+                    break;
+                case 'section':
+                    this._addSection( input_def );
+                    break;
+                default:
+                    this._addRow( input_def );
+            }
+        },
+
+        /** Add a conditional block */
+        _addConditional: function( input_def ) {
+            var self = this;
+            input_def.test_param.id = input_def.id;
+            this.app.options.sustain_conditionals && ( input_def.test_param.disabled = true );
+            var field = this._addRow( input_def.test_param );
+
+            // set onchange event for test parameter
+            field.model && field.model.set( 'onchange', function( value ) {
+                var selectedCase = self.app.data.matchCase( input_def, value );
+                for ( var i in input_def.cases ) {
+                    var case_def = input_def.cases[ i ];
+                    var section_row = self.$( '#' + input_def.id + '-section-' + i );
+                    var nonhidden = false;
+                    for ( var j in case_def.inputs ) {
+                        if ( !case_def.inputs[ j ].hidden ) {
+                            nonhidden = true;
+                            break;
+                        }
+                    }
+                    if ( i == selectedCase && nonhidden ) {
+                        section_row.fadeIn( 'fast' );
+                    } else {
+                        section_row.hide();
+                    }
+                }
+                self.app.trigger( 'change' );
+            });
+
+            // add conditional sub sections
+            for ( var i in input_def.cases ) {
+                var sub_section = new View( this.app, { inputs: input_def.cases[ i ].inputs } );
+                this._append( sub_section.$el.addClass( 'ui-form-section' ), input_def.id + '-section-' + i );
+            }
+
+            // trigger refresh on conditional input field after all input elements have been created
+            field.trigger( 'change' );
+        },
+
+        /** Add a repeat block */
+        _addRepeat: function( input_def ) {
+            var self = this;
+            var block_index = 0;
+
+            // create repeat block element
+            var repeat = new Repeat.View({
+                title           : input_def.title || 'Repeat',
+                min             : input_def.min,
+                max             : input_def.max,
+                onnew           : function() { create( input_def.inputs ); self.app.trigger( 'change' ); }
+            });
+
+            // helper function to create new repeat blocks
+            function create ( inputs ) {
+                var sub_section_id = input_def.id + '-section-' + ( block_index++ );
+                var sub_section = new View( self.app, { inputs: inputs } );
+                repeat.add( { id      : sub_section_id,
+                              $el     : sub_section.$el,
+                              ondel   : function() { repeat.del( sub_section_id ); self.app.trigger( 'change' ); } } );
+            }
+
+            //
+            // add parsed/minimum number of repeat blocks
+            //
+            var n_cache = _.size( input_def.cache );
+            for ( var i = 0; i < Math.max( Math.max( n_cache, input_def.min ), input_def.default || 0 ); i++ ) {
+                create( i < n_cache ? input_def.cache[ i ] : input_def.inputs );
+            }
+
+            // hide options
+            this.app.options.sustain_repeats && repeat.hideOptions();
+
+            // create input field wrapper
+            var input_element = new InputElement( this.app, {
+                label   : input_def.title || input_def.name,
+                help    : input_def.help,
+                field   : repeat
+            });
+            this._append( input_element.$el, input_def.id );
+        },
+
+        /** Add a customized section */
+        _addSection: function( input_def ) {
+            var portlet = new Portlet.View({
+                title               : input_def.title || input_def.name,
+                cls                 : 'ui-portlet-section',
+                collapsible         : true,
+                collapsible_button  : true,
+                collapsed           : !input_def.expanded
+            });
+            portlet.append( new View( this.app, { inputs: input_def.inputs } ).$el );
+            portlet.append( $( '<div/>' ).addClass( 'ui-form-info' ).html( input_def.help ) );
+            this.app.on( 'expand', function( input_id ) { ( portlet.$( '#' + input_id ).length > 0 ) && portlet.expand(); } );
+            this._append( portlet.$el, input_def.id );
+        },
+
+        /** Add a single input field element */
+        _addRow: function( input_def ) {
+            var self = this;
+            var id = input_def.id;
+            input_def.onchange = input_def.onchange || function() { self.app.trigger( 'change', id ) };
+            var field = this.parameters.create( input_def );
+            this.app.field_list[ id ] = field;
+            var input_element = new InputElement( this.app, {
+                name                : input_def.name,
+                label               : input_def.label || input_def.name,
+                value               : input_def.value,
+                text_value          : input_def.text_value,
+                collapsible_value   : input_def.collapsible_value,
+                collapsible_preview : input_def.collapsible_preview,
+                help                : input_def.help,
+                argument            : input_def.argument,
+                disabled            : input_def.disabled,
+                color               : input_def.color,
+                style               : input_def.style,
+                backdrop            : input_def.backdrop,
+                hidden              : input_def.hidden,
+                field               : field
+            });
+            this.app.element_list[ id ] = input_element;
+            this._append( input_element.$el, input_def.id );
+            return field;
+        },
+
+        /** Append a new element to the form i.e. input element, repeat block, conditionals etc. */
+        _append: function( $el, id ) {
+            this.$el.append( $el.addClass( 'section-row' ).attr( 'id', id ) );
+        }
+    });
+
+    return {
+        View: View
+    };
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/form/form-view.js b/client/galaxy/scripts/mvc/form/form-view.js
new file mode 100644
index 0000000..3684e15
--- /dev/null
+++ b/client/galaxy/scripts/mvc/form/form-view.js
@@ -0,0 +1,145 @@
+/**
+    This is the main class of the form plugin. It is referenced as 'app' in lower level modules.
+*/
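+/**
+    Minimal usage sketch (values are made up; input dictionaries follow the
+    shape produced by the tool parameter to_dict() API mentioned in render()):
+
+        define( [ 'mvc/form/form-view' ], function( Form ) {
+            var form = new Form({
+                title    : 'Demo form',
+                inputs   : [ { type: 'text', name: 'query', label: 'Query' } ],
+                onchange : function() { console.debug( form.data.create() ) }
+            });
+            $( 'body' ).append( form.$el );
+        });
+*/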
+define( [ 'utils/utils', 'mvc/ui/ui-portlet', 'mvc/ui/ui-misc', 'mvc/form/form-section', 'mvc/form/form-data' ],
+function( Utils, Portlet, Ui, FormSection, FormData ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            this.options = Utils.merge( options, {
+                initial_errors  : false,
+                cls             : 'ui-portlet-limited',
+                icon            : null,
+                always_refresh  : true
+            });
+            this.setElement( '<div/>' );
+            this.render();
+        },
+
+        /** Update available options */
+        update: function( new_model ){
+            var self = this;
+            this.data.matchModel( new_model, function( node, input_id ) {
+                var input = self.input_list[ input_id ];
+                if ( input && input.options ) {
+                    if ( !_.isEqual( input.options, node.options ) ) {
+                        input.options = node.options;
+                        var field = self.field_list[ input_id ];
+                        if ( field.update ) {
+                            var new_options = [];
+                            if ( ( [ 'data', 'data_collection', 'drill_down' ] ).indexOf( input.type ) != -1 ) {
+                                new_options = input.options;
+                            } else {
+                                for ( var i in node.options ) {
+                                    var opt = node.options[ i ];
+                                    if ( opt.length > 2 ) {
+                                        new_options.push( { label: opt[ 0 ], value: opt[ 1 ] } );
+                                    }
+                                }
+                            }
+                            field.update( new_options );
+                            field.trigger( 'change' );
+                            Galaxy.emit.debug( 'form-view::update()', 'Updating options for ' + input_id );
+                        }
+                    }
+                }
+            });
+        },
+
+        /** Set form into wait mode */
+        wait: function( active ) {
+            for ( var i in this.input_list ) {
+                var field = this.field_list[ i ];
+                var input = this.input_list[ i ];
+                if ( input.is_dynamic && field.wait && field.unwait ) {
+                    field[ active ? 'wait' : 'unwait' ]();
+                }
+            }
+        },
+
+        /** Highlight and scroll to input element (currently only used for error notifications) */
+        highlight: function ( input_id, message, silent ) {
+            var input_element = this.element_list[ input_id ];
+            if ( input_element ) {
+                input_element.error( message || 'Please verify this parameter.' );
+                this.portlet.expand();
+                this.trigger( 'expand', input_id );
+                if ( !silent ) {
+                    var $panel = this.$el.parents().filter(function() {
+                        return [ 'auto', 'scroll' ].indexOf( $( this ).css( 'overflow' ) ) != -1;
+                    }).first();
+                    $panel.animate( { scrollTop : $panel.scrollTop() + input_element.$el.offset().top - 120 }, 500 );
+                }
+            }
+        },
+
+        /** Highlights errors */
+        errors: function( options ) {
+            this.trigger( 'reset' );
+            if ( options && options.errors ) {
+                var error_messages = this.data.matchResponse( options.errors );
+                for ( var input_id in this.element_list ) {
+                    if ( error_messages[ input_id ] ) {
+                        this.highlight( input_id, error_messages[ input_id ], true );
+                    }
+                }
+            }
+        },
+
+        /** Render tool form */
+        render: function() {
+            var self = this;
+            this.off('change');
+            this.off('reset');
+            // contains the dom field elements as created by the parameter factory i.e. form-parameters
+            this.field_list = {};
+            // contains input definitions/dictionaries as provided by the parameters to_dict() function through the api
+            this.input_list = {};
+            // contains the dom elements of each input element i.e. form-input which wraps the actual input field
+            this.element_list = {};
+            // converts the form into a json data structure
+            this.data = new FormData.Manager( this );
+            this._renderForm();
+            this.data.create();
+            this.options.initial_errors && this.errors( this.options );
+            // add listener which triggers on checksum change, and reset the form input wrappers
+            var current_check = this.data.checksum();
+            this.on('change', function( input_id ) {
+                var input = self.input_list[ input_id ];
+                if ( !input || input.refresh_on_change || self.options.always_refresh ) {
+                    var new_check = self.data.checksum();
+                    if ( new_check != current_check ) {
+                        current_check = new_check;
+                        self.options.onchange && self.options.onchange();
+                    }
+                }
+            });
+            this.on('reset', function() {
+                _.each( self.element_list, function( input_element ) { input_element.reset() } );
+            });
+            return this;
+        },
+
+        /** Renders/appends dom elements of the form */
+        _renderForm: function() {
+            $( '.tooltip' ).remove();
+            this.message = new Ui.Message();
+            this.section = new FormSection.View( this, { inputs: this.options.inputs } );
+            this.portlet = new Portlet.View({
+                icon        : this.options.icon,
+                title       : this.options.title,
+                cls         : this.options.cls,
+                operations  : this.options.operations,
+                buttons     : this.options.buttons,
+                collapsible : this.options.collapsible,
+                collapsed   : this.options.collapsed
+            });
+            this.portlet.append( this.message.$el );
+            this.portlet.append( this.section.$el );
+            this.$el.empty();
+            this.options.inputs && this.$el.append( this.portlet.$el );
+            this.options.message && this.message.update( { persistent: true, status: 'warning', message: this.options.message } );
+            Galaxy.emit.debug( 'form-view::initialize()', 'Completed' );
+        }
+    });
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/grid/grid-model.js b/client/galaxy/scripts/mvc/grid/grid-model.js
new file mode 100644
index 0000000..ee59554
--- /dev/null
+++ b/client/galaxy/scripts/mvc/grid/grid-model.js
@@ -0,0 +1,121 @@
+// dependencies
+define([], function() {
+
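+// Illustrative sketch of how the filter API composes a request URL; the
+// consumer-side name `GridModel` and all values here are assumptions:
+//
+//     define( [ 'mvc/grid/grid-model' ], function( GridModel ) {
+//         var grid = new GridModel({ url_base: '/history/list' });
+//         grid.add_filter( 'name', 'test', true );
+//         grid.add_filter( 'name', 'demo', true );  // 'name' now holds an array
+//         var url = grid.get_url( { webapp: 'galaxy' } );
+//         // => a '/history/list?async=false&...&f-name=...&webapp=galaxy'-style URL
+//     });
+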
+// grid model
+return Backbone.Model.extend({
+    defaults: {
+        url_base: '',
+        async: false,
+        async_ops: [],
+        categorical_filters: [],
+        filters: {},
+        sort_key: null,
+        show_item_checkboxes: false,
+        advanced_search: false,
+        cur_page: 1,
+        num_pages: 1,
+        operation: undefined,
+        item_ids: undefined
+    },
+
+    /**
+     * Return true if operation can be done asynchronously.
+     */
+    can_async_op: function(op) {
+        return _.indexOf(this.attributes.async_ops, op) !== -1;
+    },
+
+    /**
+     * Add filtering criterion.
+     */
+    add_filter: function(key, value, append) {
+        // Update URL arg with new condition.            
+        if (append) {
+            // Update or append value.
+            var cur_val = this.attributes.filters[key],
+                new_val;
+            if (cur_val === null || cur_val === undefined) {
+                new_val = value;
+            } 
+            else if (typeof(cur_val) == 'string') {
+                if (cur_val == 'All') {
+                    new_val = value;
+                } else {
+                    // Replace string with array.
+                    var values = [];
+                    values[0] = cur_val;
+                    values[1] = value;
+                    new_val = values;   
+                }
+            } 
+            else {
+                // Current value is an array.
+                new_val = cur_val;
+                new_val.push(value);
+            }
+            this.attributes.filters[key] = new_val;
+        } 
+        else {
+            // Replace value.
+            this.attributes.filters[key] = value;
+        }
+    },
+
+    /**
+     * Remove filtering criterion.
+     */
+    remove_filter: function(key, condition) {
+        var cur_val = this.attributes.filters[key];
+        if (cur_val === null || cur_val === undefined) {
+            return false;            
+        }
+
+        if (typeof(cur_val) === 'string') {
+            // overwrite/remove condition.
+            this.attributes.filters[key] = '';
+        } else {
+            // filter contains an array of conditions.
+            var condition_index = _.indexOf(cur_val, condition);
+            if (condition_index !== -1) {
+                cur_val[condition_index] = '';
+            }
+        }
+    },
+
+    /**
+     * Returns URL data for obtaining a new grid.
+     */
+    get_url_data: function() {
+        var url_data = {
+            async: this.attributes.async,
+            sort: this.attributes.sort_key,
+            page: this.attributes.cur_page,
+            show_item_checkboxes: this.attributes.show_item_checkboxes,
+            advanced_search: this.attributes.advanced_search
+        };
+
+        // Add operation, item_ids only if they have values.
+        if (this.attributes.operation) {
+            url_data.operation = this.attributes.operation;
+        }
+        if (this.attributes.item_ids) {
+            url_data.id = this.attributes.item_ids;
+        }
+
+        // Add filter arguments to data, placing "f-" in front of all arguments.
+        var self = this;
+        _.each(_.pairs(self.attributes.filters), function(k) {
+            url_data['f-' + k[0]] = k[1];
+        });
+
+        return url_data;
+    },
+    
+    // Return URL for obtaining a new grid
+    get_url: function (args) {
+        return this.get('url_base') + "?" + $.param(this.get_url_data()) + '&' + $.param(args);
+    }
+    
+});
+
+});
diff --git a/client/galaxy/scripts/mvc/grid/grid-template.js b/client/galaxy/scripts/mvc/grid/grid-template.js
new file mode 100644
index 0000000..28b640d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/grid/grid-template.js
@@ -0,0 +1,618 @@
+// dependencies
+define(['utils/utils'], function(Utils) {
+
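+// Illustrative sketch (the options shape is inferred from the templates
+// below; `Templates` names this module's return value in a consumer):
+//
+//     define( [ 'mvc/grid/grid-template' ], function( Templates ) {
+//         var html = Templates.grid({
+//             embedded            : true,
+//             filters             : {},
+//             default_filter_dict : {},
+//             columns             : []
+//         });
+//         $( '#grid' ).html( html );
+//     });
+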
+// grid view templates
+return {
+    // template
+    grid: function(options) {
+        var tmpl = '';
+        if (options.embedded) {
+            tmpl = this.grid_header(options) + this.grid_table(options);
+        } else {
+            tmpl = '<div class="loading-elt-overlay"></div>' +
+                    '<table>' +
+                        '<tr>' +
+                            '<td width="75%">' +
+                                this.grid_header(options) +
+                            '</td>' +
+                            '<td></td>' +
+                            '<td></td>' +
+                        '</tr>' +
+                        '<tr>' +
+                            '<td width="100%" id="grid-message" valign="top"></td>' +
+                            '<td></td>' +
+                            '<td></td>' +
+                        '</tr>' +
+                    '</table>' +
+                    this.grid_table(options);
+        }
+       
+        // add info text
+        if (options.info_text) {
+            tmpl += '<br><div class="toolParamHelp" style="clear: both;">' + options.info_text + '</div>';
+        }
+       
+        // return
+        return tmpl;
+    },
+    
+    // template
+    grid_table: function(options) {
+        return  '<form method="post" onsubmit="return false;">' +
+                    '<table id="grid-table" class="grid">' +
+                        '<thead id="grid-table-header"></thead>' +
+                        '<tbody id="grid-table-body"></tbody>' +
+                        '<tfoot id="grid-table-footer"></tfoot>' +
+                    '</table>' +
+                '</form>';
+    },
+    
+    // template
+    grid_header: function(options) {
+        var tmpl =  '<div class="grid-header">';
+        if (!options.embedded) {
+            tmpl +=     '<h2>' + options.title + '</h2>';
+        }
+        if (options.global_actions) {
+            tmpl +=     '<ul class="manage-table-actions">';
+            var show_popup = (options.global_actions.length >= 3);
+            if (show_popup) {
+                tmpl +=     '<li><a class="action-button" id="popup-global-actions" class="menubutton">Actions</a></li>' +
+                            '<div popupmenu="popup-global-actions">';
+            }
+            for (var i in options.global_actions) {
+                var action = options.global_actions[i];
+                var label_cls = '';
+                if (action.inbound) {
+                    label_cls = 'use-inbound';
+                } else {
+                    label_cls = 'use-outbound';
+                }
+                tmpl +=         '<li>' +
+                                    '<a class="action-button ' + label_cls + '" href="' + action.url_args + '" onclick="return false;">' + action.label + '</a>' +
+                                '</li>';
+            }
+            if (show_popup) {
+                tmpl +=     '</div>';
+            }
+            tmpl +=     '</ul>';
+        }
+        if (options.insert) {
+            tmpl +=     options.insert;
+        }
+       
+        // add grid filters
+        tmpl +=     this.grid_filters(options);
+        tmpl += '</div>';
+        
+        // return template
+        return tmpl;
+    },
+
+    // template
+    header: function(options) {
+        
+        // start
+        var tmpl =      '<tr>';
+        
+        // add checkbox
+        if (options.show_item_checkboxes) {
+            tmpl +=         '<th>';
+            if (options.items.length > 0) {
+                tmpl +=         '<input type="checkbox" id="check_all" name=select_all_checkbox value="true">' +
+                                '<input type="hidden" name=select_all_checkbox value="true">';
+            }
+            tmpl +=         '</th>';
+        }
+        
+        // create header elements
+        for (var i in options.columns) {
+            var column = options.columns[i];
+            if (column.visible) {
+                tmpl +=     '<th id="' + column.key + '-header">';
+                if (column.href) {
+                    tmpl +=     '<a href="' + column.href + '" class="sort-link" sort_key="' + column.key + '">' + column.label + '</a>';
+                } else {
+                    tmpl +=     column.label;
+                }
+                tmpl +=         '<span class="sort-arrow">' + column.extra + '</span>' +
+                            '</th>';
+            }
+        }
+        
+        // finalize
+        tmpl +=        '</tr>';
+        
+        // return template
+        return tmpl;
+    },
+    
+    // template
+    body: function(options) {
+        // initialize
+        var tmpl = '';
+        var num_rows_rendered = 0;
+        var items_length = options.items.length;
+        
+        // empty grid?
+        if (items_length == 0) {
+            // No results.
+            tmpl += '<tr><td colspan="100"><em>No Items</em></td></tr>';
+            num_rows_rendered = 1;
+        }
+        
+        // create rows
+        for (var i in options.items) {
+        
+            // encode ids
+            var item = options.items[i];
+            var encoded_id = item.encode_id;
+            var popupmenu_id = 'grid-' + i + '-popup';
+            
+            // Tag current
+            tmpl += '<tr ';
+            if (options.current_item_id == item.id) {
+                tmpl += 'class="current"';
+            }
+            tmpl += '>';
+            
+            // Item selection column
+            if (options.show_item_checkboxes) {
+                tmpl += '<td style="width: 1.5em;">' +
+                            '<input type="checkbox" name="id" value="' + encoded_id + '" id="' + encoded_id + '" class="grid-row-select-checkbox" />' +
+                        '</td>';
+            }
+            
+            // Data columns
+            for (var j in options.columns) {
+                var column = options.columns[j];
+                if (column.visible) {
+                    // Nowrap
+                    var nowrap = '';
+                    if (column.nowrap) {
+                        nowrap = 'style="white-space:nowrap;"';
+                    }
+                    
+                    // get column settings
+                    var column_settings = item.column_config[column.label];
+                    
+                    // load attributes
+                    var link = column_settings.link;
+                    var value = column_settings.value;
+                    var inbound = column_settings.inbound;
+                        
+                    // collapse double slashes in string values
+                    if (jQuery.type( value ) === 'string') {
+                        value = value.replace(/\/\//g, '/');
+                    }
+                    
+                    // Attach popup menu?
+                    var id = '';
+                    var cls = '';
+                    if (column.attach_popup) {
+                        id = 'grid-' + i + '-popup';
+                        cls = 'menubutton';
+                        if (link) {
+                            cls += ' split';
+                        }
+                        cls += ' popup';
+                    }
+                    
+                    // Check for row wrapping
+                    tmpl += '<td ' + nowrap + '>';
+                
+                    // Link
+                    if (link) {
+                        if (options.operations.length != 0) {
+                            tmpl += '<div id="' + id + '" class="' + cls + '" style="float: left;">';
+                        }
+
+                        var label_class = '';
+                        if (inbound) {
+                            label_class = 'use-inbound';
+                        } else {
+                            label_class = 'use-outbound';
+                        }
+                        tmpl += '<a class="menubutton-label ' + label_class + '" href="' + link + '" onclick="return false;">' + value + '</a>';
+                        if (options.operations.length != 0) {
+                            tmpl += '</div>';
+                        }
+                    } else {
+                        tmpl += '<div id="' + id + '" class="' + cls + '"><label id="' + column.label_id_prefix + encoded_id + '" for="' + encoded_id + '">' + (value || '') + '</label></div>';
+                    }
+                    tmpl += '</td>';
+                }
+            }
+            tmpl += '</tr>';
+            num_rows_rendered++;
+        }
+        return tmpl;
+    },
+    
+    // template
+    footer: function(options) {
+    
+        // create template string
+        var tmpl = '';
+        
+        // paging
+        if (options.use_paging && options.num_pages > 1) {
+            // get configuration
+            var num_page_links      = options.num_page_links;
+            var cur_page_num        = options.cur_page_num;
+            var num_pages           = options.num_pages;
+            
+            // First pass on min page.
+            var page_link_range     = num_page_links / 2;
+            var min_page            = cur_page_num - page_link_range;
+            var min_offset          = 0;
+            if (min_page <= 0) {
+                // Min page is too low.
+                min_page = 1;
+                min_offset = page_link_range - ( cur_page_num - min_page );
+            }
+            
+            // Set max page.
+            var max_range = page_link_range + min_offset;
+            var max_page = cur_page_num + max_range;
+            var max_offset = 0;
+            if (max_page > num_pages) {
+                // Max page is too high.
+                max_page = num_pages;
+                // +1 to account for the +1 in the loop below.
+                max_offset = max_range - ( max_page + 1 - cur_page_num );
+            }
+            
+            // Second and final pass on min page to add any unused
+            // offset from max to min.
+            if (max_offset != 0) {
+                min_page -= max_offset;
+                if (min_page < 1) {
+                    min_page = 1;
+                }
+            }
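+            // Worked example (illustrative numbers): num_page_links = 10,
+            // cur_page_num = 2, num_pages = 50 gives page_link_range = 5;
+            // min_page clamps from -3 to 1 with min_offset = 4, so
+            // max_page = 2 + 9 = 11 and links for pages 1..11 are rendered.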
+            
+            // template header
+            tmpl += '<tr id="page-links-row">';
+            if (options.show_item_checkboxes) {
+                tmpl += '<td></td>';
+            }
+            tmpl +=     '<td colspan="100">' +
+                            '<span id="page-link-container">' +
+                                'Page:';
+            
+            if (min_page > 1) {
+                tmpl +=         '<span class="page-link" id="page-link-1"><a href="javascript:void(0);" page_num="1" onclick="return false;">1</a></span> ...';
+            }
+            
+            // create page urls
+            for (var page_index = min_page; page_index < max_page + 1; page_index++) {
+                
+                if (page_index == options.cur_page_num) {
+                    tmpl +=     '<span class="page-link inactive-link" id="page-link-' + page_index + '">' + page_index + '</span>';
+                } else {
+                    tmpl +=     '<span class="page-link" id="page-link-' + page_index + '"><a href="javascript:void(0);" onclick="return false;" page_num="' + page_index + '">' + page_index + '</a></span>';
+                }
+            }
+            
+            // show last page
+            if (max_page < num_pages) {
+                    tmpl +=     '...' +
+                                '<span class="page-link" id="page-link-' + num_pages + '"><a href="javascript:void(0);" onclick="return false;" page_num="' + num_pages + '">' + num_pages + '</a></span>';
+            }
+            tmpl +=         '</span>';
+            
+            // Show all link
+            tmpl +=         '<span class="page-link" id="show-all-link-span"> | <a href="javascript:void(0);" onclick="return false;" page_num="all">Show All</a></span>' +
+                        '</td>' +
+                    '</tr>';
+        }
+        
+        // Grid operations for multiple items.
+        if (options.show_item_checkboxes) {
+            // start template
+            tmpl += '<tr>' +
+                        '<input type="hidden" id="operation" name="operation" value="">' +
+                        '<td></td>' +
+                        '<td colspan="100">' +
+                            'For <span class="grid-selected-count"></span> selected ' + options.get_class_plural + ': ';
+            
+            // configure buttons for operations
+            for (var i in options.operations) {
+                var operation = options.operations[i];
+                if (operation.allow_multiple) {
+                    tmpl += '<input type="button" value="' + operation.label + '" class="operation-button action-button"> ';
+                }
+            }
+            
+            // finalize template
+            tmpl +=     '</td>' +
+                    '</tr>';
+        }
+    
+        // count global operations
+        var found_global = false;
+        for (var i in options.operations) {
+            if (options.operations[i].global_operation) {
+                found_global = true;
+                break;
+            }
+        }
+    
+        // add global operations
+        if (found_global) {
+            tmpl += '<tr>' +
+                        '<td colspan="100">';
+            for (var i in options.operations) {
+                var operation = options.operations[i];
+                if (operation.global_operation) {
+                    tmpl += '<a class="action-button" href="' + operation.global_operation + '">' + operation.label + '</a>';
+                }
+            }
+            tmpl +=     '</td>' +
+                    '</tr>';
+        }
+        
+        // add legend
+        if (options.legend) {
+            tmpl += '<tr>' +
+                        '<td colspan="100">' + options.legend + '</td>' +
+                    '</tr>';
+        }
+        
+        // return
+        return tmpl;
+    },
+        
+    // template
+    message: function(options) {
+        return  '<div>' +
+                    '<div class="' + options.status + 'message transient-message">' + options.message + '</div>' +
+                    '<div style="clear: both"></div>' +
+                '</div>';
+    },
+    
+    // template
+    grid_filters: function (options) {
+    
+        // get filters
+        var default_filter_dict = options.default_filter_dict;
+        var filters = options.filters;
+
+        // show advanced search if flag set or if there are filters for advanced search fields
+        var advanced_search_display = 'none';
+        if (options.advanced_search) {
+            advanced_search_display = 'block';
+        }
+        
+        // identify columns with advanced filtering
+        var show_advanced_search_link = false;
+        for (var i in options.columns) {
+            var column = options.columns[i];
+            if (column.filterable == 'advanced') {
+                var column_key = column.key;
+                var f_key = filters[column_key];
+                var d_key = default_filter_dict[column_key];
+                if (f_key && d_key && f_key != d_key) {
+                    advanced_search_display = 'block';
+                }
+                show_advanced_search_link = true;
+            }
+        }
+        
+        // hide standard search if advanced is shown
+        var standard_search_display = 'block';
+        if (advanced_search_display == 'block') {
+            standard_search_display = 'none';
+        }
+
+        //
+        // standard search
+        //
+        var tmpl =  '<div id="standard-search" style="display: ' + standard_search_display + ';">' +
+                        '<table>' +
+                            '<tr>' +
+                                '<td style="padding: 0;">' +
+                                    '<table>';
+        
+        // add standard filters
+        for (var i in options.columns) {
+            var column = options.columns[i];
+            if (column.filterable == 'standard') {
+                tmpl +=             this.grid_column_filter(options, column);
+            }
+        }
+        
+        // finalize standard search
+        tmpl +=                     '</table>' +
+                                '</td>' +
+                            '</tr>' +
+                            '<tr>' +
+                                '<td>';
+                
+        // show advanced search link in standard display
+        if (show_advanced_search_link) {
+            tmpl +=                 '<a href="" class="advanced-search-toggle">Advanced Search</a>';
+        }
+        
+        // finalize standard search display
+        tmpl +=                 '</td>' +
+                            '</tr>' +
+                        '</table>' +
+                    '</div>';
+    
+        //
+        // advanced search
+        //
+        tmpl +=     '<div id="advanced-search" style="display: ' + advanced_search_display + '; margin-top: 5px; border: 1px solid #ccc;">' +
+                        '<table>' +
+                            '<tr>' +
+                                '<td style="text-align: left" colspan="100">' +
+                                    '<a href="" class="advanced-search-toggle">Close Advanced Search</a>' +
+                                '</td>' +
+                            '</tr>';
+        
+        // add advanced filters
+        for (var i in options.columns) {
+            var column = options.columns[i];
+            if (column.filterable == 'advanced') {
+                tmpl +=             this.grid_column_filter(options, column);
+            }
+        }
+        
+        // finalize advanced search template
+        tmpl +=         '</table>' +
+                    '</div>';
+
+        // return template
+        return tmpl;
+    },
+    
+    // template
+    grid_column_filter: function(options, column) {
+        
+        // collect parameters
+        var default_filter_dict = options.default_filter_dict;
+        var filters = options.filters;
+        var column_label = column.label;
+        var column_key = column.key;
+        if (column.filterable == 'advanced') {
+            column_label = column_label.toLowerCase();
+        }
+        
+        // start
+        var tmpl =      '<tr>';
+        
+        if (column.filterable == 'advanced') {
+            tmpl +=         '<td align="left" style="padding-left: 10px">' + column_label + ':</td>';
+        }
+        tmpl +=             '<td style="padding-bottom: 1px;">';
+        if (column.is_text) {
+            tmpl +=             '<form class="text-filter-form" column_key="' + column_key + '" action="' + options.url + '" method="get" >';
+            // Carry forward filtering criteria with hidden inputs.
+            for (var i in options.columns) {
+                var temp_column = options.columns[i];
+                var filter_value = filters[temp_column.key];
+                if (filter_value) {
+                    if (filter_value != 'All') {
+                        if (temp_column.is_text) {
+                            filter_value = JSON.stringify( filter_value );
+                        }
+                        tmpl +=     '<input type="hidden" id="' + temp_column.key + '" name="f-' + temp_column.key + '" value="' + filter_value + '"/>';
+                    }
+                }
+            }
+            // Print current filtering criteria and links to delete.
+            tmpl +=                 '<span id="' + column_key + '-filtering-criteria">';
+            
+            // add filters
+            var column_filter = filters[column_key];
+            if (column_filter) {
+                // identify type
+                var type = jQuery.type(column_filter);
+                
+                // single filter value
+                if (type == 'string') {
+                    if (column_filter != 'All') {
+                        // append template
+                        tmpl +=         this.filter_element(column_key, column_filter);
+                    }
+                }
+                
+                // multiple filter values
+                if (type == 'array') {
+                    for (var i in column_filter ) {
+                        // get filter
+                        var filter = column_filter[i];
+
+                        // append template
+                        tmpl +=         this.filter_element(column_key, filter);
+                    }
+                }
+            }
+            
+            // close span
+            tmpl +=                 '</span>';
+            
+            // Set value, size of search input field. Minimum size is 20 characters.
+            var value = '';
+            var size = 20;
+            if (column.filterable == 'standard') {
+                value = column.label.toLowerCase();
+                size = Math.max( value.length, 20 );
+                // +4 to account for space after placeholder
+                size = size + 4;
+            }
+            
+            // print input field for column
+            tmpl +=                 '<span class="search-box">' +
+                                        '<input class="search-box-input" id="input-' + column_key + '-filter" name="f-' + column_key + '" type="text" placeholder="' + value + '" size="' + size + '"/>' +
+                                        '<button type="submit" style="background: transparent; border: none; padding: 4px; margin: 0px;">' +
+                                            '<i class="fa fa-search"></i>' +
+                                        '</button>' +
+                                    '</span>' +
+                                '</form>';
+        } else {
+            // filter criteria
+            tmpl +=             '<span id="' + column_key + '-filtering-criteria">';
+            
+            // add category filters
+            var separator = false;
+            for (var cf_label in options.categorical_filters[column_key]) {
+                // get category filter
+                var cf = options.categorical_filters[column_key][cf_label];
+
+                // each filter will have only a single argument, so get that single argument
+                var cf_key = '';
+                var cf_arg = '';
+                for (var key in cf) {
+                    cf_key = key;
+                    cf_arg = cf[key];
+                }
+
+                // add separator
+                if (separator) {
+                    tmpl += ' | ';
+                }
+                separator = true;
+                
+                // add category
+                var filter = filters[column_key];
+                if (filter && cf[column_key] && filter == cf_arg) {
+                    tmpl +=         '<span class="categorical-filter ' + column_key + '-filter current-filter">' + cf_label + '</span>';
+                } else {
+                    tmpl +=         '<span class="categorical-filter ' + column_key + '-filter">' +
+                                        '<a href="javascript:void(0);" filter_key="' + cf_key + '" filter_val="' + cf_arg + '">' + cf_label + '</a>' +
+                                    '</span>';
+                }
+            }
+            tmpl +=             '</span>';
+        }
+        tmpl +=             '</td>' +
+                        '</tr>';
+        
+        // return template
+        return tmpl;
+    },
+    
+    // template for filter items
+    filter_element: function(filter_key, filter_value) {
+        filter_value = Utils.sanitize(filter_value);
+        return  '<span class="text-filter-val">' + filter_value +
+                    '<a href="javascript:void(0);" filter_key="' + filter_key + '" filter_val="' + filter_value + '">' +
+                        '<i class="fa fa-times" style="padding-left: 5px; padding-bottom: 6px;"/>' +
+                    '</a>' +
+                '</span>';
+
+    }
+};
+
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/grid/grid-view.js b/client/galaxy/scripts/mvc/grid/grid-view.js
new file mode 100644
index 0000000..89476dd
--- /dev/null
+++ b/client/galaxy/scripts/mvc/grid/grid-view.js
@@ -0,0 +1,675 @@
+// This is necessary so that, when nested arrays are used in ajax/post/get methods, square brackets ('[]') are
+// not appended to the identifier of a nested array.
+jQuery.ajaxSettings.traditional = true;
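+// Illustrative example (not from the original source): with traditional
+// serialization, jQuery.param({ id: [ 1, 2 ] }) yields 'id=1&id=2' instead of
+// the default 'id%5B%5D=1&id%5B%5D=2' (i.e. 'id[]=1&id[]=2').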
+
+// dependencies
+define([
+    'mvc/grid/grid-model',
+    'mvc/grid/grid-template',
+    "mvc/ui/popup-menu"
+], function(GridModel, Templates, PopupMenu) {
+
+// grid view
+return Backbone.View.extend({
+
+    // model
+    grid: null,
+
+    // Initialize
+    initialize: function(grid_config)
+    {
+        // set element
+        this.setElement('#grid-container');
+
+        // fix padding
+        if (grid_config.use_panels) {
+            $('#center').css ({
+                padding     : '10px',
+                overflow    : 'auto'
+            });
+        }
+
+        // initialize controls
+        this.init_grid(grid_config);
+    },
+
+    // refresh frames
+    handle_refresh: function (refresh_frames) {
+        if (refresh_frames) {
+            if ($.inArray('history', refresh_frames) > -1) {
+                if( top.Galaxy && top.Galaxy.currHistoryPanel ){
+                    top.Galaxy.currHistoryPanel.loadCurrentHistory();
+                }
+            }
+        }
+    },
+
+    // Initialize
+    init_grid: function(grid_config)
+    {
+        // link grid model
+        this.grid = new GridModel(grid_config);
+
+        // get options
+        var options = this.grid.attributes;
+
+        // handle refresh requests
+        this.handle_refresh(options.refresh_frames);
+
+        // strip protocol and domain
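+        // (e.g. 'https://usegalaxy.org/dataset/list' becomes '/dataset/list')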
+        var url = this.grid.get('url_base');
+        url = url.replace(/^.*\/\/[^\/]+/, '');
+        this.grid.set('url_base', url);
+
+        // append main template
+        this.$el.html(Templates.grid(options));
+
+        // update div contents
+        this.$el.find('#grid-table-header').html(Templates.header(options));
+        this.$el.find('#grid-table-body').html(Templates.body(options));
+        this.$el.find('#grid-table-footer').html(Templates.footer(options));
+
+        // update message
+        if (options.message) {
+            this.$el.find('#grid-message').html(Templates.message(options));
+            var self = this;
+            if (options.use_hide_message) {
+                setTimeout( function() { self.$el.find('#grid-message').html(''); }, 5000);
+            }
+        }
+
+        // configure elements
+        this.init_grid_elements();
+        this.init_grid_controls();
+
+        // attach global event handler
+        // TODO: redundant (the onload/standard page handlers do this) - but needed because these are constructed after page ready
+        init_refresh_on_change();
+    },
+
+    // Initialize grid controls
+    init_grid_controls: function() {
+
+        // link
+        var self = this;
+
+        // Initialize grid operation button.
+        this.$el.find('.operation-button').each(function() {
+            $(this).off();
+            $(this).click(function() {
+                self.submit_operation(this);
+                return false;
+            });
+        });
+
+        // Initialize text filters to select text on click and use normal font when user is typing.
+        this.$el.find('input[type=text]').each(function() {
+            $(this).off();
+            $(this).click(function() { $(this).select(); } )
+                   .keyup(function () { $(this).css('font-style', 'normal'); });
+        });
+
+        // Initialize sort links.
+        this.$el.find('.sort-link').each( function() {
+            $(this).off();
+            $(this).click( function() {
+               self.set_sort_condition( $(this).attr('sort_key') );
+               return false;
+            });
+        });
+
+        // Initialize text filters.
+        this.$el.find('.text-filter-form').each( function() {
+            $(this).off();
+            $(this).submit( function() {
+                var column_key = $(this).attr('column_key');
+                var text_input_obj = $('#input-' + column_key + '-filter');
+                var text_input = text_input_obj.val();
+                text_input_obj.val('');
+                self.add_filter_condition(column_key, text_input);
+                return false;
+            });
+        });
+
+        // Initialize links that remove individual text filter values.
+        this.$el.find('.text-filter-val > a').each( function() {
+            $(this).off();
+            $(this).click( function() {
+                // Remove visible element.
+                $(this).parent().remove();
+
+                // Remove filter condition.
+                self.remove_filter_condition ($(this).attr('filter_key'), $(this).attr('filter_val'));
+
+                // Return
+                return false;
+            });
+        });
+
+        // Initialize categorical filters.
+        this.$el.find('.categorical-filter > a').each( function() {
+            $(this).off();
+            $(this).click( function() {
+                self.set_categorical_filter( $(this).attr('filter_key'), $(this).attr('filter_val') );
+                return false;
+            });
+        });
+
+        // Initialize autocomplete for text inputs in search UI.
+        var t1 = this.$el.find('#input-tags-filter');
+        if (t1.length) {
+            t1.autocomplete(this.grid.history_tag_autocomplete_url,
+                { selectFirst: false, autoFill: false, highlight: false, mustMatch: false });
+        }
+        var t2 = this.$el.find('#input-name-filter');
+        if (t2.length) {
+            t2.autocomplete(this.grid.history_name_autocomplete_url,
+                { selectFirst: false, autoFill: false, highlight: false, mustMatch: false });
+        }
+
+        // Initialize standard, advanced search toggles.
+        this.$el.find('.advanced-search-toggle').each( function() {
+            $(this).off();
+            $(this).click( function() {
+                self.$el.find('#standard-search').slideToggle('fast');
+                self.$el.find('#advanced-search').slideToggle('fast');
+                return false;
+            });
+        });
+
+        // Add event to check all box
+        this.$el.find('#check_all').off();
+        this.$el.find('#check_all').on('click', function() {
+            self.check_all_items();
+        });
+    },
+
+    // Initialize grid elements.
+    init_grid_elements : function() {
+        // Initialize grid selection checkboxes.
+        this.$el.find('.grid').each( function() {
+            var checkboxes = $(this).find("input.grid-row-select-checkbox");
+            var check_count = $(this).find("span.grid-selected-count");
+            var update_checked = function() {
+                check_count.text( $(checkboxes).filter(":checked").length );
+            };
+
+            $(checkboxes).each( function() {
+                $(this).change(update_checked);
+            });
+            update_checked();
+        });
+
+        // Initialize ratings.
+        if (this.$el.find('.community_rating_star').length !== 0)
+            this.$el.find('.community_rating_star').rating({});
+
+        // get options
+        var options = this.grid.attributes;
+        var self = this;
+
+        //
+        // add page click events
+        //
+        this.$el.find('.page-link > a').each( function() {
+            $(this).click( function() {
+               self.set_page( $(this).attr('page_num') );
+               return false;
+            });
+        });
+
+        //
+        // add inbound/outbound events
+        //
+        this.$el.find('.use-inbound').each( function() {
+            $(this).click( function(e) {
+                self.execute({
+                    href : $(this).attr('href'),
+                    inbound : true
+                });
+                return false;
+
+            });
+        });
+
+        this.$el.find('.use-outbound').each( function() {
+            $(this).click( function(e) {
+                self.execute({
+                    href : $(this).attr('href')
+                });
+                return false;
+            });
+        });
+
+        // empty grid?
+        var items_length = options.items.length;
+        if (items_length == 0) {
+            return;
+        }
+
+        //
+        // add operation popup menus
+        //
+        for (var i in options.items)
+        {
+            // get items
+            var item = options.items[i];
+
+            // get identifiers
+            var button = this.$el.find('#grid-' + i + '-popup');
+            button.off();
+            var popup = new PopupMenu(button);
+
+            // load details
+            for (var j in options['operations'])
+            {
+                // get operation details
+                var operation = options['operations'][j];
+                var operation_id = operation['label'];
+                var operation_settings = item['operation_config'][operation_id];
+                var encode_id = item['encode_id'];
+
+                // check
+                if (operation_settings['allowed'] && operation['allow_popup'])
+                {
+                    // popup configuration
+                    var popupConfig =
+                    {
+                        html : operation['label'],
+                        href : operation_settings['url_args'],
+                        target : operation_settings['target'],
+                        confirmation_text : operation['confirm'],
+                        inbound : operation['inbound']
+                    };
+
+                    // add popup function
+                    popupConfig.func = function(e)
+                    {
+                        e.preventDefault();
+                        var label = $(e.target).html();
+                        var options = this.findItemByHtml(label);
+                        self.execute(options);
+                    };
+
+                    // add item
+                    popup.addItem(popupConfig);
+                }
+            }
+        }
+    },
+
+    // Add a condition to the grid filter; this adds the condition and refreshes the grid.
+    add_filter_condition: function (name, value) {
+        // Do nothing if value is empty.
+        if (value === "") {
+            return false;
+        }
+
+        // Add condition to grid.
+        this.grid.add_filter(name, value, true);
+
+        // Add button that displays filter and provides a button to delete it.
+        var t = $(Templates.filter_element(name, value));
+        var self = this;
+        t.click(function() {
+            // Remove visible element.
+            $(this).remove();
+
+            // Remove filter condition.
+            self.remove_filter_condition(name, value);
+        });
+
+        // append to container
+        var container = this.$el.find('#' + name + '-filtering-criteria');
+        container.append(t);
+
+        // execute
+        this.go_page_one();
+        this.execute();
+    },
+
+    // Remove a condition from the grid filter; this removes the condition and refreshes the grid.
+    remove_filter_condition: function (name, value) {
+        // Remove filter condition.
+        this.grid.remove_filter(name, value);
+
+        // Execute
+        this.go_page_one();
+        this.execute();
+    },
+
+    // Set sort condition for grid.
+    set_sort_condition: function (col_key) {
+        // Set new sort condition. If sorting on a new column, sort ascending by col_key;
+        // if the column is already sorted, reverse the direction with a '-' prefix.
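+        // (e.g. clicking the 'name' header while already sorted on 'name' flips the sort to '-name')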
+        var cur_sort = this.grid.get('sort_key');
+        var new_sort = col_key;
+        if (cur_sort.indexOf(col_key) !== -1) {
+            // Reverse sort.
+            if (cur_sort.substring(0, 1) !== '-') {
+                new_sort = '-' + col_key;
+            }
+        }
+
+        // Remove sort arrows elements.
+        this.$el.find('.sort-arrow').remove();
+
+        // Add sort arrow element to new sort column.
+        var sort_arrow = (new_sort.substring(0,1) == '-') ? '↑' : '↓';
+        var t = $('<span>' + sort_arrow + '</span>').addClass('sort-arrow');
+
+        // Add to header
+        this.$el.find('#' + col_key + '-header').append(t);
+
+        // Update grid.
+        this.grid.set('sort_key', new_sort);
+        this.go_page_one();
+        this.execute();
+    },
+
+    // Set new value for categorical filter.
+    set_categorical_filter: function (name, new_value) {
+        // Update filter hyperlinks to reflect new filter value.
+        var category_filter = this.grid.get('categorical_filters')[name],
+            cur_value = this.grid.get('filters')[name];
+        var self = this;
+        this.$el.find('.' + name + '-filter').each( function() {
+            var text = $.trim( $(this).text() );
+            var filter = category_filter[text];
+            var filter_value = filter[name];
+            if (filter_value == new_value) {
+                // Remove filter link since grid will be using this filter. It is assumed that
+                // this element has a single child, a hyperlink/anchor with text.
+                $(this).empty();
+                $(this).addClass('current-filter');
+                $(this).append(text);
+            } else if (filter_value == cur_value) {
+                // Add hyperlink for this filter since grid will no longer be using this filter. It is assumed that
+                // this element has a single child, a hyperlink/anchor.
+                $(this).empty();
+                var t = $('<a href="#">' + text + '</a>');
+                t.click(function() {
+                    self.set_categorical_filter( name, filter_value );
+                });
+                $(this).removeClass('current-filter');
+                $(this).append(t);
+            }
+        });
+
+        // Update grid.
+        this.grid.add_filter(name, new_value);
+        this.go_page_one();
+        this.execute();
+    },
+
+    // Set page to view.
+    set_page: function (new_page) {
+        // Update page hyperlink to reflect new page.
+        var self = this;
+        this.$el.find('.page-link').each( function() {
+            var id = $(this).attr('id'),
+                page_num = parseInt( id.split('-')[2], 10 ), // Id has form 'page-link-<page_num>'
+                cur_page = self.grid.get('cur_page'),
+                text;
+            if (page_num === new_page) {
+                // Remove link to page since grid will be on this page. It is assumed that
+                // this element has a single child, a hyperlink/anchor with text.
+                text = $(this).children().text();
+                $(this).empty();
+                $(this).addClass('inactive-link');
+                $(this).text(text);
+            }
+            else if (page_num === cur_page) {
+                // Add hyperlink to this page since grid will no longer be on this page. It is assumed that
+                // this element has a single child, a hyperlink/anchor.
+                text = $(this).text();
+                $(this).empty();
+                $(this).removeClass('inactive-link');
+                var t = $('<a href="#">' + text + '</a>');
+                t.click(function() {
+                    self.set_page(page_num);
+                });
+                $(this).append(t);
+            }
+        });
+
+        if (new_page === 'all') {
+            this.grid.set('cur_page', new_page);
+        } else {
+            this.grid.set('cur_page', parseInt(new_page, 10));
+        }
+        this.execute();
+    },
+
+    // confirmation/submission of operation request
+    submit_operation: function (operation_button, confirmation_text)
+    {
+        // identify operation
+        var operation_name = $(operation_button).val();
+
+        // verify that at least one item is selected
+        var number_of_checked_ids = this.$el.find('input[name="id"]:checked').length;
+        if (number_of_checked_ids === 0) {
+            return false;
+        }
+
+        // Check to see if there's grid confirmation text for this operation
+        var operation = _.findWhere(this.grid.attributes.operations, {label: operation_name});
+        if (operation && !confirmation_text){
+            confirmation_text = operation.confirm || '';
+        }
+
+        // collect ids
+        var item_ids = [];
+        this.$el.find('input[name=id]:checked').each(function() {
+            item_ids.push( $(this).val() );
+        });
+
+        // execute operation
+        this.execute({
+            operation: operation_name,
+            id: item_ids,
+            confirmation_text: confirmation_text
+        });
+
+        // return
+        return true;
+    },
+
+    check_all_items: function () {
+        var chk_all = document.getElementById('check_all'),
+            checks = document.getElementsByTagName('input'),
+            total = 0,
+            i;
+        if ( chk_all.checked === true ) {
+            for ( i=0; i < checks.length; i++ ) {
+                if ( checks[i].name.indexOf( 'id' ) !== -1) {
+                   checks[i].checked = true;
+                   total++;
+                }
+            }
+        }
+        else {
+            for ( i=0; i < checks.length; i++ ) {
+                if ( checks[i].name.indexOf( 'id' ) !== -1) {
+                   checks[i].checked = false;
+                }
+
+            }
+        }
+        this.init_grid_elements();
+    },
+
+    // Go back to page one; this is useful when a filter is applied.
+    go_page_one: function () {
+        // Need to go back to page 1 if not showing all.
+        var cur_page = this.grid.get('cur_page');
+        if (cur_page !== null && cur_page !== undefined && cur_page !== 'all') {
+            this.grid.set('cur_page', 1);
+        }
+    },
+
+    //
+    // execute operations and hyperlink requests
+    //
+    execute: function (options) {
+        // get url
+        var id = null;
+        var href = null;
+        var operation = null;
+        var confirmation_text = null;
+        var inbound = null;
+
+        // check for options
+        if (options)
+        {
+            // get options
+            href = options.href;
+            operation = options.operation;
+            id = options.id;
+            confirmation_text = options.confirmation_text;
+            inbound = options.inbound;
+
+            // check if input contains the operation tag
+            if (href !== undefined && href.indexOf('operation=') != -1) {
+                // Get operation, id in hyperlink's href.
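+                // (e.g. href '.../list?operation=Share+or+Publish&id=abc'
+                //  yields operation 'Share or Publish' and id 'abc')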
+                var href_parts = href.split("?");
+                if (href_parts.length > 1) {
+                    var href_parms_str = href_parts[1];
+                    var href_parms = href_parms_str.split("&");
+                    for (var index = 0; index < href_parms.length; index++) {
+                        if (href_parms[index].indexOf('operation') != -1) {
+                            // Found operation parm; get operation value.
+                            operation = href_parms[index].split('=')[1];
+                            operation = operation.replace (/\+/g, ' ');
+                        } else if (href_parms[index].indexOf('id') != -1) {
+                            // Found id parm; get id value.
+                            id = href_parms[index].split('=')[1];
+                        }
+                    }
+                }
+            }
+        }
+
+        // check for operation details
+        if (operation && id) {
+            // show confirmation box
+            if (confirmation_text && confirmation_text != '' && confirmation_text != 'None' && confirmation_text != 'null')
+                if(!confirm(confirmation_text))
+                    return false;
+
+            // normalize the operation name to lower case
+            operation = operation.toLowerCase();
+
+            // Update grid.
+            this.grid.set({
+                operation: operation,
+                item_ids: id
+            });
+
+            // Do operation. If operation cannot be performed asynchronously, redirect to location.
+            if (this.grid.can_async_op(operation)) {
+                this.update_grid();
+            } else {
+                this.go_to(inbound, href);
+            }
+
+            // done
+            return false;
+        }
+
+        // navigate to the requested hyperlink
+        if (href) {
+            this.go_to(inbound, href);
+            return false;
+        }
+
+        // refresh grid
+        if (this.grid.get('async')) {
+            this.update_grid();
+        } else {
+            this.go_to(inbound, href);
+        }
+
+        // done
+        return false;
+    },
+
+    // go to url
+    go_to: function (inbound, href) {
+        // get async status
+        var async = this.grid.get('async');
+        this.grid.set('async', false);
+
+        // get slide status
+        var advanced_search = this.$el.find('#advanced-search').is(':visible');
+        this.grid.set('advanced_search', advanced_search);
+
+        // get default url
+        if(!href) {
+            href = this.grid.get('url_base') + '?' + $.param(this.grid.get_url_data());
+        }
+
+        // clear grid of transient request attributes.
+        this.grid.set({
+            operation: undefined,
+            item_ids: undefined,
+            async: async
+        });
+
+        if (inbound) {
+            // this currently assumes that there is only a single grid shown at a time
+            var $div = $('.grid-header').closest('.inbound');
+            if ($div.length !== 0) {
+                $div.load(href);
+                return;
+            }
+        }
+
+        window.location = href;
+    },
+
+    // Update grid.
+    update_grid: function () {
+        // If there's an operation, do POST; otherwise, do GET.
+        var method = (this.grid.get('operation') ? 'POST' : 'GET' );
+
+        // Show overlay to indicate loading and prevent user actions.
+        this.$el.find('.loading-elt-overlay').show();
+        var self = this;
+        $.ajax({
+            type: method,
+            url: self.grid.get('url_base'),
+            data: self.grid.get_url_data(),
+            error: function(response) { alert( 'Grid refresh failed' );},
+            success: function(response_text) {
+
+                // backup
+                var embedded = self.grid.get('embedded');
+                var insert = self.grid.get('insert');
+
+                // request new configuration
+                var json = $.parseJSON(response_text);
+
+                // update
+                json.embedded = embedded;
+                json.insert = insert;
+
+                // Initialize new grid config
+                self.init_grid(json);
+
+                // Hide loading overlay.
+                self.$el.find('.loading-elt-overlay').hide();
+            },
+            complete: function() {
+                // Clear grid of transient request attributes.
+                self.grid.set({
+                    operation: undefined,
+                    item_ids: undefined
+                });
+            }
+        });
+    }
+});
+
+});
diff --git a/client/galaxy/scripts/mvc/history/copy-dialog.js b/client/galaxy/scripts/mvc/history/copy-dialog.js
new file mode 100644
index 0000000..e16352b
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/copy-dialog.js
@@ -0,0 +1,202 @@
+define([
+    "mvc/ui/ui-modal",
+    "mvc/ui/error-modal",
+    "utils/localization"
+], function( MODAL, ERROR_MODAL, _l ){
+
+'use strict';
+
+//==============================================================================
+/**
+ * A dialog/modal that allows copying a user history or 'importing' one from
+ * another user. Generally called via historyCopyDialog below.
+ * @type {Object}
+ */
+var CopyDialog = {
+
+    // language related strings/fns
+    defaultName     : _.template( "Copy of '<%- name %>'" ),
+    title           : _.template( _l( 'Copying history' ) + ' "<%- name %>"' ),
+    submitLabel     : _l( 'Copy' ),
+    errorMessage    : _l( 'History could not be copied.' ),
+    progressive     : _l( 'Copying history' ),
+    activeLabel     : _l( 'Copy only the active, non-deleted datasets' ),
+    allLabel        : _l( 'Copy all datasets including deleted ones' ),
+    anonWarning     : _l( 'As an anonymous user, unless you login or register, you will lose your current history ' ) +
+                      _l( 'after copying this history. ' ),
+
+    // template for modal body
+    _template : _.template([
+        //TODO: remove inline styles
+        // show a warning message for losing current to anon users
+        '<% if( isAnon ){ %>',
+            '<div class="warningmessage">',
+                '<%- anonWarning %>',
+                _l( 'You can' ),
+                ' <a href="/user/login">', _l( 'login here' ), '</a> ', _l( 'or' ), ' ',
+                ' <a href="/user/create">', _l( 'register here' ), '</a>.',
+            '</div>',
+        '<% } %>',
+        '<form>',
+            '<label for="copy-modal-title">',
+                _l( 'Enter a title for the new history' ), ':',
+            '</label><br />',
+            // TODO: could use required here and the form validators
+            // NOTE: use unescaped here if escaped in the modal function below
+            '<input id="copy-modal-title" class="form-control" style="width: 100%" value="<%= name %>" />',
+            '<p class="invalid-title bg-danger" style="color: red; margin: 8px 0px 8px 0px; display: none">',
+                _l( 'Please enter a valid history title' ),
+            '</p>',
+            // if allowAll, add the option to copy deleted datasets, too
+            '<% if( allowAll ){ %>',
+                '<br />',
+                '<p>', _l( 'Choose which datasets from the original history to include:' ), '</p>',
+                // copy non-deleted is the default
+                '<input name="copy-what" type="radio" id="copy-non-deleted" value="copy-non-deleted" ',
+                    '<% if( copyWhat === "copy-non-deleted" ){ print( "checked" ); } %>/>',
+                '<label for="copy-non-deleted"> <%- activeLabel %></label>',
+                '<br />',
+                '<input name="copy-what" type="radio" id="copy-all" value="copy-all" ',
+                    '<% if( copyWhat === "copy-all" ){ print( "checked" ); } %>/>',
+                '<label for="copy-all"> <%- allLabel %></label>',
+            '<% } %>',
+        '</form>'
+    ].join( '' )),
+
+    // empty modal body and let the user know the copy is happening
+    _showAjaxIndicator : function _showAjaxIndicator(){
+        var indicator = '<p><span class="fa fa-spinner fa-spin"></span> ' + this.progressive + '...</p>';
+        this.modal.$( '.modal-body' ).empty().append( indicator ).css({ 'margin-top': '8px' });
+    },
+
+    // (sorta) public interface - display the modal, render the form, and potentially copy the history
+    // returns a jQuery.Deferred done->history copied, fail->user cancelled
+    dialog : function _dialog( modal, history, options ){
+        options = options || {};
+
+        var dialog = this,
+            deferred = jQuery.Deferred(),
+            // TODO: getting a little byzantine here
+            defaultCopyNameFn = options.nameFn || this.defaultName,
+            defaultCopyName = defaultCopyNameFn({ name: history.get( 'name' ) }),
+            // TODO: these two might be simpler as one 3 state option (all,active,no-choice)
+            defaultCopyWhat = options.allDatasets? 'copy-all' : 'copy-non-deleted',
+            allowAll = !_.isUndefined( options.allowAll )? options.allowAll : true,
+            autoClose = !_.isUndefined( options.autoClose )? options.autoClose : true;
+
+        this.modal = modal;
+
+
+        // validate the name and copy if good
+        function checkNameAndCopy(){
+            var name = modal.$( '#copy-modal-title' ).val();
+            if( !name ){
+                modal.$( '.invalid-title' ).show();
+                return;
+            }
+            // get further settings, shut down and indicate the ajax call, then hide and resolve/reject
+            var copyAllDatasets = modal.$( 'input[name="copy-what"]:checked' ).val() === 'copy-all';
+            modal.$( 'button' ).prop( 'disabled', true );
+            dialog._showAjaxIndicator();
+            history.copy( true, name, copyAllDatasets )
+                .done( function( response ){
+                    deferred.resolve( response );
+                })
+                .fail( function( xhr, status, message ){
+                    var options = { name: name, copyAllDatasets: copyAllDatasets };
+                    ERROR_MODAL.ajaxErrorModal( history, xhr, options, dialog.errorMessage );
+                    deferred.rejectWith( deferred, arguments );
+                })
+                .done( function(){
+                    if( autoClose ){ modal.hide(); }
+                });
+        }
+
+        var originalClosingCallback = options.closing_callback;
+        modal.show( _.extend( options, {
+            title   : this.title({ name: history.get( 'name' ) }),
+            body    : $( dialog._template({
+                    name        : defaultCopyName,
+                    isAnon      : Galaxy.user.isAnonymous(),
+                    allowAll    : allowAll,
+                    copyWhat    : defaultCopyWhat,
+                    activeLabel : this.activeLabel,
+                    allLabel    : this.allLabel,
+                    anonWarning : this.anonWarning,
+                })),
+            buttons : _.object([
+                    [ _l( 'Cancel' ),   function(){ modal.hide(); } ],
+                    [ this.submitLabel, checkNameAndCopy ]
+                ]),
+            height          : 'auto',
+            closing_events  : true,
+            closing_callback: function _historyCopyClose( cancelled ){
+                    if( cancelled ){
+                        deferred.reject({ cancelled : true });
+                    }
+                    if( originalClosingCallback ){
+                        originalClosingCallback( cancelled );
+                    }
+                }
+            }));
+
+        // set the default dataset copy, autofocus the title, and set up for a simple return
+        modal.$( '#copy-modal-title' ).focus().select();
+        modal.$( '#copy-modal-title' ).on( 'keydown', function( ev ){
+            if( ev.keyCode === 13 ){
+                ev.preventDefault();
+                checkNameAndCopy();
+            }
+        });
+
+        return deferred;
+    },
+};
+
+//==============================================================================
+// maintain the (slight) distinction between copy and import
+/**
+ * Subclass CopyDialog to use the import language.
+ */
+var ImportDialog = _.extend( {}, CopyDialog, {
+    defaultName     : _.template( "imported: <%- name %>" ),
+    title           : _.template( _l( 'Importing history' ) + ' "<%- name %>"' ),
+    submitLabel     : _l( 'Import' ),
+    errorMessage    : _l( 'History could not be imported.' ),
+    progressive     : _l( 'Importing history' ),
+    activeLabel     : _l( 'Import only the active, non-deleted datasets' ),
+    allLabel        : _l( 'Import all datasets including deleted ones' ),
+    anonWarning     : _l( 'As an anonymous user, unless you login or register, you will lose your current history ' ) +
+                      _l( 'after importing this history. ' ),
+
+});
+
+//==============================================================================
+/**
+ * Main interface for both history import and history copy dialogs.
+ * @param  {Backbone.Model} history     the history to copy
+ * @param  {Object}         options     a hash
+ * @return {jQuery.Deferred}            promise that fails on close and succeeds on copy
+ *
+ * options:
+ *     (this object is also passed to the modal used to display the dialog and accepts modal options)
+ *     {Function} nameFn    if defined, use this to build the default name shown to the user
+ *                          (the fn is passed: {name: <original history's name>})
+ *     {bool} useImport     if true, use the 'import' language (instead of Copy)
+ *     {bool} allowAll      if true, allow the user to choose between copying all datasets and
+ *                          only non-deleted datasets
+ *     {bool} allDatasets   if true, the 'copy-all' radio button is checked initially
+ *                          (otherwise 'copy-non-deleted' is)
+ */
+var historyCopyDialog = function( history, options ){
+    options = options || {};
+    // create our own modal if Galaxy doesn't have one (mako tab without use_panels)
+    var modal = window.parent.Galaxy.modal || new MODAL.View({});
+    return options.useImport?
+        ImportDialog.dialog( modal, history, options ):
+        CopyDialog.dialog( modal, history, options );
+};
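+// Illustrative usage (not part of the original source); assumes `history` is a
+// fetched history model on a page where Galaxy.modal is available:
+//     historyCopyDialog( history, { useImport : true })
+//         .done( function(){ console.debug( 'history imported' ); })
+//         .fail( function(){ console.debug( 'import cancelled' ); });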
+
+
+//==============================================================================
+    return historyCopyDialog;
+});
diff --git a/client/galaxy/scripts/mvc/history/hda-li-edit.js b/client/galaxy/scripts/mvc/history/hda-li-edit.js
new file mode 100644
index 0000000..48d727e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/hda-li-edit.js
@@ -0,0 +1,79 @@
+define([
+    "mvc/dataset/dataset-li-edit",
+    "mvc/history/hda-li",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( DATASET_LI_EDIT, HDA_LI, BASE_MVC, _l ){
+
+'use strict';
+
+//==============================================================================
+var _super = DATASET_LI_EDIT.DatasetListItemEdit;
+/** @class Editing view for HistoryDatasetAssociation.
+ */
+var HDAListItemEdit = _super.extend(
+/** @lends HDAListItemEdit.prototype */{
+
+    className   : _super.prototype.className + " history-content",
+
+    /** In this override, fetch full details only if the dataset is in a ready state;
+     *  in other states, fetch just the attributes needed to render the re-run button.
+     *  Note: fetch with no 'change' event triggering to prevent automatic rendering.
+     */
+    _fetchModelDetails : function(){
+        var view = this;
+        if( view.model.inReadyState() && !view.model.hasDetails() ){
+            return view.model.fetch({ silent: true });
+
+        // special case the need for the rerunnable and creating_job attributes
+        // needed for rendering re-run button on queued, running datasets
+        } else if( !view.model.has( 'rerunnable' ) ){
+            return view.model.fetch({ silent: true, data: {
+                // only fetch rerunnable and creating_job to keep overhead down
+                keys: [ 'rerunnable', 'creating_job' ].join(',')
+            }});
+        }
+        return jQuery.when();
+    },
+
+    /** event map */
+    events : _.extend( _.clone( _super.prototype.events ), {
+        'click .unhide-link' : function( ev ){ this.model.unhide(); return false; }
+    }),
+
+    /** string rep */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'HDAListItemEdit(' + modelString + ')';
+    }
+});
+
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+HDAListItemEdit.prototype.templates = (function(){
+
+    var warnings = _.extend( {}, _super.prototype.templates.warnings, {
+        hidden : BASE_MVC.wrapTemplate([
+            '<% if( !dataset.visible ){ %>',
+                // add a link to unhide a dataset
+                '<div class="hidden-msg warningmessagesmall">',
+                    _l( 'This dataset has been hidden' ),
+                    '<br /><a class="unhide-link" a href="javascript:void(0);">', _l( 'Unhide it' ), '</a>',
+                '</div>',
+            '<% } %>'
+        ], 'dataset' )
+    });
+
+    return _.extend( {}, _super.prototype.templates, {
+        //NOTE: *steal* the HDAListItemView titleBar
+        titleBar : HDA_LI.HDAListItemView.prototype.templates.titleBar,
+        warnings : warnings
+    });
+}());
+
+
+//==============================================================================
+    return {
+        HDAListItemEdit  : HDAListItemEdit
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/hda-li.js b/client/galaxy/scripts/mvc/history/hda-li.js
new file mode 100644
index 0000000..398a1ee
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/hda-li.js
@@ -0,0 +1,72 @@
+define([
+    "mvc/dataset/dataset-li",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( DATASET_LI, BASE_MVC, _l ){
+
+'use strict';
+
+//==============================================================================
+var _super = DATASET_LI.DatasetListItemView;
+/** @class Read only view for HistoryDatasetAssociation.
+ *      Since there are no controls on the HDAView to hide the dataset,
+ *      the primary thing this class does (currently) is override templates
+ *      to render the HID.
+ */
+var HDAListItemView = _super.extend(
+/** @lends HDAListItemView.prototype */{
+
+    className   : _super.prototype.className + " history-content",
+
+    initialize : function( attributes, options ){
+        _super.prototype.initialize.call( this, attributes, options );
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'HDAListItemView(' + modelString + ')';
+    }
+});
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+HDAListItemView.prototype.templates = (function(){
+
+    var titleBarTemplate = BASE_MVC.wrapTemplate([
+        // adding the hid display to the title
+        '<div class="title-bar clear" tabindex="0">',
+            '<span class="state-icon"></span>',
+            '<div class="title">',
+                //TODO: remove whitespace and use margin-right
+                '<span class="hid"><%- dataset.hid %></span> ',
+                '<span class="name"><%- dataset.name %></span>',
+            '</div>',
+        '</div>'
+    ], 'dataset' );
+
+    var warnings = _.extend( {}, _super.prototype.templates.warnings, {
+        hidden : BASE_MVC.wrapTemplate([
+            // add a warning when hidden
+            '<% if( !dataset.visible ){ %>',
+                '<div class="hidden-msg warningmessagesmall">',
+                    _l( 'This dataset has been hidden' ),
+                '</div>',
+            '<% } %>'
+        ], 'dataset' )
+    });
+
+    return _.extend( {}, _super.prototype.templates, {
+        titleBar : titleBarTemplate,
+        warnings : warnings
+    });
+}());
+
+
+
+//==============================================================================
+    return {
+        HDAListItemView  : HDAListItemView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/hda-model.js b/client/galaxy/scripts/mvc/history/hda-model.js
new file mode 100644
index 0000000..294f68d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/hda-model.js
@@ -0,0 +1,28 @@
+define([
+    "mvc/dataset/dataset-model",
+    "mvc/history/history-content-model",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( DATASET, HISTORY_CONTENT, BASE_MVC, _l ){
+'use strict';
+
+//==============================================================================
+var _super = DATASET.DatasetAssociation,
+    hcontentMixin = HISTORY_CONTENT.HistoryContentMixin;
+/** @class (HDA) model for a Galaxy dataset contained in and related to a history.
+ */
+var HistoryDatasetAssociation = _super.extend( BASE_MVC.mixin( hcontentMixin,
+/** @lends HistoryDatasetAssociation.prototype */{
+
+    /** default attributes for a model */
+    defaults : _.extend( {}, _super.prototype.defaults, hcontentMixin.defaults, {
+        history_content_type: 'dataset',
+        model_class         : 'HistoryDatasetAssociation'
+    }),
+}));
+
+//==============================================================================
+    return {
+        HistoryDatasetAssociation   : HistoryDatasetAssociation
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/hdca-li-edit.js b/client/galaxy/scripts/mvc/history/hdca-li-edit.js
new file mode 100644
index 0000000..0960b94
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/hdca-li-edit.js
@@ -0,0 +1,75 @@
+define([
+    "mvc/history/hdca-li",
+    "mvc/collection/collection-view-edit",
+    "ui/fa-icon-button",
+    "utils/localization"
+], function( HDCA_LI, DC_VIEW_EDIT, faIconButton, _l ){
+
+'use strict';
+
+//==============================================================================
+var _super = HDCA_LI.HDCAListItemView;
+/** @class Editing view for HistoryDatasetCollectionAssociation.
+ */
+var HDCAListItemEdit = _super.extend(
+/** @lends HDCAListItemEdit.prototype */{
+
+    /** logger used to record this.log messages, commonly set to console */
+    //logger              : console,
+
+    /** Override to return editable versions of the collection panels */
+    _getFoldoutPanelClass : function(){
+        switch( this.model.get( 'collection_type' ) ){
+            case 'list':
+                return DC_VIEW_EDIT.ListCollectionViewEdit;
+            case 'paired':
+                return DC_VIEW_EDIT.PairCollectionViewEdit;
+            case 'list:paired':
+                return DC_VIEW_EDIT.ListOfPairsCollectionViewEdit;
+            case 'list:list':
+                return DC_VIEW_EDIT.ListOfListsCollectionViewEdit;
+        }
+        throw new TypeError( 'Unknown collection_type: ' + this.model.get( 'collection_type' ) );
+    },
+
+    // ......................................................................... delete
+    /** In this override, add the delete button. */
+    _renderPrimaryActions : function(){
+        this.log( this + '._renderPrimaryActions' );
+        // render the display, edit attr and delete icon-buttons
+        return _super.prototype._renderPrimaryActions.call( this )
+            .concat([
+                this._renderDeleteButton()
+            ]);
+    },
+
+    /** Render icon-button to delete this collection. */
+    _renderDeleteButton : function(){
+        var self = this,
+            deleted = this.model.get( 'deleted' );
+        return faIconButton({
+            title       : deleted? _l( 'Dataset collection is already deleted' ): _l( 'Delete' ),
+            classes     : 'delete-btn',
+            faIcon      : 'fa-times',
+            disabled    : deleted,
+            onclick     : function() {
+                // trigger mouseout manually; tooltips are otherwise left behind in the DOM (hover out is never called on deletion)
+                self.$el.find( '.icon-btn.delete-btn' ).trigger( 'mouseout' );
+                self.model[ 'delete' ]();
+            }
+        });
+    },
+
+    // ......................................................................... misc
+    /** string rep */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'HDCAListItemEdit(' + modelString + ')';
+    }
+});
+
+//==============================================================================
+    return {
+        HDCAListItemEdit : HDCAListItemEdit
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/hdca-li.js b/client/galaxy/scripts/mvc/history/hdca-li.js
new file mode 100644
index 0000000..1e8133d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/hdca-li.js
@@ -0,0 +1,102 @@
+define([
+    "mvc/dataset/states",
+    "mvc/collection/collection-li",
+    "mvc/collection/collection-view",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( STATES, DC_LI, DC_VIEW, BASE_MVC, _l ){
+
+'use strict';
+
+//==============================================================================
+var _super = DC_LI.DCListItemView;
+/** @class Read only view for HistoryDatasetCollectionAssociation (a dataset collection inside a history).
+ */
+var HDCAListItemView = _super.extend(
+/** @lends HDCAListItemView.prototype */{
+
+    className   : _super.prototype.className + " history-content",
+
+    /** event listeners */
+    _setUpListeners : function(){
+        _super.prototype._setUpListeners.call( this );
+
+        this.listenTo( this.model, {
+            'change:populated change:visible' : function( model, options ){ this.render(); },
+        });
+    },
+
+    /** Override to provide the proper collections panels as the foldout */
+    _getFoldoutPanelClass : function(){
+        switch( this.model.get( 'collection_type' ) ){
+            case 'list':
+                return DC_VIEW.ListCollectionView;
+            case 'paired':
+                return DC_VIEW.PairCollectionView;
+            case 'list:paired':
+                return DC_VIEW.ListOfPairsCollectionView;
+            case 'list:list':
+                return DC_VIEW.ListOfListsCollectionView;
+        }
+        throw new TypeError( 'Unknown collection_type: ' + this.model.get( 'collection_type' ) );
+    },
+
+    /** In this override, add the state as a class for use with state-based CSS */
+    _swapNewRender : function( $newRender ){
+        _super.prototype._swapNewRender.call( this, $newRender );
+        //TODO: model currently has no state
+        var state = !this.model.get( 'populated' ) ? STATES.RUNNING : STATES.OK;
+        //if( this.model.has( 'state' ) ){
+        this.$el.addClass( 'state-' + state );
+        //}
+        return this.$el;
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'HDCAListItemView(' + modelString + ')';
+    }
+});
+
+/** underscore templates */
+HDCAListItemView.prototype.templates = (function(){
+
+    var warnings = _.extend( {}, _super.prototype.templates.warnings, {
+        hidden : BASE_MVC.wrapTemplate([
+            // add a warning when hidden
+            '<% if( !collection.visible ){ %>',
+                '<div class="hidden-msg warningmessagesmall">',
+                    _l( 'This collection has been hidden' ),
+                '</div>',
+            '<% } %>'
+        ], 'collection' )
+    });
+
+    // could steal this from hda-base (or use mixed content)
+    var titleBarTemplate = BASE_MVC.wrapTemplate([
+        // adding the hid display to the title
+        '<div class="title-bar clear" tabindex="0">',
+            '<span class="state-icon"></span>',
+            '<div class="title">',
+                //TODO: remove whitespace and use margin-right
+                '<span class="hid"><%- collection.hid %></span> ',
+                '<span class="name"><%- collection.name %></span>',
+            '</div>',
+            '<div class="subtitle"></div>',
+        '</div>'
+    ], 'collection' );
+
+    return _.extend( {}, _super.prototype.templates, {
+        warnings : warnings,
+        titleBar : titleBarTemplate
+    });
+}());
+
+
+//==============================================================================
+    return {
+        HDCAListItemView : HDCAListItemView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/hdca-model.js b/client/galaxy/scripts/mvc/history/hdca-model.js
new file mode 100644
index 0000000..f8df35e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/hdca-model.js
@@ -0,0 +1,131 @@
+define([
+    "mvc/collection/collection-model",
+    "mvc/history/history-content-model",
+    "utils/localization"
+], function( DC_MODEL, HISTORY_CONTENT, _l ){
+
+'use strict';
+
+/*==============================================================================
+
+Models for DatasetCollections contained within a history.
+
+TODO:
+    these might be compactable to one class if some duplication with
+    collection-model is used.
+
+==============================================================================*/
+var hcontentMixin = HISTORY_CONTENT.HistoryContentMixin,
+    ListDC = DC_MODEL.ListDatasetCollection,
+    PairDC = DC_MODEL.PairDatasetCollection,
+    ListPairedDC = DC_MODEL.ListPairedDatasetCollection,
+    ListOfListsDC = DC_MODEL.ListOfListsDatasetCollection;
+
+//==============================================================================
+/** Override to post to contents route w/o id. */
+function buildHDCASave( _super ){
+    return function _save( attributes, options ){
+        if( this.isNew() ){
+            options = options || {};
+            options.url = this.urlRoot + this.get( 'history_id' ) + '/contents';
+            attributes = attributes || {};
+            attributes.type = 'dataset_collection';
+        }
+        return _super.call( this, attributes, options );
+    };
+}
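+// Illustrative effect (not from the original source): saving a new
+// HistoryListDatasetCollection with history_id 'abc123' POSTs to
+// 'api/histories/abc123/contents' with attributes.type set to
+// 'dataset_collection'; saving an existing collection uses the model's url().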
+
+
+//==============================================================================
+/** @class Backbone model for List Dataset Collection within a History.
+ */
+var HistoryListDatasetCollection = ListDC.extend( hcontentMixin ).extend(
+/** @lends HistoryListDatasetCollection.prototype */{
+
+    defaults : _.extend( _.clone( ListDC.prototype.defaults ), {
+        history_content_type: 'dataset_collection',
+        collection_type     : 'list',
+        model_class         : 'HistoryDatasetCollectionAssociation'
+    }),
+
+    /** Override to post to contents route w/o id. */
+    save : buildHDCASave( ListDC.prototype.save ),
+
+    /** String representation. */
+    toString : function(){
+        return 'History' + ListDC.prototype.toString.call( this );
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone model for Pair Dataset Collection within a History.
+ *  @constructs
+ */
+var HistoryPairDatasetCollection = PairDC.extend( hcontentMixin ).extend(
+/** @lends HistoryPairDatasetCollection.prototype */{
+
+    defaults : _.extend( _.clone( PairDC.prototype.defaults ), {
+        history_content_type: 'dataset_collection',
+        collection_type     : 'paired',
+        model_class         : 'HistoryDatasetCollectionAssociation'
+    }),
+
+    /** Override to post to contents route w/o id. */
+    save : buildHDCASave( PairDC.prototype.save ),
+
+    /** String representation. */
+    toString : function(){
+        return 'History' + PairDC.prototype.toString.call( this );
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone model for List of Pairs Dataset Collection within a History. */
+var HistoryListPairedDatasetCollection = ListPairedDC.extend( hcontentMixin ).extend({
+
+    defaults : _.extend( _.clone( ListPairedDC.prototype.defaults ), {
+        history_content_type: 'dataset_collection',
+        collection_type     : 'list:paired',
+        model_class         : 'HistoryDatasetCollectionAssociation'
+    }),
+
+    /** Override to post to contents route w/o id. */
+    save : buildHDCASave( ListPairedDC.prototype.save ),
+
+    /** String representation. */
+    toString : function(){
+        return 'History' + ListPairedDC.prototype.toString.call( this );
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone model for List of Lists Dataset Collection within a History. */
+var HistoryListOfListsDatasetCollection = ListOfListsDC.extend( hcontentMixin ).extend({
+
+    defaults : _.extend( _.clone( ListOfListsDC.prototype.defaults ), {
+        history_content_type: 'dataset_collection',
+        collection_type     : 'list:list',
+        model_class         : 'HistoryDatasetCollectionAssociation'
+    }),
+
+    /** Override to post to contents route w/o id. */
+    save : buildHDCASave( ListOfListsDC.prototype.save ),
+
+    /** String representation. */
+    toString : function(){
+         return ([ 'HistoryListOfListsDatasetCollection(', this.get( 'name' ), ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+    return {
+        HistoryListDatasetCollection        : HistoryListDatasetCollection,
+        HistoryPairDatasetCollection        : HistoryPairDatasetCollection,
+        HistoryListPairedDatasetCollection  : HistoryListPairedDatasetCollection,
+        HistoryListOfListsDatasetCollection : HistoryListOfListsDatasetCollection
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/history-content-model.js b/client/galaxy/scripts/mvc/history/history-content-model.js
new file mode 100644
index 0000000..1c85909
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-content-model.js
@@ -0,0 +1,90 @@
+define([
+    "mvc/dataset/states",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( STATES, BASE_MVC, _l ){
+'use strict';
+
+//==============================================================================
+/** @class Mixin for HistoryContents content (HDAs, HDCAs).
+ */
+var HistoryContentMixin = {
+
+    /** default attributes for a model */
+    defaults : {
+        /** parent (containing) history */
+        history_id          : null,
+        /** some content_type (HistoryContents can contain mixed model classes) */
+        history_content_type: null,
+        /** indicates when/in what order the content was generated within the history */
+        hid                 : null,
+        /** whether the user wants the content shown (visible) */
+        visible             : true
+    },
+
+    // ........................................................................ mixed content element
+    // In order to be part of a MIXED bbone collection, we can't rely on the id
+    //  (which may collide between models of different classes)
+    // Instead, use type_id, which prefixes the id with the history_content_type,
+    //  so the bbone collection can differentiate
+    idAttribute : 'type_id',
+
+    // ........................................................................ common queries
+    /** the inverse of the visible flag */
+    hidden : function(){
+        return !this.get( 'visible' );
+    },
+
+//TODO: remove
+    /** based on includeDeleted, includeHidden (gen. from the container control),
+     *      would this ds show in the list of ds's?
+     *  @param {Boolean} includeDeleted are we showing deleted hdas?
+     *  @param {Boolean} includeHidden are we showing hidden hdas?
+     */
+    isVisible : function( includeDeleted, includeHidden ){
+        var isVisible = true;
+        if( ( !includeDeleted )
+        &&  ( this.get( 'deleted' ) || this.get( 'purged' ) ) ){
+            isVisible = false;
+        }
+        if( ( !includeHidden )
+        &&  ( !this.get( 'visible' ) ) ){
+            isVisible = false;
+        }
+        return isVisible;
+    },
+
+    // ........................................................................ ajax
+    //TODO?: these are probably better done on the leaf classes
+    /** history content goes through the 'api/histories' API */
+    urlRoot: Galaxy.root + 'api/histories/',
+
+    /** full url spec. for this content */
+    url : function(){
+        var url = this.urlRoot + this.get( 'history_id' ) + '/contents/'
+             + this.get('history_content_type') + 's/' + this.get( 'id' );
+        return url;
+    },
+
+    /** save this content as not visible */
+    hide : function( options ){
+        if( !this.get( 'visible' ) ){ return jQuery.when(); }
+        return this.save( { visible: false }, options );
+    },
+    /** save this content as visible */
+    unhide : function( options ){
+        if( this.get( 'visible' ) ){ return jQuery.when(); }
+        return this.save( { visible: true }, options );
+    },
+
+    // ........................................................................ misc
+    toString : function(){
+        return ([ this.get( 'type_id' ), this.get( 'hid' ), this.get( 'name' ) ].join(':'));
+    }
+};
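+
+// Illustrative usage (a sketch, not part of the upstream code): a minimal
+// model using this mixin; the ids are placeholders. type_id (not id) keys the
+// model inside the mixed HistoryContents collection, and hide/unhide only hit
+// the server when the visible flag would actually change.
+//
+//     var MyContent = Backbone.Model.extend( HistoryContentMixin );
+//     var content = new MyContent({ id: 'abc', type_id: 'dataset-abc',
+//         history_id: 'def', history_content_type: 'dataset' });
+//     content.url();  // => .../api/histories/def/contents/datasets/abc
+//     content.hide(); // saves { visible: false }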
+
+
+//==============================================================================
+    return {
+        HistoryContentMixin : HistoryContentMixin
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/history-contents.js b/client/galaxy/scripts/mvc/history/history-contents.js
new file mode 100644
index 0000000..3c2d15e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-contents.js
@@ -0,0 +1,478 @@
+define([
+    "mvc/base/controlled-fetch-collection",
+    "mvc/history/hda-model",
+    "mvc/history/hdca-model",
+    "mvc/history/history-preferences",
+    "mvc/base-mvc",
+    "utils/ajax-queue"
+], function( CONTROLLED_FETCH_COLLECTION, HDA_MODEL, HDCA_MODEL, HISTORY_PREFS, BASE_MVC, AJAX_QUEUE ){
+'use strict';
+
+//==============================================================================
+var _super = CONTROLLED_FETCH_COLLECTION.PaginatedCollection;
+/** @class Backbone collection for history content.
+ *      NOTE: history content seems like a dataset collection, but differs in that it is mixed:
+ *          each element can be either an HDA (dataset) or a DatasetCollection and co-exist on
+ *          the same level.
+ *      Dataset collections on the other hand are not mixed and (so far) can only contain either
+ *          HDAs or child dataset collections on one level.
+ *      This is why this does not inherit from any of the DatasetCollections (currently).
+ */
+var HistoryContents = _super.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : 'history',
+
+    // ........................................................................ composite collection
+    /** since history content is a mix, override model fn into a factory, creating based on history_content_type */
+    model : function( attrs, options ) {
+        if( attrs.history_content_type === "dataset" ) {
+            return new HDA_MODEL.HistoryDatasetAssociation( attrs, options );
+
+        } else if( attrs.history_content_type === "dataset_collection" ) {
+            switch( attrs.collection_type ){
+                case 'list':
+                    return new HDCA_MODEL.HistoryListDatasetCollection( attrs, options );
+                case 'paired':
+                    return new HDCA_MODEL.HistoryPairDatasetCollection( attrs, options );
+                case 'list:paired':
+                    return new HDCA_MODEL.HistoryListPairedDatasetCollection( attrs, options );
+                case 'list:list':
+                    return new HDCA_MODEL.HistoryListOfListsDatasetCollection( attrs, options );
+            }
+            // This is a hack inside a hack:
+            // Return a plain object with validationError to fake a model.validationError
+            // (since we don't have a model to use validate with)
+            // (the outer hack being the mixed content/model function in this collection)
+            var msg = 'Unknown collection_type: ' + attrs.collection_type;
+            console.warn( msg, attrs );
+            return { validationError : msg };
+        }
+        return { validationError : 'Unknown history_content_type: ' + attrs.history_content_type };
+    },
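+
+    // Illustrative usage (a sketch, not part of the upstream code): because
+    // `model` is a factory, json added to the collection arrives as the
+    // matching class (ids below are placeholders):
+    //
+    //     contents.add({ history_content_type: 'dataset',
+    //         id: 'abc', type_id: 'dataset-abc' });
+    //     // -> an HDA_MODEL.HistoryDatasetAssociation
+    //     contents.add({ history_content_type: 'dataset_collection',
+    //         collection_type: 'list', id: 'def', type_id: 'dataset_collection-def' });
+    //     // -> an HDCA_MODEL.HistoryListDatasetCollection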
+
+    // ........................................................................ set up
+    limitPerPage : 500,
+
+    /** @type {Integer} how many contents per call to fetch when using progressivelyFetchDetails */
+    limitPerProgressiveFetch : 500,
+
+    /** @type {String} order used here and when fetching from server */
+    order : 'hid',
+
+    /** root api url */
+    urlRoot : Galaxy.root + 'api/histories',
+
+    /** complete api url */
+    url : function(){
+        return this.urlRoot + '/' + this.historyId + '/contents';
+    },
+
+    /** Set up */
+    initialize : function( models, options ){
+        options = options || {};
+        _super.prototype.initialize.call( this, models, options );
+
+        this.history = options.history || null;
+        this.setHistoryId( options.historyId || null );
+        /** @type {Boolean} does this collection contain and fetch deleted elements */
+        this.includeDeleted = options.includeDeleted || this.includeDeleted;
+        /** @type {Boolean} does this collection contain and fetch non-visible elements */
+        this.includeHidden = options.includeHidden || this.includeHidden;
+
+        // backbonejs uses collection.model.prototype.idAttribute to determine if a model is *already* in a collection
+        //  and either merged or replaced. In this case, our 'model' is a function so we need to add idAttribute
+        //  manually here - if we don't, contents will not merge but be replaced/swapped.
+        this.model.prototype.idAttribute = 'type_id';
+    },
+
+    setHistoryId : function( newId ){
+        this.historyId = newId;
+        this._setUpWebStorage();
+    },
+
+    /** Set up client side storage. Currently a HistoryPrefs model (sessionStorage) keyed under 'history:<id>' */
+    _setUpWebStorage : function( initialSettings ){
+        // TODO: use initialSettings
+        if( !this.historyId ){ return; }
+        this.storage = new HISTORY_PREFS.HistoryPrefs({
+            id: HISTORY_PREFS.HistoryPrefs.historyStorageKey( this.historyId )
+        });
+        this.trigger( 'new-storage', this.storage, this );
+
+        this.on({
+            'include-deleted' : function( newVal ){
+                this.storage.includeDeleted( newVal );
+            },
+            'include-hidden' : function( newVal ){
+                this.storage.includeHidden( newVal );
+            }
+        });
+
+        this.includeDeleted = this.storage.includeDeleted() || false;
+        this.includeHidden  = this.storage.includeHidden()  || false;
+        return this;
+    },
+
+    // ........................................................................ common queries
+    /** @type {Object} map of collection available sorting orders containing comparator fns */
+    comparators : _.extend( _.clone( _super.prototype.comparators ), {
+        'name'       : BASE_MVC.buildComparator( 'name', { ascending: true }),
+        'name-dsc'   : BASE_MVC.buildComparator( 'name', { ascending: false }),
+        'hid'        : BASE_MVC.buildComparator( 'hid',  { ascending: false }),
+        'hid-asc'    : BASE_MVC.buildComparator( 'hid',  { ascending: true }),
+    }),
+
+    /** Get every model in this collection not in a 'ready' state (running). */
+    running : function(){
+        return this.filter( function( c ){ return !c.inReadyState(); });
+    },
+
+    /** return contents that are not ready and not deleted/hidden */
+    runningAndActive : function(){
+        return this.filter( function( c ){
+            return ( !c.inReadyState() )
+                && (  c.get( 'visible' ) )
+                // TODO: deletedOrPurged?
+                && ( !c.get( 'deleted' ) );
+        });
+    },
+
+    /** Get the model with the given hid
+     *  @param {Int} hid the hid to search for
+     *  @returns {HistoryDatasetAssociation} the model with the given hid or undefined if not found
+     */
+    getByHid : function( hid ){
+        // note: there *can* be more than one content with a given hid, this finds the first based on order
+        return this.findWhere({ hid: hid });
+    },
+
+    /** return true if all contents have details */
+    haveDetails : function(){
+        return this.all( function( c ){ return c.hasDetails(); });
+    },
+
+    // ........................................................................ hidden / deleted
+    /** return a new contents collection of only hidden items */
+    hidden : function(){
+        return this.filter( function( c ){ return c.hidden(); });
+    },
+
+    /** return a new contents collection of only deleted items */
+    deleted : function(){
+        return this.filter( function( c ){ return c.get( 'deleted' ); });
+    },
+
+    /** return a new contents collection of only visible and undeleted items */
+    visibleAndUndeleted : function(){
+        return this.filter( function( c ){
+            return (  c.get( 'visible' ) )
+                // TODO: deletedOrPurged?
+                && ( !c.get( 'deleted' ) );
+        });
+    },
+
+    /** create a setter in order to publish the change */
+    setIncludeDeleted : function( setting, options ){
+        if( _.isBoolean( setting ) && setting !== this.includeDeleted ){
+            this.includeDeleted = setting;
+            if( _.result( options, 'silent' ) ){ return; }
+            this.trigger( 'include-deleted', setting, this );
+        }
+    },
+
+    /** create a setter in order to publish the change */
+    setIncludeHidden : function( setting, options ){
+        if( _.isBoolean( setting ) && setting !== this.includeHidden ){
+            this.includeHidden = setting;
+            options = options || {};
+            if( _.result( options, 'silent' ) ){ return; }
+            this.trigger( 'include-hidden', setting, this );
+        }
+    },
+
+    // ........................................................................ ajax
+    // ............ controlled fetch collection
+    /** override to get expanded ids from sessionStorage and pass to API as details */
+    fetch : function( options ){
+        options = options || {};
+        if( this.historyId && !options.details ){
+            var prefs = HISTORY_PREFS.HistoryPrefs.get( this.historyId ).toJSON();
+            if( !_.isEmpty( prefs.expandedIds ) ){
+                options.details = _.values( prefs.expandedIds ).join( ',' );
+            }
+        }
+        return _super.prototype.fetch.call( this, options );
+    },
+
+    // ............. ControlledFetch stuff
+    /** override to include the API versioning flag */
+    _buildFetchData : function( options ){
+        return _.extend( _super.prototype._buildFetchData.call( this, options ), {
+            v : 'dev'
+        });
+    },
+
+    /** Extend to include details and version */
+    _fetchParams : _super.prototype._fetchParams.concat([
+        // TODO: remove (the need for) both
+        /** version */
+        'v',
+        /** dataset ids to get full details of */
+        'details',
+    ]),
+
+    /** override to add deleted/hidden filters */
+    _buildFetchFilters : function( options ){
+        var superFilters = _super.prototype._buildFetchFilters.call( this, options ) || {};
+        var filters = {};
+        if( !this.includeDeleted ){
+            filters.deleted = false;
+            filters.purged = false;
+        }
+        if( !this.includeHidden ){
+            filters.visible = true;
+        }
+        return _.defaults( superFilters, filters );
+    },
+
+    // ............ paginated collection
+    getTotalItemCount : function(){
+        return this.history.contentsShown();
+    },
+
+    // ............ history contents specific ajax
+    /** override to filter requested contents to those updated after the Date 'since' */
+    fetchUpdated : function( since, options ){
+        if( since ){
+            options = options || { filters: {} };
+            options.remove = false;
+            options.filters = {
+                'update_time-ge' : since.toISOString(),
+                // workflows will produce hidden datasets (non-output datasets) that still
+                // need to be updated in the collection or they'll update forever
+                // we can remove the default visible filter by using an 'empty' value
+                visible          : ''
+            };
+        }
+        return this.fetch( options );
+    },
+
+    /** fetch all the deleted==true contents of this collection */
+    fetchDeleted : function( options ){
+        options = options || {};
+        var self = this;
+        options.filters = _.extend( options.filters || {}, {
+            // all deleted, purged or not
+            deleted : true,
+            purged  : undefined
+        });
+        options.remove = false;
+
+        self.trigger( 'fetching-deleted', self );
+        return self.fetch( options )
+            .always( function(){ self.trigger( 'fetching-deleted-done', self ); });
+    },
+
+    /** fetch all the visible==false contents of this collection */
+    fetchHidden : function( options ){
+        options = options || {};
+        var self = this;
+        options.filters = _.extend( options.filters || {}, {
+            visible : false
+        });
+        options.remove = false;
+
+        self.trigger( 'fetching-hidden', self );
+        return self.fetch( options )
+            .always( function(){ self.trigger( 'fetching-hidden-done', self ); });
+    },
+
+    /** fetch detailed model data for all contents in this collection */
+    fetchAllDetails : function( options ){
+        options = options || {};
+        var detailsFlag = { details: 'all' };
+        options.data = _.extend( options.data || {}, detailsFlag );
+        return this.fetch( options );
+    },
+
+    /** specialty fetch method for retrieving the element_counts of all hdcas in the history */
+    fetchCollectionCounts : function( options ){
+        options = options || {};
+        options.keys = [ 'type_id', 'element_count' ].join( ',' );
+        options.filters = _.extend( options.filters || {}, {
+            history_content_type: 'dataset_collection',
+        });
+        options.remove = false;
+        return this.fetch( options );
+    },
+
+    // ............. quasi-batch ops
+    // TODO: to batch
+    /** helper that fetches using filterParams then calls save on each fetched using updateWhat as the save params */
+    _filterAndUpdate : function( filterParams, updateWhat ){
+        var self = this;
+        var idAttribute = self.model.prototype.idAttribute;
+        var updateArgs = [ updateWhat ];
+
+        return self.fetch({ filters: filterParams, remove: false })
+            .then( function( fetched ){
+                // convert filtered json array to model array
+                fetched = fetched.reduce( function( modelArray, currJson, i ){
+                    var model = self.get( currJson[ idAttribute ] );
+                    return model? modelArray.concat( model ) : modelArray;
+                }, []);
+                return self.ajaxQueue( 'save', updateArgs, fetched );
+            });
+    },
+
+    /** using a queue, perform ajaxFn on each of the models in this collection */
+    ajaxQueue : function( ajaxFn, args, collection ){
+        collection = collection || this.models;
+        return new AJAX_QUEUE.AjaxQueue( collection.slice().reverse().map( function( content, i ){
+            var fn = _.isString( ajaxFn )? content[ ajaxFn ] : ajaxFn;
+            return function(){ return fn.apply( content, args ); };
+        })).deferred;
+    },
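+
+    // Illustrative usage (a sketch, not part of the upstream code): hiding
+    // every currently-visible content, one request at a time:
+    //
+    //     contents.ajaxQueue( 'hide', [], contents.visibleAndUndeleted() )
+    //         .done( function(){ console.log( 'all hidden' ); });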
+
+    /** fetch contents' details in batches of limitPerCall - note: only get searchable details here */
+    progressivelyFetchDetails : function( options ){
+        options = options || {};
+        var deferred = jQuery.Deferred();
+        var self = this;
+        var limit = options.limitPerCall || self.limitPerProgressiveFetch;
+        // TODO: only fetch tags and annotations if specifically requested
+        var searchAttributes = HDA_MODEL.HistoryDatasetAssociation.prototype.searchAttributes;
+        var detailKeys = searchAttributes.join( ',' );
+
+        function _recursivelyFetch( offset ){
+            offset = offset || 0;
+            var _options = _.extend( _.clone( options ), {
+                view    : 'summary',
+                keys    : detailKeys,
+                limit   : limit,
+                offset  : offset,
+                reset   : offset === 0,
+                remove  : false
+            });
+
+            _.defer( function(){
+                self.fetch.call( self, _options )
+                    .fail( deferred.reject )
+                    .done( function( response ){
+                        deferred.notify( response, limit, offset );
+                        if( response.length !== limit ){
+                            self.allFetched = true;
+                            deferred.resolve( response, limit, offset );
+
+                        } else {
+                            _recursivelyFetch( offset + limit );
+                        }
+                    });
+            });
+        }
+        _recursivelyFetch();
+        return deferred;
+    },
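+
+    // Illustrative usage (a sketch, not part of the upstream code): consuming
+    // the returned deferred's progress notifications; `contents` is assumed
+    // to be an instance of this collection.
+    //
+    //     contents.progressivelyFetchDetails()
+    //         .progress( function( response, limit, offset ){
+    //             console.log( 'fetched', offset, 'through', offset + response.length );
+    //         })
+    //         .done( function(){ console.log( 'all contents fetched' ); });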
+
+    /** does some bit of JSON represent something that can be copied into this contents collection */
+    isCopyable : function( contentsJSON ){
+        var copyableModelClasses = [
+            'HistoryDatasetAssociation',
+            'HistoryDatasetCollectionAssociation'
+        ];
+        return ( ( _.isObject( contentsJSON ) && contentsJSON.id )
+              && ( _.contains( copyableModelClasses, contentsJSON.model_class ) ) );
+    },
+
+    /** copy an existing, accessible hda into this collection */
+    copy : function( json ){
+        // TODO: somehow shoehorn all this into 'save'
+        var id, type, contentType;
+        if( _.isString( json ) ){
+            id = json;
+            contentType = 'hda';
+            type = 'dataset';
+        } else {
+            id = json.id;
+            contentType = ({
+                'HistoryDatasetAssociation' : 'hda',
+                'LibraryDatasetDatasetAssociation' : 'ldda',
+                'HistoryDatasetCollectionAssociation' : 'hdca'
+            })[ json.model_class ] || 'hda';
+            type = ( contentType === 'hdca'? 'dataset_collection' : 'dataset' );
+        }
+        var collection = this,
+            xhr = jQuery.ajax( this.url(), {
+                method: 'POST',
+                contentType: 'application/json',
+                data: JSON.stringify({
+                    content : id,
+                    source  : contentType,
+                    type    : type
+                })
+            })
+            .done( function( response ){
+                collection.add([ response ], { parse: true });
+            })
+            .fail( function( error, status, message ){
+                collection.trigger( 'error', collection, xhr, {},
+                    'Error copying contents', { type: type, id: id, source: contentType });
+            });
+        return xhr;
+    },
+
+    /** create a new HDCA in this collection */
+    createHDCA : function( elementIdentifiers, collectionType, name, options ){
+        // normally collection.create returns the new model, but we need the promise from the ajax, so we fake create
+        //precondition: elementIdentifiers is an array of plain js objects
+        //  in the proper form to create the collectionType
+        var hdca = this.model({
+            history_content_type: 'dataset_collection',
+            collection_type     : collectionType,
+            history_id          : this.historyId,
+            name                : name,
+            // should probably be able to just send in a bunch of json here and restructure per class
+            // note: element_identifiers is now (incorrectly) an attribute
+            element_identifiers : elementIdentifiers
+        // do not create the model on the client until the ajax returns
+        });
+        return hdca.save( options );
+    },
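+
+    // Illustrative usage (a sketch, not part of the upstream code): the shape
+    // of elementIdentifiers for a 'paired' collection; the encoded ids are
+    // placeholders.
+    //
+    //     contents.createHDCA([
+    //         { name: 'forward', src: 'hda', id: someForwardHdaId },
+    //         { name: 'reverse', src: 'hda', id: someReverseHdaId }
+    //     ], 'paired', 'my paired reads' );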
+
+    // ........................................................................ searching
+    /** return true if all contents have the searchable attributes */
+    haveSearchDetails : function(){
+        return this.allFetched && this.all( function( content ){
+            // null (which is a valid returned annotation value)
+            // will return false when using content.has( 'annotation' )
+            //TODO: a bit hacky - formalize
+            return _.has( content.attributes, 'annotation' );
+        });
+    },
+
+    /** return a new collection of contents whose attributes contain the substring matchesWhat */
+    matches : function( matchesWhat ){
+        return this.filter( function( content ){
+            return content.matches( matchesWhat );
+        });
+    },
+
+    // ........................................................................ misc
+    /** In this override, copy the historyId to the clone */
+    clone : function(){
+        var clone = Backbone.Collection.prototype.clone.call( this );
+        clone.historyId = this.historyId;
+        return clone;
+    },
+
+    /** String representation. */
+    toString : function(){
+         return ([ 'HistoryContents(', [ this.historyId, this.length ].join(), ')' ].join( '' ));
+    }
+});
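+
+// Illustrative usage (a sketch, not part of the upstream code): toggling the
+// deleted filter and loading deleted contents; `someHistoryId` is a
+// placeholder for an existing history's encoded id.
+//
+//     var contents = new HistoryContents( [], { historyId: someHistoryId });
+//     contents.setIncludeDeleted( true );
+//     contents.fetchDeleted().done( function(){
+//         console.log( contents.deleted().length, 'deleted contents loaded' );
+//     });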
+
+
+//==============================================================================
+    return {
+        HistoryContents : HistoryContents
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/history-model.js b/client/galaxy/scripts/mvc/history/history-model.js
new file mode 100644
index 0000000..8733a1e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-model.js
@@ -0,0 +1,518 @@
+
+define([
+    "mvc/history/history-contents",
+    "mvc/history/history-preferences",
+    "mvc/base/controlled-fetch-collection",
+    "utils/utils",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( HISTORY_CONTENTS, HISTORY_PREFS, CONTROLLED_FETCH_COLLECTION, UTILS, BASE_MVC, _l ){
+'use strict';
+
+//==============================================================================
+/** @class Model for a Galaxy history resource - both a record of user
+ *      tool use and a collection of the datasets those tools produced.
+ *  @name History
+ *  @augments Backbone.Model
+ */
+var History = Backbone.Model
+        .extend( BASE_MVC.LoggableMixin )
+        .extend( BASE_MVC.mixin( BASE_MVC.SearchableModelMixin, /** @lends History.prototype */{
+    _logNamespace : 'history',
+
+    /** ms between fetches when checking running jobs/datasets for updates */
+    UPDATE_DELAY : 4000,
+
+    // values from api (may need more)
+    defaults : {
+        model_class     : 'History',
+        id              : null,
+        name            : 'Unnamed History',
+        state           : 'new',
+
+        deleted         : false,
+        contents_active : {},
+        contents_states : {},
+    },
+
+    urlRoot: Galaxy.root + 'api/histories',
+
+    contentsClass : HISTORY_CONTENTS.HistoryContents,
+
+    /** What model fields to search with */
+    searchAttributes : [
+        'name', 'annotation', 'tags'
+    ],
+
+    /** Adding title and singular tag */
+    searchAliases : {
+        title       : 'name',
+        tag         : 'tags'
+    },
+
+    // ........................................................................ set up/tear down
+    /** Set up the model
+     *  @param {Object} historyJSON model data for this History
+     *  @param {Object} options     any extra settings including logger
+     */
+    initialize : function( historyJSON, options ){
+        options = options || {};
+        this.logger = options.logger || null;
+        this.log( this + ".initialize:", historyJSON, options );
+
+        /** HistoryContents collection of the HDAs contained in this history. */
+        this.contents = new this.contentsClass( [], {
+            history     : this,
+            historyId   : this.get( 'id' ),
+            order       : options.order,
+        });
+
+        this._setUpListeners();
+        this._setUpCollectionListeners();
+
+        /** cached timeout id for the dataset updater */
+        this.updateTimeoutId = null;
+    },
+
+    /** set up any event listeners for this history including those to the contained HDAs
+     *  events: error:contents  if an error occurred with the contents collection
+     */
+    _setUpListeners : function(){
+        // if the model's id changes ('current' or null -> an actual id), update the contents history_id
+        return this.on({
+            'error' : function( model, xhr, options, msg, details ){
+                this.clearUpdateTimeout();
+            },
+            'change:id' : function( model, newId ){
+                if( this.contents ){
+                    this.contents.historyId = newId;
+                }
+            },
+        });
+    },
+
+    /** event handlers for the contents submodels */
+    _setUpCollectionListeners : function(){
+        if( !this.contents ){ return this; }
+        // bubble up errors
+        return this.listenTo( this.contents, {
+            'error' : function(){
+                this.trigger.apply( this, jQuery.makeArray( arguments ) );
+            },
+        });
+    },
+
+    // ........................................................................ derived attributes
+    /** the number of contents currently shown, given the includeDeleted/includeHidden settings */
+    contentsShown : function(){
+        var contentsActive = this.get( 'contents_active' );
+        var shown = contentsActive.active || 0;
+        shown += this.contents.includeDeleted? contentsActive.deleted : 0;
+        shown += this.contents.includeHidden?  contentsActive.hidden  : 0;
+        return shown;
+    },
+
+    /** convert size in bytes to a more human readable version */
+    nice_size : function(){
+        var size = this.get( 'size' );
+        return size? UTILS.bytesToString( size, true, 2 ) : _l( '(empty)' );
+    },
+
+    /** override to add nice_size */
+    toJSON : function(){
+        return _.extend( Backbone.Model.prototype.toJSON.call( this ), {
+            nice_size : this.nice_size()
+        });
+    },
+
+    /** override to allow getting nice_size */
+    get : function( key ){
+        if( key === 'nice_size' ){
+            return this.nice_size();
+        }
+        return Backbone.Model.prototype.get.apply( this, arguments );
+    },
+
+    // ........................................................................ common queries
+    /** T/F is this history owned by the current user (Galaxy.user)
+     *      Note that this will return false for an anon user even if the history is theirs.
+     */
+    ownedByCurrUser : function(){
+        // no currUser
+        if( !Galaxy || !Galaxy.user ){
+            return false;
+        }
+        // user is anon or history isn't owned
+        if( Galaxy.user.isAnonymous() || Galaxy.user.id !== this.get( 'user_id' ) ){
+            return false;
+        }
+        return true;
+    },
+
+    /** Return the number of running jobs assoc with this history (note: unknown === 0) */
+    numOfUnfinishedJobs : function(){
+        var unfinishedJobIds = this.get( 'non_ready_jobs' );
+        return unfinishedJobIds? unfinishedJobIds.length : 0;
+    },
+
+    /** Return the number of running hda/hdcas in this history (note: unknown === 0) */
+    numOfUnfinishedShownContents : function(){
+        return this.contents.runningAndActive().length || 0;
+    },
+
+    // ........................................................................ updates
+    _fetchContentRelatedAttributes : function(){
+        var contentRelatedAttrs = [ 'size', 'non_ready_jobs', 'contents_active', 'hid_counter' ];
+        return this.fetch({ data : $.param({ keys : contentRelatedAttrs.join( ',' ) }) });
+    },
+
+    /** check for any changes since the last time we updated (or fetch all summary contents if there was no previous update) */
+    refresh : function( options ){
+        // console.log( this + '.refresh' );
+        options = options || {};
+        var self = this;
+
+        // note if there was no previous update time, all summary contents will be fetched
+        var lastUpdateTime = self.lastUpdateTime;
+        // if we don't flip this, then a fully-fetched list will not be re-checked via fetch
+        this.contents.allFetched = false;
+        var fetchFn = self.contents.currentPage !== 0
+            ? function(){ return self.contents.fetchPage( 0 ); }
+            : function(){ return self.contents.fetchUpdated( lastUpdateTime ); };
+        return fetchFn()
+            .done( function( response, status, xhr ){
+                var serverResponseDatetime;
+                try {
+                    serverResponseDatetime = new Date( xhr.getResponseHeader( 'Date' ) );
+                } catch( err ){}
+                self.lastUpdateTime = serverResponseDatetime || new Date();
+                self.checkForUpdates( options );
+            });
+    },
+
+    /** continuously fetch updated contents every UPDATE_DELAY ms if this history's datasets or jobs are unfinished */
+    checkForUpdates : function( options ){
+        // console.log( this + '.checkForUpdates' );
+        options = options || {};
+        var delay = this.UPDATE_DELAY;
+        var self = this;
+        if( !self.id ){ return; }
+
+        function _delayThenUpdate(){
+            // prevent buildup of updater timeouts by clearing previous if any, then set new and cache id
+            self.clearUpdateTimeout();
+            self.updateTimeoutId = setTimeout( function(){
+                self.refresh( options );
+            }, delay );
+        }
+
+        // if there are still datasets in the non-ready state, recurse into this function with the new time
+        var nonReadyContentCount = this.numOfUnfinishedShownContents();
+        // console.log( 'nonReadyContentCount:', nonReadyContentCount );
+        if( nonReadyContentCount > 0 ){
+            _delayThenUpdate();
+
+        } else {
+            // no datasets are running, but currently running jobs may still produce new datasets
+            // see if the history has any running jobs and continue to update if so
+            // (also update the size for the user in either case)
+            self._fetchContentRelatedAttributes()
+                .done( function( historyData ){
+                    // console.log( 'non_ready_jobs:', historyData.non_ready_jobs );
+                    if( self.numOfUnfinishedJobs() > 0 ){
+                        _delayThenUpdate();
+
+                    } else {
+                        // otherwise, let listeners know that all updates have stopped
+                        self.trigger( 'ready' );
+                    }
+                });
+        }
+    },
+
+    /** clear the timeout and the cached timeout id */
+    clearUpdateTimeout : function(){
+        if( this.updateTimeoutId ){
+            clearTimeout( this.updateTimeoutId );
+            this.updateTimeoutId = null;
+        }
+    },
+
+    // ........................................................................ ajax
+    /** override to use actual Date objects for create/update times */
+    parse : function( response, options ){
+        var parsed = Backbone.Model.prototype.parse.call( this, response, options );
+        if( parsed.create_time ){
+            parsed.create_time = new Date( parsed.create_time );
+        }
+        if( parsed.update_time ){
+            parsed.update_time = new Date( parsed.update_time );
+        }
+        return parsed;
+    },
+
+    /** fetch this history's data (using options) then its contents (using contentsOptions) */
+    fetchWithContents : function( options, contentsOptions ){
+        options = options || {};
+        var self = this;
+
+        // console.log( this + '.fetchWithContents' );
+        // TODO: push down to a base class
+        options.view = 'dev-detailed';
+
+        // fetch history then use history data to fetch (paginated) contents
+        return this.fetch( options ).then( function getContents( history ){
+            self.contents.history = self;
+            self.contents.setHistoryId( history.id );
+            return self.fetchContents( contentsOptions );
+        });
+    },
+
+    /** fetch this history's contents, adjusting options based on the stored history preferences */
+    fetchContents : function( options ){
+        options = options || {};
+        var self = this;
+
+        // we're updating, reset the update time
+        self.lastUpdateTime = new Date();
+        return self.contents.fetchCurrentPage( options );
+    },
+
+    /** save this history, _Mark_ing it as deleted (just a flag) */
+    _delete : function( options ){
+        if( this.get( 'deleted' ) ){ return jQuery.when(); }
+        return this.save( { deleted: true }, options );
+    },
+    /** purge this history, _Mark_ing it as purged and removing all dataset data from the server */
+    purge : function( options ){
+        if( this.get( 'purged' ) ){ return jQuery.when(); }
+        return this.save( { deleted: true, purged: true }, options );
+    },
+    /** save this history, _Mark_ing it as undeleted */
+    undelete : function( options ){
+        if( !this.get( 'deleted' ) ){ return jQuery.when(); }
+        return this.save( { deleted: false }, options );
+    },
+
+    /** Make a copy of this history on the server
+     *  @param {Boolean} current    if true, set the copy as the new current history (default: true)
+     *  @param {String} name        name of new history (default: none - server sets to: Copy of <current name>)
+     *  @param {Boolean} allDatasets if falsy, send all_datasets=false in the POST
+     *  @fires copied               passed this history and the response JSON from the copy
+     *  @returns {xhr}
+     */
+    copy : function( current, name, allDatasets ){
+        current = ( current !== undefined )?( current ):( true );
+        if( !this.id ){
+            throw new Error( 'You must set the history ID before copying it.' );
+        }
+
+        var postData = { history_id  : this.id };
+        if( current ){
+            postData.current = true;
+        }
+        if( name ){
+            postData.name = name;
+        }
+        if( !allDatasets ){
+            postData.all_datasets = false;
+        }
+        postData.view = 'dev-detailed';
+
+        var history = this;
+        var copy = jQuery.post( this.urlRoot, postData );
+        // if current - queue to setAsCurrent before firing 'copied'
+        if( current ){
+            return copy.then( function( response ){
+                var newHistory = new History( response );
+                return newHistory.setAsCurrent()
+                    .done( function(){
+                        history.trigger( 'copied', history, response );
+                    });
+            });
+        }
+        return copy.done( function( response ){
+            history.trigger( 'copied', history, response );
+        });
+    },
+
+    setAsCurrent : function(){
+        var history = this,
+            xhr = jQuery.getJSON( Galaxy.root + 'history/set_as_current?id=' + this.id );
+
+        xhr.done( function(){
+            history.trigger( 'set-as-current', history );
+        });
+        return xhr;
+    },
+
+    // ........................................................................ misc
+    toString : function(){
+        return 'History(' + this.get( 'id' ) + ',' + this.get( 'name' ) + ')';
+    }
+}));
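+
+// Illustrative usage (a sketch, not part of the upstream code): copying a
+// history and making the copy current; 'copied' fires with the response json.
+//
+//     history.on( 'copied', function( original, responseJSON ){
+//         console.log( 'copied to new history:', responseJSON.id );
+//     });
+//     history.copy( true, 'my copy' );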
+
+
+//==============================================================================
+var _collectionSuper = CONTROLLED_FETCH_COLLECTION.InfinitelyScrollingCollection;
+/** @class A collection of histories (per user)
+ *      that maintains the current history as the first in the collection.
+ *  New or copied histories become the current history.
+ */
+var HistoryCollection = _collectionSuper.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace       : 'history',
+
+    model               : History,
+    /** @type {String} initial order used by collection */
+    order               : 'update_time',
+    /** @type {Number} limit used for the first fetch (or a reset) */
+    limitOnFirstFetch   : 10,
+    /** @type {Number} limit used for each subsequent fetch */
+    limitPerFetch       : 10,
+
+    initialize : function( models, options ){
+        options = options || {};
+        this.log( 'HistoryCollection.initialize', models, options );
+        _collectionSuper.prototype.initialize.call( this, models, options );
+
+        /** @type {boolean} should deleted histories be included */
+        this.includeDeleted = options.includeDeleted || false;
+
+        /** @type {String} encoded id of the history that's current */
+        this.currentHistoryId = options.currentHistoryId;
+
+        this.setUpListeners();
+        // note: models are sent to reset *after* this fn ends; up to this point
+        // the collection *is empty*
+    },
+
+    urlRoot : Galaxy.root + 'api/histories',
+    url     : function(){ return this.urlRoot; },
+
+    /** set up reflexive event handlers */
+    setUpListeners : function setUpListeners(){
+        return this.on({
+            // when a history is deleted, remove it from the collection (unless includeDeleted is set)
+            'change:deleted' : function( history ){
+                // TODO: this becomes complicated when more filters are used
+                this.debug( 'change:deleted', this.includeDeleted, history.get( 'deleted' ) );
+                if( !this.includeDeleted && history.get( 'deleted' ) ){
+                    this.remove( history );
+                }
+            },
+            // listen for a history copy, setting it to current
+            'copied' : function( original, newData ){
+                this.setCurrent( new History( newData, [] ) );
+            },
+            // when a history is made current, track the id in the collection
+            'set-as-current' : function( history ){
+                var oldCurrentId = this.currentHistoryId;
+                this.trigger( 'no-longer-current', oldCurrentId );
+                this.currentHistoryId = history.id;
+            }
+        });
+    },
+
+    /** override to change view */
+    _buildFetchData : function( options ){
+        return _.extend( _collectionSuper.prototype._buildFetchData.call( this, options ), {
+            view : 'dev-detailed'
+        });
+    },
+
+    /** override to filter out deleted and purged */
+    _buildFetchFilters : function( options ){
+        var superFilters = _collectionSuper.prototype._buildFetchFilters.call( this, options ) || {};
+        var filters = {};
+        if( !this.includeDeleted ){
+            filters.deleted = false;
+            filters.purged = false;
+        } else {
+            // force API to return both deleted and non
+            //TODO: when the API is updated, remove this
+            filters.deleted = null;
+        }
+        return _.defaults( superFilters, filters );
+    },
+
+    /** override to fetch current as well (as it may be outside the first 10, etc.) */
+    fetchFirst : function( options ){
+        var self = this;
+        // TODO: batch?
+        var xhr = $.when();
+        if( this.currentHistoryId ){
+            xhr = _collectionSuper.prototype.fetchFirst.call( self, {
+                silent: true,
+                limit : 1,
+                filters: {
+                    // without these a deleted current history will return [] here and block the other xhr
+                    'purged'        : '',
+                    'deleted'       : '',
+                    'encoded_id-in' : this.currentHistoryId,
+                }
+            });
+        }
+        return xhr.then( function(){
+            options = options || {};
+            options.offset = 0;
+            return self.fetchMore( options );
+        });
+    },
+
+    /** @type {Object} map of collection available sorting orders containing comparator fns */
+    comparators : _.extend( _.clone( _collectionSuper.prototype.comparators ), {
+        'name'       : BASE_MVC.buildComparator( 'name', { ascending: true }),
+        'name-dsc'   : BASE_MVC.buildComparator( 'name', { ascending: false }),
+        'size'       : BASE_MVC.buildComparator( 'size', { ascending: false }),
+        'size-asc'   : BASE_MVC.buildComparator( 'size', { ascending: true }),
+    }),
+
+    /** override to always have the current history first */
+    sort : function( options ){
+        options = options || {};
+        var silent = options.silent;
+        var currentHistory = this.remove( this.get( this.currentHistoryId ) );
+        _collectionSuper.prototype.sort.call( this, _.defaults({ silent: true }, options ) );
+        this.unshift( currentHistory, { silent: true });
+        if( !silent ){
+            this.trigger( 'sort', this, options );
+        }
+        return this;
+    },
+
+    /** create a new history on the server and set it to be the current history */
+    create : function create( data, hdas, historyOptions, xhrOptions ){
+        //TODO: .create is actually a collection function that's overridden here
+        var collection = this,
+            xhr = jQuery.getJSON( Galaxy.root + 'history/create_new_current'  );
+        return xhr.done( function( newData ){
+            collection.setCurrent( new History( newData, [], historyOptions || {} ) );
+        });
+    },
+
+    /** set the current history to the given history, placing it first in the collection.
+     *  Pass standard bbone options for use in unshift.
+     *  @triggers new-current passed history and this collection
+     */
+    setCurrent : function( history, options ){
+        options = options || {};
+        // new histories go in the front
+        this.unshift( history, options );
+        this.currentHistoryId = history.get( 'id' );
+        if( !options.silent ){
+            this.trigger( 'new-current', history, this );
+        }
+        return this;
+    },
+
+    toString: function toString(){
+        return 'HistoryCollection(' + this.length + ',current:' + this.currentHistoryId + ')';
+    }
+});
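+
+// Illustrative usage (a sketch, not part of the upstream code): loading a
+// history with its contents, then polling while jobs run; `someId` is a
+// placeholder for an encoded history id.
+//
+//     var history = new History({ id: someId });
+//     history.fetchWithContents().then( function(){
+//         history.checkForUpdates();
+//     });
+//     history.on( 'ready', function(){
+//         console.log( 'all datasets and jobs have finished' );
+//     });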
+
+
+//==============================================================================
+return {
+    History           : History,
+    HistoryCollection : HistoryCollection
+};});
diff --git a/client/galaxy/scripts/mvc/history/history-preferences.js b/client/galaxy/scripts/mvc/history/history-preferences.js
new file mode 100644
index 0000000..31fba2d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-preferences.js
@@ -0,0 +1,98 @@
+define([
+    "mvc/base-mvc"
+], function( BASE_MVC ){
+
+'use strict';
+
+var logNamespace = 'history';
+
+// ============================================================================
+/** session storage for individual history preferences */
+var HistoryPrefs = BASE_MVC.SessionStorageModel.extend(
+/** @lends HistoryPrefs.prototype */{
+    //TODO:?? move to user prefs?
+    defaults : {
+        //TODO:?? expandedIds to array?
+        expandedIds : {},
+        show_deleted : false,
+        show_hidden  : false
+    },
+
+    /** add an hda id to the hash of expanded hdas */
+    addExpanded : function( model ){
+//TODO: use type_id and not model
+        var current = this.get( 'expandedIds' );
+        current[ model.id ] = model.get( 'id' );
+        this.save( 'expandedIds', current );
+    },
+
+    /** remove an hda id from the hash of expanded hdas */
+    removeExpanded : function( model ){
+        var current = this.get( 'expandedIds' );
+        delete current[ model.id ];
+        this.save( 'expandedIds', current );
+    },
+
+    isExpanded : function( contentId ){
+        return _.result( this.get( 'expandedIds' ), contentId, false );
+    },
+
+    allExpanded : function(){
+        return _.values( this.get( 'expandedIds' ) );
+    },
+
+    clearExpanded : function(){
+        this.set( 'expandedIds', {} );
+    },
+
+    includeDeleted : function( val ){
+        // moving the invocation here so other components don't need to know the key
+        // TODO: change this key later
+        if( !_.isUndefined( val ) ){ this.set( 'show_deleted', val ); }
+        return this.get( 'show_deleted' );
+    },
+
+    includeHidden : function( val ){
+        // TODO: change this key later
+        if( !_.isUndefined( val ) ){ this.set( 'show_hidden', val ); }
+        return this.get( 'show_hidden' );
+    },
+
+    toString : function(){
+        return 'HistoryPrefs(' + this.id + ')';
+    }
+
+}, {
+    // ........................................................................ class vars
+    // class lvl for access w/o instantiation
+    storageKeyPrefix : 'history:',
+
+    /** key string to store each history's settings under */
+    historyStorageKey : function historyStorageKey( historyId ){
+        if( !historyId ){
+            throw new Error( 'HistoryPrefs.historyStorageKey needs valid id: ' + historyId );
+        }
+        // single point of change
+        return ( HistoryPrefs.storageKeyPrefix + historyId );
+    },
+
+    /** return the existing storage for the history with the given id (or create one if it doesn't exist) */
+    get : function get( historyId ){
+        return new HistoryPrefs({ id: HistoryPrefs.historyStorageKey( historyId ) });
+    },
+
+    /** clear all history-related items in sessionStorage (note: the historyId argument is currently unused) */
+    clearAll : function clearAll( historyId ){
+        for( var key in sessionStorage ){
+            if( key.indexOf( HistoryPrefs.storageKeyPrefix ) === 0 ){
+                sessionStorage.removeItem( key );
+            }
+        }
+    }
+});
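+
+// Illustrative usage (a sketch, not part of the upstream code): reading and
+// writing one history's sessionStorage-backed preferences; `someHistoryId`
+// and `someTypeId` are placeholders.
+//
+//     var prefs = HistoryPrefs.get( someHistoryId );
+//     prefs.includeDeleted( true );   // persisted under 'show_deleted'
+//     if( prefs.isExpanded( someTypeId ) ){
+//         console.log( someTypeId, 'was expanded in the last session' );
+//     }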
+
+//==============================================================================
+    return {
+        HistoryPrefs: HistoryPrefs
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/history-structure-view.js b/client/galaxy/scripts/mvc/history/history-structure-view.js
new file mode 100644
index 0000000..7d10dd1
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-structure-view.js
@@ -0,0 +1,583 @@
+define([
+    'mvc/history/job-dag',
+    'mvc/job/job-model',
+    'mvc/job/job-li',
+    'mvc/dataset/dataset-li',
+    'mvc/base-mvc',
+    'utils/localization',
+    'libs/d3'
+], function( JobDAG, JOB, JOB_LI, DATASET_LI, BASE_MVC, _l ){
+
+'use strict';
+
+var logNamespace = 'history';
+// ============================================================================
+/*
+TODO:
+    disruptive:
+        handle collections
+        retain contents to job relationships (out/input name)
+
+    display when *only* copied datasets
+        need to change when/how joblessVertices are created
+
+    components should be full height containers that scroll individually
+
+    use history contents views for job outputCollection, not vanilla datasets
+         need hid
+
+    show datasets when job not expanded
+        make them external to the job display
+    connect jobs by dataset
+        which datasets from job X are which inputs in job Y?
+
+    make job data human readable (needs tool data)
+        show only tool.inputs with labels (w/ job.params as values)
+        input datasets are special
+            they don't appear in job.params
+            have to connect to datasets in the dag
+                connect job.inputs to any tool.inputs by tool.input.name (in params)
+
+API: seems like this could be handled there - duplicating the input data in the proper param space
+
+    collections
+
+    use cases:
+        operations by thread:
+            copy to new history
+            rerun
+            to workflow
+        operations by branch (all descendants):
+            copy to new history
+            rerun
+            to workflow
+        signal to noise:
+            collapse/expand branch
+            hide jobs
+            visually isolate branch (hide other jobs) of thread
+            zoom (somehow)
+
+            layout changes:
+                move branch to new column in component
+                    complicated
+                pyramid
+                circular
+                    sources on inner radius
+            expansion in vertical:
+                obscures relations due to height
+                    could move details to side panel
+                difficult to compare two+ jobs/datasets when at different points in the topo
+
+    (other) controls:
+        (optionally) filter all deleted
+        (optionally) filter all hidden
+        //(optionally) filter __SET_METADATA__
+        //(optionally) filter error'd jobs
+        help and explanation
+        filtering/searching of jobs
+
+    challenges:
+        difficult to scale dom (for zoomout)
+            possible to use css transforms?
+                transform svg and dom elements
+                it is possible to use css transforms on svg nodes
+                use transform-origin to select origin to top left
+        on larger histories the svg section may become extremely large due to distance from output to input
+
+    how-to:
+        descendant ids: _.keys( component.depth/breadthFirstSearchTree( start ).vertices )
+
+    in-panel view of ancestors and descendants
+
+
+*/
+// ============================================================================
+/** @class View of one connected component of a history's job DAG:
+ *      renders job/dataset list items positioned by a layout and connected by SVG links.
+ */
+window.JobDAG = JobDAG;
+var HistoryStructureComponent = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    className : 'history-structure-component',
+
+    _INITIAL_ZOOM_LEVEL     : 1.0,
+    _MIN_ZOOM_LEVEL         : 0.25,
+    _LINK_ID_SEP            : '-to-',
+    _VERTEX_NAME_DATA_KEY   : 'vertex-name',
+
+    JobItemClass        : JOB_LI.JobListItemView,
+    ContentItemClass    : DATASET_LI.DatasetListItemView,
+
+    initialize : function( attributes ){
+        this.log( this + '(HistoryStructureComponent).initialize:', attributes );
+        this.component = attributes.component;
+
+        this._liMap = {};
+        this._createVertexItems();
+
+        this.zoomLevel = attributes.zoomLevel || this._INITIAL_ZOOM_LEVEL;
+
+        this.layout = this._createLayout( attributes.layoutOptions );
+    },
+
+    _createVertexItems : function(){
+        var view = this;
+        view.component.eachVertex( function( vertex ){
+//TODO: hack
+            var type = vertex.data.job? 'job' : 'copy',
+                li;
+            if( type === 'job' ){
+                li = view._createJobListItem( vertex );
+            } else if( type === 'copy' ){
+                li = view._createContentListItem( vertex );
+            }
+            view._liMap[ vertex.name ] = li;
+        });
+        view.debug( '_liMap:', view._liMap );
+    },
+
+    _createJobListItem : function( vertex ){
+        this.debug( '_createJobListItem:', vertex );
+        var view = this,
+            jobData = vertex.data,
+            job = new JOB.Job( jobData.job );
+
+        // get the models of the outputs for this job from the history
+        var outputModels = _.map( job.get( 'outputs' ), function( output ){
+            //note: output is { src: 'hda/dataset_collection', id: <some id> }
+            // job output doesn't *quite* match up to normal typeId
+            return view.model.contents.get( output.type_id );
+        });
+        // set the job's outputCollection to those models (setting historyId for proper ajax urls)
+        job.outputCollection.reset( outputModels );
+        job.outputCollection.historyId = view.model.id;
+        //this.debug( job.outputCollection );
+
+        // create the bbone view for the job (to be positioned later accrd. to the layout) and cache
+        var li = new view.JobItemClass({ model: job, tool: jobData.tool, jobData: jobData });
+        view.listenTo( li, 'expanding expanded collapsing collapsed', view.renderGraph );
+        view.listenTo( li.foldout, 'view:expanding view:expanded view:collapsing view:collapsed', view.renderGraph );
+        return li;
+    },
+
+    _createContentListItem : function( vertex ){
+        this.debug( '_createContentListItem:', vertex );
+        var view = this;
+        var content = vertex.data;
+        content = view.model.contents.get( content.type_id );
+        var li = new view.ContentItemClass({ model: content });
+        view.listenTo( li, 'expanding expanded collapsing collapsed', view.renderGraph );
+        return li;
+    },
+
+    layoutDefaults : {
+        linkSpacing     : 16,
+        linkWidth       : 0,
+        linkHeight      : 0,
+        jobWidth        : 300,
+        jobHeight       : 300,
+        jobSpacing      : 12,
+        linkAdjX        : 4,
+        linkAdjY        : 0
+    },
+
+    _createLayout : function( options ){
+        options = _.defaults( _.clone( options || {} ), this.layoutDefaults );
+        var view = this,
+            vertices = _.values( view.component.vertices ),
+            layout = _.extend( options, {
+                nodeMap         : {},
+                links           : [],
+                svg             : { width: 0, height: 0 }
+            });
+
+        vertices.forEach( function( v, j ){
+            var node = { name: v.name, x: 0, y: 0 };
+            layout.nodeMap[ v.name ] = node;
+        });
+
+        view.component.edges( function( e ){
+            var link = {
+                    source: e.source,
+                    target: e.target
+                };
+            layout.links.push( link );
+        });
+        //this.debug( JSON.stringify( layout, null, '  ' ) );
+        return layout;
+    },
+
+    render : function( options ){
+        this.debug( this + '.render:', options );
+        var view = this;
+        view.$el.html([
+            '<header></header>',
+            '<nav class="controls"></nav>',
+            '<figure class="graph"></figure>',
+            '<footer></footer>'
+        ].join( '' ) );
+
+        var $graph = view.$graph();
+        view.component.eachVertex( function( vertex ){
+            view._liMap[ vertex.name ].render( 0 ).$el.appendTo( $graph )
+                // store the name in the DOM and cache by that name
+                .data( view._VERTEX_NAME_DATA_KEY, vertex.name );
+        });
+        view.renderGraph();
+        return this;
+    },
+
+    $graph : function(){
+        return this.$( '.graph' );
+    },
+
+    renderGraph : function( options ){
+        this.debug( this + '.renderGraph:', options );
+        var view = this;
+
+        function _render(){
+
+            view._updateLayout();
+            // set up the display containers
+            view.$graph()
+                // use css3 transform to scale component graph
+                .css( 'transform', [ 'scale(', view.zoomLevel, ',', view.zoomLevel, ')' ].join( '' ) )
+                .width( view.layout.svg.width )
+                .height( view.layout.svg.height );
+            view.renderSVG();
+
+            // position the job views according to the layout
+            view.component.eachVertex( function( v ){
+//TODO:?? liMap needed - can't we attach to vertex?
+                var li = view._liMap[ v.name ],
+                    position = view.layout.nodeMap[ v.name ];
+                //this.debug( position );
+                li.$el.css({ top: position.y, left: position.x });
+            });
+        }
+//TODO: hack - li's invisible in updateLayout without this delay
+        if( !this.$el.is( ':visible' ) ){
+            _.delay( _render, 0 );
+        } else {
+            _render();
+        }
+        return this;
+    },
+
+    _updateLayout : function(){
+        this.debug( this + '._updateLayout:' );
+        var view = this,
+            layout = view.layout;
+
+        layout.linkHeight = layout.linkSpacing * _.size( layout.nodeMap );
+        layout.svg.height = layout.linkHeight + layout.jobHeight;
+
+        // reset for later max comparison
+        layout.svg.width = 0;
+
+//TODO:?? can't we just alter the component v and e's directly?
+        // lay out the job views, putting jobSpacing between each
+        var x = 0,
+            y = layout.linkHeight;
+        _.each( layout.nodeMap, function( node, jobId ){
+            //this.debug( node, jobId );
+            node.x = x;
+            node.y = y;
+            x += layout.jobWidth + layout.jobSpacing;
+        });
+        layout.svg.width = layout.linkWidth = Math.max( layout.svg.width, x );
+
+        // lay out the links - connecting each job by its main coords (currently)
+//TODO: somehow adjust the svg height based on the largest distance the longest connection needs
+        layout.links.forEach( function( link ){
+            var source = layout.nodeMap[ link.source ],
+                target = layout.nodeMap[ link.target ];
+            link.x1 = source.x + layout.linkAdjX;
+            link.y1 = source.y + layout.linkAdjY;
+            link.x2 = target.x + layout.linkAdjX;
+            link.y2 = target.y + layout.linkAdjY;
+        });
+
+        this.debug( JSON.stringify( layout, null, '  ' ) );
+        return this.layout;
+    },
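+
+    // Net effect of the horizontal layout: jobs sit in a single row at
+    // y = linkHeight, spaced by jobWidth + jobSpacing, with the area above
+    // the row reserved for drawing the links.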
+
+    renderSVG : function(){
+        this.debug( this + '.renderSVG:' );
+        var view = this,
+            layout = view.layout;
+
+        var svg = d3.select( this.$graph().get(0) ).select( 'svg' );
+        if( svg.empty() ){
+            svg = d3.select( this.$graph().get(0) ).append( 'svg' );
+        }
+
+        svg
+            .attr( 'width', layout.svg.width )
+            .attr( 'height', layout.svg.height );
+
+        function highlightConnect( d ){
+            d3.select( this ).classed( 'highlighted', true );
+            view._liMap[ d.source ].$el.addClass( 'highlighted' );
+            view._liMap[ d.target ].$el.addClass( 'highlighted' );
+        }
+
+        function unhighlightConnect( d ){
+            d3.select( this ).classed( 'highlighted', false );
+            view._liMap[ d.source ].$el.removeClass( 'highlighted' );
+            view._liMap[ d.target ].$el.removeClass( 'highlighted' );
+        }
+
+        var connections = svg.selectAll( '.connection' )
+                .data( layout.links );
+
+        connections
+            .enter().append( 'path' )
+                .attr( 'class', 'connection' )
+                .attr( 'id', function( d ){ return [ d.source, d.target ].join( view._LINK_ID_SEP ); })
+                .on( 'mouseover', highlightConnect )
+                .on( 'mouseout', unhighlightConnect );
+
+        connections
+                .attr( 'd', function( d ){ return view._connectionPath( d ); });
+
+        return svg.node();
+    },
+
+    _connectionPath : function( d ){
+        var CURVE_X = 0,
+            controlY = ( ( d.x2 - d.x1 ) / this.layout.svg.width ) * this.layout.linkHeight;
+        return [
+            'M', d.x1, ',', d.y1, ' ',
+            'C',
+                d.x1 + CURVE_X, ',', d.y1 - controlY, ' ',
+                d.x2 - CURVE_X, ',', d.y2 - controlY, ' ',
+            d.x2, ',', d.y2
+        ].join( '' );
+    },
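+
+    // The returned string is an SVG path of the form
+    //   'M x1,y1 C x1,(y1 - controlY) x2,(y2 - controlY) x2,y2'
+    // - a cubic Bezier whose control points lift the curve upward in
+    // proportion to the horizontal distance between the two jobs.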
+
+    events : {
+        'mouseover .graph > .list-item'  : function( ev ){ this.highlightConnected( ev.currentTarget, true ); },
+        'mouseout  .graph > .list-item'  : function( ev ){ this.highlightConnected( ev.currentTarget, false ); }
+    },
+
+    highlightConnected : function( jobElement, highlight ){
+        this.debug( 'highlightConnected', jobElement, highlight );
+        highlight = highlight !== undefined? highlight : true;
+
+        var view = this,
+            component = view.component,
+            jobClassFn = highlight? jQuery.prototype.addClass : jQuery.prototype.removeClass,
+            connectionClass = highlight? 'connection highlighted' : 'connection';
+
+        //console.debug( 'mouseover', this );
+        var $hoverTarget = jobClassFn.call( $( jobElement ), 'highlighted' ),
+            id = $hoverTarget.data( view._VERTEX_NAME_DATA_KEY );
+
+        // immediate ancestors
+        component.edges({ target: id }).forEach( function( edge ){
+            var ancestorId = edge.source,
+                ancestorLi = view._liMap[ ancestorId ];
+            //view.debug( '\t ancestor:', ancestorId, ancestorLi );
+            jobClassFn.call( ancestorLi.$el, 'highlighted' );
+            view.$( '#' + ancestorId + view._LINK_ID_SEP + id ).attr( 'class', connectionClass );
+        });
+        // descendants
+        component.vertices[ id ].eachEdge( function( edge ){
+            var descendantId = edge.target,
+                descendantLi = view._liMap[ descendantId ];
+            //view.debug( '\t descendant:', descendantId, descendantLi );
+            jobClassFn.call( descendantLi.$el, 'highlighted' );
+            view.$( '#' + id + view._LINK_ID_SEP + descendantId ).attr( 'class', connectionClass );
+        });
+    },
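+
+    // Connection paths are looked up by composite DOM id (source + _LINK_ID_SEP
+    // + target, assigned in renderSVG), so hovering a job highlights both its
+    // neighboring jobs and the paths that connect them.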
+
+    zoom : function( level ){
+        this.zoomLevel = Math.min( 1.0, Math.max( this._MIN_ZOOM_LEVEL, level ) );
+        return this.renderGraph();
+    },
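+
+    // Illustrative usage (hypothetical view variable):
+    //   componentView.zoom( 0.5 ) re-renders the graph at half scale;
+    //   levels are clamped to the range [ _MIN_ZOOM_LEVEL, 1.0 ].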
+
+    toString : function(){
+        return 'HistoryStructureComponent(' + this.model.id + ')';
+    }
+});
+
+
+// ============================================================================
+/** A HistoryStructureComponent that stacks its jobs vertically (top to bottom)
+ *  and draws the connection links in a column to their left.
+ */
+var VerticalHistoryStructureComponent = HistoryStructureComponent.extend({
+
+    //logger : console,
+
+    className : HistoryStructureComponent.prototype.className + ' vertical',
+
+    layoutDefaults : _.extend( _.clone( HistoryStructureComponent.prototype.layoutDefaults ), {
+        linkAdjX        : 0,
+        linkAdjY        : 4
+    }),
+
+//TODO: how can we use the dom height of the job li's - they're not visible when this is called?
+    _updateLayout : function(){
+        this.debug( this + '._updateLayout:' );
+        var view = this,
+            layout = view.layout;
+        //this.info( this.cid, '_updateLayout' )
+
+        layout.linkWidth = layout.linkSpacing * _.size( layout.nodeMap );
+        layout.svg.width = layout.linkWidth + layout.jobWidth;
+
+        // reset height - we'll get the max Y below to assign to it
+        layout.svg.height = 0;
+
+        //TODO:?? can't we just alter the component v and e's directly?
+        var x = layout.linkWidth,
+            y = 0;
+        _.each( layout.nodeMap, function( node, nodeId ){
+            node.x = x;
+            node.y = y;
+            var li = view._liMap[ nodeId ];
+            y += li.$el.height() + layout.jobSpacing;
+        });
+        layout.linkHeight = layout.svg.height = Math.max( layout.svg.height, y );
+
+        // lay out the links - connecting each job by its main coords (currently)
+        layout.links.forEach( function( link ){
+            var source = layout.nodeMap[ link.source ],
+                target = layout.nodeMap[ link.target ];
+            link.x1 = source.x + layout.linkAdjX;
+            link.y1 = source.y + layout.linkAdjY;
+            link.x2 = target.x + layout.linkAdjX;
+            link.y2 = target.y + layout.linkAdjY;
+            //view.debug( 'link:', link.x1, link.y1, link.x2, link.y2, link );
+        });
+
+        this.debug( JSON.stringify( layout, null, '  ' ) );
+        return layout;
+    },
+
+    _connectionPath : function( d ){
+        var CURVE_Y = 0,
+            controlX = ( ( d.y2 - d.y1 ) / this.layout.svg.height ) * this.layout.linkWidth;
+        return [
+            'M', d.x1, ',', d.y1, ' ',
+            'C',
+                d.x1 - controlX, ',', d.y1 + CURVE_Y, ' ',
+                d.x2 - controlX, ',', d.y2 - CURVE_Y, ' ',
+            d.x2, ',', d.y2
+        ].join( '' );
+    },
+
+    toString : function(){
+        return 'VerticalHistoryStructureComponent(' + this.model.id + ')';
+    }
+});
+
+
+// ============================================================================
+/** Shows a history as a graph of its jobs: builds a JobDAG from the history's
+ *  contents, jobs, and tools, then renders one component view (horizontal or
+ *  vertical) per weakly-connected component of that DAG.
+ */
+var HistoryStructureView = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    className : 'history-structure',
+
+    _layoutToComponentClass : {
+        'horizontal'    : HistoryStructureComponent,
+        'vertical'      : VerticalHistoryStructureComponent
+    },
+    //_DEFAULT_LAYOUT : 'horizontal',
+    _DEFAULT_LAYOUT : 'vertical',
+
+    initialize : function( attributes ){
+        // _.contains takes ( list, value ): check the given layout against the known layout names
+        this.layout = _.contains( _.keys( this._layoutToComponentClass ), attributes.layout )?
+            attributes.layout : this._DEFAULT_LAYOUT;
+        this.log( this + '(HistoryStructureView).initialize:', attributes, this.model );
+        //TODO:?? to model - maybe glom jobs onto model in order to persist
+        // cache jobs since we need to re-create the DAG if settings change
+        this._processTools( attributes.tools );
+        this._processJobs( attributes.jobs );
+        this._createDAG();
+    },
+
+    _processTools : function( tools ){
+        this.tools = tools || {};
+        return this.tools;
+    },
+
+    _processJobs : function( jobs ){
+        this.jobs = jobs || [];
+        return this.jobs;
+    },
+
+    _createDAG : function(){
+        this.dag = new JobDAG({
+            historyContents     : this.model.contents.toJSON(),
+            tools               : this.tools,
+            jobs                : this.jobs,
+            excludeSetMetadata  : true,
+            excludeErroredJobs  : true
+        });
+        this.debug( this + '.dag:', this.dag );
+        this._createComponents();
+    },
+
+    _createComponents : function(){
+        this.log( this + '._createComponents' );
+        var structure = this;
+
+        structure.componentViews = structure.dag.weakComponentGraphArray().map( function( componentGraph ){
+            return structure._createComponent( componentGraph );
+        });
+        return structure.componentViews;
+    },
+
+    _createComponent : function( component ){
+        this.log( this + '._createComponent:', component );
+        var ComponentClass = this._layoutToComponentClass[ this.layout ];
+        return new ComponentClass({
+                model       : this.model,
+                component   : component
+            });
+    },
+
+    render : function( options ){
+        this.log( this + '.render:', options );
+        var structure = this;
+
+        structure.$el.addClass( 'clear' ).html([
+            '<div class="controls"></div>',
+            '<div class="components"></div>'
+        ].join( '' ));
+
+        structure.componentViews.forEach( function( component ){
+            component.render().$el.appendTo( structure.$components() );
+        });
+        return structure;
+    },
+
+    $components : function(){
+        return this.$( '.components' );
+    },
+
+    changeLayout : function( layout ){
+        if( !( layout in this._layoutToComponentClass ) ){
+            throw new Error( this + ': unknown layout: ' + layout );
+        }
+        this.layout = layout;
+        this._createComponents();
+        return this.render();
+    },
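+
+    // Illustrative usage (hypothetical view variable):
+    //   structureView.changeLayout( 'horizontal' ) rebuilds the component views
+    //   with the horizontal class and re-renders; unknown names throw an Error.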
+
+    toString : function(){
+        return 'HistoryStructureView(' + this.model.id + ')';
+    }
+});
+
+
+// ============================================================================
+    return HistoryStructureView;
+});
diff --git a/client/galaxy/scripts/mvc/history/history-view-annotated.js b/client/galaxy/scripts/mvc/history/history-view-annotated.js
new file mode 100644
index 0000000..6d1fc83
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-view-annotated.js
@@ -0,0 +1,106 @@
+define([
+    "mvc/history/history-view",
+    "mvc/history/hda-li",
+    "mvc/history/hdca-li",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( HISTORY_VIEW, HDA_LI, HDCA_LI, BASE_MVC, _l ){
+
+'use strict';
+
+/* =============================================================================
+TODO:
+
+============================================================================= */
+var _super = HISTORY_VIEW.HistoryView;
+// used in history/display.mako and history/embed.mako
+/** @class View/Controller for a tabular view of the history model.
+ *
+ *  As ReadOnlyHistoryView, but with:
+ *      history annotation always shown
+ *      datasets displayed in a table:
+ *          datasets in left cells, dataset annotations in the right
+ */
+var AnnotatedHistoryView = _super.extend(/** @lends AnnotatedHistoryView.prototype */{
+
+    className    : _super.prototype.className + ' annotated-history-panel',
+
+    // ------------------------------------------------------------------------ panel rendering
+    /** In this override, add the history annotation */
+    _buildNewRender : function(){
+        //TODO: shouldn't this display regardless (on all non-current panels)?
+        var $newRender = _super.prototype._buildNewRender.call( this );
+        this.renderHistoryAnnotation( $newRender );
+        return $newRender;
+    },
+
+    /** render the history's annotation as its own field */
+    renderHistoryAnnotation : function( $newRender ){
+        var annotation = this.model.get( 'annotation' );
+        if( !annotation ){ return; }
+        $newRender.find( '> .controls .subtitle' ).text( annotation );
+    },
+
+    /** override to add headers to indicate the dataset, annotation columns */
+    renderItems : function( $whereTo ){
+        $whereTo = $whereTo || this.$el;
+        _super.prototype.renderItems.call( this, $whereTo );
+
+        var $controls = $whereTo.find( '> .controls' );
+        $controls.find( '.contents-container.headers' ).remove();
+
+        var $headers = $( '<div class="contents-container headers"/>' )
+            .append([
+                $( '<div class="history-content header"/>' ).text( _l( 'Dataset' ) ),
+                $( '<div class="additional-info header"/>' ).text( _l( 'Annotation' ) )
+            ]).appendTo( $controls );
+
+        return this.views;
+    },
+
+    // ------------------------------------------------------------------------ sub-views
+    /** override to wrap each subview */
+    _renderItemView$el : function( view ){
+        return $( '<div class="contents-container"/>' ).append([
+            view.render(0).$el,
+            $( '<div class="additional-info"/>' ).text( view.model.get( 'annotation' ) || '' )
+        ]);
+    },
+
+    // ------------------------------------------------------------------------ panel events
+    events : _.extend( _.clone( _super.prototype.events ), {
+        // clicking on any part of the row will expand the items
+        'click .contents-container' : function( ev ){
+            ev.stopPropagation();
+            $( ev.currentTarget ).find( '.list-item .title-bar' ).click();
+        },
+        // prevent propagation on icon buttons so they won't bubble up to the row and toggle the body's visibility
+        'click .icon-btn' : function( ev ){
+            ev.stopPropagation();
+            // stopProp will prevent bootstrap from getting the click needed to open a dropdown
+            //  in the case of metafile download buttons - workaround here
+            var $currTarget = $( ev.currentTarget );
+            if( $currTarget.length && $currTarget.attr( 'data-toggle' ) === 'dropdown' ){
+                $currTarget.dropdown( 'toggle' );
+            }
+        }
+    }),
+
+    _clickSectionLink : function( ev ){
+        var sectionNumber = $( ev.currentTarget ).parent().parent().data( 'section' );
+        this.openSection( sectionNumber );
+    },
+
+    // ........................................................................ misc
+    /** Return a string rep of the history */
+    toString    : function(){
+        return 'AnnotatedHistoryView(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+//==============================================================================
+    return {
+        AnnotatedHistoryView        : AnnotatedHistoryView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/history-view-edit-current.js b/client/galaxy/scripts/mvc/history/history-view-edit-current.js
new file mode 100644
index 0000000..2bd977a
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-view-edit-current.js
@@ -0,0 +1,474 @@
+define([
+    "mvc/history/history-model",
+    "mvc/history/history-view-edit",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( HISTORY_MODEL, HISTORY_VIEW_EDIT, BASE_MVC, _l ){
+
+'use strict';
+
+// ============================================================================
+/** session storage for history panel preferences (and to maintain state)
+ */
+var HistoryViewPrefs = BASE_MVC.SessionStorageModel.extend(
+/** @lends HistoryViewPrefs.prototype */{
+    defaults : {
+        /** should the tags editor be shown or hidden initially? */
+        tagsEditorShown : false,
+        /** should the annotation editor be shown or hidden initially? */
+        annotationEditorShown : false,
+        ///** what is the currently focused content (dataset or collection) in the current history?
+        // *      (the history panel will highlight and scroll to the focused content view)
+        // */
+        //focusedContentId : null
+        /** Current scroll position */
+        scrollPosition : 0
+    },
+    toString : function(){
+        return 'HistoryViewPrefs(' + JSON.stringify( this.toJSON() ) + ')';
+    }
+});
+
+/** key string to store panel prefs (made accessible on class so you can access sessionStorage directly) */
+HistoryViewPrefs.storageKey = function storageKey(){
+    return ( 'history-panel' );
+};
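+
+// Illustrative only - because the key is fixed, the stored prefs can be read
+// directly (assuming SessionStorageModel keys sessionStorage by the model id):
+//   JSON.parse( sessionStorage.getItem( HistoryViewPrefs.storageKey() ) )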
+
+/* =============================================================================
+TODO:
+
+============================================================================= */
+var _super = HISTORY_VIEW_EDIT.HistoryViewEdit;
+// used in root/index.mako
+/** @class View/Controller for the user's current history model as used in the history
+ *      panel (current right hand panel) of the analysis page.
+ *
+ *  The only history panel that:
+ *      will poll for updates.
+ *      displays datasets in reverse hid order.
+ */
+var CurrentHistoryView = _super.extend(/** @lends CurrentHistoryView.prototype */{
+
+    className           : _super.prototype.className + ' current-history-panel',
+
+    /** override to use drilldown (and not foldout) for how collections are displayed */
+    HDCAViewClass       : _super.prototype.HDCAViewClass.extend({
+        foldoutStyle : 'drilldown'
+    }),
+
+    emptyMsg : [
+        _l( 'This history is empty' ), '. ',
+        _l( 'You can ' ),
+        '<a class="uploader-link" href="javascript:void(0)">',
+            _l( 'load your own data' ),
+        '</a>',
+        _l( ' or ' ),
+        '<a class="get-data-link" href="javascript:void(0)">',
+            _l( 'get data from an external source' ),
+        '</a>'
+    ].join(''),
+
+    // ......................................................................... SET UP
+    /** Set up the view, set up storage, bind listeners to HistoryContents events */
+    initialize : function( attributes ){
+        attributes = attributes || {};
+
+        // ---- persistent preferences
+        /** maintain state / preferences over page loads */
+        this.preferences = new HistoryViewPrefs( _.extend({
+            id : HistoryViewPrefs.storageKey()
+        }, _.pick( attributes, _.keys( HistoryViewPrefs.prototype.defaults ) )));
+
+        _super.prototype.initialize.call( this, attributes );
+
+        /** sub-views that will overlay this panel (collections) */
+        this.panelStack = [];
+
+        /** id of currently focused content */
+        this.currentContentId = attributes.currentContentId || null;
+        //NOTE: purposely not sent to localstorage since panel recreation roughly lines up with a reset of this value
+    },
+
+    /** Override to cache the current scroll position with a listener */
+    _setUpListeners : function(){
+        _super.prototype._setUpListeners.call( this );
+
+        var panel = this;
+        // reset scroll position when there's a new history
+        this.on( 'new-model', function(){
+            panel.preferences.set( 'scrollPosition', 0 );
+        });
+    },
+
+    // ------------------------------------------------------------------------ loading history/item models
+    // TODO: the next three would be more appropriately moved to the app level
+    /** (re-)loads the user's current history & contents w/ details */
+    loadCurrentHistory : function(){
+        return this.loadHistory( null, { url : Galaxy.root + 'history/current_history_json' });
+    },
+
+    /** loads a history & contents w/ details and makes them the current history */
+    switchToHistory : function( historyId, attributes ){
+        if( Galaxy.user.isAnonymous() ){
+            this.trigger( 'error', _l( 'You must be logged in to switch histories' ), _l( 'Anonymous user' ) );
+            return $.when();
+        }
+        return this.loadHistory( historyId, { url : Galaxy.root + 'history/set_as_current?id=' + historyId });
+    },
+
+    /** creates a new history on the server and sets it as the user's current history */
+    createNewHistory : function( attributes ){
+        if( Galaxy.user.isAnonymous() ){
+            this.trigger( 'error', _l( 'You must be logged in to create histories' ), _l( 'Anonymous user' )  );
+            return $.when();
+        }
+        return this.loadHistory( null, { url : Galaxy.root + 'history/create_new_current' });
+    },
+
+    /** release/free/shutdown old models and set up panel for new models */
+    setModel : function( model, attributes, render ){
+        _super.prototype.setModel.call( this, model, attributes, render );
+        if( this.model && this.model.id ){
+            this.log( 'checking for updates' );
+            this.model.checkForUpdates();
+        }
+        return this;
+    },
+
+    // ------------------------------------------------------------------------ history/content event listening
+    /** listening for history events */
+    _setUpModelListeners : function(){
+        _super.prototype._setUpModelListeners.call( this );
+        // re-broadcast any model change events so that listeners don't have to re-bind to each history
+        return this.listenTo( this.model, {
+            'change:nice_size change:size' : function(){
+                this.trigger( 'history-size-change', this, this.model, arguments );
+            },
+            'change:id' : function(){
+                this.once( 'loading-done', function(){ this.model.checkForUpdates(); });
+            }
+        });
+    },
+
+    /** listening for collection events */
+    _setUpCollectionListeners : function(){
+        _super.prototype._setUpCollectionListeners.call( this );
+        // if a hidden item is created (generally by a workflow) and moves through the updater to the ready state,
+        //  then: remove it from the collection if the panel is set to NOT show hidden datasets
+        this.listenTo( this.collection, 'state:ready', function( model, newState, oldState ){
+            if( ( !model.get( 'visible' ) )
+            &&  ( !this.collection.storage.includeHidden() ) ){
+                this.removeItemView( model );
+            }
+        });
+    },
+
+    // ------------------------------------------------------------------------ panel rendering
+    /** override to add a handler to capture the scroll position when the parent scrolls */
+    _setUpBehaviors : function( $where ){
+        $where = $where || this.$el;
+        // console.log( '_setUpBehaviors', this.$scrollContainer( $where ).get(0), this.$list( $where ) );
+        // we need to do this in _setUpBehaviors (called after render) since the $el
+        // may not yet be attached to a parent and $scrollContainer() may not work
+        var panel = this;
+        _super.prototype._setUpBehaviors.call( panel, $where );
+
+        // cache the handler to remove and re-add so we don't pile up the handlers
+        if( !this._debouncedScrollCaptureHandler ){
+            this._debouncedScrollCaptureHandler = _.debounce( function scrollCapture(){
+                // cache the scroll position (only if visible)
+                if( panel.$el.is( ':visible' ) ){
+                    panel.preferences.set( 'scrollPosition', $( this ).scrollTop() );
+                }
+            }, 40 );
+        }
+
+        panel.$scrollContainer( $where )
+            .off( 'scroll', this._debouncedScrollCaptureHandler )
+            .on( 'scroll', this._debouncedScrollCaptureHandler );
+        return panel;
+    },
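+
+    // Note: _.debounce means the scroll position is written once, 40ms after
+    // scrolling pauses, rather than on every scroll event.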
+
+    /** In this override, handle null models and move the search input to the top */
+    _buildNewRender : function(){
+        if( !this.model ){ return $(); }
+        var $newRender = _super.prototype._buildNewRender.call( this );
+        $newRender.find( '.search' ).prependTo( $newRender.find( '> .controls' ) );
+        this._renderQuotaMessage( $newRender );
+        return $newRender;
+    },
+
+    /** render the message displayed when a user is over quota and can't run jobs */
+    _renderQuotaMessage : function( $whereTo ){
+        $whereTo = $whereTo || this.$el;
+        return $( this.templates.quotaMsg( {}, this ) ).prependTo( $whereTo.find( '.messages' ) );
+    },
+
+    /** In this override, get and set current panel preferences when editor is used */
+    _renderTags : function( $where ){
+        var panel = this;
+        // render tags and show/hide based on preferences
+        _super.prototype._renderTags.call( panel, $where );
+        if( panel.preferences.get( 'tagsEditorShown' ) ){
+            panel.tagsEditor.toggle( true );
+        }
+        // store preference when shown or hidden
+        panel.listenTo( panel.tagsEditor, 'hiddenUntilActivated:shown hiddenUntilActivated:hidden',
+            function( tagsEditor ){
+                panel.preferences.set( 'tagsEditorShown', tagsEditor.hidden );
+            }
+        );
+    },
+
+    /** In this override, get and set current panel preferences when editor is used */
+    _renderAnnotation : function( $where ){
+        var panel = this;
+        // render annotation and show/hide based on preferences
+        _super.prototype._renderAnnotation.call( panel, $where );
+        if( panel.preferences.get( 'annotationEditorShown' ) ){
+            panel.annotationEditor.toggle( true );
+        }
+        // store preference when shown or hidden
+        panel.listenTo( panel.annotationEditor, 'hiddenUntilActivated:shown hiddenUntilActivated:hidden',
+            function( annotationEditor ){
+                panel.preferences.set( 'annotationEditorShown', annotationEditor.hidden );
+            }
+        );
+    },
+
+    /** Override to scroll to cached position (in prefs) after swapping */
+    _swapNewRender : function( $newRender ){
+        _super.prototype._swapNewRender.call( this, $newRender );
+        var panel = this;
+        _.delay( function(){
+            var pos = panel.preferences.get( 'scrollPosition' );
+            if( pos ){
+                panel.scrollTo( pos, 0 );
+            }
+        }, 10 );
+        //TODO: is this enough of a delay on larger histories?
+
+        return this;
+    },
+
+    // ------------------------------------------------------------------------ sub-views
+    /** Override to add the current-content highlight class to currentContentId's view */
+    _attachItems : function( $whereTo ){
+        _super.prototype._attachItems.call( this, $whereTo );
+        var panel = this;
+        if( panel.currentContentId ){
+            panel._setCurrentContentById( panel.currentContentId );
+        }
+        return this;
+    },
+
+    /** Override to remove any drill down panels */
+    addItemView : function( model, collection, options ){
+        var view = _super.prototype.addItemView.call( this, model, collection, options );
+        if( !view ){ return view; }
+        if( this.panelStack.length ){ return this._collapseDrilldownPanel(); }
+        return view;
+    },
+
+    // ------------------------------------------------------------------------ collection sub-views
+    /** In this override, add/remove expanded/collapsed model ids to/from web storage */
+    _setUpItemViewListeners : function( view ){
+        var panel = this;
+        _super.prototype._setUpItemViewListeners.call( panel, view );
+        // use pub-sub to: handle drilldown expansion and collapse
+        return panel.listenTo( view, {
+            'expanded:drilldown' : function( v, drilldown ){
+                this._expandDrilldownPanel( drilldown );
+            },
+            'collapsed:drilldown' : function( v, drilldown ){
+                this._collapseDrilldownPanel( drilldown );
+            },
+        });
+    },
+
+    /** display 'current content': add a visible highlight and store the id of a content item */
+    setCurrentContent : function( view ){
+        this.$( '.history-content.current-content' ).removeClass( 'current-content' );
+        if( view ){
+            view.$el.addClass( 'current-content' );
+            this.currentContentId = view.model.id;
+        } else {
+            this.currentContentId = null;
+        }
+    },
+
+    /** find the view with the id and then call setCurrentContent on it */
+    _setCurrentContentById : function( id ){
+        var view = this.viewFromModelId( id ) || null;
+        this.setCurrentContent( view );
+    },
+
+    /** Handle drill down by hiding this panels list and controls and showing the sub-panel */
+    _expandDrilldownPanel : function( drilldown ){
+        this.panelStack.push( drilldown );
+        // hide this panel's controls and list, set the name for back navigation, and attach to the $el
+        this.$controls().add( this.$list() ).hide();
+        drilldown.parentName = this.model.get( 'name' );
+        drilldown.delegateEvents().render().$el.appendTo( this.$el );
+    },
+
+    /** Handle drilldown close by freeing the panel and re-rendering this panel */
+    _collapseDrilldownPanel : function( drilldown ){
+        this.panelStack.pop();
+        //TODO: MEM: free the panel
+        this.$controls().add( this.$list() ).show();
+    },
+
+    // ........................................................................ panel events
+    /** event map */
+    events : _.extend( _.clone( _super.prototype.events ), {
+        // the two links in the empty message
+        'click .uploader-link' : function( ev ){ Galaxy.upload.show( ev ); },
+        'click .get-data-link' : function( ev ){
+            var $toolMenu = $( '.toolMenuContainer' );
+            $toolMenu.parent().scrollTop( 0 );
+            $toolMenu.find( 'span:contains("Get Data")' ).click();
+        }
+    }),
+
+    // ........................................................................ external objects/MVC
+    listenToGalaxy : function( galaxy ){
+        this.listenTo( galaxy, {
+            // when the galaxy_main iframe is loaded with a new page,
+            // compare the url to the following list and if there's a match
+            // pull the id from url and indicate in the history view that
+            // the dataset with that id is the 'current'ly active dataset
+            'galaxy_main:load': function( data ){
+                var pathToMatch = data.fullpath;
+                var hdaId = null;
+                var useToURLRegexMap = {
+                    'display'       : /datasets\/([a-f0-9]+)\/display/,
+                    'edit'          : /datasets\/([a-f0-9]+)\/edit/,
+                    'report_error'  : /dataset\/errors\?id=([a-f0-9]+)/,
+                    'rerun'         : /tool_runner\/rerun\?id=([a-f0-9]+)/,
+                    'show_params'   : /datasets\/([a-f0-9]+)\/show_params/,
+                    // no great way to do this here? (leave it in the dataset event handlers above?)
+                    // 'visualization' : 'visualization',
+                };
+                _.find( useToURLRegexMap, function( regex, use ){
+                    // grab the more specific match result (1), save, and use it as the find flag
+                    hdaId = _.result( pathToMatch.match( regex ), 1 );
+                    return hdaId;
+                });
+                // history contents use type-prefixed ids, so mangle the web-route id to match ('dataset-' + id)
+                this._setCurrentContentById( hdaId? ( 'dataset-' + hdaId ) : null );
+            },
+            // when the center panel is given a new view, clear the current indicator
+            'center-panel:load': function( view ){
+                this._setCurrentContentById();
+            }
+        });
+    },
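+
+    // Illustrative only (hypothetical encoded id): loading
+    //   '/datasets/abc123/edit' in galaxy_main matches the 'edit' regex and
+    //   'dataset-abc123' becomes the highlighted current content.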
+
+    //TODO: remove quota meter from panel and remove this
+    /** add listeners to an external quota meter (mvc/user/user-quotameter.js) */
+    connectToQuotaMeter : function( quotaMeter ){
+        if( !quotaMeter ){
+            return this;
+        }
+        // show/hide the 'over quota message' in the history when the meter tells it to
+        this.listenTo( quotaMeter, 'quota:over',  this.showQuotaMessage );
+        this.listenTo( quotaMeter, 'quota:under', this.hideQuotaMessage );
+
+        // having to add this to handle re-render of hview while overquota (the above do not fire)
+        this.on( 'rendered rendered:initial', function(){
+            if( quotaMeter && quotaMeter.isOverQuota() ){
+                this.showQuotaMessage();
+            }
+        });
+        return this;
+    },
+
+    /** Override to preserve the quota message */
+    clearMessages : function( ev ){
+        var $target = !_.isUndefined( ev )?
+            $( ev.currentTarget )
+            : this.$messages().children( '[class$="message"]' );
+        $target = $target.not( '.quota-message' );
+        $target.fadeOut( this.fxSpeed, function(){
+            $( this ).remove();
+        });
+        return this;
+    },
+
+    /** Show the over quota message (which happens to be in the history panel).
+     */
+    showQuotaMessage : function(){
+        var $msg = this.$( '.quota-message' );
+        if( $msg.is( ':hidden' ) ){ $msg.slideDown( this.fxSpeed ); }
+    },
+
+    /** Hide the over quota message (which happens to be in the history panel).
+     */
+    hideQuotaMessage : function(){
+        var $msg = this.$( '.quota-message' );
+        if( !$msg.is( ':hidden' ) ){ $msg.slideUp( this.fxSpeed ); }
+    },
+
+    // ........................................................................ options menu
+    //TODO: remove to batch
+    /** unhide any hidden datasets */
+    unhideHidden : function() {
+        var self = this;
+        if( confirm( _l( 'Really unhide all hidden datasets?' ) ) ){
+            // get all hidden, regardless of deleted/purged
+            return self.model.contents._filterAndUpdate(
+                { visible: false, deleted: '', purged: '' },
+                { visible : true }
+            ).done( function(){
+                // TODO: would be better to render these as they're unhidden instead of all at once
+                if( !self.model.contents.includeHidden ){
+                    self.renderItems();
+                }
+            });
+        }
+        return jQuery.when();
+    },
+
+    /** delete any hidden datasets */
+    deleteHidden : function() {
+        var self = this;
+        if( confirm( _l( 'Really delete all hidden datasets?' ) ) ){
+            return self.model.contents._filterAndUpdate(
+                // get all hidden, regardless of deleted/purged
+                { visible: false, deleted: '', purged: '' },
+                // both delete *and* unhide them
+                { deleted : true, visible: true }
+            );
+        }
+        return jQuery.when();
+    },
+
+    /** Return a string rep of the history */
+    toString : function(){
+        return 'CurrentHistoryView(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+//------------------------------------------------------------------------------ TEMPLATES
+CurrentHistoryView.prototype.templates = (function(){
+
+    var quotaMsgTemplate = BASE_MVC.wrapTemplate([
+        '<div class="quota-message errormessage">',
+            _l( 'You are over your disk quota' ), '. ',
+            _l( 'Tool execution is on hold until your disk usage drops below your allocated quota' ), '.',
+        '</div>'
+    ], 'history' );
+    return _.extend( _.clone( _super.prototype.templates ), {
+        quotaMsg : quotaMsgTemplate
+    });
+
+}());
+
+
+//==============================================================================
+    return {
+        CurrentHistoryView        : CurrentHistoryView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/history-view-edit.js b/client/galaxy/scripts/mvc/history/history-view-edit.js
new file mode 100644
index 0000000..ba00eca
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-view-edit.js
@@ -0,0 +1,599 @@
+define([
+    "mvc/history/history-view",
+    "mvc/history/history-contents",
+    "mvc/dataset/states",
+    "mvc/history/hda-model",
+    "mvc/history/hda-li-edit",
+    "mvc/history/hdca-li-edit",
+    "mvc/tag",
+    "mvc/annotation",
+    "mvc/collection/list-collection-creator",
+    "mvc/collection/pair-collection-creator",
+    "mvc/collection/list-of-pairs-collection-creator",
+    "ui/fa-icon-button",
+    "mvc/ui/popup-menu",
+    "mvc/base-mvc",
+    "utils/localization",
+    "ui/editable-text",
+], function(
+    HISTORY_VIEW,
+    HISTORY_CONTENTS,
+    STATES,
+    HDA_MODEL,
+    HDA_LI_EDIT,
+    HDCA_LI_EDIT,
+    TAGS,
+    ANNOTATIONS,
+    LIST_COLLECTION_CREATOR,
+    PAIR_COLLECTION_CREATOR,
+    LIST_OF_PAIRS_COLLECTION_CREATOR,
+    faIconButton,
+    PopupMenu,
+    BASE_MVC,
+    _l
+){
+
+'use strict';
+
+/* =============================================================================
+TODO:
+
+============================================================================= */
+var _super = HISTORY_VIEW.HistoryView;
+// base class for history-view-edit-current and used as-is in history/view.mako
+/** @class Editable View/Controller for the history model.
+ *
+ *  Allows:
+ *      (everything HistoryView allows)
+ *      changing the name
+ *      displaying and editing tags and annotations
+ *      multi-selection and operations on multiple content items
+ */
+var HistoryViewEdit = _super.extend(
+/** @lends HistoryViewEdit.prototype */{
+
+    /** class to use for constructing the HistoryDatasetAssociation views */
+    HDAViewClass    : HDA_LI_EDIT.HDAListItemEdit,
+    /** class to use for constructing the HistoryDatasetCollectionAssociation views */
+    HDCAViewClass   : HDCA_LI_EDIT.HDCAListItemEdit,
+
+    // ......................................................................... SET UP
+    /** Set up the view, set up storage, bind listeners to HistoryContents events
+     *  @param {Object} attributes
+     */
+    initialize : function( attributes ){
+        attributes = attributes || {};
+        _super.prototype.initialize.call( this, attributes );
+
+        // ---- set up instance vars
+        /** editor for tags - sub-view */
+        this.tagsEditor = null;
+        /** editor for annotations - sub-view */
+        this.annotationEditor = null;
+
+        /** allow user purge of dataset files? */
+        this.purgeAllowed = attributes.purgeAllowed || false;
+
+        // states/modes the panel can be in
+        /** is the panel currently showing the dataset selection controls? */
+        this.annotationEditorShown  = attributes.annotationEditorShown || false;
+        this.tagsEditorShown  = attributes.tagsEditorShown || false;
+    },
+
+    /** Override to handle history as drag-drop target */
+    _setUpListeners : function(){
+        _super.prototype._setUpListeners.call( this );
+        return this.on({
+            'droptarget:drop': function( ev, data ){
+                // process whatever was dropped and re-hide the drop target
+                this.dataDropped( data );
+                this.dropTargetOff();
+            },
+            'view:attached view:removed': function(){
+                this._renderCounts();
+            },
+            'search:loading-progress': this._renderSearchProgress,
+            'search:searching': this._renderSearchFindings,
+        });
+    },
+
+    // ------------------------------------------------------------------------ listeners
+    /** listening for history and HDA events */
+    _setUpModelListeners : function(){
+        _super.prototype._setUpModelListeners.call( this );
+        this.listenTo( this.model, 'change:size', this.updateHistoryDiskSize );
+        return this;
+    },
+
+    /** listening for collection events */
+    _setUpCollectionListeners : function(){
+        _super.prototype._setUpCollectionListeners.call( this );
+        this.listenTo( this.collection, {
+            'change:deleted': this._handleItemDeletedChange,
+            'change:visible': this._handleItemVisibleChange,
+            'change:purged' : function( model ){
+                // have to re-fetch to get the new nice-size without the purged model
+                this.model.fetch();
+            },
+            // loading indicators for deleted/hidden
+            'fetching-deleted'      : function( collection ){
+                this.$( '> .controls .deleted-count' )
+                    .html( '<i>' + _l( 'loading...' ) + '</i>' );
+            },
+            'fetching-hidden'       : function( collection ){
+                this.$( '> .controls .hidden-count' )
+                    .html( '<i>' + _l( 'loading...' ) + '</i>' );
+            },
+            'fetching-deleted-done fetching-hidden-done'  : this._renderCounts,
+        });
+        return this;
+    },
+
+    // ------------------------------------------------------------------------ panel rendering
+    /** In this override, add tag and annotation editors and a btn to toggle the selectors */
+    _buildNewRender : function(){
+        // create a new render using a skeleton template, render title buttons, render body, and set up events, etc.
+        var $newRender = _super.prototype._buildNewRender.call( this );
+        if( !this.model ){ return $newRender; }
+
+        if( Galaxy && Galaxy.user && Galaxy.user.id && Galaxy.user.id === this.model.get( 'user_id' ) ){
+            this._renderTags( $newRender );
+            this._renderAnnotation( $newRender );
+        }
+        return $newRender;
+    },
+
+    /** Update the history size display (curr. upper right of panel). */
+    updateHistoryDiskSize : function(){
+        this.$( '.history-size' ).text( this.model.get( 'nice_size' ) );
+    },
+
+    /** override to render counts when the items are rendered */
+    renderItems : function( $whereTo ){
+        var views = _super.prototype.renderItems.call( this, $whereTo );
+        if( !this.searchFor ){ this._renderCounts( $whereTo ); }
+        return views;
+    },
+
+    /** override to show counts, what's deleted/hidden, and links to toggle those */
+    _renderCounts : function( $whereTo ){
+        $whereTo = $whereTo instanceof jQuery? $whereTo : this.$el;
+        var html = this.templates.counts( this.model.toJSON(), this );
+        return $whereTo.find( '> .controls .subtitle' ).html( html );
+    },
+
+    /** render the tags sub-view controller */
+    _renderTags : function( $where ){
+        var panel = this;
+        this.tagsEditor = new TAGS.TagsEditor({
+            model           : this.model,
+            el              : $where.find( '.controls .tags-display' ),
+            onshowFirstTime : function(){ this.render(); },
+            // show/hide sub-view tag editors when this is shown/hidden
+            onshow          : function(){
+                panel.toggleHDATagEditors( true,  panel.fxSpeed );
+            },
+            onhide          : function(){
+                panel.toggleHDATagEditors( false, panel.fxSpeed );
+            },
+            $activator      : faIconButton({
+                title   : _l( 'Edit history tags' ),
+                classes : 'history-tag-btn',
+                faIcon  : 'fa-tags'
+            }).appendTo( $where.find( '.controls .actions' ) )
+        });
+    },
+    /** render the annotation sub-view controller */
+    _renderAnnotation : function( $where ){
+        var panel = this;
+        this.annotationEditor = new ANNOTATIONS.AnnotationEditor({
+            model           : this.model,
+            el              : $where.find( '.controls .annotation-display' ),
+            onshowFirstTime : function(){ this.render(); },
+            // show/hide sub-view annotation editors when this is shown/hidden
+            onshow          : function(){
+                panel.toggleHDAAnnotationEditors( true,  panel.fxSpeed );
+            },
+            onhide          : function(){
+                panel.toggleHDAAnnotationEditors( false, panel.fxSpeed );
+            },
+            $activator      : faIconButton({
+                title   : _l( 'Edit history annotation' ),
+                classes : 'history-annotate-btn',
+                faIcon  : 'fa-comment'
+            }).appendTo( $where.find( '.controls .actions' ) )
+        });
+    },
+
+    /** Set up HistoryViewEdit js/widget behaviours
+     *  In this override, make the name editable
+     */
+    _setUpBehaviors : function( $where ){
+        $where = $where || this.$el;
+        _super.prototype._setUpBehaviors.call( this, $where );
+        if( !this.model ){ return; }
+
+        // anon users shouldn't have access to any of the following
+        if( ( !Galaxy.user || Galaxy.user.isAnonymous() )
+        ||  ( Galaxy.user.id !== this.model.get( 'user_id' ) ) ){
+            return;
+        }
+
+        var panel = this,
+            nameSelector = '> .controls .name';
+        $where.find( nameSelector )
+            .attr( 'title', _l( 'Click to rename history' ) )
+            .tooltip({ placement: 'bottom' })
+            .make_text_editable({
+                on_finish: function( newName ){
+                    var previousName = panel.model.get( 'name' );
+                    if( newName && newName !== previousName ){
+                        panel.$el.find( nameSelector ).text( newName );
+                        panel.model.save({ name: newName })
+                            .fail( function(){
+                                panel.$el.find( nameSelector ).text( panel.model.previous( 'name' ) );
+                            });
+                    } else {
+                        panel.$el.find( nameSelector ).text( previousName );
+                    }
+                }
+            });
+    },
+
+    /** return a new popup menu for choosing a multi selection action
+     *  ajax calls made for multiple datasets are queued
+     */
+    multiselectActions : function(){
+        var panel = this,
+            actions = [
+                {   html: _l( 'Hide datasets' ), func: function(){
+                        var action = HDA_MODEL.HistoryDatasetAssociation.prototype.hide;
+                        panel.getSelectedModels().ajaxQueue( action );
+                    }
+                },
+                {   html: _l( 'Unhide datasets' ), func: function(){
+                        var action = HDA_MODEL.HistoryDatasetAssociation.prototype.unhide;
+                        panel.getSelectedModels().ajaxQueue( action );
+                    }
+                },
+                {   html: _l( 'Delete datasets' ), func: function(){
+                        var action = HDA_MODEL.HistoryDatasetAssociation.prototype['delete'];
+                        panel.getSelectedModels().ajaxQueue( action );
+                    }
+                },
+                {   html: _l( 'Undelete datasets' ), func: function(){
+                        var action = HDA_MODEL.HistoryDatasetAssociation.prototype.undelete;
+                        panel.getSelectedModels().ajaxQueue( action );
+                    }
+                }
+            ];
+        if( panel.purgeAllowed ){
+            actions.push({
+                html: _l( 'Permanently delete datasets' ), func: function(){
+                    if( confirm( _l( 'This will permanently remove the data in your datasets. Are you sure?' ) ) ){
+                        var action = HDA_MODEL.HistoryDatasetAssociation.prototype.purge;
+                        panel.getSelectedModels().ajaxQueue( action );
+                    }
+                }
+            });
+        }
+        actions = actions.concat( panel._collectionActions() );
+        return actions;
+    },
+
+    /** return the list of collection-creation actions for the multi-select menu */
+    _collectionActions : function(){
+        var panel = this;
+        return [
+            {   html: _l( 'Build Dataset List' ), func: function() {
+                    LIST_COLLECTION_CREATOR.createListCollection( panel.getSelectedModels() )
+                        .done( function(){ panel.model.refresh(); });
+                }
+            },
+            // TODO: Only show quick pair if two things selected.
+            {   html: _l( 'Build Dataset Pair' ), func: function() {
+                    PAIR_COLLECTION_CREATOR.createPairCollection( panel.getSelectedModels() )
+                        .done( function(){ panel.model.refresh(); });
+                }
+            },
+            {   html: _l( 'Build List of Dataset Pairs' ), func: function() {
+                    LIST_OF_PAIRS_COLLECTION_CREATOR.createListOfPairsCollection( panel.getSelectedModels() )
+                        .done( function(){ panel.model.refresh(); });
+                }
+            },
+        ];
+    },
+
+    // ------------------------------------------------------------------------ sub-views
+    /** In this override, add purgeAllowed and whether tags/annotation editors should be shown */
+    _getItemViewOptions : function( model ){
+        var options = _super.prototype._getItemViewOptions.call( this, model );
+        _.extend( options, {
+            purgeAllowed            : this.purgeAllowed,
+            tagsEditorShown         : ( this.tagsEditor && !this.tagsEditor.hidden ),
+            annotationEditorShown   : ( this.annotationEditor && !this.annotationEditor.hidden )
+        });
+        return options;
+    },
+
+    /** If this item is deleted and we're not showing deleted items, remove the view
+     *  @param {Model} itemModel the item model to check
+     */
+    _handleItemDeletedChange : function( itemModel ){
+        if( itemModel.get( 'deleted' ) ){
+            this._handleItemDeletion( itemModel );
+        } else {
+            this._handleItemUndeletion( itemModel );
+        }
+        this._renderCounts();
+    },
+
+    _handleItemDeletion : function( itemModel ){
+        var contentsShown = this.model.get( 'contents_active' );
+        contentsShown.deleted += 1;
+        contentsShown.active -= 1;
+        if( !this.model.contents.includeDeleted ){
+            this.removeItemView( itemModel );
+        }
+        this.model.set( 'contents_active', contentsShown );
+    },
+
+    _handleItemUndeletion : function( itemModel ){
+        var contentsShown = this.model.get( 'contents_active' );
+        contentsShown.deleted -= 1;
+        if( !this.model.contents.includeDeleted ){
+            contentsShown.active -= 1;
+        }
+        this.model.set( 'contents_active', contentsShown );
+    },
+
+    /** If this item is hidden and we're not showing hidden items, remove the view
+     *  @param {Model} itemModel the item model to check
+     */
+    _handleItemVisibleChange : function( itemModel ){
+        if( itemModel.hidden() ){
+            this._handleItemHidden( itemModel );
+        } else {
+            this._handleItemUnhidden( itemModel );
+        }
+        this._renderCounts();
+    },
+
+    _handleItemHidden : function( itemModel ){
+        var contentsShown = this.model.get( 'contents_active' );
+        contentsShown.hidden += 1;
+        contentsShown.active -= 1;
+        if( !this.model.contents.includeHidden ){
+            this.removeItemView( itemModel );
+        }
+        this.model.set( 'contents_active', contentsShown );
+    },
+
+    _handleItemUnhidden : function( itemModel ){
+        var contentsShown = this.model.get( 'contents_active' );
+        contentsShown.hidden -= 1;
+        if( !this.model.contents.includeHidden ){
+            contentsShown.active -= 1;
+        }
+        this.model.set( 'contents_active', contentsShown );
+    },
+
+    /** toggle the visibility of each content's tagsEditor applying all the args sent to this function */
+    toggleHDATagEditors : function( showOrHide, speed ){
+        _.each( this.views, function( view ){
+            if( view.tagsEditor ){
+                view.tagsEditor.toggle( showOrHide, speed );
+            }
+        });
+    },
+
+    /** toggle the visibility of each content's annotationEditor applying all the args sent to this function */
+    toggleHDAAnnotationEditors : function( showOrHide, speed ){
+        _.each( this.views, function( view ){
+            if( view.annotationEditor ){
+                view.annotationEditor.toggle( showOrHide, speed );
+            }
+        });
+    },
+
+    // ------------------------------------------------------------------------ panel events
+    /** event map */
+    events : _.extend( _.clone( _super.prototype.events ), {
+        'click .show-selectors-btn'                 : 'toggleSelectors',
+        'click .toggle-deleted-link'                : function( ev ){ this.toggleShowDeleted(); },
+        'click .toggle-hidden-link'                 : function( ev ){ this.toggleShowHidden(); }
+    }),
+
+    // ------------------------------------------------------------------------ search
+    _renderSearchProgress : function( limit, offset ){
+        var stop = limit + offset;
+        return this.$( '> .controls .subtitle' ).html([
+            '<i>',
+                _l( 'Searching ' ), stop, '/', this.model.contentsShown(),
+            '</i>'
+        ].join(''));
+    },
+
+    /** override to display number found in subtitle */
+    _renderSearchFindings : function(){
+        this.$( '> .controls .subtitle' ).html([
+            _l( 'Found' ), this.views.length
+        ].join(' '));
+        return this;
+    },
+
+    // ------------------------------------------------------------------------ as drop target
+    /** turn all the drag and drop handlers on and add some help text above the drop area */
+    dropTargetOn : function(){
+        if( this.dropTarget ){ return this; }
+        this.dropTarget = true;
+
+        //TODO: to init
+        // cache the handlers on the view so dropTargetOff can unbind the same functions
+        this._dropHandlers = {
+            'dragenter' : _.bind( this.dragenter, this ),
+            'dragover'  : _.bind( this.dragover,  this ),
+            'dragleave' : _.bind( this.dragleave, this ),
+            'drop'      : _.bind( this.drop, this )
+        };
+
+        var $dropTarget = this._renderDropTarget();
+        this.$list().before([ this._renderDropTargetHelp(), $dropTarget ]);
+        for( var evName in this._dropHandlers ){
+            if( this._dropHandlers.hasOwnProperty( evName ) ){
+                //console.debug( evName, this._dropHandlers[ evName ] );
+                $dropTarget.on( evName, this._dropHandlers[ evName ] );
+            }
+        }
+        return this;
+    },
+
+    /** render a box to serve as a 'drop here' area on the history */
+    _renderDropTarget : function(){
+        this.$( '.history-drop-target' ).remove();
+        return $( '<div/>' ).addClass( 'history-drop-target' );
+    },
+
+    /** tell the user how it works  */
+    _renderDropTargetHelp : function(){
+        this.$( '.history-drop-target-help' ).remove();
+        return $( '<div/>' ).addClass( 'history-drop-target-help' )
+            .text( _l( 'Drag datasets here to copy them to the current history' ) );
+    },
+
+    /** shut down drag and drop event handlers and remove drop target */
+    dropTargetOff : function(){
+        if( !this.dropTarget ){ return this; }
+        //this.log( 'dropTargetOff' );
+        this.dropTarget = false;
+        // keep the jQuery wrapper here - .off is a jQuery method, not a DOM element method
+        var $dropTarget = this.$( '.history-drop-target' );
+        for( var evName in this._dropHandlers ){
+            if( this._dropHandlers.hasOwnProperty( evName ) ){
+                $dropTarget.off( evName, this._dropHandlers[ evName ] );
+            }
+        }
+        this.$( '.history-drop-target' ).remove();
+        this.$( '.history-drop-target-help' ).remove();
+        return this;
+    },
+    /** toggle the target on/off */
+    dropTargetToggle : function(){
+        if( this.dropTarget ){
+            this.dropTargetOff();
+        } else {
+            this.dropTargetOn();
+        }
+        return this;
+    },
+
+    dragenter : function( ev ){
+        //console.debug( 'dragenter:', this, ev );
+        ev.preventDefault();
+        ev.stopPropagation();
+        this.$( '.history-drop-target' ).css( 'border', '2px solid black' );
+    },
+    dragover : function( ev ){
+        ev.preventDefault();
+        ev.stopPropagation();
+    },
+    dragleave : function( ev ){
+        //console.debug( 'dragleave:', this, ev );
+        ev.preventDefault();
+        ev.stopPropagation();
+        this.$( '.history-drop-target' ).css( 'border', '1px dashed black' );
+    },
+    /** when text is dropped, try to parse it as JSON and trigger an event with the data */
+    drop : function( ev ){
+        ev.preventDefault();
+        //ev.stopPropagation();
+
+        var self = this;
+        var dataTransfer = ev.originalEvent.dataTransfer;
+        var data = dataTransfer.getData( "text" );
+
+        dataTransfer.dropEffect = 'move';
+        try {
+            data = JSON.parse( data );
+        } catch( err ){
+            self.warn( 'error parsing JSON from drop:', data );
+        }
+
+        self.trigger( 'droptarget:drop', ev, data, self );
+        return false;
+    },
+
+    /** handler that copies data into the contents */
+    dataDropped : function( data ){
+        var self = this;
+        // HDA: dropping will copy it to the history
+        if( _.isObject( data ) && data.model_class === 'HistoryDatasetAssociation' && data.id ){
+            if( self.model.contents.currentPage !== 0 ){
+                return self.contents.fetchPage( 0 )
+                    .then( function(){
+                        return self.model.contents.copy( data.id );
+                    });
+            }
+            return self.model.contents.copy( data.id );
+        }
+        return jQuery.when();
+    },
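+    // Usage sketch (the wiring shown is illustrative, not part of the original
+    // file): an owning view can enable the drop target and route the
+    // 'droptarget:drop' event (triggered in drop above) into dataDropped:
+    //     panel.dropTargetOn();
+    //     panel.on( 'droptarget:drop', function( ev, data ){
+    //         panel.dataDropped( data );
+    //     });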
+
+    // ........................................................................ misc
+    /** Return a string rep of the history */
+    toString    : function(){
+        return 'HistoryViewEdit(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+//------------------------------------------------------------------------------ TEMPLATES
+HistoryViewEdit.prototype.templates = (function(){
+
+    var countsTemplate = BASE_MVC.wrapTemplate([
+        '<% var shown = Math.max( view.views.length, history.contents_active.active ) %>',
+        '<% if( shown ){ %>',
+            '<span class="shown-count">',
+                '<%- shown %> ', _l( 'shown' ),
+            '</span>',
+        '<% } %>',
+
+        '<% if( history.contents_active.deleted ){ %>',
+            '<span class="deleted-count">',
+            '<% if( view.model.contents.includeDeleted ){ %>',
+                '<a class="toggle-deleted-link" href="javascript:void(0);">',
+                    _l( 'hide deleted' ),
+                '</a>',
+            '<% } else { %>',
+                '<%- history.contents_active.deleted %> ',
+                '<a class="toggle-deleted-link" href="javascript:void(0);">',
+                    _l( 'deleted' ),
+                '</a>',
+            '<% } %>',
+            '</span>',
+        '<% } %>',
+
+        '<% if( history.contents_active.hidden ){ %>',
+            '<span class="hidden-count">',
+            '<% if( view.model.contents.includeHidden ){ %>',
+                '<a class="toggle-hidden-link" href="javascript:void(0);">',
+                    _l( 'hide hidden' ),
+                '</a>',
+            '<% } else { %>',
+                '<%- history.contents_active.hidden %> ',
+                '<a class="toggle-hidden-link" href="javascript:void(0);">',
+                    _l( 'hidden' ),
+                '</a>',
+            '<% } %>',
+            '</span>',
+        '<% } %>',
+    ], 'history' );
+
+    return _.extend( _.clone( _super.prototype.templates ), {
+        counts : countsTemplate
+    });
+}());
+
+
+//==============================================================================
+    return {
+        HistoryViewEdit : HistoryViewEdit
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/history-view.js b/client/galaxy/scripts/mvc/history/history-view.js
new file mode 100644
index 0000000..4eb01d0
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/history-view.js
@@ -0,0 +1,622 @@
+define([
+    "mvc/list/list-view",
+    "mvc/history/history-model",
+    "mvc/history/history-contents",
+    "mvc/history/history-preferences",
+    "mvc/history/hda-li",
+    "mvc/history/hdca-li",
+    "mvc/user/user-model",
+    "mvc/ui/error-modal",
+    "ui/fa-icon-button",
+    "mvc/base-mvc",
+    "utils/localization",
+    "ui/search-input"
+], function(
+    LIST_VIEW,
+    HISTORY_MODEL,
+    HISTORY_CONTENTS,
+    HISTORY_PREFS,
+    HDA_LI,
+    HDCA_LI,
+    USER,
+    ERROR_MODAL,
+    faIconButton,
+    BASE_MVC,
+    _l
+){
+'use strict';
+
+/* =============================================================================
+TODO:
+
+============================================================================= */
+/** @class  non-editable, read-only View/Controller for a history model.
+ *  Allows:
+ *      changing the loaded history
+ *      displaying data, info, and download
+ *      tracking history attrs: size, tags, annotations, name, etc.
+ *  Does not allow:
+ *      changing the name
+ */
+var _super = LIST_VIEW.ModelListPanel;
+var HistoryView = _super.extend(
+/** @lends HistoryView.prototype */{
+    _logNamespace : 'history',
+
+    /** class to use for constructing the HDA views */
+    HDAViewClass        : HDA_LI.HDAListItemView,
+    /** class to use for constructing the HDCA views */
+    HDCAViewClass       : HDCA_LI.HDCAListItemView,
+    /** class to use for constructing the collection of sub-view models */
+    collectionClass     : HISTORY_CONTENTS.HistoryContents,
+    /** key of attribute in model to assign to this.collection */
+    modelCollectionKey  : 'contents',
+
+    tagName             : 'div',
+    className           : _super.prototype.className + ' history-panel',
+
+    /** string to display when the collection is empty */
+    emptyMsg            : _l( 'This history is empty' ),
+    /** displayed when no items match the search terms */
+    noneFoundMsg        : _l( 'No matching datasets found' ),
+    /** string used for search placeholder */
+    searchPlaceholder   : _l( 'search datasets' ),
+
+    /** @type {Number} ms to wait after history load to fetch/decorate hdcas with element_count */
+    FETCH_COLLECTION_COUNTS_DELAY : 2000,
+
+    // ......................................................................... SET UP
+    /** Set up the view, bind listeners.
+     *  @param {Object} attributes optional settings for the panel
+     */
+    initialize : function( attributes ){
+        _super.prototype.initialize.call( this, attributes );
+        // ---- instance vars
+        // control contents/behavior based on where (and in what context) the panel is being used
+        /** where should pages from links be displayed? (default to new tab/window) */
+        this.linkTarget = attributes.linkTarget || '_blank';
+    },
+
+    /** create and return a collection for when none is initially passed */
+    _createDefaultCollection : function(){
+        // override
+        return new this.collectionClass([], { history: this.model });
+    },
+
+    /** In this override, clear the update timer on the model */
+    freeModel : function(){
+        _super.prototype.freeModel.call( this );
+        if( this.model ){
+            this.model.clearUpdateTimeout();
+        }
+        return this;
+    },
+
+    /** create any event listeners for the panel
+     *  @fires: rendered:initial    on the first render
+     *  @fires: empty-history       when switching to a history with no contents or creating a new history
+     */
+    _setUpListeners : function(){
+        _super.prototype._setUpListeners.call( this );
+        this.on({
+            error : function( model, xhr, options, msg, details ){
+                this.errorHandler( model, xhr, options, msg, details );
+            },
+            'loading-done' : function(){
+                var self = this;
+                // after the initial load, decorate with more time consuming fields (like HDCA element_counts)
+                _.delay( function(){
+                    self.model.contents.fetchCollectionCounts();
+                }, self.FETCH_COLLECTION_COUNTS_DELAY );
+            },
+            'views:ready view:attached view:removed' : function( view ){
+                this._renderSelectButton();
+            },
+            'view:attached' : function( view ){
+                this.scrollTo(0);
+            },
+        });
+        // this.on( 'all', function(){ console.debug( arguments ); });
+    },
+
+    // ------------------------------------------------------------------------ loading history/hda models
+    /** load the history with the given id then its contents, sending ajax options to both */
+    loadHistory : function( historyId, options, contentsOptions ){
+        contentsOptions = contentsOptions || {};
+        this.info( 'loadHistory:', historyId, options, contentsOptions );
+        var self = this;
+        self.setModel( new HISTORY_MODEL.History({ id : historyId }) );
+
+        contentsOptions.silent = true;
+        self.trigger( 'loading' );
+        return self.model
+            .fetchWithContents( options, contentsOptions )
+            .always( function(){
+                self.render();
+                self.trigger( 'loading-done' );
+            });
+    },
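+    // Usage sketch (the element and history id are hypothetical): create a
+    // panel, then fetch a history and its contents in one call:
+    //     var panel = new HistoryView({ el : $( '#history-panel' ) });
+    //     panel.loadHistory( 'abc123' ).done( function(){
+    //         console.log( 'loaded:', panel.model.get( 'name' ) );
+    //     });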
+
+    /** convenience alias to the model. Updates the item list only (not the history) */
+    refreshContents : function( options ){
+        if( this.model ){
+            return this.model.refresh( options );
+        }
+        // may have callbacks - so return an empty promise
+        return $.when();
+    },
+
+    /** Override to reset web storage when the id changes (since it needs the id) */
+    _setUpCollectionListeners : function(){
+        _super.prototype._setUpCollectionListeners.call( this );
+        return this.listenTo( this.collection, {
+            // 'all' : function(){ console.log( this.collection + ':', arguments ); },
+            'fetching-more'     : function(){
+                this._toggleContentsLoadingIndicator( true );
+                this.$emptyMessage().hide();
+            },
+            'fetching-more-done': function(){ this._toggleContentsLoadingIndicator( false ); },
+        });
+    },
+
+    // ------------------------------------------------------------------------ panel rendering
+    /** hide the $el and display a loading indicator (in the $el's parent) when loading new data */
+    _showLoadingIndicator : function( msg, speed, callback ){
+        var $indicator = $( '<div class="loading-indicator"/>' );
+        this.$el.html( $indicator.text( msg ).slideDown( !_.isUndefined( speed )? speed : this.fxSpeed ) );
+    },
+
+    /** hide the loading indicator */
+    _hideLoadingIndicator : function( speed ){
+        // make speed a bit slower to compensate for slow rendering of up to 500 contents
+        this.$( '.loading-indicator' ).slideUp( !_.isUndefined( speed )? speed : ( this.fxSpeed + 200 ), function(){
+            $( this ).remove();
+        });
+    },
+
+    /** In this override, add a btn to toggle the selectors */
+    _buildNewRender : function(){
+        var $newRender = _super.prototype._buildNewRender.call( this );
+        this._renderSelectButton( $newRender );
+        return $newRender;
+    },
+
+    /** button for starting select mode */
+    _renderSelectButton : function( $where ){
+        $where = $where || this.$el;
+        // do not render selector option if no actions
+        if( !this.multiselectActions().length ){
+            return null;
+        }
+        // do not render (and remove even) if nothing to select
+        if( !this.views.length ){
+            this.hideSelectors();
+            $where.find( '.controls .actions .show-selectors-btn' ).remove();
+            return null;
+        }
+        // don't bother rendering if there's one already
+        var $existing = $where.find( '.controls .actions .show-selectors-btn' );
+        if( $existing.length ){
+            return $existing;
+        }
+
+        return faIconButton({
+            title   : _l( 'Operations on multiple datasets' ),
+            classes : 'show-selectors-btn',
+            faIcon  : 'fa-check-square-o'
+        }).prependTo( $where.find( '.controls .actions' ) );
+    },
+
+    /** override to avoid showing the initial empty message, using contents_active instead */
+    _renderEmptyMessage : function( $whereTo ){
+        var self = this;
+        var $emptyMsg = self.$emptyMessage( $whereTo );
+
+        var empty = self.model.get( 'contents_active' ).active <= 0;
+        if( empty ){
+            return $emptyMsg.empty().append( self.emptyMsg ).show();
+
+        } else if( self.searchFor && self.model.contents.haveSearchDetails() && !self.views.length ){
+            return $emptyMsg.empty().append( self.noneFoundMsg ).show();
+        }
+        $emptyMsg.hide();
+        return $();
+    },
+
+    /** the scroll container for this panel - can be $el, $el.parent(), or grandparent depending on context */
+    $scrollContainer : function( $where ){
+        // override or set via attributes.$scrollContainer
+        return this.$list( $where );
+    },
+
+    // ------------------------------------------------------------------------ subviews
+    _toggleContentsLoadingIndicator : function( show ){
+        if( !show ){
+            this.$list().find( '.contents-loading-indicator' ).remove();
+        } else {
+            this.$list().html( '<div class="contents-loading-indicator">'
+                + '<span class="fa fa-2x fa-spinner fa-spin"/></div>' );
+        }
+    },
+
+    /** override to render pagination also */
+    renderItems: function( $whereTo ){
+        // console.log( this + '.renderItems-----------------', new Date() );
+        $whereTo = $whereTo || this.$el;
+        var self = this;
+        var $list = self.$list( $whereTo );
+
+        // TODO: bootstrap hack to remove orphaned tooltips
+        $( '.tooltip' ).remove();
+
+        $list.empty();
+        self.views = [];
+
+        var models = self._filterCollection();
+        if( models.length ){
+            self._renderPagination( $whereTo );
+            self.views = self._renderSomeItems( models, $list );
+        } else {
+            // TODO: consolidate with _renderPagination above by (???) passing in models/length?
+            $whereTo.find( '> .controls .list-pagination' ).empty();
+        }
+        self._renderEmptyMessage( $whereTo ).toggle( !models.length );
+
+        self.trigger( 'views:ready', self.views );
+        return self.views;
+    },
+
+    /** render pagination controls if not searching and contents says we're paginating */
+    _renderPagination: function( $whereTo ){
+        var $paginationControls = $whereTo.find( '> .controls .list-pagination' );
+        if( this.searchFor || !this.model.contents.shouldPaginate() ) return $paginationControls.empty();
+
+        $paginationControls.html( this.templates.pagination({
+            // pagination is 1-based for the user
+            current : this.model.contents.currentPage + 1,
+            last    : this.model.contents.getLastPage() + 1,
+        }, this ));
+        $paginationControls.find( 'select.pages' ).tooltip();
+        return $paginationControls;
+    },
+
+    /** render a subset of the entire collection (client-side pagination) */
+    _renderSomeItems: function( models, $list ){
+        var self = this;
+        var views = [];
+        $list.append( models.map( function( m ){
+            var view = self._createItemView( m );
+            views.push( view );
+            return self._renderItemView$el( view );
+        }));
+        return views;
+    },
+
+    // ------------------------------------------------------------------------ sub-views
+    /** in this override, check if the contents would also display based on includeDeleted/hidden */
+    _filterItem : function( model ){
+        var self = this;
+        var contents = self.model.contents;
+        return ( contents.includeHidden  || !model.hidden() )
+            && ( contents.includeDeleted || !model.isDeletedOrPurged() )
+            && ( _super.prototype._filterItem.call( self, model ) );
+    },
+
+    /** In this override, since history contents are mixed,
+     *      get the appropriate view class based on history_content_type
+     */
+    _getItemViewClass : function( model ){
+        var contentType = model.get( "history_content_type" );
+        switch( contentType ){
+            case 'dataset':
+                return this.HDAViewClass;
+            case 'dataset_collection':
+                return this.HDCAViewClass;
+        }
+        throw new TypeError( 'Unknown history_content_type: ' + contentType );
+    },
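+    // Sketch (the subclass and view class names are hypothetical): panels for
+    // other contexts can swap in their own item views by overriding the class
+    // attributes consulted here:
+    //     var CustomHistoryView = HistoryView.extend({
+    //         HDAViewClass : CustomHDAListItemView
+    //     });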
+
+    /** in this override, add a linktarget, and expand if id is in web storage */
+    _getItemViewOptions : function( model ){
+        var options = _super.prototype._getItemViewOptions.call( this, model );
+        return _.extend( options, {
+            linkTarget      : this.linkTarget,
+            expanded        : this.model.contents.storage.isExpanded( model.id ),
+            hasUser         : this.model.ownedByCurrUser()
+        });
+    },
+
+    /** In this override, add/remove expanded/collapsed model ids to/from web storage */
+    _setUpItemViewListeners : function( view ){
+        var panel = this;
+        _super.prototype._setUpItemViewListeners.call( panel, view );
+        //TODO: send from content view: this.model.collection.storage.addExpanded
+        // maintain a list of items whose bodies are expanded
+        return panel.listenTo( view, {
+            'expanded': function( v ){
+                panel.model.contents.storage.addExpanded( v.model );
+            },
+            'collapsed': function( v ){
+                panel.model.contents.storage.removeExpanded( v.model );
+            }
+        });
+    },
+
+    /** override to remove expandedIds from webstorage */
+    collapseAll : function(){
+        this.model.contents.storage.clearExpanded();
+        _super.prototype.collapseAll.call( this );
+    },
+
+    // ------------------------------------------------------------------------ selection
+    /** Override to correctly set the historyId of the new collection */
+    getSelectedModels : function(){
+        var collection = _super.prototype.getSelectedModels.call( this );
+        collection.historyId = this.collection.historyId;
+        return collection;
+    },
+
+
+    // ------------------------------------------------------------------------ panel events
+    /** event map */
+    events : _.extend( _.clone( _super.prototype.events ), {
+        'click .show-selectors-btn'         : 'toggleSelectors',
+        'click > .controls .prev'           : '_clickPrevPage',
+        'click > .controls .next'           : '_clickNextPage',
+        'change > .controls .pages'         : '_changePageSelect',
+        // allow (error) messages to be clicked away
+        'click .messages [class$=message]'  : 'clearMessages',
+    }),
+
+    _clickPrevPage : function( ev ){
+        this.model.contents.fetchPrevPage();
+    },
+
+    _clickNextPage : function( ev ){
+        this.model.contents.fetchNextPage();
+    },
+
+    _changePageSelect : function( ev ){
+        var page = $( ev.currentTarget ).val();
+        this.model.contents.fetchPage( page );
+    },
+
+    /** Toggle and store the deleted visibility and re-render items
+     * @returns {Boolean} new setting
+     */
+    toggleShowDeleted : function( show, options ){
+        show = ( show !== undefined )?( show ):( !this.model.contents.includeDeleted );
+        var self = this;
+        var contents = self.model.contents;
+        contents.setIncludeDeleted( show, options );
+        self.trigger( 'show-deleted', show );
+
+        contents.fetchCurrentPage({ renderAll: true });
+        return show;
+    },
+
+    /** Toggle and store whether to render explicitly hidden contents
+     * @returns {Boolean} new setting
+     */
+    toggleShowHidden : function( show, store, options ){
+        // console.log( 'toggleShowHidden', show, store );
+        show = ( show !== undefined )?( show ):( !this.model.contents.includeHidden );
+        var self = this;
+        var contents = self.model.contents;
+        contents.setIncludeHidden( show, options );
+        self.trigger( 'show-hidden', show );
+
+        contents.fetchCurrentPage({ renderAll: true });
+        return show;
+    },
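+    // Usage sketch: both toggles accept an explicit boolean or, when called
+    // with no arguments, flip the current setting:
+    //     panel.toggleShowDeleted( true );  // always include deleted contents
+    //     panel.toggleShowHidden();         // flip inclusion of hidden contents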
+
+    /** On the first search, if there are no details - load them, then search */
+    _firstSearch : function( searchFor ){
+        var self = this;
+        var inputSelector = '> .controls .search-input';
+        this.log( 'onFirstSearch', searchFor );
+
+        // if the contents already have enough details to search, search and return now
+        if( self.model.contents.haveSearchDetails() ){
+            self.searchItems( searchFor );
+            return;
+        }
+
+        // otherwise, load the details progressively here
+        self.$( inputSelector ).searchInput( 'toggle-loading' );
+        // set this now so that only results will show during progress
+        self.searchFor = searchFor;
+        self.model.contents.progressivelyFetchDetails({ silent: true })
+            .progress( function( response, limit, offset ){
+                self.renderItems();
+                self.trigger( 'search:loading-progress', limit, offset );
+            })
+            .always( function(){
+                self.$el.find( inputSelector ).searchInput( 'toggle-loading' );
+            })
+            .done( function(){
+                self.searchItems( searchFor, 'force' );
+            });
+    },
+
+    /** clear the search filters and show all views that are normally shown */
+    clearSearch : function( searchFor ){
+        var self = this;
+        if( !self.searchFor ) return self;
+        //self.log( 'onSearchClear', self );
+        self.searchFor = '';
+        self.trigger( 'search:clear', self );
+        self.$( '> .controls .search-query' ).val( '' );
+        // NOTE: silent + render prevents collection update event with merge only
+        // - which causes an empty page due to event handler above
+        self.model.contents.fetchCurrentPage({ silent: true })
+            .done( function(){
+                self.renderItems();
+            });
+        return self;
+    },
+
+    // ........................................................................ error handling
+    /** Event handler for errors (from the panel, the history, or the history's contents)
+     *  Alternately use two strings for model and xhr to use custom message and title (respectively)
+     *  @param {Model or View} model    the (Backbone) source of the error
+     *  @param {XMLHTTPRequest} xhr     any ajax obj. assoc. with the error
+     *  @param {Object} options         the options map commonly used with bbone ajax
+     */
+    errorHandler : function( model, xhr, options ){
+        //TODO: to mixin or base model
+        // interrupted ajax or no connection
+        if( xhr && xhr.status === 0 && xhr.readyState === 0 ){
+            // return ERROR_MODAL.offlineErrorModal();
+            // fail silently
+            return;
+        }
+        // otherwise, leave something to report in the console
+        this.error( model, xhr, options );
+        // and feedback to a modal
+        // if sent two strings (and possibly details as 'options'), use those as message and title
+        if( _.isString( model ) && _.isString( xhr ) ){
+            var message = model;
+            var title = xhr;
+            return ERROR_MODAL.errorModal( message, title, options );
+        }
+        // bad gateway
+        // TODO: possibly to global handler
+        if( xhr && xhr.status === 502 ){
+            return ERROR_MODAL.badGatewayErrorModal();
+        }
+        return ERROR_MODAL.ajaxErrorModal( model, xhr, options );
+    },
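+    // e.g. the two-string form described above (the details object shown is
+    // illustrative):
+    //     panel.errorHandler( 'Unable to load the history contents', 'Loading error', { historyId : id } );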
+
+    /** Remove all messages from the panel. */
+    clearMessages : function( ev ){
+        var $target = !_.isUndefined( ev )?
+            $( ev.currentTarget )
+            :this.$messages().children( '[class$="message"]' );
+        $target.fadeOut( this.fxSpeed, function(){
+            $( this ).remove();
+        });
+        return this;
+    },
+
+    // ........................................................................ scrolling
+    /** Scrolls the panel to show the content sub-view with the given hid.
+     *  @param {Integer} hid    the hid of item to scroll into view
+     *  @returns {HistoryView} the panel
+     */
+    scrollToHid : function( hid ){
+        return this.scrollToItem( _.first( this.viewsWhereModel({ hid: hid }) ) );
+    },
+
+    // ........................................................................ misc
+    /** utility for adding -st, -nd, -rd, -th to numbers */
+    ordinalIndicator : function( number ){
+        var numStr = number + '';
+        // 11, 12, and 13 always take 'th' (unlike 1, 2, 3 and 21, 22, 23)
+        var lastTwoDigits = numStr.slice( -2 );
+        if( lastTwoDigits === '11' || lastTwoDigits === '12' || lastTwoDigits === '13' ){
+            return numStr + 'th';
+        }
+        switch( numStr.charAt( numStr.length - 1 )){
+            case '1': return numStr + 'st';
+            case '2': return numStr + 'nd';
+            case '3': return numStr + 'rd';
+            default : return numStr + 'th';
+        }
+    },
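+    // e.g. ordinalIndicator( 1 ) === '1st', ordinalIndicator( 2 ) === '2nd',
+    //      ordinalIndicator( 11 ) === '11th', ordinalIndicator( 21 ) === '21st'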
+
+    /** Return a string rep of the history */
+    toString : function(){
+        return 'HistoryView(' + (( this.model )?( this.model.get( 'name' )):( '' )) + ')';
+    }
+});
+
+
+//------------------------------------------------------------------------------ TEMPLATES
+HistoryView.prototype.templates = (function(){
+
+    var mainTemplate = BASE_MVC.wrapTemplate([
+        // temp container
+        '<div>',
+            '<div class="controls"></div>',
+            '<ul class="list-items"></ul>',
+            '<div class="empty-message infomessagesmall"></div>',
+        '</div>'
+    ]);
+
+    var controlsTemplate = BASE_MVC.wrapTemplate([
+        '<div class="controls">',
+            '<div class="title">',
+                '<div class="name"><%- history.name %></div>',
+            '</div>',
+            '<div class="subtitle"></div>',
+            '<div class="history-size"><%- history.nice_size %></div>',
+
+            '<div class="actions"></div>',
+
+            '<div class="messages">',
+                '<% if( history.deleted && history.purged ){ %>',
+                    '<div class="deleted-msg warningmessagesmall">',
+                        _l( 'This history has been purged and deleted' ),
+                    '</div>',
+                '<% } else if( history.deleted ){ %>',
+                    '<div class="deleted-msg warningmessagesmall">',
+                        _l( 'This history has been deleted' ),
+                    '</div>',
+                '<% } else if( history.purged ){ %>',
+                    '<div class="deleted-msg warningmessagesmall">',
+                        _l( 'This history has been purged' ),
+                    '</div>',
+                '<% } %>',
+
+                '<% if( history.message ){ %>',
+                    // should already be localized
+                    '<div class="<%= history.message.level || "info" %>messagesmall">',
+                        '<%= history.message.text %>',
+                    '</div>',
+                '<% } %>',
+            '</div>',
+
+            // add tags and annotations
+            '<div class="tags-display"></div>',
+            '<div class="annotation-display"></div>',
+
+            '<div class="search">',
+                '<div class="search-input"></div>',
+            '</div>',
+
+            '<div class="list-actions">',
+                '<div class="btn-group">',
+                    '<button class="select-all btn btn-default"',
+                            'data-mode="select">', _l( 'All' ), '</button>',
+                    '<button class="deselect-all btn btn-default"',
+                            'data-mode="select">', _l( 'None' ), '</button>',
+                '</div>',
+                '<div class="list-action-menu btn-group">',
+                '</div>',
+            '</div>',
+            '<div class="list-pagination form-inline"></div>',
+        '</div>'
+    ], 'history' );
+
+    var paginationTemplate = BASE_MVC.wrapTemplate([
+        '<button class="prev" <%- pages.current === 1 ? "disabled" : "" %>>previous</button>',
+        '<select class="pages form-control" ',
+                'title="', _l( 'Click to open and select a page. Begin typing a page number to select it' ), '">',
+            '<% _.range( 1, pages.last + 1 ).forEach( function( i ){ %>',
+                '<option value="<%- i - 1 %>" <%- i === pages.current ? "selected" : "" %>>',
+                    '<%- view.ordinalIndicator( i ) %> of <%- pages.last %> pages',
+                '</option>',
+            '<% }); %>',
+        '</select>',
+        '<button class="next" <%- pages.current === pages.last ? "disabled" : "" %>>next</button>',
+    ], 'pages' );
+
+    return _.extend( _.clone( _super.prototype.templates ), {
+        el                      : mainTemplate,
+        controls                : controlsTemplate,
+        pagination              : paginationTemplate,
+    });
+}());
+
+
+//==============================================================================
+    return {
+        HistoryView: HistoryView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/job-dag.js b/client/galaxy/scripts/mvc/history/job-dag.js
new file mode 100644
index 0000000..eb88f2f
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/job-dag.js
@@ -0,0 +1,326 @@
+define([
+    'utils/graph',
+    'utils/add-logging'
+],function( GRAPH, addLogging ){
+
+'use strict';
+
+// ============================================================================
+var _super = GRAPH.Graph;
+/** A directed acyclic graph (DAG) built from a history's job data.
+ *      Reads in job JSON, filters and processes that JSON, and builds a graph
+ *      using the connections between job inputs and outputs.
+ */
+var JobDAG = function( options ){
+    options = options || {};
+    var self = this;
+    //this.logger = console;
+
+    self.filters = [];
+
+    // instance vars
+//TODO: needed?
+    self._jobsData = [];
+    self._historyContentsMap = {};
+    self._toolMap = {};
+
+    self._outputIdToJobMap = {};
+    self.noInputJobs = [];
+    self.noOutputJobs = [];
+
+//TODO: save these?
+    self.filteredSetMetadata = [];
+    self.filteredErroredJobs = [];
+
+    self.dataKeys = [ 'jobs', 'historyContents', 'tools' ];
+    _super.call( self, true,
+        _.pick( options, self.dataKeys ),
+        _.omit( options, self.dataKeys )
+    );
+};
+JobDAG.prototype = new GRAPH.Graph();
+JobDAG.prototype.constructor = JobDAG;
+
+// add logging ability - turn off/on using the this.logger statement above
+addLogging( JobDAG );
+
+
+// ----------------------------------------------------------------------------
+/** process jobs, options, filters, and any history data, then create the graph */
+JobDAG.prototype.init = function _init( options ){
+    options = options || {};
+
+    var self = this;
+    self.options = _.defaults( options, {
+        excludeSetMetadata : false
+    });
+    self.filters = self._initFilters();
+
+    _super.prototype.init.call( self, options );
+    return self;
+};
+
+/** add job filters based on options */
+JobDAG.prototype._initFilters = function __initFilters(){
+    var self = this,
+        filters = [];
+
+    if( self.options.excludeSetMetadata ){
+        self.filteredSetMetadata = [];
+        filters.push( function filterSetMetadata( jobData ){
+            if( jobData.job.tool_id !== '__SET_METADATA__' ){ return true; }
+            self.filteredSetMetadata.push( jobData.job.id );
+            return false;
+        });
+    }
+
+    if( self.options.excludeErroredJobs ){
+        self.filteredErroredJobs = [];
+        filters.push( function filterErrored( jobData ){
+            if( jobData.job.state !== 'error' ){ return true; }
+            self.filteredErroredJobs.push( jobData.job.id );
+            return false;
+        });
+    }
+
+    // all outputs deleted
+    // all outputs hidden
+
+    if( _.isArray( self.options.filters ) ){
+        filters = filters.concat( self.options.filters );
+    }
+    self.debug( 'filters len:', filters.length );
+    return filters;
+};
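+
+// Construction sketch (the data shapes are those assumed by the methods below;
+// the filter shown is illustrative): filters receive the decorated jobData and
+// return false to drop a job from the graph, e.g. keeping only jobs whose tool
+// is known:
+//     var dag = new JobDAG({
+//         jobs : jobsJSON, historyContents : contentsJSON, tools : toolsJSON,
+//         excludeSetMetadata : true,
+//         filters : [ function( jobData ){ return !!jobData.tool; } ]
+//     });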
+
+/** read either the composite jobs/historyContents/tools data or defer to the base Graph read */
+JobDAG.prototype.read = function _read( data ){
+    var self = this;
+    if( _.has( data, 'historyContents' ) && _.has( data, 'jobs' ) && _.has( data, 'tools' ) ){
+        // a job dag is composed of these three elements:
+        //  clone the 3 data sources into the DAG, processing the jobs finally using the history and tools
+        self.preprocessHistoryContents( data.historyContents || [] )
+            .preprocessTools( data.tools || {} )
+            .preprocessJobs( data.jobs || [] );
+
+        // filter jobs and create the vertices and edges of the job DAG
+        self.createGraph( self._filterJobs() );
+        return self;
+    }
+    return _super.prototype.read.call( this, data );
+};
+
+/** clone the history contents into a map of content id to content */
+JobDAG.prototype.preprocessHistoryContents = function _preprocessHistoryContents( historyContents ){
+    this.info( 'processing history' );
+    var self = this;
+    self._historyContentsMap = {};
+
+    historyContents.forEach( function( content, i ){
+        self._historyContentsMap[ content.id ] = _.clone( content );
+    });
+    return self;
+};
+
+/** clone the tools into a map of tool id to tool */
+JobDAG.prototype.preprocessTools = function _preprocessTools( tools ){
+    this.info( 'processing tools' );
+    var self = this;
+    self._toolMap = {};
+
+    _.each( tools, function( tool, id ){
+        self._toolMap[ id ] = _.clone( tool );
+    });
+    return self;
+};
+
+/** sort the cloned jobs, decorate with tool and history contents info, and store in prop array */
+JobDAG.prototype.preprocessJobs = function _preprocessJobs( jobs ){
+    this.info( 'processing jobs' );
+    var self = this;
+    self._outputIdToJobMap = {};
+
+    self._jobsData = self.sort( jobs ).map( function( job ){
+        return self.preprocessJob( _.clone( job ) );
+    });
+//console.debug( JSON.stringify( self._jobsData, null, '    ' ) );
+//console.debug( JSON.stringify( self._outputIdToJobMap, null, '    ' ) );
+    return self;
+};
+
+/** sort the jobs based on create time */
+JobDAG.prototype.sort = function _sort( jobs ){
+    function cmpCreate( a, b ){
+        if( a.create_time > b.create_time ){ return 1; }
+        if( a.create_time < b.create_time ){ return -1; }
+        return 0;
+    }
+    return jobs.sort( cmpCreate );
+};
+
+/** decorate with input/output datasets and tool */
+JobDAG.prototype.preprocessJob = function _preprocessJob( job, index ){
+    //this.info( 'preprocessJob', job, index );
+    var self = this,
+        jobData = { job: job };
+
+    jobData.inputs = self._processInputs( job );
+    if( _.size( jobData.inputs ) === 0 ){
+        self.noInputJobs.push( job.id );
+    }
+    jobData.outputs = self._processOutputs( job );
+    if( _.size( jobData.outputs ) === 0 ){
+        self.noOutputJobs.push( job.id );
+    }
+
+    jobData.tool = self._toolMap[ job.tool_id ];
+
+    //self.info( '\t jobData:', jobData );
+    return jobData;
+};
+
+/** build a map of input id to input, decorating each with its name and history content */
+JobDAG.prototype._processInputs = function __processInputs( job ){
+    var self = this,
+        inputs = job.inputs,
+        inputMap = {};
+    _.each( inputs, function( input, nameInJob ){
+        input = _.clone( self._validateInputOutput( input ) );
+        input.name = nameInJob;
+        // since this is a DAG and we're processing in order of create time,
+        //  the inputs for this job will already be listed in _outputIdToJobMap
+        //  TODO: we can possibly exploit this
+        //console.debug( 'input in _outputIdToJobMap', self._outputIdToJobMap[ input.id ] );
+        input.content = self._historyContentsMap[ input.id ];
+        inputMap[ input.id ] = input;
+    });
+    return inputMap;
+};
+
+/** validate that a job input/output has an id and an 'hda' src */
+JobDAG.prototype._validateInputOutput = function __validateInputOutput( inputOutput ){
+    if( !inputOutput.id ){
+        throw new Error( 'No id on job input/output: ' + JSON.stringify( inputOutput ) );
+    }
+    if( !inputOutput.src || inputOutput.src !== 'hda' ){
+        throw new Error( 'Bad src on job input/output: ' + JSON.stringify( inputOutput ) );
+    }
+    return inputOutput;
+};
+
+/** build a map of output id to output, registering each in _outputIdToJobMap */
+JobDAG.prototype._processOutputs = function __processOutputs( job ){
+    var self = this,
+        outputs = job.outputs,
+        outputMap = {};
+    _.each( outputs, function( output, nameInJob ){
+        output = _.clone( self._validateInputOutput( output ) );
+        output.name = nameInJob;
+        // add dataset content to jobData
+        output.content = self._historyContentsMap[ output.id ];
+        outputMap[ output.id ] = output;
+
+        self._outputIdToJobMap[ output.id ] = job.id;
+    });
+    return outputMap;
+};
+
+/** apply all filters to the processed jobs data, returning only those that pass */
+JobDAG.prototype._filterJobs = function __filterJobs(){
+    var self = this;
+    return self._jobsData.filter( function( j, i ){ return self._filterJob( j, i ); });
+};
+
+/** pass the jobData through each filter - a false return from any filter drops the job */
+JobDAG.prototype._filterJob = function _filterJob( jobData, index ){
+    // apply filters after processing job allowing access to the additional data above inside the filters
+    var self = this;
+    for( var i=0; i<self.filters.length; i++ ){
+        if( !self.filters[i].call( self, jobData ) ){
+            self.debug( '\t job', jobData.job.id, ' has been filtered out by function:\n', self.filters[i] );
+            return false;
+        }
+    }
+    return true;
+};
+
+/** Walk all the jobs (vertices), attempting to find connections
+ *  between datasets used as both inputs and outputs (edges)
+ */
+JobDAG.prototype.createGraph = function _createGraph( jobsData ){
+    var self = this;
+    self.debug( 'connections:' );
+    //console.debug( jobsData );
+
+    _.each( jobsData, function( jobData ){
+        var id = jobData.job.id;
+        self.debug( '\t', id, jobData );
+        self.createVertex( id, jobData );
+    });
+    _.each( jobsData, function( jobData ){
+        var targetId = jobData.job.id;
+        _.each( jobData.inputs, function( input, inputId ){
+            //console.debug( '\t\t target input:', inputId, input );
+            var sourceId = self._outputIdToJobMap[ inputId ];
+            //console.debug( '\t\t source job id:', sourceId );
+            if( !sourceId ){
+                var joblessVertex = self.createJobLessVertex( inputId );
+                sourceId = joblessVertex.name;
+            }
+//TODO:?? no checking here whether sourceId is actually in the vertex map
+            //console.debug( '\t\t creating edge, source:', sourceId, self.vertices[ sourceId ] );
+            //console.debug( '\t\t creating edge, target:', targetId, self.vertices[ targetId ] );
+            self.createEdge( sourceId, targetId, self.directed, {
+                dataset : inputId
+            });
+        });
+    });
+    //console.debug( self.toVerticesAndEdges().edges );
+
+    self.debug( 'final graph: ', JSON.stringify( self.toVerticesAndEdges(), null, '  ' ) );
+    return self;
+};
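+
+// Inspection sketch (input JSON shapes assumed from the methods above): once
+// built, the graph can be dumped via the same serializer used in the debug
+// call in createGraph:
+//     var dag = new JobDAG({ jobs : jobsJSON, historyContents : contentsJSON, tools : toolsJSON });
+//     console.log( dag.toVerticesAndEdges() );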
+
+/** Return a 'mangled' version of history contents id to prevent contents <-> job id collision */
+JobDAG.prototype.createJobLessVertex = function _createJobLessVertex( contentId ){
+    // currently, copied contents are the only history contents without jobs (that I know of)
+    // note: the mangling is needed to prevent id collisions between contents and jobs in the vertex map
+    var JOBLESS_ID_MANGLER = 'copy-',
+        mangledId = JOBLESS_ID_MANGLER + contentId;
+    return this.createVertex( mangledId, this._historyContentsMap[ contentId ] );
+};
+
+/** Override to re-sort (ugh) jobs in each component by update time */
+JobDAG.prototype.weakComponentGraphArray = function(){
+    var dag = this;
+    return this.weakComponents().map( function( component ){
+//TODO: this seems to belong above (in sort) - why isn't it preserved?
+        // note: using create_time (as opposed to update_time)
+        //  since update_time for jobless/copied datasets changes more often
+        component.vertices.sort( function cmpCreate( a, b ){
+            var aCreateTime = a.data.job? a.data.job.create_time : a.data.create_time,
+                bCreateTime = b.data.job? b.data.job.create_time : b.data.create_time;
+            if( aCreateTime > bCreateTime ){ return 1; }
+            if( aCreateTime < bCreateTime ){ return -1; }
+            return 0;
+        });
+        return new GRAPH.Graph( dag.directed, component );
+    });
+};
+
+JobDAG.prototype._jobsDataMap = function(){
+    var jobsDataMap = {};
+    this._jobsData.forEach( function( jobData ){
+        jobsDataMap[ jobData.job.id ] = jobData;
+    });
+    return jobsDataMap;
+};
+
+
+// ============================================================================
+    return JobDAG;
+});
diff --git a/client/galaxy/scripts/mvc/history/multi-panel.js b/client/galaxy/scripts/mvc/history/multi-panel.js
new file mode 100644
index 0000000..284edbe
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/multi-panel.js
@@ -0,0 +1,1074 @@
+define([
+    "mvc/history/history-model",
+    "mvc/history/history-view-edit",
+    "mvc/history/copy-dialog",
+    "mvc/ui/error-modal",
+    "mvc/base-mvc",
+    "utils/ajax-queue",
+    "ui/mode-button",
+    "ui/search-input"
+], function( HISTORY_MODEL, HISTORY_VIEW_EDIT, historyCopyDialog, ERROR_MODAL, baseMVC, ajaxQueue ){
+'use strict';
+
+var logNamespace = 'history';
+/* ==============================================================================
+TODO:
+
+============================================================================== */
+/** @class A container for a history panel that renders controls for that history (delete, copy, etc.) */
+var HistoryViewColumn = Backbone.View.extend( baseMVC.LoggableMixin ).extend({
+
+    _logNamespace : logNamespace,
+
+    tagName     : 'div',
+    className   : 'history-column flex-column flex-row-container',
+    id : function id(){
+        if( !this.model ){ return ''; }
+        return 'history-column-' + this.model.get( 'id' );
+    },
+
+    // ------------------------------------------------------------------------ set up
+    /** set up passed-in panel (if any) and listeners */
+    initialize : function initialize( options ){
+        options = options || {};
+        this.purgeAllowed = !_.isUndefined( options.purgeAllowed )? options.purgeAllowed: false;
+        this.panel = options.panel || this.createPanel( options );
+
+        this.setUpListeners();
+    },
+
+    /** create a history panel for this column */
+    createPanel : function createPanel( panelOptions ){
+        return new HISTORY_VIEW_EDIT.HistoryViewEdit( _.defaults( panelOptions, {
+            model           : this.model,
+            // non-current panels should set their hdas to draggable
+            purgeAllowed    : this.purgeAllowed,
+            dragItems       : true,
+            $scrollContainer: function(){ return this.$el; },
+        }));
+    },
+
+    /** set up reflexive listeners */
+    setUpListeners : function setUpListeners(){
+        var column = this;
+        //this.log( 'setUpListeners', this );
+        this.once( 'rendered', function(){
+            column.trigger( 'rendered:initial', column );
+        });
+        this.setUpPanelListeners();
+    },
+
+    /** set listeners needed for panel */
+    setUpPanelListeners : function setUpPanelListeners(){
+        var column = this;
+        this.listenTo( this.panel, {
+            //'all': function(){ console.info( 'panel of ' + this, arguments ); },
+
+            // assumes panel will take the longest to render
+            'rendered': function(){
+                column.trigger( 'rendered', column );
+            },
+            // when a panel's view expands, turn off the click handler on the rerun button so that it uses its href
+            // this allows the button to open the tool rerun form in a new tab (instead of erroring)
+            // TODO: hack
+            'view:expanded view:rendered': function( view ){
+                view.$( '.rerun-btn' ).off();
+            }
+        }, this );
+    },
+
+    /** do the dimensions of this column overlap the given (horizontal) browser coords? */
+    inView : function( viewLeft, viewRight ){
+        var columnLeft = this.$el.offset().left,
+            columnRight = columnLeft + this.$el.width();
+        if( columnRight < viewLeft ){ return false; }
+        if( columnLeft > viewRight ){ return false; }
+        return true;
+    },
+
+    /** shortcut to the panel */
+    $panel : function $panel(){
+        return this.$( '.history-panel' );
+    },
+
+    // ------------------------------------------------------------------------ render
+    /** render this column, its panel, and set up plugins */
+    render : function render( speed ){
+        speed = ( speed !== undefined )?( speed ):( 'fast' );
+        //this.log( this + '.render', this.$el, this.el );
+        //TODO: not needed
+        var modelData = this.model? this.model.toJSON(): {};
+        this.$el.html( this.template( modelData ) );
+        this.renderPanel( speed );
+        // jq 1.12 doesn't fade/show properly when display: flex, re-set here
+        this.panel.$el.css( 'display', 'flex' );
+        // if model and not children
+            // template
+            // render controls
+        this.setUpBehaviors();
+        // add panel
+        return this;
+    },
+
+    /** set up plugins */
+    setUpBehaviors : function setUpBehaviors(){
+        //this.log( 'setUpBehaviors:', this );
+        //var column = this;
+        // on panel size change, ...
+    },
+
+    /** column body template with inner div for panel based on data (model json) */
+    template : function template( data ){
+        data = _.extend( data || {}, {
+            isCurrentHistory : this.currentHistory
+        });
+        return $([
+            '<div class="panel-controls clear flex-row">',
+                this.controlsLeftTemplate({ history: data, view: this }),
+                //'<button class="btn btn-default">Herp</button>',
+                this.controlsRightTemplate({ history: data, view: this }),
+            '</div>',
+            '<div class="inner flex-row flex-column-container">',
+                '<div id="history-', data.id, '" class="history-column history-panel flex-column"></div>',
+            '</div>'
+        ].join( '' ));
+    },
+
+    /** render the panel contained in the column using speed for fx speed */
+    renderPanel : function renderPanel( speed ){
+        speed = ( speed !== undefined )?( speed ):( 'fast' );
+        this.panel.setElement( this.$panel() ).render( speed );
+        if( this.currentHistory ){
+            this.panel.$list().before( this.panel._renderDropTargetHelp() );
+        }
+        return this;
+    },
+
+    // ------------------------------------------------------------------------ behaviors and events
+    /** event map */
+    events : {
+        // will make this the current history
+        'click .switch-to.btn'      : function(){ this.model.setAsCurrent(); },
+        //TODO: remove boilerplate from next 3
+        'click .delete-history' : function(){
+            var column = this;
+            this.model._delete()
+                .done( function( data ){ column.render(); });
+        },
+        'click .undelete-history' : function(){
+            var column = this;
+            this.model.undelete()
+                .done( function( data ){ column.render(); });
+        },
+        'click .purge-history' : function(){
+            if( confirm( _l( 'This will permanently remove the data. Are you sure?' ) ) ){
+                var column = this;
+                this.model.purge()
+                    .done( function( data ){ column.render(); });
+            }
+        },
+        // will copy this history and make the copy the current history
+        'click .copy-history'       : 'copy'
+    },
+
+    // ------------------------------------------------------------------------ non-current controls
+    /** Open a modal to get a new history name, copy the history (if not canceled), and make the copy current */
+    copy : function copy(){
+        historyCopyDialog( this.model );
+    },
+
+    // ------------------------------------------------------------------------ templates
+    /** controls template displaying controls above the panel based on this.currentHistory */
+    controlsLeftTemplate : _.template([
+        '<div class="pull-left">',
+            '<% if( data.history.isCurrentHistory ){ %>',
+                '<strong class="current-label">', _l( 'Current History' ), '</strong>',
+            '<% } else { %>',
+                '<button class="switch-to btn btn-default">', _l( 'Switch to' ), '</button>',
+            '<% } %>',
+        '</div>'
+    ].join( '' ), { variable : 'data' }),
+
+    /** controls template displaying controls above the panel based on this.currentHistory */
+    controlsRightTemplate : _.template([
+        '<div class="pull-right">',
+            '<% if( !data.history.purged ){ %>',
+                '<div class="panel-menu btn-group">',
+                    '<button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">',
+                        '<span class="caret"></span>',
+                    '</button>',
+                    '<ul class="dropdown-menu pull-right" role="menu">',
+                        '<% if( !data.history.deleted ){ %>',
+                            '<li><a href="javascript:void(0);" class="copy-history">',
+                                _l( 'Copy' ),
+                            '</a></li>',
+                            //'<li><a href="javascript:void(0);" class="publish-history">',
+                            //    _l( 'Publish' ),
+                            //'</a></li>',
+                            '<li><a href="javascript:void(0);" class="delete-history">',
+                                _l( 'Delete' ),
+                            '</a></li>',
+                        '<% } else /* if is deleted */ { %>',
+                            '<li><a href="javascript:void(0);" class="undelete-history">',
+                                _l( 'Undelete' ),
+                            '</a></li>',
+                        '<% } %>',
+                        '<% if( data.view.purgeAllowed ){ %>',
+                            '<li><a href="javascript:void(0);" class="purge-history">',
+                                _l( 'Purge' ),
+                            '</a></li>',
+                        '<% } %>',
+                    '</ul>',
+                '</div>',
+            '<% } %>',
+        '</div>'
+    ].join( '' ), { variable: 'data' }),
+
+    // ------------------------------------------------------------------------ misc
+    /** String rep */
+    toString : function(){
+        return 'HistoryViewColumn(' + ( this.panel? this.panel : '' ) + ')';
+    }
+});
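+
+// Usage sketch (the history id is hypothetical): a column wraps a single
+// history model and renders an editable panel for it:
+//     var column = new HistoryViewColumn({
+//         model : new HISTORY_MODEL.History({ id : 'deadbeef' })
+//     });
+//     $( 'body' ).append( column.render().$el );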
+
+
+//==============================================================================
+/** @class A view of a HistoryCollection and displays histories similarly to the current history panel.
+ */
+var MultiPanelColumns = Backbone.View.extend( baseMVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    className : 'multi-panel-history',
+
+    // ------------------------------------------------------------------------ set up
+    /** Set up internals, history collection, and columns to display the history */
+    initialize : function initialize( options ){
+        options = options || {};
+        this.log( this + '.init', options );
+
+        // add the className here (since we gen. pass the el in options)
+        this.$el.addClass( this.className );
+
+        // --- instance vars
+        //TODO: move these to some defaults
+        this.options = {
+            columnWidth     : 312,
+            borderWidth     : 1,
+            columnGap       : 8,
+            headerHeight    : 29,
+            footerHeight    : 0,
+            controlsHeight  : 20
+        };
+
+        /** how many histories to get when fetching a new batch/page */
+        this.perPage = options.perPage || 10;
+
+        /** named ajax queue for loading hdas */
+        this.hdaQueue = new ajaxQueue.NamedAjaxQueue( [], false );
+
+        // --- set up models, sub-views, and listeners
+        /** the original unfiltered and unordered collection of histories */
+        this.collection = null;
+        /** model id to column map */
+        this.columnMap = {};
+        /** options to pass to each column when created */
+        this.columnOptions = options.columnOptions || {};
+
+        /** what to search for within all histories */
+        this.historySearch = null;
+        /** what to search for within all datasets */
+        this.datasetSearch = null;
+
+        this.setCollection( options.histories );
+        this.setUpListeners();
+    },
+
+    /** Set up reflexive listeners */
+    setUpListeners : function setUpListeners(){
+        var multipanel = this;
+        //multipanel.log( 'setUpListeners', multipanel );
+        this.on( 'end-of-scroll', function(){
+            multipanel.collection.fetchMore();
+        });
+    },
+
+    // ------------------------------------------------------------------------ collection
+    /** Set up a (new) history collection, sorting and adding listeners
+     *  @fires 'new-collection' when set with this view as the arg
+     */
+    setCollection : function setCollection( collection ){
+        // console.log( 'setCollection:', collection );
+        this.stopListening( this.collection );
+
+        this.collection = collection || new HISTORY_MODEL.HistoryCollection();
+        this.setUpCollectionListeners();
+
+        this.createColumns();
+        this.hdaQueue.clear();
+
+        this.trigger( 'new-collection', this );
+        return this;
+    },
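+    // Usage sketch: swapping in a new collection rebuilds the columns, e.g.
+    // after changing which histories to show (construction is illustrative):
+    //     multiView.setCollection( new HISTORY_MODEL.HistoryCollection() );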
+
+    /** Add a column for each of the given history models;
+     *      rendering is deferred to the sort triggered by the fetch
+     */
+    addModels : function addModels( models, collection, options ){
+        // console.log( 'addModels:', models, collection, options );
+        options = options || {};
+        var multipanel = this;
+        models = _.isArray( models )? models : [ models ];
+        models.forEach( function( model ){
+            multipanel.addColumn( model, false );
+            // if this is from a fetch, sort will be called and it will call render
+        });
+        return this;
+    },
+
+    /** Set up listeners for the collection - handling: added histories, change of current, deletion, and sorting */
+    setUpCollectionListeners : function(){
+        var multipanel = this;
+        multipanel.listenTo( multipanel.collection, {
+            // handle ajax errors from the collection
+            'error'                         : multipanel.errorHandler,
+            // add multiple models
+            'add'                           : multipanel.addModels,
+            // when all the histories a user has have been fetched
+            'all-fetched'                   : multipanel._postFetchAll,
+            // handle addition of histories, triggered by column copy and create new
+            'new-current'                   : multipanel.addAsCurrentColumn,
+            // handle setting a history as current, triggered by history.setAsCurrent
+            'set-as-current'                : multipanel.setCurrentHistory,
+            // handle deleting a history (depends on whether panels is including deleted or not)
+            'change:deleted change:purged'  : multipanel.handleDeletedHistory,
+            // re-render columns after a sort
+            'sort' : function(){
+                multipanel.renderColumns( 0 );
+            },
+        });
+    },
+
+    _postFetchAll : function( fetchData ){
+        // console.log( '_postFetchAll' );
+        this.$( '.histories-loading-indicator' ).remove();
+        // when new histories are fetched and the indicator is not required,
+        // the panel will jump slightly left - totally aesthetic but jarring
+        // TODO: this probably would be best handled elsewhere during a refinement cycle (if any)
+        if( !this.historySearch ){
+            var $scrollContainer = this.$( '.outer-middle' );
+            $scrollContainer.scrollLeft( $scrollContainer.scrollLeft() + 24 );
+        }
+    },
+
+    /** Flag the new current history's column as current (unflagging the old one) and re-sort */
+    setCurrentHistory : function setCurrentHistory( history ){
+        this.log( 'setCurrentHistory:', history );
+        var oldCurrentColumn = _.findWhere( this.columnMap, { currentHistory: true });
+        if( oldCurrentColumn ){
+            oldCurrentColumn.currentHistory = false;
+            oldCurrentColumn.$el.height( '' );
+        }
+
+        var newCurrentColumn = this.columnMap[ this.collection.currentHistoryId ];
+        newCurrentColumn.currentHistory = true;
+        this.collection.sort();
+        this._recalcFirstColumnHeight();
+        return newCurrentColumn;
+    },
+
+    /** Either remove a deleted history or re-render it to show the deleted message
+     *      based on collection.includeDeleted
+     */
+    handleDeletedHistory : function handleDeletedHistory( history ){
+        if( history.get( 'deleted' ) || history.get( 'purged' ) ){
+            this.log( 'handleDeletedHistory', this.collection.includeDeleted, history );
+            var multipanel = this,
+                column = multipanel.columnMap[ history.id ];
+            if( !column ){ return; }
+
+            // if it's the current column, create a new, empty history as the new current
+            if( column.model.id === this.collection.currentHistoryId ){
+                //TODO: figuring out the order of async here is tricky
+                //  - for now let the user handle the two step process
+                //multipanel.collection.create().done( function(){
+                //    if( !multipanel.collection.includeDeleted ){ multipanel.removeColumn( column, false ); }
+                //});
+            } else if( !multipanel.collection.includeDeleted ){
+                multipanel.removeColumn( column );
+            }
+        }
+    },
+
+    // ........................................................................ error handling
+    /** Event handler for errors (from the history collection mainly)
+     *  Alternatively, pass two strings for model and xhr to use a custom message and title (respectively)
+     *  (e.g. this.trigger( 'error', "Here's a message", "Here's a title" ))
+     *  @param {Model or View} model    the (Backbone) source of the error
+     *  @param {XMLHTTPRequest} xhr     any ajax obj. assoc. with the error
+     *  @param {Object} options         the options map commonly used with bbone ajax
+     */
+    errorHandler : function( model, xhr, options ){
+        // interrupted ajax or no connection
+        if( xhr && xhr.status === 0 && xhr.readyState === 0 ){
+            // return ERROR_MODAL.offlineErrorModal();
+            // fail silently
+            return;
+        }
+        // otherwise, leave something to report in the console
+        this.error( model, xhr, options );
+        // and feedback to a modal
+        // if sent two strings (and possibly details as 'options'), use those as message and title
+        if( _.isString( model ) && _.isString( xhr ) ){
+            var message = model;
+            var title = xhr;
+            return ERROR_MODAL.errorModal( message, title, options );
+        }
+        // bad gateway
+        // TODO: possibly to global handler
+        if( xhr && xhr.status === 502 ){
+            return ERROR_MODAL.badGatewayErrorModal();
+        }
+        return ERROR_MODAL.ajaxErrorModal( model, xhr, options );
+    },
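+
+    // Illustrative sketch (not from the original source): the two ways the
+    // 'error' event reaches this handler - a model/xhr pair from a failed
+    // ajax call, or two plain strings for a custom message and title:
+    //
+    //     collection.trigger( 'error', model, xhr, options );
+    //     this.trigger( 'error', 'Loading failed', 'Error', {} );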
+
+    /** Delegate ajax errors to the error modal */
+    _ajaxErrorHandler : function(){
+        ERROR_MODAL.ajaxErrorModal.apply( null, _.toArray( arguments ) );
+    },
+
+    /** create a new history and set it to current */
+    create : function( ev ){
+        return this.collection.create({ current: true });
+    },
+
+    // ------------------------------------------------------------------------ columns
+    /** create columns from collection */
+    createColumns : function createColumns( models, columnOptions ){
+        columnOptions = columnOptions || this.options.columnOptions;
+        var multipanel = this;
+        // clear column map
+        // TODO: make cumulative
+        multipanel.columnMap = {};
+        multipanel.collection.each( function( model, i ){
+            var column = multipanel.createColumn( model, columnOptions );
+            multipanel.columnMap[ model.id ] = column;
+        });
+    },
+
+    /** create a column and its panel and set up any listeners to them */
+    createColumn : function createColumn( history, options ){
+        // the passed options may be re-used for other columns, so extend a copy before adding the model to avoid polluting them
+        options = _.extend( {}, options, {
+            model       : history,
+            purgeAllowed: Galaxy.config.allow_user_dataset_purge
+        });
+        var column = new HistoryViewColumn( options );
+        if( history.id === this.collection.currentHistoryId ){ column.currentHistory = true; }
+        this.setUpColumnListeners( column );
+        if( this.datasetSearch ){
+            column.panel.searchItems( this.datasetSearch );
+            this.queueHdaFetchDetails( column );
+        }
+        return column;
+    },
+
+    /** add a new column for history and render all columns if render is true */
+    addColumn : function add( history, render ){
+        // console.debug( 'adding column for:', history, render );
+        render = render !== undefined? render : true;
+        var newColumn = this.createColumn( history );
+        this.columnMap[ history.id ] = newColumn;
+        if( render ){
+            this.renderColumns();
+        }
+        return newColumn;
+    },
+
+    /** add a new column for history and make it the current history/column */
+    addAsCurrentColumn : function add( history, collection, options ){
+        //this.log( 'adding current column for:', history );
+        var multipanel = this,
+            newColumn = this.addColumn( history, false );
+        this.setCurrentHistory( history );
+        newColumn.once( 'rendered', function(){
+            multipanel.queueHdaFetch( newColumn );
+        });
+        return newColumn;
+    },
+
+    /** remove the given column, its listeners, and optionally re-render */
+    removeColumn : function remove( column, render ){
+        render = render !== undefined? render : true;
+        this.log( 'removeColumn', column );
+        if( !column ){ return; }
+        var multipanel = this,
+            widthToRemove = this.options.columnWidth + this.options.columnGap;
+        column.$el.fadeOut( 'fast', function(){
+            if( render ){
+                $( this ).remove();
+                multipanel.$( '.middle' ).width( multipanel.$( '.middle' ).width() - widthToRemove );
+                multipanel.checkColumnsInView();
+                multipanel._recalcFirstColumnHeight();
+            }
+
+            //TODO: to freeColumn (where Columns have freePanel)
+            multipanel.stopListening( column.panel );
+            multipanel.stopListening( column );
+            delete multipanel.columnMap[ column.model.id ];
+            column.remove();
+        });
+    },
+
+    /** set up listeners for a column and its panel - handling: hda lazy-loading, drag and drop */
+    setUpColumnListeners : function setUpColumnListeners( column ){
+        var multipanel = this;
+        multipanel.listenTo( column, {
+            //'all': function(){ console.info( 'column ' + column + ':', arguments ) },
+            'in-view': multipanel.queueHdaFetch
+        });
+
+        multipanel.listenTo( column.panel, {
+            //'all': function(){ console.info( 'panel ' + column.panel + ':', arguments ) },
+
+            'view:draggable:dragstart': function( ev, view, panel, column ){
+                multipanel._dropData = JSON.parse( ev.dataTransfer.getData( 'text' ) );
+                multipanel.currentColumnDropTargetOn();
+            },
+            'view:draggable:dragend': function( ev, view, panel, column ){
+                multipanel._dropData = null;
+                multipanel.currentColumnDropTargetOff();
+            },
+            'droptarget:drop': function( ev, data, panel ){
+                //note: bad copy sources fail silently
+                var toCopy = multipanel._dropData.filter( function( json ){
+                    return panel.model.contents.isCopyable( json );
+                });
+                multipanel._dropData = null;
+
+                var queue = new ajaxQueue.NamedAjaxQueue();
+                if( panel.model.contents.currentPage !== 0 ){
+                    queue.add({
+                        name : 'fetch-front-page',
+                        fn : function(){
+                            return panel.model.contents.fetchPage( 0 );
+                        }
+                    });
+                }
+                // need to reverse to better match expected order
+                // TODO: reconsider order in list-view._setUpItemViewListeners, dragstart (instead of here)
+                toCopy.reverse().forEach( function( content ){
+                    queue.add({
+                        name : 'copy-' + content.id,
+                        fn : function(){
+                            return panel.model.contents.copy( content );
+                        }
+                    });
+                });
+                queue.start();
+                queue.done( function( responses ){
+                    panel.model.fetch();
+                });
+            }
+         });
+    },
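+
+    // Note (assumed payload shape, for illustration): the dataTransfer 'text'
+    // parsed in dragstart above is expected to be a JSON array of serialized
+    // history contents, each testable with contents.isCopyable, e.g.
+    //
+    //     [{ "id" : "...", "history_content_type" : "dataset" }]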
+
+    /** convenience fn to count the columns in columnMap */
+    columnMapLength : function(){
+        return Object.keys( this.columnMap ).length;
+    },
+
+    /** return array of Columns filtered by filters and sorted to match the collection
+     *  @param: filters Function[] array of filter fns
+     */
+    sortedFilteredColumns : function( filters ){
+        filters = filters || this.filters;
+        if( !filters || !filters.length ){
+            return this.sortedColumns();
+        }
+        var multipanel = this;
+        return multipanel.sortedColumns().filter( function( column, index ){
+            var filtered = column.currentHistory || _.every( filters.map( function( filter ){
+                return filter.call( column );
+            }));
+            return filtered;
+        });
+    },
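+
+    // Example filter fn (illustrative) - filters are called with the column as
+    // `this` and must return a boolean; the history-search filter installed in
+    // setUpBehaviors has essentially this form:
+    //
+    //     function(){ return this.model.matchesAll( multipanel.historySearch ); }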
+
+    /** return array of Columns sorted to match the collection */
+    sortedColumns : function(){
+        var multipanel = this;
+        var sorted = this.collection.map( function( history, index ){
+            return multipanel.columnMap[ history.id ];
+        });
+        return sorted;
+    },
+
+    // ------------------------------------------------------------------------ render
+    /** Render this view, columns, and set up view plugins */
+    render : function render( speed ){
+        speed = speed !== undefined? speed: this.fxSpeed;
+        var multipanel = this;
+
+        multipanel.log( multipanel + '.render' );
+        multipanel.$el.html( multipanel.mainTemplate( multipanel ) );
+        multipanel.renderColumns( speed );
+
+        // set the columns to the full height allowed and set up behaviors for this multipanel
+        multipanel.setUpBehaviors();
+        //TODO: wrong - has to wait for columns to render
+        //  - create a column listener that fires this when all columns are rendered
+        multipanel.trigger( 'rendered', multipanel );
+        return multipanel;
+    },
+
+    /** Render the columns and panels */
+    renderColumns : function renderColumns( speed ){
+        speed = _.isNumber( speed )? speed: this.fxSpeed;
+        // console.log( 'renderColumns:', speed );
+        // render columns and track the total number rendered, firing an event when all are rendered
+        var self = this;
+        var sortedAndFiltered = self.sortedFilteredColumns();
+        // console.log( '\t sortedAndFiltered:', sortedAndFiltered );
+        var $middle = self.$( '.middle' ).empty();
+
+        self._addColumns( sortedAndFiltered, speed );
+        if( !self.collection.allFetched ){
+            $middle.append( self.loadingIndicatorTemplate( self ) );
+        }
+        //TODO: sorta - at least their fx queue has started the re-rendering
+        self.trigger( 'columns-rendered', sortedAndFiltered, self );
+
+        // skip the in-view/height checks when a dataset search has filtered the view down to a single column
+        if( !( self.datasetSearch && sortedAndFiltered.length <= 1 ) ){
+            // check for in-view, hda lazy-loading if so
+            self.checkColumnsInView();
+            // the first, current column has position: fixed and flex css will not apply - adjust height manually
+            self._recalcFirstColumnHeight();
+        }
+        return sortedAndFiltered;
+    },
+
+    _addColumns : function( columns, speed ){
+        speed = _.isNumber( speed )? speed: this.fxSpeed;
+        var $middle = this.$( '.middle' );
+
+        var numExisting = $middle.children( '.history-column' ).length;
+        $middle.width( this._calcMiddleWidth( columns.length + numExisting ) );
+
+        columns.forEach( function( column, i ){
+            column.delegateEvents().render( speed ).$el.appendTo( $middle );
+        });
+    },
+
+    _calcMiddleWidth : function( numColumns ){
+        var preventStackWidthAdj = 16;
+        return (
+            numColumns * ( this.options.columnWidth + this.options.columnGap ) +
+            // last column gap
+            this.options.columnGap +
+            // the amount that safely prevents stacking of columns when adding a new one
+            preventStackWidthAdj
+        );
+    },
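+
+    // Worked example (illustrative; assumes a hypothetical columnWidth of 312
+    // alongside the columnGap of 8 from the defaults above): for 3 columns,
+    // 3 * ( 312 + 8 ) + 8 + 16 = 984px of width for the .middle container.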
+
+    //TODO: combine the following two more sensibly
+    //TODO: could have HistoryContents.haveDetails return false
+    //      if column.model.contents.length === 0 && !column.model.get( 'empty' ) then just check that
+    /** Get the *summary* contents of a column's history (and details on any expanded contents),
+     *      queueing the ajax call and using a named queue to prevent the call being sent twice
+     */
+    queueHdaFetch : function queueHdaFetch( column ){
+        // console.log( column.model + '.contentsShown:', column.model.contentsShown() );
+        var contents = column.model.contents;
+        // console.log( 'queueHdaFetch:', column, column.model.get( 'contents_active' ) );
+        // if the history model says it has hdas but none are present, queue an ajax req for them
+        if( contents.length === 0 && column.model.contentsShown() ){
+            var fetchOptions = { silent: true };
+            var ids = _.values( contents.storage.allExpanded() ).join();
+            if( ids ){ fetchOptions.details = ids; }
+            // this uses a 'named' queue so that duplicate requests are ignored
+            this.hdaQueue.add({
+                name : column.model.id,
+                fn : function(){
+                    return contents.fetchCurrentPage( fetchOptions )
+                        .done( function(){ column.panel.renderItems(); });
+                }
+            });
+            // the queue is re-used, so if it's not processing requests - start it again
+            if( !this.hdaQueue.running ){ this.hdaQueue.start(); }
+        }
+    },
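+
+    // Minimal sketch of the named-queue behavior relied on above (illustrative):
+    // a job added under a name that is already queued is ignored, so queueing a
+    // fetch twice for the same history sends only one request:
+    //
+    //     queue.add({ name : 'abc123', fn : fetchFn });
+    //     queue.add({ name : 'abc123', fn : otherFn }); // ignored - duplicate name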
+
+    /** Get the *detailed* json for *all* of a column's history's contents - req'd for searching */
+    queueHdaFetchDetails : function( column ){
+        var contents = column.model.contents;
+        var needsContentsLoaded = contents.length === 0 && column.model.contentsShown();
+        if( needsContentsLoaded || !contents.haveDetails() ){
+            // this uses a 'named' queue so that duplicate requests are ignored
+            this.hdaQueue.add({
+                name : column.model.id,
+                fn : function(){
+                    return contents.progressivelyFetchDetails()
+                        .done( function(){ column.panel._renderEmptyMessage(); });
+                }
+            });
+            // the queue is re-used, so if it's not processing requests - start it again
+            if( !this.hdaQueue.running ){ this.hdaQueue.start(); }
+        }
+    },
+
+    /** put a text msg in the header */
+    renderInfo : function( msg ){
+        return this.$( '.header .header-info' ).text( msg );
+    },
+
+    // ------------------------------------------------------------------------ events/behaviors
+    events : {
+        // will move to the server root (gen. Analyze data)
+        'click .done.btn'           : 'close',
+        // creates a new empty history and makes it current
+        'click .create-new.btn'     : 'create',
+        'click #include-deleted'    : '_clickToggleDeletedHistories',
+        // these change the collection and column sort order
+        'click .order .set-order'   : '_chooseOrder',
+        'click #toggle-deleted'     : '_clickToggleDeletedDatasets',
+        'click #toggle-hidden'      : '_clickToggleHiddenDatasets',
+        //'dragstart .list-item .title-bar'                       : function( e ){ console.debug( 'ok' ); }
+    },
+
+    close : function( ev ){
+        //TODO: switch to pushState/router
+        window.location = Galaxy.root;
+    },
+
+    _clickToggleDeletedHistories : function( ev ){
+        this.toggleDeletedHistories( $( ev.currentTarget ).is( ':checked' ) );
+        this.toggleOptionsPopover();
+    },
+    /** Include deleted histories in the collection */
+    toggleDeletedHistories : function( show ){
+        if( show ){
+            window.location = Galaxy.root + 'history/view_multiple?include_deleted_histories=True';
+        } else {
+            window.location = Galaxy.root + 'history/view_multiple';
+        }
+    },
+
+    _clickToggleDeletedDatasets : function( ev ){
+        this.toggleDeletedDatasets( $( ev.currentTarget ).is( ':checked' ) );
+        this.toggleOptionsPopover();
+    },
+    toggleDeletedDatasets : function( show ){
+        show = show !== undefined? show : false;
+        var multipanel = this;
+        multipanel.sortedFilteredColumns().forEach( function( column, i ){
+            _.delay( function(){
+                column.panel.toggleShowDeleted( show, false );
+            }, i * 200 );
+        });
+    },
+
+    _clickToggleHiddenDatasets : function( ev ){
+        this.toggleHiddenDatasets( $( ev.currentTarget ).is( ':checked' ) );
+        this.toggleOptionsPopover();
+    },
+    toggleHiddenDatasets : function( show ){
+        show = show !== undefined? show : false;
+        var multipanel = this;
+        multipanel.sortedFilteredColumns().forEach( function( column, i ){
+            _.delay( function(){
+                column.panel.toggleShowHidden( show, false );
+            }, i * 200 );
+        });
+    },
+
+    /** change the collection order and re-fetch when the drop down in the options menu is changed */
+    _chooseOrder : function( ev ){
+        var multipanel = this,
+            collection = multipanel.collection,
+            orderKey = $( ev.currentTarget ).data( 'order' );
+        // set the sort order text also
+        multipanel.$( '.current-order' ).text( multipanel.orderDescriptions[ orderKey ] );
+        multipanel.toggleOptionsPopover();
+        // set the order and re-fetch using the new order, saving the current history as the first
+        collection.setOrder( orderKey );
+        var currentHistoryModel = collection.slice( 0, 1 );
+        collection.fetchFirst().done( function(){
+            collection.unshift( currentHistoryModel, { silent: true });
+            multipanel.createColumns();
+            // need to clear this or previously fetched contents won't refetch now (because of the named queue)
+            multipanel.hdaQueue.clear();
+            multipanel.render();
+        });
+        multipanel.once( 'columns-rendered', multipanel._scrollLeft );
+        //TODO: check allFetched and do not reset if so - just sort instead
+    },
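+
+    // e.g. (illustrative): clicking the option with data-order="update_time-asc"
+    // calls collection.setOrder( 'update_time-asc' ) and re-fetches, keeping the
+    // current history pinned as the first column.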
+
+    /** scroll the column container right or left */
+    _scrollLeft : function( val ){
+        val = _.isNumber( val )? val : 0;
+        this.$( '.outer-middle' ).scrollLeft( val );
+    },
+
+    /** Set up any view plugins */
+    setUpBehaviors : function(){
+        var multipanel = this;
+        multipanel._moreOptionsPopover();
+
+        // input to search histories
+        multipanel.$( '#search-histories' ).searchInput({
+            name        : 'search-histories',
+            placeholder : _l( 'search histories' ),
+
+            onfirstsearch : function( searchFor ){
+                multipanel.$( '#search-histories' ).searchInput( 'toggle-loading' );
+                multipanel.renderInfo( _l( 'loading all histories for search' ) );
+                multipanel.collection.fetchAll()
+                    .done( function(){
+                        multipanel.$( '#search-histories' ).searchInput( 'toggle-loading' );
+                        multipanel.renderInfo( '' );
+                    });
+            },
+            onsearch : function( searchFor ){
+                multipanel.historySearch = searchFor;
+                multipanel.filters = [ function(){
+                    return this.model.matchesAll( multipanel.historySearch );
+                }];
+                multipanel.renderColumns( 0 );
+            },
+            onclear : function( searchFor ){
+                multipanel.historySearch = null;
+                //TODO: remove specifically not just reset
+                multipanel.filters = [];
+                multipanel.renderColumns( 0 );
+            }
+        });
+
+        // input to search datasets
+        multipanel.$( '#search-datasets' ).searchInput({
+            name        : 'search-datasets',
+            placeholder : _l( 'search all datasets' ),
+
+            onfirstsearch : function( searchFor ){
+                multipanel.hdaQueue.clear();
+                multipanel.$( '#search-datasets' ).searchInput( 'toggle-loading' );
+                multipanel.datasetSearch = searchFor;
+                multipanel.sortedFilteredColumns().forEach( function( column ){
+                    column.panel.searchItems( searchFor );
+                    // load details for those that need them
+                    multipanel.queueHdaFetchDetails( column );
+                });
+                multipanel.hdaQueue.progress( function( progress ){
+                    multipanel.renderInfo([
+                        _l( 'searching' ), ( progress.curr + 1 ), _l( 'of' ), progress.total
+                    ].join( ' ' ));
+                });
+                multipanel.hdaQueue.deferred.done( function(){
+                    multipanel.renderInfo( '' );
+                    multipanel.$( '#search-datasets' ).searchInput( 'toggle-loading' );
+                });
+            },
+            onsearch : function( searchFor ){
+                multipanel.datasetSearch = searchFor;
+                multipanel.sortedFilteredColumns().forEach( function( column ){
+                    column.panel.searchItems( searchFor );
+                });
+            },
+            onclear : function( searchFor ){
+                multipanel.datasetSearch = null;
+                multipanel.sortedFilteredColumns().forEach( function( column ){
+                    column.panel.clearSearch();
+                });
+            }
+        });
+
+        // resize first (fixed position) column on page resize
+        $( window ).resize( function(){
+            multipanel._recalcFirstColumnHeight();
+        });
+
+        // when scrolling - check for histories now in view: they will fire 'in-view' and queue hda loading if necessary
+        //TODO:?? might be able to simplify and not use pub-sub
+        var debouncedInView = _.debounce( function _debouncedInner(){
+            var viewport = multipanel._viewport();
+            multipanel.checkColumnsInView( viewport );
+            multipanel.checkForEndOfScroll( viewport );
+        }, 100 );
+        this.$( '.middle' ).parent().scroll( debouncedInView );
+    },
+
+    /** create the options popover */
+    _moreOptionsPopover : function(){
+        return this.$( '.open-more-options.btn' ).popover({
+            container   : '.header',
+            placement   : 'bottom',
+            html        : true,
+            content     : $( this.optionsPopoverTemplate( this ) )
+        });
+    },
+
+    /** show or hide the options popover */
+    toggleOptionsPopover : function( ev ){
+        // hide seems broken in our version
+        this.$( '.open-more-options.btn' ).popover( 'toggle' );
+    },
+
+    /** Adjust the height of the first, current column since flex-boxes won't work with fixed position elements */
+    _recalcFirstColumnHeight : function(){
+        var $firstColumn = this.$( '.history-column' ).first(),
+            middleHeight = this.$( '.middle' ).height(),
+            controlHeight = $firstColumn.find( '.panel-controls' ).height();
+        $firstColumn.height( middleHeight )
+            .find( '.inner' ).height( middleHeight - controlHeight );
+    },
+
+    /** Get the left and right pixel coords of the middle element */
+    _viewport : function(){
+        var $outerMiddle = this.$( '.middle' ).parent(),
+            viewLeft = $outerMiddle.offset().left,
+            width = $outerMiddle.width();
+        return {
+            left    : viewLeft,
+            right   : viewLeft + width
+        };
+    },
+
+    /** returns the columns currently in the viewport */
+    columnsInView : function( viewport ){
+        //TODO: uses offset which is render intensive
+        //TODO: 2N - could use arg filter (sortedFilteredColumns( filter )) instead
+        var vp = viewport || this._viewport();
+        return this.sortedFilteredColumns().filter( function( column ){
+            return column.currentHistory || column.inView( vp.left, vp.right );
+        });
+    },
+
+    //TODO: sortByInView - return cols in view, then others
+    /** trigger in-view from columns in-view */
+    checkColumnsInView : function(){
+        //TODO: backwards - don't fire from the column, fire from here and listen from here
+        this.columnsInView().forEach( function( column ){
+            column.trigger( 'in-view', column );
+        });
+    },
+
+    /** is the middle, horizontally scrolling section scrolled fully to the right? */
+    checkForEndOfScroll : function( viewport ){
+        viewport = viewport || this._viewport();
+        var END_PADDING = 16,
+            $middle = this.$( '.middle' ),
+            scrollRight = $middle.parent().scrollLeft() + viewport.right;
+        if( scrollRight >= ( $middle.width() - END_PADDING ) ){
+            this.trigger( 'end-of-scroll' );
+        }
+    },
+
+    /** Show and enable the current column's drop target */
+    currentColumnDropTargetOn : function(){
+        var currentColumn = this.columnMap[ this.collection.currentHistoryId ];
+        if( !currentColumn ){ return; }
+        //TODO: fix this - shouldn't need monkeypatch
+        currentColumn.panel.dataDropped = function( data ){};
+        currentColumn.panel.dropTargetOn();
+    },
+
+    /** Hide and disable the current column's drop target */
+    currentColumnDropTargetOff : function(){
+        var currentColumn = this.columnMap[ this.collection.currentHistoryId ];
+        if( !currentColumn ){ return; }
+        currentColumn.panel.dataDropped = HISTORY_VIEW_EDIT.HistoryViewEdit.prototype.dataDrop;
+        // slight override of dropTargetOff to not erase drop-target-help
+        currentColumn.panel.dropTarget = false;
+        currentColumn.panel.$( '.history-drop-target' ).remove();
+    },
+
+    // ------------------------------------------------------------------------ misc
+    /** String rep */
+    toString : function(){
+        return 'MultiPanelColumns(' + this.columnMapLength() + ')';
+    },
+
+    // ------------------------------------------------------------------------ templates
+    mainTemplate : _.template([
+        '<div class="header flex-column-container">',
+            '<div class="control-column control-column-left flex-column">',
+                '<button class="done btn btn-default" tabindex="1">', _l( 'Done' ), '</button>',
+                '<div id="search-histories" class="search-control"></div>',
+                '<div id="search-datasets" class="search-control"></div>',
+                '<a class="open-more-options btn btn-default" tabindex="3">',
+                    '<span class="fa fa-ellipsis-h"></span>',
+                '</a>',
+            '</div>',
+            // feedback
+            '<div class="control-column control-column-center flex-column">',
+                '<div class="header-info">', '</div>',
+            '</div>',
+            '<div class="control-column control-column-right flex-column">',
+                '<button class="create-new btn btn-default" tabindex="4">', _l( 'Create new' ), '</button> ',
+            '</div>',
+        '</div>',
+        // middle - where the columns go
+        '<div class="outer-middle flex-row flex-row-container">',
+            '<div class="middle flex-column-container flex-row"></div>',
+        '</div>',
+        // footer
+        '<div class="footer flex-column-container"></div>'
+    ].join(''), { variable: 'view' }),
+
+    loadingIndicatorTemplate : _.template([
+        '<div class="histories-loading-indicator">',
+            '<span class="fa fa-spin fa-spinner"></span>', _l( 'Loading histories' ), '...',
+        '</div>'
+    ].join(''), { variable: 'view' }),
+
+    orderDescriptions : {
+        'update_time'       : _l( 'most recent first' ),
+        'update_time-asc'   : _l( 'least recent first' ),
+        'name'              : _l( 'name, a to z' ),
+        'name-dsc'          : _l( 'name, z to a' ),
+        'size'              : _l( 'size, large to small' ),
+        'size-asc'          : _l( 'size, small to large' )
+    },
+
+    optionsPopoverTemplate : _.template([
+        '<div class="more-options">',
+            '<div class="order btn-group">',
+                '<button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">',
+                    _l( 'Order histories by' ) + ' ',
+                    '<span class="current-order"><%- view.orderDescriptions[ view.collection.order ] %></span> ',
+                    '<span class="caret"></span>',
+                '</button>',
+                '<ul class="dropdown-menu" role="menu">',
+                    '<% _.each( view.orderDescriptions, function( text, order ){ %>',
+                        '<li><a href="javascript:void(0);" class="set-order" data-order="<%- order %>">',
+                            '<%- text %>',
+                        '</a></li>',
+                    '<% }); %>',
+                '</ul>',
+            '</div>',
+
+            '<div class="checkbox"><label><input id="include-deleted" type="checkbox"',
+                '<%= view.collection.includeDeleted? " checked" : "" %>>',
+                _l( 'Include deleted histories' ),
+            '</label></div>',
+
+            '<hr />',
+
+            '<div class="checkbox"><label><input id="toggle-deleted" type="checkbox">',
+                _l( 'Include deleted datasets' ),
+            '</label></div>',
+            '<div class="checkbox"><label><input id="toggle-hidden" type="checkbox">',
+                _l( 'Include hidden datasets' ),
+            '</label></div>',
+        '</div>'
+    ].join(''), { variable: 'view' })
+
+});
+
+
+//==============================================================================
+    return {
+        MultiPanelColumns : MultiPanelColumns
+    };
+});
diff --git a/client/galaxy/scripts/mvc/history/options-menu.js b/client/galaxy/scripts/mvc/history/options-menu.js
new file mode 100644
index 0000000..93be067
--- /dev/null
+++ b/client/galaxy/scripts/mvc/history/options-menu.js
@@ -0,0 +1,247 @@
+define([
+    "mvc/ui/popup-menu",
+    "mvc/history/copy-dialog",
+    "mvc/base-mvc",
+    "utils/localization",
+    "mvc/webhooks"
+], function( PopupMenu, historyCopyDialog, BASE_MVC, _l, Webhooks ){
+
+'use strict';
+
+// ============================================================================
+var menu = [
+    {
+        html    : _l( 'History Lists' ),
+        header  : true
+    },
+    {
+        html    : _l( 'Saved Histories' ),
+        href    : 'history/list',
+    },
+    {
+        html    : _l( 'Histories Shared with Me' ),
+        href    : 'history/list_shared'
+    },
+
+    {
+        html    : _l( 'Current History' ),
+        header  : true,
+        anon    : true
+    },
+    {
+        html    : _l( 'Create New' ),
+        func    : function() {
+            if( Galaxy && Galaxy.currHistoryPanel ){
+                Galaxy.currHistoryPanel.createNewHistory();
+            }
+        },
+    },
+    {
+        html    : _l( 'Copy History' ),
+        func    : function() {
+            historyCopyDialog( Galaxy.currHistoryPanel.model )
+                .done( function(){
+                    Galaxy.currHistoryPanel.loadCurrentHistory();
+                });
+        },
+    },
+    {
+        html    : _l( 'Share or Publish' ),
+        href    : 'history/sharing',
+    },
+    {
+        html    : _l( 'Show Structure' ),
+        href    : 'history/display_structured',
+        anon    : true,
+    },
+    {
+        html    : _l( 'Extract Workflow' ),
+        href    : 'workflow/build_from_current_history',
+    },
+    {
+        html    : _l( 'Delete' ),
+        anon    : true,
+        func    : function() {
+            if( Galaxy && Galaxy.currHistoryPanel && confirm( _l( 'Really delete the current history?' ) ) ){
+                galaxy_main.window.location.href = 'history/delete?id=' + Galaxy.currHistoryPanel.model.id;
+            }
+        },
+    },
+    {
+        html    : _l( 'Delete Permanently' ),
+        purge   : true,
+        anon    : true,
+        func    : function() {
+            if( Galaxy && Galaxy.currHistoryPanel
+            &&  confirm( _l( 'Really delete the current history permanently? This cannot be undone.' ) ) ){
+                galaxy_main.window.location.href = 'history/delete?purge=True&id=' + Galaxy.currHistoryPanel.model.id;
+            }
+        },
+    },
+
+
+    {
+        html    : _l( 'Dataset Actions' ),
+        header  : true,
+        anon    : true
+    },
+    {
+        html    : _l( 'Copy Datasets' ),
+        href    : 'dataset/copy_datasets',
+    },
+    {
+        html    : _l( 'Dataset Security' ),
+        href    : 'root/history_set_default_permissions',
+    },
+    {
+        html    : _l( 'Resume Paused Jobs' ),
+        href    : 'history/resume_paused_jobs?current=True',
+        anon    : true,
+    },
+    {
+        html    : _l( 'Collapse Expanded Datasets' ),
+        func    : function() {
+            if( Galaxy && Galaxy.currHistoryPanel ){
+                Galaxy.currHistoryPanel.collapseAll();
+            }
+        },
+    },
+    {
+        html    : _l( 'Unhide Hidden Datasets' ),
+        anon    : true,
+        func    : function() {
+            if( Galaxy && Galaxy.currHistoryPanel && confirm( _l( 'Really unhide all hidden datasets?' ) ) ){
+                var filtered = Galaxy.currHistoryPanel.model.contents.hidden();
+                //TODO: batch
+                filtered.ajaxQueue( Backbone.Model.prototype.save, { visible : true })
+                    .done( function(){
+                        Galaxy.currHistoryPanel.renderItems();
+                    })
+                    .fail( function(){
+                        alert( 'There was an error unhiding the datasets' );
+                        console.error( arguments );
+                    });
+            }
+        },
+    },
+    {
+        html    : _l( 'Delete Hidden Datasets' ),
+        anon    : true,
+        func    : function() {
+            if( Galaxy && Galaxy.currHistoryPanel && confirm( _l( 'Really delete all hidden datasets?' ) ) ){
+                var filtered = Galaxy.currHistoryPanel.model.contents.hidden();
+                //TODO: batch
+                // both delete *and* unhide them
+                filtered.ajaxQueue( Backbone.Model.prototype.save, { deleted : true, visible: true })
+                    .done( function(){
+                        Galaxy.currHistoryPanel.renderItems();
+                    })
+                    .fail( function(){
+                        alert( 'There was an error deleting the datasets' );
+                        console.error( arguments );
+                    });
+            }
+        },
+    },
+    {
+        html    : _l( 'Purge Deleted Datasets' ),
+        confirm : _l( 'Really delete all deleted datasets permanently? This cannot be undone.' ),
+        href    : 'history/purge_deleted_datasets',
+        purge   : true,
+        anon    : true,
+    },
+
+
+    {
+        html    : _l( 'Downloads' ),
+        header  : true
+    },
+    {
+        html    : _l( 'Export Tool Citations' ),
+        href    : 'history/citations',
+        anon    : true,
+    },
+    {
+        html    : _l( 'Export History to File' ),
+        href    : 'history/export_archive?preview=True',
+        anon    : true,
+    },
+
+    {
+        html    : _l( 'Other Actions' ),
+        header  : true
+    },
+    {
+        html    : _l( 'Import from File' ),
+        href    : 'history/import_archive',
+    }
+];
+
+// Webhooks
+Webhooks.add({
+    url: 'api/webhooks/history-menu/all',
+    async: false,   // synchronous - may slow down initial page load
+    callback: function(webhooks) {
+        var webhooks_menu = [];
+
+        $.each(webhooks.models, function(index, model) {
+            var webhook = model.toJSON();
+            if (webhook.activate) {
+                webhooks_menu.push({
+                    html : _l( webhook.config.title ),
+                    // func: function() {},
+                    anon : true
+                });
+            }
+        });
+
+        if (webhooks_menu.length > 0) {
+            webhooks_menu.unshift({
+                html   : _l( 'Webhooks' ),
+                header : true
+            });
+            $.merge(menu, webhooks_menu);
+        }
+    }
+});
+
+
+function buildMenu( isAnon, purgeAllowed, urlRoot ){
+    return _.clone( menu ).filter( function( menuOption ){
+        if( isAnon && !menuOption.anon ){
+            return false;
+        }
+        if( !purgeAllowed && menuOption.purge ){
+            return false;
+        }
+
+        //TODO:?? hard-coded galaxy_main
+        if( menuOption.href ){
+            menuOption.href = urlRoot + menuOption.href;
+            menuOption.target = 'galaxy_main';
+        }
+
+        if( menuOption.confirm ){
+            menuOption.func = function(){
+                if( confirm( menuOption.confirm ) ){
+                    galaxy_main.location = menuOption.href;
+                }
+            };
+        }
+        return true;
+    });
+}
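+
+// Usage sketch (illustrative): for an anonymous user on a server that allows
+// purging, only entries flagged `anon` survive the filter and each href is
+// prefixed with the url root:
+//
+//     var anonMenu = buildMenu( true, true, Galaxy.root );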
+
+var create = function( $button, options ){
+    options = options || {};
+    var isAnon = options.anonymous === undefined? true : options.anonymous,
+        purgeAllowed = options.purgeAllowed || false,
+        menu = buildMenu( isAnon, purgeAllowed, Galaxy.root );
+    //console.debug( 'menu:', menu );
+    return new PopupMenu( $button, menu );
+};
+
+
+// ============================================================================
+    return create;
+});
diff --git a/client/galaxy/scripts/mvc/job/job-li.js b/client/galaxy/scripts/mvc/job/job-li.js
new file mode 100644
index 0000000..3078f84
--- /dev/null
+++ b/client/galaxy/scripts/mvc/job/job-li.js
@@ -0,0 +1,206 @@
+define([
+    'mvc/list/list-item',
+    'mvc/dataset/dataset-list',
+    "mvc/base-mvc",
+    "utils/localization"
+], function( LIST_ITEM, DATASET_LIST, BASE_MVC, _l ){
+//==============================================================================
+var _super = LIST_ITEM.FoldoutListItemView;
+/** @class A job view used from within a larger list of jobs.
+ *      Each job folds out into a panel of history contents displaying that job's outputs.
+ */
+var JobListItemView = _super.extend(/** @lends JobListItemView.prototype */{
+
+    /** logger used to record this.log messages, commonly set to console */
+    //logger              : console,
+
+    className   : _super.prototype.className + " job",
+    id          : function(){
+        return [ 'job', this.model.get( 'id' ) ].join( '-' );
+    },
+
+    foldoutPanelClass : DATASET_LIST.DatasetList,
+
+    /** Set up: instance vars, options, and event handlers */
+    initialize : function( attributes ){
+        if( attributes.logger ){ this.logger = this.model.logger = attributes.logger; }
+        this.log( this + '.initialize:', attributes );
+        _super.prototype.initialize.call( this, attributes );
+
+        this.tool = attributes.tool || {};
+        this.jobData = attributes.jobData || {};
+
+        /** where should pages from links be displayed? (default to new tab/window) */
+        this.linkTarget = attributes.linkTarget || '_blank';
+    },
+
+    /** In this override, add the state as a class for use with state-based CSS */
+    _swapNewRender : function( $newRender ){
+        _super.prototype._swapNewRender.call( this, $newRender );
+        if( this.model.has( 'state' ) ){
+            this.$el.addClass( 'state-' + this.model.get( 'state' ) );
+        }
+        return this.$el;
+    },
+
+    /** Stub to return proper foldout panel options */
+    _getFoldoutPanelOptions : function(){
+        var options = _super.prototype._getFoldoutPanelOptions.call( this );
+        return _.extend( options, {
+            collection  : this.model.outputCollection,
+            selecting   : false
+        });
+    },
+
+    // ........................................................................ template helpers
+    // all of these are ADAPTERs - in other words, it might be better if the API returned the final form
+    //  or something similar in order to remove some of the complexity here
+
+    /** Return tool.inputs that should/can be safely displayed */
+    _labelParamMap : function(){
+        //ADAPTER
+        var params = this.model.get( 'params' ),
+            labelParamMap = {};
+        _.each( this.tool.inputs, function( i ){
+            //console.debug( i.label, i.model_class );
+            if( i.label && i.model_class !== 'DataToolParameter' ){
+                labelParamMap[ i.label ] = params[ i.name ];
+            }
+        });
+        return labelParamMap;
+    },
+
+    _labelInputMap : function(){
+        //ADAPTER
+        var view = this,
+            labelInputMap = {};
+        _.each( this.jobData.inputs, function( input ){
+            var toolInput = view._findToolInput( input.name );
+            if( toolInput ){
+                labelInputMap[ toolInput.label ] = input;
+            }
+        });
+        return labelInputMap;
+    },
+
+    /** Return a tool.inputs object that matches (or partially matches) the given (job input) name */
+    _findToolInput : function( name ){
+        //ADAPTER
+        var toolInputs = this.tool.inputs,
+            exactMatch = _.findWhere( toolInputs, { name : name });
+        if( exactMatch ){ return exactMatch; }
+        return this._findRepeatToolInput( name, toolInputs );
+    },
+
+    /** Return a tool.inputs object that partially matches the given (job input) name (for repeat dataset inputs)*/
+    _findRepeatToolInput : function( name, toolInputs ){
+        //ADAPTER
+        toolInputs = toolInputs || this.tool.inputs;
+        var partialMatch = _.find( toolInputs, function( i ){
+            return name.indexOf( i.name ) === 0;
+        });
+        if( !partialMatch ){ return undefined; }
+
+        var subMatch = _.find( partialMatch.inputs, function( i ){
+            return name.indexOf( i.name ) !== -1;
+        });
+        return subMatch;
+    },
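+
+    // Example (illustrative; the input name is hypothetical): a repeat job
+    // input named 'queries_0|input2' has no exact match in tool.inputs, but
+    // partial-matches the repeat 'queries', whose sub-inputs then yield 'input2'.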
+
+    // ........................................................................ misc
+    /** String representation */
+    toString : function(){
+        return 'JobListItemView(' + this.model + ')';
+    }
+});
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+JobListItemView.prototype.templates = (function(){
+//TODO: move to require text! plugin
+
+    var elTemplate = BASE_MVC.wrapTemplate([
+        '<div class="list-element">',
+            '<div class="id"><%- model.id %></div>',
+            // errors, messages, etc.
+            '<div class="warnings"></div>',
+
+            // multi-select checkbox
+            '<div class="selector">',
+                '<span class="fa fa-2x fa-square-o"></span>',
+            '</div>',
+            // space for title bar buttons - gen. floated to the right
+            '<div class="primary-actions"></div>',
+            '<div class="title-bar"></div>',
+
+            // expandable area for more details
+            '<div class="details"></div>',
+        '</div>'
+    ]);
+
+    var titleBarTemplate = BASE_MVC.wrapTemplate([
+        // adding a tabindex here allows focusing the title bar and the use of keydown to expand the dataset display
+        '<div class="title-bar clear" tabindex="0">',
+            //'<span class="state-icon"></span>',
+            '<div class="title">',
+                '<span class="name"><%- view.tool.name %></span>',
+            '</div>',
+            '<div class="subtitle">',
+                '<span class="description"><%- view.tool.description %></span',
+                '<span class="create-time">',
+                    ' ', _l( 'Created' ), ': <%= new Date( job.create_time ).toString() %>, ',
+                '</span>',
+            '</div>',
+        '</div>'
+    ], 'job' );
+
+    var subtitleTemplate = BASE_MVC.wrapTemplate([
+        '<div class="subtitle">',
+            '<span class="description"><%- view.tool.description %></span',
+            //'<span class="create-time">',
+            //    ' ', _l( 'Created' ), ': <%= new Date( job.create_time ).toString() %>, ',
+            //'</span',
+            //'<span class="version">',
+            //    ' (', _l( 'version' ), ': <%- view.tool.version %>)',
+            //'</span',
+        '</div>'
+    ], 'job' );
+
+    var detailsTemplate = BASE_MVC.wrapTemplate([
+        '<div class="details">',
+            //'<div class="version">',
+            //    '<label class="prompt">', _l( 'Version' ), '</label>',
+            //    '<span class="value"><%- view.tool.version %></span>',
+            //'</div>',
+            '<div class="params">',
+                '<% _.each( view._labelInputMap(), function( input, label ){ %>',
+                    '<div class="input" data-input-name="<%- input.name %>" data-input-id="<%- input.id %>">',
+                        '<label class="prompt"><%- label %></label>',
+//TODO: input dataset name
+                        '<span class="value"><%- input.content.name %></span>',
+                    '</div>',
+                '<% }) %>',
+                '<% _.each( view._labelParamMap(), function( param, label ){ %>',
+                    '<div class="param" data-input-name="<%- param.name %>">',
+                        '<label class="prompt"><%- label %></label>',
+                        '<span class="value"><%- param %></span>',
+                    '</div>',
+                '<% }) %>',
+            '</div>',
+        '</div>'
+    ], 'job' );
+
+    return _.extend( {}, _super.prototype.templates, {
+        //el          : elTemplate,
+        titleBar    : titleBarTemplate,
+        subtitle    : subtitleTemplate,
+        details     : detailsTemplate
+    });
+}());
+
+
+//=============================================================================
+    return {
+        JobListItemView : JobListItemView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/job/job-model.js b/client/galaxy/scripts/mvc/job/job-model.js
new file mode 100644
index 0000000..487f8ff
--- /dev/null
+++ b/client/galaxy/scripts/mvc/job/job-model.js
@@ -0,0 +1,202 @@
+define([
+    "mvc/history/history-contents",
+    "mvc/dataset/states",
+    "utils/ajax-queue",
+    "mvc/base-mvc",
+    "utils/localization"
+], function( HISTORY_CONTENTS, STATES, AJAX_QUEUE, BASE_MVC, _l ){
+
+var logNamespace = 'jobs';
+//==============================================================================
+var searchableMixin = BASE_MVC.SearchableModelMixin;
+/** @class Represents a job running or already run on the server's job handlers.
+ */
+var Job = Backbone.Model
+        .extend( BASE_MVC.LoggableMixin )
+        .extend( BASE_MVC.mixin( searchableMixin, /** @lends Job.prototype */{
+    _logNamespace : logNamespace,
+
+    /** default attributes for a model */
+    defaults : {
+        model_class : 'Job',
+
+        tool_id     : null,
+        exit_code   : null,
+
+        inputs      : {},
+        outputs     : {},
+        params      : {},
+
+        create_time : null,
+        update_time : null,
+        state       : STATES.NEW
+    },
+
+    /** override to parse params on incoming responses */
+    parse : function( response, options ){
+        response.params = this.parseParams( response.params );
+        return response;
+    },
+
+    /** override to treat param values as json */
+    parseParams : function( params ){
+        var newParams = {};
+        _.each( params, function( value, key ){
+            newParams[ key ] = JSON.parse( value );
+        });
+        return newParams;
+    },
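+
+    // e.g. (illustrative; param names are hypothetical): the API returns param
+    // values as JSON strings, so { "iterate" : "\"no\"", "threshold" : "20" }
+    // parses to { iterate : 'no', threshold : 20 }.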
+
+    /** instance vars and listeners */
+    initialize : function( attributes, options ){
+        this.debug( this + '(Job).initialize', attributes, options );
+
+        this.set( 'params', this.parseParams( this.get( 'params' ) ), { silent: true });
+
+        this.outputCollection = attributes.outputCollection || new HISTORY_CONTENTS.HistoryContents([]);
+        this._setUpListeners();
+    },
+
+    /** set up any event listeners
+     *  event: state:ready  fired when this job moves into/is already in a ready state
+     */
+    _setUpListeners : function(){
+        // if the state has changed and the new state is a ready state, fire an event
+        this.on( 'change:state', function( currModel, newState ){
+            this.log( this + ' has changed state:', currModel, newState );
+            if( this.inReadyState() ){
+                this.trigger( 'state:ready', currModel, newState, this.previous( 'state' ) );
+            }
+        });
+    },
+
+    // ........................................................................ common queries
+    /** Is this job in a 'ready' state - i.e. a state where no more
+     *      processing is left to do on the server?
+     */
+    inReadyState : function(){
+        return _.contains( STATES.READY_STATES, this.get( 'state' ) );
+    },
+
+    /** Does this model already contain detailed data (as opposed to just summary level data)? */
+    hasDetails : function(){
+        //?? this may not be reliable
+        return !_.isEmpty( this.get( 'outputs' ) );
+    },
+
+    // ........................................................................ ajax
+    /** root api url */
+    urlRoot : Galaxy.root + 'api/jobs',
+    //url : function(){ return this.urlRoot; },
+
+    // ........................................................................ searching
+    // see base-mvc, SearchableModelMixin
+    /** what attributes of an Job will be used in a text search */
+    //searchAttributes : [
+    //    'tool'
+    //],
+
+    // ........................................................................ misc
+    /** String representation */
+    toString : function(){
+        return [ 'Job(', this.get( 'id' ), ':', this.get( 'tool_id' ), ')' ].join( '' );
+    }
+}));
+
+
+//==============================================================================
+/** @class Backbone collection for Jobs.
+ */
+var JobCollection = Backbone.Collection
+        .extend( BASE_MVC.LoggableMixin )
+        .extend(/** @lends JobCollection.prototype */{
+    _logNamespace : logNamespace,
+
+    model : Job,
+
+    /** root api url */
+    urlRoot : Galaxy.root + 'api/jobs',
+    url : function(){ return this.urlRoot; },
+
+    initialize : function( models, options ){
+        console.debug( models, options );
+    },
+
+    // ........................................................................ common queries
+    /** Get the ids of every item in this collection
+     *  @returns array of encoded ids
+     */
+    ids : function(){
+        return this.map( function( item ){ return item.get( 'id' ); });
+    },
+
+    /** Get jobs that are not ready
+     *  @returns array of content models
+     */
+    notReady : function(){
+        return this.filter( function( job ){
+            return !job.inReadyState();
+        });
+    },
+
+    /** return true if all jobs have details */
+    haveDetails : function(){
+        return this.all( function( job ){ return job.hasDetails(); });
+    },
+
+    // ........................................................................ ajax
+    /** fetches all details for each job in the collection using a queue */
+    queueDetailFetching : function(){
+        var collection = this,
+            queue = new AJAX_QUEUE.AjaxQueue( this.map( function( job ){
+                return function(){
+                    return job.fetch({ silent: true });
+                };
+            }));
+        queue.done( function(){
+            collection.trigger( 'details-loaded' );
+        });
+        return queue;
+    },
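+
+    // Usage sketch (illustrative):
+    //
+    //     collection.queueDetailFetching().done( function(){
+    //         console.log( 'all job details fetched' );
+    //     });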
+
+    //toDAG : function(){
+    //    return new JobDAG( this.toJSON() );
+    //},
+
+    // ........................................................................ sorting/filtering
+    /** return a new collection of jobs whose attributes contain the substring matchesWhat */
+    matches : function( matchesWhat ){
+        return this.filter( function( job ){
+            return job.matches( matchesWhat );
+        });
+    },
+
+    // ........................................................................ misc
+    /** String representation. */
+    toString : function(){
+         return ([ 'JobCollection(', this.length, ')' ].join( '' ));
+    }
+
+//----------------------------------------------------------------------------- class vars
+}, {
+    /** class level fn for fetching the job details for all jobs in a history */
+    fromHistory : function( historyId ){
+        console.debug( this );
+        var Collection = this,
+            collection = new Collection([]);
+        collection.fetch({ data: { history_id: historyId }})
+            .done( function(){
+                window.queue = collection.queueDetailFetching();
+
+            });
+        return collection;
+    }
+});
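+
+// Usage sketch (illustrative; the history id is hypothetical):
+//
+//     var jobs = JobCollection.fromHistory( 'f2db41e1fa331b3e' );
+//     jobs.once( 'details-loaded', function(){ /* all jobs now have details */ });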
+
+
+//=============================================================================
+    return {
+        Job             : Job,
+        JobCollection   : JobCollection
+    };
+});
diff --git a/client/galaxy/scripts/mvc/library/library-dataset-view.js b/client/galaxy/scripts/mvc/library/library-dataset-view.js
new file mode 100644
index 0000000..dfa7dab
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-dataset-view.js
@@ -0,0 +1,1091 @@
+define([
+  "libs/toastr",
+  "mvc/library/library-model",
+  "utils/utils",
+  'mvc/ui/ui-select'
+  ],
+function(
+        mod_toastr,
+        mod_library_model,
+        mod_utils,
+        mod_select
+        ) {
+
+var LibraryDatasetView = Backbone.View.extend({
+  el: '#center',
+
+  model: null,
+
+  options: {
+
+  },
+
+  events: {
+    "click .toolbtn_modify_dataset"       :   "enableModification",
+    "click .toolbtn_cancel_modifications" :   "render",
+    "click .toolbtn-download-dataset"     :   "downloadDataset",
+    "click .toolbtn-import-dataset"       :   "importIntoHistory",
+    "click .toolbtn-share-dataset"        :   "shareDataset",
+    "click .btn-copy-link-to-clipboard"   :   "copyToClipboard",
+    "click .btn-make-private"             :   "makeDatasetPrivate",
+    "click .btn-remove-restrictions"      :   "removeDatasetRestrictions",
+    "click .toolbtn_save_permissions"     :   "savePermissions",
+    "click .toolbtn_save_modifications"   :   "comingSoon",
+
+  },
+
+  // genome select
+  select_genome : null,
+
+  // extension select
+  select_extension : null,
+
+  // extension types
+  list_extensions : [],
+
+  // datatype placeholder for extension auto-detection
+  auto: {
+      id          : 'auto',
+      text        : 'Auto-detect',
+      description : 'This system will try to detect the file type automatically.' +
+                    ' If your file is not detected properly as one of the known formats,' +
+                    ' it most likely means that it has some format problems (e.g., different' +
+                    ' number of columns on different rows). You can still coerce the system' +
+                    ' to set your data to the format you think it should be.' +
+                    ' You can also upload compressed files, which will automatically be decompressed.'
+  },
+
+  // genomes
+  list_genomes : [],
+
+  initialize: function(options){
+    this.options = _.extend(this.options, options);
+    this.fetchExtAndGenomes();
+    if (this.options.id){
+      this.fetchDataset();
+    }
+  },
+
+  fetchDataset: function(options){
+    this.options = _.extend(this.options, options);
+    this.model = new mod_library_model.Item({id: this.options.id});
+    var that = this;
+    this.model.fetch({
+      success: function() {
+        if (that.options.show_permissions){
+            that.showPermissions();
+        } else if (that.options.show_version) {
+            that.fetchVersion();
+        } else {
+            that.render();
+        }
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error(response.responseJSON.err_msg + ' Click this to go back.', '', {onclick: function() {Galaxy.libraries.library_router.back();}});
+        } else {
+          mod_toastr.error('An error occurred. Click this to go back.', '', {onclick: function() {Galaxy.libraries.library_router.back();}});
+        }
+      }
+    });
+  },
+
+  render: function(options){
+    this.options = _.extend(this.options, options);
+    $(".tooltip").remove();
+    var template = this.templateDataset();
+    this.$el.html(template({item: this.model}));
+    $(".peek").html(this.model.get("peek"));
+    $("#center [data-toggle]").tooltip();
+  },
+
+  fetchVersion: function(options){
+    this.options = _.extend(this.options, options);
+    var that = this;
+    if (!this.options.ldda_id){
+      this.render();
+      mod_toastr.error('Library dataset version requested but no id provided.');
+    } else {
+      this.ldda = new mod_library_model.Ldda({id: this.options.ldda_id});
+      this.ldda.url = this.ldda.urlRoot + this.model.id + '/versions/' + this.ldda.id;
+      this.ldda.fetch({
+        success: function(){
+          that.renderVersion();
+        },
+        error: function(model, response){
+          if (typeof response.responseJSON !== "undefined"){
+            mod_toastr.error(response.responseJSON.err_msg);
+          } else {
+            mod_toastr.error('An error occurred.');
+          }
+        }
+      });
+    }
+  },
+
+  renderVersion: function(){
+    $(".tooltip").remove();
+    var template = this.templateVersion();
+    this.$el.html(template({item: this.model, ldda: this.ldda}));
+    $(".peek").html(this.ldda.get("peek"));
+  },
+
+  enableModification: function(){
+    $(".tooltip").remove();
+    var template = this.templateModifyDataset();
+    this.$el.html(template({item: this.model}));
+    this.renderSelectBoxes({genome_build: this.model.get('genome_build'), file_ext: this.model.get('file_ext') });
+    $(".peek").html(this.model.get("peek"));
+    $("#center [data-toggle]").tooltip();
+  },
+
+  downloadDataset: function(){
+    var url = Galaxy.root + 'api/libraries/datasets/download/uncompressed';
+    var data = {'ld_ids': this.id};
+    this.processDownload(url, data);
+  },
+
+  processDownload: function(url, data, method){
+    // url and data options are required
+    if ( url && data ){
+      // data can be a string of parameters or an array/object
+      data = typeof data == 'string' ? data : $.param(data);
+      // split params into hidden form inputs
+      var inputs = '';
+      $.each(data.split('&'), function(){
+        var pair = this.split('=');
+        inputs += '<input type="hidden" name="' + pair[0] + '" value="' + pair[1] + '" />';
+      });
+      // send the request by submitting a temporary form
+      $('<form action="' + url + '" method="' + (method || 'post') + '">' + inputs + '</form>')
+      .appendTo('body').submit().remove();
+      mod_toastr.info('Your download will begin soon.');
+    }
+  },
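+
+  // A hedged usage sketch mirroring downloadDataset() above (the id is a
+  // placeholder):
+  //
+  //     this.processDownload(
+  //         Galaxy.root + 'api/libraries/datasets/download/uncompressed',
+  //         { 'ld_ids': 'f2db41e1fa331b3e' } );
+  //
+  // The temporary hidden <form> is appended, submitted and removed because a
+  // plain AJAX POST would not trigger the browser's native file download.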
+
+  importIntoHistory: function(){
+    this.refreshUserHistoriesList(function(self){
+      var template = self.templateBulkImportInModal();
+      self.modal = Galaxy.modal;
+      self.modal.show({
+        closing_events  : true,
+        title           : 'Import into History',
+        body            : template({histories : self.histories.models}),
+        buttons         : {
+          'Import'    : function() {self.importCurrentIntoHistory();},
+          'Close'     : function() {Galaxy.modal.hide();}
+        }
+      });
+    });
+  },
+
+  refreshUserHistoriesList: function(callback){
+    var self = this;
+    this.histories = new mod_library_model.GalaxyHistories();
+    this.histories.fetch({
+      success: function (histories){
+        if (histories.length === 0){
+          mod_toastr.warning('You have to create a history first. Click this to do so.', '', {onclick: function() {window.location=Galaxy.root;}});
+        } else {
+          callback(self);
+        }
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error(response.responseJSON.err_msg);
+        } else {
+          mod_toastr.error('An error occurred.');
+        }
+      }
+    });
+  },
+
+  importCurrentIntoHistory: function(){
+    this.modal.disableButton('Import');
+    var new_history_name = this.modal.$('input[name=history_name]').val();
+    var that = this;
+    if (new_history_name !== ''){
+      $.post( Galaxy.root + 'api/histories', {name: new_history_name})
+        .done(function( new_history ) {
+          that.processImportToHistory(new_history.id);
+        })
+        .fail(function( xhr, status, error ) {
+          mod_toastr.error('An error occurred.');
+        })
+        .always(function() {
+          that.modal.enableButton('Import');
+        });
+    } else {
+      var history_id = $(this.modal.$el).find('select[name=dataset_import_single] option:selected').val();
+      this.processImportToHistory(history_id);
+      this.modal.enableButton('Import');
+    }
+  },
+
+  processImportToHistory: function( history_id ){
+    var historyItem = new mod_library_model.HistoryItem();
+    historyItem.url = historyItem.urlRoot + history_id + '/contents';
+    // set the target history as current so the user sees, in the history
+    // panel on the 'analysis' page, the history they last imported into
+    jQuery.getJSON( Galaxy.root + 'history/set_as_current?id=' + history_id  );
+    // save the dataset into selected history
+    historyItem.save({ content : this.id, source : 'library' }, {
+      success : function(){
+        Galaxy.modal.hide();
+        mod_toastr.success('Dataset imported. Click this to start analyzing it.', '', {onclick: function() {window.location=Galaxy.root;}});
+      },
+      error : function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error('Dataset not imported. ' + response.responseJSON.err_msg);
+        } else {
+          mod_toastr.error('An error occurred. Dataset not imported. Please try again.');
+        }
+      }
+    });
+  },
+
+  shareDataset: function(){
+    mod_toastr.info('Feature coming soon.');
+  },
+
+  goBack: function(){
+    Galaxy.libraries.library_router.back();
+  },
+
+  showPermissions: function(options){
+    this.options = _.extend(this.options, options);
+    $(".tooltip").remove();
+    if (this.options.fetched_permissions !== undefined){
+      if (this.options.fetched_permissions.access_dataset_roles.length === 0){
+        this.model.set({is_unrestricted:true});
+      } else{
+        this.model.set({is_unrestricted:false});
+      }
+    }
+    // Select behaves differently for admins; details in this.prepareSelectBoxes
+    var is_admin = false;
+    if (Galaxy.user){
+      is_admin = Galaxy.user.isAdmin();
+    }
+    var template = this.templateDatasetPermissions();
+    this.$el.html(template({item: this.model, is_admin: is_admin}));
+    var self = this;
+    $.get( Galaxy.root + "api/libraries/datasets/" + self.id + "/permissions?scope=current").done(function(fetched_permissions) {
+      self.prepareSelectBoxes({fetched_permissions: fetched_permissions, is_admin: is_admin});
+    }).fail(function(){
+        mod_toastr.error('An error occurred while attempting to fetch dataset permissions.');
+    });
+    $("#center [data-toggle]").tooltip();
+    $("#center").css('overflow','auto');
+  },
+
+  _serializeRoles : function(role_list){
+    var selected_roles = [];
+    for (var i = 0; i < role_list.length; i++) {
+      selected_roles.push(role_list[i][1] + ':' + role_list[i][0]);
+    }
+    return selected_roles;
+  },
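+
+  // For example (values illustrative), given the [name, id] pairs the API
+  // apparently returns, _serializeRoles( [ [ 'role_one', 'f2db41e1fa331b3e' ] ] )
+  // yields [ 'f2db41e1fa331b3e:role_one' ] - the 'id:name' strings that
+  // initSelection in prepareSelectBoxes() splits back apart.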
+
+  prepareSelectBoxes: function(options){
+    this.options = _.extend(this.options, options);
+    var fetched_permissions = this.options.fetched_permissions;
+    var is_admin = this.options.is_admin;
+    var self = this;
+    var selected_access_dataset_roles = this._serializeRoles(fetched_permissions.access_dataset_roles);
+    var selected_modify_item_roles = this._serializeRoles(fetched_permissions.modify_item_roles);
+    var selected_manage_dataset_roles = this._serializeRoles(fetched_permissions.manage_dataset_roles);
+
+    if (is_admin){ // Admin has a special select that allows AJAX searching
+        self.accessSelectObject = new mod_select.View(this._createSelectOptions(this, 'access_perm', selected_access_dataset_roles));
+        self.modifySelectObject = new mod_select.View(this._createSelectOptions(this, 'modify_perm', selected_modify_item_roles));
+        self.manageSelectObject = new mod_select.View(this._createSelectOptions(this, 'manage_perm', selected_manage_dataset_roles));
+    } else { // Non-admins have select with pre-loaded options
+        var template = self.templateAccessSelect();
+        $.get( Galaxy.root + "api/libraries/datasets/" + self.id + "/permissions?scope=available", function( data ) {
+            $('.access_perm').html(template({options: data.roles}));
+            self.accessSelectObject = $('#access_select').select2();
+        }).fail(function() {
+            mod_toastr.error('An error occurred while attempting to fetch dataset permissions.');
+        });
+    }
+  },
+
+  /**
+   * Build the Select2 configuration shared by the access/modify/manage role
+   * selectors; the three originals differed only in css class, container
+   * and preselected roles.
+   */
+  _createSelectOptions: function(self, id, init_data){
+    return {
+      minimumInputLength: 0,
+      css: id,
+      multiple: true,
+      placeholder: 'Click to select a role',
+      container: self.$el.find('#' + id),
+      ajax: {
+          url: Galaxy.root + "api/libraries/datasets/" + self.id + "/permissions?scope=available",
+          dataType: 'json',
+          quietMillis: 100,
+          data: function (term, page) { // page is the one-based page number tracked by Select2
+              return {
+                  q: term, // search term
+                  page_limit: 10, // page size
+                  page: page // page number
+              };
+          },
+          results: function (data, page) {
+              var more = (page * 10) < data.total; // whether or not there are more results available
+              // notice we return the value of more so Select2 knows if more results can be loaded
+              return {results: data.roles, more: more};
+          }
+      },
+      formatResult : function roleFormatResult(role) {
+          return role.name + ' type: ' + role.type;
+      },
+      formatSelection: function roleFormatSelection(role) {
+          return role.name;
+      },
+      initSelection: function(element, callback) {
+          // the input tag has a value attribute preloaded that points to a preselected role's id
+          // this function resolves that id attribute to an object that select2 can render
+          // using its formatResult renderer - that way the role name is shown preselected
+          var data = [];
+          $(element.val().split(",")).each(function() {
+              var item = this.split(':');
+              data.push({
+                  id: item[0],
+                  name: item[1]
+              });
+          });
+          callback(data);
+      },
+      initialData: init_data.join(','),
+      dropdownCssClass: "bigdrop" // apply css that makes the dropdown taller
+    };
+  },
+
+  comingSoon: function(){
+    mod_toastr.warning('Feature coming soon.');
+  },
+
+  copyToClipboard: function(){
+    var href = Backbone.history.location.href;
+    if (href.lastIndexOf('/permissions') !== -1){
+      href = href.substr(0, href.lastIndexOf('/permissions'));
+    }
+    window.prompt("Copy to clipboard: Ctrl+C, Enter", href);
+  },
+
+  makeDatasetPrivate: function(){
+    var self = this;
+    $.post( Galaxy.root + "api/libraries/datasets/" + self.id + "/permissions?action=make_private").done(function(fetched_permissions) {
+      self.model.set({is_unrestricted: false});
+      self.showPermissions({fetched_permissions: fetched_permissions});
+      mod_toastr.success('The dataset is now private to you.');
+    }).fail(function(){
+      mod_toastr.error('An error occurred while attempting to make dataset private.');
+    });
+  },
+
+  removeDatasetRestrictions: function(){
+    var self = this;
+    $.post( Galaxy.root + "api/libraries/datasets/" + self.id + "/permissions?action=remove_restrictions")
+    .done(function(fetched_permissions) {
+      self.model.set({is_unrestricted: true});
+      self.showPermissions({fetched_permissions: fetched_permissions});
+      mod_toastr.success('Access to this dataset is now unrestricted.');
+    })
+    .fail(function(){
+      mod_toastr.error('An error occurred while attempting to make dataset unrestricted.');
+    });
+  },
+
+  /**
+   * Extract the role ids from the Select2 element's 'data'
+   */
+  _extractIds: function(roles_list){
+    var ids_list = [];
+    for (var i = roles_list.length - 1; i >= 0; i--) {
+      ids_list.push(roles_list[i].id);
+    }
+    return ids_list;
+  },
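+
+  // e.g. _extractIds( [ { id: 'f2db41e1fa331b3e', name: 'role_one' } ] )
+  // returns [ 'f2db41e1fa331b3e' ] (values illustrative); note the ids come
+  // back reversed because the loop walks the list backwards.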
+
+  /**
+   * Save the permissions for roles entered in the select boxes.
+   */
+  savePermissions: function(event){
+    var self = this;
+    var access_ids = this._extractIds(this.accessSelectObject.$el.select2('data'));
+    var manage_ids = this._extractIds(this.manageSelectObject.$el.select2('data'));
+    var modify_ids = this._extractIds(this.modifySelectObject.$el.select2('data'));
+    $.post( Galaxy.root + "api/libraries/datasets/" + self.id + "/permissions?action=set_permissions", { 'access_ids[]': access_ids, 'manage_ids[]': manage_ids, 'modify_ids[]': modify_ids, } )
+    .done(function(fetched_permissions){
+      self.showPermissions({fetched_permissions: fetched_permissions})
+      mod_toastr.success('Permissions saved.');
+    })
+    .fail(function(){
+      mod_toastr.error('An error occurred while attempting to set dataset permissions.');
+    })
+  },
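+
+  // A hedged note: with jQuery's default serialization the request body goes
+  // out as repeated PHP-style keys, e.g. (ids illustrative)
+  //     access_ids[]=f2db41e1fa331b3e&access_ids[]=f597429621d6eb2b
+  // presumably collected server-side into one role id list per permission type.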
+
+  /**
+   * Request all extensions and genomes from Galaxy
+   * and save them sorted in arrays.
+   */
+  fetchExtAndGenomes: function(){
+    var that = this;
+    mod_utils.get({
+        url      :  Galaxy.root + "api/datatypes?extension_only=False",
+        success  :  function( datatypes ) {
+                        for (var key in datatypes) {
+                            that.list_extensions.push({
+                                id              : datatypes[key].extension,
+                                text            : datatypes[key].extension,
+                                description     : datatypes[key].description,
+                                description_url : datatypes[key].description_url
+                            });
+                        }
+                        that.list_extensions.sort(function(a, b) {
+                            return a.id > b.id ? 1 : a.id < b.id ? -1 : 0;
+                        });
+                        that.list_extensions.unshift(that.auto);
+                    }
+      });
+    mod_utils.get({
+        url     :    Galaxy.root + "api/genomes",
+        success : function( genomes ) {
+                    for ( var key in genomes ) {
+                        that.list_genomes.push({
+                            id      : genomes[key][1],
+                            text    : genomes[key][0]
+                        });
+                    }
+                    that.list_genomes.sort(function(a, b) {
+                        return a.id > b.id ? 1 : a.id < b.id ? -1 : 0;
+                    });
+                }
+    });
+  },
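+
+  // Once both requests settle the arrays look roughly like this (values
+  // illustrative):
+  //     list_extensions = [ { id: 'auto', text: 'Auto-detect', ... },
+  //                         { id: 'bed', text: 'bed', ... }, ... ]
+  //     list_genomes    = [ { id: 'hg19', text: 'Human Feb. 2009 (GRCh37/hg19)' }, ... ]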
+
+  renderSelectBoxes: function(options){
+    // This won't work properly unless the data has already been fetched.
+    // See this.fetchExtAndGenomes()
+    // TODO switch to common resources:
+    // https://trello.com/c/dIUE9YPl/1933-ui-common-resources-and-data-into-galaxy-object
+    var current_genome = '?';
+    var current_ext = 'auto';
+    if (typeof options !== 'undefined'){
+      if (typeof options.genome_build !== 'undefined'){
+        current_genome = options.genome_build;
+      }
+      if (typeof options.file_ext !== 'undefined'){
+        current_ext = options.file_ext;
+      }
+    }
+    var that = this;
+    this.select_genome = new mod_select.View( {
+        css: 'dataset-genome-select',
+        data: that.list_genomes,
+        container: that.$el.find( '#dataset_genome_select' ),
+        value: current_genome
+    } );
+    this.select_extension = new mod_select.View({
+      css: 'dataset-extension-select',
+      data: that.list_extensions,
+      container: that.$el.find( '#dataset_extension_select' ),
+      value: current_ext
+    });
+  },
+
+  templateDataset : function(){
+    return _.template([
+    // CONTAINER START
+    '<div class="library_style_container">',
+      '<div id="library_toolbar">',
+        '<button data-toggle="tooltip" data-placement="top" title="Download dataset" class="btn btn-default toolbtn-download-dataset primary-button toolbar-item" type="button">',
+          '<span class="fa fa-download"></span>',
+          ' Download',
+        '</button>',
+        '<button data-toggle="tooltip" data-placement="top" title="Import dataset into history" class="btn btn-default toolbtn-import-dataset primary-button toolbar-item" type="button">',
+          '<span class="fa fa-book"></span>',
+          ' to History',
+        '</button>',
+        '<% if (item.get("can_user_modify")) { %>',
+          '<button data-toggle="tooltip" data-placement="top" title="Modify library item" class="btn btn-default toolbtn_modify_dataset primary-button toolbar-item" type="button">',
+            '<span class="fa fa-pencil"></span>',
+            ' Modify',
+          '</button>',
+        '<% } %>',
+        '<% if (item.get("can_user_manage")) { %>',
+          '<a href="#folders/<%- item.get("folder_id") %>/datasets/<%- item.id %>/permissions">',
+            '<button data-toggle="tooltip" data-placement="top" title="Manage permissions" class="btn btn-default toolbtn_change_permissions primary-button toolbar-item" type="button">',
+              '<span class="fa fa-group"></span>',
+              ' Permissions',
+            '</button>',
+          '</a>',
+        '<% } %>',
+      '</div>',
+
+    // BREADCRUMBS
+    '<ol class="breadcrumb">',
+      '<li><a title="Return to the list of libraries" href="#">Libraries</a></li>',
+      '<% _.each(item.get("full_path"), function(path_item) { %>',
+        '<% if (path_item[0] != item.id) { %>',
+          '<li><a title="Return to this folder" href="#/folders/<%- path_item[0] %>"><%- path_item[1] %></a> </li> ',
+        '<% } else { %>',
+          '<li class="active"><span title="You are here"><%- path_item[1] %></span></li>',
+        '<% } %>',
+      '<% }); %>',
+    '</ol>',
+
+    '<% if (item.get("is_unrestricted")) { %>',
+      '<div class="alert alert-info">',
+        'This dataset is unrestricted so everybody can access it. Just share the URL of this page. ',
+        '<button data-toggle="tooltip" data-placement="top" title="Copy to clipboard" class="btn btn-default btn-copy-link-to-clipboard primary-button" type="button">',
+          '<span class="fa fa-clipboard"></span>',
+          ' To Clipboard',
+        '</button> ',
+      '</div>',
+    '<% } %>',
+
+    // TABLE START
+    '<div class="dataset_table">',
+      '<table class="grid table table-striped table-condensed">',
+        '<tr>',
+          '<th class="dataset-first-column" scope="row" id="id_row" data-id="<%= _.escape(item.get("ldda_id")) %>">Name</th>',
+          '<td><%= _.escape(item.get("name")) %></td>',
+        '</tr>',
+        '<% if (item.get("file_ext")) { %>',
+          '<tr>',
+            '<th scope="row">Data type</th>',
+            '<td><%= _.escape(item.get("file_ext")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("genome_build")) { %>',
+          '<tr>',
+            '<th scope="row">Genome build</th>',
+            '<td><%= _.escape(item.get("genome_build")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("file_size")) { %>',
+          '<tr>',
+            '<th scope="row">Size</th>',
+            '<td><%= _.escape(item.get("file_size")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("date_uploaded")) { %>',
+          '<tr>',
+            '<th scope="row">Date uploaded (UTC)</th>',
+            '<td><%= _.escape(item.get("date_uploaded")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("uploaded_by")) { %>',
+          '<tr>',
+            '<th scope="row">Uploaded by</th>',
+            '<td><%= _.escape(item.get("uploaded_by")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("metadata_data_lines")) { %>',
+          '<tr>',
+            '<th scope="row">Data Lines</th>',
+            '<td scope="row"><%= _.escape(item.get("metadata_data_lines")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("metadata_comment_lines")) { %>',
+          '<tr>',
+            '<th scope="row">Comment Lines</th>',
+            '<td scope="row"><%= _.escape(item.get("metadata_comment_lines")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("metadata_columns")) { %>',
+          '<tr>',
+            '<th scope="row">Number of Columns</th>',
+            '<td scope="row"><%= _.escape(item.get("metadata_columns")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("metadata_column_types")) { %>',
+          '<tr>',
+            '<th scope="row">Column Types</th>',
+            '<td scope="row"><%= _.escape(item.get("metadata_column_types")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("message")) { %>',
+          '<tr>',
+            '<th scope="row">Message</th>',
+            '<td scope="row"><%= _.escape(item.get("message")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("misc_blurb")) { %>',
+          '<tr>',
+            '<th scope="row">Miscellaneous blurb</th>',
+            '<td scope="row"><%= _.escape(item.get("misc_blurb")) %></td>',
+          '</tr>',
+        '<% } %>',
+        '<% if (item.get("misc_info")) { %>',
+          '<tr>',
+            '<th scope="row">Miscellaneous information</th>',
+            '<td scope="row"><%= _.escape(item.get("misc_info")) %></td>',
+          '</tr>',
+        '<% } %>',
+      '</table>',
+
+    '<div>',
+      '<pre class="peek">',
+      '</pre>',
+    '</div>',
+
+    '<% if (item.get("has_versions")) { %>',
+      '<div>',
+        '<h3>Expired versions:</h3>',
+        '<ul>',
+          '<% _.each(item.get("expired_versions"), function(version) { %>',
+            '<li><a title="See details of this version" href="#folders/<%- item.get("folder_id") %>/datasets/<%- item.id %>/versions/<%- version[0] %>"><%- version[1] %></a></li>',
+          '<% }) %>',
+        '</ul>',
+      '</div>',
+    '<% } %>',
+    // TABLE END
+    '</div>',
+    // CONTAINER END
+    '</div>'
+    ].join(''));
+  },
+
+  templateVersion : function(){
+    return _.template([
+    // CONTAINER START
+    '<div class="library_style_container">',
+      '<div id="library_toolbar">',
+        '<a href="#folders/<%- item.get("folder_id") %>/datasets/<%- item.id %>">',
+          '<button data-toggle="tooltip" data-placement="top" title="Go to latest dataset" class="btn btn-default primary-button toolbar-item" type="button">',
+            '<span class="fa fa-caret-left fa-lg"></span>',
+            ' Latest dataset',
+          '</button>',
+        '</a>',
+      '</div>',
+
+      // BREADCRUMBS
+      '<ol class="breadcrumb">',
+        '<li><a title="Return to the list of libraries" href="#">Libraries</a></li>',
+        '<% _.each(item.get("full_path"), function(path_item) { %>',
+          '<% if (path_item[0] != item.id) { %>',
+            '<li><a title="Return to this folder" href="#/folders/<%- path_item[0] %>"><%- path_item[1] %></a> </li> ',
+          '<% } else { %>',
+            '<li class="active"><span title="You are here"><%- path_item[1] %></span></li>',
+          '<% } %>',
+        '<% }); %>',
+      '</ol>',
+
+      '<div class="alert alert-warning">This is an expired version of the library dataset: <%= _.escape(item.get("name")) %></div>',
+      // DATASET START
+      '<div class="dataset_table">',
+        '<table class="grid table table-striped table-condensed">',
+          '<tr>',
+            '<th scope="row" id="id_row" data-id="<%= _.escape(ldda.id) %>">Name</th>',
+            '<td><%= _.escape(ldda.get("name")) %></td>',
+          '</tr>',
+          '<% if (ldda.get("file_ext")) { %>',
+            '<tr>',
+              '<th scope="row">Data type</th>',
+              '<td><%= _.escape(ldda.get("file_ext")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("genome_build")) { %>',
+            '<tr>',
+              '<th scope="row">Genome build</th>',
+              '<td><%= _.escape(ldda.get("genome_build")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("file_size")) { %>',
+            '<tr>',
+              '<th scope="row">Size</th>',
+              '<td><%= _.escape(ldda.get("file_size")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("date_uploaded")) { %>',
+            '<tr>',
+              '<th scope="row">Date uploaded (UTC)</th>',
+              '<td><%= _.escape(ldda.get("date_uploaded")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("uploaded_by")) { %>',
+            '<tr>',
+              '<th scope="row">Uploaded by</th>',
+              '<td><%= _.escape(ldda.get("uploaded_by")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("metadata_data_lines")) { %>',
+            '<tr>',
+              '<th scope="row">Data Lines</th>',
+              '<td scope="row"><%= _.escape(ldda.get("metadata_data_lines")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("metadata_comment_lines")) { %>',
+            '<tr>',
+              '<th scope="row">Comment Lines</th>',
+              '<td scope="row"><%= _.escape(ldda.get("metadata_comment_lines")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("metadata_columns")) { %>',
+            '<tr>',
+              '<th scope="row">Number of Columns</th>',
+              '<td scope="row"><%= _.escape(ldda.get("metadata_columns")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("metadata_column_types")) { %>',
+            '<tr>',
+              '<th scope="row">Column Types</th>',
+              '<td scope="row"><%= _.escape(ldda.get("metadata_column_types")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("message")) { %>',
+            '<tr>',
+              '<th scope="row">Message</th>',
+              '<td scope="row"><%= _.escape(ldda.get("message")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("misc_blurb")) { %>',
+            '<tr>',
+              '<th scope="row">Miscellaneous blurb</th>',
+              '<td scope="row"><%= _.escape(ldda.get("misc_blurb")) %></td>',
+            '</tr>',
+          '<% } %>',
+          '<% if (ldda.get("misc_info")) { %>',
+            '<tr>',
+              '<th scope="row">Miscellaneous information</th>',
+              '<td scope="row"><%= _.escape(ldda.get("misc_info")) %></td>',
+            '</tr>',
+          '<% } %>',
+        '</table>',
+        '<div>',
+          '<pre class="peek">',
+          '</pre>',
+        '</div>',
+      // DATASET END
+      '</div>',
+    // CONTAINER END
+    '</div>'
+    ].join(''));
+  },
+
+  templateModifyDataset : function(){
+    return _.template([
+    // CONTAINER START
+    '<div class="library_style_container">',
+      '<div id="library_toolbar">',
+        '<button data-toggle="tooltip" data-placement="top" title="Cancel modifications" class="btn btn-default toolbtn_cancel_modifications primary-button toolbar-item" type="button">',
+          '<span class="fa fa-times"></span>',
+          ' Cancel',
+        '</button>',
+        '<button data-toggle="tooltip" data-placement="top" title="Save modifications" class="btn btn-default toolbtn_save_modifications primary-button toolbar-item" type="button">',
+          '<span class="fa fa-floppy-o"></span>',
+          ' Save',
+        '</button>',
+      '</div>',
+
+      // BREADCRUMBS
+      '<ol class="breadcrumb">',
+        '<li><a title="Return to the list of libraries" href="#">Libraries</a></li>',
+        '<% _.each(item.get("full_path"), function(path_item) { %>',
+          '<% if (path_item[0] != item.id) { %>',
+            '<li><a title="Return to this folder" href="#/folders/<%- path_item[0] %>"><%- path_item[1] %></a> </li> ',
+          '<% } else { %>',
+            '<li class="active"><span title="You are here"><%- path_item[1] %></span></li>',
+          '<% } %>',
+        '<% }); %>',
+      '</ol>',
+
+      '<div class="dataset_table">',
+        '<p>For full editing options, please import the dataset into a history and use "Edit attributes" on it.</p>',
+        '<table class="grid table table-striped table-condensed">',
+          '<tr>',
+            '<th class="dataset-first-column" scope="row" id="id_row" data-id="<%= _.escape(item.get("ldda_id")) %>">Name</th>',
+            '<td><input class="input_dataset_name form-control" type="text" placeholder="name" value="<%= _.escape(item.get("name")) %>"></td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Data type</th>',
+            '<td>',
+              '<span id="dataset_extension_select" class="dataset-extension-select" />',
+            '</td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Genome build</th>',
+            '<td>',
+              '<span id="dataset_genome_select" class="dataset-genome-select" />',
+            '</td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Size</th>',
+            '<td><%= _.escape(item.get("file_size")) %></td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Date uploaded (UTC)</th>',
+            '<td><%= _.escape(item.get("date_uploaded")) %></td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Uploaded by</th>',
+            '<td><%= _.escape(item.get("uploaded_by")) %></td>',
+          '</tr>',
+          '<tr scope="row">',
+            '<th scope="row">Data Lines</th>',
+            '<td scope="row"><%= _.escape(item.get("metadata_data_lines")) %></td>',
+          '</tr>',
+            '<th scope="row">Comment Lines</th>',
+            '<% if (item.get("metadata_comment_lines") === "") { %>',
+              '<td scope="row"><%= _.escape(item.get("metadata_comment_lines")) %></td>',
+            '<% } else { %>',
+              '<td scope="row">unknown</td>',
+            '<% } %>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Number of Columns</th>',
+            '<td scope="row"><%= _.escape(item.get("metadata_columns")) %></td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Column Types</th>',
+            '<td scope="row"><%= _.escape(item.get("metadata_column_types")) %></td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Message</th>',
+            '<td scope="row"><%= _.escape(item.get("message")) %></td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Miscellaneous information</th>',
+            '<td scope="row"><%= _.escape(item.get("misc_info")) %></td>',
+          '</tr>',
+          '<tr>',
+            '<th scope="row">Miscellaneous blurb</th>',
+            '<td scope="row"><%= _.escape(item.get("misc_blurb")) %></td>',
+          '</tr>',
+        '</table>',
+        '<div>',
+          '<pre class="peek">',
+          '</pre>',
+        '</div>',
+      '</div>',
+    // CONTAINER END
+    '</div>'
+    ].join(''));
+  },
+
+  templateDatasetPermissions : function(){
+    return _.template([
+    // CONTAINER START
+    '<div class="library_style_container">',
+      '<div id="library_toolbar">',
+        '<a href="#folders/<%- item.get("folder_id") %>">',
+          '<button data-toggle="tooltip" data-placement="top" title="Go back to containing folder" class="btn btn-default primary-button toolbar-item" type="button">',
+            '<span class="fa fa-folder-open-o"></span>',
+            ' Containing Folder',
+          '</button>',
+        '</a>',
+        '<a href="#folders/<%- item.get("folder_id") %>/datasets/<%- item.id %>">',
+          '<button data-toggle="tooltip" data-placement="top" title="Go back to dataset" class="btn btn-default primary-button toolbar-item" type="button">',
+            '<span class="fa fa-file-o"></span>',
+            ' Dataset Details',
+          '</button>',
+        '</a>',
+      '</div>',
+
+      // BREADCRUMBS
+      '<ol class="breadcrumb">',
+        '<li><a title="Return to the list of libraries" href="#">Libraries</a></li>',
+          '<% _.each(item.get("full_path"), function(path_item) { %>',
+            '<% if (path_item[0] != item.id) { %>',
+              '<li><a title="Return to this folder" href="#/folders/<%- path_item[0] %>"><%- path_item[1] %></a> </li> ',
+            '<% } else { %>',
+              '<li class="active"><span title="You are here"><%- path_item[1] %></span></li>',
+            '<% } %>',
+        '<% }); %>',
+      '</ol>',
+
+      '<h1>Dataset: <%= _.escape(item.get("name")) %></h1>',
+      '<div class="alert alert-warning">',
+        '<% if (is_admin) { %>',
+          'You are logged in as an <strong>administrator</strong>, therefore you can manage any dataset on this Galaxy instance. Please make sure you understand the consequences.',
+        '<% } else { %>',
+          'You can assign any number of roles to any of the following permission types. However, please consider carefully the implications of such actions.',
+        '<% } %>',
+      '</div>',
+      '<div class="dataset_table">',
+        '<h2>Library-related permissions</h2>',
+        '<h4>Roles that can modify the library item</h4>',
+        '<div id="modify_perm" class="modify_perm roles-selection"></div>',
+        '<div class="alert alert-info roles-selection">User with <strong>any</strong> of these roles can modify name, metadata, and other information about this library item.</div>',
+        '<hr/>',
+        '<h2>Dataset-related permissions</h2>',
+        '<div class="alert alert-warning">Changes made below will affect <strong>every</strong> library item that was created from this dataset and also every history this dataset is part of.</div>',
+        '<% if (!item.get("is_unrestricted")) { %>',
+          '<p>You can remove all access restrictions on this dataset. ',
+            '<button data-toggle="tooltip" data-placement="top" title="Everybody will be able to access the dataset." class="btn btn-default btn-remove-restrictions primary-button" type="button">',
+              '<span class="fa fa-globe"></span>',
+              ' Remove restrictions',
+            '</button>',
+          '</p>',
+        '<% } else { %>',
+          'This dataset is unrestricted so everybody can access it. Just share the URL of this page.',
+          '<button data-toggle="tooltip" data-placement="top" title="Copy to clipboard" class="btn btn-default btn-copy-link-to-clipboard primary-button" type="button">',
+            '<span class="fa fa-clipboard"></span>',
+            ' To Clipboard',
+            '</button>',
+          '<p>You can make this dataset private to you. ',
+            '<button data-toggle="tooltip" data-placement="top" title="Only you will be able to access the dataset." class="btn btn-default btn-make-private primary-button" type="button">',
+              '<span class="fa fa-key"></span>',
+              ' Make Private',
+            '</button>',
+          '</p>',
+        '<% } %>',
+        '<h4>Roles that can access the dataset</h4>',
+        '<div id="access_perm" class="access_perm roles-selection"></div>',
+        '<div class="alert alert-info roles-selection">',
+          'A user has to have <strong>all these roles</strong> in order to access this dataset.',
+          ' Users without access permission <strong>cannot</strong> have other permissions on this dataset.',
+          ' If there are no access roles set on the dataset it is considered <strong>unrestricted</strong>.',
+        '</div>',
+        '<h4>Roles that can manage permissions on the dataset</h4>',
+        '<div id="manage_perm" class="manage_perm roles-selection"></div>',
+        '<div class="alert alert-info roles-selection">',
+          'Users with <strong>any</strong> of these roles can manage permissions of this dataset. If you remove yourself you will lose the ability to manage this dataset unless you are an admin.',
+        '</div>',
+        '<button data-toggle="tooltip" data-placement="top" title="Save modifications made on this page" class="btn btn-default toolbtn_save_permissions primary-button" type="button">',
+          '<span class="fa fa-floppy-o"></span>',
+          ' Save',
+        '</button>',
+      '</div>',
+    // CONTAINER END
+    '</div>'
+    ].join(''));
+  },
+
+  templateBulkImportInModal: function(){
+    return _.template([
+    '<div>',
+      '<div class="library-modal-item">',
+        'Select history: ',
+        '<select id="dataset_import_single" name="dataset_import_single" style="width:50%; margin-bottom: 1em; " autofocus>',
+          '<% _.each(histories, function(history) { %>',
+            '<option value="<%= _.escape(history.get("id")) %>"><%= _.escape(history.get("name")) %></option>',
+          '<% }); %>',
+        '</select>',
+      '</div>',
+      '<div class="library-modal-item">',
+        'or create new: ',
+        '<input type="text" name="history_name" value="" placeholder="name of the new history" style="width:50%;">',
+        '</input>',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+
+  templateAccessSelect: function(){
+    return _.template([
+    '<select id="access_select" multiple>',
+      '<% _.each(options, function(option) { %>',
+        '<option value="<%- option.name %>"><%- option.name %></option>',
+      '<% }); %>',
+    '</select>'
+    ].join(''));
+  }
+
+});
+
+return {
+    LibraryDatasetView: LibraryDatasetView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-folder-view.js b/client/galaxy/scripts/mvc/library/library-folder-view.js
new file mode 100644
index 0000000..ed3c30e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-folder-view.js
@@ -0,0 +1,319 @@
+define([
+  "libs/toastr",
+  "mvc/library/library-model",
+  'mvc/ui/ui-select'
+  ],
+function(
+        mod_toastr,
+        mod_library_model,
+        mod_select
+        ) {
+
+var FolderView = Backbone.View.extend({
+  el: '#center',
+
+  model: null,
+
+  options: {},
+
+  events: {
+    "click .toolbtn_save_permissions"     :   "savePermissions"
+  },
+
+  initialize: function(options){
+    this.options = _.extend(this.options, options);
+    if (this.options.id){
+      this.fetchFolder();
+    }
+  },
+
+  fetchFolder: function(options){
+    this.options = _.extend(this.options, options);
+    this.model = new mod_library_model.FolderAsModel({id:this.options.id});
+    var that = this;
+    this.model.fetch({
+      success: function() {
+        if (that.options.show_permissions){
+            that.showPermissions();
+        } else {
+            that.render();
+        }
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error(response.responseJSON.err_msg + ' Click this to go back.', '', {onclick: function() {Galaxy.libraries.library_router.back();}});
+        } else {
+          mod_toastr.error('An error occurred. Click this to go back.', '', {onclick: function() {Galaxy.libraries.library_router.back();}});
+        }
+      }
+    });
+  },
+
+  render: function(options){
+    $(".tooltip").remove();
+    this.options = _.extend(this.options, options);
+    var template = this.templateFolder();
+    this.$el.html(template({item: this.model}));
+    $(".peek").html(this.model.get("peek"));
+    $("#center [data-toggle]").tooltip();
+  },
+
+  shareFolder: function(){
+    mod_toastr.info('Feature coming soon.');
+  },
+
+  goBack: function(){
+    Galaxy.libraries.library_router.back();
+  },
+
+  showPermissions: function(options){
+    this.options = _.extend(this.options, options);
+    $(".tooltip").remove();
+
+    var is_admin = false;
+    if (Galaxy.user){
+      is_admin = Galaxy.user.isAdmin();
+    }
+    var template = this.templateFolderPermissions();
+    this.$el.html(template({folder: this.model, is_admin:is_admin}));
+
+    var self = this;
+    $.get( Galaxy.root + "api/folders/" + self.id + "/permissions?scope=current").done(function(fetched_permissions) {
+      self.prepareSelectBoxes({fetched_permissions:fetched_permissions});
+    }).fail(function(){
+        mod_toastr.error('An error occurred while attempting to fetch folder permissions.');
+    });
+
+    $("#center [data-toggle]").tooltip();
+    //hack to show scrollbars
+    $("#center").css('overflow','auto');
+  },
+
+  _serializeRoles : function(role_list){
+    var selected_roles = [];
+    for (var i = 0; i < role_list.length; i++) {
+      selected_roles.push(role_list[i][1] + ':' + role_list[i][0]);
+    }
+    return selected_roles;
+  },
+
+  prepareSelectBoxes: function(options){
+    this.options = _.extend(this.options, options);
+    var fetched_permissions = this.options.fetched_permissions;
+    var self = this;
+
+    var selected_add_item_roles = this._serializeRoles(fetched_permissions.add_library_item_role_list);
+    var selected_manage_folder_roles = this._serializeRoles(fetched_permissions.manage_folder_role_list);
+    var selected_modify_folder_roles = this._serializeRoles(fetched_permissions.modify_folder_role_list);
+
+    self.addSelectObject = new mod_select.View(this._createSelectOptions(this, 'add_perm', selected_add_item_roles));
+    self.manageSelectObject = new mod_select.View(this._createSelectOptions(this, 'manage_perm', selected_manage_folder_roles));
+    self.modifySelectObject = new mod_select.View(this._createSelectOptions(this, 'modify_perm', selected_modify_folder_roles));
+  },
+
+  _createSelectOptions: function(self, id, init_data){
+    var select_options = {
+      minimumInputLength: 0,
+      css: id,
+      multiple:true,
+      placeholder: 'Click to select a role',
+      container: self.$el.find('#' + id),
+      ajax: {
+          url: Galaxy.root + "api/folders/" + self.id + "/permissions?scope=available",
+          dataType: 'json',
+          quietMillis: 100,
+          data: function (term, page) { // page is the one-based page number tracked by Select2
+              return {
+                  q: term, //search term
+                  page_limit: 10, // page size
+                  page: page // page number
+              };
+          },
+          results: function (data, page) {
+              var more = (page * 10) < data.total; // whether or not there are more results available
+              // notice we return the value of more so Select2 knows if more results can be loaded
+              return {results: data.roles, more: more};
+          }
+      },
+      formatResult : function roleFormatResult(role) {
+          return role.name + ' type: ' + role.type;
+      },
+
+      formatSelection: function roleFormatSelection(role) {
+          return role.name;
+      },
+      initSelection: function(element, callback) {
+      // the input tag has a value attribute preloaded that points to a preselected role's id
+      // this function resolves that id attribute to an object that select2 can render
+      // using its formatResult renderer - that way the role name is shown preselected
+          var data = [];
+          $(element.val().split(",")).each(function() {
+              var item = this.split(':');
+              data.push({
+                  id: item[0],
+                  name: item[1]
+              });
+          });
+          callback(data);
+      },
+      initialData: init_data.join(','),
+      dropdownCssClass: "bigdrop" // apply css that makes the dropdown taller
+    };
+
+    return select_options;
+  },
+
+  comingSoon: function(){
+    mod_toastr.warning('Feature coming soon.');
+  },
+
+  copyToClipboard: function(){
+    var href = Backbone.history.location.href;
+    if (href.lastIndexOf('/permissions') !== -1){
+      href = href.substr(0, href.lastIndexOf('/permissions'));
+    }
+    window.prompt("Copy to clipboard: Ctrl+C, Enter", href);
+  },
+
+  /**
+   * Extract the role ids from the Select2 element's 'data'
+   */
+  _extractIds: function(roles_list){
+    var ids_list = [];
+    for (var i = roles_list.length - 1; i >= 0; i--) {
+      ids_list.push(roles_list[i].id);
+    }
+    return ids_list;
+  },
+
+  /**
+   * Save the permissions for roles entered in the select boxes.
+   */
+  savePermissions: function(event){
+    var self = this;
+    var add_ids = this._extractIds(this.addSelectObject.$el.select2('data'));
+    var manage_ids = this._extractIds(this.manageSelectObject.$el.select2('data'));
+    var modify_ids = this._extractIds(this.modifySelectObject.$el.select2('data'));
+    $.post( Galaxy.root + "api/folders/" + self.id + "/permissions?action=set_permissions", { 'add_ids[]': add_ids, 'manage_ids[]': manage_ids, 'modify_ids[]': modify_ids, } )
+    .done(function(fetched_permissions){
+      self.showPermissions({fetched_permissions:fetched_permissions})
+      mod_toastr.success('Permissions saved.');
+    })
+    .fail(function(){
+      mod_toastr.error('An error occurred while attempting to set folder permissions.');
+    })
+  },
+
+  templateFolder : function(){
+    return _.template([
+    '<div class="library_style_container">',
+      '<div id="library_toolbar">',
+        '<button data-toggle="tooltip" data-placement="top" title="Modify library item" class="btn btn-default toolbtn_modify_dataset primary-button" type="button">',
+          '<span class="fa fa-pencil"/>',
+          ' Modify',
+        '</button>',
+        '<a href="#folders/<%- item.get("folder_id") %>/datasets/<%- item.id %>/permissions">',
+          '<button data-toggle="tooltip" data-placement="top" title="Manage permissions" class="btn btn-default toolbtn_change_permissions primary-button" type="button">',
+            '<span class="fa fa-group"/>',
+            ' Permissions',
+          '</button>',
+        '</a>',
+        '<button data-toggle="tooltip" data-placement="top" title="Share dataset" class="btn btn-default toolbtn-share-dataset primary-button" type="button">',
+          '<span class="fa fa-share"/>',
+          ' Share',
+        '</button>',
+      '</div>',
+      '<p>',
+        'This dataset is unrestricted so everybody can access it. Just share the URL of this page. ',
+        '<button data-toggle="tooltip" data-placement="top" title="Copy to clipboard" class="btn btn-default btn-copy-link-to-clipboard primary-button" type="button">',
+          '<span class="fa fa-clipboard"/>',
+          ' To Clipboard',
+        '</button> ',
+      '</p>',
+      '<div class="dataset_table">',
+        '<table class="grid table table-striped table-condensed">',
+          '<tr>',
+            '<th scope="row" id="id_row" data-id="<%= _.escape(item.get("ldda_id")) %>">',
+              'Name',
+            '</th>',
+            '<td>',
+              '<%= _.escape(item.get("name")) %>',
+            '</td>',
+          '</tr>',
+          '<% if (item.get("file_ext")) { %>',
+            '<tr>',
+              '<th scope="row">Data type</th>',
+              '<td>',
+                '<%= _.escape(item.get("file_ext")) %>',
+              '</td>',
+            '</tr>',
+          '<% } %>',
+        '</table>',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+  templateFolderPermissions : function(){
+    return _.template([
+    '<div class="library_style_container">',
+      '<div id="library_toolbar">',
+        '<a href="#/folders/<%= folder.get("parent_id") %>">',
+          '<button data-toggle="tooltip" data-placement="top" title="Go back to the parent folder" class="btn btn-default primary-button" type="button">',
+            '<span class="fa fa-caret-left fa-lg"/>',
+            ' Parent folder',
+          '</button>',
+        '</a>',
+      '</div>',
+      '<h1>',
+        'Folder: <%= _.escape(folder.get("name")) %>',
+      '</h1>',
+      '<div class="alert alert-warning">',
+        '<% if (is_admin) { %>',
+          'You are logged in as an <strong>administrator</strong>, therefore you can manage any folder on this Galaxy instance. Please make sure you understand the consequences.',
+        '<% } else { %>',
+          'You can assign any number of roles to any of the following permission types. However, please consider carefully the implications of such actions.',
+        '<% }%>',
+      '</div>',
+      '<div class="dataset_table">',
+        '<h2>Folder permissions</h2>',
+        '<h4>',
+          'Roles that can manage permissions on this folder',
+        '</h4>',
+        '<div id="manage_perm" class="manage_perm roles-selection"/>',
+        '<div class="alert alert-info roles-selection">',
+          'User with <strong>any</strong> of these roles can manage permissions on this folder.',
+        '</div>',
+        '<h4>',
+          'Roles that can add items to this folder',
+        '</h4>',
+        '<div id="add_perm" class="add_perm roles-selection"/>',
+        '<div class="alert alert-info roles-selection">',
+          'User with <strong>any</strong> of these roles can add items to this folder (folders and datasets).',
+        '</div>',
+        '<h4>',
+          'Roles that can modify this folder',
+        '</h4>',
+        '<div id="modify_perm" class="modify_perm roles-selection"/>',
+        '<div class="alert alert-info roles-selection">',
+          'Users with <strong>any</strong> of these roles can modify this folder (name, etc.).',
+        '</div>',
+        '<button data-toggle="tooltip" data-placement="top" title="Save modifications" class="btn btn-default toolbtn_save_permissions primary-button" type="button">',
+          '<span class="fa fa-floppy-o"/>',
+          ' Save',
+        '</button>',
+      '</div>',
+    '</div>'
+    ].join(''));
+  }
+
+});
+
+return {
+    FolderView: FolderView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-folderlist-view.js b/client/galaxy/scripts/mvc/library/library-folderlist-view.js
new file mode 100644
index 0000000..5221125
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-folderlist-view.js
@@ -0,0 +1,401 @@
+define([
+    "layout/masthead",
+    "utils/utils",
+    "libs/toastr",
+    "mvc/library/library-model",
+    "mvc/library/library-folderrow-view",
+    "mvc/library/library-dataset-view"
+    ],
+function(mod_masthead,
+         mod_utils,
+         mod_toastr,
+         mod_library_model,
+         mod_library_folderrow_view,
+         mod_library_dataset_view
+         ) {
+
+var FolderListView = Backbone.View.extend({
+    el : '#folder_items_element',
+    // progress percentage
+    progress: 0,
+    // progress rate per item
+    progressStep: 1,
+
+    folderContainer: null,
+
+    sort: 'asc',
+
+    events: {
+        'click #select-all-checkboxes'  : 'selectAll',
+        'click .dataset_row'            : 'selectClickedRow',
+        'click .folder_row'             : 'selectClickedRow',
+        'click .sort-folder-link'       : 'sortColumnClicked'
+    },
+
+    collection: null,
+
+    defaults: {
+        include_deleted: false,
+        page_count: null,
+        show_page: null
+    },
+
+    /**
+     * Initialize and fetch the folder from the server.
+     * @param  {object} options an object with options
+     */
+    initialize : function( options ){
+        this.options = _.defaults( this.options || {}, this.defaults, options );
+        this.modal = null;
+        // map of folder item ids to item views = cache
+        this.rowViews = {};
+
+        // create a collection of folder items for this view
+        this.collection = new mod_library_model.Folder();
+
+        // start to listen if someone modifies the collection
+        this.listenTo( this.collection, 'add', this.renderOne );
+        this.listenTo( this.collection, 'remove', this.removeOne );
+        this.listenTo( this.collection, 'sort', this.rePaint );
+        this.listenTo( this.collection, 'reset', this.rePaint );
+
+        this.fetchFolder();
+    },
+
+    fetchFolder: function( options ){
+        options = options || {};
+        this.options.include_deleted = options.include_deleted;
+        var that = this;
+
+        this.folderContainer = new mod_library_model.FolderContainer( { id: this.options.id } );
+        this.folderContainer.url = this.folderContainer.attributes.urlRoot + this.options.id + '/contents';
+
+        if ( this.options.include_deleted ){
+            this.folderContainer.url = this.folderContainer.url + '?include_deleted=true';
+        }
+        this.folderContainer.fetch({
+            success: function( folder_container ) {
+              that.folder_container = folder_container;
+              that.render();
+            },
+            error: function( model, response ){
+              if ( typeof response.responseJSON !== "undefined" ){
+                mod_toastr.error( response.responseJSON.err_msg + ' Click this to go back.', '', { onclick: function() { Galaxy.libraries.library_router.back(); } } );
+              } else {
+                mod_toastr.error( 'An error occurred. Click this to go back.', '', { onclick: function() { Galaxy.libraries.library_router.back(); } } );
+              }
+            }
+        });
+    },
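+    /* A sketch of the request built above (the id is hypothetical): with
+       options.id = 'F123' the folder contents are fetched from
+         <urlRoot>F123/contents
+       and, when deleted items are requested,
+         <urlRoot>F123/contents?include_deleted=true */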
+
+    render: function( options ){
+        this.options = _.extend( this.options, options );
+        var template = this.templateFolder();
+        $(".tooltip").hide();
+
+        // find the upper id in the full path
+        var path = this.folderContainer.attributes.metadata.full_path;
+        var upper_folder_id;
+        if ( path.length === 1 ){ // the library is above us
+          upper_folder_id = 0;
+        } else {
+          upper_folder_id = path[ path.length-2 ][ 0 ];
+        }
+
+        this.$el.html( template( {
+            path: this.folderContainer.attributes.metadata.full_path,
+            parent_library_id: this.folderContainer.attributes.metadata.parent_library_id,
+            id: this.options.id,
+            upper_folder_id: upper_folder_id,
+            order: this.sort
+        } ) );
+
+        // when dataset_id is present render its details too
+        if ( this.options.dataset_id ){
+            var row = _.findWhere( this.rowViews, { id: this.options.dataset_id } );
+            if ( row ) {
+              row.showDatasetDetails();
+            } else {
+              mod_toastr.error( 'Requested dataset not found. Showing folder instead.' );
+            }
+        } else {
+            if ( this.options.show_page === null || this.options.show_page < 1 ){
+                this.options.show_page = 1;
+            }
+            this.paginate();
+        }
+        $("#center [data-toggle]").tooltip();
+        $("#center").css('overflow','auto');
+    },
+
+    paginate: function( options ){
+        this.options = _.extend( this.options, options );
+
+        if ( this.options.show_page === null || this.options.show_page < 1 ){
+            this.options.show_page = 1;
+        }
+        this.options.total_items_count = this.folder_container.get( 'folder' ).models.length;
+        this.options.page_count = Math.ceil( this.options.total_items_count / Galaxy.libraries.preferences.get( 'folder_page_size' ) );
+        var page_start = ( Galaxy.libraries.preferences.get( 'folder_page_size' ) * ( this.options.show_page - 1 ) );
+        var items_to_render = null;
+        items_to_render = this.folder_container.get( 'folder' ).models.slice( page_start, page_start + Galaxy.libraries.preferences.get( 'folder_page_size' ) );
+        this.options.items_shown = items_to_render.length;
+        // User requests page with no items
+        if ( Galaxy.libraries.preferences.get( 'folder_page_size' ) * this.options.show_page > ( this.options.total_items_count + Galaxy.libraries.preferences.get( 'folder_page_size' ) ) ){
+            items_to_render = [];
+        }
+        Galaxy.libraries.folderToolbarView.renderPaginator( this.options );
+        this.collection.reset( items_to_render );
+    },
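+    /* A worked example of the paging math above, assuming a hypothetical
+       'folder_page_size' of 20 and 45 items in the folder:
+         page_count = Math.ceil( 45 / 20 )           // 3 pages
+         page_start for show_page = 2: 20 * (2 - 1)  // items 20..39 shown
+       A request for show_page = 5 gives 20 * 5 = 100 > 45 + 20, so an
+       empty page is rendered instead. */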
+
+    rePaint: function( options ){
+        this.options = _.extend( this.options, options );
+        this.removeAllRows();
+        this.renderAll();
+        this.checkEmptiness();
+    },
+
+    /**
+     * Adds all given models to the collection.
+     * @param {array of Item or FolderAsModel} array of models that should
+     *  be added to the view's collection.
+     */
+    addAll: function( models ){
+        _.each(models, function( model ) {
+            Galaxy.libraries.folderListView.collection.add( model, { sort: false } );
+        });
+        $( "#center [data-toggle]" ).tooltip();
+        this.checkEmptiness();
+        this.postRender();
+    },
+
+    /**
+     * Call this after all models are added to the collection
+     * to ensure that the folder toolbar will show proper options
+     * and that event will be bound on all subviews.
+     */
+    postRender: function(){
+        var fetched_metadata = this.folderContainer.attributes.metadata;
+        fetched_metadata.contains_file_or_folder = typeof this.collection.findWhere({type: 'file'}) !== 'undefined' || typeof this.collection.findWhere({type: 'folder'}) !== 'undefined';
+        Galaxy.libraries.folderToolbarView.configureElements(fetched_metadata);
+    },
+
+    /**
+     * Iterates this view's collection and calls the render
+     * function for each. Also binds the hover behavior.
+     */
+    renderAll: function(){
+        var that = this;
+        _.each( this.collection.models.reverse(), function( model ) {
+          that.renderOne( model );
+        });
+        this.postRender();
+    },
+
+    /**
+     * Creates a view for the given model and adds it to the folder view.
+     * @param {Item or FolderAsModel} model of the view that will be rendered
+     */
+    renderOne: function(model){
+        this.options.contains_file_or_folder = true;
+        model.set('folder_id', this.id);
+        var rowView = new mod_library_folderrow_view.FolderRowView({model: model});
+
+        // save new rowView to cache
+        this.rowViews[model.get('id')] = rowView;
+
+        this.$el.find('#first_folder_item').after(rowView.el);
+    },
+
+    /**
+     * Remove the view of the given model from the DOM.
+     * @param {Item or FolderAsModel} model of the view that will be removed
+     */
+    removeOne: function( model ){
+       this.$el.find('tr').filter(function(){
+           return $(this).data('id') && $(this).data('id') === model.id;
+       }).remove();
+    },
+
+    /**
+     * Remove all dataset and folder row elements from the DOM.
+     */
+    removeAllRows: function(){
+        $('.library-row').remove();
+    },
+
+    /** Checks whether the list is empty and adds/removes the message */
+    checkEmptiness : function(){
+        if ((this.$el.find('.dataset_row').length === 0) && (this.$el.find('.folder_row').length === 0)){
+          this.$el.find('.empty-folder-message').show();
+        } else {
+          this.$el.find('.empty-folder-message').hide();
+        }
+    },
+
+    /** User clicked the table heading to change the sort order */
+    sortColumnClicked : function(event){
+        event.preventDefault();
+        if (this.sort === 'asc'){
+            this.sortFolder('name','desc');
+            this.sort = 'desc';
+        } else {
+            this.sortFolder('name','asc');
+            this.sort = 'asc';
+        }
+        this.renderSortIcon();
+    },
+
+    /**
+     *  Sorts the underlying collection according to the parameters received.
+     *  Currently supports only sorting by name.
+     */
+    sortFolder: function(sort_by, order){
+        // default to asc sort by name
+        if (sort_by === 'undefined' && order === 'undefined'){
+            return this.collection.sortByNameAsc();
+        }
+        if (sort_by === 'name'){
+            if (order === 'asc'){
+                return this.collection.sortByNameAsc();
+            } else if (order === 'desc'){
+                return this.collection.sortByNameDesc();
+            }
+        }
+    },
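+    /* Usage sketch: the heading click handler above toggles between
+         this.sortFolder( 'name', 'asc' );
+         this.sortFolder( 'name', 'desc' );
+       both of which delegate to the sort helpers of the Folder collection
+       from mvc/library/library-model. */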
+
+    /**
+     * User clicked the checkbox in the table heading
+     * @param  {context} event
+     */
+    selectAll : function (event) {
+         var selected = event.target.checked;
+         var that = this;
+         // Iterate each checkbox
+         $(':checkbox', '#folder_list_body').each(function() {
+            this.checked = selected;
+            var $row = $(this.parentElement.parentElement);
+            // Change color of selected/unselected
+            if (selected) {
+              that.makeDarkRow($row);
+            } else {
+              that.makeWhiteRow($row);
+            }
+        });
+     },
+
+    /**
+     * Check checkbox if user clicks on the whole row or
+     *  on the checkbox itself
+     */
+    selectClickedRow : function (event) {
+        var checkbox = '';
+        var $row;
+        var source;
+        if (event.target.localName === 'input'){
+            checkbox = event.target;
+            $row = $(event.target.parentElement.parentElement);
+            source = 'input';
+        } else if (event.target.localName === 'td') {
+            $row = $(event.target.parentElement);
+            checkbox = $row.find(':checkbox')[0];
+            source = 'td';
+        }
+        if (checkbox.checked){
+            if (source==='td'){
+                checkbox.checked = false;
+                this.makeWhiteRow($row);
+            } else if (source==='input') {
+                this.makeDarkRow($row);
+            }
+        } else {
+            if (source==='td'){
+                checkbox.checked = true;
+                this.makeDarkRow($row);
+            } else if (source==='input') {
+                this.makeWhiteRow($row);
+            }
+        }
+    },
+
+    makeDarkRow: function($row){
+        $row.removeClass('light').addClass('dark');
+        $row.find('a').removeClass('light').addClass('dark');
+        $row.find('.fa-file-o').removeClass('fa-file-o').addClass('fa-file');
+        $row.find('.fa-folder-o').removeClass('fa-folder-o').addClass('fa-folder');
+    },
+
+    makeWhiteRow: function($row){
+        $row.removeClass('dark').addClass('light');
+        $row.find('a').removeClass('dark').addClass('light');
+        $row.find('.fa-file').removeClass('fa-file').addClass('fa-file-o');
+        $row.find('.fa-folder').removeClass('fa-folder').addClass('fa-folder-o');
+    },
+
+    renderSortIcon: function(){
+        if (this.sort === 'asc'){
+            $('.sort-icon').removeClass('fa-sort-alpha-desc').addClass('fa-sort-alpha-asc');
+        } else {
+            $('.sort-icon').removeClass('fa-sort-alpha-asc').addClass('fa-sort-alpha-desc');
+        }
+    },
+
+    templateFolder : function (){
+      return _.template([
+      // BREADCRUMBS
+      '<ol class="breadcrumb">',
+        '<li><a title="Return to the list of libraries" href="#">Libraries</a></li>',
+        '<% _.each(path, function(path_item) { %>',
+          '<% if (path_item[0] != id) { %>',
+            '<li><a title="Return to this folder" href="#/folders/<%- path_item[0] %>"><%- path_item[1] %></a> </li> ',
+          '<% } else { %>',
+            '<li class="active"><span title="You are in this folder"><%- path_item[1] %></span></li>',
+          '<% } %>',
+        '<% }); %>',
+      '</ol>',
+
+      // FOLDER CONTENT
+      '<table data-library-id="<%- parent_library_id  %>" id="folder_table" class="grid table table-condensed">',
+        '<thead>',
+          '<th class="button_heading"></th>',
+          '<th style="text-align: center; width: 20px; " title="Check to select all datasets"><input id="select-all-checkboxes" style="margin: 0;" type="checkbox"></th>',
+          '<th><a class="sort-folder-link" title="Click to reverse order" href="#">name</a> <span title="Sorted alphabetically" class="sort-icon fa fa-sort-alpha-<%- order %>"></span></th>',
+          '<th style="width:25%;">description</th>',
+          '<th style="width:5%;">data type</th>',
+          '<th style="width:10%;">size</th>',
+          '<th style="width:160px;">time updated (UTC)</th>',
+          '<th style="width:10%;"></th> ',
+        '</thead>',
+        '<tbody id="folder_list_body">',
+          '<tr id="first_folder_item">',
+          '<td>',
+            '<a href="#<% if (upper_folder_id !== 0){ print("folders/" + upper_folder_id)} %>" title="Go to parent folder" class="btn_open_folder btn btn-default btn-xs">..<a>',
+          '</td>',
+          '<td></td>',
+          '<td></td>',
+          '<td></td>',
+          '<td></td>',
+          '<td></td>',
+          '<td></td>',
+          '<td></td>',
+          '</tr>',
+        '</tbody>',
+      '</table>',
+      '<div class="empty-folder-message" style="display:none;">',
+          'This folder is either empty or you do not have the access permissions required to see its contents. If you expected something to show up,',
+          ' please consult the <a href="https://wiki.galaxyproject.org/Admin/DataLibraries/LibrarySecurity" target="_blank">library security wiki page</a>',
+          ' or visit the <a href="https://biostar.usegalaxy.org/" target="_blank">Galaxy support site</a>.',
+      '</div>'
+      ].join(''));
+    }
+
+});
+
+return {
+    FolderListView: FolderListView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-folderrow-view.js b/client/galaxy/scripts/mvc/library/library-folderrow-view.js
new file mode 100644
index 0000000..ebb2d5e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-folderrow-view.js
@@ -0,0 +1,381 @@
+define([
+    "libs/toastr",
+    "mvc/library/library-model",
+    "mvc/library/library-dataset-view"],
+function(mod_toastr,
+         mod_library_model,
+         mod_library_dataset_view) {
+
+var FolderRowView = Backbone.View.extend({
+
+  events: {
+    'click .undelete_dataset_btn'    : 'undeleteDataset',
+    'click .undelete_folder_btn'     : 'undeleteFolder',
+    'click .edit_folder_btn'         : 'startModifications',
+    'click .cancel_folder_btn'       : 'cancelModifications',
+    'click .save_folder_btn'         : 'saveModifications',
+  },
+
+  defaults: {
+    type: null,
+    visibility_config: {
+      edit_folder_btn: true,
+      save_folder_btn: false,
+      cancel_folder_btn: false,
+      permission_folder_btn: true
+    },
+    edit_mode: false
+  },
+
+  initialize : function(options){
+    this.options = _.defaults( options || {}, this.defaults );
+    this.render(this.options);
+  },
+
+  render: function(options){
+    this.options = _.extend( this.options, options );
+    var folder_item = this.options.model;
+    var template = null;
+
+    if (folder_item.get('type') === 'folder' || folder_item.get('model_class') === 'LibraryFolder'){
+      this.options.type = 'folder';
+      this.prepareButtons(folder_item);
+      if (folder_item.get('deleted')){
+        template = this.templateRowDeletedFolder();
+      } else {
+        template = this.templateRowFolder();
+      }
+    } else if (folder_item.get('type') === 'file' || folder_item.get('model_class') === 'LibraryDatasetDatasetAssociation' || folder_item.get('model_class') === 'LibraryDataset'){
+      this.options.type = 'file';
+      if (folder_item.get('deleted')){
+        template = this.templateRowDeletedFile();
+      } else {
+        template = this.templateRowFile();
+      }
+    } else {
+      Galaxy.emit.error('Unknown library item type found.');
+      Galaxy.emit.error(folder_item.get('type') || folder_item.get('model_class'));
+    }
+    this.setElement(template({content_item: folder_item, edit_mode: this.options.edit_mode, button_config: this.options.visibility_config}));
+    this.$el.show();
+    return this;
+  },
+
+  /**
+   * Modify the visibility of buttons for
+   * the filling of the row template of a given folder.
+   */
+  prepareButtons: function(folder){
+    var vis_config = this.options.visibility_config;
+    if (this.options.edit_mode === false){
+      vis_config.save_folder_btn = false;
+      vis_config.cancel_folder_btn = false;
+      if (folder.get('deleted') === true ){
+          vis_config.edit_folder_btn = false;
+          vis_config.permission_folder_btn = false;
+      } else if (folder.get('deleted') === false ) {
+        vis_config.save_folder_btn = false;
+        vis_config.cancel_folder_btn = false;
+        if (folder.get('can_modify') === true){
+          vis_config.edit_folder_btn = true;
+        }
+        if (folder.get('can_manage') === true){
+          vis_config.permission_folder_btn = true;
+        }
+      }
+    } else if (this.options.edit_mode === true){
+      vis_config.edit_folder_btn = false;
+      vis_config.permission_folder_btn = false;
+      vis_config.save_folder_btn = true;
+      vis_config.cancel_folder_btn = true;
+    }
+    this.options.visibility_config = vis_config;
+  },
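+  /* A rough summary of the visibility rules implemented above:
+       - edit mode on: only the Save and Cancel buttons are shown;
+       - deleted folder: Edit and Permissions are hidden as well;
+       - otherwise: Save/Cancel stay hidden, Edit is shown when the user
+         can_modify and Permissions when the user can_manage the folder. */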
+
+  /* Show the page with dataset details. */
+  showDatasetDetails : function(){
+    Galaxy.libraries.datasetView = new mod_library_dataset_view.LibraryDatasetView({id: this.id});
+  },
+
+  /* Undelete the dataset on server and render the row again. */
+  undeleteDataset : function(event){
+    $(".tooltip").hide();
+    var that = this;
+    var dataset_id = $(event.target).closest('tr').data('id');
+    var dataset = Galaxy.libraries.folderListView.collection.get(dataset_id);
+    dataset.url = dataset.urlRoot + dataset.id + '?undelete=true';
+    dataset.destroy({
+        success : function(model, response){
+          Galaxy.libraries.folderListView.collection.remove(dataset_id);
+          var updated_dataset = new mod_library_model.Item(response);
+          Galaxy.libraries.folderListView.collection.add(updated_dataset);
+          Galaxy.libraries.folderListView.collection.sortByNameAsc();
+          mod_toastr.success('Dataset undeleted. Click this to see it.', '', {onclick: function() {
+            var folder_id = that.model.get('folder_id');
+            window.location = Galaxy.root + 'library/list#folders/' + folder_id + '/datasets/' + that.id;
+          }});
+        },
+        error : function(model, response){
+          if (typeof response.responseJSON !== "undefined"){
+            mod_toastr.error('Dataset was not undeleted. ' + response.responseJSON.err_msg);
+          } else {
+            mod_toastr.error('An error occurred! Dataset was not undeleted. Please try again.');
+          }
+        }
+    });
+  },
+
+  /* Undelete the folder on server and render the row again. */
+  undeleteFolder : function(event){
+    $(".tooltip").hide();
+    var that = this;
+    var folder_id = $(event.target).closest('tr').data('id');
+    var folder = Galaxy.libraries.folderListView.collection.get(folder_id);
+    folder.url = folder.urlRoot + folder.id + '?undelete=true';
+    folder.destroy({
+        success : function(model, response){
+          Galaxy.libraries.folderListView.collection.remove(folder_id);
+          var updated_folder = new mod_library_model.FolderAsModel(response);
+          Galaxy.libraries.folderListView.collection.add(updated_folder);
+          Galaxy.libraries.folderListView.collection.sortByNameAsc();
+          mod_toastr.success('Folder undeleted.');
+        },
+        error : function(model, response){
+          if (typeof response.responseJSON !== "undefined"){
+            mod_toastr.error('Folder was not undeleted. ' + response.responseJSON.err_msg);
+          } else {
+            mod_toastr.error('An error occurred! Folder was not undeleted. Please try again.');
+          }
+        }
+    });
+  },
+
+
+  /* User clicked the 'edit' button on row so render the row as editable. */
+  startModifications: function(){
+    this.options.edit_mode = true;
+    this.repaint();
+  },
+
+  /* User clicked the 'cancel' button so render normal row */
+  cancelModifications: function(){
+    this.options.edit_mode = false;
+    this.repaint();
+  },
+
+  saveModifications: function(){
+    var folder = Galaxy.libraries.folderListView.collection.get(this.$el.data('id'));
+    var is_changed = false;
+    var new_name = this.$el.find('.input_folder_name').val();
+    if (typeof new_name !== 'undefined' && new_name !== folder.get('name') ){
+        if (new_name.length > 2){
+            folder.set("name", new_name);
+            is_changed = true;
+        } else{
+            mod_toastr.warning('Folder name has to be at least 3 characters long.');
+            return;
+        }
+    }
+    var new_description = this.$el.find('.input_folder_description').val();
+    if (typeof new_description !== 'undefined' && new_description !== folder.get('description') ){
+        folder.set("description", new_description);
+        is_changed = true;
+    }
+    if (is_changed){
+      var row_view = this;
+        folder.save(null, {
+          patch: true,
+          success: function(folder) {
+            row_view.options.edit_mode = false;
+            row_view.repaint(folder);
+            mod_toastr.success('Changes to folder saved.');
+          },
+          error: function(model, response){
+            if (typeof response.responseJSON !== "undefined"){
+              mod_toastr.error(response.responseJSON.err_msg);
+            } else {
+              mod_toastr.error('An error occurred while attempting to update the folder.');
+            }
+          }
+        });
+    } else {
+      this.options.edit_mode = false;
+      this.repaint(folder);
+      mod_toastr.info('Nothing has changed.');
+    }
+  },
+
+  repaint: function(){
+    /* need to hide manually because of the element removal in setElement
+    invoked in render() */
+    $(".tooltip").hide();
+    /* we need to store the old element to be able to replace it with
+    new one */
+    var old_element = this.$el;
+    /* if user canceled the folder param is undefined,
+      if user saved and succeeded the updated folder is rendered */
+    this.render();
+    old_element.replaceWith(this.$el);
+    /* now we attach new tooltips to the newly created row element */
+    this.$el.find("[data-toggle]").tooltip();
+  },
+
+  templateRowFolder: function() {
+    return _.template([
+      '<tr class="folder_row light library-row" data-id="<%- content_item.id %>">',
+        '<td>',
+          '<span title="Folder" class="fa fa-folder-o"/>',
+        '</td>',
+        '<td style="text-align: center; "><input style="margin: 0;" type="checkbox"></td>',
+        '<% if(!edit_mode) { %>',
+          '<td>',
+            '<a href="#folders/<%- content_item.id %>"><%- content_item.get("name") %></a>',
+          '</td>',
+          '<td>',
+            '<%- content_item.get("description") %>',
+          '</td>',
+        '<% } else if(edit_mode){ %>',
+          '<td><textarea rows="4" class="form-control input_folder_name" placeholder="name" ><%- content_item.get("name") %></textarea></td>',
+          '<td><textarea rows="4" class="form-control input_folder_description" placeholder="description" ><%- content_item.get("description") %></textarea></td>',
+        '<% } %>',
+          '<td>folder</td>',
+          '<td></td>',
+          '<td>',
+            '<%= _.escape(content_item.get("update_time")) %>',
+          '</td>',
+          '<td>',
+            '<% if(edit_mode) { %>',  // start edit mode
+              '<button data-toggle="tooltip" data-placement="top" title="Save changes" class="primary-button btn-xs save_folder_btn" type="button" style="<% if(button_config.save_folder_btn === false) { print("display:none;") } %>">',
+                '<span class="fa fa-floppy-o"/>',
+                ' Save',
+              '</button>',
+              '<button data-toggle="tooltip" data-placement="top" title="Discard changes" class="primary-button btn-xs cancel_folder_btn" type="button" style="<% if(button_config.cancel_folder_btn === false) { print("display:none;") } %>">',
+                '<span class="fa fa-times"/>',
+                ' Cancel',
+              '</button>',
+            '<% } else if (!edit_mode){%>',  // start no edit mode
+              '<button data-toggle="tooltip" data-placement="top" title="Modify \'<%- content_item.get("name") %>\'" class="primary-button btn-xs edit_folder_btn" type="button" style="<% if(button_config.edit_folder_btn === false) { print("display:none;") } %>">',
+                '<span class="fa fa-pencil"/>',
+              '</button>',
+              '<a href="#/folders/<%- content_item.id %>/permissions">',
+                '<button data-toggle="tooltip" data-placement="top" class="primary-button btn-xs permission_folder_btn" title="Manage \'<%- content_item.get("name") %>\'" style="<% if(button_config.permission_folder_btn === false) { print("display:none;") } %>">',
+                  '<span class="fa fa-group"/>',
+                '</button>',
+              '</a>',
+            '<% } %>',  //end no edit mode
+          '</td>',
+      '</tr>'
+        ].join(''));
+  },
+
+  templateRowFile: function(){
+    return _.template([
+    '<tr class="dataset_row light library-row" data-id="<%- content_item.id %>">',
+      '<td>',
+        '<span title="Dataset" class="fa fa-file-o"/>',
+      '</td>',
+      '<td style="text-align: center; ">',
+        '<input style="margin: 0;" type="checkbox">',
+      '</td>',
+      '<td>',
+        '<a href="#folders/<%- content_item.get("folder_id") %>/datasets/<%- content_item.id %>" class="library-dataset">',
+          '<%- content_item.get("name") %>',
+        '</a>',
+      '</td>',
+      '<td><%- content_item.get("message") %></td>',
+      '<td><%= _.escape(content_item.get("file_ext")) %></td>',
+      '<td><%= _.escape(content_item.get("file_size")) %></td>',
+      '<td><%= _.escape(content_item.get("update_time")) %></td>',
+      '<td>',
+        '<% if (content_item.get("is_unrestricted")) { %>',
+          '<span data-toggle="tooltip" data-placement="top" title="Unrestricted dataset" style="color:grey;" class="fa fa-globe fa-lg"/>',
+        '<% } %>',
+        '<% if (content_item.get("is_private")) { %>',
+          '<span data-toggle="tooltip" data-placement="top" title="Private dataset" style="color:grey;" class="fa fa-key fa-lg"/>',
+        '<% } %>',
+        '<% if ((content_item.get("is_unrestricted") === false) && (content_item.get("is_private") === false)) { %>',
+          '<span data-toggle="tooltip" data-placement="top" title="Restricted dataset" style="color:grey;" class="fa fa-shield fa-lg"/>',
+        '<% } %>',
+        '<% if (content_item.get("can_manage")) { %>',
+          '<a href="#folders/<%- content_item.get("folder_id") %>/datasets/<%- content_item.id %>/permissions">',
+            '<button data-toggle="tooltip" data-placement="top" class="primary-button btn-xs permissions-dataset-btn" title="Manage permissions">',
+              '<span class="fa fa-group"/>',
+            '</button>',
+          '</a>',
+        '<% } %>',
+      '</td>',
+    '</tr>'
+    ].join(''));
+  },
+
+  templateRowDeletedFile: function(){
+    return _.template([
+    '<tr class="active deleted_dataset library-row" data-id="<%- content_item.id %>">',
+      '<td>',
+        '<span title="Dataset" class="fa fa-file-o"/>',
+      '</td>',
+      '<td></td>',
+      '<td style="color:grey;">',
+        '<%- content_item.get("name") %>',
+      '</td>',
+      '<td>',
+        '<%- content_item.get("message") %>',
+      '</td>',
+      '<td>',
+        '<%= _.escape(content_item.get("file_ext")) %>',
+      '</td>',
+      '<td>',
+        '<%= _.escape(content_item.get("file_size")) %>',
+      '</td>',
+      '<td>',
+        '<%= _.escape(content_item.get("update_time")) %>',
+      '</td>',
+      '<td>',
+        '<span data-toggle="tooltip" data-placement="top" title="Marked deleted" style="color:grey;" class="fa fa-ban fa-lg"/>',
+        '<button data-toggle="tooltip" data-placement="top" title="Undelete <%- content_item.get("name") %>" class="primary-button btn-xs undelete_dataset_btn" type="button" style="margin-left:1em;">',
+          '<span class="fa fa-unlock"/>',
+          ' Undelete',
+        '</button>',
+      '</td>',
+    '</tr>'
+    ].join(''));
+  },
+
+  templateRowDeletedFolder: function(){
+    return _.template([
+    '<tr class="active deleted_folder light library-row" data-id="<%- content_item.id %>">',
+      '<td>',
+        '<span title="Folder" class="fa fa-folder-o"/>',
+      '</td>',
+      '<td></td>',
+      '<td style="color:grey;">',
+        '<%- content_item.get("name") %>',
+      '</td>',
+      '<td>',
+        '<%- content_item.get("description") %>',
+      '</td>',
+      '<td>',
+        'folder',
+      '</td>',
+      '<td></td>',
+      '<td>',
+        '<%= _.escape(content_item.get("update_time")) %>',
+      '</td>',
+      '<td>',
+        '<span data-toggle="tooltip" data-placement="top" title="Marked deleted" style="color:grey;" class="fa fa-ban fa-lg"/>',
+        '<button data-toggle="tooltip" data-placement="top" title="Undelete <%- content_item.get("name") %>" class="primary-button btn-xs undelete_folder_btn" type="button" style="margin-left:1em;">',
+          '<span class="fa fa-unlock"/>',
+          ' Undelete',
+        '</button>',
+      '</td>',
+    '</tr>'
+    ].join(''));
+  }
+
+});
+
+return {
+    FolderRowView: FolderRowView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-foldertoolbar-view.js b/client/galaxy/scripts/mvc/library/library-foldertoolbar-view.js
new file mode 100644
index 0000000..411bad9
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-foldertoolbar-view.js
@@ -0,0 +1,1440 @@
+define([
+    "layout/masthead",
+    "utils/utils",
+    "libs/toastr",
+    "mvc/library/library-model",
+    "mvc/ui/ui-select"
+    ],
+function( mod_masthead,
+          mod_utils,
+          mod_toastr,
+          mod_library_model,
+          mod_select
+        ){
+
+var FolderToolbarView = Backbone.View.extend({
+  el: '#center',
+
+  events: {
+    'click #toolbtn_create_folder'        : 'createFolderFromModal',
+    'click #toolbtn_bulk_import'          : 'modalBulkImport',
+    'click #include_deleted_datasets_chk' : 'checkIncludeDeleted',
+    'click #toolbtn_bulk_delete'          : 'deleteSelectedItems',
+    'click .toolbtn-show-locinfo'         : 'showLocInfo',
+    'click .page_size_prompt'             : 'showPageSizePrompt'
+  },
+
+  defaults: {
+    'can_add_library_item'    : false,
+    'contains_file_or_folder' : false,
+    'chain_call_control'      : {
+                                'total_number'  : 0,
+                                'failed_number' : 0
+                              },
+    'disabled_jstree_element' : 'folders'
+  },
+
+  modal : null,
+
+  // directory browsing object
+  jstree: null,
+
+  // user's histories
+  histories : null,
+
+  // genome select
+  select_genome : null,
+
+  // extension select
+  select_extension : null,
+
+  // extension types
+  list_extensions :[],
+
+  // datatype placeholder for extension auto-detection
+  auto: {
+      id          : 'auto',
+      text        : 'Auto-detect',
+      description : 'This system will try to detect the file type automatically.' +
+                    ' If your file is not detected properly as one of the known formats,' +
+                    ' it most likely means that it has some format problems (e.g., different' +
+                    ' number of columns on different rows). You can still coerce the system' +
+                    ' to set your data to the format you think it should be.' +
+                    ' You can also upload compressed files, which will automatically be decompressed.'
+  },
+
+  // genomes
+  list_genomes : [],
+
+  initialize: function(options){
+    this.options = _.defaults( options || {}, this.defaults );
+    this.fetchExtAndGenomes();
+    this.render();
+  },
+
+  render: function(options){
+    this.options = _.extend( this.options, options );
+    var toolbar_template = this.templateToolBar();
+    var template_defaults = {
+        id: this.options.id,
+        is_admin: false,
+        is_anonym: true,
+        mutiple_add_dataset_options: false
+    };
+    if (Galaxy.user){
+      template_defaults.is_admin = Galaxy.user.isAdmin();
+      template_defaults.is_anonym = Galaxy.user.isAnonymous();
+      if ( Galaxy.config.user_library_import_dir !== null || Galaxy.config.allow_library_path_paste !== false || Galaxy.config.library_import_dir !== null ){
+        template_defaults.mutiple_add_dataset_options = true;
+      }
+    }
+    this.$el.html(toolbar_template(template_defaults));
+  },
+
+  /**
+   * Called from FolderListView when needed.
+   * @param  {object} options common options
+   */
+  renderPaginator: function( options ){
+      this.options = _.extend( this.options, options );
+      var paginator_template = this.templatePaginator();
+      $("body").find( '.folder-paginator' ).html( paginator_template({
+          id: this.options.id,
+          show_page: parseInt( this.options.show_page ),
+          page_count: parseInt( this.options.page_count ),
+          total_items_count: this.options.total_items_count,
+          items_shown: this.options.items_shown
+      }));
+  },
+
+  configureElements: function(options){
+    this.options = _.extend(this.options, options);
+
+    if (this.options.can_add_library_item === true){
+      $('.add-library-items').show();
+    } else{
+      $('.add-library-items').hide();
+    }
+    if (this.options.contains_file_or_folder === true){
+      if (Galaxy.user){
+        if (!Galaxy.user.isAnonymous()){
+          $('.logged-dataset-manipulation').show();
+          $('.dataset-manipulation').show();
+        } else {
+          $('.dataset-manipulation').show();
+          $('.logged-dataset-manipulation').hide();
+        }
+      } else {
+        $('.logged-dataset-manipulation').hide();
+        $('.dataset-manipulation').hide();
+      }
+    } else {
+      $('.logged-dataset-manipulation').hide();
+      $('.dataset-manipulation').hide();
+    }
+    this.$el.find('[data-toggle]').tooltip();
+  },
+
+  // shows modal for creating folder
+  createFolderFromModal: function( event ){
+    event.preventDefault();
+    event.stopPropagation();
+
+    // create modal
+    var self = this;
+    var template = this.templateNewFolderInModal();
+    this.modal = Galaxy.modal;
+    this.modal.show({
+        closing_events  : true,
+        title           : 'Create New Folder',
+        body            : template(),
+        buttons         : {
+            'Create'    : function() {self.create_new_folder_event();},
+            'Close'     : function() {Galaxy.modal.hide();}
+        }
+    });
+  },
+
+  // create the new folder from modal
+  create_new_folder_event: function(){
+      var folderDetails = this.serialize_new_folder();
+      if (this.validate_new_folder(folderDetails)){
+          var folder = new mod_library_model.FolderAsModel();
+          var url_items = Backbone.history.fragment.split('/');
+          var current_folder_id = url_items[url_items.length-1];
+          folder.url = folder.urlRoot + current_folder_id;
+
+          folder.save(folderDetails, {
+            success: function (folder) {
+              Galaxy.modal.hide();
+              mod_toastr.success('Folder created.');
+              folder.set({'type' : 'folder'});
+              Galaxy.libraries.folderListView.collection.add(folder);
+            },
+            error: function(model, response){
+              Galaxy.modal.hide();
+              if (typeof response.responseJSON !== "undefined"){
+                mod_toastr.error(response.responseJSON.err_msg);
+              } else {
+                mod_toastr.error('An error occurred.');
+              }
+            }
+          });
+      } else {
+          mod_toastr.error('Folder\'s name is missing.');
+      }
+      return false;
+  },
+
+  // serialize data from the modal
+  serialize_new_folder : function(){
+      return {
+          name: $("input[name='Name']").val(),
+          description: $("input[name='Description']").val()
+      };
+  },
+
+  // validate new folder info
+  validate_new_folder: function(folderDetails){
+      return folderDetails.name !== '';
+  },
+
+
+  // show bulk import modal
+  modalBulkImport : function(){
+      var checkedValues = $('#folder_table').find(':checked');
+      if(checkedValues.length === 0){
+          mod_toastr.info('You must select some datasets first.');
+      } else {
+          this.refreshUserHistoriesList(function(that){
+            var template = that.templateBulkImportInModal();
+            that.modal = Galaxy.modal;
+            that.modal.show({
+                closing_events  : true,
+                title           : 'Import into History',
+                body            : template({histories : that.histories.models}),
+                buttons         : {
+                    'Import'    : function() {that.importAllIntoHistory();},
+                    'Close'     : function() {Galaxy.modal.hide();}
+                }
+            });
+          });
+      }
+  },
+
+  refreshUserHistoriesList: function(callback){
+    var that = this;
+    this.histories = new mod_library_model.GalaxyHistories();
+    this.histories.fetch({
+      success: function (){
+        callback(that);
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error(response.responseJSON.err_msg);
+        } else {
+          mod_toastr.error('An error occurred.');
+        }
+      }
+    });
+  },
+
+  /**
+   * Import all selected datasets into history.
+   */
+  importAllIntoHistory : function (){
+    this.modal.disableButton('Import');
+    var new_history_name = this.modal.$('input[name=history_name]').val();
+    var that = this;
+    if (new_history_name !== ''){
+      $.post( Galaxy.root + 'api/histories', {name: new_history_name})
+        .done(function( new_history ) {
+          that.options.last_used_history_id = new_history.id;
+          that.processImportToHistory(new_history.id, new_history.name);
+        })
+        .fail(function( xhr, status, error ) {
+          mod_toastr.error('An error occurred.');
+        })
+        .always(function() {
+          that.modal.enableButton('Import');
+        });
+    } else {
+      var history_id = $("select[name=dataset_import_bulk] option:selected").val();
+      this.options.last_used_history_id = history_id;
+      var history_name = $("select[name=dataset_import_bulk] option:selected").text();
+      this.processImportToHistory(history_id, history_name);
+      this.modal.enableButton('Import');
+    }
+  },
+
+  processImportToHistory: function( history_id, history_name ){
+    var dataset_ids = [];
+    var folder_ids = [];
+    $('#folder_table').find(':checked').each(function(){
+        if ($(this.parentElement.parentElement).data('id') !== '' && this.parentElement.parentElement.classList.contains('dataset_row') ) {
+            dataset_ids.push($(this.parentElement.parentElement).data('id'));
+        } else if ($(this.parentElement.parentElement).data('id') !== '' && this.parentElement.parentElement.classList.contains('folder_row') ) {
+            folder_ids.push($(this.parentElement.parentElement).data('id'));
+        }
+    });
+    // prepare the dataset objects to be imported
+    var datasets_to_import = [];
+    for (var i = dataset_ids.length - 1; i >= 0; i--) {
+        var library_dataset_id = dataset_ids[i];
+        var historyItem = new mod_library_model.HistoryItem();
+        historyItem.url = historyItem.urlRoot + history_id + '/contents';
+        historyItem.content = library_dataset_id;
+        historyItem.source = 'library';
+        datasets_to_import.push(historyItem);
+    }
+
+    // prepare the folder objects to be imported
+    var folders_to_import = [];
+    for (var i = folder_ids.length - 1; i >= 0; i--) {
+        var library_folder_id = folder_ids[i];
+        var historyItem = new mod_library_model.HistoryItem();
+        historyItem.url = historyItem.urlRoot + history_id + '/contents';
+        historyItem.content = library_folder_id;
+        historyItem.source = 'library_folder';
+        datasets_to_import.push(historyItem);
+    }
+
+    this.initChainCallControl( { length: datasets_to_import.length, action: 'to_history', history_name: history_name } );
+    // set the used history as current so the user will see the one
+    // they last imported into in the history panel on the 'analysis' page
+    jQuery.getJSON( Galaxy.root + 'history/set_as_current?id=' + history_id );
+    this.chainCallImportingIntoHistory( datasets_to_import, history_name );
+  },
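+  /* Sketch of the objects queued above (ids are hypothetical): importing
+     library dataset 'ld2' into history 'h1' posts to
+       <urlRoot>h1/contents
+     with { content: 'ld2', source: 'library' }; whole folders use
+     source: 'library_folder' instead. */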
+
+  /**
+   * Update the progress bar in modal window.
+   */
+  updateProgress: function(){
+      this.progress += this.progressStep;
+      $( '.progress-bar-import' ).width( Math.round( this.progress ) + '%' );
+      var txt_representation = Math.round( this.progress ) + '% Complete';
+      $( '.completion_span' ).text( txt_representation );
+  },
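+  /* E.g., with four queued requests initChainCallControl() below sets
+     progressStep to 100 / 4 = 25, so each call of updateProgress() widens
+     the bar by 25% and refreshes the '% Complete' label. */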
+
+  /**
+   * download selected datasets
+   * @param  {str} folder_id id of the current folder
+   * @param  {str} format    requested archive format
+   */
+  download : function( folder_id, format ){
+    var dataset_ids = [];
+    var folder_ids = [];
+        $( '#folder_table' ).find( ':checked' ).each( function(){
+            if ( $(this.parentElement.parentElement).data('id') !== '' && this.parentElement.parentElement.classList.contains('dataset_row') ) {
+                dataset_ids.push( $(this.parentElement.parentElement).data('id') );
+            } else if ( $(this.parentElement.parentElement).data('id') !== '' && this.parentElement.parentElement.classList.contains('folder_row') ) {
+                folder_ids.push( $(this.parentElement.parentElement).data('id') );
+            }
+        } );
+    var url = Galaxy.root + 'api/libraries/datasets/download/' + format;
+    var data = { 'ld_ids' : dataset_ids, 'folder_ids' : folder_ids };
+    this.processDownload( url, data, 'get' );
+  },
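+  /* Usage sketch (the format is one the API accepts, e.g. 'zip'):
+       this.download( folder_id, 'zip' );
+     submits the checked ld_ids/folder_ids to
+       api/libraries/datasets/download/zip
+     through the hidden form built in processDownload() below. */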
+
+  /**
+   * Create hidden form and submit it through POST
+   * to initialize the download.
+   * @param  {str} url    url to call
+   * @param  {obj} data   data to include in the request
+   * @param  {str} method method of the request
+   */
+  processDownload: function( url, data, method ){
+    if ( url && data ){
+      // data can be string of parameters or array/object
+      data = typeof data === 'string' ? data : $.param( data );
+      // split params into form inputs
+      var inputs = '';
+      $.each( data.split( '&' ), function(){
+              var pair = this.split( '=' );
+              inputs+='<input type="hidden" name="'+ pair[0] +'" value="'+ pair[1] +'" />';
+      });
+      // send request
+      $('<form action="'+ url +'" method="'+ (method||'post') +'">'+inputs+'</form>')
+      .appendTo( 'body' ).submit().remove();
+      mod_toastr.info( 'Your download will begin soon.' );
+    } else {
+      mod_toastr.error( 'An error occurred.' );
+    }
+  },
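+  /* For example (a sketch): data = { ld_ids: ['a', 'b'] } is serialized by
+     $.param() to 'ld_ids%5B%5D=a&ld_ids%5B%5D=b', which the loop above
+     splits on '&' into one hidden <input> per key/value pair before the
+     form is submitted and removed again. */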
+
+  addFilesFromHistoryModal: function(){
+    this.refreshUserHistoriesList( function( self ){
+      self.modal = Galaxy.modal;
+      var template_modal = self.templateAddFilesFromHistory();
+      var folder_name = self.options.full_path[self.options.full_path.length - 1][1];
+      self.modal.show({
+          closing_events  : true,
+          title           : 'Adding datasets from your history to folder ' + folder_name,
+          body            : template_modal({histories: self.histories.models}),
+          buttons         : {
+              'Add'       : function() {self.addAllDatasetsFromHistory();},
+              'Close'     : function() {Galaxy.modal.hide();}
+          },
+          closing_callback: function(){
+            Galaxy.libraries.library_router.back();
+          }
+      });
+
+      // user should always have a history, even anonymous user
+      if (self.histories.models.length > 0){
+        self.fetchAndDisplayHistoryContents(self.histories.models[0].id);
+        $( "#dataset_add_bulk" ).change(function(event) {
+          self.fetchAndDisplayHistoryContents(event.target.value);
+        });
+      } else {
+        mod_toastr.error( 'An error occurred.' );
+      }
+    });
+  },
+
+  /**
+   * Create modal for importing from Galaxy path.
+   * This feature is admin-only.
+   */
+  importFilesFromPathModal: function(){
+    var that = this;
+    this.modal = Galaxy.modal;
+    var template_modal = this.templateImportPathModal();
+    this.modal.show({
+        closing_events  : true,
+        title           : 'Please enter paths to import',
+        body            : template_modal({}),
+        buttons         : {
+            'Import'    : function() { that.importFromPathsClicked(that); },
+            'Close'     : function() { Galaxy.modal.hide(); }
+        },
+        closing_callback: function(){
+          //  TODO: should not trigger routes outside of the router
+          Galaxy.libraries.library_router.navigate( 'folders/' + that.id, { trigger: true } );
+        }
+    });
+    this.renderSelectBoxes();
+  },
+
+  /**
+   * Request all extensions and genomes from Galaxy
+   * and save them sorted in arrays.
+   */
+  fetchExtAndGenomes: function(){
+    var that = this;
+    mod_utils.get({
+        url      :  Galaxy.root + "api/datatypes?extension_only=False",
+        success  :  function( datatypes ) {
+                        that.list_extensions = [];
+                        for (var key in datatypes) {
+                            that.list_extensions.push({
+                                id              : datatypes[key].extension,
+                                text            : datatypes[key].extension,
+                                description     : datatypes[key].description,
+                                description_url : datatypes[key].description_url
+                            });
+                        }
+                        that.list_extensions.sort(function(a, b) {
+                            return a.id > b.id ? 1 : a.id < b.id ? -1 : 0;
+                        });
+                        that.list_extensions.unshift(that.auto);
+                    },
+        cache    : true
+      });
+    mod_utils.get({
+        url     :    Galaxy.root + "api/genomes",
+        success : function( genomes ) {
+                    that.list_genomes = [];
+                    for ( var key in genomes ) {
+                        that.list_genomes.push({
+                            id      : genomes[key][1],
+                            text    : genomes[key][0]
+                        });
+                    }
+                    that.list_genomes.sort(function(a, b) {
+                        return a.id > b.id ? 1 : a.id < b.id ? -1 : 0;
+                    });
+                },
+        cache   : true
+    });
+  },
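+  /* A sketch of the resulting entries (values are illustrative only):
+       list_extensions[0] = this.auto, followed by e.g.
+       { id: 'bed', text: 'bed', description: '...', description_url: '...' }
+     and list_genomes holds pairs such as
+       { id: 'hg19', text: 'Human Feb. 2009 (GRCh37/hg19) (hg19)' }. */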
+
+  renderSelectBoxes: function(){
+    // This won't work properly unless we already have the data fetched.
+    // See this.fetchExtAndGenomes()
+    // TODO switch to common resources:
+    // https://trello.com/c/dIUE9YPl/1933-ui-common-resources-and-data-into-galaxy-object
+    var that = this;
+    this.select_genome = new mod_select.View( {
+        css: 'library-genome-select',
+        data: that.list_genomes,
+        container: Galaxy.modal.$el.find( '#library_genome_select' ),
+        value: '?'
+    } );
+    this.select_extension = new mod_select.View({
+      css: 'library-extension-select',
+      data: that.list_extensions,
+      container: Galaxy.modal.$el.find( '#library_extension_select' ),
+      value: 'auto'
+    });
+  },
+
+  /**
+   * Create modal for importing from given directory
+   * on Galaxy. Bind jQuery events.
+   */
+  importFilesFromGalaxyFolderModal: function( options ){
+    var that = this;
+    var template_modal = this.templateBrowserModal();
+    this.modal = Galaxy.modal;
+    this.modal.show({
+      closing_events  : true,
+      title           : 'Please select folders or files',
+      body            : template_modal({}),
+      buttons         : {
+          'Import'    : function() {
+            that.importFromJstreePath( that, options );
+          },
+          'Close'     : function() {
+            Galaxy.modal.hide();
+          }
+      },
+      closing_callback: function(){
+        //  TODO: should not trigger routes outside of the router
+        Galaxy.libraries.library_router.navigate('folders/' + that.id, {trigger: true});
+      }
+    });
+
+    $('.libimport-select-all').bind("click", function(){
+      $('#jstree_browser').jstree("check_all");
+    });
+    $('.libimport-select-none').bind("click", function(){
+      $('#jstree_browser').jstree("uncheck_all");
+    });
+
+    this.renderSelectBoxes();
+    options.disabled_jstree_element = 'folders';
+    this.renderJstree( options );
+
+    $( 'input[type=radio]' ).change( function( event ){
+        if (event.target.value === 'jstree-disable-folders') {
+          options.disabled_jstree_element = 'folders';
+          that.renderJstree( options );
+          $('.jstree-folders-message').hide();
+          $('.jstree-preserve-structure').hide();
+          $('.jstree-link-files').hide();
+          $('.jstree-files-message').show();
+        } else if ( event.target.value === 'jstree-disable-files' ){
+          $('.jstree-files-message').hide();
+          $('.jstree-folders-message').show();
+          $('.jstree-link-files').show();
+          $('.jstree-preserve-structure').show();
+          options.disabled_jstree_element = 'files';
+          that.renderJstree( options );
+        }
+      }
+    );
+  },
+
+  /**
+   * Fetch the contents of user directory on Galaxy
+   * and render jstree component based on received
+   * data.
+   * @param  {[type]} options [description]
+   */
+  renderJstree: function( options ){
+    var that = this;
+    this.options = _.extend( this.options, options );
+    var target = options.source || 'userdir';
+    var disabled_jstree_element = this.options.disabled_jstree_element;
+    this.jstree = new mod_library_model.Jstree();
+    this.jstree.url = this.jstree.urlRoot +
+                        '?target=' + target +
+                        '&format=jstree' +
+                        '&disable=' + disabled_jstree_element;
+    this.jstree.fetch({
+      success: function(model, response){
+        // Define 'jquery' here so require.js does not load jQuery a second time.
+        define( 'jquery', function(){ return jQuery; });
+        // Now we need jstree, time to lazy load it.
+        require([ 'libs/jquery/jstree' ], function(jstree){
+          $('#jstree_browser').jstree("destroy");
+          $('#jstree_browser').jstree({
+            'core':{
+              'data': model
+            },
+            'plugins': ['types', 'checkbox'],
+            'types': {
+              "folder": {
+                "icon": "jstree-folder"
+              },
+              "file": {
+                "icon": "jstree-file"
+              }
+            },
+            'checkbox': {
+              three_state: false
+            }
+          });
+        });
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          if (response.responseJSON.err_code === 404001){
+            mod_toastr.warning(response.responseJSON.err_msg);
+          } else{
+            mod_toastr.error(response.responseJSON.err_msg);
+          }
+        } else {
+          mod_toastr.error('An error occurred.');
+        }
+      }
+    });
+  },
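+  /* Illustrative URL (a sketch): for the default 'userdir' target with
+     folders disabled, the model is fetched from
+       <urlRoot>?target=userdir&format=jstree&disable=folders */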
+
+  /**
+   * Take the paths from the textarea, split it, create
+   * a request queue and call a function that starts sending
+   * one by one to be imported on the server.
+   */
+  importFromPathsClicked: function(){
+    var preserve_dirs = this.modal.$el.find('.preserve-checkbox').is(':checked');
+    var link_data = this.modal.$el.find('.link-checkbox').is(':checked');
+    var file_type = this.select_extension.value();
+    var dbkey = this.select_genome.value();
+    var paths = $('textarea#import_paths').val();
+    var valid_paths = [];
+    if (!paths){
+      mod_toastr.info('Please enter a path relative to Galaxy root.');
+    } else {
+      this.modal.disableButton('Import');
+      paths = paths.split('\n');
+      for (var i = paths.length - 1; i >= 0; i--) {
+        var trimmed = paths[i].trim();
+        if (trimmed.length !== 0){
+          valid_paths.push(trimmed);
+        }
+      }
+      this.initChainCallControl( { length: valid_paths.length, action: 'adding_datasets' } );
+      this.chainCallImportingFolders( { paths: valid_paths,
+                                        preserve_dirs: preserve_dirs,
+                                        link_data: link_data,
+                                        source: 'admin_path',
+                                        file_type: file_type,
+                                        dbkey: dbkey } );
+    }
+  },
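+  /* E.g., a textarea containing the two hypothetical lines
+       '/data/run1.fastq\n  /data/run2.fastq  '
+     yields valid_paths = ['/data/run2.fastq', '/data/run1.fastq'] (the
+     loop walks backwards and trims each line) before the chained import
+     with source 'admin_path' starts. */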
+
+  /**
+   * Initialize the control of chaining requests
+   * in the current modal.
+   * @param {int} length The number of items in the chain call.
+   */
+  initChainCallControl: function( options ){
+    var template;
+    switch( options.action ){
+      case "adding_datasets":
+        template = this.templateAddingDatasetsProgressBar();
+        this.modal.$el.find( '.modal-body' ).html( template( { folder_name : this.options.folder_name } ) );
+        break;
+      case "deleting_datasets":
+        template = this.templateDeletingItemsProgressBar();
+        this.modal.$el.find( '.modal-body' ).html( template() );
+        break;
+      case "to_history":
+        template = this.templateImportIntoHistoryProgressBar();
+        this.modal.$el.find( '.modal-body' ).html( template( { history_name : options.history_name } ) );
+        break;
+      default:
+        Galaxy.emit.error( 'Wrong action specified.', 'datalibs');
+        break;
+    }
+
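+    // Reset the progress bar; each completed request advances it by an equal share of 100%.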
+    this.progress = 0;
+    this.progressStep = 100 / options.length;
+    this.options.chain_call_control.total_number = options.length;
+    this.options.chain_call_control.failed_number = 0;
+  },
+
+  /**
+   * Take the selected items from the jstree, create a request queue
+   * and send them one by one to the server for importing into
+   * the current folder.
+   *
+   * jstree.js has to be loaded before
+   * @see renderJstree
+   */
+  importFromJstreePath: function ( that, options ){
+    var all_nodes = $( '#jstree_browser' ).jstree().get_selected( true );
+    // remove the disabled elements that could have been triggered by 'select all'
+    var selected_nodes = _.filter(all_nodes, function(node){ return node.state.disabled === false; });
+    var preserve_dirs = this.modal.$el.find( '.preserve-checkbox' ).is( ':checked' );
+    var link_data = this.modal.$el.find( '.link-checkbox' ).is( ':checked' );
+    var file_type = this.select_extension.value();
+    var dbkey = this.select_genome.value();
+    var paths = [];
+    if ( selected_nodes.length < 1 ){
+      mod_toastr.info( 'Please select some items first.' );
+    } else {
+      // Read the selection type only after verifying that something is selected.
+      var selection_type = selected_nodes[0].type;
+      this.modal.disableButton( 'Import' );
+      for ( var i = selected_nodes.length - 1; i >= 0; i-- ){
+        if ( selected_nodes[i].li_attr.full_path !== undefined ){
+          paths.push( selected_nodes[i].li_attr.full_path );
+        }
+      }
+      this.initChainCallControl( { length: paths.length, action: 'adding_datasets' } );
+      if ( selection_type === 'folder' ){
+        var full_source = options.source + '_folder';
+        this.chainCallImportingFolders( { paths: paths,
+                                          preserve_dirs: preserve_dirs,
+                                          link_data: link_data,
+                                          source: full_source,
+                                          file_type: file_type,
+                                          dbkey: dbkey } );
+      } else if ( selection_type === 'file' ){
+        var full_source = options.source + '_file';
+        this.chainCallImportingUserdirFiles( { paths : paths,
+                                               file_type: file_type,
+                                               dbkey: dbkey,
+                                               source: full_source } );
+      }
+    }
+  },
+
+  fetchAndDisplayHistoryContents: function(history_id){
+    var history_contents = new mod_library_model.HistoryContents({id:history_id});
+    var self = this;
+    history_contents.fetch({
+      success: function(history_contents){
+        var history_contents_template = self.templateHistoryContents();
+        self.histories.get(history_id).set({'contents' : history_contents});
+        self.modal.$el.find('#selected_history_content').html(history_contents_template({history_contents: history_contents.models.reverse()}));
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error(response.responseJSON.err_msg);
+        } else {
+          mod_toastr.error('An error occurred.');
+        }
+      }
+    });
+  },
+
+  /**
+   * Import all selected datasets from history into the current folder.
+   */
+  addAllDatasetsFromHistory : function (){
+    var checked_hdas = this.modal.$el.find( '#selected_history_content' ).find( ':checked' );
+    var history_dataset_ids = [];
+    var hdas_to_add = [];
+    if ( checked_hdas.length < 1 ){
+      mod_toastr.info( 'You must select some datasets first.' );
+    } else {
+      this.modal.disableButton( 'Add' );
+      checked_hdas.each(function(){
+        var hid = $( this.parentElement ).data( 'id' );
+        if ( hid ) {
+          history_dataset_ids.push( hid );
+        }
+      });
+      for ( var i = history_dataset_ids.length - 1; i >= 0; i-- ) {
+        var history_dataset_id = history_dataset_ids[i];
+        var folder_item = new mod_library_model.Item();
+        folder_item.url = Galaxy.root + 'api/folders/' + this.options.id + '/contents';
+        folder_item.set( { 'from_hda_id':history_dataset_id } );
+        hdas_to_add.push( folder_item );
+      }
+      this.initChainCallControl( { length: hdas_to_add.length, action: 'adding_datasets' } );
+      this.chainCallAddingHdas( hdas_to_add );
+    }
+  },
+
+  /**
+   * Take array of empty history items and make request for each of them
+   * to create it on server. Update progress in between calls.
+   * @param  {array} history_item_set array of empty history items
+   * @param  {str} history_name     name of the history to import to
+   */
+  chainCallImportingIntoHistory: function( history_item_set, history_name ){
+    var self = this;
+    var popped_item = history_item_set.pop();
+    if ( typeof popped_item === "undefined" ) {
+      if ( this.options.chain_call_control.failed_number === 0 ){
+        mod_toastr.success( 'Selected datasets were imported into the history. Click this message to start analyzing them.', '', { onclick: function() { window.location=Galaxy.root } } );
+      } else if ( this.options.chain_call_control.failed_number === this.options.chain_call_control.total_number ){
+        mod_toastr.error( 'There was an error and no datasets were imported into the history.' );
+      } else if ( this.options.chain_call_control.failed_number < this.options.chain_call_control.total_number ){
+        mod_toastr.warning( 'Some of the datasets could not be imported into the history. Click this message to see what was imported.', '', { onclick: function() { window.location=Galaxy.root } } );
+      }
+      Galaxy.modal.hide();
+      return true;
+    }
+    var promise = $.when( popped_item.save( { content: popped_item.content, source: popped_item.source } ) );
+
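+    // Recurse after every response, success or failure, so the queue drains one request at a time.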
+    promise.done( function(){
+              self.updateProgress();
+              self.chainCallImportingIntoHistory( history_item_set, history_name );
+            } )
+            .fail( function(){
+              self.options.chain_call_control.failed_number += 1;
+              self.updateProgress();
+              self.chainCallImportingIntoHistory( history_item_set, history_name );
+            } );
+  },
+
+  /**
+   * Take the array of paths and create a request for each of them,
+   * calling them in a chain. Update the progress bar between calls.
+   * @param  {array} paths           paths relative to user folder on Galaxy
+   */
+  chainCallImportingUserdirFiles: function( options ){
+
+    var that = this;
+    var popped_item = options.paths.pop();
+    if ( typeof popped_item === "undefined" ) {
+      if ( this.options.chain_call_control.failed_number === 0 ){
+        mod_toastr.success( 'Selected files were imported into the current folder.' );
+        Galaxy.modal.hide();
+      } else {
+        mod_toastr.error( 'An error occurred.' );
+      }
+      return true;
+    }
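+    // One POST per path; the import options travel as query parameters.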
+    var promise = $.when( $.post( Galaxy.root + 'api/libraries/datasets?encoded_folder_id=' + that.id +
+                                                       '&source=' + options.source +
+                                                       '&path=' + popped_item +
+                                                       '&file_type=' + options.file_type +
+                                                       '&dbkey=' + options.dbkey ) );
+    promise.done( function( response ){
+              that.updateProgress();
+              that.chainCallImportingUserdirFiles( options );
+            } )
+            .fail( function(){
+              that.options.chain_call_control.failed_number += 1;
+              that.updateProgress();
+              that.chainCallImportingUserdirFiles( options );
+            } );
+  },
+
+  /**
+   * Take the array of paths and create a request for each of them,
+   * calling them in a chain. Update the progress bar between calls.
+   * @param  {array} paths           paths relative to Galaxy root folder
+   * @param  {boolean} preserve_dirs indicates whether to preserve folder structure
+   * @param  {boolean} link_data     copy files to Galaxy or link instead
+   * @param  {str} source            string representing what type of folder
+   *                                 is the source of import
+   */
+  chainCallImportingFolders: function( options ){
+    // TODO need to check which paths to call
+    var that = this;
+    var popped_item = options.paths.pop();
+    if (typeof popped_item === "undefined") {
+      if (this.options.chain_call_control.failed_number === 0){
+        mod_toastr.success('Selected folders and their contents imported into the current folder.');
+        Galaxy.modal.hide();
+      } else {
+        // TODO better error report
+        mod_toastr.error('An error occurred.');
+      }
+      return true;
+    }
+    var promise = $.when( $.post( Galaxy.root + 'api/libraries/datasets?encoded_folder_id=' + that.id +
+                                                          '&source=' + options.source +
+                                                          '&path=' + popped_item +
+                                                          '&preserve_dirs=' + options.preserve_dirs +
+                                                          '&link_data=' + options.link_data +
+                                                          '&file_type=' + options.file_type +
+                                                          '&dbkey=' + options.dbkey ) );
+    promise.done(function(response){
+              that.updateProgress();
+              that.chainCallImportingFolders( options );
+            })
+            .fail(function(){
+              that.options.chain_call_control.failed_number += 1;
+              that.updateProgress();
+              that.chainCallImportingFolders( options );
+            });
+  },
+
+  /**
+   * Take the array of hdas and create a request for each.
+   * Call them in chain and update progress bar in between each.
+   * @param  {array} hdas_set array of empty hda objects
+   */
+  chainCallAddingHdas: function( hdas_set ){
+    var self = this;
+    this.added_hdas = new mod_library_model.Folder();
+    var popped_item = hdas_set.pop();
+    if ( typeof popped_item === "undefined" ) {
+      if ( this.options.chain_call_control.failed_number === 0 ){
+        mod_toastr.success( 'Selected datasets from the history were added to the folder.' );
+      } else if ( this.options.chain_call_control.failed_number === this.options.chain_call_control.total_number ){
+        mod_toastr.error( 'There was an error and no datasets were added to the folder.' );
+      } else if ( this.options.chain_call_control.failed_number < this.options.chain_call_control.total_number ){
+        mod_toastr.warning( 'Some of the datasets could not be added to the folder.' );
+      }
+      Galaxy.modal.hide();
+      return this.added_hdas;
+    }
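+    // Each save POSTs one dataset to the folder contents endpoint configured in addAllDatasetsFromHistory.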
+    var promise = $.when( popped_item.save( { from_hda_id: popped_item.get( 'from_hda_id' ) } ) );
+
+    promise.done( function( model ){
+              Galaxy.libraries.folderListView.collection.add( model );
+              self.updateProgress();
+              self.chainCallAddingHdas( hdas_set );
+            })
+            .fail( function(){
+              self.options.chain_call_control.failed_number += 1;
+              self.updateProgress();
+              self.chainCallAddingHdas( hdas_set );
+            });
+  },
+
+  /**
+   * Take the array of items to delete, create a request for each
+   * and call them in a chain. Update the progress bar between calls.
+   * @param  {array} items_to_delete array of items (datasets and folders) to delete
+   */
+  chainCallDeletingItems: function( items_to_delete ){
+    var self = this;
+    this.deleted_items = new mod_library_model.Folder();
+    var popped_item = items_to_delete.pop();
+    if ( typeof popped_item === "undefined" ) {
+      if ( this.options.chain_call_control.failed_number === 0 ){
+        mod_toastr.success( 'Selected items were deleted.' );
+      } else if ( this.options.chain_call_control.failed_number === this.options.chain_call_control.total_number ){
+        mod_toastr.error( 'There was an error and no items were deleted. Please make sure you have sufficient permissions.' );
+      } else if ( this.options.chain_call_control.failed_number < this.options.chain_call_control.total_number ){
+        mod_toastr.warning( 'Some of the items could not be deleted. Please make sure you have sufficient permissions.' );
+      }
+      Galaxy.modal.hide();
+      return this.deleted_items;
+    }
+    var promise = $.when( popped_item.destroy() );
+
+    promise.done( function( item ){
+              Galaxy.libraries.folderListView.collection.remove( popped_item.id );
+              self.updateProgress();
+              // add the deleted item to collection, triggers rendering
+              if ( Galaxy.libraries.folderListView.options.include_deleted ){
+                var updated_item = null;
+                if (item.type === 'folder' || item.model_class === 'LibraryFolder'){
+                  updated_item = new mod_library_model.FolderAsModel( item );
+                } else if (item.type === 'file' || item.model_class === 'LibraryDataset'){
+                  updated_item = new mod_library_model.Item( item );
+                } else {
+                  Galaxy.emit.error('Unknown library item type found.', 'datalibs');
+                  Galaxy.emit.error(item.type || item.model_class, 'datalibs');
+                }
+                Galaxy.libraries.folderListView.collection.add( updated_item );
+              }
+              self.chainCallDeletingItems( items_to_delete );
+            })
+            .fail( function(){
+              self.options.chain_call_control.failed_number += 1;
+              self.updateProgress();
+              self.chainCallDeletingItems( items_to_delete );
+            });
+  },
+
+  /**
+   * Handles the click on 'show deleted' checkbox
+   */
+  checkIncludeDeleted: function(event){
+    if (event.target.checked){
+      Galaxy.libraries.folderListView.fetchFolder({include_deleted: true});
+    } else{
+      Galaxy.libraries.folderListView.fetchFolder({include_deleted: false});
+    }
+  },
+
+  /**
+   * Delete the selected items one by one, each with its own request.
+   */
+  deleteSelectedItems: function(){
+    var checkedValues = $('#folder_table').find(':checked');
+    if(checkedValues.length === 0){
+        mod_toastr.info('You must select at least one item for deletion.');
+    } else {
+      var template = this.templateDeletingItemsProgressBar();
+      this.modal = Galaxy.modal;
+      this.modal.show({
+          closing_events  : true,
+          title           : 'Deleting selected items',
+          body            : template({}),
+          buttons         : {
+              'Close'     : function() {Galaxy.modal.hide();}
+          }
+      });
+      // init the control counters
+      this.options.chain_call_control.total_number = 0;
+      this.options.chain_call_control.failed_number = 0;
+
+      var dataset_ids = [];
+      var folder_ids = [];
+      checkedValues.each(function(){
+          var row_id = $(this.parentElement.parentElement).data('id');
+          if (row_id !== undefined) {
+              if (row_id.substring(0,1) === 'F'){
+                folder_ids.push(row_id);
+              } else {
+                dataset_ids.push(row_id);
+              }
+          }
+      });
+      // init the progress bar
+      var items_total = dataset_ids.length + folder_ids.length;
+      this.progressStep = 100 / items_total;
+      this.progress = 0;
+
+      // prepare the dataset items to be added
+      var items_to_delete = [];
+      for (var i = dataset_ids.length - 1; i >= 0; i--) {
+          var dataset = new mod_library_model.Item({id:dataset_ids[i]});
+          items_to_delete.push(dataset);
+      }
+      for (var i = folder_ids.length - 1; i >= 0; i--) {
+          var folder = new mod_library_model.FolderAsModel({id:folder_ids[i]});
+          items_to_delete.push(folder);
+      }
+
+      this.options.chain_call_control.total_number = items_total;
+      // call the recursive function to call ajax one after each other (request FIFO queue)
+      this.chainCallDeletingItems(items_to_delete);
+    }
+  },
+
+
+  showLocInfo: function(){
+    var library = null;
+    var that = this;
+    if (Galaxy.libraries.libraryListView !== null){
+      library = Galaxy.libraries.libraryListView.collection.get(this.options.parent_library_id);
+      this.showLocInfoModal(library);
+    } else {
+      library = new mod_library_model.Library({id: this.options.parent_library_id});
+      library.fetch({
+        success: function(){
+          that.showLocInfoModal(library);
+        },
+        error: function(model, response){
+          if (typeof response.responseJSON !== "undefined"){
+            mod_toastr.error(response.responseJSON.err_msg);
+          } else {
+            mod_toastr.error('An error occurred.');
+          }
+        }
+      });
+    }
+  },
+
+  showLocInfoModal: function(library){
+    var that = this;
+    var template = this.templateLocInfoInModal();
+    this.modal = Galaxy.modal;
+    this.modal.show({
+        closing_events  : true,
+        title           : 'Location Details',
+        body            : template({library: library, options: that.options}),
+        buttons         : {
+            'Close'     : function() {Galaxy.modal.hide();}
+        }
+    });
+  },
+
+  showImportModal: function(options){
+    switch(options.source){
+      case "history":
+        this.addFilesFromHistoryModal();
+        break;
+      case "importdir":
+        this.importFilesFromGalaxyFolderModal( { source: 'importdir' } );
+        break;
+      case "path":
+        this.importFilesFromPathModal();
+        break;
+      case "userdir":
+        this.importFilesFromGalaxyFolderModal( { source: 'userdir' } );
+        break;
+      default:
+        Galaxy.libraries.library_router.back();
+        mod_toastr.error('Invalid import source.');
+        break;
+    }
+  },
+
+  /**
+   * Show the user a prompt to change the number of items shown per page.
+   */
+  showPageSizePrompt: function(){
+    var folder_page_size = prompt( 'How many items per page do you want to see?', Galaxy.libraries.preferences.get( 'folder_page_size' ) );
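+    // The loose '==' comparison accepts the prompt's string result only when it parses to the same integer.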
+    if ( ( folder_page_size != null ) && ( folder_page_size == parseInt( folder_page_size ) ) ) {
+        Galaxy.libraries.preferences.set( { 'folder_page_size': parseInt( folder_page_size ) } );
+        Galaxy.libraries.folderListView.render( { id: this.options.id, show_page: 1 } );
+    }
+  },
+
+  templateToolBar: function(){
+    return _.template([
+    // container start
+    '<div class="library_style_container">',
+      // toolbar start
+      '<div id="library_toolbar">',
+        '<form class="form-inline" role="form">',
+          '<span><strong>DATA LIBRARIES</strong></span>',
+          // paginator will append here
+          '<span class="library-paginator folder-paginator"></span>',
+          '<div class="checkbox toolbar-item logged-dataset-manipulation" style="height: 20px; display:none;">',
+            '<label>',
+              '<input id="include_deleted_datasets_chk" type="checkbox">include deleted</input>',
+            '</label>',
+          '</div>',
+          '<button style="display:none;" data-toggle="tooltip" data-placement="top" title="Create New Folder" id="toolbtn_create_folder" class="btn btn-default primary-button add-library-items toolbar-item" type="button">',
+            '<span class="fa fa-plus"></span><span class="fa fa-folder"></span>',
+          '</button>',
+          '<% if(mutiple_add_dataset_options) { %>',
+          '<div class="btn-group add-library-items" style="display:none;">',
+            '<button title="Add Datasets to Current Folder" id="" type="button" class="primary-button dropdown-toggle" data-toggle="dropdown">',
+              '<span class="fa fa-plus"></span><span class="fa fa-file"></span><span class="caret"></span>',
+            '</button>',
+            '<ul class="dropdown-menu" role="menu">',
+              '<li><a href="#folders/<%= id %>/import/history"> from History</a></li>',
+              '<% if(Galaxy.config.user_library_import_dir !== null) { %>',
+                '<li><a href="#folders/<%= id %>/import/userdir"> from User Directory</a></li>',
+              '<% } %>',
+              '<% if(Galaxy.config.allow_library_path_paste) { %>',
+                '<li class="divider"></li>',
+                '<li class="dropdown-header">Admins only</li>',
+                '<% if(Galaxy.config.library_import_dir !== null) { %>',
+                  '<li><a href="#folders/<%= id %>/import/importdir">from Import Directory</a></li>',
+                '<% } %>',
+                '<% if(Galaxy.config.allow_library_path_paste) { %>',
+                  '<li><a href="#folders/<%= id %>/import/path">from Path</a></li>',
+                '<% } %>',
+              '<% } %>',
+            '</ul>',
+          '</div>',
+          '<% } else { %>',
+            '<a  data-placement="top" title="Add Datasets to Current Folder" style="display:none;" class="btn btn-default add-library-items" href="#folders/<%= id %>/import/history" role="button">',
+              '<span class="fa fa-plus"></span><span class="fa fa-file"></span>',
+            '</a>',
+          '<% } %>',
+          '<button data-toggle="tooltip" data-placement="top" title="Import selected datasets into history" id="toolbtn_bulk_import" class="primary-button dataset-manipulation" style="margin-left: 0.5em; display:none;" type="button">',
+            '<span class="fa fa-book"></span>',
+            ' to History',
+          '</button>',
+          '<div class="btn-group dataset-manipulation" style="margin-left: 0.5em; display:none; ">',
+            '<button title="Download selected items as archive" type="button" class="primary-button dropdown-toggle" data-toggle="dropdown">',
+              '<span class="fa fa-download"></span> Download <span class="caret"></span>',
+            '</button>',
+            '<ul class="dropdown-menu" role="menu">',
+              '<li><a href="#/folders/<%= id %>/download/tgz">.tar.gz</a></li>',
+              '<li><a href="#/folders/<%= id %>/download/tbz">.tar.bz</a></li>',
+              '<li><a href="#/folders/<%= id %>/download/zip">.zip</a></li>',
+            '</ul>',
+          '</div>',
+            '<button data-toggle="tooltip" data-placement="top" title="Mark selected items deleted" id="toolbtn_bulk_delete" class="primary-button logged-dataset-manipulation" style="margin-left: 0.5em; display:none; " type="button">',
+            '<span class="fa fa-times"></span> Delete</button>',
+            '<button data-id="<%- id %>" data-toggle="tooltip" data-placement="top" title="Show location details" class="primary-button toolbtn-show-locinfo" style="margin-left: 0.5em;" type="button">',
+              '<span class="fa fa-info-circle"></span>',
+              ' Details',
+            '</button>',
+            '<span class="help-button" data-toggle="tooltip" data-placement="top" title="Visit Libraries Wiki">',
+              '<a href="https://wiki.galaxyproject.org/DataLibraries/screen/FolderContents" target="_blank">',
+                '<button class="primary-button" type="button">',
+                  '<span class="fa fa-question-circle"></span>',
+                  ' Help',
+                '</button>',
+              '</a>',
+            '</span>',
+          '</div>',
+        '</form>',
+      // toolbar end
+      '<div id="folder_items_element">',
+      '</div>',
+      // paginator will append here
+      '<div class="folder-paginator paginator-bottom"></div>',
+    // container end
+    '</div>',
+    ].join(''));
+  },
+
+  templateLocInfoInModal: function(){
+    return _.template([
+      '<div>',
+        '<table class="grid table table-condensed">',
+          '<thead>',
+            '<th style="width: 25%;">library</th>',
+            '<th></th>',
+          '</thead>',
+          '<tbody>',
+            '<tr>',
+              '<td>name</td>',
+              '<td><%- library.get("name") %></td>',
+            '</tr>',
+            '<% if(library.get("description") !== "") { %>',
+              '<tr>',
+                '<td>description</td>',
+                '<td><%- library.get("description") %></td>',
+              '</tr>',
+            '<% } %>',
+            '<% if(library.get("synopsis") !== "") { %>',
+              '<tr>',
+                '<td>synopsis</td>',
+                '<td><%- library.get("synopsis") %></td>',
+              '</tr>',
+            '<% } %>',
+            '<% if(library.get("create_time_pretty") !== "") { %>',
+              '<tr>',
+                '<td>created</td>',
+                '<td><span title="<%- library.get("create_time") %>"><%- library.get("create_time_pretty") %></span></td>',
+              '</tr>',
+            '<% } %>',
+            '<tr>',
+              '<td>id</td>',
+              '<td><%- library.get("id") %></td>',
+            '</tr>',
+          '</tbody>',
+        '</table>',
+        '<table class="grid table table-condensed">',
+          '<thead>',
+            '<th style="width: 25%;">folder</th>',
+            '<th></th>',
+          '</thead>',
+          '<tbody>',
+            '<tr>',
+              '<td>name</td>',
+              '<td><%- options.folder_name %></td>',
+            '</tr>',
+            '<% if(options.folder_description !== "") { %>',
+              '<tr>',
+                '<td>description</td>',
+                '<td><%- options.folder_description %></td>',
+              '</tr>',
+            '<% } %>',
+            '<tr>',
+              '<td>id</td>',
+              '<td><%- options.id %></td>',
+            '</tr>',
+          '</tbody>',
+        '</table>',
+      '</div>'
+    ].join(''));
+  },
+
+  templateNewFolderInModal: function(){
+    return _.template([
+    '<div id="new_folder_modal">',
+      '<form>',
+        '<input type="text" name="Name" value="" placeholder="Name" autofocus>',
+        '<input type="text" name="Description" value="" placeholder="Description">',
+      '</form>',
+    '</div>'
+    ].join(''));
+  },
+
+
+  templateBulkImportInModal : function(){
+    return _.template([
+    '<div>',
+      '<div class="library-modal-item">',
+        'Select history: ',
+        '<select id="dataset_import_bulk" name="dataset_import_bulk" style="width:50%; margin-bottom: 1em; " autofocus>',
+          '<% _.each(histories, function(history) { %>',
+            '<option value="<%= _.escape(history.get("id")) %>"><%= _.escape(history.get("name")) %></option>',
+          '<% }); %>',
+        '</select>',
+      '</div>',
+      '<div class="library-modal-item">',
+        'or create new: ',
+        '<input type="text" name="history_name" value="" placeholder="name of the new history" style="width:50%;">',
+        '</input>',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+  templateImportIntoHistoryProgressBar : function (){
+    return _.template([
+    '<div class="import_text">',
+      'Importing selected items to history <b><%= _.escape(history_name) %></b>',
+    '</div>',
+    '<div class="progress">',
+      '<div class="progress-bar progress-bar-import" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" style="width: 00%;">',
+        '<span class="completion_span">0% Complete</span>',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+  templateAddingDatasetsProgressBar: function (){
+    return _.template([
+    '<div class="import_text">',
+      'Adding selected datasets to library folder <b><%= _.escape(folder_name) %></b>',
+    '</div>',
+    '<div class="progress">',
+      '<div class="progress-bar progress-bar-import" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" style="width: 00%;">',
+        '<span class="completion_span">0% Complete</span>',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+  templateDeletingItemsProgressBar: function (){
+    return _.template([
+    '<div class="import_text">',
+    '</div>',
+    '<div class="progress">',
+      '<div class="progress-bar progress-bar-import" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" style="width: 00%;">',
+        '<span class="completion_span">0% Complete</span>',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+  templateBrowserModal: function(){
+    return _.template([
+    '<div id="file_browser_modal">',
+      '<div class="alert alert-info jstree-files-message">All files you select will be imported into the current folder ignoring their folder structure.</div>',
+      '<div class="alert alert-info jstree-folders-message" style="display:none;">All files within the selected folders and their subfolders will be imported into the current folder.</div>',
+      '<div style="margin-bottom:1em;">',
+        '<label title="Switch to selecting files" class="radio-inline import-type-switch">',
+          '<input type="radio" name="jstree-radio" value="jstree-disable-folders" checked="checked"> Choose Files',
+        '</label>',
+        '<label title="Switch to selecting folders" class="radio-inline import-type-switch">',
+        '<input type="radio" name="jstree-radio" value="jstree-disable-files"> Choose Folders',
+        '</label>',
+      '</div>',
+      '<div style="margin-bottom:1em;">',
+        '<label class="checkbox-inline jstree-preserve-structure" style="display:none;">',
+          '<input class="preserve-checkbox" type="checkbox" value="preserve_directory_structure">',
+          'Preserve directory structure',
+        '</label>',
+        '<label class="checkbox-inline jstree-link-files" style="display:none;">',
+          '<input class="link-checkbox" type="checkbox" value="link_files">',
+          'Link files instead of copying',
+        '</label>',
+      '</div>',
+      '<button title="Select all files" type="button" class="button primary-button libimport-select-all">',
+        'Select all',
+      '</button>',
+      '<button title="Select no files" type="button" class="button primary-button libimport-select-none">',
+        'Select none',
+      '</button>',
+      '<hr />',
+      // append jstree object here
+      '<div id="jstree_browser">',
+      '</div>',
+      '<hr />',
+      '<p>You can set extension type and genome for all imported datasets at once:</p>',
+      '<div>',
+        'Type: <span id="library_extension_select" class="library-extension-select" />',
+        'Genome: <span id="library_genome_select" class="library-genome-select" />',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+  templateImportPathModal: function(){
+    return _.template([
+    '<div id="file_browser_modal">',
+      '<div class="alert alert-info jstree-folders-message">All files within the given folders and their subfolders will be imported into the current folder.</div>',
+      '<div style="margin-bottom: 0.5em;">',
+        '<label class="checkbox-inline jstree-preserve-structure">',
+          '<input class="preserve-checkbox" type="checkbox" value="preserve_directory_structure">',
+          'Preserve directory structure',
+        '</label>',
+        '<label class="checkbox-inline jstree-link-files">',
+          '<input class="link-checkbox" type="checkbox" value="link_files">',
+          'Link files instead of copying',
+        '</label>',
+      '</div>',
+      '<textarea id="import_paths" class="form-control" rows="5" placeholder="Absolute paths (or paths relative to Galaxy root) separated by newline" autofocus></textarea>',
+      '<hr />',
+      '<p>You can set extension type and genome for all imported datasets at once:</p>',
+      '<div>',
+        'Type: <span id="library_extension_select" class="library-extension-select" />',
+        'Genome: <span id="library_genome_select" class="library-genome-select" />',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+  templateAddFilesFromHistory: function (){
+    return _.template([
+    '<div id="add_files_modal">',
+      '<div>',
+        'Select history:  ',
+        '<select id="dataset_add_bulk" name="dataset_add_bulk" style="width:66%; "> ',
+          '<% _.each(histories, function(history) { %>', //history select box
+            '<option value="<%= _.escape(history.get("id")) %>"><%= _.escape(history.get("name")) %></option>',
+          '<% }); %>',
+        '</select>',
+      '</div>',
+      '<br/>',
+      '<div id="selected_history_content">',
+      '</div>',
+    '</div>'
+    ].join(''));
+  },
+
+  templateHistoryContents: function (){
+    return _.template([
+    '<strong>Choose the datasets to import:</strong>',
+    '<ul>',
+      '<% _.each(history_contents, function(history_item) { %>',
+        '<li data-id="<%= _.escape(history_item.get("id")) %>">',
+          '<input style="margin: 0;" type="checkbox"> <%= _.escape(history_item.get("hid")) %>: <%= _.escape(history_item.get("name")) %>',
+        '</li>',
+      '<% }); %>',
+    '</ul>'
+    ].join(''));
+  },
+
+  templatePaginator: function(){
+    return _.template([
+    '<ul class="pagination pagination-sm">',
+      '<% if ( ( show_page - 1 ) > 0 ) { %>',
+        '<% if ( ( show_page - 1 ) > page_count ) { %>', // we are on a higher page than the total page count
+          '<li><a href="#folders/<%= id %>/page/1"><span class="fa fa-angle-double-left"></span></a></li>',
+          '<li class="disabled"><a href="#folders/<%= id %>/page/<% print( show_page ) %>"><% print( show_page - 1 ) %></a></li>',
+        '<% } else { %>',
+          '<li><a href="#folders/<%= id %>/page/1"><span class="fa fa-angle-double-left"></span></a></li>',
+          '<li><a href="#folders/<%= id %>/page/<% print( show_page - 1 ) %>"><% print( show_page - 1 ) %></a></li>',
+        '<% } %>',
+      '<% } else { %>', // we are on the first page
+        '<li class="disabled"><a href="#folders/<%= id %>/page/1"><span class="fa fa-angle-double-left"></span></a></li>',
+        '<li class="disabled"><a href="#folders/<%= id %>/page/<% print( show_page ) %>"><% print( show_page - 1 ) %></a></li>',
+      '<% } %>',
+      '<li class="active">',
+        '<a href="#folders/<%= id %>/page/<% print( show_page ) %>"><% print( show_page ) %></a>',
+      '</li>',
+      '<% if ( ( show_page ) < page_count ) { %>',
+        '<li><a href="#folders/<%= id %>/page/<% print( show_page + 1 ) %>"><% print( show_page + 1 ) %></a></li>',
+        '<li><a href="#folders/<%= id %>/page/<% print( page_count ) %>"><span class="fa fa-angle-double-right"></span></a></li>',
+      '<% } else { %>',
+        '<li class="disabled"><a href="#folders/<%= id %>/page/<% print( show_page  ) %>"><% print( show_page + 1 ) %></a></li>',
+        '<li class="disabled"><a href="#folders/<%= id %>/page/<% print( page_count ) %>"><span class="fa fa-angle-double-right"></span></a></li>',
+      '<% } %>',
+    '</ul>',
+    '<span>',
+      ' showing ',
+      '<a data-toggle="tooltip" data-placement="top" title="Click to change the number of items on page" class="page_size_prompt">',
+        '<%- items_shown %>',
+      '</a>',
+      ' of <%- total_items_count %> items',
+    '</span>'
+    ].join(''));
+  },
+
+});
+
+return {
+    FolderToolbarView: FolderToolbarView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-library-view.js b/client/galaxy/scripts/mvc/library/library-library-view.js
new file mode 100644
index 0000000..42650e0
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-library-view.js
@@ -0,0 +1,348 @@
+define([
+  "libs/toastr",
+  "mvc/library/library-model",
+  'mvc/ui/ui-select'
+  ],
+function(
+        mod_toastr,
+        mod_library_model,
+        mod_select
+        ) {
+
+var LibraryView = Backbone.View.extend({
+  el: '#center',
+
+  model: null,
+
+  options: {
+
+  },
+
+  events: {
+    "click .toolbtn_save_permissions"     :   "savePermissions"
+  },
+
+  initialize: function(options){
+    this.options = _.extend(this.options, options);
+    if (this.options.id){
+      this.fetchLibrary();
+    }
+  },
+
+  fetchLibrary: function(options){
+    this.options = _.extend(this.options, options);
+    this.model = new mod_library_model.Library({id:this.options.id});
+    var that = this;
+    this.model.fetch({
+      success: function() {
+        if (that.options.show_permissions){
+            that.showPermissions();
+        } else {
+            that.render();
+        }
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error(response.responseJSON.err_msg + ' Click this to go back.', '', {onclick: function() {Galaxy.libraries.library_router.back();}});
+        } else {
+          mod_toastr.error('An error occurred. Click this to go back.', '', {onclick: function() {Galaxy.libraries.library_router.back();}});
+        }
+      }
+    });
+  },
+
+  render: function(options){
+    $(".tooltip").remove();
+    this.options = _.extend(this.options, options);
+    var template = this.templateLibrary();
+    this.$el.html(template({item: this.model}));
+    $("#center [data-toggle]").tooltip();
+  },
+
+  shareDataset: function(){
+    mod_toastr.info('Feature coming soon.');
+  },
+
+  goBack: function(){
+    Galaxy.libraries.library_router.back();
+  },
+
+  showPermissions: function(options){
+    this.options = _.extend(this.options, options);
+    $(".tooltip").remove();
+
+    if (this.options.fetched_permissions !== undefined){
+      if (this.options.fetched_permissions.access_library_role_list.length === 0){
+        this.model.set({is_unrestricted:true});
+      } else{
+        this.model.set({is_unrestricted:false});
+      }
+    }
+    var is_admin = false;
+    if (Galaxy.user){
+      is_admin = Galaxy.user.isAdmin();
+    }
+    var template = this.templateLibraryPermissions();
+    this.$el.html(template({library: this.model, is_admin:is_admin}));
+
+    var self = this;
+    $.get( Galaxy.root + "api/libraries/" + self.id + "/permissions?scope=current").done(function(fetched_permissions) {
+      self.prepareSelectBoxes({fetched_permissions:fetched_permissions});
+    }).fail(function(){
+        mod_toastr.error('An error occurred while attempting to fetch library permissions.');
+    });
+
+    $("#center [data-toggle]").tooltip();
+    //hack to show scrollbars
+    $("#center").css('overflow','auto');
+  },
+
+  _serializeRoles : function(role_list){
+    var selected_roles = [];
+    for (var i = 0; i < role_list.length; i++) {
+      selected_roles.push(role_list[i][1] + ':' + role_list[i][0]);
+    }
+    return selected_roles;
+  },
+
+  prepareSelectBoxes: function(options){
+    this.options = _.extend(this.options, options);
+    var fetched_permissions = this.options.fetched_permissions;
+    var self = this;
+
+    var selected_access_library_roles = this._serializeRoles(fetched_permissions.access_library_role_list);
+    var selected_add_item_roles = this._serializeRoles(fetched_permissions.add_library_item_role_list);
+    var selected_manage_library_roles = this._serializeRoles(fetched_permissions.manage_library_role_list);
+    var selected_modify_library_roles = this._serializeRoles(fetched_permissions.modify_library_role_list);
+
+    self.accessSelectObject = new mod_select.View(this._createSelectOptions(this, 'access_perm', selected_access_library_roles, true));
+    self.addSelectObject = new mod_select.View(this._createSelectOptions(this, 'add_perm', selected_add_item_roles, false));
+    self.manageSelectObject = new mod_select.View(this._createSelectOptions(this, 'manage_perm', selected_manage_library_roles, false));
+    self.modifySelectObject = new mod_select.View(this._createSelectOptions(this, 'modify_perm', selected_modify_library_roles, false));
+  },
+
+  _createSelectOptions: function(self, id, init_data, is_library_access){
+    is_library_access = is_library_access === true;
+    var select_options = {
+      minimumInputLength: 0,
+      css: id,
+      multiple:true,
+      placeholder: 'Click to select a role',
+      container: self.$el.find('#' + id),
+      ajax: {
+          url: Galaxy.root + "api/libraries/" + self.id + "/permissions?scope=available&is_library_access=" + is_library_access,
+          dataType: 'json',
+          quietMillis: 100,
+          data: function (term, page) { // page is the one-based page number tracked by Select2
+              return {
+                  q: term, //search term
+                  page_limit: 10, // page size
+                  page: page // page number
+              };
+          },
+          results: function (data, page) {
+              var more = (page * 10) < data.total; // whether or not there are more results available
+              // notice we return the value of more so Select2 knows if more results can be loaded
+              return {results: data.roles, more: more};
+          }
+      },
+      formatResult : function roleFormatResult(role) {
+          return role.name + ' type: ' + role.type;
+      },
+
+      formatSelection: function roleFormatSelection(role) {
+          return role.name;
+      },
+      initSelection: function(element, callback) {
+      // the input tag has a value attribute preloaded that points to a preselected role's id
+      // this function resolves that id attribute to an object that select2 can render
+      // using its formatResult renderer - that way the role name is shown preselected
+          var data = [];
+          $(element.val().split(",")).each(function() {
+              var item = this.split(':');
+              data.push({
+                  id: item[0],
+                  name: item[1]
+              });
+          });
+          callback(data);
+      },
+      initialData: init_data,
+      dropdownCssClass: "bigdrop" // apply css that makes the dropdown taller
+    };
+
+    return select_options;
+  },
+
+  comingSoon: function(){
+    mod_toastr.warning('Feature coming soon.');
+  },
+
+  copyToClipboard: function(){
+    var href = Backbone.history.location.href;
+    if (href.lastIndexOf('/permissions') !== -1){
+      href = href.substr(0, href.lastIndexOf('/permissions'));
+    }
+    window.prompt("Copy to clipboard: Ctrl+C, Enter", href);
+  },
+
+  makeDatasetPrivate: function(){
+    var self = this;
+    $.post( Galaxy.root + "api/libraries/datasets/" + self.id + "/permissions?action=make_private").done(function(fetched_permissions) {
+      self.model.set({is_unrestricted:false});
+      self.showPermissions({fetched_permissions:fetched_permissions});
+      mod_toastr.success('The dataset is now private to you.');
+    }).fail(function(){
+      mod_toastr.error('An error occurred while attempting to make dataset private.');
+    });
+  },
+
+  removeDatasetRestrictions: function(){
+    var self = this;
+    $.post( Galaxy.root + "api/libraries/datasets/" + self.id + "/permissions?action=remove_restrictions")
+    .done(function(fetched_permissions) {
+      self.model.set({is_unrestricted:true});
+      self.showPermissions({fetched_permissions:fetched_permissions});
+      mod_toastr.success('Access to this dataset is now unrestricted.');
+    })
+    .fail(function(){
+      mod_toastr.error('An error occurred while attempting to make dataset unrestricted.');
+    });
+  },
+
+  _extractIds: function(roles_list){
+    var ids_list = [];
+    for (var i = roles_list.length - 1; i >= 0; i--) {
+      ids_list.push(roles_list[i].id);
+    }
+    return ids_list;
+  },
+
+  savePermissions: function(event){
+    var self = this;
+
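+    // Collect the role ids currently chosen in each select2 widget.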
+    var access_ids = this._extractIds(this.accessSelectObject.$el.select2('data'));
+    var add_ids = this._extractIds(this.addSelectObject.$el.select2('data'));
+    var manage_ids = this._extractIds(this.manageSelectObject.$el.select2('data'));
+    var modify_ids = this._extractIds(this.modifySelectObject.$el.select2('data'));
+
+    $.post( Galaxy.root + "api/libraries/" + self.id + "/permissions?action=set_permissions", { 'access_ids[]': access_ids, 'add_ids[]': add_ids, 'manage_ids[]': manage_ids, 'modify_ids[]': modify_ids } )
+    .done(function(fetched_permissions){
+      //fetch dataset again
+      self.showPermissions({fetched_permissions:fetched_permissions});
+      mod_toastr.success('Permissions saved.');
+    })
+    .fail(function(){
+      mod_toastr.error('An error occurred while attempting to set library permissions.');
+    });
+  },
+
+  templateLibrary : function(){
+    return _.template([
+    '<div class="library_style_container">',
+      '<div id="library_toolbar">',
+        '<button data-toggle="tooltip" data-placement="top" title="Modify library item" class="btn btn-default toolbtn_modify_dataset primary-button" type="button">',
+          '<span class="fa fa-pencil"/>',
+          ' Modify',
+        '</button>',
+        '<a href="#folders/<%- item.get("folder_id") %>/datasets/<%- item.id %>/permissions">',
+          '<button data-toggle="tooltip" data-placement="top" title="Manage permissions" class="btn btn-default toolbtn_change_permissions primary-button" type="button">',
+            '<span class="fa fa-group"/>',
+            ' Permissions',
+          '</button>',
+        '</a>',
+        '<button data-toggle="tooltip" data-placement="top" title="Share dataset" class="btn btn-default toolbtn-share-dataset primary-button" type="button">',
+          '<span class="fa fa-share"/>',
+          ' Share',
+        '</button>',
+      '</div>',
+      '<p>',
+        'This dataset is unrestricted so everybody can access it. Just share the URL of this page. ',
+        '<button data-toggle="tooltip" data-placement="top" title="Copy to clipboard" class="btn btn-default btn-copy-link-to-clipboard primary-button" type="button">',
+          '<span class="fa fa-clipboard"/>',
+          ' To Clipboard',
+        '</button> ',
+      '</p>',
+      '<div class="dataset_table">',
+        '<table class="grid table table-striped table-condensed">',
+          '<tr>',
+            '<th scope="row" id="id_row" data-id="<%= _.escape(item.get("ldda_id")) %>">',
+              'Name',
+            '</th>',
+            '<td>',
+              '<%= _.escape(item.get("name")) %>',
+            '</td>',
+          '</tr>',
+          '<% if (item.get("file_ext")) { %>',
+            '<tr>',
+              '<th scope="row">Data type</th>',
+              '<td>',
+                '<%= _.escape(item.get("file_ext")) %>',
+              '</td>',
+            '</tr>',
+          '<% } %>',
+        '</table>',
+      '</div>',
+    '</div>',
+    ].join(''));
+  },
+
+  templateLibraryPermissions : function(){
+    return _.template([
+    '<div class="library_style_container">',
+      '<div id="library_toolbar">',
+        '<a href="#">',
+          '<button data-toggle="tooltip" data-placement="top" title="Go back to the list of Libraries" class="btn btn-default primary-button" type="button">',
+            '<span class="fa fa-list"/>',
+            ' Libraries',
+          '</button>',
+        '</a>',
+      '</div>',
+      '<h1>',
+        'Library: <%= _.escape(library.get("name")) %>',
+      '</h1>',
+      '<div class="alert alert-warning">',
+        '<% if (is_admin) { %>',
+          'You are logged in as an <strong>administrator</strong> and can therefore manage any library on this Galaxy instance. Please make sure you understand the consequences.',
+        '<% } else { %>',
+          'You can assign any number of roles to any of the following permission types. However, please consider carefully the implications of such actions.',
+        '<% }%>',
+      '</div>',
+      '<div class="dataset_table">',
+        '<h2>Library permissions</h2>',
+        '<h4>Roles that can access the library</h4>',
+        '<div id="access_perm" class="access_perm roles-selection"/>',
+        '<div class="alert alert-info roles-selection">',
+          'Users with <strong>any</strong> of these roles can access this library. If no access roles are set, the library is considered <strong>unrestricted</strong>.',
+        '</div>',
+        '<h4>Roles that can manage permissions on this library</h4>',
+        '<div id="manage_perm" class="manage_perm roles-selection"/>',
+        '<div class="alert alert-info roles-selection">',
+          'Users with <strong>any</strong> of these roles can manage permissions on this library (including granting access).',
+        '</div>',
+        '<h4>Roles that can add items to this library</h4>',
+        '<div id="add_perm" class="add_perm roles-selection"/>',
+        '<div class="alert alert-info roles-selection">',
+          'Users with <strong>any</strong> of these roles can add items (folders and datasets) to this library.',
+        '</div>',
+        '<h4>Roles that can modify this library</h4>',
+        '<div id="modify_perm" class="modify_perm roles-selection"/>',
+        '<div class="alert alert-info roles-selection">',
+          'Users with <strong>any</strong> of these roles can modify this library (name, synopsis, etc.).',
+        '</div>',
+        '<button data-toggle="tooltip" data-placement="top" title="Save modifications made on this page" class="btn btn-default toolbtn_save_permissions primary-button" type="button">',
+          '<span class="fa fa-floppy-o"/>',
+          ' Save',
+        '</button>',
+      '</div>',
+    '</div>',
+    ].join(''));
+  }
+
+});
+
+return {
+    LibraryView: LibraryView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-librarylist-view.js b/client/galaxy/scripts/mvc/library/library-librarylist-view.js
new file mode 100644
index 0000000..7b92100
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-librarylist-view.js
@@ -0,0 +1,267 @@
+define([
+    "layout/masthead",
+    "mvc/base-mvc",
+    "utils/utils",
+    "libs/toastr",
+    "mvc/library/library-model",
+    "mvc/library/library-libraryrow-view",
+    "libs/underscore"
+], function(
+    mod_masthead,
+    mod_baseMVC,
+    mod_utils,
+    mod_toastr,
+    mod_library_model,
+    mod_library_libraryrow_view,
+    _
+){
+
+var LibraryListView = Backbone.View.extend({
+    el: '#libraries_element',
+
+    events: {
+        'click .sort-libraries-link'    : 'sort_clicked'
+    },
+
+    defaults: {
+        page_count: null,
+        show_page: null,
+        all_fetched: false
+    },
+
+    /**
+     * Initialize and fetch the libraries from server.
+     * Async render afterwards.
+     * @param  {object} options an object with options
+     */
+    initialize : function( options ){
+        this.options = _.defaults( this.options || {}, options, this.defaults );
+        var that = this;
+        this.modal = null;
+        // collection of {Item}s
+        this.collection = new mod_library_model.Libraries();
+        this.collection.url = this.collection.urlRoot + '?deleted=false';
+        this.collection.fetch({
+          success: function(){
+            that.render();
+          },
+          error: function( model, response ){
+              if ( typeof response.responseJSON !== "undefined" ){
+                mod_toastr.error( response.responseJSON.err_msg );
+              } else {
+                mod_toastr.error( 'An error occurred.' );
+              }
+          }
+        });
+    },
+
+    /**
+     * Render the libraries table either from the object's own collection,
+     * or from a given array of library models,
+     * or render an empty list in case no data is given.
+     */
+    render: function ( options ) {
+        this.options = _.extend( this.options, options );
+        this.setElement('#libraries_element');
+        var template = this.templateLibraryList();
+        var libraries_to_render = null;
+        var models = null;
+        $( ".tooltip" ).hide();
+        if ( typeof options !== 'undefined' ){
+            models = typeof options.models !== 'undefined' ? options.models : null;
+        }
+        if ( this.collection !== null && models === null ){
+            this.sortLibraries();
+            if ( Galaxy.libraries.preferences.get( 'with_deleted' ) ){
+              libraries_to_render = this.collection.models;
+            } else {
+              libraries_to_render = this.collection.where( { deleted: false } );
+            }
+        } else if ( models !== null ){
+            if ( Galaxy.libraries.preferences.get( 'with_deleted' ) ){
+                libraries_to_render = models;
+            } else {
+                var is_not_deleted = function(model){ return model.get('deleted') === false; };
+                libraries_to_render = _.filter(models, is_not_deleted);
+            }
+        } else {
+            libraries_to_render = [];
+        }
+
+        // pagination
+        if ( this.options.show_page === null || this.options.show_page < 1 ){
+            this.options.show_page = 1;
+        }
+        this.options.total_libraries_count = libraries_to_render.length;
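+        // page_start is the zero-based index of the first library on the requested page.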
+        var page_start = ( Galaxy.libraries.preferences.get( 'library_page_size' ) * ( this.options.show_page - 1 ) );
+        this.options.page_count = Math.ceil( this.options.total_libraries_count / Galaxy.libraries.preferences.get( 'library_page_size' ) );
+        if ( this.options.total_libraries_count > 0 && ( page_start < this.options.total_libraries_count ) ){
+            libraries_to_render = libraries_to_render.slice( page_start, page_start + Galaxy.libraries.preferences.get( 'library_page_size' ) );
+            this.options.libraries_shown = libraries_to_render.length;
+            // User requests page with no libraries
+            if ( Galaxy.libraries.preferences.get( 'library_page_size' ) * this.options.show_page > ( this.options.total_libraries_count + Galaxy.libraries.preferences.get( 'library_page_size' ) ) ){
+                libraries_to_render = [];
+            }
+            this.$el.html( template({
+                length: 1,
+                order: Galaxy.libraries.preferences.get( 'sort_order' ),
+                search_term: Galaxy.libraries.libraryToolbarView.options.search_term
+            }));
+            Galaxy.libraries.libraryToolbarView.renderPaginator( this.options );
+            this.renderRows( libraries_to_render );
+        } else {
+            this.$el.html( template({
+                length: 0,
+                order: Galaxy.libraries.preferences.get( 'sort_order' ),
+                search_term: Galaxy.libraries.libraryToolbarView.options.search_term
+            }));
+            Galaxy.libraries.libraryToolbarView.renderPaginator( this.options );
+        }
+        $( "#center [data-toggle]" ).tooltip();
+        $( "#center" ).css( 'overflow','auto' );
+    },
+
+    fetchDeleted: function(){
+      if (this.options.all_fetched){
+        this.render();
+      } else{
+        var that = this;
+        this.collection.url = this.collection.urlRoot + '?deleted=true';
+        this.collection.fetch({
+          remove: false,
+          success: function(){
+            that.options.all_fetched = true;
+            that.render();
+          },
+          error: function( model, response ){
+              if ( typeof response.responseJSON !== "undefined" ){
+                mod_toastr.error( response.responseJSON.err_msg );
+              } else {
+                mod_toastr.error( 'An error occurred.' );
+              }
+          }
+        });
+      }
+    },
+
+    /**
+     * Render all given models as rows in the library list
+     * @param  {array} libraries_to_render array of library models to render
+     */
+    renderRows: function( libraries_to_render ){
+        for ( var i = 0; i < libraries_to_render.length; i++ ) {
+          var library = libraries_to_render[i];
+            this.renderOne( { library: library } );
+        }
+    },
+
+    /**
+     * Create a view for the given model and add it to the libraries view.
+     * @param {Library} model of the view that will be rendered
+     */
+    renderOne: function( options ){
+        var library = options.library;
+        var rowView = new mod_library_libraryrow_view.LibraryRowView( library );
+        this.$el.find( '#library_list_body' ).append( rowView.el );
+    },
+
+    /**
+     * Table heading was clicked; flip the sort order preference and re-render.
+     */
+    sort_clicked : function(){
+        if (Galaxy.libraries.preferences.get('sort_order') === 'asc'){
+            Galaxy.libraries.preferences.set({'sort_order': 'desc'});
+        } else {
+            Galaxy.libraries.preferences.set({'sort_order': 'asc'});
+        }
+        this.render();
+    },
+
+    /**
+     * Sort the underlying collection according to the parameters received.
+     * Currently supports only sorting by name.
+     */
+    sortLibraries: function(){
+        if (Galaxy.libraries.preferences.get('sort_by') === 'name'){
+            if (Galaxy.libraries.preferences.get('sort_order') === 'asc'){
+                this.collection.sortByNameAsc();
+            } else if (Galaxy.libraries.preferences.get('sort_order') === 'desc'){
+                this.collection.sortByNameDesc();
+            }
+        }
+    },
+
+    redirectToHome: function(){
+        window.location = '../';
+    },
+    redirectToLogin: function(){
+        window.location = '/user/login';
+    },
+
+    /**
+     * In case the search_term is not empty perform the search and render
+     * the result. Render all visible libraries otherwise.
+     * @param  {string} search_term string to search for
+     */
+    searchLibraries: function(search_term){
+      var trimmed_term = $.trim(search_term);
+      if (trimmed_term !== ''){
+        var results = this.collection.search( trimmed_term );
+        this.options.searching = true;
+        this.render({'models': results});
+      } else {
+        this.options.searching = false;
+        this.render();
+      }
+    },
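+
+    /* Illustrative behaviour (names are examples only): typing "rna" in
+       the toolbar search box routes here and re-renders the list with
+       only the models whose names contain "rna"; matching is
+       case-insensitive and substring-based (see Libraries.search in
+       library-model.js). */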
+
+// MMMMMMMMMMMMMMMMMM
+// === TEMPLATES ====
+// MMMMMMMMMMMMMMMMMM
+
+    templateLibraryList: function(){
+      return _.template([
+      '<div class="library_container table-responsive">',
+        '<% if(length === 0) { %>',
+          '<% if(search_term.length > 0) { %>',
+            '<div>',
+              'There are no libraries matching your search. Try a different keyword.',
+            '</div>',
+          '<% } else{ %>',
+            '<div>',
+              'There are no libraries visible to you here. If you expected some to show up, please consult the',
+              ' <a href="https://wiki.galaxyproject.org/Admin/DataLibraries/LibrarySecurity" target="_blank">library security wikipage</a>',
+              ' or visit the <a href="https://biostar.usegalaxy.org/" target="_blank">Galaxy support site</a>.',
+            '</div>',
+          '<% }%>',
+        '<% } else{ %>',
+          '<table class="grid table table-condensed">',
+            '<thead>',
+              '<tr>',
+                '<th style="width:30%;">',
+                  '<a class="sort-libraries-link" title="Click to reverse order" href="#">',
+                    'name',
+                  '</a>',
+                  '<span title="Sorted alphabetically" class="fa fa-sort-alpha-<%- order %>"></span>',
+                '</th>',
+                '<th style="width:22%;">description</th>',
+                '<th style="width:22%;">synopsis</th>',
+                '<th style="width:26%;"></th>',
+              '</tr>',
+            '</thead>',
+            '<tbody id="library_list_body">',
+            // library item views will attach here
+            '</tbody>',
+          '</table>',
+        '<% }%>',
+      '</div>'
+      ].join(''));
+    }
+
+});
+
+return {
+    LibraryListView: LibraryListView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-libraryrow-view.js b/client/galaxy/scripts/mvc/library/library-libraryrow-view.js
new file mode 100644
index 0000000..e39cc2b
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-libraryrow-view.js
@@ -0,0 +1,278 @@
+// dependencies
+define([
+    "layout/masthead",
+    "utils/utils",
+    "libs/toastr"],
+function(mod_masthead,
+         mod_utils,
+         mod_toastr) {
+
+// galaxy library row view
+var LibraryRowView = Backbone.View.extend({
+  events: {
+    'click .edit_library_btn'           : 'edit_button_clicked',
+    'click .cancel_library_btn'         : 'cancel_library_modification',
+    'click .save_library_btn'           : 'save_library_modification',
+    'click .delete_library_btn'         : 'delete_library',
+    'click .undelete_library_btn'       : 'undelete_library'
+  },
+
+  edit_mode: false,
+
+  element_visibility_config: {
+    upload_library_btn: false,
+    edit_library_btn: false,
+    permission_library_btn: false,
+    save_library_btn: false,
+    cancel_library_btn: false,
+    delete_library_btn: false,
+    undelete_library_btn: false
+  },
+
+  initialize : function(library){
+    this.render(library);
+  },
+
+  render: function(library){
+    if (typeof library === 'undefined'){
+      library = Galaxy.libraries.libraryListView.collection.get(this.$el.data('id'));
+    }
+    this.prepareButtons(library);
+    var tmpl = this.templateRow();
+    this.setElement(tmpl({library:library, button_config: this.element_visibility_config, edit_mode: this.edit_mode}));
+    this.$el.show();
+    return this;
+  },
+
+  repaint: function(library){
+    /* need to hide manually because of the element removal in setElement
+    invoked in render() */
+    $(".tooltip").hide();
+    /* we need to store the old element to be able to replace it with
+    new one */
+    var old_element = this.$el;
+    /* if user canceled the library param is undefined,
+      if user saved and succeeded the updated library is rendered */
+    this.render();
+    old_element.replaceWith(this.$el);
+    /* now we attach new tooltips to the newly created row element */
+    this.$el.find("[data-toggle]").tooltip();
+  },
+
+  /**
+   * Function modifies the visibility of buttons for
+   * the filling of the row template of given library.
+   */
+  prepareButtons: function(library){
+    var vis_config = this.element_visibility_config;
+
+    if (this.edit_mode === false){
+      vis_config.save_library_btn = false;
+      vis_config.cancel_library_btn = false;
+      vis_config.delete_library_btn = false;
+      if (library.get('deleted') === true ){
+          vis_config.undelete_library_btn = true;
+          vis_config.upload_library_btn = false;
+          vis_config.edit_library_btn = false;
+          vis_config.permission_library_btn = false;
+      } else if (library.get('deleted') === false ) {
+        vis_config.save_library_btn = false;
+        vis_config.cancel_library_btn = false;
+        vis_config.undelete_library_btn = false;
+        if (library.get('can_user_add') === true){
+          vis_config.upload_library_btn = true;
+        }
+        if (library.get('can_user_modify') === true){
+          vis_config.edit_library_btn = true;
+        }
+        if (library.get('can_user_manage') === true){
+          vis_config.permission_library_btn = true;
+        }
+      }
+    } else if (this.edit_mode === true){
+      vis_config.upload_library_btn = false;
+      vis_config.edit_library_btn = false;
+      vis_config.permission_library_btn = false;
+      vis_config.save_library_btn = true;
+      vis_config.cancel_library_btn = true;
+      vis_config.delete_library_btn = true;
+      vis_config.undelete_library_btn = false;
+    }
+
+    this.element_visibility_config = vis_config;
+  },
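+
+  /* Visibility matrix computed above, summarised (one row per state):
+   *   edit mode                 -> save, cancel, delete
+   *   deleted library           -> undelete
+   *   normal + can_user_add     -> upload
+   *   normal + can_user_modify  -> edit
+   *   normal + can_user_manage  -> permissions
+   */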
+
+  /* User clicked the 'edit' button on row so we render a new row
+    that allows editing */
+  edit_button_clicked: function(){
+    this.edit_mode = true;
+    this.repaint();
+  },
+
+  /* User clicked the 'cancel' button so we render normal rowView */
+  cancel_library_modification: function(){
+    // mod_toastr.info('Modifications canceled');
+    this.edit_mode = false;
+    this.repaint();
+  },
+
+  save_library_modification: function(){
+    var library = Galaxy.libraries.libraryListView.collection.get(this.$el.data('id'));
+    var is_changed = false;
+
+    var new_name = this.$el.find('.input_library_name').val();
+    if (typeof new_name !== 'undefined' && new_name !== library.get('name') ){
+        if (new_name.length > 2){
+            library.set("name", new_name);
+            is_changed = true;
+        } else{
+            mod_toastr.warning('Library name has to be at least 3 characters long.');
+            return;
+        }
+    }
+
+    var new_description = this.$el.find('.input_library_description').val();
+    if (typeof new_description !== 'undefined' && new_description !== library.get('description') ){
+        library.set("description", new_description);
+        is_changed = true;
+    }
+
+    var new_synopsis = this.$el.find('.input_library_synopsis').val();
+    if (typeof new_synopsis !== 'undefined' && new_synopsis !== library.get('synopsis') ){
+        library.set("synopsis", new_synopsis);
+        is_changed = true;
+    }
+
+    if (is_changed){
+      var row_view = this;
+        library.save(null, {
+          patch: true,
+          success: function(library) {
+            row_view.edit_mode = false;
+            row_view.repaint(library);
+            mod_toastr.success('Changes to library saved.');
+          },
+          error: function(model, response){
+            if (typeof response.responseJSON !== "undefined"){
+              mod_toastr.error(response.responseJSON.err_msg);
+            } else {
+              mod_toastr.error('An error occurred while attempting to update the library.');
+            }
+          }
+        });
+    } else {
+      this.edit_mode = false;
+      this.repaint(library);
+      mod_toastr.info('Nothing has changed.');
+    }
+  },
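+
+  /* The save above issues an HTTP PATCH to the library's API url
+     (urlRoot + id) with the model attributes as a JSON body; the values
+     shown here are hypothetical:
+
+         PATCH <Galaxy.root>api/libraries/<id>
+         { "name": "RNA-seq data", "description": "...", "synopsis": "..." }
+  */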
+
+  delete_library: function(){
+    var library = Galaxy.libraries.libraryListView.collection.get(this.$el.data('id'));
+    var row_view = this;
+    // mark the library deleted
+    library.destroy({
+      success: function (library) {
+        library.set('deleted', true);
+        // add the new deleted library back to the collection (Galaxy specialty)
+        Galaxy.libraries.libraryListView.collection.add(library);
+        row_view.edit_mode = false;
+        if (Galaxy.libraries.preferences.get('with_deleted') === false){
+          $('.tooltip').hide();
+          row_view.repaint(library);
+          row_view.$el.remove();
+        } else if (Galaxy.libraries.preferences.get('with_deleted') === true){
+          row_view.repaint(library);
+        }
+        mod_toastr.success('Library has been marked deleted.');
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error(response.responseJSON.err_msg);
+        } else {
+          mod_toastr.error('An error occurred while deleting the library.');
+        }
+      }
+    });
+  },
+
+  undelete_library: function(){
+    var library = Galaxy.libraries.libraryListView.collection.get(this.$el.data('id'));
+    var row_view = this;
+
+    // mark the library undeleted
+    library.url = library.urlRoot + library.id + '?undelete=true';
+    library.destroy({
+      success: function (library) {
+        // add the newly undeleted library back to the collection
+        // backbone does not accept changes through destroy, so update it too
+        library.set('deleted', false);
+        Galaxy.libraries.libraryListView.collection.add(library);
+        row_view.edit_mode = false;
+        row_view.repaint(library);
+        mod_toastr.success('Library has been undeleted.');
+      },
+      error: function(model, response){
+        if (typeof response.responseJSON !== "undefined"){
+          mod_toastr.error(response.responseJSON.err_msg);
+        } else {
+          mod_toastr.error('An error occurred while undeleting the library.');
+        }
+      }
+    });
+  },
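+
+  /* Note the url override above: Backbone's destroy() always issues an
+     HTTP DELETE, so the undelete goes out as
+         DELETE <Galaxy.root>api/libraries/<id>?undelete=true
+     and the API treats the flag as a restore rather than a removal. */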
+
+  templateRow: function() {
+    return _.template([
+    '<tr class="<% if(library.get("deleted") === true) { print("active") } %>" style="display:none;" data-id="<%- library.get("id") %>">',
+      '<% if(!edit_mode) { %>',
+        '<% if(library.get("deleted")) { %>',
+          '<td style="color:grey;"><span data-toggle="tooltip" data-placement="top" title="Marked deleted" style="color:grey;" class="fa fa-ban fa-lg deleted_lib_ico"> </span> <%- library.get("name") %></td>',
+        '<% } else { %>',
+          '<td><a href="#folders/<%- library.get("root_folder_id") %>"><%- library.get("name") %></a></td>',
+        '<% } %>',
+      '<% if(library.get("description")) { %>',
+        '<% if( (library.get("description")).length> 80 ) { %>',
+          '<td data-toggle="tooltip" data-placement="bottom" title="<%= _.escape(library.get("description")) %>"><%= _.escape(library.get("description")).substring(0, 80) + "..." %></td>',
+        '<% } else { %>',
+          '<td><%= _.escape(library.get("description"))%></td>',
+        '<% } %>',
+      '<% } else { %>',
+        '<td></td>',
+      '<% } %>',
+      '<% if(library.get("synopsis")) { %>',
+        '<% if( (library.get("synopsis")).length> 120 ) { %>',
+          '<td data-toggle="tooltip" data-placement="bottom" title="<%= _.escape(library.get("synopsis")) %>"><%= _.escape(library.get("synopsis")).substring(0, 120) + "..." %></td>',
+        '<% } else { %>',
+          '<td><%= _.escape(library.get("synopsis"))%></td>',
+        '<% } %>',
+      '<% } else { %>',
+        '<td></td>',
+      '<% } %>',
+      '<% } else if(edit_mode){ %>',
+        '<td><textarea rows="4" class="form-control input_library_name" placeholder="name" ><%- library.get("name") %></textarea></td>',
+        '<td><textarea rows="4" class="form-control input_library_description" placeholder="description" ><%- library.get("description") %></textarea></td>',
+        '<td><textarea rows="4" class="form-control input_library_synopsis" placeholder="synopsis" ><%- library.get("synopsis") %></textarea></td>',
+      '<% } %>',
+      '<td class="right-center">',
+        '<% if( (library.get("public")) && (library.get("deleted") === false) ) { %>',
+          '<span data-toggle="tooltip" data-placement="top" title="Unrestricted library" style="color:grey;" class="fa fa-globe fa-lg public_lib_ico"> </span>',
+        '<% }%>',
+        '<button data-toggle="tooltip" data-placement="top" title="Modify \'<%- library.get("name") %>\'" class="primary-button btn-xs edit_library_btn" type="button" style="<% if(button_config.edit_library_btn === false) { print("display:none;") } %>"><span class="fa fa-pencil"></span></button>',
+        '<a href="#library/<%- library.get("id") %>/permissions"><button data-toggle="tooltip" data-placement="top" title="Manage \'<%- library.get("name") %>\'" class="primary-button btn-xs permission_library_btn" type="button" style="<% if(button_config.permission_library_btn === false) { print("display:none;") } %>"><span class="fa fa-group"></span></button></a>',
+        '<button data-toggle="tooltip" data-placement="top" title="Save changes" class="primary-button btn-xs save_library_btn" type="button" style="<% if(button_config.save_library_btn === false) { print("display:none;") } %>"><span class="fa fa-floppy-o"> Save</span></button>',
+        '<button data-toggle="tooltip" data-placement="top" title="Discard changes" class="primary-button btn-xs cancel_library_btn" type="button" style="<% if(button_config.cancel_library_btn === false) { print("display:none;") } %>"><span class="fa fa-times"> Cancel</span></button>',
+        '<button data-toggle="tooltip" data-placement="top" title="Delete <%- library.get("name") %>" class="primary-button btn-xs delete_library_btn" type="button" style="<% if(button_config.delete_library_btn === false) { print("display:none;") } %>"><span class="fa fa-trash-o"> Delete</span></button>',
+        '<button data-toggle="tooltip" data-placement="top" title="Undelete <%- library.get("name") %> " class="primary-button btn-xs undelete_library_btn" type="button" style="<% if(button_config.undelete_library_btn === false) { print("display:none;") } %>"><span class="fa fa-unlock"> Undelete</span></button>',
+      '</td>',
+    '</tr>'
+    ].join(''));
+  }
+
+});
+
+return {
+    LibraryRowView: LibraryRowView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-librarytoolbar-view.js b/client/galaxy/scripts/mvc/library/library-librarytoolbar-view.js
new file mode 100644
index 0000000..ea78d63
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-librarytoolbar-view.js
@@ -0,0 +1,258 @@
+define([
+  "libs/toastr",
+  "mvc/library/library-model"],
+function(mod_toastr,
+         mod_library_model) {
+/**
+ * This view represents the top part of the library page.
+ * It contains the tool bar with controls.
+ */
+var LibraryToolbarView = Backbone.View.extend({
+  el: '#center',
+
+  defaults: {
+    search_term: ''
+  },
+
+  events: {
+    'click #create_new_library_btn' : 'createLibraryFromModal',
+    'click #include_deleted_chk'    : 'includeDeletedChecked',
+    'click #lib_page_size_prompt'   : 'showPageSizePrompt',
+    'keyup .library-search-input'   : 'searchLibraries'
+  },
+
+  initialize: function( options ){
+    this.options = _.defaults( this.options || {}, options, this.defaults );
+    this.render();
+  },
+
+  render: function(){
+    var toolbar_template = this.templateToolBar();
+    var is_admin = false;
+    var is_anonym = true;
+    if ( Galaxy.user ){
+      is_admin = Galaxy.user.isAdmin();
+      is_anonym = Galaxy.user.isAnonymous();
+    }
+    this.$el.html(toolbar_template( { admin_user: is_admin, anon_user: is_anonym } ) );
+    if ( is_admin ){
+      this.$el.find( '#include_deleted_chk' )[0].checked = Galaxy.libraries.preferences.get( 'with_deleted' );
+    }
+  },
+
+  /**
+   * Renders the element that shows pages into its div within the toolbar.
+   */
+  renderPaginator: function( options ){
+    this.options = _.extend( this.options, options );
+    var paginator_template = this.templatePaginator();
+    this.$el.find( '#library_paginator' ).html( paginator_template({
+      show_page: parseInt( this.options.show_page ),
+      page_count: parseInt( this.options.page_count ),
+      total_libraries_count: this.options.total_libraries_count,
+      libraries_shown: this.options.libraries_shown
+    }));
+  },
+
+  /**
+   * User clicked on 'New library' button. Show modal to
+   * satisfy the wish.
+   */
+  createLibraryFromModal : function (event){
+    event.preventDefault();
+    event.stopPropagation();
+    var self = this;
+    this.modal = Galaxy.modal;
+    this.modal.show({
+      closing_events  : true,
+      title           : 'Create New Library',
+      body            : this.templateNewLibraryInModal(),
+      buttons         : {
+        'Create'      : function() { self.createNewLibrary(); },
+        'Close'       : function() { self.modal.hide(); }
+      }
+    });
+  },
+
+  /**
+   * Create the new library using the API asynchronously.
+   */
+  createNewLibrary: function(){
+    var libraryDetails = this.serializeNewLibrary();
+    if (this.validateNewLibrary(libraryDetails)){
+      var library = new mod_library_model.Library();
+      var self = this;
+      library.save(libraryDetails, {
+        success: function (library) {
+          Galaxy.libraries.libraryListView.collection.add(library);
+          self.modal.hide();
+          self.clearLibraryModal();
+          Galaxy.libraries.libraryListView.render();
+          mod_toastr.success('Library created.');
+        },
+        error: function(model, response){
+          if (typeof response.responseJSON !== "undefined"){
+            mod_toastr.error(response.responseJSON.err_msg);
+          } else {
+            mod_toastr.error('An error occurred.');
+          }
+        }
+      });
+    } else {
+      mod_toastr.error('Library name is missing.');
+    }
+    return false;
+  },
+
+  /**
+   * Show the user a prompt to change the number of libraries shown per page.
+   */
+  showPageSizePrompt: function(){
+    var library_page_size = prompt( 'How many libraries per page do you want to see?', Galaxy.libraries.preferences.get( 'library_page_size' ) );
+    if ( ( library_page_size != null ) && ( library_page_size == parseInt( library_page_size ) ) ) {
+      Galaxy.libraries.preferences.set( { 'library_page_size': parseInt( library_page_size ) } );
+      Galaxy.libraries.libraryListView.render( { show_page: 1 } );
+    }
+  },
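+
+  /* The loose `==` above doubles as an integer check, since prompt()
+     returns a string: '20' == parseInt('20') is true, while '2.5' and
+     'abc' fail ('2.5' == 2 is false; parseInt('abc') is NaN). Cancelling
+     the prompt returns null and is caught by the first condition. */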
+
+  /**
+   * Clear the library modal once it is saved.
+   */
+  clearLibraryModal : function(){
+    $("input[name='Name']").val('');
+    $("input[name='Description']").val('');
+    $("input[name='Synopsis']").val('');
+  },
+
+  /**
+   * Prepare new library variables to be submitted to API.
+   */
+  serializeNewLibrary : function(){
+    return {
+      name: $("input[name='Name']").val(),
+      description: $("input[name='Description']").val(),
+      synopsis: $("input[name='Synopsis']").val()
+    };
+  },
+
+  /**
+   * Check whether entered values are valid.
+   */
+  validateNewLibrary: function( libraryDetails ){
+      return libraryDetails.name !== '';
+  },
+
+  /**
+   * Include or exclude deleted libraries in the view.
+   */
+  includeDeletedChecked: function( event ){
+    if (event.target.checked){
+        Galaxy.libraries.preferences.set( { 'with_deleted': true } );
+        Galaxy.libraries.libraryListView.fetchDeleted();
+    } else{
+        Galaxy.libraries.preferences.set( { 'with_deleted': false } );
+        Galaxy.libraries.libraryListView.render();
+    }
+  },
+
+  /**
+   * Take the contents of the search field and send it to the list view
+   * to query the collection of libraries.
+   */
+  searchLibraries: function(event){
+    var search_term = $(".library-search-input").val();
+    this.options.search_term = search_term;
+    Galaxy.libraries.libraryListView.searchLibraries(search_term);
+  },
+
+  templateToolBar: function(){
+    return _.template([
+      '<div class="library_style_container">',
+        '<div id="toolbar_form">',
+          '<div id="library_toolbar">',
+            '<form class="form-inline" role="form">',
+              '<span><strong><a href="#" title="Go to first page">DATA LIBRARIES</a></strong></span>',
+              '<span id="library_paginator" class="library-paginator">',
+              // paginator will append here
+              '</span>',
+              '<div class="form-group toolbar-item">',
+                '<input type="text" class="form-control library-search-input" placeholder="Search" size="30">',
+              '</div>',
+              // only admins see the following
+              '<% if(admin_user === true) { %>',
+                  '<div class="checkbox toolbar-item" style="height: 20px;">',
+                    '<label>',
+                      '<input id="include_deleted_chk" type="checkbox">',
+                        ' include deleted ',
+                      '</input>',
+                    '</label>',
+                  '</div>',
+                  '<span class="toolbar-item" data-toggle="tooltip" data-placement="top" title="Create New Library">',
+                    '<button id="create_new_library_btn" class="primary-button btn-xs" type="button"><span class="fa fa-plus"></span> New Library</button>',
+                  '</span>',
+              '<% } %>',
+              '<span class="help-button" data-toggle="tooltip" data-placement="top" title="Visit Libraries Wiki">',
+                '<a href="https://wiki.galaxyproject.org/DataLibraries/screen/ListOfLibraries" target="_blank">',
+                  '<button class="primary-button" type="button"><span class="fa fa-question-circle"></span> Help</button>',
+                '</a>',
+              '</span>',
+            '</form>',
+          '</div>',
+        '</div>',
+        '<div id="libraries_element">',
+        // table with libraries will append here
+        '</div>',
+      '</div>'
+    ].join(''));
+  },
+
+  templatePaginator: function(){
+    return _.template([
+    '<ul class="pagination pagination-sm">',
+      '<% if ( ( show_page - 1 ) > 0 ) { %>',
+      '<% if ( ( show_page - 1 ) > page_count ) { %>', // we are on higher page than total page count
+        '<li><a href="#page/1"><span class="fa fa-angle-double-left"></span></a></li>',
+        '<li class="disabled"><a href="#page/<% print( show_page ) %>"><% print( show_page - 1 ) %></a></li>',
+      '<% } else { %>',
+        '<li><a href="#page/1"><span class="fa fa-angle-double-left"></span></a></li>',
+        '<li><a href="#page/<% print( show_page - 1 ) %>"><% print( show_page - 1 ) %></a></li>',
+      '<% } %>',
+      '<% } else { %>', // we are on the first page
+        '<li class="disabled"><a href="#page/1"><span class="fa fa-angle-double-left"></span></a></li>',
+        '<li class="disabled"><a href="#page/<% print( show_page ) %>"><% print( show_page - 1 ) %></a></li>',
+      '<% } %>',
+        '<li class="active">',
+          '<a href="#page/<% print( show_page ) %>"><% print( show_page ) %></a>',
+        '</li>',
+      '<% if ( ( show_page ) < page_count ) { %>',
+        '<li><a href="#page/<% print( show_page + 1 ) %>"><% print( show_page + 1 ) %></a></li>',
+        '<li><a href="#page/<% print( page_count ) %>"><span class="fa fa-angle-double-right"></span></a></li>',
+      '<% } else { %>',
+        '<li class="disabled"><a href="#page/<% print( show_page  ) %>"><% print( show_page + 1 ) %></a></li>',
+        '<li class="disabled"><a href="#page/<% print( page_count ) %>"><span class="fa fa-angle-double-right"></span></a></li>',
+      '<% } %>',
+    '</ul>',
+    '<span id="lib_page_size_prompt">',
+      ' showing <a data-toggle="tooltip" data-placement="top" title="Click to change the number of libraries on page"><%- libraries_shown %></a> of <%- total_libraries_count %> libraries',
+    '</span>'
+    ].join(''));
+  },
+
+  templateNewLibraryInModal: function(){
+    return _.template([
+      '<div id="new_library_modal">',
+        '<form>',
+          '<input type="text" name="Name" value="" placeholder="Name" autofocus>',
+          '<input type="text" name="Description" value="" placeholder="Description">',
+          '<input type="text" name="Synopsis" value="" placeholder="Synopsis">',
+        '</form>',
+      '</div>'
+    ].join(''));
+  }
+});
+
+return {
+  LibraryToolbarView: LibraryToolbarView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/library/library-model.js b/client/galaxy/scripts/mvc/library/library-model.js
new file mode 100644
index 0000000..91f00cd
--- /dev/null
+++ b/client/galaxy/scripts/mvc/library/library-model.js
@@ -0,0 +1,246 @@
+define([], function() {
+
+// ============================================================================
+// LIBRARY RELATED MODELS
+
+    var Library = Backbone.Model.extend({
+      urlRoot: Galaxy.root + 'api/libraries/',
+
+      /** Based on show_deleted, would this library show in the list of libraries?
+       *  @param {Boolean} show_deleted are we including deleted libraries?
+       */
+      isVisible : function(show_deleted){
+          var isVisible = true;
+          if( (!show_deleted) && (this.get('deleted')) ){
+              isVisible = false;
+          }
+          return isVisible;
+      }
+    });
+
+    var Libraries = Backbone.Collection.extend({
+      urlRoot: Galaxy.root + 'api/libraries',
+
+      model: Library,
+
+      sort_key: 'name', // default
+
+      sort_order: null, // default
+
+      initialize : function(options){
+          options = options || {};
+      },
+
+      /**
+       * Search the collection and return only the models that have
+       * the search term in their names.
+       * @param {string} search_term the term to search for
+       */
+      search : function(search_term){
+        if (search_term === "") return this;
+        var lowercase_term = search_term.toLowerCase();
+        return this.filter(function(data) {
+          var lowercase_name = data.get("name").toLowerCase();
+          return lowercase_name.indexOf(lowercase_term) !== -1;
+        });
+      },
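+
+      /* Illustrative behaviour: given libraries named 'RNA-seq' and
+         'Training', collection.search('rna') returns only the 'RNA-seq'
+         model (case-insensitive substring match on the name), while
+         collection.search('') returns the whole collection. */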
+
+      /** Get every 'shown' library in this collection based on the deleted filter
+       *  @param {Boolean} show_deleted are we including deleted libraries?
+       *  @returns {Libraries} a new collection containing only the visible libraries
+       */
+      getVisible : function(show_deleted, filters){
+          filters = filters || [];
+          var filteredLibraries = new Libraries( this.filter( function( item ){
+              return item.isVisible(show_deleted);
+          }));
+
+          return filteredLibraries;
+      },
+
+      /** Sort collection by library name (ascending) and return the sorted
+       *  collection
+       */
+      sortByNameAsc: function(){
+        this.comparator = function(libraryA, libraryB){
+          if (libraryA.get('name').toLowerCase() > libraryB.get('name').toLowerCase()) {
+            return 1; // after
+          }
+          if (libraryB.get('name').toLowerCase() > libraryA.get('name').toLowerCase()) {
+            return -1; // before
+          }
+          return 0; // equal
+        };
+        this.sort();
+        return this;
+      },
+
+      /** Sort collection by library name (descending) and return the sorted
+       *  collection
+       */
+      sortByNameDesc: function(){
+        this.comparator = function(libraryA, libraryB){
+          if (libraryA.get('name').toLowerCase() > libraryB.get('name').toLowerCase()) {
+            return -1; // before
+          }
+          if (libraryB.get('name').toLowerCase() > libraryA.get('name').toLowerCase()) {
+            return 1; // after
+          }
+          return 0; // equal
+        };
+        this.sort();
+        return this;
+      }
+
+    });
+
+// ============================================================================
+// FOLDER RELATED MODELS
+
+    var LibraryItem = Backbone.Model.extend({
+    });
+
+    var Ldda = LibraryItem.extend({
+      urlRoot : Galaxy.root + 'api/libraries/datasets/'
+    });
+
+    var FolderAsModel = LibraryItem.extend({
+      urlRoot: Galaxy.root + 'api/folders/'
+    });
+
+    var Folder = Backbone.Collection.extend({
+      model: LibraryItem,
+
+      /** Sort collection by item name (ascending) and return the sorted
+       *  collection. Folders go before datasets.
+       */
+      sortByNameAsc: function(){
+        this.comparator = function(itemA, itemB){
+          if (itemA.get('type') === itemB.get('type')){
+            if (itemA.get('name').toLowerCase() > itemB.get('name').toLowerCase()) {
+              return 1; // after
+            }
+            if (itemB.get('name').toLowerCase() > itemA.get('name').toLowerCase()) {
+              return -1; // before
+            }
+            return 0; // equal
+          } else {
+            if (itemA.get('type') === 'folder'){
+              return -1; // folder is always before dataset
+            } else {
+              return 1;
+            }
+          }
+        };
+        this.sort();
+        return this;
+      },
+
+      /** Sort collection by item name (descending) and return the sorted
+       *  collection. Folders go before datasets.
+       */
+      sortByNameDesc: function(){
+        this.comparator = function(itemA, itemB){
+          if (itemA.get('type') === itemB.get('type')){
+            if (itemA.get('name').toLowerCase() > itemB.get('name').toLowerCase()) {
+              return -1; // before
+            }
+            if (itemB.get('name').toLowerCase() > itemA.get('name').toLowerCase()) {
+              return 1; // after
+            }
+            return 0; // equal
+          } else {
+            if (itemA.get('type') === 'folder'){
+              return -1; // folder is always before dataset
+            } else {
+              return 1;
+            }
+          }
+        };
+        this.sort();
+        return this;
+      }
+    });
+
+    var FolderContainer = Backbone.Model.extend({
+      defaults : {
+          folder : new Folder(),
+          urlRoot : Galaxy.root + 'api/folders/',
+          id : "unknown"
+      },
+      parse : function(obj) {
+        // empty the collection
+        this.get("folder").reset();
+        // response is not a simple array, it contains metadata
+        // this will update the inner collection
+        for (var i = 0; i < obj.folder_contents.length; i++) {
+          if (obj.folder_contents[i].type === 'folder'){
+            var folder_item = new FolderAsModel(obj.folder_contents[i]);
+            this.get("folder").add(folder_item);
+          } else if (obj.folder_contents[i].type === 'file'){
+            var file_item = new Ldda(obj.folder_contents[i]);
+            this.get("folder").add(file_item);
+          } else {
+            Galaxy.emit.error('Unknown folder item type encountered while parsing response.');
+          }
+        }
+        return obj;
+      }
+    });
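+
+    /* Minimal sketch of the payload parse() expects; the field values
+       here are hypothetical:
+
+           {
+             "metadata": { "...": "..." },
+             "folder_contents": [
+               { "type": "folder", "id": "F123", "name": "sub folder" },
+               { "type": "file",   "id": "d456", "name": "reads.fastq" }
+             ]
+           }
+
+       Folders become FolderAsModel instances, files become Ldda
+       instances, and anything else is reported via Galaxy.emit.error. */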
+
+
+// ============================================================================
+// HISTORY RELATED MODELS
+// TODO UNITE
+
+    var HistoryItem = Backbone.Model.extend({
+      urlRoot : Galaxy.root + 'api/histories/'
+    });
+
+    var HistoryContents = Backbone.Collection.extend({
+      urlRoot : Galaxy.root + 'api/histories/',
+      initialize: function(options){
+        this.id = options.id;
+      },
+      url : function(){
+        return this.urlRoot + this.id + '/contents';
+      },
+      model : HistoryItem
+    });
+
+    var GalaxyHistory = Backbone.Model.extend({
+      urlRoot : Galaxy.root + 'api/histories/'
+    });
+
+    var GalaxyHistories = Backbone.Collection.extend({
+      url : Galaxy.root + 'api/histories',
+      model : GalaxyHistory
+    });
+
+// ============================================================================
+// JSTREE MODEL
+    /** Represents a folder structure parsable by the jstree component. */
+
+    var Jstree = Backbone.Model.extend({
+      urlRoot: Galaxy.root + 'api/remote_files'
+    });
+
+return {
+    Library: Library,
+    Libraries : Libraries,
+    Item : Ldda,
+    Ldda : Ldda,
+    FolderAsModel : FolderAsModel,
+    Folder : Folder,
+    FolderContainer : FolderContainer,
+    HistoryItem : HistoryItem,
+    HistoryContents : HistoryContents,
+    GalaxyHistory : GalaxyHistory,
+    GalaxyHistories : GalaxyHistories,
+    Jstree: Jstree
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/list/list-item.js b/client/galaxy/scripts/mvc/list/list-item.js
new file mode 100644
index 0000000..77f88b6
--- /dev/null
+++ b/client/galaxy/scripts/mvc/list/list-item.js
@@ -0,0 +1,489 @@
+define([
+    'mvc/base-mvc',
+    'utils/localization'
+], function( BASE_MVC, _l ){
+
+'use strict';
+
+var logNamespace = 'list';
+//==============================================================================
+/** A view which, when first rendered, shows only summary data/attributes, but
+ *      can be expanded to show further details (and optionally fetch those
+ *      details from the server).
+ */
+var ExpandableView = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend({
+    _logNamespace : logNamespace,
+
+    //TODO: Although the reasoning behind them is different, this shares a lot with HiddenUntilActivated above: combine them
+    //PRECONDITION: model must have method hasDetails
+    //PRECONDITION: subclasses must have templates.el and templates.details
+
+    initialize : function( attributes ){
+        /** are the details of this view expanded/shown or not? */
+        this.expanded   = attributes.expanded || false;
+        this.log( '\t expanded:', this.expanded );
+        this.fxSpeed = attributes.fxSpeed !== undefined? attributes.fxSpeed : this.fxSpeed;
+    },
+
+    // ........................................................................ render main
+    /** jq fx speed */
+    fxSpeed : 'fast',
+
+    /** Render this content, set up ui.
+     *  @param {Number or String} speed   the speed of the render
+     */
+    render : function( speed ){
+        var $newRender = this._buildNewRender();
+        this._setUpBehaviors( $newRender );
+        this._queueNewRender( $newRender, speed );
+        return this;
+    },
+
+    /** Build a temp div containing the new children for the view's $el.
+     *      If the view is already expanded, build the details as well.
+     */
+    _buildNewRender : function(){
+        // create a new render using a skeleton template, render title buttons, render body, and set up events, etc.
+        var $newRender = $( this.templates.el( this.model.toJSON(), this ) );
+        if( this.expanded ){
+            this.$details( $newRender ).replaceWith( this._renderDetails().show() );
+        }
+        return $newRender;
+    },
+
+    /** Fade out the old el, swap in the new contents, then fade in.
+     *  @param {Number or String} speed   jq speed to use for rendering effects
+     *  @fires rendered when rendered
+     */
+    _queueNewRender : function( $newRender, speed ) {
+        speed = ( speed === undefined )?( this.fxSpeed ):( speed );
+        var view = this;
+
+        if( speed === 0 ){
+            view._swapNewRender( $newRender );
+            view.trigger( 'rendered', view );
+
+        } else {
+            $( view ).queue( 'fx', [
+                function( next ){
+                    view.$el.fadeOut( speed, next );
+                },
+                function( next ){
+                    view._swapNewRender( $newRender );
+                    next();
+                },
+                function( next ){
+                    view.$el.fadeIn( speed, next );
+                },
+                function( next ){
+                    view.trigger( 'rendered', view );
+                    next();
+                }
+            ]);
+        }
+    },
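+
+    /* The custom 'fx' queue above serialises the transition: fade the old
+       render out, swap the DOM in one step, fade the new render in, then
+       fire 'rendered'. A speed of 0 bypasses the queue and swaps
+       synchronously. */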
+
+    /** empty out the current el, move the $newRender's children in */
+    _swapNewRender : function( $newRender ){
+        return this.$el.empty()
+            .attr( 'class', _.isFunction( this.className )? this.className(): this.className )
+            .append( $newRender.children() );
+    },
+
+    /** set up js behaviors, event handlers for elements within the given container
+     *  @param {jQuery} $where jq object that contains the elements to process (defaults to this.$el)
+     */
+    _setUpBehaviors : function( $where ){
+        $where = $where || this.$el;
+        // set up canned behavior on children (bootstrap, popupmenus, editable_text, etc.)
+        //make_popup_menus( $where );
+        $where.find( '[title]' ).tooltip({ placement : 'bottom' });
+    },
+
+    // ......................................................................... details
+    /** shortcut to details DOM (as jQ) */
+    $details : function( $where ){
+        $where = $where || this.$el;
+        return $where.find( '> .details' );
+    },
+
+    /** build the DOM for the details and set up behaviors on it */
+    _renderDetails : function(){
+        var $newDetails = $( this.templates.details( this.model.toJSON(), this ) );
+        this._setUpBehaviors( $newDetails );
+        return $newDetails;
+    },
+
+    // ......................................................................... expansion/details
+    /** Show or hide the details
+     *  @param {Boolean} expand if true, expand; if false, collapse
+     */
+    toggleExpanded : function( expand ){
+        expand = ( expand === undefined )?( !this.expanded ):( expand );
+        if( expand ){
+            this.expand();
+        } else {
+            this.collapse();
+        }
+        return this;
+    },
+
+    /** Render and show the full, detailed body of this view including extra data and controls.
+     *      note: if the model does not have detailed data, fetch that data before showing the body
+     *  @fires expanded when a body has been expanded
+     */
+    expand : function(){
+        var view = this;
+        return view._fetchModelDetails().always( function(){
+                view._expand();
+            });
+    },
+
+    /** Check for model details and, if none, fetch them.
+     *  @returns {jQuery.promise} the model.fetch.xhr if details are being fetched, an empty promise if not
+     */
+    _fetchModelDetails : function(){
+        if( !this.model.hasDetails() ){
+            return this.model.fetch();
+        }
+        return jQuery.when();
+    },
+
+    /** Inner fn called when expand (public) has fetched the details */
+    _expand : function(){
+        var view = this,
+            $newDetails = view._renderDetails();
+        view.$details().replaceWith( $newDetails );
+        // needs to be set after the above or the slide will not show
+        view.expanded = true;
+        view.$details().slideDown( view.fxSpeed, function(){
+            view.trigger( 'expanded', view );
+        });
+    },
+
+    /** Hide the body/details of an HDA.
+     *  @fires collapsed when a body has been collapsed
+     */
+    collapse : function(){
+        this.debug( this + '(ExpandableView).collapse' );
+        var view = this;
+        view.expanded = false;
+        this.$details().slideUp( view.fxSpeed, function(){
+            view.trigger( 'collapsed', view );
+        });
+    }
+
+});
+
+
+//==============================================================================
+/** A view that is displayed in some larger list/grid/collection.
+ *      Inherits from Expandable, Selectable, Draggable.
+ *  The DOM contains warnings, a title bar, and a series of primary action controls.
+ *      Primary actions are meant to be easily accessible item functions (such as delete)
+ *      that are rendered in the title bar.
+ *
+ *  Details are rendered when the user clicks the title bar or presses enter/space when
+ *      the title bar is in focus.
+ *
+ *  Designed as a base class for history panel contents - but usable elsewhere (I hope).
+ */
+var ListItemView = ExpandableView.extend(
+        BASE_MVC.mixin( BASE_MVC.SelectableViewMixin, BASE_MVC.DraggableViewMixin, {
+
+    tagName     : 'div',
+    className   : 'list-item',
+
+    /** Set up the base class and all mixins */
+    initialize : function( attributes ){
+        ExpandableView.prototype.initialize.call( this, attributes );
+        BASE_MVC.SelectableViewMixin.initialize.call( this, attributes );
+        BASE_MVC.DraggableViewMixin.initialize.call( this, attributes );
+        this._setUpListeners();
+    },
+
+    /** event listeners */
+    _setUpListeners : function(){
+        // hide the primary actions in the title bar when selectable and narrow
+        this.on( 'selectable', function( isSelectable ){
+            if( isSelectable ){
+                this.$( '.primary-actions' ).hide();
+            } else {
+                this.$( '.primary-actions' ).show();
+            }
+        }, this );
+        return this;
+    },
+
+    // ........................................................................ rendering
+    /** In this override, call methods to build warnings, titlebar and primary actions */
+    _buildNewRender : function(){
+        var $newRender = ExpandableView.prototype._buildNewRender.call( this );
+        $newRender.children( '.warnings' ).replaceWith( this._renderWarnings() );
+        $newRender.children( '.title-bar' ).replaceWith( this._renderTitleBar() );
+        $newRender.children( '.primary-actions' ).append( this._renderPrimaryActions() );
+        $newRender.find( '> .title-bar .subtitle' ).replaceWith( this._renderSubtitle() );
+        return $newRender;
+    },
+
+    /** In this override, render the selector controls and set up dragging before the swap */
+    _swapNewRender : function( $newRender ){
+        ExpandableView.prototype._swapNewRender.call( this, $newRender );
+        if( this.selectable ){ this.showSelector( 0 ); }
+        if( this.draggable ){ this.draggableOn(); }
+        return this.$el;
+    },
+
+    /** Render any warnings the item may need to show (e.g. "I'm deleted") */
+    _renderWarnings : function(){
+        var view = this,
+            $warnings = $( '<div class="warnings"></div>' ),
+            json = view.model.toJSON();
+        //TODO:! unordered (map)
+        _.each( view.templates.warnings, function( templateFn ){
+            $warnings.append( $( templateFn( json, view ) ) );
+        });
+        return $warnings;
+    },
+
+    /** Render the title bar (the main/exposed SUMMARY dom element) */
+    _renderTitleBar : function(){
+        return $( this.templates.titleBar( this.model.toJSON(), this ) );
+    },
+
+    /** Return an array of jQ objects containing common/easily-accessible item controls */
+    _renderPrimaryActions : function(){
+        // override this
+        return [];
+    },
+
+    /** Render the title bar (the main/exposed SUMMARY dom element) */
+    _renderSubtitle : function(){
+        return $( this.templates.subtitle( this.model.toJSON(), this ) );
+    },
+
+    // ......................................................................... events
+    /** event map */
+    events : {
+        // expand the body when the title is clicked or when in focus and space or enter is pressed
+        'click .title-bar'      : '_clickTitleBar',
+        'keydown .title-bar'    : '_keyDownTitleBar',
+        'click .selector'       : 'toggleSelect'
+    },
+
+    /** expand when the title bar is clicked */
+    _clickTitleBar : function( event ){
+        event.stopPropagation();
+        if( event.altKey ){
+            this.toggleSelect( event );
+            if( !this.selectable ){
+                this.showSelector();
+            }
+        } else {
+            this.toggleExpanded();
+        }
+    },
+
+    /** expand when the title bar is in focus and enter or space is pressed */
+    _keyDownTitleBar : function( event ){
+        // bail (with propagation) if keydown and not space or enter
+        var KEYCODE_SPACE = 32, KEYCODE_RETURN = 13;
+        if( event && ( event.type === 'keydown' )
+        &&( event.keyCode === KEYCODE_SPACE || event.keyCode === KEYCODE_RETURN ) ){
+            this.toggleExpanded();
+            event.stopPropagation();
+            return false;
+        }
+        return true;
+    },
+
+    // ......................................................................... misc
+    /** String representation */
+    toString : function(){
+        var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
+        return 'ListItemView(' + modelString + ')';
+    }
+}));
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+ListItemView.prototype.templates = (function(){
+
+    var elTemplato = BASE_MVC.wrapTemplate([
+        '<div class="list-element">',
+            // errors, messages, etc.
+            '<div class="warnings"></div>',
+
+            // multi-select checkbox
+            '<div class="selector">',
+                '<span class="fa fa-2x fa-square-o"></span>',
+            '</div>',
+            // space for title bar buttons - gen. floated to the right
+            '<div class="primary-actions"></div>',
+            '<div class="title-bar"></div>',
+
+            // expandable area for more details
+            '<div class="details"></div>',
+        '</div>'
+    ]);
+
+    var warnings = {};
+
+    var titleBarTemplate = BASE_MVC.wrapTemplate([
+        // adding a tabindex here allows focusing the title bar and the use of keydown to expand the dataset display
+        '<div class="title-bar clear" tabindex="0">',
+            //TODO: prob. belongs in dataset-list-item
+            '<span class="state-icon"></span>',
+            '<div class="title">',
+                '<span class="name"><%- element.name %></span>',
+            '</div>',
+            '<div class="subtitle"></div>',
+        '</div>'
+    ], 'element' );
+
+    var subtitleTemplate = BASE_MVC.wrapTemplate([
+        // override this
+        '<div class="subtitle"></div>'
+    ]);
+
+    var detailsTemplate = BASE_MVC.wrapTemplate([
+        // override this
+        '<div class="details"></div>'
+    ]);
+
+    return {
+        el          : elTemplato,
+        warnings    : warnings,
+        titleBar    : titleBarTemplate,
+        subtitle    : subtitleTemplate,
+        details     : detailsTemplate
+    };
+}());
+
+
+//==============================================================================
+/** A view that is displayed in some larger list/grid/collection.
+ *  *AND* can display some sub-list of its own when expanded (e.g. dataset collections).
+ *  This list will 'foldout' when the item is expanded depending on this.foldoutStyle:
+ *      If 'foldout': will expand vertically to show the nested list
+ *      If 'drilldown': will overlay the parent list
+ *
+ *  Inherits from ListItemView.
+ *
+ *  _renderDetails does the work of creating this.details: a sub-view that shows the nested list
+ */
+var FoldoutListItemView = ListItemView.extend({
+
+    /** If 'foldout': show the sub-panel inside the expanded item
+     *  If 'drilldown': only fire events and handle by pub-sub
+     *      (allow the panel containing this item to attach it, hide itself, etc.)
+     */
+    foldoutStyle        : 'foldout',
+    /** Panel view class to instantiate for the sub-panel */
+    foldoutPanelClass   : null,
+
+    /** override to:
+     *      add attributes foldoutStyle and foldoutPanelClass for config polymorphism
+     *      disrespect attributes.expanded if drilldown
+     */
+    initialize : function( attributes ){
+        this.foldoutStyle = attributes.foldoutStyle || this.foldoutStyle;
+        this.foldoutPanelClass = attributes.foldoutPanelClass || this.foldoutPanelClass;
+        // per the doc comment above, drilldown items never start expanded
+        if( this.foldoutStyle === 'drilldown' ){ attributes.expanded = false; }
+
+        ListItemView.prototype.initialize.call( this, attributes );
+        this.foldout = this._createFoldoutPanel();
+    },
+
+    /** in this override, attach the foldout panel when rendering details */
+    _renderDetails : function(){
+        if( this.foldoutStyle === 'drilldown' ){ return $(); }
+        var $newDetails = ListItemView.prototype._renderDetails.call( this );
+        return this._attachFoldout( this.foldout, $newDetails );
+    },
+
+    /** In this override, handle collection expansion. */
+    _createFoldoutPanel : function(){
+        var model = this.model;
+        var FoldoutClass = this._getFoldoutPanelClass( model ),
+            options = this._getFoldoutPanelOptions( model ),
+            foldout = new FoldoutClass( _.extend( options, {
+                model           : model
+            }));
+        return foldout;
+    },
+
+    /** Stub to return proper foldout panel class */
+    _getFoldoutPanelClass : function(){
+        // override
+        return this.foldoutPanelClass;
+    },
+
+    /** Stub to return proper foldout panel options */
+    _getFoldoutPanelOptions : function(){
+        return {
+            // propagate foldout style down
+            foldoutStyle : this.foldoutStyle,
+            fxSpeed      : this.fxSpeed
+        };
+    },
+
+    /** Render the foldout panel inside the view, hiding controls */
+    _attachFoldout : function( foldout, $whereTo ){
+        $whereTo = $whereTo || this.$( '> .details' );
+        this.foldout = foldout.render( 0 );
+        foldout.$( '> .controls' ).hide();
+        return $whereTo.append( foldout.$el );
+    },
+
+    /** In this override, branch on foldoutStyle to show expanded */
+    expand : function(){
+        var view = this;
+        return view._fetchModelDetails()
+            .always(function(){
+                if( view.foldoutStyle === 'foldout' ){
+                    view._expand();
+                } else if( view.foldoutStyle === 'drilldown' ){
+                    view._expandByDrilldown();
+                }
+            });
+    },
+
+    /** For drilldown, set up close handler and fire expanded:drilldown
+     *      containing views can listen to this and handle other things
+     *      (like hiding themselves) by listening for expanded/collapsed:drilldown
+     */
+    _expandByDrilldown : function(){
+        var view = this;
+        // attachment and rendering done by listener
+        view.listenTo( view.foldout, 'close', function(){
+            view.trigger( 'collapsed:drilldown', view, view.foldout );
+        });
+        view.trigger( 'expanded:drilldown', view, view.foldout );
+    }
+
+});
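+
+/* Minimal sketch of how a containing panel might consume the drilldown
+   events fired above; `panel` and `itemView` are hypothetical names:
+
+       panel.listenTo( itemView, 'expanded:drilldown', function( view, foldout ){
+           // hide the parent list and attach the foldout panel in its place
+           panel.$el.hide();
+           panel.$el.parent().append( foldout.render().$el );
+       });
+       panel.listenTo( itemView, 'collapsed:drilldown', function(){
+           panel.$el.show();
+       });
+*/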
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+FoldoutListItemView.prototype.templates = (function(){
+
+    var detailsTemplate = BASE_MVC.wrapTemplate([
+        '<div class="details">',
+            // override with more info (that goes above the panel)
+        '</div>'
+    ], 'collection' );
+
+    return _.extend( {}, ListItemView.prototype.templates, {
+        details : detailsTemplate
+    });
+}());
+
+
+//==============================================================================
+    return {
+        ExpandableView                  : ExpandableView,
+        ListItemView                    : ListItemView,
+        FoldoutListItemView             : FoldoutListItemView
+    };
+});
diff --git a/client/galaxy/scripts/mvc/list/list-view.js b/client/galaxy/scripts/mvc/list/list-view.js
new file mode 100644
index 0000000..c85071d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/list/list-view.js
@@ -0,0 +1,1035 @@
+define([
+    "mvc/list/list-item",
+    "ui/loading-indicator",
+    "mvc/base-mvc",
+    "utils/localization",
+    "ui/search-input"
+], function( LIST_ITEM, LoadingIndicator, BASE_MVC, _l ){
+
+'use strict';
+
+var logNamespace = 'list';
+/* ============================================================================
+TODO:
+
+============================================================================ */
+/** @class View for a list/collection of models and the sub-views of those models.
+ *      Sub-views must inherit from ListItemView (or at least implement its interface).
+ *      (For a list panel that also includes some 'container' model (History->HistoryContents)
+ *      use ModelWithListPanel)
+ *
+ *  Allows for:
+ *      searching collection/sub-views
+ *      selecting/multi-selecting sub-views
+ *
+ *  Currently used:
+ *      for dataset/dataset-choice
+ *      as superclass of ModelListPanel
+ */
+var ListPanel = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend(/** @lends ListPanel.prototype */{
+    _logNamespace : logNamespace,
+
+    /** class to use for constructing the sub-views */
+    viewClass           : LIST_ITEM.ListItemView,
+    /** class used for constructing the collection of sub-view models */
+    collectionClass     : Backbone.Collection,
+
+    tagName             : 'div',
+    className           : 'list-panel',
+
+    /** speed (in ms, or a jQuery speed string) that jquery effects will use */
+    fxSpeed             : 'fast',
+
+    /** string to display when the collection has no contents */
+    emptyMsg            : _l( 'This list is empty' ),
+    /** displayed when no items match the search terms */
+    noneFoundMsg        : _l( 'No matching items found' ),
+    /** string used for search placeholder */
+    searchPlaceholder   : _l( 'search' ),
+
+    // ......................................................................... SET UP
+    /** Set up the view, set up storage, bind listeners to HistoryContents events
+     *  @param {Object} attributes optional settings for the list
+     */
+    initialize : function( attributes, options ){
+        attributes = attributes || {};
+        // set the logger if requested
+        if( attributes.logger ){
+            this.logger = attributes.logger;
+        }
+        this.log( this + '.initialize:', attributes );
+
+        // ---- instance vars
+        /** how quickly should jquery fx run? */
+        this.fxSpeed = _.has( attributes, 'fxSpeed' )?( attributes.fxSpeed ):( this.fxSpeed );
+
+        /** filters for displaying subviews */
+        this.filters = [];
+        /** current search terms */
+        this.searchFor = attributes.searchFor || '';
+
+        /** loading indicator */
+        // this.indicator = new LoadingIndicator( this.$el );
+
+        /** currently showing selectors on items? */
+        this.selecting = ( attributes.selecting !== undefined )? attributes.selecting : true;
+        //this.selecting = false;
+
+        /** cached selected item.model.ids to persist btwn renders */
+        this.selected = attributes.selected || [];
+        /** the last selected item.model.id */
+        this.lastSelected = null;
+
+        /** are sub-views draggable */
+        this.dragItems = attributes.dragItems || false;
+
+        /** list item view class (when passed models) */
+        this.viewClass = attributes.viewClass || this.viewClass;
+
+        /** list item views */
+        this.views = [];
+        /** list item models */
+        this.collection = attributes.collection || this._createDefaultCollection();
+
+        /** filter fns run over collection items to see if they should show in the list */
+        this.filters = attributes.filters || [];
+
+        /** override $scrollContainer fn via attributes - fn should return jq for elem to call scrollTo on */
+        this.$scrollContainer = attributes.$scrollContainer || this.$scrollContainer;
+
+        /** @type {String} generic title */
+        this.title = attributes.title || '';
+        /** @type {String} generic subtitle */
+        this.subtitle = attributes.subtitle || '';
+
+        this._setUpListeners();
+    },
+
+    // ------------------------------------------------------------------------ listeners
+    /** create any event listeners for the list */
+    _setUpListeners : function(){
+        this.off();
+
+        //TODO: move errorHandler down into list-view from history-view or
+        //  pass to global error handler (Galaxy)
+        this.on({
+            error: function( model, xhr, options, msg, details ){
+                //this.errorHandler( model, xhr, options, msg, details );
+                console.error( model, xhr, options, msg, details );
+            },
+            // show/hide the loading indicator
+            loading: function(){
+                this._showLoadingIndicator( 'loading...', 40 );
+            },
+            'loading-done': function(){
+                this._hideLoadingIndicator( 40 );
+            }
+        });
+
+        // re-trigger the first render under a different event name using once (for outside consumption)
+        this.once( 'rendered', function(){
+            this.trigger( 'rendered:initial', this );
+        });
+
+        this._setUpCollectionListeners();
+        this._setUpViewListeners();
+        return this;
+    },
+
+    /** create and return a collection for when none is initially passed */
+    _createDefaultCollection : function(){
+        // override
+        return new this.collectionClass([]);
+    },
+
+    /** listening for collection events */
+    _setUpCollectionListeners : function(){
+        this.log( this + '._setUpCollectionListeners', this.collection );
+        this.stopListening( this.collection );
+
+        // bubble up error events
+        this.listenTo( this.collection, {
+            error   : function( model, xhr, options, msg, details ){
+                this.trigger( 'error', model, xhr, options, msg, details );
+            },
+            update  : function( collection, options ){
+                var changes = options.changes;
+                // console.info( collection + ', update:', changes, '\noptions:', options );
+                // more than one: render everything
+                if( options.renderAll || ( changes.added.length + changes.removed.length > 1 ) ){
+                    return this.renderItems();
+                }
+                // otherwise, let the single add/remove handlers do it
+                if( changes.added.length === 1 ){
+                    return this.addItemView( _.first( changes.added ), collection, options );
+                }
+                if( changes.removed.length === 1 ){
+                    return this.removeItemView( _.first( changes.removed ), collection, options );
+                }
+            }
+        });
+        return this;
+    },
+
+    /** listening for sub-view events that bubble up with the 'view:' prefix */
+    _setUpViewListeners : function(){
+        this.log( this + '._setUpViewListeners' );
+
+        // shift to select a range
+        this.on({
+            'view:selected': function( view, ev ){
+                if( ev && ev.shiftKey && this.lastSelected ){
+                    var lastSelectedView = this.viewFromModelId( this.lastSelected );
+                    if( lastSelectedView ){
+                        this.selectRange( view, lastSelectedView );
+                    }
+                } else if( ev && ev.altKey && !this.selecting ){
+                    this.showSelectors();
+                }
+                this.selected.push( view.model.id );
+                this.lastSelected = view.model.id;
+            },
+
+            'view:de-selected': function( view, ev ){
+                this.selected = _.without( this.selected, view.model.id );
+            }
+        });
+    },
+
+    // ------------------------------------------------------------------------ rendering
+    /** Render this content, set up ui.
+     *  @param {Number or String} speed   the speed of the render
+     */
+    render : function( speed ){
+        this.log( this + '.render', speed );
+        var $newRender = this._buildNewRender();
+        this._setUpBehaviors( $newRender );
+        this._queueNewRender( $newRender, speed );
+        return this;
+    },
+
+    /** Build a temp div containing the new children for the view's $el. */
+    _buildNewRender : function(){
+        this.debug( this + '(ListPanel)._buildNewRender' );
+        var $newRender = $( this.templates.el( {}, this ) );
+        this._renderControls( $newRender );
+        this._renderTitle( $newRender );
+        this._renderSubtitle( $newRender );
+        this._renderSearch( $newRender );
+        this.renderItems( $newRender );
+        return $newRender;
+    },
+
+    /** Build a temp div containing the new children for the view's $el. */
+    _renderControls : function( $newRender ){
+        this.debug( this + '(ListPanel)._renderControls' );
+        var $controls = $( this.templates.controls( {}, this ) );
+        $newRender.find( '.controls' ).replaceWith( $controls );
+        return $controls;
+    },
+
+    /** return a jQuery object containing the title DOM */
+    _renderTitle : function( $where ){
+        //$where = $where || this.$el;
+        //$where.find( '.title' ).replaceWith( ... )
+    },
+
+    /** return a jQuery object containing the subtitle DOM (if any) */
+    _renderSubtitle : function( $where ){
+        //$where = $where || this.$el;
+        //$where.find( '.subtitle' ).replaceWith( ... )
+    },
+
+    /** Fade out the old el, swap in the new contents, then fade in.
+     *  @param {Number or String} speed   jq speed to use for rendering effects
+     *  @fires rendered when rendered
+     */
+    _queueNewRender : function( $newRender, speed ) {
+        speed = ( speed === undefined )?( this.fxSpeed ):( speed );
+        var panel = this;
+        panel.log( '_queueNewRender:', $newRender, speed );
+
+        $( panel ).queue( 'fx', [
+            function( next ){
+                panel.$el.fadeOut( speed, next );
+            },
+            function( next ){
+                panel._swapNewRender( $newRender );
+                next();
+            },
+            function( next ){
+                panel.$el.fadeIn( speed, next );
+            },
+            function( next ){
+                panel.trigger( 'rendered', panel );
+                next();
+            }
+        ]);
+    },
+
+    /** empty out the current el, move the $newRender's children in */
+    _swapNewRender : function( $newRender ){
+        this.$el.empty().attr( 'class', this.className ).append( $newRender.children() );
+        if( this.selecting ){ this.showSelectors( 0 ); }
+        return this;
+    },
+
+    /** Set up any behaviors, handlers (e.g. plugins) that need to be called when the entire view has been built but
+     *  not attached to the page yet.
+     */
+    _setUpBehaviors : function( $where ){
+        $where = $where || this.$el;
+        this.$controls( $where ).find('[title]').tooltip();
+        // set up the popup for actions available when multi selecting
+        this._renderMultiselectActionMenu( $where );
+        return this;
+    },
+
+    /** render a menu containing the actions available to sets of selected items */
+    _renderMultiselectActionMenu : function( $where ){
+        $where = $where || this.$el;
+        var $menu = $where.find( '.list-action-menu' ),
+            actions = this.multiselectActions();
+        if( !actions.length ){
+            return $menu.empty();
+        }
+
+        var $newMenu = $([
+            '<div class="list-action-menu btn-group">',
+                '<button class="list-action-menu-btn btn btn-default dropdown-toggle" data-toggle="dropdown">',
+                    _l( 'For all selected' ), '...',
+                '</button>',
+                '<ul class="dropdown-menu pull-right" role="menu">', '</ul>',
+            '</div>'
+        ].join(''));
+        var $actions = actions.map( function( action ){
+            var html = [ '<li><a href="javascript:void(0);">', action.html, '</a></li>' ].join( '' );
+            return $( html ).click( function( ev ){
+                ev.preventDefault();
+                return action.func( ev );
+            });
+        });
+        $newMenu.find( 'ul' ).append( $actions );
+        $menu.replaceWith( $newMenu );
+        return $newMenu;
+    },
+
+    /** return a list of plain objects used to render multiselect actions menu. Each object should have:
+     *      html: an html string used as the anchor contents
+     *      func: a function called when the anchor is clicked (passed the click event)
+     */
+    multiselectActions : function(){
+        return [];
+    },
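+
+    // An override returning one action might look like the following sketch
+    // (an illustrative assumption, not part of this module; the action simply
+    // logs the ids of the selected models):
+    //     multiselectActions : function(){
+    //         var panel = this;
+    //         return [{
+    //             html : _l( 'Log selected' ),
+    //             func : function( ev ){
+    //                 console.log( panel.getSelectedModels().pluck( 'id' ) );
+    //             }
+    //         }];
+    //     },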
+
+    // ------------------------------------------------------------------------ sub-$element shortcuts
+    /** the scroll container for this panel - can be $el, $el.parent(), or grandparent depending on context */
+    $scrollContainer : function( $where ){
+        // override or set via attributes.$scrollContainer
+        return ( $where || this.$el ).parent().parent();
+    },
+    /** convenience selector for the section that displays the list controls */
+    $controls : function( $where ){
+        return ( $where || this.$el ).find( '> .controls' );
+    },
+    /** list-items: where the subviews are contained in the view's dom */
+    $list : function( $where ){
+        return ( $where || this.$el ).find( '> .list-items' );
+    },
+    /** container where list messages are attached */
+    $messages : function( $where ){
+        //TODO: controls isn't really correct here (only for ModelListPanel)
+        return ( $where || this.$el ).find( '> .controls .messages' );
+    },
+    /** the message displayed when no views can be shown (no views, none matching search) */
+    $emptyMessage : function( $where ){
+        return ( $where || this.$el ).find( '> .empty-message' );
+    },
+
+    // ------------------------------------------------------------------------ hda sub-views
+    /** render the subviews for the list's collection */
+    renderItems : function( $whereTo ){
+        $whereTo = $whereTo || this.$el;
+        var panel = this;
+        panel.log( this + '.renderItems', $whereTo );
+
+        var $list = panel.$list( $whereTo );
+        panel.freeViews();
+        // console.log( 'views freed' );
+        //TODO:? cache and re-use views?
+        var shownModels = panel._filterCollection();
+        // console.log( 'models filtered:', shownModels );
+
+        panel.views = shownModels.map( function( itemModel ){
+            var view = panel._createItemView( itemModel );
+            return view;
+        });
+
+        $list.empty();
+        // console.log( 'list emptied' );
+        if( panel.views.length ){
+            panel._attachItems( $whereTo );
+            // console.log( 'items attached' );
+        }
+        panel._renderEmptyMessage( $whereTo ).toggle( !panel.views.length );
+        panel.trigger( 'views:ready', panel.views );
+
+        // console.log( '------------------------------------------- rendering items' );
+        return panel.views;
+    },
+
+    /** Filter the collection to only those models that should be currently viewed */
+    _filterCollection : function(){
+        // override this
+        var panel = this;
+        return panel.collection.filter( _.bind( panel._filterItem, panel ) );
+    },
+
+    /** Should the model be viewable in the current state?
+     *     Checks against this.filters and this.searchFor
+     */
+    _filterItem : function( model ){
+        // override this
+        var panel = this;
+        return ( _.every( panel.filters.map( function( fn ){ return fn.call( model ); }) ) )
+            && ( !panel.searchFor || model.matchesAll( panel.searchFor ) );
+    },
+
+    /** Create a view for a model and set up its listeners */
+    _createItemView : function( model ){
+        var ViewClass = this._getItemViewClass( model );
+        var options = _.extend( this._getItemViewOptions( model ), {
+            model : model
+        });
+        var view = new ViewClass( options );
+        this._setUpItemViewListeners( view );
+        return view;
+    },
+
+    /** Free a view for a model. Note: does not remove it from the DOM */
+    _destroyItemView : function( view ){
+        this.stopListening( view );
+        this.views = _.without( this.views, view );
+    },
+
+    _destroyItemViews : function(){
+        var self = this;
+        self.views.forEach( function( v ){
+            self.stopListening( v );
+        });
+        self.views = [];
+        return self;
+    },
+
+    /** free any sub-views the list has */
+    freeViews : function(){
+        return this._destroyItemViews();
+    },
+
+    /** Get the Backbone view class based on the model */
+    _getItemViewClass : function( model ){
+        // override this
+        return this.viewClass;
+    },
+
+    /** Get the options passed to the new view based on the model */
+    _getItemViewOptions : function( model ){
+        // override this
+        return {
+            //logger      : this.logger,
+            fxSpeed     : this.fxSpeed,
+            expanded    : false,
+            selectable  : this.selecting,
+            selected    : _.contains( this.selected, model.id ),
+            draggable   : this.dragItems
+        };
+    },
+
+    /** Set up listeners for new models */
+    _setUpItemViewListeners : function( view ){
+        var panel = this;
+        // send all events to the panel, re-namespacing them with the view prefix
+        this.listenTo( view, 'all', function(){
+            var args = Array.prototype.slice.call( arguments, 0 );
+            args[0] = 'view:' + args[0];
+            panel.trigger.apply( panel, args );
+        });
+
+        // drag multiple - hijack ev.dataTransfer.setData to add all selected items
+        this.listenTo( view, 'draggable:dragstart', function( ev, v ){
+            //TODO: set multiple drag data here
+            var json = {},
+                selected = this.getSelectedModels();
+            if( selected.length ){
+                json = selected.toJSON();
+            } else {
+                json = [ v.model.toJSON() ];
+            }
+            ev.dataTransfer.setData( 'text', JSON.stringify( json ) );
+            //ev.dataTransfer.setDragImage( v.el, 60, 60 );
+        }, this );
+
+        return panel;
+    },
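+
+    // On the drop target (outside this module - a sketch), the payload set in
+    // the dragstart handler above can be read back with the standard DnD API:
+    //     var models = JSON.parse( ev.dataTransfer.getData( 'text' ) );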
+
+    /** Attach views in this.views to the model based on $whereTo */
+    _attachItems : function( $whereTo ){
+        var self = this;
+        // console.log( '_attachItems:', $whereTo, this.$list( $whereTo ) );
+        //ASSUMES: $list has been emptied
+        this.$list( $whereTo ).append( this.views.map( function( view ){
+            return self._renderItemView$el( view );
+        }));
+        return this;
+    },
+
+    /** get a given subview's $el (or whatever may wrap it) and return it */
+    _renderItemView$el : function( view ){
+        // useful to wrap and override
+        return view.render(0).$el;
+    },
+
+    /** render the empty/none-found message */
+    _renderEmptyMessage : function( $whereTo ){
+        this.debug( '_renderEmptyMessage', $whereTo, this.searchFor );
+        var text = this.searchFor? this.noneFoundMsg : this.emptyMsg;
+        return this.$emptyMessage( $whereTo ).text( text );
+    },
+
+    /** expand all item views */
+    expandAll : function(){
+        _.each( this.views, function( view ){
+            view.expand();
+        });
+    },
+
+    /** collapse all item views */
+    collapseAll : function(){
+        _.each( this.views, function( view ){
+            view.collapse();
+        });
+    },
+
+    // ------------------------------------------------------------------------ collection/views syncing
+    /** Add a view (if the model should be viewable) to the panel */
+    addItemView : function( model, collection, options ){
+        // console.log( this + '.addItemView:', model );
+        var panel = this;
+        // get the index of the model in the list of filtered models shown by this list
+        // in order to insert the view in the proper place
+        //TODO:? potentially expensive
+        var modelIndex = panel._filterCollection().indexOf( model );
+        if( modelIndex === -1 ){ return undefined; }
+        var view = panel._createItemView( model );
+        // console.log( 'adding and rendering:', modelIndex, view.toString() );
+
+        $( view ).queue( 'fx', [
+            function( next ){
+                // hide the empty message first if only view
+                if( panel.$emptyMessage().is( ':visible' ) ){
+                    panel.$emptyMessage().fadeOut( panel.fxSpeed, next );
+                } else {
+                    next();
+                }
+            },
+            function( next ){
+                panel._attachView( view, modelIndex );
+                next();
+            }
+        ]);
+        return view;
+    },
+
+    /** internal fn to add view (to both panel.views and panel.$list) */
+    _attachView : function( view, modelIndex, useFx ){
+        // console.log( this + '._attachView:', view, modelIndex, useFx );
+        useFx = _.isUndefined( useFx )? true : useFx;
+        modelIndex = modelIndex || 0;
+        var panel = this;
+
+        // use the modelIndex to splice into views and insert at the proper index in the DOM
+        panel.views.splice( modelIndex, 0, view );
+        panel._insertIntoListAt( modelIndex, panel._renderItemView$el( view ).hide() );
+
+        panel.trigger( 'view:attached', view );
+        if( useFx ){
+            view.$el.slideDown( panel.fxSpeed, function(){
+                panel.trigger( 'view:attached:rendered' );
+            });
+        } else {
+            view.$el.show();
+            panel.trigger( 'view:attached:rendered' );
+        }
+        return view;
+    },
+
+    /** insert a jq object as a child of list-items at the specified *DOM index* */
+    _insertIntoListAt : function( index, $what ){
+        // console.log( this + '._insertIntoListAt:', index, $what );
+        var $list = this.$list();
+        if( index === 0 ){
+            $list.prepend( $what );
+        } else {
+            $list.children().eq( index - 1 ).after( $what );
+        }
+        return $what;
+    },
+
+    /** Remove a view from the panel (if found) */
+    removeItemView : function( model, collection, options ){
+        var panel = this;
+        var view = _.find( panel.views, function( v ){ return v.model === model; });
+        if( !view ){ return undefined; }
+        panel.views = _.without( panel.views, view );
+        panel.trigger( 'view:removed', view );
+
+        // potentially show the empty message if no views left
+        // use anonymous queue here - since remove can happen multiple times
+        $({}).queue( 'fx', [
+            function( next ){
+                view.$el.fadeOut( panel.fxSpeed, next );
+            },
+            function( next ){
+                view.remove();
+                panel.trigger( 'view:removed:rendered' );
+                if( !panel.views.length ){
+                    panel._renderEmptyMessage().fadeIn( panel.fxSpeed, next );
+                } else {
+                    next();
+                }
+            }
+        ]);
+        return view;
+    },
+
+    /** get the view whose model has the given id */
+    viewFromModelId : function( id ){
+        return _.find( this.views, function( v ){ return v.model.id === id; });
+    },
+
+    /** get the view for the given model */
+    viewFromModel : function( model ){
+        return model ? this.viewFromModelId( model.id ) : undefined;
+    },
+
+    /** get views based on model properties */
+    viewsWhereModel : function( properties ){
+        return this.views.filter( function( view ){
+            return _.isMatch( view.model.attributes, properties );
+        });
+    },
+
+    /** A range of views between (and including) viewA and viewB */
+    viewRange : function( viewA, viewB ){
+        if( viewA === viewB ){ return ( viewA )?( [ viewA ] ):( [] ); }
+
+        var indexA = this.views.indexOf( viewA ),
+            indexB = this.views.indexOf( viewB );
+
+        // handle not found
+        if( indexA === -1 || indexB === -1 ){
+            if( indexA === indexB ){ return []; }
+            return ( indexA === -1 )?( [ viewB ] ):( [ viewA ] );
+        }
+        // reverse the slice if the indices are out of order
+        //note: end inclusive
+        return ( indexA < indexB )?
+            this.views.slice( indexA, indexB + 1 ) :
+            this.views.slice( indexB, indexA + 1 );
+    },
+
+    // ------------------------------------------------------------------------ searching
+    /** render a search input for filtering datasets shown
+     *      (see SearchableMixin in base-mvc for implementation of the actual searching)
+     *      return will start the search
+     *      esc will clear the search
+     *      clicking the clear button will clear the search
+     *      uses searchInput in ui.js
+     */
+    _renderSearch : function( $where ){
+        $where.find( '.controls .search-input' ).searchInput({
+            placeholder     : this.searchPlaceholder,
+            initialVal      : this.searchFor,
+            onfirstsearch   : _.bind( this._firstSearch, this ),
+            onsearch        : _.bind( this.searchItems, this ),
+            onclear         : _.bind( this.clearSearch, this )
+        });
+        return $where;
+    },
+
+    /** What to do on the first search entered */
+    _firstSearch : function( searchFor ){
+        // override to load model details if necc.
+        this.log( 'onFirstSearch', searchFor );
+        return this.searchItems( searchFor );
+    },
+
+    /** filter view list to those that contain the searchFor terms */
+    searchItems : function( searchFor, force ){
+        this.log( 'searchItems', searchFor, this.searchFor, force );
+        if( !force && this.searchFor === searchFor ){ return this; }
+        this.searchFor = searchFor;
+        this.renderItems();
+        this.trigger( 'search:searching', searchFor, this );
+        var $search = this.$( '> .controls .search-query' );
+        if( $search.val() !== searchFor ){
+            $search.val( searchFor );
+        }
+        return this;
+    },
+
+    /** clear the search filters and show all views that are normally shown */
+    clearSearch : function( searchFor ){
+        //this.log( 'onSearchClear', this );
+        this.searchFor = '';
+        this.trigger( 'search:clear', this );
+        this.$( '> .controls .search-query' ).val( '' );
+        this.renderItems();
+        return this;
+    },
+
+    // ------------------------------------------------------------------------ selection
+    /** @type Integer when the number of list item views is >= this number, don't animate selectors */
+    THROTTLE_SELECTOR_FX_AT : 20,
+
+    /** show selectors on all visible itemViews and associated controls */
+    showSelectors : function( speed ){
+        speed = ( speed !== undefined )?( speed ):( this.fxSpeed );
+        this.selecting = true;
+        this.$( '.list-actions' ).slideDown( speed );
+        speed = this.views.length >= this.THROTTLE_SELECTOR_FX_AT? 0 : speed;
+        _.each( this.views, function( view ){
+            view.showSelector( speed );
+        });
+        //this.selected = [];
+        //this.lastSelected = null;
+    },
+
+    /** hide selectors on all visible itemViews and associated controls */
+    hideSelectors : function( speed ){
+        speed = ( speed !== undefined )?( speed ):( this.fxSpeed );
+        this.selecting = false;
+        this.$( '.list-actions' ).slideUp( speed );
+        speed = this.views.length >= this.THROTTLE_SELECTOR_FX_AT? 0 : speed;
+        _.each( this.views, function( view ){
+            view.hideSelector( speed );
+        });
+        this.selected = [];
+        this.lastSelected = null;
+    },
+
+    /** show or hide selectors on all visible itemViews and associated controls */
+    toggleSelectors : function(){
+        if( !this.selecting ){
+            this.showSelectors();
+        } else {
+            this.hideSelectors();
+        }
+    },
+
+    /** select all visible items */
+    selectAll : function( event ){
+        _.each( this.views, function( view ){
+            view.select( event );
+        });
+    },
+
+    /** deselect all visible items */
+    deselectAll : function( event ){
+        this.lastSelected = null;
+        _.each( this.views, function( view ){
+            view.deselect( event );
+        });
+    },
+
+    /** select a range of datasets between A and B */
+    selectRange : function( viewA, viewB ){
+        var range = this.viewRange( viewA, viewB );
+        _.each( range, function( view ){
+            view.select();
+        });
+        return range;
+    },
+
+    /** return an array of all currently selected itemViews */
+    getSelectedViews : function(){
+        return _.filter( this.views, function( v ){
+            return v.selected;
+        });
+    },
+
+    /** return a collection of the models of all currently selected items */
+    getSelectedModels : function(){
+        // console.log( '(getSelectedModels)' );
+        return new this.collection.constructor( _.map( this.getSelectedViews(), function( view ){
+            return view.model;
+        }));
+    },
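+
+    // Selection can also be driven programmatically - a sketch using the
+    // methods above (assumes the usual ListItemView select/deselect API):
+    //     panel.showSelectors( 0 );
+    //     panel.selectAll();
+    //     console.log( panel.getSelectedModels().pluck( 'id' ) );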
+
+    // ------------------------------------------------------------------------ loading indicator
+    /** hide the $el and display a loading indicator (in the $el's parent) when loading new data */
+    _showLoadingIndicator : function( msg, speed, callback ){
+        this.debug( '_showLoadingIndicator', this.indicator, msg, speed, callback );
+        speed = ( speed !== undefined )?( speed ):( this.fxSpeed );
+        if( !this.indicator ){
+            this.indicator = new LoadingIndicator( this.$el );
+            this.debug( '\t created', this.indicator );
+        }
+        if( !this.$el.is( ':visible' ) ){
+            this.indicator.show( 0, callback );
+        } else {
+            this.$el.fadeOut( speed );
+            this.indicator.show( msg, speed, callback );
+        }
+    },
+
+    /** hide the loading indicator */
+    _hideLoadingIndicator : function( speed, callback ){
+        this.debug( '_hideLoadingIndicator', this.indicator, speed, callback );
+        speed = ( speed !== undefined )?( speed ):( this.fxSpeed );
+        if( this.indicator ){
+            this.indicator.hide( speed, callback );
+        }
+    },
+
+    // ------------------------------------------------------------------------ scrolling
+    /** get the current scroll position of the panel in its parent */
+    scrollPosition : function(){
+        return this.$scrollContainer().scrollTop();
+    },
+
+    /** set the current scroll position of the panel in its parent */
+    scrollTo : function( pos, speed ){
+        speed = speed || 0;
+        this.$scrollContainer().animate({ scrollTop: pos }, speed );
+        return this;
+    },
+
+    /** Scrolls the panel to the top. */
+    scrollToTop : function( speed ){
+        return this.scrollTo( 0, speed );
+    },
+
+    /** scroll to the given view in list-items */
+    scrollToItem : function( view, speed ){
+        if( !view ){ return this; }
+        //TODO: unimplemented - currently a no-op beyond the guard
+        return this;
+    },
+
+    /** Scrolls the panel to show the content with the given id. */
+    scrollToId : function( id, speed ){
+        return this.scrollToItem( this.viewFromModelId( id ), speed );
+    },
+
+    // ------------------------------------------------------------------------ panel events
+    /** event map */
+    events : {
+        'click .select-all'     : 'selectAll',
+        'click .deselect-all'   : 'deselectAll'
+    },
+
+    // ------------------------------------------------------------------------ misc
+    /** Return a string rep of the panel */
+    toString : function(){
+        return 'ListPanel(' + this.collection + ')';
+    }
+});
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+ListPanel.prototype.templates = (function(){
+
+    var elTemplate = BASE_MVC.wrapTemplate([
+        // temp container
+        '<div>',
+            '<div class="controls"></div>',
+            '<div class="list-items"></div>',
+            '<div class="empty-message infomessagesmall"></div>',
+        '</div>'
+    ]);
+
+    var controlsTemplate = BASE_MVC.wrapTemplate([
+        '<div class="controls">',
+            '<div class="title">',
+                '<div class="name"><%- view.title %></div>',
+            '</div>',
+            '<div class="subtitle"><%- view.subtitle %></div>',
+            // buttons, controls go here
+            '<div class="actions"></div>',
+            // deleted msg, etc.
+            '<div class="messages"></div>',
+
+            '<div class="search">',
+                '<div class="search-input"></div>',
+            '</div>',
+
+            // show when selectors are shown
+            '<div class="list-actions">',
+                '<div class="btn-group">',
+                    '<button class="select-all btn btn-default"',
+                            'data-mode="select">', _l( 'All' ), '</button>',
+                    '<button class="deselect-all btn btn-default"',
+                            'data-mode="select">', _l( 'None' ), '</button>',
+                '</div>',
+                '<div class="list-action-menu btn-group">',
+                '</div>',
+            '</div>',
+        '</div>'
+    ]);
+
+    return {
+        el          : elTemplate,
+        controls    : controlsTemplate
+    };
+}());
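+
+// A minimal usage sketch - the collection contents below are illustrative
+// assumptions; the default viewClass only needs models it can render:
+//     var panel = new ListPanel({
+//         collection : new Backbone.Collection([ { id : 'a' }, { id : 'b' } ]),
+//         selecting  : false
+//     });
+//     $( '#some-container' ).append( panel.render( 0 ).$el );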
+
+
+//=============================================================================
+/** View for a model that has a sub-collection (e.g. History, DatasetCollection)
+ *  Allows:
+ *      the model to be reset
+ *      panel.collection to be auto-assigned from panel.model[ panel.modelCollectionKey ]
+ */
+var ModelListPanel = ListPanel.extend({
+
+    /** key of attribute in model to assign to this.collection */
+    modelCollectionKey : 'contents',
+
+    initialize : function( attributes ){
+        ListPanel.prototype.initialize.call( this, attributes );
+        this.selecting = ( attributes.selecting !== undefined )? attributes.selecting : false;
+
+        this.setModel( this.model, attributes );
+    },
+
+    /** release/free/shutdown old models and set up panel for new models
+     *  @fires new-model with the panel as parameter
+     */
+    setModel : function( model, attributes ){
+        attributes = attributes || {};
+        this.debug( this + '.setModel:', model, attributes );
+
+        this.freeModel();
+        this.freeViews();
+
+        if( model ){
+            var oldModelId = this.model? this.model.get( 'id' ): null;
+
+            // set up the new model with user, logger, storage, events
+            this.model = model;
+            if( this.logger ){
+                this.model.logger = this.logger;
+            }
+            this._setUpModelListeners();
+
+            //TODO: relation btwn model, collection becoming tangled here
+            // free the collection, and assign the new collection to either
+            //  the model[ modelCollectionKey ], attributes.collection, or an empty vanilla collection
+            this.stopListening( this.collection );
+            this.collection = this.model[ this.modelCollectionKey ]
+                || attributes.collection
+                || this._createDefaultCollection();
+            this._setUpCollectionListeners();
+
+            if( oldModelId && model.get( 'id' ) !== oldModelId  ){
+                this.trigger( 'new-model', this );
+            }
+        }
+        return this;
+    },
+
+    /** free the current model and all listeners for it, free any views for the model */
+    freeModel : function(){
+        // stop/release the previous model, and clear cache to sub-views
+        if( this.model ){
+            this.stopListening( this.model );
+            //TODO: see base-mvc
+            //this.model.free();
+            //this.model = null;
+        }
+        return this;
+    },
+
+    // ------------------------------------------------------------------------ listening
+    /** listening for model events */
+    _setUpModelListeners : function(){
+        // override
+        this.log( this + '._setUpModelListeners', this.model );
+        // bounce model errors up to the panel
+        this.listenTo( this.model, 'error', function(){
+            var args = Array.prototype.slice.call( arguments, 0 );
+            //args.unshift( 'model:error' );
+            args.unshift( 'error' );
+            this.trigger.apply( this, args );
+        }, this );
+
+        // debugging
+        if( this.logger ){
+            this.listenTo( this.model, 'all', function( event ){
+                this.info( this + '(model)', event, arguments );
+            });
+        }
+        return this;
+    },
+
+    /** Build a temp div containing the new children for the view's $el.
+     */
+    _renderControls : function( $newRender ){
+        this.debug( this + '(ModelListPanel)._renderControls' );
+        var json = this.model? this.model.toJSON() : {},
+            $controls = $( this.templates.controls( json, this ) );
+        $newRender.find( '.controls' ).replaceWith( $controls );
+        return $controls;
+    },
+
+    // ------------------------------------------------------------------------ misc
+    /** Return a string rep of the panel */
+    toString : function(){
+        return 'ModelListPanel(' + this.model + ')';
+    }
+});
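+
+// A hypothetical wiring sketch: any model object whose 'contents' property is
+// a Backbone.Collection matches the default modelCollectionKey above -
+//     var history = new Backbone.Model({ id : 'h1', name : 'My history' });
+//     history.contents = new Backbone.Collection();
+//     var panel = new ModelListPanel({ model : history });
+//     panel.render();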
+
+// ............................................................................ TEMPLATES
+/** underscore templates */
+ModelListPanel.prototype.templates = (function(){
+
+    var controlsTemplate = BASE_MVC.wrapTemplate([
+        '<div class="controls">',
+            '<div class="title">',
+                //TODO: this is really the only difference - consider factoring titlebar out
+                '<div class="name"><%- model.name %></div>',
+            '</div>',
+            '<div class="subtitle"><%- view.subtitle %></div>',
+            '<div class="actions"></div>',
+            '<div class="messages"></div>',
+
+            '<div class="search">',
+                '<div class="search-input"></div>',
+            '</div>',
+
+            '<div class="list-actions">',
+                '<div class="btn-group">',
+                    '<button class="select-all btn btn-default"',
+                            'data-mode="select">', _l( 'All' ), '</button>',
+                    '<button class="deselect-all btn btn-default"',
+                            'data-mode="select">', _l( 'None' ), '</button>',
+                '</div>',
+                '<div class="list-action-menu btn-group">',
+                '</div>',
+            '</div>',
+        '</div>'
+    ]);
+
+    return _.extend( _.clone( ListPanel.prototype.templates ), {
+        controls : controlsTemplate
+    });
+}());
+
+
+//=============================================================================
+    return {
+        ListPanel      : ListPanel,
+        ModelListPanel : ModelListPanel
+    };
+});
diff --git a/client/galaxy/scripts/mvc/tag.js b/client/galaxy/scripts/mvc/tag.js
new file mode 100644
index 0000000..e66e44d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/tag.js
@@ -0,0 +1,120 @@
+define([
+    "mvc/base-mvc",
+    "utils/localization"
+], function( baseMVC, _l ){
+// =============================================================================
+/** A view on any model that has a 'tags' attribute (a list of tag strings)
+ *      Incorporates the select2 jQuery plugin for tags display/editing:
+ *      http://ivaynberg.github.io/select2/
+ */
+var TagsEditor = Backbone.View
+        .extend( baseMVC.LoggableMixin )
+        .extend( baseMVC.HiddenUntilActivatedViewMixin ).extend({
+
+    tagName     : 'div',
+    className   : 'tags-display',
+
+    /** Set up listeners, parse options */
+    initialize : function( options ){
+        //console.debug( this, options );
+        // listen to the model only for changes to tags - re-render
+        this.listenTo( this.model, 'change:tags', function(){
+            this.render();
+        });
+        this.hiddenUntilActivated( options.$activator, options );
+    },
+
+    /** Build the DOM elements, call select2 on the created input, and set up behaviors */
+    render : function(){
+        var view = this;
+        this.$el.html( this._template() );
+
+        this.$input().select2({
+            placeholder : 'Add tags',
+            width       : '100%',
+            tags : function(){
+                // initialize possible tags in the dropdown based on all the tags the user has used so far
+                return view._getTagsUsed();
+            }
+        });
+
+        this._setUpBehaviors();
+        return this;
+    },
+
+    /** @returns {String} the html text used to build the view's DOM */
+    _template : function(){
+        return [
+            //TODO: make prompt optional
+            '<label class="prompt">', _l( 'Tags' ), '</label>',
+            // set up initial tags by adding as CSV to input vals (necc. to init select2)
+            '<input class="tags-input" value="', this.tagsToCSV(), '" />'
+        ].join( '' );
+    },
+
+    /** @returns {String} the sorted, comma-separated tags from the model */
+    tagsToCSV : function(){
+        var tagsArray = this.model.get( 'tags' );
+        if( !_.isArray( tagsArray ) || _.isEmpty( tagsArray ) ){
+            return '';
+        }
+        return tagsArray.map( function( tag ){
+            return _.escape( tag );
+        }).sort().join( ',' );
+    },
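+
+    // e.g. (sketch): tags [ 'rna', 'b&w' ] -> 'b&amp;w,rna' (escaped, then sorted)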
+
+    /** @returns {jQuery} the input for this view */
+    $input : function(){
+        return this.$el.find( 'input.tags-input' );
+    },
+
+    /** @returns {String[]} all tags used by the current user */
+    _getTagsUsed : function(){
+//TODO: global
+        return Galaxy.user.get( 'tags_used' );
+    },
+
+    /** set up any event listeners on the view's DOM (mostly handled by select2) */
+    _setUpBehaviors : function(){
+        var view = this;
+        this.$input().on( 'change', function( event ){
+            // save the model's tags in either remove or added event
+            view.model.save({ tags: event.val }, { silent: true });
+            // if it's new, add the tag to the user's tags
+            if( event.added ){
+                //??: solve weird behavior in FF on test.galaxyproject.org where
+                //  event.added.text is string object: 'String{ 0="o", 1="n", 2="e" }'
+                view._addNewTagToTagsUsed( event.added.text + '' );
+            }
+        });
+    },
+
+    /** add a new tag (if not already there) to the list of all tags used by the user
+     *  @param {String} newTag  the tag to add to the list of used
+     */
+    _addNewTagToTagsUsed : function( newTag ){
+//TODO: global
+        var tagsUsed = Galaxy.user.get( 'tags_used' );
+        if( !_.contains( tagsUsed, newTag ) ){
+            tagsUsed.push( newTag );
+            tagsUsed.sort();
+            Galaxy.user.set( 'tags_used', tagsUsed );
+        }
+    },
+
+    /** shut down event listeners and remove this view's DOM */
+    remove : function(){
+        this.$input().off();
+        this.stopListening( this.model );
+        Backbone.View.prototype.remove.call( this );
+    },
+
+    /** string rep */
+    toString : function(){ return [ 'TagsEditor(', this.model + '', ')' ].join(''); }
+});
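+
+// A usage sketch (the model and activator are assumptions; any model with a
+// 'tags' array attribute and a save() method works, and select2 must be loaded):
+//     var editor = new TagsEditor({
+//         model      : dataset,                 // hypothetical Backbone model
+//         $activator : $( '.edit-tags-button' ) // hypothetical toggle element
+//     });
+//     editor.render();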
+
+// =============================================================================
+return {
+    TagsEditor : TagsEditor
+};
+});
diff --git a/client/galaxy/scripts/mvc/tool/tool-form-base.js b/client/galaxy/scripts/mvc/tool/tool-form-base.js
new file mode 100644
index 0000000..129d97a
--- /dev/null
+++ b/client/galaxy/scripts/mvc/tool/tool-form-base.js
@@ -0,0 +1,356 @@
+/**
+    This is the base class of the tool form plugin. It is inherited by, e.g., the regular and the workflow tool forms.
+*/
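+/* A subclassing sketch (the names here are illustrative assumptions):
+ *     var MyToolForm = ToolFormBase.extend({});
+ *     new MyToolForm({ id : 'cat1', version : '1.0.0' });
+ * Passing 'inputs' builds the form directly; omitting it requests a model
+ * from the api via _buildModel().
+ */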
+define(['utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view',
+        'mvc/citation/citation-model', 'mvc/citation/citation-view'],
+    function(Utils, Deferred, Ui, FormBase, CitationModel, CitationView) {
+    return FormBase.extend({
+        initialize: function(options) {
+            var self = this;
+            FormBase.prototype.initialize.call(this, options);
+            this.deferred = new Deferred();
+            if (options.inputs) {
+                this._buildForm(options);
+            } else {
+                this.deferred.execute(function(process) {
+                    self._buildModel(process, options, true);
+                });
+            }
+            // listen to history panel
+            if ( options.listen_to_history && parent.Galaxy && parent.Galaxy.currHistoryPanel ) {
+                this.listenTo( parent.Galaxy.currHistoryPanel.collection, 'change', function() {
+                    this.refresh();
+                });
+            }
+            // destroy dom elements
+            this.$el.on( 'remove', function() { self.remove() } );
+        },
+
+        /** Rebuild the form model (invoked e.g. when the history panel changes) */
+        refresh: function() {
+            var self = this;
+            self.deferred.reset();
+            this.deferred.execute( function( process ){
+                self._updateModel( process );
+            });
+        },
+
+        /** Wait for deferred build processes before removal */
+        remove: function() {
+            var self = this;
+            this.$el.hide();
+            this.deferred.execute(function(){
+                FormBase.prototype.remove.call(self);
+                Galaxy.emit.debug('tool-form-base::remove()', 'Destroy view.');
+            });
+        },
+
+        /** Build form */
+        _buildForm: function(options) {
+            var self = this;
+            this.options = Utils.merge(options, this.options);
+            this.options = Utils.merge({
+                icon            : options.icon,
+                title           : '<b>' + options.name + '</b> ' + options.description + ' (Galaxy Version ' + options.version + ')',
+                operations      : !this.options.hide_operations && this._operations(),
+                onchange        : function() {
+                    self.refresh();
+                }
+            }, this.options);
+            this.options.customize && this.options.customize( this.options );
+            this.render();
+            if ( !this.options.collapsible ) {
+                this.$el.append( $( '<div/>' ).addClass( 'ui-margin-top-large' ).append( this._footer() ) );
+            }
+        },
+
+        /** Builds a new model through api call and recreates the entire form
+        */
+        _buildModel: function(process, options, hide_message) {
+            var self = this;
+            this.options.id = options.id;
+            this.options.version = options.version;
+
+            // build request url
+            var build_url = '';
+            var build_data = {};
+            if ( options.job_id ) {
+                build_url = Galaxy.root + 'api/jobs/' + options.job_id + '/build_for_rerun';
+            } else {
+                build_url = Galaxy.root + 'api/tools/' + options.id + '/build';
+                if ( Galaxy.params && Galaxy.params.tool_id == options.id ) {
+                    build_data = $.extend( {}, Galaxy.params );
+                    options.version && ( build_data[ 'tool_version' ] = options.version );
+                }
+            }
+
+            // get initial model
+            Utils.get({
+                url     : build_url,
+                data    : build_data,
+                success : function(new_model) {
+                    new_model = new_model.tool_model || new_model;
+                    if( !new_model.display ) {
+                        window.location = Galaxy.root;
+                        return;
+                    }
+                    self._buildForm(new_model);
+                    !hide_message && self.message.update({
+                        status      : 'success',
+                        message     : 'Now you are using \'' + self.options.name + '\' version ' + self.options.version + ', id \'' + self.options.id + '\'.',
+                        persistent  : false
+                    });
+                    Galaxy.emit.debug('tool-form-base::initialize()', 'Initial tool model ready.', new_model);
+                    process.resolve();
+                },
+                error   : function(response, status) {
+                    var error_message = ( response && response.err_msg ) || 'Uncaught error.';
+                    if ( status == 401 ) {
+                        window.location = Galaxy.root + 'user/login?' + $.param({ redirect : Galaxy.root + '?tool_id=' + self.options.id });
+                    } else if ( self.$el.is(':empty') ) {
+                        self.$el.prepend((new Ui.Message({
+                            message     : error_message,
+                            status      : 'danger',
+                            persistent  : true,
+                            large       : true
+                        })).$el);
+                    } else {
+                        Galaxy.modal && Galaxy.modal.show({
+                            title   : 'Tool request failed',
+                            body    : error_message,
+                            buttons : {
+                                'Close' : function() {
+                                    Galaxy.modal.hide();
+                                }
+                            }
+                        });
+                    }
+                    Galaxy.emit.debug('tool-form::initialize()', 'Initial tool model request failed.', response);
+                    process.reject();
+                }
+            });
+        },
+
+        /** Request a new model for an already created tool form and update the form inputs
+        */
+        _updateModel: function(process) {
+            // link this
+            var self = this;
+            var model_url = this.options.update_url || Galaxy.root + 'api/tools/' + this.options.id + '/build';
+            var current_state = {
+                tool_id         : this.options.id,
+                tool_version    : this.options.version,
+                inputs          : $.extend(true, {}, self.data.create())
+            };
+            this.wait(true);
+
+            // log tool state
+            Galaxy.emit.debug('tool-form-base::_updateModel()', 'Sending current state.', current_state);
+
+            // post job
+            Utils.request({
+                type    : 'POST',
+                url     : model_url,
+                data    : current_state,
+                success : function(new_model) {
+                    self.update(new_model['tool_model'] || new_model);
+                    self.options.update && self.options.update(new_model);
+                    self.wait(false);
+                    Galaxy.emit.debug('tool-form-base::_updateModel()', 'Received new model.', new_model);
+                    process.resolve();
+                },
+                error   : function(response) {
+                    Galaxy.emit.debug('tool-form-base::_updateModel()', 'Refresh request failed.', response);
+                    process.reject();
+                }
+            });
+        },
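+
+        // The state posted above is shaped roughly like the following (the
+        // values are hypothetical):
+        //     { tool_id : 'cat1', tool_version : '1.0.0', inputs : { /* current form values */ } }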
+
+        /** Create tool operation menu
+        */
+        _operations: function() {
+            var self = this;
+            var options = this.options;
+
+            // button for version selection
+            var versions_button = new Ui.ButtonMenu({
+                icon    : 'fa-cubes',
+                title   : (!options.narrow && 'Versions') || null,
+                tooltip : 'Select another tool version'
+            });
+            if (!options.sustain_version && options.versions && options.versions.length > 1) {
+                for (var i in options.versions) {
+                    var version = options.versions[i];
+                    if (version != options.version) {
+                        versions_button.addMenu({
+                            title   : 'Switch to ' + version,
+                            version : version,
+                            icon    : 'fa-cube',
+                            onclick : function() {
+                                // here we update the tool version (some tools encode the version also in the id)
+                                var id = options.id.replace(options.version, this.version);
+                                var version = this.version;
+                                // queue model request
+                                self.deferred.reset();
+                                self.deferred.execute(function(process) {
+                                    self._buildModel(process, {id: id, version: version})
+                                });
+                            }
+                        });
+                    }
+                }
+            } else {
+                versions_button.$el.hide();
+            }
+
+            // button for options e.g. search, help
+            var menu_button = new Ui.ButtonMenu({
+                icon    : 'fa-caret-down',
+                title   : (!options.narrow && 'Options') || null,
+                tooltip : 'View available options'
+            });
+            if(options.biostar_url) {
+                menu_button.addMenu({
+                    icon    : 'fa-question-circle',
+                    title   : 'Question?',
+                    tooltip : 'Ask a question about this tool (Biostar)',
+                    onclick : function() {
+                        window.open(options.biostar_url + '/p/new/post/');
+                    }
+                });
+                menu_button.addMenu({
+                    icon    : 'fa-search',
+                    title   : 'Search',
+                    tooltip : 'Search help for this tool (Biostar)',
+                    onclick : function() {
+                        window.open(options.biostar_url + '/local/search/page/?q=' + options.name);
+                    }
+                });
+            }
+            menu_button.addMenu({
+                icon    : 'fa-share',
+                title   : 'Share',
+                tooltip : 'Share this tool',
+                onclick : function() {
+                    prompt('Copy to clipboard: Ctrl+C, Enter', window.location.origin + Galaxy.root + 'root?tool_id=' + options.id);
+                }
+            });
+
+            // add admin operations
+            if (Galaxy.user && Galaxy.user.get('is_admin')) {
+                menu_button.addMenu({
+                    icon    : 'fa-download',
+                    title   : 'Download',
+                    tooltip : 'Download this tool',
+                    onclick : function() {
+                        window.location.href = Galaxy.root + 'api/tools/' + options.id + '/download';
+                    }
+                });
+            }
+
+            // add admin operations for tool XML reloading
+            if (Galaxy.user && Galaxy.user.get('is_admin')) {
+                menu_button.addMenu({
+                    icon    : 'fa-refresh',
+                    title   : 'Reload Tool XML',
+                    tooltip : 'Reload tool XML file',
+                    onclick : function() {
+                        var modalMessage = new Ui.Modal.View();
+                        $.ajax({
+                            url: '/api/tools/' + options.id + '/reload',
+                            type: "GET",
+                        }).done(function(data){
+                            modalMessage.show({
+                                title   : data.done ? 'Tool XML Reload' : 'Tool XML Reload Error',
+                                body    : data.done ? data.done : data.error,
+                                buttons : { 'Close' : function() { modalMessage.hide() } }
+                            });
+                            window.setTimeout(function(){modalMessage.hide();}, 2000);
+
+                        }).fail(function(error){
+                            modalMessage.show({
+                                title: "Tool XML Reload AJAX Error",
+                                body: options.id + " " + error,
+                                buttons : { 'Close' : function() { modalMessage.hide() } }
+                            });
+                        });
+                    }
+                });
+            }
+
+            // add requirements display
+            if (options.requirements && options.requirements.length > 0) {
+                menu_button.addMenu({
+                    icon    : 'fa-info-circle',
+                    title   : 'Requirements',
+                    tooltip : 'Display tool requirements',
+                    onclick : function() {
+                        if ( !this.requirements_visible || self.portlet.collapsed ) {
+                            this.requirements_visible = true;
+                            self.portlet.expand();
+                            self.message.update( { persistent : true, message : self._templateRequirements( options ), status : 'info' } );
+                        } else {
+                            this.requirements_visible = false;
+                            self.message.update( { message : '' } );
+                        }
+                    }
+                });
+            }
+
+            // add toolshed url
+            if (options.sharable_url) {
+                menu_button.addMenu({
+                    icon    : 'fa-external-link',
+                    title   : 'See in Tool Shed',
+                    tooltip : 'Access the repository',
+                    onclick : function() {
+                        window.open(options.sharable_url);
+                    }
+                });
+            }
+
+            return {
+                menu        : menu_button,
+                versions    : versions_button
+            };
+        },
+
+        /** Create footer
+        */
+        _footer: function() {
+            var options = this.options;
+            var $el = $( '<div/>' ).append( this._templateHelp( options ) );
+            if ( options.citations ) {
+                var $citations = $( '<div/>' );
+                var citations = new CitationModel.ToolCitationCollection();
+                citations.tool_id = options.id;
+                var citation_list_view = new CitationView.CitationListView({ el: $citations, collection: citations });
+                citation_list_view.render();
+                citations.fetch();
+                $el.append( $citations );
+            }
+            return $el;
+        },
+
+        /** Templates
+        */
+        _templateHelp: function( options ) {
+            var $tmpl = $( '<div/>' ).addClass( 'ui-form-help' ).append( options.help );
+            $tmpl.find( 'a' ).attr( 'target', '_blank' );
+            return $tmpl;
+        },
+
+        _templateRequirements: function( options ) {
+            var nreq = options.requirements.length;
+            if ( nreq > 0 ) {
+                var requirements_message = 'This tool requires ';
+                _.each( options.requirements, function( req, i ) {
+                    requirements_message += req.name + ( req.version ? ' (Version ' + req.version + ')' : '' ) + ( i < nreq - 2 ? ', ' : ( i == nreq - 2 ? ' and ' : '' ) );
+                });
+                var requirements_link = $( '<a/>' ).attr( 'target', '_blank' ).attr( 'href', 'https://wiki.galaxyproject.org/Tools/Requirements' ).text( 'here' );
+                return $( '<span/>' ).append( requirements_message + '. Click ' ).append( requirements_link ).append( ' for more information.' );
+            }
+            return 'No requirements found.';
+        }
+    });
+});
diff --git a/client/galaxy/scripts/mvc/tool/tool-form-composite.js b/client/galaxy/scripts/mvc/tool/tool-form-composite.js
new file mode 100644
index 0000000..4c65bea
--- /dev/null
+++ b/client/galaxy/scripts/mvc/tool/tool-form-composite.js
@@ -0,0 +1,511 @@
+/** This is the run workflow tool form view. */
+define([ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view', 'mvc/form/form-data', 'mvc/tool/tool-form-base', 'mvc/ui/ui-modal', 'mvc/webhooks' ],
+    function( Utils, Deferred, Ui, Form, FormData, ToolFormBase, Modal, Webhooks ) {
+    var View = Backbone.View.extend({
+        initialize: function( options ) {
+            var self = this;
+            this.modal = parent.Galaxy.modal || new Modal.View();
+            this.model = options && options.model || new Backbone.Model( options );
+            this.deferred = new Deferred();
+            this.setElement( $( '<div/>' ).addClass( 'ui-form-composite' )
+                                          .append( this.$message      = $( '<div/>' ) )
+                                          .append( this.$header       = $( '<div/>' ) )
+                                          .append( this.$steps        = $( '<div/>' ) ) );
+            $( 'body' ).append( this.$el );
+            this._configure();
+            this.render();
+            this._refresh();
+            this.$el.on( 'click', function() { self._refresh() } );
+            this.$steps.scroll( function() { self._refresh() } );
+            $( window ).resize( function() { self._refresh() } );
+        },
+
+        /** Resize the scrollable steps container below the header to fill the window */
+        _refresh: function() {
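+            // size the steps panel to the window height minus the combined height of the
+            // surrounding elements (message, header) and a 25px buffer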
+            var margin = _.reduce( this.$el.children(), function( memo, child ) {
+                return memo + $( child ).outerHeight();
+            }, 0 ) - this.$steps.height() + 25;
+            this.$steps.css( 'height', $( window ).height() - margin );
+        },
+
+        /** Configures form/step options for each workflow step */
+        _configure: function() {
+            var self = this;
+            this.forms = [];
+            this.steps = [];
+            this.links = [];
+            this.parms = [];
+            _.each( this.model.get( 'steps' ), function( step, i ) {
+                Galaxy.emit.debug( 'tool-form-composite::_configure()', i + ' : Preparing workflow step.' );
+                step = Utils.merge( {
+                    index                   : i,
+                    name                    : 'Step ' + ( parseInt( i ) + 1 ) + ': ' + step.name,
+                    icon                    : '',
+                    help                    : null,
+                    description             : step.annotation && ' - ' + step.annotation || step.description,
+                    citations               : null,
+                    collapsible             : true,
+                    collapsed               : i > 0 && !self._isDataStep( step ),
+                    sustain_version         : true,
+                    sustain_repeats         : true,
+                    sustain_conditionals    : true,
+                    narrow                  : true,
+                    text_enable             : 'Edit',
+                    text_disable            : 'Undo',
+                    cls_enable              : 'fa fa-edit',
+                    cls_disable             : 'fa fa-undo',
+                    errors                  : step.messages,
+                    initial_errors          : true,
+                    cls                     : 'ui-portlet-narrow',
+                    hide_operations         : true,
+                    needs_refresh           : false,
+                    always_refresh          : step.step_type != 'tool'
+                }, step );
+                self.steps[ i ] = step;
+                self.links[ i ] = [];
+                self.parms[ i ] = {};
+            });
+
+            // build linear index of step input pairs
+            _.each( this.steps, function( step, i ) {
+                FormData.visitInputs( step.inputs, function( input, name ) {
+                    self.parms[ i ][ name ] = input;
+                });
+            });
+
+            // iterate through data input modules and collect linked sub steps
+            _.each( this.steps, function( step, i ) {
+                _.each( step.output_connections, function( output_connection ) {
+                    _.each( self.steps, function( sub_step, j ) {
+                        sub_step.step_index === output_connection.input_step_index && self.links[ i ].push( sub_step );
+                    });
+                });
+            });
+
+            // convert all connected data inputs to hidden fields with proper labels,
+            // and track the linked source step
+            _.each( this.steps, function( step, i ) {
+                _.each( self.steps, function( sub_step, j ) {
+                    var connections_by_name = {};
+                    _.each( step.output_connections, function( connection ) {
+                        sub_step.step_index === connection.input_step_index && ( connections_by_name[ connection.input_name ] = connection );
+                    });
+                    _.each( self.parms[ j ], function( input, name ) {
+                        var connection = connections_by_name[ name ];
+                        if ( connection ) {
+                            input.type = 'hidden';
+                            input.help = input.step_linked ? input.help + ', ' : '';
+                            input.help += 'Output dataset \'' + connection.output_name + '\' from step ' + ( parseInt( i ) + 1 );
+                            input.step_linked = input.step_linked || [];
+                            input.step_linked.push( step );
+                        }
+                    });
+                });
+            });
+
+            // identify and configure workflow parameters
+            var wp_count = 0;
+            this.wp_inputs = {};
+            function _handleWorkflowParameter( value, callback ) {
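+                // each distinct ${...} parameter gets one shared input definition; the HSL
+                // hue advances by 100 degrees per parameter so the parameter form field and
+                // its target fields can be matched by color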
+                var wp_name = self._isWorkflowParameter( value );
+                wp_name && callback( self.wp_inputs[ wp_name ] = self.wp_inputs[ wp_name ] || {
+                    label   : wp_name,
+                    name    : wp_name,
+                    type    : 'text',
+                    color   : 'hsl( ' + ( ++wp_count * 100 ) + ', 70%, 30% )',
+                    style   : 'ui-form-wp-source',
+                    links   : []
+                });
+            }
+            _.each( this.steps, function( step, i ) {
+                _.each( self.parms[ i ], function( input, name ) {
+                    _handleWorkflowParameter( input.value, function( wp_input ) {
+                        wp_input.links.push( step );
+                        input.wp_linked = wp_input.name;
+                        input.color     = wp_input.color;
+                        input.type      = 'text';
+                        input.value     = null;
+                        input.backdrop  = true;
+                        input.style     = 'ui-form-wp-target';
+                    });
+                });
+                _.each( step.post_job_actions, function( pja ) {
+                    _.each( pja.action_arguments, function( arg ) {
+                        _handleWorkflowParameter( arg, function() {} );
+                    });
+                });
+            });
+
+            // select fields are shown for dynamic fields if all putative data inputs are available,
+            // or if an explicit reference is specified as data_ref and available
+            _.each( this.steps, function( step, i ) {
+                if ( step.step_type == 'tool' ) {
+                    var data_resolved = true;
+                    FormData.visitInputs( step.inputs, function ( input, name, context ) {
+                        var is_data_input = ([ 'data', 'data_collection' ]).indexOf( input.type ) != -1;
+                        var data_ref = context[ input.data_ref ];
+                        input.step_linked && !self._isDataStep( input.step_linked ) && ( data_resolved = false );
+                        input.options && ( ( input.options.length == 0 && !data_resolved ) || input.wp_linked ) && ( input.is_workflow = true );
+                        data_ref && ( input.is_workflow = ( data_ref.step_linked && !self._isDataStep( data_ref.step_linked ) ) || input.wp_linked );
+                        ( is_data_input || ( input.value && input.value.__class__ == 'RuntimeValue' && !input.step_linked ) ) && ( step.collapsed = false );
+                        input.value && input.value.__class__ == 'RuntimeValue' && ( input.value = null );
+                        input.flavor = 'workflow';
+                        if ( !is_data_input && input.type !== 'hidden' && !input.wp_linked ) {
+                            if ( input.optional || ( !Utils.isEmpty( input.value ) && input.value !== '' ) ) {
+                                input.collapsible_value = input.value;
+                                input.collapsible_preview = true;
+                            }
+                        }
+                    });
+                }
+            });
+        },
+
+        render: function() {
+            var self = this;
+            this.deferred.reset();
+            this._renderHeader();
+            this._renderMessage();
+            this._renderParameters();
+            this._renderHistory();
+            _.each( this.steps, function( step ) { self._renderStep( step ) } );
+        },
+
+        /** Render header */
+        _renderHeader: function() {
+            var self = this;
+            this.execute_btn = new Ui.Button({
+                icon        : 'fa-check',
+                title       : 'Run workflow',
+                cls         : 'btn btn-primary',
+                onclick     : function() { self._execute() }
+            });
+            this.$header.addClass( 'ui-form-header' ).empty()
+                        .append( new Ui.Label( { title: 'Workflow: ' + this.model.get( 'name' ) } ).$el )
+                        .append( this.execute_btn.$el );
+        },
+
+        /** Render message */
+        _renderMessage: function() {
+            this.$message.empty();
+            if ( this.model.get( 'has_upgrade_messages' ) ) {
+                this.$message.append( new Ui.Message( {
+                    message     : 'Some tools in this workflow may have changed since it was last saved or some errors were found. The workflow may still run, but any new options will have default values. Please review the messages below to make a decision about whether the changes will affect your analysis.',
+                    status      : 'warning',
+                    persistent  : true,
+                    fade        : false
+                } ).$el );
+            }
+            var step_version_changes = this.model.get( 'step_version_changes' );
+            if ( step_version_changes && step_version_changes.length > 0 ) {
+                this.$message.append( new Ui.Message( {
+                    message     : 'Some tools are being executed with different versions from those used when this workflow was last saved, because those versions are not or are no longer available on this Galaxy instance. To upgrade your workflow and dismiss this message, simply edit the workflow and re-save it.',
+                    status      : 'warning',
+                    persistent  : true,
+                    fade        : false
+                } ).$el );
+            }
+        },
+
+        /** Render workflow parameters */
+        _renderParameters: function() {
+            var self = this;
+            this.wp_form = null;
+            if ( !_.isEmpty( this.wp_inputs ) ) {
+                this.wp_form = new Form({ title: '<b>Workflow Parameters</b>', inputs: this.wp_inputs, cls: 'ui-portlet-narrow', onchange: function() {
+                        _.each( self.wp_form.input_list, function( input_def, i ) {
+                            _.each( input_def.links, function( step ) { self._refreshStep( step ) } );
+                        });
+                    }
+                });
+                this._append( this.$steps.empty(), this.wp_form.$el );
+            }
+        },
+
+        /** Render history options */
+        _renderHistory: function() {
+            this.history_form = new Form({
+                cls    : 'ui-portlet-narrow',
+                title  : '<b>History Options</b>',
+                inputs : [{
+                    type        : 'conditional',
+                    name        : 'new_history',
+                    test_param  : {
+                        name        : 'check',
+                        label       : 'Send results to a new history',
+                        type        : 'boolean',
+                        value       : 'false',
+                        help        : ''
+                    },
+                    cases       : [{
+                        value   : 'true',
+                        inputs  : [{
+                            name    : 'name',
+                            label   : 'History name',
+                            type    : 'text',
+                            value   : this.model.get( 'name' )
+                        }]
+                    }]
+                }]
+            });
+            this._append( this.$steps, this.history_form.$el );
+        },
+
+        /** Render step */
+        _renderStep: function( step ) {
+            var self = this;
+            var form = null;
+            this.deferred.execute( function( promise ) {
+                self.$steps.addClass( 'ui-steps' );
+                if ( step.step_type == 'tool' ) {
+                    form = new ToolFormBase( step );
+                    if ( step.post_job_actions && step.post_job_actions.length ) {
+                        form.portlet.append( $( '<div/>' ).addClass( 'ui-form-element-disabled' )
+                            .append( $( '<div/>' ).addClass( 'ui-form-title' ).html( '<b>Job Post Actions</b>' ) )
+                            .append( $( '<div/>' ).addClass( 'ui-form-preview' ).html(
+                                _.reduce( step.post_job_actions, function( memo, value ) {
+                                    return memo + ' ' + value.short_str;
+                                }, '' ) ) )
+                            );
+                    }
+                } else {
+                    _.each( step.inputs, function( input ) { input.flavor = 'module' } );
+                    form = new Form( Utils.merge({
+                        title    : '<b>' + step.name + '</b>',
+                        onchange : function() { _.each( self.links[ step.index ], function( link ) { self._refreshStep( link ) } ) },
+                        inputs   : step.inputs && step.inputs.length > 0 ? step.inputs : [ { type: 'hidden', name: 'No options available.', ignore: null } ]
+                    }, step ) );
+                }
+                self.forms[ step.index ] = form;
+                self._append( self.$steps, form.$el );
+                step.needs_refresh && self._refreshStep( step );
+                form.portlet[ !self.show_progress ? 'enable' : 'disable' ]();
+                self.show_progress && self.execute_btn.model.set( { wait        : true,
+                                                                    wait_text   : 'Preparing...',
+                                                                    percentage  : ( step.index + 1 ) * 100.0 / self.steps.length } );
+                Galaxy.emit.debug( 'tool-form-composite::_renderStep()', step.index + ' : Workflow step state ready.', step );
+                setTimeout( function() { promise.resolve() }, 0 );
+            });
+        },
+
+        /** Refreshes step values from source step values */
+        _refreshStep: function( step ) {
+            var self = this;
+            var form = this.forms[ step.index ];
+            if ( form ) {
+                _.each( self.parms[ step.index ], function( input, name ) {
+                    if ( input.step_linked || input.wp_linked ) {
+                        var field = form.field_list[ form.data.match( name ) ];
+                        if ( field ) {
+                            var new_value = undefined;
+                            if ( input.step_linked ) {
+                                new_value = { values: [] };
+                                _.each( input.step_linked, function( source_step ) {
+                                    if ( self._isDataStep( source_step ) ) {
+                                        var value = self.forms[ source_step.index ].data.create().input;
+                                        value && _.each( value.values, function( v ) { new_value.values.push( v ) } );
+                                    }
+                                });
+                                if ( !input.multiple && new_value.values.length > 0 ) {
+                                    new_value = { values: [ new_value.values[ 0 ] ] };
+                                }
+                            } else if ( input.wp_linked ) {
+                                var wp_field = self.wp_form.field_list[ self.wp_form.data.match( input.wp_linked ) ];
+                                wp_field && ( new_value = wp_field.value() );
+                            }
+                            if ( new_value !== undefined ) {
+                                field.value( new_value );
+                            }
+                        }
+                    }
+                });
+                form.trigger( 'change' );
+            } else {
+                step.needs_refresh = true;
+            }
+        },
+
+        /** Refresh the history after job submission while form is shown */
+        _refreshHistory: function() {
+            var self = this;
+            var history = parent.Galaxy && parent.Galaxy.currHistoryPanel && parent.Galaxy.currHistoryPanel.model;
+            this._refresh_history && clearTimeout( this._refresh_history );
+            if ( history ) {
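+                // keep polling until the submitted jobs show up; once the panel lists
+                // unfinished datasets its own updater takes over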
+                history.refresh().success( function() {
+                    if ( history.numOfUnfinishedShownContents() === 0 ) {
+                        self._refresh_history = setTimeout( function() { self._refreshHistory() }, history.UPDATE_DELAY );
+                    }
+                });
+            }
+        },
+
+        /** Build remaining steps */
+        _execute: function() {
+            var self = this;
+            this.show_progress = true;
+            this._enabled( false );
+            this.deferred.execute( function( promise ) {
+                setTimeout( function() { promise.resolve(); self._submit(); }, 0 );
+            });
+        },
+
+        /** Validate and submit workflow */
+        _submit: function() {
+            var self = this;
+            var history_form_data = this.history_form.data.create();
+            var job_def = {
+                new_history_name      : history_form_data[ 'new_history|name' ] ? history_form_data[ 'new_history|name' ] : null,
+                history_id            : !history_form_data[ 'new_history|name' ] ? this.model.get( 'history_id' ) : null,
+                replacement_params    : this.wp_form ? this.wp_form.data.create() : {},
+                parameters            : {},
+                // Tool form will submit flat maps for each parameter
+                // (e.g. "repeat_0|cond|param": "foo" instead of nested
+                // data structures).
+                parameters_normalized : true,
+                // Tool form always wants a list of invocations back
+                // so that inputs can be batched.
+                batch                 : true
+            };
+            var validated = true;
+            for ( var i in this.forms ) {
+                var form = this.forms[ i ];
+                var job_inputs  = form.data.create();
+                var step        = self.steps[ i ];
+                var step_index  = step.step_index;
+                form.trigger( 'reset' );
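+                // collect a flat parameter map per server-side step index; inputs wired to
+                // upstream steps are supplied by the workflow connections and skipped here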
+                for ( var job_input_id in job_inputs ) {
+                    var input_value = job_inputs[ job_input_id ];
+                    var input_id    = form.data.match( job_input_id );
+                    var input_field = form.field_list[ input_id ];
+                    var input_def   = form.input_list[ input_id ];
+                    if ( !input_def.step_linked ) {
+                        if ( this._isDataStep( step ) ) {
+                            validated = input_value && input_value.values && input_value.values.length > 0;
+                        } else {
+                            validated = input_def.optional || ( input_def.is_workflow && input_value !== '' ) || ( !input_def.is_workflow && input_value !== null );
+                        }
+                        if ( !validated ) {
+                            form.highlight( input_id );
+                            break;
+                        }
+                        job_def.parameters[ step_index ] = job_def.parameters[ step_index ] || {};
+                        job_def.parameters[ step_index ][ job_input_id ] = job_inputs[ job_input_id ];
+                    }
+                }
+                if ( !validated ) {
+                    break;
+                }
+            }
+            if ( !validated ) {
+                self._enabled( true );
+                Galaxy.emit.debug( 'tool-form-composite::submit()', 'Validation failed.', job_def );
+            } else {
+                Galaxy.emit.debug( 'tool-form-composite::submit()', 'Validation complete.', job_def );
+                Utils.request({
+                    type    : 'POST',
+                    url     : Galaxy.root + 'api/workflows/' + this.model.id + '/invocations',
+                    data    : job_def,
+                    success : function( response ) {
+                        Galaxy.emit.debug( 'tool-form-composite::submit', 'Submission successful.', response );
+                        self.$el.children().hide();
+                        self.$el.append( self._templateSuccess( response ) );
+
+                        // Show Webhook if job is running
+                        if ($.isArray( response ) && response.length > 0) {
+                            self.$el.append( $( '<div/>', { id: 'webhook-view' } ) );
+                            var WebhookApp = new Webhooks.WebhookView({
+                                urlRoot: Galaxy.root + 'api/webhooks/workflow'
+                            });
+                        }
+
+                        self._refreshHistory();
+                    },
+                    error   : function( response ) {
+                        Galaxy.emit.debug( 'tool-form-composite::submit', 'Submission failed.', response );
+                        var input_found = false;
+                        if ( response && response.err_data ) {
+                            for ( var i in self.forms ) {
+                                var form = self.forms[ i ];
+                                var step_related_errors = response.err_data[ form.options.step_index ];
+                                if ( step_related_errors ) {
+                                    var error_messages = form.data.matchResponse( step_related_errors );
+                                    for ( var input_id in error_messages ) {
+                                        form.highlight( input_id, error_messages[ input_id ] );
+                                        input_found = true;
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                        if ( !input_found ) {
+                            self.modal.show({
+                                title   : 'Workflow submission failed',
+                                body    : self._templateError( job_def, response && response.err_msg ),
+                                buttons : {
+                                    'Close' : function() {
+                                        self.modal.hide();
+                                    }
+                                }
+                            });
+                        }
+                    },
+                    complete: function() {
+                        self._enabled( true );
+                    }
+                });
+            }
+        },
+
+        /** Append a spacer and the given element to the container */
+        _append: function( $container, $el ) {
+            $container.append( '<p/>' ).append( $el );
+        },
+
+        /** Set enabled/disabled state */
+        _enabled: function( enabled ) {
+            this.execute_btn.model.set( { wait: !enabled, wait_text: 'Sending...', percentage: -1 } );
+            this.wp_form && this.wp_form.portlet[ enabled ? 'enable' : 'disable' ]();
+            this.history_form && this.history_form.portlet[ enabled ? 'enable' : 'disable' ]();
+            _.each( this.forms, function( form ) { form && form.portlet[ enabled ? 'enable' : 'disable' ]() } );
+        },
+
+        /** Handle workflow parameter */
+        _isWorkflowParameter: function( value ) {
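+            // e.g. a value of '${reference}' yields the sanitized name 'reference'; note
+            // that only the leading '$' is tested, the '${' prefix and '}' suffix are
+            // assumed by the substring below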
+            if ( String( value ).substring( 0, 1 ) === '$' ) {
+                return Utils.sanitize( value.substring( 2, value.length - 1 ) );
+            }
+        },
+
+        /** Is data input module/step */
+        _isDataStep: function( steps ) {
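+            // accepts a single step or an array; true only if every entry is a data input
+            // module (its step_type starts with 'data')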
+            var lst = $.isArray( steps ) ? steps : [ steps ];
+            for ( var i = 0; i < lst.length; i++ ) {
+                var step = lst[ i ];
+                if ( !step || !step.step_type || !step.step_type.startsWith( 'data' ) ) {
+                    return false;
+                }
+            }
+            return true;
+        },
+
+        /** Templates */
+        _templateSuccess: function( response ) {
+            if ( $.isArray( response ) && response.length > 0 ) {
+                return $( '<div/>' ).addClass( 'donemessagelarge' )
+                                    .append( $( '<p/>' ).html( 'Successfully invoked workflow <b>' + Utils.sanitize( this.model.get( 'name' ) ) + '</b>' + ( response.length > 1 ? ' <b>' + response.length + ' times</b>' : '' ) + '.' ) )
+                                    .append( $( '<p/>' ).append( '<b/>' ).text( 'You can check the status of queued jobs and view the resulting data by refreshing the History pane. When the job has been run the status will change from \'running\' to \'finished\' if completed successfully or \'error\' if problems were encountered.' ) );
+            } else {
+                return this._templateError( response, 'Invalid success response. No invocations found.' );
+            }
+        },
+
+        _templateError: function( response, err_msg ) {
+            return  $( '<div/>' ).addClass( 'errormessagelarge' )
+                                 .append( $( '<p/>' ).text( 'The server could not complete the request. Please contact the Galaxy Team if this error persists. ' + ( JSON.stringify( err_msg ) || '' ) ) )
+                                 .append( $( '<pre/>' ).text( JSON.stringify( response, null, 4 ) ) );
+        }
+    });
+    return {
+        View: View
+    };
+});
diff --git a/client/galaxy/scripts/mvc/tool/tool-form-workflow.js b/client/galaxy/scripts/mvc/tool/tool-form-workflow.js
new file mode 100644
index 0000000..6d676a5
--- /dev/null
+++ b/client/galaxy/scripts/mvc/tool/tool-form-workflow.js
@@ -0,0 +1,240 @@
+/** This is the workflow tool form. */
+define( [ 'utils/utils', 'mvc/tool/tool-form-base' ],
+    function( Utils, ToolFormBase ) {
+    var View = Backbone.View.extend({
+        initialize: function( options ) {
+            var self = this;
+            this.workflow = options.workflow;
+            this.node     = options.node;
+            this.setElement( '<div/>' );
+            if ( this.node ) {
+                this.post_job_actions = this.node.post_job_actions || {};
+                Utils.deepeach( options.inputs, function( input ) {
+                    if ( input.type ) {
+                        if ( [ 'data', 'data_collection' ].indexOf( input.type ) != -1 ) {
+                            input.type = 'hidden';
+                            input.info = 'Data input \'' + input.name + '\' (' + Utils.textify( input.extensions ) + ')';
+                            input.value = { '__class__': 'RuntimeValue' };
+                        } else {
+                            input.collapsible_value = { '__class__': 'RuntimeValue' };
+                            input.is_workflow = ( input.options && input.options.length == 0 ) ||
+                                                ( [ 'integer', 'float' ].indexOf( input.type ) != -1 );
+                        }
+                    }
+                });
+                Utils.deepeach( options.inputs, function( input ) {
+                    input.type == 'conditional' && ( input.test_param.collapsible_value = undefined );
+                });
+                this._makeSections( options );
+                this.form = new ToolFormBase( Utils.merge( options, {
+                    text_enable     : 'Set in Advance',
+                    text_disable    : 'Set at Runtime',
+                    narrow          : true,
+                    initial_errors  : true,
+                    sustain_version : true,
+                    cls             : 'ui-portlet-narrow',
+                    update_url      : Galaxy.root + 'api/workflows/build_module',
+                    update          : function( data ) {
+                        // This hasn't modified the workflow, just returned
+                        // module information for the tool to update the workflow
+                        // state stored on the client with. User needs to save
+                        // for this to take effect.
+                        self.node.update_field_data( data );
+                        self.form.errors( data && data.tool_model );
+                    }
+                }));
+                this.$el.append( this.form.$el );
+            } else {
+                Galaxy.emit.debug('tool-form-workflow::initialize()', 'Node not found in workflow.');
+            }
+        },
+
+        /** Builds all sub sections */
+        _makeSections: function( options ) {
+            var inputs = options.inputs;
+            var datatypes = options.datatypes;
+            inputs[ Utils.uid() ] = {
+                label   : 'Annotation / Notes',
+                name    : 'annotation',
+                type    : 'text',
+                area    : true,
+                help    : 'Add an annotation or note for this step. It will be shown with the workflow.',
+                value   : this.node.annotation
+            };
+            var output_id = this.node.output_terminals && Object.keys( this.node.output_terminals )[ 0 ];
+            if ( output_id ) {
+                inputs[ Utils.uid() ] = {
+                    name        : 'pja__' + output_id + '__EmailAction',
+                    label       : 'Email notification',
+                    type        : 'boolean',
+                    value       : String( Boolean( this.post_job_actions[ 'EmailAction' + output_id ] ) ),
+                    ignore      : 'false',
+                    help        : 'An email notification will be sent when the job has completed.',
+                    payload     : {
+                        'host'  : window.location.host
+                    }
+                };
+                inputs[ Utils.uid() ] = {
+                    name        : 'pja__' + output_id + '__DeleteIntermediatesAction',
+                    label       : 'Output cleanup',
+                    type        : 'boolean',
+                    value       : String( Boolean( this.post_job_actions[ 'DeleteIntermediatesAction' + output_id ] ) ),
+                    ignore      : 'false',
+                    help        : 'Upon completion of this step, delete non-starred outputs from completed workflow steps if they are no longer required as inputs.'
+                };
+                for ( var i in this.node.output_terminals ) {
+                    inputs[ Utils.uid() ] = this._makeSection( i, datatypes );
+                }
+            }
+        },
+
+        /** Builds sub section with step actions/annotation */
+        _makeSection: function( output_id, datatypes ) {
+            var self = this;
+            var extensions = [];
+            var input_terminal_names = [];
+            for ( var key in datatypes ) {
+                extensions.push( { 0 : datatypes[ key ], 1 : datatypes[ key ] } );
+            }
+            for ( var key in this.node.input_terminals ) {
+                input_terminal_names.push( this.node.input_terminals[ key ].name );
+            }
+            extensions.sort( function( a, b ) {
+                return a[ 0 ] > b[ 0 ] ? 1 : a[ 0 ] < b[ 0 ] ? -1 : 0;
+            });
+            extensions.unshift({
+                0 : 'Sequences',
+                1 : 'Sequences'
+            });
+            extensions.unshift({
+                0 : 'Roadmaps',
+                1 : 'Roadmaps'
+            });
+            extensions.unshift({
+                0 : 'Leave unchanged',
+                1 : '__empty__'
+            });
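+            // 'Leave unchanged' maps to the '__empty__' sentinel, which matches the ignore
+            // value of the ChangeDatatypeAction input below, so no action is emitted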
+            var output = this.node.getWorkflowOutput( output_id );
+            var input_config = {
+                title   : 'Configure Output: \'' + output_id + '\'',
+                type    : 'section',
+                flat    : true,
+                inputs  : [{
+                    label       : 'Label',
+                    type        : 'text',
+                    value       : output && output.label || '',
+                    help        : 'This will provide a short name to describe the output - this must be unique across workflows.',
+                    onchange    : function( new_value ) {
+                        self.workflow.attemptUpdateOutputLabel( self.node, output_id, new_value );
+                    }
+                },{
+                    action      : 'RenameDatasetAction',
+                    pja_arg     : 'newname',
+                    label       : 'Rename dataset',
+                    type        : 'text',
+                    value       : '',
+                    ignore      : '',
+                    help        : 'This action will rename the output dataset. Click <a href="https://wiki.galaxyproject.org/Learn/AdvancedWorkflow/Variables">here</a> for more information. Valid inputs are: <strong>' + input_terminal_names.join(', ') + '</strong>.'
+                },{
+                    action      : 'ChangeDatatypeAction',
+                    pja_arg     : 'newtype',
+                    label       : 'Change datatype',
+                    type        : 'select',
+                    ignore      : '__empty__',
+                    value       : '__empty__',
+                    options     : extensions,
+                    help        : 'This action will change the datatype of the output to the indicated value.'
+                },{
+                    action      : 'TagDatasetAction',
+                    pja_arg     : 'tags',
+                    label       : 'Tags',
+                    type        : 'text',
+                    value       : '',
+                    ignore      : '',
+                    help        : 'This action will set tags for the dataset.'
+                },{
+                    title   : 'Assign columns',
+                    type    : 'section',
+                    flat    : true,
+                    inputs  : [{
+                        action      : 'ColumnSetAction',
+                        pja_arg     : 'chromCol',
+                        label       : 'Chrom column',
+                        type        : 'integer',
+                        value       : '',
+                        ignore      : ''
+                    },{
+                        action      : 'ColumnSetAction',
+                        pja_arg     : 'startCol',
+                        label       : 'Start column',
+                        type        : 'integer',
+                        value       : '',
+                        ignore      : ''
+                    },{
+                        action      : 'ColumnSetAction',
+                        pja_arg     : 'endCol',
+                        label       : 'End column',
+                        type        : 'integer',
+                        value       : '',
+                        ignore      : ''
+                    },{
+                        action      : 'ColumnSetAction',
+                        pja_arg     : 'strandCol',
+                        label       : 'Strand column',
+                        type        : 'integer',
+                        value       : '',
+                        ignore      : ''
+                    },{
+                        action      : 'ColumnSetAction',
+                        pja_arg     : 'nameCol',
+                        label       : 'Name column',
+                        type        : 'integer',
+                        value       : '',
+                        ignore      : ''
+                    }],
+                    help    : 'This action will set column assignments in the output dataset. Blank fields are ignored.'
+                }]
+            };
+
+            // visit input nodes and enrich by name/value pairs from server data
+            function visit ( head, head_list ) {
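+                // post job action inputs follow the naming scheme
+                // 'pja__<output_id>__<Action>' with an optional '__<pja_arg>' suffix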
+                head_list = head_list || [];
+                head_list.push( head );
+                for ( var i in head.inputs ) {
+                    var input = head.inputs[ i ];
+                    var action = input.action;
+                    if ( action ) {
+                        input.name = 'pja__' + output_id + '__' + input.action;
+                        if ( input.pja_arg ) {
+                            input.name += '__' + input.pja_arg;
+                        }
+                        if ( input.payload ) {
+                            for ( var p_id in input.payload ) {
+                                // re-key the payload entry under its fully qualified input name
+                                input.payload[ input.name + '__' + p_id ] = input.payload[ p_id ];
+                                delete input.payload[ p_id ];
+                            }
+                        }
+                        var d = self.post_job_actions[ input.action + output_id ];
+                        if ( d ) {
+                            for ( var j in head_list ) {
+                                head_list[ j ].expanded = true;
+                            }
+                            if ( input.pja_arg ) {
+                                input.value = d.action_arguments && d.action_arguments[ input.pja_arg ] || input.value;
+                            } else {
+                                input.value = 'true';
+                            }
+                        }
+                    }
+                    input.inputs && visit( input, head_list.slice( 0 ) );
+                }
+            }
+            visit( input_config );
+            return input_config;
+        }
+    });
+
+    return {
+        View: View
+    };
+});
diff --git a/client/galaxy/scripts/mvc/tool/tool-form.js b/client/galaxy/scripts/mvc/tool/tool-form.js
new file mode 100644
index 0000000..b4c0953
--- /dev/null
+++ b/client/galaxy/scripts/mvc/tool/tool-form.js
@@ -0,0 +1,183 @@
+/* This is the regular tool form */
+define([ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-modal', 'mvc/tool/tool-form-base', 'mvc/webhooks' ],
+    function( Utils, Ui, Modal, ToolFormBase, Webhooks ) {
+    var View = Backbone.View.extend({
+        initialize: function( options ) {
+            var self = this;
+            this.modal = parent.Galaxy.modal || new Modal.View();
+            this.form = new ToolFormBase( Utils.merge({
+                listen_to_history : true,
+                always_refresh    : false,
+                customize         : function( options ) {
+                    // build execute button
+                    var execute_btn;
+                    options.buttons = {
+                        execute: execute_btn = new Ui.Button({
+                            icon     : 'fa-check',
+                            tooltip  : 'Execute: ' + options.name + ' (' + options.version + ')',
+                            title    : 'Execute',
+                            cls      : 'ui-button btn btn-primary',
+                            floating : 'clear',
+                            onclick  : function() {
+                                execute_btn.wait();
+                                self.form.portlet.disable();
+                                self.submit( options, function() {
+                                    execute_btn.unwait();
+                                    self.form.portlet.enable();
+                                } );
+                            }
+                        })
+                    };
+                    // remap feature
+                    if ( options.job_id && options.job_remap ) {
+                        options.inputs[ 'rerun_remap_job_id' ] = {
+                            label       : 'Resume dependencies from this job',
+                            name        : 'rerun_remap_job_id',
+                            type        : 'select',
+                            display     : 'radio',
+                            ignore      : '__ignore__',
+                            value       : '__ignore__',
+                            options     : [ [ 'Yes', options.job_id ], [ 'No', '__ignore__' ] ],
+                            help        : 'The previous run of this tool failed and other tools were waiting for it to finish successfully. Use this option to resume those tools using the new output(s) of this tool run.'
+                        };
+                    }
+                }
+            }, options ) );
+            this.deferred = this.form.deferred;
+            this.setElement( '<div/>' );
+            this.$el.append( this.form.$el );
+        },
+
+        /** Submit a regular job.
+         * @param{dict}     options   - Specifies tool id and version
+         * @param{function} callback  - Called when request has completed
+         */
+        submit: function( options, callback ) {
+            var self = this;
+            var job_def = {
+                tool_id         : options.id,
+                tool_version    : options.version,
+                inputs          : this.form.data.create()
+            };
+            this.form.trigger( 'reset' );
+            if ( !self.validate( job_def ) ) {
+                Galaxy.emit.debug( 'tool-form::submit()', 'Submission canceled. Validation failed.' );
+                callback && callback();
+                return;
+            }
+            if ( options.action !== Galaxy.root + 'tool_runner/index' ) {
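+                // tools with a custom action URL are submitted as a conventional HTML
+                // form post instead of going through the tools API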
+                var $f = $( '<form/>' ).attr( { action: options.action, method: options.method, enctype: options.enctype } );
+                _.each( job_def.inputs, function( value, key ) { $f.append( $( '<input/>' ).attr( { 'name': key, 'value': value } ) ) } );
+                $f.hide().appendTo( 'body' ).submit().remove();
+                callback && callback();
+                return;
+            }
+            Galaxy.emit.debug( 'tool-form::submit()', 'Validation complete.', job_def );
+            Utils.request({
+                type    : 'POST',
+                url     : Galaxy.root + 'api/tools',
+                data    : job_def,
+                success : function( response ) {
+                    callback && callback();
+                    self.$el.children().hide();
+                    self.$el.append( self._templateSuccess( response ) );
+                    // Show Webhook if job is running
+                    if ( response.jobs && response.jobs.length > 0 ) {
+                        self.$el.append( $( '<div/>', { id: 'webhook-view' } ) );
+                        var WebhookApp = new Webhooks.WebhookView({
+                            urlRoot: Galaxy.root + 'api/webhooks/tool'
+                        });
+                    }
+                    parent.Galaxy && parent.Galaxy.currHistoryPanel && parent.Galaxy.currHistoryPanel.refreshContents();
+                },
+                error   : function( response ) {
+                    callback && callback();
+                    Galaxy.emit.debug( 'tool-form::submit', 'Submission failed.', response );
+                    var input_found = false;
+                    if ( response && response.err_data ) {
+                        var error_messages = self.form.data.matchResponse( response.err_data );
+                        for ( var input_id in error_messages ) {
+                            self.form.highlight( input_id, error_messages[ input_id ]);
+                            input_found = true;
+                            break;
+                        }
+                    }
+                    if ( !input_found ) {
+                        self.modal.show({
+                            title   : 'Job submission failed',
+                            body    : self._templateError( job_def, response && response.err_msg ),
+                            buttons : { 'Close' : function() { self.modal.hide() } }
+                        });
+                    }
+                }
+            });
+        },
+
+        /** Validate job dictionary.
+         * @param{dict}     job_def   - Job execution dictionary
+        */
+        validate: function( job_def ) {
+            var job_inputs  = job_def.inputs;
+            var batch_n     = -1;
+            var batch_src   = null;
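+            // batch mode fields must be consistent: every field has to use the same source
+            // type (e.g. 'hda' vs. 'hdca') and select the same number of values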
+            for ( var job_input_id in job_inputs ) {
+                var input_value = job_inputs[ job_input_id ];
+                var input_id    = this.form.data.match( job_input_id );
+                var input_field = this.form.field_list[ input_id ];
+                var input_def   = this.form.input_list[ input_id ];
+                if ( !input_id || !input_def || !input_field ) {
+                    Galaxy.emit.debug('tool-form::validate()', 'Retrieving input objects failed.');
+                    continue;
+                }
+                if ( !input_def.optional && input_value == null ) {
+                    this.form.highlight( input_id );
+                    return false;
+                }
+                if ( input_value && input_value.batch ) {
+                    var n = input_value.values.length;
+                    var src = n > 0 && input_value.values[ 0 ] && input_value.values[ 0 ].src;
+                    if ( src ) {
+                        if ( batch_src === null ) {
+                            batch_src = src;
+                        } else if ( batch_src !== src ) {
+                            this.form.highlight( input_id, 'Please select either dataset or dataset list fields for all batch mode fields.' );
+                            return false;
+                        }
+                    }
+                    if ( batch_n === -1 ) {
+                        batch_n = n;
+                    } else if ( batch_n !== n ) {
+                        this.form.highlight( input_id, 'Please make sure that you select the same number of inputs for all batch mode fields. This field contains <b>' + n + '</b> selection(s) while a previous field contains <b>' + batch_n + '</b>.' );
+                        return false;
+                    }
+                }
+            }
+            return true;
+        },
+
+        _templateSuccess: function( response ) {
+            if ( response.jobs && response.jobs.length > 0 ) {
+                var njobs = response.jobs.length;
+                var njobs_text = njobs == 1 ? '1 job has' : njobs + ' jobs have';
+                var $message = $( '<div/>' ).addClass( 'donemessagelarge' )
+                                            .append( $( '<p/>' ).text( njobs_text + ' been successfully added to the queue - resulting in the following datasets:' ) );
+                _.each( response.outputs, function( output ) {
+                    $message.append( $( '<p/>' ).addClass( 'messagerow' ).append( $( '<b/>' ).text( output.hid + ': ' + output.name ) ) );
+                });
+                $message.append( $( '<p/>' ).append( '<b/>' ).text( 'You can check the status of queued jobs and view the resulting data by refreshing the History pane. When the job has been run the status will change from \'running\' to \'finished\' if completed successfully or \'error\' if problems were encountered.' ) );
+                return $message;
+            } else {
+                return this._templateError( response, 'Invalid success response. No jobs found.' );
+            }
+        },
+
+        _templateError: function( response, err_msg ) {
+            return  $( '<div/>' ).addClass( 'errormessagelarge' )
+                                 .append( $( '<p/>' ).text( 'The server could not complete the request. Please contact the Galaxy Team if this error persists. ' + ( err_msg || '' ) ) )
+                                 .append( $( '<pre/>' ).text( JSON.stringify( response, null, 4 ) ) );
+        }
+    });
+
+    return {
+        View: View
+    };
+});
diff --git a/client/galaxy/scripts/mvc/tool/tools.js b/client/galaxy/scripts/mvc/tool/tools.js
new file mode 100644
index 0000000..8598123
--- /dev/null
+++ b/client/galaxy/scripts/mvc/tool/tools.js
@@ -0,0 +1,869 @@
+/**
+ * Model, view, and controller objects for Galaxy tools and tool panel.
+ */
+
+define([
+    "libs/underscore",
+    "viz/trackster/util",
+    "mvc/dataset/data",
+    "mvc/tool/tool-form"
+], function(_, util, data, ToolForm) {
+    'use strict';
+
+/**
+ * Mixin for tracking model visibility.
+ */
+var VisibilityMixin = {
+    hidden: false,
+
+    show: function() {
+        this.set("hidden", false);
+    },
+
+    hide: function() {
+        this.set("hidden", true);
+    },
+
+    toggle: function() {
+        this.set("hidden", !this.get("hidden"));
+    },
+
+    is_visible: function() {
+        return !this.attributes.hidden;
+    }
+
+};
+
+/**
+ * A tool parameter.
+ */
+var ToolParameter = Backbone.Model.extend({
+    defaults: {
+        name: null,
+        label: null,
+        type: null,
+        value: null,
+        html: null,
+        num_samples: 5
+    },
+
+    initialize: function(options) {
+        this.attributes.html = unescape(this.attributes.html);
+    },
+
+    copy: function() {
+        return new ToolParameter(this.toJSON());
+    },
+
+    set_value: function(value) {
+        this.set('value', value || '');
+    }
+});
+
+var ToolParameterCollection = Backbone.Collection.extend({
+    model: ToolParameter
+});
+
+/**
+ * A data tool parameter.
+ */
+var DataToolParameter = ToolParameter.extend({});
+
+/**
+ * An integer tool parameter.
+ */
+var IntegerToolParameter = ToolParameter.extend({
+    set_value: function(value) {
+        this.set('value', parseInt(value, 10));
+    },
+
+    /**
+     * Returns samples from a tool input.
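+     * Relies on the page-global d3 and returns roughly num_samples evenly
+     * spaced tick values between the parameter's 'min' and 'max' attributes.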
+     */
+    get_samples: function() {
+        return d3.scale.linear()
+                        .domain([this.get('min'), this.get('max')])
+                        .ticks(this.get('num_samples'));
+    }
+});
+
+var FloatToolParameter = IntegerToolParameter.extend({
+    set_value: function(value) {
+        this.set('value', parseFloat(value));
+    }
+});
+
+/**
+ * A select tool parameter.
+ */
+var SelectToolParameter = ToolParameter.extend({
+    /**
+     * Returns tool options.
+     */
+    get_samples: function() {
+        return _.map(this.get('options'), function(option) {
+            return option[0];
+        });
+    }
+});
+
+// Set up dictionary of parameter types.
+ToolParameter.subModelTypes = {
+    'integer': IntegerToolParameter,
+    'float': FloatToolParameter,
+    'data': DataToolParameter,
+    'select': SelectToolParameter
+};
+
+/**
+ * A Galaxy tool.
+ */
+var Tool = Backbone.Model.extend({
+    // Default attributes.
+    defaults: {
+        id: null,
+        name: null,
+        description: null,
+        target: null,
+        inputs: [],
+        outputs: []
+    },
+
+    urlRoot: Galaxy.root + 'api/tools',
+
+    initialize: function(options) {
+
+        // Set parameters.
+        this.set('inputs', new ToolParameterCollection(_.map(options.inputs, function(p) {
+            var p_class = ToolParameter.subModelTypes[p.type] || ToolParameter;
+            return new p_class(p);
+        })));
+    },
+
+    /**
+     * Returns a JSON representation of the tool, converting the inputs collection manually.
+     */
+    toJSON: function() {
+        var rval = Backbone.Model.prototype.toJSON.call(this);
+
+        // Convert inputs to JSON manually.
+        rval.inputs = this.get('inputs').map(function(i) { return i.toJSON(); });
+        return rval;
+    },
+
+    /**
+     * Removes inputs of a particular type; this is useful because not all inputs can be handled by
+     * client and server yet.
+     */
+    remove_inputs: function(types) {
+        var tool = this,
+            incompatible_inputs = tool.get('inputs').filter( function(input) {
+                return ( types.indexOf( input.get('type') ) !== -1);
+            });
+        tool.get('inputs').remove(incompatible_inputs);
+    },
+
+    /**
+     * Returns object copy, optionally including only inputs that can be sampled.
+     */
+    copy: function(only_samplable_inputs) {
+        var copy = new Tool(this.toJSON());
+
+        // Return only samplable inputs if flag is set.
+        if (only_samplable_inputs) {
+            var valid_inputs = new Backbone.Collection();
+            copy.get('inputs').each(function(input) {
+                if (input.get_samples()) {
+                    valid_inputs.push(input);
+                }
+            });
+            copy.set('inputs', valid_inputs);
+        }
+
+        return copy;
+    },
+
+    apply_search_results: function(results) {
+        ( _.indexOf(results, this.attributes.id) !== -1 ? this.show() : this.hide() );
+        return this.is_visible();
+    },
+
+    /**
+     * Set a tool input's value.
+     */
+    set_input_value: function(name, value) {
+        this.get('inputs').find(function(input) {
+            return input.get('name') === name;
+        }).set('value', value);
+    },
+
+    /**
+     * Set many input values at once.
+     */
+    set_input_values: function(inputs_dict) {
+        var self = this;
+        _.each(_.keys(inputs_dict), function(input_name) {
+            self.set_input_value(input_name, inputs_dict[input_name]);
+        });
+    },
+
+    /**
+     * Run tool; returns a Deferred that resolves to the tool's output(s).
+     */
+    run: function() {
+        return this._run();
+    },
+
+    /**
+     * Rerun tool using regions and a target dataset.
+     */
+    rerun: function(target_dataset, regions) {
+        return this._run({
+            action: 'rerun',
+            target_dataset_id: target_dataset.id,
+            regions: regions
+        });
+    },
+
+    /**
+     * Returns input dict for tool's inputs.
+     */
+    get_inputs_dict: function() {
+        var input_dict = {};
+        this.get('inputs').each(function(input) {
+            input_dict[input.get('name')] = input.get('value');
+        });
+        return input_dict;
+    },
+
+    /**
+     * Run tool; returns a Deferred that resolves to the tool's output(s).
+     * NOTE: this method is a helper method and should not be called directly.
+     */
+    _run: function(additional_params) {
+        // Create payload.
+        var payload = _.extend({
+                tool_id: this.id,
+                inputs: this.get_inputs_dict()
+            }, additional_params);
+
+        // Because job may require indexing datasets, use server-side
+        // deferred to ensure that job is run. Also use deferred that
+        // resolves to outputs from tool.
+        var run_deferred = $.Deferred(),
+            ss_deferred = new util.ServerStateDeferred({
+            ajax_settings: {
+                url: this.urlRoot,
+                data: JSON.stringify(payload),
+                dataType: "json",
+                contentType: 'application/json',
+                type: "POST"
+            },
+            interval: 2000,
+            success_fn: function(response) {
+                return response !== "pending";
+            }
+        });
+
+        // Run job and resolve run_deferred to tool outputs.
+        $.when(ss_deferred.go()).then(function(result) {
+            run_deferred.resolve(new data.DatasetCollection(result));
+        });
+        return run_deferred;
+    }
+});
+_.extend(Tool.prototype, VisibilityMixin);
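+
+// Illustrative sketch (not part of the upstream module): a typical Tool
+// round-trip. The tool id and input names below are hypothetical.
+//
+//     var tool = tool_collection.get( 'some_tool_id' );
+//     tool.set_input_values({ input1: 'chr1', threshold: 5 });
+//     tool.run().then( function( outputs ) {
+//         // outputs is a data.DatasetCollection of the tool's results
+//     });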
+
+/**
+ * Tool view.
+ */
+var ToolView = Backbone.View.extend({
+
+});
+
+/**
+ * Wrap collection of tools for fast access/manipulation.
+ */
+var ToolCollection = Backbone.Collection.extend({
+    model: Tool
+});
+
+/**
+ * Label or section header in tool panel.
+ */
+var ToolSectionLabel = Backbone.Model.extend(VisibilityMixin);
+
+/**
+ * Section of tool panel with elements (labels and tools).
+ */
+var ToolSection = Backbone.Model.extend({
+    defaults: {
+        elems: [],
+        open: false
+    },
+
+    clear_search_results: function() {
+        _.each(this.attributes.elems, function(elt) {
+            elt.show();
+        });
+
+        this.show();
+        this.set("open", false);
+    },
+
+    apply_search_results: function(results) {
+        var all_hidden = true,
+            cur_label;
+        _.each(this.attributes.elems, function(elt) {
+            if (elt instanceof ToolSectionLabel) {
+                cur_label = elt;
+                cur_label.hide();
+            }
+            else if (elt instanceof Tool) {
+                if (elt.apply_search_results(results)) {
+                    all_hidden = false;
+                    if (cur_label) {
+                        cur_label.show();
+                    }
+                }
+            }
+        });
+
+        if (all_hidden) {
+            this.hide();
+        }
+        else {
+            this.show();
+            this.set("open", true);
+        }
+    }
+});
+_.extend(ToolSection.prototype, VisibilityMixin);
+
+/**
+ * Tool search that updates results when query is changed. Result value of null
+ * indicates that query was not run; if not null, results are from search using
+ * query.
+ */
+var ToolSearch = Backbone.Model.extend({
+    defaults: {
+        search_hint_string: "search tools",
+        min_chars_for_search: 3,
+        clear_btn_url: "",
+        visible: true,
+        query: "",
+        results: null,
+        // ESC (27) will clear the input field and tool search filters
+        clear_key: 27
+    },
+
+    urlRoot: Galaxy.root + 'api/tools',
+
+    initialize: function() {
+        this.on("change:query", this.do_search);
+    },
+
+    /**
+     * Do the search and update the results.
+     */
+    do_search: function() {
+        var query = this.attributes.query;
+
+        // If query is too short, do not search.
+        if (query.length < this.attributes.min_chars_for_search) {
+            this.set("results", null);
+            return;
+        }
+
+        // Do search via AJAX.
+        var q = query;
+        // Stop previous ajax-request
+        if (this.timer) {
+            clearTimeout(this.timer);
+        }
+        // Start a new ajax-request in X ms
+        $("#search-clear-btn").hide();
+        $("#search-spinner").show();
+        var self = this;
+        this.timer = setTimeout(function () {
+            // log the search to analytics if present
+            if ( typeof ga !== 'undefined' ) {
+                ga( 'send', 'pageview', Galaxy.root + '?q=' + q );
+            }
+            $.get( self.urlRoot, { q: q }, function (data) {
+                self.set("results", data);
+                $("#search-spinner").hide();
+                $("#search-clear-btn").show();
+            }, "json" );
+        }, 400 );
+    },
+
+    clear_search: function() {
+        this.set("query", "");
+        this.set("results", null);
+    }
+
+});
+_.extend(ToolSearch.prototype, VisibilityMixin);
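+
+// Illustrative sketch: queries shorter than min_chars_for_search reset
+// results to null; longer queries are debounced (400ms) before hitting
+// the api/tools endpoint.
+//
+//     var search = new ToolSearch();
+//     search.on( 'change:results', function( model, results ){ /* ... */ });
+//     search.set( 'query', 'fastq' );  // triggers do_search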
+
+/**
+ * Tool Panel.
+ */
+var ToolPanel = Backbone.Model.extend({
+
+    initialize: function(options) {
+        this.attributes.tool_search = options.tool_search;
+        this.attributes.tool_search.on("change:results", this.apply_search_results, this);
+        this.attributes.tools = options.tools;
+        this.attributes.layout = new Backbone.Collection( this.parse(options.layout) );
+    },
+
+    /**
+     * Parse tool panel dictionary and return collection of tool panel elements.
+     */
+    parse: function(response) {
+        // Recursive function to parse tool panel elements.
+        var self = this,
+            // Helper to recursively parse tool panel.
+            parse_elt = function(elt_dict) {
+                var type = elt_dict.model_class;
+                // There are many types of tools; for now, anything that ends in 'Tool'
+                // is treated as a generic tool.
+                if ( type.indexOf('Tool') === type.length - 4 ) {
+                    return self.attributes.tools.get(elt_dict.id);
+                }
+                else if (type === 'ToolSection') {
+                    // Parse elements.
+                    var elems = _.map(elt_dict.elems, parse_elt);
+                    elt_dict.elems = elems;
+                    return new ToolSection(elt_dict);
+                }
+                else if (type === 'ToolSectionLabel') {
+                    return new ToolSectionLabel(elt_dict);
+                }
+            };
+
+        return _.map(response, parse_elt);
+    },
+
+    clear_search_results: function() {
+        this.get('layout').each(function(panel_elt) {
+            if (panel_elt instanceof ToolSection) {
+                panel_elt.clear_search_results();
+            }
+            else {
+                // Label or tool, so just show.
+                panel_elt.show();
+            }
+        });
+    },
+
+    apply_search_results: function() {
+        var results = this.get('tool_search').get('results');
+        if (results === null) {
+            this.clear_search_results();
+            return;
+        }
+
+        var cur_label = null;
+        this.get('layout').each(function(panel_elt) {
+            if (panel_elt instanceof ToolSectionLabel) {
+                cur_label = panel_elt;
+                cur_label.hide();
+            }
+            else if (panel_elt instanceof Tool) {
+                if (panel_elt.apply_search_results(results)) {
+                    if (cur_label) {
+                        cur_label.show();
+                    }
+                }
+            }
+            else {
+                // Starting new section, so clear current label.
+                cur_label = null;
+                panel_elt.apply_search_results(results);
+            }
+        });
+    }
+});
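+
+// Illustrative sketch of wiring up a ToolPanel; tool_models and layout_dict
+// are hypothetical, and the layout dictionary would normally come from the
+// server.
+//
+//     var panel = new ToolPanel({
+//         tool_search : new ToolSearch(),
+//         tools       : new ToolCollection( tool_models ),
+//         layout      : layout_dict
+//     });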
+
+/**
+ * View classes for Galaxy tools and tool panel.
+ *
+ * Views use the templates defined below for rendering. Views update as needed
+ * based on (a) model/collection events and (b) user interactions; in this sense,
+ * they are controllers as well, and the HTML is the real view in the MVC architecture.
+ */
+
+/**
+ * Base view that handles visibility based on model's hidden attribute.
+ */
+var BaseView = Backbone.View.extend({
+    initialize: function() {
+        this.model.on("change:hidden", this.update_visible, this);
+        this.update_visible();
+    },
+    update_visible: function() {
+        ( this.model.attributes.hidden ? this.$el.hide() : this.$el.show() );
+    }
+});
+
+/**
+ * Link to a tool.
+ */
+var ToolLinkView = BaseView.extend({
+    tagName: 'div',
+
+    render: function() {
+        // create element
+        var $link = $('<div/>');
+        $link.append(templates.tool_link(this.model.toJSON()));
+
+        var formStyle = this.model.get( 'form_style' );
+        // open upload dialog for upload tool
+        if (this.model.id === 'upload1') {
+            $link.find('a').on('click', function(e) {
+                e.preventDefault();
+                Galaxy.upload.show();
+            });
+        }
+        else if ( formStyle === 'regular' ) { // regular tools
+            var self = this;
+            $link.find('a').on('click', function(e) {
+                e.preventDefault();
+                var form = new ToolForm.View( { id : self.model.id, version : self.model.get('version') } );
+                form.deferred.execute(function() {
+                    Galaxy.app.display( form );
+                });
+            });
+        }
+
+        // add element
+        this.$el.append($link);
+        return this;
+    }
+});
+
+/**
+ * Panel label/section header.
+ */
+var ToolSectionLabelView = BaseView.extend({
+    tagName: 'div',
+    className: 'toolPanelLabel',
+
+    render: function() {
+        this.$el.append( $("<span/>").text(this.model.attributes.text) );
+        return this;
+    }
+});
+
+/**
+ * Panel section.
+ */
+var ToolSectionView = BaseView.extend({
+    tagName: 'div',
+    className: 'toolSectionWrapper',
+
+    initialize: function() {
+        BaseView.prototype.initialize.call(this);
+        this.model.on("change:open", this.update_open, this);
+    },
+
+    render: function() {
+        // Build using template.
+        this.$el.append( templates.panel_section(this.model.toJSON()) );
+
+        // Add tools to section.
+        var section_body = this.$el.find(".toolSectionBody");
+        _.each(this.model.attributes.elems, function(elt) {
+            if (elt instanceof Tool) {
+                var tool_view = new ToolLinkView({model: elt, className: "toolTitle"});
+                tool_view.render();
+                section_body.append(tool_view.$el);
+            }
+            else if (elt instanceof ToolSectionLabel) {
+                var label_view = new ToolSectionLabelView({model: elt});
+                label_view.render();
+                section_body.append(label_view.$el);
+            }
+            else {
+                // TODO: handle nested section bodies?
+            }
+        });
+        return this;
+    },
+
+    events: {
+        'click .toolSectionTitle > a': 'toggle'
+    },
+
+    /**
+     * Toggle visibility of tool section.
+     */
+    toggle: function() {
+        this.model.set("open", !this.model.attributes.open);
+    },
+
+    /**
+     * Update whether the section is open or closed.
+     */
+    update_open: function() {
+        (this.model.attributes.open ?
+            this.$el.children(".toolSectionBody").slideDown("fast") :
+            this.$el.children(".toolSectionBody").slideUp("fast")
+        );
+    }
+});
+
+var ToolSearchView = Backbone.View.extend({
+    tagName: 'div',
+    id: 'tool-search',
+    className: 'bar',
+
+    events: {
+        'click': 'focus_and_select',
+        'keyup :input': 'query_changed',
+        'click #search-clear-btn': 'clear'
+    },
+
+    render: function() {
+        this.$el.append( templates.tool_search(this.model.toJSON()) );
+        if (!this.model.is_visible()) {
+            this.$el.hide();
+        }
+
+        // Adjust top for issue 2907 depending on whether the messagebox is visible.
+        if ($("#messagebox").is(":visible")) {
+            this.$el.css("top","95px");
+        }
+
+        this.$el.find('[title]').tooltip();
+        return this;
+    },
+
+    focus_and_select: function() {
+        this.$el.find(":input").focus().select();
+    },
+
+    clear: function() {
+        this.model.clear_search();
+        this.$el.find(":input").val('');
+        this.focus_and_select();
+        return false;
+    },
+
+    query_changed: function( evData ) {
+        // check for the 'clear key' (ESC) first
+        if( ( this.model.attributes.clear_key ) &&
+            ( this.model.attributes.clear_key === evData.which ) ){
+            this.clear();
+            return false;
+        }
+        this.model.set("query", this.$el.find(":input").val());
+    }
+});
+
+/**
+ * Tool panel view. Events triggered include:
+ * tool_link_click(click event, tool_model)
+ */
+var ToolPanelView = Backbone.View.extend({
+    tagName: 'div',
+    className: 'toolMenu',
+
+    /**
+     * Set up view.
+     */
+    initialize: function() {
+        this.model.get('tool_search').on("change:results", this.handle_search_results, this);
+    },
+
+    render: function() {
+        var self = this;
+
+        // Render search.
+        var search_view = new ToolSearchView( { model: this.model.get('tool_search') } );
+        search_view.render();
+        self.$el.append(search_view.$el);
+
+        // Render panel.
+        this.model.get('layout').each(function(panel_elt) {
+            if (panel_elt instanceof ToolSection) {
+                var section_title_view = new ToolSectionView({model: panel_elt});
+                section_title_view.render();
+                self.$el.append(section_title_view.$el);
+            }
+            else if (panel_elt instanceof Tool) {
+                var tool_view = new ToolLinkView({model: panel_elt, className: "toolTitleNoSection"});
+                tool_view.render();
+                self.$el.append(tool_view.$el);
+            }
+            else if (panel_elt instanceof ToolSectionLabel) {
+                var label_view = new ToolSectionLabelView({model: panel_elt});
+                label_view.render();
+                self.$el.append(label_view.$el);
+            }
+        });
+
+        // Setup tool link click eventing.
+        self.$el.find("a.tool-link").click(function(e) {
+            // Tool id is always the first class.
+            var tool_id = $(this).attr('class').split(/\s+/)[0],
+                tool = self.model.get('tools').get(tool_id);
+
+            self.trigger("tool_link_click", e, tool);
+        });
+
+        return this;
+    },
+
+    handle_search_results: function() {
+        var results = this.model.get('tool_search').get('results');
+        if (results && results.length === 0) {
+            $("#search-no-results").show();
+        }
+        else {
+            $("#search-no-results").hide();
+        }
+    }
+});
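+
+// Illustrative sketch, continuing the panel sketch above (the container
+// selector is hypothetical): render the panel and react to tool link clicks.
+//
+//     var panel_view = new ToolPanelView({ model: panel });
+//     panel_view.render();
+//     $( '#tool-panel-container' ).append( panel_view.$el );
+//     panel_view.on( 'tool_link_click', function( e, tool ){ /* ... */ });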
+
+/**
+ * View for working with a tool: setting parameters and inputs and executing the tool.
+ */
+var ToolFormView = Backbone.View.extend({
+    className: 'toolForm',
+
+    render: function() {
+        this.$el.children().remove();
+        this.$el.append( templates.tool_form(this.model.toJSON()) );
+    }
+});
+
+/**
+ * Integrated tool menu + tool execution.
+ */
+var IntegratedToolMenuAndView = Backbone.View.extend({
+    className: 'toolMenuAndView',
+
+    initialize: function() {
+        this.tool_panel_view = new ToolPanelView({collection: this.collection});
+        this.tool_form_view = new ToolFormView();
+    },
+
+    render: function() {
+        // Render and append tool panel.
+        this.tool_panel_view.render();
+        this.tool_panel_view.$el.css("float", "left");
+        this.$el.append(this.tool_panel_view.$el);
+
+        // Append tool form view.
+        this.tool_form_view.$el.hide();
+        this.$el.append(this.tool_form_view.$el);
+
+        // On tool link click, show tool.
+        var self = this;
+        this.tool_panel_view.on("tool_link_click", function(e, tool) {
+            // Prevents click from activating link:
+            e.preventDefault();
+            // Show tool that was clicked on:
+            self.show_tool(tool);
+        });
+    },
+
+    /**
+     * Fetch and display tool.
+     */
+    show_tool: function(tool) {
+        var self = this;
+        tool.fetch().done( function() {
+            self.tool_form_view.model = tool;
+            self.tool_form_view.render();
+            self.tool_form_view.$el.show();
+            $('#left').width("650px");
+        });
+    }
+});
+
+// TODO: move into relevant views
+var templates = {
+    // the search bar at the top of the tool panel
+    tool_search : _.template([
+        '<input id="tool-search-query" class="search-query parent-width" name="query" ',
+                'placeholder="<%- search_hint_string %>" autocomplete="off" type="text" />',
+        '<a id="search-clear-btn" title="clear search (esc)"> </a>',
+        //TODO: replace with icon
+        '<span id="search-spinner" class="search-spinner fa fa-spinner fa-spin"></span>',
+    ].join('')),
+
+    // the category level container in the tool panel (e.g. 'Get Data', 'Text Manipulation')
+    panel_section : _.template([
+        '<div class="toolSectionTitle" id="title_<%- id %>">',
+            '<a href="javascript:void(0)"><span><%- name %></span></a>',
+        '</div>',
+        '<div id="<%- id %>" class="toolSectionBody" style="display: none;">',
+            '<div class="toolSectionBg"></div>',
+        '</div>'
+    ].join('')),
+
+    // a single tool's link in the tool panel; will load the tool form in the center panel
+    tool_link : _.template([
+        '<span class="labels">',
+            '<% _.each( labels, function( label ){ %>',
+            '<span class="label label-default label-<%- label %>">',
+                '<%- label %>',
+            '</span>',
+            '<% }); %>',
+        '</span>',
+        '<a class="<%- id %> tool-link" href="<%= link %>" target="<%- target %>" minsizehint="<%- min_width %>">',
+            '<%- name %>',
+        '</a>',
+        ' <%- description %>'
+    ].join('')),
+
+    // the tool form for entering tool parameters, viewing help and executing the tool
+    // loaded when a tool link is clicked in the tool panel
+    tool_form : _.template([
+        '<div class="toolFormTitle"><%- tool.name %> (version <%- tool.version %>)</div>',
+        '<div class="toolFormBody">',
+            '<% _.each( tool.inputs, function( input ){ %>',
+            '<div class="form-row">',
+                '<label for="<%- input.name %>"><%- input.label %>:</label>',
+                '<div class="form-row-input">',
+                    '<%= input.html %>',
+                '</div>',
+                '<div class="toolParamHelp" style="clear: both;">',
+                    '<%- input.help %>',
+                '</div>',
+                '<div style="clear: both;"></div>',
+            '</div>',
+            '<% }); %>',
+        '</div>',
+        '<div class="form-row form-actions">',
+            '<input type="submit" class="btn btn-primary" name="runtool_btn" value="Execute" />',
+        '</div>',
+        '<div class="toolHelp">',
+            '<div class="toolHelpBody"><% tool.help %></div>',
+        '</div>',
+    // TODO: we need scoping here because 'help' is the dom for the help menu in the masthead
+    // which implies a leaky variable that I can't find
+    ].join(''), { variable: 'tool' }),
+};
+
+
+// Exports
+return {
+    ToolParameter: ToolParameter,
+    IntegerToolParameter: IntegerToolParameter,
+    SelectToolParameter: SelectToolParameter,
+    Tool: Tool,
+    ToolCollection: ToolCollection,
+    ToolSearch: ToolSearch,
+    ToolPanel: ToolPanel,
+    ToolPanelView: ToolPanelView,
+    ToolFormView: ToolFormView
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/tours.js b/client/galaxy/scripts/mvc/tours.js
new file mode 100644
index 0000000..6fd15b2
--- /dev/null
+++ b/client/galaxy/scripts/mvc/tours.js
@@ -0,0 +1,122 @@
+/**
+ *  This is the primary galaxy tours definition, currently only used for
+ *  rendering a tour menu.
+ *
+ *  For now it's intended to be plunked into the center display à la
+ *  Galaxy.app.display, but we could use a modal as well for more flexibility.
+ *
+ *  DBTODO - This is downright backbone abuse, rewrite it.
+ */
+
+define(['libs/bootstrap-tour'],function(BootstrapTour) {
+
+    var gxy_root = typeof Galaxy === "undefined" ? '/' : Galaxy.root;
+
+    var tour_opts = { storage: window.sessionStorage,
+                      onEnd: function(){
+                          sessionStorage.removeItem('activeGalaxyTour');
+                      },
+                      delay: 150, // Attempts to make it look natural
+                      orphan:true
+    };
+
+    var hooked_tour_from_data = function(data){
+        _.each(data.steps, function(step) {
+            if (step.preclick){
+                step.onShow = function(){
+                    _.each(step.preclick, function(preclick){
+                        // TODO: click delay between clicks
+                        $(preclick).click();
+                    });
+                };
+            }
+            if (step.postclick){
+                step.onHide = function(){
+                    _.each(step.postclick, function(postclick){
+                        // TODO: click delay between clicks
+                        $(postclick).click();
+                    });
+                };
+            }
+            if (step.textinsert){
+                // Have to manually trigger a change here, for some
+                // elements which have additional logic, like the
+                // upload input box
+                step.onShown = function(){
+                    $(step.element).val(step.textinsert).trigger("change");
+                };
+            }
+        });
+        return data;
+    };
+
+    var TourItem = Backbone.Model.extend({
+      urlRoot: gxy_root + 'api/tours',
+    });
+
+    var Tours = Backbone.Collection.extend({
+      url:  gxy_root + 'api/tours',
+      model: TourItem,
+    });
+
+
+    var giveTour =  function(tour_id){
+        var url = gxy_root + 'api/tours/' + tour_id;
+        $.getJSON( url, function( data ) {
+            // Set hooks for additional click and data entry actions.
+            var tourdata = hooked_tour_from_data(data);
+            // Store tour steps in sessionStorage to easily persist w/o hackery.
+            sessionStorage.setItem('activeGalaxyTour', JSON.stringify(data));
+            var tour = new Tour(_.extend({
+                steps: tourdata.steps,
+            }, tour_opts));
+            // Always clean restart, since this is a new, explicit giveTour execution.
+            tour.init();
+            tour.goTo(0);
+            tour.restart();
+        });
+    };
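+
+    // Illustrative usage (the tour id is hypothetical): fetches the tour
+    // definition from api/tours/<tour_id>, runs it through
+    // hooked_tour_from_data, then starts it from step 0.
+    //
+    //     giveTour( 'some_tour_id' );
+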
+    var ToursView = Backbone.View.extend({
+        // initialize
+        initialize: function() {
+            var self = this;
+            this.setElement('<div/>');
+            this.model = new Tours();
+            this.model.fetch({
+              success: function(){
+                self.render();
+              },
+              error: function(){
+                console.error("Failed to fetch tours.");
+              }
+            });
+        },
+
+        render: function(){
+            var tpl = _.template([
+                "<h2>Galaxy Tours</h2>",
+                "<p>This page presents a list of interactive tours available on this Galaxy server.  ",
+                "Select any tour to get started (and remember, you can click 'End Tour' at any time).</p>",
+                "<ul>",
+                '<% _.each(tours, function(tour) { %>',
+                    '<li>',
+                        '<a href="/tours/<%- tour.id %>" class="tourItem" data-tour.id=<%- tour.id %>>',
+                            '<%- tour.attributes.name || tour.id %>',
+                        '</a>',
+                        ' - <%- tour.attributes.description || "No description given." %>',
+                    '</li>',
+                '<% }); %>',
+                "</ul>"].join(''));
+            this.$el.html(tpl({tours: this.model.models})).on("click", ".tourItem", function(e){
+                e.preventDefault();
+                giveTour($(this).data("tour.id"));
+            });
+        }
+    });
+
+    return {ToursView: ToursView,
+            hooked_tour_from_data: hooked_tour_from_data,
+            tour_opts: tour_opts,
+            giveTour: giveTour};
+});
diff --git a/client/galaxy/scripts/mvc/ui/error-modal.js b/client/galaxy/scripts/mvc/ui/error-modal.js
new file mode 100644
index 0000000..9c09089
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/error-modal.js
@@ -0,0 +1,135 @@
+define([
+    "utils/localization"
+], function( _l ){
+'use strict';
+
+//TODO: toastr is another possibility - I didn't see where I might add details, tho
+
+/* ============================================================================
+Error modals meant to replace the o-so-easy alerts.
+
+These are currently styled as errormessages but use the Galaxy.modal
+infrastructure to be shown/closed. They're capable of showing details in a
+togglable dropdown and the details are formatted in a pre.
+
+Example:
+    errorModal( "Here's a message", 'A Title', { some_details: 'here' });
+    errorModal( "Here's a message" ); // no details, title is 'Error'
+
+There are three specialized forms:
+    offlineErrorModal       a canned response for when there's no connection
+    badGatewayErrorModal    canned response for when Galaxy is restarting
+    ajaxErrorModal          plugable into any Backbone class as an
+        error event handler by accepting the error args: model, xhr, options
+
+Examples:
+    if( !navigator.onLine ){ offlineErrorModal(); }
+    if( xhr.status === 502 ){ badGatewayErrorModal(); }
+    this.listenTo( this.model, 'error', ajaxErrorModal );
+
+============================================================================ */
+
+var CONTACT_MSG = _l( 'Please contact a Galaxy administrator if the problem persists.' );
+var DEFAULT_AJAX_ERR_MSG = _l( 'An error occurred while updating information with the server.' );
+var DETAILS_MSG = _l( 'The following information can assist the developers in finding the source of the error:' );
+
+/** private helper that builds the modal and handles adding details */
+function _errorModal( message, title, details ){
+    // create and return the modal, adding details button only if needed
+    Galaxy.modal.show({
+        title           : title,
+        body            : message,
+        closing_events  : true,
+        buttons         : { Ok: function(){ Galaxy.modal.hide(); } },
+    });
+    Galaxy.modal.$el.addClass( 'error-modal' );
+
+    if( details ){
+        Galaxy.modal.$( '.error-details' ).add( Galaxy.modal.$( 'button:contains("Details")' ) ).remove();
+        $( '<div/>' ).addClass( 'error-details' )
+            .hide().appendTo( Galaxy.modal.$( '.modal-content' ) )
+            .append([
+                $( '<p/>' ).text( DETAILS_MSG ),
+                $( '<pre/>' ).text( JSON.stringify( details, null, '  ' ) )
+            ]);
+
+        $( '<button id="button-1" class="pull-left">' + _l( 'Details' ) + '</button>' )
+            .appendTo( Galaxy.modal.$( '.buttons' ) )
+            .click( function(){ Galaxy.modal.$( '.error-details' ).toggle(); });
+    }
+    return Galaxy.modal;
+}
+
+/** Display a modal showing an error message but fallback to alert if there's no modal */
+function errorModal( message, title, details ){
+    if( !message ){ return; }
+
+    message = _l( message );
+    title = _l( title ) || _l( 'Error:' );
+    if( window.Galaxy && Galaxy.modal ){
+        return _errorModal( message, title, details );
+    }
+
+    alert( title + '\n\n' + message );
+    console.log( 'error details:', JSON.stringify( details ) );
+}
+
+
+// ----------------------------------------------------------------------------
+/** display a modal when the user may be offline */
+function offlineErrorModal(){
+    return errorModal(
+        _l( 'You appear to be offline. Please check your connection and try again.' ),
+        _l( 'Offline?' )
+    );
+}
+
+
+// ----------------------------------------------------------------------------
+/** 502 messages that should be displayed when galaxy is restarting */
+function badGatewayErrorModal(){
+    return errorModal(
+        _l( 'Galaxy is currently unreachable. Please try again in a few minutes.' ) + ' ' + CONTACT_MSG,
+        _l( 'Cannot connect to Galaxy' )
+    );
+}
+
+
+// ----------------------------------------------------------------------------
+/** display a modal (with details) about a failed Backbone ajax operation */
+function ajaxErrorModal( model, xhr, options, message, title ){
+    message = message || DEFAULT_AJAX_ERR_MSG;
+    message += ' ' + CONTACT_MSG;
+    title = title || _l( 'An error occurred' );
+    var details = _ajaxDetails( model, xhr, options );
+    return errorModal( message, title, details );
+}
+
+/** build details which may help debugging the ajax call */
+function _ajaxDetails( model, xhr, options ){
+    return {
+//TODO: still can't manage Raven id
+        raven       : _.result( window.Raven, 'lastEventId' ),
+        userAgent   : navigator.userAgent,
+        onLine      : navigator.onLine,
+        version     : _.result( Galaxy.config, 'version_major' ),
+        xhr         : _.omit( xhr, _.functions( xhr ) ),
+        options     : _.omit( options, 'xhr' ),
+        // add ajax data from Galaxy object cache
+        url         : _.result( Galaxy.lastAjax, 'url' ),
+        data        : _.result( Galaxy.lastAjax, 'data' ),
+        // backbone stuff (auto-redacting email for user)
+        model       : _.result( model, 'toJSON' , model + '' ),
+        user        : _.omit( _.result( Galaxy.user, 'toJSON' ), 'email' ),
+    };
+}
+
+
+//=============================================================================
+    return {
+        errorModal          : errorModal,
+        offlineErrorModal   : offlineErrorModal,
+        badGatewayErrorModal: badGatewayErrorModal,
+        ajaxErrorModal      : ajaxErrorModal
+    };
+});
diff --git a/client/galaxy/scripts/mvc/ui/icon-button.js b/client/galaxy/scripts/mvc/ui/icon-button.js
new file mode 100644
index 0000000..424edf2
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/icon-button.js
@@ -0,0 +1,183 @@
+define([
+    //jquery
+    //backbone
+], function(){
+//=============================================================================
+/**
+ * backbone model for icon buttons
+ */
+var IconButton = Backbone.Model.extend({
+    defaults: {
+        title           : "",
+        icon_class      : "",
+        on_click        : null,
+        menu_options    : null,
+        is_menu_button  : true,
+        id              : null,
+        href            : null,
+        target          : null,
+        enabled         : true,
+        visible         : true,
+        tooltip_config  : {}
+    }
+});
+
+/**
+ *  backbone view for icon buttons
+ */
+var IconButtonView = Backbone.View.extend({
+
+    initialize : function(){
+        // better rendering this way
+        this.model.attributes.tooltip_config = { placement : 'bottom' };
+        this.model.bind( 'change', this.render, this );
+    },
+
+    render : function( ){
+        // hide tooltip
+        this.$el.tooltip( 'hide' );
+
+        var new_elem = this.template( this.model.toJSON() );
+        // configure tooltip
+        new_elem.tooltip( this.model.get( 'tooltip_config' ));
+        this.$el.replaceWith( new_elem );
+        this.setElement( new_elem );
+        return this;
+    },
+
+    events : {
+        'click' : 'click'
+    },
+
+    click : function( event ){
+        // if on_click pass to that function
+        if( _.isFunction( this.model.get( 'on_click' ) ) ){
+            this.model.get( 'on_click' )( event );
+            return false;
+        }
+        // otherwise, bubble up ( to href or whatever )
+        return true;
+    },
+
+    // generate html element
+    template: function( options ){
+        var buffer = 'title="' + options.title + '" class="icon-button';
+
+        if( options.is_menu_button ){
+            buffer += ' menu-button';
+        }
+
+        buffer += ' ' + options.icon_class;
+
+        if( !options.enabled ){
+            buffer += '_disabled';
+        }
+
+        // close class tag
+        buffer += '"';
+
+        if( options.id ){
+            buffer += ' id="' + options.id + '"';
+        }
+
+        buffer += ' href="' + options.href + '"';
+        // add target for href
+        if( options.target ){
+            buffer += ' target="' + options.target + '"';
+        }
+        // set visibility
+        if( !options.visible ){
+            buffer += ' style="display: none;"';
+        }
+
+        // enabled/disabled
+        if ( options.enabled ){
+            buffer = '<a ' + buffer + '/>';
+        } else {
+            buffer = '<span ' + buffer + '/>';
+        }
+
+        // return element
+        return $( buffer );
+    }
+} );
+
+// define collection
+var IconButtonCollection = Backbone.Collection.extend({
+    model: IconButton
+});
+
+/**
+ * menu with multiple icon buttons
+ * views are not needed nor used for individual buttons
+ */
+var IconButtonMenuView = Backbone.View.extend({
+
+    tagName: 'div',
+
+    initialize: function(){
+        this.render();
+    },
+
+    render: function(){
+        // initialize icon buttons
+        var self = this;
+        this.collection.each(function(button){
+            // create and add icon button to menu
+            var elt = $('<a/>')
+                .attr('href', 'javascript:void(0)')
+                .attr('title', button.attributes.title)
+                .addClass('icon-button menu-button')
+                .addClass(button.attributes.icon_class)
+                .appendTo(self.$el)
+                .click(button.attributes.on_click);
+
+            // configure tooltip
+            if (button.attributes.tooltip_config){
+                elt.tooltip(button.attributes.tooltip_config);
+            }
+
+            // add popup menu to icon
+            var menu_options = button.get('options');
+            if (menu_options){
+                make_popupmenu(elt, menu_options);
+            }
+        });
+
+        // return
+        return this;
+    }
+});
+
+/**
+ * Returns an IconButtonMenuView for the provided configuration.
+ * Configuration is a list of dictionaries where each dictionary
+ * defines an icon button. Each dictionary must have the following
+ * elements: icon_class, title, and on_click.
+ */
+var create_icon_buttons_menu = function(config, global_config) {
+    // initialize global configuration
+    if (!global_config) global_config = {};
+
+    // create and initialize menu
+    var buttons = new IconButtonCollection(
+        _.map(config, function(button_config){
+            return new IconButton(_.extend(button_config, global_config));
+        })
+    );
+
+    // return menu
+    return new IconButtonMenuView( {collection: buttons} );
+};
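+
+// Illustrative sketch (the container id is hypothetical):
+//
+//     var menu = create_icon_buttons_menu([
+//         { icon_class: 'gear',  title: 'Options', on_click: function(){ /* ... */ } },
+//         { icon_class: 'cross', title: 'Close',   on_click: function(){ /* ... */ } }
+//     ], { tooltip_config: { placement: 'top' } });
+//     $( '#some-toolbar' ).append( menu.$el );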
+
+
+//=============================================================================
+    return {
+        IconButton              : IconButton,
+        IconButtonView          : IconButtonView,
+        IconButtonCollection    : IconButtonCollection,
+        IconButtonMenuView      : IconButtonMenuView,
+        create_icon_buttons_menu: create_icon_buttons_menu
+    };
+});
diff --git a/client/galaxy/scripts/mvc/ui/popup-menu.js b/client/galaxy/scripts/mvc/ui/popup-menu.js
new file mode 100644
index 0000000..1cab184
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/popup-menu.js
@@ -0,0 +1,315 @@
+define([
+    //jquery
+    //backbone
+], function(){
+// =============================================================================
+/**
+ * view for a popup menu
+ */
+var PopupMenu = Backbone.View.extend({
+//TODO: maybe better as singleton off the Galaxy obj
+    /** Cache the desired button element and options, set up the button click handler
+     *  NOTE: attaches this view as HTML/jQ data on the button for later use.
+     */
+    initialize: function( $button, options ){
+        // default settings
+        this.$button = $button;
+        if( !this.$button.length ){
+            this.$button = $( '<div/>' );
+        }
+        this.options = options || [];
+        this.$button.data( 'popupmenu', this );
+
+        // set up button click -> open menu behavior
+        var menu = this;
+        this.$button.click( function( event ){
+            // if there's already a menu open, remove it
+            $( '.popmenu-wrapper' ).remove();
+            menu._renderAndShow( event );
+            return false;
+        });
+    },
+
+    // render the menu, append to the page body at the click position, and set up the 'click-away' handlers, show
+    _renderAndShow: function( clickEvent ){
+        this.render();
+        this.$el.appendTo( 'body' ).css( this._getShownPosition( clickEvent )).show();
+        this._setUpCloseBehavior();
+    },
+
+    // render the menu
+    // this menu doesn't attach itself to the DOM ( see _renderAndShow )
+    render: function(){
+        // render the menu body absolute and hidden, fill with template
+        this.$el.addClass( 'popmenu-wrapper' ).hide()
+            .css({ position : 'absolute' })
+            .html( this.template( this.$button.attr( 'id' ), this.options ));
+
+        // set up behavior on each link/anchor elem
+        if( this.options.length ){
+            var menu = this;
+            //precondition: there should be one option per li
+            this.$el.find( 'li' ).each( function( i, li ){
+                var option = menu.options[i];
+
+                // if the option has 'func', call that function when the anchor is clicked
+                if( option.func ){
+                    $( this ).children( 'a.popupmenu-option' ).click( function( event ){
+                        option.func.call( menu, event, option );
+                        // We must preventDefault otherwise clicking "cancel"
+                        // on a purge or something still navigates and causes
+                        // the action.
+                        event.preventDefault();
+                        // bubble up so that an option click will call the close behavior
+                    });
+                }
+            });
+        }
+        return this;
+    },
+
+    template : function( id, options ){
+        return [
+            '<ul id="', id, '-menu" class="dropdown-menu">', this._templateOptions( options ), '</ul>'
+        ].join( '' );
+    },
+
+    _templateOptions : function( options ){
+        if( !options.length ){
+            return '<li>(no options)</li>';
+        }
+        return _.map( options, function( option ){
+            if( option.divider ){
+                return '<li class="divider"></li>';
+            } else if( option.header ){
+                return [ '<li class="head"><a href="javascript:void(0);">', option.html, '</a></li>' ].join( '' );
+            }
+            var href   = option.href || 'javascript:void(0);',
+                target = ( option.target  )?( ' target="' + option.target + '"' ):( '' ),
+                check  = ( option.checked )?( '<span class="fa fa-check"></span>' ):( '' );
+            return [
+                '<li><a class="popupmenu-option" href="', href, '"', target, '>',
+                    check, option.html,
+                '</a></li>'
+            ].join( '' );
+        }).join( '' );
+    },
+
+    // get the absolute position/offset for the menu
+    _getShownPosition : function( clickEvent ){
+
+        // display menu horiz. centered on click...
+        var menuWidth = this.$el.width();
+        var x = clickEvent.pageX - menuWidth / 2 ;
+
+        // adjust to handle horiz. scroll and window dimensions ( draw entirely on visible screen area )
+        x = Math.min( x, $( document ).scrollLeft() + $( window ).width() - menuWidth - 5 );
+        x = Math.max( x, $( document ).scrollLeft() + 5 );
+        return {
+            top: clickEvent.pageY,
+            left: x
+        };
+    },
+
+    // bind an event handler to all available frames so that when anything is clicked
+    // the menu is removed from the DOM and the event handler unbinds itself
+    _setUpCloseBehavior: function(){
+        var menu = this;
+//TODO: alternately: focus hack, blocking overlay, jquery.blockui
+
+        // function to close popup and unbind itself
+        function closePopup( event ){
+            $( document ).off( 'click.close_popup' );
+            if( window && window.parent !== window ){
+                try {
+                    $( window.parent.document ).off( "click.close_popup" );
+                } catch( err ){}
+            } else {
+                try {
+                    $( 'iframe#galaxy_main' ).contents().off( "click.close_popup" );
+                } catch( err ){}
+            }
+            menu.remove();
+        }
+
+        $( 'html' ).one( "click.close_popup", closePopup );
+        if( window && window.parent !== window ){
+            try {
+                $( window.parent.document ).find( 'html' ).one( "click.close_popup", closePopup );
+            } catch( err ){}
+        } else {
+            try {
+                $( 'iframe#galaxy_main' ).contents().one( "click.close_popup", closePopup );
+            } catch( err ){}
+        }
+    },
+
+    // add a menu option/item at the given index
+    addItem: function( item, index ){
+        // append to end if no index
+        index = ( index >= 0 ) ? index : this.options.length;
+        this.options.splice( index, 0, item );
+        return this;
+    },
+
+    // remove a menu option/item at the given index
+    removeItem: function( index ){
+        if( index >= 0 ){
+            this.options.splice( index, 1 );
+        }
+        return this;
+    },
+
+    // search for a menu option by its html
+    findIndexByHtml: function( html ){
+        for( var i = 0; i < this.options.length; i++ ){
+            if( _.has( this.options[i], 'html' ) && ( this.options[i].html === html )){
+                return i;
+            }
+        }
+        return null;
+    },
+
+    // search for a menu option by its html
+    findItemByHtml: function( html ){
+        return this.options[( this.findIndexByHtml( html ))];
+    },
+
+    // string representation
+    toString: function(){
+        return 'PopupMenu';
+    }
+});
+/** shortcut to new for when you don't need to preserve the ref */
+PopupMenu.create = function _create( $button, options ){
+    return new PopupMenu( $button, options );
+};
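+
+// Illustrative sketch of the options array format (the button id is
+// hypothetical; all fields besides html are optional):
+//
+//     PopupMenu.create( $( '#options-button' ), [
+//         { html: 'Section header', header: true },
+//         { html: 'Do something',   func: function( event, option ){ /* ... */ } },
+//         { divider: true },
+//         { html: 'Documentation',  href: 'https://example.org', target: '_blank' }
+//     ]);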
+
+// -----------------------------------------------------------------------------
+// the following class functions are bridges from the original make_popupmenu and make_popup_menus
+// to the newer backbone.js PopupMenu
+
+/** Create a PopupMenu from simple map initial_options activated by clicking button_element.
+ *      Converts initial_options to object array used by PopupMenu.
+ *  @param {jQuery|DOMElement} button_element element which, when clicked, activates menu
+ *  @param {Object} initial_options map of key -> values, where
+ *      key is option text, value is fn to call when option is clicked
+ *  @returns {PopupMenu} the PopupMenu created
+ */
+PopupMenu.make_popupmenu = function( button_element, initial_options ){
+    var convertedOptions = [];
+    _.each( initial_options, function( optionVal, optionKey ){
+        var newOption = { html: optionKey };
+
+        // keys with null values indicate: header
+        if( optionVal === null ){ // !optionVal? (null only?)
+            newOption.header = true;
+
+        // keys with function values indicate: a menu option
+        } else if( jQuery.type( optionVal ) === 'function' ){
+            newOption.func = optionVal;
+        }
+        //TODO:?? any other special optionVals?
+        // there was no divider option originally
+        convertedOptions.push( newOption );
+    });
+    return new PopupMenu( $( button_element ), convertedOptions );
+};
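+
+// Illustrative sketch of the legacy options map (the button id is
+// hypothetical): null values become headers, function values become options.
+//
+//     PopupMenu.make_popupmenu( $( '#legacy-button' ), {
+//         'Some header'  : null,
+//         'Do something' : function(){ /* ... */ }
+//     });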
+
+/** Find all anchors in $parent (using selector) and convert anchors into a PopupMenu options map.
+ *  @param {jQuery} $parent the element that contains the links to convert to options
+ *  @param {String} selector jq selector string to find links
+ *  @returns {Object[]} the options array to initialize a PopupMenu
+ */
+//TODO: lose parent and selector, pass in array of links, use map to return options
+PopupMenu.convertLinksToOptions = function( $parent, selector ){
+    $parent = $( $parent );
+    selector = selector || 'a';
+    var options = [];
+    // NOTE: jQuery's each callback receives ( index, element )
+    $parent.find( selector ).each( function( i, elem ){
+        var option = {}, $link = $( elem );
+
+        // convert link text to the option text (html) and the href into the option func
+        option.html = $link.text();
+        if( $link.attr( 'href' ) ){
+            var linkHref    = $link.attr( 'href' ),
+                linkTarget  = $link.attr( 'target' ),
+                confirmText = $link.attr( 'confirm' );
+
+            option.func = function(){
+                // if there's a "confirm" attribute, throw up a confirmation dialog, and
+                //  if the user cancels - do nothing
+                if( ( confirmText ) && ( !confirm( confirmText ) ) ){ return; }
+
+                // if there's no confirm attribute, or the user accepted the confirm dialog:
+                switch( linkTarget ){
+                    // relocate the center panel
+                    case '_parent':
+                        window.parent.location = linkHref;
+                        break;
+
+                    // relocate the entire window
+                    case '_top':
+                        window.top.location = linkHref;
+                        break;
+
+                    // relocate this panel
+                    default:
+                        window.location = linkHref;
+                }
+            };
+        }
+        options.push( option );
+    });
+    return options;
+};
+
+/** Create a single popupmenu from existing DOM button and anchor elements
+ *  @param {jQuery} $buttonElement the element that when clicked will open the menu
+ *  @param {jQuery} $menuElement the element that contains the anchors to convert into a menu
+ *  @param {String} menuElementLinkSelector jq selector string used to find anchors to be made into menu options
+ *  @returns {PopupMenu} the PopupMenu (Backbone View) that can render, control the menu
+ */
+PopupMenu.fromExistingDom = function( $buttonElement, $menuElement, menuElementLinkSelector ){
+    $buttonElement = $( $buttonElement );
+    $menuElement = $( $menuElement );
+    var options = PopupMenu.convertLinksToOptions( $menuElement, menuElementLinkSelector );
+    // we're done with the menu (having converted it to an options map)
+    $menuElement.remove();
+    return new PopupMenu( $buttonElement, options );
+};
+
+/** Create all popupmenus within a document or a more specific element
+ *  @param {DOMElement} parent the DOM element in which to search for popupmenus to build (defaults to document)
+ *  @param {String} menuSelector jq selector string to find popupmenu menu elements (defaults to "div[popupmenu]")
+ *  @param {Function} buttonSelectorBuildFn the function to build the jq button selector.
+ *      Will be passed $menuElement, parent.
+ *      (Defaults to return '#' + $menuElement.attr( 'popupmenu' ); )
+ *  @returns {PopupMenu[]} array of popupmenus created
+ */
+PopupMenu.make_popup_menus = function( parent, menuSelector, buttonSelectorBuildFn ){
+    parent = parent || document;
+    // orig. Glx popupmenu menus have a (non-std) attribute 'popupmenu'
+    //  which contains the id of the button that activates the menu
+    menuSelector = menuSelector || 'div[popupmenu]';
+    // default to (orig. Glx) matching button to menu by using the popupmenu attr of the menu as the id of the button
+    buttonSelectorBuildFn = buttonSelectorBuildFn || function( $menuElement, parent ){
+        return '#' + $menuElement.attr( 'popupmenu' );
+    };
+
+    // aggregate and return all PopupMenus
+    var popupMenusCreated = [];
+    $( parent ).find( menuSelector ).each( function(){
+        var $menuElement    = $( this ),
+            $buttonElement  = $( parent ).find( buttonSelectorBuildFn( $menuElement, parent ) );
+        popupMenusCreated.push( PopupMenu.fromExistingDom( $buttonElement, $menuElement ) );
+        $buttonElement.addClass( 'popup' );
+    });
+    return popupMenusCreated;
+};
+
+
+// =============================================================================
+    return PopupMenu;
+});
+
diff --git a/client/galaxy/scripts/mvc/ui/ui-buttons.js b/client/galaxy/scripts/mvc/ui/ui-buttons.js
new file mode 100644
index 0000000..05a240d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-buttons.js
@@ -0,0 +1,304 @@
+/** This module contains all button views. */
+define( [ 'utils/utils' ], function( Utils ) {
+    /** This renders the default button which is used e.g. at the bottom of the upload modal. */
+    var ButtonDefault = Backbone.View.extend({
+        initialize: function( options ) {
+            this.model = options && options.model || new Backbone.Model({
+                id          : Utils.uid(),
+                title       : '',
+                floating    : 'right',
+                icon        : '',
+                cls         : 'btn btn-default',
+                wait        : false,
+                wait_text   : 'Sending...',
+                wait_cls    : 'btn btn-info',
+                disabled    : false,
+                percentage  : -1
+            }).set( options );
+            this.setElement( $( '<button/>' ).attr( 'type', 'button' )
+                                             .append( this.$icon        = $( '<i/>' ) )
+                                             .append( this.$title       = $( '<span/>' ) )
+                                             .append( this.$progress    = $( '<div/>' ).append( this.$progress_bar = $( '<div/>' ) ) ) );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+        },
+
+        render: function() {
+            var self = this;
+            var options = this.model.attributes;
+            this.$el.removeClass()
+                    .addClass( 'ui-button-default' )
+                    .addClass( options.disabled && 'disabled' )
+                    .attr( 'id', options.id )
+                    .attr( 'disabled', options.disabled )
+                    .css( 'float', options.floating )
+                    .off( 'click' ).on( 'click' , function() {
+                        $( '.tooltip' ).hide();
+                        options.onclick && !self.disabled && options.onclick();
+                    })
+                    .tooltip( { title: options.tooltip, placement: 'bottom' } );
+            this.$progress.addClass( 'progress' ).css( 'display', options.percentage !== -1 ? 'block' : 'none' );
+            this.$progress_bar.addClass( 'progress-bar' ).css( { width : options.percentage + '%' } );
+            this.$icon.removeClass().addClass( 'icon fa' );
+            this.$title.removeClass().addClass( 'title' );
+            if ( options.wait ) {
+                this.$el.addClass( options.wait_cls ).prop( 'disabled', true );
+                this.$icon.addClass( 'fa-spinner fa-spin ui-margin-right' );
+                this.$title.html( options.wait_text );
+            } else {
+                this.$el.addClass( options.cls );
+                this.$icon.addClass( options.icon );
+                this.$title.html( options.title );
+                options.icon && options.title && this.$icon.addClass( 'ui-margin-right' );
+            }
+        },
+
+        /** Show button */
+        show: function() {
+            this.$el.show();
+        },
+
+        /** Hide button */
+        hide: function() {
+            this.$el.hide();
+        },
+
+        /** Disable button */
+        disable: function() {
+            this.model.set( 'disabled', true );
+        },
+
+        /** Enable button */
+        enable: function() {
+            this.model.set( 'disabled', false );
+        },
+
+        /** Show spinner to indicate that the button is not ready to be clicked */
+        wait: function() {
+            this.model.set( 'wait', true );
+        },
+
+        /** Hide spinner to indicate that the button is ready to be clicked */
+        unwait: function() {
+            this.model.set( 'wait', false );
+        },
+
+        /** Change icon */
+        setIcon: function( icon ) {
+            this.model.set( 'icon', icon );
+        }
+    });
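+
+    // Illustrative sketch (the endpoint is hypothetical): a button that shows
+    // its wait spinner while a request is in flight.
+    //
+    //     var button = new ButtonDefault({
+    //         title   : 'Run',
+    //         icon    : 'fa-play',
+    //         onclick : function(){
+    //             button.wait();
+    //             $.post( '/api/some/endpoint' ).always( function(){ button.unwait(); } );
+    //         }
+    //     });
+    //     $( 'body' ).append( button.$el );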
+
+    /** This button allows the right-click/open-in-new-tab feature; it's used e.g. for panel buttons. */
+    var ButtonLink = ButtonDefault.extend({
+        initialize: function( options ) {
+            this.model = options && options.model || new Backbone.Model({
+                id          : Utils.uid(),
+                title       : '',
+                icon        : '',
+                cls         : ''
+            }).set( options );
+            this.setElement( $( '<a/>' ).append( this.$icon  = $( '<span/>' ) ) );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+        },
+
+        render: function() {
+            var options = this.model.attributes;
+            this.$el.removeClass()
+                    .addClass( options.cls )
+                    .attr( { id         : options.id,
+                             href       : options.href || 'javascript:void(0)',
+                             title      : options.title,
+                             target     : options.target || '_top',
+                             disabled   : options.disabled } )
+                    .off( 'click' ).on( 'click' , function() {
+                        options.onclick && !options.disabled && options.onclick();
+                    });
+            this.$icon.removeClass().addClass( options.icon );
+        }
+    });
+
+    /** The check button is used in the tool form to distinguish between multiple states, e.g. all, partially and nothing selected. */
+    var ButtonCheck = Backbone.View.extend({
+        initialize: function( options ) {
+            this.model = options && options.model || new Backbone.Model({
+                id          : Utils.uid(),
+                title       : 'Select/Unselect all',
+                icons       : [ 'fa-square-o', 'fa-minus-square-o', 'fa-check-square-o' ],
+                value       : 0,
+                onchange    : function(){}
+            }).set( options );
+            this.setElement( $( '<div/>' ).append( this.$icon   = $( '<span/>' ) )
+                                          .append( this.$title  = $( '<span/>' ) ) );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+        },
+
+        render: function() {
+            var self = this;
+            var options = this.model.attributes;
+            this.$el.addClass( 'ui-button-check' )
+                    .off( 'click' ).on('click', function() {
+                        self.model.set( 'value', ( self.model.get( 'value' ) === 0 && 2 ) || 0 );
+                        options.onclick && options.onclick();
+                    });
+            this.$title.html( options.title );
+            this.$icon.removeClass()
+                      .addClass( 'icon fa ui-margin-right' )
+                      .addClass( options.icons[ options.value ] );
+        },
+
+        /** Sets a new value and/or returns the current value.
+         *  @param {Integer} new_val - new value: 0=unchecked, 1=partial, 2=checked;
+         *      OR, when total is given, the number of selected options.
+         *  @param {Integer} total - total number of available options.
+         */
+        value: function ( new_val, total ) {
+            if ( new_val !== undefined ) {
+                if ( total && new_val !== 0 ) {
+                    new_val = ( new_val !== total ) && 1 || 2;
+                }
+                this.model.set( 'value', new_val );
+                this.model.get( 'onchange' )( this.model.get( 'value' ) );
+            }
+            return this.model.get( 'value' );
+        }
+    });
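+
+    // Illustrative sketch of the two calling conventions of value():
+    //
+    //     check.value( 1 );       // set partial state directly
+    //     check.value( 3, 10 );   // 3 of 10 selected -> partial (1)
+    //     check.value( 10, 10 );  // all selected     -> checked (2)
+    //     check.value();          // read the current state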
+
+    /** This renders a differently styled, more compact button version. */
+    var ButtonIcon = ButtonDefault.extend({
+        initialize: function( options ) {
+            this.model = options && options.model || new Backbone.Model({
+                id          : Utils.uid(),
+                title       : '',
+                floating    : 'right',
+                icon        : '',
+                cls         : 'ui-button-icon',
+                disabled    : false
+            }).set( options );
+            this.setElement( $( '<div/>' ).append( this.$button = $( '<div/>' ).append( this.$icon  = $( '<i/>' ) )
+                                                                               .append( this.$title = $( '<span/>' ) ) ) );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+        },
+
+        render: function() {
+            var self = this;
+            var options = this.model.attributes;
+            this.$el.removeClass()
+                    .addClass( options.cls )
+                    .addClass( options.disabled && 'disabled' )
+                    .attr( 'disabled', options.disabled )
+                    .attr( 'id', options.id )
+                    .css( 'float', options.floating )
+                    .off( 'click' ).on( 'click', function() {
+                        $( '.tooltip' ).hide();
+                        !options.disabled && options.onclick && options.onclick();
+                    });
+            this.$button.addClass( 'button' ).tooltip( { title: options.tooltip, placement: 'bottom' } );
+            this.$icon.removeClass().addClass( 'icon fa' ).addClass( options.icon );
+            this.$title.addClass( 'title' ).html( options.title );
+            options.icon && options.title && this.$icon.addClass( 'ui-margin-right' );
+        }
+    });
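+
+    // A minimal sketch (the icon name and handler below are placeholders):
+    //
+    //   var button = new ButtonIcon({
+    //       icon    : 'fa-plus',
+    //       title   : 'Insert',
+    //       tooltip : 'Insert a new element',
+    //       onclick : function() { console.log( 'clicked' ) }
+    //   });
+    //   $( 'body' ).append( button.$el );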
+
+    /** This class creates a button with a dropdown menu. */
+    var ButtonMenu = ButtonDefault.extend({
+        $menu: null,
+        initialize: function ( options ) {
+            this.model = options && options.model || new Backbone.Model({
+                id              : '',
+                title           : '',
+                floating        : 'right',
+                pull            : 'right',
+                icon            : null,
+                onclick         : null,
+                cls             : 'ui-button-icon ui-button-menu',
+                tooltip         : '',
+                target          : '',
+                href            : '',
+                onunload        : null,
+                visible         : true,
+                tag             : ''
+            }).set( options );
+            this.collection = new Backbone.Collection();
+            this.setElement( $( '<div/>' ).append( this.$root = $( '<div/>' ).append( this.$icon  = $( '<i/>' ) )
+                                                                             .append( this.$title = $( '<span/>' ) ) ) );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.listenTo( this.collection, 'change add remove reset', this.render, this );
+            this.render();
+        },
+
+        render: function() {
+            var self = this;
+            var options = this.model.attributes;
+            this.$el.removeClass()
+                    .addClass( 'dropdown' )
+                    .addClass( options.cls )
+                    .attr( 'id', options.id )
+                    .css( { float   : options.floating,
+                            display : options.visible && this.collection.where( { visible: true } ).length > 0 ? 'block' : 'none' } );
+            this.$root.addClass( 'root button dropdown-toggle' )
+                      .attr( 'data-toggle', 'dropdown' )
+                      .tooltip( { title: options.tooltip, placement: 'bottom' } )
+                      .off( 'click' ).on( 'click', function( e ) {
+                            $( '.tooltip' ).hide();
+                            e.preventDefault();
+                            options.onclick && options.onclick();
+                      } );
+            this.$icon.removeClass().addClass( 'icon fa' ).addClass( options.icon );
+            this.$title.removeClass().addClass( 'title' ).html( options.title );
+            options.icon && options.title && this.$icon.addClass( 'ui-margin-right' );
+            this.$menu && this.$menu.remove();
+            if ( this.collection.length > 0 ) {
+                this.$menu = $( '<ul/>' ).addClass( 'menu dropdown-menu' )
+                                         .addClass( 'pull-' + self.model.get( 'pull' ) )
+                                         .attr( 'role', 'menu' );
+                this.$el.append( this.$menu );
+            }
+            this.collection.each( function( submodel ) {
+                var suboptions = submodel.attributes;
+                if ( suboptions.visible ) {
+                    var $link = $( '<a/>' ).addClass( 'dropdown-item' )
+                                           .attr( { href : suboptions.href, target : suboptions.target } )
+                                           .append( $( '<i/>' ).addClass( 'fa' )
+                                                               .addClass( suboptions.icon )
+                                                               .css( 'display', suboptions.icon ? 'inline-block' : 'none' ) )
+                                           .append( suboptions.title )
+                                           .on( 'click', function( e ) {
+                                                if ( suboptions.onclick ) {
+                                                    e.preventDefault();
+                                                    suboptions.onclick();
+                                                }
+                                           } );
+                    self.$menu.append( $( '<li/>' ).append( $link ) );
+                    suboptions.divider && self.$menu.append( $( '<li/>' ).addClass( 'divider' ) );
+                }
+            });
+        },
+
+        /** Add a new menu item */
+        addMenu: function ( options ) {
+            this.collection.add( Utils.merge( options, {
+                title       : '',
+                target      : '',
+                href        : '',
+                onclick     : null,
+                divider     : false,
+                visible     : true,
+                icon        : null,
+                cls         : 'button-menu btn-group'
+            }));
+        }
+    });
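+
+    // A minimal sketch of assembling a dropdown; the titles, icons and handlers
+    // below are placeholders:
+    //
+    //   var menu = new ButtonMenu({ icon: 'fa-gear', tooltip: 'Operations' });
+    //   menu.addMenu({ title: 'Edit',   icon: 'fa-pencil', onclick: function() { console.log( 'edit' ) } });
+    //   menu.addMenu({ title: 'Delete', icon: 'fa-trash',  divider: true });
+    //   $( 'body' ).append( menu.$el );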
+
+    return {
+        ButtonDefault   : ButtonDefault,
+        ButtonLink      : ButtonLink,
+        ButtonIcon      : ButtonIcon,
+        ButtonCheck     : ButtonCheck,
+        ButtonMenu      : ButtonMenu
+    };
+});
diff --git a/client/galaxy/scripts/mvc/ui/ui-color-picker.js b/client/galaxy/scripts/mvc/ui/ui-color-picker.js
new file mode 100644
index 0000000..d5a341e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-color-picker.js
@@ -0,0 +1,162 @@
+/** Renders the color picker used e.g. in the tool form **/
+define(['utils/utils'], function( Utils ) {
+    return Backbone.View.extend({
+        colors: {
+            standard: ['c00000','ff0000','ffc000','ffff00','92d050','00b050','00b0f0','0070c0','002060','7030a0'],
+            base    : ['ffffff','000000','eeece1','1f497d','4f81bd','c0504d','9bbb59','8064a2','4bacc6','f79646'],
+            theme   :[['f2f2f2','7f7f7f','ddd9c3','c6d9f0','dbe5f1','f2dcdb','ebf1dd','e5e0ec','dbeef3','fdeada'],
+                      ['d8d8d8','595959','c4bd97','8db3e2','b8cce4','e5b9b7','d7e3bc','ccc1d9','b7dde8','fbd5b5'],
+                      ['bfbfbf','3f3f3f','938953','548dd4','95b3d7','d99694','c3d69b','b2a2c7','92cddc','fac08f'],
+                      ['a5a5a5','262626','494429','17365d','366092','953734','76923c','5f497a','31859b','e36c09'],
+                      ['7f7f7e','0c0c0c','1d1b10','0f243e','244061','632423','4f6128','3f3151','205867','974806']]
+        },
+
+        initialize : function( options ) {
+            this.options = Utils.merge( options, {} );
+            this.setElement( this._template() );
+            this.$panel = this.$( '.ui-color-picker-panel' );
+            this.$view = this.$( '.ui-color-picker-view' );
+            this.$value = this.$( '.ui-color-picker-value' );
+            this.$header = this.$( '.ui-color-picker-header' );
+            this._build();
+            this.visible = false;
+            this.value( this.options.value );
+            this.$boxes = this.$( '.ui-color-picker-box' );
+            var self = this;
+            this.$boxes.on( 'click', function() {
+                self.value( $( this ).css( 'background-color' ) );
+                self.$header.trigger( 'click' );
+            } );
+            this.$header.on( 'click', function() {
+                self.visible = !self.visible;
+                if ( self.visible ) {
+                    self.$view.fadeIn( 'fast' );
+                } else {
+                    self.$view.fadeOut( 'fast' );
+                }
+            } );
+        },
+
+        /** Get/set value */
+        value : function ( new_val ) {
+            if ( new_val !== undefined && new_val !== null ) {
+                this.$value.css( 'background-color', new_val );
+                this.$( '.ui-color-picker-box' ).empty();
+                this.$( this._getValue() ).html( this._templateCheck() );
+                this.options.onchange && this.options.onchange( new_val );
+            }
+            return this._getValue();
+        },
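+
+        // A minimal sketch of the get/set behavior, assuming the picker has been
+        // attached to the DOM so that css() can resolve colors ("ColorPicker"
+        // stands for the view this module returns):
+        //
+        //   var picker = new ColorPicker({ onchange: function( v ) { console.log( v ) } });
+        //   $( 'body' ).append( picker.$el );
+        //   picker.value( '#c00000' );
+        //   picker.value();   // -> '#c00000', read back from the header swatch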
+
+        /** Get value from dom */
+        _getValue: function() {
+            var rgb = this.$value.css( 'background-color' );
+            rgb = rgb.match(/^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/);
+            if ( rgb ) {
+                var hex = function( x ) {
+                    return ( '0' + parseInt( x, 10 ).toString( 16 ) ).slice( -2 );
+                };
+                return '#' + hex( rgb[ 1 ] ) + hex( rgb[ 2 ] ) + hex( rgb[ 3 ] );
+            } else {
+                return null;
+            }
+        },
+
+        /** Build color panel */
+        _build: function() {
+            this._content({
+                label       : 'Theme Colors',
+                colors      : this.colors.base,
+                padding     : 10
+            });
+            for ( var i = 0; i < this.colors.theme.length; i++ ) {
+                var line_def = {};
+                if ( i === 0 ) {
+                    line_def[ 'bottom' ] = true;
+                } else {
+                    if ( i !== this.colors.theme.length - 1 ) {
+                        line_def[ 'top' ]     = true;
+                        line_def[ 'bottom' ]  = true;
+                    } else {
+                        line_def[ 'top' ]     = true;
+                        line_def[ 'padding' ] = 5;
+                    }
+                }
+                line_def[ 'colors' ] = this.colors.theme[ i ];
+                this._content( line_def );
+            }
+            this._content({
+                label       : 'Standard Colors',
+                colors      : this.colors.standard,
+                padding     : 5
+            });
+        },
+
+        /** Create content */
+        _content: function( options ) {
+            var colors      = options.colors;
+            var padding     = options.padding;
+            var top         = options.top;
+            var bottom      = options.bottom;
+            var $content = $( this._templateContent() );
+            var $label = $content.find( '.label' );
+            if ( options.label ) {
+                $label.html( options.label );
+            } else {
+                $label.hide();
+            }
+            var $line = $content.find( '.line' );
+            this.$panel.append( $content );
+            for ( var i = 0; i < colors.length; i++ ) {
+                var $box = $( this._templateBox( colors[ i ] ) );
+                if ( top ) {
+                    $box.css( 'border-top', 'none' );
+                    $box.css( 'border-top-left-radius', '0px' );
+                    $box.css( 'border-top-right-radius', '0px' );
+                }
+                if ( bottom ) {
+                    $box.css( 'border-bottom', 'none' );
+                    $box.css( 'border-bottom-left-radius', '0px' );
+                    $box.css( 'border-bottom-right-radius', '0px' );
+                }
+                $line.append( $box );
+            }
+            if (padding) {
+                $line.css( 'padding-bottom', padding );
+            }
+            return $content;
+        },
+
+        /** Check icon */
+        _templateCheck: function() {
+            return  '<div class="ui-color-picker-check fa fa-check"/>';
+        },
+
+        /** Content template */
+        _templateContent: function() {
+            return  '<div class="ui-color-picker-content">' +
+                        '<div class="label"/>' +
+                        '<div class="line"/>' +
+                    '</div>';
+        },
+
+        /** Box template */
+        _templateBox: function( color ) {
+            return '<div id="' + color + '" class="ui-color-picker-box" style="background-color: #' + color + ';"/>';
+        },
+
+        /** Main template */
+        _template: function() {
+            return  '<div class="ui-color-picker">' +
+                        '<div class="ui-color-picker-header">' +
+                            '<div class="ui-color-picker-value"/>' +
+                            '<div class="ui-color-picker-label">Select a color</div>' +
+                        '</div>' +
+                        '<div class="ui-color-picker-view ui-input">' +
+                            '<div class="ui-color-picker-panel"/>' +
+                        '</div>' +
+                    '</div>';
+        }
+    });
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/ui/ui-drilldown.js b/client/galaxy/scripts/mvc/ui/ui-drilldown.js
new file mode 100644
index 0000000..71627f2
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-drilldown.js
@@ -0,0 +1,96 @@
+/** This class creates/wraps a drill down element. */
+define([ 'utils/utils', 'mvc/ui/ui-options' ], function( Utils, Options ) {
+
+var View = Options.BaseIcons.extend({
+    initialize: function( options ) {
+        options.type     = options.display || 'checkbox';
+        options.multiple = ( options.type == 'checkbox' );
+        Options.BaseIcons.prototype.initialize.call( this, options );
+    },
+
+    /** Set states for selected values */
+    _setValue: function ( new_value ) {
+        Options.BaseIcons.prototype._setValue.call( this, new_value );
+        if ( new_value !== undefined && new_value !== null && this.header_index ) {
+            var self = this;
+            var values = $.isArray( new_value ) ? new_value : [ new_value ];
+            _.each( values, function( v ) {
+                var list = self.header_index[ v ];
+                _.each( list, function( element ) {
+                    self._setState( element, true );
+                });
+            });
+        }
+    },
+
+    /** Expand/collapse a sub group */
+    _setState: function ( header_id, is_expanded ) {
+        var $button = this.$( '.button-' + header_id );
+        var $subgroup = this.$( '.subgroup-' + header_id );
+        $button.data( 'is_expanded', is_expanded );
+        if ( is_expanded ) {
+            $subgroup.show();
+            $button.removeClass( 'fa-plus-square' ).addClass( 'fa-minus-square' );
+        } else {
+            $subgroup.hide();
+            $button.removeClass( 'fa-minus-square' ).addClass( 'fa-plus-square' );
+        }
+    },
+
+    /** Template to create options tree */
+    _templateOptions: function() {
+        var self = this;
+        this.header_index = {};
+
+        // attach event handler
+        function attach( $el, header_id ) {
+            var $button = $el.find( '.button-' + header_id );
+            $button.on( 'click', function() {
+                self._setState( header_id, !$button.data( 'is_expanded' ) );
+            });
+        }
+
+        // recursive function which iterates through options
+        function iterate ( $tmpl, options, header ) {
+            header = header || [];
+            for ( var i in options ) {
+                var level = options[ i ];
+                var has_options = level.options && level.options.length > 0;
+                var new_header = header.slice( 0 );
+                self.header_index[ level.value ] = new_header.slice( 0 );
+                var $group = $( '<div/>' );
+                if ( has_options ) {
+                    var header_id = Utils.uid();
+                    var $button = $( '<span/>' ).addClass( 'button-' + header_id ).addClass( 'ui-drilldown-button fa fa-plus-square' );
+                    var $subgroup = $( '<div/>' ).addClass( 'subgroup-' + header_id ).addClass( 'ui-drilldown-subgroup' );
+                    $group.append( $( '<div/>' )
+                                        .append( $button )
+                                        .append( self._templateOption( { label: level.name, value: level.value } ) ) );
+                    new_header.push( header_id );
+                    iterate( $subgroup, level.options, new_header );
+                    $group.append( $subgroup );
+                    attach( $group, header_id );
+                } else {
+                    $group.append( self._templateOption( { label: level.name, value: level.value } ) );
+                }
+                $tmpl.append( $group );
+            }
+        }
+
+        // iterate through options and create dom
+        var $tmpl = $( '<div/>' );
+        iterate( $tmpl, this.model.get( 'data' ) );
+        return $tmpl;
+    },
+
+    /** Template for drill down view */
+    _template: function() {
+        return $( '<div/>' ).addClass( 'ui-options-list drilldown-container' ).attr( 'id', this.model.id );
+    }
+});
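+
+// A minimal sketch of the expected nested `data` format: entries with a
+// non-empty `options` array become expandable headers, leaves become regular
+// options (the names and values below are made up):
+//
+//   var drilldown = new View({
+//       display : 'checkbox',
+//       data    : [ { name: 'Eukaryotes', value: 'euk',
+//                     options: [ { name: 'Human', value: 'hg38', options: [] } ] } ]
+//   });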
+
+return {
+    View: View
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/ui/ui-frames.js b/client/galaxy/scripts/mvc/ui/ui-frames.js
new file mode 100644
index 0000000..280f147
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-frames.js
@@ -0,0 +1,573 @@
+define([], function() {
+
+/** Frame view */
+var FrameView = Backbone.View.extend({
+    initialize: function( options ) {
+        var self = this;
+        this.model = options && options.model || new Backbone.Model( options );
+        this.setElement( $( '<div/>' ).addClass( 'corner frame' ) );
+        this.$el.append( $( '<div/>' ).addClass( 'f-header corner' )
+                                      .append( $( '<div/>' ).addClass( 'f-title' ) )
+                                      .append( $( '<div/>' ).addClass( 'f-icon f-close fa fa-close' )
+                                                            .tooltip( { title: 'Close', placement: 'bottom' } ) ) )
+                .append( $( '<div/>' ).addClass( 'f-content' ) )
+                .append( $( '<div/>' ).addClass( 'f-resize f-icon corner fa fa-expand' ).tooltip( { title: 'Resize' } ) )
+                .append( $( '<div/>' ).addClass( 'f-cover' ) );
+        this.$header  = this.$( '.f-header' );
+        this.$title   = this.$( '.f-title' );
+        this.$content = this.$( '.f-content' );
+        this.render();
+        this.listenTo( this.model, 'change', this.render, this );
+    },
+
+    render: function() {
+        var self = this;
+        var options = this.model.attributes;
+        this.$title.html( options.title || '' );
+        this.$header.find( '.f-icon-left' ).remove();
+        _.each( options.menu, function( option ) {
+            var $option = $( '<div/>' ).addClass( 'f-icon-left' ).addClass( option.icon );
+            if ( _.isFunction( option.disabled ) && option.disabled() ) {
+                $option.attr( 'disabled', true );
+            } else {
+                $option.on( 'click', function() { option.onclick( self ) } )
+                       .tooltip( { title: option.tooltip, placement: 'bottom' } );
+            }
+            self.$header.append( $option );
+        } );
+        if ( options.url ) {
+            this.$content.html( $( '<iframe/>' ).addClass( 'f-iframe' )
+                                                .attr( 'scrolling', 'auto' )
+                                                .attr( 'src', options.url + ( options.url.indexOf( '?' ) === -1 ? '?' : '&' ) + 'widget=True' ) );
+        } else if ( options.content ) {
+            _.isFunction( options.content ) ? options.content( self.$content ) : self.$content.html( options.content );
+        }
+    }
+});
+
+/** Scratchbook viewer */
+var View = Backbone.View.extend({
+    defaultOptions: {
+        frame: {             // default frame size in cells
+            cols : 6,
+            rows : 3
+        },
+        rows        : 1000,  // maximum number of rows
+        cell        : 130,   // cell size in px
+        margin      : 5,     // margin between frames
+        scroll      : 5,     // scroll speed
+        top_min     : 40,    // top margin
+        frame_max   : 9,     // maximum number of frames
+        visible     : true   // initial visibility
+    },
+
+    cols            : 0,     // number of columns
+    top             : 0,     // scroll/element top
+    top_max         : 0,     // viewport scrolling state
+    frame_z         : 0,     // frame z-index
+    frame_counter   : 0,     // frame counter
+    frame_uid       : 0,     // unique frame id counter
+    frame_list      : {},    // list of all frames
+    frame_shadow    : null,  // frame shown as placeholder when moving active frames
+    visible         : false, // flag indicating if scratchbook viewer is visible or not
+    event           : {},    // dictionary keeping track of current event
+
+    initialize : function( options ) {
+        var self = this;
+        this.options = _.defaults( options || {}, this.defaultOptions );
+        this.visible = this.options.visible;
+        this.top = this.top_max = this.options.top_min;
+        this.setElement( $( '<div/>' ).addClass( 'galaxy-frame' )
+                            .append( $( '<div/>' ).addClass( 'frame-background' ) )
+                            .append( $( '<div/>' ).addClass( 'frame-menu frame-scroll-up fa fa-chevron-up fa-2x' ) )
+                            .append( $( '<div/>' ).addClass( 'frame-menu frame-scroll-down fa fa-chevron-down fa-2x' ) ) );
+
+        // initialize shadow frame used to guide drag/resize events
+        this.frame_shadow = new Backbone.View({ el: $( '<div/>' ).addClass( 'corner frame-shadow' ) } );
+        this.$el.append( this.frame_shadow.$el );
+        this._frameInit( this.frame_shadow, '#frame-shadow' );
+        this._frameResize( this.frame_shadow, { width: 0, height: 0 } );
+        this.frame_list[ '#frame-shadow' ] = this.frame_shadow;
+
+        // initialize panel
+        this.visible ? this.show() : this.hide();
+        this._panelRefresh();
+        $( window ).resize( function() { self.visible && self._panelRefresh() } );
+    },
+
+    /** Render */
+    render: function() {
+        this.$( '.frame-scroll-up' )[ this.top != this.options.top_min ? 'show' : 'hide' ]();
+        this.$( '.frame-scroll-down' )[ this.top != this.top_max ? 'show' : 'hide' ]();
+    },
+
+    /**
+     * Adds and displays a new frame.
+     *
+     * options:
+     *  url     : loaded into an iframe
+     *  content : treated as a function or as raw HTML; a function receives the
+     *              frame's content element (a jQuery selection) as its single argument
+     */
+    add: function( options ) {
+        if ( this.frame_counter >= this.options.frame_max ) {
+            Galaxy.modal.show( {
+                title   : 'Warning',
+                body    : 'You have reached the maximum number of allowed frames (' + this.options.frame_max + ').',
+                buttons : { 'Close' : function() { Galaxy.modal.hide() } }
+            });
+
+        } else {
+            var frame_id = '#frame-' + ( this.frame_uid++ );
+            if ( $( frame_id ).length !== 0 ) {
+                Galaxy.modal.show( {
+                    title   : 'Error',
+                    body    : 'This frame already exists. This page might contain multiple frame managers.',
+                    buttons : { 'Close' : function() { Galaxy.modal.hide() } }
+                });
+            } else {
+                // initialize new frame elements
+                this.top = this.options.top_min;
+                var frame = new FrameView( options );
+                this.$el.append( frame.$el );
+
+                // set dimensions
+                options.width   = this._toPixelCoord( 'width', this.options.frame.cols );
+                options.height  = this._toPixelCoord( 'height', this.options.frame.rows );
+
+                // set default z-index and add to ui and frame list
+                this.frame_z = parseInt( frame.$el.css( 'z-index' ) );
+                this.frame_list[ frame_id ] = frame;
+                this.frame_counter++;
+                this._frameInit( frame, frame_id );
+                this._frameResize( frame, { width: options.width, height: options.height } );
+                this._frameInsert( frame, { top: 0, left: 0 }, true );
+                !this.visible && this.show();
+                this.trigger( 'add' );
+            }
+        }
+    },
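+
+    // A minimal sketch of both content variants (the URL below is made up):
+    //
+    //   view.add({ title: 'External page', url: '/static/welcome.html' });
+    //   view.add({ title: 'Custom', content: function( $content ) { $content.html( 'Hello' ) } });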
+
+    /** Remove a frame */
+    del: function( frame ) {
+        var self = this;
+        var $frame = frame.$el;
+        $frame.fadeOut( 'fast', function() {
+            $frame.remove();
+            delete self.frame_list[ frame.id ];
+            self.frame_counter--;
+            self._panelRefresh( true );
+            self._panelAnimationComplete();
+            self.trigger( 'remove' );
+        });
+    },
+
+    /** Show panel */
+    show: function() {
+        this.visible = true;
+        this.$el.fadeIn( 'fast' );
+        this.trigger( 'show' );
+    },
+
+    /** Hide panel */
+    hide: function() {
+        if ( !this.event.type ) {
+            this.visible = false;
+            this.$el.fadeOut('fast', function() { $( this ).hide() });
+            this.trigger( 'hide' );
+        }
+    },
+
+    /** Returns the number of frames */
+    length: function() {
+        return this.frame_counter;
+    },
+
+    /*
+        EVENT HANDLING
+    */
+    events: {
+        // global frame events
+        'mousemove'                         : '_eventFrameMouseMove',
+        'mouseup'                           : '_eventFrameMouseUp',
+        'mouseleave'                        : '_eventFrameMouseUp',
+        'mousewheel'                        : '_eventPanelScroll',
+        'DOMMouseScroll'                    : '_eventPanelScroll',
+
+        // events fixed to elements
+        'mousedown .frame'                  : '_eventFrameMouseDown',
+        'mousedown .frame-background'       : '_eventHide',
+        'mousedown .frame-scroll-up'        : '_eventPanelScroll_up',
+        'mousedown .frame-scroll-down'      : '_eventPanelScroll_down',
+        'mousedown .f-close'                : '_eventFrameClose'
+    },
+
+    /** Start drag/resize event */
+    _eventFrameMouseDown: function ( e ) {
+        $( '.tooltip' ).hide();
+        if ( !this.event.type ) {
+            if ( $( e.target ).hasClass( 'f-header' ) || $( e.target ).hasClass( 'f-title' ) ) {
+                this.event.type = 'drag';
+            }
+            if ( $( e.target ).hasClass( 'f-resize' ) ) {
+                this.event.type = 'resize';
+            }
+            if ( this.event.type ) {
+                e.preventDefault();
+                this.event.target = this._frameIdentify( e.target );
+                this.event.xy = {
+                    x: e.originalEvent.pageX,
+                    y: e.originalEvent.pageY
+                };
+                this._frameDragStart( this.event.target );
+            }
+        }
+    },
+
+    /** Processes drag/resize events */
+    _eventFrameMouseMove: function ( e ) {
+        if ( this.event.type ) {
+            // get mouse motion and delta
+            var event_xy_new = {
+                x : e.originalEvent.pageX,
+                y : e.originalEvent.pageY
+            };
+            var event_xy_delta = {
+                x : event_xy_new.x - this.event.xy.x,
+                y : event_xy_new.y - this.event.xy.y
+            };
+            this.event.xy = event_xy_new;
+
+            // get current screen position and size of frame
+            var p = this._frameScreen ( this.event.target );
+
+            // drag/resize event
+            if ( this.event.type == 'resize' ) {
+                p.width  += event_xy_delta.x;
+                p.height += event_xy_delta.y;
+                var min_dim = this.options.cell - this.options.margin - 1;
+                p.width = Math.max( p.width, min_dim );
+                p.height = Math.max( p.height, min_dim );
+                this._frameResize( this.event.target, p );
+                p.width = this._toGridCoord( 'width', p.width ) + 1;
+                p.height = this._toGridCoord( 'height', p.height ) + 1;
+                p.width = this._toPixelCoord( 'width', p.width );
+                p.height = this._toPixelCoord( 'height', p.height );
+                this._frameResize( this.frame_shadow, p );
+                this._frameInsert( this.frame_shadow, {
+                    top     : this._toGridCoord( 'top', p.top ),
+                    left    : this._toGridCoord( 'left', p.left )
+                });
+            } else if ( this.event.type == 'drag' ) {
+                p.left  += event_xy_delta.x;
+                p.top   += event_xy_delta.y;
+                this._frameOffset( this.event.target, p );
+                var l = {
+                    top     : this._toGridCoord( 'top', p.top ),
+                    left    : this._toGridCoord( 'left', p.left )
+                };
+                l.left !== 0 && l.left++;
+                this._frameInsert( this.frame_shadow, l );
+            }
+        }
+    },
+
+    /** Stop drag/resize events */
+    _eventFrameMouseUp: function ( e ) {
+        if ( this.event.type ) {
+            this._frameDragStop( this.event.target );
+            this.event.type = null;
+        }
+    },
+
+    /** Destroy a frame */
+    _eventFrameClose: function ( e ) {
+        if ( !this.event.type ) {
+            e.preventDefault();
+            this.del( this._frameIdentify( e.target ) );
+        }
+    },
+
+    /** Hide all frames */
+    _eventHide: function ( e ) {
+        !this.event.type && this.hide();
+    },
+
+    /** Fired when scrolling occurs on panel */
+    _eventPanelScroll: function( e ) {
+        if ( !this.event.type && this.visible ) {
+            // Stop propagation if scrolling is happening inside a frame.
+            // TODO: could propagate scrolling if at top/bottom of frame.
+            var frames = $( e.srcElement ).parents( '.frame' );
+            if ( frames.length !== 0 ) {
+                e.stopPropagation();
+            } else {
+                e.preventDefault();
+                this._panelScroll( e.originalEvent.detail ? e.originalEvent.detail : e.originalEvent.wheelDelta / -3 );
+            }
+        }
+    },
+
+    /** Handle scroll up event */
+    _eventPanelScroll_up: function( e ) {
+        if ( !this.event.type ) {
+            e.preventDefault();
+            this._panelScroll( -this.options.scroll );
+        }
+    },
+
+    /** Handle scroll down */
+    _eventPanelScroll_down: function(e) {
+        if ( !this.event.type ) {
+            e.preventDefault();
+            this._panelScroll( this.options.scroll );
+        }
+    },
+
+    /*
+        FRAME EVENTS SUPPORT
+    */
+
+    /** Identify the target frame */
+    _frameIdentify: function( target ) {
+        return this.frame_list[ '#' + $( target ).closest( '.frame' ).attr( 'id' ) ];
+    },
+
+    /** Provides drag support */
+    _frameDragStart : function ( frame ) {
+        this._frameFocus( frame, true );
+        var p = this._frameScreen( frame );
+        this._frameResize( this.frame_shadow, p );
+        this._frameGrid( this.frame_shadow, frame.grid_location );
+        frame.grid_location = null;
+        this.frame_shadow.$el.show();
+        $( '.f-cover' ).show();
+    },
+
+    /** Removes drag support */
+    _frameDragStop : function ( frame ) {
+        this._frameFocus( frame, false );
+        var p = this._frameScreen( this.frame_shadow );
+        this._frameResize( frame, p );
+        this._frameGrid( frame, this.frame_shadow.grid_location, true );
+        this.frame_shadow.grid_location = null;
+        this.frame_shadow.$el.hide();
+        $( '.f-cover' ).hide();
+        this._panelAnimationComplete();
+    },
+
+    /*
+        GRID/PIXEL CONVERTER
+    */
+
+    /** Converts a pixel to a grid dimension */
+    _toGridCoord: function ( type, px ) {
+        var sign = ( type == 'width' || type == 'height' ) ? 1 : -1;
+        type == 'top' && ( px -= this.top );
+        return parseInt( ( px + sign * this.options.margin ) / this.options.cell, 10 );
+    },
+
+    /** Converts a grid to a pixel dimension */
+    _toPixelCoord: function ( type, g ) {
+        var sign = ( type == 'width' || type == 'height' ) ? 1 : -1;
+        var px = ( g * this.options.cell ) - sign * this.options.margin;
+        type == 'top' && ( px += this.top );
+        return px;
+    },
+
+    /** Converts a pixel to a grid coordinate set */
+    _toGrid: function ( px ) {
+        return {
+            top     : this._toGridCoord( 'top', px.top ),
+            left    : this._toGridCoord( 'left', px.left ),
+            width   : this._toGridCoord( 'width', px.width ),
+            height  : this._toGridCoord( 'height', px.height )
+        };
+    },
+
+    /** Converts a grid coordinate set to pixels */
+    _toPixel: function( g ) {
+        return {
+            top     : this._toPixelCoord( 'top', g.top ),
+            left    : this._toPixelCoord( 'left', g.left ),
+            width   : this._toPixelCoord( 'width', g.width ),
+            height  : this._toPixelCoord( 'height', g.height )
+        };
+    },
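+
+    // A worked example with the defaults above (cell = 130, margin = 5,
+    // top = top_min = 40): a frame six cells wide maps to
+    //   _toPixelCoord( 'width', 6 )  ->  6 * 130 - 5      = 775 px
+    //   _toPixelCoord( 'top',   1 )  ->  1 * 130 + 5 + 40 = 175 px
+    // _toGridCoord applies the inverse, snapping pixel offsets back onto the grid.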
+
+    /* 
+        COLLISION DETECTION
+    */
+
+    /** Check collisions for a grid coordinate set */
+    _isCollision: function( g ) {
+        function is_collision_pair ( a, b ) {
+            return !( a.left > b.left + b.width - 1 || a.left + a.width - 1 < b.left ||
+                      a.top > b.top + b.height  - 1 || a.top + a.height - 1 < b.top );
+        }
+        for ( var i in this.frame_list ) {
+            var frame = this.frame_list[ i ];
+            if ( frame.grid_location !== null && is_collision_pair ( g, frame.grid_location ) ) {
+                return true;
+            }
+        }
+        return false;
+    },
+
+    /** Return location/grid rank */
+    _locationRank: function( loc ) {
+        return ( loc.top * this.cols ) + loc.left;
+    },
+
+    /*
+        PANEL/WINDOW FUNCTIONS
+    */
+
+    /** Refresh panel */
+    _panelRefresh: function( animate ) {
+        this.cols = parseInt( $( window ).width() / this.options.cell, 10 ) + 1;
+        this._frameInsert( null, null, animate );
+    },
+
+    /** Complete panel animation / frames not moving */
+    _panelAnimationComplete: function() {
+        var self = this;
+        $( '.frame' ).promise().done( function() { self._panelScroll( 0, true ) } );
+    },
+
+    /** Scroll panel */
+    _panelScroll: function( delta, animate ) {
+        var top_new = this.top - this.options.scroll * delta;
+        top_new = Math.max( top_new, this.top_max );
+        top_new = Math.min( top_new, this.options.top_min );
+        if ( this.top != top_new ) {
+            for ( var i in this.frame_list ) {
+                var frame = this.frame_list[ i ];
+                if ( frame.grid_location !== null ) {
+                    var screen_location = {
+                        top  : frame.screen_location.top - ( this.top - top_new ),
+                        left : frame.screen_location.left
+                    };
+                    this._frameOffset( frame, screen_location, animate );
+                }
+            }
+            this.top = top_new;
+        }
+        this.render();
+    },
+
+    /*
+        FRAME FUNCTIONS
+    */
+
+    /** Initialize a new frame */
+    _frameInit: function( frame, id ) {
+        frame.id              = id;
+        frame.screen_location = {};
+        frame.grid_location   = {};
+        frame.grid_rank       = null;
+        frame.$el.attr( 'id', id.substring( 1 ) );
+    },
+
+    /** Insert frame at given location */
+    _frameInsert: function( frame, new_loc, animate ) {
+        var self = this;
+        var place_list = [];
+        if ( frame ) {
+            frame.grid_location = null;
+            place_list.push( [ frame, this._locationRank( new_loc ) ] );
+        }
+        _.each( this.frame_list, function( f ) {
+            if ( f.grid_location !== null ) {
+                f.grid_location = null;
+                place_list.push( [ f, f.grid_rank ] );
+            }
+        });
+        place_list.sort( function( a, b ) {
+            return a[ 1 ] < b[ 1 ] ? -1 : ( a[ 1 ] > b[ 1 ] ? 1 : 0 );
+        });
+        _.each( place_list, function( place ) {
+            self._framePlace( place[ 0 ], animate );
+        });
+        this.top_max = 0;
+        _.each( this.frame_list, function( f ) {
+            if ( f.grid_location !== null ) {
+                self.top_max = Math.max( self.top_max, f.grid_location.top + f.grid_location.height );
+            }
+        });
+        this.top_max = $( window ).height() - this.top_max * this.options.cell - 2 * this.options.margin;
+        this.top_max = Math.min( this.top_max, this.options.top_min );
+        this.render();
+    },
+
+    /** Naive frame placement */
+    _framePlace: function( frame, animate ) {
+        frame.grid_location = null;
+        var g = this._toGrid( this._frameScreen( frame ) );
+        var done = false;
+        for ( var i = 0; i < this.options.rows; i++ ) {
+            for ( var j = 0; j < Math.max(1, this.cols - g.width ); j++ ) {
+                g.top   = i;
+                g.left  = j;
+                if ( !this._isCollision( g ) ) {
+                    done = true;
+                    break;
+                }
+            }
+            if ( done ) {
+                break;
+            }
+        }
+        if ( done ) {
+            this._frameGrid( frame, g, animate );
+        } else {
+            console.log( 'Grid dimensions exceeded.' );
+        }
+    },
+
+    /** Handle frame focussing */
+    _frameFocus: function( frame, has_focus ) {
+        frame.$el.css( 'z-index', this.frame_z + ( has_focus ? 1 : 0 ) );
+    },
+
+    /** New left/top position frame */
+    _frameOffset: function( frame, p, animate ) {
+        frame.screen_location.left = p.left;
+        frame.screen_location.top = p.top;
+        if ( animate ) {
+            this._frameFocus( frame, true );
+            var self = this;
+            frame.$el.animate({ top: p.top, left: p.left }, 'fast', function() {
+                self._frameFocus( frame, false );
+            });
+        } else {
+            frame.$el.css( { top: p.top, left: p.left } );
+        }
+    },
+
+    /** Resize frame */
+    _frameResize: function( frame, p ) {
+        frame.$el.css( { width: p.width, height: p.height } );
+        frame.screen_location.width = p.width;
+        frame.screen_location.height = p.height;
+    },
+
+    /** Push frame to new grid location */
+    _frameGrid: function ( frame, l, animate ) {
+        frame.grid_location = l;
+        this._frameOffset( frame, this._toPixel( l ), animate );
+        frame.grid_rank = this._locationRank( l );
+    },
+
+    /** Get frame dimensions */
+    _frameScreen: function( frame ) {
+        var p = frame.screen_location;
+        return { top: p.top, left: p.left, width: p.width, height: p.height };
+    }
+});
+
+return {
+    View: View
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/ui/ui-list.js b/client/galaxy/scripts/mvc/ui/ui-list.js
new file mode 100644
index 0000000..bdc7d08
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-list.js
@@ -0,0 +1,157 @@
+// dependencies
+define(['utils/utils', 'mvc/ui/ui-portlet', 'mvc/ui/ui-misc'], function(Utils, Portlet, Ui) {
+
+// ui list element
+var View = Backbone.View.extend({
+    // create portlet to keep track of selected list elements
+    initialize : function(options) {
+        // link this
+        var self = this;
+
+        // initialize options
+        this.options = options;
+        this.name = options.name || 'element';
+        this.multiple = options.multiple || false;
+
+        // create message handler
+        this.message = new Ui.Message();
+
+        // create portlet
+        this.portlet = new Portlet.View({ cls: 'ui-portlet-section' });
+
+        // create select field containing the options which can be inserted into the list
+        this.select = new Ui.Select.View({ optional : options.optional });
+
+        // create insert new list element button
+        this.button = new Ui.ButtonIcon({
+            icon        : 'fa fa-sign-in',
+            floating    : 'left',
+            tooltip     : 'Insert new ' + this.name,
+            onclick     : function() {
+                self.add({
+                    id      : self.select.value(),
+                    name    : self.select.text()
+                });
+            }
+        });
+
+        // build main element
+        this.setElement(this._template(options));
+        this.$('.ui-list-message').append(this.message.$el);
+        this.$('.ui-list-portlet').append(this.portlet.$el);
+        this.$('.ui-list-button').append(this.button.$el);
+        this.$('.ui-list-select').append(this.select.$el);
+    },
+
+    /** Return/Set currently selected list elements */
+    value: function(val) {
+        // set new value
+        if (val !== undefined) {
+            this.portlet.empty();
+            if ($.isArray(val)) {
+                for (var i in val) {
+                    var v = val[i];
+                    var v_id = null;
+                    var v_name = null;
+                    if ($.type(v) != 'string') {
+                        v_id = v.id;
+                        v_name = v.name;
+                    } else {
+                        v_id = v_name = v;
+                    }
+                    if (v_id != null) {
+                        this.add({
+                            id      : v_id,
+                            name    : v_name
+                        });
+                    }
+                }
+            }
+            this._refresh();
+        }
+        // get current value
+        var lst = [];
+        this.$('.ui-list-id').each(function() {
+            lst.push({
+                id      : $(this).prop('id'),
+                name    : $(this).find('.ui-list-name').html()
+            });
+        });
+        if (lst.length == 0) {
+            return null;
+        }
+        return lst;
+    },
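+
+    // A minimal sketch of the accepted formats, assuming `list` is an instance
+    // of this view: plain strings serve as both id and name, objects may carry
+    // them separately.
+    //
+    //   list.value( [ 'sample1', { id: 'ds2', name: 'sample2' } ] );
+    //   list.value();   // -> [ { id: 'sample1', name: 'sample1' },
+    //                   //      { id: 'ds2',     name: 'sample2' } ]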
+
+    /** Add row */
+    add: function(options) {
+        var self = this;
+        if (this.$('[id="' + options.id + '"]').length === 0) {
+            if (!Utils.isEmpty(options.id)) {
+                var $el = $(this._templateRow({
+                    id      : options.id,
+                    name    : options.name
+                }));
+                $el.on('click', function() {
+                    $el.remove();
+                    self._refresh();
+                });
+                $el.on('mouseover', function() {
+                    $el.addClass('portlet-highlight');
+                });
+                $el.on('mouseout', function() {
+                    $el.removeClass('portlet-highlight');
+                });
+                this.portlet.append($el);
+                this._refresh();
+            } else {
+                this.message.update({ message: 'Please select a valid ' + this.name + '.', status: 'danger' });
+            }
+        } else {
+            this.message.update({ message: 'This ' + this.name + ' is already in the list.' });
+        }
+    },
+
+    /** Update available options */
+    update: function(options) {
+        this.select.update(options);
+    },
+
+    /** Refresh view */
+    _refresh: function() {
+        if (this.$('.ui-list-id').length > 0) {
+            !this.multiple && this.button.disable();
+            this.$('.ui-list-portlet').show();
+        } else {
+            this.button.enable();
+            this.$('.ui-list-portlet').hide();
+        }
+        this.options.onchange && this.options.onchange();
+    },
+
+    /** Main Template */
+    _template: function(options) {
+        return  '<div class="ui-list">' +
+                    '<div class="ui-margin-top">' +
+                        '<span class="ui-list-button"/>' +
+                        '<span class="ui-list-select"/>' +
+                    '</div>' +
+                    '<div class="ui-list-message"/>' +
+                    '<div class="ui-list-portlet"/>' +
+                '</div>';
+    },
+
+    /** Row Template */
+    _templateRow: function(options) {
+        return  '<div id="' + options.id + '" class="ui-list-id">' +
+                    '<span class="ui-list-delete fa fa-trash"/>' +
+                    '<span class="ui-list-name">' + options.name + '</span>' +
+                '</div>';
+    }
+});
+
+return {
+    View: View
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/ui/ui-misc.js b/client/galaxy/scripts/mvc/ui/ui-misc.js
new file mode 100644
index 0000000..5fb397d
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-misc.js
@@ -0,0 +1,168 @@
+/**
+ *  This module contains Backbone wrappers for basic UI elements such as images, labels, buttons, and input fields.
+ */
+define(['utils/utils',
+    'mvc/ui/ui-select-default',
+    'mvc/ui/ui-slider',
+    'mvc/ui/ui-options',
+    'mvc/ui/ui-drilldown',
+    'mvc/ui/ui-buttons',
+    'mvc/ui/ui-modal'],
+    function( Utils, Select, Slider, Options, Drilldown, Buttons, Modal ) {
+
+    /** Label wrapper */
+    var Label = Backbone.View.extend({
+        tagName: 'label',
+        initialize: function( options ) {
+            this.model = options && options.model || new Backbone.Model( options );
+            this.tagName = ( options && options.tagName ) || this.tagName;
+            this.setElement( $( '<' + this.tagName + '/>' ) );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+        },
+        title: function( new_title ) {
+            this.model.set( 'title', new_title );
+        },
+        value: function() {
+            return this.model.get( 'title' );
+        },
+        render: function() {
+            this.$el.removeClass()
+                    .addClass( 'ui-label' )
+                    .addClass( this.model.get( 'cls' ) )
+                    .html( this.model.get( 'title' ) );
+            return this;
+        }
+    });
+
+    /** Displays messages used e.g. in the tool form */
+    var Message = Backbone.View.extend({
+        initialize: function( options ) {
+            this.model = options && options.model || new Backbone.Model({
+                message     : null,
+                status      : 'info',
+                cls         : '',
+                persistent  : false,
+                fade        : true
+            }).set( options );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+        },
+        update: function( options ) {
+            this.model.set( options );
+        },
+        render: function() {
+            this.$el.removeClass().addClass( 'ui-message' ).addClass( this.model.get( 'cls' ) );
+            var status = this.model.get( 'status' );
+            if ( this.model.get( 'large' ) ) {
+                this.$el.addClass((( status == 'success' && 'done' ) ||
+                                   ( status == 'danger' && 'error' ) ||
+                                     status ) + 'messagelarge' );
+            } else {
+                this.$el.addClass( 'alert' ).addClass( 'alert-' + status );
+            }
+            if ( this.model.get( 'message' ) ) {
+                this.$el.html( this.model.get( 'message' ) );
+                this.$el[ this.model.get( 'fade' ) ? 'fadeIn' : 'show' ]();
+                this.timeout && window.clearTimeout( this.timeout );
+                if ( !this.model.get( 'persistent' ) ) {
+                    var self = this;
+                    this.timeout = window.setTimeout( function() {
+                        self.model.set( 'message', '' );
+                    }, 3000 );
+                }
+            } else {
+                this.$el.fadeOut();
+            }
+            return this;
+        }
+    });
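+
+    // A minimal usage sketch: a transient success message that fades in and
+    // clears itself after three seconds (pass persistent: true to keep it):
+    //
+    //   var message = new Message();
+    //   $( 'body' ).append( message.$el );
+    //   message.update({ message: 'Job submitted.', status: 'success' });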
+
+    /** Renders an input element used e.g. in the tool form */
+    var Input = Backbone.View.extend({
+        initialize: function( options ) {
+            this.model = options && options.model || new Backbone.Model({
+                type            : 'text',
+                placeholder     : '',
+                disabled        : false,
+                visible         : true,
+                cls             : '',
+                area            : false,
+                color           : null,
+                style           : null
+            }).set( options );
+            this.tagName = this.model.get( 'area' ) ? 'textarea' : 'input';
+            this.setElement( $( '<' + this.tagName + '/>' ) );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+        },
+        events: {
+            'input': '_onchange'
+        },
+        value: function( new_val ) {
+            new_val !== undefined && this.model.set( 'value', typeof new_val === 'string' ? new_val : '' );
+            return this.model.get( 'value' );
+        },
+        render: function() {
+            this.$el.removeClass()
+                    .addClass( 'ui-' + this.tagName )
+                    .addClass( this.model.get( 'cls' ) )
+                    .addClass( this.model.get( 'style' ) )
+                    .attr( 'id', this.model.id )
+                    .attr( 'type', this.model.get( 'type' ) )
+                    .attr( 'placeholder', this.model.get( 'placeholder' ) )
+                    .css( 'color', this.model.get( 'color' ) || '' )
+                    .css( 'border-color', this.model.get( 'color' ) || '' );
+            if ( this.model.get( 'value' ) !== this.$el.val() ) {
+                this.$el.val( this.model.get( 'value' ) );
+            }
+            this.model.get( 'disabled' ) ? this.$el.attr( 'disabled', true ) : this.$el.removeAttr( 'disabled' );
+            this.$el[ this.model.get( 'visible' ) ? 'show' : 'hide' ]();
+            return this;
+        },
+        _onchange: function() {
+            this.value( this.$el.val() );
+            this.model.get( 'onchange' ) && this.model.get( 'onchange' )( this.model.get( 'value' ) );
+        }
+    });
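+
+    // A minimal sketch: a text input whose onchange callback fires as the user
+    // types; value() sets or gets the value programmatically.
+    //
+    //   var input = new Input({
+    //       placeholder : 'Enter a name',
+    //       onchange    : function( value ) { console.log( value ) }
+    //   });
+    //   $( 'body' ).append( input.$el );
+    //   input.value( 'default' );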
+
+    /** Creates a hidden input field with an optional info message, used e.g. in the tool form */
+    var Hidden = Backbone.View.extend({
+        initialize: function( options ) {
+            this.model = options && options.model || new Backbone.Model( options );
+            this.setElement( $ ( '<div/>' ).append( this.$info = $( '<div/>' ) )
+                                           .append( this.$hidden = $( '<div/>' ) ) );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+        },
+        value: function( new_val ) {
+            new_val !== undefined && this.model.set( 'value', new_val );
+            return this.model.get( 'value' );
+        },
+        render: function() {
+            this.$el.attr( 'id', this.model.id );
+            this.$hidden.val( this.model.get( 'value' ) );
+            this.model.get( 'info' ) ? this.$info.show().html( this.model.get( 'info' ) ) : this.$info.hide();
+            return this;
+        }
+    });
+
+    return {
+        Button      : Buttons.ButtonDefault,
+        ButtonIcon  : Buttons.ButtonIcon,
+        ButtonCheck : Buttons.ButtonCheck,
+        ButtonMenu  : Buttons.ButtonMenu,
+        ButtonLink  : Buttons.ButtonLink,
+        Input       : Input,
+        Label       : Label,
+        Message     : Message,
+        Modal       : Modal,
+        RadioButton : Options.RadioButton,
+        Checkbox    : Options.Checkbox,
+        Radio       : Options.Radio,
+        Select      : Select,
+        Hidden      : Hidden,
+        Slider      : Slider,
+        Drilldown   : Drilldown
+    };
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/ui/ui-modal.js b/client/galaxy/scripts/mvc/ui/ui-modal.js
new file mode 100644
index 0000000..73adf7b
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-modal.js
@@ -0,0 +1,189 @@
+define([], function() {
+var View = Backbone.View.extend({
+
+    className: "ui-modal",
+
+    // defaults
+    optionsDefault  : {
+        container        : 'body',
+        title            : 'ui-modal',
+        cls              : 'ui-modal',
+        body             : '',
+        backdrop         : true,
+        height           : null,
+        width            : null,
+        closing_events   : false,
+        closing_callback : null,
+        title_separator  : true
+    },
+
+    // button list
+    buttonList: {},
+
+    // initialize
+    initialize: function( options ) {
+        this.options = _.defaults( options || {}, this.optionsDefault );
+        $( this.options.container ).prepend( this.el );
+        // optional render
+        options && this.render();
+    },
+
+    /**
+     * Displays modal
+    */
+    show: function( options ) {
+        if ( options ) {
+            this.options = _.defaults( options, this.optionsDefault );
+            this.render();
+        }
+        if ( !this.visible ) {
+            this.visible = true;
+            this.$el.fadeIn( 'fast' );
+            if ( this.options.closing_events ) {
+                var self = this;
+                $( document ).on( 'keyup.ui-modal', function( e ) { e.keyCode == 27 && self.hide( true ) });
+                this.$backdrop.on( 'click', function() { self.hide( true ) } );
+            }
+        }
+    },
+
+    /**
+     * Hide modal
+    */
+    hide: function( canceled ) {
+        this.visible = false;
+        this.$el.fadeOut( 'fast' );
+        this.options.closing_callback && this.options.closing_callback( canceled );
+        $( document ).off( 'keyup.ui-modal' );
+        this.$backdrop.off( 'click' );
+    },
+
+    /**
+     * Render modal
+    */
+    render: function() {
+        var self = this;
+        this.$el.html( this._template() );
+
+        // link elements
+        this.$header    = this.$( '.modal-header' );
+        this.$dialog    = this.$( '.modal-dialog' );
+        this.$body      = this.$( '.modal-body' );
+        this.$footer    = this.$( '.modal-footer' );
+        this.$backdrop  = this.$( '.modal-backdrop' );
+        this.$buttons   = this.$( '.buttons' );
+
+        if ( this.options.body == 'progress' ) {
+            this.options.body = $( '<div class="progress progress-striped active">' +
+                                        '<div class="progress-bar progress-bar-info" style="width:100%"/>' +
+                                    '</div>' );
+        }
+
+        // fix main content
+        this.$el.removeClass().addClass( 'modal' ).addClass( this.options.cls );
+        this.$header.find( '.title' ).html( this.options.title );
+        this.$body.html( this.options.body );
+
+        // append buttons
+        this.$buttons.empty();
+        this.buttonList = {};
+        if ( this.options.buttons ) {
+            var counter = 0;
+            $.each( this.options.buttons, function( name, callback ) {
+                var $button = $( '<button/>' ).attr( 'id', 'button-' + counter++ ).text( name ).click( callback );
+                self.$buttons.append( $button ).append( ' ' );
+                self.buttonList[ name ] = $button;
+            });
+        } else {
+            this.$footer.hide();
+        }
+
+        // configure background, separator line
+        this.$backdrop[ this.options.backdrop ? 'addClass' : 'removeClass' ]( 'in' );
+        this.$header[ this.options.title_separator ? 'removeClass' : 'addClass' ]( 'no-separator' );
+
+        // fix dimensions
+        // note: because this is a singleton, we need to clear inline styles from any previous invocations
+        this.$body.removeAttr( 'style' );
+        if ( this.options.height ) {
+            this.$body.css( 'height', this.options.height );
+            this.$body.css( 'overflow', 'hidden' );
+        } else {
+            this.$body.css( 'max-height', $( window ).height() / 2 );
+        }
+        if ( this.options.width ) {
+            this.$dialog.css( 'width', this.options.width );
+        }
+    },
+
+    /**
+     * Returns the button dom
+     * @param{String}   name    - Button name/title
+    */
+    getButton: function( name ) {
+        return this.buttonList[ name ];
+    },
+
+    /**
+     * Enables a button
+     * @param{String}   name    - Button name/title
+    */
+    enableButton: function( name ) {
+        this.getButton( name ).prop( 'disabled', false );
+    },
+
+    /**
+     * Disables a button
+     * @param{String}   name    - Button name/title
+    */
+    disableButton: function( name ) {
+        this.getButton( name ).prop( 'disabled', true );
+    },
+
+    /**
+     * Show a button
+     * @param{String}   name    - Button name/title
+    */
+    showButton: function( name ) {
+        this.getButton( name ).show();
+    },
+
+    /**
+     * Hide a button
+     * @param{String}   name    - Button name/title
+    */
+    hideButton: function( name ) {
+        this.getButton( name ).hide();
+    },
+
+    /**
+     * Returns scroll top for body element
+    */
+    scrollTop: function() {
+        return this.$body.scrollTop();
+    },
+
+    /**
+     * Returns the modal template
+    */
+    _template: function() {
+        return  '<div class="modal-backdrop fade"/>' +
+                '<div class="modal-dialog">' +
+                    '<div class="modal-content">' +
+                        '<div class="modal-header">' +
+                            '<h4 class="title"/>' +
+                        '</div>' +
+                        '<div class="modal-body"/>' +
+                        '<div class="modal-footer">' +
+                            '<div class="buttons"/>' +
+                        '</div>' +
+                    '</div>' +
+                '</div>';
+    }
+});
+
+return {
+    View : View
+}
+
+});
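
A minimal usage sketch for the modal view above, assuming a RequireJS setup in
which this file resolves to the module id 'mvc/ui/ui-modal'. The option names
match those read by render() and show(); the title, body and button labels are
illustrative:

    define([ 'mvc/ui/ui-modal' ], function( Modal ) {
        var modal = new Modal.View({
            title          : 'Delete history?',               // illustrative content
            body           : 'This action cannot be undone.',
            closing_events : true,                            // ESC and backdrop click call hide( true )
            buttons        : {
                // button names double as keys for getButton()/enableButton()/disableButton()
                'Cancel' : function() { modal.hide( true ) },
                'Delete' : function() { modal.hide() }
            }
        });
        modal.show();
    });
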
diff --git a/client/galaxy/scripts/mvc/ui/ui-options.js b/client/galaxy/scripts/mvc/ui/ui-options.js
new file mode 100644
index 0000000..6f32c03
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-options.js
@@ -0,0 +1,246 @@
+/** Base class for options-based ui elements **/
+define([ 'utils/utils', 'mvc/ui/ui-buttons' ], function( Utils, Buttons ) {
+var Base = Backbone.View.extend({
+    initialize: function( options ) {
+        var self = this;
+        this.model = options && options.model || new Backbone.Model({
+            visible     : true,
+            data        : [],
+            id          : Utils.uid(),
+            error_text  : 'No options available.',
+            wait_text   : 'Please wait...',
+            multiple    : false,
+            optional    : false,
+            onchange    : function(){}
+        }).set( options );
+        this.listenTo( this.model, 'change:value', this._changeValue, this );
+        this.listenTo( this.model, 'change:wait', this._changeWait, this );
+        this.listenTo( this.model, 'change:data', this._changeData, this );
+        this.listenTo( this.model, 'change:visible', this._changeVisible, this );
+        this.on( 'change', function() { self.model.get( 'onchange' )( self.value() ) } );
+        this.render();
+    },
+
+    render: function() {
+        var self = this;
+        this.$el.empty()
+                .removeClass()
+                .addClass( 'ui-options' )
+                .append( this.$message   = $( '<div/>' ) )
+                .append( this.$menu      = $( '<div/>' ).addClass( 'ui-options-menu' ) )
+                .append( this.$options   = $( this._template() ) );
+
+        // add select/unselect all button
+        this.all_button = null;
+        if ( this.model.get( 'multiple' ) ) {
+            this.all_button = new Buttons.ButtonCheck({
+                onclick: function() {
+                    self.$( 'input' ).prop( 'checked', self.all_button.value() !== 0 );
+                    self.value( self._getValue() );
+                    self.trigger( 'change' );
+                }
+            });
+            this.$menu.append( this.all_button.$el );
+        }
+
+        // finalize dom
+        this._changeData();
+        this._changeWait();
+        this._changeVisible();
+    },
+
+    _changeData: function() {
+        var self = this;
+        this.$options.empty();
+        if ( this._templateOptions ) {
+            this.$options.append( this._templateOptions( this.model.get( 'data' ) ) );
+        } else {
+            _.each( this.model.get( 'data' ), function( option ) {
+                self.$options.append( $( self._templateOption( option ) )
+                                        .addClass( 'ui-option' )
+                                        .tooltip( { title: option.tooltip, placement: 'bottom' } ) );
+            });
+        }
+        this.$( 'input' ).on( 'change', function() {
+            self.value( self._getValue() );
+            self.trigger( 'change' );
+        });
+        this._changeValue();
+        this._changeWait();
+    },
+
+    _changeVisible: function() {
+        this.$el[ this.model.get( 'visible' ) ? 'show' : 'hide' ]();
+    },
+
+    _changeWait: function() {
+        if ( this.model.get( 'wait' ) ) {
+            if ( this.length() === 0 ) {
+                this._messageShow( this.model.get( 'wait_text' ), 'info' );
+                this.$options.hide();
+                this.$menu.hide();
+            }
+        } else {
+            if ( this.length() === 0 ) {
+                this._messageShow( this.model.get( 'error_text' ), 'danger' );
+                this.$options.hide();
+                this.$menu.hide();
+            } else {
+                this.$message.hide();
+                this.$options.css( 'display', 'inline-block' );
+                this.$menu.show();
+            }
+        }
+    },
+
+    _changeValue: function() {
+        this._setValue( this.model.get( 'value' ) );
+        if ( this._getValue() === null && !this.model.get( 'multiple' ) && !this.model.get( 'optional' ) ) {
+            this._setValue( this.first() );
+        }
+        this.all_button && this.all_button.value( $.isArray( this._getValue() ) ? this._getValue().length : 0, this.length() );
+    },
+
+    /** Return/Set current selection */
+    value: function ( new_value ) {
+        new_value !== undefined && this.model.set( 'value', new_value );
+        return this._getValue();
+    },
+
+    /** Return first available option */
+    first: function() {
+        var options = this.$( 'input' ).first();
+        return options.length > 0 ? options.val() : null;
+    },
+
+    /** Show a spinner indicating that the select options are currently being loaded */
+    wait: function() {
+        this.model.set( 'wait', true );
+    },
+
+    /** Hide spinner indicating that the request has been completed */
+    unwait: function() {
+        this.model.set( 'wait', false );
+    },
+
+    /** Returns the number of options */
+    length: function() {
+        return this.$( '.ui-option' ).length;
+    },
+
+    /** Set value to dom */
+    _setValue: function( new_value ) {
+        var self = this;
+        if ( new_value !== undefined ) {
+            this.$( 'input' ).prop( 'checked', false );
+            if ( new_value !== null ) {
+                var values = $.isArray( new_value ) ? new_value : [ new_value ];
+                _.each( values, function( v ) {
+                    self.$( 'input[value="' + v + '"]' ).first().prop( 'checked', true );
+                });
+            }
+        }
+    },
+
+    /** Return current selection */
+    _getValue: function() {
+        var selected = [];
+        this.$( ':checked' ).each( function() {
+            selected.push( $( this ).val() );
+        });
+        if ( Utils.isEmpty( selected ) ) {
+            return null;
+        }
+        return this.model.get( 'multiple' ) ? selected : selected[ 0 ];
+    },
+
+    /** Show message instead of options */
+    _messageShow: function( text, status ) {
+        this.$message.show()
+                     .removeClass()
+                     .addClass( 'ui-message alert alert-' + status )
+                     .html( text );
+    },
+
+    /** Main template function */
+    _template: function() {
+        return $( '<div/>' ).addClass( 'ui-options-list' );
+    }
+});
+
+/** Iconized **/
+var BaseIcons = Base.extend({
+    _templateOption: function( pair ) {
+        var id = Utils.uid();
+        return  $( '<div/>' ).addClass( 'ui-option' )
+                    .append( $( '<input/>' ).attr( {
+                        id      : id,
+                        type    : this.model.get( 'type' ),
+                        name    : this.model.id,
+                        value   : pair.value } ) )
+                    .append( $( '<label/>' ).addClass( 'ui-options-label' )
+                                            .attr( 'for', id )
+                                            .html( pair.label ) );
+    }
+});
+
+/** Radio button field **/
+var Radio = {};
+Radio.View = BaseIcons.extend({
+    initialize: function( options ) {
+        options.type = 'radio';
+        BaseIcons.prototype.initialize.call( this, options );
+    }
+});
+
+/** Checkbox options field **/
+var Checkbox = {};
+Checkbox.View = BaseIcons.extend({
+    initialize: function( options ) {
+        options.type = 'checkbox';
+        options.multiple = true;
+        BaseIcons.prototype.initialize.call( this, options );
+    }
+});
+
+/** Radio button options field styled as classic buttons **/
+var RadioButton = {};
+RadioButton.View = Base.extend({
+    initialize: function( options ) {
+        Base.prototype.initialize.call( this, options );
+    },
+
+    /** Set current value */
+    _setValue: function ( new_value ) {
+        if ( new_value !== undefined ) {
+            this.$( 'input' ).prop( 'checked', false );
+            this.$( 'label' ).removeClass( 'active' );
+            this.$( '[value="' + new_value + '"]' ).prop( 'checked', true ).closest( 'label' ).addClass( 'active' );
+        }
+    },
+
+    /** Template for a single option */
+    _templateOption: function( pair ) {
+        var $el =  $( '<label/>' ).addClass( 'btn btn-default' );
+        pair.icon && $el.append( $( '<i/>' ).addClass( 'fa' ).addClass( pair.icon ).addClass( !pair.label && 'no-padding' ) );
+        $el.append( $( '<input/>' ).attr( { type: 'radio', name: this.model.id, value: pair.value } ) );
+        pair.label && $el.append( pair.label );
+        return $el;
+    },
+
+    /** Main template function */
+    _template: function() {
+        return $( '<div/>' ).addClass( 'btn-group ui-radiobutton' ).attr( 'data-toggle', 'buttons' );
+    }
+});
+
+return {
+    Base        : Base,
+    BaseIcons   : BaseIcons,
+    Radio       : Radio,
+    RadioButton : RadioButton,
+    Checkbox    : Checkbox
+};
+
+});
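
A usage sketch for the views above, under the same RequireJS assumption
('mvc/ui/ui-options'); the data entries and the onchange handler are
illustrative:

    define([ 'mvc/ui/ui-options' ], function( Options ) {
        var checkboxes = new Options.Checkbox.View({
            data     : [ { value: 'a', label: 'Option A' },   // illustrative entries
                         { value: 'b', label: 'Option B' } ],
            value    : [ 'a' ],                               // initial selection
            onchange : function( new_value ) { console.log( new_value ) }
        });
        $( 'body' ).append( checkboxes.$el );
        checkboxes.value( [ 'a', 'b' ] );                     // select both programmatically
    });
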
diff --git a/client/galaxy/scripts/mvc/ui/ui-popover.js b/client/galaxy/scripts/mvc/ui/ui-popover.js
new file mode 100644
index 0000000..3227a1c
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-popover.js
@@ -0,0 +1,175 @@
+/**
+ * Popover wrapper
+*/
+define([ 'utils/utils' ], function( Utils ) {
+var View = Backbone.View.extend({
+    optionsDefault: {
+        with_close  : true,
+        title       : null,
+        placement   : 'top',
+        container   : 'body',
+        body        : null
+    },
+
+    initialize: function ( options ) {
+        this.setElement( this._template() );
+        this.uid = Utils.uid();
+        this.options = _.defaults( options || {}, this.optionsDefault );
+        this.options.container.parent().append( this.el );
+        this.$title = this.$( '.popover-title-label' );
+        this.$close = this.$( '.popover-close' );
+        this.$body  = this.$( '.popover-content' );
+
+        // add initial content
+        this.options.body && this.append( this.options.body );
+
+        // add event to hide if click is outside of popup and not on container
+        var self = this;
+        $( 'body' ).on( 'mousedown.' + this.uid,  function( e ) {
+            // the 'is' for buttons that trigger popups
+            // the 'has' for icons within a button that triggers a popup
+            self.visible && !$( self.options.container ).is( e.target ) && !$( self.el ).is( e.target ) &&
+                $( self.el ).has( e.target ).length === 0 && self.hide();
+        });
+    },
+
+    /**
+     * Render popover
+    */
+    render: function() {
+        this.$title.html( this.options.title );
+        this.$el.removeClass().addClass( 'ui-popover popover fade in' ).addClass( this.options.placement );
+        this.$el.css( this._get_placement( this.options.placement ) );
+
+        // configure close option
+        var self = this;
+        if ( this.options.with_close ) {
+            this.$close.on( 'click', function() { self.hide() } ).show();
+        } else {
+            this.$close.off().hide();
+        }
+    },
+
+    /**
+     * Set the popover title
+     * @params{ String }    newTitle    - New popover title
+    */
+    title: function( newTitle ) {
+        if ( newTitle !== undefined ) {
+            this.options.title = newTitle;
+            this.$title.html( newTitle );
+        }
+    },
+
+    /**
+     * Show popover
+    */
+    show: function() {
+        this.render();
+        this.$el.show();
+        this.visible = true;
+    },
+
+    /**
+     * Hide popover
+    */
+    hide: function() {
+        this.$el.hide();
+        this.visible = false;
+    },
+
+    /**
+     * Append new content to the popover
+     * @params{ Object }  $el - Dom element
+    */
+    append: function( $el ) {
+        this.$body.append( $el );
+    },
+
+    /**
+     * Remove all content
+    */
+    empty: function() {
+        this.$body.empty();
+    },
+
+    /**
+     * Remove popover
+    */
+    remove: function() {
+        $( 'body' ).off( 'mousedown.' + this.uid );
+        this.$el.remove();
+    },
+
+    /**
+     * Improve popover location/placement
+    */
+    _get_placement: function( placement ) {
+        // get popover dimensions
+        var width               = this._get_width( this.$el );
+        var height              = this.$el.height();
+
+        // get container details
+        var $container = this.options.container;
+        var container_width     = this._get_width( $container );
+        var container_height    = this._get_height( $container );
+        var container_position  = $container.position();
+
+        // get position
+        var top = 0, left = 0;
+        if ( [ 'top', 'bottom' ].indexOf( placement ) != -1 ) {
+            left = container_position.left - width + ( container_width + width ) / 2;
+            switch ( placement ) {
+                case 'top':
+                    top = container_position.top - height - 5;
+                    break;
+                case 'bottom':
+                    top = container_position.top + container_height + 5;
+                    break;
+            }
+        } else {
+            top = container_position.top - height + ( container_height + height ) / 2;
+            switch ( placement ) {
+                case 'right':
+                    left = container_position.left + container_width;
+                    break;
+            }
+        }
+        return { top: top, left: left };
+    },
+
+    /**
+     * Returns padding/margin corrected width
+    */
+    _get_width: function( $el ) {
+        return $el.width() + parseInt( $el.css( 'padding-left' ) ) + parseInt( $el.css( 'margin-left' ) ) +
+                             parseInt( $el.css( 'padding-right' ) ) + parseInt( $el.css( 'margin-right' ) );
+    },
+
+    /**
+     * Returns padding corrected height
+    */
+    _get_height: function( $el ) {
+        return $el.height() + parseInt( $el.css( 'padding-top' ) ) + parseInt( $el.css( 'padding-bottom' ) );
+    },
+
+    /**
+     * Return the popover template
+    */
+    _template: function( options ) {
+        return  '<div class="ui-popover popover fade in">' +
+                    '<div class="arrow"/>' +
+                    '<div class="popover-title">' +
+                        '<div class="popover-title-label"/>' +
+                        '<div class="popover-close fa fa-times-circle"/>' +
+                    '</div>' +
+                    '<div class="popover-content"/>' +
+                '</div>';
+    }
+});
+
+return {
+    View: View
+}
+
+});
\ No newline at end of file
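
A usage sketch for the popover above ('mvc/ui/ui-popover'). The container
option must be a jQuery element already attached to the DOM, since the view
inserts itself next to it and positions against it; '#my-button' is a
hypothetical trigger:

    define([ 'mvc/ui/ui-popover' ], function( Popover ) {
        var $button = $( '#my-button' );            // hypothetical trigger element
        var popover = new Popover.View({
            title     : 'Details',
            container : $button,
            placement : 'bottom'
        });
        popover.append( $( '<div/>' ).text( 'Popover body content' ) );
        $button.on( 'click', function() { popover.visible ? popover.hide() : popover.show() } );
    });
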
diff --git a/client/galaxy/scripts/mvc/ui/ui-portlet.js b/client/galaxy/scripts/mvc/ui/ui-portlet.js
new file mode 100644
index 0000000..6572034
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-portlet.js
@@ -0,0 +1,198 @@
+define([ 'utils/utils', 'mvc/ui/ui-misc' ], function( Utils, Ui ) {
+var View = Backbone.View.extend({
+    visible     : false,
+    initialize  : function( options ) {
+        var self = this;
+        this.model = options && options.model || new Backbone.Model( {
+            id                  : Utils.uid(),
+            cls                 : 'ui-portlet',
+            title               : '',
+            icon                : '',
+            buttons             : null,
+            body                : null,
+            scrollable          : true,
+            nopadding           : false,
+            operations          : null,
+            collapsible         : false,
+            collapsible_button  : false,
+            collapsed           : false
+        } ).set( options );
+        this.setElement( this._template() );
+
+        // link all dom elements
+        this.$body          = this.$( '.portlet-body' );
+        this.$title_text    = this.$( '.portlet-title-text' );
+        this.$title_icon    = this.$( '.portlet-title-icon' );
+        this.$header        = this.$( '.portlet-header' );
+        this.$content       = this.$( '.portlet-content' );
+        this.$backdrop      = this.$( '.portlet-backdrop' );
+        this.$buttons       = this.$( '.portlet-buttons' );
+        this.$operations    = this.$( '.portlet-operations' );
+
+        // add body to component list
+        this.model.get( 'body' ) && this.append( this.model.get( 'body' ) );
+
+        // add icon for collapsible option
+        this.collapsible_button = new Ui.ButtonIcon({
+            icon    : 'fa-eye',
+            tooltip : 'Collapse/Expand',
+            cls     : 'ui-button-icon-plain',
+            onclick : function() { self[ self.collapsed ? 'expand' : 'collapse' ]() }
+        });
+        this.render();
+    },
+
+    render: function() {
+        var self = this;
+        var options = this.model.attributes;
+        this.$el.removeClass().addClass( options.cls ).attr( 'id', options.id );
+        this.$header[ options.title ? 'show' : 'hide' ]();
+        this.$title_text.html( options.title );
+        _.each( [ this.$content, this.$body ], function( $el ) {
+            $el[ options.nopadding ? 'addClass' : 'removeClass' ]( 'no-padding' );
+        });
+
+        // render title icon
+        if ( options.icon ) {
+            this.$title_icon.removeClass().addClass( 'portlet-title-icon fa' ).addClass( options.icon ).show();
+        } else {
+            this.$title_icon.hide();
+        }
+
+        // make portlet collapsible
+        this.$title_text[ options.collapsible ? 'addClass' : 'removeClass' ]( 'no-highlight collapsible' ).off();
+        if ( options.collapsible ) {
+            this.$title_text.on( 'click', function() { self[ self.collapsed ? 'expand' : 'collapse' ]() } );
+            options.collapsed ? this.collapse() : this.expand();
+        }
+
+        // render buttons
+        if ( options.buttons ) {
+            this.$buttons.empty().show();
+            $.each( this.model.get( 'buttons' ), function( name, item ) {
+                item.$el.prop( 'id', name );
+                self.$buttons.append( item.$el );
+            });
+        } else {
+            this.$buttons.hide();
+        }
+
+        // render operations
+        this.$operations.empty();
+        if ( options.collapsible_button ) {
+            this.$operations.append( this.collapsible_button.$el );
+        }
+        if ( options.operations ) {
+            $.each( options.operations, function( name, item ) {
+                item.$el.prop( 'id', name );
+                self.$operations.append( item.$el );
+            });
+        }
+        return this;
+    },
+
+    /** Append new doms to body */
+    append: function( $el ) {
+        this.$body.append( $el );
+    },
+
+    /** Remove all content */
+    empty: function() {
+        this.$body.empty();
+    },
+
+    /** Return header element */
+    header: function() {
+        return this.$header;
+    },
+
+    /** Return body element */
+    body: function() {
+        return this.$body;
+    },
+
+    /** Show portlet */
+    show: function(){
+        this.visible = true;
+        this.$el.fadeIn( 'fast' );
+    },
+
+    /** Hide portlet */
+    hide: function(){
+        this.visible = false;
+        this.$el.hide();
+    },
+
+    /** Enable a particular button */
+    enableButton: function( id ) {
+        this.$buttons.find( '#' + id ).prop( 'disabled', false );
+    },
+
+    /** Disable a particular button */
+    disableButton: function( id ) {
+        this.$buttons.find( '#' + id ).prop( 'disabled', true );
+    },
+
+    /** Hide a particular operation */
+    hideOperation: function( id ) {
+        this.$operations.find( '#' + id ).hide();
+    },
+
+    /** Show a particular operation */
+    showOperation: function( id ) {
+        this.$operations.find( '#' + id ).show();
+    },
+
+    /** Replaces the event callback of an existing operation */
+    setOperation: function( id, callback ) {
+        this.$operations.find( '#' + id ).off( 'click' ).on( 'click', callback );
+    },
+
+    /** Change title */
+    title: function( new_title ) {
+        new_title && this.$title_text.html( new_title );
+        return this.$title_text.html();
+    },
+
+    /** Collapse portlet */
+    collapse: function() {
+        this.collapsed = true;
+        this.$content.height( '0%' );
+        this.$body.hide();
+        this.collapsible_button.setIcon( 'fa-eye-slash' );
+    },
+
+    /** Expand portlet */
+    expand: function() {
+        this.collapsed = false;
+        this.$content.height( '100%' );
+        this.$body.fadeIn( 'fast' );
+        this.collapsible_button.setIcon( 'fa-eye' );
+    },
+
+    /** Disable content access */
+    disable: function() {
+        this.$backdrop.show();
+    },
+
+    /** Enable content access */
+    enable: function() {
+        this.$backdrop.hide();
+    },
+
+    _template: function() {
+        return $( '<div/>' ).append( $( '<div/>' ).addClass( 'portlet-header' )
+                                .append( $( '<div/>' ).addClass( 'portlet-operations' ) )
+                                .append( $( '<div/>' ).addClass( 'portlet-title' )
+                                    .append( $( '<i/>' ).addClass( 'portlet-title-icon' ) )
+                                    .append( $( '<span/>' ).addClass( 'portlet-title-text' ) ) ) )
+                            .append( $( '<div/>' ).addClass( 'portlet-content' )
+                                .append( $( '<div/>' ).addClass( 'portlet-body' ) )
+                                .append( $( '<div/>' ).addClass( 'portlet-buttons' ) ) )
+                            .append( $( '<div/>' ).addClass( 'portlet-backdrop' ) );
+    }
+});
+return {
+    View : View
+}
+});
\ No newline at end of file
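
A usage sketch for the portlet above ('mvc/ui/ui-portlet'). It borrows
Ui.ButtonIcon from 'mvc/ui/ui-misc', which the portlet itself already uses for
its collapse button; title, icon and operation names are illustrative:

    define([ 'mvc/ui/ui-portlet', 'mvc/ui/ui-misc' ], function( Portlet, Ui ) {
        var portlet = new Portlet.View({
            title              : 'Settings',        // illustrative title/icon
            icon               : 'fa-cog',
            collapsible        : true,
            collapsible_button : true,
            operations         : {
                // keys become the ids used by showOperation()/hideOperation()/setOperation()
                refresh : new Ui.ButtonIcon({
                    icon    : 'fa-refresh',
                    tooltip : 'Reload',
                    onclick : function() { portlet.disable() }
                })
            }
        });
        portlet.append( $( '<div/>' ).text( 'Portlet body' ) );
        $( 'body' ).append( portlet.$el );
    });
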
diff --git a/client/galaxy/scripts/mvc/ui/ui-select-content.js b/client/galaxy/scripts/mvc/ui/ui-select-content.js
new file mode 100644
index 0000000..0d1c911
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-select-content.js
@@ -0,0 +1,287 @@
+define([ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-select-default' ], function( Utils, Ui, Select ) {
+
+/** Batch mode variations */
+var Batch = { DISABLED: 'disabled', ENABLED: 'enabled', LINKED: 'linked' };
+
+/** List of available content selector configurations */
+var Configurations = {
+    data: [
+        { src: 'hda',  icon: 'fa-file-o',   tooltip: 'Single dataset',       multiple: false,   batch: Batch.DISABLED   },
+        { src: 'hda',  icon: 'fa-files-o',  tooltip: 'Multiple datasets',    multiple: true,    batch: Batch.LINKED     },
+        { src: 'hdca', icon: 'fa-folder-o', tooltip: 'Dataset collection',   multiple: false,   batch: Batch.LINKED     } ],
+    data_multiple: [
+        { src: 'hda',  icon: 'fa-files-o',  tooltip: 'Multiple datasets',    multiple: true,    batch: Batch.DISABLED   },
+        { src: 'hdca', icon: 'fa-folder-o', tooltip: 'Dataset collection',   multiple: false,   batch: Batch.DISABLED   } ],
+    data_collection: [
+        { src: 'hdca', icon: 'fa-folder-o', tooltip: 'Dataset collection',   multiple: false,   batch: Batch.DISABLED   } ],
+    workflow_data: [
+        { src: 'hda',  icon: 'fa-file-o',   tooltip: 'Single dataset',       multiple: false,   batch: Batch.DISABLED   } ],
+    workflow_data_multiple: [
+        { src: 'hda',  icon: 'fa-files-o',  tooltip: 'Multiple datasets',    multiple: true,    batch: Batch.DISABLED   } ],
+    workflow_data_collection: [
+        { src: 'hdca', icon: 'fa-folder-o', tooltip: 'Dataset collection',   multiple: false,   batch: Batch.DISABLED   } ],
+    module_data: [
+        { src: 'hda',  icon: 'fa-file-o',   tooltip: 'Single dataset',       multiple: false,   batch: Batch.DISABLED   },
+        { src: 'hda',  icon: 'fa-files-o',  tooltip: 'Multiple datasets',    multiple: true,    batch: Batch.ENABLED    } ],
+    module_data_collection: [
+        { src: 'hdca', icon: 'fa-folder-o', tooltip: 'Dataset collection',   multiple: false,   batch: Batch.DISABLED   },
+        { src: 'hdca', icon: 'fa-folder',   tooltip: 'Multiple collections', multiple: true,    batch: Batch.ENABLED    } ]
+};
+
+/** View for hda and hdca content selector ui elements */
+var View = Backbone.View.extend({
+    initialize : function( options ) {
+        var self = this;
+        this.model = options && options.model || new Backbone.Model({
+            src_labels  : { 'hda' : 'dataset', 'hdca': 'dataset collection' },
+            pagelimit   : 100
+        }).set( options );
+        this.setElement( $( '<div/>' ).addClass( 'ui-select-content' ) );
+        this.button_product = new Ui.RadioButton.View( {
+            value       : 'false',
+            data        : [ { icon: 'fa fa-chain', value: 'false',
+                              tooltip: 'Linked inputs will be run in matched order with other datasets, e.g. use this for matching forward and reverse reads.' },
+                            { icon: 'fa fa-chain-broken',  value: 'true',
+                              tooltip: 'Unlinked dataset inputs will be run against *all* other inputs.' } ] } );
+        var $batch_div = $( '<div/>' ).addClass( 'ui-form-info' )
+                            .append( $( '<i/>' ).addClass( 'fa fa-sitemap' ) )
+                            .append( $( '<span/>' ).html( 'This is a batch mode input field. Separate jobs will be triggered for each dataset selection.' ) );
+        this.$batch = {
+            linked  : $batch_div.clone(),
+            enabled : $batch_div.clone().append( $( '<div/>' )
+                                                    .append( $( '<div/>' ).addClass( 'ui-form-title' ).html( 'Batch options:' ) )
+                                                    .append( this.button_product.$el ) )
+                                                    .append( $( '<div/>' ).css( 'clear', 'both' ) )
+        };
+
+        // track current history elements
+        this.history = {};
+
+        // add listeners
+        this.listenTo( this.model, 'change:data', this._changeData, this );
+        this.listenTo( this.model, 'change:wait', this._changeWait, this );
+        this.listenTo( this.model, 'change:current', this._changeCurrent, this );
+        this.listenTo( this.model, 'change:value', this._changeValue, this );
+        this.listenTo( this.model, 'change:type change:optional change:multiple change:extensions', this._changeType, this );
+        this.render();
+
+        // add change event
+        this.on( 'change', function() { options.onchange && options.onchange( self.value() ) } );
+    },
+
+    render: function() {
+        this._changeType();
+        this._changeValue();
+        this._changeWait();
+    },
+
+    /** Indicate that select fields are being updated */
+    wait: function() {
+        this.model.set( 'wait', true );
+    },
+
+    /** Indicate that the options update has been completed */
+    unwait: function() {
+        this.model.set( 'wait', false );
+    },
+
+    /** Update data representing selectable options */
+    update: function( options ) {
+        this.model.set( 'data', options );
+    },
+
+    /** Return the currently selected dataset values */
+    value: function ( new_value ) {
+        new_value !== undefined && this.model.set( 'value', new_value );
+        var current = this.model.get( 'current' );
+        if ( this.config[ current ] ) {
+            var id_list = this.fields[ current ].value();
+            if (id_list !== null) {
+                id_list = $.isArray( id_list ) ? id_list : [ id_list ];
+                if ( id_list.length > 0 ) {
+                    var result = this._batch( { values: [] } );
+                    for ( var i in id_list ) {
+                        var details = this.history[ id_list[ i ] + '_' + this.config[ current ].src ];
+                        if ( details ) {
+                            result.values.push( details );
+                        } else {
+                            Galaxy.emit.debug( 'ui-select-content::value()', 'Requested details not found for \'' + id_list[ i ] + '\'.'  );
+                            return null;
+                        }
+                    }
+                    result.values.sort( function( a, b ) { return a.hid - b.hid } );
+                    return result;
+                }
+            }
+        } else {
+            Galaxy.emit.debug( 'ui-select-content::value()', 'Invalid value/source \'' + new_value + '\'.'  );
+        }
+        return null;
+    },
+
+    /** Change of current select field */
+    _changeCurrent: function() {
+        var self = this;
+        _.each( this.fields, function( field, i ) {
+            if ( self.model.get( 'current' ) == i ) {
+                field.$el.show();
+                _.each( self.$batch, function( $batchfield, batchmode ) {
+                    $batchfield[ self.config[ i ].batch == batchmode ? 'show' : 'hide' ]();
+                });
+                self.button_type.value( i );
+            } else {
+                field.$el.hide();
+            }
+        });
+    },
+
+    /** Change of type */
+    _changeType: function() {
+        var self = this;
+
+        // identify selector type identifier i.e. [ flavor ]_[ type ]_[ multiple ]
+        var config_id = ( this.model.get( 'flavor' ) ? this.model.get( 'flavor' ) + '_' : '' ) +
+                        String( this.model.get( 'type' ) ) + ( this.model.get( 'multiple' ) ? '_multiple' : '' );
+        if ( Configurations[ config_id ] ) {
+            this.config = Configurations[ config_id ];
+        } else {
+            this.config = Configurations[ 'data' ];
+            Galaxy.emit.debug( 'ui-select-content::_changeType()', 'Invalid configuration/type id \'' + config_id + '\'.'  );
+        }
+
+        // prepare extension component of error message
+        var data = self.model.get( 'data' );
+        var extensions = Utils.textify( this.model.get( 'extensions' ) );
+        var src_labels = this.model.get( 'src_labels' );
+
+        // build views
+        this.fields = [];
+        this.button_data = [];
+        _.each( this.config, function( c, i ) {
+            self.button_data.push({
+                value   : i,
+                icon    : c.icon,
+                tooltip : c.tooltip
+            });
+            self.fields.push(
+                new Select.View({
+                    optional    : self.model.get( 'optional' ),
+                    multiple    : c.multiple,
+                    searchable  : !c.multiple || ( data && data[ c.src ] && data[ c.src ].length > self.model.get( 'pagelimit' ) ),
+                    selectall   : false,
+                    error_text  : 'No ' + ( extensions ? extensions + ' ' : '' ) + ( src_labels[ c.src ] || 'content' ) + ' available.',
+                    onchange    : function() {
+                        self.trigger( 'change' );
+                    }
+                })
+            );
+        });
+        this.button_type = new Ui.RadioButton.View({
+            value   : this.model.get( 'current' ),
+            data    : this.button_data,
+            onchange: function( value ) {
+                self.model.set( 'current', value );
+                self.trigger( 'change' );
+            }
+        });
+
+        // append views
+        this.$el.empty();
+        var button_width = 0;
+        if ( this.fields.length > 1 ) {
+            this.$el.append( this.button_type.$el );
+            button_width = Math.max( 0, this.fields.length * 36 ) + 'px';
+        }
+        _.each( this.fields, function( field ) {
+            self.$el.append( field.$el.css( { 'margin-left': button_width } ) );
+        });
+        _.each( this.$batch, function( $batchfield, batchmode ) {
+            self.$el.append( $batchfield.css( { 'margin-left': button_width } ) );
+        });
+        this.model.set( 'current', 0 );
+        this._changeCurrent();
+        this._changeData();
+    },
+
+    /** Change of wait flag */
+    _changeWait: function() {
+        var self = this;
+        _.each( this.fields, function( field ) { field[ self.model.get( 'wait' ) ? 'wait' : 'unwait' ]() } );
+    },
+
+    /** Change of available options */
+    _changeData: function() {
+        var options = this.model.get( 'data' );
+        var self = this;
+        var select_options = {};
+        _.each( options, function( items, src ) {
+            select_options[ src ] = [];
+            _.each( items, function( item ) {
+                select_options[ src ].push({
+                    hid  : item.hid,
+                    keep : item.keep,
+                    label: item.hid + ': ' + item.name,
+                    value: item.id
+                });
+                self.history[ item.id + '_' + src ] = item;
+            });
+        });
+        _.each( this.config, function( c, i ) {
+            select_options[ c.src ] && self.fields[ i ].add( select_options[ c.src ], function( a, b ) { return b.hid - a.hid } );
+        });
+    },
+
+    /** Change of incoming value */
+    _changeValue: function () {
+        var new_value = this.model.get( 'value' );
+        if ( new_value && new_value.values && new_value.values.length > 0 ) {
+            // create list with content ids
+            var list = [];
+            _.each( new_value.values, function( value ) {
+                list.push( value.id );
+            });
+            // sniff first suitable field type from config list
+            var src = new_value.values[ 0 ].src;
+            var multiple = new_value.values.length > 1;
+            for( var i = 0; i < this.config.length; i++ ) {
+                var field = this.fields[ i ];
+                var c = this.config[ i ];
+                if ( c.src == src && [ multiple, true ].indexOf( c.multiple ) !== -1 ) {
+                    this.model.set( 'current', i );
+                    field.value( list );
+                    break;
+                }
+            }
+        } else {
+            _.each( this.fields, function( field ) {
+                field.value( null );
+            });
+        }
+    },
+
+    /** Assists in identifying the batch mode */
+    _batch: function( result ) {
+        result[ 'batch' ] = false;
+        var current = this.model.get( 'current' );
+        var config = this.config[ current ];
+        if ( config.src == 'hdca' && !config.multiple ) {
+            var hdca = this.history[ this.fields[ current ].value() + '_hdca' ];
+            if ( hdca && hdca.map_over_type ) {
+                result[ 'batch' ] = true;
+            }
+        }
+        if ( config.batch == Batch.LINKED || config.batch == Batch.ENABLED ) {
+            result[ 'batch' ] = true;
+            if ( config.batch == Batch.ENABLED && this.button_product.value() === 'true' ) {
+                result[ 'product' ] = true;
+            }
+        }
+        return result;
+    }
+});
+
+return {
+    View: View
+}
+
+});
\ No newline at end of file
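
A usage sketch for the content selector above ('mvc/ui/ui-select-content'); it
assumes a running Galaxy client context, since the view logs through
Galaxy.emit. The update() payload is keyed by source exactly as _changeData()
expects, and the dataset entry is illustrative:

    define([ 'mvc/ui/ui-select-content' ], function( SelectContent ) {
        var select = new SelectContent.View({
            type     : 'data',                      // resolves to the 'data' entry of Configurations
            optional : false,
            onchange : function( new_value ) { console.log( new_value ) }
        });
        select.update({
            hda  : [ { id: 'f2db41e1fa331b3e', hid: 1, name: 'reads.fastq' } ],   // illustrative dataset
            hdca : []
        });
        $( 'body' ).append( select.$el );
    });
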
diff --git a/client/galaxy/scripts/mvc/ui/ui-select-default.js b/client/galaxy/scripts/mvc/ui/ui-select-default.js
new file mode 100644
index 0000000..b96f3fd
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-select-default.js
@@ -0,0 +1,332 @@
+/**
+ *  This class creates/wraps a default HTML select field as a Backbone view.
+ */
+define([ 'utils/utils', 'mvc/ui/ui-buttons' ], function( Utils, Buttons ) {
+var View = Backbone.View.extend({
+    initialize: function( options ) {
+        var self = this;
+        this.data  = [];
+        this.data2 = [];
+        this.model = options && options.model || new Backbone.Model({
+            id          : Utils.uid(),
+            cls         : 'ui-select',
+            error_text  : 'No options available',
+            empty_text  : 'Nothing selected',
+            visible     : true,
+            wait        : false,
+            multiple    : false,
+            searchable  : true,
+            optional    : false,
+            disabled    : false,
+            onchange    : function(){},
+            value       : null,
+            selectall   : true,
+            pagesize    : 20
+        }).set( options );
+        this.on( 'change', function() { self.model.get( 'onchange' ) && self.model.get( 'onchange' )( self.value() ) } );
+        this.listenTo( this.model, 'change:data', this._changeData, this );
+        this.listenTo( this.model, 'change:disabled', this._changeDisabled, this );
+        this.listenTo( this.model, 'change:wait', this._changeWait, this );
+        this.listenTo( this.model, 'change:visible', this._changeVisible, this );
+        this.listenTo( this.model, 'change:value', this._changeValue, this );
+        this.listenTo( this.model, 'change:multiple change:searchable change:cls change:id', this.render, this );
+        this.render();
+    },
+
+    render: function() {
+        var self = this;
+        this.model.get( 'searchable' ) ? this._renderSearchable() : this._renderClassic();
+        this.$el.addClass( this.model.get( 'cls' ) )
+                .attr( 'id', this.model.get( 'id' ) );
+        this.$select.empty().addClass( 'select' )
+                    .attr( 'id', this.model.get( 'id' ) + '_select' )
+                    .prop( 'multiple', this.model.get( 'multiple' ) )
+                    .on( 'change', function() {
+                        self.value( self._getValue() );
+                        self.trigger( 'change' );
+                    });
+        this._changeData();
+        this._changeWait();
+        this._changeVisible();
+        this._changeDisabled();
+    },
+
+    /** Renders the classic selection field */
+    _renderClassic: function() {
+        var self = this;
+        this.$el.addClass( this.model.get( 'multiple' ) ? 'ui-select-multiple' : 'ui-select' )
+                .append( this.$select      = $( '<select/>' ) )
+                .append( this.$dropdown    = $( '<div/>' ) )
+                .append( this.$resize      = $( '<div/>' )
+                .append( this.$resize_icon = $( '<i/>' ) ) );
+        if ( this.model.get( 'multiple' ) ) {
+            this.$dropdown.hide();
+            this.$resize_icon.addClass( 'fa fa-angle-double-right fa-rotate-45' ).show();
+            this.$resize.removeClass()
+                        .addClass( 'icon-resize' )
+                        .show()
+                        .off( 'mousedown' ).on( 'mousedown', function( event ) {
+                            var currentY = event.pageY;
+                            var currentHeight = self.$select.height();
+                            self.minHeight = self.minHeight || currentHeight;
+                            $( '#dd-helper' ).show().on( 'mousemove', function( event ) {
+                                self.$select.height( Math.max( currentHeight + ( event.pageY - currentY ), self.minHeight ) );
+                            }).on( 'mouseup mouseleave', function() {
+                                $( '#dd-helper' ).hide().off();
+                            });
+                        });
+        } else {
+            this.$dropdown.show();
+            this.$resize.hide();
+            this.$resize_icon.hide();
+        }
+    },
+
+    /** Renders the default select2 field */
+    _renderSearchable: function() {
+        var self = this;
+        this.$el.append( this.$select   = $( '<div/>' ) )
+                .append( this.$dropdown = $( '<div/>' ) );
+        this.$dropdown.hide();
+        if ( !this.model.get( 'multiple' ) ) {
+            this.$dropdown.show().on( 'click', function() {
+                self.$select.select2 && self.$select.select2( 'open' );
+            });
+        }
+        this.all_button = null;
+        if ( this.model.get( 'multiple' ) && this.model.get( 'selectall' ) ) {
+            this.all_button = new Buttons.ButtonCheck({
+                onclick: function() {
+                    var new_value = [];
+                    self.all_button.value() !== 0 && _.each( self.model.get( 'data' ), function( option ) {
+                        new_value.push( option.value );
+                    });
+                    self.value( new_value );
+                    self.trigger( 'change' );
+                }
+            });
+            this.$el.prepend( this.all_button.$el );
+        }
+    },
+
+    /** Updates the selection options */
+    _changeData: function() {
+        var self = this;
+        this.data = [];
+        if ( !this.model.get( 'multiple' ) && this.model.get( 'optional' ) ) {
+            this.data.push( { value: '__null__', label: self.model.get( 'empty_text' ) } );
+        }
+        _.each( this.model.get( 'data' ), function( option ) {
+            self.data.push( option );
+        });
+        if ( this.length() == 0 ) {
+            this.data.push( { value: '__null__', label: this.model.get( 'error_text' ) } );
+        }
+        if ( this.model.get( 'searchable' ) ) {
+            this.data2 = [];
+            _.each( this.data, function( option, index ) {
+                self.data2.push( { order: index, id: option.value, text: option.label } );
+            });
+            this.$select.data( 'select2' ) && this.$select.select2( 'destroy' );
+            this.$select.select2({
+                data            : self.data2,
+                closeOnSelect   : !this.model.get( 'multiple' ),
+                multiple        : this.model.get( 'multiple' ),
+                query           : function( q ) {
+                    var pagesize = self.model.get( 'pagesize' );
+                    var results = _.filter( self.data2, function ( e ) {
+                        return !q.term || q.term == '' || e.text.toUpperCase().indexOf( q.term.toUpperCase() ) >= 0;
+                    });
+                    q.callback({
+                        results: results.slice( ( q.page - 1 ) * pagesize, q.page * pagesize ),
+                        more   : results.length >= q.page * pagesize
+                    });
+                }
+            });
+            this.$( '.select2-container .select2-search input' ).off( 'blur' );
+        } else {
+            this.$select.find( 'option' ).remove();
+            _.each( this.data, function( option ) {
+                self.$select.append( $( '<option/>' ).attr( 'value', option.value ).html( _.escape( option.label ) ) );
+            });
+        }
+        this.model.set( 'disabled', this.length() == 0 );
+        this._changeValue();
+    },
+
+    /** Handles field enabling/disabling, usually used when no options are available */
+    _changeDisabled: function() {
+        if ( this.model.get( 'searchable' ) ) {
+            this.$select.select2( this.model.get( 'disabled' ) ? 'disable' : 'enable' );
+        } else {
+            this.$select.prop( 'disabled', this.model.get( 'disabled' ) );
+        }
+    },
+
+    /** Searchable fields may display a spinner e.g. while waiting for a server response */
+    _changeWait: function() {
+        this.$dropdown.removeClass()
+                      .addClass( 'icon-dropdown fa' )
+                      .addClass( this.model.get( 'wait' ) ? 'fa-spinner fa-spin' : 'fa-caret-down' );
+    },
+
+    /** Handles field visibility */
+    _changeVisible: function() {
+        this.$el[ this.model.get( 'visible' ) ? 'show' : 'hide' ]();
+        this.$select[ this.model.get( 'visible' ) ? 'show' : 'hide' ]();
+    },
+
+    /** Synchronizes the model value with the actually selected field value */
+    _changeValue: function() {
+        this._setValue( this.model.get( 'value' ) );
+        if ( this.model.get( 'multiple' ) ) {
+            if ( this.all_button ) {
+                var value = this._getValue();
+                this.all_button.value( $.isArray( value ) ? value.length : 0, this.length() );
+            }
+        } else if ( this._getValue() === null && !this.model.get( 'optional' ) ) {
+            this._setValue( this.first() );
+        }
+    },
+
+    /** Return/Set current selection */
+    value: function ( new_value ) {
+        new_value !== undefined && this.model.set( 'value', new_value );
+        return this._getValue();
+    },
+
+    /** Return the first select option */
+    first: function() {
+        return this.data.length > 0 ? this.data[ 0 ].value : null;
+    },
+
+    /** Check if a value is an existing option */
+    exists: function( value ) {
+        return _.findWhere( this.data, { value: value } );
+    },
+
+    /** Return the label/text of the current selection */
+    text: function () {
+        var v = this._getValue();
+        var d = this.exists( $.isArray( v ) ? v[ 0 ] : v );
+        return d ? d.label : '';
+    },
+
+    /** Show the select field */
+    show: function() {
+        this.model.set( 'visible', true );
+    },
+
+    /** Hide the select field */
+    hide: function() {
+        this.model.set( 'visible', false );
+    },
+
+    /** Show a spinner indicating that the select options are currently being loaded */
+    wait: function() {
+        this.model.set( 'wait', true );
+    },
+
+    /** Hide spinner indicating that the request has been completed */
+    unwait: function() {
+        this.model.set( 'wait', false );
+    },
+
+    /** Returns true if the field is disabled */
+    disabled: function() {
+        return this.model.get( 'disabled' );
+    },
+
+    /** Enable the select field */
+    enable: function() {
+        this.model.set( 'disabled', false );
+    },
+
+    /** Disable the select field */
+    disable: function() {
+        this.model.set( 'disabled', true );
+    },
+
+    /** Update all available options at once */
+    add: function( options, sorter ) {
+        _.each( this.model.get( 'data' ), function( v ) {
+            v.keep && !_.findWhere( options, { value: v.value } ) && options.push( v );
+        });
+        sorter && options && options.sort( sorter );
+        this.model.set( 'data', options );
+    },
+
+    /** Update available options */
+    update: function( options ) {
+        this.model.set( 'data', options );
+    },
+
+    /** Set the custom onchange callback function */
+    setOnChange: function( callback ) {
+        this.model.set( 'onchange', callback );
+    },
+
+    /** Number of available options */
+    length: function() {
+        return $.isArray( this.model.get( 'data' ) ) ? this.model.get( 'data' ).length : 0;
+    },
+
+    /** Set value to dom */
+    _setValue: function( new_value ) {
+        var self = this;
+        if( new_value === null || new_value === undefined ) {
+            new_value = '__null__';
+        }
+        if ( this.model.get( 'multiple' ) ) {
+            new_value = $.isArray( new_value ) ? new_value : [ new_value ];
+        } else if ( $.isArray( new_value ) ) {
+            if ( new_value.length > 0 ) {
+                new_value = new_value[ 0 ];
+            } else {
+                new_value = '__null__';
+            }
+        }
+        if ( this.model.get( 'searchable' ) ) {
+            if ( $.isArray( new_value ) ) {
+                var val = [];
+                _.each( new_value, function( v ) {
+                    var d = _.findWhere( self.data2, { id: v } );
+                    d && val.push( d );
+                });
+                new_value = val;
+            } else {
+                var d = _.findWhere( this.data2, { id: new_value } );
+                new_value = d;
+            }
+            this.$select.select2( 'data', new_value );
+        } else {
+            this.$select.val( new_value );
+        }
+    },
+
+    /** Get value from dom */
+    _getValue: function() {
+        var val = null;
+        if ( this.model.get( 'searchable' ) ) {
+            var selected = this.$select.select2( 'data' );
+            if ( selected ) {
+                if ( $.isArray( selected ) ) {
+                    val = [];
+                    selected.sort( function( a, b ) { return a.order - b.order } );
+                    _.each( selected, function( v ) { val.push( v.id ) } );
+                } else {
+                    val = selected.id;
+                }
+            }
+        } else {
+            val = this.$select.val();
+        }
+        return Utils.isEmpty( val ) ? null : val;
+    }
+});
+
+return {
+    View: View
+}
+
+});
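
A usage sketch for the select field above ('mvc/ui/ui-select-default');
searchable defaults to true, so this assumes the select2 jQuery plugin is
loaded, as the view requires. The genome-build entries are illustrative:

    define([ 'mvc/ui/ui-select-default' ], function( Select ) {
        var select = new Select.View({
            data     : [ { value: 'hg19', label: 'Human (hg19)' },
                         { value: 'mm10', label: 'Mouse (mm10)' } ],
            value    : 'hg19',
            onchange : function( new_value ) { console.log( new_value ) }
        });
        $( 'body' ).append( select.$el );
        select.wait();                                            // spinner while options are refreshed
        select.update( [ { value: 'dm6', label: 'Fly (dm6)' } ] );
        select.unwait();
    });
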
diff --git a/client/galaxy/scripts/mvc/ui/ui-select-ftp.js b/client/galaxy/scripts/mvc/ui/ui-select-ftp.js
new file mode 100644
index 0000000..ef8ea7c
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-select-ftp.js
@@ -0,0 +1,53 @@
+// dependencies
+define(['utils/utils', 'mvc/ui/ui-list'],
+        function(Utils, List) {
+
+/**
+ * FTP file selector
+ */
+var View = Backbone.View.extend({
+    // initialize
+    initialize : function(options) {
+        // link this
+        var self = this;
+
+        // create ui-list view to keep track of selected ftp files
+        this.ftpfile_list = new List.View({
+            name        : 'file',
+            optional    : options.optional,
+            multiple    : options.multiple,
+            onchange    : function() {
+                options.onchange && options.onchange(self.value());
+            }
+        });
+
+        // create elements
+        this.setElement(this.ftpfile_list.$el);
+
+        // initial fetch of ftps
+        Utils.get({
+            url     : Galaxy.root + 'api/remote_files',
+            success : function(response) {
+                var data = [];
+                for (var i in response) {
+                    data.push({
+                        value   : response[i]['path'],
+                        label   : response[i]['path']
+                    });
+                }
+                self.ftpfile_list.update(data);
+            }
+        });
+    },
+
+    /** Return/Set currently selected ftp datasets */
+    value: function(val) {
+        return this.ftpfile_list.value(val);
+    }
+});
+
+return {
+    View: View
+}
+
+});
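
A usage sketch for the FTP selector above ('mvc/ui/ui-select-ftp'); it assumes
a running Galaxy client, since the view populates itself from Galaxy.root +
'api/remote_files' on construction:

    define([ 'mvc/ui/ui-select-ftp' ], function( SelectFtp ) {
        var ftp = new SelectFtp.View({
            multiple : true,
            onchange : function( new_value ) { console.log( new_value ) }   // selected ftp file paths
        });
        $( 'body' ).append( ftp.$el );
    });
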
diff --git a/client/galaxy/scripts/mvc/ui/ui-select-library.js b/client/galaxy/scripts/mvc/ui/ui-select-library.js
new file mode 100644
index 0000000..4cc6a49
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-select-library.js
@@ -0,0 +1,127 @@
+// dependencies
+define(['utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-table', 'mvc/ui/ui-list'],
+        function(Utils, Ui, Table, List) {
+
+// collection of libraries
+var Libraries = Backbone.Collection.extend({
+    url: Galaxy.root + 'api/libraries?deleted=false'
+});
+
+// collection of dataset
+var LibraryDatasets = Backbone.Collection.extend({
+    initialize: function() {
+        var self = this;
+        this.config = new Backbone.Model({ library_id: null });
+        this.config.on('change', function() {
+            self.fetch({ reset: true });
+        });
+    },
+    url: function() {
+        return Galaxy.root + 'api/libraries/' + this.config.get('library_id') + '/contents';
+    }
+});
+
+// hda/hdca content selector ui element
+var View = Backbone.View.extend({
+    // initialize
+    initialize : function(options) {
+        // link this
+        var self = this;
+
+        // collections
+        this.libraries  = new Libraries();
+        this.datasets   = new LibraryDatasets();
+
+        // link app and options
+        this.options = options;
+
+        // select field for the library
+        // TODO: Remove this once the library API supports searching for library datasets
+        this.library_select = new Ui.Select.View({
+            onchange    : function(value) {
+                self.datasets.config.set('library_id', value);
+            }
+        });
+
+        // create ui-list view to keep track of selected data libraries
+        this.dataset_list = new List.View({
+            name        : 'dataset',
+            optional    : options.optional,
+            multiple    : options.multiple,
+            onchange    : function() {
+                self.trigger('change');
+            }
+        });
+
+        // add reset handler for fetched libraries
+        this.libraries.on('reset', function() {
+            var data = [];
+            self.libraries.each(function(model) {
+                data.push({
+                    value   : model.id,
+                    label   : model.get('name')
+                });
+            });
+            self.library_select.update(data);
+        });
+
+        // add reset handler for fetched library datasets
+        this.datasets.on('reset', function() {
+            var data = [];
+            var library_current = self.library_select.text();
+            if (library_current !== null) {
+                self.datasets.each(function(model) {
+                    if (model.get('type') === 'file') {
+                        data.push({
+                            value   : model.id,
+                            label   : model.get('name')
+                        });
+                    }
+                });
+            }
+            self.dataset_list.update(data);
+        });
+
+        // add change event. fires on trigger
+        this.on('change', function() {
+            options.onchange && options.onchange(self.value());
+        });
+
+        // create elements
+        this.setElement(this._template());
+        this.$('.library-select').append(this.library_select.$el);
+        this.$el.append(this.dataset_list.$el);
+
+        // initial fetch of libraries
+        this.libraries.fetch({
+            reset: true,
+            success: function() {
+                self.library_select.trigger('change');
+                if (self.options.value !== undefined) {
+                    self.value(self.options.value);
+                }
+            }
+        });
+    },
+
+    /** Return/Set currently selected library datasets */
+    value: function(val) {
+        return this.dataset_list.value(val);
+    },
+
+    /** Template */
+    _template: function() {
+        return  '<div class="ui-select-library">' +
+                    '<div class="library ui-margin-bottom">' +
+                        '<span class="library-title">Select Library</span>' +
+                        '<span class="library-select"/>' +
+                    '</div>' +
+                '</div>';
+    }
+});
+
+return {
+    View: View
+}
+
+});
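
A usage sketch for the library selector above ('mvc/ui/ui-select-library'); it
likewise assumes a running Galaxy client, as the view fetches the library list
immediately and fetches a library's datasets whenever the library select field
changes:

    define([ 'mvc/ui/ui-select-library' ], function( SelectLibrary ) {
        var libraries = new SelectLibrary.View({
            multiple : true,
            onchange : function( new_value ) { console.log( new_value ) }   // selected library dataset ids
        });
        $( 'body' ).append( libraries.$el );
    });
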
diff --git a/client/galaxy/scripts/mvc/ui/ui-select.js b/client/galaxy/scripts/mvc/ui/ui-select.js
new file mode 100644
index 0000000..b63822f
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-select.js
@@ -0,0 +1,213 @@
+// dependencies
+define(['utils/utils'], function(Utils) {
+
+/**
+ * A plugin for initializing select2 input items.
+ * Make sure the select2 library itself is loaded beforehand.
+ * The element to which select2 will be appended also has to
+ * be created before select2 initialization (and passed as an option).
+ */
+var View = Backbone.View.extend(
+{
+    // options
+    optionsDefault: {
+        css                 : '',
+        placeholder         : 'No data available',
+        data                : [],
+        value               : null,
+        multiple            : false,
+        minimumInputLength  : 0,
+    // example format of initial data: "id:name,55:anotherrole@role.com,27:role@role.com"
+        initialData         : ''
+    },
+    
+    // initialize
+    initialize : function(options) {
+        // configure options
+        this.options = Utils.merge(options, this.optionsDefault);
+        
+        // create new element
+        this.setElement(this._template(this.options));
+        
+        // check if container exists
+        if (!this.options.container) {
+            console.log('ui-select::initialize() : container not specified.');
+            return;
+        }
+        
+        // add to dom
+        this.options.container.append(this.$el);
+        
+        // link selection dictionary
+        this.select_data = this.options.data;
+        
+        // refresh
+        this._refresh();
+        
+        if (!this.options.multiple){
+            // initial value
+            if (this.options.value) {
+                this._setValue(this.options.value);
+            }
+            
+            // add change event
+            var self = this;
+            if (this.options.onchange) {
+                this.$el.on('change', function() {
+                    self.options.onchange(self.value());
+                });
+            }
+        }
+    },
+    
+    // value
+    value : function (new_value) {
+        // get current id/value
+        var before = this._getValue();
+        
+        // check if new_value is defined
+        if (new_value !== undefined) {
+            this._setValue(new_value);
+        }
+        
+        // get current id/value
+        var after = this._getValue();
+        
+        // fire onchange
+        if (after != before && this.options.onchange) {
+            this.options.onchange(after);
+        }
+            
+        // return current value
+        return after;
+    },
+    
+    // label of the current selection (null if nothing is selected)
+    text : function () {
+        var data = this.$el.select2('data');
+        return data && data.text;
+    },
+    
+    // disabled
+    disabled: function() {
+        return !this.$el.select2('enable');
+    },
+
+    // enable
+    enable: function() {
+        this.$el.select2('enable', true);
+    },
+        
+    // disable
+    disable: function() {
+        this.$el.select2('enable', false);
+    },
+    
+    // add
+    add: function(options) {
+        // add options
+        this.select_data.push({
+            id      : options.id,
+            text    : options.text
+        });
+        
+        // refresh
+        this._refresh();
+    },
+    
+    // remove
+    del: function(id) {
+        // search option
+        var index = this._getIndex(id);
+        
+        // check if found
+        if (index != -1) {
+            // remove options
+            this.select_data.splice(index, 1);
+        
+            // refresh
+            this._refresh();
+        }
+    },
+    
+    // remove
+    remove: function() {
+        this.$el.select2('destroy');
+    },
+    
+    // update
+    update: function(options) {
+        // copy options
+        this.select_data = [];
+        for (var key in options.data) {
+            this.select_data.push(options.data[key]);
+        }
+        
+        // refresh
+        this._refresh();
+    },
+    
+    // refresh
+    _refresh: function() {
+        // add select2 data based on type of input
+        if (!this.options.multiple){
+            var selected = this._getValue();
+            var select_opt = {
+                data                : this.select_data,
+                containerCssClass   : this.options.css,
+                placeholder         : this.options.placeholder,
+                dropdownAutoWidth   : true
+            };
+            this.$el.select2(select_opt);
+            // select previous value (if exists)
+            this._setValue(selected);
+        } else {
+            var select_opt = {
+                multiple            : this.options.multiple,
+                containerCssClass   : this.options.css,
+                placeholder         : this.options.placeholder,
+                minimumInputLength  : this.options.minimumInputLength,
+                ajax                : this.options.ajax,
+                dropdownCssClass    : this.options.dropdownCssClass,
+                escapeMarkup        : this.options.escapeMarkup,
+                formatResult        : this.options.formatResult,
+                formatSelection     : this.options.formatSelection,
+                initSelection       : this.options.initSelection,
+                initialData         : this.options.initialData
+            };
+            this.$el.select2(select_opt);
+        }
+    },
+    
+    // get index
+    _getIndex: function(value) {
+        // returns the index of the searched value, or -1 if not found
+        return _.findIndex(this.select_data, {id: value});
+    },
+    
+    // get value
+    _getValue: function() {
+        return this.$el.select2('val');
+    },
+    
+    // set value
+    _setValue: function(new_value) {
+        var index = this._getIndex(new_value);
+        if (index == -1) {
+            if (this.select_data.length > 0) {
+                new_value = this.select_data[0].id;
+            }
+        }
+        this.$el.select2('val', new_value);
+    },
+    
+    // element
+    _template: function(options) {
+        return '<input type="hidden" value="' + this.options.initialData + '"/>';
+    }
+});
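+
+// Usage sketch (illustrative; the container element and option data are
+// assumptions): a single select2 input with two options.
+//
+//   var select = new View({
+//       container : $( '#container' ),
+//       data      : [ { id: 'a', text: 'Option A' },
+//                     { id: 'b', text: 'Option B' } ],
+//       value     : 'a',
+//       onchange  : function( new_value ) { console.log( new_value ); }
+//   });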
+
+return {
+    View : View
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/ui/ui-slider.js b/client/galaxy/scripts/mvc/ui/ui-slider.js
new file mode 100644
index 0000000..abb3abc
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-slider.js
@@ -0,0 +1,105 @@
+define([ 'utils/utils' ], function( Utils ) {
+var View = Backbone.View.extend({
+    initialize : function( options ) {
+        var self = this;
+        this.options = Utils.merge( options, {
+            id      : Utils.uid(),
+            min     : null,
+            max     : null,
+            step    : null,
+            precise : false,
+            split   : 10000
+        } );
+
+        // create new element
+        this.setElement( this._template( this.options ) );
+
+        // determine whether to use the slider
+        this.useslider = this.options.max !== null && this.options.min !== null && this.options.max > this.options.min;
+
+        // set default step size
+        if ( this.options.step === null ) {
+            this.options.step = 1.0;
+            if ( this.options.precise && this.useslider ) {
+                this.options.step = ( this.options.max - this.options.min ) / this.options.split;
+            }
+        }
+
+        // create slider if min and max are defined properly
+        if ( this.useslider ) {
+            this.$slider = this.$( '#slider' );
+            this.$slider.slider( this.options );
+            this.$slider.on( 'slide', function ( event, ui ) {
+                self.value( ui.value );
+            });
+        } else {
+            this.$( '.ui-form-slider-text' ).css( 'width', '100%' );
+        }
+
+        // link text input field
+        this.$text = this.$( '#text' );
+
+        // set initial value
+        this.options.value !== undefined && ( this.value( this.options.value ) );
+
+        // add text field event
+        var pressed = [];
+        this.$text.on( 'change', function () {
+            self.value( $( this ).val() );
+        });
+        this.$text.on( 'keyup', function( e ) {
+            pressed[e.which] = false;
+            self.options.onchange && self.options.onchange( $( this ).val() );
+        });
+        this.$text.on( 'keydown', function ( e ) {
+            var v = e.which;
+            pressed[ v ] = true;
+            // shift+4 inserts the workflow parameter character '$'
+            if ( self.options.is_workflow && pressed[ 16 ] && v == 52 ) {
+                self.value( '$' );
+                e.preventDefault();
+            // otherwise only allow backspace, tab, enter, arrow keys, digits,
+            // a single '.' (in precise mode), a single '-', workflow parameters
+            // and cmd/ctrl combinations
+            } else if (!( v == 8 || v == 9 || v == 13 || v == 37 || v == 39 || ( v >= 48 && v <= 57 && !pressed[ 16 ] ) || ( v >= 96 && v <= 105 )
+                || ( ( v == 190 || v == 110 ) && $( this ).val().indexOf( '.' ) == -1 && self.options.precise )
+                || ( ( v == 189 || v == 109 ) && $( this ).val().indexOf( '-' ) == -1 )
+                || self._isParameter( $( this ).val() )
+                || pressed[ 91 ] || pressed[ 17 ] ) ) {
+                e.preventDefault();
+            }
+        });
+    },
+
+    /** Set and return the current value
+    */
+    value : function ( new_val ) {
+        if ( new_val !== undefined ) {
+            if ( new_val !== null && new_val !== '' && !this._isParameter( new_val ) ) {
+                isNaN( new_val ) && ( new_val = 0 );
+                this.options.max !== null && ( new_val = Math.min( new_val, this.options.max ) );
+                this.options.min !== null && ( new_val = Math.max( new_val, this.options.min ) );
+            }
+            this.$slider && this.$slider.slider( 'value', new_val );
+            this.$text.val( new_val );
+            this.options.onchange && this.options.onchange( new_val );
+        }
+        return this.$text.val();
+    },
+
+    /** Return true if the field contains a workflow parameter i.e. $('name')
+    */
+    _isParameter: function( value ) {
+        return this.options.is_workflow && String( value ).substring( 0, 1 ) === '$';
+    },
+
+    /** Slider template
+    */
+    _template: function( options ) {
+        return  '<div id="' + options.id + '" class="ui-form-slider">' +
+                    '<input id="text" type="text" class="ui-form-slider-text"/>' +
+                    '<div id="slider" class="ui-form-slider-element"/>' +
+                '</div>';
+    }
+});
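+
+// Usage sketch (illustrative): a precise slider between 0 and 100 with an
+// initial value; the step size is derived from min/max and the split option.
+//
+//   var slider = new View({
+//       min      : 0,
+//       max      : 100,
+//       precise  : true,
+//       value    : 50,
+//       onchange : function( new_val ) { console.log( new_val ); }
+//   });
+//   $( 'body' ).append( slider.$el );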
+
+return {
+    View : View
+};
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/ui/ui-table.js b/client/galaxy/scripts/mvc/ui/ui-table.js
new file mode 100644
index 0000000..bb4b3bc
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-table.js
@@ -0,0 +1,228 @@
+// dependencies
+define(['utils/utils'], function(Utils) {
+
+/**
+ *  This class creates a ui table element.
+ */
+var View = Backbone.View.extend({
+    // current row
+    row: null,
+    
+    // count rows
+    row_count: 0,
+    
+    // default options
+    optionsDefault: {
+        content     : 'No content available.',
+        onchange    : null,
+        ondblclick  : null,
+        onconfirm   : null,
+        cls         : 'ui-table',
+        cls_tr      : ''
+    },
+    
+    // events
+    events : {
+        'click'     : '_onclick',
+        'dblclick'  : '_ondblclick'
+    },
+    
+    // initialize
+    initialize : function(options) {
+        // configure options
+        this.options = Utils.merge(options, this.optionsDefault);
+        
+        // create new element
+        var $el = $(this._template(this.options));
+        
+        // link sub-elements
+        this.$thead = $el.find('thead');
+        this.$tbody = $el.find('tbody');
+        this.$tmessage = $el.find('tmessage');
+        
+        // set element
+        this.setElement($el);
+                
+        // initialize row
+        this.row = this._row();
+    },
+    
+    // add header cell
+    addHeader: function($el) {
+        var wrapper = $('<th></th>');
+        wrapper.append($el);
+        this.row.append(wrapper);
+    },
+    
+    // header
+    appendHeader: function() {
+        // append header row
+        this.$thead.append(this.row);
+
+        // row
+        this.row = $('<tr></tr>');
+    },
+    
+    // add row cell
+    add: function($el, width, align) {
+        var wrapper = $('<td></td>');
+        if (width) {
+            wrapper.css('width', width);
+        }
+        if (align) {
+            wrapper.css('text-align', align);
+        }
+        wrapper.append($el);
+        this.row.append(wrapper);
+    },
+    
+    // append
+    append: function(id, fade) {
+        this._commit(id, fade, false);
+    },
+    
+    // prepend
+    prepend: function(id, fade) {
+        this._commit(id, fade, true);
+    },
+    
+    // get element
+    get: function(id) {
+        return this.$el.find('#' + id);
+    },
+    
+    // delete
+    del: function(id) {
+        var item = this.$tbody.find('#' + id);
+        if (item.length > 0) {
+            item.remove();
+            this.row_count--;
+            this._refresh();
+        }
+    },
+
+    // delete all
+    delAll: function() {
+        this.$tbody.empty();
+        this.row_count = 0;
+        this._refresh();
+    },
+        
+    // value
+    value: function(new_value) {
+        // get current id/value
+        this.before = this.$tbody.find('.current').attr('id');
+        
+        // check if new_value is defined
+        if (new_value !== undefined) {
+            this.$tbody.find('tr').removeClass('current');
+            if (new_value) {
+                this.$tbody.find('#' + new_value).addClass('current');
+            }
+        }
+        
+        // get current id/value
+        var after = this.$tbody.find('.current').attr('id');
+        if(after === undefined) {
+            return null;
+        } else {
+            // fire onchange
+            if (after != this.before && this.options.onchange) {
+                this.options.onchange(new_value);
+            }
+            
+            // return current value
+            return after;
+        }
+    },
+    
+    // size
+    size: function() {
+        return this.$tbody.find('tr').length;
+    },
+    
+    // commit
+    _commit: function(id, fade, prepend) {
+        // remove previous item with same id
+        this.del(id);
+        
+        // add
+        this.row.attr('id', id);
+        
+        // add row
+        if (prepend) {
+            this.$tbody.prepend(this.row);
+        } else {
+            this.$tbody.append(this.row);
+        }
+        
+        // fade mode
+        if (fade) {
+            this.row.hide();
+            this.row.fadeIn();
+        }
+        
+        // row
+        this.row = this._row();
+        
+        // row count
+        this.row_count++;
+        this._refresh();
+    },
+    
+    // create new row
+    _row: function() {
+        return $('<tr class="' + this.options.cls_tr + '"></tr>');
+    },
+    
+    // onclick
+    _onclick: function(e) {
+        // get values
+        var old_value = this.value();
+        var new_value = $(e.target).closest('tr').attr('id');
+        if (new_value != ''){
+            // check equality
+            if (new_value && old_value != new_value) {
+                if (this.options.onconfirm) {
+                    this.options.onconfirm(new_value);
+                } else {
+                    this.value(new_value);
+                }
+            }
+        }
+    },
+
+    // ondblclick
+    _ondblclick: function(e) {
+        var value = this.value();
+        if (value && this.options.ondblclick) {
+            this.options.ondblclick(value);
+        }
+    },
+        
+    // refresh
+    _refresh: function() {
+        if (this.row_count == 0) {
+            this.$tmessage.show();
+        } else {
+            this.$tmessage.hide();
+        }
+    },
+        
+    // load html template
+    _template: function(options) {
+        return  '<div>' +
+                    '<table class="' + options.cls + '">' +
+                        '<thead></thead>' +
+                        '<tbody></tbody>' +
+                    '</table>' +
+                    '<tmessage>' + options.content + '</tmessage>' +
+                '</div>';
+    }
+});
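+
+// Usage sketch (illustrative): a two-column table with a single selectable row.
+//
+//   var table = new View( { onchange: function( id ) { console.log( id ); } } );
+//   table.addHeader( 'Name' );
+//   table.addHeader( 'Size' );
+//   table.appendHeader();
+//   table.add( 'dataset.txt' );
+//   table.add( '12 KB' );
+//   table.append( 'row-1' );
+//   $( 'body' ).append( table.$el );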
+
+return {
+    View: View
+};
+
+});
diff --git a/client/galaxy/scripts/mvc/ui/ui-tabs.js b/client/galaxy/scripts/mvc/ui/ui-tabs.js
new file mode 100644
index 0000000..b12c339
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-tabs.js
@@ -0,0 +1,149 @@
+/**
+ *  Renders tabs, e.g. as used in the charts editor; behaves similarly to the repeat and section rendering.
+ */
+define( [ 'utils/utils' ], function( Utils ) {
+    var View = Backbone.View.extend({
+        initialize : function( options ) {
+            var self = this;
+            this.collection = new Backbone.Collection();
+            this.model = options && options.model || new Backbone.Model( {
+                onchange    : null,
+                visible     : true
+            }).set( options );
+            this.setElement( $( this._template() ) );
+            this.$nav       = this.$( '.tab-navigation' );
+            this.$content   = this.$( '.tab-content' );
+            this.$el.on( 'click', function() { $( '.tooltip' ).hide() } );
+            this.render();
+            this.listenTo( this.model, 'change', this.render, this );
+            this.listenTo( this.collection, 'add', this._add, this );
+            this.listenTo( this.collection, 'remove', this._remove, this );
+            this.listenTo( this.collection, 'change', this._change, this );
+            this.listenTo( this.collection, 'reset', this._reset, this );
+            this.listenTo( this.collection, 'add remove reset', this.render, this );
+        },
+
+        render: function() {
+            var id = this.model.get( 'current' );
+            id = this.$( '#' + id ).length > 0 ? id : this.first();
+            if ( id ) {
+                this.$nav.children().removeClass( 'active' );
+                this.$content.children().removeClass( 'active' );
+                this.$( '#tab-' + id ).addClass( 'active' );
+                this.$( '#' + id ).addClass( 'active' );
+            }
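+            // toggle overall visibility; show the tab navigation only when there is more than one tab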
+            this.$el[ this.model.get( 'visible' ) ? 'fadeIn' : 'fadeOut' ]( 'fast' );
+            this.$nav[ this.size() > 1 ? 'show' : 'hide' ]();
+        },
+
+        /** Returns tab id for currently shown tab */
+        current: function() {
+            return this.model.get( 'current' );
+        },
+
+        /** Show tab view and highlight a tab by id */
+        show: function( id ) {
+            if ( id ) {
+                this.model.set( { 'current': id, 'visible': true } );
+                this.model.get( 'onchange' ) && this.model.get( 'onchange' )( id );
+            }
+        },
+
+        /** Hide tab view */
+        hide: function(){
+            this.model.set( 'visible', false );
+        },
+
+        /** Returns first tab */
+        first: function() {
+            var model = this.collection.first();
+            return model && model.id;
+        },
+
+        /** Returns current number of tabs */
+        size: function() {
+            return this.collection.length;
+        },
+
+        /** Adds a new tab */
+        add: function( options ) {
+            this.collection.add( options );
+        },
+
+        /** Delete tab */
+        del: function( id ) {
+            this.collection.remove( id );
+        },
+
+        /** Delete all tabs */
+        delAll: function() {
+            this.collection.reset();
+        },
+
+        /** Show tab */
+        showTab: function( id ) {
+            this.collection.get( id ).set( 'hidden', false );
+        },
+
+        /** Hide tab */
+        hideTab: function( id ) {
+            this.collection.get( id ).set( 'hidden', true );
+        },
+
+        /** Adds a new tab */
+        _add: function( tab_model ) {
+            var self = this;
+            var options = tab_model.attributes;
+            this.$content.append( $( '<div/>' ).attr( 'id', options.id ).addClass( 'tab-pane' ).append( options.$el ) );
+            this.$nav.append( $( this._template_tab( options ) )
+                .show()
+                .tooltip( { title: options.tooltip, placement: 'bottom', container: self.$el } )
+                .on( 'click', function( e ) {
+                    e.preventDefault();
+                    self.show( options.id );
+                }));
+            if ( this.size() == 1 ) {
+                this.show( options.id );
+            }
+        },
+
+        /** Delete tab */
+        _remove: function( tab_model ) {
+            this.$( '#tab-' + tab_model.id ).remove();
+            this.$( '#' + tab_model.id ).remove();
+        },
+
+        /** Reset collection */
+        _reset: function() {
+            this.$nav.empty();
+            this.$content.empty();
+        },
+
+        /** Change tab */
+        _change: function( tab_model ) {
+            this.$( '#tab-' + tab_model.id )[ tab_model.get( 'hidden' ) ? 'hide' : 'show' ]();
+        },
+
+        /** Main template */
+        _template: function() {
+            return  $( '<div/>' ).addClass( 'ui-tabs tabbable tabs-left' )
+                                 .append( $( '<ul/>' ).addClass( 'tab-navigation nav nav-tabs' ) )
+                                 .append( $( '<div/>' ).addClass( 'tab-content' ) );
+        },
+
+        /** Tab template */
+        _template_tab: function( options ) {
+            var $tmpl = $( '<li/>' ).addClass( 'tab-element' )
+                                    .attr( 'id', 'tab-' + options.id )
+                                    .append( $( '<a/>' ).attr( 'id', 'tab-title-link-' + options.id ) );
+            var $href = $tmpl.find( 'a' );
+            options.icon && $href.append( $( '<i/>' ).addClass( 'tab-icon fa' ).addClass( options.icon ) );
+            $href.append( $( '<span/>' ).attr( 'id', 'tab-title-text-' + options.id )
+                                        .addClass( 'tab-title-text' )
+                                        .append( options.title ) );
+            return $tmpl;
+        }
+    });
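+
+    // Usage sketch (illustrative): two tabs holding simple jQuery panes.
+    //
+    //   var tabs = new View({});
+    //   tabs.add( { id: 'tab-a', title: 'First',  $el: $( '<div/>' ).text( 'A' ) } );
+    //   tabs.add( { id: 'tab-b', title: 'Second', $el: $( '<div/>' ).text( 'B' ) } );
+    //   $( 'body' ).append( tabs.$el );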
+
+    return { View : View };
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/ui/ui-thumbnails.js b/client/galaxy/scripts/mvc/ui/ui-thumbnails.js
new file mode 100644
index 0000000..1bb25a6
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-thumbnails.js
@@ -0,0 +1,111 @@
+/** This class renders the selection grid. */
+define( [ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-tabs' ], function( Utils, Ui, Tabs ) {
+    var View = Backbone.View.extend({
+        events : {
+            'click .ui-thumbnails-item'    : '_onclick',
+            'dblclick .ui-thumbnails-item' : '_ondblclick'
+        },
+
+        initialize : function( options ) {
+            this.model = options.model || new Backbone.Model( options );
+            this.collection = new Backbone.Collection( this.model.get( 'collection' ) );
+            this.tabs = new Tabs.View({});
+            this.setElement( this.tabs.$el.addClass( 'ui-thumbnails' ) );
+            this.render();
+            this.listenTo( this.model, 'change', this.render, this );
+            this.listenTo( this.collection, 'reset change add remove', this.render, this );
+        },
+
+        render: function() {
+            this.first = null;
+            this.tabs.delAll();
+            this._renderDefault();
+            this._renderList();
+        },
+
+        _renderDefault: function() {
+            var self = this;
+            var index = [];
+            var title_length = 20;
+            var $el = $( '<div/>' ).addClass( 'ui-thumbnails-grid' );
+            this.collection.each( function( model ) {
+                if ( model.get( 'keywords' ).indexOf( 'default' ) !== -1 ) {
+                    var title = model.get( 'title' );
+                    $el.append( $( self._templateThumbnailItem( {
+                        id          : model.id,
+                        title       : title.length < title_length ? title : title.substr( 0, title_length ) + '...',
+                        title_icon  : model.get( 'title_icon' ),
+                        image_src   : model.get( 'image_src' )
+                    })).tooltip( { title: model.get( 'description' ), placement: 'bottom' } ) );
+                }
+            });
+            if ( $el.children().length > 0 ) {
+                this.tabs.add( { id: Utils.uid(), title: self.model.get( 'title_default' ), $el: $el } );
+            }
+        },
+
+        _renderList: function() {
+            var self = this;
+            if ( this.collection.length > 0 ) {
+                this.first = this.first || this.collection.first().id;
+                var $el = $( '<div/>' ).addClass( 'ui-thumbnails-grid' );
+                this.collection.each( function( model ) {
+                    $el.append( self._templateRegularItem( model.attributes ) );
+                });
+                this.tabs.add( { id: Utils.uid(), title: self.model.get( 'title_list' ), $el: $el } );
+            }
+        },
+
+        /** Set/Get value */
+        value: function( new_value ) {
+            if ( new_value !== undefined ) {
+                new_value = new_value == '__first' ? this.first : new_value;
+                var before = this.$( '.ui-thumbnail-current' ).attr( 'value' );
+                this.$( '.ui-thumbnail-current' ).removeClass( 'ui-thumbnail-current' );
+                this.$( '[value="' + new_value + '"]' ).addClass( 'ui-thumbnail-current' );
+                var after = this.$( '.ui-thumbnail-current' ).attr( 'value' );
+                var change_handler = this.model.get( 'onchange' );
+                after != before && change_handler && change_handler( after );
+            }
+            return this.$( '.ui-thumbnail-current' ).attr( 'value' );
+        },
+
+        /** Add click handler */
+        _onclick: function( e ) {
+            this.value( $( e.target ).closest( '.ui-thumbnails-item' ).attr( 'value' ) );
+        },
+
+        /** Add double click handler */
+        _ondblclick: function( e ) {
+            this.model.get( 'ondblclick' ) && this.model.get( 'ondblclick' )( this.value() );
+        },
+
+        /* Thumbnail template with image */
+        _templateThumbnailItem: function( options ) {
+            return  '<div class="ui-thumbnails-item ui-thumbnails-item-float" value="' + options.id + '">' +
+                        '<img class="ui-thumbnails-image" src="' + options.image_src + '">' +
+                        '<div class="ui-thumbnails-title ui-form-info">' +
+                            '<span class="fa ' + options.title_icon + '"/>' + options.title +
+                        '</div>' +
+                    '</div>';
+        },
+
+        /* Thumbnail template with image and description */
+        _templateRegularItem: function( options ) {
+            return  '<div class="ui-thumbnails-item" value="' + options.id + '">' +
+                        '<table>' +
+                            '<tr>' +
+                                '<td>' +
+                                    '<img class="ui-thumbnails-image" src="' + options.image_src + '">' +
+                                '</td>' +
+                                '<td>' +
+                                    '<div class="ui-thumbnails-description-title ui-form-info">' + options.title + '</div>' +
+                                    '<div class="ui-thumbnails-description-text ui-form-info">' + options.description + '</div>' +
+                                '</td>' +
+                            '</tr>' +
+                        '</table>' +
+                    '</div>';
+        }
+    });
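+
+    // Usage sketch (illustrative; the item attributes are assumptions following
+    // the templates above): a grid with one item shown in the default tab.
+    //
+    //   var thumbnails = new View({
+    //       title_default : 'Suggested',
+    //       title_list    : 'All',
+    //       collection    : [ { id: 'viz-1', title: 'Bar chart', keywords: 'default',
+    //                           description: 'Example item', image_src: 'bar.png',
+    //                           title_icon: 'fa-bar-chart' } ],
+    //       onchange      : function( id ) { console.log( id ); }
+    //   });
+    //   $( 'body' ).append( thumbnails.$el );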
+
+    return { View: View };
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/upload/composite/composite-row.js b/client/galaxy/scripts/mvc/upload/composite/composite-row.js
new file mode 100644
index 0000000..1f8ed91
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/composite/composite-row.js
@@ -0,0 +1,291 @@
+/** Renders the composite upload row view */
+define([ 'utils/utils', 'mvc/upload/upload-settings', 'mvc/upload/upload-ftp', 'mvc/ui/ui-popover', 'mvc/ui/ui-misc', 'mvc/ui/ui-select', 'utils/uploadbox' ],
+function( Utils, UploadSettings, UploadFtp, Popover, Ui, Select ) {
+    return Backbone.View.extend({
+        /** Dictionary of upload states and associated icons */
+        status_classes : {
+            init    : 'upload-mode fa fa-exclamation text-primary',
+            ready   : 'upload-mode fa fa-check text-success',
+            running : 'upload-mode fa fa-spinner fa-spin',
+            success : 'upload-mode fa fa-check',
+            error   : 'upload-mode fa fa-exclamation-triangle'
+        },
+
+        initialize: function( app, options ) {
+            var self = this;
+            this.app = app;
+            this.model = options.model;
+            this.setElement( this._template() );
+            this.$source        = this.$( '.upload-source' );
+            this.$settings      = this.$( '.upload-settings' );
+            this.$status        = this.$( '.upload-status' );
+            this.$text          = this.$( '.upload-text' );
+            this.$text_content  = this.$( '.upload-text-content' );
+            this.$info_text     = this.$( '.upload-info-text' );
+            this.$info_progress = this.$( '.upload-info-progress' );
+            this.$file_name     = this.$( '.upload-file-name' );
+            this.$file_desc     = this.$( '.upload-file-desc' );
+            this.$file_size     = this.$( '.upload-file-size' );
+            this.$progress_bar  = this.$( '.upload-progress-bar' );
+            this.$percentage    = this.$( '.upload-percentage' );
+
+            // build upload functions
+            this.uploadinput = this.$el.uploadinput({
+                ondragover: function() {
+                    self.model.get( 'enabled' ) && self.$el.addClass( 'warning' );
+                },
+                ondragleave: function() {
+                    self.$el.removeClass( 'warning' );
+                },
+                onchange: function( files ) {
+                    if ( self.model.get( 'status' ) != 'running' && files && files.length > 0 ) {
+                        self.model.reset({
+                            'file_data': files[ 0 ],
+                            'file_name': files[ 0 ].name,
+                            'file_size': files[ 0 ].size,
+                            'file_mode': files[ 0 ].mode || 'local'
+                        });
+                        self._refreshReady();
+                    }
+                }
+            });
+
+            // source selection popup
+            this.button_menu = new Ui.ButtonMenu({
+                icon        : 'fa-caret-down',
+                title       : 'Select',
+                pull        : 'left'
+            });
+            this.$source.append( this.button_menu.$el );
+            this.button_menu.addMenu({
+                icon        : 'fa-laptop',
+                title       : 'Choose local file',
+                onclick     : function() { self.uploadinput.dialog() }
+            });
+            if ( this.app.ftp_upload_site ) {
+                this.button_menu.addMenu({
+                    icon        : 'fa-folder-open-o',
+                    title       : 'Choose FTP file',
+                    onclick     : function() { self._showFtp() }
+                });
+            }
+            this.button_menu.addMenu({
+                icon        : 'fa-edit',
+                title       : 'Paste/Fetch data',
+                onclick     : function() {
+                    self.model.reset( { 'file_mode': 'new', 'file_name': 'New File' } );
+                }
+            });
+
+            // add ftp file viewer
+            this.ftp = new Popover.View({
+                title       : 'Choose FTP file:',
+                container   : this.$source.find( '.ui-button-menu' ),
+                placement   : 'right'
+            });
+
+            // append popup to settings icon
+            this.settings = new Popover.View({
+                title       : 'Upload configuration',
+                container   : this.$settings,
+                placement   : 'bottom'
+            });
+
+            // handle text editing event
+            this.$text_content.on( 'change input', function( e ) {
+                self.model.set( { 'url_paste': $( e.target ).val(),
+                                  'file_size': $( e.target ).val().length } );
+                self._refreshReady();
+            });
+
+            // handle settings popover
+            this.$settings.on( 'click',     function( e ) { self._showSettings() } )
+                          .on( 'mousedown', function( e ) { e.preventDefault() } );
+
+            // model events
+            this.listenTo( this.model, 'change:percentage', function() { self._refreshPercentage() } );
+            this.listenTo( this.model, 'change:status',     function() { self._refreshStatus() } );
+            this.listenTo( this.model, 'change:info',       function() { self._refreshInfo() } );
+            this.listenTo( this.model, 'change:file_name',  function() { self._refreshFileName() } );
+            this.listenTo( this.model, 'change:file_mode',  function() { self._refreshMode() } );
+            this.listenTo( this.model, 'change:file_size',  function() { self._refreshFileSize() } );
+            this.listenTo( this.model, 'remove', function() { self.remove() } );
+            this.app.collection.on( 'reset', function() { self.remove() } );
+        },
+
+        render: function() {
+            this.$el.attr( 'id', 'upload-row-' + this.model.id );
+            this.$file_name.html( _.escape( this.model.get( 'file_name' ) || '-' ) );
+            this.$file_desc.html( this.model.get( 'file_desc' ) || 'Unavailable' );
+            this.$file_size.html( Utils.bytesToString ( this.model.get( 'file_size' ) ) );
+            this.$status.removeClass().addClass( this.status_classes.init );
+        },
+
+        /** Remove view */
+        remove: function() {
+            // call the base class remove method
+            Backbone.View.prototype.remove.apply( this );
+        },
+
+        //
+        // handle model events
+        //
+
+        /** Refresh ready or not states */
+        _refreshReady: function() {
+            this.app.collection.each( function( model ) {
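+                // a row is 'ready' once it has a file with non-zero size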
+                model.set( 'status', ( model.get( 'file_size' ) > 0 ) && 'ready' || 'init' );
+            });
+        },
+
+        /** Refresh mode and e.g. show/hide textarea field */
+        _refreshMode: function() {
+            var file_mode = this.model.get( 'file_mode' );
+            if ( file_mode == 'new' ) {
+                this.height = this.$el.height();
+                this.$text.css( { 'width' : this.$el.width() - 16 + 'px',
+                                  'top'   : this.$el.height() - 8 + 'px' } ).show();
+                this.$el.height( this.$el.height() - 8 + this.$text.height() + 16 );
+                this.$text_content.val( '' ).trigger( 'keyup' );
+            } else {
+                this.$el.height( this.height );
+                this.$text.hide();
+            }
+        },
+
+        /** Refresh information */
+        _refreshInfo: function() {
+            var info = this.model.get( 'info' );
+            if ( info ) {
+                this.$info_text.html( '<strong>Failed: </strong>' + info ).show();
+            } else {
+                this.$info_text.hide();
+            }
+        },
+
+        /** Refresh percentage */
+        _refreshPercentage : function() {
+            var percentage = parseInt( this.model.get( 'percentage' ) );
+            if ( percentage != 0 ) {
+                this.$progress_bar.css( { width : percentage + '%' } );
+            } else {
+                this.$progress_bar.addClass( 'no-transition' );
+                this.$progress_bar.css( { width : '0%' } );
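+                // reading offsetHeight forces a reflow so the width reset is applied without a transition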
+                this.$progress_bar[ 0 ].offsetHeight;
+                this.$progress_bar.removeClass( 'no-transition' );
+            }
+            this.$percentage.html( percentage != 100 ? percentage + '%' : 'Adding to history...' );
+        },
+
+        /** Refresh status */
+        _refreshStatus : function() {
+            var status = this.model.get( 'status' );
+            this.$status.removeClass().addClass( this.status_classes[ status ] );
+            this.model.set( 'enabled', status != 'running' );
+            this.$text_content.attr( 'disabled', !this.model.get( 'enabled' ) );
+            this.$el.removeClass( 'success danger warning' );
+            if ( status == 'running' || status == 'ready' ) {
+                this.model.set( 'percentage', 0 );
+            }
+            this.$source.find( '.button' )[ status == 'running' ? 'addClass' : 'removeClass' ]( 'disabled' );
+            if ( status == 'success' ) {
+                this.$el.addClass( 'success' );
+                this.model.set( 'percentage', 100 );
+                this.$percentage.html( '100%' );
+            }
+            if ( status == 'error' ) {
+                this.$el.addClass( 'danger' );
+                this.model.set( 'percentage', 0 );
+                this.$info_progress.hide();
+                this.$info_text.show();
+            } else {
+                this.$info_progress.show();
+                this.$info_text.hide();
+            }
+        },
+
+        /** File name */
+        _refreshFileName: function() {
+            this.$file_name.html( this.model.get( 'file_name' ) || '-' );
+        },
+
+        /** File size */
+        _refreshFileSize: function() {
+            this.$file_size.html( Utils.bytesToString ( this.model.get( 'file_size' ) ) );
+        },
+
+        /** Show/hide ftp popup */
+        _showFtp: function() {
+            if ( !this.ftp.visible ) {
+                var self = this;
+                this.ftp.empty();
+                this.ftp.append( ( new UploadFtp( {
+                    ftp_upload_site : this.app.ftp_upload_site,
+                    onchange        : function( ftp_file ) {
+                        self.ftp.hide();
+                        if ( self.model.get( 'status' ) != 'running' && ftp_file ) {
+                             self.model.reset({
+                                'file_mode': 'ftp',
+                                'file_name': ftp_file.path,
+                                'file_size': ftp_file.size,
+                                'file_path': ftp_file.path
+                            });
+                            self._refreshReady();
+                        }
+                    }
+                } ) ).$el );
+                this.ftp.show();
+            } else {
+                this.ftp.hide();
+            }
+        },
+
+        /** Show/hide settings popup */
+        _showSettings : function() {
+            if ( !this.settings.visible ) {
+                this.settings.empty();
+                this.settings.append( ( new UploadSettings( this ) ).$el );
+                this.settings.show();
+            } else {
+                this.settings.hide();
+            }
+        },
+
+        /** Template */
+        _template: function() {
+            return  '<tr class="upload-row">' +
+                        '<td>' +
+                            '<div class="upload-source"/>' +
+                            '<div class="upload-text-column">' +
+                                '<div class="upload-text">' +
+                                    '<div class="upload-text-info">You can tell Galaxy to download data from web by entering URL in this box (one per line). You can also directly paste the contents of a file.</div>' +
+                                    '<textarea class="upload-text-content form-control"/>' +
+                                '</div>' +
+                            '</div>' +
+                        '</td>' +
+                        '<td>' +
+                            '<div class="upload-status"/>' +
+                        '</td>' +
+                        '<td>' +
+                            '<div class="upload-file-desc upload-title"/>' +
+                        '</td>' +
+                        '<td>' +
+                            '<div class="upload-file-name upload-title"/>' +
+                        '</td>' +
+                        '<td>' +
+                            '<div class="upload-file-size upload-size"/>' +
+                        '</td>' +
+                        '<td><div class="upload-settings upload-icon-button fa fa-gear"/></td>' +
+                        '<td>' +
+                            '<div class="upload-info">' +
+                                '<div class="upload-info-text"/>' +
+                                '<div class="upload-info-progress progress">' +
+                                    '<div class="upload-progress-bar progress-bar progress-bar-success"/>' +
+                                    '<div class="upload-percentage">0%</div>' +
+                                '</div>' +
+                            '</div>' +
+                        '</td>' +
+                    '</tr>';
+        }
+    });
+});
diff --git a/client/galaxy/scripts/mvc/upload/composite/composite-view.js b/client/galaxy/scripts/mvc/upload/composite/composite-view.js
new file mode 100644
index 0000000..8d7b9f3
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/composite/composite-view.js
@@ -0,0 +1,191 @@
+/** Renders contents of the composite uploader */
+define([ 'utils/utils', 'mvc/upload/upload-model', 'mvc/upload/composite/composite-row', 'mvc/ui/ui-popover', 'mvc/ui/ui-select', 'mvc/ui/ui-misc'],
+function( Utils, UploadModel, UploadRow, Popover, Select, Ui ) {
+    return Backbone.View.extend({
+        collection: new UploadModel.Collection(),
+        initialize: function(app) {
+            var self = this;
+            this.app                = app;
+            this.options            = app.options;
+            this.list_extensions    = app.list_extensions;
+            this.list_genomes       = app.list_genomes;
+            this.ftp_upload_site    = app.currentFtp();
+            this.setElement( this._template() );
+
+            // create button section
+            this.btnStart = new Ui.Button( { title: 'Start', onclick: function() { self._eventStart() } } );
+            this.btnClose = new Ui.Button( { title: 'Close', onclick: function() { self.app.modal.hide() } } );
+
+            // append buttons to dom
+            _.each( [ this.btnStart, this.btnClose ], function( button ) {
+                self.$( '.upload-buttons' ).prepend( button.$el );
+            });
+
+            // select extension
+            this.select_extension = new Select.View({
+                css         : 'upload-footer-selection',
+                container   : this.$( '.upload-footer-extension' ),
+                data        : _.filter( this.list_extensions, function( ext ) { return ext.composite_files } ),
+                onchange    : function( extension ) {
+                    self.collection.reset();
+                    var details = _.findWhere( self.list_extensions, { id : extension } );
+                    if ( details && details.composite_files ) {
+                        _.each( details.composite_files, function( item ) {
+                            self.collection.add( { id          : self.collection.size(),
+                                                   file_desc   : item.description || item.name } );
+                        } );
+                    }
+                }
+            });
+
+            // handle extension info popover
+            this.$( '.upload-footer-extension-info' ).on( 'click', function( e ) {
+                self._showExtensionInfo({
+                    $el         : $( e.target ),
+                    title       : self.select_extension.text(),
+                    extension   : self.select_extension.value(),
+                    placement   : 'top'
+                });
+            }).on( 'mousedown', function( e ) { e.preventDefault() } );
+
+            // genome extension
+            this.select_genome = new Select.View({
+                css         : 'upload-footer-selection',
+                container   : this.$( '.upload-footer-genome' ),
+                data        : this.list_genomes,
+                value       : this.options.default_genome
+            });
+
+            // listener for collection triggers on change in composite datatype and extension selection
+            this.listenTo( this.collection, 'add', function ( model ) { self._eventAnnounce( model ) } );
+            this.listenTo( this.collection, 'change add', function() { self.render() } );
+            this.select_extension.options.onchange( this.select_extension.value() );
+            this.render();
+        },
+
+        render: function () {
+            var model = this.collection.first();
+            if ( model && model.get( 'status' ) == 'running' ) {
+                this.select_genome.disable();
+                this.select_extension.disable();
+            } else {
+                this.select_genome.enable();
+                this.select_extension.enable();
+            }
+            if ( this.collection.where( { status : 'ready' } ).length == this.collection.length && this.collection.length > 0 ) {
+                this.btnStart.enable();
+                this.btnStart.$el.addClass( 'btn-primary' );
+            } else {
+                this.btnStart.disable();
+                this.btnStart.$el.removeClass( 'btn-primary' );
+            }
+            this.$( '.upload-table' )[ this.collection.length > 0 ? 'show' : 'hide' ]();
+        },
+
+        //
+        // upload events / process pipeline
+        //
+
+        /** Builds the basic ui with placeholder rows for each composite data type file */
+        _eventAnnounce: function( model ) {
+            var upload_row = new UploadRow( this, { model : model } );
+            this.$( '.upload-table > tbody:first' ).append( upload_row.$el );
+            this.$( '.upload-table' )[ this.collection.length > 0 ? 'show' : 'hide' ]();
+            upload_row.render();
+        },
+
+        /** Start upload process */
+        _eventStart: function() {
+            var self = this;
+            this.collection.each( function( model ) {
+                model.set( { 'genome'   : self.select_genome.value(),
+                             'extension': self.select_extension.value() } );
+            });
+            $.uploadpost({
+                url      : this.app.options.nginx_upload_path,
+                data     : this.app.toData( this.collection.filter() ),
+                success  : function( message )      { self._eventSuccess( message ) },
+                error    : function( message )      { self._eventError( message ) },
+                progress : function( percentage )   { self._eventProgress( percentage ) }
+            });
+        },
+
+        /** Refresh progress state */
+        _eventProgress: function( percentage ) {
+            this.collection.each( function( it ) { it.set( 'percentage', percentage ) } );
+        },
+
+        /** Refresh success state */
+        _eventSuccess: function( message ) {
+            this.collection.each( function( it ) { it.set('status', 'success') } );
+            Galaxy.currHistoryPanel.refreshContents();
+        },
+
+        /** Refresh error state */
+        _eventError: function( message ) {
+            this.collection.each( function( it ) { it.set( { 'status': 'error', 'info': message } ) } );
+        },
+
+        /** Display extension info popup */
+        _showExtensionInfo: function(options) {
+            var self = this;
+            var $el         = options.$el;
+            var extension   = options.extension;
+            var title       = options.title;
+            var description = _.findWhere(this.list_extensions, { id : extension });
+            this.extension_popup && this.extension_popup.remove();
+            this.extension_popup = new Popover.View({
+                placement: options.placement || 'bottom',
+                container: $el,
+                destroy: true
+            });
+            this.extension_popup.title( title );
+            this.extension_popup.empty();
+            this.extension_popup.append( this._templateDescription( description ) );
+            this.extension_popup.show();
+        },
+
+        /* Template for extensions description */
+        _templateDescription: function( options ) {
+            if ( options.description ) {
+                var tmpl = options.description;
+                if ( options.description_url ) {
+                    tmpl += ' (<a href="' + options.description_url + '" target="_blank">read more</a>)';
+                }
+                return tmpl;
+            } else {
+                return 'There is no description available for this file extension.';
+            }
+        },
+
+        /** Load html template */
+        _template: function() {
+            return  '<div class="upload-view-composite">' +
+                        '<div class="upload-footer">' +
+                            '<span class="upload-footer-title">Composite Type:</span>' +
+                            '<span class="upload-footer-extension"/>' +
+                            '<span class="upload-footer-extension-info upload-icon-button fa fa-search"/> ' +
+                            '<span class="upload-footer-title">Genome/Build:</span>' +
+                            '<span class="upload-footer-genome"/>' +
+                        '</div>' +
+                        '<div class="upload-box">' +
+                            '<table class="upload-table ui-table-striped" style="display: none;">' +
+                                '<thead>' +
+                                    '<tr>' +
+                                        '<th/>' +
+                                        '<th/>' +
+                                        '<th>Description</th>' +
+                                        '<th>Name</th>' +
+                                        '<th>Size</th>' +
+                                        '<th>Settings</th>' +
+                                        '<th>Status</th>' +
+                                    '</tr>' +
+                                '</thead>' +
+                                '<tbody/>' +
+                            '</table>' +
+                        '</div>' +
+                        '<div class="upload-buttons"/>' +
+                    '</div>';
+        }
+    });
+});
diff --git a/client/galaxy/scripts/mvc/upload/default/default-row.js b/client/galaxy/scripts/mvc/upload/default/default-row.js
new file mode 100644
index 0000000..0525335
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/default/default-row.js
@@ -0,0 +1,232 @@
+/** Renders the default uploader rows */
+define( [ 'utils/utils', 'mvc/upload/upload-model', 'mvc/upload/upload-settings', 'mvc/ui/ui-popover', 'mvc/ui/ui-select' ],
+function( Utils, UploadModel, UploadSettings, Popover, Select ) {
+    return Backbone.View.extend({
+        /** Dictionary of upload states and associated icons */
+        status_classes : {
+            init    : 'upload-icon-button fa fa-trash-o',
+            queued  : 'upload-icon fa fa-spinner fa-spin',
+            running : 'upload-icon fa fa-spinner fa-spin',
+            success : 'upload-icon-button fa fa-check',
+            error   : 'upload-icon-button fa fa-exclamation-triangle'
+        },
+
+        initialize: function( app, options ) {
+            var self = this;
+            this.app = app;
+            this.model = options.model;
+            this.setElement( this._template( options.model ) );
+            this.$mode          = this.$( '.upload-mode' );
+            this.$title         = this.$( '.upload-title' );
+            this.$text          = this.$( '.upload-text' );
+            this.$size          = this.$( '.upload-size' );
+            this.$info_text     = this.$( '.upload-info-text' );
+            this.$info_progress = this.$( '.upload-info-progress' );
+            this.$text_content  = this.$( '.upload-text-content' );
+            this.$settings      = this.$( '.upload-settings' );
+            this.$symbol        = this.$( '.upload-symbol' );
+            this.$progress_bar  = this.$( '.upload-progress-bar' );
+            this.$percentage    = this.$( '.upload-percentage' );
+
+            // append popup to settings icon
+            this.settings = new Popover.View({
+                title       : 'Upload configuration',
+                container   : this.$( '.upload-settings' ),
+                placement   : 'bottom'
+            });
+
+            // identify default genome and extension values
+            var default_genome      = this.app.select_genome.value();
+            var default_extension   = this.app.select_extension.value();
+
+            // create select genomes
+            this.select_genome = new Select.View({
+                css         : 'upload-genome',
+                data        : self.app.list_genomes,
+                container   : this.$( '.upload-genome' ),
+                value       : default_genome,
+                onchange    : function( genome ) { self.model.set( 'genome', genome ) }
+            });
+
+            // create select extension
+            this.select_extension = new Select.View({
+                css         : 'upload-extension',
+                data        : self.app.list_extensions,
+                container   : this.$( '.upload-extension' ),
+                value       : default_extension,
+                onchange    : function( extension ) { self.model.set('extension', extension) }
+            });
+
+            // initialize genome and extension values
+            this.model.set( { 'genome': default_genome, 'extension': default_extension } );
+
+            // handle click event
+            this.$symbol.on('click', function() { self._removeRow(); });
+
+            // handle extension info popover
+            this.$( '.upload-extension-info' ).on( 'click' , function( e ) {
+                self.app.showExtensionInfo({
+                    $el         : $( e.target ),
+                    title       : self.select_extension.text(),
+                    extension   : self.select_extension.value()
+                });
+            }).on( 'mousedown', function( e ) { e.preventDefault() } );
+
+            // handle settings popover
+            this.$settings.on( 'click' ,    function( e ) { self._showSettings() } )
+                          .on( 'mousedown', function( e ) { e.preventDefault() });
+
+            // handle text editing event
+            this.$text_content.on( 'change input', function( e ) {
+                self.model.set( { 'url_paste': $( e.target ).val(),
+                                  'file_size': $( e.target ).val().length } );
+            });
+
+            // model events
+            this.listenTo( this.model, 'change:percentage',  function() { self._refreshPercentage() } );
+            this.listenTo( this.model, 'change:status',      function() { self._refreshStatus() } );
+            this.listenTo( this.model, 'change:info',        function() { self._refreshInfo() } );
+            this.listenTo( this.model, 'change:genome',      function() { self._refreshGenome() } );
+            this.listenTo( this.model, 'change:extension',   function() { self._refreshExtension() } );
+            this.listenTo( this.model, 'change:file_size',   function() { self._refreshFileSize() } );
+            this.listenTo( this.model, 'remove',             function() { self.remove() } );
+            this.app.collection.on('reset', function() { self.remove() } );
+        },
+
+        render: function() {
+            var options = this.model.attributes;
+            this.$title.html( _.escape( options.file_name ) );
+            this.$size.html( Utils.bytesToString ( options.file_size ) );
+            this.$mode.removeClass().addClass( 'upload-mode' ).addClass( 'text-primary' );
+            if ( options.file_mode == 'new' ) {
+                this.$text.css( { 'width' : this.$el.width() - 16 + 'px', 'top'  : this.$el.height() - 8 + 'px' } ).show();
+                this.$el.height( this.$el.height() - 8 + this.$text.height() + 16 );
+                this.$mode.addClass( 'fa fa-edit' );
+            } else if ( options.file_mode == 'local' ) {
+                this.$mode.addClass( 'fa fa-laptop' );
+            } else if ( options.file_mode == 'ftp' ) {
+                this.$mode.addClass( 'fa fa-folder-open-o' );
+            }
+        },
+
+        /** Remove view */
+        remove: function() {
+            this.select_genome.remove();
+            this.select_extension.remove();
+            Backbone.View.prototype.remove.apply( this );
+        },
+
+        /** Update extension */
+        _refreshExtension: function() {
+            this.select_extension.value( this.model.get( 'extension' ) );
+        },
+
+        /** Update genome */
+        _refreshGenome: function() {
+            this.select_genome.value( this.model.get( 'genome' ) );
+        },
+
+        /** Refresh info text */
+        _refreshInfo: function() {
+            var info = this.model.get( 'info' );
+            if ( info ) {
+                this.$info_text.html( '<strong>Failed: </strong>' + info ).show();
+            } else {
+                this.$info_text.hide();
+            }
+        },
+
+        /** Refresh percentage status */
+        _refreshPercentage : function() {
+            var percentage = parseInt( this.model.get( 'percentage' ) );
+            this.$progress_bar.css( { width : percentage + '%' } );
+            this.$percentage.html( percentage != 100 ? percentage + '%' : 'Adding to history...' );
+        },
+
+        /** Refresh status */
+        _refreshStatus : function() {
+            var status = this.model.get( 'status' );
+            this.$symbol.removeClass().addClass( 'upload-symbol' ).addClass( this.status_classes[ status ] );
+            this.model.set( 'enabled', status == 'init' );
+            var enabled = this.model.get( 'enabled' );
+            this.$text_content.attr( 'disabled', !enabled );
+            if ( enabled ) {
+                this.select_genome.enable();
+                this.select_extension.enable();
+            } else {
+                this.select_genome.disable();
+                this.select_extension.disable();
+            }
+            if ( status == 'success' ) {
+                this.$el.addClass( 'success' );
+                this.$percentage.html( '100%' );
+            }
+            if ( status == 'error' ) {
+                this.$el.addClass( 'danger' );
+                this.$info_progress.hide();
+            }
+        },
+
+        /** Refresh file size */
+        _refreshFileSize: function() {
+            this.$size.html( Utils.bytesToString ( this.model.get( 'file_size' ) ) );
+        },
+
+        /** Remove row */
+        _removeRow: function() {
+            if ( [ 'init', 'success', 'error' ].indexOf( this.model.get( 'status' ) ) !== -1 ) {
+                this.app.collection.remove( this.model );
+            }
+        },
+
+        /** Attach file info popup */
+        _showSettings : function() {
+            if ( !this.settings.visible ) {
+                this.settings.empty();
+                this.settings.append( ( new UploadSettings( this ) ).$el );
+                this.settings.show();
+            } else {
+                this.settings.hide();
+            }
+        },
+
+        /** View template */
+        _template: function( options ) {
+            return  '<tr id="upload-row-' + options.id + '" class="upload-row">' +
+                        '<td>' +
+                            '<div class="upload-text-column">' +
+                                '<div class="upload-mode"/>' +
+                                '<div class="upload-title"/>' +
+                                '<div class="upload-text">' +
+                                    '<div class="upload-text-info">You can tell Galaxy to download data from the web by entering URLs in this box (one per line). You can also directly paste the contents of a file.</div>' +
+                                    '<textarea class="upload-text-content form-control"/>' +
+                                '</div>' +
+                            '</div>' +
+                        '</td>' +
+                        '<td>' +
+                            '<div class="upload-size"/>' +
+                        '</td>' +
+                        '<td>' +
+                            '<div class="upload-extension" style="float: left;"/> &nbsp;' +
+                            '<div class="upload-extension-info upload-icon-button fa fa-search"/>' +
+                        '</td>' +
+                        '<td>' +
+                            '<div class="upload-genome"/>' +
+                        '</td>' +
+                        '<td><div class="upload-settings upload-icon-button fa fa-gear"/></td>' +
+                        '<td>' +
+                            '<div class="upload-info">' +
+                                '<div class="upload-info-text"/>' +
+                                '<div class="upload-info-progress progress">' +
+                                    '<div class="upload-progress-bar progress-bar progress-bar-success"/>' +
+                                    '<div class="upload-percentage">0%</div>' +
+                                '</div>' +
+                            '</div>' +
+                        '</td>' +
+                        '<td>' +
+                            '<div class="upload-symbol ' + this.status_classes.init + '"/>' +
+                        '</td>' +
+                    '</tr>';
+        }
+    });
+});
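
A minimal usage sketch for this row view (not part of the commit; the `app` argument is assumed to be shaped like the default view defined next, i.e. providing `options`, `collection` and `showExtensionInfo`):

    require([ 'mvc/upload/upload-model', 'mvc/upload/default/default-row' ],
    function( UploadModel, UploadRow ) {
        // 'app' stands in for the default view below
        var model = new UploadModel.Model({ id: 0, file_name: 'sample.txt', file_size: 120 });
        app.collection.add( model );
        var row = new UploadRow( app, { model: model } );
        $( '.upload-table > tbody:first' ).append( row.$el );
        row.render();
        model.set( 'percentage', 50 );   // triggers _refreshPercentage()
    });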
diff --git a/client/galaxy/scripts/mvc/upload/default/default-view.js b/client/galaxy/scripts/mvc/upload/default/default-view.js
new file mode 100644
index 0000000..c606269
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/default/default-view.js
@@ -0,0 +1,358 @@
+/** Renders contents of the default uploader */
+define([ 'utils/utils', 'mvc/upload/upload-model', 'mvc/upload/default/default-row', 'mvc/upload/upload-ftp', 'mvc/ui/ui-popover', 'mvc/ui/ui-select', 'mvc/ui/ui-misc', 'utils/uploadbox'],
+function( Utils, UploadModel, UploadRow, UploadFtp, Popover, Select, Ui ) {
+    return Backbone.View.extend({
+        // current upload size in bytes
+        upload_size: 0,
+
+        // contains upload row models
+        collection : new UploadModel.Collection(),
+
+        // keeps track of the current uploader state
+        counter : {
+            announce    : 0,
+            success     : 0,
+            error       : 0,
+            running     : 0,
+            reset : function() { this.announce = this.success = this.error = this.running = 0 }
+        },
+
+        initialize : function( app ) {
+            var self = this;
+            this.app                = app;
+            this.options            = app.options;
+            this.list_extensions    = app.list_extensions;
+            this.list_genomes       = app.list_genomes;
+            this.ui_button          = app.ui_button;
+            this.ftp_upload_site    = app.currentFtp();
+            this.setElement( this._template() );
+
+            // append buttons to dom
+            this.btnLocal    = new Ui.Button( { id: 'btn-local', title: 'Choose local file',   onclick: function() { self.uploadbox.select() }, icon: 'fa fa-laptop' } );
+            this.btnFtp      = new Ui.Button( { id: 'btn-ftp',   title: 'Choose FTP file',     onclick: function() { self._eventFtp() }, icon: 'fa fa-folder-open-o' } );
+            this.btnCreate   = new Ui.Button( { id: 'btn-new',   title: 'Paste/Fetch data',    onclick: function() { self._eventCreate() }, icon: 'fa fa-edit' } );
+            this.btnStart    = new Ui.Button( { id: 'btn-start', title: 'Start',               onclick: function() { self._eventStart() } } );
+            this.btnStop     = new Ui.Button( { id: 'btn-stop',  title: 'Pause',               onclick: function() { self._eventStop() } } );
+            this.btnReset    = new Ui.Button( { id: 'btn-reset', title: 'Reset',               onclick: function() { self._eventReset() } } );
+            this.btnClose    = new Ui.Button( { id: 'btn-close', title: 'Close',               onclick: function() { self.app.modal.hide() } } );
+            _.each( [ this.btnLocal, this.btnFtp, this.btnCreate, this.btnStop, this.btnReset, this.btnStart, this.btnClose ], function( button ) {
+                self.$( '.upload-buttons' ).prepend( button.$el );
+            });
+
+            // file upload
+            this.uploadbox = this.$( '.upload-box' ).uploadbox({
+                url             : this.app.options.nginx_upload_path,
+                announce        : function( index, file )       { self._eventAnnounce( index, file ) },
+                initialize      : function( index )             { return self.app.toData( [ self.collection.get( index ) ], self.history_id ) },
+                progress        : function( index, percentage ) { self._eventProgress( index, percentage ) },
+                success         : function( index, message )    { self._eventSuccess( index, message ) },
+                error           : function( index, message )    { self._eventError( index, message ) },
+                complete        : function()                    { self._eventComplete() },
+                ondragover      : function()                    { self.$( '.upload-box' ).addClass( 'highlight' ) },
+                ondragleave     : function()                    { self.$( '.upload-box' ).removeClass( 'highlight' ) }
+            });
+
+            // add ftp file viewer
+            this.ftp = new Popover.View( { title: 'FTP files', container: this.btnFtp.$el } );
+
+            // select extension
+            this.select_extension = new Select.View({
+                css         : 'upload-footer-selection',
+                container   : this.$( '.upload-footer-extension' ),
+                data        : _.filter( this.list_extensions, function( ext ) { return !ext.composite_files } ),
+                value       : this.options.default_extension,
+                onchange    : function( extension ) { self.updateExtension( extension ) }
+            });
+
+            // handle extension info popover
+            this.$( '.upload-footer-extension-info' ).on( 'click', function( e ) {
+                self.showExtensionInfo({
+                    $el         : $( e.target ),
+                    title       : self.select_extension.text(),
+                    extension   : self.select_extension.value(),
+                    placement   : 'top'
+                });
+            }).on( 'mousedown', function( e ) { e.preventDefault() } );
+
+            // genome extension
+            this.select_genome = new Select.View({
+                css         : 'upload-footer-selection',
+                container   : this.$( '.upload-footer-genome' ),
+                data        : this.list_genomes,
+                value       : this.options.default_genome,
+                onchange    : function( genome ) { self.updateGenome(genome) }
+            });
+
+            // events
+            this.collection.on( 'remove', function( model ) { self._eventRemove( model ) } );
+            this._updateScreen();
+        },
+
+        /** A new file has been dropped/selected through the uploadbox plugin */
+        _eventAnnounce: function( index, file ) {
+            this.counter.announce++;
+            var new_model = new UploadModel.Model({
+                id          : index,
+                file_name   : file.name,
+                file_size   : file.size,
+                file_mode   : file.mode || 'local',
+                file_path   : file.path,
+                file_data   : file
+            });
+            this.collection.add( new_model );
+            var upload_row = new UploadRow( this, { model: new_model } );
+            this.$( '.upload-table > tbody:first' ).append( upload_row.$el );
+            this._updateScreen();
+            upload_row.render();
+        },
+
+        /** Progress */
+        _eventProgress: function( index, percentage ) {
+            var it = this.collection.get( index );
+            it.set( 'percentage', percentage );
+            this.ui_button.model.set( 'percentage', this._uploadPercentage( percentage, it.get( 'file_size' ) ) );
+        },
+
+        /** Success */
+        _eventSuccess: function( index, message ) {
+            var it = this.collection.get( index );
+            it.set( { 'percentage': 100, 'status': 'success' } );
+            this.ui_button.model.set( 'percentage', this._uploadPercentage( 100, it.get( 'file_size' ) ) );
+            this.upload_completed += it.get( 'file_size' ) * 100;
+            this.counter.announce--;
+            this.counter.success++;
+            this._updateScreen();
+            Galaxy.currHistoryPanel.refreshContents();
+        },
+
+        /** Error */
+        _eventError: function( index, message ) {
+            var it = this.collection.get( index );
+            it.set( { 'percentage': 100, 'status': 'error', 'info': message } );
+            this.ui_button.model.set( { 'percentage': this._uploadPercentage( 100, it.get( 'file_size' ) ), 'status': 'danger' } );
+            this.upload_completed += it.get( 'file_size' ) * 100;
+            this.counter.announce--;
+            this.counter.error++;
+            this._updateScreen();
+        },
+
+        /** Queue is done */
+        _eventComplete: function() {
+            this.collection.each( function( model ) { model.get( 'status' ) == 'queued' && model.set( 'status', 'init' ) } );
+            this.counter.running = 0;
+            this._updateScreen();
+        },
+
+        /** Remove model from upload list */
+        _eventRemove: function( model ) {
+            var status = model.get( 'status' );
+            if ( status == 'success' ) {
+                this.counter.success--;
+            } else if ( status == 'error' ) {
+                this.counter.error--;
+            } else {
+                this.counter.announce--;
+            }
+            this.uploadbox.remove( model.id );
+            this._updateScreen();
+        },
+
+        //
+        // events triggered by this view
+        //
+
+        /** [public] display extension info popup */
+        showExtensionInfo: function( options ) {
+            var self = this;
+            var $el = options.$el;
+            var extension = options.extension;
+            var title = options.title;
+            var description = _.findWhere( self.list_extensions, { 'id': extension } );
+            this.extension_popup && this.extension_popup.remove();
+            this.extension_popup = new Popover.View({ placement: options.placement || 'bottom', container: $el } );
+            this.extension_popup.title( title );
+            this.extension_popup.empty();
+            this.extension_popup.append( this._templateDescription( description ) );
+            this.extension_popup.show();
+        },
+
+        /** Show/hide ftp popup */
+        _eventFtp: function() {
+            if ( !this.ftp.visible ) {
+                this.ftp.empty();
+                var self = this;
+                this.ftp.append( ( new UploadFtp({
+                    collection      : this.collection,
+                    ftp_upload_site : this.ftp_upload_site,
+                    onadd           : function( ftp_file ) {
+                        self.uploadbox.add([{
+                            mode: 'ftp',
+                            name: ftp_file.path,
+                            size: ftp_file.size,
+                            path: ftp_file.path
+                        }]);
+                    },
+                    onremove: function( model_index ) {
+                        self.collection.remove( model_index );
+                    }
+                } ) ).$el );
+                this.ftp.show();
+            } else {
+                this.ftp.hide();
+            }
+        },
+
+        /** Create a new file */
+        _eventCreate: function (){
+            this.uploadbox.add( [ { name: 'New File', size: 0, mode: 'new' } ] );
+        },
+
+        /** Start upload process */
+        _eventStart: function() {
+            if ( this.counter.announce == 0 || this.counter.running > 0 ) {
+                return;
+            }
+            var self = this;
+            this.upload_size = 0;
+            this.upload_completed = 0;
+            this.collection.each( function( model ) {
+                if( model.get( 'status' ) == 'init' ) {
+                    model.set( 'status', 'queued' );
+                    self.upload_size += model.get( 'file_size' );
+                }
+            });
+            this.ui_button.model.set( { 'percentage': 0, 'status': 'success' } );
+            this.counter.running = this.counter.announce;
+            this.history_id = this.app.currentHistory();
+            this.uploadbox.start();
+            this._updateScreen();
+        },
+
+        /** Pause upload process */
+        _eventStop: function() {
+            if ( this.counter.running > 0 ) {
+                this.ui_button.model.set( 'status', 'info' );
+                $( '.upload-top-info' ).html( 'Queue will pause after completing the current file...' );
+                this.uploadbox.stop();
+            }
+        },
+
+        /** Remove all */
+        _eventReset: function() {
+            if ( this.counter.running == 0 ){
+                this.collection.reset();
+                this.counter.reset();
+                this.uploadbox.reset();
+                this.select_extension.value( this.options.default_extension );
+                this.select_genome.value( this.options.default_genome );
+                this.ui_button.model.set( 'percentage', 0 );
+                this._updateScreen();
+            }
+        },
+
+        /** Update extension for all models */
+        updateExtension: function( extension, defaults_only ) {
+            var self = this;
+            this.collection.each( function( model ) {
+                if ( model.get( 'status' ) == 'init' && ( model.get( 'extension' ) == self.options.default_extension || !defaults_only ) ) {
+                    model.set( 'extension', extension );
+                }
+            });
+        },
+
+        /** Update genome for all models */
+        updateGenome: function( genome, defaults_only ) {
+            var self = this;
+            this.collection.each( function( model ) {
+                if ( model.get( 'status' ) == 'init' && ( model.get( 'genome' ) == self.options.default_genome || !defaults_only ) ) {
+                    model.set( 'genome', genome );
+                }
+            });
+        },
+
+        /** Set screen */
+        _updateScreen: function () {
+            var message = '';
+            if( this.counter.announce == 0 ) {
+                if (this.uploadbox.compatible()) {
+                    message = ' ';
+                } else {
+                    message = 'Your browser does not support Drag & Drop. Try Firefox 4+, Chrome 7+, IE 10+, Opera 12+ or Safari 6+.';
+                }
+            } else {
+                if ( this.counter.running == 0 ) {
+                    message = 'You added ' + this.counter.announce + ' file(s) to the queue. Add more files or click \'Start\' to proceed.';
+                } else {
+                    message = 'Please wait... ' + this.counter.announce + ' out of ' + this.counter.running + ' remaining.';
+                }
+            }
+            this.$( '.upload-top-info' ).html( message );
+            var enable_reset = this.counter.running == 0 && this.counter.announce + this.counter.success + this.counter.error > 0;
+            var enable_start = this.counter.running == 0 && this.counter.announce > 0;
+            var enable_sources = this.counter.running == 0;
+            var show_table = this.counter.announce + this.counter.success + this.counter.error > 0;
+            this.btnReset[ enable_reset ? 'enable' : 'disable' ]();
+            this.btnStart[ enable_start ? 'enable' : 'disable' ]();
+            this.btnStart.$el[ enable_start ? 'addClass' : 'removeClass' ]( 'btn-primary' );
+            this.btnStop[ this.counter.running > 0 ? 'enable' : 'disable' ]();
+            this.btnLocal[ enable_sources ? 'enable' : 'disable' ]();
+            this.btnFtp[ enable_sources ? 'enable' : 'disable' ]();
+            this.btnCreate[ enable_sources ? 'enable' : 'disable' ]();
+            this.btnFtp.$el[ this.ftp_upload_site ? 'show' : 'hide' ]();
+            this.$( '.upload-table' )[ show_table ? 'show' : 'hide' ]();
+            this.$( '.upload-helper' )[ show_table ? 'hide' : 'show' ]();
+        },
+
+        /** Calculate percentage of all queued uploads */
+        _uploadPercentage: function( percentage, size ) {
+            return ( this.upload_completed + ( percentage * size ) ) / this.upload_size;
+        },
+
+        /** Template for extensions description */
+        _templateDescription: function( options ) {
+            if ( options.description ) {
+                var tmpl = options.description;
+                if ( options.description_url ) {
+                    tmpl += ' (<a href="' + options.description_url + '" target="_blank">read more</a>)';
+                }
+                return tmpl;
+            } else {
+                return 'There is no description available for this file extension.';
+            }
+        },
+
+        /** Template */
+        _template: function() {
+            return  '<div class="upload-view-default">' +
+                        '<div class="upload-top">' +
+                            '<h6 class="upload-top-info"/>' +
+                        '</div>' +
+                        '<div class="upload-box">' +
+                            '<div class="upload-helper"><i class="fa fa-files-o"/>Drop files here</div>' +
+                            '<table class="upload-table ui-table-striped" style="display: none;">' +
+                                '<thead>' +
+                                    '<tr>' +
+                                        '<th>Name</th>' +
+                                        '<th>Size</th>' +
+                                        '<th>Type</th>' +
+                                        '<th>Genome</th>' +
+                                        '<th>Settings</th>' +
+                                        '<th>Status</th>' +
+                                        '<th/>' +
+                                    '</tr>' +
+                                '</thead>' +
+                                '<tbody/>' +
+                            '</table>' +
+                        '</div>' +
+                        '<div class="upload-footer">' +
+                            '<span class="upload-footer-title">Type (set all):</span>' +
+                            '<span class="upload-footer-extension"/>' +
+                            '<span class="upload-footer-extension-info upload-icon-button fa fa-search"/> ' +
+                            '<span class="upload-footer-title">Genome (set all):</span>' +
+                            '<span class="upload-footer-genome"/>' +
+                        '</div>' +
+                        '<div class="upload-buttons"/>' +
+                    '</div>';
+        }
+    });
+});
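
A worked example (illustrative only) of the size-weighted progress computed by `_uploadPercentage()`:

    // Two queued files of 100 and 300 bytes, so upload_size = 400.
    // When the first file finishes, _eventSuccess adds
    //     upload_completed += 100 * 100            // file_size * 100
    // While the second file is at 50%, the button percentage is
    //     ( 10000 + 50 * 300 ) / 400 = 62.5
    // which matches the byte-weighted progress: ( 100 + 150 ) of 400 bytes done.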
diff --git a/client/galaxy/scripts/mvc/upload/upload-button.js b/client/galaxy/scripts/mvc/upload/upload-button.js
new file mode 100644
index 0000000..8e96175
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/upload-button.js
@@ -0,0 +1,49 @@
+/** View for upload/progress bar button */
+define( [], function() {
+    var View = Backbone.View.extend({
+        initialize: function( options ) {
+            var self = this;
+            this.model = options && options.model || new Backbone.Model({
+                icon        : 'fa-upload',
+                tooltip     : 'Download from URL or upload files from disk',
+                label       : 'Load Data',
+                percentage  : 0,
+                status      : '',
+                onunload    : function(){},
+                onclick     : function(){}
+            }).set( options );
+            this.setElement( this._template() );
+            this.$progress = this.$( '.progress-bar' );
+            this.listenTo( this.model, 'change', this.render, this );
+            this.render();
+            $( window ).on( 'beforeunload', function() {
+                return self.model.get( 'onunload' )();
+            });
+        },
+
+        render: function() {
+            var self = this;
+            var options = this.model.attributes;
+            this.$el.off( 'click' ).on( 'click', function( e ) { options.onclick( e ) } )
+                    .tooltip( { title: this.model.get('tooltip'), placement: 'bottom' } );
+            this.$progress.removeClass()
+                          .addClass( 'progress-bar' )
+                          .addClass( 'progress-bar-notransition' )
+                          .addClass( options.status != '' && 'progress-bar-' + options.status )
+                          .css( { width : options.percentage + '%' } );
+        },
+
+        /** Template */
+        _template: function() {
+            return  '<div class="upload-button">' +
+                        '<div class="progress">' +
+                            '<div class="progress-bar"/>' +
+                            '<a class="panel-header-button" href="javascript:void(0)" id="tool-panel-upload-button">' +
+                                '<span class="fa fa-upload"/>' +
+                            '</a>' +
+                        '</div>' +
+                    '</div>';
+        }
+    });
+    return { View : View };
+});
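
A minimal sketch of wiring the button into a panel (the container selector is an assumption, not taken from this commit):

    require([ 'mvc/upload/upload-button' ], function( UploadButton ) {
        var button = new UploadButton.View({
            onclick : function( e ) { e.preventDefault(); console.log( 'open upload modal' ); }
        });
        $( '.panel-header-buttons' ).append( button.$el );              // assumed container
        button.model.set( { 'percentage': 40, 'status': 'success' } );  // re-renders the bar
    });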
diff --git a/client/galaxy/scripts/mvc/upload/upload-ftp.js b/client/galaxy/scripts/mvc/upload/upload-ftp.js
new file mode 100644
index 0000000..d7eaefc
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/upload-ftp.js
@@ -0,0 +1,153 @@
+/** This renders the content of the ftp popup **/
+define( [ 'utils/utils' ], function( Utils ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            var self = this;
+            this.options = Utils.merge( options, {
+                class_add       : 'upload-icon-button fa fa-square-o',
+                class_remove    : 'upload-icon-button fa fa-check-square-o',
+                class_partial   : 'upload-icon-button fa fa-minus-square-o',
+                collection      : null,
+                onchange        : function() {},
+                onadd           : function() {},
+                onremove        : function() {}
+            } );
+            this.collection = this.options.collection;
+            this.setElement( this._template() );
+            this.rows = [];
+            Utils.get({
+                url     : Galaxy.root + 'api/remote_files',
+                success : function( ftp_files ) { self._fill( ftp_files ) },
+                error   : function() { self._fill(); }
+            });
+        },
+
+        /** Fill table with ftp entries */
+        _fill: function( ftp_files ) {
+            if ( ftp_files && ftp_files.length > 0 ) {
+                this.$( '.upload-ftp-content' ).html( $( this._templateTable() ) );
+                var size = 0;
+                for ( var index in ftp_files ) {
+                    this.rows.push( this._add( ftp_files[ index ] ) );
+                    size += ftp_files[ index ].size;
+                }
+                this.$( '.upload-ftp-number' ).html( ftp_files.length + ' files' );
+                this.$( '.upload-ftp-disk' ).html( Utils.bytesToString ( size, true ) );
+                if ( this.collection ) {
+                    var self = this;
+                    this.$( '._has_collection' ).show();
+                    this.$select_all = this.$( '.upload-selectall' ).addClass( this.options.class_add );
+                    this.$select_all.on( 'click', function() {
+                        var add = self.$select_all.hasClass( self.options.class_add );
+                        for ( var index in ftp_files ) {
+                            var ftp_file = ftp_files[ index ];
+                            var model_index = self._find( ftp_file );
+                            if( !model_index && add || model_index && !add ) {
+                                self.rows[ index ].trigger( 'click' );
+                            }
+                        }
+                    });
+                    this._refresh();
+                }
+            } else {
+                this.$( '.upload-ftp-content' ).html( $( this._templateInfo() ) );
+            }
+            this.$( '.upload-ftp-wait' ).hide();
+        },
+
+        /** Add file to table */
+        _add: function( ftp_file ) {
+            var self = this;
+            var $it = $( this._templateRow( ftp_file ) );
+            var $icon = $it.find( '.icon' );
+            this.$( 'tbody' ).append( $it );
+            if ( this.collection ) {
+                $icon.addClass( this._find( ftp_file ) ? this.options.class_remove : this.options.class_add );
+                $it.on('click', function() {
+                    var model_index = self._find( ftp_file );
+                    $icon.removeClass();
+                    if ( !model_index ) {
+                        self.options.onadd( ftp_file );
+                        $icon.addClass( self.options.class_remove );
+                    } else {
+                        self.options.onremove( model_index );
+                        $icon.addClass( self.options.class_add );
+                    }
+                    self._refresh();
+                });
+            } else {
+                $it.on('click', function() { self.options.onchange( ftp_file ) } );
+            }
+            return $it;
+        },
+
+        /** Refresh select all button state */
+        _refresh: function() {
+            var filtered = this.collection.where( { file_mode: 'ftp', enabled: true } );
+            this.$select_all.removeClass();
+            if ( filtered.length == 0 ) {
+                this.$select_all.addClass( this.options.class_add );
+            } else {
+                this.$select_all.addClass( filtered.length == this.rows.length ? this.options.class_remove : this.options.class_partial );
+            }
+        },
+
+        /** Get model index */
+        _find: function( ftp_file ) {
+            var item = this.collection.findWhere({
+                file_path   : ftp_file.path,
+                file_mode   : 'ftp',
+                enabled     : true
+            });
+            return item && item.get('id');
+        },
+
+        /** Template of row */
+        _templateRow: function( options ) {
+            return  '<tr class="upload-ftp-row">' +
+                        '<td class="_has_collection" style="display: none;"><div class="icon"/></td>' +
+                        '<td class="ftp-name">' + options.path + '</td>' +
+                        '<td class="ftp-size">' + Utils.bytesToString( options.size ) + '</td>' +
+                        '<td class="ftp-time">' + options.ctime + '</td>' +
+                    '</tr>';
+        },
+
+        /** Template of table */
+        _templateTable: function() {
+            return  '<span style="white-space: nowrap; float: left;">Available files: </span>' +
+                    '<span style="white-space: nowrap; float: right;">' +
+                        '<span class="upload-icon fa fa-file-text-o"/>' +
+                        '<span class="upload-ftp-number"/>  ' +
+                        '<span class="upload-icon fa fa-hdd-o"/>' +
+                        '<span class="upload-ftp-disk"/>' +
+                    '</span>' +
+                    '<table class="grid" style="float: left;">' +
+                        '<thead>' +
+                            '<tr>' +
+                                '<th class="_has_collection" style="display: none;"><div class="upload-selectall"/></th>' +
+                                '<th>Name</th>' +
+                                '<th>Size</th>' +
+                                '<th>Created</th>' +
+                            '</tr>' +
+                        '</thead>' +
+                        '<tbody/>' +
+                    '</table>';
+        },
+
+        /** Template of info message */
+        _templateInfo: function() {
+            return  '<div class="upload-ftp-warning warningmessage">' +
+                        'Your FTP directory does not contain any files.' +
+                    '</div>';
+        },
+
+        /** Template of main view */
+        _template: function() {
+            return  '<div class="upload-ftp">' +
+                        '<div class="upload-ftp-wait fa fa-spinner fa-spin"/>' +
+                        '<div class="upload-ftp-help">This Galaxy server allows you to upload files via FTP. To upload some files, log in to the FTP server at <strong>' + this.options.ftp_upload_site + '</strong> using your Galaxy credentials (email address and password).</div>' +
+                        '<div class="upload-ftp-content"/>' +
+                    '</div>';
+        }
+    });
+});
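
A minimal sketch of this popover content in stand-alone mode (no collection, so row clicks fire `onchange`; the host name is a placeholder):

    require([ 'mvc/upload/upload-ftp' ], function( UploadFtp ) {
        var ftp = new UploadFtp({
            ftp_upload_site : 'ftp.example.org',    // placeholder host
            onchange        : function( ftp_file ) { console.log( 'picked', ftp_file.path ); }
        });
        $( 'body' ).append( ftp.$el );              // normally appended to a Popover
    });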
diff --git a/client/galaxy/scripts/mvc/upload/upload-model.js b/client/galaxy/scripts/mvc/upload/upload-model.js
new file mode 100644
index 0000000..7b754cd
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/upload-model.js
@@ -0,0 +1,26 @@
+define( [], function() {
+    var Model = Backbone.Model.extend({
+        defaults: {
+            extension       : 'auto',
+            genome          : '?',
+            url_paste       : '',
+            status          : 'init',
+            info            : null,
+            file_name       : '',
+            file_mode       : '',
+            file_size       : 0,
+            file_type       : null,
+            file_path       : '',
+            file_data       : null,
+            percentage      : 0,
+            space_to_tab    : false,
+            to_posix_lines  : true,
+            enabled         : true
+        },
+        reset: function( attr ) {
+            this.clear().set( this.defaults ).set( attr );
+        }
+    });
+    var Collection = Backbone.Collection.extend( { model: Model } );
+    return { Model: Model, Collection : Collection };
+});
\ No newline at end of file
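
A short sketch of the model's `reset()` behavior:

    require([ 'mvc/upload/upload-model' ], function( UploadModel ) {
        var model = new UploadModel.Model({ file_name: 'a.bed', file_size: 42 });
        model.set( 'status', 'running' );
        model.reset( { file_name: 'a.bed' } );   // back to defaults, keeping the name
        console.log( model.get( 'status' ) );    // 'init'
    });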
diff --git a/client/galaxy/scripts/mvc/upload/upload-settings.js b/client/galaxy/scripts/mvc/upload/upload-settings.js
new file mode 100644
index 0000000..9d44588
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/upload-settings.js
@@ -0,0 +1,43 @@
+/** This renders the content of the settings popup, allowing users to specify flags, e.g. for space-to-tab conversion **/
+define( [ 'utils/utils' ], function( Utils ) {
+    return Backbone.View.extend({
+        options: {
+            class_check     : 'fa-check-square-o',
+            class_uncheck   : 'fa-square-o',
+            parameters      : [{
+                id          : 'space_to_tab',
+                title       : 'Convert spaces to tabs'
+            },{
+                id          : 'to_posix_lines',
+                title       : 'Use POSIX standard'
+            }]
+        },
+
+        initialize: function( options ) {
+            var self = this;
+            this.model = options.model;
+            this.setElement( $( '<div/>' ).addClass( 'upload-settings' ) );
+            this.$el.append( $( '<div/>' ).addClass( 'upload-settings-cover' ) );
+            this.$el.append( $( '<table/>' ).addClass( 'upload-settings-table ui-table-striped' ).append( '<tbody/>' ) );
+            this.$cover = this.$( '.upload-settings-cover' );
+            this.$table = this.$( '.upload-settings-table > tbody' );
+            this.listenTo ( this.model, 'change', this.render, this );
+            this.model.trigger( 'change' );
+        },
+
+        render: function() {
+            var self = this;
+            this.$table.empty();
+            _.each( this.options.parameters, function( parameter ) {
+                var $checkbox = $( '<div/>' ).addClass( 'upload-' + parameter.id + ' upload-icon-button fa' )
+                                             .addClass( self.model.get( parameter.id ) && self.options.class_check || self.options.class_uncheck )
+                                             .on( 'click', function() {
+                                                self.model.get( 'enabled' ) && self.model.set( parameter.id, !self.model.get( parameter.id ) )
+                                             });
+                self.$table.append( $( '<tr/>' ).append( $( '<td/>' ).append( $checkbox ) )
+                                                .append( $( '<td/>' ).append( parameter.title ) ) )
+            });
+            this.$cover[ this.model.get( 'enabled' ) && 'hide' || 'show' ]();
+        }
+    });
+});
\ No newline at end of file
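
A minimal sketch pairing the settings view with an upload model; clicking a checkbox toggles the corresponding flag while the model is enabled:

    require([ 'mvc/upload/upload-model', 'mvc/upload/upload-settings' ],
    function( UploadModel, UploadSettings ) {
        var model = new UploadModel.Model();        // space_to_tab: false, to_posix_lines: true
        var settings = new UploadSettings({ model: model });
        $( 'body' ).append( settings.$el );
        model.set( 'enabled', false );              // re-renders and shows the cover
    });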
diff --git a/client/galaxy/scripts/mvc/upload/upload-view.js b/client/galaxy/scripts/mvc/upload/upload-view.js
new file mode 100644
index 0000000..4d10287
--- /dev/null
+++ b/client/galaxy/scripts/mvc/upload/upload-view.js
@@ -0,0 +1,184 @@
+/** Upload app contains the upload progress button and upload modal, and compiles model data for the API request **/
+define([ 'utils/utils', 'mvc/ui/ui-modal', 'mvc/ui/ui-tabs', 'mvc/upload/upload-button', 'mvc/upload/default/default-view', 'mvc/upload/composite/composite-view'],
+function( Utils, Modal, Tabs, UploadButton, UploadViewDefault, UploadViewComposite ) {
+    return Backbone.View.extend({
+        options : {
+            nginx_upload_path   : '',
+            ftp_upload_site     : 'n/a',
+            default_genome      : '?',
+            default_extension   : 'auto',
+            height              : 500,
+            width               : 900,
+            auto                : {
+                id          : 'auto',
+                text        : 'Auto-detect',
+                description : 'This system will try to detect the file type automatically. If your file is not detected properly as one of the known formats, it most likely means that it has some format problems (e.g., different number of columns on different rows). You can still coerce the system to set your data to the format you think it should be.  You can also upload compressed files, which will automatically be decompressed.'
+            }
+        },
+
+        // contains all available dataset extensions/types
+        list_extensions: [],
+
+        // contains all available genomes
+        list_genomes: [],
+
+        initialize: function( options ) {
+            var self = this;
+            this.options = Utils.merge( options, this.options );
+
+            // create view for upload/progress button
+            this.ui_button = new UploadButton.View({
+                onclick     : function(e) {
+                    e.preventDefault();
+                    self.show()
+                },
+                onunload    : function() {
+                    var percentage = self.ui_button.model.get('percentage') || 0;
+                    if (percentage > 0 && percentage < 100) {
+                        return 'Several uploads are queued.';
+                    }
+                }
+            });
+
+            // set element to button view
+            this.setElement( this.ui_button.$el );
+
+            // load extensions
+            Utils.get({
+                url     : Galaxy.root + 'api/datatypes?extension_only=False',
+                success : function( datatypes ) {
+                    for ( var key in datatypes ) {
+                        self.list_extensions.push({
+                            id              : datatypes[ key ].extension,
+                            text            : datatypes[ key ].extension,
+                            description     : datatypes[ key ].description,
+                            description_url : datatypes[ key ].description_url,
+                            composite_files : datatypes[ key ].composite_files
+                        });
+                    }
+                    self.list_extensions.sort( function( a, b ) {
+                        var a_text = a.text && a.text.toLowerCase();
+                        var b_text = b.text && b.text.toLowerCase();
+                        return a_text > b_text ? 1 : a_text < b_text ? -1 : 0;
+                    });
+                    if ( !self.options.datatypes_disable_auto ) {
+                        self.list_extensions.unshift( self.options.auto );
+                    }
+                }
+            });
+
+            // load genomes
+            Utils.get({
+                url     : Galaxy.root + 'api/genomes',
+                success : function( genomes ) {
+                    for ( var key in genomes ) {
+                        self.list_genomes.push({
+                            id      : genomes[ key ][ 1 ],
+                            text    : genomes[ key ][ 0 ]
+                        });
+                    }
+                    self.list_genomes.sort( function( a, b ) {
+                        if ( a.id == self.options.default_genome ) { return -1; }
+                        if ( b.id == self.options.default_genome ) { return 1; }
+                        return a.text > b.text ? 1 : a.text < b.text ? -1 : 0;
+                    });
+                }
+            });
+        },
+
+        /** Show/hide upload dialog */
+        show: function () {
+            var self = this;
+            if ( !Galaxy.currHistoryPanel || !Galaxy.currHistoryPanel.model ) {
+                window.setTimeout(function() { self.show() }, 500)
+                return;
+            }
+            this.current_user = Galaxy.user.id;
+            if ( !this.modal ) {
+                this.tabs = new Tabs.View();
+                this.default_view = new UploadViewDefault( this );
+                this.tabs.add({
+                    id      : 'regular',
+                    title   : 'Regular',
+                    $el     : this.default_view.$el
+                });
+                this.composite_view = new UploadViewComposite( this );
+                this.tabs.add({
+                    id      : 'composite',
+                    title   : 'Composite',
+                    $el     : this.composite_view.$el
+                });
+                this.modal = new Modal.View({
+                    title           : 'Download from web or upload from disk',
+                    body            : this.tabs.$el,
+                    height          : this.options.height,
+                    width           : this.options.width,
+                    closing_events  : true,
+                    title_separator : false
+                });
+            }
+            this.modal.show();
+        },
+
+        /** Refresh user and current history */
+        currentHistory: function() {
+            return this.current_user && Galaxy.currHistoryPanel.model.get( 'id' );
+        },
+
+        /** Get ftp configuration */
+        currentFtp: function() {
+            return this.current_user && this.options.ftp_upload_site;
+        },
+
+        /**
+         * Package API data from an array of models
+         * @param {Array} items - Upload items/rows filtered from a collection
+         */
+        toData: function( items, history_id ) {
+            // create dictionary for data submission
+            var data = {
+                payload: {
+                    'tool_id'       : 'upload1',
+                    'history_id'    : history_id || this.currentHistory(),
+                    'inputs'        : {}
+                },
+                files: [],
+                error_message: null
+            };
+            // add upload tools input data
+            if ( items && items.length > 0 ) {
+                var inputs = {};
+                inputs[ 'dbkey' ] = items[0].get( 'genome', null );
+                inputs[ 'file_type' ] = items[0].get( 'extension', null );
+                for ( var index in items ) {
+                    var it = items[ index ];
+                    it.set( 'status', 'running' );
+                    if ( it.get( 'file_size' ) > 0 ) {
+                        var prefix = 'files_' + index + '|';
+                        inputs[ prefix + 'type' ] = 'upload_dataset';
+                        inputs[ prefix + 'space_to_tab' ] = it.get( 'space_to_tab' ) && 'Yes' || null;
+                        inputs[ prefix + 'to_posix_lines' ] = it.get( 'to_posix_lines' ) && 'Yes' || null;
+                        switch ( it.get( 'file_mode' ) ) {
+                            case 'new':
+                                inputs[ prefix + 'url_paste' ] = it.get( 'url_paste' );
+                                break;
+                            case 'ftp':
+                                inputs[ prefix + 'ftp_files' ] = it.get( 'file_path' );
+                                break;
+                            case 'local':
+                                data.files.push( { name: prefix + 'file_data', file: it.get( 'file_data' ) } );
+                        }
+                    } else {
+                        data.error_message = 'Upload content incomplete.';
+                        it.set( 'status', 'error' );
+                        it.set( 'info', data.error_message );
+                        break;
+                    }
+                }
+                data.payload.inputs = JSON.stringify( inputs );
+            }
+            return data;
+        }
+    });
+});
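
An illustrative sketch of the structure `toData()` returns for a single local file (field values abbreviated; note the inputs dictionary is serialized to a JSON string):

    // {
    //     payload : {
    //         tool_id    : 'upload1',
    //         history_id : '<current history id>',
    //         inputs     : '{"dbkey":"?","file_type":"auto",
    //                        "files_0|type":"upload_dataset",
    //                        "files_0|to_posix_lines":"Yes", ... }'
    //     },
    //     files : [ { name : 'files_0|file_data', file : <File> } ],
    //     error_message : null
    // }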
diff --git a/client/galaxy/scripts/mvc/user/user-model.js b/client/galaxy/scripts/mvc/user/user-model.js
new file mode 100644
index 0000000..6e6a0ac
--- /dev/null
+++ b/client/galaxy/scripts/mvc/user/user-model.js
@@ -0,0 +1,127 @@
+define([
+    'libs/underscore',
+    'libs/backbone',
+    'mvc/base-mvc',
+    'utils/localization'
+], function( _, Backbone, baseMVC, _l ){
+'use strict';
+
+var logNamespace = 'user';
+//==============================================================================
+/** @class Model for a Galaxy user (including anonymous users).
+ *  @name User
+ */
+var User = Backbone.Model.extend( baseMVC.LoggableMixin ).extend(
+/** @lends User.prototype */{
+    _logNamespace : logNamespace,
+
+    /** API location for this resource */
+    urlRoot : function(){ return Galaxy.root + 'api/users'; },
+
+    /** Model defaults
+     *  Note: don't check for anon-users with the username as the default is '(anonymous user)'
+     *      a safer method is if( !user.get( 'email' ) ) -> anon user
+     */
+    defaults : /** @lends User.prototype */{
+        id                      : null,
+        username                : '(' + _l( "anonymous user" ) + ')',
+        email                   : "",
+        total_disk_usage        : 0,
+        nice_total_disk_usage   : "",
+        quota_percent           : null,
+        is_admin                : false
+    },
+
+    /** Set up and bind events
+     *  @param {Object} data Initial model data.
+     */
+    initialize : function( data ){
+        this.log( 'User.initialize:', data );
+
+        this.on( 'loaded', function( model, resp ){ this.log( this + ' has loaded:', model, resp ); });
+        this.on( 'change', function( model, data ){ this.log( this + ' has changed:', model, data.changes ); });
+    },
+
+    isAnonymous : function(){
+        return ( !this.get( 'email' ) );
+    },
+
+    isAdmin : function(){
+        return ( this.get( 'is_admin' ) );
+    },
+
+    /** Load a user with the API using an id.
+     *      If getting an anonymous user or no access to a user id, pass the User.CURRENT_ID_STR
+     *      (e.g. 'current') and the API will return the current transaction's user data.
+     *  @param {String} idOrCurrent encoded user id or the User.CURRENT_ID_STR
+     *  @param {Object} options hash to pass to Backbone.Model.fetch. Can contain success, error fns.
+     *  @fires loaded when the model has been loaded from the API, passing the newModel and AJAX response.
+     */
+    loadFromApi : function( idOrCurrent, options ){
+        idOrCurrent = idOrCurrent || User.CURRENT_ID_STR;
+
+        options = options || {};
+        var model = this,
+            userFn = options.success;
+
+        /** @ignore */
+        options.success = function( newModel, response ){
+            model.trigger( 'loaded', newModel, response );
+            if( userFn ){ userFn( newModel, response ); }
+        };
+
+        // requests for the current user must use a separately constructed url (a plain fetch won't work)
+        if( idOrCurrent === User.CURRENT_ID_STR ){
+            options.url = this.urlRoot + '/' + User.CURRENT_ID_STR;
+        }
+        return Backbone.Model.prototype.fetch.call( this, options );
+    },
+
+    /** Clears all data from the sessionStorage.
+     */
+    clearSessionStorage : function(){
+        for( var key in sessionStorage ){
+            //TODO: store these under the user key so we don't have to do this
+            // currently only history
+            if( key.indexOf( 'history:' ) === 0 ){
+                sessionStorage.removeItem( key );
+
+            } else if( key === 'history-panel' ){
+                sessionStorage.removeItem( key );
+            }
+        }
+    },
+
+    /** string representation */
+    toString : function(){
+        var userInfo = [ this.get( 'username' ) ];
+        if( this.get( 'id' ) ){
+            userInfo.unshift( this.get( 'id' ) );
+            userInfo.push( this.get( 'email' ) );
+        }
+        return 'User(' + userInfo.join( ':' ) + ')';
+    }
+});
+
+// string to send to tell server to return this transaction's user (see api/users.py)
+User.CURRENT_ID_STR = 'current';
+
+// class method to load the current user via the api and return that model
+User.getCurrentUserFromApi = function( options ){
+    var currentUser = new User();
+    currentUser.loadFromApi( User.CURRENT_ID_STR, options );
+    return currentUser;
+};
+
+// (stub) collection for users (shouldn't be common unless admin UI)
+var UserCollection = Backbone.Collection.extend( baseMVC.LoggableMixin ).extend({
+    model   : User,
+    urlRoot : function(){ return Galaxy.root + 'api/users'; },
+    //logger  : console,
+});
+
+
+//==============================================================================
+return {
+    User : User
+};});
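
A minimal usage sketch for the class method above:

    require([ 'mvc/user/user-model' ], function( userModel ) {
        var currentUser = userModel.User.getCurrentUserFromApi({
            success : function( user ) {
                console.log( user.toString(), 'anonymous?', user.isAnonymous() );
            }
        });
    });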
diff --git a/client/galaxy/scripts/mvc/user/user-quotameter.js b/client/galaxy/scripts/mvc/user/user-quotameter.js
new file mode 100644
index 0000000..ad48d0b
--- /dev/null
+++ b/client/galaxy/scripts/mvc/user/user-quotameter.js
@@ -0,0 +1,158 @@
+define([
+    "mvc/base-mvc",
+    "utils/localization"
+], function( baseMVC, _l ){
+'use strict';
+
+var logNamespace = 'user';
+//==============================================================================
+/** @class View to display a user's disk/storage usage
+ *      either as a progress bar representing the percentage of a quota used
+ *      or a simple text element displaying the human readable size used.
+ *  @name UserQuotaMeter
+ *  @augments Backbone.View
+ */
+var UserQuotaMeter = Backbone.View.extend( baseMVC.LoggableMixin ).extend(
+/** @lends UserQuotaMeter.prototype */{
+    _logNamespace : logNamespace,
+
+    /** Defaults for optional settings passed to initialize */
+    options : {
+        warnAtPercent   : 85,
+        errorAtPercent  : 100
+    },
+
+    /** Set up, accept options, and bind events */
+    initialize : function( options ){
+        this.log( this + '.initialize:', options );
+        _.extend( this.options, options );
+
+        //this.bind( 'all', function( event, data ){ this.log( this + ' event:', event, data ); }, this );
+        this.listenTo( this.model, 'change:quota_percent change:total_disk_usage', this.render );
+    },
+
+    /** Re-load user model data from the api */
+    update : function( options ){
+        this.log( this + ' updating user data...', options );
+        this.model.loadFromApi( this.model.get( 'id' ), options );
+        return this;
+    },
+
+    /** Is the user over their quota (if there is one)?
+     * @returns {Boolean} true if over quota, false if no quota or under quota
+     */
+    isOverQuota : function(){
+        return ( this.model.get( 'quota_percent' ) !== null
+              && this.model.get( 'quota_percent' ) >= this.options.errorAtPercent );
+    },
+
+    /** Render the meter when they have an applicable quota. Will render as a progress bar
+     *      with their percentage of that quota in text over the bar.
+     *  @fires quota:over when user is over quota (>= this.errorAtPercent)
+     *  @fires quota:under when user is under quota
+     *  @fires quota:under:approaching when user is >= this.warnAtPercent of their quota
+     *  @fires quota:under:ok when user is below this.warnAtPercent
+     *  @returns {jQuery} the rendered meter
+     */
+    _render_quota : function(){
+        var modelJson = this.model.toJSON(),
+            //prevPercent = this.model.previous( 'quota_percent' ),
+            percent = modelJson.quota_percent,
+            //meter = $( UserQuotaMeter.templates.quota( modelJson ) );
+            $meter = $( this._templateQuotaMeter( modelJson ) ),
+            $bar = $meter.find( '.progress-bar' );
+        //this.log( this + '.rendering quota, percent:', percent, 'meter:', meter );
+
+        // OVER QUOTA: color the quota bar and show the quota error message
+        if( this.isOverQuota() ){
+            //this.log( '\t over quota' );
+            $bar.attr( 'class', 'progress-bar progress-bar-danger' );
+            $meter.find( '.quota-meter-text' ).css( 'color', 'white' );
+            //TODO: only trigger event if state has changed
+            this.trigger( 'quota:over', modelJson );
+
+        // APPROACHING QUOTA: color the quota bar
+        } else if( percent >= this.options.warnAtPercent ){
+            //this.log( '\t approaching quota' );
+            $bar.attr( 'class', 'progress-bar progress-bar-warning' );
+            //TODO: only trigger event if state has changed
+            this.trigger( 'quota:under quota:under:approaching', modelJson );
+
+        // otherwise, hide/don't use the msg box
+        } else {
+            $bar.attr( 'class', 'progress-bar progress-bar-success' );
+            //TODO: only trigger event if state has changed
+            this.trigger( 'quota:under quota:under:ok', modelJson );
+        }
+        return $meter;
+    },
+
+    /** Render the meter when the user has NO applicable quota. Will render as text
+     *      showing the human readable sum storage their data is using.
+     *  @returns {jQuery} the rendered text
+     */
+    _render_usage : function(){
+        //var usage = $( UserQuotaMeter.templates.usage( this.model.toJSON() ) );
+        var usage = $( this._templateUsage( this.model.toJSON() ) );
+        this.log( this + '.rendering usage:', usage );
+        return usage;
+    },
+
+    /** Render either the quota percentage meter or the human readable disk usage
+     *      depending on whether the user model has quota info (quota_percent === null -> no quota)
+     *  @returns {Object} this UserQuotaMeter
+     */
+    render : function(){
+        //this.log( this + '.rendering' );
+        var meterHtml = null;
+
+        // no quota on server ('quota_percent' === null (can be valid at 0)), show usage instead
+        this.log( this + '.model.quota_percent:', this.model.get( 'quota_percent' ) );
+        if( ( this.model.get( 'quota_percent' ) === null )
+        ||  ( this.model.get( 'quota_percent' ) === undefined ) ){
+            meterHtml = this._render_usage();
+
+        // otherwise, render percent of quota (and warning, error)
+        } else {
+            meterHtml = this._render_quota();
+            //TODO: add the original text for unregistered quotas
+            //tooltip = "Your disk quota is %s.  You can increase your quota by registering a Galaxy account."
+        }
+
+        this.$el.html( meterHtml );
+        this.$el.find( '.quota-meter-text' ).tooltip();
+        return this;
+    },
+
+    _templateQuotaMeter : function( data ){
+        return [
+            '<div id="quota-meter" class="quota-meter progress">',
+                '<div class="progress-bar" style="width: ', data.quota_percent, '%"></div>',
+                '<div class="quota-meter-text" style="top: 6px"',
+                    (( data.nice_total_disk_usage )?( ' title="Using ' + data.nice_total_disk_usage + '">' ):( '>' )),
+                    _l( 'Using' ), ' ', data.quota_percent, '%',
+                '</div>',
+            '</div>'
+        ].join( '' );
+    },
+
+    _templateUsage : function( data ){
+        return [
+            '<div id="quota-meter" class="quota-meter" style="background-color: transparent">',
+                '<div class="quota-meter-text" style="top: 6px; color: white">',
+                    (( data.nice_total_disk_usage )?( _l( 'Using ' ) + data.nice_total_disk_usage ):( '' )),
+                '</div>',
+            '</div>'
+        ].join( '' );
+    },
+
+    toString : function(){
+        return 'UserQuotaMeter(' + this.model + ')';
+    }
+});
+
+
+//==============================================================================
+return {
+    UserQuotaMeter : UserQuotaMeter
+};});
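
A minimal sketch wiring the meter to a user model (the container element is an assumption):

    require([ 'mvc/user/user-model', 'mvc/user/user-quotameter' ],
    function( userModel, quotaMeter ) {
        var user  = userModel.User.getCurrentUserFromApi();
        var meter = new quotaMeter.UserQuotaMeter({ model: user, el: $( '#quota-meter-container' ) });
        meter.on( 'quota:over', function() { console.log( 'over quota' ); } );
        meter.render();
    });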
diff --git a/client/galaxy/scripts/mvc/visualization/visualization-model.js b/client/galaxy/scripts/mvc/visualization/visualization-model.js
new file mode 100644
index 0000000..6c2f9de
--- /dev/null
+++ b/client/galaxy/scripts/mvc/visualization/visualization-model.js
@@ -0,0 +1,140 @@
+//define([
+//], function(){
+
+//==============================================================================
+/** @class Model for a saved Galaxy visualization.
+ *
+ *  @augments Backbone.Model
+ *  @constructs
+ */
+var Visualization = Backbone.Model.extend(
+/** @lends Visualization.prototype */{
+
+    ///** logger used to record this.log messages, commonly set to console */
+    //// comment this out to suppress log output
+    //logger              : console,
+
+    /** default attributes for a model */
+    defaults : {
+        config : {}
+    },
+
+    /** override urlRoot to handle prefix */
+    urlRoot : function(){
+        var apiUrl = 'api/visualizations';
+        return Galaxy.root + apiUrl;
+    },
+
+    /** Set up the model, determine if accessible, bind listeners
+     *  @see Backbone.Model#initialize
+     */
+    initialize : function( data ){
+        //this.log( this + '.initialize', data, this.attributes );
+
+        // apply defaults to the config sub-object here since Backbone won't merge defaults into nested objects
+        if( _.isObject( data.config ) && _.isObject( this.defaults.config ) ){
+            _.defaults( data.config, this.defaults.config );
+        }
+
+        this._setUpListeners();
+    },
+
+    /** set up any event listeners
+     */
+    _setUpListeners : function(){
+        //this.on( 'change', function(){
+        //    console.info( 'change:', arguments );
+        //});
+    },
+
+    // ........................................................................ config
+    /** override set to properly allow update and trigger change when setting the sub-obj 'config' */
+    set: function( key, val ){
+        //TODO: validate config is object
+        if( key === 'config' ){
+            var oldConfig = this.get( 'config' );
+            // extend if already exists (is this correct behavior? no way to eliminate keys or reset entirely)
+            // clone in order to trigger change (diff. obj ref)
+            if( _.isObject( oldConfig ) ){
+                val = _.extend( _.clone( oldConfig ), val );
+            }
+        }
+        Backbone.Model.prototype.set.call( this, key, val );
+        return this;
+    },
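+    // Example (hypothetical values): because 'config' is merged rather than
+    // replaced, successive partial sets accumulate keys, and the clone creates
+    // a new object reference so 'change' still fires:
+    //   var viz = new Visualization({ config: { x: 1 } });
+    //   viz.set( 'config', { y: 2 } );
+    //   viz.get( 'config' ); // => { x: 1, y: 2 }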
+
+    // ........................................................................ misc
+    /** String representation */
+    toString : function(){
+        var idAndTitle = this.get( 'id' ) || '';
+        if( this.get( 'title' ) ){
+            idAndTitle += ':' + this.get( 'title' );
+        }
+        return 'Visualization(' + idAndTitle + ')';
+    }
+});
+
+
+//==============================================================================
+/** @class Backbone collection of visualization models
+ *
+ *  @constructs
+ */
+var VisualizationCollection = Backbone.Collection.extend(
+/** @lends VisualizationCollection.prototype */{
+    model           : Visualization,
+
+    ///** logger used to record this.log messages, commonly set to console */
+    //// comment this out to suppress log output
+    //logger              : console,
+
+    url : function(){
+        return Galaxy.root + 'api/visualizations';
+    },
+
+    /** Set up.
+     *  @see Backbone.Collection#initialize
+     */
+    initialize : function( models, options ){
+        options = options || {};
+        //this._setUpListeners();
+    },
+
+    //_setUpListeners : function(){
+    //},
+
+    // ........................................................................ common queries
+    // ........................................................................ ajax
+    // ........................................................................ misc
+    set : function( models, options ){
+        // override to get a correct/smarter merge when incoming data is partial
+        //  w/o this, partial models from the server would fill in missing data
+        //  with model defaults and overwrite existing data on the client
+        // see Backbone.Collection.set and _prepareModel
+        var collection = this;
+        models = _.map( models, function( model ){
+            var existing = collection.get( model.id );
+            if( !existing ){ return model; }
+
+            // merge the models _BEFORE_ calling the superclass version
+            var merged = existing.toJSON();
+            _.extend( merged, model );
+            return merged;
+        });
+        // now call superclass when the data is filled
+        Backbone.Collection.prototype.set.call( this, models, options );
+    },
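+    // Example (hypothetical data): with a model { id: 'abc', title: 'Scatter',
+    // config: {...} } already in the collection, a partial server response such
+    // as collection.set([ { id: 'abc', title: 'Renamed' } ]) merges into the
+    // existing attributes, so 'config' survives instead of being reset to defaults.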
+
+    /** String representation. */
+    toString : function(){
+        return ([ 'VisualizationCollection(', [ this.historyId, this.length ].join(), ')' ].join( '' ));
+    }
+});
+
+
+//==============================================================================
+//return {
+//    Visualization           : Visualization,
+//    VisualizationCollection : VisualizationCollection
+//};});
diff --git a/client/galaxy/scripts/mvc/webhooks.js b/client/galaxy/scripts/mvc/webhooks.js
new file mode 100644
index 0000000..fe5bdd4
--- /dev/null
+++ b/client/galaxy/scripts/mvc/webhooks.js
@@ -0,0 +1,57 @@
+/**
+  Webhooks
+**/
+define([], function() {
+
+    var WebhookModel = Backbone.Model.extend({
+        defaults: {
+            activate: false
+        }
+    });
+
+    var Webhooks = Backbone.Collection.extend({
+        model: WebhookModel
+    });
+
+    var WebhookView = Backbone.View.extend({
+        el: '#webhook-view',
+
+        initialize: function(options) {
+            var me = this;
+
+            this.model = new WebhookModel();
+            this.model.urlRoot = options.urlRoot;
+            this.model.fetch({
+                success: function() {
+                    me.render();
+                }
+            });
+        },
+
+        render: function() {
+            var webhook = this.model.toJSON();
+
+            this.$el.html('<div id="' + webhook.name + '"></div>');
+            if (webhook.styles) $('<style/>', {type: 'text/css'}).text(webhook.styles).appendTo('head');
+            if (webhook.script) $('<script/>', {type: 'text/javascript'}).text(webhook.script).appendTo('head');
+
+            return this;
+        }
+    });
+
+    var add = function(options) {
+        var webhooks = new Webhooks();
+
+        webhooks.url = Galaxy.root + options.url;
+        webhooks.fetch({
+            async: (typeof options.async != 'undefined') ? options.async : true,
+            success: options.callback
+        });
+    };
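+    // Example usage (a sketch; the endpoint path and callback are hypothetical,
+    // and this assumes the module's return value is required as `Webhooks`):
+    //   Webhooks.add({
+    //       url: 'api/webhooks',
+    //       callback: function( webhooks ) { console.log( webhooks.models ); }
+    //   });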
+
+    return {
+        Webhooks: Webhooks,
+        WebhookView: WebhookView,
+        add: add
+    };
+});
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-canvas.js b/client/galaxy/scripts/mvc/workflow/workflow-canvas.js
new file mode 100644
index 0000000..5ef52f9
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-canvas.js
@@ -0,0 +1,245 @@
+define([], function() {
+    function CanvasManager( app, canvas_viewport, overview ) {
+        this.app = app;
+        this.cv = canvas_viewport;
+        this.cc = this.cv.find( "#canvas-container" );
+        this.overview = overview;
+        this.oc = overview.find( "#overview-canvas" );
+        this.ov = overview.find( "#overview-viewport" );
+        // Make overview box draggable
+        this.init_drag();
+    }
+    $.extend( CanvasManager.prototype, {
+        init_drag : function () {
+            var self = this;
+            var move = function( x, y ) {
+                x = Math.min( x, self.cv.width() / 2 );
+                x = Math.max( x, - self.cc.width() + self.cv.width() / 2 );
+                y = Math.min( y, self.cv.height() / 2 );
+                y = Math.max( y, - self.cc.height() + self.cv.height() / 2 );
+                self.cc.css( {
+                    left: x,
+                    top: y
+                });
+                self.cv.css( { "background-position-x": x,
+                               "background-position-y": y
+                });
+                self.update_viewport_overlay();
+            };
+            // Dragging within canvas background
+            this.cc.each( function() {
+                this.scroll_panel = new ScrollPanel( this );
+            });
+            var x_adjust, y_adjust;
+            this.cv.bind( "dragstart", function() {
+                var o = $(this).offset();
+                var p = self.cc.position();
+                y_adjust = p.top - o.top;
+                x_adjust = p.left - o.left;
+            }).bind( "drag", function( e, d ) {
+                move( d.offsetX + x_adjust, d.offsetY + y_adjust );
+            }).bind( "dragend", function() {
+                self.app.workflow.fit_canvas_to_nodes();
+                self.draw_overview();
+            });
+            this.overview.click( function( e ) {
+                if (self.overview.hasClass('blockaclick')){
+                    self.overview.removeClass('blockaclick');
+                } else {
+                    var in_w = self.cc.width(),
+                        in_h = self.cc.height(),
+                        o_w = self.oc.width(),
+                        o_h = self.oc.height(),
+                        new_x_offset = e.pageX - self.oc.offset().left - self.ov.width() / 2,
+                        new_y_offset = e.pageY - self.oc.offset().top - self.ov.height() / 2;
+                    move( - ( new_x_offset / o_w * in_w ),
+                          - ( new_y_offset / o_h * in_h ) );
+                    self.app.workflow.fit_canvas_to_nodes();
+                    self.draw_overview();
+                }
+            });
+            // Dragging for overview pane
+            this.ov.bind( "drag", function( e, d ) {
+                var in_w = self.cc.width(),
+                    in_h = self.cc.height(),
+                    o_w = self.oc.width(),
+                    o_h = self.oc.height(),
+                    new_x_offset = d.offsetX - self.overview.offset().left,
+                    new_y_offset = d.offsetY - self.overview.offset().top;
+                move( - ( new_x_offset / o_w * in_w ),
+                      - ( new_y_offset / o_h * in_h ) );
+            }).bind( "dragend", function() {
+                self.overview.addClass('blockaclick');
+                self.app.workflow.fit_canvas_to_nodes();
+                self.draw_overview();
+            });
+            // Dragging for overview border (resize)
+            $("#overview-border").bind( "drag", function( e, d ) {
+                var op = $(this).offsetParent();
+                var opo = op.offset();
+                var new_size = Math.max( op.width() - ( d.offsetX - opo.left ),
+                                         op.height() - ( d.offsetY - opo.top ) );
+                $(this).css( {
+                    width: new_size,
+                    height: new_size
+                });
+                self.draw_overview();
+            });
+
+            /*  Disable dragging for child element of the panel so that resizing can
+                only be done by dragging the borders */
+            $("#overview-border div").bind("drag", function() { });
+
+        },
+        update_viewport_overlay: function() {
+            var cc = this.cc,
+                cv = this.cv,
+                oc = this.oc,
+                ov = this.ov,
+                in_w = cc.width(),
+                in_h = cc.height(),
+                o_w = oc.width(),
+                o_h = oc.height(),
+                cc_pos = cc.position();
+            ov.css( {
+                left: - ( cc_pos.left / in_w * o_w ),
+                top: - ( cc_pos.top / in_h * o_h ),
+                // Subtract 2 to account for borders (maybe just change box sizing style instead?)
+                width: ( cv.width() / in_w * o_w ) - 2,
+                height: ( cv.height() / in_h * o_h ) - 2
+            });
+        },
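+        // Worked example for update_viewport_overlay above (hypothetical sizes):
+        // with a 2000px-wide canvas container, a 500px-wide viewport and a
+        // 150px-wide overview, the overlay width is ( 500 / 2000 * 150 ) - 2
+        // = 35.5px: the viewport covers a quarter of the canvas, so the overlay
+        // covers a quarter of the overview, minus the 2px border allowance.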
+        draw_overview: function() {
+            var canvas_el = $("#overview-canvas"),
+                size = canvas_el.parent().parent().width(),
+                c = canvas_el.get(0).getContext("2d"),
+                in_w = $("#canvas-container").width(),
+                in_h = $("#canvas-container").height();
+            var o_h, shift_h, o_w, shift_w;
+            // Fit canvas into overview area
+            var cv_w = this.cv.width();
+            var cv_h = this.cv.height();
+            if ( in_w < cv_w && in_h < cv_h ) {
+                // Canvas is smaller than viewport
+                o_w = in_w / cv_w * size;
+                shift_w = ( size - o_w ) / 2;
+                o_h = in_h / cv_h * size;
+                shift_h = ( size - o_h ) / 2;
+            } else if ( in_w < in_h ) {
+                // Taller than wide
+                shift_h = 0;
+                o_h = size;
+                o_w = Math.ceil( o_h * in_w / in_h );
+                shift_w = ( size - o_w ) / 2;
+            } else {
+                // Wider than tall
+                o_w = size;
+                shift_w = 0;
+                o_h = Math.ceil( o_w * in_h / in_w );
+                shift_h = ( size - o_h ) / 2;
+            }
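+            // Worked example (hypothetical sizes): a 1200x600 canvas drawn into
+            // a 150px square overview is "wider than tall", so o_w = 150,
+            // o_h = ceil( 150 * 600 / 1200 ) = 75 and shift_h = ( 150 - 75 ) / 2
+            // = 37.5, which centers the thumbnail vertically.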
+            canvas_el.parent().css( {
+               left: shift_w,
+               top: shift_h,
+               width: o_w,
+               height: o_h
+            });
+            canvas_el.attr( "width", o_w );
+            canvas_el.attr( "height", o_h );
+            // Draw overview
+            $.each( this.app.workflow.nodes, function( id, node ) {
+                c.fillStyle = "#D2C099";
+                c.strokeStyle = "#D8B365";
+                c.lineWidth = 1;
+                var node_element = $(node.element),
+                    position = node_element.position(),
+                    x = position.left / in_w * o_w,
+                    y = position.top / in_h * o_h,
+                    w = node_element.width() / in_w * o_w,
+                    h = node_element.height() / in_h * o_h;
+                if (node.tool_errors){
+                    c.fillStyle = "#FFCCCC";
+                    c.strokeStyle = "#AA6666";
+                } else if (node.workflow_outputs !== undefined && node.workflow_outputs.length > 0){
+                    c.fillStyle = "#E8A92D";
+                    c.strokeStyle = "#E8A92D";
+                }
+                c.fillRect( x, y, w, h );
+                c.strokeRect( x, y, w, h );
+            });
+            this.update_viewport_overlay();
+        }
+    });
+
+    // FIXME: merge scroll panel into CanvasManager, clean up hardcoded stuff.
+    function ScrollPanel( panel ) {
+        this.panel = panel;
+    }
+    $.extend( ScrollPanel.prototype, {
+        test: function( e, onmove ) {
+            clearTimeout( this.timeout );
+            var x = e.pageX,
+                y = e.pageY,
+                // Panel size and position
+                panel = $(this.panel),
+                panel_pos = panel.position(),
+                panel_w = panel.width(),
+                panel_h = panel.height(),
+                // Viewport size and offset
+                viewport = panel.parent(),
+                viewport_w = viewport.width(),
+                viewport_h = viewport.height(),
+                viewport_offset = viewport.offset(),
+                // Edges of viewport (in page coordinates)
+                min_x = viewport_offset.left,
+                min_y = viewport_offset.top,
+                max_x = min_x + viewport.width(),
+                max_y = min_y + viewport.height(),
+                // Legal panel range
+                p_min_x = - ( panel_w - ( viewport_w / 2 ) ),
+                p_min_y = - ( panel_h - ( viewport_h / 2 )),
+                p_max_x = ( viewport_w / 2 ),
+                p_max_y = ( viewport_h / 2 ),
+                // Did the panel move?
+                moved = false,
+                // Constants
+                close_dist = 5,
+                nudge = 23;
+            if ( x - close_dist < min_x ) {
+                if ( panel_pos.left < p_max_x ) {
+                    var t = Math.min( nudge, p_max_x - panel_pos.left );
+                    panel.css( "left", panel_pos.left + t );
+                    moved = true;
+                }
+            } else if ( x + close_dist > max_x ) {
+                if ( panel_pos.left > p_min_x ) {
+                    var t = Math.min( nudge, panel_pos.left  - p_min_x );
+                    panel.css( "left", panel_pos.left - t );
+                    moved = true;
+                }
+            } else if ( y - close_dist < min_y ) {
+                if ( panel_pos.top < p_max_y ) {
+                    var t = Math.min( nudge, p_max_y - panel_pos.top );
+                    panel.css( "top", panel_pos.top + t );
+                    moved = true;
+                }
+            } else if ( y + close_dist > max_y ) {
+                if ( panel_pos.top > p_min_y ) {
+                    var t = Math.min( nudge, panel_pos.top - p_min_y );
+                    panel.css( "top", ( panel_pos.top - t ) + "px" );
+                    moved = true;
+                }
+            }
+            if ( moved ) {
+                // Keep moving even if mouse doesn't move
+                onmove();
+                var self = this;
+                this.timeout = setTimeout( function() { self.test( e, onmove ); }, 50 );
+            }
+        },
+        stop: function( e, ui ) {
+            clearTimeout( this.timeout );
+        }
+    });
+    return CanvasManager;
+});
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-connector.js b/client/galaxy/scripts/mvc/workflow/workflow-connector.js
new file mode 100644
index 0000000..f0995f5
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-connector.js
@@ -0,0 +1,131 @@
+define([], function(){
+    function Connector( handle1, handle2 ) {
+        this.canvas = null;
+        this.dragging = false;
+        this.inner_color = "#FFFFFF";
+        this.outer_color = "#D8B365";
+        if ( handle1 && handle2 ) {
+            this.connect( handle1, handle2 );
+        }
+    }
+    $.extend( Connector.prototype, {
+        connect: function ( t1, t2 ) {
+            this.handle1 = t1;
+            if ( this.handle1 ) {
+                this.handle1.connect( this );
+            }
+            this.handle2 = t2;
+            if ( this.handle2 ) {
+                this.handle2.connect( this );
+            }
+        },
+        destroy : function () {
+            if ( this.handle1 ) {
+                this.handle1.disconnect( this );
+            }
+            if ( this.handle2 ) {
+                this.handle2.disconnect( this );
+            }
+            $(this.canvas).remove();
+        },
+        destroyIfInvalid: function() {
+            if( this.handle1 && this.handle2 && ! this.handle2.attachable( this.handle1 ) ) {
+                this.destroy();
+            }
+        },
+        redraw : function () {
+            var canvas_container = $("#canvas-container");
+            if ( ! this.canvas ) {
+                this.canvas = document.createElement( "canvas" );
+                canvas_container.append( $(this.canvas) );
+                if ( this.dragging ) {
+                    this.canvas.style.zIndex = "300";
+                }
+            }
+            var relativeLeft = function( e ) {
+                return $(e).offset().left - canvas_container.offset().left;
+            };
+            var relativeTop = function( e ) {
+                return $(e).offset().top - canvas_container.offset().top;
+            };
+            if (!this.handle1 || !this.handle2) {
+                return;
+            }
+            // Find the position of each handle
+            var start_x = relativeLeft( this.handle1.element ) + 5;
+            var start_y = relativeTop( this.handle1.element ) + 5;
+            var end_x = relativeLeft( this.handle2.element ) + 5;
+            var end_y = relativeTop( this.handle2.element ) + 5;
+            // Calculate canvas area
+            var canvas_extra = 100;
+            var canvas_min_x = Math.min( start_x, end_x );
+            var canvas_max_x = Math.max( start_x, end_x );
+            var canvas_min_y = Math.min( start_y, end_y );
+            var canvas_max_y = Math.max( start_y, end_y );
+            var cp_shift = Math.min( Math.max( Math.abs( canvas_max_y - canvas_min_y ) / 2, 100 ), 300 );
+            var canvas_left = canvas_min_x - canvas_extra;
+            var canvas_top = canvas_min_y - canvas_extra;
+            var canvas_width = canvas_max_x - canvas_min_x + 2 * canvas_extra;
+            var canvas_height = canvas_max_y - canvas_min_y + 2 * canvas_extra;
+            // Place the canvas
+            this.canvas.style.left = canvas_left + "px";
+            this.canvas.style.top = canvas_top + "px";
+            this.canvas.setAttribute( "width", canvas_width );
+            this.canvas.setAttribute( "height", canvas_height );
+            // Adjust points to be relative to the canvas
+            start_x -= canvas_left;
+            start_y -= canvas_top;
+            end_x -= canvas_left;
+            end_y -= canvas_top;
+            // Draw the line
+
+            var c = this.canvas.getContext("2d"),
+                start_offsets = null,
+                end_offsets = null;
+            var num_offsets = 1;
+            if ( this.handle1 && this.handle1.isMappedOver() ) {
+                var start_offsets = [ -6, -3, 0, 3, 6 ];
+                num_offsets = 5;
+            } else {
+                var start_offsets = [ 0 ];
+            }
+            if ( this.handle2 && this.handle2.isMappedOver() ) {
+                var end_offsets = [ -6, -3, 0, 3, 6 ];
+                num_offsets = 5;
+            } else {
+                var end_offsets = [ 0 ];
+            }
+            var connector = this;
+            for( var i = 0; i < num_offsets; i++ ) {
+                var inner_width = 5,
+                    outer_width = 7;
+                if( start_offsets.length > 1 || end_offsets.length > 1 ) {
+                    // We have a multi-run, using many lines, make them small.
+                    inner_width = 1;
+                    outer_width = 3;
+                }
+                connector.draw_outlined_curve( start_x, start_y, end_x, end_y, cp_shift, inner_width, outer_width, start_offsets[ i % start_offsets.length ], end_offsets[ i % end_offsets.length ] );
+            }
+        },
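+        // Note on redraw() above: when either terminal is mapped over a
+        // collection, five thin parallel curves (offsets -6..6) are drawn
+        // instead of one thick curve, suggesting a bundle of connections.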
+        draw_outlined_curve : function( start_x, start_y, end_x, end_y, cp_shift, inner_width, outer_width, offset_start, offset_end ) {
+            offset_start = offset_start || 0;
+            offset_end = offset_end || 0;
+            var c = this.canvas.getContext("2d");
+            c.lineCap = "round";
+            c.strokeStyle = this.outer_color;
+            c.lineWidth = outer_width;
+            c.beginPath();
+            c.moveTo( start_x, start_y + offset_start );
+            c.bezierCurveTo( start_x + cp_shift, start_y + offset_start, end_x - cp_shift, end_y + offset_end, end_x, end_y + offset_end);
+            c.stroke();
+            // Inner line
+            c.strokeStyle = this.inner_color;
+            c.lineWidth = inner_width;
+            c.beginPath();
+            c.moveTo( start_x, start_y + offset_start );
+            c.bezierCurveTo( start_x + cp_shift, start_y + offset_start, end_x - cp_shift, end_y + offset_end, end_x, end_y + offset_end );
+            c.stroke();
+        }
+    });
+    return Connector;
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-manager.js b/client/galaxy/scripts/mvc/workflow/workflow-manager.js
new file mode 100644
index 0000000..c17c2a4
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-manager.js
@@ -0,0 +1,486 @@
+define([
+    'mvc/workflow/workflow-connector',
+    'libs/toastr'
+    ],
+function( Connector, Toastr ) {
+    function Workflow( app, canvas_container ) {
+        this.app = app;
+        this.canvas_container = canvas_container;
+        this.id_counter = 0;
+        this.nodes = {};
+        this.name = null;
+        this.has_changes = false;
+        this.active_form_has_changes = false;
+        this.nodeLabels = {};
+        this.workflowOutputLabels = {};
+    }
+    $.extend( Workflow.prototype, {
+        canLabelNodeWith: function( label ) {
+            if( label ) {
+                return ! (label in this.nodeLabels);
+            } else {
+                // empty labels are non-exclusive, so allow this one.
+                return true;
+            }
+        },
+        registerNodeLabel: function( label ) {
+            if( label ) {
+                this.nodeLabels[label] = true;
+            }
+        },
+        unregisterNodeLabel: function( label ) {
+            if( label ) {
+                delete this.nodeLabels[label];
+            }
+        },
+        updateNodeLabel: function( fromLabel, toLabel ) {
+            if( fromLabel ) {
+                this.unregisterNodeLabel( fromLabel );
+            }
+            if( ! this.canLabelNodeWith( toLabel ) ) {
+                Toastr.warning("Workflow contains duplicate node labels " + toLabel + ". This must be fixed before it can be saved.");
+            }
+            if( toLabel ) {
+                this.registerNodeLabel( toLabel );
+            }
+        },
+        attemptUpdateNodeLabel: function( node, label ) {
+            if( this.canLabelNodeWith( label ) ) {
+                node.setLabel( label );
+                return true;
+            } else {
+                return false;
+            }
+        },
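+        // Example (hypothetical nodes and labels): the registry makes non-empty
+        // node labels exclusive:
+        //   workflow.attemptUpdateNodeLabel( nodeA, 'qc' ); // true, 'qc' now registered
+        //   workflow.attemptUpdateNodeLabel( nodeB, 'qc' ); // false, label already taken
+        //   workflow.attemptUpdateNodeLabel( nodeB, '' );   // true, empty labels are non-exclusive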
+        canLabelOutputWith: function( label ) {
+            if( label ) {
+                return ! (label in this.workflowOutputLabels);
+            } else {
+                // empty labels are non-exclusive, so allow this one.
+                return true;
+            }
+        },
+        registerOutputLabel: function( label ) {
+            if( label ) {
+                this.workflowOutputLabels[label] = true;
+            }
+        },
+        unregisterOutputLabel: function( label ) {
+            if( label ) {
+                delete this.workflowOutputLabels[label];
+            }
+        },
+        updateOutputLabel: function( fromLabel, toLabel ) {
+            if( fromLabel ) {
+                this.unregisterOutputLabel( fromLabel );
+            }
+            if( ! this.canLabelOutputWith( toLabel ) ) {
+                Toastr.warning("Workflow contains duplicate workflow output labels " + toLabel + ". This must be fixed before it can be saved.");
+            }
+            if( toLabel ) {
+                this.registerOutputLabel( toLabel );
+            }
+        },
+        attemptUpdateOutputLabel: function( node, outputName, label ) {
+            if( this.canLabelOutputWith( label ) ) {
+                node.labelWorkflowOutput( outputName, label );
+                node.nodeView.redrawWorkflowOutputs();
+                return true;
+            } else {
+                return false;
+            }
+        },
+        create_node: function ( type, title_text, content_id ) {
+            var node = this.app.prebuildNode( type, title_text, content_id );
+            this.add_node( node );
+            this.fit_canvas_to_nodes();
+            this.app.canvas_manager.draw_overview();
+            this.activate_node( node );
+            return node;
+        },
+        add_node : function( node ) {
+            node.id = this.id_counter;
+            node.element.attr( 'id', 'wf-node-step-' + node.id );
+            this.id_counter++;
+            this.nodes[ node.id ] = node;
+            this.has_changes = true;
+            node.workflow = this;
+        },
+        remove_node : function( node ) {
+            if ( this.active_node == node ) {
+                this.clear_active_node();
+            }
+            delete this.nodes[ node.id ];
+            this.has_changes = true;
+        },
+        remove_all : function() {
+            var wf = this;
+            $.each( this.nodes, function ( k, v ) {
+                v.destroy();
+                wf.remove_node( v );
+            });
+        },
+        rectify_workflow_outputs : function() {
+            // Find out if we're using workflow_outputs or not.
+            var using_workflow_outputs = false;
+            var has_existing_pjas = false;
+            $.each( this.nodes, function ( k, node ) {
+                if (node.workflow_outputs && node.workflow_outputs.length > 0){
+                    using_workflow_outputs = true;
+                }
+                $.each(node.post_job_actions, function(pja_id, pja){
+                    if (pja.action_type === "HideDatasetAction"){
+                        has_existing_pjas = true;
+                    }
+                });
+            });
+            if (using_workflow_outputs !== false || has_existing_pjas !== false){
+                // Using workflow outputs, or has existing pjas.  Remove all PJAs and recreate based on outputs.
+                var self = this;
+                $.each(this.nodes, function (k, node ){
+                    if (node.type === 'tool'){
+                        var node_changed = false;
+                        if (node.post_job_actions == null){
+                            node.post_job_actions = {};
+                            node_changed = true;
+                        }
+                        var pjas_to_rem = [];
+                        $.each(node.post_job_actions, function(pja_id, pja){
+                            if (pja.action_type == "HideDatasetAction"){
+                                pjas_to_rem.push(pja_id);
+                            }
+                        });
+                        if (pjas_to_rem.length > 0 ) {
+                            $.each(pjas_to_rem, function(i, pja_name){
+                                node_changed = true;
+                                delete node.post_job_actions[pja_name];
+                            });
+                        }
+                        if (using_workflow_outputs){
+                            $.each(node.output_terminals, function(ot_id, ot){
+                                var create_pja = !node.isWorkflowOutput(ot.name);
+                                if (create_pja === true){
+                                    node_changed = true;
+                                    var pja = {
+                                        action_type : "HideDatasetAction",
+                                        output_name : ot.name,
+                                        action_arguments : {}
+                                    };
+                                    node.post_job_actions['HideDatasetAction'+ot.name] = pja;
+                                }
+                            });
+                        }
+                        // lastly, if this is the active node, and we made changes, reload the display at right.
+                        if (self.active_node == node && node_changed === true) {
+                            self.reload_active_node();
+                        }
+                    }
+                });
+            }
+        },
+        to_simple : function () {
+            var nodes = {};
+            $.each( this.nodes, function ( i, node ) {
+                var input_connections = {};
+                $.each( node.input_terminals, function ( k, t ) {
+                    input_connections[ t.name ] = null;
+                    // There should be at most one connector per input, so this
+                    // $.each effectively acts as an if statement
+                    var cons = [];
+                    $.each( t.connectors, function ( i, c ) {
+                        var con_dict = { id: c.handle1.node.id, output_name: c.handle1.name };
+                        var input_subworkflow_step_id = t.attributes.input.input_subworkflow_step_id;
+                        if( input_subworkflow_step_id !== undefined ) {
+                            con_dict["input_subworkflow_step_id"] = input_subworkflow_step_id;
+                        }
+                        cons[i] = con_dict;
+                        input_connections[ t.name ] = cons;
+                    });
+                });
+                var post_job_actions = {};
+                if (node.post_job_actions){
+                    $.each( node.post_job_actions, function ( i, act ) {
+                        var pja = {
+                            action_type : act.action_type,
+                            output_name : act.output_name,
+                            action_arguments : act.action_arguments
+                        };
+                        post_job_actions[ act.action_type + act.output_name ] = pja;
+                    });
+                }
+                if (!node.workflow_outputs){
+                    node.workflow_outputs = [];
+                    // Just in case.
+                }
+                var node_data = {
+                    id : node.id,
+                    type : node.type,
+                    content_id : node.content_id,
+                    tool_state : node.tool_state,
+                    tool_errors : node.tool_errors,
+                    input_connections : input_connections,
+                    position : $(node.element).position(),
+                    annotation: node.annotation,
+                    post_job_actions: node.post_job_actions,
+                    uuid: node.uuid,
+                    label: node.label,
+                    workflow_outputs: node.workflow_outputs
+                };
+                nodes[ node.id ] = node_data;
+            });
+            return { steps: nodes };
+        },
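+        // The structure returned above looks roughly like (hypothetical step):
+        //   { steps: { 0: { id: 0, type: 'tool', content_id: ..., tool_state: ...,
+        //                   input_connections: { input1: [ { id: 2, output_name: 'out_file1' } ] },
+        //                   position: { top: ..., left: ... }, post_job_actions: { ... },
+        //                   workflow_outputs: [ ... ] } } }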
+        from_simple : function ( data, initialImport_ ) {
+            var initialImport = (initialImport_ === undefined) ? true : initialImport_;
+            var wf = this;
+            var offset = 0;
+            if( initialImport ) {
+                wf.name = data.name;
+            } else {
+                offset = Object.keys(wf.nodes).length;
+            }
+            var max_id = offset;
+            // First pass, nodes
+            var using_workflow_outputs = false;
+            $.each( data.steps, function( id, step ) {
+                var node = wf.app.prebuildNode( step.type, step.name, step.content_id );
+                // If workflow being copied into another, wipe UUID and let
+                // Galaxy assign new ones.
+                if( ! initialImport ) {
+                    step.uuid = null;
+                    $.each(step.workflow_outputs, function( name, workflow_output ) {
+                        workflow_output.uuid = null;
+                    });
+                }
+                node.init_field_data( step );
+                if ( step.position ) {
+                    node.element.css( { top: step.position.top, left: step.position.left } );
+                }
+                node.id = parseInt( step.id, 10 ) + offset;
+                wf.nodes[ node.id ] = node;
+                max_id = Math.max( max_id, parseInt( id, 10 ) + offset );
+                // For older workflows, it's possible to have HideDataset PJAs, but not WorkflowOutputs.
+                // Check for either, and then add outputs in the next pass.
+                if (!using_workflow_outputs){
+                    if (node.workflow_outputs.length > 0){
+                        using_workflow_outputs = true;
+                    }
+                    else{
+                        $.each(node.post_job_actions || [], function(pja_id, pja){
+                            if (pja.action_type === "HideDatasetAction"){
+                                using_workflow_outputs = true;
+                            }
+                        });
+                    }
+                }
+            });
+            wf.id_counter = max_id + 1;
+            // Second pass, connections
+            $.each( data.steps, function( id, step ) {
+                var node = wf.nodes[ parseInt( id, 10 ) + offset ];
+                $.each( step.input_connections, function( k, v ) {
+                    if ( v ) {
+                        if ( ! $.isArray( v ) ) {
+                            v = [ v ];
+                        }
+                        $.each( v, function( l, x ) {
+                            var other_node = wf.nodes[ parseInt( x.id, 10 ) + offset ];
+                            var c = new Connector();
+                            c.connect( other_node.output_terminals[ x.output_name ],
+                                       node.input_terminals[ k ] );
+                            c.redraw();
+                        });
+                    }
+                });
+                if(using_workflow_outputs){
+                    // Ensure that every output terminal has a WorkflowOutput or HideDatasetAction.
+                    $.each(node.output_terminals, function(ot_id, ot){
+                        if(node.post_job_actions['HideDatasetAction'+ot.name] === undefined){
+                            node.addWorkflowOutput(ot.name);
+                            var callout = $(node.element).find('.callout.'+ot.name);
+                            callout.find('img').attr('src', Galaxy.root + 'static/images/fugue/asterisk-small.png');
+                            wf.has_changes = true;
+                        }
+                    });
+                }
+            });
+        },
+        check_changes_in_active_form : function() {
+            // If active form has changed, save it
+            if (this.active_form_has_changes) {
+                this.has_changes = true;
+                // Submit form.
+                $("#right-content").find("form").submit();
+                this.active_form_has_changes = false;
+            }
+        },
+        reload_active_node : function() {
+            if (this.active_node){
+                var node = this.active_node;
+                this.clear_active_node();
+                this.activate_node(node);
+            }
+        },
+        clear_active_node : function() {
+            if ( this.active_node ) {
+                this.active_node.make_inactive();
+                this.active_node = null;
+            }
+            this.app.showToolForm( "<div>No node selected</div>", {id: 'no-node'} );
+        },
+        activate_node : function( node ) {
+            if ( this.active_node != node ) {
+                this.check_changes_in_active_form();
+                this.clear_active_node();
+                this.app.showToolForm( node.form_html, node );
+                node.make_active();
+                this.active_node = node;
+            }
+        },
+        node_changed : function ( node, force ) {
+            this.has_changes = true;
+            if ( this.active_node == node && force ) {
+                // Force changes to be saved even on new connection (previously dumped)
+                this.check_changes_in_active_form();
+                this.app.showToolForm( node.form_html, node );
+            }
+        },
+        layout : function () {
+            this.check_changes_in_active_form();
+            this.has_changes = true;
+            // Prepare predecessor / successor tracking
+            var n_pred = {};
+            var successors = {};
+            // First pass to initialize arrays even for nodes with no connections
+            $.each( this.nodes, function( id, node ) {
+                if ( n_pred[id] === undefined ) { n_pred[id] = 0; }
+                if ( successors[id] === undefined ) { successors[id] = []; }
+            });
+            // Second pass to count predecessors and successors
+            $.each( this.nodes, function( id, node ) {
+                $.each( node.input_terminals, function ( j, t ) {
+                    $.each( t.connectors, function ( k, c ) {
+                        // A connection exists from `other` to `node`
+                        var other = c.handle1.node;
+                        // node gains a predecessor
+                        n_pred[node.id] += 1;
+                        // other gains a successor
+                        successors[other.id].push( node.id );
+                    });
+                });
+            });
+            // Assemble order, tracking levels
+            var node_ids_by_level = [];
+            while ( true ) {
+                // Everything without a predecessor
+                var level_parents = [];
+                for ( var pred_k in n_pred ) {
+                    if ( n_pred[ pred_k ] == 0 ) {
+                        level_parents.push( pred_k );
+                    }
+                }
+                if ( level_parents.length == 0 ) {
+                    break;
+                }
+                node_ids_by_level.push( level_parents );
+                // Remove the parents from this level, and decrement the number
+                // of predecessors for each successor
+                for ( var k in level_parents ) {
+                    var v = level_parents[k];
+                    delete n_pred[v];
+                    for ( var sk in successors[v] ) {
+                        n_pred[ successors[v][sk] ] -= 1;
+                    }
+                }
+            }
+            if ( ! $.isEmptyObject( n_pred ) ) {
+                // ERROR: CYCLE! Currently we do nothing
+                return;
+            }
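+            // The loop above is a Kahn-style topological sort: each iteration
+            // peels off every node with zero remaining predecessors as one
+            // level. E.g. (hypothetical ids) for connections 1 -> 2 -> 3 plus
+            // 1 -> 3, the levels come out [ [1], [2], [3] ]: node 3 still has
+            // a predecessor left after level [1] is removed.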
+            // Layout each level
+            var all_nodes = this.nodes;
+            var h_pad = 80, v_pad = 30;
+            var left = h_pad;
+            $.each( node_ids_by_level, function( i, ids ) {
+                // We keep nodes in the same order in a level to give the user
+                // some control over ordering
+                ids.sort( function( a, b ) {
+                    return $(all_nodes[a].element).position().top - $(all_nodes[b].element).position().top;
+                });
+                // Position each node
+                var max_width = 0;
+                var top = v_pad;
+                $.each( ids, function( j, id ) {
+                    var node = all_nodes[id];
+                    var element = $(node.element);
+                    $(element).css( { top: top, left: left } );
+                    max_width = Math.max( max_width, $(element).width() );
+                    top += $(element).height() + v_pad;
+                });
+                left += max_width + h_pad;
+            });
+            // Need to redraw all connectors
+            $.each( all_nodes, function( _, node ) { node.redraw(); } );
+        },
+        bounds_for_all_nodes: function() {
+            var xmin = Infinity, xmax = -Infinity,
+                ymin = Infinity, ymax = -Infinity,
+                p;
+            $.each( this.nodes, function( id, node ) {
+                var e = $(node.element);
+                p = e.position();
+                xmin = Math.min( xmin, p.left );
+                xmax = Math.max( xmax, p.left + e.width() );
+                ymin = Math.min( ymin, p.top );
+                ymax = Math.max( ymax, p.top + e.height() );
+            });
+            return { xmin: xmin, xmax: xmax, ymin: ymin, ymax: ymax };
+        },
+        fit_canvas_to_nodes: function() {
+            // Math utils
+            function round_up( x, n ) {
+                return Math.ceil( x / n ) * n;
+            }
+            function fix_delta( x, n ) {
+                if ( x < n || x > 3 * n ) {
+                    var new_pos = ( Math.ceil( ( x % n ) / n ) + 1 ) * n;
+                    return ( - ( x - new_pos ) );
+                }
+                return 0;
+            }
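+            // Worked example: fix_delta( 40, 100 ) gives
+            // new_pos = ( ceil( ( 40 % 100 ) / 100 ) + 1 ) * 100 = 200 and
+            // returns -( 40 - 200 ) = 160, i.e. the canvas grows by 160px on
+            // that side so content keeps a comfortable margin from the edge.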
+            // Span of all elements
+            var bounds = this.bounds_for_all_nodes();
+            var position = this.canvas_container.position();
+            var parent = this.canvas_container.parent();
+            // Determine amount we need to expand on top/left
+            var xmin_delta = fix_delta( bounds.xmin, 100 );
+            var ymin_delta = fix_delta( bounds.ymin, 100 );
+            // May need to expand farther to fill viewport
+            xmin_delta = Math.max( xmin_delta, position.left );
+            ymin_delta = Math.max( ymin_delta, position.top );
+            var left = position.left - xmin_delta;
+            var top = position.top - ymin_delta;
+            // Same for width/height
+            var width = round_up( bounds.xmax + 100, 100 ) + xmin_delta;
+            var height = round_up( bounds.ymax + 100, 100 ) + ymin_delta;
+            width = Math.max( width, - left + parent.width() );
+            height = Math.max( height, - top + parent.height() );
+            // Grow the canvas container
+            this.canvas_container.css( {
+                left: left,
+                top: top,
+                width: width,
+                height: height
+            });
+            // Move elements back if needed
+            this.canvas_container.children().each( function() {
+                var p = $(this).position();
+                $(this).css( "left", p.left + xmin_delta );
+                $(this).css( "top", p.top + ymin_delta );
+            });
+        }
+    });
+    return Workflow;
+});
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-node.js b/client/galaxy/scripts/mvc/workflow/workflow-node.js
new file mode 100644
index 0000000..ee02158
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-node.js
@@ -0,0 +1,249 @@
+define(['mvc/workflow/workflow-view-node'], function( NodeView ) {
+    var Node = Backbone.Model.extend({
+        initialize: function( app, attr ) {
+            this.app = app;
+            this.element = attr.element;
+            this.input_terminals = {};
+            this.output_terminals = {};
+            this.tool_errors = {};
+            this.workflow_outputs = [];
+        },
+        getWorkflowOutput: function(outputName) {
+            return _.findWhere(this.workflow_outputs, {"output_name": outputName});
+        },
+        isWorkflowOutput: function(outputName) {
+            return this.getWorkflowOutput(outputName) != undefined;
+        },
+        removeWorkflowOutput: function(outputName) {
+            while(this.isWorkflowOutput(outputName)) {
+                var index = this.workflow_outputs.indexOf(this.getWorkflowOutput(outputName));
+                this.workflow_outputs.splice(index, 1);
+            }
+        },
+        addWorkflowOutput: function(outputName, label) {
+            if(!this.isWorkflowOutput(outputName)){
+                var output = {"output_name": outputName};
+                if( label ) {
+                    output["label"] = label;
+                }
+                this.workflow_outputs.push(output);
+                return true;
+            }
+            return false;
+        },
+        labelWorkflowOutput: function(outputName, label) {
+            var changed = false;
+            var oldLabel = null;
+            if( this.isWorkflowOutput(outputName) ) {
+                var workflowOutput = this.getWorkflowOutput(outputName);
+                oldLabel = workflowOutput["label"];
+                workflowOutput["label"] = label;
+                changed = oldLabel != label;
+            } else {
+                changed = this.addWorkflowOutput(outputName, label);
+            }
+            if( changed ) {
+                this.app.workflow.updateOutputLabel(oldLabel, label);
+                this.markChanged();
+                this.nodeView.redrawWorkflowOutputs();
+            }
+            return changed;
+        },
+        connectedOutputTerminals: function() {
+            return this._connectedTerminals( this.output_terminals );
+        },
+        _connectedTerminals: function( terminals ) {
+            var connectedTerminals = [];
+            $.each( terminals, function( _, t ) {
+                if( t.connectors.length > 0 ) {
+                    connectedTerminals.push( t );
+                }
+            } );
+            return connectedTerminals;
+        },
+        hasConnectedOutputTerminals: function() {
+            // return this.connectedOutputTerminals().length > 0; <- optimized this
+            var outputTerminals = this.output_terminals;
+            for( var outputName in outputTerminals ) {
+                if( outputTerminals[ outputName ].connectors.length > 0 ) {
+                    return true;
+                }
+            }
+            return false;
+        },
+        connectedMappedInputTerminals: function() {
+            return this._connectedMappedTerminals( this.input_terminals );
+        },
+        hasConnectedMappedInputTerminals: function() {
+            // return this.connectedMappedInputTerminals().length > 0; <- optimized this
+            var inputTerminals = this.input_terminals;
+            for( var inputName in inputTerminals ) {
+                var inputTerminal = inputTerminals[ inputName ];
+                if( inputTerminal.connectors.length > 0 && inputTerminal.isMappedOver() ) {
+                    return true;
+                }
+            }
+            return false;
+        },
+        _connectedMappedTerminals: function( terminals ) {
+            var mapped_outputs = [];
+            $.each( terminals, function( _, t ) {
+                var mapOver = t.mapOver();
+                if( mapOver.isCollection ) {
+                    if( t.connectors.length > 0 ) {
+                        mapped_outputs.push( t );
+                    }
+                }
+            });
+            return mapped_outputs;
+        },
+        mappedInputTerminals: function() {
+            return this._mappedTerminals( this.input_terminals );
+        },
+        _mappedTerminals: function( terminals ) {
+            var mappedTerminals = [];
+            $.each( terminals, function( _, t ) {
+                var mapOver = t.mapOver();
+                if( mapOver.isCollection ) {
+                    mappedTerminals.push( t );
+                }
+            } );
+            return mappedTerminals;
+        },
+        hasMappedOverInputTerminals: function() {
+            var found = false;
+            _.each( this.input_terminals, function( t ) {
+                var mapOver = t.mapOver();
+                if( mapOver.isCollection ) {
+                    found = true;
+                }
+            } );
+            return found;
+        },
+        redraw : function () {
+            $.each( this.input_terminals, function( _, t ) {
+                t.redraw();
+            });
+            $.each( this.output_terminals, function( _, t ) {
+                t.redraw();
+            });
+        },
+        destroy : function () {
+            $.each( this.input_terminals, function( k, t ) {
+                t.destroy();
+            });
+            $.each( this.output_terminals, function( k, t ) {
+                t.destroy();
+            });
+            this.app.workflow.remove_node( this );
+            $(this.element).remove();
+        },
+        make_active : function () {
+            $(this.element).addClass( "toolForm-active" );
+        },
+        make_inactive : function () {
+            // Keep inactive nodes stacked from most to least recently active
+            // by moving element to the end of parent's node list
+            var element = this.element.get(0);
+            (function(p) { p.removeChild( element ); p.appendChild( element ); })(element.parentNode);
+            // Remove active class
+            $(element).removeClass( "toolForm-active" );
+        },
+        setLabel: function(label) {
+            this.app.workflow.updateNodeLabel(this.label, label);
+            this.label = label || null;
+        },
+        init_field_data : function ( data ) {
+            if ( data.type ) {
+                this.type = data.type;
+            }
+            this.name = data.name;
+            this.form_html = data.form_html;
+            this.tool_state = data.tool_state;
+            this.tool_errors = data.tool_errors;
+            this.tooltip = data.tooltip ? data.tooltip : "";
+            this.annotation = data.annotation;
+            this.post_job_actions = data.post_job_actions ? data.post_job_actions : {};
+            this.setLabel(data.label);
+            this.uuid = data.uuid;
+            this.workflow_outputs = data.workflow_outputs ? data.workflow_outputs : [];
+
+            var node = this;
+            var nodeView = new NodeView({
+                el: this.element[ 0 ],
+                node: node
+            });
+            node.nodeView = nodeView;
+
+            $.each( data.data_inputs, function( i, input ) {
+                nodeView.addDataInput( input );
+            });
+            if ( ( data.data_inputs.length > 0 ) && ( data.data_outputs.length > 0 ) ) {
+                nodeView.addRule();
+            }
+            $.each( data.data_outputs, function( i, output ) {
+                nodeView.addDataOutput( output );
+            } );
+            nodeView.render();
+            this.app.workflow.node_changed( this, true);
+        },
+        update_field_data : function( data ) {
+            var node = this,
+                nodeView = node.nodeView;
+            this.tool_state = data.tool_state;
+            this.form_html = data.form_html;
+            this.tool_errors = data.tool_errors;
+            this.annotation = data['annotation'];
+            this.setLabel(data.label);
+
+            if( "post_job_actions" in data ) {
+                // Won't be present in response for data inputs
+                var pja_in = $.parseJSON(data.post_job_actions);
+                this.post_job_actions = pja_in ? pja_in : {};
+            }
+            node.nodeView.renderToolErrors();
+            // Update input rows
+            var old_body = nodeView.$( "div.inputs" );
+            var new_body = nodeView.newInputsDiv();
+            var newTerminalViews = {};
+            _.each( data.data_inputs, function( input ) {
+                var terminalView = node.nodeView.addDataInput( input, new_body );
+                newTerminalViews[ input.name ] = terminalView;
+            });
+            // Cleanup any leftover terminals
+            _.each( _.difference( _.values( nodeView.terminalViews ), _.values( newTerminalViews ) ), function( unusedView ) {
+                unusedView.el.terminal.destroy();
+            } );
+            nodeView.terminalViews = newTerminalViews;
+            node.nodeView.render();
+            // In general the workflow editor assumes tool outputs don't change in
+            // number or type (not strictly valid), but add special logic here for
+            // data collection input parameters whose collection type can change.
+            if( data.data_outputs.length == 1 && "collection_type" in data.data_outputs[ 0 ] ) {
+                nodeView.updateDataOutput( data.data_outputs[ 0 ] );
+            }
+            old_body.replaceWith( new_body );
+
+            if( "workflow_outputs" in data ) {
+                // Won't be present in response for data inputs
+                this.workflow_outputs = data.workflow_outputs ? data.workflow_outputs : [];
+            }
+
+            // If active, reactivate with new form_html
+            this.markChanged();
+            this.redraw();
+        },
+        error : function ( text ) {
+            var b = $(this.element).find( ".toolFormBody" );
+            b.find( "div" ).remove();
+            var tmp = "<div style='color: red; text-style: italic;'>" + text + "</div>";
+            this.form_html = tmp;
+            b.html( tmp );
+            this.app.workflow.node_changed( this );
+        },
+        markChanged: function() {
+            this.app.workflow.node_changed( this );
+        }
+    });
+    return Node;
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-terminals.js b/client/galaxy/scripts/mvc/workflow/workflow-terminals.js
new file mode 100644
index 0000000..cd6170e
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-terminals.js
@@ -0,0 +1,503 @@
+define(['mvc/workflow/workflow-globals'], function( Globals ) {
+    function CollectionTypeDescription( collectionType ) {
+        this.collectionType = collectionType;
+        this.isCollection = true;
+        this.rank = collectionType.split(":").length;
+    }
+    $.extend( CollectionTypeDescription.prototype, {
+        append: function( otherCollectionTypeDescription ) {
+            if( otherCollectionTypeDescription === NULL_COLLECTION_TYPE_DESCRIPTION ) {
+                return this;
+            }
+            if( otherCollectionTypeDescription === ANY_COLLECTION_TYPE_DESCRIPTION ) {
+                return otherCollectionTypeDescription;
+            }
+            return new CollectionTypeDescription( this.collectionType + ":" + otherCollectionTypeDescription.collectionType );
+        },
+        canMatch: function( otherCollectionTypeDescription ) {
+            if( otherCollectionTypeDescription === NULL_COLLECTION_TYPE_DESCRIPTION ) {
+                return false;
+            }
+            if( otherCollectionTypeDescription === ANY_COLLECTION_TYPE_DESCRIPTION ) {
+                return true;
+            }
+            return otherCollectionTypeDescription.collectionType == this.collectionType;
+        },
+        canMapOver: function( otherCollectionTypeDescription ) {
+            if( otherCollectionTypeDescription === NULL_COLLECTION_TYPE_DESCRIPTION ) {
+                return false;
+            }
+            if( otherCollectionTypeDescription === ANY_COLLECTION_TYPE_DESCRIPTION ) {
+                return false;
+            }
+            if( this.rank <= otherCollectionTypeDescription.rank ) {
+                // Cannot map over self...
+                return false;
+            }
+            var requiredSuffix = otherCollectionTypeDescription.collectionType;
+            return this._endsWith( this.collectionType, requiredSuffix );
+        },
+        effectiveMapOver: function( otherCollectionTypeDescription ) {
+            var otherCollectionType = otherCollectionTypeDescription.collectionType;
+            var effectiveCollectionType = this.collectionType.substring( 0, this.collectionType.length - otherCollectionType.length - 1 );
+            return new CollectionTypeDescription( effectiveCollectionType );
+        },
+        equal: function( otherCollectionTypeDescription ) {
+            return otherCollectionTypeDescription.collectionType == this.collectionType;
+        },
+        toString: function() {
+            return "CollectionType[" + this.collectionType + "]";
+        },
+        _endsWith: function( str, suffix ) {
+            return str.indexOf(suffix, str.length - suffix.length) !== -1;
+        }
+    } );
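+    // Illustrative behaviour of CollectionTypeDescription (sketch, mirrors
+    // the methods above):
+    //   var listPaired = new CollectionTypeDescription( "list:paired" );
+    //   listPaired.rank;                                                     // 2
+    //   listPaired.canMapOver( new CollectionTypeDescription( "paired" ) );  // true
+    //   listPaired.effectiveMapOver( new CollectionTypeDescription( "paired" ) ).collectionType; // "list"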
+
+    var NULL_COLLECTION_TYPE_DESCRIPTION = {
+        isCollection: false,
+        canMatch: function( other ) { return false; },
+        canMapOver: function( other ) {
+            return false;
+        },
+        toString: function() {
+            return "NullCollectionType[]";
+        },
+        append: function( otherCollectionType ) {
+            return otherCollectionType;
+        },
+        equal: function( other ) {
+            return other === this;
+        }
+    };
+
+    var ANY_COLLECTION_TYPE_DESCRIPTION = {
+        isCollection: true,
+        canMatch: function( other ) { return NULL_COLLECTION_TYPE_DESCRIPTION !== other; },
+        canMapOver: function( other ) {
+            return false;
+        },
+        toString: function() {
+            return "AnyCollectionType[]";
+        },
+        append: function( otherCollectionType ) {
+            throw "Cannot append to ANY_COLLECTION_TYPE_DESCRIPTION";
+        },
+        equal: function( other ) {
+            return other === this;
+        }
+    };
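+    // The two singletons above act as sentinels, e.g.:
+    //   NULL_COLLECTION_TYPE_DESCRIPTION.append( x ) === x      // identity
+    //   ANY_COLLECTION_TYPE_DESCRIPTION.canMatch( x )           // true unless x is NULL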
+
+    var TerminalMapping = Backbone.Model.extend( {
+        initialize: function( attr ) {
+            this.mapOver = attr.mapOver || NULL_COLLECTION_TYPE_DESCRIPTION;
+            this.terminal = attr.terminal;
+            this.terminal.terminalMapping = this;
+        },
+        disableMapOver: function() {
+            this.setMapOver( NULL_COLLECTION_TYPE_DESCRIPTION );
+        },
+        setMapOver: function( collectionTypeDescription ) {
+            // TODO: Can I use "attributes" or something to auto trigger "change"
+            // event?
+            this.mapOver = collectionTypeDescription;
+            this.trigger("change");
+        }
+    } );
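+    // A TerminalMapping ties a terminal to its current map-over state; views
+    // (see TerminalMappingView in workflow-view-terminals.js) listen for its
+    // "change" event to show or hide the map-over indicator.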
+
+    var Terminal = Backbone.Model.extend( {
+        initialize: function( attr ) {
+            this.element = attr.element;
+            this.connectors = [];
+        },
+        connect: function ( connector ) {
+            this.connectors.push( connector );
+            if ( this.node ) {
+                this.node.markChanged();
+            }
+        },
+        disconnect: function ( connector ) {
+            this.connectors.splice( $.inArray( connector, this.connectors ), 1 );
+            if ( this.node ) {
+                this.node.markChanged();
+                this.resetMappingIfNeeded();
+            }
+        },
+        redraw: function () {
+            $.each( this.connectors, function( _, c ) {
+                c.redraw();  
+            });
+        },
+        destroy: function () {
+            $.each( this.connectors.slice(), function( _, c ) {
+                c.destroy();
+            });
+        },
+        destroyInvalidConnections: function( ) {
+            _.each( this.connectors, function( connector ) {
+                connector.destroyIfInvalid();
+            } );
+        },
+        setMapOver : function( val ) {
+            if( this.multiple ) {
+                return; // Cannot set this to be multirun...
+            }
+
+            if( ! this.mapOver().equal( val ) ) {
+                this.terminalMapping.setMapOver( val );
+                _.each( this.node.output_terminals, function( outputTerminal ) {
+                    outputTerminal.setMapOver( val );
+                } );
+            }
+        },
+        mapOver: function( ) {
+            if ( ! this.terminalMapping ) {
+                return NULL_COLLECTION_TYPE_DESCRIPTION;
+            } else {
+                return this.terminalMapping.mapOver;
+            }
+        },
+        isMappedOver: function( ) {
+            return this.terminalMapping && this.terminalMapping.mapOver.isCollection;
+        },
+        resetMapping: function() {
+            this.terminalMapping.disableMapOver();
+        },
+
+        resetMappingIfNeeded: function( ) {} // Subclasses should override this...
+
+    } );
+
+    var OutputTerminal = Terminal.extend( {
+        initialize: function( attr ) {
+            Terminal.prototype.initialize.call( this, attr );
+            this.datatypes = attr.datatypes;
+        },
+
+        resetMappingIfNeeded: function( ) {
+            // If the inputs were only mapped over to preserve an output that
+            // was just disconnected, reset them as well...
+            if( ! this.node.hasConnectedOutputTerminals() && ! this.node.hasConnectedMappedInputTerminals()){
+                _.each( this.node.mappedInputTerminals(), function( mappedInput ) {
+                    mappedInput.resetMappingIfNeeded();
+                } );
+            }
+
+            var noMappedInputs = ! this.node.hasMappedOverInputTerminals();
+            if( noMappedInputs ) {
+                this.resetMapping();
+            }
+        },
+
+        resetMapping: function() {
+            this.terminalMapping.disableMapOver();
+            _.each( this.connectors, function( connector ) {
+                var connectedInput = connector.handle2;
+                if( connectedInput ) {
+                    // Not exactly right because this is still connected.
+                    // Either rewrite resetMappingIfNeeded or disconnect
+                    // and reconnect if valid.
+                    connectedInput.resetMappingIfNeeded();
+                    connector.destroyIfInvalid();
+                }
+            } );
+        }
+
+    } );
+
+    var BaseInputTerminal = Terminal.extend( {
+        initialize: function( attr ) {
+            Terminal.prototype.initialize.call( this, attr );
+            this.update( attr.input ); // subclasses should implement this...
+        },
+        canAccept: function ( other ) {
+            if( this._inputFilled() ) {
+                return false;
+            } else {
+                return this.attachable( other );
+            }
+        },
+        resetMappingIfNeeded: function( ) {
+            var mapOver = this.mapOver();
+            if( ! mapOver.isCollection ) {
+                return;
+            }
+            // No output terminal depends on this input staying mapped over
+            // if the connected inputs are still mapped over or if none of
+            // the outputs are connected...
+            var reset = this.node.hasConnectedMappedInputTerminals() ||
+                            ( ! this.node.hasConnectedOutputTerminals() );
+            if( reset ) {
+                this.resetMapping();
+            }
+        },
+        resetMapping: function() {
+            this.terminalMapping.disableMapOver();
+            if( ! this.node.hasMappedOverInputTerminals() ) {
+                _.each( this.node.output_terminals, function( terminal) {
+                    // This shouldn't be called if there are mapped over
+                    // outputs.
+                    terminal.resetMapping();
+                } );
+            }
+        },
+        connected: function() {
+            return this.connectors.length !== 0;
+        },
+        _inputFilled: function() {
+            var inputFilled;
+            if( ! this.connected() ) {
+                inputFilled = false;
+            } else {
+                if( this.multiple ) {
+                    if(this._collectionAttached()) {
+                        // Can only attach one collection to multiple input
+                        // data parameter.
+                        inputFilled = true;
+                    } else {
+                        inputFilled = false;
+                    }
+                } else {
+                    inputFilled = true;
+                }
+            }
+            return inputFilled;
+        },
+        _collectionAttached: function( ) {
+            if( ! this.connected() ) {
+                return false;
+            } else {
+                var firstOutput = this.connectors[ 0 ].handle1;
+                if( ! firstOutput ){
+                    return false;
+                } else {
+                    if( firstOutput.isCollection || firstOutput.isMappedOver() || firstOutput.datatypes.indexOf( "input_collection" ) >= 0 ) {
+                        return true;
+                    } else {
+                        return false;
+                    }
+                }
+            }
+        },
+        _mappingConstraints: function( ) {
+            // If this is a connected terminal, return list of collection types
+            // other terminals connected to node are constraining mapping to.
+            if( ! this.node ) {
+                return [];  // No node - completely unconstrained
+            }
+            var mapOver = this.mapOver();
+            if( mapOver.isCollection ) {
+                return [ mapOver ];
+            }
+
+            var constraints = [];
+            if( ! this.node.hasConnectedOutputTerminals() ) {
+                _.each( this.node.connectedMappedInputTerminals(), function( inputTerminal ) {
+                    constraints.push( inputTerminal.mapOver() );
+                } );
+            } else {
+                // All outputs should have same mapOver status - least specific.
+                constraints.push( _.first( _.values( this.node.output_terminals ) ).mapOver() );
+            }
+            return constraints;
+        },
+        _producesAcceptableDatatype: function( other ) {
+            // other is a non-collection output...
+            for ( var t in this.datatypes ) {
+                var thisDatatype = this.datatypes[t];
+                if( thisDatatype == "input" ) {
+                    return true;
+                }
+                var cat_outputs = [].concat( other.datatypes );
+                if (other.node.post_job_actions){
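+                    // e.g. a ChangeDatatypeAction with action_arguments
+                    // { newtype: "bed" } makes a tabular output acceptable
+                    // to a bed-typed input (illustrative values).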
+                    for (var pja_i in other.node.post_job_actions){
+                        var pja = other.node.post_job_actions[pja_i];
+                        if (pja.action_type == "ChangeDatatypeAction" && (pja.output_name == '' || pja.output_name == other.name) && pja.action_arguments){
+                            cat_outputs.push(pja.action_arguments['newtype']);
+                        }
+                    }
+                }
+                // FIXME: No idea what to do about case when datatype is 'input'
+                for ( var other_datatype_i in cat_outputs ) {
+                    var other_datatype = cat_outputs[other_datatype_i];
+                    if ( other_datatype == "input" || other_datatype == "_sniff_" || other_datatype == "input_collection" || Globals.app.isSubType( cat_outputs[other_datatype_i], thisDatatype ) ) {
+                        return true;
+                    }
+                }
+            }
+            return false;
+        },
+        _otherCollectionType: function( other ) {
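+            // Combine the other terminal's own collection type with any
+            // map-over already applied to it; e.g. a "paired" output mapped
+            // over a "list" is effectively "list:paired" to this input.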
+            var otherCollectionType = NULL_COLLECTION_TYPE_DESCRIPTION;
+            if( other.isCollection ) {
+                otherCollectionType = other.collectionType;
+            }
+            var otherMapOver = other.mapOver();
+            if( otherMapOver.isCollection ) {
+                otherCollectionType = otherMapOver.append(otherCollectionType);
+            }
+            return otherCollectionType;
+        },
+    } );
+
+    var InputTerminal = BaseInputTerminal.extend( {
+        update: function( input ) {
+            this.datatypes = input.extensions;
+            this.multiple = input.multiple;
+            this.collection = false;
+        },
+        connect: function( connector ) {
+            BaseInputTerminal.prototype.connect.call( this, connector );
+            var other_output = connector.handle1;
+            if( ! other_output ) {
+                return;
+            }
+            var otherCollectionType = this._otherCollectionType( other_output );
+            if( otherCollectionType.isCollection ) {
+                this.setMapOver( otherCollectionType );
+            }
+        },
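+        // Decide whether an output terminal may attach here. Rough cases
+        // (sketched from the logic below):
+        //   - collection output onto a multiple-data input: only rank-1 lists,
+        //     and not when plain datasets are already attached
+        //   - collection output onto a plain input: must satisfy any existing
+        //     map-over constraints on this node
+        //   - plain output onto an input already mapped over: rejected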
+        attachable: function( other ) {
+            var otherCollectionType = this._otherCollectionType( other );
+            var thisMapOver = this.mapOver();
+            if( otherCollectionType.isCollection ) {
+                if( this.multiple ) {
+                    if( this.connected() && ! this._collectionAttached() ) {
+                        // if single inputs attached, cannot also attach a
+                        // collection (yet...)
+                        return false;
+                    }
+                    if( otherCollectionType.rank == 1 ) {
+                        return this._producesAcceptableDatatype( other );
+                    } else {
+                        // TODO: Allow subcollection mapping over this as if it were
+                        // a list collection input.
+                        return false;
+                    }
+                }
+                if( thisMapOver.isCollection && thisMapOver.canMatch( otherCollectionType ) ) {
+                    return this._producesAcceptableDatatype( other );
+                } else {
+                    //  Need to check if this would break constraints...
+                    var mappingConstraints = this._mappingConstraints();
+                    if( mappingConstraints.every( _.bind( otherCollectionType.canMatch, otherCollectionType ) ) ) {
+                        return this._producesAcceptableDatatype( other );
+                    } else {
+                        return false;
+                    }
+                }
+            } else if( thisMapOver.isCollection ) {
+                // Attempting to match a non-collection output to an
+                // explicitly collection input.
+                return false;
+            }
+            return this._producesAcceptableDatatype( other );
+        }
+    });
+
+    var InputCollectionTerminal = BaseInputTerminal.extend( {
+        update: function( input ) {
+            this.multiple = false;
+            this.collection = true;
+            this.datatypes = input.extensions;
+            var collectionTypes = [];
+            if( input.collection_types ) {
+                _.each(input.collection_types, function(collectionType) {
+                    collectionTypes.push( new CollectionTypeDescription( collectionType ) );
+                });
+            } else {
+                collectionTypes.push(ANY_COLLECTION_TYPE_DESCRIPTION);
+            }
+            this.collectionTypes = collectionTypes;
+        },
+        connect: function( connector ) {
+            BaseInputTerminal.prototype.connect.call( this, connector );
+            var other = connector.handle1;
+            if( ! other ) {
+                return;
+            }
+
+            var effectiveMapOver = this._effectiveMapOver( other );
+            this.setMapOver( effectiveMapOver );
+        },
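+        // Determine what this input would have to map over for one of its
+        // accepted collection types to match; e.g. a "list:paired" output
+        // dragged onto a "paired" input yields an effective map-over of "list".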
+        _effectiveMapOver: function( other ) {
+            var collectionTypes = this.collectionTypes;
+            var otherCollectionType = this._otherCollectionType( other );
+            var canMatch = _.some(collectionTypes, function( collectionType) { return collectionType.canMatch( otherCollectionType ); });
+
+            if( ! canMatch ) {
+                for( var collectionTypeIndex in collectionTypes ) {
+                    var collectionType = collectionTypes[collectionTypeIndex];
+                    var effectiveMapOver = otherCollectionType.effectiveMapOver( collectionType );
+                    if( effectiveMapOver != NULL_COLLECTION_TYPE_DESCRIPTION ) {
+                        return effectiveMapOver;
+                    }
+                }
+            }
+            return NULL_COLLECTION_TYPE_DESCRIPTION;
+        },
+        _effectiveCollectionTypes: function( ) {
+            var thisMapOver = this.mapOver();
+            return _.map(this.collectionTypes, function(t) { return thisMapOver.append(t); });
+        },
+        attachable: function ( other ) {
+            var otherCollectionType = this._otherCollectionType( other );
+            if( otherCollectionType.isCollection ) {
+                var effectiveCollectionTypes = this._effectiveCollectionTypes( );
+                var thisMapOver = this.mapOver();
+                var canMatch = _.some(effectiveCollectionTypes, function( effectiveCollectionType) { return effectiveCollectionType.canMatch( otherCollectionType ); });
+                if( canMatch ) {
+                    // A direct match - the only way to attach without mapping over.
+                    return this._producesAcceptableDatatype( other );
+                } else if( thisMapOver.isCollection ) {
+                    // mapOver is already set and we did not match directly - skip.
+                    return false;
+                } else if( _.some(this.collectionTypes, function(collectionType) { return otherCollectionType.canMapOver( collectionType ); }) ) {
+                    var effectiveMapOver = this._effectiveMapOver( other );
+                    if( ! effectiveMapOver.isCollection ) {
+                        return false;
+                    }
+                    //  Need to check if this would break constraints...
+                    var mappingConstraints = this._mappingConstraints();
+                    if( mappingConstraints.every( _.bind( effectiveMapOver.canMatch, effectiveMapOver ) ) ) {
+                        return this._producesAcceptableDatatype( other );
+                    }
+                }
+            }
+            return false;
+        }
+    });
+
+    var OutputCollectionTerminal = Terminal.extend( {
+        initialize: function( attr ) {
+            Terminal.prototype.initialize.call( this, attr );
+            this.datatypes = attr.datatypes;
+            this.collectionType = new CollectionTypeDescription( attr.collection_type );
+            this.isCollection = true;
+        },
+        update: function( output ) {
+            var newCollectionType = new CollectionTypeDescription( output.collection_type );
+            if( newCollectionType.collectionType != this.collectionType.collectionType ) {
+                _.each( this.connectors, function( connector ) {
+                    // TODO: consider checking if connection valid before removing...
+                    connector.destroy();
+                } );
+            }
+            this.collectionType = newCollectionType;
+        }
+    } );
+
+    return {
+        InputTerminal                       : InputTerminal,
+        OutputTerminal                      : OutputTerminal,
+        InputCollectionTerminal             : InputCollectionTerminal,
+        OutputCollectionTerminal            : OutputCollectionTerminal,
+        TerminalMapping                     : TerminalMapping,
+
+        // test export
+        CollectionTypeDescription           : CollectionTypeDescription,
+        NULL_COLLECTION_TYPE_DESCRIPTION    : NULL_COLLECTION_TYPE_DESCRIPTION,
+        ANY_COLLECTION_TYPE_DESCRIPTION     : ANY_COLLECTION_TYPE_DESCRIPTION
+    }
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-view-data.js b/client/galaxy/scripts/mvc/workflow/workflow-view-data.js
new file mode 100644
index 0000000..190b6c6
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-view-data.js
@@ -0,0 +1,132 @@
+define(['mvc/workflow/workflow-globals'], function( Globals ){
+
+    var DataInputView = Backbone.View.extend( {
+        className: "form-row dataRow input-data-row",
+
+        initialize: function( options ){
+            this.input = options.input;
+            this.nodeView = options.nodeView;
+            this.terminalElement = options.terminalElement;
+
+            this.$el.attr( "name", this.input.name )
+                    .html( this.input.label );
+
+            if( ! options.skipResize ) {
+                this.$el.css({  position:'absolute',
+                                left: -1000,
+                                top: -1000,
+                                display:'none'});
+                $('body').append(this.el);
+                this.nodeView.updateMaxWidth( this.$el.outerWidth() );
+                this.$el.css({ position:'',
+                               left:'',
+                               top:'',
+                               display:'' });
+                this.$el.remove();
+            }
+        }
+    });
+
+    var DataOutputView = Backbone.View.extend( {
+        className: "form-row dataRow",
+
+        initialize: function( options ) {
+            this.output = options.output;
+            this.terminalElement = options.terminalElement;
+            this.nodeView = options.nodeView;
+
+            var output = this.output;
+            var label = output.name;
+            var node = this.nodeView.node;
+
+            var isInput = output.extensions.indexOf( 'input' ) >= 0 || output.extensions.indexOf( 'input_collection' ) >= 0;
+            if ( ! isInput ) {
+                label = label + " (" + output.extensions.join(", ") + ")";
+            }
+            this.$el.html( label );
+            this.calloutView = null;
+            if (['tool', 'subworkflow'].indexOf(node.type) >= 0){
+                var calloutView = new OutputCalloutView( {
+                    "label": label,
+                    "output": output,
+                    "node": node,
+                });
+                this.calloutView = calloutView;
+                this.$el.append( calloutView.el );
+                this.$el.hover( function() { calloutView.hoverImage() }, function() { calloutView.resetImage() } );
+            }
+            this.$el.css({  position:'absolute',
+                            left: -1000,
+                            top: -1000,
+                            display:'none'});
+            $('body').append( this.el );
+            this.nodeView.updateMaxWidth( this.$el.outerWidth() + 17 );
+            this.$el.css({ position:'',
+                           left:'',
+                           top:'',
+                           display:'' })
+                    .detach();
+        },
+        redrawWorkflowOutput: function() {
+            if( this.calloutView ) {
+                this.calloutView.resetImage();
+            }
+        }
+    });
+
+    var OutputCalloutView = Backbone.View.extend( {
+        tagName: "div",
+
+        initialize: function( options ) {
+            this.label = options.label;
+            this.node = options.node;
+            this.output = options.output;
+
+            var view = this;
+            var node = this.node;
+            this.$el
+                .attr( "class", 'callout '+this.label )
+                .css( { display: 'none' } )
+                .append(
+                    $("<div class='buttons'></div>").append(
+                        $("<img/>").attr('src', Galaxy.root + 'static/images/fugue/asterisk-small-outline.png').click( function() {
+                            var outputName = view.output.name;
+                            if( node.isWorkflowOutput( outputName ) ) {
+                                node.removeWorkflowOutput( outputName );
+                                view.$('img').attr('src', Galaxy.root + 'static/images/fugue/asterisk-small-outline.png');
+                            }else{
+                                node.addWorkflowOutput( outputName );
+                                view.$('img').attr('src', Galaxy.root + 'static/images/fugue/asterisk-small.png');
+                            }
+                            Globals.workflow.has_changes = true;
+                            Globals.canvas_manager.draw_overview();
+                        })))
+                .tooltip({delay:500, title: "Mark dataset as a workflow output. All unmarked datasets will be hidden." });
+
+            this.$el.css({
+                    top: '50%',
+                    margin:'-8px 0px 0px 0px',
+                    right: 8
+                });
+            this.$el.show();
+            this.resetImage();
+        },
+
+        resetImage: function() {
+            if ( ! this.node.isWorkflowOutput( this.output.name ) ) {
+                this.$('img').attr('src', Galaxy.root + 'static/images/fugue/asterisk-small-outline.png');
+            } else{
+                this.$('img').attr('src', Galaxy.root + 'static/images/fugue/asterisk-small.png');
+            }
+        },
+
+        hoverImage: function() {
+            this.$('img').attr('src', Galaxy.root + 'static/images/fugue/asterisk-small-yellow.png');
+        }
+    });
+
+    return {
+        DataInputView       : DataInputView,
+        DataOutputView      : DataOutputView
+    }
+});
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-view-node.js b/client/galaxy/scripts/mvc/workflow/workflow-view-node.js
new file mode 100644
index 0000000..dfe987f
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-view-node.js
@@ -0,0 +1,102 @@
+define(['libs/underscore', 'mvc/workflow/workflow-view-terminals', 'mvc/workflow/workflow-view-data'], function( _, TerminalViews, DataViews ) {
+    return Backbone.View.extend( {
+        initialize: function( options ){
+            this.node = options.node;
+            this.output_width = Math.max( 150, this.$el.width() );
+            this.tool_body = this.$el.find( '.toolFormBody' );
+            this.tool_body.find( 'div' ).remove();
+            this.newInputsDiv().appendTo( this.tool_body );
+            this.terminalViews = {};
+            this.outputViews = {};
+        },
+
+        render: function() {
+            this.renderToolLabel();
+            this.renderToolErrors();
+            this.$el.css( 'width', Math.min( 250, Math.max( this.$el.width(), this.output_width ) ) );
+        },
+
+        renderToolLabel: function() {
+            this.$( '.nodeTitle' ).text( this.node.label || this.node.name );
+        },
+
+        renderToolErrors: function() {
+            this.node.tool_errors ? this.$el.addClass( 'tool-node-error' ) : this.$el.removeClass( 'tool-node-error' );
+        },
+
+        newInputsDiv: function() {
+            return $( '<div/>' ).addClass( 'inputs' );
+        },
+
+        updateMaxWidth: function( newWidth ) {
+            this.output_width = Math.max( this.output_width, newWidth );
+        },
+
+        addRule: function() {
+            this.tool_body.append( $( '<div/>' ).addClass( 'rule' ) );
+        },
+
+        addDataInput: function( input, body ) {
+            var skipResize = true;
+            if( !body ) {
+                body = this.$( '.inputs' );
+                // initial addition to node - resize input to help calculate node
+                // width.
+                skipResize = false;
+            }
+            var terminalView = this.terminalViews[ input.name ];
+            var terminalViewClass = ( input.input_type == 'dataset_collection' ) ? TerminalViews.InputCollectionTerminalView : TerminalViews.InputTerminalView;
+            if( terminalView && !( terminalView instanceof terminalViewClass ) ) {
+                terminalView.el.terminal.destroy();
+                terminalView = null;
+            }
+            if( !terminalView ) {
+                terminalView = new terminalViewClass( {
+                    node: this.node,
+                    input: input
+                } );             
+            } else {
+                var terminal = terminalView.el.terminal;
+                terminal.update( input );
+                terminal.destroyInvalidConnections();
+            }
+            this.terminalViews[ input.name ] = terminalView;
+            var terminalElement = terminalView.el;
+            var inputView = new DataViews.DataInputView( {
+                terminalElement: terminalElement,
+                input: input, 
+                nodeView: this,
+                skipResize: skipResize
+            } );
+            var ib = inputView.$el;
+            body.append( ib.prepend( terminalView.terminalElements() ) );
+            return terminalView;
+        },
+
+        addDataOutput: function( output ) {
+            var terminalViewClass = ( output.collection ) ? TerminalViews.OutputCollectionTerminalView : TerminalViews.OutputTerminalView;
+            var terminalView = new terminalViewClass( {
+                node: this.node,
+                output: output
+            } );
+            var outputView = new DataViews.DataOutputView( {
+                'output': output,
+                'terminalElement': terminalView.el,
+                'nodeView': this,
+            } );
+            this.outputViews[ output.name ] = outputView;
+            this.tool_body.append( outputView.$el.append( terminalView.terminalElements() ) );
+        },
+
+        redrawWorkflowOutputs: function() {
+            _.each( this.outputViews, function( outputView ) {
+                outputView.redrawWorkflowOutput();
+            });
+        },
+
+        updateDataOutput: function( output ) {
+            var outputTerminal = this.node.output_terminals[ output.name ];
+            outputTerminal.update( output );
+        }
+    });
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-view-terminals.js b/client/galaxy/scripts/mvc/workflow/workflow-view-terminals.js
new file mode 100644
index 0000000..0e0f139
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-view-terminals.js
@@ -0,0 +1,238 @@
+define(['mvc/workflow/workflow-globals', 'mvc/workflow/workflow-terminals',
+        'mvc/workflow/workflow-connector'], function( Globals, Terminals, Connector ) {
+
+    var TerminalMappingView = Backbone.View.extend( {
+        tagName: "div",
+        className: "fa-icon-button fa fa-folder-o",
+        initialize: function( options ) {
+            var mapText = "Run tool in parallel over collection";
+            this.$el.tooltip( {delay: 500, title: mapText } );
+            this.model.bind( "change", _.bind( this.render, this ) );
+        },
+        render: function() {
+            if( this.model.mapOver.isCollection ) {
+                this.$el.show();
+            } else {
+                this.$el.hide();
+            }
+        }
+    });
+
+    var InputTerminalMappingView = TerminalMappingView.extend( {
+        events: {
+            "click": "onClick",
+            "mouseenter": "onMouseEnter",
+            "mouseleave": "onMouseLeave",
+        },
+        onMouseEnter: function( e ) {
+            var model = this.model;
+            if( ! model.terminal.connected() && model.mapOver.isCollection ) {
+                this.$el.css( "color", "red" );
+            }
+        },
+        onMouseLeave: function( e ) {
+            this.$el.css( "color", "black" );
+        },
+        onClick: function( e ) {
+            var model = this.model;
+            if( ! model.terminal.connected() && model.mapOver.isCollection ) {
+                // TODO: Consider prompting...
+                model.terminal.resetMapping();
+            }
+        }
+    });
+
+    var TerminalView = Backbone.View.extend( {
+        setupMappingView: function( terminal ) {
+            var terminalMapping = new this.terminalMappingClass( { terminal: terminal } );
+            var terminalMappingView = new this.terminalMappingViewClass( { model: terminalMapping } );
+            terminalMappingView.render();
+            terminal.terminalMappingView = terminalMappingView;
+            this.terminalMappingView = terminalMappingView;
+        },
+        terminalElements: function() {
+            if( this.terminalMappingView ) {
+                return [ this.terminalMappingView.el, this.el ];
+            } else{
+                return [ this.el ];
+            }
+        }
+    });
+
+    var BaseInputTerminalView = TerminalView.extend( {
+        className: "terminal input-terminal",
+        initialize: function( options ) {
+            var node = options.node;
+            var input = options.input;
+            var name = input.name;
+            var terminal = this.terminalForInput( input );
+            if( ! terminal.multiple ) {
+                this.setupMappingView( terminal );
+            }
+            this.el.terminal = terminal;
+            terminal.node = node;
+            terminal.name = name;
+            node.input_terminals[name] = terminal;
+        },
+        events: {
+            "dropinit": "onDropInit",
+            "dropstart": "onDropStart",
+            "dropend": "onDropEnd",
+            "drop": "onDrop",
+            "hover": "onHover",
+        },
+        onDropInit: function( e, d ) {
+            var terminal = this.el.terminal;
+            // Accept a draggable only if it is an output terminal with a
+            // compatible type.
+            return $(d.drag).hasClass( "output-terminal" ) && terminal.canAccept( d.drag.terminal );
+        },
+        onDropStart: function( e, d  ) {
+            if (d.proxy.terminal) { 
+                d.proxy.terminal.connectors[0].inner_color = "#BBFFBB";
+            }
+        },
+        onDropEnd: function ( e, d ) {
+            if (d.proxy.terminal) { 
+                d.proxy.terminal.connectors[0].inner_color = "#FFFFFF";
+            }
+        },
+        onDrop: function( e, d ) {
+            var terminal = this.el.terminal;        
+            new Connector( d.drag.terminal, terminal ).redraw();
+        },
+        onHover: function() {
+            var element = this.el;
+            var terminal = element.terminal;
+            // If connected, create a popup to allow disconnection
+            if ( terminal.connectors.length > 0 ) {
+                // Create callout
+                var t = $("<div class='callout'></div>")
+                    .css( { display: 'none' } )
+                    .appendTo( "body" )
+                    .append(
+                        $("<div class='button'></div>").append(
+                            $("<div/>").addClass("fa-icon-button fa fa-times").click( function() {
+                                $.each( terminal.connectors, function( _, x ) {
+                                    if (x) {
+                                        x.destroy();
+                                    }
+                                });
+                                t.remove();
+                            })))
+                    .bind( "mouseleave", function() {
+                        $(this).remove();
+                    });
+                // Position it and show
+                t.css({
+                        top: $(element).offset().top - 2,
+                        left: $(element).offset().left - t.width(),
+                        'padding-right': $(element).width()
+                    }).show();
+            }
+        }
+    });
+
+    var InputTerminalView = BaseInputTerminalView.extend( {
+        terminalMappingClass: Terminals.TerminalMapping,
+        terminalMappingViewClass: InputTerminalMappingView,
+        terminalForInput: function( input ) {
+            return new Terminals.InputTerminal( { element: this.el, input: input } );
+        }
+    });
+
+    var InputCollectionTerminalView = BaseInputTerminalView.extend( {
+        terminalMappingClass: Terminals.TerminalMapping,
+        terminalMappingViewClass: InputTerminalMappingView,
+        terminalForInput: function( input ) {
+            return new Terminals.InputCollectionTerminal( { element: this.el, input: input } );
+        }
+    });
+
+    var BaseOutputTerminalView = TerminalView.extend( {
+        className: "terminal output-terminal",
+        initialize: function( options ) {
+            var node = options.node;
+            var output = options.output;
+            var name = output.name;
+            var terminal = this.terminalForOutput( output );
+            this.setupMappingView( terminal );
+            this.el.terminal = terminal;
+            terminal.node = node;
+            terminal.name = name;
+            node.output_terminals[name] = terminal;
+        },
+        events: {
+            "drag": "onDrag",
+            "dragstart": "onDragStart",
+            "dragend": "onDragEnd",
+        },
+        onDrag: function ( e, d ) {
+            var onmove = function() {
+                var po = $(d.proxy).offsetParent().offset(),
+                    x = d.offsetX - po.left,
+                    y = d.offsetY - po.top;
+                $(d.proxy).css( { left: x, top: y } );
+                d.proxy.terminal.redraw();
+                // FIXME: global
+                Globals.canvas_manager.update_viewport_overlay();
+            };
+            onmove();
+            $("#canvas-container").get(0).scroll_panel.test( e, onmove );
+        },
+        onDragStart: function( e, d ) { 
+            $( d.available ).addClass( "input-terminal-active" );
+            // Save PJAs in the case of change datatype actions.
+            Globals.workflow.check_changes_in_active_form(); 
+            // Drag proxy div
+            var h = $( '<div class="drag-terminal" style="position: absolute;"></div>' )
+                .appendTo( "#canvas-container" ).get(0);
+            // Terminal and connection to display noodle while dragging
+            h.terminal = new Terminals.OutputTerminal( { element: h } );
+            var c = new Connector();
+            c.dragging = true;
+            c.connect( this.el.terminal, h.terminal );
+            return h;
+        },
+        onDragEnd: function ( e, d ) {
+            var connector = d.proxy.terminal.connectors[0];
+            // check_changes_in_active_form may change the state and cause the
+            // connection to have already been destroyed. There must be better
+            // ways to handle this, but the following check fixes some serious
+            // GUI bugs for now.
+            if(connector) {
+                connector.destroy();
+            }
+            $(d.proxy).remove();
+            $( d.available ).removeClass( "input-terminal-active" );
+            $("#canvas-container").get(0).scroll_panel.stop();
+        }
+    });
+
+    var OutputTerminalView = BaseOutputTerminalView.extend( {
+        terminalMappingClass: Terminals.TerminalMapping,
+        terminalMappingViewClass: TerminalMappingView,
+        terminalForOutput: function( output ) {
+            var type = output.extensions;
+            var terminal = new Terminals.OutputTerminal( { element: this.el, datatypes: type } );
+            return terminal;
+        }
+    });
+
+    var OutputCollectionTerminalView = BaseOutputTerminalView.extend( {
+        terminalMappingClass: Terminals.TerminalMapping,
+        terminalMappingViewClass: TerminalMappingView,
+        terminalForOutput: function( output ) {
+            var collection_type = output.collection_type;
+            var terminal = new Terminals.OutputCollectionTerminal( { element: this.el, collection_type: collection_type, datatypes: output.extensions } );
+            return terminal;
+        }
+    });
+
+    return {
+        InputTerminalView               : InputTerminalView,
+        OutputTerminalView              : OutputTerminalView,
+        InputCollectionTerminalView     : InputCollectionTerminalView,
+        OutputCollectionTerminalView    : OutputCollectionTerminalView
+    }
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-view.js b/client/galaxy/scripts/mvc/workflow/workflow-view.js
new file mode 100644
index 0000000..096e312
--- /dev/null
+++ b/client/galaxy/scripts/mvc/workflow/workflow-view.js
@@ -0,0 +1,918 @@
+define('mvc/workflow/workflow-globals', {});
+define([
+    'utils/utils',
+    'mvc/workflow/workflow-globals',
+    'mvc/workflow/workflow-manager',
+    'mvc/workflow/workflow-canvas',
+    'mvc/workflow/workflow-node',
+    'mvc/tool/tool-form-workflow',
+    'mvc/ui/ui-misc',
+    'utils/async-save-text',
+    'libs/toastr',
+    'ui/editable-text'
+], function( Utils, Globals, Workflow, WorkflowCanvas, Node, ToolForm, Ui, async_save_text, Toastr ){
+
+// Reset tool search to start state.
+function reset_tool_search( initValue ) {
+    // Function may be called in top frame or in tool_menu_frame;
+    // in either case, get the tool menu frame.
+    var tool_menu_frame = $("#galaxy_tools").contents();
+    if (tool_menu_frame.length === 0) {
+        tool_menu_frame = $(document);
+    }
+
+    // Remove classes that indicate searching is active.
+    $(this).removeClass("search_active");
+    tool_menu_frame.find(".toolTitle").removeClass("search_match");
+
+    // Reset visibility of tools and labels.
+    tool_menu_frame.find(".toolSectionBody").hide();
+    tool_menu_frame.find(".toolTitle").show();
+    tool_menu_frame.find(".toolPanelLabel").show();
+    tool_menu_frame.find(".toolSectionWrapper").each( function() {
+        if ($(this).attr('id') !== 'recently_used_wrapper') {
+            // Default action.
+            $(this).show();
+        } else if ($(this).hasClass("user_pref_visible")) {
+            $(this).show();
+        }
+    });
+    tool_menu_frame.find("#search-no-results").hide();
+
+    // Reset search input.
+    tool_menu_frame.find("#search-spinner").hide();
+    if (initValue) {
+        var search_input = tool_menu_frame.find("#tool-search-query");
+        search_input.val("search tools");
+    }
+}
+
+var NODE_ICONS = {
+    'tool': 'fa-wrench',
+    'data_input': 'fa-file-o',
+    'data_collection_input': 'fa-folder-o',
+    'subworkflow': 'fa-sitemap fa-rotate-270',
+    'pause': 'fa-pause'
+};
+
+var add_node_icon = function($to_el, nodeType) {
+    var iconStyle = NODE_ICONS[nodeType];
+    if(iconStyle) {
+        var $icon = $('<i class="icon fa"> </i>').addClass(iconStyle);
+        $to_el.before($icon);
+    }
+};
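+// Usage (as in EditorFormView below):
+//   add_node_icon($el.find('.portlet-title-text'), node.type);
+// inserts the matching FontAwesome icon before the node title element.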
+
+
+// Really a shell of a real backbone view, but refactoring in the right
+// direction I think.
+var EditorFormView = Backbone.View.extend({
+
+    initialize: function(options) {
+        var self = this;
+        this.options = Utils.merge(options, {});
+        var $el = $('<div/>'),
+            workflowView = options.workflowView,
+            node = options.node;
+
+        if(options.html) {
+            $el.html(options.html);
+        }
+        this.setElement($el);
+
+        if (node && node.id != 'no-node') {
+            $el.find('table:first').after(this._genericStepAttributesTemplate( node ));
+            var nodeType = node.type;
+            add_node_icon($el.find('.portlet-title-text'), nodeType);
+            var $titleText = $el.find(".portlet-title-text");
+            $titleText.data('last-value', $titleText.text());
+            $titleText.make_text_editable({
+                on_finish: function( newLabel ){
+                    var lastValue = $titleText.data("last-value");
+                    if( newLabel == lastValue ) {
+                        return;
+                    }
+
+                    var workflow = workflowView.workflow;
+                    if( workflow.attemptUpdateNodeLabel( node, newLabel ) ) {
+                        $el.find("input[name='label']").val(newLabel);
+                        $titleText.data("last-value", newLabel);
+                        $el.find('form').submit();
+                        if(newLabel == "") {
+                            // If label unset restore default name as title.
+                            $titleText.text(node.name);
+                        }
+                    } else {
+                        Toastr.warning("Step label " + newLabel + " already exists, cannot update label.");
+                        $titleText.text(lastValue);
+                    }
+                }
+            });
+            ($el.find( 'form' ).length > 0) && $el.find( 'form' ).ajaxForm( {
+                type: 'POST',
+                dataType: 'json',
+                success: function( data ) {
+                    workflowView.workflow.active_form_has_changes = false;
+                    node.update_field_data( data );
+                    workflowView.showWorkflowParameters();
+                },
+                beforeSubmit: function( data ) {
+                    data.push( { name: 'content_id', value: node.content_id } );
+                    data.push( { name: 'tool_state', value: node.tool_state } );
+                    data.push( { name: '_', value: 'true' } );
+                }
+            }).each( function() {
+                var form = this;
+                $(this).find('select[refresh_on_change="true"]').change( function() {
+                    $(form).submit();
+                });
+                $(this).find('input[refresh_on_change="true"]').change( function() {
+                    $(form).submit();
+                });
+                $(this).find('input, textarea, select').each( function() {
+                    $(this).bind('focus click', function() {
+                        workflowView.workflow.active_form_has_changes = true;
+                    });
+                });
+            });
+        }
+
+    },
+
+    _genericStepAttributesTemplate: function( node ) {
+        return  '<div class="metadataForm">' +
+                    '<div class="metadataFormTitle">' +
+                        'Edit Step Attributes' +
+                    '</div>' +
+                    this._annotationTemplate(node) +
+                '</div>';
+    },
+
+    _annotationTemplate: function( node ){
+        return '<div class="form-row">' +
+            '<label>Annotation / Notes:</label>' +
+            '<div style="margin-right: 10px;">' +
+                '<textarea name="annotation" rows="3" style="width: 100%">' +
+                    node.annotation +
+                '</textarea>' +
+                '<div class="toolParamHelp">' +
+                    'Add an annotation or notes to this step; annotations are available when a workflow is viewed.' +
+                '</div>' +
+            '</div>' +
+        '</div>';
+    },
+
+});
+
+
+
+    // create form view
+    return Backbone.View.extend({
+        initialize: function(options) {
+            var self = Globals.app = this;
+            this.options = options;
+            this.urls = options && options.urls || {};
+            this.active_ajax_call = false;
+            var close_editor = function() {
+                self.workflow.check_changes_in_active_form();
+                if ( self.workflow && self.workflow.has_changes ) {
+                    var do_close = function() {
+                        window.onbeforeunload = undefined;
+                        window.document.location = self.urls.workflow_index;
+                    };
+                    window.show_modal( "Close workflow editor",
+                                "There are unsaved changes to your workflow which will be lost.",
+                                {
+                                    "Cancel" : hide_modal,
+                                    "Save Changes" : function() {
+                                        save_current_workflow( null, do_close );
+                                    }
+                                }, {
+                                    "Don't Save": do_close
+                                } );
+                } else {
+                    window.document.location = self.urls.workflow_index;
+                }
+            };
+            var save_current_workflow = function ( eventObj, success_callback ) {
+                show_message( "Saving workflow", "progress" );
+                self.workflow.check_changes_in_active_form();
+                if (!self.workflow.has_changes) {
+                    hide_modal();
+                    if ( success_callback ) {
+                        success_callback();
+                    }
+                    return;
+                }
+                self.workflow.rectify_workflow_outputs();
+                var savefn = function(callback) {
+                    $.ajax( {
+                        url: self.urls.save_workflow,
+                        type: "POST",
+                        data: {
+                            id: self.options.id,
+                            workflow_data: function() { return JSON.stringify( self.workflow.to_simple() ); },
+                            "_": "true"
+                        },
+                        dataType: 'json',
+                        success: function( data ) {
+                            var body = $("<div></div>").text( data.message );
+                            if ( data.errors ) {
+                                body.addClass( "warningmark" );
+                                var errlist = $( "<ul/>" );
+                                $.each( data.errors, function( i, v ) {
+                                    $("<li></li>").text( v ).appendTo( errlist );
+                                });
+                                body.append( errlist );
+                            } else {
+                                body.addClass( "donemark" );
+                            }
+                            self.workflow.name = data.name;
+                            self.workflow.has_changes = false;
+                            self.workflow.stored = true;
+                            self.showWorkflowParameters();
+                            if ( data.errors ) {
+                                window.show_modal( "Saving workflow", body, { "Ok" : hide_modal } );
+                            } else {
+                                if (callback) {
+                                    callback();
+                                }
+                                hide_modal();
+                            }
+                        }
+                    });
+                };
+
+                // We bind to ajaxStop because of auto-saving, since the form submission ajax
+                // call needs to be completed so that the new data is saved
+                if (self.active_ajax_call) {
+                    $(document).bind('ajaxStop.save_workflow', function() {
+                        $(document).unbind('ajaxStop.save_workflow');
+                        savefn(success_callback);
+                        $(document).unbind('ajaxStop.save_workflow'); // IE7 needs it here
+                        self.active_ajax_call = false;
+                    });
+                } else {
+                    savefn(success_callback);
+                }
+            };
+
+            // Init searching.
+            $("#tool-search-query").click( function (){
+                $(this).focus();
+                $(this).select();
+            })
+            .keyup( function () {
+                // Remove italics.
+                $(this).css("font-style", "normal");
+                // Don't update if same value as last time
+                if ( this.value.length < 3 ) {
+                    reset_tool_search(false);
+                } else if ( this.value != this.lastValue ) {
+                    // Add class to denote that searching is active.
+                    $(this).addClass("search_active");
+                    // input.addClass(config.loadingClass);
+                    // Add '*' to facilitate partial matching.
+                    var q = this.value;
+                    // Stop previous ajax-request
+                    if (this.timer) {
+                        clearTimeout(this.timer);
+                    }
+                    // Start a new ajax-request in X ms
+                    $("#search-spinner").show();
+                    this.timer = setTimeout(function () {
+                        $.get(self.urls.tool_search, { q: q }, function (data) {
+                            // input.removeClass(config.loadingClass);
+                            // Show live-search if results and search-term aren't empty
+                            $("#search-no-results").hide();
+                            // Hide all tool sections.
+                            $(".toolSectionWrapper").hide();
+                            // This hides all tools, but not the workflows link (which is in a .toolTitle div).
+                            $(".toolSectionWrapper").find(".toolTitle").hide();
+                            if ( data.length != 0 ) {
+                                // Map tool ids to element ids and join them.
+                                var s = $.map( data, function( n, i ) { return "link-" + n; } );
+                                // First pass to show matching tools and their parents.
+                                $(s).each( function(index,id) {
+                                    // Add class to denote match.
+                                    $("[id='"+id+"']").parent().addClass("search_match");
+                                    $("[id='"+id+"']").parent().show().parent().parent().show().parent().show();
+                                });
+                                // Hide labels that have no visible children.
+                                $(".toolPanelLabel").each( function() {
+                                   var this_label = $(this);
+                                   var next = this_label.next();
+                                   var no_visible_tools = true;
+                                   // Look through tools following label and, if none are visible, hide label.
+                                   while (next.length !== 0 && next.hasClass("toolTitle")) {
+                                       if (next.is(":visible")) {
+                                           no_visible_tools = false;
+                                           break;
+                                       } else {
+                                           next = next.next();
+                                       }
+                                    }
+                                    if (no_visible_tools) {
+                                        this_label.hide();
+                                    }
+                                });
+                            } else {
+                                $("#search-no-results").show();
+                            }
+                            $("#search-spinner").hide();
+                        }, "json" );
+                    }, 400 );
+                }
+                this.lastValue = this.value;
+            });
+
+            // Canvas overview management
+            this.canvas_manager = Globals.canvas_manager = new WorkflowCanvas( this, $("#canvas-viewport"), $("#overview") );
+
+            // Initialize workflow state
+            this.reset();
+
+            // get available datatypes for post job action options
+            this.datatypes = JSON.parse($.ajax({
+                url     : Galaxy.root + 'api/datatypes',
+                async   : false
+            }).responseText);
+
+            // get datatype mapping options
+            this.datatypes_mapping = JSON.parse($.ajax({
+                url     : Galaxy.root + 'api/datatypes/mapping',
+                async   : false
+            }).responseText);
+
+            // set mapping sub lists
+            this.ext_to_type = this.datatypes_mapping.ext_to_class_name;
+            this.type_to_type = this.datatypes_mapping.class_to_classes;
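+
+            // Note: the two requests above run with async:false, which blocks
+            // the UI until the server responds. A non-blocking equivalent
+            // (illustrative only; initialization currently relies on the data
+            // being available synchronously) would be:
+            //     $.getJSON( Galaxy.root + 'api/datatypes', function( data ) {
+            //         self.datatypes = data;
+            //     });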
+
+            // Load workflow definition
+            this._workflowLoadAjax(self.options.id, {
+                success: function( data ) {
+                     self.reset();
+                     self.workflow.from_simple( data, true );
+                     self.workflow.has_changes = false;
+                     self.workflow.fit_canvas_to_nodes();
+                     self.scroll_to_nodes();
+                     self.canvas_manager.draw_overview();
+                     // Determine if any parameters were 'upgraded' and provide message
+                     upgrade_message = "";
+                     _.each( data.upgrade_messages, function( messages, step_id ) {
+                        var details = "";
+                        _.each( messages, function( m ) {
+                            details += "<li>" + m + "</li>";
+                        });
+                        if ( details ) {
+                            upgrade_message += "<li>Step " + ( parseInt( step_id, 10 ) + 1 ) + ": " + self.workflow.nodes[ step_id ].name + "<ul>" + details + "</ul></li>";
+                        }
+                     });
+                     if ( upgrade_message ) {
+                        window.show_modal( "Workflow loaded with changes",
+                                    "Problems were encountered loading this workflow (possibly a result of tool upgrades). Please review the following parameters and then save.<ul>" + upgrade_message + "</ul>",
+                                    { "Continue" : hide_modal } );
+                     } else {
+                        hide_modal();
+                     }
+                     self.showWorkflowParameters();
+                 },
+                 // jQuery.ajax has no "beforeSubmit" option; beforeSend is the
+                 // hook that runs before the request is sent.
+                 beforeSend: function() {
+                     show_message( "Loading workflow", "progress" );
+                 }
+            });
+
+            // For autosave purposes
+            $(document).ajaxStart( function() {
+                self.active_ajax_call = true;
+                $(document).bind( "ajaxStop.global", function() {
+                    self.active_ajax_call = false;
+                });
+            });
+
+            $(document).ajaxError( function ( e, x ) {
+                // console.log( e, x );
+                var message = x.responseText || x.statusText || "Could not connect to server";
+                window.show_modal( "Server error", message, { "Ignore error" : hide_modal } );
+                return false;
+            });
+
+            window.make_popupmenu && make_popupmenu( $("#workflow-options-button"), {
+                "Save" : save_current_workflow,
+                "Save As": workflow_save_as,
+                "Run": function() {
+                    window.location = self.urls.run_workflow;
+                },
+                //"Create New" : create_new_workflow_dialog,
+                "Edit Attributes" : edit_workflow_attributes,
+                //"Edit Workflow Outputs": edit_workflow_outputs,
+                "Auto Re-layout": layout_editor,
+                //"Load a Workflow" : load_workflow,
+                "Close": close_editor
+            });
+
+            // "Save As" support (see Galaxy issue 3000).
+            function workflow_save_as() {
+                var body = $('<form><label style="display:inline-block; width: 100%;">Save as name: </label><input type="text" id="workflow_rename" style="width: 80%;" autofocus/>' + 
+                '<br><label style="display:inline-block; width: 100%;">Annotation: </label><input type="text" id="wf_annotation" style="width: 80%;" /></form>');
+                    window.show_modal("Save As a New Workflow", body, {
+                        "OK": function () {
+                            var rename_name = $('#workflow_rename').val().length > 0 ? $('#workflow_rename').val() : "SavedAs_" + self.workflow.name;
+                            var rename_annotation = $('#wf_annotation').val().length > 0 ? $('#wf_annotation').val() : "";
+                            $.ajax({
+                                url: self.urls.workflow_save_as,
+                                type: "POST",
+                                data: {
+                                    workflow_name: rename_name,
+                                    workflow_annotation: rename_annotation,
+                                    workflow_data: function() { return JSON.stringify( self.workflow.to_simple() ); }
+                                }
+                            }).done(function(id){
+                                window.onbeforeunload = undefined;
+                                window.location = "/workflow/editor?id=" + id;
+                                hide_modal();
+                            }).fail(function(){
+                                hide_modal();
+                                alert("Saving this workflow failed. Please contact this site's administrator.");
+                            });
+                        },
+                        "Cancel": hide_modal
+                    });
+            }
+
+            function edit_workflow_outputs(){
+                self.workflow.clear_active_node();
+                $('.right-content').hide();
+                var new_content = "";
+                for (var node_key in self.workflow.nodes){
+                    var node = self.workflow.nodes[node_key];
+                    if(['tool', 'subworkflow'].indexOf(node.type) >= 0){
+                        new_content += "<div class='toolForm' style='margin-bottom:5px;'><div class='toolFormTitle'>Step " + node.id + " - " + node.name + "</div>";
+                        for (var ot_key in node.output_terminals){
+                            var output = node.output_terminals[ot_key];
+                            // if (node.workflow_outputs[node.id + "|" + output.name]){
+                            if (node.isWorkflowOutput(output.name)) {
+                                new_content += "<p>"+output.name +"<input type='checkbox' name='"+ node.id + "|" + output.name +"' checked /></p>";
+                            }
+                            else{
+                                new_content += "<p>"+output.name +"<input type='checkbox' name='"+ node.id + "|" + output.name +"' /></p>";
+                            }
+                        }
+                        new_content += "</div>";
+                    }
+                }
+                $("#output-fill-area").html(new_content);
+                $("#output-fill-area input").bind('click', function(){
+                    var node_id = this.name.split('|')[0];
+                    var workflowNode = this.workflow.nodes[node_id];
+                    var output_name = this.name.split('|')[1];
+                    if (this.checked){
+                        workflowNode.addWorkflowOutput(output_name);
+                    }else{
+                        workflowNode.removeWorkflowOutput(output_name);
+                    }
+                    self.workflow.has_changes = true;
+                });
+                $('#workflow-output-area').show();
+            }
+
+            function layout_editor() {
+                self.workflow.layout();
+                self.workflow.fit_canvas_to_nodes();
+                self.scroll_to_nodes();
+                self.canvas_manager.draw_overview();
+            }
+
+            function edit_workflow_attributes() {
+                self.workflow.clear_active_node();
+                $('.right-content').hide();
+                $('#edit-attributes').show();
+            }
+
+            // On load, set the size to the pref stored in local storage if it exists
+            var overview_size = $.jStorage.get("overview-size");
+            // jStorage returns null (not undefined) for unset keys.
+            if (overview_size) {
+                $("#overview-border").css( {
+                    width: overview_size,
+                    height: overview_size
+                });
+            }
+
+            // Show viewport on load unless pref says it's off
+            if ($.jStorage.get("overview-off")) {
+                hide_overview();
+            } else {
+                show_overview();
+            }
+
+            // Stores the size of the overview into local storage when it's resized
+            $("#overview-border").bind( "dragend", function( e, d ) {
+                var op = $(this).offsetParent();
+                var opo = op.offset();
+                var new_size = Math.max( op.width() - ( d.offsetX - opo.left ),
+                                         op.height() - ( d.offsetY - opo.top ) );
+                $.jStorage.set("overview-size", new_size + "px");
+            });
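+
+            // jStorage round-trips values through browser local storage, e.g.
+            // (illustrative): after $.jStorage.set("overview-size", "150px"),
+            // a later $.jStorage.get("overview-size") returns "150px", which
+            // is how the overview size survives page reloads.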
+
+            function show_overview() {
+                $.jStorage.set("overview-off", false);
+                $("#overview-border").css("right", "0px");
+                $("#close-viewport").css("background-position", "0px 0px");
+            }
+
+            function hide_overview() {
+                $.jStorage.set("overview-off", true);
+                $("#overview-border").css("right", "20000px");
+                $("#close-viewport").css("background-position", "12px 0px");
+            }
+
+            // Lets the overview be toggled visible and invisible, adjusting the arrows accordingly
+            $("#close-viewport").click( function() {
+                if ( $("#overview-border").css("right") === "0px" ) {
+                    hide_overview();
+                } else {
+                    show_overview();
+                }
+            });
+
+            // Unload handler
+            window.onbeforeunload = function() {
+                if ( self.workflow && self.workflow.has_changes ) {
+                    return "There are unsaved changes to your workflow which will be lost.";
+                }
+            };
+
+            this.options.workflows.length > 0 && $( "#left" ).find( ".toolMenu" ).append( this._buildToolPanelWorkflows() );
+
+            // Tool menu
+            $( "div.toolSectionBody" ).hide();
+            $( "div.toolSectionTitle > span" ).wrap( "<a href='#'></a>" );
+            var last_expanded = null;
+            $( "div.toolSectionTitle" ).each( function() {
+               var body = $(this).next( "div.toolSectionBody" );
+               $(this).click( function() {
+                   if ( body.is( ":hidden" ) ) {
+                       if ( last_expanded ) last_expanded.slideUp( "fast" );
+                       last_expanded = body;
+                       body.slideDown( "fast" );
+                   }
+                   else {
+                       body.slideUp( "fast" );
+                       last_expanded = null;
+                   }
+               });
+            });
+
+            // Rename async.
+            async_save_text("workflow-name", "workflow-name", self.urls.rename_async, "new_name");
+
+            // Tag async. Simply have the workflow edit element generate a click on the tag element to activate tagging.
+            $('#workflow-tag').click( function() {
+                $('.tag-area').click();
+                return false;
+            });
+            // Annotate async.
+            async_save_text("workflow-annotation", "workflow-annotation", self.urls.annotate_async, "new_annotation", 25, true, 4);
+        },
+
+        _buildToolPanelWorkflows: function() {
+            var self = this;
+            var $section = $(   '<div class="toolSectionWrapper">' +
+                                    '<div class="toolSectionTitle">' +
+                                        '<a href="#"><span>Workflows</span></a>' +
+                                    '</div>' +
+                                    '<div class="toolSectionBody">' +
+                                        '<div class="toolSectionBg"/>' +
+                                    '</div>' +
+                                '</div>' );
+            _.each( this.options.workflows, function( workflow ) {
+                if( workflow.id !== self.options.id ) {
+                    var copy = new Ui.ButtonIcon({
+                        icon        : 'fa fa-copy',
+                        cls         : 'ui-button-icon-plain',
+                        tooltip     : 'Copy and insert individual steps',
+                        onclick     : function() {
+                            if( workflow.step_count < 2 ) {
+                                self.copy_into_workflow( workflow.id, workflow.name );
+                            } else {
+                                // don't ruin the workflow by adding 50 steps unprompted.
+                                Galaxy.modal.show({
+                                    title   : 'Warning',
+                                    body    : 'This will copy ' + workflow.step_count + ' new steps into your workflow.',
+                                    buttons : {
+                                        'Cancel' : function() { Galaxy.modal.hide(); },
+                                        'Copy'   : function() { Galaxy.modal.hide(); self.copy_into_workflow( workflow.id, workflow.name ); }
+                                    }
+                                });
+                            }
+                        }
+                    });
+                    var $add = $( '<a/>' ).attr( 'href', '#' ).html( workflow.name ).on( 'click', function() {
+                        self.add_node_for_subworkflow( workflow.latest_id, workflow.name );
+                    });
+                    $section.find( '.toolSectionBg' ).append( $( '<div/>' ).addClass( 'toolTitle' ).append( $add ).append( copy.$el ) );
+                }
+            });
+            return $section;
+        },
+
+        copy_into_workflow: function(workflowId) {
+            // Load workflow definition
+            var self = this;
+            this._workflowLoadAjax(workflowId, {
+                success: function( data ) {
+                    self.workflow.from_simple( data, false );
+                    // Determine if any parameters were 'upgraded' and provide message
+                    upgrade_message = "";
+                    $.each( data.upgrade_messages, function( k, v ) {
+                       upgrade_message += ( "<li>Step " + ( parseInt(k, 10) + 1 ) + ": " + self.workflow.nodes[k].name + "<ul>");
+                       $.each( v, function( i, vv ) {
+                           upgrade_message += "<li>" + vv +"</li>";
+                       });
+                       upgrade_message += "</ul></li>";
+                    });
+                    if ( upgrade_message ) {
+                       window.show_modal( "Subworkflow embedded with changes",
+                                   "Problems were encountered loading this workflow (possibly a result of tool upgrades). Please review the following parameters and then save.<ul>" + upgrade_message + "</ul>",
+                                   { "Continue" : hide_modal } );
+                    } else {
+                       hide_modal();
+                    }
+                },
+                // As above, use jQuery's beforeSend hook (beforeSubmit is not
+                // a jQuery.ajax option).
+                beforeSend: function() {
+                    show_message( "Importing workflow", "progress" );
+                }
+            });
+        },
+
+        // Global state for the whole workflow
+        reset: function() {
+            this.workflow && this.workflow.remove_all();
+            this.workflow = Globals.workflow = new Workflow( this, $("#canvas-container") );
+        },
+
+        scroll_to_nodes: function () {
+            var cv = $("#canvas-viewport");
+            var cc = $("#canvas-container");
+            var top, left;
+            if ( cc.width() < cv.width() ) {
+                left = ( cv.width() - cc.width() ) / 2;
+            } else {
+                left = 0;
+            }
+            if ( cc.height() < cv.height() ) {
+                top = ( cv.height() - cc.height() ) / 2;
+            } else {
+                top = 0;
+            }
+            cc.css( { left: left, top: top } );
+        },
+
+        _workflowLoadAjax: function(workflowId, options) {
+            $.ajax(Utils.merge(options, {
+                url: this.urls.load_workflow,
+                data: { id: workflowId, "_": "true" },
+                dataType: 'json',
+                cache: false
+            }));
+        },
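+
+        // Typical use (illustrative; mirrors the call in initialize above):
+        //     this._workflowLoadAjax( workflowId, {
+        //         success: function( data ) { /* rebuild the canvas from data */ }
+        //     });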
+
+        _moduleInitAjax: function(node, request_data) {
+            $.ajax( {
+                url: this.urls.get_new_module_info,
+                data: request_data,
+                global: false,
+                dataType: "json",
+                success: function( data ) {
+                    node.init_field_data( data );
+                },
+                error: function( x, e ) {
+                    var m = "error loading field data";
+                    if ( x.status === 0 ) {
+                        m += ", server unavailable";
+                    }
+                    node.error( m );
+                }
+            });
+        },
+
+        // Add a new step to the workflow by tool id
+        add_node_for_tool: function ( id, title ) {
+            var node = this.workflow.create_node( 'tool', title, id );
+            this._moduleInitAjax(node, { type: "tool", content_id: id, "_": "true" });
+        },
+
+        // Add a new step to the workflow by stored-workflow id
+        add_node_for_subworkflow: function ( id, title ) {
+            var node = this.workflow.create_node( 'subworkflow', title, id );
+            this._moduleInitAjax(node, { type: "subworkflow", content_id: id, "_": "true" });
+        },
+
+        add_node_for_module: function ( type, title ) {
+            var node = this.workflow.create_node( type, title );
+            this._moduleInitAjax(node, { type: type, "_": "true" });
+        },
+
+        // This function preloads how to display known pja's.
+        display_pja: function (pja, node) {
+            // DBTODO SANITIZE INPUTS.
+            var self = this;
+            $("#pja_container").append( get_pja_form(pja, node) );
+            $("#pja_container>.toolForm:last>.toolFormTitle>.buttons").click(function (){
+                var action_to_rem = $(this).closest(".toolForm", ".action_tag").children(".action_tag:first").text();
+                $(this).closest(".toolForm").remove();
+                delete self.workflow.active_node.post_job_actions[action_to_rem];
+                self.workflow.active_form_has_changes = true;
+            });
+        },
+
+        display_pja_list: function (){
+            return pja_list;
+        },
+
+        display_file_list: function (node){
+            addlist = "<select id='node_data_list' name='node_data_list'>";
+            for (var out_terminal in node.output_terminals){
+                addlist += "<option value='" + out_terminal + "'>"+ out_terminal +"</option>";
+            }
+            addlist += "</select>";
+            return addlist;
+        },
+
+        new_pja: function (action_type, target, node){
+            if (node.post_job_actions === undefined){
+                //New tool node, set up dict.
+                node.post_job_actions = {};
+            }
+            if (node.post_job_actions[action_type+target] === undefined) {
+                var new_pja = {};
+                new_pja.action_type = action_type;
+                new_pja.output_name = target;
+                node.post_job_actions[action_type+target] = new_pja;
+                this.display_pja(new_pja, node);
+                this.workflow.active_form_has_changes = true;
+                return true;
+            } else {
+                return false;
+            }
+        },
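+
+        // Example (illustrative): new_pja("RenameDatasetAction", "out_file1", node)
+        // stores the new action under the compound key
+        // "RenameDatasetActionout_file1" in node.post_job_actions, so at most
+        // one action of a given type is tracked per output.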
+
+        showWorkflowParameters: function () {
+            var parameter_re = /\$\{.+?\}/g;
+            var workflow_parameters = [];
+            var wf_parm_container = $("#workflow-parameters-container");
+            var wf_parm_box = $("#workflow-parameters-box");
+            var new_parameter_content = "";
+            var matches = [];
+            $.each(this.workflow.nodes, function (k, node){
+                var form_matches = node.form_html.match(parameter_re);
+                if (form_matches){
+                    matches = matches.concat(form_matches);
+                }
+                if (node.post_job_actions){
+                    $.each(node.post_job_actions, function(k, pja){
+                        if (pja.action_arguments){
+                            $.each(pja.action_arguments, function(k, action_argument){
+                                var arg_matches = action_argument.match(parameter_re);
+                                if (arg_matches){
+                                    matches = matches.concat(arg_matches);
+                                }
+                            });
+                        }
+                    });
+                }
+                // Fold matches into the unique parameter list for every node,
+                // not only for nodes that define post job actions.
+                $.each(matches, function(k, element){
+                    if ($.inArray(element, workflow_parameters) === -1){
+                        workflow_parameters.push(element);
+                    }
+                });
+            });
+            if (workflow_parameters && workflow_parameters.length !== 0){
+                $.each(workflow_parameters, function(k, element){
+                    new_parameter_content += "<div>" + element.substring(2, element.length -1) + "</div>";
+                });
+                wf_parm_container.html(new_parameter_content);
+                wf_parm_box.show();
+            }else{
+                wf_parm_container.html(new_parameter_content);
+                wf_parm_box.hide();
+            }
+        },
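+
+        // Example (illustrative): a tool form containing "${num_mismatches}"
+        // matches the /\$\{.+?\}/g pattern above, so "num_mismatches" would be
+        // shown in the workflow parameters box.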
+
+        showToolForm: function ( text, node ) {
+            // initialize tags and identifiers
+            var cls = 'right-content';
+            var id  = cls + '-' + node.id;
+
+            // grab panel container
+            var $container = $('#' + cls);
+
+            // remove previous notifications
+            var $current = $container.find('#' + id);
+            if ($current.length > 0 && $current.find('.section-row').length === 0) {
+                $current.remove();
+            }
+
+            // check if tool form already exists
+            if ($container.find('#' + id).length === 0) {
+                var $el = $('<div id="' + id + '" class="' + cls + '"/>');
+                var formView = null;
+                if (node.type === 'tool' && Utils.isJSON(text)) {
+                    var options = JSON.parse(text);
+                    options.node = node;
+                    options.workflow = this.workflow;
+                    options.datatypes = this.datatypes;
+                    formView = new ToolForm.View(options);
+                } else {
+                    var options = {
+                        html: text,
+                        node: node,
+                        workflowView: this
+                    };
+                    formView = new EditorFormView(options);
+                }
+                $el.append(formView.$el);
+                $container.append($el);
+            }
+
+            // hide everything
+            $('.' + cls).hide();
+
+            // show current form
+            $container.find('#' + id).show();
+            $container.show();
+            // Scroll back to the top of the panel (scrollTop() without an
+            // argument is only a getter).
+            $container.scrollTop( 0 );
+        },
+
+        isSubType: function ( child, parent ) {
+            child = this.ext_to_type[child];
+            parent = this.ext_to_type[parent];
+            return ( this.type_to_type[child] ) && ( parent in this.type_to_type[child] );
+        },
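+
+        // Example (illustrative, assuming the stock datatype registry):
+        // isSubType("fastqsanger", "fastq") is true because the class mapped
+        // to "fastqsanger" lists the "fastq" class among its ancestors in
+        // class_to_classes.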
+
+        $newNodeElement: function(type, title_text) {
+            var $f = $("<div class='toolForm toolFormInCanvas'></div>");
+            var $title = $("<div class='toolFormTitle unselectable'><span class='nodeTitle'>" + title_text + "</div></div>" );
+            add_node_icon($title.find('.nodeTitle'), type);
+            $f.append( $title );
+            $f.css( "left", $(window).scrollLeft() + 20 );
+            $f.css( "top", $(window).scrollTop() + 20 );
+            var $b = $("<div class='toolFormBody'></div>");
+            $f.append($b);
+            return $f;
+        },
+
+        prebuildNode: function ( type, title_text, content_id ) {
+            var self = this;
+            var $f = this.$newNodeElement( type, title_text );
+            var node = new Node( this, { element: $f } );
+            node.type = type;
+            node.content_id = content_id;
+            var tmp = "<div><img height='16' align='middle' src='" + Galaxy.root + "static/images/loading_small_white_bg.gif'/> loading tool info...</div>";
+            $f.find(".toolFormBody").append(tmp);
+            node.form_html = tmp;
+            // Fix width to computed width
+            // Now add floats
+            var buttons = $("<div class='buttons' style='float: right;'></div>");
+            buttons.append( $("<div/>").addClass("fa-icon-button fa fa-times").click( function( e ) {
+                node.destroy();
+            }));
+            // Place inside container
+            $f.appendTo( "#canvas-container" );
+            // Position in container
+            var o = $("#canvas-container").position();
+            var p = $("#canvas-container").parent();
+            var width = $f.width();
+            var height = $f.height();
+            $f.css( { left: ( - o.left ) + ( p.width() / 2 ) - ( width / 2 ), top: ( - o.top ) + ( p.height() / 2 ) - ( height / 2 ) } );
+            buttons.prependTo( $f.find(".toolFormTitle" ) );
+            width += ( buttons.width() + 10 );
+            $f.css( "width", width );
+            $f.bind( "dragstart", function() {
+                self.workflow.activate_node( node );
+            }).bind( "dragend", function() {
+                self.workflow.node_changed( this );
+                self.workflow.fit_canvas_to_nodes();
+                self.canvas_manager.draw_overview();
+            }).bind( "dragclickonly", function() {
+                self.workflow.activate_node( node );
+            }).bind( "drag", function( e, d ) {
+                // Move
+                var po = $(this).offsetParent().offset(),
+                    x = d.offsetX - po.left,
+                    y = d.offsetY - po.top;
+                $(this).css( { left: x, top: y } );
+                // Redraw
+                $(this).find( ".terminal" ).each( function() {
+                    this.terminal.redraw();
+                });
+            });
+            return node;
+        }
+    });
+});
diff --git a/client/galaxy/scripts/nls/ja/locale.js b/client/galaxy/scripts/nls/ja/locale.js
new file mode 100644
index 0000000..18b8175
--- /dev/null
+++ b/client/galaxy/scripts/nls/ja/locale.js
@@ -0,0 +1,217 @@
+/** ja localization */
+define({
+
+// ---------------------------------------------------------------------------- histories
+// ---- history-model
+// ---- history-view
+"This history is empty" :
+"ヒストリーは空です",
+"No matching datasets found" :
+"一致するデータセットが見つかりませんでした",
+//"An error occurred while getting updates from the server" :
+//false,
+//"Please contact a Galaxy administrator if the problem persists" :
+//false,
+//TODO:
+//"An error was encountered while <% where %>" :
+//false,
+"Search datasets" :
+"データセットを検索する",
+"You are currently viewing a deleted history!" :
+"消去したヒストリーをみています。",
+"You are over your disk quota" :
+"あなたはディスククォータを超えている",
+//"Tool execution is on hold until your disk usage drops below your allocated quota" :
+//false,
+"All" :
+"一式",
+"None" :
+"なし",
+"For all selected" :
+"各項目を",
+
+// ---- history-view-edit
+//"Edit history tags" :
+//false,
+//"Edit history Annotation" :
+//false,
+"Click to rename history" :
+"ヒストリーの名前を変更するにはクリック",
+// multi operations
+"Operations on multiple datasets" :
+"複数のデータセットに対する操作",
+//"Hide datasets" :
+//false,
+//"Unhide datasets" :
+//false,
+//"Delete datasets" :
+//false,
+//"Undelete datasets" :
+//false,
+"Permanently delete datasets" :
+"永久にデータセットを削除",
+"This will permanently remove the data in your datasets. Are you sure?" :
+"これは永久にあなたのデータセット内のデータを削除します。本当に?",
+
+// ---- history-view-annotated
+"Dataset" :
+"データセット",
+//"Annotation" :
+//false,
+
+// ---- history-view-edit-current
+"This history is empty. Click 'Get Data' on the left tool menu to start" :
+"ヒストリーは空です。解析をはじめるには、左パネルの 'データ取得' をクリック",
+"No matching datasets found" :
+"一致するデータセットが見つかりませんでした",
+"You must be logged in to create histories" :
+"ヒストリーを作成するためにはログインする必要があります",
+//TODO:
+//"You can <% loadYourOwn %> or <% externalSource %>" :
+//false,
+//"load your own data" :
+//false,
+//"get data from an external source" :
+//false,
+
+// these aren't in zh/ginga.po and the template doesn't localize
+//"Include Deleted Datasets" :
+//false,
+//"Include Hidden Datasets" :
+//false,
+
+
+// ---------------------------------------------------------------------------- datasets
+// ---- hda-model
+//"Unable to purge dataset" :
+//false,
+
+// ---- hda-base
+// display button
+//"Cannot display datasets removed from disk" :
+//false,
+//"This dataset must finish uploading before it can be viewed" :
+//false,
+//"This dataset is not yet viewable" :
+//false,
+"View data" :
+"データを表示",
+// download button
+"Download" :
+"ダウンロード",
+"Download dataset" :
+"データセットをダウンロード",
+//"Additional files" :
+//false,
+// info/show_params
+"View details" :
+"細部を表示",
+
+// dataset states
+// state: new
+//"This is a new dataset and not all of its data are available yet" :
+//false,
+// state: noPermission
+//"You do not have permission to view this dataset" :
+//false,
+// state: discarded
+//"The job creating this dataset was cancelled before completion" :
+//false,
+// state: queued
+"This job is waiting to run" :
+"ジョブは実行待ちです",
+// state: upload
+//"This dataset is currently uploading" :
+//false,
+// state: setting_metadata
+//"Metadata is being auto-detected" :
+//false,
+// state: running
+"This job is currently running" :
+"ジョブは実行中です",
+// state: paused
+//"This job is paused. Use the \"Resume Paused Jobs\" in the history menu to resume" :
+//false,
+// state: error
+"An error occurred with this dataset" :
+"このジョブの実行中に発生したエラー",
+// state: empty
+"No data" :
+"データ無し",
+// state: failed_metadata
+//"An error occurred setting the metadata for this dataset" :
+//false,
+
+// ajax error prefix
+//"There was an error getting the data for this dataset" :
+//false,
+
+// purged'd/del'd msg
+"This dataset has been deleted and removed from disk" :
+"このデータセットは、永続的にディスクから削除されました",
+"This dataset has been deleted" :
+"このデータセットは削除されました",
+"This dataset has been hidden" :
+"このデータセットは、非表示にされた",
+
+"format" :
+"フォーマット",
+"database" :
+"データベース",
+
+// ---- hda-edit
+"Edit attributes" :
+"変数を編集する",
+//"Cannot edit attributes of datasets removed from disk" :
+//false,
+//"Undelete dataset to edit attributes" :
+//false,
+//"This dataset must finish uploading before it can be edited" :
+//false,
+//"This dataset is not yet editable" :
+//false,
+
+"Delete" :
+"削除する",
+//"Dataset is already deleted" :
+//false,
+
+"View or report this error" :
+"このエラーを届け出る",
+
+"Run this job again" :
+"もう一度このジョブを実行する",
+
+"Visualize" :
+"可視化する",
+//"Visualize in" :
+//false,
+
+"Undelete it" :
+"復元する",
+"Permanently remove it from disk" :
+"永久にディスクから削除",
+"Unhide it" :
+"非表示解除する",
+
+//"You may be able to" :
+//false,
+//"set it manually or retry auto-detection" :
+//false,
+
+//"Edit dataset tags" :
+//false,
+//"Edit dataset annotation" :
+//false,
+
+
+// ---------------------------------------------------------------------------- misc. MVC
+//"Tags" :
+//false,
+//"Annotation" :
+//false,
+//"Edit annotation" :
+//false,
+
+
+});
diff --git a/client/galaxy/scripts/nls/locale.js b/client/galaxy/scripts/nls/locale.js
new file mode 100644
index 0000000..c259585
--- /dev/null
+++ b/client/galaxy/scripts/nls/locale.js
@@ -0,0 +1,222 @@
+/** en/main localization hash - for use with requirejs' i18n plugin */
+define({
+    root : {
+
+// ---------------------------------------------------------------------------- histories
+// ---- history-model
+// ---- history-view
+"This history is empty" :
+false,
+"No matching datasets found" :
+false,
+"An error occurred while getting updates from the server" :
+false,
+"Please contact a Galaxy administrator if the problem persists" :
+false,
+//TODO:
+//"An error was encountered while <% where %>" :
+//false,
+"Search datasets" :
+false,
+"You are currently viewing a deleted history!" :
+false,
+"You are over your disk quota" :
+false,
+"Tool execution is on hold until your disk usage drops below your allocated quota" :
+false,
+"All" :
+false,
+"None" :
+false,
+"For all selected" :
+false,
+
+// ---- history-view-edit
+"Edit history tags" :
+false,
+"Edit history Annotation" :
+false,
+"Click to rename history" :
+false,
+// multi operations
+"Operations on multiple datasets" :
+false,
+"Hide datasets" :
+false,
+"Unhide datasets" :
+false,
+"Delete datasets" :
+false,
+"Undelete datasets" :
+false,
+"Permanently delete datasets" :
+false,
+"This will permanently remove the data in your datasets. Are you sure?" :
+false,
+
+// ---- history-view-annotated
+"Dataset" :
+false,
+"Annotation" :
+false,
+
+// ---- history-view-edit-current
+"This history is empty. Click 'Get Data' on the left tool menu to start" :
+false,
+"No matching datasets found" :
+false,
+"You must be logged in to create histories" :
+false,
+//TODO:
+//"You can <% loadYourOwn %> or <% externalSource %>" :
+//false,
+//"load your own data" :
+//false,
+//"get data from an external source" :
+//false,
+
+// these aren't in zh/ginga.po and the template doesn't localize
+//"Include Deleted Datasets" :
+//false,
+//"Include Hidden Datasets" :
+//false,
+
+
+// ---------------------------------------------------------------------------- datasets
+// ---- hda-model
+"Unable to purge dataset" :
+false,
+
+// ---- hda-base
+// display button
+"Cannot display datasets removed from disk" :
+false,
+"This dataset must finish uploading before it can be viewed" :
+false,
+"This dataset is not yet viewable" :
+false,
+"View data" :
+false,
+// download button
+"Download" :
+false,
+"Download dataset" :
+false,
+"Additional files" :
+false,
+// info/show_params
+"View details" :
+false,
+
+// dataset states
+// state: new
+"This is a new dataset and not all of its data are available yet" :
+false,
+// state: noPermission
+"You do not have permission to view this dataset" :
+false,
+// state: discarded
+"The job creating this dataset was cancelled before completion" :
+false,
+// state: queued
+"This job is waiting to run" :
+false,
+// state: upload
+"This dataset is currently uploading" :
+false,
+// state: setting_metadata
+"Metadata is being auto-detected" :
+false,
+// state: running
+"This job is currently running" :
+false,
+// state: paused
+"This job is paused. Use the \"Resume Paused Jobs\" in the history menu to resume" :
+false,
+// state: error
+"An error occurred with this dataset" :
+false,
+// state: empty
+"No data" :
+false,
+// state: failed_metadata
+"An error occurred setting the metadata for this dataset" :
+false,
+
+// ajax error prefix
+"There was an error getting the data for this dataset" :
+false,
+
+// purged'd/del'd msg
+"This dataset has been deleted and removed from disk" :
+false,
+"This dataset has been deleted" :
+false,
+"This dataset has been hidden" :
+false,
+
+"format" :
+false,
+"database" :
+false,
+
+// ---- hda-edit
+"Edit attributes" :
+false,
+"Cannot edit attributes of datasets removed from disk" :
+false,
+"Undelete dataset to edit attributes" :
+false,
+"This dataset must finish uploading before it can be edited" :
+false,
+"This dataset is not yet editable" :
+false,
+
+"Delete" :
+false,
+"Dataset is already deleted" :
+false,
+
+"View or report this error" :
+false,
+
+"Run this job again" :
+false,
+
+"Visualize" :
+false,
+"Visualize in" :
+false,
+
+"Undelete it" :
+false,
+"Permanently remove it from disk" :
+false,
+"Unhide it" :
+false,
+
+"You may be able to" :
+false,
+"set it manually or retry auto-detection" :
+false,
+
+"Edit dataset tags" :
+false,
+"Edit dataset annotation" :
+false,
+
+
+// ---------------------------------------------------------------------------- misc. MVC
+"Tags" :
+false,
+"Annotation" :
+false,
+"Edit annotation" :
+false,
+
+
+// ----------------------------------------------------------------------------
+},
+    'ja'  : true,
+    'zh'  : true
+});
diff --git a/client/galaxy/scripts/nls/zh/locale.js b/client/galaxy/scripts/nls/zh/locale.js
new file mode 100644
index 0000000..3078c9c
--- /dev/null
+++ b/client/galaxy/scripts/nls/zh/locale.js
@@ -0,0 +1,209 @@
+/** zh localization */
+define({
+// ---------------------------------------------------------------------------- histories
+// ---- history-model
+// ---- history-view
+"This history is empty" :
+"历史已空",
+"No matching datasets found" :
+"未找到匹配的数据集",
+//"An error occurred while getting updates from the server" :
+//false,
+//"Please contact a Galaxy administrator if the problem persists" :
+//false,
+//TODO:
+//"An error was encountered while <% where %>" :
+//false,
+"Search datasets" :
+"搜索数据集",
+"You are currently viewing a deleted history!" :
+"正在查看已删除的历史",
+"You are over your disk quota" :
+"您已超过磁盘配额",
+//"Tool execution is on hold until your disk usage drops below your allocated quota" :
+//false,
+"All" :
+"皆",
+"None" :
+"一个也没有",
+"For all selected" :
+"为每个选定",
+
+// ---- history-view-edit
+//"Edit history tags" :
+//false,
+//"Edit history Annotation" :
+//false,
+"Click to rename history" :
+"单击要重命名的历史",
+// multi operations
+"Operations on multiple datasets" :
+"编辑多个数据集",
+//"Hide datasets" :
+//false,
+//"Unhide datasets" :
+//false,
+//"Delete datasets" :
+//false,
+//"Undelete datasets" :
+//false,
+"Permanently delete datasets" :
+"永久删除数据集",
+"This will permanently remove the data in your datasets. Are you sure?" :
+"这将永久在你的数据集删除数据。你确定?",
+
+// ---- history-view-annotated
+"Dataset" :
+"数据集",
+//"Annotation" :
+//false,
+
+// ---- history-view-edit-current
+"This history is empty. Click 'Get Data' on the left tool menu to start" :
+"历史已空,请单击左边窗格中‘获取数据’",
+"No matching datasets found" :
+"没有发现",
+"You must be logged in to create histories" :
+"你必须登录后才能创建历史",
+//TODO:
+//"You can <% loadYourOwn %> or <% externalSource %>" :
+//false,
+//"load your own data" :
+//false,
+//"get data from an external source" :
+//false,
+
+
+// ---------------------------------------------------------------------------- datasets
+// ---- hda-model
+//"Unable to purge dataset" :
+//false,
+
+// ---- hda-base
+// display button
+//"Cannot display datasets removed from disk" :
+//false,
+//"This dataset must finish uploading before it can be viewed" :
+//false,
+//"This dataset is not yet viewable" :
+//false,
+"View data" :
+"数据",
+// download button
+"Download" :
+"下载",
+"Download dataset" :
+"下载数据集",
+//"Additional files" :
+//false,
+// info/show_params
+"View details" :
+"查看详情",
+
+// dataset states
+// state: new
+//"This is a new dataset and not all of its data are available yet" :
+//false,
+// state: noPermission
+//"You do not have permission to view this dataset" :
+//false,
+// state: discarded
+//"The job creating this dataset was cancelled before completion" :
+//false,
+// state: queued
+"This job is waiting to run" :
+"等待运行的进程",
+// state: upload
+//"This dataset is currently uploading" :
+//false,
+// state: setting_metadata
+//"Metadata is being auto-detected" :
+//false,
+// state: running
+"This job is currently running" :
+"正在运行的进程",
+// state: paused
+//"This job is paused. Use the \"Resume Paused Jobs\" in the history menu to resume" :
+//false,
+// state: error
+"An error occurred with this dataset" :
+"进程运行时出错",
+// state: empty
+"No data" :
+"没有数据",
+// state: failed_metadata
+//"An error occurred setting the metadata for this dataset" :
+//false,
+
+// ajax error prefix
+//"There was an error getting the data for this dataset" :
+//false,
+
+// purged'd/del'd msg
+//"This dataset has been deleted and removed from disk" :
+//false,
+"This dataset has been deleted" :
+"此数据集已被删除",
+"This dataset has been hidden" :
+"此数据集已隐藏",
+
+"format" :
+"格式",
+"database" :
+"数据库",
+
+// ---- hda-edit
+"Edit attributes" :
+"编辑属性",
+//"Cannot edit attributes of datasets removed from disk" :
+//false,
+//"Undelete dataset to edit attributes" :
+//false,
+//"This dataset must finish uploading before it can be edited" :
+//false,
+//"This dataset is not yet editable" :
+//false,
+
+"Delete" :
+"删除",
+//"Dataset is already deleted" :
+//false,
+
+"View or report this error" :
+"报告错误",
+
+"Run this job again" :
+"重新运行",
+
+"Visualize" :
+"图形",
+//"Visualize in" :
+//false,
+
+"Undelete it" :
+"反删除",
+"Permanently remove it from disk" :
+"从磁盘中永久删除",
+"Unhide it" :
+"取消隐藏",
+
+//"You may be able to" :
+//false,
+//"set it manually or retry auto-detection" :
+//false,
+
+//"Edit dataset tags" :
+//false,
+//"Edit dataset annotation" :
+//false,
+
+
+// ---------------------------------------------------------------------------- misc. MVC
+//"Tags" :
+//false,
+//"Annotation" :
+//false,
+//"Edit annotation" :
+//false,
+
+});
diff --git a/client/galaxy/scripts/onload.js b/client/galaxy/scripts/onload.js
new file mode 100644
index 0000000..1c97868
--- /dev/null
+++ b/client/galaxy/scripts/onload.js
@@ -0,0 +1,186 @@
+// ============================================================================
+// Globals (temporary)
+// ============================================================================
+// HACK: add these to global scope until we stop asking for them there...
+// Via webpack: these are required here automatically by the provider plugin
+// Via script tag: these are redundant (identities) since they're already global
+window[ 'jQuery' ] = jQuery; // a weird form to prevent webpack from sub'ing 'window.jQuery' in the provider plugin
+window.$ = jQuery;
+window._ = _;
+window.Backbone = Backbone;
+// console.debug('globals loaded:', window.jQuery, window.Backbone, '...');
+
+// these are galaxy globals not defined in the provider (although they could be - but why encourage that?)
+window.panels = require( 'layout/panel' );
+// using extend bc there are multiple fns/objs to decorate the window with
+_.extend( window, require( 'layout/modal' ) );
+window.async_save_text = require( 'utils/async-save-text' );
+var POPUPMENU = require( 'ui/popupmenu' );
+window.make_popupmenu = POPUPMENU.make_popupmenu;
+window.make_popup_menus = POPUPMENU.make_popup_menus;
+window.init_tag_click_function = require( 'ui/autocom_tagging' );
+var TOURS = require( 'mvc/tours' );
+var QUERY_STRING = require( 'utils/query-string-parsing' );
+// console.debug( 'galaxy globals loaded' );
+
+// ============================================================================
+// jquery on document ready
+// ============================================================================
+// Replace select box with a text input box + autocomplete.
+function replace_big_select_inputs(min_length, max_length, select_elts) {
+
+    function refresh_select2( element ) {
+        var select_elt = $(element);
+        var options = { placeholder: 'Click to select',
+                        closeOnSelect: !select_elt.is('[multiple]'),
+                        dropdownAutoWidth: true,
+                        containerCssClass: 'select2-minwidth'
+                      };
+        return select_elt.select2( options );
+    }
+
+    // To do replace, the select2 plugin must be loaded.
+    if (!jQuery.fn.select2) {
+        return;
+    }
+
+    // Set defaults for min_length and max_length.
+    if (min_length === undefined) {
+        min_length = 20;
+    }
+    if (max_length === undefined) {
+        max_length = 3000;
+    }
+
+    select_elts = select_elts || $('select');
+
+    select_elts.each( function() {
+        // Skip multiple selects: .not() yields an empty set for them, so the
+        // option count below is 0 and we return early.
+        var select_elt = $(this).not('[multiple]');
+        // Make sure the number of options is within range.
+        var num_options = select_elt.find('option').length;
+        if ( (num_options < min_length) || (num_options > max_length) ) {
+            return;
+        }
+
+        if (select_elt.hasClass("no-autocomplete")) {
+            return;
+        }
+
+        /* Replaced jQuery.autocomplete with select2, notes:
+         * - multiple selects are supported
+         * - the original element is updated with the value, convert_to_values should not be needed
+         * - events are fired when updating the original element, so refresh_on_change should just work
+         *
+         * - should we still sort dbkey fields here?
+         */
+        refresh_select2( select_elt );
+    });
+}
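+
+// Example (illustrative): convert only the selects inside one form, for
+// selects with between 10 and 500 options ('#my-form' is a hypothetical
+// selector, not an id used by Galaxy):
+//     replace_big_select_inputs( 10, 500, $( '#my-form select' ) );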
+
+// Initialize refresh events.
+function init_refresh_on_change () {
+    $("select[refresh_on_change='true']")
+        .off('change')
+        .change(function() {
+            var select_field = $(this),
+                select_val = select_field.val(),
+                refresh = false,
+                ref_on_change_vals = select_field.attr("refresh_on_change_values");
+            if (ref_on_change_vals) {
+                ref_on_change_vals = ref_on_change_vals.split(',');
+                var last_selected_value = select_field.attr("last_selected_value");
+                if ($.inArray(select_val, ref_on_change_vals) === -1 && $.inArray(last_selected_value, ref_on_change_vals) === -1) {
+                    return;
+                }
+            }
+            $(window).trigger("refresh_on_change");
+            $(document).trigger("convert_to_values"); // Convert autocomplete text to values
+            select_field.get(0).form.submit();
+        });
+
+    // checkboxes refresh on change
+    $(":checkbox[refresh_on_change='true']")
+        .off('click')
+        .click( function() {
+            var select_field = $(this),
+                select_val = select_field.val(),
+                refresh = false,
+                ref_on_change_vals = select_field.attr("refresh_on_change_values");
+            if (ref_on_change_vals) {
+                ref_on_change_vals = ref_on_change_vals.split(',');
+                var last_selected_value = select_field.attr("last_selected_value");
+                if ($.inArray(select_val, ref_on_change_vals) === -1 && $.inArray(last_selected_value, ref_on_change_vals) === -1) {
+                    return;
+                }
+            }
+            $(window).trigger("refresh_on_change");
+            select_field.get(0).form.submit();
+        });
+
+    // Links with confirmation
+    $( "a[confirm]" )
+        .off('click')
+        .click( function() {
+            return confirm( $(this).attr("confirm") );
+        });
+}
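+
+// Markup contract (illustrative): a field opts in through attributes, e.g.
+//     <select name="genome" refresh_on_change="true"
+//             refresh_on_change_values="hg19,hg38">...</select>
+// and its form resubmits only when the selection moves into or out of the
+// listed values ("genome", "hg19", "hg38" are hypothetical examples).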
+// used globally in grid-view
+window.init_refresh_on_change = init_refresh_on_change;
+
+$(document).ready( function() {
+    // Refresh events for form fields.
+    init_refresh_on_change();
+
+    // Tooltips
+    if ( $.fn.tooltip ) {
+        // Put tooltips below items in panel header so that they do not overlap masthead.
+        $(".unified-panel-header [title]").tooltip( { placement: 'bottom' } );
+
+        // default tooltip initialization, it will follow the data-placement tag for tooltip location
+        // and fallback to 'top' if not present
+        $("[title]").tooltip();
+    }
+    // Make popup menus.
+    make_popup_menus();
+
+    // Replace big selects.
+    replace_big_select_inputs(20, 1500);
+
+    // If galaxy_main frame does not exist and link targets galaxy_main,
+    // add use_panels=True and set target to self.
+    $("a").click( function() {
+        var anchor = $(this);
+        var galaxy_main_exists = (parent.frames && parent.frames.galaxy_main);
+        if ( ( anchor.attr( "target" ) == "galaxy_main" ) && ( !galaxy_main_exists ) ) {
+            var href = anchor.attr("href");
+            if (href.indexOf("?") == -1) {
+                href += "?";
+            }
+            else {
+                href += "&";
+            }
+            href += "use_panels=True";
+            anchor.attr("href", href);
+            anchor.attr("target", "_self");
+        }
+        return anchor;
+    });
+
+    var et = JSON.parse(sessionStorage.getItem('activeGalaxyTour'));
+    if (et){
+        et = TOURS.hooked_tour_from_data(et);
+        if (et && et.steps){
+            if (window && window.self === window.top){
+                // Only kick off a new tour if this is the toplevel window (non-iframe).  This
+                // functionality actually *could* be useful, but we'd need to handle it better and
+                // come up with some design guidelines for tours jumping between windows.
+                // Disabling for now.
+                var tour = new Tour(_.extend({
+                    steps: et.steps,
+                }, TOURS.tour_opts));
+                tour.init();
+                tour.restart();
+            }
+        }
+    }
+});
diff --git a/client/galaxy/scripts/polyfills.js b/client/galaxy/scripts/polyfills.js
new file mode 100644
index 0000000..cfd9eac
--- /dev/null
+++ b/client/galaxy/scripts/polyfills.js
@@ -0,0 +1,77 @@
+/**
+    Perform feature inference and redirect if below some minimum (must have canvas, etc.)
+    and polyfill for non-standard features.
+ */
+
+(function() {
+    /* TODO: move to modernizr or something besides us doing this...
+     * These are across all of our apps (reports, tool shed), but:
+     *     these should be configurable via options because they all need different things.
+     * So, analysis-polyfills.js, reports-polyfills.js (or analysis/polyfills)
+     */
+    'use strict';
+    /*globals window, clearTimeout */
+
+    // ------------------------------------------------------------------ polyfills
+    // console protection needed in some versions of IE (at this point (IE>=9), shouldn't be needed)
+    window.console = window.console || {
+        log     : function(){},
+        debug   : function(){},
+        info    : function(){},
+        warn    : function(){},
+        error   : function(){},
+        assert  : function(){}
+    };
+
+    // PhantomJS does not provide a native Object.assign; fall back to _.extend.
+    Object.assign = Object.assign || _.extend;
+
+    // requestAnimationFrame polyfill
+    var lastTime = 0;
+    var vendors = ['ms', 'moz', 'webkit', 'o'];
+    for(var x = 0; x < vendors.length && !window.requestAnimationFrame; ++x) {
+        window.requestAnimationFrame = window[vendors[x]+'RequestAnimationFrame'];
+        window.cancelAnimationFrame = window[vendors[x]+'CancelAnimationFrame'] ||
+                                      window[vendors[x]+'CancelRequestAnimationFrame'];
+    }
+
+    if (!window.requestAnimationFrame)
+        window.requestAnimationFrame = function(callback, element) {
+            var currTime = new Date().getTime();
+            var timeToCall = Math.max(0, 16 - (currTime - lastTime));
+            var id = window.setTimeout(function() { callback(currTime + timeToCall); },
+              timeToCall);
+            lastTime = currTime + timeToCall;
+            return id;
+        };
+
+    if (!window.cancelAnimationFrame)
+        window.cancelAnimationFrame = function(id) {
+            clearTimeout(id);
+        };
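+
+    // Usage (illustrative): callers can now rely on the standard API, whether
+    // it is native, vendor-prefixed, or the setTimeout fallback above:
+    //     var id = window.requestAnimationFrame( function( ts ) { /* draw */ } );
+    //     window.cancelAnimationFrame( id );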
+
+    // ------------------------------------------------------------------ can't/won't polyfill
+    var features = [
+        { name: 'canvas',           compatible: function(){ return window.CanvasRenderingContext2D; } },
+        { name: 'sessionStorage',   compatible: function(){
+            try {
+                return window.sessionStorage.length >= 0;
+            } catch( err ){}
+            return false;
+        }},
+    ];
+    // build a list of feature names for features that were not found
+    var incompatibilities = features
+        .filter( function( feature ){ return !feature.compatible(); })
+        .map( function( feature ){ return feature.name; });
+
+    // if there are needed features missing, follow the index link to the static incompat warning
+    if( incompatibilities.length ){
+        var root = document.querySelectorAll( 'link[rel="index"]' ).item( 0 );
+        if( root ){
+            window.location = root.href + 'static/incompatible-browser.html';
+        }
+        console.log( 'incompatible browser:\n' + incompatibilities.join( '\n' ) );
+    }
+
+})();
diff --git a/client/galaxy/scripts/reports_webapp/run_stats.js b/client/galaxy/scripts/reports_webapp/run_stats.js
new file mode 100644
index 0000000..7542b63
--- /dev/null
+++ b/client/galaxy/scripts/reports_webapp/run_stats.js
@@ -0,0 +1,654 @@
+function days_in_month(month, year) {
+    return new Date(year, month, 0).getDate();
+}
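+
+// Example: days_in_month(2, 2016) returns 29. `month` is effectively 1-based:
+// new Date(year, month, 0) yields the last day of the month before 0-based
+// index `month`, which is exactly 1-based month `month`.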
+
+function date_by_subtracting_days(date, days) {
+    return new Date(
+        date.getFullYear(), 
+        date.getMonth(), 
+        date.getDate() - days,
+        date.getHours(),
+        date.getMinutes(),
+        date.getSeconds(),
+        date.getMilliseconds()
+    );
+}
+
+function date_by_subtracting_hours(date, hours) {
+    return new Date(
+        date.getFullYear(), 
+        date.getMonth(), 
+        date.getDate(),
+        date.getHours() - hours,
+        date.getMinutes(),
+        date.getSeconds(),
+        date.getMilliseconds()
+    );
+}
+
+// Gets the utc time without minutes and seconds
+function get_utc_time_hours() {
+    var date = new Date();
+    return new Date(
+        date.getUTCFullYear(),
+        date.getUTCMonth(),
+        date.getUTCDate(),
+        date.getUTCHours(),
+        0, 0
+    );
+}
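+
+// Note: the returned Date is built from UTC components but interpreted in
+// the local zone, e.g. at 14:37 UTC it reads as local "14:00", which is
+// adequate for bucketing chart data by the hour.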
+
+// Refreshes the page for more up to date information
+function refresh() {
+    window.location.reload(true);
+}
+
+// This is commented out until we make Reports more responsive.
+// setTimeout(refresh, 60000); //1 minute = 60000 ms
+
+function create_chart( inp_data, name, time, title ) {
+    // Initialize starting variables
+    var data = inp_data;
+
+    var now = get_utc_time_hours();
+
+    var hours_array = [];
+    for(var i = 0; i < 24; i++) {
+        hours_array.push(date_by_subtracting_hours(now, i));
+    }
+
+    var days_array = [];
+    for(var j = 0; j < 30; j++) {
+        days_array.push(date_by_subtracting_days(now, j));
+    }
+
+    var margin = {top: 60, right: 30, bottom: 50, left: 60};
+    var width = 300;
+    var barWidth = 0;
+    if(time == "hours") {
+        barWidth = width / 24;
+    } else if(time == "days") {
+        barWidth = width / 30;
+    }
+    var chart_width = width + margin.left + margin.right;
+
+    var chart_zoom = 1.75;
+    var height = 150;
+    var zoom = (d3.max(data) !== 0) ? (height / d3.max(data)) : 1.0;
+    var chart_height = height + margin.top + margin.bottom;
+
+    // Function for zooming in and out of charts
+    function click() {
+        var classes = d3.select(this).attr("class");
+        classes = classes.split(" ");
+        d3.selectAll("." + classes[0]).filter("." + classes[1])
+            .style("cursor", "zoom-in")
+            .transition()
+                .duration(750)
+                .attr("height", chart_height)
+                .attr("width", chart_width);
+
+        d3.select(this)
+            .style("cursor", "default")
+            .transition()
+                .duration(750)
+                .attr("height", chart_height*chart_zoom)
+                .attr("width", chart_width*chart_zoom);
+    }
+
+    // Initialize all chart containers to have the correct height
+    $(".charts").css("height", chart_height * (chart_zoom));
+
+    // Create the chart object
+    var chart = d3.select("#" + name)
+        .attr("width", chart_width)
+        .attr("height", chart_height)
+        .attr("preserveAspectRatio", "xMidYMin")
+        .attr("viewBox", "0 0 " + chart_width + " " + chart_height)
+        .on("click", click);
+
+    // Create bars on the chart and associate data with them
+    var bar = chart.selectAll("g")
+        .data(data)
+        .enter().append("g")
+            .attr("transform", function(d, i) {
+                // Place the bar at its slot along the x axis
+                var curr_margin = +margin.left + (i * barWidth);
+                return "translate(" + curr_margin + "," + margin.top + ")";
+            })
+            .on("mouseenter", function(d) {
+                // Show tool tip; its width scales with the number of digits in d
+                var i = 1;
+                var size = d;
+
+                while( size >= 10) {
+                    size = size / 10;
+                    i++;
+                }
+
+                var wdth = (i * 4) + 10;
+                d3.select(d3.event.path[1]).select(".tool_tip")
+                    .select("text")
+                        .attr("transform", "translate( " + (margin.left - 5) + ", " + ((height - (d * zoom)) + +margin.top + 10) + " )" )
+                        .attr("visibility", "visible")
+                        .text(d);
+
+                d3.select(d3.event.path[1]).select(".tool_tip")
+                    .attr("width", wdth + "px")
+                    .attr("height", "15px")
+                    .select("rect")
+                        .attr("transform", "translate( " + ((+margin.left) - wdth) + ", " + ((height - (d * zoom)) + +margin.top) + " )" )
+                        .attr("width", wdth + "px")
+                        .attr("height", "15px")
+                        .attr("fill","#ebd9b2");
+            })
+            .on("mouseleave", function(d) {
+                // Remove tool tip
+                d3.select(d3.event.path[1]).select(".tool_tip")
+                    .select("text")
+                        .attr("visibility", "hidden");
+
+                d3.select(d3.event.path[1]).select(".tool_tip")
+                    .select("rect")
+                        .attr("width", "0")
+                        .attr("height", "0")
+                        .attr("fill","")
+                        .text(d);
+            });
+
+    // Add a title to the chart
+    chart.append("g")
+        .append("text")
+        .attr("class", "title")
+        .attr("text-anchor", "end")
+        .attr("transform", function(e) {
+            return "translate( " + width + ",15 )";
+        })
+        .text(title);
+
+    // Add an x axis line to the chart
+    chart.append("g")
+        .attr("class", "axis")
+        .append("path")
+            .attr("class", "x")
+            .attr("d", function(d) {
+                var m_x = margin.left;
+                var m_y = +margin.top + height;
+                var l_x = m_x + width;
+                var l_y = m_y;
+
+                return "M" + m_x + " " + m_y + " L " + l_x + " " + l_y;
+            });
+
+    // The y scale maps the normalized domain [0, 1] onto the chart height
+    var y = d3.scale.linear()
+        .range([height, 0]);
+
+    // Create a yAxis object; tick labels are rescaled to the data's actual maximum
+    var yAxis = d3.svg.axis()
+        .scale(y)
+        .orient("left")
+        .tickFormat( function(d) { return d3.round( d*d3.max(data), 0 ); });
+
+    // Put the y axis on the chart
+    chart.append("g")
+        .attr("class", "y axis")
+        .attr("id", ("y_" + name))
+        .attr("text-anchor", "end")
+        .attr("transform", "translate( " + margin.left + "," + margin.top + ")")
+        .call(yAxis)
+            .select(".domain");
+
+    // Put a title for y axis on chart
+    chart.append("g")
+        .append("text")
+            .attr("class", "ax_title")
+            .attr("transform", function(e) {
+                var axis = d3.select("#y_" + name).node()
+                var left_pad = +margin.left - axis.getBoundingClientRect().width - 5;
+                var top_pad = +margin.top + (axis.getBoundingClientRect().height/2) - 30
+                var trans = "translate(" + left_pad + "," + top_pad + ")rotate(-90)";
+
+                return trans;
+            })
+            .text("Number of Jobs");
+
+    // Add color to the chart's bars
+    bar.append("rect")
+        .attr("y", function(d) { return height - (d * zoom); })
+        .attr("height", function(d) { return (d * zoom); })
+        .attr("width", barWidth - 1);
+
+    // Append x axis
+    if(time == "hours") {
+        // Append hour lines
+        bar.append("line")
+            .attr("x1", 0)
+            .attr("y1", 0)
+            .attr("x2", 0)
+            .attr("y2", 3)
+            .attr("stroke", "black")
+            .attr("stroke-width", 1)
+            .attr("pointer-events", "none")
+            .attr("transform", function(d, i) {
+                return "translate( " + (barWidth/2) + ", " + height + ")"
+            });
+
+        // Append hour numbers
+        bar.append("text")
+            .attr("fill", "rgb(0,0,0)")
+            .attr("transform", "translate( 10, " + (height + 10) + " )")
+            .text(function(d, i) {
+                var time = "0000"
+
+                if( hours_array[i].getHours() < 10 ) {
+                    time = "0" + String(hours_array[i].getHours());
+                } else {
+                    time = hours_array[i].getHours();
+                }
+
+                return time;
+            });
+
+        // Append day lines
+        var curr_day = "";
+        var first = false;
+        bar.append("line")
+            .attr("x1", 0)
+            .attr("y1", 0)
+            .attr("x2", 0)
+            .attr("y2", function(d, i) {
+                var _y2 = 0;
+
+                if(hours_array[i].getDate() != curr_day) {
+                    if(!first) {
+                        _y2 = 27;
+                        first = true;
+                    } else {
+                        _y2 = 20;
+                    }
+                    
+                    curr_day = hours_array[i].getDate();
+                }
+
+                return _y2;
+            })
+            .attr("stroke", "black")
+            .attr("stroke-width", 1)
+            .attr("pointer-events", "none")
+            .attr("transform", function(d, i) {
+                return "translate( 0, " + height + ")";
+            });
+
+        // Append day numbers
+        curr_day = "";
+        curr_day_text = "";
+        first = false;
+        bar.append("text")
+            .attr("fill", "rgb(0,0,0)")
+            .attr("pointer-events", "none")
+            .text(function(d, i) {
+                var time = "";
+                var locale = "en-us";
+
+                if(hours_array[i].getDate() != curr_day_text) {
+                    time = String(hours_array[i].toLocaleString(locale, { month: "long" }));
+                    time += " " + String(hours_array[i].getDate());
+
+                    curr_day_text = hours_array[i].getDate();
+                }
+
+                return time;
+            })
+            .attr("transform", function(d, i) {
+                var text_height = height;
+                var this_width = d3.select(this).node().getBBox().width;
+
+                if(hours_array[i].getDate() != curr_day) {
+                    if(!first) {
+                        text_height += 26;
+                        first = true;
+                    } else {
+                        text_height += 18;
+                    }
+                    
+                    curr_day = hours_array[i].getDate();
+                }
+
+                return "translate( " + (this_width + 2) + ", " + (text_height) + " )"
+            });
+    } else if(time == "days") {
+        // Append day lines
+        bar.append("line")
+            .attr("x1", 0)
+            .attr("y1", 0)
+            .attr("x2", 0)
+            .attr("y2", 3)
+            .attr("stroke", "black")
+            .attr("stroke-width", 1)
+            .attr("pointer-events", "none")
+            .attr("transform", function(d, i) {
+                return "translate( " + (barWidth/2) + ", " + height + ")"
+            });
+
+        // Append day numbers
+        bar.append("text")
+            .attr("fill", "rgb(0,0,0)")
+            .attr("transform", "translate( 9, " + (height + 10) + " )")
+            .text(function(d, i) {
+                var time = "0000"
+
+                if( days_array[i].getDate() < 10 ) {
+                    time = "0" + String(days_array[i].getDate());
+                } else {
+                    time = days_array[i].getDate();
+                }
+
+                return time;
+            });
+
+        // Append month lines
+        var curr_month = "";
+        var first = false;
+        bar.append("line")
+            .attr("x1", 0)
+            .attr("y1", 0)
+            .attr("x2", 0)
+            .attr("y2", function(d, i) {
+                var _y2 = 0;
+
+                if(days_array[i].getMonth() != curr_month) {
+                    if(!first) {
+                        _y2 = 27;
+                        first = true;
+                    } else {
+                        _y2 = 20;
+                    }
+                    
+                    curr_month = days_array[i].getMonth();
+                }
+
+                return _y2;
+            })
+            .attr("stroke", "black")
+            .attr("stroke-width", 1)
+            .attr("pointer-events", "none")
+            .attr("transform", function(d, i) {
+                return "translate( 0, " + height + ")";
+            });
+
+        // Append month numbers
+        curr_month = "";
+        curr_month_text = "";
+        first = false;
+        bar.append("text")
+            .attr("fill", "rgb(0,100,0)")
+            .attr("pointer-events", "none")
+            .text(function(d, i) {
+                var time = "";
+                var locale = "en-us";
+
+                if(days_array[i].getMonth() != curr_month_text) {
+                    time = String(days_array[i].toLocaleString(locale, { month: "long" }));
+                    time += " " + String(days_array[i].getFullYear());
+
+                    curr_month_text = days_array[i].getMonth();
+                }
+
+                return time;
+            })
+            .attr("transform", function(d, i) {
+                var text_height = height;
+                var this_width = d3.select(this).node().getBBox().width;
+
+                if(days_array[i].getMonth() != curr_month) {
+                    if(!first) {
+                        text_height += 26;
+                        first = true;
+                    } else {
+                        text_height += 18;
+                    }
+                    
+                    curr_month = days_array[i].getMonth();
+                }
+
+                return "translate( " + (this_width + 2) + ", " + (text_height) + " )"
+            });
+    }
+
+    // Put an invisible tool tip on the chart
+    chart.append("g")
+        .attr("class", "tool_tip")
+        .append("rect");
+    chart.select(".tool_tip")
+        .append("text");
+
+    // Initialize initial zoomed charts
+    if(name == "jc_dy_chart" || name == "jc_hr_chart") {
+        d3.select("#" + name)
+            .attr("height", chart_height * chart_zoom)
+            .attr("width", chart_width * chart_zoom)
+            .style("cursor", "default");
+    }
+}
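+
+// A minimal usage sketch (hypothetical data values; "jc_hr_chart" is one of the
+// svg ids this file already special-cases above). inp_data is an array of 24
+// hourly (or 30 daily) job counts, where data[0] appears to be the current hour:
+//
+//   var hourly_counts = [3, 0, 7, /* ...20 more... */ 1];
+//   create_chart(hourly_counts, "jc_hr_chart", "hours", "Jobs per hour");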
+
+//============================================================================================================
+
+function create_histogram( inp_data, name, title ) {
+    // Initialize initial variables
+    // inp_data is an array of numbers that are the amount of minutes per run
+    var data = inp_data;
+
+    var chart_zoom = 1.75;
+    var margin = {top: 60, right: 30, bottom: 50, left: 60};
+
+    var height = 150;
+    var chart_height = height + margin.top + margin.bottom;
+
+    var width = 300;
+    var chart_width = width + margin.left + margin.right;
+
+    // Create x axis metadata
+    // Used for x axis, histogram creation, and bar initialization
+    var x = d3.scale.linear()
+        .domain([0, d3.max(data)])
+        .range([0, width]);
+
+    // Generate a histogram using twenty uniformly-spaced bins.
+    var data = d3.layout.histogram()
+        .bins(x.ticks(20))(data);
+
+    // Create an array of the sizes of the bars
+    var lengths = [];
+    for(var i = 0; i < data.length; i ++) {
+        lengths.push(data[i].length)
+    }
+
+    // Find the amount needed to magnify the bars so the tallest fills the height
+    var zoom = 1.0;
+    if(d3.max(lengths) != 0) {
+        zoom = height / d3.max(lengths);
+    }
+
+    // Create y axis metadata
+    // Used for y axis and bar initialization
+    var y = d3.scale.linear()
+        .domain([0, d3.max(data, function(d) { return d.y; })])
+        .range([height, 0]);
+
+    // Function for zooming in and out of charts
+    function click() {
+        var classes = d3.select(this).attr("class");
+        classes = classes.split(" ");
+        d3.selectAll("." + classes[0]).filter("." + classes[1])
+            .style("cursor", "zoom-in")
+            .transition()
+                .duration(750)
+                .attr("height", chart_height)
+                .attr("width", chart_width);
+
+        d3.select(this)
+            .style("cursor", "default")
+            .transition()
+                .duration(750)
+                .attr("height", chart_height*chart_zoom)
+                .attr("width", chart_width*chart_zoom);
+    }
+
+    // Formatter for x axis times (converting minutes to HH:MM).
+    var formatMinutes = function(d) {
+            var hours = Math.floor( d / 60 );
+            var minutes = Math.floor(d - (hours * 60));
+
+            if(hours < 10) {
+                hours = "0" + hours;
+            }
+            if(minutes < 10) {
+                minutes = "0" + minutes;
+            }
+
+            return hours + ":" + minutes;
+        };
+
+    // Create a chart object
+    var chart = d3.select("#" + name)
+        .attr("viewBox", "0 0 " + chart_width + " " + chart_height)
+        .attr("width", chart_width)
+        .attr("height", chart_height)
+        .attr("preserveAspectRatio", "xMidYMin")
+        .on("click", click);
+
+    // Put title on chart
+    chart.append("g")
+        .append("text")
+        .attr("class", "title")
+        .attr("transform", function(e) {
+            return "translate( " + width + ",15 )";
+        })
+        .text(title);
+
+    // Put bars on chart
+    var bar = chart.selectAll(".bar")
+        .data(data)
+      .enter().append("g")
+        .attr("class", "bar")
+        .attr("transform", function(d) {
+            return "translate(" + (+x(d.x) + +margin.left) + "," + (+y(d.y) + +margin.top) + ")"; })
+        .on("mouseenter", function(d) {
+            // Show tool tip; its width scales with the number of digits in the count
+            var i = 0;
+            var size = d.length;
+
+            while( size >= 1) {
+                size = size / 10;
+                i++;
+            }
+            var wdth = (i * 4) + 10;
+            d3.select(d3.event.path[1]).select(".tool_tip")
+                .select("text")
+                    .attr("transform", "translate( " + (margin.left - 5) + ", " + (height - (d.length * zoom) + +margin.top + 10) + " )" )
+                    .attr("visibility", "visible")
+                    .text(d.length);
+
+            d3.select(d3.event.path[1]).select(".tool_tip")
+                .attr("width", wdth + "px")
+                .attr("height", "15px")
+                .select("rect")
+                    .attr("transform", "translate( " + ((+margin.left) - wdth) + ", " + (height - (d.length * zoom) + +margin.top) + " )")
+                    .attr("width", wdth + "px")
+                    .attr("height", "15px")
+                    .attr("fill","#ebd9b2");
+        })
+        .on("mouseleave", function(d) {
+            // Remove tool tip
+            d3.select(d3.event.path[1]).select(".tool_tip")
+                .select("text")
+                    .attr("visibility", "hidden");
+
+            d3.select(d3.event.path[1]).select(".tool_tip")
+                .select("rect")
+                    .attr("width", "0")
+                    .attr("height", "0")
+                    .attr("fill","")
+        });
+
+    // Create bar width
+    var bar_x;
+    if(data[0] === undefined) {
+        bar_x = 1;
+    } else {
+        bar_x = x(data[0].dx);
+    }
+
+    // Add color to bar
+    bar.append("rect")
+        .attr("x", 1)
+        .attr("width", bar_x - 1)
+        .attr("height", function(d) { return height - y(d.y); });
+
+    // Create x axis
+    var xAxis = d3.svg.axis()
+        .scale(x)
+        .orient("bottom")
+        .tickFormat(formatMinutes);
+
+    // Add x axis to chart
+    chart.append("g")
+        .attr("class", "x axis")
+        .attr("id", "x_" + name)
+        .attr("transform", "translate( " + margin.left + "," + (+height + +margin.top) + ")")
+        .call(xAxis);
+
+    // Add a title to the x axis
+    chart.append("g")
+        .append("text")
+            .attr("class", "ax_title")
+            .attr("transform", function(e) {
+                var axis = d3.select("#x_" + name).node()
+                var left_pad = +margin.left + (axis.getBoundingClientRect().width/2) + 30;
+                var top_pad = +margin.top + height + axis.getBoundingClientRect().height + 10
+                var trans = "translate(" + left_pad + "," + top_pad + ")";
+
+                return trans;
+            })
+            .text("ETA - hrs:mins");
+
+    // Create y axis
+    var yAxis = d3.svg.axis()
+        .scale(y)
+        .orient("left");
+
+    // Add y axis to chart
+    chart.append("g")
+        .attr("class", "y axis")
+        .attr("id", ("y_" + name))
+        .attr("transform", "translate( " + margin.left + "," + margin.top + ")")
+        .call(yAxis);
+
+    // Add a title to the y axis
+    chart.append("g")
+        .append("text")
+            .attr("class", "ax_title")
+            .attr("transform", function(e) {
+                var axis = d3.select("#y_" + name).node()
+                var left_pad = +margin.left - axis.getBoundingClientRect().width - 5;
+                var top_pad = +margin.top + (axis.getBoundingClientRect().height/2) - 30
+                var trans = "translate(" + left_pad + "," + top_pad + ")rotate(-90)";
+
+                return trans;
+            })
+            .text("Number of Jobs");
+
+    // Put an invisible tool tip on the chart
+    chart.append("g")
+        .attr("class", "tool_tip")
+        .append("rect");
+    chart.select(".tool_tip")
+        .append("text");
+}
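+
+// A minimal usage sketch (hypothetical element id and values, not from this
+// codebase); inp_data is an array of per-job run times in minutes:
+//
+//   var run_minutes = [12, 95, 3, 240, 61];
+//   create_histogram(run_minutes, "run_time_chart", "Job run times");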
diff --git a/client/galaxy/scripts/ui/autocom_tagging.js b/client/galaxy/scripts/ui/autocom_tagging.js
new file mode 100644
index 0000000..7f11131
--- /dev/null
+++ b/client/galaxy/scripts/ui/autocom_tagging.js
@@ -0,0 +1,391 @@
+// from: https://raw.githubusercontent.com/umdjs/umd/master/jqueryPlugin.js
+// Uses AMD or browser globals to create a jQuery plugin.
+(function (factory) {
+    if (typeof define === 'function' && define.amd) {
+        //TODO: So...this turns out to be an all or nothing thing. If I load jQuery in the define below, it will
+        //  (of course) wipe the old jquery *and all the plugins loaded into it*. So the define below *is still
+        //  relying on jquery being loaded globally* in order to preserve plugins.
+        define([ 'jquery' ], factory);
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+
+}(function ( jQuery ) {
+'use strict';
+
+var $ = jQuery;
+
+// ============================================================================
+/**
+* JQuery extension for tagging with autocomplete.
+* @author: Jeremy Goecks
+* @require: jquery.autocomplete plugin
+*/
+//
+// Initialize "tag click functions" for tags.
+//
+function init_tag_click_function(tag_elt, click_func) {
+    $(tag_elt).find('.tag-name').each( function() {
+        $(this).click( function() {
+            var tag_str = $(this).text();
+            var tag_name_and_value = tag_str.split(":");
+            click_func(tag_name_and_value[0], tag_name_and_value[1]);
+            return true;
+        });
+    });
+}
+
+jQuery.fn.autocomplete_tagging = function(options) {
+
+    var defaults = {
+        get_toggle_link_text_fn: function(tags) {
+            var text = "";
+            var num_tags = _.size(tags);
+            if (num_tags > 0) {
+                text = num_tags + (num_tags > 1 ? " Tags" : " Tag");
+            } else {
+                text = "Add tags";
+            }
+            return text;
+        },
+        tag_click_fn : function (name, value) {},
+        editable: true,
+        input_size: 20,
+        in_form: false,
+        tags : {},
+        use_toggle_link: true,
+        item_id: "",
+        add_tag_img: "",
+        add_tag_img_rollover: "",
+        delete_tag_img: "",
+        ajax_autocomplete_tag_url: "",
+        ajax_retag_url: "",
+        ajax_delete_tag_url: "",
+        ajax_add_tag_url: ""
+    };
+
+    var settings = jQuery.extend(defaults, options);
+
+    //
+    // Initialize object's elements.
+    //
+
+    // Get elements for this object. For this_obj, assume the last element with the id is the "this"; this is somewhat of a hack to address the problem
+    // that there may be two tagging elements for a single item if there are both community and individual tags for an element.
+    var this_obj = $(this);
+    var tag_area = this_obj.find('.tag-area');
+    var toggle_link = this_obj.find('.toggle-link');
+    var tag_input_field = this_obj.find('.tag-input');
+    var add_tag_button = this_obj.find('.add-tag-button');
+
+    // Initialize toggle link.
+    toggle_link.click( function() {
+        // Take special actions depending on whether toggle is showing or hiding link.
+        var after_toggle_fn;
+        if (tag_area.is(":hidden")) {
+            after_toggle_fn = function() {
+                // If there are no tags, go right to editing mode by generating a click on the area.
+                var num_tags = $(this).find('.tag-button').length;
+                if (num_tags === 0) {
+                    tag_area.click();
+                }
+            };
+        } else {
+            after_toggle_fn = function() {
+                tag_area.blur();
+            };
+        }
+        tag_area.slideToggle("fast", after_toggle_fn);
+        return $(this);
+    });
+
+    // Initialize tag input field.
+    if (settings.editable) {
+        tag_input_field.hide();
+    }
+    tag_input_field.keyup( function(e) {
+        if ( e.keyCode === 27 ) {
+            // Escape key
+            $(this).trigger( "blur" );
+        } else if (
+            ( e.keyCode === 13 ) || // Return Key
+            ( e.keyCode === 188 ) || // Comma
+            ( e.keyCode === 32 ) // Space
+        ) {
+            //
+            // Check input.
+            //
+
+            var new_value = this.value;
+
+            // Suppress space after a ":"
+            if ( new_value.indexOf(": ", new_value.length - 2) !== -1) {
+                this.value = new_value.substring(0, new_value.length-1);
+                return false;
+            }
+
+            // Remove trigger keys from input.
+            if ( (e.keyCode === 188) || (e.keyCode === 32) ) {
+                new_value = new_value.substring( 0 , new_value.length - 1 );
+            }
+
+            // Trim whitespace.
+            new_value = $.trim(new_value);
+
+            // Too short?
+            if (new_value.length < 2) {
+                return false;
+            }
+
+            //
+            // New tag OK - apply it.
+            //
+
+            this.value = ""; // Reset text field now that tag is being added
+
+            // Add button for tag after all other tag buttons.
+            var new_tag_button = build_tag_button(new_value);
+            var tag_buttons = tag_area.children(".tag-button");
+            if (tag_buttons.length !== 0) {
+                var last_tag_button = tag_buttons.slice(tag_buttons.length-1);
+                last_tag_button.after(new_tag_button);
+            } else {
+                tag_area.prepend(new_tag_button);
+            }
+
+            // Add tag to internal list.
+            var tag_name_and_value = new_value.split(":");
+            settings.tags[tag_name_and_value[0]] = tag_name_and_value[1];
+
+            // Update toggle link text.
+            var new_text = settings.get_toggle_link_text_fn(settings.tags);
+            toggle_link.text(new_text);
+
+            // Commit tag to server.
+            var zz = $(this);
+            $.ajax({
+                url: settings.ajax_add_tag_url,
+                data: { new_tag: new_value },
+                error: function() {
+                    // Failed. Roll back changes and show alert.
+                    new_tag_button.remove();
+                    delete settings.tags[tag_name_and_value[0]];
+                    var new_text = settings.get_toggle_link_text_fn(settings.tags);
+                    toggle_link.text(new_text);
+                    alert( "Add tag failed" );
+                },
+                success: function() {
+                    // Flush autocomplete cache because it's now out of date.
+                    // TODO: in the future, we could remove the particular item
+                    // that was chosen from the cache rather than flush it.
+                    zz.data('autocompleter').cacheFlush();
+                }
+            });
+
+            return false;
+        }
+    });
+
+    // Add autocomplete to input.
+    var format_item_func = function(key, row_position, num_rows, value, search_term) {
+        var tag_name_and_value = value.split(":");
+        return (tag_name_and_value.length === 1 ? tag_name_and_value[0] : tag_name_and_value[1]);
+    };
+    var autocomplete_options = { selectFirst: false, formatItem: format_item_func,
+            autoFill: false, highlight: false };
+    tag_input_field.autocomplete(settings.ajax_autocomplete_tag_url, autocomplete_options);
+
+
+    // Initialize delete tag images for current tags.
+    this_obj.find('.delete-tag-img').each(function() {
+        init_delete_tag_image( $(this) );
+    });
+
+
+    // Initialize tag click function.
+    init_tag_click_function($(this), settings.tag_click_fn);
+
+    // Initialize "add tag" button.
+    add_tag_button.click( function() {
+        $(this).hide();
+
+        // Clicking on button is the same as clicking on the tag area.
+        tag_area.click();
+        return false;
+    });
+
+    //
+    // Set up tag area interactions; these are needed only if tags are editable.
+    //
+    if (settings.editable) {
+        // When the tag area blurs, go to "view tag" mode.
+        tag_area.bind("blur", function(e) {
+            if (_.size(settings.tags) > 0) {
+                add_tag_button.show();
+                tag_input_field.hide();
+                tag_area.removeClass("active-tag-area");
+                // tag_area.addClass("tooltip");
+            } else {
+                // No tags; do nothing so that the input stays visible.
+            }
+        });
+
+        // On click, enable user to add tags.
+        tag_area.click( function(e) {
+            var is_active = $(this).hasClass("active-tag-area");
+
+            // If a "delete image" object was pressed and area is inactive, do nothing.
+            if ($(e.target).hasClass("delete-tag-img") && !is_active) {
+                return false;
+            }
+
+            // If a "tag name" object was pressed and area is inactive, do nothing.
+            if ($(e.target).hasClass("tag-name") && !is_active) {
+                return false;
+            }
+
+            // Remove tooltip.
+            // $(this).removeClass("tooltip");
+
+            // Hide add tag button, show tag_input field. Change background to show
+            // area is active.
+            $(this).addClass("active-tag-area");
+            add_tag_button.hide();
+            tag_input_field.show();
+            tag_input_field.focus();
+
+            // Add handler to document that will call blur when the tag area is blurred;
+            // a tag area is blurred when a user clicks on an element outside the area.
+            var handle_document_click = function(e)  {
+                var check_click = function(tag_area, target) {
+                    var tag_area_id = tag_area.attr("id");
+                    // Blur the tag area if the element clicked on is not in the tag area.
+                    if (target !== tag_area) {
+                        tag_area.blur();
+                        $(window).unbind("click.tagging_blur");
+                        $(this).addClass("tooltip");
+                    }
+                };
+                check_click(tag_area, $(e.target));
+            };
+            // TODO: we should attach the click handler to all frames in order to capture
+            // clicks outside the frame that this element is in.
+            //window.parent.document.onclick = handle_document_click;
+            //var temp = $(window.parent.document.body).contents().find("iframe").html();
+            //alert(temp);
+            //$(document).parent().click(handle_document_click);
+            $(window).bind("click.tagging_blur", handle_document_click);
+
+            return false;
+        });
+    }
+
+    // If using toggle link, hide the tag area. Otherwise, show the tag area.
+    if (settings.use_toggle_link) {
+        tag_area.hide();
+    }
+
+    //
+    // Helper functions.
+    //
+
+    //
+    // Collapse tag name + value into a single string.
+    //
+    function build_tag_str(tag_name, tag_value) {
+        return tag_name + ( tag_value ? ":" + tag_value : "");
+    }
+
+
+    // Initialize a "delete tag image": when click, delete tag from UI and send delete request to server.
+    function init_delete_tag_image(delete_img) {
+        $(delete_img).mouseenter( function () {
+            $(this).attr("src", settings.delete_tag_img_rollover);
+        });
+        $(delete_img).mouseleave( function () {
+            $(this).attr("src", settings.delete_tag_img);
+        });
+        $(delete_img).click( function () {
+            // Tag button is image's parent.
+            var tag_button = $(this).parent();
+
+            // Get tag name, value.
+            var tag_name_elt = tag_button.find(".tag-name").eq(0);
+            var tag_str = tag_name_elt.text();
+            var tag_name_and_value = tag_str.split(":");
+            var tag_name = tag_name_and_value[0];
+            var tag_value = tag_name_and_value[1];
+
+            var prev_button = tag_button.prev();
+            tag_button.remove();
+
+            // Remove tag from local list for consistency.
+            delete settings.tags[tag_name];
+
+            // Update toggle link text.
+            var new_text = settings.get_toggle_link_text_fn(settings.tags);
+            toggle_link.text(new_text);
+
+            // Delete tag.
+            $.ajax({
+                url: settings.ajax_delete_tag_url,
+                data: { tag_name: tag_name },
+                error: function() {
+                    // Failed. Roll back changes and show alert.
+                    settings.tags[tag_name] = tag_value;
+                    if (prev_button.hasClass("tag-button")) {
+                        prev_button.after(tag_button);
+                    } else {
+                        tag_area.prepend(tag_button);
+                    }
+                    alert( "Remove tag failed" );
+
+                    toggle_link.text(settings.get_toggle_link_text_fn(settings.tags));
+
+                    // TODO: no idea why it's necessary to set this up again.
+                    delete_img.mouseenter( function () {
+                        $(this).attr("src", settings.delete_tag_img_rollover);
+                    });
+                    delete_img.mouseleave( function () {
+                        $(this).attr("src", settings.delete_tag_img);
+                    });
+                },
+                success: function() {}
+            });
+
+            return true;
+        });
+    }
+
+    //
+    // Function that builds a tag button.
+    //
+    function build_tag_button(tag_str) {
+        // Build "delete tag" image.
+        var delete_img = $("<img/>").attr("src", settings.delete_tag_img).addClass("delete-tag-img");
+        init_delete_tag_image(delete_img);
+
+        // Build tag button.
+        var tag_name_elt = $("<span>").text(tag_str).addClass("tag-name");
+        tag_name_elt.click( function() {
+            var tag_name_and_value = tag_str.split(":");
+            settings.tag_click_fn(tag_name_and_value[0], tag_name_and_value[1]);
+            return true;
+        });
+
+        var tag_button = $("<span></span>").addClass("tag-button");
+        tag_button.append(tag_name_elt);
+        // Allow delete only if element is editable.
+        if (settings.editable) {
+            tag_button.append(delete_img);
+        }
+
+        return tag_button;
+    }
+
+};
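+
+// A minimal usage sketch (hypothetical element id, tag, and urls, not from this
+// codebase); the element is assumed to already contain the .tag-area,
+// .toggle-link, .tag-input, and .add-tag-button markup this plugin expects:
+//
+//   $("#history-tags").autocomplete_tagging({
+//       tags: { "genome": "hg19" },
+//       ajax_autocomplete_tag_url: "/tag/autocomplete",
+//       ajax_add_tag_url: "/tag/add",
+//       ajax_delete_tag_url: "/tag/delete"
+//   });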
+
+// ============================================================================
+    return init_tag_click_function;
+}));
diff --git a/client/galaxy/scripts/ui/editable-text.js b/client/galaxy/scripts/ui/editable-text.js
new file mode 100644
index 0000000..3625635
--- /dev/null
+++ b/client/galaxy/scripts/ui/editable-text.js
@@ -0,0 +1,119 @@
+// from: https://raw.githubusercontent.com/umdjs/umd/master/jqueryPlugin.js
+// Uses AMD or browser globals to create a jQuery plugin.
+(function (factory) {
+    if (typeof define === 'function' && define.amd) {
+        //TODO: So...this turns out to be an all or nothing thing. If I load jQuery in the define below, it will
+        //  (of course) wipe the old jquery *and all the plugins loaded into it*. So the define below *is still
+        //  relying on jquery being loaded globally* in order to preserve plugins.
+        define([ 'jquery' ], factory);
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+
+}(function ( jQuery ) {
+'use strict';
+
+var $ = jQuery;
+
+// ============================================================================
+/**
+ * Make an element with text editable: (a) when user clicks on text, a textbox/area
+ * is provided for editing; (b) when enter key pressed, element's text is set and on_finish
+ * is called.
+ */
+$.fn.make_text_editable = function(config_dict) {
+    // Get config options.
+    var num_cols = ("num_cols" in config_dict ? config_dict.num_cols : 30),
+        num_rows = ("num_rows" in config_dict ? config_dict.num_rows : 4),
+        use_textarea = ("use_textarea" in config_dict ? config_dict.use_textarea : false),
+        on_finish = ("on_finish" in config_dict ? config_dict.on_finish : null),
+        help_text = ("help_text" in config_dict ? config_dict.help_text : null);
+
+    // Add element behavior.
+    var container = $(this);
+    container.addClass("editable-text").click(function(e) {
+        // If there's already an input element, editing is active, so do nothing.
+        if ($(this).children(":input").length > 0) {
+            return;
+        }
+
+        container.removeClass("editable-text");
+
+        // Handler for setting element text.
+        var set_text = function(new_text) {
+            container.find(":input").remove();
+
+            if (new_text !== "") {
+                container.text(new_text);
+            } else {
+                // No text; need a line so that there is a click target.
+                container.html("<br>");
+            }
+            container.addClass("editable-text");
+
+            if (on_finish) {
+                on_finish(new_text);
+            }
+        };
+
+        // Create input element(s) for editing.
+        var cur_text = ("cur_text" in config_dict ? config_dict.cur_text : container.text() ),
+            input_elt, button_elt;
+
+        if (use_textarea) {
+            input_elt = $("<textarea/>")
+                .attr({ rows: num_rows, cols: num_cols }).text($.trim(cur_text))
+                .keyup(function(e) {
+                    if (e.keyCode === 27) {
+                        // Escape key.
+                        set_text(cur_text);
+                    }
+                });
+            button_elt = $("<button/>").text("Done").click(function() {
+                set_text(input_elt.val());
+                // Return false so that click does not propagate to container.
+                return false;
+            });
+        }
+        else {
+            input_elt = $("<input type='text'/>").attr({ value: $.trim(cur_text), size: num_cols })
+            .blur(function() {
+                set_text(cur_text);
+            }).keyup(function(e) {
+                if (e.keyCode === 27) {
+                    // Escape key.
+                    $(this).trigger("blur");
+                } else if (e.keyCode === 13) {
+                    // Enter key.
+                    set_text($(this).val());
+                }
+
+                // Do not propagate event to avoid unwanted side effects.
+                e.stopPropagation();
+            });
+        }
+
+        // Replace text with input object(s) and focus & select.
+        container.text("");
+        container.append(input_elt);
+        if (button_elt) {
+            container.append(button_elt);
+        }
+        input_elt.focus();
+        input_elt.select();
+
+        // Do not propagate to elements below because that blurs the input and prevents it from being used.
+        e.stopPropagation();
+    });
+
+    // Add help text if there is some.
+    if (help_text) {
+        container.attr("title", help_text).tooltip();
+    }
+
+    return container;
+};
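+
+// A minimal usage sketch (hypothetical selector and callback, not from this
+// codebase):
+//
+//   $(".annotation").make_text_editable({
+//       use_textarea: true,
+//       num_rows: 3,
+//       help_text: "Click to edit annotation",
+//       on_finish: function(new_text) { console.log("saved:", new_text); }
+//   });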
+
+// ============================================================================
+}));
diff --git a/client/galaxy/scripts/ui/fa-icon-button.js b/client/galaxy/scripts/ui/fa-icon-button.js
new file mode 100644
index 0000000..715d2b1
--- /dev/null
+++ b/client/galaxy/scripts/ui/fa-icon-button.js
@@ -0,0 +1,48 @@
+(function (root, factory) {
+    if (typeof define === 'function' && define.amd) {
+        define([], factory);
+    } else {
+        root.faIconButton = factory();
+    }
+
+}(this, function () {
+//============================================================================
+    /** Returns a jQuery object containing a clickable font-awesome button.
+     *      options:
+     *          tooltipConfig   : option map for bootstrap tool tip
+     *          classes         : array of class names (will always be classed as icon-btn)
+     *          disabled        : T/F - add the 'disabled' class?
+     *          title           : tooltip/title string
+     *          target          : optional href target
+     *          href            : optional href
+     *          faIcon          : which font awesome icon to use
+     *          onclick         : function to call when the button is clicked
+     */
+    var faIconButton = function( options ){
+        options = options || {};
+        options.tooltipConfig = options.tooltipConfig || { placement: 'bottom' };
+
+        options.classes = [ 'icon-btn' ].concat( options.classes || [] );
+        if( options.disabled ){
+            options.classes.push( 'disabled' );
+        }
+
+        var html = [
+            '<a class="', options.classes.join( ' ' ), '"',
+                    (( options.title )?( ' title="' + options.title + '"' ):( '' )),
+                    (( !options.disabled && options.target )?  ( ' target="' + options.target + '"' ):( '' )),
+                    ' href="', (( !options.disabled && options.href )?( options.href ):( 'javascript:void(0);' )), '">',
+                // could go with something less specific here - like 'html'
+                '<span class="fa ', options.faIcon, '"></span>',
+            '</a>'
+        ].join( '' );
+        var $button = $( html ).tooltip( options.tooltipConfig );
+        if( _.isFunction( options.onclick ) ){
+            $button.click( options.onclick );
+        }
+        return $button;
+    };
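+
+    // A minimal usage sketch (hypothetical icon, handler, and container, not
+    // from this codebase):
+    //
+    //   var $refresh = faIconButton({
+    //       title   : 'Refresh',
+    //       faIcon  : 'fa-refresh',
+    //       onclick : function(){ window.location.reload(); }
+    //   });
+    //   $( '.controls' ).append( $refresh );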
+
+//============================================================================
+    return faIconButton;
+}));
diff --git a/client/galaxy/scripts/ui/filter-control.js b/client/galaxy/scripts/ui/filter-control.js
new file mode 100644
index 0000000..a398acb
--- /dev/null
+++ b/client/galaxy/scripts/ui/filter-control.js
@@ -0,0 +1,204 @@
+// from: https://raw.githubusercontent.com/umdjs/umd/master/jqueryPlugin.js
+// Uses AMD or browser globals to create a jQuery plugin.
+(function (factory) {
+    if (typeof define === 'function' && define.amd) {
+        // AMD. Register as an anonymous module.
+        define(['jquery'], factory);
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+
+}(function ($) {
+	//==============================================================================
+	/**
+	 *  Template function that produces a bootstrap dropdown to replace the
+	 *  vanilla HTML select input. Pass in an array of options and an initial selection:
+	 *  $( '.my-div' ).append( dropDownSelect( [ 'option1', 'option2' ], 'option2' ) );
+	 *
+	 *  When the user changes the selected option a 'change.dropdown-select' event will
+	 *  fire with both the jq event and the new selection text as arguments.
+	 *
+	 *  Get the currently selected choice using:
+	 *  var userChoice = $( '.my-div .dropdown-select .dropdown-select-selected' ).text();
+	 *
+	 */
+	function dropDownSelect( options, selected ){
+		// replacement for vanilla select element using bootstrap dropdowns instead
+		selected = selected || (( !_.isEmpty( options ) )?( options[0] ):( '' ));
+		var $select = $([
+				'<div class="dropdown-select btn-group">',
+					'<button type="button" class="btn btn-default">',
+						'<span class="dropdown-select-selected">' + selected + '</span>',
+					'</button>',
+				'</div>'
+			].join( '\n' ));
+
+		// if there's only one option, leave the plain button showing that selection;
+		// otherwise, turn the button into a dropdown listing all the options
+		if( options && options.length > 1 ){
+			$select.find( 'button' )
+				.addClass( 'dropdown-toggle' ).attr( 'data-toggle', 'dropdown' )
+				.append( ' <span class="caret"></span>' );
+			$select.append([
+				'<ul class="dropdown-menu" role="menu">',
+					_.map( options, function( option ){
+						return [
+							'<li><a href="javascript:void(0)">', option, '</a></li>'
+						].join( '' );
+					}).join( '\n' ),
+				'</ul>'
+			].join( '\n' ));
+		}
+
+		// trigger 'change.dropdown-select' when a new selection is made using the dropdown
+		function selectThis( event ){
+			var $this = $( this ),
+				$select = $this.parents( '.dropdown-select' ),
+				newSelection = $this.text();
+			$select.find( '.dropdown-select-selected' ).text( newSelection );
+			$select.trigger( 'change.dropdown-select', newSelection );
+		}
+
+		$select.find( 'a' ).click( selectThis );
+		return $select;
+	}
+
+	//==============================================================================
+    /**
+     *  Creates a three part bootstrap button group (key, op, value) meant to
+     *  allow the user control of filters (e.g. { key: 'name', op: 'contains', value: 'my_history' })
+     *
+     *  Each field uses a dropDownSelect (from ui.js) to allow selection
+     *  (with the 'value' field appearing as an input when set to do so).
+     *
+     *  Any change or update in any of the fields will trigger a 'change.filter-control'
+     *  event which will be passed an object containing those fields (as the example above).
+     *
+     *  Pass in an array of possible filter objects to control what the user can select.
+     *  Each filter object should have:
+     *      key : generally the attribute name on which to filter something
+     *      ops : an array of 1 or more filter operations (e.g. [ 'is', '<', 'contains', '!=' ])
+     *      values (optional) : an array of possible values for the filter (e.g. [ 'true', 'false' ])
+     *  @example:
+     *  $( '.my-div' ).filterControl({
+     *      filters : [
+     *          { key: 'name',    ops: [ 'is exactly', 'contains' ] },
+     *          { key: 'deleted', ops: [ 'is' ], values: [ 'true', 'false' ] }
+     *      ]
+     *  });
+     *  // after initialization, you can prog. get the current value using:
+     *  $( '.my-div' ).filterControl( 'val' )
+     *
+     */
+    function FilterControl( element, options ){
+		return this.init( element, options );
+    }
+    /** the data key that this object will be stored under in the DOM element */
+	FilterControl.prototype.DATA_KEY = 'filter-control';
+
+    /** parses options, sets up instance vars, and does initial render */
+	FilterControl.prototype.init = function _init( element, options ){
+		options = options || { filters: [] };
+		this.$element = $( element ).addClass( 'filter-control btn-group' );
+		this.options = jQuery.extend( true, {}, this.defaults, options );
+
+        this.currFilter = this.options.filters[0];
+		return this.render();
+	};
+
+    /** render (or re-render) the controls on the element */
+	FilterControl.prototype.render = function _render(){
+        this.$element.empty()
+            .append([ this._renderKeySelect(), this._renderOpSelect(), this._renderValueInput() ]);
+        return this;
+    };
+
+    /** render the key dropDownSelect, bind a change event to it, and return it */
+	FilterControl.prototype._renderKeySelect = function __renderKeySelect(){
+        var filterControl = this;
+        var keys = this.options.filters.map( function( filter ){
+            return filter.key;
+        });
+        this.$keySelect = dropDownSelect( keys, this.currFilter.key )
+            .addClass( 'filter-control-key' )
+            .on( 'change.dropdown-select', function( event, selection ){
+                filterControl.currFilter = _.findWhere( filterControl.options.filters, { key: selection });
+                // when the filter/key changes, re-render the control entirely
+                filterControl.render()._triggerChange();
+            });
+        return this.$keySelect;
+    };
+
+    /** render the op dropDownSelect, bind a change event to it, and return it */
+	FilterControl.prototype._renderOpSelect = function __renderOpSelect(){
+        var filterControl = this,
+            ops = this.currFilter.ops;
+        //TODO: search for currOp in avail. ops: use that for selected if there; otherwise: first op
+        this.$opSelect = dropDownSelect( ops, ops[0] )
+            .addClass( 'filter-control-op' )
+            .on( 'change.dropdown-select', function( event, selection ){
+                filterControl._triggerChange();
+            });
+        return this.$opSelect;
+    };
+
+    /** render the value control, bind a change event to it, and return it */
+	FilterControl.prototype._renderValueInput = function __renderValueInput(){
+        var filterControl = this;
+        // if a values attribute is provided on the filter, make this a dropdown; otherwise, use an input
+        if( this.currFilter.values ){
+            this.$valueSelect = dropDownSelect( this.currFilter.values, this.currFilter.values[0] )
+                .on( 'change.dropdown-select', function( event, selection ){
+                    filterControl._triggerChange();
+                });
+        } else {
+            //TODO: allow setting a value type (mainly for which html5 input to use: range, number, etc.)
+            this.$valueSelect = $( '<input/>' ).addClass( 'form-control' )
+                .on( 'change', function( event, value ){
+                    filterControl._triggerChange();
+                });
+        }
+        this.$valueSelect.addClass( 'filter-control-value' );
+        return this.$valueSelect;
+    };
+
+    /** return the current state/setting for the filter as a three key object: key, op, value */
+	FilterControl.prototype.val = function _val(){
+        var key = this.$element.find( '.filter-control-key .dropdown-select-selected' ).text(),
+            op  = this.$element.find( '.filter-control-op .dropdown-select-selected'  ).text(),
+            // handle either a dropdown or plain input
+            $value = this.$element.find( '.filter-control-value' ),
+            value = ( $value.hasClass( 'dropdown-select' ) )?( $value.find( '.dropdown-select-selected' ).text() )
+                                                            :( $value.val() );
+        return { key: key, op: op, value: value };
+    };
+
+    // single point of change for change event
+	FilterControl.prototype._triggerChange = function __triggerChange(){
+        this.$element.trigger( 'change.filter-control', this.val() );
+    };
+
+    // as jq plugin
+    jQuery.fn.extend({
+        filterControl : function $filterControl( options ){
+			var nonOptionsArgs = jQuery.makeArray( arguments ).slice( 1 );
+            return this.map( function(){
+				var $this = $( this ),
+					data = $this.data( FilterControl.prototype.DATA_KEY );
+
+				if( jQuery.type( options ) === 'object' ){
+					data = new FilterControl( $this, options );
+					$this.data( FilterControl.prototype.DATA_KEY, data );
+				}
+				if( data && jQuery.type( options ) === 'string' ){
+					var fn = data[ options ];
+					if( jQuery.type( fn ) === 'function' ){
+						return fn.apply( data, nonOptionsArgs );
+					}
+				}
+				return this;
+            });
+        }
+    });
+}));
diff --git a/client/galaxy/scripts/ui/hoverhighlight.js b/client/galaxy/scripts/ui/hoverhighlight.js
new file mode 100644
index 0000000..be15d9e
--- /dev/null
+++ b/client/galaxy/scripts/ui/hoverhighlight.js
@@ -0,0 +1,37 @@
+(function (factory) {
+    if (typeof define === 'function' && define.amd) {
+        define([], factory);
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+
+}(function () {
+//=============================================================================
+
+    jQuery.fn.extend({
+        hoverhighlight : function $hoverhighlight( scope, color ){
+            scope = scope || 'body';
+            if( !this.length ){ return this; }
+
+            $( this ).each( function(){
+                var $this = $( this ),
+                    targetSelector = $this.data( 'target' );
+
+                if( targetSelector ){
+                    $this.mouseover( function( ev ){
+                        $( targetSelector, scope ).css({
+                            background: color
+                        });
+                    })
+                    .mouseout( function( ev ){
+                        $( targetSelector ).css({
+                            background: ''
+                        });
+                    });
+                }
+            });
+            return this;
+        }
+    });
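+
+    // A minimal usage sketch (hypothetical markup, not from this codebase):
+    // each matched element names the elements to highlight via a data-target
+    // selector, e.g. <a class="nav-item" data-target="#history-panel">History</a>
+    //
+    //   $( '.nav-item' ).hoverhighlight( 'body', '#eeeeee' );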
+}));
diff --git a/client/galaxy/scripts/ui/loading-indicator.js b/client/galaxy/scripts/ui/loading-indicator.js
new file mode 100644
index 0000000..c6b7bc9
--- /dev/null
+++ b/client/galaxy/scripts/ui/loading-indicator.js
@@ -0,0 +1,98 @@
+(function (root, factory) {
+    if (typeof define === 'function' && define.amd) {
+        // AMD. Register as an anonymous module.
+        define([], factory);
+    } else {
+        // Browser globals
+        root.LoadingIndicator = factory();
+    }
+
+//============================================================================
+}(this, function () {
+    //TODO: too specific to history panel
+    function LoadingIndicator( $where, options ){
+
+        var self = this;
+        // defaults
+        options = jQuery.extend({
+            cover       : false
+        }, options || {} );
+
+        function render(){
+            var html = [
+                '<div class="loading-indicator">',
+                    '<div class="loading-indicator-text">',
+                        '<span class="fa fa-spinner fa-spin fa-lg"></span>',
+                        '<span class="loading-indicator-message">loading...</span>',
+                    '</div>',
+                '</div>'
+            ].join( '\n' );
+
+            var $indicator = $( html ).hide().css( options.css || {
+                    position    : 'fixed'
+                }),
+                $text = $indicator.children( '.loading-indicator-text' );
+
+            if( options.cover ){
+                $indicator.css({
+                    'z-index'   : 2,
+                    top         : $where.css( 'top' ),
+                    bottom      : $where.css( 'bottom' ),
+                    left        : $where.css( 'left' ),
+                    right       : $where.css( 'right' ),
+                    opacity     : 0.5,
+                    'background-color': 'white',
+                    'text-align': 'center'
+                });
+                $text = $indicator.children( '.loading-indicator-text' ).css({
+                    'margin-top'        : '20px'
+                });
+
+            } else {
+                $text = $indicator.children( '.loading-indicator-text' ).css({
+                    margin              : '12px 0px 0px 10px',
+                    opacity             : '0.85',
+                    color               : 'grey'
+                });
+                $text.children( '.loading-indicator-message' ).css({
+                    margin          : '0px 8px 0px 0px',
+                    'font-style'    : 'italic'
+                });
+            }
+            return $indicator;
+        }
+
+        self.show = function( msg, speed, callback ){
+            msg = msg || 'loading...';
+            speed = speed || 'fast';
+            // remove previous
+            $where.parent().find( '.loading-indicator' ).remove();
+            // since position is fixed - we insert as sibling
+            self.$indicator = render().insertBefore( $where );
+            self.message( msg );
+            self.$indicator.fadeIn( speed, callback );
+            return self;
+        };
+
+        self.message = function( msg ){
+            self.$indicator.find( '.loading-indicator-message' ).text( msg );
+        };
+
+        self.hide = function( speed, callback ){
+            speed = speed || 'fast';
+            if( self.$indicator && self.$indicator.length ){
+                self.$indicator.fadeOut( speed, function(){
+                    self.$indicator.remove();
+                    if( callback ){ callback(); }
+                });
+            } else {
+                if( callback ){ callback(); }
+            }
+            return self;
+        };
+        return self;
+    }
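+
+    // A minimal usage sketch (hypothetical panel element, not from this
+    // codebase):
+    //
+    //   var indicator = new LoadingIndicator( $( '#history-panel' ), { cover: true } );
+    //   indicator.show( 'loading history...' );
+    //   // ... later, once the data has arrived:
+    //   indicator.hide();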
+
+//============================================================================
+    return LoadingIndicator;
+}));
diff --git a/client/galaxy/scripts/ui/mode-button.js b/client/galaxy/scripts/ui/mode-button.js
new file mode 100644
index 0000000..82a7e7d
--- /dev/null
+++ b/client/galaxy/scripts/ui/mode-button.js
@@ -0,0 +1,191 @@
+// from: https://raw.githubusercontent.com/umdjs/umd/master/jqueryPlugin.js
+// Uses AMD or browser globals to create a jQuery plugin.
+(function (factory) {
+    if (typeof define === 'function' && define.amd) {
+        //TODO: So...this turns out to be an all or nothing thing. If I load jQuery in the define below, it will
+        //  (of course) wipe the old jquery *and all the plugins loaded into it*. So the define below *is still
+        //  relying on jquery being loaded globally* in order to preserve plugins.
+        define([], factory);
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+
+}(function () {
+
+    /** Multi 'mode' button (or any element really) that changes the html
+     *      contents of itself when clicked. Pass in an ordered list of
+     *      objects with 'html' and (optional) onclick functions.
+     *
+     *      When clicked in a particular node, the onclick function will
+     *      be called (with the element as this) and the element will
+     *      switch to the next mode, replacing its html content with
+     *      that mode's html.
+     *
+     *      If there is no next mode, the element will switch back to
+     *      the first mode.
+     * @example:
+     *     $( '.myElement' ).modeButton({
+     *         modes : [
+     *             {
+     *                 mode: 'bler',
+     *                 html: '<h5>Bler</h5>',
+     *                 onclick : function(){
+     *                     $( 'body' ).css( 'background-color', 'red' );
+     *                 }
+     *             },
+     *             {
+     *                 mode: 'bloo',
+     *                 html: '<h4>Bloo</h4>',
+     *                 onclick : function(){
+     *                     $( 'body' ).css( 'background-color', 'blue' );
+     *                 }
+     *             },
+     *             {
+     *                 mode: 'blah',
+     *                 html: '<h3>Blah</h3>',
+     *                 onclick : function(){
+     *                     $( 'body' ).css( 'background-color', 'grey' );
+     *                 }
+     *             },
+     *         ]
+     *     });
+     *     $( '.myElement' ).modeButton( 'callModeFn', 'bler' );
+     */
+    /** constructor */
+    function ModeButton( element, options ){
+		this.currModeIndex = 0;
+		return this._init( element, options );
+    }
+
+    /** html5 data key to store this object inside an element */
+	ModeButton.prototype.DATA_KEY = 'mode-button';
+    /** default options */
+	ModeButton.prototype.defaults = {
+        switchModesOnClick : true
+	};
+
+    // ---- private interface
+    /** set up options, initial mode, and the click handler */
+	ModeButton.prototype._init = function _init( element, options ){
+        //console.debug( 'ModeButton._init:', element, options );
+		options = options || {};
+		this.$element = $( element );
+		this.options = $.extend( true, {}, this.defaults, options );
+        if( !options.modes ){
+            throw new Error( 'ModeButton requires a "modes" array' );
+        }
+
+		var modeButton = this;
+		this.$element.click( function _ModeButtonClick( event ){
+			// call the curr mode fn
+			modeButton.callModeFn();
+			// inc the curr mode index
+			if( modeButton.options.switchModesOnClick ){ modeButton._incModeIndex(); }
+			// set the element html
+			$( this ).html( modeButton.options.modes[ modeButton.currModeIndex ].html );
+		});
+		return this.reset();
+	};
+    /** increment the mode index to the next in the array, looping back to zero if at the last */
+	ModeButton.prototype._incModeIndex = function _incModeIndex(){
+		this.currModeIndex += 1;
+		if( this.currModeIndex >= this.options.modes.length ){
+			this.currModeIndex = 0;
+		}
+		return this;
+	};
+    /** get the mode index in the modes array for the given key (mode name) */
+	ModeButton.prototype._getModeIndex = function _getModeIndex( modeKey ){
+		for( var i=0; i<this.options.modes.length; i+=1 ){
+			if( this.options.modes[ i ].mode === modeKey ){ return i; }
+		}
+		throw new Error( 'mode not found: ' + modeKey );
+	};
+    /** set the current mode to the one with the given index and set button html */
+	ModeButton.prototype._setModeByIndex = function _setModeByIndex( index ){
+        var newMode = this.options.modes[ index ];
+        if( !newMode ){
+            throw new Error( 'mode index not found: ' + index );
+        }
+        this.currModeIndex = index;
+        if( newMode.html ){
+            this.$element.html( newMode.html );
+        }
+		return this;
+	};
+
+    // ---- public interface
+    /** get the current mode object (not just the mode name) */
+	ModeButton.prototype.currentMode = function currentMode(){
+		return this.options.modes[ this.currModeIndex ];
+	};
+    /** return the mode key of the current mode */
+	ModeButton.prototype.current = function current(){
+        // sugar for returning mode name
+		return this.currentMode().mode;
+	};
+    /** get the mode with the given modeKey or the current mode if modeKey is undefined */
+	ModeButton.prototype.getMode = function getMode( modeKey ){
+		if( !modeKey ){ return this.currentMode(); }
+		return this.options.modes[ this._getModeIndex( modeKey ) ];
+	};
+    /** T/F if the button has the given mode */
+	ModeButton.prototype.hasMode = function hasMode( modeKey ){
+        try {
+            return !!this.getMode( modeKey );
+        } catch( err ){}
+        return false;
+	};
+    /** set the current mode to the mode with the given name */
+	ModeButton.prototype.setMode = function setMode( modeKey ){
+        return this._setModeByIndex( this._getModeIndex( modeKey ) );
+	};
+    /** reset to the initial mode */
+	ModeButton.prototype.reset = function reset(){
+		this.currModeIndex = 0;
+		if( this.options.initialMode ){
+			this.currModeIndex = this._getModeIndex( this.options.initialMode );
+		}
+        return this._setModeByIndex( this.currModeIndex );
+	};
+    /** manually call the click handler of the given mode */
+	ModeButton.prototype.callModeFn = function callModeFn( modeKey ){
+		var modeFn = this.getMode( modeKey ).onclick;
+		if( modeFn && $.type( modeFn ) === 'function' ){
+            // call with the element as context (std jquery pattern)
+			return modeFn.call( this.$element.get(0) );
+		}
+		return undefined;
+	};
+
+    // as jq plugin
+    $.fn.modeButton = function $modeButton( options ){
+        if( !this.length ){ return this; }
+
+        //TODO: does map still work with jq multi selection (i.e. $( '.class-for-many-btns' ).modeButton)?
+        if( $.type( options ) === 'object' ){
+            return this.map( function(){
+                var $this = $( this );
+                $this.data( 'mode-button', new ModeButton( $this, options ) );
+                return this;
+            });
+        }
+
+        var $first = $( this[0] ),
+            button = $first.data( 'mode-button' );
+
+        if( !button ){
+            throw new Error( 'modeButton needs an options object or string name of a function' );
+        }
+
+        if( button && $.type( options ) === 'string' ){
+            var fnName = options;
+            if( button && $.type( button[ fnName ] ) === 'function' ){
+                return button[ fnName ].apply( button, $.makeArray( arguments ).slice( 1 ) );
+            }
+        }
+        return button;
+    };
+
+}));
diff --git a/client/galaxy/scripts/ui/pagination.js b/client/galaxy/scripts/ui/pagination.js
new file mode 100644
index 0000000..5edc898
--- /dev/null
+++ b/client/galaxy/scripts/ui/pagination.js
@@ -0,0 +1,226 @@
+// from: https://raw.githubusercontent.com/umdjs/umd/master/jqueryPlugin.js
+// Uses AMD or browser globals to create a jQuery plugin.
+(function (factory) {
+    if (typeof define === 'function' && define.amd) {
+        // AMD. Register as an anonymous module.
+        define(['jquery'], factory);
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+
+}(function ($) {
+
+    /** Builds (twitter bootstrap styled) pagination controls.
+     *  If the totalDataSize is not null, a horizontal list of page buttons is displayed.
+     *  If totalDataSize is null, two links ('Prev' and 'Next') are displayed.
+     *  When pages are changed, a 'pagination.page-change' event is fired
+     *      sending the event and the (0-based) page requested.
+     */
+    function Pagination( element, options ){
+        /** the total number of pages */
+        this.numPages = null;
+        /** the current, active page */
+		this.currPage = 0;
+		return this.init( element, options );
+    }
+
+    /** data key under which this object will be stored in the element */
+	Pagination.prototype.DATA_KEY = 'pagination';
+    /** default options */
+    Pagination.prototype.defaults = {
+        /** which page to begin at */
+        startingPage    : 0,
+        /** number of data rows per page */
+        perPage         : 20,
+        /** the total number of data rows (null == unknown) */
+        totalDataSize   : null,
+        /** number of data rows on the current page */
+        currDataSize    : null
+	};
+
+    /** init the control, calc numPages if possible, and render
+     *  @param {jQuery} $element the element that will contain the pagination control
+     *  @param {Object} options a map containing overrides to the pagination default options
+     */
+	Pagination.prototype.init = function _init( $element, options ){
+		options = options || {};
+		this.$element = $element;
+		this.options = jQuery.extend( true, {}, this.defaults, options );
+
+        this.currPage = this.options.startingPage;
+        if( this.options.totalDataSize !== null ){
+            this.numPages = Math.ceil( this.options.totalDataSize / this.options.perPage );
+            // limit currPage by numPages
+            if( this.currPage >= this.numPages ){
+                this.currPage = this.numPages - 1;
+            }
+        }
+        //console.debug( 'Pagination.prototype.init:', this.$element, this.currPage );
+        //console.debug( JSON.stringify( this.options ) );
+
+        // bind to data of element
+        this.$element.data( Pagination.prototype.DATA_KEY, this );
+
+        this._render();
+		return this;
+	};
+
+    /** helper to create a simple li + a combo */
+    function _make$Li( contents ){
+        return $([
+            '<li><a href="javascript:void(0);">', contents, '</a></li>'
+        ].join( '' ));
+    }
+
+    /** render the pagination control: page links if the page count is known, prev/next otherwise */
+    Pagination.prototype._render = function __render(){
+        // no data - no pagination
+        if( this.options.totalDataSize === 0 ){ return this; }
+        // only one page
+        if( this.numPages === 1 ){ return this; }
+
+        // when the number of pages is known, render each page as a link
+        if( this.numPages > 0 ){
+            this._renderPages();
+            this._scrollToActivePage();
+
+        // when the number of pages is not known, render previous or next
+        } else {
+            this._renderPrevNext();
+        }
+		return this;
+    };
+
+    /** render previous and next pagination buttons */
+    Pagination.prototype._renderPrevNext = function __renderPrevNext(){
+        var pagination = this,
+            $prev = _make$Li( 'Prev' ),
+            $next = _make$Li( 'Next' ),
+            $paginationContainer = $( '<ul/>' ).addClass( 'pagination pagination-prev-next' );
+
+        // disable if at either end
+        if( this.currPage === 0 ){
+            $prev.addClass( 'disabled' );
+        } else {
+            $prev.click( function(){ pagination.prevPage(); });
+        }
+        if( ( this.numPages && this.currPage === ( this.numPages - 1 ) )
+        ||  ( this.options.currDataSize && this.options.currDataSize < this.options.perPage ) ){
+            $next.addClass( 'disabled' );
+        } else {
+            $next.click( function(){ pagination.nextPage(); });
+        }
+
+        this.$element.html( $paginationContainer.append([ $prev, $next ]) );
+        //console.debug( this.$element, this.$element.html() );
+        return this.$element;
+    };
+
+    /** render page links for each possible page (if we can) */
+    Pagination.prototype._renderPages = function __renderPages(){
+        // it's better to scroll the control and let the user see all pages
+        //  than to force her/him to change pages in order to find the one they want (as traditional << >> does)
+        var pagination = this,
+            $scrollingContainer = $( '<div>' ).addClass( 'pagination-scroll-container' ),
+            $paginationContainer = $( '<ul/>' ).addClass( 'pagination pagination-page-list' ),
+            page$LiClick = function( ev ){
+                pagination.goToPage( $( this ).data( 'page' ) );
+            };
+
+        for( var i=0; i<this.numPages; i+=1 ){
+            // add html5 data tag 'page' for later click event handler use
+            var $pageLi = _make$Li( i + 1 ).attr( 'data-page', i ).click( page$LiClick );
+            // highlight the current page
+            if( i === this.currPage ){
+                $pageLi.addClass( 'active' );
+            }
+            //console.debug( '\t', $pageLi );
+            $paginationContainer.append( $pageLi );
+        }
+        return this.$element.html( $scrollingContainer.html( $paginationContainer ) );
+    };
+
+    /** scroll scroll-container (if any) to show the active page */
+    Pagination.prototype._scrollToActivePage = function __scrollToActivePage(){
+        // scroll to show active page in center of scrollable area
+        var $container = this.$element.find( '.pagination-scroll-container' );
+        // no scroll container : don't scroll
+        if( !$container.length ){ return this; }
+
+        var $activePage = this.$element.find( 'li.active' ),
+            midpoint = $container.width() / 2;
+        //console.debug( $container, $activePage, midpoint );
+        $container.scrollLeft( $container.scrollLeft() + $activePage.position().left - midpoint );
+        return this;
+    };
+
+    /** go to a certain page */
+    Pagination.prototype.goToPage = function goToPage( page ){
+        if( page <= 0 ){ page = 0; }
+        if( this.numPages && page >= this.numPages ){ page = this.numPages - 1; }
+        if( page === this.currPage ){ return this; }
+
+        //console.debug( '\t going to page ' + page )
+        this.currPage = page;
+        this.$element.trigger( 'pagination.page-change', this.currPage );
+        //console.info( 'pagination:page-change', this.currPage );
+        this._render();
+        return this;
+    };
+
+    /** go to the previous page */
+    Pagination.prototype.prevPage = function prevPage(){
+        return this.goToPage( this.currPage - 1 );
+    };
+
+    /** go to the next page */
+    Pagination.prototype.nextPage = function nextPage(){
+        return this.goToPage( this.currPage + 1 );
+    };
+
+    /** return the current page */
+    Pagination.prototype.page = function page(){
+        return this.currPage;
+    };
+
+    // alternate constructor invocation
+    Pagination.create = function _create( $element, options ){
+        return new Pagination( $element, options );
+    };
+
+    // as jq plugin
+    jQuery.fn.extend({
+        pagination : function $pagination( options ){
+			var nonOptionsArgs = jQuery.makeArray( arguments ).slice( 1 );
+
+            // if passed an object - use that as an options map to create pagination for each selected
+            if( jQuery.type( options ) === 'object' ){
+                return this.map( function(){
+                    Pagination.create( $( this ), options );
+                    return this;
+                });
+            }
+
+            // (other invocations only work on the first element in selected)
+            var $firstElement = $( this[0] ),
+                previousControl = $firstElement.data( Pagination.prototype.DATA_KEY );
+            // if a pagination control was found for this element, either...
+            if( previousControl ){
+                // invoke a function on the pagination object if passed a string (the function name)
+                if( jQuery.type( options ) === 'string' ){
+                    var fn = previousControl[ options ];
+                    if( jQuery.type( fn ) === 'function' ){
+                        return fn.apply( previousControl, nonOptionsArgs );
+                    }
+
+                // if passed nothing, return the previously set control
+                } else {
+                    return previousControl;
+                }
+            }
+            // if there is no control already set, return undefined
+            return undefined;
+        }
+    });
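+
+    /* Example usage (a sketch; the selector and sizes are assumptions):
+     *     $( '#results-pager' ).pagination({ totalDataSize: 100, perPage: 20 })
+     *         .on( 'pagination.page-change', function( ev, newPage ){
+     *             console.debug( 'now on (0-based) page:', newPage );
+     *         });
+     */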
+}));
diff --git a/client/galaxy/scripts/ui/peek-column-selector.js b/client/galaxy/scripts/ui/peek-column-selector.js
new file mode 100644
index 0000000..6fb9c5f
--- /dev/null
+++ b/client/galaxy/scripts/ui/peek-column-selector.js
@@ -0,0 +1,317 @@
+// from: https://raw.githubusercontent.com/umdjs/umd/master/jqueryPlugin.js
+// Uses AMD or browser globals to create a jQuery plugin.
+(function (factory) {
+    if (typeof define === 'function' && define.amd) {
+        // AMD. Register as an anonymous module.
+        define(['jquery'], factory);
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+
+}(function ($) {
+    //==============================================================================
+    /** Column selection using the peek display as the control.
+     *  Adds rows to the bottom of the peek with clickable areas in each cell
+     *      to allow the user to select columns.
+     *  Column selection can be limited to a single column or multiple.
+     *  (Optionally) adds a left hand column of column selection prompts.
+     *  (Optionally) allows the column headers to be clicked/renamed
+     *      and set to some initial value.
+     *  (Optionally) hides comment rows.
+     *  (Optionally) allows pre-selecting and disabling certain columns for
+     *      each row control.
+     *
+     *  Construct by selecting a peek table to be used with jQuery and
+     *      calling 'peekColumnSelector' with options.
+     *  Options must include a 'controls' array and can include other options
+     *      listed below.
+     *  @example:
+     *  $( 'pre.peek' ).peekColumnSelector({
+     *          columnNames : ["Chromosome", "Start", "Base", "", "", "Qual" ],
+     *          controls : [
+     *              { label: 'X Column',  id: 'xColumn' },
+     *              { label: 'Y Column',  id: 'yColumn', selected: 2 },
+     *              { label: 'ID Column', id: 'idColumn', selected: 4, disabled: [ 1, 5 ] },
+     *              { label: 'Heatmap',   id: 'heatmap', selected: [ 2, 4 ], disabled: [ 0, 1 ], multiselect: true,
+     *                selectedText: 'Included', unselectedText: 'Excluded' }
+     *          ],
+     *          renameColumns       : true,
+     *          hideCommentRows     : true,
+     *          includePrompts      : true,
+     *          topLeftContent      : 'Data sample:'
+     *      }).on( 'peek-column-selector.change', function( ev, selection ){
+     *          console.info( 'new selection:', selection );
+     *          //{ yColumn: 2 }
+     *      }).on( 'peek-column-selector.rename', function( ev, names ){
+     *          console.info( 'column names', names );
+     *          //[ 'Bler', 'Start', 'Base', '', '', 'Qual' ]
+     *      });
+     *
+     *  An event is fired when column selection is changed and the event
+     *      is passed an object in the form: { the row id : the new selection value }.
+     *  An event is also fired when the table headers are re-named and
+     *      is passed the new array of column names.
+     */
+
+    /** option defaults */
+    var defaults = {
+            /** does this control allow renaming headers? */
+            renameColumns   : false,
+            /** initial column names to display in the headers */
+            columnNames     : [],
+            /** the comment character used by the peek's datatype */
+            commentChar     : '#',
+            /** should comment rows be shown or hidden in the peek */
+            hideCommentRows : false,
+            /** should a column of row control prompts be used */
+            includePrompts  : true,
+            /** what is the content of the top left cell (often a title) */
+            topLeftContent  : 'Columns:'
+        },
+        /** class added to the pre.peek element (to allow css on just the control) */
+        PEEKCONTROL_CLASS = 'peek-column-selector',
+        /** the string of the event fired when a control row changes */
+        CHANGE_EVENT   = 'peek-column-selector.change',
+        /** the string of the event fired when a column is renamed */
+        RENAME_EVENT   = 'peek-column-selector.rename',
+        /** class added to the control rows */
+        ROW_CLASS      = 'control',
+        /** class added to the left-hand cells that serve as row prompts */
+        PROMPT_CLASS   = 'control-prompt',
+        /** class added to selected _cells_/tds */
+        SELECTED_CLASS = 'selected',
+        /** class added to disabled/un-clickable cells/tds */
+        DISABLED_CLASS = 'disabled',
+        /** class added to the clickable surface within a cell to select it */
+        BUTTON_CLASS   = 'button',
+        /** class added to peek table header (th) cells to indicate they can be clicked and are renamable */
+        RENAMABLE_HEADER_CLASS = 'renamable-header',
+        /** the data key used for each cell to store the column index ('data-...') */
+        COLUMN_INDEX_DATA_KEY = 'column-index',
+        /** renamable header data key used to store the column name (w/o the number and dot: '1.Bler') */
+        COLUMN_NAME_DATA_KEY = 'column-name';
+
+    //TODO: not happy with pure functional here - rows should polymorph (multi, single, etc.)
+    //TODO: needs clean up, move handlers to outer scope
+
+    // ........................................................................
+    /** validate the control data sent in for each row */
+    function validateControl( control ){
+        if( control.disabled && jQuery.type( control.disabled ) !== 'array' ){
+            throw new Error( '"disabled" must be defined as an array of indeces: ' + JSON.stringify( control ) );
+        }
+        if( control.multiselect && control.selected && jQuery.type( control.selected ) !== 'array' ){
+            throw new Error( 'Multiselect rows need an array for "selected": ' + JSON.stringify( control ) );
+        }
+        if( !control.label || !control.id ){
+            throw new Error( 'Peek controls need a label and id for each control row: ' + JSON.stringify( control ) );
+        }
+        if( control.disabled && control.disabled.indexOf( control.selected ) !== -1 ){
+            throw new Error( 'Selected column is in the list of disabled columns: ' + JSON.stringify( control ) );
+        }
+        return control;
+    }
+
+    /** build the inner control surface (i.e. button-like) */
+    function buildButton( control, columnIndex ){
+        return $( '<div/>' ).addClass( BUTTON_CLASS ).text( control.label );
+    }
+
+    /** build the basic (shared) cell structure */
+    function buildControlCell( control, columnIndex ){
+        var $td = $( '<td/>' )
+            .html( buildButton( control, columnIndex ) )
+            .attr( 'data-' + COLUMN_INDEX_DATA_KEY, columnIndex );
+
+        // disable if index in disabled array
+        if( control.disabled && control.disabled.indexOf( columnIndex ) !== -1 ){
+            $td.addClass( DISABLED_CLASS );
+        }
+        return $td;
+    }
+
+    /** set the text of the control based on selected/un */
+    function setSelectedText( $cell, control, columnIndex ){
+        var $button = $cell.children( '.' + BUTTON_CLASS );
+        if( $cell.hasClass( SELECTED_CLASS ) ){
+            $button.html( ( control.selectedText !== undefined )?( control.selectedText ):( control.label ) );
+        } else {
+            $button.html( ( control.unselectedText !== undefined )?( control.unselectedText ):( control.label ) );
+        }
+    }
+
+    /** build a cell for a row that only allows one selection */
+    function buildSingleSelectCell( control, columnIndex ){
+        // only one selection - selected is single index
+        var $cell = buildControlCell( control, columnIndex );
+        if( control.selected === columnIndex ){
+            $cell.addClass( SELECTED_CLASS );
+        }
+        setSelectedText( $cell, control, columnIndex );
+
+        // only add the handler to non-disabled controls
+        if( !$cell.hasClass( DISABLED_CLASS ) ){
+            $cell.click( function selectClick( ev ){
+                var $cell = $( this );
+                // don't re-select or fire event if already selected
+                if( !$cell.hasClass( SELECTED_CLASS ) ){
+                    // only one can be selected - remove selected on all others, add it here
+                    var $otherSelected = $cell.parent().children( '.' + SELECTED_CLASS ).removeClass( SELECTED_CLASS );
+                    $otherSelected.each( function(){
+                        setSelectedText( $( this ), control, columnIndex );
+                    });
+
+                    $cell.addClass( SELECTED_CLASS );
+                    setSelectedText( $cell, control, columnIndex );
+
+                    // fire the event from the table itself, passing the id and index of selected
+                    var eventData = {},
+                        key = $cell.parent().attr( 'id' ),
+                        val = $cell.data( COLUMN_INDEX_DATA_KEY );
+                    eventData[ key ] = val;
+                    $cell.parents( '.peek' ).trigger( CHANGE_EVENT, eventData );
+                }
+            });
+        }
+        return $cell;
+    }
+
+    /** build a cell for a row that allows multiple selections */
+    function buildMultiSelectCell( control, columnIndex ){
+        var $cell = buildControlCell( control, columnIndex );
+        // multiple selection - selected is an array
+        if( control.selected && control.selected.indexOf( columnIndex ) !== -1 ){
+            $cell.addClass( SELECTED_CLASS );
+        }
+        setSelectedText( $cell, control, columnIndex );
+
+        // only add the handler to non-disabled controls
+        if( !$cell.hasClass( DISABLED_CLASS ) ){
+            $cell.click( function multiselectClick( ev ){
+                var $cell = $( this );
+                // can be more than one selected - toggle selected on this cell
+                $cell.toggleClass( SELECTED_CLASS );
+                setSelectedText( $cell, control, columnIndex );
+                var selectedColumnIndices = $cell.parent().find( '.' + SELECTED_CLASS ).map( function( i, e ){
+                    return $( e ).data( COLUMN_INDEX_DATA_KEY );
+                });
+                // fire the event from the table itself, passing the id and index of selected
+                var eventData = {},
+                    key = $cell.parent().attr( 'id' ),
+                    val = jQuery.makeArray( selectedColumnIndices );
+                eventData[ key ] = val;
+                $cell.parents( '.peek' ).trigger( CHANGE_EVENT, eventData );
+            });
+        }
+        return $cell;
+    }
+
+    /** iterate over columns in peek and create a control for each */
+    function buildControlCells( count, control ){
+        var $cells = [];
+        // build a control for each column - using a build fn based on control
+        for( var columnIndex=0; columnIndex<count; columnIndex+=1 ){
+            $cells.push( control.multiselect?  buildMultiSelectCell( control, columnIndex )
+                                            : buildSingleSelectCell( control, columnIndex ) );
+        }
+        return $cells;
+    }
+
+    /** build a row of controls for the peek */
+    function buildControlRow( cellCount, control, includePrompts ){
+        var $controlRow = $( '<tr/>' ).attr( 'id', control.id ).addClass( ROW_CLASS );
+        if( includePrompts ){
+            var $promptCell = $( '<td/>' ).addClass( PROMPT_CLASS ).text( control.label + ':' );
+            $controlRow.append( $promptCell );
+        }
+        $controlRow.append( buildControlCells( cellCount, control ) );
+        return $controlRow;
+    }
+
+    // ........................................................................
+    /** add to the peek, using options for configuration, return the peek */
+    function peekColumnSelector( options ){
+        options = jQuery.extend( true, {}, defaults, options );
+
+        var $peek = $( this ).addClass( PEEKCONTROL_CLASS ),
+            $peektable = $peek.find( 'table' ),
+            // get the size of the tables - width and height, number of comment rows
+            columnCount = $peektable.find( 'th' ).length,
+            rowCount = $peektable.find( 'tr' ).length,
+            // get the rows containing text starting with the comment char (also make them grey)
+            $commentRows = $peektable.find( 'td[colspan]' ).map( function( i, e ){
+                var $this = $( this );
+                if( $this.text() && $this.text().match( new RegExp( '^' + options.commentChar ) ) ){
+                    return $( this ).css( 'color', 'grey' ).parent().get(0);
+                }
+                return null;
+            });
+
+        // should comment rows in the peek be hidden?
+        if( options.hideCommentRows ){
+            $commentRows.hide();
+            rowCount -= $commentRows.length;
+        }
+        //console.debug( 'rowCount:', rowCount, 'columnCount:', columnCount, '$commentRows:', $commentRows );
+
+        // should a first column of control prompts be added?
+        if( options.includePrompts ){
+            var $topLeft = $( '<th/>' ).addClass( 'top-left' ).text( options.topLeftContent )
+                .attr( 'rowspan', rowCount );
+            $peektable.find( 'tr' ).first().prepend( $topLeft );
+        }
+
+        // save either the options column name or the parsed text of each column header in html5 data attr and text
+        var $headers = $peektable.find( 'th:not(.top-left)' ).each( function( i, e ){
+            var $this = $( this ),
+                // can be '1.name' or '1'
+                text  = $this.text().replace( /^\d+\.*/, '' ),
+                name  = options.columnNames[ i ] || text;
+            $this.attr( 'data-' + COLUMN_NAME_DATA_KEY, name )
+                .text( ( i + 1 ) + (( name )?( '.' + name ):( '' )) );
+        });
+
+        // allow renaming of columns when the header is clicked
+        if( options.renameColumns ){
+            $headers.addClass( RENAMABLE_HEADER_CLASS )
+                .click( function renameColumn(){
+                    // prompt for new name
+                    var $this = $( this ),
+                        index = $this.index() + ( options.includePrompts? 0: 1 ),
+                        prevName = $this.data( COLUMN_NAME_DATA_KEY ),
+                        newColumnName = prompt( 'New column name:', prevName );
+                    if( newColumnName !== null && newColumnName !== prevName ){
+                        // set the new text and data
+                        $this.text( index + ( newColumnName?( '.' + newColumnName ):'' ) )
+                            .data( COLUMN_NAME_DATA_KEY, newColumnName )
+                            .attr( 'data-' + COLUMN_NAME_DATA_KEY, newColumnName );
+                        // fire event for new column names
+                        var columnNames = jQuery.makeArray(
+                                $this.parent().children( 'th:not(.top-left)' ).map( function(){
+                                    return $( this ).data( COLUMN_NAME_DATA_KEY );
+                                }));
+                        $this.parents( '.peek' ).trigger( RENAME_EVENT, columnNames );
+                    }
+                });
+        }
+
+        // build a row for each control
+        options.controls.forEach( function( control, i ){
+            validateControl( control );
+            var $controlRow = buildControlRow( columnCount, control, options.includePrompts );
+            $peektable.find( 'tbody' ).append( $controlRow );
+        });
+        return this;
+    }
+
+    // ........................................................................
+    // as jq plugin
+    jQuery.fn.extend({
+        peekColumnSelector : function $peekColumnSelector( options ){
+            return this.map( function(){
+                return peekColumnSelector.call( this, options );
+            });
+        }
+    });
+}));
diff --git a/client/galaxy/scripts/ui/popupmenu.js b/client/galaxy/scripts/ui/popupmenu.js
new file mode 100644
index 0000000..e8fdfb7
--- /dev/null
+++ b/client/galaxy/scripts/ui/popupmenu.js
@@ -0,0 +1,158 @@
+define([ 'jquery' ], function( jQuery ){
+'use strict';
+
+var $ = jQuery;
+
+// ============================================================================
+// TODO: unify popup menus and/or eliminate this
+/**
+ * Sets up popupmenu rendering and binds options functions to the appropriate links.
+ * initial_options is a dict mapping text describing each option to either (a) a
+ * function to perform, or (b) another dict with two required keys, 'url' and
+ * 'action' (the function to perform). Form (b) is useful for exposing the underlying URL of the option.
+ */
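+/* Example (a sketch; the button element, labels, and url are assumptions):
+ *     make_popupmenu( $( '#options-button' ), {
+ *         'Edit'       : function(){ console.debug( 'edit clicked' ); },
+ *         'View at...' : { url: '/view', action: function(){ window.location = '/view'; } },
+ *         'A label'    : null
+ *     });
+ */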
+function make_popupmenu(button_element, initial_options) {
+    /*  Use the $.data feature to store options with the link element.
+        This allows options to be changed at a later time
+    */
+    var element_menu_exists = (button_element.data("menu_options"));
+    button_element.data("menu_options", initial_options);
+
+    // If element already has menu, nothing else to do since HTML and actions are already set.
+    if (element_menu_exists) { return; }
+
+    button_element.bind("click.show_popup", function(e) {
+        // Close existing visible menus
+        $(".popmenu-wrapper").remove();
+
+        // Need setTimeouts so clicks don't interfere with each other
+        setTimeout( function() {
+            // Dynamically generate the wrapper holding all the selectable options of the menu.
+            var menu_element = $( "<ul class='dropdown-menu' id='" + button_element.attr('id') + "-menu'></ul>" );
+            var options = button_element.data("menu_options");
+            if (_.size(options) <= 0) {
+                $("<li>No Options.</li>").appendTo(menu_element);
+            }
+            $.each( options, function( k, v ) {
+                if (v) {
+                    // Action can be either an anonymous function or a mapped dict.
+                    var action = v.action || v;
+                    menu_element.append( $("<li></li>").append( $("<a>").attr("href", v.url).html(k).click(action) ) );
+                } else {
+                    menu_element.append( $("<li></li>").addClass( "head" ).append( $("<a href='#'></a>").html(k) ) );
+                }
+            });
+            var wrapper = $( "<div class='popmenu-wrapper' style='position: absolute;left: 0; top: -1000px;'></div>" )
+                .append( menu_element ).appendTo( "body" );
+
+            var x = e.pageX - wrapper.width() / 2 ;
+            x = Math.min( x, $(document).scrollLeft() + $(window).width() - $(wrapper).width() - 5 );
+            x = Math.max( x, $(document).scrollLeft() + 5 );
+
+            wrapper.css({
+               top: e.pageY,
+               left: x
+            });
+        }, 10);
+
+        setTimeout( function() {
+            // Bind click event to current window and all frames to remove any visible menus
+            // Bind to document object instead of window object for IE compat
+            var close_popup = function(el) {
+                $(el).bind("click.close_popup", function() {
+                    $(".popmenu-wrapper").remove();
+                    el.unbind("click.close_popup");
+                });
+            };
+            close_popup( $(window.document) ); // Current frame
+            close_popup( $(window.top.document) ); // Parent frame
+            for (var frame_id = window.top.frames.length; frame_id--;) { // Sibling frames
+                var frame = $(window.top.frames[frame_id].document);
+                close_popup(frame);
+            }
+        }, 50);
+
+        return false;
+    });
+
+}
+
+/**
+ *  Convert two separate (often adjacent) divs into a galaxy popupmenu
+ *  - div 1 contains a number of anchors which become the menu options
+ *  - div 1 should have a 'popupmenu' attribute
+ *  - this popupmenu attribute contains the id of div 2
+ *  - div 2 becomes the 'face' of the popupmenu
+ *
+ *  NOTE: make_popup_menus finds and operates on all divs with a popupmenu attr (no need to point it at anything),
+ *          but since that selector searches the entire page DOM, a parent element can be passed in to limit the search
+ *  NOTE: make_popup_menus, and make_popupmenu are horrible names
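+ *
+ *  A sketch of the expected markup (ids, urls, and labels are assumptions):
+ *      <div popupmenu="options-face">
+ *          <a href="/edit">Edit</a>
+ *          <a href="/delete" confirm="Really delete?">Delete</a>
+ *      </div>
+ *      <div id="options-face">Options</div>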
+ */
+function make_popup_menus( parent ) {
+    // find all popupmenu menu divs (divs that contains anchors to be converted to menu options)
+    //  either in the parent or the document if no parent passed
+    parent = parent || document;
+    $( parent ).find( "div[popupmenu]" ).each( function() {
+        var options = {};
+        var menu = $(this);
+
+        // find each anchor in the menu, convert them into an options map: { a.text : click_function }
+        menu.find( "a" ).each( function() {
+            var link = $(this),
+                link_dom = link.get(0),
+                confirmtext = link_dom.getAttribute( "confirm" ),
+                href = link_dom.getAttribute( "href" ),
+                target = link_dom.getAttribute( "target" );
+
+            // no href - no function (gen. a label)
+            if (!href) {
+                options[ link.text() ] = null;
+
+            } else {
+                options[ link.text() ] = {
+                    url: href,
+                    action: function( event ) {
+
+                        // if there's confirm text, show the confirmation dialog
+                        if ( !confirmtext || confirm( confirmtext ) ) {
+                            // link.click() doesn't use target for some reason,
+                            // so manually do it here.
+                            if (target) {
+                                window.open(href, target);
+                                return false;
+                            }
+                            // For all other links, do the default action.
+                            else {
+                                link.click();
+                            }
+                        } else {
+                            event.preventDefault();
+                        }
+
+                    }
+                };
+            }
+        });
+        // locate the element with the id corresponding to the menu's popupmenu attr
+        var box = $( parent ).find( "#" + menu.attr( 'popupmenu' ) );
+
+        // For menus with clickable link text, make clicking on the link go through instead
+        // of activating the popup menu
+        box.find("a").bind("click", function(e) {
+            e.stopPropagation(); // Stop bubbling so clicking on the link goes through
+            return true;
+        });
+
+        // attach the click events and menu box building to the box element
+        make_popupmenu(box, options);
+        box.addClass("popup");
+        menu.remove();
+    });
+}
+
+// ============================================================================
+    return {
+        make_popupmenu : make_popupmenu,
+        make_popup_menus : make_popup_menus
+    };
+});
diff --git a/client/galaxy/scripts/ui/scroll-panel.js b/client/galaxy/scripts/ui/scroll-panel.js
new file mode 100644
index 0000000..3b9aafd
--- /dev/null
+++ b/client/galaxy/scripts/ui/scroll-panel.js
@@ -0,0 +1,88 @@
+// This is an extension to jQuery UI draggable
+// When dragging move the parent element ("panel") relative to its parent
+// ("viewport") so that the draggable is always visible. 
+
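+// Example usage (a sketch; the selectors are assumptions and the "panel"
+// option names the element to shift while dragging):
+//     $( '.workflow-node' ).draggable({ scrollPanel: true, panel: $( '#canvas' ) });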
+$.ui.plugin.add("draggable", "scrollPanel", {
+    drag: function(e, ui) {
+        var instance = $(this).data("draggable");
+        clearTimeout( instance.timeout );
+        var o = ui.options,
+            element = instance.element,
+            panel = o.panel,
+            panel_pos = panel.position(),
+            panel_w = panel.width(),
+            panel_h = panel.height(),
+            viewport = panel.parent(),
+            viewport_w = viewport.width(),
+            viewport_h = viewport.height(),
+            element_w = element.width(),
+            element_h = element.height(),
+            moved = false,
+            close_dist = 5,
+            nudge = 23,
+            // Legal panel range
+            p_min_x = - ( panel_w - viewport_w ),
+            p_min_y = - ( panel_h - viewport_h ),
+            p_max_x = 0,
+            p_max_y = 0,
+            // Visible
+            min_vis_x = - panel_pos.left,
+            max_vis_x = min_vis_x + viewport_w,
+            min_vis_y = - panel_pos.top,
+            max_vis_y = min_vis_y + viewport_h,
+            // Mouse
+            mouse_x = ui.position.left + instance.offset.click.left,
+            mouse_y = ui.position.top + instance.offset.click.top;
+        // Move it
+        if ( ( panel_pos.left < p_max_x ) && ( mouse_x - close_dist < min_vis_x ) ) {
+            var t = Math.min( nudge, p_max_x - panel_pos.left );
+            panel.css( "left", panel_pos.left + t );
+            moved = true;
+            instance.offset.parent.left += t;
+            ui.position.left -= t;
+        }
+        if ( ( ! moved ) && ( panel_pos.left > p_min_x ) && ( mouse_x + close_dist > max_vis_x ) ) {
+            var t = Math.min( nudge, panel_pos.left  - p_min_x );
+            panel.css( "left", panel_pos.left - t );
+            moved = true;
+            instance.offset.parent.left -= t;
+            ui.position.left += t;      
+        }
+        if ( ( ! moved ) && ( panel_pos.top < p_max_y ) && ( mouse_y - close_dist < min_vis_y ) ) {
+            var t = Math.min( nudge, p_max_y - panel_pos.top );
+            panel.css( "top", panel_pos.top + t );
+            // Firefox sometimes moves by less, so we need to check. Yuck.
+            var amount_moved = panel.position().top - panel_pos.top;
+            instance.offset.parent.top += amount_moved;
+            ui.position.top -= amount_moved;
+            moved = true;
+        }
+        if ( ( ! moved ) && ( panel_pos.top > p_min_y ) && ( mouse_y + close_dist > max_vis_y ) ) {
+            var t = Math.min( nudge, panel_pos.top - p_min_y );
+            panel.css( "top", ( panel_pos.top - t ) + "px" );
+            // Firefox sometimes moves by less, so we need to check. Yuck.
+            var amount_moved = panel_pos.top - panel.position().top;   
+            instance.offset.parent.top -= amount_moved;
+            ui.position.top += amount_moved;
+            moved = true;
+        }
+        // Still contain in panel
+        ui.position.left = Math.max( ui.position.left, 0 );
+        ui.position.top = Math.max( ui.position.top, 0 );
+        ui.position.left = Math.min( ui.position.left, panel_w - element_w );
+        ui.position.top = Math.min( ui.position.top, panel_h - element_h );
+        // Update offsets
+        if ( moved ) {
+            $.ui.ddmanager.prepareOffsets( instance, e );
+        }
+        // Keep moving even if mouse doesn't move
+        if ( moved ) {
+            instance.old_e = e;
+            instance.timeout = setTimeout( function() { instance.mouseMove( e ); }, 50 );
+        }
+    },
+    stop: function( e, ui ) {
+        var instance = $(this).data("draggable");
+        clearTimeout( instance.timeout );
+    }
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/ui/search-input.js b/client/galaxy/scripts/ui/search-input.js
new file mode 100644
index 0000000..6c5933e
--- /dev/null
+++ b/client/galaxy/scripts/ui/search-input.js
@@ -0,0 +1,153 @@
+// from: https://raw.githubusercontent.com/umdjs/umd/master/jqueryPlugin.js
+// Uses AMD or browser globals to create a jQuery plugin.
+(function (factory) {
+    if (typeof define === 'function' && define.amd) {
+        //TODO: So...this turns out to be an all or nothing thing. If I load jQuery in the define below, it will
+        //  (of course) wipe the old jquery *and all the plugins loaded into it*. So the define below *is still
+        //  relying on jquery being loaded globally* in order to preserve plugins.
+        define([], factory);
+    } else {
+        // Browser globals
+        factory(jQuery);
+    }
+
+}(function () {
+    var _l = window._l || function( s ){ return s; };
+
+    //TODO: consolidate with tool menu functionality, use there
+
+    /** searchInput: (jQuery plugin)
+     *      Creates a search input, a clear button, and loading indicator
+     *      within the selected node.
+     *
+     *      When the user either presses return or enters some minimal number
+     *      of characters, a callback is called. Pressing ESC when the input
+     *      is focused will clear the input and call a separate callback.
+     */
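+    /* Example usage (a sketch; the container selector and callbacks are assumptions):
+     *     $( '.search-container' ).searchInput({
+     *         placeholder   : 'search datasets',
+     *         minSearchLen  : 3,
+     *         onsearch      : function( terms ){ console.debug( 'searching:', terms ); },
+     *         onclear       : function(){ console.debug( 'search cleared' ); }
+     *     });
+     *     // later, toggle the spinner on while results load (and off when they arrive):
+     *     $( '.search-container' ).searchInput( 'toggle-loading' );
+     */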
+    function searchInput( parentNode, options ){
+        var KEYCODE_ESC     = 27,
+            KEYCODE_RETURN  = 13,
+            $parentNode     = $( parentNode ),
+            firstSearch     = true,
+            defaults = {
+                initialVal      : '',
+                name            : 'search',
+                placeholder     : 'search',
+                classes         : '',
+                onclear         : function(){},
+                onfirstsearch   : null,
+                onsearch        : function( inputVal ){},
+                minSearchLen    : 0,
+                escWillClear    : true,
+                oninit          : function(){}
+            };
+
+        // .................................................................... search/clear helpers
+        // visually clear the search, trigger an event, and call the callback
+        function clearSearchInput( event ){
+            var $input = $( this ).parent().children( 'input' );
+            $input.val( '' ).trigger( 'searchInput.clear' ).blur();
+            options.onclear();
+        }
+
+        // search for searchTerms, trigger an event, call the appropriate callback (based on whether this is the first search)
+        function search( event, searchTerms ){
+            if( !searchTerms ){
+                return clearSearchInput();
+            }
+            $( this ).trigger( 'search.search', searchTerms );
+            if( typeof options.onfirstsearch === 'function' && firstSearch ){
+                firstSearch = false;
+                options.onfirstsearch( searchTerms );
+            } else {
+                options.onsearch( searchTerms );
+            }
+        }
+
+        // .................................................................... input rendering and events
+        function inputTemplate(){
+            // class search-query is bootstrap 2.3 style that now lives in base.less
+            return [ '<input type="text" name="', options.name, '" placeholder="', options.placeholder, '" ',
+                            'class="search-query ', options.classes, '" ', '/>' ].join( '' );
+        }
+
+        // the search input that responds to keyboard events and displays the search value
+        function $input(){
+            return $( inputTemplate() )
+                // select all text on a focus
+                .focus( function( event ){
+                    $( this ).select();
+                })
+                // attach behaviors to esc, return if desired, search on some min len string
+                .keyup( function( event ){
+                    event.preventDefault();
+                    event.stopPropagation();
+
+                    // esc key will clear if desired
+                    if( event.which === KEYCODE_ESC && options.escWillClear ){
+                        clearSearchInput.call( this, event );
+
+                    } else {
+                        var searchTerms = $( this ).val();
+                        // return key or the search string len > minSearchLen (if not 0) triggers search
+                        if( ( event.which === KEYCODE_RETURN )
+                        ||  ( options.minSearchLen && searchTerms.length >= options.minSearchLen ) ){
+                            search.call( this, event, searchTerms );
+                        }
+                    }
+                })
+                .val( options.initialVal );
+        }
+
+        // .................................................................... clear button rendering and events
+        // a button for clearing the search bar, placed on the right hand side
+        function $clearBtn(){
+            return $([ '<span class="search-clear fa fa-times-circle" ',
+                             'title="', _l( 'clear search (esc)' ), '"></span>' ].join('') )
+            .tooltip({ placement: 'bottom' })
+            .click( function( event ){
+                clearSearchInput.call( this, event );
+            });
+        }
+
+        // .................................................................... loadingIndicator rendering
+        // a loading indicator (spinner) shown while a search is in progress, placed on the right hand side
+        function $loadingIndicator(){
+            return $([ '<span class="search-loading fa fa-spinner fa-spin" ',
+                             'title="', _l( 'loading...' ), '"></span>' ].join('') )
+                .hide().tooltip({ placement: 'bottom' });
+        }
+
+        // .................................................................... commands
+        // visually swap the load, clear buttons
+        function toggleLoadingIndicator(){
+            $parentNode.find( '.search-loading' ).toggle();
+            $parentNode.find( '.search-clear' ).toggle();
+        }
+
+        // .................................................................... init
+        // string command (not constructor)
+        if( jQuery.type( options ) === 'string' ){
+            if( options === 'toggle-loading' ){
+                toggleLoadingIndicator();
+            }
+            return $parentNode;
+        }
+
+        // initial render
+        if( jQuery.type( options ) === 'object' ){
+            options = jQuery.extend( true, {}, defaults, options );
+        }
+        //NOTE: prepended
+        return $parentNode.addClass( 'search-input' ).prepend([ $input(), $clearBtn(), $loadingIndicator() ]);
+    }
+
+    // as jq plugin
+    jQuery.fn.extend({
+        searchInput : function $searchInput( options ){
+            return this.each( function(){
+                return searchInput( this, options );
+            });
+        }
+    });
+}));
diff --git a/client/galaxy/scripts/utils/add-logging.js b/client/galaxy/scripts/utils/add-logging.js
new file mode 100644
index 0000000..3c27918
--- /dev/null
+++ b/client/galaxy/scripts/utils/add-logging.js
@@ -0,0 +1,42 @@
+define([
+], function(){
+//==============================================================================
+var LOGGING_FNS = [ 'log', 'debug', 'info', 'warn', 'error', 'metric' ];
+/** adds logging functions to an obj.prototype (or obj directly) adding a namespace for filtering
+ *  @param {Object} obj
+ *  @param {String} namespace
+ */
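+/* Example (a sketch; MyView and the use of window.console are assumptions):
+ *     function MyView(){}
+ *     addLogging( MyView, 'myview' );
+ *     var view = new MyView();
+ *     view.logger = window.console;
+ *     view.debug( 'hello' ); // forwarded to console.debug; the 'myview'
+ *                            // namespace is used by emit-style loggers
+ */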
+function addLogging( obj, namespace ){
+    var addTo = ( obj.prototype !== undefined )?( obj.prototype ):( obj );
+    if( namespace !== undefined ){
+        addTo._logNamespace = namespace;
+    }
+    //yagni?: without this, may not capture Galaxy.config.debug and add Galaxy.logger properly
+    // if( window.Galaxy && window.Galaxy.config && window.Galaxy.config.debug ){
+    //     addTo.logger = window.Galaxy.logger;
+    // }
+
+    // give the object each of the logging functions
+    LOGGING_FNS.forEach( function( logFn ){
+        addTo[ logFn ] = function(){
+            if( !this.logger ){
+                return undefined;
+            }
+            if( this.logger.emit ){
+                return this.logger.emit( logFn, this._logNamespace, arguments );
+            }
+            if( this.logger[ logFn ] ){
+//TODO:! there has to be a way to get the lineno/file into this
+// http://stackoverflow.com/questions/13815640/a-proper-wrapper-for-console-log-with-correct-line-number
+// http://www.paulirish.com/2009/log-a-lightweight-wrapper-for-consolelog/
+                return this.logger[ logFn ].apply( this.logger, arguments );
+            }
+            return undefined;
+        };
+    });
+    return obj;
+}
+
+//==============================================================================
+return addLogging;
+});
diff --git a/client/galaxy/scripts/utils/ajax-queue.js b/client/galaxy/scripts/utils/ajax-queue.js
new file mode 100644
index 0000000..ab7cd05
--- /dev/null
+++ b/client/galaxy/scripts/utils/ajax-queue.js
@@ -0,0 +1,179 @@
+define([
+], function(){
+//ASSUMES: jquery
+//=============================================================================
+/** @class AjaxQueue
+ *  Class that allows queueing functions that return jQuery promises (such
+ *  as ajax calls). Each function waits for the previous to complete before
+ *  being called
+ *
+ *  @constructor accepts a list of functions and automatically begins
+ *      processing them
+ */
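+/* Example (a sketch; the urls are assumptions):
+ *     var queue = new AjaxQueue([
+ *         function(){ return jQuery.ajax( '/api/step1' ); },
+ *         function(){ return jQuery.ajax( '/api/step2' ); }
+ *     ]);
+ *     queue.progress( function( data ){ console.debug( 'done', data.curr + 1, 'of', data.total ); })
+ *          .done( function( responses ){ console.debug( 'all responses:', responses ); });
+ */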
+function AjaxQueue( initialFunctions ){
+    //TODO: possibly rename to DeferredQueue
+    var self = this;
+    /** the main deferred for the entire queue - note: also sends notifications of progress */
+    self.deferred = jQuery.Deferred();
+    /** the queue array of functions */
+    self.queue = [];
+    /** cache the response from each deferred call - error or success */
+    self.responses = [];
+    /** total number of fn's to process */
+    self.numToProcess = 0;
+    /** is the queue processing/waiting for any calls to return? */
+    self.running = false;
+
+    self.init( initialFunctions || [] );
+    self.start();
+
+    return self;
+}
+
+/** add all fns in initialFunctions (if any) to the queue */
+AjaxQueue.prototype.init = function init( initialFunctions ){
+    var self = this;
+    initialFunctions.forEach( function( fn ){
+        self.add( fn );
+    });
+};
+
+/** add a fn to the queue */
+AjaxQueue.prototype.add = function add( fn ){
+    //console.debug( 'AjaxQueue.prototype.add:', fn );
+    var self = this,
+        index = this.queue.length;
+    this.numToProcess += 1;
+
+    this.queue.push( function(){
+        var fnIndex = index,
+            xhr = fn();
+        // if successful, notify using the deferred to allow tracking progress
+        xhr.done( function( response ){
+            self.deferred.notify({ curr: fnIndex, total: self.numToProcess, response: response });
+        });
+        // (regardless of previous error or success) if not last ajax call, shift and call the next
+        //  if last fn, resolve deferred
+        xhr.always( function( response ){
+            self.responses.push( response );
+            if( self.queue.length ){
+                self.queue.shift()();
+            } else {
+                self.stop();
+            }
+        });
+    });
+    return this;
+};
+
+/** start processing the queue */
+AjaxQueue.prototype.start = function start(){
+    if( this.queue.length ){
+        this.running = true;
+        this.queue.shift()();
+    }
+    return this;
+};
+
+/** stop the queue
+ *  @param {boolean} causeFail  cause an error/fail on the main deferred
+ *  @param {String} msg         message to send when rejecting the main deferred
+ */
+AjaxQueue.prototype.stop = function stop( causeFail, msg ){
+    //TODO: doesn't abort current call
+    this.running = false;
+    this.queue = [];
+    if( causeFail ){
+        //TODO: spliced args instead
+        this.deferred.reject( msg );
+    } else {
+        this.deferred.resolve( this.responses );
+    }
+    this.numToProcess = 0;
+    this.deferred = jQuery.Deferred();
+    return this;
+};
+
+// only a handful of the deferred interface for now - possible YAGNI
+/** implement done from the jq deferred interface */
+AjaxQueue.prototype.done = function done( fn ){
+    return this.deferred.done( fn );
+};
+/** implement fail from the jq deferred interface */
+AjaxQueue.prototype.fail = function fail( fn ){
+    return this.deferred.fail( fn );
+};
+/** implement always from the jq deferred interface */
+AjaxQueue.prototype.always = function always( fn ){
+    return this.deferred.always( fn );
+};
+/** implement progress from the jq deferred interface */
+AjaxQueue.prototype.progress = function progress( fn ){
+    return this.deferred.progress( fn );
+};
+
+/** shortcut constructor / fire and forget
+ *  @returns {Deferred} the queue's main deferred
+ */
+AjaxQueue.create = function create( initialFunctions ){
+    return new AjaxQueue( initialFunctions ).deferred;
+};
+
+
+//=============================================================================
+/** @class NamedAjaxQueue
+ *  @augments AjaxQueue
+ *  Allows associating a name with a deferring fn and prevents adding deferring
+ *  fns if the name has already been used. Useful to prevent build up of duplicate
+ *  async calls.
+ *  Both the array initialFunctions sent to constructor and any added later with
+ *  add() should be objects (NOT functions) of the form:
+ *  { name: some unique id,
+ *    fn:   the deferring fn or ajax call }
+ */
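+/* Example (a sketch; the name and url are assumptions):
+ *     var queue = new NamedAjaxQueue([
+ *         { name: 'fetch-user', fn: function(){ return jQuery.ajax( '/api/user' ); } }
+ *         // a second entry reusing the name 'fetch-user' would be skipped as a duplicate
+ *     ]);
+ *     queue.done( function( responses ){ console.debug( responses ); });
+ */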
+function NamedAjaxQueue( initialFunctions ){
+    var self = this;
+    self.names = {};
+    AjaxQueue.call( this, initialFunctions );
+    return self;
+}
+NamedAjaxQueue.prototype = new AjaxQueue();
+NamedAjaxQueue.prototype.constructor = NamedAjaxQueue;
+
+/** add the obj.fn to the queue if obj.name hasn't been used before */
+NamedAjaxQueue.prototype.add = function add( obj ){
+    //console.debug( 'NamedAjaxQueue.adding:', obj )
+    //console.debug( 'NamedAjaxQueue.prototype.add:', obj );
+    if( !( obj.hasOwnProperty( 'name' ) && obj.hasOwnProperty( 'fn' ) ) ){
+        throw new Error( 'NamedAjaxQueue.add requires an object with both "name" and "fn": ' + JSON.stringify( obj ) );
+    }
+    if( this.names.hasOwnProperty( obj.name ) ){
+        //console.warn( 'name has been used:', obj.name );
+        return;
+    }
+    this.names[ obj.name ] = true;
+    //console.debug( '\t names: ', this.names )
+    return AjaxQueue.prototype.add.call( this, obj.fn );
+    //console.debug( '\t queue: ', this.queue.length );
+};
+
+/** override to remove names */
+NamedAjaxQueue.prototype.clear = function clear(){
+    this.names = {};
+    return this;
+};
+
+/** shortcut constructor / fire and forget
+ *  @returns {Deferred} the queue's main deferred
+ */
+NamedAjaxQueue.create = function create( initialFunctions ){
+    return new NamedAjaxQueue( initialFunctions ).deferred;
+};
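+// Example usage (sketch; the url is hypothetical): the second entry reuses the
+// name 'current-user', so it is silently skipped and the call is made only once.
+//   NamedAjaxQueue.create([
+//       { name: 'current-user', fn: function(){ return jQuery.get( '/api/users/current' ); } },
+//       { name: 'current-user', fn: function(){ return jQuery.get( '/api/users/current' ); } }
+//   ]).done( function( responses ){ console.log( responses.length ); }); // 1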
+
+
+//=============================================================================
+    return {
+        AjaxQueue       : AjaxQueue,
+        NamedAjaxQueue  : NamedAjaxQueue
+    };
+});
diff --git a/client/galaxy/scripts/utils/async-save-text.js b/client/galaxy/scripts/utils/async-save-text.js
new file mode 100644
index 0000000..59fe804
--- /dev/null
+++ b/client/galaxy/scripts/utils/async-save-text.js
@@ -0,0 +1,89 @@
+define([ 'jquery' ], function( jQuery ){
+'use strict';
+
+var $ = jQuery;
+// ============================================================================
+/**
+ * Edit and save text asynchronously.
+ */
+function async_save_text( click_to_edit_elt, text_elt_id, save_url,
+                          text_parm_name, num_cols, use_textarea, num_rows, on_start, on_finish ) {
+    // Set defaults if necessary.
+    if (num_cols === undefined) {
+        num_cols = 30;
+    }
+    if (num_rows === undefined) {
+        num_rows = 4;
+    }
+
+    // Set up input element.
+    $("#" + click_to_edit_elt).click(function() {
+        // Check if this is already active
+        if ( $("#renaming-active").length > 0) {
+            return;
+        }
+        var text_elt = $("#" + text_elt_id),
+            old_text = text_elt.text(),
+            t;
+
+        if (use_textarea) {
+            t = $("<textarea></textarea>").attr({ rows: num_rows, cols: num_cols }).text( $.trim(old_text) );
+        } else {
+            t = $("<input type='text'></input>").attr({ value: $.trim(old_text), size: num_cols });
+        }
+        t.attr("id", "renaming-active");
+        t.blur( function() {
+            $(this).remove();
+            text_elt.show();
+            if (on_finish) {
+                on_finish(t);
+            }
+        });
+        t.keyup( function( e ) {
+            if ( e.keyCode === 27 ) {
+                // Escape key
+                $(this).trigger( "blur" );
+            } else if ( e.keyCode === 13 ) {
+                // Enter key submits
+                var ajax_data = {};
+                ajax_data[text_parm_name] = $(this).val();
+                $(this).trigger( "blur" );
+                $.ajax({
+                    url: save_url,
+                    data: ajax_data,
+                    error: function() {
+                        alert( "Text editing for elt " + text_elt_id + " failed" );
+                        // TODO: call finish or no? For now, let's not because error occurred.
+                    },
+                    success: function(processed_text) {
+                        // Set new text and call finish method.
+                        if (processed_text !== "") {
+                            text_elt.text(processed_text);
+                        } else {
+                            text_elt.html("<em>None</em>");
+                        }
+                        if (on_finish) {
+                            on_finish(t);
+                        }
+                    }
+                });
+            }
+        });
+
+        if (on_start) {
+            on_start(t);
+        }
+        // Replace text with input object and focus & select.
+        text_elt.hide();
+        t.insertAfter(text_elt);
+        t.focus();
+        t.select();
+
+        return;
+    });
+}
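+// Example usage (sketch; the element ids and url are hypothetical): clicking
+// #rename-link replaces #history-name with a text input; Enter sends
+// { new_name: <input value> } to the url via $.ajax, Escape just cancels.
+//   async_save_text( 'rename-link', 'history-name', '/history/rename_async', 'new_name' );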
+
+// ============================================================================
+    return async_save_text;
+});
+
diff --git a/client/galaxy/scripts/utils/config.js b/client/galaxy/scripts/utils/config.js
new file mode 100644
index 0000000..50bec9c
--- /dev/null
+++ b/client/galaxy/scripts/utils/config.js
@@ -0,0 +1,312 @@
+define(['libs/underscore', 'libs/backbone', 'viz/trackster/util'], function(_, Backbone, util_mod) {
+
+/**
+ * A configuration setting. Currently key is used as id.
+ */
+var ConfigSetting = Backbone.Model.extend({
+
+    initialize: function(options) {
+        // Use key as id for now.
+        var key = this.get('key');
+        this.set('id', key);
+
+        // Set defaults based on key.
+        var defaults = _.find(ConfigSetting.known_settings_defaults, function(s) { return s.key === key; });
+        if (defaults) {
+            this.set(_.extend({}, defaults, options));
+        }
+
+        if (this.get('value') === undefined && this.get('default_value') !== undefined) {
+            // Use default to set value (if present).
+            this.set_value(this.get('default_value'));
+
+            // If no default value for color config, set random color.
+            if (!this.get('value') && this.get('type') === 'color') {
+                // For color setting, set random color.
+                this.set('value', util_mod.get_random_color());
+            }
+        }
+    },
+
+    /**
+     * Cast and set value. This should be instead of
+     *  setting.set('value', new_value)
+     */
+    set_value: function(value, options) {
+        var type = this.get('type');
+
+        if (type === 'float') {
+            value = parseFloat(value);
+        }
+        else if (type === 'int') {
+            value = parseInt(value, 10);
+        }
+        // TODO: handle casting from string to bool?
+
+        this.set({value: value}, options);
+    }
+}, {
+    // This is a master list of default settings for known settings.
+    known_settings_defaults: [
+        { key: 'name', label: 'Name', type: 'text', default_value: '' },
+        { key: 'color', label: 'Color', type: 'color', default_value: null },
+        { key: 'min_value', label: 'Min Value', type: 'float', default_value: null },
+        { key: 'max_value', label: 'Max Value', type: 'float', default_value: null },
+        { key: 'mode', type: 'string', hidden: true },
+        { key: 'height', type: 'int', default_value: 32, hidden: true },
+        { key: 'pos_color', label: 'Positive Color', type: 'color', default_value: "#FF8C00" },
+        { key: 'neg_color', label: 'Negative Color', type: 'color', default_value: "#4169E1" },
+        { key: 'block_color', label: 'Block color', type: 'color', default_value: null },
+        { key: 'label_color', label: 'Label color', type: 'color', default_value: 'black' },
+        { key: 'show_insertions', label: 'Show insertions', type: 'bool', default_value: false },
+        { key: 'show_counts', label: 'Show summary counts', type: 'bool', default_value: true },
+        { key: 'reverse_strand_color', label: 'Antisense strand color', type: 'color', default_value: null },
+        { key: 'show_differences', label: 'Show differences only', type: 'bool', default_value: true }
+    ]
+});
+
+/**
+ * Collection of config settings.
+ */
+var ConfigSettingCollection = Backbone.Collection.extend({
+    model: ConfigSetting,
+
+    /**
+     * Save settings as a dictionary of key-value pairs.
+     * This function is needed for backwards compatibility.
+     */
+    to_key_value_dict: function() {
+        var rval = {};
+        this.each(function(setting) {
+            rval[setting.get('key')] = setting.get('value');
+        });
+
+        return rval;
+    },
+
+    /**
+     * Returns value for a given key. Returns undefined if there is no setting with the specified key.
+     */
+    get_value: function(key) {
+        var s = this.get(key);
+        if (s) {
+            return s.get('value');
+        }
+
+        return undefined;
+    },
+
+    /**
+     * Set value for a setting.
+     */
+    set_value: function(key, value, options) {
+        var s = this.get(key);
+        if (s) {
+            return s.set_value(value, options);
+        }
+
+        return undefined;
+    },
+
+    /**
+     * Set default value for a setting.
+     */
+    set_default_value: function(key, default_value) {
+        var s = this.get(key);
+        if (s) {
+            return s.set('default_value', default_value);
+        }
+
+        return undefined;
+    }
+},
+{
+    /**
+     * Utility function that creates a ConfigSettingCollection from a set of models
+     * and a saved_values dictionary.
+     */
+    from_models_and_saved_values: function(models, saved_values) {
+        // If there are saved values, copy models and update with saved values.
+        if (saved_values) {
+            models = _.map(models, function(m) {
+                return _.extend({}, m, { value: saved_values[m.key] });
+            });
+        }
+
+        return new ConfigSettingCollection(models);
+    }
+});
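+// Example usage (sketch): build a collection from plain setting models plus a
+// dictionary of previously saved values, then read values back ('height' falls
+// back to its default of 32 from known_settings_defaults).
+//   var settings = ConfigSettingCollection.from_models_and_saved_values(
+//       [ { key: 'color' }, { key: 'height' } ],
+//       { color: '#FF8C00' }
+//   );
+//   settings.get_value( 'color' );   // => '#FF8C00'
+//   settings.get_value( 'height' );  // => 32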
+
+/**
+ * Viewer for config settings collection.
+ */
+var ConfigSettingCollectionView = Backbone.View.extend({
+    className: 'config-settings-view',
+
+    /**
+     * Renders form for editing configuration settings.
+     */
+    render: function() {
+        var container = this.$el;
+
+        this.collection.each(function(param, index) {
+            // Hidden params have no representation in the form
+            if (param.get('hidden')) { return; }
+
+            // Build row for param.
+            var id = 'param_' + index,
+                type = param.get('type'),
+                value = param.get('value');
+            var row = $("<div class='form-row' />").appendTo(container);
+            row.append($('<label />').attr("for", id ).text(param.get('label') + ":" ));
+            // Draw parameter as checkbox
+            if ( type === 'bool' ) {
+                row.append( $('<input type="checkbox" />').attr("id", id ).attr("name", id ).attr( 'checked', value ) );
+            }
+            // Draw parameter as textbox
+            else if ( type === 'text' ) {
+                row.append( $('<input type="text"/>').attr("id", id ).val(value).click( function() { $(this).select(); }));
+            }
+            // Draw parameter as select area
+            else if ( type === 'select' ) {
+                var select = $('<select />').attr("id", id);
+                _.each(param.get('options'), function(option) {
+                    $("<option/>").text( option.label ).attr( "value", option.value ).appendTo( select );
+                });
+                select.val( value );
+                row.append( select );
+
+            }
+            // Draw parameter as color picker
+            else if ( type === 'color' ) {
+                var
+                    container_div = $("<div/>").appendTo(row),
+                    input = $('<input />').attr("id", id ).attr("name", id ).val( value ).css("float", "left")
+                        .appendTo(container_div).click(function(e) {
+                        // Hide other pickers.
+                        $(".tooltip").removeClass( "in" );
+
+                        // Show input's color picker.
+                        var tip = $(this).siblings(".tooltip").addClass( "in" );
+                        tip.css( {
+                            // left: $(this).position().left + ( $(input).width() / 2 ) - 60,
+                            // top: $(this).position().top + $(this.height)
+                            left: $(this).position().left + $(this).width() + 5,
+                            top: $(this).position().top - ( $(tip).height() / 2 ) + ( $(this).height() / 2 )
+                            } ).show();
+
+                        // Click management:
+
+                        // Keep showing tip if clicking in tip.
+                        tip.click(function(e) {
+                            e.stopPropagation();
+                        });
+
+                        // Hide tip if clicking outside of tip.
+                        $(document).bind( "click.color-picker", function() {
+                            tip.hide();
+                            $(document).unbind( "click.color-picker" );
+                        });
+
+                        // No propagation to avoid triggering document click (and tip hiding) above.
+                        e.stopPropagation();
+                    }),
+                    // Icon for setting a new random color; behavior set below.
+                    new_color_icon = $("<a href='javascript:void(0)'/>").addClass("icon-button arrow-circle").appendTo(container_div)
+                                     .attr("title", "Set new random color").tooltip(),
+                    // Color picker in tool tip style.
+                    tip = $( "<div class='tooltip right' style='position: absolute;' />" ).appendTo(container_div).hide(),
+                    // Inner div for padding purposes
+                    tip_inner = $("<div class='tooltip-inner' style='text-align: inherit'></div>").appendTo(tip),
+                    tip_arrow = $("<div class='tooltip-arrow'></div>").appendTo(tip),
+                    farb_obj = $.farbtastic(tip_inner, { width: 100, height: 100, callback: input, color: value });
+
+                // Clear floating.
+                container_div.append( $("<div/>").css("clear", "both"));
+
+                // Use function to fix farb_obj value.
+                (function(fixed_farb_obj) {
+                    new_color_icon.click(function() {
+                        fixed_farb_obj.setColor(util_mod.get_random_color());
+                    });
+                })(farb_obj);
+
+            }
+            else {
+                row.append( $('<input />').attr("id", id ).attr("name", id ).val( value ) );
+            }
+            // Help text
+            if ( param.get('help') ) {
+                row.append( $("<div class='help'/>").text( param.get('help') ) );
+            }
+        });
+
+        return this;
+    },
+
+    /**
+     * Render view in modal.
+     */
+    render_in_modal: function(title) {
+        // Set up handlers for cancel, ok button and for handling esc key.
+        var self = this,
+            cancel_fn = function() { Galaxy.modal.hide(); $(window).unbind("keypress.check_enter_esc"); },
+            ok_fn = function() {
+                Galaxy.modal.hide();
+                $(window).unbind("keypress.check_enter_esc");
+                self.update_from_form();
+            },
+            check_enter_esc = function(e) {
+                if ((e.keyCode || e.which) === 27) { // Escape key
+                    cancel_fn();
+                } else if ((e.keyCode || e.which) === 13) { // Enter key
+                    ok_fn();
+                }
+            };
+
+        // Set keypress handler.
+        $(window).bind("keypress.check_enter_esc", check_enter_esc);
+
+        // Show modal.
+        if (this.$el.children().length === 0) {
+            this.render();
+        }
+        Galaxy.modal.show({
+            title: title || "Configure",
+            body: this.$el,
+            buttons: {
+                "Cancel": cancel_fn,
+                "OK": ok_fn
+            }
+        });
+    },
+
+    /**
+     * Update settings with new values entered via form.
+     */
+    update_from_form: function() {
+        var self = this;
+        this.collection.each(function(setting, index) {
+            if ( !setting.get('hidden') ) {
+                // Set value from view.
+                var id = 'param_' + index;
+                var value = self.$el.find( '#' + id ).val();
+                if ( setting.get('type') === 'bool' ) {
+                    value = self.$el.find( '#' + id ).is( ':checked' );
+                }
+                setting.set_value(value);
+            }
+        });
+    }
+
+});
+
+return {
+    ConfigSetting: ConfigSetting,
+    ConfigSettingCollection: ConfigSettingCollection,
+    ConfigSettingCollectionView: ConfigSettingCollectionView
+};
+
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/utils/deferred.js b/client/galaxy/scripts/utils/deferred.js
new file mode 100644
index 0000000..0fd19f6
--- /dev/null
+++ b/client/galaxy/scripts/utils/deferred.js
@@ -0,0 +1,58 @@
+/**
+ *  This class defines a queue to ensure that multiple deferred callbacks are executed sequentially.
+ */
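+// Example usage (sketch; assumes the module is required as Deferred and the url
+// is hypothetical): a callback that declares an argument receives a jQuery
+// Deferred it must resolve itself; a zero-arg callback is resolved immediately,
+// and each callback waits on the promise of the one before it.
+//   var queue = new Deferred();
+//   queue.execute( function( process ){
+//       jQuery.get( '/api/configuration' ).always( process.resolve );
+//   });
+//   queue.execute( function(){ console.log( 'runs after the first resolves' ); });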
+define(['utils/utils'], function( Utils ) {
+return Backbone.Model.extend({
+    initialize: function(){
+        this.active = {};
+        this.last = null;
+    },
+
+    /** Adds a callback to the queue. Upon execution, a deferred object is passed to the callback, i.e. callback( deferred ).
+     *  If the callback does not take any arguments, the deferred is resolved instantly.
+    */
+    execute: function( callback ) {
+        var self = this;
+        var id = Utils.uid();
+        var has_deferred = callback.length > 0;
+
+        // register process
+        this.active[ id ] = true;
+
+        // deferred process
+        var process = $.Deferred();
+        process.promise().always(function() {
+            delete self.active[ id ];
+            has_deferred && Galaxy.emit.debug( 'deferred::execute()', this.state().charAt(0).toUpperCase() + this.state().slice(1) + ' ' + id );
+        });
+
+        // deferred queue
+        $.when( this.last ).always(function() {
+            if ( self.active[ id ] ) {
+                has_deferred && Galaxy.emit.debug( 'deferred::execute()', 'Running ' + id );
+                callback( process );
+                !has_deferred && process.resolve();
+            } else {
+                process.reject();
+            }
+        });
+        this.last = process.promise();
+    },
+
+    /** Resets the promise queue. All currently queued but unexecuted callbacks/promises will be rejected.
+    */
+    reset: function() {
+        Galaxy.emit.debug('deferred::reset()', 'Reset');
+        for ( var i in this.active ) {
+            this.active[ i ] = false;
+        }
+    },
+
+    /** Returns true if all processes are done.
+    */
+    ready: function() {
+        return $.isEmptyObject( this.active );
+    }
+});
+
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/utils/graph.js b/client/galaxy/scripts/utils/graph.js
new file mode 100644
index 0000000..b252354
--- /dev/null
+++ b/client/galaxy/scripts/utils/graph.js
@@ -0,0 +1,638 @@
+define([
+],function(){
+/* ============================================================================
+TODO:
+
+============================================================================ */
+//TODO: go ahead and move to underscore...
+/** call fn on each key/value in d */
+function each( d, fn ){
+    for( var k in d ){
+        if( d.hasOwnProperty( k ) ){
+            fn( d[ k ], k, d );
+        }
+    }
+}
+
+/** copy key/values from d2 to d overwriting if present */
+function extend( d, d2 ){
+    for( var k in d2 ){
+        if( d2.hasOwnProperty( k ) ){
+            d[ k ] = d2[ k ];
+        }
+    }
+    return d;
+}
+
+/** deep equal of two dictionaries */
+function matches( d, d2 ){
+    for( var k in d2 ){
+        if( d2.hasOwnProperty( k ) ){
+            if( !d.hasOwnProperty( k ) || d[ k ] !== d2[ k ] ){
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+/** map key/values in obj
+ *      if propsOrFn is an object, return only those k/v that match the object
+ *      if propsOrFn is a function, call it on each value and return the mapped results
+ */
+function iterate( obj, propsOrFn ){
+    var fn =    typeof propsOrFn === 'function'? propsOrFn : undefined,
+        props = typeof propsOrFn === 'object'?   propsOrFn : undefined,
+        returned = [],
+        index = 0;
+    for( var key in obj ){
+        if( obj.hasOwnProperty( key ) ){
+            var value = obj[ key ];
+            if( fn ){
+                returned.push( fn.call( value, value, key, index ) );
+            } else if( props ){
+//TODO: break out to sep?
+                if( typeof value === 'object' && matches( value, props ) ){
+                    returned.push( value );
+                }
+            } else {
+                returned.push( value );
+            }
+            index += 1;
+        }
+    }
+    return returned;
+}
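+// Example (sketch): with a props object, iterate acts as a filter on matching
+// key/values; with a function, it maps over the values.
+//   iterate( { a: { x: 1 }, b: { x: 2 } }, { x: 1 } );            // => [ { x: 1 } ]
+//   iterate( { a: 1, b: 2 }, function( v, k ){ return k + v; } ); // => [ 'a1', 'b2' ]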
+
+
+// ============================================================================
+/** A graph edge containing the name/id of both source and target and optional data
+ */
+function Edge( source, target, data ){
+    var self = this;
+    self.source = source !== undefined? source : null;
+    self.target = target !== undefined? target : null;
+    self.data = data || null;
+    //if( typeof data === 'object' ){
+    //    extend( self, data );
+    //}
+    return self;
+}
+/** String representation */
+Edge.prototype.toString = function(){
+    return this.source + '->' + this.target;
+};
+
+/** Return a plain object representing this edge */
+Edge.prototype.toJSON = function(){
+    //TODO: this is safe in most browsers (fns will be stripped) - alter tests to incorporate this in order to pass data
+    //return this;
+    var json = {
+        source : this.source,
+        target : this.target
+    };
+    if( this.data ){
+        json.data = this.data;
+    }
+    return json;
+};
+
+// ============================================================================
+/** A graph vertex with a (unique) name/id and optional data.
+ *      A vertex contains a list of Edges (whose sources are this vertex) and maintains the degree.
+ */
+function Vertex( name, data ){
+    var self = this;
+    self.name = name !== undefined? name : '(unnamed)';
+    self.data = data || null;
+    self.edges = {};
+    self.degree = 0;
+    return self;
+}
+
+/** String representation */
+Vertex.prototype.toString = function(){
+    return 'Vertex(' + this.name + ')';
+};
+
+//TODO: better name w no collision for either this.eachEdge or this.edges
+/** Iterate over each edge from this vertex */
+Vertex.prototype.eachEdge = function( propsOrFn ){
+    return iterate( this.edges, propsOrFn );
+};
+
+/** Return a plain object representing this vertex */
+Vertex.prototype.toJSON = function(){
+    //return this;
+    return {
+        name : this.name,
+        data : this.data
+    };
+};
+
+
+// ============================================================================
+/** Base (abstract) class for Graph search algorithms.
+ *      Pass in the graph to search
+ *      and an optional dictionary containing the 3 vertex/edge processing fns listed below.
+ */
+var GraphSearch = function( graph, processFns ){
+    var self = this;
+    self.graph = graph;
+
+    self.processFns = processFns || {
+        vertexEarly : function( vertex, search ){
+            //console.debug( 'processing vertex:', vertex.name, vertex );
+        },
+        edge        : function( from, edge, search ){
+            //console.debug( this, 'edge:', from, edge, search );
+        },
+        vertexLate  : function( vertex, search ){
+            //console.debug( this, 'vertexLate:', vertex, search );
+        }
+    };
+
+    self._cache = {};
+    return self;
+};
+
+/** Search interface where start is the vertex (or the name/id of the vertex) to begin the search at
+ *      This public interface caches searches and returns the cached version if it's already been done.
+ */
+GraphSearch.prototype.search = function _search( start ){
+    var self = this;
+    if( start in self._cache ){ return self._cache[ start ]; }
+    if( !( start instanceof Vertex ) ){ start = self.graph.vertices[ start ]; }
+    return ( self._cache[ start.name ] = self._search( start ) );
+};
+
+/** Actual search (private) function (abstract here) */
+GraphSearch.prototype._search = function __search( start, search ){
+    search = search || {
+        discovered : {},
+        //parents : {},
+        edges : []
+    };
+    return search;
+};
+
+/** Searches graph from start and returns a search tree of the results */
+GraphSearch.prototype.searchTree = function _searchTree( start ){
+    return this._searchTree( this.search( start ) );
+};
+
+/** Helper fn that returns a graph (a search tree) based on the search object passed in (does not actually search) */
+GraphSearch.prototype._searchTree = function __searchTree( search ){
+    var self = this;
+    return new Graph( true, {
+        edges: search.edges,
+        vertices: Object.keys( search.discovered ).map( function( key ){
+            return self.graph.vertices[ key ].toJSON();
+        })
+    });
+};
+
+
+// ============================================================================
+/** Breadth-first search algorithm.
+ */
+var BreadthFirstSearch = function( graph, processFns ){
+    var self = this;
+    GraphSearch.call( this, graph, processFns );
+    return self;
+};
+BreadthFirstSearch.prototype = new GraphSearch();
+BreadthFirstSearch.prototype.constructor = BreadthFirstSearch;
+
+/** (Private) implementation of BFS */
+BreadthFirstSearch.prototype._search = function __search( start, search ){
+    search = search || {
+        discovered : {},
+        //parents : {},
+        edges : []
+    };
+
+    var self = this,
+        queue = [];
+
+    function discoverAdjacent( adj, edge ){
+        var source = this;
+        if( self.processFns.edge ){ self.processFns.edge.call( self, source, edge, search ); }
+        if( !search.discovered[ adj.name ] ){
+            //console.debug( '\t\t\t', adj.name, 'is undiscovered:', search.discovered[ adj.name ] );
+            search.discovered[ adj.name ] = true;
+            //search.parents[ adj.name ] = source;
+            search.edges.push({ source: source.name, target: adj.name });
+            //console.debug( '\t\t\t queuing undiscovered: ', adj );
+            queue.push( adj );
+        }
+    }
+
+    //console.debug( 'BFS starting. start:', start );
+    search.discovered[ start.name ] = true;
+    queue.push( start );
+    while( queue.length ){
+        var vertex = queue.shift();
+        //console.debug( '\t Queue is shifting. Current:', vertex, 'queue:', queue );
+        if( self.processFns.vertexEarly ){ self.processFns.vertexEarly.call( self, vertex, search ); }
+        self.graph.eachAdjacent( vertex, discoverAdjacent );
+        if( self.processFns.vertexLate ){ self.processFns.vertexLate.call( self, vertex, search ); }
+    }
+    //console.debug( 'search.edges:', JSON.stringify( search.edges ) );
+    return search;
+};
+
+
+// ============================================================================
+/** Depth-first search algorithm.
+ */
+var DepthFirstSearch = function( graph, processFns ){
+    var self = this;
+    GraphSearch.call( this, graph, processFns );
+    return self;
+};
+DepthFirstSearch.prototype = new GraphSearch();
+DepthFirstSearch.prototype.constructor = DepthFirstSearch;
+
+/** (Private) implementation of DFS */
+DepthFirstSearch.prototype._search = function( start, search ){
+    //console.debug( 'depthFirstSearch:', start );
+    search = search || {
+        discovered : {},
+        //parents    : {},
+        edges      : [],
+        entryTimes : {},
+        exitTimes  : {}
+    };
+    var self = this,
+        time = 0;
+
+    // discover verts adjacent to the source (this):
+    //  processing each edge, saving the edge to the tree, and caching the reverse path with parents
+    function discoverAdjacentVertices( adjacent, edge ){
+        //console.debug( '\t\t adjacent:', adjacent, 'edge:', edge );
+        var sourceVertex = this;
+        if( self.processFns.edge ){ self.processFns.edge.call( self, sourceVertex, edge, search ); }
+        if( !search.discovered[ adjacent.name ] ){
+            //search.parents[ adjacent.name ] = sourceVertex;
+            search.edges.push({ source: sourceVertex.name, target: adjacent.name });
+            recurse( adjacent );
+        }
+    }
+
+    // use the function call stack as the DFS stack: process verts and their times, and discover adjacent verts (recursing into them)
+    function recurse( vertex ){
+        //console.debug( '\t recursing into: ', vertex );
+        search.discovered[ vertex.name ] = true;
+        if( self.processFns.vertexEarly ){ self.processFns.vertexEarly.call( self, vertex, search ); }
+        search.entryTimes[ vertex.name ] = time++;
+
+        self.graph.eachAdjacent( vertex, discoverAdjacentVertices );
+
+        if( self.processFns.vertexLate ){ self.processFns.vertexLate.call( self, vertex, search ); }
+        search.exitTimes[ vertex.name ] = time++;
+    }
+    // begin recursion with the desired start
+    recurse( start );
+
+    return search;
+};
+
+
+// ============================================================================
+/** A directed/non-directed graph object.
+ */
+function Graph( directed, data, options ){
+//TODO: move directed to options
+    this.directed = directed || false;
+    return this.init( options ).read( data );
+}
+window.Graph = Graph;
+
+/** Set up options and instance variables */
+Graph.prototype.init = function( options ){
+    options = options || {};
+    var self = this;
+
+    self.allowReflexiveEdges = options.allowReflexiveEdges || false;
+
+    self.vertices = {};
+    self.numEdges = 0;
+    return self;
+};
+
+/** Read data from the plain object data - both in d3 form (nodes and links) or vertices and edges */
+Graph.prototype.read = function( data ){
+    if( !data ){ return this; }
+    var self = this;
+    if( data.hasOwnProperty( 'nodes' ) ){ return self.readNodesAndLinks( data ); }
+    if( data.hasOwnProperty( 'vertices' ) ){ return self.readVerticesAndEdges( data ); }
+    return self;
+};
+
+//TODO: the next two could be combined
+/** Create the graph using a list of nodes and a list of edges (where source and target are indices into nodes) */
+Graph.prototype.readNodesAndLinks = function( data ){
+    if( !( data && data.hasOwnProperty( 'nodes' ) ) ){ return this; }
+    //console.debug( 'readNodesAndLinks:', data );
+    //console.debug( 'data:\n' + JSON.stringify( data, null, '  ' ) );
+    var self = this;
+    data.nodes.forEach( function( node ){
+        self.createVertex( node.name, node.data );
+    });
+    //console.debug( JSON.stringify( self.vertices, null, '  ' ) );
+
+    ( data.links || [] ).forEach( function( edge, i ){
+        var sourceName = data.nodes[ edge.source ].name,
+            targetName = data.nodes[ edge.target ].name;
+        self.createEdge( sourceName, targetName, self.directed );
+    });
+    //self.print();
+    //console.debug( JSON.stringify( self.toNodesAndLinks(), null, '  ' ) );
+    return self;
+};
+
+/** Create the graph using a list of nodes and a list of edges (where source and target are names of nodes) */
+Graph.prototype.readVerticesAndEdges = function( data ){
+    if( !( data && data.hasOwnProperty( 'vertices' ) ) ){ return this; }
+    //console.debug( 'readVerticesAndEdges:', data );
+    //console.debug( 'data:\n' + JSON.stringify( data, null, '  ' ) );
+    var self = this;
+    data.vertices.forEach( function( node ){
+        self.createVertex( node.name, node.data );
+    });
+    //console.debug( JSON.stringify( self.vertices, null, '  ' ) );
+
+    ( data.edges || [] ).forEach( function( edge, i ){
+        self.createEdge( edge.source, edge.target, self.directed );
+    });
+    //self.print();
+    //console.debug( JSON.stringify( self.toNodesAndLinks(), null, '  ' ) );
+    return self;
+};
+
+/** Return the vertex with name, creating it if necessary */
+Graph.prototype.createVertex = function( name, data ){
+    //console.debug( 'createVertex:', name, data );
+    if( this.vertices[ name ] ){ return this.vertices[ name ]; }
+    return ( this.vertices[ name ] = new Vertex( name, data ) );
+};
+
+/** Create an edge in vertex named sourceName to targetName (optionally adding data to it)
+ *      If directed is false, create a second edge from targetName to sourceName.
+ */
+Graph.prototype.createEdge = function( sourceName, targetName, directed, data ){
+    //note: allows multiple 'equivalent' edges (to/from same source/target)
+    //console.debug( 'createEdge:', source, target, directed );
+    var isReflexive = sourceName === targetName;
+    if( !this.allowReflexiveEdges && isReflexive ){ return null; }
+
+    var sourceVertex = this.vertices[ sourceName ],
+        targetVertex = this.vertices[ targetName ];
+    //note: silently ignores edges from/to unknown vertices
+    if( !( sourceVertex && targetVertex ) ){ return null; }
+
+//TODO: prob. move to vertex
+    var self = this,
+        edge = new Edge( sourceName, targetName, data );
+    sourceVertex.edges[ targetName ] = edge;
+    sourceVertex.degree += 1;
+    self.numEdges += 1;
+
+    //TODO:! don't like having duplicate edges for non-directed graphs
+    // mirror edges (reversing source and target) in non-directed graphs
+    //  but only if not reflexive
+    if( !isReflexive && !directed ){
+        // flip directed to prevent recursion loop
+        self.createEdge( targetName, sourceName, true );
+    }
+
+    return edge;
+};
+
+/** Walk over all the edges of the graph using the vertex.eachEdge iterator */
+Graph.prototype.edges = function( propsOrFn ){
+    return Array.prototype.concat.apply( [], this.eachVertex( function( vertex ){
+        return vertex.eachEdge( propsOrFn );
+    }));
+};
+
+/** Iterate over all the vertices in the graph */
+Graph.prototype.eachVertex = function( propsOrFn ){
+    return iterate( this.vertices, propsOrFn );
+};
+
+/** Return a list of the vertices adjacent to vertex */
+Graph.prototype.adjacent = function( vertex ){
+    var self = this;
+    return iterate( vertex.edges, function( edge ){
+        return self.vertices[ edge.target ];
+    });
+};
+
+/** Call fn on each vertex adjacent to vertex */
+Graph.prototype.eachAdjacent = function( vertex, fn ){
+    var self = this;
+    return iterate( vertex.edges, function( edge ){
+        var adj = self.vertices[ edge.target ];
+        return fn.call( vertex, adj, edge );
+    });
+};
+
+/** Print the graph to the console (debugging) */
+Graph.prototype.print = function(){
+    var self = this;
+    console.log( 'Graph has ' + Object.keys( self.vertices ).length + ' vertices' );
+    self.eachVertex( function( vertex ){
+        console.log( vertex.toString() );
+        vertex.eachEdge( function( edge ){
+            console.log( '\t ' + edge );
+        });
+    });
+    return self;
+};
+
+/** Return a DOT format string of this graph */
+Graph.prototype.toDOT = function(){
+    var self = this,
+        strings = [];
+    strings.push( 'graph bler {' );
+    self.edges( function( edge ){
+        strings.push( '\t' + edge.source + ' -- ' + edge.target + ';' );
+    });
+    strings.push( '}' );
+    return strings.join( '\n' );
+};
+
+/** Return vertices and edges of this graph in d3 node/link format */
+Graph.prototype.toNodesAndLinks = function(){
+    var self = this,
+        indices = {};
+    return {
+        nodes : self.eachVertex( function( vertex, key, i ){
+            indices[ vertex.name ] = i;
+            return vertex.toJSON();
+        }),
+        links : self.edges( function( edge ){
+            var json = edge.toJSON();
+            json.source = indices[ edge.source ];
+            json.target = indices[ edge.target ];
+            return json;
+        })
+    };
+};
+
+/** Return vertices and edges of this graph where edges use the name/id as source and target */
+Graph.prototype.toVerticesAndEdges = function(){
+    var self = this;
+    return {
+        vertices : self.eachVertex( function( vertex, key ){
+            return vertex.toJSON();
+        }),
+        edges : self.edges( function( edge ){
+            return edge.toJSON();
+        })
+    };
+};
+
+/** Search this graph using BFS */
+Graph.prototype.breadthFirstSearch = function( start, processFns ){
+    return new BreadthFirstSearch( this ).search( start );
+};
+
+/** Return a search tree of this graph using BFS */
+Graph.prototype.breadthFirstSearchTree = function( start, processFns ){
+    return new BreadthFirstSearch( this ).searchTree( start );
+};
+
+/** Search this graph using DFS */
+Graph.prototype.depthFirstSearch = function( start, processFns ){
+    return new DepthFirstSearch( this ).search( start );
+};
+
+/** Return a search tree of this graph using DFS */
+Graph.prototype.depthFirstSearchTree = function( start, processFns ){
+    return new DepthFirstSearch( this ).searchTree( start );
+};
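+// Example usage (sketch): build a small directed graph and search it from 'A'.
+//   var graph = new Graph( true, {
+//       vertices : [ { name : 'A' }, { name : 'B' }, { name : 'C' } ],
+//       edges    : [ { source : 'A', target : 'B' }, { source : 'B', target : 'C' } ]
+//   });
+//   graph.breadthFirstSearch( 'A' ).edges;
+//   // => [ { source : 'A', target : 'B' }, { source : 'B', target : 'C' } ]
+//   graph.depthFirstSearchTree( 'A' ).print();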
+
+
+//Graph.prototype.shortestPath = function( start, end ){
+//};
+//
+//Graph.prototype.articulationVertices = function(){
+//};
+//
+//Graph.prototype.isAcyclic = function(){
+//};
+//
+//Graph.prototype.isBipartite = function(){
+//};
+
+/** Return an array of weakly connected (no edges between) sub-graphs in this graph */
+Graph.prototype.weakComponents = function(){
+//TODO: alternately, instead of returning graph-like objects:
+//  - could simply decorate the vertices (vertex.component = componentIndex), or clone the graph and do that
+    var self = this,
+        searchGraph = this,
+        undiscovered,
+        components = [];
+
+    function getComponent( undiscoveredVertex ){
+//TODO: better interface on dfs (search v. searchTree)
+        var search = new DepthFirstSearch( searchGraph )._search( undiscoveredVertex );
+
+        // remove curr discovered from undiscovered
+        undiscovered = undiscovered.filter( function( name ){
+            return !( name in search.discovered );
+        });
+
+        return {
+            vertices : Object.keys( search.discovered ).map( function( vertexName ){
+                return self.vertices[ vertexName ].toJSON();
+            }),
+            edges : search.edges.map( function( edge ){
+                // restore any reversed edges
+                var hasBeenReversed = self.vertices[ edge.target ].edges[ edge.source ] !== undefined;
+                if( self.directed && hasBeenReversed ){
+                    var swap = edge.source;
+                    edge.source = edge.target;
+                    edge.target = swap;
+                }
+                return edge;
+            })
+        };
+    }
+
+    if( self.directed ){
+        // if directed - convert to undirected for search
+        searchGraph = new Graph( false, self.toNodesAndLinks() );
+    }
+    undiscovered = Object.keys( searchGraph.vertices );
+    //console.debug( '(initial) undiscovered:', undiscovered );
+    while( undiscovered.length ){
+        var undiscoveredVertex = searchGraph.vertices[ undiscovered.shift() ];
+        components.push( getComponent( undiscoveredVertex ) );
+        //console.debug( 'undiscovered now:', undiscovered );
+    }
+
+    //console.debug( 'components:\n', JSON.stringify( components, null, '  ' ) );
+    return components;
+};
+
+/** Return a single graph containing the weakly connected components in this graph */
+Graph.prototype.weakComponentGraph = function(){
+    //note: although this can often look like the original graph - edges can be lost
+    var components = this.weakComponents();
+    return new Graph( this.directed, {
+        vertices : components.reduce( function( reduction, curr ){
+            return reduction.concat( curr.vertices );
+        }, [] ),
+        edges : components.reduce( function( reduction, curr ){
+            return reduction.concat( curr.edges );
+        }, [] )
+    });
+};
+
+/** Return an array of graphs of the weakly connected components in this graph */
+Graph.prototype.weakComponentGraphArray = function(){
+    //note: although this can often look like the original graph - edges can be lost
+    var graph = this;
+    return this.weakComponents().map( function( component ){
+        return new Graph( graph.directed, component );
+    });
+};
+
+
+// ============================================================================
+/** Create a random graph with numVerts vertices and numEdges edges (for testing)
+ */
+function randGraph( directed, numVerts, numEdges ){
+    //console.debug( 'randGraph', directed, numVerts, numEdges );
+    var data = { nodes : [], links : [] };
+    function randRange( range ){
+        return Math.floor( Math.random() * range );
+    }
+    for( var i=0; i<numVerts; i++ ){
+        data.nodes.push({ name: i });
+    }
+    for( i=0; i<numEdges; i++ ){
+        data.links.push({
+            source : randRange( numVerts ),
+            target : randRange( numVerts )
+        });
+    }
+    //console.debug( JSON.stringify( data, null, '  ' ) );
+    return new Graph( directed, data );
+}
+
+
+// ============================================================================
+    return {
+        Vertex : Vertex,
+        Edge : Edge,
+        BreadthFirstSearch : BreadthFirstSearch,
+        DepthFirstSearch : DepthFirstSearch,
+        Graph : Graph,
+        randGraph : randGraph
+    };
+});
diff --git a/client/galaxy/scripts/utils/levenshtein.js b/client/galaxy/scripts/utils/levenshtein.js
new file mode 100644
index 0000000..ff12829
--- /dev/null
+++ b/client/galaxy/scripts/utils/levenshtein.js
@@ -0,0 +1,60 @@
+define([
+], function(){
+//=============================================================================
+/**
+(Imported for the edit distance algorithm. From: https://gist.github.com/andrei-m/982927)
+Copyright (c) 2011 Andrei Mackenzie
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
+persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
+Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+// Compute the edit distance between the two given strings
+//exports.getEditDistance = function(a, b){
+function levenshteinDistance(a, b){
+  if(a.length === 0){ return b.length; }
+  if(b.length === 0){ return a.length; }
+
+  var matrix = [];
+
+  // increment along the first column of each row
+  var i;
+  for(i = 0; i <= b.length; i++){
+    matrix[i] = [i];
+  }
+
+  // increment each column in the first row
+  var j;
+  for(j = 0; j <= a.length; j++){
+    matrix[0][j] = j;
+  }
+
+  // Fill in the rest of the matrix
+  for(i = 1; i <= b.length; i++){
+    for(j = 1; j <= a.length; j++){
+      if(b.charAt(i-1) === a.charAt(j-1)){
+        matrix[i][j] = matrix[i-1][j-1];
+      } else {
+        matrix[i][j] = Math.min(matrix[i-1][j-1] + 1, // substitution
+                                Math.min(matrix[i][j-1] + 1, // insertion
+                                         matrix[i-1][j] + 1)); // deletion
+      }
+    }
+  }
+
+  //console.debug( '\t\t levenshteinDistance', a, b, matrix[b.length][a.length] );
+  return matrix[b.length][a.length];
+}
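+// Example (sketch): levenshteinDistance( 'kitten', 'sitting' ) returns 3
+// (substitute 'k'->'s', substitute 'e'->'i', append 'g').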
+
+//=============================================================================
+    return levenshteinDistance;
+});
diff --git a/client/galaxy/scripts/utils/localization.js b/client/galaxy/scripts/utils/localization.js
new file mode 100644
index 0000000..32db13b
--- /dev/null
+++ b/client/galaxy/scripts/utils/localization.js
@@ -0,0 +1,50 @@
+define([
+    'i18n!nls/locale'
+], function( localeStrings ){
+
+// =============================================================================
+/** Simple string replacement localization. Language data from galaxy/scripts/nls */
+
+// support both requirejs and webpack from the same file
+// if loaded via webpack, it will be a different format than via requirejs - convert
+if( localeStrings.hasOwnProperty( '__root' ) ){
+    //console.debug( 'amdi18n+webpack localization for ' + locale + ' loaded' );
+    var locale = (
+        typeof navigator === 'undefined' ? '__root'
+            : ( navigator.language || navigator.userLanguage || '__root' ).toLowerCase()
+    );
+    localeStrings = localeStrings[ '__' + locale ] || localeStrings[ '__' + locale.split('-')[0] ] || localeStrings.__root;
+
+// } else {
+//     console.debug( 'i18n+requirejs localization for ' + locale + ' loaded' );
+}
+// TODO: when this is no longer necessary remove this, i18n.js, and the resolveModule in config
+
+// -----------------------------------------------------------------------------
+/** Attempt to get a localized string for strToLocalize. If not found, return
+ *      the original strToLocalize.
+ * @param {String} strToLocalize the string to localize
+ * @returns either the localized string if found or strToLocalize if not found
+ */
+var localize = function( strToLocalize ){
+    // console.debug( 'amdi18n.localize:', strToLocalize, '->', localeStrings[ strToLocalize ] || strToLocalize );
+
+    // //TODO: conditional compile on DEBUG flag
+    // // cache strings that need to be localized but haven't been?
+    // if( localize.cacheNonLocalized && !localeStrings.hasOwnProperty( strToLocalize ) ){
+    //     // console.debug( 'localization NOT found:', strToLocalize );
+    //     // add nonCached as hash directly to this function
+    //     localize.nonLocalized = localize.nonLocalized || {};
+    //     localize.nonLocalized[ locale ] = localize.nonLocalized[ locale ] || {};
+    //     localize.nonLocalized[ locale ][ strToLocalize ] = false;
+    // }
+
+    // return the localized version from the closure if it's there, the strToLocalize if not
+    return localeStrings[ strToLocalize ] || strToLocalize;
+};
+localize.cacheNonLocalized = false;
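+// Example usage (sketch): requiring modules conventionally alias this fn as _l;
+// the input string is returned unchanged when no translation exists.
+//   define([ 'utils/localization' ], function( _l ){
+//       var title = _l( 'Histories' ); // translated, or 'Histories' as fallback
+//   });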
+
+
+// =============================================================================
+    return localize;
+});
diff --git a/client/galaxy/scripts/utils/metrics-logger.js b/client/galaxy/scripts/utils/metrics-logger.js
new file mode 100644
index 0000000..16082a9
--- /dev/null
+++ b/client/galaxy/scripts/utils/metrics-logger.js
@@ -0,0 +1,475 @@
+define([
+], function(){
+/*global window, jQuery, console */
+/*=============================================================================
+TODO:
+    while anon: logs saved to 'logs-null' - this will never post
+        unless we manually do so at/after login
+        OR prepend when userId and localStorage has 'logs-null'
+    wire up _delayPost and test
+
+=============================================================================*/
+/** @class MetricsLogger
+ *
+ *  Object to cache, output, and post log/metric messages to the server.
+ *  Meant to be attached to the Galaxy object.
+ *
+ *  Log from objects by either attaching logger directly:
+ *      panel.logger.metric( 'user dataset deletion', panel.user.id, hda.toJSON() )
+ *  or using the LoggableMixin or addLogging function:
+ *      MyBackboneModel.extend( LoggableMixin ).extend({ ... })
+ *      addLogging( MyBackboneModel, 'my-backbone-model' )
+ *
+ *  Log from templates by calling directly from Galaxy object:
+ *      Galaxy.logger.metric( 'template loaded', { ownedByUser : true });
+ *
+ *  If you attempt to log an un-serializable object (circular reference, window, etc.),
+ *  that entry will not be cached (or sent). If you set consoleLevel and consoleLogger
+ *  appropriately, a warning will be shown when this happens:
+ *      > panel.metric( 'something weird with window', { window : window })
+ *      !'Metrics logger could not stringify logArguments: ...'
+ */
+function MetricsLogger( options ){
+    options = options || {};
+    var self = this;
+
+    /** get the current user's id from bootstrapped data or options */
+    self.userId = ( window.bootstrapped && window.bootstrapped.user )? window.bootstrapped.user.id: null;
+    self.userId = self.userId || options.userId || null;
+
+    /** the (optional) console to emit logs to */
+    self.consoleLogger = options.consoleLogger || null;
+
+    self._init( options );
+    return self;
+}
+
+//----------------------------------------------------------------------------- defaults and constants
+// see: python std lib, logging
+MetricsLogger.ALL   =  0;
+MetricsLogger.LOG   =  0;
+MetricsLogger.DEBUG = 10;
+MetricsLogger.INFO  = 20;
+MetricsLogger.WARN  = 30;
+MetricsLogger.ERROR = 40;
+// metrics levels here?
+//MetricsLogger.MinorEvent  = 45;
+//MetricsLogger.MajorEvent  = 50;
+MetricsLogger.METRIC = 50;
+MetricsLogger.NONE = 100;
+
+/** default options - override these through the constructor */
+MetricsLogger.defaultOptions = {
+    /** if an incoming message has a level >= this, it will be cached - can also be a string (e.g. 'debug') */
+    logLevel            : MetricsLogger.NONE,
+    /** if an incoming message has a level >= this, it will be output to the console */
+    consoleLevel        : MetricsLogger.NONE,
+    /** the default 'namespace' or label associated with an incoming message (if none is passed) */
+    defaultNamespace    : 'Galaxy',
+    /** the namespaces output to the console (all namespaces will be output if this is falsy)
+     *  note: applies only to the console (not the event/metrics log/cache)
+     */
+    consoleNamespaceWhitelist : null,
+    /** the prefix attached to client-side logs to distinguish them in the metrics db */
+    clientPrefix        : 'client.',
+
+    /** the maximum number of messages the cache should hold; if exceeded older messages are removed first */
+    maxCacheSize        : 3000,
+    /** the number of messages to accumulate before posting to the server; should be <= maxCacheSize */
+    postSize            : 1000,
+    /** T/F whether to add a timestamp to incoming cached messages */
+    addTime             : true,
+    /** string to prefix to userid for cache web storage */
+    cacheKeyPrefix      : 'logs-',
+
+    /** the relative url to post messages to */
+    postUrl             : '/api/metrics',
+    /** delay before trying post again after two failures */
+    delayPostInMs       : 1000 * 60 * 10,
+
+    /** an (optional) function that should return an object; used to send additional data with the metrics */
+    getPingData         : undefined,
+    /** an (optional) function that will handle the servers response after successfully posting messages */
+    onServerResponse    : undefined
+};
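+// Example construction (sketch; the namespace and arguments are hypothetical):
+// levels may be given as numbers or as the string names above.
+//   var logger = new MetricsLogger({
+//       logLevel      : 'metric',        // cache metric-level entries for posting
+//       consoleLevel  : 'debug',         // echo debug and above to the console
+//       consoleLogger : window.console
+//   });
+//   logger.emit( 'metric', 'history', [ 'dataset deleted', { id : 'abc123' } ] );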
+
+//----------------------------------------------------------------------------- set up
+/** initialize the logger with options, set up instance vars and cache, and add onpageunload to window */
+MetricsLogger.prototype._init = function _init( options ){
+    var self = this;
+    self.options = {};
+    for( var k in MetricsLogger.defaultOptions ){
+        if( MetricsLogger.defaultOptions.hasOwnProperty( k ) ){
+            self.options[ k ] = ( options.hasOwnProperty( k ) )?( options[ k ] ):( MetricsLogger.defaultOptions[ k ] );
+        }
+    }
+    self.options.logLevel = self._parseLevel( self.options.logLevel );
+    self.options.consoleLevel = self._parseLevel( self.options.consoleLevel );
+    //self._emitToConsole( 'debug', 'MetricsLogger', 'MetricsLogger.options:', self.options );
+
+    /** is the logger currently sending? */
+    self._sending = false;
+    /** the setTimeout id if the logger POST has failed more than once */
+    self._waiting = null;
+    /** the current number of entries to send in a POST */
+    self._postSize = self.options.postSize;
+
+    self._initCache();
+
+    return self;
+};
+
+/** initialize the cache */
+MetricsLogger.prototype._initCache = function _initCache(){
+    try {
+        this.cache = new LoggingCache({
+            maxSize : this.options.maxCacheSize,
+            key     : this.options.cacheKeyPrefix + this.userId
+        });
+    } catch( err ){
+        this._emitToConsole( 'warn', 'MetricsLogger', [ 'Could not initialize logging cache:', err ] );
+        this.options.logLevel = MetricsLogger.NONE;
+    }
+};
+
+/** return the numeric log level if level in 'none, debug, log, info, warn, error' */
+MetricsLogger.prototype._parseLevel = function _parseLevel( level ){
+    var type = typeof level;
+    if( type === 'number' ){ return level; }
+    if( type === 'string' ){
+        var upper = level.toUpperCase();
+        if( MetricsLogger.hasOwnProperty( upper ) ){
+            return MetricsLogger[ upper ];
+        }
+    }
+    throw new Error( 'Unknown log level: ' + level );
+};
+
+
+//----------------------------------------------------------------------------- main entry point
+/** record a log/message's arguments to the cache and/or the console based on level and namespace */
+MetricsLogger.prototype.emit = function emit( level, namespace, logArguments ){
+    //this._emitToConsole( 'debug', 'MetricsLogger', [ 'emit:', level, namespace, logArguments ]);
+    var self = this;
+    namespace = namespace || self.options.defaultNamespace;
+    if( !level || !logArguments ){
+        return self;
+    }
+    // add to cache if proper level
+    //TODO: respect do not track?
+    //if( !navigator.doNotTrack && level >= self.options.logLevel ){
+    level = self._parseLevel( level );
+    if( level >= self.options.logLevel ){
+        self._addToCache( level, namespace, logArguments );
+    }
+    // also emit to consoleLogger if proper level for that
+    if( self.consoleLogger && level >= self.options.consoleLevel ){
+        self._emitToConsole( level, namespace, logArguments );
+    }
+    return self;
+};
+
+//----------------------------------------------------------------------------- cache
+/** add a message to the cache and if messages.length is high enough post them to the server */
+MetricsLogger.prototype._addToCache = function _addToCache( level, namespace, logArguments ){
+    this._emitToConsole( 'debug', 'MetricsLogger',
+        [ '_addToCache:', arguments, this.options.addTime, this.cache.length() ]);
+    //this._emitToConsole( 'debug', 'MetricsLogger', [ '\t logArguments:', logArguments ]);
+    var self = this;
+    // try to add to the cache and, if we've accumulated _postSize entries, attempt to post them to the server
+    try {
+        var newLength = self.cache.add( self._buildEntry( level, namespace, logArguments ) );
+        if( newLength >= self._postSize ){
+            self._postCache();
+        }
+    // discard entry if an error occurs, but warn if level set to do so
+    } catch( err ){
+        self._emitToConsole( 'warn', 'MetricsLogger',
+            [ 'Metrics logger could not stringify logArguments:', namespace, logArguments ] );
+        self._emitToConsole( 'error', 'MetricsLogger', [ err ] );
+
+    }
+    return self;
+};
+
+/** build a log cache entry object from the given level, namespace, and arguments (optionally adding a timestamp) */
+MetricsLogger.prototype._buildEntry = function _buildEntry( level, namespace, logArguments ){
+    this._emitToConsole( 'debug', 'MetricsLogger', [ '_buildEntry:', arguments ]);
+    var entry = {
+            level       : level,
+            namespace   : this.options.clientPrefix + namespace,
+            args        : logArguments
+        };
+    if( this.options.addTime ){
+        entry.time = new Date().toISOString();
+    }
+    return entry;
+};
+
+/** post _postSize messages from the cache to the server, removing them if successful
+ *      if the post fails, wait until maxCacheSize is accumulated instead and try again then
+ *      in addition to the messages from the cache ('metrics'), any info from getPingData (if set) will be sent
+ *      onServerResponse will be called (if set) with any response from the server
+ */
+MetricsLogger.prototype._postCache = function _postCache( options ){
+    options = options || {};
+    this._emitToConsole( 'info', 'MetricsLogger', [ '_postCache', options, this._postSize ]);
+
+    // short circuit if we're already sending
+    if( !this.options.postUrl || this._sending ){
+        return jQuery.when({});
+    }
+
+    var self = this,
+        postSize = options.count || self._postSize,
+        // do not splice - remove after *successful* post
+        entries = self.cache.get( postSize ),
+        entriesLength = entries.length,
+        // use the optional getPingData to add any extra info we may want to send
+        postData = ( typeof self.options.getPingData === 'function' )?( self.options.getPingData() ):( {} );
+    //console.debug( postSize, entriesLength );
+
+    // add the metrics and send
+    postData.metrics = JSON.stringify( entries );
+    //console.debug( postData.metrics );
+    self._sending = true;
+    return jQuery.post( self.options.postUrl, postData )
+        .always( function(){
+            self._sending = false;
+        })
+        .fail( function( xhr, status, message ){
+            // on failure, raise the next post target to the max number of entries before retrying
+            self._postSize = self.options.maxCacheSize;
+//TODO:??
+            // log this failure to explain any gap in metrics
+            self.emit( 'error', 'MetricsLogger', [ '_postCache error:',
+                xhr.readyState, xhr.status, xhr.responseJSON || xhr.responseText ]);
+//TODO: still doesn't solve the problem that when cache == max, post will be tried on every emit
+//TODO: see _delayPost
+        })
+        .done( function( response ){
+            if( typeof self.options.onServerResponse === 'function' ){
+                self.options.onServerResponse( response );
+            }
+            // only remove if post successful
+            self.cache.remove( entriesLength );
+            //console.debug( 'removed entries:', entriesLength, 'size now:', self.cache.length() );
+            // if we succeeded, reset the post target to the normal num of entries
+            self._postSize = self.options.postSize;
+        });
+    // return the xhr promise
+};
+
+/** set _waiting to true and, after delayPostInMs, set it back to false */
+MetricsLogger.prototype._delayPost = function _delayPost(){
+//TODO: this won't work between pages
+    var self = this;
+    self._waiting = setTimeout( function(){
+        self._waiting = null;
+    }, self.options.delayPostInMs );
+};
+
+
+//----------------------------------------------------------------------------- console
+/** output message to console based on level and consoleLogger type */
+MetricsLogger.prototype._emitToConsole = function _emitToConsole( level, namespace, logArguments ){
+    //console.debug( '_emitToConsole:', level, namespace, logArguments );
+    var self = this,
+        whitelist = self.options.consoleNamespaceWhitelist;
+    if( !self.consoleLogger ){ return self; }
+    // if a whitelist for namespaces is set, bail if this namespace is not in the list
+    if( whitelist && whitelist.indexOf( namespace ) === -1 ){
+        return self;
+    }
+
+    var args = Array.prototype.slice.call( logArguments, 0 );
+    args.unshift( namespace );
+//TODO: script location and/or source maps?
+//TODO: branch on navigator.userAgent == AIIEEE - it only has log
+    if(        level >= MetricsLogger.METRIC && typeof( self.consoleLogger.info ) === 'function' ){
+        self.consoleLogger.info.apply( self.consoleLogger, args );
+    } else if( level >= MetricsLogger.ERROR && typeof( self.consoleLogger.error ) === 'function' ){
+        self.consoleLogger.error.apply( self.consoleLogger, args );
+    } else if( level >= MetricsLogger.WARN && typeof( self.consoleLogger.warn ) === 'function' ){
+        self.consoleLogger.warn.apply( self.consoleLogger, args );
+    } else if( level >= MetricsLogger.INFO && typeof( self.consoleLogger.info ) === 'function' ){
+        self.consoleLogger.info.apply( self.consoleLogger, args );
+    } else if( level >= MetricsLogger.DEBUG && typeof( self.consoleLogger.debug ) === 'function' ){
+        self.consoleLogger.debug.apply( self.consoleLogger, args );
+    } else if( typeof( self.consoleLogger.log ) === 'function' ){
+        self.consoleLogger.log.apply( self.consoleLogger, args );
+    }
+    return self;
+};
+
+//----------------------------------------------------------------------------- shortcuts
+// generic functions for logging from non-namespaced objects (e.g. templates)
+/** log to default namespace */
+MetricsLogger.prototype.log = function log(){
+    this.emit( 1, this.options.defaultNamespace,
+        Array.prototype.slice.call( arguments, 0 ) );
+};
+
+/** debug to default namespace */
+MetricsLogger.prototype.debug = function debug(){
+    this.emit( MetricsLogger.DEBUG, this.options.defaultNamespace,
+        Array.prototype.slice.call( arguments, 0 ) );
+};
+
+/** info to default namespace */
+MetricsLogger.prototype.info = function info(){
+    this.emit( MetricsLogger.INFO, this.options.defaultNamespace,
+        Array.prototype.slice.call( arguments, 0 ) );
+};
+
+/** warn to default namespace */
+MetricsLogger.prototype.warn = function warn(){
+    this.emit( MetricsLogger.WARN, this.options.defaultNamespace,
+        Array.prototype.slice.call( arguments, 0 ) );
+};
+
+/** error to default namespace */
+MetricsLogger.prototype.error = function error(){
+    this.emit( MetricsLogger.ERROR, this.options.defaultNamespace,
+        Array.prototype.slice.call( arguments, 0 ) );
+};
+
+/** metric to default namespace */
+MetricsLogger.prototype.metric = function metric(){
+    this.emit( MetricsLogger.METRIC, this.options.defaultNamespace,
+        Array.prototype.slice.call( arguments, 0 ) );
+};
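+
+// Usage sketch (editor's illustration; the constructor options shown are
+// inferred from the option names used in this file, not a documented API):
+//   var logger = new MetricsLogger({ consoleLevel: MetricsLogger.DEBUG });
+//   logger.info( 'log to the default namespace at the info level' );
+//   logger.emit( 'warn', 'some.namespace', [ 'namespaced warning' ] );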
+
+
+/* ============================================================================
+TODO:
+    need a performance pass - the JSON un/parsing is a bit much
+
+============================================================================ */
+/** @class LoggingCache
+ *  Simple implementation of cache wrapping an array.
+ *
+ *  Formats an entry before it's cached and only keeps options.maxSize number
+ *  of entries. Older entries are deleted first.
+ */
+function LoggingCache( options ){
+    var self = this;
+    return self._init( options || {} );
+}
+
+/** default options */
+LoggingCache.defaultOptions = {
+    /** maximum number of entries to keep before discarding oldest */
+    maxSize     : 5000
+};
+
+/** initialize with options */
+LoggingCache.prototype._init = function _init( options ){
+    if( !this._hasStorage() ){
+        //TODO: fall back to jstorage
+        throw new Error( 'LoggingCache needs localStorage' );
+    }
+    if( !options.key ){
+        throw new Error( 'LoggingCache needs key for localStorage' );
+    }
+    this.key = options.key;
+    this._initStorage();
+
+    this.maxSize = options.maxSize || LoggingCache.defaultOptions.maxSize;
+    return this;
+};
+
+/** tests for localStorage fns */
+LoggingCache.prototype._hasStorage = function _hasStorage(){
+//TODO: modernizr
+    var test = 'test';
+    try {
+        localStorage.setItem( test, test );
+        localStorage.removeItem( test );
+        return true;
+    } catch( e ){
+        return false;
+    }
+};
+
+/** if no localStorage set for key, initialize to empty array */
+LoggingCache.prototype._initStorage = function _initStorage(){
+    if( localStorage.getItem( this.key ) === null ){
+        return this.empty();
+    }
+    return this;
+};
+
+/** add an entry to the cache, removing the oldest beforehand if size >= maxSize */
+LoggingCache.prototype.add = function add( entry ){
+    var self = this,
+        _cache = self._fetchAndParse(),
+        overage = ( _cache.length + 1 ) - self.maxSize;
+    if( overage > 0 ){
+        _cache.splice( 0, overage );
+    }
+    _cache.push( entry );
+    self._unparseAndStore( _cache );
+    return _cache.length;
+};
+
+/** get the entries from localStorage and parse them */
+LoggingCache.prototype._fetchAndParse = function _fetchAndParse(){
+    var self = this;
+    return JSON.parse( localStorage.getItem( self.key ) );
+};
+
+/** stringify the entries and put them in localStorage */
+LoggingCache.prototype._unparseAndStore = function _unparseAndStore( entries ){
+    var self = this;
+    return localStorage.setItem( self.key, JSON.stringify( entries ) );
+};
+
+///** process the entry before caching */
+//LoggingCache.prototype._preprocessEntry = function _preprocessEntry( entry ){
+//    return JSON.stringify( entry );
+//};
+
+/** return the number of entries currently in the cache */
+LoggingCache.prototype.length = function length(){
+    return this._fetchAndParse().length;
+};
+
+/** get count number of entries starting with the oldest */
+LoggingCache.prototype.get = function get( count ){
+    return this._fetchAndParse().slice( 0, count );
+};
+
+/** remove count number of entries starting with the oldest */
+LoggingCache.prototype.remove = function remove( count ){
+    var _cache = this._fetchAndParse(),
+        removed = _cache.splice( 0, count );
+    this._unparseAndStore( _cache );
+    return removed;
+};
+
+/** empty/clear the entire cache */
+LoggingCache.prototype.empty = function empty(){
+    localStorage.setItem( this.key, '[]' );
+    return this;
+};
+
+/** stringify count number of entries (but do not remove) */
+LoggingCache.prototype.stringify = function stringify( count ){
+    return JSON.stringify( this.get( count ) );
+};
+
+/** outputs entire cache to console */
+LoggingCache.prototype.print = function print(){
+    // log to the console (rather than a popup) so entries are easy to copy/paste
+    console.log( JSON.stringify( this._fetchAndParse(), null, '  ' ) );
+};
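+
+// Usage sketch (editor's illustration; the localStorage key is hypothetical):
+//   var cache = new LoggingCache({ key: 'example-log-cache', maxSize: 100 });
+//   cache.add({ level: 10, namespace: 'example', args: [ 'hello' ] });  // -> 1
+//   cache.get( 1 );     // -> [ { level: 10, namespace: 'example', args: [ 'hello' ] } ]
+//   cache.remove( 1 );  // removes (and returns) the oldest entry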
+
+
+//=============================================================================
+    return {
+        MetricsLogger  : MetricsLogger,
+        LoggingCache   : LoggingCache
+    };
+});
diff --git a/client/galaxy/scripts/utils/natural-sort.js b/client/galaxy/scripts/utils/natural-sort.js
new file mode 100644
index 0000000..1f6cc5c
--- /dev/null
+++ b/client/galaxy/scripts/utils/natural-sort.js
@@ -0,0 +1,30 @@
+define([], function(){
+// Alphanumeric/natural sort fn
+function naturalSort(a, b) {
+    // set up temp-scope variables for comparison evaluation
+    var re = /(-?[0-9\.]+)/g,
+        x = a.toString().toLowerCase() || '',
+        y = b.toString().toLowerCase() || '',
+        nC = String.fromCharCode(0),
+        xN = x.replace( re, nC + '$1' + nC ).split(nC),
+        yN = y.replace( re, nC + '$1' + nC ).split(nC),
+        xD = (new Date(x)).getTime(),
+        yD = xD ? (new Date(y)).getTime() : null;
+    // natural sorting of dates
+    if ( yD ) {
+        if ( xD < yD ) { return -1; }
+        else if ( xD > yD ) { return 1; }
+    }
+    // natural sorting through split numeric strings and default strings
+    var oFxNcL, oFyNcL;
+    for ( var cLoc = 0, numS = Math.max(xN.length, yN.length); cLoc < numS; cLoc++ ) {
+        oFxNcL = parseFloat(xN[cLoc]) || xN[cLoc];
+        oFyNcL = parseFloat(yN[cLoc]) || yN[cLoc];
+        if (oFxNcL < oFyNcL) { return -1; }
+        else if (oFxNcL > oFyNcL) { return 1; }
+    }
+    return 0;
+}
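+
+// Example (editor's illustration): numeric chunks compare as numbers, so
+//   [ 'chr10', 'chr2', 'chr1' ].sort( naturalSort ) -> [ 'chr1', 'chr2', 'chr10' ]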
+
+return naturalSort;
+});
diff --git a/client/galaxy/scripts/utils/query-string-parsing.js b/client/galaxy/scripts/utils/query-string-parsing.js
new file mode 100644
index 0000000..a866ffc
--- /dev/null
+++ b/client/galaxy/scripts/utils/query-string-parsing.js
@@ -0,0 +1,33 @@
+define([], function(){
+// ============================================================================
+function get( key, queryString ){
+    queryString = queryString || window.location.search.substr( 1 );
+    var keyRegex = new RegExp( key + '=([^&#$]+)' ),
+        matches = queryString.match( keyRegex );
+    if( !matches || !matches.length ){
+        return undefined;
+    }
+    matches = matches.splice( 1 );
+    if( matches.length === 1 ){
+        return matches[0];
+    }
+    return matches;
+}
+
+function parse( queryString ){
+    if( !queryString ){ return {}; }
+    var parsed = {},
+        split = queryString.split( '&' );
+    split.forEach( function( pairString ){
+        var pair = pairString.split( '=' );
+        parsed[ pair[0] ] = decodeURI( pair[1] );
+    });
+    return parsed;
+}
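+
+// Examples (editor's illustration):
+//   get( 'id', 'id=abc&view=full' )  -> 'abc'
+//   parse( 'id=abc&view=full' )      -> { id: 'abc', view: 'full' }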
+
+// ============================================================================
+    return {
+        get     : get,
+        parse   : parse,
+    };
+});
diff --git a/client/galaxy/scripts/utils/uploadbox.js b/client/galaxy/scripts/utils/uploadbox.js
new file mode 100755
index 0000000..b75d855
--- /dev/null
+++ b/client/galaxy/scripts/utils/uploadbox.js
@@ -0,0 +1,303 @@
+/*
+    galaxy upload plugins - requires FormData and XMLHttpRequest
+*/
+;(function($){
+    // add event properties
+    jQuery.event.props.push("dataTransfer");
+
+    /**
+        Posts file data to the API
+    */
+    $.uploadpost = function (config) {
+        // parse options
+        var cnf = $.extend({}, {
+            data            : {},
+            success         : function() {},
+            error           : function() {},
+            progress        : function() {},
+            url             : null,
+            maxfilesize     : 2048,
+            error_filesize  : 'File exceeds 2GB. Please use an FTP client.',
+            error_default   : 'Please make sure the file is available.',
+            error_server    : 'Upload request failed.',
+            error_login     : 'Uploads require you to log in.'
+        }, config);
+
+        // link data
+        var data = cnf.data;
+
+        // check errors
+        if (data.error_message) {
+            cnf.error(data.error_message);
+            return;
+        }
+
+        // construct form data
+        var form = new FormData();
+        for (var key in data.payload) {
+            form.append(key, data.payload[key]);
+        }
+
+        // add files to submission
+        var sizes = 0;
+        for (var key in data.files) {
+            var d = data.files[key];
+            form.append(d.name, d.file, d.file.name);
+            sizes += d.file.size;
+        }
+
+        // check file size, unless it's an ftp file
+        if (sizes > 1048576 * cnf.maxfilesize) {
+            cnf.error(cnf.error_filesize);
+            return;
+        }
+
+        // prepare request
+        var xhr = new XMLHttpRequest();
+        xhr.open('POST', cnf.url, true);
+        xhr.setRequestHeader('Accept', 'application/json');
+        xhr.setRequestHeader('Cache-Control', 'no-cache');
+        xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest');
+
+        // captures state changes
+        xhr.onreadystatechange = function() {
+            // check for request completed, server connection closed
+            if (xhr.readyState == xhr.DONE) {
+                // parse response
+                var response = null;
+                if (xhr.responseText) {
+                    try {
+                        response = jQuery.parseJSON(xhr.responseText);
+                    } catch (e) {
+                        response = xhr.responseText;
+                    }
+                }
+                // pass any error to the error option
+                if (xhr.status < 200 || xhr.status > 299) {
+                    var text = xhr.statusText;
+                    if (xhr.status == 403) {
+                        text = cnf.error_login;
+                    } else if (xhr.status == 0) {
+                        text = cnf.error_server;
+                    } else if (!text) {
+                        text = cnf.error_default;
+                    }
+                    cnf.error(text + ' (' + xhr.status + ')');
+                } else {
+                    cnf.success(response);
+                }
+            }
+        };
+
+        // prepare upload progress
+        xhr.upload.addEventListener('progress', function(e) {
+            if (e.lengthComputable) {
+                cnf.progress(Math.round((e.loaded * 100) / e.total));
+            }
+        }, false);
+
+        // send request
+        Galaxy.emit.debug('uploadbox::uploadpost()', 'Posting following data.', cnf);
+        xhr.send(form);
+    };
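+
+    // Usage sketch (editor's illustration; the url and payload shape are
+    // assumptions, not a documented call):
+    //   $.uploadpost({
+    //       url      : Galaxy.root + 'api/tools',
+    //       data     : { payload: { tool_id: 'upload1' }, files: [] },
+    //       success  : function( response ) { console.log( response ); },
+    //       progress : function( percentage ) { console.log( percentage + '%' ); }
+    //   });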
+
+    /**
+        Handles the upload events drag/drop etc.
+    */
+    $.fn.uploadinput = function(options) {
+        // initialize
+        var el = this;
+        var opts = $.extend({}, {
+            ondragover  : function() {},
+            ondragleave : function() {},
+            onchange    : function() {},
+            multiple    : false
+        }, options);
+
+        // append hidden upload field
+        var $input = $('<input type="file" style="display: none" ' + (opts.multiple && 'multiple' || '') + '/>');
+        el.append($input.change(function (e) {
+            opts.onchange(e.target.files);
+            $(this).val('');
+        }));
+
+        // drag/drop events
+        el.on('drop', function (e) {
+            opts.ondragleave(e);
+            if(e.dataTransfer) {
+                opts.onchange(e.dataTransfer.files);
+                e.preventDefault();
+            }
+        });
+        el.on('dragover',  function (e) {
+            e.preventDefault();
+            opts.ondragover(e);
+        });
+        el.on('dragleave', function (e) {
+            e.stopPropagation();
+            opts.ondragleave(e);
+        });
+
+        // exports
+        return {
+            dialog: function () {
+                $input.trigger('click');
+            }
+        };
+    };
+
+    /**
+        Handles the upload queue and events such as drag/drop etc.
+    */
+    $.fn.uploadbox = function(options) {
+        // parse options
+        var opts = $.extend({}, {
+            dragover        : function() {},
+            dragleave       : function() {},
+            announce        : function(d, m) {},
+            initialize      : function(d) {},
+            progress        : function(d, m) {},
+            success         : function(d, m) {},
+            error           : function(d, m) { alert(m); },
+            complete        : function() {}
+        }, options);
+
+        // file queue
+        var queue = {};
+
+        // queue index/length counter
+        var queue_index = 0;
+        var queue_length = 0;
+
+        // indicates if queue is currently running
+        var queue_running = false;
+        var queue_stop = false;
+
+        // element
+        var uploadinput = $(this).uploadinput({
+            multiple    : true,
+            onchange    : function(files) { add(files); },
+            ondragover  : opts.dragover,
+            ondragleave : opts.dragleave
+        });
+
+        // add new files to upload queue
+        function add(files) {
+            if (files && files.length && !queue_running) {
+                var current_index = queue_index;
+                _.each(files, function(file, key) {
+                    if (file.mode !== 'new' && _.filter(queue, function(f) {
+                        return f.name === file.name && f.size === file.size;
+                    }).length) {
+                        file.duplicate = true;
+                    }
+                });
+                _.each(files, function(file) {
+                    if (!file.duplicate) {
+                        var index = String(queue_index++);
+                        queue[index] = file;
+                        opts.announce(index, queue[index]);
+                        queue_length++;
+                    }
+                });
+                return current_index;
+            }
+        }
+
+        // remove file from queue
+        function remove(index) {
+            if (queue[index]) {
+                delete queue[index];
+                queue_length--;
+            }
+        }
+
+        // process an upload, recursive
+        function process() {
+            // validate
+            if (queue_length == 0 || queue_stop) {
+                queue_stop = false;
+                queue_running = false;
+                opts.complete();
+                return;
+            } else {
+                queue_running = true;
+            }
+
+            // get an identifier from the queue
+            var index = -1;
+            for (var key in queue) {
+                index = key;
+                break;
+            }
+
+            // get current file from queue
+            var file = queue[index];
+
+            // remove from queue
+            remove(index);
+
+            // create and submit data
+            $.uploadpost({
+                url      : opts.url,
+                data     : opts.initialize(index),
+                success  : function(message) { opts.success(index, message); process();},
+                error    : function(message) { opts.error(index, message); process();},
+                progress : function(percentage) { opts.progress(index, percentage); }
+            });
+        }
+
+        /*
+            public interface
+        */
+
+        // open file browser for selection
+        function select() {
+            uploadinput.dialog();
+        }
+
+        // remove all entries from queue
+        function reset() {
+            for (var index in queue) {
+                remove(index);
+            }
+        }
+
+        // initiate upload process
+        function start() {
+            if (!queue_running) {
+                queue_running = true;
+                process();
+            }
+        }
+
+        // stop upload process
+        function stop() {
+            queue_stop = true;
+        }
+
+        // set options
+        function configure(options) {
+            opts = $.extend({}, opts, options);
+            return opts;
+        }
+
+        // verify browser compatibility
+        function compatible() {
+            return window.File && window.FormData && window.XMLHttpRequest && window.FileList;
+        }
+
+        // export functions
+        return {
+            'select'        : select,
+            'add'           : add,
+            'remove'        : remove,
+            'start'         : start,
+            'stop'          : stop,
+            'reset'         : reset,
+            'configure'     : configure,
+            'compatible'    : compatible
+        };
+    };
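+
+    // Usage sketch (editor's illustration; selector and callbacks are hypothetical):
+    //   var box = $( '#upload-box' ).uploadbox({
+    //       url        : Galaxy.root + 'api/tools',
+    //       initialize : function( index ) { return { payload: {}, files: [] }; },
+    //       success    : function( index, message ) { console.log( 'uploaded', index ); }
+    //   });
+    //   box.select();  // open the file browser to queue files
+    //   box.start();   // begin processing the queue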
+})(jQuery);
+
diff --git a/client/galaxy/scripts/utils/utils.js b/client/galaxy/scripts/utils/utils.js
new file mode 100644
index 0000000..856064d
--- /dev/null
+++ b/client/galaxy/scripts/utils/utils.js
@@ -0,0 +1,298 @@
+/**
+ * Galaxy utilities comprise small functions which, at this point,
+ * do not require their own classes/files
+*/
+define( [], function() {
+
+    /** Builds a basic iframe */
+    function iframe( src ) {
+        return '<iframe src="' + src + '" frameborder="0" style="width: 100%; height: 100%;"></iframe>';
+    }
+
+    /** Recursively traverse a dict, applying the callback to every nested object */
+    function deepeach( dict, callback ) {
+        for( var i in dict ) {
+            var d = dict[ i ];
+            if( _.isObject( d ) ) {
+                var new_dict = callback( d );
+                new_dict && ( dict[ i ] = new_dict );
+                deepeach( d, callback );
+            }
+        }
+    }
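+
+    // Example (editor's illustration): visits every nested object, depth-first:
+    //   deepeach( { a: { b: { c: 1 } } }, function( d ) { console.debug( d ); } );
+    //   // logs { b: { c: 1 } }, then { c: 1 }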
+
+    /** Identifies urls and replaces them with anchors */
+    function linkify( inputText ) {
+        var replacedText, replacePattern1, replacePattern2, replacePattern3;
+
+        // URLs starting with http://, https://, or ftp://
+        replacePattern1 = /(\b(https?|ftp):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/gim;
+        replacedText = inputText.replace(replacePattern1, '<a href="$1" target="_blank">$1</a>');
+
+        // URLs starting with "www." (without // before it, or it'd re-link the ones done above).
+        replacePattern2 = /(^|[^\/])(www\.[\S]+(\b|$))/gim;
+        replacedText = replacedText.replace(replacePattern2, '$1<a href="http://$2" target="_blank">$2</a>');
+
+        // Change email addresses to mailto: links.
+        replacePattern3 = /(([a-zA-Z0-9\-\_\.])+@[a-zA-Z\_]+?(\.[a-zA-Z]{2,6})+)/gim;
+        replacedText = replacedText.replace(replacePattern3, '<a href="mailto:$1">$1</a>');
+
+        return replacedText;
+    }
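+
+    // Example (editor's illustration):
+    //   linkify( 'see http://example.org' )
+    //   -> 'see <a href="http://example.org" target="_blank">http://example.org</a>'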
+
+    /** Clone */
+    function clone( obj ) {
+        return JSON.parse( JSON.stringify( obj ) || null );
+    }
+
+    /**
+     * Check if a string is a json string
+     * @param{String}   text - Content to be validated
+     */
+    function isJSON(text) {
+        return /^[\],:{}\s]*$/.test(text.replace(/\\["\\\/bfnrtu]/g, '@').
+            replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
+            replace(/(?:^|:|,)(?:\s*\[)+/g, ''));
+    };
+
+    /**
+     * Sanitize/escape a string
+     * @param{String}   content - Content to be sanitized
+     */
+    function sanitize(content) {
+        return $('<div/>').text(content).html();
+    };
+
+    /**
+     * Checks if a value or list of values is `empty`
+     * usually used for selectable options
+     * @param{String}   value - Value or list to be validated
+     */
+    function isEmpty ( value ) {
+        if ( !( value instanceof Array ) ) {
+            value = [ value ];
+        }
+        if ( value.length === 0 ) {
+            return true;
+        }
+        for( var i in value ) {
+            if ( [ '__null__', '__undefined__', null, undefined ].indexOf( value[ i ] ) > -1 ) {
+                return true;
+            }
+        }
+        return false;
+    };
+
+    /**
+     * Convert a list to a pretty string
+     * @param{Array}    lst - List of strings to be converted into a human-readable sentence
+     */
+    function textify( lst ) {
+        if ( $.isArray( lst ) ) {
+            lst = lst.toString().replace( /,/g, ', ' );
+            var pos = lst.lastIndexOf( ', ' );
+            if ( pos != -1 ) {
+                lst = lst.substr( 0, pos ) + ' or ' + lst.substr( pos + 2 );
+            }
+            return lst;
+        }
+        return '';
+    };
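+
+    // Example (editor's illustration): textify( [ 'a', 'b', 'c' ] ) -> 'a, b or c'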
+
+    /**
+     * Request handler for GET; takes a single options object:
+     * @param{String}   options.url     - Url the request is made to
+     * @param{Function} options.success - Callback on success
+     * @param{Function} options.error   - Callback on error
+     * @param{Boolean}  options.cache   - Use cached data if available
+     */
+    function get (options) {
+        top.__utils__get__ = top.__utils__get__ || {};
+        var cache_key = JSON.stringify( options );
+        if (options.cache && top.__utils__get__[cache_key]) {
+            options.success && options.success(top.__utils__get__[cache_key]);
+            window.console.debug('utils.js::get() - Fetching from cache [' + options.url + '].');
+        } else {
+            request({
+                url     : options.url,
+                data    : options.data,
+                success : function(response) {
+                    top.__utils__get__[cache_key] = response;
+                    options.success && options.success(response);
+                },
+                error : function(response, status) {
+                    options.error && options.error(response, status);
+                }
+            });
+        }
+    };
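+
+    // Usage sketch (editor's illustration; the url is hypothetical):
+    //   get({ url: Galaxy.root + 'api/configuration', cache: true,
+    //         success: function( response ) { console.debug( response ); } });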
+
+    /**
+     * Generic request handler; takes a single options object:
+     * @param{String}   options.type    - Request method ['GET', 'POST', 'DELETE', 'PUT']
+     * @param{String}   options.url     - Url the request is made to
+     * @param{Object}   options.data    - Data sent to the url
+     * @param{Function} options.success - Callback on success
+     * @param{Function} options.error   - Callback on error
+     */
+    function request (options) {
+        // prepare ajax
+        var ajaxConfig = {
+            contentType : 'application/json',
+            type        : options.type || 'GET',
+            data        : options.data || {},
+            url         : options.url
+        };
+        // encode data into url
+        if ( ajaxConfig.type == 'GET' || ajaxConfig.type == 'DELETE' ) {
+            if ( !$.isEmptyObject(ajaxConfig.data) ) {
+                ajaxConfig.url += ajaxConfig.url.indexOf('?') == -1 ? '?' : '&';
+                ajaxConfig.url += $.param(ajaxConfig.data, true);
+            }
+            ajaxConfig.data = null;
+        } else {
+            ajaxConfig.dataType = 'json';
+            ajaxConfig.data     = JSON.stringify(ajaxConfig.data);
+        }
+
+        // make request
+        $.ajax(ajaxConfig).done(function(response) {
+            if (typeof response === 'string') {
+                try {
+                    response = response.replace('Infinity,', '"Infinity",');
+                    response = jQuery.parseJSON(response);
+                } catch (e) {
+                    console.debug(e);
+                }
+            }
+            options.success && options.success(response);
+        }).fail(function(response) {
+            var response_text = null;
+            try {
+                response_text = jQuery.parseJSON(response.responseText);
+            } catch (e) {
+                response_text = response.responseText;
+            }
+            options.error && options.error(response_text, response.status);
+        }).always(function() {
+            options.complete && options.complete();
+        });
+    };
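+
+    // Usage sketch (editor's illustration; the url is hypothetical):
+    //   request({
+    //       type    : 'PUT',
+    //       url     : Galaxy.root + 'api/histories/<id>',
+    //       data    : { name: 'New name' },
+    //       success : function( response ) { console.debug( response ); },
+    //       error   : function( response, status ) { console.warn( status ); }
+    //   });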
+
+    /**
+     * Read a property value from CSS
+     * @param{String}   classname   - CSS class
+     * @param{String}   name        - CSS property
+     */
+    function cssGetAttribute (classname, name) {
+        var el = $('<div class="' + classname + '"></div>');
+        el.appendTo(':eq(0)');
+        var value = el.css(name);
+        el.remove();
+        return value;
+    };
+
+    /**
+     * Load a CSS file
+     * @param{String}   url - Url of CSS file
+     */
+    function cssLoadFile (url) {
+        if (!$('link[href^="' + url + '"]').length) {
+            $('<link href="' + Galaxy.root + url + '" rel="stylesheet">').appendTo('head');
+        }
+    };
+
+    /**
+     * Safely merge to dictionaries
+     * @param{Object}   options         - Target dictionary
+     * @param{Object}   optionsDefault  - Source dictionary
+     */
+    function merge (options, optionsDefault) {
+        if (options) {
+            return _.defaults(options, optionsDefault);
+        } else {
+            return optionsDefault;
+        }
+    };
+
+
+    /**
+     * Round floating point 'number' to 'numPlaces' decimal places.
+     * @param{Number}   number      a floating point number
+     * @param{Number}   numPlaces   number of decimal places
+     */
+    function roundToDecimalPlaces( number, numPlaces ){
+        var placesMultiplier = 1;
+        for( var i=0; i<numPlaces; i++ ){
+            placesMultiplier *= 10;
+        }
+        return Math.round( number * placesMultiplier ) / placesMultiplier;
+    }
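+
+    // Example: roundToDecimalPlaces( 3.14159, 2 ) -> 3.14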
+
+    // calculate on import
+    var kb = 1024,
+        mb = kb * kb,
+        gb = mb * kb,
+        tb = gb * kb;
+    /**
+     * Format byte size to string with units
+     * @param{Integer}   size           - Size in bytes
+     * @param{Boolean}   normal_font    - Switches font between normal and bold
+     */
+    function bytesToString (size, normal_font, numberPlaces) {
+        numberPlaces = numberPlaces !== undefined? numberPlaces: 1;
+        // identify unit
+        var unit = "";
+        if (size >= tb){ size = size / tb; unit = 'TB'; } else
+        if (size >= gb){ size = size / gb; unit = 'GB'; } else
+        if (size >= mb){ size = size / mb; unit = 'MB'; } else
+        if (size >= kb){ size = size / kb; unit = 'KB'; } else
+        if (size >  0){ unit = 'b'; }
+        else { return normal_font? '0 b': '<strong>-</strong>'; }
+        // return formatted string
+        var rounded = unit == 'b'? size: roundToDecimalPlaces( size, numberPlaces );
+        if (normal_font) {
+           return  rounded + ' ' + unit;
+        } else {
+            return '<strong>' + rounded + '</strong> ' + unit;
+        }
+    };
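+
+    // Examples (editor's illustration):
+    //   bytesToString( 1536, true )  -> '1.5 KB'
+    //   bytesToString( 1536 )        -> '<strong>1.5</strong> KB'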
+
+    /** Create a unique id */
+    function uid(){
+        top.__utils__uid__ = top.__utils__uid__ || 0;
+        return 'uid-' + top.__utils__uid__++;
+    };
+
+    /** Create a time stamp */
+    function time() {
+        var d = new Date();
+        var hours = (d.getHours() < 10 ? "0" : "") + d.getHours();
+        var minutes = (d.getMinutes() < 10 ? "0" : "") + d.getMinutes();
+        return d.getDate() + "/"
+                    + (d.getMonth() + 1)  + "/"
+                    + d.getFullYear() + ", "
+                    + hours + ":"
+                    + minutes;
+    };
+
+    return {
+        cssLoadFile: cssLoadFile,
+        cssGetAttribute: cssGetAttribute,
+        get: get,
+        merge: merge,
+        iframe: iframe,
+        bytesToString: bytesToString,
+        uid: uid,
+        time: time,
+        request: request,
+        sanitize: sanitize,
+        textify: textify,
+        isEmpty: isEmpty,
+        deepeach: deepeach,
+        isJSON: isJSON,
+        clone: clone,
+        linkify: linkify
+    };
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/viz/bbi-data-manager.js b/client/galaxy/scripts/viz/bbi-data-manager.js
new file mode 100644
index 0000000..a4efca6
--- /dev/null
+++ b/client/galaxy/scripts/viz/bbi-data-manager.js
@@ -0,0 +1,67 @@
+define( ["viz/visualization", "libs/bbi/bigwig"],
+        function(visualization, bigwig) {
+
+    /**
+     * Data manager for BBI datasets/files, including BigWig and BigBed.
+     */
+    var BBIDataManager = visualization.GenomeDataManager.extend({
+
+        /**
+         * Load data from server and manage data entries. Adds a Deferred to manager
+         * for region; when data becomes available, replaces Deferred with data.
+         * Returns the Deferred that resolves when data is available.
+         */
+        load_data: function(region, mode, resolution, extra_params) {
+            var deferred = $.Deferred();
+            this.set_data(region, deferred);
+
+            var url = Galaxy.root + 'datasets/' + this.get('dataset').id + '/display',
+                self = this;
+            $.when(bigwig.makeBwg(url)).then(function(bb, err) {
+                $.when(bb.readWigData(region.get("chrom"), region.get("start"), region.get("end"))).then(function(data) {
+                    // Transform data into "bigwig" format for LinePainter. "bigwig" format is an array of 2-element arrays
+                    // where each element is [position, score]; unlike real bigwig format, no gaps are allowed.
+                    var result = [],
+                        prev = { max: Number.MIN_VALUE };
+                    data.forEach(function(d) {
+                        // If there is a gap between prev and d, fill it with an interval with score 0.
+                        // This is necessary for LinePainter to draw correctly.
+                        if (prev.max !== d.min - 1) {
+                            // +1 to start after previous region.
+                            result.push([prev.max + 1, 0]);
+                            // -2 = -1 for converting from 1-based to 0-based coordinates,
+                            //      -1 for ending before current region.
+                            result.push([d.min - 2, 0]);
+                        }
+
+                        // Add data point for entry start. -1 to convert from wiggle
+                        // 1-based coordinates to 0-based browser coordinates.
+                        result.push([d.min - 1, d.score]);
+
+                        // Add data point for entry end:
+                        result.push([d.max, d.score]);
+
+                        prev = d;
+                    });
+
+                    var entry = {
+                            data: result,
+                            region: region,
+                            dataset_type: 'bigwig'
+                        };
+
+                    self.set_data(region, entry);
+                    deferred.resolve(entry);
+                });
+            });
+
+            return deferred;
+        }
+    });
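+
+    // Usage sketch (editor's illustration; the constructor attributes are
+    // assumed from this file -- the manager reads its 'dataset' attribute):
+    //   var manager = new BBIDataManager({ dataset: bigwig_dataset });
+    //   $.when( manager.load_data( region ) ).then( function( entry ) {
+    //       // entry = { data: [ [ position, score ], ... ], region: region, dataset_type: 'bigwig' }
+    //   });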
+
+    return {
+        BBIDataManager: BBIDataManager
+    };
+
+});
diff --git a/client/galaxy/scripts/viz/circster.js b/client/galaxy/scripts/viz/circster.js
new file mode 100644
index 0000000..c994619
--- /dev/null
+++ b/client/galaxy/scripts/viz/circster.js
@@ -0,0 +1,1154 @@
+// load required libraries
+require([
+    'utils/utils'
+], function(mod_utils)
+{
+    // load css
+    mod_utils.cssLoadFile("static/style/circster.css");
+});
+
+define([
+    "libs/underscore",
+    "libs/d3",
+    "viz/visualization",
+    "utils/config",
+    'mvc/ui/icon-button',
+    'libs/farbtastic'
+], function(_, d3, visualization, config, mod_icon_btn) {
+
+/**
+ * Utility class for working with SVG.
+ */
+var SVGUtils = Backbone.Model.extend({
+
+    /**
+     * Returns true if element is visible.
+     */
+    is_visible: function(svg_elt, svg) {
+        var eltBRect = svg_elt.getBoundingClientRect(),
+            svgBRect = $('svg')[0].getBoundingClientRect();
+
+        if (// To the left of screen?
+            eltBRect.right < 0 ||
+            // To the right of screen?
+            eltBRect.left > svgBRect.right ||
+            // Above screen?
+            eltBRect.bottom < 0 ||
+            // Below screen?
+            eltBRect.top > svgBRect.bottom) {
+            return false;
+        }
+        return true;
+    }
+});
+
+/**
+ * Mixin for using ticks.
+ */
+var UsesTicks = {
+    drawTicks: function(parent_elt, data, dataHandler, textTransform, horizontal) {
+        // Set up group elements for chroms and for each tick.
+        var ticks = parent_elt.append("g")
+                        .selectAll("g")
+                            .data(data)
+                        .enter().append("g")
+                        .selectAll("g")
+                            .data(dataHandler)
+                        .enter().append("g")
+                            .attr("class", "tick")
+                            .attr("transform", function(d) {
+                                return "rotate(" + (d.angle * 180 / Math.PI - 90) + ")" +
+                                        "translate(" + d.radius + ",0)";
+                            });
+
+        // Add line + text for ticks.
+        var tick_coords = [],
+            text_coords = [],
+            text_anchor = function(d) {
+                return d.angle > Math.PI ? "end" : null;
+            };
+        if (horizontal) {
+            tick_coords = [0, 0, 0, -4];
+            text_coords = [4, 0, "", ".35em"];
+            text_anchor = null;
+        }
+        else {
+            tick_coords = [1, 0, 4, 0];
+            text_coords = [0, 4, ".35em", ""];
+
+        }
+
+        ticks.append("line")
+             .attr("x1", tick_coords[0])
+             .attr("y1", tick_coords[1])
+             .attr("x2", tick_coords[2])
+             .attr("y1", tick_coords[3])
+             .style("stroke", "#000");
+
+        return ticks.append("text")
+                    .attr("x", text_coords[0])
+                    .attr("y", text_coords[1])
+                    .attr("dx", text_coords[2])
+                    .attr("dy", text_coords[3])
+                    .attr("text-anchor", text_anchor)
+                    .attr("transform", textTransform)
+                    .text(function(d) { return d.label; });
+    },
+
+    /**
+     * Format number for display at a tick.
+     */
+    formatNum: function(num, sigDigits) {
+        // Use default of 2 sig. digits.
+        if (sigDigits === undefined)
+            sigDigits = 2;
+
+        // Verify input number
+        if (num === null)
+            return null;
+
+        // Calculate return value
+        var rval = null;
+        if (Math.abs(num) < 1) {
+            rval = num.toPrecision(sigDigits);
+        }
+        else {
+            // Use round to turn string from toPrecision() back into a number.
+            var roundedNum = Math.round(num.toPrecision(sigDigits));
+
+            // Use abbreviations.
+            num = Math.abs(num);
+            if (num < 1000) {
+                rval = roundedNum;
+            }
+            else if (num < 1000000) {
+                // Use K.
+                rval = Math.round((roundedNum/1000).toPrecision(3)).toFixed(0) + 'K';
+            }
+            else if (num < 1000000000) {
+                // Use M.
+                rval = Math.round((roundedNum/1000000).toPrecision(3)).toFixed(0) + 'M';
+            }
+        }
+
+        return rval;
+    }
+};
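+
+// Examples (editor's illustration; two significant digits by default):
+//   UsesTicks.formatNum( 0.1234 )   -> '0.12'
+//   UsesTicks.formatNum( 12345678 ) -> '12M'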
+
+/**
+ * A label track.
+ */
+var CircsterLabelTrack = Backbone.Model.extend({});
+
+/**
+ * Renders a full circster visualization.
+ */
+var CircsterView = Backbone.View.extend({
+    className: 'circster',
+
+    initialize: function(options) {
+        this.genome = options.genome;
+        this.label_arc_height = 50;
+        this.scale = 1;
+        this.circular_views = null;
+        this.chords_views = null;
+
+        // When tracks added to/removed from model, update view.
+        this.model.get('drawables').on('add', this.add_track, this);
+        this.model.get('drawables').on('remove', this.remove_track, this);
+
+        // When config settings change, update view.
+        var vis_config = this.model.get('config');
+        vis_config.get('arc_dataset_height').on('change:value', this.update_track_bounds, this);
+        vis_config.get('track_gap').on('change:value', this.update_track_bounds, this);
+    },
+
+    // HACKs: using track_type for circular/chord distinction in the functions below for now.
+
+    /**
+     * Returns tracks to be rendered using circular view.
+     */
+    get_circular_tracks: function() {
+        return this.model.get('drawables').filter(function(track) {
+            return track.get('track_type') !== 'DiagonalHeatmapTrack';
+        });
+    },
+
+    /**
+     * Returns tracks to be rendered using chords view.
+     */
+    get_chord_tracks: function() {
+        return this.model.get('drawables').filter(function(track) {
+            return track.get('track_type') === 'DiagonalHeatmapTrack';
+        });
+    },
+
+    /**
+     * Returns a list of circular tracks' radius bounds.
+     */
+    get_tracks_bounds: function() {
+        var circular_tracks = this.get_circular_tracks(),
+            dataset_arc_height = this.model.get('config').get_value('arc_dataset_height'),
+            track_gap = this.model.get('config').get_value('track_gap'),
+            // Subtract 20 to make sure chrom labels are on screen.
+            min_dimension = Math.min(this.$el.width(), this.$el.height()) - 20,
+            // Compute radius start based on model, will be centered
+            // and fit entirely inside element by default.
+            radius_start = min_dimension / 2 -
+                            circular_tracks.length * (dataset_arc_height + track_gap) +
+                            // Add track_gap back in because no gap is needed for last track.
+                            track_gap -
+                            this.label_arc_height,
+
+            // Compute range of track starting radii.
+            tracks_start_radii = d3.range(radius_start, min_dimension / 2, dataset_arc_height + track_gap);
+
+        // Map from track start to bounds.
+        return _.map(tracks_start_radii, function(radius) {
+            return [radius, radius + dataset_arc_height];
+        });
+    },
+
+    /**
+     * Renders circular tracks, chord tracks, and label tracks.
+     */
+    render: function() {
+        var self = this,
+            width = self.$el.width(),
+            height = self.$el.height(),
+            circular_tracks = this.get_circular_tracks(),
+            chords_tracks = this.get_chord_tracks(),
+            total_gap = self.model.get('config').get_value('total_gap'),
+            tracks_bounds = this.get_tracks_bounds(),
+
+            // Set up SVG element.
+            svg = d3.select(self.$el[0])
+              .append("svg")
+                .attr("width", width)
+                .attr("height", height)
+                .attr("pointer-events", "all")
+              // Set up zooming, dragging.
+              .append('svg:g')
+                .call(d3.behavior.zoom().on('zoom', function() {
+                    // Do zoom, drag.
+                    var scale = d3.event.scale;
+                    svg.attr("transform",
+                      "translate(" + d3.event.translate + ")" +
+                      " scale(" + scale + ")");
+
+                    // Propagate scale changes to views.
+                    if (self.scale !== scale) {
+                        // Use timeout to wait for zooming/dragging to stop before rendering more detail.
+                        if (self.zoom_drag_timeout) {
+                            clearTimeout(self.zoom_drag_timeout);
+                        }
+                        self.zoom_drag_timeout = setTimeout(function() {
+                            // Render more detail in tracks' visible elements.
+                            // FIXME: do not do this right now; it is not fully implemented--e.g. data bounds
+                            // are not updated when new data is fetched--and fetching more detailed quantitative
+                            // data is not that useful.
+                            /*
+                            _.each(self.circular_views, function(view) {
+                                view.update_scale(scale);
+                            });
+                            */
+                        }, 400);
+                    }
+                }))
+                .attr("transform", "translate(" + width / 2 + "," + height / 2 + ")")
+              .append('svg:g').attr('class', 'tracks');
+
+        // -- Render circular tracks. --
+
+        // Create a view for each track in the visualization and render.
+        this.circular_views = circular_tracks.map(function(track, index) {
+            var view = new CircsterBigWigTrackView({
+                    el: svg.append('g')[0],
+                    track: track,
+                    radius_bounds: tracks_bounds[index],
+                    genome: self.genome,
+                    total_gap: total_gap
+                });
+
+            view.render();
+
+            return view;
+        });
+
+        // -- Render chords tracks. --
+
+        this.chords_views = chords_tracks.map(function(track) {
+            var view = new CircsterChromInteractionsTrackView({
+                el: svg.append('g')[0],
+                track: track,
+                radius_bounds: tracks_bounds[0],
+                genome: self.genome,
+                total_gap: total_gap
+            });
+
+            view.render();
+
+            return view;
+        });
+
+        // -- Render label track. --
+
+        // Track bounds are:
+        // (a) inner: the outer radius of the last circular track;
+        // (b) outer: that radius plus the label arc height.
+        var outermost_radius = this.circular_views[this.circular_views.length-1].radius_bounds[1],
+            track_bounds = [
+                outermost_radius,
+                outermost_radius + this.label_arc_height
+            ];
+        this.label_track_view = new CircsterChromLabelTrackView({
+            el: svg.append('g')[0],
+            track: new CircsterLabelTrack(),
+            radius_bounds: track_bounds,
+            genome: self.genome,
+            total_gap: total_gap
+        });
+
+        this.label_track_view.render();
+    },
+
+    /**
+     * Render a single track on the outside of the current visualization.
+     */
+    add_track: function(new_track) {
+        var total_gap = this.model.get('config').get_value('total_gap');
+
+        if (new_track.get('track_type') === 'DiagonalHeatmapTrack') {
+            // Added chords track.
+            var innermost_radius_bounds = this.circular_views[0].radius_bounds,
+                new_view = new CircsterChromInteractionsTrackView({
+                    el: d3.select('g.tracks').append('g')[0],
+                    track: new_track,
+                    radius_bounds: innermost_radius_bounds,
+                    genome: this.genome,
+                    total_gap: total_gap
+                });
+                new_view.render();
+                this.chords_views.push(new_view);
+        }
+        else {
+            // Added circular track.
+
+            // Recompute and update circular track bounds.
+            var new_track_bounds = this.get_tracks_bounds();
+            _.each(this.circular_views, function(track_view, i) {
+                track_view.update_radius_bounds(new_track_bounds[i]);
+            });
+
+            // Update chords tracks.
+            _.each(this.chords_views, function(track_view) {
+                track_view.update_radius_bounds(new_track_bounds[0]);
+            });
+
+            // Render new track.
+            var track_index = this.circular_views.length,
+                track_view = new CircsterBigWigTrackView({
+                    el: d3.select('g.tracks').append('g')[0],
+                    track: new_track,
+                    radius_bounds: new_track_bounds[track_index],
+                    genome: this.genome,
+                    total_gap: total_gap
+                });
+            track_view.render();
+            this.circular_views.push(track_view);
+
+            // Update label track.
+            /*
+            FIXME: should never have to update label track because vis always expands to fit area
+            within label track.
+            var track_bounds = new_track_bounds[ new_track_bounds.length-1 ];
+            track_bounds[1] = track_bounds[0];
+            this.label_track_view.update_radius_bounds(track_bounds);
+            */
+        }
+    },
+
+    /**
+     * Remove a track from the view.
+     */
+    remove_track: function(track, tracks, options) {
+        // -- Remove track from view. --
+        var track_view = this.circular_views[options.index];
+        this.circular_views.splice(options.index, 1);
+        track_view.$el.remove();
+
+        // Recompute and update track bounds.
+        var new_track_bounds = this.get_tracks_bounds();
+        _.each(this.circular_views, function(track_view, i) {
+            track_view.update_radius_bounds(new_track_bounds[i]);
+        });
+    },
+
+    update_track_bounds: function() {
+        // Recompute and update track bounds.
+        var new_track_bounds = this.get_tracks_bounds();
+        _.each(this.circular_views, function(track_view, i) {
+            track_view.update_radius_bounds(new_track_bounds[i]);
+        });
+
+        // Update chords tracks.
+        _.each(this.chords_views, function(track_view) {
+            track_view.update_radius_bounds(new_track_bounds[0]);
+        });
+
+    }
+});
+
+/**
+ * Renders a track in a Circster visualization.
+ */
+var CircsterTrackView = Backbone.View.extend({
+    tagName: 'g',
+
+    /* ----------------------- Public Methods ------------------------- */
+
+    initialize: function(options) {
+        this.bg_stroke = '#ddd';
+        // Fill color when loading data.
+        this.loading_bg_fill = '#ffc';
+        // Fill color when data has been loaded.
+        this.bg_fill = '#ddd';
+        this.total_gap = options.total_gap;
+        this.track = options.track;
+        this.radius_bounds = options.radius_bounds;
+        this.genome = options.genome;
+        this.chroms_layout = this._chroms_layout();
+        this.data_bounds = [];
+        this.scale = 1;
+        this.parent_elt = d3.select(this.$el[0]);
+    },
+
+    /**
+     * Get fill color from config.
+     */
+    get_fill_color: function() {
+        var color = this.track.get('config').get_value('block_color');
+        if (!color) { color = this.track.get('config').get_value('color'); }
+        return color;
+    },
+
+    /**
+     * Render track's data by adding SVG elements to parent.
+     */
+    render: function() {
+        // -- Create track group element. --
+        var track_parent_elt = this.parent_elt;
+
+        // -- Render background arcs. --
+        var genome_arcs = this.chroms_layout,
+            arc_gen = d3.svg.arc()
+                        .innerRadius(this.radius_bounds[0])
+                        .outerRadius(this.radius_bounds[1]),
+
+            // Attach data to group element.
+            chroms_elts = track_parent_elt.selectAll('g')
+                .data(genome_arcs).enter().append('svg:g'),
+
+            // Draw chrom arcs/paths.
+            chroms_paths = chroms_elts.append('path')
+                .attr("d", arc_gen)
+                .attr('class', 'chrom-background')
+                .style("stroke", this.bg_stroke)
+                .style("fill",  this.loading_bg_fill);
+
+            // Append titles to paths.
+            chroms_paths.append("title").text(function(d) { return d.data.chrom; });
+
+        // -- Render track data and, when track data is rendered, apply preferences and update chrom_elts fill. --
+
+        var self = this,
+            data_manager = self.track.get('data_manager'),
+            // If track has a data manager, get deferred that resolves when data is ready.
+            data_ready_deferred = (data_manager ? data_manager.data_is_ready() : true );
+
+        // When data is ready, render track.
+        $.when(data_ready_deferred).then(function() {
+            $.when(self._render_data(track_parent_elt)).then(function() {
+                chroms_paths.style("fill", self.bg_fill);
+
+                // Render labels after data is available so that data attributes are available.
+                self.render_labels();
+            });
+        });
+    },
+
+    /**
+     * Render track labels.
+     */
+    render_labels: function() {},
+
+    /**
+     * Update radius bounds.
+     */
+    update_radius_bounds: function(radius_bounds) {
+        // Update bounds.
+        this.radius_bounds = radius_bounds;
+
+        // -- Update background arcs. --
+        var new_d = d3.svg.arc()
+                        .innerRadius(this.radius_bounds[0])
+                        .outerRadius(this.radius_bounds[1]);
+
+        this.parent_elt.selectAll('g>path.chrom-background').transition().duration(1000).attr('d', new_d);
+
+        this._transition_chrom_data();
+
+        this._transition_labels();
+    },
+
+    /**
+     * Update view scale. This fetches more data if scale is increased.
+     */
+    update_scale: function(new_scale) {
+        // -- Update scale and return if new scale is less than old scale. --
+
+        var old_scale = this.scale;
+        this.scale = new_scale;
+        if (new_scale <= old_scale) {
+            return;
+        }
+
+        // -- Scale increased, so render visible data with more detail. --
+
+        var self = this,
+            utils = new SVGUtils();
+
+        // Select all chrom data and filter to operate on those that are visible.
+        this.parent_elt.selectAll('path.chrom-data').filter(function(d, i) {
+            return utils.is_visible(this);
+        }).each(function(d, i) {
+            // -- Now operating on a single path element representing chromosome data. --
+
+            var path_elt = d3.select(this),
+                chrom = path_elt.attr('chrom'),
+                chrom_region = self.genome.get_chrom_region(chrom),
+                data_manager = self.track.get('data_manager'),
+                data_deferred;
+
+            // If can't get more detailed data, return.
+            if (!data_manager.can_get_more_detailed_data(chrom_region)) {
+                return;
+            }
+
+            // -- Get more detailed data. --
+            data_deferred = self.track.get('data_manager').get_more_detailed_data(chrom_region, 'Coverage', 0, new_scale);
+
+            // When more data is available, use new data to redraw path.
+            $.when(data_deferred).then(function(data) {
+                // Remove current data path.
+                path_elt.remove();
+
+                // Update data bounds with new data.
+                self._update_data_bounds();
+
+                // Find chromosome arc to draw data on.
+                var chrom_arc = _.find(self.chroms_layout, function(layout) {
+                        return layout.data.chrom === chrom;
+                });
+
+                // Add new data path and apply preferences.
+                var color = self.get_fill_color();
+                self._render_chrom_data(self.parent_elt, chrom_arc, data).style('stroke', color).style('fill', color);
+            });
+        });
+
+        return self;
+    },
+
+    /* ----------------------- Internal Methods ------------------------- */
+
+    /**
+     * Transitions chrom data to new values (e.g new radius or data bounds).
+     */
+    _transition_chrom_data: function() {
+        var track = this.track,
+            chrom_arcs = this.chroms_layout,
+            chrom_data_paths = this.parent_elt.selectAll('g>path.chrom-data'),
+            num_paths = chrom_data_paths[0].length;
+
+        if (num_paths > 0) {
+            var self = this;
+            $.when(track.get('data_manager').get_genome_wide_data(this.genome)).then(function(genome_wide_data) {
+                // Map chrom data to path data, filtering out null values.
+                var path_data = _.reject( _.map(genome_wide_data, function(chrom_data, i) {
+                    var rval = null,
+                        path_fn = self._get_path_function(chrom_arcs[i], chrom_data);
+                    if (path_fn) {
+                        rval = path_fn(chrom_data.data);
+                    }
+                    return rval;
+                }), function(p_data) { return p_data === null; } );
+
+                // Transition each path for data and color.
+                var color = track.get('config').get_value('color');
+                chrom_data_paths.each(function(path, index) {
+                    d3.select(this).transition().duration(1000)
+                                   .style('stroke', color).style('fill', color)
+                                   .attr('d', path_data[index]);
+                });
+            });
+        }
+    },
+
+    /**
+     * Transition labels to new values (e.g. new radius or data bounds).
+     */
+    _transition_labels: function() {},
+
+    /**
+     * Update data bounds. If there are new_bounds, use them; otherwise use
+     * default data bounds.
+     */
+    _update_data_bounds: function(new_bounds) {
+        var old_bounds = this.data_bounds;
+        this.data_bounds = new_bounds ||
+                           this.get_data_bounds(this.track.get('data_manager').get_genome_wide_data(this.genome));
+        this._transition_chrom_data();
+    },
+
+    /**
+     * Render data as elements attached to svg.
+     */
+    _render_data: function(svg) {
+        var self = this,
+            chrom_arcs = this.chroms_layout,
+            track = this.track,
+            rendered_deferred = $.Deferred();
+
+        // When genome-wide data is available, render data.
+        $.when(track.get('data_manager').get_genome_wide_data(this.genome)).then(function(genome_wide_data) {
+            // Set bounds.
+            self.data_bounds = self.get_data_bounds(genome_wide_data);
+
+            // Set min, max value in config so that they can be adjusted. Make this silent
+            // because these attributes are watched for changes and the viz is updated
+            // accordingly (set up in initialize). Because we are setting up, we don't want
+            // the watch to trigger events here.
+            track.get('config').set_value('min_value', self.data_bounds[0], {silent: true});
+            track.get('config').set_value('max_value', self.data_bounds[1], {silent: true});
+
+            // Merge chroms layout with data.
+            var layout_and_data = _.zip(chrom_arcs, genome_wide_data);
+
+            // Render each chromosome's data.
+            var chroms_data_layout = _.map(layout_and_data, function(chrom_info) {
+                var chrom_arc = chrom_info[0],
+                    data = chrom_info[1];
+                return self._render_chrom_data(svg, chrom_arc, data);
+            });
+
+            // Apply prefs to all track data.
+            var color = self.get_fill_color();
+            self.parent_elt.selectAll('path.chrom-data').style('stroke', color).style('fill', color);
+
+            rendered_deferred.resolve(svg);
+        });
+
+        return rendered_deferred;
+    },
+
+    /**
+     * Render a chromosome's data and attach elements to the svg.
+     */
+    _render_chrom_data: function(svg, chrom_arc, data) {},
+
+    /**
+     * Returns a function for creating a path for the given data using chrom_arc and data bounds.
+     */
+    _get_path_function: function(chrom_arc, chrom_data) {},
+
+    /**
+     * Returns arc layouts for genome's chromosomes/contigs. Arcs are arranged in a circle
+     * separated by gaps.
+     */
+    _chroms_layout: function() {
+        // Setup chroms layout using pie.
+        var chroms_info = this.genome.get_chroms_info(),
+            pie_layout = d3.layout.pie().value(function(d) { return d.len; }).sort(null),
+            init_arcs = pie_layout(chroms_info),
+            gap_per_chrom = 2 * Math.PI * this.total_gap / chroms_info.length,
+            chrom_arcs = _.map(init_arcs, function(arc, index) {
+                // For short chroms, endAngle === startAngle.
+                var new_endAngle = arc.endAngle - gap_per_chrom;
+                arc.endAngle = (new_endAngle > arc.startAngle ? new_endAngle : arc.startAngle);
+                return arc;
+            });
+        return chrom_arcs;
+    }
+});
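+// A worked sketch of _chroms_layout (chrom count is illustrative; total_gap
+// defaults to 0.4 in this file): with total_gap = 0.4 and 24 chroms, each arc
+// is shortened by 2 * Math.PI * 0.4 / 24 ~= 0.105 radians; arcs shorter than
+// the gap collapse to endAngle === startAngle and are effectively hidden.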
+
+/**
+ * Render chromosome labels.
+ */
+var CircsterChromLabelTrackView = CircsterTrackView.extend({
+
+    initialize: function(options) {
+        CircsterTrackView.prototype.initialize.call(this, options);
+        // Use a single arc for rendering data.
+        this.innerRadius = this.radius_bounds[0];
+        this.radius_bounds[0] = this.radius_bounds[1];
+        this.bg_stroke = '#fff';
+        this.bg_fill = '#fff';
+
+        // Minimum arc distance for labels to be applied.
+        this.min_arc_len = 0.05;
+    },
+
+    /**
+     * Render labels.
+     */
+    _render_data: function(svg) {
+        // -- Add chromosome label where it will fit; an alternative labeling mechanism
+        // would be nice for small chromosomes. --
+        var self = this,
+            chrom_arcs = svg.selectAll('g');
+
+        chrom_arcs.selectAll('path')
+            .attr('id', function(d) { return 'label-' + d.data.chrom; });
+
+        chrom_arcs.append("svg:text")
+            .filter(function(d) {
+                return d.endAngle - d.startAngle > self.min_arc_len;
+            })
+            .attr('text-anchor', 'middle')
+          .append("svg:textPath")
+            .attr("class", "chrom-label")
+            .attr("xlink:href", function(d) { return "#label-" + d.data.chrom; })
+            .attr('startOffset', '25%')
+            .text(function(d) {
+                return d.data.chrom;
+            });
+
+        // -- Add ticks to denote chromosome length. --
+
+        /** Returns an array of tick angles and labels, given a chrom arc. */
+        var chromArcTicks = function(d) {
+            var k = (d.endAngle - d.startAngle) / d.value,
+                ticks = d3.range(0, d.value, 25000000).map(function(v, i) {
+                    return {
+                        radius: self.innerRadius,
+                        angle: v * k + d.startAngle,
+                        label: i === 0 ? 0 : (i % 3 ? null : self.formatNum(v))
+                    };
+                });
+
+            // If there are fewer than 4 ticks, label the last tick so that at least one non-zero tick is labeled.
+            if (ticks.length < 4) {
+                ticks[ticks.length-1].label = self.formatNum(
+                    Math.round( ( ticks[ticks.length-1].angle - d.startAngle ) / k )
+                );
+            }
+
+            return ticks;
+        };
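+        // Worked example (illustrative): for a chrom of length 100Mb whose arc
+        // spans [0, 1] radians, k = 1e-8 and ticks fall at 0, 25Mb, 50Mb and
+        // 75Mb (angles 0, 0.25, 0.5, 0.75); only the 0 and 75Mb ticks are labeled.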
+
+        /** Rotate and move text as needed. */
+        var textTransform = function(d) {
+            return d.angle > Math.PI ? "rotate(180)translate(-16)" : null;
+        };
+
+        // Filter chroms for only those large enough for display.
+        var visibleChroms = _.filter(this.chroms_layout, function(c) { return c.endAngle - c.startAngle > self.min_arc_len; });
+
+        this.drawTicks(this.parent_elt, visibleChroms, chromArcTicks, textTransform);
+    }
+});
+_.extend(CircsterChromLabelTrackView.prototype, UsesTicks);
+
+/**
+ * View for quantitative track in Circster.
+ */
+var CircsterQuantitativeTrackView = CircsterTrackView.extend({
+
+    initialize: function(options) {
+        CircsterTrackView.prototype.initialize.call(this, options);
+
+        // When config settings change, update view.
+        var track_config = this.track.get('config');
+        track_config.get('min_value').on('change:value', this._update_min_max, this);
+        track_config.get('max_value').on('change:value', this._update_min_max, this);
+        track_config.get('color').on('change:value', this._transition_chrom_data, this);
+    },
+
+    /**
+     * Update track when min and/or max are changed.
+     */
+    _update_min_max: function() {
+        var track_config = this.track.get('config'),
+            new_bounds = [track_config.get_value('min_value'), track_config.get_value('max_value')];
+        this._update_data_bounds(new_bounds);
+
+        // FIXME: this works to update tick/text bounds, but there's probably a better way to do this
+        // by updating the data itself.
+        this.parent_elt.selectAll('.min_max').text(function(d, i) {
+            return new_bounds[i];
+        });
+    },
+
+    /**
+     * Returns quantile for an array of numbers.
+     */
+    _quantile: function(numbers, quantile) {
+        numbers.sort(d3.ascending);
+        return d3.quantile(numbers, quantile);
+    },
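+    // e.g. this._quantile([5, 1, 3], 0.5) sorts the array to [1, 3, 5] and
+    // returns the median, 3; d3.quantile interpolates linearly when the
+    // requested quantile falls between two positions in the sorted array.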
+
+    /**
+     * Renders quantitative data with the form [x, value], assuming data is equally spaced across
+     * the chromosome. Attaches a dict with track and chrom name information to the DOM element.
+     */
+    _render_chrom_data: function(svg, chrom_arc, chrom_data) {
+        var path_data = this._get_path_function(chrom_arc, chrom_data);
+
+        if (!path_data) { return null; }
+
+        // There is path data, so render as path.
+        var parent = svg.datum(chrom_data.data),
+            path = parent.append('path')
+                         .attr('class', 'chrom-data')
+                         .attr('chrom', chrom_arc.data.chrom)
+                         .attr('d', path_data);
+
+        return path;
+    },
+
+    /**
+     * Returns function for creating a path across the chrom arc.
+     */
+    _get_path_function: function(chrom_arc, chrom_data) {
+        // If no chrom data, return null.
+        if (typeof chrom_data === "string" || !chrom_data.data || chrom_data.data.length === 0) {
+            return null;
+        }
+
+        // Radius scaler.
+        var radius = d3.scale.linear()
+                       .domain(this.data_bounds)
+                       .range(this.radius_bounds)
+                       .clamp(true);
+
+        // Scaler for placing data points across arc.
+        var angle = d3.scale.linear()
+            .domain([0, chrom_data.data.length])
+            .range([chrom_arc.startAngle, chrom_arc.endAngle]);
+
+        // Use line generator to create area.
+        var line = d3.svg.line.radial()
+            .interpolate("linear")
+            .radius(function(d) { return radius(d[1]); })
+            .angle(function(d, i) { return angle(i); });
+
+        return d3.svg.area.radial()
+            .interpolate(line.interpolate())
+            .innerRadius(radius(0))
+            .outerRadius(line.radius())
+            .angle(line.angle());
+    },
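+    // A usage sketch (hypothetical data): each datum is [position, value];
+    // index i maps linearly onto [startAngle, endAngle] and value onto the
+    // radius bounds, so the generated area lies between radius(0) and
+    // radius(d[1]) along the arc, e.g.:
+    //   var area = this._get_path_function(chrom_arc, { data: [[0, 2], [10, 4]] });
+    //   var path_string = area([[0, 2], [10, 4]]);  // SVG 'd' attribute value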
+
+    /**
+     * Render track min, max using ticks.
+     */
+    render_labels: function() {
+        var self = this,
+            textTransform = function() {
+                return "rotate(90)";
+            };
+
+        // FIXME:
+        // (1) using min_max class below is needed for _update_min_max, which could be improved.
+        // (2) showing config on tick click should be replaced by proper track config icon.
+
+        // Draw min, max on first chrom only.
+        var ticks = this.drawTicks(this.parent_elt, [ this.chroms_layout[0] ],
+                                   this._data_bounds_ticks_fn(), textTransform, true)
+                        .classed('min_max', true);
+
+        // Show config when ticks are clicked on.
+        _.each(ticks, function(tick) {
+            $(tick).click(function() {
+                var view = new config.ConfigSettingCollectionView({
+                    collection: self.track.get('config')
+                });
+                view.render_in_modal('Configure Track');
+            });
+        });
+
+        /*
+        // Filter for visible chroms, then for every third chrom so that labels attached to only every
+        // third chrom.
+        var visibleChroms = _.filter(this.chroms_layout, function(c) { return c.endAngle - c.startAngle > 0.08; }),
+            labeledChroms = _.filter(visibleChroms, function(c, i) { return i % 3 === 0; });
+        this.drawTicks(this.parent_elt, labeledChroms, this._data_bounds_ticks_fn(), textTransform, true);
+        */
+    },
+
+    /**
+     * Transition labels to new values (e.g. new radius or data bounds).
+     */
+    _transition_labels: function() {
+        // FIXME: (a) pull out function for getting labeled chroms? and (b) function used in transition below
+        // is copied from UseTicks mixin, so pull out and make generally available.
+
+        // If there are no data bounds, nothing to transition.
+        if (this.data_bounds.length === 0) { return; }
+
+        // Transition labels to new radius bounds.
+        var self = this,
+            visibleChroms = _.filter(this.chroms_layout, function(c) { return c.endAngle - c.startAngle > 0.08; }),
+            labeledChroms = _.filter(visibleChroms, function(c, i) { return i % 3 === 0; }),
+            new_data = _.flatten( _.map(labeledChroms, function(c) {
+                return self._data_bounds_ticks_fn()(c);
+            }));
+        this.parent_elt.selectAll('g.tick').data(new_data).transition().attr("transform", function(d) {
+            return "rotate(" + (d.angle * 180 / Math.PI - 90) + ")" +
+                    "translate(" + d.radius + ",0)";
+        });
+    },
+
+    /**
+     * Get function for locating data bounds ticks.
+     */
+    _data_bounds_ticks_fn: function() {
+        // Closure var.
+        var self = this;
+
+        // Return function for locating ticks based on chrom arc data.
+        return function(d) {
+            // Set up data to display min, max ticks.
+            return [
+                {
+                    radius: self.radius_bounds[0],
+                    angle: d.startAngle,
+                    label: self.formatNum(self.data_bounds[0])
+                },
+                {
+                    radius: self.radius_bounds[1],
+                    angle: d.startAngle,
+                    label: self.formatNum(self.data_bounds[1])
+                }
+            ];
+        };
+    },
+
+    /**
+     * Returns an array with two values denoting the minimum and maximum
+     * values for the track.
+     */
+    get_data_bounds: function(data) {}
+
+});
+_.extend(CircsterQuantitativeTrackView.prototype, UsesTicks);
+
+/**
+ * Bigwig track view in Circster.
+ */
+var CircsterBigWigTrackView = CircsterQuantitativeTrackView.extend({
+
+    get_data_bounds: function(data) {
+        // Set max across the dataset by extracting all values, flattening them into a
+        // single array, and getting the 98% quantile.
+        var values = _.flatten( _.map(data, function(d) {
+            if (d) {
+                // Each data point has the form [position, value], so return all values.
+                return _.map(d.data, function(p) {
+                    // Null is used for a lack of data; resolve null to 0 for comparison.
+                    return parseInt(p[1], 10) || 0;
+                });
+            }
+            else {
+                return 0;
+            }
+        }) );
+
+        // For max, use the 98% quantile in an attempt to avoid very large values. However, this max may be 0
+        // for sparsely populated data, so use the max in that case.
+        return [ _.min(values), this._quantile(values, 0.98) || _.max(values) ];
+    }
+});
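+// Worked example of get_data_bounds (hypothetical data): for
+//   [ { data: [[0, 1], [10, 5]] }, { data: [[0, null]] } ]
+// the flattened values are [1, 5, 0] (null resolves to 0), the min is 0, and
+// the 98% quantile of the sorted values [0, 1, 5] is 1 + 0.96 * 4 = 4.84,
+// so the returned bounds are [0, 4.84].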
+
+/**
+ * Chromosome interactions track view in Circster.
+ */
+var CircsterChromInteractionsTrackView = CircsterTrackView.extend({
+
+    render: function() {
+        var self = this;
+
+        // When data is ready, render track.
+        $.when(self.track.get('data_manager').data_is_ready()).then(function() {
+            // When data has been fetched, render track.
+            $.when(self.track.get('data_manager').get_genome_wide_data(self.genome)).then(function(genome_wide_data) {
+                var chord_data = [],
+                    chroms_info = self.genome.get_chroms_info();
+                // Convert chromosome data into chord data.
+                _.each(genome_wide_data, function(chrom_data, index) {
+                    // Map each interaction into chord data.
+                    var cur_chrom = chroms_info[index].chrom;
+                    var chrom_chord_data = _.map(chrom_data.data, function(datum) {
+                        // Each datum is an interaction/chord.
+                        var source_angle = self._get_region_angle(cur_chrom, datum[1]),
+                            target_angle = self._get_region_angle(datum[3], datum[4]);
+                        return {
+                            source: {
+                                startAngle: source_angle,
+                                endAngle: source_angle + 0.01
+                            },
+                            target: {
+                                startAngle: target_angle,
+                                endAngle: target_angle + 0.01
+                            }
+                        };
+                    });
+
+                    chord_data = chord_data.concat(chrom_chord_data);
+                });
+
+                self.parent_elt.append("g")
+                        .attr("class", "chord")
+                    .selectAll("path")
+                        .data(chord_data)
+                    .enter().append("path")
+                        .style("fill", self.get_fill_color())
+                        .attr("d", d3.svg.chord().radius(self.radius_bounds[0]))
+                        .style("opacity", 1);
+            });
+        });
+    },
+
+    update_radius_bounds: function(radius_bounds) {
+        this.radius_bounds = radius_bounds;
+        this.parent_elt.selectAll("path").transition().attr("d", d3.svg.chord().radius(this.radius_bounds[0]));
+    },
+
+    /**
+     * Returns radians for a genomic position.
+     */
+    _get_region_angle: function(chrom, position) {
+        // Find chrom angle data
+        var chrom_angle_data = _.find(this.chroms_layout, function(chrom_layout) {
+            return chrom_layout.data.chrom === chrom;
+        });
+
+        // Return angle at position.
+        return  chrom_angle_data.endAngle -
+                (
+                    (chrom_angle_data.endAngle - chrom_angle_data.startAngle) *
+                    (chrom_angle_data.data.len - position) / chrom_angle_data.data.len
+                );
+    }
+
+});
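+// Worked example of _get_region_angle (illustrative): for a chrom arc with
+// startAngle 0, endAngle 1 and len 1000, position 250 maps to
+// 1 - 1 * ((1000 - 250) / 1000) = 0.25 radians, a quarter of the way along the arc.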
+
+// circster app loader
+var Circster = Backbone.View.extend(
+{
+    initialize: function ()
+    {
+        // -- Configure visualization --
+        var genome = new visualization.Genome(galaxy_config.app.genome),
+            vis = new visualization.GenomeVisualization(galaxy_config.app.viz_config);
+
+        // Add Circster-specific config options.
+        vis.get('config').add([
+            { key: 'arc_dataset_height', label: 'Arc Dataset Height', type: 'int', value: 25, view: 'circster' },
+            { key: 'track_gap', label: 'Gap Between Tracks', type: 'int', value: 5, view: 'circster' },
+            { key: 'total_gap', label: 'Gap [0-1]', type: 'float', value: 0.4, view: 'circster', hidden: true }
+        ]);
+
+        var viz_view = new CircsterView(
+            {
+                // view pane
+                el                  : $('#center .unified-panel-body'),
+                genome              : genome,
+                model               : vis
+            });
+
+        // Render vizualization
+        viz_view.render();
+
+        // setup title
+        $('#center .unified-panel-header-inner').append(galaxy_config.app.viz_config.title + " " + galaxy_config.app.viz_config.dbkey);
+
+        // setup menu
+        var menu = mod_icon_btn.create_icon_buttons_menu([
+        {
+            icon_class: 'plus-button', title: 'Add tracks', on_click: function()
+            {
+                visualization.select_datasets(Galaxy.root + "visualization/list_current_history_datasets", Galaxy.root + "api/datasets", vis.get('dbkey'), function(tracks)
+                {
+                    vis.add_tracks(tracks);
+                });
+            }
+        },
+        {
+            icon_class: 'gear', title: 'Settings', on_click: function()
+            {
+                var view = new config.ConfigSettingCollectionView({
+                    collection: vis.get('config')
+                });
+                view.render_in_modal('Configure Visualization');
+            }
+        },
+        {
+            icon_class: 'disk--arrow', title: 'Save', on_click: function()
+            {
+                // show saving dialog box
+                Galaxy.modal.show({title: "Saving...", body: "progress" });
+
+                // send to server
+                $.ajax({
+                    url: Galaxy.root + "visualization/save",
+                    type: "POST",
+                    dataType: "json",
+                    data: {
+                        'id'        : vis.get('vis_id'),
+                        'title'     : vis.get('title'),
+                        'dbkey'     : vis.get('dbkey'),
+                        'type'      : 'trackster',
+                        'vis_json'  : JSON.stringify(vis)
+                    }
+                }).success(function(vis_info) {
+                    Galaxy.modal.hide();
+                    vis.set('vis_id', vis_info.vis_id);
+                }).error(function() {
+                    // show dialog
+                    Galaxy.modal.show({
+                        title   : "Could Not Save",
+                        body    : "Could not save visualization. Please try again later.",
+                        buttons : { "Cancel": function() { Galaxy.modal.hide(); } }
+                    });
+                });
+            }
+        },
+        {
+            icon_class: 'cross-circle', title: 'Close', on_click: function()
+            {
+                window.location = Galaxy.root + "visualization/list";
+            }
+        }], { tooltip_config: { placement: 'bottom' } });
+
+        // add menu
+        menu.$el.attr("style", "float: right");
+        $("#center .unified-panel-header-inner").append(menu.$el);
+
+        // manual tooltip config because default gravity is S and cannot be changed
+        $(".menu-button").tooltip( { placement: 'bottom' } );
+    }
+});
+
+// Module exports.
+return {
+    GalaxyApp: Circster
+};
+
+});
diff --git a/client/galaxy/scripts/viz/phyloviz.js b/client/galaxy/scripts/viz/phyloviz.js
new file mode 100644
index 0000000..15a17a0
--- /dev/null
+++ b/client/galaxy/scripts/viz/phyloviz.js
@@ -0,0 +1,1000 @@
+define([
+    'libs/d3',
+    'viz/visualization',
+    'mvc/dataset/data',
+    'mvc/ui/icon-button'
+], function(d3, visualization_mod, data_mod, mod_icon_btn) {
+
+/**
+ * Base class of any menus that takes in user interaction. Contains checking methods.
+ */
+var UserMenuBase = Backbone.View.extend({
+
+    className: 'UserMenuBase',
+
+    /**
+     * Check if an input value is a number and falls within max min.
+     */
+    isAcceptableValue : function ($inputKey, min, max) {
+        //TODO: use better feedback than alert
+        var value = $inputKey.val(),
+            fieldName = $inputKey.attr("displayLabel") || $inputKey.attr("id").replace("phyloViz", "");
+
+        function isNumeric(n) {
+            return !isNaN(parseFloat(n)) && isFinite(n);
+        }
+
+        if (!isNumeric(value)){
+            alert(fieldName + " is not a number!");
+            return false;
+        }
+
+        if ( value > max){
+            alert(fieldName + " is too large.");
+            return false;
+        } else if ( value < min) {
+            alert(fieldName + " is too small.");
+            return false;
+        }
+        return true;
+    },
+
+    /**
+     * Check if any user string input has illegal characters that JSON cannot accept.
+     */
+    hasIllegalJsonCharacters : function($inputKey) {
+        if ($inputKey.val().search(/"|'|\\/) !== -1){
+            alert("Named fields cannot contain these illegal characters: "
+                + "double quote(\"), single guote(\'), or back slash(\\). ");
+            return true;
+        }
+        return false;
+    }
+});
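+// Usage sketch of the checks above ("menu" is an illustrative instance; the
+// element ids appear elsewhere in this file):
+//   menu.isAcceptableValue($('#phyloVizTreeFontSize'), 5, 20)
+// alerts and returns false for non-numeric or out-of-range input, and
+//   menu.hasIllegalJsonCharacters($('#phyloVizSelectedNodeName'))
+// alerts and returns true if the value contains ", ' or \.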
+
+
+/**
+ * -- Custom Layout call for phyloViz to suit the needs of a phylogenetic tree.
+ * -- Specifically: 1) Nodes have a displacement of (evo dist X depth separation) from their parent
+ *                  2) Nodes must appear in order after they have been expanded and contracted
+ */
+function PhyloTreeLayout() {
+
+    var self = this,
+        hierarchy = d3.layout.hierarchy().sort(null).value(null),
+        height = 360, // ! represents both the layout angle and the height of the layout, in px
+        layoutMode = "Linear",
+        leafHeight = 18, // height of each individual leaf node
+        depthSeparation = 200, // separation between nodes of different depth, in px
+        leafIndex = 0, // leaf counter, advanced by the recursive layout call
+        defaultDist = 0.5, // tree defaults to 0.5 dist if no dist is specified
+        maxTextWidth = 50; // maximum length of the text labels
+
+
+    self.leafHeight = function(inputLeafHeight){
+        if (typeof inputLeafHeight === "undefined"){ return leafHeight; }
+        else { leafHeight = inputLeafHeight; return self;}
+    };
+
+    self.layoutMode = function(mode){
+        if (typeof mode === "undefined"){ return layoutMode; }
+        else { layoutMode = mode; return self;}
+    };
+
+    // changes the layout angle of the display, which is really changing the height
+    self.layoutAngle = function(angle) {
+        if (typeof angle === "undefined"){ return height; }
+        // to use default if the user puts in strange values
+        if (isNaN(angle) || angle < 0 || angle > 360) { return self; }
+        else { height = angle; return self;}
+    };
+
+    self.separation = function(dist){   // changes the dist between the nodes of different depth
+        if (typeof dist === "undefined"){ return depthSeparation; }
+        else { depthSeparation = dist; return self;}
+    };
+
+    self.links = function (nodes) {     // uses d3's native method to generate links
+        return d3.layout.tree().links(nodes);
+    };
+
+    // -- Custom method for laying out phylogeny tree in a linear fashion
+    self.nodes = function (d, i) {
+        //TODO: newick and phyloxml return arrays. where should this go (client (here, else), server)?
+        if( toString.call( d ) === '[object Array]' ){
+            // if d is an array, replace it with the first object (newick, phyloxml)
+            d = d[0];
+        }
+        // this call finds the depth of all the nodes; assumes the root is passed in
+        var _nodes = hierarchy.call(self, d, i),
+            nodes = [],
+            maxDepth = 0,
+            numLeaves = 0;
+        //console.debug( JSON.stringify( _nodes, null, 2 ) )
+        window._d = d;
+        window._nodes = _nodes;
+
+        //TODO: remove dbl-touch loop
+        // changing from hierarchy's custom format for data to usable format
+        _nodes.forEach(function (node){
+            maxDepth = node.depth > maxDepth ? node.depth : maxDepth;  //finding max depth of tree
+            nodes.push(node);
+        });
+        // counting the number of leaf nodes and assigning max depth
+        //  to nodes that do not have children to flush all the leaf nodes
+        nodes.forEach(function(node){
+            if ( !node.children )  { //&& !node._children
+                numLeaves += 1;
+                node.depth = maxDepth; // if a leaf has no child it would be assigned max depth
+            }
+        });
+
+        leafHeight = layoutMode === "Circular" ? height / numLeaves : leafHeight;
+        leafIndex = 0;
+        layout(nodes[0], maxDepth, leafHeight, null);
+
+        return nodes;
+    };
+
+
+    /**
+     * -- Function with the side effect of adding x0, y0 to all children; takes in the root as the starting point,
+     *  assuming that the leaf nodes will be sorted in presented order.
+     *          horizontal (y0) is calculated as (evo dist X depth separation) from the parent
+     *          vertical (x0) - if leaf node: find its order among all of the leaf nodes,
+     *                              then multiply by verticalSeparation
+     *                  - if parent node: placed at the midpoint of all of its children nodes
+     * -- The layout will first calculate the y0 field going towards the leaves, and x0 when returning
+     */
+    function layout (node, maxDepth, vertSeparation, parent) {
+        var children = node.children,
+            sumChildVertSeparation = 0;
+
+        // calculation of node's dist from parents, going down.
+        var dist = node.dist || defaultDist;
+        dist = dist > 1 ? 1 : dist;     // We constrain all dist to be at most one
+        node.dist = dist;
+        if (parent !== null){
+            node.y0 = parent.y0 + dist * depthSeparation;
+        } else {    //root node
+            node.y0 = maxTextWidth;
+        }
+
+
+        // if a node has no children, we will treat it as a leaf and start laying it out first
+        if (!children) {
+            node.x0 = leafIndex * vertSeparation;
+            leafIndex += 1;
+        } else {
+            // if it has children, we will visit all its children and calculate its position from its children
+            children.forEach( function (child) {
+                child.parent = node;
+                sumChildVertSeparation += layout(child, maxDepth, vertSeparation, node);
+            });
+            node.x0 = sumChildVertSeparation / children.length;
+        }
+
+        // adding properties to the newly created node
+        node.x = node.x0;
+        node.y = node.y0;
+        return node.x0;
+    }
+    return self;
+}
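+// A minimal usage sketch of the layout above (variable names illustrative):
+//   var tree = new PhyloTreeLayout().layoutMode('Linear')
+//                                   .leafHeight(18).separation(200);
+//   var nodes = tree.nodes(root);   // assigns x0/y0 (and x/y) to every node
+//   var links = tree.links(nodes);  // source/target pairs for rendering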
+
+
+/**
+ * -- PhyloTree Model --
+ */
+var PhyloTree = visualization_mod.Visualization.extend({
+    defaults : {
+        layout: "Linear",
+        separation : 250,    // px dist between nodes of different depth to represent 1 evolutionary unit
+        leafHeight: 18,
+        type : "phyloviz",   // visualization type
+        title : "Title",
+        scaleFactor: 1,
+        translate: [0,0],
+        fontSize: 12,        //fontSize of node label
+        selectedNode : null,
+        nodeAttrChangedTime : 0
+    },
+
+    initialize: function(options) {
+        this.set("dataset", new data_mod.Dataset({
+            id: options.dataset_id
+        }));
+
+    },
+
+    root : {}, // Root has to be its own independent object because it is not part of the viz_config
+
+    /**
+     * Mechanism to expand or contract a single node. Expanded nodes have a children list, while for
+     * contracted nodes the list is stored in _children. Nodes with their children data stored in _children will not
+     * have their children rendered.
+     */
+    toggle : function (d) {
+        if(typeof d === "undefined") {return ;}
+        if (d.children ) {
+            d._children = d.children;
+            d.children = null;
+        } else {
+            d.children = d._children;
+            d._children = null;
+        }
+    },
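+    // e.g. starting from d = { children: [a, b], _children: null }, toggle(d)
+    // leaves { children: null, _children: [a, b] }; a second call restores it.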
+
+    /**
+     *  Contracts the phylotree to a single node by repeatedly calling itself to place all the lists
+     *  of children under _children.
+     */
+    toggleAll : function(d) {
+        if (d.children && d.children.length !== 0) {
+            d.children.forEach(this.toggleAll, this);
+            this.toggle(d);
+        }
+    },
+
+    /**
+     *  Return the data of the tree. Used for preserving state.
+     */
+    getData : function (){
+        return this.root;
+    },
+
+    /**
+     * Overriding the default save mechanism to do some cleaning of circular references in the
+     * phyloTree and to include the phyloTree in the saved json
+     */
+    save: function() {
+        var root = this.root;
+        cleanTree(root);
+        //this.set("root", root);
+
+        function cleanTree(node){
+            // we need to remove parent to delete circular reference
+            delete node.parent;
+
+            // removing unnecessary attributes
+            if (node._selected){ delete node._selected;}
+
+            if (node.children) {
+                node.children.forEach(cleanTree);
+            }
+            if (node._children) {
+                node._children.forEach(cleanTree);
+            }
+        }
+
+        var config  = jQuery.extend(true, {}, this.attributes);
+        config.selectedNode = null;
+
+        show_message("Saving to Galaxy", "progress");
+
+        return $.ajax({
+            url     : this.url(),
+            type    : "POST",
+            dataType: "json",
+            data    : {
+                config  : JSON.stringify( config ),
+                type    : 'phyloviz'
+            },
+            success: function(res){
+                hide_modal();
+            }
+        });
+    }
+});
+
+
+// -- Views --
+/**
+ *  Stores the default variable for setting up the visualization
+ */
+var PhylovizLayoutBase =  Backbone.View.extend({
+    defaults : {
+        nodeRadius : 4.5 // radius of each node in the diagram
+    },
+
+
+    /**
+     *  Common initialization in layouts
+     */
+    stdInit : function (options) {
+
+        var self = this;
+        self.model.on("change:separation change:leafHeight change:fontSize change:nodeAttrChangedTime",
+            self.updateAndRender, self);
+
+        self.vis = options.vis;
+        self.i = 0;
+        self.maxDepth = -1; // stores the max depth of the tree
+
+        self.width = options.width;
+        self.height = options.height;
+    },
+
+
+    /**
+     *  Updates the visualization whenever there are changes in the expansion and contraction of nodes
+     *  AND possibly when the tree is edited.
+     */
+    updateAndRender : function(source) {
+        var self = this;
+        source = source || self.model.root;
+
+        self.renderNodes(source);
+        self.renderLinks(source);
+        self.addTooltips();
+    },
+
+
+    /**
+     * Renders the links for the visualization.
+     */
+    renderLinks : function(source) {
+        var self = this;
+        var diagonal = self.diagonal;
+        var duration = self.duration;
+        var layoutMode = self.layoutMode;
+        var link = self.vis.selectAll("g.completeLink")
+            .data(self.tree.links(self.nodes), function(d) { return d.target.id; });
+
+        var calculateLinePos = function(d) {
+            // position of the source node <=> starting location of the line drawn
+            d.pos0 = d.source.y0 + " " + d.source.x0;
+            // position where the line makes a right angle bend
+            d.pos1 = d.source.y0 + " " + d.target.x0;
+            // point where the horizontal line becomes a dotted line
+            d.pos2 = d.target.y0 + " " + d.target.x0;
+        };
+
+        var linkEnter = link.enter().insert("svg:g","g.node")
+            .attr("class", "completeLink");
+
+        linkEnter.append("svg:path")
+            .attr("class", "link")
+            .attr("d", function(d) {
+                calculateLinePos(d);
+                return "M " + d.pos0  + " L " + d.pos1;
+            });
+
+        var linkUpdate = link.transition().duration(500);
+
+        linkUpdate.select("path.link")
+            .attr("d", function(d) {
+                calculateLinePos(d);
+                return "M " + d.pos0 + " L " + d.pos1 + " L " + d.pos2;
+            });
+
+        var linkExit = link.exit().remove();
+
+    },
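+    // Worked example of the elbow path (illustrative coordinates): for a
+    // source at (x0 = 10, y0 = 100) and a target at (x0 = 40, y0 = 300), the
+    // anchor points are "100 10", "100 40" and "300 40", so the final path is
+    // "M 100 10 L 100 40 L 300 40".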
+
+    // User Interaction methods below
+
+    /**
+     *  Displays the information for editing
+     */
+    selectNode : function(node){
+        var self = this;
+        d3.selectAll("g.node")
+            .classed("selectedHighlight", function(d){
+                if (node.id === d.id){
+                    if(node._selected) { // for de-selecting a node.
+                        delete node._selected;
+                        return false;
+                    } else {
+                        node._selected = true;
+                        return true;
+                    }
+                }
+                return false;
+            });
+
+        self.model.set("selectedNode", node);
+        $("#phyloVizSelectedNodeName").val(node.name);
+        $("#phyloVizSelectedNodeDist").val(node.dist);
+        $("#phyloVizSelectedNodeAnnotation").val(node.annotation || "");
+    },
+
+    /**
+     *  Creates bootstrap tooltips for the visualization. Has to be called repeatedly because of newly
+     *  generated enterNodes
+     */
+    addTooltips : function (){
+        $(".tooltip").remove();      //clean up tooltip, just in case its listeners are removed by d3
+        $(".node")
+            .attr("data-original-title", function(){
+                var d = this.__data__,
+                    annotation = d.annotation || "None" ;
+                return d ? (d.name ? d.name + "<br/>" : "") + "Dist: " + d.dist + " <br/>Annotation: " + annotation: "";
+            })
+            .tooltip({'placement':'top', 'trigger' : 'hover'});
+
+    }
+});
+
+
+/**
+ * Linear layout view of Phyloviz; responsible for rendering the nodes and
+ * calling PhyloTreeLayout to determine the positions of the nodes
+ */
+var PhylovizLinearView =  PhylovizLayoutBase.extend({
+    initialize : function(options){
+        // Default values of linear layout
+        var self = this;
+        self.margins = options.margins;
+        self.layoutMode = "Linear";
+
+        self.stdInit(options);
+
+        self.layout();
+        self.updateAndRender(self.model.root);
+    },
+
+    /**
+     * Creates the basic layout of a linear tree by precalculating fixed values.
+     * One-off calculations are also made here.
+     */
+    layout : function() {
+        var self = this;
+        self.tree = new PhyloTreeLayout().layoutMode("Linear");
+        self.diagonal = d3.svg.diagonal()
+            .projection(function(d) { return [d.y, d.x ]; });
+    },
+
+    /**
+     * Renders the nodes based on the Linear layout.
+     */
+    renderNodes : function (source) {
+        var self = this,
+            fontSize = self.model.get("fontSize") + "px";
+
+        // assigning properties from models
+        self.tree.separation(self.model.get("separation")).leafHeight(self.model.get("leafHeight"));
+
+        var duration = 500,
+            nodes = self.tree.separation(self.model.get("separation")).nodes(self.model.root);
+
+        var node = self.vis.selectAll("g.node")
+            .data(nodes, function(d) {
+                return d.name + d.id || (d.id = ++self.i);
+            });
+
+        // These variables have to be passed into the link update, which is in the base methods
+        self.nodes = nodes;
+        self.duration = duration;
+
+        // ------- D3 ENTRY --------
+        // Enter any new nodes at the parent's previous position.
+        var nodeEnter = node.enter().append("svg:g")
+            .attr("class", "node")
+            .on("dblclick", function(){ d3.event.stopPropagation();    })
+            .on("click", function(d) {
+                if (d3.event.altKey) {
+                    self.selectNode(d);        // display info if alt is pressed
+                } else {
+                    if((!d.children || d.children.length === 0) && !d._children){ return; }  // there is no need to toggle leaves
+                    self.model.toggle(d);   // contract/expand nodes at data level
+                    self.updateAndRender(d);   // re-render the tree
+                }
+            });
+        //TODO: newick and phyloxml return arrays. where should this go (client (here, else), server)?
+        if( toString.call( source ) === '[object Array]' ){
+            // if d is an array, replate with the first object (newick, phyloxml)
+            source = source[0];
+        }
+        nodeEnter.attr("transform", function(d) { return "translate(" + source.y0 + "," + source.x0 + ")"; });
+
+        nodeEnter.append("svg:circle")
+            .attr("r", 1e-6)
+            .style("fill", function(d) { return d._children ? "lightsteelblue" : "#fff"; });
+
+        nodeEnter.append("svg:text")
+            .attr("class", "nodeLabel")
+            .attr("x", function(d) { return d.children || d._children ? -10 : 10; })
+            .attr("dy", ".35em")
+            .attr("text-anchor", function(d) { return d.children || d._children ? "end" : "start"; })
+            .style("fill-opacity", 1e-6);
+
+        // ------- D3 TRANSITION --------
+        // Transition nodes to their new position.
+        var nodeUpdate = node.transition()
+            .duration(duration);
+
+        nodeUpdate.attr("transform", function(d) {
+            return "translate(" + d.y + "," + d.x + ")"; });
+
+        nodeUpdate.select("circle")
+            .attr("r", self.defaults.nodeRadius)
+            .style("fill", function(d) { return d._children ? "lightsteelblue" : "#fff"; });
+
+        nodeUpdate.select("text")
+            .style("fill-opacity", 1)
+            .style("font-size", fontSize)
+            .text(function(d) { return d.name; });
+
+        // ------- D3 EXIT --------
+        // Transition exiting nodes to the parent's new position.
+        var nodeExit = node.exit().transition()
+            .duration(duration)
+            .remove();
+
+        nodeExit.select("circle")
+            .attr("r", 1e-6);
+
+        nodeExit.select("text")
+            .style("fill-opacity", 1e-6);
+
+        // Stash the old positions for transition.
+        nodes.forEach(function(d) {
+            d.x0 = d.x; // we need the x0, y0 for parents with children
+            d.y0 = d.y;
+        });
+    }
+
+});
+
+var PhylovizView = Backbone.View.extend({
+
+    className: 'phyloviz',
+
+    initialize: function(options) {
+        var self = this;
+        // -- Default values of the vis
+        self.MIN_SCALE = 0.05; //for zooming
+        self.MAX_SCALE = 5;
+        self.MAX_DISPLACEMENT = 500;
+        self.margins = [10, 60, 10, 80];
+
+        self.width = $("#PhyloViz").width();
+        self.height = $("#PhyloViz").height();
+        self.radius = self.width;
+        self.data = options.data;
+
+        // -- Events Phyloviz view responses to
+        $(window).resize(function(){
+            self.width = $("#PhyloViz").width();
+            self.height = $("#PhyloViz").height();
+            self.render();
+        });
+
+        // -- Create phyloTree model
+        self.phyloTree = new PhyloTree(options.config);
+        self.phyloTree.root = self.data;
+
+        // -- Set up UI functions of main view
+        self.zoomFunc = d3.behavior.zoom().scaleExtent([self.MIN_SCALE, self.MAX_SCALE]);
+        self.zoomFunc.translate(self.phyloTree.get("translate"));
+        self.zoomFunc.scale(self.phyloTree.get("scaleFactor"));
+
+        // -- set up header buttons, search and settings menu
+        self.navMenu = new HeaderButtons(self);
+        self.settingsMenu = new SettingsMenu({phyloTree : self.phyloTree});
+        self.nodeSelectionView = new NodeSelectionView({phyloTree : self.phyloTree});
+        self.search = new PhyloVizSearch();
+
+        // using setTimeout to call the zoomAndPan function according to the stored attributes in viz_config
+        setTimeout(function(){
+            self.zoomAndPan();
+        }, 1000);
+    },
+
+    render: function(){
+        // -- Creating helper function for vis. --
+        var self = this;
+        $("#PhyloViz").empty();
+
+        // -- Layout viz. --
+        self.mainSVG = d3.select("#PhyloViz").append("svg:svg")
+            .attr("width", self.width)
+            .attr("height", self.height)
+            .attr("pointer-events", "all")
+            .call(self.zoomFunc.on("zoom", function(){
+                self.zoomAndPan();
+            }));
+
+        self.boundingRect = self.mainSVG.append("svg:rect")
+            .attr("class", "boundingRect")
+            .attr("width", self.width)
+            .attr("height", self.height)
+            .attr("stroke", "black")
+            .attr("fill", "white");
+
+        self.vis = self.mainSVG
+            .append("svg:g")
+            .attr("class", "vis");
+
+        self.layoutOptions = {
+            model : self.phyloTree,
+            width : self.width,
+            height : self.height,
+            vis: self.vis,
+            margins: self.margins
+        };
+
+        // -- Creating Title
+        $("#title").text("Phylogenetic Tree from " + self.phyloTree.get("title") + ":");
+
+        // -- Create Linear view instance --
+        var linearView = new PhylovizLinearView(self.layoutOptions);
+    },
+
+    /**
+     * Function to zoom and pan the svg element which the entire tree is contained within
+     * Uses d3.zoom events, and extend them to allow manual updates and keeping states in model
+     */
+    zoomAndPan : function(event){
+        var zoomParams,
+            translateParams;
+        if (typeof event !== "undefined") {
+            zoomParams = event.zoom;
+            translateParams = event.translate;
+        }
+
+        var self = this,
+            scaleFactor = self.zoomFunc.scale(),
+            translationCoor = self.zoomFunc.translate(),
+            zoomStatement = "",
+            translateStatement = "";
+
+        // Do manual scaling.
+        switch (zoomParams) {
+            case "reset":
+                scaleFactor = 1.0;
+                translationCoor = [0,0]; break;
+            case "+":
+                scaleFactor *= 1.1; break;
+            case "-":
+                scaleFactor *= 0.9; break;
+            default:
+                if (typeof zoomParams === "number") {
+                    scaleFactor = zoomParams;
+                } else if (d3.event !== null) {
+                    scaleFactor = d3.event.scale;
+                }
+        }
+        if (scaleFactor < self.MIN_SCALE || scaleFactor > self.MAX_SCALE) { return;}
+        self.zoomFunc.scale(scaleFactor); //update scale Factor
+        zoomStatement = "translate(" +  self.margins[3] + "," + self.margins[0] + ")" +
+            " scale(" + scaleFactor + ")";
+
+        // Do manual translation.
+        if( d3.event !== null) {
+            translateStatement = "translate(" + d3.event.translate + ")";
+        } else {
+            if(typeof translateParams !== "undefined") {
+                var x = translateParams.split(",")[0];
+                var y = translateParams.split(",")[1];
+                if (!isNaN(x) && !isNaN(y)){
+                    translationCoor = [translationCoor[0] + parseFloat(x), translationCoor[1] + parseFloat(y)];
+                }
+            }
+            self.zoomFunc.translate(translationCoor);   // update zoomFunc
+            translateStatement = "translate(" + translationCoor + ")";
+        }
+
+        self.phyloTree.set("scaleFactor", scaleFactor);
+        self.phyloTree.set("translate", translationCoor);
+        //refers to the view that we are actually zooming
+        self.vis.attr("transform", translateStatement + zoomStatement);
+    },
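+    // Usage sketch: zoomAndPan({zoom: '+'}) scales by 1.1x,
+    // zoomAndPan({zoom: 'reset'}) restores scale 1.0 at the origin, and
+    // zoomAndPan({zoom: 0.5, translate: '10,20'}) sets an absolute scale and
+    // pans by (10, 20) px; scales outside [MIN_SCALE, MAX_SCALE] are ignored.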
+
+
+    /**
+     * Primes the Ajax URL to load another Nexus tree
+     */
+    reloadViz : function() {
+        var self = this,
+            treeIndex = $("#phylovizNexSelector :selected").val();
+        $.getJSON(self.phyloTree.get("dataset").url(), {
+                tree_index: treeIndex,
+                data_type: 'raw_data'
+            },
+            function(packedJson){
+                self.data = packedJson.data;
+                self.config = packedJson;
+                self.render();
+            });
+    }
+});
+
+
+var HeaderButtons = Backbone.View.extend({
+
+    initialize : function(phylovizView){
+        var self = this;
+        self.phylovizView = phylovizView;
+
+        // Clean up, in case the class is initialized more than once
+        $("#panelHeaderRightBtns").empty();
+        $("#phyloVizNavBtns").empty();
+        $("#phylovizNexSelector").off();
+
+        self.initNavBtns();
+        self.initRightHeaderBtns();
+
+        // Initialize a tree selector in the case of nexus
+        $("#phylovizNexSelector").off().on("change",  function() {self.phylovizView.reloadViz();}  );
+
+    },
+
+    initRightHeaderBtns : function(){
+        var self = this;
+
+        var rightMenu = mod_icon_btn.create_icon_buttons_menu([
+            { icon_class: 'gear', title: 'PhyloViz Settings', on_click: function(){
+                $("#SettingsMenu").show();
+                self.settingsMenu.updateUI();
+            } },
+            { icon_class: 'disk', title: 'Save visualization', on_click: function() {
+                var nexSelected = $("#phylovizNexSelector option:selected").text();
+                if(nexSelected) {
+                    self.phylovizView.phyloTree.set("title", nexSelected);
+                }
+                self.phylovizView.phyloTree.save();
+            } },
+            { icon_class: 'chevron-expand', title: 'Search / Edit Nodes', on_click: function() {
+                $("#nodeSelectionView").show();
+            } },
+            { icon_class: 'information', title: 'Phyloviz Help', on_click: function() {
+                window.open('https://wiki.galaxyproject.org/Learn/Visualization/PhylogeneticTree');
+                // https://docs.google.com/document/d/1AXFoJgEpxr21H3LICRs3EyMe1B1X_KFPouzIgrCz3zk/edit
+            } }
+        ],
+            {
+                tooltip_config: { placement: 'bottom' }
+            });
+        $("#panelHeaderRightBtns").append(rightMenu.$el);
+    },
+
+    initNavBtns: function() {
+        var self = this,
+            navMenu = mod_icon_btn.create_icon_buttons_menu([
+                { icon_class: 'zoom-in', title: 'Zoom in', on_click: function() {
+                    self.phylovizView.zoomAndPan({ zoom : "+"});
+                } },
+                { icon_class: 'zoom-out', title: 'Zoom out', on_click: function() {
+                    self.phylovizView.zoomAndPan({ zoom : "-"});
+                } },
+                { icon_class: 'arrow-circle', title: 'Reset Zoom/Pan', on_click: function() {
+                    self.phylovizView.zoomAndPan({ zoom : "reset"});
+                } }
+            ],
+                {
+                    tooltip_config: { placement: 'bottom' }
+                });
+        $("#phyloVizNavBtns").append(navMenu.$el);
+    }
+});
+
+
+var SettingsMenu = UserMenuBase.extend({
+
+    className: 'Settings',
+
+    initialize: function(options){
+        // the settings menu needs to interact directly with the phyloviz model, so it gets access to it
+        var self = this;
+        self.phyloTree = options.phyloTree;
+        self.el = $("#SettingsMenu");
+        self.inputs = {
+            separation : $("#phyloVizTreeSeparation"),
+            leafHeight : $("#phyloVizTreeLeafHeight"),
+            fontSize   : $("#phyloVizTreeFontSize")
+        };
+
+        //init all buttons of settings
+        $("#settingsCloseBtn").off().on("click", function() { self.el.hide(); });
+        $("#phylovizResetSettingsBtn").off().on("click", function() { self.resetToDefaults(); });
+        $("#phylovizApplySettingsBtn").off().on("click", function() { self.apply(); });
+    },
+
+    /**
+     * Applying user values to phylotree model.
+     */
+    apply : function(){
+        var self = this;
+        if (!self.isAcceptableValue(self.inputs.separation, 50, 2500) ||
+            !self.isAcceptableValue(self.inputs.leafHeight, 5, 30) ||
+            !self.isAcceptableValue(self.inputs.fontSize, 5, 20)){
+            return;
+        }
+        $.each(self.inputs, function(key, $input){
+            self.phyloTree.set(key, $input.val());
+        });
+    },
+    /**
+     * Called to update the input values to those stored in the model
+     */
+    updateUI : function(){
+        var self = this;
+        $.each(self.inputs, function(key, $input){
+            $input.val(self.phyloTree.get(key));
+        });
+    },
+    /**
+     * Resets the value of the phyloTree model to its default
+     */
+    resetToDefaults : function(){
+        $(".tooltip").remove();      // just in case the tool tip was not removed
+        var self = this;
+        $.each(self.phyloTree.defaults, function(key, value) {
+            self.phyloTree.set(key, value);
+        });
+        self.updateUI();
+    },
+
+    render: function(){
+
+    }
+
+});
+
+
+/**
+ * View for inspecting node properties and editing them
+ */
+var NodeSelectionView = UserMenuBase.extend({
+
+    className: 'Settings',
+
+    initialize : function (options){
+        var self = this;
+        self.el = $("#nodeSelectionView");
+        self.phyloTree = options.phyloTree;
+
+        self.UI = {
+            enableEdit      : $('#phylovizEditNodesCheck'),
+            saveChanges     : $('#phylovizNodeSaveChanges'),
+            cancelChanges   : $("#phylovizNodeCancelChanges"),
+            name            : $("#phyloVizSelectedNodeName"),
+            dist            : $("#phyloVizSelectedNodeDist"),
+            annotation      : $("#phyloVizSelectedNodeAnnotation")
+        };
+
+        // temporarily stores the values in case the user changes their mind
+        self.valuesOfConcern = {
+            name : null,
+            dist : null,
+            annotation : null
+        };
+
+        //init UI buttons
+        $("#nodeSelCloseBtn").off().on("click", function() { self.el.hide(); });
+        self.UI.saveChanges.off().on("click", function(){ self.updateNodes(); });
+        self.UI.cancelChanges.off().on("click", function(){ self.cancelChanges(); });
+
+        (function ($) {
+            // extending jquery fxn for enabling and disabling nodes.
+            $.fn.enable = function (isEnabled) {
+                return $(this).each(function () {
+                    if(isEnabled){
+                        $(this).removeAttr('disabled');
+                    } else {
+                        $(this).attr('disabled', 'disabled');
+                    }
+                });
+            };
+        })(jQuery);
+
+        self.UI.enableEdit.off().on("click", function () {
+            self.toggleUI();
+        });
+    },
+
+    /**
+     * For turning on and off the child elements
+     */
+    toggleUI : function(){
+        var self = this,
+            checked = self.UI.enableEdit.is(':checked');
+
+        if (!checked) { self.cancelChanges(); }
+
+        $.each(self.valuesOfConcern, function(key, value) {
+            self.UI[key].enable(checked);
+        });
+        if(checked){
+            self.UI.saveChanges.show();
+            self.UI.cancelChanges.show();
+        } else {
+            self.UI.saveChanges.hide();
+            self.UI.cancelChanges.hide();
+        }
+
+    },
+
+    /**
+     * Reverting to previous values in case the user changes their mind
+     */
+    cancelChanges : function() {
+        var self = this,
+            node = self.phyloTree.get("selectedNode");
+        if (node){
+            $.each(self.valuesOfConcern, function(key, value) {
+                self.UI[key].val(node[key]);
+            });
+        }
+    },
+
+    /**
+     * Changing the data in the underlying tree with user-specified values
+     */
+    updateNodes : function (){
+        var self = this,
+            node = self.phyloTree.get("selectedNode");
+        if (node){
+            if (!self.isAcceptableValue(self.UI.dist, 0, 1) ||
+                self.hasIllegalJsonCharacters(self.UI.name) ||
+                self.hasIllegalJsonCharacters(self.UI.annotation) ) {
+                return;
+            }
+            $.each(self.valuesOfConcern, function(key, value) {
+                node[key] = self.UI[key].val();
+            });
+            self.phyloTree.set("nodeAttrChangedTime", new Date());
+        } else {
+            alert("No node selected");
+        }
+    }
+});
+
+
+
+/**
+ * Initializes the search panel on phyloviz and handles its user interaction.
+ * It allows the user to search the entire tree based on some qualifier, like dist <= val.
+ */
+var PhyloVizSearch = UserMenuBase.extend({
+    initialize : function () {
+        var self = this;
+
+        $("#phyloVizSearchBtn").on("click", function(){
+            var searchTerm = $("#phyloVizSearchTerm"),
+                searchConditionVal = $("#phyloVizSearchCondition").val().split("-"),
+                attr = searchConditionVal[0],
+                condition = searchConditionVal[1];
+            self.hasIllegalJsonCharacters(searchTerm);
+
+            if (attr === "dist"){
+                self.isAcceptableValue(searchTerm, 0, 1);
+            }
+            self.searchTree(attr, condition, searchTerm.val());
+        });
+    },
+
+    /**
+     * Searches the entire tree and will highlight the nodes that match the condition in green
+     */
+    searchTree : function (attr, condition, val){
+        d3.selectAll("g.node")
+            .classed("searchHighlight", function(d){
+                var attrVal =  d[attr];
+                if (typeof attrVal !== "undefined" && attrVal !== null){
+                    if (attr === "dist"){
+                        switch (condition) {
+                            case "greaterEqual":
+                                return attrVal >= +val;
+                            case "lesserEqual":
+                                return attrVal <= +val;
+                            default:
+                                return false;
+                        }
+
+                    } else if (attr === "name" || attr === "annotation") {
+                        return attrVal.toLowerCase().indexOf(val.toLowerCase()) !== -1;
+                    }
+                }
+            });
+    }
+});
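+// A minimal usage sketch (editor's illustration, not upstream code): given the
+// element ids assumed above, a programmatic search could look like:
+//
+//     var search = new PhyloVizSearch();
+//     search.searchTree("dist", "greaterEqual", "0.5"); // highlight nodes with dist >= 0.5
+//     search.searchTree("name", null, "homo");          // substring match on node names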
+
+return {
+    PhylovizView: PhylovizView
+};
+
+});
diff --git a/client/galaxy/scripts/viz/sweepster.js b/client/galaxy/scripts/viz/sweepster.js
new file mode 100644
index 0000000..a01adaa
--- /dev/null
+++ b/client/galaxy/scripts/viz/sweepster.js
@@ -0,0 +1,952 @@
+/**
+ * Visualization and components for Sweepster, a visualization for exploring a tool's parameter space via
+ * genomic visualization.
+ */
+
+define([
+    "libs/underscore",
+    "libs/d3",
+    "viz/trackster/util",
+    "viz/visualization",
+    "viz/trackster/tracks",
+    "mvc/tool/tools",
+    "mvc/dataset/data",
+    "utils/config",
+    "mvc/ui/icon-button"
+
+], function(_, d3, util, visualization, tracks, tools, data, config, mod_icon_btn) {
+
+/**
+ * A collection of tool input settings. Object is useful for keeping a list of settings
+ * for future use without changing the input's value and for preserving inputs order.
+ */
+var ToolInputsSettings = Backbone.Model.extend({
+    defaults: {
+        inputs: null,
+        values: null
+    }
+});
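+// For illustration (a hedged sketch, not part of the original source): a
+// settings object simply pairs a tool's input collection with a value map,
+// e.g.
+//
+//     var settings = new ToolInputsSettings({
+//         inputs: tool.get('inputs'),           // assumes `tool` is a tools.Tool instance
+//         values: { min_size: 5, max_size: 50 } // hypothetical parameter names
+//     });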
+
+/**
+ * Tree for a tool's parameters.
+ */
+var ToolParameterTree = Backbone.Model.extend({
+    defaults: {
+        tool: null,
+        tree_data: null
+    },
+
+    initialize: function(options) {
+        // Set up tool parameters to work with tree.
+        var self = this;
+        this.get('tool').get('inputs').each(function(input) {
+            // Listen for changes to input's attributes.
+            input.on('change:min change:max change:num_samples', function(input) {
+                if (input.get('in_ptree')) {
+                    self.set_tree_data();
+                }
+            }, self);
+            input.on('change:in_ptree', function(input) {
+                if (input.get('in_ptree')) {
+                    self.add_param(input);
+                }
+                else {
+                    self.remove_param(input);
+                }
+                self.set_tree_data();
+            }, self);
+        });
+
+        // If there is a config, use it.
+        if (options.config) {
+            _.each(options.config, function(input_config) {
+                var input = self.get('tool').get('inputs').find(function(input) {
+                    return input.get('name') === input_config.name;
+                });
+                self.add_param(input);
+                input.set(input_config);
+            });
+        }
+    },
+
+    add_param: function(param) {
+        // If parameter already present, do not add it.
+        if (param.get('ptree_index')) { return; }
+
+        param.set('in_ptree', true);
+        param.set('ptree_index', this.get_tree_params().length);
+    },
+
+    remove_param: function(param) {
+        // Remove param from tree.
+        param.set('in_ptree', false);
+        param.set('ptree_index', null);
+
+        // Update ptree indices for remaining params.
+        _(this.get_tree_params()).each(function(input, index) {
+            // +1 to use 1-based indexing.
+            input.set('ptree_index', index + 1);
+        });
+    },
+
+    /**
+     * Sets tree data using tool's inputs.
+     */
+    set_tree_data: function() {
+        // Get samples for each parameter.
+        var params_samples = _.map(this.get_tree_params(), function(param) {
+                return {
+                    param: param,
+                    samples: param.get_samples()
+                };
+            });
+        var node_id = 0,
+            // Creates tree data recursively.
+            create_tree_data = function(params_samples, index) {
+                var param_samples = params_samples[index],
+                    param = param_samples.param,
+                    param_label = param.get('label'),
+                    settings = param_samples.samples;
+
+                // Create leaves when last parameter setting is reached.
+                if (params_samples.length - 1 === index) {
+                    return _.map(settings, function(setting) {
+                        return {
+                            id: node_id++,
+                            name: setting,
+                            param: param,
+                            value: setting
+                        };
+                    });
+                }
+
+                // Recurse to handle other parameters.
+                return _.map(settings, function(setting) {
+                    return {
+                        id: node_id++,
+                        name: setting,
+                        param: param,
+                        value: setting,
+                        children: create_tree_data(params_samples, index + 1)
+                    };
+                });
+            };
+
+        this.set('tree_data', {
+            name: 'Root',
+            id: node_id++,
+            children: (params_samples.length !== 0 ? create_tree_data(params_samples, 0) : null)
+        });
+    },
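+    // The resulting tree_data is nested roughly as follows (illustrative
+    // sketch; ids are assigned in creation order):
+    //
+    //     { name: 'Root', id: <n>, children: [
+    //         { id: <n>, name: <sample>, param: <input>, value: <sample>, children: [...] },
+    //         ...
+    //     ] }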
+
+    get_tree_params: function() {
+        // Filter and sort parameters to get list in tree.
+        return _(this.get('tool').get('inputs').where( {in_ptree: true} ))
+                 .sortBy( function(input) { return input.get('ptree_index'); } );
+    },
+
+    /**
+     * Returns number of leaves in tree.
+     */
+    get_num_leaves: function() {
+        return this.get_tree_params().reduce(function(memo, param) { return memo * param.get_samples().length; }, 1);
+    },
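+    // Worked example (editor's note): with two tree parameters sampled at 4
+    // and 5 values respectively, get_num_leaves() returns 1 * 4 * 5 = 20.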
+
+    /**
+     * Returns array of ToolInputsSettings objects based on a node and its subtree.
+     */
+    get_node_settings: function(target_node) {
+        // -- Get fixed settings from tool and parent nodes.
+
+        // Start with tool's settings.
+        var fixed_settings = this.get('tool').get_inputs_dict();
+
+        // Get fixed settings using node's parents.
+        var cur_node = target_node.parent;
+        if (cur_node) {
+            while(cur_node.depth !== 0) {
+                fixed_settings[cur_node.param.get('name')] = cur_node.value;
+                cur_node = cur_node.parent;
+            }
+        }
+
+        // Walk subtree starting at clicked node to get full list of settings.
+        var self = this,
+            get_settings = function(node, settings) {
+                // Add setting for this node. Root node does not have a param,
+                // however.
+                if (node.param) {
+                    settings[node.param.get('name')] = node.value;
+                }
+
+                if (!node.children) {
+                    // At leaf node, so return settings.
+                    return new ToolInputsSettings({
+                        inputs: self.get('tool').get('inputs'),
+                        values: settings
+                    });
+                }
+                else {
+                    // At interior node: return list of subtree settings.
+                    return _.flatten( _.map(node.children, function(c) { return get_settings(c, _.clone(settings)); }) );
+                }
+            },
+            all_settings = get_settings(target_node, fixed_settings);
+
+        // If user clicked on leaf, settings is a single dict. Convert to array for simplicity.
+        if (!_.isArray(all_settings)) { all_settings = [ all_settings ]; }
+
+        return all_settings;
+    },
+
+    /**
+     * Returns all nodes connected to a particular node; this includes parents and children of the node.
+     */
+    get_connected_nodes: function(node) {
+        var get_subtree_nodes = function(a_node) {
+            if (!a_node.children) {
+                return a_node;
+            }
+            else {
+                // At interior node: return subtree nodes.
+                return _.flatten( [a_node, _.map(a_node.children, function(c) { return get_subtree_nodes(c); })] );
+            }
+        };
+
+        // Get node's parents.
+        var parents = [],
+            cur_parent = node.parent;
+        while(cur_parent) {
+            parents.push(cur_parent);
+            cur_parent = cur_parent.parent;
+        }
+
+        return _.flatten([parents, get_subtree_nodes(node)]);
+    },
+
+    /**
+     * Returns the leaf that corresponds to a settings collection.
+     */
+    get_leaf: function(settings) {
+        var cur_node = this.get('tree_data'),
+            find_child = function(children) {
+                return _.find(children, function(child) {
+                    return settings[child.param.get('name')] === child.value;
+                });
+            };
+
+        while (cur_node.children) {
+            cur_node = find_child(cur_node.children);
+        }
+        return cur_node;
+    },
+
+    /**
+     * Returns a list of parameters used in tree.
+     */
+    toJSON: function() {
+        // FIXME: returning and jsonifying complete param causes trouble on the server side,
+        // so just use essential attributes for now.
+        return this.get_tree_params().map(function(param) {
+            return {
+                name: param.get('name'),
+                min: param.get('min'),
+                max: param.get('max'),
+                num_samples: param.get('num_samples')
+            };
+        });
+    }
+});
+
+var SweepsterTrack = Backbone.Model.extend({
+    defaults: {
+        track: null,
+        mode: 'Pack',
+        settings: null,
+        regions: null
+    },
+
+    initialize: function(options) {
+        this.set('regions', options.regions);
+        if (options.track) {
+            // FIXME: find a better way to deal with needed URLs:
+            var track_config = _.extend({
+                                    data_url: Galaxy.root + 'dummy1',
+                                    converted_datasets_state_url: Galaxy.root + 'dummy2'
+                                }, options.track);
+            this.set('track', tracks.object_from_template(track_config, {}, null));
+        }
+    },
+
+    same_settings: function(a_track) {
+        var this_settings = this.get('settings'),
+            other_settings = a_track.get('settings');
+        for (var prop in this_settings) {
+            if (!other_settings[prop] ||
+                this_settings[prop] !== other_settings[prop]) {
+                return false;
+            }
+        }
+        return true;
+    },
+
+    toJSON: function() {
+        return {
+            track: this.get('track').to_dict(),
+            settings: this.get('settings'),
+            regions: this.get('regions')
+        };
+    }
+});
+
+var TrackCollection = Backbone.Collection.extend({
+    model: SweepsterTrack
+});
+
+/**
+ * Sweepster visualization model.
+ */
+var SweepsterVisualization = visualization.Visualization.extend({
+    defaults: _.extend({}, visualization.Visualization.prototype.defaults, {
+        dataset: null,
+        tool: null,
+        parameter_tree: null,
+        regions: null,
+        tracks: null,
+        default_mode: 'Pack'
+    }),
+
+    initialize: function(options) {
+        this.set('dataset', new data.Dataset(options.dataset));
+        this.set('tool', new tools.Tool(options.tool));
+        this.set('regions', new visualization.GenomeRegionCollection(options.regions));
+        this.set('tracks', new TrackCollection(options.tracks));
+
+        var tool_with_samplable_inputs = this.get('tool');
+        this.set('tool_with_samplable_inputs', tool_with_samplable_inputs);
+        // Remove complex parameters for now.
+        tool_with_samplable_inputs.remove_inputs( [ 'data', 'hidden_data', 'conditional', 'text' ] );
+
+        this.set('parameter_tree', new ToolParameterTree({
+            tool: tool_with_samplable_inputs,
+            config: options.tree_config
+        }));
+    },
+
+    add_track: function(track) {
+        this.get('tracks').add(track);
+    },
+
+    toJSON: function() {
+        return {
+            id: this.get('id'),
+            title: 'Parameter exploration for dataset \''  + this.get('dataset').get('name') + '\'',
+            type: 'sweepster',
+            dataset_id: this.get('dataset').id,
+            tool_id: this.get('tool').id,
+            regions: this.get('regions').toJSON(),
+            tree_config: this.get('parameter_tree').toJSON(),
+            tracks: this.get('tracks').toJSON()
+        };
+    }
+});
+
+/**
+ * --- Views ---
+ */
+
+/**
+ * Sweepster track view.
+ */
+var SweepsterTrackView = Backbone.View.extend({
+    tagName: 'tr',
+
+    TILE_LEN: 250,
+
+    initialize: function(options) {
+        this.canvas_manager = options.canvas_manager;
+        this.render();
+        this.model.on('change:track change:mode', this.draw_tiles, this);
+    },
+
+    render: function() {
+        // Render settings icon and popup.
+        // TODO: use template.
+        var settings = this.model.get('settings'),
+            values = settings.get('values'),
+            settings_td = $('<td/>').addClass('settings').appendTo(this.$el),
+            settings_div = $('<div/>').addClass('track-info').hide().appendTo(settings_td);
+        settings_div.append( $('<div/>').css('font-weight', 'bold').text('Track Settings') );
+        settings.get('inputs').each(function(input) {
+            settings_div.append( input.get('label') + ': ' + values[input.get('name')] + '<br/>');
+        });
+        var self = this,
+            run_on_dataset_button = $('<button/>').appendTo(settings_div).text('Run on complete dataset').click(function() {
+                settings_div.toggle();
+                self.trigger('run_on_dataset', settings);
+            });
+        var icon_menu = mod_icon_btn.create_icon_buttons_menu([
+            {
+                title: 'Settings',
+                icon_class: 'gear track-settings',
+                on_click: function() {
+                    settings_div.toggle();
+                }
+            },
+            {
+                title: 'Remove',
+                icon_class: 'cross-circle',
+                on_click: function() {
+                    self.$el.remove();
+                    $('.tooltip').remove();
+                    // TODO: remove track from viz collection.
+                }
+            }
+        ]);
+        settings_td.prepend(icon_menu.$el);
+
+        // Render tile placeholders.
+        this.model.get('regions').each(function() {
+            self.$el.append($('<td/>').addClass('tile').html(
+                $('<img/>').attr('src', Galaxy.root + 'images/loading_large_white_bg.gif')
+            ));
+        });
+
+        if (this.model.get('track')) {
+            this.draw_tiles();
+        }
+    },
+
+    /**
+     * Draw tiles for regions.
+     */
+    draw_tiles: function() {
+        var self = this,
+            track = this.model.get('track'),
+            regions = this.model.get('regions'),
+            tile_containers = this.$el.find('td.tile');
+
+        // Do nothing if track is not defined.
+        if (!track) { return; }
+
+        // When data is ready, draw tiles.
+        $.when(track.data_manager.data_is_ready()).then(function(data_ok) {
+            // Draw tile for each region.
+            regions.each(function(region, index) {
+                var resolution = region.length() / self.TILE_LEN,
+                    w_scale = 1/resolution,
+                    mode = self.model.get('mode');
+                $.when(track.data_manager.get_data(region, mode, resolution, {})).then(function(tile_data) {
+                    var canvas = self.canvas_manager.new_canvas();
+                    canvas.width = self.TILE_LEN;
+                    canvas.height = track.get_canvas_height(tile_data, mode, w_scale, canvas.width);
+                    track.draw_tile(tile_data, canvas.getContext('2d'), mode, region, w_scale);
+                    $(tile_containers[index]).empty().append(canvas);
+                });
+            });
+        });
+    }
+});
+
+/**
+ * Tool input (parameter) view that enables both value and sweeping inputs. The view is unusual in
+ * that it augments an existing input form row rather than creating a completely new HTML element.
+ */
+var ToolInputValOrSweepView = Backbone.View.extend({
+
+    // Template for rendering sweep inputs:
+    number_input_template: '<div class="form-row-input sweep">' +
+                           '<input class="min" type="text" size="6" value="<%= min %>"> - ' +
+                           '<input class="max" type="text" size="6" value="<%= max %>">' +
+                           ' samples: <input class="num_samples" type="text" size="1" value="<%= num_samples %>">' +
+                           '</div>',
+
+    select_input_template: '<div class="form-row-input sweep"><%= options %></div>',
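+    // Rendered example (editor's sketch): for an integer parameter with
+    // min=1, max=10, num_samples=4 the number_input_template produces:
+    //
+    //     <div class="form-row-input sweep">
+    //         <input class="min" type="text" size="6" value="1"> -
+    //         <input class="max" type="text" size="6" value="10">
+    //         samples: <input class="num_samples" type="text" size="1" value="4">
+    //     </div>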
+
+    initialize: function(options) {
+        this.$el = options.tool_row;
+        this.render();
+    },
+
+    render: function() {
+        var input = this.model,
+            type = input.get('type'),
+            single_input_row = this.$el.find('.form-row-input'),
+            sweep_inputs_row = null;
+
+        // Update tool inputs as single input changes.
+        single_input_row.find(':input').change(function() {
+            input.set('value', $(this).val());
+        });
+
+        // Add row for parameter sweep inputs.
+        if (input instanceof tools.IntegerToolParameter) {
+            sweep_inputs_row = $( _.template(this.number_input_template)(this.model.toJSON()) );
+        }
+        else if (input instanceof tools.SelectToolParameter) {
+            var options = _.map(this.$el.find('select option'), function(option) {
+                    return $(option).val();
+                }),
+                options_text = options.join(', ');
+            sweep_inputs_row = $( _.template(this.select_input_template)({
+                options: options_text
+            }) );
+        }
+        // Inputs that are neither integer nor select parameters get no sweep row.
+        if (!sweep_inputs_row) { return; }
+        sweep_inputs_row.insertAfter(single_input_row);
+
+        // Add buttons for adding/removing parameter.
+        var self = this,
+            menu = mod_icon_btn.create_icon_buttons_menu([
+            {
+                title: 'Add parameter to tree',
+                icon_class: 'plus-button',
+                on_click: function () {
+                    input.set('in_ptree', true);
+                    single_input_row.hide();
+                    sweep_inputs_row.show();
+                    $(this).hide();
+                    self.$el.find('.icon-button.toggle').show();
+                }
+
+            },
+            {
+                title: 'Remove parameter from tree',
+                icon_class: 'toggle',
+                on_click: function() {
+                    // Remove parameter from tree params where name matches the clicked parameter.
+                    input.set('in_ptree', false);
+                    sweep_inputs_row.hide();
+                    single_input_row.show();
+                    $(this).hide();
+                    self.$el.find('.icon-button.plus-button').show();
+                }
+            }
+            ], {});
+        this.$el.prepend(menu.$el);
+
+        // Show/hide input rows and icons depending on whether parameter is in the tree.
+        if (input.get('in_ptree')) {
+            single_input_row.hide();
+            self.$el.find('.icon-button.plus-button').hide();
+        }
+        else {
+            self.$el.find('.icon-button.toggle').hide();
+            sweep_inputs_row.hide();
+        }
+
+        // Update input's min, max, number of samples as values change.
+        _.each(['min', 'max', 'num_samples'], function(attr) {
+            sweep_inputs_row.find('.' + attr).change(function() {
+                input.set(attr, parseFloat( $(this).val() ));
+            });
+        });
+    }
+});
+
+var ToolParameterTreeDesignView = Backbone.View.extend({
+    className: 'tree-design',
+
+    initialize: function(options) {
+        this.render();
+    },
+
+    render: function() {
+        // Start with tool form view.
+        var tool_form_view = new tools.ToolFormView({
+            model: this.model.get('tool')
+        });
+        tool_form_view.render();
+        this.$el.append(tool_form_view.$el);
+
+        // Set up views for each tool input.
+        var self = this,
+            inputs = self.model.get('tool').get('inputs');
+        this.$el.find('.form-row').not('.form-actions').each(function(i) {
+            var input_view = new ToolInputValOrSweepView({
+                model: inputs.at(i),
+                tool_row: $(this)
+            });
+        });
+    }
+});
+
+/**
+ * Displays and updates parameter tree.
+ */
+var ToolParameterTreeView = Backbone.View.extend({
+    className: 'tool-parameter-tree',
+
+    initialize: function(options) {
+        // When tree data changes, re-render.
+        this.model.on('change:tree_data', this.render, this);
+    },
+
+    render: function() {
+        // Start fresh.
+        this.$el.children().remove();
+
+        var tree_params = this.model.get_tree_params();
+        if (!tree_params.length) {
+            return;
+        }
+
+        // Set width, height based on params and samples.
+        this.width = 100 * (2 + tree_params.length);
+        this.height = 15 * this.model.get_num_leaves();
+
+        var self = this;
+
+        // Layout tree.
+        var cluster = d3.layout.cluster()
+            .size([this.height, this.width - 160]);
+
+        var diagonal = d3.svg.diagonal()
+            .projection(function(d) { return [d.y, d.x]; });
+
+        // Layout nodes.
+        var nodes = cluster.nodes(this.model.get('tree_data'));
+
+        // Setup and add labels for tree levels.
+        var param_depths = _.uniq(_.pluck(nodes, "y"));
+        _.each(tree_params, function(param, index) {
+            var x = param_depths[index+1],
+                center_left = $('#center').position().left;
+            self.$el.append( $('<div>').addClass('label')
+                                       .text(param.get('label'))
+                                       .css('left', x + center_left) );
+        });
+
+        // Set up vis element.
+        var vis = d3.select(this.$el[0])
+          .append("svg")
+            .attr("width", this.width)
+            .attr("height", this.height + 30)
+          .append("g")
+            .attr("transform", "translate(40, 20)");
+
+        // Draw links.
+        var link = vis.selectAll("path.link")
+          .data(cluster.links(nodes))
+        .enter().append("path")
+          .attr("class", "link")
+          .attr("d", diagonal);
+
+        // Draw nodes.
+        var node = vis.selectAll("g.node")
+          .data(nodes)
+        .enter().append("g")
+          .attr("class", "node")
+          .attr("transform", function(d) { return "translate(" + d.y + "," + d.x + ")"; })
+          .on('mouseover', function(a_node) {
+            var connected_node_ids = _.pluck(self.model.get_connected_nodes(a_node), 'id');
+            // TODO: probably can use enter() to do this more easily.
+            node.filter(function(d) {
+                return _.find(connected_node_ids, function(id) { return id === d.id; }) !== undefined;
+            }).style('fill', '#f00');
+          })
+          .on('mouseout', function() {
+            node.style('fill', '#000');
+          });
+
+        node.append("circle")
+          .attr("r", 9);
+
+        node.append("text")
+          .attr("dx", function(d) { return d.children ? -12 : 12; })
+          .attr("dy", 3)
+          .attr("text-anchor", function(d) { return d.children ? "end" : "start"; })
+          .text(function(d) { return d.name; });
+    }
+});
+
+/**
+ * Sweepster visualization view. View requires rendering in 3-panel setup for now.
+ */
+var SweepsterVisualizationView = Backbone.View.extend({
+    className: 'Sweepster',
+
+    helpText:
+        '<div><h4>Getting Started</h4>' +
+        '<ol><li>Create a parameter tree by using the icons next to the tool\'s parameter names to add or remove parameters.' +
+        '<li>Adjust the tree by using parameter inputs to select min, max, and number of samples' +
+        '<li>Run the tool with different settings by clicking on tree nodes' +
+        '</ol></div>',
+
+    initialize: function(options) {
+        this.canvas_manager = new visualization.CanvasManager(this.$el.parents('body'));
+        this.tool_param_tree_view = new ToolParameterTreeView({ model: this.model.get('parameter_tree') });
+        this.track_collection_container = $('<table/>').addClass('tracks');
+
+        // Handle node clicks for tree data.
+        this.model.get('parameter_tree').on('change:tree_data', this.handle_node_clicks, this);
+
+        // Each track must have a view so it has a canvas manager.
+        var self = this;
+        this.model.get('tracks').each(function(track) {
+            track.get('track').view = self;
+        });
+
+        // Set block, reverse strand block colors; these colors will be used for all tracks.
+        this.config = config.ConfigSettingCollection.from_models_and_saved_values(
+            [
+                { key: 'name', label: 'Name', type: 'text', default_value: '' },
+                { key: 'a_color', label: 'A Color', type: 'color', default_value: "#FF0000" },
+                { key: 'c_color', label: 'C Color', type: 'color', default_value: "#00FF00" },
+                { key: 'g_color', label: 'G Color', type: 'color', default_value: "#0000FF" },
+                { key: 't_color', label: 'T Color', type: 'color', default_value: "#FF00FF" },
+                { key: 'n_color', label: 'N Color', type: 'color', default_value: "#AAAAAA" },
+                { key: 'block_color', label: 'Block color', type: 'color' },
+                { key: 'reverse_strand_color', label: 'Antisense strand color', type: 'color' }
+            ], {}
+        );
+    },
+
+    render: function() {
+        // Render tree design view in left panel.
+        var tree_design_view = new ToolParameterTreeDesignView({
+            model: this.model.get('parameter_tree')
+        });
+
+        $('#left').append(tree_design_view.$el);
+
+        // Render track collection container/view in right panel.
+        var self = this,
+            regions = self.model.get('regions'),
+            tr = $('<tr/>').appendTo(this.track_collection_container);
+
+        regions.each(function(region) {
+            tr.append( $('<th>').text(region.toString()) );
+        });
+        tr.children().first().attr('colspan', 2);
+
+        var tracks_div = $('<div>').addClass('tiles');
+        $('#right').append( tracks_div.append(this.track_collection_container) );
+
+        self.model.get('tracks').each(function(track) {
+            self.add_track(track);
+        });
+
+        // -- Render help and tool parameter tree in center panel. --
+
+        // Help includes text and a close button.
+        var help_div = $(this.helpText).addClass('help'),
+            close_button = mod_icon_btn.create_icon_buttons_menu([
+            {
+                title: 'Close',
+                icon_class: 'cross-circle',
+                on_click: function() {
+                    $('.tooltip').remove();
+                    help_div.remove();
+                }
+            }
+            ]);
+
+        help_div.prepend(close_button.$el.css('float', 'right'));
+        $('#center').append(help_div);
+
+        // Parameter tree:
+        this.tool_param_tree_view.render();
+        $('#center').append(this.tool_param_tree_view.$el);
+
+        // Set up handler for tree node clicks.
+        this.handle_node_clicks();
+
+        // Set up visualization menu.
+        var menu = mod_icon_btn.create_icon_buttons_menu(
+            [
+                // Save.
+                /*
+                { icon_class: 'disk--arrow', title: 'Save', on_click: function() {
+                    // Show saving dialog box
+                    show_modal("Saving...", "progress");
+
+                    viz.save().success(function(vis_info) {
+                        hide_modal();
+                        viz.set({
+                            'id': vis_info.vis_id,
+                            'has_changes': false
+                        });
+                    })
+                    .error(function() {
+                        show_modal( "Could Not Save", "Could not save visualization. Please try again later.",
+                                    { "Close" : hide_modal } );
+                    });
+                } },
+                */
+                // Change track modes.
+                {
+                    icon_class: 'chevron-expand',
+                    title: 'Set display mode'
+                },
+                // Close viz.
+                {
+                    icon_class: 'cross-circle',
+                    title: 'Close',
+                    on_click: function() {
+                        window.location = "${h.url_for( controller='visualization', action='list' )}";
+                    }
+                }
+            ],
+            {
+                tooltip_config: {placement: 'bottom'}
+            });
+
+        // Create mode selection popup. Mode selection changes default mode and mode for all tracks.
+        var modes = ['Squish', 'Pack'],
+            mode_mapping = {};
+        _.each(modes, function(mode) {
+            mode_mapping[mode] = function() {
+                self.model.set('default_mode', mode);
+                self.model.get('tracks').each(function(track) {
+                    track.set('mode', mode);
+                });
+            };
+        });
+
+        make_popupmenu(menu.$el.find('.chevron-expand'), mode_mapping);
+
+        menu.$el.attr("style", "float: right");
+        $("#right .unified-panel-header-inner").append(menu.$el);
+    },
+
+    get_base_color: function(base) {
+        return this.config.get_value(base.toLowerCase() + '_color') ||
+               this.config.get_value('n_color');
+    },
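+    // e.g. get_base_color('a') yields the configured 'a_color' ("#FF0000" by
+    // default above), falling back to 'n_color' for unknown bases.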
+
+    run_tool_on_dataset: function(settings) {
+        var tool = this.model.get('tool'),
+            tool_name = tool.get('name'),
+            dataset = this.model.get('dataset');
+        tool.set_input_values(settings.get('values'));
+        $.when(tool.rerun(dataset)).then(function(outputs) {
+            // TODO.
+        });
+
+        show_modal('Running ' + tool_name + ' on complete dataset',
+                       tool_name + ' is running on dataset \'' +
+                       dataset.get('name') + '\'. Outputs are in the dataset\'s history.',
+                       {
+                        'Ok': function() { hide_modal(); }
+                       });
+    },
+
+    /**
+     * Add track to model and view.
+     */
+    add_track: function(pm_track) {
+        var self = this,
+            param_tree = this.model.get('parameter_tree');
+
+        // Add track to model.
+        self.model.add_track(pm_track);
+
+        var track_view = new SweepsterTrackView({
+            model: pm_track,
+            canvas_manager: self.canvas_manager
+        });
+        track_view.on('run_on_dataset', self.run_tool_on_dataset, self);
+        self.track_collection_container.append(track_view.$el);
+        track_view.$el.hover(function() {
+            var settings_leaf = param_tree.get_leaf(pm_track.get('settings').get('values'));
+            var connected_node_ids = _.pluck(param_tree.get_connected_nodes(settings_leaf), 'id');
+
+            // TODO: can do faster with enter?
+            d3.select(self.tool_param_tree_view.$el[0]).selectAll("g.node")
+            .filter(function(d) {
+                return _.find(connected_node_ids, function(id) { return id === d.id; }) !== undefined;
+            }).style('fill', '#f00');
+        },
+        function() {
+            d3.select(self.tool_param_tree_view.$el[0]).selectAll("g.node").style('fill', '#000');
+        });
+        return pm_track;
+    },
+
+    /**
+     * Sets up handling when tree nodes are clicked. When a node is clicked, the tool is run for each of
+     * the settings defined by the node's subtree and tracks are added for each run.
+     */
+    handle_node_clicks: function() {
+        // When node clicked in tree, run tool and add tracks to model.
+        var self = this,
+            param_tree = this.model.get('parameter_tree'),
+            regions = this.model.get('regions'),
+            node = d3.select(this.tool_param_tree_view.$el[0]).selectAll("g.node");
+        node.on("click", function(d, i) {
+            // Get all settings corresponding to node.
+            var tool = self.model.get('tool'),
+                dataset = self.model.get('dataset'),
+                all_settings = param_tree.get_node_settings(d),
+                run_jobs_deferred = $.Deferred();
+
+            // Do not allow 10+ jobs to be run.
+            if (all_settings.length >= 10) {
+                show_modal("Whoa there cowboy!",
+                            "You clicked on a node to try " + self.model.get('tool').get('name') +
+                            " with " + all_settings.length +
+                            " different combinations of settings. You can only run 10 jobs at a time.",
+                            {
+                                "Ok": function() { hide_modal(); run_jobs_deferred.resolve(false); }
+                            });
+            }
+            else {
+                run_jobs_deferred.resolve(true);
+            }
+
+            // Take action when deferred resolves.
+            $.when(run_jobs_deferred).then(function(run_jobs) {
+                if (!run_jobs) { return; }
+
+                // Create and add tracks for each settings group.
+                var new_tracks = _.map(all_settings, function(settings) {
+                    var pm_track = new SweepsterTrack({
+                        settings: settings,
+                        regions: regions,
+                        mode: self.model.get('default_mode')
+                    });
+                    self.add_track(pm_track);
+                    return pm_track;
+                });
+
+                // For each track, run tool using track's settings and update track.
+                _.each(new_tracks, function(pm_track, index) {
+                    setTimeout(function() {
+                        // Set inputs and run tool.
+                        tool.set_input_values(pm_track.get('settings').get('values'));
+                        $.when(tool.rerun(dataset, regions)).then(function(output) {
+                            // HACKish: output is an HDA with track config attribute. To create a track
+                            // that works correctly with Backbone relational, it is necessary to
+                            // use a modified version of the track config.
+                            var dataset = output.first(),
+                                track_config = dataset.get('track_config');
+                            // Set dataset to be the tool's output.
+                            track_config.dataset = dataset;
+                            // Set tool to null so that it is not unpacked; unpacking it messes with
+                            // the tool parameters and parameter tree.
+                            track_config.tool = null;
+
+                            track_config.prefs = self.config.to_key_value_dict();
+
+                            // Create and add track for output dataset.
+                            var track_obj = tracks.object_from_template(track_config, self, null);
+                            track_obj.init_for_tool_data();
+
+                            pm_track.set('track', track_obj);
+                        });
+                    }, index * 10000);
+                });
+            });
+        });
+    }
+});
+
+return {
+    SweepsterVisualization: SweepsterVisualization,
+    SweepsterVisualizationView: SweepsterVisualizationView
+};
+
+});
diff --git a/client/galaxy/scripts/viz/trackster.js b/client/galaxy/scripts/viz/trackster.js
new file mode 100644
index 0000000..2c65f9a
--- /dev/null
+++ b/client/galaxy/scripts/viz/trackster.js
@@ -0,0 +1,663 @@
+/**
+ * Top-level trackster code, used for creating/loading visualizations and user interface elements.
+ */
+
+// global variables
+var ui              = null;
+var view            = null;
+var browser_router  = null;
+
+// load required libraries
+require(
+[
+    // load js libraries
+    'utils/utils',
+    'libs/jquery/jquery.event.drag',
+    'libs/jquery/jquery.event.hover',
+    'libs/jquery/jquery.mousewheel',
+    'libs/jquery/jquery-ui',
+    'libs/jquery/select2',
+    'libs/farbtastic',
+    'libs/jquery/jquery.form',
+    'libs/jquery/jquery.rating',
+    "ui/editable-text",
+], function(mod_utils)
+{
+    // load css
+    mod_utils.cssLoadFile("static/style/jquery.rating.css");
+    mod_utils.cssLoadFile("static/style/autocomplete_tagging.css");
+    mod_utils.cssLoadFile("static/style/jquery-ui/smoothness/jquery-ui.css");
+    mod_utils.cssLoadFile("static/style/library.css");
+    mod_utils.cssLoadFile("static/style/trackster.css");
+});
+
+// trackster viewer
+define([
+    "libs/underscore",
+    "viz/trackster/tracks",
+    "viz/visualization",
+    "mvc/ui/icon-button",
+    "utils/query-string-parsing"
+], function(_, tracks, visualization, mod_icon_btn, query_string) {
+
+/**
+ * Base Object/Model for inheritance.
+ */
+var Base = function() {
+    if( this.initialize ) {
+        this.initialize.apply(this, arguments);
+    }
+};
+Base.extend = Backbone.Model.extend;
+
+/**
+ * User interface controls for trackster
+ */
+var TracksterUI = Base.extend({
+    initialize: function( baseURL ) {
+        this.baseURL = baseURL;
+    },
+
+    /**
+     * Save visualization, returning a Deferred object for the remote call to save.
+     */
+    save_viz: function() {
+        // show dialog
+        Galaxy.modal.show({title: "Saving...", body: "progress" });
+
+        // Save bookmarks.
+        var bookmarks = [];
+        $(".bookmark").each(function() {
+            bookmarks.push({
+                position: $(this).children(".position").text(),
+                annotation: $(this).children(".annotation").text()
+            });
+        });
+
+        // FIXME: give unique IDs to Drawables and save overview as ID.
+        var overview_track_name = (view.overview_drawable ? view.overview_drawable.config.get_value('name') : null),
+            viz_config = {
+                'view': view.to_dict(),
+                'viewport': { 'chrom': view.chrom, 'start': view.low , 'end': view.high, 'overview': overview_track_name },
+                'bookmarks': bookmarks
+            };
+
+        // Make call to save visualization.
+        return $.ajax({
+            url: Galaxy.root + "visualization/save",
+            type: "POST",
+            dataType: "json",
+            data: {
+                'id'        : view.vis_id,
+                'title'     : view.config.get_value('name'),
+                'dbkey'     : view.dbkey,
+                'type'      : 'trackster',
+                'vis_json'  : JSON.stringify(viz_config)
+            }
+        }).success(function(vis_info) {
+            Galaxy.modal.hide();
+            view.vis_id = vis_info.vis_id;
+            view.has_changes = false;
+
+            // Needed to set URL when first saving a visualization.
+            window.history.pushState({}, "", vis_info.url + window.location.hash);
+        }).error(function() {
+            // show dialog
+            Galaxy.modal.show({
+                title   : "Could Not Save",
+                body    : "Could not save visualization. Please try again later.",
+                buttons : { "Cancel": function() { Galaxy.modal.hide(); } }
+            });
+        });
+    },
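+    // The saved vis_json payload has roughly this shape (illustrative sketch
+    // based on the viz_config assembled above):
+    //
+    //     { "view": {...},
+    //       "viewport": { "chrom": "chr1", "start": 0, "end": 1000, "overview": null },
+    //       "bookmarks": [ { "position": "chr1:100-200", "annotation": "..." } ] }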
+
+    /**
+     * Create button menu
+     */
+    createButtonMenu: function() {
+        var self = this,
+            menu = mod_icon_btn.create_icon_buttons_menu([
+            { icon_class: 'plus-button', title: 'Add tracks', on_click: function() {
+                visualization.select_datasets(Galaxy.root + "visualization/list_current_history_datasets", Galaxy.root + "api/datasets", { 'f-dbkey': view.dbkey },
+                function(new_tracks) {
+                    _.each(new_tracks, function(track) {
+                        view.add_drawable( tracks.object_from_template(track, view, view) );
+                    });
+                });
+            } },
+            { icon_class: 'block--plus', title: 'Add group', on_click: function() {
+                view.add_drawable( new tracks.DrawableGroup(view, view, { name: "New Group" }) );
+            } },
+            { icon_class: 'bookmarks', title: 'Bookmarks', on_click: function() {
+                // HACK -- use style to determine if panel is hidden and hide/show accordingly.
+                force_right_panel(($("div#right").css("right") == "0px" ? "hide" : "show"));
+            } },
+            {
+                icon_class: 'globe',
+                title: 'Circster',
+                on_click: function() {
+                    window.location = self.baseURL + 'visualization/circster?id=' + view.vis_id;
+                }
+            },
+            { icon_class: 'disk--arrow', title: 'Save', on_click: function() {
+                self.save_viz();
+            } },
+            {
+                icon_class: 'cross-circle',
+                title: 'Close',
+                on_click: function() {
+                    self.handle_unsaved_changes(view);
+                }
+            }
+        ],
+        {
+            tooltip_config: { placement: 'bottom' }
+        });
+        this.buttonMenu = menu;
+        return menu;
+    },
+
+    /**
+     * Use a popup to select a dataset to create bookmarks from.
+     */
+    add_bookmarks: function() {
+        var self = this,
+            baseURL = this.baseURL;
+
+        // show modal while loading history
+        Galaxy.modal.show({title: "Select dataset for new bookmarks", body: "progress" });
+
+        $.ajax({
+            url: this.baseURL + "/visualization/list_histories",
+            data: { "f-dbkey": view.dbkey },
+            error: function() { alert( "Grid failed" ); },
+            success: function(table_html) {
+
+                // show modal to select bookmarks
+                Galaxy.modal.show(
+                {
+                    title   : "Select dataset for new bookmarks",
+                    body    : table_html,
+                    buttons :
+                    {
+                        "Cancel": function()
+                        {
+                            Galaxy.modal.hide();
+                        },
+
+                        "Insert": function()
+                        {
+                            // Just use the first selected
+                            $('input[name=id]:checked,input[name=ldda_ids]:checked').first().each(function()
+                            {
+                                var data, id = $(this).val();
+                                if ($(this).attr("name") === "id")
+                                    data = { hda_id: id };
+                                else
+                                    data = { ldda_id: id};
+
+                                $.ajax({
+                                    url: baseURL + "/visualization/bookmarks_from_dataset",
+                                    data: data,
+                                    dataType: "json"
+                                }).then( function(data) {
+                                    for( var i = 0; i < data.data.length; i++ ) {
+                                        var row = data.data[i];
+                                        self.add_bookmark( row[0], row[1] );
+                                    }
+                                });
+                            });
+                            Galaxy.modal.hide();
+                        }
+                    }
+                });
+            }
+        });
+    },
+
+    /**
+     * Add bookmark.
+     */
+    add_bookmark: function(position, annotation, editable) {
+        // Create HTML.
+        var bookmarks_container = $("#right .unified-panel-body"),
+            new_bookmark = $("<div/>").addClass("bookmark").appendTo(bookmarks_container);
+
+        var position_div = $("<div/>").addClass("position").appendTo(new_bookmark),
+            position_link = $("<a href=''/>").text(position).appendTo(position_div).click(function() {
+                view.go_to(position);
+                return false;
+            }),
+            annotation_div = $("<div/>").text(annotation).appendTo(new_bookmark);
+
+        // If editable, enable bookmark deletion and annotation editing.
+        if (editable) {
+            var delete_icon_container = $("<div/>").addClass("delete-icon-container").prependTo(new_bookmark).click(function (){
+                    // Remove bookmark.
+                    new_bookmark.slideUp("fast");
+                    new_bookmark.remove();
+                    view.has_changes = true;
+                    return false;
+                }),
+                delete_icon = $("<a href=''/>").addClass("icon-button delete").appendTo(delete_icon_container);
+            annotation_div.make_text_editable({
+                num_rows: 3,
+                use_textarea: true,
+                help_text: "Edit bookmark note"
+            }).addClass("annotation");
+        }
+
+        view.has_changes = true;
+        return new_bookmark;
+    },
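+    // Usage sketch (editor's illustration): create an editable bookmark at a
+    // genomic position string, e.g.
+    //
+    //     ui.add_bookmark("chr1:1000-2000", "region of interest", true);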
+
+    /**
+     * Create a complete Trackster visualization. Returns view.
+     */
+    create_visualization: function(view_config, viewport_config, drawables_config, bookmarks_config, editable) {
+
+        // Create view.
+        var self = this,
+            view = new tracks.TracksterView(_.extend(view_config, {header: false}));
+        view.editor = true;
+        $.when( view.load_chroms_deferred ).then(function(chrom_info) {
+            // Viewport config.
+            if (viewport_config) {
+                var chrom = viewport_config.chrom,
+                    start = viewport_config.start,
+                    end = viewport_config.end,
+                    overview_drawable_name = viewport_config.overview;
+
+                if (chrom && (start !== undefined) && end) {
+                    view.change_chrom(chrom, start, end);
+                }
+                else {
+                    // No valid viewport, so use first chromosome.
+                    view.change_chrom(chrom_info[0].chrom);
+                }
+            }
+            else {
+                // No viewport, so use first chromosome.
+                view.change_chrom(chrom_info[0].chrom);
+            }
+
+            // Add drawables to view.
+            if (drawables_config) {
+                // FIXME: can from_dict() be used to create view and add drawables?
+                for (var i = 0; i < drawables_config.length; i++) {
+                    view.add_drawable( tracks.object_from_template( drawables_config[i], view, view ) );
+                }
+            }
+
+            // Set overview.
+            for (var i = 0; i < view.drawables.length; i++) {
+                if (view.drawables[i].config.get_value('name') === overview_drawable_name) {
+                    view.set_overview(view.drawables[i]);
+                    break;
+                }
+            }
+
+            // Load bookmarks.
+            if (bookmarks_config) {
+                var bookmark;
+                for (var i = 0; i < bookmarks_config.length; i++) {
+                    bookmark = bookmarks_config[i];
+                    self.add_bookmark(bookmark['position'], bookmark['annotation'], editable);
+                }
+            }
+
+            // View has no changes as of yet.
+            view.has_changes = false;
+        });
+
+        // Final initialization.
+        this.set_up_router({view: view});
+
+        return view;
+    },
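+    // Typical call (editor's sketch mirroring view_existing below; the
+    // vis_id and dbkey values are placeholders):
+    //
+    //     view = ui.create_visualization(
+    //         { container: $("#center .unified-panel-body"), name: "My viz",
+    //           vis_id: 123, dbkey: "hg19" },
+    //         viz_config.viewport, viz_config.tracks, viz_config.bookmarks, true);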
+
+    /**
+     * Set up location router to use hashes as track browser locations.
+     */
+    set_up_router : function(options)
+    {
+        new visualization.TrackBrowserRouter(options);
+        Backbone.history.start();
+    },
+
+    /**
+     * Set up keyboard navigation for a visualization.
+     */
+    init_keyboard_nav: function(view) {
+        // Keyboard navigation. Scroll ~7% of height when scrolling up/down.
+        $(document).keyup(function(e) {
+            // Do not navigate if arrow keys used in input element.
+            if ($(e.srcElement).is(':input')) {
+                return;
+            }
+
+            // Key codes: left == 37, up == 38, right == 39, down == 40
+            switch(e.which) {
+                case 37:
+                    view.move_fraction(0.25);
+                    break;
+                case 38:
+                    var change = Math.round(view.viewport_container.height()/15.0);
+                    view.viewport_container.scrollTop( view.viewport_container.scrollTop() - change);
+                    break;
+                case 39:
+                    view.move_fraction(-0.25);
+                    break;
+                case 40:
+                    change = Math.round(view.viewport_container.height()/15.0);
+                    view.viewport_container.scrollTop( view.viewport_container.scrollTop() + change);
+                    break;
+            }
+        });
+    },
+
+    /**
+     * Handle unsaved changes in visualization.
+     */
+    handle_unsaved_changes: function(view) {
+        if (view.has_changes) {
+            var self = this;
+            Galaxy.modal.show({
+                title: "Close visualization",
+                body: "There are unsaved changes to your visualization which will be lost if you do not save them.",
+                buttons: {
+                    "Cancel": function() { Galaxy.modal.hide(); },
+                    "Leave without Saving" : function() {
+                        $(window).off('beforeunload');
+                        window.location = Galaxy.root + 'visualization';
+                    },
+                    "Save" : function() {
+                        $.when(self.save_viz()).then(function() {
+                            window.location = Galaxy.root + 'visualization';
+                        });
+                    }
+                }
+            });
+
+        }
+        else {
+            window.location = Galaxy.root + 'visualization';
+        }
+    }
+
+});
+
+var TracksterView = Backbone.View.extend(
+{
+    // initialize trackster
+    initialize : function ()
+    {
+        // load ui
+        ui = new TracksterUI(Galaxy.root);
+
+        // create button menu
+        ui.createButtonMenu();
+
+        // attach the button menu to the panel header and float it right
+        ui.buttonMenu.$el.attr("style", "float: right");
+
+        // add to center panel
+        $("#center .unified-panel-header-inner").append(ui.buttonMenu.$el);
+
+        // configure right panel
+        $("#right .unified-panel-title").append("Bookmarks");
+        $("#right .unified-panel-icons").append("<a id='add-bookmark-button' class='icon-button menu-button plus-button' href='javascript:void(0);' title='Add bookmark'></a>");
+
+        // resize view when showing/hiding right panel (bookmarks for now).
+        $("#right-border").click(function() { view.resize_window(); });
+
+        // hide right panel
+        force_right_panel("hide");
+
+
+        // check if id is available
+        if (galaxy_config.app.id){
+            this.view_existing();
+        } else if( query_string.get( 'dataset_id' ) ){
+            this.choose_existing_or_new();
+        } else {
+            this.view_new();
+        }
+    },
+
+    choose_existing_or_new : function () {
+        var self = this;
+        var dbkey = query_string.get( 'dbkey' ),
+            listTracksParams = {},
+            dataset_params = {
+                dbkey       : dbkey,
+                dataset_id  : query_string.get( 'dataset_id' ),
+                hda_ldda    : query_string.get( 'hda_ldda' )
+            };
+        if (dbkey){
+            listTracksParams[ 'f-dbkey' ] = dbkey;
+        }
+        $.ajax({
+            url: Galaxy.root + 'visualization/list_tracks?' + $.param( listTracksParams ),
+            dataType: "html",
+            error: function() { alert( "Could not add this dataset to browser." ); },
+
+            success: function(table_html) {
+                console.debug( 'success' );
+                function view_in_saved(){
+                    // Show new modal with saved visualizations.
+                    Galaxy.modal.show({
+                        title : "Add Data to Saved Visualization",
+                        body : table_html,
+                        buttons : {
+                            "Cancel": function() {
+                                // go back to the first choice
+                                show_choice();
+                            },
+                            "Add to visualization": function() {
+                                $(parent.document).find('input[name=id]:checked').each(function() {
+                                    var vis_id = $(this).val();
+                                    dataset_params.id = vis_id;
+                                    window.location = Galaxy.root + "visualization/trackster?" + $.param(dataset_params);
+                                });
+                            }
+                        }
+                    });
+                }
+                function show_choice(){
+                    Galaxy.modal.show({
+                        title : "View Data in a New or Saved Visualization?",
+                        // either have text in here or have to remove body and the header/footer margins
+                        body  : (
+                            // TODO: yay. inline styles. thanks, galaxy
+                            '<p>You can add this dataset as:</p>' +
+                            '<ul style="list-style: disc inside none">' +
+                                "<li>a new track to one of your existing, saved Trackster " +
+                                "sessions if they share the genome build: <b>" + dbkey + "</b></li>" +
+                                "<li>or create a new session with this dataset as the only track</li>" +
+                            "</ul>"
+                        ),
+                        buttons : {
+                            "Cancel": function() {
+                                window.location = Galaxy.root + "visualization/list";
+                            },
+                            "View in saved visualization": function() {
+                                view_in_saved();
+                            },
+                            "View in new visualization": function() {
+                                self.view_new();
+                            }
+                        }
+                    });
+                }
+                show_choice();
+            }
+        });
+    },
+
+    // view
+    view_existing : function ()
+    {
+        // get config
+        var viz_config = galaxy_config.app.viz_config;
+
+        // view
+        view = ui.create_visualization(
+        {
+            container: $("#center .unified-panel-body"),
+            name: viz_config.title,
+            vis_id: viz_config.vis_id,
+            dbkey: viz_config.dbkey
+        }, viz_config.viewport, viz_config.tracks, viz_config.bookmarks, true);
+
+        // initialize editor
+        this.init_editor();
+    },
+
+    // view
+    view_new : function ()
+    {
+        // availability of default database key
+        /*if (galaxy_config.app.default_dbkey !== undefined)
+        {
+            this.create_browser("Unnamed", galaxy_config.app.default_dbkey);
+            return;
+        }*/
+
+        // reference this
+        var self = this;
+
+        // ajax
+        $.ajax(
+        {
+            url: Galaxy.root + "api/genomes?chrom_info=True",
+            data: {},
+            error: function() { alert( "Couldn't create new browser." ); },
+            success: function(response)
+            {
+                // show dialog
+                Galaxy.modal.show({
+                    title   : "New Visualization",
+                    body    : self.template_view_new(response),
+                    buttons : {
+                        "Cancel": function() { window.location = Galaxy.root + "visualization/list"; },
+                        "Create": function() { self.create_browser($("#new-title").val(), $("#new-dbkey").val()); Galaxy.modal.hide(); }
+                    }
+                });
+
+                // select default
+                var dbkeys_in_genomes = response.map( function( r ){ return r[1]; });
+                if (galaxy_config.app.default_dbkey && _.contains(dbkeys_in_genomes,galaxy_config.app.default_dbkey)) {
+                    $("#new-dbkey").val( galaxy_config.app.default_dbkey );
+                }
+
+                // change focus
+                $("#new-title").focus();
+                $("select[name='dbkey']").select2();
+
+                // to support the large number of options for dbkey, enable scrolling in overlay.
+                $("#overlay").css("overflow", "auto");
+            }
+        });
+    },
+
+    // new browser form
+    template_view_new: function(response)
+    {
+        // start template
+        var html =  '<form id="new-browser-form" action="javascript:void(0);" method="post" onsubmit="return false;">' +
+                        '<div class="form-row">' +
+                            '<label for="new-title">Browser name:</label>' +
+                            '<div class="form-row-input">' +
+                                '<input type="text" name="title" id="new-title" value="Unnamed"></input>' +
+                            '</div>' +
+                            '<div style="clear: both;"></div>' +
+                        '</div>' +
+                        '<div class="form-row">' +
+                            '<label for="new-dbkey">Reference genome build (dbkey): </label>' +
+                            '<div class="form-row-input">' +
+                                '<select name="dbkey" id="new-dbkey">';
+
+        // add dbkeys
+        for (var i = 0; i < response.length; i++) {
+            html += '<option value="' + response[i][1] + '">' + response[i][0] + '</option>';
+        }
+
+        // close selection/finalize template
+        html +=                 '</select>' +
+                            '</div>' +
+                            '<div style="clear: both;"></div>' +
+                        '</div>' +
+                        '<div class="form-row">' +
+                            'Is the build not listed here? ' +
+                            '<a href="' + Galaxy.root + 'user/dbkeys?use_panels=True">Add a Custom Build</a>' +
+                        '</div>' +
+                    '</form>';
+
+        // return
+        return html;
+    },
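+
+    // Illustrative response shape from api/genomes, as consumed above: an array
+    // of [display name, dbkey] pairs (the entries shown are examples, not from
+    // the source):
+    //   [ ["Human Feb. 2009 (GRCh37/hg19) (hg19)", "hg19"],
+    //     ["Mouse Dec. 2011 (GRCm38/mm10) (mm10)", "mm10"] ]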
+
+    // create
+    create_browser : function(name, dbkey)
+    {
+        $(document).trigger("convert_to_values");
+
+        view = ui.create_visualization (
+        {
+            container: $("#center .unified-panel-body"),
+            name: name,
+            dbkey: dbkey
+        }, galaxy_config.app.gene_region);
+
+        // initialize editor
+        this.init_editor();
+
+        // modify view setting
+        view.editor = true;
+    },
+
+    // initialization for editor-specific functions.
+    init_editor : function ()
+    {
+        // set title
+        $("#center .unified-panel-title").text(view.config.get_value('name') + " (" + view.dbkey + ")");
+
+        // add dataset
+        if (galaxy_config.app.add_dataset)
+            $.ajax({
+                url: Galaxy.root + "api/datasets/" + galaxy_config.app.add_dataset,
+                data: { hda_ldda: 'hda', data_type: 'track_config' },
+                dataType: "json",
+                success: function(track_data) { view.add_drawable( tracks.object_from_template(track_data, view, view) ); }
+            });
+
+        // initialize icons
+        $("#add-bookmark-button").click(function()
+        {
+            // add new bookmark.
+            var position = view.chrom + ":" + view.low + "-" + view.high,
+                annotation = "Bookmark description";
+            return ui.add_bookmark(position, annotation, true);
+        });
+
+        // initialize keyboard
+        ui.init_keyboard_nav(view);
+
+        $(window).on('beforeunload', function() {
+            if (view.has_changes) {
+                return "There are unsaved changes to your visualization that will be lost if you leave this page.";
+            }
+        });
+    }
+});
+
+return {
+    TracksterUI: TracksterUI,
+    GalaxyApp : TracksterView
+};
+
+});
diff --git a/client/galaxy/scripts/viz/trackster/filters.js b/client/galaxy/scripts/viz/trackster/filters.js
new file mode 100644
index 0000000..9c619de
--- /dev/null
+++ b/client/galaxy/scripts/viz/trackster/filters.js
@@ -0,0 +1,628 @@
+define( ["libs/underscore"], function(_) {
+
+var extend = _.extend;
+
+/**
+ * Filters that enable users to show/hide data points dynamically.
+ */
+var Filter = function(obj_dict) {
+    this.manager = null;
+    this.name = obj_dict.name;
+    // Index into payload to filter.
+    this.index = obj_dict.index;
+    this.tool_id = obj_dict.tool_id;
+    // Name to use for filter when building expression for tool.
+    this.tool_exp_name = obj_dict.tool_exp_name;
+};
+
+extend(Filter.prototype, {
+    /**
+     * Convert filter to dictionary.
+     */
+    to_dict: function() {
+        return {
+            name: this.name,
+            index: this.index,
+            tool_id: this.tool_id,
+            tool_exp_name: this.tool_exp_name
+        };
+    } 
+});
+
+/**
+ * Creates an action icon.
+ */
+var create_action_icon =  function(title, css_class, on_click_fn) {
+    return $("<a/>").attr("href", "javascript:void(0);").attr("title", title)
+                    .addClass("icon-button").addClass(css_class).tooltip()
+                    .click(on_click_fn);
+};
+
+/**
+ * Number filters have a min and max as well as a low and high; min/max bound the
+ * slider, while low and high are the current values used to filter elements.
+ */
+var NumberFilter = function(obj_dict) {
+    //
+    // Attribute init.
+    //
+    Filter.call(this, obj_dict);
+    // Filter low/high. These values are used to filter elements.
+    this.low = ('low' in obj_dict ? obj_dict.low : -Number.MAX_VALUE);
+    this.high = ('high' in obj_dict ? obj_dict.high : Number.MAX_VALUE);
+    // Slide min/max. These values are used to set/update slider.
+    this.min = ('min' in obj_dict ? obj_dict.min : Number.MAX_VALUE);
+    this.max = ('max' in obj_dict ? obj_dict.max : -Number.MAX_VALUE);
+    // UI elements associated with filter.
+    this.container = null;
+    this.slider = null;
+    this.slider_label = null;
+    
+    //
+    // Create HTML.
+    //
+    
+    // Function that supports inline text editing of slider values.
+    // Enable users to edit parameter's value via a text box.
+    var edit_slider_values = function(container, span, slider) {
+        container.click(function() {
+            var cur_value = span.text(),
+                max = parseFloat(slider.slider("option", "max")),
+                input_size = (max <= 1 ? 4 : max <= 1000000 ? max.toString().length : 6),
+                multi_value = false,
+                slider_row = $(this).parents(".slider-row");
+                
+            // Row now has input.
+            slider_row.addClass("input");
+                
+            // Increase input size if there are two values.
+            if (slider.slider("option", "values")) {
+                input_size = 2*input_size + 1;
+                multi_value = true;
+            }
+            span.text("");
+            // Temporary input for changing value.
+            $("<input type='text'/>").attr("size", input_size).attr("maxlength", input_size)
+                                     .attr("value", cur_value).appendTo(span).focus().select()
+                                     .click(function(e) {
+                // Don't want click to propagate up to values_span and restart everything.
+                e.stopPropagation();
+            }).blur(function() {
+                $(this).remove();
+                span.text(cur_value);
+                slider_row.removeClass("input");
+            }).keyup(function(e) {
+                if (e.keyCode === 27) {
+                    // Escape key.
+                    $(this).trigger("blur");
+                } else if (e.keyCode === 13) {
+                    //
+                    // Enter/return key initiates callback. If new value(s) are in slider range, 
+                    // change value (which calls slider's change() function).
+                    //
+                    var slider_min = slider.slider("option", "min"),
+                        slider_max = slider.slider("option", "max"),
+                        invalid = function(a_val) {
+                            return (isNaN(a_val) || a_val > slider_max || a_val < slider_min);
+                        },
+                        new_value = $(this).val();
+                    if (!multi_value) {
+                        new_value = parseFloat(new_value);
+                        if (invalid(new_value)) {
+                            alert("Parameter value must be in the range [" + slider_min + "-" + slider_max + "]");
+                            return $(this);
+                        }
+                    }
+                    else { // Multi value.
+                        new_value = new_value.split("-");
+                        new_value = [parseFloat(new_value[0]), parseFloat(new_value[1])];
+                        if (invalid(new_value[0]) || invalid(new_value[1])) {
+                            alert("Parameter value must be in the range [" + slider_min + "-" + slider_max + "]");
+                            return $(this);
+                        }
+                    }
+                    
+                    // Updating the slider also updates slider values and removes input. 
+                    slider.slider((multi_value ? "values" : "value"), new_value);
+                    slider_row.removeClass("input");
+                }
+            });
+        });
+    };
+    
+    var filter = this;
+    
+    filter.parent_div = $("<div/>").addClass("filter-row slider-row");
+    
+    // Set up filter label (name, values).
+    var filter_label = $("<div/>").addClass("elt-label").appendTo(filter.parent_div),
+        name_span = $("<span/>").addClass("slider-name").text(filter.name + "  ").appendTo(filter_label),
+        values_span = $("<span/>").text(this.low + "-" + this.high),
+        values_span_container = $("<span/>").addClass("slider-value").appendTo(filter_label).append("[").append(values_span).append("]");
+    filter.values_span = values_span;
+            
+    // Set up slider for filter.
+    var slider_div = $("<div/>").addClass("slider").appendTo(filter.parent_div);
+    filter.control_element = $("<div/>").attr("id", filter.name + "-filter-control").appendTo(slider_div);
+    filter.control_element.slider({
+        range: true,
+        min: this.min,
+        max: this.max,
+        step: this.get_slider_step(this.min, this.max),
+        values: [this.low, this.high],
+        slide: function(event, ui) { 
+            filter.slide(event, ui); 
+        },
+        change: function(event, ui) {
+            filter.control_element.slider("option", "slide").call(filter.control_element, event, ui);
+        }
+    });
+    filter.slider = filter.control_element;
+    filter.slider_label = values_span;
+    
+    // Enable users to edit slider values via text box.
+    edit_slider_values(values_span_container, values_span, filter.control_element);
+    
+    // Set up filter display controls.
+    var display_controls_div = $("<div/>").addClass("display-controls").appendTo(filter.parent_div);
+    this.transparency_icon = create_action_icon("Use filter for data transparency", "layer-transparent", 
+                                                function() {
+                                                    if (filter.manager.alpha_filter !== filter) {
+                                                        // Setting this filter as the alpha filter.
+                                                        filter.manager.alpha_filter = filter;
+                                                        // Update UI for new filter.
+                                                        filter.manager.parent_div.find(".layer-transparent").removeClass("active").hide();
+                                                        filter.transparency_icon.addClass("active").show();
+                                                    }
+                                                    else {
+                                                        // Clearing filter as alpha filter.
+                                                        filter.manager.alpha_filter = null;
+                                                        filter.transparency_icon.removeClass("active");
+                                                    }
+                                                    filter.manager.track.request_draw({ force: true, clear_after: true });
+                                                } )
+                                                .appendTo(display_controls_div).hide();
+    this.height_icon = create_action_icon("Use filter for data height", "arrow-resize-090", 
+                                                function() {
+                                                    if (filter.manager.height_filter !== filter) {
+                                                        // Setting this filter as the height filter.
+                                                        filter.manager.height_filter = filter;
+                                                        // Update UI for new filter.
+                                                        filter.manager.parent_div.find(".arrow-resize-090").removeClass("active").hide();
+                                                        filter.height_icon.addClass("active").show();
+                                                    }
+                                                    else {
+                                                        // Clearing filter as height filter.
+                                                        filter.manager.height_filter = null;
+                                                        filter.height_icon.removeClass("active");
+                                                    }
+                                                    filter.manager.track.request_draw({ force: true, clear_after: true });
+                                                } )
+                                                .appendTo(display_controls_div).hide();
+    filter.parent_div.hover( function() { 
+                                filter.transparency_icon.show();
+                                filter.height_icon.show(); 
+                            },
+                            function() {
+                                if (filter.manager.alpha_filter !== filter) {
+                                    filter.transparency_icon.hide();
+                                }
+                                if (filter.manager.height_filter !== filter) {
+                                    filter.height_icon.hide();
+                                }
+                            } );
+    
+    // Add to clear floating layout.
+    $("<div style='clear: both;'/>").appendTo(filter.parent_div);
+};
+extend(NumberFilter.prototype, {
+    /**
+     * Convert filter to dictionary.
+     */
+    to_dict: function() {
+        var obj_dict = Filter.prototype.to_dict.call(this);
+        return extend(obj_dict, {
+            type: 'number',
+            min: this.min,
+            max: this.max,
+            low: this.low,
+            high: this.high
+        });
+    },
+    /**
+     * Return a copy of filter.
+     */
+    copy: function() {
+        return new NumberFilter( 
+            {
+                name: this.name, 
+                index: this.index, 
+                tool_id: this.tool_id, 
+                tool_exp_name: this.tool_exp_name
+            });
+    },
+    /**
+     * Get step for slider.
+     */
+    // FIXME: make this a "static" function.
+    get_slider_step: function(min, max) {
+        var range = max - min;
+        return (range <= 2 ? 0.01 : 1);
+    },
+    /**
+     * Handle slide events.
+     */
+    slide: function(event, ui) {
+        var values = ui.values;
+
+        // Set new values in UI.
+        this.values_span.text(values[0] + "-" + values[1]);
+
+        // Set new values in filter.
+        this.low = values[0];
+        this.high = values[1];
+         
+        // Set timeout to update if filter low, high are stable.
+        var self = this;
+        setTimeout(function() {
+            if (values[0] === self.low && values[1] === self.high) {
+                self.manager.track.request_draw({ force: true, clear_after: true });
+            }
+        }, 25);
+         
+     },
+    /** 
+     * Returns true if filter can be applied to element.
+     */
+    applies_to: function(element) {
+        if (element.length > this.index) {
+            return true;
+        }
+        return false;
+    },
+    /**
+     * Helper function: returns true if value is in filter's [low, high] range or is non-numeric.
+     */
+    _keep_val: function(val) {
+        return (isNaN(val) || (val >= this.low && val <= this.high));
+    },    
+    /**
+     * Returns true if (a) element's value(s) is in [low, high] (range is inclusive) 
+     * or (b) if value is non-numeric and hence unfilterable.
+     */
+    keep: function(element) {
+        if ( !this.applies_to( element ) ) {
+            // No element to filter on.
+            return true;
+        }
+
+        // Keep value function.
+        var filter = this;
+
+        // Do filtering.
+        var to_filter = element[this.index];
+        if (to_filter instanceof Array) {
+            var returnVal = true;
+            for (var i = 0; i < to_filter.length; i++) {
+                if (!this._keep_val(to_filter[i])) {
+                    // Exclude element.
+                    returnVal = false;
+                    break;
+                }
+            }
+            return returnVal;
+        }
+        else {
+            return this._keep_val(element[this.index]);
+        }
+    },
+    /**
+     * Update filter's min and max values based on element's values.
+     */
+    update_attrs: function(element) {
+        var updated = false;
+        if (!this.applies_to(element) ) {
+            return updated;
+        }
+        
+        //
+        // Update filter's min, max based on element values.
+        //
+        
+        // Make value(s) into an Array.
+        var values = element[this.index];
+        if (!(values instanceof Array)) {
+            values = [values];
+        }
+        
+        // Loop through values and update min, max.
+        for (var i = 0; i < values.length; i++) {
+            var value = values[i];
+            if (value < this.min) {
+                this.min = Math.floor(value);
+                updated = true;
+            }
+            if (value > this.max) {
+                this.max = Math.ceil(value);
+                updated = true;
+            }
+        }
+        return updated;
+    },
+    /**
+     * Update filter's slider.
+     */
+    update_ui_elt: function () {
+        // Only show filter if min < max because filter is not useful otherwise. This
+        // covers all corner cases, such as when min, max have not been defined and
+        // when min == max.
+        if (this.min < this.max) {
+            this.parent_div.show();
+        }
+        else {
+            this.parent_div.hide();
+        }
+        
+        var 
+            slider_min = this.slider.slider("option", "min"),
+            slider_max = this.slider.slider("option", "max");
+        if (this.min < slider_min || this.max > slider_max) {
+            // Update slider min, max, step.
+            this.slider.slider("option", "min", this.min);
+            this.slider.slider("option", "max", this.max);
+            this.slider.slider("option", "step", this.get_slider_step(this.min, this.max));
+            // Refresh slider:
+            // TODO: do we want to keep current values or reset to min/max?
+            // Currently we reset values:
+            this.slider.slider("option", "values", [this.min, this.max]);
+            // To use the current values.
+            //var values = this.slider.slider( "option", "values" );
+            //this.slider.slider( "option", "values", values );
+        }
+    }
+});
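+
+// Usage sketch (illustrative; assumes jQuery UI's slider plugin is loaded, as
+// the constructor above requires): a filter over payload index 4 widens its
+// min/max as elements arrive, then low/high hide out-of-range elements.
+//   var score_filter = new NumberFilter({ name: 'score', index: 4 });
+//   score_filter.update_attrs(['uid', 100, 200, 'name', 7.5]);  // min = 7, max = 8
+//   score_filter.low = 8;
+//   score_filter.keep(['uid', 100, 200, 'name', 7.5]);          // false: 7.5 < low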
+ 
+/**
+ * Manages a set of filters.
+ */
+var FiltersManager = function(track, obj_dict) {
+    this.track = track;
+    this.alpha_filter = null;
+    this.height_filter = null;
+    this.filters = [];
+    
+    //
+    // Create HTML.
+    //
+        
+    //
+    // Create parent div.
+    //
+    this.parent_div = $("<div/>").addClass("filters").hide();
+    // Disable dragging, double clicking, keys on div so that actions on slider do not impact viz.
+    this.parent_div.bind("drag", function(e) {
+        e.stopPropagation();
+    }).click(function(e) {
+        e.stopPropagation();
+    }).bind("dblclick", function(e) {
+        e.stopPropagation();
+    }).bind("keydown", function(e) {
+        e.stopPropagation();
+    });
+    
+    //
+    // Restore state from dict.
+    //
+    if (obj_dict && 'filters' in obj_dict) { // Second condition needed for backward compatibility.
+        var 
+            alpha_filter_name = ('alpha_filter' in obj_dict ? obj_dict.alpha_filter : null),
+            height_filter_name = ('height_filter' in obj_dict ? obj_dict.height_filter : null),            
+            filters_dict = obj_dict.filters,
+            filter;
+        for (var i = 0; i < filters_dict.length; i++) {
+            if (filters_dict[i].type === 'number') {
+                filter = new NumberFilter(filters_dict[i]);
+                this.add_filter(filter);
+                if (filter.name === alpha_filter_name) {
+                    this.alpha_filter = filter;
+                    filter.transparency_icon.addClass("active").show();
+                }
+                if (filter.name === height_filter_name) {
+                    this.height_filter = filter;
+                    filter.height_icon.addClass("active").show();
+                }
+            } 
+            else {
+                console.log("ERROR: unsupported filter: ", name, type);
+            }
+        }
+        
+        
+        if ('visible' in obj_dict && obj_dict.visible) {
+            this.parent_div.show();
+        }
+    }
+    
+    // Add button to filter complete dataset.
+    if (this.filters.length !== 0) {
+        var run_buttons_row = $("<div/>").addClass("param-row").appendTo(this.parent_div);
+        var run_on_dataset_button = $("<input type='submit'/>").attr("value", "Run on complete dataset").appendTo(run_buttons_row);
+        var filter_manager = this;
+        run_on_dataset_button.click( function() {
+            filter_manager.run_on_dataset();
+        });
+    }
+        
+};
+
+extend(FiltersManager.prototype, {
+    // HTML manipulation and inspection.
+    show: function() { this.parent_div.show(); },
+    hide: function() { this.parent_div.hide(); },
+    toggle: function() { this.parent_div.toggle(); },
+    visible: function() { return this.parent_div.is(":visible"); },
+    /**
+     * Returns dictionary for manager.
+     */
+    to_dict: function() {
+        var obj_dict = {},
+            filter_dicts = [],
+            filter;
+            
+        // Include individual filter states.
+        for (var i = 0; i < this.filters.length; i++) {
+            filter = this.filters[i];
+            filter_dicts.push(filter.to_dict());
+        }
+        obj_dict.filters = filter_dicts;
+        
+        // Include transparency, height filters.
+        obj_dict.alpha_filter = (this.alpha_filter ? this.alpha_filter.name : null);
+        obj_dict.height_filter = (this.height_filter ? this.height_filter.name : null);
+        
+        // Include visibility.
+        obj_dict.visible = this.parent_div.is(":visible");
+        
+        return obj_dict;
+    },
+    /**
+     * Return a copy of the manager.
+     */
+    copy: function(new_track) {
+        var copy = new FiltersManager(new_track);
+        for (var i = 0; i < this.filters.length; i++) {
+            copy.add_filter(this.filters[i].copy());
+        }
+        return copy;
+    },
+    /**
+     * Add a filter to the manager.
+     */
+    add_filter: function(filter) {
+        filter.manager = this;
+        this.parent_div.append(filter.parent_div);
+        this.filters.push(filter);  
+    },
+    /**
+     * Remove all filters from manager.
+     */
+    remove_all: function() {
+        this.filters = [];
+        this.parent_div.children().remove();
+    },
+    /**
+     * Initialize filters.
+     */ 
+    init_filters: function() {
+        for (var i = 0; i < this.filters.length; i++) {
+            var filter = this.filters[i];
+            filter.update_ui_elt();
+        }
+    },
+    /**
+     * Clear filters so that they do not impact track display.
+     */
+    clear_filters: function() {
+        for (var i = 0; i < this.filters.length; i++) {
+            var filter = this.filters[i];
+            filter.slider.slider("option", "values", [filter.min, filter.max]);
+        }
+        this.alpha_filter = null;
+        this.height_filter = null;
+        
+        // Hide icons for setting filters.
+        this.parent_div.find(".icon-button").hide();
+    },
+    run_on_dataset: function() {
+        // Get or create dictionary item.
+        var get_or_create_dict_item = function(dict, key, new_item) {
+            // Add new item to dict if key is not already present.
+            if (!(key in dict)) {
+                dict[key] = new_item;
+            }
+            return dict[key];
+        };
+        
+        //
+        // Find and group active filters. Active filters are those being used to hide data.
+        // Filters with the same tool id are grouped.
+        //
+        var active_filters = {},
+            filter, 
+            tool_filter_conditions;
+        for (var i = 0; i < this.filters.length; i++) {
+            filter = this.filters[i];
+            if (filter.tool_id) {
+                // Add filtering conditions if filter low/high are set.
+                if (filter.min !== filter.low) {
+                    tool_filter_conditions = get_or_create_dict_item(active_filters, filter.tool_id, []);
+                    tool_filter_conditions[tool_filter_conditions.length] = filter.tool_exp_name + " >= " + filter.low;
+                }
+                if (filter.max !== filter.high) {
+                    tool_filter_conditions = get_or_create_dict_item(active_filters, filter.tool_id, []);
+                    tool_filter_conditions[tool_filter_conditions.length] = filter.tool_exp_name + " <= " + filter.high;
+                }
+            }
+        }
+        
+        //
+        // Use tools to run filters.
+        //
+        
+        // Create list of (tool_id, tool_filters) tuples.
+        var active_filters_list = [];
+        for (var tool_id in active_filters) {
+            active_filters_list[active_filters_list.length] = [tool_id, active_filters[tool_id]];
+        }
+        
+        // Invoke recursive function to run filters; this enables chaining of filters via
+        // iterative application.
+        (function run_filter(input_dataset_id, filters) {
+            var 
+                // Set up filtering info and params.
+                filter_tuple = filters[0],
+                tool_id = filter_tuple[0],
+                tool_filters = filter_tuple[1],
+                tool_filter_str = "(" + tool_filters.join(") and (") + ")",
+                url_params = {
+                    cond: tool_filter_str,
+                    input: input_dataset_id,
+                    target_dataset_id: input_dataset_id,
+                    tool_id: tool_id
+                };
+
+            // Remove current filter.
+            filters = filters.slice(1);
+                
+            $.getJSON(run_tool_url, url_params, function(response) {
+                if (response.error) {
+                    // General error.
+                    Galaxy.modal.show({
+                        title: "Filter Dataset",
+                        body : "Error running tool " + tool_id,
+                        buttons : { "Close" : Galaxy.modal.hide() }
+                    });
+                }
+                else if (filters.length === 0) {
+                    // No more filters to run.
+                    Galaxy.modal.show({
+                        title: "Filtering Dataset",
+                        body: "Filter(s) are running on the complete dataset. Outputs are in dataset's history.",
+                        buttons: { "Close" : Galaxy.modal.hide() }
+                    });
+                }
+                else {
+                    // More filters to run.
+                    run_filter(response.dataset_id, filters);
+                }
+            });
+              
+        })(this.track.dataset_id, active_filters_list);        
+    }
+});
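+
+// Persistence sketch: to_dict() captures filter state plus the alpha/height
+// assignments by name, so a manager can round-trip through a saved
+// visualization's config (restored by the constructor above):
+//   var saved = manager.to_dict();
+//   var restored = new FiltersManager(track, saved);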
+
+return {
+    FiltersManager: FiltersManager,
+    NumberFilter: NumberFilter
+};
+
+});
diff --git a/client/galaxy/scripts/viz/trackster/painters.js b/client/galaxy/scripts/viz/trackster/painters.js
new file mode 100644
index 0000000..5108d19
--- /dev/null
+++ b/client/galaxy/scripts/viz/trackster/painters.js
@@ -0,0 +1,1619 @@
+define( ["libs/underscore"], function( _ ) {
+
+/**
+ * Compute the type of overlap between two regions. They are assumed to be on the same chrom/contig.
+ * The overlap is computed relative to the second region; hence, OVERLAP_START indicates that the first
+ * region overlaps the start (but not the end) of the second region.
+ * NOTE: Coordinates are assumed to be in BED format: half open (start is closed, end is open).
+ */
+var BEFORE = 1001, CONTAINS = 1002, OVERLAP_START = 1003, OVERLAP_END = 1004, CONTAINED_BY = 1005, AFTER = 1006;
+var compute_overlap = function(first_region, second_region) {
+    var
+        first_start = first_region[0], first_end = first_region[1],
+        second_start = second_region[0], second_end = second_region[1],
+        overlap;
+    if (first_start < second_start) {
+        if (first_end <= second_start) {
+            overlap = BEFORE;
+        }
+        else if (first_end <= second_end) {
+            overlap = OVERLAP_START;
+        }
+        else { // first_end > second_end
+            overlap = CONTAINS;
+        }
+    }
+    else { // first_start >= second_start
+        if (first_start > second_end) {
+            overlap = AFTER;
+        }
+        else if (first_end <= second_end) {
+            overlap = CONTAINED_BY;
+        }
+        else {
+            overlap = OVERLAP_END;
+        }
+    }
+
+    return overlap;
+};
+
+/**
+ * Returns true if regions overlap.
+ */
+var is_overlap = function(first_region, second_region) {
+    var overlap = compute_overlap(first_region, second_region);
+    return (overlap !== BEFORE && overlap !== AFTER);
+};
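+
+// Worked examples with half-open BED coordinates (relative to the second region):
+//   compute_overlap([10, 20], [15, 25]) === OVERLAP_START
+//   compute_overlap([15, 25], [10, 20]) === OVERLAP_END
+//   compute_overlap([10, 30], [15, 25]) === CONTAINS
+//   compute_overlap([10, 15], [15, 25]) === BEFORE   // open ends: touching is not overlap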
+
+/**
+ * Draw a dashed line on a canvas using filled rectangles. This function is based on:
+ * http://vetruvet.blogspot.com/2010/10/drawing-dashed-lines-on-html5-canvas.html
+ * However, that approach uses lines, which don't seem to render as well, so use
+ * rectangles instead.
+ */
+var dashedLine = function(ctx, x1, y1, x2, y2, dashLen) {
+    if (dashLen === undefined) { dashLen = 4; }
+    var dX = x2 - x1;
+    var dY = y2 - y1;
+    var dashes = Math.floor(Math.sqrt(dX * dX + dY * dY) / dashLen);
+    var dashX = dX / dashes;
+    var dashY = dY / dashes;
+    var q;
+
+    for (q = 0; q < dashes; q++, x1 += dashX, y1 += dashY) {
+        if (q % 2 !== 0) {
+            continue;
+        }
+        ctx.fillRect(x1, y1, dashLen, 1);
+    }
+};
+
+/**
+ * Draw an equilateral triangle that points down.
+ */
+var drawDownwardEquilateralTriangle = function(ctx, down_vertex_x, down_vertex_y, side_len) {
+    // Compute other two points of triangle.
+    var
+        x1 = down_vertex_x - side_len/2,
+        x2 = down_vertex_x + side_len/2,
+        y = down_vertex_y - side_len * Math.sqrt(3) / 2;  // height of an equilateral triangle
+
+    // Draw and fill.
+    ctx.beginPath();
+    ctx.moveTo(x1, y);
+    ctx.lineTo(x2, y);
+    ctx.lineTo(down_vertex_x, down_vertex_y);
+    ctx.lineTo(x1, y);
+
+    ctx.strokeStyle = ctx.fillStyle;
+    ctx.fill();
+    ctx.stroke();
+    ctx.closePath();
+};
+
+/**
+ * Base class for all scalers. Scalers produce values that are used to change (scale) drawing attributes.
+ */
+var Scaler = function(default_val) {
+    this.default_val = (default_val ? default_val : 1);
+};
+
+/**
+ * Produce a scaling value.
+ */
+Scaler.prototype.gen_val = function(input) {
+    return this.default_val;
+};
+
+/**
+ * Results from painter.draw()
+ */
+var DrawResults = function(options) {
+    this.incomplete_features = options.incomplete_features;
+    this.feature_mapper = options.feature_mapper;
+};
+
+/**
+ * Base class for painters
+ *
+ * -- Mode and prefs are both optional
+ */
+var Painter = function(data, view_start, view_end, prefs, mode) {
+    // Data and data properties
+    this.data = data;
+    // View
+    this.view_start = view_start;
+    this.view_end = view_end;
+    // Drawing prefs
+    this.prefs = _.extend({}, this.default_prefs, prefs);
+    this.mode = mode;
+};
+
+Painter.prototype.default_prefs = {};
+
+/**
+ * Draw on the context using a rectangle of width x height using scale w_scale.
+ */
+Painter.prototype.draw = function(ctx, width, height, w_scale) {};
+
+/**
+ * Get starting drawing position, which is offset a half-base left of coordinate.
+ */
+Painter.prototype.get_start_draw_pos = function(chrom_pos, w_scale) {
+    return this._chrom_pos_to_draw_pos(chrom_pos, w_scale, -0.5);
+};
+
+/**
+ * Get end drawing position, which is offset a half-base right of coordinate.
+ */
+Painter.prototype.get_end_draw_pos = function(chrom_pos, w_scale) {
+    return this._chrom_pos_to_draw_pos(chrom_pos, w_scale, 0.5);
+};
+
+/**
+ * Get drawing position.
+ */
+Painter.prototype.get_draw_pos = function(chrom_pos, w_scale) {
+    return this._chrom_pos_to_draw_pos(chrom_pos, w_scale, 0);
+};
+
+/**
+ * Convert chromosome position to drawing position.
+ */
+Painter.prototype._chrom_pos_to_draw_pos = function(chrom_pos, w_scale, offset) {
+    return Math.floor( w_scale * ( Math.max(0, chrom_pos - this.view_start) + offset) );
+};
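+
+// Worked example: for a painter with view_start = 100 at w_scale = 10 px/base,
+// the half-base offsets place a feature's edges between bases:
+//   painter.get_start_draw_pos(103, 10);  // floor(10 * (3 - 0.5)) === 25
+//   painter.get_end_draw_pos(105, 10);    // floor(10 * (5 + 0.5)) === 55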
+
+
+var LinePainter = function(data, view_start, view_end, prefs, mode) {
+    Painter.call( this, data, view_start, view_end, prefs, mode );
+};
+
+LinePainter.prototype.default_prefs = { min_value: undefined, max_value: undefined, mode: "Histogram", color: "#000", overflow_color: "#F66" };
+
+LinePainter.prototype.draw = function(ctx, width, height, w_scale) {
+    var in_path = false,
+        min_value = this.prefs.min_value,
+        max_value = this.prefs.max_value,
+        vertical_range = max_value - min_value,
+        height_px = height,
+        view_start = this.view_start,
+        mode = this.mode,
+        data = this.data;
+
+    ctx.save();
+
+    // Pixel position of 0 on the y axis
+    var y_zero = Math.round( height + min_value / vertical_range * height );
+
+    // Horizontal line to denote x-axis
+    if ( mode !== "Intensity" ) {
+        ctx.fillStyle = "#aaa";
+        ctx.fillRect( 0, y_zero, width, 1 );
+    }
+
+    ctx.beginPath();
+    var x_scaled, y, delta_x_pxs;
+    if (data.length > 1) {
+        delta_x_pxs = _.map(data.slice(0,-1), function(d, i) {
+            return Math.ceil((data[i+1][0] - data[i][0]) * w_scale);
+        });
+    }
+    else {
+        delta_x_pxs = [10];
+    }
+
+    // Painter color can be in either block_color (FeatureTrack) or color pref (LineTrack).
+    var painter_color = this.prefs.block_color || this.prefs.color,
+        // Extract RGB from preference color.
+        pref_color = parseInt( painter_color.slice(1), 16 ),
+        pref_r = (pref_color & 0xff0000) >> 16,
+        pref_g = (pref_color & 0x00ff00) >> 8,
+        pref_b = pref_color & 0x0000ff,
+        top_overflow = false,
+        bot_overflow = false;
+
+
+    // Paint track.
+    var delta_x_px;
+    for (var i = 0, len = data.length; i < len; i++) {
+        // Reset attributes for next point.
+        ctx.fillStyle = ctx.strokeStyle = painter_color;
+        top_overflow = bot_overflow = false;
+        delta_x_px = delta_x_pxs[i];
+
+        x_scaled = Math.floor((data[i][0] - view_start - 0.5) * w_scale);
+        y = data[i][1];
+
+        // Process Y (scaler) value.
+        if (y === null) {
+            if (in_path && mode === "Filled") {
+                ctx.lineTo(x_scaled, height_px);
+            }
+            in_path = false;
+            continue;
+        }
+
+        // Bound Y value by min, max.
+        if (y < min_value) {
+            bot_overflow = true;
+            y = min_value;
+        }
+        else if (y > max_value) {
+            top_overflow = true;
+            y = max_value;
+        }
+
+        // Draw point.
+        if (mode === "Histogram") {
+            // y becomes the bar height in pixels, which is then negated for canvas coords
+            y = Math.round( y / vertical_range * height_px );
+            ctx.fillRect(x_scaled, y_zero, delta_x_px, - y );
+        }
+        else if (mode === "Intensity") {
+            var
+                saturation = (y - min_value) / vertical_range,
+                // Range is [pref_color, 255] where saturation = 0 --> 255 and saturation = 1 --> pref color
+                new_r = Math.round( pref_r + (255 - pref_r) * (1 - saturation) ),
+                new_g = Math.round( pref_g + (255 - pref_g) * (1 - saturation) ),
+                new_b = Math.round( pref_b + (255 - pref_b) * (1 - saturation) );
+            ctx.fillStyle = "rgb(" + new_r + "," + new_g + "," + new_b + ")";
+            ctx.fillRect(x_scaled, 0, delta_x_px, height_px);
+        }
+        else { // mode is Coverage/Line or Filled.
+
+            // Scale Y value.
+            y = Math.round( height_px - (y - min_value) / vertical_range * height_px );
+            if (in_path) {
+                ctx.lineTo(x_scaled, y);
+            }
+            else {
+                in_path = true;
+                if (mode === "Filled") {
+                    ctx.moveTo(x_scaled, height_px);
+                    ctx.lineTo(x_scaled, y);
+                }
+                else {
+                    ctx.moveTo(x_scaled, y);
+                    // Use this approach (note: same as for filled) to draw line from 0 to
+                    // first data point.
+                    //ctx.moveTo(x_scaled, height_px);
+                    //ctx.lineTo(x_scaled, y);
+                }
+            }
+        }
+
+        // Draw lines at boundaries if overflowing min or max
+        ctx.fillStyle = this.prefs.overflow_color;
+        if (top_overflow || bot_overflow) {
+            var overflow_x;
+            if (mode === "Histogram" || mode === "Intensity") {
+                overflow_x = delta_x_px;
+            }
+            else { // Line and Filled, which are points
+                x_scaled -= 2; // Move it over to the left so it's centered on the point
+                overflow_x = 4;
+            }
+            if (top_overflow) {
+                ctx.fillRect(x_scaled, 0, overflow_x, 3);
+            }
+            if (bot_overflow) {
+                ctx.fillRect(x_scaled, height_px - 3, overflow_x, 3);
+            }
+        }
+        ctx.fillStyle = painter_color;
+    }
+    if (mode === "Filled") {
+        if (in_path) {
+            ctx.lineTo( x_scaled, y_zero );
+            ctx.lineTo( 0, y_zero );
+        }
+        ctx.fill();
+    }
+    else {
+        ctx.stroke();
+    }
+
+    ctx.restore();
+};
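+
+// Intensity-mode color math, restated: the preference color is split into RGB
+// channels with bit masks, and each channel is interpolated linearly toward
+// white as saturation falls. E.g. for painter_color '#336699' halfway between
+// min and max (saturation = 0.5):
+//   pref_r = (0x336699 & 0xff0000) >> 16;     // 0x33 === 51
+//   Math.round(51 + (255 - 51) * (1 - 0.5));  // new_r === 153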
+
+/**
+ * Mapper that contains information about feature locations and data.
+ */
+var FeaturePositionMapper = function(slot_height) {
+    this.feature_positions = {};
+    this.slot_height = slot_height;
+    this.translation = 0;
+    this.y_translation = 0;
+};
+
+/**
+ * Map feature data to a position defined by <slot, x_start, x_end>.
+ */
+FeaturePositionMapper.prototype.map_feature_data = function(feature_data, slot, x_start, x_end) {
+    if (!this.feature_positions[slot]) {
+        this.feature_positions[slot] = [];
+    }
+    this.feature_positions[slot].push({
+        data: feature_data,
+        x_start: x_start,
+        x_end: x_end
+    });
+};
+
+/**
+ * Get feature data for position <x, y>
+ */
+FeaturePositionMapper.prototype.get_feature_data = function(x, y) {
+    // Find slot using Y.
+    var slot = Math.floor( (y-this.y_translation)/this.slot_height ),
+        feature_dict;
+
+    // May not be over a slot due to padding, margin, etc.
+    if (!this.feature_positions[slot]) {
+        return null;
+    }
+
+    // Find feature using X.
+    x += this.translation;
+    for (var i = 0; i < this.feature_positions[slot].length; i++) {
+        feature_dict = this.feature_positions[slot][i];
+        if (x >= feature_dict.x_start && x <= feature_dict.x_end) {
+            return feature_dict.data;
+        }
+    }
+};
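+
+// Lookup sketch: after draw() records each feature's slot and pixel extent,
+// the mapper resolves a mouse position back to feature data:
+//   var mapper = new FeaturePositionMapper(10);     // 10px-high slots
+//   mapper.map_feature_data(feature, 2, 120, 180);  // slot 2, x in [120, 180]
+//   mapper.get_feature_data(150, 25);               // feature: slot floor(25/10) === 2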
+
+/**
+ * Abstract object for painting feature tracks. Subclasses must implement draw_element() for painting to work.
+ * Painter uses a 0-based, half-open coordinate system; start coordinate is closed--included--and the end is open.
+ * This coordinate system matches the BED format.
+ */
+var FeaturePainter = function(data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler) {
+    Painter.call(this, data, view_start, view_end, prefs, mode);
+    this.alpha_scaler = (alpha_scaler ? alpha_scaler : new Scaler());
+    this.height_scaler = (height_scaler ? height_scaler : new Scaler());
+    this.max_label_length = 200;
+};
+
+FeaturePainter.prototype.default_prefs = { block_color: "#FFF", connector_color: "#FFF" };
+
+_.extend(FeaturePainter.prototype, {
+    get_required_height: function(rows_required, width) {
+        // y_scale is the height per row
+        var required_height = this.get_row_height(),
+            y_scale = required_height,
+            mode = this.mode;
+        // If using a packing mode, need to multiply by the number of slots used
+        if (mode === "no_detail" || mode === "Squish" || mode === "Pack") {
+            required_height = rows_required * y_scale;
+        }
+        return required_height + this.get_top_padding(width);
+    },
+
+    /** Extra padding before first row of features */
+    get_top_padding: function(width) {
+        return 0;
+    },
+
+    /**
+     * Draw data on ctx using slots and within the rectangle defined by width and height. Returns
+     * a FeaturePositionMapper object with information about where features were drawn.
+     */
+    draw: function(ctx, width, height, w_scale, slots) {
+        var data = this.data,
+            view_start = this.view_start,
+            view_end = this.view_end;
+
+        ctx.save();
+
+        ctx.fillStyle = this.prefs.block_color;
+        ctx.textAlign = "right";
+
+        var y_scale = this.get_row_height(),
+            feature_mapper = new FeaturePositionMapper(y_scale),
+            x_draw_coords,
+            incomplete_features = [];
+
+        for (var i = 0, len = data.length; i < len; i++) {
+            var feature = data[i],
+                feature_uid = feature[0],
+                feature_start = feature[1],
+                feature_end = feature[2],
+                // Slot valid only if features are slotted and this feature is slotted;
+                // feature may not be due to lack of space.
+                slot = (slots && slots[feature_uid] !== undefined ? slots[feature_uid].slot : null);
+
+            // Draw feature if (a) mode is dense or feature is slotted (as it must be for all non-dense modes) and
+            // (b) there's overlap between the feature and drawing region.
+            if ( (this.mode === "Dense" || slot !== null) && ( feature_start < view_end && feature_end > view_start ) ) {
+                x_draw_coords = this.draw_element(ctx, this.mode, feature, slot, view_start, view_end, w_scale, y_scale, width);
+                feature_mapper.map_feature_data(feature, slot, x_draw_coords[0], x_draw_coords[1]);
+
+                // Add to incomplete features if it's not drawn completely in region.
+                if (feature_start < view_start || feature_end > view_end) {
+                    incomplete_features.push(feature);
+                }
+            }
+        }
+
+        ctx.restore();
+
+        feature_mapper.y_translation = this.get_top_padding(width);
+        return new DrawResults({
+            incomplete_features: incomplete_features,
+            feature_mapper: feature_mapper
+        });
+    },
+
+    /**
+     * Abstract function for drawing an individual feature.
+     */
+    draw_element: function(ctx, mode, feature, slot, tile_low, tile_high, w_scale, y_scale, width ) {
+        return [0, 0];
+    }
+});
+
+// Constants specific to feature tracks moved here (HACKING, these should
+// basically all be configuration options)
+var DENSE_TRACK_HEIGHT = 10,
+    NO_DETAIL_TRACK_HEIGHT = 3,
+    SQUISH_TRACK_HEIGHT = 5,
+    PACK_TRACK_HEIGHT = 10,
+    NO_DETAIL_FEATURE_HEIGHT = 1,
+    DENSE_FEATURE_HEIGHT = 9,
+    SQUISH_FEATURE_HEIGHT = 3,
+    PACK_FEATURE_HEIGHT = 9,
+    LABEL_SPACING = 2,
+    CONNECTOR_COLOR = "#ccc";
+
+var LinkedFeaturePainter = function(data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler) {
+    FeaturePainter.call(this, data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler);
+    // Whether to draw a single connector in the background that spans the entire feature (the intron fishbone)
+    this.draw_background_connector = true;
+    // Whether to call draw_connector for every pair of blocks
+    this.draw_individual_connectors = false;
+};
+
+_.extend(LinkedFeaturePainter.prototype, FeaturePainter.prototype, {
+
+    /**
+     * Height of a single row, depends on mode
+     */
+    get_row_height: function() {
+        var mode = this.mode, height;
+        if (mode === "Dense") {
+            height = DENSE_TRACK_HEIGHT;
+        }
+        else if (mode === "no_detail") {
+            height = NO_DETAIL_TRACK_HEIGHT;
+        }
+        else if (mode === "Squish") {
+            height = SQUISH_TRACK_HEIGHT;
+        }
+        else { // mode === "Pack"
+            height = PACK_TRACK_HEIGHT;
+        }
+        return height;
+    },
+
+    /**
+     * Draw a feature. Returns an array with feature's start and end X coordinates.
+     */
+    draw_element: function(ctx, mode, feature, slot, tile_low, tile_high, w_scale, y_scale, width) {
+        var feature_uid = feature[0],
+            feature_start = feature[1],
+            feature_end = feature[2],
+            feature_name = feature[3],
+            feature_strand = feature[4],
+            // -0.5 to offset region between bases.
+            f_start = Math.floor( Math.max(0, (feature_start - tile_low - 0.5) * w_scale) ),
+            f_end   = Math.ceil( Math.min(width, Math.max(0, (feature_end - tile_low - 0.5) * w_scale)) ),
+            draw_start = f_start,
+            draw_end = f_end,
+            y_start = (mode === "Dense" ? 0 : (0 + slot)) * y_scale + this.get_top_padding(width),
+            thick_start = null, thick_end = null,
+            // TODO: is there any reason why block, label color cannot be set at the Painter level?
+            // For now, assume '.' === '+'
+            block_color = (!feature_strand || feature_strand === "+" || feature_strand === "." ? this.prefs.block_color : this.prefs.reverse_strand_color),
+            label_color = this.prefs.label_color;
+
+        // Set global alpha.
+        ctx.globalAlpha = this.alpha_scaler.gen_val(feature);
+
+        // In dense mode, put all data in top slot.
+        if (mode === "Dense") {
+            slot = 1;
+        }
+
+        if (mode === "no_detail") {
+            // No details for feature, so only one way to display.
+            ctx.fillStyle = block_color;
+            ctx.fillRect(f_start, y_start + 5, f_end - f_start, NO_DETAIL_FEATURE_HEIGHT);
+        }
+        else { // Mode is either Squish or Pack:
+            // Feature details.
+            var feature_ts = feature[5],
+                feature_te = feature[6],
+                feature_blocks = feature[7],
+                // Whether we are drawing full height or squished features
+                full_height = true;
+
+            if (feature_ts && feature_te) {
+                thick_start = Math.floor( Math.max(0, (feature_ts - tile_low) * w_scale) );
+                thick_end = Math.ceil( Math.min(width, Math.max(0, (feature_te - tile_low) * w_scale)) );
+            }
+
+            // Set vars that depend on mode.
+            var thin_height, thick_height;
+            if (mode === "Squish" ) {
+                thin_height = 1;
+                thick_height = SQUISH_FEATURE_HEIGHT;
+                full_height = false;
+            } else if ( mode === "Dense" ) {
+                thin_height = 5;
+                thick_height = DENSE_FEATURE_HEIGHT;
+            } else { // mode === "Pack"
+                thin_height = 5;
+                thick_height = PACK_FEATURE_HEIGHT;
+            }
+
+            // Draw feature/feature blocks + connectors.
+            if (!feature_blocks) {
+                // If there are no blocks, treat the feature as one big exon.
+                ctx.fillStyle = block_color;
+                ctx.fillRect(f_start, y_start + 1, f_end - f_start, thick_height);
+                // If strand is specified, draw arrows over feature
+                if ( feature_strand && full_height ) {
+                    if (feature_strand === "+") {
+                        ctx.fillStyle = ctx.canvas.manager.get_pattern( 'right_strand_inv' );
+                    } else if (feature_strand === "-") {
+                        ctx.fillStyle = ctx.canvas.manager.get_pattern( 'left_strand_inv' );
+                    }
+                    ctx.fillRect(f_start, y_start + 1, f_end - f_start, thick_height);
+                }
+            } else {
+                //
+                // There are feature blocks and mode is either Squish or Pack.
+                //
+                // Approach: (a) draw whole feature as connector/intron and (b) draw blocks as
+                // needed. This ensures that whole feature, regardless of whether it starts with
+                // a block, is visible.
+                //
+
+                // Compute y axis start position and height
+                var cur_y_start, cur_height;
+                if (mode === "Squish" || mode === "Dense") {
+                    cur_y_start = y_start + Math.floor(SQUISH_FEATURE_HEIGHT/2) + 1;
+                    cur_height = 1;
+                }
+                else { // mode === "Pack"
+                    if (feature_strand) {
+                        cur_y_start = y_start;
+                        cur_height = thick_height;
+                    }
+                    else {
+                        cur_y_start = y_start + Math.floor(SQUISH_FEATURE_HEIGHT/2) + 1;
+                        cur_height = 1;
+                    }
+                }
+
+                // Draw whole feature as connector/intron.
+                if ( this.draw_background_connector ) {
+                    if (mode === "Squish" || mode === "Dense") {
+                        ctx.fillStyle = CONNECTOR_COLOR;
+                    }
+                    else { // mode === "Pack"
+                        if (feature_strand) {
+                            if (feature_strand === "+") {
+                                ctx.fillStyle = ctx.canvas.manager.get_pattern( 'right_strand' );
+                            } else if (feature_strand === "-") {
+                                ctx.fillStyle = ctx.canvas.manager.get_pattern( 'left_strand' );
+                            }
+                        }
+                        else {
+                            ctx.fillStyle = CONNECTOR_COLOR;
+                        }
+                    }
+                    ctx.fillRect(f_start, cur_y_start, f_end - f_start, cur_height);
+                }
+
+                // Draw blocks.
+                var start_and_height;
+                for (var k = 0, k_len = feature_blocks.length; k < k_len; k++) {
+                    var block = feature_blocks[k],
+                        // -0.5 to offset block between bases.
+                        block_start = Math.floor( Math.max(0, (block[0] - tile_low - 0.5) * w_scale) ),
+                        block_end = Math.ceil( Math.min(width, Math.max(0, (block[1] - tile_low - 0.5) * w_scale)) ),
+                        last_block_start, last_block_end;
+
+                    // Skip drawing if block not on tile.
+                    if (block_start > block_end) { continue; }
+
+                    // Draw thin block.
+                    ctx.fillStyle = block_color;
+                    ctx.fillRect(block_start, y_start + (thick_height-thin_height)/2 + 1, block_end - block_start, thin_height);
+
+                    // If block intersects with thick region, draw block as thick.
+                    // - No thick is sometimes encoded as thick_start == thick_end, so don't draw in that case
+                    if (thick_start !== undefined && feature_te > feature_ts && !(block_start > thick_end || block_end < thick_start) ) {
+                        var block_thick_start = Math.max(block_start, thick_start),
+                            block_thick_end = Math.min(block_end, thick_end);
+                        ctx.fillRect(block_thick_start, y_start + 1, block_thick_end - block_thick_start, thick_height);
+                        if ( feature_blocks.length === 1 && mode === "Pack") {
+                            // Exactly one block means we have no introns, but do have a distinct "thick" region,
+                            // draw arrows over it if in pack mode.
+                            if (feature_strand === "+") {
+                                ctx.fillStyle = ctx.canvas.manager.get_pattern( 'right_strand_inv' );
+                            } else if (feature_strand === "-") {
+                                ctx.fillStyle = ctx.canvas.manager.get_pattern( 'left_strand_inv' );
+                            }
+                            // If region is wide enough in pixels, pad a bit
+                            if ( block_thick_start + 14 < block_thick_end ) {
+                                block_thick_start += 2;
+                                block_thick_end -= 2;
+                            }
+                            ctx.fillRect(block_thick_start, y_start + 1, block_thick_end - block_thick_start, thick_height);
+                        }
+                    }
+                    // Draw individual connectors if required
+                    if ( this.draw_individual_connectors && last_block_start ) {
+                        this.draw_connector( ctx, last_block_start, last_block_end, block_start, block_end, y_start );
+                    }
+                    last_block_start = block_start;
+                    last_block_end = block_end;
+                }
+
+                // FIXME: Height scaling only works in Pack mode right now.
+                if (mode === "Pack") {
+                    // Reset alpha so height scaling is not impacted by alpha scaling.
+                    ctx.globalAlpha = 1;
+
+                    // Height scaling: draw white lines to reduce height according to height scale factor.
+                    ctx.fillStyle = "white"; // TODO: set this to background color.
+                    var
+                        hscale_factor = this.height_scaler.gen_val(feature),
+                        // Ceil ensures that min height is >= 1.
+                        new_height = Math.ceil(thick_height * hscale_factor),
+                        ws_height = Math.round( (thick_height-new_height)/2 );
+                    if (hscale_factor !== 1) {
+                        ctx.fillRect(f_start, cur_y_start + 1, f_end - f_start, ws_height);
+                        ctx.fillRect(f_start, cur_y_start + thick_height - ws_height + 1, f_end - f_start, ws_height);
+                    }
+                }
+            }
+
+            // Reset alpha so that label is not transparent.
+            ctx.globalAlpha = 1;
+
+            // Draw label for Pack mode.
+            if (feature_name && mode === "Pack" && feature_start > tile_low) {
+                ctx.fillStyle = label_color;
+                // FIXME: assumption here that the entire view starts at 0
+                if (tile_low === 0 && f_start - ctx.measureText(feature_name).width < 0) {
+                    ctx.textAlign = "left";
+                    ctx.fillText(feature_name, f_end + LABEL_SPACING, y_start + 8, this.max_label_length);
+                    draw_end += ctx.measureText(feature_name).width + LABEL_SPACING;
+                } else {
+                    ctx.textAlign = "right";
+                    ctx.fillText(feature_name, f_start - LABEL_SPACING, y_start + 8, this.max_label_length);
+                    draw_start -= ctx.measureText(feature_name).width + LABEL_SPACING;
+                }
+                //ctx.fillStyle = block_color;
+            }
+        }
+
+        // Reset global alpha.
+        ctx.globalAlpha = 1;
+
+        return [draw_start, draw_end];
+    }
+});
+
+var ReadPainter = function(data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler, ref_seq, base_color_fn) {
+    FeaturePainter.call(this, data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler);
+    this.ref_seq = (ref_seq ? ref_seq.data : null);
+    this.base_color_fn = base_color_fn;
+};
+
+_.extend(ReadPainter.prototype, FeaturePainter.prototype, {
+    /**
+     * Returns height based on mode.
+     */
+    get_row_height: function() {
+        var height, mode = this.mode;
+        if (mode === "Dense") {
+            height = DENSE_TRACK_HEIGHT;
+        }
+        else if (mode === "Squish") {
+            height = SQUISH_TRACK_HEIGHT;
+        }
+        else { // mode === "Pack"
+            height = PACK_TRACK_HEIGHT;
+            if (this.prefs.show_insertions) {
+                height *= 2;
+            }
+        }
+        return height;
+    },
+
+    /**
+     * Parse CIGAR string to get (a) a list of contiguous drawing blocks, built from the
+     * reference-consuming operations M, D, =, and X, and (b) an array of [ op_index, op_len ]
+     * pairs where op_index is an index into the string 'MIDNSHP=X'. Return value is a
+     * dictionary with two entries, blocks and cigar.
+     */
+    _parse_cigar: function(cigar_str) {
+        var cigar_ops = 'MIDNSHP=X';
+
+        // Parse cigar.
+        var blocks = [ [0, 0] ],
+            cur_block = blocks[0],
+            base_pos = 0,
+
+            // Parse cigar operations out and update/create blocks as needed.
+            parsed_cigar = _.map(cigar_str.match(/[0-9]+[MIDNSHP=X]/g), function(op) {
+                // Get operation length, character.
+                var op_len = parseInt(op.slice(0, -1), 10),
+                    op_char = op.slice(-1);
+
+                // Update drawing block.
+                if (op_char === 'N') {
+                    // At skip, so need to start new block if current block represents
+                    // drawing area.
+                    if (cur_block[1] !== 0) {
+                        cur_block = [base_pos + op_len, base_pos + op_len];
+                        blocks.push(cur_block);
+                    }
+                    // Advance base position past the skipped region so that a subsequent
+                    // skip starts its block at the correct offset.
+                    base_pos += op_len;
+                }
+                else if ('ISHP'.indexOf(op_char) === -1) {
+                    // Operation is M, D, =, or X; all consume reference bases.
+                    cur_block[1] += op_len;
+                    base_pos += op_len;
+                }
+
+                // Return parsed cigar.
+                return [ cigar_ops.indexOf(op_char), op_len ];
+            });
+
+        return {
+            blocks: blocks,
+            cigar: parsed_cigar
+        };
+    },
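+
+    /* Illustrative example (comments only, not executed): for a spliced read with
+     * CIGAR "10M20N5M", _parse_cigar returns
+     *     {
+     *         blocks: [ [0, 10], [30, 35] ],        // drawable spans, split at the skip
+     *         cigar:  [ [0, 10], [3, 20], [0, 5] ]  // 'M' is index 0, 'N' is index 3 in 'MIDNSHP=X'
+     *     }
+     * where block coordinates are offsets from the read start.
+     */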
+
+    /**
+     * Draw a single read from reference-based read sequence and cigar.
+     */
+    draw_read: function(ctx, mode, w_scale, y_start, tile_low, tile_high, feature_start, cigar, strand, read_seq) {
+        // Helper function to update base and sequence offsets.
+        var update_base_offset = function(offset, cig_op, cig_len) {
+                if ('M=NXD'.indexOf(cig_op) !== -1) {
+                    offset += cig_len;
+                }
+                return offset;
+            },
+            update_seq_offset = function(offset, cig_op, cig_len) {
+                if ('IX'.indexOf(cig_op) !== -1) {
+                    offset += cig_len;
+                }
+                return offset;
+            },
+            // Gets drawing coordinate for a sequence coordinate. Assumes closure variables w_scale and tile_low.
+            get_draw_coord = function(sequence_coord) {
+                // -0.5 to offset sequence between bases.
+                return Math.floor( Math.max(0, (sequence_coord - tile_low - 0.5) * w_scale) );
+            };
+
+        ctx.textAlign = "center";
+        var tile_region = [tile_low, tile_high],
+            base_offset = 0,
+            seq_offset = 0,
+            gap = Math.round(w_scale/2),
+            char_width_px = ctx.canvas.manager.char_width_px,
+            block_color = (strand === "+" ? this.prefs.detail_block_color : this.prefs.reverse_strand_color),
+            pack_mode = (mode === 'Pack'),
+            draw_height = (pack_mode ? PACK_FEATURE_HEIGHT : SQUISH_FEATURE_HEIGHT),
+            rect_y = y_start + 1,
+            paint_utils = new ReadPainterUtils(ctx, draw_height, w_scale, mode),
+            drawing_blocks = [],
+            s_start,
+            s_end;
+
+        // Keep list of items that need to be drawn on top of initial drawing layer.
+        var draw_last = [];
+
+        // Parse cigar and get drawing blocks.
+        var t = this._parse_cigar(cigar);
+        cigar = t.cigar;
+        drawing_blocks = t.blocks;
+
+        // Draw blocks.
+        for (var i = 0; i < drawing_blocks.length; i++) {
+            var block = drawing_blocks[i];
+
+            if (is_overlap([feature_start + block[0], feature_start + block[1]], tile_region)) {
+                s_start = get_draw_coord(feature_start + block[0]);
+                s_end = get_draw_coord(feature_start + block[1]);
+
+                // Make sure that block is drawn even if it is too small to be rendered officially; in this case,
+                // read is drawn at 1px.
+                // TODO: need to ensure that s_start, s_end are calculated the same for both slotting
+                // and drawing.
+                if (s_start === s_end) {
+                    s_end += 1;
+                }
+
+                // Draw read base as rectangle.
+                ctx.fillStyle = block_color;
+                ctx.fillRect(s_start, rect_y, s_end - s_start, draw_height);
+            }
+        }
+
+        // Draw read features.
+        for (var cig_id = 0, len = cigar.length; cig_id < len; cig_id++) {
+            var cig = cigar[cig_id],
+                cig_op = "MIDNSHP=X"[ cig[0] ],
+                cig_len = cig[1];
+
+            var seq_start = feature_start + base_offset;
+            s_start = get_draw_coord(seq_start);
+            s_end = get_draw_coord(seq_start + cig_len);
+
+            // Skip feature if it's not in tile.
+            if (!is_overlap([seq_start, seq_start + cig_len], tile_region)) {
+                // Update offsets.
+                base_offset = update_base_offset(base_offset, cig_op, cig_len);
+                seq_offset = update_seq_offset(seq_offset, cig_op, cig_len);
+                continue;
+            }
+
+            // Make sure that read is drawn even if it is too small to be rendered officially; in this case,
+            // read is drawn at 1px.
+            // TODO: need to ensure that s_start, s_end are calculated the same for both slotting
+            // and drawing.
+            if (s_start === s_end) {
+                s_end += 1;
+            }
+
+            // Draw read feature.
+            switch (cig_op) {
+                case "H": // Hard clipping.
+                case "S": // Soft clipping.
+                case "P": // Padding.
+                    // Sequence not present and not related to alignment; do nothing.
+                    break;
+                case "M": // "Match".
+                    // Because it's not known whether there is a match, ignore.
+                    base_offset += cig_len;
+                    break;
+                case "=": // Match with reference.
+                case "X": // Mismatch with reference.
+                    //
+                    // Draw sequence and/or variants.
+                    //
+
+                    // Get sequence to draw.
+                    var cur_seq = '';
+                    if (cig_op === 'X') {
+                        // Get sequence from read_seq.
+                        cur_seq = read_seq.slice(seq_offset, seq_offset + cig_len);
+                    }
+                    else if (this.ref_seq) { // && cig_op === '='
+                        // Use reference sequence.
+                        cur_seq = this.ref_seq.slice(
+                            // If read starts after tile start, slice at read start.
+                            Math.max(0, seq_start - tile_low),
+                            // If read ends before tile end, slice at read end.
+                            Math.min(seq_start - tile_low + cig_len, tile_high - tile_low)
+                        );
+                    }
+
+                    // Draw sequence. Because cur_seq starts at the read/tile start, go there to start writing.
+                    var start_pos = Math.max(seq_start, tile_low);
+                    for (var c = 0; c < cur_seq.length; c++) {
+                        // Draw base if showing all (i.e. not showing differences) or there is a mismatch.
+                        if ((cur_seq && !this.prefs.show_differences) || cig_op === 'X') {
+                            // Draw base.
+                            var c_start = Math.floor( Math.max(0, (start_pos + c - tile_low) * w_scale) );
+                            ctx.fillStyle = this.base_color_fn(cur_seq[c]);
+                            if (pack_mode && w_scale > char_width_px) {
+                                ctx.fillText(cur_seq[c], c_start, y_start + 9);
+                            }
+                            // Require a minimum w_scale so that variants are only drawn when somewhat zoomed in.
+                            else if (w_scale > 0.05) {
+                                ctx.fillRect(c_start - gap, rect_y, Math.max( 1, Math.round(w_scale) ), draw_height);
+                            }
+                        }
+                    }
+
+                    // Move forward in sequence only if sequence used to get mismatches.
+                    if (cig_op === 'X') { seq_offset += cig_len; }
+                    base_offset += cig_len;
+
+                    break;
+                case "N": // Skipped bases.
+                    ctx.fillStyle = CONNECTOR_COLOR;
+                    ctx.fillRect(s_start, rect_y + (draw_height - 1)/2, s_end - s_start, 1);
+                    // No change in seq_offset because sequence not used when skipping.
+                    base_offset += cig_len;
+                    break;
+                case "D": // Deletion.
+                    paint_utils.draw_deletion(s_start, rect_y, cig_len);
+                    base_offset += cig_len;
+                    break;
+                case "I": // Insertion.
+                    // Check to see if sequence should be drawn at all by looking at the overlap between
+                    // the sequence region and the tile region.
+                    var insert_x_coord = s_start - gap;
+
+                    if (is_overlap([seq_start, seq_start + cig_len], tile_region)) {
+                        var seq = read_seq.slice(seq_offset, seq_offset + cig_len);
+                        // Insertion point is between the sequence start and the previous base: (-gap) moves
+                        // back from sequence start to insertion point.
+                        if (this.prefs.show_insertions) {
+                            //
+                            // Show inserted sequence above, centered on insertion point.
+                            //
+
+                            // Draw sequence.
+                            // X center is offset + start - <half_sequence_length>
+                            var x_center = s_start - (s_end - s_start)/2;
+                            if ( (mode === "Pack" || this.mode === "Auto") && read_seq !== undefined && w_scale > char_width_px) {
+                                // Draw sequence container.
+                                ctx.fillStyle = "yellow";
+                                ctx.fillRect(x_center - gap, y_start - 9, s_end - s_start, 9);
+                                draw_last[draw_last.length] = {type: "triangle", data: [insert_x_coord, y_start + 4, 5]};
+                                ctx.fillStyle = CONNECTOR_COLOR;
+                                // Based on overlap b/t sequence and tile, get sequence to be drawn.
+                                switch( compute_overlap( [seq_start, seq_start + cig_len], tile_region ) ) {
+                                    case(OVERLAP_START):
+                                        seq = seq.slice(tile_low-seq_start);
+                                        break;
+                                    case(OVERLAP_END):
+                                        seq = seq.slice(0, tile_high - seq_start);
+                                        break;
+                                    case(CONTAINED_BY):
+                                        // All of sequence drawn.
+                                        break;
+                                    case(CONTAINS):
+                                        seq = seq.slice(tile_low - seq_start, tile_high - seq_start);
+                                        break;
+                                }
+                                // Draw sequence.
+                                for (var c = 0, str_len = seq.length; c < str_len; c++) {
+                                    var c_start = Math.floor( Math.max(0, (seq_start + c -  tile_low) * w_scale) );
+                                    ctx.fillText(seq[c], c_start - (s_end - s_start)/2, y_start);
+                                }
+                            }
+                            else {
+                                // Draw block.
+                                ctx.fillStyle = "yellow";
+                                // TODO: This is a pretty hack-ish way to fill rectangle based on mode.
+                                ctx.fillRect(x_center, y_start + (this.mode !== "Dense" ? 2 : 5),
+                                             s_end - s_start, (mode !== "Dense" ? SQUISH_FEATURE_HEIGHT : DENSE_FEATURE_HEIGHT));
+                            }
+                        }
+                        else {
+                            if ( (mode === "Pack" || this.mode === "Auto") && read_seq !== undefined && w_scale > char_width_px) {
+                                // Show insertions with a single number at the insertion point.
+                                draw_last.push( { type: "text", data: [seq.length, insert_x_coord, y_start + 9] } );
+                            }
+                            else {
+                                // TODO: probably can merge this case with code above.
+                            }
+                        }
+                    }
+                    seq_offset += cig_len;
+                    // No change to base offset because insertions are drawn above sequence/read.
+                    break;
+            }
+        }
+
+        //
+        // Draw last items.
+        //
+        ctx.fillStyle = "yellow";
+        var item, type, data;
+        for (var i = 0; i < draw_last.length; i++) {
+            item = draw_last[i];
+            type = item.type;
+            data = item.data;
+            if (type === "text") {
+                ctx.save();
+                ctx.font = "bold " + ctx.font;
+                ctx.fillText(data[0], data[1], data[2]);
+                ctx.restore();
+            }
+            else if (type === "triangle") {
+                drawDownwardEquilateralTriangle(ctx, data[0], data[1], data[2]);
+            }
+        }
+    },
+
+    /**
+     * Draw a complete read pair
+     */
+    draw_element: function(ctx, mode, feature, slot, tile_low, tile_high, w_scale, y_scale, width ) {
+        // All features need a start, end, and vertical center.
+        var feature_uid = feature[0],
+            feature_start = feature[1],
+            feature_end = feature[2],
+            feature_name = feature[3],
+            // -0.5 to put element between bases.
+            f_start = Math.floor( Math.max(-0.5 * w_scale, (feature_start - tile_low - 0.5) * w_scale) ),
+            f_end   = Math.ceil( Math.min(width, Math.max(0, (feature_end - tile_low - 0.5) * w_scale)) ),
+            y_start = (mode === "Dense" ? 0 : (0 + slot)) * y_scale,
+            draw_height = (mode === 'Pack' ? PACK_FEATURE_HEIGHT : SQUISH_FEATURE_HEIGHT),
+            label_color = this.prefs.label_color;
+
+        // Draw read.
+        if (feature[5] instanceof Array) {
+            // Read is paired.
+            var connector = true;
+
+            // Draw left/forward read.
+            if (feature[4][1] >= tile_low && feature[4][0] <= tile_high && feature[4][2]) {
+                this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature[4][0], feature[4][2], feature[4][3], feature[4][4]);
+            }
+            else {
+                connector = false;
+            }
+
+            // Draw right/reverse read.
+            if (feature[5][1] >= tile_low && feature[5][0] <= tile_high && feature[5][2]) {
+                this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature[5][0], feature[5][2], feature[5][3], feature[5][4]);
+            }
+            else {
+                connector = false;
+            }
+
+            // Draw connector if both reads were drawn.
+            // TODO: currently, there is no way to connect reads drawn on different tiles; to connect reads on different tiles, data manager
+            // code is needed to join mate pairs from different regions. Alternatively, requesting multiple regions of data at once would
+            // make it possible to put together more easily.
+            // -0.5 to position connector correctly between reads.
+            var b1_end   = Math.ceil( Math.min(width, Math.max(-0.5 * w_scale, (feature[4][1] - tile_low - 0.5) * w_scale)) ),
+                b2_start = Math.floor( Math.max(-0.5 * w_scale, (feature[5][0] - tile_low - 0.5) * w_scale) );
+            if (connector && b2_start > b1_end) {
+                ctx.fillStyle = CONNECTOR_COLOR;
+                var line_height = y_start + 1 + (draw_height - 1)/2;
+                dashedLine(ctx, b1_end, line_height, b2_start, line_height);
+            }
+        } else {
+            // Read is single.
+            this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature_start, feature[4], feature[5], feature[6]);
+        }
+        if (mode === "Pack" && feature_start >= tile_low && feature_name !== ".") {
+            // Draw label.
+            ctx.fillStyle = this.prefs.label_color;
+            if (tile_low === 0 && f_start - ctx.measureText(feature_name).width < 0) {
+                ctx.textAlign = "left";
+                ctx.fillText(feature_name, f_end + LABEL_SPACING, y_start + 9, this.max_label_length);
+            } else {
+                ctx.textAlign = "right";
+                ctx.fillText(feature_name, f_start - LABEL_SPACING, y_start + 9, this.max_label_length);
+            }
+        }
+
+        // FIXME: provide actual coordinates for drawn read.
+        return [0,0];
+    }
+});
+
+var ArcLinkedFeaturePainter = function(data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler) {
+    LinkedFeaturePainter.call(this, data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler);
+    // Need to know the longest feature length for adding spacing
+    this.longest_feature_length = this.calculate_longest_feature_length();
+    this.draw_background_connector = false;
+    this.draw_individual_connectors = true;
+};
+
+_.extend(ArcLinkedFeaturePainter.prototype, FeaturePainter.prototype, LinkedFeaturePainter.prototype, {
+
+    calculate_longest_feature_length: function () {
+        var longest_feature_length = 0;
+        for (var i = 0, len = this.data.length; i < len; i++) {
+            var feature = this.data[i], feature_start = feature[1], feature_end = feature[2];
+            longest_feature_length = Math.max( longest_feature_length, feature_end - feature_start );
+        }
+        return longest_feature_length;
+    },
+
+    get_top_padding: function( width ) {
+        var view_range = this.view_end - this.view_start,
+            w_scale = width / view_range;
+        return Math.min( 128, Math.ceil( ( this.longest_feature_length / 2 ) * w_scale ) );
+    },
+
+    draw_connector: function( ctx, block1_start, block1_end, block2_start, block2_end, y_start ) {
+        // Arc drawing -- from closest endpoints
+        var x_center = ( block1_end + block2_start ) / 2,
+            radius = block2_start - x_center;
+        if ( radius > 0 ) {
+            // Draw a full half circle (from PI to 0) connecting the two blocks.
+            ctx.beginPath();
+            ctx.arc( x_center, y_start, radius, Math.PI, 0 );
+            ctx.stroke();
+        }
+    }
+});
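+
+/* Illustrative example: for adjacent blocks ending at x=100 and starting at x=200,
+ * draw_connector centers a half circle at x=150 with radius 50 spanning the gap.
+ */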
+
+// Color stuff from less.js
+
+var Color = function (rgb, a) {
+    /**
+     * The end goal here, is to parse the arguments
+     * into an integer triplet, such as `128, 255, 0`
+     *
+     * This facilitates operations and conversions.
+     */
+    if (Array.isArray(rgb)) {
+        this.rgb = rgb;
+    } else if (rgb.length == 6) {
+        this.rgb = rgb.match(/.{2}/g).map(function (c) {
+            return parseInt(c, 16);
+        });
+    } else if (rgb.length == 7) {
+        this.rgb = rgb.substring(1,7).match(/.{2}/g).map(function (c) {
+            return parseInt(c, 16);
+        });
+    } else {
+        this.rgb = rgb.split('').map(function (c) {
+            return parseInt(c + c, 16);
+        });
+    }
+    this.alpha = typeof(a) === 'number' ? a : 1;
+};
+Color.prototype = {
+    eval: function () { return this; },
+
+    //
+    // If we have some transparency, the only way to represent it
+    // is via `rgba`. Otherwise, we use the hex representation,
+    // which has better compatibility with older browsers.
+    // Values are capped between `0` and `255`, rounded and zero-padded.
+    //
+    toCSS: function () {
+        if (this.alpha < 1.0) {
+            return "rgba(" + this.rgb.map(function (c) {
+                return Math.round(c);
+            }).concat(this.alpha).join(', ') + ")";
+        } else {
+            return '#' + this.rgb.map(function (i) {
+                i = Math.round(i);
+                i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16);
+                return i.length === 1 ? '0' + i : i;
+            }).join('');
+        }
+    },
+
+    toHSL: function () {
+        var r = this.rgb[0] / 255,
+            g = this.rgb[1] / 255,
+            b = this.rgb[2] / 255,
+            a = this.alpha;
+
+        var max = Math.max(r, g, b), min = Math.min(r, g, b);
+        var h, s, l = (max + min) / 2, d = max - min;
+
+        if (max === min) {
+            h = s = 0;
+        } else {
+            s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
+
+            switch (max) {
+                case r: h = (g - b) / d + (g < b ? 6 : 0); break;
+                case g: h = (b - r) / d + 2;               break;
+                case b: h = (r - g) / d + 4;               break;
+            }
+            h /= 6;
+        }
+        return { h: h * 360, s: s, l: l, a: a };
+    },
+
+    toARGB: function () {
+        var argb = [Math.round(this.alpha * 255)].concat(this.rgb);
+        return '#' + argb.map(function (i) {
+            i = Math.round(i);
+            i = (i > 255 ? 255 : (i < 0 ? 0 : i)).toString(16);
+            return i.length === 1 ? '0' + i : i;
+        }).join('');
+    },
+
+    mix: function (color2, weight) {
+        var color1 = this;
+
+        var p = weight;
+        var w = p * 2 - 1;
+        var a = color1.toHSL().a - color2.toHSL().a;
+
+        var w1 = (((w * a == -1) ? w : (w + a) / (1 + w * a)) + 1) / 2.0;
+        var w2 = 1 - w1;
+
+        var rgb = [color1.rgb[0] * w1 + color2.rgb[0] * w2,
+                   color1.rgb[1] * w1 + color2.rgb[1] * w2,
+                   color1.rgb[2] * w1 + color2.rgb[2] * w2];
+
+        var alpha = color1.alpha * p + color2.alpha * (1 - p);
+
+        return new Color(rgb, alpha);
+    }
+};
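+
+/* Illustrative usage (comments only, not executed):
+ *   new Color('ff8c00').toCSS()                                 // => '#ff8c00'
+ *   new Color([255, 0, 0], 0.5).toCSS()                         // => 'rgba(255, 0, 0, 0.5)'
+ *   new Color('000000').mix(new Color('ffffff'), 0.5).toCSS()   // => even blend, '#808080'
+ */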
+
+
+// End colors from less.js
+
+var LinearRamp = function( start_color, end_color, start_value, end_value ) {
+    /**
+     * Simple linear gradient
+     */
+    this.start_color = new Color( start_color );
+    this.end_color = new Color( end_color );
+    this.start_value = start_value;
+    this.end_value = end_value;
+    this.value_range = end_value - start_value;
+};
+
+LinearRamp.prototype.map_value = function( value ) {
+    value = Math.max( value, this.start_value );
+    value = Math.min( value, this.end_value );
+    value = ( value - this.start_value ) / this.value_range;
+    // HACK: just red for now
+    // return "hsl(0,100%," + (value * 100) + "%)"
+    return this.start_color.mix( this.end_color, 1 - value ).toCSS();
+};
+
+var SplitRamp = function( start_color, middle_color, end_color, start_value, end_value ) {
+    /**
+     * Two gradients split away from 0
+     */
+    this.positive_ramp = new LinearRamp( middle_color, end_color, 0, end_value );
+    this.negative_ramp = new LinearRamp( middle_color, start_color, 0, -start_value );
+    this.start_value = start_value;
+    this.end_value = end_value;
+};
+
+SplitRamp.prototype.map_value = function( value ) {
+    value = Math.max( value, this.start_value );
+    value = Math.min( value, this.end_value );
+    if ( value >= 0 ) {
+        return this.positive_ramp.map_value( value );
+    } else {
+        return this.negative_ramp.map_value( -value );
+    }
+};
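+
+/* Illustrative example: a white-to-red ramp over [0, 10] maps the midpoint to an
+ * even blend:
+ *   new LinearRamp('FFFFFF', 'FF0000', 0, 10).map_value(5)  // => '#ff8080'
+ * SplitRamp composes two LinearRamps around 0 so that negative values fade from
+ * middle_color toward start_color and positive values toward end_color.
+ */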
+
+var DiagonalHeatmapPainter = function(data, view_start, view_end, prefs, mode) {
+    Painter.call( this, data, view_start, view_end, prefs, mode );
+    var i, len;
+
+    if ( this.prefs.min_value === undefined ) {
+        var min_value = Infinity;
+        for (i = 0, len = this.data.length; i < len; i++) {
+            min_value = Math.min( min_value, this.data[i][6] );
+        }
+        this.prefs.min_value = min_value;
+    }
+    if ( this.prefs.max_value === undefined ) {
+        var max_value = -Infinity;
+        for (i = 0, len = this.data.length; i < len; i++) {
+            max_value = Math.max( max_value, this.data[i][6] );
+        }
+        this.prefs.max_value = max_value;
+    }
+};
+
+DiagonalHeatmapPainter.prototype.default_prefs = {
+    min_value: undefined,
+    max_value: undefined,
+    mode: "Heatmap",
+    pos_color: "#FF8C00",
+    neg_color: "#4169E1"
+};
+
+DiagonalHeatmapPainter.prototype.draw = function(ctx, width, height, w_scale) {
+    var
+        min_value = this.prefs.min_value,
+        max_value = this.prefs.max_value,
+        value_range = max_value - min_value,
+        height_px = height,
+        view_start = this.view_start,
+        mode = this.mode,
+        data = this.data,
+        invsqrt2 = 1 / Math.sqrt(2);
+
+    var ramp = ( new SplitRamp( this.prefs.neg_color, "#FFFFFF", this.prefs.pos_color, min_value, max_value ) );
+
+    var d, s1, e1, s2, e2, value;
+
+    var scale = function( p ) { return ( p - view_start ) * w_scale; };
+
+    ctx.save();
+
+    // Draw into triangle, then rotate and scale
+    ctx.rotate(-45 * Math.PI / 180);
+    ctx.scale( invsqrt2, invsqrt2 );
+
+    // Paint track.
+    for (var i = 0, len = data.length; i < len; i++) {
+        d = data[i];
+
+        s1 = scale( d[1] );
+        e1 = scale( d[2] );
+        s2 = scale( d[4] );
+        e2 = scale( d[5] );
+        value = d[6];
+
+        ctx.fillStyle = ( ramp.map_value( value ) );
+        ctx.fillRect( s1, s2, ( e1 - s1 ), ( e2 - s2 ) );
+    }
+
+    ctx.restore();
+};
+
+/**
+ * Utilities for painting reads.
+ */
+var ReadPainterUtils = function(ctx, row_height, px_per_base, mode) {
+    this.ctx = ctx;
+    this.row_height = row_height;
+    this.px_per_base = px_per_base;
+    this.draw_details = (mode === 'Pack' || mode === 'Auto') && (px_per_base >= ctx.canvas.manager.char_width_px);
+    this.delete_details_thickness = 0.2;
+};
+
+_.extend(ReadPainterUtils.prototype, {
+    /**
+     * Draw deletion of base(s) as a horizontal bar. When drawing in detail
+     * (this.draw_details is true), the deletion is drawn more subtly as a thinner bar.
+     * @param x x coordinate to start drawing at
+     * @param y y coordinate to start drawing at
+     * @param len length of deletion in bases
+     */
+    draw_deletion: function(x, y, len) {
+        this.ctx.fillStyle = "black";
+        var thickness = (this.draw_details ? this.delete_details_thickness : 1) * this.row_height;
+        y += 0.5 * ( this.row_height - thickness );
+        this.ctx.fillRect(x, y, len * this.px_per_base, thickness);
+    }
+});
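+
+/* Illustrative example (assumes the canvas manager reports a char width below the
+ * given px_per_base): in Pack mode with row_height 10 and px_per_base 12,
+ *   new ReadPainterUtils(ctx, 10, 12, 'Pack').draw_deletion(100, 0, 3);
+ * draws a thin 2px-high bar (delete_details_thickness * row_height) centered in the
+ * row; zoomed out, the bar spans the full row height instead.
+ */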
+
+/**
+ * Paints variant data onto canvas.
+ */
+var VariantPainter = function(data, view_start, view_end, prefs, mode, base_color_fn) {
+    Painter.call(this, data, view_start, view_end, prefs, mode);
+    this.base_color_fn = base_color_fn;
+    this.divider_height = 1;
+};
+
+_.extend(VariantPainter.prototype, Painter.prototype, {
+    /**
+     * Height of a single row, depends on mode
+     */
+    get_row_height: function() {
+        var mode = this.mode, height;
+        if (mode === "Dense") {
+            height = DENSE_TRACK_HEIGHT;
+        }
+        else if (mode === "Squish") {
+            height = SQUISH_TRACK_HEIGHT;
+        }
+        else { // mode === "Pack"
+            height = PACK_TRACK_HEIGHT;
+        }
+        return height;
+    },
+
+    /**
+     * Returns required height to draw a particular number of samples in a given mode.
+     */
+    get_required_height: function(num_samples) {
+        // FIXME: for single-sample data, height should be summary_height when zoomed out and
+        // row_height when zoomed in.
+        var height = this.prefs.summary_height;
+
+        // If showing sample data, height is summary + divider + samples.
+        if (num_samples > 1 && this.prefs.show_sample_data) {
+            height += this.divider_height + num_samples * this.get_row_height();
+        }
+        return height;
+    },
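+
+    /* Illustrative example (assumes summary_height 30 and a Pack row height of 10):
+     * with show_sample_data enabled, 5 samples require 30 + 1 + 5 * 10 = 81px.
+     */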
+
+    /**
+     * Draw on the context using a rectangle of width x height with scale w_scale.
+     */
+    draw: function(ctx, width, height, w_scale) {
+        ctx.save();
+
+        var
+        /**
+         * Returns dictionary of information about an indel; returns an empty object if there
+         * is no indel. Assumes indel is left-aligned.
+         * Dict attributes:
+         *    -type: 'insertion' or 'deletion'
+         *    -start: where the indel starts relative to reference start
+         *    -len: how long the indel is
+         */
+        get_indel_info = function(ref, alt) {
+            var ref_len = ref.length,
+                alt_len = alt.length,
+                start = 0,
+                len = 1,
+                type = null;
+            if (alt === '-') {
+                type = 'deletion';
+                len = ref.length;
+            }
+            else if (ref.indexOf(alt) === 0 && ref_len > alt_len) {
+                type = 'deletion';
+                len = ref_len - alt_len;
+                start = alt_len;
+            }
+            else if (alt.indexOf(ref) === 0 && ref_len < alt_len) {
+                // Insertion: inserted bases follow the matching reference prefix.
+                type = 'insertion';
+                len = alt_len - ref_len;
+                start = ref_len;
+            }
+
+            return ( type !== null ? { type: type, start: start, len: len } : {} );
+        };
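+
+        /* Illustrative example with left-aligned, VCF-style ref/alt values:
+         *   get_indel_info('AAC', 'A')  // => { type: 'deletion',  start: 1, len: 2 }
+         *   get_indel_info('A', 'AGG')  // => { type: 'insertion', start: 1, len: 2 }
+         *   get_indel_info('A', 'C')    // => {} (SNP, not an indel)
+         */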
+
+        // Draw.
+        var locus_data,
+            pos,
+            id,
+            ref,
+            alt,
+            qual,
+            filter,
+            sample_gts,
+            allele_counts,
+            variant,
+            draw_x_start,
+            draw_y_start,
+            genotype,
+            // Always draw variants at least 1 pixel wide.
+            base_px = Math.max(1, Math.floor(w_scale)),
+            // Determine number of samples.
+            num_samples = (this.data.length ? this.data[0][7].split(',').length : 0),
+            row_height = (this.mode === 'Squish' ? SQUISH_TRACK_HEIGHT : PACK_TRACK_HEIGHT),
+            // If zoomed out, fill the whole row with feature to make it easier to read;
+            // when zoomed in, use feature height so that there are gaps in sample rows.
+            feature_height = (w_scale < 0.1 ?
+                              row_height :
+                              (this.mode === 'Squish' ? SQUISH_FEATURE_HEIGHT : PACK_FEATURE_HEIGHT)
+                             ),
+            draw_summary = true,
+            paint_utils = new ReadPainterUtils(ctx, row_height, w_scale, this.mode),
+            j;
+
+        // If there's a single sample, update drawing variables.
+        if (num_samples === 1) {
+            row_height = feature_height =
+                (w_scale < ctx.canvas.manager.char_width_px ? this.prefs.summary_height : row_height);
+            paint_utils.row_height = row_height;
+            // No summary when there's a single sample.
+            draw_summary = false;
+        }
+
+        // Draw divider between summary and samples.
+        if (this.prefs.show_sample_data && draw_summary) {
+            ctx.fillStyle = '#F3F3F3';
+            ctx.globalAlpha = 1;
+            ctx.fillRect(0, this.prefs.summary_height - this.divider_height, width, this.divider_height);
+        }
+
+        // Draw variants.
+        ctx.textAlign = "center";
+        for (var i = 0; i < this.data.length; i++) {
+            // Get locus data.
+            locus_data = this.data[i];
+            pos = locus_data[1];
+            ref = locus_data[3];
+            alt = [ locus_data[4].split(',') ];
+            sample_gts = locus_data[7].split(',');
+            allele_counts = locus_data.slice(8);
+
+            // Process alternate values to derive information about each alt.
+            alt = _.map(_.flatten(alt), function(a) {
+                var alt_info = {
+                        type: 'snp',
+                        value: a,
+                        start: 0
+                    },
+                    indel_info = get_indel_info(ref, a);
+
+                return _.extend(alt_info, indel_info);
+            });
+
+            // Only draw locus data if it's in viewing region.
+            if (pos < this.view_start || pos > this.view_end) {
+                continue;
+            }
+
+            // Draw summary for alleles.
+            if (draw_summary) {
+                ctx.globalAlpha = 1;
+                for (j = 0; j < alt.length; j++) {
+                    // Draw background for summary.
+                    ctx.fillStyle = '#999999';
+                    draw_x_start = this.get_start_draw_pos(pos + alt[j].start, w_scale);
+                    ctx.fillRect(draw_x_start, 0, base_px, this.prefs.summary_height);
+                    draw_y_start = this.prefs.summary_height;
+                    // Draw allele fractions onto summary; use a separate index so the
+                    // outer loop variable is not clobbered.
+                    for (var k = 0; k < alt.length; k++) {
+                        ctx.fillStyle = ( alt[k].type === 'deletion' ? 'black' : this.base_color_fn(alt[k].value) );
+                        var allele_frac = allele_counts[k] / sample_gts.length,
+                            bar_height = Math.ceil(this.prefs.summary_height * allele_frac);
+                        ctx.fillRect(draw_x_start, draw_y_start - bar_height, base_px, bar_height);
+                        draw_y_start -= bar_height;
+                    }
+                }
+            }
+
+            // Done drawing if not showing samples data.
+            if (!this.prefs.show_sample_data) { continue; }
+
+            // Draw sample genotype(s).
+            draw_y_start = (draw_summary ? this.prefs.summary_height + this.divider_height : 0);
+            for (j = 0; j < sample_gts.length; j++, draw_y_start += row_height) {
+                genotype = (sample_gts[j] ? sample_gts[j].split(/\/|\|/) : ['0', '0']);
+
+                // Get variant to draw and set drawing properties.
+                variant = null;
+                if (genotype[0] === genotype[1]) {
+                    if (genotype[0] === '.') {
+                        // TODO: draw uncalled variant.
+                    }
+                    else if (genotype[0] !== '0') {
+                        // Homozygous for variant.
+                        variant = alt[ parseInt(genotype[0], 10) - 1 ];
+                        ctx.globalAlpha = 1;
+                    }
+                    // else reference
+                }
+                else { // Heterozygous for variant.
+                    variant = (genotype[0] !== '0' ? genotype[0] : genotype[1]);
+                    variant = alt[ parseInt(variant, 10) - 1 ];
+                    ctx.globalAlpha = 0.5;
+                }
+
+                // If there's a variant, draw it.
+                if (variant) {
+                    draw_x_start = this.get_start_draw_pos(pos + variant.start, w_scale);
+                    if (variant.type === 'snp') {
+                        var snp = variant.value;
+                        ctx.fillStyle = this.base_color_fn(snp);
+                        if (paint_utils.draw_details) {
+                            ctx.fillText(snp, this.get_draw_pos(pos, w_scale), draw_y_start + row_height);
+                        }
+                        else {
+                            ctx.fillRect(draw_x_start, draw_y_start + 1, base_px, feature_height);
+                        }
+                    }
+                    else if (variant.type === 'deletion') {
+                        paint_utils.draw_deletion(draw_x_start, draw_y_start + 1, variant.len);
+                    }
+                    else {
+                        // TODO: handle insertions.
+                    }
+                }
+            }
+        }
+
+        ctx.restore();
+    }
+});
+
+return {
+    Scaler: Scaler,
+    LinePainter: LinePainter,
+    LinkedFeaturePainter: LinkedFeaturePainter,
+    ReadPainter: ReadPainter,
+    ArcLinkedFeaturePainter: ArcLinkedFeaturePainter,
+    DiagonalHeatmapPainter: DiagonalHeatmapPainter,
+    VariantPainter: VariantPainter
+};
+
+
+});
diff --git a/client/galaxy/scripts/viz/trackster/slotting.js b/client/galaxy/scripts/viz/trackster/slotting.js
new file mode 100644
index 0000000..4d3e1a7
--- /dev/null
+++ b/client/galaxy/scripts/viz/trackster/slotting.js
@@ -0,0 +1,204 @@
+define( ["libs/underscore"], function( _ ) {
+
+var extend = _.extend;
+
+// HACK: LABEL_SPACING is currently duplicated between here and painters
+var LABEL_SPACING = 2,
+    PACK_SPACING = 5;
+
+/**
+ * Hold slotting information for a feature.
+ */
+var SlottedInfo = function(slot, feature) {
+    this.slot = slot;
+    this.feature = feature;
+};
+
+/**
+ * FeatureSlotter determines slots in which to draw features for vertical
+ * packing.
+ *
+ * This implementation is incremental: any feature assigned a slot is retained
+ * when slotting future features.
+ */
+var FeatureSlotter = function (w_scale, mode, max_rows, measureText) {
+    this.slots = {};
+    this.start_end_dct = {};
+    this.w_scale = w_scale;
+    this.mode = mode;
+    this.include_label = (mode === "Pack");
+    this.max_rows = max_rows;
+    this.measureText = measureText;
+};
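+
+/* Illustrative usage (hypothetical values; features are [uid, start, end, name]):
+ *   var slotter = new FeatureSlotter(0.1, "Pack", 100,
+ *                                    function(text) { return ctx.measureText(text); });
+ *   var num_rows = slotter.slot_features([ [1, 100, 500, 'a'], [2, 300, 700, 'b'] ]);
+ *   // Overlapping features land in different slots; slotter.slots[uid].slot is the row.
+ */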
+
+/**
+ * Slot a set of features: `this.slots` is updated with slots by id, and
+ * the largest slot required for the passed set of features is returned.
+ */
+extend( FeatureSlotter.prototype, {
+    /**
+     * Get drawing coordinate for a feature.
+     */
+    _get_draw_coords: function(feature) {
+        // Get initial draw coordinates using w_scale.
+        var draw_start = Math.floor(feature[1] * this.w_scale),
+            draw_end = Math.ceil(feature[2] * this.w_scale),
+            f_name = feature[3],
+            text_align;
+
+        // Update start, end drawing locations to include feature name.
+        // Try to put the name on the left, if not, put on right.
+        if (f_name !== undefined && this.include_label ) {
+            // Add gap for label spacing and extra pack space padding
+            // TODO: Fix constants
+            var text_len = this.measureText(f_name).width + (LABEL_SPACING + PACK_SPACING);
+            if (draw_start - text_len >= 0) {
+                draw_start -= text_len;
+                text_align = "left";
+            } else {
+                draw_end += text_len;
+                text_align = "right";
+            }
+        }
+
+        /*
+        if (slot_num < 0) {
+            
+            TODO: this is not yet working --
+            console.log(feature_uid, "looking for slot with text on the right");
+            // Slot not found. If text was on left, try on right and see
+            // if slot can be found.
+            // TODO: are there any checks we need to do to ensure that text
+            // will fit on tile?
+            if (text_align === "left") {
+                draw_start -= text_len;
+                draw_end -= text_len;
+                text_align = "right";
+                slot_num = find_slot(draw_start, draw_end);
+            }
+            if (slot_num >= 0) {
+                console.log(feature_uid, "found slot with text on the right");
+            }
+
+        }
+        */
+
+        return [draw_start, draw_end];
+    },
+
+    /**
+     * Find the first slot such that current feature doesn't overlap any other features in that slot.
+     * Returns -1 if no slot was found.
+     */
+    _find_slot: function(draw_coords) {
+        // TODO: Use a data structure for faster searching of available slots.
+        var draw_start = draw_coords[0],
+            draw_end = draw_coords[1];
+        for (var slot_num = 0; slot_num <= this.max_rows; slot_num++) {
+            var has_overlap = false,
+                slot = this.start_end_dct[slot_num];
+            if (slot !== undefined) {
+                // Iterate through features already in slot to see if current feature will fit.
+                for (var k = 0, k_len = slot.length; k < k_len; k++) {
+                    var s_e = slot[k];
+                    if (draw_end > s_e[0] && draw_start < s_e[1]) {
+                        // There is overlap
+                        has_overlap = true;
+                        break;
+                    }
+                }
+            }
+            if (!has_overlap) {
+                return slot_num;
+            }
+        }
+        return -1;
+    },
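+
+    /* Illustrative example: with this.start_end_dct = { 0: [ [0, 50] ] }, calling
+     * _find_slot([40, 80]) skips slot 0 (overlaps [0, 50]) and returns slot 1.
+     */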
+
+    /**
+     * Slot features.
+     */
+    slot_features: function( features ) {
+        var start_end_dct = this.start_end_dct,
+            undone = [], 
+            highest_slot = 0,
+            feature,
+            feature_uid;
+
+        // Loop through features to (a) find those that are not yet slotted and (b) update
+        // those that are slotted if new information is availabe. For (a), features already
+        // slotted (based on slotting from other tiles) will retain their current slot.
+        for (var i = 0, len = features.length; i < len; i++) {
+            feature = features[i];
+            feature_uid = feature[0];
+            var slotted_info = this.slots[feature_uid];
+
+            // Separate and handle slotted vs. unslotted features.
+            if (slotted_info) {
+                // Feature is slotted; if feature now has larger start/end coordinates,
+                // update drawing coordinates.
+                if (feature[1] < slotted_info.feature[1]  || slotted_info.feature[2] < feature[2]) {
+                    // Feature has changed (e.g. a single read now has its pair), so recalculate its 
+                    // drawing coordinates.
+                    var old_draw_coords = this._get_draw_coords(slotted_info.feature),
+                        new_draw_coords = this._get_draw_coords(feature),
+                        slotted_coords = this.start_end_dct[slotted_info.slot];
+                    for (var k = 0; k < slotted_coords.length; k++) {
+                        var dc = slotted_coords[k];
+                        if (dc[0] === old_draw_coords[0] && dc[1] === old_draw_coords[1]) {
+                            // Replace old drawing coordinates with new ones.
+                            slotted_coords[k] = new_draw_coords;
+                        }
+                    }
+                }
+                highest_slot = Math.max(highest_slot, this.slots[feature_uid].slot);
+            } 
+            else {
+                undone.push(i);
+            }
+        }
+        
+        // Slot unslotted features.
+        for (var i = 0, len = undone.length; i < len; i++) {
+            feature = features[undone[i]];
+            feature_uid = feature[0];
+            var draw_coords = this._get_draw_coords(feature);
+                        
+            // Find slot.
+            var slot_num = this._find_slot(draw_coords);
+
+            // Do slotting.
+            if (slot_num >= 0) {
+                // Add current feature to slot.
+                if (start_end_dct[slot_num] === undefined) {
+                    start_end_dct[slot_num] = [];
+                }
+                start_end_dct[slot_num].push(draw_coords);
+                this.slots[feature_uid] = new SlottedInfo(slot_num, feature);
+                highest_slot = Math.max(highest_slot, slot_num);
+            }
+        }
+        
+        // Debugging: view slots data.
+        /*
+        for (var i = 0; i < MAX_FEATURE_DEPTH; i++) {
+            var slot = start_end_dct[i];
+            if (slot !== undefined) {
+                console.log(i, "*************");
+                for (var k = 0, k_len = slot.length; k < k_len; k++) {
+                    console.log("\t", slot[k][0], slot[k][1]);
+                }
+            }
+        }
+        */
+        return highest_slot + 1;
+    }
+});
+
+return {
+    FeatureSlotter: FeatureSlotter
+};
+
+});
diff --git a/client/galaxy/scripts/viz/trackster/tracks.js b/client/galaxy/scripts/viz/trackster/tracks.js
new file mode 100644
index 0000000..7807c2c
--- /dev/null
+++ b/client/galaxy/scripts/viz/trackster/tracks.js
@@ -0,0 +1,4313 @@
+define([
+    "libs/underscore",
+    "viz/visualization",
+    "viz/viz_views",
+    "viz/trackster/util",
+    "viz/trackster/slotting",
+    "viz/trackster/painters",
+    "viz/trackster/filters",
+    "mvc/dataset/data",
+    "mvc/tool/tools",
+    "utils/config",
+    "viz/bbi-data-manager",
+    "ui/editable-text",
+], function(_, visualization, viz_views, util, slotting, painters, filters_mod, data, tools_mod, config_mod, bbi) {
+
+
+var extend = _.extend;
+
+// ---- Web UI specific utilities ----
+
+/**
+ * Dictionary of HTML element-JavaScript object relationships.
+ */
+// TODO: probably should separate moveable objects from containers.
+var html_elt_js_obj_dict = {};
+
+/**
+ * Designates an HTML element as a container.
+ */
+var is_container = function(element, obj) {
+    html_elt_js_obj_dict[element.attr("id")] = obj;
+};
+
+/**
+ * Make `element` moveable within parent and sibling elements by dragging `handle` (a selector).
+ * Function manages JS objects, containers as well.
+ *
+ * @param element HTML element to make moveable
+ * @param handle_class classname that denotes HTML element to be used as handle
+ * @param container_selector selector used to identify possible containers for this element
+ * @param element_js_obj JavaScript object associated with element; used to keep
+ *        object-container relationships in sync as the element moves
+ */
+var moveable = function(element, handle_class, container_selector, element_js_obj) {
+    // HACK: set default value for container selector.
+    container_selector = ".group";
+
+    // Register element with its object.
+    html_elt_js_obj_dict[element.attr("id")] = element_js_obj;
+
+    // Need to provide selector for handle, not class.
+    element.bind( "drag", { handle: "." + handle_class, relative: true }, function ( e, d ) {
+        var element = $(this),
+            parent = $(this).parent(),
+            // Only sorting amongst tracks and groups.
+            children = parent.children('.track,.group'),
+            this_obj = html_elt_js_obj_dict[$(this).attr("id")],
+            child,
+            container,
+            top,
+            bottom,
+            i;
+
+        //
+        // Enable three types of dragging: (a) out of container; (b) into container;
+        // (c) sibling movement, aka sorting. Handle in this order for simplicity.
+        //
+
+        // Handle dragging out of container.
+        container = $(this).parents(container_selector);
+        if (container.length !== 0) {
+            top = container.position().top;
+            bottom = top + container.outerHeight();
+            var cur_container = html_elt_js_obj_dict[container.attr("id")];
+            if (d.offsetY < top) {
+                // Moving above container.
+                $(this).insertBefore(container);
+                cur_container.remove_drawable(this_obj);
+                cur_container.container.add_drawable_before(this_obj, cur_container);
+                return;
+            }
+            else if (d.offsetY > bottom) {
+                // Moving below container.
+                $(this).insertAfter(container);
+                cur_container.remove_drawable(this_obj);
+                cur_container.container.add_drawable(this_obj);
+                return;
+            }
+        }
+
+        // Handle dragging into container. Child is appended to container's content_div.
+        container = null;
+        for ( i = 0; i < children.length; i++ ) {
+            child = $(children.get(i));
+            top = child.position().top;
+            bottom = top + child.outerHeight();
+            // Dragging into container if child is a container and offset is inside container.
+            if ( child.is(container_selector) && this !== child.get(0) &&
+                 d.offsetY >= top && d.offsetY <= bottom ) {
+                // Append/prepend based on where offsetY is closest to and return.
+                if (d.offsetY - top < bottom - d.offsetY) {
+                    child.find(".content-div").prepend(this);
+                }
+                else {
+                    child.find(".content-div").append(this);
+                }
+                // Update containers. Object may not have container if it is being moved quickly.
+                if (this_obj.container) {
+                    this_obj.container.remove_drawable(this_obj);
+                }
+                html_elt_js_obj_dict[child.attr("id")].add_drawable(this_obj);
+                return;
+            }
+        }
+
+        // Handle sibling movement, aka sorting.
+
+        // Determine new position
+        for ( i = 0; i < children.length; i++ ) {
+            child = $(children.get(i));
+            if ( d.offsetY < child.position().top &&
+                 // Cannot move tracks above reference track or intro div.
+                 !(child.hasClass("reference-track") || child.hasClass("intro")) ) {
+                break;
+            }
+        }
+
+        // If not already in the right place, move. Need
+        // to handle the end specially since we don't have
+        // insert at index
+        if ( i === children.length ) {
+            if ( this !== children.get(i - 1) ) {
+                parent.append(this);
+                html_elt_js_obj_dict[parent.attr("id")].move_drawable(this_obj, i);
+            }
+        }
+        else if ( this !== children.get(i) ) {
+            $(this).insertBefore( children.get(i) );
+            // Need to adjust insert position if moving down because move is changing
+            // indices of all list items.
+            html_elt_js_obj_dict[parent.attr("id")].move_drawable(this_obj, (d.deltaY > 0 ? i-1 : i) );
+        }
+    }).bind("dragstart", function() {
+        $(this).addClass('dragging');
+    }).bind("dragend", function() {
+        $(this).removeClass('dragging');
+    });
+};
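+
+/* Illustrative usage (hypothetical element id and handle class):
+ *   moveable($('#track_12'), 'draghandle', '.group', track_obj);
+ *   // As '#track_12' is dragged, track_obj's container membership is kept in sync.
+ */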
+
+/**
+ * Init constants & functions used throughout trackster.
+ */
+var
+    // Padding at the top of tracks for error messages
+    ERROR_PADDING = 20,
+    // Maximum number of rows in a slotted track
+    MAX_FEATURE_DEPTH = 100,
+    // Minimum width for window for squish to be used.
+    MIN_SQUISH_VIEW_WIDTH = 12000,
+    // Number of pixels per tile, not including left offset.
+    TILE_SIZE = 400,
+    DEFAULT_DATA_QUERY_WAIT = 5000,
+    // Maximum number of chromosomes that are selectable at any one time.
+    MAX_CHROMS_SELECTABLE = 100,
+    DATA_ERROR = "Cannot display dataset due to an error. ",
+    DATA_NOCONVERTER = "A converter for this dataset is not installed. Please check your datatypes_conf.xml file.",
+    DATA_NONE = "No data for this chrom/contig.",
+    DATA_PENDING = "Preparing data. This can take a while for a large dataset. " +
+                   "If the visualization is saved and closed, preparation will continue in the background.",
+    DATA_CANNOT_RUN_TOOL = "Tool cannot be rerun: ",
+    DATA_LOADING = "Loading data...",
+    DATA_OK = "Ready for display",
+    TILE_CACHE_SIZE = 10,
+    DATA_CACHE_SIZE = 20,
+
+    // Numerical/continuous data display modes.
+    CONTINUOUS_DATA_MODES = ["Histogram", "Line", "Filled", "Intensity"];
+
+/**
+ * Round a number to a given number of decimal places.
+ */
+function round(num, places) {
+    // Default rounding is to integer.
+    if (!places) {
+        places = 0;
+    }
+
+    var val = Math.pow(10, places);
+    return Math.round(num * val) / val;
+}
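+
+// Illustrative examples: round(3.14159, 2) => 3.14; round(3.7) => 4 (default is integer).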
+
+/**
+ * Check if a server can do byte range requests.
+ */
+function supportsByteRanges(url) {
+    var promise = $.Deferred();
+    $.ajax({
+        type: 'HEAD',
+        url: url,
+        beforeSend: function(xhr) {
+            xhr.setRequestHeader("Range", "bytes=0-10");
+        },
+        success: function(result, status, xhr) {
+            // 206 Partial Content indicates that byte ranges are supported.
+            promise.resolve(xhr.status === 206);
+        },
+        error: function() {
+            // On failure, resolve to false rather than leaving the promise pending.
+            promise.resolve(false);
+        }
+    });
+
+    return promise;
+}
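+
+/* Illustrative usage (hypothetical URL):
+ *   supportsByteRanges('/datasets/abc123/display').then(function(supported) {
+ *       // supported is true only if the server answered 206 Partial Content.
+ *   });
+ */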
+
+/**
+ * Drawables hierarchy:
+ *
+ * Drawable
+ *    --> DrawableCollection
+ *        --> DrawableGroup
+ *        --> View
+ *    --> Track
+ */
+
+/**
+ * Base class for all drawable objects. Drawable objects are associated with a view and live in a
+ * container. They have the following HTML elements and structure:
+ *  <container_div>
+ *      <header_div>
+ *      <content_div>
+ *
+ * They optionally have a drag handle class.
+ */
+var Drawable = function(view, container, obj_dict) {
+    if (!Drawable.id_counter) { Drawable.id_counter = 0; }
+    this.id = Drawable.id_counter++;
+    this.view = view;
+    this.container = container;
+    this.drag_handle_class = obj_dict.drag_handle_class;
+    this.is_overview = false;
+    this.action_icons = {};
+
+    // -- Set up drawable configuration. --
+    this.config = config_mod.ConfigSettingCollection.from_models_and_saved_values(this.config_params, obj_dict.prefs);
+
+    // If there's no saved name, use object name.
+    if (!this.config.get_value('name')) {
+        this.config.set_value('name', obj_dict.name);
+    }
+    if (this.config_onchange) {
+        this.config.on('change', this.config_onchange, this);
+    }
+
+    // Build Drawable HTML and behaviors.
+    this.container_div = this.build_container_div();
+    this.header_div = null;
+
+    // Header creation is opt-out because most drawables have headers:
+    // unless the flag is explicitly set to false, create a header.
+    if (obj_dict.header !== false) {
+        var header_view = new viz_views.TrackHeaderView({
+            model: this,
+            id: this.id
+        });
+
+        this.header_div = header_view.$el;
+        this.container_div.append(this.header_div);
+
+        // Show icons when the user is hovering over the track.
+        var icons_div = header_view.icons_div;
+        this.action_icons = header_view.action_icons;
+        this.container_div.hover(
+            function() { icons_div.show(); }, function() { icons_div.hide(); }
+        );
+    }
+};
+
+Drawable.prototype.action_icons_def = [
+    // Hide/show drawable content.
+    // FIXME: make this an odict for easier lookup.
+    {
+        name: "toggle_icon",
+        title: "Hide/show content",
+        css_class: "toggle",
+        on_click_fn: function(drawable) {
+            if ( drawable.config.get_value('content_visible') ) {
+                drawable.action_icons.toggle_icon.addClass("toggle-expand").removeClass("toggle");
+                drawable.hide_contents();
+                drawable.config.set_value('content_visible', false);
+            }
+            else {
+                drawable.action_icons.toggle_icon.addClass("toggle").removeClass("toggle-expand");
+                drawable.config.set_value('content_visible', true);
+                drawable.show_contents();
+            }
+        }
+    },
+    // Edit settings.
+    {
+        name: "settings_icon",
+        title: "Edit settings",
+        css_class: "gear",
+        on_click_fn: function(drawable) {
+            var view = new config_mod.ConfigSettingCollectionView({
+                collection: drawable.config
+            });
+            view.render_in_modal('Configure Track');
+        }
+    },
+    // Remove.
+    {
+        name: "remove_icon",
+        title: "Remove",
+        css_class: "remove-icon",
+        on_click_fn: function(drawable) {
+            // Tooltip for remove icon must be deleted when drawable is deleted.
+            $(".tooltip").remove();
+            drawable.remove();
+        }
+    }
+];
+
+extend(Drawable.prototype, {
+    config_params: [
+        { key: 'name', label: 'Name', type: 'text', default_value: '' },
+        { key: 'content_visible', type: 'bool', default_value: true, hidden: true }
+    ],
+
+    config_onchange: function() {},
+
+    init: function() {},
+
+    changed: function() {
+        this.view.changed();
+    },
+
+    can_draw: function() {
+        if (this.enabled && this.config.get_value('content_visible')) {
+            return true;
+        }
+
+        return false;
+    },
+
+    request_draw: function() {},
+
+    _draw: function(options) {},
+
+    /**
+     * Returns representation of object in a dictionary for easy saving.
+     * Use from_dict to recreate object.
+     */
+    to_dict: function() {},
+
+    /**
+     * Set drawable name.
+     */
+    set_name: function(new_name) {
+        this.old_name = this.config.get_value('name');
+        this.config.set_value('name', new_name);
+    },
+
+    /**
+     * Revert track name; currently name can be reverted only once.
+     */
+    revert_name: function() {
+        if (this.old_name) {
+            this.config.set_value('name', this.old_name);
+        }
+    },
+
+    /**
+     * Remove drawable (a) from its container and (b) from the HTML.
+     */
+    remove: function() {
+        this.changed();
+
+        this.container.remove_drawable(this);
+        var view = this.view;
+        this.container_div.hide(0, function() {
+            $(this).remove();
+            // HACK: is there a better way to update the view?
+            view.update_intro_div();
+        });
+    },
+
+    /**
+     * Build drawable's container div; this is the parent div for all drawable's elements.
+     */
+    build_container_div: function() {},
+
+    /**
+     * Update icons.
+     */
+    update_icons: function() {},
+
+    /**
+     * Hide drawable's contents.
+     */
+    hide_contents: function () {},
+
+    /**
+     * Show drawable's contents.
+     */
+    show_contents: function() {},
+
+    /**
+     * Returns a shallow copy of all drawables in this drawable.
+     */
+    get_drawables: function() {}
+});
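+// Minimal subclass sketch (illustrative only; MyDrawable is a hypothetical name).
+// Concrete drawables chain the constructor and override the stubs above:
+//
+//   var MyDrawable = function(view, container, obj_dict) {
+//       Drawable.call(this, view, container, obj_dict);
+//   };
+//   extend(MyDrawable.prototype, Drawable.prototype, {
+//       build_container_div: function() {
+//           return $("<div/>").addClass("my-drawable");
+//       }
+//   });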
+
+/**
+ * A collection of drawable objects.
+ */
+var DrawableCollection = function(view, container, obj_dict) {
+    Drawable.call(this, view, container, obj_dict);
+
+    // Attribute init.
+    this.obj_type = obj_dict.obj_type;
+    this.drawables = [];
+};
+
+extend(DrawableCollection.prototype, Drawable.prototype, {
+    /**
+     * Unpack and add drawables to the collection.
+     */
+    unpack_drawables: function(drawables_array) {
+        // Add drawables to collection.
+        this.drawables = [];
+        var drawable;
+        for (var i = 0; i < drawables_array.length; i++) {
+            drawable = object_from_template(drawables_array[i], this.view, this);
+            this.add_drawable(drawable);
+        }
+    },
+
+    /**
+     * Init each drawable in the collection.
+     */
+    init: function() {
+        for (var i = 0; i < this.drawables.length; i++) {
+            this.drawables[i].init();
+        }
+    },
+
+    /**
+     * Draw each drawable in the collection.
+     */
+    _draw: function(options) {
+        for (var i = 0; i < this.drawables.length; i++) {
+            this.drawables[i]._draw(options);
+        }
+    },
+
+    /**
+     * Returns representation of object in a dictionary for easy saving.
+     * Use from_dict to recreate object.
+     */
+    to_dict: function() {
+        var dictified_drawables = [];
+        for (var i = 0; i < this.drawables.length; i++) {
+            dictified_drawables.push(this.drawables[i].to_dict());
+        }
+        return {
+            prefs: this.config.to_key_value_dict(),
+            obj_type: this.obj_type,
+            drawables: dictified_drawables
+        };
+    },
+
+    /**
+     * Add a drawable to the end of the collection.
+     */
+    add_drawable: function(drawable) {
+        this.drawables.push(drawable);
+        drawable.container = this;
+        this.changed();
+    },
+
+    /**
+     * Add a drawable before another drawable.
+     */
+    add_drawable_before: function(drawable, other) {
+        this.changed();
+        var index = this.drawables.indexOf(other);
+        if (index !== -1) {
+            this.drawables.splice(index, 0, drawable);
+            return true;
+        }
+        return false;
+    },
+
+    /**
+     * Replace one drawable with another.
+     */
+    replace_drawable: function(old_drawable, new_drawable, update_html) {
+        var index = this.drawables.indexOf(old_drawable);
+        if (index !== -1) {
+            this.drawables[index] = new_drawable;
+            if (update_html) {
+                old_drawable.container_div.replaceWith(new_drawable.container_div);
+            }
+            this.changed();
+        }
+        return index;
+    },
+
+    /**
+     * Remove drawable from this collection.
+     */
+    remove_drawable: function(drawable) {
+        var index = this.drawables.indexOf(drawable);
+        if (index !== -1) {
+            // Found drawable to remove.
+            this.drawables.splice(index, 1);
+            drawable.container = null;
+            this.changed();
+            return true;
+        }
+        return false;
+    },
+
+    /**
+     * Move drawable to another location in collection.
+     */
+    move_drawable: function(drawable, new_position) {
+        var index = this.drawables.indexOf(drawable);
+        if (index !== -1) {
+            // Remove from current position:
+            this.drawables.splice(index, 1);
+            // insert into new position:
+            this.drawables.splice(new_position, 0, drawable);
+            this.changed();
+            return true;
+        }
+        return false;
+    },
+
+    /**
+     * Returns all drawables in this drawable.
+     */
+    get_drawables: function() {
+        return this.drawables;
+    },
+
+    /**
+     * Returns all <track_type> tracks in collection.
+     */
+    get_tracks: function(track_type) {
+        // Initialize queue with copy of drawables array.
+        var queue = this.drawables.slice(0),
+            tracks = [],
+            drawable;
+        while (queue.length !== 0) {
+            drawable = queue.shift();
+            if (drawable instanceof track_type) {
+                tracks.push(drawable);
+            }
+            else if (drawable.drawables) {
+                queue = queue.concat(drawable.drawables);
+            }
+        }
+        return tracks;
+    }
+});
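+// Illustrative collection usage (assumes `collection` and the tracks exist):
+//   collection.add_drawable(track_a);
+//   collection.add_drawable_before(track_b, track_a); // b now precedes a
+//   collection.move_drawable(track_b, 1);             // reorder by index
+//   collection.get_tracks(FeatureTrack);              // recursive search by type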
+
+/**
+ * A group of drawables that is moveable and can be shown/hidden as a unit.
+ */
+var DrawableGroup = function(view, container, obj_dict) {
+    extend(obj_dict, {
+        obj_type: "DrawableGroup",
+        drag_handle_class: "group-handle"
+    });
+    DrawableCollection.call(this, view, container, obj_dict);
+
+    // Set up containers/moving for the group: register both container_div and
+    // content_div as containers because both act as one (container_div so the group
+    // is recognized as a container, content_div to hold child elements). The group
+    // itself can be moved.
+    this.content_div = $("<div/>").addClass("content-div").attr("id", "group_" + this.id + "_content_div").appendTo(this.container_div);
+    is_container(this.container_div, this);
+    is_container(this.content_div, this);
+    moveable(this.container_div, this.drag_handle_class, ".group", this);
+
+    // Set up filters.
+    this.filters_manager = new filters_mod.FiltersManager(this);
+    this.header_div.after(this.filters_manager.parent_div);
+
+    // HACK: add div to clear floating elements.
+    this.filters_manager.parent_div.after( $("<div style='clear: both'/>") );
+
+    // For saving drawables' filter managers when group-level filtering is done:
+    this.saved_filters_managers = [];
+
+    // Add drawables.
+    if ('drawables' in obj_dict) {
+        this.unpack_drawables(obj_dict.drawables);
+    }
+
+    // Restore filters.
+    if ('filters' in obj_dict) {
+        // FIXME: passing collection_dict to DrawableCollection/Drawable would make this easier.
+        var old_manager = this.filters_manager;
+        this.filters_manager = new filters_mod.FiltersManager(this, obj_dict.filters);
+        old_manager.parent_div.replaceWith(this.filters_manager.parent_div);
+
+        if (obj_dict.filters.visible) {
+            this.setup_multitrack_filtering();
+        }
+    }
+};
+
+extend(DrawableGroup.prototype, Drawable.prototype, DrawableCollection.prototype, {
+    action_icons_def: [
+        Drawable.prototype.action_icons_def[0],
+        Drawable.prototype.action_icons_def[1],
+        // Replace group with composite track.
+        {
+            name: "composite_icon",
+            title: "Show composite track",
+            css_class: "layers-stack",
+            on_click_fn: function(group) {
+                $(".tooltip").remove();
+                group.show_composite_track();
+            }
+        },
+        // Toggle track filters.
+        {
+            name: "filters_icon",
+            title: "Filters",
+            css_class: "ui-slider-050",
+            on_click_fn: function(group) {
+                // TODO: update Tooltip text.
+                if (group.filters_manager.visible()) {
+                    // Hiding filters.
+                    group.filters_manager.clear_filters();
+                    group._restore_filter_managers();
+                    // TODO: maintain current filter by restoring and setting saved manager's
+                    // settings to current/shared manager's settings.
+                    // TODO: need to restore filter managers when moving drawable outside group.
+                }
+                else {
+                    // Showing filters.
+                    group.setup_multitrack_filtering();
+                    group.request_draw({ clear_tile_cache: true });
+                }
+                group.filters_manager.toggle();
+            }
+        },
+        Drawable.prototype.action_icons_def[2]
+    ],
+
+    build_container_div: function() {
+        var container_div = $("<div/>").addClass("group").attr("id", "group_" + this.id);
+        if (this.container) {
+            this.container.content_div.append(container_div);
+        }
+        return container_div;
+    },
+
+    hide_contents: function () {
+        this.tiles_div.hide();
+    },
+
+    show_contents: function() {
+        // Show the contents div and labels (if present)
+        this.tiles_div.show();
+        // Request a redraw of the content
+        this.request_draw();
+    },
+
+    update_icons: function() {
+        //
+        // Handle update when there are no tracks.
+        //
+        var num_drawables = this.drawables.length;
+        if (num_drawables === 0) {
+            this.action_icons.composite_icon.hide();
+            this.action_icons.filters_icon.hide();
+        }
+        else if (num_drawables === 1) {
+            this.action_icons.composite_icon.toggle(this.drawables[0] instanceof CompositeTrack);
+            this.action_icons.filters_icon.hide();
+        }
+        else { // There are 2 or more tracks.
+
+            //
+            // Determine if a composite track can be created. Current criteria:
+            // (a) all tracks are line tracks;
+            //      OR
+            // FIXME: this is not enabled right now because it has not been well tested:
+            // (b) there is a single FeatureTrack.
+            //
+
+            // All tracks the same?
+            var i, j, drawable,
+                same_type = true,
+                a_type = this.drawables[0].get_type(),
+                num_feature_tracks = 0;
+            for (i = 0; i < num_drawables; i++) {
+                drawable = this.drawables[i];
+                if (drawable.get_type() !== a_type) {
+                    same_type = false;
+                }
+                if (drawable instanceof FeatureTrack) {
+                    num_feature_tracks++;
+                }
+            }
+
+            if (same_type && this.drawables[0] instanceof LineTrack) {
+                this.action_icons.composite_icon.show();
+            }
+            else {
+                this.action_icons.composite_icon.hide();
+                $(".tooltip").remove();
+            }
+
+            //
+            // Set up group-level filtering and update filter icon.
+            //
+            if (num_feature_tracks > 1 && num_feature_tracks === this.drawables.length) {
+                //
+                // Find shared filters.
+                //
+                var shared_filters = {},
+                    filter;
+
+                // Init shared filters with filters from first drawable.
+                drawable = this.drawables[0];
+                for (j = 0; j < drawable.filters_manager.filters.length; j++) {
+                    filter = drawable.filters_manager.filters[j];
+                    shared_filters[filter.name] = [filter];
+                }
+
+                // Create lists of shared filters.
+                for (i = 1; i < this.drawables.length; i++) {
+                    drawable = this.drawables[i];
+                    for (j = 0; j < drawable.filters_manager.filters.length; j++) {
+                        filter = drawable.filters_manager.filters[j];
+                        if (filter.name in shared_filters) {
+                            shared_filters[filter.name].push(filter);
+                        }
+                    }
+                }
+
+                //
+                // Create filters for shared filters manager. Shared filters manager is group's
+                // manager.
+                //
+                this.filters_manager.remove_all();
+                var
+                    filters,
+                    new_filter,
+                    min,
+                    max;
+                for (var filter_name in shared_filters) {
+                    filters = shared_filters[filter_name];
+                    if (filters.length === num_feature_tracks) {
+                        // Add new filter.
+                        // FIXME: can filter.copy() be used?
+                        new_filter = new filters_mod.NumberFilter( {
+                                        name: filters[0].name,
+                                        index: filters[0].index
+                                        } );
+                        this.filters_manager.add_filter(new_filter);
+                    }
+                }
+
+                // Show/hide icon based on filter availability.
+                this.action_icons.filters_icon.toggle(this.filters_manager.filters.length > 0);
+            }
+            else {
+                this.action_icons.filters_icon.hide();
+            }
+        }
+    },
+
+    /**
+     * Restore individual track filter managers.
+     */
+    _restore_filter_managers: function() {
+        for (var i = 0; i < this.drawables.length; i++) {
+            this.drawables[i].filters_manager = this.saved_filters_managers[i];
+        }
+        this.saved_filters_managers = [];
+    },
+
+    /**
+     * Set up group-level (multi-track) filtering: save each track's filter
+     * manager and point all tracks at the group's shared manager.
+     */
+    setup_multitrack_filtering: function() {
+        // Save tracks' managers and set up shared manager.
+        if (this.filters_manager.filters.length > 0) {
+            // For all tracks, save current filter manager and set manager to shared (this object's) manager.
+            this.saved_filters_managers = [];
+            for (var i = 0; i < this.drawables.length; i++) {
+                var drawable = this.drawables[i];
+                this.saved_filters_managers.push(drawable.filters_manager);
+                drawable.filters_manager = this.filters_manager;
+            }
+
+            //TODO: hide filters icons for each drawable?
+        }
+        this.filters_manager.init_filters();
+    },
+
+    /**
+     * Replace group with a single composite track that includes all group's tracks.
+     */
+    show_composite_track: function() {
+        var composite_track = new CompositeTrack(this.view, this.view, {
+            name: this.config.get_value('name'),
+            drawables: this.drawables
+        });
+        this.container.replace_drawable(this, composite_track, true);
+        composite_track.request_draw();
+    },
+
+    add_drawable: function(drawable) {
+        DrawableCollection.prototype.add_drawable.call(this, drawable);
+        this.update_icons();
+    },
+
+    remove_drawable: function(drawable) {
+        DrawableCollection.prototype.remove_drawable.call(this, drawable);
+        this.update_icons();
+    },
+
+    to_dict: function() {
+        // If filters are visible, need to restore original filter managers before converting to dict.
+        if (this.filters_manager.visible()) {
+            this._restore_filter_managers();
+        }
+
+        var obj_dict = extend(DrawableCollection.prototype.to_dict.call(this), { "filters": this.filters_manager.to_dict() });
+
+        // Setup multi-track filtering again.
+        if (this.filters_manager.visible()) {
+            this.setup_multitrack_filtering();
+        }
+
+        return obj_dict;
+    },
+
+    request_draw: function(options) {
+        _.each(this.drawables, function(d) {
+            d.request_draw(options);
+        });
+    }
+});
+
+/**
+ * View object manages a trackster visualization, including tracks and user interactions.
+ * Events triggered:
+ *      navigate: triggered when the browser view changes to a new location
+ */
+var TracksterView = Backbone.View.extend({
+
+    initialize: function(obj_dict) {
+        extend(obj_dict, {
+            obj_type: "View"
+        });
+        DrawableCollection.call(this, "View", obj_dict.container, obj_dict);
+        this.chrom = null;
+        this.vis_id = obj_dict.vis_id;
+        this.dbkey = obj_dict.dbkey;
+        this.stand_alone = (obj_dict.stand_alone !== undefined ? obj_dict.stand_alone : true);
+        this.label_tracks = [];
+        this.tracks_to_be_redrawn = [];
+        this.max_low = 0;
+        this.max_high = 0;
+        this.zoom_factor = 3;
+        this.min_separation = 30;
+        this.has_changes = false;
+        // Deferred object that indicates when view's chrom data has been loaded.
+        this.load_chroms_deferred = null;
+        this.render();
+        this.canvas_manager = new visualization.CanvasManager( this.container.get(0).ownerDocument );
+        this.reset();
+
+        // Define track configuration
+        this.config = config_mod.ConfigSettingCollection.from_models_and_saved_values( [
+                { key: 'name', label: 'Name', type: 'text', default_value: '' },
+                { key: 'a_color', label: 'A Color', type: 'color', default_value: "#FF0000" },
+                { key: 'c_color', label: 'C Color', type: 'color', default_value: "#00FF00" },
+                { key: 'g_color', label: 'G Color', type: 'color', default_value: "#0000FF" },
+                { key: 't_color', label: 'T Color', type: 'color', default_value: "#FF00FF" },
+                { key: 'n_color', label: 'N Color', type: 'color', default_value: "#AAAAAA" }
+            ], { name: obj_dict.name });
+    },
+
+    render: function() {
+        // Attribute init.
+        this.requested_redraw = false;
+
+        // Create DOM elements
+        var parent_element = this.container,
+            view = this;
+        // Top container for things that are fixed at the top
+        this.top_container = $("<div/>").addClass("top-container").appendTo(parent_element);
+        // Browser content, primary tracks are contained in here
+        this.browser_content_div = $("<div/>").addClass("content").appendTo(parent_element);
+        // Bottom container for things that are fixed at the bottom
+        this.bottom_container = $("<div/>").addClass("bottom-container").appendTo(parent_element);
+        // Label track fixed at top
+        this.top_labeltrack = $("<div/>").addClass("top-labeltrack").appendTo(this.top_container);
+        // Viewport for dragging tracks in center
+        this.viewport_container = $("<div/>").addClass("viewport-container").attr("id", "viewport-container").appendTo(this.browser_content_div);
+        // Alias viewport_container as content_div so that it matches function of DrawableCollection/Group content_div.
+        this.content_div = this.viewport_container;
+        is_container(this.viewport_container, view);
+        // Introduction div shown when there are no tracks.
+        this.intro_div = $("<div/>").addClass("intro").appendTo(this.viewport_container);
+        var add_tracks_button = $("<div/>").text("Add Datasets to Visualization").addClass("action-button").appendTo(this.intro_div).click(function () {
+            visualization.select_datasets(Galaxy.root + "visualization/list_current_history_datasets", Galaxy.root + "api/datasets", { 'f-dbkey': view.dbkey }, function(tracks) {
+                _.each(tracks, function(track) {
+                    view.add_drawable( object_from_template(track, view, view) );
+                });
+            });
+        });
+
+        // Navigation at top
+        this.nav_container = $("<div/>").addClass("trackster-nav-container").prependTo(this.top_container);
+        this.nav = $("<div/>").addClass("trackster-nav").appendTo(this.nav_container);
+
+        if (this.stand_alone) {
+            this.nav_container.addClass("stand-alone");
+            this.nav.addClass("stand-alone");
+        }
+
+        // Overview (scrollbar and overview plot) at bottom
+        this.overview = $("<div/>").addClass("overview").appendTo(this.bottom_container);
+        this.overview_viewport = $("<div/>").addClass("overview-viewport").appendTo(this.overview);
+        this.overview_close = $("<a/>").attr("title", "Close overview")
+                                       .addClass("icon-button overview-close tooltip")
+                                       .hide()
+                                       .appendTo(this.overview_viewport);
+        this.overview_highlight = $("<div/>").addClass("overview-highlight").hide().appendTo(this.overview_viewport);
+        this.overview_box_background = $("<div/>").addClass("overview-boxback").appendTo(this.overview_viewport);
+        this.overview_box = $("<div/>").addClass("overview-box").appendTo(this.overview_viewport);
+        this.default_overview_height = this.overview_box.height();
+
+        this.nav_controls = $("<div/>").addClass("nav-controls").appendTo(this.nav);
+        this.chrom_select = $("<select/>").attr({ "name": "chrom"}).addClass('chrom-nav').append("<option value=''>Loading</option>").appendTo(this.nav_controls);
+        var submit_nav = function(e) {
+            if (e.type === "focusout" || (e.keyCode || e.which) === 13 || (e.keyCode || e.which) === 27 ) {
+                if ((e.keyCode || e.which) !== 27) { // Not escape key
+                    view.go_to( $(this).val() );
+                }
+                $(this).hide();
+                $(this).val('');
+                view.location_span.show();
+                view.chrom_select.show();
+            }
+
+            // Suppress key presses so that they do not impact the viz.
+            e.stopPropagation();
+        };
+        this.nav_input = $("<input/>").addClass("nav-input").hide().bind("keyup focusout", submit_nav).appendTo(this.nav_controls);
+        this.location_span = $("<span/>").addClass("location").attr('title', 'Click to change location').tooltip( { placement: 'bottom' } ).appendTo(this.nav_controls);
+        this.location_span.click(function() {
+            view.location_span.hide();
+            view.chrom_select.hide();
+            view.nav_input.val(view.chrom + ":" + view.low + "-" + view.high);
+            view.nav_input.css("display", "inline-block");
+            view.nav_input.select();
+            view.nav_input.focus();
+            // Set up autocomplete for tracks' features.
+            view.nav_input.autocomplete({
+                source: function(request, response) {
+                    // Using current text, query each track and create list of all matching features.
+                    var all_features = [],
+                        feature_search_deferreds = $.map(view.get_tracks(FeatureTrack), function(t) {
+                        return t.data_manager.search_features(request.term).success(function(dataset_features) {
+                            all_features = all_features.concat(dataset_features);
+                        });
+                    });
+
+                    // When all searching is done, fill autocomplete.
+                    $.when.apply($, feature_search_deferreds).done(function() {
+                        response($.map(all_features, function(feature) {
+                            return {
+                                label: feature[0],
+                                value: feature[1]
+                            };
+                        }));
+                    });
+                },
+                minLength: 2
+            });
+        });
+        if (this.vis_id !== undefined) {
+            this.hidden_input = $("<input/>").attr("type", "hidden").val(this.vis_id).appendTo(this.nav_controls);
+        }
+
+        this.zo_link = $("<a/>").attr("id", "zoom-out").attr("title", "Zoom out").tooltip( {placement: 'bottom'} )
+                                .click(function() { view.zoom_out(); }).appendTo(this.nav_controls);
+        this.zi_link = $("<a/>").attr("id", "zoom-in").attr("title", "Zoom in").tooltip( {placement: 'bottom'} )
+                                .click(function() { view.zoom_in(); }).appendTo(this.nav_controls);
+
+        // Get initial set of chroms.
+        this.load_chroms_deferred = this.load_chroms({low: 0});
+        this.chrom_select.bind("change", function() {
+            view.change_chrom(view.chrom_select.val());
+        });
+
+        /*
+        this.browser_content_div.bind("mousewheel", function( e, delta ) {
+            if (Math.abs(delta) < 0.5) {
+                return;
+            }
+            if (delta > 0) {
+                view.zoom_in(e.pageX, this.viewport_container);
+            } else {
+                view.zoom_out();
+            }
+            e.preventDefault();
+        });
+        */
+
+        // Blur tool/filter inputs when user clicks on content div.
+        this.browser_content_div.click(function( e ) {
+            $(this).find("input").trigger("blur");
+        });
+
+        // Double clicking zooms in
+        this.browser_content_div.bind("dblclick", function( e ) {
+            view.zoom_in(e.pageX, this.viewport_container);
+        });
+
+        // Dragging the overview box (~ horizontal scroll bar)
+        this.overview_box.bind("dragstart", function( e, d ) {
+            this.current_x = d.offsetX;
+        }).bind("drag", function( e, d ) {
+            var delta = d.offsetX - this.current_x;
+            this.current_x = d.offsetX;
+            var delta_chrom = Math.round(delta / view.viewport_container.width() * (view.max_high - view.max_low) );
+            view.move_delta(-delta_chrom);
+        });
+
+        this.overview_close.click(function() {
+            view.reset_overview();
+        });
+
+        // Dragging in the viewport scrolls
+        this.viewport_container.bind( "draginit", function( e, d ) {
+            // Disable interaction if started in scrollbar (for webkit)
+            if ( e.clientX > view.viewport_container.width() - 16 ) {
+                return false;
+            }
+        }).bind( "dragstart", function( e, d ) {
+            d.original_low = view.low;
+            d.current_height = e.clientY;
+            d.current_x = d.offsetX;
+        }).bind( "drag", function( e, d ) {
+            var container = $(this);
+            var delta = d.offsetX - d.current_x;
+            var new_scroll = container.scrollTop() - (e.clientY - d.current_height);
+            container.scrollTop(new_scroll);
+            d.current_height = e.clientY;
+            d.current_x = d.offsetX;
+            var delta_chrom = Math.round(delta / view.viewport_container.width() * (view.high - view.low));
+            view.move_delta(delta_chrom);
+        });
+        /*
+        FIXME: Do not do this for now because it's too jittery. Some kind of gravity approach is
+        needed here because moving left/right should be difficult.
+
+        // Also capture mouse wheel for left/right scrolling
+        }).bind( 'mousewheel', function( e, d, dx, dy ) {
+            // Only handle x axis scrolling; y axis scrolling is
+            // handled by the browser when the event bubbles up.
+            if (dx) {
+                var delta_chrom = Math.round( - dx / view.viewport_container.width() * (view.high - view.low) );
+                view.move_delta( delta_chrom );
+            }
+        });
+        */
+
+        // Dragging in the top label track selects a region to zoom in on.
+        this.top_labeltrack.bind( "dragstart", function( e, d ) {
+            return $("<div/>").addClass('zoom-area').css(
+                "height", view.browser_content_div.height() + view.top_labeltrack.height() + 1
+            ).appendTo( $(this) );
+        }).bind( "drag", function( e, d ) {
+            $( d.proxy ).css({ left: Math.min( e.pageX, d.startX ) - view.container.offset().left, width: Math.abs( e.pageX - d.startX ) });
+            var min = Math.min(e.pageX, d.startX ) - view.container.offset().left,
+                max = Math.max(e.pageX, d.startX ) - view.container.offset().left,
+                span = (view.high - view.low),
+                width = view.viewport_container.width();
+            view.update_location( Math.round(min / width * span) + view.low,
+                                  Math.round(max / width * span) + view.low );
+        }).bind( "dragend", function( e, d ) {
+            var min = Math.min(e.pageX, d.startX),
+                max = Math.max(e.pageX, d.startX),
+                span = (view.high - view.low),
+                width = view.viewport_container.width(),
+                old_low = view.low;
+            view.low = Math.round(min / width * span) + old_low;
+            view.high = Math.round(max / width * span) + old_low;
+            $(d.proxy).remove();
+            view.request_redraw();
+        });
+
+        // FIXME: this is still wonky for embedded visualizations.
+        /*
+        // For vertical alignment, track mouse with simple line.
+        var mouse_tracker_div = $('<div/>').addClass('mouse-pos').appendTo(parent_element);
+
+        // Show tracker only when hovering over view.
+        parent_element.hover(
+            function() {
+                mouse_tracker_div.show();
+                parent_element.mousemove(function(e) {
+                    mouse_tracker_div.css({
+                        // -1 makes line appear next to the mouse w/o preventing mouse actions.
+                        left: e.pageX - parent_element.offset().left - 1
+                    });
+                });
+            },
+            function() {
+                parent_element.off('mousemove');
+                mouse_tracker_div.hide();
+            }
+        );
+        */
+
+        this.add_label_track( new LabelTrack( this, { content_div: this.top_labeltrack } ) );
+
+        $(window).bind("resize", function() {
+            // Stop previous timer.
+            if (this.resize_timer) {
+                clearTimeout(this.resize_timer);
+            }
+
+            // When function activated, resize window and redraw.
+            this.resize_timer = setTimeout(function () {
+                view.resize_window();
+            }, 500 );
+        });
+        $(document).bind("redraw", function() { view.redraw(); });
+
+        this.reset();
+        $(window).trigger("resize");
+    },
+
+    get_base_color: function(base) {
+        return this.config.get_value(base.toLowerCase() + '_color') ||
+               this.config.get_value('n_color');
+    }
+
+});
+
+// FIXME: need to use this approach to enable inheritance of DrawableCollection functions.
+extend( TracksterView.prototype, DrawableCollection.prototype, {
+
+    changed: function() {
+        this.has_changes = true;
+    },
+
+    /** Add or remove intro div depending on view state. */
+    update_intro_div: function() {
+        this.intro_div.toggle(this.drawables.length === 0);
+    },
+
+    /**
+     * Triggers navigate events as needed. If there is a delay,
+     * the event is triggered only after navigation has stopped.
+     */
+    trigger_navigate: function(new_chrom, new_low, new_high, delay) {
+        // Stop previous timer.
+        if (this.timer) {
+            clearTimeout(this.timer);
+        }
+
+        if (delay) {
+            // To aggregate calls, use timer and only navigate once
+            // location has stabilized.
+            var self = this;
+            this.timer = setTimeout(function () {
+                self.trigger("navigate", new_chrom + ":" + new_low + "-" + new_high);
+            }, 500 );
+        }
+        else {
+            this.trigger("navigate", new_chrom + ":" + new_low + "-" + new_high);
+        }
+    },
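+    // Because TracksterView is a Backbone view, navigation can be observed with
+    // a standard event listener (illustrative; not part of the upstream source):
+    //   view.on("navigate", function(loc) { console.log("now at " + loc); });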
+
+    update_location: function(low, high) {
+        this.location_span.text( util.commatize(low) + ' - ' + util.commatize(high) );
+        this.nav_input.val( this.chrom + ':' + util.commatize(low) + '-' + util.commatize(high) );
+
+        // Update location. Only update when there is a valid chrom; when loading vis, there may
+        // not be a valid chrom.
+        var chrom = this.chrom_select.val();
+        if (chrom !== "") {
+            this.trigger_navigate(chrom, this.low, this.high, true);
+        }
+    },
+
+    /**
+     * Load chrom data for the view. Returns a jQuery Deferred.
+     */
+    // FIXME: instead of loading chrom data, should load and store genome object.
+    load_chroms: function(url_parms) {
+        url_parms.num = MAX_CHROMS_SELECTABLE;
+
+        var
+            view = this,
+            chrom_data = $.Deferred();
+        $.ajax({
+            url: Galaxy.root + "api/genomes/" + this.dbkey,
+            data: url_parms,
+            dataType: "json",
+            success: function (result) {
+                // Do nothing if could not load chroms.
+                if (result.chrom_info.length === 0) {
+                    return;
+                }
+
+                // Load chroms.
+                if (result.reference) {
+                    var ref_track = new ReferenceTrack(view);
+                    view.add_label_track(ref_track);
+                    view.reference_track = ref_track;
+                }
+                view.chrom_data = result.chrom_info;
+                var chrom_options = '<option value="">Select Chrom/Contig</option>';
+                for (var i = 0, len = view.chrom_data.length; i < len; i++) {
+                    var chrom = view.chrom_data[i].chrom;
+                    chrom_options += '<option value="' + chrom + '">' + chrom + '</option>';
+                }
+                if (result.prev_chroms) {
+                    chrom_options += '<option value="previous">Previous ' + MAX_CHROMS_SELECTABLE + '</option>';
+                }
+                if (result.next_chroms) {
+                    chrom_options += '<option value="next">Next ' + MAX_CHROMS_SELECTABLE + '</option>';
+                }
+                view.chrom_select.html(chrom_options);
+                view.chrom_start_index = result.start_index;
+
+                chrom_data.resolve(result.chrom_info);
+            },
+            error: function() {
+                alert("Could not load chroms for this dbkey: " + view.dbkey);
+            }
+        });
+        return chrom_data;
+    },
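+    // Illustrative usage (not part of the upstream source):
+    //   view.load_chroms({ low: 0 }).then(function(chrom_info) {
+    //       // chrom_info is the chrom_info array from the genomes API response.
+    //   });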
+
+    change_chrom: function(chrom, low, high) {
+        var view = this;
+        // If chrom data is still loading, wait for it.
+        if (!view.chrom_data) {
+            view.load_chroms_deferred.then(function() {
+                view.change_chrom(chrom, low, high);
+            });
+            return;
+        }
+
+        // Don't do anything if chrom is "None" (hackish but some browsers already have this set), or null/blank
+        if (!chrom || chrom === "None") {
+            return;
+        }
+
+        //
+        // If user is navigating to previous/next set of chroms, load new chrom set and return.
+        //
+        if (chrom === "previous") {
+            view.load_chroms({low: this.chrom_start_index - MAX_CHROMS_SELECTABLE});
+            return;
+        }
+        if (chrom === "next") {
+            view.load_chroms({low: this.chrom_start_index + MAX_CHROMS_SELECTABLE});
+            return;
+        }
+
+        //
+        // User is loading a particular chrom. Look first in current set; if not in current set, load new
+        // chrom set.
+        //
+        var found = $.grep(view.chrom_data, function(v, i) {
+            return v.chrom === chrom;
+        })[0];
+        if (found === undefined) {
+            // Try to load the chrom, then change to it; load_chroms returns a
+            // Deferred and takes no callback, so chain on the Deferred.
+            view.load_chroms({'chrom': chrom}).then(function() {
+                view.change_chrom(chrom, low, high);
+            });
+            return;
+        }
+        else {
+            // Switching to local chrom.
+            if (chrom !== view.chrom) {
+                view.chrom = chrom;
+                view.chrom_select.val(view.chrom);
+                view.max_high = found.len-1; // -1 because we're using 0-based indexing.
+                view.reset();
+
+                for (var i = 0, len = view.drawables.length; i < len; i++) {
+                    var drawable = view.drawables[i];
+                    if (drawable.init) {
+                        drawable.init();
+                    }
+                }
+                if (view.reference_track) {
+                    view.reference_track.init();
+                }
+            }
+
+            // Resolve low, high.
+            if (low === undefined && high === undefined) {
+                // Both are undefined, so view is whole chromosome.
+                view.low = 0;
+                view.high = view.max_high;
+            }
+            else {
+                // Low and/or high is defined.
+                view.low = (low !== undefined ? Math.max(low, 0) : 0);
+                if (high === undefined) {
+                    // Center visualization around low.
+                    // HACK: max resolution is currently 30 bases.
+                    view.low = Math.max(view.low - 15, 0);
+                    view.high = view.low + 30;
+                }
+                else {
+                    // High is defined.
+                    view.high = Math.min(high, view.max_high);
+                }
+            }
+
+            view.request_redraw();
+        }
+    },
+
+    /**
+     * Change viewing region to that denoted by string. General format of string is:
+     *
+     * <chrom>[ {separator}<start>[-<end>] ]
+     *
+     * where separator can be whitespace or a colon. Examples:
+     *
+     * chr22
+     * chr1:100-200
+     * chr7 89999
+     * chr8 90000 990000
+     */
+    go_to: function(str) {
+        // Remove commas.
+        str = str.replace(/,/g, '');
+
+        // Replace colons and hyphens with space for easy parsing.
+        str = str.replace(/:|\-/g, ' ');
+
+        // Parse new location.
+        var chrom_pos = str.split(/\s+/),
+            chrom = chrom_pos[0],
+            new_low = (chrom_pos[1] ? parseInt(chrom_pos[1], 10) : undefined),
+            new_high = (chrom_pos[2] ? parseInt(chrom_pos[2], 10) : undefined);
+
+        this.change_chrom(chrom, new_low, new_high);
+    },
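+    // Illustrative usage (coordinates are hypothetical):
+    //   view.go_to("chr1:10,000-20,000"); // commas are stripped before parsing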
+
+    move_fraction: function(fraction) {
+        var view = this;
+        var span = view.high - view.low;
+        this.move_delta(fraction * span);
+    },
+
+    move_delta: function(delta_chrom) {
+        //
+        // Update low, high.
+        //
+
+        var view = this;
+        var current_chrom_span = view.high - view.low;
+        // Check for left and right boundaries
+        if (view.low - delta_chrom < view.max_low) {
+            view.low = view.max_low;
+            view.high = view.max_low + current_chrom_span;
+        } else if (view.high - delta_chrom > view.max_high) {
+            view.high = view.max_high;
+            view.low = view.max_high - current_chrom_span;
+        } else {
+            view.high -= delta_chrom;
+            view.low -= delta_chrom;
+        }
+
+        //
+        // Redraw view.
+        //
+
+        // Redraw without requesting more data immediately.
+        view.request_redraw({ data_fetch: false });
+
+        // Set up timeout to redraw with more data when moving stops.
+        if (this.redraw_on_move_fn) {
+            clearTimeout(this.redraw_on_move_fn);
+        }
+
+        this.redraw_on_move_fn = setTimeout(function() {
+            view.request_redraw();
+        }, 200);
+
+        // Navigate.
+        var chrom = view.chrom_select.val();
+        this.trigger_navigate(chrom, view.low, view.high, true);
+    },
+
+    /**
+     * Add a drawable to the view.
+     */
+    add_drawable: function(drawable) {
+        DrawableCollection.prototype.add_drawable.call(this, drawable);
+        drawable.init();
+        this.changed();
+        this.update_intro_div();
+
+        // When drawable config changes, mark view as changed. This
+        // captures most (all?) state changes that need to be saved.
+        var self = this;
+        drawable.config.on('change', function() {
+            self.changed();
+        });
+    },
+
+    add_label_track: function (label_track) {
+        label_track.view = this;
+        label_track.init();
+        this.label_tracks.push(label_track);
+    },
+
+    /**
+     * Remove drawable from the view.
+     */
+    remove_drawable: function(drawable, hide) {
+        DrawableCollection.prototype.remove_drawable.call(this, drawable);
+        if (hide) {
+            var view = this;
+            drawable.container_div.hide(0, function() {
+                $(this).remove();
+                view.update_intro_div();
+            });
+        }
+    },
+
+    reset: function() {
+        this.low = this.max_low;
+        this.high = this.max_high;
+        this.viewport_container.find(".yaxislabel").remove();
+    },
+
+    /**
+     * Request that view redraw one or more of view's drawables. If drawable is not specified,
+     * all drawables are redrawn.
+     */
+    request_redraw: function(options, drawable) {
+        var view = this,
+            // Either redrawing a single drawable or all view's drawables.
+            track_list = (drawable ? [drawable] : view.drawables);
+
+        // Add/update tracks in track list to redraw list.
+        _.each(track_list, function(track) {
+            var track_options = _.find(view.tracks_to_be_redrawn, function(to) {
+                return to[0] === track;
+            });
+
+            if (track_options) {
+                // Track already in list; update options.
+                track_options[1] = options;
+            }
+            else {
+                // Track not in list yet.
+                view.tracks_to_be_redrawn.push([track, options]);
+            }
+        });
+
+        // Set up redraw if it has not been requested since last redraw.
+        if (!this.requested_redraw) {
+            requestAnimationFrame(function() { view._redraw(); });
+            this.requested_redraw = true;
+        }
+    },
+
+    /**
+     * Redraws view and tracks.
+     * NOTE: this method should never be called directly; request_redraw() should be used so
+     * that requestAnimationFrame can manage redrawing.
+     */
+    _redraw: function() {
+        // TODO: move this code to function that does location setting.
+
+        // Clear because requested redraw is being handled now.
+        this.requested_redraw = false;
+
+        var low = this.low,
+            high = this.high;
+
+        if (low < this.max_low) {
+            low = this.max_low;
+        }
+        if (high > this.max_high) {
+            high = this.max_high;
+        }
+        var span = this.high - this.low;
+        if (this.high !== 0 && span < this.min_separation) {
+            high = low + this.min_separation;
+        }
+        this.low = Math.floor(low);
+        this.high = Math.ceil(high);
+
+        this.update_location(this.low, this.high);
+
+        // -- Drawing code --
+
+        // Resolution is a pixel density.
+        this.resolution_px_b = this.viewport_container.width() / (this.high - this.low);
+
+        // Overview
+        var left_px = ( this.low / (this.max_high - this.max_low) * this.overview_viewport.width() ) || 0;
+        var width_px = ( (this.high - this.low)/(this.max_high - this.max_low) * this.overview_viewport.width() ) || 0;
+        var min_width_px = 13;
+
+        this.overview_box.css({ left: left_px, width: Math.max(min_width_px, width_px) }).show();
+        if (width_px < min_width_px) {
+            this.overview_box.css("left", left_px - (min_width_px - width_px)/2);
+        }
+        if (this.overview_highlight) {
+            this.overview_highlight.css({ left: left_px, width: width_px });
+        }
+
+        // Draw data tracks.
+        _.each(this.tracks_to_be_redrawn, function(track_options) {
+            var track = track_options[0],
+                options = track_options[1];
+            if (track) {
+                track._draw(options);
+            }
+        });
+        this.tracks_to_be_redrawn = [];
+
+        // Draw label tracks.
+        _.each(this.label_tracks, function(label_track) {
+            label_track._draw();
+        });
+    },
+
+    zoom_in: function (point, container) {
+        if (this.max_high === 0 || this.high - this.low <= this.min_separation) {
+            return;
+        }
+        var span = this.high - this.low,
+            cur_center = span / 2 + this.low,
+            new_half = (span / this.zoom_factor) / 2;
+        if (point) {
+            cur_center = point / this.viewport_container.width() * (this.high - this.low) + this.low;
+        }
+        this.low = Math.round(cur_center - new_half);
+        this.high = Math.round(cur_center + new_half);
+
+        this.changed();
+        this.request_redraw();
+    },
+
+    zoom_out: function () {
+        if (this.max_high === 0) {
+            return;
+        }
+        var span = this.high - this.low,
+            cur_center = span / 2 + this.low,
+            new_half = (span * this.zoom_factor) / 2;
+        this.low = Math.round(cur_center - new_half);
+        this.high = Math.round(cur_center + new_half);
+        this.changed();
+        this.request_redraw();
+    },
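+    // Illustrative usage (not part of the upstream source):
+    //   view.zoom_in();        // zoom around the current center by zoom_factor
+    //   view.zoom_in(e.pageX); // zoom around a pixel position, e.g. from a click
+    //   view.zoom_out();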
+
+    /** Resize viewport. Use this method if header/footer content has changed in size. */
+    resize_viewport: function() {
+        this.viewport_container.height( this.container.height() - this.top_container.height() - this.bottom_container.height() );
+    },
+
+    /** Called when window is resized. */
+    resize_window: function() {
+        this.resize_viewport();
+        this.request_redraw();
+    },
+
+    /** Show a Drawable in the overview. */
+    set_overview: function(drawable) {
+        if (this.overview_drawable) {
+            // If drawable to be set as overview is already in overview, do nothing.
+            // Otherwise, remove overview.
+            if (this.overview_drawable.dataset.id === drawable.dataset.id) {
+                return;
+            }
+            this.overview_viewport.find(".track").remove();
+        }
+
+        // Set new overview.
+        var
+            overview_drawable = drawable.copy( { content_div: this.overview_viewport } ),
+            view = this;
+        overview_drawable.header_div.hide();
+        overview_drawable.is_overview = true;
+        view.overview_drawable = overview_drawable;
+        this.overview_drawable.postdraw_actions = function() {
+            view.overview_highlight.show().height(view.overview_drawable.content_div.height());
+            view.overview_viewport.height(view.overview_drawable.content_div.height() + view.overview_box.outerHeight());
+            view.overview_close.show();
+            view.resize_window();
+        };
+        view.overview_drawable.request_draw();
+        this.changed();
+    },
+
+    /** Close and reset overview. */
+    reset_overview: function() {
+        // Update UI.
+        $(".tooltip").remove();
+        this.overview_viewport.find(".track-tile").remove();
+        this.overview_viewport.height(this.default_overview_height);
+        this.overview_box.height(this.default_overview_height);
+        this.overview_close.hide();
+        this.overview_highlight.hide();
+        this.resize_window();
+        this.overview_drawable = null;
+    }
+});
+
+/**
+ * Encapsulation of a tool that users can apply to tracks/datasets.
+ */
+var TracksterTool = tools_mod.Tool.extend({
+    defaults: {
+        track: null
+    },
+
+    initialize: function(options) {
+        tools_mod.Tool.prototype.initialize.call(this, options);
+
+        // Restore tool visibility from state; default to hidden.
+        var hidden = true;
+        if (options.tool_state !== undefined && options.tool_state.hidden !== undefined) {
+            hidden = options.tool_state.hidden;
+        }
+        this.set('hidden', hidden);
+
+        // FIXME: need to restore tool values from options.tool_state
+
+        // HACK: remove some inputs because Trackster does not yet work with them.
+        this.remove_inputs( [ 'data', 'hidden_data', 'conditional' ] );
+    },
+
+    state_dict: function(options) {
+        return _.extend( this.get_inputs_dict(), { hidden: !this.is_visible() } );
+    }
+});
+
+/**
+ * View renders tool parameter HTML and updates parameter value as it is changed in the HTML.
+ */
+var ToolParameterView = Backbone.View.extend({
+
+    events: {
+        'change :input': 'update_value'
+    },
+
+    render: function() {
+        var param_div = this.$el.addClass("param-row"),
+            param = this.model;
+
+        // Param label.
+        var label_div = $("<div>").addClass("param-label").text(param.get('label')).appendTo(param_div);
+        // Param HTML.
+        var html_div = $("<div/>").addClass("param-input").html(param.get('html')).appendTo(param_div);
+        // Set initial value.
+        html_div.find(":input").val(param.get('value'));
+
+        // Add to clear floating layout.
+        $("<div style='clear: both;'/>").appendTo(param_div);
+    },
+
+    update_value: function(update_event) {
+        this.model.set_value($(update_event.target).val());
+    }
+});
+
+/**
+ * View for TracksterTool.
+ */
+var TracksterToolView = Backbone.View.extend({
+
+    initialize: function(options) {
+        this.model.on('change:hidden', this.set_visible, this);
+    },
+
+    /**
+     * Render tool UI.
+     */
+    render: function() {
+        var self = this,
+            tool = this.model,
+            parent_div = this.$el.addClass("dynamic-tool").hide();
+
+        // Prevent div events from propagating to other elements.
+        parent_div.bind("drag", function(e) {
+            e.stopPropagation();
+        }).click(function(e) {
+            e.stopPropagation();
+        }).bind("dblclick", function(e) {
+            e.stopPropagation();
+        }).keydown(function(e) { e.stopPropagation(); });
+
+        // Add name, inputs.
+        var name_div = $("<div class='tool-name'>").appendTo(parent_div).text(tool.get('name'));
+        tool.get('inputs').each(function(param) {
+            // Render parameter.
+            var param_view = new ToolParameterView({ model: param });
+            param_view.render();
+            parent_div.append(param_view.$el);
+        });
+
+        // Highlight value for inputs for easy replacement.
+        parent_div.find("input").click(function() { $(this).select(); });
+
+        // Add buttons for running on dataset, region.
+        var run_tool_row = $("<div>").addClass("param-row").appendTo(parent_div);
+        var run_on_dataset_button = $("<input type='submit'>").attr("value", "Run on complete dataset").appendTo(run_tool_row);
+        var run_on_region_button = $("<input type='submit'>").attr("value", "Run on visible region").appendTo(run_tool_row);
+        run_on_region_button.click( function() {
+            // Run tool to create new track.
+            self.run_on_region();
+        });
+        run_on_dataset_button.click( function() {
+            self.run_on_dataset();
+        });
+
+        if (tool.is_visible()) {
+            this.$el.show();
+        }
+    },
+
+    /**
+     * Show or hide tool depending on tool visibility state.
+     */
+    set_visible: function() {
+        this.$el.toggle(this.model.is_visible());
+    },
+
+    /**
+     * Update tool parameters.
+     */
+    update_params: function() {
+        for (var i = 0; i < this.params.length; i++) {
+            this.params[i].update_value();
+        }
+    },
+
+    /**
+     * Run tool on dataset. Output is placed in dataset's history and no changes to viz are made.
+     */
+    run_on_dataset: function() {
+        var tool = this.model;
+        this.run(
+            // URL params.
+            {
+                target_dataset_id: this.model.get('track').dataset.id,
+                action: 'rerun',
+                tool_id: tool.id
+            },
+            null,
+            // Success callback.
+            function(track_data) {
+                Galaxy.modal.show({
+                    title: tool.get('name') + " is Running",
+                    body: tool.get('name') + " is running on the complete dataset. Tool outputs are in dataset's history.",
+                    buttons: { 'Close': function() { Galaxy.modal.hide(); } }
+                });
+            }
+        );
+    },
+
+    /**
+     * Run dataset on visible region. This creates a new track and sets the track's contents
+     * to the tool's output.
+     */
+    run_on_region: function() {
+        //
+        // Create track for tool's output immediately to provide user feedback.
+        //
+        var track = this.model.get('track'),
+            tool = this.model,
+            region = new visualization.GenomeRegion({
+                chrom: track.view.chrom,
+                start: track.view.low,
+                end: track.view.high
+            }),
+            url_params =
+            {
+                target_dataset_id: track.dataset.id,
+                action: 'rerun',
+                tool_id: tool.id,
+                regions: [
+                    region.toJSON()
+                ]
+            },
+            current_track = track,
+            // Set name of track to include tool name, parameters, and region used.
+            track_name = tool.get('name') +
+                         current_track.tool_region_and_parameters_str(region),
+            container;
+
+        // If track not in a group, create a group for it and add new track to group. If track
+        // already in group, add track to group.
+        if (current_track.container === view) {
+            // Create new group.
+            var group = new DrawableGroup(view, view, { name: track.config.get_value('name') });
+
+            // Replace track with group.
+            var index = current_track.container.replace_drawable(current_track, group, false);
+
+            // Update HTML.
+            // FIXME: this is an ugly way to replace a track with a group -- make this easier via
+            // a Drawable or DrawableCollection function.
+            group.container_div.insertBefore(current_track.view.content_div.children()[index]);
+            group.add_drawable(current_track);
+            current_track.container_div.appendTo(group.content_div);
+            container = group;
+        }
+        else {
+            // Use current group.
+            container = current_track.container;
+        }
+
+        // Create and init new track.
+        var new_track = new current_track.constructor(view, container, {
+            name: track_name,
+            hda_ldda: "hda"
+        });
+        new_track.init_for_tool_data();
+        new_track.change_mode(current_track.mode);
+        new_track.set_filters_manager(current_track.filters_manager.copy(new_track));
+        new_track.update_icons();
+        container.add_drawable(new_track);
+        new_track.tiles_div.text("Starting job.");
+
+        // Run tool.
+        this.run(url_params, new_track,
+                // Success callback.
+                function(track_data) {
+                    new_track.set_dataset(new data.Dataset(track_data));
+                    new_track.tiles_div.text("Running job.");
+                    new_track.init();
+                }
+        );
+    },
+
+    /**
+     * Run tool using a set of URL params and a success callback.
+     */
+    run: function(url_params, new_track, success_callback) {
+        // Run tool.
+        url_params.inputs = this.model.get_inputs_dict();
+        var ss_deferred = new util.ServerStateDeferred({
+            ajax_settings: {
+                url: Galaxy.root + "api/tools",
+                data: JSON.stringify(url_params),
+                dataType: "json",
+                contentType: 'application/json',
+                type: "POST"
+            },
+            interval: 2000,
+            success_fn: function(response) {
+                return response !== "pending";
+            }
+        });
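+
+        // Assuming ServerStateDeferred polls as its usage elsewhere in this file
+        // suggests: the request above is re-issued every `interval` ms (2000 here)
+        // until success_fn returns true, i.e. until the server stops answering
+        // "pending". Hypothetical timeline: t=0s "pending", t=2s "pending",
+        // t=4s final job response, at which point the deferred resolves with it.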
+
+        // Start with this status message.
+        //new_track.container_div.addClass("pending");
+        //new_track.content_div.html(DATA_PENDING);
+
+        $.when(ss_deferred.go()).then(function(response) {
+            if (response === "no converter") {
+                // No converter available for input datasets, so cannot run tool.
+                new_track.container_div.addClass("error");
+                new_track.content_div.text(DATA_NOCONVERTER);
+            }
+            else if (response.error) {
+                // General error.
+                new_track.container_div.addClass("error");
+                new_track.content_div.text(DATA_CANNOT_RUN_TOOL + response.message);
+            }
+            else {
+                // Job submitted and running.
+                success_callback(response);
+            }
+        });
+    }
+
+});
+
+/**
+ * Generates scale values based on a filter and a feature's value for that filter.
+ */
+var FilterScaler = function(filter, default_val) {
+    painters.Scaler.call(this, default_val);
+    this.filter = filter;
+};
+
+FilterScaler.prototype.gen_val = function(feature_data) {
+    // If filter is not initialized yet, return default value.
+    if (this.filter.high === Number.MAX_VALUE || this.filter.low === -Number.MAX_VALUE || this.filter.low === this.filter.high) {
+        return this.default_val;
+    }
+
+    // Scaling value is the ratio of (feature's value minus filter low) to (the complete filter range).
+    return ( ( parseFloat(feature_data[this.filter.index]) - this.filter.low ) / ( this.filter.high - this.filter.low ) );
+};
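+
+// Illustrative use of FilterScaler (hypothetical values): for a filter with
+// low = 0, high = 10 and index = 5, a feature whose feature_data[5] is "2.5"
+// yields gen_val = (2.5 - 0) / (10 - 0) = 0.25; a not-yet-initialized filter
+// returns default_val instead.
+//
+//     var scaler = new FilterScaler(some_filter, 0.5);  // some_filter is hypothetical
+//     var alpha = scaler.gen_val(feature_data);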
+
+/**
+ * Tiles drawn by tracks.
+ */
+var Tile = function(track, region, w_scale, canvas, data) {
+    this.track = track;
+    this.region = region;
+    this.low = region.get('start');
+    this.high = region.get('end');
+    this.w_scale = w_scale;
+    this.canvas = canvas;
+    // Wrap element in div for background and to provide container for tile-specific elements.
+    this.html_elt = $("<div class='track-tile'/>").append(canvas);
+    this.data = data;
+    this.stale = false;
+};
+
+/**
+ * Perform pre-display actions.
+ */
+Tile.prototype.predisplay_actions = function() {};
+
+var LineTrackTile = function(track, region, w_scale, canvas, data) {
+    Tile.call(this, track, region, w_scale, canvas, data);
+};
+LineTrackTile.prototype.predisplay_actions = function() {};
+
+var FeatureTrackTile = function(track, region, w_scale, canvas, data, mode, message, all_slotted,
+                                feature_mapper, incomplete_features, seq_data) {
+    // Attribute init.
+    Tile.call(this, track, region, w_scale, canvas, data);
+    this.mode = mode;
+    this.all_slotted = all_slotted;
+    this.feature_mapper = feature_mapper;
+    this.has_icons = false;
+    this.incomplete_features = incomplete_features;
+    // Features drawn based on data from other tiles.
+    this.other_tiles_features_drawn = {};
+    this.seq_data = seq_data;
+
+    // Add message + action icons to tile's html.
+    /*
+    This does not work right now because a random set of reads is returned by the server.
+    When the server can respond with more data systematically, re-enable these icons.
+    if (message) {
+        this.has_icons = true;
+
+        var tile = this,
+            canvas = this.html_elt.children()[0],
+            message_div = $("<div/>").addClass("tile-message")
+                            // -1 to account for border.
+                            .css({'height': ERROR_PADDING, 'width': canvas.width}).prependTo(this.html_elt);
+
+        // Handle message; the only current message is that only the first N elements are displayed.
+        var tile_region = new visualization.GenomeRegion({
+                chrom: track.view.chrom,
+                start: this.low,
+                end: this.high
+            }),
+            num_features = data.length,
+            more_down_icon = $("<a/>").addClass("icon more-down")
+                                .attr("title", "For speed, only the first " + num_features + " features in this region were obtained from server. Click to get more data including depth")
+                                .tooltip().appendTo(message_div),
+            more_across_icon = $("<a/>").addClass("icon more-across")
+                                .attr("title", "For speed, only the first " + num_features + " features in this region were obtained from server. Click to get more data excluding depth")
+                                .tooltip().appendTo(message_div);
+
+        // Set up actions for icons.
+        more_down_icon.click(function() {
+            // Mark tile as stale, request more data, and redraw track.
+            tile.stale = true;
+            track.data_manager.get_more_data(tile_region, track.mode, 1 / tile.w_scale, {}, track.data_manager.DEEP_DATA_REQ);
+            $(".tooltip").hide();
+            track.request_draw();
+        }).dblclick(function(e) {
+            // Do not propagate as this would normally zoom in.
+            e.stopPropagation();
+        });
+
+        more_across_icon.click(function() {
+            // Mark tile as stale, request more data, and redraw track.
+            tile.stale = true;
+            track.data_manager.get_more_data(tile_region, track.mode, 1 / tile.w_scale, {}, track.data_manager.BROAD_DATA_REQ);
+            $(".tooltip").hide();
+            track.request_draw();
+        }).dblclick(function(e) {
+            // Do not propagate as this would normally zoom in.
+            e.stopPropagation();
+        });
+    }
+    */
+};
+extend(FeatureTrackTile.prototype, Tile.prototype);
+
+/**
+ * Sets up support for popups.
+ */
+FeatureTrackTile.prototype.predisplay_actions = function() {
+    /*
+    FIXME: use a canvas library to handle popups.
+    //
+    // Add support for popups.
+    //
+    var tile = this,
+        popups = {};
+
+    // Only show popups in Pack mode.
+    if (tile.mode !== "Pack") { return; }
+
+    $(this.html_elt).hover(
+    function() {
+        this.hovered = true;
+        $(this).mousemove();
+    },
+    function() {
+        this.hovered = false;
+        // Clear popup if it is still hanging around (this is probably not needed)
+        $(this).parents(".track-content").children(".overlay").children(".feature-popup").remove();
+    } ).mousemove(function (e) {
+        // Use the hover plugin to get a delay before showing popup
+        if ( !this.hovered ) { return; }
+        // Get feature data for position.
+        var
+            this_offset = $(this).offset(),
+            offsetX = e.pageX - this_offset.left,
+            offsetY = e.pageY - this_offset.top,
+            feature_data = tile.feature_mapper.get_feature_data(offsetX, offsetY),
+            feature_uid = (feature_data ? feature_data[0] : null);
+        // Hide visible popup if not over a feature or over a different feature.
+        $(this).parents(".track-content").children(".overlay").children(".feature-popup").each(function() {
+            if ( !feature_uid ||
+                 $(this).attr("id") !== feature_uid.toString() ) {
+                $(this).remove();
+            }
+        });
+
+        if (feature_data) {
+            // Get or create popup.
+            var popup = popups[feature_uid];
+            if (!popup) {
+                // Create feature's popup element.
+                var feature_dict = {
+                        name: feature_data[3],
+                        start: feature_data[1],
+                        end: feature_data[2],
+                        strand: feature_data[4]
+                    },
+                    filters = tile.track.filters_manager.filters,
+                    filter;
+
+                // Add filter values to feature dict.
+                for (var i = 0; i < filters.length; i++) {
+                    filter = filters[i];
+                    feature_dict[filter.name] = feature_data[filter.index];
+                }
+
+                // Build popup.
+                popup = $("<div/>").attr("id", feature_uid).addClass("feature-popup");
+                var table = $("<table/>"),
+                    key, value, row;
+                for (key in feature_dict) {
+                    value = feature_dict[key];
+                    row = $("<tr/>").appendTo(table);
+                    $("<th/>").appendTo(row).text(key);
+                    $("<td/>").attr("align", "left").appendTo(row)
+                              .text(typeof(value) === 'number' ? round(value, 2) : value);
+                }
+                popup.append( $("<div class='feature-popup-inner'>").append( table ) );
+                popups[feature_uid] = popup;
+            }
+
+            // Attach popup to track's overlay.
+            popup.appendTo( $(this).parents(".track-content").children(".overlay") );
+
+            // Offsets are within canvas, but popup must be positioned relative to parent element.
+            // parseInt strips "px" from left, top measurements. +7 so that mouse pointer does not
+            // overlap popup.
+            var
+                popupX = offsetX + parseInt( tile.html_elt.css("left"), 10 ) - popup.width() / 2,
+                popupY = offsetY + parseInt( tile.html_elt.css("top"), 10 ) + 7;
+            popup.css("left", popupX + "px").css("top", popupY + "px");
+        }
+        else if (!e.isPropagationStopped()) {
+            // Propagate event to other tiles because overlapping tiles prevent mousemove from being
+            // called on tiles under this tile.
+            e.stopPropagation();
+            $(this).siblings().each(function() {
+                $(this).trigger(e);
+            });
+        }
+    })
+    .mouseleave(function() {
+        $(this).parents(".track-content").children(".overlay").children(".feature-popup").remove();
+    });
+    */
+};
+
+/**
+ * Tracks are objects that can be added to the View.
+ *
+ * Track object hierarchy:
+ * Track
+ * -> LabelTrack
+ * -> TiledTrack
+ * ----> LineTrack
+ * ----> ReferenceTrack
+ * ----> FeatureTrack
+ * -------> ReadTrack
+ * ----> VariantTrack
+ */
+var Track = function(view, container, obj_dict) {
+    // For now, track's container is always view.
+    extend(obj_dict, {
+        drag_handle_class: "draghandle"
+    });
+    Drawable.call(this, view, container, obj_dict);
+
+    //
+    // Attribute init.
+    //
+
+    // Set or create dataset.
+    this.dataset = null;
+    if (obj_dict.dataset) {
+        // Dataset can be a Backbone model or a dict that can be used to create a model.
+        this.dataset = (obj_dict.dataset instanceof Backbone.Model ? obj_dict.dataset : new data.Dataset(obj_dict.dataset) );
+    }
+    this.dataset_check_type = 'converted_datasets_state';
+    this.data_url_extra_params = {};
+    this.data_query_wait = ('data_query_wait' in obj_dict ? obj_dict.data_query_wait : DEFAULT_DATA_QUERY_WAIT);
+    // Creating the data manager here is a little ugly due to the transition to Backbone-based objects.
+    this.data_manager = ('data_manager' in obj_dict ?
+                         obj_dict.data_manager :
+                         new visualization.GenomeDataManager({
+                             dataset: this.dataset,
+                             // HACK: simulate 'genome' attributes from view for now.
+                             // View should eventually use Genome object.
+                             genome: new visualization.Genome({
+                                key: view.dbkey,
+                                chroms_info: {
+                                    chrom_info: view.chrom_data
+                                }
+                             }),
+                             data_mode_compatible: this.data_and_mode_compatible,
+                             can_subset: this.can_subset
+                         }));
+
+    // Height attributes: min height, max height, and visible height.
+    this.min_height_px = 16;
+    this.max_height_px = 800;
+    this.visible_height_px = this.config.get_value('height');
+
+    //
+    // Create content div, which is where track is displayed, and add to container if available.
+    //
+    this.content_div = $("<div class='track-content'>").appendTo(this.container_div);
+    if (this.container) {
+        this.container.content_div.append(this.container_div);
+        if ( !("resize" in obj_dict) || obj_dict.resize ) {
+            this.add_resize_handle();
+        }
+    }
+};
+
+extend(Track.prototype, Drawable.prototype, {
+    action_icons_def: [
+        // Change track mode.
+        {
+            name: "mode_icon",
+            title: "Set display mode",
+            css_class: "chevron-expand",
+            on_click_fn: function() {}
+        },
+        // Hide/show content.
+        Drawable.prototype.action_icons_def[0],
+        // Set track as overview.
+        {
+            name: "overview_icon",
+            title: "Set as overview",
+            css_class: "application-dock-270",
+            on_click_fn: function(track) {
+                track.view.set_overview(track);
+            }
+        },
+        // Edit config.
+        Drawable.prototype.action_icons_def[1],
+        // Toggle track filters.
+        {
+            name: "filters_icon",
+            title: "Filters",
+            css_class: "ui-slider-050",
+            on_click_fn: function(drawable) {
+                // TODO: update Tooltip text.
+                if (drawable.filters_manager.visible()) {
+                    drawable.filters_manager.clear_filters();
+                }
+                else {
+                    drawable.filters_manager.init_filters();
+                }
+                drawable.filters_manager.toggle();
+            }
+        },
+        // Toggle track tool.
+        {
+            name: "tools_icon",
+            title: "Tool",
+            css_class: "hammer",
+            on_click_fn: function(track) {
+                // TODO: update Tooltip text.
+
+                track.tool.toggle();
+
+                // Update track name.
+                if (track.tool.is_visible()) {
+                    track.set_name(track.config.get_value('name') + track.tool_region_and_parameters_str());
+                }
+                else {
+                    track.revert_name();
+                }
+                // HACK: name change modifies icon placement, which leaves tooltip incorrectly placed.
+                $(".tooltip").remove();
+            }
+        },
+        // Go to parameter exploration visualization.
+        {
+            name: "param_space_viz_icon",
+            title: "Tool parameter space visualization",
+            css_class: "arrow-split",
+            on_click_fn: function(track) {
+                var html =
+                    '<strong>Tool</strong>:' + track.tool.get("name") + '<br/>' +
+                    '<strong>Dataset</strong>:' + track.config.get_value("name") + '<br/>' +
+                    '<strong>Region(s)</strong>: <select name="regions">' +
+                    '<option value="cur">current viewing area</option>' +
+                    '<option value="bookmarks">bookmarks</option>' +
+                    '<option value="both">current viewing area and bookmarks</option>' +
+                    '</select>';
+                var cancel_fn = function() { Galaxy.modal.hide(); $(window).unbind("keypress.check_enter_esc"); },
+                    ok_fn = function() {
+                        var regions_to_use = $('select[name="regions"] option:selected').val(),
+                            regions,
+                            view_region = new visualization.GenomeRegion({
+                                chrom: view.chrom,
+                                start: view.low,
+                                end: view.high
+                            }),
+                            bookmarked_regions = _.map($(".bookmark"), function(elt) {
+                                return new visualization.GenomeRegion({from_str: $(elt).children(".position").text()});
+                            });
+
+                        // Get regions for visualization.
+                        if (regions_to_use === 'cur') {
+                            // Use only current region.
+                            regions = [ view_region ];
+                        }
+                        else if (regions_to_use === 'bookmarks') {
+                            // Use only bookmarks.
+                            regions = bookmarked_regions;
+                        }
+                        else {
+                            // Use both current region and bookmarks.
+                            regions = [ view_region ].concat(bookmarked_regions);
+                        }
+
+                        Galaxy.modal.hide();
+
+                        // Go to visualization.
+                        window.location.href =
+                            Galaxy.root + "visualization/sweepster" + "?" +
+                            $.param({
+                                dataset_id: track.dataset.id,
+                                hda_ldda: track.dataset.get('hda_ldda'),
+                                regions: JSON.stringify(new Backbone.Collection(regions).toJSON())
+                            });
+                    },
+                    check_enter_esc = function(e) {
+                        if ((e.keyCode || e.which) === 27) { // Escape key
+                            cancel_fn();
+                        } else if ((e.keyCode || e.which) === 13) { // Enter key
+                            ok_fn();
+                        }
+                    };
+
+                // show dialog
+                Galaxy.modal.show({title: "Visualize tool parameter space and output from different parameter settings?", body: html, buttons : {'No' : cancel_fn, 'Yes' : ok_fn } });
+            }
+        },
+        // Remove track.
+        Drawable.prototype.action_icons_def[2]
+    ],
+
+    can_draw: function() {
+        return this.dataset && Drawable.prototype.can_draw.call(this);
+    },
+
+    build_container_div: function () {
+        return $("<div/>").addClass('track').attr("id", "track_" + this.id);
+    },
+
+    /**
+     * Set track's dataset.
+     */
+    set_dataset: function(dataset) {
+        this.dataset = dataset;
+        this.data_manager.set('dataset', dataset);
+    },
+
+    /**
+     * Action to take during resize.
+     */
+    on_resize: function() {
+        this.request_draw({ clear_tile_cache: true });
+    },
+
+    /**
+     * Add resizing handle to drawable's container_div.
+     */
+    add_resize_handle: function () {
+        var track = this;
+        var in_handle = false;
+        var in_drag = false;
+        var drag_control = $( "<div class='track-resize'>" );
+        // Control shows on hover over track, stays while dragging
+        $(track.container_div).hover( function() {
+            if ( track.config.get_value('content_visible') ) {
+                in_handle = true;
+                drag_control.show();
+            }
+        }, function() {
+            in_handle = false;
+            if ( ! in_drag ) { drag_control.hide(); }
+        });
+        // Update height and force redraw of current view while dragging,
+        // clear cache to force redraw of other tiles.
+        drag_control.hide().bind( "dragstart", function( e, d ) {
+            in_drag = true;
+            d.original_height = $(track.content_div).height();
+        }).bind( "drag", function( e, d ) {
+            var new_height = Math.min( Math.max( d.original_height + d.deltaY, track.min_height_px ), track.max_height_px );
+            $(track.tiles_div).css( 'height', new_height );
+            track.visible_height_px = (track.max_height_px === new_height ? 0 : new_height);
+            track.on_resize();
+        }).bind( "dragend", function( e, d ) {
+            track.tile_cache.clear();
+            in_drag = false;
+            if (!in_handle) { drag_control.hide(); }
+            track.config.set_value('height', track.visible_height_px);
+            track.changed();
+        }).appendTo(track.container_div);
+    },
+
+    /**
+     * Hide any elements that are part of the track's contents area. Should
+     * remove elements as appropriate; the track will be redrawn by show_contents.
+     */
+    hide_contents: function () {
+        // Hide tiles.
+        this.tiles_div.hide();
+        // Hide any y axis labels (common to several track types)
+        this.container_div.find(".yaxislabel, .track-resize").hide();
+    },
+
+    show_contents: function() {
+        // Show the contents div and labels (if present)
+        this.tiles_div.show();
+        this.container_div.find(".yaxislabel, .track-resize").show();
+        // Request a redraw of the content
+        this.request_draw();
+    },
+
+    /**
+     * Returns track type.
+     */
+    get_type: function() {
+        // Order is important: start with most-specific classes and go up the track hierarchy.
+        if (this instanceof LabelTrack) {
+            return "LabelTrack";
+        }
+        else if (this instanceof ReferenceTrack) {
+            return "ReferenceTrack";
+        }
+        else if (this instanceof LineTrack) {
+            return "LineTrack";
+        }
+        else if (this instanceof ReadTrack) {
+            return "ReadTrack";
+        }
+        else if (this instanceof VariantTrack) {
+            return "VariantTrack";
+        }
+        else if (this instanceof CompositeTrack) {
+            return "CompositeTrack";
+        }
+        else if (this instanceof FeatureTrack) {
+            return "FeatureTrack";
+        }
+        return "";
+    },
+
+    /**
+     * Remove visualization content and display message.
+     */
+    show_message: function(msg_html) {
+        this.tiles_div.remove();
+        return $('<span/>').addClass('message').html(msg_html).appendTo(this.content_div);
+    },
+
+    /**
+     * Initialize and draw the track.
+     */
+    init: function(retry) {
+        // FIXME: track should have a 'state' attribute that is checked on load; this state attribute should be
+        // used in this function to determine what action(s) to take.
+
+        var track = this;
+        track.enabled = false;
+        track.tile_cache.clear();
+        track.data_manager.clear();
+        /*
+        if (!track.content_div.text()) {
+            track.content_div.text(DATA_LOADING);
+        }
+        */
+        // Remove old track content (e.g. tiles, messages).
+        track.content_div.children().remove();
+        track.container_div.removeClass("nodata error pending");
+
+        track.tiles_div = $("<div/>").addClass("tiles").appendTo(track.content_div);
+
+        //
+        // Tracks with no dataset id are handled differently.
+        // FIXME: is this really necessary?
+        //
+        if (!track.dataset.id) {
+            return;
+        }
+
+        // Get dataset state; if state is fine, enable and draw track. Otherwise, show message
+        // about track status.
+        var init_deferred = $.Deferred(),
+            params = {
+                hda_ldda: track.dataset.get('hda_ldda'),
+                data_type: this.dataset_check_type,
+                chrom: track.view.chrom,
+                retry: retry
+            };
+        $.getJSON(this.dataset.url(), params, function (result) {
+            if (!result || result === "error" || result.kind === "error") {
+                // Dataset is in error state.
+                track.container_div.addClass("error");
+                var msg_elt = track.show_message(DATA_ERROR);
+                if (result && result.message) {
+                    // Add links to (a) show error and (b) try again.
+                    msg_elt.append(
+                        $("<a href='javascript:void(0);'></a>").text("View error").click(function() {
+                            Galaxy.modal.show({title: "Trackster Error", body: "<pre>" + result.message + "</pre>", buttons : {'Close' : function() { Galaxy.modal.hide(); } } });
+                        })
+                    );
+                    msg_elt.append( $('<span/>').text(' ') );
+                    msg_elt.append(
+                        $("<a href='javascript:void(0);'></a>").text("Try again").click(function() {
+                            track.init(true);
+                        })
+                    );
+                }
+            }
+            else if (result === "no converter") {
+                track.container_div.addClass("error");
+                track.show_message(DATA_NOCONVERTER);
+            }
+            else if (result === "no data" || (result.data !== undefined && (result.data === null || result.data.length === 0))) {
+                track.container_div.addClass("nodata");
+                track.show_message(DATA_NONE);
+            }
+            else if (result === "pending") {
+                track.container_div.addClass("pending");
+                track.show_message(DATA_PENDING);
+                //$("<img/>").attr("src", image_path + "/yui/rel_interstitial_loading.gif").appendTo(track.tiles_div);
+                setTimeout(function() { track.init(); }, track.data_query_wait);
+            }
+            else if (result === "data" || result.status === "data") {
+                if (result.valid_chroms) {
+                    track.valid_chroms = result.valid_chroms;
+                    track.update_icons();
+                }
+                track.tiles_div.text(DATA_OK);
+                if (track.view.chrom) {
+                    track.tiles_div.text("");
+                    track.tiles_div.css( "height", track.visible_height_px + "px" );
+                    track.enabled = true;
+                    // predraw_init may be asynchronous, wait for it and then draw
+                    $.when.apply($, track.predraw_init()).done(function() {
+                        init_deferred.resolve();
+                        track.container_div.removeClass("nodata error pending");
+                        track.request_draw();
+                    });
+                }
+                else {
+                    init_deferred.resolve();
+                }
+            }
+        });
+
+        this.update_icons();
+        return init_deferred;
+    },
+
+    /**
+     * Additional initialization required before drawing track for the first time.
+     */
+    predraw_init: function() {
+        var track = this;
+        return $.getJSON( track.dataset.url(),
+            {  data_type: 'data', stats: true, chrom: track.view.chrom, low: 0,
+               high: track.view.max_high, hda_ldda: track.dataset.get('hda_ldda') }, function(result) {
+            var data = result.data;
+
+            // Tracks may not have stat data either because there is no data or data is not yet ready.
+            if (data && data.min !== undefined && data.max !== undefined) {
+                // Compute default minimum and maximum values
+                var min_value = data.min,
+                    max_value = data.max;
+                // If mean and sd are present, use them to compute a ~95% window
+                // but only if it would shrink the range on one side
+                min_value = Math.floor( Math.min( 0, Math.max( min_value, data.mean - 2 * data.sd ) ) );
+                max_value = Math.ceil( Math.max( 0, Math.min( max_value, data.mean + 2 * data.sd ) ) );
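+                // Worked example (hypothetical stats): min = -1, max = 50, mean = 10,
+                // sd = 3 gives a ~95% window of [10 - 6, 10 + 6] = [4, 16]; clamping
+                // yields min_value = floor(min(0, max(-1, 4))) = 0 and
+                // max_value = ceil(max(0, min(50, 16))) = 16.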
+                // Update config, prefs
+                track.config.set_default_value('min_value', min_value);
+                track.config.set_default_value('max_value', max_value);
+                track.config.set_value('min_value', min_value);
+                track.config.set_value('max_value', max_value);
+            }
+        });
+    },
+
+    /**
+     * Returns all drawables in this drawable.
+     */
+    get_drawables: function() {
+        return this;
+    }
+});
+
+var TiledTrack = function(view, container, obj_dict) {
+    Track.call(this, view, container, obj_dict);
+
+    var track = this;
+
+    // Make track moveable.
+    moveable(track.container_div, track.drag_handle_class, ".group", track);
+
+    // Attribute init.
+    this.filters_manager = new filters_mod.FiltersManager(this, ('filters' in obj_dict ? obj_dict.filters : null));
+    // HACK: set filters manager for data manager.
+    // FIXME: probably need a function to set filters and update the data_manager reference.
+    this.data_manager.set('filters_manager', this.filters_manager);
+    this.filters_available = false;
+    this.tool = (obj_dict.tool ? new TracksterTool( _.extend( obj_dict.tool, {
+        'track': this,
+        'tool_state': obj_dict.tool_state
+    } ) )
+    : null);
+    this.tile_cache = new visualization.Cache(TILE_CACHE_SIZE);
+    this.left_offset = 0;
+
+    if (this.header_div) {
+        //
+        // Setup filters.
+        //
+        this.set_filters_manager(this.filters_manager);
+
+        //
+        // Create dynamic tool view.
+        //
+        if (this.tool) {
+            var tool_view = new TracksterToolView({ model: this.tool });
+            tool_view.render();
+            this.dynamic_tool_div = tool_view.$el;
+            this.header_div.after(this.dynamic_tool_div);
+        }
+    }
+
+    // Add tiles_div, overlay_div to content_div.
+    this.tiles_div = $("<div/>").addClass("tiles").appendTo(this.content_div);
+    if (!this.config.get_value('content_visible')) {
+        this.tiles_div.hide();
+    }
+    this.overlay_div = $("<div/>").addClass("overlay").appendTo(this.content_div);
+
+    if (obj_dict.mode) {
+        this.change_mode(obj_dict.mode);
+    }
+};
+extend(TiledTrack.prototype, Drawable.prototype, Track.prototype, {
+
+    action_icons_def: Track.prototype.action_icons_def.concat( [
+        // Show more rows when all features are not slotted.
+        {
+            name: "show_more_rows_icon",
+            title: "To minimize track height, not all feature rows are displayed. Click to display more rows.",
+            css_class: "exclamation",
+            on_click_fn: function(track) {
+                $(".tooltip").remove();
+                track.slotters[ track.view.resolution_px_b ].max_rows *= 2;
+                track.request_draw({ clear_tile_cache: true });
+            },
+            hide: true
+        }
+    ] ),
+
+    /**
+     * Returns a copy of the track. The copy uses the same data manager so that the tracks can share data.
+     */
+    copy: function(container) {
+        // Create copy.
+        var obj_dict = this.to_dict();
+        extend(obj_dict, {
+            data_manager: this.data_manager
+        });
+        var new_track = new this.constructor(this.view, container, obj_dict);
+        // Misc. init and return.
+        new_track.change_mode(this.mode);
+        new_track.enabled = this.enabled;
+        return new_track;
+    },
+
+    /**
+     * Set filters manager + HTML elements.
+     */
+    set_filters_manager: function(filters_manager) {
+        this.filters_manager = filters_manager;
+        this.header_div.after(this.filters_manager.parent_div);
+    },
+
+    /**
+     * Returns representation of object in a dictionary for easy saving.
+     * Use from_dict to recreate object.
+     */
+    to_dict: function() {
+        return {
+            track_type: this.get_type(),
+            dataset: {
+                id: this.dataset.id,
+                hda_ldda: this.dataset.get('hda_ldda')
+            },
+            prefs: this.config.to_key_value_dict(),
+            mode: this.mode,
+            filters: this.filters_manager.to_dict(),
+            tool_state: (this.tool ? this.tool.state_dict() : {})
+        };
+    },
+
+    /**
+     * Set track bounds for current chromosome.
+     */
+    set_min_max: function() {
+        var track = this;
+
+        return $.getJSON( track.dataset.url(),
+            { data_type: 'data', stats: true, chrom: track.view.chrom, low: 0,
+              high: track.view.max_high, hda_ldda: track.dataset.get('hda_ldda') },
+            function(result) {
+                var data = result.data;
+                if ( isNaN(parseFloat(track.config.get_value('min_value'))) ||
+                     isNaN(parseFloat(track.config.get_value('max_value'))) ) {
+                    // Compute default minimum and maximum values
+                    var min_value = data.min,
+                        max_value = data.max;
+                    // If mean and sd are present, use them to compute a ~95% window
+                    // but only if it would shrink the range on one side
+                    min_value = Math.floor( Math.min( 0, Math.max( min_value, data.mean - 2 * data.sd ) ) );
+                    max_value = Math.ceil( Math.max( 0, Math.min( max_value, data.mean + 2 * data.sd ) ) );
+                    // Update the prefs
+                    track.config.set_value('min_value', min_value);
+                    track.config.set_value('max_value', max_value);
+                }
+            });
+    },
+
+    /**
+     * Change track's mode.
+     */
+    change_mode: function(new_mode) {
+        var track = this;
+        // TODO: is it necessary to store the mode in two places (.mode and track_config)?
+        track.mode = new_mode;
+        track.config.set_value('mode', new_mode);
+        // FIXME: find a better way to get Auto data w/o clearing cache; using mode in the
+        // data manager would work if Auto data were checked for compatibility when a specific
+        // mode is chosen.
+        if (new_mode === 'Auto') { this.data_manager.clear(); }
+        track.request_draw({ clear_tile_cache: true });
+        this.action_icons.mode_icon.attr("title", "Set display mode (now: " + track.mode + ")");
+        return track;
+    },
+
+    /**
+     * Update track's buttons.
+     */
+    update_icons: function() {
+        var track = this;
+
+        //
+        // Show/hide filter icon.
+        //
+        track.action_icons.filters_icon.toggle(track.filters_available);
+
+        //
+        // Show/hide tool icons.
+        //
+        track.action_icons.tools_icon.toggle(track.tool !== null);
+        track.action_icons.param_space_viz_icon.toggle(track.tool !== null);
+    },
+
+    /**
+     * Generate a key for the tile cache.
+     * TODO: create a TileCache object (like DataCache) and generate key internally.
+     */
+    _gen_tile_cache_key: function(w_scale, tile_region) {
+        return w_scale + '_' + tile_region;
+    },
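+    // Hypothetical example for _gen_tile_cache_key: w_scale = 0.1 and a region
+    // whose string form is "chr1:4000-8000" (assuming GenomeRegion renders as
+    // chrom:start-end) produce the key "0.1_chr1:4000-8000".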
+
+    /**
+     * Request that track be drawn.
+     */
+    request_draw: function(options) {
+        if (options && options.clear_tile_cache) {
+            this.tile_cache.clear();
+        }
+        this.view.request_redraw(options, this);
+    },
+
+    /**
+     * Actions to be taken before drawing.
+     */
+    before_draw: function() {
+        // Clear because this is set when drawing.
+        this.max_height_px = 0;
+    },
+
+    /**
+     * Draw track. Options include:
+     * -force: force a redraw rather than use cached tiles (default: false)
+     * -clear_after: clear old tiles after drawing new tiles (default: false)
+     * -data_fetch: fetch data if necessary (default: true)
+     *
+     * NOTE: this function should never be called directly; use request_draw() so that drawing
+     * management can be used.
+     */
+    _draw: function(options) {
+        if ( !this.can_draw() ) { return; }
+
+        var clear_after = options && options.clear_after,
+            low = this.view.low,
+            high = this.view.high,
+            range = high - low,
+            width = this.view.container.width(),
+            w_scale = this.view.resolution_px_b,
+            resolution = 1 / w_scale;
+
+        // For overview, adjust high, low, resolution, and w_scale.
+        if (this.is_overview) {
+            low = this.view.max_low;
+            high = this.view.max_high;
+            w_scale = width / (view.max_high - view.max_low);
+            resolution = 1 / w_scale;
+        }
+
+        this.before_draw();
+
+        //
+        // Method for moving and/or removing tiles:
+        // (a) mark all elements for removal using class 'remove'
+        // (b) during tile drawing/placement, remove class for elements that are moved;
+        //     this occurs in show_tile()
+        // (c) after drawing tiles, remove elements still marked for removal
+        //     (i.e. that still have class 'remove').
+        //
+
+        // Step (a) for (re)moving tiles.
+        this.tiles_div.children().addClass("remove");
+
+        var
+            // Tile width in bases.
+            tile_width = Math.floor(TILE_SIZE * resolution),
+            // Index of first tile that overlaps visible region.
+            tile_index = Math.floor(low / tile_width),
+            tile_region,
+            tile_promise,
+            tile_promises = [],
+            tiles = [];
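+        // Tiling sketch (hypothetical numbers): with TILE_SIZE = 400 px and
+        // w_scale = 0.1 px/base, resolution = 10 bases/px and tile_width =
+        // floor(400 * 10) = 4000 bases; for low = 9000 the first tile index is
+        // floor(9000 / 4000) = 2, covering bases 8000-12000.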
+        // Draw tiles.
+        while ( (tile_index * tile_width) < high ) {
+            // Get tile region.
+            tile_region = new visualization.GenomeRegion({
+                chrom: this.view.chrom,
+                start: tile_index * tile_width,
+                // Tile high cannot be larger than view.max_high, which is the chromosome length.
+                end: Math.min( (tile_index + 1) * tile_width, this.view.max_high)
+            });
+            tile_promise = this.draw_helper(tile_region, w_scale, options);
+            tile_promises.push(tile_promise);
+            $.when(tile_promise).then(function(tile) {
+                tiles.push(tile);
+            });
+
+            // Go to next tile.
+            tile_index += 1;
+        }
+
+        // Step (c) for (re)moving tiles when clear_after is false.
+        if (!clear_after) { this.tiles_div.children(".remove").removeClass("remove").remove(); }
+
+        // When all tiles are drawn, call post-draw actions.
+        var track = this;
+        $.when.apply($, tile_promises).then(function() {
+            // Step (c) for (re)moving tiles when clear_after is true:
+            track.tiles_div.children(".remove").remove();
+
+            // Only do postdraw actions for tiles; instances where tiles may not be drawn include:
+            // (a) ReferenceTrack without sufficient resolution;
+            // (b) data_fetch = false.
+            tiles = _.filter(tiles, function(t) {
+                return t !== null;
+            });
+            if (tiles.length !== 0) {
+                track.postdraw_actions(tiles, width, w_scale, clear_after);
+            }
+        });
+    },
+
+    /**
+     * Add a maximum/minimum label to track.
+     */
+    _add_yaxis_label: function(type, on_change) {
+        var track = this,
+            css_class = (type === 'max' ? 'top' : 'bottom'),
+            text = (type === 'max' ? 'max' : 'min'),
+            pref_name = (type === 'max' ? 'max_value' : 'min_value'),
+            label = this.container_div.find(".yaxislabel." + css_class),
+            value = round( track.config.get_value(pref_name), 1 );
+
+        // Default action for on_change is to redraw track.
+        on_change = on_change || function() {
+            track.request_draw({ clear_tile_cache: true });
+        };
+
+        if (label.length !== 0) {
+            // Label already exists, so update value.
+            label.text(value);
+        }
+        else {
+            // Add label.
+            label = $("<div/>").text(value).make_text_editable({
+                num_cols: 12,
+                on_finish: function(new_val) {
+                    $(".tooltip").remove();
+                    track.config.set_value(pref_name, round( new_val, 1 ) );
+                    on_change();
+                },
+                help_text: "Set " + text + " value"
+            }).addClass('yaxislabel ' + css_class).css("color", this.config.get_value('label_color'));
+            this.container_div.prepend(label);
+        }
+    },
+
+    /**
+     * Actions to be taken after draw has been completed. Draw is completed when all tiles have been
+     * drawn/fetched and shown.
+     */
+    postdraw_actions: function(tiles, width, w_scale, clear_after) {
+        var line_track_tiles = _.filter(tiles, function(tile) {
+            return (tile instanceof LineTrackTile);
+        });
+
+        //
+        // Take different actions depending on whether there are LineTrack/Coverage tiles.
+        //
+
+        if (line_track_tiles.length > 0) {
+            // -- Drawing in Coverage mode. --
+
+            // Clear because this is set when drawing.
+            this.max_height_px = 0;
+            var track = this;
+            _.each(tiles, function(tile) {
+                if (!(tile instanceof LineTrackTile)) {
+                    tile.html_elt.remove();
+                    track.draw_helper(tile.region, w_scale, { force: true, mode: 'Coverage' });
+                }
+            });
+
+            track._add_yaxis_label('max');
+        }
+        else {
+            // -- Drawing in non-Coverage mode. --
+
+            // Remove Y-axis labels because there are no line track tiles.
+            this.container_div.find('.yaxislabel').remove();
+
+            //
+            // If some tiles have icons, set padding of tiles without icons so features and rows align.
+            //
+            var icons_present = _.find(tiles, function(tile) {
+                return tile.has_icons;
+            });
+
+            if (icons_present) {
+                _.each(tiles, function(tile) {
+                    if (!tile.has_icons) {
+                        // Need to align with other tile(s) that have icons.
+                        tile.html_elt.css("padding-top", ERROR_PADDING);
+                    }
+                });
+            }
+        }
+    },
+
+    /**
+     * Returns appropriate display mode based on data.
+     */
+    get_mode: function(data) {
+        return this.mode;
+    },
+
+    /**
+     * Update track interface to show display mode being used.
+     */
+    update_auto_mode: function( display_mode ) {
+        // FIXME: needs to be implemented.
+    },
+
+    /**
+     * Returns a list of drawables to draw. Defaults to current track.
+     */
+    _get_drawables: function() {
+        return [ this ];
+    },
+
+    /**
+     * Retrieves from cache, draws, or sets up drawing for a single tile. Returns either a Tile object or a
+     * jQuery.Deferred object that is fulfilled when tile can be drawn again. Options include:
+     * -force: force a redraw rather than use cached tiles (default: false)
+     * -data_fetch: fetch data if necessary (default: true)
+     */
+    draw_helper: function(region, w_scale, options) {
+        // Init options if necessary to avoid having to check if options defined.
+        if (!options) { options = {}; }
+
+        var force = options.force,
+            mode = options.mode || this.mode,
+            resolution = 1 / w_scale,
+
+            // Useful vars.
+            track = this,
+            drawables = this._get_drawables(),
+            key = this._gen_tile_cache_key(w_scale, region),
+            is_tile = function(o) { return (o && 'track' in o); };
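+
+        // Note: the tile cache may hold either a finished Tile or a pending
+        // jQuery.Deferred; is_tile() duck-types on the 'track' attribute, which
+        // only Tile objects carry, to tell the two apart.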
+
+        // Check tile cache, if found show existing tile in correct position
+        var tile = (force ? undefined : track.tile_cache.get_elt(key));
+        if (tile) {
+            if (is_tile(tile)) {
+                track.show_tile(tile, w_scale);
+            }
+            return tile;
+        }
+
+        // If not fetching data, nothing more to do because data is needed to draw tile.
+        if (options.data_fetch === false) { return null; }
+
+        // Function that returns data/Deferreds needed to draw tile.
+        var get_tile_data = function() {
+            // HACK: if display mode (mode) is a continuous data mode, data mode must be "Coverage" to get coverage data.
+            var data_mode = (_.find(CONTINUOUS_DATA_MODES, function(m) { return m === mode; }) ? "Coverage" : mode);
+
+            // Map drawable object to data needed for drawing.
+            var tile_data = _.map(drawables, function(d) {
+                // Get the track data/promise.
+                return d.data_manager.get_data(region, data_mode, resolution, track.data_url_extra_params);
+            });
+
+            // Get reference data/promise.
+            if (view.reference_track) {
+                tile_data.push(view.reference_track.data_manager.get_data(region, mode, resolution, view.reference_track.data_url_extra_params));
+            }
+
+            return tile_data;
+        };
+
+        //
+        // When data is available, draw tile.
+        //
+        var tile_drawn = $.Deferred();
+        track.tile_cache.set_elt(key, tile_drawn);
+        $.when.apply($, get_tile_data()).then( function() {
+            var tile_data = get_tile_data(),
+                tracks_data = tile_data,
+                seq_data;
+
+            // Deferreds may show up here if trying to fetch a subset of data from a superset data chunk
+            // that cannot be subsetted. This may occur if the superset has a message. If there is a
+            // Deferred, try again from the top. NOTE: this condition could (should?) be handled by the
+            // GenomeDataManager in visualization module.
+            if (_.find(tile_data, function(d) { return util.is_deferred(d); })) {
+                track.tile_cache.set_elt(key, undefined);
+                $.when(track.draw_helper(region, w_scale, options)).then(function(tile) {
+                    tile_drawn.resolve(tile);
+                });
+                return;
+            }
+
+            // If sequence data is available, subset to get only data in region.
+            if (view.reference_track) {
+                seq_data = view.reference_track.data_manager.subset_entry(tile_data.pop(), region);
+            }
+
+            // Compute tile bounds and width up front; width is passed to
+            // get_canvas_height below, so it must be set before that call.
+            var tile_low = region.get('start'),
+                tile_high = region.get('end'),
+                width = Math.ceil( (tile_high - tile_low) * w_scale ) + track.left_offset;
+
+            // Get drawing modes, heights for all tracks.
+            var drawing_modes = [],
+                drawing_heights = [];
+
+            _.each(drawables, function(d, i) {
+                var mode = d.mode,
+                    data = tracks_data[i];
+                if (mode === "Auto") {
+                    mode = d.get_mode(data);
+                    d.update_auto_mode(mode);
+                }
+                drawing_modes.push(mode);
+                drawing_heights.push(d.get_canvas_height(data, mode, w_scale, width));
+            });
+
+            var canvas = track.view.canvas_manager.new_canvas(),
+                all_data_index = 0,
+                height = _.max(drawing_heights),
+                tile;
+
+            //
+            // Draw all tracks on tile.
+            //
+            canvas.width = width;
+            // Height is specified in options or is the height found above.
+            canvas.height = (options.height || height);
+            var ctx = canvas.getContext('2d');
+            ctx.translate(track.left_offset, 0);
+            if (drawables.length > 1) {
+                ctx.globalAlpha = 0.5;
+                ctx.globalCompositeOperation = "source-over";
+            }
+            _.each(drawables, function(d, i) {
+                tile = d.draw_tile(tracks_data[i], ctx, drawing_modes[i], region, w_scale, seq_data);
+            });
+
+            // Cache and show the tile only if one was drawn.
+            if (tile !== undefined) {
+                track.tile_cache.set_elt(key, tile);
+                track.show_tile(tile, w_scale);
+            }
+
+            tile_drawn.resolve(tile);
+        });
+
+        return tile_drawn;
+    },
+
+    /**
+     * Returns canvas height needed to display data; return value is an integer that denotes the
+     * number of pixels required.
+     */
+    get_canvas_height: function(result, mode, w_scale, canvas_width) {
+        return this.visible_height_px;
+    },
+
+    /**
+     * Draw line (bigwig) data onto tile.
+     */
+    _draw_line_track_tile: function(result, ctx, mode, region, w_scale) {
+        // Set min/max if they are not already set.
+        // FIXME: checking for different null/undefined/0 is messy; it would be nice to
+        // standardize this.
+        if ( [undefined, null].indexOf(this.config.get_value("min_value")) !== -1 ) {
+            this.config.set_value("min_value", 0);
+        }
+        if ( [undefined, null, 0].indexOf(this.config.get_value("max_value")) !== -1 ) {
+            this.config.set_value("max_value", _.max( _.map(result.data, function(d) { return d[1]; }) ) || 0);
+        }
+
+        var canvas = ctx.canvas,
+            painter = new painters.LinePainter(result.data, region.get('start'), region.get('end'), this.config.to_key_value_dict(), mode);
+        painter.draw(ctx, canvas.width, canvas.height, w_scale);
+
+        return new LineTrackTile(this, region, w_scale, canvas, result.data);
+    },
+
+    /**
+     * Draw a track tile.
+     * @param result result from server
+     * @param ctx canvas context to draw on
+     * @param mode mode to draw in
+     * @param region region to draw on tile
+     * @param w_scale pixels per base
+     * @param ref_seq reference sequence data
+     */
+    draw_tile: function(result, ctx, mode, region, w_scale, ref_seq) {},
+
+    /**
+     * Show track tile and perform associated actions. Showing tile may actually move
+     * an existing tile rather than reshowing it.
+     */
+    show_tile: function(tile, w_scale) {
+        var track = this,
+            tile_element = tile.html_elt;
+
+        // -- Show/move tile element. --
+
+        tile.predisplay_actions();
+
+        // Position tile element based on current viewport.
+        var left = Math.round( ( tile.low - (this.is_overview? this.view.max_low : this.view.low) ) * w_scale );
+        if (this.left_offset) {
+            left -= this.left_offset;
+        }
+        tile_element.css('left', left);
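+
+        // Positioning example (hypothetical values): tile.low = 5000, view.low = 4000
+        // and w_scale = 0.1 px/base give left = round((5000 - 4000) * 0.1) = 100 px,
+        // shifted left by the track's left_offset, if any.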
+
+        if ( tile_element.hasClass("remove") ) {
+            // Step (b) for (re)moving tiles. See _draw() function for description of algorithm
+            // for removing tiles.
+            tile_element.removeClass("remove");
+        }
+        else {
+            // Showing new tile.
+            this.tiles_div.append(tile_element);
+        }
+
+        // -- Update track, tile heights based on new tile. --
+
+        tile_element.css('height', 'auto');
+
+        // Update max height based on current tile's height.
+        // BUG/HACK: tile_element.height() returns a height that is always 2 pixels too big, so
+        // -2 to get the correct height.
+        this.max_height_px = Math.max(this.max_height_px, tile_element.height() - 2);
+
+        // Update height for all tiles based on max height.
+        tile_element.parent().children().css("height", this.max_height_px + "px");
+
+        // Update track height based on max height and visible height.
+        var track_height = this.max_height_px;
+        if (this.visible_height_px !== 0) {
+            track_height = Math.min(this.max_height_px, this.visible_height_px);
+        }
+        this.tiles_div.css("height", track_height + "px");
+    },
+
+    /**
+     * Utility function that creates a label string describing the region and parameters of a track's tool.
+     */
+    tool_region_and_parameters_str: function(region) {
+        var track = this,
+            region_str = (region !== undefined ? region.toString() : "all"),
+            param_str = _.values( track.tool.get_inputs_dict()).join(', ');
+        return " - region=[" + region_str + "], parameters=[" + param_str + "]";
+    },
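+    // Hypothetical example: for a region printing as "chr1:100-200" and tool
+    // inputs {cutoff: 0.05, window: 10}, tool_region_and_parameters_str returns
+    // " - region=[chr1:100-200], parameters=[0.05, 10]".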
+
+    /**
+     * Returns true if data is compatible with a given mode.
+     */
+    data_and_mode_compatible: function(data, mode) {
+        // Only handle modes that user can set.
+        if (mode === "Auto") {
+            return true;
+        }
+        // Histogram mode requires bigwig data.
+        else if (mode === "Coverage") {
+            return data.dataset_type === "bigwig";
+        }
+        // All other modes--Dense, Squish, Pack--require data + details.
+        else if (data.dataset_type === "bigwig" ||
+                 data.extra_info === "no_detail") {
+            return false;
+        }
+        else {
+            return true;
+        }
+    },
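+    // Quick reference for the branches above: "Auto" is always compatible;
+    // "Coverage" requires bigwig data; Dense/Squish/Pack require detailed
+    // (non-bigwig, non-"no_detail") data.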
+
+    /**
+     * Returns true if entry can be subsetted.
+     */
+    can_subset: function(entry) {
+        // Do not subset entries with a message or data with no detail.
+        if (entry.message || entry.extra_info === "no_detail")  {
+            return false;
+        }
+        // Subset only if data is single-bp resolution.
+        else if (entry.dataset_type === 'bigwig') {
+            return (entry.data[1][0] - entry.data[0][0] === 1);
+        }
+
+        return true;
+    },
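+    // Example (hypothetical entry): bigwig data [[100, 5.0], [101, 7.2], ...]
+    // has entry.data[1][0] - entry.data[0][0] === 1, so it is single-bp
+    // resolution and can be subsetted; bigwig data summarized at, say, 10-bp
+    // spacing cannot.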
+
+    /**
+     * Set up track to receive tool data.
+     */
+    init_for_tool_data: function() {
+        // Set up track to fetch raw data rather than converted data.
+        this.data_manager.set('data_type', 'raw_data');
+        this.data_query_wait = 1000;
+        this.dataset_check_type = 'state';
+
+        // FIXME: this is optional and is disabled for now because it creates
+        // additional converter jobs without a clear benefit: indexing
+        // such a small dataset provides little gain.
+        //
+        // Set up one-time, post-draw to clear tool execution settings.
+        //
+        /*
+        this.normal_postdraw_actions = this.postdraw_actions;
+        this.postdraw_actions = function(tiles, width, w_scale, clear_after) {
+            var self = this;
+
+            // Do normal postdraw init.
+            self.normal_postdraw_actions(tiles, width, w_scale, clear_after);
+
+            // Tool-execution specific post-draw init:
+
+            // Reset dataset check, wait time.
+            self.dataset_check_type = 'converted_datasets_state';
+            self.data_query_wait = DEFAULT_DATA_QUERY_WAIT;
+
+            // Reset data URL when dataset indexing has completed/when not pending.
+            var ss_deferred = new util.ServerStateDeferred({
+                url: self.dataset_state_url,
+                url_params: {dataset_id : self.dataset.id, hda_ldda: self.dataset.get('hda_ldda')},
+                interval: self.data_query_wait,
+                // Set up deferred to check dataset state until it is not pending.
+                success_fn: function(result) { return result !== "pending"; }
+            });
+            $.when(ss_deferred.go()).then(function() {
+                // Dataset is indexed, so use converted data.
+                self.data_manager.set('data_type', 'data');
+            });
+
+            // Reset post-draw actions function.
+            self.postdraw_actions = self.normal_postdraw_actions;
+        };
+        */
+    }
+});
+
+var LabelTrack = function (view, container) {
+    Track.call(this, view, container, {
+        resize: false,
+        header: false
+    });
+    this.container_div.addClass( "label-track" );
+};
+extend(LabelTrack.prototype, Track.prototype, {
+    init: function() {
+        // Enable by default because there should always be data when drawing track.
+        this.enabled = true;
+    },
+
+    /**
+     * Additional initialization required before drawing track for the first time.
+     */
+    predraw_init: function() {},
+
+    _draw: function(options) {
+        var view = this.view,
+            range = view.high - view.low,
+            tickDistance = Math.floor( Math.pow( 10, Math.floor( Math.log( range ) / Math.log( 10 ) ) ) ),
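+            // (E.g. a 2,500-base range gives tickDistance = 10^floor(log10(2500)) = 1000.)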
+            position = Math.floor( view.low / tickDistance ) * tickDistance,
+            width = this.view.container.width(),
+            new_div = $("<div/>").addClass('label-container');
+        while ( position < view.high ) {
+            var screenPosition = Math.floor( ( position - view.low ) / range * width );
+            new_div.append( $("<div/>").addClass('pos-label').text(util.commatize( position )).css( {
+                left: screenPosition
+            }));
+            position += tickDistance;
+        }
+        this.content_div.children( ":first" ).remove();
+        this.content_div.append( new_div );
+    }
+});
+
+// FIXME: CompositeTrack contains code both for composites of line tracks and for composites
+// of line + feature tracks. It would probably be best to create a separate class for each
+// type of composite track.
+
+/**
+ * A tiled track composed of multiple other tracks. Composite tracks only work with
+ * bigwig data for now.
+ */
+var CompositeTrack = function(view, container, obj_dict) {
+    TiledTrack.call(this, view, container, obj_dict);
+
+    // Init drawables; each drawable is a copy so that config/preferences
+    // are independent of each other. Also init left offset.
+    this.drawables = [];
+    if ('drawables' in obj_dict) {
+        var drawable;
+        for (var i = 0; i < obj_dict.drawables.length; i++) {
+            drawable = obj_dict.drawables[i];
+            this.drawables[i] = object_from_template(drawable, view, null);
+
+            // Track's left offset is the max of all tracks.
+            if (drawable.left_offset > this.left_offset) {
+                this.left_offset = drawable.left_offset;
+            }
+        }
+        this.enabled = true;
+    }
+
+    // Set all feature tracks to use Coverage mode.
+    _.each(this.drawables, function(d) {
+        if (d instanceof FeatureTrack || d instanceof ReadTrack) {
+            d.change_mode("Coverage");
+        }
+    });
+
+    this.update_icons();
+
+    // HACK: needed for saving object for now. Need to generalize get_type() to all Drawables and use
+    // that for object type.
+    this.obj_type = "CompositeTrack";
+};
+
+extend(CompositeTrack.prototype, TiledTrack.prototype, {
+    display_modes: CONTINUOUS_DATA_MODES,
+
+    config_params: _.union( Drawable.prototype.config_params, [
+        { key: 'min_value', label: 'Min Value', type: 'float', default_value: undefined },
+        { key: 'max_value', label: 'Max Value', type: 'float', default_value: undefined },
+        { key: 'mode', type: 'string', default_value: this.mode, hidden: true },
+        { key: 'height', type: 'int', default_value: 30, hidden: true }
+    ] ),
+
+    action_icons_def:
+    [
+        // Create composite track from group's tracks.
+        {
+            name: "composite_icon",
+            title: "Show individual tracks",
+            css_class: "layers-stack",
+            on_click_fn: function(track) {
+                $(".tooltip").remove();
+                track.show_group();
+            }
+        }
+    ].concat(TiledTrack.prototype.action_icons_def),
+
+    // HACK: CompositeTrack should inherit from DrawableCollection as well.
+    /**
+     * Returns representation of object in a dictionary for easy saving.
+     * Use from_dict to recreate object.
+     */
+    to_dict: DrawableCollection.prototype.to_dict,
+
+    add_drawable: DrawableCollection.prototype.add_drawable,
+
+    unpack_drawables: DrawableCollection.prototype.unpack_drawables,
+
+    config_onchange: function() {
+        this.set_name(this.config.get_value('name'));
+        this.request_draw({ clear_tile_cache: true });
+    },
+
+    /**
+     * Action to take during resize.
+     */
+    on_resize: function() {
+        // Propagate visible height to other tracks.
+        var visible_height = this.visible_height_px;
+        _.each(this.drawables, function(d) {
+            d.visible_height_px = visible_height;
+        });
+        Track.prototype.on_resize.call(this);
+    },
+
+    /**
+     * Change mode for all tracks.
+     */
+    change_mode: function(new_mode) {
+        TiledTrack.prototype.change_mode.call(this, new_mode);
+        for (var i = 0; i < this.drawables.length; i++) {
+            this.drawables[i].change_mode(new_mode);
+        }
+    },
+
+    /**
+     * Initialize component tracks and draw composite track when all components are initialized.
+     */
+    init: function() {
+        // Init components.
+        var init_deferreds = [];
+        for (var i = 0; i < this.drawables.length; i++) {
+            init_deferreds.push(this.drawables[i].init());
+        }
+
+        // Draw composite when all tracks available.
+        var track = this;
+        $.when.apply($, init_deferreds).then(function() {
+            track.enabled = true;
+            track.request_draw();
+        });
+    },
+
+    update_icons: function() {
+        // For now, hide filters and tool.
+        this.action_icons.filters_icon.hide();
+        this.action_icons.tools_icon.hide();
+        this.action_icons.param_space_viz_icon.hide();
+    },
+
+    can_draw: Drawable.prototype.can_draw,
+
+    _get_drawables: function() {
+        return this.drawables;
+    },
+
+    /**
+     * Replace this track with group that includes individual tracks.
+     */
+    show_group: function() {
+        // Create group with individual tracks.
+        var group = new DrawableGroup(this.view, this.container, {
+                name: this.config.get_value('name')
+            }),
+            track;
+        for (var i = 0; i < this.drawables.length; i++) {
+            track = this.drawables[i];
+            track.update_icons();
+            group.add_drawable(track);
+            track.container = group;
+            group.content_div.append(track.container_div);
+        }
+
+        // Replace track with group.
+        this.container.replace_drawable(this, group, true);
+        group.request_draw({ clear_tile_cache: true });
+    },
+
+    /**
+     * Actions taken before drawing.
+     */
+    before_draw: function() {
+        // FIXME: this is needed only if there are feature tracks in the composite track.
+        // TiledTrack.prototype.before_draw.call(this);
+
+        //
+        // Set min, max for tracks to be largest min, max.
+        //
+
+        // Get smallest min, biggest max.
+        var min = _.min(_.map(this.drawables, function(d) { return d.config.get_value('min_value'); })),
+            max = _.max(_.map(this.drawables, function(d) { return d.config.get_value('max_value'); }));
+
+        this.config.set_value('min_value', min);
+        this.config.set_value('max_value', max);
+
+        // Set all tracks to smallest min, biggest max.
+        _.each(this.drawables, function(d) {
+            d.config.set_value('min_value', min);
+            d.config.set_value('max_value', max);
+        });
+    },
+
+    /**
+     * Update minimum, maximum for component tracks.
+     */
+    update_all_min_max: function() {
+        var min_value = this.config.get_value('min_value'),
+            max_value = this.config.get_value('max_value');
+        _.each(this.drawables, function(d) {
+            d.config.set_value('min_value', min_value);
+            d.config.set_value('max_value', max_value);
+        });
+        this.request_draw({ clear_tile_cache: true });
+    },
+
+    /**
+     * Actions to be taken after draw has been completed. Draw is completed when all tiles have been
+     * drawn/fetched and shown.
+     */
+    postdraw_actions: function(tiles, width, w_scale, clear_after) {
+        // All tiles must be the same height in order to draw LineTracks, so redraw tiles as needed.
+        var max_height = -1, i;
+        for (i = 0; i < tiles.length; i++) {
+            var height = tiles[i].html_elt.find("canvas").height();
+            if (height > max_height) {
+                max_height = height;
+            }
+        }
+
+        for (i = 0; i < tiles.length; i++) {
+            var tile = tiles[i];
+            if (tile.html_elt.find("canvas").height() !== max_height) {
+                this.draw_helper(tile.region, w_scale, { force: true, height: max_height } );
+                tile.html_elt.remove();
+            }
+        }
+
+        // Wrap function so that it can be called without object reference.
+        var track = this,
+            t = function() { track.update_all_min_max(); };
+
+        // Add min, max labels.
+        this._add_yaxis_label('min', t);
+        this._add_yaxis_label('max', t);
+    }
+});
+
+/**
+ * Displays reference genome data.
+ */
+var ReferenceTrack = function (view) {
+    TiledTrack.call(this, view, { content_div: view.top_labeltrack }, { resize: false, header: false });
+
+    // Use offset to ensure that bases at tile edges are drawn.
+    this.left_offset = view.canvas_manager.char_width_px;
+    this.container_div.addClass("reference-track");
+    this.data_url = Galaxy.root + "api/genomes/" + this.view.dbkey;
+    this.data_url_extra_params = {reference: true};
+    this.data_manager = new visualization.GenomeReferenceDataManager({
+        data_url: this.data_url,
+        can_subset: this.can_subset
+    });
+    this.hide_contents();
+};
+extend(ReferenceTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
+    config_params: _.union( Drawable.prototype.config_params, [
+        { key: 'height', type: 'int', default_value: 13, hidden: true }
+    ] ),
+
+    init: function() {
+        this.data_manager.clear();
+        // Enable by default because there should always be data when drawing track.
+        this.enabled = true;
+    },
+
+    /**
+     * Additional initialization required before drawing track for the first time.
+     */
+    predraw_init: function() {},
+
+    can_draw: Drawable.prototype.can_draw,
+
+    /**
+     * Draws and shows tile if reference data can be displayed; otherwise track is hidden.
+     */
+    draw_helper: function(region, w_scale, options) {
+        var cur_visible = this.tiles_div.is(':visible'),
+            new_visible,
+            tile = null;
+        if (w_scale > this.view.canvas_manager.char_width_px) {
+            this.tiles_div.show();
+            new_visible = true;
+            tile = TiledTrack.prototype.draw_helper.call(this, region, w_scale, options);
+        }
+        else {
+            new_visible = false;
+            this.tiles_div.hide();
+        }
+
+        // NOTE: viewport resizing conceptually belongs in postdraw_actions(), but currently
+        // postdraw_actions() is not called when the reference track is hidden because there
+        // are no tiles. If resizing is moved to postdraw_actions(), resize must be called on
+        // every draw because showing/hiding cannot easily be detected.
+
+        // If showing or hiding reference track, resize viewport.
+        if (cur_visible !== new_visible) {
+            this.view.resize_viewport();
+        }
+
+        return tile;
+    },
+
+    can_subset: function(entry) { return true; },
+
+    /**
+     * Draw ReferenceTrack tile.
+     */
+    draw_tile: function(data, ctx, mode, region, w_scale) {
+        // Try to subset data.
+        var subset = this.data_manager.subset_entry(data, region),
+            seq_data = subset.data;
+
+        // Draw sequence data.
+        var canvas = ctx.canvas;
+        ctx.font = ctx.canvas.manager.default_font;
+        ctx.textAlign = "center";
+        for (var c = 0, str_len = seq_data.length; c < str_len; c++) {
+            ctx.fillStyle = this.view.get_base_color(seq_data[c]);
+            ctx.fillText(seq_data[c], Math.floor(c * w_scale), 10);
+        }
+        return new Tile(this, region, w_scale, canvas, subset);
+    }
+});
+
+/**
+ * Track displays continuous/numerical data. Track expects position data in 1-based format, i.e. wiggle format.
+ */
+var LineTrack = function (view, container, obj_dict) {
+    this.mode = "Histogram";
+    TiledTrack.call(this, view, container, obj_dict);
+    // Need left offset for drawing overlap near tile boundaries.
+    this.left_offset = 30;
+
+    // If server has byte-range support, use BBI data manager to read directly from the BBI file.
+    // FIXME: there should be a flag to wait for this check to complete before loading the track.
+    var self = this;
+    $.when(supportsByteRanges(Galaxy.root + 'datasets/' + this.dataset.id + '/display'))
+     .then(function(supportsByteRanges) {
+         if (supportsByteRanges) {
+             self.data_manager = new bbi.BBIDataManager({
+                 dataset: self.dataset
+             });
+         }
+
+    });
+};
+
+extend(LineTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
+    display_modes: CONTINUOUS_DATA_MODES,
+
+    config_params: _.union( Drawable.prototype.config_params, [
+        { key: 'color', label: 'Color', type: 'color' },
+        { key: 'min_value', label: 'Min Value', type: 'float', default_value: undefined },
+        { key: 'max_value', label: 'Max Value', type: 'float', default_value: undefined },
+        { key: 'mode', type: 'string', default_value: this.mode, hidden: true },
+        { key: 'height', type: 'int', default_value: 30, hidden: true }
+    ] ),
+
+    config_onchange: function() {
+        this.set_name(this.config.get_value('name'));
+        this.request_draw({ clear_tile_cache: true });
+    },
+
+    /**
+     * Actions to be taken before drawing.
+     */
+    // FIXME: can the default behavior be used; right now it breaks during resize.
+    before_draw: function() {},
+
+    /**
+     * Draw track tile.
+     */
+    draw_tile: function(result, ctx, mode, region, w_scale) {
+        return this._draw_line_track_tile(result, ctx, mode, region, w_scale);
+    },
+
+    /**
+     * Subset data only if data is at single-base pair resolution.
+     */
+    can_subset: function(entry) {
+        return (entry.data[1][0] - entry.data[0][0] === 1);
+    },
+
+    /**
+     * Add min, max labels.
+     */
+    postdraw_actions: function(tiles, width, w_scale, clear_after) {
+        // Add min, max labels.
+        this._add_yaxis_label('max');
+        this._add_yaxis_label('min');
+    }
+});
+
+/**
+ * Diagonal heatmap for showing interactions data.
+ */
+var DiagonalHeatmapTrack = function (view, container, obj_dict) {
+    this.mode = "Heatmap";
+    TiledTrack.call(this, view, container, obj_dict);
+};
+
+extend(DiagonalHeatmapTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
+    display_modes: ["Heatmap"],
+
+    config_params: _.union( Drawable.prototype.config_params, [
+        { key: 'pos_color', label: 'Positive Color', type: 'color', default_value: "#FF8C00" },
+        { key: 'neg_color', label: 'Negative Color', type: 'color', default_value: "#4169E1" },
+        { key: 'min_value', label: 'Min Value', type: 'int', default_value: undefined },
+        { key: 'max_value', label: 'Max Value', type: 'int', default_value: undefined },
+        { key: 'mode', type: 'string', default_value: this.mode, hidden: true },
+        { key: 'height', type: 'int', default_value: 500, hidden: true }
+    ] ),
+
+    config_onchange: function() {
+        this.set_name(this.config.get_value('name'));
+        this.request_draw({ clear_tile_cache: true });
+    },
+
+    /**
+     * Additional initialization required before drawing track for the first time.
+     */
+    predraw_init: function() {
+        var track = this;
+        return $.getJSON( track.dataset.url(),
+            {  data_type: 'data', stats: true, chrom: track.view.chrom, low: 0,
+               high: track.view.max_high, hda_ldda: track.dataset.get('hda_ldda') }, function(result) {
+            var data = result.data;
+        });
+    },
+
+    /**
+     * Draw tile.
+     */
+    draw_tile: function(result, ctx, mode, region, w_scale) {
+        // Paint onto canvas.
+        var canvas = ctx.canvas,
+            painter = new painters.DiagonalHeatmapPainter(result.data, region.get('start'), region.get('end'), this.config.to_key_value_dict(), mode);
+        painter.draw(ctx, canvas.width, canvas.height, w_scale);
+
+        return new Tile(this, region, w_scale, canvas, result.data);
+    }
+});
+
+/**
+ * A track that displays features/regions. Track expects position data in BED format, i.e. 0-based, half-open.
+ */
+var FeatureTrack = function(view, container, obj_dict) {
+    TiledTrack.call(this, view, container, obj_dict);
+    this.container_div.addClass( "feature-track" );
+    this.summary_draw_height = 30;
+    this.slotters = {};
+    this.start_end_dct = {};
+    this.left_offset = 200;
+
+    // this.painter = painters.LinkedFeaturePainter;
+    this.set_painter_from_config();
+};
+extend(FeatureTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
+    display_modes: ["Auto", "Coverage", "Dense", "Squish", "Pack"],
+
+    config_params: _.union( Drawable.prototype.config_params, [
+        { key: 'block_color', label: 'Block color', type: 'color' },
+        { key: 'reverse_strand_color', label: 'Antisense strand color', type: 'color' },
+        { key: 'label_color', label: 'Label color', type: 'color', default_value: 'black' },
+        { key: 'show_counts', label: 'Show summary counts', type: 'bool', default_value: true,
+          help: 'Show the number of items in each bin when drawing summary histogram' },
+        { key: 'min_value', label: 'Histogram minimum', type: 'float', default_value: undefined, help: 'clear value to set automatically' },
+        { key: 'max_value', label: 'Histogram maximum', type: 'float', default_value: undefined, help: 'clear value to set automatically' },
+        { key: 'connector_style', label: 'Connector style', type: 'select', default_value: 'fishbones',
+            options: [ { label: 'Line with arrows', value: 'fishbone' }, { label: 'Arcs', value: 'arcs' } ] },
+        { key: 'mode', type: 'string', default_value: this.mode, hidden: true },
+        { key: 'height', type: 'int', default_value: 0, hidden: true}
+    ] ),
+
+    config_onchange: function() {
+        this.set_name(this.config.get_value('name'));
+        this.set_painter_from_config();
+        this.request_draw({ clear_tile_cache: true });
+    },
+
+    set_painter_from_config: function() {
+        if ( this.config.get_value('connector_style') === 'arcs' ) {
+            this.painter = painters.ArcLinkedFeaturePainter;
+        } else {
+            this.painter = painters.LinkedFeaturePainter;
+        }
+    },
+
+    /**
+     * Actions to be taken after draw has been completed. Draw is completed when all tiles have been
+     * drawn/fetched and shown.
+     */
+    postdraw_actions: function(tiles, width, w_scale, clear_after) {
+        TiledTrack.prototype.postdraw_actions.call(this, tiles, width, w_scale, clear_after);
+
+        var track = this,
+            i,
+            line_track_tiles = _.filter(tiles, function(t) {
+                return (t instanceof LineTrackTile);
+            });
+
+        //
+        // Finish drawing of features that span multiple tiles. Features that span multiple tiles
+        // are labeled incomplete on the tile level because they cannot be completely drawn.
+        //
+        if (line_track_tiles.length === 0) {
+            // Gather incomplete features together.
+            var all_incomplete_features = {};
+            _.each(_.pluck(tiles, 'incomplete_features'), function(inc_features) {
+                _.each(inc_features, function(feature) {
+                    all_incomplete_features[feature[0]] = feature;
+                });
+            });
+
+            // Draw incomplete features on each tile.
+            var self = this;
+            _.each(tiles, function(tile) {
+                // Remove features already drawn on tile originally.
+                var tile_incomplete_features =_.omit(all_incomplete_features,
+                                                     _.map(tile.incomplete_features, function(f) { return f[0]; }));
+
+                // Remove features already drawn on tile in past postdraw actions.
+                tile_incomplete_features = _.omit(tile_incomplete_features, _.keys(tile.other_tiles_features_drawn));
+
+                // Draw tile's incomplete features.
+                if (_.size(tile_incomplete_features) !== 0) {
+                    // To draw incomplete features, create new canvas, copy original canvas/tile onto new
+                    // canvas, and then draw incomplete features on the new canvas.
+                    var features = { data: _.values( tile_incomplete_features ) },
+                        new_canvas = self.view.canvas_manager.new_canvas(),
+                        new_canvas_ctx = new_canvas.getContext('2d');
+                    new_canvas.height = Math.max(tile.canvas.height,
+                                                 self.get_canvas_height(features, tile.mode, tile.w_scale, 100));
+                    new_canvas.width = tile.canvas.width;
+                    new_canvas_ctx.drawImage(tile.canvas, 0, 0);
+                    new_canvas_ctx.translate(track.left_offset, 0);
+                    var new_tile = self.draw_tile(features, new_canvas_ctx, tile.mode,
+                                                  tile.region, tile.w_scale, tile.seq_data);
+                    $(tile.canvas).replaceWith($(new_tile.canvas));
+                    tile.canvas = new_canvas;
+                    _.extend(tile.other_tiles_features_drawn, all_incomplete_features);
+                }
+            });
+        }
+
+        // If mode is Coverage and tiles do not share max, redraw tiles as necessary using new max.
+        /*
+        This code isn't used right now because Coverage mode uses predefined max in preferences.
+        if (track.mode === "Coverage") {
+            // Get global max.
+            var global_max = -1;
+            for (i = 0; i < tiles.length; i++) {
+                var cur_max = tiles[i].max_val;
+                if (cur_max > global_max) {
+                    global_max = cur_max;
+                }
+            }
+
+            for (i = 0; i < tiles.length; i++) {
+                var tile = tiles[i];
+                if (tile.max_val !== global_max) {
+                    tile.html_elt.remove();
+                    track.draw_helper(tile.index, w_scale, { more_tile_data: { force: true, max: global_max } } );
+                }
+            }
+        }
+        */
+
+        //
+        // Update filter attributes, UI.
+        //
+
+        // Update filtering UI.
+        if (track.filters_manager) {
+            var filters = track.filters_manager.filters,
+                f;
+            for (f = 0; f < filters.length; f++) {
+                filters[f].update_ui_elt();
+            }
+
+            // Determine if filters are available; this is based on the tiles' data.
+            // Criteria for filter to be available: (a) it is applicable to tile data and (b) filter min != filter max.
+            var filters_available = false,
+                example_feature,
+                filter;
+            for (i = 0; i < tiles.length; i++) {
+                if (tiles[i].data.length) {
+                    example_feature = tiles[i].data[0];
+                    for (f = 0; f < filters.length; f++) {
+                        filter = filters[f];
+                        if ( filter.applies_to(example_feature) &&
+                             filter.min !== filter.max ) {
+                            filters_available = true;
+                            break;
+                        }
+                    }
+                }
+            }
+
+            // If filter availability changed, hide filter div if necessary and update menu.
+            if (track.filters_available !== filters_available) {
+                track.filters_available = filters_available;
+                if (!track.filters_available) {
+                    track.filters_manager.hide();
+                }
+                track.update_icons();
+            }
+        }
+
+        //
+        // If not all features slotted, show icon for showing more rows (slots).
+        //
+        if (tiles[0] instanceof FeatureTrackTile) {
+            var all_slotted = true;
+            for (i = 0; i < tiles.length; i++) {
+                if (!tiles[i].all_slotted) {
+                    all_slotted = false;
+                    break;
+                }
+            }
+            this.action_icons.show_more_rows_icon.toggle(!all_slotted);
+        }
+        else {
+            this.action_icons.show_more_rows_icon.hide();
+        }
+    },
+
+    /**
+     * Update track interface to show display mode being used.
+     */
+    update_auto_mode: function(mode) {
+        if (this.mode === "Auto") {
+            if (mode === "no_detail") {
+                mode = "feature spans";
+            }
+            this.action_icons.mode_icon.attr("title", "Set display mode (now: Auto/" + mode + ")");
+        }
+    },
+
+    /**
+     * Place features in slots for drawing (i.e. pack features).
+     * this.slotters[level] is created in this method. this.slotters[level]
+     * is a Slotter object. Returns the number of slots used to pack features.
+     */
+    incremental_slots: function(level, features, mode) {
+
+        // Get/create incremental slots for level. If display mode changed,
+        // need to create new slots.
+
+        var dummy_context = this.view.canvas_manager.dummy_context,
+            slotter = this.slotters[level];
+        if (!slotter || (slotter.mode !== mode)) {
+            slotter = new (slotting.FeatureSlotter)( level, mode, MAX_FEATURE_DEPTH, function ( x ) { return dummy_context.measureText( x ); } );
+            this.slotters[level] = slotter;
+        }
+
+        return slotter.slot_features( features );
+    },
+
+    /**
+     * Returns appropriate display mode based on data.
+     */
+    get_mode: function(data) {
+        var mode;
+        // HACK: use no_detail mode when track is in overview to prevent overview from being too large.
+        if (data.extra_info === "no_detail" || this.is_overview) {
+            mode = "no_detail";
+        }
+        else {
+            // Choose b/t Squish and Pack.
+            // Proxy measures for using Squish:
+            // (a) error message re: limiting number of features shown;
+            // (b) X number of features shown;
+            // (c) size of view shown.
+            // TODO: cannot use (a) and (b) because it requires coordinating mode across tiles;
+            // fix this so that tiles are redrawn as necessary to use the same mode.
+            //if ( (result.message && result.message.match(/^Only the first [\d]+/)) ||
+            //     (result.data && result.data.length > 2000) ||
+            //var data = result.data;
+            // if ( (data.length && data.length < 4) ||
+            //      (this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH) ) {
+            if ( this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH ) {
+                mode = "Squish";
+            } else {
+                mode = "Pack";
+            }
+        }
+        return mode;
+    },
+
+    /**
+     * Returns canvas height needed to display data; return value is an integer that denotes the
+     * number of pixels required.
+     */
+    get_canvas_height: function(result, mode, w_scale, canvas_width) {
+        if (mode === "Coverage" || result.dataset_type === 'bigwig') {
+            return this.summary_draw_height;
+        }
+        else {
+            // All other modes require slotting.
+            var rows_required = this.incremental_slots(w_scale, result.data, mode);
+            // HACK: use dummy painter to get required height. Painter should be extended so that get_required_height
+            // works as a static function.
+            var dummy_painter = new (this.painter)(null, null, null, this.config.to_key_value_dict(), mode);
+            return Math.max(this.min_height_px, dummy_painter.get_required_height(rows_required, canvas_width) );
+        }
+    },
+
+    /**
+     * Draw FeatureTrack tile.
+     * @param result result from server
+     * @param ctx canvas context to draw on
+     * @param mode mode to draw in
+     * @param region region to draw on tile
+     * @param w_scale pixels per base
+     * @param ref_seq reference sequence data
+     * @param cur_tile true if drawing is occurring on a currently visible tile.
+     */
+    draw_tile: function(result, ctx, mode, region, w_scale, ref_seq, cur_tile) {
+        var track = this,
+            canvas = ctx.canvas,
+            tile_low = region.get('start'),
+            tile_high = region.get('end'),
+            left_offset = this.left_offset;
+
+        // If data is line track data, draw line track tile.
+        if (result.dataset_type === 'bigwig') {
+            return this._draw_line_track_tile(result, ctx, mode, region, w_scale);
+        }
+
+        // Handle row-by-row tracks
+
+        // Preprocessing: filter features and determine whether all unfiltered features have been slotted.
+        var filtered = [],
+            slots = this.slotters[w_scale].slots,
+            all_slotted = true;
+        if ( result.data ) {
+            var filters = this.filters_manager.filters;
+            for (var i = 0, len = result.data.length; i < len; i++) {
+                var feature = result.data[i];
+                var hide_feature = false;
+                var filter;
+                for (var f = 0, flen = filters.length; f < flen; f++) {
+                    filter = filters[f];
+                    filter.update_attrs(feature);
+                    if (!filter.keep(feature)) {
+                        hide_feature = true;
+                        break;
+                    }
+                }
+                if (!hide_feature) {
+                    // Feature visible.
+                    filtered.push(feature);
+                    // Set flag if not slotted.
+                    if ( !(feature[0] in slots) ) {
+                        all_slotted = false;
+                    }
+                }
+            }
+        }
+
+        // Create painter.
+        var filter_alpha_scaler = (this.filters_manager.alpha_filter ? new FilterScaler(this.filters_manager.alpha_filter) : null),
+            filter_height_scaler = (this.filters_manager.height_filter ? new FilterScaler(this.filters_manager.height_filter) : null),
+            painter = new (this.painter)(filtered, tile_low, tile_high, this.config.to_key_value_dict(), mode, filter_alpha_scaler, filter_height_scaler,
+                                         // HACK: ref_seq will only be defined for ReadTracks, and only the ReadPainter accepts that argument
+                                         ref_seq,
+                                         // Only the ReadPainter will use this function
+                                         function(b) { return track.view.get_base_color(b); });
+        var feature_mapper = null,
+            incomplete_features = [];
+
+        ctx.fillStyle = this.config.get_value('block_color');
+        ctx.font = ctx.canvas.manager.default_font;
+        ctx.textAlign = "right";
+
+        if (result.data) {
+            // Draw features.
+            var draw_results = painter.draw(ctx, canvas.width, canvas.height, w_scale, slots);
+            feature_mapper = draw_results.feature_mapper;
+            incomplete_features = draw_results.incomplete_features;
+            feature_mapper.translation = -left_offset;
+        }
+
+        // If not drawing on current tile, create new tile.
+        if (!cur_tile) {
+            return new FeatureTrackTile(track, region, w_scale, canvas, result.data, mode,
+                                        result.message, all_slotted, feature_mapper,
+                                        incomplete_features, ref_seq);
+        }
+    }
+});
+
+/**
+ * Displays variant data.
+ */
+var VariantTrack = function(view, container, obj_dict) {
+    TiledTrack.call(this, view, container, obj_dict);
+    this.painter = painters.VariantPainter;
+    this.summary_draw_height = 30;
+
+    // Maximum resolution is ~45 pixels/base, so use this size left offset to ensure that full
+    // variant is drawn when variant is at start of tile.
+    this.left_offset = 30;
+};
+
+extend(VariantTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
+    display_modes: ["Auto", "Coverage", "Dense", "Squish", "Pack"],
+
+    config_params: _.union( Drawable.prototype.config_params, [
+        { key: 'color', label: 'Histogram color', type: 'color' },
+        { key: 'show_sample_data', label: 'Show sample data', type: 'bool', default_value: true },
+        { key: 'show_labels', label: 'Show summary and sample labels', type: 'bool', default_value: true },
+        { key: 'summary_height', label: 'Locus summary height', type: 'float', default_value: 20 },
+        { key: 'mode', type: 'string', default_value: this.mode, hidden: true },
+        { key: 'height', type: 'int', default_value: 0, hidden: true }
+    ] ),
+
+    config_onchange: function() {
+        this.set_name(this.config.get_value('name'));
+        this.request_draw({ clear_tile_cache: true });
+    },
+
+    /**
+     * Draw tile.
+     */
+    draw_tile: function(result, ctx, mode, region, w_scale) {
+        // Data could be coverage data or variant data.
+        if (result.dataset_type === 'bigwig') {
+            return this._draw_line_track_tile(result, ctx, "Histogram", region, w_scale);
+        }
+        else { // result.dataset_type === 'variant'
+            var view = this.view,
+                painter = new (this.painter)(result.data, region.get('start'), region.get('end'), this.config.to_key_value_dict(), mode,
+                                             function(b) { return view.get_base_color(b); });
+            painter.draw(ctx, ctx.canvas.width, ctx.canvas.height, w_scale);
+            return new Tile(this, region, w_scale, ctx.canvas, result.data);
+        }
+    },
+
+    /**
+     * Returns canvas height needed to display data; return value is an integer that denotes the
+     * number of pixels required.
+     */
+    get_canvas_height: function(result, mode, w_scale, canvas_width) {
+        if (result.dataset_type === 'bigwig') {
+            return this.summary_draw_height;
+        }
+        else {
+            // HACK: sample_names may not be defined when the dataset definition is fetched
+            // before the dataset is complete (as is done when running tools). In that case,
+            // fall back on the number of samples in the data. This can be fixed by
+            // re-requesting the dataset definition in init.
+            var num_samples = ( this.dataset.get_metadata('sample_names') ? this.dataset.get_metadata('sample_names').length : 0);
+            if (num_samples === 0 && result.data.length !== 0) {
+                // Sample data is separated by commas, so this computes # of samples:
+                num_samples = result.data[0][7].match(/,/g);
+                if ( num_samples === null ) {
+                    num_samples = 1;
+                }
+                else {
+                    num_samples = num_samples.length + 1;
+                }
+            }
+
+            var dummy_painter = new (this.painter)(null, null, null, this.config.to_key_value_dict(), mode);
+            return dummy_painter.get_required_height(num_samples);
+        }
+    },
+
+    /**
+     * Additional initialization required before drawing track for the first time.
+     */
+    predraw_init: function() {
+        var deferreds = [ Track.prototype.predraw_init.call(this) ];
+        // FIXME: updating dataset metadata is only needed for visual analysis. Can
+        // this be moved somewhere else?
+        if (!this.dataset.get_metadata('sample_names')) {
+            deferreds.push(this.dataset.fetch());
+        }
+        return deferreds;
+    },
+
+    /**
+     * Actions to be taken after draw has been completed. Draw is completed when all tiles have been
+     * drawn/fetched and shown.
+     */
+    postdraw_actions: function(tiles, width, w_scale, clear_after) {
+        TiledTrack.prototype.postdraw_actions.call(this, tiles, width, w_scale, clear_after);
+
+        var line_track_tiles = _.filter(tiles, function(t) {
+            return (t instanceof LineTrackTile);
+        });
+
+        // Add summary/sample labels if needed and not already included.
+        var sample_names = this.dataset.get_metadata('sample_names');
+        if (line_track_tiles.length === 0 && this.config.get_value('show_labels') && sample_names && sample_names.length > 1) {
+            var font_size;
+
+            // Add and/or style labels.
+            if (this.container_div.find('.yaxislabel.variant').length === 0) {
+                // Add summary and sample labels.
+
+                // Add summary label to middle of summary area.
+                font_size = this.config.get_value('summary_height') / 2;
+                this.tiles_div.prepend(
+                    $("<div/>").text('Summary').addClass('yaxislabel variant top').css({
+                        'font-size': font_size + 'px',
+                        'top': (this.config.get_value('summary_height') - font_size) / 2 + 'px'
+                    })
+                );
+
+                // Show sample labels.
+                if (this.config.get_value('show_sample_data')) {
+                    var samples_div_html = sample_names.join('<br/>');
+
+                    this.tiles_div.prepend(
+                        $("<div/>").html(samples_div_html).addClass('yaxislabel variant top sample').css({
+                            'top': this.config.get_value('summary_height')
+                        })
+                    );
+                }
+            }
+
+            // Style labels.
+
+            // Match sample font size to mode.
+            font_size = (this.mode === 'Squish' ? 5 : 10) + 'px';
+            $(this.tiles_div).find('.sample').css({
+                'font-size': font_size,
+                'line-height': font_size
+            });
+            // Color labels to preference color.
+            $(this.tiles_div).find('.yaxislabel').css('color', this.config.get_value('label_color'));
+        }
+        else {
+            // Remove all labels.
+            this.container_div.find('.yaxislabel.variant').remove();
+        }
+    }
+});
+
+/**
+ * Track that displays mapped reads. Track expects position data in 1-based, closed format, i.e. SAM/BAM format.
+ */
+var ReadTrack = function (view, container, obj_dict) {
+    FeatureTrack.call(this, view, container, obj_dict);
+    this.painter = painters.ReadPainter;
+    this.update_icons();
+};
+
+extend(ReadTrack.prototype, Drawable.prototype, TiledTrack.prototype, FeatureTrack.prototype, {
+    config_params: _.union( Drawable.prototype.config_params, [
+        { key: 'block_color', label: 'Histogram color', type: 'color' },
+        { key: 'detail_block_color', label: 'Sense strand block color', type: 'color', 'default_value': '#AAAAAA' },
+        { key: 'reverse_strand_color', label: 'Antisense strand block color', type: 'color', 'default_value': '#DDDDDD' },
+        { key: 'label_color', label: 'Label color', type: 'color', default_value: 'black' },
+        { key: 'show_insertions', label: 'Show insertions', type: 'bool', default_value: false },
+        { key: 'show_differences', label: 'Show differences only', type: 'bool', default_value: true },
+        { key: 'show_counts', label: 'Show summary counts', type: 'bool', default_value: true },
+        { key: 'mode', type: 'string', default_value: this.mode, hidden: true },
+        { key: 'min_value', label: 'Histogram minimum', type: 'float', default_value: undefined, help: 'clear value to set automatically' },
+        { key: 'max_value', label: 'Histogram maximum', type: 'float', default_value: undefined, help: 'clear value to set automatically' },
+        { key: 'height', type: 'int', default_value: 0, hidden: true}
+    ] ),
+
+    config_onchange: function() {
+        this.set_name(this.config.get_value('name'));
+        this.request_draw({ clear_tile_cache: true });
+    }
+});
+
+/**
+ * Objects that can be added to a view.
+ */
+var addable_objects = {
+    "CompositeTrack": CompositeTrack,
+    "DrawableGroup": DrawableGroup,
+    "DiagonalHeatmapTrack": DiagonalHeatmapTrack,
+    "FeatureTrack": FeatureTrack,
+    "LineTrack": LineTrack,
+    "ReadTrack": ReadTrack,
+    "VariantTrack": VariantTrack,
+    // For backward compatibility, map vcf track to variant.
+    "VcfTrack": VariantTrack
+};
+
+/**
+ * Create new object from a template. A template can be either an object dictionary or an
+ * object itself.
+ */
+var object_from_template = function(template, view, container) {
+    if ('copy' in template) {
+        // Template is an object.
+        return template.copy(container);
+    }
+    else {
+        // Template is a dictionary.
+        var drawable_type = template.obj_type;
+        // For backward compatibility:
+        if (!drawable_type) {
+            drawable_type = template.track_type;
+        }
+        return new addable_objects[ drawable_type ](view, container, template);
+    }
+};
+
+return {
+    TracksterView: TracksterView,
+    DrawableGroup: DrawableGroup,
+    LineTrack: LineTrack,
+    FeatureTrack: FeatureTrack,
+    DiagonalHeatmapTrack: DiagonalHeatmapTrack,
+    ReadTrack: ReadTrack,
+    VariantTrack: VariantTrack,
+    CompositeTrack: CompositeTrack,
+    object_from_template: object_from_template
+};
+
+});
diff --git a/client/galaxy/scripts/viz/trackster/util.js b/client/galaxy/scripts/viz/trackster/util.js
new file mode 100644
index 0000000..8e61133
--- /dev/null
+++ b/client/galaxy/scripts/viz/trackster/util.js
@@ -0,0 +1,137 @@
+define(function(){
+
+/**
+ * Stringifies a number, adding commas for digit grouping per North American convention.
+ */
+function commatize( number ) {
+    number += ''; // Convert to string
+    var rgx = /(\d+)(\d{3})/;
+    while (rgx.test(number)) {
+        number = number.replace(rgx, '$1' + ',' + '$2');
+    }
+    return number;
+}
+
+/**
+ * Helper to determine if object is jQuery deferred.
+ */
+var is_deferred = function ( d ) {
+    return ('promise' in d);
+};
+
+/**
+ * Implementation of a server-state based deferred. Server is repeatedly polled, and when
+ * condition is met, deferred is resolved.
+ */
+var ServerStateDeferred = Backbone.Model.extend({
+    defaults: {
+        ajax_settings: {},
+        interval: 1000,
+        success_fn: function(result) { return true; }
+    },
+
+    /**
+     * Returns a deferred that resolves when success function returns true.
+     */
+    go: function() {
+        var deferred = $.Deferred(),
+            self = this,
+            ajax_settings = self.get('ajax_settings'),
+            success_fn = self.get('success_fn'),
+            interval = self.get('interval'),
+            _go = function() {
+                $.ajax(ajax_settings).success(function(result) {
+                    if (success_fn(result)) {
+                        // Result is good, so resolve.
+                        deferred.resolve(result);
+                    }
+                    else {
+                        // Result not good, try again.
+                        setTimeout(_go, interval);
+                    }
+                });
+            };
+        _go();
+        return deferred;
+    }
+});
+
+/**
+ * Returns a random color in hexadecimal format that is sufficiently different from a single color
+ * or set of colors.
+ * @param colors a color or list of colors in the format '#RRGGBB'
+ */
+var get_random_color = function(colors) {
+    // Default for colors is white.
+    if (!colors) { colors = "#ffffff"; }
+
+    // If needed, create list of colors.
+    if ( typeof(colors) === "string" ) {
+        colors = [ colors ];
+    }
+
+    // Convert colors to numbers.
+    for (var i = 0; i < colors.length; i++) {
+        colors[i] = parseInt( colors[i].slice(1), 16 );
+    }
+
+    // -- Perceived brightness and difference formulas are from
+    // -- http://www.w3.org/WAI/ER/WD-AERT/#color-contrast
+
+    // Compute perceived color brightness (based on RGB-YIQ transformation):
+    var brightness = function(r, g, b) {
+        return ( (r * 299) + (g * 587) + (b * 114) ) / 1000;
+    };
+
+    // Compute color difference:
+    var difference = function(r1, g1, b1, r2, g2, b2) {
+        return ( Math.max(r1, r2) - Math.min(r1, r2) ) +
+               ( Math.max(g1, g2) - Math.min(g1, g2) ) +
+               ( Math.max(b1, b2) - Math.min(b1, b2) );
+    };
+
+    // Create new random color.
+    var new_color, nr, ng, nb,
+        other_color, or, og, ob,
+        n_brightness, o_brightness,
+        diff, ok = false,
+        num_tries = 0;
+    do {
+        // New color is never white b/c random in [0,1)
+        new_color = Math.round( Math.random() * 0xffffff );
+        nr = ( new_color & 0xff0000 ) >> 16;
+        ng = ( new_color & 0x00ff00 ) >> 8;
+        nb = new_color & 0x0000ff;
+        n_brightness = brightness(nr, ng, nb);
+        ok = true;
+        for (i = 0; i < colors.length; i++) {
+            other_color = colors[i];
+            or = ( other_color & 0xff0000 ) >> 16;
+            og = ( other_color & 0x00ff00 ) >> 8;
+            ob = other_color & 0x0000ff;
+            o_brightness = brightness(or, og, ob);
+            diff = difference(nr, ng, nb, or, og, ob);
+            // These thresholds may need to be adjusted. The W3C formulas above recommend a
+            // minimum brightness difference of 125 and a minimum color difference of 500;
+            // lower thresholds are used here so that a suitable color is found more easily.
+            if ( ( Math.abs(n_brightness - o_brightness) < 40 ) ||
+                 ( diff < 200 ) ) {
+                ok = false;
+                break;
+            }
+        }
+
+        num_tries++;
+    } while (!ok && num_tries <= 10);
+
+    // Add 0x1000000 to left pad number with 0s.
+    return '#' + ( 0x1000000 + new_color ).toString(16).substr(1,6);
+};
+
+return {
+    commatize: commatize,
+    is_deferred: is_deferred,
+    ServerStateDeferred : ServerStateDeferred,
+    get_random_color    : get_random_color
+};
+
+});
diff --git a/client/galaxy/scripts/viz/visualization.js b/client/galaxy/scripts/viz/visualization.js
new file mode 100644
index 0000000..01eec34
--- /dev/null
+++ b/client/galaxy/scripts/viz/visualization.js
@@ -0,0 +1,1076 @@
+define( ["libs/underscore", "mvc/dataset/data", "viz/trackster/util", "utils/config"], function(_, data_mod, util_mod, config_mod) {
+
+/**
+ * Mixin for returning custom JSON representation from toJSON. Class attribute to_json_keys defines a set of attributes
+ * to include in the representation; to_json_mappers defines mappers for returned objects.
+ */
+var CustomToJSON = {
+    /**
+     * Returns JSON representation of object using to_json_keys and to_json_mappers.
+     */
+    toJSON: function() {
+        var self = this,
+            json = {};
+        _.each(self.constructor.to_json_keys, function(k) {
+            var val = self.get(k);
+            if (k in self.constructor.to_json_mappers) {
+                val = self.constructor.to_json_mappers[k](val, self);
+            }
+            json[k] = val;
+        });
+        return json;
+    }
+};
+
+/**
+ * Model, view, and controller objects for Galaxy visualization framework.
+ *
+ * Models have no references to views, instead using events to indicate state
+ * changes; this is advantageous because multiple views can use the same object
+ * and models can be used without views.
+ */
+
+/**
+ * Use a popup grid to select datasets from histories or libraries. After datasets are selected,
+ * track definitions are obtained from the server and the success_fn is called with the list of
+ * definitions for selected datasets.
+ */
+var select_datasets = function(dataset_url, add_track_async_url, filters, success_fn) {
+    $.ajax({
+        url: dataset_url,
+        data: filters,
+        error: function() { alert( "Grid failed" ); },
+        success: function(table_html) {
+            Galaxy.modal.show({
+                title   : "Select datasets for new tracks",
+                body    : table_html,
+                buttons :
+                {
+                    "Cancel": function() {
+                        Galaxy.modal.hide();
+                    },
+                    "Add": function() {
+                        var requests = [];
+                        $('input[name=id]:checked,input[name=ldda_ids]:checked').each(function() {
+                            var data = {
+                                    data_type: 'track_config',
+                                    'hda_ldda': 'hda'
+                                },
+                                id = $(this).val();
+                            if ($(this).attr("name") !== "id") {
+                                data.hda_ldda = 'ldda';
+                            }
+                            requests[requests.length] = $.ajax({
+                                url: add_track_async_url + "/" + id,
+                                data: data,
+                                dataType: "json"
+                            });
+                        });
+                        // To preserve order, wait until there are definitions for all tracks and then add
+                        // them sequentially.
+                        $.when.apply($, requests).then(function() {
+                            // jQuery always returns an Array for arguments, so need to look at first element
+                            // to determine whether multiple requests were made and consequently how to
+                            // map arguments to track definitions.
+                            var track_defs = (arguments[0] instanceof Array ?
+                                               $.map(arguments, function(arg) { return arg[0]; }) :
+                                               [ arguments[0] ]
+                                               );
+                            success_fn(track_defs);
+                        });
+                        Galaxy.modal.hide();
+                    }
+               }
+            });
+        }
+    });
+};
+
+// --------- Models ---------
+
+/**
+ * Canvas manager is used to create canvases for browsers and to provide a pattern cache.
+ */
+var CanvasManager = function(default_font) {
+    this.default_font = default_font !== undefined ? default_font : "9px Monaco, Lucida Console, monospace";
+
+    this.dummy_canvas = this.new_canvas();
+    this.dummy_context = this.dummy_canvas.getContext('2d');
+    this.dummy_context.font = this.default_font;
+
+    this.char_width_px = this.dummy_context.measureText("A").width;
+
+    this.patterns = {};
+
+    // FIXME: move somewhere to make this more general
+    this.load_pattern( 'right_strand', "/visualization/strand_right.png" );
+    this.load_pattern( 'left_strand', "/visualization/strand_left.png" );
+    this.load_pattern( 'right_strand_inv', "/visualization/strand_right_inv.png" );
+    this.load_pattern( 'left_strand_inv', "/visualization/strand_left_inv.png" );
+};
+
+_.extend( CanvasManager.prototype, {
+    load_pattern: function( key, path ) {
+        var patterns = this.patterns,
+            dummy_context = this.dummy_context,
+            image = new Image();
+        image.src = Galaxy.root + "static/images" + path;
+        image.onload = function() {
+            patterns[key] = dummy_context.createPattern( image, "repeat" );
+        };
+    },
+    get_pattern: function( key ) {
+        return this.patterns[key];
+    },
+    new_canvas: function() {
+        var canvas = $("<canvas/>")[0];
+        // Keep a reference back to the manager
+        canvas.manager = this;
+        return canvas;
+    }
+});
+
+/**
+ * Generic cache that handles key/value pairs. Keys can be any object that can be
+ * converted to a String and compared.
+ */
+var Cache = Backbone.Model.extend({
+    defaults: {
+        num_elements: 20,
+        // Objects in cache; indexes into cache are strings of keys.
+        obj_cache: null,
+        // key_ary contains keys for objects in cache.
+        key_ary: null
+    },
+
+    initialize: function(options) {
+        this.clear();
+    },
+
+    /**
+     * Get an element from the cache using its key.
+     */
+    get_elt: function(key) {
+        var obj_cache = this.attributes.obj_cache,
+            key_ary = this.attributes.key_ary,
+            key_str = key.toString(),
+            // Use _.findIndex (not _.indexOf) so that keys are compared by their string form.
+            index = _.findIndex(key_ary, function(k) {
+                return k.toString() === key_str;
+            });
+
+        // Update cache.
+        if (index !== -1) {
+            // Object is in cache, so update it.
+            if (obj_cache[key_str].stale) {
+                // Object is stale: remove key and object.
+                key_ary.splice(index, 1);
+                delete obj_cache[key_str];
+            }
+            else {
+                // Move key to back because it is most recently used.
+                this.move_key_to_end(key, index);
+            }
+        }
+
+        return obj_cache[key_str];
+    },
+
+    /**
+     * Put an element into the cache.
+     */
+    set_elt: function(key, value) {
+        var obj_cache = this.attributes.obj_cache,
+            key_ary = this.attributes.key_ary,
+            key_str = key.toString(),
+            num_elements = this.attributes.num_elements;
+
+        // Update keys, objects.
+        if (!obj_cache[key_str]) {
+            // Add object to cache.
+
+            if (key_ary.length >= num_elements) {
+                // Cache full, so remove first element.
+                var deleted_key = key_ary.shift();
+                delete obj_cache[deleted_key.toString()];
+            }
+
+            // Add key.
+            key_ary.push(key);
+        }
+
+        // Add object.
+        obj_cache[key_str] = value;
+        return value;
+    },
+
+    /**
+     * Move key to end of cache. Keys are removed from the front, so moving a key to the end
+     * delays the key's removal.
+     */
+    move_key_to_end: function(key, index) {
+        this.attributes.key_ary.splice(index, 1);
+        this.attributes.key_ary.push(key);
+    },
+
+    /**
+     * Clear all elements from the cache.
+     */
+    clear: function() {
+        this.attributes.obj_cache = {};
+        this.attributes.key_ary = [];
+    },
+
+    /** Returns the number of elements in the cache. */
+    size: function() {
+        return this.attributes.key_ary.length;
+    },
+
+    /** Returns key most recently added to cache. */
+    most_recently_added: function() {
+        return this.size() === 0 ? null :
+               // Most recent key is at the end of key array.
+               this.attributes.key_ary[this.attributes.key_ary.length - 1];
+    }
+});
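+
+// Illustrative usage sketch (not part of upstream): keys are compared by
+// string value and the least recently used key is evicted first.
+//
+//   var cache = new Cache({ num_elements: 2 });
+//   cache.set_elt('a', 1);
+//   cache.set_elt('b', 2);
+//   cache.get_elt('a');      // 'a' becomes most recently used
+//   cache.set_elt('c', 3);   // evicts 'b', the least recently used key
+//   cache.size();            // --> 2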
+
+/**
+ * Data manager for genomic data. Data is connected to and queryable by genomic regions.
+ */
+var GenomeDataManager = Cache.extend({
+    defaults: _.extend({}, Cache.prototype.defaults, {
+        dataset: null,
+        genome: null,
+        init_data: null,
+        min_region_size: 200,
+        filters_manager: null,
+        data_type: "data",
+        data_mode_compatible: function(entry, mode) { return true; },
+        can_subset: function(entry) { return false; }
+    }),
+
+    /**
+     * Initialization.
+     */
+    initialize: function(options) {
+        Cache.prototype.initialize.call(this);
+
+        // Set initial entries in data manager.
+        var initial_entries = this.get('init_data');
+        if (initial_entries) {
+            this.add_data(initial_entries);
+        }
+    },
+
+    /**
+     * Add data entries to manager; each entry should be a dict with attributes region (key), data, and data_type.
+     * If necessary, manager size is increased to hold all data.
+     */
+    add_data: function(entries) {
+        // Increase size to accommodate all entries.
+        if (this.get('num_elements') < entries.length) {
+            this.set('num_elements', entries.length);
+        }
+
+        // Put data into manager.
+        var self = this;
+        _.each(entries, function(entry) {
+            self.set_data(entry.region, entry);
+        });
+    },
+
+    /**
+     * Returns deferred that resolves to true when dataset is ready (or false if dataset
+     * cannot be used).
+     */
+    data_is_ready: function() {
+        var dataset = this.get('dataset'),
+            ready_deferred = $.Deferred(),
+            // If requesting raw data, query dataset state; if requesting (converted) data,
+            // need to query converted datasets state.
+            query_type = (this.get('data_type') === 'raw_data' ? 'state' :
+                          this.get('data_type') === 'data' ? 'converted_datasets_state' : "error" ),
+            ss_deferred = new util_mod.ServerStateDeferred({
+                ajax_settings: {
+                    url: this.get('dataset').url(),
+                    data: {
+                        hda_ldda: dataset.get('hda_ldda'),
+                        data_type: query_type
+                    },
+                    dataType: "json"
+                },
+                interval: 5000,
+                success_fn: function(response) { return response !== "pending"; }
+            });
+
+        $.when(ss_deferred.go()).then(function(response) {
+            ready_deferred.resolve(response === "ok" || response === "data" );
+        });
+        return ready_deferred;
+    },
+
+    /**
+     * Perform a feature search from server; returns Deferred object that resolves when data is available.
+     */
+    search_features: function(query) {
+        var dataset = this.get('dataset'),
+            params = {
+                query: query,
+                hda_ldda: dataset.get('hda_ldda'),
+                data_type: 'features'
+            };
+        return $.getJSON(dataset.url(), params);
+    },
+
+    /**
+     * Loads data from the server and manages data entries. Adds a Deferred to the
+     * manager for the region; when data becomes available, replaces the Deferred
+     * with the data. Returns the Deferred that resolves when data is available.
+     */
+    load_data: function(region, mode, resolution, extra_params) {
+        // Setup data request params.
+        var dataset = this.get('dataset'),
+            params = {
+                        "data_type": this.get('data_type'),
+                        "chrom": region.get('chrom'),
+                        "low": region.get('start'),
+                        "high": region.get('end'),
+                        "mode": mode,
+                        "resolution": resolution,
+                        "hda_ldda": dataset.get('hda_ldda')
+                     };
+
+        $.extend(params, extra_params);
+
+        // Add track filters to params.
+        var filters_manager = this.get('filters_manager');
+        if (filters_manager) {
+            var filter_names = [];
+            var filters = filters_manager.filters;
+            for (var i = 0; i < filters.length; i++) {
+                filter_names.push(filters[i].name);
+            }
+            params.filter_cols = JSON.stringify(filter_names);
+        }
+
+        // Do request.
+        var manager = this,
+            entry = $.getJSON(dataset.url(), params, function (result) {
+                // Add region to the result.
+                result.region = region;
+                manager.set_data(region, result);
+            });
+
+        this.set_data(region, entry);
+        return entry;
+    },
+
+    /**
+     * Get data from dataset.
+     */
+    get_data: function(region, mode, resolution, extra_params) {
+        // Look for entry and return if it's a deferred or if data available is compatible with mode.
+        var entry = this.get_elt(region);
+        if ( entry &&
+             ( util_mod.is_deferred(entry) || this.get('data_mode_compatible')(entry, mode) ) ) {
+            return entry;
+        }
+
+        //
+        // Look in cache for data that can be used.
+        // TODO: this logic could be improved if the visualization knew whether
+        // the data was "index" or "data."
+        //
+        var key_ary = this.get('key_ary'),
+            obj_cache = this.get('obj_cache'),
+            entry_region, is_subregion;
+        for (var i = 0; i < key_ary.length; i++) {
+            entry_region = key_ary[i];
+
+            if (entry_region.contains(region)) {
+                is_subregion = true;
+
+                // This entry has data in the requested range. Return if data
+                // is compatible and can be subsetted.
+                entry = obj_cache[entry_region.toString()];
+                if ( util_mod.is_deferred(entry) ||
+                    ( this.get('data_mode_compatible')(entry, mode) && this.get('can_subset')(entry) ) ) {
+                    this.move_key_to_end(entry_region, i);
+
+                    // If there's data, subset it.
+                    if ( !util_mod.is_deferred(entry) ) {
+                        var subset_entry = this.subset_entry(entry, region);
+                        this.set_data(region, subset_entry);
+                        entry = subset_entry;
+                    }
+
+                    return entry;
+                }
+            }
+        }
+
+        // FIXME: There _may_ be instances where region is a subregion of another entry but cannot be
+        // subsetted. For these cases, do not increase the length because the region will never be
+        // found (and an infinite loop would occur).
+        // If needed, extend region to make it minimum size.
+        if (!is_subregion && region.length() < this.attributes.min_region_size) {
+            // IDEA: alternative heuristic is to find adjacent cache entry to region and use that to extend.
+            // This would prevent bad extensions when zooming in/out while still preserving the behavior
+            // below.
+
+            // Use copy of region to avoid changing actual region.
+            region = region.copy();
+
+            // Use heuristic to extend region: extend relative to last data request.
+            var last_request = this.most_recently_added();
+            if (!last_request || (region.get('start') > last_request.get('start'))) {
+                // This request is after the last request, so extend right.
+                region.set('end', region.get('start') + this.attributes.min_region_size);
+            }
+            else {
+                // This request is before the last request, so extend left.
+                region.set('start', region.get('end') - this.attributes.min_region_size);
+            }
+
+            // Trim region to avoid invalid coordinates.
+            region.set('genome', this.attributes.genome);
+            region.trim();
+        }
+
+        return this.load_data(region, mode, resolution, extra_params);
+    },
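+
+    // Illustrative walk-through (not part of upstream): given a cached,
+    // subsettable bigwig entry for chr1:0-10000, get_data() for chr1:2000-3000
+    // hits the containment branch above and returns a subset_entry() result
+    // without a new request; a complete miss falls through to load_data().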
+
+    /**
+     * Alias for set_elt for readability.
+     */
+    set_data: function(region, entry) {
+        this.set_elt(region, entry);
+    },
+
+    /** "Deep" data request; used as a parameter for DataManager.get_more_data() */
+    DEEP_DATA_REQ: "deep",
+
+    /** "Broad" data request; used as a parameter for DataManager.get_more_data() */
+    BROAD_DATA_REQ: "breadth",
+
+    /**
+     * Gets more data for a region using either a depth-first or a breadth-first approach.
+     */
+    get_more_data: function(region, mode, resolution, extra_params, req_type) {
+        var cur_data = this._mark_stale(region);
+        if (!(cur_data && this.get('data_mode_compatible')(cur_data, mode))) {
+            console.log('ERROR: problem with getting more data: current data is not compatible');
+            return;
+        }
+
+        //
+        // Set parameters based on request type.
+        //
+        var query_low = region.get('start');
+        if (req_type === this.DEEP_DATA_REQ) {
+            // Use same interval but set start_val to skip data that's already in cur_data.
+            $.extend(extra_params, {start_val: cur_data.data.length + 1});
+        }
+        else if (req_type === this.BROAD_DATA_REQ) {
+            // To get past an area of extreme feature depth, set query low to be after either
+            // (a) the maximum high or HACK/FIXME (b) the end of the last feature returned.
+            query_low = (cur_data.max_high ? cur_data.max_high : cur_data.data[cur_data.data.length - 1][2]) + 1;
+        }
+        var query_region = region.copy().set('start', query_low);
+
+        //
+        // Get additional data, append to current data, and set new data. Use a custom deferred object
+        // to signal when new data is available.
+        //
+        var data_manager = this,
+            new_data_request = this.load_data(query_region, mode, resolution, extra_params),
+            new_data_available = $.Deferred();
+        // load_data sets the cache entry to new_data_request; use a custom deferred so
+        // that the resolved signal and data cover all data, not just the new data.
+        this.set_data(region, new_data_available);
+        $.when(new_data_request).then(function(result) {
+            // Update data and message.
+            if (result.data) {
+                result.data = cur_data.data.concat(result.data);
+                if (result.max_low) {
+                    result.max_low = cur_data.max_low;
+                }
+                if (result.message) {
+                    // HACK: replace number in message with current data length. Works but is ugly.
+                    result.message = result.message.replace(/[0-9]+/, result.data.length);
+                }
+            }
+            data_manager.set_data(region, result);
+            new_data_available.resolve(result);
+        });
+        return new_data_available;
+    },
+
+    /**
+     * Returns true if more detailed data can be obtained for entry.
+     */
+    can_get_more_detailed_data: function(region) {
+        var cur_data = this.get_elt(region);
+
+        // More detailed data can only be obtained for bigwig data with fewer than 8000
+        // data points; the summary tree returns *way* too much data, and 8000 data points ~ 500KB.
+        return (cur_data.dataset_type === 'bigwig' && cur_data.data.length < 8000);
+    },
+
+    /**
+     * Returns more detailed data for an entry.
+     */
+    get_more_detailed_data: function(region, mode, resolution, detail_multiplier, extra_params) {
+        // Mark current entry as stale.
+        var cur_data = this._mark_stale(region);
+        if (!cur_data) {
+            console.log("ERROR getting more detailed data: no current data");
+            return;
+        }
+
+        if (!extra_params) { extra_params = {}; }
+
+        // Use additional parameters to get more detailed data.
+        if (cur_data.dataset_type === 'bigwig') {
+            // FIXME: constant should go somewhere.
+            extra_params.num_samples = 1000 * detail_multiplier;
+        }
+
+        return this.load_data(region, mode, resolution, extra_params);
+    },
+
+    /**
+     * Marks cache data as stale.
+     */
+    _mark_stale: function(region) {
+        var entry = this.get_elt(region);
+        if (!entry) {
+            console.log("ERROR: no data to mark as stale: ", this.get('dataset'), region.toString());
+            // Return early: setting .stale on a missing entry would throw a TypeError.
+            return entry;
+        }
+        entry.stale = true;
+        return entry;
+    },
+
+    /**
+     * Returns an array of data with each entry representing one chromosome/contig
+     * of data or, if data is not available, returns a Deferred that resolves to the
+     * data when it becomes available.
+     */
+    get_genome_wide_data: function(genome) {
+        // -- Get all data. --
+
+        var self = this,
+            all_data_available = true,
+
+            //  Map chromosome info into genome data.
+            gw_data = _.map(genome.get('chroms_info').chrom_info, function(chrom_info) {
+                var chrom_data = self.get_elt(
+                    new GenomeRegion({
+                        chrom: chrom_info.chrom,
+                        start: 0,
+                        end: chrom_info.len
+                    })
+                );
+
+                // Set flag if data is not available.
+                if (!chrom_data) { all_data_available = false; }
+
+                return chrom_data;
+            });
+
+        // -- If all data is available, return it. --
+        if (all_data_available) {
+            return gw_data;
+        }
+
+        // -- All data is not available, so load from server. --
+
+        var deferred = $.Deferred();
+        $.getJSON(this.get('dataset').url(), { data_type: 'genome_data' }, function(genome_wide_data) {
+            self.add_data(genome_wide_data.data);
+            deferred.resolve(genome_wide_data.data);
+        });
+
+        return deferred;
+    },
+
+    /**
+     * Returns entry with only data in the subregion.
+     */
+    subset_entry: function(entry, subregion) {
+        // Dictionary from entry type to function for subsetting data.
+        var subset_fns = {
+            bigwig: function(data, subregion) {
+                return _.filter(data, function(data_point) {
+                    return data_point[0] >= subregion.get('start') &&
+                           data_point[0] <= subregion.get('end');
+                });
+            },
+            refseq: function(data, subregion) {
+                var seq_start = subregion.get('start') - entry.region.get('start');
+                return entry.data.slice(seq_start, seq_start + subregion.length());
+            }
+        };
+
+        // Subset entry if there is a function for subsetting and regions are not the same.
+        var subregion_data = entry.data;
+        if (!entry.region.same(subregion) && entry.dataset_type in subset_fns) {
+            subregion_data = subset_fns[entry.dataset_type](entry.data, subregion);
+        }
+
+        // Return entry with subregion's data.
+        return {
+            region: subregion,
+            data: subregion_data,
+            dataset_type: entry.dataset_type
+        };
+    }
+});
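+
+// Illustrative usage sketch (not part of upstream; 'dataset' stands for a
+// hypothetical data_mod.Dataset). get_data() returns either cached data or a
+// jQuery Deferred, so callers should go through $.when():
+//
+//   var manager = new GenomeDataManager({ dataset: dataset }),
+//       region = new GenomeRegion({ from_str: 'chr1:1000-2000' });
+//   $.when(manager.get_data(region, 'Auto', 1000, {})).then(function(result) {
+//       console.log(result.dataset_type, result.data.length);
+//   });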
+
+var GenomeReferenceDataManager = GenomeDataManager.extend({
+    initialize: function(options) {
+        // Initialize the parent so that the cache (obj_cache/key_ary) is set up.
+        GenomeDataManager.prototype.initialize.call(this, options);
+
+        // Use a generic object in place of a dataset and set urlRoot to fetch data.
+        var dataset_placeholder = new Backbone.Model();
+        dataset_placeholder.urlRoot = options.data_url;
+        this.set('dataset', dataset_placeholder);
+    },
+
+    load_data: function(region, mode, resolution, extra_params) {
+        // Fetch data if region is not too large.
+        return ( region.length() <= 100000 ?
+                 GenomeDataManager.prototype.load_data.call(this, region, mode, resolution, extra_params) :
+                 { data: null, region: region } );
+    }
+});
+
+/**
+ * A genome build.
+ */
+var Genome = Backbone.Model.extend({
+    defaults: {
+        name: null,
+        key: null,
+        chroms_info: null
+    },
+
+    initialize: function(options) {
+        this.id = options.dbkey;
+    },
+
+    /**
+     * Shorthand for getting to chromosome information.
+     */
+    get_chroms_info: function() {
+        return this.attributes.chroms_info.chrom_info;
+    },
+
+    /**
+     * Returns a GenomeRegion object denoting a complete chromosome.
+     */
+    get_chrom_region: function(chr_name) {
+        // FIXME: use findWhere in underscore 1.4
+        var chrom_info = _.find(this.get_chroms_info(), function(chrom_info) {
+            return chrom_info.chrom === chr_name;
+        });
+        return new GenomeRegion({
+            chrom: chrom_info.chrom,
+            end: chrom_info.len
+        });
+    },
+
+    /** Returns the length of a chromosome. */
+    get_chrom_len: function(chr_name) {
+        // FIXME: use findWhere in underscore 1.4
+        return _.find(this.get_chroms_info(), function(chrom_info) {
+            return chrom_info.chrom === chr_name;
+        }).len;
+    }
+});
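+
+// Illustrative usage sketch (not part of upstream; chroms_info mirrors the
+// shape expected by get_chroms_info() above):
+//
+//   var genome = new Genome({
+//       dbkey: 'hg19',
+//       chroms_info: { chrom_info: [ { chrom: 'chr1', len: 249250621 } ] }
+//   });
+//   genome.get_chrom_len('chr1');                // --> 249250621
+//   genome.get_chrom_region('chr1').toString();  // --> 'chr1:0-249250621'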
+
+/**
+ * A genomic region.
+ */
+var GenomeRegion = Backbone.Model.extend({
+    defaults: {
+        chrom: null,
+        start: 0,
+        end: 0,
+        str_val: null,
+        genome: null
+    },
+
+    /**
+     * Returns true if this region is the same as a given region.
+     * The genome attribute is not currently compared.
+     */
+    same: function(region) {
+        return this.attributes.chrom === region.get('chrom') &&
+               this.attributes.start === region.get('start') &&
+               this.attributes.end === region.get('end');
+    },
+
+    /**
+     * If from_str is specified, use it to initialize attributes.
+     */
+    initialize: function(options) {
+        if (options.from_str) {
+            var pieces = options.from_str.split(':'),
+                chrom = pieces[0],
+                start_end = pieces[1].split('-');
+            this.set({
+                chrom: chrom,
+                start: parseInt(start_end[0], 10),
+                end: parseInt(start_end[1], 10)
+            });
+        }
+
+        // Keep a copy of region's string value for fast lookup.
+        this.attributes.str_val = this.get('chrom') + ":" + this.get('start') + "-" + this.get('end');
+
+        // Set str_val on attribute change.
+        this.on('change', function() {
+            this.attributes.str_val = this.get('chrom') + ":" + this.get('start') + "-" + this.get('end');
+        }, this);
+    },
+
+    copy: function() {
+        return new GenomeRegion({
+            chrom: this.get('chrom'),
+            start: this.get('start'),
+            end: this.get('end')
+        });
+    },
+
+    length: function() {
+        return this.get('end') - this.get('start');
+    },
+
+    /** Returns region in canonical form chrom:start-end */
+    toString: function() {
+        return this.attributes.str_val;
+    },
+
+    toJSON: function() {
+        return {
+            chrom: this.get('chrom'),
+            start: this.get('start'),
+            end: this.get('end')
+        };
+    },
+
+    /**
+     * Compute the type of overlap between this region and another region. The overlap is computed relative to the given/second region;
+     * hence, OVERLAP_START indicates that the first region overlaps the start (but not the end) of the second region.
+     */
+    compute_overlap: function(a_region) {
+        var first_chrom = this.get('chrom'), second_chrom = a_region.get('chrom'),
+            first_start = this.get('start'), second_start = a_region.get('start'),
+            first_end = this.get('end'), second_end = a_region.get('end'),
+            overlap;
+
+        // Compare chroms.
+        if (first_chrom && second_chrom && first_chrom !== second_chrom) {
+            return GenomeRegion.overlap_results.DIF_CHROMS;
+        }
+
+        // Compare regions.
+        if (first_start < second_start) {
+            if (first_end < second_start) {
+                overlap = GenomeRegion.overlap_results.BEFORE;
+            }
+            else if (first_end < second_end) {
+                overlap = GenomeRegion.overlap_results.OVERLAP_START;
+            }
+            else { // first_end >= second_end
+                overlap = GenomeRegion.overlap_results.CONTAINS;
+            }
+        }
+        else if (first_start > second_start) {
+            if (first_start > second_end) {
+                overlap = GenomeRegion.overlap_results.AFTER;
+            }
+            else if (first_end <= second_end) {
+                overlap = GenomeRegion.overlap_results.CONTAINED_BY;
+            }
+            else {
+                overlap = GenomeRegion.overlap_results.OVERLAP_END;
+            }
+        }
+        else { // first_start === second_start
+            overlap = (first_end >= second_end ?
+                       GenomeRegion.overlap_results.CONTAINS :
+                       GenomeRegion.overlap_results.CONTAINED_BY);
+        }
+
+        return overlap;
+    },
+
+    /**
+     * Trim a region to match genome's constraints.
+     */
+    trim: function() {
+        // Assume that all chromosomes/contigs start at 0.
+        if (this.attributes.start < 0) {
+            this.attributes.start = 0;
+        }
+
+        // Only try to trim the end if genome is set.
+        if (this.attributes.genome) {
+            var chrom_len = this.attributes.genome.get_chrom_len(this.attributes.chrom);
+            if (this.attributes.end > chrom_len) {
+                this.attributes.end = chrom_len - 1;
+            }
+        }
+
+        return this;
+    },
+
+    /**
+     * Returns true if this region contains a given region.
+     */
+    contains: function(a_region) {
+        return this.compute_overlap(a_region) === GenomeRegion.overlap_results.CONTAINS;
+    },
+
+    /**
+     * Returns true if regions overlap.
+     */
+    overlaps: function(a_region) {
+        return _.intersection( [this.compute_overlap(a_region)],
+                               [GenomeRegion.overlap_results.DIF_CHROMS, GenomeRegion.overlap_results.BEFORE, GenomeRegion.overlap_results.AFTER] ).length === 0;
+    }
+},
+{
+    overlap_results: {
+        DIF_CHROMS: 1000,
+        BEFORE: 1001,
+        CONTAINS: 1002,
+        OVERLAP_START: 1003,
+        OVERLAP_END: 1004,
+        CONTAINED_BY: 1005,
+        AFTER: 1006
+    }
+});
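+
+// Illustrative usage sketch (not part of upstream): overlap is computed
+// relative to the region passed in, per compute_overlap() above.
+//
+//   var a = new GenomeRegion({ from_str: 'chr1:100-500' }),
+//       b = new GenomeRegion({ from_str: 'chr1:200-300' });
+//   a.contains(b);                                                  // --> true
+//   a.compute_overlap(b) === GenomeRegion.overlap_results.CONTAINS; // --> true
+//   b.overlaps(a);                                                  // --> true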
+
+var GenomeRegionCollection = Backbone.Collection.extend({
+    model: GenomeRegion
+});
+
+/**
+ * A genome browser bookmark.
+ */
+var BrowserBookmark = Backbone.Model.extend({
+    defaults: {
+        region: null,
+        note: ''
+    },
+
+    initialize: function(options) {
+        this.set('region', new GenomeRegion(options.region));
+    }
+});
+
+/**
+ * Bookmarks collection.
+ */
+var BrowserBookmarkCollection = Backbone.Collection.extend({
+    model: BrowserBookmark
+});
+
+/**
+ * A track of data in a genome visualization.
+ */
+// TODO: rename to Track and merge with Trackster's Track object.
+var BackboneTrack = Backbone.Model.extend(CustomToJSON).extend({
+    defaults: {
+        mode: 'Auto'
+    },
+
+    initialize: function(options) {
+        this.set('dataset', new data_mod.Dataset(options.dataset));
+
+        // -- Set up config settings. --
+        var models =  [
+            { key: 'name', default_value: this.get('dataset').get('name') },
+            { key: 'color' },
+            { key: 'min_value', label: 'Min Value', type: 'float', default_value: 0 },
+            { key: 'max_value', label: 'Max Value', type: 'float', default_value: 1 }
+        ];
+
+        this.set('config', config_mod.ConfigSettingCollection.from_models_and_saved_values(models, options.prefs));
+
+        // -- Set up data manager. --
+        var preloaded_data = this.get('preloaded_data');
+        if (preloaded_data) {
+            preloaded_data = preloaded_data.data;
+        }
+        else {
+            preloaded_data = [];
+        }
+        this.set('data_manager', new GenomeDataManager({
+            dataset: this.get('dataset'),
+            init_data: preloaded_data
+        }));
+    }
+},
+{
+    // This definition matches that produced by to_dict() methods in tracks.js
+    to_json_keys: [
+        'track_type',
+        'dataset',
+        'prefs',
+        'mode',
+        'filters',
+        'tool_state'
+    ],
+    to_json_mappers: {
+        prefs: function(p, self) {
+            if (_.size(p) === 0) {
+                p = {
+                    name: self.get('config').get('name').get('value'),
+                    color: self.get('config').get('color').get('value')
+                };
+            }
+            return p;
+        },
+        dataset: function(d) {
+            return {
+                id: d.id,
+                hda_ldda: d.get('hda_ldda')
+            };
+        }
+    }
+});
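+
+// Illustrative usage sketch (not part of upstream; 'hda' stands for a
+// hypothetical dataset dict with id, hda_ldda and name). CustomToJSON
+// serializes only to_json_keys, applying the mappers above:
+//
+//   var track = new BackboneTrack({ dataset: hda, track_type: 'LineTrack' });
+//   JSON.stringify(track);  // yields track_type, dataset {id, hda_ldda},
+//                           // prefs {name, color}, mode, filters, tool_state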
+
+var BackboneTrackCollection = Backbone.Collection.extend({
+    model: BackboneTrack
+});
+
+/**
+ * A visualization.
+ */
+var Visualization = Backbone.Model.extend({
+    defaults: {
+        title: '',
+        type: ''
+    },
+
+    urlRoot: Galaxy.root + "api/visualizations",
+
+    /**
+     * POSTs the visualization's JSON to its URL using the parameter 'vis_json'.
+     * Note: This is necessary because (a) Galaxy requires keyword args and
+     * (b) Galaxy does not currently handle PUT.
+     */
+    save: function() {
+        return $.ajax({
+            url: this.url(),
+            type: "POST",
+            dataType: "json",
+            data: {
+                vis_json: JSON.stringify(this)
+            }
+        });
+    }
+});
+
+/**
+ * A visualization of genome data.
+ */
+var GenomeVisualization = Visualization.extend(CustomToJSON).extend({
+    defaults: _.extend({}, Visualization.prototype.defaults, {
+        dbkey: '',
+        drawables: null,
+        bookmarks: null,
+        viewport: null
+    }),
+
+    initialize: function(options) {
+        // Replace drawables with tracks.
+        this.set('drawables', new BackboneTrackCollection(options.tracks));
+
+        var models = [];
+        this.set('config', config_mod.ConfigSettingCollection.from_models_and_saved_values(models, options.prefs));
+
+        // Clear track and data definitions to avoid storing large objects.
+        this.unset('tracks');
+        this.get('drawables').each(function(d) {
+            d.unset('preloaded_data');
+        });
+    },
+
+    /**
+     * Add a track or array of tracks to the visualization.
+     */
+    add_tracks: function(tracks) {
+        this.get('drawables').add(tracks);
+    }
+},
+{
+    // This definition matches that produced by to_dict() methods in tracks.js
+    to_json_keys: [
+        'view',
+        'viewport',
+        'bookmarks'
+    ],
+
+    to_json_mappers: {
+        'view': function(dummy, self) {
+            return {
+                obj_type: 'View',
+                prefs: {
+                    name: self.get('title'),
+                    content_visible: true
+                },
+                drawables: self.get('drawables')
+            };
+        }
+    }
+}
+);
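+
+// Illustrative usage sketch (not part of upstream): save() POSTs the
+// CustomToJSON serialization (to_json_keys/to_json_mappers above) as the
+// 'vis_json' parameter.
+//
+//   var viz = new GenomeVisualization({ title: 'My viz', dbkey: 'hg19',
+//                                       tracks: [], prefs: {} });
+//   $.when(viz.save()).then(function(response) {
+//       console.log('saved visualization', response);
+//   });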
+
+/**
+ * -- Routers --
+ */
+
+/**
+ * Router for track browser.
+ */
+var TrackBrowserRouter = Backbone.Router.extend({
+    initialize: function(options) {
+        this.view = options.view;
+
+        // Can't put regular expression in routes dictionary.
+        // NOTE: parentheses are used to denote parameters returned to callback.
+        this.route(/([\w]+)$/, 'change_location');
+        this.route(/([\w\+]+\:[\d,]+-[\d,]+)$/, 'change_location');
+
+        // Handle navigate events from view.
+        var self = this;
+        self.view.on("navigate", function(new_loc) {
+            self.navigate(new_loc);
+        });
+    },
+
+    change_location: function(new_loc) {
+        this.view.go_to(new_loc);
+    }
+});
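+
+// Illustrative usage sketch (not part of upstream; 'browser_view' stands for
+// a hypothetical view with go_to()). The two routes above map URL fragments
+// such as '#chr1' (a chromosome) and '#chr1:1,000-2,000' (a location) onto
+// view.go_to():
+//
+//   var router = new TrackBrowserRouter({ view: browser_view });
+//   Backbone.history.start();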
+
+return {
+    BackboneTrack: BackboneTrack,
+    BrowserBookmark: BrowserBookmark,
+    BrowserBookmarkCollection: BrowserBookmarkCollection,
+    Cache: Cache,
+    CanvasManager: CanvasManager,
+    Genome: Genome,
+    GenomeDataManager: GenomeDataManager,
+    GenomeRegion: GenomeRegion,
+    GenomeRegionCollection: GenomeRegionCollection,
+    GenomeVisualization: GenomeVisualization,
+    GenomeReferenceDataManager: GenomeReferenceDataManager,
+    TrackBrowserRouter: TrackBrowserRouter,
+    Visualization: Visualization,
+    select_datasets: select_datasets
+};
+
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/viz/viz_views.js b/client/galaxy/scripts/viz/viz_views.js
new file mode 100644
index 0000000..f277aa2
--- /dev/null
+++ b/client/galaxy/scripts/viz/viz_views.js
@@ -0,0 +1,100 @@
+define( ["libs/underscore"], function(_) {
+
+/**
+ * View for track/group header.
+ */
+var TrackHeaderView = Backbone.View.extend({
+    className: 'track-header',
+
+    initialize: function() {
+        // Watch and update name changes.
+        this.model.config.get('name').on('change:value', this.update_name, this);
+        this.render();
+    },
+
+    render: function() {
+        this.$el.append($("<div/>").addClass(this.model.drag_handle_class));
+        this.$el.append($("<div/>").addClass("track-name")
+                                   .text(this.model.config.get_value('name')));
+
+        // Icons container.
+        this.action_icons = {};
+        this.render_action_icons();
+
+        // Suppress double clicks in header so that they do not impact viz under header.
+        this.$el.dblclick( function(e) { e.stopPropagation(); } );
+        
+        // Needed for floating elts in header.
+        this.$el.append( $("<div style='clear: both'/>") );
+    },
+
+    update_name: function() {
+        this.$el.find('.track-name').text(this.model.config.get_value('name'));
+    },
+
+    render_action_icons: function() {
+        var self = this;
+        this.icons_div = $("<div/>").addClass('track-icons').hide().appendTo(this.$el);
+        _.each(this.model.action_icons_def, function(icon_dict) {
+            self.add_action_icon(icon_dict.name, icon_dict.title, icon_dict.css_class, 
+                                 icon_dict.on_click_fn, icon_dict.prepend, icon_dict.hide);
+        });
+
+        // Set up behavior for modes popup.
+        this.set_display_modes(this.model.display_modes);
+    },
+
+    /**
+     * Add an action icon to this object. Appends icon unless prepend flag is specified.
+     */
+    add_action_icon: function(name, title, css_class, on_click_fn, prepend, hide) {
+        var self = this;
+        this.action_icons[name] = $("<a/>").attr("title", title)
+                                           .addClass("icon-button").addClass(css_class).tooltip()
+                                           .click( function() { on_click_fn(self.model); } )
+                                           .appendTo(this.icons_div);
+        if (hide) {
+            this.action_icons[name].hide();
+        }
+    },
+
+    /**
+     * Set track's modes and update mode icon popup.
+     */
+    set_display_modes: function(new_modes, init_mode) {
+        if (!new_modes) { return; }
+
+        // HACK: move this out of view and into track.
+
+        // Set modes, init mode.
+        this.model.display_modes = new_modes;
+        this.model.mode = (init_mode || this.model.config.get_value('mode') || this.model.display_modes[0]);
+        
+        this.action_icons.mode_icon.attr("title", "Set display mode (now: " + this.model.mode + ")");
+
+        // Setup popup menu for changing modes.
+        var self = this,
+            track = this.model,
+            mode_mapping = {};
+        for (var i = 0, len = track.display_modes.length; i < len; i++) {
+            var mode = track.display_modes[i];
+            mode_mapping[mode] = function(mode) {
+                return function() { 
+                    track.change_mode(mode);
+                    // HACK: the popup menu messes with the track's hover event, so manually show/hide
+                    // icons div for now.
+                    //self.icons_div.show(); 
+                    //track.container_div.mouseleave(function() { track.icons_div.hide(); } ); 
+                };
+            }(mode);
+        }
+
+        make_popupmenu(this.action_icons.mode_icon, mode_mapping);
+    }
+});
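+
+// Illustrative usage sketch (not part of upstream; 'track' stands for a
+// hypothetical track model exposing config, action_icons_def and
+// display_modes as used above):
+//
+//   var header = new TrackHeaderView({ model: track });
+//   track.config.get('name').set('value', 'Renamed');  // header text updates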
+
+return {
+    TrackHeaderView: TrackHeaderView
+};
+
+});
\ No newline at end of file
diff --git a/client/galaxy/style/blue_colors.ini b/client/galaxy/style/blue_colors.ini
new file mode 100644
index 0000000..85ddd39
--- /dev/null
+++ b/client/galaxy/style/blue_colors.ini
@@ -0,0 +1,82 @@
+base_text=#303030
+base_bg_top=#FFFFFF
+base_bg_bottom=#FFFFFF
+link_text=#303030
+header_text=#023858
+#menu_bg_top=#DADFEF
+menu_bg_over=#DFE5F9
+menu_bg_hatch=-
+#menu_bg_hatch=#AAAAFF
+# Forms
+form_title_bg_top=#ebd9b2
+form_title_bg_bottom=#ebd9b2
+form_title_bg_hatch=-
+form_border=#d8b365
+#form_body_bg=#FFFFFF
+form_body_bg_top=#FFFFFF
+form_body_bg_bottom=#FFFFFF
+odd_row_bg=#DADFEF
+# Messages
+error_message_border=#AA6666
+error_message_bg=#FFCCCC
+warn_message_border=#AAAA66
+warn_message_bg=#FFFFCC
+done_message_border=#66AA66
+done_message_bg=#CCFFCC
+info_message_border=#6666AA
+info_message_bg=#d9edf7
+# Tables
+table_header_bg=#ebd9b2
+table_row_bg=white
+table_border=#d8b365
+# Footers
+footer_bg=#023858
+footer_title_bg=#023858
+footer_title_hatch=#000000
+# History - these are actually Job states (used in the reports webapp), but the same styles are used for Tool states in the community webapp
+history_new_border=#A86030
+history_new_bg=#FFB030
+history_upload_border=#990099
+history_upload_bg=#D090D0
+history_waiting_border=#A86030
+history_waiting_bg=#E8C060
+history_deleted_border=#330066
+history_deleted_bg=#3399FF
+history_error_border=#AA6666
+history_error_bg=#FFCCCC
+history_paused_border=#6666AA
+history_paused_bg=#d9edf7
+history_running_border=#AAAA66
+history_running_bg=#FFFFCC
+history_ok_border=#66AA66
+history_ok_bg=#CCFFCC
+history_upload_border=#6666AA
+history_upload_bg=#CCCCFF
+history_queued_border=#888888
+history_queued_bg=#EEEEEE
+peek_table_header=#023858
+# Masthead
+masthead_bg=#2C3143
+masthead_bg_highlight=#333
+masthead_text=#eeeeee
+masthead_bg_hatch=-
+masthead_link=#eeeeee
+masthead_active_tab_bg=#222532
+# Quota meter
+quota_meter_bg=#C1C9E5
+quota_meter_bar=#969DB3
+quota_meter_warn_bar=#FFB400
+quota_meter_error_bar=#FF4343
+quota_meter_text=#000
+# ---- Layout -----------------------------------------------------------------
+# Overall background color (including space between panels)
+layout_bg=#eee
+# Line underneath masthead
+layout_masthead_border=#444
+# Borders around panels
+layout_border=#999
+# Hover color when mouse over drag bars (panel resize)
+layout_hover=#AAAAEE
+# Gradient for the panel title backgrounds
+panel_header_bg_top=#f5f5f5
+panel_header_bg_bottom=#cccccc
diff --git a/client/galaxy/style/less/autocomplete_tagging.less b/client/galaxy/style/less/autocomplete_tagging.less
new file mode 100644
index 0000000..0518fb3
--- /dev/null
+++ b/client/galaxy/style/less/autocomplete_tagging.less
@@ -0,0 +1,148 @@
+/****************************************************************************/
+/* JQuery autocomplete code                                                 */
+/****************************************************************************/
+
+.ac_results {
+	padding: 0px;
+	border: 1px solid black;
+	background-color: white;
+	overflow: hidden;
+	z-index: 99999;
+}
+
+.ac_results ul {
+	width: 100%;
+	list-style-position: outside;
+	list-style: none;
+	padding: 0;
+	margin: 0;
+}
+
+.ac_results li {
+	padding: 2px 5px;
+	cursor: default;
+	display: block;
+	/*
+	if the width is set to 100%, a horizontal scrollbar will appear
+	when scroll mode is used
+	*/
+	/*width: 100%;*/
+    /* font: menu; */
+	font-size: 12px;
+	/*
+	important: if line-height is not set, or is set in relative units,
+	scrolling will break in Firefox
+	*/
+	line-height: 16px;
+	overflow: hidden;
+}
+
+.ac_loading {
+	background: white url('indicator.gif') right center no-repeat;
+}
+
+.ac_odd {
+	background-color: #fff; /* #eee */
+	margin-left: 0.3em;
+}
+
+.ac_even {
+    margin-left: 0.3em;
+}
+
+.ac_over {
+	background-color: #0A246A;
+	color: white;
+}
+
+.ac_header {
+    font-style: normal;
+    color: gray;
+    border-bottom: 0.1em solid gray;
+}
+
+/****************************************************************************/
+/* Custom code for supporting tags                                          */
+/****************************************************************************/
+.tag-area {
+    width: 100%;
+}
+
+.individual-tag-area
+{
+    cursor: pointer;
+    border: 1px dotted transparent;
+}
+
+.individual-tag-area:hover 
+{
+    border: 1px dotted #999999;
+}
+
+.active-tag-area {
+    background-color: white;
+}
+
+.toggle-link 
+{
+    font-weight: normal;  
+    padding: 0.3em;
+    margin-bottom: 1em;
+    width: 100%;
+    padding: 0.2em 0em 0.2em 0em;
+}
+
+.tag-button {
+    width: auto;
+    color: #444;
+    text-decoration: none;
+    display: inline-block;
+    cursor: pointer;
+    margin: 0.2em;
+    border: solid #bbb 1px;
+    padding: 0.1em 0.5em 0.1em 0.5em;
+    -moz-border-radius: .5em;
+    -webkit-border-radius: .5em;
+    border-radius: .5em;
+    background:#eee;
+}
+
+.tag-button img
+{
+    padding-left: 0.4em;
+}
+
+.tag-button .tag-name:hover
+{
+    color: black;
+}
+
+.add-tag-button
+{
+    margin-bottom: 0.3em;
+    vertical-align: middle;
+    padding: 0.3em;
+}
+
+.add-tag-button:hover
+{
+    cursor: pointer;
+}
+
+.tag-input {
+    vertical-align: bottom;
+    border: none;
+    outline: none;
+    resize: none;
+}
+
+.delete-tag-img
+{
+    margin-left: 0.3em;
+}
+
+.active-tag-name
+{
+    font-weight: bold;
+}
diff --git a/client/galaxy/style/less/base.less b/client/galaxy/style/less/base.less
new file mode 100644
index 0000000..3c42e9c
--- /dev/null
+++ b/client/galaxy/style/less/base.less
@@ -0,0 +1,1771 @@
+// bootstrap.less modified for Galaxy
+ at import "bootstrap.less";
+
+// Galaxy specific variables
+ at import "galaxy_variables.less";
+
+// Galaxy Report Webapp
+ at import "reports";
+
+@white: #fff;
+@black: #000;
+
+ at import "fontawesome/font-awesome.less";
+.fa-icon {
+    // TODO: remove fa-icon and replace with fa
+    .fa;
+}
+
+ at import "select2.less";
+ /* fix for zero width select2 - remove when fixed there */
+.select2-minwidth {
+     min-width: 256px;
+}
+
+// galaxy sub-components
+ at import "frame.less";
+ at import "upload.less";
+ at import "ui.less";
+ at import "library.less";
+ at import "toastr.less";
+ at import "jstree.less";
+ at import "tour.less";
+ at import "flex.less";
+
+// Mixins
+.unselectable {
+    .user-select(none);
+}
+
+.parent-width {
+    .box-sizing(border-box);
+    width: 100%;
+    *width: 90%;
+}
+
+.border-radius(@radius) {
+    // from font-awesome 3.0
+    -webkit-border-radius: @radius;
+    -moz-border-radius: @radius;
+    border-radius: @radius;
+}
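+
+// Illustrative usage (not part of upstream): the mixin inlines the three
+// vendor-prefixed declarations above into the calling ruleset, e.g.
+//   .my-widget { .border-radius(4px); }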
+
+// ==== Real (basic) styles ====
+
+.clear {
+    .clearfix();
+}
+
+body  {
+    background: @base-bg;
+    color: @base-text-color;
+    margin: 10px;
+
+    // For panel layout pages
+    &.full-content {
+        overflow: hidden;
+        margin: 0;
+        padding: 0;
+        width: 100%;
+        height: 100%;
+    }
+}
+
+// ==== Panel layout styles ====
+
+@panel-message-height: 30px;
+
+@layout_top_padding: 0px;
+@border-default-color_padding: 0px;
+
+@panel_header_height: 30px;
+@panel_footer_height: 25px;
+
+#background {
+    position: absolute;
+    background: #fff;
+    z-index: -1;
+    top: 0;
+    left: 0;
+    margin: 0;
+    padding: 0;
+    width: 100%;
+    height: 100%;
+}
+
+#messagebox {
+    position:absolute;
+    top: @navbar-height + @layout_top_padding + 1;
+    left:0;
+    width:100%;
+    height: @panel-message-height !important;
+    overflow: hidden;
+    border-bottom: solid #999 1px;
+    font-size: 90%;
+    display: none;
+}
+
+#inactivebox {
+    position:absolute;
+    top: @navbar-height + @layout_top_padding + 1;
+    left:0;
+    width:100%;
+    height: @panel-message-height !important;
+    overflow: hidden;
+    border-bottom: solid #999 1px;
+    font-size: 90%;
+    display: none;
+}
+
+// Panels
+
+#left, #left-border, #center, #right-border, #right {
+    position: absolute;
+    top: @navbar-height + @layout_top_padding + 1;
+    bottom: 0px;
+    overflow: hidden;
+    background: #fff;
+}
+#left, #center, #right, #left-border, #right-border {
+    // border-top: solid @border-default-color 1px;
+}
+#left  {
+    left: 0px;
+    width: 250px;
+    z-index: 200;
+    border-right: solid @layout-border-color 1px;
+}
+#left-border  {
+    left: 250px;
+}
+#center  {
+    left:250 + @border-default-color_padding;
+    right: 250 + @border-default-color_padding;
+    overflow: hidden;
+    z-index: 1;
+}
+#right-border  {
+    right: 250px;
+}
+#right {
+    width: 250px;
+    right: 0px;
+    z-index: 200;
+    border-left: solid @layout-border-color 1px;
+}
+
+// If the message box or inactivity box is visible
+body.has-message-box, body.has-inactivity-box {
+    #left, #left-border, #center, #right-border, #right {
+        top: @panel-message-height + @navbar-height + @layout_top_padding + 1;
+    }
+}
+
+// If both are visible
+body.has-message-box.has-inactivity-box {
+    #left, #left-border, #center, #right-border, #right {
+        top: 2 * @panel-message-height + @navbar-height + @layout_top_padding + 1;
+    }
+    #inactivebox {
+        top: @panel-message-height + @navbar-height + @layout_top_padding + 1;
+    }
+}
+
+.subnavbar {
+    #gradient > .vertical-three-colors(@white, @white, 25%, darken(@white, 10%));
+    border-bottom: solid @border-default-color 1px;
+    border-top: solid @border-default-color 1px;
+    padding: 5px;
+    color: #555;
+}
+
+.unified-panel-header {
+    .unselectable();
+    height: @panel_header_height;
+    z-index: 1000;
+    text-shadow: rgba(255,255,255,.8) 0 1px 0;
+    background: @navbar-default-bg;
+    border-bottom: solid @layout-border-color 1px;
+    margin: 0;
+    padding: 0;
+    padding-right: 10px;
+    padding-left: 10px;
+    font-weight: bold;
+
+    color: #555;
+    a {
+        color: #555;
+    }
+    .fa {
+        font-size: 1.2em;
+    }
+}
+
+.unified-panel-header-inner {
+    padding-top: 8px;
+}
+
+.unified-panel-footer {
+    .unselectable();
+    position: absolute;
+    bottom: 0;
+    height: @panel_footer_height;
+    line-height: @panel_footer_height;
+    width: 100%;
+    z-index: 1000; // Above #dd-helper
+    border-top: solid @layout-border-color 1px;
+    background: @navbar-default-bg;
+
+    color: #555;
+    a {
+        color: #555;
+    }
+
+    .drag {
+        position: absolute;
+        top: 0;
+        right: 0;
+        padding: 0 5px;
+        text-align: center;
+        height: @panel_footer_height;
+        width: @panel_footer_height - 5;
+        background-image: url(../images/visualization/draggable_horizontal.png);
+        background-repeat: no-repeat;
+        background-position: 50% 50%;
+        cursor: w-resize;
+    }
+}
+
+#right > .unified-panel-footer {
+    .drag {
+        left: 0;
+    }
+}
+
+// Needs to be outside panel to show when hidden
+.panel-collapse {
+    .unified-panel-footer();
+    font-size: 4/3em;
+    .fa-icon;
+    &:before {
+        content:'\f053';
+    }
+    &.hidden:before {
+        content:'\f054';
+    }
+    z-index: 201;
+    cursor: pointer;
+    display: block;
+    position: fixed;
+    left: 0;
+    top: inherit;
+    bottom: 0;
+    padding: 0 5px;
+    text-align: center;
+    height: @panel_footer_height;
+    line-height: @panel_footer_height;
+    width: @panel_footer_height - 5;
+    background: none;
+    border-right: solid @layout-border-color 1px;
+    border-top: solid @layout-border-color 1px;
+    background: @navbar-default-bg;
+    &.right {
+        left: inherit;
+        right: 0;
+        border-right: none;
+        border-left: solid @border-default-color 1px;
+        &.right:before{
+            content:'\f054';
+        }
+        &.right.hidden:before {
+            content:'\f053';
+        }
+    }
+}
+
+.menu-bg {
+    background: whiteSmoke top repeat-x;
+}
+
+div.unified-panel-body {
+    position: absolute;
+    top: @panel_header_height;
+    bottom: 0;
+    width: 100%;
+    padding: 0px;
+}
+@panel-bg-color: #DFE5F9;
+div.unified-panel-body-background {
+    background: none repeat scroll 0 0 @panel-bg-color;
+}
+
+#left > div.unified-panel-body, #right > div.unified-panel-body {
+    bottom: @panel_footer_height;
+    overflow: auto;
+}
+
+.panel-header-button {
+    color: #333;
+    text-decoration: none;
+    display: inline-block;
+    cursor: pointer;
+    margin: -1px; padding: 1px;
+    margin-top: -0.2em;
+    // border: solid #999 1px;
+    padding-right: 0.5em;
+    padding-left: 0.5em;
+    // Fade to maroon on hover
+    &:hover {
+        color: maroon;
+        .transition(color .25s linear);
+    }
+    // Bootstrap style span caret needs positioning
+    .caret {
+        margin-top: 7px;
+    }
+    // Another way to get a caret
+    &.popup {
+        padding-right: 1.75em;
+        background: url(../images/dropdownarrow.png) no-repeat right 7px;
+    }
+}
+
+// Used for dragging panels, popup menus, to deal with capturing clicks in iframes
+#dd-helper {
+    background: white;
+    opacity: 0;
+    z-index: 900; // Bootstrap elements start at 1000
+    position: absolute;
+    top: 0;
+    left: 0;
+    width: 100%;
+    height: 100%;
+    display: none;
+}
+
+// Messages for message box, slightly different style
+
+.panel-error-message, .panel-warning-message, .panel-done-message, .panel-info-message
+{
+    height: @panel-message-height;
+    line-height: @panel-message-height;
+    padding: 0px;
+    padding-left: 26px;
+    background-color: @state-danger-bg;
+    background-image: url(error_small.png);
+    background-repeat: no-repeat;
+    background-position: 6px 50%;
+}
+
+.panel-warning-message
+{
+    background-image: url(warn_small.png);
+    background-color: @state-warning-bg;
+}
+
+.panel-done-message
+{
+    background-image: url(ok_small.png);
+    background-color: @state-success-bg;
+}
+
+.panel-info-message
+{
+    background-image: url(info_small.png);
+    background-color: @state-info-bg;
+}
+
+// Masthead
+#masthead {
+    position:absolute;
+    top:0;
+    left:0;
+    width:100%;
+    min-width:990px;
+    padding: 0;
+    margin-bottom: 0px;
+    .nav {
+        z-index: 15001;
+    }
+    .nav > li > a {
+        cursor: pointer;
+        text-decoration: none;
+        &:hover {
+            color: gold;
+        }
+    }
+    .navbar-brand {
+        position: absolute;
+        left: 0;
+        top: 0;
+        font-family: verdana;
+        font-weight: bold;
+        font-size: 20px;
+        line-height: 1;
+        color: white;
+        // Override margin and padding due to shorter navbar height
+        padding: 5px 20px 12px;
+        margin-left: -15px;
+        z-index: 2000;
+        img {
+            display: inline;
+            width: 26px;
+            vertical-align: top;
+            margin-left: 0.35em;
+            border: none;
+        }
+        a {
+            color: white;
+            text-decoration: none;
+        }
+    }
+    .navbar-header {
+        position: relative;
+        right: -50%;
+        float: left;
+        .navbar-tabs {
+            display: block;
+            position: relative;
+            right: 50%;
+        }
+    }
+    .navbar-tabs {
+        .toggle {
+            color           : gold;
+        }
+        .dropdown-icon {
+            top             : 1px;
+            font-size       : 1.8em;
+        }
+        .dropdown-note {
+            font-weight     : bold;
+            font-size       : 10px;
+            position        : absolute;
+            left            : 35px;
+            top             : 20px;
+            color           : gold;
+        }
+    }
+    li.dropdown > a:hover .caret {
+        border-top-color: gold;
+        border-bottom-color: gold;
+    }
+}
+
+.quota-meter-container {
+    position: absolute;
+    top: 0;
+    right: 0;
+    height: 32px;
+}
+
+.quota-meter {
+    position: absolute;
+    top: 8px;
+    right: 8px;
+    height: 16px;
+    width: 100px;
+    background-color: @progress-bg;
+}
+
+.quota-meter-bar {
+    position: absolute;
+    top: 0;
+    left: 0;
+    height: 16px;
+    background-color: @brand-primary;
+}
+
+.quota-meter-bar-warn {
+    background-color: @brand-warning;
+}
+
+.quota-meter-bar-error {
+    background-color: @brand-danger;
+}
+
+.quota-meter-text {
+    position: absolute;
+    top: 50%;
+    left: 0;
+    width: 100px;
+    height: 16px;
+    margin-top: -6px;
+    text-align: center;
+    z-index: 9001;
+    color: @black;
+    white-space: nowrap;
+}
+
+// ==== Tool form styles ====
+
+div.metadataForm {
+    border:solid #aaaaaa 1px;
+}
+
+div.metadataFormTitle {
+    font-weight:bold;
+    padding:5px;
+    padding-left:10px;
+    padding-right:10px;
+    background:#cccccc;
+    background-repeat:repeat-x;
+    background-position:top;
+    border-bottom:solid #aaaaaa 1px;
+}
+
+div.metadataFormBody {
+    background:#FFFFFF;
+    padding:5px 0;
+}
+
+div.metadataFormBody div.metadataFormTitle {
+    background:transparent;
+    border:none;
+    font-weight:bold;
+    border-bottom:solid #dcb790 1px;
+    margin-bottom:5px;
+}
+
+div.metadataFormDisabled div.metadataFormTitle {
+    background:#eee;
+    border-color:#999;
+}
+
+div.metadataFormDisabled {
+    border-color:#999;
+}
+
+div.metadataHelpBody {
+    width:100%;overflow:auto;
+}
+
+div.titleRow {
+    font-weight: bold;
+    border-bottom: dotted gray 1px;
+    margin-bottom: 0.5em;
+    padding-bottom: 0.25em;
+}
+
+// Forms
+
+div.toolFormBody div.toolFormTitle {
+    background: transparent;
+    border: none;
+    font-weight: bold;
+    border-bottom: solid @form-border 1px;
+    margin-bottom: 5px;
+}
+
+div.toolFormDisabled div.toolFormTitle {
+    background: @panel-default-heading-bg;
+    border-color: @border-default-color;
+}
+
+div.toolFormDisabled {
+    border-color: @border-default-color;
+}
+
+div.toolHelp {
+    margin-top: 15px;
+    padding: 5px;
+}
+
+div.toolHelpBody {
+    width: 100%;
+}
+
+// In workflow
+
+.toolForm.toolFormInCanvas {
+    border: solid @form-border 1px;
+    .border-radius(2px);
+    background: @white;
+    margin: 0px;
+    &.toolForm-active {
+        box-shadow: 0 0 0 3px @brand-primary;
+        .border-radius(1px);
+    }
+    .toolFormTitle {
+        font-size: @font-size-base;
+        line-height: @line-height-base;
+    }
+}
+
+
+div.form, div.toolForm {
+    border: solid @form-border 1px;
+    .border-radius(@panel-border-radius);
+}
+
+div.form-title, div.toolFormTitle {
+    // font-size: @font-size-base * 1.25;
+    // line-height: @line-height-base * 1.5;
+    padding: 5px 10px;
+    background: @form-heading-bg;
+    border-bottom: solid @form-border 1px;
+}
+
+div.form-body {
+    padding: 5px 0;
+}
+
+div.form-row  {
+    padding: 5px 10px;
+}
+
+div.form-title-row {
+    padding: 5px 10px;
+}
+
+div.repeat-group-item {
+    border-left: solid @form-border 5px;
+    margin-left: 10px;
+    margin-bottom: 10px;
+}
+
+div.form-row-error {
+    background: @state-danger-bg;
+}
+
+div.form-row label {
+    font-weight: bold;
+    display: block;
+    margin-bottom: .2em;
+}
+
+div.form-row label.inline {
+    display: inline;
+}
+
+div.form-row-input {
+    width: 90%;
+    float: left;
+}
+
+div.form-row-input label {
+    font-weight: normal;
+    display: inline;
+}
+
+div.form-row-error-message {
+    width: 300px;
+    float: left;
+    color: red;
+    font-weight: bold;
+    padding: 3px 0;
+}
+
+.form-row .help, .toolParamHelp {
+    color: #666;
+    a {
+        color: #666;
+    }
+}
+
+.form-row.form-actions {
+    background: whiteSmoke;
+    border-top: solid #ddd 1px;
+    padding-top: 10px;
+    padding-bottom: 10px;
+    margin-top: 5px;
+}
+
+.workflow-right .right-content .section-row {
+    margin-bottom: 10px;
+}
+
+// Selects
+select {
+    //border: 1px solid @input-border;
+    padding: 2px;
+    font-size: @font-size-base;
+    line-height: @line-height-base;
+}
+
+select, input, textarea {
+    font: inherit;
+}
+
+.form-row {
+    select, textarea, input[type="text"], input[type="file"], input[type="password"] {
+        // -webkit-box-sizing: border-box;
+        max-width: 90%;
+    }
+}
+
+textarea, input[type="text"], input[type="password"] {
+    font-size: @font-size-base;
+    line-height: @line-height-base;
+    border: 1px solid @input-border;
+    padding: 3px;
+}
+
+// Always style buttons and submits as bootstrap buttons
+input[type="submit"], button {
+    .btn();
+    .btn-default();
+    // The above overrides the button variants (primary, danger, etc) because it occurs later, redefine btn-primary (bit of a hack)
+    &.btn-primary {
+        .btn-primary();
+    }
+}
+
+.search-query {
+  display: inline-block;
+  padding: 4px;
+  font-size: @font-size-base;
+  line-height: @line-height-base;
+  color: @gray;
+  border: 1px solid @input-border;
+  padding-left: 14px !important;
+  padding-right: 14px;
+  margin-bottom: 0; // remove the default margin on all inputs
+  .border-radius(14px);
+  max-width: auto;
+}
+.search-query:focus {
+  border-color: darken(rgba(82,168,236,.8),15%);
+  @shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(82,168,236,.6);
+  .box-shadow(@shadow);
+  outline: 0;
+  outline: thin dotted \9; /* IE6-8 */
+}
+
+.search-spinner {
+    position: absolute;
+    display: none;
+    right: 6px;
+    top: 9px;
+    font-size: 1.4em;
+    color: #888;
+}
+
+#search-clear-btn {
+    position: absolute;
+    right: 6px;
+    top: 9px;
+    display: block;
+    font-size: 1.4em !important;
+    text-decoration: none;
+    color: #888;
+    .fa-icon;
+    &:before {
+        content: "\f057";
+    }
+}
+
+// Messages
+
+.errormessagelarge, .warningmessagelarge, .donemessagelarge, .infomessagelarge, .ui-form-help .error, .ui-form-help .warning, .ui-form-help .note {
+    .alert();
+    min-height: 36px;
+    padding-left: 52px;
+    background-image: url(error_large.png);
+    background-repeat: no-repeat;
+    background-position: 10px 8px;
+    .messagerow {
+        padding: 10px 20px;
+    }
+}
+
+.errormessagelarge {
+    .alert-danger();
+    padding-left: 52px;
+}
+
+.warningmessagelarge {
+    .alert-warning();
+    padding-left: 52px;
+    background-image: url(warn_large.png);
+}
+
+.donemessagelarge {
+    .alert-success();
+    padding-left: 52px;
+    background-image: url(ok_large.png);
+}
+
+.infomessagelarge {
+    .alert-info();
+    background-image: url(info_large.png);
+}
+
+.screencastBox {
+    padding-left: 10px;
+    border-color: #AAAA66;
+    background-color: #FFFFCC;
+    background-image: none;
+}
+
+.errormessage, .warningmessage, .donemessage, .infomessage, .errormessagesmall, .warningmessagesmall, .donemessagesmall, .infomessagesmall, .ui-form-help .error, .ui-form-help .warning, .ui-form-help .note {
+    .alert();
+    padding: 5px;
+    padding-left: 25px;
+    min-height: 15px;
+    //border: 1px solid @error_message_border;
+    //background-color: @error_message_bg;
+    background-image: url(error_small.png);
+    background-repeat: no-repeat;
+    background-position: 5px 5px;
+}
+
+.errormessage, .errormessagesmall, .ui-form-help .error {
+    .alert-danger();
+}
+
+.warningmessage, .warningmessagesmall, .ui-form-help .warning {
+    .alert-warning();
+    background-image: url(warn_small.png);
+}
+
+.donemessage, .donemessagesmall {
+    .alert-success();
+    background-image: url(ok_small.png);
+}
+
+.infomessage, .infomessagesmall, .ui-form-help .note {
+    .alert-info();
+    background-image: url(info_small.png);
+}
+
+.errormark, .warningmark, .donemark, .infomark, .ok_bgr, .err_bgr {
+    padding-left: 20px;
+    min-height: 15px;
+    background: url(error_small.png) no-repeat;
+}
+
+.warningmark {
+    background-image: url(warn_small.png);
+}
+
+.donemark {
+    background-image: url(ok_small.png);
+}
+
+.infomark, .ok_bgr {
+    background-image: url(info_small.png);
+}
+
+/* I think this is only used in view details */
+table.simple {
+    font-size: 12px;
+    background: #fff;
+    margin: 1em;
+    border-collapse: collapse;
+    text-align: left;
+}
+table.simple th {
+    font-size: 14px;
+    font-weight: normal;
+    padding: 10px 8px;
+    border-bottom: 2px solid #333;
+}
+table.simple td {
+    padding: 10px 8px 0px 8px;
+}
+table.simple tbody tr:hover td {
+    color: #333;
+}
+table.tabletip {
+    width: 100%;
+    border-collapse: collapse;
+    text-align: left;
+}
+table.tabletip th {
+    white-space: nowrap;
+    border-bottom: 1px solid #444;
+    padding-right: 3px;
+}
+table.tabletip td {
+    border-bottom: 1px solid #ddd;
+}
+table.tabletip tbody tr:hover td {
+    background-color: #eee;
+}
+
+table.colored {
+    border-top: solid @table-border-color 1px;
+    border-bottom: solid @table-border-color 1px;
+}
+
+table.colored td, table.colored th {
+    text-align: left;
+    padding: 5px;
+    line-height: @line-height-base;
+}
+
+table.colored tr.header {
+    background: @table-heading-bg;
+    color: contrast(@table-heading-bg);
+    background-repeat: repeat-x;
+    background-position: top;
+    border-bottom: solid @table-border-color 1px;
+    font-weight: bold;
+}
+
+table.colored tr {
+    background: @white;
+}
+
+table.colored tr.odd_row {
+    background: @table-bg-accent;
+}
+
+div.debug {
+    margin: 10px;
+    padding: 5px;
+    background: #FFFF99;
+    border: solid #FFFF33 1px;
+    color: black;
+}
+
+// Data grid style
+
+.grid-header {
+    padding-bottom: 1em;
+}
+.grid-header h2 {
+    margin: 0;
+    margin-bottom: 0.5em;
+}
+.grid-header .title {
+    font-weight: bold;
+}
+.grid {
+    padding-top: 1em;
+    border-collapse: collapse;
+    width: 100%;
+}
+.grid tbody td {
+    line-height: @line-height-base;
+    border-top: solid #DDDDDD 1px;
+    border-bottom: solid #DDDDDD 1px;
+    padding: 5px;
+}
+.grid tbody td:empty {
+    padding: 0;
+}
+.grid thead tr {
+    height: 2em;
+}
+.grid thead th {
+    line-height: @line-height-base;
+    background: @table-heading-bg;
+    color: contrast(@table-heading-bg);
+    border-top: solid @table-border-color 1px;
+    border-bottom: solid @table-border-color 1px;
+    padding: 5px;
+    text-align: left;
+    white-space: nowrap;
+}
+.grid tfoot td {
+    background-color: #F8F8F8;
+    border-top: solid #DDDDDD 1px;
+    border-bottom: solid #DDDDDD 1px;
+    padding: 5px;
+}
+.grid .current {
+    background-color: #EEEEFF;
+}
+
+// Pulled out of grid base
+.count-box {
+    min-width: 1.1em;
+    padding: 5px;
+    border-width: 1px;
+    border-style: solid;
+    text-align: center;
+    display: inline-block;
+}
+.text-filter-val {
+    border: solid 1px #AAAAAA;
+    padding: 1px 2px 1px 3px;
+    margin-right: 5px;
+    -moz-border-radius: .5em;
+    -webkit-border-radius: .5em;
+    font-style: italic;
+}
+.page-link a, .inactive-link {
+    padding: 0px 7px 0px 7px;
+    color: #555;
+}
+.inactive-link, .current-filter {
+    font-weight: bold;
+    color: #000;
+}
+.submit-image {
+    background: url(../images/fugue/magnifier-left.png) no-repeat center transparent;
+    background-color: #eee;
+    width: @line-height-base + 8;
+    height: @line-height-base + 8;
+    cursor: pointer;
+    margin: 0;
+    padding: 0;
+    border: 1px solid #aaa;
+    border-left: none;
+}
+#advanced-search td {
+    padding: 3px;
+}
+#advanced-search table {
+    border-collapse: separate;
+}
+.delete-search-icon {
+    background: url(../images/delete_tag_icon_gray.png) center no-repeat;
+    display: inline-block;
+    width: 10px;
+    cursor: pointer;
+    height: 18px;
+    vertical-align: middle;
+    margin-left: 2px;
+}
+.search-box-input {
+    border: 0;
+    float: left;
+    outline: medium none;
+    font-style: italic;
+    font-size: inherit;
+}
+.search-box {
+    vertical-align: bottom;
+    display: inline-block;
+    padding: 0;
+    white-space: nowrap;
+    // border: 1px solid #aaa;
+}
+.gray-background {
+    background-color: #DDDDDD;
+}
+.loading-elt-overlay {
+    background-color: white;
+    opacity: 0.5;
+    width: 100%;
+    height: 100%;
+    z-index: 14000;
+    position: fixed;
+    display: none;
+}
+
+
+div.odd_row {
+    background: @table-bg-accent;
+}
+
+#footer {
+  display: none;
+}
+
+// Tool panel stuff
+
+// Extra label colors
+.label-beta {
+  .label-warning();
+}
+
+.label-new {
+  .label-info();
+}
+
+span.toolParameterExpandableCollapsable {
+    font-weight: bold;
+    cursor: pointer;
+}
+ul.toolParameterExpandableCollapsable {
+    list-style: none;
+}
+
+ul.manage-table-actions {
+    float: right;
+    margin-top: -2.5em;
+}
+ul.manage-table-actions li {
+    display: block;
+    float: left;
+    margin-left: 0.5em;
+}
+
+// State colors
+
+.state-color-new {
+    border-color: @state-default-border;
+    background: @state-default-bg;
+}
+
+.state-color-upload {
+    border-color: @state-info-border;
+    background: @state-info-bg;
+}
+
+.state-color-waiting {
+    border-color: @state-default-border;
+    background: @state-default-bg;
+}
+
+.state-color-queued {
+    border-color: @state-default-border;
+    background: @state-default-bg;
+}
+
+.state-color-running {
+    border-color: @state-running-border;
+    background: @state-running-bg;
+}
+
+.state-color-ok {
+    border-color: @state-success-border;
+    background: @state-success-bg;
+}
+
+.state-color-error {
+    border-color: @state-danger-border;
+    background: @state-danger-bg;
+}
+
+.state-color-deleted {
+    border-color: @state-deleted-border;
+    background: @state-deleted-bg;
+}
+
+.state-fg-new {
+    color: #FFB030;
+}
+
+.state-fg-upload {
+    color: #D090D0;
+}
+
+.state-fg-waiting {
+    color: #E8C060;
+}
+
+.state-fg-queued {
+    color: #888888;
+}
+
+.state-fg-running {
+    color: #AAAA66;
+}
+
+.state-fg-ok {
+    color: #66AA66;
+}
+
+.state-fg-error {
+    color: #AA6666;
+}
+
+.state-fg-deleted {
+    color: #3399FF;
+}
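+// Illustrative note (assumed usage): one state class is applied per item to
+// mirror a job's lifecycle, e.g. a history item div gets "state-color-queued"
+// and later "state-color-ok"; the state-fg-* classes color matching icons.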
+
+// Button styles
+
+button {
+    .btn();
+    .btn-default();
+}
+a.btn {
+    text-decoration: none;
+}
+.action-button {
+    .btn();
+    .btn-default();
+}
+a.action-button {
+    color: @btn-default-color;
+    text-decoration: none;
+}
+.action-button > img {
+    vertical-align: middle;
+}
+
+
+//.action-button > * {
+//    vertical-align: middle;
+//}
+
+.action-button:hover {
+    .btn.hover();
+    //color: black;
+    //background: #dddddd;
+}
+.action-button:active {
+    .btn.active();
+    //color: white;
+    //background: #aaaaaa;
+}
+
+// A menu button is a button that has an attached popup menu
+
+.menubutton {
+    .btn();
+    .btn-default();
+
+    &:hover { .btn.hover(); }
+    &:active { .btn.active(); }
+    &:focus { .tab-focus(); }
+
+    display: inline-block;
+    cursor: pointer;
+    position: relative;
+    .unselectable;
+
+    // border: solid transparent 1px;
+    // -moz-border-radius: 0.25em;
+    // -webkit-border-radius: 0.25em;
+    // border-radius: 0.25em;
+
+    // padding: 1px 0.25em;
+    // margin: -1px -0.25em;
+
+    a {
+        text-decoration: none;
+    }
+
+    .menubutton-label, > label {
+        position: relative;
+        display: inline-block;
+        border-right: none;
+        text-decoration: none;
+        text-align: left;
+        // The following properties truncate the text and force the button to have one line
+        max-height: 2*@line-height-base;
+        line-height: @line-height-base;
+        overflow: hidden;
+        text-overflow: ellipsis;
+    }
+
+    &.popup .menubutton-label {
+        border-right: solid @btn-default-border 1px;
+        padding-right: 6px;
+    }
+
+    &.popup, &.popup.split {
+        padding-right: 18px;
+        &:after {
+            top: 45%;
+            position: absolute;
+            right: 6px;
+            .caret();
+        }
+    }
+}
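+// Illustrative example (hypothetical markup): a popup menu button might be
+//   <div class="menubutton popup">
+//     <div class="menubutton-label">Options</div>
+//   </div>
+// where the .popup modifier reserves 18px on the right and draws the caret.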
+
+// A split menu button: the main button has an action, and the arrow opens the
+// popup menu
+
+.menubutton.popup.split {
+    // padding-right: 2em;
+}
+
+.menubutton.popup.split:hover {
+    //background: url(../images/ddarrowsplit.png) no-repeat right -39px;
+}
+
+// Popup menu styles
+
+/*
+.overlay-border {
+    position: absolute;
+    top: 0;
+    left: 0;
+    height: 100%;
+    width: 100%;
+    padding: 1em;
+    margin: -1em;
+    background-color: rgba(0,0,0,0.5);
+    -moz-border-radius: 1em;
+    -webkit-border-radius: 1em;
+    z-index: -1;
+}
+*/
+
+// Galaxy popup menus
+div.popmenu-wrapper {
+
+    position: absolute;
+    top: 100%;
+    z-index: 20000;
+
+    ul.dropdown-menu {
+        // Since our menus are contained in a positioned div, we need to override some styles
+        display: block;
+        position: relative;
+        float: none;
+    }
+}
+
+// For all dropdowns, define some additional item types
+ul.dropdown-menu {
+    // No underlines in dropdown menus
+    a {
+        text-decoration: none;
+    }
+    li.head > a {
+        // From nav-list nav-header
+        text-shadow: 0 1px 0 rgba(255,255,255,.5);
+        font-size: 11px;
+        font-weight: bold;
+        line-height: @line-height-base;
+        color: @gray-light;
+        text-transform: uppercase;
+    }
+
+    li.head > a:hover {
+        background: inherit;
+        border-color: transparent;
+    }
+}
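+// Illustrative example (assumed structure): a non-clickable section header
+// inside a menu is just a list item with the "head" class:
+//   <li class="head"><a>Saved Searches</a></li>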
+
+.popup-arrow {
+    cursor: pointer;
+    text-decoration: none;
+    color: #555;
+}
+
+.popup-arrow:hover {
+    color: black;
+}
+
+div.permissionContainer {
+    padding-left: 20px;
+}
+
+// Styles for areas of text content
+
+.text-content {
+
+    hr {
+            display:block;
+            background:black;
+            color:black;
+            width:100%;
+            height:1px;
+            border:none;
+            background:#aaa;
+            color:#aaa;
+    }
+
+    table
+    {
+            border-collapse:collapse;
+            border-top:1px solid #ccc;
+            border-left:1px solid #ccc;
+    }
+
+    blockquote {
+            color:#666;
+    }
+
+    fieldset {
+            border-color:#ccc;
+            border:1px solid #ccc;
+    }
+
+    th,td {
+            border-bottom:1px solid #ddd;
+            border-right:1px solid #ccc;
+    }
+
+    th,td {
+            padding:.8em;
+    }
+
+}
+
+.preserve-text-breaks {
+    white-space: pre-line;
+}
+
+// Icon buttons.
+
+.icon-button  {
+    width: 16px;
+    height: 16px;
+    display: block;
+    float: left;
+    margin-left: 2px;
+    // Allow alt text for screen readers
+    text-indent: 20px;
+    background-repeat:no-repeat;
+    background-position: 0px 0px;
+    padding: 0;
+}
+
+.fa-icon-button {
+    text-align: center;
+    text-decoration: none;
+    display: inline-block;
+    cursor: pointer;
+    width: 16px;
+    height: 16px;
+    line-height: 8px;
+    // Fade to maroon on hover
+    &:hover {
+        color: maroon;
+        .transition(color .25s linear);
+    }
+}
+
+.editable-text {
+    cursor:pointer;
+}
+
+.editable-text:hover {
+    cursor: text;
+    border: dotted #999999 1px;
+}
+
+.icon-button.multiinput{
+    background:url(../images/documents-stack.png) no-repeat;
+    cursor:pointer;
+    float:none;
+    display:inline-block;
+    margin-left:10px;
+}
+
+.icon-button.multiinput.disabled{
+    background:url(../images/documents-stack-faded.png) no-repeat;
+    cursor:pointer;
+}
+
+
+.icon-button.link{
+    background:url(../images/silk/link.png) no-repeat;
+    cursor:pointer;
+    float:none;
+    display:inline-block;
+    margin-left:10px;
+}
+
+.icon-button.link-broken{
+    background:url(../images/silk/link_break.png) no-repeat;
+    cursor:pointer;
+    float:none;
+    display:inline-block;
+    margin-left:10px;
+}
+
+.workflow-invocation-complete{
+    border:solid 1px #6A6;
+    border-left-width:5px;
+    margin:10px 0;
+    padding-left:5px;
+}
+
+// ============================================================================ History
+ at import "ui/icon-btn.less";
+ at import "list-item.less";
+ at import "dataset.less";
+ at import "history.less";
+ at import "collection.less";
+ at import "job.less";
+
+ at import "ui/paired-collection-creator.less";
+
+ at import "ui/search-input.less";
+ at import "ui/dataset-choice.less";
+ at import "ui/peek-column-selector.less";
+ at import "ui/pagination.less";
+ at import "ui/error-modal.less";
+
+
+// ==== Tool menu styles
+
+.toolMenuContainer
+{
+    color: @base-text-color;
+    background: @side-panel-bg;
+    min-height: 100%;
+    padding: 35px 10px;
+}
+
+div.toolSectionPad
+{
+    margin: 0;
+    padding: 0;
+    height: 5px;
+    font-size: 0px;
+}
+
+div.toolSectionWrapper {
+    margin-bottom: 5px;
+}
+
+div.toolSectionDetailsInner
+{
+  margin-left: 5px;
+  margin-right: 5px;
+}
+
+div.toolSectionTitle
+{
+  font-weight: bold;
+}
+
+div.toolPanelLabel
+{
+  padding-top: 10px;
+  padding-bottom: 5px;
+  font-weight: bold;
+  color: gray;
+  text-transform: uppercase;
+}
+
+div.toolTitle
+{
+  padding-top: 5px;
+  padding-bottom: 5px;
+  margin-left: 16px;
+  margin-right: 10px;
+  display: list-item;
+  list-style: square outside;
+  .labels {
+    float: right;
+    margin-left: 2px;
+  }
+}
+
+div.toolSectionBody div.toolPanelLabel
+{
+  padding-top: 5px;
+  padding-bottom: 5px;
+  margin-left: 16px;
+  margin-right: 10px;
+  display: list-item;
+  list-style: none outside;
+}
+
+div.toolTitleNoSection
+{
+  padding-bottom: 5px;
+  font-weight: bold;
+}
+
+#tool-search {
+    padding-top: 5px;
+    padding-bottom: 10px;
+    position: fixed;
+    top: 65px;
+    width: 210px;
+}
+
+// Dataset Display Styles
+
+#loading_indicator{
+    position:fixed;
+    right:10px;
+    top:10px;
+    height:32px;
+    width:32px;
+    display:none;
+    background:url(largespinner.gif);
+}
+
+#content_table td{
+    text-align:right;
+    white-space:nowrap;
+    padding:2px 10px;
+}
+
+#content_table th{
+    white-space:nowrap;
+    padding:2px 10px;
+}
+
+#content_table td.stringalign{
+    text-align:left;
+}
+
+#content_table .dark_row{
+    background-color: #DDD;
+}
+
+#content_table th{
+    background-color: #AAA;
+}
+
+// ==== Integrated tool form styles
+
+.toolMenuAndView .toolForm
+{
+    float: left;
+    background-color: white;
+    margin: 10px;
+}
+
+// @import "base_sprites";
+
+.text-and-autocomplete-select {
+    // -sprite-group: fugue;
+    // -sprite-image: fugue/control-270.png;
+    // -sprite-horiz-position: right;
+    background: none;
+    position: relative;
+    padding-right: 18px;
+    &:after {
+        margin-top: 6px;
+        position: absolute;
+        top: 2px;
+        right: 6px;
+        width: 10px;
+        height: 10px;
+        .caret();
+        .opacity(80);
+    }
+}
+
+.icon-button.general-question{
+    background: url(question-octagon-frame.png) no-repeat;
+    float: right;
+    margin-top: 3px;
+    margin-right: 4px;
+}
+
+.icon-button.tag-question{
+    background: url(question-balloon.png) no-repeat;
+    float: right;
+    margin-top: 3px;
+    margin-right: 4px;
+}
+
+// ==== General Sprites ====
+
+ at import "sprite-fugue.less";
+
+.icon-button.tag {
+    .sprite(@tag-label);
+}
+.icon-button.tags {
+    .sprite(@tags);
+}
+.icon-button.tag--plus {
+    .sprite(@tag-plus);
+}
+.icon-button.toggle-expand {
+    .sprite(@toggle-expand);
+}
+.icon-button.toggle {
+    .sprite(@toggle-bw);
+}
+.icon-button.toggle:hover {
+    .sprite(@toggle);
+}
+.icon-button.arrow-circle {
+    .sprite(@arrow-circle);
+}
+.icon-button.chevron {
+    .sprite(@chevron);
+}
+.icon-button.bug {
+    .sprite(@bug);
+}
+.icon-button.disk {
+    .sprite(@disk);
+}
+.icon-button.information {
+    .sprite(@information-white);
+}
+.icon-button.annotate {
+    .sprite(@sticky-note-text);
+}
+.icon-button.go-to-full-screen {
+    .sprite(@external);
+}
+.icon-button.import {
+    .sprite(@plus-circle);
+}
+.icon-button.plus-button {
+    .sprite(@plus-button-bw);
+}
+.icon-button.plus-button:hover {
+    .sprite(@plus-button);
+}
+.icon-button.gear {
+    .sprite(@gear);
+}
+.icon-button.chart_curve {
+    .sprite(@chart);
+}
+.icon-button.disk--arrow {
+    .sprite(@disk-arrow-bw);
+}
+.icon-button.disk--arrow:hover {
+    .sprite(@disk-arrow);
+}
+.icon-button.cross-circle {
+    .sprite(@cross-circle-bw);
+}
+.icon-button.cross-circle:hover {
+    .sprite(@cross-circle);
+}
+.icon-button.arrow-split {
+    .sprite(@arrow-split-bw);
+}
+.icon-button.arrow-split:hover {
+    .sprite(@arrow-split);
+}
+.icon-button.chevron-expand:hover {
+    .sprite(@chevron-expand);
+}
+.icon-button.chevron-expand {
+    .sprite(@chevron-expand-bw);
+}
+
+// noscript overlay - displayed when javascript is disabled
+.noscript-overlay {
+    position: absolute;
+    width: 100%;
+    height: 100%;
+    background: white;
+    z-index: 10000;
+    & > div {
+        margin: 64px 0px 0px 64px;
+    }
+}
+
+// honeypot for registration form
+#for_bears {
+    display: none;
+}
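+// (Assumed usage: the registration template renders a decoy field inside
+// #for_bears; humans never see it, so a submitted value flags a bot.)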
+
+// Communication channel bootstrap modal
+
+.chat-modal {
+    overflow: hidden;
+}
+
+.modal-header-body {
+    padding: 2px;
+}
+
+.communication-iframe {
+    width: 100%;
+    height: 100%;
+}
+
+.close-modal {
+    float: right; 
+    cursor: pointer;
+}
+
+.expand-compress-modal {
+    cursor: pointer;
+    font-size: 12px;
+    margin-left: 93.2%;
+}
+
+// password complexity monitor
+#change_password, #registrationForm {
+    .progress {
+        width:200px;
+        margin-left: 20px;
+    }
+    .progress-bar {
+        color:black;
+        text-align:left;
+    }
+}
+
diff --git a/client/galaxy/style/less/bootstrap.less b/client/galaxy/style/less/bootstrap.less
new file mode 100644
index 0000000..9cac3ec
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap.less
@@ -0,0 +1,68 @@
+/*
+ * bootstrap.less modified for Galaxy.
+ *  - only a subset of Bootstrap components is used
+ *
+ * ---
+ *
+ * Bootstrap v3.0.0
+ *
+ *
+ * Copyright 2012 Twitter, Inc
+ * Licensed under the Apache License v2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Designed and built with all the love in the world by @mdo and @fat.
+ */
+
+// Core variables and mixins
+ at import "bootstrap/variables.less";
+ at import "bootstrap/mixins.less";
+
+// Reset
+ at import "bootstrap/normalize.less";
+ at import "bootstrap/print.less";
+
+// Core CSS
+ at import "bootstrap/scaffolding.less";
+ at import "bootstrap/type.less";
+// @import "bootstrap/code.less";
+ at import "bootstrap/grid.less";
+ at import "bootstrap/tables.less";
+ at import "bootstrap/forms.less";
+ at import "bootstrap/buttons.less";
+
+// Components
+ at import "bootstrap/component-animations.less";
+ at import "bootstrap/glyphicons.less";
+ at import "bootstrap/dropdowns.less";
+ at import "bootstrap/button-groups.less";
+ at import "bootstrap/input-groups.less";
+ at import "bootstrap/navs.less";
+ at import "bootstrap/navbar.less";
+ at import "bootstrap/breadcrumbs.less";
+ at import "bootstrap/pagination.less";
+ at import "bootstrap/pager.less";
+ at import "bootstrap/labels.less";
+ at import "bootstrap/badges.less";
+ at import "bootstrap/jumbotron.less";
+// @import "thumbnails.less";
+ at import "bootstrap/alerts.less";
+ at import "bootstrap/progress-bars.less";
+ at import "bootstrap/media.less";
+ at import "bootstrap/list-group.less";
+ at import "bootstrap/panels.less";
+ at import "bootstrap/wells.less";
+ at import "bootstrap/close.less";
+
+// Components w/ JavaScript
+ at import "bootstrap/modals.less";
+ at import "bootstrap/tooltip.less";
+ at import "bootstrap/popovers.less";
+// @import "carousel.less";
+
+// Utility classes
+ at import "bootstrap/utilities.less";
+
+// Galaxy modifications (override)
+ at import "galaxy_bootstrap/overrides.less";
+ at import "galaxy_bootstrap/variables.less";
diff --git a/client/galaxy/style/less/bootstrap/alerts.less b/client/galaxy/style/less/bootstrap/alerts.less
new file mode 100644
index 0000000..be09d18
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/alerts.less
@@ -0,0 +1,67 @@
+//
+// Alerts
+// --------------------------------------------------
+
+
+// Base styles
+// -------------------------
+
+.alert {
+  padding: @alert-padding;
+  margin-bottom: @line-height-computed;
+  border: 1px solid transparent;
+  border-radius: @alert-border-radius;
+
+  // Headings for larger alerts
+  h4 {
+    margin-top: 0;
+    // Specified for the h4 to prevent conflicts of changing @headingsColor
+    color: inherit;
+  }
+  // Provide class for links that match alerts
+  .alert-link {
+    font-weight: @alert-link-font-weight;
+  }
+
+  // Improve alignment and spacing of inner content
+  > p,
+  > ul {
+    margin-bottom: 0;
+  }
+  > p + p {
+    margin-top: 5px;
+  }
+}
+
+// Dismissable alerts
+//
+// Expand the right padding and account for the close button's positioning.
+
+.alert-dismissable {
+  padding-right: (@alert-padding + 20);
+
+  // Adjust close link position
+  .close {
+    position: relative;
+    top: -2px;
+    right: -21px;
+    color: inherit;
+  }
+}
+
+// Alternate styles
+//
+// Generate contextual modifier classes for colorizing the alert.
+
+.alert-success {
+  .alert-variant(@alert-success-bg; @alert-success-border; @alert-success-text);
+}
+.alert-info {
+  .alert-variant(@alert-info-bg; @alert-info-border; @alert-info-text);
+}
+.alert-warning {
+  .alert-variant(@alert-warning-bg; @alert-warning-border; @alert-warning-text);
+}
+.alert-danger {
+  .alert-variant(@alert-danger-bg; @alert-danger-border; @alert-danger-text);
+}
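+// Illustrative usage (dismissal assumes Bootstrap's alert.js plugin):
+//   <div class="alert alert-warning alert-dismissable">
+//     <button type="button" class="close" data-dismiss="alert">&times;</button>
+//     Something needs attention.
+//   </div>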
diff --git a/client/galaxy/style/less/bootstrap/badges.less b/client/galaxy/style/less/bootstrap/badges.less
new file mode 100644
index 0000000..0b69753
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/badges.less
@@ -0,0 +1,51 @@
+//
+// Badges
+// --------------------------------------------------
+
+
+// Base classes
+.badge {
+  display: inline-block;
+  min-width: 10px;
+  padding: 3px 7px;
+  font-size: @font-size-small;
+  font-weight: @badge-font-weight;
+  color: @badge-color;
+  line-height: @badge-line-height;
+  vertical-align: baseline;
+  white-space: nowrap;
+  text-align: center;
+  background-color: @badge-bg;
+  border-radius: @badge-border-radius;
+
+  // Empty badges collapse automatically (not available in IE8)
+  &:empty {
+    display: none;
+  }
+}
+
+// Hover state, but only for links
+a.badge {
+  &:hover,
+  &:focus {
+    color: @badge-link-hover-color;
+    text-decoration: none;
+    cursor: pointer;
+  }
+}
+
+// Quick fix for labels/badges in buttons
+.btn .badge {
+  position: relative;
+  top: -1px;
+}
+
+// Account for counters in navs
+a.list-group-item.active > .badge,
+.nav-pills > .active > a > .badge {
+  color: @badge-active-color;
+  background-color: @badge-active-bg;
+}
+.nav-pills > li > a > .badge {
+  margin-left: 3px;
+}
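+// Illustrative usage: <a href="#">Inbox <span class="badge">42</span></a>;
+// an empty <span class="badge"></span> collapses via the :empty rule above.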
diff --git a/client/galaxy/style/less/bootstrap/bootstrap.less b/client/galaxy/style/less/bootstrap/bootstrap.less
new file mode 100644
index 0000000..1344b46
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/bootstrap.less
@@ -0,0 +1,59 @@
+/*!
+ * Bootstrap v3.0.0
+ *
+ * Copyright 2013 Twitter, Inc
+ * Licensed under the Apache License v2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Designed and built with all the love in the world by @mdo and @fat.
+ */
+
+// Core variables and mixins
+ at import "variables.less";
+ at import "mixins.less";
+
+// Reset
+ at import "normalize.less";
+ at import "print.less";
+
+// Core CSS
+ at import "scaffolding.less";
+ at import "type.less";
+ at import "code.less";
+ at import "grid.less";
+ at import "tables.less";
+ at import "forms.less";
+ at import "buttons.less";
+
+// Components
+ at import "component-animations.less";
+ at import "glyphicons.less";
+ at import "dropdowns.less";
+ at import "button-groups.less";
+ at import "input-groups.less";
+ at import "navs.less";
+ at import "navbar.less";
+ at import "breadcrumbs.less";
+ at import "pagination.less";
+ at import "pager.less";
+ at import "labels.less";
+ at import "badges.less";
+ at import "jumbotron.less";
+ at import "thumbnails.less";
+ at import "alerts.less";
+ at import "progress-bars.less";
+ at import "media.less";
+ at import "list-group.less";
+ at import "panels.less";
+ at import "wells.less";
+ at import "close.less";
+
+// Components w/ JavaScript
+ at import "modals.less";
+ at import "tooltip.less";
+ at import "popovers.less";
+ at import "carousel.less";
+
+// Utility classes
+ at import "utilities.less";
+ at import "responsive-utilities.less";
diff --git a/client/galaxy/style/less/bootstrap/breadcrumbs.less b/client/galaxy/style/less/bootstrap/breadcrumbs.less
new file mode 100644
index 0000000..92643d1
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/breadcrumbs.less
@@ -0,0 +1,23 @@
+//
+// Breadcrumbs
+// --------------------------------------------------
+
+
+.breadcrumb {
+  padding: 8px 15px;
+  margin-bottom: @line-height-computed;
+  list-style: none;
+  background-color: @breadcrumb-bg;
+  border-radius: @border-radius-base;
+  > li {
+    display: inline-block;
+    &+li:before {
+      content: "/\00a0"; // Unicode space added since inline-block means non-collapsing white-space
+      padding: 0 5px;
+      color: @breadcrumb-color;
+    }
+  }
+  > .active {
+    color: @breadcrumb-active-color;
+  }
+}
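+// Illustrative usage:
+//   <ol class="breadcrumb">
+//     <li><a href="#">Home</a></li>
+//     <li class="active">Library</li>
+//   </ol>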
diff --git a/client/galaxy/style/less/bootstrap/button-groups.less b/client/galaxy/style/less/bootstrap/button-groups.less
new file mode 100644
index 0000000..43ada11
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/button-groups.less
@@ -0,0 +1,248 @@
+//
+// Button groups
+// --------------------------------------------------
+
+// Button carets
+//
+// Match the button text color to the arrow/caret for indicating dropdown-ness.
+
+.caret {
+  .btn-default & {
+    border-top-color: @btn-default-color;
+  }
+  .btn-primary &,
+  .btn-success &,
+  .btn-warning &,
+  .btn-danger &,
+  .btn-info & {
+    border-top-color: #fff;
+  }
+}
+.dropup {
+  & .btn-default .caret {
+    border-bottom-color: @btn-default-color;
+  }
+  .btn-primary,
+  .btn-success,
+  .btn-warning,
+  .btn-danger,
+  .btn-info {
+   .caret {
+      border-bottom-color: #fff;
+    }
+  }
+}
+
+// Make the div behave like a button
+.btn-group,
+.btn-group-vertical {
+  position: relative;
+  display: inline-block;
+  vertical-align: middle; // match .btn alignment given font-size hack above
+  > .btn {
+    position: relative;
+    float: left;
+    // Bring the "active" button to the front
+    &:hover,
+    &:focus,
+    &:active,
+    &.active {
+      z-index: 2;
+    }
+    &:focus {
+      // Remove focus outline when dropdown JS adds it after closing the menu
+      outline: none;
+    }
+  }
+}
+
+// Prevent double borders when buttons are next to each other
+.btn-group {
+  .btn + .btn,
+  .btn + .btn-group,
+  .btn-group + .btn,
+  .btn-group + .btn-group {
+    margin-left: -1px;
+  }
+}
+
+// Optional: Group multiple button groups together for a toolbar
+.btn-toolbar {
+  .clearfix();
+
+  .btn-group {
+    float: left;
+  }
+  // Space out series of button groups
+  > .btn,
+  > .btn-group {
+    + .btn,
+    + .btn-group {
+      margin-left: 5px;
+    }
+  }
+}
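+// Illustrative usage: group related buttons, then group the groups:
+//   <div class="btn-toolbar">
+//     <div class="btn-group"><button class="btn btn-default">A</button></div>
+//     <div class="btn-group"><button class="btn btn-default">B</button></div>
+//   </div>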
+
+.btn-group > .btn:not(:first-child):not(:last-child):not(.dropdown-toggle) {
+  border-radius: 0;
+}
+
+// Set corners individually because sometimes a single button can be in a .btn-group and we need :first-child and :last-child to both match
+.btn-group > .btn:first-child {
+  margin-left: 0;
+  &:not(:last-child):not(.dropdown-toggle) {
+    .border-right-radius(0);
+  }
+}
+// Need .dropdown-toggle since :last-child doesn't apply given a .dropdown-menu immediately after it
+.btn-group > .btn:last-child:not(:first-child),
+.btn-group > .dropdown-toggle:not(:first-child) {
+  .border-left-radius(0);
+}
+
+// Custom edits for including btn-groups within btn-groups (useful for including dropdown buttons within a btn-group)
+.btn-group > .btn-group {
+  float: left;
+}
+.btn-group > .btn-group:not(:first-child):not(:last-child) > .btn {
+  border-radius: 0;
+}
+.btn-group > .btn-group:first-child {
+  > .btn:last-child,
+  > .dropdown-toggle {
+    .border-right-radius(0);
+  }
+}
+.btn-group > .btn-group:last-child > .btn:first-child {
+  .border-left-radius(0);
+}
+
+// On active and open, don't show outline
+.btn-group .dropdown-toggle:active,
+.btn-group.open .dropdown-toggle {
+  outline: 0;
+}
+
+
+// Sizing
+//
+// Remix the default button sizing classes into new ones for easier manipulation.
+
+.btn-group-xs > .btn { .btn-xs(); }
+.btn-group-sm > .btn { .btn-sm(); }
+.btn-group-lg > .btn { .btn-lg(); }
+
+
+// Split button dropdowns
+// ----------------------
+
+// Give the line between buttons some depth
+.btn-group > .btn + .dropdown-toggle {
+  padding-left: 8px;
+  padding-right: 8px;
+}
+.btn-group > .btn-lg + .dropdown-toggle {
+  padding-left: 12px;
+  padding-right: 12px;
+}
+
+// The clickable button for toggling the menu
+// Remove the gradient and set the same inset shadow as the :active state
+.btn-group.open .dropdown-toggle {
+  .box-shadow(inset 0 3px 5px rgba(0,0,0,.125));
+}
+
+
+// Reposition the caret
+.btn .caret {
+  margin-left: 0;
+}
+// Carets in other button sizes
+.btn-lg .caret {
+  border-width: @caret-width-large @caret-width-large 0;
+  border-bottom-width: 0;
+}
+// Upside down carets for .dropup
+.dropup .btn-lg .caret {
+  border-width: 0 @caret-width-large @caret-width-large;
+}
+
+
+// Vertical button groups
+// ----------------------
+
+.btn-group-vertical {
+  > .btn,
+  > .btn-group {
+    display: block;
+    float: none;
+    width: 100%;
+    max-width: 100%;
+  }
+
+  // Clear floats so dropdown menus can be properly placed
+  > .btn-group {
+    .clearfix();
+    > .btn {
+      float: none;
+    }
+  }
+
+  > .btn + .btn,
+  > .btn + .btn-group,
+  > .btn-group + .btn,
+  > .btn-group + .btn-group {
+    margin-top: -1px;
+    margin-left: 0;
+  }
+}
+
+.btn-group-vertical > .btn {
+  &:not(:first-child):not(:last-child) {
+    border-radius: 0;
+  }
+  &:first-child:not(:last-child) {
+    border-top-right-radius: @border-radius-base;
+    .border-bottom-radius(0);
+  }
+  &:last-child:not(:first-child) {
+    border-bottom-left-radius: @border-radius-base;
+    .border-top-radius(0);
+  }
+}
+.btn-group-vertical > .btn-group:not(:first-child):not(:last-child) > .btn {
+  border-radius: 0;
+}
+.btn-group-vertical > .btn-group:first-child {
+  > .btn:last-child,
+  > .dropdown-toggle {
+    .border-bottom-radius(0);
+  }
+}
+.btn-group-vertical > .btn-group:last-child > .btn:first-child {
+  .border-top-radius(0);
+}
+
+
+
+// Justified button groups
+// ----------------------
+
+.btn-group-justified {
+  display: table;
+  width: 100%;
+  table-layout: fixed;
+  border-collapse: separate;
+  .btn {
+    float: none;
+    display: table-cell;
+    width: 1%;
+  }
+}
+
+
+// Checkbox and radio options
+[data-toggle="buttons"] > .btn > input[type="radio"],
+[data-toggle="buttons"] > .btn > input[type="checkbox"] {
+  display: none;
+}
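+// Illustrative usage (toggling itself is handled by Bootstrap's button.js):
+//   <div class="btn-group" data-toggle="buttons">
+//     <label class="btn btn-default"><input type="checkbox"> Option</label>
+//   </div>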
diff --git a/client/galaxy/style/less/bootstrap/buttons.less b/client/galaxy/style/less/bootstrap/buttons.less
new file mode 100644
index 0000000..5d60d39
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/buttons.less
@@ -0,0 +1,160 @@
+//
+// Buttons
+// --------------------------------------------------
+
+
+// Base styles
+// --------------------------------------------------
+
+// Core styles
+.btn {
+  display: inline-block;
+  padding: @padding-base-vertical @padding-base-horizontal;
+  margin-bottom: 0; // For input.btn
+  font-size: @font-size-base;
+  font-weight: @btn-font-weight;
+  line-height: @line-height-base;
+  text-align: center;
+  vertical-align: middle;
+  cursor: pointer;
+  border: 1px solid transparent;
+  border-radius: @border-radius-base;
+  white-space: nowrap;
+  .user-select(none);
+
+  &:focus {
+    .tab-focus();
+  }
+
+  &:hover,
+  &:focus {
+    color: @btn-default-color;
+    text-decoration: none;
+  }
+
+  &:active,
+  &.active {
+    outline: 0;
+    background-image: none;
+    .box-shadow(inset 0 3px 5px rgba(0,0,0,.125));
+  }
+
+  &.disabled,
+  &[disabled],
+  fieldset[disabled] & {
+    cursor: not-allowed;
+    pointer-events: none; // Future-proof disabling of clicks
+    .opacity(.65);
+    .box-shadow(none);
+  }
+
+}
+
+
+// Alternate buttons
+// --------------------------------------------------
+
+.btn-default {
+  .button-variant(@btn-default-color; @btn-default-bg; @btn-default-border);
+}
+.btn-primary {
+  .button-variant(@btn-primary-color; @btn-primary-bg; @btn-primary-border);
+}
+// Warning appears as orange
+.btn-warning {
+  .button-variant(@btn-warning-color; @btn-warning-bg; @btn-warning-border);
+}
+// Danger and error appear as red
+.btn-danger {
+  .button-variant(@btn-danger-color; @btn-danger-bg; @btn-danger-border);
+}
+// Success appears as green
+.btn-success {
+  .button-variant(@btn-success-color; @btn-success-bg; @btn-success-border);
+}
+// Info appears as blue-green
+.btn-info {
+  .button-variant(@btn-info-color; @btn-info-bg; @btn-info-border);
+}
+
+
+// Link buttons
+// -------------------------
+
+// Make a button look and behave like a link
+.btn-link {
+  color: @link-color;
+  font-weight: normal;
+  cursor: pointer;
+  border-radius: 0;
+
+  &,
+  &:active,
+  &[disabled],
+  fieldset[disabled] & {
+    background-color: transparent;
+    .box-shadow(none);
+  }
+  &,
+  &:hover,
+  &:focus,
+  &:active {
+    border-color: transparent;
+  }
+  &:hover,
+  &:focus {
+    color: @link-hover-color;
+    text-decoration: underline;
+    background-color: transparent;
+  }
+  &[disabled],
+  fieldset[disabled] & {
+    &:hover,
+    &:focus {
+      color: @btn-link-disabled-color;
+      text-decoration: none;
+    }
+  }
+}
+
+
+// Button Sizes
+// --------------------------------------------------
+
+.btn-lg {
+  // line-height: ensure even-numbered height of button next to large input
+  .button-size(@padding-large-vertical; @padding-large-horizontal; @font-size-large; @line-height-large; @border-radius-large);
+}
+.btn-sm,
+.btn-xs {
+  // line-height: ensure proper height of button next to small input
+  .button-size(@padding-small-vertical; @padding-small-horizontal; @font-size-small; @line-height-small; @border-radius-small);
+}
+.btn-xs {
+  padding: 1px 5px;
+}
+
+
+// Block button
+// --------------------------------------------------
+
+.btn-block {
+  display: block;
+  width: 100%;
+  padding-left: 0;
+  padding-right: 0;
+}
+
+// Vertically space out multiple block buttons
+.btn-block + .btn-block {
+  margin-top: 5px;
+}
+
+// Specificity overrides
+input[type="submit"],
+input[type="reset"],
+input[type="button"] {
+  &.btn-block {
+    width: 100%;
+  }
+}
diff --git a/client/galaxy/style/less/bootstrap/carousel.less b/client/galaxy/style/less/bootstrap/carousel.less
new file mode 100644
index 0000000..de5e4c9
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/carousel.less
@@ -0,0 +1,209 @@
+//
+// Carousel
+// --------------------------------------------------
+
+
+// Wrapper for the slide container and indicators
+.carousel {
+  position: relative;
+}
+
+.carousel-inner {
+  position: relative;
+  overflow: hidden;
+  width: 100%;
+
+  > .item {
+    display: none;
+    position: relative;
+    .transition(.6s ease-in-out left);
+
+    // Account for jankitude on images
+    > img,
+    > a > img {
+      .img-responsive();
+      line-height: 1;
+    }
+  }
+
+  > .active,
+  > .next,
+  > .prev { display: block; }
+
+  > .active {
+    left: 0;
+  }
+
+  > .next,
+  > .prev {
+    position: absolute;
+    top: 0;
+    width: 100%;
+  }
+
+  > .next {
+    left: 100%;
+  }
+  > .prev {
+    left: -100%;
+  }
+  > .next.left,
+  > .prev.right {
+    left: 0;
+  }
+
+  > .active.left {
+    left: -100%;
+  }
+  > .active.right {
+    left: 100%;
+  }
+
+}
+
+// Left/right controls for nav
+// ---------------------------
+
+.carousel-control {
+  position: absolute;
+  top: 0;
+  left: 0;
+  bottom: 0;
+  width: @carousel-control-width;
+  .opacity(@carousel-control-opacity);
+  font-size: @carousel-control-font-size;
+  color: @carousel-control-color;
+  text-align: center;
+  text-shadow: @carousel-text-shadow;
+  // We can't have this transition here because webkit cancels the carousel
+  // animation if you trip this while in the middle of another animation.
+
+  // Set gradients for backgrounds
+  &.left {
+    #gradient > .horizontal(@start-color: rgba(0,0,0,.5); @end-color: rgba(0,0,0,.0001));
+  }
+  &.right {
+    left: auto;
+    right: 0;
+    #gradient > .horizontal(@start-color: rgba(0,0,0,.0001); @end-color: rgba(0,0,0,.5));
+  }
+
+  // Hover/focus state
+  &:hover,
+  &:focus {
+    color: @carousel-control-color;
+    text-decoration: none;
+    .opacity(.9);
+  }
+
+  // Toggles
+  .icon-prev,
+  .icon-next,
+  .glyphicon-chevron-left,
+  .glyphicon-chevron-right {
+    position: absolute;
+    top: 50%;
+    left: 50%;
+    z-index: 5;
+    display: inline-block;
+  }
+  .icon-prev,
+  .icon-next {
+    width:  20px;
+    height: 20px;
+    margin-top: -10px;
+    margin-left: -10px;
+    font-family: serif;
+  }
+
+  .icon-prev {
+    &:before {
+      content: '\2039';// SINGLE LEFT-POINTING ANGLE QUOTATION MARK (U+2039)
+    }
+  }
+  .icon-next {
+    &:before {
+      content: '\203a';// SINGLE RIGHT-POINTING ANGLE QUOTATION MARK (U+203A)
+    }
+  }
+}
+
+// Optional indicator pips
+//
+// Add an unordered list with the following class and add a list item for each
+// slide your carousel holds.
+
+.carousel-indicators {
+  position: absolute;
+  bottom: 10px;
+  left: 50%;
+  z-index: 15;
+  width: 60%;
+  margin-left: -30%;
+  padding-left: 0;
+  list-style: none;
+  text-align: center;
+
+  li {
+    display: inline-block;
+    width:  10px;
+    height: 10px;
+    margin: 1px;
+    text-indent: -999px;
+    border: 1px solid @carousel-indicator-border-color;
+    border-radius: 10px;
+    cursor: pointer;
+  }
+  .active {
+    margin: 0;
+    width:  12px;
+    height: 12px;
+    background-color: @carousel-indicator-active-bg;
+  }
+}
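+// Illustrative usage (ids/targets are placeholders):
+//   <ol class="carousel-indicators">
+//     <li data-target="#myCarousel" data-slide-to="0" class="active"></li>
+//     <li data-target="#myCarousel" data-slide-to="1"></li>
+//   </ol>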
+
+// Optional captions
+// -----------------------------
+// Hidden by default for smaller viewports
+.carousel-caption {
+  position: absolute;
+  left: 15%;
+  right: 15%;
+  bottom: 20px;
+  z-index: 10;
+  padding-top: 20px;
+  padding-bottom: 20px;
+  color: @carousel-caption-color;
+  text-align: center;
+  text-shadow: @carousel-text-shadow;
+  & .btn {
+    text-shadow: none; // No shadow for button elements in carousel-caption
+  }
+}
+
+
+// Scale up controls for tablets and up
+@media screen and (min-width: @screen-sm) {
+
+  // Scale up the controls a smidge
+  .carousel-control .icon-prev,
+  .carousel-control .icon-next {
+    width: 30px;
+    height: 30px;
+    margin-top: -15px;
+    margin-left: -15px;
+    font-size: 30px;
+  }
+
+  // Show and left align the captions
+  .carousel-caption {
+    left: 20%;
+    right: 20%;
+    padding-bottom: 30px;
+  }
+
+  // Move up the indicators
+  .carousel-indicators {
+    bottom: 20px;
+  }
+}
diff --git a/client/galaxy/style/less/bootstrap/close.less b/client/galaxy/style/less/bootstrap/close.less
new file mode 100644
index 0000000..9b4e74f
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/close.less
@@ -0,0 +1,33 @@
+//
+// Close icons
+// --------------------------------------------------
+
+
+.close {
+  float: right;
+  font-size: (@font-size-base * 1.5);
+  font-weight: @close-font-weight;
+  line-height: 1;
+  color: @close-color;
+  text-shadow: @close-text-shadow;
+  .opacity(.2);
+
+  &:hover,
+  &:focus {
+    color: @close-color;
+    text-decoration: none;
+    cursor: pointer;
+    .opacity(.5);
+  }
+
+  // Additional properties for button version
+  // iOS requires the button element instead of an anchor tag.
+  // If you want the anchor version, it requires `href="#"`.
+  button& {
+    padding: 0;
+    cursor: pointer;
+    background: transparent;
+    border: 0;
+    -webkit-appearance: none;
+  }
+}
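+// Illustrative usage (button version, per the note above):
+//   <button type="button" class="close" aria-hidden="true">&times;</button>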
diff --git a/client/galaxy/style/less/bootstrap/code.less b/client/galaxy/style/less/bootstrap/code.less
new file mode 100644
index 0000000..d6661d2
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/code.less
@@ -0,0 +1,56 @@
+//
+// Code (inline and block)
+// --------------------------------------------------
+
+
+// Inline and block code styles
+code,
+pre {
+  font-family: @font-family-monospace;
+}
+
+// Inline code
+code {
+  padding: 2px 4px;
+  font-size: 90%;
+  color: @code-color;
+  background-color: @code-bg;
+  white-space: nowrap;
+  border-radius: @border-radius-base;
+}
+
+// Blocks of code
+pre {
+  display: block;
+  padding: ((@line-height-computed - 1) / 2);
+  margin: 0 0 (@line-height-computed / 2);
+  font-size: (@font-size-base - 1); // 14px to 13px
+  line-height: @line-height-base;
+  word-break: break-all;
+  word-wrap: break-word;
+  color: @pre-color;
+  background-color: @pre-bg;
+  border: 1px solid @pre-border-color;
+  border-radius: @border-radius-base;
+
+  // Make prettyprint styles more spaced out for readability
+  &.prettyprint {
+    margin-bottom: @line-height-computed;
+  }
+
+  // Account for some code outputs that place code tags in pre tags
+  code {
+    padding: 0;
+    font-size: inherit;
+    color: inherit;
+    white-space: pre-wrap;
+    background-color: transparent;
+    border: 0;
+  }
+}
+
+// Enable scrollable blocks of code
+.pre-scrollable {
+  max-height: @pre-scrollable-max-height;
+  overflow-y: scroll;
+}
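+// Illustrative usage: <code>run_tool()</code> for inline snippets; wrap longer
+// listings in <pre class="pre-scrollable">...</pre> to cap the height at
+// @pre-scrollable-max-height and scroll the overflow.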
diff --git a/client/galaxy/style/less/bootstrap/component-animations.less b/client/galaxy/style/less/bootstrap/component-animations.less
new file mode 100644
index 0000000..1efe45e
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/component-animations.less
@@ -0,0 +1,29 @@
+//
+// Component animations
+// --------------------------------------------------
+
+// Heads up!
+//
+// We don't use the `.opacity()` mixin here since it causes a bug with text
+// fields in IE7-8. Source: https://github.com/twitter/bootstrap/pull/3552.
+
+.fade {
+  opacity: 0;
+  .transition(opacity .15s linear);
+  &.in {
+    opacity: 1;
+  }
+}
+
+.collapse {
+  display: none;
+  &.in {
+    display: block;
+  }
+}
+.collapsing {
+  position: relative;
+  height: 0;
+  overflow: hidden;
+  .transition(height .35s ease);
+}
diff --git a/client/galaxy/style/less/bootstrap/dropdowns.less b/client/galaxy/style/less/bootstrap/dropdowns.less
new file mode 100644
index 0000000..96af127
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/dropdowns.less
@@ -0,0 +1,193 @@
+//
+// Dropdown menus
+// --------------------------------------------------
+
+
+// Dropdown arrow/caret
+.caret {
+  display: inline-block;
+  width: 0;
+  height: 0;
+  margin-left: 2px;
+  vertical-align: middle;
+  border-top:   @caret-width-base solid @dropdown-caret-color;
+  border-right: @caret-width-base solid transparent;
+  border-left:  @caret-width-base solid transparent;
+  // Firefox fix for https://github.com/twbs/bootstrap/issues/9538. Once fixed,
+  // we can just straight up remove this.
+  border-bottom: 0 dotted;
+  content: "";
+}
+
+// The dropdown wrapper (div)
+.dropdown {
+  position: relative;
+}
+
+// Prevent the focus on the dropdown toggle when closing dropdowns
+.dropdown-toggle:focus {
+  outline: 0;
+}
+
+// The dropdown menu (ul)
+.dropdown-menu {
+  position: absolute;
+  top: 100%;
+  left: 0;
+  z-index: @zindex-dropdown;
+  display: none; // none by default, but block on "open" of the menu
+  float: left;
+  min-width: 160px;
+  padding: 5px 0;
+  margin: 2px 0 0; // override default ul
+  list-style: none;
+  font-size: @font-size-base;
+  background-color: @dropdown-bg;
+  border: 1px solid @dropdown-fallback-border; // IE8 fallback
+  border: 1px solid @dropdown-border;
+  border-radius: @border-radius-base;
+  .box-shadow(0 6px 12px rgba(0,0,0,.175));
+  background-clip: padding-box;
+
+  // Aligns the dropdown menu to right
+  &.pull-right {
+    right: 0;
+    left: auto;
+  }
+
+  // Dividers (basically an hr) within the dropdown
+  .divider {
+    .nav-divider(@dropdown-divider-bg);
+  }
+
+  // Links within the dropdown menu
+  > li > a {
+    display: block;
+    padding: 3px 20px;
+    clear: both;
+    font-weight: normal;
+    line-height: @line-height-base;
+    color: @dropdown-link-color;
+    white-space: nowrap; // prevent links from randomly breaking onto new lines
+  }
+}
+
+// Hover/Focus state
+.dropdown-menu > li > a {
+  &:hover,
+  &:focus {
+    text-decoration: none;
+    color: @dropdown-link-hover-color;
+    background-color: @dropdown-link-hover-bg;
+  }
+}
+
+// Active state
+.dropdown-menu > .active > a {
+  &,
+  &:hover,
+  &:focus {
+    color: @dropdown-link-active-color;
+    text-decoration: none;
+    outline: 0;
+    background-color: @dropdown-link-active-bg;
+  }
+}
+
+// Disabled state
+//
+// Gray out text and ensure the hover/focus state remains gray
+
+.dropdown-menu > .disabled > a {
+  &,
+  &:hover,
+  &:focus {
+    color: @dropdown-link-disabled-color;
+  }
+}
+// Nuke hover/focus effects
+.dropdown-menu > .disabled > a {
+  &:hover,
+  &:focus {
+    text-decoration: none;
+    background-color: transparent;
+    background-image: none; // Remove CSS gradient
+    .reset-filter();
+    cursor: not-allowed;
+  }
+}
+
+// Open state for the dropdown
+.open {
+  // Show the menu
+  > .dropdown-menu {
+    display: block;
+  }
+
+  // Remove the outline when :focus is triggered
+  > a {
+    outline: 0;
+  }
+}
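+// Illustrative usage (dropdown.js toggles .open on the wrapper):
+//   <div class="dropdown">
+//     <a class="dropdown-toggle" data-toggle="dropdown" href="#">Menu</a>
+//     <ul class="dropdown-menu"><li><a href="#">Item</a></li></ul>
+//   </div>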
+
+// Dropdown section headers
+.dropdown-header {
+  display: block;
+  padding: 3px 20px;
+  font-size: @font-size-small;
+  line-height: @line-height-base;
+  color: @dropdown-header-color;
+}
+
+// Backdrop to catch body clicks on mobile, etc.
+.dropdown-backdrop {
+  position: fixed;
+  left: 0;
+  right: 0;
+  bottom: 0;
+  top: 0;
+  z-index: @zindex-dropdown - 10;
+}
+
+// Right aligned dropdowns
+.pull-right > .dropdown-menu {
+  right: 0;
+  left: auto;
+}
+
+// Allow for dropdowns to go bottom up (aka, dropup-menu)
+//
+// Just add .dropup after the standard .dropdown class and you're set.
+// TODO: abstract this so that the navbar fixed styles are not placed here?
+
+.dropup,
+.navbar-fixed-bottom .dropdown {
+  // Reverse the caret
+  .caret {
+    // Firefox fix for https://github.com/twbs/bootstrap/issues/9538. Once this
+    // gets fixed, restore `border-top: 0;`.
+    border-top: 0 dotted;
+    border-bottom: 4px solid @dropdown-caret-color;
+    content: "";
+  }
+  // Different positioning for bottom up menu
+  .dropdown-menu {
+    top: auto;
+    bottom: 100%;
+    margin-bottom: 1px;
+  }
+}
+
+
+// Component alignment
+//
+// Reiterate per navbar.less and the modified component alignment there.
+
+@media (min-width: @grid-float-breakpoint) {
+  .navbar-right {
+    .dropdown-menu {
+      .pull-right > .dropdown-menu();
+    }
+  }
+}
+
diff --git a/client/galaxy/style/less/bootstrap/forms.less b/client/galaxy/style/less/bootstrap/forms.less
new file mode 100644
index 0000000..8966198
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/forms.less
@@ -0,0 +1,362 @@
+//
+// Forms
+// --------------------------------------------------
+
+
+// Normalize non-controls
+//
+// Restyle and baseline non-control form elements.
+
+fieldset {
+  padding: 0;
+  margin: 0;
+  border: 0;
+}
+
+legend {
+  display: block;
+  width: 100%;
+  padding: 0;
+  margin-bottom: @line-height-computed;
+  font-size: (@font-size-base * 1.5);
+  line-height: inherit;
+  color: @legend-color;
+  border: 0;
+  border-bottom: 1px solid @legend-border-color;
+}
+
+label {
+  display: inline-block;
+  margin-bottom: 5px;
+  font-weight: bold;
+}
+
+
+// Normalize form controls
+
+// Override content-box in Normalize (* isn't specific enough)
+input[type="search"] {
+  .box-sizing(border-box);
+}
+
+// Position radios and checkboxes better
+input[type="radio"],
+input[type="checkbox"] {
+  margin: 4px 0 0;
+  margin-top: 1px \9; /* IE8-9 */
+  line-height: normal;
+}
+
+// Set the height of select and file controls to match text inputs
+input[type="file"] {
+  display: block;
+}
+
+// Make multiple select elements height not fixed
+select[multiple],
+select[size] {
+  height: auto;
+}
+
+// Fix optgroup Firefox bug per https://github.com/twbs/bootstrap/issues/7611
+select optgroup {
+  font-size: inherit;
+  font-style: inherit;
+  font-family: inherit;
+}
+
+// Focus for select, file, radio, and checkbox
+input[type="file"]:focus,
+input[type="radio"]:focus,
+input[type="checkbox"]:focus {
+  .tab-focus();
+}
+
+// Fix for Chrome number input
+// Setting certain font-sizes causes the `I` bar to appear on hover of the bottom increment button.
+// See https://github.com/twbs/bootstrap/issues/8350 for more.
+input[type="number"] {
+  &::-webkit-outer-spin-button,
+  &::-webkit-inner-spin-button {
+    height: auto;
+  }
+}
+
+// Adjust output element
+output {
+  display: block;
+  padding-top: (@padding-base-vertical + 1);
+  font-size: @font-size-base;
+  line-height: @line-height-base;
+  color: @input-color;
+  vertical-align: middle;
+}
+
+// Placeholder
+//
+// Placeholder text gets special styles because browsers invalidate entire
+// lines when they don't understand a selector.
+.form-control {
+  .placeholder();
+}
+
+
+// Common form controls
+//
+// Shared size and type resets for form controls. Apply `.form-control` to any
+// of the following form controls:
+//
+// select
+// textarea
+// input[type="text"]
+// input[type="password"]
+// input[type="datetime"]
+// input[type="datetime-local"]
+// input[type="date"]
+// input[type="month"]
+// input[type="time"]
+// input[type="week"]
+// input[type="number"]
+// input[type="email"]
+// input[type="url"]
+// input[type="search"]
+// input[type="tel"]
+// input[type="color"]
+
+.form-control {
+  display: block;
+  width: 100%;
+  height: @input-height-base; // Make inputs at least the height of their button counterpart (base line-height + padding + border)
+  padding: @padding-base-vertical @padding-base-horizontal;
+  font-size: @font-size-base;
+  line-height: @line-height-base;
+  color: @input-color;
+  vertical-align: middle;
+  background-color: @input-bg;
+  border: 1px solid @input-border;
+  border-radius: @input-border-radius;
+  .box-shadow(inset 0 1px 1px rgba(0,0,0,.075));
+  .transition(~"border-color ease-in-out .15s, box-shadow ease-in-out .15s");
+
+  // Customize the `:focus` state to imitate native WebKit styles.
+  .form-control-focus();
+
+  // Disabled and read-only inputs
+  // Note: HTML5 says that controls under a fieldset > legend:first-child won't
+  // be disabled if the fieldset is disabled. Due to implementation difficulty,
+  // we don't honor that edge case; we style them as disabled anyway.
+  &[disabled],
+  &[readonly],
+  fieldset[disabled] & {
+    cursor: not-allowed;
+    background-color: @input-bg-disabled;
+  }
+
+  // Reset height for `textarea`s
+  textarea& {
+    height: auto;
+  }
+}
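+// Illustrative usage: <input type="text" class="form-control"
+// placeholder="Search"> gets the full-width, focus, and disabled treatments
+// defined above; a <textarea class="form-control"> gets auto height.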
+
+
+// Form groups
+//
+// Designed to help with the organization and spacing of vertical forms. For
+// horizontal forms, use the predefined grid classes.
+
+.form-group {
+  margin-bottom: 15px;
+}
+
+
+// Checkboxes and radios
+//
+// Indent the labels to position radios/checkboxes as hanging controls.
+
+.radio,
+.checkbox {
+  display: block;
+  min-height: @line-height-computed; // clear the floating input if there is no label text
+  margin-top: 10px;
+  margin-bottom: 10px;
+  padding-left: 20px;
+  vertical-align: middle;
+  label {
+    display: inline;
+    margin-bottom: 0;
+    font-weight: normal;
+    cursor: pointer;
+  }
+}
+.radio input[type="radio"],
+.radio-inline input[type="radio"],
+.checkbox input[type="checkbox"],
+.checkbox-inline input[type="checkbox"] {
+  float: left;
+  margin-left: -20px;
+}
+.radio + .radio,
+.checkbox + .checkbox {
+  margin-top: -5px; // Move up sibling radios or checkboxes for tighter spacing
+}
+
+// Radios and checkboxes on same line
+.radio-inline,
+.checkbox-inline {
+  display: inline-block;
+  padding-left: 20px;
+  margin-bottom: 0;
+  vertical-align: middle;
+  font-weight: normal;
+  cursor: pointer;
+}
+.radio-inline + .radio-inline,
+.checkbox-inline + .checkbox-inline {
+  margin-top: 0;
+  margin-left: 10px; // space out consecutive inline controls
+}
+
+// Apply same disabled cursor tweak as for inputs
+//
+// Note: Neither radios nor checkboxes can be readonly.
+input[type="radio"],
+input[type="checkbox"],
+.radio,
+.radio-inline,
+.checkbox,
+.checkbox-inline {
+  &[disabled],
+  fieldset[disabled] & {
+    cursor: not-allowed;
+  }
+}
+
+// Form control sizing
+.input-sm {
+  .input-size(@input-height-small; @padding-small-vertical; @padding-small-horizontal; @font-size-small; @line-height-small; @border-radius-small);
+}
+
+.input-lg {
+  .input-size(@input-height-large; @padding-large-vertical; @padding-large-horizontal; @font-size-large; @line-height-large; @border-radius-large);
+}
+
+
+// Form control feedback states
+//
+// Apply contextual and semantic states to individual form controls.
+
+// Warning
+.has-warning {
+  .form-control-validation(@state-warning-text; @state-warning-text; @state-warning-bg);
+}
+// Error
+.has-error {
+  .form-control-validation(@state-danger-text; @state-danger-text; @state-danger-bg);
+}
+// Success
+.has-success {
+  .form-control-validation(@state-success-text; @state-success-text; @state-success-bg);
+}
+
+
+// Static form control text
+//
+// Apply class to a `p` element to make any string of text align with labels in
+// a horizontal form layout.
+
+.form-control-static {
+  margin-bottom: 0; // Remove default margin from `p`
+  padding-top: (@padding-base-vertical + 1);
+}
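+// Illustrative usage: <p class="form-control-static">read-only value</p>
+// sits flush with an adjacent <input class="form-control"> in the same row.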
+
+
+// Help text
+//
+// Apply to any element you wish to create light text for placement immediately
+// below a form control. Use for general help, formatting, or instructional text.
+
+.help-block {
+  display: block; // account for any element using help-block
+  margin-top: 5px;
+  margin-bottom: 10px;
+  color: lighten(@text-color, 25%); // lighten the text some for contrast
+}
+
+
+
+// Inline forms
+//
+// Make forms appear inline(-block) by adding the `.form-inline` class. Inline
+// forms begin stacked on extra small (mobile) devices and then go inline when
+// viewports reach <768px.
+//
+// Requires wrapping inputs and labels with `.form-group` for proper display of
+// default HTML form controls and our custom form controls (e.g., input groups).
+//
+// Heads up! This is mixin-ed into `.navbar-form` in navbars.less.
+
+.form-inline {
+
+  // Kick in the inline
+  @media (min-width: @screen-sm) {
+    // Inline-block all the things for "inline"
+    .form-group  {
+      display: inline-block;
+      margin-bottom: 0;
+      vertical-align: middle;
+    }
+
+    // In navbar-form, allow folks to *not* use `.form-group`
+    .form-control {
+      display: inline-block;
+    }
+
+    // Remove default margin on radios/checkboxes that were used for stacking, and
+    // then undo the floating of radios and checkboxes to match (which also avoids
+    // a bug in WebKit: https://github.com/twbs/bootstrap/issues/1969).
+    .radio,
+    .checkbox {
+      display: inline-block;
+      margin-top: 0;
+      margin-bottom: 0;
+      padding-left: 0;
+    }
+    .radio input[type="radio"],
+    .checkbox input[type="checkbox"] {
+      float: none;
+      margin-left: 0;
+    }
+  }
+}
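+// Illustrative usage:
+//   <form class="form-inline">
+//     <div class="form-group">
+//       <label>Name</label> <input type="text" class="form-control">
+//     </div>
+//     <button type="submit" class="btn btn-default">Go</button>
+//   </form>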
+
+
+// Horizontal forms
+//
+// Horizontal forms are built on grid classes and allow you to create forms with
+// labels on the left and inputs on the right.
+
+.form-horizontal {
+
+  // Consistent vertical alignment of labels, radios, and checkboxes
+  .control-label,
+  .radio,
+  .checkbox,
+  .radio-inline,
+  .checkbox-inline {
+    margin-top: 0;
+    margin-bottom: 0;
+    padding-top: (@padding-base-vertical + 1); // Default padding plus a border
+  }
+
+  // Make form groups behave like rows
+  .form-group {
+    .make-row();
+  }
+
+  // Only right align form labels here when the columns stop stacking
+  @media (min-width: @screen-sm) {
+    .control-label {
+      text-align: right;
+    }
+  }
+}
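+// Illustrative usage (column classes come from grid.less):
+//   <form class="form-horizontal">
+//     <div class="form-group">
+//       <label class="col-sm-2 control-label">Name</label>
+//       <div class="col-sm-10"><input type="text" class="form-control"></div>
+//     </div>
+//   </form>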
diff --git a/client/galaxy/style/less/bootstrap/glyphicons.less b/client/galaxy/style/less/bootstrap/glyphicons.less
new file mode 100644
index 0000000..7b66e6b
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/glyphicons.less
@@ -0,0 +1,236 @@
+//
+// Glyphicons for Bootstrap
+//
+// Since icons are fonts, they can be placed anywhere text is placed and are
+// thus automatically sized to match the surrounding child. To use, create an
+// inline element with the appropriate classes, like so:
+//
+// <a href="#"><span class="glyphicon glyphicon-star"></span> Star</a>
+
+// Import the fonts
+@font-face {
+  font-family: 'Glyphicons Halflings';
+  src: url('@{icon-font-path}@{icon-font-name}.eot');
+  src: url('@{icon-font-path}@{icon-font-name}.eot?#iefix') format('embedded-opentype'),
+       url('@{icon-font-path}@{icon-font-name}.woff') format('woff'),
+       url('@{icon-font-path}@{icon-font-name}.ttf') format('truetype'),
+       url('@{icon-font-path}@{icon-font-name}.svg#glyphicons-halflingsregular') format('svg');
+}
+
+// Catchall baseclass
+.glyphicon {
+  position: relative;
+  top: 1px;
+  display: inline-block;
+  font-family: 'Glyphicons Halflings';
+  font-style: normal;
+  font-weight: normal;
+  line-height: 1;
+  -webkit-font-smoothing: antialiased;
+
+  &:empty{
+    width: 1em;
+  }
+}
+
+// Individual icons
+.glyphicon-asterisk               { &:before { content: "\2a"; } }
+.glyphicon-plus                   { &:before { content: "\2b"; } }
+.glyphicon-euro                   { &:before { content: "\20ac"; } }
+.glyphicon-minus                  { &:before { content: "\2212"; } }
+.glyphicon-cloud                  { &:before { content: "\2601"; } }
+.glyphicon-envelope               { &:before { content: "\2709"; } }
+.glyphicon-pencil                 { &:before { content: "\270f"; } }
+.glyphicon-glass                  { &:before { content: "\e001"; } }
+.glyphicon-music                  { &:before { content: "\e002"; } }
+.glyphicon-search                 { &:before { content: "\e003"; } }
+.glyphicon-heart                  { &:before { content: "\e005"; } }
+.glyphicon-star                   { &:before { content: "\e006"; } }
+.glyphicon-star-empty             { &:before { content: "\e007"; } }
+.glyphicon-user                   { &:before { content: "\e008"; } }
+.glyphicon-film                   { &:before { content: "\e009"; } }
+.glyphicon-th-large               { &:before { content: "\e010"; } }
+.glyphicon-th                     { &:before { content: "\e011"; } }
+.glyphicon-th-list                { &:before { content: "\e012"; } }
+.glyphicon-ok                     { &:before { content: "\e013"; } }
+.glyphicon-remove                 { &:before { content: "\e014"; } }
+.glyphicon-zoom-in                { &:before { content: "\e015"; } }
+.glyphicon-zoom-out               { &:before { content: "\e016"; } }
+.glyphicon-off                    { &:before { content: "\e017"; } }
+.glyphicon-signal                 { &:before { content: "\e018"; } }
+.glyphicon-cog                    { &:before { content: "\e019"; } }
+.glyphicon-trash                  { &:before { content: "\e020"; } }
+.glyphicon-home                   { &:before { content: "\e021"; } }
+.glyphicon-file                   { &:before { content: "\e022"; } }
+.glyphicon-time                   { &:before { content: "\e023"; } }
+.glyphicon-road                   { &:before { content: "\e024"; } }
+.glyphicon-download-alt           { &:before { content: "\e025"; } }
+.glyphicon-download               { &:before { content: "\e026"; } }
+.glyphicon-upload                 { &:before { content: "\e027"; } }
+.glyphicon-inbox                  { &:before { content: "\e028"; } }
+.glyphicon-play-circle            { &:before { content: "\e029"; } }
+.glyphicon-repeat                 { &:before { content: "\e030"; } }
+.glyphicon-refresh                { &:before { content: "\e031"; } }
+.glyphicon-list-alt               { &:before { content: "\e032"; } }
+.glyphicon-flag                   { &:before { content: "\e034"; } }
+.glyphicon-headphones             { &:before { content: "\e035"; } }
+.glyphicon-volume-off             { &:before { content: "\e036"; } }
+.glyphicon-volume-down            { &:before { content: "\e037"; } }
+.glyphicon-volume-up              { &:before { content: "\e038"; } }
+.glyphicon-qrcode                 { &:before { content: "\e039"; } }
+.glyphicon-barcode                { &:before { content: "\e040"; } }
+.glyphicon-tag                    { &:before { content: "\e041"; } }
+.glyphicon-tags                   { &:before { content: "\e042"; } }
+.glyphicon-book                   { &:before { content: "\e043"; } }
+.glyphicon-print                  { &:before { content: "\e045"; } }
+.glyphicon-font                   { &:before { content: "\e047"; } }
+.glyphicon-bold                   { &:before { content: "\e048"; } }
+.glyphicon-italic                 { &:before { content: "\e049"; } }
+.glyphicon-text-height            { &:before { content: "\e050"; } }
+.glyphicon-text-width             { &:before { content: "\e051"; } }
+.glyphicon-align-left             { &:before { content: "\e052"; } }
+.glyphicon-align-center           { &:before { content: "\e053"; } }
+.glyphicon-align-right            { &:before { content: "\e054"; } }
+.glyphicon-align-justify          { &:before { content: "\e055"; } }
+.glyphicon-list                   { &:before { content: "\e056"; } }
+.glyphicon-indent-left            { &:before { content: "\e057"; } }
+.glyphicon-indent-right           { &:before { content: "\e058"; } }
+.glyphicon-facetime-video         { &:before { content: "\e059"; } }
+.glyphicon-picture                { &:before { content: "\e060"; } }
+.glyphicon-map-marker             { &:before { content: "\e062"; } }
+.glyphicon-adjust                 { &:before { content: "\e063"; } }
+.glyphicon-tint                   { &:before { content: "\e064"; } }
+.glyphicon-edit                   { &:before { content: "\e065"; } }
+.glyphicon-share                  { &:before { content: "\e066"; } }
+.glyphicon-check                  { &:before { content: "\e067"; } }
+.glyphicon-move                   { &:before { content: "\e068"; } }
+.glyphicon-step-backward          { &:before { content: "\e069"; } }
+.glyphicon-fast-backward          { &:before { content: "\e070"; } }
+.glyphicon-backward               { &:before { content: "\e071"; } }
+.glyphicon-play                   { &:before { content: "\e072"; } }
+.glyphicon-pause                  { &:before { content: "\e073"; } }
+.glyphicon-stop                   { &:before { content: "\e074"; } }
+.glyphicon-forward                { &:before { content: "\e075"; } }
+.glyphicon-fast-forward           { &:before { content: "\e076"; } }
+.glyphicon-step-forward           { &:before { content: "\e077"; } }
+.glyphicon-eject                  { &:before { content: "\e078"; } }
+.glyphicon-chevron-left           { &:before { content: "\e079"; } }
+.glyphicon-chevron-right          { &:before { content: "\e080"; } }
+.glyphicon-plus-sign              { &:before { content: "\e081"; } }
+.glyphicon-minus-sign             { &:before { content: "\e082"; } }
+.glyphicon-remove-sign            { &:before { content: "\e083"; } }
+.glyphicon-ok-sign                { &:before { content: "\e084"; } }
+.glyphicon-question-sign          { &:before { content: "\e085"; } }
+.glyphicon-info-sign              { &:before { content: "\e086"; } }
+.glyphicon-screenshot             { &:before { content: "\e087"; } }
+.glyphicon-remove-circle          { &:before { content: "\e088"; } }
+.glyphicon-ok-circle              { &:before { content: "\e089"; } }
+.glyphicon-ban-circle             { &:before { content: "\e090"; } }
+.glyphicon-arrow-left             { &:before { content: "\e091"; } }
+.glyphicon-arrow-right            { &:before { content: "\e092"; } }
+.glyphicon-arrow-up               { &:before { content: "\e093"; } }
+.glyphicon-arrow-down             { &:before { content: "\e094"; } }
+.glyphicon-share-alt              { &:before { content: "\e095"; } }
+.glyphicon-resize-full            { &:before { content: "\e096"; } }
+.glyphicon-resize-small           { &:before { content: "\e097"; } }
+.glyphicon-exclamation-sign       { &:before { content: "\e101"; } }
+.glyphicon-gift                   { &:before { content: "\e102"; } }
+.glyphicon-leaf                   { &:before { content: "\e103"; } }
+.glyphicon-eye-open               { &:before { content: "\e105"; } }
+.glyphicon-eye-close              { &:before { content: "\e106"; } }
+.glyphicon-warning-sign           { &:before { content: "\e107"; } }
+.glyphicon-plane                  { &:before { content: "\e108"; } }
+.glyphicon-random                 { &:before { content: "\e110"; } }
+.glyphicon-comment                { &:before { content: "\e111"; } }
+.glyphicon-magnet                 { &:before { content: "\e112"; } }
+.glyphicon-chevron-up             { &:before { content: "\e113"; } }
+.glyphicon-chevron-down           { &:before { content: "\e114"; } }
+.glyphicon-retweet                { &:before { content: "\e115"; } }
+.glyphicon-shopping-cart          { &:before { content: "\e116"; } }
+.glyphicon-folder-close           { &:before { content: "\e117"; } }
+.glyphicon-folder-open            { &:before { content: "\e118"; } }
+.glyphicon-resize-vertical        { &:before { content: "\e119"; } }
+.glyphicon-resize-horizontal      { &:before { content: "\e120"; } }
+.glyphicon-hdd                    { &:before { content: "\e121"; } }
+.glyphicon-bullhorn               { &:before { content: "\e122"; } }
+.glyphicon-certificate            { &:before { content: "\e124"; } }
+.glyphicon-thumbs-up              { &:before { content: "\e125"; } }
+.glyphicon-thumbs-down            { &:before { content: "\e126"; } }
+.glyphicon-hand-right             { &:before { content: "\e127"; } }
+.glyphicon-hand-left              { &:before { content: "\e128"; } }
+.glyphicon-hand-up                { &:before { content: "\e129"; } }
+.glyphicon-hand-down              { &:before { content: "\e130"; } }
+.glyphicon-circle-arrow-right     { &:before { content: "\e131"; } }
+.glyphicon-circle-arrow-left      { &:before { content: "\e132"; } }
+.glyphicon-circle-arrow-up        { &:before { content: "\e133"; } }
+.glyphicon-circle-arrow-down      { &:before { content: "\e134"; } }
+.glyphicon-globe                  { &:before { content: "\e135"; } }
+.glyphicon-tasks                  { &:before { content: "\e137"; } }
+.glyphicon-filter                 { &:before { content: "\e138"; } }
+.glyphicon-fullscreen             { &:before { content: "\e140"; } }
+.glyphicon-dashboard              { &:before { content: "\e141"; } }
+.glyphicon-heart-empty            { &:before { content: "\e143"; } }
+.glyphicon-link                   { &:before { content: "\e144"; } }
+.glyphicon-phone                  { &:before { content: "\e145"; } }
+.glyphicon-usd                    { &:before { content: "\e148"; } }
+.glyphicon-gbp                    { &:before { content: "\e149"; } }
+.glyphicon-sort                   { &:before { content: "\e150"; } }
+.glyphicon-sort-by-alphabet       { &:before { content: "\e151"; } }
+.glyphicon-sort-by-alphabet-alt   { &:before { content: "\e152"; } }
+.glyphicon-sort-by-order          { &:before { content: "\e153"; } }
+.glyphicon-sort-by-order-alt      { &:before { content: "\e154"; } }
+.glyphicon-sort-by-attributes     { &:before { content: "\e155"; } }
+.glyphicon-sort-by-attributes-alt { &:before { content: "\e156"; } }
+.glyphicon-unchecked              { &:before { content: "\e157"; } }
+.glyphicon-expand                 { &:before { content: "\e158"; } }
+.glyphicon-collapse-down          { &:before { content: "\e159"; } }
+.glyphicon-collapse-up            { &:before { content: "\e160"; } }
+.glyphicon-log-in                 { &:before { content: "\e161"; } }
+.glyphicon-flash                  { &:before { content: "\e162"; } }
+.glyphicon-log-out                { &:before { content: "\e163"; } }
+.glyphicon-new-window             { &:before { content: "\e164"; } }
+.glyphicon-record                 { &:before { content: "\e165"; } }
+.glyphicon-save                   { &:before { content: "\e166"; } }
+.glyphicon-open                   { &:before { content: "\e167"; } }
+.glyphicon-saved                  { &:before { content: "\e168"; } }
+.glyphicon-import                 { &:before { content: "\e169"; } }
+.glyphicon-export                 { &:before { content: "\e170"; } }
+.glyphicon-send                   { &:before { content: "\e171"; } }
+.glyphicon-floppy-disk            { &:before { content: "\e172"; } }
+.glyphicon-floppy-saved           { &:before { content: "\e173"; } }
+.glyphicon-floppy-remove          { &:before { content: "\e174"; } }
+.glyphicon-floppy-save            { &:before { content: "\e175"; } }
+.glyphicon-floppy-open            { &:before { content: "\e176"; } }
+.glyphicon-credit-card            { &:before { content: "\e177"; } }
+.glyphicon-transfer               { &:before { content: "\e178"; } }
+.glyphicon-cutlery                { &:before { content: "\e179"; } }
+.glyphicon-header                 { &:before { content: "\e180"; } }
+.glyphicon-compressed             { &:before { content: "\e181"; } }
+.glyphicon-earphone               { &:before { content: "\e182"; } }
+.glyphicon-phone-alt              { &:before { content: "\e183"; } }
+.glyphicon-tower                  { &:before { content: "\e184"; } }
+.glyphicon-stats                  { &:before { content: "\e185"; } }
+.glyphicon-sd-video               { &:before { content: "\e186"; } }
+.glyphicon-hd-video               { &:before { content: "\e187"; } }
+.glyphicon-subtitles              { &:before { content: "\e188"; } }
+.glyphicon-sound-stereo           { &:before { content: "\e189"; } }
+.glyphicon-sound-dolby            { &:before { content: "\e190"; } }
+.glyphicon-sound-5-1              { &:before { content: "\e191"; } }
+.glyphicon-sound-6-1              { &:before { content: "\e192"; } }
+.glyphicon-sound-7-1              { &:before { content: "\e193"; } }
+.glyphicon-copyright-mark         { &:before { content: "\e194"; } }
+.glyphicon-registration-mark      { &:before { content: "\e195"; } }
+.glyphicon-cloud-download         { &:before { content: "\e197"; } }
+.glyphicon-cloud-upload           { &:before { content: "\e198"; } }
+.glyphicon-tree-conifer           { &:before { content: "\e199"; } }
+.glyphicon-tree-deciduous         { &:before { content: "\e200"; } }
+.glyphicon-briefcase              { &:before { content: "\1f4bc"; } }
+.glyphicon-calendar               { &:before { content: "\1f4c5"; } }
+.glyphicon-pushpin                { &:before { content: "\1f4cc"; } }
+.glyphicon-paperclip              { &:before { content: "\1f4ce"; } }
+.glyphicon-camera                 { &:before { content: "\1f4f7"; } }
+.glyphicon-lock                   { &:before { content: "\1f512"; } }
+.glyphicon-bell                   { &:before { content: "\1f514"; } }
+.glyphicon-bookmark               { &:before { content: "\1f516"; } }
+.glyphicon-fire                   { &:before { content: "\1f525"; } }
+.glyphicon-wrench                 { &:before { content: "\1f527"; } }
diff --git a/client/galaxy/style/less/bootstrap/grid.less b/client/galaxy/style/less/bootstrap/grid.less
new file mode 100644
index 0000000..d3a3cf6
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/grid.less
@@ -0,0 +1,346 @@
+//
+// Grid system
+// --------------------------------------------------
+
+
+// Set the container width, and override it for fixed navbars in media queries
+.container {
+  .container-fixed();
+}
+
+// mobile first defaults
+.row {
+  .make-row();
+}
+
+// Common styles for small and large grid columns
+.col-xs-1,
+.col-xs-2,
+.col-xs-3,
+.col-xs-4,
+.col-xs-5,
+.col-xs-6,
+.col-xs-7,
+.col-xs-8,
+.col-xs-9,
+.col-xs-10,
+.col-xs-11,
+.col-xs-12,
+.col-sm-1,
+.col-sm-2,
+.col-sm-3,
+.col-sm-4,
+.col-sm-5,
+.col-sm-6,
+.col-sm-7,
+.col-sm-8,
+.col-sm-9,
+.col-sm-10,
+.col-sm-11,
+.col-sm-12,
+.col-md-1,
+.col-md-2,
+.col-md-3,
+.col-md-4,
+.col-md-5,
+.col-md-6,
+.col-md-7,
+.col-md-8,
+.col-md-9,
+.col-md-10,
+.col-md-11,
+.col-md-12,
+.col-lg-1,
+.col-lg-2,
+.col-lg-3,
+.col-lg-4,
+.col-lg-5,
+.col-lg-6,
+.col-lg-7,
+.col-lg-8,
+.col-lg-9,
+.col-lg-10,
+.col-lg-11,
+.col-lg-12 {
+  position: relative;
+  // Prevent columns from collapsing when empty
+  min-height: 1px;
+  // Inner gutter via padding
+  padding-left:  (@grid-gutter-width / 2);
+  padding-right: (@grid-gutter-width / 2);
+}
+
+
+// Extra small grid
+//
+// Grid classes for extra small devices like smartphones. No offset, push, or
+// pull classes are present here due to the size of the target.
+//
+// Note that `.col-xs-12` doesn't get floated on purpose—there's no need since
+// it's full-width.
+
+.col-xs-1,
+.col-xs-2,
+.col-xs-3,
+.col-xs-4,
+.col-xs-5,
+.col-xs-6,
+.col-xs-7,
+.col-xs-8,
+.col-xs-9,
+.col-xs-10,
+.col-xs-11 {
+  float: left;
+}
+.col-xs-1  { width: percentage((1 / @grid-columns)); }
+.col-xs-2  { width: percentage((2 / @grid-columns)); }
+.col-xs-3  { width: percentage((3 / @grid-columns)); }
+.col-xs-4  { width: percentage((4 / @grid-columns)); }
+.col-xs-5  { width: percentage((5 / @grid-columns)); }
+.col-xs-6  { width: percentage((6 / @grid-columns)); }
+.col-xs-7  { width: percentage((7 / @grid-columns)); }
+.col-xs-8  { width: percentage((8 / @grid-columns)); }
+.col-xs-9  { width: percentage((9 / @grid-columns)); }
+.col-xs-10 { width: percentage((10/ @grid-columns)); }
+.col-xs-11 { width: percentage((11/ @grid-columns)); }
+.col-xs-12 { width: 100%; }
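+
+// For reference (comment added here, not upstream): with the default
+// @grid-columns of 12, percentage((n / @grid-columns)) compiles to fixed
+// percentages, e.g. .col-xs-1 becomes `width: 8.33333333%;` and .col-xs-3
+// becomes `width: 25%;`.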
+
+
+// Small grid
+//
+// Columns, offsets, pushes, and pulls for the small device range, from phones
+// to tablets.
+//
+// Note that `.col-sm-12` doesn't get floated on purpose—there's no need since
+// it's full-width.
+
+@media (min-width: @screen-sm) {
+  .container {
+    max-width: @container-tablet;
+  }
+
+  .col-sm-1,
+  .col-sm-2,
+  .col-sm-3,
+  .col-sm-4,
+  .col-sm-5,
+  .col-sm-6,
+  .col-sm-7,
+  .col-sm-8,
+  .col-sm-9,
+  .col-sm-10,
+  .col-sm-11 {
+    float: left;
+  }
+  .col-sm-1  { width: percentage((1 / @grid-columns)); }
+  .col-sm-2  { width: percentage((2 / @grid-columns)); }
+  .col-sm-3  { width: percentage((3 / @grid-columns)); }
+  .col-sm-4  { width: percentage((4 / @grid-columns)); }
+  .col-sm-5  { width: percentage((5 / @grid-columns)); }
+  .col-sm-6  { width: percentage((6 / @grid-columns)); }
+  .col-sm-7  { width: percentage((7 / @grid-columns)); }
+  .col-sm-8  { width: percentage((8 / @grid-columns)); }
+  .col-sm-9  { width: percentage((9 / @grid-columns)); }
+  .col-sm-10 { width: percentage((10/ @grid-columns)); }
+  .col-sm-11 { width: percentage((11/ @grid-columns)); }
+  .col-sm-12 { width: 100%; }
+
+  // Push and pull columns for source order changes
+  .col-sm-push-1  { left: percentage((1 / @grid-columns)); }
+  .col-sm-push-2  { left: percentage((2 / @grid-columns)); }
+  .col-sm-push-3  { left: percentage((3 / @grid-columns)); }
+  .col-sm-push-4  { left: percentage((4 / @grid-columns)); }
+  .col-sm-push-5  { left: percentage((5 / @grid-columns)); }
+  .col-sm-push-6  { left: percentage((6 / @grid-columns)); }
+  .col-sm-push-7  { left: percentage((7 / @grid-columns)); }
+  .col-sm-push-8  { left: percentage((8 / @grid-columns)); }
+  .col-sm-push-9  { left: percentage((9 / @grid-columns)); }
+  .col-sm-push-10 { left: percentage((10/ @grid-columns)); }
+  .col-sm-push-11 { left: percentage((11/ @grid-columns)); }
+
+  .col-sm-pull-1  { right: percentage((1 / @grid-columns)); }
+  .col-sm-pull-2  { right: percentage((2 / @grid-columns)); }
+  .col-sm-pull-3  { right: percentage((3 / @grid-columns)); }
+  .col-sm-pull-4  { right: percentage((4 / @grid-columns)); }
+  .col-sm-pull-5  { right: percentage((5 / @grid-columns)); }
+  .col-sm-pull-6  { right: percentage((6 / @grid-columns)); }
+  .col-sm-pull-7  { right: percentage((7 / @grid-columns)); }
+  .col-sm-pull-8  { right: percentage((8 / @grid-columns)); }
+  .col-sm-pull-9  { right: percentage((9 / @grid-columns)); }
+  .col-sm-pull-10 { right: percentage((10/ @grid-columns)); }
+  .col-sm-pull-11 { right: percentage((11/ @grid-columns)); }
+
+  // Offsets
+  .col-sm-offset-1  { margin-left: percentage((1 / @grid-columns)); }
+  .col-sm-offset-2  { margin-left: percentage((2 / @grid-columns)); }
+  .col-sm-offset-3  { margin-left: percentage((3 / @grid-columns)); }
+  .col-sm-offset-4  { margin-left: percentage((4 / @grid-columns)); }
+  .col-sm-offset-5  { margin-left: percentage((5 / @grid-columns)); }
+  .col-sm-offset-6  { margin-left: percentage((6 / @grid-columns)); }
+  .col-sm-offset-7  { margin-left: percentage((7 / @grid-columns)); }
+  .col-sm-offset-8  { margin-left: percentage((8 / @grid-columns)); }
+  .col-sm-offset-9  { margin-left: percentage((9 / @grid-columns)); }
+  .col-sm-offset-10 { margin-left: percentage((10/ @grid-columns)); }
+  .col-sm-offset-11 { margin-left: percentage((11/ @grid-columns)); }
+}
+
+
+// Medium grid
+//
+// Columns, offsets, pushes, and pulls for the desktop device range.
+//
+// Note that `.col-md-12` doesn't get floated on purpose—there's no need since
+// it's full-width.
+
+@media (min-width: @screen-md) {
+  .container {
+    max-width: @container-desktop;
+  }
+  .col-md-1,
+  .col-md-2,
+  .col-md-3,
+  .col-md-4,
+  .col-md-5,
+  .col-md-6,
+  .col-md-7,
+  .col-md-8,
+  .col-md-9,
+  .col-md-10,
+  .col-md-11 {
+    float: left;
+  }
+  .col-md-1  { width: percentage((1 / @grid-columns)); }
+  .col-md-2  { width: percentage((2 / @grid-columns)); }
+  .col-md-3  { width: percentage((3 / @grid-columns)); }
+  .col-md-4  { width: percentage((4 / @grid-columns)); }
+  .col-md-5  { width: percentage((5 / @grid-columns)); }
+  .col-md-6  { width: percentage((6 / @grid-columns)); }
+  .col-md-7  { width: percentage((7 / @grid-columns)); }
+  .col-md-8  { width: percentage((8 / @grid-columns)); }
+  .col-md-9  { width: percentage((9 / @grid-columns)); }
+  .col-md-10 { width: percentage((10/ @grid-columns)); }
+  .col-md-11 { width: percentage((11/ @grid-columns)); }
+  .col-md-12 { width: 100%; }
+
+  // Push and pull columns for source order changes
+  .col-md-push-0  { left: auto; }
+  .col-md-push-1  { left: percentage((1 / @grid-columns)); }
+  .col-md-push-2  { left: percentage((2 / @grid-columns)); }
+  .col-md-push-3  { left: percentage((3 / @grid-columns)); }
+  .col-md-push-4  { left: percentage((4 / @grid-columns)); }
+  .col-md-push-5  { left: percentage((5 / @grid-columns)); }
+  .col-md-push-6  { left: percentage((6 / @grid-columns)); }
+  .col-md-push-7  { left: percentage((7 / @grid-columns)); }
+  .col-md-push-8  { left: percentage((8 / @grid-columns)); }
+  .col-md-push-9  { left: percentage((9 / @grid-columns)); }
+  .col-md-push-10 { left: percentage((10/ @grid-columns)); }
+  .col-md-push-11 { left: percentage((11/ @grid-columns)); }
+
+  .col-md-pull-0  { right: auto; }
+  .col-md-pull-1  { right: percentage((1 / @grid-columns)); }
+  .col-md-pull-2  { right: percentage((2 / @grid-columns)); }
+  .col-md-pull-3  { right: percentage((3 / @grid-columns)); }
+  .col-md-pull-4  { right: percentage((4 / @grid-columns)); }
+  .col-md-pull-5  { right: percentage((5 / @grid-columns)); }
+  .col-md-pull-6  { right: percentage((6 / @grid-columns)); }
+  .col-md-pull-7  { right: percentage((7 / @grid-columns)); }
+  .col-md-pull-8  { right: percentage((8 / @grid-columns)); }
+  .col-md-pull-9  { right: percentage((9 / @grid-columns)); }
+  .col-md-pull-10 { right: percentage((10/ @grid-columns)); }
+  .col-md-pull-11 { right: percentage((11/ @grid-columns)); }
+
+  // Offsets
+  .col-md-offset-0  { margin-left: 0; }
+  .col-md-offset-1  { margin-left: percentage((1 / @grid-columns)); }
+  .col-md-offset-2  { margin-left: percentage((2 / @grid-columns)); }
+  .col-md-offset-3  { margin-left: percentage((3 / @grid-columns)); }
+  .col-md-offset-4  { margin-left: percentage((4 / @grid-columns)); }
+  .col-md-offset-5  { margin-left: percentage((5 / @grid-columns)); }
+  .col-md-offset-6  { margin-left: percentage((6 / @grid-columns)); }
+  .col-md-offset-7  { margin-left: percentage((7 / @grid-columns)); }
+  .col-md-offset-8  { margin-left: percentage((8 / @grid-columns)); }
+  .col-md-offset-9  { margin-left: percentage((9 / @grid-columns)); }
+  .col-md-offset-10 { margin-left: percentage((10/ @grid-columns)); }
+  .col-md-offset-11 { margin-left: percentage((11/ @grid-columns)); }
+}
+
+
+// Large grid
+//
+// Columns, offsets, pushes, and pulls for the large desktop device range.
+//
+// Note that `.col-lg-12` doesn't get floated on purpose—there's no need since
+// it's full-width.
+
+@media (min-width: @screen-lg-desktop) {
+  .container {
+    max-width: @container-lg-desktop;
+  }
+
+  .col-lg-1,
+  .col-lg-2,
+  .col-lg-3,
+  .col-lg-4,
+  .col-lg-5,
+  .col-lg-6,
+  .col-lg-7,
+  .col-lg-8,
+  .col-lg-9,
+  .col-lg-10,
+  .col-lg-11 {
+    float: left;
+  }
+  .col-lg-1  { width: percentage((1 / @grid-columns)); }
+  .col-lg-2  { width: percentage((2 / @grid-columns)); }
+  .col-lg-3  { width: percentage((3 / @grid-columns)); }
+  .col-lg-4  { width: percentage((4 / @grid-columns)); }
+  .col-lg-5  { width: percentage((5 / @grid-columns)); }
+  .col-lg-6  { width: percentage((6 / @grid-columns)); }
+  .col-lg-7  { width: percentage((7 / @grid-columns)); }
+  .col-lg-8  { width: percentage((8 / @grid-columns)); }
+  .col-lg-9  { width: percentage((9 / @grid-columns)); }
+  .col-lg-10 { width: percentage((10/ @grid-columns)); }
+  .col-lg-11 { width: percentage((11/ @grid-columns)); }
+  .col-lg-12 { width: 100%; }
+
+  // Push and pull columns for source order changes
+  .col-lg-push-0  { left: auto; }
+  .col-lg-push-1  { left: percentage((1 / @grid-columns)); }
+  .col-lg-push-2  { left: percentage((2 / @grid-columns)); }
+  .col-lg-push-3  { left: percentage((3 / @grid-columns)); }
+  .col-lg-push-4  { left: percentage((4 / @grid-columns)); }
+  .col-lg-push-5  { left: percentage((5 / @grid-columns)); }
+  .col-lg-push-6  { left: percentage((6 / @grid-columns)); }
+  .col-lg-push-7  { left: percentage((7 / @grid-columns)); }
+  .col-lg-push-8  { left: percentage((8 / @grid-columns)); }
+  .col-lg-push-9  { left: percentage((9 / @grid-columns)); }
+  .col-lg-push-10 { left: percentage((10/ @grid-columns)); }
+  .col-lg-push-11 { left: percentage((11/ @grid-columns)); }
+
+  .col-lg-pull-0  { right: auto; }
+  .col-lg-pull-1  { right: percentage((1 / @grid-columns)); }
+  .col-lg-pull-2  { right: percentage((2 / @grid-columns)); }
+  .col-lg-pull-3  { right: percentage((3 / @grid-columns)); }
+  .col-lg-pull-4  { right: percentage((4 / @grid-columns)); }
+  .col-lg-pull-5  { right: percentage((5 / @grid-columns)); }
+  .col-lg-pull-6  { right: percentage((6 / @grid-columns)); }
+  .col-lg-pull-7  { right: percentage((7 / @grid-columns)); }
+  .col-lg-pull-8  { right: percentage((8 / @grid-columns)); }
+  .col-lg-pull-9  { right: percentage((9 / @grid-columns)); }
+  .col-lg-pull-10 { right: percentage((10/ @grid-columns)); }
+  .col-lg-pull-11 { right: percentage((11/ @grid-columns)); }
+
+  // Offsets
+  .col-lg-offset-0  { margin-left: 0; }
+  .col-lg-offset-1  { margin-left: percentage((1 / @grid-columns)); }
+  .col-lg-offset-2  { margin-left: percentage((2 / @grid-columns)); }
+  .col-lg-offset-3  { margin-left: percentage((3 / @grid-columns)); }
+  .col-lg-offset-4  { margin-left: percentage((4 / @grid-columns)); }
+  .col-lg-offset-5  { margin-left: percentage((5 / @grid-columns)); }
+  .col-lg-offset-6  { margin-left: percentage((6 / @grid-columns)); }
+  .col-lg-offset-7  { margin-left: percentage((7 / @grid-columns)); }
+  .col-lg-offset-8  { margin-left: percentage((8 / @grid-columns)); }
+  .col-lg-offset-9  { margin-left: percentage((9 / @grid-columns)); }
+  .col-lg-offset-10 { margin-left: percentage((10/ @grid-columns)); }
+  .col-lg-offset-11 { margin-left: percentage((11/ @grid-columns)); }
+}
diff --git a/client/galaxy/style/less/bootstrap/input-groups.less b/client/galaxy/style/less/bootstrap/input-groups.less
new file mode 100644
index 0000000..570f03f
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/input-groups.less
@@ -0,0 +1,127 @@
+//
+// Input groups
+// --------------------------------------------------
+
+// Base styles
+// -------------------------
+.input-group {
+  position: relative; // For dropdowns
+  display: table;
+  border-collapse: separate; // prevent input groups from inheriting border styles from table cells when placed within a table
+
+  // Undo padding and float of grid classes
+  &.col {
+    float: none;
+    padding-left: 0;
+    padding-right: 0;
+  }
+
+  .form-control {
+    width: 100%;
+    margin-bottom: 0;
+  }
+}
+
+// Sizing options
+//
+// Remix the default form control sizing classes into new ones for easier
+// manipulation.
+
+.input-group-lg > .form-control,
+.input-group-lg > .input-group-addon,
+.input-group-lg > .input-group-btn > .btn { .input-lg(); }
+.input-group-sm > .form-control,
+.input-group-sm > .input-group-addon,
+.input-group-sm > .input-group-btn > .btn { .input-sm(); }
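+
+// Example markup (illustrative, not from the upstream file): the remixed
+// classes size every piece of an input group at once.
+//
+// <div class="input-group input-group-lg">
+//   <span class="input-group-addon">@</span>
+//   <input type="text" class="form-control" placeholder="Username">
+// </div>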
+
+
+// Display as table-cell
+// -------------------------
+.input-group-addon,
+.input-group-btn,
+.input-group .form-control {
+  display: table-cell;
+
+  &:not(:first-child):not(:last-child) {
+    border-radius: 0;
+  }
+}
+// Addon and addon wrapper for buttons
+.input-group-addon,
+.input-group-btn {
+  width: 1%;
+  white-space: nowrap;
+  vertical-align: middle; // Match the inputs
+}
+
+// Text input groups
+// -------------------------
+.input-group-addon {
+  padding: @padding-base-vertical @padding-base-horizontal;
+  font-size: @font-size-base;
+  font-weight: normal;
+  line-height: 1;
+  text-align: center;
+  background-color: @input-group-addon-bg;
+  border: 1px solid @input-group-addon-border-color;
+  border-radius: @border-radius-base;
+
+  // Sizing
+  &.input-sm {
+    padding: @padding-small-vertical @padding-small-horizontal;
+    font-size: @font-size-small;
+    border-radius: @border-radius-small;
+  }
+  &.input-lg {
+    padding: @padding-large-vertical @padding-large-horizontal;
+    font-size: @font-size-large;
+    border-radius: @border-radius-large;
+  }
+
+  // Nuke default margins from checkboxes and radios to vertically center within.
+  input[type="radio"],
+  input[type="checkbox"] {
+    margin-top: 0;
+  }
+}
+
+// Reset rounded corners
+.input-group .form-control:first-child,
+.input-group-addon:first-child,
+.input-group-btn:first-child > .btn,
+.input-group-btn:first-child > .dropdown-toggle,
+.input-group-btn:last-child > .btn:not(:last-child):not(.dropdown-toggle) {
+  .border-right-radius(0);
+}
+.input-group-addon:first-child {
+  border-right: 0;
+}
+.input-group .form-control:last-child,
+.input-group-addon:last-child,
+.input-group-btn:last-child > .btn,
+.input-group-btn:last-child > .dropdown-toggle,
+.input-group-btn:first-child > .btn:not(:first-child) {
+  .border-left-radius(0);
+}
+.input-group-addon:last-child {
+  border-left: 0;
+}
+
+// Button input groups
+// -------------------------
+.input-group-btn {
+  position: relative;
+  white-space: nowrap;
+}
+.input-group-btn > .btn {
+  position: relative;
+  // Jankily prevent input button groups from wrapping
+  + .btn {
+    margin-left: -4px;
+  }
+  // Bring the "active" button to the front
+  &:hover,
+  &:active {
+    z-index: 2;
+  }
+}
diff --git a/client/galaxy/style/less/bootstrap/jumbotron.less b/client/galaxy/style/less/bootstrap/jumbotron.less
new file mode 100644
index 0000000..53289fb
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/jumbotron.less
@@ -0,0 +1,40 @@
+//
+// Jumbotron
+// --------------------------------------------------
+
+
+.jumbotron {
+  padding: @jumbotron-padding;
+  margin-bottom: @jumbotron-padding;
+  font-size: (@font-size-base * 1.5);
+  font-weight: 200;
+  line-height: (@line-height-base * 1.5);
+  color: @jumbotron-color;
+  background-color: @jumbotron-bg;
+
+  h1 {
+    line-height: 1;
+    color: @jumbotron-heading-color;
+  }
+  p {
+    line-height: 1.4;
+  }
+
+  .container & {
+    border-radius: @border-radius-large; // Only round corners at higher resolutions if contained in a container
+  }
+
+  @media screen and (min-width: @screen-sm) {
+    padding-top:    (@jumbotron-padding * 1.6);
+    padding-bottom: (@jumbotron-padding * 1.6);
+
+    .container & {
+      padding-left:  (@jumbotron-padding * 2);
+      padding-right: (@jumbotron-padding * 2);
+    }
+
+    h1 {
+      font-size: (@font-size-base * 4.5);
+    }
+  }
+}
diff --git a/client/galaxy/style/less/bootstrap/labels.less b/client/galaxy/style/less/bootstrap/labels.less
new file mode 100644
index 0000000..cad5ce5
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/labels.less
@@ -0,0 +1,58 @@
+//
+// Labels
+// --------------------------------------------------
+
+.label {
+  display: inline;
+  padding: .2em .6em .3em;
+  font-size: 75%;
+  font-weight: bold;
+  line-height: 1;
+  color: @label-color;
+  text-align: center;
+  white-space: nowrap;
+  vertical-align: baseline;
+  border-radius: .25em;
+
+  // Add hover effects, but only for links
+  &[href] {
+    &:hover,
+    &:focus {
+      color: @label-link-hover-color;
+      text-decoration: none;
+      cursor: pointer;
+    }
+  }
+
+  // Empty labels collapse automatically (not available in IE8)
+  &:empty {
+    display: none;
+  }
+}
+
+// Colors
+// Contextual variations (linked labels get darker on :hover)
+
+.label-default {
+  .label-variant(@label-default-bg);
+}
+
+.label-primary {
+  .label-variant(@label-primary-bg);
+}
+
+.label-success {
+  .label-variant(@label-success-bg);
+}
+
+.label-info {
+  .label-variant(@label-info-bg);
+}
+
+.label-warning {
+  .label-variant(@label-warning-bg);
+}
+
+.label-danger {
+  .label-variant(@label-danger-bg);
+}
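+
+// Illustrative only, not part of the upstream file: a custom variant can be
+// minted with the same mixin; the class name and color are hypothetical.
+// .label-beta {
+//   .label-variant(#7a43b6);
+// }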
diff --git a/client/galaxy/style/less/bootstrap/list-group.less b/client/galaxy/style/less/bootstrap/list-group.less
new file mode 100644
index 0000000..46cda40
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/list-group.less
@@ -0,0 +1,88 @@
+//
+// List groups
+// --------------------------------------------------
+
+// Base class
+//
+// Easily usable on <ul>, <ol>, or <div>.
+.list-group {
+  // No need to set list-style: none; since .list-group-item is block level
+  margin-bottom: 20px;
+  padding-left: 0; // reset padding because ul and ol
+}
+
+// Individual list items
+// -------------------------
+
+.list-group-item {
+  position: relative;
+  display: block;
+  padding: 10px 15px;
+  // Place the border on the list items and negative margin up for better styling
+  margin-bottom: -1px;
+  background-color: @list-group-bg;
+  border: 1px solid @list-group-border;
+
+  // Round the first and last items
+  &:first-child {
+    .border-top-radius(@list-group-border-radius);
+  }
+  &:last-child {
+    margin-bottom: 0;
+    .border-bottom-radius(@list-group-border-radius);
+  }
+
+  // Align badges within list items
+  > .badge {
+    float: right;
+  }
+  > .badge + .badge {
+    margin-right: 5px;
+  }
+
+  // Linked list items
+  a& {
+    color: @list-group-link-color;
+
+    .list-group-item-heading {
+      color: @list-group-link-heading-color;
+    }
+
+    // Hover state
+    &:hover,
+    &:focus {
+      text-decoration: none;
+      background-color: @list-group-hover-bg;
+    }
+  }
+
+  // Active class on item itself, not parent
+  &.active,
+  &.active:hover,
+  &.active:focus {
+    z-index: 2; // Place active items above their siblings for proper border styling
+    color: @list-group-active-color;
+    background-color: @list-group-active-bg;
+    border-color: @list-group-active-border;
+
+    // Force color to inherit for custom content
+    .list-group-item-heading {
+      color: inherit;
+    }
+    .list-group-item-text {
+      color: lighten(@list-group-active-bg, 40%);
+    }
+  }
+}
+
+// Custom content options
+// -------------------------
+
+.list-group-item-heading {
+  margin-top: 0;
+  margin-bottom: 5px;
+}
+.list-group-item-text {
+  margin-bottom: 0;
+  line-height: 1.3;
+}
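+
+// Example markup (illustrative, not from the upstream file):
+//
+// <ul class="list-group">
+//   <li class="list-group-item active">Inbox <span class="badge">14</span></li>
+//   <li class="list-group-item">Drafts</li>
+// </ul>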
diff --git a/client/galaxy/style/less/bootstrap/media.less b/client/galaxy/style/less/bootstrap/media.less
new file mode 100644
index 0000000..5ad22cd
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/media.less
@@ -0,0 +1,56 @@
+// Media objects
+// Source: http://stubbornella.org/content/?p=497
+// --------------------------------------------------
+
+
+// Common styles
+// -------------------------
+
+// Clear the floats
+.media,
+.media-body {
+  overflow: hidden;
+  zoom: 1;
+}
+
+// Proper spacing between instances of .media
+.media,
+.media .media {
+  margin-top: 15px;
+}
+.media:first-child {
+  margin-top: 0;
+}
+
+// For images and videos, set to block
+.media-object {
+  display: block;
+}
+
+// Reset margins on headings for tighter default spacing
+.media-heading {
+  margin: 0 0 5px;
+}
+
+
+// Media image alignment
+// -------------------------
+
+.media {
+  > .pull-left {
+    margin-right: 10px;
+  }
+  > .pull-right {
+    margin-left: 10px;
+  }
+}
+
+
+// Media list variation
+// -------------------------
+
+// Undo default ul/ol styles
+.media-list {
+  padding-left: 0;
+  list-style: none;
+}
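+
+// Example markup (illustrative, not from the upstream file):
+//
+// <div class="media">
+//   <a class="pull-left" href="#">
+//     <img class="media-object" src="thumb.png" alt="Thumbnail">
+//   </a>
+//   <div class="media-body">
+//     <h4 class="media-heading">Media heading</h4>
+//     Body text aligned beside the floated image.
+//   </div>
+// </div>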
diff --git a/client/galaxy/style/less/bootstrap/mixins.less b/client/galaxy/style/less/bootstrap/mixins.less
new file mode 100644
index 0000000..f956361
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/mixins.less
@@ -0,0 +1,744 @@
+//
+// Mixins
+// --------------------------------------------------
+
+
+// Utilities
+// -------------------------
+
+// Clearfix
+// Source: http://nicolasgallagher.com/micro-clearfix-hack/
+//
+// For modern browsers
+// 1. The space content is one way to avoid an Opera bug when the
+//    contenteditable attribute is included anywhere else in the document.
+//    Otherwise it causes space to appear at the top and bottom of elements
+//    that are clearfixed.
+// 2. The use of `table` rather than `block` is only necessary if using
+//    `:before` to contain the top-margins of child elements.
+.clearfix() {
+  &:before,
+  &:after {
+    content: " "; /* 1 */
+    display: table; /* 2 */
+  }
+  &:after {
+    clear: both;
+  }
+}
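+
+// Illustrative usage, not part of the upstream file: any container of floats
+// can mix this in; the class name is hypothetical.
+// .toolbar {
+//   .clearfix();
+// }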
+
+// Webkit-style focus
+.tab-focus() {
+  // Default
+  outline: thin dotted #333;
+  // Webkit
+  outline: 5px auto -webkit-focus-ring-color;
+  outline-offset: -2px;
+}
+
+// Center-align a block level element
+.center-block() {
+  display: block;
+  margin-left: auto;
+  margin-right: auto;
+}
+
+// Sizing shortcuts
+.size(@width; @height) {
+  width: @width;
+  height: @height;
+}
+.square(@size) {
+  .size(@size; @size);
+}
+
+// Placeholder text
+.placeholder(@color: @input-color-placeholder) {
+  &:-moz-placeholder            { color: @color; } // Firefox 4-18
+  &::-moz-placeholder           { color: @color; } // Firefox 19+
+  &:-ms-input-placeholder       { color: @color; } // Internet Explorer 10+
+  &::-webkit-input-placeholder  { color: @color; } // Safari and Chrome
+}
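+
+// Illustrative usage, not part of the upstream file: compiles to the four
+// vendor-prefixed selectors above; the class name is hypothetical.
+// .search-query {
+//   .placeholder(#999);
+// }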
+
+// Text overflow
+// Requires inline-block or block for proper styling
+.text-overflow() {
+  overflow: hidden;
+  text-overflow: ellipsis;
+  white-space: nowrap;
+}
+
+// CSS image replacement
+//
+// Heads up! v3 launched with only `.hide-text()`, but per our pattern for
+// mixins being reused as classes with the same name, this doesn't hold up. As
+// of v3.0.1 we have added `.text-hide()` and deprecated `.hide-text()`. Note
+// that we cannot chain the mixins together in Less, so they are repeated.
+//
+// Source: https://github.com/h5bp/html5-boilerplate/commit/aa0396eae757
+
+// Deprecated as of v3.0.1 (will be removed in v4)
+.hide-text() {
+  font: ~"0/0" a;
+  color: transparent;
+  text-shadow: none;
+  background-color: transparent;
+  border: 0;
+}
+// New mixin to use as of v3.0.1
+.text-hide() {
+  font: ~"0/0" a;
+  color: transparent;
+  text-shadow: none;
+  background-color: transparent;
+  border: 0;
+}
+
+
+
+// CSS3 PROPERTIES
+// --------------------------------------------------
+
+// Single side border-radius
+.border-top-radius(@radius) {
+  border-top-right-radius: @radius;
+   border-top-left-radius: @radius;
+}
+.border-right-radius(@radius) {
+  border-bottom-right-radius: @radius;
+     border-top-right-radius: @radius;
+}
+.border-bottom-radius(@radius) {
+  border-bottom-right-radius: @radius;
+   border-bottom-left-radius: @radius;
+}
+.border-left-radius(@radius) {
+  border-bottom-left-radius: @radius;
+     border-top-left-radius: @radius;
+}
+
+// Drop shadows
+.box-shadow(@shadow) {
+  -webkit-box-shadow: @shadow; // iOS <4.3 & Android <4.1
+          box-shadow: @shadow;
+}
+
+// Transitions
+.transition(@transition) {
+  -webkit-transition: @transition;
+          transition: @transition;
+}
+.transition-property(@transition-property) {
+  -webkit-transition-property: @transition-property;
+          transition-property: @transition-property;
+}
+.transition-delay(@transition-delay) {
+  -webkit-transition-delay: @transition-delay;
+          transition-delay: @transition-delay;
+}
+.transition-duration(@transition-duration) {
+  -webkit-transition-duration: @transition-duration;
+          transition-duration: @transition-duration;
+}
+.transition-transform(@transition) {
+  -webkit-transition: -webkit-transform @transition;
+     -moz-transition: -moz-transform @transition;
+       -o-transition: -o-transform @transition;
+          transition: transform @transition;
+}
+
+// Transformations
+.rotate(@degrees) {
+  -webkit-transform: rotate(@degrees);
+      -ms-transform: rotate(@degrees); // IE9+
+          transform: rotate(@degrees);
+}
+.scale(@ratio) {
+  -webkit-transform: scale(@ratio);
+      -ms-transform: scale(@ratio); // IE9+
+          transform: scale(@ratio);
+}
+.translate(@x; @y) {
+  -webkit-transform: translate(@x, @y);
+      -ms-transform: translate(@x, @y); // IE9+
+          transform: translate(@x, @y);
+}
+.skew(@x; @y) {
+  -webkit-transform: skew(@x, @y);
+      -ms-transform: skewX(@x) skewY(@y); // See https://github.com/twbs/bootstrap/issues/4885; IE9+
+          transform: skew(@x, @y);
+}
+.translate3d(@x; @y; @z) {
+  -webkit-transform: translate3d(@x, @y, @z);
+          transform: translate3d(@x, @y, @z);
+}
+
+// Backface visibility
+// Prevent browsers from flickering when using CSS 3D transforms.
+// Default value is `visible`, but can be changed to `hidden`
+// See git pull https://github.com/dannykeane/bootstrap.git backface-visibility for examples
+.backface-visibility(@visibility){
+  -webkit-backface-visibility: @visibility;
+     -moz-backface-visibility: @visibility;
+          backface-visibility: @visibility;
+}
+
+// Box sizing
+.box-sizing(@boxmodel) {
+  -webkit-box-sizing: @boxmodel;
+     -moz-box-sizing: @boxmodel;
+          box-sizing: @boxmodel;
+}
+
+// User select
+// For selecting text on the page
+.user-select(@select) {
+  -webkit-user-select: @select;
+     -moz-user-select: @select;
+      -ms-user-select: @select; // IE10+
+       -o-user-select: @select;
+          user-select: @select;
+}
+
+// Resize anything
+.resizable(@direction) {
+  resize: @direction; // Options: horizontal, vertical, both
+  overflow: auto; // Safari fix
+}
+
+// CSS3 Content Columns
+.content-columns(@column-count; @column-gap: @grid-gutter-width) {
+  -webkit-column-count: @column-count;
+     -moz-column-count: @column-count;
+          column-count: @column-count;
+  -webkit-column-gap: @column-gap;
+     -moz-column-gap: @column-gap;
+          column-gap: @column-gap;
+}
+
+// Optional hyphenation
+.hyphens(@mode: auto) {
+  word-wrap: break-word;
+  -webkit-hyphens: @mode;
+     -moz-hyphens: @mode;
+      -ms-hyphens: @mode; // IE10+
+       -o-hyphens: @mode;
+          hyphens: @mode;
+}
+
+// Opacity
+.opacity(@opacity) {
+  opacity: @opacity;
+  // IE8 filter
+  @opacity-ie: (@opacity * 100);
+  filter: ~"alpha(opacity=@{opacity-ie})";
+}
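+
+// Illustrative usage, not part of the upstream file: `.opacity(.65);` compiles
+// to `opacity: 0.65;` plus `filter: alpha(opacity=65);` for IE8.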
+
+
+
+// GRADIENTS
+// --------------------------------------------------
+
+#gradient {
+
+  // Horizontal gradient, from left to right
+  //
+  // Creates two color stops, start and end, by specifying a color and position for each color stop.
+  // Color stops are not available in IE9 and below.
+  .horizontal(@start-color: #555; @end-color: #333; @start-percent: 0%; @end-percent: 100%) {
+    background-image: -webkit-gradient(linear, @start-percent top, @end-percent top, from(@start-color), to(@end-color)); // Safari 4+, Chrome 2+
+    background-image: -webkit-linear-gradient(left, @start-color @start-percent, @end-color @end-percent); // Safari 5.1+, Chrome 10+ (color-stop() is only valid in the old -webkit-gradient() syntax)
+    background-image: -moz-linear-gradient(left, @start-color @start-percent, @end-color @end-percent); // FF 3.6+
+    background-image:  linear-gradient(to right, @start-color @start-percent, @end-color @end-percent); // Standard, IE10
+    background-repeat: repeat-x;
+    filter: e(%("progid:DXImageTransform.Microsoft.gradient(startColorstr='%d', endColorstr='%d', GradientType=1)",argb(@start-color),argb(@end-color))); // IE9 and down
+  }
+
+  // Vertical gradient, from top to bottom
+  //
+  // Creates two color stops, start and end, by specifying a color and position for each color stop.
+  // Color stops are not available in IE9 and below.
+  .vertical(@start-color: #555; @end-color: #333; @start-percent: 0%; @end-percent: 100%) {
+    background-image: -webkit-gradient(linear, left @start-percent, left @end-percent, from(@start-color), to(@end-color)); // Safari 4+, Chrome 2+
+    background-image: -webkit-linear-gradient(top, @start-color @start-percent, @end-color @end-percent); // Safari 5.1+, Chrome 10+
+    background-image:  -moz-linear-gradient(top, @start-color @start-percent, @end-color @end-percent); // FF 3.6+
+    background-image: linear-gradient(to bottom, @start-color @start-percent, @end-color @end-percent); // Standard, IE10
+    background-repeat: repeat-x;
+    filter: e(%("progid:DXImageTransform.Microsoft.gradient(startColorstr='%d', endColorstr='%d', GradientType=0)",argb(@start-color),argb(@end-color))); // IE9 and down
+  }
+
+  .directional(@start-color: #555; @end-color: #333; @deg: 45deg) {
+    background-repeat: repeat-x;
+    background-image: -webkit-linear-gradient(@deg, @start-color, @end-color); // Safari 5.1+, Chrome 10+
+    background-image: -moz-linear-gradient(@deg, @start-color, @end-color); // FF 3.6+
+    background-image: linear-gradient(@deg, @start-color, @end-color); // Standard, IE10
+  }
+  .horizontal-three-colors(@start-color: #00b3ee; @mid-color: #7a43b6; @color-stop: 50%; @end-color: #c3325f) {
+    background-image: -webkit-gradient(linear, 0 0, 100% 0, from(@start-color), color-stop(@color-stop, @mid-color), to(@end-color));
+    background-image: -webkit-linear-gradient(left, @start-color, @mid-color @color-stop, @end-color);
+    background-image: -moz-linear-gradient(left, @start-color, @mid-color @color-stop, @end-color);
+    background-image: linear-gradient(to right, @start-color, @mid-color @color-stop, @end-color);
+    background-repeat: no-repeat;
+    filter: e(%("progid:DXImageTransform.Microsoft.gradient(startColorstr='%d', endColorstr='%d', GradientType=1)",argb(@start-color),argb(@end-color))); // IE9 and down, gets no color-stop at all for proper fallback
+  }
+  .vertical-three-colors(@start-color: #00b3ee; @mid-color: #7a43b6; @color-stop: 50%; @end-color: #c3325f) {
+    background-image: -webkit-gradient(linear, 0 0, 0 100%, from(@start-color), color-stop(@color-stop, @mid-color), to(@end-color));
+    background-image: -webkit-linear-gradient(@start-color, @mid-color @color-stop, @end-color);
+    background-image: -moz-linear-gradient(top, @start-color, @mid-color @color-stop, @end-color);
+    background-image: linear-gradient(@start-color, @mid-color @color-stop, @end-color);
+    background-repeat: no-repeat;
+    filter: e(%("progid:DXImageTransform.Microsoft.gradient(startColorstr='%d', endColorstr='%d', GradientType=0)",argb(@start-color),argb(@end-color))); // IE9 and down, gets no color-stop at all for proper fallback
+  }
+  .radial(@inner-color: #555; @outer-color: #333) {
+    background-image: -webkit-gradient(radial, center center, 0, center center, 460, from(@inner-color), to(@outer-color));
+    background-image: -webkit-radial-gradient(circle, @inner-color, @outer-color);
+    background-image: -moz-radial-gradient(circle, @inner-color, @outer-color);
+    background-image: radial-gradient(circle, @inner-color, @outer-color);
+    background-repeat: no-repeat;
+  }
+  .striped(@color: #555; @angle: 45deg) {
+    background-image: -webkit-gradient(linear, 0 100%, 100% 0, color-stop(.25, rgba(255,255,255,.15)), color-stop(.25, transparent), color-stop(.5, transparent), color-stop(.5, rgba(255,255,255,.15)), color-stop(.75, rgba(255,255,255,.15)), color-stop(.75, transparent), to(transparent));
+    background-image: -webkit-linear-gradient(@angle, rgba(255,255,255,.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,.15) 50%, rgba(255,255,255,.15) 75%, transparent 75%, transparent);
+    background-image: -moz-linear-gradient(@angle, rgba(255,255,255,.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,.15) 50%, rgba(255,255,255,.15) 75%, transparent 75%, transparent);
+    background-image: linear-gradient(@angle, rgba(255,255,255,.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,.15) 50%, rgba(255,255,255,.15) 75%, transparent 75%, transparent);
+  }
+}
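+
+// Illustrative usage, not part of the upstream file: namespaced mixins are
+// reached with `>`; the class name and colors are hypothetical.
+// .panel-heading-custom {
+//   #gradient > .vertical(#fff; #e8e8e8);
+// }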
+
+// Reset filters for IE
+//
+// When you need to remove a gradient background, do not forget to use this to reset
+// the IE filter for IE9 and below.
+.reset-filter() {
+  filter: e(%("progid:DXImageTransform.Microsoft.gradient(enabled = false)"));
+}
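+
+// Illustrative usage, not part of the upstream file: pair it with
+// `background-image: none;` so modern browsers lose the gradient too.
+// .btn-custom:active {
+//   background-image: none;
+//   .reset-filter();
+// }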
+
+
+
+// Retina images
+//
+// Short retina mixin for setting background-image and -size
+
+.img-retina(@file-1x; @file-2x; @width-1x; @height-1x) {
+  background-image: url("@{file-1x}");
+
+  @media
+  only screen and (-webkit-min-device-pixel-ratio: 2),
+  only screen and (   min--moz-device-pixel-ratio: 2),
+  only screen and (     -o-min-device-pixel-ratio: 2/1),
+  only screen and (        min-device-pixel-ratio: 2),
+  only screen and (                min-resolution: 192dpi),
+  only screen and (                min-resolution: 2dppx) {
+    background-image: url("@{file-2x}");
+    background-size: @width-1x @height-1x;
+  }
+}
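+
+// Illustrative usage, not part of the upstream file; the paths and dimensions
+// are hypothetical.
+// .logo {
+//   .img-retina("../images/logo.png"; "../images/logo@2x.png"; 120px; 40px);
+// }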
+
+
+// Responsive image
+//
+// Keep images from scaling beyond the width of their parents.
+
+.img-responsive(@display: block;) {
+  display: @display;
+  max-width: 100%; // Part 1: Set a maximum relative to the parent
+  height: auto; // Part 2: Scale the height according to the width, otherwise you get stretching
+}
+
+
+// COMPONENT MIXINS
+// --------------------------------------------------
+
+// Horizontal dividers
+// -------------------------
+// Dividers (basically an hr) within dropdowns and nav lists
+.nav-divider(@color: #e5e5e5) {
+  height: 1px;
+  margin: ((@line-height-computed / 2) - 1) 0;
+  overflow: hidden;
+  background-color: @color;
+}
+
+// Panels
+// -------------------------
+.panel-variant(@border; @heading-text-color; @heading-bg-color; @heading-border;) {
+  border-color: @border;
+  & > .panel-heading {
+    color: @heading-text-color;
+    background-color: @heading-bg-color;
+    border-color: @heading-border;
+    + .panel-collapse .panel-body {
+      border-top-color: @border;
+    }
+  }
+  & > .panel-footer {
+    + .panel-collapse .panel-body {
+      border-bottom-color: @border;
+    }
+  }
+}
+
+// Alerts
+// -------------------------
+.alert-variant(@background; @border; @text-color) {
+  background-color: @background;
+  border-color: @border;
+  color: @text-color;
+  hr {
+    border-top-color: darken(@border, 5%);
+  }
+  .alert-link {
+    color: darken(@text-color, 10%);
+  }
+}
+
+// Tables
+// -------------------------
+.table-row-variant(@state; @background; @border) {
+  // Exact selectors below required to override `.table-striped` and prevent
+  // inheritance to nested tables.
+  .table > thead > tr,
+  .table > tbody > tr,
+  .table > tfoot > tr {
+    > td.@{state},
+    > th.@{state},
+    &.@{state} > td,
+    &.@{state} > th {
+      background-color: @background;
+      border-color: @border;
+    }
+  }
+
+  // Hover states for `.table-hover`
+  // Note: this is not available for cells or rows within `thead` or `tfoot`.
+  .table-hover > tbody > tr {
+    > td.@{state}:hover,
+    > th.@{state}:hover,
+    &.@{state}:hover > td,
+    &.@{state}:hover > th {
+      background-color: darken(@background, 5%);
+      border-color: darken(@border, 5%);
+    }
+  }
+}
+
+// Button variants
+// -------------------------
+// Easily pump out default styles, as well as :hover, :focus, :active,
+// and disabled options for all buttons
+.button-variant(@color; @background; @border) {
+  color: @color;
+  background-color: @background;
+  border-color: @border;
+
+  &:hover,
+  &:focus,
+  &:active,
+  &.active,
+  .open .dropdown-toggle& {
+    color: @color;
+    background-color: darken(@background, 8%);
+        border-color: darken(@border, 12%);
+  }
+  &:active,
+  &.active,
+  .open .dropdown-toggle& {
+    background-image: none;
+  }
+  &.disabled,
+  &[disabled],
+  fieldset[disabled] & {
+    &,
+    &:hover,
+    &:focus,
+    &:active,
+    &.active {
+      background-color: @background;
+          border-color: @border;
+    }
+  }
+}
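+
+// Illustrative usage, not part of the upstream file: one call generates the
+// default, hover, focus, active, and disabled skins; the class name and
+// colors are hypothetical.
+// .btn-galaxy {
+//   .button-variant(#fff; #5bc0de; #46b8da);
+// }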
+
+// Button sizes
+// -------------------------
+.button-size(@padding-vertical; @padding-horizontal; @font-size; @line-height; @border-radius) {
+  padding: @padding-vertical @padding-horizontal;
+  font-size: @font-size;
+  line-height: @line-height;
+  border-radius: @border-radius;
+}
+
+// Pagination
+// -------------------------
+.pagination-size(@padding-vertical; @padding-horizontal; @font-size; @border-radius) {
+  > li {
+    > a,
+    > span {
+      padding: @padding-vertical @padding-horizontal;
+      font-size: @font-size;
+    }
+    &:first-child {
+      > a,
+      > span {
+        .border-left-radius(@border-radius);
+      }
+    }
+    &:last-child {
+      > a,
+      > span {
+        .border-right-radius(@border-radius);
+      }
+    }
+  }
+}
+
+// Labels
+// -------------------------
+.label-variant(@color) {
+  background-color: @color;
+  &[href] {
+    &:hover,
+    &:focus {
+      background-color: darken(@color, 10%);
+    }
+  }
+}
+
+// Navbar vertical align
+// -------------------------
+// Vertically center elements in the navbar.
+// Example: an element has a height of 30px, so write out `.navbar-vertical-align(30px);` to calculate the appropriate top margin.
+.navbar-vertical-align(@element-height) {
+  margin-top: ((@navbar-height - @element-height) / 2);
+  margin-bottom: ((@navbar-height - @element-height) / 2);
+}
+
+// Progress bars
+// -------------------------
+.progress-bar-variant(@color) {
+  background-color: @color;
+  .progress-striped & {
+    #gradient > .striped(@color);
+  }
+}
+
+// Responsive utilities
+// -------------------------
+// More easily include all the states for responsive-utilities.less.
+.responsive-visibility() {
+  display: block !important;
+  tr& { display: table-row !important; }
+  th&,
+  td& { display: table-cell !important; }
+}
+
+.responsive-invisibility() {
+  display: none !important;
+  tr& { display: none !important; }
+  th&,
+  td& { display: none !important; }
+}
+
+// Grid System
+// -----------
+
+// Centered container element
+.container-fixed() {
+  margin-right: auto;
+  margin-left: auto;
+  padding-left:  (@grid-gutter-width / 2);
+  padding-right: (@grid-gutter-width / 2);
+  .clearfix();
+}
+
+// Creates a wrapper for a series of columns
+.make-row(@gutter: @grid-gutter-width) {
+  margin-left:  (@gutter / -2);
+  margin-right: (@gutter / -2);
+  .clearfix();
+}
+
+// Generate the extra small columns
+.make-xs-column(@columns; @gutter: @grid-gutter-width) {
+  position: relative;
+  float: left;
+  width: percentage((@columns / @grid-columns));
+  // Prevent columns from collapsing when empty
+  min-height: 1px;
+  // Inner gutter via padding
+  padding-left:  (@gutter / 2);
+  padding-right: (@gutter / 2);
+}
+
+// Generate the small columns
+.make-sm-column(@columns; @gutter: @grid-gutter-width) {
+  position: relative;
+  // Prevent columns from collapsing when empty
+  min-height: 1px;
+  // Inner gutter via padding
+  padding-left:  (@gutter / 2);
+  padding-right: (@gutter / 2);
+
+  // Calculate width based on number of columns available
+  @media (min-width: @screen-sm-min) {
+    float: left;
+    width: percentage((@columns / @grid-columns));
+  }
+}
+
+// Generate the small column offsets
+.make-sm-column-offset(@columns) {
+  @media (min-width: @screen-sm-min) {
+    margin-left: percentage((@columns / @grid-columns));
+  }
+}
+.make-sm-column-push(@columns) {
+  @media (min-width: @screen-sm-min) {
+    left: percentage((@columns / @grid-columns));
+  }
+}
+.make-sm-column-pull(@columns) {
+  @media (min-width: @screen-sm-min) {
+    right: percentage((@columns / @grid-columns));
+  }
+}
+
+// Generate the medium columns
+.make-md-column(@columns; @gutter: @grid-gutter-width) {
+  position: relative;
+  // Prevent columns from collapsing when empty
+  min-height: 1px;
+  // Inner gutter via padding
+  padding-left:  (@gutter / 2);
+  padding-right: (@gutter / 2);
+
+  // Calculate width based on number of columns available
+  @media (min-width: @screen-md-min) {
+    float: left;
+    width: percentage((@columns / @grid-columns));
+  }
+}
+
+// Generate the medium column offsets
+.make-md-column-offset(@columns) {
+  @media (min-width: @screen-md-min) {
+    margin-left: percentage((@columns / @grid-columns));
+  }
+}
+.make-md-column-push(@columns) {
+  @media (min-width: @screen-md-min) {
+    left: percentage((@columns / @grid-columns));
+  }
+}
+.make-md-column-pull(@columns) {
+  @media (min-width: @screen-md-min) {
+    right: percentage((@columns / @grid-columns));
+  }
+}
+
+// Generate the large columns
+.make-lg-column(@columns; @gutter: @grid-gutter-width) {
+  position: relative;
+  // Prevent columns from collapsing when empty
+  min-height: 1px;
+  // Inner gutter via padding
+  padding-left:  (@gutter / 2);
+  padding-right: (@gutter / 2);
+
+  // Calculate width based on number of columns available
+  @media (min-width: @screen-lg-min) {
+    float: left;
+    width: percentage((@columns / @grid-columns));
+  }
+}
+
+// Generate the large column offsets
+.make-lg-column-offset(@columns) {
+  @media (min-width: @screen-lg-min) {
+    margin-left: percentage((@columns / @grid-columns));
+  }
+}
+.make-lg-column-push(@columns) {
+  @media (min-width: @screen-lg-min) {
+    left: percentage((@columns / @grid-columns));
+  }
+}
+.make-lg-column-pull(@columns) {
+  @media (min-width: @screen-lg-min) {
+    right: percentage((@columns / @grid-columns));
+  }
+}
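+
+// Editor's illustrative sketch (not part of upstream Bootstrap): the mixins
+// above enable a semantic grid without .row/.col-* markup classes; the
+// selector names below are hypothetical.
+.wrapper-example {
+  .container-fixed();
+}
+.content-row-example {
+  .make-row();
+}
+.content-main-example {
+  .make-sm-column(8); // 8 of @grid-columns wide at and above @screen-sm-min
+}
+.content-sidebar-example {
+  .make-sm-column(4);
+}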
+
+
+// Form validation states
+//
+// Used in forms.less to generate the form validation CSS for warnings, errors,
+// and successes.
+
+.form-control-validation(@text-color: #555; @border-color: #ccc; @background-color: #f5f5f5) {
+  // Color the label and help text
+  .help-block,
+  .control-label {
+    color: @text-color;
+  }
+  // Set the border and box shadow on specific inputs to match
+  .form-control {
+    border-color: @border-color;
+    .box-shadow(inset 0 1px 1px rgba(0,0,0,.075)); // Redeclare so transitions work
+    &:focus {
+      border-color: darken(@border-color, 10%);
+      @shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 6px lighten(@border-color, 20%);
+      .box-shadow(@shadow);
+    }
+  }
+  // Set validation states also for addons
+  .input-group-addon {
+    color: @text-color;
+    border-color: @border-color;
+    background-color: @background-color;
+  }
+}
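+
+// Editor's illustrative sketch (not part of upstream Bootstrap): forms.less
+// generates the validation states roughly like the hypothetical one below,
+// passing the state's text/border/background colors (assumed from variables.less).
+.has-error-example {
+  .form-control-validation(@state-danger-text; @state-danger-text; @state-danger-bg);
+}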
+
+// Form control focus state
+//
+// Generate a customized focus state for any input with the specified color,
+// which defaults to the `@input-border-focus` variable.
+//
+// We highly encourage you to not customize the default value, but instead use
+// this to tweak colors on an as-needed basis. This aesthetic change is based on
+// WebKit's default styles, but applicable to a wider range of browsers. Its
+// usability and accessibility should be taken into account with any change.
+//
+// Example usage: change the default blue border and shadow to white for better
+// contrast against a dark gray background.
+
+.form-control-focus(@color: @input-border-focus) {
+  @color-rgba: rgba(red(@color), green(@color), blue(@color), .6);
+  &:focus {
+    border-color: @color;
+    outline: 0;
+    .box-shadow(~"inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px @{color-rgba}");
+  }
+}
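+
+// Editor's illustrative sketch (not part of upstream Bootstrap), per the
+// example in the comment above: a white focus ring for inputs on a dark
+// background; the selector name is hypothetical.
+.form-control-dark-bg-example {
+  .form-control-focus(#fff);
+}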
+
+// Form control sizing
+//
+// Relative text size, padding, and border-radii changes for form controls. For
+// horizontal sizing, wrap controls in the predefined grid classes. `<select>`
+// elements get special treatment because their height and line-height must be set explicitly.
+
+.input-size(@input-height; @padding-vertical; @padding-horizontal; @font-size; @line-height; @border-radius) {
+  height: @input-height;
+  padding: @padding-vertical @padding-horizontal;
+  font-size: @font-size;
+  line-height: @line-height;
+  border-radius: @border-radius;
+
+  select& {
+    height: @input-height;
+    line-height: @input-height;
+  }
+
+  textarea& {
+    height: auto;
+  }
+}
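+
+// Editor's illustrative sketch (not part of upstream Bootstrap): a hypothetical
+// compact control built from the mixin above; the size variables are assumed
+// from variables.less, as for the stock .input-sm.
+.input-compact-example {
+  .input-size(@input-height-small; @padding-small-vertical; @padding-small-horizontal; @font-size-small; @line-height-small; @border-radius-small);
+}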
diff --git a/client/galaxy/style/less/bootstrap/modals.less b/client/galaxy/style/less/bootstrap/modals.less
new file mode 100644
index 0000000..f886fb2
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/modals.less
@@ -0,0 +1,141 @@
+//
+// Modals
+// --------------------------------------------------
+
+// .modal-open      - body class for killing the scroll
+// .modal           - container to scroll within
+// .modal-dialog    - positioning shell for the actual modal
+// .modal-content   - actual modal w/ bg and corners
+
+// Kill the scroll on the body
+.modal-open {
+  overflow: hidden;
+
+  // Account for hiding of scrollbar
+  body&,
+  .navbar-fixed-top,
+  .navbar-fixed-bottom {
+    margin-right: 15px;
+  }
+}
+
+// Container that the modal scrolls within
+.modal {
+  display: none;
+  overflow: auto;
+  overflow-y: scroll;
+  position: fixed;
+  top: 0;
+  right: 0;
+  bottom: 0;
+  left: 0;
+  z-index: @zindex-modal-background;
+
+  // When fading in the modal, animate it to slide down
+  &.fade .modal-dialog {
+    .translate(0, -25%);
+    .transition-transform(~"0.3s ease-out");
+  }
+  &.in .modal-dialog { .translate(0, 0); }
+}
+
+// Shell div to position the modal with bottom padding
+.modal-dialog {
+  margin-left: auto;
+  margin-right: auto;
+  width: auto;
+  padding: 10px;
+  z-index: (@zindex-modal-background + 10);
+}
+
+// Actual modal
+.modal-content {
+  position: relative;
+  background-color: @modal-content-bg;
+  border: 1px solid @modal-content-fallback-border-color; //old browsers fallback (ie8 etc)
+  border: 1px solid @modal-content-border-color;
+  border-radius: @border-radius-large;
+  .box-shadow(0 3px 9px rgba(0,0,0,.5));
+  background-clip: padding-box;
+  // Remove focus outline from opened modal
+  outline: none;
+}
+
+// Modal background
+.modal-backdrop {
+  position: fixed;
+  top: 0;
+  right: 0;
+  bottom: 0;
+  left: 0;
+  z-index: (@zindex-modal-background - 10);
+  background-color: @modal-backdrop-bg;
+  // Fade for backdrop
+  &.fade { .opacity(0); }
+  &.in { .opacity(.5); }
+}
+
+// Modal header
+// Top section of the modal w/ title and dismiss
+.modal-header {
+  padding: @modal-title-padding;
+  border-bottom: 1px solid @modal-header-border-color;
+  min-height: (@modal-title-padding + @modal-title-line-height);
+}
+// Close icon
+.modal-header .close {
+  margin-top: -2px;
+}
+
+// Title text within header
+.modal-title {
+  margin: 0;
+  line-height: @modal-title-line-height;
+}
+
+// Modal body
+// Where all modal content resides (sibling of .modal-header and .modal-footer)
+.modal-body {
+  position: relative;
+  padding: @modal-inner-padding;
+}
+
+// Footer (for actions)
+.modal-footer {
+  margin-top: 15px;
+  padding: (@modal-inner-padding - 1) @modal-inner-padding @modal-inner-padding;
+  text-align: right; // right align buttons
+  border-top: 1px solid @modal-footer-border-color;
+  .clearfix(); // clear it in case folks use .pull-* classes on buttons
+
+  // Properly space out buttons
+  .btn + .btn {
+    margin-left: 5px;
+    margin-bottom: 0; // account for input[type="submit"] which gets the bottom margin like all other inputs
+  }
+  // but override that for button groups
+  .btn-group .btn + .btn {
+    margin-left: -1px;
+  }
+  // and override it for block buttons as well
+  .btn-block + .btn-block {
+    margin-left: 0;
+  }
+}
+
+// Scale up the modal
+@media screen and (min-width: @screen-sm) {
+
+  .modal-dialog {
+    left: 50%;
+    right: auto;
+    width: 600px;
+    padding-top: 30px;
+    padding-bottom: 30px;
+  }
+  .modal-content {
+    .box-shadow(0 5px 15px rgba(0,0,0,.5));
+  }
+
+}
diff --git a/client/galaxy/style/less/bootstrap/navbar.less b/client/galaxy/style/less/bootstrap/navbar.less
new file mode 100644
index 0000000..351e204
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/navbar.less
@@ -0,0 +1,621 @@
+//
+// Navbars
+// --------------------------------------------------
+
+
+// Wrapper and base class
+//
+// Provide a static navbar from which we expand to create full-width, fixed, and
+// other navbar variations.
+
+.navbar {
+  position: relative;
+  z-index: @zindex-navbar;
+  min-height: @navbar-height; // Ensure a navbar always shows (e.g., without a .navbar-brand in collapsed mode)
+  margin-bottom: @navbar-margin-bottom;
+  border: 1px solid transparent;
+
+  // Prevent floats from breaking the navbar
+  .clearfix();
+
+  @media (min-width: @grid-float-breakpoint) {
+    border-radius: @navbar-border-radius;
+  }
+}
+
+
+// Navbar heading
+//
+// Groups `.navbar-brand` and `.navbar-toggle` into a single component for easy
+// styling of responsive aspects.
+
+.navbar-header {
+  .clearfix();
+
+  @media (min-width: @grid-float-breakpoint) {
+    float: left;
+  }
+}
+
+
+// Navbar collapse (body)
+//
+// Group your navbar content into this for easy collapsing and expanding across
+// various device sizes. By default, this content is collapsed when <768px, but
+// will expand past that for a horizontal display.
+//
+// To start (on mobile devices), the navbar links, forms, and buttons are stacked
+// vertically and include a `max-height` to overflow in case you have too much
+// content for the user's viewport.
+
+.navbar-collapse {
+  max-height: 340px;
+  overflow-x: visible;
+  padding-right: @navbar-padding-horizontal;
+  padding-left:  @navbar-padding-horizontal;
+  border-top: 1px solid transparent;
+  box-shadow: inset 0 1px 0 rgba(255,255,255,.1);
+  .clearfix();
+  -webkit-overflow-scrolling: touch;
+
+  &.in {
+    overflow-y: auto;
+  }
+
+  @media (min-width: @grid-float-breakpoint) {
+    width: auto;
+    border-top: 0;
+    box-shadow: none;
+
+    &.collapse {
+      display: block !important;
+      height: auto !important;
+      padding-bottom: 0; // Override default setting
+      overflow: visible !important;
+    }
+
+    &.in {
+      overflow-y: visible;
+    }
+
+    // Account for first and last children spacing
+    .navbar-nav.navbar-left:first-child {
+      margin-left: -@navbar-padding-horizontal;
+    }
+    .navbar-nav.navbar-right:last-child {
+      margin-right: -@navbar-padding-horizontal;
+    }
+    .navbar-text:last-child {
+      margin-right: 0;
+    }
+  }
+}
+
+
+// Both navbar header and collapse
+//
+// When a container is present, change the behavior of the header and collapse.
+
+.container > .navbar-header,
+.container > .navbar-collapse {
+  margin-right: -@navbar-padding-horizontal;
+  margin-left:  -@navbar-padding-horizontal;
+
+  @media (min-width: @grid-float-breakpoint) {
+    margin-right: 0;
+    margin-left:  0;
+  }
+}
+
+
+//
+// Navbar alignment options
+//
+// Display the navbar across the entirety of the page or fix it to the top or
+// bottom of the page.
+
+// Static top (unfixed, but 100% wide) navbar
+.navbar-static-top {
+  border-width: 0 0 1px;
+  @media (min-width: @grid-float-breakpoint) {
+    border-radius: 0;
+  }
+}
+
+// Fix the top/bottom navbars when screen real estate supports it
+.navbar-fixed-top,
+.navbar-fixed-bottom {
+  position: fixed;
+  right: 0;
+  left: 0;
+  border-width: 0 0 1px;
+
+  // Undo the rounded corners
+  @media (min-width: @grid-float-breakpoint) {
+    border-radius: 0;
+  }
+}
+.navbar-fixed-top {
+  z-index: @zindex-navbar-fixed;
+  top: 0;
+}
+.navbar-fixed-bottom {
+  bottom: 0;
+  margin-bottom: 0; // override .navbar defaults
+}
+
+
+// Brand/project name
+
+.navbar-brand {
+  float: left;
+  padding: @navbar-padding-vertical @navbar-padding-horizontal;
+  font-size: @font-size-large;
+  line-height: @line-height-computed;
+  &:hover,
+  &:focus {
+    text-decoration: none;
+  }
+
+  @media (min-width: @grid-float-breakpoint) {
+    .navbar > .container & {
+      margin-left: -@navbar-padding-horizontal;
+    }
+  }
+}
+
+
+// Navbar toggle
+//
+// Custom button for toggling the `.navbar-collapse`, powered by the collapse
+// JavaScript plugin.
+
+.navbar-toggle {
+  position: relative;
+  float: right;
+  margin-right: @navbar-padding-horizontal;
+  padding: 9px 10px;
+  .navbar-vertical-align(34px);
+  background-color: transparent;
+  border: 1px solid transparent;
+  border-radius: @border-radius-base;
+
+  // Bars
+  .icon-bar {
+    display: block;
+    width: 22px;
+    height: 2px;
+    border-radius: 1px;
+  }
+  .icon-bar + .icon-bar {
+    margin-top: 4px;
+  }
+
+  @media (min-width: @grid-float-breakpoint) {
+    display: none;
+  }
+}
+
+
+// Navbar nav links
+//
+// Builds on top of the `.nav` components with its own modifier class to make
+// the nav the full height of the horizontal nav (above 768px).
+
+.navbar-nav {
+  margin: (@navbar-padding-vertical / 2) -@navbar-padding-horizontal;
+
+  > li > a {
+    padding-top:    10px;
+    padding-bottom: 10px;
+    line-height: @line-height-computed;
+  }
+
+  @media (max-width: @screen-xs-max) {
+    // Dropdowns get custom display when collapsed
+    .open .dropdown-menu {
+      position: static;
+      float: none;
+      width: auto;
+      margin-top: 0;
+      background-color: transparent;
+      border: 0;
+      box-shadow: none;
+      > li > a,
+      .dropdown-header {
+        padding: 5px 15px 5px 25px;
+      }
+      > li > a {
+        line-height: @line-height-computed;
+        &:hover,
+        &:focus {
+          background-image: none;
+        }
+      }
+    }
+  }
+
+  // Uncollapse the nav
+  @media (min-width: @grid-float-breakpoint) {
+    float: left;
+    margin: 0;
+
+    > li {
+      float: left;
+      > a {
+        padding-top: ((@navbar-height - @line-height-computed) / 2);
+        padding-bottom: ((@navbar-height - @line-height-computed) / 2);
+      }
+    }
+  }
+
+}
+
+
+// Component alignment
+//
+// Repurpose the pull utilities as their own navbar utilities to avoid specificity
+// issues with parents and chaining. Only do this when the navbar is uncollapsed
+// though so that navbar contents properly stack and align in mobile.
+
+@media (min-width: @grid-float-breakpoint) {
+  .navbar-left  { .pull-left(); }
+  .navbar-right { .pull-right(); }
+}
+
+
+// Navbar form
+//
+// Extension of the `.form-inline` with some extra flavor for optimum display in
+// our navbars.
+
+.navbar-form {
+  margin-left: -@navbar-padding-horizontal;
+  margin-right: -@navbar-padding-horizontal;
+  padding: 10px @navbar-padding-horizontal;
+  border-top: 1px solid transparent;
+  border-bottom: 1px solid transparent;
+  @shadow: inset 0 1px 0 rgba(255,255,255,.1), 0 1px 0 rgba(255,255,255,.1);
+  .box-shadow(@shadow);
+
+  // Mixin behavior for optimum display
+  .form-inline();
+
+  .form-group {
+    @media (max-width: @screen-xs-max) {
+      margin-bottom: 5px;
+    }
+  }
+
+  // Vertically center in expanded, horizontal navbar
+  .navbar-vertical-align(@input-height-base);
+
+  // Undo 100% width for pull classes
+  @media (min-width: @grid-float-breakpoint) {
+    width: auto;
+    border: 0;
+    margin-left: 0;
+    margin-right: 0;
+    padding-top: 0;
+    padding-bottom: 0;
+    .box-shadow(none);
+  }
+}
+
+
+// Dropdown menus
+
+// Menu position and menu carets
+.navbar-nav > li > .dropdown-menu {
+  margin-top: 0;
+  .border-top-radius(0);
+}
+// Menu position and menu caret support for dropups via extra dropup class
+.navbar-fixed-bottom .navbar-nav > li > .dropdown-menu {
+  .border-bottom-radius(0);
+}
+
+// Right aligned menus need alt position
+.navbar-nav.pull-right > li > .dropdown-menu,
+.navbar-nav > li > .dropdown-menu.pull-right {
+  left: auto;
+  right: 0;
+}
+
+
+// Buttons in navbars
+//
+// Vertically center a button within a navbar (when *not* in a form).
+
+.navbar-btn {
+  .navbar-vertical-align(@input-height-base);
+}
+
+
+// Text in navbars
+//
+// Add a class to make any element properly align itself vertically within the navbars.
+
+.navbar-text {
+  float: left;
+  .navbar-vertical-align(@line-height-computed);
+
+  @media (min-width: @grid-float-breakpoint) {
+    margin-left: @navbar-padding-horizontal;
+    margin-right: @navbar-padding-horizontal;
+  }
+}
+
+// Alternate navbars
+// --------------------------------------------------
+
+// Default navbar
+.navbar-default {
+  background-color: @navbar-default-bg;
+  border-color: @navbar-default-border;
+
+  .navbar-brand {
+    color: @navbar-default-brand-color;
+    &:hover,
+    &:focus {
+      color: @navbar-default-brand-hover-color;
+      background-color: @navbar-default-brand-hover-bg;
+    }
+  }
+
+  .navbar-text {
+    color: @navbar-default-color;
+  }
+
+  .navbar-nav {
+    > li > a {
+      color: @navbar-default-link-color;
+
+      &:hover,
+      &:focus {
+        color: @navbar-default-link-hover-color;
+        background-color: @navbar-default-link-hover-bg;
+      }
+    }
+    > .active > a {
+      &,
+      &:hover,
+      &:focus {
+        color: @navbar-default-link-active-color;
+        background-color: @navbar-default-link-active-bg;
+      }
+    }
+    > .disabled > a {
+      &,
+      &:hover,
+      &:focus {
+        color: @navbar-default-link-disabled-color;
+        background-color: @navbar-default-link-disabled-bg;
+      }
+    }
+  }
+
+  .navbar-toggle {
+    border-color: @navbar-default-toggle-border-color;
+    &:hover,
+    &:focus {
+      background-color: @navbar-default-toggle-hover-bg;
+    }
+    .icon-bar {
+      background-color: @navbar-default-toggle-icon-bar-bg;
+    }
+  }
+
+  .navbar-collapse,
+  .navbar-form {
+    border-color: darken(@navbar-default-bg, 7%);
+  }
+
+  // Dropdown menu items and carets
+  .navbar-nav {
+    // Caret should match text color on hover
+    > .dropdown > a:hover .caret,
+    > .dropdown > a:focus .caret {
+      border-top-color: @navbar-default-link-hover-color;
+      border-bottom-color: @navbar-default-link-hover-color;
+    }
+
+    // Remove background color from open dropdown
+    > .open > a {
+      &,
+      &:hover,
+      &:focus {
+        background-color: @navbar-default-link-active-bg;
+        color: @navbar-default-link-active-color;
+        .caret {
+          border-top-color: @navbar-default-link-active-color;
+          border-bottom-color: @navbar-default-link-active-color;
+        }
+      }
+    }
+    > .dropdown > a .caret {
+      border-top-color: @navbar-default-link-color;
+      border-bottom-color: @navbar-default-link-color;
+    }
+
+
+    @media (max-width: @screen-xs-max) {
+      // Dropdowns get custom display when collapsed
+      .open .dropdown-menu {
+        > li > a {
+          color: @navbar-default-link-color;
+          &:hover,
+          &:focus {
+            color: @navbar-default-link-hover-color;
+            background-color: @navbar-default-link-hover-bg;
+          }
+        }
+        > .active > a {
+          &,
+          &:hover,
+          &:focus {
+            color: @navbar-default-link-active-color;
+            background-color: @navbar-default-link-active-bg;
+          }
+        }
+        > .disabled > a {
+          &,
+          &:hover,
+          &:focus {
+            color: @navbar-default-link-disabled-color;
+            background-color: @navbar-default-link-disabled-bg;
+          }
+        }
+      }
+    }
+  }
+
+
+  // Links in navbars
+  //
+  // Add a class to ensure links outside the navbar nav are colored correctly.
+
+  .navbar-link {
+    color: @navbar-default-link-color;
+    &:hover {
+      color: @navbar-default-link-hover-color;
+    }
+  }
+
+}
+
+// Inverse navbar
+
+.navbar-inverse {
+  background-color: @navbar-inverse-bg;
+  border-color: @navbar-inverse-border;
+
+  .navbar-brand {
+    color: @navbar-inverse-brand-color;
+    &:hover,
+    &:focus {
+      color: @navbar-inverse-brand-hover-color;
+      background-color: @navbar-inverse-brand-hover-bg;
+    }
+  }
+
+  .navbar-text {
+    color: @navbar-inverse-color;
+  }
+
+  .navbar-nav {
+    > li > a {
+      color: @navbar-inverse-link-color;
+
+      &:hover,
+      &:focus {
+        color: @navbar-inverse-link-hover-color;
+        background-color: @navbar-inverse-link-hover-bg;
+      }
+    }
+    > .active > a {
+      &,
+      &:hover,
+      &:focus {
+        color: @navbar-inverse-link-active-color;
+        background-color: @navbar-inverse-link-active-bg;
+      }
+    }
+    > .disabled > a {
+      &,
+      &:hover,
+      &:focus {
+        color: @navbar-inverse-link-disabled-color;
+        background-color: @navbar-inverse-link-disabled-bg;
+      }
+    }
+  }
+
+  // Darken the responsive nav toggle
+  .navbar-toggle {
+    border-color: @navbar-inverse-toggle-border-color;
+    &:hover,
+    &:focus {
+      background-color: @navbar-inverse-toggle-hover-bg;
+    }
+    .icon-bar {
+      background-color: @navbar-inverse-toggle-icon-bar-bg;
+    }
+  }
+
+  .navbar-collapse,
+  .navbar-form {
+    border-color: darken(@navbar-inverse-bg, 7%);
+  }
+
+  // Dropdowns
+  .navbar-nav {
+    > .open > a {
+      &,
+      &:hover,
+      &:focus {
+        background-color: @navbar-inverse-link-active-bg;
+        color: @navbar-inverse-link-active-color;
+      }
+    }
+    > .dropdown > a:hover .caret {
+      border-top-color: @navbar-inverse-link-hover-color;
+      border-bottom-color: @navbar-inverse-link-hover-color;
+    }
+    > .dropdown > a .caret {
+      border-top-color: @navbar-inverse-link-color;
+      border-bottom-color: @navbar-inverse-link-color;
+    }
+    > .open > a {
+      &,
+      &:hover,
+      &:focus {
+        .caret {
+          border-top-color: @navbar-inverse-link-active-color;
+          border-bottom-color: @navbar-inverse-link-active-color;
+        }
+      }
+    }
+
+    @media (max-width: @screen-xs-max) {
+      // Dropdowns get custom display
+      .open .dropdown-menu {
+        > .dropdown-header {
+          border-color: @navbar-inverse-border;
+        }
+        > li > a {
+          color: @navbar-inverse-link-color;
+          &:hover,
+          &:focus {
+            color: @navbar-inverse-link-hover-color;
+            background-color: @navbar-inverse-link-hover-bg;
+          }
+        }
+        > .active > a {
+          &,
+          &:hover,
+          &:focus {
+            color: @navbar-inverse-link-active-color;
+            background-color: @navbar-inverse-link-active-bg;
+          }
+        }
+        > .disabled > a {
+          &,
+          &:hover,
+          &:focus {
+            color: @navbar-inverse-link-disabled-color;
+            background-color: @navbar-inverse-link-disabled-bg;
+          }
+        }
+      }
+    }
+  }
+
+  .navbar-link {
+    color: @navbar-inverse-link-color;
+    &:hover {
+      color: @navbar-inverse-link-hover-color;
+    }
+  }
+
+}
diff --git a/client/galaxy/style/less/bootstrap/navs.less b/client/galaxy/style/less/bootstrap/navs.less
new file mode 100644
index 0000000..27861f8
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/navs.less
@@ -0,0 +1,248 @@
+//
+// Navs
+// --------------------------------------------------
+
+
+// Base class
+// --------------------------------------------------
+
+.nav {
+  margin-bottom: 0;
+  padding-left: 0; // Override default ul/ol
+  list-style: none;
+  .clearfix();
+
+  > li {
+    position: relative;
+    display: block;
+
+    > a {
+      position: relative;
+      display: block;
+      padding: @nav-link-padding;
+      &:hover,
+      &:focus {
+        text-decoration: none;
+        background-color: @nav-link-hover-bg;
+      }
+    }
+
+    // Disabled state sets text to gray and nukes hover/tab effects
+    &.disabled > a {
+      color: @nav-disabled-link-color;
+
+      &:hover,
+      &:focus {
+        color: @nav-disabled-link-hover-color;
+        text-decoration: none;
+        background-color: transparent;
+        cursor: not-allowed;
+      }
+    }
+  }
+
+  // Open dropdowns
+  .open > a {
+    &,
+    &:hover,
+    &:focus {
+      background-color: @nav-link-hover-bg;
+      border-color: @link-color;
+    }
+  }
+
+  // Dividers (basically an hr) within the dropdown
+  .nav-divider {
+    .nav-divider();
+  }
+
+  // Prevent IE8 from misplacing imgs
+  // See https://github.com/h5bp/html5-boilerplate/issues/984#issuecomment-3985989
+  > li > a > img {
+    max-width: none;
+  }
+}
+
+
+// Tabs
+// -------------------------
+
+// Give the tabs something to sit on
+.nav-tabs {
+  border-bottom: 1px solid @nav-tabs-border-color;
+  > li {
+    float: left;
+    // Make the list-items overlay the bottom border
+    margin-bottom: -1px;
+
+    // Actual tabs (as links)
+    > a {
+      margin-right: 2px;
+      line-height: @line-height-base;
+      border: 1px solid transparent;
+      border-radius: @border-radius-base @border-radius-base 0 0;
+      &:hover {
+        border-color: @nav-tabs-link-hover-border-color @nav-tabs-link-hover-border-color @nav-tabs-border-color;
+      }
+    }
+
+    // Active state, and its :hover to override normal :hover
+    &.active > a {
+      &,
+      &:hover,
+      &:focus {
+        color: @nav-tabs-active-link-hover-color;
+        background-color: @nav-tabs-active-link-hover-bg;
+        border: 1px solid @nav-tabs-active-link-hover-border-color;
+        border-bottom-color: transparent;
+        cursor: default;
+      }
+    }
+  }
+  // pulling this in mainly for less shorthand
+  &.nav-justified {
+    .nav-justified();
+    .nav-tabs-justified();
+  }
+}
+
+
+// Pills
+// -------------------------
+.nav-pills {
+  > li {
+    float: left;
+
+    // Links rendered as pills
+    > a {
+      border-radius: 5px;
+    }
+    + li {
+      margin-left: 2px;
+    }
+
+    // Active state
+    &.active > a {
+      &,
+      &:hover,
+      &:focus {
+        color: @nav-pills-active-link-hover-color;
+        background-color: @nav-pills-active-link-hover-bg;
+      }
+    }
+  }
+}
+
+
+// Stacked pills
+.nav-stacked {
+  > li {
+    float: none;
+    + li {
+      margin-top: 2px;
+      margin-left: 0; // no need for this gap between nav items
+    }
+  }
+}
+
+
+// Nav variations
+// --------------------------------------------------
+
+// Justified nav links
+// -------------------------
+
+.nav-justified {
+  width: 100%;
+
+  > li {
+    float: none;
+    > a {
+      text-align: center;
+      margin-bottom: 5px;
+    }
+  }
+
+  @media (min-width: @screen-sm-min) {
+    > li {
+      display: table-cell;
+      width: 1%;
+      > a {
+        margin-bottom: 0;
+      }
+    }
+  }
+}
+
+// Move borders to anchors instead of bottom of list
+.nav-tabs-justified {
+  border-bottom: 0;
+
+  > li > a {
+    // Override margin from .nav-tabs
+    margin-right: 0;
+    border-radius: @border-radius-base;
+  }
+
+  > .active > a,
+  > .active > a:hover,
+  > .active > a:focus {
+    border: 1px solid @nav-tabs-justified-link-border-color;
+  }
+
+  @media (min-width: @screen-sm-min) {
+    > li > a {
+      border-bottom: 1px solid @nav-tabs-justified-link-border-color;
+      border-radius: @border-radius-base @border-radius-base 0 0;
+    }
+    > .active > a,
+    > .active > a:hover,
+    > .active > a:focus {
+      border-bottom-color: @nav-tabs-justified-active-link-border-color;
+    }
+  }
+}
+
+
+// Tabbable tabs
+// -------------------------
+
+// Clear any floats
+.tabbable {
+  .clearfix();
+}
+
+// Show/hide tabbable areas
+.tab-content > .tab-pane,
+.pill-content > .pill-pane {
+  display: none;
+}
+.tab-content,
+.pill-content {
+  > .active {
+    display: block;
+  }
+}
+
+
+
+// Dropdowns
+// -------------------------
+
+// Make dropdown carets use link color in navs
+.nav .caret {
+  border-top-color: @link-color;
+  border-bottom-color: @link-color;
+}
+.nav a:hover .caret {
+  border-top-color: @link-hover-color;
+  border-bottom-color: @link-hover-color;
+}
+
+// Specific dropdowns
+.nav-tabs .dropdown-menu {
+  // make dropdown border overlap tab border
+  margin-top: -1px;
+  // Remove the top rounded corners here since there is a hard edge above the menu
+  .border-top-radius(0);
+}
diff --git a/client/galaxy/style/less/bootstrap/normalize.less b/client/galaxy/style/less/bootstrap/normalize.less
new file mode 100644
index 0000000..a2e9c64
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/normalize.less
@@ -0,0 +1,396 @@
+/*! normalize.css v2.1.0 | MIT License | git.io/normalize */
+
+// ==========================================================================
+// HTML5 display definitions
+// ==========================================================================
+
+//
+// Correct `block` display not defined in IE 8/9.
+//
+
+article,
+aside,
+details,
+figcaption,
+figure,
+footer,
+header,
+hgroup,
+main,
+nav,
+section,
+summary {
+  display: block;
+}
+
+//
+// Correct `inline-block` display not defined in IE 8/9.
+//
+
+audio,
+canvas,
+video {
+  display: inline-block;
+}
+
+//
+// Prevent modern browsers from displaying `audio` without controls.
+// Remove excess height in iOS 5 devices.
+//
+
+audio:not([controls]) {
+  display: none;
+  height: 0;
+}
+
+//
+// Address styling not present in IE 8/9.
+//
+
+[hidden] {
+  display: none;
+}
+
+// ==========================================================================
+// Base
+// ==========================================================================
+
+//
+// 1. Set default font family to sans-serif.
+// 2. Prevent iOS text size adjust after orientation change, without disabling
+//    user zoom.
+//
+
+html {
+  font-family: sans-serif; // 1
+  -webkit-text-size-adjust: 100%; // 2
+  -ms-text-size-adjust: 100%; // 2
+}
+
+//
+// Remove default margin.
+//
+
+body {
+  margin: 0;
+}
+
+// ==========================================================================
+// Links
+// ==========================================================================
+
+//
+// Address `outline` inconsistency between Chrome and other browsers.
+//
+
+a:focus {
+  outline: thin dotted;
+}
+
+//
+// Improve readability when focused and also mouse hovered in all browsers.
+//
+
+a:active,
+a:hover {
+  outline: 0;
+}
+
+// ==========================================================================
+// Typography
+// ==========================================================================
+
+//
+// Address variable `h1` font-size and margin within `section` and `article`
+// contexts in Firefox 4+, Safari 5, and Chrome.
+//
+
+h1 {
+  font-size: 2em;
+  margin: 0.67em 0;
+}
+
+//
+// Address styling not present in IE 8/9, Safari 5, and Chrome.
+//
+
+abbr[title] {
+  border-bottom: 1px dotted;
+}
+
+//
+// Address style set to `bolder` in Firefox 4+, Safari 5, and Chrome.
+//
+
+b,
+strong {
+  font-weight: bold;
+}
+
+//
+// Address styling not present in Safari 5 and Chrome.
+//
+
+dfn {
+  font-style: italic;
+}
+
+//
+// Address differences between Firefox and other browsers.
+//
+
+hr {
+  -moz-box-sizing: content-box;
+  box-sizing: content-box;
+  height: 0;
+}
+
+//
+// Address styling not present in IE 8/9.
+//
+
+mark {
+  background: #ff0;
+  color: #000;
+}
+
+//
+// Correct font family set oddly in Safari 5 and Chrome.
+//
+
+code,
+kbd,
+pre,
+samp {
+  font-family: monospace, serif;
+  font-size: 1em;
+}
+
+//
+// Improve readability of pre-formatted text in all browsers.
+//
+
+pre {
+  white-space: pre-wrap;
+}
+
+//
+// Set consistent quote types.
+//
+
+q {
+  quotes: "\201C" "\201D" "\2018" "\2019";
+}
+
+//
+// Address inconsistent and variable font size in all browsers.
+//
+
+small {
+  font-size: 80%;
+}
+
+//
+// Prevent `sub` and `sup` affecting `line-height` in all browsers.
+//
+
+sub,
+sup {
+  font-size: 75%;
+  line-height: 0;
+  position: relative;
+  vertical-align: baseline;
+}
+
+sup {
+  top: -0.5em;
+}
+
+sub {
+  bottom: -0.25em;
+}
+
+// ==========================================================================
+// Embedded content
+// ==========================================================================
+
+//
+// Remove border when inside `a` element in IE 8/9.
+//
+
+img {
+  border: 0;
+}
+
+//
+// Correct overflow displayed oddly in IE 9.
+//
+
+svg:not(:root) {
+  overflow: hidden;
+}
+
+// ==========================================================================
+// Figures
+// ==========================================================================
+
+//
+// Address margin not present in IE 8/9 and Safari 5.
+//
+
+figure {
+  margin: 0;
+}
+
+// ==========================================================================
+// Forms
+// ==========================================================================
+
+//
+// Define consistent border, margin, and padding.
+//
+
+fieldset {
+  border: 1px solid #c0c0c0;
+  margin: 0 2px;
+  padding: 0.35em 0.625em 0.75em;
+}
+
+//
+// 1. Correct `color` not being inherited in IE 8/9.
+// 2. Remove padding so people aren't caught out if they zero out fieldsets.
+//
+
+legend {
+  border: 0; // 1
+  padding: 0; // 2
+}
+
+//
+// 1. Correct font family not being inherited in all browsers.
+// 2. Correct font size not being inherited in all browsers.
+// 3. Address margins set differently in Firefox 4+, Safari 5, and Chrome.
+//
+
+button,
+input,
+select,
+textarea {
+  font-family: inherit; // 1
+  font-size: 100%; // 2
+  margin: 0; // 3
+}
+
+//
+// Address Firefox 4+ setting `line-height` on `input` using `!important` in
+// the UA stylesheet.
+//
+
+button,
+input {
+  line-height: normal;
+}
+
+//
+// Address inconsistent `text-transform` inheritance for `button` and `select`.
+// All other form control elements do not inherit `text-transform` values.
+// Correct `button` style inheritance in Chrome, Safari 5+, and IE 8+.
+// Correct `select` style inheritance in Firefox 4+ and Opera.
+//
+
+button,
+select {
+  text-transform: none;
+}
+
+//
+// 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio`
+//    and `video` controls.
+// 2. Correct inability to style clickable `input` types in iOS.
+// 3. Improve usability and consistency of cursor style between image-type
+//    `input` and others.
+//
+
+button,
+html input[type="button"], // 1
+input[type="reset"],
+input[type="submit"] {
+  -webkit-appearance: button; // 2
+  cursor: pointer; // 3
+}
+
+//
+// Re-set default cursor for disabled elements.
+//
+
+button[disabled],
+html input[disabled] {
+  cursor: default;
+}
+
+//
+// 1. Address box sizing set to `content-box` in IE 8/9.
+// 2. Remove excess padding in IE 8/9.
+//
+
+input[type="checkbox"],
+input[type="radio"] {
+  box-sizing: border-box; // 1
+  padding: 0; // 2
+}
+
+//
+// 1. Address `appearance` set to `searchfield` in Safari 5 and Chrome.
+// 2. Address `box-sizing` set to `border-box` in Safari 5 and Chrome
+//    (include `-moz` to future-proof).
+//
+
+input[type="search"] {
+  -webkit-appearance: textfield; // 1
+  -moz-box-sizing: content-box;
+  -webkit-box-sizing: content-box; // 2
+  box-sizing: content-box;
+}
+
+//
+// Remove inner padding and search cancel button in Safari 5 and Chrome
+// on OS X.
+//
+
+input[type="search"]::-webkit-search-cancel-button,
+input[type="search"]::-webkit-search-decoration {
+  -webkit-appearance: none;
+}
+
+//
+// Remove inner padding and border in Firefox 4+.
+//
+
+button::-moz-focus-inner,
+input::-moz-focus-inner {
+  border: 0;
+  padding: 0;
+}
+
+//
+// 1. Remove default vertical scrollbar in IE 8/9.
+// 2. Improve readability and alignment in all browsers.
+//
+
+textarea {
+  overflow: auto; // 1
+  vertical-align: top; // 2
+}
+
+// ==========================================================================
+// Tables
+// ==========================================================================
+
+//
+// Remove most spacing between table cells.
+//
+
+table {
+  border-collapse: collapse;
+  border-spacing: 0;
+}
diff --git a/client/galaxy/style/less/bootstrap/pager.less b/client/galaxy/style/less/bootstrap/pager.less
new file mode 100644
index 0000000..16993dd
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/pager.less
@@ -0,0 +1,55 @@
+//
+// Pager pagination
+// --------------------------------------------------
+
+
+.pager {
+  padding-left: 0;
+  margin: @line-height-computed 0;
+  list-style: none;
+  text-align: center;
+  .clearfix();
+  li {
+    display: inline;
+    > a,
+    > span {
+      display: inline-block;
+      padding: 5px 14px;
+      background-color: @pagination-bg;
+      border: 1px solid @pagination-border;
+      border-radius: @pager-border-radius;
+    }
+
+    > a:hover,
+    > a:focus {
+      text-decoration: none;
+      background-color: @pagination-hover-bg;
+    }
+  }
+
+  .next {
+    > a,
+    > span {
+      float: right;
+    }
+  }
+
+  .previous {
+    > a,
+    > span {
+      float: left;
+    }
+  }
+
+  .disabled {
+    > a,
+    > a:hover,
+    > a:focus,
+    > span {
+      color: @pager-disabled-color;
+      background-color: @pagination-bg;
+      cursor: not-allowed;
+    }
+  }
+
+}
diff --git a/client/galaxy/style/less/bootstrap/pagination.less b/client/galaxy/style/less/bootstrap/pagination.less
new file mode 100644
index 0000000..5c68b8b
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/pagination.less
@@ -0,0 +1,85 @@
+//
+// Pagination (multiple pages)
+// --------------------------------------------------
+.pagination {
+  display: inline-block;
+  padding-left: 0;
+  margin: @line-height-computed 0;
+  border-radius: @border-radius-base;
+
+  > li {
+    display: inline; // Remove list-style and block-level defaults
+    > a,
+    > span {
+      position: relative;
+      float: left; // Collapse white-space
+      padding: @padding-base-vertical @padding-base-horizontal;
+      line-height: @line-height-base;
+      text-decoration: none;
+      background-color: @pagination-bg;
+      border: 1px solid @pagination-border;
+      margin-left: -1px;
+    }
+    &:first-child {
+      > a,
+      > span {
+        margin-left: 0;
+        .border-left-radius(@border-radius-base);
+      }
+    }
+    &:last-child {
+      > a,
+      > span {
+        .border-right-radius(@border-radius-base);
+      }
+    }
+  }
+
+  > li > a,
+  > li > span {
+    &:hover,
+    &:focus {
+      background-color: @pagination-hover-bg;
+    }
+  }
+
+  > .active > a,
+  > .active > span {
+    &,
+    &:hover,
+    &:focus {
+      z-index: 2;
+      color: @pagination-active-color;
+      background-color: @pagination-active-bg;
+      border-color: @pagination-active-bg;
+      cursor: default;
+    }
+  }
+
+  > .disabled {
+    > span,
+    > span:hover,
+    > span:focus,
+    > a,
+    > a:hover,
+    > a:focus {
+      color: @pagination-disabled-color;
+      background-color: @pagination-bg;
+      border-color: @pagination-border;
+      cursor: not-allowed;
+    }
+  }
+}
+
+// Sizing
+// --------------------------------------------------
+
+// Large
+.pagination-lg {
+  .pagination-size(@padding-large-vertical; @padding-large-horizontal; @font-size-large; @border-radius-large);
+}
+
+// Small
+.pagination-sm {
+  .pagination-size(@padding-small-vertical; @padding-small-horizontal; @font-size-small; @border-radius-small);
+}
diff --git a/client/galaxy/style/less/bootstrap/panels.less b/client/galaxy/style/less/bootstrap/panels.less
new file mode 100644
index 0000000..2343b25
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/panels.less
@@ -0,0 +1,148 @@
+//
+// Panels
+// --------------------------------------------------
+
+
+// Base class
+.panel {
+  margin-bottom: @line-height-computed;
+  background-color: @panel-bg;
+  border: 1px solid transparent;
+  border-radius: @panel-border-radius;
+  .box-shadow(0 1px 1px rgba(0,0,0,.05));
+}
+
+// Panel contents
+.panel-body {
+  padding: 15px;
+  .clearfix();
+}
+
+
+// List groups in panels
+//
+// By default, space out list group content from panel headings to account for
+// any kind of custom content between the two.
+
+.panel {
+  > .list-group {
+    margin-bottom: 0;
+
+    .list-group-item {
+      border-width: 1px 0;
+
+      // Remove border radius for top one
+      &:first-child {
+        .border-top-radius(0);
+      }
+      // But keep it for the last one
+      &:last-child {
+        border-bottom: 0;
+      }
+    }
+  }
+}
+// Collapse space between when there's no additional content.
+.panel-heading + .list-group {
+  .list-group-item:first-child {
+    border-top-width: 0;
+  }
+}
+
+
+// Tables in panels
+//
+// Place a non-bordered `.table` within a panel (not within a `.panel-body`) and
+// watch it go full width.
+
+.panel {
+  > .table {
+    margin-bottom: 0;
+  }
+  > .panel-body + .table {
+    border-top: 1px solid @table-border-color;
+  }
+}
+
+
+// Optional heading
+.panel-heading {
+  padding: 10px 15px;
+  border-bottom: 1px solid transparent;
+  .border-top-radius(@panel-border-radius - 1);
+}
+
+// Within heading, strip any `h*` tag of its default margins for spacing.
+.panel-title {
+  margin-top: 0;
+  margin-bottom: 0;
+  font-size: ceil((@font-size-base * 1.125));
+  > a {
+    color: inherit;
+  }
+}
+
+// Optional footer (stays gray in every modifier class)
+.panel-footer {
+  padding: 10px 15px;
+  background-color: @panel-footer-bg;
+  border-top: 1px solid @panel-inner-border;
+  .border-bottom-radius(@panel-border-radius - 1);
+}
+
+
+// Collapsible panels (aka accordion)
+//
+// Wrap a series of panels in `.panel-group` to turn them into an accordion with
+// the help of our collapse JavaScript plugin.
+
+.panel-group {
+  // Tighten up margin so it's only between panels
+  .panel {
+    margin-bottom: 0;
+    border-radius: @panel-border-radius;
+    overflow: hidden; // crop contents when collapsed
+    + .panel {
+      margin-top: 5px;
+    }
+  }
+
+  .panel-heading {
+    border-bottom: 0;
+    + .panel-collapse .panel-body {
+      border-top: 1px solid @panel-inner-border;
+    }
+  }
+  .panel-footer {
+    border-top: 0;
+    + .panel-collapse .panel-body {
+      border-bottom: 1px solid @panel-inner-border;
+    }
+  }
+
+  // New subcomponent for wrapping collapsible content for proper animations
+  .panel-collapse {
+
+  }
+}
+
+
+// Contextual variations
+.panel-default {
+  .panel-variant(@panel-default-border; @panel-default-text; @panel-default-heading-bg; @panel-default-border);
+}
+.panel-primary {
+  .panel-variant(@panel-primary-border; @panel-primary-text; @panel-primary-heading-bg; @panel-primary-border);
+}
+.panel-success {
+  .panel-variant(@panel-success-border; @panel-success-text; @panel-success-heading-bg; @panel-success-border);
+}
+.panel-warning {
+  .panel-variant(@panel-warning-border; @panel-warning-text; @panel-warning-heading-bg; @panel-warning-border);
+}
+.panel-danger {
+  .panel-variant(@panel-danger-border; @panel-danger-text; @panel-danger-heading-bg; @panel-danger-border);
+}
+.panel-info {
+  .panel-variant(@panel-info-border; @panel-info-text; @panel-info-heading-bg; @panel-info-border);
+}
diff --git a/client/galaxy/style/less/bootstrap/popovers.less b/client/galaxy/style/less/bootstrap/popovers.less
new file mode 100644
index 0000000..345bb1a
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/popovers.less
@@ -0,0 +1,133 @@
+//
+// Popovers
+// --------------------------------------------------
+
+
+.popover {
+  position: absolute;
+  top: 0;
+  left: 0;
+  z-index: @zindex-popover;
+  display: none;
+  max-width: @popover-max-width;
+  padding: 1px;
+  text-align: left; // Reset given new insertion method
+  background-color: @popover-bg;
+  background-clip: padding-box;
+  border: 1px solid @popover-fallback-border-color;
+  border: 1px solid @popover-border-color;
+  border-radius: @border-radius-large;
+  .box-shadow(0 5px 10px rgba(0,0,0,.2));
+
+  // Overrides for proper insertion
+  white-space: normal;
+
+  // Offset the popover to account for the popover arrow
+  &.top     { margin-top: -10px; }
+  &.right   { margin-left: 10px; }
+  &.bottom  { margin-top: 10px; }
+  &.left    { margin-left: -10px; }
+}
+
+.popover-title {
+  margin: 0; // reset heading margin
+  padding: 8px 14px;
+  font-size: @font-size-base;
+  font-weight: normal;
+  line-height: 18px;
+  background-color: @popover-title-bg;
+  border-bottom: 1px solid darken(@popover-title-bg, 5%);
+  border-radius: 5px 5px 0 0;
+}
+
+.popover-content {
+  padding: 9px 14px;
+}
+
+// Arrows
+//
+// .arrow is outer, .arrow:after is inner
+
+.popover .arrow {
+  &,
+  &:after {
+    position: absolute;
+    display: block;
+    width: 0;
+    height: 0;
+    border-color: transparent;
+    border-style: solid;
+  }
+}
+.popover .arrow {
+  border-width: @popover-arrow-outer-width;
+}
+.popover .arrow:after {
+  border-width: @popover-arrow-width;
+  content: "";
+}
+
+.popover {
+  &.top .arrow {
+    left: 50%;
+    margin-left: -@popover-arrow-outer-width;
+    border-bottom-width: 0;
+    border-top-color: @popover-arrow-outer-fallback-color; // IE8 fallback
+    border-top-color: @popover-arrow-outer-color;
+    bottom: -@popover-arrow-outer-width;
+    &:after {
+      content: " ";
+      bottom: 1px;
+      margin-left: -@popover-arrow-width;
+      border-bottom-width: 0;
+      border-top-color: @popover-arrow-color;
+    }
+  }
+  &.right .arrow {
+    top: 50%;
+    left: -@popover-arrow-outer-width;
+    margin-top: -@popover-arrow-outer-width;
+    border-left-width: 0;
+    border-right-color: @popover-arrow-outer-fallback-color; // IE8 fallback
+    border-right-color: @popover-arrow-outer-color;
+    &:after {
+      content: " ";
+      left: 1px;
+      bottom: -@popover-arrow-width;
+      border-left-width: 0;
+      border-right-color: @popover-arrow-color;
+    }
+  }
+  &.bottom .arrow {
+    left: 50%;
+    margin-left: -@popover-arrow-outer-width;
+    border-top-width: 0;
+    border-bottom-color: @popover-arrow-outer-fallback-color; // IE8 fallback
+    border-bottom-color: @popover-arrow-outer-color;
+    top: -@popover-arrow-outer-width;
+    &:after {
+      content: " ";
+      top: 1px;
+      margin-left: -@popover-arrow-width;
+      border-top-width: 0;
+      border-bottom-color: @popover-arrow-color;
+    }
+  }
+
+  &.left .arrow {
+    top: 50%;
+    right: -@popover-arrow-outer-width;
+    margin-top: -@popover-arrow-outer-width;
+    border-right-width: 0;
+    border-left-color: @popover-arrow-outer-fallback-color; // IE8 fallback
+    border-left-color: @popover-arrow-outer-color;
+    &:after {
+      content: " ";
+      right: 1px;
+      border-right-width: 0;
+      border-left-color: @popover-arrow-color;
+      bottom: -@popover-arrow-width;
+    }
+  }
+
+}
diff --git a/client/galaxy/style/less/bootstrap/print.less b/client/galaxy/style/less/bootstrap/print.less
new file mode 100644
index 0000000..1e4bffe
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/print.less
@@ -0,0 +1,100 @@
+//
+// Basic print styles
+// --------------------------------------------------
+// Source: https://github.com/h5bp/html5-boilerplate/blob/master/css/main.css
+
+@media print {
+
+  * {
+    text-shadow: none !important;
+    color: #000 !important; // Black prints faster: h5bp.com/s
+    background: transparent !important;
+    box-shadow: none !important;
+  }
+
+  a,
+  a:visited {
+    text-decoration: underline;
+  }
+
+  a[href]:after {
+    content: " (" attr(href) ")";
+  }
+
+  abbr[title]:after {
+    content: " (" attr(title) ")";
+  }
+
+  // Don't show links for images, or javascript/internal links
+  .ir a:after,
+  a[href^="javascript:"]:after,
+  a[href^="#"]:after {
+    content: "";
+  }
+
+  pre,
+  blockquote {
+    border: 1px solid #999;
+    page-break-inside: avoid;
+  }
+
+  thead {
+    display: table-header-group; // h5bp.com/t
+  }
+
+  tr,
+  img {
+    page-break-inside: avoid;
+  }
+
+  img {
+    max-width: 100% !important;
+  }
+
+  @page {
+    margin: 2cm .5cm;
+  }
+
+  p,
+  h2,
+  h3 {
+    orphans: 3;
+    widows: 3;
+  }
+
+  h2,
+  h3 {
+    page-break-after: avoid;
+  }
+
+  // Bootstrap components
+  .navbar {
+    display: none;
+  }
+  .table {
+    td,
+    th {
+      background-color: #fff !important;
+    }
+  }
+  .btn,
+  .dropup > .btn {
+    > .caret {
+      border-top-color: #000 !important;
+    }
+  }
+  .label {
+    border: 1px solid #000;
+  }
+
+  .table {
+    border-collapse: collapse !important;
+  }
+  .table-bordered {
+    th,
+    td {
+      border: 1px solid #ddd !important;
+    }
+  }
+
+}
diff --git a/client/galaxy/style/less/bootstrap/progress-bars.less b/client/galaxy/style/less/bootstrap/progress-bars.less
new file mode 100644
index 0000000..49e5df8
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/progress-bars.less
@@ -0,0 +1,95 @@
+//
+// Progress bars
+// --------------------------------------------------
+
+
+// Bar animations
+// -------------------------
+
+// Webkit
+@-webkit-keyframes progress-bar-stripes {
+  from  { background-position: 40px 0; }
+  to    { background-position: 0 0; }
+}
+
+// Firefox
+@-moz-keyframes progress-bar-stripes {
+  from  { background-position: 40px 0; }
+  to    { background-position: 0 0; }
+}
+
+// Opera
+@-o-keyframes progress-bar-stripes {
+  from  { background-position: 0 0; }
+  to    { background-position: 40px 0; }
+}
+
+// Spec and IE10+
+@keyframes progress-bar-stripes {
+  from  { background-position: 40px 0; }
+  to    { background-position: 0 0; }
+}
+
+
+
+// Bar itself
+// -------------------------
+
+// Outer container
+.progress {
+  overflow: hidden;
+  height: @line-height-computed;
+  margin-bottom: @line-height-computed;
+  background-color: @progress-bg;
+  border-radius: @border-radius-base;
+  .box-shadow(inset 0 1px 2px rgba(0,0,0,.1));
+}
+
+// Bar of progress
+.progress-bar {
+  float: left;
+  width: 0%;
+  height: 100%;
+  font-size: @font-size-small;
+  color: @progress-bar-color;
+  text-align: center;
+  background-color: @progress-bar-bg;
+  .box-shadow(inset 0 -1px 0 rgba(0,0,0,.15));
+  .transition(width .6s ease);
+}
+
+// Striped bars
+.progress-striped .progress-bar {
+  #gradient > .striped(@progress-bar-bg);
+  background-size: 40px 40px;
+}
+
+// Call animation for the active one
+.progress.active .progress-bar {
+  -webkit-animation: progress-bar-stripes 2s linear infinite;
+     -moz-animation: progress-bar-stripes 2s linear infinite;
+      -ms-animation: progress-bar-stripes 2s linear infinite;
+       -o-animation: progress-bar-stripes 2s linear infinite;
+          animation: progress-bar-stripes 2s linear infinite;
+}
+
+
+
+// Variations
+// -------------------------
+
+.progress-bar-success {
+  .progress-bar-variant(@progress-bar-success-bg);
+}
+
+.progress-bar-info {
+  .progress-bar-variant(@progress-bar-info-bg);
+}
+
+.progress-bar-warning {
+  .progress-bar-variant(@progress-bar-warning-bg);
+}
+
+.progress-bar-danger {
+  .progress-bar-variant(@progress-bar-danger-bg);
+}
diff --git a/client/galaxy/style/less/bootstrap/responsive-utilities.less b/client/galaxy/style/less/bootstrap/responsive-utilities.less
new file mode 100644
index 0000000..02bb39b
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/responsive-utilities.less
@@ -0,0 +1,220 @@
+//
+// Responsive: Utility classes
+// --------------------------------------------------
+
+
+// IE10 Metro responsive
+// Required for Windows 8 Metro split-screen snapping with IE10
+//
+// Source: http://timkadlec.com/2012/10/ie10-snap-mode-and-responsive-design/
+@-ms-viewport{
+  width: device-width;
+}
+
+// IE10 on Windows Phone 8
+// IE10 on WP8 doesn't report CSS pixels, but actual device pixels. In
+// other words, say on a Lumia, you'll get 768px as the device width,
+// meaning users will see the tablet styles and not phone styles.
+//
+// Alternatively you can override this with JS (see source below), but
+// we won't be doing that here given our limited scope.
+//
+// Source: http://timkadlec.com/2013/01/windows-phone-8-and-device-width/
+@media screen and (max-width: 400px) {
+  @-ms-viewport{
+    width: 320px;
+  }
+}
+
+// Hide from screenreaders and browsers
+// Credit: HTML5 Boilerplate
+.hidden {
+  display: none !important;
+  visibility: hidden !important;
+}
+
+// Visibility utilities
+
+.visible-xs {
+  .responsive-invisibility();
+  @media (max-width: @screen-xs-max) {
+    .responsive-visibility();
+  }
+  &.visible-sm {
+    @media (min-width: @screen-sm-min) and (max-width: @screen-sm-max) {
+      .responsive-visibility();
+    }
+  }
+  &.visible-md {
+    @media (min-width: @screen-md-min) and (max-width: @screen-md-max) {
+      .responsive-visibility();
+    }
+  }
+  &.visible-lg {
+    @media (min-width: @screen-lg-min) {
+      .responsive-visibility();
+    }
+  }
+}
+.visible-sm {
+  .responsive-invisibility();
+  &.visible-xs {
+    @media (max-width: @screen-xs-max) {
+      .responsive-visibility();
+    }
+  }
+  @media (min-width: @screen-sm-min) and (max-width: @screen-sm-max) {
+    .responsive-visibility();
+  }
+  &.visible-md {
+    @media (min-width: @screen-md-min) and (max-width: @screen-md-max) {
+      .responsive-visibility();
+    }
+  }
+  &.visible-lg {
+    @media (min-width: @screen-lg-min) {
+      .responsive-visibility();
+    }
+  }
+}
+.visible-md {
+  .responsive-invisibility();
+  &.visible-xs {
+    @media (max-width: @screen-xs-max) {
+      .responsive-visibility();
+    }
+  }
+  &.visible-sm {
+    @media (min-width: @screen-sm-min) and (max-width: @screen-sm-max) {
+      .responsive-visibility();
+    }
+  }
+  @media (min-width: @screen-md-min) and (max-width: @screen-md-max) {
+    .responsive-visibility();
+  }
+  &.visible-lg {
+    @media (min-width: @screen-lg-min) {
+      .responsive-visibility();
+    }
+  }
+}
+.visible-lg {
+  .responsive-invisibility();
+  &.visible-xs {
+    @media (max-width: @screen-xs-max) {
+      .responsive-visibility();
+    }
+  }
+  &.visible-sm {
+    @media (min-width: @screen-sm-min) and (max-width: @screen-sm-max) {
+      .responsive-visibility();
+    }
+  }
+  &.visible-md {
+    @media (min-width: @screen-md-min) and (max-width: @screen-md-max) {
+      .responsive-visibility();
+    }
+  }
+  @media (min-width: @screen-lg-min) {
+    .responsive-visibility();
+  }
+}
+
+.hidden-xs {
+  .responsive-visibility();
+  @media (max-width: @screen-xs-max) {
+    .responsive-invisibility();
+  }
+  &.hidden-sm {
+    @media (min-width: @screen-sm-min) and (max-width: @screen-sm-max) {
+      .responsive-invisibility();
+    }
+  }
+  &.hidden-md {
+    @media (min-width: @screen-md-min) and (max-width: @screen-md-max) {
+      .responsive-invisibility();
+    }
+  }
+  &.hidden-lg {
+    @media (min-width: @screen-lg-min) {
+      .responsive-invisibility();
+    }
+  }
+}
+.hidden-sm {
+  .responsive-visibility();
+  &.hidden-xs {
+    @media (max-width: @screen-xs-max) {
+      .responsive-invisibility();
+    }
+  }
+  @media (min-width: @screen-sm-min) and (max-width: @screen-sm-max) {
+    .responsive-invisibility();
+  }
+  &.hidden-md {
+    @media (min-width: @screen-md-min) and (max-width: @screen-md-max) {
+      .responsive-invisibility();
+    }
+  }
+  &.hidden-lg {
+    @media (min-width: @screen-lg-min) {
+      .responsive-invisibility();
+    }
+  }
+}
+.hidden-md {
+  .responsive-visibility();
+  &.hidden-xs {
+    @media (max-width: @screen-xs-max) {
+      .responsive-invisibility();
+    }
+  }
+  &.hidden-sm {
+    @media (min-width: @screen-sm-min) and (max-width: @screen-sm-max) {
+      .responsive-invisibility();
+    }
+  }
+  @media (min-width: @screen-md-min) and (max-width: @screen-md-max) {
+    .responsive-invisibility();
+  }
+  &.hidden-lg {
+    @media (min-width: @screen-lg-min) {
+      .responsive-invisibility();
+    }
+  }
+}
+.hidden-lg {
+  .responsive-visibility();
+  &.hidden-xs {
+    @media (max-width: @screen-xs-max) {
+      .responsive-invisibility();
+    }
+  }
+  &.hidden-sm {
+    @media (min-width: @screen-sm-min) and (max-width: @screen-sm-max) {
+      .responsive-invisibility();
+    }
+  }
+  &.hidden-md {
+    @media (min-width: @screen-md-min) and (max-width: @screen-md-max) {
+      .responsive-invisibility();
+    }
+  }
+  @media (min-width: @screen-lg-min) {
+    .responsive-invisibility();
+  }
+}
+
+// Print utilities
+.visible-print {
+  .responsive-invisibility();
+}
+
+ at media print {
+  .visible-print {
+    .responsive-visibility();
+  }
+  .hidden-print {
+    .responsive-invisibility();
+  }
+}
diff --git a/client/galaxy/style/less/bootstrap/scaffolding.less b/client/galaxy/style/less/bootstrap/scaffolding.less
new file mode 100644
index 0000000..53e1be5
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/scaffolding.less
@@ -0,0 +1,130 @@
+//
+// Scaffolding
+// --------------------------------------------------
+
+
+// Reset the box-sizing
+
+*,
+*:before,
+*:after {
+  .box-sizing(border-box);
+}
+
+
+// Body reset
+
+html {
+  font-size: 62.5%;
+  -webkit-tap-highlight-color: rgba(0,0,0,0);
+}
+
+body {
+  font-family: @font-family-base;
+  font-size: @font-size-base;
+  line-height: @line-height-base;
+  color: @text-color;
+  background-color: @body-bg;
+}
+
+// Reset fonts for relevant elements
+input,
+button,
+select,
+textarea {
+  font-family: inherit;
+  font-size: inherit;
+  line-height: inherit;
+}
+
+// Reset unusual Firefox-on-Android default style.
+//
+// See https://github.com/necolas/normalize.css/issues/214
+
+button,
+input,
+select[multiple],
+textarea {
+  background-image: none;
+}
+
+
+// Links
+
+a {
+  color: @link-color;
+  text-decoration: none;
+
+  &:hover,
+  &:focus {
+    color: @link-hover-color;
+    text-decoration: underline;
+  }
+
+  &:focus {
+    .tab-focus();
+  }
+}
+
+
+// Images
+
+img {
+  vertical-align: middle;
+}
+
+// Responsive images (ensure images don't scale beyond their parents)
+.img-responsive {
+  .img-responsive();
+}
+
+// Rounded corners
+.img-rounded {
+  border-radius: @border-radius-large;
+}
+
+// Image thumbnails
+//
+// Heads up! This is mixin-ed into thumbnails.less for `.thumbnail`.
+.img-thumbnail {
+  padding: @thumbnail-padding;
+  line-height: @line-height-base;
+  background-color: @thumbnail-bg;
+  border: 1px solid @thumbnail-border;
+  border-radius: @thumbnail-border-radius;
+  .transition(all .2s ease-in-out);
+
+  // Keep them at most 100% wide
+  .img-responsive(inline-block);
+}
+
+// Perfect circle
+.img-circle {
+  border-radius: 50%; // set radius in percents
+}
+
+
+// Horizontal rules
+
+hr {
+  margin-top:    @line-height-computed;
+  margin-bottom: @line-height-computed;
+  border: 0;
+  border-top: 1px solid @hr-border;
+}
+
+
+// Only display content to screen readers
+//
+// See: http://a11yproject.com/posts/how-to-hide-content/
+
+.sr-only {
+  position: absolute;
+  width: 1px;
+  height: 1px;
+  margin: -1px;
+  padding: 0;
+  overflow: hidden;
+  clip: rect(0 0 0 0);
+  border: 0;
+}
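
The `.sr-only` recipe above (a 1px, clipped, absolutely positioned box) hides content
visually while keeping it readable by screen readers. A sketch of its usual application,
a keyboard "skip" link that reuses the class as a LESS mixin and undoes the clipping on
focus; the `.skip-to-content` name is invented for illustration:

    .skip-to-content {
      .sr-only();           // visually hidden by default
      &:focus {
        position: static;   // undo the absolute positioning
        width: auto;
        height: auto;
        margin: 0;
        overflow: visible;
        clip: auto;         // stop clipping once focused
      }
    }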
diff --git a/client/galaxy/style/less/bootstrap/tables.less b/client/galaxy/style/less/bootstrap/tables.less
new file mode 100644
index 0000000..bd86db2
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/tables.less
@@ -0,0 +1,238 @@
+//
+// Tables
+// --------------------------------------------------
+
+
+table {
+  max-width: 100%;
+  background-color: @table-bg;
+}
+th {
+  text-align: left;
+}
+
+
+// Baseline styles
+
+.table {
+  width: 100%;
+  margin-bottom: @line-height-computed;
+  // Cells
+  thead,
+  tbody,
+  tfoot {
+    > tr {
+      > th,
+      > td {
+        padding: @table-cell-padding;
+        line-height: @line-height-base;
+        vertical-align: top;
+        border-top: 1px solid @table-border-color;
+      }
+    }
+  }
+  // Bottom align for column headings
+  thead > tr > th {
+    vertical-align: bottom;
+    border-bottom: 2px solid @table-border-color;
+  }
+  // Remove top border from thead by default
+  caption + thead,
+  colgroup + thead,
+  thead:first-child {
+    tr:first-child {
+      th, td {
+        border-top: 0;
+      }
+    }
+  }
+  // Account for multiple tbody instances
+  tbody + tbody {
+    border-top: 2px solid @table-border-color;
+  }
+
+  // Nesting
+  .table {
+    background-color: @body-bg;
+  }
+}
+
+
+// Condensed table w/ half padding
+
+.table-condensed {
+  thead,
+  tbody,
+  tfoot {
+    > tr {
+      > th,
+      > td {
+        padding: @table-condensed-cell-padding;
+      }
+    }
+  }
+}
+
+
+// Bordered version
+//
+// Add borders all around the table and between all the columns.
+
+.table-bordered {
+  border: 1px solid @table-border-color;
+  > thead,
+  > tbody,
+  > tfoot {
+    > tr {
+      > th,
+      > td {
+        border: 1px solid @table-border-color;
+      }
+    }
+  }
+  > thead {
+    > tr {
+      > th,
+      > td {
+        border-bottom-width: 2px;
+      }
+    }
+  }
+}
+
+
+// Zebra-striping
+//
+// Default zebra-stripe styles (alternating gray and transparent backgrounds)
+
+.table-striped {
+  > tbody {
+    > tr:nth-child(odd) {
+      > td,
+      > th {
+        background-color: @table-bg-accent;
+      }
+    }
+  }
+}
+
+
+// Hover effect
+//
+// Placed here since it has to come after the potential zebra striping
+
+.table-hover {
+  > tbody {
+    > tr:hover {
+      > td,
+      > th {
+        background-color: @table-bg-hover;
+      }
+    }
+  }
+}
+
+
+// Table cell sizing
+//
+// Reset default table behavior
+
+table col[class*="col-"] {
+  float: none;
+  display: table-column;
+}
+table {
+  td,
+  th {
+    &[class*="col-"] {
+      float: none;
+      display: table-cell;
+    }
+  }
+}
+
+
+// Table backgrounds
+//
+// Exact selectors below required to override `.table-striped` and prevent
+// inheritance to nested tables.
+
+.table > thead > tr,
+.table > tbody > tr,
+.table > tfoot > tr {
+  > td.active,
+  > th.active,
+  &.active > td,
+  &.active > th  {
+    background-color: @table-bg-active;
+  }
+}
+
+// Generate the contextual variants
+.table-row-variant(success; @state-success-bg; @state-success-border);
+.table-row-variant(danger; @state-danger-bg; @state-danger-border);
+.table-row-variant(warning; @state-warning-bg; @state-warning-border);
+
+
+// Responsive tables
+//
+// Wrap your tables in `.table-responsive` and we'll make them mobile friendly
+// by enabling horizontal scrolling. Only applies at and below 768px. Everything above that
+// will display normally.
+
+@media (max-width: @screen-sm-min) {
+  .table-responsive {
+    width: 100%;
+    margin-bottom: 15px;
+    overflow-y: hidden;
+    overflow-x: scroll;
+    -ms-overflow-style: -ms-autohiding-scrollbar;
+    border: 1px solid @table-border-color;
+    -webkit-overflow-scrolling: touch;
+
+    // Tighten up spacing and give a background color
+    > .table {
+      margin-bottom: 0;
+      background-color: #fff;
+
+      // Ensure the content doesn't wrap
+      > thead,
+      > tbody,
+      > tfoot {
+        > tr {
+          > th,
+          > td {
+            white-space: nowrap;
+          }
+        }
+      }
+    }
+
+    // Special overrides for the bordered tables
+    > .table-bordered {
+      border: 0;
+
+      // Nuke the appropriate borders so that the parent can handle them
+      > thead,
+      > tbody,
+      > tfoot {
+        > tr {
+          > th:first-child,
+          > td:first-child {
+            border-left: 0;
+          }
+          > th:last-child,
+          > td:last-child {
+            border-right: 0;
+          }
+        }
+        > tr:last-child {
+          > th,
+          > td {
+            border-bottom: 0;
+          }
+        }
+      }
+    }
+  }
+}
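
The three `.table-row-variant(...)` calls above generate all of the contextual row and
cell colors; the mixin itself lives in mixins.less and takes a state name plus a
background and border color. A sketch of how a fourth variant would be added, reusing
the `@state-info-*` variables defined in variables.less later in this commit; the call
itself is illustrative and not part of the commit:

    .table-row-variant(info; @state-info-bg; @state-info-border);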
diff --git a/client/galaxy/style/less/bootstrap/theme.less b/client/galaxy/style/less/bootstrap/theme.less
new file mode 100644
index 0000000..32c8066
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/theme.less
@@ -0,0 +1,241 @@
+
+//
+// Load core variables and mixins
+// --------------------------------------------------
+
+ at import "variables.less";
+ at import "mixins.less";
+
+
+
+//
+// Buttons
+// --------------------------------------------------
+
+// Common styles
+.btn-default,
+.btn-primary,
+.btn-success,
+.btn-info,
+.btn-warning,
+.btn-danger {
+  text-shadow: 0 -1px 0 rgba(0,0,0,.2);
+  @shadow: inset 0 1px 0 rgba(255,255,255,.15), 0 1px 1px rgba(0,0,0,.075);
+  .box-shadow(@shadow);
+
+  // Reset the shadow
+  &:active,
+  &.active {
+    .box-shadow(inset 0 3px 5px rgba(0,0,0,.125));
+  }
+}
+
+// Mixin for generating new styles
+.btn-styles(@btn-color: #555;) {
+  #gradient > .vertical(@start-color: @btn-color; @end-color: darken(@btn-color, 12%));
+  background-repeat: repeat-x;
+  border-color: darken(@btn-color, 14%);
+
+  &:hover,
+  &:focus  {
+    background-color: darken(@btn-color, 12%);
+    background-position: 0 -15px;
+  }
+
+  &:active,
+  &.active {
+    background-color: darken(@btn-color, 12%);
+    border-color: darken(@btn-color, 14%);
+  }
+}
+
+// Common styles
+.btn {
+  // Remove the gradient for the pressed/active state
+  &:active,
+  &.active {
+    background-image: none;
+  }
+}
+
+// Apply the mixin to the buttons
+.btn-default { .btn-styles(@btn-default-bg;); text-shadow: 0 1px 0 #fff; border-color: #ccc; }
+.btn-primary { .btn-styles(@btn-primary-bg); }
+.btn-success { .btn-styles(@btn-success-bg); }
+.btn-warning { .btn-styles(@btn-warning-bg); }
+.btn-danger  { .btn-styles(@btn-danger-bg); }
+.btn-info    { .btn-styles(@btn-info-bg); }
+
+
+
+//
+// Images
+// --------------------------------------------------
+
+.thumbnail,
+.img-thumbnail {
+  .box-shadow(0 1px 2px rgba(0,0,0,.075));
+}
+
+
+
+//
+// Dropdowns
+// --------------------------------------------------
+
+.dropdown-menu > li > a:hover,
+.dropdown-menu > li > a:focus,
+.dropdown-menu > .active > a,
+.dropdown-menu > .active > a:hover,
+.dropdown-menu > .active > a:focus {
+  #gradient > .vertical(@start-color: @dropdown-link-hover-bg; @end-color: darken(@dropdown-link-hover-bg, 5%));
+  background-color: darken(@dropdown-link-hover-bg, 5%);
+}
+
+
+
+//
+// Navbar
+// --------------------------------------------------
+
+// Default navbar
+.navbar-default {
+  #gradient > .vertical(@start-color: lighten(@navbar-default-bg, 10%); @end-color: @navbar-default-bg;);
+  border-radius: @navbar-border-radius;
+  @shadow: inset 0 1px 0 rgba(255,255,255,.15), 0 1px 5px rgba(0,0,0,.075);
+  .box-shadow(@shadow);
+
+  .navbar-nav > .active > a {
+    #gradient > .vertical(@start-color: darken(@navbar-default-bg, 5%); @end-color: darken(@navbar-default-bg, 2%););
+    .box-shadow(inset 0 3px 9px rgba(0,0,0,.075));
+  }
+}
+.navbar-brand,
+.navbar-nav > li > a {
+  text-shadow: 0 1px 0 rgba(255,255,255,.25);
+}
+
+// Inverted navbar
+.navbar-inverse {
+  #gradient > .vertical(@start-color: lighten(@navbar-inverse-bg, 10%); @end-color: @navbar-inverse-bg;);
+
+  .navbar-nav > .active > a {
+    #gradient > .vertical(@start-color: @navbar-inverse-bg; @end-color: lighten(@navbar-inverse-bg, 2.5%););
+    .box-shadow(inset 0 3px 9px rgba(0,0,0,.25));
+  }
+
+  .navbar-brand,
+  .navbar-nav > li > a {
+    text-shadow: 0 -1px 0 rgba(0,0,0,.25);
+  }
+}
+
+// Undo rounded corners in static and fixed navbars
+.navbar-static-top,
+.navbar-fixed-top,
+.navbar-fixed-bottom {
+  border-radius: 0;
+}
+
+
+
+//
+// Alerts
+// --------------------------------------------------
+
+// Common styles
+.alert {
+  text-shadow: 0 1px 0 rgba(255,255,255,.2);
+  @shadow: inset 0 1px 0 rgba(255,255,255,.25), 0 1px 2px rgba(0,0,0,.05);
+  .box-shadow(@shadow);
+}
+
+// Mixin for generating new styles
+.alert-styles(@color) {
+  #gradient > .vertical(@start-color: @color; @end-color: darken(@color, 7.5%));
+  border-color: darken(@color, 15%);
+}
+
+// Apply the mixin to the alerts
+.alert-success    { .alert-styles(@alert-success-bg); }
+.alert-info       { .alert-styles(@alert-info-bg); }
+.alert-warning    { .alert-styles(@alert-warning-bg); }
+.alert-danger     { .alert-styles(@alert-danger-bg); }
+
+
+
+//
+// Progress bars
+// --------------------------------------------------
+
+// Give the progress background some depth
+.progress {
+  #gradient > .vertical(@start-color: darken(@progress-bg, 4%); @end-color: @progress-bg;)
+}
+
+// Mixin for generating new styles
+.progress-bar-styles(@color) {
+  #gradient > .vertical(@start-color: @color; @end-color: darken(@color, 10%));
+}
+
+// Apply the mixin to the progress bars
+.progress-bar            { .progress-bar-styles(@progress-bar-bg); }
+.progress-bar-success    { .progress-bar-styles(@progress-bar-success-bg); }
+.progress-bar-info       { .progress-bar-styles(@progress-bar-info-bg); }
+.progress-bar-warning    { .progress-bar-styles(@progress-bar-warning-bg); }
+.progress-bar-danger     { .progress-bar-styles(@progress-bar-danger-bg); }
+
+
+
+//
+// List groups
+// --------------------------------------------------
+
+.list-group {
+  border-radius: @border-radius-base;
+  .box-shadow(0 1px 2px rgba(0,0,0,.075));
+}
+.list-group-item.active,
+.list-group-item.active:hover,
+.list-group-item.active:focus {
+  text-shadow: 0 -1px 0 darken(@list-group-active-bg, 10%);
+  #gradient > .vertical(@start-color: @list-group-active-bg; @end-color: darken(@list-group-active-bg, 7.5%));
+  border-color: darken(@list-group-active-border, 7.5%);
+}
+
+
+
+//
+// Panels
+// --------------------------------------------------
+
+// Common styles
+.panel {
+  .box-shadow(0 1px 2px rgba(0,0,0,.05));
+}
+
+// Mixin for generating new styles
+.panel-heading-styles(@color) {
+  #gradient > .vertical(@start-color: @color; @end-color: darken(@color, 5%));
+}
+
+// Apply the mixin to the panel headings only
+.panel-default > .panel-heading   { .panel-heading-styles(@panel-default-heading-bg); }
+.panel-primary > .panel-heading   { .panel-heading-styles(@panel-primary-heading-bg); }
+.panel-success > .panel-heading   { .panel-heading-styles(@panel-success-heading-bg); }
+.panel-info > .panel-heading      { .panel-heading-styles(@panel-info-heading-bg); }
+.panel-warning > .panel-heading   { .panel-heading-styles(@panel-warning-heading-bg); }
+.panel-danger > .panel-heading    { .panel-heading-styles(@panel-danger-heading-bg); }
+
+
+
+//
+// Wells
+// --------------------------------------------------
+
+.well {
+  #gradient > .vertical(@start-color: darken(@well-bg, 5%); @end-color: @well-bg;);
+  border-color: darken(@well-bg, 10%);
+  @shadow: inset 0 1px 3px rgba(0,0,0,.05), 0 1px 0 rgba(255,255,255,.1);
+  .box-shadow(@shadow);
+}
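
Most of this theme file is mixin-driven: `.btn-styles()`, `.alert-styles()`,
`.progress-bar-styles()` and `.panel-heading-styles()` each take a base color and emit
the gradient, border and hover rules, so the per-component code reduces to one call per
variant. A sketch of generating an extra button variant the same way; `@brand-galaxy`
and `.btn-galaxy` are invented names, not part of this commit:

    @brand-galaxy: #2c3143;                     // hypothetical brand color
    .btn-galaxy { .btn-styles(@brand-galaxy); } // gradient, border and hover states for free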
diff --git a/client/galaxy/style/less/bootstrap/thumbnails.less b/client/galaxy/style/less/bootstrap/thumbnails.less
new file mode 100644
index 0000000..a210cac
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/thumbnails.less
@@ -0,0 +1,32 @@
+//
+// Thumbnails
+// --------------------------------------------------
+
+
+// Mixin and adjust the regular image class
+.thumbnail {
+  .img-thumbnail();
+  display: block; // Override the inline-block from `.img-thumbnail`
+  margin-bottom: @line-height-computed;
+
+  > img {
+    .img-responsive();
+  }
+}
+
+
+// Add a hover state for linked versions only
+a.thumbnail:hover,
+a.thumbnail:focus {
+  border-color: @link-color;
+}
+
+// Images and captions
+.thumbnail > img {
+  margin-left: auto;
+  margin-right: auto;
+}
+.thumbnail .caption {
+  padding: @thumbnail-caption-padding;
+  color: @thumbnail-caption-color;
+}
diff --git a/client/galaxy/style/less/bootstrap/tooltip.less b/client/galaxy/style/less/bootstrap/tooltip.less
new file mode 100644
index 0000000..5a95b84
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/tooltip.less
@@ -0,0 +1,95 @@
+//
+// Tooltips
+// --------------------------------------------------
+
+
+// Base class
+.tooltip {
+  position: absolute;
+  z-index: @zindex-tooltip;
+  display: block;
+  visibility: visible;
+  font-size: @font-size-small;
+  line-height: 1.4;
+  .opacity(0);
+
+  &.in     { .opacity(.9); }
+  &.top    { margin-top:  -3px; padding: 5px 0; }
+  &.right  { margin-left:  3px; padding: 0 5px; }
+  &.bottom { margin-top:   3px; padding: 5px 0; }
+  &.left   { margin-left: -3px; padding: 0 5px; }
+}
+
+// Wrapper for the tooltip content
+.tooltip-inner {
+  max-width: @tooltip-max-width;
+  padding: 3px 8px;
+  color: @tooltip-color;
+  text-align: center;
+  text-decoration: none;
+  background-color: @tooltip-bg;
+  border-radius: @border-radius-base;
+}
+
+// Arrows
+.tooltip-arrow {
+  position: absolute;
+  width: 0;
+  height: 0;
+  border-color: transparent;
+  border-style: solid;
+}
+.tooltip {
+  &.top .tooltip-arrow {
+    bottom: 0;
+    left: 50%;
+    margin-left: -@tooltip-arrow-width;
+    border-width: @tooltip-arrow-width @tooltip-arrow-width 0;
+    border-top-color: @tooltip-arrow-color;
+  }
+  &.top-left .tooltip-arrow {
+    bottom: 0;
+    left: 5px;
+    border-width: @tooltip-arrow-width @tooltip-arrow-width 0;
+    border-top-color: @tooltip-arrow-color;
+  }
+  &.top-right .tooltip-arrow {
+    bottom: 0;
+    right: 5px;
+    border-width: @tooltip-arrow-width @tooltip-arrow-width 0;
+    border-top-color: @tooltip-arrow-color;
+  }
+  &.right .tooltip-arrow {
+    top: 50%;
+    left: 0;
+    margin-top: -@tooltip-arrow-width;
+    border-width: @tooltip-arrow-width @tooltip-arrow-width @tooltip-arrow-width 0;
+    border-right-color: @tooltip-arrow-color;
+  }
+  &.left .tooltip-arrow {
+    top: 50%;
+    right: 0;
+    margin-top: -@tooltip-arrow-width;
+    border-width: @tooltip-arrow-width 0 @tooltip-arrow-width @tooltip-arrow-width;
+    border-left-color: @tooltip-arrow-color;
+  }
+  &.bottom .tooltip-arrow {
+    top: 0;
+    left: 50%;
+    margin-left: -@tooltip-arrow-width;
+    border-width: 0 @tooltip-arrow-width @tooltip-arrow-width;
+    border-bottom-color: @tooltip-arrow-color;
+  }
+  &.bottom-left .tooltip-arrow {
+    top: 0;
+    left: 5px;
+    border-width: 0 @tooltip-arrow-width @tooltip-arrow-width;
+    border-bottom-color: @tooltip-arrow-color;
+  }
+  &.bottom-right .tooltip-arrow {
+    top: 0;
+    right: 5px;
+    border-width: 0 @tooltip-arrow-width @tooltip-arrow-width;
+    border-bottom-color: @tooltip-arrow-color;
+  }
+}
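
The arrows above rely on the standard CSS triangle trick: a zero-size box whose borders
meet at 45-degree angles, with every face transparent except the one pointing away from
the tooltip. An isolated sketch of the same construction, pointing upward and sized by
the same variables; the class name is invented and not part of the commit:

    .arrow-up-sketch {
      width: 0;
      height: 0;
      border: @tooltip-arrow-width solid transparent;
      border-top-width: 0;                        // collapse the top face
      border-bottom-color: @tooltip-arrow-color;  // only the bottom face is painted
    }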
diff --git a/client/galaxy/style/less/bootstrap/type.less b/client/galaxy/style/less/bootstrap/type.less
new file mode 100644
index 0000000..b0423e9
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/type.less
@@ -0,0 +1,242 @@
+//
+// Typography
+// --------------------------------------------------
+
+
+// Body text
+// -------------------------
+
+p {
+  margin: 0 0 (@line-height-computed / 2);
+}
+.lead {
+  margin-bottom: @line-height-computed;
+  font-size: (@font-size-base * 1.15);
+  font-weight: 200;
+  line-height: 1.4;
+
+  @media (min-width: 768px) {
+    font-size: (@font-size-base * 1.5);
+  }
+}
+
+
+// Emphasis & misc
+// -------------------------
+
+// Ex: 14px base font * 85% = about 12px
+small   { font-size: 85%; }
+
+// Undo browser default styling
+cite    { font-style: normal; }
+
+// Contextual emphasis
+.text-muted          { color: @text-muted; }
+.text-primary        { color: @brand-primary; }
+.text-warning        { color: @state-warning-text; }
+.text-danger         { color: @state-danger-text; }
+.text-success        { color: @state-success-text; }
+.text-info           { color: @state-info-text; }
+
+// Alignment
+.text-left           { text-align: left; }
+.text-right          { text-align: right; }
+.text-center         { text-align: center; }
+
+
+// Headings
+// -------------------------
+
+h1, h2, h3, h4, h5, h6,
+.h1, .h2, .h3, .h4, .h5, .h6 {
+  font-family: @headings-font-family;
+  font-weight: @headings-font-weight;
+  line-height: @headings-line-height;
+
+  small {
+    font-weight: normal;
+    line-height: 1;
+    color: @headings-small-color;
+  }
+}
+
+h1,
+h2,
+h3 {
+  margin-top: @line-height-computed;
+  margin-bottom: (@line-height-computed / 2);
+
+  small {
+    font-size: 65%;
+  }
+}
+h4,
+h5,
+h6 {
+  margin-top: (@line-height-computed / 2);
+  margin-bottom: (@line-height-computed / 2);
+
+  small {
+    font-size: 75%;
+  }
+}
+
+h1, .h1 { font-size: @font-size-h1; }
+h2, .h2 { font-size: @font-size-h2; }
+h3, .h3 { font-size: @font-size-h3; }
+h4, .h4 { font-size: @font-size-h4; }
+h5, .h5 { font-size: @font-size-h5; }
+h6, .h6 { font-size: @font-size-h6; }
+
+
+// Page header
+// -------------------------
+
+.page-header {
+  padding-bottom: ((@line-height-computed / 2) - 1);
+  margin: (@line-height-computed * 2) 0 @line-height-computed;
+  border-bottom: 1px solid @page-header-border-color;
+}
+
+
+
+// Lists
+// --------------------------------------------------
+
+// Unordered and Ordered lists
+ul,
+ol {
+  margin-top: 0;
+  margin-bottom: (@line-height-computed / 2);
+  ul,
+  ol {
+    margin-bottom: 0;
+  }
+}
+
+// List options
+
+// Unstyled keeps list items block level, just removes default browser padding and list-style
+.list-unstyled {
+  padding-left: 0;
+  list-style: none;
+}
+// Inline turns list items into inline-block
+.list-inline {
+  .list-unstyled();
+  > li {
+    display: inline-block;
+    padding-left: 5px;
+    padding-right: 5px;
+  }
+}
+
+// Description Lists
+dl {
+  margin-bottom: @line-height-computed;
+}
+dt,
+dd {
+  line-height: @line-height-base;
+}
+dt {
+  font-weight: bold;
+}
+dd {
+  margin-left: 0; // Undo browser default
+}
+
+// Horizontal description lists
+//
+// Defaults to being stacked without any of the below styles applied, until the
+// grid breakpoint is reached (default of ~768px).
+
+@media (min-width: @grid-float-breakpoint) {
+  .dl-horizontal {
+    dt {
+      float: left;
+      width: (@component-offset-horizontal - 20);
+      clear: left;
+      text-align: right;
+      .text-overflow();
+    }
+    dd {
+      margin-left: @component-offset-horizontal;
+      .clearfix(); // Clear the floated `dt` if an empty `dd` is present
+    }
+  }
+}
+
+// MISC
+// ----
+
+// Abbreviations and acronyms
+abbr[title],
+// Added data-* attribute to help out our tooltip plugin, per https://github.com/twbs/bootstrap/issues/5257
+abbr[data-original-title] {
+  cursor: help;
+  border-bottom: 1px dotted @abbr-border-color;
+}
+abbr.initialism {
+  font-size: 90%;
+  text-transform: uppercase;
+}
+
+// Blockquotes
+blockquote {
+  padding: (@line-height-computed / 2) @line-height-computed;
+  margin: 0 0 @line-height-computed;
+  border-left: 5px solid @blockquote-border-color;
+  p {
+    font-size: (@font-size-base * 1.25);
+    font-weight: 300;
+    line-height: 1.25;
+  }
+  p:last-child {
+    margin-bottom: 0;
+  }
+  small {
+    display: block;
+    line-height: @line-height-base;
+    color: @blockquote-small-color;
+    &:before {
+      content: '\2014 \00A0';// EM DASH, NBSP
+    }
+  }
+
+  // Float right with text-align: right
+  &.pull-right {
+    padding-right: 15px;
+    padding-left: 0;
+    border-right: 5px solid @blockquote-border-color;
+    border-left: 0;
+    p,
+    small {
+      text-align: right;
+    }
+    small {
+      &:before {
+        content: '';
+      }
+      &:after {
+        content: '\00A0 \2014';// NBSP, EM DASH
+      }
+    }
+  }
+}
+
+// Quotes
+q:before,
+q:after,
+blockquote:before,
+blockquote:after {
+  content: "";
+}
+
+// Addresses
+address {
+  display: block;
+  margin-bottom: @line-height-computed;
+  font-style: normal;
+  line-height: @line-height-base;
+}
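
`.list-inline` above shows a LESS idiom used throughout these files: an existing class
(`.list-unstyled`) is invoked as a mixin and then extended with further rules. A sketch
of the same move for a hypothetical comma-separated list class, not part of this commit:

    .list-comma {
      .list-unstyled();            // reuse: no padding, no bullets
      > li {
        display: inline;
        &:not(:last-child):after {
          content: ", ";           // separator between items
        }
      }
    }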
diff --git a/client/galaxy/style/less/bootstrap/utilities.less b/client/galaxy/style/less/bootstrap/utilities.less
new file mode 100644
index 0000000..a2807cc
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/utilities.less
@@ -0,0 +1,42 @@
+//
+// Utility classes
+// --------------------------------------------------
+
+
+// Floats
+// -------------------------
+
+.clearfix {
+  .clearfix();
+}
+.pull-right {
+  float: right !important;
+}
+.pull-left {
+  float: left !important;
+}
+
+
+// Toggling content
+// -------------------------
+
+.hide {
+  display: none !important;
+}
+.show {
+  display: block !important;
+}
+.invisible {
+  visibility: hidden;
+}
+.text-hide {
+  .text-hide();
+}
+
+
+// For Affix plugin
+// -------------------------
+
+.affix {
+  position: fixed;
+}
diff --git a/client/galaxy/style/less/bootstrap/variables.less b/client/galaxy/style/less/bootstrap/variables.less
new file mode 100644
index 0000000..caa7630
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/variables.less
@@ -0,0 +1,628 @@
+//
+// Variables
+// --------------------------------------------------
+
+
+// Global values
+// --------------------------------------------------
+
+// Grays
+// -------------------------
+
+@gray-darker:            lighten(#000, 13.5%); // #222
+@gray-dark:              lighten(#000, 20%);   // #333
+@gray:                   lighten(#000, 33.5%); // #555
+@gray-light:             lighten(#000, 60%);   // #999
+@gray-lighter:           lighten(#000, 93.5%); // #eee
+
+// Brand colors
+// -------------------------
+
+@brand-primary:         #428bca;
+@brand-success:         #5cb85c;
+@brand-warning:         #f0ad4e;
+@brand-danger:          #d9534f;
+@brand-info:            #5bc0de;
+
+// Scaffolding
+// -------------------------
+
+@body-bg:               #fff;
+@text-color:            @gray-dark;
+
+// Links
+// -------------------------
+
+@link-color:            @brand-primary;
+@link-hover-color:      darken(@link-color, 15%);
+
+// Typography
+// -------------------------
+
+@font-family-sans-serif:  "Helvetica Neue", Helvetica, Arial, sans-serif;
+@font-family-serif:       Georgia, "Times New Roman", Times, serif;
+@font-family-monospace:   Monaco, Menlo, Consolas, "Courier New", monospace;
+@font-family-base:        @font-family-sans-serif;
+
+@font-size-base:          14px;
+@font-size-large:         ceil(@font-size-base * 1.25); // ~18px
+@font-size-small:         ceil(@font-size-base * 0.85); // ~12px
+
+@font-size-h1:            floor(@font-size-base * 2.60); // ~36px
+@font-size-h2:            floor(@font-size-base * 2.15); // ~30px
+@font-size-h3:            ceil(@font-size-base * 1.70); // ~24px
+@font-size-h4:            ceil(@font-size-base * 1.25); // ~18px
+@font-size-h5:            @font-size-base;
+@font-size-h6:            ceil(@font-size-base * 0.85); // ~12px
+
+@line-height-base:        1.428571429; // 20/14
+@line-height-computed:    floor(@font-size-base * @line-height-base); // ~20px
+
+@headings-font-family:    @font-family-base;
+@headings-font-weight:    500;
+@headings-line-height:    1.1;
+
+// Iconography
+// -------------------------
+
+@icon-font-path:          "../fonts/";
+@icon-font-name:          "glyphicons-halflings-regular";
+
+
+// Components
+// -------------------------
+// Based on 14px font-size and 1.428 line-height (~20px to start)
+
+@padding-base-vertical:          6px;
+@padding-base-horizontal:        12px;
+
+@padding-large-vertical:         10px;
+@padding-large-horizontal:       16px;
+
+@padding-small-vertical:         5px;
+@padding-small-horizontal:       10px;
+
+@line-height-large:              1.33;
+@line-height-small:              1.5;
+
+@border-radius-base:             4px;
+@border-radius-large:            6px;
+@border-radius-small:            3px;
+
+@component-active-bg:            @brand-primary;
+
+@caret-width-base:               4px;
+@caret-width-large:              5px;
+
+// Tables
+// -------------------------
+
+@table-cell-padding:                 8px;
+@table-condensed-cell-padding:       5px;
+
+@table-bg:                           transparent; // overall background-color
+@table-bg-accent:                    #f9f9f9; // for striping
+@table-bg-hover:                     #f5f5f5;
+@table-bg-active:                    @table-bg-hover;
+
+@table-border-color:                 #ddd; // table and cell border
+
+
+// Buttons
+// -------------------------
+
+@btn-font-weight:                normal;
+
+@btn-default-color:              #333;
+@btn-default-bg:                 #fff;
+@btn-default-border:             #ccc;
+
+@btn-primary-color:              #fff;
+@btn-primary-bg:                 @brand-primary;
+@btn-primary-border:             darken(@btn-primary-bg, 5%);
+
+@btn-success-color:              #fff;
+@btn-success-bg:                 @brand-success;
+@btn-success-border:             darken(@btn-success-bg, 5%);
+
+@btn-warning-color:              #fff;
+@btn-warning-bg:                 @brand-warning;
+@btn-warning-border:             darken(@btn-warning-bg, 5%);
+
+@btn-danger-color:               #fff;
+@btn-danger-bg:                  @brand-danger;
+@btn-danger-border:              darken(@btn-danger-bg, 5%);
+
+@btn-info-color:                 #fff;
+@btn-info-bg:                    @brand-info;
+@btn-info-border:                darken(@btn-info-bg, 5%);
+
+@btn-link-disabled-color:        @gray-light;
+
+
+// Forms
+// -------------------------
+
+@input-bg:                       #fff;
+@input-bg-disabled:              @gray-lighter;
+
+@input-color:                    @gray;
+@input-border:                   #ccc;
+@input-border-radius:            @border-radius-base;
+@input-border-focus:             #66afe9;
+
+@input-color-placeholder:        @gray-light;
+
+@input-height-base:              (@line-height-computed + (@padding-base-vertical * 2) + 2);
+@input-height-large:             (floor(@font-size-large * @line-height-large) + (@padding-large-vertical * 2) + 2);
+@input-height-small:             (floor(@font-size-small * @line-height-small) + (@padding-small-vertical * 2) + 2);
+
+@legend-color:                   @gray-dark;
+@legend-border-color:            #e5e5e5;
+
+@input-group-addon-bg:           @gray-lighter;
+@input-group-addon-border-color: @input-border;
+
+
+// Dropdowns
+// -------------------------
+
+@dropdown-bg:                    #fff;
+@dropdown-border:                rgba(0,0,0,.15);
+@dropdown-fallback-border:       #ccc;
+@dropdown-divider-bg:            #e5e5e5;
+
+@dropdown-link-active-color:     #fff;
+@dropdown-link-active-bg:        @component-active-bg;
+
+@dropdown-link-color:            @gray-dark;
+@dropdown-link-hover-color:      #fff;
+@dropdown-link-hover-bg:         @dropdown-link-active-bg;
+
+@dropdown-link-disabled-color:   @gray-light;
+
+@dropdown-header-color:          @gray-light;
+
+@dropdown-caret-color:           #000;
+
+
+// COMPONENT VARIABLES
+// --------------------------------------------------
+
+
+// Z-index master list
+// -------------------------
+// Used for a bird's eye view of components dependent on the z-axis
+// Try to avoid customizing these :)
+
+@zindex-navbar:            1000;
+@zindex-dropdown:          1000;
+@zindex-popover:           1010;
+@zindex-tooltip:           1030;
+@zindex-navbar-fixed:      1030;
+@zindex-modal-background:  1040;
+@zindex-modal:             1050;
+
+// Media queries breakpoints
+// --------------------------------------------------
+
+// Extra small screen / phone
+// Note: Deprecated @screen-xs and @screen-phone as of v3.0.1
+@screen-xs:                  480px;
+@screen-xs-min:              @screen-xs;
+@screen-phone:               @screen-xs-min;
+
+// Small screen / tablet
+// Note: Deprecated @screen-sm and @screen-tablet as of v3.0.1
+@screen-sm:                  768px;
+@screen-sm-min:              @screen-sm;
+@screen-tablet:              @screen-sm-min;
+
+// Medium screen / desktop
+// Note: Deprecated @screen-md and @screen-desktop as of v3.0.1
+@screen-md:                  992px;
+@screen-md-min:              @screen-md;
+@screen-desktop:             @screen-md-min;
+
+// Large screen / wide desktop
+// Note: Deprecated @screen-lg and @screen-lg-desktop as of v3.0.1
+@screen-lg:                  1200px;
+@screen-lg-min:              @screen-lg;
+@screen-lg-desktop:          @screen-lg-min;
+
+// So media queries don't overlap when required, provide a maximum
+@screen-xs-max:              (@screen-sm-min - 1);
+@screen-sm-max:              (@screen-md-min - 1);
+@screen-md-max:              (@screen-lg-min - 1);
+
+
+// Grid system
+// --------------------------------------------------
+
+// Number of columns in the grid system
+@grid-columns:              12;
+// Padding, to be divided by two and applied to the left and right of all columns
+@grid-gutter-width:         30px;
+// Point at which the navbar stops collapsing
+@grid-float-breakpoint:     @screen-sm;
+
+
+// Navbar
+// -------------------------
+
+// Basics of a navbar
+@navbar-height:                    50px;
+@navbar-margin-bottom:             @line-height-computed;
+@navbar-default-color:             #777;
+@navbar-default-bg:                #f8f8f8;
+@navbar-default-border:            darken(@navbar-default-bg, 6.5%);
+@navbar-border-radius:             @border-radius-base;
+@navbar-padding-horizontal:        floor(@grid-gutter-width / 2);
+@navbar-padding-vertical:          ((@navbar-height - @line-height-computed) / 2);
+
+// Navbar links
+@navbar-default-link-color:                #777;
+@navbar-default-link-hover-color:          #333;
+@navbar-default-link-hover-bg:             transparent;
+@navbar-default-link-active-color:         #555;
+@navbar-default-link-active-bg:            darken(@navbar-default-bg, 6.5%);
+@navbar-default-link-disabled-color:       #ccc;
+@navbar-default-link-disabled-bg:          transparent;
+
+// Navbar brand label
+@navbar-default-brand-color:               @navbar-default-link-color;
+@navbar-default-brand-hover-color:         darken(@navbar-default-link-color, 10%);
+@navbar-default-brand-hover-bg:            transparent;
+
+// Navbar toggle
+@navbar-default-toggle-hover-bg:           #ddd;
+@navbar-default-toggle-icon-bar-bg:        #ccc;
+@navbar-default-toggle-border-color:       #ddd;
+
+
+// Inverted navbar
+//
+// Reset inverted navbar basics
+@navbar-inverse-color:                      @gray-light;
+@navbar-inverse-bg:                         #222;
+@navbar-inverse-border:                     darken(@navbar-inverse-bg, 10%);
+
+// Inverted navbar links
+@navbar-inverse-link-color:                 @gray-light;
+@navbar-inverse-link-hover-color:           #fff;
+@navbar-inverse-link-hover-bg:              transparent;
+@navbar-inverse-link-active-color:          @navbar-inverse-link-hover-color;
+@navbar-inverse-link-active-bg:             darken(@navbar-inverse-bg, 10%);
+@navbar-inverse-link-disabled-color:        #444;
+@navbar-inverse-link-disabled-bg:           transparent;
+
+// Inverted navbar brand label
+@navbar-inverse-brand-color:                @navbar-inverse-link-color;
+@navbar-inverse-brand-hover-color:          #fff;
+@navbar-inverse-brand-hover-bg:             transparent;
+
+// Inverted navbar toggle
+@navbar-inverse-toggle-hover-bg:            #333;
+@navbar-inverse-toggle-icon-bar-bg:         #fff;
+@navbar-inverse-toggle-border-color:        #333;
+
+
+// Navs
+// -------------------------
+
+@nav-link-padding:                          10px 15px;
+@nav-link-hover-bg:                         @gray-lighter;
+
+@nav-disabled-link-color:                   @gray-light;
+@nav-disabled-link-hover-color:             @gray-light;
+
+@nav-open-link-hover-color:                 #fff;
+@nav-open-caret-border-color:               #fff;
+
+// Tabs
+@nav-tabs-border-color:                     #ddd;
+
+@nav-tabs-link-hover-border-color:          @gray-lighter;
+
+@nav-tabs-active-link-hover-bg:             @body-bg;
+@nav-tabs-active-link-hover-color:          @gray;
+@nav-tabs-active-link-hover-border-color:   #ddd;
+
+@nav-tabs-justified-link-border-color:            #ddd;
+@nav-tabs-justified-active-link-border-color:     @body-bg;
+
+// Pills
+@nav-pills-active-link-hover-bg:            @component-active-bg;
+@nav-pills-active-link-hover-color:         #fff;
+
+
+// Pagination
+// -------------------------
+
+@pagination-bg:                        #fff;
+@pagination-border:                    #ddd;
+
+@pagination-hover-bg:                  @gray-lighter;
+
+@pagination-active-bg:                 @brand-primary;
+@pagination-active-color:              #fff;
+
+@pagination-disabled-color:            @gray-light;
+
+
+// Pager
+// -------------------------
+
+@pager-border-radius:                  15px;
+@pager-disabled-color:                 @gray-light;
+
+
+// Jumbotron
+// -------------------------
+
+@jumbotron-padding:              30px;
+@jumbotron-color:                inherit;
+@jumbotron-bg:                   @gray-lighter;
+
+@jumbotron-heading-color:        inherit;
+
+
+// Form states and alerts
+// -------------------------
+
+@state-warning-text:             #c09853;
+@state-warning-bg:               #fcf8e3;
+@state-warning-border:           darken(spin(@state-warning-bg, -10), 3%);
+
+@state-danger-text:              #b94a48;
+@state-danger-bg:                #f2dede;
+@state-danger-border:            darken(spin(@state-danger-bg, -10), 3%);
+
+@state-success-text:             #468847;
+@state-success-bg:               #dff0d8;
+@state-success-border:           darken(spin(@state-success-bg, -10), 5%);
+
+@state-info-text:                #3a87ad;
+@state-info-bg:                  #d9edf7;
+@state-info-border:              darken(spin(@state-info-bg, -10), 7%);
+
+
+// Tooltips
+// -------------------------
+@tooltip-max-width:           200px;
+@tooltip-color:               #fff;
+@tooltip-bg:                  #000;
+
+@tooltip-arrow-width:         5px;
+@tooltip-arrow-color:         @tooltip-bg;
+
+
+// Popovers
+// -------------------------
+@popover-bg:                          #fff;
+@popover-max-width:                   276px;
+@popover-border-color:                rgba(0,0,0,.2);
+@popover-fallback-border-color:       #ccc;
+
+@popover-title-bg:                    darken(@popover-bg, 3%);
+
+@popover-arrow-width:                 10px;
+@popover-arrow-color:                 #fff;
+
+@popover-arrow-outer-width:           (@popover-arrow-width + 1);
+@popover-arrow-outer-color:           rgba(0,0,0,.25);
+@popover-arrow-outer-fallback-color:  #999;
+
+
+// Labels
+// -------------------------
+
+@label-default-bg:            @gray-light;
+@label-primary-bg:            @brand-primary;
+@label-success-bg:            @brand-success;
+@label-info-bg:               @brand-info;
+@label-warning-bg:            @brand-warning;
+@label-danger-bg:             @brand-danger;
+
+@label-color:                 #fff;
+@label-link-hover-color:      #fff;
+
+
+// Modals
+// -------------------------
+@modal-inner-padding:         20px;
+
+@modal-title-padding:         15px;
+@modal-title-line-height:     @line-height-base;
+
+@modal-content-bg:                             #fff;
+@modal-content-border-color:                   rgba(0,0,0,.2);
+@modal-content-fallback-border-color:          #999;
+
+@modal-backdrop-bg:           #000;
+@modal-header-border-color:   #e5e5e5;
+@modal-footer-border-color:   @modal-header-border-color;
+
+
+// Alerts
+// -------------------------
+@alert-padding:               15px;
+@alert-border-radius:         @border-radius-base;
+@alert-link-font-weight:      bold;
+
+@alert-success-bg:            @state-success-bg;
+@alert-success-text:          @state-success-text;
+@alert-success-border:        @state-success-border;
+
+@alert-info-bg:               @state-info-bg;
+@alert-info-text:             @state-info-text;
+@alert-info-border:           @state-info-border;
+
+@alert-warning-bg:            @state-warning-bg;
+@alert-warning-text:          @state-warning-text;
+@alert-warning-border:        @state-warning-border;
+
+@alert-danger-bg:             @state-danger-bg;
+@alert-danger-text:           @state-danger-text;
+@alert-danger-border:         @state-danger-border;
+
+
+// Progress bars
+// -------------------------
+@progress-bg:                 #f5f5f5;
+@progress-bar-color:          #fff;
+
+@progress-bar-bg:             @brand-primary;
+@progress-bar-success-bg:     @brand-success;
+@progress-bar-warning-bg:     @brand-warning;
+@progress-bar-danger-bg:      @brand-danger;
+@progress-bar-info-bg:        @brand-info;
+
+
+// List group
+// -------------------------
+@list-group-bg:               #fff;
+@list-group-border:           #ddd;
+@list-group-border-radius:    @border-radius-base;
+
+@list-group-hover-bg:         #f5f5f5;
+@list-group-active-color:     #fff;
+@list-group-active-bg:        @component-active-bg;
+@list-group-active-border:    @list-group-active-bg;
+
+@list-group-link-color:          #555;
+@list-group-link-heading-color:  #333;
+
+
+// Panels
+// -------------------------
+@panel-bg:                    #fff;
+@panel-inner-border:          #ddd;
+@panel-border-radius:         @border-radius-base;
+@panel-footer-bg:             #f5f5f5;
+
+@panel-default-text:          @gray-dark;
+@panel-default-border:        #ddd;
+@panel-default-heading-bg:    #f5f5f5;
+
+@panel-primary-text:          #fff;
+@panel-primary-border:        @brand-primary;
+@panel-primary-heading-bg:    @brand-primary;
+
+@panel-success-text:          @state-success-text;
+@panel-success-border:        @state-success-border;
+@panel-success-heading-bg:    @state-success-bg;
+
+@panel-warning-text:          @state-warning-text;
+@panel-warning-border:        @state-warning-border;
+@panel-warning-heading-bg:    @state-warning-bg;
+
+@panel-danger-text:           @state-danger-text;
+@panel-danger-border:         @state-danger-border;
+@panel-danger-heading-bg:     @state-danger-bg;
+
+@panel-info-text:             @state-info-text;
+@panel-info-border:           @state-info-border;
+@panel-info-heading-bg:       @state-info-bg;
+
+
+// Thumbnails
+// -------------------------
+@thumbnail-padding:           4px;
+@thumbnail-bg:                @body-bg;
+@thumbnail-border:            #ddd;
+@thumbnail-border-radius:     @border-radius-base;
+
+@thumbnail-caption-color:     @text-color;
+@thumbnail-caption-padding:   9px;
+
+
+// Wells
+// -------------------------
+@well-bg:                     #f5f5f5;
+
+
+// Badges
+// -------------------------
+@badge-color:                 #fff;
+@badge-link-hover-color:      #fff;
+@badge-bg:                    @gray-light;
+
+@badge-active-color:          @link-color;
+@badge-active-bg:             #fff;
+
+@badge-font-weight:           bold;
+@badge-line-height:           1;
+@badge-border-radius:         10px;
+
+
+// Breadcrumbs
+// -------------------------
+@breadcrumb-bg:               #f5f5f5;
+@breadcrumb-color:            #ccc;
+@breadcrumb-active-color:     @gray-light;
+
+
+// Carousel
+// ------------------------
+
+@carousel-text-shadow:                        0 1px 2px rgba(0,0,0,.6);
+
+@carousel-control-color:                      #fff;
+@carousel-control-width:                      15%;
+@carousel-control-opacity:                    .5;
+@carousel-control-font-size:                  20px;
+
+@carousel-indicator-active-bg:                #fff;
+@carousel-indicator-border-color:             #fff;
+
+@carousel-caption-color:                      #fff;
+
+
+// Close
+// ------------------------
+@close-color:                 #000;
+@close-font-weight:           bold;
+@close-text-shadow:           0 1px 0 #fff;
+
+
+// Code
+// ------------------------
+@code-color:                  #c7254e;
+@code-bg:                     #f9f2f4;
+
+@pre-bg:                      #f5f5f5;
+@pre-color:                   @gray-dark;
+@pre-border-color:            #ccc;
+@pre-scrollable-max-height:   340px;
+
+// Type
+// ------------------------
+@text-muted:                  @gray-light;
+@abbr-border-color:           @gray-light;
+@headings-small-color:        @gray-light;
+@blockquote-small-color:      @gray-light;
+@blockquote-border-color:     @gray-lighter;
+@page-header-border-color:    @gray-lighter;
+
+// Miscellaneous
+// -------------------------
+
+// Hr border color
+@hr-border:                   @gray-lighter;
+
+// Horizontal forms & lists
+@component-offset-horizontal: 180px;
+
+
+// Container sizes
+// --------------------------------------------------
+
+// Small screen / tablet
+@container-tablet:            ((720px + @grid-gutter-width));
+
+// Medium screen / desktop
+@container-desktop:           ((940px + @grid-gutter-width));
+
+// Large screen / wide desktop
+@container-lg-desktop:        ((1140px + @grid-gutter-width));
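
Everything in this file is a plain LESS variable, and LESS resolves variables lazily
(the last definition in scope wins), so downstream styles can re-theme Bootstrap without
editing this file: import it, then redefine the knobs. A sketch with invented values;
the import paths assume a file sitting next to the bootstrap/ directory:

    @import "bootstrap/variables.less";
    @import "bootstrap/mixins.less";

    // Later definitions win, so these override the defaults above
    // everywhere they are referenced, including inside Bootstrap's own rules.
    @brand-primary:  #025aa5;  // invented value
    @font-size-base: 13px;     // invented value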
diff --git a/client/galaxy/style/less/bootstrap/wells.less b/client/galaxy/style/less/bootstrap/wells.less
new file mode 100644
index 0000000..865abc2
--- /dev/null
+++ b/client/galaxy/style/less/bootstrap/wells.less
@@ -0,0 +1,29 @@
+//
+// Wells
+// --------------------------------------------------
+
+
+// Base class
+.well {
+  min-height: 20px;
+  padding: 19px;
+  margin-bottom: 20px;
+  background-color: @well-bg;
+  border: 1px solid darken(@well-bg, 7%);
+  border-radius: @border-radius-base;
+  .box-shadow(inset 0 1px 1px rgba(0,0,0,.05));
+  blockquote {
+    border-color: #ddd;
+    border-color: rgba(0,0,0,.15);
+  }
+}
+
+// Sizes
+.well-lg {
+  padding: 24px;
+  border-radius: @border-radius-large;
+}
+.well-sm {
+  padding: 9px;
+  border-radius: @border-radius-small;
+}
diff --git a/client/galaxy/style/less/circster.less b/client/galaxy/style/less/circster.less
new file mode 100644
index 0000000..2578d0f
--- /dev/null
+++ b/client/galaxy/style/less/circster.less
@@ -0,0 +1,7 @@
+.chrom-label {
+	font-size: 80%;
+}
+
+.tick {
+	font-size: 80%;
+}
\ No newline at end of file
diff --git a/client/galaxy/style/less/collection.less b/client/galaxy/style/less/collection.less
new file mode 100644
index 0000000..fbd595f
--- /dev/null
+++ b/client/galaxy/style/less/collection.less
@@ -0,0 +1,74 @@
+//============================================================================= collection element
+.dataset-collection-element {
+    .has-job-state-mixin;
+    .title-bar {
+        .subtitle {
+            margin-top: 2px;
+        }
+    }
+    .details {
+        display: none;
+    }
+}
+
+//.dataset-collection-element.dataset {
+//}
+//
+//.dataset-collection-element.dataset-collection {
+//}
+
+//============================================================================= collection
+.history-content.dataset-collection {
+    .has-job-state-mixin;
+}
+
+//============================================================================= collection panel
+.dataset-collection-panel {
+    .flex-vertical-container;
+
+    .vertically-spaced {
+        margin-top: 4px;
+    }
+
+    > .controls {
+        margin: 8px 8px 1px;
+        padding: 0px;
+        // padding: 8px;
+
+        .navigation {
+            margin-top: 4px;
+            .back {
+                span {
+                    .fa-lg;
+                    margin-right: 4px;
+                    font-weight: bold;
+                }
+            }
+        }
+        .title {
+            margin-top: 12px;
+            .name {
+                font-weight: bold;
+            }
+        }
+    }
+
+    > .list-items {
+        .flex-column;
+        overflow-x: hidden;
+        overflow-y: auto;
+
+        &:not(:empty) {
+            border-top: 1px solid @border-default-color;
+        }
+        .list-item {
+            border-width: 1px 0px 0px 0px;
+            &:first-child {
+                border-top-width: 0px;
+            }
+            &:last-child {
+                border-bottom-width: 1px;
+            }
+        }
+    }
+}
diff --git a/client/galaxy/style/less/dataset.less b/client/galaxy/style/less/dataset.less
new file mode 100644
index 0000000..e85f75c
--- /dev/null
+++ b/client/galaxy/style/less/dataset.less
@@ -0,0 +1,340 @@
+ at import "sprite-history-states.less";
+ at import "sprite-history-buttons.less";
+
+
+// ---------------------------------------------------------------------------- dataset states
+//.dataset .state-icon {
+.state-icon {
+    .fa-icon;
+    display: inline-block;
+    margin-right: 4px;
+    vertical-align: middle;
+    width: 16px;
+    height: 16px;
+    line-height: 16px;
+    text-align: center;
+    font-size: 16px;
+}
+
+// ............................................................................ animated or composite state icons
+.state-icon-running {
+    //TODO: couldn't find a way to do this with fa/spinning.less as mixin
+    -webkit-animation: spin 2s infinite linear;
+    -moz-animation: spin 2s infinite linear;
+    -o-animation: spin 2s infinite linear;
+    animation: spin 2s infinite linear;
+
+    @-moz-keyframes spin {
+        0% { -moz-transform: rotate(0deg); }
+        100% { -moz-transform: rotate(359deg); }
+    }
+    @-webkit-keyframes spin {
+        0% { -webkit-transform: rotate(0deg); }
+        100% { -webkit-transform: rotate(359deg); }
+    }
+    @-o-keyframes spin {
+        0% { -o-transform: rotate(0deg); }
+        100% { -o-transform: rotate(359deg); }
+    }
+    @-ms-keyframes spin {
+        0% { -ms-transform: rotate(0deg); }
+        100% { -ms-transform: rotate(359deg); }
+    }
+    @keyframes spin {
+        0% { transform: rotate(0deg); }
+        100% { transform: rotate(359deg); }
+    }
+    &:before {
+        content: @fa-var-spinner;
+    }
+}
+
+.state-icon-upload {
+    overflow: hidden;
+
+    &:before {
+        display: inline-block;
+        -webkit-animation: eclipse 2s infinite linear;
+        -moz-animation: eclipse 2s infinite linear;
+        -o-animation: eclipse 2s infinite linear;
+        animation: eclipse 2s infinite linear;
+
+        @-moz-keyframes eclipse {
+            0% { -moz-transform: translate(0px,16px); }
+            100% { -moz-transform: translate(0px,-16px); }
+        }
+        @-webkit-keyframes eclipse {
+            0% { -webkit-transform: translate(0px,16px); }
+            100% { -webkit-transform: translate(0px,-16px); }
+        }
+        @-o-keyframes eclipse {
+            0% { -o-transform: translate(0px,16px); }
+            100% { -o-transform: translate(0px,-16px); }
+        }
+        @-ms-keyframes eclipse {
+            0% { -ms-transform: translate(0px,16px); }
+            100% { -ms-transform: translate(0px,-16px); }
+        }
+        @keyframes eclipse {
+            0% { transform: translate(0px,16px); }
+            100% { transform: translate(0px,-16px); }
+        }
+        content: @fa-var-arrow-up;
+    }
+}
+
+.state-icon-error {
+    background-color: white;
+    border-radius: 8px;
+    &:before {
+        font-size: 20px;
+        line-height: 16px;
+        color: red;
+        content: @fa-var-times-circle;
+    }
+}
+
+// ............................................................................ state bgs and icons
+.has-job-state-mixin {
+    // Change background/border color depending on state
+    &.state-ok,
+    &.state-failed_metadata {
+        background: @state-success-bg;
+        .state-icon {
+            display: none;
+        }
+    }
+
+    &.state-error,
+    &.state-empty {
+        background: @state-danger-bg;
+        .state-icon {
+            .state-icon-error;
+        }
+    }
+
+    &.state-upload {
+        background: @state-info-bg;
+        .state-icon {
+            .state-icon-upload;
+        }
+    }
+
+    &.state-queued {
+        background: @state-default-bg;
+        .state-icon {
+            &:before {
+                content: @fa-var-clock-o;
+            }
+        }
+    }
+
+    &.state-running,
+    &.state-setting_metadata {
+        background: @state-running-bg;
+        .state-icon {
+            .state-icon-running;
+        }
+    }
+
+    &.state-paused {
+        background: @state-paused-bg;
+        .state-icon {
+            &:before {
+                content: @fa-var-pause;
+            }
+        }
+    }
+
+    &.state-discarded {
+        background: @state-default-bg;
+        .state-icon {
+            &:before {
+                content: @fa-var-trash-o;
+            }
+        }
+    }
+
+    &.state-noPermission {
+        background: @state-default-bg;
+        filter: alpha(opacity=60);
+        -moz-opacity: .60;
+        opacity: .60;
+        .state-icon {
+            font-size: 20px;
+            &:before {
+                content: @fa-var-lock;
+            }
+        }
+    }
+
+    &.state-new {
+        background: @state-default-bg;
+        .state-icon {
+            &:before {
+                content: @fa-var-exclamation-circle;
+            }
+        }
+    }
+}
+
+// ---------------------------------------------------------------------------- datasets as list-items
+.dataset {
+    .has-job-state-mixin;
+
+    .details {
+        .summary {
+            .blurb {
+                margin-bottom: 2px;
+            }
+            .datatype, .dbkey {
+                display: inline;
+                .value {
+                    font-weight: bold;
+                }
+            }
+            .datatype .value:after {
+                content: ',';
+                .list-item .help-text;
+                margin-right: 4px;
+            }
+            .dbkey:after {
+                content: ' ';
+                display: block;
+                .list-item .vertical-spacing;
+            }
+            .info {
+                .list-item .info-section;
+                overflow: auto;
+                .value {
+                    white-space: pre-line;
+                }
+            }
+            .job-error-text {
+                .list-item .info-section;
+                overflow: auto;
+                white-space: pre;
+            }
+            // handle the last vertical gap in the parent as summary is used in other states as well (w/o info)
+            .list-item .vertical-spacing;
+        }
+
+        .actions {
+            // bit hacky here bc dropdown wrapped in div
+            .metafile-dropdown {
+                display: inline-block;
+            }
+            .visualizations-dropdown {
+                display: inline-block;
+            }
+
+            .left {
+                .icon-btn-group();
+                float: left;
+                .icon-btn {
+                    margin-right: @icon-btn-margin;
+                }
+            }
+            .right {
+                .icon-btn-group();
+                float: right;
+                .icon-btn {
+                    margin-left: @icon-btn-margin;
+                }
+            }
+            &:not(:last-child) {
+                .list-item .vertical-spacing;
+            }
+        }
+
+        //TODO: move these out
+        .tags-display {
+            display: none;
+            .list-item .vertical-spacing;
+            .select2-container {
+                min-width: 0px;
+                .select2-choices {
+                    border-radius: 3px;
+                }
+            }
+        }
+        .annotation-display {
+            display: none;
+            .list-item .vertical-spacing;
+            .annotation {
+                .list-item .info-section;
+                //white-space: pre-line;
+                white-space: pre-wrap;
+                overflow: auto;
+            }
+            // fake placeholder for editable text annotation
+            .annotation:empty:after {
+                position: relative;
+                top: -4px;
+                font-size: 10px;
+                font-style: italic;
+                color: grey;
+                //TODO: move out for localization
+                content: 'Click here to edit annotation';
+            }
+            textarea {
+                margin: 0px 0px 2px 0px;
+                display: inline-block;
+                border-radius: 3px;
+                width: 100%;
+            }
+        }
+
+        .display-applications {
+            .display-application:last-child {
+                .list-item .vertical-spacing;
+            }
+        }
+
+        .toolhelp {
+            width: 100%;
+            background:#eee;
+            border-radius: 3px;
+            margin-top: 5px;
+            padding: 3px;
+        }
+
+        .dataset-peek {
+            .list-item .vertical-spacing;
+            width: 100%;
+            margin: 0px;
+            border-radius: 3px;
+            background: white;
+            color: black;
+            font-size: 10px;
+            overflow: auto;
+
+//TODO: there doesn't seem to be a way to pad text-only peeks (binary data, etc.)
+            th {
+                color: white;
+                background: @panel-primary-heading-bg;
+            }
+            table, th, tr, td {
+                font-family: @font-family-monospace;
+                font-size: 10px;
+            }
+        }
+    }
+}
+
+// ---------------------------------------------------------------------------- older styles
+pre.peek {
+    background: white;
+    color: black;
+    width: 100%;
+    font-size: 10px;
+    overflow: auto;
+    th {
+        color: white;
+        background: @panel-primary-heading-bg;
+    }
+    table, th, tr, td {
+        font-family: @font-family-monospace;
+        font-size: 10px;
+    }
+}
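
A note on the recurring ".list-item .vertical-spacing;" statements in the styles above: this is LESS's namespaced-mixin call syntax, which copies the declarations of a ruleset nested under another selector into the current rule. A minimal sketch of the mechanism, with an illustrative stand-in for the real .vertical-spacing ruleset (which lives in the list-item styles elsewhere in this import):

    // illustrative definition; the real one is in the list-item styles
    .list-item {
        .vertical-spacing {
            margin-bottom: 8px;
        }
    }
    .info {
        .list-item .vertical-spacing;   // pastes "margin-bottom: 8px;" here
        overflow: auto;
    }
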
diff --git a/client/galaxy/style/less/embed_item.less b/client/galaxy/style/less/embed_item.less
new file mode 100644
index 0000000..9334133
--- /dev/null
+++ b/client/galaxy/style/less/embed_item.less
@@ -0,0 +1,90 @@
+.embedded-item { 
+    margin: 0em auto;
+    width: 90%;
+    -moz-border-radius: 0.5em;
+    -webkit-border-radius: 0.5em;
+    border-radius: 0.5em;
+
+    .expanded-content {
+        display: none;
+        background-color: white;
+    }
+
+    .item-content {
+        max-height: 45em;
+        overflow: auto;
+    }
+
+    & > .title {
+        vertical-align: top;
+        text-align: center;
+        font-weight: bold;
+        padding-bottom: 5px;
+    }
+
+    &.placeholder .content {
+        padding: 0.5em 0.5em;
+        font-style: italic;
+        text-align: center;
+    }
+
+    p {
+        background: inherit;
+        margin-top:0;
+        margin-bottom:0;
+    }
+
+    table.annotated-item {
+        width: 100%;
+        border-collapse: collapse;
+    }
+
+    table.annotated-item td,th {
+        padding: 0;
+    }
+
+    table.annotated-item .annotation {
+        padding-left: 2em;
+        width: 40%;
+    }
+
+    table.annotated-item td.annotation {
+        vertical-align: text-top;
+        padding-top: 1em;
+    }
+}
+
+.embedded-item.display {
+    padding: 5px 10px 10px 10px;
+}
+
+.embedded-item.history {
+    background-color: #C1C9E5;
+}
+
+.embedded-item.dataset {
+    background-color: #CFC;
+    .item-content {
+        padding: 5px;
+    }
+}
+
+.embedded-item.workflow { 
+    background-color: #FBDDB3;
+    .item-content {
+        padding: 5px;
+    }
+}
+
+.embedded-item.visualization {
+    background-color: #BBBBBB;
+    .item-content {
+        overflow: hidden;
+        height: 100%;
+        iframe {
+            min-height: 320px;
+            //TODO: hack to fix small gap after iframe
+            margin-bottom: -4px;
+        }
+    }
+}
diff --git a/client/galaxy/style/less/flex.less b/client/galaxy/style/less/flex.less
new file mode 100644
index 0000000..e0a5ef4
--- /dev/null
+++ b/client/galaxy/style/less/flex.less
@@ -0,0 +1,73 @@
+// ------------------------------------ prefix function
+.vendor-prefix-property(@property; @value){
+    -webkit-@{property}: @value;
+       // -moz-@{property}: @value;
+        -ms-@{property}: @value;
+         // -o-@{property}: @value;
+            @{property}: @value;
+}
+
+// ------------------------------------ containers
+.flex-vertical-container,
+.flex-horizontal-container {
+    display: -webkit-flex;
+    display: -ms-flexbox;
+    display: flex;
+
+    /* force ff to squish beyond content:
+       https://developer.mozilla.org/en-US/Firefox/Releases/34/Site_Compatibility#CSS */
+    // min-width: 0px;
+    // min-height: 0px;
+
+    .vendor-prefix-property(flex-wrap, wrap);
+    .vendor-prefix-property(align-items, stretch);
+    -ms-flex-align: stretch;
+
+    .vendor-prefix-property(align-content, stretch);
+    -ms-flex-line-pack: stretch;
+
+    .vendor-prefix-property(justify-content, flex-start);
+    -ms-flex-pack: start;
+}
+
+.flex-vertical-container {
+    flex-flow: column nowrap;
+}
+
+.flex-horizontal-container {
+    flex-flow: row nowrap;
+}
+
+// ------------------------------------ contents
+.flex-row,
+.flex-column {
+    .vendor-prefix-property(flex, 1 1 auto);
+
+    .vendor-prefix-property(align-self, auto);
+    -ms-flex-item-align: auto;
+}
+
+// ------------------------------------ convenience versions
+// has a fixed height header
+.two-rows-header(@header-height){
+    .flex-vertical-container;
+
+    > * {
+        .flex-column;
+    }
+    > .header {
+        .vendor-prefix-property(flex, 0 0 @header-height);
+    }
+    > .middle {
+        overflow: auto;
+    }
+}
+
+// has a fixed height header and footer
+.three-rows-header-footer(@header-height, @footer-height){
+    .two-rows-header(@header-height);
+    > .footer {
+        .flex-column;
+        .vendor-prefix-property(flex, 0 0 @footer-height);
+    }
+}
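
For orientation: the two convenience mixins above expect a container whose direct children carry the classes .header, .middle and (for the second mixin) .footer; the header/footer rows get a fixed flex-basis while the middle row flexes and scrolls. A minimal consumer sketch, assuming a hypothetical .my-panel class that is not part of this commit:

    .my-panel {
        .two-rows-header(40px);   // fixed 40px header row, scrolling middle
    }
    // expected markup shape (illustrative):
    //   <div class="my-panel">
    //       <div class="header">toolbar</div>
    //       <div class="middle">long, scrolling content</div>
    //   </div>
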
diff --git a/client/galaxy/style/less/fontawesome/bordered-pulled.less b/client/galaxy/style/less/fontawesome/bordered-pulled.less
new file mode 100644
index 0000000..0c90eb5
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/bordered-pulled.less
@@ -0,0 +1,16 @@
+// Bordered & Pulled
+// -------------------------
+
+.@{fa-css-prefix}-border {
+  padding: .2em .25em .15em;
+  border: solid .08em @fa-border-color;
+  border-radius: .1em;
+}
+
+.pull-right { float: right; }
+.pull-left { float: left; }
+
+.@{fa-css-prefix} {
+  &.pull-left { margin-right: .3em; }
+  &.pull-right { margin-left: .3em; }
+}
diff --git a/client/galaxy/style/less/fontawesome/core.less b/client/galaxy/style/less/fontawesome/core.less
new file mode 100644
index 0000000..1da1085
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/core.less
@@ -0,0 +1,10 @@
+// Base Class Definition
+// -------------------------
+
+.@{fa-css-prefix} {
+  display: inline-block;
+  font: normal normal normal 14px/1 FontAwesome; // shortening font declaration
+  text-rendering: auto; // optimizelegibility throws things off #1094
+  -webkit-font-smoothing: antialiased;
+  -moz-osx-font-smoothing: grayscale;
+}
diff --git a/client/galaxy/style/less/fontawesome/fixed-width.less b/client/galaxy/style/less/fontawesome/fixed-width.less
new file mode 100644
index 0000000..110289f
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/fixed-width.less
@@ -0,0 +1,6 @@
+// Fixed Width Icons
+// -------------------------
+.@{fa-css-prefix}-fw {
+  width: (18em / 14);
+  text-align: center;
+}
diff --git a/client/galaxy/style/less/fontawesome/font-awesome.less b/client/galaxy/style/less/fontawesome/font-awesome.less
new file mode 100644
index 0000000..195fd46
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/font-awesome.less
@@ -0,0 +1,17 @@
+/*!
+ *  Font Awesome 4.2.0 by @davegandy - http://fontawesome.io - @fontawesome
+ *  License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)
+ */
+
+@import "variables.less";
+@import "mixins.less";
+@import "path.less";
+@import "core.less";
+@import "larger.less";
+@import "fixed-width.less";
+@import "list.less";
+@import "bordered-pulled.less";
+@import "spinning.less";
+@import "rotated-flipped.less";
+@import "stacked.less";
+@import "icons.less";
diff --git a/client/galaxy/style/less/fontawesome/icons.less b/client/galaxy/style/less/fontawesome/icons.less
new file mode 100644
index 0000000..b5c26c7
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/icons.less
@@ -0,0 +1,552 @@
+/* Font Awesome uses the Unicode Private Use Area (PUA) to ensure screen
+   readers do not read off random characters that represent icons */
+
+.@{fa-css-prefix}-glass:before { content: @fa-var-glass; }
+.@{fa-css-prefix}-music:before { content: @fa-var-music; }
+.@{fa-css-prefix}-search:before { content: @fa-var-search; }
+.@{fa-css-prefix}-envelope-o:before { content: @fa-var-envelope-o; }
+.@{fa-css-prefix}-heart:before { content: @fa-var-heart; }
+.@{fa-css-prefix}-star:before { content: @fa-var-star; }
+.@{fa-css-prefix}-star-o:before { content: @fa-var-star-o; }
+.@{fa-css-prefix}-user:before { content: @fa-var-user; }
+.@{fa-css-prefix}-film:before { content: @fa-var-film; }
+.@{fa-css-prefix}-th-large:before { content: @fa-var-th-large; }
+.@{fa-css-prefix}-th:before { content: @fa-var-th; }
+.@{fa-css-prefix}-th-list:before { content: @fa-var-th-list; }
+.@{fa-css-prefix}-check:before { content: @fa-var-check; }
+.@{fa-css-prefix}-remove:before,
+.@{fa-css-prefix}-close:before,
+.@{fa-css-prefix}-times:before { content: @fa-var-times; }
+.@{fa-css-prefix}-search-plus:before { content: @fa-var-search-plus; }
+.@{fa-css-prefix}-search-minus:before { content: @fa-var-search-minus; }
+.@{fa-css-prefix}-power-off:before { content: @fa-var-power-off; }
+.@{fa-css-prefix}-signal:before { content: @fa-var-signal; }
+.@{fa-css-prefix}-gear:before,
+.@{fa-css-prefix}-cog:before { content: @fa-var-cog; }
+.@{fa-css-prefix}-trash-o:before { content: @fa-var-trash-o; }
+.@{fa-css-prefix}-home:before { content: @fa-var-home; }
+.@{fa-css-prefix}-file-o:before { content: @fa-var-file-o; }
+.@{fa-css-prefix}-clock-o:before { content: @fa-var-clock-o; }
+.@{fa-css-prefix}-road:before { content: @fa-var-road; }
+.@{fa-css-prefix}-download:before { content: @fa-var-download; }
+.@{fa-css-prefix}-arrow-circle-o-down:before { content: @fa-var-arrow-circle-o-down; }
+.@{fa-css-prefix}-arrow-circle-o-up:before { content: @fa-var-arrow-circle-o-up; }
+.@{fa-css-prefix}-inbox:before { content: @fa-var-inbox; }
+.@{fa-css-prefix}-play-circle-o:before { content: @fa-var-play-circle-o; }
+.@{fa-css-prefix}-rotate-right:before,
+.@{fa-css-prefix}-repeat:before { content: @fa-var-repeat; }
+.@{fa-css-prefix}-refresh:before { content: @fa-var-refresh; }
+.@{fa-css-prefix}-list-alt:before { content: @fa-var-list-alt; }
+.@{fa-css-prefix}-lock:before { content: @fa-var-lock; }
+.@{fa-css-prefix}-flag:before { content: @fa-var-flag; }
+.@{fa-css-prefix}-headphones:before { content: @fa-var-headphones; }
+.@{fa-css-prefix}-volume-off:before { content: @fa-var-volume-off; }
+.@{fa-css-prefix}-volume-down:before { content: @fa-var-volume-down; }
+.@{fa-css-prefix}-volume-up:before { content: @fa-var-volume-up; }
+.@{fa-css-prefix}-qrcode:before { content: @fa-var-qrcode; }
+.@{fa-css-prefix}-barcode:before { content: @fa-var-barcode; }
+.@{fa-css-prefix}-tag:before { content: @fa-var-tag; }
+.@{fa-css-prefix}-tags:before { content: @fa-var-tags; }
+.@{fa-css-prefix}-book:before { content: @fa-var-book; }
+.@{fa-css-prefix}-bookmark:before { content: @fa-var-bookmark; }
+.@{fa-css-prefix}-print:before { content: @fa-var-print; }
+.@{fa-css-prefix}-camera:before { content: @fa-var-camera; }
+.@{fa-css-prefix}-font:before { content: @fa-var-font; }
+.@{fa-css-prefix}-bold:before { content: @fa-var-bold; }
+.@{fa-css-prefix}-italic:before { content: @fa-var-italic; }
+.@{fa-css-prefix}-text-height:before { content: @fa-var-text-height; }
+.@{fa-css-prefix}-text-width:before { content: @fa-var-text-width; }
+.@{fa-css-prefix}-align-left:before { content: @fa-var-align-left; }
+.@{fa-css-prefix}-align-center:before { content: @fa-var-align-center; }
+.@{fa-css-prefix}-align-right:before { content: @fa-var-align-right; }
+.@{fa-css-prefix}-align-justify:before { content: @fa-var-align-justify; }
+.@{fa-css-prefix}-list:before { content: @fa-var-list; }
+.@{fa-css-prefix}-dedent:before,
+.@{fa-css-prefix}-outdent:before { content: @fa-var-outdent; }
+.@{fa-css-prefix}-indent:before { content: @fa-var-indent; }
+.@{fa-css-prefix}-video-camera:before { content: @fa-var-video-camera; }
+.@{fa-css-prefix}-photo:before,
+.@{fa-css-prefix}-image:before,
+.@{fa-css-prefix}-picture-o:before { content: @fa-var-picture-o; }
+.@{fa-css-prefix}-pencil:before { content: @fa-var-pencil; }
+.@{fa-css-prefix}-map-marker:before { content: @fa-var-map-marker; }
+.@{fa-css-prefix}-adjust:before { content: @fa-var-adjust; }
+.@{fa-css-prefix}-tint:before { content: @fa-var-tint; }
+.@{fa-css-prefix}-edit:before,
+.@{fa-css-prefix}-pencil-square-o:before { content: @fa-var-pencil-square-o; }
+.@{fa-css-prefix}-share-square-o:before { content: @fa-var-share-square-o; }
+.@{fa-css-prefix}-check-square-o:before { content: @fa-var-check-square-o; }
+.@{fa-css-prefix}-arrows:before { content: @fa-var-arrows; }
+.@{fa-css-prefix}-step-backward:before { content: @fa-var-step-backward; }
+.@{fa-css-prefix}-fast-backward:before { content: @fa-var-fast-backward; }
+.@{fa-css-prefix}-backward:before { content: @fa-var-backward; }
+.@{fa-css-prefix}-play:before { content: @fa-var-play; }
+.@{fa-css-prefix}-pause:before { content: @fa-var-pause; }
+.@{fa-css-prefix}-stop:before { content: @fa-var-stop; }
+.@{fa-css-prefix}-forward:before { content: @fa-var-forward; }
+.@{fa-css-prefix}-fast-forward:before { content: @fa-var-fast-forward; }
+.@{fa-css-prefix}-step-forward:before { content: @fa-var-step-forward; }
+.@{fa-css-prefix}-eject:before { content: @fa-var-eject; }
+.@{fa-css-prefix}-chevron-left:before { content: @fa-var-chevron-left; }
+.@{fa-css-prefix}-chevron-right:before { content: @fa-var-chevron-right; }
+.@{fa-css-prefix}-plus-circle:before { content: @fa-var-plus-circle; }
+.@{fa-css-prefix}-minus-circle:before { content: @fa-var-minus-circle; }
+.@{fa-css-prefix}-times-circle:before { content: @fa-var-times-circle; }
+.@{fa-css-prefix}-check-circle:before { content: @fa-var-check-circle; }
+.@{fa-css-prefix}-question-circle:before { content: @fa-var-question-circle; }
+.@{fa-css-prefix}-info-circle:before { content: @fa-var-info-circle; }
+.@{fa-css-prefix}-crosshairs:before { content: @fa-var-crosshairs; }
+.@{fa-css-prefix}-times-circle-o:before { content: @fa-var-times-circle-o; }
+.@{fa-css-prefix}-check-circle-o:before { content: @fa-var-check-circle-o; }
+.@{fa-css-prefix}-ban:before { content: @fa-var-ban; }
+.@{fa-css-prefix}-arrow-left:before { content: @fa-var-arrow-left; }
+.@{fa-css-prefix}-arrow-right:before { content: @fa-var-arrow-right; }
+.@{fa-css-prefix}-arrow-up:before { content: @fa-var-arrow-up; }
+.@{fa-css-prefix}-arrow-down:before { content: @fa-var-arrow-down; }
+.@{fa-css-prefix}-mail-forward:before,
+.@{fa-css-prefix}-share:before { content: @fa-var-share; }
+.@{fa-css-prefix}-expand:before { content: @fa-var-expand; }
+.@{fa-css-prefix}-compress:before { content: @fa-var-compress; }
+.@{fa-css-prefix}-plus:before { content: @fa-var-plus; }
+.@{fa-css-prefix}-minus:before { content: @fa-var-minus; }
+.@{fa-css-prefix}-asterisk:before { content: @fa-var-asterisk; }
+.@{fa-css-prefix}-exclamation-circle:before { content: @fa-var-exclamation-circle; }
+.@{fa-css-prefix}-gift:before { content: @fa-var-gift; }
+.@{fa-css-prefix}-leaf:before { content: @fa-var-leaf; }
+.@{fa-css-prefix}-fire:before { content: @fa-var-fire; }
+.@{fa-css-prefix}-eye:before { content: @fa-var-eye; }
+.@{fa-css-prefix}-eye-slash:before { content: @fa-var-eye-slash; }
+.@{fa-css-prefix}-warning:before,
+.@{fa-css-prefix}-exclamation-triangle:before { content: @fa-var-exclamation-triangle; }
+.@{fa-css-prefix}-plane:before { content: @fa-var-plane; }
+.@{fa-css-prefix}-calendar:before { content: @fa-var-calendar; }
+.@{fa-css-prefix}-random:before { content: @fa-var-random; }
+.@{fa-css-prefix}-comment:before { content: @fa-var-comment; }
+.@{fa-css-prefix}-magnet:before { content: @fa-var-magnet; }
+.@{fa-css-prefix}-chevron-up:before { content: @fa-var-chevron-up; }
+.@{fa-css-prefix}-chevron-down:before { content: @fa-var-chevron-down; }
+.@{fa-css-prefix}-retweet:before { content: @fa-var-retweet; }
+.@{fa-css-prefix}-shopping-cart:before { content: @fa-var-shopping-cart; }
+.@{fa-css-prefix}-folder:before { content: @fa-var-folder; }
+.@{fa-css-prefix}-folder-open:before { content: @fa-var-folder-open; }
+.@{fa-css-prefix}-arrows-v:before { content: @fa-var-arrows-v; }
+.@{fa-css-prefix}-arrows-h:before { content: @fa-var-arrows-h; }
+.@{fa-css-prefix}-bar-chart-o:before,
+.@{fa-css-prefix}-bar-chart:before { content: @fa-var-bar-chart; }
+.@{fa-css-prefix}-twitter-square:before { content: @fa-var-twitter-square; }
+.@{fa-css-prefix}-facebook-square:before { content: @fa-var-facebook-square; }
+.@{fa-css-prefix}-camera-retro:before { content: @fa-var-camera-retro; }
+.@{fa-css-prefix}-key:before { content: @fa-var-key; }
+.@{fa-css-prefix}-gears:before,
+.@{fa-css-prefix}-cogs:before { content: @fa-var-cogs; }
+.@{fa-css-prefix}-comments:before { content: @fa-var-comments; }
+.@{fa-css-prefix}-thumbs-o-up:before { content: @fa-var-thumbs-o-up; }
+.@{fa-css-prefix}-thumbs-o-down:before { content: @fa-var-thumbs-o-down; }
+.@{fa-css-prefix}-star-half:before { content: @fa-var-star-half; }
+.@{fa-css-prefix}-heart-o:before { content: @fa-var-heart-o; }
+.@{fa-css-prefix}-sign-out:before { content: @fa-var-sign-out; }
+.@{fa-css-prefix}-linkedin-square:before { content: @fa-var-linkedin-square; }
+.@{fa-css-prefix}-thumb-tack:before { content: @fa-var-thumb-tack; }
+.@{fa-css-prefix}-external-link:before { content: @fa-var-external-link; }
+.@{fa-css-prefix}-sign-in:before { content: @fa-var-sign-in; }
+.@{fa-css-prefix}-trophy:before { content: @fa-var-trophy; }
+.@{fa-css-prefix}-github-square:before { content: @fa-var-github-square; }
+.@{fa-css-prefix}-upload:before { content: @fa-var-upload; }
+.@{fa-css-prefix}-lemon-o:before { content: @fa-var-lemon-o; }
+.@{fa-css-prefix}-phone:before { content: @fa-var-phone; }
+.@{fa-css-prefix}-square-o:before { content: @fa-var-square-o; }
+.@{fa-css-prefix}-bookmark-o:before { content: @fa-var-bookmark-o; }
+.@{fa-css-prefix}-phone-square:before { content: @fa-var-phone-square; }
+.@{fa-css-prefix}-twitter:before { content: @fa-var-twitter; }
+.@{fa-css-prefix}-facebook:before { content: @fa-var-facebook; }
+.@{fa-css-prefix}-github:before { content: @fa-var-github; }
+.@{fa-css-prefix}-unlock:before { content: @fa-var-unlock; }
+.@{fa-css-prefix}-credit-card:before { content: @fa-var-credit-card; }
+.@{fa-css-prefix}-rss:before { content: @fa-var-rss; }
+.@{fa-css-prefix}-hdd-o:before { content: @fa-var-hdd-o; }
+.@{fa-css-prefix}-bullhorn:before { content: @fa-var-bullhorn; }
+.@{fa-css-prefix}-bell:before { content: @fa-var-bell; }
+.@{fa-css-prefix}-certificate:before { content: @fa-var-certificate; }
+.@{fa-css-prefix}-hand-o-right:before { content: @fa-var-hand-o-right; }
+.@{fa-css-prefix}-hand-o-left:before { content: @fa-var-hand-o-left; }
+.@{fa-css-prefix}-hand-o-up:before { content: @fa-var-hand-o-up; }
+.@{fa-css-prefix}-hand-o-down:before { content: @fa-var-hand-o-down; }
+.@{fa-css-prefix}-arrow-circle-left:before { content: @fa-var-arrow-circle-left; }
+.@{fa-css-prefix}-arrow-circle-right:before { content: @fa-var-arrow-circle-right; }
+.@{fa-css-prefix}-arrow-circle-up:before { content: @fa-var-arrow-circle-up; }
+.@{fa-css-prefix}-arrow-circle-down:before { content: @fa-var-arrow-circle-down; }
+.@{fa-css-prefix}-globe:before { content: @fa-var-globe; }
+.@{fa-css-prefix}-wrench:before { content: @fa-var-wrench; }
+.@{fa-css-prefix}-tasks:before { content: @fa-var-tasks; }
+.@{fa-css-prefix}-filter:before { content: @fa-var-filter; }
+.@{fa-css-prefix}-briefcase:before { content: @fa-var-briefcase; }
+.@{fa-css-prefix}-arrows-alt:before { content: @fa-var-arrows-alt; }
+.@{fa-css-prefix}-group:before,
+.@{fa-css-prefix}-users:before { content: @fa-var-users; }
+.@{fa-css-prefix}-chain:before,
+.@{fa-css-prefix}-link:before { content: @fa-var-link; }
+.@{fa-css-prefix}-cloud:before { content: @fa-var-cloud; }
+.@{fa-css-prefix}-flask:before { content: @fa-var-flask; }
+.@{fa-css-prefix}-cut:before,
+.@{fa-css-prefix}-scissors:before { content: @fa-var-scissors; }
+.@{fa-css-prefix}-copy:before,
+.@{fa-css-prefix}-files-o:before { content: @fa-var-files-o; }
+.@{fa-css-prefix}-paperclip:before { content: @fa-var-paperclip; }
+.@{fa-css-prefix}-save:before,
+.@{fa-css-prefix}-floppy-o:before { content: @fa-var-floppy-o; }
+.@{fa-css-prefix}-square:before { content: @fa-var-square; }
+.@{fa-css-prefix}-navicon:before,
+.@{fa-css-prefix}-reorder:before,
+.@{fa-css-prefix}-bars:before { content: @fa-var-bars; }
+.@{fa-css-prefix}-list-ul:before { content: @fa-var-list-ul; }
+.@{fa-css-prefix}-list-ol:before { content: @fa-var-list-ol; }
+.@{fa-css-prefix}-strikethrough:before { content: @fa-var-strikethrough; }
+.@{fa-css-prefix}-underline:before { content: @fa-var-underline; }
+.@{fa-css-prefix}-table:before { content: @fa-var-table; }
+.@{fa-css-prefix}-magic:before { content: @fa-var-magic; }
+.@{fa-css-prefix}-truck:before { content: @fa-var-truck; }
+.@{fa-css-prefix}-pinterest:before { content: @fa-var-pinterest; }
+.@{fa-css-prefix}-pinterest-square:before { content: @fa-var-pinterest-square; }
+.@{fa-css-prefix}-google-plus-square:before { content: @fa-var-google-plus-square; }
+.@{fa-css-prefix}-google-plus:before { content: @fa-var-google-plus; }
+.@{fa-css-prefix}-money:before { content: @fa-var-money; }
+.@{fa-css-prefix}-caret-down:before { content: @fa-var-caret-down; }
+.@{fa-css-prefix}-caret-up:before { content: @fa-var-caret-up; }
+.@{fa-css-prefix}-caret-left:before { content: @fa-var-caret-left; }
+.@{fa-css-prefix}-caret-right:before { content: @fa-var-caret-right; }
+.@{fa-css-prefix}-columns:before { content: @fa-var-columns; }
+.@{fa-css-prefix}-unsorted:before,
+.@{fa-css-prefix}-sort:before { content: @fa-var-sort; }
+.@{fa-css-prefix}-sort-down:before,
+.@{fa-css-prefix}-sort-desc:before { content: @fa-var-sort-desc; }
+.@{fa-css-prefix}-sort-up:before,
+.@{fa-css-prefix}-sort-asc:before { content: @fa-var-sort-asc; }
+.@{fa-css-prefix}-envelope:before { content: @fa-var-envelope; }
+.@{fa-css-prefix}-linkedin:before { content: @fa-var-linkedin; }
+.@{fa-css-prefix}-rotate-left:before,
+.@{fa-css-prefix}-undo:before { content: @fa-var-undo; }
+.@{fa-css-prefix}-legal:before,
+.@{fa-css-prefix}-gavel:before { content: @fa-var-gavel; }
+.@{fa-css-prefix}-dashboard:before,
+.@{fa-css-prefix}-tachometer:before { content: @fa-var-tachometer; }
+.@{fa-css-prefix}-comment-o:before { content: @fa-var-comment-o; }
+.@{fa-css-prefix}-comments-o:before { content: @fa-var-comments-o; }
+.@{fa-css-prefix}-flash:before,
+.@{fa-css-prefix}-bolt:before { content: @fa-var-bolt; }
+.@{fa-css-prefix}-sitemap:before { content: @fa-var-sitemap; }
+.@{fa-css-prefix}-umbrella:before { content: @fa-var-umbrella; }
+.@{fa-css-prefix}-paste:before,
+.@{fa-css-prefix}-clipboard:before { content: @fa-var-clipboard; }
+.@{fa-css-prefix}-lightbulb-o:before { content: @fa-var-lightbulb-o; }
+.@{fa-css-prefix}-exchange:before { content: @fa-var-exchange; }
+.@{fa-css-prefix}-cloud-download:before { content: @fa-var-cloud-download; }
+.@{fa-css-prefix}-cloud-upload:before { content: @fa-var-cloud-upload; }
+.@{fa-css-prefix}-user-md:before { content: @fa-var-user-md; }
+.@{fa-css-prefix}-stethoscope:before { content: @fa-var-stethoscope; }
+.@{fa-css-prefix}-suitcase:before { content: @fa-var-suitcase; }
+.@{fa-css-prefix}-bell-o:before { content: @fa-var-bell-o; }
+.@{fa-css-prefix}-coffee:before { content: @fa-var-coffee; }
+.@{fa-css-prefix}-cutlery:before { content: @fa-var-cutlery; }
+.@{fa-css-prefix}-file-text-o:before { content: @fa-var-file-text-o; }
+.@{fa-css-prefix}-building-o:before { content: @fa-var-building-o; }
+.@{fa-css-prefix}-hospital-o:before { content: @fa-var-hospital-o; }
+.@{fa-css-prefix}-ambulance:before { content: @fa-var-ambulance; }
+.@{fa-css-prefix}-medkit:before { content: @fa-var-medkit; }
+.@{fa-css-prefix}-fighter-jet:before { content: @fa-var-fighter-jet; }
+.@{fa-css-prefix}-beer:before { content: @fa-var-beer; }
+.@{fa-css-prefix}-h-square:before { content: @fa-var-h-square; }
+.@{fa-css-prefix}-plus-square:before { content: @fa-var-plus-square; }
+.@{fa-css-prefix}-angle-double-left:before { content: @fa-var-angle-double-left; }
+.@{fa-css-prefix}-angle-double-right:before { content: @fa-var-angle-double-right; }
+.@{fa-css-prefix}-angle-double-up:before { content: @fa-var-angle-double-up; }
+.@{fa-css-prefix}-angle-double-down:before { content: @fa-var-angle-double-down; }
+.@{fa-css-prefix}-angle-left:before { content: @fa-var-angle-left; }
+.@{fa-css-prefix}-angle-right:before { content: @fa-var-angle-right; }
+.@{fa-css-prefix}-angle-up:before { content: @fa-var-angle-up; }
+.@{fa-css-prefix}-angle-down:before { content: @fa-var-angle-down; }
+.@{fa-css-prefix}-desktop:before { content: @fa-var-desktop; }
+.@{fa-css-prefix}-laptop:before { content: @fa-var-laptop; }
+.@{fa-css-prefix}-tablet:before { content: @fa-var-tablet; }
+.@{fa-css-prefix}-mobile-phone:before,
+.@{fa-css-prefix}-mobile:before { content: @fa-var-mobile; }
+.@{fa-css-prefix}-circle-o:before { content: @fa-var-circle-o; }
+.@{fa-css-prefix}-quote-left:before { content: @fa-var-quote-left; }
+.@{fa-css-prefix}-quote-right:before { content: @fa-var-quote-right; }
+.@{fa-css-prefix}-spinner:before { content: @fa-var-spinner; }
+.@{fa-css-prefix}-circle:before { content: @fa-var-circle; }
+.@{fa-css-prefix}-mail-reply:before,
+.@{fa-css-prefix}-reply:before { content: @fa-var-reply; }
+.@{fa-css-prefix}-github-alt:before { content: @fa-var-github-alt; }
+.@{fa-css-prefix}-folder-o:before { content: @fa-var-folder-o; }
+.@{fa-css-prefix}-folder-open-o:before { content: @fa-var-folder-open-o; }
+.@{fa-css-prefix}-smile-o:before { content: @fa-var-smile-o; }
+.@{fa-css-prefix}-frown-o:before { content: @fa-var-frown-o; }
+.@{fa-css-prefix}-meh-o:before { content: @fa-var-meh-o; }
+.@{fa-css-prefix}-gamepad:before { content: @fa-var-gamepad; }
+.@{fa-css-prefix}-keyboard-o:before { content: @fa-var-keyboard-o; }
+.@{fa-css-prefix}-flag-o:before { content: @fa-var-flag-o; }
+.@{fa-css-prefix}-flag-checkered:before { content: @fa-var-flag-checkered; }
+.@{fa-css-prefix}-terminal:before { content: @fa-var-terminal; }
+.@{fa-css-prefix}-code:before { content: @fa-var-code; }
+.@{fa-css-prefix}-mail-reply-all:before,
+.@{fa-css-prefix}-reply-all:before { content: @fa-var-reply-all; }
+.@{fa-css-prefix}-star-half-empty:before,
+.@{fa-css-prefix}-star-half-full:before,
+.@{fa-css-prefix}-star-half-o:before { content: @fa-var-star-half-o; }
+.@{fa-css-prefix}-location-arrow:before { content: @fa-var-location-arrow; }
+.@{fa-css-prefix}-crop:before { content: @fa-var-crop; }
+.@{fa-css-prefix}-code-fork:before { content: @fa-var-code-fork; }
+.@{fa-css-prefix}-unlink:before,
+.@{fa-css-prefix}-chain-broken:before { content: @fa-var-chain-broken; }
+.@{fa-css-prefix}-question:before { content: @fa-var-question; }
+.@{fa-css-prefix}-info:before { content: @fa-var-info; }
+.@{fa-css-prefix}-exclamation:before { content: @fa-var-exclamation; }
+.@{fa-css-prefix}-superscript:before { content: @fa-var-superscript; }
+.@{fa-css-prefix}-subscript:before { content: @fa-var-subscript; }
+.@{fa-css-prefix}-eraser:before { content: @fa-var-eraser; }
+.@{fa-css-prefix}-puzzle-piece:before { content: @fa-var-puzzle-piece; }
+.@{fa-css-prefix}-microphone:before { content: @fa-var-microphone; }
+.@{fa-css-prefix}-microphone-slash:before { content: @fa-var-microphone-slash; }
+.@{fa-css-prefix}-shield:before { content: @fa-var-shield; }
+.@{fa-css-prefix}-calendar-o:before { content: @fa-var-calendar-o; }
+.@{fa-css-prefix}-fire-extinguisher:before { content: @fa-var-fire-extinguisher; }
+.@{fa-css-prefix}-rocket:before { content: @fa-var-rocket; }
+.@{fa-css-prefix}-maxcdn:before { content: @fa-var-maxcdn; }
+.@{fa-css-prefix}-chevron-circle-left:before { content: @fa-var-chevron-circle-left; }
+.@{fa-css-prefix}-chevron-circle-right:before { content: @fa-var-chevron-circle-right; }
+.@{fa-css-prefix}-chevron-circle-up:before { content: @fa-var-chevron-circle-up; }
+.@{fa-css-prefix}-chevron-circle-down:before { content: @fa-var-chevron-circle-down; }
+.@{fa-css-prefix}-html5:before { content: @fa-var-html5; }
+.@{fa-css-prefix}-css3:before { content: @fa-var-css3; }
+.@{fa-css-prefix}-anchor:before { content: @fa-var-anchor; }
+.@{fa-css-prefix}-unlock-alt:before { content: @fa-var-unlock-alt; }
+.@{fa-css-prefix}-bullseye:before { content: @fa-var-bullseye; }
+.@{fa-css-prefix}-ellipsis-h:before { content: @fa-var-ellipsis-h; }
+.@{fa-css-prefix}-ellipsis-v:before { content: @fa-var-ellipsis-v; }
+.@{fa-css-prefix}-rss-square:before { content: @fa-var-rss-square; }
+.@{fa-css-prefix}-play-circle:before { content: @fa-var-play-circle; }
+.@{fa-css-prefix}-ticket:before { content: @fa-var-ticket; }
+.@{fa-css-prefix}-minus-square:before { content: @fa-var-minus-square; }
+.@{fa-css-prefix}-minus-square-o:before { content: @fa-var-minus-square-o; }
+.@{fa-css-prefix}-level-up:before { content: @fa-var-level-up; }
+.@{fa-css-prefix}-level-down:before { content: @fa-var-level-down; }
+.@{fa-css-prefix}-check-square:before { content: @fa-var-check-square; }
+.@{fa-css-prefix}-pencil-square:before { content: @fa-var-pencil-square; }
+.@{fa-css-prefix}-external-link-square:before { content: @fa-var-external-link-square; }
+.@{fa-css-prefix}-share-square:before { content: @fa-var-share-square; }
+.@{fa-css-prefix}-compass:before { content: @fa-var-compass; }
+.@{fa-css-prefix}-toggle-down:before,
+.@{fa-css-prefix}-caret-square-o-down:before { content: @fa-var-caret-square-o-down; }
+.@{fa-css-prefix}-toggle-up:before,
+.@{fa-css-prefix}-caret-square-o-up:before { content: @fa-var-caret-square-o-up; }
+.@{fa-css-prefix}-toggle-right:before,
+.@{fa-css-prefix}-caret-square-o-right:before { content: @fa-var-caret-square-o-right; }
+.@{fa-css-prefix}-euro:before,
+.@{fa-css-prefix}-eur:before { content: @fa-var-eur; }
+.@{fa-css-prefix}-gbp:before { content: @fa-var-gbp; }
+.@{fa-css-prefix}-dollar:before,
+.@{fa-css-prefix}-usd:before { content: @fa-var-usd; }
+.@{fa-css-prefix}-rupee:before,
+.@{fa-css-prefix}-inr:before { content: @fa-var-inr; }
+.@{fa-css-prefix}-cny:before,
+.@{fa-css-prefix}-rmb:before,
+.@{fa-css-prefix}-yen:before,
+.@{fa-css-prefix}-jpy:before { content: @fa-var-jpy; }
+.@{fa-css-prefix}-ruble:before,
+.@{fa-css-prefix}-rouble:before,
+.@{fa-css-prefix}-rub:before { content: @fa-var-rub; }
+.@{fa-css-prefix}-won:before,
+.@{fa-css-prefix}-krw:before { content: @fa-var-krw; }
+.@{fa-css-prefix}-bitcoin:before,
+.@{fa-css-prefix}-btc:before { content: @fa-var-btc; }
+.@{fa-css-prefix}-file:before { content: @fa-var-file; }
+.@{fa-css-prefix}-file-text:before { content: @fa-var-file-text; }
+.@{fa-css-prefix}-sort-alpha-asc:before { content: @fa-var-sort-alpha-asc; }
+.@{fa-css-prefix}-sort-alpha-desc:before { content: @fa-var-sort-alpha-desc; }
+.@{fa-css-prefix}-sort-amount-asc:before { content: @fa-var-sort-amount-asc; }
+.@{fa-css-prefix}-sort-amount-desc:before { content: @fa-var-sort-amount-desc; }
+.@{fa-css-prefix}-sort-numeric-asc:before { content: @fa-var-sort-numeric-asc; }
+.@{fa-css-prefix}-sort-numeric-desc:before { content: @fa-var-sort-numeric-desc; }
+.@{fa-css-prefix}-thumbs-up:before { content: @fa-var-thumbs-up; }
+.@{fa-css-prefix}-thumbs-down:before { content: @fa-var-thumbs-down; }
+.@{fa-css-prefix}-youtube-square:before { content: @fa-var-youtube-square; }
+.@{fa-css-prefix}-youtube:before { content: @fa-var-youtube; }
+.@{fa-css-prefix}-xing:before { content: @fa-var-xing; }
+.@{fa-css-prefix}-xing-square:before { content: @fa-var-xing-square; }
+.@{fa-css-prefix}-youtube-play:before { content: @fa-var-youtube-play; }
+.@{fa-css-prefix}-dropbox:before { content: @fa-var-dropbox; }
+.@{fa-css-prefix}-stack-overflow:before { content: @fa-var-stack-overflow; }
+.@{fa-css-prefix}-instagram:before { content: @fa-var-instagram; }
+.@{fa-css-prefix}-flickr:before { content: @fa-var-flickr; }
+.@{fa-css-prefix}-adn:before { content: @fa-var-adn; }
+.@{fa-css-prefix}-bitbucket:before { content: @fa-var-bitbucket; }
+.@{fa-css-prefix}-bitbucket-square:before { content: @fa-var-bitbucket-square; }
+.@{fa-css-prefix}-tumblr:before { content: @fa-var-tumblr; }
+.@{fa-css-prefix}-tumblr-square:before { content: @fa-var-tumblr-square; }
+.@{fa-css-prefix}-long-arrow-down:before { content: @fa-var-long-arrow-down; }
+.@{fa-css-prefix}-long-arrow-up:before { content: @fa-var-long-arrow-up; }
+.@{fa-css-prefix}-long-arrow-left:before { content: @fa-var-long-arrow-left; }
+.@{fa-css-prefix}-long-arrow-right:before { content: @fa-var-long-arrow-right; }
+.@{fa-css-prefix}-apple:before { content: @fa-var-apple; }
+.@{fa-css-prefix}-windows:before { content: @fa-var-windows; }
+.@{fa-css-prefix}-android:before { content: @fa-var-android; }
+.@{fa-css-prefix}-linux:before { content: @fa-var-linux; }
+.@{fa-css-prefix}-dribbble:before { content: @fa-var-dribbble; }
+.@{fa-css-prefix}-skype:before { content: @fa-var-skype; }
+.@{fa-css-prefix}-foursquare:before { content: @fa-var-foursquare; }
+.@{fa-css-prefix}-trello:before { content: @fa-var-trello; }
+.@{fa-css-prefix}-female:before { content: @fa-var-female; }
+.@{fa-css-prefix}-male:before { content: @fa-var-male; }
+.@{fa-css-prefix}-gittip:before { content: @fa-var-gittip; }
+.@{fa-css-prefix}-sun-o:before { content: @fa-var-sun-o; }
+.@{fa-css-prefix}-moon-o:before { content: @fa-var-moon-o; }
+.@{fa-css-prefix}-archive:before { content: @fa-var-archive; }
+.@{fa-css-prefix}-bug:before { content: @fa-var-bug; }
+.@{fa-css-prefix}-vk:before { content: @fa-var-vk; }
+.@{fa-css-prefix}-weibo:before { content: @fa-var-weibo; }
+.@{fa-css-prefix}-renren:before { content: @fa-var-renren; }
+.@{fa-css-prefix}-pagelines:before { content: @fa-var-pagelines; }
+.@{fa-css-prefix}-stack-exchange:before { content: @fa-var-stack-exchange; }
+.@{fa-css-prefix}-arrow-circle-o-right:before { content: @fa-var-arrow-circle-o-right; }
+.@{fa-css-prefix}-arrow-circle-o-left:before { content: @fa-var-arrow-circle-o-left; }
+.@{fa-css-prefix}-toggle-left:before,
+.@{fa-css-prefix}-caret-square-o-left:before { content: @fa-var-caret-square-o-left; }
+.@{fa-css-prefix}-dot-circle-o:before { content: @fa-var-dot-circle-o; }
+.@{fa-css-prefix}-wheelchair:before { content: @fa-var-wheelchair; }
+.@{fa-css-prefix}-vimeo-square:before { content: @fa-var-vimeo-square; }
+.@{fa-css-prefix}-turkish-lira:before,
+.@{fa-css-prefix}-try:before { content: @fa-var-try; }
+.@{fa-css-prefix}-plus-square-o:before { content: @fa-var-plus-square-o; }
+.@{fa-css-prefix}-space-shuttle:before { content: @fa-var-space-shuttle; }
+.@{fa-css-prefix}-slack:before { content: @fa-var-slack; }
+.@{fa-css-prefix}-envelope-square:before { content: @fa-var-envelope-square; }
+.@{fa-css-prefix}-wordpress:before { content: @fa-var-wordpress; }
+.@{fa-css-prefix}-openid:before { content: @fa-var-openid; }
+.@{fa-css-prefix}-institution:before,
+.@{fa-css-prefix}-bank:before,
+.@{fa-css-prefix}-university:before { content: @fa-var-university; }
+.@{fa-css-prefix}-mortar-board:before,
+.@{fa-css-prefix}-graduation-cap:before { content: @fa-var-graduation-cap; }
+.@{fa-css-prefix}-yahoo:before { content: @fa-var-yahoo; }
+.@{fa-css-prefix}-google:before { content: @fa-var-google; }
+.@{fa-css-prefix}-reddit:before { content: @fa-var-reddit; }
+.@{fa-css-prefix}-reddit-square:before { content: @fa-var-reddit-square; }
+.@{fa-css-prefix}-stumbleupon-circle:before { content: @fa-var-stumbleupon-circle; }
+.@{fa-css-prefix}-stumbleupon:before { content: @fa-var-stumbleupon; }
+.@{fa-css-prefix}-delicious:before { content: @fa-var-delicious; }
+.@{fa-css-prefix}-digg:before { content: @fa-var-digg; }
+.@{fa-css-prefix}-pied-piper:before { content: @fa-var-pied-piper; }
+.@{fa-css-prefix}-pied-piper-alt:before { content: @fa-var-pied-piper-alt; }
+.@{fa-css-prefix}-drupal:before { content: @fa-var-drupal; }
+.@{fa-css-prefix}-joomla:before { content: @fa-var-joomla; }
+.@{fa-css-prefix}-language:before { content: @fa-var-language; }
+.@{fa-css-prefix}-fax:before { content: @fa-var-fax; }
+.@{fa-css-prefix}-building:before { content: @fa-var-building; }
+.@{fa-css-prefix}-child:before { content: @fa-var-child; }
+.@{fa-css-prefix}-paw:before { content: @fa-var-paw; }
+.@{fa-css-prefix}-spoon:before { content: @fa-var-spoon; }
+.@{fa-css-prefix}-cube:before { content: @fa-var-cube; }
+.@{fa-css-prefix}-cubes:before { content: @fa-var-cubes; }
+.@{fa-css-prefix}-behance:before { content: @fa-var-behance; }
+.@{fa-css-prefix}-behance-square:before { content: @fa-var-behance-square; }
+.@{fa-css-prefix}-steam:before { content: @fa-var-steam; }
+.@{fa-css-prefix}-steam-square:before { content: @fa-var-steam-square; }
+.@{fa-css-prefix}-recycle:before { content: @fa-var-recycle; }
+.@{fa-css-prefix}-automobile:before,
+.@{fa-css-prefix}-car:before { content: @fa-var-car; }
+.@{fa-css-prefix}-cab:before,
+.@{fa-css-prefix}-taxi:before { content: @fa-var-taxi; }
+.@{fa-css-prefix}-tree:before { content: @fa-var-tree; }
+.@{fa-css-prefix}-spotify:before { content: @fa-var-spotify; }
+.@{fa-css-prefix}-deviantart:before { content: @fa-var-deviantart; }
+.@{fa-css-prefix}-soundcloud:before { content: @fa-var-soundcloud; }
+.@{fa-css-prefix}-database:before { content: @fa-var-database; }
+.@{fa-css-prefix}-file-pdf-o:before { content: @fa-var-file-pdf-o; }
+.@{fa-css-prefix}-file-word-o:before { content: @fa-var-file-word-o; }
+.@{fa-css-prefix}-file-excel-o:before { content: @fa-var-file-excel-o; }
+.@{fa-css-prefix}-file-powerpoint-o:before { content: @fa-var-file-powerpoint-o; }
+.@{fa-css-prefix}-file-photo-o:before,
+.@{fa-css-prefix}-file-picture-o:before,
+.@{fa-css-prefix}-file-image-o:before { content: @fa-var-file-image-o; }
+.@{fa-css-prefix}-file-zip-o:before,
+.@{fa-css-prefix}-file-archive-o:before { content: @fa-var-file-archive-o; }
+.@{fa-css-prefix}-file-sound-o:before,
+.@{fa-css-prefix}-file-audio-o:before { content: @fa-var-file-audio-o; }
+.@{fa-css-prefix}-file-movie-o:before,
+.@{fa-css-prefix}-file-video-o:before { content: @fa-var-file-video-o; }
+.@{fa-css-prefix}-file-code-o:before { content: @fa-var-file-code-o; }
+.@{fa-css-prefix}-vine:before { content: @fa-var-vine; }
+.@{fa-css-prefix}-codepen:before { content: @fa-var-codepen; }
+.@{fa-css-prefix}-jsfiddle:before { content: @fa-var-jsfiddle; }
+.@{fa-css-prefix}-life-bouy:before,
+.@{fa-css-prefix}-life-buoy:before,
+.@{fa-css-prefix}-life-saver:before,
+.@{fa-css-prefix}-support:before,
+.@{fa-css-prefix}-life-ring:before { content: @fa-var-life-ring; }
+.@{fa-css-prefix}-circle-o-notch:before { content: @fa-var-circle-o-notch; }
+.@{fa-css-prefix}-ra:before,
+.@{fa-css-prefix}-rebel:before { content: @fa-var-rebel; }
+.@{fa-css-prefix}-ge:before,
+.@{fa-css-prefix}-empire:before { content: @fa-var-empire; }
+.@{fa-css-prefix}-git-square:before { content: @fa-var-git-square; }
+.@{fa-css-prefix}-git:before { content: @fa-var-git; }
+.@{fa-css-prefix}-hacker-news:before { content: @fa-var-hacker-news; }
+.@{fa-css-prefix}-tencent-weibo:before { content: @fa-var-tencent-weibo; }
+.@{fa-css-prefix}-qq:before { content: @fa-var-qq; }
+.@{fa-css-prefix}-wechat:before,
+.@{fa-css-prefix}-weixin:before { content: @fa-var-weixin; }
+.@{fa-css-prefix}-send:before,
+.@{fa-css-prefix}-paper-plane:before { content: @fa-var-paper-plane; }
+.@{fa-css-prefix}-send-o:before,
+.@{fa-css-prefix}-paper-plane-o:before { content: @fa-var-paper-plane-o; }
+.@{fa-css-prefix}-history:before { content: @fa-var-history; }
+.@{fa-css-prefix}-circle-thin:before { content: @fa-var-circle-thin; }
+.@{fa-css-prefix}-header:before { content: @fa-var-header; }
+.@{fa-css-prefix}-paragraph:before { content: @fa-var-paragraph; }
+.@{fa-css-prefix}-sliders:before { content: @fa-var-sliders; }
+.@{fa-css-prefix}-share-alt:before { content: @fa-var-share-alt; }
+.@{fa-css-prefix}-share-alt-square:before { content: @fa-var-share-alt-square; }
+.@{fa-css-prefix}-bomb:before { content: @fa-var-bomb; }
+.@{fa-css-prefix}-soccer-ball-o:before,
+.@{fa-css-prefix}-futbol-o:before { content: @fa-var-futbol-o; }
+.@{fa-css-prefix}-tty:before { content: @fa-var-tty; }
+.@{fa-css-prefix}-binoculars:before { content: @fa-var-binoculars; }
+.@{fa-css-prefix}-plug:before { content: @fa-var-plug; }
+.@{fa-css-prefix}-slideshare:before { content: @fa-var-slideshare; }
+.@{fa-css-prefix}-twitch:before { content: @fa-var-twitch; }
+.@{fa-css-prefix}-yelp:before { content: @fa-var-yelp; }
+.@{fa-css-prefix}-newspaper-o:before { content: @fa-var-newspaper-o; }
+.@{fa-css-prefix}-wifi:before { content: @fa-var-wifi; }
+.@{fa-css-prefix}-calculator:before { content: @fa-var-calculator; }
+.@{fa-css-prefix}-paypal:before { content: @fa-var-paypal; }
+.@{fa-css-prefix}-google-wallet:before { content: @fa-var-google-wallet; }
+.@{fa-css-prefix}-cc-visa:before { content: @fa-var-cc-visa; }
+.@{fa-css-prefix}-cc-mastercard:before { content: @fa-var-cc-mastercard; }
+.@{fa-css-prefix}-cc-discover:before { content: @fa-var-cc-discover; }
+.@{fa-css-prefix}-cc-amex:before { content: @fa-var-cc-amex; }
+.@{fa-css-prefix}-cc-paypal:before { content: @fa-var-cc-paypal; }
+.@{fa-css-prefix}-cc-stripe:before { content: @fa-var-cc-stripe; }
+.@{fa-css-prefix}-bell-slash:before { content: @fa-var-bell-slash; }
+.@{fa-css-prefix}-bell-slash-o:before { content: @fa-var-bell-slash-o; }
+.@{fa-css-prefix}-trash:before { content: @fa-var-trash; }
+.@{fa-css-prefix}-copyright:before { content: @fa-var-copyright; }
+.@{fa-css-prefix}-at:before { content: @fa-var-at; }
+.@{fa-css-prefix}-eyedropper:before { content: @fa-var-eyedropper; }
+.@{fa-css-prefix}-paint-brush:before { content: @fa-var-paint-brush; }
+.@{fa-css-prefix}-birthday-cake:before { content: @fa-var-birthday-cake; }
+.@{fa-css-prefix}-area-chart:before { content: @fa-var-area-chart; }
+.@{fa-css-prefix}-pie-chart:before { content: @fa-var-pie-chart; }
+.@{fa-css-prefix}-line-chart:before { content: @fa-var-line-chart; }
+.@{fa-css-prefix}-lastfm:before { content: @fa-var-lastfm; }
+.@{fa-css-prefix}-lastfm-square:before { content: @fa-var-lastfm-square; }
+.@{fa-css-prefix}-toggle-off:before { content: @fa-var-toggle-off; }
+.@{fa-css-prefix}-toggle-on:before { content: @fa-var-toggle-on; }
+.@{fa-css-prefix}-bicycle:before { content: @fa-var-bicycle; }
+.@{fa-css-prefix}-bus:before { content: @fa-var-bus; }
+.@{fa-css-prefix}-ioxhost:before { content: @fa-var-ioxhost; }
+.@{fa-css-prefix}-angellist:before { content: @fa-var-angellist; }
+.@{fa-css-prefix}-cc:before { content: @fa-var-cc; }
+.@{fa-css-prefix}-shekel:before,
+.@{fa-css-prefix}-sheqel:before,
+.@{fa-css-prefix}-ils:before { content: @fa-var-ils; }
+.@{fa-css-prefix}-meanpath:before { content: @fa-var-meanpath; }
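
Every rule above follows one pattern: pair a class name generated from @fa-css-prefix with a code-point variable declared in variables.less (added later in this commit). A custom icon defined the same way would look like the following sketch; the name and code point are illustrative and not part of Font Awesome 4.2.0:

    @fa-var-my-icon: "\f500";   // hypothetical PUA code point
    .@{fa-css-prefix}-my-icon:before { content: @fa-var-my-icon; }
    // with the default prefix "fa" this compiles to:
    //   .fa-my-icon:before { content: "\f500"; }
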
diff --git a/client/galaxy/style/less/fontawesome/larger.less b/client/galaxy/style/less/fontawesome/larger.less
new file mode 100644
index 0000000..c9d6467
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/larger.less
@@ -0,0 +1,13 @@
+// Icon Sizes
+// -------------------------
+
+/* makes the font 33% larger relative to the icon container */
+.@{fa-css-prefix}-lg {
+  font-size: (4em / 3);
+  line-height: (3em / 4);
+  vertical-align: -15%;
+}
+.@{fa-css-prefix}-2x { font-size: 2em; }
+.@{fa-css-prefix}-3x { font-size: 3em; }
+.@{fa-css-prefix}-4x { font-size: 4em; }
+.@{fa-css-prefix}-5x { font-size: 5em; }
diff --git a/client/galaxy/style/less/fontawesome/list.less b/client/galaxy/style/less/fontawesome/list.less
new file mode 100644
index 0000000..0b44038
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/list.less
@@ -0,0 +1,19 @@
+// List Icons
+// -------------------------
+
+.@{fa-css-prefix}-ul {
+  padding-left: 0;
+  margin-left: @fa-li-width;
+  list-style-type: none;
+  > li { position: relative; }
+}
+.@{fa-css-prefix}-li {
+  position: absolute;
+  left: -@fa-li-width;
+  width: @fa-li-width;
+  top: (2em / 14);
+  text-align: center;
+  &.@{fa-css-prefix}-lg {
+    left: (-@fa-li-width + (4em / 14));
+  }
+}
diff --git a/client/galaxy/style/less/fontawesome/mixins.less b/client/galaxy/style/less/fontawesome/mixins.less
new file mode 100644
index 0000000..b7bfadc
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/mixins.less
@@ -0,0 +1,25 @@
+// Mixins
+// --------------------------
+
+.fa-icon() {
+  display: inline-block;
+  font: normal normal normal 14px/1 FontAwesome; // shortening font declaration
+  font-size: inherit; // can't have font-size inherit on line above, so need to override
+  text-rendering: auto; // optimizelegibility throws things off #1094
+  -webkit-font-smoothing: antialiased;
+  -moz-osx-font-smoothing: grayscale;
+}
+
+.fa-icon-rotate(@degrees, @rotation) {
+  filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=@rotation);
+  -webkit-transform: rotate(@degrees);
+      -ms-transform: rotate(@degrees);
+          transform: rotate(@degrees);
+}
+
+.fa-icon-flip(@horiz, @vert, @rotation) {
+  filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=@rotation, mirror=1);
+  -webkit-transform: scale(@horiz, @vert);
+      -ms-transform: scale(@horiz, @vert);
+          transform: scale(@horiz, @vert);
+}
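
These rotate/flip mixins are consumed by rotated-flipped.less below: each emits the prefixed and unprefixed CSS transforms plus the legacy IE8 BasicImage filter. Note the filter argument only accepts quarter turns (rotation=0..3), so a non-right-angle variant such as this sketch (class name illustrative, not part of this commit) rotates only in browsers that support transforms:

    .@{fa-css-prefix}-rotate-45 { .fa-icon-rotate(45deg, 0); }
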
diff --git a/client/galaxy/style/less/fontawesome/path.less b/client/galaxy/style/less/fontawesome/path.less
new file mode 100644
index 0000000..c5a6912
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/path.less
@@ -0,0 +1,14 @@
+/* FONT PATH
+ * -------------------------- */
+
+@font-face {
+  font-family: 'FontAwesome';
+  src: url('@{fa-font-path}/fontawesome-webfont.eot?v=@{fa-version}');
+  src: url('@{fa-font-path}/fontawesome-webfont.eot?#iefix&v=@{fa-version}') format('embedded-opentype'),
+    url('@{fa-font-path}/fontawesome-webfont.woff?v=@{fa-version}') format('woff'),
+    url('@{fa-font-path}/fontawesome-webfont.ttf?v=@{fa-version}') format('truetype'),
+    url('@{fa-font-path}/fontawesome-webfont.svg?v=@{fa-version}#fontawesomeregular') format('svg');
+//  src: url('@{fa-font-path}/FontAwesome.otf') format('opentype'); // used when developing fonts
+  font-weight: normal;
+  font-style: normal;
+}
diff --git a/client/galaxy/style/less/fontawesome/rotated-flipped.less b/client/galaxy/style/less/fontawesome/rotated-flipped.less
new file mode 100644
index 0000000..f6ba814
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/rotated-flipped.less
@@ -0,0 +1,20 @@
+// Rotated & Flipped Icons
+// -------------------------
+
+.@{fa-css-prefix}-rotate-90  { .fa-icon-rotate(90deg, 1);  }
+.@{fa-css-prefix}-rotate-180 { .fa-icon-rotate(180deg, 2); }
+.@{fa-css-prefix}-rotate-270 { .fa-icon-rotate(270deg, 3); }
+
+.@{fa-css-prefix}-flip-horizontal { .fa-icon-flip(-1, 1, 0); }
+.@{fa-css-prefix}-flip-vertical   { .fa-icon-flip(1, -1, 2); }
+
+// Hook for IE8-9
+// -------------------------
+
+:root .@{fa-css-prefix}-rotate-90,
+:root .@{fa-css-prefix}-rotate-180,
+:root .@{fa-css-prefix}-rotate-270,
+:root .@{fa-css-prefix}-flip-horizontal,
+:root .@{fa-css-prefix}-flip-vertical {
+  filter: none;
+}
diff --git a/client/galaxy/style/less/fontawesome/spinning.less b/client/galaxy/style/less/fontawesome/spinning.less
new file mode 100644
index 0000000..6e1564e
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/spinning.less
@@ -0,0 +1,29 @@
+// Spinning Icons
+// --------------------------
+
+.@{fa-css-prefix}-spin {
+  -webkit-animation: fa-spin 2s infinite linear;
+          animation: fa-spin 2s infinite linear;
+}
+
+@-webkit-keyframes fa-spin {
+  0% {
+    -webkit-transform: rotate(0deg);
+            transform: rotate(0deg);
+  }
+  100% {
+    -webkit-transform: rotate(359deg);
+            transform: rotate(359deg);
+  }
+}
+
+@keyframes fa-spin {
+  0% {
+    -webkit-transform: rotate(0deg);
+            transform: rotate(0deg);
+  }
+  100% {
+    -webkit-transform: rotate(359deg);
+            transform: rotate(359deg);
+  }
+}
diff --git a/client/galaxy/style/less/fontawesome/stacked.less b/client/galaxy/style/less/fontawesome/stacked.less
new file mode 100644
index 0000000..fc53fb0
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/stacked.less
@@ -0,0 +1,20 @@
+// Stacked Icons
+// -------------------------
+
+.@{fa-css-prefix}-stack {
+  position: relative;
+  display: inline-block;
+  width: 2em;
+  height: 2em;
+  line-height: 2em;
+  vertical-align: middle;
+}
+.@{fa-css-prefix}-stack-1x, .@{fa-css-prefix}-stack-2x {
+  position: absolute;
+  left: 0;
+  width: 100%;
+  text-align: center;
+}
+.@{fa-css-prefix}-stack-1x { line-height: inherit; }
+.@{fa-css-prefix}-stack-2x { font-size: 2em; }
+.@{fa-css-prefix}-inverse { color: @fa-inverse; }
diff --git a/client/galaxy/style/less/fontawesome/variables.less b/client/galaxy/style/less/fontawesome/variables.less
new file mode 100644
index 0000000..a2f345d
--- /dev/null
+++ b/client/galaxy/style/less/fontawesome/variables.less
@@ -0,0 +1,561 @@
+// Variables
+// --------------------------
+
+@fa-font-path:        "../images/fonts";
+//@fa-font-path:        "//netdna.bootstrapcdn.com/font-awesome/4.2.0/fonts"; // for referencing Bootstrap CDN font files directly
+@fa-css-prefix:       fa;
+@fa-version:          "4.2.0";
+@fa-border-color:     #eee;
+@fa-inverse:          #fff;
+@fa-li-width:         (30em / 14);
+
+@fa-var-adjust: "\f042";
+@fa-var-adn: "\f170";
+@fa-var-align-center: "\f037";
+@fa-var-align-justify: "\f039";
+@fa-var-align-left: "\f036";
+@fa-var-align-right: "\f038";
+@fa-var-ambulance: "\f0f9";
+@fa-var-anchor: "\f13d";
+@fa-var-android: "\f17b";
+@fa-var-angellist: "\f209";
+@fa-var-angle-double-down: "\f103";
+@fa-var-angle-double-left: "\f100";
+@fa-var-angle-double-right: "\f101";
+@fa-var-angle-double-up: "\f102";
+@fa-var-angle-down: "\f107";
+@fa-var-angle-left: "\f104";
+@fa-var-angle-right: "\f105";
+@fa-var-angle-up: "\f106";
+@fa-var-apple: "\f179";
+@fa-var-archive: "\f187";
+@fa-var-area-chart: "\f1fe";
+@fa-var-arrow-circle-down: "\f0ab";
+@fa-var-arrow-circle-left: "\f0a8";
+@fa-var-arrow-circle-o-down: "\f01a";
+@fa-var-arrow-circle-o-left: "\f190";
+@fa-var-arrow-circle-o-right: "\f18e";
+@fa-var-arrow-circle-o-up: "\f01b";
+@fa-var-arrow-circle-right: "\f0a9";
+@fa-var-arrow-circle-up: "\f0aa";
+@fa-var-arrow-down: "\f063";
+@fa-var-arrow-left: "\f060";
+@fa-var-arrow-right: "\f061";
+@fa-var-arrow-up: "\f062";
+@fa-var-arrows: "\f047";
+@fa-var-arrows-alt: "\f0b2";
+@fa-var-arrows-h: "\f07e";
+@fa-var-arrows-v: "\f07d";
+@fa-var-asterisk: "\f069";
+@fa-var-at: "\f1fa";
+@fa-var-automobile: "\f1b9";
+@fa-var-backward: "\f04a";
+@fa-var-ban: "\f05e";
+@fa-var-bank: "\f19c";
+@fa-var-bar-chart: "\f080";
+@fa-var-bar-chart-o: "\f080";
+@fa-var-barcode: "\f02a";
+@fa-var-bars: "\f0c9";
+@fa-var-beer: "\f0fc";
+@fa-var-behance: "\f1b4";
+@fa-var-behance-square: "\f1b5";
+@fa-var-bell: "\f0f3";
+@fa-var-bell-o: "\f0a2";
+@fa-var-bell-slash: "\f1f6";
+@fa-var-bell-slash-o: "\f1f7";
+@fa-var-bicycle: "\f206";
+@fa-var-binoculars: "\f1e5";
+@fa-var-birthday-cake: "\f1fd";
+@fa-var-bitbucket: "\f171";
+@fa-var-bitbucket-square: "\f172";
+@fa-var-bitcoin: "\f15a";
+@fa-var-bold: "\f032";
+@fa-var-bolt: "\f0e7";
+@fa-var-bomb: "\f1e2";
+@fa-var-book: "\f02d";
+@fa-var-bookmark: "\f02e";
+@fa-var-bookmark-o: "\f097";
+@fa-var-briefcase: "\f0b1";
+@fa-var-btc: "\f15a";
+@fa-var-bug: "\f188";
+@fa-var-building: "\f1ad";
+@fa-var-building-o: "\f0f7";
+@fa-var-bullhorn: "\f0a1";
+@fa-var-bullseye: "\f140";
+@fa-var-bus: "\f207";
+@fa-var-cab: "\f1ba";
+@fa-var-calculator: "\f1ec";
+@fa-var-calendar: "\f073";
+@fa-var-calendar-o: "\f133";
+@fa-var-camera: "\f030";
+@fa-var-camera-retro: "\f083";
+@fa-var-car: "\f1b9";
+@fa-var-caret-down: "\f0d7";
+@fa-var-caret-left: "\f0d9";
+@fa-var-caret-right: "\f0da";
+@fa-var-caret-square-o-down: "\f150";
+@fa-var-caret-square-o-left: "\f191";
+@fa-var-caret-square-o-right: "\f152";
+@fa-var-caret-square-o-up: "\f151";
+@fa-var-caret-up: "\f0d8";
+@fa-var-cc: "\f20a";
+@fa-var-cc-amex: "\f1f3";
+@fa-var-cc-discover: "\f1f2";
+@fa-var-cc-mastercard: "\f1f1";
+@fa-var-cc-paypal: "\f1f4";
+@fa-var-cc-stripe: "\f1f5";
+@fa-var-cc-visa: "\f1f0";
+@fa-var-certificate: "\f0a3";
+@fa-var-chain: "\f0c1";
+@fa-var-chain-broken: "\f127";
+@fa-var-check: "\f00c";
+@fa-var-check-circle: "\f058";
+@fa-var-check-circle-o: "\f05d";
+@fa-var-check-square: "\f14a";
+@fa-var-check-square-o: "\f046";
+@fa-var-chevron-circle-down: "\f13a";
+@fa-var-chevron-circle-left: "\f137";
+@fa-var-chevron-circle-right: "\f138";
+@fa-var-chevron-circle-up: "\f139";
+@fa-var-chevron-down: "\f078";
+@fa-var-chevron-left: "\f053";
+@fa-var-chevron-right: "\f054";
+@fa-var-chevron-up: "\f077";
+@fa-var-child: "\f1ae";
+@fa-var-circle: "\f111";
+@fa-var-circle-o: "\f10c";
+@fa-var-circle-o-notch: "\f1ce";
+@fa-var-circle-thin: "\f1db";
+@fa-var-clipboard: "\f0ea";
+@fa-var-clock-o: "\f017";
+@fa-var-close: "\f00d";
+@fa-var-cloud: "\f0c2";
+@fa-var-cloud-download: "\f0ed";
+@fa-var-cloud-upload: "\f0ee";
+@fa-var-cny: "\f157";
+@fa-var-code: "\f121";
+@fa-var-code-fork: "\f126";
+@fa-var-codepen: "\f1cb";
+@fa-var-coffee: "\f0f4";
+@fa-var-cog: "\f013";
+@fa-var-cogs: "\f085";
+@fa-var-columns: "\f0db";
+@fa-var-comment: "\f075";
+@fa-var-comment-o: "\f0e5";
+@fa-var-comments: "\f086";
+@fa-var-comments-o: "\f0e6";
+@fa-var-compass: "\f14e";
+@fa-var-compress: "\f066";
+@fa-var-copy: "\f0c5";
+@fa-var-copyright: "\f1f9";
+@fa-var-credit-card: "\f09d";
+@fa-var-crop: "\f125";
+@fa-var-crosshairs: "\f05b";
+@fa-var-css3: "\f13c";
+@fa-var-cube: "\f1b2";
+@fa-var-cubes: "\f1b3";
+@fa-var-cut: "\f0c4";
+@fa-var-cutlery: "\f0f5";
+@fa-var-dashboard: "\f0e4";
+@fa-var-database: "\f1c0";
+@fa-var-dedent: "\f03b";
+@fa-var-delicious: "\f1a5";
+@fa-var-desktop: "\f108";
+@fa-var-deviantart: "\f1bd";
+@fa-var-digg: "\f1a6";
+@fa-var-dollar: "\f155";
+@fa-var-dot-circle-o: "\f192";
+@fa-var-download: "\f019";
+@fa-var-dribbble: "\f17d";
+@fa-var-dropbox: "\f16b";
+@fa-var-drupal: "\f1a9";
+@fa-var-edit: "\f044";
+@fa-var-eject: "\f052";
+@fa-var-ellipsis-h: "\f141";
+@fa-var-ellipsis-v: "\f142";
+@fa-var-empire: "\f1d1";
+@fa-var-envelope: "\f0e0";
+@fa-var-envelope-o: "\f003";
+@fa-var-envelope-square: "\f199";
+@fa-var-eraser: "\f12d";
+@fa-var-eur: "\f153";
+@fa-var-euro: "\f153";
+@fa-var-exchange: "\f0ec";
+@fa-var-exclamation: "\f12a";
+@fa-var-exclamation-circle: "\f06a";
+@fa-var-exclamation-triangle: "\f071";
+@fa-var-expand: "\f065";
+@fa-var-external-link: "\f08e";
+@fa-var-external-link-square: "\f14c";
+@fa-var-eye: "\f06e";
+@fa-var-eye-slash: "\f070";
+@fa-var-eyedropper: "\f1fb";
+@fa-var-facebook: "\f09a";
+@fa-var-facebook-square: "\f082";
+@fa-var-fast-backward: "\f049";
+@fa-var-fast-forward: "\f050";
+@fa-var-fax: "\f1ac";
+@fa-var-female: "\f182";
+@fa-var-fighter-jet: "\f0fb";
+@fa-var-file: "\f15b";
+@fa-var-file-archive-o: "\f1c6";
+@fa-var-file-audio-o: "\f1c7";
+@fa-var-file-code-o: "\f1c9";
+@fa-var-file-excel-o: "\f1c3";
+@fa-var-file-image-o: "\f1c5";
+@fa-var-file-movie-o: "\f1c8";
+@fa-var-file-o: "\f016";
+@fa-var-file-pdf-o: "\f1c1";
+@fa-var-file-photo-o: "\f1c5";
+@fa-var-file-picture-o: "\f1c5";
+@fa-var-file-powerpoint-o: "\f1c4";
+@fa-var-file-sound-o: "\f1c7";
+@fa-var-file-text: "\f15c";
+@fa-var-file-text-o: "\f0f6";
+@fa-var-file-video-o: "\f1c8";
+@fa-var-file-word-o: "\f1c2";
+@fa-var-file-zip-o: "\f1c6";
+@fa-var-files-o: "\f0c5";
+@fa-var-film: "\f008";
+@fa-var-filter: "\f0b0";
+@fa-var-fire: "\f06d";
+@fa-var-fire-extinguisher: "\f134";
+@fa-var-flag: "\f024";
+@fa-var-flag-checkered: "\f11e";
+@fa-var-flag-o: "\f11d";
+@fa-var-flash: "\f0e7";
+@fa-var-flask: "\f0c3";
+@fa-var-flickr: "\f16e";
+@fa-var-floppy-o: "\f0c7";
+@fa-var-folder: "\f07b";
+@fa-var-folder-o: "\f114";
+@fa-var-folder-open: "\f07c";
+@fa-var-folder-open-o: "\f115";
+@fa-var-font: "\f031";
+@fa-var-forward: "\f04e";
+@fa-var-foursquare: "\f180";
+@fa-var-frown-o: "\f119";
+@fa-var-futbol-o: "\f1e3";
+@fa-var-gamepad: "\f11b";
+@fa-var-gavel: "\f0e3";
+@fa-var-gbp: "\f154";
+@fa-var-ge: "\f1d1";
+@fa-var-gear: "\f013";
+@fa-var-gears: "\f085";
+@fa-var-gift: "\f06b";
+@fa-var-git: "\f1d3";
+@fa-var-git-square: "\f1d2";
+@fa-var-github: "\f09b";
+@fa-var-github-alt: "\f113";
+@fa-var-github-square: "\f092";
+@fa-var-gittip: "\f184";
+@fa-var-glass: "\f000";
+@fa-var-globe: "\f0ac";
+@fa-var-google: "\f1a0";
+@fa-var-google-plus: "\f0d5";
+@fa-var-google-plus-square: "\f0d4";
+@fa-var-google-wallet: "\f1ee";
+@fa-var-graduation-cap: "\f19d";
+@fa-var-group: "\f0c0";
+@fa-var-h-square: "\f0fd";
+@fa-var-hacker-news: "\f1d4";
+@fa-var-hand-o-down: "\f0a7";
+@fa-var-hand-o-left: "\f0a5";
+@fa-var-hand-o-right: "\f0a4";
+@fa-var-hand-o-up: "\f0a6";
+@fa-var-hdd-o: "\f0a0";
+@fa-var-header: "\f1dc";
+@fa-var-headphones: "\f025";
+@fa-var-heart: "\f004";
+@fa-var-heart-o: "\f08a";
+@fa-var-history: "\f1da";
+@fa-var-home: "\f015";
+@fa-var-hospital-o: "\f0f8";
+@fa-var-html5: "\f13b";
+@fa-var-ils: "\f20b";
+@fa-var-image: "\f03e";
+@fa-var-inbox: "\f01c";
+@fa-var-indent: "\f03c";
+@fa-var-info: "\f129";
+@fa-var-info-circle: "\f05a";
+@fa-var-inr: "\f156";
+@fa-var-instagram: "\f16d";
+@fa-var-institution: "\f19c";
+@fa-var-ioxhost: "\f208";
+@fa-var-italic: "\f033";
+@fa-var-joomla: "\f1aa";
+@fa-var-jpy: "\f157";
+@fa-var-jsfiddle: "\f1cc";
+@fa-var-key: "\f084";
+@fa-var-keyboard-o: "\f11c";
+@fa-var-krw: "\f159";
+@fa-var-language: "\f1ab";
+@fa-var-laptop: "\f109";
+@fa-var-lastfm: "\f202";
+@fa-var-lastfm-square: "\f203";
+@fa-var-leaf: "\f06c";
+@fa-var-legal: "\f0e3";
+@fa-var-lemon-o: "\f094";
+@fa-var-level-down: "\f149";
+@fa-var-level-up: "\f148";
+@fa-var-life-bouy: "\f1cd";
+@fa-var-life-buoy: "\f1cd";
+@fa-var-life-ring: "\f1cd";
+@fa-var-life-saver: "\f1cd";
+@fa-var-lightbulb-o: "\f0eb";
+@fa-var-line-chart: "\f201";
+@fa-var-link: "\f0c1";
+@fa-var-linkedin: "\f0e1";
+@fa-var-linkedin-square: "\f08c";
+@fa-var-linux: "\f17c";
+@fa-var-list: "\f03a";
+@fa-var-list-alt: "\f022";
+@fa-var-list-ol: "\f0cb";
+@fa-var-list-ul: "\f0ca";
+@fa-var-location-arrow: "\f124";
+@fa-var-lock: "\f023";
+@fa-var-long-arrow-down: "\f175";
+@fa-var-long-arrow-left: "\f177";
+@fa-var-long-arrow-right: "\f178";
+@fa-var-long-arrow-up: "\f176";
+@fa-var-magic: "\f0d0";
+@fa-var-magnet: "\f076";
+@fa-var-mail-forward: "\f064";
+@fa-var-mail-reply: "\f112";
+@fa-var-mail-reply-all: "\f122";
+@fa-var-male: "\f183";
+@fa-var-map-marker: "\f041";
+@fa-var-maxcdn: "\f136";
+@fa-var-meanpath: "\f20c";
+@fa-var-medkit: "\f0fa";
+@fa-var-meh-o: "\f11a";
+@fa-var-microphone: "\f130";
+@fa-var-microphone-slash: "\f131";
+@fa-var-minus: "\f068";
+@fa-var-minus-circle: "\f056";
+@fa-var-minus-square: "\f146";
+@fa-var-minus-square-o: "\f147";
+@fa-var-mobile: "\f10b";
+@fa-var-mobile-phone: "\f10b";
+@fa-var-money: "\f0d6";
+@fa-var-moon-o: "\f186";
+@fa-var-mortar-board: "\f19d";
+@fa-var-music: "\f001";
+@fa-var-navicon: "\f0c9";
+@fa-var-newspaper-o: "\f1ea";
+@fa-var-openid: "\f19b";
+@fa-var-outdent: "\f03b";
+@fa-var-pagelines: "\f18c";
+@fa-var-paint-brush: "\f1fc";
+@fa-var-paper-plane: "\f1d8";
+@fa-var-paper-plane-o: "\f1d9";
+@fa-var-paperclip: "\f0c6";
+@fa-var-paragraph: "\f1dd";
+@fa-var-paste: "\f0ea";
+@fa-var-pause: "\f04c";
+@fa-var-paw: "\f1b0";
+@fa-var-paypal: "\f1ed";
+@fa-var-pencil: "\f040";
+@fa-var-pencil-square: "\f14b";
+@fa-var-pencil-square-o: "\f044";
+@fa-var-phone: "\f095";
+@fa-var-phone-square: "\f098";
+@fa-var-photo: "\f03e";
+@fa-var-picture-o: "\f03e";
+@fa-var-pie-chart: "\f200";
+@fa-var-pied-piper: "\f1a7";
+@fa-var-pied-piper-alt: "\f1a8";
+@fa-var-pinterest: "\f0d2";
+@fa-var-pinterest-square: "\f0d3";
+@fa-var-plane: "\f072";
+@fa-var-play: "\f04b";
+@fa-var-play-circle: "\f144";
+@fa-var-play-circle-o: "\f01d";
+@fa-var-plug: "\f1e6";
+@fa-var-plus: "\f067";
+@fa-var-plus-circle: "\f055";
+@fa-var-plus-square: "\f0fe";
+@fa-var-plus-square-o: "\f196";
+@fa-var-power-off: "\f011";
+@fa-var-print: "\f02f";
+@fa-var-puzzle-piece: "\f12e";
+@fa-var-qq: "\f1d6";
+@fa-var-qrcode: "\f029";
+@fa-var-question: "\f128";
+ at fa-var-question-circle: "\f059";
+ at fa-var-quote-left: "\f10d";
+ at fa-var-quote-right: "\f10e";
+ at fa-var-ra: "\f1d0";
+ at fa-var-random: "\f074";
+ at fa-var-rebel: "\f1d0";
+ at fa-var-recycle: "\f1b8";
+ at fa-var-reddit: "\f1a1";
+ at fa-var-reddit-square: "\f1a2";
+ at fa-var-refresh: "\f021";
+ at fa-var-remove: "\f00d";
+ at fa-var-renren: "\f18b";
+ at fa-var-reorder: "\f0c9";
+ at fa-var-repeat: "\f01e";
+ at fa-var-reply: "\f112";
+ at fa-var-reply-all: "\f122";
+ at fa-var-retweet: "\f079";
+ at fa-var-rmb: "\f157";
+ at fa-var-road: "\f018";
+ at fa-var-rocket: "\f135";
+ at fa-var-rotate-left: "\f0e2";
+ at fa-var-rotate-right: "\f01e";
+ at fa-var-rouble: "\f158";
+ at fa-var-rss: "\f09e";
+ at fa-var-rss-square: "\f143";
+ at fa-var-rub: "\f158";
+ at fa-var-ruble: "\f158";
+ at fa-var-rupee: "\f156";
+ at fa-var-save: "\f0c7";
+ at fa-var-scissors: "\f0c4";
+ at fa-var-search: "\f002";
+ at fa-var-search-minus: "\f010";
+ at fa-var-search-plus: "\f00e";
+ at fa-var-send: "\f1d8";
+ at fa-var-send-o: "\f1d9";
+ at fa-var-share: "\f064";
+ at fa-var-share-alt: "\f1e0";
+ at fa-var-share-alt-square: "\f1e1";
+ at fa-var-share-square: "\f14d";
+ at fa-var-share-square-o: "\f045";
+ at fa-var-shekel: "\f20b";
+ at fa-var-sheqel: "\f20b";
+ at fa-var-shield: "\f132";
+ at fa-var-shopping-cart: "\f07a";
+ at fa-var-sign-in: "\f090";
+ at fa-var-sign-out: "\f08b";
+ at fa-var-signal: "\f012";
+ at fa-var-sitemap: "\f0e8";
+ at fa-var-skype: "\f17e";
+ at fa-var-slack: "\f198";
+ at fa-var-sliders: "\f1de";
+ at fa-var-slideshare: "\f1e7";
+ at fa-var-smile-o: "\f118";
+ at fa-var-soccer-ball-o: "\f1e3";
+ at fa-var-sort: "\f0dc";
+ at fa-var-sort-alpha-asc: "\f15d";
+ at fa-var-sort-alpha-desc: "\f15e";
+ at fa-var-sort-amount-asc: "\f160";
+ at fa-var-sort-amount-desc: "\f161";
+ at fa-var-sort-asc: "\f0de";
+ at fa-var-sort-desc: "\f0dd";
+ at fa-var-sort-down: "\f0dd";
+ at fa-var-sort-numeric-asc: "\f162";
+ at fa-var-sort-numeric-desc: "\f163";
+ at fa-var-sort-up: "\f0de";
+ at fa-var-soundcloud: "\f1be";
+ at fa-var-space-shuttle: "\f197";
+ at fa-var-spinner: "\f110";
+ at fa-var-spoon: "\f1b1";
+ at fa-var-spotify: "\f1bc";
+ at fa-var-square: "\f0c8";
+ at fa-var-square-o: "\f096";
+ at fa-var-stack-exchange: "\f18d";
+ at fa-var-stack-overflow: "\f16c";
+ at fa-var-star: "\f005";
+ at fa-var-star-half: "\f089";
+ at fa-var-star-half-empty: "\f123";
+ at fa-var-star-half-full: "\f123";
+ at fa-var-star-half-o: "\f123";
+ at fa-var-star-o: "\f006";
+ at fa-var-steam: "\f1b6";
+ at fa-var-steam-square: "\f1b7";
+ at fa-var-step-backward: "\f048";
+ at fa-var-step-forward: "\f051";
+ at fa-var-stethoscope: "\f0f1";
+ at fa-var-stop: "\f04d";
+ at fa-var-strikethrough: "\f0cc";
+ at fa-var-stumbleupon: "\f1a4";
+ at fa-var-stumbleupon-circle: "\f1a3";
+ at fa-var-subscript: "\f12c";
+ at fa-var-suitcase: "\f0f2";
+ at fa-var-sun-o: "\f185";
+ at fa-var-superscript: "\f12b";
+ at fa-var-support: "\f1cd";
+ at fa-var-table: "\f0ce";
+ at fa-var-tablet: "\f10a";
+ at fa-var-tachometer: "\f0e4";
+ at fa-var-tag: "\f02b";
+ at fa-var-tags: "\f02c";
+ at fa-var-tasks: "\f0ae";
+ at fa-var-taxi: "\f1ba";
+ at fa-var-tencent-weibo: "\f1d5";
+ at fa-var-terminal: "\f120";
+ at fa-var-text-height: "\f034";
+ at fa-var-text-width: "\f035";
+ at fa-var-th: "\f00a";
+ at fa-var-th-large: "\f009";
+ at fa-var-th-list: "\f00b";
+ at fa-var-thumb-tack: "\f08d";
+ at fa-var-thumbs-down: "\f165";
+ at fa-var-thumbs-o-down: "\f088";
+ at fa-var-thumbs-o-up: "\f087";
+ at fa-var-thumbs-up: "\f164";
+ at fa-var-ticket: "\f145";
+ at fa-var-times: "\f00d";
+ at fa-var-times-circle: "\f057";
+ at fa-var-times-circle-o: "\f05c";
+ at fa-var-tint: "\f043";
+ at fa-var-toggle-down: "\f150";
+ at fa-var-toggle-left: "\f191";
+ at fa-var-toggle-off: "\f204";
+ at fa-var-toggle-on: "\f205";
+ at fa-var-toggle-right: "\f152";
+ at fa-var-toggle-up: "\f151";
+ at fa-var-trash: "\f1f8";
+ at fa-var-trash-o: "\f014";
+ at fa-var-tree: "\f1bb";
+ at fa-var-trello: "\f181";
+ at fa-var-trophy: "\f091";
+ at fa-var-truck: "\f0d1";
+ at fa-var-try: "\f195";
+ at fa-var-tty: "\f1e4";
+ at fa-var-tumblr: "\f173";
+ at fa-var-tumblr-square: "\f174";
+ at fa-var-turkish-lira: "\f195";
+ at fa-var-twitch: "\f1e8";
+ at fa-var-twitter: "\f099";
+ at fa-var-twitter-square: "\f081";
+ at fa-var-umbrella: "\f0e9";
+ at fa-var-underline: "\f0cd";
+ at fa-var-undo: "\f0e2";
+ at fa-var-university: "\f19c";
+ at fa-var-unlink: "\f127";
+ at fa-var-unlock: "\f09c";
+ at fa-var-unlock-alt: "\f13e";
+ at fa-var-unsorted: "\f0dc";
+ at fa-var-upload: "\f093";
+ at fa-var-usd: "\f155";
+ at fa-var-user: "\f007";
+ at fa-var-user-md: "\f0f0";
+ at fa-var-users: "\f0c0";
+ at fa-var-video-camera: "\f03d";
+ at fa-var-vimeo-square: "\f194";
+ at fa-var-vine: "\f1ca";
+ at fa-var-vk: "\f189";
+ at fa-var-volume-down: "\f027";
+ at fa-var-volume-off: "\f026";
+ at fa-var-volume-up: "\f028";
+ at fa-var-warning: "\f071";
+ at fa-var-wechat: "\f1d7";
+ at fa-var-weibo: "\f18a";
+ at fa-var-weixin: "\f1d7";
+ at fa-var-wheelchair: "\f193";
+ at fa-var-wifi: "\f1eb";
+ at fa-var-windows: "\f17a";
+ at fa-var-won: "\f159";
+ at fa-var-wordpress: "\f19a";
+ at fa-var-wrench: "\f0ad";
+ at fa-var-xing: "\f168";
+ at fa-var-xing-square: "\f169";
+ at fa-var-yahoo: "\f19e";
+ at fa-var-yelp: "\f1e9";
+ at fa-var-yen: "\f157";
+ at fa-var-youtube: "\f167";
+ at fa-var-youtube-play: "\f16a";
+ at fa-var-youtube-square: "\f166";
+
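The @fa-var-* variables above are the raw Private Use Area codepoints that Font Awesome's
generated .fa-* icon rules point at; a minimal Less sketch of consuming one directly (the
selector name is hypothetical, and it assumes the FontAwesome @font-face is loaded):

    .upload-icon:before {
        font-family: FontAwesome;
        content: @fa-var-upload;   // "\f093"
    }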
diff --git a/client/galaxy/style/less/frame.less b/client/galaxy/style/less/frame.less
new file mode 100644
index 0000000..cb4e328
--- /dev/null
+++ b/client/galaxy/style/less/frame.less
@@ -0,0 +1,154 @@
+.galaxy-frame{
+    .corner {
+        -moz-border-radius: @border-radius-large;
+        border-radius: @border-radius-large;
+    }
+
+    .toggle {
+        color: gold;
+    }
+
+    .frame-background {
+        z-index         : @zindex-navbar;
+        position        : absolute;
+        top             : 0px;
+        left            : 0px;
+        height          : 100%;
+        width           : 100%;
+        opacity         : 0.6;
+        background      : @black;
+        overflow        : auto;
+    }
+
+    .frame-shadow {
+        z-index         : @zindex-navbar + 1;
+        position        : absolute;
+        display         : none;
+        top             : 0px;
+        left            : 0px;
+        opacity         : 0.5;
+        background      : @navbar-inverse-bg;
+        border          : 1px solid @navbar-default-border;
+    }
+
+    /*
+        panel menu button
+    */
+    .frame-menu {
+        z-index         : @zindex-navbar + 5;
+        position        : absolute;
+        cursor          : pointer;
+        color           : gold;
+        right           : 10px;
+    }
+
+    .frame-scroll-up {
+        top             : 50px;
+    }
+
+    .frame-scroll-down {
+        bottom          : 20px;
+    }
+
+    /*
+        frame components
+    */
+    
+    .frame {
+        z-index         : @zindex-navbar + 2;
+        overflow        : hidden;
+        position        : absolute;
+        background      : @white;
+        border          : 1px solid @navbar-default-border;
+        -webkit-box-shadow: 0 0 5px rgba(0,0,0,0.3);
+    
+        .f-content{
+            position        : absolute;
+            overflow        : hidden;
+            background      : @white;
+            border          : none;
+            top             : 24px;
+            bottom          : 3px;
+            left            : 3px;
+            right           : 3px;
+        }
+
+        .f-cover{
+            position        : absolute;
+            display         : none;
+            top             : 0px;
+            left            : 0px;
+            height          : 100%;
+            width           : 100%;
+            opacity         : 0.0;
+            background      : @white;
+        }
+
+        .f-iframe{
+            border          : none;
+            width           : 100%;
+            height          : 100%;
+        }
+
+        .f-header{
+            height          : 17px;
+            margin          : 2px;
+            cursor          : pointer;
+            border          : 1px solid @black;
+            background      : @base-color-1;
+            color           : @white;
+            .f-icon-left{
+                cursor      : pointer;
+                font-size   : 15px;
+                margin-left : 3px;
+                float       : left;
+                &[disabled] {
+                    opacity : 0.25;
+                }
+            }
+        }
+
+        .f-title {
+            position        : absolute;
+            top             : 2px;
+            left            : 32px;
+            right           : 32px;
+            font-size       : 12px;
+            font-family     : @font-family-sans-serif;
+            text-align      : center;
+        }
+
+        /*
+            frame icons
+        */
+
+        .f-not-allowed{
+            cursor          : not-allowed;
+        }
+
+        .f-close{
+            cursor          : pointer;
+            position        : absolute;
+            font-size       : 15px;
+            right           : 5px;
+            top             : 2px;
+        }
+
+        .f-resize{
+            cursor          : pointer;
+            position        : absolute;
+            font-size       : 15px;
+            right           : 0px;
+            bottom          : 0px;
+            background      : @white;
+            width           : 16px;
+            height          : 16px;
+            color           : @base-color-1;
+            right           : 0px;
+            bottom          : 0px;
+            text-align      : center;
+            line-height     : 16px;
+            border          : 0px;
+        }
+    }
+}
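All of the frame z-indexes above are offsets from the single @zindex-navbar base -- the
background sits at the base, the shadow at +1, the frames at +2 and the scroll/menu buttons
at +5 -- so the whole scratchbook restacks by moving that one variable; a minimal sketch
(the value is hypothetical):

    @zindex-navbar: 2000;   // background: 2000, shadow: 2001, frames: 2002, menu: 2005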
diff --git a/client/galaxy/style/less/galaxy_bootstrap.less b/client/galaxy/style/less/galaxy_bootstrap.less
new file mode 100644
index 0000000..4638efd
--- /dev/null
+++ b/client/galaxy/style/less/galaxy_bootstrap.less
@@ -0,0 +1,67 @@
+/*
+ * bootstrap.less modified for Galaxy.
+ *  - only a subset of bootstrap components are used
+ *  - include 'galaxy_bootstrap_variables' instead of 'variables'
+ *  - overrides that can't be set with variables at end 
+ *
+ * ---
+ *
+ * Bootstrap v3.0.0
+ *
+ *
+ * Copyright 2012 Twitter, Inc
+ * Licensed under the Apache License v2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Designed and built with all the love in the world by @mdo and @fat.
+ */
+
+// Core variables and mixins
+// @import "bootstrap/variables.less";
+ at import "galaxy_bootstrap_variables.less";
+ at import "bootstrap/mixins.less";
+
+// Reset
+ at import "bootstrap/normalize.less";
+ at import "bootstrap/print.less";
+
+// Core CSS
+ at import "bootstrap/scaffolding.less";
+ at import "bootstrap/type.less";
+// @import "bootstrap/code.less";
+ at import "bootstrap/grid.less";
+ at import "bootstrap/tables.less";
+ at import "bootstrap/forms.less";
+ at import "bootstrap/buttons.less";
+
+// Components
+ at import "bootstrap/component-animations.less";
+ at import "bootstrap/glyphicons.less";
+ at import "bootstrap/dropdowns.less";
+ at import "bootstrap/button-groups.less";
+ at import "bootstrap/input-groups.less";
+ at import "bootstrap/navs.less";
+ at import "bootstrap/navbar.less";
+ at import "bootstrap/breadcrumbs.less";
+ at import "bootstrap/pagination.less";
+ at import "bootstrap/pager.less";
+// @import "bootstrap/labels.less";
+ at import "bootstrap/badges.less";
+ at import "bootstrap/jumbotron.less";
+// @import "thumbnails.less";
+ at import "bootstrap/alerts.less";
+ at import "bootstrap/progress-bars.less";
+ at import "bootstrap/media.less";
+ at import "bootstrap/list-group.less";
+ at import "bootstrap/panels.less";
+ at import "bootstrap/wells.less";
+ at import "bootstrap/close.less";
+
+// Components w/ JavaScript
+ at import "bootstrap/modals.less";
+ at import "bootstrap/tooltip.less";
+ at import "bootstrap/popovers.less";
+// @import "carousel.less";
+
+// Utility classes
+ at import "bootstrap/utilities.less";
diff --git a/client/galaxy/style/less/galaxy_bootstrap/overrides.less b/client/galaxy/style/less/galaxy_bootstrap/overrides.less
new file mode 100644
index 0000000..711d141
--- /dev/null
+++ b/client/galaxy/style/less/galaxy_bootstrap/overrides.less
@@ -0,0 +1,83 @@
+.navbar-fixed-top .navbar-brand {
+  font-family: Verdana;
+  font-weight: bold;
+  font-size: 160%;
+}
+
+// Don't wrap text inside pre
+pre {
+  overflow: auto;
+  word-wrap: normal;
+  white-space: pre;
+}
+
+.btn.hover {
+    @btn:hover();
+}
+
+// Allow text in buttons to wrap
+.btn {
+  white-space: normal;
+}
+
+.dropdown-menu {
+    max-width: none;
+}
+
+input[type="checkbox"],input[type="radio"] {
+    margin-left: 0.5ex;
+    margin-right: 0.5ex;
+}
+
+// Modal -- wider by default, scroll like Trello
+
+.modal-dialog {
+    width: 690px;
+}
+
+.pagination {
+  > .active > a {
+    background-color: #ebd9b2;
+    color: black;
+    &:hover {
+      color: black;
+      background-color: #ebd9b2;
+    }
+  }
+}
+/*
+
+NOTE: these styles do not currently work.
+
+.modal { 
+  position: absolute;
+  top: 50px;
+  width: 660px;
+  margin-left: -330px;
+}
+
+.modal:after {
+  content: "";
+  height: 0px;
+  position: relative;
+  top: 50px;
+  display: block;
+  border: solid transparent 1px;
+}
+
+.modal-body {
+  height: auto;
+  max-height: none;
+}
+*/
+
+// Scroll modal body.
+.modal-body {
+  overflow: auto;
+}
+
+// Tabs -- border color is hardcoded in navs.less, change to @btnBorder here
+
+.nav-tabs {
+  margin-bottom: 15px;
+}
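These rules cover the cases the header comment in galaxy_bootstrap.less calls out -- styling
that cannot be reached through variables alone -- so they are meant to load after the
Bootstrap subset and win on the cascade; a minimal sketch of that ordering (illustrative,
not the literal build file):

    @import "galaxy_bootstrap.less";             // variables + bootstrap subset
    @import "galaxy_bootstrap/overrides.less";   // cascade-level fixes like the ones above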
diff --git a/client/galaxy/style/less/galaxy_bootstrap/variables.less b/client/galaxy/style/less/galaxy_bootstrap/variables.less
new file mode 100644
index 0000000..496ce0c
--- /dev/null
+++ b/client/galaxy/style/less/galaxy_bootstrap/variables.less
@@ -0,0 +1,698 @@
+// Everything is derived from @base-color
+// Caveat: currently only works for light colored backgrounds, less provides functions (contrast) that could be used to improve this
+
+// Base color -- Based on the Galaxy masthead color
+@base-color-1: #2C3143;
+@base-color-2: lighten(@base-color-1,25%);
+@base-color-3: lighten(@base-color-1,50%);
+
+@complement-color-1: spin(@base-color-1,180);
+@complement-color-2: spin(@base-color-2,180);
+@complement-color-3: spin(@base-color-3,180);
+
+@base-color: @base-color-2;
+
+// Default border color, this is between gray-light and gray-lighter -- classic Galaxy border color
+@border-default-color: lighten(black, 75%);
+
+// When making borders for components with other colors, how much to darken
+@border-darken-percent: 40%;
+
+// For making state colors, how much to darken and lighten
+@state-text-darken-percent: 15%;
+@state-bg-lighten-percent: 40%;
+@state-bg-saturate-percent: 30%;
+
+// Why bootstrap stopped defining these as variables I don't know
+@white: #fff;
+@black: #000;
+
+// Bootstrap default colors for states, will be adjusted based on @base-color
+
+@bs-primary:         #428bca;
+@bs-success:         #5cb85c;
+@bs-warning:         #f0ad4e;
+@bs-danger:          #d9534f;
+@bs-info:            #5bc0de;
+
+// Derived variables used below
+
+@sat:   saturation(@base-color);
+@luma:  luminance(@base-color);
+@light: lightness(@base-color);
+@tone:  desaturate(@base-color,100%);
+
+// Standard bootstrap variables begin here
+
+//
+// Variables
+// --------------------------------------------------
+
+
+// Global values
+// --------------------------------------------------
+
+// Grays
+// -------------------------
+
+@gray-darker:            lighten(black, 13.5%); // #222
+@gray-dark:              lighten(black, 20%);   // #333
+@gray:                   lighten(black, 33.5%); // #555
+@gray-light:             lighten(black, 60%);   // #999
+@gray-lighter:           lighten(black, 93.5%); // #eee
+
+// Brand colors
+// -------------------------
+
+@brand-primary:         hsl(
+                            hue(@bs-primary),
+                            (saturation(@bs-info) + @sat + 20)/2,
+                            @luma - 10
+                        );
+
+@brand-info:            hsl(
+                            hue(@bs-info),
+                            (saturation(@bs-info) + @sat + 20)/2,
+                            @luma
+                        );
+@brand-success:         hsl(
+                            hue(@bs-success),
+                            (saturation(@bs-success) + @sat + 20)/2,
+                            (luminance(@bs-success) + @luma - 20)/2
+                        );
+@brand-warning:         hsl(
+                            hue(@bs-warning),
+                            (saturation(@bs-warning) + @sat + 20)/2,
+                            (luminance(@bs-warning) + @luma - 20)/2
+                        );
+@brand-danger:          hsl(
+                            hue(@bs-danger),
+                            (saturation(@bs-danger) + @sat + 20)/2,
+                            (luminance(@bs-danger) + @luma + 10)/2
+                        );
+
+// Scaffolding
+// -------------------------
+
+@body-bg:               @white;
+@text-color:            @gray-dark;
+
+// Links
+// -------------------------
+
+@link-color:            @brand-primary;
+@link-hover-color:      darken(@link-color, 15%);
+
+// Typography
+// -------------------------
+
+@font-family-sans-serif:  "Lucida Grande",verdana,arial,helvetica,sans-serif; //JT "Helvetica Neue", Helvetica, Arial, sans-serif;
+@font-family-serif:       Georgia, "Times New Roman", Times, serif;
+@font-family-monospace:   Monaco, Menlo, Consolas, "Courier New", monospace;
+@font-family-base:        @font-family-sans-serif;
+
+@font-size-base:          12px; //JT 14px;
+@font-size-large:         ceil(@font-size-base * 1.25); // ~18px
+@font-size-small:         ceil(@font-size-base * 0.85); // ~12px
+
+@font-size-h1:            floor(@font-size-base * 2); // ~36px
+@font-size-h2:            floor(@font-size-base * 1.75); // ~30px
+@font-size-h3:            ceil(@font-size-base * 1.5); // ~24px
+@font-size-h4:            ceil(@font-size-base * 1.25); // ~18px
+@font-size-h5:            @font-size-base;
+@font-size-h6:            ceil(@font-size-base * 0.85); // ~12px
+
+@line-height-base:        1.428571429; // 20/12 //JT 1.428571429; // 20/14
+@line-height-computed:    floor(@font-size-base * @line-height-base); // ~20px
+
+@headings-font-family:    @font-family-base;
+@headings-font-weight:    800;
+@headings-line-height:    1.1;
+
+// Iconography
+// -------------------------
+
+@icon-font-path:          "../fonts/";
+@icon-font-name:          "glyphicons-halflings-regular";
+
+
+// Components
+// -------------------------
+// Based on 14px font-size and 1.428 line-height (~20px to start)
+
+@padding-base-vertical:          4px; //JT 6px;
+@padding-base-horizontal:        10px; //JT 12px;
+
+@padding-large-vertical:         8px; //JT 10px;
+@padding-large-horizontal:       14px; //JT 16px;
+
+@padding-small-vertical:         3px; //JT 5px;
+@padding-small-horizontal:       8px; //JT 10px;
+
+@line-height-large:              1.33;
+@line-height-small:              1.5;
+
+@border-radius-base:             3px; //JT 4px;
+@border-radius-large:            5px; //JT px;
+@border-radius-small:            2px; //JT 3px;
+
+@component-active-bg:            @brand-primary;
+
+@caret-width-base:               4px;
+@caret-width-large:              5px;
+
+// Tables
+// -------------------------
+
+@table-cell-padding:                 8px;
+@table-condensed-cell-padding:       5px;
+
+@table-bg:                           transparent; // overall background-color
+@table-bg-accent:                    #f9f9f9; // for striping
+@table-bg-hover:                     #f5f5f5;
+@table-bg-active:                    @table-bg-hover;
+
+@table-border-color:                 @border-default-color; // table and cell border
+
+
+// Buttons
+// -------------------------
+
+@btn-font-weight:                normal;
+
+@btn-default-color:              #333;
+@btn-default-bg:                 darken(@white,5%);
+@btn-default-border:             @border-default-color;
+
+@btn-primary-color:              @white;
+@btn-primary-bg:                 @brand-primary;
+@btn-primary-border:             darken(@btn-primary-bg, @border-darken-percent);
+
+@btn-success-color:              @white;
+@btn-success-bg:                 @brand-success;
+@btn-success-border:             darken(@btn-success-bg, @border-darken-percent);
+
+@btn-warning-color:              @white;
+@btn-warning-bg:                 @brand-warning;
+@btn-warning-border:             darken(@btn-warning-bg, @border-darken-percent);
+
+@btn-danger-color:               @white;
+@btn-danger-bg:                  @brand-danger;
+@btn-danger-border:              darken(@btn-danger-bg, @border-darken-percent);
+
+@btn-info-color:                 @white;
+@btn-info-bg:                    @brand-info;
+@btn-info-border:                darken(@btn-info-bg, @border-darken-percent);
+
+@btn-link-disabled-color:        @gray-light;
+
+
+// Forms
+// -------------------------
+
+@input-bg:                       @white;
+@input-bg-disabled:              @gray-lighter;
+
+@input-color:                    @gray;
+@input-border:                   @border-default-color;
+@input-border-radius:            @border-radius-base;
+@input-border-focus:             #66afe9;
+
+@input-color-placeholder:        @gray-light;
+
+// JT
+@input-height-base:              (@line-height-computed + (@padding-base-vertical * 2) + 2);
+@input-height-large:             (floor(@font-size-large * @line-height-large) + (@padding-large-vertical * 2) + 2);
+@input-height-small:             (floor(@font-size-small * @line-height-small) + (@padding-small-vertical * 2) + 2);
+// JT
+
+
+@legend-color:                   @gray-dark;
+@legend-border-color:            @border-default-color;
+
+@input-group-addon-bg:           @gray-lighter;
+@input-group-addon-border-color: @input-border;
+
+
+// Dropdowns
+// -------------------------
+
+@dropdown-bg:                    @white;
+@dropdown-border:                rgba(0,0,0,.15);
+@dropdown-fallback-border:       #ccc;
+@dropdown-divider-bg:            #e5e5e5;
+
+@dropdown-link-active-color:     @white;
+@dropdown-link-active-bg:        @component-active-bg;
+
+@dropdown-link-color:            @gray-dark;
+@dropdown-link-hover-color:      @white;
+@dropdown-link-hover-bg:         @dropdown-link-active-bg;
+
+@dropdown-link-disabled-color:   @gray-light;
+
+@dropdown-header-color:          @gray-light;
+
+@dropdown-caret-color:           @black;
+
+
+// COMPONENT VARIABLES
+// --------------------------------------------------
+
+
+// Z-index master list
+// -------------------------
+// Used for a bird's eye view of components dependent on the z-axis
+// Try to avoid customizing these :)
+
+@zindex-navbar:            1000;
+@zindex-dropdown:          1000;
+@zindex-popover:           1010;
+@zindex-tooltip:           1030;
+@zindex-navbar-fixed:      1030;
+@zindex-modal-background:  1040;
+@zindex-modal:             1050;
+
+// Media queries breakpoints
+// --------------------------------------------------
+
+// Extra small screen / phone
+// Note: Deprecated @screen-xs and @screen-phone as of v3.0.1
+@screen-xs:                  480px;
+@screen-xs-min:              @screen-xs;
+@screen-phone:               @screen-xs-min;
+
+// Small screen / tablet
+// Note: Deprecated @screen-sm and @screen-tablet as of v3.0.1
+@screen-sm:                  768px;
+@screen-sm-min:              @screen-sm;
+@screen-tablet:              @screen-sm-min;
+
+// Medium screen / desktop
+// Note: Deprecated @screen-md and @screen-desktop as of v3.0.1
+@screen-md:                  992px;
+@screen-md-min:              @screen-md;
+@screen-desktop:             @screen-md-min;
+
+// Large screen / wide desktop
+// Note: Deprecated @screen-lg and @screen-lg-desktop as of v3.0.1
+@screen-lg:                  1200px;
+@screen-lg-min:              @screen-lg;
+@screen-lg-desktop:          @screen-lg-min;
+
+// So media queries don't overlap when required, provide a maximum
+@screen-xs-max:              (@screen-sm-min - 1);
+@screen-sm-max:              (@screen-md-min - 1);
+@screen-md-max:              (@screen-lg-min - 1);
+
+
+// Grid system
+// --------------------------------------------------
+
+// Number of columns in the grid system
+@grid-columns:              12;
+// Padding, to be divided by two and applied to the left and right of all columns
+@grid-gutter-width:         30px;
+// Point at which the navbar stops collapsing
+@grid-float-breakpoint:     0px;
+
+
+// Navbar
+// -------------------------
+
+// Basics of a navbar
+@navbar-height:                    33px;
+@navbar-margin-bottom:             @line-height-computed;
+@navbar-default-color:             #777;
+@navbar-default-bg:                @gray-lighter; // JT #f8f8f8;
+@navbar-default-border:            darken(@navbar-default-bg, @border-darken-percent);
+@navbar-border-radius:             @border-radius-base;
+@navbar-padding-horizontal:        floor(@grid-gutter-width / 2);
+@navbar-padding-vertical:          ((@navbar-height - @line-height-computed) / 2);
+
+// Navbar links
+@navbar-default-link-color:                #777;
+@navbar-default-link-hover-color:          #333;
+@navbar-default-link-hover-bg:             transparent;
+@navbar-default-link-active-color:         #555;
+@navbar-default-link-active-bg:            darken(@navbar-default-bg, 6.5%);
+@navbar-default-link-disabled-color:       #ccc;
+@navbar-default-link-disabled-bg:          transparent;
+
+// Navbar brand label
+@navbar-default-brand-color:               @navbar-default-link-color;
+@navbar-default-brand-hover-color:         darken(@navbar-default-link-color, 10%);
+@navbar-default-brand-hover-bg:            transparent;
+
+// Navbar toggle
+@navbar-default-toggle-hover-bg:           #ddd;
+@navbar-default-toggle-icon-bar-bg:        #ccc;
+@navbar-default-toggle-border-color:       #ddd;
+
+
+// Inverted navbar
+//
+// Reset inverted navbar basics
+@navbar-inverse-color:                      @gray-light;
+@navbar-inverse-bg:                         darken(@base-color,25%); //JT #222;
+@navbar-inverse-border:                     darken(@navbar-inverse-bg, 10%);
+
+// Inverted navbar links
+@navbar-inverse-link-color:                 @gray-light;
+@navbar-inverse-link-hover-color:           @white;
+@navbar-inverse-link-hover-bg:              transparent;
+@navbar-inverse-link-active-color:          @navbar-inverse-link-hover-color;
+@navbar-inverse-link-active-bg:             darken(@navbar-inverse-bg, 10%);
+@navbar-inverse-link-disabled-color:        #666;
+@navbar-inverse-link-disabled-bg:           transparent;
+
+// Inverted navbar brand label
+@navbar-inverse-brand-color:                @navbar-inverse-link-color;
+@navbar-inverse-brand-hover-color:          @white;
+@navbar-inverse-brand-hover-bg:             transparent;
+
+// Inverted navbar toggle
+@navbar-inverse-toggle-hover-bg:            #333;
+@navbar-inverse-toggle-icon-bar-bg:         @white;
+@navbar-inverse-toggle-border-color:        #333;
+
+
+// Navs
+// -------------------------
+
+@nav-link-padding:                          @padding-base-vertical @padding-base-horizontal; //JT 10px 15px;
+@nav-link-hover-bg:                         @gray-lighter;
+
+@nav-disabled-link-color:                   @gray-light;
+@nav-disabled-link-hover-color:             @gray-light;
+
+@nav-open-link-hover-color:                 @white;
+@nav-open-caret-border-color:               @white;
+
+// Tabs
+@nav-tabs-border-color:                     @border-default-color;
+
+@nav-tabs-link-hover-border-color:          @gray-lighter;
+
+@nav-tabs-active-link-hover-bg:             @body-bg;
+@nav-tabs-active-link-hover-color:          @gray;
+@nav-tabs-active-link-hover-border-color:   @border-default-color;
+
+@nav-tabs-justified-link-border-color:            @border-default-color;
+@nav-tabs-justified-active-link-border-color:     @body-bg;
+
+// Pills
+@nav-pills-active-link-hover-bg:            @component-active-bg;
+@nav-pills-active-link-hover-color:         @white;
+
+
+// Pagination
+// -------------------------
+
+@pagination-bg:                        @white;
+@pagination-border:                    @border-default-color;
+
+@pagination-hover-bg:                  @gray-lighter;
+
+@pagination-active-bg:                 @brand-primary;
+@pagination-active-color:              @white;
+
+@pagination-disabled-color:            @gray-light;
+
+
+// Pager
+// -------------------------
+
+@pager-border-radius:                  15px;
+@pager-disabled-color:                 @gray-light;
+
+
+// Jumbotron
+// -------------------------
+
+@jumbotron-padding:              30px;
+@jumbotron-color:                inherit;
+@jumbotron-bg:                   @gray-lighter;
+
+@jumbotron-heading-color:        inherit;
+
+
+// Form states and alerts
+// -------------------------
+
+@state-warning-text:             darken(@brand-warning, @state-text-darken-percent);
+@state-warning-bg:               saturate(lighten(@brand-warning, @state-bg-lighten-percent),@state-bg-saturate-percent);
+@state-warning-border:           darken(@state-warning-bg, @border-darken-percent);
+
+@state-danger-text:              darken(@brand-danger, @state-text-darken-percent);
+@state-danger-bg:                saturate(lighten(@brand-danger, @state-bg-lighten-percent),@state-bg-saturate-percent);
+@state-danger-border:            darken(@state-danger-bg, @border-darken-percent);
+
+@state-success-text:             darken(@brand-success, @state-text-darken-percent);
+@state-success-bg:               saturate(lighten(@brand-success, @state-bg-lighten-percent),@state-bg-saturate-percent);
+@state-success-border:           darken(@state-success-bg, @border-darken-percent);
+
+@state-info-text:                darken(@brand-info, @state-text-darken-percent);
+@state-info-bg:                  saturate(lighten(@brand-info, @state-bg-lighten-percent),@state-bg-saturate-percent);
+@state-info-border:              darken(@state-info-bg, @border-darken-percent);
+
+
+// Tooltips
+// -------------------------
+@tooltip-max-width:           200px;
+@tooltip-color:               @white;
+@tooltip-bg:                  @black;
+
+@tooltip-arrow-width:         5px;
+@tooltip-arrow-color:         @tooltip-bg;
+
+
+// Popovers
+// -------------------------
+@popover-bg:                          @white;
+@popover-max-width:                   276px;
+@popover-border-color:                rgba(0,0,0,.2);
+@popover-fallback-border-color:       #ccc;
+
+@popover-title-bg:                    darken(@popover-bg, 3%);
+
+@popover-arrow-width:                 10px;
+@popover-arrow-color:                 @white;
+
+@popover-arrow-outer-width:           (@popover-arrow-width + 1);
+@popover-arrow-outer-color:           rgba(0,0,0,.25);
+@popover-arrow-outer-fallback-color:  #999;
+
+
+// Labels
+// -------------------------
+
+@label-default-bg:            @gray-light;
+@label-primary-bg:            @brand-primary;
+@label-success-bg:            @brand-success;
+@label-info-bg:               @brand-info;
+@label-warning-bg:            @brand-warning;
+@label-danger-bg:             @brand-danger;
+
+@label-color:                 @white;
+@label-link-hover-color:      @white;
+
+
+// Modals
+// -------------------------
+@modal-inner-padding:         20px;
+
+@modal-title-padding:         15px;
+@modal-title-line-height:     @line-height-base;
+
+@modal-content-bg:                             @white;
+@modal-content-border-color:                   rgba(0,0,0,.2);
+@modal-content-fallback-border-color:          #999;
+
+@modal-backdrop-bg:           @black;
+@modal-header-border-color:   @border-default-color;
+@modal-footer-border-color:   @modal-header-border-color;
+
+
+// Alerts
+// -------------------------
+@alert-padding:               15px;
+@alert-border-radius:         @border-radius-base;
+@alert-link-font-weight:      bold;
+
+@alert-success-bg:            @state-success-bg;
+@alert-success-text:          @state-success-text;
+@alert-success-border:        @state-success-border;
+
+@alert-info-bg:               @state-info-bg;
+@alert-info-text:             @state-info-text;
+@alert-info-border:           @state-info-border;
+
+@alert-warning-bg:            @state-warning-bg;
+@alert-warning-text:          @state-warning-text;
+@alert-warning-border:        @state-warning-border;
+
+@alert-danger-bg:             @state-danger-bg;
+@alert-danger-text:           @state-danger-text;
+@alert-danger-border:         @state-danger-border;
+
+
+// Progress bars
+// -------------------------
+@progress-bg:                 #f5f5f5;
+@progress-bar-color:          @white;
+
+@progress-bar-bg:             @brand-primary;
+@progress-bar-success-bg:     @brand-success;
+@progress-bar-warning-bg:     @brand-warning;
+@progress-bar-danger-bg:      @brand-danger;
+@progress-bar-info-bg:        @brand-info;
+
+
+// List group
+// -------------------------
+@list-group-bg:               @white;
+@list-group-border:           @border-default-color;
+@list-group-border-radius:    @border-radius-base;
+
+@list-group-hover-bg:         #f5f5f5;
+@list-group-active-color:     @white;
+@list-group-active-bg:        @component-active-bg;
+@list-group-active-border:    @list-group-active-bg;
+
+@list-group-link-color:          #555;
+@list-group-link-heading-color:  #333;
+
+
+// Panels
+// -------------------------
+@panel-bg:                    @white;
+@panel-inner-border:          @border-default-color;
+@panel-border-radius:         @border-radius-base;
+@panel-footer-bg:             #f5f5f5;
+
+@panel-default-text:          @gray-dark;
+@panel-default-border:        @border-default-color;
+@panel-default-heading-bg:    #f5f5f5;
+
+@panel-primary-text:          @white;
+@panel-primary-border:        @brand-primary;
+@panel-primary-heading-bg:    @brand-primary;
+
+@panel-success-text:          @state-success-text;
+@panel-success-border:        @state-success-border;
+@panel-success-heading-bg:    @state-success-bg;
+
+@panel-warning-text:          @state-warning-text;
+@panel-warning-border:        @state-warning-border;
+@panel-warning-heading-bg:    @state-warning-bg;
+
+@panel-danger-text:           @state-danger-text;
+@panel-danger-border:         @state-danger-border;
+@panel-danger-heading-bg:     @state-danger-bg;
+
+@panel-info-text:             @state-info-text;
+@panel-info-border:           @state-info-border;
+@panel-info-heading-bg:       @state-info-bg;
+
+
+// Thumbnails
+// -------------------------
+@thumbnail-padding:           4px;
+@thumbnail-bg:                @body-bg;
+@thumbnail-border:            @border-default-color;
+@thumbnail-border-radius:     @border-radius-base;
+
+@thumbnail-caption-color:     @text-color;
+@thumbnail-caption-padding:   9px;
+
+
+// Wells
+// -------------------------
+@well-bg:                     @gray-lighter;
+
+
+// Badges
+// -------------------------
+@badge-color:                 @white;
+@badge-link-hover-color:      @white;
+@badge-bg:                    @gray-light;
+
+@badge-active-color:          @link-color;
+@badge-active-bg:             @white;
+
+@badge-font-weight:           bold;
+@badge-line-height:           1;
+@badge-border-radius:         10px;
+
+
+// Breadcrumbs
+// -------------------------
+@breadcrumb-bg:               #f5f5f5;
+@breadcrumb-color:            #ccc;
+@breadcrumb-active-color:     @gray-light;
+
+
+// Carousel
+// ------------------------
+
+@carousel-text-shadow:                        0 1px 2px rgba(0,0,0,.6);
+
+@carousel-control-color:                      @white;
+@carousel-control-width:                      15%;
+@carousel-control-opacity:                    .5;
+@carousel-control-font-size:                  20px;
+
+@carousel-indicator-active-bg:                @white;
+@carousel-indicator-border-color:             @white;
+
+@carousel-caption-color:                      @white;
+
+
+// Close
+// ------------------------
+@close-color:                 @black;
+@close-font-weight:           bold;
+@close-text-shadow:           0 1px 0 @white;
+
+
+// Code
+// ------------------------
+@code-color:                  #c7254e;
+@code-bg:                     #f9f2f4;
+
+@pre-bg:                      #f5f5f5;
+@pre-color:                   @gray-dark;
+@pre-border-color:            #ccc;
+@pre-scrollable-max-height:   340px;
+
+// Type
+// ------------------------
+@text-muted:                  @gray-light;
+@abbr-border-color:           @gray-light;
+@headings-small-color:        @gray-light;
+@blockquote-small-color:      @gray-light;
+@blockquote-border-color:     @gray-lighter;
+@page-header-border-color:    @gray-lighter;
+
+// Miscellaneous
+// -------------------------
+
+// Hr border color
+@hr-border:                   @border-default-color;
+
+// Horizontal forms & lists
+@component-offset-horizontal: 180px;
+
+
+// Container sizes
+// --------------------------------------------------
+
+// Small screen / tablet
+@container-tablet:            ((720px + @grid-gutter-width));
+
+// Medium screen / desktop
+@container-desktop:           ((940px + @grid-gutter-width));
+
+// Large screen / wide desktop
+@container-lg-desktop:        ((1140px + @grid-gutter-width));
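Since everything above is derived from @base-color-1 (lightened for the 2/3 variants,
hue-rotated 180 degrees for the complements, and blended into the Bootstrap state colors
via hsl()), the palette stays self-consistent when the base moves; a minimal sketch of the
derivation using Less's color functions (variable names here are illustrative):

    @base: #2C3143;                 // the masthead color
    @lighter: lighten(@base, 25%);  // how @base-color-2 is produced
    @opposite: spin(@base, 180);    // how @complement-color-1 is produced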
diff --git a/client/galaxy/style/less/galaxy_variables.less b/client/galaxy/style/less/galaxy_variables.less
new file mode 100644
index 0000000..f87eb8e
--- /dev/null
+++ b/client/galaxy/style/less/galaxy_variables.less
@@ -0,0 +1,37 @@
+// Additional variables that are not used by bootstrap but are used
+// in Galaxy stylesheets 
+
+@base-bg: @white;
+@base-text-color: @black;
+@header-text-color: @base-text-color;
+
+@side-panel-bg: lighten(black,95%);
+@layout-border-color: darken(@border-default-color,15%);
+
+@form-heading-bg: @complement-color-3;
+@form-border: @complement-color-3;
+
+@table-heading-bg: @complement-color-3;
+@table-border: @complement-color-3;
+
+// Additional state colors
+
+@state-default-text: @gray-dark;
+@state-default-bg: @gray-lighter;
+@state-default-border: @border-default-color;
+
+@state-upload-border: @state-info-border;
+@state-upload-bg: @state-info-bg;
+
+@state-paused-border: #6666AA;
+@state-paused-bg: #d9edf7;
+
+@state-running-border: #AAAA66;
+@state-running-bg: #FFFFCC;
+
+@state-deleted-border: #330066;
+@state-deleted-bg: #3399FF;
+
+// Theme, expects tmp-site-config.less written by grunt
+@import "tmp-site-config.less";
+@import "theme/blue.less";
\ No newline at end of file
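The tmp-site-config.less import above is the hook the grunt build uses to inject per-site
settings before the theme loads; an illustrative example of what such a generated file
might contain (contents hypothetical, and Less resolves variables to their last definition
in scope, so a value set here wins):

    // tmp-site-config.less -- generated by the client build, not committed
    @base-color-1: #2C3143;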
diff --git a/client/galaxy/style/less/history.less b/client/galaxy/style/less/history.less
new file mode 100644
index 0000000..dbdd15e
--- /dev/null
+++ b/client/galaxy/style/less/history.less
@@ -0,0 +1,741 @@
+// ---------------------------------------------------------------------------- all histories
+.history-panel {
+    .flex-vertical-container;
+
+    > .controls {
+        margin: 8px 8px 1px;
+        padding: 0px;
+        flex: 0 0 auto;
+
+        .title {
+            margin-bottom: 4px;
+            input {
+                width: 100%;
+                // keep the text in position
+                margin: -3px 0px -3px -3px;
+                font-weight: bold;
+            }
+        }
+        .subtitle:not(:empty){
+            min-height: 15px;
+            margin: -4px 0px 8px;
+            span+span:before {
+                content: ', ';
+            }
+        }
+        .history-size {
+            float: left;
+            margin: 4px 0 0;
+        }
+        .actions {
+            margin-bottom: 8px;
+            &:empty {
+                height: @icon-btn-size;
+            }
+        }
+
+        // TODO: remove
+        .messages {
+            clear: both;
+            margin-bottom: 0px;
+            & > * {
+                cursor: pointer;
+                &:last-child {
+                    margin-bottom: 8px;
+                }
+            }
+            .quota-message {
+                display: none;
+                margin: 8px 0px 5px 0px;
+            }
+        }
+
+        //TODO: move these out
+        .tags-display, .annotation-display {
+            display: none;
+            margin-bottom: 8px;
+            .prompt {
+                display: block;
+                margin: 0px;
+                padding: 0px;
+                font-size: 90%;
+                font-weight: normal;
+                color: #555;
+            }
+            .prompt:after {
+                content: ':';
+            }
+        }
+        .tags-display {
+            .select2-container {
+                min-width: 0px;
+                .select2-choices {
+                    border-radius: 3px;
+                }
+            }
+            input {
+                border-radius: 3px;
+            }
+        }
+        .annotation-display {
+            .annotation {
+                background: white;
+                border-radius: 3px;
+                border: 1px solid fadeout( @layout-border-color, 50% );
+                padding: 4px;
+                white-space: pre-wrap;
+                overflow: auto;
+            }
+            // fake placeholder for editable text annotation
+            .annotation:empty:after {
+                position: relative;
+                top: -4px;
+                font-size: 10px;
+                font-style: italic;
+                color: grey;
+                //TODO: move out for localization
+                content : 'Click here to edit annotation';
+            }
+            textarea {
+                margin: 0px 0px 2px 0px;
+                //display: block;
+                border-radius: 3px;
+                width: 100%;
+            }
+        }
+
+        .list-pagination {
+            width: 100%;
+            text-align: center;
+            margin: 0;
+            border-top: 1px solid @btn-default-border;
+
+            button {
+                margin: 0;
+                padding: 0;
+                width: 25%;
+                max-width: 128px;
+                height: 20px;
+                color: transparent;
+                overflow: hidden;
+            }
+            .pages {
+                // restyle closer to button
+                -moz-appearance: none;
+                -webkit-appearance: none;
+                appearance: none;
+
+                margin: 0;
+                padding: 0;
+                width: 48%;
+                max-width: 256px;
+                height: 20px;
+                box-shadow: none;
+                text-align: center;
+                text-align-last: center;
+
+                &:focus {
+                    outline: none;
+                }
+                &:hover {
+                    cursor: pointer;
+                }
+            }
+            &:empty {
+                display: none;
+            }
+            & > * {
+                border: none;
+                border-radius: 0px;
+                background: transparent;
+                &.pages {
+                    color: grey;
+                    text-decoration: underline;
+                }
+                &:hover,
+                &:focus,
+                &:active {
+                    background: @btn-default-bg;
+                    color: @btn-default-color;
+                    box-shadow: none;
+                    text-decoration: none;
+                }
+            }
+
+            &:hover > *:not([disabled]) {
+                color: @btn-default-color;
+            }
+        }
+    }
+
+    // the help text that appears above the drop target
+    .history-drop-target-help {
+        margin: 10px 10px 4px 10px;
+        color: grey;
+        font-size: 80%;
+        font-style: italic;
+        text-align: center;
+    }
+    .history-drop-target {
+        min-height: 64px;
+        margin: 0px 10px 10px 10px;
+        border: 1px dashed black;
+        border-radius: 3px;
+    }
+
+    > .list-items {
+        .flex-column;
+        overflow-x: hidden;
+        overflow-y: auto;
+
+        &:not(:empty) {
+            border-top: 1px solid @border-default-color;
+        }
+        &:empty {
+            flex-grow: 0;
+        }
+        .list-item:first-child {
+            border-top-width: 0px;
+        }
+    }
+
+    > .empty-message {
+        margin: 0px 10px 0px 10px;
+    }
+
+    .loading-indicator,
+    .contents-loading-indicator {
+        display: inline-block;
+        width: 100%;
+        padding: 16px;
+        text-align: center;
+        color: grey;
+    }
+
+    .history-content.dataset,
+    .history-content.dataset-collection {
+        .title-bar {
+            .title {
+                .hid:after {
+                    content: ':';
+                }
+            }
+        }
+    }
+}
+
+
+// ---------------------------------------------------------------------------- collapsed history controls
+// compact the history controls when scrolling away from the top
+.history-panel > .controls.collapsed {
+    margin: 0px;
+    padding: 4px 8px 4px;
+    flex-basis: 32px;
+
+    .messages,
+    .title,
+    .subtitle,
+    .history-size,
+    //TODO: move these out
+    .tags-display .prompt,
+    .annotation-display .prompt {
+        display: none;
+    }
+
+    .search {
+        margin-bottom: 4px;
+    }
+    .actions {
+        margin-bottom: 0;
+    }
+    .list-pagination {
+        .prev,
+        .next {
+            display: none;
+        }
+        .pages {
+            float: left;
+        }
+    }
+}
+
+
+// ---------------------------------------------------------------------------- added class for wider display
+// since these are rendered *within parts* of other pages, we need something more than a media query
+.history-panel.wide {
+    & > .controls {
+        .actions:empty {
+            height: auto;
+        }
+    }
+}
+
+
+// ---------------------------------------------------------------------------- collections nested in histories
+.history-panel {
+    .dataset-collection {
+        .subtitle {
+            margin-top: 2px;
+        }
+    }
+}
+
+
+// ---------------------------------------------------------------------------- annotated-history-panel
+.annotated-history-panel {
+    & > .controls {
+        margin: 0px;
+
+        .name {
+            font-size: 150%;
+            margin-bottom: 4px;
+        }
+        // annotations are displayed by default in this view
+        .subtitle {
+            display: block;
+            white-space: pre-wrap;
+            &:empty {
+                display: none;
+            }
+        }
+        .history-size {
+            float: none;
+            margin: 0;
+        }
+    }
+
+    // defined outside of list-items because .headers is actually added to controls
+    // so that it will stay in place despite scrolling
+    .headers.contents-container {
+        margin-bottom: 0;
+        border-bottom: 2px solid grey;
+        cursor: unset;
+        & > * {
+            vertical-align: middle;
+            padding: 8px;
+            font-weight: bold;
+        }
+        .additional-info {
+            text-align: right;
+        }
+    }
+    .contents-container {
+        display: table;
+        table-layout: fixed;
+        width: 100%;
+
+        // show clickable for expansion
+        cursor: pointer;
+        border-bottom: 1px solid fade(grey, 50%);
+        & > * {
+            display: table-cell;
+            vertical-align: top;
+            width: 50%;
+        }
+        .additional-info:not(.header) {
+            /* top and bottom should be == title-bar padding */
+            padding: 8px;
+            // do not html format
+            white-space: pre-wrap;
+        }
+    }
+
+
+    & > .list-items {
+        width: 100%;
+        border-bottom: 1px solid lightgrey;
+    }
+
+    .empty-message {
+        margin-top: 8px;
+    }
+}
+
+
+// ---------------------------------------------------------------------------- embedded in pages
+.embedded-item.display.history .annotated-history-panel {
+    & > .controls {
+        .name {
+            display: none;
+        }
+    }
+}
+
+
+// ---------------------------------------------------------------------------- current-history-panel
+.current-history-panel {
+
+    & > .controls .list-pagination {
+        // text a bit smaller for all
+        font-size: 90%;
+        & > * {
+            font-size: inherit;
+        }
+    }
+
+    // .current-content is added to dataset/collection when displayed/edited/visualized, etc.
+    //  (roughly: when it's being used in the center panel)
+    .list-item.history-content.current-content {
+        //TODO: Galaxy's 'bs-primary' color - not sure where this is defined
+        border-left: 5px solid #4E5777;
+    }
+}
+
+
+// ---------------------------------------------------------------------------- analyze data page - right panel
+.history-right-panel {
+    .three-rows-header-footer(@panel_header_height, @panel_footer_height);
+
+    > .header,
+    > .footer {
+        .unselectable();
+        border: solid @layout-border-color 0px;
+        background: @navbar-default-bg;
+        color: #555;
+        a {
+            color: #555;
+            &:hover {
+                color: maroon;
+            }
+        }
+    }
+
+    > .header {
+        text-shadow: rgba(255,255,255,.8) 0 1px 0;
+        border-bottom-width: 1px;
+        padding: 8px 10px 0;
+        font-weight: bold;
+        .buttons {
+            float: right;
+            & > * {
+                margin: 0;
+                padding: 0;
+                &:not(:first-child) {
+                    margin-left: 8px;
+                }
+                .fa {
+                    font-size: 1.4em;
+                }
+            }
+        }
+    }
+
+    > .middle {
+        overflow: auto;
+    }
+
+    > .footer {
+        border-top-width: 1px;
+        max-height: 25px;
+        // drag icon used as handle for changing width of panel
+        .drag {
+            height: @panel_footer_height;
+            width: @panel_footer_height - 5;
+            background: url(../images/visualization/draggable_horizontal.png) 50% 50% no-repeat;
+            cursor: ew-resize;
+        }
+    }
+}
+
+// yay! specificity wars!
+#right.history-right-panel {
+    background-color: @panel-bg-color;
+}
+
+// ---------------------------------------------------------------------------- panel header buttons
+.history-panel-header {
+    .panel-header-button {
+        display : inline-block;
+        height  : @icon-btn-size;
+        width   : @icon-btn-size;
+        text-align  : center;
+        line-height : @icon-btn-size - 3;
+        font-size   : 1.2em;
+
+        padding: 0px;
+        &:not(:last-child) {
+            margin-right: 2px;
+        }
+    }
+}
+
+
+// ---------------------------------------------------------------------------- multi-view / multi-panel
+.multi-panel-history {
+    @mph-column_width: 312px;
+    @mph-border_width: 1px;
+    @mph-column_gap: 8px;
+    @mph-header_height: 29px;
+    @mph-footer_height: 0;
+    @mph-controls_height: 20px;
+
+    // for some reason, .flex-row-container below won't be applied to multi-panel-history since it's the enclosing node
+    // re-apply here
+    display: -webkit-box;
+    display: -webkit-flex;
+    display: -ms-flexbox;
+    display: flex;
+
+    -webkit-flex-direction: column;
+    -ms-flex-direction: column;
+    flex-direction: column;
+
+    .flex-row-container,
+    .flex-column-container {
+        display: -webkit-box;
+        display: -webkit-flex;
+        display: -ms-flexbox;
+        display: flex;
+
+        /* force ff to squish beyond content:
+           https://developer.mozilla.org/en-US/Firefox/Releases/34/Site_Compatibility#CSS */
+        min-width: 0px;
+        min-height: 0px;
+
+        -webkit-align-items: stretch;
+        -ms-align-items: stretch;
+        align-items: stretch;
+
+        -webkit-align-content: stretch;
+        -ms-align-content: stretch;
+        align-content: stretch;
+
+        -webkit-justify-content: flex-start;
+        -ms-flex-pack: start;
+        justify-content: flex-start;
+    }
+    .flex-row-container {
+        -webkit-flex-direction: column;
+        -ms-flex-direction: column;
+        flex-direction: column;
+    }
+    .flex-column-container {
+        -webkit-flex-direction: row;
+        -ms-flex-direction: row;
+        flex-direction: row;
+    }
+    .flex-row,
+    .flex-column {
+        -webkit-flex: 1 1 auto;
+        -ms-flex: 1 1 auto;
+        flex: 1 1 auto;
+
+        -webkit-align-self: auto;
+        -ms-flex-item-align: auto;
+        align-self: auto;
+    }
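+
+    // Rough markup sketch these flex helpers are assumed to target (illustration
+    // only; the class names are taken from selectors elsewhere in this file):
+    //
+    //    <div class="multi-panel-history">         <!-- flex column: header / middle / footer -->
+    //        <div class="header">...</div>
+    //        <div class="outer-middle flex-row flex-column-container">
+    //            <div class="middle">
+    //                <div class="history-column">...</div>
+    //            </div>
+    //        </div>
+    //        <div class="footer">...</div>
+    //    </div>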
+
+    /* ---------------------- header & footer */
+    & > .header,
+    & > .footer {
+        width: 100%;
+        background-color: lightgrey;
+        .btn {
+            height: 21px;
+        }
+    }
+    & > .header {
+        min-height: @mph-header_height;
+        max-height: @mph-header_height;
+        // popover containing sorting and filters
+        .more-options {
+            .btn {
+                width: 100%;
+                height: 26px;
+            }
+            input[type=checkbox] {
+                margin-top: 1px;
+            }
+        }
+        .popover {
+            min-width: 290px;
+        }
+    }
+    & > .footer {
+        min-height: @mph-footer_height;
+        max-height: @mph-footer_height;
+    }
+
+    // make the buttons in the header/footer and the panel controls slightly smaller
+    .smaller-btn {
+        height: 20px;
+        line-height: normal;
+        font-size: 90%;
+        padding-top: 0px;
+        padding-bottom: 0px;
+    }
+
+    // controls inside the header and footer
+    .control-column {
+        margin-top: 4px;
+
+        .btn {
+            .smaller-btn;
+        }
+        .search-control {
+            display: inline-block;
+            width: 40%;
+
+            .search-clear,
+            .search-loading {
+                margin-top: -22px;
+            }
+        }
+        input.search-query {
+            font-size: 90%;
+            height: 21px;
+            line-height: normal;
+            padding: 2px 2px 1px 2px;
+        }
+        .open-more-options {
+            padding: 2px 6px 2px 6px;
+            font-size: 100%;
+        }
+        .header-info {
+            display: inline-block;
+            padding: 2px 4px 2px 4px;
+            color: grey;
+        }
+
+        &.control-column-right,
+        &.control-column-left {
+            margin-right: 8px;
+            margin-left: 8px;
+            & > * {
+                margin: 0px 4px 4px 0px;
+            }
+        }
+        &.control-column-center {
+            text-align: center;
+            max-height: 22px;
+            -webkit-flex: 0 1 auto;
+            -ms-flex: 0 1 auto;
+            flex: 0 1 auto;
+
+            /* truncate */
+            overflow: hidden;
+            text-overflow: ellipsis;
+            white-space: nowrap;
+        }
+        &.control-column-right {
+            text-align: right;
+        }
+    }
+
+    /* ---------------------- middle */
+    .outer-middle {
+        overflow: auto;
+    }
+    .middle {
+        min-width: 100%;
+        margin: 0px 0px 0px 0px;
+        background-color: white;
+        padding: @mph-column_gap;
+    }
+
+    .history-column {
+        width: @mph-column_width;
+        margin: 0px @mph-column_gap 0px 0px;
+
+        // current history
+        &:first-child {
+            position: fixed;
+            z-index : 10;
+
+            // visually differentiate the current history
+            .history-panel {
+                border: 1px solid black;
+                box-shadow: 4px 4px 4px rgba( 96, 96, 96, 0.3 );
+            }
+        }
+        &:nth-child(2) {
+            // push the column after the current away from the left (since it's fixed)
+            margin-left: ( @mph-column_width + @mph-column_gap );
+        }
+        &:last-child {
+            margin-right: 0px;
+        }
+        .dropdown-menu {
+            z-index: inherit;
+        }
+
+        .panel-controls {
+            width: 100%;
+            height: ( @mph-controls_height + 4 );
+            border-radius: 3px;
+            background-color: white;
+            text-align: center;
+
+            -webkit-flex: 0 0 auto;
+            -ms-flex: 0 0 auto;
+            flex: 0 0 auto;
+
+            -webkit-align-self: auto;
+            -ms-flex-item-align: auto;
+            align-self: auto;
+
+            .btn {
+                .smaller-btn;
+            }
+            .pull-left .btn {
+                margin-right: 4px;
+            }
+            .pull-right .btn {
+                margin-left: 4px;
+            }
+            .panel-menu {
+                z-index: 1;
+                .dropdown-menu a {
+                    text-align: left;
+                }
+            }
+            .current-label {
+                display: inline-block;
+                color: grey;
+                padding-left: 2px;
+                margin-top: 2px;
+            }
+        }
+        .history-panel {
+            width: 100%;
+
+            border: @mph-border_width solid grey;
+            border-radius: 3px 3px 0px 0px;
+            background-color: @panel-bg-color;
+
+            overflow: auto;
+
+            & > .controls .subtitle {
+                min-height: 15px;
+                margin: -4px 0px 8px;
+                span+span:before {
+                    content: ', ';
+                }
+            }
+
+            & > .controls .list-pagination {
+                // text a bit smaller for all
+                font-size: 90%;
+                & > * {
+                    font-size: inherit;
+                }
+            }
+        }
+    }
+
+    // loading indicator on far right of multipanel view
+    .histories-loading-indicator {
+        // if we don't bring the width down here, the space is way too large for vertical text
+        width: 8px;
+        transform: rotate(90deg);
+        transform-origin: left top 0;
+        margin-left: 16px;
+        white-space: nowrap;
+        color: grey;
+        span {
+            // space between the icon and the text
+            margin-right: 8px;
+        }
+    }
+}
diff --git a/client/galaxy/style/less/iconic_stroke.less b/client/galaxy/style/less/iconic_stroke.less
new file mode 100644
index 0000000..a14a4cf
--- /dev/null
+++ b/client/galaxy/style/less/iconic_stroke.less
@@ -0,0 +1,461 @@
+@font-face {
+  font-family: 'IconicStroke';
+  src: url('/static/images/fonts/iconic_stroke.eot');
+  src: url('/static/images/fonts/iconic_stroke.eot?#iefix') format('embedded-opentype'), url('../images/fonts/iconic_stroke.ttf') format('truetype'), url('../images/fonts/iconic_stroke.svg#iconic') format('svg');
+  font-weight: normal;
+  font-style: normal;
+}
+.iconic {
+  display: inline-block;
+  font-family: 'IconicStroke';
+}
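+/* Usage sketch (illustration only): combine the base class with one of the
+   icon-name classes defined below, e.g.
+
+       <a class="iconic magnifying_glass" title="Search"></a>
+
+   Each :before rule below maps an icon name to a glyph in the IconicStroke font. */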
+.iconic.lightbulb:before {
+  content: '\e063';
+}
+.iconic.equalizer:before {
+  content: '\e052';
+}
+.iconic.map_pin_stroke:before {
+  content: '\e068';
+}
+.iconic.brush_alt:before {
+  content: '\e01c';
+}
+.iconic.move:before {
+  content: '\e03e';
+}
+.iconic.pen_alt_stroke:before {
+  content: '\e005';
+}
+.iconic.move_vertical:before {
+  content: '\e03b';
+}
+.iconic.book_alt2:before {
+  content: '\e06a';
+}
+.iconic.layers:before {
+  content: '\e01f';
+}
+.iconic.pause:before {
+  content: '\e049';
+}
+.iconic.layers_alt:before {
+  content: '\e020';
+}
+.iconic.cloud_upload:before {
+  content: '\e045';
+}
+.iconic.chart_alt:before {
+  content: '\e029';
+}
+.iconic.fullscreen_exit_alt:before {
+  content: '\e051';
+}
+.iconic.cloud_download:before {
+  content: '\e044';
+}
+.iconic.comment_alt2_stroke:before {
+  content: '\e004';
+}
+.iconic.mail:before {
+  content: '\2709';
+}
+.iconic.check_alt:before {
+  content: '\2718';
+}
+.iconic.document_stroke:before {
+  content: '\e066';
+}
+.iconic.battery_charging:before {
+  content: '\e05d';
+}
+.iconic.stop:before {
+  content: '\e04a';
+}
+.iconic.arrow_up:before {
+  content: '\2191';
+}
+.iconic.move_horizontal:before {
+  content: '\e038';
+}
+.iconic.compass:before {
+  content: '\e021';
+}
+.iconic.minus_alt:before {
+  content: '\e009';
+}
+.iconic.battery_empty:before {
+  content: '\e05c';
+}
+.iconic.map_pin_alt:before {
+  content: '\e002';
+}
+.iconic.unlock_stroke:before {
+  content: '\e076';
+}
+.iconic.lock_stroke:before {
+  content: '\e075';
+}
+.iconic.question_mark:before {
+  content: '\003f';
+}
+.iconic.list:before {
+  content: '\e055';
+}
+.iconic.upload:before {
+  content: '\e043';
+}
+.iconic.reload:before {
+  content: '\e030';
+}
+.iconic.loop_alt4:before {
+  content: '\e035';
+}
+.iconic.loop_alt3:before {
+  content: '\e034';
+}
+.iconic.loop_alt2:before {
+  content: '\e033';
+}
+.iconic.loop_alt1:before {
+  content: '\e032';
+}
+.iconic.left_quote:before {
+  content: '\275d';
+}
+.iconic.x:before {
+  content: '\2713';
+}
+.iconic.last:before {
+  content: '\e04d';
+}
+.iconic.document_alt_stroke:before {
+  content: '\e000';
+}
+.iconic.bars:before {
+  content: '\e06f';
+}
+.iconic.arrow_left:before {
+  content: '\2190';
+}
+.iconic.arrow_down:before {
+  content: '\2193';
+}
+.iconic.download:before {
+  content: '\e042';
+}
+.iconic.home:before {
+  content: '\2302';
+}
+.iconic.calendar:before {
+  content: '\e001';
+}
+.iconic.right_quote_alt:before {
+  content: '\e012';
+}
+.iconic.fullscreen:before {
+  content: '\e04e';
+}
+.iconic.dial:before {
+  content: '\e058';
+}
+.iconic.plus_alt:before {
+  content: '\e008';
+}
+.iconic.clock:before {
+  content: '\e079';
+}
+.iconic.movie:before {
+  content: '\e060';
+}
+.iconic.steering_wheel:before {
+  content: '\e024';
+}
+.iconic.pen:before {
+  content: '\270e';
+}
+.iconic.tag_stroke:before {
+  content: '\e02b';
+}
+.iconic.pin:before {
+  content: '\e067';
+}
+.iconic.denied:before {
+  content: '\26d4';
+}
+.iconic.left_quote_alt:before {
+  content: '\e011';
+}
+.iconic.volume_mute:before {
+  content: '\e071';
+}
+.iconic.arrow_up_alt2:before {
+  content: '\e018';
+}
+.iconic.list_nested:before {
+  content: '\e056';
+}
+.iconic.arrow_up_alt1:before {
+  content: '\e014';
+}
+.iconic.comment_stroke:before {
+  content: '\e06d';
+}
+.iconic.undo:before {
+  content: '\e02f';
+}
+.iconic.umbrella:before {
+  content: '\2602';
+}
+.iconic.bolt:before {
+  content: '\26a1';
+}
+.iconic.article:before {
+  content: '\e053';
+}
+.iconic.read_more:before {
+  content: '\e054';
+}
+.iconic.beaker:before {
+  content: '\e023';
+}
+.iconic.beaker_alt:before {
+  content: '\e010';
+}
+.iconic.battery_full:before {
+  content: '\e073';
+}
+.iconic.arrow_right:before {
+  content: '\2192';
+}
+.iconic.new_window:before {
+  content: '\e059';
+}
+.iconic.plus:before {
+  content: '\2795';
+}
+.iconic.cog:before {
+  content: '\2699';
+}
+.iconic.key_stroke:before {
+  content: '\26bf';
+}
+.iconic.first:before {
+  content: '\e04c';
+}
+.iconic.comment_alt1_stroke:before {
+  content: '\e003';
+}
+.iconic.trash_stroke:before {
+  content: '\e05a';
+}
+.iconic.image:before {
+  content: '\e027';
+}
+.iconic.chat_alt_stroke:before {
+  content: '\e007';
+}
+.iconic.cd:before {
+  content: '\e064';
+}
+.iconic.right_quote:before {
+  content: '\275e';
+}
+.iconic.brush:before {
+  content: '\e01b';
+}
+.iconic.cloud:before {
+  content: '\2601';
+}
+.iconic.eye:before {
+  content: '\e025';
+}
+.iconic.play_alt:before {
+  content: '\e048';
+}
+.iconic.transfer:before {
+  content: '\e041';
+}
+.iconic.pen_alt2:before {
+  content: '\e006';
+}
+.iconic.camera:before {
+  content: '\e070';
+}
+.iconic.move_horizontal_alt2:before {
+  content: '\e03a';
+}
+.iconic.curved_arrow:before {
+  content: '\2935';
+}
+.iconic.move_horizontal_alt1:before {
+  content: '\e039';
+}
+.iconic.aperture:before {
+  content: '\e026';
+}
+.iconic.reload_alt:before {
+  content: '\e031';
+}
+.iconic.magnifying_glass:before {
+  content: '\e074';
+}
+.iconic.iphone:before {
+  content: '\e06e';
+}
+.iconic.fork:before {
+  content: '\e046';
+}
+.iconic.box:before {
+  content: '\e06b';
+}
+.iconic.bars_alt:before {
+  content: '\e00a';
+}
+.iconic.heart_stroke:before {
+  content: '\2764';
+}
+.iconic.volume:before {
+  content: '\e072';
+}
+.iconic.x_alt:before {
+  content: '\2714';
+}
+.iconic.link:before {
+  content: '\e077';
+}
+.iconic.moon_stroke:before {
+  content: '\263e';
+}
+.iconic.eyedropper:before {
+  content: '\e01e';
+}
+.iconic.spin:before {
+  content: '\e036';
+}
+.iconic.rss:before {
+  content: '\e02c';
+}
+.iconic.info:before {
+  content: '\2139';
+}
+.iconic.target:before {
+  content: '\e02a';
+}
+.iconic.cursor:before {
+  content: '\e057';
+}
+.iconic.minus:before {
+  content: '\2796';
+}
+.iconic.book_alt:before {
+  content: '\e00b';
+}
+.iconic.headphones:before {
+  content: '\e061';
+}
+.iconic.hash:before {
+  content: '\0023';
+}
+.iconic.arrow_left_alt1:before {
+  content: '\e013';
+}
+.iconic.arrow_left_alt2:before {
+  content: '\e017';
+}
+.iconic.fullscreen_exit:before {
+  content: '\e050';
+}
+.iconic.share:before {
+  content: '\e02e';
+}
+.iconic.fullscreen_alt:before {
+  content: '\e04f';
+}
+.iconic.at:before {
+  content: '\0040';
+}
+.iconic.chat:before {
+  content: '\e05e';
+}
+.iconic.move_vertical_alt2:before {
+  content: '\e03d';
+}
+.iconic.move_vertical_alt1:before {
+  content: '\e03c';
+}
+.iconic.check:before {
+  content: '\2717';
+}
+.iconic.mic:before {
+  content: '\e05f';
+}
+.iconic.calendar_alt_stroke:before {
+  content: '\e06c';
+}
+.iconic.book:before {
+  content: '\e069';
+}
+.iconic.move_alt1:before {
+  content: '\e03f';
+}
+.iconic.move_alt2:before {
+  content: '\e040';
+}
+.iconic.award_stroke:before {
+  content: '\e022';
+}
+.iconic.wrench:before {
+  content: '\e078';
+}
+.iconic.play:before {
+  content: '\e047';
+}
+.iconic.star:before {
+  content: '\2605';
+}
+.iconic.chart:before {
+  content: '\e028';
+}
+.iconic.rain:before {
+  content: '\26c6';
+}
+.iconic.folder_stroke:before {
+  content: '\e065';
+}
+.iconic.sun_stroke:before {
+  content: '\2600';
+}
+.iconic.user:before {
+  content: '\e062';
+}
+.iconic.battery_half:before {
+  content: '\e05b';
+}
+.iconic.aperture_alt:before {
+  content: '\e00c';
+}
+.iconic.eject:before {
+  content: '\e04b';
+}
+.iconic.arrow_down_alt1:before {
+  content: '\e016';
+}
+.iconic.pilcrow:before {
+  content: '\00b6';
+}
+.iconic.arrow_down_alt2:before {
+  content: '\e01a';
+}
+.iconic.arrow_right_alt1:before {
+  content: '\e015';
+}
+.iconic.arrow_right_alt2:before {
+  content: '\e019';
+}
+.iconic.rss_alt:before {
+  content: '\e02d';
+}
+.iconic.spin_alt:before {
+  content: '\e037';
+}
diff --git a/client/galaxy/style/less/iphone.less b/client/galaxy/style/less/iphone.less
new file mode 100644
index 0000000..965d757
--- /dev/null
+++ b/client/galaxy/style/less/iphone.less
@@ -0,0 +1,438 @@
+// Parts taken from iui.css (c) 2007-8 by iUI Project Members, see LICENSE.txt for license
+
+@import "galaxy_bootstrap/variables.less";
+@import "galaxy_variables.less";
+
+body {
+    margin: 0;
+    font-family: Helvetica;
+    background: #FFFFFF;
+    color: #000000;
+    overflow-x: hidden;
+    -webkit-user-select: none;
+    -webkit-text-size-adjust: none;
+}
+
+body > *:not(.toolbar) {
+    display: none;
+    position: absolute;
+    margin: 0;
+    padding: 0;
+    left: 0;
+    width: 100%;
+    min-height: 372px;
+}
+
+body[orient="landscape"] > *:not(.toolbar) {
+    min-height: 268px;
+}
+
+body > *[selected="true"] {
+    display: block;
+}
+
+a[selected], a:active {
+    background-color: #194fdb !important;
+    background-image: url(../iui/listArrowSel.png), url(../iui/selection.png) !important;
+    background-repeat: no-repeat, repeat-x;
+    background-position: right center, left top;
+    color: #FFFFFF !important;
+}
+
+a[selected="progress"] {
+    background-image: url(../iui/loading.gif), url(../iui/selection.png) !important;
+}
+
+/************************************************************************************************/
+
+body > .toolbar {
+    position: relative;
+    box-sizing: border-box;
+    -moz-box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    border-bottom: 1px solid #2d3642;
+    padding: 10px;
+    height: 45px;
+    background: url(../iui/toolbar.png) #6d84a2 repeat-x;
+}
+
+.toolbar > h1 {
+    position: absolute;
+    overflow: hidden;
+    left: 50%;
+    margin: 1px 0 0 -75px;
+    height: 45px;
+    font-size: 20px;
+    width: 150px;
+    font-weight: bold;
+    text-shadow: rgba(0, 0, 0, 0.4) 0px -1px 0;
+    text-align: center;
+    text-overflow: ellipsis;
+    white-space: nowrap;
+    color: #FFFFFF;
+}
+
+body[orient="landscape"] > .toolbar > h1 {
+    margin-left: -125px;
+    width: 250px;
+}
+
+body > .toolbar.masthead {
+    background: @navbar-inverse-bg;
+}
+
+body > .toolbar.masthead > h1 {
+    left: 0; margin-left: 0; width: 100%;
+}
+
+
+.button {
+    position: absolute;
+    overflow: hidden;
+    top: 8px;
+    right: 6px;
+    margin: 0;
+    border-width: 0 5px;
+    padding: 0 3px;
+    width: auto;
+    height: 30px;
+    line-height: 30px;
+    font-family: inherit;
+    font-size: 12px;
+    font-weight: bold;
+    color: #FFFFFF;
+    text-shadow: rgba(0, 0, 0, 0.6) 0px -1px 0;
+    text-overflow: ellipsis;
+    text-decoration: none;
+    white-space: nowrap;
+    background: none;
+    -webkit-border-image: url(../iui/toolButton.png) 0 5 0 5;
+}
+
+.blueButton {
+    -webkit-border-image: url(../iui/blueButton.png) 0 5 0 5;
+    border-width: 0 5px;
+}
+
+.leftButton {
+    left: 6px;
+    right: auto;
+}
+
+#backButton {
+    display: none;
+    left: 6px;
+    right: auto;
+    padding: 0;
+    max-width: 55px;
+    border-width: 0 8px 0 14px;
+    -webkit-border-image: url(../iui/backButton.png) 0 8 0 14;
+}
+
+.whiteButton,
+.grayButton {
+    display: block;
+    border-width: 0 12px;
+    padding: 10px;
+    text-align: center;
+    font-size: 20px;
+    font-weight: bold;
+    text-decoration: inherit;
+    color: inherit;
+}
+
+.whiteButton {
+    -webkit-border-image: url(../iui/whiteButton.png) 0 12 0 12;
+    text-shadow: rgba(255, 255, 255, 0.7) 0 1px 0;
+}
+
+.grayButton {
+    -webkit-border-image: url(../iui/grayButton.png) 0 12 0 12;
+    color: #FFFFFF;
+}
+
+/************************************************************************************************/
+
+body > ul > li {
+    position: relative;
+    margin: 0;
+    border-bottom: 1px solid #E0E0E0;
+    padding: 8px 0 8px 10px;
+    font-size: 20px;
+    font-weight: bold;
+    list-style: none;
+}
+
+body > ul > li.group {
+    position: relative;
+    top: -1px;
+    margin-bottom: -2px;
+    border-top: 1px solid #7d7d7d;
+    border-bottom: 1px solid #999999;
+    padding: 1px 10px;
+    background: url(../iui/listGroup.png) repeat-x;
+    font-size: 17px;
+    font-weight: bold;
+    text-shadow: rgba(0, 0, 0, 0.4) 0 1px 0;
+    color: #FFFFFF;
+}
+
+body > ul > li.group:first-child {
+    top: 0;
+    border-top: none;
+}
+
+body > ul > li > a {
+    display: block;
+    margin: -8px 0 -8px -10px;
+    padding: 8px 32px 8px 10px;
+    text-decoration: none;
+    color: inherit;
+    background: url(../iui/listArrow.png) no-repeat right center;
+}
+
+a[target="_replace"] {
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    padding-top: 25px;
+    padding-bottom: 25px;
+    font-size: 18px;
+    color: cornflowerblue;
+    background-color: #FFFFFF;
+    background-image: none;
+}
+
+/************************************************************************************************/
+    
+body > .dialog {
+    top: 0;
+    width: 100%;
+    min-height: 417px;
+    z-index: 2;
+    background: rgba(0, 0, 0, 0.8);
+    padding: 0;
+    text-align: right;
+}
+
+.dialog > fieldset {
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    width: 100%;
+    margin: 0;
+    border: none;
+    border-top: 1px solid #6d84a2;
+    padding: 10px 6px;
+    background: url(../iui/toolbar.png) #7388a5 repeat-x;
+}
+
+.dialog > fieldset > h1 {
+    margin: 0 10px 0 10px;
+    padding: 0;
+    font-size: 20px;
+    font-weight: bold;
+    color: #FFFFFF;
+    text-shadow: rgba(0, 0, 0, 0.4) 0px -1px 0;
+    text-align: center;
+}
+
+.dialog > fieldset > label {
+    position: absolute;
+    margin: 16px 0 0 6px;
+    font-size: 14px;
+    color: #999999;
+}
+
+
+
+input:not(input[type|=radio]):not(input[type|=checkbox]) {
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    width: 100%;
+    margin: 8px 0 0 0;
+    padding: 6px 6px 6px 44px;
+    font-size: 16px;
+    font-weight: normal;
+}
+
+/************************************************************************************************/
+
+body > .panel {
+    box-sizing: border-box;
+    -moz-box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    padding: 10px;
+    background: #c8c8c8 url(../iui/pinstripes.png);
+}
+
+.panel > fieldset {
+    position: relative;
+    margin: 0 0 20px 0;
+    padding: 0;
+    background: #FFFFFF;
+    -webkit-border-radius: 10px;
+    -moz-border-radius: 10px;
+    border: 1px solid #999999;
+    text-align: right;
+    font-size: 16px;
+}
+
+.row  {
+    position: relative;
+    min-height: 42px;
+    border-bottom: 1px solid #999999;
+    -webkit-border-radius: 0;
+    text-align: right;
+    overflow: hidden;
+    text-overflow: ellipsis;
+}
+
+fieldset > .row:last-child {
+    border-bottom: none !important;
+}
+
+.row > input:not(input[type|=radio]):not(input[type|=checkbox]) {
+    box-sizing: border-box;
+    -moz-box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    margin: 0;
+    border: none;
+    padding: 12px 10px 0 110px;
+    height: 42px;
+    background: none;
+}
+.row > input[type|=radio], .row > input[type|=checkbox] {
+    margin: 7px 7px 0 0;
+    height: 25px;
+    width: 25px;
+}
+
+.row > label {
+    position: absolute;
+    margin: 0 0 0 14px;
+    line-height: 42px;
+    font-weight: bold;
+}
+
+.row > a {
+    font-weight: bold;
+    text-align: left;
+    display: block;
+    padding: 8px 32px 8px 14px;
+    text-decoration: none;
+    color: inherit;
+    background: url(../iui/listArrow.png) no-repeat right center;
+}
+
+.row > .error {
+    height: 25px;
+    text-align: left;
+    font-size: 14px;
+    padding: 0 0 0 110px;
+    color: red;
+}
+
+.row > span {
+    position: absolute;
+    padding: 12px 10px 0 110px;
+    margin: 0;
+}
+
+.row > .toggle {
+    position: absolute;
+    top: 6px;
+    right: 6px;
+    width: 100px;
+    height: 28px;
+}
+
+.toggle {
+    border: 1px solid #888888;
+    -webkit-border-radius: 6px;
+    background: #FFFFFF url(../iui/toggle.png) repeat-x;
+    font-size: 19px;
+    font-weight: bold;
+    line-height: 30px;
+}
+
+.toggle[toggled="true"] {
+    border: 1px solid #143fae;
+    background: #194fdb url(../iui/toggleOn.png) repeat-x;
+}
+
+.toggleOn {
+    display: none;
+    position: absolute;
+    width: 60px;
+    text-align: center;
+    left: 0;
+    top: 0;
+    color: #FFFFFF;
+    text-shadow: rgba(0, 0, 0, 0.4) 0px -1px 0;
+}
+
+.toggleOff {
+    position: absolute;
+    width: 60px;
+    text-align: center;
+    right: 0;
+    top: 0;
+    color: #666666;
+}
+
+.toggle[toggled="true"] > .toggleOn {
+    display: block;
+}
+
+.toggle[toggled="true"] > .toggleOff {
+    display: none;
+}
+
+.thumb {
+    position: absolute;
+    top: -1px;
+    left: -1px;
+    width: 40px;
+    height: 28px;    
+    border: 1px solid #888888;
+    -webkit-border-radius: 6px;
+    background: #ffffff url(../iui/thumb.png) repeat-x;
+}
+
+.toggle[toggled="true"] > .thumb {
+    left: auto;
+    right: -1px;
+}
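+
+/* The toggle rules above assume markup roughly like this (illustration only),
+   with script flipping the toggled attribute:
+
+       <div class="toggle" toggled="true">
+           <span class="thumb"></span>
+           <span class="toggleOn">ON</span>
+           <span class="toggleOff">OFF</span>
+       </div>
+*/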
+
+.panel > h2 {
+    margin: 0 0 8px 14px;
+    font-size: inherit;
+    font-weight: bold;
+    color: #4d4d70;
+    text-shadow: rgba(255, 255, 255, 0.75) 2px 2px 0;
+}
+
+/************************************************************************************************/
+
+#preloader {
+    display: none;
+    background-image: url(loading.gif), url(selection.png),
+        url(blueButton.png), url(listArrowSel.png), url(listGroup.png);
+}
+
+// State colors
+
+.state-color-queued {
+    background: @state-default-bg;
+}
+
+.state-color-ok {
+    background: @state-success-bg;
+}
+
+.state-color-error {
+    background: @state-danger-bg;
+}
+
+.state-color-running {
+    background: @state-running-bg;
+}
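+
+// Illustration only: these classes are keyed to Galaxy job states, e.g.
+//
+//     <div class="state-color-running">...</div>
+//
+// for a job that is still executing; the @state-*-bg variables are assumed to
+// come from the variables files imported at the top of this file.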
diff --git a/client/galaxy/style/less/job.less b/client/galaxy/style/less/job.less
new file mode 100644
index 0000000..8f7f816
--- /dev/null
+++ b/client/galaxy/style/less/job.less
@@ -0,0 +1,56 @@
+// ---------------------------------------------------------------------------- generic panel list items
+.job.list-item {
+    border: 1px solid @border-default-color;
+
+    .title-bar {
+        .subtitle {
+            color: grey;
+            font-size: 80%;
+        }
+    }
+
+    & > .details {
+        padding: 0px;
+
+        .params {
+            padding: 0px 8px 0px 8px;
+            //display: table;
+            
+            .param,
+            .input {
+                //display: table-row;
+
+                .prompt {
+                    //display: block;
+                    //display: table-cell;
+                    //margin-right: 8px;
+                    //text-align: right;
+                    font-size: 80%;
+                    //font-style: italic;
+                    color: grey;
+                }
+                .value {
+                    display: inline-block;
+                    font-weight: bold;
+                    //display: table-cell;
+                }
+            }
+        }
+
+        .list-panel {
+            padding : 0px;
+            .list-items {
+                border: 0px;
+                border-radius: 0px;
+                .list-item {
+                    border: 0px;
+                    border-radius: 0px;
+                    border-top: 1px solid @border-default-color;
+                }
+            }
+        }
+    }
+}
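+
+// Markup sketch these rules are assumed to target (illustration only; class
+// names are taken from the selectors above, the values are made up):
+//
+//    <div class="job list-item">
+//        <div class="title-bar"><span class="subtitle">...</span></div>
+//        <div class="details">
+//            <div class="params">
+//                <div class="input">
+//                    <span class="prompt">Input dataset</span>
+//                    <span class="value">1: data.bed</span>
+//                </div>
+//            </div>
+//        </div>
+//    </div>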
diff --git a/client/galaxy/style/less/jstree.less b/client/galaxy/style/less/jstree.less
new file mode 100755
index 0000000..5903154
--- /dev/null
+++ b/client/galaxy/style/less/jstree.less
@@ -0,0 +1,989 @@
+/* jsTree default theme */
+.jstree-node,
+.jstree-children,
+.jstree-container-ul {
+  display: block;
+  margin: 0;
+  padding: 0;
+  list-style-type: none;
+  list-style-image: none;
+}
+.jstree-node {
+  white-space: nowrap;
+}
+.jstree-anchor {
+  display: inline-block;
+  color: black;
+  white-space: nowrap;
+  padding: 0 4px 0 1px;
+  margin: 0;
+  vertical-align: top;
+}
+.jstree-anchor:focus {
+  outline: 0;
+}
+.jstree-anchor,
+.jstree-anchor:link,
+.jstree-anchor:visited,
+.jstree-anchor:hover,
+.jstree-anchor:active {
+  text-decoration: none;
+  color: inherit;
+}
+.jstree-icon {
+  display: inline-block;
+  text-decoration: none;
+  margin: 0;
+  padding: 0;
+  vertical-align: top;
+  text-align: center;
+}
+.jstree-icon:empty {
+  display: inline-block;
+  text-decoration: none;
+  margin: 0;
+  padding: 0;
+  vertical-align: top;
+  text-align: center;
+}
+.jstree-ocl {
+  cursor: pointer;
+}
+.jstree-leaf > .jstree-ocl {
+  cursor: default;
+}
+.jstree .jstree-open > .jstree-children {
+  display: block;
+}
+.jstree .jstree-closed > .jstree-children,
+.jstree .jstree-leaf > .jstree-children {
+  display: none;
+}
+.jstree-anchor > .jstree-themeicon {
+  margin-right: 2px;
+}
+.jstree-no-icons .jstree-themeicon,
+.jstree-anchor > .jstree-themeicon-hidden {
+  display: none;
+}
+.jstree-rtl .jstree-anchor {
+  padding: 0 1px 0 4px;
+}
+.jstree-rtl .jstree-anchor > .jstree-themeicon {
+  margin-left: 2px;
+  margin-right: 0;
+}
+.jstree-rtl .jstree-node {
+  margin-left: 0;
+}
+.jstree-rtl .jstree-container-ul > .jstree-node {
+  margin-right: 0;
+}
+.jstree-wholerow-ul {
+  position: relative;
+  display: inline-block;
+  min-width: 100%;
+}
+.jstree-wholerow-ul .jstree-leaf > .jstree-ocl {
+  cursor: pointer;
+}
+.jstree-wholerow-ul .jstree-anchor,
+.jstree-wholerow-ul .jstree-icon {
+  position: relative;
+}
+.jstree-wholerow-ul .jstree-wholerow {
+  width: 100%;
+  cursor: pointer;
+  position: absolute;
+  left: 0;
+  -webkit-user-select: none;
+  -moz-user-select: none;
+  -ms-user-select: none;
+  user-select: none;
+}
+.vakata-context {
+  display: none;
+}
+.vakata-context,
+.vakata-context ul {
+  margin: 0;
+  padding: 2px;
+  position: absolute;
+  background: #f5f5f5;
+  border: 1px solid #979797;
+  -moz-box-shadow: 5px 5px 4px -4px #666666;
+  -webkit-box-shadow: 2px 2px 2px #999999;
+  box-shadow: 2px 2px 2px #999999;
+}
+.vakata-context ul {
+  list-style: none;
+  left: 100%;
+  margin-top: -2.7em;
+  margin-left: -4px;
+}
+.vakata-context .vakata-context-right ul {
+  left: auto;
+  right: 100%;
+  margin-left: auto;
+  margin-right: -4px;
+}
+.vakata-context li {
+  list-style: none;
+  display: inline;
+}
+.vakata-context li > a {
+  display: block;
+  padding: 0 2em 0 2em;
+  text-decoration: none;
+  width: auto;
+  color: black;
+  white-space: nowrap;
+  line-height: 2.4em;
+  -moz-text-shadow: 1px 1px 0 white;
+  -webkit-text-shadow: 1px 1px 0 white;
+  text-shadow: 1px 1px 0 white;
+  -moz-border-radius: 1px;
+  -webkit-border-radius: 1px;
+  border-radius: 1px;
+}
+.vakata-context li > a:hover {
+  position: relative;
+  background-color: #e8eff7;
+  -moz-box-shadow: 0 0 2px #0a6aa1;
+  -webkit-box-shadow: 0 0 2px #0a6aa1;
+  box-shadow: 0 0 2px #0a6aa1;
+}
+.vakata-context li > a.vakata-context-parent {
+  background-image: url("data:image/gif;base64,R0lGODlhCwAHAIAAACgoKP///yH5BAEAAAEALAAAAAALAAcAAAIORI4JlrqN1oMSnmmZDQUAOw==");
+  background-position: right center;
+  background-repeat: no-repeat;
+}
+.vakata-context li > a:focus {
+  outline: 0;
+}
+.vakata-context .vakata-context-hover > a {
+  position: relative;
+  background-color: #e8eff7;
+  -moz-box-shadow: 0 0 2px #0a6aa1;
+  -webkit-box-shadow: 0 0 2px #0a6aa1;
+  box-shadow: 0 0 2px #0a6aa1;
+}
+.vakata-context .vakata-context-separator > a,
+.vakata-context .vakata-context-separator > a:hover {
+  background: white;
+  border: 0;
+  border-top: 1px solid #e2e3e3;
+  height: 1px;
+  min-height: 1px;
+  max-height: 1px;
+  padding: 0;
+  margin: 0 0 0 2.4em;
+  border-left: 1px solid #e0e0e0;
+  -moz-text-shadow: 0 0 0 transparent;
+  -webkit-text-shadow: 0 0 0 transparent;
+  text-shadow: 0 0 0 transparent;
+  -moz-box-shadow: 0 0 0 transparent;
+  -webkit-box-shadow: 0 0 0 transparent;
+  box-shadow: 0 0 0 transparent;
+  -moz-border-radius: 0;
+  -webkit-border-radius: 0;
+  border-radius: 0;
+}
+.vakata-context .vakata-contextmenu-disabled a,
+.vakata-context .vakata-contextmenu-disabled a:hover {
+  color: silver;
+  background-color: transparent;
+  border: 0;
+  box-shadow: 0 0 0;
+}
+.vakata-context li > a > i {
+  text-decoration: none;
+  display: inline-block;
+  width: 2.4em;
+  height: 2.4em;
+  background: transparent;
+  margin: 0 0 0 -2em;
+  vertical-align: top;
+  text-align: center;
+  line-height: 2.4em;
+}
+.vakata-context li > a > i:empty {
+  width: 2.4em;
+  line-height: 2.4em;
+}
+.vakata-context li > a .vakata-contextmenu-sep {
+  display: inline-block;
+  width: 1px;
+  height: 2.4em;
+  background: white;
+  margin: 0 0.5em 0 0;
+  border-left: 1px solid #e2e3e3;
+}
+.vakata-context .vakata-contextmenu-shortcut {
+  font-size: 0.8em;
+  color: silver;
+  opacity: 0.5;
+  display: none;
+}
+.vakata-context-rtl ul {
+  left: auto;
+  right: 100%;
+  margin-left: auto;
+  margin-right: -4px;
+}
+.vakata-context-rtl li > a.vakata-context-parent {
+  background-image: url("data:image/gif;base64,R0lGODlhCwAHAIAAACgoKP///yH5BAEAAAEALAAAAAALAAcAAAINjI+AC7rWHIsPtmoxLAA7");
+  background-position: left center;
+  background-repeat: no-repeat;
+}
+.vakata-context-rtl .vakata-context-separator > a {
+  margin: 0 2.4em 0 0;
+  border-left: 0;
+  border-right: 1px solid #e2e3e3;
+}
+.vakata-context-rtl .vakata-context-left ul {
+  right: auto;
+  left: 100%;
+  margin-left: -4px;
+  margin-right: auto;
+}
+.vakata-context-rtl li > a > i {
+  margin: 0 -2em 0 0;
+}
+.vakata-context-rtl li > a .vakata-contextmenu-sep {
+  margin: 0 0 0 0.5em;
+  border-left-color: white;
+  background: #e2e3e3;
+}
+#jstree-marker {
+  position: absolute;
+  top: 0;
+  left: 0;
+  margin: -5px 0 0 0;
+  padding: 0;
+  border-right: 0;
+  border-top: 5px solid transparent;
+  border-bottom: 5px solid transparent;
+  border-left: 5px solid;
+  width: 0;
+  height: 0;
+  font-size: 0;
+  line-height: 0;
+}
+#jstree-dnd {
+  line-height: 16px;
+  margin: 0;
+  padding: 4px;
+}
+#jstree-dnd .jstree-icon,
+#jstree-dnd .jstree-copy {
+  display: inline-block;
+  text-decoration: none;
+  margin: 0 2px 0 0;
+  padding: 0;
+  width: 16px;
+  height: 16px;
+}
+#jstree-dnd .jstree-ok {
+  background: green;
+}
+#jstree-dnd .jstree-er {
+  background: red;
+}
+#jstree-dnd .jstree-copy {
+  margin: 0 2px 0 2px;
+}
+.jstree-default .jstree-node,
+.jstree-default .jstree-icon {
+  background-repeat: no-repeat;
+  background-color: transparent;
+}
+.jstree-default .jstree-anchor,
+.jstree-default .jstree-wholerow {
+  transition: background-color 0.15s, box-shadow 0.15s;
+}
+.jstree-default .jstree-hovered {
+  background: #e7f4f9;
+  border-radius: 2px;
+  box-shadow: inset 0 0 1px #ccc;
+}
+.jstree-default .jstree-clicked {
+  background: #beebff;
+  border-radius: 2px;
+  box-shadow: inset 0 0 1px #999;
+}
+.jstree-default .jstree-no-icons .jstree-anchor > .jstree-themeicon {
+  display: none;
+}
+.jstree-default .jstree-disabled {
+  background: transparent;
+  color: #666;
+}
+.jstree-default .jstree-disabled.jstree-hovered {
+  background: transparent;
+  box-shadow: none;
+}
+.jstree-default .jstree-disabled.jstree-clicked {
+  background: #efefef;
+}
+.jstree-default .jstree-disabled > .jstree-icon {
+  opacity: 0.8;
+  filter: url("data:image/svg+xml;utf8,<svg xmlns=\'http://www.w3.org/2000/svg\'><filter id=\'jstree-grayscale\'><feColorMatrix type=\'matrix\' values=\'0.3333 0.3333 0.3333 0 0 0.3333 0.3333 0.3333 0 0 0.3333 0.3333 0.3333 0 0 0 0 0 1 0\'/></filter></svg>#jstree-grayscale");
+  /* Firefox 10+ */
+  filter: gray;
+  /* IE6-9 */
+  -webkit-filter: grayscale(100%);
+  /* Chrome 19+ & Safari 6+ */
+}
+.jstree-default .jstree-search {
+  font-style: italic;
+  color: #8b0000;
+  font-weight: bold;
+}
+.jstree-default .jstree-no-checkboxes .jstree-checkbox {
+  display: none !important;
+}
+.jstree-default.jstree-checkbox-no-clicked .jstree-clicked {
+  background: transparent;
+  box-shadow: none;
+}
+.jstree-default.jstree-checkbox-no-clicked .jstree-clicked.jstree-hovered {
+  background: #e7f4f9;
+}
+.jstree-default.jstree-checkbox-no-clicked > .jstree-wholerow-ul .jstree-wholerow-clicked {
+  background: transparent;
+}
+.jstree-default.jstree-checkbox-no-clicked > .jstree-wholerow-ul .jstree-wholerow-clicked.jstree-wholerow-hovered {
+  background: #e7f4f9;
+}
+#jstree-dnd.jstree-default .jstree-ok,
+#jstree-dnd.jstree-default .jstree-er {
+  background-image: url("../images/jstree/32px.png");
+  background-repeat: no-repeat;
+  background-color: transparent;
+}
+#jstree-dnd.jstree-default i {
+  background: transparent;
+  width: 16px;
+  height: 16px;
+}
+#jstree-dnd.jstree-default .jstree-ok {
+  background-position: -9px -71px;
+}
+#jstree-dnd.jstree-default .jstree-er {
+  background-position: -39px -71px;
+}
+.jstree-default > .jstree-striped {
+  background: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAAkCAMAAAB/qqA+AAAABlBMVEUAAAAAAAClZ7nPAAAAAnRSTlMNAMM9s3UAAAAXSURBVHjajcEBAQAAAIKg/H/aCQZ70AUBjAATb6YPDgAAAABJRU5ErkJggg==") left top repeat;
+}
+.jstree-default > .jstree-wholerow-ul .jstree-hovered,
+.jstree-default > .jstree-wholerow-ul .jstree-clicked {
+  background: transparent;
+  box-shadow: none;
+  border-radius: 0;
+}
+.jstree-default .jstree-wholerow {
+  -moz-box-sizing: border-box;
+  -webkit-box-sizing: border-box;
+  box-sizing: border-box;
+}
+.jstree-default .jstree-wholerow-hovered {
+  background: #e7f4f9;
+}
+.jstree-default .jstree-wholerow-clicked {
+  background: #beebff;
+  background: -moz-linear-gradient(top, #beebff 0%, #a8e4ff 100%);
+  background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #beebff), color-stop(100%, #a8e4ff));
+  background: -webkit-linear-gradient(top, #beebff 0%, #a8e4ff 100%);
+  background: -o-linear-gradient(top, #beebff 0%, #a8e4ff 100%);
+  background: -ms-linear-gradient(top, #beebff 0%, #a8e4ff 100%);
+  background: linear-gradient(to bottom, #beebff 0%, #a8e4ff 100%);
+  /*filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='@color1', endColorstr='@color2',GradientType=0 );*/
+}
+.jstree-default .jstree-node {
+  min-height: 24px;
+  line-height: 24px;
+  margin-left: 24px;
+  min-width: 24px;
+}
+.jstree-default .jstree-anchor {
+  line-height: 24px;
+  height: 24px;
+}
+.jstree-default .jstree-icon {
+  width: 24px;
+  height: 24px;
+  line-height: 24px;
+}
+.jstree-default .jstree-icon:empty {
+  width: 24px;
+  height: 24px;
+  line-height: 24px;
+}
+.jstree-default.jstree-rtl .jstree-node {
+  margin-right: 24px;
+}
+.jstree-default .jstree-wholerow {
+  height: 24px;
+}
+.jstree-default .jstree-node,
+.jstree-default .jstree-icon {
+  background-image: url("../images/jstree/32px.png");
+}
+.jstree-default .jstree-node {
+  background-position: -292px -4px;
+  background-repeat: repeat-y;
+}
+.jstree-default .jstree-last {
+  background: transparent;
+}
+.jstree-default .jstree-open > .jstree-ocl {
+  background-position: -132px -4px;
+}
+.jstree-default .jstree-closed > .jstree-ocl {
+  background-position: -100px -4px;
+}
+.jstree-default .jstree-leaf > .jstree-ocl {
+  background-position: -68px -4px;
+}
+.jstree-default .jstree-themeicon {
+  background-position: -260px -4px;
+}
+.jstree-default > .jstree-no-dots .jstree-node,
+.jstree-default > .jstree-no-dots .jstree-leaf > .jstree-ocl {
+  background: transparent;
+}
+.jstree-default > .jstree-no-dots .jstree-open > .jstree-ocl {
+  background-position: -36px -4px;
+}
+.jstree-default > .jstree-no-dots .jstree-closed > .jstree-ocl {
+  background-position: -4px -4px;
+}
+.jstree-default .jstree-disabled {
+  background: transparent;
+}
+.jstree-default .jstree-disabled.jstree-hovered {
+  background: transparent;
+}
+.jstree-default .jstree-disabled.jstree-clicked {
+  background: #efefef;
+}
+.jstree-default .jstree-checkbox {
+  background-position: -164px -4px;
+}
+.jstree-default .jstree-checkbox:hover {
+  background-position: -164px -36px;
+}
+.jstree-default.jstree-checkbox-selection .jstree-clicked > .jstree-checkbox,
+.jstree-default .jstree-checked > .jstree-checkbox {
+  background-position: -228px -4px;
+}
+.jstree-default.jstree-checkbox-selection .jstree-clicked > .jstree-checkbox:hover,
+.jstree-default .jstree-checked > .jstree-checkbox:hover {
+  background-position: -228px -36px;
+}
+.jstree-default .jstree-anchor > .jstree-undetermined {
+  background-position: -196px -4px;
+}
+.jstree-default .jstree-anchor > .jstree-undetermined:hover {
+  background-position: -196px -36px;
+}
+.jstree-default > .jstree-striped {
+  background-size: auto 48px;
+}
+.jstree-default.jstree-rtl .jstree-node {
+  background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAACAQMAAAB49I5GAAAABlBMVEUAAAAdHRvEkCwcAAAAAXRSTlMAQObYZgAAAAxJREFUCNdjAAMOBgAAGAAJMwQHdQAAAABJRU5ErkJggg==");
+  background-position: 100% 1px;
+  background-repeat: repeat-y;
+}
+.jstree-default.jstree-rtl .jstree-last {
+  background: transparent;
+}
+.jstree-default.jstree-rtl .jstree-open > .jstree-ocl {
+  background-position: -132px -36px;
+}
+.jstree-default.jstree-rtl .jstree-closed > .jstree-ocl {
+  background-position: -100px -36px;
+}
+.jstree-default.jstree-rtl .jstree-leaf > .jstree-ocl {
+  background-position: -68px -36px;
+}
+.jstree-default.jstree-rtl > .jstree-no-dots .jstree-node,
+.jstree-default.jstree-rtl > .jstree-no-dots .jstree-leaf > .jstree-ocl {
+  background: transparent;
+}
+.jstree-default.jstree-rtl > .jstree-no-dots .jstree-open > .jstree-ocl {
+  background-position: -36px -36px;
+}
+.jstree-default.jstree-rtl > .jstree-no-dots .jstree-closed > .jstree-ocl {
+  background-position: -4px -36px;
+}
+.jstree-default .jstree-themeicon-custom {
+  background-color: transparent;
+  background-image: none;
+  background-position: 0 0;
+}
+.jstree-default > .jstree-container-ul .jstree-loading > .jstree-ocl {
+  background: url("../images/jstree/throbber.gif") center center no-repeat;
+}
+.jstree-default .jstree-file {
+  background: url("../images/jstree/32px.png") -100px -68px no-repeat;
+}
+.jstree-default .jstree-folder {
+  background: url("../images/jstree/32px.png") -260px -4px no-repeat;
+}
+.jstree-default.jstree-rtl .jstree-node {
+  background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAACAQMAAAB49I5GAAAABlBMVEUAAAAdHRvEkCwcAAAAAXRSTlMAQObYZgAAAAxJREFUCNdjAAMOBgAAGAAJMwQHdQAAAABJRU5ErkJggg==");
+}
+.jstree-default.jstree-rtl .jstree-last {
+  background: transparent;
+}
+.jstree-default-small .jstree-node {
+  min-height: 18px;
+  line-height: 18px;
+  margin-left: 18px;
+  min-width: 18px;
+}
+.jstree-default-small .jstree-anchor {
+  line-height: 18px;
+  height: 18px;
+}
+.jstree-default-small .jstree-icon {
+  width: 18px;
+  height: 18px;
+  line-height: 18px;
+}
+.jstree-default-small .jstree-icon:empty {
+  width: 18px;
+  height: 18px;
+  line-height: 18px;
+}
+.jstree-default-small.jstree-rtl .jstree-node {
+  margin-right: 18px;
+}
+.jstree-default-small .jstree-wholerow {
+  height: 18px;
+}
+.jstree-default-small .jstree-node,
+.jstree-default-small .jstree-icon {
+  background-image: url("../images/jstree/32px.png");
+}
+.jstree-default-small .jstree-node {
+  background-position: -295px -7px;
+  background-repeat: repeat-y;
+}
+.jstree-default-small .jstree-last {
+  background: transparent;
+}
+.jstree-default-small .jstree-open > .jstree-ocl {
+  background-position: -135px -7px;
+}
+.jstree-default-small .jstree-closed > .jstree-ocl {
+  background-position: -103px -7px;
+}
+.jstree-default-small .jstree-leaf > .jstree-ocl {
+  background-position: -71px -7px;
+}
+.jstree-default-small .jstree-themeicon {
+  background-position: -263px -7px;
+}
+.jstree-default-small > .jstree-no-dots .jstree-node,
+.jstree-default-small > .jstree-no-dots .jstree-leaf > .jstree-ocl {
+  background: transparent;
+}
+.jstree-default-small > .jstree-no-dots .jstree-open > .jstree-ocl {
+  background-position: -39px -7px;
+}
+.jstree-default-small > .jstree-no-dots .jstree-closed > .jstree-ocl {
+  background-position: -7px -7px;
+}
+.jstree-default-small .jstree-disabled {
+  background: transparent;
+}
+.jstree-default-small .jstree-disabled.jstree-hovered {
+  background: transparent;
+}
+.jstree-default-small .jstree-disabled.jstree-clicked {
+  background: #efefef;
+}
+.jstree-default-small .jstree-checkbox {
+  background-position: -167px -7px;
+}
+.jstree-default-small .jstree-checkbox:hover {
+  background-position: -167px -39px;
+}
+.jstree-default-small.jstree-checkbox-selection .jstree-clicked > .jstree-checkbox,
+.jstree-default-small .jstree-checked > .jstree-checkbox {
+  background-position: -231px -7px;
+}
+.jstree-default-small.jstree-checkbox-selection .jstree-clicked > .jstree-checkbox:hover,
+.jstree-default-small .jstree-checked > .jstree-checkbox:hover {
+  background-position: -231px -39px;
+}
+.jstree-default-small .jstree-anchor > .jstree-undetermined {
+  background-position: -199px -7px;
+}
+.jstree-default-small .jstree-anchor > .jstree-undetermined:hover {
+  background-position: -199px -39px;
+}
+.jstree-default-small > .jstree-striped {
+  background-size: auto 36px;
+}
+.jstree-default-small.jstree-rtl .jstree-node {
+  background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAACAQMAAAB49I5GAAAABlBMVEUAAAAdHRvEkCwcAAAAAXRSTlMAQObYZgAAAAxJREFUCNdjAAMOBgAAGAAJMwQHdQAAAABJRU5ErkJggg==");
+  background-position: 100% 1px;
+  background-repeat: repeat-y;
+}
+.jstree-default-small.jstree-rtl .jstree-last {
+  background: transparent;
+}
+.jstree-default-small.jstree-rtl .jstree-open > .jstree-ocl {
+  background-position: -135px -39px;
+}
+.jstree-default-small.jstree-rtl .jstree-closed > .jstree-ocl {
+  background-position: -103px -39px;
+}
+.jstree-default-small.jstree-rtl .jstree-leaf > .jstree-ocl {
+  background-position: -71px -39px;
+}
+.jstree-default-small.jstree-rtl > .jstree-no-dots .jstree-node,
+.jstree-default-small.jstree-rtl > .jstree-no-dots .jstree-leaf > .jstree-ocl {
+  background: transparent;
+}
+.jstree-default-small.jstree-rtl > .jstree-no-dots .jstree-open > .jstree-ocl {
+  background-position: -39px -39px;
+}
+.jstree-default-small.jstree-rtl > .jstree-no-dots .jstree-closed > .jstree-ocl {
+  background-position: -7px -39px;
+}
+.jstree-default-small .jstree-themeicon-custom {
+  background-color: transparent;
+  background-image: none;
+  background-position: 0 0;
+}
+.jstree-default-small > .jstree-container-ul .jstree-loading > .jstree-ocl {
+  background: url("../images/jstree/throbber.gif") center center no-repeat;
+}
+.jstree-default-small .jstree-file {
+  background: url("../images/jstree/32px.png") -103px -71px no-repeat;
+}
+.jstree-default-small .jstree-folder {
+  background: url("../images/jstree/32px.png") -263px -7px no-repeat;
+}
+.jstree-default-small.jstree-rtl .jstree-node {
+  background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABIAAAACAQMAAABv1h6PAAAABlBMVEUAAAAdHRvEkCwcAAAAAXRSTlMAQObYZgAAAAxJREFUCNdjAAMHBgAAiABBI4gz9AAAAABJRU5ErkJggg==");
+}
+.jstree-default-small.jstree-rtl .jstree-last {
+  background: transparent;
+}
+.jstree-default-large .jstree-node {
+  min-height: 32px;
+  line-height: 32px;
+  margin-left: 32px;
+  min-width: 32px;
+}
+.jstree-default-large .jstree-anchor {
+  line-height: 32px;
+  height: 32px;
+}
+.jstree-default-large .jstree-icon {
+  width: 32px;
+  height: 32px;
+  line-height: 32px;
+}
+.jstree-default-large .jstree-icon:empty {
+  width: 32px;
+  height: 32px;
+  line-height: 32px;
+}
+.jstree-default-large.jstree-rtl .jstree-node {
+  margin-right: 32px;
+}
+.jstree-default-large .jstree-wholerow {
+  height: 32px;
+}
+.jstree-default-large .jstree-node,
+.jstree-default-large .jstree-icon {
+  background-image: url("../images/jstree/32px.png");
+}
+.jstree-default-large .jstree-node {
+  background-position: -288px 0px;
+  background-repeat: repeat-y;
+}
+.jstree-default-large .jstree-last {
+  background: transparent;
+}
+.jstree-default-large .jstree-open > .jstree-ocl {
+  background-position: -128px 0px;
+}
+.jstree-default-large .jstree-closed > .jstree-ocl {
+  background-position: -96px 0px;
+}
+.jstree-default-large .jstree-leaf > .jstree-ocl {
+  background-position: -64px 0px;
+}
+.jstree-default-large .jstree-themeicon {
+  background-position: -256px 0px;
+}
+.jstree-default-large > .jstree-no-dots .jstree-node,
+.jstree-default-large > .jstree-no-dots .jstree-leaf > .jstree-ocl {
+  background: transparent;
+}
+.jstree-default-large > .jstree-no-dots .jstree-open > .jstree-ocl {
+  background-position: -32px 0px;
+}
+.jstree-default-large > .jstree-no-dots .jstree-closed > .jstree-ocl {
+  background-position: 0px 0px;
+}
+.jstree-default-large .jstree-disabled {
+  background: transparent;
+}
+.jstree-default-large .jstree-disabled.jstree-hovered {
+  background: transparent;
+}
+.jstree-default-large .jstree-disabled.jstree-clicked {
+  background: #efefef;
+}
+.jstree-default-large .jstree-checkbox {
+  background-position: -160px 0px;
+}
+.jstree-default-large .jstree-checkbox:hover {
+  background-position: -160px -32px;
+}
+.jstree-default-large.jstree-checkbox-selection .jstree-clicked > .jstree-checkbox,
+.jstree-default-large .jstree-checked > .jstree-checkbox {
+  background-position: -224px 0px;
+}
+.jstree-default-large.jstree-checkbox-selection .jstree-clicked > .jstree-checkbox:hover,
+.jstree-default-large .jstree-checked > .jstree-checkbox:hover {
+  background-position: -224px -32px;
+}
+.jstree-default-large .jstree-anchor > .jstree-undetermined {
+  background-position: -192px 0px;
+}
+.jstree-default-large .jstree-anchor > .jstree-undetermined:hover {
+  background-position: -192px -32px;
+}
+.jstree-default-large > .jstree-striped {
+  background-size: auto 64px;
+}
+.jstree-default-large.jstree-rtl .jstree-node {
+  background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAACAQMAAAB49I5GAAAABlBMVEUAAAAdHRvEkCwcAAAAAXRSTlMAQObYZgAAAAxJREFUCNdjAAMOBgAAGAAJMwQHdQAAAABJRU5ErkJggg==");
+  background-position: 100% 1px;
+  background-repeat: repeat-y;
+}
+.jstree-default-large.jstree-rtl .jstree-last {
+  background: transparent;
+}
+.jstree-default-large.jstree-rtl .jstree-open > .jstree-ocl {
+  background-position: -128px -32px;
+}
+.jstree-default-large.jstree-rtl .jstree-closed > .jstree-ocl {
+  background-position: -96px -32px;
+}
+.jstree-default-large.jstree-rtl .jstree-leaf > .jstree-ocl {
+  background-position: -64px -32px;
+}
+.jstree-default-large.jstree-rtl > .jstree-no-dots .jstree-node,
+.jstree-default-large.jstree-rtl > .jstree-no-dots .jstree-leaf > .jstree-ocl {
+  background: transparent;
+}
+.jstree-default-large.jstree-rtl > .jstree-no-dots .jstree-open > .jstree-ocl {
+  background-position: -32px -32px;
+}
+.jstree-default-large.jstree-rtl > .jstree-no-dots .jstree-closed > .jstree-ocl {
+  background-position: 0px -32px;
+}
+.jstree-default-large .jstree-themeicon-custom {
+  background-color: transparent;
+  background-image: none;
+  background-position: 0 0;
+}
+.jstree-default-large > .jstree-container-ul .jstree-loading > .jstree-ocl {
+  background: url("../images/jstree/throbber.gif") center center no-repeat;
+}
+.jstree-default-large .jstree-file {
+  background: url("../images/jstree/32px.png") -96px -64px no-repeat;
+}
+.jstree-default-large .jstree-folder {
+  background: url("../images/jstree/32px.png") -256px 0px no-repeat;
+}
+.jstree-default-large.jstree-rtl .jstree-node {
+  background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAACAQMAAAAD0EyKAAAABlBMVEUAAAAdHRvEkCwcAAAAAXRSTlMAQObYZgAAAAxJREFUCNdjgIIGBgABCgCBvVLXcAAAAABJRU5ErkJggg==");
+}
+.jstree-default-large.jstree-rtl .jstree-last {
+  background: transparent;
+}
+@media (max-width: 768px) {
+  #jstree-dnd.jstree-dnd-responsive {
+    line-height: 40px;
+    font-weight: bold;
+    font-size: 1.1em;
+    text-shadow: 1px 1px white;
+  }
+  #jstree-dnd.jstree-dnd-responsive > i {
+    background: transparent;
+    width: 40px;
+    height: 40px;
+  }
+  #jstree-dnd.jstree-dnd-responsive > .jstree-ok {
+    background-image: url("../images/jstree/40px.png");
+    background-position: 0 -200px;
+    background-size: 120px 240px;
+  }
+  #jstree-dnd.jstree-dnd-responsive > .jstree-er {
+    background-image: url("../images/jstree/40px.png");
+    background-position: -40px -200px;
+    background-size: 120px 240px;
+  }
+  #jstree-marker.jstree-dnd-responsive {
+    border-left-width: 10px;
+    border-top-width: 10px;
+    border-bottom-width: 10px;
+    margin-top: -10px;
+  }
+}
+@media (max-width: 768px) {
+  .jstree-default-responsive {
+    /*
+		.jstree-open > .jstree-ocl,
+		.jstree-closed > .jstree-ocl { border-radius:20px; background-color:white; }
+		*/
+  }
+  .jstree-default-responsive .jstree-icon {
+    background-image: url("../images/jstree/40px.png");
+  }
+  .jstree-default-responsive .jstree-node,
+  .jstree-default-responsive .jstree-leaf > .jstree-ocl {
+    background: transparent;
+  }
+  .jstree-default-responsive .jstree-node {
+    min-height: 40px;
+    line-height: 40px;
+    margin-left: 40px;
+    min-width: 40px;
+    white-space: nowrap;
+  }
+  .jstree-default-responsive .jstree-anchor {
+    line-height: 40px;
+    height: 40px;
+  }
+  .jstree-default-responsive .jstree-icon,
+  .jstree-default-responsive .jstree-icon:empty {
+    width: 40px;
+    height: 40px;
+    line-height: 40px;
+  }
+  .jstree-default-responsive > .jstree-container-ul > .jstree-node {
+    margin-left: 0;
+  }
+  .jstree-default-responsive.jstree-rtl .jstree-node {
+    margin-left: 0;
+    margin-right: 40px;
+  }
+  .jstree-default-responsive.jstree-rtl .jstree-container-ul > .jstree-node {
+    margin-right: 0;
+  }
+  .jstree-default-responsive .jstree-ocl,
+  .jstree-default-responsive .jstree-themeicon,
+  .jstree-default-responsive .jstree-checkbox {
+    background-size: 120px 240px;
+  }
+  .jstree-default-responsive .jstree-leaf > .jstree-ocl {
+    background: transparent;
+  }
+  .jstree-default-responsive .jstree-open > .jstree-ocl {
+    background-position: 0 0px !important;
+  }
+  .jstree-default-responsive .jstree-closed > .jstree-ocl {
+    background-position: 0 -40px !important;
+  }
+  .jstree-default-responsive.jstree-rtl .jstree-closed > .jstree-ocl {
+    background-position: -40px 0px !important;
+  }
+  .jstree-default-responsive .jstree-themeicon {
+    background-position: -40px -40px;
+  }
+  .jstree-default-responsive .jstree-checkbox,
+  .jstree-default-responsive .jstree-checkbox:hover {
+    background-position: -40px -80px;
+  }
+  .jstree-default-responsive.jstree-checkbox-selection .jstree-clicked > .jstree-checkbox,
+  .jstree-default-responsive.jstree-checkbox-selection .jstree-clicked > .jstree-checkbox:hover,
+  .jstree-default-responsive .jstree-checked > .jstree-checkbox,
+  .jstree-default-responsive .jstree-checked > .jstree-checkbox:hover {
+    background-position: 0 -80px;
+  }
+  .jstree-default-responsive .jstree-anchor > .jstree-undetermined,
+  .jstree-default-responsive .jstree-anchor > .jstree-undetermined:hover {
+    background-position: 0 -120px;
+  }
+  .jstree-default-responsive .jstree-anchor {
+    font-weight: bold;
+    font-size: 1.1em;
+    text-shadow: 1px 1px white;
+  }
+  .jstree-default-responsive > .jstree-striped {
+    background: transparent;
+  }
+  .jstree-default-responsive .jstree-wholerow {
+    border-top: 1px solid rgba(255, 255, 255, 0.7);
+    border-bottom: 1px solid rgba(64, 64, 64, 0.2);
+    background: #ebebeb;
+    height: 40px;
+  }
+  .jstree-default-responsive .jstree-wholerow-hovered {
+    background: #e7f4f9;
+  }
+  .jstree-default-responsive .jstree-wholerow-clicked {
+    background: #beebff;
+  }
+  .jstree-default-responsive .jstree-children .jstree-last > .jstree-wholerow {
+    box-shadow: inset 0 -6px 3px -5px #666666;
+  }
+  .jstree-default-responsive .jstree-children .jstree-open > .jstree-wholerow {
+    box-shadow: inset 0 6px 3px -5px #666666;
+    border-top: 0;
+  }
+  .jstree-default-responsive .jstree-children .jstree-open + .jstree-open {
+    box-shadow: none;
+  }
+  .jstree-default-responsive .jstree-node,
+  .jstree-default-responsive .jstree-icon,
+  .jstree-default-responsive .jstree-node > .jstree-ocl,
+  .jstree-default-responsive .jstree-themeicon,
+  .jstree-default-responsive .jstree-checkbox {
+    background-image: url("../images/jstree/40px.png");
+    background-size: 120px 240px;
+  }
+  .jstree-default-responsive .jstree-node {
+    background-position: -80px 0;
+    background-repeat: repeat-y;
+  }
+  .jstree-default-responsive .jstree-last {
+    background: transparent;
+  }
+  .jstree-default-responsive .jstree-leaf > .jstree-ocl {
+    background-position: -40px -120px;
+  }
+  .jstree-default-responsive .jstree-last > .jstree-ocl {
+    background-position: -40px -160px;
+  }
+  .jstree-default-responsive .jstree-themeicon-custom {
+    background-color: transparent;
+    background-image: none;
+    background-position: 0 0;
+  }
+  .jstree-default-responsive .jstree-file {
+    background: url("../images/jstree/40px.png") 0 -160px no-repeat;
+    background-size: 120px 240px;
+  }
+  .jstree-default-responsive .jstree-folder {
+    background: url("../images/jstree/40px.png") -40px -40px no-repeat;
+    background-size: 120px 240px;
+  }
+}
+.jstree-default > .jstree-container-ul > .jstree-node {
+  margin-left: 0;
+  margin-right: 0;
+}
diff --git a/client/galaxy/style/less/library.less b/client/galaxy/style/less/library.less
new file mode 100644
index 0000000..c9074f6
--- /dev/null
+++ b/client/galaxy/style/less/library.less
@@ -0,0 +1,201 @@
+@import "galaxy_bootstrap/variables.less";
+@import "galaxy_variables.less";
+
+.library_style_container{
+
+    width: 95%;
+    margin: auto;
+    margin-top: 2em;
+    overflow: auto !important;
+
+.fa{
+    font-size: 12px;
+}
+.fa-globe{
+    font-size: initial;
+    margin-left: 0.6em;
+}
+.libraryRow{
+    background-color: @table-heading-bg;
+}
+.datasetHighlighted{
+    background-color: @table-bg-accent;
+}
+.libraryItemDeleted-True{
+    font-style: italic;
+}
+div.libraryItemBody{
+    padding: 4px 4px 2px 4px;
+}
+li.folderRow, li.datasetRow{
+    border-top: solid 1px @table-border;
+}
+li.folderRow:hover, li.datasetRow:hover{
+    background-color: @table-bg-accent;
+}
+td.right-center{
+    vertical-align: middle !important;
+    text-align: right;
+}
+.library-genome-select{
+    max-width: 350px;
+}
+.library-extension-select{
+    max-width: 140px;
+}
+.library_table{
+    td{
+        border-top:1px solid #5f6990 !important;
+    }
+    th{
+        border-bottom: 2px solid #5f6990 !important;
+    }
+    a{
+        color: #0A143D;
+        &:hover{
+            color: maroon;
+        }
+    }
+}
+tr.light td{
+    background-color: white;
+    color: black;
+}
+tr.light:hover td{
+    background-color: #f5e8cc;
+}
+tr.dark td{
+    background-color: #d6b161;
+    color: white;
+}
+tr.dark:hover td{
+    background-color: #ebd4a4;
+    color: white;
+}
+a.dark{
+    color: white;
+}
+.dataset_table{
+    tr, th, td{
+        border: none;
+    }
+    .dataset-first-column{
+        width: 30%;
+    }
+}
+th.button_heading{
+    width: 2em;
+}
+.bigdrop.select2-container .select2-results{
+    max-height: 300px;
+}
+.bigdrop .select2-results{
+    max-height: 300px;
+}
+.select2-container-multi{
+    width: 100%;
+}
+.roles-selection{
+    width: 66%;
+}
+#library_toolbar{
+    margin-bottom: 0.5em;
+    span{
+        margin-right: 0.2em;
+    }
+    .toolbar-item{
+        margin-left: 1em;
+    }
+}
+#libraries_element, #folder_items_element{
+    button{
+        margin-left: 0.5em;
+    }
+}
+.help-button{
+    float: right;
+}
+.pagination-sm{
+    height: 15px;
+}
+.library-paginator{
+    margin-left: 2em;
+}
+.import-type-switch{
+    text-decoration: underline;
+}
+.libimport-select-none,
+.libimport-select-all{
+    margin-left: 0.5em;
+}
+.library-modal-item{
+    width:90%;
+    margin-left: 1em;
+    margin-right: 1em;
+}
+.paginator-bottom{
+    width: 27em;
+    margin-left: auto;
+    margin-right: auto;
+    margin-top: 2em;
+    margin-bottom: 2em;
+}
+
+}
+
+// Follows the style of the deprecated admin libraries interface
+#library-grid{
+    ul{
+        list-style: none;
+    }
+    span.expandLink{
+        padding-left: 12px;
+        display: inline-block;
+        vertical-align: middle;
+        background: url(../images/silk/resultset_next.png) no-repeat;
+    }
+    .folderRow.expanded span.expandLink{
+        background: url(../images/silk/resultset_bottom.png) no-repeat;
+    }
+    .folderRow span.rowIcon{
+        float: left;
+        margin-right: 5px;
+        width: 16px;
+        height: 16px;
+        display: inline-block;
+        vertical-align: middle;
+        background: url(../images/silk/folder.png);
+    }
+    .libraryItem-error{
+        margin-right: 2px;
+        padding: 0 2px 0 2px;
+        border: 1px solid @state-danger-border;
+        background: @state-danger-bg;
+    }
+    .libraryItem-queued{
+        margin-right: 2px;
+        padding: 0 2px 0 2px;
+        border: 1px solid @state-default-border;
+        background: @state-default-bg;
+    }
+    .libraryItem-running{
+        margin-right: 2px;
+        padding: 0 2px 0 2px;
+        border: 1px solid @state-running-border;
+        background: @state-running-bg;
+    }
+    .libraryItem-upload{
+        margin-right: 2px;
+        padding: 0 2px 0 2px;
+        border: 1px solid @state-upload-border;
+        background: @state-upload-bg;
+    }
+}
+.libraryTitle{
+    th{
+        text-align: left;
+    }
+    .rowTitle{
+        padding: 2px;
+    }
+}
diff --git a/client/galaxy/style/less/list-item.less b/client/galaxy/style/less/list-item.less
new file mode 100644
index 0000000..ca5f61b
--- /dev/null
+++ b/client/galaxy/style/less/list-item.less
@@ -0,0 +1,232 @@
+// ---------------------------------------------------------------------------- generic panel list items
+.list-item {
+    border: 1px solid @border-default-color;
+
+    @vertical-gap: 8px;
+    .vertical-spacing {
+        margin-bottom: @vertical-gap;
+    }
+
+    .info-section {
+        border-radius: 3px;
+        border: 1px solid fadeout( @layout-border-color, 70% );
+        padding: 4px;
+    }
+
+    @spacing-top     : 6px;
+    @spacing-right   : 10px;
+    @spacing-bottom  : 6px;
+    @spacing-left    : 8px;
+    .padded {
+        padding: @spacing-top @spacing-right @spacing-bottom @spacing-left;
+    }
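+
+    // Note (comment added for clarity): the @spacing-* variables and .padded are
+    // declared inside .list-item, so they are scoped to this ruleset; .title-bar
+    // below reuses them via the bare `.padded;` mixin call.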
+
+    .warnings {
+        &:not(:empty) {
+            padding-top: 4px;
+        }
+        [class$=messagesmall] {
+            margin: @spacing-top @spacing-right ( @spacing-bottom - 4 ) @spacing-left;
+            font-size: 90%;
+            &:first-child {
+                margin-top: 0px;
+            }
+            &:last-child {
+                margin-bottom: 0px;
+            }
+        }
+    }
+
+    .help-text {
+        font-weight: normal;
+        color: #555;
+    }
+
+    .selector {
+        float: left;
+        // always initially hidden and zero width
+        display: none;
+        width: 32px;
+        height: 32px;
+        // HACK: negative bottom margin compensates for the padding-bottom in title-bar - very tweakish
+        margin: 0px 0px -( @spacing-bottom ) 0px;
+        padding: ( @spacing-top + 2 ) 0px 2px ( @spacing-left - 1 );
+        font-size: 80%;
+        color: @btn-default-color;
+        cursor: pointer;
+        vertical-align: middle;
+        // prevent text highlight when holding shift to select a range
+        .user-select(none);
+    }
+    .selector:hover {
+        color: maroon;
+    }
+
+    .title-bar {
+        cursor: pointer;
+        outline: none;
+        .padded;
+
+        .title {
+            display: inline;
+            font-weight: bold;
+            text-decoration: underline;
+            word-wrap: break-word;
+            word-break: break-all;
+            line-height: 16px;
+        }
+//TODO: show keyboard/tab focus for keyboard users
+        //&:focus {
+        //    color: maroon;
+        //}
+        .subtitle {
+            color: #777;
+            font-size: 90%;
+            a {
+                color: inherit;
+            }
+        }
+    }
+
+    .primary-actions {
+        .icon-btn-group();
+        float: right;
+        margin: @spacing-top @spacing-right 0;
+        .icon-btn {
+            margin-left: @icon-btn-margin;
+        }
+    }
+
+    .details {
+        display: none;
+        padding: 0px @spacing-right @spacing-bottom @spacing-left;
+
+        & > [class$=messagesmall] {
+            margin: 0px 0px 8px 0px;
+        }
+
+        label {
+            margin: 0px;
+            padding: 0px;
+            font-weight: normal;
+        }
+        .prompt {
+            .help-text;
+        }
+        .prompt:after {
+            content: ':';
+            margin-right: 4px;
+        }
+    }
+}
+
+// ---------------------------------------------------------------------------- generic panel list (of list-items)
+.list-panel {
+
+    overflow: hidden;
+    // overflow-x: hidden;
+
+    //TODO: duplicated
+    @vertical-gap: 8px;
+    .vertical-spacing {
+        margin-bottom: @vertical-gap;
+    }
+
+    .controls {
+        & > *:not(:empty) {
+            margin-bottom: 8px;
+        }
+        .name {
+            word-wrap: break-word;
+            font-weight: bold;
+            input {
+                width: 100%;
+                // keep the text in position
+                margin: -3px 0px -3px -3px;
+                font-weight: bold;
+            }
+        }
+        .subtitle {
+            color: #777;
+            font-size: 90%;
+            a {
+                color: inherit;
+            }
+        }
+        .editable-text {
+            border: solid transparent 1px;
+        }
+        .editable-text:hover {
+            cursor: pointer;
+            border: 1px dotted #999999;
+        }
+
+        .actions {
+            .icon-btn-group();
+            float: right;
+            .icon-btn {
+                margin-left: @icon-btn-margin;
+            }
+        }
+        .messages {
+            .clearfix();
+            [class$=message],
+            [class$=messagesmall] {
+                margin: 0px;
+                &:not(:last-child) {
+                    margin-bottom: 8px;
+                }
+            }
+        }
+        .list-actions {
+            display: none;
+            .clear;
+            .btn {
+                padding-top: 2px;
+                padding-bottom: 2px;
+                font-size: 90%;
+            }
+            .list-action-menu {
+                float: right;
+            }
+        }
+    }
+    // display only a top border on all but the last
+    .list-items {
+        margin: 0;
+        padding: 0;
+
+        .list-item {
+            border-width: 1px 0px 1px 0px;
+            &:not(:last-child) {
+                border-bottom-width: 0px;
+            }
+        }
+    }
+    .empty-message {
+        display: none;
+        margin: 0px;
+    }
+}
+
+// ---------------------------------------------------------------------------- a list panel nested inside a list-item
+.list-item .details .list-panel {
+    margin-top: 8px;
+    border-radius: 3px;
+    background: white;
+    padding: 4px;
+    .list-items {
+        border: 1px solid @border-default-color;
+        border-radius: 3px;
+        .list-item {
+            &:first-child {
+                border-top-width: 0px;
+                border-radius: 3px 3px 0px 0px;
+            }
+            &:last-child {
+                border-bottom-width: 0px;
+                border-radius: 0px 0px 3px 3px;
+            }
+        }
+    }
+}
diff --git a/client/galaxy/style/less/reports.less b/client/galaxy/style/less/reports.less
new file mode 100644
index 0000000..e1b0aec
--- /dev/null
+++ b/client/galaxy/style/less/reports.less
@@ -0,0 +1,237 @@
+// ############################################################# Reports Webapp
+
+
+// ======================================================== Left ==============
+.reports-panel-container {
+    position: absolute;
+    /*From base.less, top should be @panel_header_height*/
+    top: 30px;
+    bottom: 10px;
+    overflow: scroll;
+    width: 100%;
+    padding: 10px;
+}
+#reports_home {
+    position: relative;
+    top: -7px;
+    float: right;
+}
+
+// ======================================================== SparkLines ========
+#spark_time_select {
+    display: inline-block;
+}
+#spark_select {
+    height: 23px;
+    width: 30px;
+    padding: 0;
+    border-radius: 2px;
+}
+
+.jqstooltip {
+        -webkit-box-sizing: content-box;
+        -moz-box-sizing: content-box;
+        box-sizing: content-box;
+    }
+
+// ======================================================== Sorting ===========
+.dir_arrow {
+    visibility: hidden;
+}
+
+// ======================================================== Home page =========
+.chart {
+    cursor: zoom-in;
+    position: relative;
+    z-index: 2;
+}
+
+.chart rect {
+    fill: steelblue;
+    shape-rendering: crispEdges;
+}
+
+.chart text {
+    fill: black;
+    font: 7px sans-serif;
+    text-anchor: end;
+}
+
+.tick text {
+    fill: black;
+    font-family: Arial, sans-serif;
+    font-size: 7px;
+    text-anchor: end;
+}
+
+.axis path, .axis line {
+    fill: none;
+    stroke: #000;
+    shape-rendering: crispEdges;
+}
+
+.x_path {
+    stroke: #000;
+    stroke-width: 1;
+}
+
+.chart .title {
+    fill: black;
+    font-size: 15px;
+    font-family: "Lucida Grande",verdana,arial,helvetica,sans-serif;
+    font-weight: 800;
+    line-height: 1.1;
+}
+
+.chart .ax_title {
+    fill: black;
+}
+
+.hr_container, .dy_container {
+    position: relative;
+    z-index: 2;
+    display: inline-block;
+    vertical-align: top;
+}
+
+.charts {
+    margin-top: 10px;
+    margin-left: 0;
+    margin-right: 0;
+    text-align: center;
+    position: relative;
+    display: block;
+    overflow-x: auto;
+    overflow-y: hidden;
+    white-space: nowrap;
+}
+
+.trim {
+    position:relative;
+    height:10px;
+    top: 50px;
+    background: #ebd9b2;
+    color: #000;
+    border-top: solid #d6b161 1px;
+    border-bottom: solid #d6b161 1px;
+}
+
+.tool_tip text {
+    fill: white;
+}
+
+// ======================================================== Paging ============
+// ------------------------------------ Pages ---------------------------------
+#back_button, #next_button, #curr_button, .miss_pages, .page_button {
+    position: relative;
+    float: left;
+    height: 24px;
+    width: 23px;
+    margin: 0 -1px 0 0;
+    padding-top: 2.5px;
+    border: 1px solid #bfbfbf;
+    z-index: 0;
+}
+
+#back_button {
+    cursor: pointer;
+    border-top-left-radius: 3px;
+    border-bottom-left-radius: 3px;
+}
+
+#curr_button {
+    background: #ebd9b2;
+    border: 1px solid #5f6990;
+    z-index: 1;
+}
+
+#formHeader > tbody > tr {
+    vertical-align: middle;
+}
+
+#next_button {
+    cursor: pointer;
+    border-top-right-radius: 3px;
+    border-bottom-right-radius: 3px;
+}
+
+// TODO: min-width in page_selector should be set when the amount of
+//       buttons is calculated
+#page_selector {
+    cursor: default;
+    position: relative;
+    text-align: center;
+    min-width: 132px;
+}
+
+.page_button > a {
+    text-decoration: none;
+    padding: 8px;
+    margin: -8px;
+    height: 100%;
+    width: 100%;
+}
+// ------------------------------------ Entry Selector ------------------------
+#entries_edit {
+    position: relative;
+    padding: 0;
+    width: 36px;
+    height: 20px;
+    text-align: center;
+    border: 1px solid black;
+    border-radius: 3px;
+    z-index: 6;
+}
+
+#entry_form {
+    min-width: 123px;
+}
+
+#entry_submit {
+    cursor: default;
+    position: relative;
+    display: inline-block;
+    border: 1px solid black;
+    padding: 0;
+    width: 22px;
+    height: 22px;
+    border-radius: 11px;
+    background-color: #ebd9b2;
+    text-align: center;
+    opacity: 0.0;
+    z-index: 0;
+}
+
+
+// ======================================================== Misc. =============
+.half_width {
+    width: 50%;
+}
+.third_width {
+    width: 30%;
+}
+.quarter_width {
+    width: 25%;
+}
+
+.reportBody {
+    text-align: center;
+    text-align: -moz-center;
+}
+
+#systemForm {
+    width: 90%;
+    margin-left: auto;
+    margin-right: auto;
+    padding: 5px;
+    border-spacing: 5px;
+    border-collapse: separate;
+}
+
+.lastAccessForm, .diskUsageForm {
+    width: 70%;
+    margin-left: auto;
+    margin-right: auto;
+    padding: 5px;
+    border-spacing: 5px;
+}
\ No newline at end of file
diff --git a/client/galaxy/style/less/select2.less b/client/galaxy/style/less/select2.less
new file mode 100644
index 0000000..be9358a
--- /dev/null
+++ b/client/galaxy/style/less/select2.less
@@ -0,0 +1,704 @@
+/*
+Version: 3.5.1 Timestamp: Tue Jul 22 18:58:56 EDT 2014
+*/
+.select2-container {
+    margin: 0;
+    position: relative;
+    display: inline-block;
+    /* inline-block for ie7 */
+    zoom: 1;
+    *display: inline;
+    vertical-align: middle;
+}
+
+.select2-container,
+.select2-drop,
+.select2-search,
+.select2-search input {
+  /*
+    Force border-box so that % widths fit the parent
+    container without overlap because of margin/padding.
+    More Info : http://www.quirksmode.org/css/box.html
+  */
+  -webkit-box-sizing: border-box; /* webkit */
+     -moz-box-sizing: border-box; /* firefox */
+          box-sizing: border-box; /* css3 */
+}
+
+.select2-container .select2-choice {
+    display: block;
+    height: 26px;
+    padding: 0 0 0 8px;
+    overflow: hidden;
+    position: relative;
+
+    border: 1px solid #aaa;
+    white-space: nowrap;
+    line-height: 26px;
+    color: #444;
+    text-decoration: none;
+
+    border-radius: 4px;
+
+    background-clip: padding-box;
+
+    -webkit-touch-callout: none;
+      -webkit-user-select: none;
+         -moz-user-select: none;
+          -ms-user-select: none;
+              user-select: none;
+
+    background-color: #fff;
+    background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #eee), color-stop(0.5, #fff));
+    background-image: -webkit-linear-gradient(center bottom, #eee 0%, #fff 50%);
+    background-image: -moz-linear-gradient(center bottom, #eee 0%, #fff 50%);
+    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr = '#ffffff', endColorstr = '#eeeeee', GradientType = 0);
+    background-image: linear-gradient(to top, #eee 0%, #fff 50%);
+}
+
+html[dir="rtl"] .select2-container .select2-choice {
+    padding: 0 8px 0 0;
+}
+
+.select2-container.select2-drop-above .select2-choice {
+    border-bottom-color: #aaa;
+
+    border-radius: 0 0 4px 4px;
+
+    background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #eee), color-stop(0.9, #fff));
+    background-image: -webkit-linear-gradient(center bottom, #eee 0%, #fff 90%);
+    background-image: -moz-linear-gradient(center bottom, #eee 0%, #fff 90%);
+    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffff', endColorstr='#eeeeee', GradientType=0);
+    background-image: linear-gradient(to bottom, #eee 0%, #fff 90%);
+}
+
+.select2-container.select2-allowclear .select2-choice .select2-chosen {
+    margin-right: 42px;
+}
+
+.select2-container .select2-choice > .select2-chosen {
+    margin-right: 26px;
+    display: block;
+    overflow: hidden;
+
+    white-space: nowrap;
+
+    text-overflow: ellipsis;
+    float: none;
+    width: auto;
+}
+
+html[dir="rtl"] .select2-container .select2-choice > .select2-chosen {
+    margin-left: 26px;
+    margin-right: 0;
+}
+
+.select2-container .select2-choice abbr {
+    display: none;
+    width: 12px;
+    height: 12px;
+    position: absolute;
+    right: 24px;
+    top: 8px;
+
+    font-size: 1px;
+    text-decoration: none;
+
+    border: 0;
+    background: url('../images/select2.png') right top no-repeat;
+    cursor: pointer;
+    outline: 0;
+}
+
+.select2-container.select2-allowclear .select2-choice abbr {
+    display: inline-block;
+}
+
+.select2-container .select2-choice abbr:hover {
+    background-position: right -11px;
+    cursor: pointer;
+}
+
+.select2-drop-mask {
+    border: 0;
+    margin: 0;
+    padding: 0;
+    position: fixed;
+    left: 0;
+    top: 0;
+    min-height: 100%;
+    min-width: 100%;
+    height: auto;
+    width: auto;
+    opacity: 0;
+    z-index: 9998;
+    /* styles required for IE to work */
+    background-color: #fff;
+    filter: alpha(opacity=0);
+}
+
+.select2-drop {
+    width: 100%;
+    margin-top: -1px;
+    position: absolute;
+    z-index: 9999;
+    top: 100%;
+
+    background: #fff;
+    color: #000;
+    border: 1px solid #aaa;
+    border-top: 0;
+
+    border-radius: 0 0 4px 4px;
+
+    -webkit-box-shadow: 0 4px 5px rgba(0, 0, 0, .15);
+            box-shadow: 0 4px 5px rgba(0, 0, 0, .15);
+}
+
+.select2-drop.select2-drop-above {
+    margin-top: 1px;
+    border-top: 1px solid #aaa;
+    border-bottom: 0;
+
+    border-radius: 4px 4px 0 0;
+
+    -webkit-box-shadow: 0 -4px 5px rgba(0, 0, 0, .15);
+            box-shadow: 0 -4px 5px rgba(0, 0, 0, .15);
+}
+
+.select2-drop-active {
+    border: 1px solid #5897fb;
+    border-top: none;
+}
+
+.select2-drop.select2-drop-above.select2-drop-active {
+    border-top: 1px solid #5897fb;
+}
+
+.select2-drop-auto-width {
+    border-top: 1px solid #aaa;
+    width: auto;
+}
+
+.select2-drop-auto-width .select2-search {
+    padding-top: 4px;
+}
+
+.select2-container .select2-choice .select2-arrow {
+    display: inline-block;
+    width: 18px;
+    height: 100%;
+    position: absolute;
+    right: 0;
+    top: 0;
+
+    border-left: 1px solid #aaa;
+    border-radius: 0 4px 4px 0;
+
+    background-clip: padding-box;
+
+    background: #ccc;
+    background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #ccc), color-stop(0.6, #eee));
+    background-image: -webkit-linear-gradient(center bottom, #ccc 0%, #eee 60%);
+    background-image: -moz-linear-gradient(center bottom, #ccc 0%, #eee 60%);
+    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr = '#eeeeee', endColorstr = '#cccccc', GradientType = 0);
+    background-image: linear-gradient(to top, #ccc 0%, #eee 60%);
+}
+
+html[dir="rtl"] .select2-container .select2-choice .select2-arrow {
+    left: 0;
+    right: auto;
+
+    border-left: none;
+    border-right: 1px solid #aaa;
+    border-radius: 4px 0 0 4px;
+}
+
+.select2-container .select2-choice .select2-arrow b {
+    display: block;
+    width: 100%;
+    height: 100%;
+    background: url('../images/select2.png') no-repeat 0 1px;
+}
+
+html[dir="rtl"] .select2-container .select2-choice .select2-arrow b {
+    background-position: 2px 1px;
+}
+
+.select2-search {
+    display: inline-block;
+    width: 100%;
+    min-height: 26px;
+    margin: 0;
+    padding-left: 4px;
+    padding-right: 4px;
+
+    position: relative;
+    z-index: 10000;
+
+    white-space: nowrap;
+}
+
+.select2-search input {
+    width: 100%;
+    height: auto !important;
+    min-height: 26px;
+    padding: 4px 20px 4px 5px;
+    margin: 0;
+
+    outline: 0;
+    font-family: sans-serif;
+    font-size: 1em;
+
+    border: 1px solid #aaa;
+    border-radius: 0;
+
+    -webkit-box-shadow: none;
+            box-shadow: none;
+
+    background: #fff url('../images/select2.png') no-repeat 100% -22px;
+    background: url('../images/select2.png') no-repeat 100% -22px, -webkit-gradient(linear, left bottom, left top, color-stop(0.85, #fff), color-stop(0.99, #eee));
+    background: url('../images/select2.png') no-repeat 100% -22px, -webkit-linear-gradient(center bottom, #fff 85%, #eee 99%);
+    background: url('../images/select2.png') no-repeat 100% -22px, -moz-linear-gradient(center bottom, #fff 85%, #eee 99%);
+    background: url('../images/select2.png') no-repeat 100% -22px, linear-gradient(to bottom, #fff 85%, #eee 99%) 0 0;
+}
+
+html[dir="rtl"] .select2-search input {
+    padding: 4px 5px 4px 20px;
+
+    background: #fff url('../images/select2.png') no-repeat -37px -22px;
+    background: url('../images/select2.png') no-repeat -37px -22px, -webkit-gradient(linear, left bottom, left top, color-stop(0.85, #fff), color-stop(0.99, #eee));
+    background: url('../images/select2.png') no-repeat -37px -22px, -webkit-linear-gradient(center bottom, #fff 85%, #eee 99%);
+    background: url('../images/select2.png') no-repeat -37px -22px, -moz-linear-gradient(center bottom, #fff 85%, #eee 99%);
+    background: url('../images/select2.png') no-repeat -37px -22px, linear-gradient(to bottom, #fff 85%, #eee 99%) 0 0;
+}
+
+.select2-drop.select2-drop-above .select2-search input {
+    margin-top: 4px;
+}
+
+.select2-search input.select2-active {
+    background: #fff url('../images/select2-spinner.gif') no-repeat 100%;
+    background: url('../images/select2-spinner.gif') no-repeat 100%, -webkit-gradient(linear, left bottom, left top, color-stop(0.85, #fff), color-stop(0.99, #eee));
+    background: url('../images/select2-spinner.gif') no-repeat 100%, -webkit-linear-gradient(center bottom, #fff 85%, #eee 99%);
+    background: url('../images/select2-spinner.gif') no-repeat 100%, -moz-linear-gradient(center bottom, #fff 85%, #eee 99%);
+    background: url('../images/select2-spinner.gif') no-repeat 100%, linear-gradient(to bottom, #fff 85%, #eee 99%) 0 0;
+}
+
+.select2-container-active .select2-choice,
+.select2-container-active .select2-choices {
+    border: 1px solid #5897fb;
+    outline: none;
+
+    -webkit-box-shadow: 0 0 5px rgba(0, 0, 0, .3);
+            box-shadow: 0 0 5px rgba(0, 0, 0, .3);
+}
+
+.select2-dropdown-open .select2-choice {
+    border-bottom-color: transparent;
+    -webkit-box-shadow: 0 1px 0 #fff inset;
+            box-shadow: 0 1px 0 #fff inset;
+
+    border-bottom-left-radius: 0;
+    border-bottom-right-radius: 0;
+
+    background-color: #eee;
+    background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #fff), color-stop(0.5, #eee));
+    background-image: -webkit-linear-gradient(center bottom, #fff 0%, #eee 50%);
+    background-image: -moz-linear-gradient(center bottom, #fff 0%, #eee 50%);
+    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#ffffff', GradientType=0);
+    background-image: linear-gradient(to top, #fff 0%, #eee 50%);
+}
+
+.select2-dropdown-open.select2-drop-above .select2-choice,
+.select2-dropdown-open.select2-drop-above .select2-choices {
+    border: 1px solid #5897fb;
+    border-top-color: transparent;
+
+    background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0, #fff), color-stop(0.5, #eee));
+    background-image: -webkit-linear-gradient(center top, #fff 0%, #eee 50%);
+    background-image: -moz-linear-gradient(center top, #fff 0%, #eee 50%);
+    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#ffffff', GradientType=0);
+    background-image: linear-gradient(to bottom, #fff 0%, #eee 50%);
+}
+
+.select2-dropdown-open .select2-choice .select2-arrow {
+    background: transparent;
+    border-left: none;
+    filter: none;
+}
+html[dir="rtl"] .select2-dropdown-open .select2-choice .select2-arrow {
+    border-right: none;
+}
+
+.select2-dropdown-open .select2-choice .select2-arrow b {
+    background-position: -18px 1px;
+}
+
+html[dir="rtl"] .select2-dropdown-open .select2-choice .select2-arrow b {
+    background-position: -16px 1px;
+}
+
+.select2-hidden-accessible {
+    border: 0;
+    clip: rect(0 0 0 0);
+    height: 1px;
+    margin: -1px;
+    overflow: hidden;
+    padding: 0;
+    position: absolute;
+    width: 1px;
+}
+
+/* results */
+.select2-results {
+    max-height: 200px;
+    padding: 0 0 0 4px;
+    margin: 4px 4px 4px 0;
+    position: relative;
+    overflow-x: hidden;
+    overflow-y: auto;
+    -webkit-tap-highlight-color: rgba(0, 0, 0, 0);
+}
+
+html[dir="rtl"] .select2-results {
+    padding: 0 4px 0 0;
+    margin: 4px 0 4px 4px;
+}
+
+.select2-results ul.select2-result-sub {
+    margin: 0;
+    padding-left: 0;
+}
+
+.select2-results li {
+    list-style: none;
+    display: list-item;
+    background-image: none;
+}
+
+.select2-results li.select2-result-with-children > .select2-result-label {
+    font-weight: bold;
+}
+
+.select2-results .select2-result-label {
+    padding: 3px 7px 4px;
+    margin: 0;
+    cursor: pointer;
+
+    min-height: 1em;
+
+    -webkit-touch-callout: none;
+      -webkit-user-select: none;
+         -moz-user-select: none;
+          -ms-user-select: none;
+              user-select: none;
+}
+
+.select2-results-dept-1 .select2-result-label { padding-left: 20px }
+.select2-results-dept-2 .select2-result-label { padding-left: 40px }
+.select2-results-dept-3 .select2-result-label { padding-left: 60px }
+.select2-results-dept-4 .select2-result-label { padding-left: 80px }
+.select2-results-dept-5 .select2-result-label { padding-left: 100px }
+.select2-results-dept-6 .select2-result-label { padding-left: 110px }
+.select2-results-dept-7 .select2-result-label { padding-left: 120px }
+
+.select2-results .select2-highlighted {
+    background: #3875d7;
+    color: #fff;
+}
+
+.select2-results li em {
+    background: #feffde;
+    font-style: normal;
+}
+
+.select2-results .select2-highlighted em {
+    background: transparent;
+}
+
+.select2-results .select2-highlighted ul {
+    background: #fff;
+    color: #000;
+}
+
+.select2-results .select2-no-results,
+.select2-results .select2-searching,
+.select2-results .select2-ajax-error,
+.select2-results .select2-selection-limit {
+    background: #f4f4f4;
+    display: list-item;
+    padding-left: 5px;
+}
+
+/*
+disabled look for disabled choices in the results dropdown
+*/
+.select2-results .select2-disabled.select2-highlighted {
+    color: #666;
+    background: #f4f4f4;
+    display: list-item;
+    cursor: default;
+}
+.select2-results .select2-disabled {
+  background: #f4f4f4;
+  display: list-item;
+  cursor: default;
+}
+
+.select2-results .select2-selected {
+    display: none;
+}
+
+.select2-more-results.select2-active {
+    background: #f4f4f4 url('../images/select2-spinner.gif') no-repeat 100%;
+}
+
+.select2-results .select2-ajax-error {
+    background: rgba(255, 50, 50, .2);
+}
+
+.select2-more-results {
+    background: #f4f4f4;
+    display: list-item;
+}
+
+/* disabled styles */
+
+.select2-container.select2-container-disabled .select2-choice {
+    background-color: #f4f4f4;
+    background-image: none;
+    border: 1px solid #ddd;
+    cursor: default;
+}
+
+.select2-container.select2-container-disabled .select2-choice .select2-arrow {
+    background-color: #f4f4f4;
+    background-image: none;
+    border-left: 0;
+}
+
+.select2-container.select2-container-disabled .select2-choice abbr {
+    display: none;
+}
+
+
+/* multiselect */
+
+.select2-container-multi .select2-choices {
+    height: auto !important;
+    height: 1%;
+    margin: 0;
+    padding: 0 5px 0 0;
+    position: relative;
+
+    border: 1px solid #aaa;
+    cursor: text;
+    overflow: hidden;
+
+    background-color: #fff;
+    background-image: -webkit-gradient(linear, 0% 0%, 0% 100%, color-stop(1%, #eee), color-stop(15%, #fff));
+    background-image: -webkit-linear-gradient(top, #eee 1%, #fff 15%);
+    background-image: -moz-linear-gradient(top, #eee 1%, #fff 15%);
+    background-image: linear-gradient(to bottom, #eee 1%, #fff 15%);
+}
+
+html[dir="rtl"] .select2-container-multi .select2-choices {
+    padding: 0 0 0 5px;
+}
+
+.select2-locked {
+  padding: 3px 5px 3px 5px !important;
+}
+
+.select2-container-multi .select2-choices {
+    min-height: 26px;
+}
+
+.select2-container-multi.select2-container-active .select2-choices {
+    border: 1px solid #5897fb;
+    outline: none;
+
+    -webkit-box-shadow: 0 0 5px rgba(0, 0, 0, .3);
+            box-shadow: 0 0 5px rgba(0, 0, 0, .3);
+}
+.select2-container-multi .select2-choices li {
+    float: left;
+    list-style: none;
+}
+html[dir="rtl"] .select2-container-multi .select2-choices li
+{
+    float: right;
+}
+.select2-container-multi .select2-choices .select2-search-field {
+    margin: 0;
+    padding: 0;
+    white-space: nowrap;
+}
+
+.select2-container-multi .select2-choices .select2-search-field input {
+    padding: 5px;
+    margin: 1px 0;
+
+    font-family: sans-serif;
+    font-size: 100%;
+    color: #666;
+    outline: 0;
+    border: 0;
+    -webkit-box-shadow: none;
+            box-shadow: none;
+    background: transparent !important;
+}
+
+.select2-container-multi .select2-choices .select2-search-field input.select2-active {
+    background: #fff url('../images/select2-spinner.gif') no-repeat 100% !important;
+}
+
+.select2-default {
+    color: #999 !important;
+}
+
+.select2-container-multi .select2-choices .select2-search-choice {
+    padding: 3px 5px 3px 18px;
+    margin: 3px 0 3px 5px;
+    position: relative;
+
+    line-height: 13px;
+    color: #333;
+    cursor: default;
+    border: 1px solid #aaaaaa;
+
+    border-radius: 3px;
+
+    -webkit-box-shadow: 0 0 2px #fff inset, 0 1px 0 rgba(0, 0, 0, 0.05);
+            box-shadow: 0 0 2px #fff inset, 0 1px 0 rgba(0, 0, 0, 0.05);
+
+    background-clip: padding-box;
+
+    -webkit-touch-callout: none;
+      -webkit-user-select: none;
+         -moz-user-select: none;
+          -ms-user-select: none;
+              user-select: none;
+
+    background-color: #e4e4e4;
+    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#f4f4f4', GradientType=0);
+    background-image: -webkit-gradient(linear, 0% 0%, 0% 100%, color-stop(20%, #f4f4f4), color-stop(50%, #f0f0f0), color-stop(52%, #e8e8e8), color-stop(100%, #eee));
+    background-image: -webkit-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eee 100%);
+    background-image: -moz-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eee 100%);
+    background-image: linear-gradient(to top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eee 100%);
+}
+html[dir="rtl"] .select2-container-multi .select2-choices .select2-search-choice
+{
+    margin: 3px 5px 3px 0;
+    padding: 3px 18px 3px 5px;
+}
+.select2-container-multi .select2-choices .select2-search-choice .select2-chosen {
+    cursor: default;
+}
+.select2-container-multi .select2-choices .select2-search-choice-focus {
+    background: #d4d4d4;
+}
+
+.select2-search-choice-close {
+    display: block;
+    width: 12px;
+    height: 13px;
+    position: absolute;
+    right: 3px;
+    top: 4px;
+
+    font-size: 1px;
+    outline: none;
+    background: url('../images/select2.png') right top no-repeat;
+}
+html[dir="rtl"] .select2-search-choice-close {
+    right: auto;
+    left: 3px;
+}
+
+.select2-container-multi .select2-search-choice-close {
+    left: 3px;
+}
+
+html[dir="rtl"] .select2-container-multi .select2-search-choice-close {
+    left: auto;
+    right: 2px;
+}
+
+.select2-container-multi .select2-choices .select2-search-choice .select2-search-choice-close:hover {
+  background-position: right -11px;
+}
+.select2-container-multi .select2-choices .select2-search-choice-focus .select2-search-choice-close {
+    background-position: right -11px;
+}
+
+/* disabled styles */
+.select2-container-multi.select2-container-disabled .select2-choices {
+    background-color: #f4f4f4;
+    background-image: none;
+    border: 1px solid #ddd;
+    cursor: default;
+}
+
+.select2-container-multi.select2-container-disabled .select2-choices .select2-search-choice {
+    padding: 3px 5px 3px 5px;
+    border: 1px solid #ddd;
+    background-image: none;
+    background-color: #f4f4f4;
+}
+
+.select2-container-multi.select2-container-disabled .select2-choices .select2-search-choice .select2-search-choice-close {
+    display: none;
+    background: none;
+}
+/* end multiselect */
+
+
+.select2-result-selectable .select2-match,
+.select2-result-unselectable .select2-match {
+    text-decoration: underline;
+}
+
+.select2-offscreen, .select2-offscreen:focus {
+    clip: rect(0 0 0 0) !important;
+    width: 1px !important;
+    height: 1px !important;
+    border: 0 !important;
+    margin: 0 !important;
+    padding: 0 !important;
+    overflow: hidden !important;
+    position: absolute !important;
+    outline: 0 !important;
+    left: 0px !important;
+    top: 0px !important;
+}
+
+.select2-display-none {
+    display: none;
+}
+
+.select2-measure-scrollbar {
+    position: absolute;
+    top: -10000px;
+    left: -10000px;
+    width: 100px;
+    height: 100px;
+    overflow: scroll;
+}
+
+/* Retina-ize icons */
+
+@media only screen and (-webkit-min-device-pixel-ratio: 1.5), only screen and (min-resolution: 2dppx) {
+    .select2-search input,
+    .select2-search-choice-close,
+    .select2-container .select2-choice abbr,
+    .select2-container .select2-choice .select2-arrow b {
+        background-image: url('../images/select2x2.png') !important;
+        background-repeat: no-repeat !important;
+        background-size: 60px 40px !important;
+    }
+
+    .select2-search input {
+        background-position: 100% -21px !important;
+    }
+}
diff --git a/client/galaxy/style/less/theme/blue.less b/client/galaxy/style/less/theme/blue.less
new file mode 100644
index 0000000..f1b7f93
--- /dev/null
+++ b/client/galaxy/style/less/theme/blue.less
@@ -0,0 +1,24 @@
+// Override colors to closely resemble the style as of Fall 2013
+
+@side-panel-bg: #DFE5F9;
+
+@layout-border-color: #999;
+
+@form-heading-bg: #ebd9b2;
+@form-border: darken(@form-heading-bg,20%);
+
+@table-heading-bg: #ebd9b2;
+@table-border-color: darken(@table-heading-bg,20%);
+
+@link-color: #303030;
+
+@input-border: #aaa;
+
+a {
+	text-decoration: underline;
+}
+
+// Label is used in places it probably shouldn't be (grid cells); replace with data- attributes and remove this.
+label {
+	font-weight: normal;
+}
\ No newline at end of file
diff --git a/client/galaxy/style/less/toastr.less b/client/galaxy/style/less/toastr.less
new file mode 100644
index 0000000..f306cf5
--- /dev/null
+++ b/client/galaxy/style/less/toastr.less
@@ -0,0 +1,232 @@
+// Mix-ins
+.borderRadius(@radius) {
+	-moz-border-radius: @radius;
+	-webkit-border-radius: @radius;
+	border-radius: @radius;
+}
+
+.boxShadow(@boxShadow) {
+	-moz-box-shadow: @boxShadow;
+	-webkit-box-shadow: @boxShadow;
+	box-shadow: @boxShadow;
+}
+
+.opacity(@opacity) {
+	@opacityPercent: @opacity * 100;
+	opacity: @opacity;
+	-ms-filter: ~"progid:DXImageTransform.Microsoft.Alpha(Opacity=@{opacityPercent})";
+	filter: ~"alpha(opacity=@{opacityPercent})";
+}
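+
+// Illustration (comment added; not part of upstream toastr): the ~"" escape keeps
+// LESS from parsing the IE filter string while still interpolating
+// @{opacityPercent}, so a call such as `.opacity(0.8);` compiles to:
+//   opacity: 0.8;
+//   -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80);
+//   filter: alpha(opacity=80);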
+
+.wordWrap(@wordWrap: break-word) {
+	-ms-word-wrap: @wordWrap;
+	word-wrap: @wordWrap;
+}
+
+// Variables
+@black: #000000;
+@grey: #999999;
+@light-grey: #CCCCCC;
+@white: #FFFFFF;
+@near-black: #030303;
+@green: #51A351;
+@red: #BD362F;
+@blue: #2F96B4;
+@orange: #F89406;
+
+// Styles
+.toast-title {
+	font-weight: bold;
+}
+
+.toast-message {
+	.wordWrap();
+
+	a,
+	label {
+		color: @white;
+	}
+
+		a:hover {
+			color: @light-grey;
+			text-decoration: none;
+		}
+}
+
+.toast-close-button {
+	position: relative;
+	right: -0.3em;
+	top: -0.3em;
+	float: right;
+	font-size: 20px;
+	font-weight: bold;
+	color: @white;
+	-webkit-text-shadow: 0 1px 0 rgba(255,255,255,1);
+	text-shadow: 0 1px 0 rgba(255,255,255,1);
+	.opacity(1);
+
+	&:hover,
+	&:focus {
+		color: @black;
+		text-decoration: none;
+		cursor: pointer;
+		.opacity(1);
+	}
+}
+
+/* Additional properties for the button version:
+ iOS requires the button element instead of an anchor tag.
+ If you want the anchor version, it requires `href="#"`. */
+button.toast-close-button {
+	padding: 0;
+	cursor: pointer;
+	background: transparent;
+	border: 0;
+	-webkit-appearance: none;
+}
+
+.toast-top-full-width {
+	top: 0;
+	right: 0;
+	width: 100%;
+}
+
+.toast-bottom-full-width {
+	bottom: 0;
+	right: 0;
+	width: 100%;
+}
+
+.toast-top-left {
+	top: 12px;
+	left: 12px;
+}
+
+.toast-top-right {
+	top: 12px;
+	right: 12px;
+}
+
+.toast-bottom-right {
+	right: 12px;
+	bottom: 12px;
+}
+
+.toast-bottom-left {
+	bottom: 12px;
+	left: 12px;
+}
+
+#toast-container {
+	position: fixed;
+	z-index: 999999;
+
+	* {
+		-moz-box-sizing: border-box;
+		-webkit-box-sizing: border-box;
+		box-sizing: border-box;
+	}
+
+	> div {
+		margin: 4em 0 6px;
+		padding: 15px 15px 15px 50px;
+		width: 300px;
+		.borderRadius(3px 3px 3px 3px);
+		background-position: 15px center;
+		background-repeat: no-repeat;
+		.boxShadow(0 0 12px @grey);
+		color: @white;
+		.opacity(1);
+	}
+
+	> :hover {
+		.boxShadow(0 0 12px @black);
+		.opacity(1);
+		cursor: pointer;
+	}
+
+	> .toast-info {
+		background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGwSURBVEhLtZa9SgNBEMc9sUxxRcoUKSzSWIhXpFMhhYWFhaBg4yPYiWCXZxBLERsLRS3EQkEfwCKdjWJAwSKCgoKCcudv4O5YLrt7EzgXhiU3/4+b2ckmwVjJSpKkQ6wAi4gwhT+z3wRBcEz0yjSseUTrcRyfsHsXmD0AmbHOC9Ii8VImnuXBPglHpQ5wwSVM7sNnTG7Za4JwDdCjxyAiH3nyA2mtaTJufiDZ5dCaqlItILh1NHatfN5skvjx9Z38m69CgzuXmZgVrPIGE763Jx9qKsRozWYw6xOHdER+nn2KkO+Bb+UV5CBN6WC6QtBgbRVozrahA [...]
+	}
+
+	> .toast-error {
+		background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAHOSURBVEhLrZa/SgNBEMZzh0WKCClSCKaIYOED+AAKeQQLG8HWztLCImBrYadgIdY+gIKNYkBFSwu7CAoqCgkkoGBI/E28PdbLZmeDLgzZzcx83/zZ2SSXC1j9fr+I1Hq93g2yxH4iwM1vkoBWAdxCmpzTxfkN2RcyZNaHFIkSo10+8kgxkXIURV5HGxTmFuc75B2RfQkpxHG8aAgaAFa0tAHqYFfQ7Iwe2yhODk8+J4C7yAoRTWI3w/4klGRgR4lO7Rpn9+gvMyWp+uxFh8+H+ARlgN1nJuJuQAYvNkEnwGFck18Er4q3egEc/oO+mhLdKgRyhdNFi [...]
+	}
+
+	> .toast-success {
+		background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAADsSURBVEhLY2AYBfQMgf///3P8+/evAIgvA/FsIF+BavYDDWMBGroaSMMBiE8VC7AZDrIFaMFnii3AZTjUgsUUWUDA8OdAH6iQbQEhw4HyGsPEcKBXBIC4ARhex4G4BsjmweU1soIFaGg/WtoFZRIZdEvIMhxkCCjXIVsATV6gFGACs4Rsw0EGgIIH3QJYJgHSARQZDrWAB+jawzgs+Q2UO49D7jnRSRGoEFRILcdmEMWGI0cm0JJ2QpYA1RDvcmzJEWhABhD/pqrL0S0CWuABKgnRki9lLseS7g2AlqwHWQSKH4oKLrILpRGhEQCw2LiRUIa4lwAAA [...]
+	}
+
+	> .toast-warning {
+		background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGYSURBVEhL5ZSvTsNQFMbXZGICMYGYmJhAQIJAICYQPAACiSDB8AiICQQJT4CqQEwgJvYASAQCiZiYmJhAIBATCARJy+9rTsldd8sKu1M0+dLb057v6/lbq/2rK0mS/TRNj9cWNAKPYIJII7gIxCcQ51cvqID+GIEX8ASG4B1bK5gIZFeQfoJdEXOfgX4QAQg7kH2A65yQ87lyxb27sggkAzAuFhbbg1K2kgCkB1bVwyIR9m2L7PRPIhDUIXgGtyKw575yz3lTNs6X4JXnjV+LKM/m3MydnTbtOKIjtz6VhCBq4vSm3ncdrD2lk0VgUXSVKjVDJXJzi [...]
+	}
+
+	/*overrides*/
+	&.toast-top-full-width > div,
+	&.toast-bottom-full-width > div {
+		width: 96%;
+		margin: auto;
+	}
+}
+
+.toast {
+	background-color: @near-black;
+}
+
+.toast-success {
+	background-color: @green;
+}
+
+.toast-error {
+	background-color: @red;
+}
+
+.toast-info {
+	background-color: @blue;
+}
+
+.toast-warning {
+	background-color: @orange;
+}
+
+/*Responsive Design*/
+
+@media all and (max-width: 240px) {
+	#toast-container {
+
+		> div {
+			padding: 8px 8px 8px 50px;
+			width: 11em;
+		}
+
+		& .toast-close-button {
+			right: -0.2em;
+			top: -0.2em;
+		}
+	}
+}
+
+@media all and (min-width: 241px) and (max-width: 480px) {
+	#toast-container {
+		> div {
+			padding: 8px 8px 8px 50px;
+			width: 18em;
+		}
+
+		& .toast-close-button {
+			right: -0.2em;
+			top: -0.2em;
+		}
+	}
+}
+
+@media all and (min-width: 481px) and (max-width: 768px) {
+	#toast-container {
+		> div {
+			padding: 15px 15px 15px 50px;
+			width: 25em;
+		}
+	}
+}
diff --git a/client/galaxy/style/less/tour.less b/client/galaxy/style/less/tour.less
new file mode 100644
index 0000000..6eb7458
--- /dev/null
+++ b/client/galaxy/style/less/tour.less
@@ -0,0 +1,4 @@
+.tour-tour > .popover-title{
+      background-color: @base-color-1;
+      color: @white;
+}
diff --git a/client/galaxy/style/less/trackster.less b/client/galaxy/style/less/trackster.less
new file mode 100644
index 0000000..d68ccbb
--- /dev/null
+++ b/client/galaxy/style/less/trackster.less
@@ -0,0 +1,572 @@
+/* Z-indexes for levels used in viz; overlay level is shown above base level. */
+@base-index:    1;
+@overlay-index: @base-index + 1;
+@track-header-height: 16px;
+@min-track-height: (@track-header-height + 4);
+@separator-color: #888;
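+// Note (comment added for clarity): LESS evaluates these expressions at compile
+// time; @overlay-index compiles to 2 and @min-track-height to 20px.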
+
+.viewport-container {
+    overflow-x: hidden;
+    overflow-y: auto;
+    background: white;
+}
+
+.trackster-nav-container {
+    width: 100%;
+
+    &.stand-alone {
+        /* Used to push nav into title bar: */
+        height: 0;
+    }
+
+    text-align: center;
+}
+
+.trackster-nav {
+    padding: 0 0;
+    color: #333;
+    font-weight: bold;
+    display: inline-block;
+    top: -2em;
+    background: transparent;
+    border: none;
+
+    &.stand-alone {
+        /* Push nav up into title bar */
+        position: relative;
+    }
+}
+
+.chrom-nav {
+    width: 15em;
+}
+
+.content {
+    font: 10px verdana;
+    position: relative;
+}
+
+.nav-controls {
+    text-align: center;
+    padding: 1px 0;
+    input {
+        margin: 0 5px;
+    }
+}
+
+#zoom-in, #zoom-out {
+    display:inline-block;
+    height: 16px;
+    width: 16px;
+    margin-bottom: -3px;
+    cursor: pointer;
+}
+
+#zoom-out {
+    background: transparent url(../images/fugue/magnifier-zoom-out.png) center center no-repeat;
+}
+
+#zoom-in {
+    margin-left: 10px;
+    background: transparent url(../images/fugue/magnifier-zoom.png) center center no-repeat;
+}
+
+.nav-input {
+    font-size: 12px;
+    width: 30em;
+    z-index: @overlay-index;
+}
+
+.location {
+    display: inline-block;
+    width: 15em;
+    margin: 0 10px;
+    white-space: nowrap;
+}
+
+.intro {
+    z-index: @overlay-index;
+/*    margin-top: 200px;*/
+    margin-left: auto;
+    margin-right: auto;
+    color: #555;
+    text-align: center;
+    font-size: 16px;
+
+    .action-button {
+        background-color: #CCC;
+        margin-top: 10px;
+        padding: 1em;
+        text-decoration:underline;
+    }
+}
+
+.overview {
+    width: 100%;
+    margin: 0px;
+    color: white;
+}
+
+.overview-viewport {
+    position: relative;
+    height: 14px;
+/*    border-top: solid #666 1px;*/
+/*    border-bottom: solid #aaa 1px;*/
+    background: white;
+    margin: 0;
+}
+
+.overview-close {
+    font: 9px verdana;
+    position: absolute;
+    top: 0px;
+    right: 0px;
+    padding: 5px;
+    z-index: @overlay-index;
+    background-color: white;
+}
+
+.overview-highlight {
+    top: 0px;
+    position: absolute;
+    z-index: @overlay-index;
+    border-style: solid;
+    border-color: #666;
+    border-width: 0px 1px;
+}
+
+.overview-boxback {
+    width: 100%;
+    bottom: 0px;
+    z-index: @overlay-index;
+    position: absolute;
+    height: 14px;
+    background: #eee;
+    border: solid #999 1px;
+}
+
+.overview-box {
+    cursor: pointer;
+    bottom: 0px;
+    z-index: @overlay-index;
+    position: absolute;
+    margin-top: 0px;
+    height: 14px;
+    background: #C1C9E5 url(../images/visualization/draggable_horizontal.png) center center no-repeat;
+    border: solid #666 1px;
+}
+
+.viewport-canvas {
+    width: 100%;
+    height: 100px;
+}
+
+.yaxislabel {
+    z-index: @overlay-index;
+    position: absolute;
+    right: 20px;
+}
+
+.yaxislabel.bottom {
+    bottom: 2px;
+}
+
+.group-handle {
+    cursor: move;
+    float: left;
+    background: #eee url('../images/tracks/block.png');
+    width: 12px;
+    height: 12px;
+}
+
+.group {
+    min-height: @min-track-height;
+    border-top: 1px solid @separator-color;
+    border-bottom: 1px solid @separator-color;
+
+    // Put group header on its own line to avoid overlap with track header.
+    > .track-header {
+        position: relative;
+        float: left;
+    }
+}
+
+.track-header {
+    /* Overlay header drawn semi-transparently on track. */
+    height: @track-header-height;
+    position: absolute;
+    z-index: @overlay-index;
+    background-color: rgba(1, 1, 1, 0.1);
+    border-radius: 5px;
+    padding: 0px 2px;
+    text-align: left;
+    margin: 2px;
+
+    /* Make header opaque on hover. */
+    &:hover {
+        background-color: #DDDDDD;
+    }
+
+    // Truncate long track names.
+    .track-name {
+        max-width: 200px;
+        white-space: nowrap;
+        overflow: hidden;
+        text-overflow: ellipsis;
+    }
+
+    .menubutton {
+        margin-left: 0px;
+    }
+}
+
+.track-name {
+    float: left;
+    margin-top: 2px;
+}
+
+.tiles {
+    background: url('../images/tracks/diag_bg.gif');
+    position: relative;
+    overflow: hidden;
+}
+
+.overlay {
+    position: absolute;
+    left: 0;
+    top: 0;
+}
+
+.track-tile {
+    position: absolute;
+    background: white;
+
+    canvas {
+        position: relative;
+        z-index: @base-index;
+    }
+}
+
+.tile-message {
+    border-bottom: solid 1px red;
+    text-align: center;
+    color: red;
+    background-color: white;
+}
+
+.track {
+    position: relative;
+
+    // Separates tracks.
+    border-bottom: 1px solid @separator-color;
+
+    &.error {
+        background: #ECB4AF;
+    }
+
+    &.nodata {
+        background: #EEEEEE;
+    }
+
+    &.pending {
+        background: #FFFFCC;
+    }
+}
+
+.track-content {
+    text-align: center;
+    position: relative;
+    min-height: @min-track-height;
+    padding: 0px 0px 1px 0px;
+
+    .message {
+        position: relative;
+        // To vertically center message in track:
+        top: 4px;
+    }
+}
+
+.loading {
+    min-height: 100px;
+}
+
+.label-track {
+    font-size: 10px;
+    border: none;
+    padding: 0;
+    margin: 0;
+    height: 1.5em;
+    overflow: hidden;
+
+    .label-container {
+        position: relative;
+        height: 1.3em;
+    }
+
+    .pos-label {
+        position: absolute;
+        border-left: solid #999 1px;
+        padding: 1px;
+        padding-bottom: 2px;
+        display: inline-block;
+    }
+
+    .track-content {
+       border: none;
+       background: white;
+    }
+
+}
+
+.reference-track {
+    border: none;
+    margin: 0;
+    padding: 0;
+    // Override Galaxy defaults.
+    line-height: 1;
+
+    .track-content {
+        min-height: 0px;
+    }
+}
+
+.right-float {
+    float: right;
+    margin-left: 5px;
+}
+
+.top-labeltrack {
+    position: relative;
+    border-bottom: solid #999 1px;
+}
+
+.nav-labeltrack {
+    border-top: solid #999 1px;
+    border-bottom: solid #333 1px;
+}
+
+// Styles for tools and filters.
+input {
+    font: 10px verdana;
+
+    // Override Galaxy defaults to make a smaller button.
+    &[type="submit"] {
+        padding: 0px;
+        margin-right: 20px;
+        font-size: inherit;
+    }
+
+    &.color-input {
+        float: left;
+    }
+}
+
+.dynamic-tool, .filters {
+    padding-top: (@track-header-height + 2);
+    padding-bottom: 0.5em;
+    margin-left: 0.25em;
+}
+
+.dynamic-tool {
+    width: 410px;
+}
+
+.filters {
+    float: left;
+    margin: 1em;
+    width: 60%;
+    position: relative;
+}
+
+.display-controls {
+    float: left;
+    margin-left: 1em;
+}
+
+.slider-row {
+    margin-left: 1em;
+    height: 16px;
+
+    &.input {
+        height: 22px;
+    }
+}
+
+.elt-label {
+    float: left;
+    width: 30%;
+    font-weight: bold;
+    margin-right: 1em;
+}
+
+.slider {
+    float: left;
+    width: 40%;
+    position: relative;
+    padding-top:2px;
+}
+
+.tool-name {
+    font-size: 110%;
+    font-weight: bold;
+}
+
+.param-row {
+    margin-top: 0.2em;
+    margin-left: 1em;
+}
+
+.param-label {
+    float: left;
+    font-weight: bold;
+    padding-top: 0.2em;
+    width: 50%;
+}
+
+.menu-button {
+    margin: 0px 4px 0px 4px;
+}
+
+.exclamation {
+    background: transparent url(../images/fugue/exclamation.png) no-repeat;
+    margin-left: 5em;
+}
+
+.track-icons {
+    float: left;
+}
+
+// Defines icons that change from black/white to normal on hover. Note that the images
+// are required to exist in the location specified.
+// TODO: make images into spritemap.
+.icon-hover-mixin(@name) {
+    &.@{name} {
+        background: transparent url('../images/fugue/@{name}-bw.png') no-repeat;
+
+        &:hover {
+            background: transparent url('../images/fugue/@{name}.png') no-repeat;
+        }
+    }
+}
+
+.icon-button {
+    .icon-hover-mixin(bookmarks);
+    .icon-hover-mixin(layers-stack);
+    .icon-hover-mixin(hammer);
+    .icon-hover-mixin(toolbox);
+    .icon-hover-mixin(globe);
+    .icon-hover-mixin(block--plus);
+    .icon-hover-mixin(toggle);
+    .icon-hover-mixin(toggle-expand);
+    .icon-hover-mixin(gear);
+    .icon-hover-mixin(application-dock-270);
+    .icon-hover-mixin(ui-slider-050);
+    .icon-hover-mixin(arrow-resize-090);
+    .icon-hover-mixin(layer-transparent);
+}
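+
+// Illustration (comment added; not in the original source): with the parent
+// selector `&` resolving to .icon-button, a call such as `.icon-hover-mixin(gear);`
+// compiles to roughly:
+//   .icon-button.gear { background: transparent url('../images/fugue/gear-bw.png') no-repeat; }
+//   .icon-button.gear:hover { background: transparent url('../images/fugue/gear.png') no-repeat; }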
+
+.remove-icon, .overview-close {
+    background: transparent url(../images/fugue/cross-small-bw.png) no-repeat;
+}
+
+.icon-button.remove-icon:hover, .overview-close:hover {
+    background: transparent url(../images/fugue/cross-circle.png) no-repeat;
+}
+
+.child-track-icon {
+    background:url('../images/fugue/arrow-000-small-bw.png') no-repeat;
+    width: 30px;
+    cursor: move;
+}
+
+.track-resize {
+    background: white url('../images/visualization/draggable_vertical.png') no-repeat top center;
+    position: absolute;
+    right: 3px;
+    bottom: -4px;
+    width: 14px;
+    height: 7px;
+    border: solid #999 1px;
+    z-index: @overlay-index;
+}
+
+.bookmark {
+    background:white;
+    border:solid #999 1px;
+    border-right:none;
+    margin:0.5em;
+    margin-right:0;
+    padding:0.5em;
+
+    .position {
+        font-weight: bold;
+    }
+}
+
+.delete-icon-container {
+    float:right;
+}
+
+.icon {
+    display: inline-block;
+    width: 16px;
+    height: 16px;
+
+    &.more-down {
+        background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px;
+    }
+
+    &.more-across {
+        background: url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px;
+    }
+}
+
+.feature-popup {
+    position: absolute;
+    z-index: @overlay-index;
+    padding: 5px;
+    font-size: 10px;
+    filter: alpha(opacity=80);
+    background-repeat: no-repeat;
+    background-image: url(../images/tipsy.gif);
+    background-position: top center;
+}
+
+.feature-popup-inner {
+    padding: 5px 8px 4px 8px;
+    background-color: black;
+    color: white;
+}
+
+.zoom-area {
+    position: absolute;
+    top: 0px;
+    background-color: #ccf;
+    opacity: 0.5;
+    z-index: @overlay-index;
+}
+
+.mouse-pos {
+    position: absolute;
+    top: 0px;
+    background-color: black;
+    opacity: 0.15;
+    height: 100%;
+    width: 1px;
+}
+
+.draghandle {
+    margin-top: 2px;
+    cursor: move;
+    float: left;
+    background: transparent url(../images/visualization/draggable_horizontal.png) center center no-repeat;
+    width: 10px;
+    height: 12px;
+}
+
+// Highlight for element(s) being dragged. This definition must come after the definitions of
+// draggable elements (e.g. track, group) in order to take precedence.
+.dragging {
+    border: 1px solid blue;
+}
diff --git a/client/galaxy/style/less/ui.less b/client/galaxy/style/less/ui.less
new file mode 100644
index 0000000..5269dbe
--- /dev/null
+++ b/client/galaxy/style/less/ui.less
@@ -0,0 +1,897 @@
+// prevents highlighting, i.e. text selection, on double click
+.no-highlight {
+    -webkit-user-select: none; /* webkit (safari, chrome) browsers */
+    -moz-user-select: none; /* mozilla browsers */
+    -khtml-user-select: none; /* webkit (konqueror) browsers */
+    -ms-user-select: none; /* IE10+ */
+}
+
+// remove padding
+.no-padding {
+    padding: 0px !important;
+}
+
+.no-transition {
+  -webkit-transition: none !important;
+  -moz-transition: none !important;
+  -o-transition: none !important;
+  -ms-transition: none !important;
+  transition: none !important;
+}
+
+.fa-rotate-45 {
+    -webkit-transform: rotate(45deg);
+    -moz-transform: rotate(45deg);
+    -ms-transform: rotate(45deg);
+    -o-transform: rotate(45deg);
+    transform: rotate(45deg);
+}
+
+// default margins
+@ui-margin-vertical: 5px;
+@ui-margin-vertical-large: 10px;
+@ui-margin-horizontal: 5px;
+@ui-margin-horizontal-small: 4px;
+@ui-margin-horizontal-large: 10px;
+
+// margins
+.ui-margin-top {
+    padding-top: @ui-margin-vertical !important;
+}
+
+.ui-margin-top-large {
+    padding-top: @ui-margin-vertical-large !important;
+}
+
+.ui-margin-bottom {
+    margin-bottom: @ui-margin-vertical !important;
+}
+
+.ui-margin-right {
+    margin-right: @ui-margin-horizontal-small !important;
+}
+
+.ui-margin-both {
+    &:extend(.ui-margin-top);
+    &:extend(.ui-margin-bottom);
+}
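+// Note (comment added for clarity): LESS :extend matches selectors literally, so
+// the leading dot in the selectors above is required for the rules to be pulled in.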
+
+// non-interactive elements
+.ui-error {
+    -moz-border-radius: @border-radius-base;
+    border-radius: @border-radius-base;
+    background: @state-danger-bg;
+    padding: @ui-margin-vertical;
+}
+
+.ui-label {
+	font-weight: bold;
+}
+
+.ui-message {
+    padding: 2px 10px 2px 10px;
+    margin-top: @ui-margin-vertical-large;
+    margin-bottom: 0px;
+    width: 100%;
+    display: none;
+}
+
+.ui-modal {
+    display: none;
+    .modal-header.no-separator {
+        border: none !important;
+        padding-bottom: 0px !important;
+    }
+    .modal-backdrop {
+        z-index: -1;
+    }
+    .modal-body {
+        position: static;
+    }
+    .modal-footer {
+        .buttons {
+            float: right;
+        }
+    }
+}
+
+.ui-modal-styled {
+    &:extend(.ui-modal all);
+    ul {
+        list-style-type: square;
+        padding: 5px 25px;
+    }
+}
+
+// icons
+.ui-icon {
+    font-size: 1.2em;
+    width: 1.2em;
+}
+.ui-icon-button {
+    &:extend(.ui-icon);
+    cursor: pointer;
+}
+
+// buttons
+.ui-button-default {
+    .icon {
+        font-size: 1.1em;
+    }
+    .progress {
+        height: 4px;
+        margin: 0px;
+        margin-top: 2px;
+        .progress-bar {
+            &:extend(.no-transition);
+            background: darken(@state-success-bg, 30%);
+        }
+    }
+}
+
+.ui-button-icon {
+    &:extend(.icon-btn);
+    &:extend(.no-highlight);
+    height: auto !important;
+    width: auto !important;
+    ul {
+        i {
+            width: 1.2em;
+            font-size: 1.2em;
+            margin-right: @ui-margin-horizontal;
+            position: relative;
+            top: 1px;
+        }
+    }
+    .button {
+        margin-right: @ui-margin-horizontal;
+        margin-left: @ui-margin-horizontal;
+        .title {
+            position: relative;
+            font-size: 0.8em;
+            font-weight: normal;
+            top: -1px;
+            word-wrap:break-word;
+        }
+    }
+    .button.disabled {
+        .opacity(.65);
+    }
+}
+
+.ui-button-menu {
+    .dropdown-menu {
+        min-width: 50px;
+    }
+    .dropdown-item {
+        text-align: left;
+    }
+}
+
+.ui-button-icon-plain {
+    &:extend(.icon-btn);
+    border: none !important;
+    background: none !important;
+    height: inherit !important;
+    width: inherit !important;
+    padding-right: 3px !important;
+}
+
+// tabs
+.ui-tabs {
+    .tab-operations {
+        float: right;
+        margin-bottom: 4px;
+        .ui-button-icon {
+            margin-left: 4px;
+        }
+    }
+    .tab-element {
+        cursor: pointer;
+    }
+    .tab-icon {
+        font-size: 1.1em;
+        margin-right: @ui-margin-horizontal;
+    }
+    .tab-delete {
+        font-size: 0.8em;
+        margin-left: @ui-margin-horizontal;
+        cursor: pointer;
+    }
+}
+
+// thumbnails
+.ui-thumbnails {
+    margin-bottom: 10px;
+    .ui-thumbnails-grid {
+        .ui-thumbnails-item {
+            padding: 5px;
+            margin: 5px;
+            cursor: pointer;
+            border-radius:5px;
+            .ui-thumbnails-title {
+                text-align: center;
+                font-size: 0.9em;
+                word-wrap: break-word;
+                font-weight: bold;
+                width: 100px;
+            }
+            .ui-thumbnails-image {
+                padding: 10px;
+                width: 100px;
+                height: 86px;
+            }
+            .ui-thumbnails-description-title {
+                font-weight: bold;
+            }
+            .ui-thumbnails-description-text {
+                word-wrap: break-word;
+            }
+        }
+        .ui-thumbnail-current {
+            border-color:#66afe9;
+            background-color:#EEEEFF;
+            outline:0;
+            -webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);
+            box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);
+        }
+        .ui-thumbnails-item-float {
+            float: left;
+            height: 140px;
+        }
+    }
+}
+
+// tables
+.ui-table {
+    &:extend(.grid);
+    tbody {
+        cursor: pointer;
+        td {
+            &:extend(.grid tbody td);
+        }
+    }
+}
+
+.ui-table-striped {
+    &:extend(.table all);
+    &:extend(.table-striped all);
+    width : 100%;
+    margin-bottom: 0px;
+    th {
+        text-align: center;
+        white-space: nowrap;
+    }
+    td {
+        margin: 0px;
+        .ui-button-icon {
+            line-height: 1.2em;
+        }
+    }
+}
+
+.ui-table-plain {
+    &:extend(.ui-table all);
+    table-layout:fixed;
+
+    tbody {
+        cursor: auto !important;
+        td {
+            padding: 0px !important;
+            border: none !important;
+        }
+    }
+
+    .ui-table-section {
+        border-left: solid 3px @form-heading-bg;
+        border-radius: @border-radius-large !important;
+        margin-bottom: @ui-margin-vertical;
+        padding-left: 10px;
+    }
+}
+
+// table form elements
+.ui-form-element {
+    margin-top: @ui-margin-vertical;
+    margin-bottom: @ui-margin-vertical;
+    overflow: visible;
+    clear: both;
+    .ui-form-title {
+        word-wrap: break-word;
+        font-weight: bold;
+        .ui-form-collapsible {
+            cursor: pointer;
+            .icon {
+                font-size: 1.2em;
+                width: 20px;
+                position: relative;
+                top: 1px;
+            }
+        }
+    }
+    .ui-form-field {
+        position: relative;
+        margin-top: @ui-margin-vertical;
+        .ui-form-wp-source {
+            border-left-width: 10px;
+        }
+        .ui-form-wp-target {
+            box-shadow: none;
+            border-top: none;
+            border-bottom: none;
+            border-right: none;
+            border-left-width: 5px;
+            font-style: italic;
+        }
+        .ui-form-backdrop {
+            z-index: 10;
+            position: absolute;
+            top: 0px;
+            width: 100%;
+            height: 100%;
+            background: @white;
+            display: block;
+            opacity: 0.0;
+            cursor: default;
+        }
+    }
+    .ui-form-preview {
+        &:extend(.ui-input);
+        margin-top: @ui-margin-vertical;
+        border-color: transparent !important;
+        box-shadow: none !important;
+        height: auto !important;
+    }
+    .workflow-right .right-content {
+        margin-bottom: 10px;
+    }
+}
+
+.ui-form-element-disabled {
+    &:extend(.ui-form-element all);
+    .ui-form-title {
+        font-weight: normal;
+    }
+}
+
+.ui-form-separator {
+    font-weight: bold;
+    font-size: 0.9em;
+}
+
+.ui-form-help {
+    overflow: auto;
+}
+
+.ui-form-info {
+    &:extend(.toolParamHelp);
+    clear: both !important;
+    word-wrap: break-word;
+    i {
+        font-size: 1.2em;
+        padding: 2px 5px;
+    }
+    .ui-form-title {
+        clear:both;
+        font-weight: bold;
+        padding-right: @ui-margin-horizontal;
+    }
+}
+
+.ui-form-error {
+    display: none;
+    .ui-form-error-text  {
+        padding-left: @ui-margin-horizontal;
+    }
+}
+
+.ui-form-section {
+    border-left: solid 3px @form-heading-bg;
+    border-radius: @border-radius-large !important;
+    padding-left: 10px;
+}
+
+.ui-form-composite {
+    max-width: 900px;
+    .ui-form-header {
+        &:extend(h3);
+        margin-top: 0px;
+        .ui-label {
+            width: ~'calc(100% - 200px)';
+        }
+    }
+    .ui-steps {
+        overflow: auto;
+        margin-top: 15px;
+        border: dashed 1px darken(@form-heading-bg, 30%);
+        border-radius: @border-radius-base;
+        padding: @ui-margin-horizontal;
+        .ui-portlet-narrow {
+            .portlet-header {
+                padding: 1px 5px 0px 8px;
+            }
+            padding: 0px @ui-margin-horizontal 0px @ui-margin-horizontal;
+            .portlet-content {
+                padding: 0px @ui-margin-horizontal 0px @ui-margin-horizontal;
+            }
+            .ui-table-section, .ui-portlet-repeat .portlet-content {
+                padding-left: @ui-margin-horizontal-large;
+            }
+        }
+    }
+    .ui-message {
+        margin-top: 0px;
+        margin-bottom:10px;
+    }
+}
+
+// portlets
+.ui-portlet {
+    border: solid @form-border 1px;
+    border-radius: @border-radius-base;
+    position: relative;
+    clear: both;
+    width: auto;
+    height: 100%;
+    .portlet-header {
+        background: @form-heading-bg;
+        border-bottom: solid @form-border 1px;
+        padding: 2px 8px;
+        overflow: visible;
+        width:100%;
+        .portlet-title {
+            word-wrap: break-word;
+            display: inline;
+            vertical-align: middle;
+            .portlet-title-text {
+                vertical-align: middle;
+                line-height: 22px;
+            }
+            .portlet-title-text.no-highlight.collapsible {
+                cursor: pointer;
+                text-decoration: underline;
+            }
+            .portlet-title-icon {
+                font-size: 1.2em;
+                vertical-align: middle;
+                margin-right: 5px;
+            }
+        }
+        .portlet-operations {
+            .ui-button-icon {
+                margin-left: @ui-margin-horizontal;
+            }
+        }
+    }
+    .portlet-content {
+        height: inherit;
+        padding-left: 10px;
+        padding-right: 10px;
+        clear: both;
+        .portlet-body {
+            padding     : 0px;
+            height      : 100%;
+            width       : 100%;
+        }
+        .portlet-buttons {
+            margin-top: @ui-margin-vertical;
+            margin-bottom: @ui-margin-vertical;
+        }
+    }
+    .portlet-content.nopadding, .portlet-body.nopadding {
+        padding: 0px;
+    }
+    .portlet-backdrop {
+        display: none;
+        z-index: 10;
+        position: absolute;
+        top: 0px;
+        width: 100%;
+        height: 100%;
+        opacity: 0.2;
+        background: @white;
+        cursor: not-allowed;
+    }
+}
+
+.ui-portlet-repeat {
+    &:extend(.ui-portlet);
+    border: none;
+    border-left: solid 3px @form-heading-bg;
+    border-radius: @border-radius-large;
+    margin-bottom: @ui-margin-vertical;
+    .portlet-header {
+        background: @form-heading-bg;
+        border-radius: @border-radius-large;
+        border-bottom-left-radius: 0px;
+        border-top-left-radius: 0px;
+        padding: 0px 2px;
+        .portlet-title-text {
+            vertical-align: middle;
+            line-height: 20px !important;
+        }
+    }
+    .portlet-content {
+        padding-right: 0px;
+    }
+}
+
+.ui-portlet-panel {
+    &:extend(.ui-portlet-repeat all);
+    margin-top: @ui-margin-vertical;
+    border-left: solid 3px @side-panel-bg;
+    .portlet-header {
+        background: @side-panel-bg;
+        border-bottom: solid darken(@side-panel-bg, 10%) 1px;
+    }
+}
+
+.ui-portlet-section {
+    &:extend(.ui-portlet-panel all);
+    .portlet-highlight {
+        text-decoration: underline;
+    }
+}
+
+.ui-portlet-narrow {
+    &:extend(.ui-portlet-limited all);
+    border: none;
+    margin-bottom: @ui-margin-vertical-large;
+    .portlet-header {
+        border-radius: @border-radius-base;
+        .portlet-operations {
+            .ui-button-icon {
+                margin-left: 3px;
+            }
+        }
+    }
+    .ui-portlet-repeat {
+        .portlet-header {
+            border-radius: @border-radius-large;
+            border-bottom-left-radius: 0px;
+            border-top-left-radius: 0px;
+        }
+    }
+    .portlet-content {
+        padding: 0px;
+    }
+    .ui-table-section, .ui-portlet-repeat .portlet-content {
+        padding-left: @ui-margin-horizontal;
+    }
+}
+
+.ui-portlet-limited {
+    &:extend(.ui-portlet all);
+    max-width: 900px;
+}
+
+.ui-portlet-plain {
+    &:extend(.ui-portlet all);
+    border: none;
+    .portlet-content {
+        padding: 0px;
+    }
+}
+
+// popovers
+.ui-popover {
+    max-width: 700px;
+    display: none;
+    .popover-close {
+        position: absolute;
+        right: 10px;
+        top: 7px;
+        font-size: 1.2em;
+        cursor: pointer;
+    }
+    .popover-title {
+        padding: 4px 10px;
+    }
+}
+
+// input elements
+.ui-input {
+    width: 100%;
+    display: block;
+    height: @input-height-base;
+    padding: @padding-base-vertical @ui-margin-horizontal !important;
+    font-size: @font-size-base;
+    line-height: @line-height-base;
+    color: @input-color;
+    border: 1px solid @input-border;
+    border-radius: @input-border-radius;
+    .box-shadow(inset 0 1px 1px rgba(0,0,0,.075));
+    background-image: none;
+    background: transparent;
+    -webkit-appearance: none;
+    -moz-border-radius: @border-radius-base;
+    &[disabled],
+    &[readonly],
+    fieldset[disabled] & {
+        cursor: not-allowed;
+        background-color: @input-bg-disabled;
+    }
+}
+
+.ui-input-basic {
+    &:extend(.ui-input);
+    .box-shadow(inset 0 -1px 1px rgba(0,0,0,.075));
+}
+
+.ui-textarea {
+    &:extend(.ui-input);
+    height: 100px !important;
+}
+
+.ui-options {
+    .ui-options-list {
+        &:extend(.ui-input);
+        height: 100% !important;
+        input[type=checkbox], input[type=radio] {
+            display: none;
+        }
+        label {
+            margin: 0px;
+            cursor: pointer;
+        }
+        .ui-drilldown-button {
+            float:left;
+            cursor: pointer;
+            font-size: 1.2em;
+            margin-right: 5px;
+            margin-top: 3px;
+
+        }
+        .ui-drilldown-subgroup {
+            display: none;
+            margin-left: 25px;
+        }
+        input[type=checkbox] + label:before {
+            font-family: FontAwesome;
+            display: inline-block;
+            font-style: normal;
+            font-size: 1.2em;
+            margin-right: 5px;
+            position: relative;
+            top: 1px;
+            width: 10px;
+            content: "\f096";
+        }
+        input[type=checkbox]:checked + label:before {
+            content: "\f046";
+        }
+        input[type=radio] + label:before {
+            font-family: FontAwesome;
+            display: inline-block;
+            font-style: normal;
+            font-size: 1.2em;
+            margin-right: 5px;
+            position: relative;
+            top: 1px;
+            width: 10px;
+            content: "\f1db";
+        }
+        input[type=radio]:checked + label:before {
+            content: "\f192";
+        }
+    }
+}
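+
+// Markup sketch for the FontAwesome-styled options above (illustrative, not
+// from upstream): the native input is hidden and the glyph comes from the
+// label's :before content, so each label must immediately follow its input:
+//   <li><input type="checkbox" id="opt-a"/><label for="opt-a">Option A</label></li>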
+
+.ui-form-slider {
+    display: inline-block;
+    width: 100%;
+    .ui-form-slider-text {
+        &:extend(.ui-input);
+        width: 100px;
+        float: left;
+    }
+
+    .ui-form-slider-element {
+        width: ~'calc(100% - 110px)';
+        float: left;
+        top: 8px;
+        left: 10px;
+    }
+}
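+
+// note: the calc() above subtracts 110px, i.e. the 100px .ui-form-slider-text
+// box plus the element's 10px left offset, so the text box and slider share a
+// single row (assumed intent, not documented upstream)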
+
+.ui-radiobutton {
+    width: 100%;
+    label {
+        height: 23px;
+        line-height: 1em;
+    }
+
+    i {
+        padding-right: @ui-margin-horizontal;
+        font-size: 1.1em;
+    }
+}
+
+.ui-button-check {
+    .icon {
+        font-size: 1.2em;
+        width: 1.2em;
+    }
+    cursor: pointer;
+    margin-bottom: @ui-margin-vertical;
+}
+
+.ui-color-picker {
+    .ui-color-picker-header {
+        cursor: pointer;
+    }
+    .ui-color-picker-value {
+        float: left;
+        border-radius: 3px;
+        border: solid 1px #000000;
+        width: 12px;
+        height: 12px;
+        margin-right: 5px;
+    }
+    .ui-color-picker-label {
+        float: left;
+        line-height: 1.2em;
+    }
+    .ui-color-picker-view {
+        height: 100%;
+        overflow: auto;
+        display: none;
+        float: left;
+        margin-top: 5px;
+        .ui-color-picker-panel {
+            width: 210px;
+            .ui-color-picker-content {
+                margin-bottom: 15px;
+                .label {
+                    padding-bottom: 2px;
+                }
+                .line {
+                    .ui-color-picker-box {
+                        cursor: pointer;
+                        float: left;
+                        margin-right: 5px;
+                        border: solid 1px #c0c0c0;
+                        width: 15px;
+                        height: 15px;
+                        border-radius: 2px;
+                        .ui-color-picker-check {
+                            color: black;
+                            font-size: 1.2em;
+                            position: relative;
+                            left: 1px;
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
+
+.ui-list {
+    .ui-list-select {
+        float: left;
+        width: ~'calc(100% - 27px)';
+    }
+    .ui-list-button {
+        .ui-button-icon {
+            margin-top: 3px;
+            margin-right: 5px;
+        }
+    }
+    .ui-list-message, .ui-list-portlet {
+        clear: both;
+    }
+    .ui-list-id {
+        cursor: pointer;
+        margin-top: 5px;
+        .ui-list-delete {
+            font-size: 1.2em;
+            margin-right: 5px;
+        }
+    }
+}
+
+.ui-select {
+    position: relative;
+    .icon-dropdown {
+        position: absolute;
+        top: 7px;
+        right: 8px;
+        cursor: pointer;
+    }
+    .icon-resize {
+        display: none;
+    }
+    select {
+        position: relative;
+        top: 0px;
+        height: 27px;
+        width: 100%;
+        padding-right: 20px;
+        &:extend(.ui-input-basic);
+        cursor: pointer;
+        padding-left: 5px;
+    }
+    .select2-container {
+        width: 100%;
+        .select2-choice {
+            height: 27px;
+            &:extend(.ui-input-basic);
+            padding-left: 5px;
+            .select2-arrow {
+                display: none;
+            }
+        }
+    }
+    .select2-container-multi {
+        .select2-choices {
+            &:extend(.ui-input);
+            padding: 0px;
+        }
+    }
+}
+
+.ui-select-multiple {
+    .icon-resize {
+        display: block;
+        position: absolute;
+        bottom: 0px;
+        right: 0px;
+        width: 20px;
+        height: 20px;
+        cursor: row-resize;
+        i {
+            position: absolute;
+            right: 3px;
+            bottom: 2px;
+        }
+    }
+    select {
+        height: 80px;
+        .box-shadow(inset 0 1px 1px rgba(0,0,0,.075));
+    }
+}
+
+.ui-select-content {
+    .ui-options {
+        float: left;
+        padding-top: 2px;
+    }
+}
+
+// global select2 adjustments
+.select2-container .select2-choice,
+.select2-container .select2-choices {
+    border: 1px solid @input-border;
+}
+.select2-dropdown-open .select2-choice,
+.select2-dropdown-open .select2-choices {
+    border: 1px solid @input-border;
+    border-bottom-left-radius: 0px;
+    border-bottom-right-radius: 0px;
+}
+.select2-dropdown-open.select2-drop-above .select2-choice,
+.select2-dropdown-open.select2-drop-above .select2-choices {
+    border: 1px solid @input-border;
+    border-top-left-radius: 0px;
+    border-top-right-radius: 0px;
+}
+.select2-drop-active {
+    border: 1px solid @input-border;
+    border-radius: @input-border-radius;
+    border-top-left-radius: 0px;
+    border-top-right-radius: 0px;
+    .select2-search {
+        padding-top: 5px;
+    }
+}
+.select2-drop.select2-drop-above.select2-drop-active {
+    border: 1px solid @input-border;
+    border-radius: @input-border-radius;
+    border-bottom-left-radius: 0px;
+    border-bottom-right-radius: 0px;
+    .select2-search {
+        padding-top: 0px;
+    }
+}
diff --git a/client/galaxy/style/less/ui/dataset-choice.less b/client/galaxy/style/less/ui/dataset-choice.less
new file mode 100644
index 0000000..c23979c
--- /dev/null
+++ b/client/galaxy/style/less/ui/dataset-choice.less
@@ -0,0 +1,94 @@
+// component to display a dataset choice and activate a modal chooser (single *and* multiple datasets)
+.dataset-choice {
+    border: 1px solid lightgrey;
+    border-radius: 3px;
+    overflow: hidden;
+    padding: 10px 8px 8px 8px;
+
+    &:hover {
+        border-color: black;
+        cursor: pointer;
+        & > * {
+            cursor: pointer;
+        }
+    }
+    .prompt {
+        margin-right: 8px;
+        &:after {
+            content: ':';
+        }
+        &:empty {
+            display: none;
+        }
+    }
+    .none-selected-msg {
+        color: grey;
+    }
+
+    .selected {
+        display: inline-block;
+
+        .title {
+            font-weight: bold;
+        }
+        .subtitle {
+            color: grey;
+            &:before {
+                content: '-';
+                margin: 0px 4px 0px 4px;
+            }
+            i {
+                font-style: normal;
+                &:not(:last-child):after {
+                    content: ', ';
+                }
+            }
+        }
+    }
+}
+
+// component to display a dataset choice and activate a modal chooser (multiple datasets)
+.dataset-choice.multi {
+    .selected {
+        display: block;
+        font-weight: normal;
+    }
+
+    table {
+        width: 100%;
+        margin-top: 8px;
+        cursor: pointer;
+
+        &:hover {
+            border-color: black;
+        }
+        tr:nth-child(even) {
+            background-color: aliceblue;
+        }
+        th {
+            padding: 0px;
+            font-weight: normal;
+            font-size: 80%;
+            color: grey;
+        }
+        th:not(:last-child),
+        td:not(:last-child) {
+            padding-right: 8px;
+        }
+        td.cell-name {
+            font-weight: bold;
+        }
+    }
+}
+
+// modal allowing single or multiple dataset selection - often activated by .dataset-choice above
+.dataset-choice-modal .list-panel {
+    .list-item.dataset {
+        border-left-width: 1px;
+        border-right-width: 1px;
+    }
+    .controls .title .name {
+        font-size: 120%;
+    }
+}
+
diff --git a/client/galaxy/style/less/ui/error-modal.less b/client/galaxy/style/less/ui/error-modal.less
new file mode 100644
index 0000000..34d0d51
--- /dev/null
+++ b/client/galaxy/style/less/ui/error-modal.less
@@ -0,0 +1,34 @@
+// augmented ui-modal meant to replace alert() and display error details
+.error-modal .modal-content {
+    .errormessagelarge;
+    & > * {
+        padding: 0;
+        margin: 0;
+        border: none;
+    }
+    // error details are in modal-content after modal-body
+    .error-details {
+        display: none;
+        margin-top: 8px;
+        // after a short preamble, show the details
+        pre {
+            // error details are black & white, word-wrapped
+            border-radius: 3px;
+            background: white;
+            padding: 8px;
+            white-space: pre-wrap;
+            color: black;
+        }
+    }
+    .modal-footer {
+        margin-top: 8px;
+        // float buttons individually, not as a div
+        .buttons {
+            width: 100%;
+            float: none;
+            & > * {
+                float: right;
+            }
+        }
+    }
+}
diff --git a/client/galaxy/style/less/ui/icon-btn.less b/client/galaxy/style/less/ui/icon-btn.less
new file mode 100644
index 0000000..a880656
--- /dev/null
+++ b/client/galaxy/style/less/ui/icon-btn.less
@@ -0,0 +1,61 @@
+@icon-btn-size:     22px;
+@icon-btn-margin:   2px;
+@icon-btn-color:    @btn-default-color;
+
+@icon-btn-bg:       @btn-default-bg;
+@icon-btn-border:   @btn-default-border;
+
+.icon-btn {
+    display : inline-block;
+    height  : @icon-btn-size;
+    width   : @icon-btn-size;
+    font-size: 125%;
+
+    // center the icon
+    text-align: center;
+    line-height: @icon-btn-size - 3;
+
+    // colors and borders
+    border-radius: 3px;
+    border: 1px solid @icon-btn-border;
+    background-color: @icon-btn-bg;
+    color           : @icon-btn-color;
+
+    cursor: pointer;
+
+    &:focus,
+    &:hover {
+        outline: 0;
+        background-color: white;
+        color: maroon;
+    }
+}
+
+.icon-btn.disabled {
+    background-color: transparent;
+    color: #BBB;
+    border-color: #BBB;
+    //color: @icon-btn-border;
+}
+
+.icon-btn-group {
+    display: inline-block;
+    .icon-btn:not(:last-child) {
+        margin: 0px;
+        border-radius: 0px;
+        border-right: none;
+    }
+    .icon-btn:first-child {
+        margin-right: 0px;
+        border-top-left-radius: 3px;
+        border-bottom-left-radius: 3px;
+    }
+    .icon-btn:last-child {
+        margin-left: 0px;
+        border-radius: 0px 3px 3px 0px;
+    }
+    .icon-btn:only-child {
+        margin: 0px;
+        border-radius: 3px;
+    }
+}
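+
+// Usage sketch (illustrative markup, not from upstream): adjacent .icon-btn
+// elements collapse their shared borders and keep rounded outer corners only:
+//   <span class="icon-btn-group">
+//     <span class="icon-btn fa fa-plus"></span><span class="icon-btn fa fa-minus"></span>
+//   </span>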
diff --git a/client/galaxy/style/less/ui/pagination.less b/client/galaxy/style/less/ui/pagination.less
new file mode 100644
index 0000000..735f849
--- /dev/null
+++ b/client/galaxy/style/less/ui/pagination.less
@@ -0,0 +1,34 @@
+// pagination & scrolling pagination: see scripts/jq-plugins/ui/pagination.js
+.pagination {
+    margin: 0px;
+}
+
+.pagination-scroll-container {
+    display         : inline-block;
+    background-color: #F8F8F8;
+    border-radius   : 3px;
+    border          : 1px solid #BFBFBF;
+    overflow        : auto;
+}
+
+.pagination-scroll-container .pagination-page-list {
+    margin          : 3px 0px 3px 0px;
+}
+
+.pagination-scroll-container .pagination-page-list > li:first-child > a,
+.pagination-scroll-container .pagination-page-list > li:first-child > span {
+    border-radius   : 0px;
+    border-left     : 0px;
+}
+
+.pagination-scroll-container .pagination-page-list > li:last-child > a,
+.pagination-scroll-container .pagination-page-list > li:last-child > span {
+    border-radius   : 0px;
+}
+
+.pagination-scroll-container .pagination-page-list > li > a {
+    float           : none;
+    position        : static;
+    border          : 1px solid #BFBFBF;
+    border-width    : 0px 0px 0px 1px;
+}
diff --git a/client/galaxy/style/less/ui/paired-collection-creator.less b/client/galaxy/style/less/ui/paired-collection-creator.less
new file mode 100644
index 0000000..e5270b3
--- /dev/null
+++ b/client/galaxy/style/less/ui/paired-collection-creator.less
@@ -0,0 +1,605 @@
+// ======================================================================== all types
+.collection-creator {
+
+    height: 100%;
+    overflow: hidden;
+
+    -webkit-user-select: none;
+    -moz-user-select: none;
+    -ms-user-select: none;
+
+    // ------------------------------------------------------------------------ general
+    ol, li {
+        list-style: none;
+        padding: 0;
+        margin: 0;
+    }
+    > *:not(.popover) {
+        padding: 0px 8px 0px 8px;
+    }
+    .btn {
+        border-color: #BFBFBF;
+    }
+
+    .vertically-spaced {
+        margin-top: 8px;
+    }
+    .scroll-container {
+        overflow: auto;
+        //overflow-y: scroll;
+    }
+    .truncate {
+        overflow: hidden;
+        white-space: nowrap;
+        text-overflow: ellipsis;
+    }
+    .empty-message {
+        color: grey;
+        font-style: italic;
+    }
+
+    // ------------------------------------------------------------------------ flex
+    &.flex-row-container,
+    .flex-row-container,
+    .flex-column-container {
+        display: -webkit-box;
+        display: -webkit-flex;
+        display: -ms-flexbox;
+        display: flex;
+
+        -webkit-align-items: stretch;
+        -ms-align-items: stretch;
+        align-items: stretch;
+
+        -webkit-align-content: stretch;
+        -ms-align-content: stretch;
+        align-content: stretch;
+    }
+
+    // a series of vertical elements that will expand to fill available space
+    &.flex-row-container,
+    .flex-row-container {
+        -webkit-flex-direction: column;
+        -ms-flex-direction: column;
+        flex-direction: column;
+    }
+    .flex-row {
+        -webkit-flex: 1 auto;
+        -ms-flex: 1 auto;
+        flex: 1 0 auto;
+    }
+    .flex-row.no-flex {
+        -webkit-flex: 0 auto;
+        -ms-flex: 0 auto;
+        flex: 0 0 auto;
+    }
+
+    // a series of horizontal elements that will expand to fill available space
+    .flex-column-container {
+        -webkit-flex-direction: row;
+        -ms-flex-direction: row;
+        flex-direction: row;
+    }
+    .flex-column {
+        -webkit-flex: 1 auto;
+        -ms-flex: 1 auto;
+        flex: 1 1 auto;
+    }
+    .flex-column.no-flex {
+        -webkit-flex: 0 auto;
+        -ms-flex: 0 auto;
+        flex: 0 0 auto;
+    }
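+
+    // Assumed usage (sketch, not from upstream): .no-flex rows keep their
+    // natural height while plain .flex-row elements stretch to fill the rest:
+    //   <div class="collection-creator flex-row-container">
+    //     <div class="header flex-row no-flex">...</div>
+    //     <div class="middle flex-row">fills the remaining height</div>
+    //   </div>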
+
+    // ------------------------------------------------------------------------ sub-components
+    .choose-filters {
+        .help {
+            margin-bottom: 2px;
+            font-size: 90%;
+            color: grey;
+        }
+        button {
+            width: 100%;
+            margin-top: 2px;
+        }
+    }
+
+    .header .alert {
+        display: none;
+
+        li {
+            list-style: circle;
+            margin-left: 32px;
+        }
+    }
+
+    // ------------------------------------------------------------------------ columns
+    .column {
+        width: 30%;
+    }
+    .column-title {
+        height: 22px;
+        line-height: 22px;
+        overflow: hidden;
+        &:hover {
+            text-decoration: underline;
+            cursor: pointer;
+        }
+        .title {
+            font-weight: bold;
+        }
+        .title-info {
+            color: grey;
+            &:before {
+                content: ' - ';
+            }
+        }
+    }
+
+    .paired-column {
+        text-align: center;
+        // mess with these two to make center more/less priority
+        width: 22%;
+    }
+
+    // ------------------------------------------------------------------------ header
+    .header {
+        .main-help {
+            margin-bottom: 17px;
+            overflow: hidden;
+            padding: 15px;
+            background: #f8f8f8;
+
+            &:not(.expanded) {
+                // chosen to match alert - dependent on line height and .alert padding
+                max-height: 49px;
+                .help-content {
+                    p:first-child {
+                        overflow: hidden;
+                        white-space: nowrap;
+                        text-overflow: ellipsis;
+                    }
+                    > *:not(:first-child) {
+                        display: none;
+                    }
+                }
+            }
+            &.expanded {
+                max-height: none;
+            }
+
+            .help-content {
+                i {
+                    cursor: help;
+                    border-bottom: 1px dotted grey;
+                    font-style: normal;
+                    //font-weight: bold;
+                    //text-decoration: underline;
+                    //text-decoration-style: dashed;
+                }
+                ul, li {
+                    list-style: circle;
+                    margin-left: 16px;
+                }
+                .less-help {
+                    display: inline-block;
+                    width: 100%;
+                    text-align: right;
+                }
+            }
+            .more-help {
+                //display: inline-block;
+                float: right;
+            }
+        }
+        .column-headers {
+            .column-header {
+                //min-height: 45px;
+                .unpaired-filter {
+                    width: 100%;
+                    .search-query {
+                        width: 100%;
+                        height: 22px;
+                    }
+                }
+            }
+        }
+        .paired-column a:not(:last-child) {
+            margin-right: 8px;
+        }
+        .reverse-column .column-title {
+            text-align: right;
+        }
+
+    }
+
+    // ------------------------------------------------------------------------ middle
+    // ---- all
+    // macro
+    .flex-bordered-vertically {
+        // giving these any static height (even 0) lets the flex children be
+        // sized to the available space; omitting it makes the page full-height
+        height: 0;
+
+        border: 1px solid lightgrey;
+        border-width: 1px 0 1px 0;
+    }
+
+    .column-datasets {
+        list-style: none;
+        overflow: hidden;
+
+        .dataset {
+            height: 32px;
+            margin-top: 2px;
+            &:last-of-type {
+                margin-bottom: 2px;
+            }
+            border: 1px solid lightgrey;
+            border-radius: 3px;
+            padding: 0 8px 0 8px;
+            line-height: 28px;
+            cursor: pointer;
+
+            &.unpaired {
+                border-color: grey;
+            }
+
+            &.paired {
+                margin-left: 34px;
+                margin-right: 34px;
+                border: 2px solid grey;
+                background: #AFF1AF;
+
+                span {
+                    display: inline-block;
+                    overflow: hidden;
+                }
+                .forward-dataset-name {
+                    text-align: right;
+                    border-right: 1px solid grey;
+                    padding-right: 8px;
+                    &:after {
+                        margin-left: 8px;
+                        font-family: FontAwesome;
+                        content: '\f061';
+                    }
+                }
+                .pair-name-column {
+                    text-align: center;
+                    .pair-name:hover {
+                        text-decoration: underline;
+                    }
+                }
+                .reverse-dataset-name {
+                    border-left: 1px solid grey;
+                    padding-left: 8px;
+                    &:before {
+                        margin-right: 8px;
+                        font-family: FontAwesome;
+                        content: '\f060';
+                    }
+                }
+            }
+
+            &:hover {
+                border-color: black;
+            }
+
+            &.selected {
+                border-color: black;
+                background: black;
+                color: white;
+                a {
+                    color: white;
+                }
+            }
+        }
+    }
+
+    // ---- unpaired
+    .unpaired-columns {
+        .flex-bordered-vertically;
+
+        .forward-column {
+            .dataset.unpaired {
+                margin-right: 32px;
+            }
+        }
+        .paired-column {
+            .dataset.unpaired {
+                border-color: lightgrey;
+                color: lightgrey;
+                &:hover {
+                    border-color: black;
+                    color: black;
+                }
+            }
+        }
+        .reverse-column {
+            .dataset.unpaired {
+                text-align: right;
+                margin-left: 32px;
+            }
+        }
+    }
+
+    // ---- partition/divider
+    .flexible-partition {
+        .flexible-partition-drag {
+            width: 100%;
+            height: 8px;
+            cursor: ns-resize;
+            &:before {
+                content: '...';
+            }
+            line-height: 2px;
+            text-align: center;
+            color: lightgrey;
+            &:hover {
+                background: lightgrey;
+                color: black;
+            }
+        }
+        .column-header {
+            width: 100%;
+            text-align: center;
+            .column-title {
+                display: inline;
+            }
+            & > *:not(:last-child) {
+                margin-right: 8px;
+            }
+            .remove-extensions-link {
+                display: none;
+            }
+        }
+    }
+
+    // ---- paired datasets
+    .paired-columns {
+        .flex-bordered-vertically;
+        margin-bottom: 8px;
+
+        .column-datasets {
+            width: 100%;
+            overflow: auto;
+        }
+        .unpair-btn {
+            float: right;
+            margin-top: -32px;
+            width: 31px;
+            height: 32px;
+            //z-index: 1;
+            border-color: transparent;
+            //border-color: #BFBFBF;
+            background: transparent;
+            font-size: 120%;
+            &:hover {
+                border-color: #BFBFBF;
+                background: #DEDEDE;
+            }
+        }
+        .empty-message {
+            text-align: center;
+        }
+    }
+    .element-drop-placeholder {
+        width   : 60px;
+        height  : 3px;
+        margin : 2px 0px 0px 14px;
+        background : black;
+        &:before {
+            float: left;
+            font-size: 120%;
+            margin: -9px 0px 0px -8px;
+            font-family: FontAwesome;
+            content: '\f0da';
+        }
+        &:last-child {
+            margin-bottom: 8px;
+        }
+    }
+
+    // ------------------------------------------------------------------------ footer
+    .footer {
+        .attributes {
+            .remove-extensions-prompt {
+                //margin-right: 32px;
+                line-height: 32px;
+                .remove-extensions {
+                    display: inline-block;
+                    width: 24px;
+                    height: 24px;
+                }
+            }
+            // actually appears/floats to the left of the input
+            .collection-name-prompt {
+                margin: 5px 4px 0 0;
+            }
+            .collection-name-prompt.validation-warning:before {
+                //TODO: localize (somehow)
+                content: '(required)';
+                margin-right: 4px;
+                color: red;
+            }
+            .collection-name {
+                width: 50%;
+                &.validation-warning {
+                    border-color: red;
+                }
+            }
+        }
+        .actions {
+            .other-options > * {
+                // do not display the links to create other collections yet
+                display: none;
+                margin-left: 4px;
+            }
+        }
+        padding-bottom: 8px;
+    }
+}
+
+// ======================================================================== list
+.collection-creator.list-collection-creator {
+
+    .footer {
+        margin-top: 8px;
+    }
+
+    .main-help {
+        cursor: pointer;
+    }
+
+    .collection-elements-controls {
+        margin-bottom: 8px;
+
+        .clear-selected {
+            .pull-right;
+            display: none;
+        }
+    }
+    .collection-elements {
+        max-height: 400px;
+        border: 0px solid lightgrey;
+        overflow-y: auto;
+        overflow-x: hidden;
+    }
+
+    // TODO: taken from .dataset above - swap these out
+    .collection-element {
+        height: 32px;
+        margin: 2px 4px 0px 4px;
+        opacity: 1.0;
+        border: 1px solid lightgrey;
+        border-radius: 3px;
+        padding: 0 8px 0 8px;
+        line-height: 28px;
+        cursor: pointer;
+        overflow: hidden;
+
+        &:last-of-type {
+            margin-bottom: 2px;
+        }
+        &:hover {
+            border-color: black;
+        }
+        &.selected {
+            border-color: black;
+            background: black;
+            color: white;
+            a {
+                color: white;
+            }
+        }
+        &.dragging {
+            opacity: 0.4;
+            button {
+                display: none;
+            }
+        }
+
+        .name {
+            &:hover {
+                text-decoration: underline;
+            }
+        }
+        button {
+            margin-top: 3px;
+        }
+        .discard {
+            .pull-right;
+        }
+    }
+    .element-drop-placeholder {
+        margin-left : 8px;
+        &:before {
+            margin: -8.5px 0px 0px -8px;
+        }
+    }
+    .empty-message {
+        margin: 8px;
+        color: grey;
+        font-style: italic;
+        text-align: center;
+    }
+    .no-elements-left-message {
+        text-align: left;
+    }
+}
+
+// ======================================================================== pair
+.collection-creator.pair-collection-creator {
+    .footer {
+        margin-top: 8px;
+    }
+
+    .main-help {
+        cursor: pointer;
+    }
+
+    .collection-elements-controls {
+        margin-bottom: 8px;
+
+        .clear-selected {
+            .pull-right;
+            display: none;
+        }
+    }
+    .collection-elements {
+        max-height: 400px;
+        border: 0px solid lightgrey;
+        overflow-y: auto;
+        overflow-x: hidden;
+    }
+
+    // TODO: taken from .dataset above - swap these out
+    .collection-element {
+        height: 32px;
+        margin: 2px 4px 0px 4px;
+        opacity: 1.0;
+        border: 1px solid lightgrey;
+        border-radius: 3px;
+        padding: 0 8px 0 8px;
+        line-height: 28px;
+        cursor: pointer;
+        overflow: hidden;
+
+        &:last-of-type {
+            margin-bottom: 2px;
+        }
+        &:hover {
+            border-color: black;
+        }
+
+        button {
+            margin-top: 3px;
+        }
+        .identifier {
+            &:after {
+                content: ':';
+                margin-right: 6px;
+            }
+        }
+        .name {
+            &:hover {
+                text-decoration: none;
+            }
+        }
+    }
+    .empty-message {
+        margin: 8px;
+        color: grey;
+        font-style: italic;
+        text-align: center;
+    }
+}
+
+// ======================================================================== list of pairs
+.collection-creator.list-of-pairs-collection-creator {
+    .column-headers {
+        margin-bottom: 8px;
+    }
+}
+
diff --git a/client/galaxy/style/less/ui/peek-column-selector.less b/client/galaxy/style/less/ui/peek-column-selector.less
new file mode 100644
index 0000000..a8963a7
--- /dev/null
+++ b/client/galaxy/style/less/ui/peek-column-selector.less
@@ -0,0 +1,69 @@
+// peek-based column chooser, see: scripts/jq-plugins/ui/peek-column-selector.js
+.peek-column-selector {
+    border-radius: 3px;
+    border: 1px solid rgb(95, 105, 144);
+}
+
+.peek-column-selector td,
+.peek-column-selector th {
+    padding: 4px 10px 4px 4px;
+}
+
+.peek-column-selector th:last-child {
+    width: 100%;
+}
+
+.peek-column-selector .top-left {
+    width: 10%;
+    white-space: normal;
+    vertical-align: top;
+    text-align: right;
+    font-family: "Lucida Grande", verdana, arial, helvetica, sans-serif;
+    font-weight: normal;
+}
+
+.peek-column-selector .renamable-header:hover {
+    background-color: black;
+}
+
+.peek-column-selector .control td.control-prompt {
+    background-color: rgb(95, 105, 144);
+    padding: 0px 4px 0px 8px;
+    text-align: right;
+    color: white;
+}
+
+.peek-column-selector .control td {
+    padding: 1px;
+    font-family: "Lucida Grande", verdana, arial, helvetica, sans-serif;
+    color: grey;
+}
+
+.peek-column-selector .control td .button {
+    min-width: 28px;
+    border: 1px solid grey;
+    border-radius: 3px;
+    padding: 4px;
+    color: grey;
+}
+
+.peek-column-selector .control td:hover .button {
+    background-color: #EEE;
+    border: 1px solid black;
+    cursor: pointer;
+    color: black;
+}
+
+.peek-column-selector .control td.disabled .button,
+.peek-column-selector .control td.disabled:hover .button {
+    background-color: transparent;
+    border: 1px solid #CCC;
+    cursor: not-allowed;
+    color: #CCC;
+}
+
+.peek-column-selector .control td.selected .button {
+    background-color: black;
+    border: 1px solid black;
+    color: white;
+}
diff --git a/client/galaxy/style/less/ui/search-input.less b/client/galaxy/style/less/ui/search-input.less
new file mode 100644
index 0000000..c7e7422
--- /dev/null
+++ b/client/galaxy/style/less/ui/search-input.less
@@ -0,0 +1,27 @@
+// search bar, see: scripts/jq-plugins/ui/search-input.js
+.search-input {
+    .search-query {
+        width           : 100%;
+        padding-right   : 24px;
+    }
+    .search-clear,
+    .search-loading {
+        // places the icons at the right edge of the bar
+        position    : relative;
+        display     : inline-block;
+
+        float       : right;
+        margin-top  : -24px;
+        margin-right : 4px;
+
+        font-size   : 1.4em;
+        line-height : 23px;
+        color       : grey;
+    }
+
+    .search-clear:hover {
+        color       : @link-color;
+        cursor      : pointer;
+    }
+}
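+
+// Assumed markup (sketch; the actual structure is produced by the
+// search-input.js plugin referenced above):
+//   <div class="search-input">
+//     <input class="search-query" type="text"/>
+//     <span class="search-clear fa fa-times-circle"></span>
+//   </div>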
+
diff --git a/client/galaxy/style/less/upload.less b/client/galaxy/style/less/upload.less
new file mode 100644
index 0000000..d851c78
--- /dev/null
+++ b/client/galaxy/style/less/upload.less
@@ -0,0 +1,287 @@
+.upload-button {
+    position: relative;
+    width: 31px;
+    height: 21px;
+    top: -4px;
+    cursor: pointer;
+    -moz-border-radius: @border-radius-base;
+    border-radius: @border-radius-base;
+    .progress {
+        height: 21px;
+        margin: 0px;
+    }
+    .progress-bar-danger {
+        background: @state-danger-bg;
+    }
+    .progress-bar-success {
+        background: @state-success-bg;
+    }
+    .progress-bar-info {
+        background: @state-running-bg;
+    }
+    .progress-bar-notransition {
+        -webkit-transition: none;
+        -moz-transition: none;
+        -ms-transition: none;
+        -o-transition: none;
+        transition: none;
+    }
+    .panel-header-button {
+        position: absolute;
+        top: 4px;
+        right: 8px;
+        font-size: 1.2em;
+        padding: 0px;
+    }
+}
+
+.upload-view-default {
+    .highlight {
+        border-color    : @table-border-color !important;
+        border-width    : 2px !important;
+        padding         : 9px !important;
+        .upload-helper {
+            color  : @table-border-color !important;
+        }
+    }
+    .upload-box {
+        width           : 100%;
+        height          : 310px;
+        text-align      : center;
+        overflow        : scroll;
+        font-size       : @font-size-base;
+        line-height     : @line-height-large;
+        -moz-border-radius: @border-radius-large;
+        border-radius   : @border-radius-large;
+        border          : 1px dashed @btn-default-border;
+        padding         : 10px;
+        overflow-x      : hidden;
+        -ms-overflow-style: none;
+        .upload-helper {
+            &:extend( h3 );
+            padding: 120px;
+            color: @btn-default-border;
+            i {
+                margin-right: 10px;
+                font-size: inherit;
+            }
+        }
+        .upload-row {
+            .select2-arrow b {
+                background-position: 0 -3px;
+            }
+            .select2-choice {
+                max-height: 20px;
+                line-height: 18px;
+                background: transparent;
+            }
+            .select2-container-disabled {
+                .select2-arrow {
+                    background: transparent;
+                }
+            }
+            .upload-title {
+                width: 130px;
+                word-wrap: break-word;
+                font-size : @font-size-small;
+                float: left;
+            }
+            .upload-size {
+                width: 60px;
+                white-space: nowrap;
+            }
+            .upload-extension {
+                width: 100px;
+                min-width: 100px;
+                font-size : @font-size-small;
+            }
+            .upload-genome {
+                width: 150px;
+                min-width: 150px;
+                font-size : @font-size-small;
+            }
+            .upload-mode {
+                font-size: 1.2em;
+                width: 1.2em;
+                float: left;
+                position: relative;
+                top: 2px;
+            }
+            .upload-text-column {
+                position: relative;
+                .upload-text {
+                    position: absolute;
+                    display: none;
+                    .upload-text-content {
+                        font-size : @font-size-small;
+                        width: 100%;
+                        height: 50px;
+                        resize: none;
+                        background: inherit;
+                        color: @black;
+                    }
+                    .upload-text-info {
+                        &:extend(.text-primary);
+                        font-size : @font-size-small;
+                    }
+                }
+            }
+            .upload-info {
+                width: 130px;
+                font-size : @font-size-small;
+                line-height: 1.2em;
+                .progress {
+                    top:1px;
+                    position: relative;
+                    width: 100%;
+                    padding: 0px;
+                    margin: 0px;
+                    line-height: 1.55em;
+                    .progress-bar {
+                        border-radius: inherit;
+                        -moz-border-radius: inherit;
+                    }
+                    .upload-percentage {
+                        position: absolute;
+                        text-align: center;
+                        width: 100%;
+                        color: @white;
+                    }
+                }
+            }
+        }
+    }
+    .upload-buttons {
+        padding-top: 30px;
+        button {
+            margin-left: 5px;
+        }
+    }
+    .upload-icon {
+        &:extend(.ui-icon);
+    }
+    .upload-icon-button {
+        &:extend(.ui-icon-button);
+        position: relative;
+        top: 1px;
+    }
+}
+
+.upload-view-composite {
+    &:extend(.upload-view-default all);
+    .upload-box {
+        margin-top: 10px;
+        background: lighten(@btn-default-border, 30%);
+        .upload-row {
+            .upload-title {
+                width: 175px;
+            }
+            .upload-source {
+                .ui-button-menu {
+                    width: 70px !important;
+                }
+            }
+        }
+    }
+    .upload-footer {
+        margin-top: 35px;
+    }
+}
+
+.upload-ftp {
+    height: 250px;
+    width: 500px;
+    overflow-y: auto;
+    overflow-x: hidden;
+    .upload-ftp-wait {
+        font-size: 1.2em;
+        position: absolute;
+        bottom: 10px;
+        right: 10px;
+    }
+    .upload-ftp-help {
+        margin-bottom: 10px;
+    }
+    .upload-ftp-warning {
+        text-align: center;
+        margin-top: 20px;
+    }
+    .upload-ftp-row {
+        cursor: pointer;
+        .ftp-name {
+            position: relative;
+            width: 240px;
+            word-wrap: break-word;
+        }
+        .ftp-size {
+            width: 60px;
+            white-space: nowrap;
+        }
+        .ftp-time {
+            width: 165px;
+            white-space: nowrap;
+        }
+    }
+    .upload-ftp-row:hover {
+        &:extend(.grid .current);
+    }
+    th {
+        padding: 5px !important;
+    }
+    td {
+        padding: 8px 5px !important;
+    }
+}
+
+.upload-settings {
+    position: relative;
+    .upload-settings-cover {
+        position: absolute;
+        width: 100%;
+        height: 100%;
+        top: 0px;
+        left: 0px;
+        background: @white;
+        opacity: 0.4;
+        cursor: no-drop;
+    }
+}
+
+.upload-footer {
+    height:20px;
+    text-align:left;
+    max-height:20px;
+    line-height:18px;
+    margin-top: 10px;
+    .upload-footer-title {
+        font-weight: bold;
+        padding-left: 60px;
+        padding-right: 20px;
+    }
+    .upload-footer-selection{
+        width:200px;
+        min-width:200px;
+        font-size:11px;
+        padding-right: 10px;
+    }
+    .select2-choice {
+        .select2-arrow b {
+            background-position: 0 -3px;
+        }
+        max-height: 20px;
+        line-height: 18px;
+        background: transparent;
+        text-align:center;
+        font-weight: normal;
+    }
+}
+
+.upload-top {
+    height:20px;
+    text-align:center;
+    .upload-top-info {
+        margin-top: 0px;
+        font-weight: normal;
+        text-align: center;
+    }
+}
diff --git a/client/galaxy/style/source_material/circle.py b/client/galaxy/style/source_material/circle.py
new file mode 100755
index 0000000..6f6e977
--- /dev/null
+++ b/client/galaxy/style/source_material/circle.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+"""
+usage: %prog size bg_color stroke_color arrow
+"""
+from __future__ import division
+
+import sys
+from math import pi
+
+import cairo
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def parse_css_color( color ):
+    if color.startswith( '#' ):
+        color = color[1:]
+    if len( color ) == 3:
+        # expand CSS shorthand (e.g. 'f80' -> 'ff8800') before converting
+        r = int( color[0] * 2, 16 )
+        g = int( color[1] * 2, 16 )
+        b = int( color[2] * 2, 16 )
+    elif len( color ) == 6:
+        r = int( color[0:2], 16 )
+        g = int( color[2:4], 16 )
+        b = int( color[4:6], 16 )
+    else:
+        raise Exception( "Color should be 3 or 6 hex digits" )
+    return r / 256, g / 256, b / 256
+
+
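+# example: parse_css_color( '#f80' ) == parse_css_color( 'ff8800' ); channels
+# are divided by 256, so 'ff' maps to ~0.996 rather than exactly 1.0
+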
+size = int( sys.argv[1] )
+
+surface = cairo.ImageSurface( cairo.FORMAT_ARGB32, size, size )
+c = cairo.Context( surface )
+
+c.set_line_width( 1 )
+
+c.arc( size / 2.0, size / 2.0, ( size - 1 ) / 2.0, 0, 2 * pi )
+
+c.set_source_rgb( *parse_css_color( sys.argv[2] ) )
+c.fill_preserve()
+
+c.set_source_rgb( *parse_css_color( sys.argv[3] ) )
+c.stroke()
+
+t = size / 4.0
+
+arrow = sys.argv[4]
+if arrow == 'right':
+    c.move_to( t + 1, t )
+    c.line_to( 3 * t - 1, 2 * t )
+    c.line_to( t + 1, 3 * t )
+    c.stroke()
+
+surface.write_to_png( "/dev/stdout" )
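+
+# Example invocation (hypothetical file name; arguments match the argv reads
+# above: size, fill color, stroke color, arrow direction):
+#   python circle.py 24 '#04cd00' '#838383' right > circle_right.png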
diff --git a/client/galaxy/style/source_material/galaxy_icons.svg b/client/galaxy/style/source_material/galaxy_icons.svg
new file mode 100644
index 0000000..6a34999
--- /dev/null
+++ b/client/galaxy/style/source_material/galaxy_icons.svg
@@ -0,0 +1,345 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://web.resource.org/cc/"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://inkscape.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="24in"
+   height="4in"
+   id="svg2"
+   sodipodi:version="0.32"
+   inkscape:version="0.43"
+   version="1.0"
+   sodipodi:docbase="/Users/james/Desktop"
+   sodipodi:docname="galaxy_icons.svg"
+   inkscape:export-filename="/Users/james/Desktop/galaxy_icons.png"
+   inkscape:export-xdpi="90"
+   inkscape:export-ydpi="90">
+  <defs
+     id="defs4" />
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#66f266"
+     borderopacity="1"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="0.87055056"
+     inkscape:cx="1969.2512"
+     inkscape:cy="134.59503"
+     inkscape:document-units="px"
+     inkscape:current-layer="layer1"
+     inkscape:showpageshadow="false"
+     showgrid="true"
+     showguides="false"
+     inkscape:guide-bbox="true"
+     inkscape:window-width="956"
+     inkscape:window-height="784"
+     inkscape:window-x="161"
+     inkscape:window-y="46"
+     inkscape:grid-points="true">
+    <sodipodi:guide
+       orientation="vertical"
+       position="400"
+       id="guide2200" />
+    <sodipodi:guide
+       orientation="horizontal"
+       position="320"
+       id="guide2204" />
+    <sodipodi:guide
+       orientation="horizontal"
+       position="42"
+       id="guide2206" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="41"
+       id="guide2208" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="321"
+       id="guide2210" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="678"
+       id="guide2212" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="759"
+       id="guide2214" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="1120"
+       id="guide2218" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="1038.0328"
+       id="guide2240" />
+    <sodipodi:guide
+       orientation="horizontal"
+       position="179.60512"
+       id="guide2274" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="1260.0643"
+       id="guide2276" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="1482.0958"
+       id="guide2336" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="1759.2817"
+       id="guide2340" />
+    <sodipodi:guide
+       orientation="vertical"
+       position="1841.3061"
+       id="guide2363" />
+  </sodipodi:namedview>
+  <metadata
+     id="metadata7">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Layer 1"
+     inkscape:groupmode="layer"
+     id="layer1">
+    <path
+       sodipodi:type="arc"
+       style="fill:#04cd00;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+       id="path1307"
+       sodipodi:cx="205"
+       sodipodi:cy="205"
+       sodipodi:rx="134.28572"
+       sodipodi:ry="134.28572"
+       d="M 339.28572 205 A 134.28572 134.28572 0 1 1  70.714279,205 A 134.28572 134.28572 0 1 1  339.28572 205 z"
+       transform="matrix(1.034976,0,0,1.034976,-31.24154,-32.6701)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#838383;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+       id="path2220"
+       sodipodi:cx="205"
+       sodipodi:cy="205"
+       sodipodi:rx="134.28572"
+       sodipodi:ry="134.28572"
+       d="M 339.28572 205 A 134.28572 134.28572 0 1 1  70.714279,205 A 134.28572 134.28572 0 1 1  339.28572 205 z"
+       transform="matrix(1.034976,0,0,1.034976,1047.812,-33.15258)" />
+    <rect
+       style="fill:#5050ff;fill-opacity:1;stroke:none;stroke-width:40;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="rect2224"
+       width="277"
+       height="278"
+       x="401"
+       y="43"
+       ry="103"
+       rx="103" />
+    <path
+       style="fill:none;fill-opacity:0.75;fill-rule:evenodd;stroke:#ffffff;stroke-width:40;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       d="M 498.7868,172 L 541.58579,172 L 541.58579,278.00505"
+       id="path2230"
+       sodipodi:nodetypes="ccc" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#ffffff;fill-opacity:1;stroke:#ffffff;stroke-width:40.00038147;stroke-linecap:square;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="path2232"
+       sodipodi:cx="501.33871"
+       sodipodi:cy="84.935463"
+       sodipodi:rx="6.3639612"
+       sodipodi:ry="6.3639612"
+       d="M 507.70268 84.935463 A 6.3639612 6.3639612 0 1 1  494.97475,84.935463 A 6.3639612 6.3639612 0 1 1  507.70268 84.935463 z"
+       transform="matrix(1.0731,0,0,0.951722,-0.622656,25.52899)" />
+    <path
+       sodipodi:type="star"
+       style="fill:#ffb400;fill-opacity:1;stroke:none;stroke-width:50;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="path2254"
+       sodipodi:sides="3"
+       sodipodi:cx="885.29767"
+       sodipodi:cy="14.931891"
+       sodipodi:r1="517.84943"
+       sodipodi:r2="344.36987"
+       sodipodi:arg1="0.50762901"
+       sodipodi:arg2="1.5548266"
+       inkscape:flatsided="false"
+       inkscape:rounded="0.22"
+       inkscape:randomized="0"
+       d="M 1337.846,266.66192 C 1289.0223,354.43494 991.22247,357.65394 890.79693,359.25785 C 790.37139,360.86176 492.62068,367.15437 441.01889,280.98526 C 389.41709,194.81614 535.52926,-64.695565 584.35301,-152.46859 C 633.17675,-240.24161 776.60255,-501.2476 877.02809,-502.8515 C 977.45363,-504.45541 1129.1413,-248.1627 1180.7431,-161.99359 C 1232.3449,-75.824474 1386.6698,178.8889 1337.846,266.66192 z "
+       transform="matrix(0.306393,0,0,0.318225,624.2074,203.0224)" />
+    <path
+       style="fill:none;fill-opacity:0.75;fill-rule:evenodd;stroke:#ffffff;stroke-width:40;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       d="M 113.97056,196.98066 L 168.17157,236.85786 L 248.34315,104.65685"
+       id="path2270"
+       sodipodi:nodetypes="ccc" />
+    <path
+       style="fill:none;fill-opacity:0.75;fill-rule:evenodd;stroke:#ffffff;stroke-width:40;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       d="M 1177.1421,96.07107 L 1263.1421,190 L 1317.1217,162.2807"
+       id="path2272"
+       sodipodi:nodetypes="ccc" />
+    <rect
+       style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:40;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="rect2280"
+       width="29.446899"
+       height="21"
+       x="1140.5531"
+       y="170"
+       ry="0" />
+    <rect
+       style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:40;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="rect2326"
+       width="29.446899"
+       height="21"
+       x="1351.5531"
+       y="169.77354"
+       ry="0" />
+    <rect
+       style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:40;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="rect2328"
+       width="29.446899"
+       height="21"
+       x="60.260208"
+       y="-1271.2715"
+       ry="0"
+       transform="matrix(0,1,-1,0,0,0)" />
+    <rect
+       style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:40;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="rect2330"
+       width="29.446899"
+       height="21"
+       x="270"
+       y="-1270.5857"
+       ry="0"
+       transform="matrix(0,1,-1,0,0,0)" />
+    <rect
+       style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:40;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="rect2332"
+       width="131"
+       height="52"
+       x="100"
+       y="-920.58582"
+       ry="0"
+       transform="matrix(0,1,-1,0,0,0)" />
+    <rect
+       style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:40;stroke-linecap:square;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       id="rect2334"
+       width="39.027634"
+       height="52"
+       x="250"
+       y="-919.87872"
+       ry="0"
+       transform="matrix(0,1,-1,0,0,0)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#ff4343;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+       id="path2338"
+       sodipodi:cx="205"
+       sodipodi:cy="205"
+       sodipodi:rx="134.28572"
+       sodipodi:ry="134.28572"
+       d="M 339.28572 205 A 134.28572 134.28572 0 1 1  70.714279,205 A 134.28572 134.28572 0 1 1  339.28572 205 z"
+       transform="matrix(1.034976,0,0,1.034976,1409.347,-33.15258)" />
+    <path
+       style="fill:none;fill-opacity:0.75;fill-rule:evenodd;stroke:#ffffff;stroke-width:40;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       d="M 1552.0629,100.60072 L 1692.5138,254.57128"
+       id="path2348"
+       sodipodi:nodetypes="cc" />
+    <path
+       style="fill:none;fill-opacity:0.75;fill-rule:evenodd;stroke:#ffffff;stroke-width:40;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+       d="M 1692.5138,100.60072 L 1552.0629,254.57128"
+       id="path2357"
+       sodipodi:nodetypes="cc" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#838383;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
+       id="path2359"
+       sodipodi:cx="205"
+       sodipodi:cy="205"
+       sodipodi:rx="134.28572"
+       sodipodi:ry="134.28572"
+       d="M 339.28572 205 A 134.28572 134.28572 0 1 1  70.714279,205 A 134.28572 134.28572 0 1 1  339.28572 205 z"
+       transform="matrix(1.034976,0,0,1.034976,1768.416,-30.36099)" />
+    <g
+       id="g2389">
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2373"
+         d="M 1930,170.49538 L 1860,155 C 1857.0248,172.3719 1855.5,189.5625 1860,206 L 1930,189.58727 C 1929.1741,183.22331 1928.9462,176.85934 1930,170.49538 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2375"
+         d="M 1941,146.46593 L 1888.1259,98.046542 C 1875.1851,112.76897 1868.0802,127.49141 1862.6259,142.21384 L 1931.4541,163 C 1933.471,155.74098 1937.1235,150.93552 1941,146.46593 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2377"
+         d="M 1963.7479,130.36947 L 1942.1673,62 C 1927,66 1912.7224,73.141087 1898,87.50001 L 1947.2139,139.91545 C 1952.1833,135.10778 1957.8405,132.36324 1963.7479,130.36947 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+    </g>
+    <g
+       id="g2394"
+       transform="matrix(0,1,-1,0,2161,-1801.748)">
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2396"
+         d="M 1930,170.49538 L 1860,155 C 1857.0248,172.3719 1855.5,189.5625 1860,206 L 1930,189.58727 C 1929.1741,183.22331 1928.9462,176.85934 1930,170.49538 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2398"
+         d="M 1941,146.46593 L 1888.1259,98.046542 C 1875.1851,112.76897 1868.0802,127.49141 1862.6259,142.21384 L 1931.4541,163 C 1933.471,155.74098 1937.1235,150.93552 1941,146.46593 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2400"
+         d="M 1963.7479,130.36947 L 1942.1673,62 C 1927,66 1912.7224,73.141087 1898,87.50001 L 1947.2139,139.91545 C 1952.1833,135.10778 1957.8405,132.36324 1963.7479,130.36947 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+    </g>
+    <g
+       id="g2402"
+       transform="matrix(-1,0,0,-1,3962.168,360.1885)">
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2404"
+         d="M 1930,170.49538 L 1860,155 C 1857.0248,172.3719 1855.5,189.5625 1860,206 L 1930,189.58727 C 1929.1741,183.22331 1928.9462,176.85934 1930,170.49538 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2406"
+         d="M 1941,146.46593 L 1888.1259,98.046542 C 1875.1851,112.76897 1868.0802,127.49141 1862.6259,142.21384 L 1931.4541,163 C 1933.471,155.74098 1937.1235,150.93552 1941,146.46593 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2408"
+         d="M 1963.7479,130.36947 L 1942.1673,62 C 1927,66 1912.7224,73.141087 1898,87.50001 L 1947.2139,139.91545 C 1952.1833,135.10778 1957.8405,132.36324 1963.7479,130.36947 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+    </g>
+    <g
+       id="g2410"
+       transform="matrix(0,-1,1,0,1800.429,2161.522)">
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2412"
+         d="M 1930,170.49538 L 1860,155 C 1857.0248,172.3719 1855.5,189.5625 1860,206 L 1930,189.58727 C 1929.1741,183.22331 1928.9462,176.85934 1930,170.49538 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2414"
+         d="M 1941,146.46593 L 1888.1259,98.046542 C 1875.1851,112.76897 1868.0802,127.49141 1862.6259,142.21384 L 1931.4541,163 C 1933.471,155.74098 1937.1235,150.93552 1941,146.46593 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="ccccc"
+         id="path2416"
+         d="M 1963.7479,130.36947 L 1942.1673,62 C 1927,66 1912.7224,73.141087 1898,87.50001 L 1947.2139,139.91545 C 1952.1833,135.10778 1957.8405,132.36324 1963.7479,130.36947 z "
+         style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+    </g>
+  </g>
+</svg>
diff --git a/client/galaxy/style/source_material/galaxy_spinner.fla b/client/galaxy/style/source_material/galaxy_spinner.fla
new file mode 100644
index 0000000..e984be2
Binary files /dev/null and b/client/galaxy/style/source_material/galaxy_spinner.fla differ
diff --git a/client/grunt-tasks/install-libs.js b/client/grunt-tasks/install-libs.js
new file mode 100644
index 0000000..b5c671f
--- /dev/null
+++ b/client/grunt-tasks/install-libs.js
@@ -0,0 +1,93 @@
+/**
+ * Grunt task to fetch third-party libraries using bower and copy them to the proper location in client/.
+ * @param  {Object} grunt main grunt file
+ * @return {Function} callback to build this task
+ */
+module.exports = function( grunt ){
+    "use strict";
+
+    var dev_path = './galaxy/scripts',
+
+        // where to move fetched bower components into the build structure (libName: [ bower-location, libs-location ])
+        libraryLocations = {
+            'jquery':         [ 'dist/jquery.js', 'jquery/jquery.js' ],
+            'jquery-migrate': [ 'jquery-migrate.js', 'jquery/jquery.migrate.js' ],
+            'ravenjs':        [ 'dist/raven.js', 'raven.js' ],
+            'underscore':     [ 'underscore.js', 'underscore.js' ],
+            'backbone':       [ 'backbone.js', 'backbone.js' ],
+            'requirejs':      [ 'require.js', 'require.js' ],
+            'd3':             [ 'd3.js', 'd3.js' ],
+            'bib2json':       [ 'Parser.js', 'bibtex.js' ],
+
+            'farbtastic':     [ 'src/farbtastic.js', 'farbtastic.js' ],
+            'jQTouch':        [ 'src/reference/jqtouch.js', 'jquery/jqtouch.js' ],
+            'bootstrap-tour': [ 'build/js/bootstrap-tour.js', 'bootstrap-tour.js' ],
+            'jquery.complexify':     [ 'jquery.complexify.js', 'jquery.complexify.js' ],
+
+            // these need to be updated and tested
+            //'jquery-form': [ 'jquery.form.js', 'jquery/jquery.form.js' ],
+            //'jquery-autocomplete': [ 'src/jquery.autocomplete.js', 'jquery/jquery.autocomplete.js' ],
+            //'select2': [ 'select2.js', 'jquery/select2.js' ],
+            //'jStorage': [ 'jstorage.js', 'jquery/jstorage.js' ],
+            //'jquery.cookie': [ '', 'jquery/jquery.cookie.js' ],
+            //'dynatree': [ 'dist/jquery.dynatree.js', 'jquery/jquery.dynatree.js' ],
+            //'jquery-mousewheel': [ 'jquery.mousewheel.js', 'jquery/jquery.mousewheel.js' ],
+            //'jquery.event.drag-drop': [
+            //  [ 'event.drag/jquery.event.drag.js', 'jquery/jquery.event.drag.js' ],
+            //  [ 'event.drag/jquery.event.drop.js', 'jquery/jquery.event.drop.js' ]
+            //],
+
+            // these are complicated by additional css/less
+            //'toastr': [ 'toastr.js', 'toastr.js' ],
+            //'wymeditor': [ 'dist/wymeditor/jquery.wymeditor.js', 'jquery/jquery.wymeditor.js' ],
+            //'jstree': [ 'jstree.js', 'jquery/jstree.js' ],
+
+            // these have been customized by Galaxy
+            //'bootstrap': [ 'dist/js/bootstrap.js', 'bootstrap.js' ],
+            //'jquery-ui': [
+            //  // multiple components now
+            //  [ '', 'jquery/jquery-ui.js' ]
+            //],
+
+        };
+
+    // call bower to install libraries and other external resources
+    grunt.config( 'bower-install-simple', {
+        options: {
+            color: true
+        },
+        "prod": {
+            options: {
+                production: true
+            }
+        },
+        "dev": {
+            options: {
+                production: false
+            }
+        }
+    });
+
+    /** copy external libraries from bower components to scripts/libs */
+    function copyLibs(){
+        var lib_path = dev_path + '/libs/';
+        for( var libName in libraryLocations ){
+            if( libraryLocations.hasOwnProperty( libName ) ){
+
+                var bower_dir = 'bower_components',
+                    location = libraryLocations[ libName ],
+                    source = [ bower_dir, libName, location[0] ].join( '/' ),
+                    destination =  lib_path + location[1];
+
+                grunt.log.writeln( source + ' -> ' + destination );
+                grunt.file.copy( source, destination );
+            }
+        }
+    }
+
+    grunt.loadNpmTasks( 'grunt-bower-install-simple' );
+
+    grunt.registerTask( 'copy-libs', 'copy external libraries to src', copyLibs );
+    grunt.registerTask( 'install-libs', 'fetch external libraries and copy to src',
+                      [ 'bower-install-simple:prod', 'copy-libs' ] );
+};
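A short note on copyLibs() above: each libraryLocations entry is a straight bower-to-libs copy, so the task is just path arithmetic plus grunt.file.copy. A minimal sketch of the resolution for the 'jquery' entry (values taken from the map above; plain node, no grunt needed):

    var libName = 'jquery',
        location = [ 'dist/jquery.js', 'jquery/jquery.js' ],
        source = [ 'bower_components', libName, location[0] ].join( '/' ),
        destination = './galaxy/scripts/libs/' + location[1];
    // source      -> 'bower_components/jquery/dist/jquery.js'
    // destination -> './galaxy/scripts/libs/jquery/jquery.js'
    console.log( source, '->', destination );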
diff --git a/client/grunt-tasks/scripts.js b/client/grunt-tasks/scripts.js
new file mode 100644
index 0000000..6bfdd71
--- /dev/null
+++ b/client/grunt-tasks/scripts.js
@@ -0,0 +1,123 @@
+/**
+ * Grunt task to compress/bundle static/scripts.
+ * @param  {Object} grunt main grunt file
+ * @return {Function} callback to build this task
+ */
+module.exports = function( grunt ){
+    "use strict";
+
+    var app = grunt.config.get( 'app' ),
+        paths = grunt.config.get( 'paths' ),
+        // uglify settings used when scripts are decompressed/not-minified
+        decompressedSettings = {
+            mangle   : false,
+            beautify : true,
+            compress : {
+                drop_debugger : false
+            }
+        },
+        // uglify settings used when scripts are compressed/minified
+        compressedSettings = {
+            mangle : {
+                screw_ie8 : true
+            },
+            compress : {
+                // high compression options
+                screw_ie8 : true,
+                sequences: true,
+                dead_code : true,
+                drop_debugger : true,
+                comparisons : true,
+                conditionals : true,
+                evaluate : true,
+                booleans : true,
+                loops : true,
+                unused : true,
+                hoist_funs : true,
+                if_return : true,
+                join_vars : true,
+                cascade : true,
+            }
+        };
+
+    grunt.config( 'uglify', {
+        target : {
+            files: [{
+                expand : true,
+                cwd : paths.srcSymlink,
+                // NOTE: do not run uglify on the apps dir (webpack handles that part)
+                src : [ '**/*.js', '!apps/**/*.js' ],
+                // src : '**/*.js',
+                dest : paths.dist
+            }],
+        }
+    });
+
+
+    if (grunt.option( 'develop' )){
+        grunt.config( 'uglify.options', decompressedSettings );
+    } else {
+        grunt.config( 'uglify.target.options', {
+            sourceMap : true,
+            sourceMapName : function( path ){
+                // rewrite name to have all source maps in 'static/maps'
+                return path.replace( paths.dist, paths.maps ) + '.map';
+            }
+        });
+        grunt.config( 'uglify.options', compressedSettings );
+    }
+
+    // -------------------------------------------------------------------------- decompress for easier debugging
+    grunt.registerTask( 'decompress', function(){
+        grunt.log.writeln( "decompressing... (don't forget to call 'grunt' again before committing)" );
+        grunt.config( 'uglify.options', decompressedSettings );
+        grunt.config( 'uglify.target.options', {});
+        grunt.task.run( 'uglify' );
+    });
+    // alias for symmetry
+    grunt.registerTask( 'compress', [ 'uglify' ] );
+
+    // -------------------------------------------------------------------------- watch & copy,pack only those changed
+    // use 'grunt watch' (from a new tab in your terminal) to have grunt re-copy changed files automatically
+    grunt.config( 'watch', {
+        watch: {
+            // watch for changes in the src dir
+            // NOTE: but not in the apps dir (which is only used by webpack)
+            files: [ paths.srcSymlink + '/**', '!' + paths.srcSymlink + '/apps/**' ],
+            tasks: [ 'uglify' ],
+            options: {
+                spawn: false
+            }
+        }
+    });
+
+
+    // outer scope variable for the event handler and onChange fn - begin with empty hash
+    var changedFiles = Object.create(null);
+
+    // when files are changed, point the uglify target's src at just the updated files
+    var onChange = grunt.util._.debounce(function() {
+        grunt.log.writeln( 'onChange, changedFiles:', Object.keys( changedFiles ) );
+        grunt.config( 'uglify.target.files', [{
+            expand : true,
+            cwd : paths.srcSymlink,
+            src : Object.keys( changedFiles ),
+            dest : paths.dist
+        }]);
+        changedFiles = Object.create(null);
+    }, 200);
+
+    var addChangedFile = function( action, filepath ) {
+        // store each filepath in the changedFiles hash; the debounced fn above uses it as an aggregate list for copying
+        // we need to take galaxy/scripts out of the filepath or it will be copied to the wrong loc
+        filepath = filepath.replace( paths.srcSymlink + '/', '' );
+        // grunt.log.writeln( 'on.watch, filepath:', filepath );
+        changedFiles[ filepath ] = action;
+        onChange();
+    };
+    grunt.event.on( 'watch', addChangedFile );
+
+    // --------------------------------------------------------------------------
+    grunt.loadNpmTasks( 'grunt-contrib-watch' );
+    grunt.loadNpmTasks( 'grunt-contrib-uglify' );
+};
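The watch wiring above batches bursts of file events: every event records its path in changedFiles, and the debounced onChange rewrites uglify's src to just those paths once the burst settles. The same aggregate-and-debounce pattern in isolation (a sketch; handleBatch is a hypothetical stand-in for the grunt.config call):

    var _ = require( 'underscore' );

    var changed = Object.create( null );

    function handleBatch( files ){
        console.log( 'rebuilding:', files );   // stand-in for grunt.config( 'uglify.target.files', ... )
    }

    // fires once, 200ms after the last recorded change
    var flush = _.debounce( function(){
        handleBatch( Object.keys( changed ) );
        changed = Object.create( null );       // reset for the next burst
    }, 200 );

    function record( action, filepath ){
        changed[ filepath ] = action;          // later events for the same file overwrite earlier ones
        flush();
    }

    record( 'changed', 'a.js' );
    record( 'changed', 'b.js' );               // one handleBatch( [ 'a.js', 'b.js' ] ) after 200ms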
diff --git a/client/grunt-tasks/style.js b/client/grunt-tasks/style.js
new file mode 100644
index 0000000..1ae7b3a
--- /dev/null
+++ b/client/grunt-tasks/style.js
@@ -0,0 +1,109 @@
+/**
+ * Grunt task to build static/style resources (css, sprites)
+ * @param  {Object} grunt main grunt file
+ * @return {Function} callback to build this task
+ */
+module.exports = function( grunt ){
+    "use strict";
+
+    var _ = grunt.util._,
+        fmt = _.sprintf,
+        theme = grunt.option( 'theme', 'blue' ),
+        styleDistPath = '../static/style/blue',
+        imagesPath = '../static/images',
+        lessPath = './galaxy/style/less',
+        lessFiles = [
+            'base',
+            'autocomplete_tagging',
+            'embed_item',
+            'iphone',
+            'library',
+            'trackster',
+            'circster',
+            'reports'
+        ];
+
+
+    // Create sprite images and .less files
+    grunt.config( 'sprite', {
+        options: {
+            algorithm: 'binary-tree'
+        },
+        'history-buttons': {
+            src: fmt( '%s/history-buttons/*.png', imagesPath ),
+            dest: fmt( '%s/sprite-history-buttons.png', styleDistPath ),
+            imgPath: fmt( 'sprite-history-buttons.png' ),
+            destCss: fmt( '%s/sprite-history-buttons.less', lessPath )
+        },
+        'history-states': {
+            src: fmt( '%s/history-states/*.png', imagesPath ),
+            dest: fmt( '%s/sprite-history-states.png', styleDistPath ),
+            imgPath: fmt( 'sprite-history-states.png' ),
+            destCss: fmt( '%s/sprite-history-states.less', lessPath )
+        },
+        'fugue': {
+            src: fmt( '%s/fugue/*.png', imagesPath ),
+            dest: fmt( '%s/sprite-fugue.png', styleDistPath ),
+            imgPath: fmt( 'sprite-fugue.png' ),
+            destCss: fmt( '%s/sprite-fugue.less', lessPath )
+        }
+    });
+
+    // Compile less files
+    grunt.config( 'less', {
+        options: {
+            compress: true,
+            paths: [ lessPath ],
+            strictImports: true
+        },
+        dist: {
+            files: _.reduce( lessFiles, function( d, s ) {
+                var output = fmt( '%s/%s.css', styleDistPath, s ),
+                    input = fmt( '%s/%s.less', lessPath, s );
+                d[ output ] = [ input ]; return d;
+            }, {} )
+        }
+    });
+
+    // remove tmp files
+    grunt.config( 'clean', {
+        options : {
+            force: true
+        },
+        clean : [
+            fmt( '%s/tmp-site-config.less', lessPath )
+        ]
+    });
+
+
+    // -------------------------------------------------------------------------- watch & rebuild less files
+    // use 'grunt watch-style' (from a new tab in your terminal) to have grunt re-copy changed files automatically
+    //
+    // the conditional prevents reconfiguration of 'normal' (.js) grunt watch from grunt-tasks/scripts.js
+
+    if (this.cli.tasks.indexOf("watch-style") > -1){
+        grunt.config( 'watch', {
+            watch: {
+                // watch for changes in the src dir
+                files: [ lessPath + '/**' ],
+                tasks: ['check-modules', 'sprite', 'less-site-config', 'less', 'clean'],
+                options: {
+                    spawn: false
+                }
+            }
+        });
+    }
+
+    grunt.loadNpmTasks( 'grunt-contrib-less' );
+    grunt.loadNpmTasks( 'grunt-spritesmith' );
+    grunt.loadNpmTasks( 'grunt-contrib-clean' );
+    grunt.loadNpmTasks( 'grunt-contrib-watch' );
+
+    // Write theme variable for less
+    grunt.registerTask( 'less-site-config', 'Write site configuration for less', function() {
+        grunt.file.write( fmt( '%s/tmp-site-config.less', lessPath ), fmt( "@theme-name: %s;", theme ) );
+    });
+
+    grunt.registerTask( 'watch-style', [ 'watch' ] );
+    grunt.registerTask( 'style', [  'check-modules', 'sprite', 'less-site-config', 'less', 'clean' ] );
+};
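The less 'dist' files map above is built with _.reduce, pairing each stem in lessFiles with a one-entry { css-output: [ less-input ] } record. Expanded by hand for the first two stems, it is equivalent to the following (a sketch; paths follow styleDistPath and lessPath above):

    var files = {
        '../static/style/blue/base.css': [ './galaxy/style/less/base.less' ],
        '../static/style/blue/autocomplete_tagging.css': [ './galaxy/style/less/autocomplete_tagging.less' ]
        // ...one pair per entry in lessFiles
    };

Before less runs, the 'style' task writes tmp-site-config.less containing just '@theme-name: blue;' (with the default theme), and the clean task removes it afterwards.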
diff --git a/client/grunt-tasks/webpack.js b/client/grunt-tasks/webpack.js
new file mode 100644
index 0000000..a10d5e2
--- /dev/null
+++ b/client/grunt-tasks/webpack.js
@@ -0,0 +1,38 @@
+/**
+ * Grunt task to bundle apps using webpack (see also ../webpack.config.js).
+ */
+module.exports = function( grunt ){
+    "use strict";
+
+    var path = require( 'path' );
+
+    // there is a taskrunner specifically for this - but it's not straightforward
+    // and duplicates some of the config/code in webpack.config,
+    // https://github.com/webpack/grunt-webpack
+    // http://webpack.github.io/docs/usage-with-grunt.html
+
+    // so we'll use grunt-exec and call the webpack cli instead (until CLI and runner are unified a bit more?)
+
+    grunt.loadNpmTasks( 'grunt-exec' );
+
+    function webpackCLI(){
+        var args = Array.prototype.slice.call( arguments );
+        args.unshift( path.join( __dirname, '../node_modules/webpack/bin/webpack.js' ) );
+        return args.join( ' ' );
+    }
+    grunt.registerTask( 'webpack', function(){
+        grunt.log.writeln( 'bundling webpack apps for production...' );
+        grunt.config( 'exec.webpack-production.command', webpackCLI( '-p' ) );
+        grunt.task.run( 'exec:webpack-production' );
+    });
+    grunt.registerTask( 'webpack-dev', function(){
+        grunt.log.writeln( 'bundling webpack apps for development...' );
+        grunt.config( 'exec.webpack-dev.command', webpackCLI( '-d' ) );
+        grunt.task.run( 'exec:webpack-dev' );
+    });
+    grunt.registerTask( 'webpack-watch', function(){
+        grunt.log.writeln( 'bundling webpack apps for development and watching for changes...' );
+        grunt.config( 'exec.webpack-watch.command', webpackCLI( '-d', '--watch' ) );
+        grunt.task.run( 'exec:webpack-watch' );
+    });
+};
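webpackCLI() above only prepends the locally installed webpack binary to whatever flags are passed, and each registered task hands the joined string to grunt-exec. A sketch of what the three tasks end up running:

    // webpackCLI( '-p' )            -> '<client dir>/node_modules/webpack/bin/webpack.js -p'
    // webpackCLI( '-d' )            -> '<client dir>/node_modules/webpack/bin/webpack.js -d'
    // webpackCLI( '-d', '--watch' ) -> '<client dir>/node_modules/webpack/bin/webpack.js -d --watch'
    var path = require( 'path' );
    var cmd = [ path.join( __dirname, '../node_modules/webpack/bin/webpack.js' ), '-d', '--watch' ].join( ' ' );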
diff --git a/client/package.json b/client/package.json
new file mode 100644
index 0000000..e2cbf0c
--- /dev/null
+++ b/client/package.json
@@ -0,0 +1,29 @@
+{
+  "name": "galaxy-client",
+  "version": "0.1.0",
+  "description": "Galaxy client application build system",
+  "keywords": [
+    "galaxy"
+  ],
+  "repository": { "type": "git",
+                  "url": "https://github.com/galaxyproject/galaxy.git"
+  },
+  "license": "AFL-3.0",
+  "dependencies": {
+    "amdi18n-loader": "^0.2.0",
+    "grunt": "^0.4.5",
+    "grunt-bower-install-simple": "^1.1.0",
+    "grunt-check-modules": "^1.0.0",
+    "grunt-cli": "^0.1.13",
+    "grunt-contrib-clean": "^0.6.0",
+    "grunt-contrib-copy": "^0.5.0",
+    "grunt-contrib-less": "^1.1.0",
+    "grunt-contrib-uglify": "^0.8.0",
+    "grunt-contrib-watch": "^0.6.1",
+    "grunt-exec": "^0.4.6",
+    "grunt-spritesmith": "^4.7.1",
+    "i18n-webpack-plugin": "^0.2.7",
+    "webpack": "^1.10.1",
+    "webpack-dev-server": "^1.7.0"
+  }
+}
diff --git a/client/toolshed/scripts/mvc/groups/group-detail-view.js b/client/toolshed/scripts/mvc/groups/group-detail-view.js
new file mode 100644
index 0000000..c582550
--- /dev/null
+++ b/client/toolshed/scripts/mvc/groups/group-detail-view.js
@@ -0,0 +1,146 @@
+define([
+    "libs/toastr",
+    "toolshed/scripts/mvc/groups/group-model"
+    ],
+function(
+    mod_toastr,
+    mod_group_model
+    ) {
+
+// toolshed group detail view
+var GroupDetailView = Backbone.View.extend({
+    el: "#groups_element",
+    options: {},
+    app: null,
+
+    initialize : function( options ){
+        this.options = _.extend( this.options, options );
+        this.app = window.globalTS.groups;
+
+        if ( this.app.collection !== null ){
+            this.model = this.app.collection.get( this.options.group_id );
+            this.render();
+        } else {
+            this.fetchGroup();
+        }
+
+    },
+
+    fetchGroup: function( options ){
+        var that = this;
+        this.options = _.extend( this.options, options );
+        this.model = new mod_group_model.Group( { id: this.options.group_id } );
+        this.model.fetch({
+          success: function( model ){
+            console.log( 'received data: ' );
+            console.log( model );
+            that.render();
+          },
+          error: function( model, response ){
+              if ( typeof response.responseJSON !== "undefined" ){
+                mod_toastr.error( response.responseJSON.err_msg );
+              } else {
+                mod_toastr.error( 'An error occurred.' );
+              }
+          }
+        });
+    },
+
+    render: function(){
+        var template = this.templateRow();
+        this.$el.html( template( { group: this.model } ) );
+        $( "#center [data-toggle]" ).tooltip();
+        $( "#center" ).css( 'overflow','auto' );
+    },
+
+    templateRow: function() {
+        return _.template([
+                '<div>',
+                    '<h3><%= _.escape(group.get("name")) %></h3>',
+                    '<p class="" style="color:gray;">',
+                    'A group of <%= group.get("members").length %> members with <%= group.get("repositories").length %> repositories and a total of <%= group.get("total_downloads") %> combined repository clones.</p>',
+
+                    '<h3>Members</h3>',
+                    '<table class="grid table table-condensed">',
+                        '<thead>',
+                            '<th>Name</th>',
+                            '<th>Repositories</th>',
+                            '<th>Registered</th>',
+                        '</thead>',
+                        '<tbody>',
+                            '<% _.each(group.get("members"), function(member) { %>',
+                            '<tr>',
+                                '<td>',
+                                    '<%= _.escape(member.username) %>',
+                                '</td>',
+                                '<td>',
+                                    '<a data-toggle="tooltip" data-placement="top" title="Repositories of <%= _.escape(member.username) %>" href="/repository/browse_repositories_by_user?user_id=<%= member.id %>&use_panels=true" id="<%= member.id %>"><%= member.user_repos_count %></a>',
+                                '</td>',
+                                '<td>',
+                                    '<%= member.time_created %>',
+                                '</td>',
+                            '</tr>',
+                            '<% }); %>',
+                        '</tbody>',
+                    '</table>',
+
+                    '<h3>Repositories</h3>',
+                    '<table class="grid table table-condensed">',
+                        '<thead>',
+                            '<th>Name</th>',
+                            '<th>Description</th>',
+                            '<th>Clones</th>',
+                            '<th>Owner</th>',
+                            '<th>Categories</th>',
+                            '<th>Created</th>',
+                            '<th>Updated</th>',
+                            '<th>Rating</th>',
+                            '<th>Verified</th>',
+                        '</thead>',
+                        '<tbody>',
+                            '<% _.each(group.get("repositories"), function(repo) { %>',
+                                '<tr>',
+                                    '<td>',
+                                        '<a data-toggle="tooltip" data-placement="top" title="Details of <%= _.escape(repo.name) %>" href="/view/<%= _.escape(repo.owner) %>/<%= _.escape(repo.name) %>" id="<%= repo.id %>"><%= _.escape(repo.name) %></a>',
+                                    '</td>',
+                                    '<td>',
+                                        '<%= _.escape(repo.description) %>',
+                                    '</td>',
+                                    '<td>',
+                                        '<%= repo.times_downloaded %>',
+                                    '</td>',
+                                    '<td>',
+                                        '<%= _.escape(repo.owner) %>',
+                                    '</td>',
+                                    '<td>',
+                                        '<% _.each((repo.categories), function(cat) { %>',
+                                            '<a data-toggle="tooltip" data-placement="top" title="Repositories in <%= cat.name %>" href="/repository/browse_repositories_in_category?id=<%= cat.id %>&use_panels=true"><%= cat.name %></a><br/>',
+                                        // '<%= repo.categories %>',
+                                        '<% }); %>',
+                                    '</td>',
+                                    '<td data-toggle="tooltip" data-placement="top" title="<%= repo.time_created_full %>">',
+                                        '<%= repo.time_created %>',
+                                    '</td>',
+                                    '<td data-toggle="tooltip" data-placement="top" title="<%= repo.time_updated_full %>">',
+                                        '<%= repo.time_updated %>',
+                                    '</td>',
+                                    '<td>',
+                                        '<%= repo.ratings_mean %>',
+                                    '</td>',
+                                    '<td>',
+                                        '<%= repo.approved %>',
+                                    '</td>',
+                                '</tr>',
+                            '<% }); %>',
+                        '</tbody>',
+                    '</table>',
+                '</div>'
+        ].join(''));
+  }
+});
+return {
+    GroupDetailView: GroupDetailView
+};
+
+});
+
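A sketch of how this view is constructed in practice (the router in toolshed.groups.js further below does exactly this; the id here is a hypothetical placeholder):

    var detail = new mod_group_detail.GroupDetailView( { group_id: 'deadbeef01' } );
    // reuses window.globalTS.groups.collection if the list view already fetched it,
    // otherwise falls back to fetchGroup() and a single GET /api/groups/deadbeef01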
diff --git a/client/toolshed/scripts/mvc/groups/group-list-view.js b/client/toolshed/scripts/mvc/groups/group-list-view.js
new file mode 100644
index 0000000..75fffd3
--- /dev/null
+++ b/client/toolshed/scripts/mvc/groups/group-list-view.js
@@ -0,0 +1,148 @@
+define([
+    "libs/toastr",
+    "toolshed/scripts/mvc/groups/group-model",
+    "toolshed/scripts/mvc/groups/group-listrow-view"
+
+], function(
+    mod_toastr,
+    mod_group_model,
+    mod_group_row
+){
+
+var GroupListView = Backbone.View.extend({
+    el: '#groups_element',
+    defaults: {},
+
+    /**
+     * Initialize and fetch the groups from the server.
+     * Render asynchronously afterwards.
+     * @param  {object} options an object with options
+     */
+    initialize : function( options ){
+        this.options = _.defaults( this.options || {}, this.defaults, options );
+        var that = this;
+        window.globalTS.groups.collection = new mod_group_model.Groups();
+        window.globalTS.groups.collection.fetch({
+          success: function( model ){
+            console.log( 'received data: ' );
+            console.log( model );
+            that.render();
+          },
+          error: function( model, response ){
+              if ( typeof response.responseJSON !== "undefined" ){
+                mod_toastr.error( response.responseJSON.err_msg );
+              } else {
+                mod_toastr.error( 'An error occurred.' );
+              }
+          }
+        });
+        
+    },
+
+    fetch: function(){
+    },
+
+    /** 
+     * Render the groups table from the object's own collection. 
+     */
+    render: function ( options ) {
+        this.options = _.extend( this.options, options );
+        $( ".tooltip" ).hide();
+        var template = this.templateGroupsList();
+        this.$el.html( template( { length: window.globalTS.groups.collection.models.length } ) );
+        this.renderRows( window.globalTS.groups.collection.models );
+        $( "#center [data-toggle]" ).tooltip();
+        $( "#center" ).css( 'overflow','auto' );
+    },
+
+    /** 
+     * Render all given models as rows in the groups list
+     * @param  {array} groups_to_render array of group models to render
+     */
+    renderRows: function( groups_to_render ){
+        for ( var i = 0; i < groups_to_render.length; i++ ) {
+            var group = groups_to_render[i];
+            this.renderOne( { group: group } );
+        }
+    },
+
+    /**
+     * Create a view for the given model and add it to the groups view.
+     * @param {Object} options object holding the group model to render
+     */
+    renderOne: function( options ){
+        var rowView = new mod_group_row.GroupListRowView( options );
+        this.$el.find( '#group_list_body' ).append( rowView.el );
+    },
+
+    /**
+     * Table heading was clicked, update sorting preferences and re-render.
+     */
+    // sort_clicked : function(){
+    //     if (Galaxy.libraries.preferences.get('sort_order') === 'asc'){
+    //         Galaxy.libraries.preferences.set({'sort_order': 'desc'});
+    //     } else {
+    //         Galaxy.libraries.preferences.set({'sort_order': 'asc'});
+    //     }
+    //     this.render();
+    // },
+
+    /**
+     * Sort the underlying collection according to the parameters received.
+     * Currently supports only sorting by name. 
+     */
+    // sortLibraries: function(){
+    //     if (Galaxy.libraries.preferences.get('sort_by') === 'name'){
+    //         if (Galaxy.libraries.preferences.get('sort_order') === 'asc'){
+    //             this.collection.sortByNameAsc();
+    //         } else if (Galaxy.libraries.preferences.get('sort_order') === 'desc'){
+    //             this.collection.sortByNameDesc();
+    //         }
+    //     }
+    // },
+
+    // redirectToHome: function(){
+    //     window.location = '../';
+    // },
+    // redirectToLogin: function(){
+    //     window.location = '/user/login';
+    // },
+
+// MMMMMMMMMMMMMMMMMM
+// === TEMPLATES ====
+// MMMMMMMMMMMMMMMMMM
+
+    templateGroupsList: function(){
+        var tmpl_array = [];
+
+        tmpl_array.push('<div id="groups">');
+        tmpl_array.push('</div>');
+        tmpl_array.push('<div class="groups_container table-responsive">');
+        tmpl_array.push('<% if(length === 0) { %>');
+        tmpl_array.push('<div>There are no groups yet.</div>');
+        tmpl_array.push('<% } else{ %>');
+        tmpl_array.push('<table class="grid table table-condensed">');
+        tmpl_array.push('   <thead>');
+        tmpl_array.push('     <th>Name</th>');
+        // tmpl_array.push('     <th style="width:22%;">description</th>');
+        tmpl_array.push('     <th>Members</th> ');
+        tmpl_array.push('     <th>Repositories</th>');
+        tmpl_array.push('   </thead>');
+        tmpl_array.push('   <tbody id="group_list_body">');
+        // group item views will attach here
+        tmpl_array.push('   </tbody>');
+        tmpl_array.push('</table>');
+        tmpl_array.push('<% }%>');
+        tmpl_array.push('</div>');
+
+        return _.template(tmpl_array.join(''));
+    },
+
+});
+
+return {
+    GroupListView: GroupListView
+};
+
+});
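templateGroupsList() compiles its markup with _.template, and render() feeds it a length so the '<% if(length === 0) %>' branch can switch between the empty-state message and the table. The mechanism in miniature (a sketch):

    var tmpl = _.template( '<% if(length === 0) { %>no groups<% } else { %><%= length %> groups<% } %>' );
    tmpl( { length: 0 } );  // -> 'no groups'
    tmpl( { length: 3 } );  // -> '3 groups'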
diff --git a/client/toolshed/scripts/mvc/groups/group-listrow-view.js b/client/toolshed/scripts/mvc/groups/group-listrow-view.js
new file mode 100644
index 0000000..7ffcf7c
--- /dev/null
+++ b/client/toolshed/scripts/mvc/groups/group-listrow-view.js
@@ -0,0 +1,36 @@
+define([],
+function() {
+
+// toolshed group row view
+var GroupListRowView = Backbone.View.extend({
+    events: {},
+
+    initialize : function( options ){
+        this.render( options.group );
+    },
+
+    render: function( group ){
+        var tmpl = this.templateRow();
+        this.setElement(tmpl( { group:group } ));
+        this.$el.show();
+        return this;
+    },
+
+    templateRow: function() {
+        return _.template([
+                '<tr class="" data-id="<%- group.get("id") %>">',
+                '<td><a href="groups#/<%= group.get("id") %>"><%= _.escape(group.get("name")) %></a></td>',
+                // '<td>description</td>',
+                '<td><%= group.get("total_members") %></td>',
+                '<td><%= group.get("total_repos") %></td>',
+                '</tr>'
+        ].join(''));
+  }
+   
+});
+
+return {
+    GroupListRowView: GroupListRowView
+};
+
+});
diff --git a/client/toolshed/scripts/mvc/groups/group-model.js b/client/toolshed/scripts/mvc/groups/group-model.js
new file mode 100644
index 0000000..30b4663
--- /dev/null
+++ b/client/toolshed/scripts/mvc/groups/group-model.js
@@ -0,0 +1,22 @@
+define([], function() {
+
+// ============================================================================
+// TS GROUP RELATED MODELS
+
+    var Group = Backbone.Model.extend({
+      urlRoot: '/api/groups',
+    });
+
+    var Groups = Backbone.Collection.extend({
+      url: '/api/groups',
+
+      model: Group,
+
+    });
+
+return {
+    Group: Group,
+    Groups: Groups
+};
+
+});
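With urlRoot/url set as above, Backbone derives the REST endpoints itself; inside this module the calls the views rely on look like this (a sketch; the id is a hypothetical placeholder):

    var group = new Group( { id: 'deadbeef01' } );
    group.fetch();     // GET /api/groups/deadbeef01
    var groups = new Groups();
    groups.fetch();    // GET /api/groups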
diff --git a/client/toolshed/scripts/toolshed.groups.js b/client/toolshed/scripts/toolshed.groups.js
new file mode 100644
index 0000000..d4843e4
--- /dev/null
+++ b/client/toolshed/scripts/toolshed.groups.js
@@ -0,0 +1,51 @@
+// MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
+// === MAIN TOOLSHED GROUP MODULE ====
+// MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
+
+define([
+        'toolshed/scripts/mvc/groups/group-list-view',
+        'toolshed/scripts/mvc/groups/group-detail-view',
+        'toolshed/scripts/mvc/groups/group-model'
+    ],
+    function(
+        mod_group_list,
+        mod_group_detail,
+        mod_group_model
+    ) {
+
+// ============================================================================
+// ROUTER
+var ToolshedRouter = Backbone.Router.extend({
+    routes: {
+        ""                        : "groups",
+        ":group_id"               : "group_page"
+    }
+
+});
+
+var ToolshedGroups = Backbone.View.extend({
+    groupListView: null,
+    groupDetailView: null,
+    collection: null,
+
+    initialize : function(){
+        window.globalTS.groups = this;
+
+        this.ts_router = new ToolshedRouter();
+        this.ts_router.on( 'route:groups', function() {
+            window.globalTS.groups.groupListView = new mod_group_list.GroupListView();
+        });
+
+        this.ts_router.on( 'route:group_page', function( group_id ) {
+            window.globalTS.groups.groupDetailView = new mod_group_detail.GroupDetailView( { group_id: group_id } );
+        });
+        
+        Backbone.history.start({pushState: false});
+    }
+});
+
+return {
+    ToolshedGroups: ToolshedGroups
+};
+
+});
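Since Backbone.history starts without pushState, routing is hash-based: the fragment after 'groups#/' is matched against the two routes above. A sketch of the mapping (ids hypothetical):

    // groups#/            -> ''          -> route:groups     -> new GroupListView()
    // groups#/deadbeef01  -> ':group_id' -> route:group_page -> new GroupDetailView({ group_id: 'deadbeef01' })
    // programmatic equivalent, from inside initialize above:
    this.ts_router.navigate( 'deadbeef01', { trigger: true } );  // hypothetical id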
diff --git a/client/webpack.config.js b/client/webpack.config.js
new file mode 100644
index 0000000..edf2855
--- /dev/null
+++ b/client/webpack.config.js
@@ -0,0 +1,79 @@
+var webpack = require( 'webpack' ),
+    // paths
+    path = require( 'path' ),
+    scriptsBase = path.join( __dirname, 'galaxy/scripts' ),
+    libsBase = path.join( scriptsBase, 'libs' ),
+
+    // libraries used on almost every page
+    // TODO: reduce
+    commonLibs = [
+        'polyfills',
+        // jquery et al
+        'jquery',
+        'libs/jquery/jquery.migrate',
+        // jquery plugins
+        'libs/jquery/select2',
+        'libs/jquery/jquery.event.hover',
+        'libs/jquery/jquery.form',
+        'libs/jquery/jquery.rating',
+        'libs/jquery.sparklines',
+        'libs/jquery/jquery-ui',
+        'libs/bootstrap',
+        'libs/bootstrap-tour',
+        'libs/jquery.complexify',
+        // mvc
+        'libs/underscore',
+        'libs/backbone',
+        // all pages get these
+        'ui/autocom_tagging',
+        'layout/modal',
+        'layout/panel',
+        'onload',
+    ];
+
+
+module.exports = {
+    devtool : 'source-map',
+    entry   : {
+        libs    : commonLibs,
+        login   : './galaxy/scripts/apps/login.js',
+        analysis: './galaxy/scripts/apps/analysis.js',
+    },
+    output  : {
+        path        : '../static/scripts/bundled',
+        filename    : '[name].bundled.js'
+    },
+    resolve : {
+        root  : scriptsBase,
+        alias : {
+            //TODO: correct our imports and remove these rules
+            // Backbone looks for these in the same root directory
+            jquery      : path.join( libsBase, 'jquery/jquery' ),
+            underscore  : path.join( libsBase, 'underscore.js' ),
+        }
+    },
+    module : {
+        loaders : [
+        ],
+    },
+    resolveLoader : {
+        alias : {
+            // since we support both requirejs i18n and non-requirejs and both use a similar syntax,
+            // use an alias so we can just use one file
+            i18n : 'amdi18n'
+        }
+    },
+    plugins : [
+        new webpack.optimize.CommonsChunkPlugin( 'libs', 'libs.bundled.js' ),
+        // this plugin allows using the following keys/globals in scripts (w/o req'ing them first)
+        // and webpack will automagically require them in the bundle for you
+        new webpack.ProvidePlugin({
+            $:                  'jquery',
+            jQuery:             'jquery',
+            'window.jQuery':    'jquery',
+            _:                  "underscore",
+            Backbone:           'libs/backbone',
+        }),
+        // new webpack.optimize.LimitChunkCountPlugin({ maxChunks: 1 })
+    ],
+};
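The ProvidePlugin block above lets bundled modules keep using the bare globals; webpack rewrites each free occurrence into a require of the mapped module at build time. A sketch of a module body that compiles without explicit imports under this config:

    // $, jQuery, _ and Backbone are injected by ProvidePlugin:
    var RowView = Backbone.View.extend( { tagName: 'tr' } );
    var names = _.pluck( [ { name: 'a' }, { name: 'b' } ], 'name' );
    $( 'body' ).append( new RowView().el );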
diff --git a/config/auth_conf.xml.sample b/config/auth_conf.xml.sample
new file mode 100644
index 0000000..70d7db0
--- /dev/null
+++ b/config/auth_conf.xml.sample
@@ -0,0 +1,110 @@
+<?xml version="1.0"?>
+<auth>
+<!--<authenticator>
+        <type>ldap</type>
+-->
+        <!-- Replacement fields: instances of {email}, {username} and {password}
+             are replaced with the corresponding user's values inside the
+             <filter>, <server>, <ldap-options>, <search-fields>,
+             <search-filter>, <search-base>, <search-user> and <search-password>
+             elements. -->
+        <!-- Filter users for which this authenticator applies. This is a Python
+             expression which is evaluated after field replacement. -->
+<!--    <filter>'{email}'.endswith('@example.com')</filter>
+        <options>
+-->
+            <!-- Whether to allow user registration. Possible values are True,
+                 False and Challenge (i.e. allow registration in case of
+                 successful authentication). Default is True. -->
+<!--        <allow-register>False</allow-register>
+-->
+            <!-- Whether Galaxy should automatically register users when they
+                 first login. Default is False. -->
+<!--        <auto-register>True</auto-register>
+-->
+            <!-- Whether users are allowed to change their password. Default is
+                 False. -->
+<!--        <allow-password-change>False</allow-password-change>
+-->
+
+            <!-- LDAP-specific options -->
+<!--        <server>ldap://dc1.example.com</server>
+-->
+            <!-- Additional options for the LDAP connection. The syntax is:
+                 option1=value1,option2=value2,...
+                 Options and values should match those from the python-ldap
+                 documentation.
+                 The following example allows connecting to ldaps:// (SSL/TLS)
+                 when self-signed certificates are used -->
+<!--        <ldap-options>OPT_X_TLS_REQUIRE_CERT=OPT_X_TLS_ALLOW</ldap-options>
+-->
+            <!-- Whether unregistered users should use their LDAP username
+                 instead of the email at their first login when auto-register is
+                 True. Default is False. -->
+<!--        <login-use-username>False</login-use-username>
+-->
+            <!-- Whether to continue with the following authenticators if LDAP
+                 fails. Default is False. -->
+<!--        <continue-on-failure>False</continue-on-failure>
+-->
+            <!-- If search-fields is not specified, all other search-* elements
+                 are ignored.
+                 If search-user is not specified, Galaxy will bind anonymously
+                 to the LDAP server for search. -->
+            <!-- For Active Directory: -->
+<!--        <search-fields>sAMAccountName,mail</search-fields>
+            <search-base>dc=dc1,dc=example,dc=com</search-base>
+-->
+            <!-- If login-use-username is False -->
+<!--        <search-filter>(&(objectClass=user)(mail={email}))</search-filter>
+-->
+            <!-- If login-use-username is True -->
+<!--        <search-filter>(&(objectClass=user)(sAMAccountName={username}))</search-filter>
+-->
+<!--        <search-user>jsmith@dc1.example.com</search-user>
+            <search-password>mysecret</search-password>
+-->
+            <!-- For OpenLDAP: -->
+<!--        <search-fields>uid,mail</search-fields>
+            <search-base>ou=People,dc=example,dc=com</search-base>
+-->
+            <!-- If login-use-username is False -->
+<!--        <search-filter>(mail={email})</search-filter>
+-->
+            <!-- If login-use-username is True -->
+<!--        <search-filter>(uid={username})</search-filter>
+-->
+<!--        <search-user>cn=jsmith,ou=People,dc=domain,dc=com</search-user>
+            <search-password>mysecret</search-password>
+-->
+
+            <!-- Replacement fields: instances of {email}, {username},
+                 {password}, {dn} plus all fields defined in <search-fields> are
+                 replaced with the corresponding user's values inside the
+                 <bind-user>, <bind-password>, <auto-register-username> and
+                 <auto-register-email> elements. -->
+            <!-- For Active Directory: -->
+<!--        <bind-user>{sAMAccountName}@dc1.example.com</bind-user>
+            <bind-password>{password}</bind-password>
+            <auto-register-username>{sAMAccountName}</auto-register-username>
+            <auto-register-email>{mail}</auto-register-email>
+-->
+            <!-- For OpenLDAP: -->
+<!--        <bind-user>{dn}</bind-user>
+            <bind-password>{password}</bind-password>
+            <auto-register-username>{uid}</auto-register-username>
+            <auto-register-email>{mail}</auto-register-email>
+-->
+<!--    </options>
+    </authenticator>
+-->
+
+    <authenticator>
+        <type>localdb</type>
+        <options>
+            <!-- Whether users are allowed to change their password. Default is
+                 False. -->
+            <allow-password-change>true</allow-password-change>
+        </options>
+    </authenticator>
+</auth>
diff --git a/config/data_manager_conf.xml.sample b/config/data_manager_conf.xml.sample
new file mode 100644
index 0000000..04b4637
--- /dev/null
+++ b/config/data_manager_conf.xml.sample
@@ -0,0 +1,3 @@
+<?xml version="1.0"?>
+<data_managers>
+</data_managers>
diff --git a/config/datatypes_conf.xml.sample b/config/datatypes_conf.xml.sample
new file mode 100644
index 0000000..2676db8
--- /dev/null
+++ b/config/datatypes_conf.xml.sample
@@ -0,0 +1,702 @@
+<?xml version="1.0"?>
+<datatypes>
+  <registration converters_path="lib/galaxy/datatypes/converters" display_path="display_applications">
+    <datatype extension="ab1" type="galaxy.datatypes.binary:Ab1" mimetype="application/octet-stream" display_in_upload="true" description="A binary sequence file in 'ab1' format with a '.ab1' file extension.  You must manually select this 'File Format' when uploading the file." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Ab1"/>
+    <datatype extension="afg" type="galaxy.datatypes.assembly:Amos" display_in_upload="false" />
+    <datatype extension="arff" type="galaxy.datatypes.text:Arff" mimetype="text/plain" display_in_upload="True" />
+    <datatype extension="asn1" type="galaxy.datatypes.data:GenericAsn1" mimetype="text/plain" display_in_upload="true" />
+    <datatype extension="asn1-binary" type="galaxy.datatypes.binary:GenericAsn1Binary" mimetype="application/octet-stream" display_in_upload="true" />
+    <datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true" description="blastz pairwise alignment format.  Each alignment block in an axt file contains three lines: a summary line and 2 sequence lines.  Blocks are separated from one another by blank lines.  The summary line contains chromosomal position and size information about the alignment. It consists of 9 required fields." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Axt"/>
+    <datatype extension="fli" type="galaxy.datatypes.tabular:FeatureLocationIndex" display_in_upload="false"/>
+    <datatype extension="bam" type="galaxy.datatypes.binary:Bam" mimetype="application/octet-stream" display_in_upload="true" description="A binary file compressed in the BGZF format with a '.bam' file extension." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#BAM">
+      <converter file="bam_to_bai.xml" target_datatype="bai"/>
+      <converter file="bam_to_bigwig_converter.xml" target_datatype="bigwig"/>
+      <display file="ucsc/bam.xml" />
+      <display file="ensembl/ensembl_bam.xml" />
+      <display file="igv/bam.xml" />
+      <display file="igb/bam.xml" />
+      <display file="iobio/bam.xml" />
+    </datatype>
+    <datatype extension="cram" type="galaxy.datatypes.binary:CRAM" mimetype="application/octet-stream" display_in_upload="true" description="CRAM is a file format for highly efficient and tunable reference-based compression of alignment data." description_url="http://www.ebi.ac.uk/ena/software/cram-usage"/>
+    <datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true" description="BED format provides a flexible way to define the data lines that are displayed in an annotation track. BED lines have three required columns and nine additional optional columns. The three required columns are chrom, chromStart and chromEnd." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Bed">
+      <converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
+      <converter file="bed_to_bgzip_converter.xml" target_datatype="bgzip"/>
+      <converter file="bed_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
+      <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/>
+      <converter file="bed_to_fli_converter.xml" target_datatype="fli"/>
+      <!-- <display file="ucsc/interval_as_bed.xml" /> -->
+      <display file="igb/bed.xml" />
+    </datatype>
+    <datatype extension="bedgraph" type="galaxy.datatypes.interval:BedGraph" display_in_upload="true">
+      <converter file="bedgraph_to_bigwig_converter.xml" target_datatype="bigwig"/>
+      <display file="igb/bedgraph.xml" />
+    </datatype>
+    <datatype extension="bedstrict" type="galaxy.datatypes.interval:BedStrict" display_in_upload="true"/>
+    <datatype extension="bed6" type="galaxy.datatypes.interval:Bed6" display_in_upload="true">
+    </datatype>
+    <datatype extension="bed12" type="galaxy.datatypes.interval:Bed12" display_in_upload="true"/>
+    <datatype extension="len" type="galaxy.datatypes.chrominfo:ChromInfo" display_in_upload="true">
+      <converter file="len_to_linecount.xml" target_datatype="linecount" />
+    </datatype>
+    <datatype extension="bigbed" type="galaxy.datatypes.binary:BigBed" mimetype="application/octet-stream" display_in_upload="true">
+      <display file="ucsc/bigbed.xml" />
+      <display file="igb/bb.xml" />
+    </datatype>
+    <datatype extension="bigwig" type="galaxy.datatypes.binary:BigWig" mimetype="application/octet-stream" display_in_upload="true">
+      <display file="ucsc/bigwig.xml" />
+      <display file="igb/bigwig.xml" />
+      <display file="igv/bigwig.xml" />
+    </datatype>
+    <datatype extension="cxb" type="galaxy.datatypes.binary:Binary" mimetype="application/octet-stream" subclass="True" display_in_upload="true" description="Cuffquant output format" />
+    <datatype extension="chrint" type="galaxy.datatypes.interval:ChromatinInteractions" display_in_upload="True">
+      <converter file="interval_to_bgzip_converter.xml" target_datatype="bgzip"/>
+      <converter file="interval_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
+      <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/>
+    </datatype>
+    <datatype extension="csv" type="galaxy.datatypes.tabular:CSV" display_in_upload="true" />
+    <datatype extension="tsv" type="galaxy.datatypes.tabular:TSV" display_in_upload="true" />
+    <datatype extension="customtrack" type="galaxy.datatypes.interval:CustomTrack"/>
+    <datatype extension="bowtie_color_index" type="galaxy.datatypes.ngsindex:BowtieColorIndex" mimetype="text/html" display_in_upload="False"/>
+    <datatype extension="bowtie_base_index" type="galaxy.datatypes.ngsindex:BowtieBaseIndex" mimetype="text/html" display_in_upload="False"/>
+    <datatype extension="csfasta" type="galaxy.datatypes.sequence:csFasta" display_in_upload="true"/>
+    <datatype extension="data" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream" max_optional_metadata_filesize="1048576" />
+    <datatype extension="data_manager_json" type="galaxy.datatypes.text:Json" mimetype="application/json" subclass="True" display_in_upload="False"/>
+    <datatype extension="dbn" type="galaxy.datatypes.sequence:DotBracket" display_in_upload="true" description="Dot-Bracket format is a text-based format for storing both an RNA sequence and its corresponding 2D structure." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Dbn"/>
+    <datatype extension="fasta" type="galaxy.datatypes.sequence:Fasta" display_in_upload="true" description="A sequence in FASTA format consists of a single-line description, followed by lines of sequence data. The first character of the description line is a greater-than ('>') symbol in the first column. All lines should be shorter than 80 characters." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Fasta">
+      <converter file="fasta_to_tabular_converter.xml" target_datatype="tabular"/>
+      <converter file="fasta_to_bowtie_base_index_converter.xml" target_datatype="bowtie_base_index"/>
+      <converter file="fasta_to_bowtie_color_index_converter.xml" target_datatype="bowtie_color_index"/>
+      <converter file="fasta_to_2bit.xml" target_datatype="twobit"/>
+      <converter file="fasta_to_len.xml" target_datatype="len"/>
+    </datatype>
+    <datatype extension="fastq" type="galaxy.datatypes.sequence:Fastq" display_in_upload="true" description="FASTQ format is a text-based format for storing both a biological sequence (usually nucleotide sequence) and its corresponding quality scores." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Fastq">
+        <converter file="fastq_to_fqtoc.xml" target_datatype="fqtoc"/>
+    </datatype>
+    <datatype extension="fastqsanger" type="galaxy.datatypes.sequence:FastqSanger" display_in_upload="true">
+        <converter file="fastq_to_fqtoc.xml" target_datatype="fqtoc"/>
+    </datatype>
+    <datatype extension="fastqsolexa" type="galaxy.datatypes.sequence:FastqSolexa" display_in_upload="true" description="FastqSolexa is the Illumina (Solexa) variant of the Fastq format, which stores sequences and quality scores in a single file." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#FastqSolexa">
+        <converter file="fastq_to_fqtoc.xml" target_datatype="fqtoc"/>
+    </datatype>
+    <datatype extension="fastqcssanger" type="galaxy.datatypes.sequence:FastqCSSanger" display_in_upload="true">
+        <converter file="fastq_to_fqtoc.xml" target_datatype="fqtoc"/>
+    </datatype>
+    <datatype extension="fastqillumina" type="galaxy.datatypes.sequence:FastqIllumina" display_in_upload="true">
+        <converter file="fastq_to_fqtoc.xml" target_datatype="fqtoc"/>
+    </datatype>
+    <datatype extension="fqtoc" type="galaxy.datatypes.sequence:SequenceSplitLocations" display_in_upload="true"/>
+    <datatype extension="eland" type="galaxy.datatypes.tabular:Eland" display_in_upload="true"/>
+    <datatype extension="elandmulti" type="galaxy.datatypes.tabular:ElandMulti" display_in_upload="true"/>
+    <datatype extension="genetrack" type="galaxy.datatypes.tracks:GeneTrack">
+      <!-- <display file="genetrack.xml" /> -->
+    </datatype>
+    <datatype extension="gff" type="galaxy.datatypes.interval:Gff" display_in_upload="true" description="GFF lines have nine required fields that must be tab-separated." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#GFF">
+      <converter file="gff_to_bed_converter.xml" target_datatype="bed"/>
+      <converter file="gff_to_interval_index_converter.xml" target_datatype="interval_index"/>
+      <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/>
+      <converter file="gff_to_fli_converter.xml" target_datatype="fli"/>
+      <display file="ensembl/ensembl_gff.xml" inherit="True"/>
+      <display file="igv/gff.xml" inherit="True"/>
+      <!-- <display file="gbrowse/gbrowse_gff.xml" inherit="True" /> -->
+    </datatype>
+    <datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true" description="The GFF3 format addresses the most common extensions to GFF, while preserving backward compatibility with previous formats." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#GFF3"/>
+    <datatype extension="gif" type="galaxy.datatypes.images:Gif" mimetype="image/gif"/>
+    <datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/>
+    <datatype extension="gtf" type="galaxy.datatypes.interval:Gtf" display_in_upload="true">
+        <converter file="gff_to_interval_index_converter.xml" target_datatype="interval_index"/>
+        <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/>
+        <display file="igb/gtf.xml" />
+    </datatype>
+    <datatype extension="toolshed.gz" type="galaxy.datatypes.binary:Binary" mimetype="multipart/x-gzip" subclass="True" />
+    <datatype extension="h5" type="galaxy.datatypes.binary:H5" mimetype="application/octet-stream" display_in_upload="True"/>
+    <datatype extension="html" type="galaxy.datatypes.text:Html" mimetype="text/html"/>
+    <datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true" description="File must start with definition line in the following format (columns may be in any order)." >
+      <converter file="interval_to_bed_converter.xml" target_datatype="bed"/>
+      <converter file="interval_to_bedstrict_converter.xml" target_datatype="bedstrict"/>
+      <converter file="interval_to_bed6_converter.xml" target_datatype="bed6"/>
+      <converter file="interval_to_bed12_converter.xml" target_datatype="bed12"/>
+      <converter file="interval_to_bgzip_converter.xml" target_datatype="bgzip"/>
+      <converter file="interval_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
+        <converter file="interval_to_bigwig_converter.xml" target_datatype="bigwig"/>
+      <!-- <display file="ucsc/interval_as_bed.xml" inherit="True" /> -->
+      <display file="ensembl/ensembl_interval_as_bed.xml" inherit="True"/>
+      <display file="gbrowse/gbrowse_interval_as_bed.xml" inherit="True"/>
+      <display file="rviewer/bed.xml" inherit="True"/>
+      <display file="igv/interval_as_bed.xml" inherit="True"/>
+    </datatype>
+    <datatype extension="picard_interval_list" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="True">
+      <converter file="picard_interval_list_to_bed6_converter.xml" target_datatype="bed6"/>
+    </datatype>
+    <datatype extension="gatk_interval" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/>
+    <datatype extension="gatk_report" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="True"/>
+    <datatype extension="gatk_dbsnp" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="True"/>
+    <datatype extension="gatk_tranche" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="True"/>
+    <datatype extension="gatk_recal" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="True"/>
+    <datatype extension="jpg" type="galaxy.datatypes.images:Jpg" mimetype="image/jpeg"/>
+    <datatype extension="tiff" type="galaxy.datatypes.images:Tiff" mimetype="image/tiff" display_in_upload="true"/>
+    <datatype extension="bmp" type="galaxy.datatypes.images:Bmp" mimetype="image/bmp"/>
+    <datatype extension="im" type="galaxy.datatypes.images:Im" mimetype="image/im"/>
+    <datatype extension="pcd" type="galaxy.datatypes.images:Pcd" mimetype="image/pcd"/>
+    <datatype extension="pcx" type="galaxy.datatypes.images:Pcx" mimetype="image/pcx"/>
+    <datatype extension="ppm" type="galaxy.datatypes.images:Ppm" mimetype="image/ppm"/>
+    <datatype extension="psd" type="galaxy.datatypes.images:Psd" mimetype="image/psd"/>
+    <datatype extension="xbm" type="galaxy.datatypes.images:Xbm" mimetype="image/xbm"/>
+    <datatype extension="xpm" type="galaxy.datatypes.images:Xpm" mimetype="image/xpm"/>
+    <datatype extension="rgb" type="galaxy.datatypes.images:Rgb" mimetype="image/rgb"/>
+    <datatype extension="pbm" type="galaxy.datatypes.images:Pbm" mimetype="image/pbm"/>
+    <datatype extension="pgm" type="galaxy.datatypes.images:Pgm" mimetype="image/pgm"/>
+    <datatype extension="rna_eps" type="galaxy.datatypes.sequence:RNADotPlotMatrix" mimetype="image/eps" display_in_upload="True"/>
+    <datatype extension="zip" type="galaxy.datatypes.binary:CompressedZipArchive" display_in_upload="True"/>
+    <datatype extension="tar" type="galaxy.datatypes.binary:CompressedArchive" subclass="True" display_in_upload="True"/>
+    <!-- Proteomics Datatypes -->
+    <datatype extension="pepxml" type="galaxy.datatypes.proteomics:PepXml" mimetype="application/xml" display_in_upload="True"/>
+    <datatype extension="raw_pepxml" type="galaxy.datatypes.proteomics:PepXml" mimetype="application/xml" subclass="True"/>
+    <datatype extension="peptideprophet_pepxml" type="galaxy.datatypes.proteomics:PepXml" mimetype="application/xml" subclass="True"/>
+    <datatype extension="interprophet_pepxml" type="galaxy.datatypes.proteomics:PepXml" mimetype="application/xml" subclass="True"/>
+    <datatype extension="protxml" type="galaxy.datatypes.proteomics:ProtXML" mimetype="application/xml" display_in_upload="True" />
+    <datatype extension="trafoxml" type="galaxy.datatypes.xml:GenericXml" mimetype="application/xml"
+        subclass="True" display_in_upload="True" description="Transformation of retention times"/>
+    <datatype extension="qcml" type="galaxy.datatypes.xml:GenericXml" mimetype="application/xml"
+        subclass="True" display_in_upload="True" description="Quality control data in XML format (https://code.google.com/p/qcml/)."/>
+    <datatype extension="pepxml.tsv" type="galaxy.datatypes.proteomics:PepXmlReport" display_in_upload="true" />
+    <datatype extension="protxml.tsv" type="galaxy.datatypes.proteomics:ProtXmlReport" display_in_upload="true" />
+    <datatype extension="mascotdat" type="galaxy.datatypes.proteomics:MascotDat" display_in_upload="false" />
+    <datatype extension="mzid" type="galaxy.datatypes.proteomics:MzIdentML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="idxml" type="galaxy.datatypes.proteomics:IdXML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="tandem" type="galaxy.datatypes.proteomics:TandemXML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="thermo.raw" type="galaxy.datatypes.proteomics:ThermoRAW" mimetype="application/octet-stream" display_in_upload="true" />
+    <datatype extension="mzml" type="galaxy.datatypes.proteomics:MzML" mimetype="application/xml" display_in_upload="true"/>
+    <datatype extension="mgf" type="galaxy.datatypes.proteomics:Mgf" display_in_upload="true" />
+    <datatype extension="wiff" type="galaxy.datatypes.proteomics:Wiff" display_in_upload="true" />
+    <datatype extension="mzxml" type="galaxy.datatypes.proteomics:MzXML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="ms2" type="galaxy.datatypes.proteomics:Ms2" display_in_upload="true" />
+    <datatype extension="mzq" type="galaxy.datatypes.proteomics:MzQuantML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="mz.sqlite" type="galaxy.datatypes.binary:MzSQlite" mimetype="application/octet-stream" display_in_upload="true" />
+    <datatype extension="traml" type="galaxy.datatypes.proteomics:TraML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="featurexml" type="galaxy.datatypes.proteomics:FeatureXML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="consensusxml" type="galaxy.datatypes.proteomics:ConsensusXML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="uniprotxml" type="galaxy.datatypes.proteomics:UniProtXML" mimetype="application/xml" display_in_upload="true" />
+    <datatype extension="msp" type="galaxy.datatypes.proteomics:Msp" display_in_upload="true" />
+    <datatype extension="splib_noindex" type="galaxy.datatypes.proteomics:SPLibNoIndex" display_in_upload="true" />
+    <datatype extension="splib" type="galaxy.datatypes.proteomics:SPLib" display_in_upload="true" />
+    <datatype extension="hlf" type="galaxy.datatypes.proteomics:XHunterAslFormat" mimetype="application/octet-stream" display_in_upload="true" />
+    <datatype extension="idpdb" type="galaxy.datatypes.binary:IdpDB" mimetype="application/octet-stream" display_in_upload="true" />
+    <datatype extension="sf3" type="galaxy.datatypes.proteomics:Sf3" display_in_upload="true" />
+    <datatype extension="cps" type="galaxy.datatypes.binary:Binary" subclass="True" display_in_upload="true" />
+    <datatype extension="ct" type="galaxy.datatypes.tabular:ConnectivityTable" display_in_upload="True"/>
+    <datatype extension="searchgui_archive" type="galaxy.datatypes.binary:SearchGuiArchive" display_in_upload="True"/>
+    <datatype extension="peptideshaker_archive" type="galaxy.datatypes.binary:CompressedArchive" subclass="True" display_in_upload="True"/>
+    <!-- End Proteomics Datatypes -->
+    <datatype extension="netcdf" type="galaxy.datatypes.binary:NetCDF" mimetype="application/octet-stream" display_in_upload="true" description="Format used by netCDF software library for writing and reading chromatography-MS data files." />
+    <datatype extension="eps" type="galaxy.datatypes.images:Eps" mimetype="image/eps"/>
+    <datatype extension="rast" type="galaxy.datatypes.images:Rast" mimetype="image/rast"/>
+    <datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
+    <datatype extension="lav" type="galaxy.datatypes.sequence:Lav" display_in_upload="true" description="Lav is the primary output format for BLASTZ.  The first line of a .lav file begins with #:lav.." />
+    <datatype extension="maf" type="galaxy.datatypes.sequence:Maf" display_in_upload="true" description="TBA and multiz multiple alignment format.  The first line of a .maf file begins with ##maf. This word is followed by white-space-separated 'variable=value' pairs. There should be no white space surrounding the '='." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#MAF">
+      <converter file="maf_to_fasta_converter.xml" target_datatype="fasta"/>
+      <converter file="maf_to_interval_converter.xml" target_datatype="interval"/>
+    </datatype>
+    <datatype extension="mafcustomtrack" type="galaxy.datatypes.sequence:MafCustomTrack">
+      <display file="ucsc/maf_customtrack.xml" />
+    </datatype>
+    <datatype extension="encodepeak" type="galaxy.datatypes.interval:ENCODEPeak" display_in_upload="True">
+      <converter file="encodepeak_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
+      <converter file="encodepeak_to_bgzip_converter.xml" target_datatype="bgzip"/>
+      <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/>
+    </datatype>
+    <datatype extension="pdf" type="galaxy.datatypes.images:Pdf" mimetype="application/pdf" display_in_upload="true"/>
+    <datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true">
+      <converter file="interval_to_bgzip_converter.xml" target_datatype="bgzip"/>
+      <converter file="interval_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
+    </datatype>
+    <datatype extension="obo" type="galaxy.datatypes.text:Obo" mimetype="text/html" display_in_upload="True" />
+    <datatype extension="owl" type="galaxy.datatypes.xml:Owl" mimetype="text/html" display_in_upload="True" />
+    <datatype extension="png" type="galaxy.datatypes.images:Png" mimetype="image/png"/>
+    <datatype extension="qual" type="galaxy.datatypes.qualityscore:QualityScore" />
+    <datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/>
+    <datatype extension="qualillumina" type="galaxy.datatypes.qualityscore:QualityScoreIllumina" display_in_upload="true"/>
+    <datatype extension="qualsolid" type="galaxy.datatypes.qualityscore:QualityScoreSOLiD" display_in_upload="true"/>
+    <datatype extension="qual454" type="galaxy.datatypes.qualityscore:QualityScore454" display_in_upload="true"/>
+    <datatype extension="Roadmaps" type="galaxy.datatypes.assembly:Roadmaps" display_in_upload="false"/>
+    <datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true">
+        <converter file="sam_to_bam.xml" target_datatype="bam"/>
+        <converter file="sam_to_bigwig_converter.xml" target_datatype="bigwig"/>
+    </datatype>
+    <datatype extension="scf" type="galaxy.datatypes.binary:Scf" mimetype="application/octet-stream" display_in_upload="true" description="A binary sequence file in 'scf' format with a '.scf' file extension.  You must manually select this 'File Format' when uploading the file." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Scf"/>
+    <datatype extension="Sequences" type="galaxy.datatypes.assembly:Sequences" display_in_upload="false"/>
+    <datatype extension="snpeffdb" type="galaxy.datatypes.text:SnpEffDb" display_in_upload="True"/>
+    <datatype extension="snpsiftdbnsfp" type="galaxy.datatypes.text:SnpSiftDbNSFP" display_in_upload="True"/>
+    <datatype extension="dbnsfp.tabular" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="True">
+        <converter file="tabular_to_dbnsfp.xml" target_datatype="snpsiftdbnsfp"/>
+    </datatype>
+    <datatype extension="sff" type="galaxy.datatypes.binary:Sff" mimetype="application/octet-stream" display_in_upload="true" description="A binary file in 'Standard Flowgram Format' with a '.sff' file extension." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Sff"/>
+    <datatype extension="sra" type="galaxy.datatypes.binary:Sra" mimetype="application/octet-stream" display_in_upload="true" description="A binary file archive format from the NCBI Sequence Read Archive with a '.sra' file extension." description_url="http://www.ncbi.nlm.nih.gov/books/n/helpsra/SRA_Overview_BK/#SRA_Overview_BK.4_SRA_Data_Structure"/>
+    <datatype extension="svg" type="galaxy.datatypes.xml:GenericXml" mimetype="image/svg+xml" subclass="True"/>
+    <datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/>
+    <datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true" description="Any data in tab delimited format (tabular)." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Tabular_.28tab_delimited.29"/>
+    <datatype extension="twobit" type="galaxy.datatypes.binary:TwoBit" mimetype="application/octet-stream" display_in_upload="true"/>
+    <datatype extension="sqlite" type="galaxy.datatypes.binary:SQlite" mimetype="application/octet-stream" display_in_upload="true"/>
+    <datatype extension="gemini.sqlite" type="galaxy.datatypes.binary:GeminiSQLite" mimetype="application/octet-stream" display_in_upload="True" />
+    <datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true" description="Any text file." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Plain_text"/>
+    <datatype extension="linecount" type="galaxy.datatypes.data:LineCount" display_in_upload="false"/>
+    <datatype extension="memexml" type="galaxy.datatypes.xml:MEMEXml" mimetype="application/xml" display_in_upload="true"/>
+    <datatype extension="cisml" type="galaxy.datatypes.xml:CisML" mimetype="application/xml" display_in_upload="true"/>
+    <datatype extension="xml" type="galaxy.datatypes.xml:GenericXml" mimetype="application/xml" display_in_upload="true"/>
+    <datatype extension="vcf" type="galaxy.datatypes.tabular:Vcf" display_in_upload="true">
+      <converter file="vcf_to_bgzip_converter.xml" target_datatype="bgzip"/>
+      <converter file="vcf_to_vcf_bgzip_converter.xml" target_datatype="vcf_bgzip"/>
+      <converter file="vcf_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
+      <converter file="bed_gff_or_vcf_to_bigwig_converter.xml" target_datatype="bigwig"/>
+      <display file="ucsc/vcf.xml" />
+      <display file="igv/vcf.xml" />
+      <display file="rviewer/vcf.xml" inherit="True"/>
+      <display file="iobio/vcf.xml" />
+    </datatype>
+    <datatype extension="bcf" type="galaxy.datatypes.binary:Bcf" mimetype="application/octet-stream" display_in_upload="True">
+      <converter file="bcf_to_bcf_bgzip_converter.xml" target_datatype="bcf_bgzip"/>
+    </datatype>
+    <datatype extension="bcf_bgzip" type="galaxy.datatypes.binary:Binary" mimetype="application/octet-stream" subclass="True">
+      <converter file="bcf_bgzip_to_bcf_converter.xml" target_datatype="bcf"/>
+    </datatype>
+    <datatype extension="velvet" type="galaxy.datatypes.assembly:Velvet" display_in_upload="false"/>
+    <datatype extension="wig" type="galaxy.datatypes.interval:Wiggle" display_in_upload="true" description="The wiggle format is line-oriented.  Wiggle data is preceded by a track definition line, which adds a number of options for controlling the default display of this track." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#Wig">
+      <converter file="wig_to_bigwig_converter.xml" target_datatype="bigwig"/>
+      <converter file="wiggle_to_simple_converter.xml" target_datatype="interval"/>
+      <!-- <display file="gbrowse/gbrowse_wig.xml" /> -->
+      <display file="igb/wig.xml" />
+    </datatype>
+    <datatype extension="interval_index" type="galaxy.datatypes.binary:Binary" subclass="True" />
+    <datatype extension="tabix" type="galaxy.datatypes.binary:Binary" subclass="True" />
+    <datatype extension="bgzip" type="galaxy.datatypes.binary:Binary" subclass="True" />
+    <datatype extension="vcf_bgzip" type_extension="bgzip" subclass="True" >
+      <display file="igv/vcf.xml" />
+      <converter file="vcf_bgzip_to_tabix_converter.xml" target_datatype="tabix"/>
+    </datatype>
+    <!-- Phylogenetic tree datatypes -->
+    <datatype extension="phyloxml" type="galaxy.datatypes.xml:Phyloxml" display_in_upload="true" />
+    <datatype extension="nhx" type="galaxy.datatypes.data:Newick" display_in_upload="true" />
+    <datatype extension="nex" type="galaxy.datatypes.data:Nexus" display_in_upload="true" />
+    <!-- Start RGenetics Datatypes -->
+    <datatype extension="affybatch" type="galaxy.datatypes.genetics:Affybatch" display_in_upload="true"/>
+    <!-- eigenstrat pedigree input file -->
+    <datatype extension="eigenstratgeno" type="galaxy.datatypes.genetics:Eigenstratgeno"/>
+    <!-- eigenstrat pca output file for adjusted eigenQTL eg -->
+    <datatype extension="eigenstratpca" type="galaxy.datatypes.genetics:Eigenstratpca"/>
+    <datatype extension="eset" type="galaxy.datatypes.genetics:Eset" display_in_upload="true" />
+    <!-- fbat/pbat format pedigree (header row of marker names) -->
+    <datatype extension="fped" type="galaxy.datatypes.genetics:Fped" display_in_upload="true"/>
+    <!-- phenotype file - fbat format -->
+    <datatype extension="fphe" type="galaxy.datatypes.genetics:Fphe" display_in_upload="true" mimetype="text/html"/>
+    <!-- genome graphs ucsc file - first col is always marker then numeric values to plot -->
+    <datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/>
+    <!-- part of linkage format pedigree -->
+    <!-- information redundancy (LD) filtered plink pbed -->
+    <datatype extension="ldindep" type="galaxy.datatypes.genetics:ldIndep" display_in_upload="true">
+    </datatype>
+    <datatype extension="malist" type="galaxy.datatypes.genetics:MAlist" display_in_upload="true"/>
+    <!-- linkage format pedigree (separate .map file) -->
+    <datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true">
+      <converter file="lped_to_fped_converter.xml" target_datatype="fped"/>
+      <converter file="lped_to_pbed_converter.xml" target_datatype="pbed"/>
+    </datatype>
+    <!-- plink compressed file - has bed extension unfortunately -->
+    <datatype extension="pbed" type="galaxy.datatypes.genetics:Pbed" display_in_upload="true">
+      <converter file="pbed_to_lped_converter.xml" target_datatype="lped"/>
+      <converter file="pbed_ldreduced_converter.xml" target_datatype="ldindep"/>
+    </datatype>
+    <datatype extension="pheno" type="galaxy.datatypes.genetics:Pheno"/>
+    <!-- phenotype file - plink format -->
+    <datatype extension="pphe" type="galaxy.datatypes.genetics:Pphe" display_in_upload="true" mimetype="text/html"/>
+    <datatype extension="rexpbase" type="galaxy.datatypes.genetics:RexpBase"/>
+    <datatype extension="rgenetics" type="galaxy.datatypes.genetics:Rgenetics"/>
+    <datatype extension="snptest" type="galaxy.datatypes.genetics:Snptest" display_in_upload="true"/>
+    <datatype extension="snpmatrix" type="galaxy.datatypes.genetics:SNPMatrix" display_in_upload="true"/>
+    <datatype extension="xls" type="galaxy.datatypes.tabular:Tabular"/>
+    <!-- End RGenetics Datatypes -->
+    <datatype extension="ipynb" type="galaxy.datatypes.text:Ipynb" display_in_upload="True" />
+    <datatype extension="json" type="galaxy.datatypes.text:Json" display_in_upload="True" />
+    <!-- graph datatypes -->
+    <datatype extension="xgmml" type="galaxy.datatypes.graph:Xgmml" display_in_upload="true"/>
+    <datatype extension="sif" type="galaxy.datatypes.graph:Sif" display_in_upload="true"/>
+    <!-- datatypes storing triples -->
+    <datatype extension="triples" type="galaxy.datatypes.triples:Triples" display_in_upload="false"/>
+    <datatype extension="hdt" type="galaxy.datatypes.triples:HDT" display_in_upload="true"/>
+    <datatype extension="nt" type="galaxy.datatypes.triples:NTriples" display_in_upload="true"/>
+    <datatype extension="n3" type="galaxy.datatypes.triples:N3" display_in_upload="true"/>
+    <datatype extension="ttl" type="galaxy.datatypes.triples:Turtle" display_in_upload="true"/>
+    <datatype extension="rdf" type="galaxy.datatypes.triples:Rdf" display_in_upload="true"/>
+    <datatype extension="jsonld" type="galaxy.datatypes.triples:Jsonld" display_in_upload="true"/>
+    <!-- Excel datatypes -->
+    <datatype extension="xlsx" type="galaxy.datatypes.binary:Xlsx" display_in_upload="true" />
+    <datatype extension="btwisted" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="cai" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="charge" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="checktrans" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="chips" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="codcmp" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="coderet" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="compseq" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="cpgplot" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="cpgreport" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="cusp" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="cut" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="dan" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="digest" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="dreg" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="einverted" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="epestfind" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="equicktandem" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="est2genome" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="etandem" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="freak" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="fuzznuc" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="fuzzpro" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="fuzztran" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="garnier" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="geecee" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="helixturnhelix" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="hmoment" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="isochore" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="match" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="nametable" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="needle" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="newcpgreport" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="newcpgseek" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="noreturn" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="palindrome" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="pepcoil" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="pepinfo" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="pepstats" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="polydot" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="preg" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="prettyseq" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="primersearch" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="showfeat" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="showorf" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="sixpack" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="strider" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="supermatcher" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="syco" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="textsearch" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="vectorstrip" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="wobble" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="wordcount" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <!-- Report formats http://emboss.sourceforge.net/docs/themes/ReportFormats.html -->
+    <datatype extension="dbmotif" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="diffseq" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="excel" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="feattable" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="motif" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="regions" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="seqtable" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="simple" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="table" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="tagseq" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <!-- Sequence formats http://emboss.sourceforge.net/docs/themes/SequenceFormats.html -->
+    <datatype extension="acedb" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="clustal" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="codata" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="embl" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="fitch" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="gcg" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="genbank" type="galaxy.datatypes.data:Text" subclass="True" edam_format="format_1936"/>
+    <datatype extension="hennig86" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="ig" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="jackknifer" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="jackknifernon" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="mega" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="meganon" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="ncbi" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="nexus" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="nexusnon" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="phylip" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="phylipnon" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="pir" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="staden" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="swiss" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <!-- Alignment Formats http://emboss.sourceforge.net/docs/themes/AlignFormats.html -->
+    <datatype extension="markx0" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="markx1" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="markx10" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="markx2" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="markx3" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="pair" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="score" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="srs" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <datatype extension="srspair" type="galaxy.datatypes.data:Text" subclass="True"/>
+    <!-- MSA Datatypes -->
+    <datatype extension="hmm2" type="galaxy.datatypes.msa:Hmmer2" display_in_upload="true" />
+    <datatype extension="hmm3" type="galaxy.datatypes.msa:Hmmer3" display_in_upload="true" />
+    <datatype extension="stockholm" type="galaxy.datatypes.msa:Stockholm_1_0" display_in_upload="True" />
+    <datatype extension="xmfa" type="galaxy.datatypes.msa:MauveXmfa" display_in_upload="True" />
+    <datatype extension="cel" type="galaxy.datatypes.binary:Cel" display_in_upload="True" />
+
+    <datatype extension="RData" type="galaxy.datatypes.binary:RData" display_in_upload="true" description="Stored data from an R session"/>
+    <datatype extension="oxlicg" type="galaxy.datatypes.binary:OxliCountGraph" mimetype="application/octet-stream" display_in_upload="true"/>
+    <datatype extension="oxling" type="galaxy.datatypes.binary:OxliNodeGraph" mimetype="application/octet-stream" display_in_upload="true"/>
+    <datatype extension="oxlits" type="galaxy.datatypes.binary:OxliTagSet" mimetype="application/octet-stream" display_in_upload="true"/>
+    <datatype extension="oxlist" type="galaxy.datatypes.binary:OxliStopTags" mimetype="application/octet-stream" display_in_upload="true"/>
+    <datatype extension="oxliss" type="galaxy.datatypes.binary:OxliSubset" mimetype="application/octet-stream" display_in_upload="true"/>
+    <datatype extension="oxligl" type="galaxy.datatypes.binary:OxliGraphLabels" mimetype="application/octet-stream" display_in_upload="true"/>
+    <!-- Constructive solid geometry datatypes -->
+    <datatype extension="plyascii" type="galaxy.datatypes.constructive_solid_geometry:PlyAscii" display_in_upload="true" />
+    <datatype extension="plybinary" type="galaxy.datatypes.constructive_solid_geometry:PlyBinary" display_in_upload="true" />
+    <datatype extension="vtkascii" type="galaxy.datatypes.constructive_solid_geometry:VtkAscii" display_in_upload="true" />
+    <datatype extension="vtkbinary" type="galaxy.datatypes.constructive_solid_geometry:VtkBinary" display_in_upload="true" />
+    <!-- Metagenomic Datatype -->
+    <datatype extension="biom1" type="galaxy.datatypes.text:Biom1" display_in_upload="True" subclass="True" mimetype="application/json">
+        <display file="biom/biom_simple.xml" />
+    </datatype>
+    <!-- Strand-specific Coordinate Count Datatype used by the Center for Eukaryotic Gene Regulation labs at Penn State -->
+    <datatype extension="scidx" type="galaxy.datatypes.interval:ScIdx" display_in_upload="true" />
+
+    <!--Cheminformatics Datatypes -->
+    <datatype extension="smi" type="galaxy.datatypes.molecules:SMILES" display_in_upload="True">
+        <!-- The ordering is important: the first converter listed is treated as the default by the built-in conversion function (here, conversion to sdf). -->
+        <converter file="smi_to_sdf_converter.xml" target_datatype="sdf"/>
+        <converter file="smi_to_inchi_converter.xml" target_datatype="inchi"/>
+        <converter file="smi_to_cml_converter.xml" target_datatype="cml"/>
+        <converter file="smi_to_mol_converter.xml" target_datatype="mol"/>
+        <converter file="smi_to_mol2_converter.xml" target_datatype="mol2"/>
+        <converter file="smi_to_smi_converter.xml" target_datatype="smi"/>
+    </datatype>
+    <datatype extension="sdf" type="galaxy.datatypes.molecules:SDF" display_in_upload="True">
+        <converter file="sdf_to_smi_converter.xml" target_datatype="smi"/>
+        <converter file="sdf_to_inchi_converter.xml" target_datatype="inchi"/>
+        <converter file="sdf_to_mol2_converter.xml" target_datatype="mol2"/>
+        <converter file="sdf_to_cml_converter.xml" target_datatype="cml"/>
+    </datatype>
+    <datatype extension="inchi" type="galaxy.datatypes.molecules:InChI" display_in_upload="True">
+        <converter file="inchi_to_smi_converter.xml" target_datatype="smi"/>
+        <converter file="inchi_to_sdf_converter.xml" target_datatype="sdf"/>
+        <converter file="inchi_to_mol_converter.xml" target_datatype="mol"/>
+        <converter file="inchi_to_mol2_converter.xml" target_datatype="mol2"/>
+        <converter file="inchi_to_cml_converter.xml" target_datatype="cml"/>
+    </datatype>
+    <datatype extension="mol" type="galaxy.datatypes.molecules:MOL" display_in_upload="True">
+        <converter file="mol_to_smi_converter.xml" target_datatype="smi"/>
+        <converter file="mol_to_inchi_converter.xml" target_datatype="inchi"/>
+        <converter file="mol_to_mol2_converter.xml" target_datatype="mol2"/>
+        <converter file="mol_to_cml_converter.xml" target_datatype="cml"/>
+    </datatype>
+    <datatype extension="mol2" type="galaxy.datatypes.molecules:MOL2" display_in_upload="False">
+        <converter file="mol2_to_smi_converter.xml" target_datatype="smi"/>
+        <converter file="mol2_to_sdf_converter.xml" target_datatype="sdf"/>
+        <converter file="mol2_to_inchi_converter.xml" target_datatype="inchi"/>
+        <converter file="mol2_to_mol_converter.xml" target_datatype="mol"/>
+        <converter file="mol2_to_cml_converter.xml" target_datatype="cml"/>
+    </datatype>
+    <datatype extension="cml" type="galaxy.datatypes.molecules:CML" display_in_upload="True">
+        <converter file="cml_to_smi_converter.xml" target_datatype="smi"/>
+        <converter file="cml_to_inchi_converter.xml" target_datatype="inchi"/>
+        <converter file="cml_to_sdf_converter.xml" target_datatype="sdf"/>
+        <converter file="cml_to_mol2_converter.xml" target_datatype="mol2"/>
+    </datatype>
+    <datatype extension="fps" type="galaxy.datatypes.molecules:FPS" mimetype="text/html" display_in_upload="True" />
+    <datatype extension="obfs" type="galaxy.datatypes.molecules:OBFS" mimetype="text/html" display_in_upload="True" />
+    <datatype extension="phar" type="galaxy.datatypes.molecules:PHAR" display_in_upload="False" />
+    <datatype extension="pdb" type="galaxy.datatypes.molecules:PDB" display_in_upload="True" />
+    <!-- mothur formats -->
+    <datatype extension="mothur.otu" type="galaxy.datatypes.mothur:Otu" display_in_upload="true"/>
+    <datatype extension="mothur.list" type="galaxy.datatypes.mothur:Otu" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.sabund" type="galaxy.datatypes.mothur:Sabund" display_in_upload="true"/>
+    <datatype extension="mothur.rabund" type="galaxy.datatypes.mothur:Sabund" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.shared" type="galaxy.datatypes.mothur:GroupAbund" display_in_upload="true"/>
+    <datatype extension="mothur.relabund" type="galaxy.datatypes.mothur:GroupAbund" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.names" type="galaxy.datatypes.mothur:Names" display_in_upload="true"/>
+    <datatype extension="mothur.design" type="galaxy.datatypes.mothur:Group" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.summary" type="galaxy.datatypes.mothur:Summary" display_in_upload="true"/>
+    <datatype extension="mothur.groups" type="galaxy.datatypes.mothur:Group" display_in_upload="true"/>
+    <datatype extension="mothur.oligos" type="galaxy.datatypes.mothur:Oligos" display_in_upload="true"/>
+    <datatype extension="mothur.align" type="galaxy.datatypes.sequence:Fasta" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.accnos" type="galaxy.datatypes.mothur:AccNos" display_in_upload="true"/>
+    <datatype extension="mothur.otulabels" type="galaxy.datatypes.mothur:AccNos" display_in_upload="true"/>
+    <datatype extension="mothur.otu.corr" type="galaxy.datatypes.tabular:Tabular" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.map" type="galaxy.datatypes.mothur:SecondaryStructureMap" display_in_upload="true"/>
+    <datatype extension="mothur.align.check" type="galaxy.datatypes.mothur:AlignCheck" display_in_upload="true"/>
+    <datatype extension="mothur.align.report" type="galaxy.datatypes.mothur:AlignReport" display_in_upload="true"/>
+    <datatype extension="mothur.filter" type="galaxy.datatypes.mothur:LaneMask" display_in_upload="true"/>
+    <datatype extension="mothur.dist" type="galaxy.datatypes.mothur:DistanceMatrix" display_in_upload="true"/>
+    <datatype extension="mothur.tre" type="galaxy.datatypes.data:Text" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.pair.dist" type="galaxy.datatypes.mothur:PairwiseDistanceMatrix" display_in_upload="true"/>
+    <datatype extension="mothur.square.dist" type="galaxy.datatypes.mothur:SquareDistanceMatrix" display_in_upload="true"/>
+    <datatype extension="mothur.lower.dist" type="galaxy.datatypes.mothur:LowerTriangleDistanceMatrix" display_in_upload="true"/>
+    <datatype extension="mothur.ref.taxonomy" type="galaxy.datatypes.mothur:RefTaxonomy" display_in_upload="true">
+      <converter file="ref_to_seq_taxonomy_converter.xml" target_datatype="mothur.seq.taxonomy"/>
+    </datatype>
+    <datatype extension="mothur.seq.taxonomy" type="galaxy.datatypes.mothur:RefTaxonomy" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.rdp.taxonomy" type="galaxy.datatypes.mothur:RefTaxonomy" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.cons.taxonomy" type="galaxy.datatypes.mothur:ConsensusTaxonomy" display_in_upload="true"/>
+    <datatype extension="mothur.tax.summary" type="galaxy.datatypes.mothur:TaxonomySummary" display_in_upload="true"/>
+    <datatype extension="mothur.freq" type="galaxy.datatypes.mothur:Frequency" display_in_upload="true"/>
+    <datatype extension="mothur.quan" type="galaxy.datatypes.mothur:Quantile" display_in_upload="true"/>
+    <datatype extension="mothur.filtered.quan" type="galaxy.datatypes.mothur:Quantile" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.masked.quan" type="galaxy.datatypes.mothur:Quantile" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.filtered.masked.quan" type="galaxy.datatypes.mothur:Quantile" subclass="True" display_in_upload="true"/>
+    <datatype extension="mothur.axes" type="galaxy.datatypes.mothur:Axes" display_in_upload="true"/>
+    <datatype extension="mothur.sff.flow" type="galaxy.datatypes.mothur:SffFlow" display_in_upload="true"/>
+    <datatype extension="mothur.count_table" type="galaxy.datatypes.mothur:CountTable" display_in_upload="true"/>
+    <datatype extension="neostore" type="galaxy.datatypes.neo4j:Neo4jDB" mimetype="text/html"  display_in_upload="false"/>
+    <datatype extension="trackhub" type="galaxy.datatypes.tracks:UCSCTrackHub" display_in_upload="true">
+        <display file="ucsc/trackhub.xml" />
+    </datatype>
+    <datatype extension="blastxml" type="galaxy.datatypes.blast:BlastXml" mimetype="application/xml" display_in_upload="true"/>
+    <datatype extension="blastdbn" type="galaxy.datatypes.blast:BlastNucDb" mimetype="text/html" display_in_upload="false"/>
+    <datatype extension="blastdbp" type="galaxy.datatypes.blast:BlastProtDb" mimetype="text/html" display_in_upload="false"/>
+    <datatype extension="blastdbd" type="galaxy.datatypes.blast:BlastDomainDb" mimetype="text/html" display_in_upload="false"/>
+    <datatype extension="maskinfo-asn1" type="galaxy.datatypes.data:GenericAsn1" mimetype="text/plain" subclass="True" display_in_upload="true" />
+    <datatype extension="maskinfo-asn1-binary" type="galaxy.datatypes.binary:GenericAsn1Binary" mimetype="application/octet-stream" subclass="True" display_in_upload="true" />
+    <datatype extension="pssm-asn1" type="galaxy.datatypes.data:GenericAsn1" mimetype="text/plain" subclass="True" display_in_upload="true" />
+  </registration>
+  <sniffers>
+    <!--
+    The order in which Galaxy attempts to determine data types is
+    important because some formats are much more loosely defined
+    than others.  The following list should begin with the most
+    rigidly defined format, followed by the next-most rigidly
+    defined, and so on.
+    -->
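+    <!--
+    For example, every GTF file is also plain tab-separated text, so the
+    strict Gtf sniffer below must run before the looser interval and tabular
+    sniffers; a loosely defined format listed first would claim files that
+    belong to a more specific type.
+    -->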
+    <sniffer type="galaxy.datatypes.mothur:Sabund"/>
+    <sniffer type="galaxy.datatypes.mothur:Otu"/>
+    <sniffer type="galaxy.datatypes.mothur:GroupAbund"/>
+    <sniffer type="galaxy.datatypes.mothur:SecondaryStructureMap"/>
+    <sniffer type="galaxy.datatypes.mothur:LowerTriangleDistanceMatrix"/>
+    <sniffer type="galaxy.datatypes.mothur:SquareDistanceMatrix"/>
+    <sniffer type="galaxy.datatypes.mothur:PairwiseDistanceMatrix"/>
+    <sniffer type="galaxy.datatypes.mothur:Oligos"/>
+    <sniffer type="galaxy.datatypes.mothur:Quantile"/>
+    <sniffer type="galaxy.datatypes.mothur:Frequency"/>
+    <sniffer type="galaxy.datatypes.mothur:LaneMask"/>
+    <sniffer type="galaxy.datatypes.mothur:RefTaxonomy"/>
+    <sniffer type="galaxy.datatypes.mothur:Axes"/>
+    <sniffer type="galaxy.datatypes.constructive_solid_geometry:PlyAscii"/>
+    <sniffer type="galaxy.datatypes.constructive_solid_geometry:PlyBinary"/>
+    <sniffer type="galaxy.datatypes.constructive_solid_geometry:VtkAscii"/>
+    <sniffer type="galaxy.datatypes.constructive_solid_geometry:VtkBinary"/>
+    <sniffer type="galaxy.datatypes.interval:ScIdx"/>
+    <sniffer type="galaxy.datatypes.tabular:Vcf"/>
+    <sniffer type="galaxy.datatypes.binary:TwoBit"/>
+    <sniffer type="galaxy.datatypes.binary:GeminiSQLite"/>
+    <sniffer type="galaxy.datatypes.binary:MzSQlite"/>
+    <sniffer type="galaxy.datatypes.binary:IdpDB"/>
+    <sniffer type="galaxy.datatypes.binary:SQlite"/>
+    <sniffer type="galaxy.datatypes.binary:H5"/>
+    <sniffer type="galaxy.datatypes.binary:Bam"/>
+    <sniffer type="galaxy.datatypes.binary:CRAM"/>
+    <sniffer type="galaxy.datatypes.binary:Sff"/>
+    <sniffer type="galaxy.datatypes.binary:Sra"/>
+    <sniffer type="galaxy.datatypes.binary:NetCDF"/>
+    <sniffer type="galaxy.datatypes.triples:Rdf"/>
+    <sniffer type="galaxy.datatypes.blast:BlastXml"/>
+    <sniffer type="galaxy.datatypes.xml:Phyloxml"/>
+    <sniffer type="galaxy.datatypes.xml:Owl"/>
+    <sniffer type="galaxy.datatypes.proteomics:MzML"/>
+    <sniffer type="galaxy.datatypes.proteomics:TandemXML"/>
+    <sniffer type="galaxy.datatypes.proteomics:PepXml"/>
+    <sniffer type="galaxy.datatypes.proteomics:Mgf"/>
+    <sniffer type="galaxy.datatypes.proteomics:ProtXML"/>
+    <sniffer type="galaxy.datatypes.proteomics:MzXML"/>
+    <sniffer type="galaxy.datatypes.proteomics:TraML"/>
+    <sniffer type="galaxy.datatypes.proteomics:MzIdentML"/>
+    <sniffer type="galaxy.datatypes.proteomics:MzQuantML"/>
+    <sniffer type="galaxy.datatypes.proteomics:UniProtXML"/>
+    <sniffer type="galaxy.datatypes.proteomics:Msp"/>
+    <sniffer type="galaxy.datatypes.proteomics:SPLib"/>
+    <sniffer type="galaxy.datatypes.proteomics:ThermoRAW"/>
+    <sniffer type="galaxy.datatypes.molecules:CML"/>
+    <sniffer type="galaxy.datatypes.xml:GenericXml"/>
+    <sniffer type="galaxy.datatypes.triples:HDT"/>
+    <sniffer type="galaxy.datatypes.triples:Turtle"/>
+    <sniffer type="galaxy.datatypes.triples:NTriples"/>
+    <sniffer type="galaxy.datatypes.triples:Jsonld"/>
+    <sniffer type="galaxy.datatypes.sequence:Maf"/>
+    <sniffer type="galaxy.datatypes.sequence:Lav"/>
+    <sniffer type="galaxy.datatypes.sequence:csFasta"/>
+    <sniffer type="galaxy.datatypes.qualityscore:QualityScoreSOLiD"/>
+    <sniffer type="galaxy.datatypes.qualityscore:QualityScore454"/>
+    <sniffer type="galaxy.datatypes.molecules:SDF"/>
+    <sniffer type="galaxy.datatypes.molecules:PDB"/>
+    <sniffer type="galaxy.datatypes.molecules:MOL2"/>
+    <sniffer type="galaxy.datatypes.molecules:InChI"/>
+    <sniffer type="galaxy.datatypes.molecules:FPS"/>
+    <!-- TODO: see molecules.py <sniffer type="galaxy.datatypes.molecules:SMILES"/>-->
+    <sniffer type="galaxy.datatypes.sequence:Fasta"/>
+    <sniffer type="galaxy.datatypes.sequence:Fastq"/>
+    <sniffer type="galaxy.datatypes.interval:Wiggle"/>
+    <sniffer type="galaxy.datatypes.text:Html"/>
+    <sniffer type="galaxy.datatypes.images:Pdf"/>
+    <sniffer type="galaxy.datatypes.sequence:Axt"/>
+    <sniffer type="galaxy.datatypes.interval:Bed"/>
+    <sniffer type="galaxy.datatypes.interval:CustomTrack"/>
+    <sniffer type="galaxy.datatypes.interval:Gtf"/>
+    <sniffer type="galaxy.datatypes.interval:Gff"/>
+    <sniffer type="galaxy.datatypes.interval:Gff3"/>
+    <sniffer type="galaxy.datatypes.tabular:Pileup"/>
+    <sniffer type="galaxy.datatypes.interval:Interval"/>
+    <sniffer type="galaxy.datatypes.tabular:Sam"/>
+    <sniffer type="galaxy.datatypes.data:Newick"/>
+    <sniffer type="galaxy.datatypes.data:Nexus"/>
+    <sniffer type="galaxy.datatypes.text:Obo"/>
+    <sniffer type="galaxy.datatypes.text:Arff"/>
+    <sniffer type="galaxy.datatypes.text:Ipynb"/>
+    <sniffer type="galaxy.datatypes.text:Biom1"/>
+    <sniffer type="galaxy.datatypes.text:Json"/>
+    <sniffer type="galaxy.datatypes.sequence:RNADotPlotMatrix"/>
+    <sniffer type="galaxy.datatypes.sequence:DotBracket"/>
+    <sniffer type="galaxy.datatypes.tabular:ConnectivityTable"/>
+    <sniffer type="galaxy.datatypes.tabular:CSV"/>
+    <sniffer type="galaxy.datatypes.tabular:TSV"/>
+    <sniffer type="galaxy.datatypes.msa:Hmmer2" />
+    <sniffer type="galaxy.datatypes.msa:Hmmer3" />
+    <sniffer type="galaxy.datatypes.msa:Stockholm_1_0" />
+    <sniffer type="galaxy.datatypes.msa:MauveXmfa" />
+    <sniffer type="galaxy.datatypes.binary:Cel" />
+    <sniffer type="galaxy.datatypes.binary:RData" />
+    <sniffer type="galaxy.datatypes.images:Jpg"/>
+    <sniffer type="galaxy.datatypes.images:Png"/>
+    <sniffer type="galaxy.datatypes.images:Tiff"/>
+    <sniffer type="galaxy.datatypes.images:Bmp"/>
+    <sniffer type="galaxy.datatypes.images:Gif"/>
+    <sniffer type="galaxy.datatypes.images:Im"/>
+    <sniffer type="galaxy.datatypes.images:Pcd"/>
+    <sniffer type="galaxy.datatypes.images:Pcx"/>
+    <sniffer type="galaxy.datatypes.images:Ppm"/>
+    <sniffer type="galaxy.datatypes.images:Psd"/>
+    <sniffer type="galaxy.datatypes.images:Xbm"/>
+    <sniffer type="galaxy.datatypes.images:Rgb"/>
+    <sniffer type="galaxy.datatypes.images:Pbm"/>
+    <sniffer type="galaxy.datatypes.images:Pgm"/>
+    <sniffer type="galaxy.datatypes.images:Xpm"/>
+    <sniffer type="galaxy.datatypes.images:Eps"/>
+    <sniffer type="galaxy.datatypes.images:Rast"/>
+    <!--
+    Keep this commented until the sniff method in the assembly.py
+    module is fixed to not read the entire file.
+    <sniffer type="galaxy.datatypes.assembly:Amos"/>
+    -->
+    <sniffer type="galaxy.datatypes.binary:OxliCountGraph"/>
+    <sniffer type="galaxy.datatypes.binary:OxliNodeGraph"/>
+    <sniffer type="galaxy.datatypes.binary:OxliTagSet"/>
+    <sniffer type="galaxy.datatypes.binary:OxliStopTags"/>
+    <sniffer type="galaxy.datatypes.binary:OxliSubset"/>
+    <sniffer type="galaxy.datatypes.binary:OxliGraphLabels"/>
+  </sniffers>
+  <build_sites>
+      <!--
+      Build sites define the builds (dbkeys) available at sites used by display
+      applications and the URL to those sites.
+
+      The `display` attributes on the `ucsc` and `gbrowse` sites replace the
+      `ucsc_display_sites` and `gbrowse_display_sites` options in galaxy.ini.
+      Because these are used by "old-style" display applications, their types
+      cannot change if you want the old-style display links for these sites to
+      work.
+      -->
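+      <!-- Each referenced file is a simple tab-separated text file listing,
+           per site, a site name, its URL, and the builds (dbkeys) it
+           supports; the line below is only an illustrative sketch of that
+           layout:
+             main<TAB>http://genome.ucsc.edu/cgi-bin/hgTracks?<TAB>hg18,hg19
+           -->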
+      <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt" display="main,test,archaea,ucla"/>
+      <site type="gbrowse" file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" display="modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225"/>
+      <site type="ensembl" file="tool-data/shared/ensembl/ensembl_sites.txt"/>
+      <site type="ensembl_data_url" file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt"/>
+      <site type="igv" file="tool-data/shared/igv/igv_build_sites.txt"/>
+      <site type="rviewer" file="tool-data/shared/rviewer/rviewer_build_sites.txt"/>
+  </build_sites>
+</datatypes>
diff --git a/config/dependency_resolvers_conf.xml.sample b/config/dependency_resolvers_conf.xml.sample
new file mode 100644
index 0000000..243fff0
--- /dev/null
+++ b/config/dependency_resolvers_conf.xml.sample
@@ -0,0 +1,28 @@
+<dependency_resolvers>
+<!-- the default configuration, first look for dependencies installed from the toolshed -->
+  <tool_shed_packages />
+<!-- then look for env.sh files in directories according to the "galaxy packages" schema.
+     These resolvers can take a base_path attribute to specify where to look for
+     package definitions, but by default look in the directory specified by tool_dependency_dir
+     in Galaxy's config/galaxy.ini -->
+  <galaxy_packages />
+  <galaxy_packages versionless="true" />
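+<!-- For example, a resolver pointed at a shared directory of package
+     definitions (the path here is hypothetical):
+  <galaxy_packages base_path="/opt/galaxy/tool-deps" />
+-->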
+  <conda />
+  <conda versionless="true" />
+
+<!-- Example configuration of modules dependency resolver, uses Environment Modules -->
+<!--
+  <modules modulecmd="/opt/Modules/3.2.9/bin/modulecmd" />
+  <modules modulecmd="/opt/Modules/3.2.9/bin/modulecmd" versionless="true" default_indicator="default" />
+Attributes are:
+* modulecmd - path to modulecmd
+* versionless - default: false - whether to resolve tools using a version number or not
+* find_by - directory or avail - use the DirectoryModuleChecker or AvailModuleChecker
+* prefetch - default: true - in the AvailModuleChecker prefetch module info with 'module avail'
+* default_indicator - default: '(default)' - what indicates to the AvailModuleChecker that a module is the default version
+-->
+<!-- other resolvers
+  <tool_shed_tap />
+  <homebrew />
+-->
+</dependency_resolvers>
\ No newline at end of file
diff --git a/config/disposable_email_blacklist.conf.sample b/config/disposable_email_blacklist.conf.sample
new file mode 100644
index 0000000..4475945
--- /dev/null
+++ b/config/disposable_email_blacklist.conf.sample
@@ -0,0 +1,9 @@
+If you want to disable registration for users that are using disposable email addresses,
+rename this file to disposable_email_blacklist.conf and fill it with the disposable domains
+that you want blacklisted, each on its own line and without the '@' character, as shown below.
+Users registering with emails from these domains will get an error during registration.
+
+mailinator.com
+sogetthis.com
+spamgourmet.com
+trashmail.net
\ No newline at end of file
diff --git a/config/external_service_types_conf.xml.sample b/config/external_service_types_conf.xml.sample
new file mode 100644
index 0000000..b64c0fd
--- /dev/null
+++ b/config/external_service_types_conf.xml.sample
@@ -0,0 +1,5 @@
+<?xml version="1.0"?>
+<external_service_types>
+    <external_service_type file="simple_unknown_sequencer.xml" visible="False"/>
+    <external_service_type file="applied_biosystems_solid.xml" visible="True"/>
+</external_service_types>
diff --git a/config/galaxy.ini.sample b/config/galaxy.ini.sample
new file mode 100644
index 0000000..2e4a325
--- /dev/null
+++ b/config/galaxy.ini.sample
@@ -0,0 +1,1286 @@
+#
+# Galaxy is configured by default to be usable in a single-user development
+# environment.  To tune the application for a multi-user production
+# environment, see the documentation at:
+#
+#  http://usegalaxy.org/production
+#
+
+# Throughout this sample configuration file, except where stated otherwise,
+# uncommented values override the built-in default, whereas commented values
+# show the default value.  Relative paths are relative to the root
+# Galaxy directory.
+#
+# Examples of many of these options are explained in more detail in the wiki:
+#
+#   https://wiki.galaxyproject.org/Admin/Config
+#
+# Config hackers are encouraged to check there before asking for help.
+
+# ---- HTTP Server ----------------------------------------------------------
+
+# Configuration of the internal HTTP server.
+
+[server:main]
+
+# The internal HTTP server to use.  Currently only Paste is provided.  This
+# option is required.
+use = egg:Paste#http
+
+# The port on which to listen.
+#port = 8080
+
+# The address on which to listen.  By default, only listen to localhost (Galaxy
+# will not be accessible over the network).  Use '0.0.0.0' to listen on all
+# available network interfaces.
+#host = 127.0.0.1
+
+# Use a threadpool for the web server instead of creating a thread for each
+# request.
+use_threadpool = True
+
+# Number of threads in the web server thread pool.
+#threadpool_workers = 10
+
+# Set the number of seconds a thread can work before you should kill it
+# (assuming it will never finish) to 3 hours.  Default is 600 (10 minutes).
+threadpool_kill_thread_limit = 10800
+
+# ---- Filters --------------------------------------------------------------
+
+# Filters sit between Galaxy and the HTTP server.
+
+# These filters are disabled by default.  They can be enabled with
+# 'filter-with' in the [app:main] section below.
+
+# Define the gzip filter.
+[filter:gzip]
+use = egg:Paste#gzip
+
+# Define the proxy-prefix filter.
+[filter:proxy-prefix]
+use = egg:PasteDeploy#prefix
+prefix = /galaxy
+
+# ---- Galaxy ---------------------------------------------------------------
+
+# Configuration of the Galaxy application.
+
+[app:main]
+
+# -- Application and filtering
+
+# The factory for the WSGI application.  This should not be changed.
+paste.app_factory = galaxy.web.buildapp:app_factory
+
+# If not running behind a proxy server, you may want to enable gzip compression
+# to decrease the size of data transferred over the network.  If using a proxy
+# server, please enable gzip compression there instead.
+#filter-with = gzip
+
+# If running behind a proxy server and Galaxy is served from a subdirectory,
+# enable the proxy-prefix filter and set the prefix in the
+# [filter:proxy-prefix] section above.
+#filter-with = proxy-prefix
+
+# If proxy-prefix is enabled and you're running more than one Galaxy instance
+# behind one hostname, you will want to set this to the same path as the prefix
+# in the filter above.  This value becomes the "path" attribute set in the
+# cookie so the cookies from each instance will not clobber each other.
+#cookie_path = None
+
+# -- Database
+
+# By default, Galaxy uses a SQLite database at 'database/universe.sqlite'.  You
+# may use a SQLAlchemy connection string to specify an external database
+# instead.  This string takes many options which are explained in detail in the
+# config file documentation.
+#database_connection = sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE
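+#
+# For example, a PostgreSQL connection string (the hostname, credentials and
+# database name here are hypothetical):
+#database_connection = postgresql://galaxy:change_me@localhost:5432/galaxy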
+
+# If the server logs errors about not having enough database pool connections,
+# you will want to increase these values, or consider running more Galaxy
+# processes.
+#database_engine_option_pool_size = 5
+#database_engine_option_max_overflow = 10
+
+# If using MySQL and the server logs the error "MySQL server has gone away",
+# you will want to set this to some positive value (7200 should work).
+#database_engine_option_pool_recycle = -1
+
+# If large database query results are causing memory or response time issues in
+# the Galaxy process, leave the result on the server instead.  This option is
+# only available for PostgreSQL and is highly recommended.
+#database_engine_option_server_side_cursors = False
+
+# Log all database transactions; this can be useful for debugging and
+# performance profiling.  Logging is done via Python's 'logging' module under
+# the qualname 'galaxy.model.orm.logging_connection_proxy'.
+#database_query_profiling_proxy = False
+
+# By default, Galaxy will use the same database to track user data and
+# tool shed install data.  There are many situations in which it is
+# valuable to separate these - for instance bootstrapping fresh Galaxy
+# instances with pretested installs.  The following option can be used to
+# separate the tool shed install database (all other options listed above
+# but prefixed with install_ are also available).
+#install_database_connection = sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE
+
+# Setting the following option to true will cause Galaxy to automatically
+# migrate the database forward after updates. This is not recommended for production
+# use.
+#database_auto_migrate = False
+
+# -- Files and directories
+
+# Dataset files are stored in this directory.
+#file_path = database/files
+
+# Temporary files are stored in this directory.
+#new_file_path = database/tmp
+
+# Tool config files define what tools are available in Galaxy.
+# Tools can be locally developed or installed from Galaxy tool sheds.
+# (config/tool_conf.xml.sample will be used if left unset and
+# config/tool_conf.xml does not exist).
+#tool_config_file = config/tool_conf.xml,config/shed_tool_conf.xml
+
+# Enable / disable checking if any tools defined in the above non-shed
+# tool_config_files (i.e., tool_conf.xml) have been migrated from the Galaxy
+# code distribution to the Tool Shed.  This setting should generally be set to
+# False only for development Galaxy environments that are often rebuilt from
+# scratch where migrated tools do not need to be available in the Galaxy tool
+# panel.  If the following setting remains commented, the default setting will
+# be True.
+#check_migrate_tools = True
+
+# Tool config maintained by tool migration scripts.  If you use the migration
+# scripts to install tools that have been migrated to the tool shed upon a new
+# release, they will be added to this tool config file.
+#migrated_tools_config = config/migrated_tools_conf.xml
+
+# File that contains the XML section and tool tags from all tool panel config
+# files integrated into a single file that defines the tool panel layout.  This
+# file can be changed by the Galaxy administrator to alter the layout of the
+# tool panel.  If not present, Galaxy will create it.
+#integrated_tool_panel_config = integrated_tool_panel.xml
+
+# Default path to the directory containing the tools defined in tool_conf.xml.
+# Other tool config files must include the tool_path as an attribute in the
+# <toolbox> tag.
+#tool_path = tools
+
+# -- Tool dependencies
+
+# Path to the directory in which tool dependencies are placed.  This is used by
+# the Tool Shed to install dependencies and can also be used by administrators
+# to manually install or link to dependencies.  For details, see:
+#   https://wiki.galaxyproject.org/Admin/Config/ToolDependencies
+# Set the string to None to explicitly disable tool dependency handling.
+# If this option is set to None or an invalid path, installing tools with
+# dependencies from the Tool Shed will fail.
+#tool_dependency_dir = database/dependencies
+
+# The dependency resolvers config file specifies an ordering and options for how
+# Galaxy resolves tool dependencies (requirement tags in Tool XML). The default
+# ordering is to use the Tool Shed for tools installed that way, use local
+# Galaxy packages, and then use Conda if available.
+# See https://github.com/galaxyproject/galaxy/blob/dev/doc/source/admin/dependency_resolvers.rst
+# for more information on these options.
+#dependency_resolvers_config_file = config/dependency_resolvers_conf.xml
+
+# The following Conda dependency resolution options will change the defaults for
+# all Conda resolvers, but multiple resolvers can be configured independently
+# in dependency_resolvers_config_file and these options overridden.
+
+# conda_prefix is the location on the filesystem where Conda packages and
+# environments are installed.
+# IMPORTANT: Due to a current limitation in conda, the total length of the
+# conda_prefix and the job_working_directory path should be less than 50 characters!
+#conda_prefix = <tool_dependency_dir>/_conda
+# Override the Conda executable to use, it will default to the one on the
+# PATH (if available) and then to <conda_prefix>/bin/conda
+#conda_exec =
+# Pass debug flag to conda commands.
+#conda_debug = False
+# conda channels to enable by default (http://conda.pydata.org/docs/custom-channels.html)
+#conda_ensure_channels = conda-forge,r,bioconda,iuc
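+# For example, to search a private channel (hypothetical name) ahead of the
+# defaults:
+#conda_ensure_channels = my-lab-channel,conda-forge,r,bioconda,iuc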
+# Set to True to instruct Galaxy to look for and install missing tool
+# dependencies before each job runs.
+#conda_auto_install = False
+# Set to True to instruct Galaxy to install Conda from the web automatically
+# if it cannot find a local copy and conda_exec is not configured.
+#conda_auto_init = False
+# You must set this to True if conda_prefix and job_working_directory are not on the same
+# volume, or some conda dependencies will fail to execute at job runtime.
+# Conda will copy packages content instead of creating hardlinks or symlinks.
+# This will prevent problems with some specific packages (perl, R), at the cost
+# of extra disk space usage and extra time spent copying packages.
+#conda_copy_dependencies = False
+
+# Certain dependency resolvers (namely Conda) take a considerable amount of
+# time to build an isolated job environment in the job_working_directory if the
+# job working directory is on a network share.  Set the following option to True
+# to cache the dependencies in a folder. This option is beta and should only be
+# used if you experience long waiting times before a job is actually submitted
+# to your cluster.
+#use_cached_dependency_manager = False
+
+# By default the tool_dependency_cache_dir is the _cache directory
+# of the tool dependency directory
+#tool_dependency_cache_dir = <tool_dependency_dir>/_cache
+
+# File containing the Galaxy Tool Sheds that should be made available to
+# install from in the admin interface (.sample used if default does not exist).
+#tool_sheds_config_file = config/tool_sheds_conf.xml
+
+# Set to True to enable monitoring of tools and tool directories
+# listed in any tool config file specified in tool_config_file option.
+# If changes are found, tools are automatically reloaded.  Watchdog
+# ( https://pypi.python.org/pypi/watchdog ) must be installed and available
+# to Galaxy to use this option.  Other values include 'auto', which will
+# attempt to watch tools if the watchdog library is available but won't fail
+# to load Galaxy if it is not, and 'polling', which uses a less efficient
+# monitoring scheme that may work in a wider range of scenarios than the
+# watchdog default.
+#watch_tools = False
+
+# Enable Galaxy to fetch Docker containers registered with quay.io that are
+# generated from tool requirements resolved through Conda. These containers
+# (when available) have been generated using mulled - https://github.com/mulled.
+# These containers are highly beta and availability will vary by tool.
+# This option will additionally only be used for job destinations with
+# Docker enabled.
+#enable_beta_mulled_containers = False
+
+# Container resolvers configuration (beta). Set up a file describing container
+# resolvers to use when discovering containers for Galaxy. If this is set to
+# None, the default set of container resolvers loaded is determined by
+# enable_beta_mulled_containers.
+#containers_resolvers_config_file = None
+
+# involucro is a tool used to build Docker containers for tools from Conda
+# dependencies referenced in tools as `requirement`s. The following path is
+# the location of involucro on the Galaxy host. This is ignored if the relevant
+# container resolver isn't enabled, and involucro will be installed on demand
+# unless involucro_auto_init is set to False.
+#involucro_path = database/dependencies/involucro
+
+# Install involucro as needed to build Docker containers for tools. Ignored if
+# relevant container resolver is not used.
+#involucro_auto_init = True
+
+# Enable automatic polling of relevant tool sheds to see if any updates
+# are available for installed repositories.  Ideally only one Galaxy
+# server process should be able to check for repository updates.  The
+# setting for hours_between_check should be an integer between 1 and 24.
+#enable_tool_shed_check = False
+#hours_between_check = 12
+
+# Enable use of an in-memory registry with bi-directional relationships between
+# repositories (i.e., in addition to lists of dependencies for a repository,
+# keep an in-memory registry of dependent items for each repository).
+#manage_dependency_relationships = False
+
+# XML config file that contains data table entries for the
+# ToolDataTableManager.  This file is manually maintained by the Galaxy
+# administrator (.sample used if default does not exist).
+#tool_data_table_config_path = config/tool_data_table_conf.xml
+
+# XML config file that contains additional data table entries for the
+# ToolDataTableManager.  This file is automatically generated based on the
+# current installed tool shed repositories that contain valid
+# tool_data_table_conf.xml.sample files.  At the time of installation, these
+# entries are automatically added to the following file, which is parsed and
+# applied to the ToolDataTableManager at server start up.
+#shed_tool_data_table_config = config/shed_tool_data_table_conf.xml
+
+# Directory where data used by tools is located, see the samples in that
+# directory and the wiki for help:
+#   https://wiki.galaxyproject.org/Admin/DataIntegration
+#tool_data_path = tool-data
+
+# Directory where Tool Data Table related files will be placed
+# when installed from a ToolShed. Defaults to tool_data_path.
+#shed_tool_data_path = tool-data
+
+# File containing old-style genome builds
+#builds_file_path = tool-data/shared/ucsc/builds.txt
+
+# Directory where chrom len files are kept, currently mainly used by trackster
+#len_file_path = tool-data/shared/ucsc/chrom
+
+# Datatypes config file(s), defines what data (file) types are available in
+# Galaxy (.sample is used if default does not exist).  If a datatype appears in
+# multiple files, the last definition is used (though the first sniffer is used
+# so limit sniffer definitions to one file).
+#datatypes_config_file = config/datatypes_conf.xml
+
+# Disable the 'Auto-detect' option for file uploads
+#datatypes_disable_auto = False
+
+# Visualizations config directory: where to look for individual visualization
+# plugins.  The path is relative to the Galaxy root dir.  To use an absolute
+# path begin the path with '/'.  This is a comma separated list.
+# Defaults to "config/plugins/visualizations".
+#visualization_plugins_directory = config/plugins/visualizations
+
+# Interactive environment plugins root directory: where to look for interactive
+# environment plugins.  By default none will be loaded.  Set to
+# config/plugins/interactive_environments to load Galaxy's stock plugins
+# (currently just IPython).  These will require Docker to be configured and
+# have security considerations, so proceed with caution. The path is relative to the
+# Galaxy root dir.  To use an absolute path begin the path with '/'.  This is a comma
+# separated list.
+#interactive_environment_plugins_directory =
+
+# Interactive tour directory: where to store interactive tour definition files.
+# Galaxy ships with several basic interface tours enabled, though a different
+# directory with custom tours can be specified here. The path is relative to the
+# Galaxy root dir.  To use an absolute path begin the path with '/'.  This is a comma
+# separated list.
+#tour_config_dir = config/plugins/tours
+
+# Webhooks directory: where to store webhooks - plugins to extend the Galaxy UI.
+# By default none will be loaded.  Set to config/plugins/webhooks/demo to load Galaxy's
+# demo webhooks.  To use an absolute path begin the path with '/'.  This is a comma
+# separated list.
+# webhooks_dir = config/plugins/webhooks
+
+# Each job is given a unique empty directory as its current working directory.
+# This option defines in what parent directory those directories will be
+# created.
+#job_working_directory = database/jobs_directory
+
+# If using a cluster, Galaxy will write job scripts and stdout/stderr to this
+# directory.
+#cluster_files_directory = database/pbs
+
+# Mako templates are compiled as needed and cached for reuse; this directory
+# is used for the cache.
+#template_cache_path = database/compiled_templates
+
+# Set to false to disable various checks Galaxy will do to ensure it
+# can run job scripts before attempting to execute or submit them.
+#check_job_script_integrity = True
+# Number of checks to execute if check_job_script_integrity is enabled.
+#check_job_script_integrity_count = 35
+# Time to sleep between checks if check_job_script_integrity is enabled (in seconds).
+#check_job_script_integrity_sleep = .25
+
+# Set the default shell used by non-containerized jobs Galaxy-wide. This
+# defaults to bash for all jobs and can be overridden at the destination
+# level for heterogeneous clusters. Conda job resolution requires bash or
+# zsh, so if this is switched to /bin/sh, for instance, Conda resolution
+# should be disabled. Containerized jobs always use /bin/sh, so for maximum
+# portability tool authors should assume generated commands run in sh.
+#default_job_shell = /bin/bash
+
+# Citation-related caching.  Tool citation information may be fetched from
+# external sources such as http://dx.doi.org/ by Galaxy - the following
+# parameters can be used to control the caching used to store this information.
+#citation_cache_type = file
+#citation_cache_data_dir = database/citations/data
+#citation_cache_lock_dir = database/citations/lock
+
+# External service types config file, defining what types of external_services
+# configurations are available in Galaxy (.sample is used if default does not
+# exist).
+#external_service_type_config_file = config/external_service_types_conf.xml
+
+# Path to the directory containing the external_service_types defined in the
+# config.
+#external_service_type_path = external_service_types
+
+# Tools with a number of outputs not known until runtime can write these
+# outputs to a directory for collection by Galaxy when the job is done.
+# Previously, this directory was new_file_path, but using one global directory
+# can cause performance problems, so using job_working_directory ('.' or cwd
+# when a job is run) is encouraged.  By default, both are checked to avoid
+# breaking existing tools.
+#collect_outputs_from = new_file_path,job_working_directory
+
+# -- Data Storage (Object Store)
+#
+# Configuration file for the object store
+# If this is set and exists, it overrides any other objectstore settings.
+# object_store_config_file = config/object_store_conf.xml
+
+
+# -- Mail and notification
+
+# Galaxy sends mail for various things: subscribing users to the mailing list
+# if they request it, password resets, notifications from the Galaxy Sample
+# Tracking system, reporting dataset errors, and sending activation emails.
+# To do this, it needs to send mail through an SMTP server, which you may
+# define here (host:port).
+# Galaxy will automatically try STARTTLS but will continue upon failure.
+#smtp_server = None
+
+# If your SMTP server requires a username and password, you can provide them
+# here (password in cleartext here, but if your server supports STARTTLS it
+# will be sent over the network encrypted).
+#smtp_username = None
+#smtp_password = None
+
+# If your SMTP server requires SSL from the beginning of the connection
+# smtp_ssl = False
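+
+# As a sketch, a typical authenticated relay on the submission port (the
+# hostname and credentials here are hypothetical):
+#smtp_server = smtp.example.org:587
+#smtp_username = galaxy-mailer
+#smtp_password = change_me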
+
+# On the user registration form, users may choose to join a mailing list. This
+# is the address used to subscribe to the list. Uncomment and leave empty if you
+# want to remove this option from the user registration form.
+#mailing_join_addr = galaxy-announce-join@bx.psu.edu
+
+# Datasets in an error state include a link to report the error.  Those reports
+# will be sent to this address.  Error reports are disabled if no address is
+# set.  This email is also shown as a contact address to users in case of
+# Galaxy misconfiguration and other problems they may encounter.
+#error_email_to = None
+
+# Email address to use in the 'From' field when sending emails for
+# account activations, workflow step notifications and password resets.
+# We recommend using a string in the following format:
+# Galaxy Project <galaxy-no-reply@example.com>
+# If not configured, '<galaxy-no-reply@HOSTNAME>' will be used.
+#email_from = None
+
+# URL of the support resource for the galaxy instance.  Used in activation
+# emails.
+#instance_resource_url = https://wiki.galaxyproject.org/
+
+# The e-mail domain blacklist is used to filter out users registering with
+# disposable email addresses.  If a user's address domain matches any domain
+# in the blacklist, registration is refused.
+#blacklist_file = config/disposable_email_blacklist.conf
+
+# Registration warning message is used to discourage people from registering
+# multiple accounts.  Applies mostly for the main Galaxy instance.
+# If no message specified the warning box will not be shown.
+#registration_warning_message = Please register only one account - we provide this service free of charge and have limited computational resources. Multi-accounts are tracked and will be subjected to account termination and data deletion.
+
+
+# -- Account activation
+
+# User account activation feature global flag.  If set to "False", the rest of
+# the Account activation configuration is ignored and user activation is
+# disabled (i.e. accounts are active from registration).
+# Activation also does not work if no SMTP server is defined.
+#user_activation_on = False
+
+# Activation grace period (in hours).  Activation is not forced (login is not
+# disabled) until the grace period has passed.  Users within the grace period
+# can't run jobs. Enter 0 to disable the grace period.
+# Users with OpenID logins have an unlimited grace period.
+#activation_grace_period = 3
+
+# Shown in warning box to users that were not activated yet.
+# In use only if activation_grace_period is set.
+#inactivity_box_content = Your account has not been activated yet.  Feel free to browse around and see what's available, but you won't be able to upload data or run jobs until you have verified your email address.
+
+# Password expiration period (in days). Users are required to change their
+# password every x days. Users will be redirected to the change password
+# screen when they log in after their password expires. Enter 0 to disable
+# password expiration.
+#password_expiration_period = 0
+
+# Galaxy Session Timeout
+# This provides a timeout (in minutes) after which a user will have to log back in.
+# A duration of 0 disables this feature.
+#session_duration = 0
+
+
+# -- Analytics
+
+# You can enter tracking code here to track visitors' behavior
+# through your Google Analytics account.  Example: UA-XXXXXXXX-Y
+#ga_code = None
+
+# -- Display sites
+
+# Galaxy can display data in various external browsers.  These options specify
+# which browsers should be available.  URLs and builds available at these
+# browsers are defined in the specified files.
+
+# If use_remote_user = True, display application servers will be denied access
+# to Galaxy and so displaying datasets in these sites will fail.
+# display_servers contains a list of hostnames which should be allowed to
+# bypass security to display datasets.  Please be aware that there are security
+# implications if this is allowed.  More details (including required changes to
+# the proxy server config) are available in the Apache proxy documentation on
+# the wiki.
+#
+# The list of servers in this sample config is for the UCSC Main, Test and
+# Archaea browsers, but the default if left commented is to not allow any
+# display sites to bypass security (you must uncomment the line below to allow
+# them).
+#display_servers = hgw1.cse.ucsc.edu,hgw2.cse.ucsc.edu,hgw3.cse.ucsc.edu,hgw4.cse.ucsc.edu,hgw5.cse.ucsc.edu,hgw6.cse.ucsc.edu,hgw7.cse.ucsc.edu,hgw8.cse.ucsc.edu,lowepub.cse.ucsc.edu
+
+# To disable the old-style display applications that are hardcoded into
+# datatype classes, set enable_old_display_applications = False.
+# This may be desirable if you are using the new-style, XML-defined display
+# applications, which have been defined for many of the datatypes that have
+# old-style ones.
+# There is also a potential security concern with the old-style applications,
+# where a malicious party could provide a link that appears to reference the
+# Galaxy server, but contains a redirect to a third-party server, tricking a
+# Galaxy user into accessing said site.
+#enable_old_display_applications = True
+
+# -- Next gen LIMS interface on top of existing Galaxy Sample/Request
+# management code.
+
+use_nglims = False
+nglims_config_file = tool-data/nglims.yaml
+
+# -- UI Localization
+
+# Show a message box under the masthead.
+#message_box_visible = False
+#message_box_content = None
+#message_box_class = info
+
+# Append "/{brand}" to the "Galaxy" text in the masthead.
+#brand = None
+
+# Format string used when showing date and time information.
+# The string may contain:
+# - the directives used by Python time.strftime() function (see
+#   https://docs.python.org/2/library/time.html#time.strftime ),
+# - $locale (complete format string for the server locale),
+# - $iso8601 (complete format string as specified by ISO 8601 international
+#   standard).
+# pretty_datetime_format = $locale (UTC)
+
+# URL (with scheme http/https) of the Galaxy instance as accessible within
+# your local network - if specified, this is used as a default by Pulsar file
+# staging and the IPython Docker container for communicating back with Galaxy
+# via the API.
+#galaxy_infrastructure_url = http://localhost:8080
+
+# If the above URL cannot be determined ahead of time in dynamic environments
+# but the port which should be used to access Galaxy can be, this should be
+# set to prevent Galaxy from having to guess.  For example, if Galaxy is
+# sitting behind a proxy with REMOTE_USER enabled, infrastructure shouldn't
+# talk to Python processes directly and this should be set to 80 or 443, etc.
+# If unset, this file will be read for a server block defining a port
+# corresponding to the webapp.
+#galaxy_infrastructure_web_port = 8080
+
+# The URL of the page to display in Galaxy's middle pane when loaded.  This can
+# be an absolute or relative URL.
+#welcome_url = /static/welcome.html
+
+# The URL linked by the "Galaxy/brand" text.
+#logo_url = /
+
+# The URL linked by the "Wiki" link in the "Help" menu.
+#wiki_url = https://wiki.galaxyproject.org/
+
+# The URL linked by the "Support" link in the "Help" menu.
+#support_url = https://wiki.galaxyproject.org/Support
+
+# The URL linked by the "How to Cite Galaxy" link in the "Help" menu.
+#citation_url = https://wiki.galaxyproject.org/CitingGalaxy
+
+# The URL linked by the "Search" link in the "Help" menu.
+#search_url = http://galaxyproject.org/search/usegalaxy/
+
+# The URL linked by the "Mailing Lists" link in the "Help" menu.
+#mailing_lists_url = https://wiki.galaxyproject.org/MailingLists
+
+# The URL linked by the "Videos" link in the "Help" menu.
+#screencasts_url = https://vimeo.com/galaxyproject
+
+# The URL linked by the "Terms and Conditions" link in the "Help" menu, as well
+# as on the user registration and login forms and in the activation emails.
+#terms_url = None
+
+# The URL linked by the "Galaxy Q&A" link in the "Help" menu
+# The Galaxy Q&A site is under development; when the site is done, this URL
+# will be set and uncommented.
+#qa_url =
+
+# Serve static content, which must be enabled if you're not serving it via a
+# proxy server.  These options should be self-explanatory and so are not
+# documented individually.  You can use these paths (or ones in the proxy
+# server) to point to your own styles.
+#static_enabled = True
+#static_cache_time = 360
+#static_dir = static/
+#static_images_dir = static/images
+#static_favicon_dir = static/favicon.ico
+#static_scripts_dir = static/scripts/
+#static_style_dir = static/june_2007_style/blue
+#static_robots_txt = static/robots.txt
+
+# Incremental Display Options
+
+#display_chunk_size = 65536
+
+# -- Advanced proxy features
+
+# For help on configuring the Advanced proxy features, see:
+# http://usegalaxy.org/production
+
+# Apache can handle file downloads (Galaxy-to-user) via mod_xsendfile.  Set
+# this to True to inform Galaxy that mod_xsendfile is enabled upstream.
+#apache_xsendfile = False
+
+# The same download handling can be done by nginx using X-Accel-Redirect.  This
+# should be set to the path defined in the nginx config as an internal redirect
+# with access to Galaxy's data files (see documentation linked above).
+#nginx_x_accel_redirect_base = False
+
+# nginx can make use of mod_zip to create zip files containing multiple library
+# files.  If using X-Accel-Redirect, this can be the same value as that option.
+#nginx_x_archive_files_base = False
+
+# If using compression in the upstream proxy server, use this option to disable
+# gzipping of library .tar.gz and .zip archives, since the proxy server will do
+# it faster on the fly.
+#upstream_gzip = False
+
+# The following default adds a header to web request responses that
+# will cause modern web browsers to not allow Galaxy to be embedded in
+# the frames of web applications hosted at other hosts - this can help
+# prevent a class of attack called clickjacking
+# (https://www.owasp.org/index.php/Clickjacking).  If you configure a
+# proxy in front of Galaxy - please ensure this header remains intact
+# to protect your users.  Uncomment and leave empty to not set the
+# `X-Frame-Options` header.
+#x_frame_options = SAMEORIGIN
+
+# nginx can also handle file uploads (user-to-Galaxy) via nginx_upload_module.
+# Configuration for this is complex and explained in detail in the
+# documentation linked above.  The upload store is a temporary directory in
+# which files uploaded by the upload module will be placed.
+#nginx_upload_store = False
+
+# This value overrides the action set on the file upload form, e.g. the web
+# path where the nginx_upload_module has been configured to intercept upload
+# requests.
+#nginx_upload_path = False
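+
+# A minimal sketch, assuming nginx_upload_module is configured to intercept
+# uploads at /_upload and stage them in a local directory (both values
+# hypothetical):
+#nginx_upload_store = database/tmp/upload_store
+#nginx_upload_path = /_upload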
+
+# Galaxy can also use nginx_upload_module to receive files staged out upon job
+# completion by remote job runners (i.e. Pulsar) that initiate staging
+# operations on the remote end.  See the Galaxy nginx documentation for the
+# corresponding nginx configuration.
+#nginx_upload_job_files_store = False
+#nginx_upload_job_files_path = False
+
+# Have Galaxy manage the dynamic proxy component for routing requests to
+# other services based on Galaxy's session cookie.  It will attempt to do
+# this by default, though you do need to install node+npm and do an npm
+# install from `lib/galaxy/web/proxy/js`.  It is generally more robust to
+# configure this externally, managing it the same way Galaxy itself is
+# managed.  If True, Galaxy will only launch the proxy if it is actually
+# going to be used (e.g. for IPython).
+#dynamic_proxy_manage=True
+
+# As of 16.04 Galaxy supports multiple proxy types: the original NodeJS
+# implementation and a new Golang single-binary, no-dependencies version.
+# Valid values are 'node' and 'golang'.
+#dynamic_proxy=node
+
+# The NodeJS dynamic proxy can use an SQLite database or a JSON file for IPC,
+# set that here.
+#dynamic_proxy_session_map=database/session_map.sqlite
+
+# Set the port and IP for the dynamic proxy to bind to; these must match
+# the external configuration if dynamic_proxy_manage is False.
+#dynamic_proxy_bind_port=8800
+#dynamic_proxy_bind_ip=0.0.0.0
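+
+# For example, a sketch for an externally managed proxy on a fixed address
+# (values hypothetical; they must mirror the external proxy's own config):
+#dynamic_proxy_manage=False
+#dynamic_proxy_bind_port=8800
+#dynamic_proxy_bind_ip=127.0.0.1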
+
+# Enable verbose debugging of Galaxy-managed dynamic proxy.
+#dynamic_proxy_debug=False
+
+# The dynamic proxy is proxied by an external proxy (e.g. apache frontend to
+# nodejs to wrap connections in SSL).
+#dynamic_proxy_external_proxy=False
+
+# Additionally, when the dynamic proxy is proxied by an upstream server, you'll
+# want to specify a prefixed URL so both Galaxy and the proxy reside under the
+# same path that your cookies are under. This will result in a URL like
+# https://FQDN/galaxy-prefix/gie_proxy for proxying.
+#dynamic_proxy_prefix=gie_proxy
+
+# The Golang proxy also manages the Docker containers more closely than the
+# NodeJS proxy, so it is able to expose more container-management options.
+
+# This attribute governs the minimum length of time between consecutive HTTP/WS
+# requests through the proxy before the proxy considers a container to be
+# inactive and kills it.
+#dynamic_proxy_golang_noaccess = 60
+
+# In order to kill containers, the golang proxy has to check at some interval
+# for possibly dead containers. This is exposed as a configurable parameter,
+# but the default value is probably fine.
+#dynamic_proxy_golang_clean_interval = 10
+
+# The golang proxy needs to know how to talk to your docker daemon. Currently
+# TLS is not supported, that will come in an update.
+#dynamic_proxy_golang_docker_address = unix:///var/run/docker.sock
+
+# The golang proxy uses a RESTful HTTP API for communication with Galaxy
+# instead of a JSON or SQLite file for IPC. If you do not specify this, it will
+# be set randomly for you. You should set this if you are managing the proxy
+# manually.
+#dynamic_proxy_golang_api_key = None
+
+# -- Logging and Debugging
+
+# If True, Galaxy will attempt to configure a simple root logger if a
+# "loggers" section does not appear in this configuration file.
+#auto_configure_logging = True
+
+# Verbosity of console log messages.  Acceptable values can be found here:
+# https://docs.python.org/2/library/logging.html#logging-levels
+#log_level = DEBUG
+
+# Print database operations to the server log (warning, quite verbose!).
+#database_engine_option_echo = False
+
+# Print database pool operations to the server log (warning, quite verbose!).
+#database_engine_option_echo_pool = False
+
+# Turn on logging of application events and some user events to the database.
+#log_events = True
+
+# Turn on logging of user actions to the database.  Actions currently logged
+# are grid views, tool searches, and use of "recently" used tools menu.  The
+# log_events and log_actions functionality will eventually be merged.
+#log_actions = True
+
+# Fluentd configuration.  Various events can be logged to the fluentd instance
+# configured below by enabling fluent_log.
+#fluent_log = False
+#fluent_host = localhost
+#fluent_port = 24224
+
+# Sanitize all HTML tool output.  By default, all tool output served as
+# 'text/html' will be sanitized thoroughly.  This can be disabled if you have
+# special tools that require unaltered output.  WARNING: disabling this does
+# make the Galaxy instance susceptible to XSS attacks initiated by your users.
+#sanitize_all_html = True
+
+# Whitelist sanitization file.
+# Datasets created by tools listed in this file are trusted and will not have
+# their HTML sanitized on display.  This can be manually edited or manipulated
+# through the Admin control panel -- see "Manage Display Whitelist"
+#sanitize_whitelist_file = config/sanitize_whitelist.txt
+
+# By default Galaxy will serve non-HTML tool output that may potentially
+# contain browser-executable JavaScript content as plain text.  This will,
+# for instance, cause SVG datasets to not render properly, so it may be
+# disabled by setting the following option to True.
+#serve_xss_vulnerable_mimetypes = False
+
+# Return an Access-Control-Allow-Origin response header that matches the Origin
+# header of the request if that Origin hostname matches one of the strings or
+# regular expressions listed here. This is a comma separated list of hostname
+# strings or regular expressions beginning and ending with /.
+# E.g. mysite.com,google.com,usegalaxy.org,/^[\w\.]*example\.com/
+# See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Access_control_CORS
+#allowed_origin_hostnames = None
+
+# Set the following to True to use IPython nbconvert to build HTML from IPython
+# notebooks in Galaxy histories.  This process may allow users to execute
+# arbitrary code or serve arbitrary HTML.  If enabled, IPython must be
+# available and on Galaxy's PATH; to do this, run
+# `pip install jinja2 pygments ipython` in Galaxy's virtualenv.
+#trust_ipython_notebook_conversion = False
+
+# Debug enables access to various config options useful for development and
+# debugging: use_lint, use_profile, use_printdebug and use_interactive.  It
+# also causes the files used by PBS/SGE (submission script, output, and error)
+# to remain on disk after the job is complete.
+#debug = False
+
+# Check for WSGI compliance.
+#use_lint = False
+
+# Run the Python profiler on each request.
+#use_profile = False
+
+# Intercept print statements and show them on the returned page.
+#use_printdebug = True
+
+# Enable live debugging in your browser.  This should NEVER be enabled on a
+# public site.  Enabled in the sample config for development.
+use_interactive = True
+
+# Write thread status periodically to 'heartbeat.log' (careful, uses disk
+# space rapidly!).  Useful to determine why your processes may be consuming a
+# lot of CPU.
+#use_heartbeat = False
+
+# Control the period (in seconds) between dumps. Use -1 to disable. Regardless
+# of this setting, if use_heartbeat is enabled, you can send a Galaxy process
+# (unless running with uWSGI) SIGUSR1 (`kill -USR1`) to force a dump.
+#heartbeat_interval = 20
+
+# Heartbeat log filename. Can accept the template variables {server_name} and
+# {pid}
+#heartbeat_log = heartbeat_{server_name}.log
+
+# Log to Sentry
+# Sentry is an open source logging and error aggregation platform.  Setting
+# sentry_dsn will enable the Sentry middleware and errors will be sent to the
+# indicated sentry instance.  This connection string is available in your
+# sentry instance under <project_name> -> Settings -> API Keys.
+#sentry_dsn = None
+
+# Log to statsd
+# Statsd is an external statistics aggregator (https://github.com/etsy/statsd)
+# Enabling the following options will cause Galaxy to log request timing and
+# other statistics to the configured statsd instance.  The statsd_prefix is
+# useful if you are running multiple Galaxy instances and want to segment
+# statistics between them within the same aggregator.
+#statsd_host=
+#statsd_port=8125
+#statsd_prefix=galaxy
+
+# -- Data Libraries
+
+# These library upload options are described in much more detail in the wiki:
+#   https://wiki.galaxyproject.org/Admin/DataLibraries/UploadingLibraryFiles
+
+# Add an option to the library upload form which allows administrators to
+# upload a directory of files.
+#library_import_dir = None
+
+# Add an option to the library upload form which allows authorized
+# non-administrators to upload a directory of files.  The configured directory
+# must contain sub-directories named the same as the non-admin user's Galaxy
+# login (email).  The non-admin user is restricted to uploading files or
+# sub-directories of files contained in their directory.
+#user_library_import_dir = None
+
+# Add an option to the admin library upload tool allowing admins to paste
+# filesystem paths to files and directories in a box, and these paths will be
+# added to a library.  Set to True to enable.  Please note the security
+# implication that this will give Galaxy Admins access to anything your Galaxy
+# user has access to.
+#allow_library_path_paste = False
+
+# Users may choose to download multiple files from a library in an archive.  By
+# default, Galaxy allows users to select from a few different archive formats
+# if testing shows that Galaxy is able to create files using these formats.
+# Specific formats can be disabled with this option; separate more than one
+# format with commas.  Available formats are currently 'zip', 'gz', and 'bz2'.
+#disable_library_comptypes =
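+# For example, to offer only zip archives (a sketch):
+#disable_library_comptypes = gz,bz2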
+
+# Some sequencer integration features in beta allow you to automatically
+# transfer datasets.  This is done using a lightweight transfer manager which
+# runs outside of Galaxy (but is spawned by it automatically).  Galaxy will
+# communicate with this manager over the port specified here.
+#transfer_manager_port = 8163
+
+# Search data libraries with whoosh
+#enable_whoosh_library_search = True
+# Whoosh indexes are stored in this directory.
+#whoosh_index_dir = database/whoosh_indexes
+
+# Search data libraries with lucene
+#enable_lucene_library_search = False
+# maximum file size to index for searching, in MB
+#fulltext_max_size = 500
+#fulltext_noindex_filetypes = bam,sam,wig,bigwig,fasta,fastq,fastqsolexa,fastqillumina,fastqsanger
+# base URL of server providing search functionality using lucene
+#fulltext_url = http://localhost:8081
+
+# -- Toolbox Search
+
+# The following boosts are used to customize this instance's toolbox search.
+# The higher the boost, the more importance the scoring algorithm gives to the
+# given field.  Section refers to the tool group in the tool panel.  The rest
+# of the fields are the tool's attributes.
+# tool_name_boost = 9
+# tool_section_boost = 3
+# tool_description_boost = 2
+# tool_label_boost = 1
+# tool_stub_boost = 5
+# tool_help_boost = 0.5
+
+# Limits the number of results in toolbox search.  Can be used to tweak how many
+# results will appear.
+# tool_search_limit = 20
+
+# -- Users and Security
+
+# Galaxy encodes various internal values when these values will be output in
+# some format (for example, in a URL or cookie).  You should set a key to be
+# used by the algorithm that encodes and decodes these values.  It can be any
+# string.
+# One simple way to generate a value for this is with the shell command:
+#   python -c 'import time; print time.time()' | md5sum | cut -f 1 -d ' '
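+# Alternatively (a sketch, assuming the OpenSSL command-line tool is
+# available on the host):
+#   openssl rand -base64 24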
+#id_secret = USING THE DEFAULT IS NOT SECURE!
+
+# User authentication can be delegated to an upstream proxy server (usually
+# Apache).  The upstream proxy should set a REMOTE_USER header in the request.
+# Enabling remote user disables regular logins.  For more information, see:
+#   https://wiki.galaxyproject.org/Admin/Config/ApacheProxy
+#use_remote_user = False
+
+# If use_remote_user is enabled and your external authentication
+# method just returns bare usernames, set a default mail domain to be appended
+# to usernames, to become your Galaxy usernames (email addresses).
+#remote_user_maildomain = None
+
+# If use_remote_user is enabled, the header that the upstream proxy provides
+# the remote username in defaults to HTTP_REMOTE_USER (the 'HTTP_' is prepended
+# by WSGI).  This option allows you to change the header.  Note, you still need
+# to prepend 'HTTP_' to the header in this option, but your proxy server should
+# *not* include 'HTTP_' at the beginning of the header name.
+#remote_user_header = HTTP_REMOTE_USER
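+
+# For example, if your proxy passes the username in an 'X-Forwarded-User'
+# header (a hypothetical choice), WSGI exposes it as:
+#remote_user_header = HTTP_X_FORWARDED_USER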
+
+# If use_remote_user is enabled, anyone who can log in to the Galaxy host may
+# impersonate any other user by simply sending the appropriate header.  Thus a
+# secret shared between the upstream proxy server and Galaxy is required.
+# If anyone other than the Galaxy user is using the server, then apache/nginx
+# should pass a value in the header 'GX_SECRET' that is identical to the one
+# below.
+#remote_user_secret = USING THE DEFAULT IS NOT SECURE!
+
+# If use_remote_user is enabled, you can set this to a URL that will log your
+# users out.
+#remote_user_logout_href = None
+
+# If your proxy and/or authentication source does not normalize e-mail
+# addresses or user names being passed to Galaxy - set the following option
+# to True to force these to lower case.
+#normalize_remote_user_email = False
+
+# If an e-mail address is specified here, it will hijack remote user mechanics
+# (``use_remote_user``) and have the webapp inject a single fixed user. This
+# has the effect of turning Galaxy into a single user application with no
+# login or external proxy required. Such applications should not be exposed to
+# the world.
+#single_user =
+
+# Administrative users - set this to a comma-separated list of valid Galaxy
+# users (email addresses).  These users will have access to the Admin section
+# of the server, and will have access to create users, groups, roles,
+# libraries, and more.  For more information, see:
+#   https://wiki.galaxyproject.org/Admin/Interface
+#admin_users = None
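+# For example (hypothetical addresses):
+#admin_users = admin@example.org,ops@example.org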
+
+# Force everyone to log in (disable anonymous access).
+#require_login = False
+
+# Show the site's welcome page (see welcome_url) alongside the login page
+# (even if require_login is True)
+#show_welcome_with_login = False
+
+# Allow unregistered users to create new accounts (otherwise, they will have to
+# be created by an admin).
+#allow_user_creation = True
+
+# Allow administrators to delete accounts.
+#allow_user_deletion = False
+
+# Allow administrators to log in as other users (useful for debugging)
+#allow_user_impersonation = False
+
+# Allow users to remove their datasets from disk immediately (otherwise,
+# datasets will be removed after a time period specified by an administrator in
+# the cleanup scripts run via cron)
+#allow_user_dataset_purge = True
+
+# By default, users' data will be public, but setting this to True will cause
+# it to be private.  Does not affect existing users and data, only ones created
+# after this option is set.  Users may still change their default back to
+# public.
+#new_user_dataset_access_role_default_private = False
+
+# Expose user list.  Setting this to True will expose the user list to
+# authenticated users.  This makes sharing datasets in smaller Galaxy instances
+# much easier as they can type a name/email and have the correct user show up.
+# This makes less sense on large public Galaxy instances where that data
+# shouldn't be exposed.  For semi-public Galaxies, it may make sense to expose
+# just the username and not email, or vice versa.
+#expose_user_name = False
+#expose_user_email = False
+
+# -- Beta features
+
+# Enable new run workflow form
+#run_workflow_toolform_upgrade = True
+
+# Enable Galaxy to communicate directly with a sequencer
+#enable_sequencer_communication = False
+
+# Enable the new interface for installing tools from Tool Shed
+# via the API. Admin menu will list both if enabled.
+#enable_beta_ts_api_install = False
+
+# Set the following to a number of threads greater than 1 to spawn
+# a Python task queue for dealing with large tool submissions (either
+# through the tool form or as part of an individual workflow step across a
+# large collection). The size of a "large" tool request is controlled by
+# the second parameter below and defaults to 10. This affects workflow
+# scheduling and web processes, not job handlers.
+#tool_submission_burst_threads = 1
+#tool_submission_burst_at = 10
+
+# Enable beta workflow modules that should not yet be considered part of Galaxy's
+# stable API.
+#enable_beta_workflow_modules = False
+
+# Force usage of Galaxy's beta workflow scheduler under certain circumstances -
+# this workflow scheduling forces Galaxy to schedule workflows in the background
+# so initial submission of the workflows is significantly sped up. This does
+# however force the user to refresh their history manually to see newly scheduled
+# steps (for "normal" workflows - steps are still scheduled far in advance of
+# them being queued and scheduling here doesn't refer to actual cluster job
+# scheduling).
+# Workflows containing more than the specified number of steps will always use
+# Galaxy's beta workflow scheduling.
+#force_beta_workflow_scheduled_min_steps=250
+# Switch to using Galaxy's beta workflow scheduling for all workflows involving
+# collections.
+#force_beta_workflow_scheduled_for_collections=False
+
+# Enable authentication via OpenID.  Allows users to log in to their Galaxy
+# account by authenticating with an OpenID provider.
+#enable_openid = False
+# .sample used if default does not exist
+#openid_config_file = config/openid_conf.xml
+#openid_consumer_cache_path = database/openid_consumer_cache
+
+# XML config file that allows the use of different authentication providers
+# (e.g. LDAP) instead or in addition to local authentication (.sample is used
+# if default does not exist).
+#auth_config_file = config/auth_conf.xml
+
+# Optional list of email addresses of API users who can make calls on behalf of
+# other users.
+#api_allow_run_as = None
+
+# Master key that allows many API admin actions to be used without actually
+# having a defined admin user in the database/config.  Only set this if you
+# need to bootstrap Galaxy; you probably do not want to set this on public
+# servers.
+#master_api_key = changethis
+
+# Enable tool tags (associating tools with tags).  This has its own option
+# since its implementation has a few performance implications on startup for
+# large servers.
+#enable_tool_tags = False
+
+# Enable a feature when running workflows.  When enabled, default datasets
+# are selected for "Set at Runtime" inputs from the history such that the
+# same input will not be selected twice, unless there are more inputs than
+# compatible datasets in the history.
+# When False, the most recently added compatible item in the history will
+# be used for each "Set at Runtime" input, independent of others in the
+# workflow.
+#enable_unique_workflow_defaults = False
+
+# The URL to the myExperiment instance being used (omit scheme but include port)
+#myexperiment_url = www.myexperiment.org:80
+
+# Enable Galaxy's "Upload via FTP" interface.  You'll need to install and
+# configure an FTP server (we've used ProFTPd since it can use Galaxy's
+# database for authentication) and set the following two options.
+
+# This should point to a directory containing subdirectories matching users'
+# identifier (defaults to e-mail), where Galaxy will look for files.
+#ftp_upload_dir = None
+
+# This should be the hostname of your FTP server, which will be provided to
+# users in the help text.
+#ftp_upload_site = None
+
+# User attribute to use as subdirectory in calculating default ftp_upload_dir
+# pattern. By default this will be email, so a user's FTP upload directory
+# will be ${ftp_upload_dir}/${user.email}. You can set this to other
+# attributes such as id or username, though.
+#ftp_upload_dir_identifier = email
+
+# Python string template used to determine an FTP upload directory for a
+# particular user.
+#ftp_upload_dir_template = ${ftp_upload_dir}/${ftp_upload_dir_identifier}
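+
+# As a sketch, keying upload directories on username under a fixed base path
+# (the path here is hypothetical):
+#ftp_upload_dir = /srv/galaxy/ftp
+#ftp_upload_dir_identifier = username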
+
+# This should be set to False to prevent Galaxy from deleting uploaded FTP files
+# as it imports them.
+#ftp_upload_purge = True
+
+# Enable enforcement of quotas.  Quotas can be set from the Admin interface.
+#enable_quotas = False
+
+# This option allows users to see the full path of datasets via the "View
+# Details" option in the history.  Administrators can always see this.
+#expose_dataset_path = False
+
+# Data manager configuration options
+# Allow non-admin users to view available Data Manager options.
+#enable_data_manager_user_view = False
+# File where Data Managers are configured (.sample used if default does not
+# exist).
+#data_manager_config_file = config/data_manager_conf.xml
+# File where Tool Shed based Data Managers are configured.
+#shed_data_manager_config_file = config/shed_data_manager_conf.xml
+# Directory to store Data Manager based tool-data; defaults to tool_data_path.
+#galaxy_data_manager_data_path = tool-data
+
+# -- Job Execution
+
+# To increase performance of job execution and the web interface, you can
+# separate Galaxy into multiple processes.  There are more than one way to do
+# this, and they are explained in detail in the documentation:
+#
+#   https://wiki.galaxyproject.org/Admin/Config/Performance/Scaling
+
+# By default, Galaxy manages and executes jobs from within a single process and
+# notifies itself of new jobs via in-memory queues.  Jobs are run locally on
+# the system on which Galaxy is started.  Advanced job running capabilities can
+# be configured through the job configuration file.
+#job_config_file = config/job_conf.xml
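+
+# As a sketch, a minimal job_conf.xml that routes all jobs to the local
+# runner with four worker threads (mirroring the stock sample file) looks
+# roughly like:
+#   <job_conf>
+#       <plugins>
+#           <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner" workers="4"/>
+#       </plugins>
+#       <handlers>
+#           <handler id="main"/>
+#       </handlers>
+#       <destinations>
+#           <destination id="local" runner="local"/>
+#       </destinations>
+#   </job_conf>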
+
+# In multiprocess configurations, notification between processes about new jobs
+# must be done via the database.  In single process configurations, this can be
+# done in memory, which is a bit quicker.
+#track_jobs_in_database = True
+
+# This enables splitting of jobs into tasks, if specified by the particular tool
+# config.
+# This is a new feature and not recommended for production servers yet.
+#use_tasked_jobs = False
+#local_task_queue_workers = 2
+
+# Enable job recovery (if Galaxy is restarted while cluster jobs are running,
+# it can "recover" them when it starts).  This is not safe to use if you are
+# running more than one Galaxy server using the same database.
+#enable_job_recovery = True
+
+# Although it is fairly reliable, setting metadata can occasionally fail.  In
+# these instances, you can choose to retry setting it internally or leave it in
+# a failed state (since retrying internally may cause the Galaxy process to be
+# unresponsive).  If this option is set to False, the user will be given the
+# option to retry externally, or set metadata manually (when possible).
+#retry_metadata_internally = True
+
+# Very large metadata values can cause Galaxy crashes.  This option limits
+# the maximum size (in bytes used in memory, not the resulting database value
+# size) of a metadata value Galaxy will attempt to save with a dataset.  Use
+# 0 to disable this feature.  The default is 5MB, but values as low as 1MB
+# seem reasonable.
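+# (For reference, 5242880 = 5 * 1024 * 1024 bytes; a 1MB limit would be
+# 1048576.)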
+#max_metadata_value_size = 5242880
+
+# If (for example) you run on a cluster and your datasets (by default,
+# database/files/) are mounted read-only, this option will override tool output
+# paths to write outputs to the working directory instead, and the job manager
+# will move the outputs to their proper place in the dataset directory on the
+# Galaxy server after the job completes.
+#outputs_to_working_directory = False
+
+# If your network filesystem's caching prevents the Galaxy server from seeing
+# the job's stdout and stderr files when it completes, you can retry reading
+# these files.  The job runner will retry the number of times specified below,
+# waiting 1 second between tries.  For NFS, you may want to try the -noac mount
+# option (Linux) or -actimeo=0 (Solaris).
+#retry_job_output_collection = 0
+
+# Clean up various bits of jobs left on the filesystem after completion.  These
+# bits include the job working directory, external metadata temporary files,
+# and DRM stdout and stderr files (if using a DRM).  Possible values are:
+# always, onsuccess, never
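+# For example, to keep job files around only when a job fails (useful for
+# debugging), you could set: cleanup_job = onsuccess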
+#cleanup_job = always
+
+# For sites where all users in Galaxy match users on the system on which Galaxy
+# runs, the DRMAA job runner can be configured to submit jobs to the DRM as the
+# actual user instead of as the user running the Galaxy server process.  For
+# details on these options, see the documentation at:
+#
+# https://wiki.galaxyproject.org/Admin/Config/Performance/Cluster
+#
+#drmaa_external_runjob_script = scripts/drmaa_external_runner.py
+#drmaa_external_killjob_script = scripts/drmaa_external_killer.py
+#external_chown_script = scripts/external_chown_script.py
+
+# File to source to set up the environment when running jobs.  By default, the
+# environment in which the Galaxy server starts is used when running jobs
+# locally, and the environment set up per the DRM's submission method and
+# policy is used when running jobs on a cluster (try testing with `qsub` on the
+# command line).  environment_setup_file can be set to the path of a file on
+# the cluster that should be sourced by the user to set up the environment
+# prior to running tools.  This can be especially useful for running jobs as
+# the actual user, to remove the need to configure each user's environment
+# individually.
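+#
+# A minimal sketch of such a file (the paths and module names below are
+# hypothetical):
+#
+#   # /shared/galaxy/env_setup.sh - sourced before each tool runs
+#   export PATH=/shared/tools/bin:$PATH
+#   module load samtools/1.2    # if the cluster provides environment modules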
+#environment_setup_file = None
+
+# Optional file containing the definition of job resource data entry fields.
+# These fields will be presented to users in the tool forms and allow them to
+# overwrite default job resources such as number of processors, memory and
+# walltime.
+#job_resource_params_file = config/job_resource_params_conf.xml
+
+# If using job concurrency limits (configured in job_config_file), several
+# extra database queries must be performed to determine the number of jobs a
+# user has dispatched to a given destination.  By default, these queries will
+# happen for every job that is waiting to run, but if cache_user_job_count is
+# set to True, it will only happen once per iteration of the handler queue.
+# Although better for performance due to reduced queries, the tradeoff is a
+# greater possibility that jobs will be dispatched past the configured limits
+# if running many handlers.
+#cache_user_job_count = False
+
+# -- ToolBox filtering
+
+# Modules from lib/galaxy/tools/toolbox/filters/ can be specified in
+# the following lines.  tool_* filters will be applied for all users
+# and can not be changed by them.  user_tool_* filters will be shown
+# under user preferences and can be toggled on and off at
+# runtime.  The examples shown below are not real defaults (no custom
+# filters are applied by default); they can be enabled by renaming
+# example.py.sample in the filters directory to example.py.
+
+#tool_filters =
+#tool_label_filters =
+#tool_section_filters =
+#user_tool_filters = examples:restrict_upload_to_admins, examples:restrict_encode
+#user_tool_section_filters = examples:restrict_text
+#user_tool_label_filters = examples:restrict_upload_to_admins, examples:restrict_encode
+
+# The base modules that are searched for modules as described above
+# can be modified and modules external to Galaxy can be searched by
+# modifying the following option.
+#toolbox_filter_base_modules = galaxy.tools.toolbox.filters,galaxy.tools.filters
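+#
+# A minimal sketch of a custom filter module (the (context, tool) signature
+# follows the bundled examples.py.sample; the module and function names here
+# are hypothetical). Saved as lib/galaxy/tools/filters/myfilters.py, it could
+# be enabled with: tool_filters = myfilters:hide_upload_for_non_admins
+#
+#   def hide_upload_for_non_admins(context, tool):
+#       """Return True to keep the tool visible for this user."""
+#       if tool.name == "Upload File":
+#           return context.trans.user_is_admin()
+#       return True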
+
+# -- Galaxy Application Internal Message Queue
+
+# Galaxy uses AMQP internally for communicating between processes.
+# For examples of connection strings, see http://ask.github.io/kombu/userguide/connections.html
+#
+# Without specifying anything here, Galaxy will first attempt to use your
+# specified database_connection above.  If that's not specified either, Galaxy
+# will automatically create and use a separate sqlite database located in your
+# <galaxy>/database folder (indicated in the commented-out line below).
+
+#amqp_internal_connection = sqlalchemy+sqlite:///./database/control.sqlite?isolation_level=IMMEDIATE
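+#
+# For example, to use an external RabbitMQ broker instead (same URL syntax as
+# the kombu documentation linked above; the credentials are hypothetical):
+#amqp_internal_connection = amqp://guest:guest@localhost:5672//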
+
+# Galaxy real time communication server settings
+#enable_communication_server = False
+#communication_server_host = http://localhost
+#communication_server_port = 7070
+# persistent_communication_rooms is a comma-separated list of rooms that should always be available.
+#persistent_communication_rooms =
+
+
+# ---- Galaxy External Message Queue -------------------------------------------------
+
+# Galaxy uses Advanced Message Queuing Protocol (AMQP) to receive messages from
+# external sources like barcode scanners.  Galaxy has been tested against
+# RabbitMQ AMQP implementation.  For Galaxy to receive messages from a message
+# queue, the RabbitMQ server has to be set up with a user account and other
+# parameters listed below.  The 'host' and 'port' fields should point to where
+# the RabbitMQ server is running.
+
+[galaxy_amqp]
+
+#host = 127.0.0.1
+#port = 5672
+#userid = galaxy
+#password = galaxy
+#virtual_host = galaxy_messaging_engine
+#queue = galaxy_queue
+#exchange = galaxy_exchange
+#routing_key = bar_code_scanner
+#rabbitmqctl_path = /path/to/rabbitmqctl
diff --git a/config/job_conf.xml.sample_advanced b/config/job_conf.xml.sample_advanced
new file mode 100644
index 0000000..4192008
--- /dev/null
+++ b/config/job_conf.xml.sample_advanced
@@ -0,0 +1,784 @@
+<?xml version="1.0"?>
+<job_conf>
+    <plugins workers="4">
+        <!-- "workers" is the number of threads for the runner's work queue.
+             The default from <plugins> is used if not defined for a <plugin>.
+          -->
+        <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/>
+        <plugin id="pbs" type="runner" load="galaxy.jobs.runners.pbs:PBSJobRunner" workers="2"/>
+        <plugin id="drmaa" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner">
+            <!-- Different DRMs handle successfully completed jobs differently;
+                 these options can be changed to handle such differences and
+                 are explained in detail on the Galaxy wiki. Defaults are shown -->
+            <param id="invalidjobexception_state">ok</param>
+            <param id="invalidjobexception_retries">0</param>
+            <param id="internalexception_state">ok</param>
+            <param id="internalexception_retries">0</param>
+        </plugin>
+        <plugin id="sge" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner">
+            <!-- Override the $DRMAA_LIBRARY_PATH environment variable -->
+            <param id="drmaa_library_path">/sge/lib/libdrmaa.so</param>
+        </plugin>
+        <plugin id="cli" type="runner" load="galaxy.jobs.runners.cli:ShellJobRunner" />
+        <plugin id="condor" type="runner" load="galaxy.jobs.runners.condor:CondorJobRunner" />
+        <plugin id="slurm" type="runner" load="galaxy.jobs.runners.slurm:SlurmJobRunner" />
+        <plugin id="dynamic" type="runner">
+            <!-- The dynamic runner is not a real job running plugin and is
+                 always loaded, so it does not need to be explicitly stated in
+                 <plugins>. However, if you wish to change the base module
+                 containing your dynamic rules, you can do so.
+
+                 The `load` attribute is not required (and ignored if
+                 included).
+            -->
+            <param id="rules_module">galaxy.jobs.rules</param>
+        </plugin>
+        <!-- Pulsar runners (see more at https://pulsar.readthedocs.org) -->
+        <plugin id="pulsar_rest" type="runner" load="galaxy.jobs.runners.pulsar:PulsarRESTJobRunner">
+          <!-- Allow optimized HTTP calls with libcurl (defaults to urllib) -->
+          <!-- <param id="transport">curl</param> -->
+
+          <!-- *Experimental Caching*: The next parameter enables caching.
+                It likely will not work with newer features such as MQ support.
+
+                If this is enabled, be sure to specify a `file_cache_dir` in
+                the remote Pulsar server's main configuration file.
+          -->
+          <!-- <param id="cache">True</param> -->
+        </plugin>
+        <plugin id="pulsar_mq" type="runner" load="galaxy.jobs.runners.pulsar:PulsarMQJobRunner">
+          <!-- AMQP URL to connect to. -->
+          <param id="amqp_url">amqp://guest:guest@localhost:5672//</param>
+          <!-- URL that remote Pulsar apps should use to transfer files
+               to/from this Galaxy instance. This can be unspecified/empty if
+               galaxy_infrastructure_url is set in galaxy.ini.
+          -->
+          <param id="galaxy_url">http://localhost:8080</param>
+          <!-- AMQP does not guarantee that a published message is received by
+               the AMQP server, so Galaxy/Pulsar can request that the consumer
+               acknowledge messages and will resend them if acknowledgement is
+               not received after a configurable timeout.  -->
+          <!-- <param id="amqp_acknowledge">False</param> -->
+          <!-- Galaxy reuses Pulsar's persistence_directory parameter (via the
+               Pulsar client lib) to store a record of received
+               acknowledgements, and to keep track of messages which have not
+               been acknowledged. -->
+          <!-- <param id="persistence_directory">/path/to/dir</param> -->
+          <!-- Number of seconds to wait for an acknowledgement before
+               republishing a message. -->
+          <!-- <param id="amqp_republish_time">30</param> -->
+          <!-- Pulsar job manager to communicate with (see Pulsar
+               docs for information on job managers). -->
+          <!-- <param id="manager">_default_</param> -->
+          <!-- The AMQP client can provide an SSL client certificate (e.g. for
+               validation); the following options configure that certificate
+               (see for reference:
+                 http://kombu.readthedocs.org/en/latest/reference/kombu.connection.html
+               ). If you simply want to use SSL but not use/validate a client
+               cert, just use the ?ssl=1 query on the amqp URL instead. -->
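+          <!-- e.g. (hypothetical broker URL): amqp://guest:guest@mq.example.org:5671//?ssl=1 -->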
+          <!-- <param id="amqp_connect_ssl_ca_certs">/path/to/cacert.pem</param> -->
+          <!-- <param id="amqp_connect_ssl_keyfile">/path/to/key.pem</param> -->
+          <!-- <param id="amqp_connect_ssl_certfile">/path/to/cert.pem</param> -->
+          <!-- <param id="amqp_connect_ssl_cert_reqs">cert_required</param> -->
+          <!-- By default, the AMQP consumer uses a nonblocking connection with
+               a 0.2 second timeout. In testing, this works fine for
+               unencrypted AMQP connections, but with SSL it will cause the
+               client to reconnect to the server after each timeout. Set this to a
+               higher value (in seconds), or to `None` to use blocking connections. -->
+          <!-- <param id="amqp_consumer_timeout">None</param> -->
+        </plugin>
+        <plugin id="pulsar_legacy" type="runner" load="galaxy.jobs.runners.pulsar:PulsarLegacyJobRunner" shell="none">
+          <!-- Pulsar job runner with default parameters matching those of
+               the old LWR job runner. This runner should still be used if,
+               for instance, your Pulsar server is running on a Windows
+               machine.
+
+               These destinations still need to target a Pulsar server;
+               older LWR plugins and destinations that still work in Galaxy
+               can target LWR servers, but this support should be considered
+               deprecated and will disappear in a future release of Galaxy.
+          -->
+        </plugin>
+        <plugin id="pulsar_embedded" type="runner" load="galaxy.jobs.runners.pulsar:PulsarEmbeddedJobRunner">
+          <!-- The embedded Pulsar runner starts a Pulsar app
+               internal to Galaxy and communicates with it directly.
+               This may be useful, for instance, when Pulsar
+               staging is important but a Pulsar server is
+               unneeded (e.g. if compute servers cannot
+               mount Galaxy's files but Galaxy can mount a
+               scratch directory available on compute). -->
+          <!-- Specify a complete description of the Pulsar app
+               to create. If this configuration defines more than
+               one manager - you can specify the manager name 
+               using the "manager" destination parameter. For more
+               information on configuring a Pulsar app see:
+
+               https://github.com/galaxyproject/pulsar/blob/master/app.yml.sample
+               http://pulsar.readthedocs.org/en/latest/configure.html
+          -->
+          <!-- <param id="pulsar_config">path/to/pulsar/app.yml</param> -->
+        </plugin>
+        <plugin id="k8s" type="runner" load="galaxy.jobs.runners.kubernetes:kubernetes">
+            <!-- The Kubernetes (k8s) plugin allows Galaxy to send jobs to a k8s cluster that shares a filesystem with Galaxy.
+
+                 This requires installing pykube. Install pykube by activating Galaxy's virtualenv
+                 and then executing the following pip command:
+
+                 pip install -e git+https://github.com/pcm32/pykube.git@feature/allMergedFeatures#egg=pykube
+
+                 The shared file system needs to be exposed to k8s through a Persistent Volume (rw) and a Persistent
+                 Volume Claim. An example of a Persistent Volume, in YAML (the access modes, reclaim policy and
+                 path are the relevant parts), could be (persistent_volume.yaml):
+
+                    kind: PersistentVolume
+                    apiVersion: v1
+                    metadata:
+                      name: pv-galaxy-nfs
+                      labels:
+                        type: nfs
+                    spec:
+                      capacity:
+                        storage: 10Gi
+                      accessModes:
+                        - ReadWriteMany
+                      persistentVolumeReclaimPolicy: Retain
+                      nfs:
+                        path: /scratch1/galaxy_data
+                        server: 192.168.64.1
+
+                 The path set (nfs:path: in the example) needs to be a parent directory of the directories used for
+                 the "file_path" and "new_file_path" variables in the galaxy.ini file. Clearly, for this particular
+                 example to work, there needs to be an NFS server serving that directory at that IP. Please make sure
+                 that you use a reasonable storage size for your setup (possibly larger than the 10Gi written here).
+                 An example of the corresponding volume claim could be (pv_claim.yaml):
+
+                    kind: PersistentVolumeClaim
+                    apiVersion: v1
+                    metadata:
+                      name: galaxy-pvc
+                    spec:
+                      accessModes:
+                        - ReadWriteMany
+                      volumeName: pv-galaxy-nfs
+                      resources:
+                        requests:
+                          storage: 2Gi
+
+                 The volume claim needs to reference the name of the volume in spec:volumeName. The name of the claim
+                 (metadata:name) is referenced in the plugin definition (see below), through the param
+                 "k8s_persistent_volume_claim_name". These two k8s objects need to be created before Galaxy can use them:
+
+                 kubectl create -f <path/to/persistent_volume.yaml>
+                 kubectl create -f <path/to/pv_claim.yaml>
+
+                 pointing of course to the same Kubernetes cluster that you intend to use.
+            -->
+
+            <param id="k8s_config_path">/path/to/kubeconfig</param>
+            <!-- This is the path to the kube config file, normally at ~/.kube/config, but that will depend on
+                 your installation. This is the file that tells the plugin where the k8s cluster is, the access
+                 credentials, etc. This parameter is not necessary, and is ignored, if k8s_use_service_account is set to true. -->
+
+            <param id="k8s_use_service_account">false</param>
+            <!-- For use when Kubernetes should be accessed from inside a Pod running Galaxy (that is,
+                 Galaxy is running inside Kubernetes). If this variable is true, then the previous k8s_config_path is
+                 not required and is ignored. This value only needs to be set when setting it to true. -->
+
+            <param id="k8s_persistent_volume_claim_name">galaxy_pvc</param>
+            <!-- The name of the Persistent Volume Claim (PVC) to be used (details above); needs to match the PVC's
+                 metadata:name -->
+
+            <param id="k8s_persistent_volume_claim_mount_path">/scratch1/galaxy_data</param>
+            <!-- The mount path needs to be a parent directory of the "file_path" and "new_file_path" paths
+                 set in galaxy.ini (or the equivalent general Galaxy config file). This is the mount path of the
+                 PVC within the docker container that will actually run the tool -->
+
+            <param id="k8s_namespace">galaxy-instanceA</param>
+            <!-- The namespace to be used on the Kubernetes cluster, if different from default, this needs to be set
+                 accordingly in the PV and PVC detailed above -->
+
+            <param id="k8s_pod_retrials">4</param>
+            <!-- Allows pods to be retried up to this number of times before marking the Galaxy job failed. k8s is
+                 essentially a state setter, so by default it will try to take a submitted job to successful completion.
+                 A job submits pods until the number of successes (1 in this use case) is achieved, on the assumption
+                 that whatever is making the pods fail will be fixed (such as a stale disk or a dead node being
+                 restarted). This option sets a limit on retries, so that after that number of failed pods the job is
+                 re-scaled to zero (no execution) and the stderr/stdout of the k8s job is reported in Galaxy (and the
+                 Galaxy job set to failed). -->
+        </plugin>
+        <plugin id="godocker" type="runner" load="galaxy.jobs.runners.godocker:GodockerJobRunner">
+            <!-- Go-Docker is a batch computing/cluster management tool using Docker.
+                 See https://bitbucket.org/osallou/go-docker for more details. -->
+            <!-- REST-based runner; submits jobs to Go-Docker. -->
+            <param id="godocker_master">GODOCKER_URL</param>
+            <!-- Specify the instance of GoDocker -->
+            <param id="user">USERNAME</param>
+            <!-- GoDocker username -->
+            <param id="key">APIKEY</param>
+            <!-- GoDocker API key -->
+            <param id="godocker_project">galaxy</param>
+            <!-- Specify the project present in the GoDocker setup -->
+        </plugin>
+
+    </plugins>
+    <handlers default="handlers">
+        <!-- Additional job handlers - the id should match the name of a
+             [server:<id>] in galaxy.ini.
+         -->
+        <handler id="handler0" tags="handlers"/>
+        <handler id="handler1" tags="handlers"/>
+        <!-- Handlers will load all plugins defined in the <plugins> collection
+             above by default, but can be limited to a subset using <plugin>
+             tags. This is useful for heterogeneous environments where the DRMAA
+             plugin would need to be loaded more than once with different
+             configs.
+         -->
+        <handler id="sge_handler">
+            <plugin id="sge"/>
+        </handler>
+        <handler id="special_handler0" tags="special_handlers"/>
+        <handler id="special_handler1" tags="special_handlers"/>
+        <handler id="trackster_handler"/>
+    </handlers>
+    <destinations default="local">
+        <!-- Destinations define details about remote resources and how jobs
+             should be executed on those remote resources.
+         -->
+        <destination id="local" runner="local"/>
+        <destination id="multicore_local" runner="local">
+          <param id="local_slots">4</param> <!-- Specify GALAXY_SLOTS for local jobs. -->
+          <!-- Warning: The local slot count doesn't tie up additional worker threads;
+               to prevent over-allocating the machine, define a second local runner with
+               a different name and fewer workers to run this destination. -->
+          <param id="embed_metadata_in_job">True</param>
+          <!-- The above parameter will be the default (with no option to set
+               it to False) in an upcoming release of Galaxy, but you can
+               try it early - it will slightly speed up local jobs by
+               embedding metadata calculation in the job script itself.
+          -->
+          <job_metrics />
+          <!-- The above element demonstrates an embedded job metrics definition - see
+               job_metrics_conf.xml.sample for full documentation on possible nested
+               elements. This empty block will simply disable job metrics for the
+               corresponding destination. -->
+        </destination>
+        <destination id="docker_local" runner="local">
+          <param id="docker_enabled">true</param>
+          <!-- docker_volumes can be used to configure volumes to expose to docker.
+               For added isolation, append :ro to a path to mount it read-only.
+               Galaxy will attempt to infer a reasonable set of defaults for which
+               volumes should be exposed and how, based on Galaxy's settings and the
+               destination - but be sure to add (read-only) any library paths or
+               data indices that may be needed.
+          -->
+          <!--
+          <param id="docker_volumes">$defaults,/mnt/galaxyData/libraries:ro,/mnt/galaxyData/indices:ro</param>
+          -->
+          <!-- For a stock Galaxy instance and traditional job runner $defaults will
+               expand out as:
+
+               $galaxy_root:ro,$tool_directory:ro,$job_directory:ro,$working_directory:rw,$default_file_path:rw
+
+               This assumes most of what is needed is available under Galaxy's root directory,
+               the tool directory, and Galaxy's file_path (if using the object store creatively
+               you will definitely need to expand the defaults).
+
+               This configuration allows any docker instance to write to any Galaxy
+               file - for greater isolation set outputs_to_working_directory in
+               galaxy.ini. This will cause $defaults to allow writing to much
+               less. It will then expand as follows:
+
+               $galaxy_root:ro,$tool_directory:ro,$job_directory:ro,$working_directory:rw,$default_file_path:ro
+
+               If using Pulsar, the defaults will be even further restricted because
+               Pulsar will (by default) stage all needed inputs into the job's job_directory
+               (so there is no need to allow the docker container to read all the
+               files - let alone write over them). The defaults in this case become:
+
+               $job_directory:ro,$tool_directory:ro,$job_directory/outputs:rw,$working_directory:rw
+
+               Python string.Template is used to expand volumes; the values $defaults,
+               $galaxy_root, $default_file_path, $tool_directory, and $working_directory
+               are available to all jobs, and $job_directory is also available for
+               Pulsar jobs.
+          -->
+          <!-- One can run docker using the volumes-from tag by setting the following
+               parameter. For more information on volumes-from, check out the following
+               docker tutorial: https://docs.docker.com/userguide/dockervolumes/
+          -->
+          <!-- <param id="docker_volumes_from">parent_container_name</param> -->
+          <!-- Control the memory allocatable by the docker container with the following option:
+          -->
+          <!-- <param id="docker_memory">24G</param> -->
+          <!-- By default Docker will need to be runnable by Galaxy using
+               password-less sudo - this can be configured by adding the
+               following line to the sudoers file of all compute nodes
+               with docker enabled:
+
+               galaxy  ALL = (root) NOPASSWD: SETENV: /usr/bin/docker
+
+               The following option can be set to false to disable sudo (docker
+               must likewise be configured to allow this).
+          -->
+          <!-- <param id="docker_sudo">false</param> -->
+          <!-- The following option can be used to tweak the sudo command used by
+               default. -->
+          <!-- <param id="docker_sudo_cmd">/usr/bin/sudo -extra_param</param> -->
+          <!-- By default, the docker container will not have any networking
+               enabled. Host networking can be bridged by uncommenting the next option:
+               http://docs.docker.io/reference/run/#network-settings
+          -->
+          <!-- <param id="docker_net">bridge</param> -->
+          <!-- By default, a container will live on past its run. By
+               adding the '--rm' flag to the command line, the container
+               will be removed automatically after the program is complete.
+          -->
+          <!-- <param id="docker_auto_rm">true</param> -->
+          <!-- Override which user to launch Docker container as - defaults to 
+               Galaxy's user id. For remote job execution (e.g. Pulsar) set to
+               remote job user. Leave empty to not use the -u argument with
+               Docker. -->
+          <!-- <param id="docker_set_user">$UID</param> -->
+          <!-- Pass extra arguments to the docker run command not covered by the
+               above options. -->
+          <!-- <param id="docker_run_extra_arguments"></param> -->
+          <!-- The following param can be used to tweak the docker command. -->
+          <!-- <param id="docker_cmd">/usr/local/custom_docker/docker</param> -->
+          <!-- The following can be used to connect to the docker server in different
+               ways (translated as the -H argument to the docker client). -->
+          <!-- <param id="docker_host">unix:///var/run/docker.sock</param> -->
+          <!-- <param id="docker_host">:5555</param> -->
+          <!-- <param id="docker_host">:5555</param> -->
+          <!-- <param id="docker_host">tcp://127.0.0.1:4243</param> -->
+
+          <!-- If the deployer wants to use docker for isolation, but does not
+               trust the tool's specified container - a destination-wide override
+               can be set. This will cause all jobs on this destination to use
+               that docker image. -->
+          <!-- <param id="docker_container_id_override">busybox:ubuntu-14.04</param> -->
+
+          <!-- Likewise, if the deployer wants to use docker for isolation and
+               does trust the tool's specified container - but also wants tools not
+               configured to run in a container to have one - the following option
+               can provide a fallback. -->
+          <!-- <param id="docker_default_container_id">busybox:ubuntu-14.04</param> -->
+
+          <!-- If the destination should be secured to only allow containerized jobs
+               the following parameter may be set for the job destination. Not all,
+               or even most, tools available in Galaxy core or in the Tool Shed
+               support Docker yet so this option may require a lot of extra work for
+               the deployer. -->
+          <!-- <param id="require_container">true</param> -->
+        </destination>
+        <destination id="pbs" runner="pbs" tags="mycluster"/>
+        <destination id="pbs_longjobs" runner="pbs" tags="mycluster,longjobs">
+            <!-- Define parameters that are native to the job runner plugin. -->
+            <param id="Resource_List">walltime=72:00:00</param>
+        </destination>
+        <destination id="remote_cluster" runner="drmaa" tags="longjobs"/>
+        <destination id="java_cluster" runner="drmaa">
+          <!-- Allow users that are not mapped to any real users to run jobs
+               as the Galaxy user (a fallback). Default is False.
+          -->
+          <param id="allow_guests">True</param>
+          <!-- Set to False if cluster nodes don't share Galaxy's library;
+               metadata calculation will then be performed locally after the job finishes.
+          -->
+          <param id="embed_metadata_in_job">True</param>
+          <!-- set arbitrary environment variables at runtime. General
+               dependencies for tools should be configured via
+               tool_dependency_dir and package options and these
+               options should be reserved for defining cluster
+               specific options.
+          -->
+          <env id="_JAVA_OPTIONS">-Xmx6G</env>
+          <env id="ANOTHER_OPTION" raw="true">'5'</env> <!-- raw disables auto quoting -->
+          <env file="/mnt/java_cluster/environment_setup.sh" /> <!-- will be sourced -->
+          <env exec="module load javastuff/2.10" /> <!-- will be sourced -->
+          <!-- files to source and exec statements will be handled on remote
+               clusters. These don't need to be available on the Galaxy server
+               itself.
+          -->
+        </destination>
+        <destination id="real_user_cluster" runner="drmaa">
+            <!-- Make sure to set up the three real user parameters in galaxy.ini. -->
+        </destination>
+        <destination id="dynamic" runner="dynamic">
+            <!-- A destination that represents a method in the dynamic runner.
+
+                 foo should be a Python function defined in any file in
+                 lib/galaxy/jobs/rules - a minimal sketch follows this destination.
+            -->
+            <param id="function">foo</param>
+        </destination>
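+        <!-- A minimal sketch of such a rule (dynamic rule arguments are
+             injected by name; the arguments used here and the routing logic
+             are illustrative assumptions - the returned strings must be
+             destination ids defined in this file):
+
+             def foo(user_email, tool_id):
+                 if tool_id == 'upload1':
+                     return 'local'
+                 if user_email and user_email.endswith('@example.org'):
+                     return 'pbs_longjobs'
+                 return 'pbs'
+        -->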
+        <destination id="dtd_destination" runner="dynamic">
+            <!-- DTD is a special dynamic job destination type that builds up
+                 rules given a YAML-based DSL (see config/tool_destinations.yml.sample
+                 for the syntax).
+            -->
+            <param id="type">dtd</param>
+        </destination>
+        <destination id="load_balance" runner="dynamic">
+            <param id="type">choose_one</param>
+            <!-- Randomly assign jobs to various static destination ids -->
+            <param id="destination_ids">cluster1,cluster2,cluster3</param>
+        </destination>
+        <destination id="load_balance_with_data_locality" runner="dynamic">
+            <!-- Randomly assign jobs to various static destination ids,
+                 but keep jobs in the same workflow invocation together and,
+                 for jobs run outside of workflows, keep jobs in the same
+                 history together.
+            -->
+            <param id="type">choose_one</param>
+            <param id="destination_ids">cluster1,cluster2,cluster3</param>
+            <param id="hash_by">workflow_invocation,history</param>
+        </destination>
+        <destination id="burst_out" runner="dynamic">
+            <!-- Burst out from static destination local_cluster_8_core to
+            static destination shared_cluster_8_core when there are about
+            50 Galaxy jobs assigned to any of the local_cluster_XXX
+            destinations (either running or queued). If there are fewer
+            than 50 jobs, just use the local_cluster_8_core destination.
+
+            Uncomment the job_states parameter to make this bursting happen when
+            roughly 50 jobs are queued instead.
+            -->
+            <param id="type">burst</param>
+            <param id="from_destination_ids">local_cluster_8_core,local_cluster_1_core,local_cluster_16_core</param>
+            <param id="to_destination_id">shared_cluster_8_core</param>
+            <param id="num_jobs">50</param>
+            <!-- <param id="job_states">queued</param> -->
+        </destination>
+        <destination id="docker_dispatch" runner="dynamic">
+            <!-- The following dynamic destination type will send all tools that
+            support docker to the static destination defined by
+            docker_destination_id (docker_cluster in this example) and all
+            other tools to default_destination_id (normal_cluster in this
+            example).
+            -->
+            <param id="type">docker_dispatch</param>
+            <param id="docker_destination_id">docker_cluster</param>
+            <param id="default_destination_id">normal_cluster</param>
+        </destination>
+        <destination id="secure_pulsar_rest_dest" runner="pulsar_rest">
+            <param id="url">https://examle.com:8913/</param>
+            <!-- If set, private_token must match token in remote Pulsar's
+                 configuration. -->
+            <param id="private_token">123456789changeme</param>
+            <!-- Uncomment the following statement to disable file staging (e.g.
+                 if there is a shared file system between Galaxy and the Pulsar
+                 server). Alternatively, the action can be set to 'copy' - to replace
+                 http transfers with file system copies, 'remote_transfer' to cause
+                 the Pulsar to initiate HTTP transfers instead of Galaxy, or
+                 'remote_copy' to cause Pulsar to initiate file system copies.
+                 If setting this to 'remote_transfer' be sure to specify a
+                 'galaxy_url' attribute on the runner plugin above. -->
+            <!-- <param id="default_file_action">none</param> -->
+            <!-- The above option is just the default; the transfer behavior
+                 none|copy|http can be configured on a per-path basis via the
+                 following file. See Pulsar documentation for more details and
+                 examples.
+            -->
+            <!-- <param id="file_action_config">file_actions.yaml</param> -->
+            <!-- The non-legacy Pulsar runners will attempt to resolve Galaxy
+                 dependencies remotely - to enable this, set a tool_dependency_dir
+                 in Pulsar's configuration (it can work with all the same dependency
+                 resolution mechanisms as Galaxy - Tool Shed installs, Galaxy
+                 packages, etc.). To disable this behavior, set the following parameter
+                 to none. To generate the dependency resolution command locally,
+                 set the following parameter to local.
+            -->
+            <!-- <param id="dependency_resolution">none</params> -->
+            <!-- Uncomment following option to enable setting metadata on remote
+                 Pulsar server. The 'use_remote_datatypes' option is available for
+                 determining whether to use remotely configured datatypes or local
+                 ones (both alternatives are a little brittle). -->
+            <!-- <param id="remote_metadata">true</param> -->
+            <!-- <param id="use_remote_datatypes">false</param> -->
+            <!-- <param id="remote_property_galaxy_home">/path/to/remote/galaxy-central</param> -->
+            <!-- If remote Pulsar server is configured to run jobs as the real user,
+                 uncomment the following line to pass the current Galaxy user
+                 along. -->
+            <!-- <param id="submit_user">$__user_name__</param> -->
+            <!-- Various other submission parameters can be passed along to the Pulsar
+                 whose use will depend on the remote Pulsar's configured job manager.
+                 For instance:
+            -->
+            <!-- <param id="submit_native_specification">-P bignodes -R y -pe threads 8</param> -->
+            <!-- Disable parameter rewriting and rewrite generated commands
+                 instead. This may be required if the remote host is a Windows machine,
+                 but probably not otherwise.
+            -->
+            <!-- <param id="rewrite_parameters">false</params> -->
+        </destination>
+        <destination id="pulsar_mq_dest" runner="pulsar_mq" >
+            <!-- The RESTful Pulsar client sends a request to Pulsar
+                 to populate various system properties. This
+                 extra step can be disabled, and these properties calculated
+                 on the client instead, by setting jobs_directory and
+                 specifying any additional remote_property_ of
+                 interest. This is not optional when using message
+                 queues.
+            -->
+            <param id="jobs_directory">/path/to/remote/pulsar/files/staging/</param>
+            <!-- Otherwise, MQ and Legacy Pulsar destinations can be supplied
+                 all the same destination parameters as the RESTful client documented
+                 above (though url and private_token are ignored when using an MQ).
+            -->
+        </destination>
+        <destination id="ssh_torque" runner="cli">
+            <param id="shell_plugin">SecureShell</param>
+            <param id="job_plugin">Torque</param>
+            <param id="shell_username">foo</param>
+            <param id="shell_hostname">foo.example.org</param>
+            <param id="job_Resource_List">walltime=24:00:00,ncpus=4</param>
+        </destination>
+
+        <!-- Example CLI Slurm runner. -->
+        <destination id="ssh_slurm" runner="cli">
+            <param id="shell_plugin">SecureShell</param>
+            <param id="job_plugin">Slurm</param>
+            <param id="shell_username">foo</param>
+            <param id="shell_hostname">my_host</param>
+            <param id="job_time">2:00:00</param>
+            <param id="job_ncpus">4</param>
+            <param id="job_partition">my_partition</param>
+        </destination>
+
+        <destination id="condor" runner="condor">
+            <!-- With no params, jobs are submitted to the 'vanilla' universe with:
+                    notification = NEVER
+                    getenv = true
+                 Additional/override query ClassAd params can be specified with
+                 <param> tags.
+            -->
+            <param id="request_cpus">8</param>
+
+            <!-- Recent versions of HTCondor do have a `docker` universe to handle containers.
+                 Activate this feature by explicitly specifying the `docker` universe.
+            -->
+            <!-- <param id="universe">docker</param> -->
+
+            <!-- If the tool has a container specified, e.g.:
+
+                <requirements>
+                    <container type="docker">bgruening/galaxy-stable</container>
+                </requirements>
+
+            that container is used, unless the job destination specifies an
+            override with docker_container_id_override. If neither of
+            these is set, a default container can be specified
+            with docker_default_container_id. The resolved
+            container ID will be passed along to condor as
+            the docker_image submission parameter.
+            -->
+            <!-- <param id="docker_default_container_id">busybox:ubuntu-14.04</param> -->
+        </destination>
+
+        <!-- Jobs that hit the walltime on one destination can be automatically
+             resubmitted to another destination. Walltime detection is
+             currently only implemented in the slurm runner.
+
+             Multiple resubmit tags can be defined, the first resubmit matching
+             the terminal condition of a job will be used.
+
+             The 'condition' attribute is optional, if not present, the
+             resubmit destination will be used for all conditions. The
+             conditions currently implemented are:
+
+               - "walltime_reached"
+               - "memory_limit_reached"
+
+             The 'handler' tag is optional, if not present, the job's original
+             handler will be reused for the resubmitted job.
+        -->
+        <destination id="short_fast" runner="slurm">
+            <param id="nativeSpecification">--time=00:05:00 --nodes=1</param>
+            <resubmit condition="walltime_reached" destination="long_slow" handler="sge_handler" />
+        </destination>
+        <destination id="long_slow" runner="sge">
+            <!-- The destination that you resubmit jobs to can be any runner type -->
+            <param id="nativeSpecification">-l h_rt=96:00:00</param>
+        </destination>
+        <destination id="smallmem" runner="slurm">
+            <param id="nativeSpecification">--mem-per-cpu=512</param>
+            <resubmit condition="memory_limit_reached" destination="bigmem" />
+        </destination>
+        <!-- Any tag param in this file can be set using an environment variable or using
+             values from galaxy.ini using the from_environ and from_config attributes
+             respectively. The text of the param will still be used if that environment variable
+             or config value isn't set.
+        -->
+        <destination id="params_from_environment" runner="slurm">
+            <param id="nativeSpecification" from_environ="NATIVE_SPECIFICATION">--time=00:05:00 --nodes=1</param>
+            <param id="docker_enabled" from_config="use_docker">false</param>
+        </destination>
+
+        <destination id="my-tool-container" runner="k8s">
+            <!-- For the kubernetes (k8s) runner, each container is a destination.
+
+                 Make sure that the container is able to execute the calls that will be passed by the Galaxy-built
+                 command. Most notably, containers that execute scripts through an interpreter in the form
+                 Rscript my-script.R <arguments>
+                 should have this wrapped, as the container's set working directory won't be the one actually used by
+                 Galaxy (Galaxy creates a new working directory and moves to it). The recommendation is hence to wrap
+                 this type of call in a shell script, and leave that script with execution privileges on the PATH of
+                 the container:
+
+                 RUN echo '#!/bin/bash' > /usr/local/bin/myScriptExec
+                 RUN echo 'Rscript /path/to/my-script.R "$@"' >> /usr/local/bin/myScriptExec
+                 RUN chmod a+x /usr/local/bin/myScriptExec
+
+            -->
+
+            <!-- The following four fields assemble the container's full name:
+                 docker pull <repo>/<owner>/<image>:tag
+            -->
+            <param id="docker_repo_override">my-docker-registry.org</param>
+            <param id="docker_owner_override">superbioinfo</param>
+            <param id="docker_image_override">my-tool</param>
+            <param id="docker_tag_override">latest</param>
+            <!-- Alternatively you could specify a different type of container, such as rkt (not tested with Kubernetes)
+                <param id="rkt_repo_override">my-docker-registry.org</param>
+                <param id="rkt_owner_override">superbioinfo</param>
+                <param id="rkt_image_override">my-tool</param>
+                <param id="rkt_tag_override">latest</param>
+            -->
+            <!-- You can also allow the destination to accept the docker container set in the tool, and only fall back
+                 to the docker image set by this destination if the tool doesn't set a docker container, by using the
+                 "default" suffix instead of "override".
+                <param id="docker_repo_default">my-docker-registry.org</param>
+                <param id="docker_owner_default">superbioinfo</param>
+                <param id="docker_image_default">my-tool</param>
+                <param id="docker_tag_default">latest</param>
+            -->
+            <param id="max_pod_retrials">3</param>
+            <!-- Allows pods to be retried up to this number of times before marking the Galaxy job failed. k8s is
+                 essentially a state setter, so by default it will try to take a submitted job to successful completion.
+                 A job submits pods until the number of successes (1 in this use case) is achieved, on the assumption
+                 that whatever is making the pods fail will be fixed (such as a stale disk or a dead node being
+                 restarted). This option sets a limit on retries, so that after that number of failed pods the job is
+                 re-scaled to zero (no execution) and the stderr/stdout of the k8s job is reported in Galaxy (and the
+                 Galaxy job set to failed).
+
+                 Overrides the runner config. (Not implemented yet.)
+            -->
+            <!-- REQUIRED: To play nicely with the existing Galaxy setup for containers. This could be set
+                 internally by the runner, though. -->
+            <param id="docker_enabled">true</param>
+        </destination>
+        <destination id="god" runner="godocker">
+            <!-- The following are configurations for the container -->
+            <param id="docker_enabled">true</param>
+            <param id="docker_cpu">1</param>
+            <param id="docker_memory">2</param>
+            <param id="docker_default_container_id">centos:latest</param>
+            <!-- Specify the image on which the jobs have to be executed -->
+            <param id="godocker_volumes"></param>
+            <!-- Mount the GoDocker volumes;
+                 volumes must be separated by commas,
+                 e.g.: <param id="godocker_volumes">home,galaxy</param>
+            -->
+            <param id="virtualenv">false</param>
+            <!-- If a tool executed in the container requires Galaxy's virtualenv,
+                 enable it by setting the value to true.
+                 Disable the venv by setting the value to false.
+            -->
+        </destination>
+
+        <!-- Templatized destinations - macros can be used to create templated
+        destinations with reduced XML duplication. Here we are creating 4
+        destinations in 4 lines instead of 28 using the macros defined below.
+        -->
+        <expand macro="foohost_destination" id="foo_small" ncpus="1" walltime="1:00:00" />
+        <expand macro="foohost_destination" id="foo_medium" ncpus="2" walltime="4:00:00" />
+        <expand macro="foohost_destination" id="foo_large" ncpus="8" walltime="24:00:00" />
+        <expand macro="foohost_destination" id="foo_longrunning" ncpus="1" walltime="48:00:00" />
+    </destinations>
+    <resources default="default">
+      <!-- Group different parameters defined in job_resource_params_conf.xml
+           together and assign ids to these groups. The tool section below can map
+           tools to different groups. This is experimental functionality!
+      -->
+      <group id="default"></group>
+      <group id="memoryonly">memory</group>
+      <group id="all">processors,memory,time,project</group>
+    </resources>
+    <tools>
+        <!-- Tools can be configured to use specific destinations or handlers,
+             identified by either the "id" or "tags" attribute.  If assigned to
+             a tag, a handler or destination that matches that tag will be
+             chosen at random.
+         -->
+        <tool id="foo" handler="trackster_handler">
+            <param id="source">trackster</param>
+        </tool>
+        <tool id="bar" destination="dynamic"/>
+        <!-- The next example defines a resource group to insert into the tool interface
+             and pass to the dynamic destination (as the resource_params argument). -->
+        <tool id="longbar" destination="dynamic" resources="all" />
+        <tool id="baz" handler="special_handlers" destination="bigmem"/>
+
+        <!-- Finally, for the Kubernetes runner, the following connects a particular tool to be executed with
+             the container of choice in Kubernetes.
+        -->
+        <tool id="my-tool" destination="my-tool-container"/>
+    </tools>
+    <limits>
+        <!-- Certain limits can be defined. The 'concurrent_jobs' limits all
+             control the number of jobs that can be "active" at a time, that
+             is, dispatched to a runner and in the 'queued' or 'running'
+             states.
+
+             A race condition exists that will allow destination_* concurrency
+             limits to be surpassed when multiple handlers are allowed to
+             handle jobs for the same destination. To prevent this, assign all
+             jobs for a specific destination to a single handler.
+        -->
+        <!-- registered_user_concurrent_jobs:
+                Limit on the number of jobs a user with a registered Galaxy
+                account can have active across all destinations.
+        -->
+        <limit type="registered_user_concurrent_jobs">2</limit>
+        <!-- anonymous_user_concurrent_jobs:
+                Likewise, but for unregistered/anonymous users.
+        -->
+        <limit type="anonymous_user_concurrent_jobs">1</limit>
+        <!-- destination_user_concurrent_jobs:
+                The number of jobs a user can have active in the specified
+                destination, or across all destinations identified by the
+                specified tag. (formerly: concurrent_jobs)
+        -->
+        <limit type="destination_user_concurrent_jobs" id="local">1</limit>
+        <limit type="destination_user_concurrent_jobs" tag="mycluster">2</limit>
+        <limit type="destination_user_concurrent_jobs" tag="longjobs">1</limit>
+        <!-- destination_total_concurrent_jobs:
+                The number of jobs that can be active in the specified
+                destination (or across all destinations identified by the
+                specified tag) by any/all users.
+        -->
+        <limit type="destination_total_concurrent_jobs" id="local">16</limit>
+        <limit type="destination_total_concurrent_jobs" tag="longjobs">100</limit>
+        <!-- walltime:
+                Amount of time a job can run (in any destination) before it
+                will be terminated by Galaxy.
+         -->
+        <limit type="walltime">24:00:00</limit>
+        <!-- output_size:
+                Size that any defined tool output can grow to before the job
+                will be terminated. This does not include temporary files
+                created by the job. Format is flexible, e.g.:
+                '10GB' = '10g' = '10240 Mb' = '10737418240'
+        -->
+        <limit type="output_size">10GB</limit>
+    </limits>
+    <macros>
+        <xml name="foohost_destination" tokens="id,walltime,ncpus">
+            <destination id="@ID@" runner="cli">
+                <param id="shell_plugin">SecureShell</param>
+                <param id="job_plugin">Torque</param>
+                <param id="shell_username">galaxy</param>
+                <param id="shell_hostname">foohost_destination.example.org</param>
+                <param id="job_Resource_List">walltime=@WALLTIME@,ncpus=@NCPUS@</param>
+            </destination>
+        </xml>
+    </macros>
+</job_conf>
diff --git a/config/job_conf.xml.sample_basic b/config/job_conf.xml.sample_basic
new file mode 100644
index 0000000..66d9531
--- /dev/null
+++ b/config/job_conf.xml.sample_basic
@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+<!-- A sample job config that explicitly configures job running the way it is configured by default (if there is no explicit config). -->
+<job_conf>
+    <plugins>
+        <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner" workers="4"/>
+    </plugins>
+    <handlers>
+        <handler id="main"/>
+    </handlers>
+    <destinations>
+        <destination id="local" runner="local"/>
+    </destinations>
+</job_conf>
diff --git a/config/job_metrics_conf.xml.sample b/config/job_metrics_conf.xml.sample
new file mode 100644
index 0000000..1fdfb38
--- /dev/null
+++ b/config/job_metrics_conf.xml.sample
@@ -0,0 +1,124 @@
+<?xml version="1.0"?>
+<!-- If job_metrics_conf.xml exists, this file will define the default job metric
+     plugin used for all jobs. Individual job_conf.xml destinations can
+     disable metric collection by setting metrics="off" on that destination.
+     The metrics attribute on destination definition elements can also be
+     a path - in which case that XML metrics file will be loaded and used for
+     that destination. Finally, the destination element may contain a job_metrics
+     child element (with all options defined below) to define job metrics in an
+     embedded manner directly in the job_conf.xml file.
+-->
+<job_metrics>
+  <!-- Each element in this file corresponds to a job instrumentation plugin
+       used to generate metrics in lib/galaxy/jobs/metrics/instrumenters. -->
+
+  <!-- Core plugin captures Galaxy slots, start and end of job (in seconds
+       since epoch) and computes runtime in seconds. -->
+  <core />
+  
+  <!-- Uncomment to dump processor count for each job - linux only. -->
+  <!-- <cpuinfo /> -->
+  <!-- Uncomment to dump information about all processors for each
+       job - this is likely too much data. Linux only. -->
+  <!-- <cpuinfo verbose="true" /> -->
+
+  <!-- Uncomment to dump system memory information for each job - linux
+       only. -->
+  <!-- <meminfo /> -->
+
+  <!-- Uncomment to record operating system each job is executed on - linux
+       only. -->
+  <!-- <uname /> -->
+
+  <!-- Uncomment the following to enable a plugin dumping the complete environment
+       for each job, potentially useful for debugging -->
+  <!-- <env /> -->
+  <!-- The env plugin can also record more targeted, obviously useful variables
+       as well. -->
+  <!-- <env variables="HOSTNAME,SLURM_CPUS_ON_NODE,SLURM_JOBID" /> -->
+
+  <!-- <collectl /> -->
+  <!-- Collectl (http://collectl.sourceforge.net/) is a powerful monitoring
+       utility capable of gathering numerous system and process level
+       statistics of running applications. The Galaxy collectl job metrics
+       plugin by default will grab a variety of process level metrics
+       aggregated across all processes corresponding to a job; this behavior
+       is highly customizable - both using the attributes documented below
+       and by simply hacking up the code in lib/galaxy/jobs/metrics.
+
+       Warning: In order to use this plugin collectl must be available on the 
+       compute server the job runs on and on the local Galaxy server as well
+       (unless in this latter case summarize_process_data is set to False).
+
+       Attributes (the following describes attributes that can be used with
+       the collectl job metrics element above to modify its behavior):
+
+       'summarize_process_data': Boolean indicating whether to run collectl
+              in playback mode after jobs complete and gather process level
+              statistics for the job run. These statistics can be customized
+              with the 'process_statistics' attribute. (defaults to True)
+
+       'saved_logs_path': If set (it is off by default), all collectl logs
+              will be saved to the specified path after jobs complete. These
+              logs can later be replayed using collectl offline to generate
+              full time-series data corresponding to a job run.
+
+       'subsystems': Comma-separated list of collectl subsystems to collect
+              data for. The plugin doesn't currently expose all of them or offer
+              summary data for any of them except 'process', but extensions
+              would be welcome. It may seem pointless to include subsystems
+              besides process since they won't be processed online by Galaxy -
+              but if 'saved_logs_path' is set these files can be played back at any time.
+
+              Available subsystems - 'process', 'cpu', 'memory', 'network',
+              'disk'. (Default 'process').
+
+              Warning: If you override this - be sure to include 'process'
+              unless 'summarize_process_data' is set to false.
+
+       'process_statistics': If 'summarize_process_data' is enabled, this
+              attribute can be specified as a comma separated list to
+              override the statistics that are gathered. Each statistic is
+              of the form X_Y where X is one of 'min', 'max', 'count',
+              'avg', or 'sum' and Y is a value from 'S', 'VmSize', 'VmLck',
+              'VmRSS', 'VmData', 'VmStk', 'VmExe', 'VmLib', 'CPU', 'SysT',
+              'UsrT', 'PCT', 'AccumT', 'WKB', 'RKBC', 'WKBC', 'RSYS',
+              'WSYS', 'CNCL', 'MajF', 'MinF'. Consult
+              lib/galaxy/jobs/metrics/collectl/processes.py for more details
+              on what each of these resource types means.
+
+              Defaults to 'max_VmSize,avg_VmSize,max_VmRSS,avg_VmRSS,sum_SysT,sum_UsrT,max_PCT,avg_PCT,max_AccumT,sum_RSYS,sum_WSYS' -
+              a variety of statistics roughly describing CPU and memory
+              usage of the program and VERY ROUGHLY describing I/O consumption.
+
+       'procfilt_on': By default Galaxy will tell collectl to only collect
+             'process' level data for the current user, as identified by
+             'username' (the default). This can be disabled by setting this
+             to 'none' - the plugin will still only aggregate process level
+             statistics for the job's process tree, but the additional
+             information can still be used offline with 'saved_logs_path'
+             if set. More obscurely, this can also be set to 'uid' to filter
+             on the current user's UID instead of username - this may be
+             needed on some clusters.
+
+       'interval': The time (in seconds) between data collection points.
+              Collectl uses a variety of different defaults for different
+              subsystems if this is not set, but process information (likely
+              the most pertinent for Galaxy jobs) is collected every
+              60 seconds by default.
+
+       'flush': Interval (in seconds) between collectl buffer flushes to
+              disk. If not set, Galaxy overrides this to disable flushing
+              by default.
+
+       'local_collectl_path', 'remote_collectl_path', 'collectl_path':
+              By default, jobs will just assume collectl is on the PATH, but 
+              it can be overridden with 'local_collectl_path' and 
+              'remote_collectl_path' (or simply 'collectl_path' if it is not 
+              on the path but installed in the same location both locally and 
+              remotely).
+
+        There are further, increasingly obscure options, including
+        log_collectl_program_output, interval2, and interval3. Consult the
+        source code for more details.
+  -->
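+
+  <!-- An illustrative (commented out) collectl element combining several of
+       the attributes documented above - the values shown are examples, not
+       shipped defaults. -->
+  <!-- <collectl subsystems="process,memory"
+                 interval="10"
+                 saved_logs_path="database/collectl_logs"
+                 process_statistics="max_VmSize,avg_VmRSS,sum_SysT,sum_UsrT" /> -->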
+</job_metrics>
diff --git a/config/job_resource_params_conf.xml.sample b/config/job_resource_params_conf.xml.sample
new file mode 100644
index 0000000..9a201c8
--- /dev/null
+++ b/config/job_resource_params_conf.xml.sample
@@ -0,0 +1,6 @@
+<parameters>
+  <param label="Processors" name="processors" type="integer" size="2" min="1" max="64" value="" help="Number of processing cores, 'ppn' value (1-64). Leave blank to use default value." />
+  <param label="Memory" name="memory" type="integer" size="3" min="1" max="256" value="" help="Memory size in gigabytes, 'pmem' value (1-256). Leave blank to use default value." />
+  <param label="Time" name="time" type="integer" size="3" min="1" max="744" value="" help="Maximum job time in hours, 'walltime' value (1-744). Leave blank to use default value." />
+  <param label="Project" name="project" type="text" value="" help="Project to assign resource allocation to. Leave blank to use default value." />
+</parameters>
diff --git a/config/migrated_tools_conf.xml.sample b/config/migrated_tools_conf.xml.sample
new file mode 100644
index 0000000..6425281
--- /dev/null
+++ b/config/migrated_tools_conf.xml.sample
@@ -0,0 +1,3 @@
+<?xml version="1.0"?>
+<toolbox tool_path="../shed_tools">
+</toolbox>
\ No newline at end of file
diff --git a/config/object_store_conf.xml.sample b/config/object_store_conf.xml.sample
new file mode 100644
index 0000000..655e727
--- /dev/null
+++ b/config/object_store_conf.xml.sample
@@ -0,0 +1,57 @@
+<?xml version="1.0"?>
+<object_store type="hierarchical">
+    <backends>
+        <object_store type="distributed" id="primary" order="0">
+            <backends>
+                <backend id="files1" type="disk" weight="1">
+                    <files_dir path="database/files1"/>
+                    <extra_dir type="temp" path="database/tmp1"/>
+                    <extra_dir type="job_work" path="database/job_working_directory1"/>
+                </backend>
+                <backend id="files2" type="disk" weight="1">
+                    <files_dir path="database/files2"/>
+                    <extra_dir type="temp" path="database/tmp2"/>
+                    <extra_dir type="job_work" path="database/job_working_directory2"/>
+                </backend>
+            </backends>
+        </object_store>
+        <object_store type="disk" id="secondary" order="1">
+            <files_dir path="database/files3"/>
+            <extra_dir type="temp" path="database/tmp3"/>
+            <extra_dir type="job_work" path="database/job_working_directory3"/>
+        </object_store>
+
+        <!--  Sample S3 Object Store
+        <object_store type="s3">
+             <auth access_key="...." secret_key="....." />
+             <bucket name="unique_bucket_name_all_lowercase" use_reduced_redundancy="False" />
+             <cache path="database/object_store_cache" size="1000" />
+             <extra_dir type="job_work" path="database/job_working_directory_s3"/>
+             <extra_dir type="temp" path="database/tmp_s3"/>
+        </object_store>
+        -->
+
+        <!--  Sample Swift Object Store
+        <object_store type="swift">
+            <auth access_key="...." secret_key="....." />
+            <bucket name="unique_bucket_name" use_reduced_redundancy="False" max_chunk_size="250"/>
+            <connection host="" port="" is_secure="" conn_path="" multipart="True"/>
+            <cache path="database/object_store_cache" size="1000" />
+            <extra_dir type="job_work" path="database/job_working_directory_swift"/>
+            <extra_dir type="temp" path="database/tmp_swift"/>
+        </object_store>
+        -->
+
+        <!-- Sample Azure Object Store
+        <object_store type="azure_blob">
+            <auth account_name="..." account_key="...." />
+            <container name="unique_container_name" max_chunk_size="250"/>
+            <cache path="database/object_store_cache" size="100" />
+            <extra_dir type="job_work" path="database/job_working_directory_azure"/>
+            <extra_dir type="temp" path="database/tmp_azure"/>
+        </object_store>
+        -->
+
+
+    </backends>
+</object_store>
diff --git a/config/openid_conf.xml.sample b/config/openid_conf.xml.sample
new file mode 100644
index 0000000..3fce517
--- /dev/null
+++ b/config/openid_conf.xml.sample
@@ -0,0 +1,8 @@
+<?xml version="1.0"?>
+<openid>
+    <provider file="google.xml" />
+    <provider file="yahoo.xml" />
+    <provider file="aol.xml" />
+    <provider file="launchpad.xml" />
+    <provider file="genomespace.xml" />
+</openid>
diff --git a/config/plugins/interactive_environments/bam_iobio/config/bam_iobio.ini.sample b/config/plugins/interactive_environments/bam_iobio/config/bam_iobio.ini.sample
new file mode 100644
index 0000000..d6ff35d
--- /dev/null
+++ b/config/plugins/interactive_environments/bam_iobio/config/bam_iobio.ini.sample
@@ -0,0 +1,43 @@
+[main]
+# The following options are ignored if using the Galaxy dynamic proxy, but
+# are useful if mapping a range of ports for environment consumption.
+#password_auth = False
+#ssl = False
+
+[docker]
+# Command to launch docker container. For example `sudo docker` or `docker-lxc`.
+# If you need to use a command like `sg` you can do that here, just be sure to
+# wrap all of the docker portion in single quotes. E.g. `sg 'docker' 'docker {docker_args}'`
+#
+# It is recommended that you use command_inject if you need to inject
+# additional parameters. This command string is re-used for a `docker inspect`
+# command and will likely cause errors if it is extensively modified, past the
+# usual group/sudo changes.
+#command = docker {docker_args}
+
+# The docker image name that should be started.
+image = qiaoy/iobio-bundle.bam-iobio:1.0-ondemand
+
+# Additional arguments that are passed to the `docker run` command.
+#command_inject = --sig-proxy=true -e DEBUG=false
+
+# URL to access the Galaxy API with from the spawned Docker container. If
+# empty, this falls back to galaxy.ini's galaxy_infrastructure_url and
+# finally to the Docker host of the spawned container if that is also not set.
+#galaxy_url =
+
+# The Docker hostname. It can be useful to run the Docker daemon on a different
+# host than Galaxy.
+#docker_hostname = localhost
+
+# Try to set the temp directory to world execute - this can fix the issue
+# where 'sudo docker' is otherwise not able to mount the folder
+# ("finalize namespace chdir to /import permission denied").
+#wx_tempdir = False
+
+# Override the IE temp directory. This can be useful if your regular temp
+# directory is located on an NFS share, which does not work well as a Docker
+# volume. In this case you can use a shared sshfs mount as a temporary
+# directory to share data between the IE and Galaxy.
+#docker_galaxy_temp_dir = None
+
diff --git a/config/plugins/interactive_environments/bam_iobio/config/bam_iobio.xml b/config/plugins/interactive_environments/bam_iobio/config/bam_iobio.xml
new file mode 100644
index 0000000..fa96cdb
--- /dev/null
+++ b/config/plugins/interactive_environments/bam_iobio/config/bam_iobio.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE interactive_environment SYSTEM "../../interactive_environments.dtd">
+<interactive_environment name="BAM iobio">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">binary.Bam</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <entry_point entry_point_type="mako">bam_iobio.mako</entry_point>
+</interactive_environment>
diff --git a/config/plugins/interactive_environments/bam_iobio/static/js/bam_iobio.js b/config/plugins/interactive_environments/bam_iobio/static/js/bam_iobio.js
new file mode 100644
index 0000000..395904e
--- /dev/null
+++ b/config/plugins/interactive_environments/bam_iobio/static/js/bam_iobio.js
@@ -0,0 +1,39 @@
+function message_failed_auth(password){
+    toastr.info(
+        "Automatic authorization failed.",
+        "Please login manually",
+        {'closeButton': true, 'timeOut': 100000, 'tapToDismiss': false}
+    );
+}
+
+function message_failed_connection(){
+    toastr.error(
+        "Could not connect to BAM iobio. Please contact your administrator.",
+    "Security warning",
+        {'closeButton': true, 'timeOut': 20000, 'tapToDismiss': true}
+    );
+}
+
+/**
+ * Load an interactive environment (IE) from a remote URL
+ * @param {String} notebook_access_url: the URL embedded in the page and loaded
+ */
+function load_notebook(notebook_access_url){
+    $( document ).ready(function() {
+        // Test notebook_access_url for accessibility, executing the load
+        // function whenever we've successfully connected to the IE.
+        test_ie_availability(notebook_access_url, function(){
+            _handle_notebook_loading(notebook_access_url);
+        });
+    });
+}
+
+/**
+ * Must be implemented by IEs
+ */
+function _handle_notebook_loading(notebook_access_url){
+    append_notebook(notebook_access_url);
+}
diff --git a/config/plugins/interactive_environments/bam_iobio/templates/bam_iobio.mako b/config/plugins/interactive_environments/bam_iobio/templates/bam_iobio.mako
new file mode 100644
index 0000000..8f18630
--- /dev/null
+++ b/config/plugins/interactive_environments/bam_iobio/templates/bam_iobio.mako
@@ -0,0 +1,62 @@
+<%namespace name="ie" file="ie.mako" />
+
+<%
+import subprocess
+from galaxy.util import sockets
+
+# Sets ID and sets up a lot of other variables
+ie_request.load_deploy_config()
+ie_request.attr.docker_port = 80
+ie_request.attr.import_volume = False
+
+bam = ie_request.volume(hda.file_name, '/input/bamfile.bam', how='ro')
+bam_index = ie_request.volume(hda.metadata.bam_index.file_name, '/input/bamfile.bam.bai', how='ro')
+
+ie_request.launch(volumes=[bam, bam_index], env_override={
+    'PUB_HTTP_PORT': ie_request.attr.galaxy_config.dynamic_proxy_bind_port,
+    'PUB_HOSTNAME': ie_request.attr.HOST,
+})
+
+notebook_access_url = ie_request.url_template('${PROXY_URL}/?bam=http://localhost/tmp/bamfile.bam')
+
+root = h.url_for( '/' )
+%>
+<html>
+<head>
+    ${ ie.load_default_js() }
+</head>
+<body>
+
+    <script type="text/javascript">
+
+        ${ ie.default_javascript_variables() }
+        var notebook_access_url = '${ notebook_access_url }';
+        ${ ie.plugin_require_config() }
+
+        requirejs(['interactive_environments', 'plugin/bam_iobio'], function(){
+            display_spinner();
+        });
+
+        toastr.info(
+            "BAM io.bio is starting up!",
+            "transferring data ...",
+            {'closeButton': true, 'timeOut': 5000, 'tapToDismiss': false}
+        );
+
+        var startup = function(){
+            // Load notebook
+            requirejs(['interactive_environments', 'plugin/bam_iobio'], function(){
+                load_notebook(notebook_access_url);
+            });
+
+        };
+        // sleep 5 seconds
+        // this is currently needed to get the vis right
+        // plans exist to move this spinner into the container
+        setTimeout(startup, 5000);
+
+    </script>
+<div id="main">
+</div>
+</body>
+</html>
diff --git a/config/plugins/interactive_environments/common/templates/ie.mako b/config/plugins/interactive_environments/common/templates/ie.mako
new file mode 100644
index 0000000..fac6366
--- /dev/null
+++ b/config/plugins/interactive_environments/common/templates/ie.mako
@@ -0,0 +1,35 @@
+<%def name="default_javascript_variables()">
+// Globals
+
+// The following are for older-style IE proxies; the newer dynamic Galaxy
+// proxy does not use these.
+ie_password_auth = ${ ie_request.javascript_boolean(ie_request.attr.PASSWORD_AUTH) };
+ie_password = '${ ie_request.notebook_pw }';
+
+
+var galaxy_root = '${ ie_request.attr.root }';
+var app_root = '${ ie_request.attr.app_root }';
+</%def>
+
+
+<%def name="load_default_js()">
+${h.css( 'base' ) }
+${h.js( 'libs/jquery/jquery',
+        'libs/toastr',
+        'libs/require')}
+</%def>
+
+<%def name="plugin_require_config()">
+require.config({
+    baseUrl: app_root,
+    paths: {
+        "plugin" : app_root + "js/",
+        "interactive_environments": "${h.url_for('/static/scripts/galaxy.interactive_environments')}",
+    },
+});
+
+window.onbeforeunload = function() {
+    return 'You are leaving your Interactive Environment.';
+};
+
+</%def>
diff --git a/config/plugins/interactive_environments/interactive_environments.dtd b/config/plugins/interactive_environments/interactive_environments.dtd
new file mode 120000
index 0000000..bcf902d
--- /dev/null
+++ b/config/plugins/interactive_environments/interactive_environments.dtd
@@ -0,0 +1 @@
+../visualizations/visualization.dtd
\ No newline at end of file
diff --git a/config/plugins/interactive_environments/jupyter/config/allowed_images.yml.sample b/config/plugins/interactive_environments/jupyter/config/allowed_images.yml.sample
new file mode 100644
index 0000000..89797ba
--- /dev/null
+++ b/config/plugins/interactive_environments/jupyter/config/allowed_images.yml.sample
@@ -0,0 +1,14 @@
+# This file lists the images that are acceptable to run.
+#
+# This allows you, the admin, to offer multiple flavours
+# for your users to run. E.g. if you need a geosciences flavour,
+# you can base an image on our default image and add the
+# appropriate `apt-get`/`pip install` statements.
+---
+-
+    image: bgruening/docker-jupyter-notebook:16.01
+    description: |
+        The Jupyter notebook is the next iteration of IPython, allowing
+        analysis in many different languages. This image features the Python,
+        R, Julia, Haskell, Bash kernels and many scientific analysis stacks for
+        each.
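+#
+# A second, hypothetical flavour entry (the image name below is illustrative
+# only, not a published image) would be added like so:
+#
+#-
+#    image: example/docker-jupyter-geosciences:16.01
+#    description: |
+#        The default Jupyter image extended with geosciences analysis
+#        packages.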
diff --git a/config/plugins/interactive_environments/jupyter/config/jupyter.ini.sample b/config/plugins/interactive_environments/jupyter/config/jupyter.ini.sample
new file mode 100644
index 0000000..ec7cedb
--- /dev/null
+++ b/config/plugins/interactive_environments/jupyter/config/jupyter.ini.sample
@@ -0,0 +1,42 @@
+[main]
+# The following options are ignored if using the Galaxy dynamic proxy, but
+# are useful if mapping a range of ports for environment consumption.
+#password_auth = False
+#ssl = False
+
+[docker]
+# Command to launch docker container. For example `sudo docker` or `docker-lxc`.
+# If you need to use a command like `sg` you can do that here, just be sure to
+# wrap all of the docker portion in single quotes. E.g. `sg 'docker' 'docker {docker_args}'`
+#
+# It is recommended that you use command_inject if you need to inject
+# additional parameters. This command string is re-used for a `docker inspect`
+# command and will likely cause errors if it is extensively modified, past the
+# usual group/sudo changes.
+#command = docker {docker_args}
+
+# The image argument was moved to "allowed_images.yml.sample"
+
+# Additional arguments that are passed to the `docker run` command.
+command_inject = --sig-proxy=true -e DEBUG=false -e DEFAULT_CONTAINER_RUNTIME=120
+
+# URL to access the Galaxy API with from the spawned Docker container. If
+# empty, this falls back to galaxy.ini's galaxy_infrastructure_url and
+# finally to the Docker host of the spawned container if that is also not set.
+#galaxy_url =
+
+# The Docker hostname. It can be useful to run the Docker daemon on a different
+# host than Galaxy.
+#docker_hostname = localhost
+
+# Try to set the temp directory to world execute - this can fix the issue
+# where 'sudo docker' is otherwise not able to mount the folder
+# ("finalize namespace chdir to /import permission denied").
+#wx_tempdir = False
+
+# Override the IE temp directory. This can be useful if your regular temp
+# directory is located on an NFS share, which does not work well as a Docker
+# volume. In this case you can use a shared sshfs mount as a temporary
+# directory to share data between the IE and Galaxy.
+#docker_galaxy_temp_dir = None
+
diff --git a/config/plugins/interactive_environments/jupyter/config/jupyter.xml b/config/plugins/interactive_environments/jupyter/config/jupyter.xml
new file mode 100644
index 0000000..d4f42d5
--- /dev/null
+++ b/config/plugins/interactive_environments/jupyter/config/jupyter.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE interactive_environment SYSTEM "../../interactive_environments.dtd">
+<interactive_environment name="Jupyter (Programming Environment)">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">data.Text</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">binary.Binary</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <entry_point entry_point_type="mako">jupyter.mako</entry_point>
+</interactive_environment>
diff --git a/config/plugins/interactive_environments/jupyter/static/js/jupyter.js b/config/plugins/interactive_environments/jupyter/static/js/jupyter.js
new file mode 100644
index 0000000..3b0ca53
--- /dev/null
+++ b/config/plugins/interactive_environments/jupyter/static/js/jupyter.js
@@ -0,0 +1,123 @@
+function message_failed_auth(password){
+    toastr.info(
+        "Automatic authorization failed. You can manually login with:<br>" + password + "<br> <a href='https://github.com/bgruening/galaxy-ipython/wiki/Automatic-Authorization-Failed' target='_blank'>More details ...</a>",
+        "Please login manually",
+        {'closeButton': true, 'timeOut': 100000, 'tapToDismiss': false}
+    );
+}
+
+function message_failed_connection(){
+    toastr.error(
+        "Could not connect to Jupyter Notebook. Please contact your administrator. <a href='https://github.com/bgruening/galaxy-ipython/wiki/Could-not-connect-to-IPython-Notebook' target='_blank'>More details ...</a>",
+    "Security warning",
+        {'closeButton': true, 'timeOut': 20000, 'tapToDismiss': true}
+    );
+}
+
+function message_no_auth(){
+    // No longer a security issue, proxy validates Galaxy session token.
+    /*
+    toastr.warning(
+        "IPython Notebook was lunched without authentication. This is a security issue. <a href='https://github.com/bgruening/galaxy-ipython/wiki/IPython-Notebook-was-lunched-without-authentication' target='_blank'>More details ...</a>",
+        "Security warning",
+        {'closeButton': true, 'timeOut': 20000, 'tapToDismiss': false}
+    );
+    */
+}
+
+
+/**
+ * Load an interactive environment (IE) from a remote URL
+ * @param {String} password: password used to authenticate to the remote resource
+ * @param {String} notebook_login_url: URL that should be POSTed to for login
+ * @param {String} notebook_access_url: the URL embedded in the page and loaded
+ *
+ */
+function load_notebook(password, notebook_login_url, notebook_access_url){
+    $( document ).ready(function() {
+        // Test notebook_login_url for accessibility, executing the login+load function whenever
+        // we've successfully connected to the IE.
+        test_ie_availability(notebook_login_url, function(){
+            _handle_notebook_loading(password, notebook_login_url, notebook_access_url);
+        });
+    });
+}
+
+
+function keep_alive(){
+    /**
+    * This is needed to keep the container alive. If the user leaves this
+    * site and this function is no longer pinging the container, the
+    * container will terminate itself.
+    */
+
+    var request_count = 0;
+    var interval = setInterval(function(){
+        $.ajax({
+            url: notebook_access_url,
+            xhrFields: {
+                withCredentials: true
+            },
+            type: "GET",
+            timeout: 500,
+            success: function(){
+                console.log("Connected to IE, returning");
+            },
+            error: function(jqxhr, status, error){
+                request_count++;
+                console.log("Request " + request_count);
+                if(request_count > 30){
+                    clearInterval(interval);
+                    clear_main_area();
+                    toastr.error(
+                        "Could not connect to IE, contact your administrator",
+                        "Error",
+                        {'closeButton': true, 'timeOut': 20000, 'tapToDismiss': false}
+                    );
+                }
+            }
+        });
+    }, 30000);
+}
+
+
+/**
+ * Must be implemented by IEs
+ */
+function _handle_notebook_loading(password, notebook_login_url, notebook_access_url){
+    if ( ie_password_auth ) {
+        // Make an AJAX POST
+        $.ajax({
+            type: "POST",
+            // to the Login URL
+            url: notebook_login_url,
+            // With our password
+            data: {
+                'password': password
+            },
+            xhrFields: {
+                withCredentials: true
+            },
+            // If that is successful, load the notebook
+            success: function(){
+                append_notebook(notebook_access_url);
+            },
+            error: function(jqxhr, status, error){
+                if(ie_password_auth){
+                    // Failure happens due to CORS
+                    message_failed_auth(password);
+                    append_notebook(notebook_access_url);
+                }else{
+                    message_failed_connection();
+                    // Do we want to try and load the notebook anyway? Just in case?
+                    append_notebook(notebook_access_url);
+                }
+            }
+        });
+    }
+    else {
+        // Not using password auth, just embed it to avoid content-origin issues.
+        message_no_auth();
+        append_notebook(notebook_access_url);
+    }
+}
diff --git a/config/plugins/interactive_environments/jupyter/templates/jupyter.mako b/config/plugins/interactive_environments/jupyter/templates/jupyter.mako
new file mode 100644
index 0000000..69bf37f
--- /dev/null
+++ b/config/plugins/interactive_environments/jupyter/templates/jupyter.mako
@@ -0,0 +1,74 @@
+<%namespace name="ie" file="ie.mako" />
+
+<%
+import os
+import shutil
+import hashlib
+
+# Sets ID and sets up a lot of other variables
+ie_request.load_deploy_config()
+ie_request.attr.docker_port = 8888
+ie_request.attr.import_volume = False
+
+if ie_request.attr.PASSWORD_AUTH:
+    m = hashlib.sha1()
+    m.update( ie_request.notebook_pw + ie_request.notebook_pw_salt )
+    PASSWORD = 'sha1:%s:%s' % (ie_request.notebook_pw_salt, m.hexdigest())
+else:
+    PASSWORD = "none"
+
+## Jupyter Notebook Specific
+if hda.datatype.__class__.__name__ == "Ipynb":
+    DATASET_HID = hda.hid
+else:
+    DATASET_HID = None
+
+# Add all environment variables collected from Galaxy's IE infrastructure
+ie_request.launch(
+    image=trans.request.params.get('image_tag', None),
+    additional_ids=trans.request.params.get('additional_dataset_ids', None),
+    env_override={
+        'notebook_password': PASSWORD,
+        'dataset_hid': DATASET_HID,
+    }
+)
+
+## General IE specific
+# Access URLs for the notebook from within Galaxy.
+notebook_access_url = ie_request.url_template('${PROXY_URL}/ipython/notebooks/ipython_galaxy_notebook.ipynb')
+notebook_login_url = ie_request.url_template('${PROXY_URL}/ipython/login?next=${PROXY_PREFIX}%2Fipython%2Ftree')
+
+%>
+<html>
+<head>
+${ ie.load_default_js() }
+</head>
+<body>
+
+<script type="text/javascript">
+${ ie.default_javascript_variables() }
+var notebook_login_url = '${ notebook_login_url }';
+var notebook_access_url = '${ notebook_access_url }';
+${ ie.plugin_require_config() }
+
+// Keep container running
+requirejs(['interactive_environments', 'plugin/jupyter'], function(){
+    keep_alive();
+});
+
+
+// Load notebook
+
+requirejs(['interactive_environments', 'plugin/jupyter'], function(){
+    load_notebook(ie_password, notebook_login_url, notebook_access_url);
+});
+
+</script>
+<div id="main" width="100%" height="100%">
+</div>
+</body>
+</html>
diff --git a/config/plugins/interactive_environments/neo/config/allowed_images.yml.sample b/config/plugins/interactive_environments/neo/config/allowed_images.yml.sample
new file mode 100644
index 0000000..872faf9
--- /dev/null
+++ b/config/plugins/interactive_environments/neo/config/allowed_images.yml.sample
@@ -0,0 +1,11 @@
+# This file lists the images that are acceptable to run.
+#
+# This allows you, the admin, to offer multiple flavours
+# for your users to run. E.g. if you need a geosciences flavour,
+# you can base an image on our default image and add the
+# appropriate `apt-get`/`pip install` statements.
+---
+-
+    image: thoba/neo4j_galaxy_ie:v1
+    description: |
+        Neo4j is a highly scalable, robust native graph database.
diff --git a/config/plugins/interactive_environments/neo/config/neo.ini.sample b/config/plugins/interactive_environments/neo/config/neo.ini.sample
new file mode 100644
index 0000000..2741147
--- /dev/null
+++ b/config/plugins/interactive_environments/neo/config/neo.ini.sample
@@ -0,0 +1,42 @@
+[main]
+# The following options are ignored if using the Galaxy dynamic proxy, but
+# are useful if mapping a range of ports for environment consumption.
+#password_auth = False
+#ssl = False
+
+[docker]
+# Command to launch docker container. For example `sudo docker` or `docker-lxc`.
+# If you need to use a command like `sg` you can do that here, just be sure to
+# wrap all of the docker portion in single quotes. E.g. `sg 'docker' 'docker {docker_args}'`
+#
+# It is recommended that you use command_inject if you need to inject
+# additional parameters. This command string is re-used for a `docker inspect`
+# command and will likely cause errors if it is extensively modified, past the
+# usual group/sudo changes.
+#command = docker {docker_args}
+
+# The image argument was moved to "allowed_images.yml.sample"
+
+# Additional arguments that are passed to the `docker run` command.
+command_inject = --sig-proxy=true -e DEBUG=false -e DEFAULT_CONTAINER_RUNTIME=120 -e NEO4J_UID=$(id -u) -e NEO4J_GID=$(id -g)
+
+
+# URL to access the Galaxy API with from the spawned Docker container. If
+# empty, this falls back to galaxy.ini's galaxy_infrastructure_url and
+# finally to the Docker host of the spawned container if that is also not set.
+#galaxy_url =
+
+# The Docker hostname. It can be useful to run the Docker daemon on a different
+# host than Galaxy.
+#docker_hostname = localhost
+
+# Try to set the temp directory to world execute - this can fix the issue
+# where 'sudo docker' is otherwise not able to mount the folder
+# ("finalize namespace chdir to /import permission denied").
+#wx_tempdir = False
+
+# Override the IE temp directory. This can be useful if your regular temp
+# directory is located on an NFS share, which does not work well as a Docker
+# volume. In this case you can use a shared sshfs mount as a temporary
+# directory to share data between the IE and Galaxy.
+#docker_galaxy_temp_dir = None
diff --git a/config/plugins/interactive_environments/neo/config/neo.xml b/config/plugins/interactive_environments/neo/config/neo.xml
new file mode 100644
index 0000000..2faa185
--- /dev/null
+++ b/config/plugins/interactive_environments/neo/config/neo.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE interactive_environment SYSTEM "../../interactive_environments.dtd">
+<!-- This is the name which will show up in the User's Browser -->
+<interactive_environment name="Neo4j (Graph Database)">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <!-- Here you filter out which types of datasets are appropriate for this GIE.
+            The Neo4jDB datatype is generated by the build_ctb_gene tool in the test toolshed. -->
+            <test type="isinstance" test_attr="datatype" result_type="datatype">Neo4jDB.Neo4jDB</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <!-- Be sure that your entrypoint name is correct! -->
+    <entry_point entry_point_type="mako">neo.mako</entry_point>
+</interactive_environment>
diff --git a/config/plugins/interactive_environments/neo/static/js/neo.js b/config/plugins/interactive_environments/neo/static/js/neo.js
new file mode 100644
index 0000000..068d68b
--- /dev/null
+++ b/config/plugins/interactive_environments/neo/static/js/neo.js
@@ -0,0 +1,12 @@
+// Load an interactive environment (IE) from a remote URL
+// @param {String} notebook_access_url: the URL embedded in the page and loaded
+function load_notebook(notebook_access_url){
+    // When the page has completely loaded...
+    $( document ).ready(function() {
+        // Test if we can access the GIE, and if so, execute the function
+        // to load the GIE for the user.
+        test_ie_availability(notebook_access_url, function(){
+            append_notebook(notebook_access_url);
+        });
+    });
+}
diff --git a/config/plugins/interactive_environments/neo/templates/neo.mako b/config/plugins/interactive_environments/neo/templates/neo.mako
new file mode 100644
index 0000000..b241b9d
--- /dev/null
+++ b/config/plugins/interactive_environments/neo/templates/neo.mako
@@ -0,0 +1,43 @@
+<%namespace name="ie" file="ie.mako" />
+<%
+    # Sets ID and sets up a lot of other variables
+    ie_request.load_deploy_config()
+    # Define a volume that will be mounted into the container.
+    # This is a useful way to provide access to large files in the container,
+    # if the user knows ahead of time that they will need it.
+    import os
+    mount_path = str(os.path.dirname(hda.file_name)) + '/dataset_{}_files'.format( hda.dataset.id )
+    data_vol = ie_request.volume(mount_path, '/data', how='rw')
+    # data_vol = ie_request.volume('${HOME}/neo4j/data', '/data/', how='rw')
+    # Add all environment variables collected from Galaxy's IE infrastructure
+    # Launch the IE.
+    ie_request.launch(
+       image=trans.request.params.get('image_tag', None),
+       additional_ids=trans.request.params.get('additional_dataset_ids', None),
+       volumes=[data_vol]
+    )
+    # Only once the container is launched can we template our URLs. The ie_request
+    # doesn't have all of the information needed until the container is running.
+    url = ie_request.url_template('${PROXY_URL}')
+%>
+<html>
+<head>
+    ${ ie.load_default_js() }
+</head>
+<body>
+<script type="text/javascript">
+
+    ${ ie.default_javascript_variables() }
+    var url = '${ url }';
+    ${ ie.plugin_require_config() }
+
+
+    requirejs(['interactive_environments', 'plugin/neo'], function () {
+        load_notebook(url);
+    });
+
+</script>
+<div id="main" width="100%" height="100%">
+</div>
+</body>
+</html>
diff --git a/config/plugins/interactive_environments/phinch/config/phinch.ini.sample b/config/plugins/interactive_environments/phinch/config/phinch.ini.sample
new file mode 100644
index 0000000..be08cb6
--- /dev/null
+++ b/config/plugins/interactive_environments/phinch/config/phinch.ini.sample
@@ -0,0 +1,21 @@
+[main]
+# Unused
+
+[docker]
+# Command to execute docker. For example `sudo docker` or `docker-lxc`.
+command = docker {docker_args}
+
+# The docker image name that should be started.
+image = shiltemann/docker-phinch-galaxy:16.04
+
+# Additional arguments that are passed to the `docker run` command.
+#command_inject = --sig-proxy=true -e DEBUG=false
+
+# URL to access the Galaxy API with from the spawned Docker container. If
+# empty, this falls back to galaxy.ini's galaxy_infrastructure_url and
+# finally to the Docker host of the spawned container if that is also not set.
+#galaxy_url =
+
+# The Docker hostname. It can be useful to run the Docker daemon on a different
+# host than Galaxy.
+#docker_hostname = localhost
diff --git a/config/plugins/interactive_environments/phinch/config/phinch.xml b/config/plugins/interactive_environments/phinch/config/phinch.xml
new file mode 100644
index 0000000..3af1eee
--- /dev/null
+++ b/config/plugins/interactive_environments/phinch/config/phinch.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE interactive_environment SYSTEM "../../interactive_environments.dtd">
+<interactive_environment name="Phinch">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">Biom1</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <entry_point entry_point_type="mako">phinch.mako</entry_point>
+</interactive_environment>
\ No newline at end of file
diff --git a/config/plugins/interactive_environments/phinch/static/js/phinch.js b/config/plugins/interactive_environments/phinch/static/js/phinch.js
new file mode 100644
index 0000000..1b4c993
--- /dev/null
+++ b/config/plugins/interactive_environments/phinch/static/js/phinch.js
@@ -0,0 +1,51 @@
+function load_notebook(url){
+    $( document ).ready(function() {
+        test_ie_availability(url, function(){
+            append_notebook(url);
+        });
+    });
+}
+
+function append_notebook(url){
+    clear_main_area();
+    $('#main').append('<iframe frameBorder="0" seamless="seamless" style="width: 100%; height: 100%; overflow:auto;" scrolling="yes" src="'+ url +'"></iframe>');
+}
+
+
+function keep_alive(notebook_access_url){
+    /**
+    * This is needed to keep the container alive. If the user leaves this
+    * site and this function is no longer pinging the container, the
+    * container will terminate itself.
+    */
+
+    var request_count = 0;
+    var interval = setInterval(function(){
+        $.ajax({
+            url: notebook_access_url,
+            xhrFields: {
+                withCredentials: true
+            },
+            type: "GET",
+            timeout: 500,
+            success: function(){
+                console.log("Connected to IE, returning");
+            },
+            error: function(jqxhr, status, error){
+                request_count++;
+                console.log("Request " + request_count);
+                if(request_count > 30){
+                    clearInterval(interval);
+                    clear_main_area();
+                    toastr.error(
+                        "Could not connect to IE, contact your administrator",
+                        "Error",
+                        {'closeButton': true, 'timeOut': 20000, 'tapToDismiss': false}
+                    );
+                }
+            }
+        });
+    }, 10000);
+}
+
diff --git a/config/plugins/interactive_environments/phinch/templates/phinch.mako b/config/plugins/interactive_environments/phinch/templates/phinch.mako
new file mode 100644
index 0000000..c446bf7
--- /dev/null
+++ b/config/plugins/interactive_environments/phinch/templates/phinch.mako
@@ -0,0 +1,46 @@
+<%namespace name="ie" file="ie.mako" />
+<%
+
+import os
+
+# Sets ID and sets up a lot of other variables
+ie_request.load_deploy_config()
+
+
+# Launch the IE. This builds and runs the docker command in the background.
+ie_request.launch(env_override={
+    'dataset_hid': hda.hid,
+    'dataset_filename': hda.file_name
+})
+
+# Only once the container is launched can we template our URLs. The ie_request
+# doesn't have all of the information needed until the container is running.
+url = ie_request.url_template('${PROXY_URL}/phinch/')
+
+
+%>
+<html>
+<head>
+${ ie.load_default_js() }
+</head>
+<body>
+<script type="text/javascript">
+${ ie.default_javascript_variables() }
+var url = '${ url }';
+${ ie.plugin_require_config() }
+
+// Keep container running
+requirejs(['interactive_environments', 'plugin/phinch'], function(){
+    keep_alive(url);
+});
+
+// Load notebook
+requirejs(['interactive_environments', 'plugin/phinch'], function(){
+    load_notebook(url);
+});
+
+</script>
+<div id="main" style="width: 100%; height: 100%; overflow:hidden;">
+</div>
+</body>
+</html>
diff --git a/config/plugins/interactive_environments/rstudio/config/allowed_images.yml.sample b/config/plugins/interactive_environments/rstudio/config/allowed_images.yml.sample
new file mode 100644
index 0000000..0c97c0c
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/config/allowed_images.yml.sample
@@ -0,0 +1,14 @@
+# This file lists the images that are acceptable to run.
+#
+# This allows you, the admin, to offer multiple flavours
+# for your users to run. E.g. if you need a geosciences flavour,
+# you can base an image on our default image and add the
+# appropriate `apt-get`/`pip install` statements.
+---
+-
+    image: erasche/docker-rstudio-notebook:16.10
+    description: |
+        If you've ever done R analysis, you probably used RStudio. This
+        familiar R analysis software suite will let you explore your datasets
+        in depth. Comes with ggplot2, RODBC, maps, shinyapps, knitr, LaTeX,
+        bioconductor, cummeRbund, and many more pre-installed packages.
diff --git a/config/plugins/interactive_environments/rstudio/config/rstudio.ini.sample b/config/plugins/interactive_environments/rstudio/config/rstudio.ini.sample
new file mode 100644
index 0000000..ac15696
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/config/rstudio.ini.sample
@@ -0,0 +1,43 @@
+[main]
+# This CANNOT be changed. Eventually this will be deprecated.
+password_auth = True
+# The following options are ignored if using the Galaxy dynamic proxy, but
+# are useful if mapping a range of ports for environment consumption.
+#apache_urls = False
+#ssl = False
+
+[docker]
+# Command to launch docker container. For example `sudo docker` or `docker-lxc`.
+# If you need to use a command like `sg` you can do that here, just be sure to
+# wrap all of the docker portion in single quotes. E.g. `sg 'docker' 'docker {docker_args}'`
+#
+# It is recommended that you use command_inject if you need to inject
+# additional parameters. This command string is re-used for a `docker inspect`
+# command and will likely cause errors if it is extensively modified, past the
+# usual group/sudo changes.
+#command = docker {docker_args}
+
+# The image argument was moved to "allowed_images.yml.sample"
+
+# Additional arguments that are passed to the `docker run` command.
+#command_inject = --sig-proxy=true -e DEBUG=false
+
+# URL to access the Galaxy API with from the spawned Docker container. If
+# empty, this falls back to galaxy.ini's galaxy_infrastructure_url and
+# finally to the Docker host of the spawned container if that is also not set.
+#galaxy_url =
+
+# The Docker hostname. It can be useful to run the Docker daemon on a different
+# host than Galaxy.
+#docker_hostname = localhost
+
+# Try to set the temp directory to world execute - this can fix the issue
+# where 'sudo docker' is otherwise not able to mount the folder
+# ("finalize namespace chdir to /import permission denied").
+#wx_tempdir = False
+
+# Override the IE temp directory. This can be useful if your regular temp
+# directory is located on an NFS share, which does not work well as a Docker
+# volume. In this case you can use a shared sshfs mount as a temporary
+# directory to share data between the IE and Galaxy.
+#docker_galaxy_temp_dir = None
diff --git a/config/plugins/interactive_environments/rstudio/config/rstudio.xml b/config/plugins/interactive_environments/rstudio/config/rstudio.xml
new file mode 100644
index 0000000..ad42cd9
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/config/rstudio.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE interactive_environment SYSTEM "../../interactive_environments.dtd">
+<interactive_environment name="RStudio">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">data.Text</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">binary.RData</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <template>rstudio.mako</template>
+</interactive_environment>
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/base64.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/base64.js
new file mode 100644
index 0000000..77b3868
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/base64.js
@@ -0,0 +1,73 @@
+// ==== File: base64.js
+var b64map="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+var b64pad="=";
+
+function hex2b64(h) {
+  var i;
+  var c;
+  var ret = "";
+  for(i = 0; i+3 <= h.length; i+=3) {
+    c = parseInt(h.substring(i,i+3),16);
+    ret += b64map.charAt(c >> 6) + b64map.charAt(c & 63);
+  }
+  if(i+1 == h.length) {
+    c = parseInt(h.substring(i,i+1),16);
+    ret += b64map.charAt(c << 2);
+  }
+  else if(i+2 == h.length) {
+    c = parseInt(h.substring(i,i+2),16);
+    ret += b64map.charAt(c >> 2) + b64map.charAt((c & 3) << 4);
+  }
+  while((ret.length & 3) > 0) ret += b64pad;
+  return ret;
+}
+
+// convert a base64 string to hex
+function b64tohex(s) {
+  var ret = ""
+  var i;
+  var k = 0; // b64 state, 0-3
+  var slop;
+  for(i = 0; i < s.length; ++i) {
+    if(s.charAt(i) == b64pad) break;
+    var v = b64map.indexOf(s.charAt(i));
+    if(v < 0) continue;
+    if(k == 0) {
+      ret += int2char(v >> 2);
+      slop = v & 3;
+      k = 1;
+    }
+    else if(k == 1) {
+      ret += int2char((slop << 2) | (v >> 4));
+      slop = v & 0xf;
+      k = 2;
+    }
+    else if(k == 2) {
+      ret += int2char(slop);
+      ret += int2char(v >> 2);
+      slop = v & 3;
+      k = 3;
+    }
+    else {
+      ret += int2char((slop << 2) | (v >> 4));
+      ret += int2char(v & 0xf);
+      k = 0;
+    }
+  }
+  if(k == 1)
+    ret += int2char(slop << 2);
+  return ret;
+}
+
+// convert a base64 string to a byte/number array
+function b64toBA(s) {
+  //piggyback on b64tohex for now, optimize later
+  var h = b64tohex(s);
+  var i;
+  var a = new Array();
+  for(i = 0; 2*i < h.length; ++i) {
+    a[i] = parseInt(h.substring(2*i,2*i+2),16);
+  }
+  return a;
+}
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/jsbn.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/jsbn.js
new file mode 100644
index 0000000..801841d
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/jsbn.js
@@ -0,0 +1,562 @@
+// Downloaded from http://www-cs-students.stanford.edu/~tjw/ at Tue Nov 30 00:42:57 PST 2010
+// ==== File: jsbn.js
+// Copyright (c) 2005  Tom Wu
+// All Rights Reserved.
+// See "LICENSE" for details.
+
+// Basic JavaScript BN library - subset useful for RSA encryption.
+
+// Bits per digit
+var dbits;
+
+// JavaScript engine analysis
+var canary = 0xdeadbeefcafe;
+var j_lm = ((canary&0xffffff)==0xefcafe);
+
+// (public) Constructor
+function BigInteger(a,b,c) {
+  if(a != null)
+    if("number" == typeof a) this.fromNumber(a,b,c);
+    else if(b == null && "string" != typeof a) this.fromString(a,256);
+    else this.fromString(a,b);
+}
+
+// return new, unset BigInteger
+function nbi() { return new BigInteger(null); }
+
+// am: Compute w_j += (x*this_i), propagate carries,
+// c is initial carry, returns final carry.
+// c < 3*dvalue, x < 2*dvalue, this_i < dvalue
+// We need to select the fastest one that works in this environment.
+
+// am1: use a single mult and divide to get the high bits,
+// max digit bits should be 26 because
+// max internal value = 2*dvalue^2-2*dvalue (< 2^53)
+function am1(i,x,w,j,c,n) {
+  while(--n >= 0) {
+    var v = x*this[i++]+w[j]+c;
+    c = Math.floor(v/0x4000000);
+    w[j++] = v&0x3ffffff;
+  }
+  return c;
+}
+// am2 avoids a big mult-and-extract completely.
+// Max digit bits should be <= 30 because we do bitwise ops
+// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
+function am2(i,x,w,j,c,n) {
+  var xl = x&0x7fff, xh = x>>15;
+  while(--n >= 0) {
+    var l = this[i]&0x7fff;
+    var h = this[i++]>>15;
+    var m = xh*l+h*xl;
+    l = xl*l+((m&0x7fff)<<15)+w[j]+(c&0x3fffffff);
+    c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);
+    w[j++] = l&0x3fffffff;
+  }
+  return c;
+}
+// Alternately, set max digit bits to 28 since some
+// browsers slow down when dealing with 32-bit numbers.
+function am3(i,x,w,j,c,n) {
+  var xl = x&0x3fff, xh = x>>14;
+  while(--n >= 0) {
+    var l = this[i]&0x3fff;
+    var h = this[i++]>>14;
+    var m = xh*l+h*xl;
+    l = xl*l+((m&0x3fff)<<14)+w[j]+c;
+    c = (l>>28)+(m>>14)+xh*h;
+    w[j++] = l&0xfffffff;
+  }
+  return c;
+}
+if(j_lm && (navigator.appName == "Microsoft Internet Explorer")) {
+  BigInteger.prototype.am = am2;
+  dbits = 30;
+}
+else if(j_lm && (navigator.appName != "Netscape")) {
+  BigInteger.prototype.am = am1;
+  dbits = 26;
+}
+else { // Mozilla/Netscape seems to prefer am3
+  BigInteger.prototype.am = am3;
+  dbits = 28;
+}
+
+BigInteger.prototype.DB = dbits;
+BigInteger.prototype.DM = ((1<<dbits)-1);
+BigInteger.prototype.DV = (1<<dbits);
+
+var BI_FP = 52;
+BigInteger.prototype.FV = Math.pow(2,BI_FP);
+BigInteger.prototype.F1 = BI_FP-dbits;
+BigInteger.prototype.F2 = 2*dbits-BI_FP;
+
+// Digit conversions
+var BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz";
+var BI_RC = new Array();
+var rr,vv;
+rr = "0".charCodeAt(0);
+for(vv = 0; vv <= 9; ++vv) BI_RC[rr++] = vv;
+rr = "a".charCodeAt(0);
+for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
+rr = "A".charCodeAt(0);
+for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
+
+function int2char(n) { return BI_RM.charAt(n); }
+function intAt(s,i) {
+  var c = BI_RC[s.charCodeAt(i)];
+  return (c==null)?-1:c;
+}
+
+// (protected) copy this to r
+function bnpCopyTo(r) {
+  for(var i = this.t-1; i >= 0; --i) r[i] = this[i];
+  r.t = this.t;
+  r.s = this.s;
+}
+
+// (protected) set from integer value x, -DV <= x < DV
+function bnpFromInt(x) {
+  this.t = 1;
+  this.s = (x<0)?-1:0;
+  if(x > 0) this[0] = x;
+  else if(x < -1) this[0] = x+this.DV;
+  else this.t = 0;
+}
+
+// return bigint initialized to value
+function nbv(i) { var r = nbi(); r.fromInt(i); return r; }
+
+// (protected) set from string and radix
+function bnpFromString(s,b) {
+  var k;
+  if(b == 16) k = 4;
+  else if(b == 8) k = 3;
+  else if(b == 256) k = 8; // byte array
+  else if(b == 2) k = 1;
+  else if(b == 32) k = 5;
+  else if(b == 4) k = 2;
+  else { this.fromRadix(s,b); return; }
+  this.t = 0;
+  this.s = 0;
+  var i = s.length, mi = false, sh = 0;
+  while(--i >= 0) {
+    var x = (k==8)?s[i]&0xff:intAt(s,i);
+    if(x < 0) {
+      if(s.charAt(i) == "-") mi = true;
+      continue;
+    }
+    mi = false;
+    if(sh == 0)
+      this[this.t++] = x;
+    else if(sh+k > this.DB) {
+      this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<<sh;
+      this[this.t++] = (x>>(this.DB-sh));
+    }
+    else
+      this[this.t-1] |= x<<sh;
+    sh += k;
+    if(sh >= this.DB) sh -= this.DB;
+  }
+  if(k == 8 && (s[0]&0x80) != 0) {
+    this.s = -1;
+    if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)<<sh;
+  }
+  this.clamp();
+  if(mi) BigInteger.ZERO.subTo(this,this);
+}
+
+// (protected) clamp off excess high words
+function bnpClamp() {
+  var c = this.s&this.DM;
+  while(this.t > 0 && this[this.t-1] == c) --this.t;
+}
+
+// (public) return string representation in given radix
+function bnToString(b) {
+  if(this.s < 0) return "-"+this.negate().toString(b);
+  var k;
+  if(b == 16) k = 4;
+  else if(b == 8) k = 3;
+  else if(b == 2) k = 1;
+  else if(b == 32) k = 5;
+  else if(b == 4) k = 2;
+  else return this.toRadix(b);
+  var km = (1<<k)-1, d, m = false, r = "", i = this.t;
+  var p = this.DB-(i*this.DB)%k;
+  if(i-- > 0) {
+    if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); }
+    while(i >= 0) {
+      if(p < k) {
+        d = (this[i]&((1<<p)-1))<<(k-p);
+        d |= this[--i]>>(p+=this.DB-k);
+      }
+      else {
+        d = (this[i]>>(p-=k))&km;
+        if(p <= 0) { p += this.DB; --i; }
+      }
+      if(d > 0) m = true;
+      if(m) r += int2char(d);
+    }
+  }
+  return m?r:"0";
+}
+
+// (public) -this
+function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }
+
+// (public) |this|
+function bnAbs() { return (this.s<0)?this.negate():this; }
+
+// (public) return + if this > a, - if this < a, 0 if equal
+function bnCompareTo(a) {
+  var r = this.s-a.s;
+  if(r != 0) return r;
+  var i = this.t;
+  r = i-a.t;
+  if(r != 0) return r;
+  while(--i >= 0) if((r=this[i]-a[i]) != 0) return r;
+  return 0;
+}
+
+// returns bit length of the integer x
+function nbits(x) {
+  var r = 1, t;
+  if((t=x>>>16) != 0) { x = t; r += 16; }
+  if((t=x>>8) != 0) { x = t; r += 8; }
+  if((t=x>>4) != 0) { x = t; r += 4; }
+  if((t=x>>2) != 0) { x = t; r += 2; }
+  if((t=x>>1) != 0) { x = t; r += 1; }
+  return r;
+}
+
+// (public) return the number of bits in "this"
+function bnBitLength() {
+  if(this.t <= 0) return 0;
+  return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM));
+}
+
+// (protected) r = this << n*DB
+function bnpDLShiftTo(n,r) {
+  var i;
+  for(i = this.t-1; i >= 0; --i) r[i+n] = this[i];
+  for(i = n-1; i >= 0; --i) r[i] = 0;
+  r.t = this.t+n;
+  r.s = this.s;
+}
+
+// (protected) r = this >> n*DB
+function bnpDRShiftTo(n,r) {
+  for(var i = n; i < this.t; ++i) r[i-n] = this[i];
+  r.t = Math.max(this.t-n,0);
+  r.s = this.s;
+}
+
+// (protected) r = this << n
+function bnpLShiftTo(n,r) {
+  var bs = n%this.DB;
+  var cbs = this.DB-bs;
+  var bm = (1<<cbs)-1;
+  var ds = Math.floor(n/this.DB), c = (this.s<<bs)&this.DM, i;
+  for(i = this.t-1; i >= 0; --i) {
+    r[i+ds+1] = (this[i]>>cbs)|c;
+    c = (this[i]&bm)<<bs;
+  }
+  for(i = ds-1; i >= 0; --i) r[i] = 0;
+  r[ds] = c;
+  r.t = this.t+ds+1;
+  r.s = this.s;
+  r.clamp();
+}
+
+// (protected) r = this >> n
+function bnpRShiftTo(n,r) {
+  r.s = this.s;
+  var ds = Math.floor(n/this.DB);
+  if(ds >= this.t) { r.t = 0; return; }
+  var bs = n%this.DB;
+  var cbs = this.DB-bs;
+  var bm = (1<<bs)-1;
+  r[0] = this[ds]>>bs;
+  for(var i = ds+1; i < this.t; ++i) {
+    r[i-ds-1] |= (this[i]&bm)<<cbs;
+    r[i-ds] = this[i]>>bs;
+  }
+  if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<<cbs;
+  r.t = this.t-ds;
+  r.clamp();
+}
+
+// (protected) r = this - a
+function bnpSubTo(a,r) {
+  var i = 0, c = 0, m = Math.min(a.t,this.t);
+  while(i < m) {
+    c += this[i]-a[i];
+    r[i++] = c&this.DM;
+    c >>= this.DB;
+  }
+  if(a.t < this.t) {
+    c -= a.s;
+    while(i < this.t) {
+      c += this[i];
+      r[i++] = c&this.DM;
+      c >>= this.DB;
+    }
+    c += this.s;
+  }
+  else {
+    c += this.s;
+    while(i < a.t) {
+      c -= a[i];
+      r[i++] = c&this.DM;
+      c >>= this.DB;
+    }
+    c -= a.s;
+  }
+  r.s = (c<0)?-1:0;
+  if(c < -1) r[i++] = this.DV+c;
+  else if(c > 0) r[i++] = c;
+  r.t = i;
+  r.clamp();
+}
+
+// (protected) r = this * a, r != this,a (HAC 14.12)
+// "this" should be the larger one if appropriate.
+function bnpMultiplyTo(a,r) {
+  var x = this.abs(), y = a.abs();
+  var i = x.t;
+  r.t = i+y.t;
+  while(--i >= 0) r[i] = 0;
+  for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t);
+  r.s = 0;
+  r.clamp();
+  if(this.s != a.s) BigInteger.ZERO.subTo(r,r);
+}
+
+// (protected) r = this^2, r != this (HAC 14.16)
+function bnpSquareTo(r) {
+  var x = this.abs();
+  var i = r.t = 2*x.t;
+  while(--i >= 0) r[i] = 0;
+  for(i = 0; i < x.t-1; ++i) {
+    var c = x.am(i,x[i],r,2*i,0,1);
+    if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
+      r[i+x.t] -= x.DV;
+      r[i+x.t+1] = 1;
+    }
+  }
+  if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1);
+  r.s = 0;
+  r.clamp();
+}
+
+// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
+// r != q, this != m.  q or r may be null.
+function bnpDivRemTo(m,q,r) {
+  var pm = m.abs();
+  if(pm.t <= 0) return;
+  var pt = this.abs();
+  if(pt.t < pm.t) {
+    if(q != null) q.fromInt(0);
+    if(r != null) this.copyTo(r);
+    return;
+  }
+  if(r == null) r = nbi();
+  var y = nbi(), ts = this.s, ms = m.s;
+  var nsh = this.DB-nbits(pm[pm.t-1]);	// normalize modulus
+  if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); }
+  else { pm.copyTo(y); pt.copyTo(r); }
+  var ys = y.t;
+  var y0 = y[ys-1];
+  if(y0 == 0) return;
+  var yt = y0*(1<<this.F1)+((ys>1)?y[ys-2]>>this.F2:0);
+  var d1 = this.FV/yt, d2 = (1<<this.F1)/yt, e = 1<<this.F2;
+  var i = r.t, j = i-ys, t = (q==null)?nbi():q;
+  y.dlShiftTo(j,t);
+  if(r.compareTo(t) >= 0) {
+    r[r.t++] = 1;
+    r.subTo(t,r);
+  }
+  BigInteger.ONE.dlShiftTo(ys,t);
+  t.subTo(y,y);	// "negative" y so we can replace sub with am later
+  while(y.t < ys) y[y.t++] = 0;
+  while(--j >= 0) {
+    // Estimate quotient digit
+    var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2);
+    if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) {	// Try it out
+      y.dlShiftTo(j,t);
+      r.subTo(t,r);
+      while(r[i] < --qd) r.subTo(t,r);
+    }
+  }
+  if(q != null) {
+    r.drShiftTo(ys,q);
+    if(ts != ms) BigInteger.ZERO.subTo(q,q);
+  }
+  r.t = ys;
+  r.clamp();
+  if(nsh > 0) r.rShiftTo(nsh,r);	// Denormalize remainder
+  if(ts < 0) BigInteger.ZERO.subTo(r,r);
+}
+
+// (public) this mod a
+function bnMod(a) {
+  var r = nbi();
+  this.abs().divRemTo(a,null,r);
+  if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
+  return r;
+}
+
+// Modular reduction using "classic" algorithm
+function Classic(m) { this.m = m; }
+function cConvert(x) {
+  if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
+  else return x;
+}
+function cRevert(x) { return x; }
+function cReduce(x) { x.divRemTo(this.m,null,x); }
+function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+Classic.prototype.convert = cConvert;
+Classic.prototype.revert = cRevert;
+Classic.prototype.reduce = cReduce;
+Classic.prototype.mulTo = cMulTo;
+Classic.prototype.sqrTo = cSqrTo;
+
+// (protected) return "-1/this % 2^DB"; useful for Mont. reduction
+// justification:
+//         xy == 1 (mod m)
+//         xy =  1+km
+//   xy(2-xy) = (1+km)(1-km)
+// x[y(2-xy)] = 1-k^2m^2
+// x[y(2-xy)] == 1 (mod m^2)
+// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
+// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
+// JS multiply "overflows" differently from C/C++, so care is needed here.
+function bnpInvDigit() {
+  if(this.t < 1) return 0;
+  var x = this[0];
+  if((x&1) == 0) return 0;
+  var y = x&3;		// y == 1/x mod 2^2
+  y = (y*(2-(x&0xf)*y))&0xf;	// y == 1/x mod 2^4
+  y = (y*(2-(x&0xff)*y))&0xff;	// y == 1/x mod 2^8
+  y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff;	// y == 1/x mod 2^16
+  // last step - calculate inverse mod DV directly;
+  // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
+  y = (y*(2-x*y%this.DV))%this.DV;		// y == 1/x mod 2^dbits
+  // we really want the negative inverse, and -DV < y < DV
+  return (y>0)?this.DV-y:-y;
+}
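+
+// Worked example of the lifting above (illustrative, not part of the
+// upstream file): for x = 7, y starts at 7&3 = 3, since 7*3 = 21 == 1 (mod 4).
+// One step gives y = 7 (7*7 = 49 == 1 mod 16), the next y = 183
+// (7*183 = 1281 == 1 mod 256); each step doubles the number of valid bits.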
+
+// Montgomery reduction
+function Montgomery(m) {
+  this.m = m;
+  this.mp = m.invDigit();
+  this.mpl = this.mp&0x7fff;
+  this.mph = this.mp>>15;
+  this.um = (1<<(m.DB-15))-1;
+  this.mt2 = 2*m.t;
+}
+
+// xR mod m
+function montConvert(x) {
+  var r = nbi();
+  x.abs().dlShiftTo(this.m.t,r);
+  r.divRemTo(this.m,null,r);
+  if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);
+  return r;
+}
+
+// x/R mod m
+function montRevert(x) {
+  var r = nbi();
+  x.copyTo(r);
+  this.reduce(r);
+  return r;
+}
+
+// x = x/R mod m (HAC 14.32)
+function montReduce(x) {
+  while(x.t <= this.mt2)	// pad x so am has enough room later
+    x[x.t++] = 0;
+  for(var i = 0; i < this.m.t; ++i) {
+    // faster way of calculating u0 = x[i]*mp mod DV
+    var j = x[i]&0x7fff;
+    var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
+    // use am to combine the multiply-shift-add into one call
+    j = i+this.m.t;
+    x[j] += this.m.am(0,u0,x,i,0,this.m.t);
+    // propagate carry
+    while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; }
+  }
+  x.clamp();
+  x.drShiftTo(this.m.t,x);
+  if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
+}
+
+// r = "x^2/R mod m"; x != r
+function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+// r = "xy/R mod m"; x,y != r
+function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+
+Montgomery.prototype.convert = montConvert;
+Montgomery.prototype.revert = montRevert;
+Montgomery.prototype.reduce = montReduce;
+Montgomery.prototype.mulTo = montMulTo;
+Montgomery.prototype.sqrTo = montSqrTo;
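+
+// Here R = 2^(DB*m.t): convert() maps x to Montgomery form (xR mod m),
+// reduce() divides by R mod m, so chained mulTo/sqrTo results stay in
+// Montgomery form and a final revert() strips the factor of R.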
+
+// (protected) true iff this is even
+function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; }
+
+// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
+function bnpExp(e,z) {
+  if(e > 0xffffffff || e < 1) return BigInteger.ONE;
+  var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
+  g.copyTo(r);
+  while(--i >= 0) {
+    z.sqrTo(r,r2);
+    if((e&(1<<i)) > 0) z.mulTo(r2,g,r);
+    else { var t = r; r = r2; r2 = t; }
+  }
+  return z.revert(r);
+}
+
+// (public) this^e % m, 0 <= e < 2^32
+function bnModPowInt(e,m) {
+  var z;
+  if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
+  return this.exp(e,z);
+}
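+
+// Montgomery reduction needs an odd modulus (invDigit returns 0 for even
+// input), hence the isEven() guard above; for small exponents the conversion
+// overhead outweighs the savings, so Classic is chosen there too.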
+
+// protected
+BigInteger.prototype.copyTo = bnpCopyTo;
+BigInteger.prototype.fromInt = bnpFromInt;
+BigInteger.prototype.fromString = bnpFromString;
+BigInteger.prototype.clamp = bnpClamp;
+BigInteger.prototype.dlShiftTo = bnpDLShiftTo;
+BigInteger.prototype.drShiftTo = bnpDRShiftTo;
+BigInteger.prototype.lShiftTo = bnpLShiftTo;
+BigInteger.prototype.rShiftTo = bnpRShiftTo;
+BigInteger.prototype.subTo = bnpSubTo;
+BigInteger.prototype.multiplyTo = bnpMultiplyTo;
+BigInteger.prototype.squareTo = bnpSquareTo;
+BigInteger.prototype.divRemTo = bnpDivRemTo;
+BigInteger.prototype.invDigit = bnpInvDigit;
+BigInteger.prototype.isEven = bnpIsEven;
+BigInteger.prototype.exp = bnpExp;
+
+// public
+BigInteger.prototype.toString = bnToString;
+BigInteger.prototype.negate = bnNegate;
+BigInteger.prototype.abs = bnAbs;
+BigInteger.prototype.compareTo = bnCompareTo;
+BigInteger.prototype.bitLength = bnBitLength;
+BigInteger.prototype.mod = bnMod;
+BigInteger.prototype.modPowInt = bnModPowInt;
+
+// "constants"
+BigInteger.ZERO = nbv(0);
+BigInteger.ONE = nbv(1);
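+
+// A minimal usage sketch (illustrative, not part of the upstream file):
+//
+//   var b = new BigInteger("5", 16);
+//   var m = new BigInteger("e", 16);      // 14
+//   b.modPowInt(3, m).toString(16);       // "d": 5^3 = 125 == 13 (mod 14)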
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/prng4.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/prng4.js
new file mode 100644
index 0000000..5cd6812
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/prng4.js
@@ -0,0 +1,47 @@
+// ==== File: prng4.js
+// prng4.js - uses Arcfour as a PRNG
+
+function Arcfour() {
+  this.i = 0;
+  this.j = 0;
+  this.S = new Array();
+}
+
+// Initialize Arcfour context from key, an array of ints, each in [0..255]

+function ARC4init(key) {
+  var i, j, t;
+  for(i = 0; i < 256; ++i)
+    this.S[i] = i;
+  j = 0;
+  for(i = 0; i < 256; ++i) {
+    j = (j + this.S[i] + key[i % key.length]) & 255;
+    t = this.S[i];
+    this.S[i] = this.S[j];
+    this.S[j] = t;
+  }
+  this.i = 0;
+  this.j = 0;
+}
+
+function ARC4next() {
+  var t;
+  this.i = (this.i + 1) & 255;
+  this.j = (this.j + this.S[this.i]) & 255;
+  t = this.S[this.i];
+  this.S[this.i] = this.S[this.j];
+  this.S[this.j] = t;
+  return this.S[(t + this.S[this.i]) & 255];
+}
+
+Arcfour.prototype.init = ARC4init;
+Arcfour.prototype.next = ARC4next;
+
+// Plug in your RNG constructor here
+function prng_newstate() {
+  return new Arcfour();
+}
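+
+// Illustrative use of this backend (the same calls rng.js makes):
+//
+//   var state = prng_newstate();
+//   state.init(pool);                 // pool: array of ints in [0..255]
+//   var keystreamByte = state.next();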
+
+// Pool size must be a multiple of 4 and greater than 32.
+// An array of bytes the size of the pool will be passed to init()
+var rng_psize = 256;
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rng.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rng.js
new file mode 100644
index 0000000..24bae0f
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rng.js
@@ -0,0 +1,70 @@
+// ==== File: rng.js
+// Random number generator - requires a PRNG backend, e.g. prng4.js
+
+// For best results, put code like
+// <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
+// in your main HTML document.
+
+var rng_state;
+var rng_pool;
+var rng_pptr;
+
+// Mix a 32-bit integer into the pool
+function rng_seed_int(x) {
+  rng_pool[rng_pptr++] ^= x & 255;
+  rng_pool[rng_pptr++] ^= (x >> 8) & 255;
+  rng_pool[rng_pptr++] ^= (x >> 16) & 255;
+  rng_pool[rng_pptr++] ^= (x >> 24) & 255;
+  if(rng_pptr >= rng_psize) rng_pptr -= rng_psize;
+}
+
+// Mix the current time (with milliseconds) into the pool
+function rng_seed_time() {
+  rng_seed_int(new Date().getTime());
+}
+
+// Initialize the pool with junk if needed.
+if(rng_pool == null) {
+  rng_pool = new Array();
+  rng_pptr = 0;
+  var t;
+  if(navigator.appName == "Netscape" && navigator.appVersion < "5" && window.crypto) {
+    // Extract entropy (256 bits) from NS4 RNG if available
+    var z = window.crypto.random(32);
+    for(t = 0; t < z.length; ++t)
+      rng_pool[rng_pptr++] = z.charCodeAt(t) & 255;
+  }
+  while(rng_pptr < rng_psize) {  // extract some randomness from Math.random()
+    t = Math.floor(65536 * Math.random());
+    rng_pool[rng_pptr++] = t >>> 8;
+    rng_pool[rng_pptr++] = t & 255;
+  }
+  rng_pptr = 0;
+  rng_seed_time();
+  //rng_seed_int(window.screenX);
+  //rng_seed_int(window.screenY);
+}
+
+function rng_get_byte() {
+  if(rng_state == null) {
+    rng_seed_time();
+    rng_state = prng_newstate();
+    rng_state.init(rng_pool);
+    for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr)
+      rng_pool[rng_pptr] = 0;
+    rng_pptr = 0;
+    //rng_pool = null;
+  }
+  // TODO: allow reseeding after first request
+  return rng_state.next();
+}
+
+function rng_get_bytes(ba) {
+  var i;
+  for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte();
+}
+
+function SecureRandom() {}
+
+SecureRandom.prototype.nextBytes = rng_get_bytes;
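+
+// Illustrative usage (this is the only API rsa.js relies on):
+//
+//   var rng = new SecureRandom();
+//   var ba = new Array(16);
+//   rng.nextBytes(ba);                // fills ba with 16 pseudo-random bytes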
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rsa.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rsa.js
new file mode 100644
index 0000000..b2e37c3
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rsa.js
@@ -0,0 +1,114 @@
+// ==== File: rsa.js
+// Depends on jsbn.js and rng.js
+
+// Version 1.1: support utf-8 encoding in pkcs1pad2
+
+// convert a (hex) string to a bignum object
+function parseBigInt(str,r) {
+  return new BigInteger(str,r);
+}
+
+function linebrk(s,n) {
+  var ret = "";
+  var i = 0;
+  while(i + n < s.length) {
+    ret += s.substring(i,i+n) + "\n";
+    i += n;
+  }
+  return ret + s.substring(i,s.length);
+}
+
+function byte2Hex(b) {
+  if(b < 0x10)
+    return "0" + b.toString(16);
+  else
+    return b.toString(16);
+}
+
+// PKCS#1 (type 2, random) pad input string s to n bytes, and return a bigint
+function pkcs1pad2(s,n) {
+  if(n < s.length + 11) { // TODO: fix for utf-8
+    alert("Message too long for RSA");
+    return null;
+  }
+  var ba = new Array();
+  var i = s.length - 1;
+  while(i >= 0 && n > 0) {
+    var c = s.charCodeAt(i--);
+    if(c < 128) { // encode using utf-8
+      ba[--n] = c;
+    }
+    else if((c > 127) && (c < 2048)) {
+      ba[--n] = (c & 63) | 128;
+      ba[--n] = (c >> 6) | 192;
+    }
+    else {
+      ba[--n] = (c & 63) | 128;
+      ba[--n] = ((c >> 6) & 63) | 128;
+      ba[--n] = (c >> 12) | 224;
+    }
+  }
+  ba[--n] = 0;
+  var rng = new SecureRandom();
+  var x = new Array();
+  while(n > 2) { // random non-zero pad
+    x[0] = 0;
+    while(x[0] == 0) rng.nextBytes(x);
+    ba[--n] = x[0];
+  }
+  ba[--n] = 2;
+  ba[--n] = 0;
+  return new BigInteger(ba);
+}
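+
+// Read most-significant byte first, the padded block has the PKCS#1 v1.5
+// type-2 layout: 00 02 <at least 8 nonzero random bytes> 00 <message>.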
+
+// "empty" RSA key constructor
+function RSAKey() {
+  this.n = null;
+  this.e = 0;
+  this.d = null;
+  this.p = null;
+  this.q = null;
+  this.dmp1 = null;
+  this.dmq1 = null;
+  this.coeff = null;
+}
+
+// Set the public key fields N and e from hex strings
+function RSASetPublic(N,E) {
+  if(N != null && E != null && N.length > 0 && E.length > 0) {
+    this.n = parseBigInt(N,16);
+    this.e = parseInt(E,16);
+  }
+  else
+    alert("Invalid RSA public key");
+}
+
+// Perform raw public operation on "x": return x^e (mod n)
+function RSADoPublic(x) {
+  return x.modPowInt(this.e, this.n);
+}
+
+// Return the PKCS#1 RSA encryption of "text" as an even-length hex string
+function RSAEncrypt(text) {
+  var m = pkcs1pad2(text,(this.n.bitLength()+7)>>3);
+  if(m == null) return null;
+  var c = this.doPublic(m);
+  if(c == null) return null;
+  var h = c.toString(16);
+  if((h.length & 1) == 0) return h; else return "0" + h;
+}
+
+// Return the PKCS#1 RSA encryption of "text" as a Base64-encoded string
+//function RSAEncryptB64(text) {
+//  var h = this.encrypt(text);
+//  if(h) return hex2b64(h); else return null;
+//}
+
+// protected
+RSAKey.prototype.doPublic = RSADoPublic;
+
+// public
+RSAKey.prototype.setPublic = RSASetPublic;
+RSAKey.prototype.encrypt = RSAEncrypt;
+//RSAKey.prototype.encrypt_b64 = RSAEncryptB64;
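+
+// Minimal usage sketch (illustrative; "<hex modulus>" is a placeholder, not
+// a real key):
+//
+//   var key = new RSAKey();
+//   key.setPublic("<hex modulus>", "10001");   // e = 65537
+//   var ct = key.encrypt("secret");            // even-length hex, or null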
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.big.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.big.js
new file mode 100644
index 0000000..2389ea4
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.big.js
@@ -0,0 +1,861 @@
+// Downloaded from http://www-cs-students.stanford.edu/~tjw/ at Tue Nov 30 00:42:57 PST 2010
+// ==== File: jsbn.js
+// Copyright (c) 2005  Tom Wu
+// All Rights Reserved.
+// See "LICENSE" for details.
+
+// Basic JavaScript BN library - subset useful for RSA encryption.
+
+// Bits per digit
+var dbits;
+
+// JavaScript engine analysis
+var canary = 0xdeadbeefcafe;
+var j_lm = ((canary&0xffffff)==0xefcafe);
+
+// (public) Constructor
+function BigInteger(a,b,c) {
+  if(a != null)
+    if("number" == typeof a) this.fromNumber(a,b,c);
+    else if(b == null && "string" != typeof a) this.fromString(a,256);
+    else this.fromString(a,b);
+}
+
+// return new, unset BigInteger
+function nbi() { return new BigInteger(null); }
+
+// am: Compute w_j += (x*this_i), propagate carries,
+// c is initial carry, returns final carry.
+// c < 3*dvalue, x < 2*dvalue, this_i < dvalue
+// We need to select the fastest one that works in this environment.
+
+// am1: use a single mult and divide to get the high bits,
+// max digit bits should be 26 because
+// max internal value = 2*dvalue^2-2*dvalue (< 2^53)
+function am1(i,x,w,j,c,n) {
+  while(--n >= 0) {
+    var v = x*this[i++]+w[j]+c;
+    c = Math.floor(v/0x4000000);
+    w[j++] = v&0x3ffffff;
+  }
+  return c;
+}
+// am2 avoids a big mult-and-extract completely.
+// Max digit bits should be <= 30 because we do bitwise ops
+// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
+function am2(i,x,w,j,c,n) {
+  var xl = x&0x7fff, xh = x>>15;
+  while(--n >= 0) {
+    var l = this[i]&0x7fff;
+    var h = this[i++]>>15;
+    var m = xh*l+h*xl;
+    l = xl*l+((m&0x7fff)<<15)+w[j]+(c&0x3fffffff);
+    c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);
+    w[j++] = l&0x3fffffff;
+  }
+  return c;
+}
+// Alternately, set max digit bits to 28 since some
+// browsers slow down when dealing with 32-bit numbers.
+function am3(i,x,w,j,c,n) {
+  var xl = x&0x3fff, xh = x>>14;
+  while(--n >= 0) {
+    var l = this[i]&0x3fff;
+    var h = this[i++]>>14;
+    var m = xh*l+h*xl;
+    l = xl*l+((m&0x3fff)<<14)+w[j]+c;
+    c = (l>>28)+(m>>14)+xh*h;
+    w[j++] = l&0xfffffff;
+  }
+  return c;
+}
+if(j_lm && (navigator.appName == "Microsoft Internet Explorer")) {
+  BigInteger.prototype.am = am2;
+  dbits = 30;
+}
+else if(j_lm && (navigator.appName != "Netscape")) {
+  BigInteger.prototype.am = am1;
+  dbits = 26;
+}
+else { // Mozilla/Netscape seems to prefer am3
+  BigInteger.prototype.am = am3;
+  dbits = 28;
+}
+
+BigInteger.prototype.DB = dbits;
+BigInteger.prototype.DM = ((1<<dbits)-1);
+BigInteger.prototype.DV = (1<<dbits);
+
+var BI_FP = 52;
+BigInteger.prototype.FV = Math.pow(2,BI_FP);
+BigInteger.prototype.F1 = BI_FP-dbits;
+BigInteger.prototype.F2 = 2*dbits-BI_FP;
+
+// Digit conversions
+var BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz";
+var BI_RC = new Array();
+var rr,vv;
+rr = "0".charCodeAt(0);
+for(vv = 0; vv <= 9; ++vv) BI_RC[rr++] = vv;
+rr = "a".charCodeAt(0);
+for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
+rr = "A".charCodeAt(0);
+for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
+
+function int2char(n) { return BI_RM.charAt(n); }
+function intAt(s,i) {
+  var c = BI_RC[s.charCodeAt(i)];
+  return (c==null)?-1:c;
+}
+
+// (protected) copy this to r
+function bnpCopyTo(r) {
+  for(var i = this.t-1; i >= 0; --i) r[i] = this[i];
+  r.t = this.t;
+  r.s = this.s;
+}
+
+// (protected) set from integer value x, -DV <= x < DV
+function bnpFromInt(x) {
+  this.t = 1;
+  this.s = (x<0)?-1:0;
+  if(x > 0) this[0] = x;
+  else if(x < -1) this[0] = x+this.DV;
+  else this.t = 0;
+}
+
+// return bigint initialized to value
+function nbv(i) { var r = nbi(); r.fromInt(i); return r; }
+
+// (protected) set from string and radix
+function bnpFromString(s,b) {
+  var k;
+  if(b == 16) k = 4;
+  else if(b == 8) k = 3;
+  else if(b == 256) k = 8; // byte array
+  else if(b == 2) k = 1;
+  else if(b == 32) k = 5;
+  else if(b == 4) k = 2;
+  else { this.fromRadix(s,b); return; }
+  this.t = 0;
+  this.s = 0;
+  var i = s.length, mi = false, sh = 0;
+  while(--i >= 0) {
+    var x = (k==8)?s[i]&0xff:intAt(s,i);
+    if(x < 0) {
+      if(s.charAt(i) == "-") mi = true;
+      continue;
+    }
+    mi = false;
+    if(sh == 0)
+      this[this.t++] = x;
+    else if(sh+k > this.DB) {
+      this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<<sh;
+      this[this.t++] = (x>>(this.DB-sh));
+    }
+    else
+      this[this.t-1] |= x<<sh;
+    sh += k;
+    if(sh >= this.DB) sh -= this.DB;
+  }
+  if(k == 8 && (s[0]&0x80) != 0) {
+    this.s = -1;
+    if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)<<sh;
+  }
+  this.clamp();
+  if(mi) BigInteger.ZERO.subTo(this,this);
+}
+
+// (protected) clamp off excess high words
+function bnpClamp() {
+  var c = this.s&this.DM;
+  while(this.t > 0 && this[this.t-1] == c) --this.t;
+}
+
+// (public) return string representation in given radix
+function bnToString(b) {
+  if(this.s < 0) return "-"+this.negate().toString(b);
+  var k;
+  if(b == 16) k = 4;
+  else if(b == 8) k = 3;
+  else if(b == 2) k = 1;
+  else if(b == 32) k = 5;
+  else if(b == 4) k = 2;
+  else return this.toRadix(b);
+  var km = (1<<k)-1, d, m = false, r = "", i = this.t;
+  var p = this.DB-(i*this.DB)%k;
+  if(i-- > 0) {
+    if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); }
+    while(i >= 0) {
+      if(p < k) {
+        d = (this[i]&((1<<p)-1))<<(k-p);
+        d |= this[--i]>>(p+=this.DB-k);
+      }
+      else {
+        d = (this[i]>>(p-=k))&km;
+        if(p <= 0) { p += this.DB; --i; }
+      }
+      if(d > 0) m = true;
+      if(m) r += int2char(d);
+    }
+  }
+  return m?r:"0";
+}
+
+// (public) -this
+function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }
+
+// (public) |this|
+function bnAbs() { return (this.s<0)?this.negate():this; }
+
+// (public) return + if this > a, - if this < a, 0 if equal
+function bnCompareTo(a) {
+  var r = this.s-a.s;
+  if(r != 0) return r;
+  var i = this.t;
+  r = i-a.t;
+  if(r != 0) return r;
+  while(--i >= 0) if((r=this[i]-a[i]) != 0) return r;
+  return 0;
+}
+
+// returns bit length of the integer x
+function nbits(x) {
+  var r = 1, t;
+  if((t=x>>>16) != 0) { x = t; r += 16; }
+  if((t=x>>8) != 0) { x = t; r += 8; }
+  if((t=x>>4) != 0) { x = t; r += 4; }
+  if((t=x>>2) != 0) { x = t; r += 2; }
+  if((t=x>>1) != 0) { x = t; r += 1; }
+  return r;
+}
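+// e.g. nbits(0x100) == 9; each shift test halves the remaining window.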
+
+// (public) return the number of bits in "this"
+function bnBitLength() {
+  if(this.t <= 0) return 0;
+  return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM));
+}
+
+// (protected) r = this << n*DB
+function bnpDLShiftTo(n,r) {
+  var i;
+  for(i = this.t-1; i >= 0; --i) r[i+n] = this[i];
+  for(i = n-1; i >= 0; --i) r[i] = 0;
+  r.t = this.t+n;
+  r.s = this.s;
+}
+
+// (protected) r = this >> n*DB
+function bnpDRShiftTo(n,r) {
+  for(var i = n; i < this.t; ++i) r[i-n] = this[i];
+  r.t = Math.max(this.t-n,0);
+  r.s = this.s;
+}
+
+// (protected) r = this << n
+function bnpLShiftTo(n,r) {
+  var bs = n%this.DB;
+  var cbs = this.DB-bs;
+  var bm = (1<<cbs)-1;
+  var ds = Math.floor(n/this.DB), c = (this.s<<bs)&this.DM, i;
+  for(i = this.t-1; i >= 0; --i) {
+    r[i+ds+1] = (this[i]>>cbs)|c;
+    c = (this[i]&bm)<<bs;
+  }
+  for(i = ds-1; i >= 0; --i) r[i] = 0;
+  r[ds] = c;
+  r.t = this.t+ds+1;
+  r.s = this.s;
+  r.clamp();
+}
+
+// (protected) r = this >> n
+function bnpRShiftTo(n,r) {
+  r.s = this.s;
+  var ds = Math.floor(n/this.DB);
+  if(ds >= this.t) { r.t = 0; return; }
+  var bs = n%this.DB;
+  var cbs = this.DB-bs;
+  var bm = (1<<bs)-1;
+  r[0] = this[ds]>>bs;
+  for(var i = ds+1; i < this.t; ++i) {
+    r[i-ds-1] |= (this[i]&bm)<<cbs;
+    r[i-ds] = this[i]>>bs;
+  }
+  if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<<cbs;
+  r.t = this.t-ds;
+  r.clamp();
+}
+
+// (protected) r = this - a
+function bnpSubTo(a,r) {
+  var i = 0, c = 0, m = Math.min(a.t,this.t);
+  while(i < m) {
+    c += this[i]-a[i];
+    r[i++] = c&this.DM;
+    c >>= this.DB;
+  }
+  if(a.t < this.t) {
+    c -= a.s;
+    while(i < this.t) {
+      c += this[i];
+      r[i++] = c&this.DM;
+      c >>= this.DB;
+    }
+    c += this.s;
+  }
+  else {
+    c += this.s;
+    while(i < a.t) {
+      c -= a[i];
+      r[i++] = c&this.DM;
+      c >>= this.DB;
+    }
+    c -= a.s;
+  }
+  r.s = (c<0)?-1:0;
+  if(c < -1) r[i++] = this.DV+c;
+  else if(c > 0) r[i++] = c;
+  r.t = i;
+  r.clamp();
+}
+
+// (protected) r = this * a, r != this,a (HAC 14.12)
+// "this" should be the larger one if appropriate.
+function bnpMultiplyTo(a,r) {
+  var x = this.abs(), y = a.abs();
+  var i = x.t;
+  r.t = i+y.t;
+  while(--i >= 0) r[i] = 0;
+  for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t);
+  r.s = 0;
+  r.clamp();
+  if(this.s != a.s) BigInteger.ZERO.subTo(r,r);
+}
+
+// (protected) r = this^2, r != this (HAC 14.16)
+function bnpSquareTo(r) {
+  var x = this.abs();
+  var i = r.t = 2*x.t;
+  while(--i >= 0) r[i] = 0;
+  for(i = 0; i < x.t-1; ++i) {
+    var c = x.am(i,x[i],r,2*i,0,1);
+    if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
+      r[i+x.t] -= x.DV;
+      r[i+x.t+1] = 1;
+    }
+  }
+  if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1);
+  r.s = 0;
+  r.clamp();
+}
+
+// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
+// r != q, this != m.  q or r may be null.
+function bnpDivRemTo(m,q,r) {
+  var pm = m.abs();
+  if(pm.t <= 0) return;
+  var pt = this.abs();
+  if(pt.t < pm.t) {
+    if(q != null) q.fromInt(0);
+    if(r != null) this.copyTo(r);
+    return;
+  }
+  if(r == null) r = nbi();
+  var y = nbi(), ts = this.s, ms = m.s;
+  var nsh = this.DB-nbits(pm[pm.t-1]);	// normalize modulus
+  if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); }
+  else { pm.copyTo(y); pt.copyTo(r); }
+  var ys = y.t;
+  var y0 = y[ys-1];
+  if(y0 == 0) return;
+  var yt = y0*(1<<this.F1)+((ys>1)?y[ys-2]>>this.F2:0);
+  var d1 = this.FV/yt, d2 = (1<<this.F1)/yt, e = 1<<this.F2;
+  var i = r.t, j = i-ys, t = (q==null)?nbi():q;
+  y.dlShiftTo(j,t);
+  if(r.compareTo(t) >= 0) {
+    r[r.t++] = 1;
+    r.subTo(t,r);
+  }
+  BigInteger.ONE.dlShiftTo(ys,t);
+  t.subTo(y,y);	// "negative" y so we can replace sub with am later
+  while(y.t < ys) y[y.t++] = 0;
+  while(--j >= 0) {
+    // Estimate quotient digit
+    var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2);
+    if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) {	// Try it out
+      y.dlShiftTo(j,t);
+      r.subTo(t,r);
+      while(r[i] < --qd) r.subTo(t,r);
+    }
+  }
+  if(q != null) {
+    r.drShiftTo(ys,q);
+    if(ts != ms) BigInteger.ZERO.subTo(q,q);
+  }
+  r.t = ys;
+  r.clamp();
+  if(nsh > 0) r.rShiftTo(nsh,r);	// Denormalize remainder
+  if(ts < 0) BigInteger.ZERO.subTo(r,r);
+}
+
+// (public) this mod a
+function bnMod(a) {
+  var r = nbi();
+  this.abs().divRemTo(a,null,r);
+  if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
+  return r;
+}
+
+// Modular reduction using "classic" algorithm
+function Classic(m) { this.m = m; }
+function cConvert(x) {
+  if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
+  else return x;
+}
+function cRevert(x) { return x; }
+function cReduce(x) { x.divRemTo(this.m,null,x); }
+function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+Classic.prototype.convert = cConvert;
+Classic.prototype.revert = cRevert;
+Classic.prototype.reduce = cReduce;
+Classic.prototype.mulTo = cMulTo;
+Classic.prototype.sqrTo = cSqrTo;
+
+// (protected) return "-1/this % 2^DB"; useful for Mont. reduction
+// justification:
+//         xy == 1 (mod m)
+//         xy =  1+km
+//   xy(2-xy) = (1+km)(1-km)
+// x[y(2-xy)] = 1-k^2m^2
+// x[y(2-xy)] == 1 (mod m^2)
+// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
+// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
+// JS multiply "overflows" differently from C/C++, so care is needed here.
+function bnpInvDigit() {
+  if(this.t < 1) return 0;
+  var x = this[0];
+  if((x&1) == 0) return 0;
+  var y = x&3;		// y == 1/x mod 2^2
+  y = (y*(2-(x&0xf)*y))&0xf;	// y == 1/x mod 2^4
+  y = (y*(2-(x&0xff)*y))&0xff;	// y == 1/x mod 2^8
+  y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff;	// y == 1/x mod 2^16
+  // last step - calculate inverse mod DV directly;
+  // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
+  y = (y*(2-x*y%this.DV))%this.DV;		// y == 1/x mod 2^dbits
+  // we really want the negative inverse, and -DV < y < DV
+  return (y>0)?this.DV-y:-y;
+}
+
+// Montgomery reduction
+function Montgomery(m) {
+  this.m = m;
+  this.mp = m.invDigit();
+  this.mpl = this.mp&0x7fff;
+  this.mph = this.mp>>15;
+  this.um = (1<<(m.DB-15))-1;
+  this.mt2 = 2*m.t;
+}
+
+// xR mod m
+function montConvert(x) {
+  var r = nbi();
+  x.abs().dlShiftTo(this.m.t,r);
+  r.divRemTo(this.m,null,r);
+  if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);
+  return r;
+}
+
+// x/R mod m
+function montRevert(x) {
+  var r = nbi();
+  x.copyTo(r);
+  this.reduce(r);
+  return r;
+}
+
+// x = x/R mod m (HAC 14.32)
+function montReduce(x) {
+  while(x.t <= this.mt2)	// pad x so am has enough room later
+    x[x.t++] = 0;
+  for(var i = 0; i < this.m.t; ++i) {
+    // faster way of calculating u0 = x[i]*mp mod DV
+    var j = x[i]&0x7fff;
+    var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
+    // use am to combine the multiply-shift-add into one call
+    j = i+this.m.t;
+    x[j] += this.m.am(0,u0,x,i,0,this.m.t);
+    // propagate carry
+    while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; }
+  }
+  x.clamp();
+  x.drShiftTo(this.m.t,x);
+  if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
+}
+
+// r = "x^2/R mod m"; x != r
+function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+// r = "xy/R mod m"; x,y != r
+function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+
+Montgomery.prototype.convert = montConvert;
+Montgomery.prototype.revert = montRevert;
+Montgomery.prototype.reduce = montReduce;
+Montgomery.prototype.mulTo = montMulTo;
+Montgomery.prototype.sqrTo = montSqrTo;
+
+// (protected) true iff this is even
+function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; }
+
+// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
+function bnpExp(e,z) {
+  if(e > 0xffffffff || e < 1) return BigInteger.ONE;
+  var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
+  g.copyTo(r);
+  while(--i >= 0) {
+    z.sqrTo(r,r2);
+    if((e&(1<<i)) > 0) z.mulTo(r2,g,r);
+    else { var t = r; r = r2; r2 = t; }
+  }
+  return z.revert(r);
+}
+
+// (public) this^e % m, 0 <= e < 2^32
+function bnModPowInt(e,m) {
+  var z;
+  if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
+  return this.exp(e,z);
+}
+
+// protected
+BigInteger.prototype.copyTo = bnpCopyTo;
+BigInteger.prototype.fromInt = bnpFromInt;
+BigInteger.prototype.fromString = bnpFromString;
+BigInteger.prototype.clamp = bnpClamp;
+BigInteger.prototype.dlShiftTo = bnpDLShiftTo;
+BigInteger.prototype.drShiftTo = bnpDRShiftTo;
+BigInteger.prototype.lShiftTo = bnpLShiftTo;
+BigInteger.prototype.rShiftTo = bnpRShiftTo;
+BigInteger.prototype.subTo = bnpSubTo;
+BigInteger.prototype.multiplyTo = bnpMultiplyTo;
+BigInteger.prototype.squareTo = bnpSquareTo;
+BigInteger.prototype.divRemTo = bnpDivRemTo;
+BigInteger.prototype.invDigit = bnpInvDigit;
+BigInteger.prototype.isEven = bnpIsEven;
+BigInteger.prototype.exp = bnpExp;
+
+// public
+BigInteger.prototype.toString = bnToString;
+BigInteger.prototype.negate = bnNegate;
+BigInteger.prototype.abs = bnAbs;
+BigInteger.prototype.compareTo = bnCompareTo;
+BigInteger.prototype.bitLength = bnBitLength;
+BigInteger.prototype.mod = bnMod;
+BigInteger.prototype.modPowInt = bnModPowInt;
+
+// "constants"
+BigInteger.ZERO = nbv(0);
+BigInteger.ONE = nbv(1);
+// ==== File: prng4.js
+// prng4.js - uses Arcfour as a PRNG
+
+function Arcfour() {
+  this.i = 0;
+  this.j = 0;
+  this.S = new Array();
+}
+
+// Initialize Arcfour context from key, an array of ints, each in [0..255]
+function ARC4init(key) {
+  var i, j, t;
+  for(i = 0; i < 256; ++i)
+    this.S[i] = i;
+  j = 0;
+  for(i = 0; i < 256; ++i) {
+    j = (j + this.S[i] + key[i % key.length]) & 255;
+    t = this.S[i];
+    this.S[i] = this.S[j];
+    this.S[j] = t;
+  }
+  this.i = 0;
+  this.j = 0;
+}
+
+function ARC4next() {
+  var t;
+  this.i = (this.i + 1) & 255;
+  this.j = (this.j + this.S[this.i]) & 255;
+  t = this.S[this.i];
+  this.S[this.i] = this.S[this.j];
+  this.S[this.j] = t;
+  return this.S[(t + this.S[this.i]) & 255];
+}
+
+Arcfour.prototype.init = ARC4init;
+Arcfour.prototype.next = ARC4next;
+
+// Plug in your RNG constructor here
+function prng_newstate() {
+  return new Arcfour();
+}
+
+// Pool size must be a multiple of 4 and greater than 32.
+// An array of bytes the size of the pool will be passed to init()
+var rng_psize = 256;
+// ==== File: rng.js
+// Random number generator - requires a PRNG backend, e.g. prng4.js
+
+// For best results, put code like
+// <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
+// in your main HTML document.
+
+var rng_state;
+var rng_pool;
+var rng_pptr;
+
+// Mix a 32-bit integer into the pool
+function rng_seed_int(x) {
+  rng_pool[rng_pptr++] ^= x & 255;
+  rng_pool[rng_pptr++] ^= (x >> 8) & 255;
+  rng_pool[rng_pptr++] ^= (x >> 16) & 255;
+  rng_pool[rng_pptr++] ^= (x >> 24) & 255;
+  if(rng_pptr >= rng_psize) rng_pptr -= rng_psize;
+}
+
+// Mix the current time (with milliseconds) into the pool
+function rng_seed_time() {
+  rng_seed_int(new Date().getTime());
+}
+
+// Initialize the pool with junk if needed.
+if(rng_pool == null) {
+  rng_pool = new Array();
+  rng_pptr = 0;
+  var t;
+  if(navigator.appName == "Netscape" && navigator.appVersion < "5" && window.crypto) {
+    // Extract entropy (256 bits) from NS4 RNG if available
+    var z = window.crypto.random(32);
+    for(t = 0; t < z.length; ++t)
+      rng_pool[rng_pptr++] = z.charCodeAt(t) & 255;
+  }
+  while(rng_pptr < rng_psize) {  // extract some randomness from Math.random()
+    t = Math.floor(65536 * Math.random());
+    rng_pool[rng_pptr++] = t >>> 8;
+    rng_pool[rng_pptr++] = t & 255;
+  }
+  rng_pptr = 0;
+  rng_seed_time();
+  //rng_seed_int(window.screenX);
+  //rng_seed_int(window.screenY);
+}
+
+function rng_get_byte() {
+  if(rng_state == null) {
+    rng_seed_time();
+    rng_state = prng_newstate();
+    rng_state.init(rng_pool);
+    for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr)
+      rng_pool[rng_pptr] = 0;
+    rng_pptr = 0;
+    //rng_pool = null;
+  }
+  // TODO: allow reseeding after first request
+  return rng_state.next();
+}
+
+function rng_get_bytes(ba) {
+  var i;
+  for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte();
+}
+
+function SecureRandom() {}
+
+SecureRandom.prototype.nextBytes = rng_get_bytes;
+// ==== File: rsa.js
+// Depends on jsbn.js and rng.js
+
+// Version 1.1: support utf-8 encoding in pkcs1pad2
+
+// convert a (hex) string to a bignum object
+function parseBigInt(str,r) {
+  return new BigInteger(str,r);
+}
+
+function linebrk(s,n) {
+  var ret = "";
+  var i = 0;
+  while(i + n < s.length) {
+    ret += s.substring(i,i+n) + "\n";
+    i += n;
+  }
+  return ret + s.substring(i,s.length);
+}
+
+function byte2Hex(b) {
+  if(b < 0x10)
+    return "0" + b.toString(16);
+  else
+    return b.toString(16);
+}
+
+// PKCS#1 (type 2, random) pad input string s to n bytes, and return a bigint
+function pkcs1pad2(s,n) {
+  if(n < s.length + 11) { // TODO: fix for utf-8
+    alert("Message too long for RSA");
+    return null;
+  }
+  var ba = new Array();
+  var i = s.length - 1;
+  while(i >= 0 && n > 0) {
+    var c = s.charCodeAt(i--);
+    if(c < 128) { // encode using utf-8
+      ba[--n] = c;
+    }
+    else if((c > 127) && (c < 2048)) {
+      ba[--n] = (c & 63) | 128;
+      ba[--n] = (c >> 6) | 192;
+    }
+    else {
+      ba[--n] = (c & 63) | 128;
+      ba[--n] = ((c >> 6) & 63) | 128;
+      ba[--n] = (c >> 12) | 224;
+    }
+  }
+  ba[--n] = 0;
+  var rng = new SecureRandom();
+  var x = new Array();
+  while(n > 2) { // random non-zero pad
+    x[0] = 0;
+    while(x[0] == 0) rng.nextBytes(x);
+    ba[--n] = x[0];
+  }
+  ba[--n] = 2;
+  ba[--n] = 0;
+  return new BigInteger(ba);
+}
+
+// "empty" RSA key constructor
+function RSAKey() {
+  this.n = null;
+  this.e = 0;
+  this.d = null;
+  this.p = null;
+  this.q = null;
+  this.dmp1 = null;
+  this.dmq1 = null;
+  this.coeff = null;
+}
+
+// Set the public key fields N and e from hex strings
+function RSASetPublic(N,E) {
+  if(N != null && E != null && N.length > 0 && E.length > 0) {
+    this.n = parseBigInt(N,16);
+    this.e = parseInt(E,16);
+  }
+  else
+    alert("Invalid RSA public key");
+}
+
+// Perform raw public operation on "x": return x^e (mod n)
+function RSADoPublic(x) {
+  return x.modPowInt(this.e, this.n);
+}
+
+// Return the PKCS#1 RSA encryption of "text" as an even-length hex string
+function RSAEncrypt(text) {
+  var m = pkcs1pad2(text,(this.n.bitLength()+7)>>3);
+  if(m == null) return null;
+  var c = this.doPublic(m);
+  if(c == null) return null;
+  var h = c.toString(16);
+  if((h.length & 1) == 0) return h; else return "0" + h;
+}
+
+// Return the PKCS#1 RSA encryption of "text" as a Base64-encoded string
+//function RSAEncryptB64(text) {
+//  var h = this.encrypt(text);
+//  if(h) return hex2b64(h); else return null;
+//}
+
+// protected
+RSAKey.prototype.doPublic = RSADoPublic;
+
+// public
+RSAKey.prototype.setPublic = RSASetPublic;
+RSAKey.prototype.encrypt = RSAEncrypt;
+//RSAKey.prototype.encrypt_b64 = RSAEncryptB64;
+// ==== File: base64.js
+var b64map="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+var b64pad="=";
+
+function hex2b64(h) {
+  var i;
+  var c;
+  var ret = "";
+  for(i = 0; i+3 <= h.length; i+=3) {
+    c = parseInt(h.substring(i,i+3),16);
+    ret += b64map.charAt(c >> 6) + b64map.charAt(c & 63);
+  }
+  if(i+1 == h.length) {
+    c = parseInt(h.substring(i,i+1),16);
+    ret += b64map.charAt(c << 2);
+  }
+  else if(i+2 == h.length) {
+    c = parseInt(h.substring(i,i+2),16);
+    ret += b64map.charAt(c >> 2) + b64map.charAt((c & 3) << 4);
+  }
+  while((ret.length & 3) > 0) ret += b64pad;
+  return ret;
+}
+
+// convert a base64 string to hex
+function b64tohex(s) {
+  var ret = "";
+  var i;
+  var v;
+  var k = 0; // b64 state, 0-3
+  var slop;
+  for(i = 0; i < s.length; ++i) {
+    if(s.charAt(i) == b64pad) break;
+    v = b64map.indexOf(s.charAt(i));
+    if(v < 0) continue;
+    if(k == 0) {
+      ret += int2char(v >> 2);
+      slop = v & 3;
+      k = 1;
+    }
+    else if(k == 1) {
+      ret += int2char((slop << 2) | (v >> 4));
+      slop = v & 0xf;
+      k = 2;
+    }
+    else if(k == 2) {
+      ret += int2char(slop);
+      ret += int2char(v >> 2);
+      slop = v & 3;
+      k = 3;
+    }
+    else {
+      ret += int2char((slop << 2) | (v >> 4));
+      ret += int2char(v & 0xf);
+      k = 0;
+    }
+  }
+  if(k == 1)
+    ret += int2char(slop << 2);
+  return ret;
+}
+
+// convert a base64 string to a byte/number array
+function b64toBA(s) {
+  //piggyback on b64tohex for now, optimize later
+  var h = b64tohex(s);
+  var i;
+  var a = new Array();
+  for(i = 0; 2*i < h.length; ++i) {
+    a[i] = parseInt(h.substring(2*i,2*i+2),16);
+  }
+  return a;
+}
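+
+// Illustrative round trip: hex2b64("a1b2c3") yields "obLD", and
+// b64tohex("obLD") returns "a1b2c3".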
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.min.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.min.js
new file mode 100644
index 0000000..f74debe
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.min.js
@@ -0,0 +1,24 @@
+var g,j,k=(244837814094590&16777215)==15715070;function l(b,a,c){if(b!=null)if("number"==typeof b)this.ca(b,a,c);else a==null&&"string"!=typeof b?this.z(b,256):this.z(b,a)}function m(){return new l(null)}function o(b,a,c,d,e,f){for(;--f>=0;){var h=a*this[b++]+c[d]+e;e=Math.floor(h/67108864);c[d++]=h&67108863}return e}
+function p(b,a,c,d,e,f){var h=a&32767;for(a=a>>15;--f>=0;){var i=this[b]&32767,n=this[b++]>>15,r=a*i+n*h;i=h*i+((r&32767)<<15)+c[d]+(e&1073741823);e=(i>>>30)+(r>>>15)+a*n+(e>>>30);c[d++]=i&1073741823}return e}function s(b,a,c,d,e,f){var h=a&16383;for(a=a>>14;--f>=0;){var i=this[b]&16383,n=this[b++]>>14,r=a*i+n*h;i=h*i+((r&16383)<<14)+c[d]+e;e=(i>>28)+(r>>14)+a*n;c[d++]=i&268435455}return e}
+if(k&&navigator.appName=="Microsoft Internet Explorer"){l.prototype.i=p;j=30}else if(k&&navigator.appName!="Netscape"){l.prototype.i=o;j=26}else{l.prototype.i=s;j=28}g=l.prototype;g.c=j;g.g=(1<<j)-1;g.h=1<<j;g.K=Math.pow(2,52);g.r=52-j;g.s=2*j-52;var t="0123456789abcdefghijklmnopqrstuvwxyz",u=[],x,z;x="0".charCodeAt(0);for(z=0;z<=9;++z)u[x++]=z;x="a".charCodeAt(0);for(z=10;z<36;++z)u[x++]=z;x="A".charCodeAt(0);for(z=10;z<36;++z)u[x++]=z;function A(b,a){b=u[b.charCodeAt(a)];return b==null?-1:b}
+function B(b){for(var a=this.a-1;a>=0;--a)b[a]=this[a];b.a=this.a;b.b=this.b}function D(b){this.a=1;this.b=b<0?-1:0;if(b>0)this[0]=b;else if(b<-1)this[0]=b+DV;else this.a=0}function E(b){var a=m();a.w(b);return a}
+function F(b,a){if(a==16)a=4;else if(a==8)a=3;else if(a==256)a=8;else if(a==2)a=1;else if(a==32)a=5;else if(a==4)a=2;else{this.da(b,a);return}this.b=this.a=0;for(var c=b.length,d=false,e=0;--c>=0;){var f=a==8?b[c]&255:A(b,c);if(f<0){if(b.charAt(c)=="-")d=true}else{d=false;if(e==0)this[this.a++]=f;else if(e+a>this.c){this[this.a-1]|=(f&(1<<this.c-e)-1)<<e;this[this.a++]=f>>this.c-e}else this[this.a-1]|=f<<e;e+=a;if(e>=this.c)e-=this.c}}if(a==8&&(b[0]&128)!=0){this.b=-1;if(e>0)this[this.a- [...]
+e)-1<<e}this.j();d&&G.f(this,this)}function H(){for(var b=this.b&this.g;this.a>0&&this[this.a-1]==b;)--this.a}
+function I(b){if(this.b<0)return"-"+this.G().toString(b);if(b==16)b=4;else if(b==8)b=3;else if(b==2)b=1;else if(b==32)b=5;else if(b==4)b=2;else return this.ga(b);var a=(1<<b)-1,c,d=false,e="",f=this.a,h=this.c-f*this.c%b;if(f-- >0){if(h<this.c&&(c=this[f]>>h)>0){d=true;e=t.charAt(c)}for(;f>=0;){if(h<b){c=(this[f]&(1<<h)-1)<<b-h;c|=this[--f]>>(h+=this.c-b)}else{c=this[f]>>(h-=b)&a;if(h<=0){h+=this.c;--f}}if(c>0)d=true;if(d)e+=t.charAt(c)}}return d?e:"0"}
+function J(){var b=m();G.f(this,b);return b}function K(){return this.b<0?this.G():this}function L(b){var a=this.b-b.b;if(a!=0)return a;var c=this.a;a=c-b.a;if(a!=0)return a;for(;--c>=0;)if((a=this[c]-b[c])!=0)return a;return 0}function M(b){var a=1,c;if((c=b>>>16)!=0){b=c;a+=16}if((c=b>>8)!=0){b=c;a+=8}if((c=b>>4)!=0){b=c;a+=4}if((c=b>>2)!=0){b=c;a+=2}if(b>>1!=0)a+=1;return a}function N(){if(this.a<=0)return 0;return this.c*(this.a-1)+M(this[this.a-1]^this.b&this.g)}
+function aa(b,a){var c;for(c=this.a-1;c>=0;--c)a[c+b]=this[c];for(c=b-1;c>=0;--c)a[c]=0;a.a=this.a+b;a.b=this.b}function ba(b,a){for(var c=b;c<this.a;++c)a[c-b]=this[c];a.a=Math.max(this.a-b,0);a.b=this.b}function ca(b,a){var c=b%this.c,d=this.c-c,e=(1<<d)-1;b=Math.floor(b/this.c);var f=this.b<<c&this.g,h;for(h=this.a-1;h>=0;--h){a[h+b+1]=this[h]>>d|f;f=(this[h]&e)<<c}for(h=b-1;h>=0;--h)a[h]=0;a[b]=f;a.a=this.a+b+1;a.b=this.b;a.j()}
+function da(b,a){a.b=this.b;var c=Math.floor(b/this.c);if(c>=this.a)a.a=0;else{b=b%this.c;var d=this.c-b,e=(1<<b)-1;a[0]=this[c]>>b;for(var f=c+1;f<this.a;++f){a[f-c-1]|=(this[f]&e)<<d;a[f-c]=this[f]>>b}if(b>0)a[this.a-c-1]|=(this.b&e)<<d;a.a=this.a-c;a.j()}}
+function ea(b,a){for(var c=0,d=0,e=Math.min(b.a,this.a);c<e;){d+=this[c]-b[c];a[c++]=d&this.g;d>>=this.c}if(b.a<this.a){for(d-=b.b;c<this.a;){d+=this[c];a[c++]=d&this.g;d>>=this.c}d+=this.b}else{for(d+=this.b;c<b.a;){d-=b[c];a[c++]=d&this.g;d>>=this.c}d-=b.b}a.b=d<0?-1:0;if(d<-1)a[c++]=this.h+d;else if(d>0)a[c++]=d;a.a=c;a.j()}function fa(b,a){var c=this.abs(),d=b.abs(),e=c.a;for(a.a=e+d.a;--e>=0;)a[e]=0;for(e=0;e<d.a;++e)a[e+c.a]=c.i(0,d[e],a,e,0,c.a);a.b=0;a.j();this.b!=b.b&&G.f(a,a)}
+function ga(b){for(var a=this.abs(),c=b.a=2*a.a;--c>=0;)b[c]=0;for(c=0;c<a.a-1;++c){var d=a.i(c,a[c],b,2*c,0,1);if((b[c+a.a]+=a.i(c+1,2*a[c],b,2*c+1,d,a.a-c-1))>=a.h){b[c+a.a]-=a.h;b[c+a.a+1]=1}}if(b.a>0)b[b.a-1]+=a.i(c,a[c],b,2*c,0,1);b.b=0;b.j()}
+function ha(b,a,c){var d=b.abs();if(!(d.a<=0)){var e=this.abs();if(e.a<d.a){a!=null&&a.w(0);c!=null&&this.m(c)}else{if(c==null)c=m();var f=m(),h=this.b;b=b.b;var i=this.c-M(d[d.a-1]);if(i>0){d.A(i,f);e.A(i,c)}else{d.m(f);e.m(c)}d=f.a;e=f[d-1];if(e!=0){var n=e*(1<<this.r)+(d>1?f[d-2]>>this.s:0),r=this.K/n;n=(1<<this.r)/n;var ia=1<<this.s,w=c.a,y=w-d,q=a==null?m():a;f.o(y,q);if(c.l(q)>=0){c[c.a++]=1;c.f(q,c)}O.o(d,q);for(q.f(f,f);f.a<d;)f[f.a++]=0;for(;--y>=0;){var C=c[--w]==e?this.g:Math. [...]
+r+(c[w-1]+ia)*n);if((c[w]+=f.i(0,C,c,y,0,d))<C){f.o(y,q);for(c.f(q,c);c[w]<--C;)c.f(q,c)}}if(a!=null){c.u(d,a);h!=b&&G.f(a,a)}c.a=d;c.j();i>0&&c.W(i,c);h<0&&G.f(c,c)}}}}function ja(b){var a=m();this.abs().p(b,null,a);this.b<0&&a.l(G)>0&&b.f(a,a);return a}function P(b){this.d=b}function ka(b){return b.b<0||b.l(this.d)>=0?b.R(this.d):b}function la(b){return b}function ma(b){b.p(this.d,null,b)}function na(b,a,c){b.F(a,c);this.reduce(c)}function oa(b,a){b.J(a);this.reduce(a)}g=P.prototype;g.t=ka;
+g.H=la;g.reduce=ma;g.D=na;g.I=oa;function pa(){if(this.a<1)return 0;var b=this[0];if((b&1)==0)return 0;var a=b&3;a=a*(2-(b&15)*a)&15;a=a*(2-(b&255)*a)&255;a=a*(2-((b&65535)*a&65535))&65535;a=a*(2-b*a%this.h)%this.h;return a>0?this.h-a:-a}function Q(b){this.d=b;this.B=b.P();this.C=this.B&32767;this.T=this.B>>15;this.Y=(1<<b.c-15)-1;this.U=2*b.a}function qa(b){var a=m();b.abs().o(this.d.a,a);a.p(this.d,null,a);b.b<0&&a.l(G)>0&&this.d.f(a,a);return a}
+function ra(b){var a=m();b.m(a);this.reduce(a);return a}function sa(b){for(;b.a<=this.U;)b[b.a++]=0;for(var a=0;a<this.d.a;++a){var c=b[a]&32767,d=c*this.C+((c*this.T+(b[a]>>15)*this.C&this.Y)<<15)&b.g;c=a+this.d.a;for(b[c]+=this.d.i(0,d,b,a,0,this.d.a);b[c]>=b.h;){b[c]-=b.h;b[++c]++}}b.j();b.u(this.d.a,b);b.l(this.d)>=0&&b.f(this.d,b)}function ta(b,a){b.J(a);this.reduce(a)}function ua(b,a,c){b.F(a,c);this.reduce(c)}g=Q.prototype;g.t=qa;g.H=ra;g.reduce=sa;g.D=ua;g.I=ta;
+function va(){return(this.a>0?this[0]&1:this.b)==0}function wa(b,a){if(b>4294967295||b<1)return O;var c=m(),d=m(),e=a.t(this),f=M(b)-1;for(e.m(c);--f>=0;){a.I(c,d);if((b&1<<f)>0)a.D(d,e,c);else{var h=c;c=d;d=h}}return a.H(c)}function xa(b,a){a=b<256||a.Q()?new P(a):new Q(a);return this.exp(b,a)}g=l.prototype;g.m=B;g.w=D;g.z=F;g.j=H;g.o=aa;g.u=ba;g.A=ca;g.W=da;g.f=ea;g.F=fa;g.J=ga;g.p=ha;g.P=pa;g.Q=va;g.exp=wa;g.toString=I;g.G=J;g.abs=K;g.l=L;g.L=N;g.R=ja;g.S=xa;var G=E(0),O=E(1);
+function R(){this.n=this.k=0;this.e=[]}function ya(b){var a,c,d;for(a=0;a<256;++a)this.e[a]=a;for(a=c=0;a<256;++a){c=c+this.e[a]+b[a%b.length]&255;d=this.e[a];this.e[a]=this.e[c];this.e[c]=d}this.n=this.k=0}function za(){var b;this.k=this.k+1&255;this.n=this.n+this.e[this.k]&255;b=this.e[this.k];this.e[this.k]=this.e[this.n];this.e[this.n]=b;return this.e[b+this.e[this.k]&255]}R.prototype.O=ya;R.prototype.next=za;var S=256,T,U,V;
+function W(b){U[V++]^=b&255;U[V++]^=b>>8&255;U[V++]^=b>>16&255;U[V++]^=b>>24&255;if(V>=S)V-=S}if(U==null){U=[];V=0;var X;if(navigator.appName=="Netscape"&&navigator.appVersion<"5"&&window.crypto){var Y=window.crypto.random(32);for(X=0;X<Y.length;++X)U[V++]=Y.charCodeAt(X)&255}for(;V<S;){X=Math.floor(65536*Math.random());U[V++]=X>>>8;U[V++]=X&255}V=0;W((new Date).getTime())}function Aa(){if(T==null){W((new Date).getTime());T=new R;T.O(U);for(V=0;V<U.length;++V)U[V]=0;V=0}return T.next()}
+function Ba(b){var a;for(a=0;a<b.length;++a)b[a]=Aa()}function Z(){}Z.prototype.V=Ba;function ZZZ(){this.q=null;this.v=0;this.Z=this.ba=this.aa=this.fa=this.ea=this.ZZZ=null}function Ca(b,a){if(b!=null&&a!=null&&b.length>0&&a.length>0){this.q=new l(b,16);this.v=parseInt(a,16)}else alert("Invalid RSA public key")}function Da(b){return b.S(this.v,this.q)}
+function Ea(b){var a;a=this.q.L()+7>>3;if(a<b.length+11){alert("Message too long for RSA");a=null}else{for(var c=[],d=b.length-1;d>=0&&a>0;){var e=b.charCodeAt(d--);if(e<128)c[--a]=e;else if(e>127&&e<2048){c[--a]=e&63|128;c[--a]=e>>6|192}else{c[--a]=e&63|128;c[--a]=e>>6&63|128;c[--a]=e>>12|224}}c[--a]=0;b=new Z;for(d=[];a>2;){for(d[0]=0;d[0]==0;)b.V(d);c[--a]=d[0]}c[--a]=2;c[--a]=0;a=new l(c)}if(a==null)return null;a=this.M(a);if(a==null)return null;a=a.toString(16);return(a.length&1)==0 [...]
+ZZZ.prototype.M=Da;ZZZ.prototype.X=Ca;ZZZ.prototype.N=Ea;window.encrypt=function(b,a,c){var d=new ZZZ;d.X(c,a);b=d.N(b);d="";for(a=0;a+3<=b.length;a+=3){c=parseInt(b.substring(a,a+3),16);d+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(c>>6)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(c&63)}if(a+1==b.length){c=parseInt(b.substring(a,a+1),16);d+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(c<<2)}else if [...]
+2)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt((c&3)<<4)}for(;(d.length&3)>0;)d+="=";return d};
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/base64.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/base64.js
new file mode 100644
index 0000000..77b3868
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/base64.js
@@ -0,0 +1,73 @@
+// ==== File: base64.js
+var b64map="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+var b64pad="=";
+
+function hex2b64(h) {
+  var i;
+  var c;
+  var ret = "";
+  for(i = 0; i+3 <= h.length; i+=3) {
+    c = parseInt(h.substring(i,i+3),16);
+    ret += b64map.charAt(c >> 6) + b64map.charAt(c & 63);
+  }
+  if(i+1 == h.length) {
+    c = parseInt(h.substring(i,i+1),16);
+    ret += b64map.charAt(c << 2);
+  }
+  else if(i+2 == h.length) {
+    c = parseInt(h.substring(i,i+2),16);
+    ret += b64map.charAt(c >> 2) + b64map.charAt((c & 3) << 4);
+  }
+  while((ret.length & 3) > 0) ret += b64pad;
+  return ret;
+}
+
+// convert a base64 string to hex
+function b64tohex(s) {
+  var ret = "";
+  var i;
+  var v;
+  var k = 0; // b64 state, 0-3
+  var slop;
+  for(i = 0; i < s.length; ++i) {
+    if(s.charAt(i) == b64pad) break;
+    v = b64map.indexOf(s.charAt(i));
+    if(v < 0) continue;
+    if(k == 0) {
+      ret += int2char(v >> 2);
+      slop = v & 3;
+      k = 1;
+    }
+    else if(k == 1) {
+      ret += int2char((slop << 2) | (v >> 4));
+      slop = v & 0xf;
+      k = 2;
+    }
+    else if(k == 2) {
+      ret += int2char(slop);
+      ret += int2char(v >> 2);
+      slop = v & 3;
+      k = 3;
+    }
+    else {
+      ret += int2char((slop << 2) | (v >> 4));
+      ret += int2char(v & 0xf);
+      k = 0;
+    }
+  }
+  if(k == 1)
+    ret += int2char(slop << 2);
+  return ret;
+}
+
+// convert a base64 string to a byte/number array
+function b64toBA(s) {
+  //piggyback on b64tohex for now, optimize later
+  var h = b64tohex(s);
+  var i;
+  var a = new Array();
+  for(i = 0; 2*i < h.length; ++i) {
+    a[i] = parseInt(h.substring(2*i,2*i+2),16);
+  }
+  return a;
+}
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/jsbn.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/jsbn.js
new file mode 100644
index 0000000..801841d
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/jsbn.js
@@ -0,0 +1,562 @@
+// Downloaded from http://www-cs-students.stanford.edu/~tjw/ at Tue Nov 30 00:42:57 PST 2010
+// ==== File: jsbn.js
+// Copyright (c) 2005  Tom Wu
+// All Rights Reserved.
+// See "LICENSE" for details.
+
+// Basic JavaScript BN library - subset useful for RSA encryption.
+
+// Bits per digit
+var dbits;
+
+// JavaScript engine analysis
+var canary = 0xdeadbeefcafe;
+var j_lm = ((canary&0xffffff)==0xefcafe);
+
+// (public) Constructor
+function BigInteger(a,b,c) {
+  if(a != null)
+    if("number" == typeof a) this.fromNumber(a,b,c);
+    else if(b == null && "string" != typeof a) this.fromString(a,256);
+    else this.fromString(a,b);
+}
+
+// return new, unset BigInteger
+function nbi() { return new BigInteger(null); }
+
+// am: Compute w_j += (x*this_i), propagate carries,
+// c is initial carry, returns final carry.
+// c < 3*dvalue, x < 2*dvalue, this_i < dvalue
+// We need to select the fastest one that works in this environment.
+
+// am1: use a single mult and divide to get the high bits,
+// max digit bits should be 26 because
+// max internal value = 2*dvalue^2-2*dvalue (< 2^53)
+function am1(i,x,w,j,c,n) {
+  while(--n >= 0) {
+    var v = x*this[i++]+w[j]+c;
+    c = Math.floor(v/0x4000000);
+    w[j++] = v&0x3ffffff;
+  }
+  return c;
+}
+// am2 avoids a big mult-and-extract completely.
+// Max digit bits should be <= 30 because we do bitwise ops
+// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
+function am2(i,x,w,j,c,n) {
+  var xl = x&0x7fff, xh = x>>15;
+  while(--n >= 0) {
+    var l = this[i]&0x7fff;
+    var h = this[i++]>>15;
+    var m = xh*l+h*xl;
+    l = xl*l+((m&0x7fff)<<15)+w[j]+(c&0x3fffffff);
+    c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);
+    w[j++] = l&0x3fffffff;
+  }
+  return c;
+}
+// Alternately, set max digit bits to 28 since some
+// browsers slow down when dealing with 32-bit numbers.
+function am3(i,x,w,j,c,n) {
+  var xl = x&0x3fff, xh = x>>14;
+  while(--n >= 0) {
+    var l = this[i]&0x3fff;
+    var h = this[i++]>>14;
+    var m = xh*l+h*xl;
+    l = xl*l+((m&0x3fff)<<14)+w[j]+c;
+    c = (l>>28)+(m>>14)+xh*h;
+    w[j++] = l&0xfffffff;
+  }
+  return c;
+}
+if(j_lm && (navigator.appName == "Microsoft Internet Explorer")) {
+  BigInteger.prototype.am = am2;
+  dbits = 30;
+}
+else if(j_lm && (navigator.appName != "Netscape")) {
+  BigInteger.prototype.am = am1;
+  dbits = 26;
+}
+else { // Mozilla/Netscape seems to prefer am3
+  BigInteger.prototype.am = am3;
+  dbits = 28;
+}
+
+BigInteger.prototype.DB = dbits;
+BigInteger.prototype.DM = ((1<<dbits)-1);
+BigInteger.prototype.DV = (1<<dbits);
+
+var BI_FP = 52;
+BigInteger.prototype.FV = Math.pow(2,BI_FP);
+BigInteger.prototype.F1 = BI_FP-dbits;
+BigInteger.prototype.F2 = 2*dbits-BI_FP;
+
+// Digit conversions
+var BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz";
+var BI_RC = new Array();
+var rr,vv;
+rr = "0".charCodeAt(0);
+for(vv = 0; vv <= 9; ++vv) BI_RC[rr++] = vv;
+rr = "a".charCodeAt(0);
+for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
+rr = "A".charCodeAt(0);
+for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
+
+function int2char(n) { return BI_RM.charAt(n); }
+function intAt(s,i) {
+  var c = BI_RC[s.charCodeAt(i)];
+  return (c==null)?-1:c;
+}
+
+// (protected) copy this to r
+function bnpCopyTo(r) {
+  for(var i = this.t-1; i >= 0; --i) r[i] = this[i];
+  r.t = this.t;
+  r.s = this.s;
+}
+
+// (protected) set from integer value x, -DV <= x < DV
+function bnpFromInt(x) {
+  this.t = 1;
+  this.s = (x<0)?-1:0;
+  if(x > 0) this[0] = x;
+  else if(x < -1) this[0] = x+this.DV;
+  else this.t = 0;
+}
+
+// return bigint initialized to value
+function nbv(i) { var r = nbi(); r.fromInt(i); return r; }
+
+// (protected) set from string and radix
+function bnpFromString(s,b) {
+  var k;
+  if(b == 16) k = 4;
+  else if(b == 8) k = 3;
+  else if(b == 256) k = 8; // byte array
+  else if(b == 2) k = 1;
+  else if(b == 32) k = 5;
+  else if(b == 4) k = 2;
+  else { this.fromRadix(s,b); return; }
+  this.t = 0;
+  this.s = 0;
+  var i = s.length, mi = false, sh = 0;
+  while(--i >= 0) {
+    var x = (k==8)?s[i]&0xff:intAt(s,i);
+    if(x < 0) {
+      if(s.charAt(i) == "-") mi = true;
+      continue;
+    }
+    mi = false;
+    if(sh == 0)
+      this[this.t++] = x;
+    else if(sh+k > this.DB) {
+      this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<<sh;
+      this[this.t++] = (x>>(this.DB-sh));
+    }
+    else
+      this[this.t-1] |= x<<sh;
+    sh += k;
+    if(sh >= this.DB) sh -= this.DB;
+  }
+  if(k == 8 && (s[0]&0x80) != 0) {
+    this.s = -1;
+    if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)<<sh;
+  }
+  this.clamp();
+  if(mi) BigInteger.ZERO.subTo(this,this);
+}
+
+// (protected) clamp off excess high words
+function bnpClamp() {
+  var c = this.s&this.DM;
+  while(this.t > 0 && this[this.t-1] == c) --this.t;
+}
+
+// (public) return string representation in given radix
+function bnToString(b) {
+  if(this.s < 0) return "-"+this.negate().toString(b);
+  var k;
+  if(b == 16) k = 4;
+  else if(b == 8) k = 3;
+  else if(b == 2) k = 1;
+  else if(b == 32) k = 5;
+  else if(b == 4) k = 2;
+  else return this.toRadix(b);
+  var km = (1<<k)-1, d, m = false, r = "", i = this.t;
+  var p = this.DB-(i*this.DB)%k;
+  if(i-- > 0) {
+    if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); }
+    while(i >= 0) {
+      if(p < k) {
+        d = (this[i]&((1<<p)-1))<<(k-p);
+        d |= this[--i]>>(p+=this.DB-k);
+      }
+      else {
+        d = (this[i]>>(p-=k))&km;
+        if(p <= 0) { p += this.DB; --i; }
+      }
+      if(d > 0) m = true;
+      if(m) r += int2char(d);
+    }
+  }
+  return m?r:"0";
+}
+
+// (public) -this
+function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }
+
+// (public) |this|
+function bnAbs() { return (this.s<0)?this.negate():this; }
+
+// (public) return + if this > a, - if this < a, 0 if equal
+function bnCompareTo(a) {
+  var r = this.s-a.s;
+  if(r != 0) return r;
+  var i = this.t;
+  r = i-a.t;
+  if(r != 0) return r;
+  while(--i >= 0) if((r=this[i]-a[i]) != 0) return r;
+  return 0;
+}
+
+// returns bit length of the integer x
+function nbits(x) {
+  var r = 1, t;
+  if((t=x>>>16) != 0) { x = t; r += 16; }
+  if((t=x>>8) != 0) { x = t; r += 8; }
+  if((t=x>>4) != 0) { x = t; r += 4; }
+  if((t=x>>2) != 0) { x = t; r += 2; }
+  if((t=x>>1) != 0) { x = t; r += 1; }
+  return r;
+}
+
+// (public) return the number of bits in "this"
+function bnBitLength() {
+  if(this.t <= 0) return 0;
+  return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM));
+}
+
+// (protected) r = this << n*DB
+function bnpDLShiftTo(n,r) {
+  var i;
+  for(i = this.t-1; i >= 0; --i) r[i+n] = this[i];
+  for(i = n-1; i >= 0; --i) r[i] = 0;
+  r.t = this.t+n;
+  r.s = this.s;
+}
+
+// (protected) r = this >> n*DB
+function bnpDRShiftTo(n,r) {
+  for(var i = n; i < this.t; ++i) r[i-n] = this[i];
+  r.t = Math.max(this.t-n,0);
+  r.s = this.s;
+}
+
+// (protected) r = this << n
+function bnpLShiftTo(n,r) {
+  var bs = n%this.DB;
+  var cbs = this.DB-bs;
+  var bm = (1<<cbs)-1;
+  var ds = Math.floor(n/this.DB), c = (this.s<<bs)&this.DM, i;
+  for(i = this.t-1; i >= 0; --i) {
+    r[i+ds+1] = (this[i]>>cbs)|c;
+    c = (this[i]&bm)<<bs;
+  }
+  for(i = ds-1; i >= 0; --i) r[i] = 0;
+  r[ds] = c;
+  r.t = this.t+ds+1;
+  r.s = this.s;
+  r.clamp();
+}
+
+// (protected) r = this >> n
+function bnpRShiftTo(n,r) {
+  r.s = this.s;
+  var ds = Math.floor(n/this.DB);
+  if(ds >= this.t) { r.t = 0; return; }
+  var bs = n%this.DB;
+  var cbs = this.DB-bs;
+  var bm = (1<<bs)-1;
+  r[0] = this[ds]>>bs;
+  for(var i = ds+1; i < this.t; ++i) {
+    r[i-ds-1] |= (this[i]&bm)<<cbs;
+    r[i-ds] = this[i]>>bs;
+  }
+  if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<<cbs;
+  r.t = this.t-ds;
+  r.clamp();
+}
+
+// (protected) r = this - a
+function bnpSubTo(a,r) {
+  var i = 0, c = 0, m = Math.min(a.t,this.t);
+  while(i < m) {
+    c += this[i]-a[i];
+    r[i++] = c&this.DM;
+    c >>= this.DB;
+  }
+  if(a.t < this.t) {
+    c -= a.s;
+    while(i < this.t) {
+      c += this[i];
+      r[i++] = c&this.DM;
+      c >>= this.DB;
+    }
+    c += this.s;
+  }
+  else {
+    c += this.s;
+    while(i < a.t) {
+      c -= a[i];
+      r[i++] = c&this.DM;
+      c >>= this.DB;
+    }
+    c -= a.s;
+  }
+  r.s = (c<0)?-1:0;
+  if(c < -1) r[i++] = this.DV+c;
+  else if(c > 0) r[i++] = c;
+  r.t = i;
+  r.clamp();
+}
+
+// (protected) r = this * a, r != this,a (HAC 14.12)
+// "this" should be the larger one if appropriate.
+function bnpMultiplyTo(a,r) {
+  var x = this.abs(), y = a.abs();
+  var i = x.t;
+  r.t = i+y.t;
+  while(--i >= 0) r[i] = 0;
+  for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t);
+  r.s = 0;
+  r.clamp();
+  if(this.s != a.s) BigInteger.ZERO.subTo(r,r);
+}
+
+// (protected) r = this^2, r != this (HAC 14.16)
+function bnpSquareTo(r) {
+  var x = this.abs();
+  var i = r.t = 2*x.t;
+  while(--i >= 0) r[i] = 0;
+  for(i = 0; i < x.t-1; ++i) {
+    var c = x.am(i,x[i],r,2*i,0,1);
+    if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
+      r[i+x.t] -= x.DV;
+      r[i+x.t+1] = 1;
+    }
+  }
+  if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1);
+  r.s = 0;
+  r.clamp();
+}
+
+// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
+// r != q, this != m.  q or r may be null.
+function bnpDivRemTo(m,q,r) {
+  var pm = m.abs();
+  if(pm.t <= 0) return;
+  var pt = this.abs();
+  if(pt.t < pm.t) {
+    if(q != null) q.fromInt(0);
+    if(r != null) this.copyTo(r);
+    return;
+  }
+  if(r == null) r = nbi();
+  var y = nbi(), ts = this.s, ms = m.s;
+  var nsh = this.DB-nbits(pm[pm.t-1]);	// normalize modulus
+  if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); }
+  else { pm.copyTo(y); pt.copyTo(r); }
+  var ys = y.t;
+  var y0 = y[ys-1];
+  if(y0 == 0) return;
+  var yt = y0*(1<<this.F1)+((ys>1)?y[ys-2]>>this.F2:0);
+  var d1 = this.FV/yt, d2 = (1<<this.F1)/yt, e = 1<<this.F2;
+  var i = r.t, j = i-ys, t = (q==null)?nbi():q;
+  y.dlShiftTo(j,t);
+  if(r.compareTo(t) >= 0) {
+    r[r.t++] = 1;
+    r.subTo(t,r);
+  }
+  BigInteger.ONE.dlShiftTo(ys,t);
+  t.subTo(y,y);	// "negative" y so we can replace sub with am later
+  while(y.t < ys) y[y.t++] = 0;
+  while(--j >= 0) {
+    // Estimate quotient digit
+    var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2);
+    if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) {	// Try it out
+      y.dlShiftTo(j,t);
+      r.subTo(t,r);
+      while(r[i] < --qd) r.subTo(t,r);
+    }
+  }
+  if(q != null) {
+    r.drShiftTo(ys,q);
+    if(ts != ms) BigInteger.ZERO.subTo(q,q);
+  }
+  r.t = ys;
+  r.clamp();
+  if(nsh > 0) r.rShiftTo(nsh,r);	// Denormalize remainder
+  if(ts < 0) BigInteger.ZERO.subTo(r,r);
+}
+
+// (public) this mod a
+function bnMod(a) {
+  var r = nbi();
+  this.abs().divRemTo(a,null,r);
+  if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
+  return r;
+}
+
+// Modular reduction using "classic" algorithm
+function Classic(m) { this.m = m; }
+function cConvert(x) {
+  if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
+  else return x;
+}
+function cRevert(x) { return x; }
+function cReduce(x) { x.divRemTo(this.m,null,x); }
+function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+Classic.prototype.convert = cConvert;
+Classic.prototype.revert = cRevert;
+Classic.prototype.reduce = cReduce;
+Classic.prototype.mulTo = cMulTo;
+Classic.prototype.sqrTo = cSqrTo;
+
+// (protected) return "-1/this % 2^DB"; useful for Mont. reduction
+// justification:
+//         xy == 1 (mod m)
+//         xy =  1+km
+//   xy(2-xy) = (1+km)(1-km)
+// x[y(2-xy)] = 1-k^2m^2
+// x[y(2-xy)] == 1 (mod m^2)
+// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
+// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
+// JS multiply "overflows" differently from C/C++, so care is needed here.
+function bnpInvDigit() {
+  if(this.t < 1) return 0;
+  var x = this[0];
+  if((x&1) == 0) return 0;
+  var y = x&3;		// y == 1/x mod 2^2
+  y = (y*(2-(x&0xf)*y))&0xf;	// y == 1/x mod 2^4
+  y = (y*(2-(x&0xff)*y))&0xff;	// y == 1/x mod 2^8
+  y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff;	// y == 1/x mod 2^16
+  // last step - calculate inverse mod DV directly;
+  // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
+  y = (y*(2-x*y%this.DV))%this.DV;		// y == 1/x mod 2^dbits
+  // we really want the negative inverse, and -DV < y < DV
+  return (y>0)?this.DV-y:-y;
+}
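+// Worked example (editor's note): for x = 3, y starts at 3 (3*3 == 9 == 1 mod 4);
+// the next step gives y = (3*(2-9))&0xf == 11, and 3*11 == 33 == 1 mod 16; each
+// iteration doubles the number of correct low bits (Newton/Hensel lifting).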
+
+// Montgomery reduction
+function Montgomery(m) {
+  this.m = m;
+  this.mp = m.invDigit();
+  this.mpl = this.mp&0x7fff;
+  this.mph = this.mp>>15;
+  this.um = (1<<(m.DB-15))-1;
+  this.mt2 = 2*m.t;
+}
+
+// xR mod m
+function montConvert(x) {
+  var r = nbi();
+  x.abs().dlShiftTo(this.m.t,r);
+  r.divRemTo(this.m,null,r);
+  if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);
+  return r;
+}
+
+// x/R mod m
+function montRevert(x) {
+  var r = nbi();
+  x.copyTo(r);
+  this.reduce(r);
+  return r;
+}
+
+// x = x/R mod m (HAC 14.32)
+function montReduce(x) {
+  while(x.t <= this.mt2)	// pad x so am has enough room later
+    x[x.t++] = 0;
+  for(var i = 0; i < this.m.t; ++i) {
+    // faster way of calculating u0 = x[i]*mp mod DV
+    var j = x[i]&0x7fff;
+    var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
+    // use am to combine the multiply-shift-add into one call
+    j = i+this.m.t;
+    x[j] += this.m.am(0,u0,x,i,0,this.m.t);
+    // propagate carry
+    while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; }
+  }
+  x.clamp();
+  x.drShiftTo(this.m.t,x);
+  if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
+}
+
+// r = "x^2/R mod m"; x != r
+function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+// r = "xy/R mod m"; x,y != r
+function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+
+Montgomery.prototype.convert = montConvert;
+Montgomery.prototype.revert = montRevert;
+Montgomery.prototype.reduce = montReduce;
+Montgomery.prototype.mulTo = montMulTo;
+Montgomery.prototype.sqrTo = montSqrTo;
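+
+// Example (editor's sketch): using Montgomery form directly, as bnpExp does when
+// modPowInt selects z = new Montgomery(m) for an odd modulus:
+//   var m = nbv(227), z = new Montgomery(m);
+//   var xR = z.convert(nbv(5));      // 5*R mod m
+//   var r = nbi(); z.sqrTo(xR, r);   // (5*5)*R mod m
+//   z.revert(r).toString(16);        // "19" (hex), i.e. 25 == 5^2 mod 227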
+
+// (protected) true iff this is even
+function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; }
+
+// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
+function bnpExp(e,z) {
+  if(e > 0xffffffff || e < 1) return BigInteger.ONE;
+  var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
+  g.copyTo(r);
+  while(--i >= 0) {
+    z.sqrTo(r,r2);
+    if((e&(1<<i)) > 0) z.mulTo(r2,g,r);
+    else { var t = r; r = r2; r2 = t; }
+  }
+  return z.revert(r);
+}
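+// E.g. e = 5 == 0b101: r starts as g, then (sqr), (sqr, mul) yields g^5,
+// scanning the exponent's bits from the most significant end downwards.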
+
+// (public) this^e % m, 0 <= e < 2^32
+function bnModPowInt(e,m) {
+  var z;
+  if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
+  return this.exp(e,z);
+}
+
+// protected
+BigInteger.prototype.copyTo = bnpCopyTo;
+BigInteger.prototype.fromInt = bnpFromInt;
+BigInteger.prototype.fromString = bnpFromString;
+BigInteger.prototype.clamp = bnpClamp;
+BigInteger.prototype.dlShiftTo = bnpDLShiftTo;
+BigInteger.prototype.drShiftTo = bnpDRShiftTo;
+BigInteger.prototype.lShiftTo = bnpLShiftTo;
+BigInteger.prototype.rShiftTo = bnpRShiftTo;
+BigInteger.prototype.subTo = bnpSubTo;
+BigInteger.prototype.multiplyTo = bnpMultiplyTo;
+BigInteger.prototype.squareTo = bnpSquareTo;
+BigInteger.prototype.divRemTo = bnpDivRemTo;
+BigInteger.prototype.invDigit = bnpInvDigit;
+BigInteger.prototype.isEven = bnpIsEven;
+BigInteger.prototype.exp = bnpExp;
+
+// public
+BigInteger.prototype.toString = bnToString;
+BigInteger.prototype.negate = bnNegate;
+BigInteger.prototype.abs = bnAbs;
+BigInteger.prototype.compareTo = bnCompareTo;
+BigInteger.prototype.bitLength = bnBitLength;
+BigInteger.prototype.mod = bnMod;
+BigInteger.prototype.modPowInt = bnModPowInt;
+
+// "constants"
+BigInteger.ZERO = nbv(0);
+BigInteger.ONE = nbv(1);
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/prng4.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/prng4.js
new file mode 100644
index 0000000..5cd6812
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/prng4.js
@@ -0,0 +1,47 @@
+// ==== File: prng4.js
+// prng4.js - uses Arcfour as a PRNG
+
+function Arcfour() {
+  this.i = 0;
+  this.j = 0;
+  this.S = new Array();
+}
+
+// Initialize arcfour context from key, an array of ints, each from [0..255]
+function ARC4init(key) {
+  var i, j, t;
+  for(i = 0; i < 256; ++i)
+    this.S[i] = i;
+  j = 0;
+  for(i = 0; i < 256; ++i) {
+    j = (j + this.S[i] + key[i % key.length]) & 255;
+    t = this.S[i];
+    this.S[i] = this.S[j];
+    this.S[j] = t;
+  }
+  this.i = 0;
+  this.j = 0;
+}
+
+function ARC4next() {
+  var t;
+  this.i = (this.i + 1) & 255;
+  this.j = (this.j + this.S[this.i]) & 255;
+  t = this.S[this.i];
+  this.S[this.i] = this.S[this.j];
+  this.S[this.j] = t;
+  return this.S[(t + this.S[this.i]) & 255];
+}
+
+Arcfour.prototype.init = ARC4init;
+Arcfour.prototype.next = ARC4next;
+
+// Plug in your RNG constructor here
+function prng_newstate() {
+  return new Arcfour();
+}
+
+// Pool size must be a multiple of 4 and greater than 32.
+// An array of bytes the size of the pool will be passed to init()
+var rng_psize = 256;
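+
+// Example (editor's sketch): drawing raw Arcfour keystream bytes.
+//   var prng = prng_newstate();
+//   prng.init([1, 2, 3, 4]);   // key: ints in [0..255]; rng.js seeds a 256-byte pool
+//   var b = prng.next();       // next keystream byte in [0..255]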
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/rng.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/rng.js
new file mode 100644
index 0000000..24bae0f
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/rng.js
@@ -0,0 +1,70 @@
+// ==== File: rng.js
+// Random number generator - requires a PRNG backend, e.g. prng4.js
+
+// For best results, put code like
+// <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
+// in your main HTML document.
+
+var rng_state;
+var rng_pool;
+var rng_pptr;
+
+// Mix in a 32-bit integer into the pool
+function rng_seed_int(x) {
+  rng_pool[rng_pptr++] ^= x & 255;
+  rng_pool[rng_pptr++] ^= (x >> 8) & 255;
+  rng_pool[rng_pptr++] ^= (x >> 16) & 255;
+  rng_pool[rng_pptr++] ^= (x >> 24) & 255;
+  if(rng_pptr >= rng_psize) rng_pptr -= rng_psize;
+}
+
+// Mix in the current time (w/milliseconds) into the pool
+function rng_seed_time() {
+  rng_seed_int(new Date().getTime());
+}
+
+// Initialize the pool with junk if needed.
+if(rng_pool == null) {
+  rng_pool = new Array();
+  rng_pptr = 0;
+  var t;
+  if(navigator.appName == "Netscape" && navigator.appVersion < "5" && window.crypto) {
+    // Extract entropy (256 bits) from NS4 RNG if available
+    var z = window.crypto.random(32);
+    for(t = 0; t < z.length; ++t)
+      rng_pool[rng_pptr++] = z.charCodeAt(t) & 255;
+  }
+  while(rng_pptr < rng_psize) {  // extract some randomness from Math.random()
+    t = Math.floor(65536 * Math.random());
+    rng_pool[rng_pptr++] = t >>> 8;
+    rng_pool[rng_pptr++] = t & 255;
+  }
+  rng_pptr = 0;
+  rng_seed_time();
+  //rng_seed_int(window.screenX);
+  //rng_seed_int(window.screenY);
+}
+
+function rng_get_byte() {
+  if(rng_state == null) {
+    rng_seed_time();
+    rng_state = prng_newstate();
+    rng_state.init(rng_pool);
+    for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr)
+      rng_pool[rng_pptr] = 0;
+    rng_pptr = 0;
+    //rng_pool = null;
+  }
+  // TODO: allow reseeding after first request
+  return rng_state.next();
+}
+
+function rng_get_bytes(ba) {
+  var i;
+  for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte();
+}
+
+function SecureRandom() {}
+
+SecureRandom.prototype.nextBytes = rng_get_bytes;
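+
+// Example (editor's sketch): filling a byte array, as pkcs1pad2 does for padding.
+//   var ba = new Array(16);
+//   new SecureRandom().nextBytes(ba);   // ba now holds 16 bytes from the pool-keyed PRNG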
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/rsa.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/rsa.js
new file mode 100644
index 0000000..b2e37c3
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/rsa.js
@@ -0,0 +1,114 @@
+// ==== File: rsa.js
+// Depends on jsbn.js and rng.js
+
+// Version 1.1: support utf-8 encoding in pkcs1pad2
+
+// convert a (hex) string to a bignum object
+function parseBigInt(str,r) {
+  return new BigInteger(str,r);
+}
+
+function linebrk(s,n) {
+  var ret = "";
+  var i = 0;
+  while(i + n < s.length) {
+    ret += s.substring(i,i+n) + "\n";
+    i += n;
+  }
+  return ret + s.substring(i,s.length);
+}
+
+function byte2Hex(b) {
+  if(b < 0x10)
+    return "0" + b.toString(16);
+  else
+    return b.toString(16);
+}
+
+// PKCS#1 (type 2, random) pad input string s to n bytes, and return a bigint
+function pkcs1pad2(s,n) {
+  if(n < s.length + 11) { // TODO: fix for utf-8
+    alert("Message too long for RSA");
+    return null;
+  }
+  var ba = new Array();
+  var i = s.length - 1;
+  while(i >= 0 && n > 0) {
+    var c = s.charCodeAt(i--);
+    if(c < 128) { // encode using utf-8
+      ba[--n] = c;
+    }
+    else if((c > 127) && (c < 2048)) {
+      ba[--n] = (c & 63) | 128;
+      ba[--n] = (c >> 6) | 192;
+    }
+    else {
+      ba[--n] = (c & 63) | 128;
+      ba[--n] = ((c >> 6) & 63) | 128;
+      ba[--n] = (c >> 12) | 224;
+    }
+  }
+  ba[--n] = 0;
+  var rng = new SecureRandom();
+  var x = new Array();
+  while(n > 2) { // random non-zero pad
+    x[0] = 0;
+    while(x[0] == 0) rng.nextBytes(x);
+    ba[--n] = x[0];
+  }
+  ba[--n] = 2;
+  ba[--n] = 0;
+  return new BigInteger(ba);
+}
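+// The resulting block, most significant byte first, is the PKCS#1 v1.5 type 2 layout:
+//   0x00 | 0x02 | PS (random non-zero bytes) | 0x00 | message (UTF-8)
+// The leading zero byte keeps the padded integer strictly below the modulus n.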
+
+// "empty" RSA key constructor
+function RSAKey() {
+  this.n = null;
+  this.e = 0;
+  this.d = null;
+  this.p = null;
+  this.q = null;
+  this.dmp1 = null;
+  this.dmq1 = null;
+  this.coeff = null;
+}
+
+// Set the public key fields N and e from hex strings
+function RSASetPublic(N,E) {
+  if(N != null && E != null && N.length > 0 && E.length > 0) {
+    this.n = parseBigInt(N,16);
+    this.e = parseInt(E,16);
+  }
+  else
+    alert("Invalid RSA public key");
+}
+
+// Perform raw public operation on "x": return x^e (mod n)
+function RSADoPublic(x) {
+  return x.modPowInt(this.e, this.n);
+}
+
+// Return the PKCS#1 RSA encryption of "text" as an even-length hex string
+function RSAEncrypt(text) {
+  var m = pkcs1pad2(text,(this.n.bitLength()+7)>>3);
+  if(m == null) return null;
+  var c = this.doPublic(m);
+  if(c == null) return null;
+  var h = c.toString(16);
+  if((h.length & 1) == 0) return h; else return "0" + h;
+}
+
+// Return the PKCS#1 RSA encryption of "text" as a Base64-encoded string
+//function RSAEncryptB64(text) {
+//  var h = this.encrypt(text);
+//  if(h) return hex2b64(h); else return null;
+//}
+
+// protected
+RSAKey.prototype.doPublic = RSADoPublic;
+
+// public
+RSAKey.prototype.setPublic = RSASetPublic;
+RSAKey.prototype.encrypt = RSAEncrypt;
+//RSAKey.prototype.encrypt_b64 = RSAEncryptB64;
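+
+// Example (editor's sketch; "c35f..." stands in for a real hex modulus):
+//   var rsa = new RSAKey();
+//   rsa.setPublic("c35f...", "10001");   // modulus n in hex, e = 65537
+//   var ct = rsa.encrypt("secret");      // even-length hex ciphertext, or null on error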
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/base64.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/base64.js
new file mode 100644
index 0000000..ad53bb8
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/base64.js
@@ -0,0 +1,71 @@
+var b64map="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+var b64padchar="=";
+
+function hex2b64(h) {
+  var i;
+  var c;
+  var ret = "";
+  for(i = 0; i+3 <= h.length; i+=3) {
+    c = parseInt(h.substring(i,i+3),16);
+    ret += b64map.charAt(c >> 6) + b64map.charAt(c & 63);
+  }
+  if(i+1 == h.length) {
+    c = parseInt(h.substring(i,i+1),16);
+    ret += b64map.charAt(c << 2);
+  }
+  else if(i+2 == h.length) {
+    c = parseInt(h.substring(i,i+2),16);
+    ret += b64map.charAt(c >> 2) + b64map.charAt((c & 3) << 4);
+  }
+  while((ret.length & 3) > 0) ret += b64padchar;
+  return ret;
+}
+
+// convert a base64 string to hex
+function b64tohex(s) {
+  var ret = ""
+  var i;
+  var k = 0; // b64 state, 0-3
+  var slop;
+  for(i = 0; i < s.length; ++i) {
+    if(s.charAt(i) == b64padchar) break;
+    v = b64map.indexOf(s.charAt(i));
+    if(v < 0) continue;
+    if(k == 0) {
+      ret += int2char(v >> 2);
+      slop = v & 3;
+      k = 1;
+    }
+    else if(k == 1) {
+      ret += int2char((slop << 2) | (v >> 4));
+      slop = v & 0xf;
+      k = 2;
+    }
+    else if(k == 2) {
+      ret += int2char(slop);
+      ret += int2char(v >> 2);
+      slop = v & 3;
+      k = 3;
+    }
+    else {
+      ret += int2char((slop << 2) | (v >> 4));
+      ret += int2char(v & 0xf);
+      k = 0;
+    }
+  }
+  if(k == 1)
+    ret += int2char(slop << 2);
+  return ret;
+}
+
+// convert a base64 string to a byte/number array
+function b64toBA(s) {
+  //piggyback on b64tohex for now, optimize later
+  var h = b64tohex(s);
+  var i;
+  var a = new Array();
+  for(i = 0; 2*i < h.length; ++i) {
+    a[i] = parseInt(h.substring(2*i,2*i+2),16);
+  }
+  return a;
+}
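+
+// Example (editor's sketch): the helpers above are inverses for whole-byte input:
+//   hex2b64("a1b2c3")   // => "obLD"
+//   b64tohex("obLD")    // => "a1b2c3"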
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/jsbn.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/jsbn.js
new file mode 100644
index 0000000..4ed7c83
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/jsbn.js
@@ -0,0 +1,559 @@
+// Copyright (c) 2005  Tom Wu
+// All Rights Reserved.
+// See "LICENSE" for details.
+
+// Basic JavaScript BN library - subset useful for RSA encryption.
+
+// Bits per digit
+var dbits;
+
+// JavaScript engine analysis
+var canary = 0xdeadbeefcafe;
+var j_lm = ((canary&0xffffff)==0xefcafe);
+
+// (public) Constructor
+function BigInteger(a,b,c) {
+  if(a != null)
+    if("number" == typeof a) this.fromNumber(a,b,c);
+    else if(b == null && "string" != typeof a) this.fromString(a,256);
+    else this.fromString(a,b);
+}
+
+// return new, unset BigInteger
+function nbi() { return new BigInteger(null); }
+
+// am: Compute w_j += (x*this_i), propagate carries,
+// c is initial carry, returns final carry.
+// c < 3*dvalue, x < 2*dvalue, this_i < dvalue
+// We need to select the fastest one that works in this environment.
+
+// am1: use a single mult and divide to get the high bits,
+// max digit bits should be 26 because
+// max internal value = 2*dvalue^2-2*dvalue (< 2^53)
+function am1(i,x,w,j,c,n) {
+  while(--n >= 0) {
+    var v = x*this[i++]+w[j]+c;
+    c = Math.floor(v/0x4000000);
+    w[j++] = v&0x3ffffff;
+  }
+  return c;
+}
+// am2 avoids a big mult-and-extract completely.
+// Max digit bits should be <= 30 because we do bitwise ops
+// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
+function am2(i,x,w,j,c,n) {
+  var xl = x&0x7fff, xh = x>>15;
+  while(--n >= 0) {
+    var l = this[i]&0x7fff;
+    var h = this[i++]>>15;
+    var m = xh*l+h*xl;
+    l = xl*l+((m&0x7fff)<<15)+w[j]+(c&0x3fffffff);
+    c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);
+    w[j++] = l&0x3fffffff;
+  }
+  return c;
+}
+// Alternately, set max digit bits to 28 since some
+// browsers slow down when dealing with 32-bit numbers.
+function am3(i,x,w,j,c,n) {
+  var xl = x&0x3fff, xh = x>>14;
+  while(--n >= 0) {
+    var l = this[i]&0x3fff;
+    var h = this[i++]>>14;
+    var m = xh*l+h*xl;
+    l = xl*l+((m&0x3fff)<<14)+w[j]+c;
+    c = (l>>28)+(m>>14)+xh*h;
+    w[j++] = l&0xfffffff;
+  }
+  return c;
+}
+if(j_lm && (navigator.appName == "Microsoft Internet Explorer")) {
+  BigInteger.prototype.am = am2;
+  dbits = 30;
+}
+else if(j_lm && (navigator.appName != "Netscape")) {
+  BigInteger.prototype.am = am1;
+  dbits = 26;
+}
+else { // Mozilla/Netscape seems to prefer am3
+  BigInteger.prototype.am = am3;
+  dbits = 28;
+}
+
+BigInteger.prototype.DB = dbits;
+BigInteger.prototype.DM = ((1<<dbits)-1);
+BigInteger.prototype.DV = (1<<dbits);
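+
+// Editor's note: a BigInteger stores its magnitude little-endian in this[0..t-1],
+// DB bits per digit (each digit < DV == 2^DB), with the sign in s (0 or -1,
+// two's-complement style). E.g. with dbits == 28, 2^30 is stored as t == 2,
+// this[0] == 0, this[1] == 4, since 4 * 2^28 == 2^30.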
+
+var BI_FP = 52;
+BigInteger.prototype.FV = Math.pow(2,BI_FP);
+BigInteger.prototype.F1 = BI_FP-dbits;
+BigInteger.prototype.F2 = 2*dbits-BI_FP;
+
+// Digit conversions
+var BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz";
+var BI_RC = new Array();
+var rr,vv;
+rr = "0".charCodeAt(0);
+for(vv = 0; vv <= 9; ++vv) BI_RC[rr++] = vv;
+rr = "a".charCodeAt(0);
+for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
+rr = "A".charCodeAt(0);
+for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
+
+function int2char(n) { return BI_RM.charAt(n); }
+function intAt(s,i) {
+  var c = BI_RC[s.charCodeAt(i)];
+  return (c==null)?-1:c;
+}
+
+// (protected) copy this to r
+function bnpCopyTo(r) {
+  for(var i = this.t-1; i >= 0; --i) r[i] = this[i];
+  r.t = this.t;
+  r.s = this.s;
+}
+
+// (protected) set from integer value x, -DV <= x < DV
+function bnpFromInt(x) {
+  this.t = 1;
+  this.s = (x<0)?-1:0;
+  if(x > 0) this[0] = x;
+  else if(x < -1) this[0] = x+this.DV;
+  else this.t = 0;
+}
+
+// return bigint initialized to value
+function nbv(i) { var r = nbi(); r.fromInt(i); return r; }
+
+// (protected) set from string and radix
+function bnpFromString(s,b) {
+  var k;
+  if(b == 16) k = 4;
+  else if(b == 8) k = 3;
+  else if(b == 256) k = 8; // byte array
+  else if(b == 2) k = 1;
+  else if(b == 32) k = 5;
+  else if(b == 4) k = 2;
+  else { this.fromRadix(s,b); return; }
+  this.t = 0;
+  this.s = 0;
+  var i = s.length, mi = false, sh = 0;
+  while(--i >= 0) {
+    var x = (k==8)?s[i]&0xff:intAt(s,i);
+    if(x < 0) {
+      if(s.charAt(i) == "-") mi = true;
+      continue;
+    }
+    mi = false;
+    if(sh == 0)
+      this[this.t++] = x;
+    else if(sh+k > this.DB) {
+      this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<<sh;
+      this[this.t++] = (x>>(this.DB-sh));
+    }
+    else
+      this[this.t-1] |= x<<sh;
+    sh += k;
+    if(sh >= this.DB) sh -= this.DB;
+  }
+  if(k == 8 && (s[0]&0x80) != 0) {
+    this.s = -1;
+    if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)<<sh;
+  }
+  this.clamp();
+  if(mi) BigInteger.ZERO.subTo(this,this);
+}
+
+// (protected) clamp off excess high words
+function bnpClamp() {
+  var c = this.s&this.DM;
+  while(this.t > 0 && this[this.t-1] == c) --this.t;
+}
+
+// (public) return string representation in given radix
+function bnToString(b) {
+  if(this.s < 0) return "-"+this.negate().toString(b);
+  var k;
+  if(b == 16) k = 4;
+  else if(b == 8) k = 3;
+  else if(b == 2) k = 1;
+  else if(b == 32) k = 5;
+  else if(b == 4) k = 2;
+  else return this.toRadix(b);
+  var km = (1<<k)-1, d, m = false, r = "", i = this.t;
+  var p = this.DB-(i*this.DB)%k;
+  if(i-- > 0) {
+    if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); }
+    while(i >= 0) {
+      if(p < k) {
+        d = (this[i]&((1<<p)-1))<<(k-p);
+        d |= this[--i]>>(p+=this.DB-k);
+      }
+      else {
+        d = (this[i]>>(p-=k))&km;
+        if(p <= 0) { p += this.DB; --i; }
+      }
+      if(d > 0) m = true;
+      if(m) r += int2char(d);
+    }
+  }
+  return m?r:"0";
+}
+
+// (public) -this
+function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }
+
+// (public) |this|
+function bnAbs() { return (this.s<0)?this.negate():this; }
+
+// (public) return + if this > a, - if this < a, 0 if equal
+function bnCompareTo(a) {
+  var r = this.s-a.s;
+  if(r != 0) return r;
+  var i = this.t;
+  r = i-a.t;
+  if(r != 0) return (this.s<0)?-r:r;
+  while(--i >= 0) if((r=this[i]-a[i]) != 0) return r;
+  return 0;
+}
+
+// returns bit length of the integer x
+function nbits(x) {
+  var r = 1, t;
+  if((t=x>>>16) != 0) { x = t; r += 16; }
+  if((t=x>>8) != 0) { x = t; r += 8; }
+  if((t=x>>4) != 0) { x = t; r += 4; }
+  if((t=x>>2) != 0) { x = t; r += 2; }
+  if((t=x>>1) != 0) { x = t; r += 1; }
+  return r;
+}
+
+// (public) return the number of bits in "this"
+function bnBitLength() {
+  if(this.t <= 0) return 0;
+  return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM));
+}
+
+// (protected) r = this << n*DB
+function bnpDLShiftTo(n,r) {
+  var i;
+  for(i = this.t-1; i >= 0; --i) r[i+n] = this[i];
+  for(i = n-1; i >= 0; --i) r[i] = 0;
+  r.t = this.t+n;
+  r.s = this.s;
+}
+
+// (protected) r = this >> n*DB
+function bnpDRShiftTo(n,r) {
+  for(var i = n; i < this.t; ++i) r[i-n] = this[i];
+  r.t = Math.max(this.t-n,0);
+  r.s = this.s;
+}
+
+// (protected) r = this << n
+function bnpLShiftTo(n,r) {
+  var bs = n%this.DB;
+  var cbs = this.DB-bs;
+  var bm = (1<<cbs)-1;
+  var ds = Math.floor(n/this.DB), c = (this.s<<bs)&this.DM, i;
+  for(i = this.t-1; i >= 0; --i) {
+    r[i+ds+1] = (this[i]>>cbs)|c;
+    c = (this[i]&bm)<<bs;
+  }
+  for(i = ds-1; i >= 0; --i) r[i] = 0;
+  r[ds] = c;
+  r.t = this.t+ds+1;
+  r.s = this.s;
+  r.clamp();
+}
+
+// (protected) r = this >> n
+function bnpRShiftTo(n,r) {
+  r.s = this.s;
+  var ds = Math.floor(n/this.DB);
+  if(ds >= this.t) { r.t = 0; return; }
+  var bs = n%this.DB;
+  var cbs = this.DB-bs;
+  var bm = (1<<bs)-1;
+  r[0] = this[ds]>>bs;
+  for(var i = ds+1; i < this.t; ++i) {
+    r[i-ds-1] |= (this[i]&bm)<<cbs;
+    r[i-ds] = this[i]>>bs;
+  }
+  if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<<cbs;
+  r.t = this.t-ds;
+  r.clamp();
+}
+
+// (protected) r = this - a
+function bnpSubTo(a,r) {
+  var i = 0, c = 0, m = Math.min(a.t,this.t);
+  while(i < m) {
+    c += this[i]-a[i];
+    r[i++] = c&this.DM;
+    c >>= this.DB;
+  }
+  if(a.t < this.t) {
+    c -= a.s;
+    while(i < this.t) {
+      c += this[i];
+      r[i++] = c&this.DM;
+      c >>= this.DB;
+    }
+    c += this.s;
+  }
+  else {
+    c += this.s;
+    while(i < a.t) {
+      c -= a[i];
+      r[i++] = c&this.DM;
+      c >>= this.DB;
+    }
+    c -= a.s;
+  }
+  r.s = (c<0)?-1:0;
+  if(c < -1) r[i++] = this.DV+c;
+  else if(c > 0) r[i++] = c;
+  r.t = i;
+  r.clamp();
+}
+
+// (protected) r = this * a, r != this,a (HAC 14.12)
+// "this" should be the larger one if appropriate.
+function bnpMultiplyTo(a,r) {
+  var x = this.abs(), y = a.abs();
+  var i = x.t;
+  r.t = i+y.t;
+  while(--i >= 0) r[i] = 0;
+  for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t);
+  r.s = 0;
+  r.clamp();
+  if(this.s != a.s) BigInteger.ZERO.subTo(r,r);
+}
+
+// (protected) r = this^2, r != this (HAC 14.16)
+function bnpSquareTo(r) {
+  var x = this.abs();
+  var i = r.t = 2*x.t;
+  while(--i >= 0) r[i] = 0;
+  for(i = 0; i < x.t-1; ++i) {
+    var c = x.am(i,x[i],r,2*i,0,1);
+    if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
+      r[i+x.t] -= x.DV;
+      r[i+x.t+1] = 1;
+    }
+  }
+  if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1);
+  r.s = 0;
+  r.clamp();
+}
+
+// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
+// r != q, this != m.  q or r may be null.
+function bnpDivRemTo(m,q,r) {
+  var pm = m.abs();
+  if(pm.t <= 0) return;
+  var pt = this.abs();
+  if(pt.t < pm.t) {
+    if(q != null) q.fromInt(0);
+    if(r != null) this.copyTo(r);
+    return;
+  }
+  if(r == null) r = nbi();
+  var y = nbi(), ts = this.s, ms = m.s;
+  var nsh = this.DB-nbits(pm[pm.t-1]);	// normalize modulus
+  if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); }
+  else { pm.copyTo(y); pt.copyTo(r); }
+  var ys = y.t;
+  var y0 = y[ys-1];
+  if(y0 == 0) return;
+  var yt = y0*(1<<this.F1)+((ys>1)?y[ys-2]>>this.F2:0);
+  var d1 = this.FV/yt, d2 = (1<<this.F1)/yt, e = 1<<this.F2;
+  var i = r.t, j = i-ys, t = (q==null)?nbi():q;
+  y.dlShiftTo(j,t);
+  if(r.compareTo(t) >= 0) {
+    r[r.t++] = 1;
+    r.subTo(t,r);
+  }
+  BigInteger.ONE.dlShiftTo(ys,t);
+  t.subTo(y,y);	// "negative" y so we can replace sub with am later
+  while(y.t < ys) y[y.t++] = 0;
+  while(--j >= 0) {
+    // Estimate quotient digit
+    var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2);
+    if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) {	// Try it out
+      y.dlShiftTo(j,t);
+      r.subTo(t,r);
+      while(r[i] < --qd) r.subTo(t,r);
+    }
+  }
+  if(q != null) {
+    r.drShiftTo(ys,q);
+    if(ts != ms) BigInteger.ZERO.subTo(q,q);
+  }
+  r.t = ys;
+  r.clamp();
+  if(nsh > 0) r.rShiftTo(nsh,r);	// Denormalize remainder
+  if(ts < 0) BigInteger.ZERO.subTo(r,r);
+}
+
+// (public) this mod a
+function bnMod(a) {
+  var r = nbi();
+  this.abs().divRemTo(a,null,r);
+  if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
+  return r;
+}
+
+// Modular reduction using "classic" algorithm
+function Classic(m) { this.m = m; }
+function cConvert(x) {
+  if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
+  else return x;
+}
+function cRevert(x) { return x; }
+function cReduce(x) { x.divRemTo(this.m,null,x); }
+function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+Classic.prototype.convert = cConvert;
+Classic.prototype.revert = cRevert;
+Classic.prototype.reduce = cReduce;
+Classic.prototype.mulTo = cMulTo;
+Classic.prototype.sqrTo = cSqrTo;
+
+// (protected) return "-1/this % 2^DB"; useful for Mont. reduction
+// justification:
+//         xy == 1 (mod m)
+//         xy =  1+km
+//   xy(2-xy) = (1+km)(1-km)
+// x[y(2-xy)] = 1-k^2m^2
+// x[y(2-xy)] == 1 (mod m^2)
+// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
+// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
+// JS multiply "overflows" differently from C/C++, so care is needed here.
+function bnpInvDigit() {
+  if(this.t < 1) return 0;
+  var x = this[0];
+  if((x&1) == 0) return 0;
+  var y = x&3;		// y == 1/x mod 2^2
+  y = (y*(2-(x&0xf)*y))&0xf;	// y == 1/x mod 2^4
+  y = (y*(2-(x&0xff)*y))&0xff;	// y == 1/x mod 2^8
+  y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff;	// y == 1/x mod 2^16
+  // last step - calculate inverse mod DV directly;
+  // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
+  y = (y*(2-x*y%this.DV))%this.DV;		// y == 1/x mod 2^dbits
+  // we really want the negative inverse, and -DV < y < DV
+  return (y>0)?this.DV-y:-y;
+}
+
+// Montgomery reduction
+function Montgomery(m) {
+  this.m = m;
+  this.mp = m.invDigit();
+  this.mpl = this.mp&0x7fff;
+  this.mph = this.mp>>15;
+  this.um = (1<<(m.DB-15))-1;
+  this.mt2 = 2*m.t;
+}
+
+// xR mod m
+function montConvert(x) {
+  var r = nbi();
+  x.abs().dlShiftTo(this.m.t,r);
+  r.divRemTo(this.m,null,r);
+  if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);
+  return r;
+}
+
+// x/R mod m
+function montRevert(x) {
+  var r = nbi();
+  x.copyTo(r);
+  this.reduce(r);
+  return r;
+}
+
+// x = x/R mod m (HAC 14.32)
+function montReduce(x) {
+  while(x.t <= this.mt2)	// pad x so am has enough room later
+    x[x.t++] = 0;
+  for(var i = 0; i < this.m.t; ++i) {
+    // faster way of calculating u0 = x[i]*mp mod DV
+    var j = x[i]&0x7fff;
+    var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
+    // use am to combine the multiply-shift-add into one call
+    j = i+this.m.t;
+    x[j] += this.m.am(0,u0,x,i,0,this.m.t);
+    // propagate carry
+    while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; }
+  }
+  x.clamp();
+  x.drShiftTo(this.m.t,x);
+  if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
+}
+
+// r = "x^2/R mod m"; x != r
+function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+// r = "xy/R mod m"; x,y != r
+function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+
+Montgomery.prototype.convert = montConvert;
+Montgomery.prototype.revert = montRevert;
+Montgomery.prototype.reduce = montReduce;
+Montgomery.prototype.mulTo = montMulTo;
+Montgomery.prototype.sqrTo = montSqrTo;
+
+// (protected) true iff this is even
+function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; }
+
+// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
+function bnpExp(e,z) {
+  if(e > 0xffffffff || e < 1) return BigInteger.ONE;
+  var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
+  g.copyTo(r);
+  while(--i >= 0) {
+    z.sqrTo(r,r2);
+    if((e&(1<<i)) > 0) z.mulTo(r2,g,r);
+    else { var t = r; r = r2; r2 = t; }
+  }
+  return z.revert(r);
+}
+
+// (public) this^e % m, 0 <= e < 2^32
+function bnModPowInt(e,m) {
+  var z;
+  if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
+  return this.exp(e,z);
+}
+
+// protected
+BigInteger.prototype.copyTo = bnpCopyTo;
+BigInteger.prototype.fromInt = bnpFromInt;
+BigInteger.prototype.fromString = bnpFromString;
+BigInteger.prototype.clamp = bnpClamp;
+BigInteger.prototype.dlShiftTo = bnpDLShiftTo;
+BigInteger.prototype.drShiftTo = bnpDRShiftTo;
+BigInteger.prototype.lShiftTo = bnpLShiftTo;
+BigInteger.prototype.rShiftTo = bnpRShiftTo;
+BigInteger.prototype.subTo = bnpSubTo;
+BigInteger.prototype.multiplyTo = bnpMultiplyTo;
+BigInteger.prototype.squareTo = bnpSquareTo;
+BigInteger.prototype.divRemTo = bnpDivRemTo;
+BigInteger.prototype.invDigit = bnpInvDigit;
+BigInteger.prototype.isEven = bnpIsEven;
+BigInteger.prototype.exp = bnpExp;
+
+// public
+BigInteger.prototype.toString = bnToString;
+BigInteger.prototype.negate = bnNegate;
+BigInteger.prototype.abs = bnAbs;
+BigInteger.prototype.compareTo = bnCompareTo;
+BigInteger.prototype.bitLength = bnBitLength;
+BigInteger.prototype.mod = bnMod;
+BigInteger.prototype.modPowInt = bnModPowInt;
+
+// "constants"
+BigInteger.ZERO = nbv(0);
+BigInteger.ONE = nbv(1);
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/prng4.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/prng4.js
new file mode 100644
index 0000000..3034f3f
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/prng4.js
@@ -0,0 +1,45 @@
+// prng4.js - uses Arcfour as a PRNG
+
+function Arcfour() {
+  this.i = 0;
+  this.j = 0;
+  this.S = new Array();
+}
+
+// Initialize arcfour context from key, an array of ints, each from [0..255]
+function ARC4init(key) {
+  var i, j, t;
+  for(i = 0; i < 256; ++i)
+    this.S[i] = i;
+  j = 0;
+  for(i = 0; i < 256; ++i) {
+    j = (j + this.S[i] + key[i % key.length]) & 255;
+    t = this.S[i];
+    this.S[i] = this.S[j];
+    this.S[j] = t;
+  }
+  this.i = 0;
+  this.j = 0;
+}
+
+function ARC4next() {
+  var t;
+  this.i = (this.i + 1) & 255;
+  this.j = (this.j + this.S[this.i]) & 255;
+  t = this.S[this.i];
+  this.S[this.i] = this.S[this.j];
+  this.S[this.j] = t;
+  return this.S[(t + this.S[this.i]) & 255];
+}
+
+Arcfour.prototype.init = ARC4init;
+Arcfour.prototype.next = ARC4next;
+
+// Plug in your RNG constructor here
+function prng_newstate() {
+  return new Arcfour();
+}
+
+// Pool size must be a multiple of 4 and greater than 32.
+// An array of bytes the size of the pool will be passed to init()
+var rng_psize = 256;
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/rng.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/rng.js
new file mode 100644
index 0000000..9db1382
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/rng.js
@@ -0,0 +1,75 @@
+// Random number generator - requires a PRNG backend, e.g. prng4.js
+
+// For best results, put code like
+// <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
+// in your main HTML document.
+
+var rng_state;
+var rng_pool;
+var rng_pptr;
+
+// Mix in a 32-bit integer into the pool
+function rng_seed_int(x) {
+  rng_pool[rng_pptr++] ^= x & 255;
+  rng_pool[rng_pptr++] ^= (x >> 8) & 255;
+  rng_pool[rng_pptr++] ^= (x >> 16) & 255;
+  rng_pool[rng_pptr++] ^= (x >> 24) & 255;
+  if(rng_pptr >= rng_psize) rng_pptr -= rng_psize;
+}
+
+// Mix in the current time (w/milliseconds) into the pool
+function rng_seed_time() {
+  rng_seed_int(new Date().getTime());
+}
+
+// Initialize the pool with junk if needed.
+if(rng_pool == null) {
+  rng_pool = new Array();
+  rng_pptr = 0;
+  var t;
+  if(window.crypto && window.crypto.getRandomValues) {
+    // Use webcrypto if available
+    var ua = new Uint8Array(32);
+    window.crypto.getRandomValues(ua);
+    for(t = 0; t < 32; ++t)
+      rng_pool[rng_pptr++] = ua[t];
+  }
+  if(navigator.appName == "Netscape" && navigator.appVersion < "5" && window.crypto) {
+    // Extract entropy (256 bits) from NS4 RNG if available
+    var z = window.crypto.random(32);
+    for(t = 0; t < z.length; ++t)
+      rng_pool[rng_pptr++] = z.charCodeAt(t) & 255;
+  }
+  while(rng_pptr < rng_psize) {  // extract some randomness from Math.random()
+    t = Math.floor(65536 * Math.random());
+    rng_pool[rng_pptr++] = t >>> 8;
+    rng_pool[rng_pptr++] = t & 255;
+  }
+  rng_pptr = 0;
+  rng_seed_time();
+  //rng_seed_int(window.screenX);
+  //rng_seed_int(window.screenY);
+}
+
+function rng_get_byte() {
+  if(rng_state == null) {
+    rng_seed_time();
+    rng_state = prng_newstate();
+    rng_state.init(rng_pool);
+    for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr)
+      rng_pool[rng_pptr] = 0;
+    rng_pptr = 0;
+    //rng_pool = null;
+  }
+  // TODO: allow reseeding after first request
+  return rng_state.next();
+}
+
+function rng_get_bytes(ba) {
+  var i;
+  for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte();
+}
+
+function SecureRandom() {}
+
+SecureRandom.prototype.nextBytes = rng_get_bytes;
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/rsa.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/rsa.js
new file mode 100644
index 0000000..9f86640
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/rsa.js
@@ -0,0 +1,112 @@
+// Depends on jsbn.js and rng.js
+
+// Version 1.1: support utf-8 encoding in pkcs1pad2
+
+// convert a (hex) string to a bignum object
+function parseBigInt(str,r) {
+  return new BigInteger(str,r);
+}
+
+function linebrk(s,n) {
+  var ret = "";
+  var i = 0;
+  while(i + n < s.length) {
+    ret += s.substring(i,i+n) + "\n";
+    i += n;
+  }
+  return ret + s.substring(i,s.length);
+}
+
+function byte2Hex(b) {
+  if(b < 0x10)
+    return "0" + b.toString(16);
+  else
+    return b.toString(16);
+}
+
+// PKCS#1 (type 2, random) pad input string s to n bytes, and return a bigint
+function pkcs1pad2(s,n) {
+  if(n < s.length + 11) { // TODO: fix for utf-8
+    alert("Message too long for RSA");
+    return null;
+  }
+  var ba = new Array();
+  var i = s.length - 1;
+  while(i >= 0 && n > 0) {
+    var c = s.charCodeAt(i--);
+    if(c < 128) { // encode using utf-8
+      ba[--n] = c;
+    }
+    else if((c > 127) && (c < 2048)) {
+      ba[--n] = (c & 63) | 128;
+      ba[--n] = (c >> 6) | 192;
+    }
+    else {
+      ba[--n] = (c & 63) | 128;
+      ba[--n] = ((c >> 6) & 63) | 128;
+      ba[--n] = (c >> 12) | 224;
+    }
+  }
+  ba[--n] = 0;
+  var rng = new SecureRandom();
+  var x = new Array();
+  while(n > 2) { // random non-zero pad
+    x[0] = 0;
+    while(x[0] == 0) rng.nextBytes(x);
+    ba[--n] = x[0];
+  }
+  ba[--n] = 2;
+  ba[--n] = 0;
+  return new BigInteger(ba);
+}
+
+// "empty" RSA key constructor
+function RSAKey() {
+  this.n = null;
+  this.e = 0;
+  this.d = null;
+  this.p = null;
+  this.q = null;
+  this.dmp1 = null;
+  this.dmq1 = null;
+  this.coeff = null;
+}
+
+// Set the public key fields N and e from hex strings
+function RSASetPublic(N,E) {
+  if(N != null && E != null && N.length > 0 && E.length > 0) {
+    this.n = parseBigInt(N,16);
+    this.e = parseInt(E,16);
+  }
+  else
+    alert("Invalid RSA public key");
+}
+
+// Perform raw public operation on "x": return x^e (mod n)
+function RSADoPublic(x) {
+  return x.modPowInt(this.e, this.n);
+}
+
+// Return the PKCS#1 RSA encryption of "text" as an even-length hex string
+function RSAEncrypt(text) {
+  var m = pkcs1pad2(text,(this.n.bitLength()+7)>>3);
+  if(m == null) return null;
+  var c = this.doPublic(m);
+  if(c == null) return null;
+  var h = c.toString(16);
+  if((h.length & 1) == 0) return h; else return "0" + h;
+}
+
+// Return the PKCS#1 RSA encryption of "text" as a Base64-encoded string
+//function RSAEncryptB64(text) {
+//  var h = this.encrypt(text);
+//  if(h) return hex2b64(h); else return null;
+//}
+
+// protected
+RSAKey.prototype.doPublic = RSADoPublic;
+
+// public
+RSAKey.prototype.setPublic = RSASetPublic;
+RSAKey.prototype.encrypt = RSAEncrypt;
+//RSAKey.prototype.encrypt_b64 = RSAEncryptB64;
diff --git a/config/plugins/interactive_environments/rstudio/static/js/rstudio.js b/config/plugins/interactive_environments/rstudio/static/js/rstudio.js
new file mode 100644
index 0000000..cc0b62e
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/static/js/rstudio.js
@@ -0,0 +1,72 @@
+function message_failed_connection(){
+    toastr.error(
+        "Could not connect to RStudio. Please contact your administrator.",
+        "Security warning",
+        {'closeButton': true, 'timeOut': 20000, 'tapToDismiss': true}
+    );
+}
+
+
+/**
+ * Load an interactive environment (IE) from a remote URL
+ * @param {String} notebook_login_url: URL that should be POSTed to for login
+ * @param {String} notebook_access_url: the URL embedded in the page and loaded
+ * @param {String} notebook_pubkey_url: URL serving the IE's RSA public key
+ * @param {String} username: user name, sent together with the global ie_password
+ */
+function load_notebook(notebook_login_url, notebook_access_url, notebook_pubkey_url, username){
+    $( document ).ready(function() {
+        // Test notebook_login_url for accessibility, executing the login+load function whenever
+        // we've successfully connected to the IE.
+        test_ie_availability(notebook_pubkey_url, function(){
+            var payload = username + "\n" + ie_password;
+            $.ajax({
+                type: 'GET',
+                url: notebook_pubkey_url,
+                xhrFields: {
+                        withCredentials: true
+                },
+                success: function(response_text){
+                    var chunks = response_text.split(':', 2);
+                    var exp = chunks[0];
+                    var mod = chunks[1];
+                    console.log("Found " + exp +" and " + mod);
+                    var rsa = new RSAKey();
+                    rsa.setPublic(mod, exp);
+                    console.log("Encrypting '" + username + "', '" + ie_password + "'");
+                    var enc_hex = rsa.encrypt(payload);
+                    var encrypted = hex2b64(enc_hex);
+                    console.log("E: " + encrypted);
+
+                    // Now we can login
+                    $.ajax({
+                        type: "POST",
+                        // to the Login URL
+                        url: notebook_login_url,
+                        // With our password
+                        data: {
+                            'v': encrypted,
+                            'persist': 1,
+                            'clientPath': '/rstudio/auth-sign-in',
+                            'appUri': '',
+                        },
+                        contentType: "application/x-www-form-urlencoded",
+                        xhrFields: {
+                            withCredentials: true
+                        },
+                        // If that is successful, load the notebook
+                        success: function(){
+                            append_notebook(notebook_access_url);
+                        },
+                        error: function(jqxhr, status, error){
+                            message_failed_connection();
+                            // Do we want to try and load the notebook anyway? Just in case?
+                            append_notebook(notebook_access_url);
+                        }
+                    });
+                }
+            });
+
+        });
+    });
+}
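+
+// Editor's note on the flow above: the public-key endpoint returns "exp:mod",
+// the payload "username\npassword" is encrypted with it via PKCS#1 v1.5 (rsa.js),
+// base64-encoded (base64.js), and POSTed as form field "v" to the RStudio
+// sign-in URL; only then is the notebook embedded via append_notebook().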
diff --git a/config/plugins/interactive_environments/rstudio/templates/rstudio.mako b/config/plugins/interactive_environments/rstudio/templates/rstudio.mako
new file mode 100644
index 0000000..46b9b2b
--- /dev/null
+++ b/config/plugins/interactive_environments/rstudio/templates/rstudio.mako
@@ -0,0 +1,73 @@
+<%namespace file="ie.mako" name="ie"/>
+<%
+import os
+import shutil
+import time
+
+# Sets ID and sets up a lot of other variables
+ie_request.load_deploy_config()
+ie_request.attr.docker_port = 80
+# Create tempdir in galaxy
+temp_dir = ie_request.temp_dir
+PASSWORD = "rstudio"
+USERNAME = "rstudio"
+# Then override it again
+ie_request.notebook_pw = "rstudio"
+
+# Did the user give us an RData file?
+if hda.datatype.__class__.__name__ == "RData":
+    shutil.copy( hda.file_name, os.path.join(temp_dir, '.RData') )
+
+ie_request.launch(
+    image=trans.request.params.get('image_tag', None),
+    additional_ids=trans.request.params.get('additional_dataset_ids', None),
+    env_override={
+        'notebook_username': USERNAME,
+        'notebook_password': PASSWORD,
+    }
+)
+
+## General IE specific
+# Access URLs for the notebook from within galaxy.
+# TODO: Make this work without pointing directly to IE. Currently does not work
+# through proxy.
+notebook_pubkey_url = ie_request.url_template('${PROXY_URL}/rstudio/auth-public-key')
+notebook_access_url = ie_request.url_template('${PROXY_URL}/rstudio/')
+notebook_login_url =  ie_request.url_template('${PROXY_URL}/rstudio/auth-sign-in')
+
+%>
+<html>
+<head>
+${ ie.load_default_js() }
+</head>
+<body style="margin:0px">
+<script type="text/javascript">
+${ ie.default_javascript_variables() }
+var notebook_login_url = '${ notebook_login_url }';
+var notebook_access_url = '${ notebook_access_url }';
+var notebook_pubkey_url = '${ notebook_pubkey_url }';
+var notebook_username = '${ USERNAME }';
+require.config({
+    baseUrl: app_root,
+    paths: {
+        "interactive_environments": "${h.url_for('/static/scripts/galaxy.interactive_environments')}",
+        "plugin" : app_root + "js/",
+        "crypto" : app_root + "js/crypto/",
+    },
+});
+requirejs([
+    'interactive_environments',
+    'crypto/prng4',
+    'crypto/rng',
+    'crypto/rsa',
+    'crypto/jsbn',
+    'crypto/base64',
+    'plugin/rstudio'
+], function(){
+    load_notebook(notebook_login_url, notebook_access_url, notebook_pubkey_url, "${ USERNAME }");
+});
+</script>
+<div id="main">
+</div>
+</body>
+</html>
diff --git a/config/plugins/tours/core.galaxy_ui.yaml b/config/plugins/tours/core.galaxy_ui.yaml
new file mode 100644
index 0000000..2450ec2
--- /dev/null
+++ b/config/plugins/tours/core.galaxy_ui.yaml
@@ -0,0 +1,216 @@
+id: galaxy_ui
+name: Galaxy UI
+description: A gentle introduction to the Galaxy User Interface
+title_default: "Welcome to Galaxy"
+
+# A tour is made of several steps, each of them beginning with a dash '-'
+steps:
+    # 'title's will be displayed in the header of each step-container
+    # If you don't specify any title, a default title is used, defined above.
+    - title: "Welcome to Galaxy"
+      # 'content' is the actual text that is shown to the user
+      content: "This short tour will guide you through Galaxy's user interface.<br>
+                You can navigate with your arrow keys and leave the tour at any time with 'Escape' or the 'End tour' button."
+      # backdrop is just one of many properties you can attach to one step-container,
+      # a full reference can be found at http://bootstraptour.com/api/
+      backdrop: true
+
+    # 'element' is the JQuery Selector (http://api.jquery.com/category/selectors/) of the element you want to describe
+    # In this case we want to highlight the Upload button with the `.upload-button` selector
+    - title: "Upload your data"
+      element: ".upload-button"
+      intro: "Galaxy supports many ways to get in your data.<br>
+              Use this button to upload your data."
+      # position of the text box relative to the selected element
+      position: "right"
+      # You can trigger click() events on arbitrary elements before (preclick) or after (postclick) the element is shown
+      postclick:
+        - ".upload-button"
+
+    - title: "Upload your data"
+      element: "button#btn-local"
+      intro: "You can upload data from your computer."
+      position: "top"
+      postclick:
+        - "button#btn-new"
+
+    - title: "Upload your data"
+      element: "button#btn-new"
+      intro: "Copy and paste data directly into Galaxy or include URLs that lead to your data"
+      position: "top"
+      preclick:
+        - ".upload-button"
+
+    - title: "Insert URLs"
+      element: ".upload-text-content:first"
+      intro: "URLs separated by a line break are automatically downloaded by Galaxy."
+      position: "top"
+      textinsert: |
+        https://raw.githubusercontent.com/bgruening/galaxytools/adf077b912ddebd97b07b947b855cdd2862ed8ef/tools/augustus/test-data/human_augustus.fa
+        https://raw.githubusercontent.com/bgruening/galaxytools/adf077b912ddebd97b07b947b855cdd2862ed8ef/tools/sailfish/test-data/reads_2.fastq
+        https://raw.githubusercontent.com/bgruening/galaxytools/adf077b912ddebd97b07b947b855cdd2862ed8ef/tools/statistics/test-data/anderson.tabular
+
+    - title: "Start the upload"
+      element: "button#btn-start"
+      intro: "Upload the data into your Galaxy history."
+      position: "top"
+      postclick:
+        - "button#btn-start"
+
+    - title: "Close upload/download manager"
+      element: "button#btn-close"
+      intro: "Close the upload manager with this button or with a click outside of the manager window."
+      position: "top"
+      postclick:
+        - "#btn-close"
+
+    - title: "Tools"
+      element: "#left"
+      intro: "This is your ToolBox. All tools available in your Galaxy instance you can find here."
+      position: "right"
+
+    - title: "Tool search bar"
+      element: '#tool-search-query'
+      intro: "You can search for tools by keywords."
+      position: "right"
+      textinsert: 'filter'
+
+    - title: "Tool categories"
+      element: '#title_filter > a'
+      intro: "Tools are grouped into categories to make them easier to find."
+      position: "right"
+      preclick:
+        - "#title_filter a"
+
+    - title: "Select a tool"
+      element: 'a[href$="tool_runner?tool_id=Filter1"]'
+      intro: "Select and load a tool for your analysis by clicking the underlined link."
+      position: "right"
+      postclick:
+        - 'a[href$="tool_runner?tool_id=Filter1"]'
+
+    - title: "Filter Tool"
+      element: '#center-panel'
+      intro: "Your tool is loaded into the main Galaxy page and ready for use."
+      position: "right"
+      #backdropContainer: 'background'
+      #backdrop: true
+
+    - title: "Tool parameters"
+      element: '#uid-0'
+      intro: "Here you can choose your tool parameters. Select your input dataset from your history and specify parameters for your analysis."
+      position: "right"
+
+    - title: "Tool help"
+      element: 'div.ui-form-help'
+      intro: "Every Galaxy tool has a help section with detailed information about the tool and its parameters.
+              Have a look at it to get to know each tool in depth."
+      position: "top"
+
+    - title: "Run your tool"
+      element: '#execute'
+      intro: "Click on 'Execute' to run your tool and send it to the compute cluster.
+              Don't be afraid to test different settings in parallel. Galaxy can handle multiple runs of the same tool."
+      position: "right"
+      postclick:
+        - '#execute'
+
+    - title: "History"
+      element: "#right"
+      intro: "This is your Galaxy history! All analysis steps with results of the tools and their parameters will be recorded here and can be redone at any time.<br>
+              A running tool is shown in yellow and if your calculation is done, it turns green. If your dataset is
+              grey this means you are queued and need to wait until your tool can be started. If your dataset turns into red, an error has occurred. Please report the bug to the Galaxy team with the bug report button."
+      position: "left"
+
+    - title: "Rename history"
+      element: "#current-history-panel > div.controls > div.title > div"
+      intro: "Name your history here and press enter."
+      position: "left"
+      preclick:
+        - "#current-history-panel > div.controls > div.title > div"
+
+    - title: "View dataset"
+      element: "#current-history-panel .fa-eye:eq(0)"
+      intro: "View your dataset by clicking the eye button."
+      position: "left"
+      #preclick:
+      #  - "#current-history-panel .fa-eye:eq(0)"
+
+    - title: "Rename dataset"
+      element: "#current-history-panel .fa-pencil:eq(0)"
+      intro: "Rename your dataset by clicking the pencil button."
+      position: "left"
+      #preclick:
+      #  - "#current-history-panel .fa-pencil:eq(0)"
+
+    - title: "Remove dataset"
+      element: "#current-history-panel .fa-times:eq(0)"
+      intro: "Delete your dataset by clicking the x-button."
+      position: "left"
+      #postclick:
+      #  - "#current-history-panel .fa-times:eq(0)"
+
+    - title: "Dataset information"
+      element: "div.title-bar.clear:eq(0)"
+      intro: "Clicking on your dataset provides you with more information regarding your dataset (e.g. filetype or size)."
+      position: "left"
+      preclick:
+        - "div.title-bar.clear:eq(0)"
+
+    - title: "Re-run tool"
+      element: ".fa-refresh:eq(1)"
+      intro: "By clicking the reload button, you can re-run your tool again (e.g. with different parameters or on another dataset)."
+      position: "left"
+      preclick:
+        - ".fa-refresh:eq(1)"
+
+
+    - title: "Panel collapse"
+      element: "#left > div.unified-panel-footer > div.panel-collapse.left"
+      intro: "To extend the view for your main Galaxy page in the middle, you can collapse the tool panel on the left hand side. Clicking the panel arrow on the right hand side, collapses the history."
+      position: "top"
+      postclick:
+        - "#left > div.unified-panel-footer > div.panel-collapse.left"
+
+    - title: 'Top panel'
+      element: "#masthead"
+      intro: "The top panel will give you access to a lot of useful things."
+      position: "bottom"
+
+    - title: 'Analyze data'
+      element: "#analysis"
+      intro: "This is the current view. Start your <b>analysis</b> from here."
+      position: "bottom"
+
+    - title: 'Workflow'
+      element: "#workflow"
+      intro: "Create, manage, import, export and share your <b>Workflows</b>."
+      position: "bottom"
+
+    - title: 'Shared data'
+      element: "#shared"
+      intro: "Get access to all <b>Workflows</b>, <b>Histories</b>, <b>Pages</b>, <b>Visualizations</b> and your <b>Data Library</b> that are shared with you."
+      position: "bottom"
+
+    - title: 'Visualization'
+      element: "#visualization"
+      intro: "Do you want to start to visualize your data in Trackster? Start here!<br>Galaxy offers a lot more visualizations!"
+      position: "bottom"
+
+    - title: 'Help'
+      element: "#help"
+      intro: "Here you get more help from the Galaxy <b>Community</b> with Videos, a QA board and more tours."
+      position: "bottom"
+
+    - title: 'User Preferences'
+      element: "#user"
+      intro: "Login and start your analysis!"
+      position: "bottom"
+
+    - title: 'Scratchbook'
+      element: "#enable-scratchbook"
+      intro: "Scratchbook offers you multiple visualizations next to each other. Have a look at our Scratchbook tour to learn more about it."
+      position: "bottom"
+
+    - title: "Enjoy Galaxy"
+      intro: "Thanks for taking this tour! Happy research with Galaxy!"
diff --git a/config/plugins/tours/core.history.yaml b/config/plugins/tours/core.history.yaml
new file mode 100644
index 0000000..cb4bf34
--- /dev/null
+++ b/config/plugins/tours/core.history.yaml
@@ -0,0 +1,148 @@
+name: History Introduction
+description: A detailed introduction to the Galaxy History
+title_default: "Galaxy History Introduction"
+steps:
+    - content: "This short tour will guide you through <b>Galaxy's History</b> and datasets."
+
+    - title: "Upload data"
+      element: ".upload-button"
+      intro: |
+            "At first we upload some data into your Galaxy history.<br>
+            Use this button to upload your data."
+      position: "right"
+      postclick:
+        - ".upload-button"
+
+    - title: "Upload your data"
+      element: "button#btn-local"
+      intro: "You can upload data from your computer."
+      position: "top"
+      postclick:
+        - "button#btn-new"
+
+    - title: "Upload your data"
+      element: "button#btn-new"
+      intro: "Copy and paste data directly into Galaxy or include URLs that lead to your data."
+      position: "top"
+
+    - title: "Insert URLs"
+      element: ".upload-text-content:first"
+      intro: "URLs separated by a line break are automatically downloaded by Galaxy."
+      position: "bottom"
+      textinsert: |
+        https://wiki.galaxyproject.org/Images/GalaxyLogo?action=AttachFile&do=get&target=galaxy_project_logo.jpg
+        https://raw.githubusercontent.com/bgruening/galaxytools/adf077b912ddebd97b07b947b855cdd2862ed8ef/tools/statistics/test-data/anderson.tabular
+
+    - title: "Upload your data"
+      element: "button#btn-start"
+      intro: "Upload the data into your Galaxy <b>History</b>."
+      position: "top"
+      postclick:
+        - "button#btn-start"
+        - "button#btn-close"
+
+    - title: "History"
+      element: "#right"
+      intro: "This is your history! All analysis steps will be recorded and can be redone at any time."
+      position: "left"
+
+    - title: "Dataset information"
+      element: "div.title-bar.clear:eq(0)"
+      intro: "This is one of your uploaded datasets. You can get more informations and options like different visualizations by clicking on it."
+      position: "left"
+      postclick:
+        - "div.title-bar.clear:eq(0)"
+
+    - title: "Metadata"
+      element: "#current-history-panel > div.list-items > div:nth-child(1) > div.details > div.summary > div.datatype > label"
+      intro: "Galaxy has assigned a datatype to your dataset during upload, which you can see here."
+      position: "left"
+
+    - element: "#current-history-panel > div.list-items > div:nth-child(1) > div.details > div.actions.clear > div.left > a.download-btn.icon-btn"
+      title: "Download your dataset"
+      intro: "You can download every dataset by using the floppy disc symbol."
+      position: "left"
+
+    - element: "#current-history-panel > div.list-items > div:nth-child(1) > div.details > div.actions.clear > div.left > a.icon-btn.params-btn"
+      title: "Even more information"
+      intro: "Get an overview of all metadata associated with your dataset by using the Information symbol."
+      position: "left"
+      preclick:
+        - "#current-history-panel > div.list-items > div:nth-child(1) > div.details > div.actions.clear > div.left > a.icon-btn.params-btn"
+
+    - element: "#current-history-panel > div.list-items > div:nth-child(1) > div.primary-actions > a.icon-btn.display-btn"
+      title: "Inspect your data"
+      intro: "The eye symbol can be used to look at your data."
+      position: "left"
+      preclick:
+        - "#current-history-panel > div.list-items > div:nth-child(1) > div.primary-actions > a.icon-btn.display-btn"
+
+    - element: "#current-history-panel > div.list-items > div:nth-child(1) > div.primary-actions > a.icon-btn.edit-btn"
+      title: "Edit metadata"
+      intro: "With the pencil button you can edit metadata attributes of your dataset, like the associated filetype or the dataset name."
+      position: "left"
+      preclick:
+        - "#current-history-panel > div.list-items > div:nth-child(1) > div.primary-actions > a.icon-btn.edit-btn"
+
+    - element: "#current-history-panel > div.list-items > div:nth-child(1) > div.primary-actions > a.icon-btn.delete-btn"
+      title: "Remove datasets"
+      intro: "You can remove a dataset from the history with the cross symbol."
+      position: "left"
+      postclick:
+        - "#current-history-panel > div.list-items > div:nth-child(1) > div.primary-actions > a.icon-btn.delete-btn"
+
+    - element: "#current-history-panel > div.controls > div.subtitle > a"
+      title: "Include all deleted datasets"
+      intro: "By default your history will hide all deleted datasets from you. You can visualize them by toggling this button."
+      position: "bottom"
+      postclick:
+        - "#current-history-panel > div.controls > div.subtitle > a"
+
+    - element: "#current-history-panel > div.list-items > div:nth-child(1) > div.warnings > div > a"
+      title: "Undeleting a dataset"
+      intro: |
+            Galaxy datasets are only marked as deleted and can be recovered by clicking this link.
+            Please note that datasets marked as deleted can be purged by your administrator at any time.
+      position: "bottom"
+      postclick:
+        - "#current-history-panel > div.list-items > div:nth-child(1) > div.warnings > div > a"
+
+    - element: "#current-history-panel > div.controls > div.subtitle > a"
+      title: "Hiding all deleted datasets"
+      intro: "Hiding datasets that were previously deleted works in the same way."
+      position: "bottom"
+      preclick:
+        - "#current-history-panel > div.controls > div.subtitle > a"
+
+    - element: "#current-history-panel > div.controls > div.title > div"
+      title: "Change your History name"
+      intro: "You can change the history name clicking on the title."
+      position: "bottom"
+      preclick:
+        - "#current-history-panel > div.controls > div.title > div"
+
+    - element: "#current-history-panel > div.controls > div.search > div > input"
+      title: "Search your History"
+      intro: "You can filter your history by typing your search term in here. Galaxy supports more advanced filters that can be seen here."
+      position: "left"
+      textinsert: "WWFSMD"
+      preclick:
+        - "#current-history-panel > div.controls > div.search > div > input"
+
+    - element: "#history-options-button"
+      title: "History Options"
+      intro: "In the History menu you will find a lot more useful History options."
+      position: "left"
+      postclick:
+        - "#history-options-button"
+
+    - element: "#history-options-button-menu > li:nth-child(2) > a"
+      title: "History structure"
+      intro: "See the structure of your History by clicking this item."
+      position: "left"
+      postclick:
+        - "#history-options-button-menu > li:nth-child(2) > a"
+
+    - title: "Enjoy your Galaxy Histories"
+      intro: "Thanks for taking this tour! Happy research with Galaxy!"
+
diff --git a/config/plugins/tours/core.scratchbook.yaml b/config/plugins/tours/core.scratchbook.yaml
new file mode 100644
index 0000000..f3662a3
--- /dev/null
+++ b/config/plugins/tours/core.scratchbook.yaml
@@ -0,0 +1,77 @@
+name: Scratchbook - Introduction
+title_default: "Scratchbook Introduction"
+description: "An introduction on how to display multiple datasets and visualizations next to each other."
+steps:
+    - content: "This short tour will walk you through <b>Galaxy's Scratchbook</b> feature"
+
+    - element: "#tool-panel-upload-button"
+      intro: "Before using the Scratchbook, we will upload some tabular data."
+      position: "right"
+      postclick:
+        - "#tool-panel-upload-button"
+
+    - element: "#btn-new"
+      intro: "We will be using the paste feature to create a new dataset."
+      position: "top"
+      postclick:
+        - "#btn-new"
+
+    - element: ".upload-text-content:first"
+      intro: "...and paste content into the text area field."
+      position: "top"
+      textinsert: |
+        1 0.039 0.000
+        2 0.455 0.000
+        3 0.472 0.000
+        4 0.030 0.000
+        5 0.000 0.000
+        6 0.000 0.000
+        7 0.000 0.000
+        8 0.000 0.000
+
+    - element: ".upload-settings:first"
+      intro: "Now, we may further configure the upload content."
+      position: "right"
+      postclick:
+        - ".upload-settings:first"
+
+    - element: ".upload-space_to_tab:first"
+      intro: "...by specifying that all spaces in our table should be converted into tabs."
+      position: "left"
+      postclick:
+        - ".upload-space_to_tab:first"
+
+    - element: "#btn-start"
+      intro: "Upload the data into your Galaxy history."
+      position: "top"
+      postclick:
+        - "#btn-start"
+
+    - element: "#btn-close"
+      intro: "Hit the close button to close the upload dialog."
+      position: "top"
+      postclick:
+        - "#btn-close"
+
+    - element: "#enable-scratchbook > .dropdown > .dropdown-toggle"
+      intro: "Clicking this button will enable Galaxy's Scratchbook mode."
+      position: "right"
+      postclick:
+        - "#enable-scratchbook > .dropdown > .dropdown-toggle"
+
+    - element: "#right"
+      intro: "This is your history. It contains all datasets you are currently working with including our uploaded table."
+      position: "left"
+
+    - element: "#current-history-panel .fa-eye:first"
+      intro: "Clicking the eye-icon usually displays a dataset in the center panel."
+      position: "left"
+      postclick:
+        - "#current-history-panel .fa-eye:first"
+
+    - element: "#frame-0"
+      intro: "However while in Scratchbook mode, the dataset will be shown as resizable window."
+      position: "right"
+
+    - title: "Done."
+      intro: "You have created a Scratchbook view. Click on the background and select more datasets."
diff --git a/config/plugins/visualizations/README.txt b/config/plugins/visualizations/README.txt
new file mode 100644
index 0000000..07ca615
--- /dev/null
+++ b/config/plugins/visualizations/README.txt
@@ -0,0 +1,30 @@
+Custom visualization plugins
+----------------------------
+
+Visualizations can be added to your Galaxy instance by creating
+sub-directories, templates, and static files here.
+
+Properly configured and written visualizations will be accessible to
+the user when they click the 'visualizations' icon for a dataset
+in their history panel.
+
+For more information, see http://wiki.galaxyproject.org/VisualizationsRegistry
+
+
+Sub-directory structure
+-----------------------
+
+In general, sub-directories should follow the pattern:
+
+    my_visualization/
+        config/
+            my_visualization.xml
+        static/
+            ... any static files the visualization needs (if any)
+        templates/
+            ... any Mako templates the visualization needs
+
+The XML config file for a visualization plugin can be validated on the
+command line (run from this directory) using:
+
+    xmllint my_visualization/config/my_visualization.xml --valid --noout
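+
+As a sketch, a minimal config could look like the following (the element
+names mirror the bundled Charts plugin's config and may need adjusting
+for your own plugin):
+
+    <?xml version="1.0" encoding="UTF-8"?>
+    <visualization name="My Visualization">
+        <data_sources>
+            <data_source>
+                <model_class>HistoryDatasetAssociation</model_class>
+                <to_param param_attr="id">dataset_id</to_param>
+            </data_source>
+        </data_sources>
+        <params>
+            <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+        </params>
+        <entry_point entry_point_type="mako">my_visualization.mako</entry_point>
+    </visualization>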
diff --git a/config/plugins/visualizations/additional_template_paths.xml b/config/plugins/visualizations/additional_template_paths.xml
new file mode 100644
index 0000000..f921224
--- /dev/null
+++ b/config/plugins/visualizations/additional_template_paths.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- these relative paths can contain common templates importable by all visualization plugins -->
+<paths>
+    <path>common/templates</path>
+    <path>../interactive_environments/common/templates</path>
+</paths>
diff --git a/config/plugins/visualizations/charts/config/charts.xml b/config/plugins/visualizations/charts/config/charts.xml
new file mode 100644
index 0000000..caae65f
--- /dev/null
+++ b/config/plugins/visualizations/charts/config/charts.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="Charts">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.CSV</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">data.Text</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <entry_point entry_point_type="mako">charts.mako</entry_point>
+</visualization>
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/package.json b/config/plugins/visualizations/charts/package.json
new file mode 100644
index 0000000..a4d6b87
--- /dev/null
+++ b/config/plugins/visualizations/charts/package.json
@@ -0,0 +1,41 @@
+{
+  "name": "galaxy-charts-plugins",
+  "version": "0.1.0",
+  "description": "Charts visualization plugin.",
+  "keywords": [
+    "galaxy",
+    "visualization",
+    "d3"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/guerler/galaxy-charts"
+  },
+  "license": "AFL-3.0",
+  "dependencies": {
+    "amdi18n-loader": "^0.2.0",
+    "grunt": "^0.4.5",
+    "grunt-bower-install-simple": "^1.1.0",
+    "grunt-check-modules": "^1.0.0",
+    "grunt-cli": "^0.1.13",
+    "grunt-contrib-clean": "^0.6.0",
+    "grunt-contrib-copy": "^0.5.0",
+    "grunt-contrib-less": "^1.1.0",
+    "grunt-contrib-uglify": "^0.8.0",
+    "grunt-contrib-watch": "^0.6.1",
+    "grunt-exec": "^0.4.6",
+    "grunt-spritesmith": "^4.7.1",
+    "i18n-webpack-plugin": "^0.2.7",
+    "webpack": "^1.10.1",
+    "webpack-dev-server": "^1.7.0",
+    "backbone": "^1.3.3",
+    "bootstrap": "^3.3.7",
+    "cytoscape": "^2.7.10",
+    "jquery": "^3.1.1",
+    "cytoscape-edgehandles": "^2.6.0"
+  },
+  "devDependencies": {
+    "css-loader": "^0.24.0",
+    "style-loader": "^0.13.1"
+  }
+}
diff --git a/config/plugins/visualizations/charts/static/client/app.css b/config/plugins/visualizations/charts/static/client/app.css
new file mode 100644
index 0000000..6c61eda
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/app.css
@@ -0,0 +1,83 @@
+.charts-viewer {
+    overflow: auto;
+}
+
+.charts-viewer .ui-message {
+    position: absolute;
+    width: calc(100% - 20px);
+    z-index: 100;
+}
+
+.charts-description {
+    margin-bottom: 20px;
+}
+.charts-description-image-td {
+    vertical-align: top;
+}
+
+.charts-description-image {
+    width: 50px;
+    height: 43px;
+    margin-right: 10px;
+}
+
+.charts-description-title {
+    font-weight: bold;
+}
+
+.charts-description-text {
+    word-wrap: break-word;
+}
+
+.charts-tooltip {
+    position: absolute;
+    text-align: center;
+    padding: 3px 5px 3px 5px;
+    font: 12px sans-serif;
+    background: white;
+    border: 1px solid gray;
+    border-radius: 4px;
+    pointer-events: none;
+}
+
+.charts-tooltip-first {
+    font-weight: bold;
+    text-align: left;
+    padding-right: 5px;
+}
+
+.charts-viewport {
+    height: inherit;
+    min-height: 50px;
+}
+
+.charts-viewport .info {
+    position: absolute;
+    margin: 40px 20px 50px 10px;
+}
+
+.charts-viewport .text {
+    position: relative;
+    margin-left: 5px;
+    top: -1px;
+    font-size: 1.0em;
+    display: inline;
+}
+
+.charts-viewport .icon {
+    font-size: 1.2em;
+    display: inline-block;
+}
+
+.charts-viewport .charts-viewport-container {
+    float: left;
+    display: block;
+    height: 100%;
+}
+
+.charts-viewport .charts-viewport-canvas {
+    display: block;
+    width: 100%;
+    min-height: 100px;
+    height: inherit;
+}
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/app.js b/config/plugins/visualizations/charts/static/client/app.js
new file mode 100644
index 0000000..5fc609d
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/app.js
@@ -0,0 +1,68 @@
+/**
+ *  Main application class.
+ */
+define( [ 'mvc/ui/ui-modal', 'mvc/ui/ui-portlet', 'mvc/ui/ui-misc', 'utils/utils', 'plugin/components/model', 'utils/deferred', 'plugin/views/viewer', 'plugin/views/editor' ],
+    function( Modal, Portlet, Ui, Utils, Chart, Deferred, Viewer, Editor ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            var self = this;
+            require( [ 'repository/build/registry' ], function( Registry ) {
+                console.debug( 'app::initialize() - Loaded Registry:' );
+                console.debug( Registry );
+                Utils.get({
+                    url     : Galaxy.root + 'api/datasets/' + options.dataset_id,
+                    success : function( dataset ) {
+                        self.dataset = dataset;
+                        self.types = {};
+                        _.each( Registry, function( type, type_id ) {
+                            if ( !type.datatypes || type.datatypes.indexOf( dataset.file_ext ) != -1  ) {
+                                self.types[ type_id ] = type;
+                            }
+                        });
+                        if ( _.size( self.types ) === 0 ) {
+                            self.$el.append( $( '<div/>' ).addClass( 'errormessagelarge' )
+                                    .append( $( '<p/>' ).text( 'Unfortunately we could not identify a suitable plugin. Feel free to contact us if you are aware of visualizations for this datatype.' )  ) );
+                        } else {
+                            self._build( options );
+                        }
+                    }
+                });
+            }, function( err ) {
+                self.$el.append( $( '<div/>' ).addClass( 'errormessagelarge' )
+                        .append( $( '<p/>' ).text( 'Unable to access the plugin repository:' ) )
+                        .append( $( '<pre/>' ).text( 'charts_repository_url = ' + repository_root ) )
+                        .append( $( '<p/>' ).html( 'Please verify that your internet connection works properly and that the above URL is correct. Contact your admin if this error persists.' ) ) );
+            });
+        },
+
+        _build: function( options ) {
+            this.options    = options;
+            this.modal      = parent.Galaxy && parent.Galaxy.modal || new Modal.View();
+            this.chart      = new Chart( {}, options );
+            this.deferred   = new Deferred();
+            this.viewer     = new Viewer( this );
+            this.editor     = new Editor( this );
+            this.$el.append( this.viewer.$el );
+            this.$el.append( this.editor.$el );
+            this.go( this.chart.load() ? 'viewer' : 'editor' );
+        },
+
+        /** Loads a view and makes sure that all others are hidden */
+        go: function( view_id ) {
+            $( '.tooltip' ).hide();
+            this.viewer.hide();
+            this.editor.hide();
+            this[ view_id ].show();
+        },
+
+        /** Split chart type into path components */
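+        /*  e.g. a chart type id such as 'nvd3_bar' (hypothetical) maps to the
+            repository path 'nvd3/bar'; only the first underscore splits. */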
+        split: function( chart_type ) {
+            var path = chart_type.split( /_(.+)/ );
+            if ( path.length >= 2 ) {
+                return path[ 0 ] + '/' + path[ 1 ];
+            } else {
+                return chart_type;
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/components/model.js b/config/plugins/visualizations/charts/static/client/components/model.js
new file mode 100644
index 0000000..8f12033
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/components/model.js
@@ -0,0 +1,98 @@
+define( [ 'utils/utils', 'mvc/visualization/visualization-model' ], function( Utils ) {
+    return Backbone.Model.extend({
+        defaults : {
+            title           : '',
+            type            : '',
+            date            : null,
+            state           : '',
+            state_info      : '',
+            modified        : false,
+            dataset_id      : '',
+            dataset_id_job  : ''
+        },
+
+        initialize: function( options, viz_options ) {
+            this.groups         = new Backbone.Collection();
+            this.settings       = new Backbone.Model();
+            this.definition     = {};
+            this.viz_options    = viz_options;
+            console.debug( 'model::initialize() - Initialized with configuration:' );
+            console.debug( viz_options );
+        },
+
+        reset: function() {
+            this.clear().set({
+                title       : 'New Chart',
+                type        : '__first',
+                dataset_id  : this.viz_options.dataset_id
+            });
+            this.settings.clear();
+            this.groups.reset();
+            this.groups.add( { id : Utils.uid() } );
+        },
+
+        state: function( value, info ) {
+            this.set( { state : value, state_info : info } );
+            this.trigger( 'set:state' );
+            console.debug( 'model::state() - ' + info + ' (' + value + ')' );
+        },
+
+        /** Pack and save nested chart model */
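+        /*  Sketch of the packed shape stored in the visualization config (and
+            cached on viz_options.chart_dict for a later load()):
+            { attributes: {...}, settings: {...}, groups: [ {...}, ... ] } */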
+        save: function( options ) {
+            var self = this;
+            options = options || {};
+            var chart_dict = {
+                attributes : this.attributes,
+                settings   : this.settings.attributes,
+                groups     : []
+            };
+            this.groups.each( function( group ) {
+                chart_dict.groups.push( group.attributes );
+            });
+            var viz = new Visualization({
+                id      : this.viz_options.visualization_id || undefined,
+                type    : 'charts',
+                title   : this.get( 'title' ) || '',
+                config  : {
+                    dataset_id  : this.viz_options.dataset_id,
+                    chart_dict  : this.viz_options.chart_dict = chart_dict
+                }
+            });
+            viz.save().then( function( response ) {
+                if ( response && response.id ) {
+                    self.viz_options.visualization_id = response.id;
+                    options.success && options.success();
+                    console.debug( 'model::save() - Received visualization id: ' + response.id );
+                } else {
+                    options.error && options.error();
+                    console.debug( 'model::save() - Unrecognized response. Saving may have failed.' );
+                }
+            }).fail( function( response ) {
+                options.error && options.error();
+                console.debug( 'model::save() - Saving failed.' );
+            });
+            console.debug( 'model::save() - Saved with configuration:' );
+            console.debug( this.viz_options );
+        },
+
+        /** Load nested models/collections from packed dictionary */
+        load: function() {
+            console.debug( 'model::load() - Attempting to load with configuration:' );
+            console.debug( this.viz_options );
+            var chart_dict = this.viz_options.chart_dict;
+            if ( chart_dict && chart_dict.attributes ) {
+                this.set( chart_dict.attributes );
+                this.state( 'ok', 'Loading saved visualization...' );
+                this.settings.set( chart_dict.settings );
+                this.groups.reset();
+                this.groups.add( chart_dict.groups );
+                this.set( 'modified', false );
+                console.debug( 'model::load() - Loading chart model ' + chart_dict.attributes.type + '.' );
+                this.trigger( 'redraw' );
+                return true;
+            }
+            console.debug( 'model::load() - Visualization attributes unavailable.' );
+            return false;
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/components/screenshot.js b/config/plugins/visualizations/charts/static/client/components/screenshot.js
new file mode 100644
index 0000000..dc4414c
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/components/screenshot.js
@@ -0,0 +1,169 @@
+/** This class enables users to export/download a chart as PNG, SVG or PDF. */
+define( [ 'libs/underscore' ], function( _ ) {
+    /** PNG export */
+    function createPNG( options ) {
+        if ( options.$el.find( 'svg' ).length > 0 ) {
+            _svg2png( options );
+        } else {
+            _canvas2png( options.$el.find( '.charts-viewport-canvas' ), options );
+        }
+    };
+
+    /** Convert canvas to png */
+    function _canvas2png( $canvas, options ) {
+        try {
+            if ( $canvas.width() !== 0 && $canvas.height() !== 0) {
+                var newCanvas = document.createElement( 'canvas' );
+                newCanvas.width = $canvas.outerWidth( true );
+                newCanvas.height = $canvas.outerHeight( true );
+                var newContext = newCanvas.getContext( '2d' );
+                newContext.save();
+                newContext.fillStyle = 'rgb(255,255,255)';
+                newContext.fillRect( 0,0, newCanvas.width, newCanvas.height );
+                newContext.restore();
+                newContext.translate( 0, 0 );
+                newContext.textAlign = 'left';
+                newContext.textBaseline = 'top';
+                function _toImage( $el, x_offset, y_offset ) {
+                    var tagname = $el.prop( 'tagName' ).toLowerCase();
+                    var p = $el.position();
+                    var left = x_offset + p.left + parseInt( $el.css( 'marginLeft' ), 10 ) + parseInt( $el.css( 'borderLeftWidth' ), 10 ) + parseInt( $el.css( 'paddingLeft' ), 10 );
+                    var top = y_offset + p.top + parseInt( $el.css( 'marginTop' ), 10 ) + parseInt( $el.css( 'borderTopWidth' ), 10 ) + parseInt( $el.css( 'paddingTop' ), 10 );
+                    var w = newCanvas.width;
+                    if ( tagname == 'div' || tagname == 'span' ) {
+                        $el.children().each( function() { _toImage( $( this ), left, top ) } );
+                    } else if ( tagname == 'canvas' ) {
+                        newContext.drawImage( $el[ 0 ], left, top );
+                    }
+                }
+                $canvas.children().each( function() { _toImage( $( this ), 0, 0 ) } );
+                var imgData = newCanvas.toDataURL( 'image/png' );
+                if ( imgData ) {
+                    window.location.href = imgData.replace( 'image/png', 'image/octet-stream' );
+                }
+            }
+        } catch ( err ) {
+            console.debug( 'FAILED - screenshot::_canvas2png() - ' + err );
+            if ( options && options.error ) {
+                options.error( 'Please reduce your visualization to a single panel and try again.' );
+            }
+        }
+    };
+
+    /** Convert svg to png */
+    function _svg2png( options ) {
+        var scale = 5;
+        var xml = toXML( options );
+        var canvas = document.createElement( 'canvas' );
+        var context = canvas.getContext( '2d' );
+        var source = new Image();
+        var $container = $( '<div style="display:none;"/>' ).append( $( canvas ) );
+        $( 'body' ).append( $container );
+        canvas.width = xml.width * scale;
+        canvas.height = xml.height * scale;
+        source.src = 'data:image/svg+xml;base64,' + btoa( xml.string );
+        source.onload = function() {
+            context.drawImage( source, 0, 0, canvas.width, canvas.height );
+            window.location.href = canvas.toDataURL( 'image/png' ).replace( 'image/png', 'image/octet-stream' );
+            $container.remove();
+        };
+    };
+
+    /** SVG export */
+    function createSVG( options ) {
+        var xml = toXML( options );
+        xml && ( window.location.href = 'data:none/none;base64,' + btoa( xml.string ) );
+    };
+
+    /** PDF export */
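+    /*  Posts the serialized SVG through a hidden form to the external
+        Highcharts export service, which returns the rendered PDF. */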
+    function createPDF( options ) {
+        var xml = toXML( options );
+        if ( !xml ) { return; }
+        var data = {
+            filename    : 'visualization',
+            type        : 'application/pdf',
+            height      : xml.height,
+            width       : xml.width,
+            scale       : 2,
+            svg         : xml.string
+        };
+        var $el = $( 'body' );
+        var form = $el.find( '#viewport-form' );
+        if ( form.length === 0 ) {
+            form = $( '<form>', {
+                id      : 'viewport-form',
+                method  : 'post',
+                action  : 'http://export.highcharts.com/',
+                display : 'none'
+            });
+            $el.append( form );
+        }
+        form.empty();
+        for ( var name in data ) {
+            var input = $( '<input/>', {
+                type    : 'hidden',
+                name    : name,
+                value   : data[ name ]
+            });
+            form.append( input );
+        }
+        try {
+            form.submit();
+        } catch( err ) {
+            console.log( err );
+        }
+    };
+
+    /** XML export */
+    function toXML( options ) {
+        var $svg = options.$el.find( 'svg' );
+        if ( $svg.length === 0 ) {
+            options.error && options.error( 'No SVG found. This visualization type does not support SVG/PDF export.' );
+            return;
+        }
+        var nsvgs  = $svg.length;
+        var height = parseInt( $svg.first().css( 'height' ), 10 );
+        var width  = parseInt( $svg.first().css( 'width' ), 10 );
+        var serializer = new XMLSerializer();
+        var $composite = $( '<svg/>' ).attr( { version: '1.1', xmlns: 'http://www.w3.org/2000/svg', width: width * nsvgs, height: height } );
+        var offsetX = 0;
+        $svg.each(function() {
+            var $svg = $( this ).clone();
+            _inline( $svg );
+            var $g = $( '<g transform="translate(' + offsetX + ', 0)">' ).attr( 'xmlns', 'http://www.w3.org/2000/svg' );
+            $g.append( $svg.find( 'g' ).first() );
+            $composite.append( $g );
+            offsetX += width;
+        });
+        return {
+            string  : serializer.serializeToString( $composite[ 0 ] ),
+            height  : height,
+            width   : width
+        }
+    };
+
+    /** inlines CSS code */
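+    /*  Copies matching stylesheet rules onto each element's inline style so the
+        serialized SVG keeps its styling outside the page; rules that cannot be
+        applied are skipped by the try/catch. */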
+    function _inline( $target ) {
+        for ( var sheet_id in document.styleSheets ) {
+            var sheet = document.styleSheets[ sheet_id ];
+            var rules = sheet.cssRules;
+            if ( rules ) {
+                for ( var idx = 0, len = rules.length; idx < len; idx++ ) {
+                    try {
+                        $target.find( rules[ idx ].selectorText ).each( function ( i, elem ) {
+                            elem.style.cssText += rules[idx].style.cssText;
+                        });
+                    } catch( err ) {}
+                }
+            }
+        }
+    };
+
+    return {
+        createPNG: createPNG,
+        createSVG: createSVG,
+        createPDF: createPDF
+    };
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/views/description.js b/config/plugins/visualizations/charts/static/client/views/description.js
new file mode 100644
index 0000000..4164b62
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/views/description.js
@@ -0,0 +1,41 @@
+/** This class renders the chart configuration form. */
+define( [ 'utils/utils' ], function( Utils ) {
+    return Backbone.View.extend({
+        initialize: function( app, options ) {
+            var self = this;
+            this.chart = app.chart;
+            this.app = app;
+            this.setElement( this._template() );
+            this.$title = this.$( '.charts-description-title' );
+            this.$image = this.$( '.charts-description-image' );
+            this.$text  = this.$( '.charts-description-text' );
+            this.listenTo( this.chart, 'change', function() { self.render() } );
+            this.render();
+        },
+        render: function() {
+            if ( this.chart.get( 'type' ) ) {
+                this.$image.attr( 'src', repository_root + '/visualizations/' + this.app.split( this.chart.get( 'type' ) ) + '/logo.png' );
+                this.$title.html( this.chart.definition.title + ' (' + this.chart.definition.library + ')' );
+                this.$text.html( Utils.linkify( this.chart.definition.description || '' ) );
+                this.$el.show();
+            } else {
+                this.$el.hide();
+            }
+        },
+        _template: function() {
+            return  '<div class="charts-description">' +
+                        '<table>' +
+                            '<tr>' +
+                                '<td class="charts-description-image-td">' +
+                                    '<img class="charts-description-image"/>' +
+                                '</td>' +
+                                '<td>' +
+                                    '<div class="charts-description-title ui-form-info"/>' +
+                                    '<div class="charts-description-text ui-form-info"/>' +
+                                '</td>' +
+                            '</tr>' +
+                        '</table>' +
+                    '</div>';
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/views/editor.js b/config/plugins/visualizations/charts/static/client/views/editor.js
new file mode 100644
index 0000000..c7c4d8d
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/views/editor.js
@@ -0,0 +1,172 @@
+/**
+ *  The charts editor holds the tabs for selecting chart types, chart configuration
+ *  and data group selections.
+ */
+define( [ 'mvc/ui/ui-tabs', 'mvc/ui/ui-misc', 'mvc/ui/ui-portlet', 'mvc/ui/ui-thumbnails', 'utils/utils', 'plugin/views/settings', 'plugin/views/groups' ],
+    function( Tabs, Ui, Portlet, Thumbnails, Utils, SettingsView, GroupsView ) {
+    return Backbone.View.extend({
+        initialize: function( app, options ){
+            var self = this;
+            this.app = app;
+            this.chart = this.app.chart;
+            this.message = new Ui.Message( { cls: 'ui-margin-bottom' } );
+            this.portlet = new Portlet.View({
+                icon : 'fa-bar-chart-o',
+                title: 'Editor',
+                operations      : {
+                    'draw'  : new Ui.ButtonIcon({
+                        icon    : 'fa-line-chart',
+                        tooltip : 'Render Visualization',
+                        title   : 'Visualize',
+                        onclick : function() {
+                            self._drawChart();
+                        }
+                    }),
+                    'back'  : new Ui.ButtonIcon({
+                        icon    : 'fa-caret-left',
+                        tooltip : 'Return to Viewer',
+                        title   : 'Cancel',
+                        onclick : function() {
+                            self.app.go( 'viewer' );
+                            self.chart.load();
+                        }
+                    })
+                }
+            });
+
+            // grid with chart types
+            this.types = new Thumbnails.View({
+                title_default   : 'Suggested visualizations',
+                title_list      : 'List of available visualizations',
+                collection      : _.map( this.app.types, function( type, type_id ) {
+                    return {
+                        id          : type_id,
+                        keywords    : type.keywords,
+                        title       : type.title + ' (' + type.library + ')',
+                        title_icon  : type.zoomable && 'fa-search-plus',
+                        image_src   : repository_root + '/visualizations/' + self.app.split( type_id ) + '/logo.png',
+                        description : type.description
+                    }
+                }),
+                ondblclick      : function( chart_type ) { self._drawChart() },
+                onchange        : function( chart_type ) {
+                    var chart_definition = self.app.types[ chart_type ];
+                    if ( !chart_definition ) {
+                        self.tabs.hideTab( 'settings' );
+                        self.tabs.hideTab( 'groups' );
+                        self.portlet.hideOperation( 'draw' );
+                        console.debug( 'editor::onchange() - Chart type not found.' );
+                        self.message.update( { message: 'The requested visualization type could not be found. Please select a new type from below or contact us.', status: 'danger', persistent: true } );
+                    } else {
+                        self.tabs.showTab( 'settings' );
+                        self.tabs.showTab( 'groups' );
+                        self.portlet.showOperation( 'draw' );
+                        self.chart.definition = chart_definition;
+                        self.chart.set( { type : chart_type, modified : true } );
+                        self.message.model.set( 'message', '' );
+                        console.debug( 'editor::onchange() - Switched visualization type.' );
+                    }
+                }
+            });
+
+            // input field for chart title
+            this.title = new Ui.Input({
+                placeholder: 'Chart title',
+                onchange: function() {
+                    self.chart.set( 'title', self.title.value() );
+                }
+            });
+
+            // create tabs
+            this.tabs = new Tabs.View( {} );
+            this.tabs.add({
+                id      : 'main',
+                title   : 'Start',
+                icon    : 'fa fa-bars',
+                tooltip : 'Start by selecting a visualization.',
+                $el     : $( '<div/>' ).append( ( new Ui.Label( { title : 'Provide a title:' } ).$el ) )
+                                       .append( this.title.$el )
+                                       .append( $( '<div/>' ).addClass( 'ui-form-info ui-margin-bottom' ).html( 'This title will appear in the list of \'Saved Visualizations\'.' ) )
+                                       .append( ( new Ui.Label( { title : 'Select a visualization:' } ).$el.addClass( 'ui-margin-top' ) ) )
+                                       .append( this.types.$el )
+            });
+            this.tabs.add({
+                id      : 'settings',
+                title   : 'Customize',
+                icon    : 'fa-gear',
+                tooltip : 'Customize the visualization.',
+                $el     : ( new SettingsView( this.app ) ).$el
+            });
+            this.tabs.add({
+                id      : 'groups',
+                title   : 'Select data',
+                icon    : 'fa-database',
+                tooltip : 'Specify data options.',
+                $el     : ( new GroupsView( this.app ) ).$el
+            });
+
+            // set elements
+            this.portlet.append( this.message.$el );
+            this.portlet.append( this.tabs.$el.addClass( 'ui-margin-top-large' ) );
+            this.portlet.hideOperation( 'back' );
+            this.setElement( this.portlet.$el );
+
+            // chart events
+            this.listenTo( this.chart, 'change:title', function( chart ) { self._refreshTitle() } );
+            this.listenTo( this.chart, 'change:type', function( chart ) { self.types.value( chart.get( 'type' ) ) } );
+            this.listenTo( this.chart, 'redraw', function( chart ) { self.portlet.showOperation( 'back' ) } );
+            this.chart.reset();
+        },
+
+        /** Show editor */
+        show: function() {
+            this.$el.show();
+        },
+
+        /** Hide editor */
+        hide: function() {
+            this.$el.hide();
+        },
+
+        /** Refresh title handler */
+        _refreshTitle: function() {
+            var title = this.chart.get( 'title' );
+            this.portlet.title( title );
+            this.title.value( title );
+        },
+
+        /** Draw chart data */
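+        /*  Before rendering, verify that every data series has a column assigned
+            for each of its required '__data_columns' entries. */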
+        _drawChart: function() {
+            var self = this;
+            this.chart.set({
+                type        : this.types.value(),
+                title       : this.title.value(),
+                date        : Utils.time()
+            });
+            if ( this.chart.groups.length == 0 ) {
+                this.message.update( { message: 'Please specify data options before rendering the visualization.', persistent: false } );
+                this.tabs.show( 'groups' );
+                return;
+            }
+            var valid = true;
+            var chart_def = this.chart.definition;
+            this.chart.groups.each( function( group ) {
+                if ( valid ) {
+                    _.each( group.get( '__data_columns' ), function( data_columns, name ) {
+                        if ( group.attributes[ name ] === null ) {
+                            self.message.update( { status: 'danger', message: 'This visualization type requires column types not found in your tabular file.', persistent: false } );
+                            self.tabs.show( 'groups' );
+                            valid = false;
+                        }
+                    });
+                }
+            });
+            if ( valid ) {
+                this.app.go( 'viewer' );
+                this.app.deferred.execute( function() {
+                    self.chart.trigger( 'redraw' );
+                });
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/views/groups.js b/config/plugins/visualizations/charts/static/client/views/groups.js
new file mode 100644
index 0000000..85385d7
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/views/groups.js
@@ -0,0 +1,105 @@
+/** This class renders the chart data selection form with repeats. */
+define( [ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/form/form-view', 'mvc/form/form-repeat', 'mvc/form/form-data', 'plugin/views/description' ],
+    function( Utils, Ui, Form, Repeat, FormData, Description ) {
+    var GroupView = Backbone.View.extend({
+        initialize: function( app, options ) {
+            var self = this;
+            this.app    = app;
+            this.chart  = app.chart;
+            this.group  = options.group;
+            this.setElement( $( '<div/>' ) );
+            this.listenTo( this.chart, 'change:dataset_id change:type', function() { self.render() } );
+            this.render();
+        },
+
+        render: function() {
+            var self = this;
+            var inputs = this.chart.definition.groups ? Utils.clone( this.chart.definition.groups ) : {};
+            var dataset_id = this.chart.get( 'dataset_id' );
+            var chart_type = this.chart.get( 'type' );
+            var chart_definition = this.chart.definition;
+            if ( dataset_id && chart_type ) {
+                this.chart.state( 'wait', 'Loading metadata...' );
+                this.app.deferred.execute( function( process ) {
+                    Utils.get({
+                        url     : Galaxy.root + 'api/datasets/' + dataset_id,
+                        cache   : true,
+                        success : function( dataset ) {
+                            var data_columns = {};
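+                            /*  For each 'data_column' input, build the list of selectable
+                                columns from the dataset's metadata_column_types: numeric
+                                columns for numeric inputs, any column for label inputs. */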
+                            FormData.visitInputs( inputs, function( input, prefixed ) {
+                                if ( input.type == 'data_column' ) {
+                                    data_columns[ prefixed ] = Utils.clone( input );
+                                    var columns = [];
+                                    input.is_auto && columns.push( { 'label': 'Column: Row Number', 'value': 'auto' } );
+                                    input.is_zero && columns.push( { 'label' : 'Column: None', 'value' : 'zero' } );
+                                    var meta = dataset.metadata_column_types;
+                                    for ( var key in meta ) {
+                                        var valid = ( [ 'int', 'float' ].indexOf( meta[ key ] ) != -1 && input.is_numeric ) || input.is_label;
+                                        valid && columns.push( { 'label' : 'Column: ' + ( parseInt( key ) + 1 ), 'value' : key } );
+                                    }
+                                    input.data = columns;
+                                }
+                                var model_value = self.group.get( prefixed );
+                                model_value !== undefined && !input.hidden && ( input.value = model_value );
+                            });
+                            inputs[ '__data_columns' ] = { name: '__data_columns', type: 'hidden', hidden: true, value: data_columns };
+                            self.chart.state( 'ok', 'Metadata initialized...' );
+                            self.form = new Form( {
+                                inputs  : inputs,
+                                cls     : 'ui-portlet-plain',
+                                onchange: function() {
+                                    self.group.set( self.form.data.create() );
+                                    self.chart.set( 'modified', true );
+                                }
+                            } );
+                            self.group.set( self.form.data.create() );
+                            self.$el.empty().append( self.form.$el );
+                            process.resolve();
+                        }
+                    });
+                });
+            }
+        }
+    });
+
+    return Backbone.View.extend({
+        initialize: function( app ) {
+            var self    = this;
+            this.app    = app;
+            this.chart  = app.chart;
+            this.repeat = new Repeat.View({
+                title       : 'Data series',
+                title_new   : 'Data series',
+                min         : 1,
+                onnew       : function() { self.chart.groups.add( { id : Utils.uid() } ) }
+            });
+            this.description = new Description( this.app );
+            this.message = new Ui.Message( { message : 'There are no options for this visualization type.', persistent : true, status : 'info' } );
+            this.setElement( $( '<div/>' ).append( this.description.$el )
+                                          .append( this.repeat.$el.addClass( 'ui-margin-bottom' ) )
+                                          .append( this.message.$el.addClass( 'ui-margin-bottom' ) ) );
+            this.listenTo( this.chart, 'change', function() { self.render() } );
+            this.listenTo( this.chart.groups, 'add remove reset', function() { self.chart.set( 'modified', true ) } );
+            this.listenTo( this.chart.groups, 'remove', function( group ) { self.repeat.del( group.id ) } );
+            this.listenTo( this.chart.groups, 'reset', function() { self.repeat.delAll() } );
+            this.listenTo( this.chart.groups, 'add', function( group ) {
+                self.repeat.add({
+                     id      : group.id,
+                     cls     : 'ui-portlet-panel',
+                     $el     : ( new GroupView( self.app, { group: group } ) ).$el,
+                     ondel   : function() { self.chart.groups.remove( group ) }
+                });
+            });
+        },
+
+        render: function() {
+            if ( _.size( this.chart.definition.groups ) > 0 ) {
+                this.repeat.$el.show();
+                this.message.$el.hide();
+            } else {
+                this.repeat.$el.hide();
+                this.message.$el.show();
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/views/settings.js b/config/plugins/visualizations/charts/static/client/views/settings.js
new file mode 100644
index 0000000..8adf447
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/views/settings.js
@@ -0,0 +1,47 @@
+/** This class renders the chart configuration form. */
+define( [ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/form/form-view', 'mvc/form/form-data', 'plugin/views/description' ], function( Utils, Ui, Form, FormData, Description ) {
+    return Backbone.View.extend({
+        initialize: function( app, options ) {
+            var self = this;
+            this.app = app;
+            this.chart = app.chart;
+            this.description = new Description( this.app );
+            this.message = new Ui.Message( { message: 'There are no options for this visualization type.', persistent: true, status: 'info' } );
+            this.setElement( $( '<div/>' ).append( this.description.$el )
+                                          .append( this.message.$el.addClass( 'ui-margin-bottom' ) )
+                                          .append( this.$form = $( '<div/>' ).addClass( 'ui-margin-bottom' ) ) );
+            this.listenTo( this.chart, 'change', function() { self.render() } );
+        },
+        render: function() {
+            var self = this;
+            var inputs = Utils.clone( this.chart.definition.settings ) || {};
+            var panel_option = this.chart.definition.use_panels;
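+            /*  Only a 'use_panels' value of 'both' exposes the multi-panel toggle;
+                otherwise the setting is fixed ('yes' maps to true, anything else to false). */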
+            if ( panel_option == 'both' ) {
+                inputs[ '__use_panels' ] = {
+                    type    : 'boolean',
+                    label   : 'Use multi-panels',
+                    help    : 'Would you like to separate your data into individual panels?'
+                };
+            } else {
+                this.chart.settings.set( '__use_panels', panel_option == 'yes' ? 'true' : 'false' );
+            }
+            if ( _.size( inputs ) > 0 ) {
+                FormData.visitInputs( inputs, function( input, name ) {
+                    var model_value = self.chart.settings.get( name );
+                    model_value !== undefined && !input.hidden && ( input.value = model_value );
+                });
+                this.form = new Form({
+                    inputs   : inputs,
+                    cls      : 'ui-portlet-plain',
+                    onchange : function() { self.chart.settings.set( self.form.data.create() ); }
+                });
+                this.chart.settings.set( this.form.data.create() );
+                this.$form.empty().append( this.form.$el );
+                this.message.$el.hide();
+            } else {
+                this.$form.empty();
+                this.message.$el.show();
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/views/viewer.js b/config/plugins/visualizations/charts/static/client/views/viewer.js
new file mode 100644
index 0000000..fb6eb13
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/views/viewer.js
@@ -0,0 +1,137 @@
+/** This class renders the chart viewer which encapsulates the chart viewport. */
+define( [ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-portlet', 'plugin/views/viewport', 'plugin/components/screenshot' ],
+    function( Utils, Ui, Portlet, Viewport, Screenshot ) {
+    return Backbone.View.extend({
+        initialize: function(app, options){
+            var self = this;
+            this.app = app;
+            this.chart = this.app.chart;
+            this.viewport = new Viewport( app );
+            this.message = new Ui.Message();
+            this.export_button = new Ui.ButtonMenu({
+                icon    : 'fa-camera',
+                title   : 'Export',
+                tooltip : 'Export/Download Visualization'
+            });
+            this.export_button.addMenu({
+                key         : 'png',
+                title       : 'Save as PNG',
+                icon        : 'fa-file',
+                onclick     : function() {
+                    self._wait( self.chart, function() {
+                        Screenshot.createPNG({
+                            $el     : self.viewport.$el,
+                            title   : self.chart.get( 'title' ),
+                            error   : function( err ) {
+                                self.message.update( { message: err, status: 'danger' } );
+                            }
+                        });
+                    });
+                }
+            });
+            this.export_button.addMenu({
+                key         : 'svg',
+                title       : 'Save as SVG',
+                icon        : 'fa-file-text-o',
+                onclick     : function() {
+                    self._wait( self.chart, function() {
+                        Screenshot.createSVG({
+                            $el     : self.viewport.$el,
+                            title   : self.chart.get( 'title' ),
+                            error   : function( err ) {
+                                self.message.update( { message: err, status: 'danger' } );
+                            }
+                        });
+                    });
+                }
+            });
+            this.export_button.addMenu({
+                key         : 'pdf',
+                title       : 'Save as PDF',
+                icon        : 'fa-file-o',
+                onclick     : function() {
+                    self.app.modal.show({
+                        title   : 'Send visualization data for PDF creation',
+                        body    : 'Galaxy does not provide integrated PDF export scripts. You may click \'Continue\' to create the PDF by using a 3rd party service (https://export.highcharts.com).',
+                        buttons : {
+                            'Cancel' : function() { self.app.modal.hide() },
+                            'Continue' : function() {
+                                self.app.modal.hide();
+                                self._wait( self.chart, function() {
+                                    Screenshot.createPDF({
+                                        $el     : self.viewport.$el,
+                                        title   : self.chart.get( 'title' ),
+                                        error   : function( err ) {
+                                            self.message.update( { message: err, status: 'danger' } );
+                                        }
+                                    });
+                                });
+                            }
+                        }
+                    });
+                }
+            });
+            this.portlet = new Portlet.View({
+                icon : 'fa-bar-chart-o',
+                title: 'Viewport',
+                cls  : 'ui-portlet charts-viewer',
+                operations: {
+                    edit_button: new Ui.ButtonIcon({
+                        icon    : 'fa-edit',
+                        tooltip : 'Customize this Visualization',
+                        title   : 'Editor',
+                        onclick : function() {
+                            self._wait( self.chart, function() {
+                                self.app.go( 'editor' );
+                            });
+                        }
+                    }),
+                    export_button: this.export_button,
+                    save_button: new Ui.ButtonIcon({
+                        icon    : 'fa-save',
+                        tooltip : 'Save this Visualization',
+                        title   : 'Save',
+                        onclick : function() {
+                            self.message.update( { message: 'Saving \'' + self.chart.get( 'title' ) + '\'. It will appear in the list of \'Saved Visualizations\'.', status: 'success' } );
+                            self.chart.save( { error : function() { self.message.update( { message: 'Could not save visualization.', status: 'danger' } ) } } );
+                        }
+                    })
+                }
+            });
+            this.portlet.append( this.message.$el );
+            this.portlet.append( this.viewport.$el.addClass( 'ui-margin-top' ) );
+            this.setElement( this.portlet.$el );
+            this.listenTo( this.chart, 'change', function() { self.render() } );
+        },
+
+        /** Show and refresh viewer */
+        show: function() {
+            this.$el.show();
+            $( window ).trigger( 'resize' );
+        },
+
+        /** Hide viewer */
+        hide: function() {
+            this.$el.hide();
+        },
+
+        /** Change title */
+        render: function() {
+            var title = this.chart.get( 'title' );
+            this.portlet.title( title );
+            var exports = this.chart.definition && this.chart.definition.exports || [];
+            this.export_button.collection.each( function( model ) {
+                model.set( 'visible', exports.indexOf( model.get( 'key' ) ) !== -1 );
+            });
+        },
+
+        /** Run the callback only if no chart processes are pending; otherwise ask the user to wait and retry */
+        _wait: function( chart, callback ) {
+            if ( this.app.deferred.ready() ) {
+                callback();
+            } else {
+                this.message.update( { message: 'Your visualization is currently being processed. Please wait and try again.' } );
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/client/views/viewport.js b/config/plugins/visualizations/charts/static/client/views/viewport.js
new file mode 100644
index 0000000..47359c2
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/client/views/viewport.js
@@ -0,0 +1,100 @@
+/**
+ *  The viewport creates and manages the dom elements used by the visualization plugins to draw the chart.
+ *  This is the last class of the charts core classes before handing control over to the visualization plugins.
+ */
+define( [ 'mvc/ui/ui-portlet', 'mvc/ui/ui-misc', 'utils/utils' ], function( Portlet, Ui, Utils ) {
+    return Backbone.View.extend({
+        initialize: function( app, options ) {
+            var self = this;
+            this.app = app;
+            this.chart = this.app.chart;
+            this.options = options;
+            this.setElement( $( '<div/>' ).addClass( 'charts-viewport' )
+                                          .append( $( '<div/>' ).addClass( 'info' )
+                                                                .append( $( '<span/>' ).addClass( 'icon' ) )
+                                                                .append( $( '<span/>' ).addClass( 'text' ) ) ) );
+            this.$info = this.$( '.info' );
+            this.$icon = this.$( '.icon' );
+            this.$text = this.$( '.text' );
+            this._fullscreen( this.$el, 55 );
+            this._createContainer( 'div' );
+            this.chart.on( 'redraw', function() {
+                self.app.deferred.execute( function( process ) {
+                    self._draw( process, self.chart );
+                });
+            });
+            this.chart.on( 'set:state', function() {
+                var $container = self.$( '.charts-viewport-container' );
+                var $info = self.$info;
+                var $icon = self.$icon;
+                var $text = self.$text;
+                $icon.removeClass();
+                $info.show();
+                $text.html( self.chart.get( 'state_info' ) );
+                var state = self.chart.get( 'state' );
+                switch ( state ) {
+                    case 'ok':
+                        $info.hide();
+                        $container.show();
+                        break;
+                    case 'failed':
+                        $icon.addClass( 'icon fa fa-warning' );
+                        $container.hide();
+                        break;
+                    default:
+                        $icon.addClass( 'icon fa fa-spinner fa-spin' );
+                        $container.show();
+                }
+            });
+        },
+
+        /** Show rendered chart */
+        show: function() {
+            this.$el.show();
+        },
+
+        /** Hide chart */
+        hide: function() {
+            this.$el.hide();
+        },
+
+        /** Force resize to fullscreen */
+        _fullscreen: function( $el, margin ) {
+            $el.css( 'height', $( window ).height() - margin );
+            $( window ).resize( function() {
+                $el.css( 'height', $( window ).height() - margin );
+            });
+        },
+
+        /** A chart may consist of multiple sub-charts; one container per panel is created here */
+        _createContainer: function( tag, n ) {
+            tag = tag || 'div';
+            n = n || 1;
+            this.$( '.charts-viewport-container' ).remove();
+            this.targets = [];
+            for ( var i = 0; i < n; i++ ) {
+                var container_id = Utils.uid();
+                var container_el = $( '<div/>' ).addClass( 'charts-viewport-container' )
+                                                .width( Math.floor( 100 / n ) + '%' )
+                                                .append( $( '<' + tag + ' class="charts-viewport-canvas" />' ).attr( 'id', container_id ) );
+                this.$el.append( container_el );
+                this.targets.push( container_id );
+            }
+        },
+
+        /** Draws a new chart by loading and executing the corresponding chart wrapper */
+        _draw: function( process, chart ) {
+            var self = this;
+            var n_panels = chart.settings.get( '__use_panels' ) === 'true' ? chart.groups.length : 1;
+            this._createContainer( chart.definition.tag, n_panels );
+            chart.state( 'wait', 'Please wait...' );
+            require( [ 'repository/build/' + chart.get( 'type' ) ], function( ChartView ) {
+                new ChartView( { process: process, chart: chart, dataset: self.app.dataset, targets: self.targets } );
+            }, function( err ) {
+                chart.state( 'failed', 'This visualization could not be loaded from the repository. Please verify that your internet connection works properly, and contact the Galaxy Team if this error persists.' );
+                console.debug( err );
+                process.resolve();
+            });
+        }
+    });
+});
\ No newline at end of file
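
The require() call in _draw() above expects each repository/build/<type> bundle to return a constructor honoring a small contract: it is instantiated with { process, chart, dataset, targets }. A minimal wrapper sketch follows; the completion calls chart.state( 'ok', ... ) and process.resolve() mirror the failure path in _draw() and the 'ok' case of the set:state handler, but should be read as assumptions about the plugin protocol rather than a verbatim plugin:

    // Minimal chart wrapper sketch matching the options passed by _draw().
    define( [], function() {
        return Backbone.View.extend({
            initialize: function( options ) {
                var chart = options.chart;
                // draw into the first container created by the viewport
                $( '#' + options.targets[ 0 ] )
                    .text( 'Rendered: ' + chart.get( 'title' ) );
                // signal completion so the viewport hides the wait spinner
                chart.state( 'ok', 'Chart has been drawn.' );
                options.process.resolve();
            }
        });
    });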
diff --git a/config/plugins/visualizations/charts/static/repository/build/benfred_venn.js b/config/plugins/visualizations/charts/static/repository/build/benfred_venn.js
new file mode 100644
index 0000000..d183272
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/benfred_venn.js
@@ -0,0 +1,2 @@
+define(function(){return function(t){function e(n){if(r[n])return r[n].exports;var a=r[n]={exports:{},id:n,loaded:!1};return t[n].call(a.exports,a,a.exports,e),a.loaded=!0,a.exports}var r={};return e.m=t,e.c=r,e.p="",e(0)}({0:function(t,e,r){var n,a;n=[r(5),r(160),r(159)],a=function(t,e){return Backbone.View.extend({_combinations:function(t,e,r){var n=this;_.each(e,function(a,i){var s=t.slice(),o=e.slice();o.splice(0,i+1),s.push(a),r.push(s),n._combinations(s,o,r)})},initialize:function( [...]
+//# sourceMappingURL=benfred_venn.js.map
\ No newline at end of file
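
Every build/*.js bundle in this commit is wrapped in the same minified webpack module bootstrap as the file above. Expanded here for readability; identifier names are editorial, the logic is taken directly from the minified source:

    // De-minified webpack bootstrap shared by the build/*.js bundles.
    define( function() {
        return function( modules ) {
            var installedModules = {};
            function __webpack_require__( moduleId ) {
                // return a cached module if it was already loaded
                if ( installedModules[ moduleId ] )
                    return installedModules[ moduleId ].exports;
                var module = installedModules[ moduleId ] = { exports: {}, id: moduleId, loaded: false };
                // run the module factory with its exports object as context
                modules[ moduleId ].call( module.exports, module, module.exports, __webpack_require__ );
                module.loaded = true;
                return module.exports;
            }
            __webpack_require__.m = modules;          // module map
            __webpack_require__.c = installedModules; // module cache
            __webpack_require__.p = '';               // public path (unused here)
            return __webpack_require__( 0 );          // module 0 is the plugin wrapper
        };
    });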
diff --git a/config/plugins/visualizations/charts/static/repository/build/benfred_venn.js.map b/config/plugins/visualizations/charts/static/repository/build/benfred_venn.js.map
new file mode 100644
index 0000000..f742b34
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/benfred_venn.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///benfred_venn.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1*******************","webpack:///./static/repository/visualizations/benfred/venn/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c*****************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b*****************","webpack:///./~/css-loader/lib/css-base.js?da04*****************","webpack:///./~/style-loader/addStyles.js?b980***** [...]
\ No newline at end of file
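
The accompanying .js.map files are standard source map v3 documents. Their overall shape, written as an annotated object literal with values trimmed (the 'mappings' field carries base64-VLQ position data, elided in the truncated lines above):

    // Source map v3 skeleton as found in these .map files.
    var sourceMapSkeleton = {
        version: 3,                                 // source map spec version
        sources: [ 'webpack:///benfred_venn.js' ],  // original module paths
        names: [ 'define', 'modules' ],             // identifiers restored on decode
        mappings: '...'                             // base64-VLQ segments (elided)
    };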
diff --git a/config/plugins/visualizations/charts/static/repository/build/biojs_drawrnajs.js b/config/plugins/visualizations/charts/static/repository/build/biojs_drawrnajs.js
new file mode 100644
index 0000000..ce6251d
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/biojs_drawrnajs.js
@@ -0,0 +1,91 @@
+define(function(){return function(e){function t(r){if(n[r])return n[r].exports;var i=n[r]={exports:{},id:r,loaded:!1};return e[r].call(i.exports,i,i.exports,t),i.loaded=!0,i.exports}var n={};return t.m=e,t.c=n,t.p="",t(0)}([function(e,t,n){var r,i;r=[n(162)],i=function(e){return Backbone.Model.extend({initialize:function(t){var n=t.chart,r=t.dataset;$.ajax({url:r.download_url,success:function(r){var i=r.split("\n"),a=new e({el:document.getElementById(t.targets[0]),seq:i[1],dotbr:i[2],res [...]
+return t.previousModels=this.models,this._reset(),e=this.add(e,n.extend({silent:!0},t)),t.silent||this.trigger("reset",this,t),e},push:function(e,t){return this.add(e,n.extend({at:this.length},t))},pop:function(e){var t=this.at(this.length-1);return this.remove(t,e)},unshift:function(e,t){return this.add(e,n.extend({at:0},t))},shift:function(e){var t=this.at(0);return this.remove(t,e)},slice:function(){return a.apply(this.models,arguments)},get:function(e){if(null!=e)return this._byId[e] [...]
+	Embeddable Minimum Strictly-Compliant Promises/A+ 1.1.1 Thenable
+	Copyright (c) 2013-2014 Ralf S. Engelschall (http://engelschall.com)
+	Licensed under The MIT License (http://opensource.org/licenses/MIT)
+	*/
+"use strict";var n=0,r=1,i=2,a=function(e){return this instanceof a?(this.id="Thenable/1.0.7",this.state=n,this.fulfillValue=void 0,this.rejectReason=void 0,this.onFulfilled=[],this.onRejected=[],this.proxy={then:this.then.bind(this)},void("function"==typeof e&&e.call(this,this.fulfill.bind(this),this.reject.bind(this)))):new a(e)};a.prototype={fulfill:function(e){return o(this,r,"fulfillValue",e)},reject:function(e){return o(this,i,"rejectReason",e)},then:function(e,t){var n=this,r=new  [...]
+	Ported by Xueqiao Xu <xueqiaoxu at gmail.com>;
+	
+	PSF LICENSE AGREEMENT FOR PYTHON 2.7.2
+	
+	1. This LICENSE AGREEMENT is between the Python Software Foundation (“PSF”), and the Individual or Organization (“Licensee”) accessing and otherwise using Python 2.7.2 software in source or binary form and its associated documentation.
+	2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 2.7.2 alone or in any derivative version, provided, however, that PSF’s License Agreement and PSF’s notice of copyright, i.e., “Copyright © 2001-2012 Python Software Foundation; All Rights Reserved” are retained in Python 2. [...]
+	3. In the event Licensee prepares a derivative work that is based on or incorporates Python 2.7.2 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python 2.7.2.
+	4. PSF is making Python 2.7.2 available to Licensee on an “AS IS” basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 2.7.2 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
+	5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.2 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.2, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+	6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
+	7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
+	8. By copying, installing or otherwise using Python 2.7.2, Licensee agrees to be bound by the terms and conditions of this License Agreement.
+	*/
+"use strict";var n,r,i,a,o,s,l,u,c,d,h,p,f,g,v;i=Math.floor,d=Math.min,r=function(e,t){return e<t?-1:e>t?1:0},c=function(e,t,n,a,o){var s;if(null==n&&(n=0),null==o&&(o=r),n<0)throw new Error("lo must be non-negative");for(null==a&&(a=e.length);n<a;)s=i((n+a)/2),o(t,e[s])<0?a=s:n=s+1;return[].splice.apply(e,[n,n-n].concat(t)),t},s=function(e,t,n){return null==n&&(n=r),e.push(t),g(e,0,e.length-1,n)},o=function(e,t){var n,i;return null==t&&(t=r),n=e.pop(),e.length?(i=e[0],e[0]=n,v(e,0,t)):i [...]
+	 * jQuery JavaScript Library v3.1.1
+	 * https://jquery.com/
+	 *
+	 * Includes Sizzle.js
+	 * https://sizzlejs.com/
+	 *
+	 * Copyright jQuery Foundation and other contributors
+	 * Released under the MIT license
+	 * https://jquery.org/license
+	 *
+	 * Date: 2016-09-22T22:30Z
+	 */
+!function(t,n){"use strict";"object"==typeof e&&"object"==typeof e.exports?e.exports=t.document?n(t,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return n(e)}:n(t)}("undefined"!=typeof window?window:this,function(n,a){"use strict";function o(e,t){t=t||ae;var n=t.createElement("script");n.text=e,t.head.appendChild(n).parentNode.removeChild(n)}function s(e){var t=!!e&&"length"in e&&e.length,n=me.type(e);return"function"!==n&&!me.isWindow(e)&&("a [...]
+	 * Sizzle CSS Selector Engine v2.3.3
+	 * https://sizzlejs.com/
+	 *
+	 * Copyright jQuery Foundation and other contributors
+	 * Released under the MIT license
+	 * http://jquery.org/license
+	 *
+	 * Date: 2016-08-08
+	 */
+function(e){function t(e,t,n,r){var i,a,o,s,l,u,c,h=t&&t.ownerDocument,f=t?t.nodeType:9;if(n=n||[],"string"!=typeof e||!e||1!==f&&9!==f&&11!==f)return n;if(!r&&((t?t.ownerDocument||t:$)!==I&&A(t),t=t||I,L)){if(11!==f&&(l=ye.exec(e)))if(i=l[1]){if(9===f){if(!(o=t.getElementById(i)))return n;if(o.id===i)return n.push(o),n}else if(h&&(o=h.getElementById(i))&&z(t,o)&&o.id===i)return n.push(o),n}else{if(l[2])return K.apply(n,t.getElementsByTagName(e)),n;if((i=l[3])&&E.getElementsByClassName&& [...]
+Ke.th=Ke.td;var Je=/<|&#?\w+;/;!function(){var e=ae.createDocumentFragment(),t=e.appendChild(ae.createElement("div")),n=ae.createElement("input");n.setAttribute("type","radio"),n.setAttribute("checked","checked"),n.setAttribute("name","t"),t.appendChild(n),ve.checkClone=t.cloneNode(!0).cloneNode(!0).lastChild.checked,t.innerHTML="<textarea>x</textarea>",ve.noCloneChecked=!!t.cloneNode(!0).lastChild.defaultValue}();var et=ae.documentElement,tt=/^key/,nt=/^(?:mouse|pointer|contextmenu|drag [...]
+return this.parent(e).not("body").each(function(){me(this).replaceWith(this.childNodes)}),this}}),me.expr.pseudos.hidden=function(e){return!me.expr.pseudos.visible(e)},me.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},me.ajaxSettings.xhr=function(){try{return new n.XMLHttpRequest}catch(e){}};var Vt={0:200,1223:204},Yt=me.ajaxSettings.xhr();ve.cors=!!Yt&&"withCredentials"in Yt,ve.ajax=Yt=!!Yt,me.ajaxTransport(function(e){var t,r;if(ve. [...]
+	Event object based on jQuery events, MIT license
+	
+	https://jquery.org/license/
+	https://tldrlegal.com/license/mit-license
+	https://github.com/jquery/jquery/blob/master/src/event.js
+	*/
+var i=function(e,t){return this instanceof i?(e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented?r:n):this.type=e,t&&(this.type=void 0!==t.type?t.type:this.type,this.cy=t.cy,this.cyTarget=t.cyTarget,this.cyPosition=t.cyPosition,this.cyRenderedPosition=t.cyRenderedPosition,this.namespace=t.namespace,this.layout=t.layout,this.data=t.data,this.message=t.message),void(this.timeStamp=e&&e.timeStamp||Date.now())):new i(e,t)};i.prototype={instanceString: [...]
+"use strict";var window=__webpack_require__(25),util=__webpack_require__(3),Promise=__webpack_require__(24),Event=__webpack_require__(33),define=__webpack_require__(13),is=__webpack_require__(2),Thread=function(e){if(!(this instanceof Thread))return new Thread(e);var t=this._private={requires:[],files:[],queue:null,pass:[],disabled:!1};is.plainObject(e)&&null!=e.disabled&&(t.disabled=!!e.disabled)},thdfn=Thread.prototype,stringifyFieldVal=function(e){var t=is.fn(e)?e.toString():"JSON.par [...]
+return null===e},E.isUndefined=function(e){return void 0===e},E.has=function(e,t){return null!=e&&v.call(e,t)},E.noConflict=function(){return l._=u,this},E.identity=function(e){return e},E.constant=function(e){return function(){return e}},E.noop=function(){},E.property=_,E.propertyOf=function(e){return null==e?function(){}:function(t){return e[t]}},E.matcher=E.matches=function(e){return e=E.extendOwn({},e),function(t){return E.isMatch(t,e)}},E.times=function(e,t,n){var r=Array(Math.max(0 [...]
+e.fn.popover.Constructor=n,e.fn.popover.noConflict=function(){return e.fn.popover=r,this}}(jQuery)},function(e,t){+function(e){"use strict";function t(n,r){this.$body=e(document.body),this.$scrollElement=e(e(n).is(document.body)?window:n),this.options=e.extend({},t.DEFAULTS,r),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",e.proxy(this.process,this)),this.refres [...]
+	
+	Cytoscape.js {{VERSION}} (MIT licensed)
+	
+	Copyright (c) The Cytoscape Consortium
+	
+	Permission is hereby granted, free of charge, to any person obtaining a copy of
+	this software and associated documentation files (the “Software”), to deal in
+	the Software without restriction, including without limitation the rights to
+	use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+	of the Software, and to permit persons to whom the Software is furnished to do
+	so, subject to the following conditions:
+	
+	The above copyright notice and this permission notice shall be included in all
+	copies or substantial portions of the Software.
+	
+	THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+	IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+	FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+	AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+	LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+	OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+	SOFTWARE.
+	
+	*/
+"use strict"},function(e,t,n){"use strict";var r=n(3),i=n(2),a=n(24),o=function(e,t,n){if(!(this instanceof o))return new o(e,t,n);var a=this._private=r.extend({duration:1e3},t,n);a.target=e,a.style=a.style||a.css,a.started=!1,a.playing=!1,a.hooked=!1,a.applying=!1,a.progress=0,a.completes=[],a.frames=[],a.complete&&i.fn(a.complete)&&a.completes.push(a.complete),this.length=1,this[0]=this},s=o.prototype;r.extend(s,{instanceString:function(){return"animation"},hook:function(){var e=this._ [...]
+return a+o+s}return 1}},r["rendered"+e.uppercaseName]=function(){var t=this[0];if(t){var n=t[e.name]();return n*this.cy().zoom()}},r["rendered"+e.uppercaseOuterName]=function(){var t=this[0];if(t){var n=t[e.outerName]();return n*this.cy().zoom()}}};w({name:"width",paddings:["padding-left","padding-right"]}),w({name:"height",paddings:["padding-top","padding-bottom"]}),r.modelPosition=r.point=r.position,r.modelPositions=r.points=r.positions,r.renderedPoint=r.renderedPosition,r.relativePoin [...]
+var h=function(){function e(e){return-e.tension*e.x-e.friction*e.v}function t(t,n,r){var i={x:t.x+r.dx*n,v:t.v+r.dv*n,tension:t.tension,friction:t.friction};return{dx:i.v,dv:e(i)}}function n(n,r){var i={dx:n.v,dv:e(n)},a=t(n,.5*r,i),o=t(n,.5*r,a),s=t(n,r,o),l=1/6*(i.dx+2*(a.dx+o.dx)+s.dx),u=1/6*(i.dv+2*(a.dv+o.dv)+s.dv);return n.x=n.x+l*r,n.v=n.v+u*r,n}return function r(e,t,i){var a,o,s,l={x:-1,v:0,tension:null,friction:null},u=[0],c=0,d=1e-4,h=.016;for(e=parseFloat(e)||500,t=parseFloat( [...]
+"use strict";function r(e){this.options=i.extend({},o,e)}var i=n(3),a=n(12),o={fit:!0,padding:30,boundingBox:void 0,avoidOverlap:!0,avoidOverlapPadding:10,condense:!1,rows:void 0,cols:void 0,position:function(e){},sort:void 0,animate:!1,animationDuration:500,animationEasing:void 0,ready:void 0,stop:void 0};r.prototype.run=function(){var e=this.options,t=e,n=e.cy,r=t.eles,i=r.nodes().not(":parent");t.sort&&(i=i.sort(t.sort));var o=a.makeBoundingBox(t.boundingBox?t.boundingBox:{x1:0,y1:0,w [...]
+t.fullQualityMb=!1,t.clearedForMotionBlur=[],t.desktopTapThreshold=e.desktopTapThreshold,t.desktopTapThreshold2=e.desktopTapThreshold*e.desktopTapThreshold,t.touchTapThreshold=e.touchTapThreshold,t.touchTapThreshold2=e.touchTapThreshold*e.touchTapThreshold,t.tapholdDuration=500,t.bindings=[],t.beforeRenderCallbacks=[],t.beforeRenderPriorities={animations:400,eleCalcs:300,eleTxrDeq:200,lyrTxrDeq:100},t.registerNodeShapes(),t.registerArrowShapes(),t.registerCalculationListeners(),t.load()} [...]
+if(!n){var r=t._private.rscratch,i="haystack"===r.edgeType;i||this.drawArrowhead(e,t,"source",r.arrowStartX,r.arrowStartY,r.srcArrowAngle),this.drawArrowhead(e,t,"mid-target",r.midX,r.midY,r.midtgtArrowAngle),this.drawArrowhead(e,t,"mid-source",r.midX,r.midY,r.midsrcArrowAngle),i||this.drawArrowhead(e,t,"target",r.arrowEndX,r.arrowEndY,r.tgtArrowAngle)}},n.drawArrowhead=function(e,t,n,r,i,a){if(!(isNaN(r)||null==r||isNaN(i)||null==i||isNaN(a)||null==a)){var o=this,s=t.pstyle(n+"-arrow-sh [...]
+a.width=r,a.height=i;var o={id:D=++D%P,bb:e,level:t,width:r,height:i,canvas:a,context:a.getContext("2d"),eles:[],elesQueue:[],reqs:0},s=o.context,l=-o.bb.x1,u=-o.bb.y1;return s.scale(n,n),s.translate(l,u),o},_.getLayers=function(e,t,n){var r=this,o=r.renderer,s=o.cy,l=s.zoom(),p=r.firstGet;if(r.firstGet=!1,null==n)if(n=Math.ceil(a.log2(l*t)),n<c)n=c;else if(l>=h||n>d)return null;r.validateLayersElesOrdering(n,e);var f,g,v=r.layersByLevel,y=Math.pow(2,n),m=v[n]=v[n]||[],b=r.levelIsComplet [...]
+"use strict";var r=n(2),i=n(3),a=n(34),o=n(24),s=n(13),l=function(e){if(!(this instanceof l))return new l(e);this._private={pass:[]};var t=4;if(r.number(e),"undefined"!=typeof navigator&&null!=navigator.hardwareConcurrency)e=navigator.hardwareConcurrency;else try{e=n(157).cpus().length}catch(i){e=t}for(var o=0;o<e;o++)this[o]=new a;this.length=e},u=l.prototype;i.extend(u,{instanceString:function(){return"fabric"},require:function(e,t){for(var n=0;n<this.length;n++){var r=this[n];r.requir [...]
+for(var s=0;s<o.length;s++){var l=o[s];t.css(l.name,l.value)}}return t},e.exports=o},function(e,t,n){"use strict";var r=n(2);e.exports={hex2tuple:function(e){if((4===e.length||7===e.length)&&"#"===e[0]){var t,n,r,i=4===e.length,a=16;return i?(t=parseInt(e[1]+e[1],a),n=parseInt(e[2]+e[2],a),r=parseInt(e[3]+e[3],a)):(t=parseInt(e[1]+e[2],a),n=parseInt(e[3]+e[4],a),r=parseInt(e[5]+e[6],a)),[t,n,r]}},hsl2tuple:function(e){function t(e,t,n){return n<0&&(n+=1),n>1&&(n-=1),n<1/6?e+6*(t-e)*n:n<. [...]
+this.defineStructure(),this.on("change:renderSwitch",this.defineStructure)},reconstruct:function(){for(var e,t="",n=[],r=this.get("residues"),i=0;i<r.length;i++)if(t+=r.at(i).get("name"),e=this.getPartner(i.toString()),e===-1)n[i]=".";else{if(!(e>i))continue;n[i]="(",n[e]=")"}this.set("seq",t),this.set("dotbr",n.join("")),this.set("renderSwitch",!this.get("renderSwitch"))},defineStructure:function(){for(var e=this.get("seq"),t=this.get("dotbr"),n=this.get("layout"),r=this.get("style"),o= [...]
+//# sourceMappingURL=biojs_drawrnajs.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/biojs_drawrnajs.js.map b/config/plugins/visualizations/charts/static/repository/build/biojs_drawrnajs.js.map
new file mode 100644
index 0000000..1cd8cf9
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/biojs_drawrnajs.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///biojs_drawrnajs.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1","webpack:///./static/repository/visualizations/biojs/drawrnajs/wrapper.js","webpack:///./~/cytoscape/src/is.js","webpack:///./~/cytoscape/src/util/index.js","webpack:///./~/cytoscape/src/math.js","webpack:///./~/cytoscape/src/define.js","webpack:///./~/backbone/backbone.js","webpack:///./~/timers-browserify/main.js","webpack:///./~/cytoscape/src/promise.js","webpack:///./~/cyt [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/biojs_msa.js b/config/plugins/visualizations/charts/static/repository/build/biojs_msa.js
new file mode 100644
index 0000000..a337fdf
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/biojs_msa.js
@@ -0,0 +1,21 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var i=n[r]={exports:{},id:r,loaded:!1};return t[r].call(i.exports,i,i.exports,e),i.loaded=!0,i.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}({0:function(t,e,n){var r,i;r=[n(161)],i=function(){return Backbone.Model.extend({initialize:function(t){var e=t.chart,n=t.dataset,r=t.chart.settings,i=new msa.msa({el:$("#"+t.targets[0]),vis:{conserv:"true"==r.get("conserv"),overviewbox:"true"==r.get("overviewbox")},me [...]
+}function zo(t,e,n){var r=t?t.length:0;if(!r)return-1;var i=null==n?0:Bu(n);return i<0&&(i=Ql(r+i,0)),S(t,e,i)}function Co(t){return yo(t,1)}function Ao(t,e){return t?Jl.call(t,e):""}function To(t){var e=t?t.length:0;return e?t[e-1]:Q}function Io(t,e,n){var r=t?t.length:0;if(!r)return-1;var i=r;if(n!==Q&&(i=Bu(n),i=(i<0?Ql(r+i,0):tc(i,r-1))+1),e!==e)return D(t,i-1,!0);for(;i--;)if(t[i]===e)return i;return-1}function No(t,e){return t&&t.length?gr(t,Bu(e)):Q}function Lo(t,e){return t&&t.le [...]
+e.prototype[t]=function(){var t=arguments;if(i&&!this.__chain__){var e=this.value();return n.apply(yf(e)?e:[],t)}return this[r](function(e){return n.apply(yf(e)?e:[],t)})}}),Un(i.prototype,function(t,n){var r=e[n];if(r){var i=r.name+"";(pc[i]||(pc[i]=[])).push({name:n,func:r})}}),pc[mi(Q,st).name]=[{name:"wrapper",func:Q}],i.prototype.clone=R,i.prototype.reverse=Re,i.prototype.value=qe,e.prototype.at=Yc,e.prototype.chain=cs,e.prototype.commit=fs,e.prototype.next=hs,e.prototype.plant=ps,e [...]
+		 * jBone v1.2.0 - 2016-04-13 - Library for DOM manipulation
+		 *
+		 * http://jbone.js.org
+		 *
+		 * Copyright 2016 Alexey Kupriyanenko
+		 * Released under the MIT license.
+		 */
+!function(o){function s(t){var e=t.length,n="undefined"==typeof t?"undefined":i(t);return!_(n)&&t!==o&&(!(1!==t.nodeType||!e)||b(n)||0===e||"number"==typeof e&&e>0&&e-1 in t)}function u(t,e){var n,r;this.originalEvent=t,r=function(t,e){"preventDefault"===t?this[t]=function(){return this.defaultPrevented=!0,e[t]()}:"stopImmediatePropagation"===t?this[t]=function(){return this.immediatePropagationStopped=!0,e[t]()}:_(e[t])?this[t]=function(){return e[t]()}:this[t]=e[t]};for(n in t)(t[n]||" [...]
+return e}})},function(t,e,n){"use strict";var r,i=n(1).Model;t.exports=r=i.extend({defaults:{searchText:""}})},function(t,e,n){"use strict";var r,i=n(1).Model;t.exports=r=i.extend({defaults:{searchBox:-10,overviewBox:30,headerBox:-1,alignmentBody:0,scaleSlider:50}})},function(t,e,n){"use strict";var r,i=n(1).Model;t.exports=r=i.extend({defaults:{sequences:!0,markers:!0,metacell:!1,conserv:!1,overviewbox:!1,seqlogo:!1,gapHeader:!1,leftHeader:!0,scaleslider:!1,labels:!0,labelName:!0,labelI [...]
+e}),o+="';\n",e.variable||(o="with(obj||{}){\n"+o+"}\n"),o="var __t,__p='',__j=Array.prototype.join,print=function(){__p+=__j.call(arguments,'');};\n"+o+"return __p;\n";try{var s=new Function(e.variable||"obj","_",o)}catch(u){throw u.source=o,u}var a=function(t){return s.call(this,t,S)},l=e.variable||"obj";return a.source="function("+l+"){\n"+o+"}",a},S.chain=function(t){var e=S(t);return e._chain=!0,e};var $=function(t,e){return t._chain?S(e).chain():e};S.mixin=function(t){S.each(S.func [...]
+return t&&"function"==typeof Symbol&&t.constructor===Symbol?"symbol":typeof t};!function(){function n(){return{keys:Object.keys||function(t){if("object"!==("undefined"==typeof t?"undefined":r(t))&&"function"!=typeof t||null===t)throw new TypeError("keys() called on a non-object");var e,n=[];for(e in t)t.hasOwnProperty(e)&&(n[n.length]=e);return n},uniqueId:function(t){var e=++a+"";return t?t+e:e},has:function(t,e){return s.call(t,e)},each:function(t,e,n){if(null!=t)if(o&&t.forEach===o)t. [...]
+var e,n=[];for(e in t)t.hasOwnProperty(e)&&(n[n.length]=e);return n},uniqueId:function(t){var e=++a+"";return t?t+e:e},has:function(t,e){return s.call(t,e)},each:function(t,e,n){if(null!=t)if(o&&t.forEach===o)t.forEach(e,n);else if(t.length===+t.length)for(var r=0,i=t.length;r<i;r++)e.call(n,t[r],r,t);else for(var s in t)this.has(t,s)&&e.call(n,t[s],s,t)},once:function(t){var e,n=!1;return function(){return n?e:(n=!0,e=t.apply(this,arguments),t=null,e)}}}}var i,o=Array.prototype.forEach, [...]
+o=s(t).find("#canv_"+r)),s(o).attr("width",n).attr("height",e),o[0]}var i=n(86),o=n(85),s=n(5);t.exports=function(t){if(this.data){t=t||{};var e=t.zoom||this.zoom,n=t.target||1,s=(t.scaled||null,this.dom_element.parent().attr("width")),u=1,a=null,l=null,c=0;if(this.previous_target=n,t.start&&(this.start=t.start),t.end&&(this.end=t.end),e<=.1?e=.1:e>=1&&(e=1),this.zoom=e,a=this.end||this.data.heightArr.length,l=this.start||1,a=a>this.data.heightArr.length?this.data.heightArr.length:a,a=a< [...]
+var e=e||"undefined"!=typeof navigator&&navigator.msSaveOrOpenBlob&&navigator.msSaveOrOpenBlob.bind(navigator)||function(t){if("undefined"==typeof navigator||!/MSIE [1-9]\./.test(navigator.userAgent)){var e=t.document,n=function(){return t.URL||t.webkitURL||t},r=e.createElementNS("http://www.w3.org/1999/xhtml","a"),i=!t.externalHost&&"download"in r,o=function(n){var r=e.createEvent("MouseEvents");r.initMouseEvent("click",!0,!1,t,0,0,0,0,0,!1,!1,!1,!1,0,null),n.dispatchEvent(r)},s=t.webki [...]
+this.listenTo(this.g.colorscheme,"change",function(){return this.render()})},render:function(){var t=this.setName("Color scheme");this.removeAllNodes();for(var e,n=this.getColorschemes(),r=0;r<n.length;r++)e=n[r],this.addScheme(t,e);return this.grey(t),s.removeAllChilds(this.el),this.el.appendChild(this.buildDOM()),this},addScheme:function(t,e){var n=this,r={};return this.g.colorscheme.get("scheme")===e.id&&(r.backgroundColor="#77ED80"),this.addNode(e.name,function(){n.g.colorscheme.set( [...]
+this.ctx.lineTo(m+f+e,n)),"undefined"!=typeof u&&null!==u&&u.indexOf(b)>=0||(this.ctx.moveTo(m+e,h+n),this.ctx.lineTo(m+f+e,h+n)),m+=f)}return this.ctx.moveTo(e,n),this.ctx.lineTo(e,h+n),this.ctx.moveTo(e+d,n),this.ctx.lineTo(e+d,h+n),this.ctx.stroke(),this.ctx.strokeStyle=v,this.ctx.lineWidth=g},_getPrevNextSelection:function(t){var e=t.collection.prev(t),n=t.collection.next(t),r=void 0,i=void 0;return"undefined"!=typeof e&&null!==e&&(r=this._getSelection(e)),"undefined"!=typeof n&&null [...]
+},0);n=(100*n/e.length).toFixed(0)+"%";var r=document.createElement("span");r.textContent=n,r.style.display="inline-block",r.style.width=35,this.el.appendChild(r)}if(this.g.vis.get("metaIdentity")){var o=this.g.stats.identity()[this.model.id],a=document.createElement("span");this.model.get("ref")&&this.g.config.get("hasRef")?a.textContent="ref.":"undefined"!=typeof o&&null!==o&&(a.textContent=o.toFixed(2)),a.style.display="inline-block",a.style.width=40,this.el.appendChild(a)}if(this.g.v [...]
+//# sourceMappingURL=biojs_msa.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/biojs_msa.js.map b/config/plugins/visualizations/charts/static/repository/build/biojs_msa.js.map
new file mode 100644
index 0000000..c8e18cf
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/biojs_msa.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///biojs_msa.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1*************************","webpack:///./static/repository/visualizations/biojs/msa/wrapper.js","webpack:///./static/repository/plugins/biojs/biojs.msa.js"],"names":["define","modules","__webpack_require__","moduleId","installedModules","exports","module","id","loaded","call","m","c","p","0","__WEBPACK_AMD_DEFINE_ARRAY__","__WEBPACK_AMD_DEFINE_RESULT__","Backbone","Model","extend","in [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/cytoscape_basic.js b/config/plugins/visualizations/charts/static/repository/build/cytoscape_basic.js
new file mode 100644
index 0000000..40ae8bc
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/cytoscape_basic.js
@@ -0,0 +1,37 @@
+define(function(){return function(e){function t(n){if(r[n])return r[n].exports;var i=r[n]={exports:{},id:n,loaded:!1};return e[n].call(i.exports,i,i.exports,t),i.loaded=!0,i.exports}var r={};return t.m=e,t.c=r,t.p="",t(0)}({0:function(e,t,r){var n,i;n=[r(1),r(175)],i=function(e,t){return Backbone.Model.extend({initialize:function(r){var n=r.chart,i=r.dataset;e.get({url:i.download_url,success:function(i){try{t(e.merge(i,{container:$("#"+r.targets[0]),layout:{name:"cose",idealEdgeLength:10 [...]
+x:l.x-h.x,y:l.y-h.y},void 0===e?i:i[e]}for(var p=0;p<this.length;p++){var r=this[p],u=s?r.parent():null,c=u&&u.length>0,d=c;c&&(u=u[0]);var h=d?u._private.position:{x:0,y:0};void 0!==t?r._private.position[e]=t+h[e]:void 0!==i&&(r._private.position={x:i.x+h.x,y:i.y+h.y})}this.rtrigger("position")}else if(!a)return;return this},renderedBoundingBox:function(e){var t=this.boundingBox(e),r=this.cy(),n=r.zoom(),i=r.pan(),a=t.x1*n+i.x,o=t.x2*n+i.x,s=t.y1*n+i.y,l=t.y2*n+i.y;return{x1:a,x2:o,y1:s [...]
+parallelEdges:u(a(),"parallelEdges"),codirectedEdges:u(a({codirected:!0}),"codirectedEdges")}),o.extend(l,{components:function(){var e=this,t=e.cy(),r=e.spawn(),n=e.nodes().spawnSelf(),i=[],a=function(e,t){r.merge(e),n.unmerge(e),t.merge(e)};if(n.empty())return e.spawn();do{var o=t.collection();i.push(o);var s=n[0];a(s,o),e.bfs({directed:!1,roots:s,visit:function(e,t,r,n,i){a(r,o)}})}while(n.length>0);return i.map(function(e){var t=e.connectedEdges().stdFilter(function(t){return e.anySam [...]
+var h=function(){function e(e){return-e.tension*e.x-e.friction*e.v}function t(t,r,n){var i={x:t.x+n.dx*r,v:t.v+n.dv*r,tension:t.tension,friction:t.friction};return{dx:i.v,dv:e(i)}}function r(r,n){var i={dx:r.v,dv:e(r)},a=t(r,.5*n,i),o=t(r,.5*n,a),s=t(r,n,o),l=1/6*(i.dx+2*(a.dx+o.dx)+s.dx),u=1/6*(i.dv+2*(a.dv+o.dv)+s.dv);return r.x=r.x+l*n,r.v=r.v+u*n,r}return function n(e,t,i){var a,o,s,l={x:-1,v:0,tension:null,friction:null},u=[0],c=0,d=1e-4,h=.016;for(e=parseFloat(e)||500,t=parseFloat( [...]
+	Event object based on jQuery events, MIT license
+	
+	https://jquery.org/license/
+	https://tldrlegal.com/license/mit-license
+	https://github.com/jquery/jquery/blob/master/src/event.js
+	*/
+var a=function(e,t){return this instanceof a?(e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented?i:n):this.type=e,t&&(this.type=void 0!==t.type?t.type:this.type,this.cy=t.cy,this.cyTarget=t.cyTarget,this.cyPosition=t.cyPosition,this.cyRenderedPosition=t.cyRenderedPosition,this.namespace=t.namespace,this.layout=t.layout,this.data=t.data,this.message=t.message),void(this.timeStamp=e&&e.timeStamp||Date.now())):new a(e,t)};a.prototype={instanceString: [...]
+var d=!r||n.visible()&&!n.transparent();if(r&&!d)return;var h=f.nodeShapes[p.getNodeShape(n)];h.checkPoint(e,t,0,o,s,c.x,c.y)&&a(n,0)}}}function l(n){var o=n._private;if("no"!==n.pstyle("events").strValue){var l,u,c=o.rscratch,d=n.pstyle("width").pfValue/2+b,h=d*d,v=2*d,y=o.source,x=o.target,w=!1,E=function(){if(void 0!==u)return u;if(!r)return u=!0,!0;var e=n.visible()&&!n.transparent();return e?(u=!0,!0):(u=!1,!1)};if("segments"===c.edgeType||"straight"===c.edgeType||"haystack"===c.edg [...]
+if(Y&&(X.x+=S[0],X.y+=S[1]),q){var $=e.hoverData.dragDelta;Y&&$&&n.number($[0])&&n.number($[1])&&(X.x+=$[0],X.y+=$[1])}}}}e.hoverData.draggingEles=!0;var W=v.collection(V);W.updateCompoundBounds(),W.trigger("position drag"),e.redrawHint("drag",!0),e.redraw()}else L();l=!0}else if(T){if(e.hoverData.dragging||!v.boxSelectionEnabled()||!z&&v.panningEnabled()&&v.userPanningEnabled()){if(!e.hoverData.selecting&&v.panningEnabled()&&v.userPanningEnabled()){var H=s(k,e.hoverData.downs);H&&(e.hov [...]
+e.lineWidth=R,e.strokeStyle=A}}var j=2*t.pstyle("text-outline-width").pfValue;if(j>0&&(e.lineWidth=j),"wrap"===t.pstyle("text-wrap").value){var X=o.labelWrapCachedLines,Y=p/X.length;switch(x){case"top":u-=(X.length-1)*Y;break;case"bottom":break;default:case"center":u-=(X.length-1)*Y/2}for(var $=0;$<X.length;$++)j>0&&e.strokeText(X[$],l,u),e.fillText(X[$],l,u),u+=Y}else j>0&&e.strokeText(c,l,u),e.fillText(c,l,u);0!==w&&(e.rotate(-w),e.translate(-_,-T)),this.shadowStyle(e,"transparent",0)} [...]
+"use strict";var n=e("./is"),i=e("./util"),a=e("./thread"),o=e("./promise"),s=e("./define"),l=function(t){if(!(this instanceof l))return new l(t);this._private={pass:[]};var r=4;if(n.number(t),"undefined"!=typeof navigator&&null!=navigator.hardwareConcurrency)t=navigator.hardwareConcurrency;else try{t=e("os").cpus().length}catch(i){t=r}for(var o=0;t>o;o++)this[o]=new a;this.length=t},u=l.prototype;i.extend(u,{instanceString:function(){return"fabric"},require:function(e,t){for(var r=0;r<t [...]
+	Ported by Xueqiao Xu <xueqiaoxu at gmail.com>;
+	
+	PSF LICENSE AGREEMENT FOR PYTHON 2.7.2
+	
+	1. This LICENSE AGREEMENT is between the Python Software Foundation (“PSF”), and the Individual or Organization (“Licensee”) accessing and otherwise using Python 2.7.2 software in source or binary form and its associated documentation.
+	2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 2.7.2 alone or in any derivative version, provided, however, that PSF’s License Agreement and PSF’s notice of copyright, i.e., “Copyright © 2001-2012 Python Software Foundation; All Rights Reserved” are retained in Python 2. [...]
+	3. In the event Licensee prepares a derivative work that is based on or incorporates Python 2.7.2 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python 2.7.2.
+	4. PSF is making Python 2.7.2 available to Licensee on an “AS IS” basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 2.7.2 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
+	5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.2 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.2, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+	6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
+	7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
+	8. By copying, installing or otherwise using Python 2.7.2, Licensee agrees to be bound by the terms and conditions of this License Agreement.
+	*/
+"use strict";(function(){var e,n,i,a,o,s,l,u,c,d,h,p,f,v,g;i=Math.floor,d=Math.min,n=function(e,t){return t>e?-1:e>t?1:0},c=function(e,t,r,a,o){var s;if(null==r&&(r=0),null==o&&(o=n),0>r)throw new Error("lo must be non-negative");for(null==a&&(a=e.length);a>r;)s=i((r+a)/2),o(t,e[s])<0?a=s:r=s+1;return[].splice.apply(e,[r,r-r].concat(t)),t},s=function(e,t,r){return null==r&&(r=n),e.push(t),v(e,0,e.length-1,r)},o=function(e,t){var r,i;return null==t&&(t=n),r=e.pop(),e.length?(i=e[0],e[0]=r [...]
+	Embeddable Minimum Strictly-Compliant Promises/A+ 1.1.1 Thenable
+	Copyright (c) 2013-2014 Ralf S. Engelschall (http://engelschall.com)
+	Licensed under The MIT License (http://opensource.org/licenses/MIT)
+	*/
+"use strict";var n=0,i=1,a=2,o=function(e){return this instanceof o?(this.id="Thenable/1.0.7",this.state=n,this.fulfillValue=void 0,this.rejectReason=void 0,this.onFulfilled=[],this.onRejected=[],this.proxy={then:this.then.bind(this)},void("function"==typeof e&&e.call(this,this.fulfill.bind(this),this.reject.bind(this)))):new o(e)};o.prototype={fulfill:function(e){return s(this,i,"fulfillValue",e)},reject:function(e){return s(this,a,"rejectReason",e)},then:function(e,t){var r=this,n=new  [...]
+type:u.visibility},{name:"opacity",type:u.zeroOneNumber},{name:"z-index",type:u.nonNegativeInt},{name:"overlay-padding",type:u.size},{name:"overlay-color",type:u.color},{name:"overlay-opacity",type:u.zeroOneNumber},{name:"shadow-blur",type:u.size},{name:"shadow-color",type:u.color},{name:"shadow-opacity",type:u.zeroOneNumber},{name:"shadow-offset-x",type:u.bidirectionalSize},{name:"shadow-offset-y",type:u.bidirectionalSize},{name:"text-shadow-blur",type:u.size},{name:"text-shadow-color", [...]
+"use strict";var window=_dereq_("./window"),util=_dereq_("./util"),Promise=_dereq_("./promise"),Event=_dereq_("./event"),define=_dereq_("./define"),is=_dereq_("./is"),Thread=function(e){if(!(this instanceof Thread))return new Thread(e);var t=this._private={requires:[],files:[],queue:null,pass:[],disabled:!1};is.plainObject(e)&&null!=e.disabled&&(t.disabled=!!e.disabled)},thdfn=Thread.prototype,stringifyFieldVal=function(e){var t=is.fn(e)?e.toString():"JSON.parse('"+JSON.stringify(e)+"')" [...]
+//# sourceMappingURL=cytoscape_basic.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/cytoscape_basic.js.map b/config/plugins/visualizations/charts/static/repository/build/cytoscape_basic.js.map
new file mode 100644
index 0000000..bd04581
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/cytoscape_basic.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///cytoscape_basic.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1**********************","webpack:///./static/repository/visualizations/cytoscape/basic/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c********************","webpack:///./~/timers-browserify/main.js?b088","webpack:///./~/process/browser.js?82e4","webpack:///./static/repository/plugins/cytoscape/cytoscape.js"],"names":["define","modules","__webpack_require__"," [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_bar.js b/config/plugins/visualizations/charts/static/repository/build/jqplot_bar.js
new file mode 100644
index 0000000..f968545
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_bar.js
@@ -0,0 +1,10 @@
+define(function(){return function(t){function e(s){if(i[s])return i[s].exports;var r=i[s]={exports:{},id:s,loaded:!1};return t[s].call(r.exports,r,r.exports,e),r.loaded=!0,r.exports}var i={};return e.m=t,e.c=i,e.p="",e(0)}([function(t,e,i){var s,r;s=[i(22)],r=function(t){return Backbone.Model.extend({initialize:function(e){e.makeConfig=function(t,e){$.extend(!0,e,{seriesDefaults:{renderer:$.jqplot.BarRenderer},axes:{xaxis:{min:-1},yaxis:{pad:1.2}}})},new t(e)}})}.apply(e,s),!(void 0!==r& [...]
+for(var h,d,p,c,l=0,u=P.length;l<u;l++){h=P[l],c=this.axes[h],d=c._ticks;for(var p=0,g=d.length;p<g;p++){var f=d[p]._elem;f&&(t.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==T&&window.G_vmlCanvasManager.uninitElement(f.get(0)),f.emptyForce(),f=null,d._elem=null)}d=null,delete c.ticks,delete c._ticks,this.axes[h]=new e(h),this.axes[h]._plotWidth=this._width,this.axes[h]._plotHeight=this._height}i&&(r.dataRenderer&&t.isFunction(r.dataRenderer)&&(r.dataRendererOptions&&(thi [...]
+e=[]},this.freeCanvas=function(e){if(t.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==T)window.G_vmlCanvasManager.uninitElement(t.jqplot.CanvasManager.canvases[e]),t.jqplot.CanvasManager.canvases[e]=null;else{var i=t.jqplot.CanvasManager.canvases[e];i.getContext("2d").clearRect(0,0,i.width,i.height),t(i).unbind().removeAttr("class").removeAttr("style"),t(i).css({left:"",top:"",position:""}),i.width=0,i.height=0,t.jqplot.CanvasManager.free[e]=!0}}},t.jqplot.log=function(){ [...]
+solid:null};if("string"==typeof i)if("."===i[0]||"-"===i[0]){var r=i;i=[];for(var o=0,a=r.length;o<a;o++){if("."===r[o])i.push(O);else{if("-"!==r[o])continue;i.push(t.jqplot.config.dashLength)}i.push(t.jqplot.config.gapLength)}}else i=s[i];if(!i||!i.length)return e;var n=0,l=i[0],h=0,d=0,p=0,c=0,u=function(t,i){e.moveTo(t,i),h=t,d=i,p=t,c=i},g=function(t,s){var r=e.lineWidth,o=t-h,a=s-d,p=Math.sqrt(o*o+a*a);if(p>0&&r>0)for(o/=p,a/=p;;){var c=r*l;if(!(c<p)){h=t,d=s,0==(1&n)?e.lineTo(h,d): [...]
+this.lineWidth=1.5,this.linePattern="solid",this.lineJoin="miter",this.lineCap="round",this.closePath=!1,this.fill=!1,this.isarc=!1,this.fillRect=!1,this.strokeRect=!1,this.clearRect=!1,this.strokeStyle="#999999",this.fillStyle="#999999",t.extend(!0,this,e)},t.jqplot.ShapeRenderer.prototype.init=function(e){t.extend(!0,this,e)},t.jqplot.ShapeRenderer.prototype.draw=function(e,i,s){e.save();var r=null!=s?s:{},o=null!=r.fill?r.fill:this.fill,a=null!=r.closePath?r.closePath:this.closePath,n [...]
+e:"Date",A:"DayName",a:"AbbrDayName",w:"Day",H:"Hours.2","#H":"Hours",I:"Hours12.2","#I":"Hours12",p:"AMPM",M:"Minutes.2","#M":"Minutes",S:"Seconds.2","#S":"Seconds",s:"Unix",N:"Milliseconds.3","#N":"Milliseconds",O:"TimezoneOffset",Z:"TimezoneName",G:"GmtOffset"},shortcuts:{F:"%Y-%m-%d",T:"%H:%M:%S",X:"%H:%M:%S",x:"%m/%d/%y",D:"%m/%d/%y","#c":"%a %b %e %H:%M:%S %Y",v:"%e-%b-%Y",R:"%H:%M",r:"%I:%M:%S %p",t:"\t",n:"\n","%":"%"}},G.formats.php={codes:{matcher:/()%((%|[a-z]))/i,a:"AbbrDayNa [...]
+this._textRenderer.draw(this._elem.get(0).getContext("2d"),this.label)}}(jQuery),function(t){function e(){this.uid=null,this.type=null,this.gridStart=null,this.gridStop=null,this.tooltipWidthFactor=0,this.options={name:null,show:!0,lineWidth:2,lineCap:"round",color:"#666666",shadow:!0,shadowAngle:45,shadowOffset:1,shadowDepth:3,shadowAlpha:"0.07",xaxis:"xaxis",yaxis:"yaxis",showTooltip:!1,showTooltipPrecision:.6,tooltipLocation:"nw",fadeTooltip:!0,tooltipFadeSpeed:"fast",tooltipOffset:4, [...]
+var _=t.inArray(u,g)+1;"xaxis"==this.name||"x2axis"==this.name?x.data[v][0]=_:x.data[v][1]=_}if(this.groups>1&&!this._grouped){for(var l=g.length,h=parseInt(l/this.groups,10),d=0,o=h;o<l;o+=h+1)g[o]=" ";this._grouped=!0}s=f+.5,null==this.numberTicks&&(this.numberTicks=2*f+1);var p=s-i;this.min=i,this.max=s;var b=0,w=parseInt(3+e/10,10),h=parseInt(f/w,10);null==this.tickInterval&&(this.tickInterval=p/(this.numberTicks-1));for(var o=0;o<this.numberTicks;o++){r=this.min+o*this.tickInterval; [...]
+break;case"w":var k=_.x+e._gridPadding.left-l.outerWidth(!0)-d.tooltipOffset-b,j=_.y+e._gridPadding.top-l.outerHeight(!0)/2;break;default:var k=_.x+e._gridPadding.left-l.outerWidth(!0)-d.tooltipOffset-w*b,j=_.y+e._gridPadding.top-d.tooltipOffset-l.outerHeight(!0)-w*b}l.css("left",k),l.css("top",j),d.fadeTooltip?l.stop(!0,!0).fadeIn(d.tooltipFadeSpeed):l.show(),l=null}function s(t,s,r,o,a){var n=a.plugins.highlighter,l=a.plugins.cursor;if(n.show)if(null==o&&n.isHighlighting){var h=jQuery. [...]
+//# sourceMappingURL=jqplot_bar.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_bar.js.map b/config/plugins/visualizations/charts/static/repository/build/jqplot_bar.js.map
new file mode 100644
index 0000000..1fe3242
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_bar.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///jqplot_bar.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1******","webpack:///./static/repository/visualizations/jqplot/bar/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c****","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0****","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b****","webpack:///./~/css-loader/lib/css-base.js?da04****","webpack:///./~/sty [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_boxplot.js b/config/plugins/visualizations/charts/static/repository/build/jqplot_boxplot.js
new file mode 100644
index 0000000..3c7fe6a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_boxplot.js
@@ -0,0 +1,10 @@
+define(function(){return function(t){function e(s){if(i[s])return i[s].exports;var r=i[s]={exports:{},id:s,loaded:!1};return t[s].call(r.exports,r,r.exports,e),r.loaded=!0,r.exports}var i={};return e.m=t,e.c=i,e.p="",e(0)}([function(t,e,i){var s,r;s=[i(22),i(20),i(4)],r=function(t,e,i){return Backbone.View.extend({initialize:function(s){e.request(s.chart,i.buildJobDictionary(s.chart,"boxplot"),function(e){var r=s.chart,o=new Backbone.Collection;r.groups.each(function(t,e){o.add({__data_c [...]
+if(r===!0&&(r=this.axes),t.isArray(r))for(var o=0;o<r.length;o++)this.axes[r[o]].resetScale(s[r[o]]);else if("object"==typeof r)for(var a in r)this.axes[a].resetScale(s[a])},this.reInitialize=function(i,s){for(var r=t.extend(!0,{},this.options,s),o=this.targetId.substr(1),a=null==i?this.data:i,l=0;l<t.jqplot.preInitHooks.length;l++)t.jqplot.preInitHooks[l].call(this,o,a,r);for(var l=0;l<this.preInitHooks.hooks.length;l++)this.preInitHooks.hooks[l].call(this,o,a,r);if(this._height=this.ta [...]
+document.getElementById(e).style.fontWeight=t.jqplot.config.errorFontWeight}},t.jqplot.version="1.0.8",t.jqplot.revision="1250",t.jqplot.targetCounter=1,t.jqplot.CanvasManager=function(){"undefined"==typeof t.jqplot.CanvasManager.canvases&&(t.jqplot.CanvasManager.canvases=[],t.jqplot.CanvasManager.free=[]);var e=[];this.getCanvas=function(){var i,s=!0;if(!t.jqplot.use_excanvas)for(var r=0,o=t.jqplot.CanvasManager.canvases.length;r<o;r++)if(t.jqplot.CanvasManager.free[r]===!0){s=!1,i=t.jq [...]
+this._elem=null);var e=(this.renderer,document.createElement("div"));if(this._elem=t(e),this._elem.addClass("jqplot-title"),this.text){if(this.text){var i;this.color?i=this.color:this.textColor&&(i=this.textColor);var s={position:"absolute",top:"0px",left:"0px"};this._plotWidth&&(s.width=this._plotWidth+"px"),this.fontSize&&(s.fontSize=this.fontSize),"string"==typeof this.textAlign?s.textAlign=this.textAlign:s.textAlign="center",i&&(s.color=i),this.paddingBottom&&(s.paddingBottom=this.pa [...]
+e.lineWidth=null!=r.lineWidth?r.lineWidth:this.lineWidth,e.lineJoin=null!=r.lineJoin?r.lineJoin:this.lineJoin,e.lineCap=null!=r.lineCap?r.lineCap:this.lineCap,e.strokeStyle=r.strokeStyle||this.strokeStyle||"rgba(0,0,0,"+h+")",e.fillStyle=r.fillStyle||this.fillStyle||"rgba(0,0,0,"+h+")";for(var u=0;u<d;u++){var g=t.jqplot.LinePattern(e,c);if(e.translate(Math.cos(this.angle*Math.PI/180)*l,Math.sin(this.angle*Math.PI/180)*l),g.beginPath(),p)e.arc(i[0],i[1],i[2],i[3],i[4],!0);else if(a)a&&e. [...]
+r};G.strftime=function(t,e,i,s){var r="perl",o=G.regional.getLocale();i&&G.formats.hasOwnProperty(i)?r=i:i&&G.regional.hasOwnProperty(i)&&(o=i),s&&G.formats.hasOwnProperty(s)?r=s:s&&G.regional.hasOwnProperty(s)&&(o=s),"[object Object]"==D(t)&&"jsDate"==t._type||(t=new G(t),t.locale=o),e||(e=t.formatString||G.regional[o].formatString);for(var a,n=e||"%Y-%m-%d",l="";n.length>0;)(a=n.match(G.formats[r].codes.matcher))?(l+=n.slice(0,a.index),l+=(a[1]||"")+U(t,a[2],r),n=n.slice(a.index+a[0].l [...]
+this},t.jqplot.CanvasAxisTickRenderer.prototype.draw=function(e,i){this.label||(this.label=this.prefix+this.formatter(this.formatString,this.value)),this._elem&&(t.jqplot.use_excanvas&&void 0!==window.G_vmlCanvasManager.uninitElement&&window.G_vmlCanvasManager.uninitElement(this._elem.get(0)),this._elem.emptyForce(),this._elem=null);var s=i.canvasManager.getCanvas();this._textRenderer.setText(this.label,e);var r=this.getWidth(e),o=this.getHeight(e);return s.width=r,s.height=o,s.style.wid [...]
+null!=this.min&&null!=this.max&&null!=this.numberTicks&&(this.tickInterval=null),null!=this.min&&null!=this.max&&null!=this.tickInterval&&parseInt((this.max-this.min)/this.tickInterval,10)!=(this.max-this.min)/this.tickInterval&&(this.tickInterval=null);for(var s,u,g=[],f=0,i=.5,m=!1,o=0;o<this._series.length;o++)for(var x=this._series[o],v=0;v<x.data.length;v++)u="xaxis"==this.name||"x2axis"==this.name?x.data[v][0]:x.data[v][1],t.inArray(u,g)==-1&&(m=!0,f+=1,g.push(u));m&&this.sortMerge [...]
+break;case"n":var k=_.x+e._gridPadding.left-l.outerWidth(!0)/2,j=_.y+e._gridPadding.top-d.tooltipOffset-l.outerHeight(!0)-b;break;case"ne":var k=_.x+e._gridPadding.left+d.tooltipOffset+w*b,j=_.y+e._gridPadding.top-d.tooltipOffset-l.outerHeight(!0)-w*b;break;case"e":var k=_.x+e._gridPadding.left+d.tooltipOffset+b,j=_.y+e._gridPadding.top-l.outerHeight(!0)/2;break;case"se":var k=_.x+e._gridPadding.left+d.tooltipOffset+w*b,j=_.y+e._gridPadding.top+d.tooltipOffset+w*b;break;case"s":var k=_.x [...]
+//# sourceMappingURL=jqplot_boxplot.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_boxplot.js.map b/config/plugins/visualizations/charts/static/repository/build/jqplot_boxplot.js.map
new file mode 100644
index 0000000..8009621
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_boxplot.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///jqplot_boxplot.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1***","webpack:///./static/repository/visualizations/jqplot/boxplot/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c*","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0*","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b*","webpack:///./~/css-loader/lib/css-base.js?da04*","webpack:///./~/style-load [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_histogram_discrete.js b/config/plugins/visualizations/charts/static/repository/build/jqplot_histogram_discrete.js
new file mode 100644
index 0000000..4641994
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_histogram_discrete.js
@@ -0,0 +1,10 @@
+define(function(){return function(t){function e(s){if(i[s])return i[s].exports;var r=i[s]={exports:{},id:s,loaded:!1};return t[s].call(r.exports,r,r.exports,e),r.loaded=!0,r.exports}var i={};return e.m=t,e.c=i,e.p="",e(0)}([function(t,e,i){var s,r;s=[i(4),i(20),i(22)],r=function(t,e,i){return Backbone.Model.extend({initialize:function(s){e.request(s.chart,t.buildJobDictionary(s.chart,"histogramdiscrete"),function(t){var e=new Backbone.Collection;s.chart.groups.each(function(t,i){e.add({_ [...]
+this._plotDimensions.width=this._width,this.grid._plotDimensions=this._plotDimensions,this.title._plotDimensions=this._plotDimensions,this.baseCanvas._plotDimensions=this._plotDimensions,this.eventCanvas._plotDimensions=this._plotDimensions,this.legend._plotDimensions=this._plotDimensions;for(var h,d,p,c,l=0,u=P.length;l<u;l++){h=P[l],c=this.axes[h],d=c._ticks;for(var p=0,g=d.length;p<g;p++){var f=d[p]._elem;f&&(t.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==T&&window.G [...]
+t.jqplot.CanvasManager.canvases.push(i),t.jqplot.CanvasManager.free.push(!1)),i},this.initCanvas=function(e){return t.jqplot.use_excanvas?window.G_vmlCanvasManager.initElement(e):e},this.freeAllCanvases=function(){for(var t=0,i=e.length;t<i;t++)this.freeCanvas(e[t]);e=[]},this.freeCanvas=function(e){if(t.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==T)window.G_vmlCanvasManager.uninitElement(t.jqplot.CanvasManager.canvases[e]),t.jqplot.CanvasManager.canvases[e]=null;else{ [...]
+this._elem.width(0);return e=null,this._elem},t.jqplot.DivTitleRenderer.prototype.pack=function(){};var O=.1;t.jqplot.LinePattern=function(e,i){var s={dotted:[O,t.jqplot.config.dotGapLength],dashed:[t.jqplot.config.dashLength,t.jqplot.config.gapLength],solid:null};if("string"==typeof i)if("."===i[0]||"-"===i[0]){var r=i;i=[];for(var o=0,a=r.length;o<a;o++){if("."===r[o])i.push(O);else{if("-"!==r[o])continue;i.push(t.jqplot.config.dashLength)}i.push(t.jqplot.config.gapLength)}}else i=s[i] [...]
+f=!1):g.lineTo(i[m][0],i[m][1]):f=!0;n&&g.closePath(),o?e.fill():e.stroke()}e.restore()},t.jqplot.ShapeRenderer=function(e){this.lineWidth=1.5,this.linePattern="solid",this.lineJoin="miter",this.lineCap="round",this.closePath=!1,this.fill=!1,this.isarc=!1,this.fillRect=!1,this.strokeRect=!1,this.clearRect=!1,this.strokeStyle="#999999",this.fillStyle="#999999",t.extend(!0,this,e)},t.jqplot.ShapeRenderer.prototype.init=function(e){t.extend(!0,this,e)},t.jqplot.ShapeRenderer.prototype.draw= [...]
+Y:"FullYear",y:"ShortYear.2",m:"MonthNumber.2","#m":"MonthNumber",B:"MonthName",b:"AbbrMonthName",d:"Date.2","#d":"Date",e:"Date",A:"DayName",a:"AbbrDayName",w:"Day",H:"Hours.2","#H":"Hours",I:"Hours12.2","#I":"Hours12",p:"AMPM",M:"Minutes.2","#M":"Minutes",S:"Seconds.2","#S":"Seconds",s:"Unix",N:"Milliseconds.3","#N":"Milliseconds",O:"TimezoneOffset",Z:"TimezoneName",G:"GmtOffset"},shortcuts:{F:"%Y-%m-%d",T:"%H:%M:%S",X:"%H:%M:%S",x:"%m/%d/%y",D:"%m/%d/%y","#c":"%a %b %e %H:%M:%S %Y",v: [...]
+this._elem.addClass("jqplot-"+this.axis+"-tick"),s=null,this._elem},t.jqplot.CanvasAxisTickRenderer.prototype.pack=function(){this._textRenderer.draw(this._elem.get(0).getContext("2d"),this.label)}}(jQuery),function(t){function e(){this.uid=null,this.type=null,this.gridStart=null,this.gridStop=null,this.tooltipWidthFactor=0,this.options={name:null,show:!0,lineWidth:2,lineCap:"round",color:"#666666",shadow:!0,shadowAngle:45,shadowOffset:1,shadowDepth:3,shadowAlpha:"0.07",xaxis:"xaxis",yax [...]
+u="xaxis"==this.name||"x2axis"==this.name?x.data[v][0]:x.data[v][1];var _=t.inArray(u,g)+1;"xaxis"==this.name||"x2axis"==this.name?x.data[v][0]=_:x.data[v][1]=_}if(this.groups>1&&!this._grouped){for(var l=g.length,h=parseInt(l/this.groups,10),d=0,o=h;o<l;o+=h+1)g[o]=" ";this._grouped=!0}s=f+.5,null==this.numberTicks&&(this.numberTicks=2*f+1);var p=s-i;this.min=i,this.max=s;var b=0,w=parseInt(3+e/10,10),h=parseInt(f/w,10);null==this.tickInterval&&(this.tickInterval=p/(this.numberTicks-1)) [...]
+break;case"w":var k=_.x+e._gridPadding.left-l.outerWidth(!0)-d.tooltipOffset-b,j=_.y+e._gridPadding.top-l.outerHeight(!0)/2;break;default:var k=_.x+e._gridPadding.left-l.outerWidth(!0)-d.tooltipOffset-w*b,j=_.y+e._gridPadding.top-d.tooltipOffset-l.outerHeight(!0)-w*b}l.css("left",k),l.css("top",j),d.fadeTooltip?l.stop(!0,!0).fadeIn(d.tooltipFadeSpeed):l.show(),l=null}function s(t,s,r,o,a){var n=a.plugins.highlighter,l=a.plugins.cursor;if(n.show)if(null==o&&n.isHighlighting){var h=jQuery. [...]
+//# sourceMappingURL=jqplot_histogram_discrete.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_histogram_discrete.js.map b/config/plugins/visualizations/charts/static/repository/build/jqplot_histogram_discrete.js.map
new file mode 100644
index 0000000..160b9de
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_histogram_discrete.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///jqplot_histogram_discrete.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1**","webpack:///./static/repository/visualizations/jqplot/histogram_discrete/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b","webpack:///./~/css-loader/lib/css-base.js?da04","webpack: [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_line.js b/config/plugins/visualizations/charts/static/repository/build/jqplot_line.js
new file mode 100644
index 0000000..d7825c3
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_line.js
@@ -0,0 +1,10 @@
+define(function(){return function(t){function e(s){if(i[s])return i[s].exports;var r=i[s]={exports:{},id:s,loaded:!1};return t[s].call(r.exports,r,r.exports,e),r.loaded=!0,r.exports}var i={};return e.m=t,e.c=i,e.p="",e(0)}([function(t,e,i){var s,r;s=[i(22)],r=function(t){return Backbone.Model.extend({initialize:function(e){new t(e)}})}.apply(e,s),!(void 0!==r&&(t.exports=r))},function(t,e,i){var s,r;s=[],r=function(){function t(t){return JSON.parse(JSON.stringify(t)||null)}function e(t){ [...]
+var f=d[p]._elem;f&&(t.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==T&&window.G_vmlCanvasManager.uninitElement(f.get(0)),f.emptyForce(),f=null,d._elem=null)}d=null,delete c.ticks,delete c._ticks,this.axes[h]=new e(h),this.axes[h]._plotWidth=this._width,this.axes[h]._plotHeight=this._height}i&&(r.dataRenderer&&t.isFunction(r.dataRenderer)&&(r.dataRendererOptions&&(this.dataRendererOptions=r.dataRendererOptions),this.dataRenderer=r.dataRenderer,i=this.dataRenderer(i,this, [...]
+t.jqplot.CanvasManager.canvases[e]=null;else{var i=t.jqplot.CanvasManager.canvases[e];i.getContext("2d").clearRect(0,0,i.width,i.height),t(i).unbind().removeAttr("class").removeAttr("style"),t(i).css({left:"",top:"",position:""}),i.width=0,i.height=0,t.jqplot.CanvasManager.free[e]=!0}}},t.jqplot.log=function(){window.console&&window.console.log.apply(window.console,arguments)},t.jqplot.config={addDomReference:!1,enablePlugins:!1,defaultHeight:300,defaultWidth:400,UTCAdjust:!1,timezoneOff [...]
+}i.push(t.jqplot.config.gapLength)}}else i=s[i];if(!i||!i.length)return e;var n=0,l=i[0],h=0,d=0,p=0,c=0,u=function(t,i){e.moveTo(t,i),h=t,d=i,p=t,c=i},g=function(t,s){var r=e.lineWidth,o=t-h,a=s-d,p=Math.sqrt(o*o+a*a);if(p>0&&r>0)for(o/=p,a/=p;;){var c=r*l;if(!(c<p)){h=t,d=s,0==(1&n)?e.lineTo(h,d):e.moveTo(h,d),l-=p/r;break}h+=c*o,d+=c*a,0==(1&n)?e.lineTo(h,d):e.moveTo(h,d),p-=c,n++,n>=i.length&&(n=0),l=i[n]}},f=function(){e.beginPath()},m=function(){g(p,c)};return{moveTo:u,lineTo:g,beg [...]
+this.strokeStyle="#999999",this.fillStyle="#999999",t.extend(!0,this,e)},t.jqplot.ShapeRenderer.prototype.init=function(e){t.extend(!0,this,e)},t.jqplot.ShapeRenderer.prototype.draw=function(e,i,s){e.save();var r=null!=s?s:{},o=null!=r.fill?r.fill:this.fill,a=null!=r.closePath?r.closePath:this.closePath,n=null!=r.fillRect?r.fillRect:this.fillRect,l=null!=r.strokeRect?r.strokeRect:this.strokeRect,h=null!=r.clearRect?r.clearRect:this.clearRect,d=null!=r.isarc?r.isarc:this.isarc,p=null!=r.l [...]
+"#N":"Milliseconds",O:"TimezoneOffset",Z:"TimezoneName",G:"GmtOffset"},shortcuts:{F:"%Y-%m-%d",T:"%H:%M:%S",X:"%H:%M:%S",x:"%m/%d/%y",D:"%m/%d/%y","#c":"%a %b %e %H:%M:%S %Y",v:"%e-%b-%Y",R:"%H:%M",r:"%I:%M:%S %p",t:"\t",n:"\n","%":"%"}},G.formats.php={codes:{matcher:/()%((%|[a-z]))/i,a:"AbbrDayName",A:"DayName",d:"Date.2",e:"Date",j:"DayOfYear.3",u:"DayOfWeek",w:"Day",U:"FullWeekOfYear.2",V:"IsoWeek.2",W:"WeekOfYear.2",b:"AbbrMonthName",B:"MonthName",m:"MonthNumber.2",h:"AbbrMonthName", [...]
+this.tooltipWidthFactor=0,this.options={name:null,show:!0,lineWidth:2,lineCap:"round",color:"#666666",shadow:!0,shadowAngle:45,shadowOffset:1,shadowDepth:3,shadowAlpha:"0.07",xaxis:"xaxis",yaxis:"yaxis",showTooltip:!1,showTooltipPrecision:.6,tooltipLocation:"nw",fadeTooltip:!0,tooltipFadeSpeed:"fast",tooltipOffset:4,tooltipFormatString:"%d, %d"}}function i(i){e.call(this),this.type="rectangle";var s={xmin:null,xmax:null,xOffset:"6px",xminOffset:null,xmaxOffset:null,ymin:null,ymax:null,yO [...]
+this._grouped=!0}s=f+.5,null==this.numberTicks&&(this.numberTicks=2*f+1);var p=s-i;this.min=i,this.max=s;var b=0,w=parseInt(3+e/10,10),h=parseInt(f/w,10);null==this.tickInterval&&(this.tickInterval=p/(this.numberTicks-1));for(var o=0;o<this.numberTicks;o++){r=this.min+o*this.tickInterval;var c=new this.tickRenderer(this.tickOptions);o/2==parseInt(o/2,10)?(c.showLabel=!1,c.showMark=!0):(h>0&&b<h?(c.showLabel=!1,b+=1):(c.showLabel=!0,b=0),c.label=c.formatter(c.formatString,g[(o-1)/2]),c.sh [...]
+}l.css("left",k),l.css("top",j),d.fadeTooltip?l.stop(!0,!0).fadeIn(d.tooltipFadeSpeed):l.show(),l=null}function s(t,s,r,o,a){var n=a.plugins.highlighter,l=a.plugins.cursor;if(n.show)if(null==o&&n.isHighlighting){var h=jQuery.Event("jqplotHighlighterUnhighlight");a.target.trigger(h);var d=n.highlightCanvas._ctx;d.clearRect(0,0,d.canvas.width,d.canvas.height),n.fadeTooltip?n._tooltipElem.fadeOut(n.tooltipFadeSpeed):n._tooltipElem.hide(),n.bringSeriesToFront&&a.restorePreviousSeriesOrder(), [...]
+//# sourceMappingURL=jqplot_line.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_line.js.map b/config/plugins/visualizations/charts/static/repository/build/jqplot_line.js.map
new file mode 100644
index 0000000..cd4d364
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_line.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///jqplot_line.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1*****","webpack:///./static/repository/visualizations/jqplot/line/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c***","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0***","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b***","webpack:///./~/css-loader/lib/css-base.js?da04***","webpack:///./~/style- [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_scatter.js b/config/plugins/visualizations/charts/static/repository/build/jqplot_scatter.js
new file mode 100644
index 0000000..a7c46b3
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_scatter.js
@@ -0,0 +1,10 @@
+define(function(){return function(t){function e(s){if(i[s])return i[s].exports;var r=i[s]={exports:{},id:s,loaded:!1};return t[s].call(r.exports,r,r.exports,e),r.loaded=!0,r.exports}var i={};return e.m=t,e.c=i,e.p="",e(0)}([function(t,e,i){var s,r;s=[i(22)],r=function(t){return Backbone.Model.extend({initialize:function(e){e.makeConfig=function(t,e){$.extend(!0,e,{seriesDefaults:{renderer:$.jqplot.LineRenderer,showLine:!1,markerOptions:{show:!0}}})},new t(e)}})}.apply(e,s),!(void 0!==r&& [...]
+for(var h,d,p,c,l=0,u=P.length;l<u;l++){h=P[l],c=this.axes[h],d=c._ticks;for(var p=0,g=d.length;p<g;p++){var f=d[p]._elem;f&&(t.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==T&&window.G_vmlCanvasManager.uninitElement(f.get(0)),f.emptyForce(),f=null,d._elem=null)}d=null,delete c.ticks,delete c._ticks,this.axes[h]=new e(h),this.axes[h]._plotWidth=this._width,this.axes[h]._plotHeight=this._height}i&&(r.dataRenderer&&t.isFunction(r.dataRenderer)&&(r.dataRendererOptions&&(thi [...]
+e=[]},this.freeCanvas=function(e){if(t.jqplot.use_excanvas&&window.G_vmlCanvasManager.uninitElement!==T)window.G_vmlCanvasManager.uninitElement(t.jqplot.CanvasManager.canvases[e]),t.jqplot.CanvasManager.canvases[e]=null;else{var i=t.jqplot.CanvasManager.canvases[e];i.getContext("2d").clearRect(0,0,i.width,i.height),t(i).unbind().removeAttr("class").removeAttr("style"),t(i).css({left:"",top:"",position:""}),i.width=0,i.height=0,t.jqplot.CanvasManager.free[e]=!0}}},t.jqplot.log=function(){ [...]
+solid:null};if("string"==typeof i)if("."===i[0]||"-"===i[0]){var r=i;i=[];for(var o=0,a=r.length;o<a;o++){if("."===r[o])i.push(O);else{if("-"!==r[o])continue;i.push(t.jqplot.config.dashLength)}i.push(t.jqplot.config.gapLength)}}else i=s[i];if(!i||!i.length)return e;var n=0,l=i[0],h=0,d=0,p=0,c=0,u=function(t,i){e.moveTo(t,i),h=t,d=i,p=t,c=i},g=function(t,s){var r=e.lineWidth,o=t-h,a=s-d,p=Math.sqrt(o*o+a*a);if(p>0&&r>0)for(o/=p,a/=p;;){var c=r*l;if(!(c<p)){h=t,d=s,0==(1&n)?e.lineTo(h,d): [...]
+this.lineWidth=1.5,this.linePattern="solid",this.lineJoin="miter",this.lineCap="round",this.closePath=!1,this.fill=!1,this.isarc=!1,this.fillRect=!1,this.strokeRect=!1,this.clearRect=!1,this.strokeStyle="#999999",this.fillStyle="#999999",t.extend(!0,this,e)},t.jqplot.ShapeRenderer.prototype.init=function(e){t.extend(!0,this,e)},t.jqplot.ShapeRenderer.prototype.draw=function(e,i,s){e.save();var r=null!=s?s:{},o=null!=r.fill?r.fill:this.fill,a=null!=r.closePath?r.closePath:this.closePath,n [...]
+e:"Date",A:"DayName",a:"AbbrDayName",w:"Day",H:"Hours.2","#H":"Hours",I:"Hours12.2","#I":"Hours12",p:"AMPM",M:"Minutes.2","#M":"Minutes",S:"Seconds.2","#S":"Seconds",s:"Unix",N:"Milliseconds.3","#N":"Milliseconds",O:"TimezoneOffset",Z:"TimezoneName",G:"GmtOffset"},shortcuts:{F:"%Y-%m-%d",T:"%H:%M:%S",X:"%H:%M:%S",x:"%m/%d/%y",D:"%m/%d/%y","#c":"%a %b %e %H:%M:%S %Y",v:"%e-%b-%Y",R:"%H:%M",r:"%I:%M:%S %p",t:"\t",n:"\n","%":"%"}},G.formats.php={codes:{matcher:/()%((%|[a-z]))/i,a:"AbbrDayNa [...]
+this._textRenderer.draw(this._elem.get(0).getContext("2d"),this.label)}}(jQuery),function(t){function e(){this.uid=null,this.type=null,this.gridStart=null,this.gridStop=null,this.tooltipWidthFactor=0,this.options={name:null,show:!0,lineWidth:2,lineCap:"round",color:"#666666",shadow:!0,shadowAngle:45,shadowOffset:1,shadowDepth:3,shadowAlpha:"0.07",xaxis:"xaxis",yaxis:"yaxis",showTooltip:!1,showTooltipPrecision:.6,tooltipLocation:"nw",fadeTooltip:!0,tooltipFadeSpeed:"fast",tooltipOffset:4, [...]
+var _=t.inArray(u,g)+1;"xaxis"==this.name||"x2axis"==this.name?x.data[v][0]=_:x.data[v][1]=_}if(this.groups>1&&!this._grouped){for(var l=g.length,h=parseInt(l/this.groups,10),d=0,o=h;o<l;o+=h+1)g[o]=" ";this._grouped=!0}s=f+.5,null==this.numberTicks&&(this.numberTicks=2*f+1);var p=s-i;this.min=i,this.max=s;var b=0,w=parseInt(3+e/10,10),h=parseInt(f/w,10);null==this.tickInterval&&(this.tickInterval=p/(this.numberTicks-1));for(var o=0;o<this.numberTicks;o++){r=this.min+o*this.tickInterval; [...]
+break;case"w":var k=_.x+e._gridPadding.left-l.outerWidth(!0)-d.tooltipOffset-b,j=_.y+e._gridPadding.top-l.outerHeight(!0)/2;break;default:var k=_.x+e._gridPadding.left-l.outerWidth(!0)-d.tooltipOffset-w*b,j=_.y+e._gridPadding.top-d.tooltipOffset-l.outerHeight(!0)-w*b}l.css("left",k),l.css("top",j),d.fadeTooltip?l.stop(!0,!0).fadeIn(d.tooltipFadeSpeed):l.show(),l=null}function s(t,s,r,o,a){var n=a.plugins.highlighter,l=a.plugins.cursor;if(n.show)if(null==o&&n.isHighlighting){var h=jQuery. [...]
+//# sourceMappingURL=jqplot_scatter.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/jqplot_scatter.js.map b/config/plugins/visualizations/charts/static/repository/build/jqplot_scatter.js.map
new file mode 100644
index 0000000..38ec46f
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/jqplot_scatter.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///jqplot_scatter.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1****","webpack:///./static/repository/visualizations/jqplot/scatter/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c**","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0**","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b**","webpack:///./~/css-loader/lib/css-base.js?da04**","webpack:///./~/style [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_bar.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar.js
new file mode 100644
index 0000000..f5c0f0c
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="multiBarChart",new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function(){function t(t){return JSON.parse(JSON.stringify(t [...]
+var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style("text-anchor",u%360>0?"start":"end")}b.enter().append("text").attr("class","nv-axislabel");var k=2==i.range().len [...]
+}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").data([n]),S=A.enter().append("g").attr("class","nvd3 nv-wrap nv-discreteBarWithAxes").append("g"),C=S.append("defs"),M=A.select( [...]
+t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.legend=u,t.lines=a,t.bars=i,t.xAxis=l,t.y1Axis=s,t.y2Axis=c,d3.rebind(t,a,"defined","size","clipVoronoi","inter [...]
+M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n){M.elementDblClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+. [...]
+return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,pointIndex:e.point})}};v.on("click",function(t){m(t,H.elementClick)}).on("mouseover",function(t){m(t,H.elementMouseover)}).on("mouseout",function(t,e){m(t,H.elementM [...]
+return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n.filter(function(t){return!t.disabled});n=d3.layout.stack().order(f).offset(p).values(function(t){return t.values}).x(c).y(u).out(function(t,e,n){var r=0 [...]
+//# sourceMappingURL=nvd3_bar.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_bar.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar.js.map
new file mode 100644
index 0000000..46979f9
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_bar.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1*****************","webpack:///./static/repository/visualizations/nvd3/bar/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c***************","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0***************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b***************","webpack:///./~/css-loader/lib/c [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal.js
new file mode 100644
index 0000000..f0d530a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="multiBarHorizontalChart",new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function(){function t(t){return JSON.parse(JSON.s [...]
+var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style("text-anchor",u%360>0?"start":"end")}b.enter().append("text").attr("class","nv-axislabel");var k=2==i.range().len [...]
+}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").data([n]),S=A.enter().append("g").attr("class","nvd3 nv-wrap nv-discreteBarWithAxes").append("g"),C=S.append("defs"),M=A.select( [...]
+t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.legend=u,t.lines=a,t.bars=i,t.xAxis=l,t.y1Axis=s,t.y2Axis=c,d3.rebind(t,a,"defined","size","clipVoronoi","inter [...]
+M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n){M.elementDblClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+. [...]
+return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,pointIndex:e.point})}};v.on("click",function(t){m(t,H.elementClick)}).on("mouseover",function(t){m(t,H.elementMouseover)}).on("mouseout",function(t,e){m(t,H.elementM [...]
+return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n.filter(function(t){return!t.disabled});n=d3.layout.stack().order(f).offset(p).values(function(t){return t.values}).x(c).y(u).out(function(t,e,n){var r=0 [...]
+//# sourceMappingURL=nvd3_bar_horizontal.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal.js.map
new file mode 100644
index 0000000..827afa2
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_bar_horizontal.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1****************","webpack:///./static/repository/visualizations/nvd3/bar_horizontal/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c**************","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0**************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b**************","webpack:///./ [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal_stacked.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal_stacked.js
new file mode 100644
index 0000000..ecb7ce8
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal_stacked.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="multiBarHorizontalChart",e.makeConfig=function(t){t.stacked(!0)},new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function( [...]
+}).select("text").attr("dy","-0.5em").attr("y",-e.tickPadding()).attr("text-anchor","middle").text(function(t,e){var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style( [...]
+if(t.update=function(){y.beforeUpdate(),u.call(t)},t.container=this,!(n&&n.length&&n.filter(function(t){return t.values.length}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").da [...]
+n.tooltip.show([o,i],p,e.value<0?"n":"s",null,r)};return a.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.l [...]
+pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("mouseout",function(e,n){d3.select(this).classed("hover",!1),M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n) [...]
+return{data:o.clip(t),series:e[n][2],point:e[n][3]}}),v=Z.select(".nv-point-paths").selectAll("path").data(f);v.enter().append("path").attr("class",function(t,e){return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,poi [...]
+t):p},t.alignValue=function(e){return arguments.length?(f=e,t):f},t.rightAlignValue=function(e){return arguments.length?(h=e,t):h},t.noData=function(e){return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n. [...]
+//# sourceMappingURL=nvd3_bar_horizontal_stacked.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal_stacked.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal_stacked.js.map
new file mode 100644
index 0000000..b7c7e2f
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_horizontal_stacked.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_bar_horizontal_stacked.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1***************","webpack:///./static/repository/visualizations/nvd3/bar_horizontal_stacked/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c*************","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0*************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b*************","w [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_stacked.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_stacked.js
new file mode 100644
index 0000000..16dd5f8
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_stacked.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="multiBarChart",e.makeConfig=function(t){t.stacked(!0)},new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function(){function [...]
+}).select("text").attr("dy","-0.5em").attr("y",-e.tickPadding()).attr("text-anchor","middle").text(function(t,e){var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style( [...]
+if(t.update=function(){y.beforeUpdate(),u.call(t)},t.container=this,!(n&&n.length&&n.filter(function(t){return t.values.length}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").da [...]
+n.tooltip.show([o,i],p,e.value<0?"n":"s",null,r)};return a.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.l [...]
+pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("mouseout",function(e,n){d3.select(this).classed("hover",!1),M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n) [...]
+return{data:o.clip(t),series:e[n][2],point:e[n][3]}}),v=Z.select(".nv-point-paths").selectAll("path").data(f);v.enter().append("path").attr("class",function(t,e){return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,poi [...]
+t):p},t.alignValue=function(e){return arguments.length?(f=e,t):f},t.rightAlignValue=function(e){return arguments.length?(h=e,t):h},t.noData=function(e){return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n. [...]
+//# sourceMappingURL=nvd3_bar_stacked.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_stacked.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_stacked.js.map
new file mode 100644
index 0000000..d9f28cf
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_bar_stacked.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_bar_stacked.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1**************","webpack:///./static/repository/visualizations/nvd3/bar_stacked/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c************","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b************","webpack:///./~/css-loader/l [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram.js
new file mode 100644
index 0000000..e049eb5
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(4),n(20),n(11)],o=function(t,e,n){return Backbone.Model.extend({initialize:function(r){e.request(r.chart,t.buildJobDictionary(r.chart,"histogram"),function(t){var e=new Backbone.Collection;r.chart.groups.each(function(t,n){e.add({__data_co [...]
+if(b.attr("text-anchor","middle").attr("y",0).attr("x",k/2),s){var w=a.selectAll("g.nv-axisMaxMin").data(i.domain());w.enter().append("g").attr("class","nv-axisMaxMin").append("text"),w.exit().remove(),w.attr("transform",function(t,e){return"translate("+i(t)+",0)"}).select("text").attr("dy","-0.5em").attr("y",-e.tickPadding()).attr("text-anchor","middle").text(function(t,e){var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"} [...]
+return arguments.length?(k=e,t):k},t},n.models.discreteBarChart=function(){"use strict";function t(n){return n.each(function(n){var u=d3.select(this),v=this,x=(s||parseInt(u.style("width"))||960)-l.left-l.right,k=(c||parseInt(u.style("height"))||400)-l.top-l.bottom;if(t.update=function(){y.beforeUpdate(),u.call(t)},t.container=this,!(n&&n.length&&n.filter(function(t){return t.values.length}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class"," [...]
+s.orient("left"),c.orient("right");var S=function(e,r){var o=e.pos[0]+(r.offsetLeft||0),i=e.pos[1]+(r.offsetTop||0),u=l.tickFormat()(a.x()(e.point,e.pointIndex)),d=(e.series.bar?s:c).tickFormat()(a.y()(e.point,e.pointIndex)),p=x(e.series.key,u,d,e,t);n.tooltip.show([o,i],p,e.value<0?"n":"s",null,r)};return a.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i. [...]
+W.on("mouseover",function(e,n){d3.select(this).classed("hover",!0),M.elementMouseover({value:v(e,n),point:e,series:t[e.series],pos:[h(v(e,n)+(b?e.y0:0)),f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("mouseout",function(e,n){d3.select(this).classed("hover",!1),M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),poi [...]
+e=e.sort(function(t,e){return t[0]-e[0]||t[1]-e[1]});for(var c=0;c<e.length-1;)Math.abs(e[c][0]-e[c+1][0])<a&&Math.abs(e[c][1]-e[c+1][1])<a?e.splice(c+1,1):c++;var f=d3.geom.voronoi(e).map(function(t,n){return{data:o.clip(t),series:e[n][2],point:e[n][3]}}),v=G.select(".nv-point-paths").selectAll("path").data(f);v.enter().append("path").attr("class",function(t,e){return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 [...]
+return arguments.length?(l=e,t):l},t.xTickFormat=function(e){return arguments.length?(u=e,t):u},t.yTickFormat=function(e){return arguments.length?(d=e,t):d},t.showValue=function(e){return arguments.length?(p=e,t):p},t.alignValue=function(e){return arguments.length?(f=e,t):f},t.rightAlignValue=function(e){return arguments.length?(h=e,t):h},t.noData=function(e){return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a [...]
+//# sourceMappingURL=nvd3_histogram.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram.js.map
new file mode 100644
index 0000000..8f23e35
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_histogram.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1********","webpack:///./static/repository/visualizations/nvd3/histogram/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c******","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0******","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b******","webpack:///./~/css-loader/lib/css-base.js?da04******"," [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram_discrete.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram_discrete.js
new file mode 100644
index 0000000..f7cb4bf
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram_discrete.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(4),n(20),n(11)],o=function(t,e,n){return Backbone.Model.extend({initialize:function(r){e.request(r.chart,t.buildJobDictionary(r.chart,"histogramdiscrete"),function(t){var e=new Backbone.Collection;r.chart.groups.each(function(t,n){e.add({_ [...]
+if(b.attr("text-anchor","middle").attr("y",0).attr("x",k/2),s){var w=a.selectAll("g.nv-axisMaxMin").data(i.domain());w.enter().append("g").attr("class","nv-axisMaxMin").append("text"),w.exit().remove(),w.attr("transform",function(t,e){return"translate("+i(t)+",0)"}).select("text").attr("dy","-0.5em").attr("y",-e.tickPadding()).attr("text-anchor","middle").text(function(t,e){var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"} [...]
+return arguments.length?(k=e,t):k},t},n.models.discreteBarChart=function(){"use strict";function t(n){return n.each(function(n){var u=d3.select(this),v=this,x=(s||parseInt(u.style("width"))||960)-l.left-l.right,k=(c||parseInt(u.style("height"))||400)-l.top-l.bottom;if(t.update=function(){y.beforeUpdate(),u.call(t)},t.container=this,!(n&&n.length&&n.filter(function(t){return t.values.length}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class"," [...]
+s.orient("left"),c.orient("right");var S=function(e,r){var o=e.pos[0]+(r.offsetLeft||0),i=e.pos[1]+(r.offsetTop||0),u=l.tickFormat()(a.x()(e.point,e.pointIndex)),d=(e.series.bar?s:c).tickFormat()(a.y()(e.point,e.pointIndex)),p=x(e.series.key,u,d,e,t);n.tooltip.show([o,i],p,e.value<0?"n":"s",null,r)};return a.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i. [...]
+W.on("mouseover",function(e,n){d3.select(this).classed("hover",!0),M.elementMouseover({value:v(e,n),point:e,series:t[e.series],pos:[h(v(e,n)+(b?e.y0:0)),f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("mouseout",function(e,n){d3.select(this).classed("hover",!1),M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),poi [...]
+e=e.sort(function(t,e){return t[0]-e[0]||t[1]-e[1]});for(var c=0;c<e.length-1;)Math.abs(e[c][0]-e[c+1][0])<a&&Math.abs(e[c][1]-e[c+1][1])<a?e.splice(c+1,1):c++;var f=d3.geom.voronoi(e).map(function(t,n){return{data:o.clip(t),series:e[n][2],point:e[n][3]}}),v=G.select(".nv-point-paths").selectAll("path").data(f);v.enter().append("path").attr("class",function(t,e){return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 [...]
+return arguments.length?(l=e,t):l},t.xTickFormat=function(e){return arguments.length?(u=e,t):u},t.yTickFormat=function(e){return arguments.length?(d=e,t):d},t.showValue=function(e){return arguments.length?(p=e,t):p},t.alignValue=function(e){return arguments.length?(f=e,t):f},t.rightAlignValue=function(e){return arguments.length?(h=e,t):h},t.noData=function(e){return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a [...]
+//# sourceMappingURL=nvd3_histogram_discrete.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram_discrete.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram_discrete.js.map
new file mode 100644
index 0000000..024f653
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_histogram_discrete.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_histogram_discrete.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1*******","webpack:///./static/repository/visualizations/nvd3/histogram_discrete/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c*****","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0*****","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b*****","webpack:///./~/css-loader/lib/css-base.js [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_line.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_line.js
new file mode 100644
index 0000000..98f8aca
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_line.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="lineChart",new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function(){function t(t){return JSON.parse(JSON.stringify(t)||n [...]
+var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style("text-anchor",u%360>0?"start":"end")}b.enter().append("text").attr("class","nv-axislabel");var k=2==i.range().len [...]
+}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").data([n]),S=A.enter().append("g").attr("class","nvd3 nv-wrap nv-discreteBarWithAxes").append("g"),C=S.append("defs"),M=A.select( [...]
+t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.legend=u,t.lines=a,t.bars=i,t.xAxis=l,t.y1Axis=s,t.y2Axis=c,d3.rebind(t,a,"defined","size","clipVoronoi","inter [...]
+M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n){M.elementDblClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+. [...]
+return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,pointIndex:e.point})}};v.on("click",function(t){m(t,H.elementClick)}).on("mouseover",function(t){m(t,H.elementMouseover)}).on("mouseout",function(t,e){m(t,H.elementM [...]
+return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n.filter(function(t){return!t.disabled});n=d3.layout.stack().order(f).offset(p).values(function(t){return t.values}).x(c).y(u).out(function(t,e,n){var r=0 [...]
+//# sourceMappingURL=nvd3_line.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_line.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_line.js.map
new file mode 100644
index 0000000..7091533
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_line.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_line.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1*************","webpack:///./static/repository/visualizations/nvd3/line/wrapper.js","webpack:///./static/repository/utilities/utils.js","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0***********","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b***********","webpack:///./~/css-loader/lib/css-base.js?da04***********","w [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_line_focus.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_line_focus.js
new file mode 100644
index 0000000..4cc0d47
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_line_focus.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="lineWithFocusChart",new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function(){function t(t){return JSON.parse(JSON.string [...]
+var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style("text-anchor",u%360>0?"start":"end")}b.enter().append("text").attr("class","nv-axislabel");var k=2==i.range().len [...]
+}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").data([n]),S=A.enter().append("g").attr("class","nvd3 nv-wrap nv-discreteBarWithAxes").append("g"),C=S.append("defs"),M=A.select( [...]
+t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.legend=u,t.lines=a,t.bars=i,t.xAxis=l,t.y1Axis=s,t.y2Axis=c,d3.rebind(t,a,"defined","size","clipVoronoi","inter [...]
+M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n){M.elementDblClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+. [...]
+return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,pointIndex:e.point})}};v.on("click",function(t){m(t,H.elementClick)}).on("mouseover",function(t){m(t,H.elementMouseover)}).on("mouseout",function(t,e){m(t,H.elementM [...]
+return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n.filter(function(t){return!t.disabled});n=d3.layout.stack().order(f).offset(p).values(function(t){return t.values}).x(c).y(u).out(function(t,e,n){var r=0 [...]
+//# sourceMappingURL=nvd3_line_focus.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_line_focus.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_line_focus.js.map
new file mode 100644
index 0000000..02ce9f9
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_line_focus.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_line_focus.js","webpack:///webpack/bootstrap e3622d0033a89edad914","webpack:///./static/repository/visualizations/nvd3/line_focus/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c***********","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0**********","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js","webpack:///./~/css-loader/lib/css-base.js?da04**********","webpack:/ [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_pie.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_pie.js
new file mode 100644
index 0000000..fdbc6d5
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_pie.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(1),n(5),n(10),n(9)],o=function(t,e){return Backbone.View.extend({initialize:function(t){var n=this,r=t.chart,o=t.targets,a=t.process;e.request({dataset_id:r.get("dataset_id"),dataset_groups:r.groups,success:function(t){for(var e in t){var  [...]
+return"translate(0,"+v(t)+")"}).select("text").attr("dy",".32em").attr("y",0).attr("x",-e.tickPadding()).attr("text-anchor","end").text(function(t,e){var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate(0,"+i.range()[e]+")"}).select("text").style("opacity",1)}}if(b.text(function(t){return t}),!s||"left"!==e.orient()&&"right"!==e.orient()||(y.selectAll("g").each(function(t,e){d3.select(this).select("text").attr("opacity",1),(i(t)<i.range()[1]+10||i [...]
+t.pos=[t.pos[0]+l.left,t.pos[1]+l.top],y.tooltipShow(t)}),o.dispatch.on("elementMouseout.tooltip",function(t){y.tooltipHide(t)}),y.on("tooltipHide",function(){g&&n.tooltip.cleanup()}),t.dispatch=y,t.discretebar=o,t.xAxis=a,t.yAxis=i,d3.rebind(t,o,"x","y","xDomain","yDomain","xRange","yRange","forceX","forceY","id","showValues","valueFormat"),t.options=n.utils.optionsFunc.bind(t),t.margin=function(e){return arguments.length?(l.top="undefined"!=typeof e.top?e.top:l.top,l.right="undefined"! [...]
+E.select(".nv-focus .nv-y.nv-axis").call(c)}}var z=d3.select(this),W=this,_=(m||parseInt(z.style("width"))||960)-h.left-h.right,L=(y||parseInt(z.style("height"))||400)-h.top-h.bottom-x,N=x-g.top-g.bottom;if(t.update=function(){z.call(t)},t.container=this,!(n&&n.length&&n.filter(function(t){return t.values.length}).length)){var F=z.selectAll(".nv-noData").data([S]);return F.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),F.attr("x",h. [...]
+return t.dispatch=M,t.options=n.utils.optionsFunc.bind(t),t.x=function(e){return arguments.length?(g=e,t):g},t.y=function(e){return arguments.length?(v=e,t):v},t.margin=function(e){return arguments.length?(c.top="undefined"!=typeof e.top?e.top:c.top,c.right="undefined"!=typeof e.right?e.right:c.right,c.bottom="undefined"!=typeof e.bottom?e.bottom:c.bottom,c.left="undefined"!=typeof e.left?e.left:c.left,t):c},t.width=function(e){return arguments.length?(u=e,t):u},t.height=function(e){retu [...]
+j.append("clipPath").attr("id","nv-edge-clip-"+u).append("rect"),G.select("#nv-edge-clip-"+u+" rect").attr("width",V).attr("height",Y>0?Y:0),U.attr("clip-path",D?"url(#nv-edge-clip-"+u+")":""),P=!0;var q=G.select(".nv-groups").selectAll(".nv-group").data(function(t){return t},function(t){return t.key});if(q.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),q.exit().remove(),q.attr("class",function(t,e){return"nv-group nv-series-"+e}).classed("hover",function(t){ [...]
+}),v.dispatch.on("elementMouseout.area",function(t){M.select(".nv-chart-"+s+" .nv-area-"+t.seriesIndex).classed("hover",!1)}),t.d3_stackedOffset_stackPercent=function(t){var e,n,r,o=t.length,a=t[0].length,i=1/o,l=[];for(n=0;n<a;++n){for(e=0,r=0;e<b.length;e++)r+=u(b[e].values[n]);if(r)for(e=0;e<o;e++)t[e][n][1]/=r;else for(e=0;e<o;e++)t[e][n][1]=i}for(n=0;n<a;++n)l[n]=0;return l}}),t}var e,r,o={top:0,right:0,bottom:0,left:0},a=960,i=500,l=n.utils.defaultColor(),s=Math.floor(1e5*Math.rand [...]
+//# sourceMappingURL=nvd3_pie.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_pie.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_pie.js.map
new file mode 100644
index 0000000..524400e
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_pie.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_pie.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1******************","webpack:///./static/repository/visualizations/nvd3/pie/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c****************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b****************","webpack:///./~/css-loader/lib/css-base.js?da04****************","webpack:///./~/style-loader/addStyles.js?b980****************" [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_scatter.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_scatter.js
new file mode 100644
index 0000000..f9c12dd
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_scatter.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="scatterChart",e.makeConfig=function(t){t.showDistX(!0).showDistY(!0).color(d3.scale.category10().range())},new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o)) [...]
+return"translate("+i(t)+",0)"}).select("text").attr("dy","-0.5em").attr("y",-e.tickPadding()).attr("text-anchor","middle").text(function(t,e){var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){retur [...]
+if(t.update=function(){y.beforeUpdate(),u.call(t)},t.container=this,!(n&&n.length&&n.filter(function(t){return t.values.length}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").da [...]
+n.tooltip.show([o,i],p,e.value<0?"n":"s",null,r)};return a.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.l [...]
+pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("mouseout",function(e,n){d3.select(this).classed("hover",!1),M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n) [...]
+return{data:o.clip(t),series:e[n][2],point:e[n][3]}}),v=Z.select(".nv-point-paths").selectAll("path").data(f);v.enter().append("path").attr("class",function(t,e){return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,poi [...]
+t):p},t.alignValue=function(e){return arguments.length?(f=e,t):f},t.rightAlignValue=function(e){return arguments.length?(h=e,t):h},t.noData=function(e){return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n. [...]
+//# sourceMappingURL=nvd3_scatter.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_scatter.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_scatter.js.map
new file mode 100644
index 0000000..8cb9a2d
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_scatter.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_scatter.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1************","webpack:///./static/repository/visualizations/nvd3/scatter/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c**********","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b**********","webpack:///./~/css-loader/lib/css-base.js?da04*********"," [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea.js
new file mode 100644
index 0000000..c13eddc
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="stackedAreaChart",new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function(){function t(t){return JSON.parse(JSON.stringif [...]
+var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style("text-anchor",u%360>0?"start":"end")}b.enter().append("text").attr("class","nv-axislabel");var k=2==i.range().len [...]
+}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").data([n]),S=A.enter().append("g").attr("class","nvd3 nv-wrap nv-discreteBarWithAxes").append("g"),C=S.append("defs"),M=A.select( [...]
+t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.legend=u,t.lines=a,t.bars=i,t.xAxis=l,t.y1Axis=s,t.y2Axis=c,d3.rebind(t,a,"defined","size","clipVoronoi","inter [...]
+M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n){M.elementDblClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+. [...]
+return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,pointIndex:e.point})}};v.on("click",function(t){m(t,H.elementClick)}).on("mouseover",function(t){m(t,H.elementMouseover)}).on("mouseout",function(t,e){m(t,H.elementM [...]
+return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n.filter(function(t){return!t.disabled});n=d3.layout.stack().order(f).offset(p).values(function(t){return t.values}).x(c).y(u).out(function(t,e,n){var r=0 [...]
+//# sourceMappingURL=nvd3_stackedarea.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea.js.map
new file mode 100644
index 0000000..812f015
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_stackedarea.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1***********","webpack:///./static/repository/visualizations/nvd3/stackedarea/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c*********","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0*********","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b*********","webpack:///./~/css-loader/lib/css-base. [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_full.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_full.js
new file mode 100644
index 0000000..78dc63b
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_full.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="stackedAreaChart",e.makeConfig=function(t){t.style("expand")},new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function(){f [...]
+}).select("text").attr("dy","-0.5em").attr("y",-e.tickPadding()).attr("text-anchor","middle").text(function(t,e){var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style( [...]
+if(t.update=function(){y.beforeUpdate(),u.call(t)},t.container=this,!(n&&n.length&&n.filter(function(t){return t.values.length}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").da [...]
+n.tooltip.show([o,i],p,e.value<0?"n":"s",null,r)};return a.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.l [...]
+pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("mouseout",function(e,n){d3.select(this).classed("hover",!1),M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n) [...]
+return{data:o.clip(t),series:e[n][2],point:e[n][3]}}),v=Z.select(".nv-point-paths").selectAll("path").data(f);v.enter().append("path").attr("class",function(t,e){return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,poi [...]
+t):p},t.alignValue=function(e){return arguments.length?(f=e,t):f},t.rightAlignValue=function(e){return arguments.length?(h=e,t):h},t.noData=function(e){return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n. [...]
+//# sourceMappingURL=nvd3_stackedarea_full.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_full.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_full.js.map
new file mode 100644
index 0000000..3ab3c31
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_full.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_stackedarea_full.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1**********","webpack:///./static/repository/visualizations/nvd3/stackedarea_full/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c********","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0********","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b********","webpack:///./~/css-loader/lib/css [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_stream.js b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_stream.js
new file mode 100644
index 0000000..be9cb4a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_stream.js
@@ -0,0 +1,8 @@
+define(function(){return function(t){function e(r){if(n[r])return n[r].exports;var o=n[r]={exports:{},id:r,loaded:!1};return t[r].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){var r,o;r=[n(11)],o=function(t){return Backbone.Model.extend({initialize:function(e){e.type="stackedAreaChart",e.makeConfig=function(t){t.style("stream")},new t(e)}})}.apply(e,r),!(void 0!==o&&(t.exports=o))},function(t,e,n){var r,o;r=[],o=function(){f [...]
+}).select("text").attr("dy","-0.5em").attr("y",-e.tickPadding()).attr("text-anchor","middle").text(function(t,e){var n=x(t);return(""+n).match("NaN")?"":n}),w.attr("transform",function(t,e){return"translate("+i.range()[e]+",0)"})}break;case"bottom":var A=36,S=30,C=y.selectAll("g").select("text");if(u%360){C.each(function(t,e){var n=this.getBBox().width;n>S&&(S=n)});var M=Math.abs(Math.sin(u*Math.PI/180)),A=(M?M*S:S)+30;C.attr("transform",function(t,e,n){return"rotate("+u+" 0,0)"}).style( [...]
+if(t.update=function(){y.beforeUpdate(),u.call(t)},t.container=this,!(n&&n.length&&n.filter(function(t){return t.values.length}).length)){var w=u.selectAll(".nv-noData").data([m]);return w.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),w.attr("x",l.left+x/2).attr("y",l.top+k/2).text(function(t){return t}),t}u.selectAll(".nv-noData").remove(),e=o.xScale(),r=o.yScale().clamp(!0);var A=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").da [...]
+n.tooltip.show([o,i],p,e.value<0?"n":"s",null,r)};return a.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),a.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),i.dispatch.on("elementMouseover.tooltip",function(t){t.pos=[t.pos[0]+d.left,t.pos[1]+d.top],A.tooltipShow(t)}),i.dispatch.on("elementMouseout.tooltip",function(t){A.tooltipHide(t)}),A.on("tooltipHide",function(){y&&n.tooltip.cleanup()}),t.dispatch=A,t.l [...]
+pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("mouseout",function(e,n){d3.select(this).classed("hover",!1),M.elementMouseout({value:v(e,n),point:e,series:t[e.series],pointIndex:n,seriesIndex:e.series,e:d3.event})}).on("click",function(e,n){M.elementClick({value:v(e,n),point:e,series:t[e.series],pos:[f(g(e,n))+f.rangeBand()*(b?t.length/2:e.series+.5)/t.length,h(v(e,n)+(b?e.y0:0))],pointIndex:n,seriesIndex:e.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(e,n) [...]
+return{data:o.clip(t),series:e[n][2],point:e[n][3]}}),v=Z.select(".nv-point-paths").selectAll("path").data(f);v.enter().append("path").attr("class",function(t,e){return"nv-path-"+e}),v.exit().remove(),v.attr("d",function(t){return t&&t.data&&0!==t.data.length?"M"+t.data.join("L")+"Z":"M 0 0"});var m=function(e,n){if(P)return 0;var r=t[e.series];if("undefined"!=typeof r){var o=r.values[e.point];n({point:o,series:r,pos:[d(h(o,e.point))+i.left,p(g(o,e.point))+i.top],seriesIndex:e.series,poi [...]
+t):p},t.alignValue=function(e){return arguments.length?(f=e,t):f},t.rightAlignValue=function(e){return arguments.length?(h=e,t):h},t.noData=function(e){return arguments.length?(g=e,t):g},t},n.models.stackedArea=function(){"use strict";function t(n){return n.each(function(n){var d=a-o.left-o.right,y=i-o.top-o.bottom,x=d3.select(this);e=v.xScale(),r=v.yScale();var b=n;n.forEach(function(t,e){t.seriesIndex=e,t.values=t.values.map(function(t,n){return t.index=n,t.seriesIndex=e,t})});var k=n. [...]
+//# sourceMappingURL=nvd3_stackedarea_stream.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_stream.js.map b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_stream.js.map
new file mode 100644
index 0000000..a6b9570
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/nvd3_stackedarea_stream.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///nvd3_stackedarea_stream.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1*********","webpack:///./static/repository/visualizations/nvd3/stackedarea_stream/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c*******","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0*******","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b*******","webpack:///./~/css-loader/lib/css [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/others_example.js b/config/plugins/visualizations/charts/static/repository/build/others_example.js
new file mode 100644
index 0000000..99998d6
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/others_example.js
@@ -0,0 +1,2 @@
+define(function(){return function(t){function e(r){if(a[r])return a[r].exports;var n=a[r]={exports:{},id:r,loaded:!1};return t[r].call(n.exports,n,n.exports,e),n.loaded=!0,n.exports}var a={};return e.m=t,e.c=a,e.p="",e(0)}([function(t,e,a){var r,n;r=[a(4),a(5)],n=function(t,e){return Backbone.View.extend({initialize:function(t){var a=t.chart;e.request({dataset_id:a.get("dataset_id"),dataset_groups:a.groups,success:function(e){var r=d3.scale.category20(),n=null;_.each(e,function(e,a){try{ [...]
+//# sourceMappingURL=others_example.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/others_example.js.map b/config/plugins/visualizations/charts/static/repository/build/others_example.js.map
new file mode 100644
index 0000000..07c3e8d
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/others_example.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///others_example.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1***********************","webpack:///./static/repository/visualizations/others/example/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c*********************","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0******************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b********************"] [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/others_heatmap.js b/config/plugins/visualizations/charts/static/repository/build/others_heatmap.js
new file mode 100644
index 0000000..1924c87
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/others_heatmap.js
@@ -0,0 +1,2 @@
+define(function(){return function(t){function e(i){if(a[i])return a[i].exports;var r=a[i]={exports:{},id:i,loaded:!1};return t[i].call(r.exports,r,r.exports,e),r.loaded=!0,r.exports}var a={};return e.m=t,e.c=a,e.p="",e(0)}({0:function(t,e,a){var i,r;i=[a(4),a(39)],r=function(t,e){return Backbone.View.extend({initialize:function(a){a.render=function(t,i){return new e({chart:a.chart,canvas_id:t,groups:i}),!0},t.panelHelper(a)}})}.apply(e,i),!(void 0!==r&&(t.exports=r))},1:function(t,e,a){v [...]
+//# sourceMappingURL=others_heatmap.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/others_heatmap.js.map b/config/plugins/visualizations/charts/static/repository/build/others_heatmap.js.map
new file mode 100644
index 0000000..7351525
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/others_heatmap.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///others_heatmap.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1*********************","webpack:///./static/repository/visualizations/others/heatmap/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c*******************","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0*****************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b*******************","webpa [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/others_heatmap_cluster.js b/config/plugins/visualizations/charts/static/repository/build/others_heatmap_cluster.js
new file mode 100644
index 0000000..e7f210e
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/others_heatmap_cluster.js
@@ -0,0 +1,2 @@
+define(function(){return function(t){function e(i){if(a[i])return a[i].exports;var r=a[i]={exports:{},id:i,loaded:!1};return t[i].call(r.exports,r,r.exports,e),r.loaded=!0,r.exports}var a={};return e.m=t,e.c=a,e.p="",e(0)}({0:function(t,e,a){var i,r;i=[a(4),a(20),a(39)],r=function(t,e,a){return Backbone.View.extend({initialize:function(i){e.request(i.chart,t.buildJobDictionary(i.chart,"heatmap"),function(e){var r=new Backbone.Collection;i.chart.groups.each(function(t,e){r.add({__data_col [...]
+//# sourceMappingURL=others_heatmap_cluster.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/others_heatmap_cluster.js.map b/config/plugins/visualizations/charts/static/repository/build/others_heatmap_cluster.js.map
new file mode 100644
index 0000000..366a27c
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/others_heatmap_cluster.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///others_heatmap_cluster.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1********************","webpack:///./static/repository/visualizations/others/heatmap_cluster/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c******************","webpack:///./static/repository/visualizations/utilities/tabular-utilities.js?ccb0****************","webpack:///./static/repository/visualizations/utilities/tabular-datasets.js?bf7b************** [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/pv_viewer.js b/config/plugins/visualizations/charts/static/repository/build/pv_viewer.js
new file mode 100644
index 0000000..23457a8
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/pv_viewer.js
@@ -0,0 +1,7 @@
+define(function(){return function(t){function e(n){if(r[n])return r[n].exports;var i=r[n]={exports:{},id:n,loaded:!1};return t[n].call(i.exports,i,i.exports,e),i.loaded=!0,i.exports}var r={};return e.m=t,e.c=r,e.p="",e(0)}({0:function(t,e,r){var n,i;n=[r(1),r(176)],i=function(t,e){return Backbone.Model.extend({initialize:function(r){var n=r.chart.settings,i=e.Viewer(document.getElementById(r.targets[0]),{quality:n.get("quality"),width:"auto",height:"auto",antialias:!0,outline:!0});t.get( [...]
+s=h}return-1},r.indexFirstLargerEqualThan=function(e,r,n){if(n=n||t,0===e.length||n(e[e.length-1],r))return-1;for(var i=0,o=e.length,s=i+o>>1;;){var a=e[s];n(r,a)?o=s:n(a,r)?i=s+1:o=s;var h=i+o>>1;if(h===s)return s;s=h}},r.indexLastSmallerThan=function(e,r,n){if(n=n||t,0===e.length||n(e[e.length-1],r))return e.length-1;if(n(r,e[0])||!n(e[0],r))return-1;for(var i=0,o=e.length,s=i+o>>1;;){var a=e[s];n(r,a)||!n(a,r)?o=s:i=s;var h=i+o>>1;if(h===s)return s;s=h}},r.indexLastSmallerEqualThan=fu [...]
+var _=l.residues()[f].centralAtom();null!==_&&(i.transformMat4(t,_.pos(),u),n(_,t))}}}();return n.derive(r,H,{setShowRelated:function(t){return t&&"asym"!==t&&null===this.structure().assembly(t)?void 0:(this._cS=t,t)},symWithIndex:function(t){if("asym"===this.showRelated())return null;var e=this.structure().assembly(this.showRelated());if(!e)return null;for(var r=e.generators(),n=0;n<r.length;++n){if(r[n].matrices().length>t)return r[n].matrix(t);t-=r[n].matrices().length}return null},sh [...]
+return function(u,c,f,_,d){var m=U([f],4*d.arcDetail,d.splineDetail),b=d.float32Allocator.request(3*f.length()),p=d.float32Allocator.request(4*f.length()),g=d.float32Allocator.request(3*f.length()),A=[],y=d.idPool.getContinuousRange(f.length());u.addIdRange(y),G(u,f,p,b,g,A,y,d);var C=u.vertArrayWithSpaceFor(m),R=l.catmullRomSpline(b,f.length(),d.splineDetail,d.strength,!1,d.float32Allocator),S=l.catmullRomSpline(g,f.length(),d.splineDetail,d.strength,!1,d.float32Allocator);c.setPerResid [...]
+return null}return t>=this._r.length||0>t?null:this._r[t]},centralAtom:function(){return this.isAminoacid()?this.atom("CA"):this.isNucleotide()?this.atom("C3'"):null},center:function(){var t=0,e=s.create();return this.eachAtom(function(r){s.add(e,e,r.pos()),t+=1}),t>0&&s.scale(e,e,1/t),e},isAminoacid:function(){return this._dg},isNucleotide:function(){return this._df}},n.derive(i,r,{_ev:function(){this._df=null!==this.atom("P")&&null!==this.atom("C3'"),this._dg=null!==this.atom("N")&&nul [...]
+s+=1;var c=r.getUint32(8,o),l=r.getUint32(84,o),f=!1;0!==l&&(f=0!==r.getUint32(48,o)),s+=8;var _=r.getUint32(s,o);for(s+=8,h=0;c>h;++h){var d=new Float32Array(3*_);f&&(s+=56);for(var v=0;3>v;++v){s+=4;for(var m=0;_>m;++m){var b=r.getFloat32(s,o);d[3*m+v]=b,s+=4}s+=4}n.addFrame(d)}return n}function n(t,e){var r=new XMLHttpRequest;r.open("GET",t,!0),r.responseType="arraybuffer",r.onload=function(){r.response&&e(new DataView(r.response))},r.send(null)}function i(t,e,i){n(t,function(t){var n [...]
+//# sourceMappingURL=pv_viewer.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/pv_viewer.js.map b/config/plugins/visualizations/charts/static/repository/build/pv_viewer.js.map
new file mode 100644
index 0000000..621a7d1
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/pv_viewer.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///pv_viewer.js","webpack:///webpack/bootstrap e3622d0033a89edad914?63f1************************","webpack:///./static/repository/visualizations/pv/viewer/wrapper.js","webpack:///./static/repository/utilities/utils.js?825c**********************","webpack:///./static/repository/plugins/pv/viewer.js"],"names":["define","modules","__webpack_require__","moduleId","installedModules","exports","module","id","loaded","call","m","c","p","0","__WEBPACK_AMD_DEFINE_ [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/registry.js b/config/plugins/visualizations/charts/static/repository/build/registry.js
new file mode 100644
index 0000000..0f304d7
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/registry.js
@@ -0,0 +1,2 @@
+define(function(){return function(e){function a(l){if(t[l])return t[l].exports;var o=t[l]={exports:{},id:l,loaded:!1};return e[l].call(o.exports,o,o.exports,a),o.loaded=!0,o.exports}var t={};return a.m=e,a.c=t,a.p="",a(0)}([function(e,a,t){var l,o;l=[],o=function(){return{nvd3_bar:t(14),nvd3_bar_horizontal:t(15),nvd3_bar_horizontal_stacked:t(16),nvd3_bar_stacked:t(17),nvd3_line:t(20),nvd3_line_focus:t(21),nvd3_scatter:t(23),nvd3_stackedarea:t(24),nvd3_stackedarea_full:t(25),nvd3_stackeda [...]
+//# sourceMappingURL=registry.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/registry.js.map b/config/plugins/visualizations/charts/static/repository/build/registry.js.map
new file mode 100644
index 0000000..37e0f9e
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/registry.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///registry.js","webpack:///webpack/bootstrap c98ec4c4223b0de74900","webpack:///./static/repository/build/registry.tmp.js","webpack:///./static/repository/visualizations/nvd3/common/config.js","webpack:///./static/repository/visualizations/utilities/tabular-form.js","webpack:///./static/repository/visualizations/jqplot/common/config.js","webpack:///./static/repository/visualizations/others/heatmap/config.js","webpack:///./static/repository/visualizations/ [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/build/registry.tmp.js b/config/plugins/visualizations/charts/static/repository/build/registry.tmp.js
new file mode 100644
index 0000000..ebd8167
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/build/registry.tmp.js
@@ -0,0 +1 @@
+define( [], function() { return {nvd3_bar:require( "visualizations/nvd3/bar/config" ), nvd3_bar_horizontal:require( "visualizations/nvd3/bar_horizontal/config" ), nvd3_bar_horizontal_stacked:require( "visualizations/nvd3/bar_horizontal_stacked/config" ), nvd3_bar_stacked:require( "visualizations/nvd3/bar_stacked/config" ), nvd3_line:require( "visualizations/nvd3/line/config" ), nvd3_line_focus:require( "visualizations/nvd3/line_focus/config" ), nvd3_scatter:require( "visualizations/nvd3/ [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/benfred/venn.css b/config/plugins/visualizations/charts/static/repository/plugins/benfred/venn.css
new file mode 100644
index 0000000..235c255
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/benfred/venn.css
@@ -0,0 +1,14 @@
+.venntooltip {
+    position: absolute;
+    text-align: center;
+    width: 128px;
+    height: 22px;
+    background: #333;
+    color: #ddd;
+    border: 0px;
+    border-radius: 8px;
+    opacity: 0;
+}
+.venn-area > .label {
+    font-size: 1em;
+}
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/benfred/venn.js b/config/plugins/visualizations/charts/static/repository/plugins/benfred/venn.js
new file mode 100644
index 0000000..58a29e7
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/benfred/venn.js
@@ -0,0 +1,1789 @@
+(function (global, factory) {
+    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
+    typeof define === 'function' && define.amd ? define(['exports'], factory) :
+    factory((global.venn = {}),global.d3,global.d3);
+}(this, function (exports) { 'use strict';
+
+    /** finds a zero of a function, given two starting points (which must
+     * have opposite signs) */
+    function bisect(f, a, b, parameters) {
+        parameters = parameters || {};
+        var maxIterations = parameters.maxIterations || 100,
+            tolerance = parameters.tolerance || 1e-10,
+            fA = f(a),
+            fB = f(b),
+            delta = b - a;
+
+        if (fA * fB > 0) {
+            throw new Error("Initial bisect points must have opposite signs");
+        }
+
+        if (fA === 0) return a;
+        if (fB === 0) return b;
+
+        for (var i = 0; i < maxIterations; ++i) {
+            delta /= 2;
+            var mid = a + delta,
+                fMid = f(mid);
+
+            if (fMid * fA >= 0) {
+                a = mid;
+            }
+
+            if ((Math.abs(delta) < tolerance) || (fMid === 0)) {
+                return mid;
+            }
+        }
+        return a + delta;
+    }
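+
+    // Illustrative usage (editor's sketch, not part of the upstream library):
+    //   var root = bisect(function(x) { return x * x - 2; }, 0, 2);
+    //   // root ~= 1.4142: converges to sqrt(2) within the default 1e-10 tolerance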
+
+    // we need some basic operations on vectors; rather than adding a
+    // dependency, just define them here
+    function zeros(x) { var r = new Array(x); for (var i = 0; i < x; ++i) { r[i] = 0; } return r; }
+    function zerosM(x,y) { return zeros(x).map(function() { return zeros(y); }); }
+
+    function dot(a, b) {
+        var ret = 0;
+        for (var i = 0; i < a.length; ++i) {
+            ret += a[i] * b[i];
+        }
+        return ret;
+    }
+
+    function norm2(a)  {
+        return Math.sqrt(dot(a, a));
+    }
+
+    function multiplyBy(a, c) {
+        for (var i = 0; i < a.length; ++i) {
+            a[i] *= c;
+        }
+    }
+
+    function weightedSum(ret, w1, v1, w2, v2) {
+        for (var j = 0; j < ret.length; ++j) {
+            ret[j] = w1 * v1[j] + w2 * v2[j];
+        }
+    }
+
+    /** minimizes a function using the downhill simplex method */
+    function fmin(f, x0, parameters) {
+        parameters = parameters || {};
+
+        var maxIterations = parameters.maxIterations || x0.length * 200,
+            nonZeroDelta = parameters.nonZeroDelta || 1.1,
+            zeroDelta = parameters.zeroDelta || 0.001,
+            minErrorDelta = parameters.minErrorDelta || 1e-6,
+            minTolerance = parameters.minTolerance || 1e-5,
+            rho = parameters.rho || 1,
+            chi = parameters.chi || 2,
+            psi = parameters.psi || -0.5,
+            sigma = parameters.sigma || 0.5,
+            callback = parameters.callback,
+            maxDiff,
+            temp;
+
+        // initialize simplex.
+        var N = x0.length,
+            simplex = new Array(N + 1);
+        simplex[0] = x0;
+        simplex[0].fx = f(x0);
+        for (var i = 0; i < N; ++i) {
+            var point = x0.slice();
+            point[i] = point[i] ? point[i] * nonZeroDelta : zeroDelta;
+            simplex[i+1] = point;
+            simplex[i+1].fx = f(point);
+        }
+
+        var sortOrder = function(a, b) { return a.fx - b.fx; };
+
+        var centroid = x0.slice(),
+            reflected = x0.slice(),
+            contracted = x0.slice(),
+            expanded = x0.slice();
+
+        for (var iteration = 0; iteration < maxIterations; ++iteration) {
+            simplex.sort(sortOrder);
+            if (callback) {
+                callback(simplex);
+            }
+
+            maxDiff = 0;
+            for (i = 0; i < N; ++i) {
+                maxDiff = Math.max(maxDiff, Math.abs(simplex[0][i] - simplex[1][i]));
+            }
+
+            if ((Math.abs(simplex[0].fx - simplex[N].fx) < minErrorDelta) &&
+                (maxDiff < minTolerance)) {
+                break;
+            }
+
+            // compute the centroid of all but the worst point in the simplex
+            for (i = 0; i < N; ++i) {
+                centroid[i] = 0;
+                for (var j = 0; j < N; ++j) {
+                    centroid[i] += simplex[j][i];
+                }
+                centroid[i] /= N;
+            }
+
+            // reflect the worst point past the centroid and compute the loss
+            // at the reflected point
+            var worst = simplex[N];
+            weightedSum(reflected, 1+rho, centroid, -rho, worst);
+            reflected.fx = f(reflected);
+
+            // if the reflected point is the best seen, then possibly expand
+            if (reflected.fx <= simplex[0].fx) {
+                weightedSum(expanded, 1+chi, centroid, -chi, worst);
+                expanded.fx = f(expanded);
+                if (expanded.fx < reflected.fx) {
+                    temp = simplex[N];
+                    simplex[N] = expanded;
+                    expanded = temp;
+                }  else {
+                    temp = simplex[N];
+                    simplex[N] = reflected;
+                    reflected = temp;
+                }
+            }
+
+            // if the reflected point is worse than the second worst, we need to
+            // contract
+            else if (reflected.fx >= simplex[N-1].fx) {
+                var shouldReduce = false;
+
+                if (reflected.fx > worst.fx) {
+                    // do an inside contraction
+                    weightedSum(contracted, 1+psi, centroid, -psi, worst);
+                    contracted.fx = f(contracted);
+                    if (contracted.fx < worst.fx) {
+                        temp = simplex[N];
+                        simplex[N] = contracted;
+                        contracted = temp;
+                    } else {
+                        shouldReduce = true;
+                    }
+                } else {
+                    // do an outside contraction
+                    weightedSum(contracted, 1-psi * rho, centroid, psi*rho, worst);
+                    contracted.fx = f(contracted);
+                    if (contracted.fx <= reflected.fx) {
+                        temp = simplex[N];
+                        simplex[N] = contracted;
+                        contracted = temp;
+                    } else {
+                        shouldReduce = true;
+                    }
+                }
+
+                if (shouldReduce) {
+                    // do reduction. doesn't actually happen that often
+                    for (i = 1; i < simplex.length; ++i) {
+                        weightedSum(simplex[i], 1 - sigma, simplex[0], sigma, simplex[i]);
+                        simplex[i].fx = f(simplex[i]);
+                    }
+                }
+            } else {
+                temp = simplex[N];
+                simplex[N] = reflected;
+                reflected = temp;
+            }
+
+        }
+
+        simplex.sort(sortOrder);
+        return {f : simplex[0].fx,
+                solution : simplex[0]};
+    }
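+
+    // Illustrative usage (editor's sketch, not part of the upstream library):
+    // fmin needs only function values, no gradient; for a smooth bowl-shaped
+    // objective the simplex collapses onto the minimum.
+    //   var result = fmin(function(x) {
+    //       return (x[0] - 1) * (x[0] - 1) + (x[1] + 2) * (x[1] + 2);
+    //   }, [0, 0]);
+    //   // result.solution ~= [1, -2], result.f ~= 0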
+
+    function minimizeConjugateGradient(f, initial, params) {
+        // allocate all memory up front here, keeping it out of the loop for
+        // performance reasons
+        var current = {x: initial.slice(), fx: 0, fxprime: initial.slice()},
+            next = {x: initial.slice(), fx: 0, fxprime: initial.slice()},
+            yk = initial.slice(),
+            pk, temp,
+            a = 1,
+            maxIterations;
+
+        params = params || {};
+        maxIterations = params.maxIterations || initial.length * 5;
+
+        current.fx = f(current.x, current.fxprime);
+        pk = current.fxprime.slice();
+        multiplyBy(pk, -1);
+
+        for (var i = 0; i < maxIterations; ++i) {
+            if (params.history) {
+                params.history.push({x: current.x.slice(),
+                                     fx: current.fx,
+                                     fxprime: current.fxprime.slice()});
+            }
+
+            a = wolfeLineSearch(f, pk, current, next, a);
+            if (!a) {
+                // failed to find a point that satisfies the Wolfe conditions;
+                // reset the direction for the next iteration
+                for (var j = 0; j < pk.length; ++j) {
+                    pk[j] = -1 * current.fxprime[j];
+                }
+            } else {
+                // update the direction using the Polak–Ribière CG method
+                weightedSum(yk, 1, next.fxprime, -1, current.fxprime);
+
+                var delta_k = dot(current.fxprime, current.fxprime),
+                    beta_k = Math.max(0, dot(yk, next.fxprime) / delta_k);
+
+                weightedSum(pk, beta_k, pk, -1, next.fxprime);
+
+                temp = current;
+                current = next;
+                next = temp;
+            }
+
+            if (norm2(current.fxprime) <= 1e-5) {
+                break;
+            }
+        }
+
+        if (params.history) {
+            params.history.push({x: current.x.slice(),
+                                 fx: current.fx,
+                                 fxprime: current.fxprime.slice()});
+        }
+
+        return current;
+    }
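+
+    // Illustrative usage (editor's sketch, not part of the upstream library):
+    // the objective must write its gradient into the second argument and
+    // return the loss, as constrainedMDSGradient below does.
+    //   var res = minimizeConjugateGradient(function(x, fxprime) {
+    //       fxprime[0] = 2 * (x[0] - 3);   // derivative of (x - 3)^2
+    //       return (x[0] - 3) * (x[0] - 3);
+    //   }, [0]);
+    //   // res.x ~= [3], res.fx ~= 0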
+
+    var c1 = 1e-6;
+    var c2 = 0.1;
+    /// searches along the line 'pk' for a point that satisfies the Wolfe conditions
+    /// See 'Numerical Optimization' by Nocedal and Wright, pp. 59-60
+    function wolfeLineSearch(f, pk, current, next, a) {
+        var phi0 = current.fx, phiPrime0 = dot(current.fxprime, pk),
+            phi = phi0, phi_old = phi0,
+            phiPrime = phiPrime0,
+            a0 = 0;
+
+        a = a || 1;
+
+        function zoom(a_lo, a_high, phi_lo) {
+            for (var iteration = 0; iteration < 16; ++iteration) {
+                a = (a_lo + a_high)/2;
+                weightedSum(next.x, 1.0, current.x, a, pk);
+                phi = next.fx = f(next.x, next.fxprime);
+                phiPrime = dot(next.fxprime, pk);
+
+                if ((phi > (phi0 + c1 * a * phiPrime0)) ||
+                    (phi >= phi_lo)) {
+                    a_high = a;
+
+                } else  {
+                    if (Math.abs(phiPrime) <= -c2 * phiPrime0) {
+                        return a;
+                    }
+
+                    if (phiPrime * (a_high - a_lo) >= 0) {
+                        a_high = a_lo;
+                    }
+
+                    a_lo = a;
+                    phi_lo = phi;
+                }
+            }
+
+            return 0;
+        }
+
+        for (var iteration = 0; iteration < 10; ++iteration) {
+            weightedSum(next.x, 1.0, current.x, a, pk);
+            phi = next.fx = f(next.x, next.fxprime);
+            phiPrime = dot(next.fxprime, pk);
+            if ((phi > (phi0 + c1 * a * phiPrime0)) ||
+                (iteration && (phi >= phi_old))) {
+                return zoom(a0, a, phi_old);
+            }
+
+            if (Math.abs(phiPrime) <= -c2 * phiPrime0) {
+                return a;
+            }
+
+            if (phiPrime >= 0) {
+                return zoom(a, a0, phi);
+            }
+
+            phi_old = phi;
+            a0 = a;
+            a *= 2;
+        }
+
+        return 0;
+    }
+
+    var SMALL = 1e-10;
+
+    /** Returns the intersection area of a bunch of circles (where each circle
+     is an object having x, y and radius properties) */
+    function intersectionArea(circles, stats) {
+        // get all the intersection points of the circles
+        var intersectionPoints = getIntersectionPoints(circles);
+
+        // filter out points that aren't included in all the circles
+        var innerPoints = intersectionPoints.filter(function (p) {
+            return containedInCircles(p, circles);
+        });
+
+        var arcArea = 0, polygonArea = 0, arcs = [], i;
+
+        // if we have intersection points that are within all the circles,
+        // then figure out the area contained by them
+        if (innerPoints.length > 1) {
+            // sort the points by angle from the center of the polygon, which lets
+            // us just iterate over points to get the edges
+            var center = getCenter(innerPoints);
+            for (i = 0; i < innerPoints.length; ++i) {
+                var p = innerPoints[i];
+                p.angle = Math.atan2(p.x - center.x, p.y - center.y);
+            }
+            innerPoints.sort(function(a,b) { return b.angle - a.angle;});
+
+            // iterate over all points, get arc between the points
+            // and update the areas
+            var p2 = innerPoints[innerPoints.length - 1];
+            for (i = 0; i < innerPoints.length; ++i) {
+                var p1 = innerPoints[i];
+
+                // polygon area updates easily ...
+                polygonArea += (p2.x + p1.x) * (p1.y - p2.y);
+
+                // updating the arc area is a little more involved
+                var midPoint = {x : (p1.x + p2.x) / 2,
+                                y : (p1.y + p2.y) / 2},
+                    arc = null;
+
+                for (var j = 0; j < p1.parentIndex.length; ++j) {
+                    if (p2.parentIndex.indexOf(p1.parentIndex[j]) > -1) {
+                        // figure out the angle halfway between the two points
+                        // on the current circle
+                        var circle = circles[p1.parentIndex[j]],
+                            a1 = Math.atan2(p1.x - circle.x, p1.y - circle.y),
+                            a2 = Math.atan2(p2.x - circle.x, p2.y - circle.y);
+
+                        var angleDiff = (a2 - a1);
+                        if (angleDiff < 0) {
+                            angleDiff += 2*Math.PI;
+                        }
+
+                        // and use that angle to figure out the width of the
+                        // arc
+                        var a = a2 - angleDiff/2,
+                            width = distance(midPoint, {
+                                x : circle.x + circle.radius * Math.sin(a),
+                                y : circle.y + circle.radius * Math.cos(a)
+                            });
+
+                        // pick the circle whose arc has the smallest width
+                        if ((arc === null) || (arc.width > width)) {
+                            arc = { circle : circle,
+                                    width : width,
+                                    p1 : p1,
+                                    p2 : p2};
+                        }
+                    }
+                }
+
+                if (arc !== null) {
+                    arcs.push(arc);
+                    arcArea += circleArea(arc.circle.radius, arc.width);
+                    p2 = p1;
+                }
+            }
+        } else {
+            // no intersection points: the circles are either disjoint or
+            // completely overlapped; figure out which by examining the smallest circle
+            var smallest = circles[0];
+            for (i = 1; i < circles.length; ++i) {
+                if (circles[i].radius < smallest.radius) {
+                    smallest = circles[i];
+                }
+            }
+
+            // check whether the smallest circle is completely contained in
+            // all the other circles
+            var disjoint = false;
+            for (i = 0; i < circles.length; ++i) {
+                if (distance(circles[i], smallest) > Math.abs(smallest.radius - circles[i].radius)) {
+                    disjoint = true;
+                    break;
+                }
+            }
+
+            if (disjoint) {
+                arcArea = polygonArea = 0;
+
+            } else {
+                arcArea = smallest.radius * smallest.radius * Math.PI;
+                arcs.push({circle : smallest,
+                           p1: { x: smallest.x,        y : smallest.y + smallest.radius},
+                           p2: { x: smallest.x - SMALL, y : smallest.y + smallest.radius},
+                           width : smallest.radius * 2 });
+            }
+        }
+
+        polygonArea /= 2;
+        if (stats) {
+            stats.area = arcArea + polygonArea;
+            stats.arcArea = arcArea;
+            stats.polygonArea = polygonArea;
+            stats.arcs = arcs;
+            stats.innerPoints = innerPoints;
+            stats.intersectionPoints = intersectionPoints;
+        }
+
+        return arcArea + polygonArea;
+    }
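+
+    // Illustrative usage (editor's sketch, not part of the upstream library):
+    //   var area = intersectionArea([{x: 0, y: 0, radius: 1},
+    //                                {x: 1, y: 0, radius: 1}]);
+    //   // area ~= 1.2284: the lens formed by two unit circles whose centers
+    //   // are 1 apart, i.e. 2*acos(1/2) - sqrt(3)/2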
+
+    /** returns whether a point is contained in all of a list of circles */
+    function containedInCircles(point, circles) {
+        for (var i = 0; i < circles.length; ++i) {
+            if (distance(point, circles[i]) > circles[i].radius + SMALL) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /** Gets all intersection points between a bunch of circles */
+    function getIntersectionPoints(circles) {
+        var ret = [];
+        for (var i = 0; i < circles.length; ++i) {
+            for (var j = i + 1; j < circles.length; ++j) {
+                var intersect = circleCircleIntersection(circles[i],
+                                                              circles[j]);
+                for (var k = 0; k < intersect.length; ++k) {
+                    var p = intersect[k];
+                    p.parentIndex = [i,j];
+                    ret.push(p);
+                }
+            }
+        }
+        return ret;
+    }
+
+    function circleIntegral(r, x) {
+        var y = Math.sqrt(r * r - x * x);
+        return x * y + r * r * Math.atan2(x, y);
+    }
+
+    /** Returns the area of the circular segment of a circle of radius r, cut off at the given width */
+    function circleArea(r, width) {
+        return circleIntegral(r, width - r) - circleIntegral(r, -r);
+    }
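+
+    // Editor's note: circleIntegral is an antiderivative of 2*sqrt(r*r - x*x)
+    // (the chord length at offset x), so circleArea(r, width) integrates the
+    // chord from -r to width - r; e.g. circleArea(1, 1) ~= PI/2, half a unit circle.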
+
+    /** Euclidean distance between two points */
+    function distance(p1, p2) {
+        return Math.sqrt((p1.x - p2.x) * (p1.x - p2.x) +
+                         (p1.y - p2.y) * (p1.y - p2.y));
+    }
+
+
+    /** Returns the overlap area of two circles of radius r1 and r2 whose
+    centers are separated by distance d. A simpler, faster special case of
+    circle intersection for exactly two circles */
+    function circleOverlap(r1, r2, d) {
+        // no overlap
+        if (d >= r1 + r2) {
+            return 0;
+        }
+
+        // completely overlapped
+        if (d <= Math.abs(r1 - r2)) {
+            return Math.PI * Math.min(r1, r2) * Math.min(r1, r2);
+        }
+
+        var w1 = r1 - (d * d - r2 * r2 + r1 * r1) / (2 * d),
+            w2 = r2 - (d * d - r1 * r1 + r2 * r2) / (2 * d);
+        return circleArea(r1, w1) + circleArea(r2, w2);
+    }
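+
+    // Worked example (editor's sketch): two unit circles with centers 1 apart
+    // give w1 = w2 = 1 - 1/2 = 1/2, and the two segments sum to the same
+    // lens area computed by intersectionArea above.
+    //   circleOverlap(1, 1, 1);   // ~= 1.2284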
+
+    /** Given two circles (each with x/y/radius attributes),
+    returns the intersection points if possible.
+    Note: doesn't handle the cases where there are infinitely many
+    intersection points (the circles are identical) or exactly one intersection point (tangent circles) */
+    function circleCircleIntersection(p1, p2) {
+        var d = distance(p1, p2),
+            r1 = p1.radius,
+            r2 = p2.radius;
+
+        // too far apart, or one circle contained in the other: no intersection points
+        if ((d >= (r1 + r2)) || (d <= Math.abs(r1 - r2))) {
+            return [];
+        }
+
+        var a = (r1 * r1 - r2 * r2 + d * d) / (2 * d),
+            h = Math.sqrt(r1 * r1 - a * a),
+            x0 = p1.x + a * (p2.x - p1.x) / d,
+            y0 = p1.y + a * (p2.y - p1.y) / d,
+            rx = -(p2.y - p1.y) * (h / d),
+            ry = -(p2.x - p1.x) * (h / d);
+
+        return [{x: x0 + rx, y : y0 - ry },
+                {x: x0 - rx, y : y0 + ry }];
+    }
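+
+    // Illustrative usage (editor's sketch, not part of the upstream library):
+    //   circleCircleIntersection({x: 0, y: 0, radius: 1},
+    //                            {x: 1, y: 0, radius: 1});
+    //   // => [{x: 0.5, y: ~0.866}, {x: 0.5, y: ~-0.866}]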
+
+    /** Returns the centroid of a bunch of points */
+    function getCenter(points) {
+        var center = {x: 0, y: 0};
+        for (var i = 0; i < points.length; ++i) {
+            center.x += points[i].x;
+            center.y += points[i].y;
+        }
+        center.x /= points.length;
+        center.y /= points.length;
+        return center;
+    }
+
+    /** Given a list of set objects and their corresponding overlaps,
+    updates the (x, y, radius) attributes on each set such that their positions
+    roughly correspond to the desired overlaps */
+    function venn(areas, parameters) {
+        parameters = parameters || {};
+        parameters.maxIterations = parameters.maxIterations || 500;
+        var initialLayout = parameters.initialLayout || bestInitialLayout;
+
+        // add in missing pairwise areas as having 0 size
+        areas = addMissingAreas(areas);
+
+        // initial layout is done greedily
+        var circles = initialLayout(areas);
+
+        // transform x/y coordinates to a vector to optimize
+        var initial = [], setids = [], setid;
+        for (setid in circles) {
+            if (circles.hasOwnProperty(setid)) {
+                initial.push(circles[setid].x);
+                initial.push(circles[setid].y);
+                setids.push(setid);
+            }
+        }
+
+        // optimize initial layout from our loss function
+        var totalFunctionCalls = 0;
+        var solution = fmin(
+            function(values) {
+                totalFunctionCalls += 1;
+                var current = {};
+                for (var i = 0; i < setids.length; ++i) {
+                    var setid = setids[i];
+                    current[setid] = {x: values[2 * i],
+                                      y: values[2 * i + 1],
+                                      radius : circles[setid].radius,
+                                     // size : circles[setid].size
+                                     };
+                }
+                return lossFunction(current, areas);
+            },
+            initial,
+            parameters);
+
+        // transform solution vector back to x/y points
+        var positions = solution.solution;
+        for (var i = 0; i < setids.length; ++i) {
+            setid = setids[i];
+            circles[setid].x = positions[2 * i];
+            circles[setid].y = positions[2 * i + 1];
+        }
+
+        return circles;
+    }
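+
+    // Illustrative usage (editor's sketch, not part of the upstream library):
+    // each input area names either a single set (its total size) or a pair of
+    // sets (their intersection size).
+    //   var circles = venn([{sets: ['A'], size: 12},
+    //                       {sets: ['B'], size: 12},
+    //                       {sets: ['A', 'B'], size: 2}]);
+    //   // circles.A and circles.B are {x, y, radius} objects with
+    //   // radius = Math.sqrt(size / Math.PI)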
+
+    var SMALL$1 = 1e-10;
+
+    /** Returns the distance necessary for two circles of radii r1 and r2 to
+    have the overlap area 'overlap' */
+    function distanceFromIntersectArea(r1, r2, overlap) {
+        // handle completely overlapped circles
+        if (Math.min(r1, r2) * Math.min(r1,r2) * Math.PI <= overlap + SMALL$1) {
+            return Math.abs(r1 - r2);
+        }
+
+        return bisect(function(distance) {
+            return circleOverlap(r1, r2, distance) - overlap;
+        }, 0, r1 + r2);
+    }
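+
+    // Sanity check (editor's sketch): distanceFromIntersectArea inverts
+    // circleOverlap by bisection, so round-tripping recovers the distance.
+    //   distanceFromIntersectArea(1, 1, circleOverlap(1, 1, 1));   // ~= 1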
+
+    /** Missing pair-wise intersection area data can cause problems:
+     treating it as unknown means that the sets will be laid out overlapping,
+     which isn't what people expect. To reflect that we want disjoint sets
+     here, set the overlap to 0 for all missing pairwise set intersections */
+    function addMissingAreas(areas) {
+        areas = areas.slice();
+
+        // two circle intersections that aren't defined
+        var ids = [], pairs = {}, i, j, a, b;
+        for (i = 0; i < areas.length; ++i) {
+            var area = areas[i];
+            if (area.sets.length == 1) {
+                ids.push(area.sets[0]);
+            } else if (area.sets.length == 2) {
+                a = area.sets[0];
+                b = area.sets[1];
+                pairs[[a, b]] = true;
+                pairs[[b, a]] = true;
+            }
+        }
+        ids.sort(function(a, b) { return a < b ? -1 : a > b ? 1 : 0; }); // a comparator must return a number, not a boolean
+
+        for (i = 0; i < ids.length; ++i) {
+            a = ids[i];
+            for (j = i + 1; j < ids.length; ++j) {
+                b = ids[j];
+                if (!([a, b] in pairs)) {
+                    areas.push({'sets': [a, b],
+                                'size': 0});
+                }
+            }
+        }
+        return areas;
+    }
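+
+    // Illustrative usage (editor's sketch, not part of the upstream library):
+    //   addMissingAreas([{sets: ['A'], size: 4}, {sets: ['B'], size: 4},
+    //                    {sets: ['C'], size: 4}, {sets: ['A', 'B'], size: 1}]);
+    //   // returns a copy with {sets: ['A','C'], size: 0} and
+    //   // {sets: ['B','C'], size: 0} appended, so A/C and B/C stay disjoint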
+
+    /// Returns two matrices, one of the Euclidean distances between the sets
+    /// and the other indicating whether there are subset or disjoint set relationships
+    function getDistanceMatrices(areas, sets, setids) {
+        // initialize an empty distance matrix between all the points
+        var distances = zerosM(sets.length, sets.length),
+            constraints = zerosM(sets.length, sets.length);
+
+        // compute required distances between all the sets such that
+        // the areas match
+        areas.filter(function(x) { return x.sets.length == 2; })
+            .map(function(current) {
+            var left = setids[current.sets[0]],
+                right = setids[current.sets[1]],
+                r1 = Math.sqrt(sets[left].size / Math.PI),
+                r2 = Math.sqrt(sets[right].size / Math.PI),
+                distance = distanceFromIntersectArea(r1, r2, current.size);
+
+            distances[left][right] = distances[right][left] = distance;
+
+            // also update the constraints to indicate if it's a subset or
+            // disjoint relationship
+            var c = 0;
+            if (current.size + 1e-10 >= Math.min(sets[left].size,
+                                                 sets[right].size)) {
+                c = 1;
+            } else if (current.size <= 1e-10) {
+                c = -1;
+            }
+            constraints[left][right] = constraints[right][left] = c;
+        });
+
+        return {distances: distances, constraints: constraints};
+    }
+
+    /// computes the gradient and loss simultaneously for our constrained MDS optimizer
+    function constrainedMDSGradient(x, fxprime, distances, constraints) {
+        var loss = 0, i;
+        for (i = 0; i < fxprime.length; ++i) {
+            fxprime[i] = 0;
+        }
+
+        for (i = 0; i < distances.length; ++i) {
+            var xi = x[2 * i], yi = x[2 * i + 1];
+            for (var j = i + 1; j < distances.length; ++j) {
+                var xj = x[2 * j], yj = x[2 * j + 1],
+                    dij = distances[i][j],
+                    constraint = constraints[i][j];
+
+                var squaredDistance = (xj - xi) * (xj - xi) + (yj - yi) * (yj - yi),
+                    distance = Math.sqrt(squaredDistance),
+                    delta = squaredDistance - dij * dij;
+
+                if (((constraint > 0) && (distance <= dij)) ||
+                    ((constraint < 0) && (distance >= dij))) {
+                    continue;
+                }
+
+                loss += 2 * delta * delta;
+
+                fxprime[2*i]     += 4 * delta * (xi - xj);
+                fxprime[2*i + 1] += 4 * delta * (yi - yj);
+
+                fxprime[2*j]     += 4 * delta * (xj - xi);
+                fxprime[2*j + 1] += 4 * delta * (yj - yi);
+            }
+        }
+        return loss;
+    }
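+
+    // A hedged summary of the objective above: with
+    //   delta = ||p_i - p_j||^2 - d_ij^2,
+    // every unconstrained pair, plus every subset/disjoint pair whose
+    // inequality is violated, contributes 2 * delta^2 to the loss (with a
+    // matching term accumulated into fxprime), pushing the pairwise
+    // distances toward the target d_ij values.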
+
+    /// returns the better-performing of the constrained MDS and greedy layouts
+    function bestInitialLayout(areas, params) {
+        var initial = greedyLayout(areas, params);
+
+        // greedyLayout is sufficient for all 2/3-circle cases. try out
+        // constrained MDS for higher-order problems, and take its output
+        // if it outperforms greedy. (greedy is aesthetically better on
+        // 2/3 circles since it axis-aligns)
+        if (areas.length >= 8) {
+            var constrained  = constrainedMDSLayout(areas, params),
+                constrainedLoss = lossFunction(constrained, areas),
+                greedyLoss = lossFunction(initial, areas);
+
+            if (constrainedLoss + 1e-8 < greedyLoss) {
+                initial = constrained;
+            }
+        }
+        return initial;
+    }
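+
+    // Usage sketch (editorial; variable names are hypothetical):
+    //   var layout = bestInitialLayout(areas, {restarts: 10});
+    //   // layout maps each set id to a circle ({x: ..., y: ..., radius: ...});
+    //   // the restarts option only matters when constrained MDS is tried.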
+
+    /// use the constrained MDS variant to generate an initial layout
+    function constrainedMDSLayout(areas, params) {
+        params = params || {};
+        var restarts = params.restarts || 10;
+
+        // bidirectionally map sets to a row id (so we can create a matrix)
+        var sets = [], setids = {}, i;
+        for (i = 0; i < areas.length; ++i ) {
+            var area = areas[i];
+            if (area.sets.length == 1) {
+                setids[area.sets[0]] = sets.length;
+                sets.push(area);
+            }
+        }
+
+        var matrices = getDistanceMatrices(areas, sets, setids),
+            distances = matrices.distances,
+            constraints = matrices.constraints;
+
+        // keep distances bounded, things get messed up otherwise.
+        // TODO: proper preconditioner?
+        var norm = norm2(distances.map(norm2))/(distances.length);
+        distances = distances.map(function (row) {
+            return row.map(function (value) { return value / norm; });});
+
+        var obj = function(x, fxprime) {
+            return constrainedMDSGradient(x, fxprime, distances, constraints);
+        };
+
+        var best, current;
+        for (i = 0; i < restarts; ++i) {
+            var initial = zeros(distances.length*2).map(Math.random);
+
+            current = minimizeConjugateGradient(obj, initial, params);
+            if (!best || (current.fx < best.fx)) {
+                best = current;
+            }
+        }
+        var positions = best.x;
+
+        // translate rows back to (x,y,radius) coordinates
+        var circles = {};
+        for (i = 0; i < sets.length; ++i) {
+            var set = sets[i];
+            circles[set.sets[0]] = {
+                x: positions[2*i] * norm,
+                y: positions[2*i + 1] * norm,
+                radius:  Math.sqrt(set.size / Math.PI)
+            };
+        }
+
+        if (params.history) {
+            for (i = 0; i < params.history.length; ++i) {
+                multiplyBy(params.history[i].x, norm);
+            }
+        }
+        return circles;
+    }
+
+    /** Lays out a Venn diagram greedily, going from most overlapped sets to
+    least overlapped, attempting to position each new set such that the
+    overlapping areas to already positioned sets are basically right */
+    function greedyLayout(areas) {
+        // define a circle for each set
+        var circles = {}, setOverlaps = {}, set, rowid = 0;
+        for (var i = 0; i < areas.length; ++i) {
+            var area = areas[i];
+            if (area.sets.length == 1) {
+                set = area.sets[0];
+                // note: circles is a plain object, so it has no .length;
+                // track the row id with an explicit counter instead
+                circles[set] = {x: 1e10, y: 1e10,
+                                rowid: rowid++,
+                                size: area.size,
+                                radius: Math.sqrt(area.size / Math.PI)};
+                setOverlaps[set] = [];
+            }
+        }
+        areas = areas.filter(function(a) { return a.sets.length == 2; });
+
+        // map each set to a list of all the other sets that overlap it
+        for (i = 0; i < areas.length; ++i) {
+            var current = areas[i];
+            var weight = current.hasOwnProperty('weight') ? current.weight : 1.0;
+            var left = current.sets[0], right = current.sets[1];
+
+            // completely overlapped circles shouldn't be positioned early here
+            if (current.size + SMALL$1 >= Math.min(circles[left].size,
+                                                 circles[right].size)) {
+                weight = 0;
+            }
+
+            setOverlaps[left].push ({set:right, size:current.size, weight:weight});
+            setOverlaps[right].push({set:left,  size:current.size, weight:weight});
+        }
+
+        // get list of most overlapped sets
+        var mostOverlapped = [];
+        for (set in setOverlaps) {
+            if (setOverlaps.hasOwnProperty(set)) {
+                var size = 0;
+                for (i = 0; i < setOverlaps[set].length; ++i) {
+                    size += setOverlaps[set][i].size * setOverlaps[set][i].weight;
+                }
+
+                mostOverlapped.push({set: set, size:size});
+            }
+        }
+
+        // sort by size desc
+        function sortOrder(a, b) {
+            return b.size - a.size;
+        }
+        mostOverlapped.sort(sortOrder);
+
+        // keep track of what sets have been laid out
+        var positioned = {};
+        function isPositioned(element) {
+            return element.set in positioned;
+        }
+
+        // adds a point to the output
+        function positionSet(point, index) {
+            circles[index].x = point.x;
+            circles[index].y = point.y;
+            positioned[index] = true;
+        }
+
+        // add most overlapped set at (0,0)
+        positionSet({x: 0, y: 0}, mostOverlapped[0].set);
+
+        // get distances between all points. TODO, necessary?
+        // answer: probably not
+        // var distances = venn.getDistanceMatrices(circles, areas).distances;
+        for (i = 1; i < mostOverlapped.length; ++i) {
+            var setIndex = mostOverlapped[i].set,
+                overlap = setOverlaps[setIndex].filter(isPositioned);
+            set = circles[setIndex];
+            overlap.sort(sortOrder);
+
+            if (overlap.length === 0) {
+                // this shouldn't happen anymore with addMissingAreas
+                throw "ERROR: missing pairwise overlap information";
+            }
+
+            var points = [];
+            for (var j = 0; j < overlap.length; ++j) {
+                // get appropriate distance from most overlapped already added set
+                var p1 = circles[overlap[j].set],
+                    d1 = distanceFromIntersectArea(set.radius, p1.radius,
+                                                   overlap[j].size);
+
+                // sample positions at 90 degrees for maximum aesthetics
+                points.push({x : p1.x + d1, y : p1.y});
+                points.push({x : p1.x - d1, y : p1.y});
+                points.push({y : p1.y + d1, x : p1.x});
+                points.push({y : p1.y - d1, x : p1.x});
+
+                // if we have at least 2 overlaps, then figure out where the
+                // set should be positioned analytically and try those too
+                for (var k = j + 1; k < overlap.length; ++k) {
+                    var p2 = circles[overlap[k].set],
+                        d2 = distanceFromIntersectArea(set.radius, p2.radius,
+                                                       overlap[k].size);
+
+                    var extraPoints = circleCircleIntersection(
+                        { x: p1.x, y: p1.y, radius: d1},
+                        { x: p2.x, y: p2.y, radius: d2});
+
+                    for (var l = 0; l < extraPoints.length; ++l) {
+                        points.push(extraPoints[l]);
+                    }
+                }
+            }
+
+            // we have some candidate positions for the set; examine the loss
+            // at each one to figure out where to place it
+            var bestLoss = 1e50, bestPoint = points[0];
+            for (j = 0; j < points.length; ++j) {
+                circles[setIndex].x = points[j].x;
+                circles[setIndex].y = points[j].y;
+                var loss = lossFunction(circles, areas);
+                if (loss < bestLoss) {
+                    bestLoss = loss;
+                    bestPoint = points[j];
+                }
+            }
+
+            positionSet(bestPoint, setIndex);
+        }
+
+        return circles;
+    }
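+
+    // Sketch of the expected shapes (editorial): for input such as
+    //   [{sets: ['A'], size: 12}, {sets: ['B'], size: 12},
+    //    {sets: ['A', 'B'], size: 2}]
+    // greedyLayout returns an object keyed by set id, where each value
+    // holds the circle's x, y, size and radius (radius = sqrt(size / PI)).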
+
+    /** Given a bunch of sets, and the desired overlaps between these sets - computes
+    the distance from the actual overlaps to the desired overlaps. Note that
+    this method ignores overlaps of more than 2 circles */
+    function lossFunction(sets, overlaps) {
+        var output = 0;
+
+        function getCircles(indices) {
+            return indices.map(function(i) { return sets[i]; });
+        }
+
+        for (var i = 0; i < overlaps.length; ++i) {
+            var area = overlaps[i], overlap;
+            if (area.sets.length == 1) {
+                continue;
+            } else if (area.sets.length == 2) {
+                var left = sets[area.sets[0]],
+                    right = sets[area.sets[1]];
+                overlap = circleOverlap(left.radius, right.radius,
+                                        distance(left, right));
+            } else {
+                overlap = intersectionArea(getCircles(area.sets));
+            }
+
+            var weight = area.hasOwnProperty('weight') ? area.weight : 1.0;
+            output += weight * (overlap - area.size) * (overlap - area.size);
+        }
+
+        return output;
+    }
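+
+    // In other words (editorial): loss = sum over the desired overlaps of
+    //   weight * (actualOverlap - desiredOverlap)^2,
+    // where single-set entries are skipped and 3+ way overlaps are
+    // measured with intersectionArea.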
+
+    // rotates and shifts a group of circles so the layout points in the given orientation
+    function orientateCircles(circles, orientation, orientationOrder) {
+        if (orientationOrder === null) {
+            circles.sort(function (a, b) { return b.radius - a.radius; });
+        } else {
+            circles.sort(orientationOrder);
+        }
+
+        var i;
+        // shift circles so largest circle is at (0, 0)
+        if (circles.length > 0) {
+            var largestX = circles[0].x,
+                largestY = circles[0].y;
+
+            for (i = 0; i < circles.length; ++i) {
+                circles[i].x -= largestX;
+                circles[i].y -= largestY;
+            }
+        }
+
+        // rotate circles so that second largest is at an angle of 'orientation'
+        // from largest
+        if (circles.length > 1) {
+            var rotation = Math.atan2(circles[1].x, circles[1].y) - orientation,
+                c = Math.cos(rotation),
+                s = Math.sin(rotation), x, y;
+
+            for (i = 0; i < circles.length; ++i) {
+                x = circles[i].x;
+                y = circles[i].y;
+                circles[i].x = c * x - s * y;
+                circles[i].y = s * x + c * y;
+            }
+        }
+
+        // mirror the solution if the third largest circle is above the
+        // line specified by the first two circles
+        if (circles.length > 2) {
+            var angle = Math.atan2(circles[2].x, circles[2].y) - orientation;
+            while (angle < 0) { angle += 2 * Math.PI; }
+            while (angle > 2 * Math.PI) { angle -= 2 * Math.PI; }
+            if (angle > Math.PI) {
+                var slope = circles[1].y / (1e-10 + circles[1].x);
+                for (i = 0; i < circles.length; ++i) {
+                    var d = (circles[i].x + slope * circles[i].y) / (1 + slope*slope);
+                    circles[i].x = 2 * d - circles[i].x;
+                    circles[i].y = 2 * d * slope - circles[i].y;
+                }
+            }
+        }
+    }
+
+    function disjointCluster(circles) {
+        // union-find clustering to get disjoint sets
+        circles.map(function(circle) { circle.parent = circle; });
+
+        // path compression step in union find
+        function find(circle) {
+            if (circle.parent !== circle) {
+                circle.parent = find(circle.parent);
+            }
+            return circle.parent;
+        }
+
+        function union(x, y) {
+            var xRoot = find(x), yRoot = find(y);
+            xRoot.parent = yRoot;
+        }
+
+        // get the union of all overlapping sets
+        for (var i = 0; i < circles.length; ++i) {
+            for (var j = i + 1; j < circles.length; ++j) {
+                var maxDistance = circles[i].radius + circles[j].radius;
+                if (distance(circles[i], circles[j]) + 1e-10 < maxDistance) {
+                    union(circles[j], circles[i]);
+                }
+            }
+        }
+
+        // find all the disjoint clusters and group them together
+        var disjointClusters = {}, setid;
+        for (i = 0; i < circles.length; ++i) {
+            setid = find(circles[i]).parent.setid;
+            if (!(setid in disjointClusters)) {
+                disjointClusters[setid] = [];
+            }
+            disjointClusters[setid].push(circles[i]);
+        }
+
+        // cleanup bookkeeping
+        circles.map(function(circle) { delete circle.parent; });
+
+        // return in more usable form
+        var ret = [];
+        for (setid in disjointClusters) {
+            if (disjointClusters.hasOwnProperty(setid)) {
+                ret.push(disjointClusters[setid]);
+            }
+        }
+        return ret;
+    }
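+
+    // Illustrative example (not from upstream): two overlapping circles
+    // plus one far-away circle yield e.g. [[c1, c2], [c3]] -- each inner
+    // list is one connected cluster of mutually overlapping circles.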
+
+    function getBoundingBox(circles) {
+        var minMax = function(d) {
+            var hi = Math.max.apply(null, circles.map(
+                                    function(c) { return c[d] + c.radius; } )),
+                lo = Math.min.apply(null, circles.map(
+                                    function(c) { return c[d] - c.radius;} ));
+            return {max:hi, min:lo};
+        };
+
+        return {xRange: minMax('x'), yRange: minMax('y')};
+    }
+
+    function normalizeSolution(solution, orientation, orientationOrder) {
+        if (orientation === null){
+            orientation = Math.PI/2;
+        }
+
+        // work with a list instead of a dictionary, and take a copy so we
+        // don't mutate input
+        var circles = [], i, setid;
+        for (setid in solution) {
+            if (solution.hasOwnProperty(setid)) {
+                var previous = solution[setid];
+                circles.push({x: previous.x,
+                              y: previous.y,
+                              radius: previous.radius,
+                              setid: setid});
+            }
+        }
+
+        // get all the disjoint clusters
+        var clusters = disjointCluster(circles);
+
+        // orientate all disjoint sets, get sizes
+        for (i = 0; i < clusters.length; ++i) {
+            orientateCircles(clusters[i], orientation, orientationOrder);
+            var bounds = getBoundingBox(clusters[i]);
+            clusters[i].size = (bounds.xRange.max - bounds.xRange.min) * (bounds.yRange.max - bounds.yRange.min);
+            clusters[i].bounds = bounds;
+        }
+        clusters.sort(function(a, b) { return b.size - a.size; });
+
+        // orientate the largest at 0,0, and get the bounds
+        circles = clusters[0];
+        var returnBounds = circles.bounds;
+
+        var spacing = (returnBounds.xRange.max - returnBounds.xRange.min)/50;
+
+        function addCluster(cluster, right, bottom) {
+            if (!cluster) return;
+
+            var bounds = cluster.bounds, xOffset, yOffset, centreing;
+
+            if (right) {
+                xOffset = returnBounds.xRange.max - bounds.xRange.min + spacing;
+            } else {
+                xOffset = returnBounds.xRange.max - bounds.xRange.max;
+                centreing = (bounds.xRange.max - bounds.xRange.min) / 2 -
+                            (returnBounds.xRange.max - returnBounds.xRange.min) / 2;
+                if (centreing < 0) xOffset += centreing;
+            }
+
+            if (bottom) {
+                yOffset = returnBounds.yRange.max - bounds.yRange.min + spacing;
+            } else {
+                yOffset = returnBounds.yRange.max - bounds.yRange.max;
+                centreing = (bounds.yRange.max - bounds.yRange.min) / 2 -
+                            (returnBounds.yRange.max - returnBounds.yRange.min) / 2;
+                if (centreing < 0) yOffset += centreing;
+            }
+
+            for (var j = 0; j < cluster.length; ++j) {
+                cluster[j].x += xOffset;
+                cluster[j].y += yOffset;
+                circles.push(cluster[j]);
+            }
+        }
+
+        // we start with one cluster (in the top left); lay out the next
+        // three clusters relative to it in a grid, then repeat
+        var index = 1;
+        while (index < clusters.length) {
+            addCluster(clusters[index], true, false);
+            addCluster(clusters[index+1], false, true);
+            addCluster(clusters[index+2], true, true);
+            index += 3;
+
+            // recompute the bounds to include the newly placed clusters
+            returnBounds = getBoundingBox(circles);
+        }
+
+        // convert back to solution form
+        var ret = {};
+        for (i = 0; i < circles.length; ++i) {
+            ret[circles[i].setid] = circles[i];
+        }
+        return ret;
+    }
+
+    /** Scales a solution from venn.venn or venn.greedyLayout such that it fits in
+    a rectangle of width/height - with padding around the borders. also
+    centers the diagram in the available space at the same time */
+    function scaleSolution(solution, width, height, padding) {
+        var circles = [], setids = [];
+        for (var setid in solution) {
+            if (solution.hasOwnProperty(setid)) {
+                setids.push(setid);
+                circles.push(solution[setid]);
+            }
+        }
+
+        width -= 2*padding;
+        height -= 2*padding;
+
+        var bounds = getBoundingBox(circles),
+            xRange = bounds.xRange,
+            yRange = bounds.yRange,
+            xScaling = width  / (xRange.max - xRange.min),
+            yScaling = height / (yRange.max - yRange.min),
+            scaling = Math.min(yScaling, xScaling),
+
+            // while we're at it, center the diagram too
+            xOffset = (width -  (xRange.max - xRange.min) * scaling) / 2,
+            yOffset = (height - (yRange.max - yRange.min) * scaling) / 2;
+
+        var scaled = {};
+        for (var i = 0; i < circles.length; ++i) {
+            var circle = circles[i];
+            scaled[setids[i]] = {
+                radius: scaling * circle.radius,
+                x: padding + xOffset + (circle.x - xRange.min) * scaling,
+                y: padding + yOffset + (circle.y - yRange.min) * scaling,
+            };
+        }
+
+        return scaled;
+    }
+
+    /*global console:true*/
+
+    function VennDiagram() {
+        var width = 600,
+            height = 350,
+            padding = 15,
+            duration = 1000,
+            orientation = Math.PI / 2,
+            normalize = true,
+            wrap = true,
+            styled = true,
+            fontSize = null,
+            orientationOrder = null,
+
+            // mimic the behaviour of d3.scale.category10 from the previous
+            // version of d3
+            colourMap = {},
+
+            // so this is the same as d3.schemeCategory10, which is only defined in d3 4.0
+            // since we can support older versions of d3 as long as we don't force this,
+            // I'm hackily redefining below. TODO: remove this and change to d3.schemeCategory10
+            colourScheme = ["#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd", "#8c564b", "#e377c2", "#7f7f7f", "#bcbd22", "#17becf"],
+            colourIndex = 0,
+            colours = function(key) {
+                if (key in colourMap) {
+                    return colourMap[key];
+                }
+                var ret = colourMap[key] = colourScheme[colourIndex];
+                colourIndex += 1;
+                if (colourIndex >= colourScheme.length) {
+                    colourIndex = 0;
+                }
+                return ret;
+            },
+            layoutFunction = venn;
+
+        function chart(selection) {
+            var data = selection.datum();
+            var solution = layoutFunction(data);
+            if (normalize) {
+                solution = normalizeSolution(solution,
+                                             orientation,
+                                             orientationOrder);
+            }
+            var circles = scaleSolution(solution, width, height, padding);
+            var textCentres = computeTextCentres(circles, data);
+
+            // create svg if not already existing
+            selection.selectAll("svg").data([circles]).enter().append("svg");
+
+            var svg = selection.select("svg")
+                .attr("width", width)
+                .attr("height", height);
+
+            // to properly transition intersection areas, we need the
+            // previous circles locations. load from elements
+            var previous = {}, hasPrevious = false;
+            svg.selectAll("g path").each(function (d) {
+                var path = d3.select(this).attr("d");
+                if ((d.sets.length == 1) && path) {
+                    hasPrevious = true;
+                    previous[d.sets[0]] = circleFromPath(path);
+                }
+            });
+
+            // interpolate intersection area paths between previous and
+            // current paths
+            var pathTween = function(d) {
+                return function(t) {
+                    var c = d.sets.map(function(set) {
+                        var start = previous[set], end = circles[set];
+                        if (!start) {
+                            start = {x : width/2, y : height/2, radius : 1};
+                        }
+                        if (!end) {
+                            end = {x : width/2, y : height/2, radius : 1};
+                        }
+                        return {'x' : start.x * (1 - t) + end.x * t,
+                                'y' : start.y * (1 - t) + end.y * t,
+                                'radius' : start.radius * (1 - t) + end.radius * t};
+                    });
+                    return intersectionAreaPath(c);
+                };
+            };
+
+            // update data, joining on the set ids
+            var nodes = svg.selectAll("g")
+                .data(data, function(d) { return d.sets; });
+
+            // create new nodes
+            var enter = nodes.enter()
+                .append('g')
+                .attr("class", function(d) {
+                    return "venn-area venn-" +
+                        (d.sets.length == 1 ? "circle" : "intersection");
+                })
+                .attr("data-venn-sets", function(d) {
+                    return d.sets.join("_");
+                });
+
+            var enterPath = enter.append("path"),
+                enterText = enter.append("text")
+                .attr("class", "label")
+                .text(function (d) { return label(d); } )
+                .attr("text-anchor", "middle")
+                .attr("dy", ".35em")
+                .attr("x", width/2)
+                .attr("y", height/2);
+
+
+            // apply minimal style if wanted
+            if (styled) {
+                enterPath.style("fill-opacity", "0")
+                    .filter(function (d) { return d.sets.length == 1; } )
+                    .style("fill", function(d) { return colours(label(d)); })
+                    .style("fill-opacity", ".25");
+
+                enterText
+                    .style("fill", function(d) { return d.sets.length == 1 ? colours(label(d)) : "#444"; });
+            }
+
+            // update existing, using pathTween if necessary
+            var update = selection;
+            if (hasPrevious) {
+                update = selection.transition("venn").duration(duration);
+                update.selectAll("path")
+                    .attrTween("d", pathTween);
+            } else {
+                update.selectAll("path")
+                    .attr("d", function(d) {
+                        return intersectionAreaPath(d.sets.map(function (set) { return circles[set]; }));
+                    });
+            }
+
+            var updateText = update.selectAll("text")
+                .filter(function (d) { return d.sets in textCentres; })
+                .text(function (d) { return label(d); } )
+                .attr("x", function(d) { return Math.floor(textCentres[d.sets].x);})
+                .attr("y", function(d) { return Math.floor(textCentres[d.sets].y);});
+
+            if (wrap) {
+                if (hasPrevious) {
+                    // d3 4.0 uses 'on' for events on transitions,
+                    // but d3 3.0 used 'each' instead. switch appropriately
+                    if ('on' in updateText) {
+                        updateText.on("end", wrapText(circles, label));
+                    } else {
+                        updateText.each("end", wrapText(circles, label));
+                    }
+                } else {
+                    updateText.each(wrapText(circles, label));
+                }
+            }
+
+            // remove old
+            var exit = nodes.exit().transition('venn').duration(duration).remove();
+            exit.selectAll("path")
+                .attrTween("d", pathTween);
+
+            var exitText = exit.selectAll("text")
+                .attr("x", width/2)
+                .attr("y", height/2);
+
+            // if we've been passed a fontSize explicitly, use it to
+            // transition
+            if (fontSize !== null) {
+                enterText.style("font-size", "0px");
+                updateText.style("font-size", fontSize);
+                exitText.style("font-size", "0px");
+            }
+
+
+            return {'circles': circles,
+                    'textCentres': textCentres,
+                    'nodes': nodes,
+                    'enter': enter,
+                    'update': update,
+                    'exit': exit};
+        }
+
+        function label(d) {
+            if (d.label) {
+                return d.label;
+            }
+            if (d.sets.length == 1) {
+                return '' + d.sets[0];
+            }
+        }
+
+        chart.wrap = function(_) {
+            if (!arguments.length) return wrap;
+            wrap = _;
+            return chart;
+        };
+
+        chart.width = function(_) {
+            if (!arguments.length) return width;
+            width = _;
+            return chart;
+        };
+
+        chart.height = function(_) {
+            if (!arguments.length) return height;
+            height = _;
+            return chart;
+        };
+
+        chart.padding = function(_) {
+            if (!arguments.length) return padding;
+            padding = _;
+            return chart;
+        };
+
+        chart.colours = function(_) {
+            if (!arguments.length) return colours;
+            colours = _;
+            return chart;
+        };
+
+        chart.fontSize = function(_) {
+            if (!arguments.length) return fontSize;
+            fontSize = _;
+            return chart;
+        };
+
+        chart.duration = function(_) {
+            if (!arguments.length) return duration;
+            duration = _;
+            return chart;
+        };
+
+        chart.layoutFunction = function(_) {
+            if (!arguments.length) return layoutFunction;
+            layoutFunction = _;
+            return chart;
+        };
+
+        chart.normalize = function(_) {
+            if (!arguments.length) return normalize;
+            normalize = _;
+            return chart;
+        };
+
+        chart.styled = function(_) {
+            if (!arguments.length) return styled;
+            styled = _;
+            return chart;
+        };
+
+        chart.orientation = function(_) {
+            if (!arguments.length) return orientation;
+            orientation = _;
+            return chart;
+        };
+
+        chart.orientationOrder = function(_) {
+            if (!arguments.length) return orientationOrder;
+            orientationOrder = _;
+            return chart;
+        };
+
+        return chart;
+    }
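+
+    // Usage sketch (editorial; the "#venn" selector is hypothetical):
+    //   var sets = [{sets: ['A'], size: 12}, {sets: ['B'], size: 12},
+    //               {sets: ['A', 'B'], size: 2}];
+    //   var chart = VennDiagram().width(600).height(350);
+    //   d3.select("#venn").datum(sets).call(chart);
+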
+    // sometimes text doesn't fit inside the circle; if that's the case let's
+    // wrap the text here such that it fits
+    // todo: looks like this might be merged into d3 (
+    // https://github.com/mbostock/d3/issues/1642),
+    // also worth checking out is
+    // http://engineering.findthebest.com/wrapping-axis-labels-in-d3-js/
+    // this seems to be one of those things that should be easy but isn't
+    function wrapText(circles, labeller) {
+        return function() {
+            var text = d3.select(this),
+                data = text.datum(),
+                width = circles[data.sets[0]].radius || 50,
+                label = labeller(data) || '';
+
+            var words = label.split(/\s+/).reverse(),
+                maxLines = 3,
+                minChars = (label.length + words.length) / maxLines,
+                word = words.pop(),
+                line = [word],
+                joined,
+                lineNumber = 0,
+                lineHeight = 1.1, // ems
+                tspan = text.text(null).append("tspan").text(word);
+
+            while (true) {
+                word = words.pop();
+                if (!word) break;
+                line.push(word);
+                joined = line.join(" ");
+                tspan.text(joined);
+                if (joined.length > minChars && tspan.node().getComputedTextLength() > width) {
+                    line.pop();
+                    tspan.text(line.join(" "));
+                    line = [word];
+                    tspan = text.append("tspan").text(word);
+                    lineNumber++;
+                }
+            }
+
+            var initial = 0.35 - lineNumber * lineHeight / 2,
+                x = text.attr("x"),
+                y = text.attr("y");
+
+            text.selectAll("tspan")
+                .attr("x", x)
+                .attr("y", y)
+                .attr("dy", function(d, i) {
+                     return (initial + i * lineHeight) + "em";
+                });
+        };
+    }
+
+    function circleMargin(current, interior, exterior) {
+        var margin = interior[0].radius - distance(interior[0], current), i, m;
+        for (i = 1; i < interior.length; ++i) {
+            m = interior[i].radius - distance(interior[i], current);
+            if (m <= margin) {
+                margin = m;
+            }
+        }
+
+        for (i = 0; i < exterior.length; ++i) {
+            m = distance(exterior[i], current) - exterior[i].radius;
+            if (m <= margin) {
+                margin = m;
+            }
+        }
+        return margin;
+    }
+
+    // compute the center of some circles by maximizing the margin of
+    // the center point relative to the circles (interior) after subtracting
+    // nearby circles (exterior)
+    function computeTextCentre(interior, exterior) {
+        // get an initial estimate by sampling around the interior circles
+        // and taking the point with the biggest margin
+        var points = [], i;
+        for (i = 0; i < interior.length; ++i) {
+            var c = interior[i];
+            points.push({x: c.x, y: c.y});
+            points.push({x: c.x + c.radius/2, y: c.y});
+            points.push({x: c.x - c.radius/2, y: c.y});
+            points.push({x: c.x, y: c.y + c.radius/2});
+            points.push({x: c.x, y: c.y - c.radius/2});
+        }
+        var initial = points[0], margin = circleMargin(points[0], interior, exterior);
+        for (i = 1; i < points.length; ++i) {
+            var m = circleMargin(points[i], interior, exterior);
+            if (m >= margin) {
+                initial = points[i];
+                margin = m;
+            }
+        }
+
+        // maximize the margin numerically
+        var solution = fmin(
+                    function(p) { return -1 * circleMargin({x: p[0], y: p[1]}, interior, exterior); },
+                    [initial.x, initial.y],
+                    {maxIterations:500, minErrorDelta:1e-10}).solution;
+        var ret = {x: solution[0], y: solution[1]};
+
+        // check solution, fallback as needed (happens if fully overlapped
+        // etc)
+        var valid = true;
+        for (i = 0; i < interior.length; ++i) {
+            if (distance(ret, interior[i]) > interior[i].radius) {
+                valid = false;
+                break;
+            }
+        }
+
+        for (i = 0; i < exterior.length; ++i) {
+            if (distance(ret, exterior[i]) < exterior[i].radius) {
+                valid = false;
+                break;
+            }
+        }
+
+        if (!valid) {
+            if (interior.length == 1) {
+                ret = {x: interior[0].x, y: interior[0].y};
+            } else {
+                var areaStats = {};
+                intersectionArea(interior, areaStats);
+
+                if (areaStats.arcs.length === 0) {
+                    ret = {'x': 0, 'y': -1000, disjoint:true};
+
+                } else if (areaStats.arcs.length == 1) {
+                    ret = {'x': areaStats.arcs[0].circle.x,
+                           'y': areaStats.arcs[0].circle.y};
+
+                } else if (exterior.length) {
+                    // try again without other circles
+                    ret = computeTextCentre(interior, []);
+
+                } else {
+                    // take average of all the points in the intersection
+                    // polygon. this should basically never happen
+                    // and has some issues:
+                    // https://github.com/benfred/venn.js/issues/48#issuecomment-146069777
+                    ret = getCenter(areaStats.arcs.map(function (a) { return a.p1; }));
+                }
+            }
+        }
+
+        return ret;
+    }
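+
+    // Editorial note: the centre is chosen by sampling candidate points
+    // around the interior circles and then numerically maximizing
+    // circleMargin with the bundled fmin optimizer; the fallbacks above
+    // handle fully-overlapped and disjoint configurations.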
+
+    // given a dictionary of {setid : circle}, returns
+    // a dictionary of setid to list of circles that completely overlap it
+    function getOverlappingCircles(circles) {
+        var ret = {}, circleids = [];
+        for (var circleid in circles) {
+            circleids.push(circleid);
+            ret[circleid] = [];
+        }
+        for (var i = 0; i < circleids.length; i++) {
+            var a = circles[circleids[i]];
+            for (var j = i + 1; j < circleids.length; ++j) {
+                var b = circles[circleids[j]],
+                    d = distance(a, b);
+
+                if (d + b.radius <= a.radius + 1e-10) {
+                    ret[circleids[j]].push(circleids[i]);
+
+                } else if (d + a.radius <= b.radius + 1e-10) {
+                    ret[circleids[i]].push(circleids[j]);
+                }
+            }
+        }
+        return ret;
+    }
+
+    function computeTextCentres(circles, areas) {
+        var ret = {}, overlapped = getOverlappingCircles(circles);
+        for (var i = 0; i < areas.length; ++i) {
+            var area = areas[i].sets, areaids = {}, exclude = {};
+            for (var j = 0; j < area.length; ++j) {
+                areaids[area[j]] = true;
+                var overlaps = overlapped[area[j]];
+                // keep track of any circles that overlap this area,
+                // and don't consider for purposes of computing the text
+                // centre
+                for (var k = 0; k < overlaps.length; ++k) {
+                    exclude[overlaps[k]] = true;
+                }
+            }
+
+            var interior = [], exterior = [];
+            for (var setid in circles) {
+                if (setid in areaids) {
+                    interior.push(circles[setid]);
+                } else if (!(setid in exclude)) {
+                    exterior.push(circles[setid]);
+                }
+            }
+            var centre = computeTextCentre(interior, exterior);
+            ret[area] = centre;
+            if (centre.disjoint && (areas[i].size > 0)) {
+                console.log("WARNING: area " + area + " not represented on screen");
+            }
+        }
+        return ret;
+    }
+
+    // sorts all areas in the venn diagram so that the given area
+    // (relativeTo) is on top, and the remaining areas are ordered
+    // with the smallest areas on top
+    function sortAreas(div, relativeTo) {
+
+        // figure out sets that are completely overlapped by relativeTo
+        var overlaps = getOverlappingCircles(div.selectAll("svg").datum());
+        var exclude = {};
+        for (var i = 0; i < relativeTo.sets.length; ++i) {
+            var check = relativeTo.sets[i];
+            for (var setid in overlaps) {
+                var overlap = overlaps[setid];
+                for (var j = 0; j < overlap.length; ++j) {
+                    if (overlap[j] == check) {
+                        exclude[setid] = true;
+                        break;
+                    }
+                }
+            }
+        }
+
+        // checks that all sets are in exclude
+        function shouldExclude(sets) {
+            for (var i = 0; i < sets.length; ++i) {
+                if (!(sets[i] in exclude)) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        // need to sort the divs so that the z-order is correct
+        div.selectAll("g").sort(function (a, b) {
+            // highest order set intersections first
+            if (a.sets.length != b.sets.length) {
+                return a.sets.length - b.sets.length;
+            }
+
+            if (a == relativeTo) {
+                return shouldExclude(b.sets) ? -1 : 1;
+            }
+            if (b == relativeTo) {
+                return shouldExclude(a.sets) ? 1 : -1;
+            }
+
+            // finally by size
+            return b.size - a.size;
+        });
+    }
+
+    function circlePath(x, y, r) {
+        var ret = [];
+        ret.push("\nM", x, y);
+        ret.push("\nm", -r, 0);
+        ret.push("\na", r, r, 0, 1, 0, r *2, 0);
+        ret.push("\na", r, r, 0, 1, 0,-r *2, 0);
+        return ret.join(" ");
+    }
+
+    // inverse of the circlePath function, returns a circle object from an svg path
+    function circleFromPath(path) {
+        var tokens = path.split(' ');
+        return {'x' : parseFloat(tokens[1]),
+                'y' : parseFloat(tokens[2]),
+                'radius' : -parseFloat(tokens[4])
+                };
+    }
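+
+    // Worked example (editorial): circlePath(0, 0, 10) yields the path
+    //   M 0 0  m -10 0  a 10 10 0 1 0 20 0  a 10 10 0 1 0 -20 0
+    // (each command prefixed with a newline), and circleFromPath()
+    // recovers {x: 0, y: 0, radius: 10} from tokens 1, 2 and 4.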
+
+    /** returns a svg path of the intersection area of a bunch of circles */
+    function intersectionAreaPath(circles) {
+        var stats = {};
+        intersectionArea(circles, stats);
+        var arcs = stats.arcs;
+
+        if (arcs.length === 0) {
+            return "M 0 0";
+
+        } else if (arcs.length == 1) {
+            var circle = arcs[0].circle;
+            return circlePath(circle.x, circle.y, circle.radius);
+
+        } else {
+            // draw path around arcs
+            var ret = ["\nM", arcs[0].p2.x, arcs[0].p2.y];
+            for (var i = 0; i < arcs.length; ++i) {
+                var arc = arcs[i], r = arc.circle.radius, wide = arc.width > r;
+                ret.push("\nA", r, r, 0, wide ? 1 : 0, 1,
+                         arc.p1.x, arc.p1.y);
+            }
+            return ret.join(" ");
+        }
+    }
+
+    exports.fmin = fmin;
+    exports.minimizeConjugateGradient = minimizeConjugateGradient;
+    exports.bisect = bisect;
+    exports.intersectionArea = intersectionArea;
+    exports.circleCircleIntersection = circleCircleIntersection;
+    exports.circleOverlap = circleOverlap;
+    exports.circleArea = circleArea;
+    exports.distance = distance;
+    exports.circleIntegral = circleIntegral;
+    exports.venn = venn;
+    exports.greedyLayout = greedyLayout;
+    exports.scaleSolution = scaleSolution;
+    exports.normalizeSolution = normalizeSolution;
+    exports.bestInitialLayout = bestInitialLayout;
+    exports.lossFunction = lossFunction;
+    exports.disjointCluster = disjointCluster;
+    exports.distanceFromIntersectArea = distanceFromIntersectArea;
+    exports.VennDiagram = VennDiagram;
+    exports.wrapText = wrapText;
+    exports.computeTextCentres = computeTextCentres;
+    exports.computeTextCentre = computeTextCentre;
+    exports.sortAreas = sortAreas;
+    exports.circlePath = circlePath;
+    exports.circleFromPath = circleFromPath;
+    exports.intersectionAreaPath = intersectionAreaPath;
+
+}));
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/biojs.msa.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/biojs.msa.js
new file mode 100755
index 0000000..e44545e
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/biojs.msa.js
@@ -0,0 +1,21 @@
+!function(t){function e(r){if(n[r])return n[r].exports;var i=n[r]={exports:{},id:r,loaded:!1};return t[r].call(i.exports,i,i.exports,e),i.loaded=!0,i.exports}var n={};return e.m=t,e.c=n,e.p="",e(0)}([function(t,e,n){"use strict";function r(t){if(t&&t.__esModule)return t;var e={};if(null!=t)for(var n in t)Object.prototype.hasOwnProperty.call(t,n)&&(e[n]=t[n]);return e["default"]=t,e}var i=n(111),o=r(i);n(154);var s=o["default"];for(var u in o)o.hasOwnProperty(u)&&(s[u]=o[u]);window&&(wind [...]
+var r=-1,i=[],o=t.length;for(e=Ri(e,3);++r<o;){var s=t[r];e(s,r,t)&&(n.push(s),i.push(r))}return wr(t,i),n}function Po(t){return t?ic.call(t):t}function Bo(t,e,n){var r=t?t.length:0;return r?(n&&"number"!=typeof n&&Yi(t,e,n)?(e=0,n=r):(e=null==e?0:Bu(e),n=n===Q?r:Bu(n)),Er(t,e,n)):[]}function Wo(t,e){return zr(t,e)}function Do(t,e,n){return Ar(t,e,Ri(n))}function Ho(t,e){var n=t?t.length:0;if(n){var r=zr(t,e);if(r<n&&nu(t[r],e))return r}return-1}function Uo(t,e){return zr(t,e,!0)}functio [...]
+Mn[Zt]=Mn[Jt]=Mn[Yt]=Mn[Qt]=Mn[te]=Mn[ee]=Mn[ne]=Mn[re]=Mn[ie]=!0,Mn[Ct]=Mn[Tt]=Mn[Kt]=Mn[It]=Mn[Xt]=Mn[Nt]=Mn[Lt]=Mn[Rt]=Mn[Ft]=Mn[Pt]=Mn[Bt]=Mn[Dt]=Mn[Ht]=Mn[Ut]=Mn[$t]=!1;var zn={};zn[Ct]=zn[Tt]=zn[Kt]=zn[Xt]=zn[It]=zn[Nt]=zn[Zt]=zn[Jt]=zn[Yt]=zn[Qt]=zn[te]=zn[Ft]=zn[Pt]=zn[Bt]=zn[Dt]=zn[Ht]=zn[Ut]=zn[Vt]=zn[ee]=zn[ne]=zn[re]=zn[ie]=!0,zn[Lt]=zn[Rt]=zn[$t]=!1;var An={"À":"A","Á":"A","Â":"A","Ã":"A","Ä":"A","Å":"A","à ":"a","á":"a","â":"a","ã":"a","ä":"a","å":"a","Ç":"C","à [...]
+	 * jBone v1.2.0 - 2016-04-13 - Library for DOM manipulation
+	 *
+	 * http://jbone.js.org
+	 *
+	 * Copyright 2016 Alexey Kupriyanenko
+	 * Released under the MIT license.
+	 */
+!function(o){function s(t){var e=t.length,n="undefined"==typeof t?"undefined":i(t);return!_(n)&&t!==o&&(!(1!==t.nodeType||!e)||b(n)||0===e||"number"==typeof e&&e>0&&e-1 in t)}function u(t,e){var n,r;this.originalEvent=t,r=function(t,e){"preventDefault"===t?this[t]=function(){return this.defaultPrevented=!0,e[t]()}:"stopImmediatePropagation"===t?this[t]=function(){return this.immediatePropagationStopped=!0,e[t]()}:_(e[t])?this[t]=function(){return e[t]()}:this[t]=e[t]};for(n in t)(t[n]||" [...]
+return e}})},function(t,e,n){"use strict";var r,i=n(1).Model;t.exports=r=i.extend({defaults:{searchText:""}})},function(t,e,n){"use strict";var r,i=n(1).Model;t.exports=r=i.extend({defaults:{searchBox:-10,overviewBox:30,headerBox:-1,alignmentBody:0,scaleSlider:50}})},function(t,e,n){"use strict";var r,i=n(1).Model;t.exports=r=i.extend({defaults:{sequences:!0,markers:!0,metacell:!1,conserv:!1,overviewbox:!1,seqlogo:!1,gapHeader:!1,leftHeader:!0,scaleslider:!1,labels:!0,labelName:!0,labelI [...]
+e}),o+="';\n",e.variable||(o="with(obj||{}){\n"+o+"}\n"),o="var __t,__p='',__j=Array.prototype.join,print=function(){__p+=__j.call(arguments,'');};\n"+o+"return __p;\n";try{var s=new Function(e.variable||"obj","_",o)}catch(u){throw u.source=o,u}var a=function(t){return s.call(this,t,S)},l=e.variable||"obj";return a.source="function("+l+"){\n"+o+"}",a},S.chain=function(t){var e=S(t);return e._chain=!0,e};var $=function(t,e){return t._chain?S(e).chain():e};S.mixin=function(t){S.each(S.func [...]
+return t&&"function"==typeof Symbol&&t.constructor===Symbol?"symbol":typeof t};!function(){function n(){return{keys:Object.keys||function(t){if("object"!==("undefined"==typeof t?"undefined":r(t))&&"function"!=typeof t||null===t)throw new TypeError("keys() called on a non-object");var e,n=[];for(e in t)t.hasOwnProperty(e)&&(n[n.length]=e);return n},uniqueId:function(t){var e=++a+"";return t?t+e:e},has:function(t,e){return s.call(t,e)},each:function(t,e,n){if(null!=t)if(o&&t.forEach===o)t. [...]
+var e,n=[];for(e in t)t.hasOwnProperty(e)&&(n[n.length]=e);return n},uniqueId:function(t){var e=++a+"";return t?t+e:e},has:function(t,e){return s.call(t,e)},each:function(t,e,n){if(null!=t)if(o&&t.forEach===o)t.forEach(e,n);else if(t.length===+t.length)for(var r=0,i=t.length;r<i;r++)e.call(n,t[r],r,t);else for(var s in t)this.has(t,s)&&e.call(n,t[s],s,t)},once:function(t){var e,n=!1;return function(){return n?e:(n=!0,e=t.apply(this,arguments),t=null,e)}}}}var i,o=Array.prototype.forEach, [...]
+o=s(t).find("#canv_"+r)),s(o).attr("width",n).attr("height",e),o[0]}var i=n(86),o=n(85),s=n(5);t.exports=function(t){if(this.data){t=t||{};var e=t.zoom||this.zoom,n=t.target||1,s=(t.scaled||null,this.dom_element.parent().attr("width")),u=1,a=null,l=null,c=0;if(this.previous_target=n,t.start&&(this.start=t.start),t.end&&(this.end=t.end),e<=.1?e=.1:e>=1&&(e=1),this.zoom=e,a=this.end||this.data.heightArr.length,l=this.start||1,a=a>this.data.heightArr.length?this.data.heightArr.length:a,a=a< [...]
+var e=e||"undefined"!=typeof navigator&&navigator.msSaveOrOpenBlob&&navigator.msSaveOrOpenBlob.bind(navigator)||function(t){if("undefined"==typeof navigator||!/MSIE [1-9]\./.test(navigator.userAgent)){var e=t.document,n=function(){return t.URL||t.webkitURL||t},r=e.createElementNS("http://www.w3.org/1999/xhtml","a"),i=!t.externalHost&&"download"in r,o=function(n){var r=e.createEvent("MouseEvents");r.initMouseEvent("click",!0,!1,t,0,0,0,0,0,!1,!1,!1,!1,0,null),n.dispatchEvent(r)},s=t.webki [...]
+this.listenTo(this.g.colorscheme,"change",function(){return this.render()})},render:function(){var t=this.setName("Color scheme");this.removeAllNodes();for(var e,n=this.getColorschemes(),r=0;r<n.length;r++)e=n[r],this.addScheme(t,e);return this.grey(t),s.removeAllChilds(this.el),this.el.appendChild(this.buildDOM()),this},addScheme:function(t,e){var n=this,r={};return this.g.colorscheme.get("scheme")===e.id&&(r.backgroundColor="#77ED80"),this.addNode(e.name,function(){n.g.colorscheme.set( [...]
+this.ctx.lineTo(m+f+e,n)),"undefined"!=typeof u&&null!==u&&u.indexOf(b)>=0||(this.ctx.moveTo(m+e,h+n),this.ctx.lineTo(m+f+e,h+n)),m+=f)}return this.ctx.moveTo(e,n),this.ctx.lineTo(e,h+n),this.ctx.moveTo(e+d,n),this.ctx.lineTo(e+d,h+n),this.ctx.stroke(),this.ctx.strokeStyle=v,this.ctx.lineWidth=g},_getPrevNextSelection:function(t){var e=t.collection.prev(t),n=t.collection.next(t),r=void 0,i=void 0;return"undefined"!=typeof e&&null!==e&&(r=this._getSelection(e)),"undefined"!=typeof n&&null [...]
+},0);n=(100*n/e.length).toFixed(0)+"%";var r=document.createElement("span");r.textContent=n,r.style.display="inline-block",r.style.width=35,this.el.appendChild(r)}if(this.g.vis.get("metaIdentity")){var o=this.g.stats.identity()[this.model.id],a=document.createElement("span");this.model.get("ref")&&this.g.config.get("hasRef")?a.textContent="ref.":"undefined"!=typeof o&&null!==o&&(a.textContent=o.toFixed(2)),a.style.display="inline-block",a.style.width=40,this.el.appendChild(a)}if(this.g.v [...]
+//# sourceMappingURL=msa.js.map
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/drawrna.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/drawrna.js
new file mode 100644
index 0000000..9c83f9b
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/drawrna.js
@@ -0,0 +1,15 @@
+var Backbone = require("backbone");
+var Structure = require("./models/structure");
+var Vispanel = require("./views/vispanel");
+
+var Drawrnajs = Backbone.View.extend({
+    initialize: function(opts){
+        this.struct = new Structure(opts.seq, opts.dotbr, 'naview');
+        this.vis = new Vispanel({ el: opts.el, struct: this.struct, resindex: opts.resindex });
+    },
+    render: function(){
+        this.vis.render();
+    }
+});
+
+module.exports = Drawrnajs;
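+
+// Usage sketch (editorial; the option values below are hypothetical):
+//   var view = new Drawrnajs({
+//       el: document.getElementById("rna"), // target container
+//       seq: "GGGAAACCC",                   // nucleotide sequence
+//       dotbr: "(((...)))",                 // dot-bracket structure
+//       resindex: resindex                  // residue index option passed to the view
+//   });
+//   view.render();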
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/layout.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/layout.js
new file mode 100644
index 0000000..b1ef67d
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/layout.js
@@ -0,0 +1,24 @@
+var radiate = require("./radiate/getradiate");
+var naview = require("./naview/getnaview");
+
+var Layout = function(layout, nodes, links){
+    this.layout = layout;
+    this.nodes = nodes;
+    this.links = links;
+}
+
+Layout.prototype.getCoords = function(){
+    var coords = null;
+    if(this.layout === "radiate"){
+        coords = radiate(this.nodes, this.links);
+    }
+    else if(this.layout === "naview"){
+        coords = naview(this.nodes, this.links);
+    }
+    else {
+        throw new Error("Invalid layout");
+    }
+    return coords;
+}
+
+module.exports = Layout;
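+
+// Usage sketch (editorial): nodes and links come from the parsed structure;
+// "radiate" and "naview" are the only layout names accepted above.
+//   var coords = new Layout("naview", nodes, links).getCoords();
+//   // coords is an array of {x, y} positions, one per node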
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/base.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/base.js
new file mode 100644
index 0000000..f7b1029
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/base.js
@@ -0,0 +1,49 @@
+var Region = require("./region");
+
+var Base = module.exports = function(){
+	this.mate = null;
+	this.x = null;
+    this.y = null;
+	this.extracted = null;
+	this.region = new Region();
+}
+
+Base.prototype.getMate = function(){
+	return this.mate;
+}
+
+Base.prototype.setMate = function(mate){
+	this.mate = mate;
+}
+
+Base.prototype.getX = function(){
+	return this.x;
+}
+
+Base.prototype.setX = function(x){
+	this.x = x;
+}
+
+Base.prototype.getY = function(){
+	return this.y;
+}
+
+Base.prototype.setY = function(y){
+	this.y = y;
+}
+
+Base.prototype.isExtracted = function(){
+	return this.extracted;
+}
+
+Base.prototype.setExtracted = function(extracted){
+	this.extracted = extracted;
+}
+
+Base.prototype.getRegion = function(){
+	return this.region;
+}
+
+Base.prototype.setRegion = function(region){
+	this.region = region;
+}
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/connection.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/connection.js
new file mode 100644
index 0000000..ddc479f
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/connection.js
@@ -0,0 +1,100 @@
+var Loop = require("./loop");
+var Region = require("./region");
+
+var Connection = module.exports = function(){
+	this.loop = new Loop();
+	this.region = new Region();
+	// Start and end form the 1st base pair of the region.
+	this.start = null;
+    this.end = null;
+	this.xrad = null;
+    this.yrad = null;
+    this.angle = null;
+	// True if segment between this connection and the
+	// next must be extruded out of the circle
+	this.extruded = null;
+	// True if the extruded segment must be drawn long.
+	this.broken = null;
+
+	this._isNull = false;
+}
+
+Connection.prototype.isNull = function(){
+	return this._isNull;
+}
+
+Connection.prototype.setNull = function(isNull){
+	this._isNull = isNull;
+}
+
+Connection.prototype.getLoop = function(){
+	return this.loop;
+}
+
+Connection.prototype.setLoop = function(loop) {
+	this.loop = loop;
+}
+
+Connection.prototype.getRegion = function(){
+	return this.region;
+}
+
+Connection.prototype.setRegion = function(region){
+	this.region = region;
+}
+
+Connection.prototype.getStart = function(){
+	return this.start;
+}
+
+Connection.prototype.setStart = function(start) {
+	this.start = start;
+}
+
+Connection.prototype.getEnd = function(){
+	return this.end;
+}
+
+Connection.prototype.setEnd = function(end){
+	this.end = end;
+}
+
+Connection.prototype.getXrad = function(){
+	return this.xrad;
+}
+
+Connection.prototype.setXrad = function(xrad){
+	this.xrad = xrad;
+}
+
+Connection.prototype.getYrad = function(){
+	return this.yrad;
+}
+
+Connection.prototype.setYrad = function(yrad) {
+	this.yrad = yrad;
+}
+
+Connection.prototype.getAngle = function(){
+	return this.angle;
+}
+
+Connection.prototype.setAngle = function(angle){
+	this.angle = angle;
+}
+
+Connection.prototype.isExtruded = function(){
+	return this.extruded;
+}
+
+Connection.prototype.setExtruded = function(extruded){
+	this.extruded = extruded;
+}
+
+Connection.prototype.isBroken = function(){
+	return this.broken;
+}
+
+Connection.prototype.setBroken = function(broken){
+	this.broken = broken;
+}
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/getnaview.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/getnaview.js
new file mode 100644
index 0000000..5c88ffa
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/getnaview.js
@@ -0,0 +1,44 @@
+var NAView = require("./naview");
+
+var getCoordsNAVIEW = module.exports = function(nodes, links){
+	//Calculates coordinates according to the NAView layout
+	var pairTable = [];
+
+	for(var i=0; i<nodes.length; i++){
+		pairTable.push(getPartner(i, links));
+	}
+	var naView = new NAView();
+	var xy = naView.naview_xy_coordinates(pairTable);
+
+	// Updating individual base positions
+	var coords = [];
+	for (var i = 0; i < nodes.length; i++) {
+		coords.push({
+			x: Math.round(xy.x[i] * 2.5),
+			y: Math.round(xy.y[i] * 2.5)
+		});
+	}
+	return coords;
+}
+
+function getPartner(srcIndex, links){
+	//Returns the partner of a nucleotide:
+	//-1 means there is no partner
+	var partner = -1;
+	for(var i = 0; i < links.length; i++){
+		if(links[i].type !== "phosphodiester" && links[i].type !== "index"){
+			if(links[i].source === srcIndex){
+				partner = links[i].target;
+				break;
+			}
+			else if(links[i].target === srcIndex){
+				partner = links[i].source;
+				break;
+			}
+			else {
+				continue;
+			}
+		}
+	}
+	return partner;
+}
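+
+// Editorial note: getPartner returns the 0-based index of the node paired
+// with srcIndex via a non-backbone link, or -1 when unpaired. For example
+// (hypothetical link list), with links = [{source: 0, target: 8, type: "basepair"}],
+// getPartner(0, links) === 8 and getPartner(1, links) === -1.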
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/loop.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/loop.js
new file mode 100644
index 0000000..35fa6d4
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/loop.js
@@ -0,0 +1,98 @@
+var Loop = module.exports = function(){
+	this.nconnection = null;
+	this.connections = [];
+	this._connections = [];
+	this.number = null;
+	this.depth = null;
+	this.mark = null;
+	this.x = null;
+    this.y = null;
+    this.radius = null;
+}
+
+Loop.prototype.getNconnection = function() {
+	return this.nconnection;
+}
+
+Loop.prototype.setNconnection = function(nconnection) {
+	this.nconnection = nconnection;
+}
+
+Loop.prototype.setConnection = function(i, c){
+	var Connection = require("./connection");
+	if (c != null){
+		this._connections[i] = c;
+    }
+	else {
+		if (!this._connections[i]){
+			this._connections[i] = new Connection();
+		}
+		this._connections[i].setNull(true);
+	}
+}
+
+Loop.prototype.getConnection = function(i){
+	var Connection = require("./connection");
+	if (!this._connections[i]){
+        this._connections[i] = new Connection();
+    }
+	var c = this._connections[i];
+	if (c.isNull()){
+		return null;
+    }
+	else {
+		return c;
+    }
+}
+
+Loop.prototype.addConnection = function(i, c){
+	this._connections.push(c);
+}
+
+Loop.prototype.getNumber = function(){
+	return this.number;
+}
+
+Loop.prototype.setNumber = function(number){
+	this.number = number;
+}
+
+Loop.prototype.getDepth = function(){
+	return this.depth;
+}
+
+Loop.prototype.setDepth = function(depth){
+	this.depth = depth;
+}
+
+Loop.prototype.isMark = function(){
+	return this.mark;
+}
+
+Loop.prototype.setMark = function(mark){
+	this.mark = mark;
+}
+
+Loop.prototype.getX = function(){
+	return this.x;
+}
+
+Loop.prototype.setX = function(x){
+	this.x = x;
+}
+
+Loop.prototype.getY = function(){
+	return this.y;
+}
+
+Loop.prototype.setY = function(y){
+	this.y = y;
+}
+
+Loop.prototype.getRadius = function(){
+	return this.radius;
+}
+
+Loop.prototype.setRadius = function(radius){
+	this.radius = radius;
+}
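
A short illustration of the sentinel convention these accessors support
(naview.js below iterates connections until getConnection() returns null).
It assumes Connection's no-argument constructor leaves the null flag unset,
as loop.js's own usage implies:

    var Loop = require("./loop");
    var Connection = require("./connection");

    var lp = new Loop();
    lp.setNconnection(1);
    lp.setConnection(0, new Connection()); // real slot
    lp.setConnection(1, null);             // null-flagged sentinel slot
    for (var i = 0; lp.getConnection(i) != null; i++) {
        // visits exactly one connection, then stops at the sentinel
    }
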
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/naview.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/naview.js
new file mode 100644
index 0000000..383ff7a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/naview.js
@@ -0,0 +1,1098 @@
+var Loop = require("./loop");
+var Base = require("./base");
+var Region = require("./region");
+var Connection = require("./connection");
+var Radloop = require("./radloop");
+
+var NAView = module.exports = function(){
+	this.ANUM = 9999.0;
+	this.MAXITER = 500;
+
+	this.bases = [];
+	this.nbase = null;
+	this.nregion = null;
+	this.loop_count = null;
+
+	this.root = new Loop();
+	this.loops = [];
+
+	this.regions = [];
+
+	this.rlphead = new Radloop();
+
+	this.lencut = 0.8;
+	this.RADIUS_REDUCTION_FACTOR = 1.4;
+
+	// set by find_center_for_arc: per-base arc angle and arc center offset
+	this.angleinc = null;
+	this._h = null;
+
+	// report the first iteration failure in find_center_for_arc only once
+	this.noIterationFailureYet = true;
+
+	this.HELIX_FACTOR = 0.6;
+	this.BACKBONE_DISTANCE = 27;
+}
+
+NAView.prototype.naview_xy_coordinates = function(pair_table2){
+    var x = [];
+	var y = [];
+    if (pair_table2.length === 0){
+        return 0;
+    }
+    var i;
+    var pair_table = [];
+    pair_table.push(pair_table2.length);
+    for (var j = 0; j < pair_table2.length; j++){
+        pair_table.push(pair_table2[j] + 1);
+    }
+    this.nbase = pair_table[0];
+    this.bases = [];
+    for (var index = 0; index < this.nbase + 1; index++){
+        this.bases.push(new Base());
+    }
+    this.regions = [];
+    for (var index = 0; index < this.nbase + 1; index++){
+        this.regions.push(new Region());
+    }
+    this.read_in_bases(pair_table);
+    this.rlphead = null;
+    this.find_regions();
+    this.loop_count = 0;
+    this.loops = [];
+    for (var index = 0; index < this.nbase + 1; index++){
+        this.loops.push(new Loop());
+    }
+    this.construct_loop(0);
+    this.find_central_loop();
+    this.traverse_loop(this.root, null);
+
+    for (i = 0; i < this.nbase; i++){
+        x.push(100 + this.BACKBONE_DISTANCE * this.bases[i + 1].getX());
+        y.push(100 + this.BACKBONE_DISTANCE * this.bases[i + 1].getY());
+    }
+
+    return {
+        nbase: this.nbase,
+        x: x,
+        y: y
+    }
+}
+
+NAView.prototype.read_in_bases = function read_in_bases(pair_table){
+    var i = null;
+    var npairs = null;
+
+    // Set up an origin. bases[0..nbase] were already allocated in
+    // naview_xy_coordinates, so initialize them in place; the original
+    // port pushed duplicate Base objects here that were never read.
+    this.bases[0].setMate(0);
+    this.bases[0].setExtracted(false);
+    this.bases[0].setX(this.ANUM);
+    this.bases[0].setY(this.ANUM);
+
+    for (npairs = 0, i = 1; i <= this.nbase; i++){
+        this.bases[i].setExtracted(false);
+        this.bases[i].setX(this.ANUM);
+        this.bases[i].setY(this.ANUM);
+        this.bases[i].setMate(pair_table[i]);
+        if (pair_table[i] > i)
+            npairs++;
+    }
+    // must have at least one pair, or the layout degenerates
+    if (npairs == 0){
+        this.bases[1].setMate(this.nbase);
+        this.bases[this.nbase].setMate(1);
+    }
+}
+
+NAView.prototype.find_regions = function find_regions(){
+    var i = null;
+    var mate = null;
+    var nb1 = null;
+
+    nb1 = this.nbase + 1;
+    var mark = [];
+    for (i = 0; i < nb1; i++){
+        mark.push(false);
+    }
+    this.nregion = 0;
+    for (i = 0; i <= this.nbase; i++) {
+        if ((mate = this.bases[i].getMate()) != 0 && !mark[i]) {
+            this.regions[this.nregion].setStart1(i);
+            this.regions[this.nregion].setEnd2(mate);
+            mark[i] = true;
+            mark[mate] = true;
+            this.bases[i].setRegion(this.regions[this.nregion]);
+            this.bases[mate].setRegion(this.regions[this.nregion]);
+            for (i++, mate--; i < mate && this.bases[i].getMate() == mate; i++, mate--) {
+                mark[mate] = true;
+                mark[i] = true;
+                this.bases[i].setRegion(this.regions[this.nregion]);
+                this.bases[mate].setRegion(this.regions[this.nregion]);
+            }
+            this.regions[this.nregion].setEnd1(--i);
+            this.regions[this.nregion].setStart2(mate + 1);
+
+            this.nregion++;
+        }
+    }
+}
+
+NAView.prototype.construct_loop = function construct_loop(ibase){
+    var i = null;
+    var mate = null;
+    // plain declarations; the Java original allocated throwaway objects here
+    var lp = null;
+    var cp = null;
+    var rp = null;
+    var rlp = null;
+    var retloop = this.loops[this.loop_count++];
+    retloop.setNconnection(0);
+    retloop.setDepth(0);
+    retloop.setNumber(this.loop_count);
+    retloop.setRadius(0.0);
+
+    for (rlp = this.rlphead; rlp != null; rlp = rlp.getNext())
+        if (rlp.getLoopnumber() == this.loop_count)
+            retloop.setRadius(rlp.getRadius());
+    i = ibase;
+    do {
+        if ((mate = this.bases[i].getMate()) != 0) {
+            rp = this.bases[i].getRegion();
+            if (!this.bases[rp.getStart1()].isExtracted()) {
+                if (i == rp.getStart1()) {
+                    this.bases[rp.getStart1()].setExtracted(true);
+                    this.bases[rp.getEnd1()].setExtracted(true);
+                    this.bases[rp.getStart2()].setExtracted(true);
+                    this.bases[rp.getEnd2()].setExtracted(true);
+                    lp = this.construct_loop(rp.getEnd1() < this.nbase ? rp.getEnd1() + 1
+                            : 0);
+                } else if (i == rp.getStart2()) {
+                    this.bases[rp.getStart2()].setExtracted(true);
+                    this.bases[rp.getEnd2()].setExtracted(true);
+                    this.bases[rp.getStart1()].setExtracted(true);
+                    this.bases[rp.getEnd1()].setExtracted(true);
+                    lp = this.construct_loop(rp.getEnd2() < this.nbase ? rp.getEnd2() + 1
+                            : 0);
+                } else {
+                    console.log("Something went terribly wrong ....");
+                }
+                retloop.setNconnection(retloop.getNconnection() + 1);
+                cp = new Connection();
+                retloop.setConnection(retloop.getNconnection() - 1, cp);
+                retloop.setConnection(retloop.getNconnection(), null);
+                cp.setLoop(lp);
+                cp.setRegion(rp);
+                if(i == rp.getStart1()) {
+                    cp.setStart(rp.getStart1());
+                    cp.setEnd(rp.getEnd2());
+                } else {
+                    cp.setStart(rp.getStart2());
+                    cp.setEnd(rp.getEnd1());
+                }
+                cp.setExtruded(false);
+                cp.setBroken(false);
+                lp.setNconnection(lp.getNconnection() + 1);
+                cp = new Connection();
+                lp.setConnection(lp.getNconnection() - 1, cp);
+                lp.setConnection(lp.getNconnection(), null);
+                cp.setLoop(retloop);
+                cp.setRegion(rp);
+                if (i == rp.getStart1()) {
+                    cp.setStart(rp.getStart2());
+                    cp.setEnd(rp.getEnd1());
+                } else {
+                    cp.setStart(rp.getStart1());
+                    cp.setEnd(rp.getEnd2());
+                }
+                cp.setExtruded(false);
+                cp.setBroken(false);
+            }
+            i = mate;
+        }
+        if (++i > this.nbase)
+            i = 0;
+    } while (i != ibase);
+    return retloop;
+}
+
+NAView.prototype.find_central_loop = function find_central_loop(){
+    var lp = null;
+    var maxconn = null;
+    var maxdepth = null;
+    var i = null;
+
+    // the helper reads this.loops / this.loop_count, so bind this explicitly
+    determine_depths.call(this);
+    maxconn = 0;
+    maxdepth = -1;
+    for (i = 0; i < this.loop_count; i++) {
+        lp = this.loops[i];
+        if (lp.getNconnection() > maxconn) {
+            maxdepth = lp.getDepth();
+            maxconn = lp.getNconnection();
+            this.root = lp;
+        } else if (lp.getDepth() > maxdepth
+                && lp.getNconnection() == maxconn) {
+            maxdepth = lp.getDepth();
+            this.root = lp;
+        }
+    }
+}
+
+// file-local helper: must be invoked as determine_depths.call(naViewInstance)
+function determine_depths() {
+    var lp = null;
+    var i = null;
+    var j = null;
+
+    for (i = 0; i < this.loop_count; i++) {
+        lp = this.loops[i];
+        for (j = 0; j < this.loop_count; j++){
+            this.loops[j].setMark(false);
+        }
+        lp.setDepth(depth(lp));
+    }
+}
+
+function depth(lp){
+    var count = null;
+    var ret = null;
+    var d = null;
+
+    if (lp.getNconnection() <= 1){
+        return 0;
+    }
+    if (lp.isMark()){
+        return -1;
+    }
+    lp.setMark(true);
+    count = 0;
+    ret = 0;
+    for (var i = 0; lp.getConnection(i) != null; i++) {
+        d = depth(lp.getConnection(i).getLoop());
+        if (d >= 0) {
+            if (++count == 1){
+                ret = d;
+            }
+            else if (ret > d){
+                ret = d;
+            }
+        }
+    }
+    lp.setMark(false);
+    return ret + 1;
+}
+
+NAView.prototype.traverse_loop = function traverse_loop(lp, anchor_connection){
+    var xs, ys, xe, ye, xn, yn, angleinc, r;
+    var radius, xc, yc, xo, yo, astart, aend, a;
+    var cp, cpnext, acp, cpprev;
+    var i, j, n, ic;
+    var da, maxang;
+    var count, icstart, icend, icmiddle, icroot;
+    var done, done_all_connections, rooted;
+    var sign;
+    var midx, midy, nrx, nry, mx, my, vx, vy, dotmv, nmidx, nmidy;
+    var icstart1, icup, icdown, icnext, direction;
+    var dan, dx, dy, rr;
+    var cpx, cpy, cpnextx, cpnexty, cnx, cny, rcn, rc, lnx, lny, rl, ac, acn, sx, sy, dcp;
+    var imaxloop = 0;
+
+    angleinc = 2 * Math.PI / (this.nbase + 1);
+    acp = null;
+    icroot = -1;
+    for (ic = 0; (cp = lp.getConnection(ic)) != null; ic++) {
+        xs = -Math.sin(angleinc * cp.getStart());
+        ys = Math.cos(angleinc * cp.getStart());
+        xe = -Math.sin(angleinc * cp.getEnd());
+        ye = Math.cos(angleinc * cp.getEnd());
+        xn = ye - ys;
+        yn = xs - xe;
+        r = Math.sqrt(xn * xn + yn * yn);
+        cp.setXrad(xn / r);
+        cp.setYrad(yn / r);
+        cp.setAngle(Math.atan2(yn, xn));
+        if (cp.getAngle() < 0.0){
+            cp.setAngle(cp.getAngle() + 2 * Math.PI);
+        }
+        if (anchor_connection != null
+                && anchor_connection.getRegion() == cp.getRegion()) {
+            acp = cp;
+            icroot = ic;
+        }
+    }
+    set_radius: while (true) {
+        this.determine_radius(lp, this.lencut);
+        radius = lp.getRadius()/this.RADIUS_REDUCTION_FACTOR;
+        if (anchor_connection == null){
+            xc = yc = 0.0;
+        }
+        else {
+            xo = (this.bases[acp.getStart()].getX()
+                    + this.bases[acp.getEnd()].getX()) / 2.0;
+            yo = (this.bases[acp.getStart()].getY()
+                    + this.bases[acp.getEnd()].getY()) / 2.0;
+            xc = xo - radius * acp.getXrad();
+            yc = yo - radius * acp.getYrad();
+        }
+
+        // The construction of the connectors proceeds in blocks of
+        // connected connectors; a connected pair means two connectors
+        // forced off the drawn circle because they are too close
+        // together in angle.
+
+        // First, find the start of a block of connected connectors.
+
+        if (icroot == -1){
+            icstart = 0;
+        }
+        else {
+            icstart = icroot;
+        }
+        cp = lp.getConnection(icstart);
+        count = 0;
+        done = false;
+        do {
+            j = icstart - 1;
+            if (j < 0){
+                j = lp.getNconnection() - 1;
+            }
+            cpprev = lp.getConnection(j);
+            if (!this.connected_connection(cpprev, cp)) {
+                done = true;
+            }
+            else {
+                icstart = j;
+                cp = cpprev;
+            }
+            if (++count > lp.getNconnection()) {
+                // Here everything is connected. Break on maximum angular
+                // separation between connections.
+                maxang = -1.0;
+                for (ic = 0; ic < lp.getNconnection(); ic++) {
+                    j = ic + 1;
+                    if (j >= lp.getNconnection()){
+                        j = 0;
+                    }
+                    cp = lp.getConnection(ic);
+                    cpnext = lp.getConnection(j);
+                    ac = cpnext.getAngle() - cp.getAngle();
+                    if (ac < 0.0){
+                        ac += 2 * Math.PI;
+                    }
+                    if (ac > maxang) {
+                        maxang = ac;
+                        imaxloop = ic;
+                    }
+                }
+                icend = imaxloop;
+                icstart = imaxloop + 1;
+                if (icstart >= lp.getNconnection()){
+                    icstart = 0;
+                }
+                cp = lp.getConnection(icend);
+                cp.setBroken(true);
+                done = true;
+            }
+        } while (!done);
+        done_all_connections = false;
+        icstart1 = icstart;
+        while (!done_all_connections) {
+            count = 0;
+            done = false;
+            icend = icstart;
+            rooted = false;
+            while (!done) {
+                cp = lp.getConnection(icend);
+                if (icend == icroot){
+                    rooted = true;
+                }
+                j = icend + 1;
+                if (j >= lp.getNconnection()) {
+                    j = 0;
+                }
+                cpnext = lp.getConnection(j);
+                if (this.connected_connection(cp, cpnext)) {
+                    if (++count >= lp.getNconnection()){
+                        break;
+                    }
+                    icend = j;
+                }
+                else {
+                    done = true;
+                }
+            }
+            icmiddle = this.find_ic_middle(icstart, icend, anchor_connection,
+                    acp, lp);
+            ic = icup = icdown = icmiddle;
+            done = false;
+            direction = 0;
+            while (!done) {
+                if (direction < 0) {
+                    ic = icup;
+                }
+                else if (direction == 0) {
+                    ic = icmiddle;
+                }
+                else {
+                    ic = icdown;
+                }
+                if (ic >= 0) {
+                    cp = lp.getConnection(ic);
+                    if (anchor_connection == null || acp != cp) {
+                        if (direction == 0) {
+                            astart = cp.getAngle()
+                                    - Math.asin(1.0 / 2.0 / radius);
+                            aend = cp.getAngle()
+                                    + Math.asin(1.0 / 2.0 / radius);
+                            this.bases[cp.getStart()].setX(
+                                    xc + radius * Math.cos(astart));
+                            this.bases[cp.getStart()].setY(
+                                    yc + radius * Math.sin(astart));
+                            this.bases[cp.getEnd()].setX(
+                                    xc + radius * Math.cos(aend));
+                            this.bases[cp.getEnd()].setY(
+                                    yc + radius * Math.sin(aend));
+                        }
+                        else if (direction < 0) {
+                            j = ic + 1;
+                            if (j >= lp.getNconnection()){
+                                j = 0;
+                            }
+                            cp = lp.getConnection(ic);
+                            cpnext = lp.getConnection(j);
+                            cpx = cp.getXrad();
+                            cpy = cp.getYrad();
+                            ac = (cp.getAngle() + cpnext.getAngle()) / 2.0;
+                            if (cp.getAngle() > cpnext.getAngle()){
+                                ac -= Math.PI;
+                            }
+                            cnx = Math.cos(ac);
+                            cny = Math.sin(ac);
+                            lnx = cny;
+                            lny = -cnx;
+                            da = cpnext.getAngle() - cp.getAngle();
+                            if (da < 0.0){
+                                da += 2 * Math.PI;
+                            }
+                            if (cp.isExtruded()) {
+                                if (da <= Math.PI / 2){
+                                    rl = 2.0;
+                                }
+                                else {
+                                    rl = 1.5;
+                                }
+                            }
+                            else {
+                                rl = 1.0;
+                            }
+                            this.bases[cp.getEnd()].setX(
+                                    this.bases[cpnext.getStart()].getX()
+                                            + rl * lnx);
+                            this.bases[cp.getEnd()].setY(
+                                    this.bases[cpnext.getStart()].getY()
+                                            + rl * lny);
+                            this.bases[cp.getStart()].setX(
+                                    this.bases[cp.getEnd()].getX() + cpy);
+                            this.bases[cp.getStart()].setY(
+                                    this.bases[cp.getEnd()].getY() - cpx);
+                        } else {
+                            j = ic - 1;
+                            if (j < 0){
+                                j = lp.getNconnection() - 1;
+                            }
+                            cp = lp.getConnection(j);
+                            cpnext = lp.getConnection(ic);
+                            cpnextx = cpnext.getXrad();
+                            cpnexty = cpnext.getYrad();
+                            ac = (cp.getAngle() + cpnext.getAngle()) / 2.0;
+                            if (cp.getAngle() > cpnext.getAngle()){
+                                ac -= Math.PI;
+                            }
+                            cnx = Math.cos(ac);
+                            cny = Math.sin(ac);
+                            lnx = -cny;
+                            lny = cnx;
+                            da = cpnext.getAngle() - cp.getAngle();
+                            if (da < 0.0){
+                                da += 2 * Math.PI;
+                            }
+                            if (cp.isExtruded()) {
+                                if (da <= Math.PI / 2){
+                                    rl = 2.0;
+                                }
+                                else {
+                                    rl = 1.5;
+                                }
+                            }
+                            else {
+                                rl = 1.0;
+                            }
+                            this.bases[cpnext.getStart()].setX(
+                                    this.bases[cp.getEnd()].getX() + rl
+                                            * lnx);
+                            this.bases[cpnext.getStart()].setY(
+                                    this.bases[cp.getEnd()].getY() + rl
+                                            * lny);
+                            this.bases[cpnext.getEnd()].setX(
+                                    this.bases[cpnext.getStart()].getX()
+                                            - cpnexty);
+                            this.bases[cpnext.getEnd()].setY(
+                                    this.bases[cpnext.getStart()].getY()
+                                            + cpnextx);
+                        }
+                    }
+                }
+                if (direction < 0) {
+                    if (icdown == icend) {
+                        icdown = -1;
+                    }
+                    else if (icdown >= 0) {
+                        if (++icdown >= lp.getNconnection()) {
+                            icdown = 0;
+                        }
+                    }
+                    direction = 1;
+                }
+                else {
+                    if (icup == icstart){
+                        icup = -1;
+                    }
+                    else if (icup >= 0) {
+                        if (--icup < 0) {
+                            icup = lp.getNconnection() - 1;
+                        }
+                    }
+                    direction = -1;
+                }
+                done = icup == -1 && icdown == -1;
+            }
+            icnext = icend + 1;
+            if (icnext >= lp.getNconnection()){
+                icnext = 0;
+            }
+            if (icend != icstart
+                    && (!(icstart == icstart1 && icnext == icstart1))) {
+
+                // Move the bases just constructed (or the radius) so that
+                // the bisector of the end points is radius distance away
+                // from the loop center.
+
+                cp = lp.getConnection(icstart);
+                cpnext = lp.getConnection(icend);
+                dx = this.bases[cpnext.getEnd()].getX()
+                        - this.bases[cp.getStart()].getX();
+                dy = this.bases[cpnext.getEnd()].getY()
+                        - this.bases[cp.getStart()].getY();
+                midx = this.bases[cp.getStart()].getX() + dx / 2.0;
+                midy = this.bases[cp.getStart()].getY() + dy / 2.0;
+                rr = Math.sqrt(dx * dx + dy * dy);
+                mx = dx / rr;
+                my = dy / rr;
+                vx = xc - midx;
+                vy = yc - midy;
+                rr = Math.sqrt(dx * dx + dy * dy);
+                vx /= rr;
+                vy /= rr;
+                dotmv = vx * mx + vy * my;
+                nrx = dotmv * mx - vx;
+                nry = dotmv * my - vy;
+                rr = Math.sqrt(nrx * nrx + nry * nry);
+                nrx /= rr;
+                nry /= rr;
+
+                // Determine which side of the bisector the center should
+                // be.
+
+                dx = this.bases[cp.getStart()].getX() - xc;
+                dy = this.bases[cp.getStart()].getY() - yc;
+                ac = Math.atan2(dy, dx);
+                if (ac < 0.0){
+                    ac += 2 * Math.PI;
+                }
+                dx = this.bases[cpnext.getEnd()].getX() - xc;
+                dy = this.bases[cpnext.getEnd()].getY() - yc;
+                acn = Math.atan2(dy, dx);
+                if (acn < 0.0){
+                    acn += 2 * Math.PI;
+                }
+                if (acn < ac){
+                    acn += 2 * Math.PI;
+                }
+                if (acn - ac > Math.PI){
+                    sign = -1;
+                }
+                else {
+                    sign = 1;
+                }
+                nmidx = xc + sign * radius * nrx;
+                nmidy = yc + sign * radius * nry;
+                if (rooted) {
+                    xc -= nmidx - midx;
+                    yc -= nmidy - midy;
+                }
+                else {
+                    for (ic = icstart;;) {
+                        cp = lp.getConnection(ic);
+                        i = cp.getStart();
+                        this.bases[i].setX(
+                                this.bases[i].getX() + nmidx - midx);
+                        this.bases[i].setY(
+                                this.bases[i].getY() + nmidy - midy);
+                        i = cp.getEnd();
+                        this.bases[i].setX(
+                                this.bases[i].getX() + nmidx - midx);
+                        this.bases[i].setY(
+                                this.bases[i].getY() + nmidy - midy);
+                        if (ic == icend){
+                            break;
+                        }
+                        if (++ic >= lp.getNconnection()){
+                            ic = 0;
+                        }
+                    }
+                }
+            }
+            icstart = icnext;
+            done_all_connections = icstart == icstart1;
+        }
+        for (ic = 0; ic < lp.getNconnection(); ic++) {
+            cp = lp.getConnection(ic);
+            j = ic + 1;
+            if (j >= lp.getNconnection()){
+                j = 0;
+            }
+            cpnext = lp.getConnection(j);
+            dx = this.bases[cp.getEnd()].getX() - xc;
+            dy = this.bases[cp.getEnd()].getY() - yc;
+            rc = Math.sqrt(dx * dx + dy * dy);
+            ac = Math.atan2(dy, dx);
+            if (ac < 0.0){
+                ac += 2 * Math.PI;
+            }
+            dx = this.bases[cpnext.getStart()].getX() - xc;
+            dy = this.bases[cpnext.getStart()].getY() - yc;
+            rcn = Math.sqrt(dx * dx + dy * dy);
+            acn = Math.atan2(dy, dx);
+            if (acn < 0.0){
+                acn += 2 * Math.PI;
+            }
+            if (acn < ac){
+                acn += 2 * Math.PI;
+            }
+            dan = acn - ac;
+            dcp = cpnext.getAngle() - cp.getAngle();
+            if (dcp <= 0.0){
+                dcp += 2 * Math.PI;
+            }
+            if (Math.abs(dan - dcp) > Math.PI) {
+                if (cp.isExtruded()) {
+                    console.log("Warning from traverse_loop. Loop "
+                            + lp.getNumber() + " has crossed regions\n");
+                }
+                else if ((cpnext.getStart() - cp.getEnd()) != 1) {
+                    cp.setExtruded(true);
+                    continue set_radius; // stands in for the goto of the original implementation
+                }
+            }
+            if (cp.isExtruded()) {
+                this.construct_extruded_segment(cp, cpnext);
+            }
+            else {
+                n = cpnext.getStart() - cp.getEnd();
+                if (n < 0){
+                    n += this.nbase + 1;
+                }
+                angleinc = dan / n;
+                for (j = 1; j < n; j++) {
+                    i = cp.getEnd() + j;
+                    if (i > this.nbase){
+                        i -= this.nbase + 1;
+                    }
+                    a = ac + j * angleinc;
+                    rr = rc + (rcn - rc) * (a - ac) / dan;
+                    this.bases[i].setX(xc + rr * Math.cos(a));
+                    this.bases[i].setY(yc + rr * Math.sin(a));
+                }
+            }
+        }
+        break;
+    }
+    for (ic = 0; ic < lp.getNconnection(); ic++) {
+        if (icroot != ic) {
+            cp = lp.getConnection(ic);
+            this.generate_region(cp);
+            this.traverse_loop(cp.getLoop(), cp);
+        }
+    }
+    n = 0;
+    sx = 0.0;
+    sy = 0.0;
+    for (ic = 0; ic < lp.getNconnection(); ic++) {
+        j = ic + 1;
+        if (j >= lp.getNconnection()){
+            j = 0;
+        }
+        cp = lp.getConnection(ic);
+        cpnext = lp.getConnection(j);
+        n += 2;
+        sx += this.bases[cp.getStart()].getX()
+                + this.bases[cp.getEnd()].getX();
+        sy += this.bases[cp.getStart()].getY()
+                + this.bases[cp.getEnd()].getY();
+        if (!cp.isExtruded()) {
+            for (j = cp.getEnd() + 1; j != cpnext.getStart(); j++) {
+                if (j > this.nbase){
+                    j -= this.nbase + 1;
+                }
+                n++;
+                sx += this.bases[j].getX();
+                sy += this.bases[j].getY();
+            }
+        }
+    }
+    lp.setX(sx / n);
+    lp.setY(sy / n);
+}
+
+NAView.prototype.determine_radius = function determine_radius(lp, lencut){
+    var mindit, ci, dt, sumn, sumd, radius, dit;
+    var i, j, end, start, imindit = 0;
+    var cp = null, cpnext = null;
+    var rt2_2 = 0.7071068;
+
+    do {
+        mindit = 1.0e10;
+        for (sumd = 0.0, sumn = 0.0, i = 0; i < lp.getNconnection(); i++) {
+            cp = lp.getConnection(i);
+            j = i + 1;
+            if (j >= lp.getNconnection()){
+                j = 0;
+            }
+            cpnext = lp.getConnection(j);
+            end = cp.getEnd();
+            start = cpnext.getStart();
+            if (start < end){
+                start += this.nbase + 1;
+            }
+            dt = cpnext.getAngle() - cp.getAngle();
+            if (dt <= 0.0){
+                dt += 2 * Math.PI;
+            }
+            if (!cp.isExtruded()){
+                ci = start - end;
+            }
+            else {
+                if (dt <= Math.PI / 2){
+                    ci = 2.0;
+                }
+                else {
+                    ci = 1.5;
+                }
+            }
+            sumn += dt * (1.0 / ci + 1.0);
+            sumd += dt * dt / ci;
+            dit = dt / ci;
+            if (dit < mindit && !cp.isExtruded() && ci > 1.0) {
+                mindit = dit;
+                imindit = i;
+            }
+        }
+        radius = sumn / sumd;
+        if (radius < rt2_2){
+            radius = rt2_2;
+        }
+        if (mindit * radius < lencut) {
+            lp.getConnection(imindit).setExtruded(true);
+        }
+    } while (mindit * radius < lencut);
+    if (lp.getRadius() > 0.0){
+        radius = lp.getRadius();
+    }
+    else {
+        lp.setRadius(radius);
+    }
+}
+
+NAView.prototype.find_ic_middle = function find_ic_middle(icstart, icend, anchor_connection, acp, lp){
+    var count, ret, ic, i;
+    var done;
+
+    count = 0;
+    ret = -1;
+    ic = icstart;
+    done = false;
+    while (!done) {
+        if (count++ > lp.getNconnection() * 2) {
+            console.log("Infinite loop in 'find_ic_middle'");
+        }
+        if (anchor_connection != null && lp.getConnection(ic) == acp) {
+            ret = ic;
+        }
+        done = ic == icend;
+        if (++ic >= lp.getNconnection()) {
+            ic = 0;
+        }
+    }
+    if (ret == -1) {
+        for (i = 1, ic = icstart; i < (count + 1) / 2; i++) {
+            if (++ic >= lp.getNconnection())
+                ic = 0;
+        }
+        ret = ic;
+    }
+    return ret;
+}
+
+NAView.prototype.construct_extruded_segment = function construct_extruded_segment(cp, cpnext){
+    var astart, aend1, aend2, aave, dx, dy, a1, a2, ac, rr, da, dac;
+    var start, end, n, nstart, nend;
+    var collision;
+
+    astart = cp.getAngle();
+    aend2 = aend1 = cpnext.getAngle();
+    if (aend2 < astart){
+        aend2 += 2 * Math.PI;
+    }
+    aave = (astart + aend2) / 2.0;
+    start = cp.getEnd();
+    end = cpnext.getStart();
+    n = end - start;
+    if (n < 0){
+        n += this.nbase + 1;
+    }
+    da = cpnext.getAngle() - cp.getAngle();
+    if (da < 0.0) {
+        da += 2 * Math.PI;
+    }
+    if (n == 2) {
+        this.construct_circle_segment(start, end);
+    }
+    else {
+        dx = this.bases[end].getX() - this.bases[start].getX();
+        dy = this.bases[end].getY() - this.bases[start].getY();
+        rr = Math.sqrt(dx * dx + dy * dy);
+        dx /= rr;
+        dy /= rr;
+        if (rr >= 1.5 && da <= Math.PI / 2) {
+            nstart = start + 1;
+            if (nstart > this.nbase){
+                nstart -= this.nbase + 1;
+            }
+            nend = end - 1;
+            if (nend < 0){
+                nend += this.nbase + 1;
+            }
+            this.bases[nstart].setX(this.bases[start].getX() + 0.5 * dx);
+            this.bases[nstart].setY(this.bases[start].getY() + 0.5 * dy);
+            this.bases[nend].setX(this.bases[end].getX() - 0.5 * dx);
+            this.bases[nend].setY(this.bases[end].getY() - 0.5 * dy);
+            start = nstart;
+            end = nend;
+        }
+        do {
+            collision = false;
+            this.construct_circle_segment(start, end);
+            nstart = start + 1;
+            if (nstart > this.nbase) {
+                nstart -= this.nbase + 1;
+            }
+            dx = this.bases[nstart].getX() - this.bases[start].getX();
+            dy = this.bases[nstart].getY() - this.bases[start].getY();
+            a1 = Math.atan2(dy, dx);
+            if (a1 < 0.0){
+                a1 += 2 * Math.PI;
+            }
+            dac = a1 - astart;
+            if (dac < 0.0){
+                dac += 2 * Math.PI;
+            }
+            if (dac > Math.PI){
+                collision = true;
+            }
+            nend = end - 1;
+            if (nend < 0){
+                nend += this.nbase + 1;
+            }
+            dx = this.bases[nend].getX() - this.bases[end].getX();
+            dy = this.bases[nend].getY() - this.bases[end].getY();
+            a2 = Math.atan2(dy, dx);
+            if (a2 < 0.0){
+                a2 += 2 * Math.PI;
+            }
+            dac = aend1 - a2;
+            if (dac < 0.0){
+                dac += 2 * Math.PI;
+            }
+            if (dac > Math.PI){
+                collision = true;
+            }
+            if (collision) {
+                ac = this.minf2(aave, astart + 0.5);
+                this.bases[nstart].setX(
+                        this.bases[start].getX() + Math.cos(ac));
+                this.bases[nstart].setY(
+                        this.bases[start].getY() + Math.sin(ac));
+                start = nstart;
+                ac = this.maxf2(aave, aend2 - 0.5);
+                this.bases[nend].setX(this.bases[end].getX() + Math.cos(ac));
+                this.bases[nend].setY(this.bases[end].getY() + Math.sin(ac));
+                end = nend;
+                n -= 2;
+            }
+        } while (collision && n > 1);
+    }
+}
+
+NAView.prototype.construct_circle_segment = function construct_circle_segment(start, end){
+    var dx, dy, rr, midx, midy, xn, yn, nrx, nry, mx, my, a;
+    var l, j, i;
+
+    dx = this.bases[end].getX() - this.bases[start].getX();
+    dy = this.bases[end].getY() - this.bases[start].getY();
+    rr = Math.sqrt(dx * dx + dy * dy);
+    l = end - start;
+    if (l < 0){
+        l += this.nbase + 1;
+    }
+    if (rr >= l) {
+        dx /= rr;
+        dy /= rr;
+        for (j = 1; j < l; j++) {
+            i = start + j;
+            if (i > this.nbase){
+                i -= this.nbase + 1;
+            }
+            this.bases[i].setX(
+                    this.bases[start].getX() + dx * j / l);
+            this.bases[i].setY(
+                    this.bases[start].getY() + dy * j / l);
+        }
+    }
+    else {
+        this.find_center_for_arc((l - 1), rr);
+        dx /= rr;
+        dy /= rr;
+        midx = this.bases[start].getX() + dx * rr / 2.0;
+        midy = this.bases[start].getY() + dy * rr / 2.0;
+        xn = dy;
+        yn = -dx;
+        nrx = midx + this._h * xn;
+        nry = midy + this._h * yn;
+        mx = this.bases[start].getX() - nrx;
+        my = this.bases[start].getY() - nry;
+        rr = Math.sqrt(mx * mx + my * my);
+        a = Math.atan2(my, mx);
+        for (j = 1; j < l; j++) {
+            i = start + j;
+            if (i > this.nbase){
+                i -= this.nbase + 1;
+            }
+            this.bases[i].setX(nrx + rr * Math.cos(a + j * this.angleinc));
+            this.bases[i].setY(nry + rr * Math.sin(a + j * this.angleinc));
+        }
+    }
+}
+
+NAView.prototype.find_center_for_arc = function find_center_for_arc(n, b){
+    var h, hhi, hlow, r, disc, theta, e, phi;
+    var iter;
+
+    hhi = (n + 1.0) / Math.PI;
+    // changed to prevent div by zero if (ih)
+    hlow = -hhi - b / (n + 1.000001 - b);
+    if (b < 1){
+        // otherwise we might fail below (ih)
+        hlow = 0;
+    }
+    iter = 0;
+    do {
+        h = (hhi + hlow) / 2.0;
+        r = Math.sqrt(h * h + b * b / 4.0);
+        disc = 1.0 - 0.5 / (r * r);
+        if (Math.abs(disc) > 1.0) {
+            console.log("Unexpected large magnitude discriminant = " + disc
+                            + " " + r);
+        }
+        theta = Math.acos(disc);
+        phi = Math.acos(h / r);
+        e = theta * (n + 1) + 2 * phi - 2 * Math.PI;
+        if (e > 0.0) {
+            hlow = h;
+        }
+        else {
+            hhi = h;
+        }
+    } while (Math.abs(e) > 0.0001 && ++iter < this.MAXITER);
+    if (iter >= this.MAXITER) {
+        if (this.noIterationFailureYet) {
+            console.log("Iteration failed in find_center_for_arc");
+            this.noIterationFailureYet = false;
+        }
+        h = 0.0;
+        theta = 0.0;
+    }
+    this._h = h;
+    this.angleinc = theta;
+}
+
+NAView.prototype.generate_region = function generate_region(cp){
+    var l, start, end, i, mate;
+    var rp;
+
+    rp = cp.getRegion();
+    l = 0;
+    if (cp.getStart() == rp.getStart1()) {
+        start = rp.getStart1();
+        end = rp.getEnd1();
+    }
+    else {
+        start = rp.getStart2();
+        end = rp.getEnd2();
+    }
+    if (this.bases[cp.getStart()].getX() > this.ANUM - 100.0
+            || this.bases[cp.getEnd()].getX() > this.ANUM - 100.0) {
+        console.log(
+                "Bad region passed to generate_region. Coordinates not defined.");
+    }
+    for (i = start + 1; i <= end; i++) {
+        l++;
+        this.bases[i].setX(
+                this.bases[cp.getStart()].getX() + this.HELIX_FACTOR * l
+                        * cp.getXrad());
+        this.bases[i].setY(
+                this.bases[cp.getStart()].getY() + this.HELIX_FACTOR * l
+                        * cp.getYrad());
+        mate = this.bases[i].getMate();
+        this.bases[mate].setX(
+                this.bases[cp.getEnd()].getX() + this.HELIX_FACTOR * l
+                        * cp.getXrad());
+        this.bases[mate].setY(
+                this.bases[cp.getEnd()].getY() + this.HELIX_FACTOR * l
+                        * cp.getYrad());
+
+    }
+}
+
+NAView.prototype.minf2 = function minf2(x1, x2) {
+    return ((x1) < (x2)) ? (x1) : (x2);
+}
+
+NAView.prototype.maxf2 = function maxf2(x1, x2) {
+    return ((x1) > (x2)) ? (x1) : (x2);
+}
+
+NAView.prototype.connected_connection = function connected_connection(cp, cpnext) {
+    if (cp.isExtruded()) {
+        return true;
+    }
+    else if (cp.getEnd() + 1 == cpnext.getStart()) {
+        return true;
+    }
+    else {
+        return false;
+    }
+}
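
For orientation, a sketch of driving NAView directly. It assumes the
0-based pair table convention used by getnaview.js (partner index, or -1
for unpaired); naview_xy_coordinates() converts that to the 1-based,
length-prefixed table the port works with internally:

    var NAView = require("./naview");

    var pairTable = [4, -1, -1, -1, 0];   // base 0 pairs with base 4
    var xy = new NAView().naview_xy_coordinates(pairTable);
    // xy.nbase === 5; xy.x[i] and xy.y[i] hold the coordinates of base i
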
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/radloop.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/radloop.js
new file mode 100644
index 0000000..6d90fa0
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/radloop.js
@@ -0,0 +1,38 @@
+var Radloop = module.exports = function(){
+	this.radius = null;
+	this.loopnumber = null;
+	this.next = null;
+	this.prev = null;
+}
+
+Radloop.prototype.getRadius = function(){
+	return this.radius;
+}
+
+Radloop.prototype.setRadius = function(radius){
+	this.radius = radius;
+}
+
+Radloop.prototype.getLoopnumber = function(){
+	return this.loopnumber;
+}
+
+Radloop.prototype.setLoopnumber = function(loopnumber){
+	this.loopnumber = loopnumber;
+}
+
+Radloop.prototype.getNext = function(){
+	return this.next;
+}
+
+Radloop.prototype.setNext = function(next){
+	this.next = next;
+}
+
+Radloop.prototype.getPrev = function(){
+	return this.prev;
+}
+
+Radloop.prototype.setPrev = function(prev){
+	this.prev = prev;
+}
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/region.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/region.js
new file mode 100644
index 0000000..15dd497
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/naview/region.js
@@ -0,0 +1,38 @@
+var Region = module.exports = function(){
+	this._start1 = null;
+	this._end1 = null;
+	this._start2 = null;
+	this._end2 = null;
+}
+
+Region.prototype.getStart1 = function(){
+	return this._start1;
+}
+
+Region.prototype.setStart1 = function(start1){
+	this._start1 = start1;
+}
+
+Region.prototype.getEnd1 = function(){
+	return this._end1;
+}
+
+Region.prototype.setEnd1 = function(end1){
+	this._end1 = end1;
+}
+
+Region.prototype.getStart2 = function(){
+	return this._start2;
+}
+
+Region.prototype.setStart2 = function(start2){
+	this._start2 = start2;
+}
+
+Region.prototype.getEnd2 = function(){
+	return this._end2;
+}
+
+Region.prototype.setEnd2 = function(end2){
+	this._end2 = end2;
+}
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/radiate/getradiate.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/radiate/getradiate.js
new file mode 100644
index 0000000..4d9b679
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/layouts/radiate/getradiate.js
@@ -0,0 +1,271 @@
+var calculateCoords = module.exports = function(seq, dotbr, links){
+	//This function calculates the coordinates for each nucleotide
+	//according to the radiate layout
+	var coords = [];
+	var centers = [];
+	var angles = [];
+	var dirAngle = -1;
+
+	for(var i = 0; i < seq.length; i++){
+		coords[i] = {x: 0, y: 0};
+		centers[i] = {x: 0, y: 0};
+	}
+
+	if(2>1){ // always true in this port; the else branch below is unreachable
+		dirAngle += 1.0 - Math.PI / 2.0;
+		var i = 0;
+		var x = 0.0;
+		var y = 0.0;
+		var vx = -Math.sin(dirAngle);
+		var vy = Math.cos(dirAngle);
+
+		while(i < seq.length){
+			coords[i].x = x;
+			coords[i].y = y;
+			centers[i].x = x + 65 * vy;
+			centers[i].y = y - 65 * vx;
+			var j = getPartner(i, links);
+
+			if(j > i){
+				drawLoop(i, j, x + (65 * vx / 2.0), y + (65 * vy / 2.0),
+						dirAngle, coords, centers, angles, seq, links);
+				centers[i].x = coords[i].x + 65 * vy;
+				centers[i].y = y - 65 * vx;
+				i = j;
+				x += 65 * vx;
+				y += 65 * vy;
+				centers[i].x = coords[i].x + 65 * vy;
+				centers[i].y = y - 65 * vx;
+			}
+			x += 35 * vx;
+			y += 35 * vy;
+			i += 1;
+		}
+	}
+	else{
+		drawLoop(0, seq.length-1, 0, 0, dirAngle, coords, centers, angles, seq, links);
+	}
+	return coords;
+}
+
+function getPartner(srcIndex, links){
+	//Returns the partner of a nucleotide:
+	//-1 means there is no partner
+	var partner = -1;
+	for(var i = 0; i < links.length; i++){
+		if(links[i].type != "phosphodiester"){
+			if(links[i].source === srcIndex){
+				partner = links[i].target;
+				break;
+			}
+			else if(links[i].target === srcIndex){
+				partner = links[i].source;
+				break;
+			}
+			else {
+				continue;
+			}
+		}
+	}
+	return partner;
+}
+
+function drawLoop(i, j, x, y, dirAngle, coords, centers, angles, seq, links){
+	//Calculates loop coordinates
+	if (i > j) {
+		return;
+	}
+
+	// BasePaired
+	if (getPartner(i, links) === j) {
+		var normalAngle = Math.PI / 2.0;
+		centers[i] = {x: x, y: y};
+		centers[j] = {x: x, y: y};
+		coords[i].x = (x + 65 * Math.cos(dirAngle - normalAngle) / 2.0);
+		coords[i].y = (y + 65 * Math.sin(dirAngle - normalAngle) / 2.0);
+		coords[j].x = (x + 65 * Math.cos(dirAngle + normalAngle) / 2.0);
+		coords[j].y = (y + 65 * Math.sin(dirAngle + normalAngle) / 2.0);
+		drawLoop(i + 1, j - 1, x + 40 * Math.cos(dirAngle), y + 40 * Math.sin(dirAngle),
+				dirAngle, coords, centers, angles, seq, links);
+	}
+	else {
+		//multi loop now
+		var k = i;
+		var basesMultiLoop = [];
+		var helices = [];
+		var l;
+		while (k <= j) {
+			l = getPartner(k, links);
+			if (l > k) {
+				basesMultiLoop.push(k);
+				basesMultiLoop.push(l);
+				helices.push(k);
+				k = l + 1;
+			}
+			else {
+				basesMultiLoop.push(k);
+				k++;
+			}
+		}
+		var mlSize = basesMultiLoop.length + 2;
+		var numHelices = helices.length + 1;
+		var totalLength = 35 * (mlSize - numHelices) + 65 * numHelices;
+		var multiLoopRadius;
+		var angleIncrementML;
+		var angleIncrementBP;
+		if (mlSize > 3) {
+			multiLoopRadius = determineRadius(numHelices, mlSize - numHelices, (totalLength) / (2.0 * Math.PI), 65, 35);
+			angleIncrementML = -2.0 * Math.asin(35 / (2.0 * multiLoopRadius));
+			angleIncrementBP = -2.0 * Math.asin(65 / (2.0 * multiLoopRadius));
+		}
+		else {
+			multiLoopRadius = 35.0;
+			angleIncrementBP = -2.0 * Math.asin(65 / (2.0 * multiLoopRadius));
+			angleIncrementML = (-2.0 * Math.PI - angleIncrementBP) / 2.0;
+		}
+		var centerDist = Math.sqrt(Math.max(Math.pow(multiLoopRadius, 2) - Math.pow(65 / 2.0, 2), 0.0)) - 40;
+		var mlCenter = {x: x + (centerDist * Math.cos(dirAngle)),
+						y: y + (centerDist * Math.sin(dirAngle))};
+		// Base directing angle for (multi|hairpin) loop, from the center's
+		// perspective
+		var baseAngle = dirAngle
+				// U-turn
+				+ Math.PI
+				// Account for already drawn supporting base-pair
+				+ 0.5 * angleIncrementBP
+				// Base cannot be paired twice, so next base is at
+				// "unpaired base distance"
+				+ 1.0 * angleIncrementML;
+
+		var currUnpaired = [];
+		var currInterval = {el1: 0, el2: baseAngle-1.0 * angleIncrementML};
+		var intervals = [];
+
+		for (k = basesMultiLoop.length - 1; k >= 0; k--) {
+			l = basesMultiLoop[k];
+			centers[l] = mlCenter;
+			// pairing status of the current base l (the port queried the
+			// fixed loop start i here, which made these flags constant)
+			var isPaired = (getPartner(l, links) != -1);
+			var isPaired3 = isPaired && (getPartner(l, links) < l);
+			var isPaired5 = isPaired && !isPaired3;
+			if (isPaired3) {
+				baseAngle = correctHysteresis(baseAngle+angleIncrementBP/2.)-angleIncrementBP/2.;
+				currInterval.el1 = baseAngle;
+				intervals.push({el1: currUnpaired, el2: currInterval });
+				currInterval = { el1: -1.0, el2: -1.0 };
+				currUnpaired = [];
+			}
+			else if (isPaired5) {
+				currInterval.el2 = baseAngle;
+			}
+			else {
+				currUnpaired.push(l);
+			}
+			angles[l] = baseAngle;
+			if (isPaired3) {
+				baseAngle += angleIncrementBP;
+			}
+			else {
+				baseAngle += angleIncrementML;
+			}
+		}
+		currInterval.el1 = dirAngle - Math.PI - 0.5 * angleIncrementBP;
+		intervals.push( {el1: currUnpaired, el2: currInterval } );
+
+		for(var z = 0; z < intervals.length; z++){
+			var mina = intervals[z].el2.el1;
+			var maxa = normalizeAngle(intervals[z].el2.el2, mina);
+
+			for (var n = 0; n < intervals[z].el1.length; n++){
+				var ratio = (1. + n)/(1. + intervals[z].el1.length);
+				var b = intervals[z].el1[n];
+				angles[b] = mina + (1.-ratio)*(maxa-mina);
+			}
+		}
+
+		for (k = basesMultiLoop.length - 1; k >= 0; k--) {
+			l = basesMultiLoop[k];
+			coords[l].x = mlCenter.x + multiLoopRadius * Math.cos(angles[l]);
+			coords[l].y = mlCenter.y + multiLoopRadius * Math.sin(angles[l]);
+		}
+
+		var newAngle;
+		var m, n;
+		for (k = 0; k < helices.length; k++) {
+			m = helices[k];
+			n = getPartner(m, links);
+			newAngle = (angles[m] + angles[n]) / 2.0;
+			drawLoop(m + 1, n - 1,
+					(40 * Math.cos(newAngle)) + (coords[m].x + coords[n].x) / 2.0,
+					(40 * Math.sin(newAngle)) + (coords[m].y + coords[n].y) / 2.0,
+					newAngle, coords, centers, angles, seq, links);
+		}
+	}
+}
+
+function determineRadius(nbHel, nbUnpaired, startRadius, bpdist, multidist) {
+	var xmin = bpdist / 2.0;
+	var xmax = 3.0 * multidist + 1;
+	var x = (xmin + xmax) / 2.0;
+	var y = 10000.0;
+	var ymin = -1000.0;
+	var ymax = 1000.0;
+	var numIt = 0;
+	var precision = 0.00001;
+	while ((Math.abs(y) > precision) && (numIt < 10000)) {
+		x = (xmin + xmax) / 2.0;
+		y = objFun(nbHel, nbUnpaired, x, bpdist, multidist);
+		ymin = objFun(nbHel, nbUnpaired, xmax, bpdist, multidist);
+		ymax = objFun(nbHel, nbUnpaired, xmin, bpdist, multidist);
+		if (ymin > 0.0) {
+			xmax = xmax + (xmax - xmin);
+		} else if ((y <= 0.0) && (ymax > 0.0)) {
+			xmax = x;
+		} else if ((y >= 0.0) && (ymin < 0.0)) {
+			xmin = x;
+		} else if (ymax < 0.0) {
+			xmin = Math.max(xmin - (x - xmin),
+					Math.max(bpdist / 2.0, multidist / 2.0));
+			xmax = x;
+		}
+		numIt++;
+	}
+	return x;
+}
+
+function objFun(n1, n2, r, bpdist, multidist) {
+	return ( n1 * 2.0 * Math.asin(bpdist / (2.0 * r)) + n2 * 2.0
+				* Math.asin( multidist / (2.0 * r)) - (2.0 * Math.PI));
+}
+
+function correctHysteresis(angle){
+	var hystAttr = [ 0.0, Math.PI/4.0, Math.PI/2.0, 3.0*Math.PI/4.0, Math.PI, 5.0*(Math.PI)/4.0, 3.0*(Math.PI)/2.0, 7.0*(Math.PI)/4.0];
+	var result = normalizeAngleSec(angle);
+	for (var i = 0; i < hystAttr.length; i++){
+		var att = hystAttr[i];
+		// snap to the nearest multiple of PI/4 when within 0.15 rad
+		if (Math.abs(normalizeAngle(att - result, -Math.PI)) < 0.15){
+			result = att;
+		}
+	}
+	return result;
+}
+
+function normalizeAngleSec(angle){
+	return normalizeAngle(angle, 0.0);
+}
+
+function normalizeAngle(angle, fromVal) {
+	//Maps angle into the half-open interval [fromVal, fromVal + 2*PI)
+	var toVal = fromVal + 2.0 * Math.PI;
+	var result = angle;
+	while(result < fromVal){
+		result += 2.0 * Math.PI;
+	}
+	while(result >= toVal){
+		result -= 2.0 * Math.PI;
+	}
+	return result;
+}
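
The bisection in determineRadius() above searches for the loop radius r at
which the chords placed around the circle close it exactly, i.e. the root
of objFun: nbHel * 2*asin(bpdist/(2r)) + nbUnpaired * 2*asin(multidist/(2r)) = 2*PI.
Both helpers are file-local (and startRadius is accepted but never read),
so a sanity check like the following would have to live inside the module;
the 65/35 constants are the base-pair and unpaired distances drawLoop uses:

    var r = determineRadius(2, 4, (2 * 65 + 4 * 35) / (2 * Math.PI), 65, 35);
    console.log(objFun(2, 4, r, 65, 35)); // ~0 when the arc angles sum to 2*PI
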
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/link.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/link.js
new file mode 100644
index 0000000..e223851
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/link.js
@@ -0,0 +1,17 @@
+var Backbone = require("backbone");
+
+var Link = Backbone.Model.extend({
+    defaults: {
+        group: "edges"
+    },
+    initialize: function(data){
+        this.set("id", data.id);
+        this.set("source", data.source);
+        this.set("target", data.target);
+        this.set("label", data.label);
+        this.set("weight", data.weight);
+        this.set("color", data.color);
+    }
+})
+
+module.exports = Link;
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/linkcol.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/linkcol.js
new file mode 100644
index 0000000..a2d324a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/linkcol.js
@@ -0,0 +1,28 @@
+var Backbone = require("backbone");
+var Link = require("./link");
+var pdbr = require("../utils/parsedbr");
+
+var LinkCol = Backbone.Collection.extend({
+    model: Link,
+    initialize: function(model, stl, residues){
+        this.style = stl;
+        this.residues = residues;
+    },
+    newBond: function(src, target){
+        var res1 = this.residues.at(parseInt(src));
+        var res2 = this.residues.at(parseInt(target));
+        var type = pdbr.getType(pdbr.isWatsonCrick(res1, res2));
+        var style = this.style;
+
+        this.add(new Link({
+            id: src + "to" + target,
+            source: src,
+            target: target,
+            label: type,
+            weight: style.getWeight(type),
+            color: style.getColor(type)
+        }));
+    }
+});
+
+module.exports = LinkCol;
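
A sketch of how this collection is driven when the user draws a new bond.
The stl and residues arguments are assumptions mirroring the constructor
above: stl must answer getWeight(type) and getColor(type), and residues is
a ResidueCol whose at(i) yields the i-th residue model:

    var LinkCol = require("./linkcol");

    var bonds = new LinkCol(null, stl, residues);
    bonds.newBond("2", "10"); // adds a Link with id "2to10", typed via parsedbr
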
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/ncanno.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/ncanno.js
new file mode 100644
index 0000000..70f3c57
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/ncanno.js
@@ -0,0 +1,36 @@
+var Backbone = require("backbone");
+
+var Anno = Backbone.Model.extend({
+    defaults: {
+        base1: "wc",
+        base2: "wc",
+        stericity: "cis",
+        edge: null
+    },
+    initialize: function(b1, b2, st, edge){
+        this.set("base1", b1);
+        this.set("base2", b2);
+        this.set("edge", edge);
+        this.set("stericity", st);
+    },
+    getLabels: function(){
+        var labels = [];
+        for(var i=1; i<3; i++){
+            switch(this.get("base" + i)){
+                case "wc":
+                    labels.push("Watson-Crick");
+                    break;
+                case "hg":
+                    labels.push("Hoogsteen");
+                    break;
+                case "sg":
+                    labels.push("Sugar");
+                    break;
+            }
+        }
+        labels.push(this.get("stericity"));
+        return labels;
+    }
+})
+
+module.exports = Anno;
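
The label mapping above in action; the argument order follows initialize
(base1, base2, stericity, edge):

    var Anno = require("./ncanno");

    var labels = new Anno("wc", "hg", "cis", null).getLabels();
    // labels is ["Watson-Crick", "Hoogsteen", "cis"]
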
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/residue.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/residue.js
new file mode 100644
index 0000000..eb57522
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/residue.js
@@ -0,0 +1,19 @@
+var Backbone = require("backbone");
+
+var Residue = Backbone.Model.extend({
+    defaults: {
+        group: "nodes",
+        selectable: true,
+        locked: false,
+        grabbable: true
+    },
+    initialize: function(data){
+        this.set("name", data.name);
+        this.set("color", data.color);
+        this.set("x", data.x);
+        this.set("y", data.y);
+        this.set("id", data.id);
+    }
+})
+
+module.exports = Residue;
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/residuecol.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/residuecol.js
new file mode 100644
index 0000000..961db41
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/residuecol.js
@@ -0,0 +1,21 @@
+var Backbone = require("backbone");
+var Residue = require("./residue");
+var _ = require("underscore");
+
+var ResidueCol = Backbone.Collection.extend({
+    model: Residue,
+    setResidueColor: function(group, color){
+        _.each(this.where({name: group}),
+            function(el){ el.set("color", color); });
+    },
+    setSelectionColor: function(group, color){
+        var el = null;
+
+        for(var i=0; i<group.length; i++){
+            el = group[i];
+            this.where({id: el.id()})[0].set("color", color);
+        }
+    }
+});
+
+module.exports = ResidueCol;
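
A quick illustration of the recolor helpers above; the group name and hex
color are arbitrary example values, and setSelectionColor instead expects a
list of Cytoscape elements (it reads el.id()):

    var ResidueCol = require("./residuecol");

    var residues = new ResidueCol();
    residues.setResidueColor("G", "#3c763d"); // recolor every residue named G
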
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/structure.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/structure.js
new file mode 100644
index 0000000..4b13e26
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/models/structure.js
@@ -0,0 +1,158 @@
+var Backbone = require("backbone");
+var parseStruct = require("../utils/parsedbr");
+var Layout = require("../layouts/layout.js");
+var Style = require("../utils/style");
+var Link = require("./link");
+var LinkCol = require("./linkcol");
+var Residue = require("./residue");
+var ResidueCol = require("./residuecol");
+
+var Structure = Backbone.Model.extend({
+    initialize: function(seq, dotbr, layout){
+        this.set("seq", seq);
+        this.set("dotbr", dotbr);
+        this.set("layout", layout);
+        this.set("style", new Style("standard"));
+        this.set("renderSwitch", true);
+        this.defineStructure();
+        //event listening
+        this.on("change:renderSwitch", this.defineStructure);
+    },
+    reconstruct: function(){
+        //Rebuilds the sequence string and dot-bracket notation
+        //after an hbond has been inserted
+    	var seq = "";
+    	var dotbr = [];
+    	var partner;
+        var res = this.get("residues");
+
+    	for(var i=0; i<res.length; i++){
+    		seq += res.at(i).get("name");
+    		partner = this.getPartner(i.toString());
+    		if(partner === -1){
+    			dotbr[i] = ".";
+    		}
+    		else if(partner > i){
+    			dotbr[i] = "(";
+    			dotbr[partner] = ")";
+    		}
+    		else {
+    			continue;
+    		}
+    	}
+        this.set("seq", seq);
+        this.set("dotbr", dotbr.join(""));
+        this.set("renderSwitch", !this.get("renderSwitch"));
+    },
+    defineStructure: function(){
+        var seq = this.get("seq"),
+            dotbr = this.get("dotbr"),
+            layout = this.get("layout"),
+            style = this.get("style");
+
+        //set residues
+        var resCol = new ResidueCol();
+        var graph = parseStruct.parseDbr(seq, dotbr);
+        var coords = new Layout(layout, graph.nodes, graph.links).getCoords();
+        for(var i=0; i<graph.nodes.length; i++){
+            resCol.add(new Residue({
+                name: graph.nodes[i].name,
+                color: style.getColor(graph.nodes[i].name),
+                x: coords[i].x,
+                y: coords[i].y,
+                id: i.toString()
+            }));
+        }
+        this.set("residues", resCol);
+
+        //set bonds
+        var linkCol = new LinkCol(null, style, resCol);
+        for(var i=0; i<graph.links.length; i++){
+            linkCol.add(new Link({
+                id: graph.links[i].source + "to" + graph.links[i].target,
+                source: graph.links[i].source.toString(),
+                target: graph.links[i].target.toString(),
+                label: graph.links[i].type,
+                weight: style.getWeight(graph.links[i].type),
+                color: style.getColor(graph.links[i].type)
+            }));
+        }
+        this.set("links", linkCol);
+        this.listenTo(this.get("links"), "update", this.reconstruct);
+    },
+    toCytoscape: function(){
+        //Convert the residue and link collections into the element
+        //list format expected by CytoscapeJS
+        var elements = [];
+        var el;
+
+        var nodes = this.get("residues");
+        for(var i = 0; i < nodes.length; i++){
+            el = nodes.at(i);
+            elements.push({
+                group: el.get("group"),
+                data: {
+                    id: el.get("id"),
+                    label: el.get("name"),
+                    type: "residue"
+                },
+                position: {
+                    x: el.get("x"),
+                    y: el.get("y")
+                },
+                selected: el.get("selected"),
+                selectable: el.get("selectable"),
+                locked: el.get("locked"),
+                grabbable: el.get("grabbable"),
+                css: {
+                    'background-color': el.get("color")
+                }
+            });
+        }
+        var links = this.get("links");
+        for(var i = 0; i < links.length; i++){
+            el = links.at(i);
+            elements.push({
+                group: el.get("group"),
+                data: {
+                    id: el.get("id"),
+                    source: el.get("source"),
+                    target: el.get("target"),
+                    label: el.get("label"),
+                    weight: el.get("weight")
+                },
+                css: {
+                    'line-color': el.get("color"),
+                    'width': el.get("weight")
+                }
+            });
+        }
+        return elements;
+    },
+    getPartner: function(srcIndex){
+        //Returns the base-pairing partner of a nucleotide;
+        //-1 means there is no partner
+        var links = this.get("links");
+        var partner = -1;
+
+        for(var i=0; i<links.length; i++){
+            if(links.at(i).get("label") !== "phosphodiester"){
+                if(links.at(i).get("source") === srcIndex){
+                    partner = links.at(i).get("target");
+                    break;
+                }
+                else if(links.at(i).get("target") === srcIndex){
+                    partner = links.at(i).get("source");
+                    break;
+                }
+            }
+        }
+        return partner;
+    }
+});
+
+module.exports = Structure;
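
Structure ties the pieces together: it parses the dot-bracket string, lays
the residues out, and exposes toCytoscape() for rendering. A sketch of the
intended flow, assuming cytoscape is available as the renderer and that
"naview" names one of the bundled layouts (the container id is hypothetical):

    var Structure = require("./structure");
    var cytoscape = require("cytoscape");

    // hairpin: three G-C pairs closing a loop of three adenines
    var struct = new Structure("GGGAAACCC", "(((...)))", "naview");

    cytoscape({
        container: document.getElementById("rna"),
        elements: struct.toCytoscape()
    });
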
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/.Rhistory b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/.Rhistory
new file mode 100644
index 0000000..e69de29
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/lasso.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/lasso.js
new file mode 100644
index 0000000..401cf1a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/lasso.js
@@ -0,0 +1,35 @@
+//Lasso functionality: extends jQuery with a freehand selection tool
+//that captures mouse coordinates while the left button is held down
+var setupLasso = module.exports = function(){
+    var $ = require('jquery');
+    $.fn.extend({
+        lasso: function () {
+            this.mousedown(function (e) {
+                // left mouse down switches on "capturing mode"
+                if (e.which === 1 && !$(this).is(".lassoRunning")) {
+                    $(this).addClass("lassoRunning");
+                    $(this).data("lassoPoints", []);
+                    $(this).trigger("lassoBegin");
+                }
+            });
+            this.mouseup(function (e) {
+                // left mouse up ends "capturing mode" + triggers "Done" event
+                if (e.which === 1 && $(this).is(".lassoRunning")) {
+                    $(this).removeClass("lassoRunning");
+                    $(this).trigger("lassoDone", [$(this).data("lassoPoints")]);
+                }
+            });
+            this.mousemove(function (e) {
+                // mouse move captures coordinates + triggers "Point" event
+                if ($(this).is(".lassoRunning")) {
+                    var px = (e.offsetX || e.clientX - $(e.target).offset().left + window.pageXOffset);
+                    var py = (e.offsetY || e.clientY - $(e.target).offset().top + window.pageYOffset);
+                    var point = [px, py];
+                    $(this).data("lassoPoints").push(point);
+                    $(this).trigger("lassoPoint", [point]);
+                }
+            });
+            return this;
+        }
+    });
+};
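
The plugin emits lassoBegin, lassoPoint, and lassoDone events while the left
button is held. A usage sketch (the selector is hypothetical):

    var setupLasso = require("./lasso");
    var $ = require("jquery");

    setupLasso(); // registers $.fn.lasso
    $("#canvas").lasso()
        .on("lassoPoint", function(e, point){
            console.log("captured", point); // [x, y]
        })
        .on("lassoDone", function(e, points){
            console.log(points.length + " points collected");
        });
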
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/parsedbr.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/parsedbr.js
new file mode 100644
index 0000000..6472118
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/parsedbr.js
@@ -0,0 +1,58 @@
+var exp = {};
+
+exp.parseDbr = function(seq, dotbr){
+    var round = [],
+        nodes = [],
+        links = [];
+
+    var src, type;
+    //Indices corresponding to opening brackets are pushed onto a stack
+    //and are popped when a closing bracket is read.
+    //Links (hbonds, phosphodiester bonds) are created as needed.
+    for(var i = 0; i < seq.length; i++){
+        nodes.push({name: seq[i].toUpperCase()});
+        if(i > 0){
+            links.push({source: i-1, target: i, type: "phosphodiester"});
+        }
+        switch(dotbr[i]){
+            case "(":
+                round.push(i);
+                break;
+            case ")":
+                src = round.pop();
+                type = exp.getType(exp.isWatsonCrick(seq[src], seq[i]));
+                links.push({source: src, target: i, type: type});
+                break;
+            case ".":
+                break;
+        }
+    }
+    //Return graph in object format
+    return {
+        nodes: nodes,
+        links: links
+    };
+};
+
+exp.isWatsonCrick = function(nucOne, nucTwo){
+    var watsonCrick = false;
+    if((nucOne === "G" && nucTwo === "C") ||
+        (nucOne === "C" && nucTwo === "G") ||
+        (nucOne === "A" && nucTwo === "U") ||
+        (nucOne === "U" && nucTwo === "A") ||
+        (nucOne === "A" && nucTwo === "T") ||
+        (nucOne === "T" && nucTwo === "A")) {
+        watsonCrick = true;
+    }
+    return watsonCrick;
+};
+
+exp.getType = function(watsonCrick){
+    if(watsonCrick){
+        return "hbond";
+    } else {
+        return "violation";
+    }
+};
+
+module.exports = exp;
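
parseDbr walks the sequence once, pushing the index of every "(" onto a stack
and pairing it with the next ")". A worked example:

    var parseStruct = require("./parsedbr");

    var graph = parseStruct.parseDbr("GCAU", "(..)");
    // graph.nodes -> [{name:"G"}, {name:"C"}, {name:"A"}, {name:"U"}]
    // graph.links -> three phosphodiester bonds (0-1, 1-2, 2-3) plus
    //                {source: 0, target: 3, type: "violation"}, since
    //                G-U is not in the Watson-Crick list above
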
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/spectrum.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/spectrum.js
new file mode 100644
index 0000000..5667329
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/spectrum.js
@@ -0,0 +1,2287 @@
+// Spectrum Colorpicker v1.6.0
+// https://github.com/bgrins/spectrum
+// Author: Brian Grinstead
+// License: MIT
+
+(function (factory) {
+    "use strict";
+
+    if (typeof define === 'function' && define.amd) { // AMD
+        define(['jquery'], factory);
+    }
+    else if (typeof exports == "object" && typeof module == "object") { // CommonJS
+        module.exports = factory;
+    }
+    else { // Browser
+        factory(jQuery);
+    }
+})(function($, undefined) {
+    "use strict";
+
+    var defaultOpts = {
+
+        // Callbacks
+        beforeShow: noop,
+        move: noop,
+        change: noop,
+        show: noop,
+        hide: noop,
+
+        // Options
+        color: false,
+        flat: false,
+        showInput: false,
+        allowEmpty: false,
+        showButtons: true,
+        clickoutFiresChange: false,
+        showInitial: false,
+        showPalette: false,
+        showPaletteOnly: false,
+        hideAfterPaletteSelect: false,
+        togglePaletteOnly: false,
+        showSelectionPalette: true,
+        localStorageKey: false,
+        appendTo: "body",
+        maxSelectionSize: 7,
+        cancelText: "cancel",
+        chooseText: "choose",
+        togglePaletteMoreText: "more",
+        togglePaletteLessText: "less",
+        clearText: "Clear Color Selection",
+        noColorSelectedText: "No Color Selected",
+        preferredFormat: false,
+        className: "", // Deprecated - use containerClassName and replacerClassName instead.
+        containerClassName: "",
+        replacerClassName: "",
+        showAlpha: false,
+        theme: "sp-light",
+        palette: [["#ffffff", "#000000", "#ff0000", "#ff8000", "#ffff00", "#008000", "#0000ff", "#4b0082", "#9400d3"]],
+        selectionPalette: [],
+        disabled: false,
+        offset: null
+    },
+    spectrums = [],
+    IE = !!/msie/i.exec( window.navigator.userAgent ),
+    rgbaSupport = (function() {
+        function contains( str, substr ) {
+            return !!~('' + str).indexOf(substr);
+        }
+
+        var elem = document.createElement('div');
+        var style = elem.style;
+        style.cssText = 'background-color:rgba(0,0,0,.5)';
+        return contains(style.backgroundColor, 'rgba') || contains(style.backgroundColor, 'hsla');
+    })(),
+    inputTypeColorSupport = (function() {
+        var colorInput = $("<input type='color' value='!' />")[0];
+        return colorInput.type === "color" && colorInput.value !== "!";
+    })(),
+    replaceInput = [
+        "<div class='sp-replacer'>",
+            "<div class='sp-preview'><div class='sp-preview-inner'></div></div>",
+            "<div class='sp-dd'>▼</div>",
+        "</div>"
+    ].join(''),
+    markup = (function () {
+
+        // IE does not support gradients with multiple stops, so we need to simulate
+        //  that for the rainbow slider with 8 divs that each have a single gradient
+        var gradientFix = "";
+        if (IE) {
+            for (var i = 1; i <= 6; i++) {
+                gradientFix += "<div class='sp-" + i + "'></div>";
+            }
+        }
+
+        return [
+            "<div class='sp-container sp-hidden'>",
+                "<div class='sp-palette-container'>",
+                    "<div class='sp-palette sp-thumb sp-cf'></div>",
+                    "<div class='sp-palette-button-container sp-cf'>",
+                        "<button type='button' class='sp-palette-toggle'></button>",
+                    "</div>",
+                "</div>",
+                "<div class='sp-picker-container'>",
+                    "<div class='sp-top sp-cf'>",
+                        "<div class='sp-fill'></div>",
+                        "<div class='sp-top-inner'>",
+                            "<div class='sp-color'>",
+                                "<div class='sp-sat'>",
+                                    "<div class='sp-val'>",
+                                        "<div class='sp-dragger'></div>",
+                                    "</div>",
+                                "</div>",
+                            "</div>",
+                            "<div class='sp-clear sp-clear-display'>",
+                            "</div>",
+                            "<div class='sp-hue'>",
+                                "<div class='sp-slider'></div>",
+                                gradientFix,
+                            "</div>",
+                        "</div>",
+                        "<div class='sp-alpha'><div class='sp-alpha-inner'><div class='sp-alpha-handle'></div></div></div>",
+                    "</div>",
+                    "<div class='sp-input-container sp-cf'>",
+                        "<input class='sp-input' type='text' spellcheck='false'  />",
+                    "</div>",
+                    "<div class='sp-initial sp-thumb sp-cf'></div>",
+                    "<div class='sp-button-container sp-cf'>",
+                        "<a class='sp-cancel' href='#'></a>",
+                        "<button type='button' class='sp-choose'></button>",
+                    "</div>",
+                "</div>",
+            "</div>"
+        ].join("");
+    })();
+
+    function paletteTemplate (p, color, className, opts) {
+        var html = [];
+        for (var i = 0; i < p.length; i++) {
+            var current = p[i];
+            if(current) {
+                var tiny = tinycolor(current);
+                var c = tiny.toHsl().l < 0.5 ? "sp-thumb-el sp-thumb-dark" : "sp-thumb-el sp-thumb-light";
+                c += (tinycolor.equals(color, current)) ? " sp-thumb-active" : "";
+                var formattedString = tiny.toString(opts.preferredFormat || "rgb");
+                var swatchStyle = rgbaSupport ? ("background-color:" + tiny.toRgbString()) : "filter:" + tiny.toFilter();
+                html.push('<span title="' + formattedString + '" data-color="' + tiny.toRgbString() + '" class="' + c + '"><span class="sp-thumb-inner" style="' + swatchStyle + ';" /></span>');
+            } else {
+                var cls = 'sp-clear-display';
+                html.push($('<div />')
+                    .append($('<span data-color="" style="background-color:transparent;" class="' + cls + '"></span>')
+                        .attr('title', opts.noColorSelectedText)
+                    )
+                    .html()
+                );
+            }
+        }
+        return "<div class='sp-cf " + className + "'>" + html.join('') + "</div>";
+    }
+
+    function hideAll() {
+        for (var i = 0; i < spectrums.length; i++) {
+            if (spectrums[i]) {
+                spectrums[i].hide();
+            }
+        }
+    }
+
+    function instanceOptions(o, callbackContext) {
+        var opts = $.extend({}, defaultOpts, o);
+        opts.callbacks = {
+            'move': bind(opts.move, callbackContext),
+            'change': bind(opts.change, callbackContext),
+            'show': bind(opts.show, callbackContext),
+            'hide': bind(opts.hide, callbackContext),
+            'beforeShow': bind(opts.beforeShow, callbackContext)
+        };
+
+        return opts;
+    }
+
+    function spectrum(element, o) {
+
+        var opts = instanceOptions(o, element),
+            flat = opts.flat,
+            showSelectionPalette = opts.showSelectionPalette,
+            localStorageKey = opts.localStorageKey,
+            theme = opts.theme,
+            callbacks = opts.callbacks,
+            resize = throttle(reflow, 10),
+            visible = false,
+            dragWidth = 0,
+            dragHeight = 0,
+            dragHelperHeight = 0,
+            slideHeight = 0,
+            slideWidth = 0,
+            alphaWidth = 0,
+            alphaSlideHelperWidth = 0,
+            slideHelperHeight = 0,
+            currentHue = 0,
+            currentSaturation = 0,
+            currentValue = 0,
+            currentAlpha = 1,
+            palette = [],
+            paletteArray = [],
+            paletteLookup = {},
+            selectionPalette = opts.selectionPalette.slice(0),
+            maxSelectionSize = opts.maxSelectionSize,
+            draggingClass = "sp-dragging",
+            shiftMovementDirection = null;
+
+        var doc = element.ownerDocument,
+            body = doc.body,
+            boundElement = $(element),
+            disabled = false,
+            container = $(markup, doc).addClass(theme),
+            pickerContainer = container.find(".sp-picker-container"),
+            dragger = container.find(".sp-color"),
+            dragHelper = container.find(".sp-dragger"),
+            slider = container.find(".sp-hue"),
+            slideHelper = container.find(".sp-slider"),
+            alphaSliderInner = container.find(".sp-alpha-inner"),
+            alphaSlider = container.find(".sp-alpha"),
+            alphaSlideHelper = container.find(".sp-alpha-handle"),
+            textInput = container.find(".sp-input"),
+            paletteContainer = container.find(".sp-palette"),
+            initialColorContainer = container.find(".sp-initial"),
+            cancelButton = container.find(".sp-cancel"),
+            clearButton = container.find(".sp-clear"),
+            chooseButton = container.find(".sp-choose"),
+            toggleButton = container.find(".sp-palette-toggle"),
+            isInput = boundElement.is("input"),
+            isInputTypeColor = isInput && inputTypeColorSupport && boundElement.attr("type") === "color",
+            shouldReplace = isInput && !flat,
+            replacer = (shouldReplace) ? $(replaceInput).addClass(theme).addClass(opts.className).addClass(opts.replacerClassName) : $([]),
+            offsetElement = (shouldReplace) ? replacer : boundElement,
+            previewElement = replacer.find(".sp-preview-inner"),
+            initialColor = opts.color || (isInput && boundElement.val()),
+            colorOnShow = false,
+            preferredFormat = opts.preferredFormat,
+            currentPreferredFormat = preferredFormat,
+            clickoutFiresChange = !opts.showButtons || opts.clickoutFiresChange,
+            isEmpty = !initialColor,
+            allowEmpty = opts.allowEmpty && !isInputTypeColor;
+
+        function applyOptions() {
+
+            if (opts.showPaletteOnly) {
+                opts.showPalette = true;
+            }
+
+            toggleButton.text(opts.showPaletteOnly ? opts.togglePaletteMoreText : opts.togglePaletteLessText);
+
+            if (opts.palette) {
+                palette = opts.palette.slice(0);
+                paletteArray = $.isArray(palette[0]) ? palette : [palette];
+                paletteLookup = {};
+                for (var i = 0; i < paletteArray.length; i++) {
+                    for (var j = 0; j < paletteArray[i].length; j++) {
+                        var rgb = tinycolor(paletteArray[i][j]).toRgbString();
+                        paletteLookup[rgb] = true;
+                    }
+                }
+            }
+
+            container.toggleClass("sp-flat", flat);
+            container.toggleClass("sp-input-disabled", !opts.showInput);
+            container.toggleClass("sp-alpha-enabled", opts.showAlpha);
+            container.toggleClass("sp-clear-enabled", allowEmpty);
+            container.toggleClass("sp-buttons-disabled", !opts.showButtons);
+            container.toggleClass("sp-palette-buttons-disabled", !opts.togglePaletteOnly);
+            container.toggleClass("sp-palette-disabled", !opts.showPalette);
+            container.toggleClass("sp-palette-only", opts.showPaletteOnly);
+            container.toggleClass("sp-initial-disabled", !opts.showInitial);
+            container.addClass(opts.className).addClass(opts.containerClassName);
+
+            reflow();
+        }
+
+        function initialize() {
+
+            if (IE) {
+                container.find("*:not(input)").attr("unselectable", "on");
+            }
+
+            applyOptions();
+
+            if (shouldReplace) {
+                boundElement.after(replacer).hide();
+            }
+
+            if (!allowEmpty) {
+                clearButton.hide();
+            }
+
+            if (flat) {
+                boundElement.after(container).hide();
+            }
+            else {
+
+                var appendTo = opts.appendTo === "parent" ? boundElement.parent() : $(opts.appendTo);
+                if (appendTo.length !== 1) {
+                    appendTo = $("body");
+                }
+
+                appendTo.append(container);
+            }
+
+            updateSelectionPaletteFromStorage();
+
+            offsetElement.bind("click.spectrum touchstart.spectrum", function (e) {
+                if (!disabled) {
+                    toggle();
+                }
+
+                e.stopPropagation();
+
+                if (!$(e.target).is("input")) {
+                    e.preventDefault();
+                }
+            });
+
+            if(boundElement.is(":disabled") || (opts.disabled === true)) {
+                disable();
+            }
+
+            // Prevent clicks from bubbling up to document.  This would cause it to be hidden.
+            container.click(stopPropagation);
+
+            // Handle user typed input
+            textInput.change(setFromTextInput);
+            textInput.bind("paste", function () {
+                setTimeout(setFromTextInput, 1);
+            });
+            textInput.keydown(function (e) { if (e.keyCode == 13) { setFromTextInput(); } });
+
+            cancelButton.text(opts.cancelText);
+            cancelButton.bind("click.spectrum", function (e) {
+                e.stopPropagation();
+                e.preventDefault();
+                revert();
+                hide();
+            });
+
+            clearButton.attr("title", opts.clearText);
+            clearButton.bind("click.spectrum", function (e) {
+                e.stopPropagation();
+                e.preventDefault();
+                isEmpty = true;
+                move();
+
+                if(flat) {
+                    //for the flat style, this is a change event
+                    updateOriginalInput(true);
+                }
+            });
+
+            chooseButton.text(opts.chooseText);
+            chooseButton.bind("click.spectrum", function (e) {
+                e.stopPropagation();
+                e.preventDefault();
+
+                if (isValid()) {
+                    updateOriginalInput(true);
+                    hide();
+                }
+            });
+
+            toggleButton.text(opts.showPaletteOnly ? opts.togglePaletteMoreText : opts.togglePaletteLessText);
+            toggleButton.bind("click.spectrum", function (e) {
+                e.stopPropagation();
+                e.preventDefault();
+
+                opts.showPaletteOnly = !opts.showPaletteOnly;
+
+                // To make sure the Picker area is drawn on the right, next to the
+                // Palette area (and not below the palette), first move the Palette
+                // to the left to make space for the picker, plus 5px extra.
+                // The 'applyOptions' function puts the whole container back into place
+                // and takes care of the button-text and the sp-palette-only CSS class.
+                if (!opts.showPaletteOnly && !flat) {
+                    container.css('left', '-=' + (pickerContainer.outerWidth(true) + 5));
+                }
+                applyOptions();
+            });
+
+            draggable(alphaSlider, function (dragX, dragY, e) {
+                currentAlpha = (dragX / alphaWidth);
+                isEmpty = false;
+                if (e.shiftKey) {
+                    currentAlpha = Math.round(currentAlpha * 10) / 10;
+                }
+
+                move();
+            }, dragStart, dragStop);
+
+            draggable(slider, function (dragX, dragY) {
+                currentHue = parseFloat(dragY / slideHeight);
+                isEmpty = false;
+                if (!opts.showAlpha) {
+                    currentAlpha = 1;
+                }
+                move();
+            }, dragStart, dragStop);
+
+            draggable(dragger, function (dragX, dragY, e) {
+
+                // shift+drag should snap the movement to either the x or y axis.
+                if (!e.shiftKey) {
+                    shiftMovementDirection = null;
+                }
+                else if (!shiftMovementDirection) {
+                    var oldDragX = currentSaturation * dragWidth;
+                    var oldDragY = dragHeight - (currentValue * dragHeight);
+                    var furtherFromX = Math.abs(dragX - oldDragX) > Math.abs(dragY - oldDragY);
+
+                    shiftMovementDirection = furtherFromX ? "x" : "y";
+                }
+
+                var setSaturation = !shiftMovementDirection || shiftMovementDirection === "x";
+                var setValue = !shiftMovementDirection || shiftMovementDirection === "y";
+
+                if (setSaturation) {
+                    currentSaturation = parseFloat(dragX / dragWidth);
+                }
+                if (setValue) {
+                    currentValue = parseFloat((dragHeight - dragY) / dragHeight);
+                }
+
+                isEmpty = false;
+                if (!opts.showAlpha) {
+                    currentAlpha = 1;
+                }
+
+                move();
+
+            }, dragStart, dragStop);
+
+            if (!!initialColor) {
+                set(initialColor);
+
+                // In case color was black - update the preview UI and set the format
+                // since the set function will not run (default color is black).
+                updateUI();
+                currentPreferredFormat = preferredFormat || tinycolor(initialColor).format;
+
+                addColorToSelectionPalette(initialColor);
+            }
+            else {
+                updateUI();
+            }
+
+            if (flat) {
+                show();
+            }
+
+            function paletteElementClick(e) {
+                if (e.data && e.data.ignore) {
+                    set($(e.target).closest(".sp-thumb-el").data("color"));
+                    move();
+                }
+                else {
+                    set($(e.target).closest(".sp-thumb-el").data("color"));
+                    move();
+                    updateOriginalInput(true);
+                    if (opts.hideAfterPaletteSelect) {
+                      hide();
+                    }
+                }
+
+                return false;
+            }
+
+            var paletteEvent = IE ? "mousedown.spectrum" : "click.spectrum touchstart.spectrum";
+            paletteContainer.delegate(".sp-thumb-el", paletteEvent, paletteElementClick);
+            initialColorContainer.delegate(".sp-thumb-el:nth-child(1)", paletteEvent, { ignore: true }, paletteElementClick);
+        }
+
+        function updateSelectionPaletteFromStorage() {
+
+            if (localStorageKey && window.localStorage) {
+
+                // Migrate old palettes over to new format.  May want to remove this eventually.
+                try {
+                    var oldPalette = window.localStorage[localStorageKey].split(",#");
+                    if (oldPalette.length > 1) {
+                        delete window.localStorage[localStorageKey];
+                        $.each(oldPalette, function(i, c) {
+                             addColorToSelectionPalette(c);
+                        });
+                    }
+                }
+                catch(e) { }
+
+                try {
+                    selectionPalette = window.localStorage[localStorageKey].split(";");
+                }
+                catch (e) { }
+            }
+        }
+
+        function addColorToSelectionPalette(color) {
+            if (showSelectionPalette) {
+                var rgb = tinycolor(color).toRgbString();
+                if (!paletteLookup[rgb] && $.inArray(rgb, selectionPalette) === -1) {
+                    selectionPalette.push(rgb);
+                    while(selectionPalette.length > maxSelectionSize) {
+                        selectionPalette.shift();
+                    }
+                }
+
+                if (localStorageKey && window.localStorage) {
+                    try {
+                        window.localStorage[localStorageKey] = selectionPalette.join(";");
+                    }
+                    catch(e) { }
+                }
+            }
+        }
+
+        function getUniqueSelectionPalette() {
+            var unique = [];
+            if (opts.showPalette) {
+                for (var i = 0; i < selectionPalette.length; i++) {
+                    var rgb = tinycolor(selectionPalette[i]).toRgbString();
+
+                    if (!paletteLookup[rgb]) {
+                        unique.push(selectionPalette[i]);
+                    }
+                }
+            }
+
+            return unique.reverse().slice(0, opts.maxSelectionSize);
+        }
+
+        function drawPalette() {
+
+            var currentColor = get();
+
+            var html = $.map(paletteArray, function (palette, i) {
+                return paletteTemplate(palette, currentColor, "sp-palette-row sp-palette-row-" + i, opts);
+            });
+
+            updateSelectionPaletteFromStorage();
+
+            if (selectionPalette) {
+                html.push(paletteTemplate(getUniqueSelectionPalette(), currentColor, "sp-palette-row sp-palette-row-selection", opts));
+            }
+
+            paletteContainer.html(html.join(""));
+        }
+
+        function drawInitial() {
+            if (opts.showInitial) {
+                var initial = colorOnShow;
+                var current = get();
+                initialColorContainer.html(paletteTemplate([initial, current], current, "sp-palette-row-initial", opts));
+            }
+        }
+
+        function dragStart() {
+            if (dragHeight <= 0 || dragWidth <= 0 || slideHeight <= 0) {
+                reflow();
+            }
+            container.addClass(draggingClass);
+            shiftMovementDirection = null;
+            boundElement.trigger('dragstart.spectrum', [ get() ]);
+        }
+
+        function dragStop() {
+            container.removeClass(draggingClass);
+            boundElement.trigger('dragstop.spectrum', [ get() ]);
+        }
+
+        function setFromTextInput() {
+
+            var value = textInput.val();
+
+            if ((value === null || value === "") && allowEmpty) {
+                set(null);
+                updateOriginalInput(true);
+            }
+            else {
+                var tiny = tinycolor(value);
+                if (tiny.isValid()) {
+                    set(tiny);
+                    updateOriginalInput(true);
+                }
+                else {
+                    textInput.addClass("sp-validation-error");
+                }
+            }
+        }
+
+        function toggle() {
+            if (visible) {
+                hide();
+            }
+            else {
+                show();
+            }
+        }
+
+        function show() {
+            var event = $.Event('beforeShow.spectrum');
+
+            if (visible) {
+                reflow();
+                return;
+            }
+
+            boundElement.trigger(event, [ get() ]);
+
+            if (callbacks.beforeShow(get()) === false || event.isDefaultPrevented()) {
+                return;
+            }
+
+            hideAll();
+            visible = true;
+
+            $(doc).bind("click.spectrum", clickout);
+            $(window).bind("resize.spectrum", resize);
+            replacer.addClass("sp-active");
+            container.removeClass("sp-hidden");
+
+            reflow();
+            updateUI();
+
+            colorOnShow = get();
+
+            drawInitial();
+            callbacks.show(colorOnShow);
+            boundElement.trigger('show.spectrum', [ colorOnShow ]);
+        }
+
+        function clickout(e) {
+            // Return on right click.
+            if (e.button == 2) { return; }
+
+            if (clickoutFiresChange) {
+                updateOriginalInput(true);
+            }
+            else {
+                revert();
+            }
+            hide();
+        }
+
+        function hide() {
+            // Return if hiding is unnecessary
+            if (!visible || flat) { return; }
+            visible = false;
+
+            $(doc).unbind("click.spectrum", clickout);
+            $(window).unbind("resize.spectrum", resize);
+
+            replacer.removeClass("sp-active");
+            container.addClass("sp-hidden");
+
+            callbacks.hide(get());
+            boundElement.trigger('hide.spectrum', [ get() ]);
+        }
+
+        function revert() {
+            set(colorOnShow, true);
+        }
+
+        function set(color, ignoreFormatChange) {
+            if (tinycolor.equals(color, get())) {
+                // Update UI just in case a validation error needs
+                // to be cleared.
+                updateUI();
+                return;
+            }
+
+            var newColor, newHsv;
+            if (!color && allowEmpty) {
+                isEmpty = true;
+            } else {
+                isEmpty = false;
+                newColor = tinycolor(color);
+                newHsv = newColor.toHsv();
+
+                currentHue = (newHsv.h % 360) / 360;
+                currentSaturation = newHsv.s;
+                currentValue = newHsv.v;
+                currentAlpha = newHsv.a;
+            }
+            updateUI();
+
+            if (newColor && newColor.isValid() && !ignoreFormatChange) {
+                currentPreferredFormat = preferredFormat || newColor.getFormat();
+            }
+        }
+
+        function get(opts) {
+            opts = opts || { };
+
+            if (allowEmpty && isEmpty) {
+                return null;
+            }
+
+            return tinycolor.fromRatio({
+                h: currentHue,
+                s: currentSaturation,
+                v: currentValue,
+                a: Math.round(currentAlpha * 100) / 100
+            }, { format: opts.format || currentPreferredFormat });
+        }
+
+        function isValid() {
+            return !textInput.hasClass("sp-validation-error");
+        }
+
+        function move() {
+            updateUI();
+
+            callbacks.move(get());
+            boundElement.trigger('move.spectrum', [ get() ]);
+        }
+
+        function updateUI() {
+
+            textInput.removeClass("sp-validation-error");
+
+            updateHelperLocations();
+
+            // Update dragger background color (gradients take care of saturation and value).
+            var flatColor = tinycolor.fromRatio({ h: currentHue, s: 1, v: 1 });
+            dragger.css("background-color", flatColor.toHexString());
+
+            // Get a format that alpha will be included in (hex and names ignore alpha)
+            var format = currentPreferredFormat;
+            if (currentAlpha < 1 && !(currentAlpha === 0 && format === "name")) {
+                if (format === "hex" || format === "hex3" || format === "hex6" || format === "name") {
+                    format = "rgb";
+                }
+            }
+
+            var realColor = get({ format: format }),
+                displayColor = '';
+
+             //reset background info for preview element
+            previewElement.removeClass("sp-clear-display");
+            previewElement.css('background-color', 'transparent');
+
+            if (!realColor && allowEmpty) {
+                // Update the replaced elements background with icon indicating no color selection
+                previewElement.addClass("sp-clear-display");
+            }
+            else {
+                var realHex = realColor.toHexString(),
+                    realRgb = realColor.toRgbString();
+
+                // Update the replaced elements background color (with actual selected color)
+                if (rgbaSupport || realColor.alpha === 1) {
+                    previewElement.css("background-color", realRgb);
+                }
+                else {
+                    previewElement.css("background-color", "transparent");
+                    previewElement.css("filter", realColor.toFilter());
+                }
+
+                if (opts.showAlpha) {
+                    var rgb = realColor.toRgb();
+                    rgb.a = 0;
+                    var realAlpha = tinycolor(rgb).toRgbString();
+                    var gradient = "linear-gradient(left, " + realAlpha + ", " + realHex + ")";
+
+                    if (IE) {
+                        alphaSliderInner.css("filter", tinycolor(realAlpha).toFilter({ gradientType: 1 }, realHex));
+                    }
+                    else {
+                        alphaSliderInner.css("background", "-webkit-" + gradient);
+                        alphaSliderInner.css("background", "-moz-" + gradient);
+                        alphaSliderInner.css("background", "-ms-" + gradient);
+                        // Use current syntax gradient on unprefixed property.
+                        alphaSliderInner.css("background",
+                            "linear-gradient(to right, " + realAlpha + ", " + realHex + ")");
+                    }
+                }
+
+                displayColor = realColor.toString(format);
+            }
+
+            // Update the text entry input as it changes happen
+            if (opts.showInput) {
+                textInput.val(displayColor);
+            }
+
+            if (opts.showPalette) {
+                drawPalette();
+            }
+
+            drawInitial();
+        }
+
+        function updateHelperLocations() {
+            var s = currentSaturation;
+            var v = currentValue;
+
+            if(allowEmpty && isEmpty) {
+                //if selected color is empty, hide the helpers
+                alphaSlideHelper.hide();
+                slideHelper.hide();
+                dragHelper.hide();
+            }
+            else {
+                //make sure helpers are visible
+                alphaSlideHelper.show();
+                slideHelper.show();
+                dragHelper.show();
+
+                // Where to show the little circle that displays your currently selected color
+                var dragX = s * dragWidth;
+                var dragY = dragHeight - (v * dragHeight);
+                dragX = Math.max(
+                    -dragHelperHeight,
+                    Math.min(dragWidth - dragHelperHeight, dragX - dragHelperHeight)
+                );
+                dragY = Math.max(
+                    -dragHelperHeight,
+                    Math.min(dragHeight - dragHelperHeight, dragY - dragHelperHeight)
+                );
+                dragHelper.css({
+                    "top": dragY + "px",
+                    "left": dragX + "px"
+                });
+
+                var alphaX = currentAlpha * alphaWidth;
+                alphaSlideHelper.css({
+                    "left": (alphaX - (alphaSlideHelperWidth / 2)) + "px"
+                });
+
+                // Where to show the bar that displays your current selected hue
+                var slideY = (currentHue) * slideHeight;
+                slideHelper.css({
+                    "top": (slideY - slideHelperHeight) + "px"
+                });
+            }
+        }
+
+        function updateOriginalInput(fireCallback) {
+            var color = get(),
+                displayColor = '',
+                hasChanged = !tinycolor.equals(color, colorOnShow);
+
+            if (color) {
+                displayColor = color.toString(currentPreferredFormat);
+                // Update the selection palette with the current color
+                addColorToSelectionPalette(color);
+            }
+
+            if (isInput) {
+                boundElement.val(displayColor);
+            }
+
+            if (fireCallback && hasChanged) {
+                callbacks.change(color);
+                boundElement.trigger('change', [ color ]);
+            }
+        }
+
+        function reflow() {
+            dragWidth = dragger.width();
+            dragHeight = dragger.height();
+            dragHelperHeight = dragHelper.height();
+            slideWidth = slider.width();
+            slideHeight = slider.height();
+            slideHelperHeight = slideHelper.height();
+            alphaWidth = alphaSlider.width();
+            alphaSlideHelperWidth = alphaSlideHelper.width();
+
+            if (!flat) {
+                container.css("position", "absolute");
+                if (opts.offset) {
+                    container.offset(opts.offset);
+                } else {
+                    container.offset(getOffset(container, offsetElement));
+                }
+            }
+
+            updateHelperLocations();
+
+            if (opts.showPalette) {
+                drawPalette();
+            }
+
+            boundElement.trigger('reflow.spectrum');
+        }
+
+        function destroy() {
+            boundElement.show();
+            offsetElement.unbind("click.spectrum touchstart.spectrum");
+            container.remove();
+            replacer.remove();
+            spectrums[spect.id] = null;
+        }
+
+        function option(optionName, optionValue) {
+            if (optionName === undefined) {
+                return $.extend({}, opts);
+            }
+            if (optionValue === undefined) {
+                return opts[optionName];
+            }
+
+            opts[optionName] = optionValue;
+            applyOptions();
+        }
+
+        function enable() {
+            disabled = false;
+            boundElement.attr("disabled", false);
+            offsetElement.removeClass("sp-disabled");
+        }
+
+        function disable() {
+            hide();
+            disabled = true;
+            boundElement.attr("disabled", true);
+            offsetElement.addClass("sp-disabled");
+        }
+
+        function setOffset(coord) {
+            opts.offset = coord;
+            reflow();
+        }
+
+        initialize();
+
+        var spect = {
+            show: show,
+            hide: hide,
+            toggle: toggle,
+            reflow: reflow,
+            option: option,
+            enable: enable,
+            disable: disable,
+            offset: setOffset,
+            set: function (c) {
+                set(c);
+                updateOriginalInput();
+            },
+            get: get,
+            destroy: destroy,
+            container: container
+        };
+
+        spect.id = spectrums.push(spect) - 1;
+
+        return spect;
+    }
+
+    /**
+    * getOffset - get the offset below/above and left/right of the input element depending on screen position
+    * Thanks https://github.com/jquery/jquery-ui/blob/master/ui/jquery.ui.datepicker.js
+    */
+    function getOffset(picker, input) {
+        var extraY = 0;
+        var dpWidth = picker.outerWidth();
+        var dpHeight = picker.outerHeight();
+        var inputHeight = input.outerHeight();
+        var doc = picker[0].ownerDocument;
+        var docElem = doc.documentElement;
+        var viewWidth = docElem.clientWidth + $(doc).scrollLeft();
+        var viewHeight = docElem.clientHeight + $(doc).scrollTop();
+        var offset = input.offset();
+        offset.top += inputHeight;
+
+        offset.left -=
+            Math.min(offset.left, (offset.left + dpWidth > viewWidth && viewWidth > dpWidth) ?
+            Math.abs(offset.left + dpWidth - viewWidth) : 0);
+
+        offset.top -=
+            Math.min(offset.top, ((offset.top + dpHeight > viewHeight && viewHeight > dpHeight) ?
+            Math.abs(dpHeight + inputHeight - extraY) : extraY));
+
+        return offset;
+    }
+
+    /**
+    * noop - do nothing
+    */
+    function noop() {
+
+    }
+
+    /**
+    * stopPropagation - small helper so inline handlers that only stop propagation read more clearly
+    */
+    function stopPropagation(e) {
+        e.stopPropagation();
+    }
+
+    /**
+    * Create a function bound to a given object
+    * Thanks to underscore.js
+    */
+    function bind(func, obj) {
+        var slice = Array.prototype.slice;
+        var args = slice.call(arguments, 2);
+        return function () {
+            return func.apply(obj, args.concat(slice.call(arguments)));
+        };
+    }
+
+    /**
+    * Lightweight drag helper.  Handles containment within the element, so that
+    * when dragging, the x is within [0,element.width] and y is within [0,element.height]
+    */
+    function draggable(element, onmove, onstart, onstop) {
+        onmove = onmove || function () { };
+        onstart = onstart || function () { };
+        onstop = onstop || function () { };
+        var doc = document;
+        var dragging = false;
+        var offset = {};
+        var maxHeight = 0;
+        var maxWidth = 0;
+        var hasTouch = ('ontouchstart' in window);
+
+        var duringDragEvents = {};
+        duringDragEvents["selectstart"] = prevent;
+        duringDragEvents["dragstart"] = prevent;
+        duringDragEvents["touchmove mousemove"] = move;
+        duringDragEvents["touchend mouseup"] = stop;
+
+        function prevent(e) {
+            if (e.stopPropagation) {
+                e.stopPropagation();
+            }
+            if (e.preventDefault) {
+                e.preventDefault();
+            }
+            e.returnValue = false;
+        }
+
+        function move(e) {
+            if (dragging) {
+                // Mouseup happened outside of window
+                if (IE && doc.documentMode < 9 && !e.button) {
+                    return stop();
+                }
+
+                var touches = e.originalEvent && e.originalEvent.touches;
+                var pageX = touches ? touches[0].pageX : e.pageX;
+                var pageY = touches ? touches[0].pageY : e.pageY;
+
+                var dragX = Math.max(0, Math.min(pageX - offset.left, maxWidth));
+                var dragY = Math.max(0, Math.min(pageY - offset.top, maxHeight));
+
+                if (hasTouch) {
+                    // Stop scrolling in iOS
+                    prevent(e);
+                }
+
+                onmove.apply(element, [dragX, dragY, e]);
+            }
+        }
+
+        function start(e) {
+            var rightclick = (e.which) ? (e.which == 3) : (e.button == 2);
+
+            if (!rightclick && !dragging) {
+                if (onstart.apply(element, arguments) !== false) {
+                    dragging = true;
+                    maxHeight = $(element).height();
+                    maxWidth = $(element).width();
+                    offset = $(element).offset();
+
+                    $(doc).bind(duringDragEvents);
+                    $(doc.body).addClass("sp-dragging");
+
+                    if (!hasTouch) {
+                        move(e);
+                    }
+
+                    prevent(e);
+                }
+            }
+        }
+
+        function stop() {
+            if (dragging) {
+                $(doc).unbind(duringDragEvents);
+                $(doc.body).removeClass("sp-dragging");
+                onstop.apply(element, arguments);
+            }
+            dragging = false;
+        }
+
+        $(element).bind("touchstart mousedown", start);
+    }
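+
+    // Usage sketch (illustrative, not part of the library): constrain a drag
+    // to an element and receive clamped coordinates:
+    //   draggable(document.getElementById("pad"), function (dragX, dragY) {
+    //       console.log(dragX, dragY); // within [0, width] x [0, height]
+    //   });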
+
+    function throttle(func, wait, debounce) {
+        var timeout;
+        return function () {
+            var context = this, args = arguments;
+            var throttler = function () {
+                timeout = null;
+                func.apply(context, args);
+            };
+            if (debounce) clearTimeout(timeout);
+            if (debounce || !timeout) timeout = setTimeout(throttler, wait);
+        };
+    }
+
+    /**
+    * Define a jQuery plugin
+    */
+    var dataID = "spectrum.id";
+    $.fn.spectrum = function (opts, extra) {
+
+        if (typeof opts == "string") {
+
+            var returnValue = this;
+            var args = Array.prototype.slice.call( arguments, 1 );
+
+            this.each(function () {
+                var spect = spectrums[$(this).data(dataID)];
+                if (spect) {
+                    var method = spect[opts];
+                    if (!method) {
+                        throw new Error( "Spectrum: no such method: '" + opts + "'" );
+                    }
+
+                    if (opts == "get") {
+                        returnValue = spect.get();
+                    }
+                    else if (opts == "container") {
+                        returnValue = spect.container;
+                    }
+                    else if (opts == "option") {
+                        returnValue = spect.option.apply(spect, args);
+                    }
+                    else if (opts == "destroy") {
+                        spect.destroy();
+                        $(this).removeData(dataID);
+                    }
+                    else {
+                        method.apply(spect, args);
+                    }
+                }
+            });
+
+            return returnValue;
+        }
+
+        // Initializing a new instance of spectrum
+        return this.spectrum("destroy").each(function () {
+            var options = $.extend({}, opts, $(this).data());
+            var spect = spectrum(this, options);
+            $(this).data(dataID, spect.id);
+        });
+    };
+
+    $.fn.spectrum.load = true;
+    $.fn.spectrum.loadOpts = {};
+    $.fn.spectrum.draggable = draggable;
+    $.fn.spectrum.defaults = defaultOpts;
+
+    $.spectrum = { };
+    $.spectrum.localization = { };
+    $.spectrum.palettes = { };
+
+    $.fn.spectrum.processNativeColorInputs = function () {
+        if (!inputTypeColorSupport) {
+            $("input[type=color]").spectrum({
+                preferredFormat: "hex6"
+            });
+        }
+    };
+
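+    // Usage sketch (illustrative; assumes an <input id="picker"> on the page):
+    //
+    //   $("#picker").spectrum({ showInput: true, preferredFormat: "hex" });
+    //   var color = $("#picker").spectrum("get"); // current color (tinycolor)
+    //   $("#picker").spectrum("set", "#ff8000");  // programmatic update
+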
+    // TinyColor v1.1.1
+    // https://github.com/bgrins/TinyColor
+    // Brian Grinstead, MIT License
+
+    (function() {
+
+    var trimLeft = /^[\s,#]+/,
+        trimRight = /\s+$/,
+        tinyCounter = 0,
+        math = Math,
+        mathRound = math.round,
+        mathMin = math.min,
+        mathMax = math.max,
+        mathRandom = math.random;
+
+    var tinycolor = function tinycolor (color, opts) {
+
+        color = (color) ? color : '';
+        opts = opts || { };
+
+        // If input is already a tinycolor, return itself
+        if (color instanceof tinycolor) {
+           return color;
+        }
+        // If we are called as a function, call using new instead
+        if (!(this instanceof tinycolor)) {
+            return new tinycolor(color, opts);
+        }
+
+        var rgb = inputToRGB(color);
+        this._originalInput = color,
+        this._r = rgb.r,
+        this._g = rgb.g,
+        this._b = rgb.b,
+        this._a = rgb.a,
+        this._roundA = mathRound(100*this._a) / 100,
+        this._format = opts.format || rgb.format;
+        this._gradientType = opts.gradientType;
+
+        // Don't let the range of [0,255] come back in [0,1].
+        // Potentially lose a little bit of precision here, but will fix issues where
+        // .5 gets interpreted as half of the total, instead of half of 1.
+        // If it was supposed to be 128, this was already taken care of by `inputToRGB`
+        if (this._r < 1) { this._r = mathRound(this._r); }
+        if (this._g < 1) { this._g = mathRound(this._g); }
+        if (this._b < 1) { this._b = mathRound(this._b); }
+
+        this._ok = rgb.ok;
+        this._tc_id = tinyCounter++;
+    };
+
+    tinycolor.prototype = {
+        isDark: function() {
+            return this.getBrightness() < 128;
+        },
+        isLight: function() {
+            return !this.isDark();
+        },
+        isValid: function() {
+            return this._ok;
+        },
+        getOriginalInput: function() {
+          return this._originalInput;
+        },
+        getFormat: function() {
+            return this._format;
+        },
+        getAlpha: function() {
+            return this._a;
+        },
+        getBrightness: function() {
+            var rgb = this.toRgb();
+            return (rgb.r * 299 + rgb.g * 587 + rgb.b * 114) / 1000;
+        },
+        setAlpha: function(value) {
+            this._a = boundAlpha(value);
+            this._roundA = mathRound(100*this._a) / 100;
+            return this;
+        },
+        toHsv: function() {
+            var hsv = rgbToHsv(this._r, this._g, this._b);
+            return { h: hsv.h * 360, s: hsv.s, v: hsv.v, a: this._a };
+        },
+        toHsvString: function() {
+            var hsv = rgbToHsv(this._r, this._g, this._b);
+            var h = mathRound(hsv.h * 360), s = mathRound(hsv.s * 100), v = mathRound(hsv.v * 100);
+            return (this._a == 1) ?
+              "hsv("  + h + ", " + s + "%, " + v + "%)" :
+              "hsva(" + h + ", " + s + "%, " + v + "%, "+ this._roundA + ")";
+        },
+        toHsl: function() {
+            var hsl = rgbToHsl(this._r, this._g, this._b);
+            return { h: hsl.h * 360, s: hsl.s, l: hsl.l, a: this._a };
+        },
+        toHslString: function() {
+            var hsl = rgbToHsl(this._r, this._g, this._b);
+            var h = mathRound(hsl.h * 360), s = mathRound(hsl.s * 100), l = mathRound(hsl.l * 100);
+            return (this._a == 1) ?
+              "hsl("  + h + ", " + s + "%, " + l + "%)" :
+              "hsla(" + h + ", " + s + "%, " + l + "%, "+ this._roundA + ")";
+        },
+        toHex: function(allow3Char) {
+            return rgbToHex(this._r, this._g, this._b, allow3Char);
+        },
+        toHexString: function(allow3Char) {
+            return '#' + this.toHex(allow3Char);
+        },
+        toHex8: function() {
+            return rgbaToHex(this._r, this._g, this._b, this._a);
+        },
+        toHex8String: function() {
+            return '#' + this.toHex8();
+        },
+        toRgb: function() {
+            return { r: mathRound(this._r), g: mathRound(this._g), b: mathRound(this._b), a: this._a };
+        },
+        toRgbString: function() {
+            return (this._a == 1) ?
+              "rgb("  + mathRound(this._r) + ", " + mathRound(this._g) + ", " + mathRound(this._b) + ")" :
+              "rgba(" + mathRound(this._r) + ", " + mathRound(this._g) + ", " + mathRound(this._b) + ", " + this._roundA + ")";
+        },
+        toPercentageRgb: function() {
+            return { r: mathRound(bound01(this._r, 255) * 100) + "%", g: mathRound(bound01(this._g, 255) * 100) + "%", b: mathRound(bound01(this._b, 255) * 100) + "%", a: this._a };
+        },
+        toPercentageRgbString: function() {
+            return (this._a == 1) ?
+              "rgb("  + mathRound(bound01(this._r, 255) * 100) + "%, " + mathRound(bound01(this._g, 255) * 100) + "%, " + mathRound(bound01(this._b, 255) * 100) + "%)" :
+              "rgba(" + mathRound(bound01(this._r, 255) * 100) + "%, " + mathRound(bound01(this._g, 255) * 100) + "%, " + mathRound(bound01(this._b, 255) * 100) + "%, " + this._roundA + ")";
+        },
+        toName: function() {
+            if (this._a === 0) {
+                return "transparent";
+            }
+
+            if (this._a < 1) {
+                return false;
+            }
+
+            return hexNames[rgbToHex(this._r, this._g, this._b, true)] || false;
+        },
+        toFilter: function(secondColor) {
+            var hex8String = '#' + rgbaToHex(this._r, this._g, this._b, this._a);
+            var secondHex8String = hex8String;
+            var gradientType = this._gradientType ? "GradientType = 1, " : "";
+
+            if (secondColor) {
+                var s = tinycolor(secondColor);
+                secondHex8String = s.toHex8String();
+            }
+
+            return "progid:DXImageTransform.Microsoft.gradient("+gradientType+"startColorstr="+hex8String+",endColorstr="+secondHex8String+")";
+        },
+        toString: function(format) {
+            var formatSet = !!format;
+            format = format || this._format;
+
+            var formattedString = false;
+            var hasAlpha = this._a < 1 && this._a >= 0;
+            var needsAlphaFormat = !formatSet && hasAlpha && (format === "hex" || format === "hex6" || format === "hex3" || format === "name");
+
+            if (needsAlphaFormat) {
+                // Special case for "transparent", all other non-alpha formats
+                // will return rgba when there is transparency.
+                if (format === "name" && this._a === 0) {
+                    return this.toName();
+                }
+                return this.toRgbString();
+            }
+            if (format === "rgb") {
+                formattedString = this.toRgbString();
+            }
+            if (format === "prgb") {
+                formattedString = this.toPercentageRgbString();
+            }
+            if (format === "hex" || format === "hex6") {
+                formattedString = this.toHexString();
+            }
+            if (format === "hex3") {
+                formattedString = this.toHexString(true);
+            }
+            if (format === "hex8") {
+                formattedString = this.toHex8String();
+            }
+            if (format === "name") {
+                formattedString = this.toName();
+            }
+            if (format === "hsl") {
+                formattedString = this.toHslString();
+            }
+            if (format === "hsv") {
+                formattedString = this.toHsvString();
+            }
+
+            return formattedString || this.toHexString();
+        },
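+        // Illustrative (added note, assuming the toHslString/toHexString helpers
+        // defined above): the format argument selects the output representation,
+        // e.g. tinycolor("red").toString("hsl") yields "hsl(0, 100%, 50%)", and
+        // an unrecognized format falls back to toHexString().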
+
+        _applyModification: function(fn, args) {
+            var color = fn.apply(null, [this].concat([].slice.call(args)));
+            this._r = color._r;
+            this._g = color._g;
+            this._b = color._b;
+            this.setAlpha(color._a);
+            return this;
+        },
+        lighten: function() {
+            return this._applyModification(lighten, arguments);
+        },
+        brighten: function() {
+            return this._applyModification(brighten, arguments);
+        },
+        darken: function() {
+            return this._applyModification(darken, arguments);
+        },
+        desaturate: function() {
+            return this._applyModification(desaturate, arguments);
+        },
+        saturate: function() {
+            return this._applyModification(saturate, arguments);
+        },
+        greyscale: function() {
+            return this._applyModification(greyscale, arguments);
+        },
+        spin: function() {
+            return this._applyModification(spin, arguments);
+        },
+
+        _applyCombination: function(fn, args) {
+            return fn.apply(null, [this].concat([].slice.call(args)));
+        },
+        analogous: function() {
+            return this._applyCombination(analogous, arguments);
+        },
+        complement: function() {
+            return this._applyCombination(complement, arguments);
+        },
+        monochromatic: function() {
+            return this._applyCombination(monochromatic, arguments);
+        },
+        splitcomplement: function() {
+            return this._applyCombination(splitcomplement, arguments);
+        },
+        triad: function() {
+            return this._applyCombination(triad, arguments);
+        },
+        tetrad: function() {
+            return this._applyCombination(tetrad, arguments);
+        }
+    };
+
+    // If input is an object, force a bare 1 into "1.0" so it is handled as a
+    // ratio (100%) rather than the channel value 1. String input must already
+    // use "1.0" for that purpose, since a plain 1 is treated as the value 1.
+    tinycolor.fromRatio = function(color, opts) {
+        if (typeof color == "object") {
+            var newColor = {};
+            for (var i in color) {
+                if (color.hasOwnProperty(i)) {
+                    if (i === "a") {
+                        newColor[i] = color[i];
+                    }
+                    else {
+                        newColor[i] = convertToPercentage(color[i]);
+                    }
+                }
+            }
+            color = newColor;
+        }
+
+        return tinycolor(color, opts);
+    };
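+    // Illustrative usage (computed from the code above): fromRatio reads bare
+    // numbers as ratios in [0, 1], so
+    //     tinycolor.fromRatio({ r: 1, g: 0, b: 0 }).toHexString() === "#ff0000",
+    // whereas plain tinycolor({ r: 1, g: 0, b: 0 }) would read r as 1 out of 255.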
+
+    // Given a string or object, convert that input to RGB
+    // Possible string inputs:
+    //
+    //     "red"
+    //     "#f00" or "f00"
+    //     "#ff0000" or "ff0000"
+    //     "#ff000000" or "ff000000"
+    //     "rgb 255 0 0" or "rgb (255, 0, 0)"
+    //     "rgb 1.0 0 0" or "rgb (1, 0, 0)"
+    //     "rgba (255, 0, 0, 1)" or "rgba 255, 0, 0, 1"
+    //     "rgba (1.0, 0, 0, 1)" or "rgba 1.0, 0, 0, 1"
+    //     "hsl(0, 100%, 50%)" or "hsl 0 100% 50%"
+    //     "hsla(0, 100%, 50%, 1)" or "hsla 0 100% 50%, 1"
+    //     "hsv(0, 100%, 100%)" or "hsv 0 100% 100%"
+    //
+    function inputToRGB(color) {
+
+        var rgb = { r: 0, g: 0, b: 0 };
+        var a = 1;
+        var ok = false;
+        var format = false;
+
+        if (typeof color == "string") {
+            color = stringInputToObject(color);
+        }
+
+        if (typeof color == "object") {
+            if (color.hasOwnProperty("r") && color.hasOwnProperty("g") && color.hasOwnProperty("b")) {
+                rgb = rgbToRgb(color.r, color.g, color.b);
+                ok = true;
+                format = String(color.r).substr(-1) === "%" ? "prgb" : "rgb";
+            }
+            else if (color.hasOwnProperty("h") && color.hasOwnProperty("s") && color.hasOwnProperty("v")) {
+                color.s = convertToPercentage(color.s);
+                color.v = convertToPercentage(color.v);
+                rgb = hsvToRgb(color.h, color.s, color.v);
+                ok = true;
+                format = "hsv";
+            }
+            else if (color.hasOwnProperty("h") && color.hasOwnProperty("s") && color.hasOwnProperty("l")) {
+                color.s = convertToPercentage(color.s);
+                color.l = convertToPercentage(color.l);
+                rgb = hslToRgb(color.h, color.s, color.l);
+                ok = true;
+                format = "hsl";
+            }
+
+            if (color.hasOwnProperty("a")) {
+                a = color.a;
+            }
+        }
+
+        a = boundAlpha(a);
+
+        return {
+            ok: ok,
+            format: color.format || format,
+            r: mathMin(255, mathMax(rgb.r, 0)),
+            g: mathMin(255, mathMax(rgb.g, 0)),
+            b: mathMin(255, mathMax(rgb.b, 0)),
+            a: a
+        };
+    }
+
+
+    // Conversion Functions
+    // --------------------
+
+    // `rgbToHsl`, `rgbToHsv`, `hslToRgb`, `hsvToRgb` modified from:
+    // <http://mjijackson.com/2008/02/rgb-to-hsl-and-rgb-to-hsv-color-model-conversion-algorithms-in-javascript>
+
+    // `rgbToRgb`
+    // Handle bounds / percentage checking to conform to CSS color spec
+    // <http://www.w3.org/TR/css3-color/>
+    // *Assumes:* r, g, b in [0, 255] or [0, 1]
+    // *Returns:* { r, g, b } in [0, 255]
+    function rgbToRgb(r, g, b){
+        return {
+            r: bound01(r, 255) * 255,
+            g: bound01(g, 255) * 255,
+            b: bound01(b, 255) * 255
+        };
+    }
+
+    // `rgbToHsl`
+    // Converts an RGB color value to HSL.
+    // *Assumes:* r, g, and b are contained in [0, 255] or [0, 1]
+    // *Returns:* { h, s, l } in [0,1]
+    function rgbToHsl(r, g, b) {
+
+        r = bound01(r, 255);
+        g = bound01(g, 255);
+        b = bound01(b, 255);
+
+        var max = mathMax(r, g, b), min = mathMin(r, g, b);
+        var h, s, l = (max + min) / 2;
+
+        if(max == min) {
+            h = s = 0; // achromatic
+        }
+        else {
+            var d = max - min;
+            s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
+            switch(max) {
+                case r: h = (g - b) / d + (g < b ? 6 : 0); break;
+                case g: h = (b - r) / d + 2; break;
+                case b: h = (r - g) / d + 4; break;
+            }
+
+            h /= 6;
+        }
+
+        return { h: h, s: s, l: l };
+    }
+
+    // `hslToRgb`
+    // Converts an HSL color value to RGB.
+    // *Assumes:* h is contained in [0, 1] or [0, 360] and s and l are contained [0, 1] or [0, 100]
+    // *Returns:* { r, g, b } in the set [0, 255]
+    function hslToRgb(h, s, l) {
+        var r, g, b;
+
+        h = bound01(h, 360);
+        s = bound01(s, 100);
+        l = bound01(l, 100);
+
+        function hue2rgb(p, q, t) {
+            if(t < 0) t += 1;
+            if(t > 1) t -= 1;
+            if(t < 1/6) return p + (q - p) * 6 * t;
+            if(t < 1/2) return q;
+            if(t < 2/3) return p + (q - p) * (2/3 - t) * 6;
+            return p;
+        }
+
+        if(s === 0) {
+            r = g = b = l; // achromatic
+        }
+        else {
+            var q = l < 0.5 ? l * (1 + s) : l + s - l * s;
+            var p = 2 * l - q;
+            r = hue2rgb(p, q, h + 1/3);
+            g = hue2rgb(p, q, h);
+            b = hue2rgb(p, q, h - 1/3);
+        }
+
+        return { r: r * 255, g: g * 255, b: b * 255 };
+    }
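+    // Worked example (illustrative): hslToRgb(0, 100, 50) -> { r: 255, g: 0, b: 0 },
+    // i.e. hsl(0, 100%, 50%) is pure red.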
+
+    // `rgbToHsv`
+    // Converts an RGB color value to HSV
+    // *Assumes:* r, g, and b are contained in the set [0, 255] or [0, 1]
+    // *Returns:* { h, s, v } in [0,1]
+    function rgbToHsv(r, g, b) {
+
+        r = bound01(r, 255);
+        g = bound01(g, 255);
+        b = bound01(b, 255);
+
+        var max = mathMax(r, g, b), min = mathMin(r, g, b);
+        var h, s, v = max;
+
+        var d = max - min;
+        s = max === 0 ? 0 : d / max;
+
+        if(max == min) {
+            h = 0; // achromatic
+        }
+        else {
+            switch(max) {
+                case r: h = (g - b) / d + (g < b ? 6 : 0); break;
+                case g: h = (b - r) / d + 2; break;
+                case b: h = (r - g) / d + 4; break;
+            }
+            h /= 6;
+        }
+        return { h: h, s: s, v: v };
+    }
+
+    // `hsvToRgb`
+    // Converts an HSV color value to RGB.
+    // *Assumes:* h is contained in [0, 1] or [0, 360] and s and v are contained in [0, 1] or [0, 100]
+    // *Returns:* { r, g, b } in the set [0, 255]
+    function hsvToRgb(h, s, v) {
+
+        h = bound01(h, 360) * 6;
+        s = bound01(s, 100);
+        v = bound01(v, 100);
+
+        var i = math.floor(h),
+            f = h - i,
+            p = v * (1 - s),
+            q = v * (1 - f * s),
+            t = v * (1 - (1 - f) * s),
+            mod = i % 6,
+            r = [v, q, p, p, t, v][mod],
+            g = [t, v, v, q, p, p][mod],
+            b = [p, p, t, v, v, q][mod];
+
+        return { r: r * 255, g: g * 255, b: b * 255 };
+    }
+
+    // `rgbToHex`
+    // Converts an RGB color to hex
+    // Assumes r, g, and b are contained in the set [0, 255]
+    // Returns a 3 or 6 character hex
+    function rgbToHex(r, g, b, allow3Char) {
+
+        var hex = [
+            pad2(mathRound(r).toString(16)),
+            pad2(mathRound(g).toString(16)),
+            pad2(mathRound(b).toString(16))
+        ];
+
+        // Return a 3 character hex if possible
+        if (allow3Char && hex[0].charAt(0) == hex[0].charAt(1) && hex[1].charAt(0) == hex[1].charAt(1) && hex[2].charAt(0) == hex[2].charAt(1)) {
+            return hex[0].charAt(0) + hex[1].charAt(0) + hex[2].charAt(0);
+        }
+
+        return hex.join("");
+    }
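+    // Illustrative: rgbToHex(255, 0, 0, true) -> "f00" (all pairs collapsible),
+    // while rgbToHex(255, 10, 0) -> "ff0a00".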
+
+    // `rgbaToHex`
+    // Converts an RGB color plus alpha transparency to hex
+    // Assumes r, g and b are contained in the set [0, 255] and a in [0, 1]
+    // Returns an 8 character hex
+    function rgbaToHex(r, g, b, a) {
+
+        var hex = [
+            pad2(convertDecimalToHex(a)),
+            pad2(mathRound(r).toString(16)),
+            pad2(mathRound(g).toString(16)),
+            pad2(mathRound(b).toString(16))
+        ];
+
+        return hex.join("");
+    }
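+    // Illustrative: the alpha byte comes first (ARGB order), so
+    // rgbaToHex(255, 0, 0, 0.5) -> "80ff0000".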
+
+    // `equals`
+    // Can be called with any tinycolor input
+    tinycolor.equals = function (color1, color2) {
+        if (!color1 || !color2) { return false; }
+        return tinycolor(color1).toRgbString() == tinycolor(color2).toRgbString();
+    };
+    tinycolor.random = function() {
+        return tinycolor.fromRatio({
+            r: mathRandom(),
+            g: mathRandom(),
+            b: mathRandom()
+        });
+    };
+
+
+    // Modification Functions
+    // ----------------------
+    // Thanks to less.js for some of the basics here
+    // <https://github.com/cloudhead/less.js/blob/master/lib/less/functions.js>
+
+    function desaturate(color, amount) {
+        amount = (amount === 0) ? 0 : (amount || 10);
+        var hsl = tinycolor(color).toHsl();
+        hsl.s -= amount / 100;
+        hsl.s = clamp01(hsl.s);
+        return tinycolor(hsl);
+    }
+
+    function saturate(color, amount) {
+        amount = (amount === 0) ? 0 : (amount || 10);
+        var hsl = tinycolor(color).toHsl();
+        hsl.s += amount / 100;
+        hsl.s = clamp01(hsl.s);
+        return tinycolor(hsl);
+    }
+
+    function greyscale(color) {
+        return tinycolor(color).desaturate(100);
+    }
+
+    function lighten (color, amount) {
+        amount = (amount === 0) ? 0 : (amount || 10);
+        var hsl = tinycolor(color).toHsl();
+        hsl.l += amount / 100;
+        hsl.l = clamp01(hsl.l);
+        return tinycolor(hsl);
+    }
+
+    function brighten(color, amount) {
+        amount = (amount === 0) ? 0 : (amount || 10);
+        var rgb = tinycolor(color).toRgb();
+        rgb.r = mathMax(0, mathMin(255, rgb.r - mathRound(255 * - (amount / 100))));
+        rgb.g = mathMax(0, mathMin(255, rgb.g - mathRound(255 * - (amount / 100))));
+        rgb.b = mathMax(0, mathMin(255, rgb.b - mathRound(255 * - (amount / 100))));
+        return tinycolor(rgb);
+    }
+
+    function darken (color, amount) {
+        amount = (amount === 0) ? 0 : (amount || 10);
+        var hsl = tinycolor(color).toHsl();
+        hsl.l -= amount / 100;
+        hsl.l = clamp01(hsl.l);
+        return tinycolor(hsl);
+    }
+
+    // Spin takes a positive or negative amount within [-360, 360] indicating the change of hue.
+    // Values outside of this range will be wrapped into this range.
+    function spin(color, amount) {
+        var hsl = tinycolor(color).toHsl();
+        var hue = (mathRound(hsl.h) + amount) % 360;
+        hsl.h = hue < 0 ? 360 + hue : hue;
+        return tinycolor(hsl);
+    }
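+    // Illustrative: spin("red", 120).toHexString() -> "#00ff00", and
+    // spin("red", -120) wraps the hue to 240 (blue).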
+
+    // Combination Functions
+    // ---------------------
+    // Thanks to jQuery xColor for some of the ideas behind these
+    // <https://github.com/infusion/jQuery-xcolor/blob/master/jquery.xcolor.js>
+
+    function complement(color) {
+        var hsl = tinycolor(color).toHsl();
+        hsl.h = (hsl.h + 180) % 360;
+        return tinycolor(hsl);
+    }
+
+    function triad(color) {
+        var hsl = tinycolor(color).toHsl();
+        var h = hsl.h;
+        return [
+            tinycolor(color),
+            tinycolor({ h: (h + 120) % 360, s: hsl.s, l: hsl.l }),
+            tinycolor({ h: (h + 240) % 360, s: hsl.s, l: hsl.l })
+        ];
+    }
+
+    function tetrad(color) {
+        var hsl = tinycolor(color).toHsl();
+        var h = hsl.h;
+        return [
+            tinycolor(color),
+            tinycolor({ h: (h + 90) % 360, s: hsl.s, l: hsl.l }),
+            tinycolor({ h: (h + 180) % 360, s: hsl.s, l: hsl.l }),
+            tinycolor({ h: (h + 270) % 360, s: hsl.s, l: hsl.l })
+        ];
+    }
+
+    function splitcomplement(color) {
+        var hsl = tinycolor(color).toHsl();
+        var h = hsl.h;
+        return [
+            tinycolor(color),
+            tinycolor({ h: (h + 72) % 360, s: hsl.s, l: hsl.l}),
+            tinycolor({ h: (h + 216) % 360, s: hsl.s, l: hsl.l})
+        ];
+    }
+
+    function analogous(color, results, slices) {
+        results = results || 6;
+        slices = slices || 30;
+
+        var hsl = tinycolor(color).toHsl();
+        var part = 360 / slices;
+        var ret = [tinycolor(color)];
+
+        for (hsl.h = ((hsl.h - (part * results >> 1)) + 720) % 360; --results; ) {
+            hsl.h = (hsl.h + part) % 360;
+            ret.push(tinycolor(hsl));
+        }
+        return ret;
+    }
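+    // Illustrative: with the defaults (results = 6, slices = 30) each step is
+    // 12 degrees, so analogous("red") returns the input color followed by hues
+    // 336, 348, 0, 12 and 24, i.e. a band from -24 to +24 degrees around red.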
+
+    function monochromatic(color, results) {
+        results = results || 6;
+        var hsv = tinycolor(color).toHsv();
+        var h = hsv.h, s = hsv.s, v = hsv.v;
+        var ret = [];
+        var modification = 1 / results;
+
+        while (results--) {
+            ret.push(tinycolor({ h: h, s: s, v: v}));
+            v = (v + modification) % 1;
+        }
+
+        return ret;
+    }
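+    // Illustrative: monochromatic("red", 2) keeps hue and saturation fixed and
+    // steps value by 1/2, yielding colors equivalent to #ff0000 and #800000.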
+
+    // Utility Functions
+    // ---------------------
+
+    tinycolor.mix = function(color1, color2, amount) {
+        amount = (amount === 0) ? 0 : (amount || 50);
+
+        var rgb1 = tinycolor(color1).toRgb();
+        var rgb2 = tinycolor(color2).toRgb();
+
+        var p = amount / 100;
+        var w = p * 2 - 1;
+        var a = rgb2.a - rgb1.a;
+
+        var w1;
+
+        if (w * a == -1) {
+            w1 = w;
+        } else {
+            w1 = (w + a) / (1 + w * a);
+        }
+
+        w1 = (w1 + 1) / 2;
+
+        var w2 = 1 - w1;
+
+        var rgba = {
+            r: rgb2.r * w1 + rgb1.r * w2,
+            g: rgb2.g * w1 + rgb1.g * w2,
+            b: rgb2.b * w1 + rgb1.b * w2,
+            a: rgb2.a * p  + rgb1.a * (1 - p)
+        };
+
+        return tinycolor(rgba);
+    };
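+    // Illustrative: tinycolor.mix("#000", "#fff", 50).toHexString() -> "#808080";
+    // amount defaults to 50, an even blend of the two inputs.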
+
+
+    // Readability Functions
+    // ---------------------
+    // <http://www.w3.org/TR/AERT#color-contrast>
+
+    // `readability`
+    // Analyze the 2 colors and returns an object with the following properties:
+    //    `brightness`: difference in brightness between the two colors
+    //    `color`: difference in color/hue between the two colors
+    tinycolor.readability = function(color1, color2) {
+        var c1 = tinycolor(color1);
+        var c2 = tinycolor(color2);
+        var rgb1 = c1.toRgb();
+        var rgb2 = c2.toRgb();
+        var brightnessA = c1.getBrightness();
+        var brightnessB = c2.getBrightness();
+        var colorDiff = (
+            Math.max(rgb1.r, rgb2.r) - Math.min(rgb1.r, rgb2.r) +
+            Math.max(rgb1.g, rgb2.g) - Math.min(rgb1.g, rgb2.g) +
+            Math.max(rgb1.b, rgb2.b) - Math.min(rgb1.b, rgb2.b)
+        );
+
+        return {
+            brightness: Math.abs(brightnessA - brightnessB),
+            color: colorDiff
+        };
+    };
+
+    // `readable`
+    // http://www.w3.org/TR/AERT#color-contrast
+    // Ensure that foreground and background color combinations provide sufficient contrast.
+    // *Example*
+    //    tinycolor.isReadable("#000", "#111") => false
+    tinycolor.isReadable = function(color1, color2) {
+        var readability = tinycolor.readability(color1, color2);
+        return readability.brightness > 125 && readability.color > 500;
+    };
+
+    // `mostReadable`
+    // Given a base color and a list of possible foreground or background
+    // colors for that base, returns the most readable color.
+    // *Example*
+    //    tinycolor.mostReadable("#123", ["#fff", "#000"]) => "#fff"
+    tinycolor.mostReadable = function(baseColor, colorList) {
+        var bestColor = null;
+        var bestScore = 0;
+        var bestIsReadable = false;
+        for (var i=0; i < colorList.length; i++) {
+
+            // We normalize both around the "acceptable" breaking point,
+            // but rank brightness contrast higher than hue.
+
+            var readability = tinycolor.readability(baseColor, colorList[i]);
+            var readable = readability.brightness > 125 && readability.color > 500;
+            var score = 3 * (readability.brightness / 125) + (readability.color / 500);
+
+            if ((readable && ! bestIsReadable) ||
+                (readable && bestIsReadable && score > bestScore) ||
+                ((! readable) && (! bestIsReadable) && score > bestScore)) {
+                bestIsReadable = readable;
+                bestScore = score;
+                bestColor = tinycolor(colorList[i]);
+            }
+        }
+        return bestColor;
+    };
+
+
+    // Big List of Colors
+    // ------------------
+    // <http://www.w3.org/TR/css3-color/#svg-color>
+    var names = tinycolor.names = {
+        aliceblue: "f0f8ff",
+        antiquewhite: "faebd7",
+        aqua: "0ff",
+        aquamarine: "7fffd4",
+        azure: "f0ffff",
+        beige: "f5f5dc",
+        bisque: "ffe4c4",
+        black: "000",
+        blanchedalmond: "ffebcd",
+        blue: "00f",
+        blueviolet: "8a2be2",
+        brown: "a52a2a",
+        burlywood: "deb887",
+        burntsienna: "ea7e5d",
+        cadetblue: "5f9ea0",
+        chartreuse: "7fff00",
+        chocolate: "d2691e",
+        coral: "ff7f50",
+        cornflowerblue: "6495ed",
+        cornsilk: "fff8dc",
+        crimson: "dc143c",
+        cyan: "0ff",
+        darkblue: "00008b",
+        darkcyan: "008b8b",
+        darkgoldenrod: "b8860b",
+        darkgray: "a9a9a9",
+        darkgreen: "006400",
+        darkgrey: "a9a9a9",
+        darkkhaki: "bdb76b",
+        darkmagenta: "8b008b",
+        darkolivegreen: "556b2f",
+        darkorange: "ff8c00",
+        darkorchid: "9932cc",
+        darkred: "8b0000",
+        darksalmon: "e9967a",
+        darkseagreen: "8fbc8f",
+        darkslateblue: "483d8b",
+        darkslategray: "2f4f4f",
+        darkslategrey: "2f4f4f",
+        darkturquoise: "00ced1",
+        darkviolet: "9400d3",
+        deeppink: "ff1493",
+        deepskyblue: "00bfff",
+        dimgray: "696969",
+        dimgrey: "696969",
+        dodgerblue: "1e90ff",
+        firebrick: "b22222",
+        floralwhite: "fffaf0",
+        forestgreen: "228b22",
+        fuchsia: "f0f",
+        gainsboro: "dcdcdc",
+        ghostwhite: "f8f8ff",
+        gold: "ffd700",
+        goldenrod: "daa520",
+        gray: "808080",
+        green: "008000",
+        greenyellow: "adff2f",
+        grey: "808080",
+        honeydew: "f0fff0",
+        hotpink: "ff69b4",
+        indianred: "cd5c5c",
+        indigo: "4b0082",
+        ivory: "fffff0",
+        khaki: "f0e68c",
+        lavender: "e6e6fa",
+        lavenderblush: "fff0f5",
+        lawngreen: "7cfc00",
+        lemonchiffon: "fffacd",
+        lightblue: "add8e6",
+        lightcoral: "f08080",
+        lightcyan: "e0ffff",
+        lightgoldenrodyellow: "fafad2",
+        lightgray: "d3d3d3",
+        lightgreen: "90ee90",
+        lightgrey: "d3d3d3",
+        lightpink: "ffb6c1",
+        lightsalmon: "ffa07a",
+        lightseagreen: "20b2aa",
+        lightskyblue: "87cefa",
+        lightslategray: "789",
+        lightslategrey: "789",
+        lightsteelblue: "b0c4de",
+        lightyellow: "ffffe0",
+        lime: "0f0",
+        limegreen: "32cd32",
+        linen: "faf0e6",
+        magenta: "f0f",
+        maroon: "800000",
+        mediumaquamarine: "66cdaa",
+        mediumblue: "0000cd",
+        mediumorchid: "ba55d3",
+        mediumpurple: "9370db",
+        mediumseagreen: "3cb371",
+        mediumslateblue: "7b68ee",
+        mediumspringgreen: "00fa9a",
+        mediumturquoise: "48d1cc",
+        mediumvioletred: "c71585",
+        midnightblue: "191970",
+        mintcream: "f5fffa",
+        mistyrose: "ffe4e1",
+        moccasin: "ffe4b5",
+        navajowhite: "ffdead",
+        navy: "000080",
+        oldlace: "fdf5e6",
+        olive: "808000",
+        olivedrab: "6b8e23",
+        orange: "ffa500",
+        orangered: "ff4500",
+        orchid: "da70d6",
+        palegoldenrod: "eee8aa",
+        palegreen: "98fb98",
+        paleturquoise: "afeeee",
+        palevioletred: "db7093",
+        papayawhip: "ffefd5",
+        peachpuff: "ffdab9",
+        peru: "cd853f",
+        pink: "ffc0cb",
+        plum: "dda0dd",
+        powderblue: "b0e0e6",
+        purple: "800080",
+        rebeccapurple: "663399",
+        red: "f00",
+        rosybrown: "bc8f8f",
+        royalblue: "4169e1",
+        saddlebrown: "8b4513",
+        salmon: "fa8072",
+        sandybrown: "f4a460",
+        seagreen: "2e8b57",
+        seashell: "fff5ee",
+        sienna: "a0522d",
+        silver: "c0c0c0",
+        skyblue: "87ceeb",
+        slateblue: "6a5acd",
+        slategray: "708090",
+        slategrey: "708090",
+        snow: "fffafa",
+        springgreen: "00ff7f",
+        steelblue: "4682b4",
+        tan: "d2b48c",
+        teal: "008080",
+        thistle: "d8bfd8",
+        tomato: "ff6347",
+        turquoise: "40e0d0",
+        violet: "ee82ee",
+        wheat: "f5deb3",
+        white: "fff",
+        whitesmoke: "f5f5f5",
+        yellow: "ff0",
+        yellowgreen: "9acd32"
+    };
+
+    // Make it easy to access colors via `hexNames[hex]`
+    var hexNames = tinycolor.hexNames = flip(names);
+
+
+    // Utilities
+    // ---------
+
+    // `{ 'name1': 'val1' }` becomes `{ 'val1': 'name1' }`
+    function flip(o) {
+        var flipped = { };
+        for (var i in o) {
+            if (o.hasOwnProperty(i)) {
+                flipped[o[i]] = i;
+            }
+        }
+        return flipped;
+    }
+
+    // Return a valid alpha value [0,1] with all invalid values being set to 1
+    function boundAlpha(a) {
+        a = parseFloat(a);
+
+        if (isNaN(a) || a < 0 || a > 1) {
+            a = 1;
+        }
+
+        return a;
+    }
+
+    // Take input from [0, n] and return it as [0, 1]
+    function bound01(n, max) {
+        if (isOnePointZero(n)) { n = "100%"; }
+
+        var processPercent = isPercentage(n);
+        n = mathMin(max, mathMax(0, parseFloat(n)));
+
+        // Automatically convert percentage into number
+        if (processPercent) {
+            n = parseInt(n * max, 10) / 100;
+        }
+
+        // Handle floating point rounding errors
+        if ((math.abs(n - max) < 0.000001)) {
+            return 1;
+        }
+
+        // Convert into [0, 1] range if it isn't already
+        return (n % max) / parseFloat(max);
+    }
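+    // Illustrative: bound01(255, 255) -> 1, bound01("50%", 100) -> 0.5,
+    // and bound01(128, 255) -> ~0.502.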
+
+    // Force a number between 0 and 1
+    function clamp01(val) {
+        return mathMin(1, mathMax(0, val));
+    }
+
+    // Parse a base-16 hex value into a base-10 integer
+    function parseIntFromHex(val) {
+        return parseInt(val, 16);
+    }
+
+    // Need to handle 1.0 as 100%, since once it is a number, there is no difference between it and 1
+    // <http://stackoverflow.com/questions/7422072/javascript-how-to-detect-number-as-a-decimal-including-1-0>
+    function isOnePointZero(n) {
+        return typeof n == "string" && n.indexOf('.') != -1 && parseFloat(n) === 1;
+    }
+
+    // Check to see if string passed in is a percentage
+    function isPercentage(n) {
+        return typeof n === "string" && n.indexOf('%') != -1;
+    }
+
+    // Force a hex value to have 2 characters
+    function pad2(c) {
+        return c.length == 1 ? '0' + c : '' + c;
+    }
+
+    // Replace a decimal with its percentage value
+    function convertToPercentage(n) {
+        if (n <= 1) {
+            n = (n * 100) + "%";
+        }
+
+        return n;
+    }
+
+    // Converts a decimal to a hex value
+    function convertDecimalToHex(d) {
+        return Math.round(parseFloat(d) * 255).toString(16);
+    }
+    // Converts a hex value to a decimal
+    function convertHexToDecimal(h) {
+        return (parseIntFromHex(h) / 255);
+    }
+
+    var matchers = (function() {
+
+        // <http://www.w3.org/TR/css3-values/#integers>
+        var CSS_INTEGER = "[-\\+]?\\d+%?";
+
+        // <http://www.w3.org/TR/css3-values/#number-value>
+        var CSS_NUMBER = "[-\\+]?\\d*\\.\\d+%?";
+
+        // Allow positive/negative integer/number.  Don't capture the either/or, just the entire outcome.
+        var CSS_UNIT = "(?:" + CSS_NUMBER + ")|(?:" + CSS_INTEGER + ")";
+
+        // Actual matching.
+        // Parentheses and commas are optional, but not required.
+        // Whitespace can take the place of commas or opening paren
+        var PERMISSIVE_MATCH3 = "[\\s|\\(]+(" + CSS_UNIT + ")[,|\\s]+(" + CSS_UNIT + ")[,|\\s]+(" + CSS_UNIT + ")\\s*\\)?";
+        var PERMISSIVE_MATCH4 = "[\\s|\\(]+(" + CSS_UNIT + ")[,|\\s]+(" + CSS_UNIT + ")[,|\\s]+(" + CSS_UNIT + ")[,|\\s]+(" + CSS_UNIT + ")\\s*\\)?";
+
+        return {
+            rgb: new RegExp("rgb" + PERMISSIVE_MATCH3),
+            rgba: new RegExp("rgba" + PERMISSIVE_MATCH4),
+            hsl: new RegExp("hsl" + PERMISSIVE_MATCH3),
+            hsla: new RegExp("hsla" + PERMISSIVE_MATCH4),
+            hsv: new RegExp("hsv" + PERMISSIVE_MATCH3),
+            hsva: new RegExp("hsva" + PERMISSIVE_MATCH4),
+            hex3: /^([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/,
+            hex6: /^([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})$/,
+            hex8: /^([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})$/
+        };
+    })();
+
+    // `stringInputToObject`
+    // Permissive string parsing.  Take in a number of formats, and output an object
+    // based on detected format.  Returns `{ r, g, b }` or `{ h, s, l }` or `{ h, s, v}`
+    function stringInputToObject(color) {
+
+        color = color.replace(trimLeft,'').replace(trimRight, '').toLowerCase();
+        var named = false;
+        if (names[color]) {
+            color = names[color];
+            named = true;
+        }
+        else if (color == 'transparent') {
+            return { r: 0, g: 0, b: 0, a: 0, format: "name" };
+        }
+
+        // Try to match string input using regular expressions.
+        // Keep most of the number bounding out of this function - don't worry about [0,1] or [0,100] or [0,360]
+        // Just return an object and let the conversion functions handle that.
+        // This way the result will be the same whether the tinycolor is initialized with string or object.
+        var match;
+        if ((match = matchers.rgb.exec(color))) {
+            return { r: match[1], g: match[2], b: match[3] };
+        }
+        if ((match = matchers.rgba.exec(color))) {
+            return { r: match[1], g: match[2], b: match[3], a: match[4] };
+        }
+        if ((match = matchers.hsl.exec(color))) {
+            return { h: match[1], s: match[2], l: match[3] };
+        }
+        if ((match = matchers.hsla.exec(color))) {
+            return { h: match[1], s: match[2], l: match[3], a: match[4] };
+        }
+        if ((match = matchers.hsv.exec(color))) {
+            return { h: match[1], s: match[2], v: match[3] };
+        }
+        if ((match = matchers.hsva.exec(color))) {
+            return { h: match[1], s: match[2], v: match[3], a: match[4] };
+        }
+        if ((match = matchers.hex8.exec(color))) {
+            return {
+                a: convertHexToDecimal(match[1]),
+                r: parseIntFromHex(match[2]),
+                g: parseIntFromHex(match[3]),
+                b: parseIntFromHex(match[4]),
+                format: named ? "name" : "hex8"
+            };
+        }
+        if ((match = matchers.hex6.exec(color))) {
+            return {
+                r: parseIntFromHex(match[1]),
+                g: parseIntFromHex(match[2]),
+                b: parseIntFromHex(match[3]),
+                format: named ? "name" : "hex"
+            };
+        }
+        if ((match = matchers.hex3.exec(color))) {
+            return {
+                r: parseIntFromHex(match[1] + '' + match[1]),
+                g: parseIntFromHex(match[2] + '' + match[2]),
+                b: parseIntFromHex(match[3] + '' + match[3]),
+                format: named ? "name" : "hex"
+            };
+        }
+
+        return false;
+    }
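+    // Illustrative: stringInputToObject("rgb 255 0 0") -> { r: "255", g: "0", b: "0" }
+    // (values remain strings; the conversion functions above do the bounding), and
+    // stringInputToObject("hsl(120, 100%, 50%)") -> { h: "120", s: "100%", l: "50%" }.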
+
+    window.tinycolor = tinycolor;
+    })();
+
+
+    $(function () {
+        if ($.fn.spectrum.load) {
+            $.fn.spectrum.processNativeColorInputs();
+        }
+    });
+
+});
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/style.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/style.js
new file mode 100644
index 0000000..37fbcb8
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/utils/style.js
@@ -0,0 +1,235 @@
+var $ = jQuery = require("jquery");
+var cytoscape = require("cytoscape");
+
+var Style = module.exports = function(theme){
+    this.theme = theme;
+};
+
+Style.prototype.getColor = function(element){
+    //Get color for a certain nucleotide as specified by the color
+	//picker in the options column of the page.
+	var col = "black";
+	if($("#acolor").length > 0){
+		if (element === "A"){
+			col = $("#acolor").spectrum('get').toHexString();
+		}
+		else if (element === "C"){
+			col = $("#ccolor").spectrum('get').toHexString();
+		}
+		else if (element === "U"){
+			col = $("#ucolor").spectrum('get').toHexString();
+		}
+		else if (element === "G"){
+			col = $("#gcolor").spectrum('get').toHexString();
+		}
+		else if (element === "hbond"){
+			col = "#3A9AD9";
+		}
+		else if(element === "violation") {
+			col = "red";
+		}
+	} else {
+		if (element === "A"){
+			col = "#64F73F";
+		}
+		else if (element === "C"){
+			col = "#FFB340";
+		}
+		else if (element === "U"){
+			col = "#3C88EE";
+		}
+		else if (element === "G"){
+			col = "#EB413C";
+		}
+		else if (element === "hbond"){
+			col = "#3A9AD9";
+		}
+		else if(element === "violation") {
+			col = "red";
+		}
+	}
+	return col;
+}
+
+Style.prototype.getWeight = function(type){
+    //Get weight for a certain bond type
+    var weight;
+    if(type=== "hbond" || type === "violation"){
+        weight = 4;
+    } else {
+        weight = 5;
+    }
+    return weight;
+}
+
+Style.prototype.getCytoscapeStyle = function(){
+    var css = cytoscape.stylesheet()
+            .selector("node")
+            .css({
+                "content": "data(label)",
+                "text-valign": "center",
+                "color": "white",
+                "text-outline-width": 2,
+                "text-outline-color": "#778899"
+            })
+            .selector("edge")
+            .css({
+                "background-color": "white"
+            })
+            .selector(".chosen")
+            .css({
+                "background-color": "black",
+                "opacity": 0.6
+            })
+            .selector(".edgehandles-hover")
+            .css({
+                "background-color": "red"
+            })
+            .selector(".edgehandles-source")
+            .css({
+                "border-width": 2,
+                "border-color": "red"
+            })
+            .selector(".edgehandles-target")
+            .css({
+                "border-width": 2,
+                "border-color": "red"
+            })
+            .selector(".edgehandles-preview, .edgehandles-ghost-edge")
+            .css({
+                "line-color": "red",
+                "target-arrow-color": "red",
+                "target-arrow-color": "red"
+            })
+            .selector(".wcwccis")
+            .css({
+                "mid-target-arrow-shape": "circle",
+                "mid-target-arrow-color": "red"
+            })
+            .selector(".wcsgcis")
+            .css({
+                "source-arrow-shape": "circle",
+                "source-arrow-color": "red",
+                "target-arrow-shape": "triangle",
+                "target-arrow-color": "red"
+            })
+            .selector(".sgwccis")
+            .css({
+                "target-arrow-shape": "circle",
+                "target-arrow-color": "red",
+                "source-arrow-shape": "triangle",
+                "source-arrow-color": "red"
+            })
+            .selector(".hgsgcis")
+            .css({
+                "source-arrow-shape": "square",
+                "source-arrow-color": "red",
+                "target-arrow-shape": "triangle",
+                "target-arrow-color": "red"
+            })
+            .selector(".sghgcis")
+            .css({
+                "target-arrow-shape": "square",
+                "target-arrow-color": "red",
+                "source-arrow-shape": "triangle",
+                "source-arrow-color": "red"
+            })
+            .selector(".wchgcis")
+            .css({
+                "source-arrow-shape": "circle",
+                "source-arrow-color": "red",
+                "target-arrow-shape": "square",
+                "target-arrow-color": "red"
+            })
+            .selector(".hgwccis")
+            .css({
+                "target-arrow-shape": "circle",
+                "target-arrow-color": "red",
+                "source-arrow-shape": "square",
+                "source-arrow-color": "red"
+            })
+            .selector(".sgsgcis")
+            .css({
+                "mid-target-arrow-shape": "triangle",
+                "mid-target-arrow-color": "red"
+            })
+            .selector(".wcwctrans")
+            .css({
+                "mid=target-arrow-shape": "circle",
+                "mid-target-arrow-color": "red",
+                "mid-target-arrow-fill": "hollow"
+            })
+            .selector(".wcsgtrans")
+            .css({
+                "source-arrow-shape": "circle",
+                "source-arrow-color": "red",
+                "target-arrow-shape": "triangle",
+                "target-arrow-color": "red",
+                "target-arrow-fill": "hollow",
+                "source-arrow-fill": "hollow"
+            })
+            .selector(".sgwctrans")
+            .css({
+                "target-arrow-shape": "circle",
+                "target-arrow-color": "red",
+                "source-arrow-shape": "triangle",
+                "source-arrow-color": "red",
+                "source-arrow-fill": "hollow",
+                "target-arrow-fill": "hollow"
+            })
+            .selector(".hgsgtrans")
+            .css({
+                "source-arrow-shape": "square",
+                "source-arrow-color": "red",
+                "target-arrow-shape": "triangle",
+                "target-arrow-color": "red",
+                "target-arrow-fill": "hollow",
+                "source-arrow-fill": "hollow"
+            })
+            .selector(".sghgtrans")
+            .css({
+                "target-arrow-shape": "square",
+                "target-arrow-color": "red",
+                "source-arrow-shape": "triangle",
+                "source-arrow-color": "red",
+                "source-arrow-fill": "hollow",
+                "target-arrow-fill": "hollow"
+            })
+            .selector(".wchgtrans")
+            .css({
+                "source-arrow-shape": "circle",
+                "source-arrow-color": "red",
+                "target-arrow-shape": "square",
+                "target-arrow-color": "red",
+                "target-arrow-fill": "hollow",
+                "source-arrow-fill": "hollow"
+            })
+            .selector(".hgwctrans")
+            .css({
+                "target-arrow-shape": "circle",
+                "target-arrow-color": "red",
+                "source-arrow-shape": "square",
+                "source-arrow-color": "red",
+                "source-arrow-fill": "hollow",
+                "target-arrow-fill": "hollow"
+            })
+            .selector(".sgsgtrans")
+            .css({
+                "mid-target-arrow-shape": "triangle",
+                "mid-target-arrow-color": "red",
+                "mid-target-arrow-fill": "hollow"
+            })
+            .selector(".hghgcis")
+            .css({
+                "mid-target-arrow-shape": "square",
+                "mid-target-arrow-color": "red",
+            })
+            .selector(".hghgtrans")
+            .css({
+                "mid-target-arrow-shape": "square",
+                "mid-target-arrow-color": "red",
+                "mid-target-arrow-fill": "hollow"
+            });
+
+    return css;
+}
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/annoview.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/annoview.js
new file mode 100644
index 0000000..4af87fa
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/annoview.js
@@ -0,0 +1,129 @@
+var Backbone = require("backbone");
+var NCAnno = require("../models/ncanno");
+var $ = jQuery = require("jquery");
+var bootstrap = require("bootstrap");
+
+var AnnoView = Backbone.View.extend({
+    events: {
+        "click #annook": "applyAnno",
+        "click #wc1": "selectWc1",
+        "click #hg1": "selectHg1",
+        "click #sg1": "selectSg1",
+        "click #wc2": "selectWc2",
+        "click #hg2": "selectHg2",
+        "click #sg2": "selectSg2",
+        "click #st1": "selectStericityCis",
+        "click #st2": "selectStericityTrans"
+    },
+    initialize: function(b1, b2, st, edge, cyEle){
+        var el = document.createElement("div");
+        cyEle.parentNode.appendChild(el);
+        this.setElement(el);
+        this.anno = new NCAnno(b1, b2, st, edge);
+        //CSS
+        this.defineStyle();
+        this.render();
+    },
+    applyAnno: function(){
+        var e1 = this.anno.get("base1");
+        var e2 = this.anno.get("base2");
+        var st = this.anno.get("stericity");
+        this.anno.get("edge")._private.classes = {};
+        this.anno.get("edge").addClass(e1 + e2 + st);
+        this.remove();
+    },
+    render: function(){
+        //HTML SETUP
+        var res1 = this.anno.get("edge").source().data("label");
+        var res2 = this.anno.get("edge").target().data("label");
+        var edefs = this.anno.getLabels();
+
+        this.el.innerHTML += "<div class='col-md-4'>"
+            + "<span>Residue: " +  res1 + "</span>"
+            + "<div class='dropdown'>"
+            + "<button class='btn btn-default dropdown-toggle' type='button' id='menu1' data-toggle='dropdown'>" + edefs[0]
+            + "<span class='caret'></span></button>"
+            + "<ul class='dropdown-menu' role='menu' aria-labelledby='menu1'>"
+            + "<li role='presentation'><span class='text-muted' id='wc1' style='cursor: pointer;'>Watson-Crick</span></li>"
+            + "<li role='presentation'><span class='text-muted' id='hg1' style='cursor: pointer;'>Hoogsteen</span></li>"
+            + "<li role='presentation'><span class='text-muted' id='sg1' style='cursor: pointer;'>Sugar</span></li>"
+            + "</ul>"
+            + "</div>"
+            + "</div>";
+        this.el.innerHTML += "<div class='col-md-4'>"
+            + "<span>Residue: " +  res2 + "</span>"
+            + "<div class='dropdown'>"
+            + "<button class='btn btn-default dropdown-toggle' type='button' id='menu2' data-toggle='dropdown'>" + edefs[1]
+            + "<span class='caret'></span></button>"
+            + "<ul class='dropdown-menu' role='menu' aria-labelledby='menu2'>"
+            + "<li role='presentation'><span class='text-muted' id='wc2' style='cursor: pointer;'>Watson-Crick</span></li>"
+            + "<li role='presentation'><span class='text-muted' id='hg2' style='cursor: pointer;'>Hoogsteen</span></li>"
+            + "<li role='presentation'><span class='text-muted' id='sg2' style='cursor: pointer;'>Sugar</span></li>"
+            + "</ul>"
+            + "</div>"
+            + "</div>";
+        this.el.innerHTML += ""
+            + "<div class='col-md-3'><div class='dropdown'>"
+            + "<button class='btn btn-default dropdown-toggle' type='button' id='menu3' data-toggle='dropdown' style='margin-top: 19px;'>" + edefs[2]
+            + "<span class='caret'></span></button>"
+            + "<ul class='dropdown-menu'>"
+            + "<li role='presentation'><span class='text-muted' id='st1' style='cursor: pointer;'>cis</span></li>"
+            + "<li role='presentation'><span class='text-muted' id='st2' style='cursor: pointer;'>trans</span></li>"
+            + "</ul>"
+            + "</div></div>";
+
+        this.el.innerHTML += "<div class='col-md-1'><button class='btn btn-default' type='button' id='annook' style='margin-top: 19px;'>OK"
+            + "</button></div>";
+    },
+    defineStyle: function(){
+        var st = this.el.style;
+        st.display = "block";
+        st.position = "absolute";
+        st.left = "1%";
+        st.width = "435px";
+        st.height = "170px";
+        st.padding = "10px";
+        st.paddingRight = "55px";
+        st.border = "1px solid black";
+        st.borderRadius = "1px";
+        st.backgroundColor = "#F6F6F6";
+        st.overflow = "auto";
+        st.fontWeight = "bold";
+        st.textAlign = "center";
+        st.zIndex = 1002;
+    },
+    selectWc1: function(){
+        this.anno.set("base1", "wc");
+        document.getElementById("menu1").innerHTML = "Watson-Crick<span class='caret'></span>";
+    },
+    selectHg1: function(){
+        this.anno.set("base1", "hg");
+        document.getElementById("menu1").innerHTML = "Hoogsteen<span class='caret'></span>";
+    },
+    selectSg1: function(){
+        this.anno.set("base1", "sg");
+        document.getElementById("menu1").innerHTML = "Sugar<span class='caret'></span>";
+    },
+    selectWc2: function(){
+        this.anno.set("base2", "wc");
+        document.getElementById("menu2").innerHTML = "Watson-Crick<span class='caret'></span>";
+    },
+    selectHg2: function(){
+        this.anno.set("base2", "hg");
+        document.getElementById("menu2").innerHTML = "Hoogsteen<span class='caret'></span>";
+    },
+    selectSg2: function(){
+        this.anno.set("base2", "sg");
+        document.getElementById("menu2").innerHTML = "Sugar<span class='caret'></span>";
+    },
+    selectStericityCis: function(){
+        this.anno.set("stericity", "cis");
+        document.getElementById("menu3").innerHTML = "Cis<span class='caret'></span>";
+    },
+    selectStericityTrans: function(){
+        this.anno.set("stericity", "trans");
+        document.getElementById("menu3").innerHTML = "Trans<span class='caret'></span>";
+    }
+});
+
+module.exports = AnnoView;
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/optspanel.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/optspanel.js
new file mode 100644
index 0000000..2334d87
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/optspanel.js
@@ -0,0 +1,189 @@
+var Backbone = require("backbone");
+var $ = jQuery = require("jquery");
+var spectrum = require("../utils/spectrum");
+var lassotool = require("../utils/lasso");
+var _ = require("underscore");
+var tooltip = require("tooltip");
+
+var Optspanel = Backbone.View.extend({
+    events: {
+        "click #export": "exportAsPNG",
+        "click #center": "center",
+        "click #lasso": "activateLasso",
+        "click #discovery": "activateDiscovery",
+        "click #newbond": "activateBondDrawing"
+    },
+    initialize: function(opts){
+        this.el = opts.el;
+        this.el.className += " container-fluid";
+        this.el.id = "opts";
+        this.struct = opts.struct;
+        this.vis = opts.vis;
+
+        this.listenTo(this.vis, "rendered", this.checkMode);
+    },
+    render: function(){
+        tooltip({
+            showDelay: 100,
+            offset: {
+                x: -85,
+                y: 0
+            },
+            style: {
+                "border-radius": "5px"
+            }
+        });
+        this.el.innerHTML += '<div class="cntrl"><button class="icon" id="discovery" data-tooltip="Exploration mode" value="Discovery mode" readonly="readonly"><img class="mode" src="http://www.cipherpoint.com/wp-content/uploads/2014/07/search.png"></button>'
+                        + '<button class="icon" id="lasso" value="Lasso mode"  data-tooltip="Selection mode" readonly="readonly"><img class="mode" src="https://d30y9cdsu7xlg0.cloudfront.net/png/21906-200.png"></button>'
+                        + '<button class="icon" id="newbond" value="Bond drawing mode" data-tooltip="Editing mode" readonly="readonly"><img class="mode" src="http://vseo.vn/dao-tao-seo/uploads/tin-tuc/anchor-link.png"></button></div>';
+  		this.el.innerHTML += '<div class="col-md-3"><p class="res">A</p></div>';
+  		this.el.innerHTML += '<div class="col-md-3"><p class="res">C</p></div>';
+  		this.el.innerHTML += '<div class="col-md-3"><p class="res">G</p></div>';
+  		this.el.innerHTML += '<div class="col-md-3"><p class="res">U</p></div>';
+        this.el.innerHTML += '<div class="col-md-3"><input type="text" id="acolor"></div>';
+  		this.el.innerHTML += '<div class="col-md-3"><input type="text" id="ccolor"></div>';
+  		this.el.innerHTML += '<div class="col-md-3"><input type="text" id="gcolor"></div>';
+  		this.el.innerHTML += '<div class="col-md-3"><input type="text" id="ucolor"></div>';
+        this.el.innerHTML += '<div class="col-md-9 colsel"><p>Color of selected nucleic acids</p></div>';
+        this.el.innerHTML += '<div class="col-md-3 colsel"><input type="text" id="selcolor"></div>';
+    	this.el.innerHTML += '<div class="cntrl"><input class="button" id="center" value="Reset viewport" readonly="readonly">'
+    	               + '<input class="button" id="export" value="Export as PNG" readonly="readonly"></div>';
+
+        //init colors
+        this.initColors(this);
+    },
+    initColors: function(self){
+        var res = self.struct.get("residues");
+        var cy = self;
+        $("#acolor").spectrum({
+            color: "#64F73F",
+            change: function(color){
+                res.setResidueColor("A", color.toHexString());
+                cy.vis.cy.nodes("[label='A']").css("background-color", color.toHexString());
+            }
+        });
+        $("#ccolor").spectrum({
+            color: "#FFB340",
+            change: function(color){
+                res.setResidueColor("C", color.toHexString());
+                cy.vis.cy.nodes("[label='C']").css("background-color", color.toHexString());
+            }
+        });
+        $("#gcolor").spectrum({
+            color: "#EB413C",
+            change: function(color){
+                res.setResidueColor("G", color.toHexString());
+                cy.vis.cy.nodes("[label='G']").css("background-color", color.toHexString());
+            }
+        });
+        $("#ucolor").spectrum({
+            color: "#3C88EE",
+            change: function(color){
+                res.setResidueColor("U", color.toHexString());
+                cy.vis.cy.nodes("[label='U']").css("background-color", color.toHexString());
+            }
+        });
+        $("#selcolor").spectrum({
+            color: "#F6F6F6",
+            change: function(color){
+                var sel = cy.vis.cy.$(".chosen");
+                res.setSelectionColor(sel, color.toHexString());
+                sel.css("background-color", color.toHexString());
+            }
+        });
+    },
+    exportAsPNG: function(){
+        var cy = this.vis.cy;
+        var png64 = cy.png({scale: 5});
+        var newTab = window.open();
+        newTab.document.write("<img src=" + png64 + " />");
+        newTab.focus();
+    },
+    center: function(){
+        var cy = this.vis.cy;
+        cy.center();
+        cy.fit();
+    },
+    activateLasso: function(){
+        var polygon = [];
+        var canvas, c2; // shared by the lasso event handlers below
+        var cy = this.vis.cy;
+        var self = this;
+        //turn off panning and zoom
+        cy.userPanningEnabled(false);
+        cy.userZoomingEnabled(false);
+        cy.nodes().lock();
+        //disable edge drawing
+        cy.edgehandles("disable");
+        this.bondDrawing = false;
+        //add lasso
+        $(".cy")
+        .lasso()
+        .on("lassoBegin", function(e, lassoPoints) {
+            polygon = [];
+            canvas = self.vis.el.childNodes[1];
+            c2 = canvas.getContext('2d');
+            c2.fillStyle = "rgba(100, 100, 100, 0.02)";
+            c2.beginPath();
+
+            c2.moveTo(e.pageX, e.pageY);
+        })
+        .bind("lassoPoint", function(e, lassoPoint) {
+            c2.lineTo(lassoPoint[0], lassoPoint[1] );
+            c2.fill();
+            polygon.push({x: lassoPoint[0], y: lassoPoint[1]});
+        })
+        .on("lassoDone", function(e, lassoPoints) {
+            // select every graph node whose rendered position lies inside the lasso polygon
+            c2.closePath();
+            c2.clearRect(0,0,canvas.width,canvas.height);
+            var graphNodes = cy.nodes("[type!='index']");
+            for(var i=0; i<graphNodes.length; i++){
+                if(self.isPointInPoly(polygon, cy.$("#" + graphNodes[i].id()).renderedPosition())){
+                    cy.$("#" + graphNodes[i].id()).addClass("chosen");
+                }
+            }
+        });
+    },
+    isPointInPoly: function(poly, pt){
+        var i, j, c = false;
+        for (i = 0, j = poly.length-1; i < poly.length; j = i++) {
+          if ( ((poly[i].y>pt.y) != (poly[j].y>pt.y)) &&
+            (pt.x < (poly[j].x-poly[i].x) * (pt.y-poly[i].y) / (poly[j].y-poly[i].y) + poly[i].x) )
+            c = !c;
+        }
+        return c;
+    },
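+    // Illustrative (added note): isPointInPoly is the classic ray-casting test;
+    // a point is inside when a horizontal ray from it crosses the polygon
+    // boundary an odd number of times, e.g.
+    //   isPointInPoly([{x:0,y:0},{x:10,y:0},{x:10,y:10},{x:0,y:10}], {x:5,y:5})
+    // returns true.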
+    activateDiscovery: function(){
+        var cy = this.vis.cy;
+        //remove lasso
+        this.removeLasso();
+        //disable edge drawing
+        cy.edgehandles("disable");
+        //turn zooming and panning back on
+        cy.userPanningEnabled(true);
+        cy.userZoomingEnabled(true);
+        cy.nodes().unlock();
+        this.bondDrawing = false;
+    },
+    activateBondDrawing: function(){
+        var cy = this.vis.cy;
+        //remove lasso
+        this.removeLasso();
+        cy.edgehandles("enable");
+        this.bondDrawing = true;
+    },
+    removeLasso: function(){
+        //remove lasso
+        $(".cy").lasso().off("lassoBegin");
+        $(".cy").lasso().off("lassoDone");
+        $(".cy").lasso().unbind("lassoPoint");
+    },
+    checkMode: function(){
+        if(this.bondDrawing){
+            this.vis.cy.edgehandles("enable");
+        }
+    }
+});
+
+module.exports = Optspanel;
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/seqpanel.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/seqpanel.js
new file mode 100644
index 0000000..3db84cc
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/seqpanel.js
@@ -0,0 +1,78 @@
+var Backbone = require("backbone");
+
+var Seqpanel = Backbone.View.extend({
+    events: {
+        "click #perform": "drawInput"
+    },
+    initialize: function(opts){
+        this.struct = opts.struct;
+        this.el = opts.el;
+        this.el.className += " container-fluid";
+        this.el.id = "seqinput";
+
+        this.listenTo(this.struct, "change:seq", this.updateSeq);
+        this.listenTo(this.struct, "change:dotbr", this.updateDotbr);
+    },
+    render: function(){
+        this.el.innerHTML += '<div class="col-md-11 seqin"><input type="text" class="alertfield" id="alert" readonly>'
+                        + '<input class="textbox" id="seqbox">'
+                        + '<input class="textbox" id="dotbrbox"></div>';
+        this.el.innerHTML += '<div class="col-md-1 seqin">'
+                        + '<input class="button" id="perform" value="Display" readonly="readonly">'
+                        + '</div>';
+        document.getElementById("seqbox").value = this.struct.get("seq");
+        document.getElementById("dotbrbox").value = this.struct.get("dotbr");
+        this.el.style.paddingBottom = "20px";
+    },
+    updateSeq: function(){
+        document.getElementById("seqbox").value = this.struct.get("seq");
+    },
+    updateDotbr: function(){
+        document.getElementById("dotbrbox").value = this.struct.get("dotbr");
+    },
+    drawInput: function(){
+        var sequ = [
+            document.getElementById("seqbox").value,
+            document.getElementById("dotbrbox").value
+        ];
+        var state = this.checkInput(sequ);
+        if(state.fine){
+            this.struct.set("seq", sequ[0]);
+            this.struct.set("dotbr", sequ[1]);
+            this.struct.set("renderSwitch", !this.struct.get("renderSwitch"));
+        }
+        else {
+            var al = document.getElementById("alert");
+            if(al){
+                al.value = state.msg;
+            }
+        }
+    },
+    checkInput: function(sequences){
+        var isFine = true;
+        var errMsg = "";
+
+        if(sequences[0].length === 0 || sequences[1].length === 0){
+          isFine = false;
+          errMsg = "Please enter a sequence!";
+        }
+        else if(sequences[0].length != sequences[1].length){
+          isFine = false;
+          errMsg = "Sequences must have equal length!";
+        }
+        else if(! sequences[1].match('^[().]+$')){
+          isFine = false;
+          errMsg = "Dot-bracket sequence may only contain \"(\", \")\", or \".\"!";
+        }
+        else if(! sequences[0].match('^[ACGUTRYSWKMBDHVNacgutryswkmbdhvn-]+$')){
+          isFine = false;
+          errMsg = "Sequence may only contain IUPAC-characters!";
+        }
+        return {
+            fine: isFine,
+            msg: errMsg
+        };
+    }
+});
+
+module.exports = Seqpanel;
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/vispanel.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/vispanel.js
new file mode 100644
index 0000000..779477a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/vispanel.js
@@ -0,0 +1,218 @@
+var Backbone = require("backbone");
+var cytoscape = require("cytoscape");
+var $ = jQuery = require("jquery");
+var Link = require("../models/link");
+var AnnoView = require("./annoview");
+var edgehandles = require("cytoscape-edgehandles")(cytoscape, $);
+var Style = require("../utils/style");
+
+var Vispanel = Backbone.View.extend({
+    initialize: function(opts){
+        this.el = opts.el;
+        this.struct = opts.struct;
+        this.resindex = opts.resindex;
+        this.annotate = true;
+
+        //events
+        this.listenTo(this.struct, "change:renderSwitch", this.render);
+    },
+    render: function(){
+        var self = this;
+        var srcNode = null;
+        var targetNode = null;
+        self.el.innerHTML = "";
+
+        this.cy = cytoscape({
+      		container: self.el,
+      		style: (new Style()).getCytoscapeStyle(),
+      		elements: self.struct.toCytoscape(),
+      		layout: {
+      			//Use preset layout with precalculated
+      			//nucleotide coordinates
+        		name: 'preset',
+      		},
+          	ready: function(){
+                this.edges("[label='violation']").each(function(index, ele){
+                    if(Object.keys(ele._private.classes).length === 0){
+                        ele.addClass("wcwccis");
+                    }
+                });
+                //events
+                this.on("tapstart", function(evt){
+                    this.$(".chosen").removeClass("chosen");
+                });
+                this.on("tapdragover", "node", function(evt){
+                    var seqbox = document.getElementById("seqbox");
+                    if(seqbox && (this.id() < seqbox.value.length)){
+                        seqbox.selectionStart = parseInt(this.id());
+                        seqbox.selectionEnd = parseInt(this.id())+1;
+                    }
+                });
+                this.on("tapdragout", "node", function(evt){
+                    var seqbox = document.getElementById("seqbox");
+                    if(seqbox && (this.id() < seqbox.value.length)){
+                        seqbox.selectionEnd = -1;
+                    }
+                });
+                this.on("tap", "edge", function(evt){
+                    if(self.annotate && this.data("label") === "violation"){
+                        var obj = this._private.classes;
+                        for(var c in obj) break;
+                        new AnnoView(c.substring(0, 2), c.substring(2, 4), c.substring(4, c.length+1), this, self.el);
+                    }
+                });
+
+                //Residue Nodes
+                if(self.resindex){
+                    self.setResidueNodes(this);
+                }
+          	},
+            userPanningEnabled: true,
+            userZoomingEnabled: true
+    	});
+
+        this.cy.edgehandles({
+            loopAllowed: function(node){
+                // for the specified node, return whether edges from itself to itself are allowed
+                return false;
+            },
+            complete: function(srcNode, targetNode, addedEntities){
+                // fired when edgehandles is done and entities are added
+                self.struct.get("links").newBond(srcNode[0].id(), targetNode[0].id());
+            },
+            enabled: false,
+            preview: false
+		});
+
+        this.trigger("rendered");
+    },
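+    // Add an index label node every fifth residue and connect it to that
+    // residue with a short edge.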
+    setResidueNodes: function(cy){
+        //index nodes
+        for(var i=1; i<this.struct.get("seq").length/5; i++){
+            var pos = this.getPos(this.struct, (i*5)-1);
+            this.cy.add({
+                group: "nodes",
+                data: {
+                    id: (this.struct.get("seq").length + i).toString(),
+                    label: (i*5) + "",
+                    type: "index"
+                },
+                position: {
+                    x: pos[0],
+                    y: pos[1]
+                },
+                selected: false,
+                selectable: false,
+                locked: false,
+                grabbable: true,
+                css: {
+                    "background-color": "#fff"
+                }
+            });
+            cy.add({
+                group: "edges",
+                data: {
+                    id: "index" + i,
+                    source: (i*5) - 1,
+                    target: this.struct.get("seq").length + i,
+                    label: i*5,
+                    weight: 4
+                },
+                css: {
+                    'line-color': "black",
+                    'width': 4
+                }
+            });
+        }
+    },
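+    // Scan angles around residue `target` and return the first [x, y] at
+    // `distance` px that does not collide with any other residue.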
+    getPos: function(struct, target){
+        var distance = 50;
+        var found = false;
+        var originX = struct.get("residues").at(target).get("x");
+        var originY = struct.get("residues").at(target).get("y");
+        var angleFactor = 0.0;
+        var angle, x, y, tx, ty;
+        while(!found && angleFactor<1){
+            angle = angleFactor*Math.PI*2;
+            x = Math.cos(angle)*distance + originX;
+            y = Math.sin(angle)*distance + originY;
+            for(var i=0; i<struct.get("seq").length+1; i++){
+                if(i === struct.get("seq").length){
+                    found = true;
+                    break;
+                }
+                tx = struct.get("residues").at(i).get("x");
+                ty = struct.get("residues").at(i).get("y");
+                if(Math.pow((x - tx), 2) + Math.pow((y - ty), 2) < Math.pow(distance, 2)){
+                    break;
+                }
+            }
+            angleFactor += 0.05;
+        }
+        return [x, y];
+    },
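+    // Restyle the bond with id `bondid` as "canonical" (blue) or
+    // "non-canonical" (red).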
+    changeBondType: function(bondid, type){
+        if(type === "canonical"){
+            this.cy.$("#" + bondid)[0].style("line-color", "#3A9AD9");
+            this.cy.$("#" + bondid)[0].style("width", 4);
+            this.cy.$("#" + bondid)[0]._private.classes = {};
+        }
+        else if(type === "non-canonical"){
+            this.cy.$("#" + bondid)[0].style("line-color", "red");
+            this.cy.$("#" + bondid)[0].style("width", 4);
+            this.cy.$("#" + bondid)[0].addClass("wcwccis");
+        }
+        else {
+            throw new Error("Type must be 'canonical' or 'non-canonical'");
+        }
+    },
+    addNCBond: function(source, target){
+        this.cy.add({
+            group: "edges",
+            data: {
+                id: source + "to" + target,
+                source: source.toString(),
+                target: target.toString()
+            },
+            css: {
+                "line-color": "red",
+                "width": 4
+            }
+        });
+        this.cy.$("#" + source + "to" + target)[0].addClass("wcwccis");
+    },
+    setLeontisWesthof: function(edge, lwclass){
+        var validClasses = [
+            "wcwccis",
+            "wcwctrans",
+            "sgsgcis",
+            "sgsgtrans",
+            "hghgcis",
+            "hghgtrans",
+            "wcsgcis",
+            "wcsgtrans",
+            "sgwccis",
+            "sgwctrans",
+            "wchgcis",
+            "wchgtrans",
+            "hgwccis",
+            "hgwctrans",
+            "hgsgcis",
+            "hgsgtrans",
+            "sghgcis",
+            "sghgtrans"
+        ];
+        if(validClasses.indexOf(lwclass) === -1){
+            throw new Error("LW-Class must be one of " + validClasses);
+        }
+        else {
+            var classes = Object.keys(this.cy.$("#" + edge)[0]._private.classes);
+            for(var i=0; i<classes.length; i++){
+                this.cy.$("#" + edge)[0].removeClass(classes[i]);
+            }
+            this.cy.$("#" + edge)[0].addClass(lwclass);
+        }
+    }
+});
+
+module.exports = Vispanel;
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/cytoscape/cytoscape.js b/config/plugins/visualizations/charts/static/repository/plugins/cytoscape/cytoscape.js
new file mode 100644
index 0000000..fb608c6
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/cytoscape/cytoscape.js
@@ -0,0 +1,62 @@
+!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var t;t="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,t.cytoscape=e()}}(function(){var define,module,exports;return function e(t,r,n){function i(o,s){if(!r[o]){if(!t[o]){var l="function"==typeof require&&require;if(!s&&l)return l(o,!0);if(a)return a(o,!0);var u=new Error("Cannot fin [...]
+
+Cytoscape.js 2.7.8 (MIT licensed)
+
+Copyright (c) The Cytoscape Consortium
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the “Software”), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+*/
+"use strict"},{}],2:[function(e,t,r){"use strict";var n=e("./util"),i=e("./is"),a=e("./promise"),o=function(e,t,r){if(!(this instanceof o))return new o(e,t,r);var a=this._private=n.extend({duration:1e3},t,r);a.target=e,a.style=a.style||a.css,a.started=!1,a.playing=!1,a.hooked=!1,a.applying=!1,a.progress=0,a.completes=[],a.frames=[],a.complete&&i.fn(a.complete)&&a.completes.push(a.complete),this.length=1,this[0]=this},s=o.prototype;n.extend(s,{instanceString:function(){return"animation"}, [...]
+o&&a.recalculateRenderedStyle(n,r.useCache);for(var l=0;l<n.length;l++){var c=n[l];o&&c.isEdge()&&"bezier"===c.pstyle("curve-style").strValue&&a.recalculateRenderedStyle(c.parallelEdges(),r.useCache),d(t,m(c,r))}return t.x1=u(t.x1),t.y1=u(t.y1),t.x2=u(t.x2),t.y2=u(t.y2),t.w=u(t.x2-t.x1),t.h=u(t.y2-t.y1),t};var w=function(e){e.uppercaseName=s.capitalize(e.name),e.autoName="auto"+e.uppercaseName,e.labelName="label"+e.uppercaseName,e.outerName="outer"+e.uppercaseName,e.uppercaseOuterName=s. [...]
+/* Given a tension, friction, and duration, a simulation at 60FPS will first run without a defined duration in order to calculate the full path. A second pass
+       then adjusts the time delta -- using the relation between actual time and duration -- to calculate the path for the duration-constrained animation. */
+var h=function(){function e(e){return-e.tension*e.x-e.friction*e.v}function t(t,r,n){var i={x:t.x+n.dx*r,v:t.v+n.dv*r,tension:t.tension,friction:t.friction};return{dx:i.v,dv:e(i)}}function r(r,n){var i={dx:r.v,dv:e(r)},a=t(r,.5*n,i),o=t(r,.5*n,a),s=t(r,n,o),l=1/6*(i.dx+2*(a.dx+o.dx)+s.dx),u=1/6*(i.dv+2*(a.dv+o.dv)+s.dv);return r.x=r.x+l*n,r.v=r.v+u*n,r}return function n(e,t,i){var a,o,s,l={x:-1,v:0,tension:null,friction:null},u=[0],c=0,d=1e-4,h=.016;for(e=parseFloat(e)||500,t=parseFloat( [...]
+Event object based on jQuery events, MIT license
+
+https://jquery.org/license/
+https://tldrlegal.com/license/mit-license
+https://github.com/jquery/jquery/blob/master/src/event.js
+*/
+var a=function(e,t){return this instanceof a?(e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented?i:n):this.type=e,t&&(this.type=void 0!==t.type?t.type:this.type,this.cy=t.cy,this.cyTarget=t.cyTarget,this.cyPosition=t.cyPosition,this.cyRenderedPosition=t.cyRenderedPosition,this.namespace=t.namespace,this.layout=t.layout,this.data=t.data,this.message=t.message),void(this.timeStamp=e&&e.timeStamp||Date.now())):new a(e,t)};a.prototype={instanceString: [...]
+var d=!r||n.visible()&&!n.transparent();if(r&&!d)return;var h=f.nodeShapes[p.getNodeShape(n)];h.checkPoint(e,t,0,o,s,c.x,c.y)&&a(n,0)}}}function l(n){var o=n._private;if("no"!==n.pstyle("events").strValue){var l,u,c=o.rscratch,d=n.pstyle("width").pfValue/2+b,h=d*d,v=2*d,y=o.source,x=o.target,w=!1,E=function(){if(void 0!==u)return u;if(!r)return u=!0,!0;var e=n.visible()&&!n.transparent();return e?(u=!0,!0):(u=!1,!1)};if("segments"===c.edgeType||"straight"===c.edgeType||"haystack"===c.edg [...]
+if(Y&&(X.x+=T[0],X.y+=T[1]),q){var W=e.hoverData.dragDelta;Y&&W&&n.number(W[0])&&n.number(W[1])&&(X.x+=W[0],X.y+=W[1])}}}}e.hoverData.draggingEles=!0;var $=v.collection(V);$.updateCompoundBounds(),$.trigger("position drag"),e.redrawHint("drag",!0),e.redraw()}else L();l=!0}else if(P){if(e.hoverData.dragging||!v.boxSelectionEnabled()||!I&&v.panningEnabled()&&v.userPanningEnabled()){if(!e.hoverData.selecting&&v.panningEnabled()&&v.userPanningEnabled()){var H=s(k,e.hoverData.downs);H&&(e.hov [...]
+e.lineWidth=R,e.strokeStyle=A}}var j=2*t.pstyle("text-outline-width").pfValue;if(j>0&&(e.lineWidth=j),"wrap"===t.pstyle("text-wrap").value){var X=o.labelWrapCachedLines,Y=p/X.length;switch(x){case"top":u-=(X.length-1)*Y;break;case"bottom":break;default:case"center":u-=(X.length-1)*Y/2}for(var W=0;W<X.length;W++)j>0&&e.strokeText(X[W],l,u),e.fillText(X[W],l,u),u+=Y}else j>0&&e.strokeText(c,l,u),e.fillText(c,l,u);0!==w&&(e.rotate(-w),e.translate(-_,-P)),this.shadowStyle(e,"transparent",0)} [...]
+"use strict";var n=e("./is"),i=e("./util"),a=e("./thread"),o=e("./promise"),s=e("./define"),l=function(t){if(!(this instanceof l))return new l(t);this._private={pass:[]};var r=4;if(n.number(t),"undefined"!=typeof navigator&&null!=navigator.hardwareConcurrency)t=navigator.hardwareConcurrency;else try{t=e("os").cpus().length}catch(i){t=r}for(var o=0;t>o;o++)this[o]=new a;this.length=t},u=l.prototype;i.extend(u,{instanceString:function(){return"fabric"},require:function(e,t){for(var r=0;r<t [...]
+Ported by Xueqiao Xu <xueqiaoxu at gmail.com>;
+
+PSF LICENSE AGREEMENT FOR PYTHON 2.7.2
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation (“PSF”), and the Individual or Organization (“Licensee”) accessing and otherwise using Python 2.7.2 software in source or binary form and its associated documentation.
+2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 2.7.2 alone or in any derivative version, provided, however, that PSF’s License Agreement and PSF’s notice of copyright, i.e., “Copyright © 2001-2012 Python Software Foundation; All Rights Reserved” are retained in Python 2.7 [...]
+3. In the event Licensee prepares a derivative work that is based on or incorporates Python 2.7.2 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python 2.7.2.
+4. PSF is making Python 2.7.2 available to Licensee on an “AS IS” basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 2.7.2 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.2 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.2, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
+7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
+8. By copying, installing or otherwise using Python 2.7.2, Licensee agrees to be bound by the terms and conditions of this License Agreement.
+*/
+"use strict";(function(){var e,n,i,a,o,s,l,u,c,d,h,p,f,v,g;i=Math.floor,d=Math.min,n=function(e,t){return t>e?-1:e>t?1:0},c=function(e,t,r,a,o){var s;if(null==r&&(r=0),null==o&&(o=n),0>r)throw new Error("lo must be non-negative");for(null==a&&(a=e.length);a>r;)s=i((r+a)/2),o(t,e[s])<0?a=s:r=s+1;return[].splice.apply(e,[r,r-r].concat(t)),t},s=function(e,t,r){return null==r&&(r=n),e.push(t),v(e,0,e.length-1,r)},o=function(e,t){var r,i;return null==t&&(t=n),r=e.pop(),e.length?(i=e[0],e[0]=r [...]
+Embeddable Minimum Strictly-Compliant Promises/A+ 1.1.1 Thenable
+Copyright (c) 2013-2014 Ralf S. Engelschall (http://engelschall.com)
+Licensed under The MIT License (http://opensource.org/licenses/MIT)
+*/
+"use strict";var n=0,i=1,a=2,o=function(e){return this instanceof o?(this.id="Thenable/1.0.7",this.state=n,this.fulfillValue=void 0,this.rejectReason=void 0,this.onFulfilled=[],this.onRejected=[],this.proxy={then:this.then.bind(this)},void("function"==typeof e&&e.call(this,this.fulfill.bind(this),this.reject.bind(this)))):new o(e)};o.prototype={fulfill:function(e){return s(this,i,"fulfillValue",e)},reject:function(e){return s(this,a,"rejectReason",e)},then:function(e,t){var r=this,n=new  [...]
+type:u.visibility},{name:"opacity",type:u.zeroOneNumber},{name:"z-index",type:u.nonNegativeInt},{name:"overlay-padding",type:u.size},{name:"overlay-color",type:u.color},{name:"overlay-opacity",type:u.zeroOneNumber},{name:"shadow-blur",type:u.size},{name:"shadow-color",type:u.color},{name:"shadow-opacity",type:u.zeroOneNumber},{name:"shadow-offset-x",type:u.bidirectionalSize},{name:"shadow-offset-y",type:u.bidirectionalSize},{name:"text-shadow-blur",type:u.size},{name:"text-shadow-color", [...]
+"use strict";var window=_dereq_("./window"),util=_dereq_("./util"),Promise=_dereq_("./promise"),Event=_dereq_("./event"),define=_dereq_("./define"),is=_dereq_("./is"),Thread=function(e){if(!(this instanceof Thread))return new Thread(e);var t=this._private={requires:[],files:[],queue:null,pass:[],disabled:!1};is.plainObject(e)&&null!=e.disabled&&(t.disabled=!!e.disabled)},thdfn=Thread.prototype,stringifyFieldVal=function(e){var t=is.fn(e)?e.toString():"JSON.parse('"+JSON.stringify(e)+"')" [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.css b/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.css
new file mode 100644
index 0000000..a3cc086
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.css
@@ -0,0 +1,261 @@
+/*rules for the plot target div.  These will be cascaded down to all plot elements according to css rules*/
+.jqplot-target {
+    position: relative;
+    color: #666666;
+    font-family: "Trebuchet MS", Arial, Helvetica, sans-serif;
+    font-size: 1em;
+/*    height: 300px;
+    width: 400px;*/
+}
+
+/*rules applied to all axes*/
+.jqplot-axis {
+    font-size: 0.75em;
+}
+
+.jqplot-xaxis {
+    margin-top: 10px;
+}
+
+.jqplot-x2axis {
+    margin-bottom: 10px;
+}
+
+.jqplot-yaxis {
+    margin-right: 10px;
+}
+
+.jqplot-y2axis, .jqplot-y3axis, .jqplot-y4axis, .jqplot-y5axis, .jqplot-y6axis, .jqplot-y7axis, .jqplot-y8axis, .jqplot-y9axis, .jqplot-yMidAxis {
+    margin-left: 10px;
+    margin-right: 10px;
+}
+
+/*rules applied to all axis tick divs*/
+.jqplot-axis-tick, .jqplot-xaxis-tick, .jqplot-yaxis-tick, .jqplot-x2axis-tick, .jqplot-y2axis-tick, .jqplot-y3axis-tick, .jqplot-y4axis-tick, .jqplot-y5axis-tick, .jqplot-y6axis-tick, .jqplot-y7axis-tick, .jqplot-y8axis-tick, .jqplot-y9axis-tick, .jqplot-yMidAxis-tick {
+    position: absolute;
+    white-space: pre;
+}
+
+
+.jqplot-xaxis-tick {
+    top: 0px;
+    /* initial position until tick is drawn in its proper place */
+    left: 15px;
+/*    padding-top: 10px;*/
+    vertical-align: top;
+}
+
+.jqplot-x2axis-tick {
+    bottom: 0px;
+    /* initial position until tick is drawn in its proper place */
+    left: 15px;
+/*    padding-bottom: 10px;*/
+    vertical-align: bottom;
+}
+
+.jqplot-yaxis-tick {
+    right: 0px;
+    /* initial position until tick is drawn in its proper place */
+    top: 15px;
+/*    padding-right: 10px;*/
+    text-align: right;
+}
+
+.jqplot-yaxis-tick.jqplot-breakTick {
+	right: -20px;
+	margin-right: 0px;
+	padding:1px 5px 1px 5px;
+/*	background-color: white;*/
+	z-index: 2;
+	font-size: 1.5em;
+}
+
+.jqplot-y2axis-tick, .jqplot-y3axis-tick, .jqplot-y4axis-tick, .jqplot-y5axis-tick, .jqplot-y6axis-tick, .jqplot-y7axis-tick, .jqplot-y8axis-tick, .jqplot-y9axis-tick {
+    left: 0px;
+    /* initial position until tick is drawn in its proper place */
+    top: 15px;
+/*    padding-left: 10px;*/
+/*    padding-right: 15px;*/
+    text-align: left;
+}
+
+.jqplot-yMidAxis-tick {
+    text-align: center;
+    white-space: nowrap;
+}
+
+.jqplot-xaxis-label {
+    margin-top: 10px;
+    font-size: 11pt;
+    position: absolute;
+}
+
+.jqplot-x2axis-label {
+    margin-bottom: 10px;
+    font-size: 11pt;
+    position: absolute;
+}
+
+.jqplot-yaxis-label {
+    margin-right: 10px;
+/*    text-align: center;*/
+    font-size: 11pt;
+    position: absolute;
+}
+
+.jqplot-yMidAxis-label {
+    font-size: 11pt;
+    position: absolute;
+}
+
+.jqplot-y2axis-label, .jqplot-y3axis-label, .jqplot-y4axis-label, .jqplot-y5axis-label, .jqplot-y6axis-label, .jqplot-y7axis-label, .jqplot-y8axis-label, .jqplot-y9axis-label {
+/*    text-align: center;*/
+    font-size: 11pt;
+    margin-left: 10px;
+    position: absolute;
+}
+
+.jqplot-meterGauge-tick {
+    font-size: 0.75em;
+    color: #999999;
+}
+
+.jqplot-meterGauge-label {
+    font-size: 1em;
+    color: #999999;
+}
+
+table.jqplot-table-legend {
+    margin-top: 12px;
+    margin-bottom: 12px;
+    margin-left: 12px;
+    margin-right: 12px;
+}
+
+table.jqplot-table-legend, table.jqplot-cursor-legend {
+    background-color: rgba(255,255,255,0.6);
+    border: 1px solid #cccccc;
+    position: absolute;
+    font-size: 0.75em;
+}
+
+td.jqplot-table-legend {
+    vertical-align:middle;
+}
+
+/*
+These rules could be used instead of assigning
+element styles and relying on js object properties.
+*/
+
+/*
+td.jqplot-table-legend-swatch {
+    padding-top: 0.5em;
+    text-align: center;
+}
+
+tr.jqplot-table-legend:first td.jqplot-table-legend-swatch {
+    padding-top: 0px;
+}
+*/
+
+td.jqplot-seriesToggle:hover, td.jqplot-seriesToggle:active {
+    cursor: pointer;
+}
+
+.jqplot-table-legend .jqplot-series-hidden {
+    text-decoration: line-through;
+}
+
+div.jqplot-table-legend-swatch-outline {
+    border: 1px solid #cccccc;
+    padding:1px;
+}
+
+div.jqplot-table-legend-swatch {
+    width:0px;
+    height:0px;
+    border-top-width: 5px;
+    border-bottom-width: 5px;
+    border-left-width: 6px;
+    border-right-width: 6px;
+    border-top-style: solid;
+    border-bottom-style: solid;
+    border-left-style: solid;
+    border-right-style: solid;
+}
+
+.jqplot-title {
+    top: 0px;
+    left: 0px;
+    padding-bottom: 0.5em;
+    font-size: 1.2em;
+}
+
+table.jqplot-cursor-tooltip {
+    border: 1px solid #cccccc;
+    font-size: 0.75em;
+}
+
+
+.jqplot-cursor-tooltip {
+    border: 1px solid #cccccc;
+    font-size: 0.75em;
+    white-space: nowrap;
+    background: rgba(208,208,208,0.5);
+    padding: 1px;
+}
+
+.jqplot-highlighter-tooltip, .jqplot-canvasOverlay-tooltip {
+    border: 1px solid #cccccc;
+    font-size: 1.2em;
+    font-weight: bold;
+    white-space: nowrap;
+    background: rgba(235,235,235,1);
+    padding: 1px;
+    border-radius: 5px;
+}
+
+.jqplot-point-label {
+    font-size: 0.75em;
+    z-index: 2;
+}
+      
+td.jqplot-cursor-legend-swatch {
+    vertical-align: middle;
+    text-align: center;
+}
+
+div.jqplot-cursor-legend-swatch {
+    width: 1.2em;
+    height: 0.7em;
+}
+
+.jqplot-error {
+/*   Styles added to the plot target container when there is an error go here.*/
+    text-align: center;
+}
+
+.jqplot-error-message {
+/*    Styling of the custom error message div goes here.*/
+    position: relative;
+    top: 46%;
+    display: inline-block;
+}
+
+div.jqplot-bubble-label {
+    font-size: 0.8em;
+/*    background: rgba(90%, 90%, 90%, 0.15);*/
+    padding-left: 2px;
+    padding-right: 2px;
+    color: rgb(20%, 20%, 20%);
+}
+
+div.jqplot-bubble-label.jqplot-bubble-label-highlight {
+    background: rgba(90%, 90%, 90%, 0.7);
+}
+
+div.jqplot-noData-container {
+	text-align: center;
+	background-color: rgba(96%, 96%, 96%, 0.3);
+}
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.js b/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.js
new file mode 100644
index 0000000..8facc6c
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.js
@@ -0,0 +1,3 @@
+/* jqPlot 1.0.8r1250 | (c) 2009-2013 Chris Leonello | jplot.com
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(L){var u;L.fn.emptyForce=function(){for(var ah=0,ai;(ai=L(this)[ah])!=null;ah++){if(ai.nodeType===1){L.cleanData(ai.getElementsByTagName("*"))}if(L.jqplot.use_excanvas){ai.outerHTML=""}else{while(ai.firstChild){ai.removeChild(ai.firstChild)}}ai=null}return L(this)};L.fn.removeChildForce=function(ah){while(ah.firstChild){this.removeChildForce(ah.firstChild);ah.removeChild(ah.firstChild)}};L.fn.jqplot=function(){var ah=[];var aj=[];for(var ak=0,ai=arguments.length;ak<ai;ak++){ [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.plugins.js b/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.plugins.js
new file mode 100644
index 0000000..27df286
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/jqplot/jquery.jqplot.plugins.js
@@ -0,0 +1,151 @@
+/* jqPlot 1.0.8r1250 | (c) 2009-2013 Chris Leonello | jplot.com
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(d){d.jqplot.BarRenderer=function(){d.jqplot.LineRenderer.call(this)};d.jqplot.BarRenderer.prototype=new d.jqplot.LineRenderer();d.jqplot.BarRenderer.prototype.constructor=d.jqplot.BarRenderer;d.jqplot.BarRenderer.prototype.init=function(o,q){this.barPadding=8;this.barMargin=10;this.barDirection="vertical";this.barWidth=null;this.shadowOffset=2;this.shadowDepth=5;this.shadowAlpha=0.08;this.waterfall=false;this.groups=1;this.varyBarColor=false;this.highlightMouseOver=true;this [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(a){a.jqplot.BlockRenderer=function(){a.jqplot.LineRenderer.call(this)};a.jqplot.BlockRenderer.prototype=new a.jqplot.LineRenderer();a.jqplot.BlockRenderer.prototype.constructor=a.jqplot.BlockRenderer;a.jqplot.BlockRenderer.prototype.init=function(b){this.css={padding:"2px",border:"1px solid #999",textAlign:"center"};this.escapeHtml=false;this.insertBreaks=true;this.varyBlockColors=false;a.extend(true,this,b);if(this.css.backgroundColor){this.color=this.css.backgroundColor}el [...]
+    /**
+     * Class: $.jqplot.BoxplotRenderer
+     * jqPlot Plugin to draw box plots <http://en.wikipedia.org/wiki/Box_plot>.
+     * 
+     * To use this plugin, include the renderer js file in 
+     * your source:
+     * 
+     * > <script type="text/javascript" src="plugins/jqplot.boxplotRenderer.js"></script>
+     * 
+     * Then you set the renderer in the series options on your plot:
+     * 
+     * > series: [{renderer:$.jqplot.BoxplotRenderer}]
+     * 
+     * Data should be specified like so:
+     * 
+     * > dat = [[sample_id, min, q1, median, q3, max], ...]
+     * 
+     */
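+    /* A minimal setup sketch (the element id and data variable are
+     * illustrative; note that draw() below reads row indices 3..7 as
+     * max, q3, median, q1 and min, so rows may carry extra fields
+     * beyond the format shown above):
+     *
+     * > $.jqplot('boxchart', [dat], {
+     * >     series: [{renderer: $.jqplot.BoxplotRenderer}]
+     * > });
+     */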
+    $.jqplot.BoxplotRenderer = function(){
+        // subclass line renderer to make use of some of its methods.
+        $.jqplot.LineRenderer.call(this);
+        // prop: boxWidth
+        // Default will auto calculate based on plot width and number
+        // of boxes displayed.
+        this.boxWidth = 'auto';
+        this._boxMaxWidth = 100; // if 'auto', cap at this max
+        // prop: lineWidth
+        // The thickness of all lines drawn. Default is 1.5 pixels.
+        this.lineWidth = 1.5;
+    };
+    
+    $.jqplot.BoxplotRenderer.prototype = new $.jqplot.LineRenderer();
+    $.jqplot.BoxplotRenderer.prototype.constructor = $.jqplot.BoxplotRenderer;
+    
+    // called with scope of series.
+    $.jqplot.BoxplotRenderer.prototype.init = function(options, plot) {
+
+        plot.postDrawHooks.add($.jqplot.BoxplotRenderer.removePointerLabels);
+
+        this.lineWidth = options.lineWidth || this.renderer.lineWidth;
+        $.jqplot.LineRenderer.prototype.init.call(this, options);
+        // set the yaxis data bounds here to account for high and low values
+        var db = {"min" : this._yaxis.min, "max" : this._yaxis.max};
+        var d = this._plotData;
+        for (var j=0, dj=d[j]; j<d.length; dj=d[++j]) {
+            for (var k=1, dk=dj[k]; k<dj.length; dk=dj[++k]) {
+                if (dk < db.min || db.min == null)
+                    db.min = dk;
+                if (dk > db.max || db.max == null)
+                    db.max = dk;
+            }
+        }
+        if(this._yaxis.min==null) {
+            this._yaxis.min = db.min*this._yaxis.pad;
+        }
+        if(this._yaxis.max==null) {
+            this._yaxis.max = db.max*this._yaxis.pad;
+        }
+    };
+    
+    // called within scope of series.
+    $.jqplot.BoxplotRenderer.prototype.draw = function(ctx, gd, options) {
+        var d = this.data;
+        var r = this.renderer;
+        // set the shape renderer options
+        var xp = this._xaxis.series_u2p;
+        var yp = this._yaxis.series_u2p;
+        if (!options)
+            options = {};
+        if (!('lineWidth' in options))
+            $.extend(true, options, {lineWidth: this.lineWidth});
+        var boxopts = $.extend(true, {}, options, {strokeRect: true});
+        var boxW = options.boxWidth || r.boxWidth;
+        if (boxW == 'auto')
+            boxW = Math.min(r._boxMaxWidth, 0.6 * ctx.canvas.width/d.length);
+        var endW = boxW / 2; // min and max ticks are half the box width
+        boxW -= this.lineWidth*2;
+
+        var sctx = this.canvas._ctx;
+
+        ctx.save();
+        if (this.show) {
+            for (var i=0, di=d[i]; i<d.length; di=d[++i]) {
+               var  x = xp(di[0]),
+                  min = yp(di[7]),
+                   q1 = yp(di[6]),
+                  med = yp(di[5]),
+                   q3 = yp(di[4]),
+                  max = yp(di[3]);
+
+               var endL = x - endW/2; // start (left) x coord of min/max ticks
+               var endR = x + endW/2; // end (right) x coord of min/max ticks
+               var medL = x - boxW/2; // start (left) x coord of median tick
+               var medR = x + boxW/2; // end (right) x coord of median tick
+
+               // median tick is full box width
+               r.shapeRenderer.draw(ctx, [[medL, med], [medR, med]], options);
+                
+               // draw whiskers
+               r.shapeRenderer.draw(ctx, [[x, min], [x, q1]], options);
+               r.shapeRenderer.draw(ctx, [[x, q3], [x, max]], options);
+
+               // draw min and max ticks
+               r.shapeRenderer.draw(ctx, [[endL, min], [endR, min]], options);
+               r.shapeRenderer.draw(ctx, [[endL, max], [endR, max]], options);
+
+               // draw box
+               var boxH = q1 - q3;
+               var boxpoints = [medL, q3, boxW, boxH];
+               r.shapeRenderer.draw(ctx, boxpoints, boxopts);
+            }
+        }
+        ctx.restore();
+    };
+    
+    $.jqplot.BoxplotRenderer.prototype.drawShadow = function(ctx, gd, options) {
+        // This is a no-op, shadows drawn with lines.
+    };
+    
+    $.jqplot.BoxplotRenderer.removePointerLabels = function() {
+        $(".jqplot-point-label").each(function() {
+           $(this).hide();
+        });
+    };
+    
+
+    
+})(jQuery);/* jqPlot 1.0.8r1250 | (c) 2009-2013 Chris Leonello | jplot.com
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(a){a.jqplot.CanvasAxisLabelRenderer=function(b){this.angle=0;this.axis;this.show=true;this.showLabel=true;this.label="";this.fontFamily='"Trebuchet MS", Arial, Helvetica, sans-serif';this.fontSize="11pt";this.fontWeight="normal";this.fontStretch=1;this.textColor="#666666";this.enableFontSupport=true;this.pt2px=null;this._elem;this._ctx;this._plotWidth;this._plotHeight;this._plotDimensions={height:null,width:null};a.extend(true,this,b);if(b.angle==null&&this.axis!="xaxis"&&th [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(a){a.jqplot.CanvasAxisTickRenderer=function(b){this.mark="outside";this.showMark=true;this.showGridline=true;this.isMinorTick=false;this.angle=0;this.markSize=4;this.show=true;this.showLabel=true;this.labelPosition="auto";this.label="";this.value=null;this._styles={};this.formatter=a.jqplot.DefaultTickFormatter;this.formatString="";this.prefix="";this.fontFamily='"Trebuchet MS", Arial, Helvetica, sans-serif';this.fontSize="10pt";this.fontWeight="normal";this.fontStretch=1;th [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(d){var f=0;d.jqplot.CanvasOverlay=function(q){var o=q||{};this.options={show:d.jqplot.config.enablePlugins,deferDraw:false};this.objects=[];this.objectNames=[];this.canvas=null;this.markerRenderer=new d.jqplot.MarkerRenderer({style:"line"});this.markerRenderer.init();this.highlightObjectIndex=null;if(o.objects){var s=o.objects,r;for(var p=0;p<s.length;p++){r=s[p];for(var t in r){switch(t){case"line":this.addLine(r[t]);break;case"horizontalLine":this.addHorizontalLine(r[t]);b [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(a){a.jqplot.CanvasTextRenderer=function(b){this.fontStyle="normal";this.fontVariant="normal";this.fontWeight="normal";this.fontSize="10px";this.fontFamily="sans-serif";this.fontStretch=1;this.fillStyle="#666666";this.angle=0;this.textAlign="start";this.textBaseline="alphabetic";this.text;this.width;this.height;this.pt2px=1.28;a.extend(true,this,b);this.normalizedFontSize=this.normalizeFontSize(this.fontSize);this.setHeight()};a.jqplot.CanvasTextRenderer.prototype.init=functi [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(a){a.jqplot.CategoryAxisRenderer=function(b){a.jqplot.LinearAxisRenderer.call(this);this.sortMergedLabels=false};a.jqplot.CategoryAxisRenderer.prototype=new a.jqplot.LinearAxisRenderer();a.jqplot.CategoryAxisRenderer.prototype.constructor=a.jqplot.CategoryAxisRenderer;a.jqplot.CategoryAxisRenderer.prototype.init=function(e){this.groups=1;this.groupLabels=[];this._groupLabels=[];this._grouped=false;this._barsPerGroup=null;this.reverse=false;a.extend(true,this,{tickOptions:{fo [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(j){j.jqplot.Cursor=function(q){this.style="crosshair";this.previousCursor="auto";this.show=j.jqplot.config.enablePlugins;this.showTooltip=true;this.followMouse=false;this.tooltipLocation="se";this.tooltipOffset=6;this.showTooltipGridPosition=false;this.showTooltipUnitPosition=true;this.showTooltipDataPosition=false;this.tooltipFormatString="%.4P, %.4P";this.useAxesFormatters=true;this.tooltipAxisGroups=[];this.zoom=false;this.zoomProxy=false;this.zoomTarget=false;this.looseZ [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(c){c.jqplot.EnhancedLegendRenderer=function(){c.jqplot.TableLegendRenderer.call(this)};c.jqplot.EnhancedLegendRenderer.prototype=new c.jqplot.TableLegendRenderer();c.jqplot.EnhancedLegendRenderer.prototype.constructor=c.jqplot.EnhancedLegendRenderer;c.jqplot.EnhancedLegendRenderer.prototype.init=function(d){this.numberRows=null;this.numberColumns=null;this.seriesToggle="normal";this.seriesToggleReplot=false;this.disableIEFading=true;c.extend(true,this,d);if(this.seriesToggle [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(d){d.jqplot.eventListenerHooks.push(["jqplotMouseMove",f]);d.jqplot.Highlighter=function(h){this.show=d.jqplot.config.enablePlugins;this.markerRenderer=new d.jqplot.MarkerRenderer({shadow:false});this.showMarker=true;this.lineWidthAdjust=2.5;this.sizeAdjust=5;this.showTooltip=true;this.tooltipLocation="nw";this.fadeTooltip=true;this.tooltipFadeSpeed="fast";this.tooltipOffset=2;this.tooltipAxes="both";this.tooltipSeparator=", ";this.tooltipContentEditor=null;this.useAxesForma [...]
+   jsDate | (c) 2010-2013 Chris Leonello
+ */(function(a){a.jqplot.OHLCRenderer=function(){a.jqplot.LineRenderer.call(this);this.candleStick=false;this.tickLength="auto";this.bodyWidth="auto";this.openColor=null;this.closeColor=null;this.wickColor=null;this.fillUpBody=false;this.fillDownBody=true;this.upBodyColor=null;this.downBodyColor=null;this.hlc=false;this.lineWidth=1.5;this._tickLength;this._bodyWidth};a.jqplot.OHLCRenderer.prototype=new a.jqplot.LineRenderer();a.jqplot.OHLCRenderer.prototype.constructor=a.jqplot.OHLCRende [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/nvd3/nv.d3.css b/config/plugins/visualizations/charts/static/repository/plugins/nvd3/nv.d3.css
new file mode 100644
index 0000000..239982a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/nvd3/nv.d3.css
@@ -0,0 +1 @@
+.chartWrap{margin:0;padding:0;overflow:hidden}.nvtooltip.with-3d-shadow,.with-3d-shadow .nvtooltip{-moz-box-shadow:0 5px 10px rgba(0,0,0,.2);-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.nvtooltip{position:absolute;background-color:rgba(255,255,255,1);padding:1px;border:1px solid rgba(0,0,0,.2);z-index:10000;font-family:Arial;font-size:13px;text-align:left;pointer-events:none;white-spa [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/nvd3/nv.d3.js b/config/plugins/visualizations/charts/static/repository/plugins/nvd3/nv.d3.js
new file mode 100755
index 0000000..8b95bcc
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/nvd3/nv.d3.js
@@ -0,0 +1,14364 @@
+(function(){
+
+var nv = window.nv || {};
+
+
+nv.version = '1.1.15b';
+nv.dev = true; // set to false in production
+
+window.nv = nv;
+
+nv.tooltip = nv.tooltip || {}; // For the tooltip system
+nv.utils = nv.utils || {}; // Utility subsystem
+nv.models = nv.models || {}; //stores all the possible models/components
+nv.charts = {}; //stores all the ready to use charts
+nv.graphs = []; //stores all the graphs currently on the page
+nv.logs = {}; //stores some statistics and potential error messages
+
+nv.dispatch = d3.dispatch('render_start', 'render_end');
+
+// *************************************************************************
+//  Development render timers - disabled if dev = false
+
+if (nv.dev) {
+  nv.dispatch.on('render_start', function(e) {
+    nv.logs.startTime = +new Date();
+  });
+
+  nv.dispatch.on('render_end', function(e) {
+    nv.logs.endTime = +new Date();
+    nv.logs.totalTime = nv.logs.endTime - nv.logs.startTime;
+    nv.log('total', nv.logs.totalTime); // used for development, to keep track of graph generation times
+  });
+}
+
+// ********************************************
+//  Public Core NV functions
+
+// Logs all arguments, and returns the last so you can test things in place
+// Note: in IE8 console.log is an object not a function, and if modernizr is used
+// then calling Function.prototype.bind with anything other than a function
+// causes a TypeError to be thrown.
+nv.log = function() {
+  if (nv.dev && console.log && console.log.apply)
+    console.log.apply(console, arguments)
+  else if (nv.dev && typeof console.log == "function" && Function.prototype.bind) {
+    var log = Function.prototype.bind.call(console.log, console);
+    log.apply(console, arguments);
+  }
+  return arguments[arguments.length - 1];
+};
+
+
+nv.render = function render(step) {
+  step = step || 1; // number of graphs to generate in each timeout loop
+
+  nv.render.active = true;
+  nv.dispatch.render_start();
+
+  setTimeout(function() {
+    var chart, graph;
+
+    for (var i = 0; i < step && (graph = nv.render.queue[i]); i++) {
+      chart = graph.generate();
+      if (typeof graph.callback == typeof(Function)) graph.callback(chart);
+      nv.graphs.push(chart);
+    }
+
+    nv.render.queue.splice(0, i);
+
+    if (nv.render.queue.length) setTimeout(arguments.callee, 0);
+    else {
+      nv.dispatch.render_end();
+      nv.render.active = false;
+    }
+  }, 0);
+};
+
+nv.render.active = false;
+nv.render.queue = [];
+
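+// Queue a graph for rendering. Accepts either an options object
+// ({generate: fn, callback: fn}) or the two functions positionally.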
+nv.addGraph = function(obj) {
+  if (typeof arguments[0] === typeof(Function))
+    obj = {generate: arguments[0], callback: arguments[1]};
+
+  nv.render.queue.push(obj);
+
+  if (!nv.render.active) nv.render();
+};
+
+nv.identity = function(d) { return d; };
+
+nv.strip = function(s) { return s.replace(/(\s|&)/g,''); };
+
+function daysInMonth(month,year) {
+  return (new Date(year, month+1, 0)).getDate();
+}
+
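+// Builds a generator of Date values in [t0, t1): starts at floor(t0),
+// advances via step(), and when dt > 1 keeps only dates whose number()
+// is divisible by dt.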
+function d3_time_range(floor, step, number) {
+  return function(t0, t1, dt) {
+    var time = floor(t0), times = [];
+    if (time < t0) step(time);
+    if (dt > 1) {
+      while (time < t1) {
+        var date = new Date(+time);
+        if ((number(date) % dt === 0)) times.push(date);
+        step(time);
+      }
+    } else {
+      while (time < t1) { times.push(new Date(+time)); step(time); }
+    }
+    return times;
+  };
+}
+
+d3.time.monthEnd = function(date) {
+  return new Date(date.getFullYear(), date.getMonth(), 0);
+};
+
+d3.time.monthEnds = d3_time_range(d3.time.monthEnd, function(date) {
+    date.setUTCDate(date.getUTCDate() + 1);
+    date.setDate(daysInMonth(date.getMonth() + 1, date.getFullYear()));
+  }, function(date) {
+    return date.getMonth();
+  }
+);
+
+/* Utility class to handle creation of an interactive layer.
+This places a rectangle on top of the chart. When you move the mouse over it, it sends a dispatch
+event containing the X coordinate. It can also render a vertical line where the mouse is located.
+
+dispatch.elementMousemove is the important event to latch onto.  It is fired whenever the mouse moves over
+the rectangle. The dispatch is given one object which contains the mouseX/Y location.
+It also has 'pointXValue', which is the conversion of mouseX to the x-axis scale.
+*/
+nv.interactiveGuideline = function() {
+	"use strict";
+	var tooltip = nv.models.tooltip();
+	//Public settings
+	var width = null
+	, height = null
+    //Please pass in the bounding chart's top and left margins
+    //This is important for calculating the correct mouseX/Y positions.
+	, margin = {left: 0, top: 0}
+	, xScale = d3.scale.linear()
+	, yScale = d3.scale.linear()
+	, dispatch = d3.dispatch('elementMousemove', 'elementMouseout','elementDblclick')
+	, showGuideLine = true
+	, svgContainer = null  
+    //Must pass in the bounding chart's <svg> container.
+    //The mousemove event is attached to this container.
+	;
+
+	//Private variables
+	var isMSIE = navigator.userAgent.indexOf("MSIE") !== -1  //Check user-agent for Microsoft Internet Explorer.
+	;
+
+
+	function layer(selection) {
+		selection.each(function(data) {
+				var container = d3.select(this);
+				
+				var availableWidth = (width || 960), availableHeight = (height || 400);
+
+				var wrap = container.selectAll("g.nv-wrap.nv-interactiveLineLayer").data([data]);
+				var wrapEnter = wrap.enter()
+								.append("g").attr("class", " nv-wrap nv-interactiveLineLayer");
+								
+				
+				wrapEnter.append("g").attr("class","nv-interactiveGuideLine");
+				
+				if (!svgContainer) {
+					return;
+				}
+
+                function mouseHandler() {
+                      var d3mouse = d3.mouse(this);
+                      var mouseX = d3mouse[0];
+                      var mouseY = d3mouse[1];
+                      var subtractMargin = true;
+                      var mouseOutAnyReason = false;
+                      if (isMSIE) {
+                         /*
+                            D3.js (or maybe SVG.getScreenCTM) has a nasty bug in Internet Explorer 10.
+                            d3.mouse() returns incorrect X,Y mouse coordinates when mouse moving
+                            over a rect in IE 10.
+                            However, d3.event.offsetX/Y also returns the mouse coordinates
+                            relative to the triggering <rect>. So we use offsetX/Y on IE.  
+                         */
+                         mouseX = d3.event.offsetX;
+                         mouseY = d3.event.offsetY;
+
+                         /*
+                            On IE, if you attach a mouse event listener to the <svg> container,
+                            it will actually trigger it for all the child elements (like <path>, <circle>, etc).
+                            When this happens on IE, the offsetX/Y is set to where ever the child element
+                            is located.
+                            As a result, we do NOT need to subtract margins to figure out the mouse X/Y
+                            position under this scenario. Removing the line below *will* cause 
+                            the interactive layer to not work right on IE.
+                         */
+                         if(d3.event.target.tagName !== "svg")
+                            subtractMargin = false;
+
+                         if (d3.event.target.className.baseVal.match("nv-legend"))
+                         	mouseOutAnyReason = true;
+                          
+                      }
+
+                      if(subtractMargin) {
+                         mouseX -= margin.left;
+                         mouseY -= margin.top;
+                      }
+
+                      /* If mouseX/Y is outside of the chart's bounds,
+                      trigger a mouseOut event.
+                      */
+                      if (mouseX < 0 || mouseY < 0 
+                        || mouseX > availableWidth || mouseY > availableHeight
+                        || (d3.event.relatedTarget && d3.event.relatedTarget.ownerSVGElement === undefined)
+                        || mouseOutAnyReason
+                        ) 
+                      {
+                      		if (isMSIE) {
+                      			if (d3.event.relatedTarget 
+                      				&& d3.event.relatedTarget.ownerSVGElement === undefined
+                      				&& d3.event.relatedTarget.className.match(tooltip.nvPointerEventsClass)) {
+                      				return;
+                      			}
+                      		}
+                            dispatch.elementMouseout({
+                               mouseX: mouseX,
+                               mouseY: mouseY
+                            });
+                            layer.renderGuideLine(null); //hide the guideline
+                            return;
+                      }
+                      
+                      var pointXValue = xScale.invert(mouseX);
+                      dispatch.elementMousemove({
+                            mouseX: mouseX,
+                            mouseY: mouseY,
+                            pointXValue: pointXValue
+                      });
+
+                      //If user double clicks the layer, fire a elementDblclick dispatch.
+                      if (d3.event.type === "dblclick") {
+                        dispatch.elementDblclick({
+                            mouseX: mouseX,
+                            mouseY: mouseY,
+                            pointXValue: pointXValue
+                        });
+                      }
+                }
+
+				svgContainer
+				      .on("mousemove",mouseHandler, true)
+				      .on("mouseout" ,mouseHandler,true)
+                      .on("dblclick" ,mouseHandler)
+				      ;
+
+				 //Draws a vertical guideline at the given X position.
+				layer.renderGuideLine = function(x) {
+				 	if (!showGuideLine) return;
+				 	var line = wrap.select(".nv-interactiveGuideLine")
+				 	      .selectAll("line")
+				 	      .data((x != null) ? [nv.utils.NaNtoZero(x)] : [], String);
+
+				 	line.enter()
+				 		.append("line")
+				 		.attr("class", "nv-guideline")
+				 		.attr("x1", function(d) { return d;})
+				 		.attr("x2", function(d) { return d;})
+				 		.attr("y1", availableHeight)
+				 		.attr("y2",0)
+				 		;
+				 	line.exit().remove();
+
+				};
+		});
+	}
+
+	layer.dispatch = dispatch;
+	layer.tooltip = tooltip;
+
+	layer.margin = function(_) {
+	    if (!arguments.length) return margin;
+	    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+	    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+	    return layer;
+    };
+
+	layer.width = function(_) {
+		if (!arguments.length) return width;
+		width = _;
+		return layer;
+	};
+
+	layer.height = function(_) {
+		if (!arguments.length) return height;
+		height = _;
+		return layer;
+	};
+
+	layer.xScale = function(_) {
+		if (!arguments.length) return xScale;
+		xScale = _;
+		return layer;
+	};
+
+	layer.showGuideLine = function(_) {
+		if (!arguments.length) return showGuideLine;
+		showGuideLine = _;
+		return layer;
+	};
+
+	layer.svgContainer = function(_) {
+		if (!arguments.length) return svgContainer;
+		svgContainer = _;
+		return layer;
+	};
+
+
+	return layer;
+};
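+/* A minimal usage sketch for the guideline layer (selector and sizes are
+   illustrative; a real chart wires the dispatch into its tooltip logic):
+
+   var guide = nv.interactiveGuideline()
+       .width(600).height(300)
+       .margin({left: 40, top: 20})
+       .svgContainer(d3.select("svg"));
+   d3.select("svg").call(guide);
+   guide.dispatch.on('elementMousemove', function(e) {
+       nv.log(e.pointXValue);
+   });
+*/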
+
+/* Utility function that uses d3.bisect to find the index in a given array where a search value can be inserted.
+This is different from normal bisectLeft: this function finds the nearest index to insert the search value at.
+
+For instance, let's say your array is [1,2,3,5,10,30], and you search for 28.
+Normal d3.bisectLeft will return 4, because 28 would be inserted after the number 10. But interactiveBisect will return 5
+because 28 is closer to 30 than to 10.
+
+Unit tests can be found in: interactiveBisectTest.html
+
+Has the following known issues:
+   * Will not work if the data points move backwards (i.e. 10, 9, 8, 7, etc.) or if the data points are in random order.
+   * Won't work if there are duplicate x coordinate values.
+*/
+nv.interactiveBisect = function (values, searchVal, xAccessor) {
+	  "use strict";
+      if (!(values instanceof Array)) return null;
+      if (typeof xAccessor !== 'function') xAccessor = function(d,i) { return d.x; };
+
+      var bisect = d3.bisector(xAccessor).left;
+      var index = d3.max([0, bisect(values,searchVal) - 1]);
+      var currentValue = xAccessor(values[index], index);
+      if (typeof currentValue === 'undefined') currentValue = index;
+
+      if (currentValue === searchVal) return index;  //found exact match
+
+      var nextIndex = d3.min([index+1, values.length - 1]);
+      var nextValue = xAccessor(values[nextIndex], nextIndex);
+      if (typeof nextValue === 'undefined') nextValue = nextIndex;
+
+      if (Math.abs(nextValue - searchVal) >= Math.abs(currentValue - searchVal))
+          return index;
+      else
+          return nextIndex;
+};
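+/* Worked example (values from the comment above; a plain number array
+   needs an identity accessor, since the default accessor reads d.x):
+
+   nv.interactiveBisect([1,2,3,5,10,30], 28, function(d) { return d; }); // 5
+*/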
+
+/*
+Returns the index in the array "values" that is closest to searchVal.
+Only returns an index if searchVal is within some "threshold".
+Otherwise, returns null.
+*/
+nv.nearestValueIndex = function (values, searchVal, threshold) {
+      "use strict";
+      var yDistMax = Infinity, indexToHighlight = null;
+      values.forEach(function(d,i) {
+         var delta = Math.abs(searchVal - d);
+         if ( delta <= yDistMax && delta < threshold) {
+            yDistMax = delta;
+            indexToHighlight = i;
+         }
+      });
+      return indexToHighlight;
+};
+
+/* Tooltip rendering model for nvd3 charts.
+window.nv.models.tooltip is the updated, newer way to render tooltips.
+
+window.nv.tooltip.show is the old tooltip code.
+window.nv.tooltip.* also has various helper methods.
+*/
+(function() {
+  "use strict";
+  window.nv.tooltip = {};
+
+  /* Model which can be instantiated to handle tooltip rendering.
+    Example usage: 
+    var tip = nv.models.tooltip().gravity('w').distance(23)
+                .data(myDataObject);
+
+        tip();    //just invoke the returned function to render tooltip.
+  */
+  window.nv.models.tooltip = function() {
+        var content = null    //HTML contents of the tooltip.  If null, the content is generated via the data variable.
+        ,   data = null     /* Tooltip data. If data is given in the proper format, a consistent tooltip is generated.
+        Format of data:
+        {
+            key: "Date",
+            value: "August 2009", 
+            series: [
+                    {
+                        key: "Series 1",
+                        value: "Value 1",
+                        color: "#000"
+                    },
+                    {
+                        key: "Series 2",
+                        value: "Value 2",
+                        color: "#00f"
+                    }
+            ]
+
+        }
+
+        */
+        ,   gravity = 'w'   //Can be 'n','s','e','w'. Determines how tooltip is positioned.
+        ,   distance = 50   //Distance to offset tooltip from the mouse location.
+        ,   snapDistance = 25   //Tolerance allowed before tooltip is moved from its current position (creates 'snapping' effect)
+        ,   fixedTop = null //If not null, this fixes the top position of the tooltip.
+        ,   classes = null  //Attaches additional CSS classes to the tooltip DIV that is created.
+        ,   chartContainer = null   //Parent DIV, of the SVG Container that holds the chart.
+        ,   tooltipElem = null  //actual DOM element representing the tooltip.
+        ,   position = {left: null, top: null}      //Relative position of the tooltip inside chartContainer.
+        ,   enabled = true  //True -> tooltips are rendered. False -> don't render tooltips.
+        //Generates a unique id when you create a new tooltip() object
+        ,   id = "nvtooltip-" + Math.floor(Math.random() * 100000)
+        ;
+
+        //CSS class to specify whether element should not have mouse events.
+        var  nvPointerEventsClass = "nv-pointer-events-none";
+
+        //Format function for the tooltip values column
+        var valueFormatter = function(d,i) {
+            return d;
+        };
+
+        //Format function for the tooltip header value.
+        var headerFormatter = function(d) {
+            return d;
+        };
+
+        //By default, the tooltip model renders a beautiful table inside a DIV.
+        //You can override this function if a custom tooltip is desired.
+        var contentGenerator = function(d) {
+            if (content != null) return content;
+
+            if (d == null) return '';
+
+            var table = d3.select(document.createElement("table"));
+            var theadEnter = table.selectAll("thead")
+                .data([d])
+                .enter().append("thead");
+            theadEnter.append("tr")
+                .append("td")
+                .attr("colspan",3)
+                .append("strong")
+                    .classed("x-value",true)
+                    .html(headerFormatter(d.value));
+
+            var tbodyEnter = table.selectAll("tbody")
+                .data([d])
+                .enter().append("tbody");
+            var trowEnter = tbodyEnter.selectAll("tr")
+                .data(function(p) { return p.series})
+                .enter()
+                .append("tr")
+                .classed("highlight", function(p) { return p.highlight})
+                ;
+
+            trowEnter.append("td")
+                .classed("legend-color-guide",true)
+                .append("div")
+                    .style("background-color", function(p) { return p.color});
+            trowEnter.append("td")
+                .classed("key",true)
+                .html(function(p) {return p.key});
+            trowEnter.append("td")
+                .classed("value",true)
+                .html(function(p,i) { return valueFormatter(p.value,i) });
+
+
+            trowEnter.selectAll("td").each(function(p) {
+                if (p.highlight) {
+                    var opacityScale = d3.scale.linear().domain([0,1]).range(["#fff",p.color]);
+                    var opacity = 0.6;
+                    d3.select(this)
+                        .style("border-bottom-color", opacityScale(opacity))
+                        .style("border-top-color", opacityScale(opacity))
+                        ;
+                }
+            });
+
+            var html = table.node().outerHTML;
+            if (d.footer !== undefined)
+                html += "<div class='footer'>" + d.footer + "</div>";
+            return html;
+
+        };
+
+        var dataSeriesExists = function(d) {
+            if (d && d.series && d.series.length > 0) return true;
+
+            return false;
+        };
+
+        //In situations where the chart is in a 'viewBox', re-position the tooltip based on how far chart is zoomed.
+        function convertViewBoxRatio() {
+            if (chartContainer) {
+              var svg = d3.select(chartContainer);
+              if (svg.node().tagName !== "svg") {
+                 svg = svg.select("svg");
+              }
+              var viewBox = (svg.node()) ? svg.attr('viewBox') : null;
+              if (viewBox) {
+                viewBox = viewBox.split(' ');
+                var ratio = parseInt(svg.style('width')) / viewBox[2];
+                
+                position.left = position.left * ratio;
+                position.top  = position.top * ratio;
+              }
+            }
+        }
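+
+        //Worked example (editor's note, not upstream code): for an SVG with
+        //viewBox="0 0 800 400" rendered at a CSS width of 400px, the ratio is
+        //400 / 800 = 0.5, so a tooltip position computed at (200, 100) becomes
+        //(100, 50) in the zoomed coordinate space.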
+
+        //Creates new tooltip container, or uses existing one on DOM.
+        function getTooltipContainer(newContent) {
+            var body;
+            if (chartContainer)
+                body = d3.select(chartContainer);
+            else
+                body = d3.select("body");
+
+            var container = body.select(".nvtooltip");
+            if (container.node() === null) {
+                //Create new tooltip div if it doesn't exist on DOM.
+                container = body.append("div")
+                    .attr("class", "nvtooltip " + (classes? classes: "xy-tooltip"))
+                    .attr("id",id)
+                    ;
+            }
+
+            container.node().innerHTML = newContent;
+            container.style("top",0).style("left",0).style("opacity",0);
+            container.selectAll("div, table, td, tr").classed(nvPointerEventsClass,true);
+            container.classed(nvPointerEventsClass,true);
+            return container.node();
+        }
+
+
+        //Draw the tooltip onto the DOM.
+        function nvtooltip() {
+            if (!enabled) return;
+            if (!dataSeriesExists(data)) return;
+
+            convertViewBoxRatio();
+
+            var left = position.left;
+            var top = (fixedTop != null) ? fixedTop : position.top;
+            var container = getTooltipContainer(contentGenerator(data));
+            tooltipElem = container;
+            if (chartContainer) {
+                var svgComp = chartContainer.getElementsByTagName("svg")[0];
+                var boundRect = (svgComp) ? svgComp.getBoundingClientRect() : chartContainer.getBoundingClientRect();
+                var svgOffset = {left:0,top:0};
+                if (svgComp) {
+                    var svgBound = svgComp.getBoundingClientRect();
+                    var chartBound = chartContainer.getBoundingClientRect();
+                    var svgBoundTop = svgBound.top;
+                    
+                    //Defensive code: due to a Firefox-only bug, svgBoundTop can
+                    //  occasionally be a hugely negative number (e.g. -134254).
+                    //  If such a number is found, use zero instead.
+                    if (svgBoundTop < 0) {
+                        var containerBound = chartContainer.getBoundingClientRect();
+                        svgBoundTop = (Math.abs(svgBoundTop) > containerBound.height) ? 0 : svgBoundTop;
+                    } 
+                    svgOffset.top = Math.abs(svgBoundTop - chartBound.top);
+                    svgOffset.left = Math.abs(svgBound.left - chartBound.left);
+                }
+                //If the parent container is an overflow <div> with scrollbars, subtract the scroll offsets.
+                //You need to also add any offset between the <svg> element and its containing <div>
+                //Finally, add any offset of the containing <div> on the whole page.
+                left += chartContainer.offsetLeft + svgOffset.left - 2*chartContainer.scrollLeft;
+                top += chartContainer.offsetTop + svgOffset.top - 2*chartContainer.scrollTop;
+            }
+
+            if (snapDistance && snapDistance > 0) {
+                top = Math.floor(top/snapDistance) * snapDistance;
+            }
+
+            nv.tooltip.calcTooltipPosition([left,top], gravity, distance, container);
+            return nvtooltip;
+        };
+
+        nvtooltip.nvPointerEventsClass = nvPointerEventsClass;
+        
+        nvtooltip.content = function(_) {
+            if (!arguments.length) return content;
+            content = _;
+            return nvtooltip;
+        };
+
+        //Read-only accessor: returns the tooltip DOM element; it cannot be set.
+        nvtooltip.tooltipElem = function() {
+            return tooltipElem;
+        };
+
+        nvtooltip.contentGenerator = function(_) {
+            if (!arguments.length) return contentGenerator;
+            if (typeof _ === 'function') {
+                contentGenerator = _;
+            }
+            return nvtooltip;
+        };
+
+        nvtooltip.data = function(_) {
+            if (!arguments.length) return data;
+            data = _;
+            return nvtooltip;
+        };
+
+        nvtooltip.gravity = function(_) {
+            if (!arguments.length) return gravity;
+            gravity = _;
+            return nvtooltip;
+        };
+
+        nvtooltip.distance = function(_) {
+            if (!arguments.length) return distance;
+            distance = _;
+            return nvtooltip;
+        };
+
+        nvtooltip.snapDistance = function(_) {
+            if (!arguments.length) return snapDistance;
+            snapDistance = _;
+            return nvtooltip;
+        };
+
+        nvtooltip.classes = function(_) {
+            if (!arguments.length) return classes;
+            classes = _;
+            return nvtooltip;
+        };
+
+        nvtooltip.chartContainer = function(_) {
+            if (!arguments.length) return chartContainer;
+            chartContainer = _;
+            return nvtooltip;
+        };
+
+        nvtooltip.position = function(_) {
+            if (!arguments.length) return position;
+            position.left = (typeof _.left !== 'undefined') ? _.left : position.left;
+            position.top = (typeof _.top !== 'undefined') ? _.top : position.top;
+            return nvtooltip;
+        };
+
+        nvtooltip.fixedTop = function(_) {
+            if (!arguments.length) return fixedTop;
+            fixedTop = _;
+            return nvtooltip;
+        };
+
+        nvtooltip.enabled = function(_) {
+            if (!arguments.length) return enabled;
+            enabled = _;
+            return nvtooltip;
+        };
+
+        nvtooltip.valueFormatter = function(_) {
+            if (!arguments.length) return valueFormatter;
+            if (typeof _ === 'function') {
+                valueFormatter = _;
+            }
+            return nvtooltip;
+        };
+
+        nvtooltip.headerFormatter = function(_) {
+            if (!arguments.length) return headerFormatter;
+            if (typeof _ === 'function') {
+                headerFormatter = _;
+            }
+            return nvtooltip;
+        };
+
+        //id() is a read-only function. You can't use it to set the id.
+        nvtooltip.id = function() {
+            return id;
+        };
+
+
+        return nvtooltip;
+  };
+
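+  //Illustrative usage sketch (editor's addition, not upstream code). Assumes
+  //the factory above is exposed as nv.models.tooltip, as in upstream nvd3;
+  //the container id and data object below are hypothetical:
+  //
+  //  var tip = nv.models.tooltip()
+  //      .gravity('w')                 //'w' places the tooltip to the right of the point
+  //      .distance(23)                 //px offset from the anchor position
+  //      .chartContainer(document.getElementById('chart'))
+  //      .position({left: 100, top: 50})
+  //      .data({value: 3, series: [{key: 'Series A', value: 3, color: '#1f77b4'}]});
+  //  tip();                            //invoke the returned function to render
+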
+
+  //Original tooltip.show function. Kept for backward compatibility.
+  // pos = [left,top]
+  nv.tooltip.show = function(pos, content, gravity, dist, parentContainer, classes) {
+      
+        //Create new tooltip div if it doesn't exist on DOM.
+        var   container = document.createElement('div');
+        container.className = 'nvtooltip ' + (classes ? classes : 'xy-tooltip');
+
+        var body = parentContainer;
+        if ( !parentContainer || parentContainer.tagName.match(/g|svg/i)) {
+            //If the parent element is an SVG element, place tooltip in the <body> element.
+            body = document.getElementsByTagName('body')[0];
+        }
+   
+        container.style.left = 0;
+        container.style.top = 0;
+        container.style.opacity = 0;
+        container.innerHTML = content;
+        body.appendChild(container);
+        
+        //If the parent container is an overflow <div> with scrollbars, subtract the scroll offsets.
+        if (parentContainer) {
+           pos[0] = pos[0] - parentContainer.scrollLeft;
+           pos[1] = pos[1] - parentContainer.scrollTop;
+        }
+        nv.tooltip.calcTooltipPosition(pos, gravity, dist, container);
+  };
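+
+  //Illustrative call (editor's addition): render a one-off tooltip at page
+  //coordinates [100, 50] with south gravity (tooltip above the point), 20px
+  //away, appended to <body> (parentContainer null); the HTML is arbitrary:
+  //
+  //  nv.tooltip.show([100, 50], '<h3>Title</h3><p>42</p>', 's', 20, null);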
+
+  //Looks up the ancestry of a DOM element, and returns the first NON-svg node.
+  nv.tooltip.findFirstNonSVGParent = function(Elem) {
+            while(Elem.tagName.match(/^(g|svg)$/i) !== null) {
+                Elem = Elem.parentNode;
+            }
+            return Elem;
+  };
+
+  //Finds the total offsetTop of a given DOM element.
+  //Looks up the entire ancestry of an element, up to the first relatively positioned element.
+  nv.tooltip.findTotalOffsetTop = function ( Elem, initialTop ) {
+                var offsetTop = initialTop;
+                
+                do {
+                    if( !isNaN( Elem.offsetTop ) ) {
+                        offsetTop += (Elem.offsetTop);
+                    }
+                } while( Elem = Elem.offsetParent );
+                return offsetTop;
+  };
+
+  //Finds the total offsetLeft of a given DOM element.
+  //Looks up the entire ancestry of an element, up to the first relatively positioned element.
+  nv.tooltip.findTotalOffsetLeft = function ( Elem, initialLeft) {
+                var offsetLeft = initialLeft;
+                
+                do {
+                    if( !isNaN( Elem.offsetLeft ) ) {
+                        offsetLeft += (Elem.offsetLeft);
+                    }
+                } while( Elem = Elem.offsetParent );
+                return offsetLeft;
+  };
+
+  //Global utility function to render a tooltip on the DOM.
+  //pos = [left,top] coordinates of where to place the tooltip, relative to the SVG chart container.
+  //gravity = how to orient the tooltip
+  //dist = how far away from the mouse to place tooltip
+  //container = tooltip DIV
+  nv.tooltip.calcTooltipPosition = function(pos, gravity, dist, container) {
+
+            var height = parseInt(container.offsetHeight),
+                width = parseInt(container.offsetWidth),
+                windowWidth = nv.utils.windowSize().width,
+                windowHeight = nv.utils.windowSize().height,
+                scrollTop = window.pageYOffset,
+                scrollLeft = window.pageXOffset,
+                left, top;
+
+            windowHeight = window.innerWidth >= document.body.scrollWidth ? windowHeight : windowHeight - 16;
+            windowWidth = window.innerHeight >= document.body.scrollHeight ? windowWidth : windowWidth - 16;
+
+            gravity = gravity || 's';
+            dist = dist || 20;
+
+            var tooltipTop = function ( Elem ) {
+                return nv.tooltip.findTotalOffsetTop(Elem, top);
+            };
+
+            var tooltipLeft = function ( Elem ) {
+                return nv.tooltip.findTotalOffsetLeft(Elem,left);
+            };
+
+            switch (gravity) {
+              case 'e':
+                left = pos[0] - width - dist;
+                top = pos[1] - (height / 2);
+                var tLeft = tooltipLeft(container);
+                var tTop = tooltipTop(container);
+                if (tLeft < scrollLeft) left = pos[0] + dist > scrollLeft ? pos[0] + dist : scrollLeft - tLeft + left;
+                if (tTop < scrollTop) top = scrollTop - tTop + top;
+                if (tTop + height > scrollTop + windowHeight) top = scrollTop + windowHeight - tTop + top - height;
+                break;
+              case 'w':
+                left = pos[0] + dist;
+                top = pos[1] - (height / 2);
+                var tLeft = tooltipLeft(container);
+                var tTop = tooltipTop(container);
+                if (tLeft + width > windowWidth) left = pos[0] - width - dist;
+                if (tTop < scrollTop) top = scrollTop + 5;
+                if (tTop + height > scrollTop + windowHeight) top = scrollTop + windowHeight - tTop + top - height;
+                break;
+              case 'n':
+                left = pos[0] - (width / 2) - 5;
+                top = pos[1] + dist;
+                var tLeft = tooltipLeft(container);
+                var tTop = tooltipTop(container);
+                if (tLeft < scrollLeft) left = scrollLeft + 5;
+                if (tLeft + width > windowWidth) left = left - width/2 + 5;
+                if (tTop + height > scrollTop + windowHeight) top = scrollTop + windowHeight - tTop + top - height;
+                break;
+              case 's':
+                left = pos[0] - (width / 2);
+                top = pos[1] - height - dist;
+                var tLeft = tooltipLeft(container);
+                var tTop = tooltipTop(container);
+                if (tLeft < scrollLeft) left = scrollLeft + 5;
+                if (tLeft + width > windowWidth) left = left - width/2 + 5;
+                if (scrollTop > tTop) top = scrollTop;
+                break;
+              case 'none':
+                left = pos[0];
+                top = pos[1] - dist;
+                var tLeft = tooltipLeft(container);
+                var tTop = tooltipTop(container);
+                break;
+            }
+
+
+            container.style.left = left+'px';
+            container.style.top = top+'px';
+            container.style.opacity = 1;
+            container.style.position = 'absolute'; 
+
+            return container;
+    };
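+
+    //Illustrative call (editor's addition): position an existing, already
+    //rendered tooltip DIV ('tooltipDiv' is hypothetical) below the point
+    //(300, 120) using 'n' gravity and a 20px offset:
+    //
+    //  nv.tooltip.calcTooltipPosition([300, 120], 'n', 20, tooltipDiv);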
+
+    //Global utility function to remove tooltips from the DOM.
+    nv.tooltip.cleanup = function() {
+
+              // Find the tooltips and mark them for removal via this class (so other cleanup calls won't find them)
+              var tooltips = document.getElementsByClassName('nvtooltip');
+              var purging = [];
+              while(tooltips.length) {
+                purging.push(tooltips[0]);
+                tooltips[0].style.transitionDelay = '0s'; //'!important' is not valid in an inline style value
+                tooltips[0].style.opacity = 0;
+                tooltips[0].className = 'nvtooltip-pending-removal';
+              }
+
+              setTimeout(function() {
+
+                  while (purging.length) {
+                     var removeMe = purging.pop();
+                      removeMe.parentNode.removeChild(removeMe);
+                  }
+            }, 500);
+    };
+
+})();
+
+nv.utils.windowSize = function() {
+    // Sane defaults
+    var size = {width: 640, height: 480};
+
+    // Older IE reports dimensions on document.body
+    if (document.body && document.body.offsetWidth) {
+        size.width = document.body.offsetWidth;
+        size.height = document.body.offsetHeight;
+    }
+
+    // IE in standards mode (CSS1Compat) reports them on documentElement
+    if (document.compatMode=='CSS1Compat' &&
+        document.documentElement &&
+        document.documentElement.offsetWidth ) {
+        size.width = document.documentElement.offsetWidth;
+        size.height = document.documentElement.offsetHeight;
+    }
+
+    // Modern browsers expose them on window directly
+    if (window.innerWidth && window.innerHeight) {
+        size.width = window.innerWidth;
+        size.height = window.innerHeight;
+    }
+    return (size);
+};
+
+
+
+// Easy way to bind multiple functions to window.onresize
+// TODO: give a way to remove a function after it's bound, other than removing all of them
+nv.utils.windowResize = function(fun){
+  if (fun === undefined) return;
+  var oldresize = window.onresize;
+
+  window.onresize = function(e) {
+    if (typeof oldresize == 'function') oldresize(e);
+    fun(e);
+  }
+}
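+
+// Illustrative usage (editor's addition): re-render a chart whenever the
+// window is resized ('chart' and the selector are hypothetical):
+//
+//   nv.utils.windowResize(function() {
+//     d3.select('#chart svg').call(chart);
+//   });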
+
+// Backwards compatible way to implement more d3-like coloring of graphs.
+// If passed an array, wrap it in a function which implements the old default
+// behavior
+nv.utils.getColor = function(color) {
+    if (!arguments.length) return nv.utils.defaultColor(); //if you pass in nothing, get default colors back
+
+    if( Object.prototype.toString.call( color ) === '[object Array]' )
+        return function(d, i) { return d.color || color[i % color.length]; };
+    else
+        return color;
+        //can't really help it if someone passes rubbish as color
+}
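+
+// Illustrative usage (editor's addition): with the array form, series cycle
+// through the palette unless a series supplies its own 'color' property.
+//
+//   var color = nv.utils.getColor(['#1f77b4', '#ff7f0e', '#2ca02c']);
+//   color({key: 'A'}, 0);                 //-> '#1f77b4'
+//   color({key: 'B', color: '#000'}, 1);  //-> '#000' (per-series override)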
+
+// Default color chooser uses the index of an object as before.
+nv.utils.defaultColor = function() {
+    var colors = d3.scale.category20().range();
+    return function(d, i) { return d.color || colors[i % colors.length] };
+}
+
+
+// Returns a color function that takes the result of 'getKey' for each series and
+// looks for a corresponding color from the dictionary,
+nv.utils.customTheme = function(dictionary, getKey, defaultColors) {
+  getKey = getKey || function(series) { return series.key }; // use default series.key if getKey is undefined
+  defaultColors = defaultColors || d3.scale.category20().range(); //default color function
+
+  var defIndex = defaultColors.length; //current default color (going in reverse)
+
+  return function(series, index) {
+    var key = getKey(series);
+
+    if (!defIndex) defIndex = defaultColors.length; //used all the default colors, start over
+
+    if (typeof dictionary[key] !== "undefined")
+      return (typeof dictionary[key] === "function") ? dictionary[key]() : dictionary[key];
+    else
+      return defaultColors[--defIndex]; // no match in dictionary, use default color
+  }
+}
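+
+// Illustrative usage (editor's addition): fixed colors for known series keys,
+// with unknown keys falling back to the default palette, walked in reverse.
+//
+//   var theme = nv.utils.customTheme({'Sales': '#1f77b4', 'Costs': '#d62728'});
+//   theme({key: 'Sales'}, 0);   //-> '#1f77b4'
+//   theme({key: 'Other'}, 1);   //-> last color of d3.scale.category20()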
+
+
+
+// From the PJAX example on d3js.org, while this is not really directly needed
+// it's a very cool method for doing pjax, I may expand upon it a little bit,
+// open to suggestions on anything that may be useful
+nv.utils.pjax = function(links, content) {
+  d3.selectAll(links).on("click", function() {
+    history.pushState(this.href, this.textContent, this.href);
+    load(this.href);
+    d3.event.preventDefault();
+  });
+
+  function load(href) {
+    d3.html(href, function(fragment) {
+      var target = d3.select(content).node();
+      target.parentNode.replaceChild(d3.select(fragment).select(content).node(), target);
+      nv.utils.pjax(links, content);
+    });
+  }
+
+  d3.select(window).on("popstate", function() {
+    if (d3.event.state) load(d3.event.state);
+  });
+}
+
+/* For situations where we want to approximate the width in pixels of an SVG text element.
+Most common instance is when the element is in a display:none; container.
+Formula: text.length * font-size * constant_factor
+*/
+nv.utils.calcApproxTextWidth = function (svgTextElem) {
+    if (typeof svgTextElem.style === 'function'
+        && typeof svgTextElem.text === 'function') {
+        var fontSize = parseInt(svgTextElem.style("font-size").replace("px",""));
+        var textLength = svgTextElem.text().length;
+
+        return textLength * fontSize * 0.5;
+    }
+    return 0;
+};
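+
+/* Worked example (editor's note): a 10-character label at font-size 12px is
+approximated as 10 * 12 * 0.5 = 60px wide. */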
+
+/* Convert anything that is not a finite number (undefined, null, NaN, +/-Infinity) to zero.
+*/
+nv.utils.NaNtoZero = function(n) {
+    if (typeof n !== 'number'
+        || !isFinite(n)) return 0; //isFinite rejects NaN and +/-Infinity; typeof rejects null/undefined
+
+    return n;
+};
+
+/*
+Snippet of code you can insert into each nv.models.* to give you the ability to
+do things like:
+chart.options({
+  showXAxis: true,
+  tooltips: true
+});
+
+To enable in the chart:
+chart.options = nv.utils.optionsFunc.bind(chart);
+*/
+nv.utils.optionsFunc = function(args) {
+    if (args) {
+      d3.map(args).forEach((function(key,value) {
+        if (typeof this[key] === "function") {
+           this[key](value);
+        }
+      }).bind(this));
+    }
+    return this;
+};
+
+nv.models.axis = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var axis = d3.svg.axis()
+    ;
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 75 //only used for tickLabel currently
+    , height = 60 //only used for tickLabel currently
+    , scale = d3.scale.linear()
+    , axisLabelText = null
+    , showMaxMin = true //TODO: showMaxMin should be disabled on all ordinal scaled axes
+    , highlightZero = true
+    , rotateLabels = 0
+    , rotateYLabel = true
+    , staggerLabels = false
+    , isOrdinal = false
+    , ticks = null
+    , axisLabelDistance = 12 //The larger this number is, the closer the axis label is to the axis.
+    ;
+
+  axis
+    .scale(scale)
+    .orient('bottom')
+    .tickFormat(function(d) { return d })
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var scale0;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this);
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-axis').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-axis');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      //------------------------------------------------------------
+
+
+      if (ticks !== null)
+        axis.ticks(ticks);
+      else if (axis.orient() == 'top' || axis.orient() == 'bottom')
+        axis.ticks(Math.abs(scale.range()[1] - scale.range()[0]) / 100);
+
+
+      //TODO: consider calculating width/height based on whether or not label is added, for reference in charts using this component
+
+
+      g.call(axis);
+
+      scale0 = scale0 || axis.scale();
+
+      var fmt = axis.tickFormat();
+      if (fmt == null) {
+        fmt = scale0.tickFormat();
+      }
+
+      var axisLabel = g.selectAll('text.nv-axislabel')
+          .data([axisLabelText || null]);
+      axisLabel.exit().remove();
+      switch (axis.orient()) {
+        case 'top':
+          axisLabel.enter().append('text').attr('class', 'nv-axislabel');
+          var w = (scale.range().length==2) ? scale.range()[1] : (scale.range()[scale.range().length-1]+(scale.range()[1]-scale.range()[0]));
+          axisLabel
+              .attr('text-anchor', 'middle')
+              .attr('y', 0)
+              .attr('x', w/2);
+          if (showMaxMin) {
+            var axisMaxMin = wrap.selectAll('g.nv-axisMaxMin')
+                           .data(scale.domain());
+            axisMaxMin.enter().append('g').attr('class', 'nv-axisMaxMin').append('text');
+            axisMaxMin.exit().remove();
+            axisMaxMin
+                .attr('transform', function(d,i) {
+                  return 'translate(' + scale(d) + ',0)'
+                })
+              .select('text')
+                .attr('dy', '-0.5em')
+                .attr('y', -axis.tickPadding())
+                .attr('text-anchor', 'middle')
+                .text(function(d,i) {
+                  var v = fmt(d);
+                  return ('' + v).match('NaN') ? '' : v;
+                });
+            axisMaxMin
+                .attr('transform', function(d,i) {
+                  return 'translate(' + scale.range()[i] + ',0)'
+                });
+          }
+          break;
+        case 'bottom':
+          var xLabelMargin = 36;
+          var maxTextWidth = 30;
+          var xTicks = g.selectAll('g').select("text");
+          if (rotateLabels%360) {
+            //Calculate the longest xTick width
+            xTicks.each(function(d,i){
+              var width = this.getBBox().width;
+              if(width > maxTextWidth) maxTextWidth = width;
+            });
+            //Convert to radians before calculating sin. Add 30 to margin for healthy padding.
+            var sin = Math.abs(Math.sin(rotateLabels*Math.PI/180));
+            var xLabelMargin = (sin ? sin*maxTextWidth : maxTextWidth)+30;
+            //Rotate all xTicks
+            xTicks
+              .attr('transform', function(d,i,j) { return 'rotate(' + rotateLabels + ' 0,0)' })
+              .style('text-anchor', rotateLabels%360 > 0 ? 'start' : 'end');
+          }
+          axisLabel.enter().append('text').attr('class', 'nv-axislabel');
+          var w = (scale.range().length==2) ? scale.range()[1] : (scale.range()[scale.range().length-1]+(scale.range()[1]-scale.range()[0]));
+          axisLabel
+              .attr('text-anchor', 'middle')
+              .attr('y', xLabelMargin)
+              .attr('x', w/2);
+          if (showMaxMin) {
+          //if (showMaxMin && !isOrdinal) {
+            var axisMaxMin = wrap.selectAll('g.nv-axisMaxMin')
+                           //.data(scale.domain())
+                           .data([scale.domain()[0], scale.domain()[scale.domain().length - 1]]);
+            axisMaxMin.enter().append('g').attr('class', 'nv-axisMaxMin').append('text');
+            axisMaxMin.exit().remove();
+            axisMaxMin
+                .attr('transform', function(d,i) {
+                  return 'translate(' + (scale(d) + (isOrdinal ? scale.rangeBand() / 2 : 0)) + ',0)'
+                })
+              .select('text')
+                .attr('dy', '.71em')
+                .attr('y', axis.tickPadding())
+                .attr('transform', function(d,i,j) { return 'rotate(' + rotateLabels + ' 0,0)' })
+                .style('text-anchor', rotateLabels ? (rotateLabels%360 > 0 ? 'start' : 'end') : 'middle')
+                .text(function(d,i) {
+                  var v = fmt(d);
+                  return ('' + v).match('NaN') ? '' : v;
+                });
+            axisMaxMin
+                .attr('transform', function(d,i) {
+                  //return 'translate(' + scale.range()[i] + ',0)'
+                  //return 'translate(' + scale(d) + ',0)'
+                  return 'translate(' + (scale(d) + (isOrdinal ? scale.rangeBand() / 2 : 0)) + ',0)'
+                });
+          }
+          if (staggerLabels)
+            xTicks
+                .attr('transform', function(d,i) { return 'translate(0,' + (i % 2 == 0 ? '0' : '12') + ')' });
+
+          break;
+        case 'right':
+          axisLabel.enter().append('text').attr('class', 'nv-axislabel');
+          axisLabel
+              .style('text-anchor', rotateYLabel ? 'middle' : 'begin')
+              .attr('transform', rotateYLabel ? 'rotate(90)' : '')
+              .attr('y', rotateYLabel ? (-Math.max(margin.right,width) + 12) : -10) //TODO: consider calculating this based on largest tick width... OR at least expose this on chart
+              .attr('x', rotateYLabel ? (scale.range()[0] / 2) : axis.tickPadding());
+          if (showMaxMin) {
+            var axisMaxMin = wrap.selectAll('g.nv-axisMaxMin')
+                           .data(scale.domain());
+            axisMaxMin.enter().append('g').attr('class', 'nv-axisMaxMin').append('text')
+                .style('opacity', 0);
+            axisMaxMin.exit().remove();
+            axisMaxMin
+                .attr('transform', function(d,i) {
+                  return 'translate(0,' + scale(d) + ')'
+                })
+              .select('text')
+                .attr('dy', '.32em')
+                .attr('y', 0)
+                .attr('x', axis.tickPadding())
+                .style('text-anchor', 'start')
+                .text(function(d,i) {
+                  var v = fmt(d);
+                  return ('' + v).match('NaN') ? '' : v;
+                });
+            axisMaxMin
+                .attr('transform', function(d,i) {
+                  return 'translate(0,' + scale.range()[i] + ')'
+                })
+              .select('text')
+                .style('opacity', 1);
+          }
+          break;
+        case 'left':
+          /*
+          //For dynamically placing the label. Can be used with dynamically-sized chart axis margins
+          var yTicks = g.selectAll('g').select("text");
+          yTicks.each(function(d,i){
+            var labelPadding = this.getBBox().width + axis.tickPadding() + 16;
+            if(labelPadding > width) width = labelPadding;
+          });
+          */
+          axisLabel.enter().append('text').attr('class', 'nv-axislabel');
+          axisLabel
+              .style('text-anchor', rotateYLabel ? 'middle' : 'end')
+              .attr('transform', rotateYLabel ? 'rotate(-90)' : '')
+              .attr('y', rotateYLabel ? (-Math.max(margin.left,width) + axisLabelDistance) : -10) //TODO: consider calculating this based on largest tick width... OR at least expose this on chart
+              .attr('x', rotateYLabel ? (-scale.range()[0] / 2) : -axis.tickPadding());
+          if (showMaxMin) {
+            var axisMaxMin = wrap.selectAll('g.nv-axisMaxMin')
+                           .data(scale.domain());
+            axisMaxMin.enter().append('g').attr('class', 'nv-axisMaxMin').append('text')
+                .style('opacity', 0);
+            axisMaxMin.exit().remove();
+            axisMaxMin
+                .attr('transform', function(d,i) {
+                  return 'translate(0,' + scale0(d) + ')'
+                })
+              .select('text')
+                .attr('dy', '.32em')
+                .attr('y', 0)
+                .attr('x', -axis.tickPadding())
+                .attr('text-anchor', 'end')
+                .text(function(d,i) {
+                  var v = fmt(d);
+                  return ('' + v).match('NaN') ? '' : v;
+                });
+            axisMaxMin
+                .attr('transform', function(d,i) {
+                  return 'translate(0,' + scale.range()[i] + ')'
+                })
+              .select('text')
+                .style('opacity', 1);
+          }
+          break;
+      }
+      axisLabel
+          .text(function(d) { return d });
+
+
+      if (showMaxMin && (axis.orient() === 'left' || axis.orient() === 'right')) {
+        //check if max and min overlap other values, if so, hide the values that overlap
+        g.selectAll('g') // the g's wrapping each tick
+            .each(function(d,i) {
+              d3.select(this).select('text').attr('opacity', 1);
+              if (scale(d) < scale.range()[1] + 10 || scale(d) > scale.range()[0] - 10) { // 10 is assuming text height is 16... if d is 0, leave it!
+                if (d > 1e-10 || d < -1e-10) // accounts for minor floating point errors... though could be problematic if the scale is EXTREMELY SMALL
+                  d3.select(this).attr('opacity', 0);
+
+                d3.select(this).select('text').attr('opacity', 0); // Don't remove the ZERO line!!
+              }
+            });
+
+        //if Max and Min = 0 only show min, Issue #281
+        if (scale.domain()[0] == scale.domain()[1] && scale.domain()[0] == 0)
+          wrap.selectAll('g.nv-axisMaxMin')
+            .style('opacity', function(d,i) { return !i ? 1 : 0 });
+
+      }
+
+      if (showMaxMin && (axis.orient() === 'top' || axis.orient() === 'bottom')) {
+        var maxMinRange = [];
+        wrap.selectAll('g.nv-axisMaxMin')
+            .each(function(d,i) {
+              try {
+                  if (i) // i== 1, max position
+                      maxMinRange.push(scale(d) - this.getBBox().width - 4)  //assuming the max and min labels are as wide as the next tick (with an extra 4 pixels just in case)
+                  else // i==0, min position
+                      maxMinRange.push(scale(d) + this.getBBox().width + 4)
+              }catch (err) {
+                  if (i) // i== 1, max position
+                      maxMinRange.push(scale(d) - 4)  //assuming the max and min labels are as wide as the next tick (with an extra 4 pixels just in case)
+                  else // i==0, min position
+                      maxMinRange.push(scale(d) + 4)
+              }
+            });
+        g.selectAll('g') // the g's wrapping each tick
+            .each(function(d,i) {
+              if (scale(d) < maxMinRange[0] || scale(d) > maxMinRange[1]) {
+                if (d > 1e-10 || d < -1e-10) // accounts for minor floating point errors... though could be problematic if the scale is EXTREMELY SMALL
+                  d3.select(this).remove();
+                else
+                  d3.select(this).select('text').remove(); // Don't remove the ZERO line!!
+              }
+            });
+      }
+
+
+      //highlight zero line ... Maybe should not be an option and should just be in CSS?
+      if (highlightZero)
+        g.selectAll('.tick')
+          .filter(function(d) { return !parseFloat(Math.round(d.__data__*100000)/1000000) && (d.__data__ !== undefined) }) //this is because sometimes the 0 tick is a very small fraction, TODO: think of cleaner technique
+            .classed('zero', true);
+
+      //store old scales for use in transitions on update
+      scale0 = scale.copy();
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.axis = axis;
+
+  d3.rebind(chart, axis, 'orient', 'tickValues', 'tickSubdivide', 'tickSize', 'tickPadding', 'tickFormat');
+  d3.rebind(chart, scale, 'domain', 'range', 'rangeBand', 'rangeBands'); //these are also accessible by chart.scale(), but added common ones directly for ease of use
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if(!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  }
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.ticks = function(_) {
+    if (!arguments.length) return ticks;
+    ticks = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.axisLabel = function(_) {
+    if (!arguments.length) return axisLabelText;
+    axisLabelText = _;
+    return chart;
+  }
+
+  chart.showMaxMin = function(_) {
+    if (!arguments.length) return showMaxMin;
+    showMaxMin = _;
+    return chart;
+  }
+
+  chart.highlightZero = function(_) {
+    if (!arguments.length) return highlightZero;
+    highlightZero = _;
+    return chart;
+  }
+
+  chart.scale = function(_) {
+    if (!arguments.length) return scale;
+    scale = _;
+    axis.scale(scale);
+    isOrdinal = typeof scale.rangeBands === 'function';
+    d3.rebind(chart, scale, 'domain', 'range', 'rangeBand', 'rangeBands');
+    return chart;
+  }
+
+  chart.rotateYLabel = function(_) {
+    if(!arguments.length) return rotateYLabel;
+    rotateYLabel = _;
+    return chart;
+  }
+
+  chart.rotateLabels = function(_) {
+    if(!arguments.length) return rotateLabels;
+    rotateLabels = _;
+    return chart;
+  }
+
+  chart.staggerLabels = function(_) {
+    if (!arguments.length) return staggerLabels;
+    staggerLabels = _;
+    return chart;
+  };
+
+  chart.axisLabelDistance = function(_) {
+    if (!arguments.length) return axisLabelDistance;
+    axisLabelDistance = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
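+
+//Illustrative usage sketch (editor's addition, not upstream code): a bottom
+//axis over a linear scale, rendered into a hypothetical SVG group.
+//
+//  var xScale = d3.scale.linear().domain([0, 100]).range([0, 500]);
+//  var xAxis = nv.models.axis()
+//      .scale(xScale)
+//      .orient('bottom')
+//      .tickFormat(d3.format(',.0f'))
+//      .axisLabel('Time (ms)');
+//  d3.select('#chart svg').append('g')
+//      .attr('transform', 'translate(40,360)')  //hypothetical margins
+//      .call(xAxis);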
+//TODO: consider deprecating and using multibar with single series for this
+nv.models.historicalBar = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 960
+    , height = 500
+    , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one
+    , x = d3.scale.linear()
+    , y = d3.scale.linear()
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , forceX = []
+    , forceY = [0]
+    , padData = false
+    , clipEdge = true
+    , color = nv.utils.defaultColor()
+    , xDomain
+    , yDomain
+    , xRange
+    , yRange
+    , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout')
+    , interactive = true
+    ;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x   .domain(xDomain || d3.extent(data[0].values.map(getX).concat(forceX) ))
+
+      if (padData)
+        x.range(xRange || [availableWidth * .5 / data[0].values.length, availableWidth * (data[0].values.length - .5)  / data[0].values.length ]);
+      else
+        x.range(xRange || [0, availableWidth]);
+
+      y   .domain(yDomain || d3.extent(data[0].values.map(getY).concat(forceY) ))
+          .range(yRange || [availableHeight, 0]);
+
+      // If the scale's domain has zero extent, widen it slightly so a chart can still show a single data point
+
+      if (x.domain()[0] === x.domain()[1])
+        x.domain()[0] ?
+            x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])
+          : x.domain([-1,1]);
+
+      if (y.domain()[0] === y.domain()[1])
+        y.domain()[0] ?
+            y.domain([y.domain()[0] + y.domain()[0] * 0.01, y.domain()[1] - y.domain()[1] * 0.01])
+          : y.domain([-1,1]);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-historicalBar-' + id).data([data[0].values]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-historicalBar-' + id);
+      var defsEnter = wrapEnter.append('defs');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-bars');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+      container
+          .on('click', function(d,i) {
+            dispatch.chartClick({
+                data: d,
+                index: i,
+                pos: d3.event,
+                id: id
+            });
+          });
+
+
+      defsEnter.append('clipPath')
+          .attr('id', 'nv-chart-clip-path-' + id)
+        .append('rect');
+
+      wrap.select('#nv-chart-clip-path-' + id + ' rect')
+          .attr('width', availableWidth)
+          .attr('height', availableHeight);
+
+      g   .attr('clip-path', clipEdge ? 'url(#nv-chart-clip-path-' + id + ')' : '');
+
+
+
+      var bars = wrap.select('.nv-bars').selectAll('.nv-bar')
+          .data(function(d) { return d }, function(d,i) {return getX(d,i)});
+
+      bars.exit().remove();
+
+
+      var barsEnter = bars.enter().append('rect')
+          //.attr('class', function(d,i,j) { return (getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive') + ' nv-bar-' + j + '-' + i })
+          .attr('x', 0 )
+          .attr('y', function(d,i) {  return nv.utils.NaNtoZero(y(Math.max(0, getY(d,i)))) })
+          .attr('height', function(d,i) { return nv.utils.NaNtoZero(Math.abs(y(getY(d,i)) - y(0))) })
+          .attr('transform', function(d,i) { return 'translate(' + (x(getX(d,i)) - availableWidth / data[0].values.length * .45) + ',0)'; }) 
+          .on('mouseover', function(d,i) {
+            if (!interactive) return;
+            d3.select(this).classed('hover', true);
+            dispatch.elementMouseover({
+                point: d,
+                series: data[0],
+                pos: [x(getX(d,i)), y(getY(d,i))],  // TODO: Figure out why the value appears to be shifted
+                pointIndex: i,
+                seriesIndex: 0,
+                e: d3.event
+            });
+
+          })
+          .on('mouseout', function(d,i) {
+                if (!interactive) return;
+                d3.select(this).classed('hover', false);
+                dispatch.elementMouseout({
+                    point: d,
+                    series: data[0],
+                    pointIndex: i,
+                    seriesIndex: 0,
+                    e: d3.event
+                });
+          })
+          .on('click', function(d,i) {
+                if (!interactive) return;
+                dispatch.elementClick({
+                    //label: d[label],
+                    value: getY(d,i),
+                    data: d,
+                    index: i,
+                    pos: [x(getX(d,i)), y(getY(d,i))],
+                    e: d3.event,
+                    id: id
+                });
+              d3.event.stopPropagation();
+          })
+          .on('dblclick', function(d,i) {
+              if (!interactive) return;
+              dispatch.elementDblClick({
+                  //label: d[label],
+                  value: getY(d,i),
+                  data: d,
+                  index: i,
+                  pos: [x(getX(d,i)), y(getY(d,i))],
+                  e: d3.event,
+                  id: id
+              });
+              d3.event.stopPropagation();
+          });
+
+      bars
+          .attr('fill', function(d,i) { return color(d, i); })
+          .attr('class', function(d,i,j) { return (getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive') + ' nv-bar-' + j + '-' + i })
+          
+          .attr('transform', function(d,i) { return 'translate(' + (x(getX(d,i)) - availableWidth / data[0].values.length * .45) + ',0)'; }) 
+           //TODO: better width calculations that don't assume uniform data spacing
+          .attr('width', (availableWidth / data[0].values.length) * .9 );
+
+
+      bars
+          .attr('y', function(d,i) {
+            var rval = getY(d,i) < 0 ?
+                    y(0) :
+                    y(0) - y(getY(d,i)) < 1 ?
+                      y(0) - 1 :
+                      y(getY(d,i));
+            return nv.utils.NaNtoZero(rval);
+          })
+          .attr('height', function(d,i) { return nv.utils.NaNtoZero(Math.max(Math.abs(y(getY(d,i)) - y(0)),1)) });
+
+    });
+
+    return chart;
+  }
+
+  //Create methods to allow outside functions to highlight a specific bar.
+  chart.highlightPoint = function(pointIndex, isHoverOver) {
+      d3.select(".nv-historicalBar-" + id)
+        .select(".nv-bars .nv-bar-0-" + pointIndex)
+              .classed("hover", isHoverOver)
+               ;
+  };
+
+  chart.clearHighlights = function() {
+      d3.select(".nv-historicalBar-" + id)
+        .select(".nv-bars .nv-bar.hover")
+              .classed("hover", false)
+               ;
+  };
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = _;
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.xScale = function(_) {
+    if (!arguments.length) return x;
+    x = _;
+    return chart;
+  };
+
+  chart.yScale = function(_) {
+    if (!arguments.length) return y;
+    y = _;
+    return chart;
+  };
+
+  chart.xDomain = function(_) {
+    if (!arguments.length) return xDomain;
+    xDomain = _;
+    return chart;
+  };
+
+  chart.yDomain = function(_) {
+    if (!arguments.length) return yDomain;
+    yDomain = _;
+    return chart;
+  };
+
+  chart.xRange = function(_) {
+    if (!arguments.length) return xRange;
+    xRange = _;
+    return chart;
+  };
+
+  chart.yRange = function(_) {
+    if (!arguments.length) return yRange;
+    yRange = _;
+    return chart;
+  };
+
+  chart.forceX = function(_) {
+    if (!arguments.length) return forceX;
+    forceX = _;
+    return chart;
+  };
+
+  chart.forceY = function(_) {
+    if (!arguments.length) return forceY;
+    forceY = _;
+    return chart;
+  };
+
+  chart.padData = function(_) {
+    if (!arguments.length) return padData;
+    padData = _;
+    return chart;
+  };
+
+  chart.clipEdge = function(_) {
+    if (!arguments.length) return clipEdge;
+    clipEdge = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.id = function(_) {
+    if (!arguments.length) return id;
+    id = _;
+    return chart;
+  };
+
+  chart.interactive = function(_) {
+    if(!arguments.length) return interactive;
+    interactive = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
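+
+//Illustrative usage sketch (editor's addition, not upstream code): the model
+//reads a single-series array of the form [{values: [{x, y}, ...]}].
+//
+//  var bars = nv.models.historicalBar()
+//      .x(function(d) { return d.x })
+//      .y(function(d) { return d.y })
+//      .width(600)
+//      .height(300);
+//  d3.select('#chart svg')
+//      .datum([{values: [{x: 0, y: 10}, {x: 1, y: 4}, {x: 2, y: 7}]}])
+//      .call(bars);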
+
+// Chart design based on the recommendations of Stephen Few. Implementation
+// based on the work of Clint Ivy, Jamie Love, and Jason Davies.
+// http://projects.instantcognition.com/protovis/bulletchart/
+
+nv.models.bullet = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , orient = 'left' // TODO top & bottom
+    , reverse = false
+    , ranges = function(d) { return d.ranges }
+    , markers = function(d) { return d.markers }
+    , measures = function(d) { return d.measures }
+    , rangeLabels = function(d) { return d.rangeLabels ? d.rangeLabels : [] }
+    , markerLabels = function(d) { return d.markerLabels ? d.markerLabels : []  }
+    , measureLabels = function(d) { return d.measureLabels ? d.measureLabels : []  }
+    , forceX = [0] // List of numbers to force into the X scale (i.e. 0, or a max/min, etc.)
+    , width = 380
+    , height = 30
+    , tickFormat = null
+    , color = nv.utils.getColor(['#1f77b4'])
+    , dispatch = d3.dispatch('elementMouseover', 'elementMouseout')
+    ;
+
+  //============================================================
+
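+  //Example datum (editor's note) matching the accessors above; the numbers
+  //are illustrative only:
+  //  { ranges:   [150, 225, 300],  //bad, satisfactory, good thresholds
+  //    measures: [220],            //actual value
+  //    markers:  [250] }           //target / previous value
+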
+
+  function chart(selection) {
+    selection.each(function(d, i) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+      var rangez = ranges.call(this, d, i).slice().sort(d3.descending),
+          markerz = markers.call(this, d, i).slice().sort(d3.descending),
+          measurez = measures.call(this, d, i).slice().sort(d3.descending),
+          rangeLabelz = rangeLabels.call(this, d, i).slice(),
+          markerLabelz = markerLabels.call(this, d, i).slice(),
+          measureLabelz = measureLabels.call(this, d, i).slice();
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      // Compute the new x-scale.
+      var x1 = d3.scale.linear()
+          .domain( d3.extent(d3.merge([forceX, rangez])) )
+          .range(reverse ? [availableWidth, 0] : [0, availableWidth]);
+
+      // Retrieve the old x-scale, if this is an update.
+      var x0 = this.__chart__ || d3.scale.linear()
+          .domain([0, Infinity])
+          .range(x1.range());
+
+      // Stash the new scale.
+      this.__chart__ = x1;
+
+
+      var rangeMin = d3.min(rangez), //rangez[2]
+          rangeMax = d3.max(rangez), //rangez[0]
+          rangeAvg = rangez[1];
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-bullet').data([d]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-bullet');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('rect').attr('class', 'nv-range nv-rangeMax');
+      gEnter.append('rect').attr('class', 'nv-range nv-rangeAvg');
+      gEnter.append('rect').attr('class', 'nv-range nv-rangeMin');
+      gEnter.append('rect').attr('class', 'nv-measure');
+      gEnter.append('path').attr('class', 'nv-markerTriangle');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+
+      var w0 = function(d) { return Math.abs(x0(d) - x0(0)) }, // TODO: could optimize by precalculating x0(0) and x1(0)
+          w1 = function(d) { return Math.abs(x1(d) - x1(0)) };
+      var xp0 = function(d) { return d < 0 ? x0(d) : x0(0) },
+          xp1 = function(d) { return d < 0 ? x1(d) : x1(0) };
+
+
+      g.select('rect.nv-rangeMax')
+          .attr('height', availableHeight)
+          .attr('width', w1(rangeMax > 0 ? rangeMax : rangeMin))
+          .attr('x', xp1(rangeMax > 0 ? rangeMax : rangeMin))
+          .datum(rangeMax > 0 ? rangeMax : rangeMin)
+          /*
+          .attr('x', rangeMin < 0 ?
+                         rangeMax > 0 ?
+                             x1(rangeMin)
+                           : x1(rangeMax)
+                       : x1(0))
+                      */
+
+      g.select('rect.nv-rangeAvg')
+          .attr('height', availableHeight)
+          .attr('width', w1(rangeAvg))
+          .attr('x', xp1(rangeAvg))
+          .datum(rangeAvg)
+          /*
+          .attr('width', rangeMax <= 0 ?
+                             x1(rangeMax) - x1(rangeAvg)
+                           : x1(rangeAvg) - x1(rangeMin))
+          .attr('x', rangeMax <= 0 ?
+                         x1(rangeAvg)
+                       : x1(rangeMin))
+                      */
+
+      g.select('rect.nv-rangeMin')
+          .attr('height', availableHeight)
+          .attr('width', w1(rangeMax))
+          .attr('x', xp1(rangeMax))
+          .attr('width', w1(rangeMax > 0 ? rangeMin : rangeMax))
+          .attr('x', xp1(rangeMax > 0 ? rangeMin : rangeMax))
+          .datum(rangeMax > 0 ? rangeMin : rangeMax)
+          /*
+          .attr('width', rangeMax <= 0 ?
+                             x1(rangeAvg) - x1(rangeMin)
+                           : x1(rangeMax) - x1(rangeAvg))
+          .attr('x', rangeMax <= 0 ?
+                         x1(rangeMin)
+                       : x1(rangeAvg))
+                      */
+
+      g.select('rect.nv-measure')
+          .style('fill', color)
+          .attr('height', availableHeight / 3)
+          .attr('y', availableHeight / 3)
+          .attr('width', measurez[0] < 0 ?
+                             x1(0) - x1(measurez[0])
+                           : x1(measurez[0]) - x1(0))
+          .attr('x', xp1(measurez[0]))
+          .on('mouseover', function() {
+              dispatch.elementMouseover({
+                value: measurez[0],
+                label: measureLabelz[0] || 'Current',
+                pos: [x1(measurez[0]), availableHeight/2]
+              })
+          })
+          .on('mouseout', function() {
+              dispatch.elementMouseout({
+                value: measurez[0],
+                label: measureLabelz[0] || 'Current'
+              })
+          })
+
+      var h3 =  availableHeight / 6;
+      if (markerz[0]) {
+        g.selectAll('path.nv-markerTriangle')
+            .attr('transform', function(d) { return 'translate(' + x1(markerz[0]) + ',' + (availableHeight / 2) + ')' })
+            .attr('d', 'M0,' + h3 + 'L' + h3 + ',' + (-h3) + ' ' + (-h3) + ',' + (-h3) + 'Z')
+            .on('mouseover', function() {
+              dispatch.elementMouseover({
+                value: markerz[0],
+                label: markerLabelz[0] || 'Previous',
+                pos: [x1(markerz[0]), availableHeight/2]
+              })
+            })
+            .on('mouseout', function() {
+              dispatch.elementMouseout({
+                value: markerz[0],
+                label: markerLabelz[0] || 'Previous'
+              })
+            });
+      } else {
+        g.selectAll('path.nv-markerTriangle').remove();
+      }
+
+
+      wrap.selectAll('.nv-range')
+          .on('mouseover', function(d,i) {
+            var label = rangeLabelz[i] || (!i ? "Maximum" : i == 1 ? "Mean" : "Minimum");
+
+            dispatch.elementMouseover({
+              value: d,
+              label: label,
+              pos: [x1(d), availableHeight/2]
+            })
+          })
+          .on('mouseout', function(d,i) {
+            var label = rangeLabelz[i] || (!i ? "Maximum" : i == 1 ? "Mean" : "Minimum");
+
+            dispatch.elementMouseout({
+              value: d,
+              label: label
+            })
+          })
+
+    });
+
+    // d3.timer.flush();  // Not needed?
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  // left, right, top, bottom
+  chart.orient = function(_) {
+    if (!arguments.length) return orient;
+    orient = _;
+    reverse = orient == 'right' || orient == 'bottom';
+    return chart;
+  };
+
+  // ranges (bad, satisfactory, good)
+  chart.ranges = function(_) {
+    if (!arguments.length) return ranges;
+    ranges = _;
+    return chart;
+  };
+
+  // markers (previous, goal)
+  chart.markers = function(_) {
+    if (!arguments.length) return markers;
+    markers = _;
+    return chart;
+  };
+
+  // measures (actual, forecast)
+  chart.measures = function(_) {
+    if (!arguments.length) return measures;
+    measures = _;
+    return chart;
+  };
+
+  chart.forceX = function(_) {
+    if (!arguments.length) return forceX;
+    forceX = _;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
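+  // e.g. chart.margin({top: 10}) updates only the top margin; the other
+  // three sides keep their current values.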
+
+  chart.tickFormat = function(_) {
+    if (!arguments.length) return tickFormat;
+    tickFormat = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+};
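+
+// A minimal configuration sketch for the model above: each accessor doubles
+// as getter and setter. Call it with an argument to set (returning the chart
+// for chaining), or with none to read the current value:
+//
+//   var bullet = nv.models.bullet().orient('left').width(300).height(30);
+//   bullet.width();  // -> 300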
+
+
+
+// Chart design based on the recommendations of Stephen Few. Implementation
+// based on the work of Clint Ivy, Jamie Love, and Jason Davies.
+// http://projects.instantcognition.com/protovis/bulletchart/
+nv.models.bulletChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var bullet = nv.models.bullet()
+    ;
+
+  var orient = 'left' // TODO top & bottom
+    , reverse = false
+    , margin = {top: 5, right: 40, bottom: 20, left: 120}
+    , ranges = function(d) { return d.ranges }
+    , markers = function(d) { return d.markers }
+    , measures = function(d) { return d.measures }
+    , width = null
+    , height = 55
+    , tickFormat = null
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + x + '</h3>' +
+               '<p>' + y + '</p>'
+      }
+    , noData = 'No Data Available.'
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide')
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ) + margin.left,
+        top = e.pos[1] + ( offsetElement.offsetTop || 0) + margin.top,
+        content = tooltip(e.key, e.label, e.value, e, chart);
+
+    nv.tooltip.show([left, top], content, e.value < 0 ? 'e' : 'w', null, offsetElement);
+  };
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(d, i) {
+      var container = d3.select(this);
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          that = this;
+
+
+      chart.update = function() { chart(selection) };
+      chart.container = this;
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!d || !ranges.call(this, d, i)) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', 18 + margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+
+      var rangez = ranges.call(this, d, i).slice().sort(d3.descending),
+          markerz = markers.call(this, d, i).slice().sort(d3.descending),
+          measurez = measures.call(this, d, i).slice().sort(d3.descending);
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-bulletChart').data([d]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-bulletChart');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-bulletWrap');
+      gEnter.append('g').attr('class', 'nv-titles');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+      // Compute the new x-scale.
+      var x1 = d3.scale.linear()
+          .domain([0, Math.max(rangez[0], markerz[0], measurez[0])])  // TODO: need to allow forceX and forceY, and xDomain, yDomain
+          .range(reverse ? [availableWidth, 0] : [0, availableWidth]);
+
+      // Retrieve the old x-scale, if this is an update.
+      var x0 = this.__chart__ || d3.scale.linear()
+          .domain([0, Infinity])
+          .range(x1.range());
+
+      // Stash the new scale.
+      this.__chart__ = x1;
+
+      var w0 = function(d) { return Math.abs(x0(d) - x0(0)) }, // TODO: could optimize by precalculating x0(0) and x1(0)
+          w1 = function(d) { return Math.abs(x1(d) - x1(0)) };
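+      // e.g. with domain [0, 300] and a 480px range, w1(150) = |240 - 0| = 240px;
+      // with the reversed ('right'/'bottom') range, x1(0) = 480 and the same
+      // value still yields |240 - 480| = 240px.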
+
+
+      var title = gEnter.select('.nv-titles').append('g')
+          .attr('text-anchor', 'end')
+          .attr('transform', 'translate(-6,' + (height - margin.top - margin.bottom) / 2 + ')');
+      title.append('text')
+          .attr('class', 'nv-title')
+          .text(function(d) { return d.title; });
+
+      title.append('text')
+          .attr('class', 'nv-subtitle')
+          .attr('dy', '1em')
+          .text(function(d) { return d.subtitle; });
+
+
+
+      bullet
+        .width(availableWidth)
+        .height(availableHeight)
+
+      var bulletWrap = g.select('.nv-bulletWrap');
+
+      d3.transition(bulletWrap).call(bullet);
+
+
+
+      // Compute the tick format.
+      var format = tickFormat || x1.tickFormat( availableWidth / 100 );
+
+      // Update the tick groups.
+      var tick = g.selectAll('g.nv-tick')
+          .data(x1.ticks( availableWidth / 50 ), function(d) {
+            return this.textContent || format(d);
+          });
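+      // For existing tick nodes the key function sees `this` as the DOM node,
+      // so an unchanged label (this.textContent) keeps the tick in the update
+      // selection; for incoming data `this` has no textContent and the key
+      // falls back to format(d).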
+
+      // Initialize the ticks with the old scale, x0.
+      var tickEnter = tick.enter().append('g')
+          .attr('class', 'nv-tick')
+          .attr('transform', function(d) { return 'translate(' + x0(d) + ',0)' })
+          .style('opacity', 1e-6);
+
+      tickEnter.append('line')
+          .attr('y1', availableHeight)
+          .attr('y2', availableHeight * 7 / 6);
+
+      tickEnter.append('text')
+          .attr('text-anchor', 'middle')
+          .attr('dy', '1em')
+          .attr('y', availableHeight * 7 / 6)
+          .text(format);
+
+
+      // Transition the updating ticks to the new scale, x1.
+      var tickUpdate = d3.transition(tick)
+          .attr('transform', function(d) { return 'translate(' + x1(d) + ',0)' })
+          .style('opacity', 1);
+
+      tickUpdate.select('line')
+          .attr('y1', availableHeight)
+          .attr('y2', availableHeight * 7 / 6);
+
+      tickUpdate.select('text')
+          .attr('y', availableHeight * 7 / 6);
+
+      // Transition the exiting ticks to the new scale, x1.
+      d3.transition(tick.exit())
+          .attr('transform', function(d) { return 'translate(' + x1(d) + ',0)' })
+          .style('opacity', 1e-6)
+          .remove();
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      dispatch.on('tooltipShow', function(e) {
+        e.key = d.title;
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+      //============================================================
+
+    });
+
+    d3.timer.flush();
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  bullet.dispatch.on('elementMouseover.tooltip', function(e) {
+    dispatch.tooltipShow(e);
+  });
+
+  bullet.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+  chart.bullet = bullet;
+
+  d3.rebind(chart, bullet, 'color');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  // left, right, top, bottom
+  chart.orient = function(x) {
+    if (!arguments.length) return orient;
+    orient = x;
+    reverse = orient == 'right' || orient == 'bottom';
+    return chart;
+  };
+
+  // ranges (bad, satisfactory, good)
+  chart.ranges = function(x) {
+    if (!arguments.length) return ranges;
+    ranges = x;
+    return chart;
+  };
+
+  // markers (previous, goal)
+  chart.markers = function(x) {
+    if (!arguments.length) return markers;
+    markers = x;
+    return chart;
+  };
+
+  // measures (actual, forecast)
+  chart.measures = function(x) {
+    if (!arguments.length) return measures;
+    measures = x;
+    return chart;
+  };
+
+  chart.width = function(x) {
+    if (!arguments.length) return width;
+    width = x;
+    return chart;
+  };
+
+  chart.height = function(x) {
+    if (!arguments.length) return height;
+    height = x;
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.tickFormat = function(x) {
+    if (!arguments.length) return tickFormat;
+    tickFormat = x;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+};
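+
+// Example usage, as a minimal sketch: the '#example svg' selector and the
+// nv.addGraph() bootstrap from nv core are assumptions, not defined in this
+// model; the datum fields match the ranges/markers/measures accessors above.
+//
+//   nv.addGraph(function() {
+//     var chart = nv.models.bulletChart();
+//     d3.select('#example svg')
+//         .datum({
+//           title: 'Revenue',
+//           subtitle: 'US$, in thousands',
+//           ranges: [150, 225, 300],   // bad, satisfactory, good
+//           measures: [220],           // actual
+//           markers: [250]             // target
+//         })
+//         .call(chart);
+//     return chart;
+//   });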
+
+
+
+nv.models.cumulativeLineChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var lines = nv.models.line()
+    , xAxis = nv.models.axis()
+    , yAxis = nv.models.axis()
+    , legend = nv.models.legend()
+    , controls = nv.models.legend()
+    , interactiveLayer = nv.interactiveGuideline()
+    ;
+
+  var margin = {top: 30, right: 30, bottom: 50, left: 60}
+    , color = nv.utils.defaultColor()
+    , width = null
+    , height = null
+    , showLegend = true
+    , showXAxis = true
+    , showYAxis = true
+    , rightAlignYAxis = false
+    , tooltips = true
+    , showControls = true
+    , useInteractiveGuideline = false
+    , rescaleY = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' at ' + x + '</p>'
+      }
+    , x //can be accessed via chart.xScale()
+    , y //can be accessed via chart.yScale()
+    , id = lines.id()
+    , state = { index: 0, rescaleY: rescaleY }
+    , defaultState = null
+    , noData = 'No Data Available.'
+    , average = function(d) { return d.average }
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    , transitionDuration = 0
+    , noErrorCheck = false  // if true, bypass the error check in the indexify function
+    ;
+
+  xAxis
+    .orient('bottom')
+    .tickPadding(7)
+    ;
+  yAxis
+    .orient((rightAlignYAxis) ? 'right' : 'left')
+    ;
+
+  //============================================================
+  controls.updateState(false);
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+   var dx = d3.scale.linear()
+     , index = {i: 0, x: 0}
+     ;
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(lines.x()(e.point, e.pointIndex)),
+        y = yAxis.tickFormat()(lines.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, null, null, offsetElement);
+  };
+
+  //============================================================
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this).classed('nv-chart-' + id, true),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+
+      chart.update = function() { container.call(chart) };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      var indexDrag = d3.behavior.drag()
+                        .on('dragstart', dragStart)
+                        .on('drag', dragMove)
+                        .on('dragend', dragEnd);
+
+
+      function dragStart(d,i) {
+        d3.select(chart.container)
+            .style('cursor', 'ew-resize');
+      }
+
+      function dragMove(d,i) {
+        index.x = d3.event.x;
+        index.i = Math.round(dx.invert(index.x));
+        updateZero();
+      }
+
+      function dragEnd(d,i) {
+        d3.select(chart.container)
+            .style('cursor', 'auto');
+
+        // update state and send stateChange with new index
+        state.index = index.i;
+        dispatch.stateChange(state);
+      }
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = lines.xScale();
+      y = lines.yScale();
+
+
+      if (!rescaleY) {
+        var seriesDomains = data
+          .filter(function(series) { return !series.disabled })
+          .map(function(series,i) {
+            var initialDomain = d3.extent(series.values, lines.y());
+
+            //account for series being disabled when losing 95% or more
+            if (initialDomain[0] < -.95) initialDomain[0] = -.95;
+
+            return [
+              (initialDomain[0] - initialDomain[1]) / (1 + initialDomain[1]),
+              (initialDomain[1] - initialDomain[0]) / (1 + initialDomain[0])
+            ];
+          });
+
+        var completeDomain = [
+          d3.min(seriesDomains, function(d) { return d[0] }),
+          d3.max(seriesDomains, function(d) { return d[1] })
+        ]
+
+        lines.yDomain(completeDomain);
+      } else {
+        lines.yDomain(null);
+      }
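+      // Worked example for the fixed-domain branch above: a series with raw
+      // extent [-0.5, 1.0] can display anywhere between
+      // (-0.5 - 1.0) / (1 + 1.0) = -0.75 and (1.0 - (-0.5)) / (1 + (-0.5)) = 3.0,
+      // so [-0.75, 3.0] becomes the locked y-domain.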
+
+
+      dx  .domain([0, data[0].values.length - 1]) //Assumes all series have same length
+          .range([0, availableWidth])
+          .clamp(true);
+
+      //------------------------------------------------------------
+
+
+      var data = indexify(index.i, data);
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+      var interactivePointerEvents = (useInteractiveGuideline) ? "none" : "all";
+      var wrap = container.selectAll('g.nv-wrap.nv-cumulativeLine').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-cumulativeLine').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-interactive');
+      gEnter.append('g').attr('class', 'nv-x nv-axis').style("pointer-events","none");
+      gEnter.append('g').attr('class', 'nv-y nv-axis');
+      gEnter.append('g').attr('class', 'nv-background');
+      gEnter.append('g').attr('class', 'nv-linesWrap').style("pointer-events",interactivePointerEvents);
+      gEnter.append('g').attr('class', 'nv-avgLinesWrap').style("pointer-events","none");
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+      gEnter.append('g').attr('class', 'nv-controlsWrap');
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width(availableWidth);
+
+        g.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        g.select('.nv-legendWrap')
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Controls
+
+      if (showControls) {
+        var controlsData = [
+          { key: 'Re-scale y-axis', disabled: !rescaleY }
+        ];
+
+        controls
+            .width(140)
+            .color(['#444', '#444', '#444'])
+            .rightAlign(false)
+            .margin({top: 5, right: 0, bottom: 5, left: 20})
+            ;
+
+        g.select('.nv-controlsWrap')
+            .datum(controlsData)
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+            .call(controls);
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      if (rightAlignYAxis) {
+          g.select(".nv-y.nv-axis")
+              .attr("transform", "translate(" + availableWidth + ",0)");
+      }
+
+      // Show a notice for any series that cannot be indexed (lost 100% or more)
+      var tempDisabled = data.filter(function(d) { return d.tempDisabled });
+
+      wrap.select('.tempDisabled').remove(); //clean-up and prevent duplicates
+      if (tempDisabled.length) {
+        wrap.append('text').attr('class', 'tempDisabled')
+            .attr('x', availableWidth / 2)
+            .attr('y', '-.71em')
+            .style('text-anchor', 'end')
+            .text(tempDisabled.map(function(d) { return d.key }).join(', ') + ' values cannot be calculated for this time period.');
+      }
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      //------------------------------------------------------------
+      //Set up interactive layer
+      if (useInteractiveGuideline) {
+        interactiveLayer
+          .width(availableWidth)
+          .height(availableHeight)
+          .margin({left:margin.left,top:margin.top})
+          .svgContainer(container)
+          .xScale(x);
+        wrap.select(".nv-interactive").call(interactiveLayer);
+      }
+
+      gEnter.select('.nv-background')
+        .append('rect');
+
+      g.select('.nv-background rect')
+          .attr('width', availableWidth)
+          .attr('height', availableHeight);
+
+      lines
+        //.x(function(d) { return d.x })
+        .y(function(d) { return d.display.y })
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled && !data[i].tempDisabled; }));
+
+
+
+      var linesWrap = g.select('.nv-linesWrap')
+          .datum(data.filter(function(d) { return  !d.disabled && !d.tempDisabled }));
+
+      //d3.transition(linesWrap).call(lines);
+      linesWrap.call(lines);
+
+      /*Handle average lines [AN-612] ----------------------------*/
+
+      //Store a series index number in the data array.
+      data.forEach(function(d,i) {
+            d.seriesIndex = i;
+      });
+
+      var avgLineData = data.filter(function(d) {
+          return !d.disabled && !!average(d);
+      });
+
+      var avgLines = g.select(".nv-avgLinesWrap").selectAll("line")
+              .data(avgLineData, function(d) { return d.key; });
+
+      var getAvgLineY = function(d) {
+          //If average lines go off the svg element, clamp them to the svg bounds.
+          var yVal = y(average(d));
+          if (yVal < 0) return 0;
+          if (yVal > availableHeight) return availableHeight;
+          return yVal;
+      };
+
+      avgLines.enter()
+              .append('line')
+              .style('stroke-width',2)
+              .style('stroke-dasharray','10,10')
+              .style('stroke',function (d,i) {
+                  return lines.color()(d,d.seriesIndex);
+              })
+              .attr('x1',0)
+              .attr('x2',availableWidth)
+              .attr('y1', getAvgLineY)
+              .attr('y2', getAvgLineY);
+
+      avgLines
+              .style('stroke-opacity',function(d){
+                  //If average lines go offscreen, make them transparent
+                  var yVal = y(average(d));
+                  if (yVal < 0 || yVal > availableHeight) return 0;
+                  return 1;
+              })
+              .attr('x1',0)
+              .attr('x2',availableWidth)
+              .attr('y1', getAvgLineY)
+              .attr('y2', getAvgLineY);
+
+      avgLines.exit().remove();
+
+      //Create index line -----------------------------------------
+
+      var indexLine = linesWrap.selectAll('.nv-indexLine')
+          .data([index]);
+      indexLine.enter().append('rect').attr('class', 'nv-indexLine')
+          .attr('width', 3)
+          .attr('x', -2)
+          .attr('fill', 'red')
+          .attr('fill-opacity', .5)
+          .style("pointer-events","all")
+          .call(indexDrag)
+
+      indexLine
+          .attr('transform', function(d) { return 'translate(' + dx(d.i) + ',0)' })
+          .attr('height', availableHeight)
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      if (showXAxis) {
+        xAxis
+          .scale(x)
+          // Suggest a tick count from the chart width; D3 treats it as a hint (~70px per tick works well for MM/DD/YY dates)
+          .ticks( Math.min(data[0].values.length,availableWidth/70) )
+          .tickSize(-availableHeight, 0);
+
+        g.select('.nv-x.nv-axis')
+            .attr('transform', 'translate(0,' + y.range()[0] + ')');
+        d3.transition(g.select('.nv-x.nv-axis'))
+            .call(xAxis);
+      }
+
+
+      if (showYAxis) {
+        yAxis
+          .scale(y)
+          .ticks( availableHeight / 36 )
+          .tickSize( -availableWidth, 0);
+
+        d3.transition(g.select('.nv-y.nv-axis'))
+            .call(yAxis);
+      }
+      //------------------------------------------------------------
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+
+      function updateZero() {
+        indexLine
+          .data([index]);
+
+        //When dragging the index line, turn off line transitions.
+        // Then turn them back on when done dragging.
+        var oldDuration = chart.transitionDuration();
+        chart.transitionDuration(0);
+        chart.update();
+        chart.transitionDuration(oldDuration);
+      }
+
+      g.select('.nv-background rect')
+          .on('click', function() {
+            index.x = d3.mouse(this)[0];
+            index.i = Math.round(dx.invert(index.x));
+
+            // update state and send stateChange with new index
+            state.index = index.i;
+            dispatch.stateChange(state);
+
+            updateZero();
+          });
+
+      lines.dispatch.on('elementClick', function(e) {
+        index.i = e.pointIndex;
+        index.x = dx(index.i);
+
+        // update state and send stateChange with new index
+        state.index = index.i;
+        dispatch.stateChange(state);
+
+        updateZero();
+      });
+
+      controls.dispatch.on('legendClick', function(d,i) {
+        d.disabled = !d.disabled;
+        rescaleY = !d.disabled;
+
+        state.rescaleY = rescaleY;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+
+      legend.dispatch.on('stateChange', function(newState) {
+        state.disabled = newState.disabled;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+      interactiveLayer.dispatch.on('elementMousemove', function(e) {
+          lines.clearHighlights();
+          var singlePoint, pointIndex, pointXLocation, allData = [];
+
+
+          data
+          .filter(function(series, i) {
+            series.seriesIndex = i;
+            return !series.disabled;
+          })
+          .forEach(function(series,i) {
+              pointIndex = nv.interactiveBisect(series.values, e.pointXValue, chart.x());
+              lines.highlightPoint(i, pointIndex, true);
+              var point = series.values[pointIndex];
+              if (typeof point === 'undefined') return;
+              if (typeof singlePoint === 'undefined') singlePoint = point;
+              if (typeof pointXLocation === 'undefined') pointXLocation = chart.xScale()(chart.x()(point,pointIndex));
+              allData.push({
+                  key: series.key,
+                  value: chart.y()(point, pointIndex),
+                  color: color(series,series.seriesIndex)
+              });
+          });
+
+          //Highlight the tooltip entry based on which point the mouse is closest to.
+          if (allData.length > 2) {
+            var yValue = chart.yScale().invert(e.mouseY);
+            var domainExtent = Math.abs(chart.yScale().domain()[0] - chart.yScale().domain()[1]);
+            var threshold = 0.03 * domainExtent;
+            var indexToHighlight = nv.nearestValueIndex(allData.map(function(d){return d.value}),yValue,threshold);
+            if (indexToHighlight !== null)
+              allData[indexToHighlight].highlight = true;
+          }
+
+          var xValue = xAxis.tickFormat()(chart.x()(singlePoint,pointIndex), pointIndex);
+          interactiveLayer.tooltip
+                  .position({left: pointXLocation + margin.left, top: e.mouseY + margin.top})
+                  .chartContainer(that.parentNode)
+                  .enabled(tooltips)
+                  .valueFormatter(function(d,i) {
+                     return yAxis.tickFormat()(d);
+                  })
+                  .data(
+                      {
+                        value: xValue,
+                        series: allData
+                      }
+                  )();
+
+          interactiveLayer.renderGuideLine(pointXLocation);
+
+      });
+
+      interactiveLayer.dispatch.on("elementMouseout",function(e) {
+          dispatch.tooltipHide();
+          lines.clearHighlights();
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+
+      // Update chart from a state object passed to event handler
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined') {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+
+        if (typeof e.index !== 'undefined') {
+          index.i = e.index;
+          index.x = dx(index.i);
+
+          state.index = e.index;
+
+          indexLine
+            .data([index]);
+        }
+
+
+        if (typeof e.rescaleY !== 'undefined') {
+          rescaleY = e.rescaleY;
+        }
+
+        chart.update();
+      });
+
+      //============================================================
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  lines.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  lines.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.lines = lines;
+  chart.legend = legend;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+  chart.interactiveLayer = interactiveLayer;
+
+  d3.rebind(chart, lines, 'defined', 'isArea', 'x', 'y', 'xScale','yScale', 'size', 'xDomain', 'yDomain', 'xRange', 'yRange', 'forceX', 'forceY', 'interactive', 'clipEdge', 'clipVoronoi','useVoronoi',  'id');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    return chart;
+  };
+
+  chart.rescaleY = function(_) {
+    if (!arguments.length) return rescaleY;
+    rescaleY = _;
+    return chart;
+  };
+
+  chart.showControls = function(_) {
+    if (!arguments.length) return showControls;
+    showControls = _;
+    return chart;
+  };
+
+  chart.useInteractiveGuideline = function(_) {
+    if(!arguments.length) return useInteractiveGuideline;
+    useInteractiveGuideline = _;
+    if (_ === true) {
+       chart.interactive(false);
+       chart.useVoronoi(false);
+    }
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.rightAlignYAxis = function(_) {
+    if(!arguments.length) return rightAlignYAxis;
+    rightAlignYAxis = _;
+    yAxis.orient( (_) ? 'right' : 'left');
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.average = function(_) {
+     if(!arguments.length) return average;
+     average = _;
+     return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  chart.noErrorCheck = function(_) {
+    if (!arguments.length) return noErrorCheck;
+    noErrorCheck = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  //============================================================
+  // Functions
+  //------------------------------------------------------------
+
+  /* Normalize the data according to an index point. */
+  function indexify(idx, data) {
+    return data.map(function(line, i) {
+      if (!line.values) {
+         return line;
+      }
+      var indexValue = line.values[idx];
+      if (indexValue == null) {
+        return line;
+      }
+      var v = lines.y()(indexValue, idx);
+
+      //TODO: implement the check below and disable any series that loses 100% or more, since that causes a divide-by-zero
+      if (v < -.95 && !noErrorCheck) {
+        // If a series loses 100% or more, the calculation divides by zero (or flips sign);
+        // values approaching -100% remain mathematically correct but badly distort the chart.
+
+        line.tempDisabled = true;
+        return line;
+      }
+
+      line.tempDisabled = false;
+
+      line.values = line.values.map(function(point, pointIndex) {
+        point.display = {'y': (lines.y()(point, pointIndex) - v) / (1 + v) };
+        return point;
+      })
+
+      return line;
+    })
+  }
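+  // e.g. with an index-point value v = 0.25, a later raw value of 0.50 maps to
+  // (0.50 - 0.25) / (1 + 0.25) = 0.20, i.e. +20% relative to the index point;
+  // the index point itself always maps to 0.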
+
+  //============================================================
+
+
+  return chart;
+}
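+
+// Example usage, as a minimal sketch: '#example svg' and nv.addGraph() are
+// assumed to exist elsewhere; each series supplies raw y values, which
+// indexify() converts to growth relative to the draggable index point.
+//
+//   nv.addGraph(function() {
+//     var chart = nv.models.cumulativeLineChart()
+//         .x(function(d) { return d.x })
+//         .y(function(d) { return d.y })
+//         .useInteractiveGuideline(true);
+//     d3.select('#example svg')
+//         .datum([
+//           { key: 'Series A', values: [{x: 0, y: 1.00}, {x: 1, y: 1.25}] },
+//           { key: 'Series B', values: [{x: 0, y: 2.00}, {x: 1, y: 1.50}] }
+//         ])
+//         .call(chart);
+//     return chart;
+//   });
+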
+//TODO: consider deprecating by adding necessary features to multiBar model
+nv.models.discreteBar = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 960
+    , height = 500
+    , id = Math.floor(Math.random() * 10000) // create a semi-unique ID in case the user doesn't specify one
+    , x = d3.scale.ordinal()
+    , y = d3.scale.linear()
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , forceY = [0] // 0 is forced by default, which suits most bar graphs; call chart.forceY([]) to disable
+    , color = nv.utils.defaultColor()
+    , showValues = false
+    , valueFormat = d3.format(',.2f')
+    , xDomain
+    , yDomain
+    , xRange
+    , yRange
+    , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout')
+    , rectClass = 'discreteBar'
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var x0, y0;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+
+      //add series index to each data point for reference
+      data.forEach(function(series, i) {
+        series.values.forEach(function(point) {
+          point.series = i;
+        });
+      });
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      // remap and flatten the data for use in calculating the scales' domains
+      var seriesData = (xDomain && yDomain) ? [] : // if we know xDomain and yDomain, no need to calculate
+            data.map(function(d) {
+              return d.values.map(function(d,i) {
+                return { x: getX(d,i), y: getY(d,i), y0: d.y0 }
+              })
+            });
+
+      x   .domain(xDomain || d3.merge(seriesData).map(function(d) { return d.x }))
+          .rangeBands(xRange || [0, availableWidth], .1);
+
+      y   .domain(yDomain || d3.extent(d3.merge(seriesData).map(function(d) { return d.y }).concat(forceY)));
+
+
+      // If showValues, pad the Y axis range to account for label height
+      if (showValues) y.range(yRange || [availableHeight - (y.domain()[0] < 0 ? 12 : 0), y.domain()[1] > 0 ? 12 : 0]);
+      else y.range(yRange || [availableHeight, 0]);
+
+      //store old scales if they exist
+      x0 = x0 || x;
+      y0 = y0 || y.copy().range([y(0),y(0)]);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-discretebar').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-discretebar');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-groups');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+
+      //TODO: by definition, the discrete bar should not have multiple groups, will modify/remove later
+      var groups = wrap.select('.nv-groups').selectAll('.nv-group')
+          .data(function(d) { return d }, function(d) { return d.key });
+      groups.enter().append('g')
+          .style('stroke-opacity', 1e-6)
+          .style('fill-opacity', 1e-6);
+      groups.exit()
+          .style('stroke-opacity', 1e-6)
+          .style('fill-opacity', 1e-6)
+          .remove();
+      groups
+          .attr('class', function(d,i) { return 'nv-group nv-series-' + i })
+          .classed('hover', function(d) { return d.hover });
+      groups
+          .style('stroke-opacity', 1)
+          .style('fill-opacity', .75);
+
+
+      var bars = groups.selectAll('g.nv-bar')
+          .data(function(d) { return d.values });
+
+      bars.exit().remove();
+
+
+      var barsEnter = bars.enter().append('g')
+          .attr('transform', function(d,i,j) {
+              return 'translate(' + (x(getX(d,i)) + x.rangeBand() * .05 ) + ', ' + y(0) + ')'
+          })
+          .on('mouseover', function(d,i) { //TODO: figure out why j works above, but not here
+            d3.select(this).classed('hover', true);
+            dispatch.elementMouseover({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [x(getX(d,i)) + (x.rangeBand() * (d.series + .5) / data.length), y(getY(d,i))],  // TODO: Figure out why the value appears to be shifted
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+          })
+          .on('mouseout', function(d,i) {
+            d3.select(this).classed('hover', false);
+            dispatch.elementMouseout({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+          })
+          .on('click', function(d,i) {
+            dispatch.elementClick({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [x(getX(d,i)) + (x.rangeBand() * (d.series + .5) / data.length), y(getY(d,i))],  // TODO: Figure out why the value appears to be shifted
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+            d3.event.stopPropagation();
+          })
+          .on('dblclick', function(d,i) {
+            dispatch.elementDblClick({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [x(getX(d,i)) + (x.rangeBand() * (d.series + .5) / data.length), y(getY(d,i))],  // TODO: Figure out why the value appears to be shifted
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+            d3.event.stopPropagation();
+          });
+
+      barsEnter.append('rect')
+          .attr('height', 0)
+          .attr('width', x.rangeBand() * .9 / data.length )
+
+      if (showValues) {
+        barsEnter.append('text')
+          .attr('text-anchor', 'middle')
+          ;
+
+        bars.select('text')
+          .text(function(d,i) { return valueFormat(getY(d,i)) })
+          .attr('x', x.rangeBand() * .9 / 2)
+          .attr('y', function(d,i) { return getY(d,i) < 0 ? y(getY(d,i)) - y(0) + 12 : -4 });
+      } else {
+        bars.selectAll('text').remove();
+      }
+
+      bars
+          .attr('class', function(d,i) { return getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive' })
+          .style('fill', function(d,i) { return d.color || color(d,i) })
+          .style('stroke', function(d,i) { return d.color || color(d,i) })
+        .select('rect')
+          .attr('class', rectClass)
+          .attr('width', x.rangeBand() * .9 / data.length);
+      bars
+          .attr('transform', function(d,i) {
+            var left = x(getX(d,i)) + x.rangeBand() * .05,
+                top = getY(d,i) < 0 ?
+                        y(0) :
+                        y(0) - y(getY(d,i)) < 1 ?
+                          y(0) - 1 : //make 1 px positive bars show up above y=0
+                          y(getY(d,i));
+
+              return 'translate(' + left + ', ' + top + ')'
+          })
+        .select('rect')
+          .attr('height', function(d,i) {
+            return Math.abs(y(getY(d,i)) - y((yDomain && yDomain[0]) || 0)) || 1; // keep bars at least 1px tall
+          });
+
+
+      //store old scales for use in transitions on update
+      x0 = x.copy();
+      y0 = y.copy();
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = _;
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.xScale = function(_) {
+    if (!arguments.length) return x;
+    x = _;
+    return chart;
+  };
+
+  chart.yScale = function(_) {
+    if (!arguments.length) return y;
+    y = _;
+    return chart;
+  };
+
+  chart.xDomain = function(_) {
+    if (!arguments.length) return xDomain;
+    xDomain = _;
+    return chart;
+  };
+
+  chart.yDomain = function(_) {
+    if (!arguments.length) return yDomain;
+    yDomain = _;
+    return chart;
+  };
+
+  chart.xRange = function(_) {
+    if (!arguments.length) return xRange;
+    xRange = _;
+    return chart;
+  };
+
+  chart.yRange = function(_) {
+    if (!arguments.length) return yRange;
+    yRange = _;
+    return chart;
+  };
+
+  chart.forceY = function(_) {
+    if (!arguments.length) return forceY;
+    forceY = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.id = function(_) {
+    if (!arguments.length) return id;
+    id = _;
+    return chart;
+  };
+
+  chart.showValues = function(_) {
+    if (!arguments.length) return showValues;
+    showValues = _;
+    return chart;
+  };
+
+  chart.valueFormat= function(_) {
+    if (!arguments.length) return valueFormat;
+    valueFormat = _;
+    return chart;
+  };
+
+  chart.rectClass= function(_) {
+    if (!arguments.length) return rectClass;
+    rectClass = _;
+    return chart;
+  };
+  //============================================================
+
+
+  return chart;
+}
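+
+// Data shape the model above expects, inferred from the getX/getY defaults:
+// an array of series, each with a values array of {x, y} points, e.g.
+//   [{ key: 'Totals', values: [{x: 'A', y: 10}, {x: 'B', y: -4}] }]
+// Remap the field names with chart.x()/chart.y() if your points differ.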
+
+nv.models.discreteBarChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var discretebar = nv.models.discreteBar()
+    , xAxis = nv.models.axis()
+    , yAxis = nv.models.axis()
+    ;
+
+  var margin = {top: 15, right: 10, bottom: 50, left: 60}
+    , width = null
+    , height = null
+    , color = nv.utils.getColor()
+    , showXAxis = true
+    , showYAxis = true
+    , rightAlignYAxis = false
+    , staggerLabels = false
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + x + '</h3>' +
+               '<p>' +  y + '</p>'
+      }
+    , x
+    , y
+    , noData = "No Data Available."
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'beforeUpdate')
+    , transitionDuration = 0
+    ;
+
+  xAxis
+    .orient('bottom')
+    .highlightZero(false)
+    .showMaxMin(false)
+    .tickFormat(function(d) { return d })
+    ;
+  yAxis
+    .orient((rightAlignYAxis) ? 'right' : 'left')
+    .tickFormat(d3.format(',.1f'))
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(discretebar.x()(e.point, e.pointIndex)),
+        y = yAxis.tickFormat()(discretebar.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, e.value < 0 ? 'n' : 's', null, offsetElement);
+  };
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+
+      chart.update = function() { 
+        dispatch.beforeUpdate(); 
+        container.call(chart); 
+      };
+      chart.container = this;
+
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = discretebar.xScale();
+      y = discretebar.yScale().clamp(true);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-discreteBarWithAxes').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-discreteBarWithAxes').append('g');
+      var defsEnter = gEnter.append('defs');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y nv-axis')
+            .append('g').attr('class', 'nv-zeroLine')
+            .append('line');
+        
+      gEnter.append('g').attr('class', 'nv-barsWrap');
+
+      g.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      if (rightAlignYAxis) {
+          g.select(".nv-y.nv-axis")
+              .attr("transform", "translate(" + availableWidth + ",0)");
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      discretebar
+        .width(availableWidth)
+        .height(availableHeight);
+
+
+      var barsWrap = g.select('.nv-barsWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+
+      barsWrap.call(discretebar);
+
+      //------------------------------------------------------------
+
+
+
+      defsEnter.append('clipPath')
+          .attr('id', 'nv-x-label-clip-' + discretebar.id())
+        .append('rect');
+
+      g.select('#nv-x-label-clip-' + discretebar.id() + ' rect')
+          .attr('width', x.rangeBand() * (staggerLabels ? 2 : 1))
+          .attr('height', 16)
+          .attr('x', -x.rangeBand() / (staggerLabels ? 1 : 2 ));
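+      // The clip rect is centered on each tick (x offset of -rangeBand/2) and
+      // doubled in width when labels are staggered, so long category labels
+      // are clipped rather than overlapping their neighbors.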
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      if (showXAxis) {
+          xAxis
+            .scale(x)
+            .ticks( availableWidth / 100 )
+            .tickSize(-availableHeight, 0);
+
+          g.select('.nv-x.nv-axis')
+              .attr('transform', 'translate(0,' + (y.range()[0] + ((discretebar.showValues() && y.domain()[0] < 0) ? 16 : 0)) + ')');
+          //d3.transition(g.select('.nv-x.nv-axis'))
+          g.select('.nv-x.nv-axis')
+              .call(xAxis);
+
+
+          var xTicks = g.select('.nv-x.nv-axis').selectAll('g');
+
+          if (staggerLabels) {
+            xTicks
+                .selectAll('text')
+                .attr('transform', function(d,i,j) { return 'translate(0,' + (j % 2 == 0 ? '5' : '17') + ')' })
+          }
+      }
+
+      if (showYAxis) {
+          yAxis
+            .scale(y)
+            .ticks( availableHeight / 36 )
+            .tickSize( -availableWidth, 0);
+
+          g.select('.nv-y.nv-axis')
+              .call(yAxis);
+      }
+
+      // Zero line
+      g.select(".nv-zeroLine line")
+        .attr("x1",0)
+        .attr("x2",availableWidth)
+        .attr("y1", y(0))
+        .attr("y2", y(0))
+        ;
+
+      //------------------------------------------------------------
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+      //============================================================
+
+
+    });
+
+    return chart;
+  }
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  discretebar.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  discretebar.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.discretebar = discretebar;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+
+  d3.rebind(chart, discretebar, 'x', 'y', 'xDomain', 'yDomain', 'xRange', 'yRange', 'forceX', 'forceY', 'id', 'showValues', 'valueFormat');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
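+  // The margin setter merges a partial object, so a call such as (hypothetical)
+  //   chart.margin({top: 40})
+  // overrides only the top margin and leaves right, bottom and left at their defaults.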
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    discretebar.color(color);
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.rightAlignYAxis = function(_) {
+    if(!arguments.length) return rightAlignYAxis;
+    rightAlignYAxis = _;
+    yAxis.orient( (_) ? 'right' : 'left');
+    return chart;
+  };
+
+  chart.staggerLabels = function(_) {
+    if (!arguments.length) return staggerLabels;
+    staggerLabels = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
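+
+// Hypothetical usage sketch for the discrete bar chart above (assumes an
+// <svg id="chart"> element and the model's default d.x / d.y accessors):
+//   nv.addGraph(function() {
+//     var chart = nv.models.discreteBarChart().staggerLabels(true);
+//     d3.select('#chart')
+//       .datum([{key: 'Series A', values: [{x: 'A', y: 10}, {x: 'B', y: 4}]}])
+//       .call(chart);
+//     return chart;
+//   });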
+
+nv.models.distribution = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 400 //technically width or height depending on x or y....
+    , size = 8
+    , axis = 'x' // 'x' or 'y'... horizontal or vertical
+    , getData = function(d) { return d[axis] }  // defaults d.x or d.y
+    , color = nv.utils.defaultColor()
+    , scale = d3.scale.linear()
+    , domain
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var scale0;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableLength = width - (axis === 'x' ? margin.left + margin.right : margin.top + margin.bottom),
+          naxis = axis == 'x' ? 'y' : 'x',
+          container = d3.select(this);
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      scale0 = scale0 || scale;
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-distribution').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-distribution');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')')
+
+      //------------------------------------------------------------
+
+
+      var distWrap = g.selectAll('g.nv-dist')
+          .data(function(d) { return d }, function(d) { return d.key });
+
+      distWrap.enter().append('g');
+      distWrap
+          .attr('class', function(d,i) { return 'nv-dist nv-series-' + i })
+          .style('stroke', function(d,i) { return color(d, i) });
+
+      var dist = distWrap.selectAll('line.nv-dist' + axis)
+          .data(function(d) { return d.values })
+      dist.enter().append('line')
+          .attr(axis + '1', function(d,i) { return scale0(getData(d,i)) })
+          .attr(axis + '2', function(d,i) { return scale0(getData(d,i)) })
+      distWrap.exit().selectAll('line.nv-dist' + axis)
+          .attr(axis + '1', function(d,i) { return scale(getData(d,i)) })
+          .attr(axis + '2', function(d,i) { return scale(getData(d,i)) })
+          .style('stroke-opacity', 0)
+          .remove();
+      dist
+          .attr('class', function(d,i) { return 'nv-dist' + axis + ' nv-dist' + axis + '-' + i })
+          .attr(naxis + '1', 0)
+          .attr(naxis + '2', size)
+          .attr(axis + '1', function(d,i) { return scale(getData(d,i)) })
+          .attr(axis + '2', function(d,i) { return scale(getData(d,i)) });
+
+
+      scale0 = scale.copy();
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.axis = function(_) {
+    if (!arguments.length) return axis;
+    axis = _;
+    return chart;
+  };
+
+  chart.size = function(_) {
+    if (!arguments.length) return size;
+    size = _;
+    return chart;
+  };
+
+  chart.getData = function(_) {
+    if (!arguments.length) return getData;
+    getData = d3.functor(_);
+    return chart;
+  };
+
+  chart.scale = function(_) {
+    if (!arguments.length) return scale;
+    scale = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+  //============================================================
+
+
+  return chart;
+}
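+
+// Usage sketch (hypothetical names): the distribution model draws marginal tick
+// marks along one axis and is normally embedded in a host chart that shares its
+// scale (NVD3's scatterChart uses it this way for its distX/distY rugs):
+//   var distX = nv.models.distribution().axis('x').scale(xScale);
+//   g.select('.nv-distWrap').datum(series).call(distX);  // xScale/g/series are placeholders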
+
+nv.models.historicalBarChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var bars = nv.models.historicalBar()
+    , xAxis = nv.models.axis()
+    , yAxis = nv.models.axis()
+    , legend = nv.models.legend()
+    ;
+
+
+  var margin = {top: 30, right: 90, bottom: 50, left: 90}
+    , color = nv.utils.defaultColor()
+    , width = null
+    , height = null
+    , showLegend = false
+    , showXAxis = true
+    , showYAxis = true
+    , rightAlignYAxis = false
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' at ' + x + '</p>'
+      }
+    , x
+    , y
+    , state = {}
+    , defaultState = null
+    , noData = 'No Data Available.'
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    , transitionDuration = 0
+    ;
+
+  xAxis
+    .orient('bottom')
+    .tickPadding(7)
+    ;
+  yAxis
+    .orient( (rightAlignYAxis) ? 'right' : 'left')
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+
+    // New addition: calculate the position when the SVG is scaled with a viewBox.
+    // TODO: consider implementing this everywhere else.
+    if (offsetElement) {
+      var svg = d3.select(offsetElement).select('svg');
+      var viewBox = (svg.node()) ? svg.attr('viewBox') : null;
+      if (viewBox) {
+        viewBox = viewBox.split(' ');
+        var ratio = parseInt(svg.style('width')) / viewBox[2];
+        e.pos[0] = e.pos[0] * ratio;
+        e.pos[1] = e.pos[1] * ratio;
+      }
+    }
+
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(bars.x()(e.point, e.pointIndex)),
+        y = yAxis.tickFormat()(bars.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, null, null, offsetElement);
+  };
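+  // Worked example of the viewBox correction above: if the SVG declares
+  // viewBox="0 0 960 400" but is rendered 480px wide, ratio = 480 / 960 = 0.5,
+  // so both event coordinates are halved to match the on-screen position.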
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+
+      chart.update = function() { container.call(chart) };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      //------------------------------------------------------------
+      // Display noData message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = bars.xScale();
+      y = bars.yScale();
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-historicalBarChart').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-historicalBarChart').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y nv-axis');
+      gEnter.append('g').attr('class', 'nv-barsWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width(availableWidth);
+
+        g.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        wrap.select('.nv-legendWrap')
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+      }
+
+      //------------------------------------------------------------
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      if (rightAlignYAxis) {
+        g.select(".nv-y.nv-axis")
+            .attr("transform", "translate(" + availableWidth + ",0)");
+      }
+
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      bars
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled }));
+
+
+      var barsWrap = g.select('.nv-barsWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+
+      barsWrap.call(bars);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      if (showXAxis) {
+        xAxis
+          .scale(x)
+          .tickSize(-availableHeight, 0);
+
+        g.select('.nv-x.nv-axis')
+            .attr('transform', 'translate(0,' + y.range()[0] + ')')
+            .call(xAxis);
+      }
+
+      if (showYAxis) {
+        yAxis
+          .scale(y)
+          .ticks( availableHeight / 36 )
+          .tickSize( -availableWidth, 0);
+
+        g.select('.nv-y.nv-axis')
+            .call(yAxis);
+      }
+      //------------------------------------------------------------
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      legend.dispatch.on('legendClick', function(d,i) {
+        d.disabled = !d.disabled;
+
+        if (!data.filter(function(d) { return !d.disabled }).length) {
+          data.map(function(d) {
+            d.disabled = false;
+            wrap.selectAll('.nv-series').classed('disabled', false);
+            return d;
+          });
+        }
+
+        state.disabled = data.map(function(d) { return !!d.disabled });
+        dispatch.stateChange(state);
+
+        selection.call(chart);
+      });
+
+      legend.dispatch.on('legendDblclick', function(d) {
+          //Double-clicking should always enable the current series and disable all others.
+          data.forEach(function(d) {
+             d.disabled = true;
+          });
+          d.disabled = false;
+
+          state.disabled = data.map(function(d) { return !!d.disabled });
+          dispatch.stateChange(state);
+          chart.update();
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined') {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        chart.update();
+      });
+
+      //============================================================
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  bars.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  bars.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.bars = bars;
+  chart.legend = legend;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+
+  d3.rebind(chart, bars, 'defined', 'isArea', 'x', 'y', 'size', 'xScale', 'yScale',
+    'xDomain', 'yDomain', 'xRange', 'yRange', 'forceX', 'forceY', 'interactive', 'clipEdge', 'clipVoronoi', 'id', 'interpolate', 'highlightPoint', 'clearHighlights');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.rightAlignYAxis = function(_) {
+    if(!arguments.length) return rightAlignYAxis;
+    rightAlignYAxis = _;
+    yAxis.orient( (_) ? 'right' : 'left');
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
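+
+// Hypothetical usage sketch for the historical bar chart above (assumes an
+// <svg id="chart"> element; the accessors and data variable are illustrative):
+//   nv.addGraph(function() {
+//     var chart = nv.models.historicalBarChart()
+//       .x(function(d) { return d[0] })  // e.g. [timestamp, value] pairs
+//       .y(function(d) { return d[1] });
+//     d3.select('#chart').datum([{key: 'Quantity', values: data}]).call(chart);
+//     return chart;
+//   });
+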
+nv.models.indentedTree = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0} //TODO: implement, maybe as margin on the containing div
+    , width = 960
+    , height = 500
+    , color = nv.utils.defaultColor()
+    , id = Math.floor(Math.random() * 10000)
+    , header = true
+    , filterZero = false
+    , noData = "No Data Available."
+    , childIndent = 20
+    , columns = [{key:'key', label: 'Name', type:'text'}] //TODO: consider functions like chart.addColumn, chart.removeColumn, instead of a block like this
+    , tableClass = null
+    , iconOpen = 'images/grey-plus.png' //TODO: consider removing this and replacing with a '+' or '-' unless user defines images
+    , iconClose = 'images/grey-minus.png'
+    , dispatch = d3.dispatch('elementClick', 'elementDblclick', 'elementMouseover', 'elementMouseout')
+    , getUrl = function(d) { return d.url }
+    ;
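+
+  // Example columns configuration (hypothetical keys; each entry may also carry
+  // width, format, classes and showCount, all of which are read below):
+  //   chart.columns([
+  //     {key: 'key',   label: 'Name', type: 'text',    width: '60%', showCount: true},
+  //     {key: 'count', label: 'Rows', type: 'numeric', format: d3.format(',')}
+  //   ]);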
+
+  //============================================================
+
+  var idx = 0;
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var depth = 1,
+          container = d3.select(this);
+
+      var tree = d3.layout.tree()
+          .children(function(d) { return d.values })
+          .size([height, childIndent]); //Not sure if this is needed now that the result is HTML
+
+      chart.update = function() { container.call(chart) };
+
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+      if (!data[0]) data[0] = {key: noData};
+
+      //------------------------------------------------------------
+
+
+      var nodes = tree.nodes(data[0]);
+
+      // nodes.map(function(d) {
+      //   d.id = i++;
+      // })
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = d3.select(this).selectAll('div').data([[nodes]]);
+      var wrapEnter = wrap.enter().append('div').attr('class', 'nvd3 nv-wrap nv-indentedtree');
+      var tableEnter = wrapEnter.append('table');
+      var table = wrap.select('table').attr('width', '100%').attr('class', tableClass);
+
+      //------------------------------------------------------------
+
+
+      if (header) {
+        var thead = tableEnter.append('thead');
+
+        var theadRow1 = thead.append('tr');
+
+        columns.forEach(function(column) {
+          theadRow1
+            .append('th')
+              .attr('width', column.width ? column.width : '10%')
+              .style('text-align', column.type == 'numeric' ? 'right' : 'left')
+            .append('span')
+              .text(column.label);
+        });
+      }
+
+
+      var tbody = table.selectAll('tbody')
+                    .data(function(d) { return d });
+      tbody.enter().append('tbody');
+
+
+
+      //compute max generations
+      depth = d3.max(nodes, function(node) { return node.depth });
+      tree.size([height, depth * childIndent]); //TODO: see if this is necessary at all
+
+
+      // Update the nodes…
+      var node = tbody.selectAll('tr')
+          // .data(function(d) { return d; }, function(d) { return d.id || (d.id == ++i)});
+          .data(function(d) { return d.filter(function(d) { return (filterZero && !d.children) ? filterZero(d) :  true; } )}, function(d,i) { return d.id || (d.id || ++idx)});
+          //.style('display', 'table-row'); //TODO: see if this does anything
+
+      node.exit().remove();
+
+      node.select('img.nv-treeicon')
+          .attr('src', icon)
+          .classed('folded', folded);
+
+      var nodeEnter = node.enter().append('tr');
+
+
+      columns.forEach(function(column, index) {
+
+        var nodeName = nodeEnter.append('td')
+            .style('padding-left', function(d) { return (index ? 0 : d.depth * childIndent + 12 + (icon(d) ? 0 : 16)) + 'px' }, 'important') //TODO: check why I did the ternary here
+            .style('text-align', column.type == 'numeric' ? 'right' : 'left');
+
+
+        if (index == 0) {
+          nodeName.append('img')
+              .classed('nv-treeicon', true)
+              .classed('nv-folded', folded)
+              .attr('src', icon)
+              .style('width', '14px')
+              .style('height', '14px')
+              .style('padding', '0 1px')
+              .style('display', function(d) { return icon(d) ? 'inline-block' : 'none'; })
+              .on('click', click);
+        }
+
+
+        nodeName.each(function(d) {
+          if (!index && getUrl(d)) {
+            d3.select(this)
+              .append('a')
+              .attr('href', getUrl)
+              .attr('class', d3.functor(column.classes))
+              .append('span');
+          } else {
+            d3.select(this)
+              .append('span');
+          }
+
+          d3.select(this).select('span')
+            .attr('class', d3.functor(column.classes))
+            .text(function(d) { return column.format ? (d[column.key] ? column.format(d[column.key]) : '-') : (d[column.key] || '-'); });
+        });
+
+        if (column.showCount) {
+          nodeName.append('span')
+              .attr('class', 'nv-childrenCount');
+
+          node.selectAll('span.nv-childrenCount').text(function(d) {
+                return ((d.values && d.values.length) || (d._values && d._values.length)) ?                                   //If this is a parent
+                    '(' + ((d.values && (d.values.filter(function(d) { return filterZero ? filterZero(d) :  true; }).length)) //If children are in values check its children and filter
+                    || (d._values && d._values.filter(function(d) { return filterZero ? filterZero(d) :  true; }).length)     //Otherwise, do the same, but with the other name, _values...
+                    || 0) + ')'                                                                                               //This is the catch-all in case there are no children after a filter
+                    : ''                                                                                                     //If this is not a parent, just give an empty string
+            });
+        }
+
+        // if (column.click)
+        //   nodeName.select('span').on('click', column.click);
+
+      });
+
+      node
+        .order()
+        .on('click', function(d) { 
+          dispatch.elementClick({
+            row: this, //TODO: decide whether or not this should be consistent with scatter/line events or should be an html link (a href)
+            data: d,
+            pos: [d.x, d.y]
+          });
+        })
+        .on('dblclick', function(d) { 
+          dispatch.elementDblclick({
+            row: this,
+            data: d,
+            pos: [d.x, d.y]
+          });
+        })
+        .on('mouseover', function(d) { 
+          dispatch.elementMouseover({
+            row: this,
+            data: d,
+            pos: [d.x, d.y]
+          });
+        })
+        .on('mouseout', function(d) { 
+          dispatch.elementMouseout({
+            row: this,
+            data: d,
+            pos: [d.x, d.y]
+          });
+        });
+
+
+
+
+      // Toggle children on click.
+      function click(d, _, unshift) {
+        d3.event.stopPropagation();
+
+        if(d3.event.shiftKey && !unshift) {
+          //If you shift-click, it'll toggle fold all the children, instead of itself
+          d3.event.shiftKey = false;
+          d.values && d.values.forEach(function(node){
+            if (node.values || node._values) {
+              click(node, 0, true);
+            }
+          });
+          return true;
+        }
+        if(!hasChildren(d)) {
+          //download file
+          //window.location.href = d.url;
+          return true;
+        }
+        if (d.values) {
+          d._values = d.values;
+          d.values = null;
+        } else {
+          d.values = d._values;
+          d._values = null;
+        }
+        chart.update();
+      }
+
+
+      function icon(d) {
+        return (d._values && d._values.length) ? iconOpen : (d.values && d.values.length) ? iconClose : '';
+      }
+
+      function folded(d) {
+        return (d._values && d._values.length);
+      }
+
+      function hasChildren(d) {
+        var values = d.values || d._values;
+
+        return (values && values.length);
+      }
+
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.id = function(_) {
+    if (!arguments.length) return id;
+    id = _;
+    return chart;
+  };
+
+  chart.header = function(_) {
+    if (!arguments.length) return header;
+    header = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.filterZero = function(_) {
+    if (!arguments.length) return filterZero;
+    filterZero = _;
+    return chart;
+  };
+
+  chart.columns = function(_) {
+    if (!arguments.length) return columns;
+    columns = _;
+    return chart;
+  };
+
+  chart.tableClass = function(_) {
+    if (!arguments.length) return tableClass;
+    tableClass = _;
+    return chart;
+  };
+
+  chart.iconOpen = function(_){
+    if (!arguments.length) return iconOpen;
+    iconOpen = _;
+    return chart;
+  };
+
+  chart.iconClose = function(_){
+    if (!arguments.length) return iconClose;
+    iconClose = _;
+    return chart;
+  };
+
+  chart.getUrl = function(_){
+    if (!arguments.length) return getUrl;
+    getUrl = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+};
+
+nv.models.legend = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 5, right: 0, bottom: 5, left: 0}
+    , width = 400
+    , height = 20
+    , getKey = function(d) { return d.key }
+    , color = nv.utils.defaultColor()
+    , align = true
+    , rightAlign = true
+    , updateState = true   //If true, legend will update data.disabled and trigger a 'stateChange' dispatch.
+    , radioButtonMode = false   //If true, clicking legend items will cause it to behave like a radio button. (only one can be selected at a time)
+    , dispatch = d3.dispatch('legendClick', 'legendDblclick', 'legendMouseover', 'legendMouseout', 'stateChange')
+    ;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          container = d3.select(this);
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-legend').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-legend').append('g');
+      var g = wrap.select('g');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+      var series = g.selectAll('.nv-series')
+          .data(function(d) { return d });
+      var seriesEnter = series.enter().append('g').attr('class', 'nv-series')
+          .on('mouseover', function(d,i) {
+            dispatch.legendMouseover(d,i);  //TODO: Make consistent with other event objects
+          })
+          .on('mouseout', function(d,i) {
+            dispatch.legendMouseout(d,i);
+          })
+          .on('click', function(d,i) {
+            dispatch.legendClick(d,i);
+            if (updateState) {
+               if (radioButtonMode) {
+                   //Radio button mode: set every series to disabled,
+                   //  and enable the clicked series.
+                   data.forEach(function(series) { series.disabled = true});
+                   d.disabled = false;
+               }
+               else {
+                   d.disabled = !d.disabled;
+                   if (data.every(function(series) { return series.disabled})) {
+                       //the default behavior of NVD3 legends is: if every single series
+                       // is disabled, turn all series back on.
+                       data.forEach(function(series) { series.disabled = false});
+                   }
+               }
+               dispatch.stateChange({
+                  disabled: data.map(function(d) { return !!d.disabled })
+               });
+            }
+          })
+          .on('dblclick', function(d,i) {
+            dispatch.legendDblclick(d,i);
+            if (updateState) {
+                //the default behavior of NVD3 legends, when double-clicking one,
+                // is to disable all other series and enable the double-clicked series.
+                data.forEach(function(series) {
+                   series.disabled = true;
+                });
+                d.disabled = false;
+                dispatch.stateChange({
+                    disabled: data.map(function(d) { return !!d.disabled })
+                });
+            }
+          });
+      seriesEnter.append('circle')
+          .style('stroke-width', 2)
+          .attr('class','nv-legend-symbol')
+          .attr('r', 5);
+      seriesEnter.append('text')
+          .attr('text-anchor', 'start')
+          .attr('class','nv-legend-text')
+          .attr('dy', '.32em')
+          .attr('dx', '8');
+      series.classed('disabled', function(d) { return d.disabled });
+      series.exit().remove();
+      series.select('circle')
+          .style('fill', function(d,i) { return d.color || color(d,i)})
+          .style('stroke', function(d,i) { return d.color || color(d, i) });
+      series.select('text').text(getKey);
+
+
+      //TODO: implement fixed-width and max-width options (max-width is especially useful with the align option)
+
+      // NEW ALIGNING CODE, TODO: clean up
+      if (align) {
+
+        var seriesWidths = [];
+        series.each(function(d,i) {
+              var legendText = d3.select(this).select('text');
+              var nodeTextLength;
+              try {
+                nodeTextLength = legendText.node().getComputedTextLength();
+                // If the legendText is display:none'd (nodeTextLength == 0), throw so we fall back to the approximation instead
+                if(nodeTextLength <= 0) throw Error();
+              }
+              catch(e) {
+                nodeTextLength = nv.utils.calcApproxTextWidth(legendText);
+              }
+
+              seriesWidths.push(nodeTextLength + 28); // 28 is ~ the width of the circle plus some padding
+            });
+
+        var seriesPerRow = 0;
+        var legendWidth = 0;
+        var columnWidths = [];
+
+        while ( legendWidth < availableWidth && seriesPerRow < seriesWidths.length) {
+          columnWidths[seriesPerRow] = seriesWidths[seriesPerRow];
+          legendWidth += seriesWidths[seriesPerRow++];
+        }
+        if (seriesPerRow === 0) seriesPerRow = 1; //minimum of one series per row
+
+
+        while ( legendWidth > availableWidth && seriesPerRow > 1 ) {
+          columnWidths = [];
+          seriesPerRow--;
+
+          for (var k = 0; k < seriesWidths.length; k++) {
+            if (seriesWidths[k] > (columnWidths[k % seriesPerRow] || 0) )
+              columnWidths[k % seriesPerRow] = seriesWidths[k];
+          }
+
+          legendWidth = columnWidths.reduce(function(prev, cur, index, array) {
+                          return prev + cur;
+                        });
+        }
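+
+        // Worked example (hypothetical numbers): with availableWidth = 200 and
+        // seriesWidths = [90, 80, 70], the first pass grows to three columns
+        // (legendWidth = 240), then this loop shrinks back to two columns with
+        // columnWidths = [90, 80] and legendWidth = 170, which fits.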
+
+        var xPositions = [];
+        for (var i = 0, curX = 0; i < seriesPerRow; i++) {
+            xPositions[i] = curX;
+            curX += columnWidths[i];
+        }
+
+        series
+            .attr('transform', function(d, i) {
+              return 'translate(' + xPositions[i % seriesPerRow] + ',' + (5 + Math.floor(i / seriesPerRow) * 20) + ')';
+            });
+
+        //position legend as far right as possible within the total width
+        if (rightAlign) {
+           g.attr('transform', 'translate(' + (width - margin.right - legendWidth) + ',' + margin.top + ')');
+        }
+        else {
+           g.attr('transform', 'translate(0' + ',' + margin.top + ')');
+        }
+
+        height = margin.top + margin.bottom + (Math.ceil(seriesWidths.length / seriesPerRow) * 20);
+
+      } else {
+
+        var ypos = 5,
+            newxpos = 5,
+            maxwidth = 0,
+            xpos;
+        series
+            .attr('transform', function(d, i) {
+              var length = d3.select(this).select('text').node().getComputedTextLength() + 28;
+              xpos = newxpos;
+
+              if (width < margin.left + margin.right + xpos + length) {
+                newxpos = xpos = 5;
+                ypos += 20;
+              }
+
+              newxpos += length;
+              if (newxpos > maxwidth) maxwidth = newxpos;
+
+              return 'translate(' + xpos + ',' + ypos + ')';
+            });
+
+        //position legend as far right as possible within the total width
+        g.attr('transform', 'translate(' + (width - margin.right - maxwidth) + ',' + margin.top + ')');
+
+        height = margin.top + margin.bottom + ypos + 15;
+
+      }
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.key = function(_) {
+    if (!arguments.length) return getKey;
+    getKey = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.align = function(_) {
+    if (!arguments.length) return align;
+    align = _;
+    return chart;
+  };
+
+  chart.rightAlign = function(_) {
+    if (!arguments.length) return rightAlign;
+    rightAlign = _;
+    return chart;
+  };
+
+  chart.updateState = function(_) {
+    if (!arguments.length) return updateState;
+    updateState = _;
+    return chart;
+  };
+
+  chart.radioButtonMode = function(_) {
+    if (!arguments.length) return radioButtonMode;
+    radioButtonMode = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
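+
+// Standalone usage sketch (hypothetical; the chart models above normally wire
+// the legend up themselves and relay its 'stateChange' dispatch):
+//   var legend = nv.models.legend().width(400).radioButtonMode(true);
+//   legend.dispatch.on('stateChange', function(e) { console.log(e.disabled); });
+//   svg.append('g').datum(series).call(legend);  // svg/series are placeholders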
+
+nv.models.line = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var scatter = nv.models.scatter()
+    ;
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 960
+    , height = 500
+    , color = nv.utils.defaultColor() // a function that returns a color
+    , getX = function(d) { return d.x } // accessor to get the x value from a data point
+    , getY = function(d) { return d.y } // accessor to get the y value from a data point
+    , defined = function(d,i) { return !isNaN(getY(d,i)) && getY(d,i) !== null } // allows a line to be not continuous when it is not defined
+    , isArea = function(d) { return d.area } // decides if a line is an area or just a line
+    , clipEdge = false // if true, masks lines within x and y scale
+    , x //can be accessed via chart.xScale()
+    , y //can be accessed via chart.yScale()
+    , interpolate = "linear" // controls the line interpolation
+    ;
+
+  scatter
+    .size(16) // default size
+    .sizeDomain([16,256]) //set to speed up calculation, needs to be unset if there is a custom size accessor
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var x0, y0; //used to store previous scales
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = scatter.xScale();
+      y = scatter.yScale();
+
+      x0 = x0 || x;
+      y0 = y0 || y;
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-line').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-line');
+      var defsEnter = wrapEnter.append('defs');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g')
+
+      gEnter.append('g').attr('class', 'nv-groups');
+      gEnter.append('g').attr('class', 'nv-scatterWrap');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+
+
+      scatter
+        .width(availableWidth)
+        .height(availableHeight)
+
+      var scatterWrap = wrap.select('.nv-scatterWrap');
+          //.datum(data); // Data automatically trickles down from the wrap
+
+      scatterWrap.call(scatter);
+
+
+
+      defsEnter.append('clipPath')
+          .attr('id', 'nv-edge-clip-' + scatter.id())
+        .append('rect');
+
+      wrap.select('#nv-edge-clip-' + scatter.id() + ' rect')
+          .attr('width', availableWidth)
+          .attr('height', (availableHeight > 0) ? availableHeight : 0);
+
+      g   .attr('clip-path', clipEdge ? 'url(#nv-edge-clip-' + scatter.id() + ')' : '');
+      scatterWrap
+          .attr('clip-path', clipEdge ? 'url(#nv-edge-clip-' + scatter.id() + ')' : '');
+
+
+
+
+      var groups = wrap.select('.nv-groups').selectAll('.nv-group')
+          .data(function(d) { return d }, function(d) { return d.key });
+      groups.enter().append('g')
+          .style('stroke-opacity', 1e-6)
+          .style('fill-opacity', 1e-6);
+
+      groups.exit().remove();
+
+      groups
+          .attr('class', function(d,i) { return 'nv-group nv-series-' + i })
+          .classed('hover', function(d) { return d.hover })
+          .style('fill', function(d,i){ return color(d, i) })
+          .style('stroke', function(d,i){ return color(d, i)});
+      groups
+          .style('stroke-opacity', 1)
+          .style('fill-opacity', .5);
+
+
+
+      var areaPaths = groups.selectAll('path.nv-area')
+          .data(function(d) { return isArea(d) ? [d] : [] }); // this is done differently than lines because I need to check if series is an area
+      areaPaths.enter().append('path')
+          .attr('class', 'nv-area')
+          .attr('d', function(d) {
+            return d3.svg.area()
+                .interpolate(interpolate)
+                .defined(defined)
+                .x(function(d,i) { return nv.utils.NaNtoZero(x0(getX(d,i))) })
+                .y0(function(d,i) { return nv.utils.NaNtoZero(y0(getY(d,i))) })
+                .y1(function(d,i) { return y0( y.domain()[0] <= 0 ? y.domain()[1] >= 0 ? 0 : y.domain()[1] : y.domain()[0] ) })
+                //.y1(function(d,i) { return y0(0) }) //assuming 0 is within y domain.. may need to tweak this
+                .apply(this, [d.values])
+          });
+      groups.exit().selectAll('path.nv-area')
+           .remove();
+
+      areaPaths
+          .attr('d', function(d) {
+            return d3.svg.area()
+                .interpolate(interpolate)
+                .defined(defined)
+                .x(function(d,i) { return nv.utils.NaNtoZero(x(getX(d,i))) })
+                .y0(function(d,i) { return nv.utils.NaNtoZero(y(getY(d,i))) })
+                .y1(function(d,i) { return y( y.domain()[0] <= 0 ? y.domain()[1] >= 0 ? 0 : y.domain()[1] : y.domain()[0] ) })
+                //.y1(function(d,i) { return y0(0) }) //assuming 0 is within y domain.. may need to tweak this
+                .apply(this, [d.values])
+          });
+
+
+
+      var linePaths = groups.selectAll('path.nv-line')
+          .data(function(d) { return [d.values] });
+      linePaths.enter().append('path')
+          .attr('class', 'nv-line')
+          .attr('d',
+            d3.svg.line()
+              .interpolate(interpolate)
+              .defined(defined)
+              .x(function(d,i) { return nv.utils.NaNtoZero(x0(getX(d,i))) })
+              .y(function(d,i) { return nv.utils.NaNtoZero(y0(getY(d,i))) })
+          );
+
+      linePaths
+          .attr('d',
+            d3.svg.line()
+              .interpolate(interpolate)
+              .defined(defined)
+              .x(function(d,i) { return nv.utils.NaNtoZero(x(getX(d,i))) })
+              .y(function(d,i) { return nv.utils.NaNtoZero(y(getY(d,i))) })
+          );
+
+
+
+      //store old scales for use in transitions on update
+      x0 = x.copy();
+      y0 = y.copy();
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = scatter.dispatch;
+  chart.scatter = scatter;
+
+  d3.rebind(chart, scatter, 'id', 'interactive', 'size', 'xScale', 'yScale', 'zScale', 'xDomain', 'yDomain', 'xRange', 'yRange',
+    'sizeDomain', 'forceX', 'forceY', 'forceSize', 'clipVoronoi', 'useVoronoi', 'clipRadius', 'padData','highlightPoint','clearHighlights');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    scatter.x(_);
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = _;
+    scatter.y(_);
+    return chart;
+  };
+
+  chart.clipEdge = function(_) {
+    if (!arguments.length) return clipEdge;
+    clipEdge = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    scatter.color(color);
+    return chart;
+  };
+
+  chart.interpolate = function(_) {
+    if (!arguments.length) return interpolate;
+    interpolate = _;
+    return chart;
+  };
+
+  chart.defined = function(_) {
+    if (!arguments.length) return defined;
+    defined = _;
+    return chart;
+  };
+
+  chart.isArea = function(_) {
+    if (!arguments.length) return isArea;
+    isArea = d3.functor(_);
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
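+
+// Usage sketch (hypothetical names): the line model takes the same series shape
+// as scatter; setting area: true on a series makes isArea() fill it down toward
+// zero (clamped to the y domain):
+//   var line = nv.models.line().interpolate('basis');
+//   g.datum([{key: 'Load', area: true, values: [{x: 0, y: 1}, {x: 1, y: 3}]}]).call(line);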
+
+nv.models.lineChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var lines = nv.models.line()
+    , xAxis = nv.models.axis()
+    , yAxis = nv.models.axis()
+    , legend = nv.models.legend()
+    , interactiveLayer = nv.interactiveGuideline()
+    ;
+
+  var margin = {top: 30, right: 20, bottom: 50, left: 60}
+    , color = nv.utils.defaultColor()
+    , width = null
+    , height = null
+    , showLegend = true
+    , showXAxis = true
+    , showYAxis = true
+    , rightAlignYAxis = false
+    , useInteractiveGuideline = false
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' at ' + x + '</p>'
+      }
+    , x
+    , y
+    , state = {}
+    , defaultState = null
+    , noData = 'No Data Available.'
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    , transitionDuration = 0
+    ;
+
+  xAxis
+    .orient('bottom')
+    .tickPadding(7)
+    ;
+  yAxis
+    .orient((rightAlignYAxis) ? 'right' : 'left')
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(lines.x()(e.point, e.pointIndex)),
+        y = yAxis.tickFormat()(lines.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, null, null, offsetElement);
+  };
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+
+      chart.update = function() { container.call(chart) };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      //------------------------------------------------------------
+      // Display noData message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = lines.xScale();
+      y = lines.yScale();
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-lineChart').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-lineChart').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append("rect").style("opacity",0);
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y nv-axis');
+      gEnter.append('g').attr('class', 'nv-linesWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+      gEnter.append('g').attr('class', 'nv-interactive');
+
+      g.select("rect")
+        .attr("width",availableWidth)
+        .attr("height",(availableHeight > 0) ? availableHeight : 0);
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width(availableWidth);
+
+        g.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        wrap.select('.nv-legendWrap')
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+      }
+
+      //------------------------------------------------------------
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      if (rightAlignYAxis) {
+          g.select(".nv-y.nv-axis")
+              .attr("transform", "translate(" + availableWidth + ",0)");
+      }
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+
+      //------------------------------------------------------------
+      //Set up interactive layer
+      if (useInteractiveGuideline) {
+        interactiveLayer
+           .width(availableWidth)
+           .height(availableHeight)
+           .margin({left:margin.left, top:margin.top})
+           .svgContainer(container)
+           .xScale(x);
+        wrap.select(".nv-interactive").call(interactiveLayer);
+      }
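+      // The guideline layer replaces per-point tooltips with a single vertical
+      // guide and one combined tooltip across all enabled series; e.g. (sketch):
+      //   nv.models.lineChart().useInteractiveGuideline(true)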
+
+
+      lines
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled }));
+
+
+      var linesWrap = g.select('.nv-linesWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+
+      linesWrap.call(lines);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      if (showXAxis) {
+        xAxis
+          .scale(x)
+          .ticks( availableWidth / 100 )
+          .tickSize(-availableHeight, 0);
+
+        g.select('.nv-x.nv-axis')
+            .attr('transform', 'translate(0,' + y.range()[0] + ')')
+            .call(xAxis);
+      }
+
+      if (showYAxis) {
+        yAxis
+          .scale(y)
+          .ticks( availableHeight / 36 )
+          .tickSize( -availableWidth, 0);
+
+        g.select('.nv-y.nv-axis')
+            .call(yAxis);
+      }
+      //------------------------------------------------------------
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      legend.dispatch.on('stateChange', function(newState) {
+          state = newState;
+          dispatch.stateChange(state);
+          chart.update();
+      });
+
+      interactiveLayer.dispatch.on('elementMousemove', function(e) {
+          lines.clearHighlights();
+          var singlePoint, pointIndex, pointXLocation, allData = [];
+          data
+          .filter(function(series, i) {
+            series.seriesIndex = i;
+            return !series.disabled;
+          })
+          .forEach(function(series,i) {
+              pointIndex = nv.interactiveBisect(series.values, e.pointXValue, chart.x());
+              lines.highlightPoint(i, pointIndex, true);
+              var point = series.values[pointIndex];
+              if (typeof point === 'undefined') return;
+              if (typeof singlePoint === 'undefined') singlePoint = point;
+              if (typeof pointXLocation === 'undefined') pointXLocation = chart.xScale()(chart.x()(point,pointIndex));
+              allData.push({
+                  key: series.key,
+                  value: chart.y()(point, pointIndex),
+                  color: color(series,series.seriesIndex)
+              });
+          });
+          //Highlight the tooltip entry based on which point the mouse is closest to.
+          if (allData.length > 2) {
+            var yValue = chart.yScale().invert(e.mouseY);
+            var domainExtent = Math.abs(chart.yScale().domain()[0] - chart.yScale().domain()[1]);
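+            // Treat entries within 3% of the y-domain span as "near" the cursor when choosing which tooltip row to highlight.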
+            var threshold = 0.03 * domainExtent;
+            var indexToHighlight = nv.nearestValueIndex(allData.map(function(d){return d.value}),yValue,threshold);
+            if (indexToHighlight !== null)
+              allData[indexToHighlight].highlight = true;
+          }
+
+          var xValue = xAxis.tickFormat()(chart.x()(singlePoint,pointIndex));
+          interactiveLayer.tooltip
+                  .position({left: pointXLocation + margin.left, top: e.mouseY + margin.top})
+                  .chartContainer(that.parentNode)
+                  .enabled(tooltips)
+                  .valueFormatter(function(d,i) {
+                     return yAxis.tickFormat()(d);
+                  })
+                  .data(
+                      {
+                        value: xValue,
+                        series: allData
+                      }
+                  )();
+
+          interactiveLayer.renderGuideLine(pointXLocation);
+
+      });
+
+      interactiveLayer.dispatch.on("elementMouseout",function(e) {
+          dispatch.tooltipHide();
+          lines.clearHighlights();
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined' && data.length === e.disabled.length) {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        chart.update();
+      });
+
+      //============================================================
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  lines.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  lines.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.lines = lines;
+  chart.legend = legend;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+  chart.interactiveLayer = interactiveLayer;
+
+  d3.rebind(chart, lines, 'defined', 'isArea', 'x', 'y', 'size', 'xScale', 'yScale', 'xDomain', 'yDomain', 'xRange', 'yRange'
+    , 'forceX', 'forceY', 'interactive', 'clipEdge', 'clipVoronoi', 'useVoronoi','id', 'interpolate');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.rightAlignYAxis = function(_) {
+    if(!arguments.length) return rightAlignYAxis;
+    rightAlignYAxis = _;
+    yAxis.orient( (_) ? 'right' : 'left');
+    return chart;
+  };
+
+  chart.useInteractiveGuideline = function(_) {
+    if(!arguments.length) return useInteractiveGuideline;
+    useInteractiveGuideline = _;
+    if (_ === true) {
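+       // The guideline layer supplies its own hover handling, so per-point interactivity and voronoi hit-testing are switched off.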
+       chart.interactive(false);
+       chart.useVoronoi(false);
+    }
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
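+
+//============================================================
+// Usage sketch (illustrative, not part of the upstream file). The factory
+// above follows nvd3's standard pattern; assuming it is nv.models.lineChart,
+// a typical call wires accessors, axis formats and data like this:
+//
+//   nv.addGraph(function() {
+//     var chart = nv.models.lineChart()
+//         .x(function(d) { return d.x })
+//         .y(function(d) { return d.y })
+//         .useInteractiveGuideline(true);   // single guideline tooltip
+//     chart.xAxis.tickFormat(d3.format(',r'));
+//     chart.yAxis.tickFormat(d3.format('.02f'));
+//     d3.select('#chart svg')               // '#chart' is an assumed container
+//         .datum([{ key: 'Series A', values: [{x: 0, y: 1}, {x: 1, y: 2}] }])
+//         .call(chart);
+//     return chart;
+//   });
+//============================================================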
+
+nv.models.linePlusBarChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var lines = nv.models.line()
+    , bars = nv.models.historicalBar()
+    , xAxis = nv.models.axis()
+    , y1Axis = nv.models.axis()
+    , y2Axis = nv.models.axis()
+    , legend = nv.models.legend()
+    ;
+
+  var margin = {top: 30, right: 60, bottom: 50, left: 60}
+    , width = null
+    , height = null
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , color = nv.utils.defaultColor()
+    , showLegend = true
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' at ' + x + '</p>';
+      }
+    , x
+    , y1
+    , y2
+    , state = {}
+    , defaultState = null
+    , noData = "No Data Available."
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    ;
+
+  bars
+    .padData(true)
+    ;
+  lines
+    .clipEdge(false)
+    .padData(true)
+    ;
+  xAxis
+    .orient('bottom')
+    .tickPadding(7)
+    .highlightZero(false)
+    ;
+  y1Axis
+    .orient('left')
+    ;
+  y2Axis
+    .orient('right')
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+      var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+          top = e.pos[1] + ( offsetElement.offsetTop || 0),
+          x = xAxis.tickFormat()(lines.x()(e.point, e.pointIndex)),
+          y = (e.series.bar ? y1Axis : y2Axis).tickFormat()(lines.y()(e.point, e.pointIndex)),
+          content = tooltip(e.series.key, x, y, e, chart);
+
+      nv.tooltip.show([left, top], content, e.value < 0 ? 'n' : 's', null, offsetElement);
+    }
+    ;
+
+  //------------------------------------------------------------
+
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      chart.update = function() { container.call(chart); };
+      // chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      var dataBars = data.filter(function(d) { return !d.disabled && d.bar });
+      var dataLines = data.filter(function(d) { return !d.bar }); // removed the !d.disabled clause here to fix Issue #240
+
+      //x = xAxis.scale();
+      x = dataLines.filter(function(d) { return !d.disabled; }).length &&
+          dataLines.filter(function(d) { return !d.disabled; })[0].values.length
+              ? lines.xScale() : bars.xScale();
+      //x = dataLines.filter(function(d) { return !d.disabled; }).length ? lines.xScale() : bars.xScale(); //old code before change above
+      y1 = bars.yScale();
+      y2 = lines.yScale();
+
+      //------------------------------------------------------------
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = d3.select(this).selectAll('g.nv-wrap.nv-linePlusBar').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-linePlusBar').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y1 nv-axis');
+      gEnter.append('g').attr('class', 'nv-y2 nv-axis');
+      gEnter.append('g').attr('class', 'nv-barsWrap');
+      gEnter.append('g').attr('class', 'nv-linesWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width( availableWidth / 2 );
+
+        g.select('.nv-legendWrap')
+            .datum(data.map(function(series) {
+              series.originalKey = series.originalKey === undefined ? series.key : series.originalKey;
+              series.key = series.originalKey + (series.bar ? ' (left axis)' : ' (right axis)');
+              return series;
+            }))
+          .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        g.select('.nv-legendWrap')
+            .attr('transform', 'translate(' + ( availableWidth / 2 ) + ',' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+
+      lines
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled && !data[i].bar }))
+
+      bars
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled && data[i].bar }))
+
+
+
+      var barsWrap = g.select('.nv-barsWrap')
+          .datum(dataBars.length ? dataBars : [{values:[]}])
+
+      var linesWrap = g.select('.nv-linesWrap')
+          .datum(dataLines[0] && !dataLines[0].disabled ? dataLines : [{values:[]}] );
+          //.datum(!dataLines[0].disabled ? dataLines : [{values:dataLines[0].values.map(function(d) { return [d[0], null] }) }] );
+
+      d3.transition(barsWrap).call(bars);
+      d3.transition(linesWrap).call(lines);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      xAxis
+        .scale(x)
+        .ticks( availableWidth / 100 )
+        .tickSize(-availableHeight, 0);
+
+      g.select('.nv-x.nv-axis')
+          .attr('transform', 'translate(0,' + y1.range()[0] + ')');
+      d3.transition(g.select('.nv-x.nv-axis'))
+          .call(xAxis);
+
+
+      y1Axis
+        .scale(y1)
+        .ticks( availableHeight / 36 )
+        .tickSize(-availableWidth, 0);
+
+      d3.transition(g.select('.nv-y1.nv-axis'))
+          .style('opacity', dataBars.length ? 1 : 0)
+          .call(y1Axis);
+
+
+      y2Axis
+        .scale(y2)
+        .ticks( availableHeight / 36 )
+        .tickSize(dataBars.length ? 0 : -availableWidth, 0); // Show the y2 rules only if y1 has none
+
+      g.select('.nv-y2.nv-axis')
+          .style('opacity', dataLines.length ? 1 : 0)
+          .attr('transform', 'translate(' + availableWidth + ',0)');
+          //.attr('transform', 'translate(' + x.range()[1] + ',0)');
+
+      d3.transition(g.select('.nv-y2.nv-axis'))
+          .call(y2Axis);
+
+      //------------------------------------------------------------
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      legend.dispatch.on('stateChange', function(newState) { 
+        state = newState;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+
+      // Update chart from a state object passed to event handler
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined') {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        chart.update();
+      });
+
+      //============================================================
+
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  lines.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  lines.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  bars.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  bars.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.legend = legend;
+  chart.lines = lines;
+  chart.bars = bars;
+  chart.xAxis = xAxis;
+  chart.y1Axis = y1Axis;
+  chart.y2Axis = y2Axis;
+
+  d3.rebind(chart, lines, 'defined', 'size', 'clipVoronoi', 'interpolate');
+  //TODO: consider rebinding x, y and some other stuff, and simply do something like bars.x(lines.x()), etc.
+  //d3.rebind(chart, lines, 'x', 'y', 'size', 'xDomain', 'yDomain', 'xRange', 'yRange', 'forceX', 'forceY', 'interactive', 'clipEdge', 'clipVoronoi', 'id');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    lines.x(_);
+    bars.x(_);
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = _;
+    lines.y(_);
+    bars.y(_);
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
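+
+//============================================================
+// Usage sketch (illustrative, not part of the upstream file): in
+// linePlusBarChart, series flagged with bar:true render as bars against the
+// left (y1) axis; the rest render as lines against the right (y2) axis.
+//
+//   var chart = nv.models.linePlusBarChart()
+//       .x(function(d,i) { return i })   // historicalBar works best with index x
+//       .y(function(d) { return d[1] });
+//   d3.select('#chart svg')              // '#chart' is an assumed container
+//       .datum([
+//         { key: 'Volume', bar: true, values: [[0, 10], [1, 12]] },
+//         { key: 'Price',             values: [[0, 100], [1, 98]] }
+//       ])
+//       .call(chart);
+//============================================================
+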
+nv.models.lineWithFocusChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var lines = nv.models.line()
+    , lines2 = nv.models.line()
+    , xAxis = nv.models.axis()
+    , yAxis = nv.models.axis()
+    , x2Axis = nv.models.axis()
+    , y2Axis = nv.models.axis()
+    , legend = nv.models.legend()
+    , brush = d3.svg.brush()
+    ;
+
+  var margin = {top: 30, right: 30, bottom: 30, left: 60}
+    , margin2 = {top: 0, right: 30, bottom: 20, left: 60}
+    , color = nv.utils.defaultColor()
+    , width = null
+    , height = null
+    , height2 = 100
+    , x
+    , y
+    , x2
+    , y2
+    , showLegend = true
+    , brushExtent = null
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' at ' + x + '</p>'
+      }
+    , noData = "No Data Available."
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'brush')
+    , transitionDuration = 0
+    ;
+
+  lines
+    .clipEdge(true)
+    ;
+  lines2
+    .interactive(false)
+    ;
+  xAxis
+    .orient('bottom')
+    .tickPadding(5)
+    ;
+  yAxis
+    .orient('left')
+    ;
+  x2Axis
+    .orient('bottom')
+    .tickPadding(5)
+    ;
+  y2Axis
+    .orient('left')
+    ;
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(lines.x()(e.point, e.pointIndex)),
+        y = yAxis.tickFormat()(lines.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, null, null, offsetElement);
+  };
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight1 = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom - height2,
+          availableHeight2 = height2 - margin2.top - margin2.bottom;
+
+      chart.update = function() { container.call(chart) };
+      chart.container = this;
+
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight1 / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = lines.xScale();
+      y = lines.yScale();
+      x2 = lines2.xScale();
+      y2 = lines2.yScale();
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-lineWithFocusChart').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-lineWithFocusChart').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+
+      var focusEnter = gEnter.append('g').attr('class', 'nv-focus');
+      focusEnter.append('g').attr('class', 'nv-x nv-axis');
+      focusEnter.append('g').attr('class', 'nv-y nv-axis');
+      focusEnter.append('g').attr('class', 'nv-linesWrap');
+
+      var contextEnter = gEnter.append('g').attr('class', 'nv-context');
+      contextEnter.append('g').attr('class', 'nv-x nv-axis');
+      contextEnter.append('g').attr('class', 'nv-y nv-axis');
+      contextEnter.append('g').attr('class', 'nv-linesWrap');
+      contextEnter.append('g').attr('class', 'nv-brushBackground');
+      contextEnter.append('g').attr('class', 'nv-x nv-brush');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width(availableWidth);
+
+        g.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight1 = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom - height2;
+        }
+
+        g.select('.nv-legendWrap')
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      lines
+        .width(availableWidth)
+        .height(availableHeight1)
+        .color(
+          data
+            .map(function(d,i) {
+              return d.color || color(d, i);
+            })
+            .filter(function(d,i) {
+              return !data[i].disabled;
+          })
+        );
+
+      lines2
+        .defined(lines.defined())
+        .width(availableWidth)
+        .height(availableHeight2)
+        .color(
+          data
+            .map(function(d,i) {
+              return d.color || color(d, i);
+            })
+            .filter(function(d,i) {
+              return !data[i].disabled;
+          })
+        );
+
+      g.select('.nv-context')
+          .attr('transform', 'translate(0,' + ( availableHeight1 + margin.bottom + margin2.top) + ')')
+
+      var contextLinesWrap = g.select('.nv-context .nv-linesWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+
+      d3.transition(contextLinesWrap).call(lines2);
+
+      //------------------------------------------------------------
+
+
+      /*
+      var focusLinesWrap = g.select('.nv-focus .nv-linesWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+
+      d3.transition(focusLinesWrap).call(lines);
+     */
+
+
+      //------------------------------------------------------------
+      // Setup Main (Focus) Axes
+
+      xAxis
+        .scale(x)
+        .ticks( availableWidth / 100 )
+        .tickSize(-availableHeight1, 0);
+
+      yAxis
+        .scale(y)
+        .ticks( availableHeight1 / 36 )
+        .tickSize( -availableWidth, 0);
+
+      g.select('.nv-focus .nv-x.nv-axis')
+          .attr('transform', 'translate(0,' + availableHeight1 + ')');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Brush
+
+      brush
+        .x(x2)
+        .on('brush', function() {
+            //When brushing, turn off transitions because the chart needs to update immediately.
+            var oldTransition = chart.transitionDuration();
+            chart.transitionDuration(0); 
+            onBrush();
+            chart.transitionDuration(oldTransition);
+        });
+
+      if (brushExtent) brush.extent(brushExtent);
+
+      var brushBG = g.select('.nv-brushBackground').selectAll('g')
+          .data([brushExtent || brush.extent()])
+
+      var brushBGenter = brushBG.enter()
+          .append('g');
+
+      brushBGenter.append('rect')
+          .attr('class', 'left')
+          .attr('x', 0)
+          .attr('y', 0)
+          .attr('height', availableHeight2);
+
+      brushBGenter.append('rect')
+          .attr('class', 'right')
+          .attr('x', 0)
+          .attr('y', 0)
+          .attr('height', availableHeight2);
+
+      var gBrush = g.select('.nv-x.nv-brush')
+          .call(brush);
+      gBrush.selectAll('rect')
+          //.attr('y', -5)
+          .attr('height', availableHeight2);
+      gBrush.selectAll('.resize').append('path').attr('d', resizePath);
+
+      onBrush();
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Secondary (Context) Axes
+
+      x2Axis
+        .scale(x2)
+        .ticks( availableWidth / 100 )
+        .tickSize(-availableHeight2, 0);
+
+      g.select('.nv-context .nv-x.nv-axis')
+          .attr('transform', 'translate(0,' + y2.range()[0] + ')');
+      d3.transition(g.select('.nv-context .nv-x.nv-axis'))
+          .call(x2Axis);
+
+
+      y2Axis
+        .scale(y2)
+        .ticks( availableHeight2 / 36 )
+        .tickSize( -availableWidth, 0);
+
+      d3.transition(g.select('.nv-context .nv-y.nv-axis'))
+          .call(y2Axis);
+
+      g.select('.nv-context .nv-x.nv-axis')
+          .attr('transform', 'translate(0,' + y2.range()[0] + ')');
+
+      //------------------------------------------------------------
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      legend.dispatch.on('stateChange', function(newState) { 
+        chart.update();
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+      //============================================================
+
+
+      //============================================================
+      // Functions
+      //------------------------------------------------------------
+
+      // Taken from crossfilter (http://square.github.com/crossfilter/)
+      function resizePath(d) {
+        var e = +(d == 'e'),
+            x = e ? 1 : -1,
+            y = availableHeight2 / 3;
+        return 'M' + (.5 * x) + ',' + y
+            + 'A6,6 0 0 ' + e + ' ' + (6.5 * x) + ',' + (y + 6)
+            + 'V' + (2 * y - 6)
+            + 'A6,6 0 0 ' + e + ' ' + (.5 * x) + ',' + (2 * y)
+            + 'Z'
+            + 'M' + (2.5 * x) + ',' + (y + 8)
+            + 'V' + (2 * y - 8)
+            + 'M' + (4.5 * x) + ',' + (y + 8)
+            + 'V' + (2 * y - 8);
+      }
+
+
+      function updateBrushBG() {
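+        // Resize the shaded masks on either side of the brush so they cover the unselected portions of the context pane.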
+        if (!brush.empty()) brush.extent(brushExtent);
+        brushBG
+            .data([brush.empty() ? x2.domain() : brushExtent])
+            .each(function(d,i) {
+              var leftWidth = x2(d[0]) - x2.range()[0],
+                  rightWidth = x2.range()[1] - x2(d[1]);
+              d3.select(this).select('.left')
+                .attr('width',  leftWidth < 0 ? 0 : leftWidth);
+
+              d3.select(this).select('.right')
+                .attr('x', x2(d[1]))
+                .attr('width', rightWidth < 0 ? 0 : rightWidth);
+            });
+      }
+
+
+      function onBrush() {
+        brushExtent = brush.empty() ? null : brush.extent();
+        var extent = brush.empty() ? x2.domain() : brush.extent();
+
+        //The brush extent cannot be less than one.  If it is, don't update the line chart.
+        if (Math.abs(extent[0] - extent[1]) <= 1) {
+          return;
+        }
+
+        dispatch.brush({extent: extent, brush: brush});
+
+
+        updateBrushBG();
+
+        // Update Main (Focus)
+        var focusLinesWrap = g.select('.nv-focus .nv-linesWrap')
+            .datum(
+              data
+                .filter(function(d) { return !d.disabled })
+                .map(function(d,i) {
+                  return {
+                    key: d.key,
+                    values: d.values.filter(function(d,i) {
+                      return lines.x()(d,i) >= extent[0] && lines.x()(d,i) <= extent[1];
+                    })
+                  }
+                })
+            );
+        focusLinesWrap.call(lines);
+
+
+        // Update Main (Focus) Axes
+        g.select('.nv-focus .nv-x.nv-axis')
+            .call(xAxis);
+        g.select('.nv-focus .nv-y.nv-axis')
+            .call(yAxis);
+      }
+
+      //============================================================
+
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  lines.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  lines.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.legend = legend;
+  chart.lines = lines;
+  chart.lines2 = lines2;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+  chart.x2Axis = x2Axis;
+  chart.y2Axis = y2Axis;
+
+  d3.rebind(chart, lines, 'defined', 'isArea', 'size', 'xDomain', 'yDomain', 'xRange', 'yRange', 'forceX', 'forceY', 'interactive', 'clipEdge', 'clipVoronoi', 'id');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.x = function(_) {
+    if (!arguments.length) return lines.x;
+    lines.x(_);
+    lines2.x(_);
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return lines.y;
+    lines.y(_);
+    lines2.y(_);
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.margin2 = function(_) {
+    if (!arguments.length) return margin2;
+    margin2 = _;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.height2 = function(_) {
+    if (!arguments.length) return height2;
+    height2 = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.interpolate = function(_) {
+    if (!arguments.length) return lines.interpolate();
+    lines.interpolate(_);
+    lines2.interpolate(_);
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  // The chart has multiple similar axes; to prevent code duplication, the shared axis functions are linked manually below.
+  chart.xTickFormat = function(_) {
+    if (!arguments.length) return xAxis.tickFormat();
+    xAxis.tickFormat(_);
+    x2Axis.tickFormat(_);
+    return chart;
+  };
+
+  chart.yTickFormat = function(_) {
+    if (!arguments.length) return yAxis.tickFormat();
+    yAxis.tickFormat(_);
+    y2Axis.tickFormat(_);
+    return chart;
+  };
+  
+  chart.brushExtent = function(_) {
+    if (!arguments.length) return brushExtent;
+    brushExtent = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
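+
+//============================================================
+// Usage sketch (illustrative, not part of the upstream file): the focus
+// (main) pane of lineWithFocusChart shows whatever x-range is brushed in the
+// context (mini) pane; brushExtent() pre-selects a window.
+//
+//   var chart = nv.models.lineWithFocusChart()
+//       .brushExtent([2, 8]);            // optional initial focus window
+//   chart.xTickFormat(d3.format(',f'));  // applied to both focus and context
+//   d3.select('#chart svg')              // '#chart' is an assumed container
+//       .datum([{ key: 'Series A',
+//                 values: d3.range(20).map(function(i) {
+//                   return { x: i, y: Math.sin(i / 3) };
+//                 }) }])
+//       .call(chart);
+//============================================================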
+
+nv.models.linePlusBarWithFocusChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var lines = nv.models.line()
+    , lines2 = nv.models.line()
+    , bars = nv.models.historicalBar()
+    , bars2 = nv.models.historicalBar()
+    , xAxis = nv.models.axis()
+    , x2Axis = nv.models.axis()
+    , y1Axis = nv.models.axis()
+    , y2Axis = nv.models.axis()
+    , y3Axis = nv.models.axis()
+    , y4Axis = nv.models.axis()
+    , legend = nv.models.legend()
+    , brush = d3.svg.brush()
+    ;
+
+  var margin = {top: 30, right: 30, bottom: 30, left: 60}
+    , margin2 = {top: 0, right: 30, bottom: 20, left: 60}
+    , width = null
+    , height = null
+    , height2 = 100
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , color = nv.utils.defaultColor()
+    , showLegend = true
+    , extent
+    , brushExtent = null
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' at ' + x + '</p>';
+      }
+    , x
+    , x2
+    , y1
+    , y2
+    , y3
+    , y4
+    , noData = "No Data Available."
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'brush')
+    , transitionDuration = 0
+    ;
+
+  lines
+    .clipEdge(true)
+    ;
+  lines2
+    .interactive(false)
+    ;
+  xAxis
+    .orient('bottom')
+    .tickPadding(5)
+    ;
+  y1Axis
+    .orient('left')
+    ;
+  y2Axis
+    .orient('right')
+    ;
+  x2Axis
+    .orient('bottom')
+    .tickPadding(5)
+    ;
+  y3Axis
+    .orient('left')
+    ;
+  y4Axis
+    .orient('right')
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    if (extent) {
+        e.pointIndex += Math.ceil(extent[0]);
+    }
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(lines.x()(e.point, e.pointIndex)),
+        y = (e.series.bar ? y1Axis : y2Axis).tickFormat()(lines.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, e.value < 0 ? 'n' : 's', null, offsetElement);
+  };
+
+  //------------------------------------------------------------
+
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight1 = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom - height2,
+          availableHeight2 = height2 - margin2.top - margin2.bottom;
+
+      chart.update = function() { container.call(chart); };
+      chart.container = this;
+
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight1 / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      var dataBars = data.filter(function(d) { return !d.disabled && d.bar });
+      var dataLines = data.filter(function(d) { return !d.bar }); // removed the !d.disabled clause here to fix Issue #240
+
+      x = bars.xScale();
+      x2 = x2Axis.scale();
+      y1 = bars.yScale();
+      y2 = lines.yScale();
+      y3 = bars2.yScale();
+      y4 = lines2.yScale();
+
+      var series1 = data
+        .filter(function(d) { return !d.disabled && d.bar })
+        .map(function(d) {
+          return d.values.map(function(d,i) {
+            return { x: getX(d,i), y: getY(d,i) }
+          })
+        });
+
+      var series2 = data
+        .filter(function(d) { return !d.disabled && !d.bar })
+        .map(function(d) {
+          return d.values.map(function(d,i) {
+            return { x: getX(d,i), y: getY(d,i) }
+          })
+        });
+
+      x   .range([0, availableWidth]);
+      
+      x2  .domain(d3.extent(d3.merge(series1.concat(series2)), function(d) { return d.x } ))
+          .range([0, availableWidth]);
+
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-linePlusBar').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-linePlusBar').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+      
+      var focusEnter = gEnter.append('g').attr('class', 'nv-focus');
+      focusEnter.append('g').attr('class', 'nv-x nv-axis');
+      focusEnter.append('g').attr('class', 'nv-y1 nv-axis');
+      focusEnter.append('g').attr('class', 'nv-y2 nv-axis');
+      focusEnter.append('g').attr('class', 'nv-barsWrap');
+      focusEnter.append('g').attr('class', 'nv-linesWrap');
+
+      var contextEnter = gEnter.append('g').attr('class', 'nv-context');
+      contextEnter.append('g').attr('class', 'nv-x nv-axis');
+      contextEnter.append('g').attr('class', 'nv-y1 nv-axis');
+      contextEnter.append('g').attr('class', 'nv-y2 nv-axis');
+      contextEnter.append('g').attr('class', 'nv-barsWrap');
+      contextEnter.append('g').attr('class', 'nv-linesWrap');
+      contextEnter.append('g').attr('class', 'nv-brushBackground');
+      contextEnter.append('g').attr('class', 'nv-x nv-brush');
+
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width( availableWidth / 2 );
+
+        g.select('.nv-legendWrap')
+            .datum(data.map(function(series) {
+              series.originalKey = series.originalKey === undefined ? series.key : series.originalKey;
+              series.key = series.originalKey + (series.bar ? ' (left axis)' : ' (right axis)');
+              return series;
+            }))
+          .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight1 = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom - height2;
+        }
+
+        g.select('.nv-legendWrap')
+            .attr('transform', 'translate(' + ( availableWidth / 2 ) + ',' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+
+      //------------------------------------------------------------
+      // Context Components
+
+      bars2
+        .width(availableWidth)
+        .height(availableHeight2)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled && data[i].bar }));
+
+      lines2
+        .width(availableWidth)
+        .height(availableHeight2)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled && !data[i].bar }));
+        
+      var bars2Wrap = g.select('.nv-context .nv-barsWrap')
+          .datum(dataBars.length ? dataBars : [{values:[]}]);
+
+      var lines2Wrap = g.select('.nv-context .nv-linesWrap')
+          .datum(dataLines[0] && !dataLines[0].disabled ? dataLines : [{values:[]}]);
+          
+      g.select('.nv-context')
+          .attr('transform', 'translate(0,' + ( availableHeight1 + margin.bottom + margin2.top) + ')')
+
+      bars2Wrap.call(bars2);
+      lines2Wrap.call(lines2);
+
+      //------------------------------------------------------------
+
+
+
+      //------------------------------------------------------------
+      // Setup Brush
+
+      brush
+        .x(x2)
+        .on('brush', onBrush);
+
+      if (brushExtent) brush.extent(brushExtent);
+
+      var brushBG = g.select('.nv-brushBackground').selectAll('g')
+          .data([brushExtent || brush.extent()])
+
+      var brushBGenter = brushBG.enter()
+          .append('g');
+
+      brushBGenter.append('rect')
+          .attr('class', 'left')
+          .attr('x', 0)
+          .attr('y', 0)
+          .attr('height', availableHeight2);
+
+      brushBGenter.append('rect')
+          .attr('class', 'right')
+          .attr('x', 0)
+          .attr('y', 0)
+          .attr('height', availableHeight2);
+
+      var gBrush = g.select('.nv-x.nv-brush')
+          .call(brush);
+      gBrush.selectAll('rect')
+          //.attr('y', -5)
+          .attr('height', availableHeight2);
+      gBrush.selectAll('.resize').append('path').attr('d', resizePath);
+
+      //------------------------------------------------------------
+
+      //------------------------------------------------------------
+      // Setup Secondary (Context) Axes
+
+      x2Axis
+        .ticks( availableWidth / 100 )
+        .tickSize(-availableHeight2, 0);
+
+      g.select('.nv-context .nv-x.nv-axis')
+          .attr('transform', 'translate(0,' + y3.range()[0] + ')');
+      g.select('.nv-context .nv-x.nv-axis')
+          .call(x2Axis);
+
+
+      y3Axis
+        .scale(y3)
+        .ticks( availableHeight2 / 36 )
+        .tickSize( -availableWidth, 0);
+
+      g.select('.nv-context .nv-y1.nv-axis')
+          .style('opacity', dataBars.length ? 1 : 0)
+          .attr('transform', 'translate(0,' + x2.range()[0] + ')');
+          
+      g.select('.nv-context .nv-y1.nv-axis')
+          .call(y3Axis);
+          
+
+      y4Axis
+        .scale(y4)
+        .ticks( availableHeight2 / 36 )
+        .tickSize(dataBars.length ? 0 : -availableWidth, 0); // Show the y2 rules only if y1 has none
+
+      g.select('.nv-context .nv-y2.nv-axis')
+          .style('opacity', dataLines.length ? 1 : 0)
+          .attr('transform', 'translate(' + x2.range()[1] + ',0)');
+
+      g.select('.nv-context .nv-y2.nv-axis')
+          .call(y4Axis);
+          
+      //------------------------------------------------------------
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      legend.dispatch.on('stateChange', function(newState) { 
+        chart.update();
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+      //============================================================
+
+
+      //============================================================
+      // Functions
+      //------------------------------------------------------------
+
+      // Taken from crossfilter (http://square.github.com/crossfilter/)
+      function resizePath(d) {
+        var e = +(d == 'e'),
+            x = e ? 1 : -1,
+            y = availableHeight2 / 3;
+        return 'M' + (.5 * x) + ',' + y
+            + 'A6,6 0 0 ' + e + ' ' + (6.5 * x) + ',' + (y + 6)
+            + 'V' + (2 * y - 6)
+            + 'A6,6 0 0 ' + e + ' ' + (.5 * x) + ',' + (2 * y)
+            + 'Z'
+            + 'M' + (2.5 * x) + ',' + (y + 8)
+            + 'V' + (2 * y - 8)
+            + 'M' + (4.5 * x) + ',' + (y + 8)
+            + 'V' + (2 * y - 8);
+      }
+
+
+      function updateBrushBG() {
+        if (!brush.empty()) brush.extent(brushExtent);
+        brushBG
+            .data([brush.empty() ? x2.domain() : brushExtent])
+            .each(function(d,i) {
+              var leftWidth = x2(d[0]) - x2.range()[0],
+                  rightWidth = x2.range()[1] - x2(d[1]);
+              d3.select(this).select('.left')
+                .attr('width',  leftWidth < 0 ? 0 : leftWidth);
+
+              d3.select(this).select('.right')
+                .attr('x', x2(d[1]))
+                .attr('width', rightWidth < 0 ? 0 : rightWidth);
+            });
+      }
+
+
+      function onBrush() {
+        brushExtent = brush.empty() ? null : brush.extent();
+        extent = brush.empty() ? x2.domain() : brush.extent();
+
+
+        dispatch.brush({extent: extent, brush: brush});
+
+        updateBrushBG();
+
+
+        //------------------------------------------------------------
+        // Prepare Main (Focus) Bars and Lines
+        
+        bars
+        .width(availableWidth)
+        .height(availableHeight1)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled && data[i].bar }));
+
+
+        lines
+        .width(availableWidth)
+        .height(availableHeight1)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled && !data[i].bar }));
+
+        var focusBarsWrap = g.select('.nv-focus .nv-barsWrap')
+            .datum(!dataBars.length ? [{values:[]}] :
+              dataBars
+                .map(function(d,i) {
+                  return {
+                    key: d.key,
+                    values: d.values.filter(function(d,i) {
+                      return bars.x()(d,i) >= extent[0] && bars.x()(d,i) <= extent[1];
+                    })
+                  }
+                })
+            );
+        
+        var focusLinesWrap = g.select('.nv-focus .nv-linesWrap')
+            .datum(!dataLines[0] || dataLines[0].disabled ? [{values:[]}] :
+              dataLines
+                .map(function(d,i) {
+                  return {
+                    key: d.key,
+                    values: d.values.filter(function(d,i) {
+                      return lines.x()(d,i) >= extent[0] && lines.x()(d,i) <= extent[1];
+                    })
+                  }
+                })
+             );
+                 
+        //------------------------------------------------------------
+        
+        
+        //------------------------------------------------------------
+        // Update Main (Focus) X Axis
+
+        if (dataBars.length) {
+            x = bars.xScale();
+        } else {
+            x = lines.xScale();
+        }
+        
+        xAxis
+        .scale(x)
+        .ticks( availableWidth / 100 )
+        .tickSize(-availableHeight1, 0);
+
+        xAxis.domain([Math.ceil(extent[0]), Math.floor(extent[1])]);
+        
+        g.select('.nv-x.nv-axis')
+          .call(xAxis);
+        //------------------------------------------------------------
+        
+        
+        //------------------------------------------------------------
+        // Update Main (Focus) Bars and Lines
+
+        focusBarsWrap.call(bars);
+        focusLinesWrap.call(lines);
+        
+        //------------------------------------------------------------
+        
+          
+        //------------------------------------------------------------
+        // Setup and Update Main (Focus) Y Axes
+        
+        g.select('.nv-focus .nv-x.nv-axis')
+          .attr('transform', 'translate(0,' + y1.range()[0] + ')');
+
+
+        y1Axis
+        .scale(y1)
+        .ticks( availableHeight1 / 36 )
+        .tickSize(-availableWidth, 0);
+
+        g.select('.nv-focus .nv-y1.nv-axis')
+          .style('opacity', dataBars.length ? 1 : 0);
+
+
+        y2Axis
+        .scale(y2)
+        .ticks( availableHeight1 / 36 )
+        .tickSize(dataBars.length ? 0 : -availableWidth, 0); // Show the y2 rules only if y1 has none
+
+        g.select('.nv-focus .nv-y2.nv-axis')
+          .style('opacity', dataLines.length ? 1 : 0)
+          .attr('transform', 'translate(' + x.range()[1] + ',0)');
+
+        g.select('.nv-focus .nv-y1.nv-axis')
+            .call(y1Axis);
+        g.select('.nv-focus .nv-y2.nv-axis')
+            .call(y2Axis);
+      }
+
+      //============================================================
+
+      onBrush();
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  lines.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  lines.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  bars.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  bars.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.legend = legend;
+  chart.lines = lines;
+  chart.lines2 = lines2;
+  chart.bars = bars;
+  chart.bars2 = bars2;
+  chart.xAxis = xAxis;
+  chart.x2Axis = x2Axis;
+  chart.y1Axis = y1Axis;
+  chart.y2Axis = y2Axis;
+  chart.y3Axis = y3Axis;
+  chart.y4Axis = y4Axis;
+
+  d3.rebind(chart, lines, 'defined', 'size', 'clipVoronoi', 'interpolate');
+  //TODO: consider rebinding x, y and some other stuff, and simply do something like bars.x(lines.x()), etc.
+  //d3.rebind(chart, lines, 'x', 'y', 'size', 'xDomain', 'yDomain', 'xRange', 'yRange', 'forceX', 'forceY', 'interactive', 'clipEdge', 'clipVoronoi', 'id');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    lines.x(_);
+    bars.x(_);
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = _;
+    lines.y(_);
+    bars.y(_);
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.brushExtent = function(_) {
+    if (!arguments.length) return brushExtent;
+    brushExtent = _;
+    return chart;
+  };
+
+
+  //============================================================
+
+
+  return chart;
+}
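+
+// NOTE: each nv.models factory in this file returns a `chart` closure whose
+// accessors double as getters and setters: called with no arguments they
+// return the current value; otherwise they store the argument and return
+// `chart`, so configuration calls can be chained, e.g.
+// chart.width(800).height(400).margin({top: 40}).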
+
+nv.models.multiBar = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 960
+    , height = 500
+    , x = d3.scale.ordinal()
+    , y = d3.scale.linear()
+    , id = Math.floor(Math.random() * 10000) //Create a semi-unique ID in case the user doesn't set one
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , forceY = [0] // 0 is forced by default, which makes sense for most bar graphs; call chart.forceY([]) to remove it
+    , clipEdge = true
+    , stacked = false
+    , stackOffset = 'zero' // options include 'silhouette', 'wiggle', 'expand', 'zero', or a custom function
+    , color = nv.utils.defaultColor()
+    , hideable = false
+    , barColor = null // adding the ability to set the color for each rather than the whole group
+    , disabled // used in conjunction with barColor to communicate from multiBarHorizontalChart what series are disabled
+    , delay = 1200
+    , xDomain
+    , yDomain
+    , xRange
+    , yRange
+    , groupSpacing = 0.1
+    , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout')
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var x0, y0; // used to store previous scales
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+      if (hideable && data.length) hideable = [{
+        values: data[0].values.map(function(d) {
+          return { x: d.x, y: 0, series: d.series, size: 0.01 };
+        })
+      }];
+
+      if (stacked)
+        data = d3.layout.stack()
+                 .offset(stackOffset)
+                 .values(function(d){ return d.values })
+                 .y(getY)
+                 (!data.length && hideable ? hideable : data);
+
+
+      //add series index to each data point for reference
+      data.forEach(function(series, i) {
+        series.values.forEach(function(point) {
+          point.series = i;
+        });
+      });
+
+
+      //------------------------------------------------------------
+      // HACK for negative value stacking
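+      // The stock d3.layout.stack offsets assume non-negative values, so
+      // recompute each segment's stack top by hand: positive values
+      // accumulate upward from zero and negative values accumulate downward,
+      // with y1 holding the segment's upper edge in data units (used below
+      // as the rect's pixel top via y(d.y1)).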
+      if (stacked)
+        data[0].values.map(function(d,i) {
+          var posBase = 0, negBase = 0;
+          data.map(function(d) {
+            var f = d.values[i];
+            f.size = Math.abs(f.y);
+            if (f.y < 0) {
+              f.y1 = negBase;
+              negBase = negBase - f.size;
+            } else {
+              f.y1 = f.size + posBase;
+              posBase = posBase + f.size;
+            }
+          });
+        });
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      // remap and flatten the data for use in calculating the scales' domains
+      var seriesData = (xDomain && yDomain) ? [] : // if we know xDomain and yDomain, no need to calculate
+            data.map(function(d) {
+              return d.values.map(function(d,i) {
+                return { x: getX(d,i), y: getY(d,i), y0: d.y0, y1: d.y1 }
+              })
+            });
+
+      x   .domain(xDomain || d3.merge(seriesData).map(function(d) { return d.x }))
+          .rangeBands(xRange || [0, availableWidth], groupSpacing);
+
+      //y   .domain(yDomain || d3.extent(d3.merge(seriesData).map(function(d) { return d.y + (stacked ? d.y1 : 0) }).concat(forceY)))
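+      // for stacked bars the y extent must cover the signed stack tops:
+      // positive stacks reach d.y1, negative stacks reach d.y1 + d.y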
+      y   .domain(yDomain || d3.extent(d3.merge(seriesData).map(function(d) { return stacked ? (d.y > 0 ? d.y1 : d.y1 + d.y ) : d.y }).concat(forceY)))
+          .range(yRange || [availableHeight, 0]);
+
+      // If the scale's domain doesn't span a range, widen it slightly so a chart can show a single data point
+      if (x.domain()[0] === x.domain()[1])
+        x.domain()[0] ?
+            x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])
+          : x.domain([-1,1]);
+
+      if (y.domain()[0] === y.domain()[1])
+        y.domain()[0] ?
+            y.domain([y.domain()[0] + y.domain()[0] * 0.01, y.domain()[1] - y.domain()[1] * 0.01])
+          : y.domain([-1,1]);
+
+
+      x0 = x0 || x;
+      y0 = y0 || y;
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-multibar').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-multibar');
+      var defsEnter = wrapEnter.append('defs');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g')
+
+      gEnter.append('g').attr('class', 'nv-groups');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+
+      defsEnter.append('clipPath')
+          .attr('id', 'nv-edge-clip-' + id)
+        .append('rect');
+      wrap.select('#nv-edge-clip-' + id + ' rect')
+          .attr('width', availableWidth)
+          .attr('height', availableHeight);
+
+      g   .attr('clip-path', clipEdge ? 'url(#nv-edge-clip-' + id + ')' : '');
+
+
+
+      var groups = wrap.select('.nv-groups').selectAll('.nv-group')
+          .data(function(d) { return d }, function(d,i) { return i });
+      groups.enter().append('g')
+          .style('stroke-opacity', 1e-6)
+          .style('fill-opacity', 1e-6);
+      groups.exit()
+        .selectAll('rect.nv-bar')
+          .attr('y', function(d) { return stacked ? y0(d.y0) : y0(0) })
+          .attr('height', 0)
+          .remove();
+      groups
+          .attr('class', function(d,i) { return 'nv-group nv-series-' + i })
+          .classed('hover', function(d) { return d.hover })
+          .style('fill', function(d,i){ return color(d, i) })
+          .style('stroke', function(d,i){ return color(d, i) });
+      groups
+          .style('stroke-opacity', 1)
+          .style('fill-opacity', .75);
+
+
+      var bars = groups.selectAll('rect.nv-bar')
+          .data(function(d) { return (hideable && !data.length) ? hideable.values : d.values });
+
+      bars.exit().remove();
+
+
+      var barsEnter = bars.enter().append('rect')
+          .attr('class', function(d,i) { return getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive'})
+          .attr('x', function(d,i,j) {
+              return stacked ? 0 : (j * x.rangeBand() / data.length )
+          })
+          .attr('y', function(d) { return y0(stacked ? d.y0 : 0) })
+          .attr('height', 0)
+          .attr('width', x.rangeBand() / (stacked ? 1 : data.length) )
+          .attr('transform', function(d,i) { return 'translate(' + x(getX(d,i)) + ',0)'; })
+          ;
+      bars
+          .style('fill', function(d,i,j){ return color(d, j, i);  })
+          .style('stroke', function(d,i,j){ return color(d, j, i); })
+          .on('mouseover', function(d,i) { //TODO: figure out why j works above, but not here
+            d3.select(this).classed('hover', true);
+            dispatch.elementMouseover({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [x(getX(d,i)) + (x.rangeBand() * (stacked ? data.length / 2 : d.series + .5) / data.length), y(getY(d,i) + (stacked ? d.y0 : 0))],  // TODO: Figure out why the value appears to be shifted
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+          })
+          .on('mouseout', function(d,i) {
+            d3.select(this).classed('hover', false);
+            dispatch.elementMouseout({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+          })
+          .on('click', function(d,i) {
+            dispatch.elementClick({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [x(getX(d,i)) + (x.rangeBand() * (stacked ? data.length / 2 : d.series + .5) / data.length), y(getY(d,i) + (stacked ? d.y0 : 0))],  // TODO: Figure out why the value appears to be shifted
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+            d3.event.stopPropagation();
+          })
+          .on('dblclick', function(d,i) {
+            dispatch.elementDblClick({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [x(getX(d,i)) + (x.rangeBand() * (stacked ? data.length / 2 : d.series + .5) / data.length), y(getY(d,i) + (stacked ? d.y0 : 0))],  // TODO: Figure out why the value appears to be shifted
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+            d3.event.stopPropagation();
+          });
+      bars
+          .attr('class', function(d,i) { return getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive'})
+          .attr('transform', function(d,i) { return 'translate(' + x(getX(d,i)) + ',0)'; });
+
+      if (barColor) {
+        if (!disabled) disabled = data.map(function() { return true });
+        bars
+          .style('fill', function(d,i,j) { return d3.rgb(barColor(d,i)).darker(  disabled.map(function(d,i) { return i }).filter(function(d,i){ return !disabled[i]  })[j]   ).toString(); })
+          .style('stroke', function(d,i,j) { return d3.rgb(barColor(d,i)).darker(  disabled.map(function(d,i) { return i }).filter(function(d,i){ return !disabled[i]  })[j]   ).toString(); });
+      }
+
+
+      if (stacked)
+          bars
+            .attr('y', function(d,i) {
+
+              return y((stacked ? d.y1 : 0));
+            })
+            .attr('height', function(d,i) {
+              return Math.max(Math.abs(y(d.y + (stacked ? d.y0 : 0)) - y((stacked ? d.y0 : 0))),1);
+            })
+            .attr('x', function(d,i) {
+                  return stacked ? 0 : (d.series * x.rangeBand() / data.length )
+            })
+            .attr('width', x.rangeBand() / (stacked ? 1 : data.length) );
+      else
+          bars
+            .attr('x', function(d,i) {
+              return d.series * x.rangeBand() / data.length
+            })
+            .attr('width', x.rangeBand() / data.length)
+            .attr('y', function(d,i) {
+                return getY(d,i) < 0 ?
+                        y(0) :
+                        y(0) - y(getY(d,i)) < 1 ?
+                          y(0) - 1 :
+                        y(getY(d,i)) || 0;
+            })
+            .attr('height', function(d,i) {
+                return Math.max(Math.abs(y(getY(d,i)) - y(0)),1) || 0;
+            });
+
+
+
+      //store old scales for use in transitions on update
+      x0 = x.copy();
+      y0 = y.copy();
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = _;
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.xScale = function(_) {
+    if (!arguments.length) return x;
+    x = _;
+    return chart;
+  };
+
+  chart.yScale = function(_) {
+    if (!arguments.length) return y;
+    y = _;
+    return chart;
+  };
+
+  chart.xDomain = function(_) {
+    if (!arguments.length) return xDomain;
+    xDomain = _;
+    return chart;
+  };
+
+  chart.yDomain = function(_) {
+    if (!arguments.length) return yDomain;
+    yDomain = _;
+    return chart;
+  };
+
+  chart.xRange = function(_) {
+    if (!arguments.length) return xRange;
+    xRange = _;
+    return chart;
+  };
+
+  chart.yRange = function(_) {
+    if (!arguments.length) return yRange;
+    yRange = _;
+    return chart;
+  };
+
+  chart.forceY = function(_) {
+    if (!arguments.length) return forceY;
+    forceY = _;
+    return chart;
+  };
+
+  chart.stacked = function(_) {
+    if (!arguments.length) return stacked;
+    stacked = _;
+    return chart;
+  };
+
+  chart.stackOffset = function(_) {
+    if (!arguments.length) return stackOffset;
+    stackOffset = _;
+    return chart;
+  };
+
+  chart.clipEdge = function(_) {
+    if (!arguments.length) return clipEdge;
+    clipEdge = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.barColor = function(_) {
+    if (!arguments.length) return barColor;
+    barColor = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.disabled = function(_) {
+    if (!arguments.length) return disabled;
+    disabled = _;
+    return chart;
+  };
+
+  chart.id = function(_) {
+    if (!arguments.length) return id;
+    id = _;
+    return chart;
+  };
+
+  chart.hideable = function(_) {
+    if (!arguments.length) return hideable;
+    hideable = _;
+    return chart;
+  };
+
+  chart.delay = function(_) {
+    if (!arguments.length) return delay;
+    delay = _;
+    return chart;
+  };
+
+  chart.groupSpacing = function(_) {
+    if (!arguments.length) return groupSpacing;
+    groupSpacing = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
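+
+// nv.models.multiBar is the bare bar renderer; it is normally sized and
+// invoked by a wrapper such as nv.models.multiBarChart below. A minimal
+// standalone sketch (an assumption, not upstream documentation; assumes an
+// <svg> already containing a <g> offset for margins):
+//
+//   var bars = nv.models.multiBar().width(800).height(400);
+//   d3.select('svg g')
+//       .datum([{key: 'A', values: [{x: 0, y: 1}, {x: 1, y: 2}]}])
+//       .call(bars);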
+
+nv.models.multiBarChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var multibar = nv.models.multiBar()
+    , xAxis = nv.models.axis()
+    , yAxis = nv.models.axis()
+    , legend = nv.models.legend()
+    , controls = nv.models.legend()
+    ;
+
+  var margin = {top: 30, right: 20, bottom: 50, left: 60}
+    , width = null
+    , height = null
+    , color = nv.utils.defaultColor()
+    , showControls = true
+    , showLegend = true
+    , showXAxis = true
+    , showYAxis = true
+    , rightAlignYAxis = false
+    , reduceXTicks = true // if false a tick will show for every data point
+    , staggerLabels = false
+    , rotateLabels = 0
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' on ' + x + '</p>'
+      }
+    , x //can be accessed via chart.xScale()
+    , y //can be accessed via chart.yScale()
+    , state = { stacked: false }
+    , defaultState = null
+    , noData = "No Data Available."
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    , controlWidth = function() { return showControls ? 180 : 0 }
+    , transitionDuration = 0
+    ;
+
+  multibar
+    .stacked(false)
+    ;
+  xAxis
+    .orient('bottom')
+    .tickPadding(7)
+    .highlightZero(true)
+    .showMaxMin(false)
+    .tickFormat(function(d) { return d })
+    ;
+  yAxis
+    .orient((rightAlignYAxis) ? 'right' : 'left')
+    .tickFormat(d3.format(',.1f'))
+    ;
+
+  controls.updateState(false);
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(multibar.x()(e.point, e.pointIndex)),
+        y = yAxis.tickFormat()(multibar.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, e.value < 0 ? 'n' : 's', null, offsetElement);
+  };
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      chart.update = function() { container.call(chart) };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+      //------------------------------------------------------------
+      // Display noData message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = multibar.xScale();
+      y = multibar.yScale();
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-multiBarWithLegend').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-multiBarWithLegend').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y nv-axis');
+      gEnter.append('g').attr('class', 'nv-barsWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+      gEnter.append('g').attr('class', 'nv-controlsWrap');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width(availableWidth - controlWidth());
+
+        if (multibar.barColor())
+          data.forEach(function(series,i) {
+            series.color = d3.rgb('#ccc').darker(i * 1.5).toString();
+          })
+
+        g.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        g.select('.nv-legendWrap')
+            .attr('transform', 'translate(' + controlWidth() + ',' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Controls
+
+      if (showControls) {
+        var controlsData = [
+          { key: 'Grouped', disabled: multibar.stacked() },
+          { key: 'Stacked', disabled: !multibar.stacked() }
+        ];
+
+        controls.width(controlWidth()).color(['#444', '#444', '#444']);
+        g.select('.nv-controlsWrap')
+            .datum(controlsData)
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+            .call(controls);
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      if (rightAlignYAxis) {
+          g.select(".nv-y.nv-axis")
+              .attr("transform", "translate(" + availableWidth + ",0)");
+      }
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      multibar
+        .disabled(data.map(function(series) { return series.disabled }))
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled }))
+
+
+      var barsWrap = g.select('.nv-barsWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+
+      barsWrap.call(multibar);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      if (showXAxis) {
+          xAxis
+            .scale(x)
+            .ticks( availableWidth / 100 )
+            .tickSize(-availableHeight, 0);
+
+          g.select('.nv-x.nv-axis')
+              .attr('transform', 'translate(0,' + y.range()[0] + ')');
+          g.select('.nv-x.nv-axis')
+              .call(xAxis);
+
+          var xTicks = g.select('.nv-x.nv-axis > g').selectAll('g');
+
+          xTicks
+              .selectAll('line, text')
+              .style('opacity', 1)
+
+          if (staggerLabels) {
+              var getTranslate = function(x,y) {
+                  return "translate(" + x + "," + y + ")";
+              };
+
+              var staggerUp = 5, staggerDown = 17;  //pixels to stagger by
+              // Issue #140
+              xTicks
+                .selectAll("text")
+                .attr('transform', function(d,i,j) { 
+                    return  getTranslate(0, (j % 2 == 0 ? staggerUp : staggerDown));
+                  });
+
+              var totalInBetweenTicks = d3.selectAll(".nv-x.nv-axis .nv-wrap g g text")[0].length;
+              g.selectAll(".nv-x.nv-axis .nv-axisMaxMin text")
+                .attr("transform", function(d,i) {
+                    return getTranslate(0, (i === 0 || totalInBetweenTicks % 2 !== 0) ? staggerDown : staggerUp);
+                });
+          }
+
+          if (reduceXTicks)
+            xTicks
+              .filter(function(d,i) {
+                  return i % Math.ceil(data[0].values.length / (availableWidth / 100)) !== 0;
+                })
+              .selectAll('text, line')
+              .style('opacity', 0);
+
+          if(rotateLabels)
+            xTicks
+              .selectAll('.tick text')
+              .attr('transform', 'rotate(' + rotateLabels + ' 0,0)')
+              .style('text-anchor', rotateLabels > 0 ? 'start' : 'end');
+          
+          g.select('.nv-x.nv-axis').selectAll('g.nv-axisMaxMin text')
+              .style('opacity', 1);
+      }
+
+
+      if (showYAxis) {      
+          yAxis
+            .scale(y)
+            .ticks( availableHeight / 36 )
+            .tickSize( -availableWidth, 0);
+
+          g.select('.nv-y.nv-axis')
+              .call(yAxis);
+      }
+
+
+      //------------------------------------------------------------
+
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      legend.dispatch.on('stateChange', function(newState) { 
+        state = newState;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+      controls.dispatch.on('legendClick', function(d,i) {
+        if (!d.disabled) return;
+        controlsData = controlsData.map(function(s) {
+          s.disabled = true;
+          return s;
+        });
+        d.disabled = false;
+
+        switch (d.key) {
+          case 'Grouped':
+            multibar.stacked(false);
+            break;
+          case 'Stacked':
+            multibar.stacked(true);
+            break;
+        }
+
+        state.stacked = multibar.stacked();
+        dispatch.stateChange(state);
+
+        chart.update();
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode)
+      });
+
+      // Update chart from a state object passed to event handler
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined') {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        if (typeof e.stacked !== 'undefined') {
+          multibar.stacked(e.stacked);
+          state.stacked = e.stacked;
+        }
+
+        chart.update();
+      });
+
+      //============================================================
+
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  multibar.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  multibar.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.multibar = multibar;
+  chart.legend = legend;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+
+  d3.rebind(chart, multibar, 'x', 'y', 'xDomain', 'yDomain', 'xRange', 'yRange', 'forceX', 'forceY', 'clipEdge',
+   'id', 'stacked', 'stackOffset', 'delay', 'barColor','groupSpacing');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    return chart;
+  };
+
+  chart.showControls = function(_) {
+    if (!arguments.length) return showControls;
+    showControls = _;
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.rightAlignYAxis = function(_) {
+    if(!arguments.length) return rightAlignYAxis;
+    rightAlignYAxis = _;
+    yAxis.orient( (_) ? 'right' : 'left');
+    return chart;
+  };
+
+  chart.reduceXTicks= function(_) {
+    if (!arguments.length) return reduceXTicks;
+    reduceXTicks = _;
+    return chart;
+  };
+
+  chart.rotateLabels = function(_) {
+    if (!arguments.length) return rotateLabels;
+    rotateLabels = _;
+    return chart;
+  };
+
+  chart.staggerLabels = function(_) {
+    if (!arguments.length) return staggerLabels;
+    staggerLabels = _;
+    return chart;
+  };
+
+  chart.tooltip = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+  
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
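+
+// Example usage (a sketch; assumes nv and d3 are loaded and the page has an
+// <svg> element to render into — selectors and data are illustrative):
+//
+//   var chart = nv.models.multiBarChart()
+//       .stacked(false)      // start grouped; the Grouped/Stacked controls toggle this
+//       .reduceXTicks(true);
+//   d3.select('svg')
+//       .datum([
+//         {key: 'Series 1', values: [{x: 0, y: 3}, {x: 1, y: 5}]},
+//         {key: 'Series 2', values: [{x: 0, y: 2}, {x: 1, y: 4}]}
+//       ])
+//       .call(chart);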
+
+nv.models.multiBarHorizontal = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 960
+    , height = 500
+    , id = Math.floor(Math.random() * 10000) //Create a semi-unique ID in case the user doesn't set one
+    , x = d3.scale.ordinal()
+    , y = d3.scale.linear()
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , forceY = [0] // 0 is forced by default, which makes sense for most bar graphs; call chart.forceY([]) to remove it
+    , color = nv.utils.defaultColor()
+    , barColor = null // adding the ability to set the color for each rather than the whole group
+    , disabled // used in conjunction with barColor to communicate from multiBarHorizontalChart what series are disabled
+    , stacked = false
+    , showValues = false
+    , showBarLabels = false
+    , valuePadding = 60
+    , valueFormat = d3.format(',.2f')
+    , delay = 1200
+    , xDomain
+    , yDomain
+    , xRange
+    , yRange
+    , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout')
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var x0, y0; // used to store previous scales
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+
+      if (stacked)
+        data = d3.layout.stack()
+                 .offset('zero')
+                 .values(function(d){ return d.values })
+                 .y(getY)
+                 (data);
+
+
+      //add series index to each data point for reference
+      data.forEach(function(series, i) {
+        series.values.forEach(function(point) {
+          point.series = i;
+        });
+      });
+
+
+
+      //------------------------------------------------------------
+      // HACK for negative value stacking
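+      // mirror of the multiBar hack above, transposed for horizontal bars:
+      // here y1 holds each segment's lower edge, so positive and negative
+      // values stack away from zero independently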
+      if (stacked)
+        data[0].values.map(function(d,i) {
+          var posBase = 0, negBase = 0;
+          data.map(function(d) {
+            var f = d.values[i];
+            f.size = Math.abs(f.y);
+            if (f.y < 0) {
+              f.y1 = negBase - f.size;
+              negBase = negBase - f.size;
+            } else {
+              f.y1 = posBase;
+              posBase = posBase + f.size;
+            }
+          });
+        });
+
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      // remap and flatten the data for use in calculating the scales' domains
+      var seriesData = (xDomain && yDomain) ? [] : // if we know xDomain and yDomain, no need to calculate
+            data.map(function(d) {
+              return d.values.map(function(d,i) {
+                return { x: getX(d,i), y: getY(d,i), y0: d.y0, y1: d.y1 }
+              })
+            });
+
+      x   .domain(xDomain || d3.merge(seriesData).map(function(d) { return d.x }))
+          .rangeBands(xRange || [0, availableHeight], .1);
+
+      //y   .domain(yDomain || d3.extent(d3.merge(seriesData).map(function(d) { return d.y + (stacked ? d.y0 : 0) }).concat(forceY)))
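+      // positive stacks reach d.y1 + d.y, negative stacks reach d.y1
+      // (the segment's lower edge), mirroring the multiBar domain logic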
+      y   .domain(yDomain || d3.extent(d3.merge(seriesData).map(function(d) { return stacked ? (d.y > 0 ? d.y1 + d.y : d.y1 ) : d.y }).concat(forceY)))
+
+      if (showValues && !stacked)
+        y.range(yRange || [(y.domain()[0] < 0 ? valuePadding : 0), availableWidth - (y.domain()[1] > 0 ? valuePadding : 0) ]);
+      else
+        y.range(yRange || [0, availableWidth]);
+
+      x0 = x0 || x;
+      y0 = y0 || d3.scale.linear().domain(y.domain()).range([y(0),y(0)]);
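+      // on the first render y0 collapses the whole domain onto y(0), so
+      // entering bars start as zero-width rects at the zero line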
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = d3.select(this).selectAll('g.nv-wrap.nv-multibarHorizontal').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-multibarHorizontal');
+      var defsEnter = wrapEnter.append('defs');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-groups');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+
+      var groups = wrap.select('.nv-groups').selectAll('.nv-group')
+          .data(function(d) { return d }, function(d,i) { return i });
+      groups.enter().append('g')
+          .style('stroke-opacity', 1e-6)
+          .style('fill-opacity', 1e-6);
+      groups.exit()
+          .style('stroke-opacity', 1e-6)
+          .style('fill-opacity', 1e-6)
+          .remove();
+      groups
+          .attr('class', function(d,i) { return 'nv-group nv-series-' + i })
+          .classed('hover', function(d) { return d.hover })
+          .style('fill', function(d,i){ return color(d, i) })
+          .style('stroke', function(d,i){ return color(d, i) });
+      groups
+          .style('stroke-opacity', 1)
+          .style('fill-opacity', .75);
+
+
+      var bars = groups.selectAll('g.nv-bar')
+          .data(function(d) { return d.values });
+
+      bars.exit().remove();
+
+
+      var barsEnter = bars.enter().append('g')
+          .attr('transform', function(d,i,j) {
+              return 'translate(' + y0(stacked ? d.y0 : 0) + ',' + (stacked ? 0 : (j * x.rangeBand() / data.length ) + x(getX(d,i))) + ')'
+          });
+
+      barsEnter.append('rect')
+          .attr('width', 0)
+          .attr('height', x.rangeBand() / (stacked ? 1 : data.length) )
+
+      bars
+          .on('mouseover', function(d,i) { //TODO: figure out why j works above, but not here
+            d3.select(this).classed('hover', true);
+            dispatch.elementMouseover({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [ y(getY(d,i) + (stacked ? d.y0 : 0)), x(getX(d,i)) + (x.rangeBand() * (stacked ? data.length / 2 : d.series + .5) / data.length) ],
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+          })
+          .on('mouseout', function(d,i) {
+            d3.select(this).classed('hover', false);
+            dispatch.elementMouseout({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+          })
+          .on('click', function(d,i) {
+            dispatch.elementClick({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [x(getX(d,i)) + (x.rangeBand() * (stacked ? data.length / 2 : d.series + .5) / data.length), y(getY(d,i) + (stacked ? d.y0 : 0))],  // TODO: Figure out why the value appears to be shifted
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+            d3.event.stopPropagation();
+          })
+          .on('dblclick', function(d,i) {
+            dispatch.elementDblClick({
+              value: getY(d,i),
+              point: d,
+              series: data[d.series],
+              pos: [x(getX(d,i)) + (x.rangeBand() * (stacked ? data.length / 2 : d.series + .5) / data.length), y(getY(d,i) + (stacked ? d.y0 : 0))],  // TODO: Figure out why the value appears to be shifted
+              pointIndex: i,
+              seriesIndex: d.series,
+              e: d3.event
+            });
+            d3.event.stopPropagation();
+          });
+
+
+      barsEnter.append('text');
+
+      if (showValues && !stacked) {
+        bars.select('text')
+            .attr('text-anchor', function(d,i) { return getY(d,i) < 0 ? 'end' : 'start' })
+            .attr('y', x.rangeBand() / (data.length * 2))
+            .attr('dy', '.32em')
+            .text(function(d,i) { return valueFormat(getY(d,i)) })
+        bars
+          .select('text')
+            .attr('x', function(d,i) { return getY(d,i) < 0 ? -4 : y(getY(d,i)) - y(0) + 4 })
+      } else {
+        bars.selectAll('text').text('');
+      }
+
+      if (showBarLabels && !stacked) {
+        barsEnter.append('text').classed('nv-bar-label',true);
+        bars.select('text.nv-bar-label')
+            .attr('text-anchor', function(d,i) { return getY(d,i) < 0 ? 'start' : 'end' })
+            .attr('y', x.rangeBand() / (data.length * 2))
+            .attr('dy', '.32em')
+            .text(function(d,i) { return getX(d,i) });
+        bars
+          .select('text.nv-bar-label')
+            .attr('x', function(d,i) { return getY(d,i) < 0 ? y(0) - y(getY(d,i)) + 4 : -4 });
+      }
+      else {
+        bars.selectAll('text.nv-bar-label').text('');
+      }
+
+      bars
+          .attr('class', function(d,i) { return getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive'})
+
+      if (barColor) {
+        if (!disabled) disabled = data.map(function() { return true });
+        bars
+          .style('fill', function(d,i,j) { return d3.rgb(barColor(d,i)).darker(  disabled.map(function(d,i) { return i }).filter(function(d,i){ return !disabled[i]  })[j]   ).toString(); })
+          .style('stroke', function(d,i,j) { return d3.rgb(barColor(d,i)).darker(  disabled.map(function(d,i) { return i }).filter(function(d,i){ return !disabled[i]  })[j]   ).toString(); });
+      }
+
+      if (stacked)
+        bars
+            .attr('transform', function(d,i) {
+              return 'translate(' + y(d.y1) + ',' + x(getX(d,i)) + ')'
+            })
+          .select('rect')
+            .attr('width', function(d,i) {
+              return Math.abs(y(getY(d,i) + d.y0) - y(d.y0))
+            })
+            .attr('height', x.rangeBand() );
+      else
+        bars
+            .attr('transform', function(d,i) {
+              //TODO: stacked must be all positive or all negative, not both?
+              return 'translate(' +
+              (getY(d,i) < 0 ? y(getY(d,i)) : y(0))
+              + ',' +
+              (d.series * x.rangeBand() / data.length
+              +
+              x(getX(d,i)) )
+              + ')'
+            })
+          .select('rect')
+            .attr('height', x.rangeBand() / data.length )
+            .attr('width', function(d,i) {
+              return Math.max(Math.abs(y(getY(d,i)) - y(0)),1)
+            });
+
+
+      //store old scales for use in transitions on update
+      x0 = x.copy();
+      y0 = y.copy();
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = _;
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.xScale = function(_) {
+    if (!arguments.length) return x;
+    x = _;
+    return chart;
+  };
+
+  chart.yScale = function(_) {
+    if (!arguments.length) return y;
+    y = _;
+    return chart;
+  };
+
+  chart.xDomain = function(_) {
+    if (!arguments.length) return xDomain;
+    xDomain = _;
+    return chart;
+  };
+
+  chart.yDomain = function(_) {
+    if (!arguments.length) return yDomain;
+    yDomain = _;
+    return chart;
+  };
+
+  chart.xRange = function(_) {
+    if (!arguments.length) return xRange;
+    xRange = _;
+    return chart;
+  };
+
+  chart.yRange = function(_) {
+    if (!arguments.length) return yRange;
+    yRange = _;
+    return chart;
+  };
+
+  chart.forceY = function(_) {
+    if (!arguments.length) return forceY;
+    forceY = _;
+    return chart;
+  };
+
+  chart.stacked = function(_) {
+    if (!arguments.length) return stacked;
+    stacked = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.barColor = function(_) {
+    if (!arguments.length) return barColor;
+    barColor = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.disabled = function(_) {
+    if (!arguments.length) return disabled;
+    disabled = _;
+    return chart;
+  };
+
+  chart.id = function(_) {
+    if (!arguments.length) return id;
+    id = _;
+    return chart;
+  };
+
+  chart.delay = function(_) {
+    if (!arguments.length) return delay;
+    delay = _;
+    return chart;
+  };
+
+  chart.showValues = function(_) {
+    if (!arguments.length) return showValues;
+    showValues = _;
+    return chart;
+  };
+
+  chart.showBarLabels = function(_) {
+    if (!arguments.length) return showBarLabels;
+    showBarLabels = _;
+    return chart;
+  };
+
+
+  chart.valueFormat= function(_) {
+    if (!arguments.length) return valueFormat;
+    valueFormat = _;
+    return chart;
+  };
+
+  chart.valuePadding = function(_) {
+    if (!arguments.length) return valuePadding;
+    valuePadding = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
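+
+// Like nv.models.multiBar above, this bare renderer is normally embedded by
+// its chart wrapper (nv.models.multiBarHorizontalChart below), which handles
+// axes, the legend, and the Grouped/Stacked controls.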
+
+nv.models.multiBarHorizontalChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var multibar = nv.models.multiBarHorizontal()
+    , xAxis = nv.models.axis()
+    , yAxis = nv.models.axis()
+    , legend = nv.models.legend().height(30)
+    , controls = nv.models.legend().height(30)
+    ;
+
+  var margin = {top: 30, right: 20, bottom: 50, left: 60}
+    , width = null
+    , height = null
+    , color = nv.utils.defaultColor()
+    , showControls = true
+    , showLegend = true
+    , showXAxis = true
+    , showYAxis = true
+    , stacked = false
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + ' - ' + x + '</h3>' +
+               '<p>' +  y + '</p>'
+      }
+    , x //can be accessed via chart.xScale()
+    , y //can be accessed via chart.yScale()
+    , state = { stacked: stacked }
+    , defaultState = null
+    , noData = 'No Data Available.'
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    , controlWidth = function() { return showControls ? 180 : 0 }
+    , transitionDuration = 0
+    ;
+
+  multibar
+    .stacked(stacked)
+    ;
+  xAxis
+    .orient('left')
+    .tickPadding(5)
+    .highlightZero(false)
+    .showMaxMin(false)
+    .tickFormat(function(d) { return d })
+    ;
+  yAxis
+    .orient('bottom')
+    .tickFormat(d3.format(',.1f'))
+    ;
+
+  controls.updateState(false);
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(multibar.x()(e.point, e.pointIndex)),
+        y = yAxis.tickFormat()(multibar.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, e.value < 0 ? 'e' : 'w', null, offsetElement);
+  };
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      chart.update = function() { container.call(chart) };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = multibar.xScale();
+      y = multibar.yScale();
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-multiBarHorizontalChart').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-multiBarHorizontalChart').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y nv-axis')
+            .append('g').attr('class', 'nv-zeroLine')
+            .append('line');
+      gEnter.append('g').attr('class', 'nv-barsWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+      gEnter.append('g').attr('class', 'nv-controlsWrap');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width(availableWidth - controlWidth());
+
+        if (multibar.barColor())
+          data.forEach(function(series,i) {
+            series.color = d3.rgb('#ccc').darker(i * 1.5).toString();
+          })
+
+        g.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        g.select('.nv-legendWrap')
+            .attr('transform', 'translate(' + controlWidth() + ',' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Controls
+
+      if (showControls) {
+        var controlsData = [
+          { key: 'Grouped', disabled: multibar.stacked() },
+          { key: 'Stacked', disabled: !multibar.stacked() }
+        ];
+
+        controls.width(controlWidth()).color(['#444', '#444', '#444']);
+        g.select('.nv-controlsWrap')
+            .datum(controlsData)
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+            .call(controls);
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      multibar
+        .disabled(data.map(function(series) { return series.disabled }))
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color(d, i);
+        }).filter(function(d,i) { return !data[i].disabled }))
+
+
+      var barsWrap = g.select('.nv-barsWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+
+      barsWrap.call(multibar);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      if (showXAxis) {
+          xAxis
+            .scale(x)
+            .ticks( availableHeight / 24 )
+            .tickSize(-availableWidth, 0);
+
+          g.select('.nv-x.nv-axis')
+              .call(xAxis);
+
+          var xTicks = g.select('.nv-x.nv-axis').selectAll('g');
+
+          xTicks
+              .selectAll('line, text');
+      }
+
+      if (showYAxis) {
+          yAxis
+            .scale(y)
+            .ticks( availableWidth / 100 )
+            .tickSize( -availableHeight, 0);
+
+          g.select('.nv-y.nv-axis')
+              .attr('transform', 'translate(0,' + availableHeight + ')');
+          g.select('.nv-y.nv-axis')
+              .call(yAxis);
+      }
+
+      // Zero line
+      g.select(".nv-zeroLine line")
+        .attr("x1", y(0))
+        .attr("x2", y(0))
+        .attr("y1", 0)
+        .attr("y2", -availableHeight)
+        ;
+
+      //------------------------------------------------------------
+
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      legend.dispatch.on('stateChange', function(newState) {
+        state = newState;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+      controls.dispatch.on('legendClick', function(d,i) {
+        if (!d.disabled) return;
+        controlsData = controlsData.map(function(s) {
+          s.disabled = true;
+          return s;
+        });
+        d.disabled = false;
+
+        switch (d.key) {
+          case 'Grouped':
+            multibar.stacked(false);
+            break;
+          case 'Stacked':
+            multibar.stacked(true);
+            break;
+        }
+
+        state.stacked = multibar.stacked();
+        dispatch.stateChange(state);
+
+        chart.update();
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+      // Update chart from a state object passed to event handler
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined') {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        if (typeof e.stacked !== 'undefined') {
+          multibar.stacked(e.stacked);
+          state.stacked = e.stacked;
+        }
+
+        chart.update();
+      });
+      //============================================================
+
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  multibar.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  multibar.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.multibar = multibar;
+  chart.legend = legend;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+
+  d3.rebind(chart, multibar, 'x', 'y', 'xDomain', 'yDomain', 'xRange', 'yRange', 'forceX', 'forceY',
+    'clipEdge', 'id', 'delay', 'showValues','showBarLabels', 'valueFormat', 'stacked', 'barColor');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    return chart;
+  };
+
+  chart.showControls = function(_) {
+    if (!arguments.length) return showControls;
+    showControls = _;
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.tooltip = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+  //============================================================
+
+
+  return chart;
+}
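+// Illustrative sketch (not upstream code): every accessor above follows the
+// same chainable getter/setter pattern -- called with an argument it stores
+// the value and returns `chart` for chaining; called with no argument it
+// reads the current value back:
+//
+//   chart.width(600).height(400).stacked(true); // setters chain ('stacked' is rebound from the multibar model)
+//   var w = chart.width();                      // getter -> 600
+//   chart.margin({top: 40});                    // per-side merge: unspecified margins keep their values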
+nv.models.multiChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 30, right: 20, bottom: 50, left: 60},
+      color = d3.scale.category20().range(),
+      width = null, 
+      height = null,
+      showLegend = true,
+      tooltips = true,
+      tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' at ' + x + '</p>';
+      },
+      yDomain1,
+      yDomain2
+      ; // the scales are private (declared below); they can be reached via chart.lines1/.lines2 .xScale()/.yScale()
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var x = d3.scale.linear(),
+      yScale1 = d3.scale.linear(),
+      yScale2 = d3.scale.linear(),
+
+      lines1 = nv.models.line().yScale(yScale1),
+      lines2 = nv.models.line().yScale(yScale2),
+
+      bars1 = nv.models.multiBar().stacked(false).yScale(yScale1),
+      bars2 = nv.models.multiBar().stacked(false).yScale(yScale2),
+
+      stack1 = nv.models.stackedArea().yScale(yScale1),
+      stack2 = nv.models.stackedArea().yScale(yScale2),
+
+      xAxis = nv.models.axis().scale(x).orient('bottom').tickPadding(5),
+      yAxis1 = nv.models.axis().scale(yScale1).orient('left'),
+      yAxis2 = nv.models.axis().scale(yScale2).orient('right'),
+
+      legend = nv.models.legend().height(30),
+      dispatch = d3.dispatch('tooltipShow', 'tooltipHide');
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(lines1.x()(e.point, e.pointIndex)),
+        y = ((e.series.yAxis == 2) ? yAxis2 : yAxis1).tickFormat()(lines1.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, undefined, undefined, offsetElement.offsetParent);
+  };
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      chart.update = function() { container.call(chart); };
+      chart.container = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      var dataLines1 = data.filter(function(d) {return !d.disabled && d.type == 'line' && d.yAxis == 1})
+      var dataLines2 = data.filter(function(d) {return !d.disabled && d.type == 'line' && d.yAxis == 2})
+      var dataBars1 = data.filter(function(d) {return !d.disabled && d.type == 'bar' && d.yAxis == 1})
+      var dataBars2 = data.filter(function(d) {return !d.disabled && d.type == 'bar' && d.yAxis == 2})
+      var dataStack1 = data.filter(function(d) {return !d.disabled && d.type == 'area' && d.yAxis == 1})
+      var dataStack2 = data.filter(function(d) {return !d.disabled && d.type == 'area' && d.yAxis == 2})
+
+      var series1 = data.filter(function(d) {return !d.disabled && d.yAxis == 1})
+            .map(function(d) {
+              return d.values.map(function(d,i) {
+                return { x: d.x, y: d.y }
+              })
+            })
+
+      var series2 = data.filter(function(d) {return !d.disabled && d.yAxis == 2})
+            .map(function(d) {
+              return d.values.map(function(d,i) {
+                return { x: d.x, y: d.y }
+              })
+            })
+
+      x   .domain(d3.extent(d3.merge(series1.concat(series2)), function(d) { return d.x } ))
+          .range([0, availableWidth]);
+
+      var wrap = container.selectAll('g.wrap.multiChart').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'wrap nvd3 multiChart').append('g');
+
+      gEnter.append('g').attr('class', 'x axis');
+      gEnter.append('g').attr('class', 'y1 axis');
+      gEnter.append('g').attr('class', 'y2 axis');
+      gEnter.append('g').attr('class', 'lines1Wrap');
+      gEnter.append('g').attr('class', 'lines2Wrap');
+      gEnter.append('g').attr('class', 'bars1Wrap');
+      gEnter.append('g').attr('class', 'bars2Wrap');
+      gEnter.append('g').attr('class', 'stack1Wrap');
+      gEnter.append('g').attr('class', 'stack2Wrap');
+      gEnter.append('g').attr('class', 'legendWrap');
+
+      var g = wrap.select('g');
+
+      if (showLegend) {
+        legend.width( availableWidth / 2 );
+
+        g.select('.legendWrap')
+            .datum(data.map(function(series) { 
+              series.originalKey = series.originalKey === undefined ? series.key : series.originalKey;
+              series.key = series.originalKey + (series.yAxis == 1 ? '' : ' (right axis)');
+              return series;
+            }))
+          .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        g.select('.legendWrap')
+            .attr('transform', 'translate(' + ( availableWidth / 2 ) + ',' + (-margin.top) +')');
+      }
+
+
+      lines1
+        .width(availableWidth)
+        .height(availableHeight)
+        .interpolate("monotone")
+        .color(data.map(function(d,i) {
+          return d.color || color[i % color.length];
+        }).filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 1 && data[i].type == 'line'}));
+
+      lines2
+        .width(availableWidth)
+        .height(availableHeight)
+        .interpolate("monotone")
+        .color(data.map(function(d,i) {
+          return d.color || color[i % color.length];
+        }).filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 2 && data[i].type == 'line'}));
+
+      bars1
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color[i % color.length];
+        }).filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 1 && data[i].type == 'bar'}));
+
+      bars2
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color[i % color.length];
+        }).filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 2 && data[i].type == 'bar'}));
+
+      stack1
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color[i % color.length];
+        }).filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 1 && data[i].type == 'area'}));
+
+      stack2
+        .width(availableWidth)
+        .height(availableHeight)
+        .color(data.map(function(d,i) {
+          return d.color || color[i % color.length];
+        }).filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 2 && data[i].type == 'area'}));
+
+      g.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+
+      var lines1Wrap = g.select('.lines1Wrap')
+          .datum(dataLines1)
+      var bars1Wrap = g.select('.bars1Wrap')
+          .datum(dataBars1)
+      var stack1Wrap = g.select('.stack1Wrap')
+          .datum(dataStack1)
+
+      var lines2Wrap = g.select('.lines2Wrap')
+          .datum(dataLines2)
+      var bars2Wrap = g.select('.bars2Wrap')
+          .datum(dataBars2)
+      var stack2Wrap = g.select('.stack2Wrap')
+          .datum(dataStack2)
+
+      var extraValue1 = dataStack1.length ? dataStack1.map(function(a){return a.values}).reduce(function(a,b){
+        return a.map(function(aVal,i){return {x: aVal.x, y: aVal.y + b[i].y}})
+      }).concat([{x:0, y:0}]) : []
+      var extraValue2 = dataStack2.length ? dataStack2.map(function(a){return a.values}).reduce(function(a,b){
+        return a.map(function(aVal,i){return {x: aVal.x, y: aVal.y + b[i].y}})
+      }).concat([{x:0, y:0}]) : []
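+      // Worked example of the reduce above: stacked series [{x:0,y:1},{x:1,y:2}]
+      // and [{x:0,y:3},{x:1,y:4}] sum pointwise to [{x:0,y:4},{x:1,y:6}]; the
+      // appended {x:0,y:0} ensures the y extent computed below includes zero.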
+
+      yScale1 .domain(yDomain1 || d3.extent(d3.merge(series1).concat(extraValue1), function(d) { return d.y } ))
+              .range([0, availableHeight])
+
+      yScale2 .domain(yDomain2 || d3.extent(d3.merge(series2).concat(extraValue2), function(d) { return d.y } ))
+              .range([0, availableHeight])
+
+      lines1.yDomain(yScale1.domain())
+      bars1.yDomain(yScale1.domain())
+      stack1.yDomain(yScale1.domain())
+
+      lines2.yDomain(yScale2.domain())
+      bars2.yDomain(yScale2.domain())
+      stack2.yDomain(yScale2.domain())
+
+      if(dataStack1.length){d3.transition(stack1Wrap).call(stack1);}
+      if(dataStack2.length){d3.transition(stack2Wrap).call(stack2);}
+
+      if(dataBars1.length){d3.transition(bars1Wrap).call(bars1);}
+      if(dataBars2.length){d3.transition(bars2Wrap).call(bars2);}
+
+      if(dataLines1.length){d3.transition(lines1Wrap).call(lines1);}
+      if(dataLines2.length){d3.transition(lines2Wrap).call(lines2);}
+      
+
+
+      xAxis
+        .ticks( availableWidth / 100 )
+        .tickSize(-availableHeight, 0);
+
+      g.select('.x.axis')
+          .attr('transform', 'translate(0,' + availableHeight + ')');
+      d3.transition(g.select('.x.axis'))
+          .call(xAxis);
+
+      yAxis1
+        .ticks( availableHeight / 36 )
+        .tickSize( -availableWidth, 0);
+
+
+      d3.transition(g.select('.y1.axis'))
+          .call(yAxis1);
+
+      yAxis2
+        .ticks( availableHeight / 36 )
+        .tickSize( -availableWidth, 0);
+
+      d3.transition(g.select('.y2.axis'))
+          .call(yAxis2);
+
+      g.select('.y2.axis')
+          .style('opacity', series2.length ? 1 : 0)
+          .attr('transform', 'translate(' + x.range()[1] + ',0)');
+
+      legend.dispatch.on('stateChange', function(newState) { 
+        chart.update();
+      });
+     
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  lines1.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  lines1.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  lines2.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  lines2.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  bars1.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  bars1.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  bars2.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  bars2.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  stack1.dispatch.on('tooltipShow', function(e) {
+    // disable tooltips when value ~= 0
+    // TODO: consider removing points from voronoi that have 0 value instead of this hack
+    if (!Math.round(stack1.y()(e.point) * 100)) {  // a fixed factor of 100 misbehaves for very small values; ideally this value should be derived from the data range
+      setTimeout(function() { d3.selectAll('.point.hover').classed('hover', false) }, 0);
+      return false;
+    }
+
+    e.pos = [e.pos[0] + margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  stack1.dispatch.on('tooltipHide', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  stack2.dispatch.on('tooltipShow', function(e) {
+    // disable tooltips when value ~= 0
+    // TODO: consider removing points from voronoi that have 0 value instead of this hack
+    if (!Math.round(stack2.y()(e.point) * 100)) {  // a fixed factor of 100 misbehaves for very small values; ideally this value should be derived from the data range
+      setTimeout(function() { d3.selectAll('.point.hover').classed('hover', false) }, 0);
+      return false;
+    }
+
+    e.pos = [e.pos[0] + margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  stack2.dispatch.on('tooltipHide', function(e) {
+    dispatch.tooltipHide(e);
+  });
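+  // Numeric illustration of the ~0 guard above: a point value of 0.004 gives
+  // Math.round(0.004 * 100) === 0, so its tooltip is suppressed, while 0.02
+  // gives Math.round(2) === 2 and the tooltip fires normally.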
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+
+
+  //============================================================
+  // Global getters and setters
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+  chart.lines1 = lines1;
+  chart.lines2 = lines2;
+  chart.bars1 = bars1;
+  chart.bars2 = bars2;
+  chart.stack1 = stack1;
+  chart.stack2 = stack2;
+  chart.xAxis = xAxis;
+  chart.yAxis1 = yAxis1;
+  chart.yAxis2 = yAxis2;
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.x = function(_) {
+    if (!arguments.length) return lines1.x(); // `getX` was never declared in this model, so delegate to the sub-model accessor
+    lines1.x(_);
+    bars1.x(_);
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return lines1.y(); // likewise for `getY`
+    lines1.y(_);
+    bars1.y(_);
+    return chart;
+  };
+
+  chart.yDomain1 = function(_) {
+    if (!arguments.length) return yDomain1;
+    yDomain1 = _;
+    return chart;
+  };
+
+  chart.yDomain2 = function(_) {
+    if (!arguments.length) return yDomain2;
+    yDomain2 = _;
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin = _;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = _;
+    legend.color(_);
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  return chart;
+}
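+// Illustrative sketch (not upstream code): multiChart routes each series by
+// its `type` ('line' | 'bar' | 'area') and `yAxis` (1 or 2) fields, so a
+// minimal call looks like this (container selector assumed):
+//
+//   var data = [
+//     {key: 'Revenue', type: 'bar',  yAxis: 1, values: [{x: 0, y: 10},  {x: 1, y: 14}]},
+//     {key: 'Margin',  type: 'line', yAxis: 2, values: [{x: 0, y: 0.2}, {x: 1, y: 0.3}]}
+//   ];
+//   d3.select('#chart svg').datum(data).call(nv.models.multiChart());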
+
+
+nv.models.ohlcBar = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 960
+    , height = 500
+    , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one
+    , x = d3.scale.linear()
+    , y = d3.scale.linear()
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , getOpen = function(d) { return d.open }
+    , getClose = function(d) { return d.close }
+    , getHigh = function(d) { return d.high }
+    , getLow = function(d) { return d.low }
+    , forceX = []
+    , forceY = []
+    , padData     = false // If true, adds half a data point's width to the front and back, for lining up a line chart with a bar chart
+    , clipEdge = true
+    , color = nv.utils.defaultColor()
+    , xDomain
+    , yDomain
+    , xRange
+    , yRange
+    , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout')
+    ;
+
+  //============================================================
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  //TODO: store old scales for transitions
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x   .domain(xDomain || d3.extent(data[0].values.map(getX).concat(forceX) ));
+
+      if (padData)
+        x.range(xRange || [availableWidth * .5 / data[0].values.length, availableWidth * (data[0].values.length - .5)  / data[0].values.length ]);
+      else
+        x.range(xRange || [0, availableWidth]);
+
+      y   .domain(yDomain || [
+            d3.min(data[0].values.map(getLow).concat(forceY)),
+            d3.max(data[0].values.map(getHigh).concat(forceY))
+          ])
+          .range(yRange || [availableHeight, 0]);
+
+      // If the scale's domain has no extent (a single value), widen it slightly so the chart can still show a single data point
+      if (x.domain()[0] === x.domain()[1])
+        x.domain()[0] ?
+            x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])
+          : x.domain([-1,1]);
+
+      if (y.domain()[0] === y.domain()[1])
+        y.domain()[0] ?
+            y.domain([y.domain()[0] + y.domain()[0] * 0.01, y.domain()[1] - y.domain()[1] * 0.01])
+          : y.domain([-1,1]);
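+      // Example: a one-point x domain of [5, 5] widens by 1% to [4.95, 5.05];
+      // a degenerate domain at zero falls back to [-1, 1].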
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = d3.select(this).selectAll('g.nv-wrap.nv-ohlcBar').data([data[0].values]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-ohlcBar');
+      var defsEnter = wrapEnter.append('defs');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-ticks');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+      container
+          .on('click', function(d,i) {
+            dispatch.chartClick({
+                data: d,
+                index: i,
+                pos: d3.event,
+                id: id
+            });
+          });
+
+
+      defsEnter.append('clipPath')
+          .attr('id', 'nv-chart-clip-path-' + id)
+        .append('rect');
+
+      wrap.select('#nv-chart-clip-path-' + id + ' rect')
+          .attr('width', availableWidth)
+          .attr('height', availableHeight);
+
+      g   .attr('clip-path', clipEdge ? 'url(#nv-chart-clip-path-' + id + ')' : '');
+
+
+
+      var ticks = wrap.select('.nv-ticks').selectAll('.nv-tick')
+          .data(function(d) { return d });
+
+      ticks.exit().remove();
+
+
+      var ticksEnter = ticks.enter().append('path')
+          .attr('class', function(d,i,j) { return (getOpen(d,i) > getClose(d,i) ? 'nv-tick negative' : 'nv-tick positive') + ' nv-tick-' + j + '-' + i })
+          .attr('d', function(d,i) {
+            var w = (availableWidth / data[0].values.length) * .9;
+            return 'm0,0l0,'
+                 + (y(getOpen(d,i))
+                 - y(getHigh(d,i)))
+                 + 'l'
+                 + (-w/2)
+                 + ',0l'
+                 + (w/2)
+                 + ',0l0,'
+                 + (y(getLow(d,i)) - y(getOpen(d,i)))
+                 + 'l0,'
+                 + (y(getClose(d,i))
+                 - y(getLow(d,i)))
+                 + 'l'
+                 + (w/2)
+                 + ',0l'
+                 + (-w/2)
+                 + ',0z';
+          })
+          .attr('transform', function(d,i) { return 'translate(' + x(getX(d,i)) + ',' + y(getHigh(d,i)) + ')'; })
+          //.attr('fill', function(d,i) { return color[0]; })
+          //.attr('stroke', function(d,i) { return color[0]; })
+          //.attr('x', 0 )
+          //.attr('y', function(d,i) {  return y(Math.max(0, getY(d,i))) })
+          //.attr('height', function(d,i) { return Math.abs(y(getY(d,i)) - y(0)) })
+          .on('mouseover', function(d,i) {
+            d3.select(this).classed('hover', true);
+            dispatch.elementMouseover({
+                point: d,
+                series: data[0],
+                pos: [x(getX(d,i)), y(getY(d,i))],  // TODO: Figure out why the value appears to be shifted
+                pointIndex: i,
+                seriesIndex: 0,
+                e: d3.event
+            });
+
+          })
+          .on('mouseout', function(d,i) {
+                d3.select(this).classed('hover', false);
+                dispatch.elementMouseout({
+                    point: d,
+                    series: data[0],
+                    pointIndex: i,
+                    seriesIndex: 0,
+                    e: d3.event
+                });
+          })
+          .on('click', function(d,i) {
+                dispatch.elementClick({
+                    //label: d[label],
+                    value: getY(d,i),
+                    data: d,
+                    index: i,
+                    pos: [x(getX(d,i)), y(getY(d,i))],
+                    e: d3.event,
+                    id: id
+                });
+              d3.event.stopPropagation();
+          })
+          .on('dblclick', function(d,i) {
+              dispatch.elementDblClick({
+                  //label: d[label],
+                  value: getY(d,i),
+                  data: d,
+                  index: i,
+                  pos: [x(getX(d,i)), y(getY(d,i))],
+                  e: d3.event,
+                  id: id
+              });
+              d3.event.stopPropagation();
+          });
+
+      ticks
+          .attr('class', function(d,i,j) { return (getOpen(d,i) > getClose(d,i) ? 'nv-tick negative' : 'nv-tick positive') + ' nv-tick-' + j + '-' + i })
+      d3.transition(ticks)
+          .attr('transform', function(d,i) { return 'translate(' + x(getX(d,i)) + ',' + y(getHigh(d,i)) + ')'; })
+          .attr('d', function(d,i) {
+            var w = (availableWidth / data[0].values.length) * .9;
+            return 'm0,0l0,'
+                 + (y(getOpen(d,i))
+                 - y(getHigh(d,i)))
+                 + 'l'
+                 + (-w/2)
+                 + ',0l'
+                 + (w/2)
+                 + ',0l0,'
+                 + (y(getLow(d,i))
+                 - y(getOpen(d,i)))
+                 + 'l0,'
+                 + (y(getClose(d,i))
+                 - y(getLow(d,i)))
+                 + 'l'
+                 + (w/2)
+                 + ',0l'
+                 + (-w/2)
+                 + ',0z';
+          })
+          //.attr('width', (availableWidth / data[0].values.length) * .9 )
+
+
+      //d3.transition(ticks)
+          //.attr('y', function(d,i) {  return y(Math.max(0, getY(d,i))) })
+          //.attr('height', function(d,i) { return Math.abs(y(getY(d,i)) - y(0)) });
+          //.order();  // not sure if this makes any sense for this model
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = _;
+    return chart;
+  };
+
+  chart.open = function(_) {
+    if (!arguments.length) return getOpen;
+    getOpen = _;
+    return chart;
+  };
+
+  chart.close = function(_) {
+    if (!arguments.length) return getClose;
+    getClose = _;
+    return chart;
+  };
+
+  chart.high = function(_) {
+    if (!arguments.length) return getHigh;
+    getHigh = _;
+    return chart;
+  };
+
+  chart.low = function(_) {
+    if (!arguments.length) return getLow;
+    getLow = _;
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.xScale = function(_) {
+    if (!arguments.length) return x;
+    x = _;
+    return chart;
+  };
+
+  chart.yScale = function(_) {
+    if (!arguments.length) return y;
+    y = _;
+    return chart;
+  };
+
+  chart.xDomain = function(_) {
+    if (!arguments.length) return xDomain;
+    xDomain = _;
+    return chart;
+  };
+
+  chart.yDomain = function(_) {
+    if (!arguments.length) return yDomain;
+    yDomain = _;
+    return chart;
+  };
+
+  chart.xRange = function(_) {
+    if (!arguments.length) return xRange;
+    xRange = _;
+    return chart;
+  };
+
+  chart.yRange = function(_) {
+    if (!arguments.length) return yRange;
+    yRange = _;
+    return chart;
+  };
+
+  chart.forceX = function(_) {
+    if (!arguments.length) return forceX;
+    forceX = _;
+    return chart;
+  };
+
+  chart.forceY = function(_) {
+    if (!arguments.length) return forceY;
+    forceY = _;
+    return chart;
+  };
+
+  chart.padData = function(_) {
+    if (!arguments.length) return padData;
+    padData = _;
+    return chart;
+  };
+
+  chart.clipEdge = function(_) {
+    if (!arguments.length) return clipEdge;
+    clipEdge = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.id = function(_) {
+    if (!arguments.length) return id;
+    id = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
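+// Illustrative sketch (not upstream code): ohlcBar renders a single series,
+// data[0].values, and the default accessors expect points shaped like
+// {x, y, open, close, high, low} (container selector assumed):
+//
+//   var bars = nv.models.ohlcBar().width(800).height(400);
+//   var data = [{values: [
+//     {x: 0, y: 102, open: 100, close: 102, high: 105, low: 99}
+//   ]}];
+//   d3.select('#chart svg').datum(data).call(bars);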
+nv.models.pie = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 500
+    , height = 500
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , getDescription = function(d) { return d.description }
+    , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one
+    , color = nv.utils.defaultColor()
+    , valueFormat = d3.format(',.2f')
+    , showLabels = true
+    , pieLabelsOutside = true
+    , donutLabelsOutside = false
+    , labelType = "key"
+    , labelThreshold = .02 //if slice percentage is under this, don't show label
+    , donut = false
+    , labelSunbeamLayout = false
+    , startAngle = false
+    , endAngle = false
+    , donutRatio = 0.5
+    , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout')
+    ;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          radius = Math.min(availableWidth, availableHeight) / 2,
+          arcRadius = radius - (radius / 5),
+          container = d3.select(this);
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      //var wrap = container.selectAll('.nv-wrap.nv-pie').data([data]);
+      var wrap = container.selectAll('.nv-wrap.nv-pie').data(data);
+      var wrapEnter = wrap.enter().append('g').attr('class','nvd3 nv-wrap nv-pie nv-chart-' + id);
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-pie');
+      gEnter.append('g').attr('class', 'nv-pieLabels');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+      g.select('.nv-pie').attr('transform', 'translate(' + availableWidth / 2 + ',' + availableHeight / 2 + ')');
+      g.select('.nv-pieLabels').attr('transform', 'translate(' + availableWidth / 2 + ',' + availableHeight / 2 + ')');
+
+      //------------------------------------------------------------
+
+
+      container
+          .on('click', function(d,i) {
+              dispatch.chartClick({
+                  data: d,
+                  index: i,
+                  pos: d3.event,
+                  id: id
+              });
+          });
+
+
+      var arc = d3.svg.arc()
+                  .outerRadius(arcRadius);
+
+      if (startAngle) arc.startAngle(startAngle);
+      if (endAngle) arc.endAngle(endAngle);
+      if (donut) arc.innerRadius(radius * donutRatio);
+
+      // Setup the Pie chart and choose the data element
+      var pie = d3.layout.pie()
+          .sort(null)
+          .value(function(d) { return d.disabled ? 0 : getY(d) });
+
+      var slices = wrap.select('.nv-pie').selectAll('.nv-slice')
+          .data(pie);
+
+      var pieLabels = wrap.select('.nv-pieLabels').selectAll('.nv-label')
+          .data(pie);
+
+      slices.exit().remove();
+      pieLabels.exit().remove();
+
+      var ae = slices.enter().append('g')
+              .attr('class', 'nv-slice')
+              .on('mouseover', function(d,i){
+                d3.select(this).classed('hover', true);
+                dispatch.elementMouseover({
+                    label: getX(d.data),
+                    value: getY(d.data),
+                    point: d.data,
+                    pointIndex: i,
+                    pos: [d3.event.pageX, d3.event.pageY],
+                    id: id
+                });
+              })
+              .on('mouseout', function(d,i){
+                d3.select(this).classed('hover', false);
+                dispatch.elementMouseout({
+                    label: getX(d.data),
+                    value: getY(d.data),
+                    point: d.data,
+                    index: i,
+                    id: id
+                });
+              })
+              .on('click', function(d,i) {
+                dispatch.elementClick({
+                    label: getX(d.data),
+                    value: getY(d.data),
+                    point: d.data,
+                    index: i,
+                    pos: d3.event,
+                    id: id
+                });
+                d3.event.stopPropagation();
+              })
+              .on('dblclick', function(d,i) {
+                dispatch.elementDblClick({
+                    label: getX(d.data),
+                    value: getY(d.data),
+                    point: d.data,
+                    index: i,
+                    pos: d3.event,
+                    id: id
+                });
+                d3.event.stopPropagation();
+              });
+
+        slices.attr('fill', function(d,i) { return color(d, i); })
+            .attr('stroke', function(d,i) { return color(d, i); });
+
+        var paths = ae.append('path')
+            .each(function(d) { this._current = d; });
+            //.attr('d', arc);
+
+        slices.select('path')
+            .attr('d', arc);
+            
+        if (showLabels) {
+          // This does the normal label
+          var labelsArc = d3.svg.arc().innerRadius(0);
+
+          if (pieLabelsOutside){ labelsArc = arc; }
+
+          if (donutLabelsOutside) { labelsArc = d3.svg.arc().outerRadius(arc.outerRadius()); }
+
+          pieLabels.enter().append("g").classed("nv-label",true)
+            .each(function(d,i) {
+                var group = d3.select(this);
+
+                group
+                  .attr('transform', function(d) {
+                       if (labelSunbeamLayout) {
+                         d.outerRadius = arcRadius + 10; // Set Outer Coordinate
+                         d.innerRadius = arcRadius + 15; // Set Inner Coordinate
+                         var rotateAngle = (d.startAngle + d.endAngle) / 2 * (180 / Math.PI);
+                         if ((d.startAngle+d.endAngle)/2 < Math.PI) {
+                           rotateAngle -= 90;
+                         } else {
+                           rotateAngle += 90;
+                         }
+                         return 'translate(' + labelsArc.centroid(d) + ') rotate(' + rotateAngle + ')';
+                       } else {
+                         d.outerRadius = radius + 10; // Set Outer Coordinate
+                         d.innerRadius = radius + 15; // Set Inner Coordinate
+                         return 'translate(' + labelsArc.centroid(d) + ')'
+                       }
+                  });
+
+                group.append('rect')
+                    .style('stroke', '#fff')
+                    .style('fill', '#fff')
+                    .attr("rx", 3)
+                    .attr("ry", 3);
+
+                group.append('text')
+                    .style('text-anchor', labelSunbeamLayout ? ((d.startAngle + d.endAngle) / 2 < Math.PI ? 'start' : 'end') : 'middle') // center the text on its origin, or begin/end if orthogonally aligned
+                    .style('fill', '#000');
+
+            });
+
+          var labelLocationHash = {};
+          var avgHeight = 14;
+          var avgWidth = 140;
+          var createHashKey = function(coordinates) {
+
+              return Math.floor(coordinates[0]/avgWidth) * avgWidth + ',' + Math.floor(coordinates[1]/avgHeight) * avgHeight;
+          };
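+          // e.g. createHashKey([230, 40]) with avgWidth 140 / avgHeight 14
+          // snaps to '140,28'; two labels landing in the same cell collide,
+          // and the later one is shifted up by avgHeight below.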
+          pieLabels
+                .attr('transform', function(d) {
+                  if (labelSunbeamLayout) {
+                      d.outerRadius = arcRadius + 10; // Set Outer Coordinate
+                      d.innerRadius = arcRadius + 15; // Set Inner Coordinate
+                      var rotateAngle = (d.startAngle + d.endAngle) / 2 * (180 / Math.PI);
+                      if ((d.startAngle+d.endAngle)/2 < Math.PI) {
+                        rotateAngle -= 90;
+                      } else {
+                        rotateAngle += 90;
+                      }
+                      return 'translate(' + labelsArc.centroid(d) + ') rotate(' + rotateAngle + ')';
+                    } else {
+                      d.outerRadius = radius + 10; // Set Outer Coordinate
+                      d.innerRadius = radius + 15; // Set Inner Coordinate
+
+                      /*
+                      Overlapping pie labels are not good. To prevent overlap,
+                      each label location is hashed; if a hash collision
+                      occurs, we assume an overlap and adjust the label's
+                      y-position to remove it.
+                      */
+                      var center = labelsArc.centroid(d);
+                      var hashKey = createHashKey(center);
+                      if (labelLocationHash[hashKey]) {
+                        center[1] -= avgHeight;
+                      }
+                      labelLocationHash[createHashKey(center)] = true;
+                      return 'translate(' + center + ')'
+                    }
+                });
+          pieLabels.select(".nv-label text")
+                .style('text-anchor', labelSunbeamLayout ? ((d.startAngle + d.endAngle) / 2 < Math.PI ? 'start' : 'end') : 'middle') //center the text on it's origin or begin/end if orthogonal aligned
+                .text(function(d, i) {
+                  var percent = (d.endAngle - d.startAngle) / (2 * Math.PI);
+                  var labelTypes = {
+                    "key" : getX(d.data),
+                    "value": getY(d.data),
+                    "percent": d3.format('%')(percent)
+                  };
+                  return (d.value && percent > labelThreshold) ? labelTypes[labelType] : '';
+                });
+        }
+
+
+        // Computes the angle of an arc, converting from radians to degrees.
+        function angle(d) {
+          var a = (d.startAngle + d.endAngle) * 90 / Math.PI - 90;
+          return a > 90 ? a - 180 : a;
+        }
+
+        function arcTween(a) {
+          a.endAngle = isNaN(a.endAngle) ? 0 : a.endAngle;
+          a.startAngle = isNaN(a.startAngle) ? 0 : a.startAngle;
+          if (!donut) a.innerRadius = 0;
+          var i = d3.interpolate(this._current, a);
+          this._current = i(0);
+          return function(t) {
+            return arc(i(t));
+          };
+        }
+
+        function tweenPie(b) {
+          b.innerRadius = 0;
+          var i = d3.interpolate({startAngle: 0, endAngle: 0}, b);
+          return function(t) {
+              return arc(i(t));
+          };
+        }
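+        // Both tweens rely on the `this._current = d` stored when each path
+        // enters (see above): arcTween interpolates from a slice's previous
+        // angles to its new ones, while tweenPie grows a slice outward from a
+        // zero-width arc. They are helpers for d3 attrTween transitions.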
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.values = function(_) {
+    nv.log("pie.values() is no longer supported.");
+    return chart;
+  };
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = _;
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = d3.functor(_);
+    return chart;
+  };
+
+  chart.description = function(_) {
+    if (!arguments.length) return getDescription;
+    getDescription = _;
+    return chart;
+  };
+
+  chart.showLabels = function(_) {
+    if (!arguments.length) return showLabels;
+    showLabels = _;
+    return chart;
+  };
+
+  chart.labelSunbeamLayout = function(_) {
+    if (!arguments.length) return labelSunbeamLayout;
+    labelSunbeamLayout = _;
+    return chart;
+  };
+
+  chart.donutLabelsOutside = function(_) {
+    if (!arguments.length) return donutLabelsOutside;
+    donutLabelsOutside = _;
+    return chart;
+  };
+
+  chart.pieLabelsOutside = function(_) {
+    if (!arguments.length) return pieLabelsOutside;
+    pieLabelsOutside = _;
+    return chart;
+  };
+
+  chart.labelType = function(_) {
+    if (!arguments.length) return labelType;
+    labelType = _ || "key";
+    return chart;
+  };
+
+  chart.donut = function(_) {
+    if (!arguments.length) return donut;
+    donut = _;
+    return chart;
+  };
+
+  chart.donutRatio = function(_) {
+    if (!arguments.length) return donutRatio;
+    donutRatio = _;
+    return chart;
+  };
+
+  chart.startAngle = function(_) {
+    if (!arguments.length) return startAngle;
+    startAngle = _;
+    return chart;
+  };
+
+  chart.endAngle = function(_) {
+    if (!arguments.length) return endAngle;
+    endAngle = _;
+    return chart;
+  };
+
+  chart.id = function(_) {
+    if (!arguments.length) return id;
+    id = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.valueFormat = function(_) {
+    if (!arguments.length) return valueFormat;
+    valueFormat = _;
+    return chart;
+  };
+
+  chart.labelThreshold = function(_) {
+    if (!arguments.length) return labelThreshold;
+    labelThreshold = _;
+    return chart;
+  };
+  //============================================================
+
+
+  return chart;
+}
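+// Illustrative sketch (not upstream code): the label text above resolves
+// through `labelType`, so changing what each slice shows is one setter call:
+//
+//   var pie = nv.models.pie()
+//       .labelType('percent')    // 'key' (default) | 'value' | 'percent'
+//       .labelThreshold(0.05);   // hide labels on slices under 5%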
+nv.models.pieChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var pie = nv.models.pie()
+    , legend = nv.models.legend()
+    ;
+
+  var margin = {top: 30, right: 20, bottom: 20, left: 20}
+    , width = null
+    , height = null
+    , showLegend = true
+    , color = nv.utils.defaultColor()
+    , tooltips = true
+    , tooltip = function(key, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + '</p>'
+      }
+    , state = {}
+    , defaultState = null
+    , noData = "No Data Available."
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    var tooltipLabel = pie.description()(e.point) || pie.x()(e.point);
+    var left = e.pos[0] + ( (offsetElement && offsetElement.offsetLeft) || 0 ),
+        top = e.pos[1] + ( (offsetElement && offsetElement.offsetTop) || 0),
+        y = pie.valueFormat()(pie.y()(e.point)),
+        content = tooltip(tooltipLabel, y, e, chart);
+
+    nv.tooltip.show([left, top], content, e.value < 0 ? 'n' : 's', null, offsetElement);
+  };
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      chart.update = function() { container.call(chart); };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-pieChart').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-pieChart').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-pieWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend
+          .width( availableWidth )
+          .key(pie.x());
+
+        wrap.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        wrap.select('.nv-legendWrap')
+            .attr('transform', 'translate(0,' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      pie
+        .width(availableWidth)
+        .height(availableHeight);
+
+
+      var pieWrap = g.select('.nv-pieWrap')
+          .datum([data]);
+
+      d3.transition(pieWrap).call(pie);
+
+      //------------------------------------------------------------
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      legend.dispatch.on('stateChange', function(newState) {
+        state = newState;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+      pie.dispatch.on('elementMouseout.tooltip', function(e) {
+        dispatch.tooltipHide(e);
+      });
+
+      // Update chart from a state object passed to event handler
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined') {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        chart.update();
+      });
+
+      //============================================================
+
+
+    });
+
+    return chart;
+  }
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  pie.dispatch.on('elementMouseover.tooltip', function(e) {
+    e.pos = [e.pos[0] +  margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  dispatch.on('tooltipShow', function(e) {
+    if (tooltips) showTooltip(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.legend = legend;
+  chart.dispatch = dispatch;
+  chart.pie = pie;
+
+  d3.rebind(chart, pie, 'valueFormat', 'values', 'x', 'y', 'description', 'id', 'showLabels', 'donutLabelsOutside', 'pieLabelsOutside', 'labelType', 'donut', 'donutRatio', 'labelThreshold');
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    pie.color(color);
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
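+// Illustrative sketch (not upstream code): pieChart wraps the pie model with
+// a legend and tooltips; it takes a flat series array and re-wraps it for the
+// inner model (note the `.datum([data])` above). Selector assumed:
+//
+//   var chart = nv.models.pieChart()
+//       .x(function(d) { return d.label; })   // 'x'/'y' are rebound to the pie model
+//       .y(function(d) { return d.count; });
+//   d3.select('#chart svg')
+//       .datum([{label: 'a', count: 3}, {label: 'b', count: 7}])
+//       .call(chart);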
+
+nv.models.scatter = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin       = {top: 0, right: 0, bottom: 0, left: 0}
+    , width        = 960
+    , height       = 500
+    , color        = nv.utils.defaultColor() // chooses color
+    , id           = Math.floor(Math.random() * 100000) //Create semi-unique ID in case user doesn't select one
+    , x            = d3.scale.linear()
+    , y            = d3.scale.linear()
+    , z            = d3.scale.linear() //linear because d3.svg.shape.size is treated as area
+    , getX         = function(d) { return d.x } // accessor to get the x value
+    , getY         = function(d) { return d.y } // accessor to get the y value
+    , getSize      = function(d) { return d.size || 1} // accessor to get the point size
+    , getShape     = function(d) { return d.shape || 'circle' } // accessor to get point shape
+    , onlyCircles  = true // Set to false to use shapes
+    , forceX       = [] // List of numbers to Force into the X scale (ie. 0, or a max / min, etc.)
+    , forceY       = [] // List of numbers to Force into the Y scale
+    , forceSize    = [] // List of numbers to Force into the Size scale
+    , interactive  = true // If true, plots a voronoi overlay for advanced point intersection
+    , pointKey     = null
+    , pointActive  = function(d) { return !d.notActive } // any points that return false will be filtered out
+    , padData      = false // If true, adds half a data point's width to front and back, for lining up a line chart with a bar chart
+    , padDataOuter = .1 //outerPadding to imitate ordinal scale outer padding
+    , clipEdge     = false // if true, masks points within x and y scale
+    , clipVoronoi  = true // if true, masks each point with a circle... can turn off to slightly increase performance
+    , clipRadius   = function() { return 25 } // function to get the radius for voronoi point clips
+    , xDomain      = null // Override x domain (skips the calculation from data)
+    , yDomain      = null // Override y domain
+    , xRange       = null // Override x range
+    , yRange       = null // Override y range
+    , sizeDomain   = null // Override point size domain
+    , sizeRange    = null
+    , singlePoint  = false
+    , dispatch     = d3.dispatch('elementClick', 'elementMouseover', 'elementMouseout')
+    , useVoronoi   = true
+    ;
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var x0, y0, z0 // used to store previous scales
+    , timeoutID
+    , needsUpdate = false // Flag for when the points are visually updating, but the interactive layer is behind, to disable tooltips
+    ;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+      //add series index to each data point for reference
+      data.forEach(function(series, i) {
+        series.values.forEach(function(point) {
+          point.series = i;
+        });
+      });
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      // remap and flatten the data for use in calculating the scales' domains
+      var seriesData = (xDomain && yDomain && sizeDomain) ? [] : // if xDomain, yDomain and sizeDomain are all known there is no need to calculate; if size is constant, remember to set sizeDomain to speed up performance
+            d3.merge(
+              data.map(function(d) {
+                return d.values.map(function(d,i) {
+                  return { x: getX(d,i), y: getY(d,i), size: getSize(d,i) }
+                })
+              })
+            );
+
+      x   .domain(xDomain || d3.extent(seriesData.map(function(d) { return d.x; }).concat(forceX)))
+
+      // When padData is set, inset the x range by half a point width (plus the outer
+      // padding) on each side so points line up with an ordinal-scale bar chart
+      if (padData && data[0])
+        x.range(xRange || [(availableWidth * padDataOuter +  availableWidth) / (2 *data[0].values.length), availableWidth - availableWidth * (1 + padDataOuter) / (2 * data[0].values.length)  ]);
+        //x.range([availableWidth * .5 / data[0].values.length, availableWidth * (data[0].values.length - .5)  / data[0].values.length ]);
+      else
+        x.range(xRange || [0, availableWidth]);
+
+      y   .domain(yDomain || d3.extent(seriesData.map(function(d) { return d.y }).concat(forceY)))
+          .range(yRange || [availableHeight, 0]);
+
+      z   .domain(sizeDomain || d3.extent(seriesData.map(function(d) { return d.size }).concat(forceSize)))
+          .range(sizeRange || [16, 256]);
+
+      // If a scale's domain doesn't span a range, widen it slightly so the chart can still show a single data point
+      if (x.domain()[0] === x.domain()[1] || y.domain()[0] === y.domain()[1]) singlePoint = true;
+      if (x.domain()[0] === x.domain()[1])
+        x.domain()[0] ?
+            x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])
+          : x.domain([-1,1]);
+
+      if (y.domain()[0] === y.domain()[1])
+        y.domain()[0] ?
+            y.domain([y.domain()[0] - y.domain()[0] * 0.01, y.domain()[1] + y.domain()[1] * 0.01])
+          : y.domain([-1,1]);
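+      // e.g. a lone point at x = 5 widens the x domain to [4.95, 5.05]; a lone point at 0 falls back to [-1, 1]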
+
+      if ( isNaN(x.domain()[0])) {
+          x.domain([-1,1]);
+      }
+
+      if ( isNaN(y.domain()[0])) {
+          y.domain([-1,1]);
+      }
+
+
+      x0 = x0 || x;
+      y0 = y0 || y;
+      z0 = z0 || z;
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-scatter').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-scatter nv-chart-' + id + (singlePoint ? ' nv-single-point' : ''));
+      var defsEnter = wrapEnter.append('defs');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-groups');
+      gEnter.append('g').attr('class', 'nv-point-paths');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+      defsEnter.append('clipPath')
+          .attr('id', 'nv-edge-clip-' + id)
+        .append('rect');
+
+      wrap.select('#nv-edge-clip-' + id + ' rect')
+          .attr('width', availableWidth)
+          .attr('height', (availableHeight > 0) ? availableHeight : 0);
+
+      g   .attr('clip-path', clipEdge ? 'url(#nv-edge-clip-' + id + ')' : '');
+
+
+      function updateInteractiveLayer() {
+
+        if (!interactive) return false;
+
+        var eventElements;
+
+        var vertices = d3.merge(data.map(function(group, groupIndex) {
+            return group.values
+              .map(function(point, pointIndex) {
+                // Inject series and point index into each vertex for reference;
+                // duplicate vertices are removed further down because
+                // d3.geom.voronoi cannot handle coincident points.
+                var pX = getX(point,pointIndex);
+                var pY = getY(point,pointIndex);
+
+                return [x(pX),
+                        y(pY),
+                        groupIndex,
+                        pointIndex, point];
+              })
+              .filter(function(pointArray, pointIndex) {
+                return pointActive(pointArray[4], pointIndex); // Issue #237: filter after map so pointIndex stays correct
+              })
+          })
+        );
+
+
+
+        // Build the voronoi overlay; each vertex carries its series and point index for reference
+        if (useVoronoi === true) {
+
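+          // Mask the voronoi paths with circles of clipRadius around each point so
+          // hover targets don't stretch far across empty space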
+          if (clipVoronoi) {
+            var pointClipsEnter = wrap.select('defs').selectAll('.nv-point-clips')
+                .data([id])
+              .enter();
+
+            pointClipsEnter.append('clipPath')
+                  .attr('class', 'nv-point-clips')
+                  .attr('id', 'nv-points-clip-' + id);
+
+            var pointClips = wrap.select('#nv-points-clip-' + id).selectAll('circle')
+                .data(vertices);
+            pointClips.enter().append('circle')
+                .attr('r', clipRadius);
+            pointClips.exit().remove();
+            pointClips
+                .attr('cx', function(d) { return d[0] })
+                .attr('cy', function(d) { return d[1] });
+
+            wrap.select('.nv-point-paths')
+                .attr('clip-path', 'url(#nv-points-clip-' + id + ')');
+          }
+
+
+          if(vertices.length) {
+            // Issue #283 - add 4 dummy corner points because d3.geom.voronoi needs at least 3 points to work
+            vertices.push([x.range()[0] - 20, y.range()[0] - 20, null, null]);
+            vertices.push([x.range()[1] + 20, y.range()[1] + 20, null, null]);
+            vertices.push([x.range()[0] - 20, y.range()[0] + 20, null, null]);
+            vertices.push([x.range()[1] + 20, y.range()[1] - 20, null, null]);
+          }
+
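+          // Clip polygon slightly larger than the drawing area; every voronoi cell is clipped against it below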
+          var bounds = d3.geom.polygon([
+              [-10,-10],
+              [-10,height + 10],
+              [width + 10,height + 10],
+              [width + 10,-10]
+          ]);
+
+          // remove duplicate vertices; d3.geom.voronoi assumes all points are distinct
+          var epsilon = 1e-6; // d3 uses 1e-6 to determine equivalence
+          vertices = vertices.sort(function(a,b){return ((a[0] - b[0]) || (a[1] - b[1]))});
+          for (var i = 0; i < vertices.length - 1; ) {
+              if ((Math.abs(vertices[i][0] - vertices[i+1][0]) < epsilon) &&
+                  (Math.abs(vertices[i][1] - vertices[i+1][1]) < epsilon)) {
+                      vertices.splice(i+1, 1);
+              } else {
+                  i++;
+              }
+          }
+
+          var voronoi = d3.geom.voronoi(vertices).map(function(d, i) {
+              return {
+                'data': bounds.clip(d),
+                'series': vertices[i][2],
+                'point': vertices[i][3]
+              }
+            });
+
+
+          var pointPaths = wrap.select('.nv-point-paths').selectAll('path')
+              .data(voronoi);
+          pointPaths.enter().append('path')
+              .attr('class', function(d,i) { return 'nv-path-'+i; });
+          pointPaths.exit().remove();
+          pointPaths
+              .attr('d', function(d) {
+                if (!d || !d.data || d.data.length === 0)
+                    return 'M 0 0';
+                else
+                    return 'M' + d.data.join('L') + 'Z';
+              });
+
+          var mouseEventCallback = function(d,mDispatch) {
+                if (needsUpdate) return 0;
+                var series = data[d.series];
+                if (typeof series === 'undefined') return;
+
+                var point  = series.values[d.point];
+
+                mDispatch({
+                  point: point,
+                  series: series,
+                  pos: [x(getX(point, d.point)) + margin.left, y(getY(point, d.point)) + margin.top],
+                  seriesIndex: d.series,
+                  pointIndex: d.point
+                });
+          };
+
+          pointPaths
+              .on('click', function(d) {
+                mouseEventCallback(d, dispatch.elementClick);
+              })
+              .on('mouseover', function(d) {
+                mouseEventCallback(d, dispatch.elementMouseover);
+              })
+              .on('mouseout', function(d, i) {
+                mouseEventCallback(d, dispatch.elementMouseout);
+              });
+
+
+        } else {
+          /*
+          // bring data in form needed for click handlers
+          var dataWithPoints = vertices.map(function(d, i) {
+              return {
+                'data': d,
+                'series': vertices[i][2],
+                'point': vertices[i][3]
+              }
+            });
+           */
+
+          // add event handlers to the points themselves instead of the voronoi paths
+          wrap.select('.nv-groups').selectAll('.nv-group')
+            .selectAll('.nv-point')
+              //.data(dataWithPoints)
+              //.style('pointer-events', 'auto') // reactivate events disabled by css
+              .on('click', function(d,i) {
+                //nv.log('test', d, i);
+                if (needsUpdate || !data[d.series]) return 0; //check if this is a dummy point
+                var series = data[d.series],
+                    point  = series.values[i];
+
+                dispatch.elementClick({
+                  point: point,
+                  series: series,
+                  pos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],
+                  seriesIndex: d.series,
+                  pointIndex: i
+                });
+              })
+              .on('mouseover', function(d,i) {
+                if (needsUpdate || !data[d.series]) return 0; //check if this is a dummy point
+                var series = data[d.series],
+                    point  = series.values[i];
+
+                dispatch.elementMouseover({
+                  point: point,
+                  series: series,
+                  pos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],
+                  seriesIndex: d.series,
+                  pointIndex: i
+                });
+              })
+              .on('mouseout', function(d,i) {
+                if (needsUpdate || !data[d.series]) return 0; //check if this is a dummy point
+                var series = data[d.series],
+                    point  = series.values[i];
+
+                dispatch.elementMouseout({
+                  point: point,
+                  series: series,
+                  seriesIndex: d.series,
+                  pointIndex: i
+                });
+              });
+          }
+
+          needsUpdate = false;
+      }
+
+      needsUpdate = true;
+
+      var groups = wrap.select('.nv-groups').selectAll('.nv-group')
+          .data(function(d) { return d }, function(d) { return d.key });
+      groups.enter().append('g')
+          .style('stroke-opacity', 1e-6)
+          .style('fill-opacity', 1e-6);
+      groups.exit()
+          .remove();
+      groups
+          .attr('class', function(d,i) { return 'nv-group nv-series-' + i })
+          .classed('hover', function(d) { return d.hover });
+      groups
+          .style('fill', function(d,i) { return color(d, i) })
+          .style('stroke', function(d,i) { return color(d, i) })
+          .style('stroke-opacity', 1)
+          .style('fill-opacity', .5);
+
+
+      if (onlyCircles) {
+
+        var points = groups.selectAll('circle.nv-point')
+            .data(function(d) { return d.values }, pointKey);
+        points.enter().append('circle')
+            .style('fill', function (d,i) { return d.color })
+            .style('stroke', function (d,i) { return d.color })
+            .attr('cx', function(d,i) { return nv.utils.NaNtoZero(x0(getX(d,i))) })
+            .attr('cy', function(d,i) { return nv.utils.NaNtoZero(y0(getY(d,i))) })
+            .attr('r', function(d,i) { return Math.sqrt(z(getSize(d,i))/Math.PI) });
+        points.exit().remove();
+        groups.exit().selectAll('circle.nv-point')
+            .attr('cx', function(d,i) { return nv.utils.NaNtoZero(x(getX(d,i))) })
+            .attr('cy', function(d,i) { return nv.utils.NaNtoZero(y(getY(d,i))) })
+            .remove();
+        points.each(function(d,i) {
+          d3.select(this)
+            .classed('nv-point', true)
+            .classed('nv-point-' + i, true)
+            .classed('hover',false)
+            ;
+        });
+        points
+            .attr('cx', function(d,i) { return nv.utils.NaNtoZero(x(getX(d,i))) })
+            .attr('cy', function(d,i) { return nv.utils.NaNtoZero(y(getY(d,i))) })
+            .attr('r', function(d,i) { return Math.sqrt(z(getSize(d,i))/Math.PI) });
+
+      } else {
+
+        var points = groups.selectAll('path.nv-point')
+            .data(function(d) { return d.values });
+        points.enter().append('path')
+            .style('fill', function (d,i) { return d.color })
+            .style('stroke', function (d,i) { return d.color })
+            .attr('transform', function(d,i) {
+              return 'translate(' + x0(getX(d,i)) + ',' + y0(getY(d,i)) + ')'
+            })
+            .attr('d',
+              d3.svg.symbol()
+                .type(getShape)
+                .size(function(d,i) { return z(getSize(d,i)) })
+            );
+        points.exit().remove();
+        groups.exit().selectAll('path.nv-point')
+            .attr('transform', function(d,i) {
+              return 'translate(' + x(getX(d,i)) + ',' + y(getY(d,i)) + ')'
+            })
+            .remove();
+        points.each(function(d,i) {
+          d3.select(this)
+            .classed('nv-point', true)
+            .classed('nv-point-' + i, true)
+            .classed('hover',false)
+            ;
+        });
+        points
+            .attr('transform', function(d,i) {
+              //nv.log(d,i,getX(d,i), x(getX(d,i)));
+              return 'translate(' + x(getX(d,i)) + ',' + y(getY(d,i)) + ')'
+            })
+            .attr('d',
+              d3.svg.symbol()
+                .type(getShape)
+                .size(function(d,i) { return z(getSize(d,i)) })
+            );
+      }
+
+
+      // Delay updating the invisible interactive layer for smoother animation
+      clearTimeout(timeoutID); // stop repeat calls to updateInteractiveLayer
+      timeoutID = setTimeout(updateInteractiveLayer, 300);
+      //updateInteractiveLayer();
+
+      //store old scales for use in transitions on update
+      x0 = x.copy();
+      y0 = y.copy();
+      z0 = z.copy();
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+  chart.clearHighlights = function() {
+      //Remove the 'hover' class from all highlighted points.
+      d3.selectAll(".nv-chart-" + id + " .nv-point.hover").classed("hover",false);
+  };
+
+  chart.highlightPoint = function(seriesIndex,pointIndex,isHoverOver) {
+      d3.select(".nv-chart-" + id + " .nv-series-" + seriesIndex + " .nv-point-" + pointIndex)
+          .classed("hover",isHoverOver);
+  };
+
+
+  dispatch.on('elementMouseover.point', function(d) {
+     if (interactive) chart.highlightPoint(d.seriesIndex,d.pointIndex,true);
+  });
+
+  dispatch.on('elementMouseout.point', function(d) {
+     if (interactive) chart.highlightPoint(d.seriesIndex,d.pointIndex,false);
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = d3.functor(_);
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = d3.functor(_);
+    return chart;
+  };
+
+  chart.size = function(_) {
+    if (!arguments.length) return getSize;
+    getSize = d3.functor(_);
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.xScale = function(_) {
+    if (!arguments.length) return x;
+    x = _;
+    return chart;
+  };
+
+  chart.yScale = function(_) {
+    if (!arguments.length) return y;
+    y = _;
+    return chart;
+  };
+
+  chart.zScale = function(_) {
+    if (!arguments.length) return z;
+    z = _;
+    return chart;
+  };
+
+  chart.xDomain = function(_) {
+    if (!arguments.length) return xDomain;
+    xDomain = _;
+    return chart;
+  };
+
+  chart.yDomain = function(_) {
+    if (!arguments.length) return yDomain;
+    yDomain = _;
+    return chart;
+  };
+
+  chart.sizeDomain = function(_) {
+    if (!arguments.length) return sizeDomain;
+    sizeDomain = _;
+    return chart;
+  };
+
+  chart.xRange = function(_) {
+    if (!arguments.length) return xRange;
+    xRange = _;
+    return chart;
+  };
+
+  chart.yRange = function(_) {
+    if (!arguments.length) return yRange;
+    yRange = _;
+    return chart;
+  };
+
+  chart.sizeRange = function(_) {
+    if (!arguments.length) return sizeRange;
+    sizeRange = _;
+    return chart;
+  };
+
+  chart.forceX = function(_) {
+    if (!arguments.length) return forceX;
+    forceX = _;
+    return chart;
+  };
+
+  chart.forceY = function(_) {
+    if (!arguments.length) return forceY;
+    forceY = _;
+    return chart;
+  };
+
+  chart.forceSize = function(_) {
+    if (!arguments.length) return forceSize;
+    forceSize = _;
+    return chart;
+  };
+
+  chart.interactive = function(_) {
+    if (!arguments.length) return interactive;
+    interactive = _;
+    return chart;
+  };
+
+  chart.pointKey = function(_) {
+    if (!arguments.length) return pointKey;
+    pointKey = _;
+    return chart;
+  };
+
+  chart.pointActive = function(_) {
+    if (!arguments.length) return pointActive;
+    pointActive = _;
+    return chart;
+  };
+
+  chart.padData = function(_) {
+    if (!arguments.length) return padData;
+    padData = _;
+    return chart;
+  };
+
+  chart.padDataOuter = function(_) {
+    if (!arguments.length) return padDataOuter;
+    padDataOuter = _;
+    return chart;
+  };
+
+  chart.clipEdge = function(_) {
+    if (!arguments.length) return clipEdge;
+    clipEdge = _;
+    return chart;
+  };
+
+  chart.clipVoronoi= function(_) {
+    if (!arguments.length) return clipVoronoi;
+    clipVoronoi = _;
+    return chart;
+  };
+
+  chart.useVoronoi= function(_) {
+    if (!arguments.length) return useVoronoi;
+    useVoronoi = _;
+    if (useVoronoi === false) {
+        clipVoronoi = false;
+    }
+    return chart;
+  };
+
+  chart.clipRadius = function(_) {
+    if (!arguments.length) return clipRadius;
+    clipRadius = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.shape = function(_) {
+    if (!arguments.length) return getShape;
+    getShape = _;
+    return chart;
+  };
+
+  chart.onlyCircles = function(_) {
+    if (!arguments.length) return onlyCircles;
+    onlyCircles = _;
+    return chart;
+  };
+
+  chart.id = function(_) {
+    if (!arguments.length) return id;
+    id = _;
+    return chart;
+  };
+
+  chart.singlePoint = function(_) {
+    if (!arguments.length) return singlePoint;
+    singlePoint = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
+nv.models.scatterChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var scatter      = nv.models.scatter()
+    , xAxis        = nv.models.axis()
+    , yAxis        = nv.models.axis()
+    , legend       = nv.models.legend()
+    , controls     = nv.models.legend()
+    , distX        = nv.models.distribution()
+    , distY        = nv.models.distribution()
+    ;
+
+  var margin       = {top: 30, right: 20, bottom: 50, left: 75}
+    , width        = null
+    , height       = null
+    , color        = nv.utils.defaultColor()
+    , x            = d3.fisheye ? d3.fisheye.scale(d3.scale.linear).distortion(0) : scatter.xScale() // use the optional d3.fisheye plugin's distortable scale when the plugin is loaded
+    , y            = d3.fisheye ? d3.fisheye.scale(d3.scale.linear).distortion(0) : scatter.yScale()
+    , xPadding     = 0
+    , yPadding     = 0
+    , showDistX    = false
+    , showDistY    = false
+    , showLegend   = true
+    , showXAxis    = true
+    , showYAxis    = true
+    , rightAlignYAxis = false
+    , showControls = !!d3.fisheye
+    , fisheye      = 0
+    , pauseFisheye = false
+    , tooltips     = true
+    , tooltipX     = function(key, x, y) { return '<strong>' + x + '</strong>' }
+    , tooltipY     = function(key, x, y) { return '<strong>' + y + '</strong>' }
+    , tooltip      = null
+    , state = {}
+    , defaultState = null
+    , dispatch     = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    , noData       = "No Data Available."
+    , transitionDuration = 0
+    ;
+
+  scatter
+    .xScale(x)
+    .yScale(y)
+    ;
+  xAxis
+    .orient('bottom')
+    .tickPadding(10)
+    ;
+  yAxis
+    .orient((rightAlignYAxis) ? 'right' : 'left')
+    .tickPadding(10)
+    ;
+  distX
+    .axis('x')
+    ;
+  distY
+    .axis('y')
+    ;
+
+  controls.updateState(false); // the 'Magnify' control toggles fisheye locally and must not alter shared chart state
+
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var x0, y0;
+
+  var showTooltip = function(e, offsetElement) {
+    //TODO: make tooltip style an option between single or dual on axes (maybe on all charts with axes?)
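+    // Up to three tooltips can show at once: one pinned to the x axis, one pinned to
+    // the y axis, and, when a combined 'tooltip' generator is set, one at the point itself.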
+
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        leftX = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        topX = y.range()[0] + margin.top + ( offsetElement.offsetTop || 0),
+        leftY = x.range()[0] + margin.left + ( offsetElement.offsetLeft || 0 ),
+        topY = e.pos[1] + ( offsetElement.offsetTop || 0),
+        xVal = xAxis.tickFormat()(scatter.x()(e.point, e.pointIndex)),
+        yVal = yAxis.tickFormat()(scatter.y()(e.point, e.pointIndex));
+
+      if( tooltipX != null )
+          nv.tooltip.show([leftX, topX], tooltipX(e.series.key, xVal, yVal, e, chart), 'n', 1, offsetElement, 'x-nvtooltip');
+      if( tooltipY != null )
+          nv.tooltip.show([leftY, topY], tooltipY(e.series.key, xVal, yVal, e, chart), 'e', 1, offsetElement, 'y-nvtooltip');
+      if( tooltip != null )
+          nv.tooltip.show([left, top], tooltip(e.series.key, xVal, yVal, e, chart), e.value < 0 ? 'n' : 's', null, offsetElement);
+  };
+
+  var controlsData = [
+    { key: 'Magnify', disabled: true }
+  ];
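+
+  // Typical usage (a sketch; the selector and data are illustrative):
+  //   nv.addGraph(function() {
+  //     var chart = nv.models.scatterChart()
+  //         .showDistX(true)
+  //         .showDistY(true);
+  //     d3.select('#chart svg').datum(data).call(chart);
+  //     return chart;
+  //   });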
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      chart.update = function() { container.call(chart); };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      //------------------------------------------------------------
+      // Display noData message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x0 = x0 || x;
+      y0 = y0 || y;
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-scatterChart').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-scatterChart nv-chart-' + scatter.id());
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      // background for pointer events
+      gEnter.append('rect').attr('class', 'nvd3 nv-background');
+
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y nv-axis');
+      gEnter.append('g').attr('class', 'nv-scatterWrap');
+      gEnter.append('g').attr('class', 'nv-distWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+      gEnter.append('g').attr('class', 'nv-controlsWrap');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        var legendWidth = (showControls) ? availableWidth / 2 : availableWidth;
+        legend.width(legendWidth);
+
+        wrap.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        wrap.select('.nv-legendWrap')
+            .attr('transform', 'translate(' + (availableWidth - legendWidth) + ',' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Controls
+
+      if (showControls) {
+        controls.width(180).color(['#444']);
+        g.select('.nv-controlsWrap')
+            .datum(controlsData)
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+            .call(controls);
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      if (rightAlignYAxis) {
+          g.select(".nv-y.nv-axis")
+              .attr("transform", "translate(" + availableWidth + ",0)");
+      }
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      scatter
+          .width(availableWidth)
+          .height(availableHeight)
+          .color(data.map(function(d,i) {
+            return d.color || color(d, i);
+          }).filter(function(d,i) { return !data[i].disabled }));
+
+      if (xPadding !== 0)
+        scatter.xDomain(null);
+
+      if (yPadding !== 0)
+        scatter.yDomain(null);
+
+      wrap.select('.nv-scatterWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+          .call(scatter);
+
+      //Adjust for x and y padding
+      if (xPadding !== 0) {
+        var xRange = x.domain()[1] - x.domain()[0];
+        scatter.xDomain([x.domain()[0] - (xPadding * xRange), x.domain()[1] + (xPadding * xRange)]);
+      }
+
+      if (yPadding !== 0) {
+        var yRange = y.domain()[1] - y.domain()[0];
+        scatter.yDomain([y.domain()[0] - (yPadding * yRange), y.domain()[1] + (yPadding * yRange)]);
+      }
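+      // e.g. xPadding = 0.1 widens an x domain of [0, 10] to [-1, 11]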
+
+      //Only need to update the scatter again if x/yPadding changed the domain.
+      if (yPadding !== 0 || xPadding !== 0) {
+        wrap.select('.nv-scatterWrap')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(scatter);
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+      if (showXAxis) {
+        xAxis
+            .scale(x)
+            .ticks( xAxis.ticks() && xAxis.ticks().length ? xAxis.ticks() : availableWidth / 100 )
+            .tickSize( -availableHeight , 0);
+
+        g.select('.nv-x.nv-axis')
+            .attr('transform', 'translate(0,' + y.range()[0] + ')')
+            .call(xAxis);
+
+      }
+
+      if (showYAxis) {
+        yAxis
+            .scale(y)
+            .ticks( yAxis.ticks() && yAxis.ticks().length ? yAxis.ticks() : availableHeight / 36 )
+            .tickSize( -availableWidth, 0);
+
+        g.select('.nv-y.nv-axis')
+            .call(yAxis);
+      }
+
+
+      if (showDistX) {
+        distX
+            .getData(scatter.x())
+            .scale(x)
+            .width(availableWidth)
+            .color(data.map(function(d,i) {
+              return d.color || color(d, i);
+            }).filter(function(d,i) { return !data[i].disabled }));
+        gEnter.select('.nv-distWrap').append('g')
+            .attr('class', 'nv-distributionX');
+        g.select('.nv-distributionX')
+            .attr('transform', 'translate(0,' + y.range()[0] + ')')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(distX);
+      }
+
+      if (showDistY) {
+        distY
+            .getData(scatter.y())
+            .scale(y)
+            .width(availableHeight)
+            .color(data.map(function(d,i) {
+              return d.color || color(d, i);
+            }).filter(function(d,i) { return !data[i].disabled }));
+        gEnter.select('.nv-distWrap').append('g')
+            .attr('class', 'nv-distributionY');
+        g.select('.nv-distributionY')
+            .attr('transform', 
+              'translate(' + (rightAlignYAxis ? availableWidth : -distY.size() ) + ',0)')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(distY);
+      }
+
+      //------------------------------------------------------------
+
+
+
+
+      if (d3.fisheye) {
+        g.select('.nv-background')
+            .attr('width', availableWidth)
+            .attr('height', availableHeight);
+
+        g.select('.nv-background').on('mousemove', updateFisheye);
+        g.select('.nv-background').on('click', function() { pauseFisheye = !pauseFisheye;});
+        scatter.dispatch.on('elementClick.freezeFisheye', function() {
+          pauseFisheye = !pauseFisheye;
+        });
+      }
+
+
+      function updateFisheye() {
+        if (pauseFisheye) {
+          g.select('.nv-point-paths').style('pointer-events', 'all');
+          return false;
+        }
+
+        g.select('.nv-point-paths').style('pointer-events', 'none' );
+
+        var mouse = d3.mouse(this);
+        x.distortion(fisheye).focus(mouse[0]);
+        y.distortion(fisheye).focus(mouse[1]);
+
+        g.select('.nv-scatterWrap')
+            .call(scatter);
+
+        if (showXAxis)
+          g.select('.nv-x.nv-axis').call(xAxis);
+        
+        if (showYAxis)
+          g.select('.nv-y.nv-axis').call(yAxis);
+        
+        g.select('.nv-distributionX')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(distX);
+        g.select('.nv-distributionY')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(distY);
+      }
+
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      controls.dispatch.on('legendClick', function(d,i) {
+        d.disabled = !d.disabled;
+
+        fisheye = d.disabled ? 0 : 2.5;
+        g.select('.nv-background') .style('pointer-events', d.disabled ? 'none' : 'all');
+        g.select('.nv-point-paths').style('pointer-events', d.disabled ? 'all' : 'none' );
+
+        if (d.disabled) {
+          x.distortion(fisheye).focus(0);
+          y.distortion(fisheye).focus(0);
+
+          g.select('.nv-scatterWrap').call(scatter);
+          g.select('.nv-x.nv-axis').call(xAxis);
+          g.select('.nv-y.nv-axis').call(yAxis);
+        } else {
+          pauseFisheye = false;
+        }
+
+        chart.update();
+      });
+
+      legend.dispatch.on('stateChange', function(newState) {
+        state.disabled = newState.disabled;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+      scatter.dispatch.on('elementMouseover.tooltip', function(e) {
+        d3.select('.nv-chart-' + scatter.id() + ' .nv-series-' + e.seriesIndex + ' .nv-distx-' + e.pointIndex)
+            .attr('y1', function(d,i) { return e.pos[1] - availableHeight;});
+        d3.select('.nv-chart-' + scatter.id() + ' .nv-series-' + e.seriesIndex + ' .nv-disty-' + e.pointIndex)
+            .attr('x2', e.pos[0] + distX.size());
+
+        e.pos = [e.pos[0] + margin.left, e.pos[1] + margin.top];
+        dispatch.tooltipShow(e);
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+      // Update chart from a state object passed to event handler
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined') {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        chart.update();
+      });
+
+      //============================================================
+
+
+      //store old scales for use in transitions on update
+      x0 = x.copy();
+      y0 = y.copy();
+
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  scatter.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+
+    d3.select('.nv-chart-' + scatter.id() + ' .nv-series-' + e.seriesIndex + ' .nv-distx-' + e.pointIndex)
+        .attr('y1', 0);
+    d3.select('.nv-chart-' + scatter.id() + ' .nv-series-' + e.seriesIndex + ' .nv-disty-' + e.pointIndex)
+        .attr('x2', distY.size());
+  });
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.scatter = scatter;
+  chart.legend = legend;
+  chart.controls = controls;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+  chart.distX = distX;
+  chart.distY = distY;
+
+  d3.rebind(chart, scatter, 'id', 'interactive', 'pointActive', 'x', 'y', 'shape', 'size', 'xScale', 'yScale', 'zScale', 'xDomain', 'yDomain', 'xRange', 'yRange', 'sizeDomain', 'sizeRange', 'forceX', 'forceY', 'forceSize', 'clipVoronoi', 'clipRadius', 'useVoronoi');
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    distX.color(color);
+    distY.color(color);
+    return chart;
+  };
+
+  chart.showDistX = function(_) {
+    if (!arguments.length) return showDistX;
+    showDistX = _;
+    return chart;
+  };
+
+  chart.showDistY = function(_) {
+    if (!arguments.length) return showDistY;
+    showDistY = _;
+    return chart;
+  };
+
+  chart.showControls = function(_) {
+    if (!arguments.length) return showControls;
+    showControls = _;
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.rightAlignYAxis = function(_) {
+    if(!arguments.length) return rightAlignYAxis;
+    rightAlignYAxis = _;
+    yAxis.orient( (_) ? 'right' : 'left');
+    return chart;
+  };
+
+
+  chart.fisheye = function(_) {
+    if (!arguments.length) return fisheye;
+    fisheye = _;
+    return chart;
+  };
+
+  chart.xPadding = function(_) {
+    if (!arguments.length) return xPadding;
+    xPadding = _;
+    return chart;
+  };
+
+  chart.yPadding = function(_) {
+    if (!arguments.length) return yPadding;
+    yPadding = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.tooltipXContent = function(_) {
+    if (!arguments.length) return tooltipX;
+    tooltipX = _;
+    return chart;
+  };
+
+  chart.tooltipYContent = function(_) {
+    if (!arguments.length) return tooltipY;
+    tooltipY = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+  
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
+
+nv.models.scatterPlusLineChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var scatter      = nv.models.scatter()
+    , xAxis        = nv.models.axis()
+    , yAxis        = nv.models.axis()
+    , legend       = nv.models.legend()
+    , controls     = nv.models.legend()
+    , distX        = nv.models.distribution()
+    , distY        = nv.models.distribution()
+    ;
+
+  var margin       = {top: 30, right: 20, bottom: 50, left: 75}
+    , width        = null
+    , height       = null
+    , color        = nv.utils.defaultColor()
+    , x            = d3.fisheye ? d3.fisheye.scale(d3.scale.linear).distortion(0) : scatter.xScale()
+    , y            = d3.fisheye ? d3.fisheye.scale(d3.scale.linear).distortion(0) : scatter.yScale()
+    , showDistX    = false
+    , showDistY    = false
+    , showLegend   = true
+    , showXAxis    = true
+    , showYAxis    = true
+    , rightAlignYAxis = false
+    , showControls = !!d3.fisheye
+    , fisheye      = 0
+    , pauseFisheye = false
+    , tooltips     = true
+    , tooltipX     = function(key, x, y) { return '<strong>' + x + '</strong>' }
+    , tooltipY     = function(key, x, y) { return '<strong>' + y + '</strong>' }
+    , tooltip      = function(key, x, y, date) { return '<h3>' + key + '</h3>' 
+                                                      + '<p>' + date + '</p>' }
+    , state = {}
+    , defaultState = null
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    , noData       = "No Data Available."
+    , transitionDuration = 0
+    ;
+
+  scatter
+    .xScale(x)
+    .yScale(y)
+    ;
+  xAxis
+    .orient('bottom')
+    .tickPadding(10)
+    ;
+  yAxis
+    .orient((rightAlignYAxis) ? 'right' : 'left')
+    .tickPadding(10)
+    ;
+  distX
+    .axis('x')
+    ;
+  distY
+    .axis('y')
+    ;
+  
+  controls.updateState(false);
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var x0, y0;
+
+  var showTooltip = function(e, offsetElement) {
+    //TODO: make tooltip style an option between single or dual on axes (maybe on all charts with axes?)
+
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        leftX = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        topX = y.range()[0] + margin.top + ( offsetElement.offsetTop || 0),
+        leftY = x.range()[0] + margin.left + ( offsetElement.offsetLeft || 0 ),
+        topY = e.pos[1] + ( offsetElement.offsetTop || 0),
+        xVal = xAxis.tickFormat()(scatter.x()(e.point, e.pointIndex)),
+        yVal = yAxis.tickFormat()(scatter.y()(e.point, e.pointIndex));
+
+      if( tooltipX != null )
+          nv.tooltip.show([leftX, topX], tooltipX(e.series.key, xVal, yVal, e, chart), 'n', 1, offsetElement, 'x-nvtooltip');
+      if( tooltipY != null )
+          nv.tooltip.show([leftY, topY], tooltipY(e.series.key, xVal, yVal, e, chart), 'e', 1, offsetElement, 'y-nvtooltip');
+      if( tooltip != null )
+          nv.tooltip.show([left, top], tooltip(e.series.key, xVal, yVal, e.point.tooltip, e, chart), e.value < 0 ? 'n' : 's', null, offsetElement);
+  };
+
+  var controlsData = [
+    { key: 'Magnify', disabled: true }
+  ];
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      chart.update = function() { container.call(chart); };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      //------------------------------------------------------------
+      // Display noData message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = scatter.xScale();
+      y = scatter.yScale();
+
+      x0 = x0 || x;
+      y0 = y0 || y;
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-scatterChart').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-scatterChart nv-chart-' + scatter.id());
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g')
+
+      // background for pointer events
+      gEnter.append('rect').attr('class', 'nvd3 nv-background').style("pointer-events","none");
+
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y nv-axis');
+      gEnter.append('g').attr('class', 'nv-scatterWrap');
+      gEnter.append('g').attr('class', 'nv-regressionLinesWrap');
+      gEnter.append('g').attr('class', 'nv-distWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+      gEnter.append('g').attr('class', 'nv-controlsWrap');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      if (rightAlignYAxis) {
+          g.select(".nv-y.nv-axis")
+              .attr("transform", "translate(" + availableWidth + ",0)");
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        legend.width( availableWidth / 2 );
+
+        wrap.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        wrap.select('.nv-legendWrap')
+            .attr('transform', 'translate(' + (availableWidth / 2) + ',' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Controls
+
+      if (showControls) {
+        controls.width(180).color(['#444']);
+        g.select('.nv-controlsWrap')
+            .datum(controlsData)
+            .attr('transform', 'translate(0,' + (-margin.top) +')')
+            .call(controls);
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      scatter
+          .width(availableWidth)
+          .height(availableHeight)
+          .color(data.map(function(d,i) {
+            return d.color || color(d, i);
+          }).filter(function(d,i) { return !data[i].disabled }))
+
+      wrap.select('.nv-scatterWrap')
+          .datum(data.filter(function(d) { return !d.disabled }))
+          .call(scatter);
+
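+      // Each series may optionally carry 'slope' and 'intercept'; when both are present,
+      // a regression line y = slope * x + intercept is drawn across the full x range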
+      wrap.select('.nv-regressionLinesWrap')
+          .attr('clip-path', 'url(#nv-edge-clip-' + scatter.id() + ')');
+
+      var regWrap = wrap.select('.nv-regressionLinesWrap').selectAll('.nv-regLines')
+                      .data(function(d) {return d });
+      
+      regWrap.enter().append('g').attr('class', 'nv-regLines');
+
+      var regLine = regWrap.selectAll('.nv-regLine').data(function(d){return [d]});
+      var regLineEnter = regLine.enter()
+                       .append('line').attr('class', 'nv-regLine')
+                       .style('stroke-opacity', 0);
+
+      regLine
+          .attr('x1', x.range()[0])
+          .attr('x2', x.range()[1])
+          .attr('y1', function(d,i) {return y(x.domain()[0] * d.slope + d.intercept) })
+          .attr('y2', function(d,i) { return y(x.domain()[1] * d.slope + d.intercept) })
+          .style('stroke', function(d,i,j) { return color(d,j) })
+          .style('stroke-opacity', function(d,i) {
+            return (d.disabled || typeof d.slope === 'undefined' || typeof d.intercept === 'undefined') ? 0 : 1 
+          });
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      if (showXAxis) {
+        xAxis
+            .scale(x)
+            .ticks( xAxis.ticks() ? xAxis.ticks() : availableWidth / 100 )
+            .tickSize( -availableHeight , 0);
+
+        g.select('.nv-x.nv-axis')
+            .attr('transform', 'translate(0,' + y.range()[0] + ')')
+            .call(xAxis);
+      }
+
+      if (showYAxis) {
+        yAxis
+            .scale(y)
+            .ticks( yAxis.ticks() ? yAxis.ticks() : availableHeight / 36 )
+            .tickSize( -availableWidth, 0);
+
+        g.select('.nv-y.nv-axis')
+            .call(yAxis);
+      }
+
+
+      if (showDistX) {
+        distX
+            .getData(scatter.x())
+            .scale(x)
+            .width(availableWidth)
+            .color(data.map(function(d,i) {
+              return d.color || color(d, i);
+            }).filter(function(d,i) { return !data[i].disabled }));
+        gEnter.select('.nv-distWrap').append('g')
+            .attr('class', 'nv-distributionX');
+        g.select('.nv-distributionX')
+            .attr('transform', 'translate(0,' + y.range()[0] + ')')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(distX);
+      }
+
+      if (showDistY) {
+        distY
+            .getData(scatter.y())
+            .scale(y)
+            .width(availableHeight)
+            .color(data.map(function(d,i) {
+              return d.color || color(d, i);
+            }).filter(function(d,i) { return !data[i].disabled }));
+        gEnter.select('.nv-distWrap').append('g')
+            .attr('class', 'nv-distributionY');
+        g.select('.nv-distributionY')
+            .attr('transform', 'translate(' + (rightAlignYAxis ? availableWidth : -distY.size() ) + ',0)')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(distY);
+      }
+
+      //------------------------------------------------------------
+
+
+
+
+      if (d3.fisheye) {
+        g.select('.nv-background')
+            .attr('width', availableWidth)
+            .attr('height', availableHeight)
+            ;
+
+        g.select('.nv-background').on('mousemove', updateFisheye);
+        g.select('.nv-background').on('click', function() { pauseFisheye = !pauseFisheye;});
+        scatter.dispatch.on('elementClick.freezeFisheye', function() {
+          pauseFisheye = !pauseFisheye;
+        });
+      }
+
+
+      function updateFisheye() {
+        if (pauseFisheye) {
+          g.select('.nv-point-paths').style('pointer-events', 'all');
+          return false;
+        }
+
+        g.select('.nv-point-paths').style('pointer-events', 'none' );
+
+        var mouse = d3.mouse(this);
+        x.distortion(fisheye).focus(mouse[0]);
+        y.distortion(fisheye).focus(mouse[1]);
+
+        g.select('.nv-scatterWrap')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(scatter);
+
+        if (showXAxis)
+          g.select('.nv-x.nv-axis').call(xAxis);
+
+        if (showYAxis)
+          g.select('.nv-y.nv-axis').call(yAxis);
+        
+        g.select('.nv-distributionX')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(distX);
+        g.select('.nv-distributionY')
+            .datum(data.filter(function(d) { return !d.disabled }))
+            .call(distY);
+      }
+
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      controls.dispatch.on('legendClick', function(d,i) {
+        d.disabled = !d.disabled;
+
+        fisheye = d.disabled ? 0 : 2.5;
+        g.select('.nv-background') .style('pointer-events', d.disabled ? 'none' : 'all');
+        g.select('.nv-point-paths').style('pointer-events', d.disabled ? 'all' : 'none' );
+
+        if (d.disabled) {
+          x.distortion(fisheye).focus(0);
+          y.distortion(fisheye).focus(0);
+
+          g.select('.nv-scatterWrap').call(scatter);
+          g.select('.nv-x.nv-axis').call(xAxis);
+          g.select('.nv-y.nv-axis').call(yAxis);
+        } else {
+          pauseFisheye = false;
+        }
+
+        chart.update();
+      });
+
+      legend.dispatch.on('stateChange', function(newState) { 
+        state = newState;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+
+      scatter.dispatch.on('elementMouseover.tooltip', function(e) {
+        d3.select('.nv-chart-' + scatter.id() + ' .nv-series-' + e.seriesIndex + ' .nv-distx-' + e.pointIndex)
+            .attr('y1', e.pos[1] - availableHeight);
+        d3.select('.nv-chart-' + scatter.id() + ' .nv-series-' + e.seriesIndex + ' .nv-disty-' + e.pointIndex)
+            .attr('x2', e.pos[0] + distX.size());
+
+        e.pos = [e.pos[0] + margin.left, e.pos[1] + margin.top];
+        dispatch.tooltipShow(e);
+      });
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+      // Update chart from a state object passed to event handler
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined') {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        chart.update();
+      });
+
+      //============================================================
+
+
+      //store old scales for use in transitions on update
+      x0 = x.copy();
+      y0 = y.copy();
+
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  scatter.dispatch.on('elementMouseout.tooltip', function(e) {
+    dispatch.tooltipHide(e);
+
+    d3.select('.nv-chart-' + scatter.id() + ' .nv-series-' + e.seriesIndex + ' .nv-distx-' + e.pointIndex)
+        .attr('y1', 0);
+    d3.select('.nv-chart-' + scatter.id() + ' .nv-series-' + e.seriesIndex + ' .nv-disty-' + e.pointIndex)
+        .attr('x2', distY.size());
+  });
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.scatter = scatter;
+  chart.legend = legend;
+  chart.controls = controls;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+  chart.distX = distX;
+  chart.distY = distY;
+
+  d3.rebind(chart, scatter, 'id', 'interactive', 'pointActive', 'x', 'y', 'shape', 'size', 'xScale', 'yScale', 'zScale', 'xDomain', 'yDomain', 'xRange', 'yRange', 'sizeDomain', 'sizeRange', 'forceX', 'forceY', 'forceSize', 'clipVoronoi', 'clipRadius', 'useVoronoi');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    distX.color(color);
+    distY.color(color);
+    return chart;
+  };
+
+  chart.showDistX = function(_) {
+    if (!arguments.length) return showDistX;
+    showDistX = _;
+    return chart;
+  };
+
+  chart.showDistY = function(_) {
+    if (!arguments.length) return showDistY;
+    showDistY = _;
+    return chart;
+  };
+
+  chart.showControls = function(_) {
+    if (!arguments.length) return showControls;
+    showControls = _;
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.rightAlignYAxis = function(_) {
+    if(!arguments.length) return rightAlignYAxis;
+    rightAlignYAxis = _;
+    yAxis.orient( (_) ? 'right' : 'left');
+    return chart;
+  };
+
+  chart.fisheye = function(_) {
+    if (!arguments.length) return fisheye;
+    fisheye = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.tooltipXContent = function(_) {
+    if (!arguments.length) return tooltipX;
+    tooltipX = _;
+    return chart;
+  };
+
+  chart.tooltipYContent = function(_) {
+    if (!arguments.length) return tooltipY;
+    tooltipY = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
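+
+/* Editor's note: a minimal, hypothetical usage sketch for the chained
+ * getter/setter API exposed above (assuming this factory is registered as
+ * nv.models.scatterChart; '#chart svg' and 'scatterData' are placeholders):
+ *
+ *   var chart = nv.models.scatterChart()
+ *       .showDistX(true)
+ *       .showDistY(true)
+ *       .transitionDuration(350);
+ *   d3.select('#chart svg').datum(scatterData).call(chart);
+ */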
+
+nv.models.sparkline = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 2, right: 0, bottom: 2, left: 0}
+    , width = 400
+    , height = 32
+    , animate = true
+    , x = d3.scale.linear()
+    , y = d3.scale.linear()
+    , getX = function(d) { return d.x }
+    , getY = function(d) { return d.y }
+    , color = nv.utils.getColor(['#000'])
+    , xDomain
+    , yDomain
+    , xRange
+    , yRange
+    ;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x   .domain(xDomain || d3.extent(data, getX ))
+          .range(xRange || [0, availableWidth]);
+
+      y   .domain(yDomain || d3.extent(data, getY ))
+          .range(yRange || [availableHeight, 0]);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-sparkline').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-sparkline');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+      var paths = wrap.selectAll('path')
+          .data(function(d) { return [d] });
+      paths.enter().append('path');
+      paths.exit().remove();
+      paths
+          .style('stroke', function(d,i) { return d.color || color(d, i) })
+          .attr('d', d3.svg.line()
+            .x(function(d,i) { return x(getX(d,i)) })
+            .y(function(d,i) { return y(getY(d,i)) })
+          );
+
+
+      // TODO: Add CURRENT data point (Need Min, Max, Current / Most recent)
+      var points = wrap.selectAll('circle.nv-point')
+          .data(function(data) {
+              var yValues = data.map(function(d, i) { return getY(d,i); });
+              function pointIndex(index) {
+                  if (index != -1) {
+                      var result = data[index];
+                      result.pointIndex = index;
+                      return result;
+                  } else {
+                      return null;
+                  }
+              }
+              var maxPoint = pointIndex(yValues.lastIndexOf(y.domain()[1])),
+                  minPoint = pointIndex(yValues.indexOf(y.domain()[0])),
+                  currentPoint = pointIndex(yValues.length - 1);
+              return [minPoint, maxPoint, currentPoint].filter(function (d) {return d != null;});
+          });
+      points.enter().append('circle');
+      points.exit().remove();
+      points
+          .attr('cx', function(d,i) { return x(getX(d,d.pointIndex)) })
+          .attr('cy', function(d,i) { return y(getY(d,d.pointIndex)) })
+          .attr('r', 2)
+          .attr('class', function(d,i) {
+            return getX(d, d.pointIndex) == x.domain()[1] ? 'nv-point nv-currentValue' :
+                   getY(d, d.pointIndex) == y.domain()[0] ? 'nv-point nv-minValue' : 'nv-point nv-maxValue'
+          });
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = d3.functor(_);
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = d3.functor(_);
+    return chart;
+  };
+
+  chart.xScale = function(_) {
+    if (!arguments.length) return x;
+    x = _;
+    return chart;
+  };
+
+  chart.yScale = function(_) {
+    if (!arguments.length) return y;
+    y = _;
+    return chart;
+  };
+
+  chart.xDomain = function(_) {
+    if (!arguments.length) return xDomain;
+    xDomain = _;
+    return chart;
+  };
+
+  chart.yDomain = function(_) {
+    if (!arguments.length) return yDomain;
+    yDomain = _;
+    return chart;
+  };
+
+  chart.xRange = function(_) {
+    if (!arguments.length) return xRange;
+    xRange = _;
+    return chart;
+  };
+
+  chart.yRange = function(_) {
+    if (!arguments.length) return yRange;
+    yRange = _;
+    return chart;
+  };
+
+  chart.animate = function(_) {
+    if (!arguments.length) return animate;
+    animate = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
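+
+/* Editor's note: a hypothetical usage sketch for the sparkline model above;
+ * it is bound to a flat array of points ('#spark svg' is a placeholder):
+ *
+ *   var spark = nv.models.sparkline().width(120).height(24);
+ *   d3.select('#spark svg')
+ *       .datum([ {x: 0, y: 1}, {x: 1, y: 3}, {x: 2, y: 2} ])
+ *       .call(spark);
+ */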
+
+nv.models.sparklinePlus = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var sparkline = nv.models.sparkline();
+
+  var margin = {top: 15, right: 100, bottom: 10, left: 50}
+    , width = null
+    , height = null
+    , x
+    , y
+    , index = []
+    , paused = false
+    , xTickFormat = d3.format(',r')
+    , yTickFormat = d3.format(',.2f')
+    , showValue = true
+    , alignValue = true
+    , rightAlignValue = false
+    , noData = "No Data Available."
+    ;
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this);
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      
+
+      chart.update = function() { chart(selection) };
+      chart.container = this;
+
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      var currentValue = sparkline.y()(data[data.length-1], data.length-1);
+
+      //------------------------------------------------------------
+
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = sparkline.xScale();
+      y = sparkline.yScale();
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-sparklineplus').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-sparklineplus');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-sparklineWrap');
+      gEnter.append('g').attr('class', 'nv-valueWrap');
+      gEnter.append('g').attr('class', 'nv-hoverArea');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      var sparklineWrap = g.select('.nv-sparklineWrap');
+
+      sparkline
+        .width(availableWidth)
+        .height(availableHeight);
+
+      sparklineWrap
+          .call(sparkline);
+
+      //------------------------------------------------------------
+
+
+      var valueWrap = g.select('.nv-valueWrap');
+      
+      var value = valueWrap.selectAll('.nv-currentValue')
+          .data([currentValue]);
+
+      value.enter().append('text').attr('class', 'nv-currentValue')
+          .attr('dx', rightAlignValue ? -8 : 8)
+          .attr('dy', '.9em')
+          .style('text-anchor', rightAlignValue ? 'end' : 'start');
+
+      value
+          .attr('x', availableWidth + (rightAlignValue ? margin.right : 0))
+          .attr('y', alignValue ? function(d) { return y(d) } : 0)
+          .style('fill', sparkline.color()(data[data.length-1], data.length-1))
+          .text(yTickFormat(currentValue));
+
+
+
+      gEnter.select('.nv-hoverArea').append('rect')
+          .on('mousemove', sparklineHover)
+          .on('click', function() { paused = !paused })
+          .on('mouseout', function() { index = []; updateValueLine(); });
+          //.on('mouseout', function() { index = null; updateValueLine(); });
+
+      g.select('.nv-hoverArea rect')
+          .attr('transform', function(d) { return 'translate(' + -margin.left + ',' + -margin.top + ')' })
+          .attr('width', availableWidth + margin.left + margin.right)
+          .attr('height', availableHeight + margin.top);
+
+
+
+      function updateValueLine() { //index is currently global (within the chart), may or may not keep it that way
+        if (paused) return;
+
+        var hoverValue = g.selectAll('.nv-hoverValue').data(index);
+
+        var hoverEnter = hoverValue.enter()
+          .append('g').attr('class', 'nv-hoverValue')
+            .style('stroke-opacity', 0)
+            .style('fill-opacity', 0);
+
+        hoverValue.exit()
+            .style('stroke-opacity', 0)
+            .style('fill-opacity', 0)
+            .remove();
+
+        hoverValue
+            .attr('transform', function(d) { return 'translate(' + x(sparkline.x()(data[d],d)) + ',0)' })
+            .style('stroke-opacity', 1)
+            .style('fill-opacity', 1);
+
+        if (!index.length) return;
+
+        hoverEnter.append('line')
+            .attr('x1', 0)
+            .attr('y1', -margin.top)
+            .attr('x2', 0)
+            .attr('y2', availableHeight);
+
+
+        hoverEnter.append('text').attr('class', 'nv-xValue')
+            .attr('x', -6)
+            .attr('y', -margin.top)
+            .attr('text-anchor', 'end')
+            .attr('dy', '.9em');
+
+
+        g.select('.nv-hoverValue .nv-xValue')
+            .text(xTickFormat(sparkline.x()(data[index[0]], index[0])));
+
+        hoverEnter.append('text').attr('class', 'nv-yValue')
+            .attr('x', 6)
+            .attr('y', -margin.top)
+            .attr('text-anchor', 'start')
+            .attr('dy', '.9em');
+
+        g.select('.nv-hoverValue .nv-yValue')
+            .text(yTickFormat(sparkline.y()(data[index[0]], index[0])));
+
+      }
+
+
+      function sparklineHover() {
+        if (paused) return;
+
+        var pos = d3.mouse(this)[0] - margin.left;
+
+        function getClosestIndex(data, x) {
+          var distance = Math.abs(sparkline.x()(data[0], 0) - x);
+          var closestIndex = 0;
+          for (var i = 0; i < data.length; i++){
+            if (Math.abs(sparkline.x()(data[i], i) - x) < distance) {
+              distance = Math.abs(sparkline.x()(data[i], i) - x);
+              closestIndex = i;
+            }
+          }
+          return closestIndex;
+        }
+
+        index = [getClosestIndex(data, Math.round(x.invert(pos)))];
+
+        updateValueLine();
+      }
+
+    });
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.sparkline = sparkline;
+
+  d3.rebind(chart, sparkline, 'x', 'y', 'xScale', 'yScale', 'color');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+  
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.xTickFormat = function(_) {
+    if (!arguments.length) return xTickFormat;
+    xTickFormat = _;
+    return chart;
+  };
+
+  chart.yTickFormat = function(_) {
+    if (!arguments.length) return yTickFormat;
+    yTickFormat = _;
+    return chart;
+  };
+
+  chart.showValue = function(_) {
+    if (!arguments.length) return showValue;
+    showValue = _;
+    return chart;
+  };
+
+  chart.alignValue = function(_) {
+    if (!arguments.length) return alignValue;
+    alignValue = _;
+    return chart;
+  };
+
+  chart.rightAlignValue = function(_) {
+    if (!arguments.length) return rightAlignValue;
+    rightAlignValue = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  //============================================================
+
+
+  return chart;
+}
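+
+/* Editor's note: sparklinePlus wraps the basic sparkline with a hover line
+ * and a current-value label; a hypothetical usage sketch ('dataPoints' is
+ * the same flat point array the sparkline model expects):
+ *
+ *   var chart = nv.models.sparklinePlus()
+ *       .xTickFormat(d3.format(',r'))
+ *       .yTickFormat(d3.format(',.2f'));
+ *   d3.select('#spark svg').datum(dataPoints).call(chart);
+ */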
+
+nv.models.stackedArea = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var margin = {top: 0, right: 0, bottom: 0, left: 0}
+    , width = 960
+    , height = 500
+    , color = nv.utils.defaultColor() // a function that computes the color
+    , id = Math.floor(Math.random() * 100000) //Create semi-unique ID in case user doesn't select one
+    , getX = function(d) { return d.x } // accessor to get the x value from a data point
+    , getY = function(d) { return d.y } // accessor to get the y value from a data point
+    , style = 'stack'
+    , offset = 'zero'
+    , order = 'default'
+    , interpolate = 'linear'  // controls the line interpolation
+    , clipEdge = false // if true, masks lines within x and y scale
+    , x //can be accessed via chart.xScale()
+    , y //can be accessed via chart.yScale()
+    , scatter = nv.models.scatter()
+    , dispatch =  d3.dispatch('tooltipShow', 'tooltipHide', 'areaClick', 'areaMouseover', 'areaMouseout')
+    ;
+
+  scatter
+    .size(2.2) // default size
+    .sizeDomain([2.2,2.2]) // all the same size by default
+    ;
+
+  /************************************
+   * offset:
+   *   'wiggle' (stream)
+   *   'zero' (stacked)
+   *   'expand' (normalize to 100%)
+   *   'silhouette' (simple centered)
+   *
+   * order:
+   *   'inside-out' (stream)
+   *   'default' (input order)
+   ************************************/
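+
+  /* For example (editor's sketch):
+   *   nv.models.stackedArea().offset('wiggle').order('inside-out')
+   * yields a streamgraph, while the defaults ('zero', 'default') yield a
+   * conventional stacked area; chart.style() below sets both at once. */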
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var availableWidth = width - margin.left - margin.right,
+          availableHeight = height - margin.top - margin.bottom,
+          container = d3.select(this);
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = scatter.xScale();
+      y = scatter.yScale();
+
+      //------------------------------------------------------------
+
+      var dataRaw = data;
+      // Injecting point index into each point because d3.layout.stack().out does not give index
+      data.forEach(function(aseries, i) {
+        aseries.seriesIndex = i;
+        aseries.values = aseries.values.map(function(d, j) {
+          d.index = j;
+          d.seriesIndex = i;
+          return d;
+        });
+      });
+
+      var dataFiltered = data.filter(function(series) {
+            return !series.disabled;
+      });
+
+      data = d3.layout.stack()
+               .order(order)
+               .offset(offset)
+               .values(function(d) { return d.values })  //TODO: make values customizable in EVERY model in this fashion
+               .x(getX)
+               .y(getY)
+               .out(function(d, y0, y) {
+                    var yHeight = (getY(d) === 0) ? 0 : y;
+                    d.display = {
+                      y: yHeight,
+                     y0: y0
+                    };
+                })
+              (dataFiltered);
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-stackedarea').data([data]);
+      var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-stackedarea');
+      var defsEnter = wrapEnter.append('defs');
+      var gEnter = wrapEnter.append('g');
+      var g = wrap.select('g');
+
+      gEnter.append('g').attr('class', 'nv-areaWrap');
+      gEnter.append('g').attr('class', 'nv-scatterWrap');
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      //------------------------------------------------------------
+
+
+      scatter
+        .width(availableWidth)
+        .height(availableHeight)
+        .x(getX)
+        .y(function(d) { return d.display.y + d.display.y0 })
+        .forceY([0])
+        .color(data.map(function(d,i) {
+          return d.color || color(d, d.seriesIndex);
+        }));
+
+
+      var scatterWrap = g.select('.nv-scatterWrap')
+          .datum(data);
+
+      scatterWrap.call(scatter);
+
+      defsEnter.append('clipPath')
+          .attr('id', 'nv-edge-clip-' + id)
+        .append('rect');
+
+      wrap.select('#nv-edge-clip-' + id + ' rect')
+          .attr('width', availableWidth)
+          .attr('height', availableHeight);
+
+      g   .attr('clip-path', clipEdge ? 'url(#nv-edge-clip-' + id + ')' : '');
+
+      var area = d3.svg.area()
+          .x(function(d,i)  { return x(getX(d,i)) })
+          .y0(function(d) {
+              return y(d.display.y0)
+          })
+          .y1(function(d) {
+              return y(d.display.y + d.display.y0)
+          })
+          .interpolate(interpolate);
+
+      var zeroArea = d3.svg.area()
+          .x(function(d,i)  { return x(getX(d,i)) })
+          .y0(function(d) { return y(d.display.y0) })
+          .y1(function(d) { return y(d.display.y0) });
+
+
+      var path = g.select('.nv-areaWrap').selectAll('path.nv-area')
+          .data(function(d) { return d });
+
+      path.enter().append('path').attr('class', function(d,i) { return 'nv-area nv-area-' + i })
+          .attr('d', function(d,i){
+            return zeroArea(d.values, d.seriesIndex);
+          })
+          .on('mouseover', function(d,i) {
+            d3.select(this).classed('hover', true);
+            dispatch.areaMouseover({
+              point: d,
+              series: d.key,
+              pos: [d3.event.pageX, d3.event.pageY],
+              seriesIndex: d.seriesIndex
+            });
+          })
+          .on('mouseout', function(d,i) {
+            d3.select(this).classed('hover', false);
+            dispatch.areaMouseout({
+              point: d,
+              series: d.key,
+              pos: [d3.event.pageX, d3.event.pageY],
+              seriesIndex: d.seriesIndex
+            });
+          })
+          .on('click', function(d,i) {
+            d3.select(this).classed('hover', false);
+            dispatch.areaClick({
+              point: d,
+              series: d.key,
+              pos: [d3.event.pageX, d3.event.pageY],
+              seriesIndex: d.seriesIndex
+            });
+          });
+
+      path.exit().remove();
+
+      path
+          .style('fill', function(d,i){
+            return d.color || color(d, d.seriesIndex)
+          })
+          .style('stroke', function(d,i){ return d.color || color(d, d.seriesIndex) });
+      path
+          .attr('d', function(d,i) {
+            return area(d.values,i)
+          });
+
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      scatter.dispatch.on('elementMouseover.area', function(e) {
+        g.select('.nv-chart-' + id + ' .nv-area-' + e.seriesIndex).classed('hover', true);
+      });
+      scatter.dispatch.on('elementMouseout.area', function(e) {
+        g.select('.nv-chart-' + id + ' .nv-area-' + e.seriesIndex).classed('hover', false);
+      });
+
+      //============================================================
+      //Special offset functions
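+      // Editor's note: this offset normalizes each x position so the visible
+      // series stack to 100%: every stacked y is divided by the column total
+      // (or set to equal shares, 1/n, when the total is zero), and the
+      // function returns the all-zero baseline array d3.layout.stack expects.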
+      chart.d3_stackedOffset_stackPercent = function(stackData) {
+          var n = stackData.length,    //How many series
+          m = stackData[0].length,     //How many points per series
+          k = 1 / n,
+           i,
+           j,
+           o,
+           y0 = [];
+
+          for (j = 0; j < m; ++j) { //Looping through all points
+            for (i = 0, o = 0; i < dataRaw.length; i++)  //Looping through series
+                o += getY(dataRaw[i].values[j]);   //Total value of all points at a certain point in time.
+
+            if (o) for (i = 0; i < n; i++)
+               stackData[i][j][1] /= o;
+            else
+              for (i = 0; i < n; i++)
+               stackData[i][j][1] = k;
+          }
+          for (j = 0; j < m; ++j) y0[j] = 0;
+          return y0;
+      };
+
+    });
+
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  scatter.dispatch.on('elementClick.area', function(e) {
+    dispatch.areaClick(e);
+  });
+  scatter.dispatch.on('elementMouseover.tooltip', function(e) {
+        e.pos = [e.pos[0] + margin.left, e.pos[1] + margin.top];
+        dispatch.tooltipShow(e);
+  });
+  scatter.dispatch.on('elementMouseout.tooltip', function(e) {
+        dispatch.tooltipHide(e);
+  });
+
+  //============================================================
+
+  //============================================================
+  // Global getters and setters
+  //------------------------------------------------------------
+
+  chart.dispatch = dispatch;
+  chart.scatter = scatter;
+
+  d3.rebind(chart, scatter, 'interactive', 'size', 'xScale', 'yScale', 'zScale', 'xDomain', 'yDomain', 'xRange', 'yRange',
+    'sizeDomain', 'forceX', 'forceY', 'forceSize', 'clipVoronoi', 'useVoronoi','clipRadius','highlightPoint','clearHighlights');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.x = function(_) {
+    if (!arguments.length) return getX;
+    getX = d3.functor(_);
+    return chart;
+  };
+
+  chart.y = function(_) {
+    if (!arguments.length) return getY;
+    getY = d3.functor(_);
+    return chart;
+  };
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.clipEdge = function(_) {
+    if (!arguments.length) return clipEdge;
+    clipEdge = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    return chart;
+  };
+
+  chart.offset = function(_) {
+    if (!arguments.length) return offset;
+    offset = _;
+    return chart;
+  };
+
+  chart.order = function(_) {
+    if (!arguments.length) return order;
+    order = _;
+    return chart;
+  };
+
+  //shortcut for offset + order
+  chart.style = function(_) {
+    if (!arguments.length) return style;
+    style = _;
+
+    switch (style) {
+      case 'stack':
+        chart.offset('zero');
+        chart.order('default');
+        break;
+      case 'stream':
+        chart.offset('wiggle');
+        chart.order('inside-out');
+        break;
+      case 'stream-center':
+          chart.offset('silhouette');
+          chart.order('inside-out');
+          break;
+      case 'expand':
+        chart.offset('expand');
+        chart.order('default');
+        break;
+      case 'stack_percent':
+        chart.offset(chart.d3_stackedOffset_stackPercent);
+        chart.order('default');
+        break;
+    }
+
+    return chart;
+  };
+
+  chart.interpolate = function(_) {
+    if (!arguments.length) return interpolate;
+    interpolate = _;
+    return chart;
+  };
+  //============================================================
+
+
+  return chart;
+}
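+
+/* Editor's note: a hypothetical usage sketch for the stackedArea model;
+ * data is an array of series objects with 'key' and 'values' fields:
+ *
+ *   var area = nv.models.stackedArea()
+ *       .x(function(d) { return d[0] })
+ *       .y(function(d) { return d[1] })
+ *       .style('stream');  // shortcut for offset 'wiggle' + order 'inside-out'
+ */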
+
+nv.models.stackedAreaChart = function() {
+  "use strict";
+  //============================================================
+  // Public Variables with Default Settings
+  //------------------------------------------------------------
+
+  var stacked = nv.models.stackedArea()
+    , xAxis = nv.models.axis()
+    , yAxis = nv.models.axis()
+    , legend = nv.models.legend()
+    , controls = nv.models.legend()
+    , interactiveLayer = nv.interactiveGuideline()
+    ;
+
+  var margin = {top: 30, right: 25, bottom: 50, left: 60}
+    , width = null
+    , height = null
+    , color = nv.utils.defaultColor() // a function that takes in d, i and returns color
+    , showControls = true
+    , showLegend = true
+    , showXAxis = true
+    , showYAxis = true
+    , rightAlignYAxis = false
+    , useInteractiveGuideline = false
+    , tooltips = true
+    , tooltip = function(key, x, y, e, graph) {
+        return '<h3>' + key + '</h3>' +
+               '<p>' +  y + ' on ' + x + '</p>'
+      }
+    , x //can be accessed via chart.xScale()
+    , y //can be accessed via chart.yScale()
+    , yAxisTickFormat = d3.format(',.2f')
+    , state = { style: stacked.style() }
+    , defaultState = null
+    , noData = 'No Data Available.'
+    , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState')
+    , controlWidth = 250
+    , cData = ['Stacked','Stream','Expanded']
+    , controlLabels = {}
+    , transitionDuration = 0
+    ;
+
+  xAxis
+    .orient('bottom')
+    .tickPadding(7)
+    ;
+  yAxis
+    .orient((rightAlignYAxis) ? 'right' : 'left')
+    ;
+
+  controls.updateState(false);
+  //============================================================
+
+
+  //============================================================
+  // Private Variables
+  //------------------------------------------------------------
+
+  var showTooltip = function(e, offsetElement) {
+    var left = e.pos[0] + ( offsetElement.offsetLeft || 0 ),
+        top = e.pos[1] + ( offsetElement.offsetTop || 0),
+        x = xAxis.tickFormat()(stacked.x()(e.point, e.pointIndex)),
+        y = yAxis.tickFormat()(stacked.y()(e.point, e.pointIndex)),
+        content = tooltip(e.series.key, x, y, e, chart);
+
+    nv.tooltip.show([left, top], content, e.value < 0 ? 'n' : 's', null, offsetElement);
+  };
+
+  //============================================================
+
+
+  function chart(selection) {
+    selection.each(function(data) {
+      var container = d3.select(this),
+          that = this;
+
+      var availableWidth = (width  || parseInt(container.style('width')) || 960)
+                             - margin.left - margin.right,
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+
+      chart.update = function() { container.call(chart); };
+      chart.container = this;
+
+      //set state.disabled
+      state.disabled = data.map(function(d) { return !!d.disabled });
+
+      if (!defaultState) {
+        var key;
+        defaultState = {};
+        for (key in state) {
+          if (state[key] instanceof Array)
+            defaultState[key] = state[key].slice(0);
+          else
+            defaultState[key] = state[key];
+        }
+      }
+
+      //------------------------------------------------------------
+      // Display No Data message if there's nothing to show.
+
+      if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {
+        var noDataText = container.selectAll('.nv-noData').data([noData]);
+
+        noDataText.enter().append('text')
+          .attr('class', 'nvd3 nv-noData')
+          .attr('dy', '-.7em')
+          .style('text-anchor', 'middle');
+
+        noDataText
+          .attr('x', margin.left + availableWidth / 2)
+          .attr('y', margin.top + availableHeight / 2)
+          .text(function(d) { return d });
+
+        return chart;
+      } else {
+        container.selectAll('.nv-noData').remove();
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Scales
+
+      x = stacked.xScale();
+      y = stacked.yScale();
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup containers and skeleton of chart
+
+      var wrap = container.selectAll('g.nv-wrap.nv-stackedAreaChart').data([data]);
+      var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-stackedAreaChart').append('g');
+      var g = wrap.select('g');
+
+      gEnter.append("rect").style("opacity",0);
+      gEnter.append('g').attr('class', 'nv-x nv-axis');
+      gEnter.append('g').attr('class', 'nv-y nv-axis');
+      gEnter.append('g').attr('class', 'nv-stackedWrap');
+      gEnter.append('g').attr('class', 'nv-legendWrap');
+      gEnter.append('g').attr('class', 'nv-controlsWrap');
+      gEnter.append('g').attr('class', 'nv-interactive');
+
+      g.select("rect").attr("width",availableWidth).attr("height",availableHeight);
+      //------------------------------------------------------------
+      // Legend
+
+      if (showLegend) {
+        var legendWidth = (showControls) ? availableWidth - controlWidth : availableWidth;
+        legend
+          .width(legendWidth);
+
+        g.select('.nv-legendWrap')
+            .datum(data)
+            .call(legend);
+
+        if ( margin.top != legend.height()) {
+          margin.top = legend.height();
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+        g.select('.nv-legendWrap')
+            .attr('transform', 'translate(' + (availableWidth-legendWidth) + ',' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Controls
+
+      if (showControls) {
+        var controlsData = [
+          {
+            key: controlLabels.stacked || 'Stacked',
+            metaKey: 'Stacked',
+            disabled: stacked.style() != 'stack',
+            style: 'stack'
+          },
+          {
+            key: controlLabels.stream || 'Stream',
+            metaKey: 'Stream',
+            disabled: stacked.style() != 'stream',
+            style: 'stream'
+          },
+          {
+            key: controlLabels.expanded || 'Expanded',
+            metaKey: 'Expanded',
+            disabled: stacked.style() != 'expand',
+            style: 'expand'
+          },
+          {
+            key: controlLabels.stack_percent || 'Stack %',
+            metaKey: 'Stack_Percent',
+            disabled: stacked.style() != 'stack_percent',
+            style: 'stack_percent'
+          }
+        ];
+
+        controlWidth = (cData.length/3) * 260;
+
+        controlsData = controlsData.filter(function(d) {
+          return cData.indexOf(d.metaKey) !== -1;
+        });
+
+        controls
+          .width( controlWidth )
+          .color(['#444', '#444', '#444']);
+
+        g.select('.nv-controlsWrap')
+            .datum(controlsData)
+            .call(controls);
+
+
+        if ( margin.top != Math.max(controls.height(), legend.height()) ) {
+          margin.top = Math.max(controls.height(), legend.height());
+          availableHeight = (height || parseInt(container.style('height')) || 400)
+                             - margin.top - margin.bottom;
+        }
+
+
+        g.select('.nv-controlsWrap')
+            .attr('transform', 'translate(0,' + (-margin.top) +')');
+      }
+
+      //------------------------------------------------------------
+
+
+      wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');
+
+      if (rightAlignYAxis) {
+          g.select(".nv-y.nv-axis")
+              .attr("transform", "translate(" + availableWidth + ",0)");
+      }
+
+      //------------------------------------------------------------
+      // Main Chart Component(s)
+
+      //------------------------------------------------------------
+      //Set up interactive layer
+      if (useInteractiveGuideline) {
+        interactiveLayer
+           .width(availableWidth)
+           .height(availableHeight)
+           .margin({left: margin.left, top: margin.top})
+           .svgContainer(container)
+           .xScale(x);
+        wrap.select(".nv-interactive").call(interactiveLayer);
+      }
+
+      stacked
+        .width(availableWidth)
+        .height(availableHeight);
+
+      var stackedWrap = g.select('.nv-stackedWrap')
+          .datum(data);
+
+      stackedWrap.call(stacked);
+
+      //------------------------------------------------------------
+
+
+      //------------------------------------------------------------
+      // Setup Axes
+
+      if (showXAxis) {
+        xAxis
+          .scale(x)
+          .ticks( availableWidth / 100 )
+          .tickSize( -availableHeight, 0);
+
+        g.select('.nv-x.nv-axis')
+            .attr('transform', 'translate(0,' + availableHeight + ')');
+
+        g.select('.nv-x.nv-axis')
+            .call(xAxis);
+      }
+
+      if (showYAxis) {
+        yAxis
+          .scale(y)
+          .ticks(stacked.offset() == 'wiggle' ? 0 : availableHeight / 36)
+          .tickSize(-availableWidth, 0)
+          .setTickFormat( (stacked.style() == 'expand' || stacked.style() == 'stack_percent')
+                ? d3.format('%') : yAxisTickFormat);
+
+        g.select('.nv-y.nv-axis')
+            .call(yAxis);
+      }
+
+      //------------------------------------------------------------
+
+
+      //============================================================
+      // Event Handling/Dispatching (in chart's scope)
+      //------------------------------------------------------------
+
+      stacked.dispatch.on('areaClick.toggle', function(e) {
+        if (data.filter(function(d) { return !d.disabled }).length === 1)
+          data.forEach(function(d) {
+            d.disabled = false;
+          });
+        else
+          data.forEach(function(d,i) {
+            d.disabled = (i != e.seriesIndex);
+          });
+
+        state.disabled = data.map(function(d) { return !!d.disabled });
+        dispatch.stateChange(state);
+
+        chart.update();
+      });
+
+      legend.dispatch.on('stateChange', function(newState) {
+        state.disabled = newState.disabled;
+        dispatch.stateChange(state);
+        chart.update();
+      });
+
+      controls.dispatch.on('legendClick', function(d,i) {
+        if (!d.disabled) return;
+
+        controlsData = controlsData.map(function(s) {
+          s.disabled = true;
+          return s;
+        });
+        d.disabled = false;
+
+        stacked.style(d.style);
+
+
+        state.style = stacked.style();
+        dispatch.stateChange(state);
+
+        chart.update();
+      });
+
+
+      interactiveLayer.dispatch.on('elementMousemove', function(e) {
+          stacked.clearHighlights();
+          var singlePoint, pointIndex, pointXLocation, allData = [];
+          data
+          .filter(function(series, i) {
+            series.seriesIndex = i;
+            return !series.disabled;
+          })
+          .forEach(function(series,i) {
+              pointIndex = nv.interactiveBisect(series.values, e.pointXValue, chart.x());
+              stacked.highlightPoint(i, pointIndex, true);
+              var point = series.values[pointIndex];
+              if (typeof point === 'undefined') return;
+              if (typeof singlePoint === 'undefined') singlePoint = point;
+              if (typeof pointXLocation === 'undefined') pointXLocation = chart.xScale()(chart.x()(point,pointIndex));
+
+              //If we are in 'expand' mode, use the stacked percent value instead of raw value.
+              var tooltipValue = (stacked.style() == 'expand') ? point.display.y : chart.y()(point,pointIndex);
+              allData.push({
+                  key: series.key,
+                  value: tooltipValue,
+                  color: color(series,series.seriesIndex),
+                  stackedValue: point.display
+              });
+          });
+
+          allData.reverse();
+
+          //Highlight the tooltip entry based on which stack the mouse is closest to.
+          if (allData.length > 2) {
+            var yValue = chart.yScale().invert(e.mouseY);
+            var indexToHighlight = null;
+            allData.forEach(function(series,i) {
+
+               //To handle situation where the stacked area chart is negative, we need to use absolute values
+               //when checking if the mouse Y value is within the stack area.
+               yValue = Math.abs(yValue);
+               var stackedY0 = Math.abs(series.stackedValue.y0);
+               var stackedY = Math.abs(series.stackedValue.y);
+               if ( yValue >= stackedY0 && yValue <= (stackedY + stackedY0))
+               {
+                  indexToHighlight = i;
+                  return;
+               }
+            });
+            if (indexToHighlight != null)
+               allData[indexToHighlight].highlight = true;
+          }
+
+          var xValue = xAxis.tickFormat()(chart.x()(singlePoint,pointIndex));
+
+          //If we are in 'expand' mode, force the format to be a percentage.
+          var valueFormatter = (stacked.style() == 'expand') ?
+               function(d,i) {return d3.format(".1%")(d);} :
+               function(d,i) {return yAxis.tickFormat()(d); };
+          interactiveLayer.tooltip
+                  .position({left: pointXLocation + margin.left, top: e.mouseY + margin.top})
+                  .chartContainer(that.parentNode)
+                  .enabled(tooltips)
+                  .valueFormatter(valueFormatter)
+                  .data(
+                      {
+                        value: xValue,
+                        series: allData
+                      }
+                  )();
+
+          interactiveLayer.renderGuideLine(pointXLocation);
+
+      });
+
+      interactiveLayer.dispatch.on("elementMouseout",function(e) {
+          dispatch.tooltipHide();
+          stacked.clearHighlights();
+      });
+
+
+      dispatch.on('tooltipShow', function(e) {
+        if (tooltips) showTooltip(e, that.parentNode);
+      });
+
+      // Update chart from a state object passed to event handler
+      dispatch.on('changeState', function(e) {
+
+        if (typeof e.disabled !== 'undefined' && data.length === e.disabled.length) {
+          data.forEach(function(series,i) {
+            series.disabled = e.disabled[i];
+          });
+
+          state.disabled = e.disabled;
+        }
+
+        if (typeof e.style !== 'undefined') {
+          stacked.style(e.style);
+        }
+
+        chart.update();
+      });
+
+    });
+
+
+    return chart;
+  }
+
+
+  //============================================================
+  // Event Handling/Dispatching (out of chart's scope)
+  //------------------------------------------------------------
+
+  stacked.dispatch.on('tooltipShow', function(e) {
+    //disable tooltips when value ~= 0
+    //// TODO: consider removing points from voronoi that have 0 value instead of this hack
+    /*
+    if (!Math.round(stacked.y()(e.point) * 100)) {  // 100 will not be good for very small numbers... will have to think about making this value dynamic, based on data range
+      setTimeout(function() { d3.selectAll('.point.hover').classed('hover', false) }, 0);
+      return false;
+    }
+   */
+
+    e.pos = [e.pos[0] + margin.left, e.pos[1] + margin.top];
+    dispatch.tooltipShow(e);
+  });
+
+  stacked.dispatch.on('tooltipHide', function(e) {
+    dispatch.tooltipHide(e);
+  });
+
+  dispatch.on('tooltipHide', function() {
+    if (tooltips) nv.tooltip.cleanup();
+  });
+
+  //============================================================
+
+
+  //============================================================
+  // Expose Public Variables
+  //------------------------------------------------------------
+
+  // expose chart's sub-components
+  chart.dispatch = dispatch;
+  chart.stacked = stacked;
+  chart.legend = legend;
+  chart.controls = controls;
+  chart.xAxis = xAxis;
+  chart.yAxis = yAxis;
+  chart.interactiveLayer = interactiveLayer;
+
+  d3.rebind(chart, stacked, 'x', 'y', 'size', 'xScale', 'yScale', 'xDomain', 'yDomain', 'xRange', 'yRange', 'sizeDomain', 'interactive', 'useVoronoi', 'offset', 'order', 'style', 'clipEdge', 'forceX', 'forceY', 'forceSize', 'interpolate');
+
+  chart.options = nv.utils.optionsFunc.bind(chart);
+
+  chart.margin = function(_) {
+    if (!arguments.length) return margin;
+    margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;
+    margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;
+    margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;
+    margin.left   = typeof _.left   != 'undefined' ? _.left   : margin.left;
+    return chart;
+  };
+
+  chart.width = function(_) {
+    if (!arguments.length) return width;
+    width = _;
+    return chart;
+  };
+
+  chart.height = function(_) {
+    if (!arguments.length) return height;
+    height = _;
+    return chart;
+  };
+
+  chart.color = function(_) {
+    if (!arguments.length) return color;
+    color = nv.utils.getColor(_);
+    legend.color(color);
+    stacked.color(color);
+    return chart;
+  };
+
+  chart.showControls = function(_) {
+    if (!arguments.length) return showControls;
+    showControls = _;
+    return chart;
+  };
+
+  chart.showLegend = function(_) {
+    if (!arguments.length) return showLegend;
+    showLegend = _;
+    return chart;
+  };
+
+  chart.showXAxis = function(_) {
+    if (!arguments.length) return showXAxis;
+    showXAxis = _;
+    return chart;
+  };
+
+  chart.showYAxis = function(_) {
+    if (!arguments.length) return showYAxis;
+    showYAxis = _;
+    return chart;
+  };
+
+  chart.rightAlignYAxis = function(_) {
+    if(!arguments.length) return rightAlignYAxis;
+    rightAlignYAxis = _;
+    yAxis.orient( (_) ? 'right' : 'left');
+    return chart;
+  };
+
+  chart.useInteractiveGuideline = function(_) {
+    if(!arguments.length) return useInteractiveGuideline;
+    useInteractiveGuideline = _;
+    if (_ === true) {
+       chart.interactive(false);
+       chart.useVoronoi(false);
+    }
+    return chart;
+  };
+
+  chart.tooltip = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.tooltips = function(_) {
+    if (!arguments.length) return tooltips;
+    tooltips = _;
+    return chart;
+  };
+
+  chart.tooltipContent = function(_) {
+    if (!arguments.length) return tooltip;
+    tooltip = _;
+    return chart;
+  };
+
+  chart.state = function(_) {
+    if (!arguments.length) return state;
+    state = _;
+    return chart;
+  };
+
+  chart.defaultState = function(_) {
+    if (!arguments.length) return defaultState;
+    defaultState = _;
+    return chart;
+  };
+
+  chart.noData = function(_) {
+    if (!arguments.length) return noData;
+    noData = _;
+    return chart;
+  };
+
+  chart.transitionDuration = function(_) {
+    if (!arguments.length) return transitionDuration;
+    transitionDuration = _;
+    return chart;
+  };
+
+  chart.controlsData = function(_) {
+    if (!arguments.length) return cData;
+    cData = _;
+    return chart;
+  };
+
+  chart.controlLabels = function(_) {
+    if (!arguments.length) return controlLabels;
+    if (typeof _ !== 'object') return controlLabels;
+    controlLabels = _;
+    return chart;
+  };
+
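+  // Editor's note: the next two statements alias the axis' native tickFormat
+  // as setTickFormat and repoint yAxis.tickFormat() at yAxisTickFormat, so a
+  // user-supplied format is stored and applied on render (a percent format is
+  // substituted in 'expand' / 'stack_percent' mode, see the axes setup above).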
+  yAxis.setTickFormat = yAxis.tickFormat;
+
+  yAxis.tickFormat = function(_) {
+    if (!arguments.length) return yAxisTickFormat;
+    yAxisTickFormat = _;
+    return yAxis;
+  };
+
+
+  //============================================================
+
+  return chart;
+}
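+
+/* Editor's note: a hypothetical end-to-end sketch for the chart above
+ * ('#chart svg' and 'seriesData' are placeholders):
+ *
+ *   nv.addGraph(function() {
+ *     var chart = nv.models.stackedAreaChart()
+ *         .useInteractiveGuideline(true);
+ *     d3.select('#chart svg').datum(seriesData).call(chart);
+ *     return chart;
+ *   });
+ */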
+})();
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/pv/viewer.js b/config/plugins/visualizations/charts/static/repository/plugins/pv/viewer.js
new file mode 100644
index 0000000..f5aae5f
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/plugins/pv/viewer.js
@@ -0,0 +1,13 @@
+/**
+ * PV - WebGL protein viewer v1.9.0dev
+ * http://biasmv.github.io/pv
+ * 
+ * Copyright 2013-2015 Marco Biasini
+ * Released under the MIT license
+ */
+!function(a,b){if("function"==typeof define&&define.amd)define([],b);else if("object"==typeof exports)exports=b(),"object"==typeof module&&(module.exports=exports);else{var c=b();a.pv=c,a.io=c.io,a.mol=c.mol,a.color=c.color,a.rgb=c.rgb,a.viewpoint=c.viewpoint,a.vec3=c.vec3,a.vec4=c.vec4,a.mat3=c.mat3,a.mat4=c.mat4,a.quat=c.quat}}(this,function(){var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X;return a=function(){var a={};if(!b)var b [...]
+}},resize:function(a,b){(a!==this._k||b!==this._l)&&(this._6=!0,this._k=a,this._l=b)},fitParent:function(){var a=this._F.getBoundingClientRect();this.resize(a.width,a.height)},gl:function(){return this._b},imageData:function(){return this._c.toDataURL()},_eo:function(){try{var a={antialias:this._dF&&!this._do,preserveDrawingBuffer:!0};this._b=this._c.getContext("experimental-webgl",a)}catch(b){return!1}return this._b?!0:!1},_dj:function(a){var b=1/a,c=.5*-(1-b)*this._bh,d=.5*-(1-b)*this. [...]
+return function(l){for(var m=24,n=i.fromValues(0,0,0,1),o=0;m>o;++o){h.fromQuat(a,n);var p=h.transpose(c,a);h.mul(b,a,h.mul(d,l,p)),f.set(j,b[5],b[2],b[1]),f.set(k,Math.abs(j[0]),Math.abs(j[1]),Math.abs(j[2]));var q=k[0]>k[1]&&k[0]>k[2]?0:k[1]>k[2]?1:2,r=(q+1)%3,s=(q+2)%3;if(0===j[q])break;var t=(b[3*s+s]-b[3*r+r])/(2*j[q]),u=t>0?1:-1;t*=u;var v=t+(1e6>t?Math.sqrt(t*t+1):t),w=u/v,x=1/Math.sqrt(w*w+1);if(1===x)break;if(g.set(e,0,0,0,0),e[q]=u*Math.sqrt((1-x)/2),e[q]*=-1,e[3]=Math.sqrt(1-e [...]
+for(k=1;z>k;++k){a[0]=v[3*(k-1)],a[1]=v[3*(k-1)+1],a[2]=v[3*(k-1)+2],b[0]=v[3*(k-0)],b[1]=v[3*(k-0)+1],b[2]=v[3*(k-0)+2],c[0]=w[4*(k-1)+0],c[1]=w[4*(k-1)+1],c[2]=w[4*(k-1)+2],c[3]=w[4*(k-1)+3],d[0]=w[4*(k-0)+0],d[1]=w[4*(k-0)+1],d[2]=w[4*(k-0)+2],d[3]=w[4*(k-0)+3];var A=Math.floor((k+y)/h.splineDetail);u=q[Math.min(q.length-1,A)],g.addLine(a,c,b,d,t,u),t=u;var B=g.numVerts();f.addAssoc(i,g,m+k,B-1,B+(k===j.length-1?0:1))}return f.setPerResidueColors(i,o),h.float32Allocator.release(n),h.f [...]
+return this.add(a,e)},tube:function(a,b,c){return c=c||{},c.forceTube=!0,this.cartoon(a,b,c)},ballsAndSticks:function(a,c,d){var e=this._0(d,c);e.color=e.color||b.byElement(),e.cylRadius=e.radius||e.cylRadius||.1,e.sphereRadius=e.radius||e.sphereRadius||.2,e.arcDetail=2*(e.arcDetail||this.options("arcDetail")),e.sphereDetail=e.sphereDetail||this.options("sphereDetail"),e.scaleByAtomRadius=w(e,"scaleByAtomRadius",!0);var f=m.ballsAndSticks(c,this._c.gl(),e);return this.add(a,f)},lines:fun [...]
+h[0]=0,h[1]=0,h[2]=0,h[3]=0,h[4]=0,h[5]=0,h[6]=0,h[7]=0,h[8]=0;for(var i=0;i<d.length;++i){g.sub(a,c[i].pos(),e),g.sub(b,d[i].pos(),f);var j=a,k=b;h[0]+=j[0]*k[0],h[1]+=j[0]*k[1],h[2]+=j[0]*k[2],h[3]+=j[1]*k[0],h[4]+=j[1]*k[1],h[5]+=j[1]*k[2],h[6]+=j[2]*k[0],h[7]+=j[2]*k[1],h[8]+=j[2]*k[2]}}}(),k=function(){var a=g.create(),b=g.create(),c=g.create(),d=h.create(),e=h.create(),f=h.create(),k=h.create(),l=h.create();return function(m,n){var o=m.atoms(),p=n.atoms();if(i(p,a),i(o,b),o.length! [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/registry.json b/config/plugins/visualizations/charts/static/repository/registry.json
new file mode 100644
index 0000000..6a501f2
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/registry.json
@@ -0,0 +1,9 @@
+{
+    "nvd3"      : [ "bar", "bar_horizontal", "bar_horizontal_stacked", "bar_stacked", "line", "line_focus", "scatter", "stackedarea", "stackedarea_full", "stackedarea_stream", "pie", "histogram", "histogram_discrete" ],
+    "jqplot"    : [ "bar", "boxplot", "histogram_discrete", "line", "scatter" ],
+    "biojs"     : [ "msa", "drawrnajs" ],
+    "others"    : [ "example", "heatmap", "heatmap_cluster" ],
+    "cytoscape" : [ "basic" ],
+    "pv"        : [ "viewer" ],
+    "benfred"   : [ "venn" ]
+}
diff --git a/config/plugins/visualizations/charts/static/repository/utilities/jobs.js b/config/plugins/visualizations/charts/static/repository/utilities/jobs.js
new file mode 100644
index 0000000..e198b14
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/utilities/jobs.js
@@ -0,0 +1,101 @@
+/** This class handles job submissions to the Galaxy API. */
+define( [ 'utilities/utils' ], function( Utils ) {
+
+    /** Time to wait before refreshing to check if job has completed */
+    var WAITTIME = 1000;
+
+    /** Submit job request to charts tool */
+    var request = function( chart, parameters, success, error ) {
+        chart.state( 'wait', 'Requesting job results...' );
+        if ( chart.get( 'modified' ) ) {
+            cleanup( chart );
+            chart.set( 'modified', false );
+        }
+        var dataset_id_job = chart.get( 'dataset_id_job' );
+        if ( dataset_id_job != '' ) {
+            wait( chart, success, error );
+        } else {
+            var chart_id            = chart.id;
+            var chart_type          = chart.get( 'type' );
+            var chart_definition    = chart.definition;
+            chart.state( 'wait', 'Sending job request...' );
+            Utils.request({
+                type    : 'POST',
+                url     : Galaxy.root + 'api/tools',
+                data    : parameters,
+                success : function( response ) {
+                    if ( !response.outputs || response.outputs.length == 0 ) {
+                        chart.state( 'failed', 'Job submission failed. No response.' );
+                        error && error();
+                    } else {
+                        refreshHdas();
+                        var job = response.outputs[0];
+                        chart.state( 'wait', 'Your job has been queued. You may close the browser window. The job will run in the background.' );
+                        chart.set( 'dataset_id_job', job.id );
+                        chart.save();
+                        wait( chart, success, error );
+                    }
+                },
+                error   : function( response ) {
+                    var message = '';
+                    if ( response && response.message && response.message.data && response.message.data.input ) {
+                        message = response.message.data.input + '.';
+                    }
+                    chart.state( 'failed', 'This visualization requires the \'' + parameters.tool_id + '\' tool. Please make sure it is installed. ' + message );
+                    error && error();
+                }
+            });
+        }
+    };
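+
+    /* Editor's note: a hypothetical call sequence, with 'Jobs' being this
+     * module as returned by require, 'chart' a model exposing
+     * get/set/save/state, and the parameters matching the Galaxy 'api/tools'
+     * payload (at minimum a tool_id, as referenced in the error handler):
+     *
+     *   Jobs.request( chart, { tool_id : 'charts' },
+     *                 function( dataset ) { render( dataset ) },
+     *                 function() { console.debug( 'job failed' ) } );
+     */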
+
+    /** Remove previous data when re-running jobs */
+    var cleanup = function( chart ) {
+        var previous = chart.get( 'dataset_id_job' );
+        if ( previous != '' ) {
+            Utils.request({
+                type    : 'PUT',
+                url     : Galaxy.root + 'api/histories/none/contents/' + previous,
+                data    : { deleted: true },
+                success : function() { refreshHdas() }
+            });
+            chart.set( 'dataset_id_job', '' );
+        }
+    };
+
+    /** Poll the job's output dataset until it is ready or has failed */
+    var wait = function( chart, success, error ) {
+        Utils.request({
+            type    : 'GET',
+            url     : Galaxy.root + 'api/datasets/' + chart.get( 'dataset_id_job' ),
+            data    : {},
+            success : function( dataset ) {
+                var ready = false;
+                switch ( dataset.state ) {
+                    case 'ok':
+                        chart.state( 'wait', 'Job completed successfully...' );
+                        success && success( dataset );
+                        ready = true;
+                        break;
+                    case 'error':
+                        chart.state( 'failed', 'Job has failed. Please check the history for details.' );
+                        error && error( dataset );
+                        ready = true;
+                        break;
+                    case 'running':
+                        chart.state( 'wait', 'Your job is running. You may close the browser window. The job will continue in the background.' );
+                }
+                !ready && setTimeout( function() { wait( chart, success, error ) }, WAITTIME );
+            }
+        });
+    };
+
+    /** Refresh history panel */
+    var refreshHdas = function() {
+        Galaxy && Galaxy.currHistoryPanel && Galaxy.currHistoryPanel.refreshContents();
+    };
+
+    return { request: request };
+});
\ No newline at end of file
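A minimal usage sketch for the jobs module above (illustrative only, not part of the imported file; `chart` is assumed to be a charts model instance exposing state()/get()/set()/save(), and the tool parameters are placeholders):

    define( [ 'utilities/jobs' ], function( Jobs ) {
        // `chart` and the parameter dictionary are assumed to be built elsewhere.
        var parameters = { tool_id : 'charts', inputs : {} };
        Jobs.request( chart, parameters,
            function( dataset ) { console.debug( 'Result dataset: ' + dataset.id ); },
            function() { console.debug( 'Job failed or tool unavailable.' ); } );
    });
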
diff --git a/config/plugins/visualizations/charts/static/repository/utilities/utils.js b/config/plugins/visualizations/charts/static/repository/utilities/utils.js
new file mode 100644
index 0000000..0da706b
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/utilities/utils.js
@@ -0,0 +1,123 @@
+/** Useful helper functions */
+define( [], function() {
+
+    /** Deep clone a JSON-serializable object */
+    function clone( obj ) {
+        return JSON.parse( JSON.stringify( obj ) || null );
+    }
+
+    /**
+     * Check whether a string contains valid JSON
+     * @param{String}   text - Content to be validated
+     */
+    function isJSON(text) {
+        return /^[\],:{}\s]*$/.test(text.replace(/\\["\\\/bfnrtu]/g, '@').
+            replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
+            replace(/(?:^|:|,)(?:\s*\[)+/g, ''));
+    };
+
+    /**
+     * Request handler for GET with optional caching
+     * @param{Object}   options - Dictionary with url, data, success/error callbacks and a cache flag
+     */
+    function get (options) {
+        top.__utils__get__ = top.__utils__get__ || {};
+        var cache_key = JSON.stringify( options );
+        if (options.cache && top.__utils__get__[cache_key]) {
+            options.success && options.success(top.__utils__get__[cache_key]);
+            window.console.debug('utils.js::get() - Fetching from cache [' + options.url + '].');
+        } else {
+            request({
+                url     : options.url,
+                data    : options.data,
+                success : function(response) {
+                    top.__utils__get__[cache_key] = response;
+                    options.success && options.success(response);
+                },
+                error : function(response) {
+                    options.error && options.error(response);
+                }
+            });
+        }
+    };
+
+    /**
+     * Generic request handler
+     * @param{Object}   options - Dictionary with type ('GET', 'POST', 'DELETE' or 'PUT'), url, data and success/error/complete callbacks
+     */
+    function request (options) {
+        var ajaxConfig = {
+            contentType : 'application/json',
+            type        : options.type || 'GET',
+            data        : options.data || {},
+            url         : options.url
+        };
+        if ( ajaxConfig.type == 'GET' || ajaxConfig.type == 'DELETE' ) {
+            if ( !$.isEmptyObject(ajaxConfig.data) ) {
+                ajaxConfig.url += ajaxConfig.url.indexOf('?') == -1 ? '?' : '&';
+                ajaxConfig.url += $.param(ajaxConfig.data, true);
+            }
+            ajaxConfig.data = null;
+        } else {
+            ajaxConfig.dataType = 'json';
+            ajaxConfig.data     = JSON.stringify( ajaxConfig.data );
+        }
+        $.ajax( ajaxConfig ).done( function( response ) {
+            if ( typeof response === 'string' && isJSON( response ) ) {
+                try {
+                    response = response.replace( 'Infinity,', '"Infinity",' );
+                    response = jQuery.parseJSON( response );
+                } catch ( e ) {
+                    console.debug( e );
+                }
+            }
+            options.success && options.success( response );
+        }).fail( function( response ) {
+            var response_text = null;
+            try {
+                response_text = jQuery.parseJSON( response.responseText );
+            } catch (e) {
+                response_text = response.responseText;
+            }
+            options.error && options.error( response_text, response );
+        }).always( function() {
+            options.complete && options.complete();
+        });
+    };
+
+    /**
+     * Safely merge two dictionaries
+     * @param{Object}   options         - Target dictionary
+     * @param{Object}   optionsDefault  - Dictionary of default values
+     */
+    function merge (options, optionsDefault) {
+        if (options) {
+            return _.defaults(options, optionsDefault);
+        } else {
+            return optionsDefault;
+        }
+    };
+
+    /** Create a unique id */
+    function uid(){
+        top.__utils__uid__ = top.__utils__uid__ || 0;
+        return 'uid-' + top.__utils__uid__++;
+    };
+
+    return {
+        get     : get,
+        merge   : merge,
+        uid     : uid,
+        request : request,
+        clone   : clone,
+        isJSON  : isJSON
+    };
+});
\ No newline at end of file
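For illustration (not part of the imported file), the helpers above could be exercised as follows; the endpoints mirror the Galaxy API calls already made in jobs.js, and `dataset_id` is a placeholder assumed to be in scope:

    define( [ 'utilities/utils' ], function( Utils ) {
        // Cached GET: a second call with identical options is answered from the cache.
        Utils.get({ cache : true, url : Galaxy.root + 'api/datasets/' + dataset_id, data : {},
                    success : function( response ) { console.debug( response ); } });
        // POST with a JSON body; request() stringifies `data` for non-GET/DELETE methods.
        Utils.request({ type : 'POST', url : Galaxy.root + 'api/tools', data : { tool_id : 'charts' },
                        success : function( response ) { console.debug( response ); } });
    });
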
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/config.js
new file mode 100644
index 0000000..5d50948
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/config.js
@@ -0,0 +1,24 @@
+define( [], function() {
+    return {
+        title       : 'Venn Diagram',
+        library     : 'benfred',
+        tag         : 'svg',
+        datatypes   : [ 'tabular', 'csv' ],
+        keywords    : 'default venn overlap circle',
+        description : 'A JavaScript library for laying out area-proportional Venn and Euler diagrams, hosted at https://github.com/benfred/venn.js.',
+        exports     : [ 'png', 'svg', 'pdf' ],
+        groups      : {
+            key: {
+                label       : 'Provide a label',
+                type        : 'text',
+                placeholder : 'Data label',
+                value       : 'Data label'
+            },
+            observation : {
+                label       : 'Column with observations',
+                type        : 'data_column',
+                is_label    : true
+            }
+        }
+    }
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/logo.png
new file mode 100644
index 0000000..3d0b706
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/wrapper.js
new file mode 100644
index 0000000..eec653c
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/benfred/venn/wrapper.js
@@ -0,0 +1,87 @@
+define( [ 'visualizations/utilities/tabular-datasets', 'plugins/benfred/venn', 'style!css!plugins/benfred/venn.css' ], function( Datasets, Venn ) {
+    return Backbone.View.extend({
+        _combinations: function( current, remaining, results ) {
+            var self = this;
+            _.each( remaining, function( value, index ) {
+                var new_current = current.slice();
+                var new_remaining = remaining.slice();
+                new_remaining.splice( 0, index + 1 );
+                new_current.push( value );
+                results.push( new_current );
+                self._combinations( new_current, new_remaining, results );
+            });
+        },
+
+        initialize: function( options ) {
+            var self = this;
+            var separator = '_';
+            Datasets.request({
+                dataset_id      : options.chart.get( 'dataset_id' ),
+                dataset_groups  : options.chart.groups,
+                success         : function( result ) {
+                    var group_keys   = [];
+                    var group_values = [];
+                    var all_values   = {};
+                    var set_size     = {};
+                    var group_ids    = [];
+                    _.each( result, function( group, i ) {
+                        var group_index = {};
+                        _.each( group.values, function( d ) {
+                            all_values[ d.observation ] = group_index[ d.observation ] = true;
+                        });
+                        group_keys.push( group.key );
+                        group_values.push( group_index );
+                        group_ids.push( i );
+                    });
+                    var combos = [];
+                    self._combinations( [], group_ids, combos );
+                    var sets = [];
+                    _.each( combos, function( c ) {
+                        var size = 0;
+                        for ( var value in all_values ) {
+                            var found = 0;
+                            _.each( c, function( group_id ) {
+                                if ( group_values[ group_id ][ value ] ) {
+                                    found++;
+                                }
+                            });
+                            if ( found == c.length ) {
+                                size++;
+                            }
+                        }
+                        if ( size > 0 ) {
+                            var set_labels = [];
+                            _.each( c, function( id ) {
+                                set_labels.push( group_keys[ id ]);
+                            });
+                            sets.push( { sets: set_labels, size: size } );
+                        }
+                    });
+                    var svg = d3.select( '#' + options.targets[ 0 ] ).datum( sets ).call( Venn.VennDiagram() );
+                    var tooltip = null;
+                    svg.selectAll( 'g' )
+                       .on( 'mouseover', function( d, i ) {
+                            Venn.sortAreas( svg, d );
+                            tooltip = d3.select( 'body' ).append( 'div' ).attr( 'class', 'venntooltip' );
+                            tooltip.transition().duration( 400 ).style( 'opacity', 0.9 );
+                            tooltip.text( d.size );
+                            var selection = d3.select( this ).transition( 'tooltip' ).duration( 400 );
+                            selection.select( 'path' )
+                                     .style( 'stroke-width', 3 )
+                                     .style( 'fill-opacity', d.sets.length == 1 ? 0.4 : 0.1 )
+                                     .style( 'stroke-opacity', 1 );
+                       })
+                       .on( 'mousemove', function() {
+                            tooltip.style( 'left', ( d3.event.pageX ) + 'px')
+                                   .style( 'top', ( d3.event.pageY - 28 ) + 'px');
+                       })
+                       .on( 'mouseout', function( d, i ) {
+                            tooltip.remove();
+                       });
+                    options.chart.state( 'ok', 'Venn diagram drawn.' );
+                    options.process.resolve();
+                }
+            });
+        }
+    });
+});
\ No newline at end of file
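A note on `_combinations` above (illustrative, not part of the imported file): it enumerates every non-empty subset of the group ids, and each subset becomes a candidate Venn area whose size is the number of observations shared by all groups in the subset. For three groups:

    var results = [];
    view._combinations( [], [ 0, 1, 2 ], results );    // `view` is a hypothetical instance of the wrapper
    // results: [ [0], [0,1], [0,1,2], [0,2], [1], [1,2], [2] ]
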
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/config.js
new file mode 100644
index 0000000..2c1add6
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/config.js
@@ -0,0 +1,9 @@
+define( [], function() {
+    return {
+        title       : 'RNA Structure Viewer',
+        library     : 'BioJS',
+        datatypes   : [ 'dbn' ],
+        keywords    : 'biojs',
+        description : 'Renders RNA secondary structures using drawrnajs, hosted at https://github.com/bene200/drawrnajs.'
+    }
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/logo.png
new file mode 100644
index 0000000..4c71a44
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/wrapper.js
new file mode 100644
index 0000000..f12166e
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/drawrnajs/wrapper.js
@@ -0,0 +1,27 @@
+define( [ 'plugins/biojs/drawrnajs/drawrna' ], function( DrawRNA ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            var chart    = options.chart;
+            var dataset  = options.dataset;
+            $.ajax({
+                url: dataset.download_url,
+                success: function( response ) {
+                    var input = response.split( '\n' );
+                    var app = new DrawRNA({
+                        el          : document.getElementById( options.targets[ 0 ] ),
+                        seq         : input[ 1 ],
+                        dotbr       : input[ 2 ],
+                        resindex    : false
+                    });
+                    app.render();
+                    chart.state( 'ok', 'Done.' );
+                    options.process.resolve();
+                },
+                error: function() {
+                    chart.state( 'failed', 'Could not access dataset content of \'' + dataset.name + '\'.' );
+                    options.process.resolve();
+                }
+            });
+        }
+    });
+});
\ No newline at end of file
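Since the wrapper above takes the sequence from line 1 and the dot-bracket string from line 2 of the downloaded content, the expected 'dbn' dataset follows the usual three-line dot-bracket layout, e.g. (sample data, not part of the imported file):

    >example hairpin
    GGGAAAUCC
    (((...)))
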
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/config.js
new file mode 100644
index 0000000..8953f85
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/config.js
@@ -0,0 +1,14 @@
+define( [], function() {
+    return {
+        title       : 'Multiple Sequence Alignment',
+        library     : 'BioJS',
+        datatypes   : [ 'txt' ],
+        keywords    : 'biojs',
+        description : 'The MSA viewer is a modular, reusable component to visualize large MSAs interactively on the web.',
+        settings    : {
+            conserv     : { label : 'Show conservation', help : 'Do you want to display a bar diagram indicating sequence conservation?', type : 'boolean', value : 'true' },
+            overviewbox : { label : 'Show overview box', help : 'Do you want to display the overview box below the sequence alignments?', type : 'boolean', value : 'true' },
+            menu        : { label : 'Show interactive menu', help : 'Do you want to show a menu for interactive configuration?', type : 'boolean', value : 'true' }
+        }
+    }
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/logo.png
new file mode 100644
index 0000000..ad87f32
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/wrapper.js
new file mode 100644
index 0000000..bcda33f
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/biojs/msa/wrapper.js
@@ -0,0 +1,21 @@
+define( [ 'plugins/biojs/biojs.msa' ], function() {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            var chart    = options.chart;
+            var dataset  = options.dataset;
+            var settings = options.chart.settings;
+            var m = new msa.msa({
+                el: $( '#'  + options.targets[ 0 ] ),
+                vis: {  conserv: settings.get( 'conserv' ) == 'true',
+                        overviewbox: settings.get( 'overviewbox' ) == 'true' },
+                menu: 'small',
+                bootstrapMenu: settings.get( 'menu' ) == 'true'
+            });
+            m.u.file.importURL( dataset.download_url, function() {
+                m.render();
+                chart.state( 'ok', 'Chart drawn.' );
+                options.process.resolve();
+            });
+        }
+    });
+});
\ No newline at end of file
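Note that the boolean settings declared in the msa config above are stored as the strings 'true'/'false', which is why the wrapper compares against 'true'. A hypothetical helper for such reads (not part of the imported file) could be:

    // Read a string-encoded boolean setting; `settings` is a Backbone model.
    function asBool( settings, key ) {
        return String( settings.get( key ) ) === 'true';
    }
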
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/config.js
new file mode 100644
index 0000000..8ea82d8
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/config.js
@@ -0,0 +1,9 @@
+define( [], function() {
+    return {
+        title       : 'Cytoscape (Default)',
+        library     : 'Cytoscape',
+        datatypes   : [ 'json' ],
+        keywords    : 'cytoscape graph nodes edges',
+        description : 'Basic Cytoscape viewer for connected graphs hosted at http://js.cytoscape.org.'
+    }
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/logo.png
new file mode 100644
index 0000000..e3c138a
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/wrapper.js
new file mode 100644
index 0000000..a9e0ee7
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/cytoscape/basic/wrapper.js
@@ -0,0 +1,56 @@
+define( [ 'utilities/utils', 'plugins/cytoscape/cytoscape' ], function( Utils, Cytoscape ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            var chart    = options.chart;
+            var dataset  = options.dataset;
+            Utils.get( {
+                url     : dataset.download_url,
+                success : function( content ) {
+                    try {
+                        Cytoscape( Utils.merge( content, {
+                            container: $( '#'  + options.targets[ 0 ] ),
+                            layout: {
+                                name: 'cose',
+                                idealEdgeLength: 100,
+                                nodeOverlap: 20
+                            },
+                            style: Cytoscape.stylesheet()
+                                            .selector('node')
+                                            .css({
+                                                'content': 'data(name)',
+                                                'text-valign': 'center',
+                                                'color': 'white',
+                                                'text-outline-width': 2,
+                                                'text-outline-color': '#888'
+                                            })
+                                            .selector('edge')
+                                            .css({
+                                                'target-arrow-shape': 'triangle'
+                                            })
+                                            .selector(':selected')
+                                            .css({
+                                                'background-color': 'black',
+                                                'line-color': 'black',
+                                                'target-arrow-color': 'black',
+                                                'source-arrow-color': 'black'
+                                            })
+                                            .selector('.faded')
+                                            .css({
+                                                'opacity': 0.25,
+                                                'text-opacity': 0
+                                            })
+                        } ) );
+                        chart.state( 'ok', 'Chart drawn.' );
+                    } catch( err ) {
+                        chart.state( 'failed', err );
+                    }
+                    options.process.resolve();
+                },
+                error: function() {
+                    chart.state( 'failed', 'Failed to access dataset.' );
+                    options.process.resolve();
+                }
+            });
+        }
+    });
+});
\ No newline at end of file
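For reference (sample data, not part of the imported file), a minimal 'json' dataset this viewer could render; the downloaded content is merged into the Cytoscape options and is therefore expected to provide the standard `elements` key:

    {
        "elements" : {
            "nodes" : [ { "data" : { "id" : "a", "name" : "A" } },
                        { "data" : { "id" : "b", "name" : "B" } } ],
            "edges" : [ { "data" : { "id" : "ab", "source" : "a", "target" : "b" } } ]
        }
    }
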
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/config.js
new file mode 100644
index 0000000..c3bc9ed
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/config.js
@@ -0,0 +1,19 @@
+define( [ 'visualizations/jqplot/common/config' ], function( plot_config ) {
+    return $.extend( true, {}, plot_config, {
+        title       : 'Bar diagram',
+        description : 'Renders a bar diagram using jqPlot hosted at http://www.jqplot.com.',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/logo.png
new file mode 100644
index 0000000..ff048af
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/wrapper.js
new file mode 100644
index 0000000..c5125b8
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/bar/wrapper.js
@@ -0,0 +1,22 @@
+define( [ 'visualizations/jqplot/common/wrapper' ], function( Plot ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.makeConfig = function( groups, plot_config ){
+                $.extend( true, plot_config, {
+                    seriesDefaults: {
+                        renderer : $.jqplot.BarRenderer
+                    },
+                    axes: {
+                        xaxis: {
+                            min  : -1
+                        },
+                        yaxis: {
+                            pad  : 1.2
+                        }
+                    }
+                });
+            };
+            new Plot( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/config.js
new file mode 100644
index 0000000..4bb7390
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/config.js
@@ -0,0 +1,16 @@
+define( [ 'visualizations/jqplot/common/config' ], function( plot_config ) {
+    return $.extend( true, {}, plot_config, {
+        title       : 'Box plot',
+        library     : 'jqPlot',
+        description : 'Processes tabular data using R and renders a box plot using jqPlot hosted at http://www.jqplot.com.',
+        tag         : 'div',
+        keywords    : 'jqplot default',
+        groups      : {
+            y : {
+                label       : 'Observations',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/logo.png
new file mode 100644
index 0000000..e679f1d
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/wrapper.js
new file mode 100644
index 0000000..ff86a37
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/boxplot/wrapper.js
@@ -0,0 +1,102 @@
+define( [ 'visualizations/jqplot/common/wrapper', 'utilities/jobs', 'visualizations/utilities/tabular-utilities' ], function( Plot, Jobs, Utilities ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            Jobs.request( options.chart, Utilities.buildJobDictionary( options.chart, 'boxplot' ), function( dataset ) {
+                var chart = options.chart;
+                var dataset_groups = new Backbone.Collection();
+                chart.groups.each( function( group, index ) {
+                    dataset_groups.add({
+                        __data_columns: { x: { is_numeric: true } },
+                        x     : index,
+                        key   : group.get( 'key' )
+                    });
+                });
+                var plot = new Plot( {
+                    process             : options.process,
+                    chart               : options.chart,
+                    dataset_id          : dataset.id,
+                    dataset_groups      : dataset_groups,
+                    targets             : options.targets,
+                    makeConfig          : function( groups, plot_config ){
+                        var boundary = Utilities.getDomains( groups, 'x' );
+                        $.extend( true, plot_config, {
+                            seriesDefaults: {
+                                renderer: $.jqplot.OHLCRenderer,
+                                rendererOptions : {
+                                    candleStick     : true,
+                                    fillUpBody      : true,
+                                    fillDownBody    : true
+                                }
+                            },
+                            axes : {
+                                xaxis: {
+                                    min: -1,
+                                    max: groups.length + 0.01
+                                },
+                                yaxis: {
+                                    min: boundary.x.min,
+                                    max: boundary.x.max
+                                }
+                            }
+                        });
+                    },
+                    makeCategories: function( groups ) {
+                        var x_labels = [];
+                        for ( var group_index in groups ) {
+                            x_labels.push( groups[ group_index ].key );
+                        }
+                        Utilities.mapCategories ( groups, x_labels );
+                        return {
+                            array: {
+                                x : x_labels
+                            }
+                        }
+                    },
+                    makeSeriesLabels : function ( groups, plot_config ) {
+                        return [ { label : 'Boxplot values' } ];
+                    },
+                    makeSeries: function ( groups ) {
+                        /* Example of the data layout consumed by the renderer:
+                           var catOHLC = [
+                               [0, 138.7, 139.68, 135.18, 135.4],
+                               [1, 143.46, 144.66, 139.79, 140.02],
+                               [2, 140.67, 143.56, 132.88, 142.44],
+                               [3, 136.01, 139.5, 134.53, 139.48]
+                           ];
+                           return [catOHLC]; */
+
+                        // plot data
+                        var plot_data = [];
+
+                        // check group length
+                        if ( groups.length == 0 || groups[0].values.length < 5 ) {
+                            chart.state( 'failed', 'Boxplot data could not be found.' );
+                            return [ plot_data ];
+                        }
+
+                        // map each group's summary values onto the [x, open, high, low, close]
+                        // layout expected by jqPlot's OHLCRenderer in candlestick mode
+                        var indices = [ 2, 4, 0, 1 ];
+                        for ( var group_index in groups ) {
+                            var group = groups[ group_index ];
+                            var point = [];
+                            point.push( parseInt( group_index ) );
+                            for ( var key in indices ) {
+                                point.push( group.values[ indices[ key ] ].x );
+                            }
+                            plot_data.push( point );
+                        }
+
+                        // HACK: the boxplot renderer has an issue with single elements
+                        var point = [];
+                        point[ 0 ] = plot_data.length;
+                        for ( var key in indices ) {
+                            point.push( 0 );
+                        }
+                        plot_data.push( point );
+                        return [ plot_data ];
+                    }
+                });
+            }, function() { options.process.reject() } );
+        }
+    });
+});
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/config.js
new file mode 100644
index 0000000..2ea828a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/config.js
@@ -0,0 +1,22 @@
+define( [ 'visualizations/utilities/tabular-form' ], function( default_config ) {
+    return $.extend(true, {}, default_config, {
+        title       : '',
+        library     : 'jqPlot',
+        tag         : 'div',
+        zoomable    : true,
+        keywords    : 'jqplot',
+        exports     : [ 'png' ],
+        settings    : {
+            x_axis_grid : {
+                label       : 'Axis grid',
+                help        : 'Would you like to show grid lines for the X axis?',
+                type        : 'boolean'
+            },
+            y_axis_grid : {
+                label       : 'Axis grid',
+                help        : 'Would you like to show grid lines for the Y axis?',
+                type        : 'boolean'
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/plot-config.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/plot-config.js
new file mode 100644
index 0000000..901c7c3
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/plot-config.js
@@ -0,0 +1,110 @@
+define( [], function() {
+    return function( chart ) {
+
+        // get chart settings
+        var settings = chart.settings;
+        var plot_config = {
+            enablePlugins: true,
+            seriesColors: d3.scale.category20().range(),
+            seriesDefaults: {
+                renderer                : $.jqplot.LineRenderer,
+                //lineWidth             : 1,                    // width of the line in pixels.
+                shadow                  : false,                // show shadow or not.
+                showLine                : true,                 // whether to render the line segments or not.
+                rendererOptions: {
+                    shadowDepth         : 0,
+                    barWidth            : 3,
+                    barPadding          : 3
+                },
+                markerRenderer          : $.jqplot.MarkerRenderer,
+                markerOptions: {
+                    show                : false,                // whether to show data point markers.
+                    style               : 'filledCircle',       // circle, diamond, square, filledCircle.
+                                                                // filledDiamond or filledSquare.
+                    lineWidth           : 0,                    // width of the stroke drawing the marker.
+                    size                : 10,                   // size (diameter, edge length, etc.) of the marker.
+                    shadow              : false,                // whether to draw shadow on marker or not.
+                    shadowAngle         : 45,                   // angle of the shadow.  Clockwise from x axis.
+                    shadowOffset        : 1,                    // offset from the line of the shadow,
+                    shadowDepth         : 3,                    // number of strokes to make when drawing shadow.  Each stroke
+                                                                // offset by shadowOffset from the last.
+                    shadowAlpha         : 0.07                  // opacity of the shadow
+                }
+            },
+
+            axesDefaults: {
+                labelRenderer           : $.jqplot.CanvasAxisLabelRenderer,
+                labelOptions: {
+                    fontSize            : '12pt',
+                    textColor           : '#000000'
+                },
+                tickRenderer            : $.jqplot.CanvasAxisTickRenderer ,
+                tickOptions: {
+                    fontSize            : '12pt',
+                    textColor           : '#000000'
+                }
+            },
+
+            axes: {
+                xaxis: {
+                    label               : chart.settings.get( 'x_axis_label' ),
+                    tickRenderer        : $.jqplot.CanvasAxisTickRenderer,
+                    tickOptions: {
+                        angle           : chart.settings.get( '__use_panels' ) === 'true' ? 0 : -30,
+                        showGridline    : chart.settings.get( 'x_axis_grid' ) === 'true'
+                    },
+                    pad                 : 0
+                },
+                yaxis: {
+                    label               : chart.settings.get( 'y_axis_label' ),
+                    tickOptions         : {
+                        showGridline    : chart.settings.get( 'y_axis_grid' ) === 'true'
+                    },
+                    pad                 : 0
+                }
+            },
+
+            grid: {
+                background              : '#FFFFFF',
+                borderWidth             : 0,
+                shadow                  : false
+            },
+
+            cursor: {
+                show                    : true,
+                zoom                    : true,
+                showTooltip             : false,
+                style                   : 'pointer'
+            },
+
+            highlighter: {
+                show                    : true,
+                showMarker              : false,
+                tooltipAxes             : 'xy'
+            },
+
+            series: []
+        };
+
+        // show the legend and put it outside the grid
+        if ( chart.settings.get( 'show_legend' ) == 'true' ) {
+            plot_config.legend = {
+                renderer                : $.jqplot.EnhancedLegendRenderer,
+                show                    : true,
+                placement               : 'outsideGrid',
+                location                : 'n',
+                rendererOptions: {
+                    textColor           : '#000000',
+                    fontSize            : '12pt',
+                    border              : 'none',
+                    shadowAlpha         : 1,
+                    background          : 'rgba(255, 255, 255, 0.9)',
+                    fontFamily          : 'Arial',
+                    numberRows          : 1
+                }
+            };
+        }
+
+        return plot_config;
+    };
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/wrapper.js
new file mode 100644
index 0000000..5d0d53c
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/common/wrapper.js
@@ -0,0 +1,98 @@
+/** This is the common wrapper for jqplot based visualizations. */
+define( [ 'visualizations/jqplot/common/plot-config', 'visualizations/utilities/tabular-utilities', 'plugins/jqplot/jquery.jqplot', 'plugins/jqplot/jquery.jqplot.plugins', 'style!css!plugins/jqplot/jquery.jqplot.css' ], function( configmaker, Utilities ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            this.options = options;
+            var self = this;
+            options.render = function( canvas_id, groups ) {
+                return self.render( canvas_id, groups )
+            };
+            Utilities.panelHelper( options );
+        },
+
+        /** Draw all data into a single canvas */
+        render: function( canvas_id, groups ) {
+            var chart               = this.options.chart;
+            var makeCategories      = this.options.makeCategories;
+            var makeSeries          = this.options.makeSeries;
+            var makeSeriesLabels    = this.options.makeSeriesLabels;
+            var makeConfig          = this.options.makeConfig;
+            var plot_config = configmaker( chart );
+            var plot_data = [];
+            try {
+                this._makeAxes( groups, plot_config, chart.settings );
+                if ( makeSeriesLabels ) {
+                    plot_config.series = makeSeriesLabels( groups, plot_config );
+                } else {
+                    plot_config.series = this._makeSeriesLabels( groups );
+                }
+                if ( makeSeries ) {
+                    plot_data = makeSeries( groups, plot_config );
+                } else {
+                    plot_data = Utilities.makeSeries( groups );
+                }
+                if ( makeConfig ) {
+                    makeConfig( groups, plot_config );
+                }
+                if ( chart.get( 'state' ) == 'failed' ) {
+                    return false;
+                }
+
+                // draw graph with default options, overwriting with passed options
+                function drawGraph ( opts ) {
+                    var canvas = $( '#' + canvas_id );
+                    if ( canvas.length == 0 ) {
+                        return;
+                    }
+                    canvas.empty();
+                    var plot_cnf = _.extend( _.clone( plot_config ), opts || {} );
+                    return $.jqplot( canvas_id, plot_data, plot_cnf );
+                }
+
+                // draw plot
+                var plot = drawGraph();
+                $( window ).on( 'resize', function () { drawGraph() } );
+                return true;
+            } catch ( err ) {
+                this._handleError( chart, err );
+                return false;
+            }
+        },
+
+        /** Make series labels */
+        _makeSeriesLabels: function( groups, plot_config ) {
+            var series = [];
+            for ( var group_index in groups ) {
+                series.push( { label: groups[ group_index ].key } );
+            }
+            return series;
+        },
+
+        /** Create axes formatting */
+        _makeAxes: function( groups, plot_config, settings ) {
+            var makeCategories = this.options.makeCategories;
+            var categories = makeCategories ? makeCategories( groups ) : Utilities.makeCategories( groups, [ 'x', 'y' ] );
+            function makeAxis (id) {
+                Utilities.makeTickFormat({
+                    categories  : categories.array[ id ],
+                    type        : settings.get( id + '_axis_type|type' ),
+                    precision   : settings.get( id + '_axis_type|precision' ),
+                    formatter   : function( formatter ) {
+                        if ( formatter ) {
+                            plot_config.axes[ id + 'axis' ].tickOptions.formatter = function( format, value ) {
+                                return formatter( value );
+                            };
+                        }
+                    }
+                });
+            };
+            makeAxis( 'x' );
+            makeAxis( 'y' );
+        },
+
+        /** Handle error */
+        _handleError: function( chart, err ) {
+            chart.state( 'failed', err );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/config.js
new file mode 100644
index 0000000..3b1d3e7
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/config.js
@@ -0,0 +1,14 @@
+define( [ 'visualizations/jqplot/common/config' ], function( default_config ) {
+    return $.extend( true, {}, default_config, {
+        title       : 'Discrete Histogram',
+        description : 'Derives a discrete histogram from tabular data using R and renders a regular bar diagram using jqPlot hosted at http://www.jqplot.com.',
+        keywords    : 'jqplot default',
+        groups      : {
+            x : {
+                label       : 'Observations',
+                type        : 'data_column',
+                is_label    : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/logo.png
new file mode 100644
index 0000000..6cc1971
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/wrapper.js
new file mode 100644
index 0000000..41bc6b2
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/histogram_discrete/wrapper.js
@@ -0,0 +1,26 @@
+define( [ 'visualizations/utilities/tabular-utilities', 'utilities/jobs', 'visualizations/jqplot/common/wrapper' ], function( Utilities, Jobs, Plot ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            Jobs.request( options.chart, Utilities.buildJobDictionary( options.chart, 'histogramdiscrete' ), function( dataset ) {
+                var dataset_groups = new Backbone.Collection();
+                options.chart.groups.each( function( group, index ) {
+                    dataset_groups.add({
+                        __data_columns: { x: { is_label: true }, y: { is_numeric: true } },
+                        x     : 0,
+                        y     : index + 1,
+                        key   : group.get( 'key' )
+                    });
+                });
+                options.dataset_id = dataset.id;
+                options.dataset_groups = dataset_groups;
+                options.makeConfig = function( groups, plot_config ){
+                    $.extend( true, plot_config, {
+                        seriesDefaults: { renderer: $.jqplot.BarRenderer },
+                        axes: { xaxis: { min : -1 }, yaxis: { pad : 1.2 } }
+                    });
+                };
+                new Plot( options );
+            });
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/config.js
new file mode 100644
index 0000000..6089471
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/config.js
@@ -0,0 +1,19 @@
+define( [ 'visualizations/jqplot/common/config' ], function( plot_config ) {
+    return $.extend( true, {}, plot_config, {
+        title       : 'Line chart',
+        description : 'Renders a line chart using jqPlot hosted at http://www.jqplot.com.',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/logo.png
new file mode 100644
index 0000000..4ddff79
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/wrapper.js
new file mode 100644
index 0000000..1035929
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/line/wrapper.js
@@ -0,0 +1,7 @@
+define( [ 'visualizations/jqplot/common/wrapper' ], function( Plot ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            new Plot( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/config.js
new file mode 100644
index 0000000..d9357d6
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/config.js
@@ -0,0 +1,18 @@
+define( [ 'visualizations/jqplot/common/config' ], function( plot_config ) {
+    return $.extend( true, {}, plot_config, {
+        title       : 'Scatter plot',
+        description : 'Renders a scatter plot using jqPlot hosted at http://www.jqplot.com.',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/logo.png
new file mode 100644
index 0000000..2486a48
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/wrapper.js
new file mode 100644
index 0000000..fe3f802
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/jqplot/scatter/wrapper.js
@@ -0,0 +1,18 @@
+define( [ 'visualizations/jqplot/common/wrapper' ], function( Plot ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.makeConfig = function( groups, plot_config ) {
+                $.extend( true, plot_config, {
+                    seriesDefaults: {
+                        renderer: $.jqplot.LineRenderer,
+                        showLine: false,
+                        markerOptions : {
+                            show    : true
+                        }
+                    }
+                });
+            };
+            new Plot( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/config.js
new file mode 100644
index 0000000..25a7f67
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/config.js
@@ -0,0 +1,19 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend(true, {}, nvd3_config, {
+        title       : 'Bar diagram',
+        description : 'Renders a regular bar diagram using NVD3 hosted at http://www.nvd3.org.',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/logo.png
new file mode 100644
index 0000000..ff048af
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/wrapper.js
new file mode 100644
index 0000000..1a77a13
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar/wrapper.js
@@ -0,0 +1,8 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'multiBarChart';
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/config.js
new file mode 100644
index 0000000..1c79fdd
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/config.js
@@ -0,0 +1,19 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend( true, {}, nvd3_config, {
+        title       : 'Horizontal',
+        description : 'Renders a horizontal bar diagram using NVD3 hosted at http://www.nvd3.org.',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/logo.png
new file mode 100644
index 0000000..a88d6a5
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/wrapper.js
new file mode 100644
index 0000000..d8fdd31
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal/wrapper.js
@@ -0,0 +1,8 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'multiBarHorizontalChart';
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/config.js
new file mode 100644
index 0000000..27ec284
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/config.js
@@ -0,0 +1,20 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend( true, {}, nvd3_config, {
+        title       : 'Stacked horizontal',
+        description : 'Renders a stacked horizontal bar diagram using NVD3 hosted at http://www.nvd3.org.',
+        use_panels  : 'no',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/logo.png
new file mode 100644
index 0000000..c502596
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/wrapper.js
new file mode 100644
index 0000000..08240cc
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_horizontal_stacked/wrapper.js
@@ -0,0 +1,11 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'multiBarHorizontalChart';
+            options.makeConfig = function( nvd3_model ) {
+                nvd3_model.stacked( true );
+            };
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/config.js
new file mode 100644
index 0000000..cbce6f5
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/config.js
@@ -0,0 +1,20 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend( true, {}, nvd3_config, {
+        title       : 'Stacked',
+        description : 'Renders a stacked bar diagram using NVD3 hosted at http://www.nvd3.org.',
+        use_panels  : 'no',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/logo.png
new file mode 100644
index 0000000..bac9937
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/wrapper.js
new file mode 100644
index 0000000..03c2920
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/bar_stacked/wrapper.js
@@ -0,0 +1,11 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'multiBarChart';
+            options.makeConfig = function( nvd3_model ) {
+                nvd3_model.stacked( true );
+            };
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/common/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/common/config.js
new file mode 100644
index 0000000..b1dd900
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/common/config.js
@@ -0,0 +1,21 @@
+define( [ 'visualizations/utilities/tabular-form' ], function( default_config ) {
+    return $.extend( true, {}, default_config, {
+        title       : '',
+        library     : 'NVD3',
+        tag         : 'svg',
+        keywords    : 'nvd3 default',
+        exports     : [ 'png', 'svg', 'pdf' ],
+        groups      : {
+            color: {
+                label       : 'Pick a series color',
+                type        : 'color'
+            },
+            tooltip : {
+                label       : 'Data point labels',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            }
+        }
+    });
+});
\ No newline at end of file
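
All of the NVD3 configs in this commit build on this file through jQuery's deep merge: $.extend( true, {}, nvd3_config, { ... } ) first copies the shared defaults (the color and tooltip groups, the svg tag, the export formats) and then overlays the chart-specific keys, so nested objects such as `groups` are merged key by key rather than replaced. A toy illustration of that behavior, using plain jQuery and made-up keys:

    var base    = { tag: 'svg', groups: { color: { type: 'color' } } };
    var derived = $.extend( true, {}, base, { groups: { x: { type: 'data_column' } } } );
    // derived.groups contains BOTH color (inherited) and x (added);
    // a shallow $.extend would have overwritten groups wholesale.
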
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/common/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/common/wrapper.js
new file mode 100644
index 0000000..142baa3
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/common/wrapper.js
@@ -0,0 +1,75 @@
+/** This is the common wrapper for nvd3 based visualizations. */
+define( [ 'visualizations/utilities/tabular-utilities', 'plugins/nvd3/nv.d3', 'style!css!plugins/nvd3/nv.d3.css' ], function( Utilities ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            var self = this;
+            this.options = options;
+            options.render = function( canvas_id, groups ) {
+                return self.render( canvas_id, groups );
+            };
+            Utilities.panelHelper( options );
+        },
+
+        render: function( canvas_id, groups ) {
+            var self = this;
+            var chart       = this.options.chart;
+            var type        = this.options.type;
+            var makeConfig  = this.options.makeConfig;
+            var d3chart = nv.models[ type ]();
+            nv.addGraph( function() {
+                try {
+                    d3chart.xAxis.axisLabel( chart.settings.get( 'x_axis_label' ) );
+                    d3chart.yAxis.axisLabel( chart.settings.get( 'y_axis_label' ) ).axisLabelDistance( 30 );
+                    d3chart.options( { showControls: false } );
+                    d3chart.showLegend && d3chart.showLegend( chart.settings.get( 'show_legend' ) == 'true' );
+                    self._makeAxes( d3chart, groups, chart.settings );
+                    makeConfig && makeConfig( d3chart );
+                    chart.settings.get( '__use_panels' ) === 'true' && d3chart.options( { showControls: false } );
+                    d3chart.xAxis.showMaxMin( false );
+                    d3chart.yAxis.showMaxMin( chart.definition.showmaxmin );
+                    d3chart.tooltipContent( function( key, x, y, graph ) {
+                        return '<h3>' + ( graph.point.tooltip || key ) + '</h3>';
+                    });
+                    if ( $( '#' + canvas_id ).length > 0 ) {
+                        var canvas = d3.select( '#' + canvas_id );
+                        canvas.datum( groups ).call( d3chart );
+                        if ( chart.definition.zoomable && chart.definition.zoomable != 'native' ) {
+                            d3chart.clipEdge && d3chart.clipEdge( true );
+                            Utilities.addZoom({
+                                xAxis  : d3chart.xAxis,
+                                yAxis  : d3chart.yAxis,
+                                yDomain: d3chart.yDomain,
+                                xDomain: d3chart.xDomain,
+                                redraw : function() { d3chart.update() },
+                                svg    : canvas
+                            });
+                        }
+                        nv.utils.windowResize( d3chart.update );
+                    }
+                } catch ( err ) {
+                    chart.state( 'failed', err );
+                }
+            });
+            return true;
+        },
+
+        /** Format axes ticks */
+        _makeAxes: function( d3chart, groups, settings ) {
+            var categories = Utilities.makeCategories( groups, [ 'x', 'y' ] );
+            function makeTickFormat( id ) {
+                Utilities.makeTickFormat({
+                    categories  : categories.array[ id ],
+                    type        : settings.get( id + '_axis_type|type' ),
+                    precision   : settings.get( id + '_axis_type|precision' ),
+                    formatter   : function( formatter ) {
+                        formatter && d3chart[ id + 'Axis' ].tickFormat( function( value ) {
+                            return formatter( value );
+                        });
+                    }
+                });
+            }
+            makeTickFormat( 'x' );
+            makeTickFormat( 'y' );
+        }
+    });
+});
\ No newline at end of file
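
This shared wrapper is the single rendering path for all the simple NVD3 charts in this commit: panelHelper() resolves the data groups and target canvases, render() looks the model up in nv.models[ type ], labels and formats the axes, binds the data with canvas.datum( groups ).call( d3chart ), and optionally wires in the custom zoom helper. Stripped of the Backbone and Galaxy plumbing, the underlying NVD3 idiom it wraps is the standard one (NVD3 1.x API; the selector and data below are placeholders):

    nv.addGraph( function() {
        var chart = nv.models.multiBarChart();             // same registry the wrapper indexes by `type`
        d3.select( '#some-svg' )                           // placeholder svg element
          .datum( [ { key: 'demo', values: [ { x: 0, y: 1 }, { x: 1, y: 3 } ] } ] )
          .call( chart );
        nv.utils.windowResize( chart.update );             // keep it responsive, as the wrapper does
        return chart;
    });
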
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/config.js
new file mode 100644
index 0000000..fbabf47
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/config.js
@@ -0,0 +1,27 @@
+define( [ 'visualizations/utilities/tabular-form' ], function( default_config ) {
+    return $.extend( true, {}, default_config, {
+        library     : 'NVD3',
+        tag         : 'svg',
+        title       : 'Histogram',
+        description : 'Uses the R-based `charts` tool to derive a histogram and displays it as a regular or stacked bar diagram using NVD3 hosted at http://www.nvd3.org.',
+        keywords    : 'nvd3 default',
+        datatype    : 'tabular',
+        groups      : {
+            key : {
+                label       : 'Provide a label',
+                type        : 'text',
+                placeholder : 'Data label',
+                value       : 'Data label'
+            },
+            color : {
+                label       : 'Pick a series color',
+                type        : 'color'
+            },
+            y : {
+                label       : 'Observations',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/logo.png
new file mode 100644
index 0000000..6cc1971
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/wrapper.js
new file mode 100644
index 0000000..e0c5d2a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram/wrapper.js
@@ -0,0 +1,24 @@
+define( [ 'visualizations/utilities/tabular-utilities', 'utilities/jobs', 'visualizations/nvd3/common/wrapper' ], function( Utilities, Jobs, NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            Jobs.request( options.chart, Utilities.buildJobDictionary( options.chart, 'histogram' ), function( dataset ) {
+                var dataset_groups = new Backbone.Collection();
+                options.chart.groups.each( function( group, index ) {
+                    dataset_groups.add({
+                        __data_columns: { x: { is_numeric: true }, y: { is_numeric: true } },
+                        x     : 0,
+                        y     : index + 1,
+                        key   : group.get( 'key' )
+                    });
+                });
+                options.dataset_id = dataset.id;
+                options.dataset_groups = dataset_groups;
+                options.type = 'multiBarChart';
+                options.makeConfig = function( nvd3_model ) {
+                    nvd3_model.options( { showControls: true } );
+                };
+                new NVD3( options );
+            }, function() { options.process.reject() } );
+        }
+    });
+});
\ No newline at end of file
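
Unlike the plain bar and line wrappers, the histogram first runs the server-side `charts` tool via Jobs.request() and then points the generic NVD3 wrapper at the tool's output instead of the original dataset: column 0 of the result holds the bin positions and each following column holds the bin counts of one input group. The loop above therefore yields group descriptors equivalent to the following (two groups shown, values purely illustrative):

    [ { __data_columns: { x: { is_numeric: true }, y: { is_numeric: true } }, x: 0, y: 1, key: 'sample A' },
      { __data_columns: { x: { is_numeric: true }, y: { is_numeric: true } }, x: 0, y: 2, key: 'sample B' } ]
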
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/config.js
new file mode 100644
index 0000000..079b046
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/config.js
@@ -0,0 +1,17 @@
+define( [ 'visualizations/utilities/tabular-form' ], function( default_config ) {
+    return $.extend(true, {}, default_config, {
+        library     : 'NVD3',
+        tag         : 'svg',
+        title       : 'Discrete Histogram',
+        description : 'Uses the R-based `charts` tool to derive a histogram for discrete data, e.g. text labels. The result is displayed as a regular or stacked bar diagram using NVD3 hosted at http://www.nvd3.org.',
+        keywords    : 'nvd3',
+        datatype    : 'tabular',
+        groups      : {
+            x : {
+                label       : 'Observations',
+                type        : 'data_column',
+                is_label    : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/logo.png
new file mode 100644
index 0000000..6cc1971
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/wrapper.js
new file mode 100644
index 0000000..e55e3fd
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/histogram_discrete/wrapper.js
@@ -0,0 +1,24 @@
+define( [ 'visualizations/utilities/tabular-utilities', 'utilities/jobs', 'visualizations/nvd3/common/wrapper' ], function( Utilities, Jobs, NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            Jobs.request( options.chart, Utilities.buildJobDictionary( options.chart, 'histogramdiscrete' ), function( dataset ) {
+                var dataset_groups = new Backbone.Collection();
+                options.chart.groups.each( function( group, index ) {
+                    dataset_groups.add({
+                        __data_columns: { x: { is_label: true }, y: { is_numeric: true } },
+                        x     : 0,
+                        y     : index + 1,
+                        key   : group.get( 'key' )
+                    });
+                });
+                options.dataset_id = dataset.id;
+                options.dataset_groups = dataset_groups;
+                options.type = 'multiBarChart';
+                options.makeConfig = function( nvd3_model ) {
+                    nvd3_model.options( { showControls: true } );
+                };
+                new NVD3( options );
+            }, function() { options.process.reject() } );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/config.js
new file mode 100644
index 0000000..2f43303
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/config.js
@@ -0,0 +1,20 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend( true, {}, nvd3_config, {
+        title       : 'Line chart',
+        description : 'Renders a line chart using NVD3 hosted at http://www.nvd3.org.',
+        zoomable    : true,
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/logo.png
new file mode 100644
index 0000000..4ddff79
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/wrapper.js
new file mode 100644
index 0000000..ca64b4b
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line/wrapper.js
@@ -0,0 +1,8 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'lineChart';
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/config.js
new file mode 100644
index 0000000..6388419
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/config.js
@@ -0,0 +1,20 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend( true, {}, nvd3_config, {
+        title       : 'Line with focus',
+        description : 'Renders a line chart with a focus panel using NVD3 hosted at http://www.nvd3.org.',
+        zoomable    : 'native',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/logo.png
new file mode 100644
index 0000000..4cc7e88
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/wrapper.js
new file mode 100644
index 0000000..b118638
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/line_focus/wrapper.js
@@ -0,0 +1,8 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'lineWithFocusChart';
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/config.js
new file mode 100644
index 0000000..4d54d69
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/config.js
@@ -0,0 +1,73 @@
+define( [ 'visualizations/utilities/tabular-form' ], function( default_config ) {
+    return $.extend( true, {}, default_config, {
+        title       : 'Pie chart',
+        description : 'Renders a pie chart using NVD3 hosted at http://www.nvd3.org.',
+        library     : 'NVD3',
+        tag         : 'svg',
+        keywords    : 'nvd3 default',
+        datatypes   : [ 'tabular', 'csv' ],
+        exports     : [ 'png', 'svg', 'pdf' ],
+        use_panels  : 'yes',
+        groups      : {
+            label : {
+                label       : 'Labels',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        },
+        settings : {
+            donut_ratio : {
+                label       : 'Donut ratio',
+                help        : 'Determines how large the donut hole will be.',
+                type        : 'float',
+                value       : 0.5,
+                max         : 1,
+                min         : 0.0
+            },
+            show_legend : {
+                label       : 'Show legend',
+                help        : 'Would you like to add a legend?',
+                type        : 'select',
+                display     : 'radiobutton',
+                value       : 'false',
+                data        : [ { label : 'Yes', value : 'true'  }, { label : 'No',  value : 'false' } ]
+            },
+            label_type  : {
+                type        : 'conditional',
+                test_param  : {
+                    name        : 'type',
+                    label       : 'Donut label',
+                    type        : 'select',
+                    value       : 'percent',
+                    help        : 'What would you like to show for each slice?',
+                    data        : [ { value : 'hide',    label : '-- Nothing --' },
+                                    { value : 'key',     label : 'Label column' },
+                                    { value : 'percent', label : 'Percentage' } ]
+                },
+                cases       : [ { value   : 'hide' },
+                                { value   : 'key',     inputs: [ { name     : 'label_outside',
+                                                                   label    : 'Show outside',
+                                                                   help     : 'Would you like to show labels outside the donut?',
+                                                                   type     : 'select',
+                                                                   display  : 'radiobutton',
+                                                                   value    : 'true',
+                                                                   data     : [ { label : 'Yes', value : 'true'  },
+                                                                                { label : 'No',  value : 'false' } ] } ] },
+                                { value   : 'percent', inputs: [ { name     : 'label_outside',
+                                                                   label    : 'Show outside',
+                                                                   help     : 'Would you like to show labels outside the donut?',
+                                                                   type     : 'select',
+                                                                   display  : 'radiobutton',
+                                                                   value    : 'true',
+                                                                   data     : [ { label : 'Yes', value : 'true'  },
+                                                                                { label : 'No',  value : 'false' } ] } ] } ]
+            }
+        }
+    });
+});
\ No newline at end of file
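
The `label_type` block above is a conditional form element: `test_param` renders the driving select, and whichever entry in `cases` matches the current value contributes its extra inputs, so `label_outside` only appears for 'key' and 'percent', not for 'hide'. On the consuming side the wrapper reads the resolved values back as flat settings keys; a sketch of that access pattern (the helper function is hypothetical, but the two settings keys match what pie/wrapper.js below reads):

    // hypothetical helper -- mirrors the settings reads in pie/wrapper.js
    function pieLabelSettings( settings ) {
        return {
            type    : settings.get( 'label_type' ),           // 'hide' | 'key' | 'percent'
            outside : settings.get( 'label_outside' ) === 'true'
        };
    }
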
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/logo.png
new file mode 100644
index 0000000..39a9e91
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/wrapper.js
new file mode 100644
index 0000000..f96f2eb
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/pie/wrapper.js
@@ -0,0 +1,69 @@
+/** Pie chart wrapper */
+define( [ 'utilities/utils', 'visualizations/utilities/tabular-datasets', 'plugins/nvd3/nv.d3', 'style!css!plugins/nvd3/nv.d3.css' ], function( Utils, Datasets ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            var self = this;
+            var chart = options.chart;
+            var targets = options.targets;
+            var process = options.process;
+            Datasets.request({
+                dataset_id      : chart.get( 'dataset_id' ),
+                dataset_groups  : chart.groups,
+                success         : function( groups ) {
+                    for ( var group_index in groups ) {
+                        var group = groups[ group_index ];
+                        self._drawGroup( chart, group, targets[ group_index ] );
+                    }
+                    chart.state( 'ok', 'Pie chart has been drawn.' );
+                    process.resolve();
+                }
+            });
+        },
+
+        /** Draw group */
+        _drawGroup: function( chart, group, canvas_id ) {
+            try {
+                var self = this;
+                var canvas = d3.select( '#' + canvas_id );
+                var title = canvas.append( 'text' );
+                this._fixTitle( chart, canvas, title, group.key );
+                var pie_data = [];
+                _.each( group.values, function( value ) {
+                    pie_data.push( { y : value.y, x : value.label } );
+                });
+
+                // add graph to screen
+                nv.addGraph(function() {
+                    var legend_visible = chart.settings.get( 'show_legend' ) == 'true';
+                    var label_outside = chart.settings.get( 'label_outside' ) == 'true';
+                    var label_type = chart.settings.get( 'label_type' );
+                    var donut_ratio = parseFloat( chart.settings.get( 'donut_ratio' ) );
+                    var chart_3d = nv.models.pieChart()
+                        .donut( true )
+                        .labelThreshold( 0.05 )
+                        .showLegend( legend_visible )
+                        .labelType( label_type )
+                        .donutRatio( donut_ratio )
+                        .donutLabelsOutside( label_outside );
+                    canvas.datum( pie_data ).call( chart_3d );
+                    nv.utils.windowResize( function() {
+                        chart_3d.update();
+                        self._fixTitle( chart, canvas, title, group.key );
+                    });
+                });
+            } catch ( err ) {
+                console.log( err );
+            }
+        },
+
+        /** Fix title */
+        _fixTitle: function( chart, canvas, title_element, title_text ) {
+            var width = parseInt( canvas.style( 'width' ) );
+            var height = parseInt( canvas.style( 'height' ) );
+            title_element.attr( 'x', width / 2 )
+                         .attr( 'y', height - 10 )
+                         .attr( 'text-anchor', 'middle' )
+                         .text( title_text );
+        }
+    });
+});
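
Because NVD3's pie model has no built-in title, _fixTitle() positions one by hand: the text element is centered via 'text-anchor' 'middle' at x = width / 2 and pinned 10px above the bottom edge, and the windowResize handler re-runs it so the title follows the canvas. For a 400x300 canvas, for example, the title lands at x = 200, y = 290.
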
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/config.js
new file mode 100644
index 0000000..a7802e4
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/config.js
@@ -0,0 +1,19 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend( true, {}, nvd3_config, {
+        title       : 'Scatter plot',
+        description : 'Renders a scatter plot using NVD3 hosted at http://www.nvd3.org.',
+        zoomable    : true,
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/logo.png
new file mode 100644
index 0000000..2486a48
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/wrapper.js
new file mode 100644
index 0000000..05c1f4d
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/scatter/wrapper.js
@@ -0,0 +1,13 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'scatterChart';
+            options.makeConfig = function( nvd3_model ) {
+                nvd3_model.showDistX( true )
+                          .showDistY( true )
+                          .color( d3.scale.category10().range() );
+            };
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/config.js
new file mode 100644
index 0000000..397f472
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/config.js
@@ -0,0 +1,21 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend( true, {}, nvd3_config, {
+        title       : 'Stacked area',
+        zoomable    : true,
+        description : 'Renders a stacked area chart using NVD3 hosted at http://www.nvd3.org.',
+        keywords    : 'nvd3 default',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/logo.png
new file mode 100644
index 0000000..1134584
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/wrapper.js
new file mode 100644
index 0000000..3856a33
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea/wrapper.js
@@ -0,0 +1,8 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'stackedAreaChart';
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/config.js
new file mode 100644
index 0000000..0ba8ccb
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/config.js
@@ -0,0 +1,21 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend( true, {}, nvd3_config, {
+        title       : 'Expanded',
+        zoomable    : true,
+        description : 'Renders an expanded stacked area chart using NVD3 hosted at http://www.nvd3.org.',
+        keywords    : 'nvd3 default',
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/logo.png
new file mode 100644
index 0000000..3f52454
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/wrapper.js
new file mode 100644
index 0000000..b1f3ee2
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_full/wrapper.js
@@ -0,0 +1,11 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'stackedAreaChart';
+            options.makeConfig = function( nvd3_model ) {
+                nvd3_model.style( 'expand' );
+            };
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/config.js
new file mode 100644
index 0000000..eb843e7
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/config.js
@@ -0,0 +1,22 @@
+define( [ 'visualizations/nvd3/common/config' ], function( nvd3_config ) {
+    return $.extend(true, {}, nvd3_config, {
+        title       : 'Stream',
+        description : 'Renders a stream chart using NVD3 hosted at http://www.nvd3.org.',
+        zoomable    : true,
+        keywords    : 'nvd3 default',
+        showmaxmin  : true,
+        groups      : {
+            x : {
+                label       : 'Values for x-axis',
+                type        : 'data_column',
+                is_label    : true,
+                is_auto     : true
+            },
+            y : {
+                label       : 'Values for y-axis',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/logo.png
new file mode 100644
index 0000000..ff1839d
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/wrapper.js
new file mode 100644
index 0000000..c3493c5
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/nvd3/stackedarea_stream/wrapper.js
@@ -0,0 +1,11 @@
+define( [ 'visualizations/nvd3/common/wrapper' ], function( NVD3 ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            options.type = 'stackedAreaChart';
+            options.makeConfig = function( nvd3_model ) {
+                nvd3_model.style( 'stream' );
+            };
+            new NVD3( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/example/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/others/example/config.js
new file mode 100755
index 0000000..3d77040
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/others/example/config.js
@@ -0,0 +1,16 @@
+define([], function() {
+    return {
+        title       : 'Example',
+        library     : 'Custom',
+        tag         : 'svg',
+        keywords    : 'others',
+        datatypes   : [ 'tabular', 'csv' ],
+        use_panels  : 'both',
+        description : 'This is a developer example that demonstrates how to implement and configure a basic d3-based plugin for charts.',
+        groups      : {
+            x : { type : 'data_column', is_numeric : true, label : 'Bubble x-position' },
+            y : { type : 'data_column', is_numeric : true, label : 'Bubble y-position' },
+            z : { type : 'data_column', is_numeric : true, label : 'Bubble size' }
+        }
+    };
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/example/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/others/example/logo.png
new file mode 100644
index 0000000..0c13459
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/others/example/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/example/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/others/example/wrapper.js
new file mode 100755
index 0000000..ba8be25
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/others/example/wrapper.js
@@ -0,0 +1,39 @@
+define( [ 'visualizations/utilities/tabular-utilities', 'visualizations/utilities/tabular-datasets' ], function( Utilities, Datasets ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            var chart = options.chart;
+            Datasets.request({
+                dataset_id      : chart.get( 'dataset_id' ),
+                dataset_groups  : chart.groups,
+                success         : function( groups ) {
+                    var colors = d3.scale.category20();
+                    var error = null;
+                    _.each( groups, function( group, group_index ) {
+                        try {
+                            var svg = d3.select( '#' + ( options.targets[ group_index ] || options.targets[ 0 ] ) );
+                            var height = parseInt( svg.style( 'height' ) );
+                            var width  = parseInt( svg.style( 'width' ) );
+                            var maxValue = d3.max( group.values, function( d ) { return Math.max( d.x, d.y ) } );
+                            svg.selectAll( 'bubbles' )
+                                .data( group.values )
+                                .enter().append( 'circle' )
+                                .attr( 'r', function( d ) { return ( Math.abs( d.z ) * 20 ) / maxValue } )
+                                .attr( 'cy', function( d, i ) { return height * d.y / maxValue } )
+                                .attr( 'cx', function( d ) { return width * d.x / maxValue } )
+                                .style( 'stroke', colors( group_index ) )
+                                .style( 'fill', 'white' );
+                        } catch ( err ) {
+                            error = err;
+                        }
+                    });
+                    if ( error ) {
+                        chart.state( 'failed', error );
+                    } else {
+                        chart.state( 'ok', 'Example chart has been drawn.' );
+                    }
+                    options.process.resolve();
+                }
+            });
+        }
+    });
+});
\ No newline at end of file
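
The example plugin normalizes the radius and both coordinates against one shared maxValue (the largest x or y in the group; note that z does not enter that maximum). Evaluating the formulas by hand for a 400x300 canvas and the group values [ { x: 2, y: 4, z: 1 }, { x: 4, y: 2, z: 2 } ] gives maxValue = 4, so the second bubble is drawn at:

    // same expressions as the wrapper, with the numbers substituted
    var r  = Math.abs( 2 ) * 20 / 4;    // 10  (z = 2)
    var cx = 400 * 4 / 4;               // 400 (x = 4)
    var cy = 300 * 2 / 4;               // 150 (y = 2)
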
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/config.js
new file mode 100644
index 0000000..07c81a3
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/config.js
@@ -0,0 +1,61 @@
+define( [ 'visualizations/utilities/tabular-form' ], function( default_config ) {
+    return $.extend( true, {}, default_config, {
+        title       : 'Heatmap',
+        description : 'Renders a heatmap from matrix data provided in 3-column format (x, y, observation).',
+        library     : 'Custom',
+        tag         : 'svg',
+        keywords    : 'others default',
+        zoomable    : true,
+        exports     : [ 'png', 'svg', 'pdf' ],
+        use_panels  : 'yes',
+        groups      : {
+            x : {
+                label       : 'Column labels',
+                type        : 'data_column',
+                is_label    : true,
+                is_numeric  : true
+            },
+            y : {
+                label       : 'Row labels',
+                type        : 'data_column',
+                is_label    : true,
+                is_numeric  : true
+            },
+            z : {
+                label       : 'Observation',
+                type        : 'data_column',
+                is_numeric  : true
+            }
+        },
+        settings    : {
+            color_set : {
+                label       : 'Color scheme',
+                help        : 'Select a color scheme for your heatmap.',
+                type        : 'select',
+                value       : 'jet',
+                data        : [ { label : 'Cold-to-Hot',                value : 'hot' },
+                                { label : 'Cool',                       value : 'cool' },
+                                { label : 'Copper',                     value : 'copper' },
+                                { label : 'Gray scale',                 value : 'gray' },
+                                { label : 'Jet',                        value : 'jet' },
+                                { label : 'No-Green',                   value : 'no_green' },
+                                { label : 'Ocean',                      value : 'ocean' },
+                                { label : 'Polar',                      value : 'polar' },
+                                { label : 'Red-to-Green',               value : 'redgreen' },
+                                { label : 'Red-to-Green (saturated)',   value : 'red2green' },
+                                { label : 'Relief',                     value : 'relief' },
+                                { label : 'Seismograph',                value : 'seis' },
+                                { label : 'Sealand',                    value : 'sealand' },
+                                { label : 'Split',                      value : 'split' },
+                                { label : 'Wysiwyg',                    value : 'wysiwyg' } ]
+            },
+            url_template: {
+                label       : 'URL template',
+                help        : 'Enter a URL to link the labels to external sources. Use __LABEL__ as a placeholder.',
+                type        : 'text',
+                value       : '',
+                placeholder : 'http://someurl.com?id=__LABEL__'
+            }
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/heatmap-parameters.js b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/heatmap-parameters.js
new file mode 100644
index 0000000..6033530
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/heatmap-parameters.js
@@ -0,0 +1,35 @@
+define([], function() {
+    return {
+        colorSets: {
+            seis:       ['#AA0000', '#D00000', '#F70000', '#FF1D00', '#FF4400', '#FF6A00', '#FF9000', '#FFB700', '#FFDD00', '#FFFF00', '#FFFF00', '#FFFF00', '#BDFF0C', '#73FF1A', '#3FFA36', '#16F45A', '#00D08B', '#0087CD', '#0048FA', '#0024E3'].reverse(),
+            
+            sealand:    ['#8C66FF', '#6A66FF', '#6684FF', '#66A7FF', '#66CAFF', '#66ECFF', '#66FFF0', '#66FFCE', '#66FFAB', '#66FF88', '#66FF66', '#88FF66', '#ABFF66', '#CEFF66', '#FFEEA6', '#FFD3A6', '#FFB8A6', '#FFAAB0', '#FFB5CB', '#FFC0E1'].reverse(),
+            
+            redgreen:   ['#005824', '#1A693B', '#347B53', '#4F8D6B', '#699F83', '#83B09B', '#9EC2B3', '#B8D4CB', '#D2E6E3', '#EDF8FB', '#FFFFFF', '#F1EEF6', '#E6D3E1','#DBB9CD', '#D19EB9', '#C684A4', '#BB6990', '#B14F7C', '#A63467', '#9B1A53', '#91003F'],
+            
+            ocean:      ['#000000', '#000209', '#000413', '#00061E', '#000728', '#000932', '#002650', '#00426E', '#005E8C', '#007AAA', '#0096C8', '#22A9C2', '#45BCBB', '#67CFB5', '#8AE2AE', '#ACF6A8', '#BCF8B9', '#CBF9CA', '#DBFBDC', '#EBFDED'].reverse(),
+            
+            cool:       ['#00FFFF', '#0DF2FF', '#19E6FF', '#26D9FF', '#33CCFF', '#3FBFFF', '#4CB3FF', '#59A6FF', '#6699FF', '#738CFF', '#7F7FFF', '#8C73FF', '#9966FF', '#A659FF', '#B24DFF', '#BF3FFF', '#CC33FF', '#D926FF', '#E619FF', '#F20DFF'],
+            
+            copper:     ['#000000', '#100906', '#1F130D', '#301E13', '#40281A', '#50321F', '#603C26', '#70462D', '#805033', '#905A3A', '#A06440', '#B06E46', '#C0784D', '#D08253', '#E08C5A', '#F09660', '#FFA066', '#FFAA6D', '#FFB473', '#FFBE7A'].reverse(),
+            
+            gray:       ['#000000', '#0D0D0D', '#191919', '#262626', '#333333', '#3F3F3F', '#4C4C4C', '#595959', '#666666', '#737373', '#7F7F7F', '#8C8C8C', '#999999', '#A6A6A6', '#B2B2B2', '#BFBFBF', '#CCCCCC', '#D9D9D9', '#E6E6E6', '#F2F2F2'].reverse(),
+            
+            hot:        ['#000000', '#220000', '#440000', '#660000', '#880000', '#AA0000', '#CC0000', '#EE0000', '#FF1100', '#FF3300', '#FF5500', '#FF7700', '#FF9900', '#FFBB00', '#FFDD00', '#FFFF00', '#FFFF33', '#FFFF66', '#FFFF99', '#FFFFCC'].reverse(),
+            
+            jet:        ['#00007F', '#0000B2', '#0000E5', '#0019FF', '#004DFF', '#007FFF', '#00B2FF', '#00E5FF', '#FFFFF2', '#FFFFD9', '#FFFFBF', '#FFFFA5', '#FFFF8C', '#FFE500', '#FFB300', '#FF7F00', '#FF4C00', '#FF1900', '#E50000', '#B20000'],
+            
+            no_green:   ['#1F60FF', '#1F60FF', '#1F9FFF', '#1FBFFF', '#00CFFF', '#2AFFFF', '#2AFFFF', '#55FFFF', '#7FFFFF', '#AAFFFF', '#FFFF54', '#FFFF54', '#FFF000', '#FFBF00', '#FFA800', '#FF8A00', '#FF8A00', '#FF7000', '#FF4D00', '#FF0000'],
+
+            polar:      ['#0000FF', '#1919FF', '#3333FF', '#4C4CFF', '#6666FF', '#7F7FFF', '#9999FF', '#B2B2FF', '#CCCCFF', '#E6E6FF', '#FFFFFF', '#FFE5E5', '#FFCCCC', '#FFB2B2', '#FF9999', '#FF7F7F', '#FF6666', '#FF4C4C', '#FF3333', '#FF1A1A'],
+
+            red2green:  ['#FF0000', '#FF1919', '#FF3333', '#FF4C4C', '#FF6666', '#FF7F7F', '#FF9999', '#FFB2B2', '#FFCCCC', '#FFE6E6', '#FFFFFF', '#E5FFE5', '#CCFFCC', '#B2FFB2', '#99FF99', '#7FFF7F',  '#66FF66', '#4CFF4C', '#33FF33', '#1AFF1A'].reverse(),
+
+            relief:     ['#000000', '#000413', '#000728', '#002650', '#005E8C', '#0096C8', '#45BCBB', '#8AE2AE', '#BCF8B9', '#DBFBDC', '#467832', '#887438', '#B19D48', '#DBC758', '#FAE769', '#FAEB7E', '#FCED93', '#FCF1A7', '#FCF6C1', '#FDFAE0'].reverse(),
+            
+            split:      ['#7F7FFF', '#6666E6', '#4D4DCC', '#3333B3', '#1A1A99', '#00007F', '#000066', '#00004D', '#000033', '#00001A', '#000000', '#1A0000', '#330000', '#4D0000', '#660000', '#7F0000', '#991A1A', '#B33333', '#CC4D4D', '#E66666'],
+            
+            wysiwyg:    ['#3F003F', '#3F003F', '#3F00BF', '#003FFF', '#00A0FF', '#3FBFFF', '#3FBFFF', '#40E0FF', '#3FFFBF', '#3FFF3F', '#7FFF3F', '#BFFF3F', '#BFFF3F', '#FFE040', '#FFE040', '#FF6040', '#FF1F40', '#FF60C0', '#FFA0FF', '#FFA0FF'].reverse()
+        }
+    };
+});
\ No newline at end of file
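
Every entry in colorSets is an ordered palette of about twenty steps, intended as the range of a quantized d3 scale; heatmap-plugin.js below does exactly that with the user-selected set. Standalone, the mapping behaves like this (d3 v3 API, as bundled with the plugin; domain values are placeholders):

    var colors = HeatmapParameters.colorSets[ 'jet' ];                     // 20 colors
    var zScale = d3.scale.quantize().domain( [ 0, 100 ] ).range( colors );
    zScale( 0 );     // '#00007F' -- first color, lowest band
    zScale( 99 );    // '#B20000' -- last color, highest band
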
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/heatmap-plugin.js b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/heatmap-plugin.js
new file mode 100644
index 0000000..cdf779c
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/heatmap-plugin.js
@@ -0,0 +1,403 @@
+define( [ 'visualizations/utilities/tabular-utilities', 'utilities/utils', 'visualizations/others/heatmap/heatmap-parameters' ], function( Utilities, Utils, HeatmapParameters ) {
+    return Backbone.View.extend({
+        optionsDefault: {
+            margin: {
+                top                 : 40,
+                right               : 70,
+                bottom              : 70,
+                left                : 70
+            },
+            style: {
+                'font-weight'       : 'normal',
+                'font-family'       : 'Verdana',
+                'font-size'         : 12
+            },
+            legend: {
+                width               : 15,
+                size                : 0.9,
+                style : {
+                    'font-weight'   : 'normal',
+                    'font-family'   : 'Verdana',
+                    'font-size'     : 11
+                },
+                limit               : 7
+            },
+            background_color        : '#FFFFFF',
+            debug_color             : '#FFFFFF'
+        },
+        
+        initialize: function( options ) {
+            var self = this;
+            this.chart          = options.chart;
+            this.canvas_id      = options.canvas_id;
+            this.group          = options.groups[ 0 ];
+            this.data           = options.groups[ 0 ].values;
+            this.options        = Utils.merge ( this.optionsDefault, options );
+            
+            // get color set
+            this.color_set = HeatmapParameters.colorSets[ this.chart.settings.get( 'color_set' ) ];
+            
+            // categories
+            this.categories = Utilities.makeUniqueCategories( [ this.group ] );
+            
+            // domains/scales
+            this.xScale = d3.scale.linear().domain( [ 0, this.categories.array.x.length ] );
+            this.yScale = d3.scale.linear().domain( [ 0, this.categories.array.y.length ] );
+            
+            // color scale
+            this.zMin = d3.min( this.data, function( d ) { return d.z; } );
+            this.zMax = d3.max( this.data, function( d ) { return d.z; } );
+            this.zScale = d3.scale.quantize().domain( [ this.zMin, this.zMax ] ).range( this.color_set );
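+            // (d3's quantize scale splits [zMin, zMax] into as many equal-width
+            // bands as the chosen color set has entries -- one band per color)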
+
+            // create axis
+            this.xAxis = d3.svg.axis().scale( this.xScale ).orient( 'bottom' );
+            this.yAxis = d3.svg.axis().scale( this.yScale ).orient( 'left' );
+            
+            // make categories
+            this._makeTickFormat( 'x' );
+            this._makeTickFormat( 'y' );
+            
+            // add tooltip
+            this.tooltip = d3.select( '.charts-viewport-container' ).append( 'div' )
+                .attr( 'class', 'charts-tooltip' )
+                .style( this.options.style )
+                .style( 'opacity', 0 );
+            
+            // refresh on window resize
+            $( window ).on( 'resize', function () { self.redraw() } );
+            this.redraw();
+            Utilities.addZoom({
+                xAxis       : this.xAxis,
+                yAxis       : this.yAxis,
+                redraw      : function() { self.redraw() },
+                svg         : d3.select( '#' + this.canvas_id )
+            });
+        },
+            
+        /** Redraw */
+        redraw: function() {
+            // get parameters
+            var chart       = this.chart;
+            var data        = this.data;
+            var self        = this;
+            this.data       = data;
+            
+            // get/reset container
+            var container = $( '#' + this.canvas_id );
+            container.empty();
+            
+            // get domain
+            var xDomain = this.xScale.domain();
+            var yDomain = this.yScale.domain();
+            
+            // set ticks
+            var xTickStart = Math.ceil( xDomain[ 0 ] );
+            var xTickEnd   = Math.floor( xDomain[ 1 ] );
+            var yTickStart = Math.ceil( yDomain[ 0 ] );
+            var yTickEnd   = Math.floor( yDomain[ 1 ] );
+            this.xAxis.tickValues( d3.range( xTickStart, xTickEnd, 1 ) );
+            this.yAxis.tickValues( d3.range( yTickStart, yTickEnd, 1 ) );
+            
+            // get margins
+            var margin = this.options.margin;
+            
+            // configure dimensions
+            this.height = parseInt( container.height() ) - margin.top - margin.bottom;
+            this.width  = parseInt( container.width() ) - margin.left - margin.right;
+            
+            // set range
+            this.xScale.range( [ 0, this.width ] );
+            this.yScale.range( [ this.height, 0 ] );
+
+            // get box properties
+            this.rowCount = yDomain[ 1 ] - yDomain[ 0 ];
+            this.colCount = xDomain[ 1 ] - xDomain[ 0 ];
+            this.boxWidth = Math.max( 1, Math.floor( this.width / this.colCount ) );
+            this.boxHeight = Math.max( 1, Math.floor( this.height / this.rowCount ) );
+            
+            // create group
+            var svg = this.svg = d3.select( '#' + this.canvas_id )
+                                   .append( 'g' )
+                                   .attr( 'class', 'heatmap' )
+                                   .attr( 'transform', 'translate(' + margin.left + ',' + margin.top + ')' );
+                        
+            // build elements
+            this._buildBoxes();
+            this._buildX();
+            this._buildY();
+            
+            // show legend only if requested
+            if ( this.chart.settings.get( 'show_legend' ) == 'true' ) {
+                this._buildLegend();
+            }
+        },
+        
+        /** Build boxes */
+        _buildBoxes: function() {
+            var self = this;
+            var height      = this.height;
+            var width       = this.width;
+            var margin      = this.options.margin;
+            var svg         = this.svg;
+            var boxWidth    = this.boxWidth;
+            var boxHeight   = this.boxHeight;
+            function _locator( d ) { return 'translate(' + self.xScale( d.x ) + ',' + self.yScale( d.y + 1 ) + ')'; }
+            function _color( d ) { return self.zScale( d.z ); }
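+            // (SVG rects are anchored at their top-left corner and yScale is
+            // inverted -- range [height, 0] -- so the top edge of row y sits at
+            // yScale( d.y + 1 ), one row band above the row's baseline)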
+
+            // set background color
+            var gBackground = svg.append( 'rect' )
+                .attr( 'width', width )
+                .attr( 'height', height )
+                .attr( 'fill', this.options.background_color );
+
+            // clip path
+            var clip = svg.append( 'clipPath' )
+                .attr( 'id', 'clip' )
+                .append( 'rect' )
+                .attr( 'x', 0 )
+                .attr( 'y', 0 )
+                .attr( 'width', width )
+                .attr( 'height', height );
+
+            // create chart area
+            var chartBody = svg.append( 'g' ).attr( 'clip-path', 'url(#clip)' );
+                
+            // add boxes to chart area
+            var boxes = chartBody.selectAll( 'box-group' ).data( this.data, function( d, i ) {
+                return d.x + '\0' + d.y;
+            });
+            var gEnter = boxes.enter().append( 'g' )
+                .attr( 'class', 'box-group' );
+            gEnter.append( 'rect' )
+                .attr( 'class','heat-box' );
+            boxes.selectAll( 'rect' )
+                .attr( 'rx', 1 )
+                .attr( 'ry', 1 )
+                .attr( 'fill', _color )
+                .attr( 'width', boxWidth )
+                .attr( 'height', boxHeight )
+                .attr( 'transform', _locator );
+            
+            // add tooltip events
+            boxes.selectAll( 'rect' )
+            .on( 'dblclick', function( d ) {
+                var url = self.chart.settings.get( 'url_template' ).trim();
+                if ( url ) {
+                    d3.event.stopPropagation();
+                    var xLabel = self.categories.array.x[ d.x ];
+                    var yLabel = self.categories.array.y[ d.y ];
+                    window.open( url.replace( '__LABEL__', xLabel ) );
+                    window.open( url.replace( '__LABEL__', yLabel ) );
+                }
+            })
+            .on( 'mouseover', function( d ) {
+                self.tooltip.style( 'opacity', 0.9 );
+                self.tooltip.html( self._templateTooltip( d ) )
+                    .style( 'left', ( d3.event.pageX ) + 'px' )
+                    .style( 'top', ( d3.event.pageY - 20 ) + 'px' );
+            })
+            .on( 'mouseout', function( d ) {
+                self.tooltip.style( 'opacity', 0 );
+            });
+
+            // initially hide tooltips
+            this.tooltip.style( 'opacity', 0 );
+            
+            // exit
+            boxes.exit().remove();
+        },
+        
+        /** Build x axis */
+        _buildX : function() {
+            var self = this;
+            var height      = this.height;
+            var width       = this.width;
+            var margin      = this.options.margin;
+            var svg         = this.svg;
+            var font_size   = this.options.style[ 'font-size' ];
+            var boxWidth    = this.boxWidth;
+            
+            // draw x axis
+            this.gxAxis = svg.append( 'g' )
+                .attr( 'class', 'x axis' )
+                .style( 'stroke-width', 1 )
+                .attr( 'transform', 'translate(0,' + height + ')' )
+                .call( this.xAxis );
+                
+            // fix text
+            var xFontSize = Math.min( boxWidth, font_size );
+            this.gxAxis.selectAll( 'text' )
+                .style( this.options.style )
+                .style( { 'font-size': xFontSize + 'px' } )
+                .attr( 'transform', function( d ) {
+                    var y = -this.getBBox().height - 15;
+                    var x = -xFontSize + boxWidth / 2;
+                    return 'rotate(-90)translate(' + y + ',' + x + ')';
+                });
+                
+            // set background color
+            var gxAxisLabelBackground = svg.append( 'rect' )
+                .attr( 'width', width )
+                .attr( 'height', font_size + 3 )
+                .attr( 'y', height + margin.bottom - font_size - 3 )
+                .attr( 'fill', this.options.debug_color )
+                .attr( 'opacity', 0.7 );
+                
+            // axis label
+            this.gxAxisLabel = svg.append( 'text' )
+                .attr( 'class', 'x label' )
+                .style( this.options.style )
+                .text( this.chart.settings.get( 'x_axis_label' ) )
+                .attr( 'transform', function( d ) {
+                    var y = height + margin.bottom - font_size / 3;
+                    var x = ( width - this.getBBox().width ) / 2;
+                    return 'translate(' + x + ',' + y + ')';
+                });
+                
+            // chart title
+            this.gxTickLabel = svg.append( 'text' )
+                .attr( 'class', 'title' )
+                .style( this.options.style )
+                .style( { 'font-size' : ( 1.1 * font_size ) + 'px' } )
+                .text( this.group.key )
+                .attr( 'transform', function( d ) {
+                    var y = -margin.top / 2;
+                    var x = ( width - this.getBBox().width ) / 2;
+                    return 'translate(' + x + ',' + y + ')';
+                });
+        },
+        
+        /** Build y axis */
+        _buildY : function() {
+            var self = this;
+            var height      = this.height;
+            var width       = this.width;
+            var margin      = this.options.margin;
+            var svg         = this.svg;
+            var font_size   = this.options.style[ 'font-size' ];
+            var boxHeight   = this.boxHeight;
+            
+            // draw y axis
+            this.gyAxis = svg.append( 'g' )
+                .attr( 'class', 'y axis' )
+                .style( 'stroke-width', 1 )
+                .call( this.yAxis );
+
+            // fix text
+            var yFontSize = Math.min( boxHeight, font_size );
+            this.gyAxis.selectAll( 'text' )
+                .style( this.options.style )
+                .style( { 'font-size': yFontSize + 'px' } )
+                .attr( 'y', -boxHeight / 2 );
+            
+            // set background color
+            var gyAxisLabelBackground = svg.append( 'rect' )
+                .attr( 'width', font_size )
+                .attr( 'height', height )
+                .attr( 'x', -margin.left )
+                .attr( 'fill', this.options.debug_color )
+                .attr( 'opacity', 0.7 );
+                
+            // axis label
+            this.gyAxisLabel = svg.append( 'text' )
+                .attr( 'class', 'y label' )
+                .style( this.options.style )
+                .text( this.chart.settings.get( 'y_axis_label' ) )
+                .attr( 'transform', function( d ) {
+                    var x = -margin.left + font_size - 2;
+                    var y = -( height + this.getBBox().width ) / 2;
+                    return 'rotate(-90)translate(' + y + ',' + x + ')';
+                });
+        },
+
+        /** Build legend */
+        _buildLegend : function() {
+            var self = this;
+            var height      = this.height;
+            var width       = this.width;
+            var margin      = this.options.margin;
+            var font_size   = this.options.legend.style[ 'font-size' ];
+            var limit       = this.options.legend.limit;
+            var legendSize  = this.options.legend.size;
+            var legendWidth = this.options.legend.width;
+            var legendElements = this.zScale.range().length;
+            var legendElementHeight = Math.max( legendSize * height / legendElements, font_size );
+            var legendHeight = legendElements * legendElementHeight / 2;
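+            // sample every other scale step (step = 2 * range / elements), matching the halved legend height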
+            var data = d3.range( this.zMin, this.zMax, 2 * ( this.zMax - this.zMin ) / legendElements ).reverse();
+            if ( data.length < 2 ) {
+                return;
+            }
+            var legend = this.svg.selectAll( '.legend' )
+                .data( data )
+                .enter().append( 'g' )
+                    .attr( 'class', 'legend' )
+                    .attr( 'transform', function( d, i ) {
+                        var x = width + 10;
+                        var y = ( ( height - legendHeight ) / 2 ) + ( i * legendElementHeight );
+                        return 'translate(' + x + ',' + y + ')';
+                    });
+            legend.append( 'rect' )
+                  .attr( 'width', legendWidth )
+                  .attr( 'height', legendElementHeight )
+                  .style( 'fill', function( z ) { return self.zScale( z ) } );
+            legend.append('text')
+                  .attr( 'x', legendWidth + 4 )
+                  .attr( 'y', function() { return ( legendElementHeight + this.getBBox().height ) / 2 } )
+                  .style( this.options.legend.style )
+                  .text( function( d ) { return String( d ).length > limit ? String( d ).substr( 0, limit - 2 ) + '..' : String( d ) } );
+            this.svg.append( 'text' )
+                .style( this.options.legend.style )
+                .style( { 'font-size' : '9px', 'font-weight' : 'bold' } )
+                .text( 'Legend' )
+                .attr( 'transform', function( d, i ) {
+                    var x = width + ( margin.right - this.getBBox().width ) / 2;
+                    var y = ( ( height - legendHeight ) / 2 ) - 10;
+                    return 'translate(' + x + ',' + y + ')';
+                });
+        },
+
+        /** Create axes formatting */
+        _makeTickFormat: function( id ) {
+            var settings = this.chart.settings;
+            var self = this;
+            Utilities.makeTickFormat({
+                categories  : self.categories.array[ id ],
+                type        : settings.get( id + '_axis_type|type' ),
+                precision   : settings.get( id + '_axis_type|precision' ),
+                formatter   : function( formatter ) {
+                    if ( formatter ) {
+                        self[ id + 'Axis' ].tickFormat( function( value ) {
+                           return formatter( value );
+                        });
+                    }
+                }
+            });
+        },
+
+        /** Handle error */
+        _handleError: function( err ) {
+            this.chart.state( 'failed', err );
+        },
+
+        /** Main template */
+        _templateTooltip: function(d) {
+            var x = this.categories.array.x[ d.x ];
+            var y = this.categories.array.y[ d.y ];
+            var z = d.z;
+            return  '<table>' +
+                        '<tr>' +
+                            '<td class="charts-tooltip-first">Row:</td>' +
+                            '<td>' + y + '</td>' +
+                        '</tr>' +
+                        '<tr>' +
+                            '<td class="charts-tooltip-first">Column:</td>' +
+                            '<td>' + x + '</td>' +
+                        '</tr>' +
+                        '<tr>' +
+                            '<td class="charts-tooltip-first">Value:</td>' +
+                            '<td>' + z + '</td>' +
+                        '</tr>' +
+                    '</table>';
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/logo.png
new file mode 100644
index 0000000..1dd9dc5
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/wrapper.js
new file mode 100644
index 0000000..470ecfa
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap/wrapper.js
@@ -0,0 +1,15 @@
+define( [ 'visualizations/utilities/tabular-utilities', 'visualizations/others/heatmap/heatmap-plugin' ], function( Utilities, HeatMap ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            options.render = function( canvas_id, groups ) {
+                new HeatMap({
+                    chart       : options.chart,
+                    canvas_id   : canvas_id,
+                    groups      : groups
+                });
+                return true;
+            };
+            Utilities.panelHelper( options );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/config.js
new file mode 100644
index 0000000..cbc296a
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/config.js
@@ -0,0 +1,7 @@
+define( [ 'visualizations/others/heatmap/config' ], function( default_config ) {
+    return $.extend( true, {}, default_config, {
+        title       : 'Clustered Heatmap',
+    description : 'Applies hierarchical clustering to a matrix using R. The data has to be provided in a 3-column format. The result is displayed as a clustered heatmap.',
+        keywords    : 'others default'
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/logo.png
new file mode 100644
index 0000000..30e0334
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/wrapper.js
new file mode 100644
index 0000000..b4ccdef
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/others/heatmap_cluster/wrapper.js
@@ -0,0 +1,34 @@
+define( [ 'visualizations/utilities/tabular-utilities', 'utilities/jobs', 'visualizations/others/heatmap/heatmap-plugin' ], function( Utilities, Jobs, HeatMap ) {
+    return Backbone.View.extend({
+        initialize: function( options ) {
+            Jobs.request( options.chart, Utilities.buildJobDictionary( options.chart, 'heatmap' ), function( dataset ) {
+                var index = 0;
+                var dataset_groups = new Backbone.Collection();
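+                // assign three consecutive result columns (x, y, z) to each group; the outer counter keeps the indices running across groups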
+                options.chart.groups.each( function( group ) {
+                    dataset_groups.add({
+                        __data_columns: {
+                            x : { is_label   : true },
+                            y : { is_label   : true },
+                            z : { is_numeric : true }
+                        },
+                        x     : index++,
+                        y     : index++,
+                        z     : index++,
+                        key   : group.get( 'key' )
+                    });
+                });
+                options.dataset_id = dataset.id;
+                options.dataset_groups = dataset_groups;
+                options.render = function( canvas_id, groups ) {
+                    new HeatMap({
+                        chart       : options.chart,
+                        canvas_id   : canvas_id,
+                        groups      : groups
+                    });
+                    return true;
+                };
+                Utilities.panelHelper( options );
+            }, function() { options.process.reject() } );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/config.js b/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/config.js
new file mode 100644
index 0000000..cdcfa75
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/config.js
@@ -0,0 +1,103 @@
+define( [], function() {
+    return {
+        title       : 'PV Protein Viewer',
+        library     : 'PV',
+        datatypes   : [ 'pdb' ],
+        keywords    : 'pv protein viewer pdb structure',
+        description : 'PV is a pdb/protein viewer hosted at https://biasmv.github.io/pv/.',
+        settings    : {
+            quality : {
+                label   : 'Quality',
+                help    : 'Select the rendering quality.',
+                type    : 'select',
+                display : 'radio',
+                value   : 'medium',
+                data    : [ { label : 'High', value : 'high' }, { label : 'Medium', value : 'medium' }, { label : 'Low', value : 'low' } ]
+            },
+            viewer : {
+                type        : 'conditional',
+                test_param  : {
+                    name    : 'mode',
+                    label   : 'Display mode',
+                    type    : 'select',
+                    display : 'radio',
+                    value   : 'cartoon',
+                    help    : 'Select the rendering mode.',
+                    data    : [ { label : 'Cartoon',        value : 'cartoon' },
+                                { label : 'Lines',          value : 'lines' },
+                                { label : 'Points',         value : 'points' },
+                                { label : 'Spheres',        value : 'spheres' },
+                                { label : 'Trace',          value : 'trace' },
+                                { label : 'Trace (line)',   value : 'lineTrace' },
+                                { label : 'Trace (smooth)', value : 'sline' },
+                                { label : 'Tube',           value : 'tube' } ]
+                },
+                cases       : [ { value : 'cartoon', inputs: [ {
+                                    name  : 'radius',
+                                    label : 'Radius',
+                                    help  : 'Radius of tube profile. Also influences the profile thickness for helix and strand profiles.',
+                                    type  : 'float',
+                                    min   : 0.1,
+                                    max   : 3,
+                                    value : 0.3
+                                } ] },
+                                { value : 'lines', inputs: [ {
+                                    name  : 'lineWidth',
+                                    label : 'Line width',
+                                    help  : 'Specify the line width.',
+                                    type  : 'float',
+                                    min   : 0.1,
+                                    max   : 10,
+                                    value : 4
+                                } ] },
+                                { value : 'points', inputs: [ {
+                                    name  : 'pointSize',
+                                    label : 'Point size',
+                                    help  : 'Specify the point size.',
+                                    type  : 'float',
+                                    min   : 0.1,
+                                    max   : 10,
+                                    value : 1
+                                } ] },
+                                { value : 'spheres' },
+                                { value : 'trace', inputs: [ {
+                                    name  : 'radius',
+                                    label : 'Radius',
+                                    help  : 'Specify the tube radius.',
+                                    type  : 'float',
+                                    min   : 0.1,
+                                    max   : 3,
+                                    value : 0.3
+                                } ] },
+                                { value : 'lineTrace', inputs: [ {
+                                    name  : 'lineWidth',
+                                    label : 'Line width',
+                                    help  : 'Specify the line width.',
+                                    type  : 'float',
+                                    min   : 0.1,
+                                    max   : 10,
+                                    value : 4
+                                } ] },
+                                { value : 'sline', inputs: [ {
+                                    name  : 'lineWidth',
+                                    label : 'Line width',
+                                    help  : 'Specify the line width.',
+                                    type  : 'float',
+                                    min   : 0.1,
+                                    max   : 10,
+                                    value : 4
+                                } ] },
+                                { value : 'tube', inputs: [ {
+                                    name  : 'radius',
+                                    label : 'Radius',
+                                    help  : 'Specify the tube radius.',
+                                    type  : 'float',
+                                    min   : 0.1,
+                                    max   : 3,
+                                    value : 0.3
+                                } ] }
+                            ]
+            }
+        }
+    }
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/logo.png b/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/logo.png
new file mode 100644
index 0000000..aa842f9
Binary files /dev/null and b/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/logo.png differ
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/wrapper.js b/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/wrapper.js
new file mode 100644
index 0000000..6504a56
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/pv/viewer/wrapper.js
@@ -0,0 +1,38 @@
+define( [ 'utilities/utils', 'plugins/pv/viewer' ], function( Utils, pv ) {
+    return Backbone.Model.extend({
+        initialize: function( options ) {
+            var settings = options.chart.settings;
+            var viewer = pv.Viewer( document.getElementById( options.targets[ 0 ] ), {
+                quality     : settings.get( 'quality' ),
+                width       : 'auto',
+                height      : 'auto',
+                antialias   : true,
+                outline     : true
+            });
+            Utils.get( {
+                url     : options.dataset.download_url,
+                cache   : true,
+                success : function( response ) {
+                    var structure = pv.io.pdb( response );
+                    var viewer_options = {};
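+                    // strip the 'viewer|' conditional prefix to obtain plain PV rendering options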
+                    _.each( settings.attributes, function( value, key ) {
+                        if ( key.startsWith( 'viewer|' ) ) {
+                            viewer_options[ key.replace( 'viewer|', '' ) ] = value;
+                        }
+                    });
+                    viewer.clear();
+                    viewer.renderAs( 'protein', structure, viewer_options.mode, viewer_options );
+                    viewer.centerOn( structure );
+                    viewer.autoZoom();
+                    options.chart.state( 'ok', 'Chart drawn.' );
+                    options.process.resolve();
+                },
+                error   : function() {
+                    options.chart.state( 'failed', 'Failed to load pdb file.' );
+                    options.process.reject();
+                }
+            });
+            $( window ).resize( function() { viewer.fitParent() } );
+        }
+    });
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-datasets.js b/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-datasets.js
new file mode 100644
index 0000000..0ad81b8
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-datasets.js
@@ -0,0 +1,121 @@
+/** This module fetches, formats and caches tabular dataset columns. */
+define( [ 'utilities/utils' ], function( Utils ) {
+    /** Fills request dictionary with data from cache/response */
+    var _cache = {};
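+    // cache of fetched column arrays, keyed per dataset/column (see _block_id below)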
+    var request = function( options ) {
+        var groups      = options.dataset_groups;
+        var dataset_id  = options.dataset_id;
+        // identify columns needed to fulfill request
+        var column_list = [];
+        groups.each( function( group ) {
+            _.each( group.get( '__data_columns' ), function( column_def, column_name ) {
+                var column = group.get( column_name );
+                var block_id = _block_id( dataset_id, column );
+                if ( column_list.indexOf( column ) === -1 && !_cache[ block_id ] && column != 'auto' && column != 'zero' && column !== undefined ) {
+                    column_list.push( column );
+                }
+            });
+        });
+        if ( column_list.length == 0 ) {
+            _fillFromCache( options );
+            return;
+        }
+        // Fetch data columns into dataset object
+        Utils.get({
+            url     : Galaxy.root + 'api/datasets/' + dataset_id,
+            data    : {
+                data_type   : 'raw_data',
+                provider    : 'dataset-column',
+                indeces     : column_list.toString()
+            },
+            success : function( response ) {
+                var results = new Array( column_list.length );
+                for ( var i = 0; i < results.length; i++ ) {
+                    results[ i ] = [];
+                }
+                for ( var i in response.data ) {
+                    var row = response.data[ i ];
+                    for ( var j in row ) {
+                        var v = row[ j ];
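+                        // values equal to 2147483647 (max int32) are skipped; the column provider appears to use it as a missing-value sentinel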
+                        if ( v !== undefined && v != 2147483647 ) {
+                            results[ j ].push( v );
+                        }
+                    }
+                }
+                console.debug( 'tabular-datasets::_fetch() - Fetching complete.' );
+                for ( var i in results ) {
+                    var column = column_list[ i ];
+                    var block_id = _block_id( dataset_id, column );
+                    _cache[ block_id ] = results[ i ];
+                }
+                _fillFromCache( options );
+            }
+        });
+    };
+
+    /** Fill data from cache */
+    var _fillFromCache = function( options ) {
+        var groups      = options.dataset_groups;
+        var dataset_id  = options.dataset_id;
+        console.debug( 'tabular-datasets::_fillFromCache() - Filling request from cache.' );
+        var limit = 0;
+        groups.each( function( group ) {
+            _.each( group.get( '__data_columns' ), function( column_def, column_name ) {
+                var column = group.get( column_name );
+                var block_id = _block_id( dataset_id, column );
+                var column_data = _cache[ block_id ];
+                if ( column_data ) {
+                    limit = Math.max( limit, column_data.length );
+                }
+            });
+        });
+        if ( limit == 0 ) {
+            console.debug( 'tabular-datasets::_fillFromCache() - No data available.' );
+        }
+        var results = [];
+        groups.each( function( group, group_index ) {
+            var dict = Utils.merge( { key: ( group_index ) + ':' + group.get( 'key' ), values: [] }, group.attributes );
+            for ( var j = 0; j < limit; j++ ) {
+                dict.values[ j ] = { x : parseInt( j ) };
+            }
+            results.push( dict );
+        });
+        groups.each( function( group, group_index ) {
+            var values = results[ group_index ].values;
+            _.each( group.get( '__data_columns' ), function( column_def, column_name ) {
+                var column = group.get( column_name );
+                switch ( column ) {
+                    case 'auto':
+                        for ( var j = 0; j < limit; j++ ) {
+                            values[ j ][ column_name ] = parseInt( j );
+                        }
+                        break;
+                    case 'zero':
+                        for ( var j = 0; j < limit; j++ ) {
+                            values[ j ][ column_name ] = 0;
+                        }
+                        break;
+                    default:
+                        var block_id = _block_id( dataset_id, column );
+                        var column_data = _cache[ block_id ];
+                        for ( var j = 0; j < limit; j++ ) {
+                            var value = values[ j ];
+                            var v = column_data[ j ];
+                            if ( isNaN( v ) && !column_def.is_label ) {
+                                v = 0;
+                            }
+                            value[ column_name ] = v;
+                        }
+                }
+            });
+        });
+        options.success( results );
+    };
+
+    /** Get block id */
+    var _block_id = function ( dataset_id, column ) {
+        return dataset_id + '__' + column;
+    };
+
+    return { request: request };
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-form.js b/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-form.js
new file mode 100644
index 0000000..cb11d9c
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-form.js
@@ -0,0 +1,79 @@
+define( [], function() {
+    var axisLabel = function( name, options ) {
+        options = options || {};
+        var prefix = name.substr( 0, 1 );
+        return {
+            name        : name,
+            label       : prefix.toUpperCase() + '-Axis label',
+            help        : 'Provide a label for the axis.',
+            type        : 'text',
+            value       : options.value || prefix.toUpperCase() + '-axis',
+            placeholder : 'Axis label'
+        }
+    }
+    var axisType = function( name, options ) {
+        options = options || {};
+        var prefix = name.substr( 0, 1 );
+        var axisPrecision = function() {
+            return { name    : 'precision',
+                     label   : 'Axis tick format',
+                     help    : 'Select the tick format for the axis.',
+                     type    : 'select',
+                     value   : options.precision || 1,
+                     data    : [ { label : '0.00001', value : '5' },
+                                 { label : '0.0001',  value : '4' },
+                                 { label : '0.001',   value : '3' },
+                                 { label : '0.01',    value : '2' },
+                                 { label : '0.1',     value : '1' },
+                                 { label : '1',       value : '0' } ] }
+        }
+        return {
+            name        : prefix + '_axis_type',
+            type        : 'conditional',
+            test_param  : {
+                name        : 'type',
+                label       : prefix.toUpperCase() + '-Axis value type',
+                type        : 'select',
+                value       : options.value || 'auto',
+                help        : 'Select the value type of the axis.',
+                data        : [ { value : 'hide',   label : '-- Do not show values --' },
+                                { value : 'auto',   label : 'Auto' },
+                                { value : 'f',      label : 'Float' },
+                                { value : 'd',      label : 'Integer' },
+                                { value : 'e',      label : 'Exponent' },
+                                { value : 'p',      label : 'Percent' },
+                                { value : 's',      label : 'SI-prefix' } ]
+            },
+            cases       : [ { value   : 'hide' },
+                            { value   : 'auto' },
+                            { value   : 'f', inputs: [ axisPrecision() ] },
+                            { value   : 'd' },
+                            { value   : 'e', inputs: [ axisPrecision() ] },
+                            { value   : 'p', inputs: [ axisPrecision() ] },
+                            { value   : 's' } ]
+        }
+    }
+    return {
+        title       : '',
+        library     : '',
+        tag         : '',
+        keywords    : '',
+        datatypes   : [ 'bed', 'bedgraph', 'bedstrict', 'bed6', 'bed12', 'chrint', 'customtrack', 'gff', 'gff3', 'gtf', 'interval', 'encodepeak', 'wig', 'scidx', 'fli', 'csv', 'tsv', 'eland', 'elandmulti', 'picard_interval_list', 'gatk_dbsnp', 'gatk_tranche', 'gatk_recal', 'ct', 'pileup', 'sam', 'taxonomy', 'tabular', 'vcf', 'xls' ],
+        use_panels  : 'both',
+        settings    : {
+            x_axis_label : axisLabel( 'x_axis_label' ),
+            x_axis_type  : axisType( 'x_axis_type' ),
+            y_axis_label : axisLabel( 'y_axis_label' ),
+            y_axis_type  : axisType( 'y_axis_type' ),
+            show_legend  : { type: 'boolean', label: 'Show legend', help: 'Would you like to add a legend?' }
+        },
+        groups      : {
+            key: {
+                label       : 'Provide a label',
+                type        : 'text',
+                placeholder : 'Data label',
+                value       : 'Data label'
+            }
+        }
+    }
+});
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-utilities.js b/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-utilities.js
new file mode 100644
index 0000000..e097330
--- /dev/null
+++ b/config/plugins/visualizations/charts/static/repository/visualizations/utilities/tabular-utilities.js
@@ -0,0 +1,302 @@
+define( [ 'utilities/utils', 'visualizations/utilities/tabular-datasets' ], function( Utils, Datasets ) {
+    /** build job dictionary */
+    function buildJobDictionary( chart, module ) {
+        var settings_string = '';
+        var columns_string = '';
+        var group_index = 0;
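+        // serialize settings as a 'key:value, key:value' string for the charts tool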
+        for ( var key in chart.settings.attributes ) {
+            var settings_value = chart.settings.get( key );
+            // escape delimiters (space, comma, colon) inside values using HTML entities
+            _.each( [ [ ' ', '&#32;' ], [ ',', '&#44;' ], [ ':', '&#58;' ] ], function( pair ) {
+                settings_value = settings_value.replace( new RegExp( pair[ 0 ], 'g' ), pair[ 1 ] );
+            });
+            settings_string += key + ':' + settings_value + ', ';
+        }
+        settings_string = settings_string.substring( 0, settings_string.length - 2 );
+        chart.groups.each( function( group ) {
+            group_index++;
+            _.each( group.get( '__data_columns' ), function( data_columns, name ) {
+                columns_string += name + '_' + group_index + ':' + ( parseInt( group.get( name ) ) + 1 ) + ', ';
+            });
+        });
+        columns_string = columns_string.substring( 0, columns_string.length - 2 );
+        return {
+            'tool_id'   : 'charts',
+            'inputs'    : {
+                'input'     : {
+                    'id'    : chart.get( 'dataset_id' ),
+                    'src'   : 'hda'
+                },
+                'module'    : module,
+                'columns'   : columns_string,
+                'settings'  : settings_string
+            }
+        };
+    };
+
+    /** Assists in assigning the viewport panels */
+    function panelHelper( options ) {
+        var self = this;
+        var process             = options.process;
+        var chart               = options.chart;
+        var render              = options.render;
+        var targets             = options.targets;
+        var dataset_id          = options.dataset_id || options.chart.get( 'dataset_id' );
+        var dataset_groups      = options.dataset_groups || options.chart.groups;
+        Datasets.request({
+            chart           : chart,
+            dataset_id      : dataset_id,
+            dataset_groups  : dataset_groups,
+            success         : function( result ) {
+                try {
+                    if ( targets.length == result.length ) {
+                        var valid = true;
+                        for ( var group_index in result ) {
+                            var group = result[ group_index ];
+                            if ( !render( targets[ group_index ], [ group ] ) ) {
+                                valid = false;
+                                break;
+                            }
+                        }
+                        if ( valid ) {
+                            chart.state( 'ok', 'Multi-panel chart drawn.' );
+                        }
+                    } else if ( targets.length == 1 ) {
+                        if ( render( targets[ 0 ], result ) ) {
+                            chart.state( 'ok', 'Chart drawn.' );
+                        }
+                    } else {
+                        chart.state( 'failed', 'Invalid panel count.' );
+                    }
+                    process.resolve();
+                } catch ( err ) {
+                    console.debug( 'FAILED: tabular-utilities::panelHelper() - ' + err );
+                    chart.state( 'failed', err );
+                    process.reject();
+                }
+            }
+        });
+    };
+
+    /** Get domain boundaries value */
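+    // e.g. getDomains( groups, [ 'x', 'y' ] ) returns { x : { min : ..., max : ... }, y : { min : ..., max : ... } }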
+    function getDomains( groups, keys ) {
+        function _apply( operator, key ) {
+            var value = undefined;
+            for ( var group_index in groups ) {
+                var value_sub = d3[ operator ]( groups[ group_index ].values, function( d ) { return d[ key ] } );
+                value = value === undefined ? value_sub : Math[ operator ]( value, value_sub );
+            }
+            return value;
+        };
+        var result = {};
+        for( var index in keys ) {
+            var key = keys[ index ];
+            result[ key ] = {
+                min : _apply( 'min', key ),
+                max : _apply( 'max', key )
+            };
+        }
+        return result;
+    };
+
+    /** Default series maker */
+    function makeSeries( groups, keys ) {
+        var plot_data = [];
+        for ( var group_index in groups ) {
+            var group = groups[ group_index ];
+            var data = [];
+            for ( var value_index in group.values ) {
+                var point = [];
+                if ( keys ) {
+                    for ( var key_index in keys ) {
+                        var column_index = keys[ key_index ];
+                        point.push( group.values[ value_index ][ column_index ] );
+                    }
+                } else {
+                    for ( var column_index in group.values[ value_index ] ) {
+                        point.push( group.values[ value_index ][ column_index ] );
+                    }
+                }
+                data.push( point );
+            }
+            plot_data.push( data );
+        }
+        return plot_data;
+    };
+
+    /** Default category maker */
+    function makeCategories( groups, column_keys ) {
+        var array = {};
+        var data_columns = groups[ 0 ].__data_columns;
+        _.each( column_keys, function( key ) {
+            if ( data_columns[ key ].is_label ) {
+                array[ key ] = [];
+            }
+        });
+        if ( groups && groups[ 0 ] ) {
+            _.each( groups[ 0 ].values, function( value_dict ) {
+                for ( var key in array ) {
+                    array[ key ].push( String( value_dict[ key ] ) );
+                }
+            });
+        }
+        mapCategories( array, groups );
+        return { array : array }
+    };
+
+    /** Apply default mapping: replace label-column values with their row index (for all groups) */
+    function mapCategories( array, groups ) {
+        _.each( groups, function( group ) {
+            _.each( group.values, function( value_dict, i ) {
+                for ( var key in array ) {
+                    value_dict[ key ] = parseInt( i );
+                }
+            });
+        });
+    };
+
+    /** Category maker for unique category labels */
+    function makeUniqueCategories( groups, with_index ) {
+        var categories  = {};
+        var array       = {};
+        var counter     = {};
+        var data_columns = groups[ 0 ].__data_columns;
+        _.each( data_columns, function( column_def, key ) {
+            if ( column_def.is_label ) {
+                categories[ key ] = {};
+                array[ key ]      = [];
+                counter[ key ]    = 0;
+            }
+        });
+        // index all values contained in label columns (for all groups)
+        for ( var i in groups ) {
+            var group = groups[ i ];
+            for ( var j in group.values ) {
+                var value_dict = group.values[ j ];
+                for ( var key in categories ) {
+                    var value = String( value_dict[ key ] );
+                    if ( categories[ key ][ value ] === undefined ) {
+                        categories[ key ][ value ] = counter[ key ];
+                        array[ key ].push( with_index ? [counter[key], value] : value );
+                        counter[ key ]++;
+                    }
+                }
+            }
+        }
+        // convert group values into category indices
+        for ( var i in groups ) {
+            var group = groups[ i ];
+            for ( var j in group.values ) {
+                var value_dict = group.values[ j ];
+                for ( var key in categories ) {
+                    var value = String( value_dict[ key ] );
+                    value_dict[ key ] = categories[ key ][ value ];
+                }
+            }
+        }
+        return {
+            categories  : categories,
+            array       : array,
+            counter     : counter
+        }
+    };
+
+    /** Make tick formatter for an axis */
+    function makeTickFormat ( options ) {
+        var type        = options.type;
+        var precision   = options.precision;
+        var categories  = options.categories;
+        var formatter   = options.formatter;
+        if ( type == 'hide' ) {
+            formatter( function() { return '' } );
+        } else if ( type == 'auto' ) {
+            if ( categories ) {
+                formatter( function( value ) { return categories[ value ] || '' } );
+            }
+        } else {
+            var d3format = function( d ) {
+                switch ( type ) {
+                    case 's':
+                        var prefix = d3.formatPrefix( d );
+                        return prefix.scale( d ).toFixed() + prefix.symbol;
+                    default :
+                        return d3.format( '.' + precision + type )( d );
+                }
+            };
+            if ( categories ) {
+                formatter( function( value ) {
+                    var label = categories[ value ];
+                    if ( label ) {
+                        if ( isNaN( label ) ) {
+                            return label;
+                        } else {
+                            try {
+                                return d3format( label );
+                            } catch ( err ) {
+                                return label;
+                            }
+                        }
+                    } else {
+                        return '';
+                    }
+                });
+            } else {
+                formatter( function( value ) { return d3format( value ) } );
+            }
+        }
+    };
+
+    /** Add zoom handler */
+    function addZoom( options ) {
+        var scaleExtent = 100;
+        var yAxis       = options.yAxis;
+        var xAxis       = options.xAxis;
+        var xDomain     = options.xDomain || xAxis.scale().domain;
+        var yDomain     = options.yDomain || yAxis.scale().domain;
+        var redraw      = options.redraw;
+        var svg         = options.svg;
+        var xScale      = xAxis.scale();
+        var yScale      = yAxis.scale();
+        var x_boundary  = xScale.domain().slice();
+        var y_boundary  = yScale.domain().slice();
+        var d3zoom      = d3.behavior.zoom();
+        xScale.nice();
+        yScale.nice();
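+        // clamp zoomed domains so panning cannot leave the original data boundaries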
+        function fixDomain( domain, boundary ) {
+            domain[ 0 ] = Math.min( Math.max( domain[ 0 ], boundary[ 0 ] ), boundary[ 1 ] - boundary[ 1 ]/scaleExtent );
+            domain[ 1 ] = Math.max( boundary[ 0 ] + boundary[ 1 ] / scaleExtent, Math.min( domain[ 1 ], boundary[ 1 ] ) );
+            return domain;
+        };
+        function zoomed() {
+            yDomain( fixDomain( yScale.domain(), y_boundary ) );
+            xDomain( fixDomain( xScale.domain(), x_boundary ) );
+            redraw();
+        };
+        function unzoomed() {
+            xDomain( x_boundary );
+            yDomain( y_boundary );
+            redraw();
+            d3zoom.scale( 1 );
+            d3zoom.translate( [ 0 , 0 ] );
+        };
+        d3zoom.x( xScale )
+              .y( yScale )
+              .scaleExtent( [ 1, scaleExtent ] )
+              .on( 'zoom', zoomed );
+        svg.call( d3zoom ).on( 'dblclick.zoom', unzoomed );
+        return d3zoom;
+    };
+
+    return {
+        buildJobDictionary      : buildJobDictionary,
+        panelHelper             : panelHelper,
+        makeCategories          : makeCategories,
+        makeUniqueCategories    : makeUniqueCategories,
+        makeSeries              : makeSeries,
+        getDomains              : getDomains,
+        mapCategories           : mapCategories,
+        makeTickFormat          : makeTickFormat,
+        addZoom                 : addZoom
+    }
+
+});
diff --git a/config/plugins/visualizations/charts/templates/charts.mako b/config/plugins/visualizations/charts/templates/charts.mako
new file mode 100644
index 0000000..a65ede3
--- /dev/null
+++ b/config/plugins/visualizations/charts/templates/charts.mako
@@ -0,0 +1,67 @@
+<%
+    root            = h.url_for( "/" )
+    app_root        = root + "plugins/visualizations/charts/static/client"
+    repository_root = root + "plugins/visualizations/charts/static/repository"
+%>
+
+<!DOCTYPE HTML>
+<html>
+    <head>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        <title>${hda.name} | ${visualization_name}</title>
+        ${h.js( 'libs/jquery/jquery',
+                'libs/jquery/jquery-ui',
+                'libs/jquery/select2',
+                'libs/bootstrap',
+                'libs/underscore',
+                'libs/backbone',
+                'libs/d3',
+                'libs/require')}
+        ${h.css( 'base', 'jquery-ui/smoothness/jquery-ui' )}
+        ${h.stylesheet_link( app_root + "/app.css" )}
+    </head>
+    <body>
+        <script type="text/javascript">
+            var app_root = '${app_root}';
+            var repository_root = '${repository_root}';
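+            // reuse the parent frame's Galaxy handle when embedded; otherwise provide a minimal stub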
+            var Galaxy = Galaxy || parent.Galaxy || {
+                root    : '${root}',
+                emit    : {
+                    debug: function() {}
+                }
+            };
+            window.console = window.console || {
+                log     : function(){},
+                debug   : function(){},
+                info    : function(){},
+                warn    : function(){},
+                error   : function(){},
+                assert  : function(){}
+            };
+            require.config({
+                baseUrl: Galaxy.root + "static/scripts/",
+                paths: {
+                    "plugin"        : "${app_root}",
+                    "d3"            : "libs/d3",
+                    "repository"    : "${repository_root}"
+                },
+                shim: {
+                    "libs/underscore": { exports: "_" },
+                    "libs/backbone": { exports: "Backbone" },
+                    "d3": { exports: "d3" }
+                }
+            });
+            $(function() {
+                require( [ 'plugin/app' ], function( App ) {
+                    var config = ${ h.dumps( config ) };
+                    var app = new App({
+                        visualization_id : ${ h.dumps( visualization_id ) } || undefined,
+                        dataset_id       : config.dataset_id,
+                        chart_dict       : config.chart_dict
+                    });
+                    $( 'body' ).append( app.$el );
+                });
+            });
+        </script>
+    </body>
+</html>
\ No newline at end of file
diff --git a/config/plugins/visualizations/charts/webpack.config.js b/config/plugins/visualizations/charts/webpack.config.js
new file mode 100644
index 0000000..8163e27
--- /dev/null
+++ b/config/plugins/visualizations/charts/webpack.config.js
@@ -0,0 +1,48 @@
+var webpack = require( 'webpack' );
+var path = require( 'path' );
+var root = path.join( __dirname, 'static/repository' );
+var grunt = require( 'grunt' );
+var registry_json = grunt.file.readJSON( root + '/registry.json' );
+
+// helper to visit registry values
+var visit = function( callback ) {
+    for ( var lib in registry_json ) {
+        var plugins = registry_json[ lib ];
+        for ( var i in plugins ) {
+            var plugin = plugins[ i ];
+            callback( lib, plugin );
+        }
+    }
+};
+
+// identify entries from registry
+var entry = { registry : root + '/build/registry.tmp.js' };
+visit( function( lib, plugin ) {
+    if ( !grunt.file.exists( root + '/build/' + lib + '_' + plugin + '.js' ) ) {
+        entry[ lib + '_' + plugin ] = root + '/visualizations/' + lib + '/' + plugin + '/wrapper.js';
+        grunt.log.writeln( 'Adding ' + lib + '_' + plugin + '.' );
+    }
+});
+
+// build registry file
+var registry = 'define( [], function() { return {';
+visit( function( lib, plugin ) {
+    registry += lib + '_' + plugin + ':' + 'require( "visualizations/' + lib + '/' + plugin + '/config" ), ';
+});
+registry = registry.substr( 0, registry.length - 1 ) + '} } );';
+grunt.file.write( root + '/build/registry.tmp.js', registry );
+grunt.log.writeln( 'Writing Registry.' );
+
+// configure webpack
+module.exports = {
+    devtool : 'source-map',
+    entry   : entry,
+    output  : {
+        path            : root + '/build',
+        filename        : '[name].js',
+        libraryTarget   : 'amd'
+    },
+    resolve : {
+        root : root
+    }
+};
\ No newline at end of file
diff --git a/config/plugins/visualizations/circster/config/circster.xml b/config/plugins/visualizations/circster/config/circster.xml
new file mode 100644
index 0000000..795887d
--- /dev/null
+++ b/config/plugins/visualizations/circster/config/circster.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="Circster" disabled="true">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="not_eq" test_attr="datatype.track_type">None</test>
+            <to_param param_attr="id">dataset_id</to_param>
+            <to_param assign="hda">hda_ldda</to_param>
+        </data_source>
+        <data_source>
+            <model_class>LibraryDatasetDatasetAssociation</model_class>
+            <test type="not_eq" test_attr="datatype.track_type">None</test>
+            <to_param param_attr="id">dataset_id</to_param>
+            <to_param assign="ldda">hda_ldda</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="visualization">id</param>
+        <param type="hda_or_ldda">dataset_id</param>
+        <param_modifier type="string" modifies="dataset_id">hda_ldda</param_modifier>
+        <param type="dbkey">dbkey</param>
+    </params>
+    <entry_point entry_point_type="mako">circster.mako</entry_point>
+    <render_target>_top</render_target>
+</visualization>
diff --git a/config/plugins/visualizations/common/templates/README.txt b/config/plugins/visualizations/common/templates/README.txt
new file mode 100644
index 0000000..be42b2e
--- /dev/null
+++ b/config/plugins/visualizations/common/templates/README.txt
@@ -0,0 +1,8 @@
+Common templates for visualization plugins
+==========================================
+
+Placing Mako templates in this directory will allow them to be properly
+inherited and imported in plugin directories. E.g. if you have a template file
+in this directory named 'config_utils.mako', you can import it in your plugin
+templates using:
+    <%namespace name="config_utils" file="config_utils.mako" />
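+Defs from the imported namespace can then be called from the plugin template,
+for example the save_button def defined in config_utils.mako:
+    ${config_utils.save_button( 'Save' )}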
diff --git a/config/plugins/visualizations/common/templates/config_utils.mako b/config/plugins/visualizations/common/templates/config_utils.mako
new file mode 100644
index 0000000..d99701d
--- /dev/null
+++ b/config/plugins/visualizations/common/templates/config_utils.mako
@@ -0,0 +1,46 @@
+<%def name="add_config_defaults( defaults )">
+## overwrite default_config_dict with config (if any) then assign to config
+<%
+    for key, default in defaults.items():
+        if key not in config or config[ key ] is None:
+            config[ key ] = default
+%>
+</%def>
+
+<%def name="config_form( config_dict )">
+## render form for everything in possible config
+</%def>
+
+<%def name="link_to_change_config( link_contents, new_settings, target='' )">
+<%
+    # assumes there's a config var
+    url_for_args = {
+        'controller'            : 'visualization',
+        'action'                : 'render',
+        'visualization_name'    : visualization_name,
+        'title'                 : title
+    }
+    url_for_args.update( config )
+    url_for_args.update( new_settings )
+    if visualization_id:
+        url_for_args[ 'id' ] = visualization_id
+%>
+    <a href="${h.url_for( **url_for_args )}" target="${target}">${link_contents}</a>
+</%def>
+
+<%def name="save_button( text='Save' )">
+<%
+    # still a GET
+    url_for_args = {
+        'controller'    : 'visualization',
+        'action'        : 'saved',
+        'type'          : visualization_name,
+        'title'         : title,
+        'config'        : h.dumps( config )
+    }
+    # save to existing visualization
+    if visualization_id:
+        url_for_args[ 'id' ] = visualization_id
+%>
+    <form action="${h.url_for( **url_for_args )}" method="post"><input type="submit" value="${text}" /></form>
+</%def>
diff --git a/config/plugins/visualizations/common/templates/script_entry_point.mako b/config/plugins/visualizations/common/templates/script_entry_point.mako
new file mode 100644
index 0000000..4065c59
--- /dev/null
+++ b/config/plugins/visualizations/common/templates/script_entry_point.mako
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+<%inherit file="visualization_base.mako"/>
+
+## No stylesheets
+<%def name="stylesheets()"></%def>
+## No javascript libraries
+<%def name="late_javascripts()">
+    <% tag_attrs = ' '.join([ '{0}="{1}"'.format( key, attr ) for key, attr in script_tag_attributes.items() ]) %>
+    <script type="text/javascript" ${tag_attrs}></script>
+</%def>
diff --git a/config/plugins/visualizations/common/templates/visualization_base.mako b/config/plugins/visualizations/common/templates/visualization_base.mako
new file mode 100644
index 0000000..80bab0b
--- /dev/null
+++ b/config/plugins/visualizations/common/templates/visualization_base.mako
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+<% _=n_ %>
+
+%if embedded:
+    ${self.as_embedded()}
+%else:
+    ${self.as_page()}
+%endif
+
+## render this inside another page or via ajax
+<%def name="as_embedded()">
+    ${self.stylesheets()}
+    ${self.javascripts()}
+    ${self.get_body()}
+</%def>
+
+## render this as its own page
+<%def name="as_page()">
+<!DOCTYPE HTML>
+<html>
+    <head>
+        <title>${self.title()}</title>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        ${self.metas()}
+        ${self.stylesheets()}
+        ${self.javascripts()}
+    </head>
+    <body>
+        ${self.get_body()}
+    </body>
+</html>
+</%def>
+
+## Default title
+<%def name="title()">${ visualization_display_name + ' | Galaxy' }</%def>
+
+## Additional metas can be defined by templates inheriting from this one.
+<%def name="metas()"></%def>
+
+## Default stylesheets
+<%def name="stylesheets()">
+${h.css('base')}
+</%def>
+
+## Default javascripts
+<%def name="javascripts()">
+${h.js(
+    "libs/jquery/jquery",
+    "libs/jquery/jquery.migrate"
+)}
+## connect to Galaxy here
+
+<script type="text/javascript">
+    // console protection
+    window.console = window.console || {
+        log     : function(){},
+        debug   : function(){},
+        info    : function(){},
+        warn    : function(){},
+        error   : function(){},
+        assert  : function(){}
+    };
+</script>
+</%def>
+
+## Default body
+<%def name="get_body()">
+    ${self.late_javascripts()}
+</%def>
+
+## Default javascripts
+<%def name="late_javascripts()">
+${h.js(
+    "libs/jquery/jquery",
+    "libs/jquery/jquery.migrate"
+)}
+</%def>
diff --git a/config/plugins/visualizations/csg/config/csg.xml b/config/plugins/visualizations/csg/config/csg.xml
new file mode 100644
index 0000000..8610b84
--- /dev/null
+++ b/config/plugins/visualizations/csg/config/csg.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="CSG Viewer">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.PlyAscii</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.PlyBinary</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.VtkAscii</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.VtkBinary</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <template>csg.mako</template>
+</visualization>
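
The param element above is what hands the chosen dataset to the template: var_name_in_template="hda" means csg.mako receives the HistoryDatasetAssociation as hda. A typical pattern (a sketch only; encode_id and the dataset display route are assumed from Galaxy's standard helpers) is to pass its encoded id and content URL into the viewer's JavaScript:

    <script type="text/javascript">
        // Sketch: expose the selected dataset to the client-side viewer code.
        var datasetId = "${ trans.security.encode_id( hda.id ) }";
        var dataUrl = "${ h.url_for( controller='dataset', action='display', dataset_id=trans.security.encode_id( hda.id ) ) }";
    </script>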
diff --git a/config/plugins/visualizations/csg/static/Detector.js b/config/plugins/visualizations/csg/static/Detector.js
new file mode 100644
index 0000000..4766d98
--- /dev/null
+++ b/config/plugins/visualizations/csg/static/Detector.js
@@ -0,0 +1,79 @@
+/**
+ * Origin: https://raw.githubusercontent.com/mrdoob/three.js/af21991fc7c4e1d35d6a93031707273d937af0f9/examples/js/Detector.js
+ * @author alteredq / http://alteredqualia.com/
+ * @author mr.doob / http://mrdoob.com/
+ */
+
+var Detector = {
+
+    canvas: !! window.CanvasRenderingContext2D,
+    webgl: ( function () {
+
+        try {
+
+            var canvas = document.createElement( 'canvas' ); return !! ( window.WebGLRenderingContext && ( canvas.getContext( 'webgl' ) || canvas.getContext( 'experimental-webgl' ) ) );
+
+        } catch ( e ) {
+
+            return false;
+
+        }
+
+    } )(),
+    workers: !! window.Worker,
+    fileapi: window.File && window.FileReader && window.FileList && window.Blob,
+
+    getWebGLErrorMessage: function () {
+
+        var element = document.createElement( 'div' );
+        element.id = 'webgl-error-message';
+        element.style.fontFamily = 'monospace';
+        element.style.fontSize = '13px';
+        element.style.fontWeight = 'normal';
+        element.style.textAlign = 'center';
+        element.style.background = '#fff';
+        element.style.color = '#000';
+        element.style.padding = '1.5em';
+        element.style.width = '400px';
+        element.style.margin = '5em auto 0';
+
+        if ( ! this.webgl ) {
+
+            element.innerHTML = window.WebGLRenderingContext ? [
+                'Your graphics card does not seem to support <a href="http://khronos.org/webgl/wiki/Getting_a_WebGL_Implementation" style="color:#000">WebGL</a>.<br />',
+                'Find out how to get it <a href="http://get.webgl.org/" style="color:#000">here</a>.'
+            ].join( '\n' ) : [
+                'Your browser does not seem to support <a href="http://khronos.org/webgl/wiki/Getting_a_WebGL_Implementation" style="color:#000">WebGL</a>.<br/>',
+                'Find out how to get it <a href="http://get.webgl.org/" style="color:#000">here</a>.'
+            ].join( '\n' );
+
+        }
+
+        return element;
+
+    },
+
+    addGetWebGLMessage: function ( parameters ) {
+
+        var parent, id, element;
+
+        parameters = parameters || {};
+
+        parent = parameters.parent !== undefined ? parameters.parent : document.body;
+        id = parameters.id !== undefined ? parameters.id : 'oldie';
+
+        element = Detector.getWebGLErrorMessage();
+        element.id = id;
+
+        parent.appendChild( element );
+
+    }
+
+};
+
+// browserify support
+if ( typeof module === 'object' ) {
+
+    module.exports = Detector;
+
+}
\ No newline at end of file
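
Detector is typically consulted once, before constructing a renderer; a minimal usage sketch (camera/scene setup omitted, three.js assumed loaded):

    // Sketch: fall back to the explanatory message when WebGL is missing.
    if ( Detector.webgl ) {
        var renderer = new THREE.WebGLRenderer( { antialias: true } );
    } else {
        Detector.addGetWebGLMessage();  // appends the error <div> to document.body
    }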
diff --git a/config/plugins/visualizations/csg/static/OrbitControls.js b/config/plugins/visualizations/csg/static/OrbitControls.js
new file mode 100644
index 0000000..30049cd
--- /dev/null
+++ b/config/plugins/visualizations/csg/static/OrbitControls.js
@@ -0,0 +1,583 @@
+/**
+ * @author qiao / https://github.com/qiao
+ * @author mrdoob / http://mrdoob.com
+ * @author alteredq / http://alteredqualia.com/
+ * @author WestLangley / http://github.com/WestLangley
+ * @author erich666 / http://erichaines.com
+ */
+/*global THREE, console */
+
+// This set of controls performs orbiting, dollying (zooming), and panning. It maintains
+// the "up" direction as +Y, unlike the TrackballControls. Touch on tablet and phones is
+// supported.
+//
+//    Orbit - left mouse / touch: one finger move
+//    Zoom - middle mouse, or mousewheel / touch: two finger spread or squish
+//    Pan - right mouse, or arrow keys / touch: three finger swipe
+//
+// This is a drop-in replacement for (most) TrackballControls used in examples.
+// That is, include this js file and wherever you see:
+//        controls = new THREE.TrackballControls( camera );
+//        controls.target.z = 150;
+// Simply substitute "OrbitControls" and the controls should work as-is.
+
+THREE.OrbitControls = function ( object, domElement ) {
+
+    this.object = object;
+    this.domElement = ( domElement !== undefined ) ? domElement : document;
+
+    // API
+
+    // Set to false to disable this control
+    this.enabled = true;
+
+    // "target" sets the location of focus, where the control orbits around
+    // and where it pans with respect to.
+    this.target = new THREE.Vector3();
+    // center is old, deprecated; use "target" instead
+    this.center = this.target;
+
+    // This option actually enables dollying in and out; left as "zoom" for
+    // backwards compatibility
+    this.noZoom = false;
+    this.zoomSpeed = 1.0;
+    // Limits to how far you can dolly in and out
+    this.minDistance = 0;
+    this.maxDistance = Infinity;
+
+    // Set to true to disable this control
+    this.noRotate = false;
+    this.rotateSpeed = 1.0;
+
+    // Set to true to disable this control
+    this.noPan = false;
+    this.keyPanSpeed = 7.0;    // pixels moved per arrow key push
+
+    // Set to true to automatically rotate around the target
+    this.autoRotate = false;
+    this.autoRotateSpeed = 2.0; // 30 seconds per round when fps is 60
+
+    // How far you can orbit vertically, upper and lower limits.
+    // Range is 0 to Math.PI radians.
+    this.minPolarAngle = 0; // radians
+    this.maxPolarAngle = Math.PI; // radians
+
+    // Set to true to disable use of the keys
+    this.noKeys = false;
+    // The four arrow keys
+    this.keys = { LEFT: 37, UP: 38, RIGHT: 39, BOTTOM: 40 };
+
+    ////////////
+    // internals
+
+    var scope = this;
+
+    var EPS = 0.000001;
+
+    var rotateStart = new THREE.Vector2();
+    var rotateEnd = new THREE.Vector2();
+    var rotateDelta = new THREE.Vector2();
+
+    var panStart = new THREE.Vector2();
+    var panEnd = new THREE.Vector2();
+    var panDelta = new THREE.Vector2();
+
+    var dollyStart = new THREE.Vector2();
+    var dollyEnd = new THREE.Vector2();
+    var dollyDelta = new THREE.Vector2();
+
+    var phiDelta = 0;
+    var thetaDelta = 0;
+    var scale = 1;
+    var pan = new THREE.Vector3();
+
+    var lastPosition = new THREE.Vector3();
+
+    var STATE = { NONE : -1, ROTATE : 0, DOLLY : 1, PAN : 2, TOUCH_ROTATE : 3, TOUCH_DOLLY : 4, TOUCH_PAN : 5 };
+    var state = STATE.NONE;
+
+    // events
+
+    var changeEvent = { type: 'change' };
+
+
+    this.rotateLeft = function ( angle ) {
+
+        if ( angle === undefined ) {
+
+            angle = getAutoRotationAngle();
+
+        }
+
+        thetaDelta -= angle;
+
+    };
+
+    this.rotateUp = function ( angle ) {
+
+        if ( angle === undefined ) {
+
+            angle = getAutoRotationAngle();
+
+        }
+
+        phiDelta -= angle;
+
+    };
+
+    // pass in distance in world space to move left
+    this.panLeft = function ( distance ) {
+
+        var panOffset = new THREE.Vector3();
+        var te = this.object.matrix.elements;
+        // get X column of matrix
+        panOffset.set( te[0], te[1], te[2] );
+        panOffset.multiplyScalar(-distance);
+        
+        pan.add( panOffset );
+
+    };
+
+    // pass in distance in world space to move up
+    this.panUp = function ( distance ) {
+
+        var panOffset = new THREE.Vector3();
+        var te = this.object.matrix.elements;
+        // get Y column of matrix
+        panOffset.set( te[4], te[5], te[6] );
+        panOffset.multiplyScalar(distance);
+        
+        pan.add( panOffset );
+    };
+    
+    // main entry point; pass in Vector2 of change desired in pixel space,
+    // right and down are positive
+    this.pan = function ( delta ) {
+
+        var element = scope.domElement === document ? scope.domElement.body : scope.domElement;
+
+        if ( scope.object.fov !== undefined ) {
+
+            // perspective
+            var position = scope.object.position;
+            var offset = position.clone().sub( scope.target );
+            var targetDistance = offset.length();
+
+            // half of the fov is center to top of screen
+            targetDistance *= Math.tan( (scope.object.fov/2) * Math.PI / 180.0 );
+            // we actually don't use screenWidth, since perspective camera is fixed to screen height
+            scope.panLeft( 2 * delta.x * targetDistance / element.clientHeight );
+            scope.panUp( 2 * delta.y * targetDistance / element.clientHeight );
+
+        } else if ( scope.object.top !== undefined ) {
+
+            // orthographic
+            scope.panLeft( delta.x * (scope.object.right - scope.object.left) / element.clientWidth );
+            scope.panUp( delta.y * (scope.object.top - scope.object.bottom) / element.clientHeight );
+
+        } else {
+
+            // camera is neither orthographic nor perspective - warn user
+            console.warn( 'WARNING: OrbitControls.js encountered an unknown camera type - pan disabled.' );
+
+        }
+
+    };
+
+    this.dollyIn = function ( dollyScale ) {
+
+        if ( dollyScale === undefined ) {
+
+            dollyScale = getZoomScale();
+
+        }
+
+        scale /= dollyScale;
+
+    };
+
+    this.dollyOut = function ( dollyScale ) {
+
+        if ( dollyScale === undefined ) {
+
+            dollyScale = getZoomScale();
+
+        }
+
+        scale *= dollyScale;
+
+    };
+
+    this.update = function () {
+
+        var position = this.object.position;
+        var offset = position.clone().sub( this.target );
+
+        // angle from z-axis around y-axis
+
+        var theta = Math.atan2( offset.x, offset.z );
+
+        // angle from y-axis
+
+        var phi = Math.atan2( Math.sqrt( offset.x * offset.x + offset.z * offset.z ), offset.y );
+
+        if ( this.autoRotate ) {
+
+            this.rotateLeft( getAutoRotationAngle() );
+
+        }
+
+        theta += thetaDelta;
+        phi += phiDelta;
+
+        // restrict phi to be between desired limits
+        phi = Math.max( this.minPolarAngle, Math.min( this.maxPolarAngle, phi ) );
+
+        // restrict phi to be betwee EPS and PI-EPS
+        phi = Math.max( EPS, Math.min( Math.PI - EPS, phi ) );
+
+        var radius = offset.length() * scale;
+
+        // restrict radius to be between desired limits
+        radius = Math.max( this.minDistance, Math.min( this.maxDistance, radius ) );
+        
+        // move target to panned location
+        this.target.add( pan );
+
+        offset.x = radius * Math.sin( phi ) * Math.sin( theta );
+        offset.y = radius * Math.cos( phi );
+        offset.z = radius * Math.sin( phi ) * Math.cos( theta );
+
+        position.copy( this.target ).add( offset );
+
+        this.object.lookAt( this.target );
+
+        thetaDelta = 0;
+        phiDelta = 0;
+        scale = 1;
+        pan.set(0,0,0);
+
+        if ( lastPosition.distanceTo( this.object.position ) > 0 ) {
+
+            this.dispatchEvent( changeEvent );
+
+            lastPosition.copy( this.object.position );
+
+        }
+
+    };
+
+
+    function getAutoRotationAngle() {
+
+        return 2 * Math.PI / 60 / 60 * scope.autoRotateSpeed;
+
+    }
+
+    function getZoomScale() {
+
+        return Math.pow( 0.95, scope.zoomSpeed );
+
+    }
+
+    function onMouseDown( event ) {
+
+        if ( scope.enabled === false ) { return; }
+        event.preventDefault();
+
+        if ( event.button === 0 ) {
+            if ( scope.noRotate === true ) { return; }
+
+            state = STATE.ROTATE;
+
+            rotateStart.set( event.clientX, event.clientY );
+
+        } else if ( event.button === 1 ) {
+            if ( scope.noZoom === true ) { return; }
+
+            state = STATE.DOLLY;
+
+            dollyStart.set( event.clientX, event.clientY );
+
+        } else if ( event.button === 2 ) {
+            if ( scope.noPan === true ) { return; }
+
+            state = STATE.PAN;
+
+            panStart.set( event.clientX, event.clientY );
+
+        }
+
+        // Greggman fix: https://github.com/greggman/three.js/commit/fde9f9917d6d8381f06bf22cdff766029d1761be
+        scope.domElement.addEventListener( 'mousemove', onMouseMove, false );
+        scope.domElement.addEventListener( 'mouseup', onMouseUp, false );
+
+    }
+
+    function onMouseMove( event ) {
+
+        if ( scope.enabled === false ) return;
+
+        event.preventDefault();
+
+        var element = scope.domElement === document ? scope.domElement.body : scope.domElement;
+
+        if ( state === STATE.ROTATE ) {
+
+            if ( scope.noRotate === true ) return;
+
+            rotateEnd.set( event.clientX, event.clientY );
+            rotateDelta.subVectors( rotateEnd, rotateStart );
+
+            // rotating across whole screen goes 360 degrees around
+            scope.rotateLeft( 2 * Math.PI * rotateDelta.x / element.clientWidth * scope.rotateSpeed );
+            // rotating up and down along whole screen attempts to go 360, but limited to 180
+            scope.rotateUp( 2 * Math.PI * rotateDelta.y / element.clientHeight * scope.rotateSpeed );
+
+            rotateStart.copy( rotateEnd );
+
+        } else if ( state === STATE.DOLLY ) {
+
+            if ( scope.noZoom === true ) return;
+
+            dollyEnd.set( event.clientX, event.clientY );
+            dollyDelta.subVectors( dollyEnd, dollyStart );
+
+            if ( dollyDelta.y > 0 ) {
+
+                scope.dollyIn();
+
+            } else {
+
+                scope.dollyOut();
+
+            }
+
+            dollyStart.copy( dollyEnd );
+
+        } else if ( state === STATE.PAN ) {
+
+            if ( scope.noPan === true ) return;
+
+            panEnd.set( event.clientX, event.clientY );
+            panDelta.subVectors( panEnd, panStart );
+            
+            scope.pan( panDelta );
+
+            panStart.copy( panEnd );
+
+        }
+
+        // Greggman fix: https://github.com/greggman/three.js/commit/fde9f9917d6d8381f06bf22cdff766029d1761be
+        scope.update();
+
+    }
+
+    function onMouseUp( /* event */ ) {
+
+        if ( scope.enabled === false ) return;
+
+        // Greggman fix: https://github.com/greggman/three.js/commit/fde9f9917d6d8381f06bf22cdff766029d1761be
+        scope.domElement.removeEventListener( 'mousemove', onMouseMove, false );
+        scope.domElement.removeEventListener( 'mouseup', onMouseUp, false );
+
+        state = STATE.NONE;
+
+    }
+
+    function onMouseWheel( event ) {
+
+        if ( scope.enabled === false || scope.noZoom === true ) return;
+
+        var delta = 0;
+
+        if ( event.wheelDelta ) { // WebKit / Opera / Explorer 9
+
+            delta = event.wheelDelta;
+
+        } else if ( event.detail ) { // Firefox
+
+            delta = - event.detail;
+
+        }
+
+        if ( delta > 0 ) {
+
+            scope.dollyOut();
+
+        } else {
+
+            scope.dollyIn();
+
+        }
+
+    }
+
+    function onKeyDown( event ) {
+
+        if ( scope.enabled === false ) { return; }
+        if ( scope.noKeys === true ) { return; }
+        if ( scope.noPan === true ) { return; }
+
+        // arrow keys pan by keyPanSpeed pixels per press, for precise positioning
+        // Greggman fix: https://github.com/greggman/three.js/commit/fde9f9917d6d8381f06bf22cdff766029d1761be
+        var needUpdate = false;
+        
+        switch ( event.keyCode ) {
+
+            case scope.keys.UP:
+                scope.pan( new THREE.Vector2( 0, scope.keyPanSpeed ) );
+                needUpdate = true;
+                break;
+            case scope.keys.BOTTOM:
+                scope.pan( new THREE.Vector2( 0, -scope.keyPanSpeed ) );
+                needUpdate = true;
+                break;
+            case scope.keys.LEFT:
+                scope.pan( new THREE.Vector2( scope.keyPanSpeed, 0 ) );
+                needUpdate = true;
+                break;
+            case scope.keys.RIGHT:
+                scope.pan( new THREE.Vector2( -scope.keyPanSpeed, 0 ) );
+                needUpdate = true;
+                break;
+        }
+
+        // Greggman fix: https://github.com/greggman/three.js/commit/fde9f9917d6d8381f06bf22cdff766029d1761be
+        if ( needUpdate ) {
+
+            scope.update();
+
+        }
+
+    }
+    
+    function touchstart( event ) {
+
+        if ( scope.enabled === false ) { return; }
+
+        switch ( event.touches.length ) {
+
+            case 1:    // one-fingered touch: rotate
+                if ( scope.noRotate === true ) { return; }
+
+                state = STATE.TOUCH_ROTATE;
+
+                rotateStart.set( event.touches[ 0 ].pageX, event.touches[ 0 ].pageY );
+                break;
+
+            case 2:    // two-fingered touch: dolly
+                if ( scope.noZoom === true ) { return; }
+
+                state = STATE.TOUCH_DOLLY;
+
+                var dx = event.touches[ 0 ].pageX - event.touches[ 1 ].pageX;
+                var dy = event.touches[ 0 ].pageY - event.touches[ 1 ].pageY;
+                var distance = Math.sqrt( dx * dx + dy * dy );
+                dollyStart.set( 0, distance );
+                break;
+
+            case 3: // three-fingered touch: pan
+                if ( scope.noPan === true ) { return; }
+
+                state = STATE.TOUCH_PAN;
+
+                panStart.set( event.touches[ 0 ].pageX, event.touches[ 0 ].pageY );
+                break;
+
+            default:
+                state = STATE.NONE;
+
+        }
+    }
+
+    function touchmove( event ) {
+
+        if ( scope.enabled === false ) { return; }
+
+        event.preventDefault();
+        event.stopPropagation();
+
+        var element = scope.domElement === document ? scope.domElement.body : scope.domElement;
+
+        switch ( event.touches.length ) {
+
+            case 1: // one-fingered touch: rotate
+                if ( scope.noRotate === true ) { return; }
+                if ( state !== STATE.TOUCH_ROTATE ) { return; }
+
+                rotateEnd.set( event.touches[ 0 ].pageX, event.touches[ 0 ].pageY );
+                rotateDelta.subVectors( rotateEnd, rotateStart );
+
+                // rotating across whole screen goes 360 degrees around
+                scope.rotateLeft( 2 * Math.PI * rotateDelta.x / element.clientWidth * scope.rotateSpeed );
+                // rotating up and down along whole screen attempts to go 360, but limited to 180
+                scope.rotateUp( 2 * Math.PI * rotateDelta.y / element.clientHeight * scope.rotateSpeed );
+
+                rotateStart.copy( rotateEnd );
+                break;
+
+            case 2: // two-fingered touch: dolly
+                if ( scope.noZoom === true ) { return; }
+                if ( state !== STATE.TOUCH_DOLLY ) { return; }
+
+                var dx = event.touches[ 0 ].pageX - event.touches[ 1 ].pageX;
+                var dy = event.touches[ 0 ].pageY - event.touches[ 1 ].pageY;
+                var distance = Math.sqrt( dx * dx + dy * dy );
+
+                dollyEnd.set( 0, distance );
+                dollyDelta.subVectors( dollyEnd, dollyStart );
+
+                if ( dollyDelta.y > 0 ) {
+
+                    scope.dollyOut();
+
+                } else {
+
+                    scope.dollyIn();
+
+                }
+
+                dollyStart.copy( dollyEnd );
+                break;
+
+            case 3: // three-fingered touch: pan
+                if ( scope.noPan === true ) { return; }
+                if ( state !== STATE.TOUCH_PAN ) { return; }
+
+                panEnd.set( event.touches[ 0 ].pageX, event.touches[ 0 ].pageY );
+                panDelta.subVectors( panEnd, panStart );
+                
+                scope.pan( panDelta );
+
+                panStart.copy( panEnd );
+                break;
+
+            default:
+                state = STATE.NONE;
+
+        }
+
+    }
+
+    function touchend( /* event */ ) {
+
+        if ( scope.enabled === false ) { return; }
+
+        state = STATE.NONE;
+    }
+
+    this.domElement.addEventListener( 'contextmenu', function ( event ) { event.preventDefault(); }, false );
+    this.domElement.addEventListener( 'mousedown', onMouseDown, false );
+    this.domElement.addEventListener( 'mousewheel', onMouseWheel, false );
+    this.domElement.addEventListener( 'DOMMouseScroll', onMouseWheel, false ); // firefox
+
+    this.domElement.addEventListener( 'keydown', onKeyDown, false );
+
+    this.domElement.addEventListener( 'touchstart', touchstart, false );
+    this.domElement.addEventListener( 'touchend', touchend, false );
+    this.domElement.addEventListener( 'touchmove', touchmove, false );
+
+};
+
+THREE.OrbitControls.prototype = Object.create( THREE.EventDispatcher.prototype );
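
Matching the substitution described in the header comment, wiring the controls up looks like the sketch below (camera, renderer and a render() callback are assumed to exist):

    // Sketch: re-render on demand; update() dispatches 'change' whenever
    // the camera actually moved.
    var controls = new THREE.OrbitControls( camera, renderer.domElement );
    controls.target.z = 150;
    controls.addEventListener( 'change', render );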
diff --git a/config/plugins/visualizations/csg/static/PLYLoader.js b/config/plugins/visualizations/csg/static/PLYLoader.js
new file mode 100644
index 0000000..6dbd80d
--- /dev/null
+++ b/config/plugins/visualizations/csg/static/PLYLoader.js
@@ -0,0 +1,307 @@
+/**
+ * @author Wei Meng / http://about.me/menway
+ * Description: A THREE loader for PLY files (the Polygon File Format, a.k.a. the Stanford Triangle Format), in both ASCII and binary form.
+ * Limitations: ASCII decoding assumes file is UTF-8.
+ *
+ * Usage:
+ *    var loader = new THREE.PLYLoader();
+ *    loader.load('./models/ply/ascii/dolphins.ply', function (geometry) {
+ *        scene.add( new THREE.Mesh( geometry ) );
+ *    } );
+ *
+ * If the PLY file uses non-standard property names, they can be mapped while
+ * loading. For example, the following maps the properties
+ * “diffuse_(red|green|blue)” in the file to standard color names.
+ *
+ * loader.setPropertyNameMapping( {
+ *    diffuse_red: 'red',
+ *    diffuse_green: 'green',
+ *    diffuse_blue: 'blue'
+ * } );
+ */
+
+THREE.PLYLoader = function () {
+    this.propertyNameMapping = {};
+};
+
+THREE.PLYLoader.prototype = {
+    constructor: THREE.PLYLoader,
+    setPropertyNameMapping: function ( mapping ) {
+        this.propertyNameMapping = mapping;
+    },
+    load: function ( url, callback ) {
+        var scope = this;
+        var request = new XMLHttpRequest();
+        request.addEventListener( 'load', function ( event ) {
+            var geometry = scope.parse( event.target.response );
+            scope.dispatchEvent( { type: 'load', content: geometry } );
+            if ( callback ) callback( geometry );
+        }, false );
+
+        request.addEventListener( 'progress', function ( event ) {
+            scope.dispatchEvent( { type: 'progress', loaded: event.loaded, total: event.total } );
+        }, false );
+
+        request.addEventListener( 'error', function () {
+            scope.dispatchEvent( { type: 'error', message: 'Couldn\'t load URL [' + url + ']' } );
+        }, false );
+
+        request.open( 'GET', url, true );
+        request.responseType = "arraybuffer";
+        request.send( null );
+    },
+
+    bin2str: function (buf) {
+        var array_buffer = new Uint8Array(buf);
+        var str = '';
+        for (var i = 0; i < buf.byteLength; i ++) {
+            str += String.fromCharCode(array_buffer[i]); // implicitly assumes single-byte (ASCII) characters
+        }
+        return str;
+    },
+
+    isASCII: function( data ) {
+        var header = this.parseHeader( this.bin2str( data ) );
+        return header.format === "ascii";
+    },
+
+    parse: function ( data ) {
+        if ( data instanceof ArrayBuffer ) {
+            return this.isASCII( data )
+                ? this.parseASCII( this.bin2str( data ) )
+                : this.parseBinary( data );
+        } else {
+            return this.parseASCII( data );
+        }
+    },
+
+    parseHeader: function ( data ) {
+        var patternHeader = /ply([\s\S]*)end_header\s/;
+        var headerText = "";
+        var headerLength = 0;
+        var result = patternHeader.exec( data );
+        if ( result !== null ) {
+            headerText = result [ 1 ];
+            headerLength = result[ 0 ].length;
+        }
+        var header = {
+            comments: [],
+            elements: [],
+            headerLength: headerLength
+        };
+        var lines = headerText.split( '\n' );
+        var currentElement = undefined;
+        var lineType, lineValues;
+
+        function make_ply_element_property( propertyValues, propertyNameMapping ) {
+            var property = {
+                type: propertyValues[ 0 ]
+            };
+            if ( property.type === 'list' ) {
+                property.name = propertyValues[ 3 ];
+                property.countType = propertyValues[ 1 ];
+                property.itemType = propertyValues[ 2 ];
+            } else {
+                property.name = propertyValues[ 1 ];
+            }
+            if ( property.name in propertyNameMapping ) {
+                property.name = propertyNameMapping[ property.name ];
+            }
+            return property;
+        }
+
+        for ( var i = 0; i < lines.length; i ++ ) {
+            var line = lines[ i ];
+            line = line.trim();
+            if ( line === "" ) { continue; }
+            lineValues = line.split( /\s+/ );
+            lineType = lineValues.shift();
+            line = lineValues.join(" ");
+            switch ( lineType ) {
+            case "format":
+                header.format = lineValues[0];
+                header.version = lineValues[1];
+                break;
+            case "comment":
+                header.comments.push(line);
+                break;
+            // Hack to handle obj_info lines generated by vtk.vtkPLYWriter()
+            case "obj_info":
+                header.comments.push(line);
+                break;
+            case "element":
+                if ( !(currentElement === undefined) ) {
+                    header.elements.push(currentElement);
+                }
+                currentElement = Object();
+                currentElement.name = lineValues[0];
+                currentElement.count = parseInt( lineValues[1] );
+                currentElement.properties = [];
+                break;
+            case "property":
+                currentElement.properties.push( make_ply_element_property( lineValues, this.propertyNameMapping ) );
+                break;
+            default:
+                console.log("unhandled", lineType, lineValues);
+            }
+        }
+        if ( !(currentElement === undefined) ) {
+            header.elements.push(currentElement);
+        }
+        return header;
+    },
+
+    parseASCIINumber: function ( n, type ) {
+        switch ( type ) {
+        case 'char': case 'uchar': case 'short': case 'ushort': case 'int': case 'uint':
+        case 'int8': case 'uint8': case 'int16': case 'uint16': case 'int32': case 'uint32':
+            return parseInt( n );
+        case 'float': case 'double': case 'float32': case 'float64':
+            return parseFloat( n );
+        }
+    },
+
+    parseASCIIElement: function ( properties, line ) {
+        var values = line.split( /\s+/ );
+        var element = Object();
+        for ( var i = 0; i < properties.length; i ++ ) {
+            if ( properties[i].type === "list" ) {
+                var list = [];
+                var n = this.parseASCIINumber( values.shift(), properties[i].countType );
+                for ( var j = 0; j < n; j ++ ) {
+                    list.push( this.parseASCIINumber( values.shift(), properties[i].itemType ) );
+                }
+                element[ properties[i].name ] = list;
+            } else {
+                element[ properties[i].name ] = this.parseASCIINumber( values.shift(), properties[i].type );
+            }
+        }
+        return element;
+    },
+
+    parseASCII: function ( data ) {
+        // PLY ascii format specification, as per http://en.wikipedia.org/wiki/PLY_(file_format)
+        var geometry = new THREE.Geometry();
+        var result;
+        var header = this.parseHeader( data );
+        var patternBody = /end_header\s([\s\S]*)$/;
+        var body = "";
+        if ( ( result = patternBody.exec( data ) ) !== null ) {
+            body = result [ 1 ];
+        }
+        var lines = body.split( '\n' );
+        var currentElement = 0;
+        var currentElementCount = 0;
+        geometry.useColor = false;
+        for ( var i = 0; i < lines.length; i ++ ) {
+            var line = lines[ i ];
+            line = line.trim();
+            if ( line === "" ) { continue; }
+            if ( currentElementCount >= header.elements[currentElement].count ) {
+                currentElement ++;
+                currentElementCount = 0;
+            }
+            var element = this.parseASCIIElement( header.elements[currentElement].properties, line );
+            this.handleElement( geometry, header.elements[currentElement].name, element );
+            currentElementCount ++;
+        }
+        return this.postProcess( geometry );
+    },
+    postProcess: function ( geometry ) {
+        if ( geometry.useColor ) {
+            for ( var i = 0; i < geometry.faces.length; i ++ ) {
+                geometry.faces[i].vertexColors = [
+                    geometry.colors[geometry.faces[i].a],
+                    geometry.colors[geometry.faces[i].b],
+                    geometry.colors[geometry.faces[i].c]
+                ];
+            }
+            geometry.elementsNeedUpdate = true;
+        }
+        geometry.computeBoundingSphere();
+        return geometry;
+    },
+
+    handleElement: function ( geometry, elementName, element ) {
+        if ( elementName === "vertex" ) {
+            geometry.vertices.push(
+                new THREE.Vector3( element.x, element.y, element.z )
+            );
+            if ( 'red' in element && 'green' in element && 'blue' in element ) {
+                geometry.useColor = true;
+                var color = new THREE.Color();
+                color.setRGB( element.red / 255.0, element.green / 255.0, element.blue / 255.0 );
+                geometry.colors.push( color );
+            }
+        } else if ( elementName === "face" ) {
+            var vertex_indices = element.vertex_indices;
+            if ( vertex_indices.length === 3 ) {
+                geometry.faces.push(
+                    new THREE.Face3( vertex_indices[ 0 ], vertex_indices[ 1 ], vertex_indices[ 2 ] )
+                );
+            } else if ( vertex_indices.length === 4 ) {
+                geometry.faces.push(
+                    new THREE.Face3( vertex_indices[ 0 ], vertex_indices[ 1 ], vertex_indices[ 3 ] ),
+                    new THREE.Face3( vertex_indices[ 1 ], vertex_indices[ 2 ], vertex_indices[ 3 ] )
+                );
+            }
+        }
+    },
+
+    binaryRead: function ( dataview, at, type, little_endian ) {
+        switch ( type ) {
+            // correspondences for non-specific length types here match rply:
+        case 'int8':    case 'char':   return [ dataview.getInt8( at ), 1 ];
+        case 'uint8':   case 'uchar':  return [ dataview.getUint8( at ), 1 ];
+        case 'int16':   case 'short':  return [ dataview.getInt16( at, little_endian ), 2 ];
+        case 'uint16':  case 'ushort': return [ dataview.getUint16( at, little_endian ), 2 ];
+        case 'int32':   case 'int':    return [ dataview.getInt32( at, little_endian ), 4 ];
+        case 'uint32':  case 'uint':   return [ dataview.getUint32( at, little_endian ), 4 ];
+        case 'float32': case 'float':  return [ dataview.getFloat32( at, little_endian ), 4 ];
+        case 'float64': case 'double': return [ dataview.getFloat64( at, little_endian ), 8 ];
+    },
+
+    binaryReadElement: function ( dataview, at, properties, little_endian ) {
+        var element = Object();
+        var result, read = 0;
+        for ( var i = 0; i < properties.length; i ++ ) {
+            if ( properties[i].type === "list" ) {
+                var list = [];
+                result = this.binaryRead( dataview, at + read, properties[i].countType, little_endian );
+                var n = result[0];
+                read += result[1];
+                for ( var j = 0; j < n; j ++ ) {
+                    result = this.binaryRead( dataview, at + read, properties[i].itemType, little_endian );
+                    list.push( result[0] );
+                    read += result[1];
+                }
+                element[ properties[i].name ] = list;
+            } else {
+                result = this.binaryRead( dataview, at + read, properties[i].type, little_endian );
+                element[ properties[i].name ] = result[0];
+                read += result[1];
+            }
+        }
+        return [ element, read ];
+    },
+
+    parseBinary: function ( data ) {
+        var geometry = new THREE.Geometry();
+        var header = this.parseHeader( this.bin2str( data ) );
+        var little_endian = (header.format === "binary_little_endian");
+        var body = new DataView( data, header.headerLength );
+        var result, loc = 0;
+        for ( var currentElement = 0; currentElement < header.elements.length; currentElement ++ ) {
+            for ( var currentElementCount = 0; currentElementCount < header.elements[currentElement].count; currentElementCount ++ ) {
+                result = this.binaryReadElement( body, loc, header.elements[currentElement].properties, little_endian );
+                loc += result[1];
+                var element = result[0];
+                this.handleElement( geometry, header.elements[currentElement].name, element );
+            }
+        }
+        return this.postProcess( geometry );
+    }
+};
+
+THREE.EventDispatcher.prototype.apply( THREE.PLYLoader.prototype );
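
Expanding on the usage notes in the header, a sketch that also honours per-vertex colors when the file provides them (url and scene are assumed to exist):

    // Sketch: map vendor-specific color property names, then build a mesh.
    var loader = new THREE.PLYLoader();
    loader.setPropertyNameMapping( {
        diffuse_red   : 'red',
        diffuse_green : 'green',
        diffuse_blue  : 'blue'
    } );
    loader.load( url, function ( geometry ) {
        var material = new THREE.MeshBasicMaterial( { vertexColors: THREE.VertexColors } );
        scene.add( new THREE.Mesh( geometry, material ) );
    } );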
diff --git a/config/plugins/visualizations/csg/static/VTKLoader.js b/config/plugins/visualizations/csg/static/VTKLoader.js
new file mode 100755
index 0000000..963eb2b
--- /dev/null
+++ b/config/plugins/visualizations/csg/static/VTKLoader.js
@@ -0,0 +1,188 @@
+/**
+ * Origin: https://github.com/mrdoob/three.js/blob/af21991fc7c4e1d35d6a93031707273d937af0f9/examples/js/loaders/VTKLoader.js
+ */
+
+THREE.VTKLoader = function ( manager ) {
+    this.manager = ( manager !== undefined ) ? manager : THREE.DefaultLoadingManager;
+};
+
+THREE.VTKLoader.prototype = {
+    constructor: THREE.VTKLoader,
+    load: function ( url, onLoad, onProgress, onError ) {
+        // Will we bump into trouble reading the whole file into memory?
+        var scope = this;
+        var loader = new THREE.XHRLoader( scope.manager );
+        loader.setCrossOrigin( this.crossOrigin );
+        loader.load( url, function ( text ) {
+            onLoad( scope.parse( text ) );
+        },
+        onProgress, onError );
+    },
+
+    setCrossOrigin: function ( value ) {
+        this.crossOrigin = value;
+    },
+    parse: function ( data ) {
+        // connectivity of the triangles
+        var indices = [];
+        // triangles vertices
+        var positions = [];
+        // red, green, blue colors in the range 0 to 1
+        var colors = [];
+        // normal vector, one per vertex
+        var normals = [];
+        var result;
+        // pattern for reading vertices, 3 floats or integers
+        var pat3Floats = /(\-?\d+\.?[\d\-\+e]*)\s+(\-?\d+\.?[\d\-\+e]*)\s+(\-?\d+\.?[\d\-\+e]*)/g;
+        // pattern for connectivity, an integer followed by any number of ints
+        // the first integer is the number of polygon nodes
+        var patConnectivity = /^(\d+)\s+([\s\d]*)/;
+        // indicates start of vertex data section
+        var patPOINTS = /^POINTS /;
+        // indicates start of polygon connectivity section
+        var patPOLYGONS = /^POLYGONS /;
+        // POINT_DATA number_of_values 
+        var patPOINT_DATA = /^POINT_DATA[ ]+(\d+)/;
+        // CELL_DATA number_of_polys
+        var patCELL_DATA = /^CELL_DATA[ ]+(\d+)/;
+        // Start of color section
+        var patCOLOR_SCALARS = /^COLOR_SCALARS[ ]+(\w+)[ ]+3/;
+        // NORMALS Normals float
+        var patNORMALS = /^NORMALS[ ]+(\w+)[ ]+(\w+)/;
+        var inPointsSection = false;
+        var inPolygonsSection = false;
+        var inPointDataSection = false;
+        var inCellDataSection = false;
+        var inColorSection = false;
+        var inNormalsSection = false;
+        var lines = data.split('\n');
+
+        for ( var i = 0; i < lines.length; i ++ ) {
+            var line = lines[ i ];
+            if ( inPointsSection ) {
+                // get the vertices
+                while ( ( result = pat3Floats.exec( line ) ) !== null ) {
+                    var x = parseFloat( result[ 1 ] );
+                    var y = parseFloat( result[ 2 ] );
+                    var z = parseFloat( result[ 3 ] );
+                    positions.push( x, y, z );
+                }
+            } else if ( inPolygonsSection ) {
+                if ( ( result = patConnectivity.exec( line ) ) !== null ) {
+                    // numVertices i0 i1 i2 ...
+                    var numVertices = parseInt( result[ 1 ] );
+                    var inds = result[ 2 ].split(/\s+/); 
+                    if ( numVertices >= 3 ) {
+                        var i0 = parseInt( inds[ 0 ] );
+                        var i1, i2;
+                        var k = 1;
+                        // split the polygon in numVertices - 2 triangles
+                        for ( var j = 0; j < numVertices - 2; ++j ) {
+                            i1 = parseInt( inds[ k ] );
+                            i2 = parseInt( inds[ k + 1 ] );
+                            indices.push( i0, i1, i2 );
+                            k++;
+                        }
+                    }
+                }
+            } else if ( inPointDataSection || inCellDataSection ) {
+                if ( inColorSection ) {
+                    // Get the colors
+                    while ( ( result = pat3Floats.exec( line ) ) !== null ) {
+                        var r = parseFloat( result[ 1 ] );
+                        var g = parseFloat( result[ 2 ] );
+                        var b = parseFloat( result[ 3 ] );
+                        colors.push( r, g, b );
+                    }
+                } else if ( inNormalsSection ) {
+                    // Get the normal vectors
+                    while ( ( result = pat3Floats.exec( line ) ) !== null ) {
+                        var nx = parseFloat( result[ 1 ] );
+                        var ny = parseFloat( result[ 2 ] );
+                        var nz = parseFloat( result[ 3 ] );
+                        normals.push( nx, ny, nz );
+                    }
+                }
+            }
+            if ( patPOLYGONS.exec( line ) !== null ) {
+                inPolygonsSection = true;
+                inPointsSection = false;
+            } else if ( patPOINTS.exec( line ) !== null ) {
+                inPolygonsSection = false;
+                inPointsSection = true;
+            } else if ( patPOINT_DATA.exec( line ) !== null ) {
+                inPointDataSection = true;
+                inPointsSection = false;
+                inPolygonsSection = false;
+            } else if ( patCELL_DATA.exec( line ) !== null ) {
+                inCellDataSection = true;
+                inPointsSection = false;
+                inPolygonsSection = false;
+            } else if ( patCOLOR_SCALARS.exec( line ) !== null ) {
+                inColorSection = true;
+                inNormalsSection = false;
+                inPointsSection = false;
+                inPolygonsSection = false;
+            } else if ( patNORMALS.exec( line ) !== null ) {
+                inNormalsSection = true;
+                inColorSection = false;
+                inPointsSection = false;
+                inPolygonsSection = false;
+            }
+        }
+
+        var geometry;
+        var stagger = 'point';
+        if ( colors.length == indices.length ) {
+            stagger = 'cell';
+        }
+        if ( stagger == 'point' ) {
+            // Nodal. Use BufferGeometry
+            geometry = new THREE.BufferGeometry();
+            geometry.addAttribute( 'index', new THREE.BufferAttribute( new ( indices.length > 65535 ? Uint32Array : Uint16Array )( indices ), 1 ) );
+            geometry.addAttribute( 'position', new THREE.BufferAttribute( new Float32Array( positions ), 3 ) );
+
+            if ( colors.length == positions.length ) {
+                geometry.addAttribute( 'color', new THREE.BufferAttribute( new Float32Array( colors ), 3 ) );
+            }
+            if ( normals.length == positions.length ) {
+                geometry.addAttribute( 'normal', new THREE.BufferAttribute( new Float32Array( normals ), 3 ) );
+            }
+        } else {
+            // Cell centered colors. The only way to attach a solid color to each triangle
+            // is to use Geometry, which is less efficient than BufferGeometry
+            geometry = new THREE.Geometry();
+            var numTriangles = indices.length / 3;
+            var numPoints = positions.length / 3;
+            var va, vb, vc;
+            var face;
+            var ia, ib, ic;
+            var x, y, z;
+            var r, g, b;
+            for ( var j = 0; j < numPoints; ++j ) {
+                x = positions[ 3*j + 0 ];
+                y = positions[ 3*j + 1 ];
+                z = positions[ 3*j + 2 ];
+                geometry.vertices.push( new THREE.Vector3( x, y, z ) );
+            }
+            for ( var i = 0; i < numTriangles; ++i ) {
+                ia = indices[ 3*i + 0 ];
+                ib = indices[ 3*i + 1 ];
+                ic = indices[ 3*i + 2 ];
+                geometry.faces.push( new THREE.Face3( ia, ib, ic ) );
+            }
+            if ( colors.length == numTriangles * 3 ) {
+                for ( var i = 0; i < numTriangles; ++i ) {
+                    face = geometry.faces[i];
+                    r = colors[ 3*i + 0 ];
+                    g = colors[ 3*i + 1 ];
+                    b = colors[ 3*i + 2 ];
+                    face.color = new THREE.Color().setRGB( r, g, b );
+                }
+            }
+        }
+        return geometry;
+    }
+};
+
+THREE.EventDispatcher.prototype.apply( THREE.VTKLoader.prototype );
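
Usage mirrors PLYLoader; note that parse() returns a BufferGeometry for nodal (point) colors but a plain Geometry for cell-centered colors, and both render with a standard mesh (url, scene and lighting are assumed to exist):

    // Sketch: load a VTK polydata file and add it to the scene.
    var loader = new THREE.VTKLoader();
    loader.load( url, function ( geometry ) {
        var mesh = new THREE.Mesh( geometry, new THREE.MeshLambertMaterial( { color: 0xcccccc } ) );
        scene.add( mesh );
    } );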
diff --git a/config/plugins/visualizations/csg/static/dat.gui.min.js b/config/plugins/visualizations/csg/static/dat.gui.min.js
new file mode 100644
index 0000000..17e4a3c
--- /dev/null
+++ b/config/plugins/visualizations/csg/static/dat.gui.min.js
@@ -0,0 +1,94 @@
+/**
+ * dat-gui JavaScript Controller Library
+ * http://code.google.com/p/dat-gui
+ *
+ * Copyright 2011 Data Arts Team, Google Creative Lab
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+var dat=dat||{};dat.gui=dat.gui||{};dat.utils=dat.utils||{};dat.controllers=dat.controllers||{};dat.dom=dat.dom||{};dat.color=dat.color||{};dat.utils.css=function(){return{load:function(e,a){var a=a||document,c=a.createElement("link");c.type="text/css";c.rel="stylesheet";c.href=e;a.getElementsByTagName("head")[0].appendChild(c)},inject:function(e,a){var a=a||document,c=document.createElement("style");c.type="text/css";c.innerHTML=e;a.getElementsByTagName("head")[0].appendChild(c)}}}();
+dat.utils.common=function(){var e=Array.prototype.forEach,a=Array.prototype.slice;return{BREAK:{},extend:function(c){this.each(a.call(arguments,1),function(a){for(var f in a)this.isUndefined(a[f])||(c[f]=a[f])},this);return c},defaults:function(c){this.each(a.call(arguments,1),function(a){for(var f in a)this.isUndefined(c[f])&&(c[f]=a[f])},this);return c},compose:function(){var c=a.call(arguments);return function(){for(var d=a.call(arguments),f=c.length-1;f>=0;f--)d=[c[f].apply(this,d)]; [...]
+each:function(a,d,f){if(e&&a.forEach===e)a.forEach(d,f);else if(a.length===a.length+0)for(var b=0,n=a.length;b<n;b++){if(b in a&&d.call(f,a[b],b)===this.BREAK)break}else for(b in a)if(d.call(f,a[b],b)===this.BREAK)break},defer:function(a){setTimeout(a,0)},toArray:function(c){return c.toArray?c.toArray():a.call(c)},isUndefined:function(a){return a===void 0},isNull:function(a){return a===null},isNaN:function(a){return a!==a},isArray:Array.isArray||function(a){return a.constructor===Array}, [...]
+Object(a)},isNumber:function(a){return a===a+0},isString:function(a){return a===a+""},isBoolean:function(a){return a===false||a===true},isFunction:function(a){return Object.prototype.toString.call(a)==="[object Function]"}}}();
+dat.controllers.Controller=function(e){var a=function(a,d){this.initialValue=a[d];this.domElement=document.createElement("div");this.object=a;this.property=d;this.__onFinishChange=this.__onChange=void 0};e.extend(a.prototype,{onChange:function(a){this.__onChange=a;return this},onFinishChange:function(a){this.__onFinishChange=a;return this},setValue:function(a){this.object[this.property]=a;this.__onChange&&this.__onChange.call(this,a);this.updateDisplay();return this},getValue:function(){ [...]
+updateDisplay:function(){return this},isModified:function(){return this.initialValue!==this.getValue()}});return a}(dat.utils.common);
+dat.dom.dom=function(e){function a(b){if(b==="0"||e.isUndefined(b))return 0;b=b.match(d);return!e.isNull(b)?parseFloat(b[1]):0}var c={};e.each({HTMLEvents:["change"],MouseEvents:["click","mousemove","mousedown","mouseup","mouseover"],KeyboardEvents:["keydown"]},function(b,a){e.each(b,function(b){c[b]=a})});var d=/(\d+(\.\d+)?)px/,f={makeSelectable:function(b,a){if(!(b===void 0||b.style===void 0))b.onselectstart=a?function(){return false}:function(){},b.style.MozUserSelect=a?"auto":"none" [...]
+a?"auto":"none",b.unselectable=a?"on":"off"},makeFullscreen:function(b,a,d){e.isUndefined(a)&&(a=true);e.isUndefined(d)&&(d=true);b.style.position="absolute";if(a)b.style.left=0,b.style.right=0;if(d)b.style.top=0,b.style.bottom=0},fakeEvent:function(b,a,d,f){var d=d||{},m=c[a];if(!m)throw Error("Event type "+a+" not supported.");var l=document.createEvent(m);switch(m){case "MouseEvents":l.initMouseEvent(a,d.bubbles||false,d.cancelable||true,window,d.clickCount||1,0,0,d.x||d.clientX||0,d. [...]
+0,false,false,false,false,0,null);break;case "KeyboardEvents":m=l.initKeyboardEvent||l.initKeyEvent;e.defaults(d,{cancelable:true,ctrlKey:false,altKey:false,shiftKey:false,metaKey:false,keyCode:void 0,charCode:void 0});m(a,d.bubbles||false,d.cancelable,window,d.ctrlKey,d.altKey,d.shiftKey,d.metaKey,d.keyCode,d.charCode);break;default:l.initEvent(a,d.bubbles||false,d.cancelable||true)}e.defaults(l,f);b.dispatchEvent(l)},bind:function(b,a,d,c){b.addEventListener?b.addEventListener(a,d,c||f [...]
+b.attachEvent("on"+a,d);return f},unbind:function(b,a,d,c){b.removeEventListener?b.removeEventListener(a,d,c||false):b.detachEvent&&b.detachEvent("on"+a,d);return f},addClass:function(b,a){if(b.className===void 0)b.className=a;else if(b.className!==a){var d=b.className.split(/ +/);if(d.indexOf(a)==-1)d.push(a),b.className=d.join(" ").replace(/^\s+/,"").replace(/\s+$/,"")}return f},removeClass:function(b,a){if(a){if(b.className!==void 0)if(b.className===a)b.removeAttribute("class");else{v [...]
+c=d.indexOf(a);if(c!=-1)d.splice(c,1),b.className=d.join(" ")}}else b.className=void 0;return f},hasClass:function(a,d){return RegExp("(?:^|\\s+)"+d+"(?:\\s+|$)").test(a.className)||false},getWidth:function(b){b=getComputedStyle(b);return a(b["border-left-width"])+a(b["border-right-width"])+a(b["padding-left"])+a(b["padding-right"])+a(b.width)},getHeight:function(b){b=getComputedStyle(b);return a(b["border-top-width"])+a(b["border-bottom-width"])+a(b["padding-top"])+a(b["padding-bottom"] [...]
+getOffset:function(a){var d={left:0,top:0};if(a.offsetParent){do d.left+=a.offsetLeft,d.top+=a.offsetTop;while(a=a.offsetParent)}return d},isActive:function(a){return a===document.activeElement&&(a.type||a.href)}};return f}(dat.utils.common);
+dat.controllers.OptionController=function(e,a,c){var d=function(f,b,e){d.superclass.call(this,f,b);var h=this;this.__select=document.createElement("select");if(c.isArray(e)){var j={};c.each(e,function(a){j[a]=a});e=j}c.each(e,function(a,b){var d=document.createElement("option");d.innerHTML=b;d.setAttribute("value",a);h.__select.appendChild(d)});this.updateDisplay();a.bind(this.__select,"change",function(){h.setValue(this.options[this.selectedIndex].value)});this.domElement.appendChild(th [...]
+d.superclass=e;c.extend(d.prototype,e.prototype,{setValue:function(a){a=d.superclass.prototype.setValue.call(this,a);this.__onFinishChange&&this.__onFinishChange.call(this,this.getValue());return a},updateDisplay:function(){this.__select.value=this.getValue();return d.superclass.prototype.updateDisplay.call(this)}});return d}(dat.controllers.Controller,dat.dom.dom,dat.utils.common);
+dat.controllers.NumberController=function(e,a){var c=function(d,f,b){c.superclass.call(this,d,f);b=b||{};this.__min=b.min;this.__max=b.max;this.__step=b.step;d=this.__impliedStep=a.isUndefined(this.__step)?this.initialValue==0?1:Math.pow(10,Math.floor(Math.log(this.initialValue)/Math.LN10))/10:this.__step;d=d.toString();this.__precision=d.indexOf(".")>-1?d.length-d.indexOf(".")-1:0};c.superclass=e;a.extend(c.prototype,e.prototype,{setValue:function(a){if(this.__min!==void 0&&a<this.__min [...]
+else if(this.__max!==void 0&&a>this.__max)a=this.__max;this.__step!==void 0&&a%this.__step!=0&&(a=Math.round(a/this.__step)*this.__step);return c.superclass.prototype.setValue.call(this,a)},min:function(a){this.__min=a;return this},max:function(a){this.__max=a;return this},step:function(a){this.__step=a;return this}});return c}(dat.controllers.Controller,dat.utils.common);
+dat.controllers.NumberControllerBox=function(e,a,c){var d=function(f,b,e){function h(){var a=parseFloat(l.__input.value);c.isNaN(a)||l.setValue(a)}function j(a){var b=o-a.clientY;l.setValue(l.getValue()+b*l.__impliedStep);o=a.clientY}function m(){a.unbind(window,"mousemove",j);a.unbind(window,"mouseup",m)}this.__truncationSuspended=false;d.superclass.call(this,f,b,e);var l=this,o;this.__input=document.createElement("input");this.__input.setAttribute("type","text");a.bind(this.__input,"ch [...]
+a.bind(this.__input,"blur",function(){h();l.__onFinishChange&&l.__onFinishChange.call(l,l.getValue())});a.bind(this.__input,"mousedown",function(b){a.bind(window,"mousemove",j);a.bind(window,"mouseup",m);o=b.clientY});a.bind(this.__input,"keydown",function(a){if(a.keyCode===13)l.__truncationSuspended=true,this.blur(),l.__truncationSuspended=false});this.updateDisplay();this.domElement.appendChild(this.__input)};d.superclass=e;c.extend(d.prototype,e.prototype,{updateDisplay:function(){var [...]
+b;if(this.__truncationSuspended)b=this.getValue();else{b=this.getValue();var c=Math.pow(10,this.__precision);b=Math.round(b*c)/c}a.value=b;return d.superclass.prototype.updateDisplay.call(this)}});return d}(dat.controllers.NumberController,dat.dom.dom,dat.utils.common);
+dat.controllers.NumberControllerSlider=function(e,a,c,d,f){var b=function(d,c,f,e,l){function o(b){b.preventDefault();var d=a.getOffset(g.__background),c=a.getWidth(g.__background);g.setValue(g.__min+(g.__max-g.__min)*((b.clientX-d.left)/(d.left+c-d.left)));return false}function y(){a.unbind(window,"mousemove",o);a.unbind(window,"mouseup",y);g.__onFinishChange&&g.__onFinishChange.call(g,g.getValue())}b.superclass.call(this,d,c,{min:f,max:e,step:l});var g=this;this.__background=document.c [...]
+this.__foreground=document.createElement("div");a.bind(this.__background,"mousedown",function(b){a.bind(window,"mousemove",o);a.bind(window,"mouseup",y);o(b)});a.addClass(this.__background,"slider");a.addClass(this.__foreground,"slider-fg");this.updateDisplay();this.__background.appendChild(this.__foreground);this.domElement.appendChild(this.__background)};b.superclass=e;b.useDefaultStyles=function(){c.inject(f)};d.extend(b.prototype,e.prototype,{updateDisplay:function(){this.__foregroun [...]
+(this.getValue()-this.__min)/(this.__max-this.__min)*100+"%";return b.superclass.prototype.updateDisplay.call(this)}});return b}(dat.controllers.NumberController,dat.dom.dom,dat.utils.css,dat.utils.common,".slider {\n  box-shadow: inset 0 2px 4px rgba(0,0,0,0.15);\n  height: 1em;\n  border-radius: 1em;\n  background-color: #eee;\n  padding: 0 0.5em;\n  overflow: hidden;\n}\n\n.slider-fg {\n  padding: 1px 0 2px 0;\n  background-color: #aaa;\n  height: 1em;\n  margin-left: -0.5em;\n  paddi [...]
+dat.controllers.FunctionController=function(e,a,c){var d=function(c,b,e){d.superclass.call(this,c,b);var h=this;this.__button=document.createElement("div");this.__button.innerHTML=e===void 0?"Fire":e;a.bind(this.__button,"click",function(a){a.preventDefault();h.fire();return false});a.addClass(this.__button,"button");this.domElement.appendChild(this.__button)};d.superclass=e;c.extend(d.prototype,e.prototype,{fire:function(){this.__onChange&&this.__onChange.call(this);this.__onFinishChang [...]
+this.getValue());this.getValue().call(this.object)}});return d}(dat.controllers.Controller,dat.dom.dom,dat.utils.common);
+dat.controllers.BooleanController=function(e,a,c){var d=function(c,b){d.superclass.call(this,c,b);var e=this;this.__prev=this.getValue();this.__checkbox=document.createElement("input");this.__checkbox.setAttribute("type","checkbox");a.bind(this.__checkbox,"change",function(){e.setValue(!e.__prev)},false);this.domElement.appendChild(this.__checkbox);this.updateDisplay()};d.superclass=e;c.extend(d.prototype,e.prototype,{setValue:function(a){a=d.superclass.prototype.setValue.call(this,a);th [...]
+this.__onFinishChange.call(this,this.getValue());this.__prev=this.getValue();return a},updateDisplay:function(){this.getValue()===true?(this.__checkbox.setAttribute("checked","checked"),this.__checkbox.checked=true):this.__checkbox.checked=false;return d.superclass.prototype.updateDisplay.call(this)}});return d}(dat.controllers.Controller,dat.dom.dom,dat.utils.common);
+dat.color.toString=function(e){return function(a){if(a.a==1||e.isUndefined(a.a)){for(a=a.hex.toString(16);a.length<6;)a="0"+a;return"#"+a}else return"rgba("+Math.round(a.r)+","+Math.round(a.g)+","+Math.round(a.b)+","+a.a+")"}}(dat.utils.common);
+dat.color.interpret=function(e,a){var c,d,f=[{litmus:a.isString,conversions:{THREE_CHAR_HEX:{read:function(a){a=a.match(/^#([A-F0-9])([A-F0-9])([A-F0-9])$/i);return a===null?false:{space:"HEX",hex:parseInt("0x"+a[1].toString()+a[1].toString()+a[2].toString()+a[2].toString()+a[3].toString()+a[3].toString())}},write:e},SIX_CHAR_HEX:{read:function(a){a=a.match(/^#([A-F0-9]{6})$/i);return a===null?false:{space:"HEX",hex:parseInt("0x"+a[1].toString())}},write:e},CSS_RGB:{read:function(a){a=a. [...]
+return a===null?false:{space:"RGB",r:parseFloat(a[1]),g:parseFloat(a[2]),b:parseFloat(a[3])}},write:e},CSS_RGBA:{read:function(a){a=a.match(/^rgba\(\s*(.+)\s*,\s*(.+)\s*,\s*(.+)\s*\,\s*(.+)\s*\)/);return a===null?false:{space:"RGB",r:parseFloat(a[1]),g:parseFloat(a[2]),b:parseFloat(a[3]),a:parseFloat(a[4])}},write:e}}},{litmus:a.isNumber,conversions:{HEX:{read:function(a){return{space:"HEX",hex:a,conversionName:"HEX"}},write:function(a){return a.hex}}}},{litmus:a.isArray,conversions:{RGB [...]
+3?false:{space:"RGB",r:a[0],g:a[1],b:a[2]}},write:function(a){return[a.r,a.g,a.b]}},RGBA_ARRAY:{read:function(a){return a.length!=4?false:{space:"RGB",r:a[0],g:a[1],b:a[2],a:a[3]}},write:function(a){return[a.r,a.g,a.b,a.a]}}}},{litmus:a.isObject,conversions:{RGBA_OBJ:{read:function(b){return a.isNumber(b.r)&&a.isNumber(b.g)&&a.isNumber(b.b)&&a.isNumber(b.a)?{space:"RGB",r:b.r,g:b.g,b:b.b,a:b.a}:false},write:function(a){return{r:a.r,g:a.g,b:a.b,a:a.a}}},RGB_OBJ:{read:function(b){return a. [...]
+a.isNumber(b.g)&&a.isNumber(b.b)?{space:"RGB",r:b.r,g:b.g,b:b.b}:false},write:function(a){return{r:a.r,g:a.g,b:a.b}}},HSVA_OBJ:{read:function(b){return a.isNumber(b.h)&&a.isNumber(b.s)&&a.isNumber(b.v)&&a.isNumber(b.a)?{space:"HSV",h:b.h,s:b.s,v:b.v,a:b.a}:false},write:function(a){return{h:a.h,s:a.s,v:a.v,a:a.a}}},HSV_OBJ:{read:function(b){return a.isNumber(b.h)&&a.isNumber(b.s)&&a.isNumber(b.v)?{space:"HSV",h:b.h,s:b.s,v:b.v}:false},write:function(a){return{h:a.h,s:a.s,v:a.v}}}}}];retur [...]
+false;var b=arguments.length>1?a.toArray(arguments):arguments[0];a.each(f,function(e){if(e.litmus(b))return a.each(e.conversions,function(e,f){c=e.read(b);if(d===false&&c!==false)return d=c,c.conversionName=f,c.conversion=e,a.BREAK}),a.BREAK});return d}}(dat.color.toString,dat.utils.common);
+dat.GUI=dat.gui.GUI=function(e,a,c,d,f,b,n,h,j,m,l,o,y,g,i){function q(a,b,r,c){if(b[r]===void 0)throw Error("Object "+b+' has no property "'+r+'"');c.color?b=new l(b,r):(b=[b,r].concat(c.factoryArgs),b=d.apply(a,b));if(c.before instanceof f)c.before=c.before.__li;t(a,b);g.addClass(b.domElement,"c");r=document.createElement("span");g.addClass(r,"property-name");r.innerHTML=b.property;var e=document.createElement("div");e.appendChild(r);e.appendChild(b.domElement);c=s(a,e,c.before);g.addC [...]
+g.addClass(c,typeof b.getValue());p(a,c,b);a.__controllers.push(b);return b}function s(a,b,d){var c=document.createElement("li");b&&c.appendChild(b);d?a.__ul.insertBefore(c,d):a.__ul.appendChild(c);a.onResize();return c}function p(a,d,c){c.__li=d;c.__gui=a;i.extend(c,{options:function(b){if(arguments.length>1)return c.remove(),q(a,c.object,c.property,{before:c.__li.nextElementSibling,factoryArgs:[i.toArray(arguments)]});if(i.isArray(b)||i.isObject(b))return c.remove(),q(a,c.o [...]
+{before:c.__li.nextElementSibling,factoryArgs:[b]})},name:function(a){c.__li.firstElementChild.firstElementChild.innerHTML=a;return c},listen:function(){c.__gui.listen(c);return c},remove:function(){c.__gui.remove(c);return c}});if(c instanceof j){var e=new h(c.object,c.property,{min:c.__min,max:c.__max,step:c.__step});i.each(["updateDisplay","onChange","onFinishChange"],function(a){var b=c[a],H=e[a];c[a]=e[a]=function(){var a=Array.prototype.slice.call(arguments);b.apply(c,a);return H.a [...]
+g.addClass(d,"has-slider");c.domElement.insertBefore(e.domElement,c.domElement.firstElementChild)}else if(c instanceof h){var f=function(b){return i.isNumber(c.__min)&&i.isNumber(c.__max)?(c.remove(),q(a,c.object,c.property,{before:c.__li.nextElementSibling,factoryArgs:[c.__min,c.__max,c.__step]})):b};c.min=i.compose(f,c.min);c.max=i.compose(f,c.max)}else if(c instanceof b)g.bind(d,"click",function(){g.fakeEvent(c.__checkbox,"click")}),g.bind(c.__checkbox,"click",function(a){a.stopPropag [...]
+else if(c instanceof n)g.bind(d,"click",function(){g.fakeEvent(c.__button,"click")}),g.bind(d,"mouseover",function(){g.addClass(c.__button,"hover")}),g.bind(d,"mouseout",function(){g.removeClass(c.__button,"hover")});else if(c instanceof l)g.addClass(d,"color"),c.updateDisplay=i.compose(function(a){d.style.borderLeftColor=c.__color.toString();return a},c.updateDisplay),c.updateDisplay();c.setValue=i.compose(function(b){a.getRoot().__preset_select&&c.isModified()&&B(a.getRoot(),true);retu [...]
+function t(a,b){var c=a.getRoot(),d=c.__rememberedObjects.indexOf(b.object);if(d!=-1){var e=c.__rememberedObjectIndecesToControllers[d];e===void 0&&(e={},c.__rememberedObjectIndecesToControllers[d]=e);e[b.property]=b;if(c.load&&c.load.remembered){c=c.load.remembered;if(c[a.preset])c=c[a.preset];else if(c[w])c=c[w];else return;if(c[d]&&c[d][b.property]!==void 0)d=c[d][b.property],b.initialValue=d,b.setValue(d)}}}function I(a){var b=a.__save_row=document.createElement("li");g.addClass(a.do [...]
+"has-save");a.__ul.insertBefore(b,a.__ul.firstChild);g.addClass(b,"save-row");var c=document.createElement("span");c.innerHTML=" ";g.addClass(c,"button gears");var d=document.createElement("span");d.innerHTML="Save";g.addClass(d,"button");g.addClass(d,"save");var e=document.createElement("span");e.innerHTML="New";g.addClass(e,"button");g.addClass(e,"save-as");var f=document.createElement("span");f.innerHTML="Revert";g.addClass(f,"button");g.addClass(f,"revert");var m=a.__preset_sele [...]
+a.load&&a.load.remembered?i.each(a.load.remembered,function(b,c){C(a,c,c==a.preset)}):C(a,w,false);g.bind(m,"change",function(){for(var b=0;b<a.__preset_select.length;b++)a.__preset_select[b].innerHTML=a.__preset_select[b].value;a.preset=this.value});b.appendChild(m);b.appendChild(c);b.appendChild(d);b.appendChild(e);b.appendChild(f);if(u){var b=document.getElementById("dg-save-locally"),l=document.getElementById("dg-local-explain");b.style.display="block";b=document.getElementById("dg-l [...]
+localStorage.getItem(document.location.href+".isLocal")==="true"&&b.setAttribute("checked","checked");var o=function(){l.style.display=a.useLocalStorage?"block":"none"};o();g.bind(b,"change",function(){a.useLocalStorage=!a.useLocalStorage;o()})}var h=document.getElementById("dg-new-constructor");g.bind(h,"keydown",function(a){a.metaKey&&(a.which===67||a.keyCode==67)&&x.hide()});g.bind(c,"click",function(){h.innerHTML=JSON.stringify(a.getSaveObject(),void 0,2);x.show();h.focus();h.select( [...]
+"click",function(){a.save()});g.bind(e,"click",function(){var b=prompt("Enter a new preset name.");b&&a.saveAs(b)});g.bind(f,"click",function(){a.revert()})}function J(a){function b(f){f.preventDefault();e=f.clientX;g.addClass(a.__closeButton,k.CLASS_DRAG);g.bind(window,"mousemove",c);g.bind(window,"mouseup",d);return false}function c(b){b.preventDefault();a.width+=e-b.clientX;a.onResize();e=b.clientX;return false}function d(){g.removeClass(a.__closeButton,k.CLASS_DRAG);g.unbind(window," [...]
+c);g.unbind(window,"mouseup",d)}a.__resize_handle=document.createElement("div");i.extend(a.__resize_handle.style,{width:"6px",marginLeft:"-3px",height:"200px",cursor:"ew-resize",position:"absolute"});var e;g.bind(a.__resize_handle,"mousedown",b);g.bind(a.__closeButton,"mousedown",b);a.domElement.insertBefore(a.__resize_handle,a.domElement.firstElementChild)}function D(a,b){a.domElement.style.width=b+"px";if(a.__save_row&&a.autoPlace)a.__save_row.style.width=b+"px";if(a.__closeButton)a.__ [...]
+b+"px"}function z(a,b){var c={};i.each(a.__rememberedObjects,function(d,e){var f={};i.each(a.__rememberedObjectIndecesToControllers[e],function(a,c){f[c]=b?a.initialValue:a.getValue()});c[e]=f});return c}function C(a,b,c){var d=document.createElement("option");d.innerHTML=b;d.value=b;a.__preset_select.appendChild(d);if(c)a.__preset_select.selectedIndex=a.__preset_select.length-1}function B(a,b){var c=a.__preset_select[a.__preset_select.selectedIndex];c.innerHTML=b?c.value+"*":c.value}fun [...]
+0&&o(function(){E(a)});i.each(a,function(a){a.updateDisplay()})}e.inject(c);var w="Default",u;try{u="localStorage"in window&&window.localStorage!==null}catch(K){u=false}var x,F=true,v,A=false,G=[],k=function(a){function b(){localStorage.setItem(document.location.href+".gui",JSON.stringify(d.getSaveObject()))}function c(){var a=d.getRoot();a.width+=1;i.defer(function(){a.width-=1})}var d=this;this.domElement=document.createElement("div");this.__ul=document.createElement("ul");this.domElem [...]
+g.addClass(this.domElement,"dg");this.__folders={};this.__controllers=[];this.__rememberedObjects=[];this.__rememberedObjectIndecesToControllers=[];this.__listening=[];a=a||{};a=i.defaults(a,{autoPlace:true,width:k.DEFAULT_WIDTH});a=i.defaults(a,{resizable:a.autoPlace,hideable:a.autoPlace});if(i.isUndefined(a.load))a.load={preset:w};else if(a.preset)a.load.preset=a.preset;i.isUndefined(a.parent)&&a.hideable&&G.push(this);a.resizable=i.isUndefined(a.parent)&&a.resizable;if(a.autoPlace&&i. [...]
+true;var e=u&&localStorage.getItem(document.location.href+".isLocal")==="true";Object.defineProperties(this,{parent:{get:function(){return a.parent}},scrollable:{get:function(){return a.scrollable}},autoPlace:{get:function(){return a.autoPlace}},preset:{get:function(){return d.parent?d.getRoot().preset:a.load.preset},set:function(b){d.parent?d.getRoot().preset=b:a.load.preset=b;for(b=0;b<this.__preset_select.length;b++)if(this.__preset_select[b].value==this.preset)this.__preset_select.se [...]
+b;d.revert()}},width:{get:function(){return a.width},set:function(b){a.width=b;D(d,b)}},name:{get:function(){return a.name},set:function(b){a.name=b;if(m)m.innerHTML=a.name}},closed:{get:function(){return a.closed},set:function(b){a.closed=b;a.closed?g.addClass(d.__ul,k.CLASS_CLOSED):g.removeClass(d.__ul,k.CLASS_CLOSED);this.onResize();if(d.__closeButton)d.__closeButton.innerHTML=b?k.TEXT_OPEN:k.TEXT_CLOSED}},load:{get:function(){return a.load}},useLocalStorage:{get:function(){return e}, [...]
+((e=a)?g.bind(window,"unload",b):g.unbind(window,"unload",b),localStorage.setItem(document.location.href+".isLocal",a))}}});if(i.isUndefined(a.parent)){a.closed=false;g.addClass(this.domElement,k.CLASS_MAIN);g.makeSelectable(this.domElement,false);if(u&&e){d.useLocalStorage=true;var f=localStorage.getItem(document.location.href+".gui");if(f)a.load=JSON.parse(f)}this.__closeButton=document.createElement("div");this.__closeButton.innerHTML=k.TEXT_CLOSED;g.addClass(this.__closeButton,k.CLAS [...]
+this.domElement.appendChild(this.__closeButton);g.bind(this.__closeButton,"click",function(){d.closed=!d.closed})}else{if(a.closed===void 0)a.closed=true;var m=document.createTextNode(a.name);g.addClass(m,"controller-name");f=s(d,m);g.addClass(this.__ul,k.CLASS_CLOSED);g.addClass(f,"title");g.bind(f,"click",function(a){a.preventDefault();d.closed=!d.closed;return false});if(!a.closed)this.closed=false}a.autoPlace&&(i.isUndefined(a.parent)&&(F&&(v=document.createElement("div"),g.addClass( [...]
+k.CLASS_AUTO_PLACE_CONTAINER),document.body.appendChild(v),F=false),v.appendChild(this.domElement),g.addClass(this.domElement,k.CLASS_AUTO_PLACE)),this.parent||D(d,a.width));g.bind(window,"resize",function(){d.onResize()});g.bind(this.__ul,"webkitTransitionEnd",function(){d.onResize()});g.bind(this.__ul,"transitionend",function(){d.onResize()});g.bind(this.__ul,"oTransitionEnd",function(){d.onResize()});this.onResize();a.resizable&&J(this);d.getRoot();a.parent||c()};k.toggleHide=function [...]
+function(a){a.domElement.style.zIndex=A?-999:999;a.domElement.style.opacity=A?0:1})};k.CLASS_AUTO_PLACE="a";k.CLASS_AUTO_PLACE_CONTAINER="ac";k.CLASS_MAIN="main";k.CLASS_CONTROLLER_ROW="cr";k.CLASS_TOO_TALL="taller-than-window";k.CLASS_CLOSED="closed";k.CLASS_CLOSE_BUTTON="close-button";k.CLASS_DRAG="drag";k.DEFAULT_WIDTH=245;k.TEXT_CLOSED="Close Controls";k.TEXT_OPEN="Open Controls";g.bind(window,"keydown",function(a){document.activeElement.type!=="text"&&(a.which===72||a.keyCode==72)&& [...]
+false);i.extend(k.prototype,{add:function(a,b){return q(this,a,b,{factoryArgs:Array.prototype.slice.call(arguments,2)})},addColor:function(a,b){return q(this,a,b,{color:true})},remove:function(a){this.__ul.removeChild(a.__li);this.__controllers.splice(this.__controllers.indexOf(a),1);var b=this;i.defer(function(){b.onResize()})},destroy:function(){this.autoPlace&&v.removeChild(this.domElement)},addFolder:function(a){if(this.__folders[a]!==void 0)throw Error('You already have a folder in t [...]
+a+'"');var b={name:a,parent:this};b.autoPlace=this.autoPlace;if(this.load&&this.load.folders&&this.load.folders[a])b.closed=this.load.folders[a].closed,b.load=this.load.folders[a];b=new k(b);this.__folders[a]=b;a=s(this,b.domElement);g.addClass(a,"folder");return b},open:function(){this.closed=false},close:function(){this.closed=true},onResize:function(){var a=this.getRoot();if(a.scrollable){var b=g.getOffset(a.__ul).top,c=0;i.each(a.__ul.childNodes,function(b){a.autoPlace&&b===a.__save_ [...]
+g.getHeight(b))});window.innerHeight-b-20<c?(g.addClass(a.domElement,k.CLASS_TOO_TALL),a.__ul.style.height=window.innerHeight-b-20+"px"):(g.removeClass(a.domElement,k.CLASS_TOO_TALL),a.__ul.style.height="auto")}a.__resize_handle&&i.defer(function(){a.__resize_handle.style.height=a.__ul.offsetHeight+"px"});if(a.__closeButton)a.__closeButton.style.width=a.width+"px"},remember:function(){if(i.isUndefined(x))x=new y,x.domElement.innerHTML=a;if(this.parent)throw Error("You can only call remem [...]
+var b=this;i.each(Array.prototype.slice.call(arguments),function(a){b.__rememberedObjects.length==0&&I(b);b.__rememberedObjects.indexOf(a)==-1&&b.__rememberedObjects.push(a)});this.autoPlace&&D(this,this.width)},getRoot:function(){for(var a=this;a.parent;)a=a.parent;return a},getSaveObject:function(){var a=this.load;a.closed=this.closed;if(this.__rememberedObjects.length>0){a.preset=this.preset;if(!a.remembered)a.remembered={};a.remembered[this.preset]=z(this)}a.folders={};i.each(this.__ [...]
+c){a.folders[c]=b.getSaveObject()});return a},save:function(){if(!this.load.remembered)this.load.remembered={};this.load.remembered[this.preset]=z(this);B(this,false)},saveAs:function(a){if(!this.load.remembered)this.load.remembered={},this.load.remembered[w]=z(this,true);this.load.remembered[a]=z(this);this.preset=a;C(this,a,true)},revert:function(a){i.each(this.__controllers,function(b){this.getRoot().load.remembered?t(a||this.getRoot(),b):b.setValue(b.initialValue)},this);i.each(this. [...]
+function(a){a.revert(a)});a||B(this.getRoot(),false)},listen:function(a){var b=this.__listening.length==0;this.__listening.push(a);b&&E(this.__listening)}});return k}(dat.utils.css,'<div id="dg-save" class="dg dialogue">\n\n  Here\'s the new load parameter for your <code>GUI</code>\'s constructor:\n\n  <textarea id="dg-new-constructor"></textarea>\n\n  <div id="dg-save-locally">\n\n    <input id="dg-local-storage" type="checkbox"/> Automatically save\n    values to <code>localStorage</co [...]
+".dg ul{list-style:none;margin:0;padding:0;width:100%;clear:both}.dg.ac{position:fixed;top:0;left:0;right:0;height:0;z-index:0}.dg:not(.ac) .main{overflow:hidden}.dg.main{-webkit-transition:opacity 0.1s linear;-o-transition:opacity 0.1s linear;-moz-transition:opacity 0.1s linear;transition:opacity 0.1s linear}.dg.main.taller-than-window{overflow-y:auto}.dg.main.taller-than-window .close-button{opacity:1;margin-top:-1px;border-top:1px solid #2c2c2c}.dg.main ul.closed .close-button{opacity [...]
+dat.controllers.factory=function(e,a,c,d,f,b,n){return function(h,j,m,l){var o=h[j];if(n.isArray(m)||n.isObject(m))return new e(h,j,m);if(n.isNumber(o))return n.isNumber(m)&&n.isNumber(l)?new c(h,j,m,l):new a(h,j,{min:m,max:l});if(n.isString(o))return new d(h,j);if(n.isFunction(o))return new f(h,j,"");if(n.isBoolean(o))return new b(h,j)}}(dat.controllers.OptionController,dat.controllers.NumberControllerBox,dat.controllers.NumberControllerSlider,dat.controllers.StringController=function(e [...]
+function(c,b){function e(){h.setValue(h.__input.value)}d.superclass.call(this,c,b);var h=this;this.__input=document.createElement("input");this.__input.setAttribute("type","text");a.bind(this.__input,"keyup",e);a.bind(this.__input,"change",e);a.bind(this.__input,"blur",function(){h.__onFinishChange&&h.__onFinishChange.call(h,h.getValue())});a.bind(this.__input,"keydown",function(a){a.keyCode===13&&this.blur()});this.updateDisplay();this.domElement.appendChild(this.__input)};d.superclass= [...]
+e.prototype,{updateDisplay:function(){if(!a.isActive(this.__input))this.__input.value=this.getValue();return d.superclass.prototype.updateDisplay.call(this)}});return d}(dat.controllers.Controller,dat.dom.dom,dat.utils.common),dat.controllers.FunctionController,dat.controllers.BooleanController,dat.utils.common),dat.controllers.Controller,dat.controllers.BooleanController,dat.controllers.FunctionController,dat.controllers.NumberControllerBox,dat.controllers.NumberControllerSlider,dat.con [...]
+dat.controllers.ColorController=function(e,a,c,d,f){function b(a,b,c,d){a.style.background="";f.each(j,function(e){a.style.cssText+="background: "+e+"linear-gradient("+b+", "+c+" 0%, "+d+" 100%); "})}function n(a){a.style.background="";a.style.cssText+="background: -moz-linear-gradient(top,  #ff0000 0%, #ff00ff 17%, #0000ff 34%, #00ffff 50%, #00ff00 67%, #ffff00 84%, #ff0000 100%);";a.style.cssText+="background: -webkit-linear-gradient(top,  #ff0000 0%,#ff00ff 17%,#0000ff 34%,#00ffff 50% [...]
+a.style.cssText+="background: -o-linear-gradient(top,  #ff0000 0%,#ff00ff 17%,#0000ff 34%,#00ffff 50%,#00ff00 67%,#ffff00 84%,#ff0000 100%);";a.style.cssText+="background: -ms-linear-gradient(top,  #ff0000 0%,#ff00ff 17%,#0000ff 34%,#00ffff 50%,#00ff00 67%,#ffff00 84%,#ff0000 100%);";a.style.cssText+="background: linear-gradient(top,  #ff0000 0%,#ff00ff 17%,#0000ff 34%,#00ffff 50%,#00ff00 67%,#ffff00 84%,#ff0000 100%);"}var h=function(e,l){function o(b){q(b);a.bind(window,"mousemove",q); [...]
+"mouseup",j)}function j(){a.unbind(window,"mousemove",q);a.unbind(window,"mouseup",j)}function g(){var a=d(this.value);a!==false?(p.__color.__state=a,p.setValue(p.__color.toOriginal())):this.value=p.__color.toString()}function i(){a.unbind(window,"mousemove",s);a.unbind(window,"mouseup",i)}function q(b){b.preventDefault();var c=a.getWidth(p.__saturation_field),d=a.getOffset(p.__saturation_field),e=(b.clientX-d.left+document.body.scrollLeft)/c,b=1-(b.clientY-d.top+document.body.scrollTop) [...]
+1:b<0&&(b=0);e>1?e=1:e<0&&(e=0);p.__color.v=b;p.__color.s=e;p.setValue(p.__color.toOriginal());return false}function s(b){b.preventDefault();var c=a.getHeight(p.__hue_field),d=a.getOffset(p.__hue_field),b=1-(b.clientY-d.top+document.body.scrollTop)/c;b>1?b=1:b<0&&(b=0);p.__color.h=b*360;p.setValue(p.__color.toOriginal());return false}h.superclass.call(this,e,l);this.__color=new c(this.getValue());this.__temp=new c(0);var p=this;this.domElement=document.createElement("div");a.makeSelectab [...]
+false);this.__selector=document.createElement("div");this.__selector.className="selector";this.__saturation_field=document.createElement("div");this.__saturation_field.className="saturation-field";this.__field_knob=document.createElement("div");this.__field_knob.className="field-knob";this.__field_knob_border="2px solid ";this.__hue_knob=document.createElement("div");this.__hue_knob.className="hue-knob";this.__hue_field=document.createElement("div");this.__hue_field.className="hue-field" [...]
+document.createElement("input");this.__input.type="text";this.__input_textShadow="0 1px 1px ";a.bind(this.__input,"keydown",function(a){a.keyCode===13&&g.call(this)});a.bind(this.__input,"blur",g);a.bind(this.__selector,"mousedown",function(){a.addClass(this,"drag").bind(window,"mouseup",function(){a.removeClass(p.__selector,"drag")})});var t=document.createElement("div");f.extend(this.__selector.style,{width:"122px",height:"102px",padding:"3px",backgroundColor:"#222",boxShadow:"0px 1px  [...]
+f.extend(this.__field_knob.style,{position:"absolute",width:"12px",height:"12px",border:this.__field_knob_border+(this.__color.v<0.5?"#fff":"#000"),boxShadow:"0px 1px 3px rgba(0,0,0,0.5)",borderRadius:"12px",zIndex:1});f.extend(this.__hue_knob.style,{position:"absolute",width:"15px",height:"2px",borderRight:"4px solid #fff",zIndex:1});f.extend(this.__saturation_field.style,{width:"100px",height:"100px",border:"1px solid #555",marginRight:"3px",display:"inline-block",cursor:"pointer"});f. [...]
+{width:"100%",height:"100%",background:"none"});b(t,"top","rgba(0,0,0,0)","#000");f.extend(this.__hue_field.style,{width:"15px",height:"100px",display:"inline-block",border:"1px solid #555",cursor:"ns-resize"});n(this.__hue_field);f.extend(this.__input.style,{outline:"none",textAlign:"center",color:"#fff",border:0,fontWeight:"bold",textShadow:this.__input_textShadow+"rgba(0,0,0,0.7)"});a.bind(this.__saturation_field,"mousedown",o);a.bind(this.__field_knob,"mousedown",o);a.bind(this.__hue [...]
+function(b){s(b);a.bind(window,"mousemove",s);a.bind(window,"mouseup",i)});this.__saturation_field.appendChild(t);this.__selector.appendChild(this.__field_knob);this.__selector.appendChild(this.__saturation_field);this.__selector.appendChild(this.__hue_field);this.__hue_field.appendChild(this.__hue_knob);this.domElement.appendChild(this.__input);this.domElement.appendChild(this.__selector);this.updateDisplay()};h.superclass=e;f.extend(h.prototype,e.prototype,{updateDisplay:function(){var [...]
+if(a!==false){var e=false;f.each(c.COMPONENTS,function(b){if(!f.isUndefined(a[b])&&!f.isUndefined(this.__color.__state[b])&&a[b]!==this.__color.__state[b])return e=true,{}},this);e&&f.extend(this.__color.__state,a)}f.extend(this.__temp.__state,this.__color.__state);this.__temp.a=1;var h=this.__color.v<0.5||this.__color.s>0.5?255:0,j=255-h;f.extend(this.__field_knob.style,{marginLeft:100*this.__color.s-7+"px",marginTop:100*(1-this.__color.v)-7+"px",backgroundColor:this.__temp.toString(),b [...]
+"rgb("+h+","+h+","+h+")"});this.__hue_knob.style.marginTop=(1-this.__color.h/360)*100+"px";this.__temp.s=1;this.__temp.v=1;b(this.__saturation_field,"left","#fff",this.__temp.toString());f.extend(this.__input.style,{backgroundColor:this.__input.value=this.__color.toString(),color:"rgb("+h+","+h+","+h+")",textShadow:this.__input_textShadow+"rgba("+j+","+j+","+j+",.7)"})}});var j=["-moz-","-o-","-webkit-","-ms-",""];return h}(dat.controllers.Controller,dat.dom.dom,dat.color.Color=function( [...]
+b,c){Object.defineProperty(a,b,{get:function(){if(this.__state.space==="RGB")return this.__state[b];n(this,b,c);return this.__state[b]},set:function(a){if(this.__state.space!=="RGB")n(this,b,c),this.__state.space="RGB";this.__state[b]=a}})}function b(a,b){Object.defineProperty(a,b,{get:function(){if(this.__state.space==="HSV")return this.__state[b];h(this);return this.__state[b]},set:function(a){if(this.__state.space!=="HSV")h(this),this.__state.space="HSV";this.__state[b]=a}})}function  [...]
+"HEX")b.__state[c]=a.component_from_hex(b.__state.hex,e);else if(b.__state.space==="HSV")d.extend(b.__state,a.hsv_to_rgb(b.__state.h,b.__state.s,b.__state.v));else throw"Corrupted color state";}function h(b){var c=a.rgb_to_hsv(b.r,b.g,b.b);d.extend(b.__state,{s:c.s,v:c.v});if(d.isNaN(c.h)){if(d.isUndefined(b.__state.h))b.__state.h=0}else b.__state.h=c.h}var j=function(){this.__state=e.apply(this,arguments);if(this.__state===false)throw"Failed to interpret color arguments";this.__state.a= [...]
+1};j.COMPONENTS="r,g,b,h,s,v,hex,a".split(",");d.extend(j.prototype,{toString:function(){return c(this)},toOriginal:function(){return this.__state.conversion.write(this)}});f(j.prototype,"r",2);f(j.prototype,"g",1);f(j.prototype,"b",0);b(j.prototype,"h");b(j.prototype,"s");b(j.prototype,"v");Object.defineProperty(j.prototype,"a",{get:function(){return this.__state.a},set:function(a){this.__state.a=a}});Object.defineProperty(j.prototype,"hex",{get:function(){if(this.__state.space!=="HEX" [...]
+a.rgb_to_hex(this.r,this.g,this.b);return this.__state.hex},set:function(a){this.__state.space="HEX";this.__state.hex=a}});return j}(dat.color.interpret,dat.color.math=function(){var e;return{hsv_to_rgb:function(a,c,d){var e=a/60-Math.floor(a/60),b=d*(1-c),n=d*(1-e*c),c=d*(1-(1-e)*c),a=[[d,c,b],[n,d,b],[b,d,c],[b,n,d],[c,b,d],[d,b,n]][Math.floor(a/60)%6];return{r:a[0]*255,g:a[1]*255,b:a[2]*255}},rgb_to_hsv:function(a,c,d){var e=Math.min(a,c,d),b=Math.max(a,c,d),e=b-e;if(b==0)return{h:NaN [...]
+a=a==b?(c-d)/e:c==b?2+(d-a)/e:4+(a-c)/e;a/=6;a<0&&(a+=1);return{h:a*360,s:e/b,v:b/255}},rgb_to_hex:function(a,c,d){a=this.hex_with_component(0,2,a);a=this.hex_with_component(a,1,c);return a=this.hex_with_component(a,0,d)},component_from_hex:function(a,c){return a>>c*8&255},hex_with_component:function(a,c,d){return d<<(e=c*8)|a&~(255<<e)}}}(),dat.color.toString,dat.utils.common),dat.color.interpret,dat.utils.common),dat.utils.requestAnimationFrame=function(){return window.webkitRequestAni [...]
+window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(e){window.setTimeout(e,1E3/60)}}(),dat.dom.CenteredDiv=function(e,a){var c=function(){this.backgroundElement=document.createElement("div");a.extend(this.backgroundElement.style,{backgroundColor:"rgba(0,0,0,0.8)",top:0,left:0,display:"none",zIndex:"1000",opacity:0,WebkitTransition:"opacity 0.2s linear"});e.makeFullscreen(this.backgroundElement);this.backgroundElement.style.position="fi [...]
+document.createElement("div");a.extend(this.domElement.style,{position:"fixed",display:"none",zIndex:"1001",opacity:0,WebkitTransition:"-webkit-transform 0.2s ease-out, opacity 0.2s linear"});document.body.appendChild(this.backgroundElement);document.body.appendChild(this.domElement);var c=this;e.bind(this.backgroundElement,"click",function(){c.hide()})};c.prototype.show=function(){var c=this;this.backgroundElement.style.display="block";this.domElement.style.display="block";this.domEleme [...]
+0;this.domElement.style.webkitTransform="scale(1.1)";this.layout();a.defer(function(){c.backgroundElement.style.opacity=1;c.domElement.style.opacity=1;c.domElement.style.webkitTransform="scale(1)"})};c.prototype.hide=function(){var a=this,c=function(){a.domElement.style.display="none";a.backgroundElement.style.display="none";e.unbind(a.domElement,"webkitTransitionEnd",c);e.unbind(a.domElement,"transitionend",c);e.unbind(a.domElement,"oTransitionEnd",c)};e.bind(this.domElement,"webkitTran [...]
+c);e.bind(this.domElement,"transitionend",c);e.bind(this.domElement,"oTransitionEnd",c);this.backgroundElement.style.opacity=0;this.domElement.style.opacity=0;this.domElement.style.webkitTransform="scale(1.1)"};c.prototype.layout=function(){this.domElement.style.left=window.innerWidth/2-e.getWidth(this.domElement)/2+"px";this.domElement.style.top=window.innerHeight/2-e.getHeight(this.domElement)/2+"px"};return c}(dat.dom.dom,dat.utils.common),dat.dom.dom,dat.utils.common);
\ No newline at end of file
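The file above vendors a minified dat.gui build. As a quick orientation for reviewers, here is a minimal usage sketch against the API that build exposes (add, addColor, addFolder, onChange); the `params` object and its property names are illustrative only, not part of the import:

    // Illustrative only: exercises the vendored dat.gui build above.
    var params = {
        speed: 0.5,                                  // number -> slider + number box
        label: "demo",                               // string -> text input
        visible: true,                               // boolean -> checkbox
        tint: "#ff0000",                             // color -> ColorController swatch
        reset: function () { params.speed = 0.5; }   // function -> button
    };
    var gui = new dat.GUI();
    gui.add(params, "speed", 0, 1).name("Speed")
       .onChange(function (v) { console.log("speed:", v); });
    gui.add(params, "label");
    gui.add(params, "visible");
    gui.addColor(params, "tint");
    var folder = gui.addFolder("Actions");           // folders are nested GUI instances
    folder.add(params, "reset");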
diff --git a/config/plugins/visualizations/csg/static/three.min.js b/config/plugins/visualizations/csg/static/three.min.js
new file mode 100644
index 0000000..85e5596
--- /dev/null
+++ b/config/plugins/visualizations/csg/static/three.min.js
@@ -0,0 +1,835 @@
+// threejs.org/license
+'use strict';var THREE={REVISION:"71"};"object"===typeof module&&(module.exports=THREE);void 0===Math.sign&&(Math.sign=function(a){return 0>a?-1:0<a?1:+a});THREE.log=function(){console.log.apply(console,arguments)};THREE.warn=function(){console.warn.apply(console,arguments)};THREE.error=function(){console.error.apply(console,arguments)};THREE.MOUSE={LEFT:0,MIDDLE:1,RIGHT:2};THREE.CullFaceNone=0;THREE.CullFaceBack=1;THREE.CullFaceFront=2;THREE.CullFaceFrontBack=3;THREE.FrontFaceDirectionCW=0;
+THREE.FrontFaceDirectionCCW=1;THREE.BasicShadowMap=0;THREE.PCFShadowMap=1;THREE.PCFSoftShadowMap=2;THREE.FrontSide=0;THREE.BackSide=1;THREE.DoubleSide=2;THREE.NoShading=0;THREE.FlatShading=1;THREE.SmoothShading=2;THREE.NoColors=0;THREE.FaceColors=1;THREE.VertexColors=2;THREE.NoBlending=0;THREE.NormalBlending=1;THREE.AdditiveBlending=2;THREE.SubtractiveBlending=3;THREE.MultiplyBlending=4;THREE.CustomBlending=5;THREE.AddEquation=100;THREE.SubtractEquation=101;THREE.ReverseSubtractEquation=102;
+THREE.MinEquation=103;THREE.MaxEquation=104;THREE.ZeroFactor=200;THREE.OneFactor=201;THREE.SrcColorFactor=202;THREE.OneMinusSrcColorFactor=203;THREE.SrcAlphaFactor=204;THREE.OneMinusSrcAlphaFactor=205;THREE.DstAlphaFactor=206;THREE.OneMinusDstAlphaFactor=207;THREE.DstColorFactor=208;THREE.OneMinusDstColorFactor=209;THREE.SrcAlphaSaturateFactor=210;THREE.MultiplyOperation=0;THREE.MixOperation=1;THREE.AddOperation=2;THREE.UVMapping=300;THREE.CubeReflectionMapping=301;THREE.CubeRefractionMa [...]
+THREE.EquirectangularReflectionMapping=303;THREE.EquirectangularRefractionMapping=304;THREE.SphericalReflectionMapping=305;THREE.RepeatWrapping=1E3;THREE.ClampToEdgeWrapping=1001;THREE.MirroredRepeatWrapping=1002;THREE.NearestFilter=1003;THREE.NearestMipMapNearestFilter=1004;THREE.NearestMipMapLinearFilter=1005;THREE.LinearFilter=1006;THREE.LinearMipMapNearestFilter=1007;THREE.LinearMipMapLinearFilter=1008;THREE.UnsignedByteType=1009;THREE.ByteType=1010;THREE.ShortType=1011;
+THREE.UnsignedShortType=1012;THREE.IntType=1013;THREE.UnsignedIntType=1014;THREE.FloatType=1015;THREE.HalfFloatType=1025;THREE.UnsignedShort4444Type=1016;THREE.UnsignedShort5551Type=1017;THREE.UnsignedShort565Type=1018;THREE.AlphaFormat=1019;THREE.RGBFormat=1020;THREE.RGBAFormat=1021;THREE.LuminanceFormat=1022;THREE.LuminanceAlphaFormat=1023;THREE.RGBEFormat=THREE.RGBAFormat;THREE.RGB_S3TC_DXT1_Format=2001;THREE.RGBA_S3TC_DXT1_Format=2002;THREE.RGBA_S3TC_DXT3_Format=2003;
+THREE.RGBA_S3TC_DXT5_Format=2004;THREE.RGB_PVRTC_4BPPV1_Format=2100;THREE.RGB_PVRTC_2BPPV1_Format=2101;THREE.RGBA_PVRTC_4BPPV1_Format=2102;THREE.RGBA_PVRTC_2BPPV1_Format=2103;
+THREE.Projector=function(){THREE.error("THREE.Projector has been moved to /examples/js/renderers/Projector.js.");this.projectVector=function(a,b){THREE.warn("THREE.Projector: .projectVector() is now vector.project().");a.project(b)};this.unprojectVector=function(a,b){THREE.warn("THREE.Projector: .unprojectVector() is now vector.unproject().");a.unproject(b)};this.pickingRay=function(a,b){THREE.error("THREE.Projector: .pickingRay() is now raycaster.setFromCamera().")}};
+THREE.CanvasRenderer=function(){THREE.error("THREE.CanvasRenderer has been moved to /examples/js/renderers/CanvasRenderer.js");this.domElement=document.createElement("canvas");this.clear=function(){};this.render=function(){};this.setClearColor=function(){};this.setSize=function(){}};THREE.Color=function(a){return 3===arguments.length?this.setRGB(arguments[0],arguments[1],arguments[2]):this.set(a)};
+THREE.Color.prototype={constructor:THREE.Color,r:1,g:1,b:1,set:function(a){a instanceof THREE.Color?this.copy(a):"number"===typeof a?this.setHex(a):"string"===typeof a&&this.setStyle(a);return this},setHex:function(a){a=Math.floor(a);this.r=(a>>16&255)/255;this.g=(a>>8&255)/255;this.b=(a&255)/255;return this},setRGB:function(a,b,c){this.r=a;this.g=b;this.b=c;return this},setHSL:function(a,b,c){if(0===b)this.r=this.g=this.b=c;else{var d=function(a,b,c){0>c&&(c+=1);1<c&&(c-=1);return c<1/6 [...]
+c:.5>c?b:c<2/3?a+6*(b-a)*(2/3-c):a};b=.5>=c?c*(1+b):c+b-c*b;c=2*c-b;this.r=d(c,b,a+1/3);this.g=d(c,b,a);this.b=d(c,b,a-1/3)}return this},setStyle:function(a){if(/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.test(a))return a=/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.exec(a),this.r=Math.min(255,parseInt(a[1],10))/255,this.g=Math.min(255,parseInt(a[2],10))/255,this.b=Math.min(255,parseInt(a[3],10))/255,this;if(/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.test(a))return a=/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.exec [...]
+Math.min(100,parseInt(a[1],10))/100,this.g=Math.min(100,parseInt(a[2],10))/100,this.b=Math.min(100,parseInt(a[3],10))/100,this;if(/^\#([0-9a-f]{6})$/i.test(a))return a=/^\#([0-9a-f]{6})$/i.exec(a),this.setHex(parseInt(a[1],16)),this;if(/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.test(a))return a=/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(a),this.setHex(parseInt(a[1]+a[1]+a[2]+a[2]+a[3]+a[3],16)),this;if(/^(\w+)$/i.test(a))return this.setHex(THREE.ColorKeywords[a]),this},copy:function(a){this.r [...]
+a.g;this.b=a.b;return this},copyGammaToLinear:function(a,b){void 0===b&&(b=2);this.r=Math.pow(a.r,b);this.g=Math.pow(a.g,b);this.b=Math.pow(a.b,b);return this},copyLinearToGamma:function(a,b){void 0===b&&(b=2);var c=0<b?1/b:1;this.r=Math.pow(a.r,c);this.g=Math.pow(a.g,c);this.b=Math.pow(a.b,c);return this},convertGammaToLinear:function(){var a=this.r,b=this.g,c=this.b;this.r=a*a;this.g=b*b;this.b=c*c;return this},convertLinearToGamma:function(){this.r=Math.sqrt(this.r);this.g=Math.sqrt(t [...]
+Math.sqrt(this.b);return this},getHex:function(){return 255*this.r<<16^255*this.g<<8^255*this.b<<0},getHexString:function(){return("000000"+this.getHex().toString(16)).slice(-6)},getHSL:function(a){a=a||{h:0,s:0,l:0};var b=this.r,c=this.g,d=this.b,e=Math.max(b,c,d),f=Math.min(b,c,d),g,h=(f+e)/2;if(f===e)f=g=0;else{var k=e-f,f=.5>=h?k/(e+f):k/(2-e-f);switch(e){case b:g=(c-d)/k+(c<d?6:0);break;case c:g=(d-b)/k+2;break;case d:g=(b-c)/k+4}g/=6}a.h=g;a.s=f;a.l=h;return a},getStyle:function(){ [...]
+(255*this.r|0)+","+(255*this.g|0)+","+(255*this.b|0)+")"},offsetHSL:function(a,b,c){var d=this.getHSL();d.h+=a;d.s+=b;d.l+=c;this.setHSL(d.h,d.s,d.l);return this},add:function(a){this.r+=a.r;this.g+=a.g;this.b+=a.b;return this},addColors:function(a,b){this.r=a.r+b.r;this.g=a.g+b.g;this.b=a.b+b.b;return this},addScalar:function(a){this.r+=a;this.g+=a;this.b+=a;return this},multiply:function(a){this.r*=a.r;this.g*=a.g;this.b*=a.b;return this},multiplyScalar:function(a){this.r*=a;this.g*=a; [...]
+return this},lerp:function(a,b){this.r+=(a.r-this.r)*b;this.g+=(a.g-this.g)*b;this.b+=(a.b-this.b)*b;return this},equals:function(a){return a.r===this.r&&a.g===this.g&&a.b===this.b},fromArray:function(a){this.r=a[0];this.g=a[1];this.b=a[2];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.r;a[b+1]=this.g;a[b+2]=this.b;return a},clone:function(){return(new THREE.Color).setRGB(this.r,this.g,this.b)}};
+THREE.ColorKeywords={aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:1243325 [...]
+darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greeny [...]
+grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,l [...]
+lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:14315734,palego [...]
+palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal: [...]
+tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074};THREE.Quaternion=function(a,b,c,d){this._x=a||0;this._y=b||0;this._z=c||0;this._w=void 0!==d?d:1};
+THREE.Quaternion.prototype={constructor:THREE.Quaternion,_x:0,_y:0,_z:0,_w:0,get x(){return this._x},set x(a){this._x=a;this.onChangeCallback()},get y(){return this._y},set y(a){this._y=a;this.onChangeCallback()},get z(){return this._z},set z(a){this._z=a;this.onChangeCallback()},get w(){return this._w},set w(a){this._w=a;this.onChangeCallback()},set:function(a,b,c,d){this._x=a;this._y=b;this._z=c;this._w=d;this.onChangeCallback();return this},copy:function(a){this._x=a.x;this._y=a.y;thi [...]
+this._w=a.w;this.onChangeCallback();return this},setFromEuler:function(a,b){if(!1===a instanceof THREE.Euler)throw Error("THREE.Quaternion: .setFromEuler() now expects a Euler rotation rather than a Vector3 and order.");var c=Math.cos(a._x/2),d=Math.cos(a._y/2),e=Math.cos(a._z/2),f=Math.sin(a._x/2),g=Math.sin(a._y/2),h=Math.sin(a._z/2);"XYZ"===a.order?(this._x=f*d*e+c*g*h,this._y=c*g*e-f*d*h,this._z=c*d*h+f*g*e,this._w=c*d*e-f*g*h):"YXZ"===a.order?(this._x=f*d*e+c*g*h,this._y=c*g*e-f*d*h [...]
+c*d*h-f*g*e,this._w=c*d*e+f*g*h):"ZXY"===a.order?(this._x=f*d*e-c*g*h,this._y=c*g*e+f*d*h,this._z=c*d*h+f*g*e,this._w=c*d*e-f*g*h):"ZYX"===a.order?(this._x=f*d*e-c*g*h,this._y=c*g*e+f*d*h,this._z=c*d*h-f*g*e,this._w=c*d*e+f*g*h):"YZX"===a.order?(this._x=f*d*e+c*g*h,this._y=c*g*e+f*d*h,this._z=c*d*h-f*g*e,this._w=c*d*e-f*g*h):"XZY"===a.order&&(this._x=f*d*e-c*g*h,this._y=c*g*e-f*d*h,this._z=c*d*h+f*g*e,this._w=c*d*e+f*g*h);if(!1!==b)this.onChangeCallback();return this},setFromAxisAngle:fu [...]
+b){var c=b/2,d=Math.sin(c);this._x=a.x*d;this._y=a.y*d;this._z=a.z*d;this._w=Math.cos(c);this.onChangeCallback();return this},setFromRotationMatrix:function(a){var b=a.elements,c=b[0];a=b[4];var d=b[8],e=b[1],f=b[5],g=b[9],h=b[2],k=b[6],b=b[10],l=c+f+b;0<l?(c=.5/Math.sqrt(l+1),this._w=.25/c,this._x=(k-g)*c,this._y=(d-h)*c,this._z=(e-a)*c):c>f&&c>b?(c=2*Math.sqrt(1+c-f-b),this._w=(k-g)/c,this._x=.25*c,this._y=(a+e)/c,this._z=(d+h)/c):f>b?(c=2*Math.sqrt(1+f-c-b),this._w=(d-h)/c,this._x=(a+ [...]
+.25*c,this._z=(g+k)/c):(c=2*Math.sqrt(1+b-c-f),this._w=(e-a)/c,this._x=(d+h)/c,this._y=(g+k)/c,this._z=.25*c);this.onChangeCallback();return this},setFromUnitVectors:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3);b=c.dot(d)+1;1E-6>b?(b=0,Math.abs(c.x)>Math.abs(c.z)?a.set(-c.y,c.x,0):a.set(0,-c.z,c.y)):a.crossVectors(c,d);this._x=a.x;this._y=a.y;this._z=a.z;this._w=b;this.normalize();return this}}(),inverse:function(){this.conjugate().normalize();return this},co [...]
+-1;this._y*=-1;this._z*=-1;this.onChangeCallback();return this},dot:function(a){return this._x*a._x+this._y*a._y+this._z*a._z+this._w*a._w},lengthSq:function(){return this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w},length:function(){return Math.sqrt(this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w)},normalize:function(){var a=this.length();0===a?(this._z=this._y=this._x=0,this._w=1):(a=1/a,this._x*=a,this._y*=a,this._z*=a,this._w*=a);this.onChangeCallback( [...]
+multiply:function(a,b){return void 0!==b?(THREE.warn("THREE.Quaternion: .multiply() now only accepts one argument. Use .multiplyQuaternions( a, b ) instead."),this.multiplyQuaternions(a,b)):this.multiplyQuaternions(this,a)},multiplyQuaternions:function(a,b){var c=a._x,d=a._y,e=a._z,f=a._w,g=b._x,h=b._y,k=b._z,l=b._w;this._x=c*l+f*g+d*k-e*h;this._y=d*l+f*h+e*g-c*k;this._z=e*l+f*k+c*h-d*g;this._w=f*l-c*g-d*h-e*k;this.onChangeCallback();return this},multiplyVector3:function(a){THREE.warn("T [...]
+return a.applyQuaternion(this)},slerp:function(a,b){if(0===b)return this;if(1===b)return this.copy(a);var c=this._x,d=this._y,e=this._z,f=this._w,g=f*a._w+c*a._x+d*a._y+e*a._z;0>g?(this._w=-a._w,this._x=-a._x,this._y=-a._y,this._z=-a._z,g=-g):this.copy(a);if(1<=g)return this._w=f,this._x=c,this._y=d,this._z=e,this;var h=Math.acos(g),k=Math.sqrt(1-g*g);if(.001>Math.abs(k))return this._w=.5*(f+this._w),this._x=.5*(c+this._x),this._y=.5*(d+this._y),this._z=.5*(e+this._z),this;g=Math.sin((1- [...]
+Math.sin(b*h)/k;this._w=f*g+this._w*h;this._x=c*g+this._x*h;this._y=d*g+this._y*h;this._z=e*g+this._z*h;this.onChangeCallback();return this},equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._w===this._w},fromArray:function(a,b){void 0===b&&(b=0);this._x=a[b];this._y=a[b+1];this._z=a[b+2];this._w=a[b+3];this.onChangeCallback();return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this._x;a[b+1]=this._y;a[b+2]=this._z;a[b+3]=this._w;return  [...]
+a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Quaternion(this._x,this._y,this._z,this._w)}};THREE.Quaternion.slerp=function(a,b,c,d){return c.copy(a).slerp(b,d)};THREE.Vector2=function(a,b){this.x=a||0;this.y=b||0};
+THREE.Vector2.prototype={constructor:THREE.Vector2,set:function(a,b){this.x=a;this.y=b;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;return this}, [...]
+b){if(void 0!==b)return THREE.warn("THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;return this},addScalar:function(a){this.x+=a;this.y+=a;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;return this},sub:function(a,b){if(void 0!==b)return THREE.warn("THREE.Vector2: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;ret [...]
+subScalar:function(a){this.x-=a;this.y-=a;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;return this},multiply:function(a){this.x*=a.x;this.y*=a.y;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;return this},divide:function(a){this.x/=a.x;this.y/=a.y;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a):this.y=this.x=0;return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);return this},max:function(a){this.x<a.x&& [...]
+this.y<a.y&&(this.y=a.y);return this},clamp:function(a,b){this.x<a.x?this.x=a.x:this.x>b.x&&(this.x=b.x);this.y<a.y?this.y=a.y:this.y>b.y&&(this.y=b.y);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector2,b=new THREE.Vector2);a.set(c,c);b.set(d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);return this},round:funct [...]
+Math.round(this.x);this.y=Math.round(this.y);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);return this},negate:function(){this.x=-this.x;this.y=-this.y;return this},dot:function(a){return this.x*a.x+this.y*a.y},lengthSq:function(){return this.x*this.x+this.y*this.y},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y)},normalize:function(){return this.divideScalar(this.length())},di [...]
+distanceToSquared:function(a){var b=this.x-a.x;a=this.y-a.y;return b*b+a*a},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;return this},lerpVectors:function(a,b,c){this.subVectors(b,a).multiplyScalar(c).add(a);return this},equals:function(a){return a.x===this.x&&a.y===this.y},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];return this},toArray:function(a,b){ [...]
+a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;return a},fromAttribute:function(a,b,c){void 0===c&&(c=0);b=b*a.itemSize+c;this.x=a.array[b];this.y=a.array[b+1];return this},clone:function(){return new THREE.Vector2(this.x,this.y)}};THREE.Vector3=function(a,b,c){this.x=a||0;this.y=b||0;this.z=c||0};
+THREE.Vector3.prototype={constructor:THREE.Vector3,set:function(a,b,c){this.x=a;this.y=b;this.z=c;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;case 2:return this.z;default:thr [...]
+a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;return this},add:function(a,b){if(void 0!==b)return THREE.warn("THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;return this},sub:function(a,b){if(void 0!==b)return THREE.warn("THREE.Vector3:  [...]
+this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;return this},subScalar:function(a){this.x-=a;this.y-=a;this.z-=a;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;return this},multiply:function(a,b){if(void 0!==b)return THREE.warn("THREE.Vector3: .multiply() now only accepts one argument. Use .multiplyVectors( a, b ) instead."),this.multiplyVectors(a,b);this.x*=a.x;this.y*=a.y;this.z*=a.z;return this},multiplyScalar:function(a){this.x*=a;this.y*= [...]
+a;return this},multiplyVectors:function(a,b){this.x=a.x*b.x;this.y=a.y*b.y;this.z=a.z*b.z;return this},applyEuler:function(){var a;return function(b){!1===b instanceof THREE.Euler&&THREE.error("THREE.Vector3: .applyEuler() now expects a Euler rotation rather than a Vector3 and order.");void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromEuler(b));return this}}(),applyAxisAngle:function(){var a;return function(b,c){void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a [...]
+c));return this}}(),applyMatrix3:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[3]*c+a[6]*d;this.y=a[1]*b+a[4]*c+a[7]*d;this.z=a[2]*b+a[5]*c+a[8]*d;return this},applyMatrix4:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12];this.y=a[1]*b+a[5]*c+a[9]*d+a[13];this.z=a[2]*b+a[6]*c+a[10]*d+a[14];return this},applyProjection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;var e=1/(a[3]*b+a[7]*c+a[11]*d+a[15]);this.x=(a[0 [...]
+c+a[8]*d+a[12])*e;this.y=(a[1]*b+a[5]*c+a[9]*d+a[13])*e;this.z=(a[2]*b+a[6]*c+a[10]*d+a[14])*e;return this},applyQuaternion:function(a){var b=this.x,c=this.y,d=this.z,e=a.x,f=a.y,g=a.z;a=a.w;var h=a*b+f*d-g*c,k=a*c+g*b-e*d,l=a*d+e*c-f*b,b=-e*b-f*c-g*d;this.x=h*a+b*-e+k*-g-l*-f;this.y=k*a+b*-f+l*-e-h*-g;this.z=l*a+b*-g+h*-f-k*-e;return this},project:function(){var a;return function(b){void 0===a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.projectionMatrix,a.getInverse(b.matrixWorld));retu [...]
+unproject:function(){var a;return function(b){void 0===a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.matrixWorld,a.getInverse(b.projectionMatrix));return this.applyProjection(a)}}(),transformDirection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d;this.y=a[1]*b+a[5]*c+a[9]*d;this.z=a[2]*b+a[6]*c+a[10]*d;this.normalize();return this},divide:function(a){this.x/=a.x;this.y/=a.y;this.z/=a.z;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this [...]
+a):this.z=this.y=this.x=0;return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);return this},max:function(a){this.x<a.x&&(this.x=a.x);this.y<a.y&&(this.y=a.y);this.z<a.z&&(this.z=a.z);return this},clamp:function(a,b){this.x<a.x?this.x=a.x:this.x>b.x&&(this.x=b.x);this.y<a.y?this.y=a.y:this.y>b.y&&(this.y=b.y);this.z<a.z?this.z=a.z:this.z>b.z&&(this.z=b.z);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new  [...]
+b=new THREE.Vector3);a.set(c,c,c);b.set(d,d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.c [...]
+Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z);return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)},normalize:function(){return this.divide [...]
+setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;return this},lerpVectors:function(a,b,c){this.subVectors(b,a).multiplyScalar(c).add(a);return this},cross:function(a,b){if(void 0!==b)return THREE.warn("THREE.Vector3: .cross() now only accepts one argument. Use .crossVectors( a, b ) instead."),this.crossVectors(a,b);var c=this.x,d=this.y,e=this.z;this.x=d*a. [...]
+e*a.x-c*a.z;this.z=c*a.y-d*a.x;return this},crossVectors:function(a,b){var c=a.x,d=a.y,e=a.z,f=b.x,g=b.y,h=b.z;this.x=d*h-e*g;this.y=e*f-c*h;this.z=c*g-d*f;return this},projectOnVector:function(){var a,b;return function(c){void 0===a&&(a=new THREE.Vector3);a.copy(c).normalize();b=this.dot(a);return this.copy(a).multiplyScalar(b)}}(),projectOnPlane:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);a.copy(this).projectOnVector(b);return this.sub(a)}}(),reflect:function( [...]
+a&&(a=new THREE.Vector3);return this.sub(a.copy(b).multiplyScalar(2*this.dot(b)))}}(),angleTo:function(a){a=this.dot(a)/(this.length()*a.length());return Math.acos(THREE.Math.clamp(a,-1,1))},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b=this.x-a.x,c=this.y-a.y;a=this.z-a.z;return b*b+c*c+a*a},setEulerFromRotationMatrix:function(a,b){THREE.error("THREE.Vector3: .setEulerFromRotationMatrix() has been removed. Use Euler.setFromRotati [...]
+setEulerFromQuaternion:function(a,b){THREE.error("THREE.Vector3: .setEulerFromQuaternion() has been removed. Use Euler.setFromQuaternion() instead.")},getPositionFromMatrix:function(a){THREE.warn("THREE.Vector3: .getPositionFromMatrix() has been renamed to .setFromMatrixPosition().");return this.setFromMatrixPosition(a)},getScaleFromMatrix:function(a){THREE.warn("THREE.Vector3: .getScaleFromMatrix() has been renamed to .setFromMatrixScale().");return this.setFromMatrixScale(a)},getColumn [...]
+b){THREE.warn("THREE.Vector3: .getColumnFromMatrix() has been renamed to .setFromMatrixColumn().");return this.setFromMatrixColumn(a,b)},setFromMatrixPosition:function(a){this.x=a.elements[12];this.y=a.elements[13];this.z=a.elements[14];return this},setFromMatrixScale:function(a){var b=this.set(a.elements[0],a.elements[1],a.elements[2]).length(),c=this.set(a.elements[4],a.elements[5],a.elements[6]).length();a=this.set(a.elements[8],a.elements[9],a.elements[10]).length();this.x=b;this.y=c [...]
+return this},setFromMatrixColumn:function(a,b){var c=4*a,d=b.elements;this.x=d[c];this.y=d[c+1];this.z=d[c+2];return this},equals:function(a){return a.x===this.x&&a.y===this.y&&a.z===this.z},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]=this.z;return a},fromAttribute:function(a,b,c){void 0===c&&(c=0);b=b*a.itemSize+c;this.x=a.array[b];this.y=a.array [...]
+a.array[b+2];return this},clone:function(){return new THREE.Vector3(this.x,this.y,this.z)}};THREE.Vector4=function(a,b,c,d){this.x=a||0;this.y=b||0;this.z=c||0;this.w=void 0!==d?d:1};
+THREE.Vector4.prototype={constructor:THREE.Vector4,set:function(a,b,c,d){this.x=a;this.y=b;this.z=c;this.w=d;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setW:function(a){this.w=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;case 3:this.w=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){ca [...]
+case 1:return this.y;case 2:return this.z;case 3:return this.w;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;this.w=void 0!==a.w?a.w:1;return this},add:function(a,b){if(void 0!==b)return THREE.warn("THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;this.w+=a.w;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;this.w+=a;retur [...]
+b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;this.w=a.w+b.w;return this},sub:function(a,b){if(void 0!==b)return THREE.warn("THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;this.w-=a.w;return this},subScalar:function(a){this.x-=a;this.y-=a;this.z-=a;this.w-=a;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;this.w=a.w-b.w;return this},multiplyScalar:functio [...]
+a;this.y*=a;this.z*=a;this.w*=a;return this},applyMatrix4:function(a){var b=this.x,c=this.y,d=this.z,e=this.w;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12]*e;this.y=a[1]*b+a[5]*c+a[9]*d+a[13]*e;this.z=a[2]*b+a[6]*c+a[10]*d+a[14]*e;this.w=a[3]*b+a[7]*c+a[11]*d+a[15]*e;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a,this.w*=a):(this.z=this.y=this.x=0,this.w=1);return this},setAxisAngleFromQuaternion:function(a){this.w=2*Math.acos(a.w);var b=Math.sqrt(1-a.w [...]
+b?(this.x=1,this.z=this.y=0):(this.x=a.x/b,this.y=a.y/b,this.z=a.z/b);return this},setAxisAngleFromRotationMatrix:function(a){var b,c,d;a=a.elements;var e=a[0];d=a[4];var f=a[8],g=a[1],h=a[5],k=a[9];c=a[2];b=a[6];var l=a[10];if(.01>Math.abs(d-g)&&.01>Math.abs(f-c)&&.01>Math.abs(k-b)){if(.1>Math.abs(d+g)&&.1>Math.abs(f+c)&&.1>Math.abs(k+b)&&.1>Math.abs(e+h+l-3))return this.set(1,0,0,0),this;a=Math.PI;e=(e+1)/2;h=(h+1)/2;l=(l+1)/2;d=(d+g)/4;f=(f+c)/4;k=(k+b)/4;e>h&&e>l?.01>e?(b=0,d=c=.7071 [...]
+Math.sqrt(e),c=d/b,d=f/b):h>l?.01>h?(b=.707106781,c=0,d=.707106781):(c=Math.sqrt(h),b=d/c,d=k/c):.01>l?(c=b=.707106781,d=0):(d=Math.sqrt(l),b=f/d,c=k/d);this.set(b,c,d,a);return this}a=Math.sqrt((b-k)*(b-k)+(f-c)*(f-c)+(g-d)*(g-d));.001>Math.abs(a)&&(a=1);this.x=(b-k)/a;this.y=(f-c)/a;this.z=(g-d)/a;this.w=Math.acos((e+h+l-1)/2);return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);this.w>a.w&&(this.w=a.w);return this},max:function(a){thi [...]
+(this.x=a.x);this.y<a.y&&(this.y=a.y);this.z<a.z&&(this.z=a.z);this.w<a.w&&(this.w=a.w);return this},clamp:function(a,b){this.x<a.x?this.x=a.x:this.x>b.x&&(this.x=b.x);this.y<a.y?this.y=a.y:this.y>b.y&&(this.y=b.y);this.z<a.z?this.z=a.z:this.z>b.z&&(this.z=b.z);this.w<a.w?this.w=a.w:this.w>b.w&&(this.w=b.w);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector4,b=new THREE.Vector4);a.set(c,c,c,c);b.set(d,d,d,d);return this.clamp(a,b)}}(),floor:f [...]
+Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);this.w=Math.floor(this.w);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);this.w=Math.ceil(this.w);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);this.w=Math.round(this.w);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floo [...]
+this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z);this.w=0>this.w?Math.ceil(this.w):Math.floor(this.w);return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;this.w=-this.w;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z+this.w*a.w},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w)},lengthManhattan:function(){return Mat [...]
+Math.abs(this.y)+Math.abs(this.z)+Math.abs(this.w)},normalize:function(){return this.divideScalar(this.length())},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;this.w+=(a.w-this.w)*b;return this},lerpVectors:function(a,b,c){this.subVectors(b,a).multiplyScalar(c).add(a);return this},equals:function(a){return a.x===this.x&&a.y===this.y&&a.z===this.z&&a.w== [...]
+fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];this.w=a[b+3];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]=this.z;a[b+3]=this.w;return a},fromAttribute:function(a,b,c){void 0===c&&(c=0);b=b*a.itemSize+c;this.x=a.array[b];this.y=a.array[b+1];this.z=a.array[b+2];this.w=a.array[b+3];return this},clone:function(){return new THREE.Vector4(this.x,this.y,this.z,this.w)}};
+THREE.Euler=function(a,b,c,d){this._x=a||0;this._y=b||0;this._z=c||0;this._order=d||THREE.Euler.DefaultOrder};THREE.Euler.RotationOrders="XYZ YZX ZXY XZY YXZ ZYX".split(" ");THREE.Euler.DefaultOrder="XYZ";
+THREE.Euler.prototype={constructor:THREE.Euler,_x:0,_y:0,_z:0,_order:THREE.Euler.DefaultOrder,get x(){return this._x},set x(a){this._x=a;this.onChangeCallback()},get y(){return this._y},set y(a){this._y=a;this.onChangeCallback()},get z(){return this._z},set z(a){this._z=a;this.onChangeCallback()},get order(){return this._order},set order(a){this._order=a;this.onChangeCallback()},set:function(a,b,c,d){this._x=a;this._y=b;this._z=c;this._order=d||this._order;this.onChangeCallback();return  [...]
+a._x;this._y=a._y;this._z=a._z;this._order=a._order;this.onChangeCallback();return this},setFromRotationMatrix:function(a,b,c){var d=THREE.Math.clamp,e=a.elements;a=e[0];var f=e[4],g=e[8],h=e[1],k=e[5],l=e[9],p=e[2],q=e[6],e=e[10];b=b||this._order;"XYZ"===b?(this._y=Math.asin(d(g,-1,1)),.99999>Math.abs(g)?(this._x=Math.atan2(-l,e),this._z=Math.atan2(-f,a)):(this._x=Math.atan2(q,k),this._z=0)):"YXZ"===b?(this._x=Math.asin(-d(l,-1,1)),.99999>Math.abs(l)?(this._y=Math.atan2(g,e),this._z=Mat [...]
+k)):(this._y=Math.atan2(-p,a),this._z=0)):"ZXY"===b?(this._x=Math.asin(d(q,-1,1)),.99999>Math.abs(q)?(this._y=Math.atan2(-p,e),this._z=Math.atan2(-f,k)):(this._y=0,this._z=Math.atan2(h,a))):"ZYX"===b?(this._y=Math.asin(-d(p,-1,1)),.99999>Math.abs(p)?(this._x=Math.atan2(q,e),this._z=Math.atan2(h,a)):(this._x=0,this._z=Math.atan2(-f,k))):"YZX"===b?(this._z=Math.asin(d(h,-1,1)),.99999>Math.abs(h)?(this._x=Math.atan2(-l,k),this._y=Math.atan2(-p,a)):(this._x=0,this._y=Math.atan2(g,e))):"XZY"= [...]
+Math.asin(-d(f,-1,1)),.99999>Math.abs(f)?(this._x=Math.atan2(q,k),this._y=Math.atan2(g,a)):(this._x=Math.atan2(-l,e),this._y=0)):THREE.warn("THREE.Euler: .setFromRotationMatrix() given unsupported order: "+b);this._order=b;if(!1!==c)this.onChangeCallback();return this},setFromQuaternion:function(){var a;return function(b,c,d){void 0===a&&(a=new THREE.Matrix4);a.makeRotationFromQuaternion(b);this.setFromRotationMatrix(a,c,d);return this}}(),setFromVector3:function(a,b){return this.set(a.x [...]
+b||this._order)},reorder:function(){var a=new THREE.Quaternion;return function(b){a.setFromEuler(this);this.setFromQuaternion(a,b)}}(),equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._order===this._order},fromArray:function(a){this._x=a[0];this._y=a[1];this._z=a[2];void 0!==a[3]&&(this._order=a[3]);this.onChangeCallback();return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this._x;a[b+1]=this._y;a[b+2]=this._z;a[b+3]=this._order;return [...]
+a.set(this._x,this._y,this._z):new THREE.Vector3(this._x,this._y,this._z)},onChange:function(a){this.onChangeCallback=a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Euler(this._x,this._y,this._z,this._order)}};THREE.Line3=function(a,b){this.start=void 0!==a?a:new THREE.Vector3;this.end=void 0!==b?b:new THREE.Vector3};
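// Editorial sketch, not part of the upstream diff: the THREE.Euler
// change-notification contract defined above. The callback body is hypothetical.
var e = new THREE.Euler(0, Math.PI / 2, 0, "YXZ"); // one of the orders listed in THREE.Euler.RotationOrders
e.onChange(function () { /* runs whenever an x/y/z/order setter fires */ });
e.x = 0.5;        // setter -> onChangeCallback()
e.reorder("XYZ"); // same rotation re-expressed in another axis order via a quaternion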
+THREE.Line3.prototype={constructor:THREE.Line3,set:function(a,b){this.start.copy(a);this.end.copy(b);return this},copy:function(a){this.start.copy(a.start);this.end.copy(a.end);return this},center:function(a){return(a||new THREE.Vector3).addVectors(this.start,this.end).multiplyScalar(.5)},delta:function(a){return(a||new THREE.Vector3).subVectors(this.end,this.start)},distanceSq:function(){return this.start.distanceToSquared(this.end)},distance:function(){return this.start.distanceTo(this [...]
+b){var c=b||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},closestPointToPointParameter:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d){a.subVectors(c,this.start);b.subVectors(this.end,this.start);var e=b.dot(b),e=b.dot(a)/e;d&&(e=THREE.Math.clamp(e,0,1));return e}}(),closestPointToPoint:function(a,b,c){a=this.closestPointToPointParameter(a,b);c=c||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},applyMatrix4 [...]
+this.end.applyMatrix4(a);return this},equals:function(a){return a.start.equals(this.start)&&a.end.equals(this.end)},clone:function(){return(new THREE.Line3).copy(this)}};THREE.Box2=function(a,b){this.min=void 0!==a?a:new THREE.Vector2(Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector2(-Infinity,-Infinity)};
+THREE.Box2.prototype={constructor:THREE.Box2,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;b<c;b++)this.expandByPoint(a[b]);return this},setFromCenterAndSize:function(){var a=new THREE.Vector2;return function(b,c){var d=a.copy(c).multiplyScalar(.5);this.min.copy(b).sub(d);this.max.copy(b).add(d);return this}}(),copy:function(a){this.min.copy(a.min);this.max.copy(a.max);return this},makeEmpty:function(){t [...]
+this.min.y=Infinity;this.max.x=this.max.y=-Infinity;return this},empty:function(){return this.max.x<this.min.x||this.max.y<this.min.y},center:function(a){return(a||new THREE.Vector2).addVectors(this.min,this.max).multiplyScalar(.5)},size:function(a){return(a||new THREE.Vector2).subVectors(this.max,this.min)},expandByPoint:function(a){this.min.min(a);this.max.max(a);return this},expandByVector:function(a){this.min.sub(a);this.max.add(a);return this},expandByScalar:function(a){this.min.add [...]
+this.max.addScalar(a);return this},containsPoint:function(a){return a.x<this.min.x||a.x>this.max.x||a.y<this.min.y||a.y>this.max.y?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector2).set((a.x-this.min.x)/(this.max.x-this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y))},isIntersectionBox:function(a){return a.max.x<this.min.x||a.min.x>this.max.x||a.max.y<this [...]
+this.max.y?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector2).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector2;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},translate:function(a){this.min.add(a);this.max.add(a);return this},equals:function(a){return a.min.equals(th [...]
+a.max.equals(this.max)},clone:function(){return(new THREE.Box2).copy(this)}};THREE.Box3=function(a,b){this.min=void 0!==a?a:new THREE.Vector3(Infinity,Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector3(-Infinity,-Infinity,-Infinity)};
+THREE.Box3.prototype={constructor:THREE.Box3,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;b<c;b++)this.expandByPoint(a[b]);return this},setFromCenterAndSize:function(){var a=new THREE.Vector3;return function(b,c){var d=a.copy(c).multiplyScalar(.5);this.min.copy(b).sub(d);this.max.copy(b).add(d);return this}}(),setFromObject:function(){var a=new THREE.Vector3;return function(b){var c=this;b.updateMatrixW [...]
+this.makeEmpty();b.traverse(function(b){var e=b.geometry;if(void 0!==e)if(e instanceof THREE.Geometry)for(var f=e.vertices,e=0,g=f.length;e<g;e++)a.copy(f[e]),a.applyMatrix4(b.matrixWorld),c.expandByPoint(a);else if(e instanceof THREE.BufferGeometry&&void 0!==e.attributes.position)for(f=e.attributes.position.array,e=0,g=f.length;e<g;e+=3)a.set(f[e],f[e+1],f[e+2]),a.applyMatrix4(b.matrixWorld),c.expandByPoint(a)});return this}}(),copy:function(a){this.min.copy(a.min);this.max.copy(a.max); [...]
+makeEmpty:function(){this.min.x=this.min.y=this.min.z=Infinity;this.max.x=this.max.y=this.max.z=-Infinity;return this},empty:function(){return this.max.x<this.min.x||this.max.y<this.min.y||this.max.z<this.min.z},center:function(a){return(a||new THREE.Vector3).addVectors(this.min,this.max).multiplyScalar(.5)},size:function(a){return(a||new THREE.Vector3).subVectors(this.max,this.min)},expandByPoint:function(a){this.min.min(a);this.max.max(a);return this},expandByVector:function(a){this.mi [...]
+this.max.add(a);return this},expandByScalar:function(a){this.min.addScalar(-a);this.max.addScalar(a);return this},containsPoint:function(a){return a.x<this.min.x||a.x>this.max.x||a.y<this.min.y||a.y>this.max.y||a.z<this.min.z||a.z>this.max.z?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y&&this.min.z<=a.min.z&&a.max.z<=this.max.z?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector3).set((a.x-this.min.x)/(th [...]
+this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y),(a.z-this.min.z)/(this.max.z-this.min.z))},isIntersectionBox:function(a){return a.max.x<this.min.x||a.min.x>this.max.x||a.max.y<this.min.y||a.min.y>this.max.y||a.max.z<this.min.z||a.min.z>this.max.z?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector3).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),getBoundingSp [...]
+new THREE.Vector3;return function(b){b=b||new THREE.Sphere;b.center=this.center();b.radius=.5*this.size(a).length();return b}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},applyMatrix4:function(){var a=[new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3];return function(b){a[0].set(this.min.x,th [...]
+this.min.z).applyMatrix4(b);a[1].set(this.min.x,this.min.y,this.max.z).applyMatrix4(b);a[2].set(this.min.x,this.max.y,this.min.z).applyMatrix4(b);a[3].set(this.min.x,this.max.y,this.max.z).applyMatrix4(b);a[4].set(this.max.x,this.min.y,this.min.z).applyMatrix4(b);a[5].set(this.max.x,this.min.y,this.max.z).applyMatrix4(b);a[6].set(this.max.x,this.max.y,this.min.z).applyMatrix4(b);a[7].set(this.max.x,this.max.y,this.max.z).applyMatrix4(b);this.makeEmpty();this.setFromPoints(a);return this} [...]
+this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&&a.max.equals(this.max)},clone:function(){return(new THREE.Box3).copy(this)}};THREE.Matrix3=function(){this.elements=new Float32Array([1,0,0,0,1,0,0,0,1]);0<arguments.length&&THREE.error("THREE.Matrix3: the constructor no longer reads arguments. use .set() instead.")};
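// Editorial sketch, not part of the upstream diff: THREE.Box3 as defined above.
// "p0".."p2", "p", and "other" are hypothetical THREE.Vector3 / Box3 values.
var box = new THREE.Box3();           // starts empty: min = +Infinity, max = -Infinity
box.setFromPoints([p0, p1, p2]);      // grows via expandByPoint()
box.expandByScalar(0.1);              // pad the bounds on every axis
var inside = box.containsPoint(p);    // inclusive bounds test
var overlap = box.clone().intersect(other); // empty() afterwards means the boxes were disjoint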
+THREE.Matrix3.prototype={constructor:THREE.Matrix3,set:function(a,b,c,d,e,f,g,h,k){var l=this.elements;l[0]=a;l[3]=b;l[6]=c;l[1]=d;l[4]=e;l[7]=f;l[2]=g;l[5]=h;l[8]=k;return this},identity:function(){this.set(1,0,0,0,1,0,0,0,1);return this},copy:function(a){a=a.elements;this.set(a[0],a[3],a[6],a[1],a[4],a[7],a[2],a[5],a[8]);return this},multiplyVector3:function(a){THREE.warn("THREE.Matrix3: .multiplyVector3() has been removed. Use vector.applyMatrix3( matrix ) instead.");return a.applyMat [...]
+multiplyVector3Array:function(a){THREE.warn("THREE.Matrix3: .multiplyVector3Array() has been renamed. Use matrix.applyToVector3Array( array ) instead.");return this.applyToVector3Array(a)},applyToVector3Array:function(){var a=new THREE.Vector3;return function(b,c,d){void 0===c&&(c=0);void 0===d&&(d=b.length);for(var e=0;e<d;e+=3,c+=3)a.x=b[c],a.y=b[c+1],a.z=b[c+2],a.applyMatrix3(this),b[c]=a.x,b[c+1]=a.y,b[c+2]=a.z;return b}}(),multiplyScalar:function(a){var b=this.elements;b[0]*=a;b[3]* [...]
+a;b[1]*=a;b[4]*=a;b[7]*=a;b[2]*=a;b[5]*=a;b[8]*=a;return this},determinant:function(){var a=this.elements,b=a[0],c=a[1],d=a[2],e=a[3],f=a[4],g=a[5],h=a[6],k=a[7],a=a[8];return b*f*a-b*g*k-c*e*a+c*g*h+d*e*k-d*f*h},getInverse:function(a,b){var c=a.elements,d=this.elements;d[0]=c[10]*c[5]-c[6]*c[9];d[1]=-c[10]*c[1]+c[2]*c[9];d[2]=c[6]*c[1]-c[2]*c[5];d[3]=-c[10]*c[4]+c[6]*c[8];d[4]=c[10]*c[0]-c[2]*c[8];d[5]=-c[6]*c[0]+c[2]*c[4];d[6]=c[9]*c[4]-c[5]*c[8];d[7]=-c[9]*c[0]+c[1]*c[8];d[8]=c[5]*c[0 [...]
+c=c[0]*d[0]+c[1]*d[3]+c[2]*d[6];if(0===c){if(b)throw Error("Matrix3.getInverse(): can't invert matrix, determinant is 0");THREE.warn("Matrix3.getInverse(): can't invert matrix, determinant is 0");this.identity();return this}this.multiplyScalar(1/c);return this},transpose:function(){var a,b=this.elements;a=b[1];b[1]=b[3];b[3]=a;a=b[2];b[2]=b[6];b[6]=a;a=b[5];b[5]=b[7];b[7]=a;return this},flattenToArrayOffset:function(a,b){var c=this.elements;a[b]=c[0];a[b+1]=c[1];a[b+2]=c[2];a[b+3]=c[3];a [...]
+a[b+5]=c[5];a[b+6]=c[6];a[b+7]=c[7];a[b+8]=c[8];return a},getNormalMatrix:function(a){this.getInverse(a).transpose();return this},transposeIntoArray:function(a){var b=this.elements;a[0]=b[0];a[1]=b[3];a[2]=b[6];a[3]=b[1];a[4]=b[4];a[5]=b[7];a[6]=b[2];a[7]=b[5];a[8]=b[8];return this},fromArray:function(a){this.elements.set(a);return this},toArray:function(){var a=this.elements;return[a[0],a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8]]},clone:function(){return(new THREE.Matrix3).fromArray(this.e [...]
+THREE.Matrix4=function(){this.elements=new Float32Array([1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]);0<arguments.length&&THREE.error("THREE.Matrix4: the constructor no longer reads arguments. use .set() instead.")};
+THREE.Matrix4.prototype={constructor:THREE.Matrix4,set:function(a,b,c,d,e,f,g,h,k,l,p,q,n,t,r,s){var u=this.elements;u[0]=a;u[4]=b;u[8]=c;u[12]=d;u[1]=e;u[5]=f;u[9]=g;u[13]=h;u[2]=k;u[6]=l;u[10]=p;u[14]=q;u[3]=n;u[7]=t;u[11]=r;u[15]=s;return this},identity:function(){this.set(1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1);return this},copy:function(a){this.elements.set(a.elements);return this},extractPosition:function(a){THREE.warn("THREE.Matrix4: .extractPosition() has been renamed to .copyPosition() [...]
+copyPosition:function(a){var b=this.elements;a=a.elements;b[12]=a[12];b[13]=a[13];b[14]=a[14];return this},extractBasis:function(a,b,c){var d=this.elements;a.set(d[0],d[1],d[2]);b.set(d[4],d[5],d[6]);c.set(d[8],d[9],d[10]);return this},makeBasis:function(a,b,c){this.set(a.x,b.x,c.x,0,a.y,b.y,c.y,0,a.z,b.z,c.z,0,0,0,0,1);return this},extractRotation:function(){var a=new THREE.Vector3;return function(b){var c=this.elements;b=b.elements;var d=1/a.set(b[0],b[1],b[2]).length(),e=1/a.set(b[4], [...]
+f=1/a.set(b[8],b[9],b[10]).length();c[0]=b[0]*d;c[1]=b[1]*d;c[2]=b[2]*d;c[4]=b[4]*e;c[5]=b[5]*e;c[6]=b[6]*e;c[8]=b[8]*f;c[9]=b[9]*f;c[10]=b[10]*f;return this}}(),makeRotationFromEuler:function(a){!1===a instanceof THREE.Euler&&THREE.error("THREE.Matrix: .makeRotationFromEuler() now expects a Euler rotation rather than a Vector3 and order.");var b=this.elements,c=a.x,d=a.y,e=a.z,f=Math.cos(c),c=Math.sin(c),g=Math.cos(d),d=Math.sin(d),h=Math.cos(e),e=Math.sin(e);if("XYZ"===a.order){a=f*h;v [...]
+l=c*h,p=c*e;b[0]=g*h;b[4]=-g*e;b[8]=d;b[1]=k+l*d;b[5]=a-p*d;b[9]=-c*g;b[2]=p-a*d;b[6]=l+k*d;b[10]=f*g}else"YXZ"===a.order?(a=g*h,k=g*e,l=d*h,p=d*e,b[0]=a+p*c,b[4]=l*c-k,b[8]=f*d,b[1]=f*e,b[5]=f*h,b[9]=-c,b[2]=k*c-l,b[6]=p+a*c,b[10]=f*g):"ZXY"===a.order?(a=g*h,k=g*e,l=d*h,p=d*e,b[0]=a-p*c,b[4]=-f*e,b[8]=l+k*c,b[1]=k+l*c,b[5]=f*h,b[9]=p-a*c,b[2]=-f*d,b[6]=c,b[10]=f*g):"ZYX"===a.order?(a=f*h,k=f*e,l=c*h,p=c*e,b[0]=g*h,b[4]=l*d-k,b[8]=a*d+p,b[1]=g*e,b[5]=p*d+a,b[9]=k*d-l,b[2]=-d,b[6]=c*g,b[1 [...]
+a.order?(a=f*g,k=f*d,l=c*g,p=c*d,b[0]=g*h,b[4]=p-a*e,b[8]=l*e+k,b[1]=e,b[5]=f*h,b[9]=-c*h,b[2]=-d*h,b[6]=k*e+l,b[10]=a-p*e):"XZY"===a.order&&(a=f*g,k=f*d,l=c*g,p=c*d,b[0]=g*h,b[4]=-e,b[8]=d*h,b[1]=a*e+p,b[5]=f*h,b[9]=k*e-l,b[2]=l*e-k,b[6]=c*h,b[10]=p*e+a);b[3]=0;b[7]=0;b[11]=0;b[12]=0;b[13]=0;b[14]=0;b[15]=1;return this},setRotationFromQuaternion:function(a){THREE.warn("THREE.Matrix4: .setRotationFromQuaternion() has been renamed to .makeRotationFromQuaternion().");return this.makeRotati [...]
+makeRotationFromQuaternion:function(a){var b=this.elements,c=a.x,d=a.y,e=a.z,f=a.w,g=c+c,h=d+d,k=e+e;a=c*g;var l=c*h,c=c*k,p=d*h,d=d*k,e=e*k,g=f*g,h=f*h,f=f*k;b[0]=1-(p+e);b[4]=l-f;b[8]=c+h;b[1]=l+f;b[5]=1-(a+e);b[9]=d-g;b[2]=c-h;b[6]=d+g;b[10]=1-(a+p);b[3]=0;b[7]=0;b[11]=0;b[12]=0;b[13]=0;b[14]=0;b[15]=1;return this},lookAt:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3;return function(d,e,f){var g=this.elements;c.subVectors(d,e).normalize();0===c.length()&&( [...]
+c).normalize();0===a.length()&&(c.x+=1E-4,a.crossVectors(f,c).normalize());b.crossVectors(c,a);g[0]=a.x;g[4]=b.x;g[8]=c.x;g[1]=a.y;g[5]=b.y;g[9]=c.y;g[2]=a.z;g[6]=b.z;g[10]=c.z;return this}}(),multiply:function(a,b){return void 0!==b?(THREE.warn("THREE.Matrix4: .multiply() now only accepts one argument. Use .multiplyMatrices( a, b ) instead."),this.multiplyMatrices(a,b)):this.multiplyMatrices(this,a)},multiplyMatrices:function(a,b){var c=a.elements,d=b.elements,e=this.elements,f=c[0],g=c [...]
+k=c[12],l=c[1],p=c[5],q=c[9],n=c[13],t=c[2],r=c[6],s=c[10],u=c[14],v=c[3],x=c[7],D=c[11],c=c[15],w=d[0],y=d[4],A=d[8],E=d[12],G=d[1],F=d[5],z=d[9],I=d[13],U=d[2],M=d[6],H=d[10],L=d[14],P=d[3],N=d[7],R=d[11],d=d[15];e[0]=f*w+g*G+h*U+k*P;e[4]=f*y+g*F+h*M+k*N;e[8]=f*A+g*z+h*H+k*R;e[12]=f*E+g*I+h*L+k*d;e[1]=l*w+p*G+q*U+n*P;e[5]=l*y+p*F+q*M+n*N;e[9]=l*A+p*z+q*H+n*R;e[13]=l*E+p*I+q*L+n*d;e[2]=t*w+r*G+s*U+u*P;e[6]=t*y+r*F+s*M+u*N;e[10]=t*A+r*z+s*H+u*R;e[14]=t*E+r*I+s*L+u*d;e[3]=v*w+x*G+D*U+c*P; [...]
+x*F+D*M+c*N;e[11]=v*A+x*z+D*H+c*R;e[15]=v*E+x*I+D*L+c*d;return this},multiplyToArray:function(a,b,c){var d=this.elements;this.multiplyMatrices(a,b);c[0]=d[0];c[1]=d[1];c[2]=d[2];c[3]=d[3];c[4]=d[4];c[5]=d[5];c[6]=d[6];c[7]=d[7];c[8]=d[8];c[9]=d[9];c[10]=d[10];c[11]=d[11];c[12]=d[12];c[13]=d[13];c[14]=d[14];c[15]=d[15];return this},multiplyScalar:function(a){var b=this.elements;b[0]*=a;b[4]*=a;b[8]*=a;b[12]*=a;b[1]*=a;b[5]*=a;b[9]*=a;b[13]*=a;b[2]*=a;b[6]*=a;b[10]*=a;b[14]*=a;b[3]*=a;b[7] [...]
+a;b[15]*=a;return this},multiplyVector3:function(a){THREE.warn("THREE.Matrix4: .multiplyVector3() has been removed. Use vector.applyMatrix4( matrix ) or vector.applyProjection( matrix ) instead.");return a.applyProjection(this)},multiplyVector4:function(a){THREE.warn("THREE.Matrix4: .multiplyVector4() has been removed. Use vector.applyMatrix4( matrix ) instead.");return a.applyMatrix4(this)},multiplyVector3Array:function(a){THREE.warn("THREE.Matrix4: .multiplyVector3Array() has been rena [...]
+return this.applyToVector3Array(a)},applyToVector3Array:function(){var a=new THREE.Vector3;return function(b,c,d){void 0===c&&(c=0);void 0===d&&(d=b.length);for(var e=0;e<d;e+=3,c+=3)a.x=b[c],a.y=b[c+1],a.z=b[c+2],a.applyMatrix4(this),b[c]=a.x,b[c+1]=a.y,b[c+2]=a.z;return b}}(),rotateAxis:function(a){THREE.warn("THREE.Matrix4: .rotateAxis() has been removed. Use Vector3.transformDirection( matrix ) instead.");a.transformDirection(this)},crossVector:function(a){THREE.warn("THREE.Matrix4:  [...]
+return a.applyMatrix4(this)},determinant:function(){var a=this.elements,b=a[0],c=a[4],d=a[8],e=a[12],f=a[1],g=a[5],h=a[9],k=a[13],l=a[2],p=a[6],q=a[10],n=a[14];return a[3]*(+e*h*p-d*k*p-e*g*q+c*k*q+d*g*n-c*h*n)+a[7]*(+b*h*n-b*k*q+e*f*q-d*f*n+d*k*l-e*h*l)+a[11]*(+b*k*p-b*g*n-e*f*p+c*f*n+e*g*l-c*k*l)+a[15]*(-d*g*l-b*h*p+b*g*q+d*f*p-c*f*q+c*h*l)},transpose:function(){var a=this.elements,b;b=a[1];a[1]=a[4];a[4]=b;b=a[2];a[2]=a[8];a[8]=b;b=a[6];a[6]=a[9];a[9]=b;b=a[3];a[3]=a[12];a[12]=b;b=a[7 [...]
+a[13]=b;b=a[11];a[11]=a[14];a[14]=b;return this},flattenToArrayOffset:function(a,b){var c=this.elements;a[b]=c[0];a[b+1]=c[1];a[b+2]=c[2];a[b+3]=c[3];a[b+4]=c[4];a[b+5]=c[5];a[b+6]=c[6];a[b+7]=c[7];a[b+8]=c[8];a[b+9]=c[9];a[b+10]=c[10];a[b+11]=c[11];a[b+12]=c[12];a[b+13]=c[13];a[b+14]=c[14];a[b+15]=c[15];return a},getPosition:function(){var a=new THREE.Vector3;return function(){THREE.warn("THREE.Matrix4: .getPosition() has been removed. Use Vector3.setFromMatrixPosition( matrix ) instead [...]
+this.elements;return a.set(b[12],b[13],b[14])}}(),setPosition:function(a){var b=this.elements;b[12]=a.x;b[13]=a.y;b[14]=a.z;return this},getInverse:function(a,b){var c=this.elements,d=a.elements,e=d[0],f=d[4],g=d[8],h=d[12],k=d[1],l=d[5],p=d[9],q=d[13],n=d[2],t=d[6],r=d[10],s=d[14],u=d[3],v=d[7],x=d[11],d=d[15];c[0]=p*s*v-q*r*v+q*t*x-l*s*x-p*t*d+l*r*d;c[4]=h*r*v-g*s*v-h*t*x+f*s*x+g*t*d-f*r*d;c[8]=g*q*v-h*p*v+h*l*x-f*q*x-g*l*d+f*p*d;c[12]=h*p*t-g*q*t-h*l*r+f*q*r+g*l*s-f*p*s;c[1]=q*r*u-p*s [...]
+k*s*x+p*n*d-k*r*d;c[5]=g*s*u-h*r*u+h*n*x-e*s*x-g*n*d+e*r*d;c[9]=h*p*u-g*q*u-h*k*x+e*q*x+g*k*d-e*p*d;c[13]=g*q*n-h*p*n+h*k*r-e*q*r-g*k*s+e*p*s;c[2]=l*s*u-q*t*u+q*n*v-k*s*v-l*n*d+k*t*d;c[6]=h*t*u-f*s*u-h*n*v+e*s*v+f*n*d-e*t*d;c[10]=f*q*u-h*l*u+h*k*v-e*q*v-f*k*d+e*l*d;c[14]=h*l*n-f*q*n-h*k*t+e*q*t+f*k*s-e*l*s;c[3]=p*t*u-l*r*u-p*n*v+k*r*v+l*n*x-k*t*x;c[7]=f*r*u-g*t*u+g*n*v-e*r*v-f*n*x+e*t*x;c[11]=g*l*u-f*p*u-g*k*v+e*p*v+f*k*x-e*l*x;c[15]=f*p*n-g*l*n+g*k*t-e*p*t-f*k*r+e*l*r;c=e*c[0]+k*c[4]+n* [...]
+if(0==c){if(b)throw Error("THREE.Matrix4.getInverse(): can't invert matrix, determinant is 0");THREE.warn("THREE.Matrix4.getInverse(): can't invert matrix, determinant is 0");this.identity();return this}this.multiplyScalar(1/c);return this},translate:function(a){THREE.error("THREE.Matrix4: .translate() has been removed.")},rotateX:function(a){THREE.error("THREE.Matrix4: .rotateX() has been removed.")},rotateY:function(a){THREE.error("THREE.Matrix4: .rotateY() has been removed.")},rotateZ [...]
+rotateByAxis:function(a,b){THREE.error("THREE.Matrix4: .rotateByAxis() has been removed.")},scale:function(a){var b=this.elements,c=a.x,d=a.y;a=a.z;b[0]*=c;b[4]*=d;b[8]*=a;b[1]*=c;b[5]*=d;b[9]*=a;b[2]*=c;b[6]*=d;b[10]*=a;b[3]*=c;b[7]*=d;b[11]*=a;return this},getMaxScaleOnAxis:function(){var a=this.elements;return Math.sqrt(Math.max(a[0]*a[0]+a[1]*a[1]+a[2]*a[2],Math.max(a[4]*a[4]+a[5]*a[5]+a[6]*a[6],a[8]*a[8]+a[9]*a[9]+a[10]*a[10])))},makeTranslation:function(a,b,c){this.set(1,0,0,a,0,1, [...]
+c,0,0,0,1);return this},makeRotationX:function(a){var b=Math.cos(a);a=Math.sin(a);this.set(1,0,0,0,0,b,-a,0,0,a,b,0,0,0,0,1);return this},makeRotationY:function(a){var b=Math.cos(a);a=Math.sin(a);this.set(b,0,a,0,0,1,0,0,-a,0,b,0,0,0,0,1);return this},makeRotationZ:function(a){var b=Math.cos(a);a=Math.sin(a);this.set(b,-a,0,0,a,b,0,0,0,0,1,0,0,0,0,1);return this},makeRotationAxis:function(a,b){var c=Math.cos(b),d=Math.sin(b),e=1-c,f=a.x,g=a.y,h=a.z,k=e*f,l=e*g;this.set(k*f+c,k*g-d*h,k*h+ [...]
+d*h,l*g+c,l*h-d*f,0,k*h-d*g,l*h+d*f,e*h*h+c,0,0,0,0,1);return this},makeScale:function(a,b,c){this.set(a,0,0,0,0,b,0,0,0,0,c,0,0,0,0,1);return this},compose:function(a,b,c){this.makeRotationFromQuaternion(b);this.scale(c);this.setPosition(a);return this},decompose:function(){var a=new THREE.Vector3,b=new THREE.Matrix4;return function(c,d,e){var f=this.elements,g=a.set(f[0],f[1],f[2]).length(),h=a.set(f[4],f[5],f[6]).length(),k=a.set(f[8],f[9],f[10]).length();0>this.determinant()&&(g=-g); [...]
+c.y=f[13];c.z=f[14];b.elements.set(this.elements);c=1/g;var f=1/h,l=1/k;b.elements[0]*=c;b.elements[1]*=c;b.elements[2]*=c;b.elements[4]*=f;b.elements[5]*=f;b.elements[6]*=f;b.elements[8]*=l;b.elements[9]*=l;b.elements[10]*=l;d.setFromRotationMatrix(b);e.x=g;e.y=h;e.z=k;return this}}(),makeFrustum:function(a,b,c,d,e,f){var g=this.elements;g[0]=2*e/(b-a);g[4]=0;g[8]=(b+a)/(b-a);g[12]=0;g[1]=0;g[5]=2*e/(d-c);g[9]=(d+c)/(d-c);g[13]=0;g[2]=0;g[6]=0;g[10]=-(f+e)/(f-e);g[14]=-2*f*e/(f-e);g[3]= [...]
+g[11]=-1;g[15]=0;return this},makePerspective:function(a,b,c,d){a=c*Math.tan(THREE.Math.degToRad(.5*a));var e=-a;return this.makeFrustum(e*b,a*b,e,a,c,d)},makeOrthographic:function(a,b,c,d,e,f){var g=this.elements,h=b-a,k=c-d,l=f-e;g[0]=2/h;g[4]=0;g[8]=0;g[12]=-((b+a)/h);g[1]=0;g[5]=2/k;g[9]=0;g[13]=-((c+d)/k);g[2]=0;g[6]=0;g[10]=-2/l;g[14]=-((f+e)/l);g[3]=0;g[7]=0;g[11]=0;g[15]=1;return this},fromArray:function(a){this.elements.set(a);return this},toArray:function(){var a=this.elements; [...]
+a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8],a[9],a[10],a[11],a[12],a[13],a[14],a[15]]},clone:function(){return(new THREE.Matrix4).fromArray(this.elements)}};THREE.Ray=function(a,b){this.origin=void 0!==a?a:new THREE.Vector3;this.direction=void 0!==b?b:new THREE.Vector3};
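// Editorial sketch, not part of the upstream diff: the column-major THREE.Matrix4
// API above. "position", "quaternion", and "scale" are hypothetical.
var r = new THREE.Matrix4().makeRotationY(Math.PI / 4);
var t = new THREE.Matrix4().makeTranslation(1, 0, 0);
var m = new THREE.Matrix4().multiplyMatrices(t, r);  // m = t * r: rotate first, then translate
var inv = new THREE.Matrix4().getInverse(m);         // falls back to identity (with a warning) when det = 0
m.compose(position, quaternion, scale);              // TRS in one call; decompose() inverts it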
+THREE.Ray.prototype={constructor:THREE.Ray,set:function(a,b){this.origin.copy(a);this.direction.copy(b);return this},copy:function(a){this.origin.copy(a.origin);this.direction.copy(a.direction);return this},at:function(a,b){return(b||new THREE.Vector3).copy(this.direction).multiplyScalar(a).add(this.origin)},recast:function(){var a=new THREE.Vector3;return function(b){this.origin.copy(this.at(b,a));return this}}(),closestPointToPoint:function(a,b){var c=b||new THREE.Vector3;c.subVectors( [...]
+var d=c.dot(this.direction);return 0>d?c.copy(this.origin):c.copy(this.direction).multiplyScalar(d).add(this.origin)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){var c=a.subVectors(b,this.origin).dot(this.direction);if(0>c)return this.origin.distanceTo(b);a.copy(this.direction).multiplyScalar(c).add(this.origin);return a.distanceTo(b)}}(),distanceSqToSegment:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3;return function(d,e,f,g){a.cop [...]
+b.copy(e).sub(d).normalize();c.copy(this.origin).sub(a);var h=.5*d.distanceTo(e),k=-this.direction.dot(b),l=c.dot(this.direction),p=-c.dot(b),q=c.lengthSq(),n=Math.abs(1-k*k),t;0<n?(d=k*p-l,e=k*l-p,t=h*n,0<=d?e>=-t?e<=t?(h=1/n,d*=h,e*=h,k=d*(d+k*e+2*l)+e*(k*d+e+2*p)+q):(e=h,d=Math.max(0,-(k*e+l)),k=-d*d+e*(e+2*p)+q):(e=-h,d=Math.max(0,-(k*e+l)),k=-d*d+e*(e+2*p)+q):e<=-t?(d=Math.max(0,-(-k*h+l)),e=0<d?-h:Math.min(Math.max(-h,-p),h),k=-d*d+e*(e+2*p)+q):e<=t?(d=0,e=Math.min(Math.max(-h,-p), [...]
+2*p)+q):(d=Math.max(0,-(k*h+l)),e=0<d?h:Math.min(Math.max(-h,-p),h),k=-d*d+e*(e+2*p)+q)):(e=0<k?-h:h,d=Math.max(0,-(k*e+l)),k=-d*d+e*(e+2*p)+q);f&&f.copy(this.direction).multiplyScalar(d).add(this.origin);g&&g.copy(b).multiplyScalar(e).add(a);return k}}(),isIntersectionSphere:function(a){return this.distanceToPoint(a.center)<=a.radius},intersectSphere:function(){var a=new THREE.Vector3;return function(b,c){a.subVectors(b.center,this.origin);var d=a.dot(this.direction),e=a.dot(a)-d*d,f=b. [...]
+if(e>f)return null;f=Math.sqrt(f-e);e=d-f;d+=f;return 0>e&&0>d?null:0>e?this.at(d,c):this.at(e,c)}}(),isIntersectionPlane:function(a){var b=a.distanceToPoint(this.origin);return 0===b||0>a.normal.dot(this.direction)*b?!0:!1},distanceToPlane:function(a){var b=a.normal.dot(this.direction);if(0==b)return 0==a.distanceToPoint(this.origin)?0:null;a=-(this.origin.dot(a.normal)+a.constant)/b;return 0<=a?a:null},intersectPlane:function(a,b){var c=this.distanceToPlane(a);return null===c?null:this [...]
+isIntersectionBox:function(){var a=new THREE.Vector3;return function(b){return null!==this.intersectBox(b,a)}}(),intersectBox:function(a,b){var c,d,e,f,g;d=1/this.direction.x;f=1/this.direction.y;g=1/this.direction.z;var h=this.origin;0<=d?(c=(a.min.x-h.x)*d,d*=a.max.x-h.x):(c=(a.max.x-h.x)*d,d*=a.min.x-h.x);0<=f?(e=(a.min.y-h.y)*f,f*=a.max.y-h.y):(e=(a.max.y-h.y)*f,f*=a.min.y-h.y);if(c>f||e>d)return null;if(e>c||c!==c)c=e;if(f<d||d!==d)d=f;0<=g?(e=(a.min.z-h.z)*g,g*=a.max.z-h.z):(e=(a.m [...]
+g,g*=a.min.z-h.z);if(c>g||e>d)return null;if(e>c||c!==c)c=e;if(g<d||d!==d)d=g;return 0>d?null:this.at(0<=c?c:d,b)},intersectTriangle:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3,d=new THREE.Vector3;return function(e,f,g,h,k){b.subVectors(f,e);c.subVectors(g,e);d.crossVectors(b,c);f=this.direction.dot(d);if(0<f){if(h)return null;h=1}else if(0>f)h=-1,f=-f;else return null;a.subVectors(this.origin,e);e=h*this.direction.dot(c.crossVectors(a,c));if(0>e)return nul [...]
+if(0>g||e+g>f)return null;e=-h*a.dot(d);return 0>e?null:this.at(e/f,k)}}(),applyMatrix4:function(a){this.direction.add(this.origin).applyMatrix4(a);this.origin.applyMatrix4(a);this.direction.sub(this.origin);this.direction.normalize();return this},equals:function(a){return a.origin.equals(this.origin)&&a.direction.equals(this.direction)},clone:function(){return(new THREE.Ray).copy(this)}};THREE.Sphere=function(a,b){this.center=void 0!==a?a:new THREE.Vector3;this.radius=void 0!==b?b:0};
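// Editorial sketch, not part of the upstream diff: THREE.Ray intersection helpers
// above. "origin", "direction" (assumed unit length), "box", and "plane" are hypothetical.
var ray = new THREE.Ray(origin, direction);
var p = ray.at(2.5);                                   // origin + 2.5 * direction
var hit = ray.intersectBox(box, new THREE.Vector3());  // null on a miss, the hit point otherwise
var dist = ray.distanceToPlane(plane);                 // null when there is no forward hit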
+THREE.Sphere.prototype={constructor:THREE.Sphere,set:function(a,b){this.center.copy(a);this.radius=b;return this},setFromPoints:function(){var a=new THREE.Box3;return function(b,c){var d=this.center;void 0!==c?d.copy(c):a.setFromPoints(b).center(d);for(var e=0,f=0,g=b.length;f<g;f++)e=Math.max(e,d.distanceToSquared(b[f]));this.radius=Math.sqrt(e);return this}}(),copy:function(a){this.center.copy(a.center);this.radius=a.radius;return this},empty:function(){return 0>=this.radius},containsP [...]
+this.radius*this.radius},distanceToPoint:function(a){return a.distanceTo(this.center)-this.radius},intersectsSphere:function(a){var b=this.radius+a.radius;return a.center.distanceToSquared(this.center)<=b*b},clampPoint:function(a,b){var c=this.center.distanceToSquared(a),d=b||new THREE.Vector3;d.copy(a);c>this.radius*this.radius&&(d.sub(this.center).normalize(),d.multiplyScalar(this.radius).add(this.center));return d},getBoundingBox:function(a){a=a||new THREE.Box3;a.set(this.center,this. [...]
+return a},applyMatrix4:function(a){this.center.applyMatrix4(a);this.radius*=a.getMaxScaleOnAxis();return this},translate:function(a){this.center.add(a);return this},equals:function(a){return a.center.equals(this.center)&&a.radius===this.radius},clone:function(){return(new THREE.Sphere).copy(this)}};
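// Editorial sketch, not part of the upstream diff: THREE.Sphere above.
// "center", "points", and "other" are hypothetical.
var s = new THREE.Sphere(center, 2);
s.setFromPoints(points);                  // center defaults to the points' bounding-box center
var touching = s.intersectsSphere(other); // compares squared center distance to (r1 + r2)^2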
+THREE.Frustum=function(a,b,c,d,e,f){this.planes=[void 0!==a?a:new THREE.Plane,void 0!==b?b:new THREE.Plane,void 0!==c?c:new THREE.Plane,void 0!==d?d:new THREE.Plane,void 0!==e?e:new THREE.Plane,void 0!==f?f:new THREE.Plane]};
+THREE.Frustum.prototype={constructor:THREE.Frustum,set:function(a,b,c,d,e,f){var g=this.planes;g[0].copy(a);g[1].copy(b);g[2].copy(c);g[3].copy(d);g[4].copy(e);g[5].copy(f);return this},copy:function(a){for(var b=this.planes,c=0;6>c;c++)b[c].copy(a.planes[c]);return this},setFromMatrix:function(a){var b=this.planes,c=a.elements;a=c[0];var d=c[1],e=c[2],f=c[3],g=c[4],h=c[5],k=c[6],l=c[7],p=c[8],q=c[9],n=c[10],t=c[11],r=c[12],s=c[13],u=c[14],c=c[15];b[0].setComponents(f-a,l-g,t-p,c-r).norm [...]
+a,l+g,t+p,c+r).normalize();b[2].setComponents(f+d,l+h,t+q,c+s).normalize();b[3].setComponents(f-d,l-h,t-q,c-s).normalize();b[4].setComponents(f-e,l-k,t-n,c-u).normalize();b[5].setComponents(f+e,l+k,t+n,c+u).normalize();return this},intersectsObject:function(){var a=new THREE.Sphere;return function(b){var c=b.geometry;null===c.boundingSphere&&c.computeBoundingSphere();a.copy(c.boundingSphere);a.applyMatrix4(b.matrixWorld);return this.intersectsSphere(a)}}(),intersectsSphere:function(a){va [...]
+c=a.center;a=-a.radius;for(var d=0;6>d;d++)if(b[d].distanceToPoint(c)<a)return!1;return!0},intersectsBox:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c){for(var d=this.planes,e=0;6>e;e++){var f=d[e];a.x=0<f.normal.x?c.min.x:c.max.x;b.x=0<f.normal.x?c.max.x:c.min.x;a.y=0<f.normal.y?c.min.y:c.max.y;b.y=0<f.normal.y?c.max.y:c.min.y;a.z=0<f.normal.z?c.min.z:c.max.z;b.z=0<f.normal.z?c.max.z:c.min.z;var g=f.distanceToPoint(a),f=f.distanceToPoint(b);if(0>g&&0>f)return! [...]
+containsPoint:function(a){for(var b=this.planes,c=0;6>c;c++)if(0>b[c].distanceToPoint(a))return!1;return!0},clone:function(){return(new THREE.Frustum).copy(this)}};THREE.Plane=function(a,b){this.normal=void 0!==a?a:new THREE.Vector3(1,0,0);this.constant=void 0!==b?b:0};
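// Editorial sketch, not part of the upstream diff: THREE.Frustum culling above.
// "viewProj" (projection * view matrix), "p", and "mesh" are hypothetical.
var frustum = new THREE.Frustum().setFromMatrix(viewProj); // extracts the six clip planes
frustum.containsPoint(p);       // inside means non-negative distance to all six planes
frustum.intersectsObject(mesh); // world-space bounding-sphere test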
+THREE.Plane.prototype={constructor:THREE.Plane,set:function(a,b){this.normal.copy(a);this.constant=b;return this},setComponents:function(a,b,c,d){this.normal.set(a,b,c);this.constant=d;return this},setFromNormalAndCoplanarPoint:function(a,b){this.normal.copy(a);this.constant=-b.dot(this.normal);return this},setFromCoplanarPoints:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d,e){d=a.subVectors(e,d).cross(b.subVectors(c,d)).normalize();this.setFromNormalAndCopla [...]
+c);return this}}(),copy:function(a){this.normal.copy(a.normal);this.constant=a.constant;return this},normalize:function(){var a=1/this.normal.length();this.normal.multiplyScalar(a);this.constant*=a;return this},negate:function(){this.constant*=-1;this.normal.negate();return this},distanceToPoint:function(a){return this.normal.dot(a)+this.constant},distanceToSphere:function(a){return this.distanceToPoint(a.center)-a.radius},projectPoint:function(a,b){return this.orthoPoint(a,b).sub(a).neg [...]
+b){var c=this.distanceToPoint(a);return(b||new THREE.Vector3).copy(this.normal).multiplyScalar(c)},isIntersectionLine:function(a){var b=this.distanceToPoint(a.start);a=this.distanceToPoint(a.end);return 0>b&&0<a||0>a&&0<b},intersectLine:function(){var a=new THREE.Vector3;return function(b,c){var d=c||new THREE.Vector3,e=b.delta(a),f=this.normal.dot(e);if(0==f){if(0==this.distanceToPoint(b.start))return d.copy(b.start)}else return f=-(b.start.dot(this.normal)+this.constant)/f,0>f||1<f?voi [...]
+coplanarPoint:function(a){return(a||new THREE.Vector3).copy(this.normal).multiplyScalar(-this.constant)},applyMatrix4:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Matrix3;return function(d,e){var f=e||c.getNormalMatrix(d),f=a.copy(this.normal).applyMatrix3(f),g=this.coplanarPoint(b);g.applyMatrix4(d);this.setFromNormalAndCoplanarPoint(f,g);return this}}(),translate:function(a){this.constant-=a.dot(this.normal);return this},equals:function(a){return a.normal.equals(t [...]
+a.constant==this.constant},clone:function(){return(new THREE.Plane).copy(this)}};
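// Editorial sketch, not part of the upstream diff: THREE.Plane stores the
// normal-and-constant form n . p + constant = 0.
var plane = new THREE.Plane(new THREE.Vector3(0, 1, 0), -2); // the plane y = 2
plane.distanceToPoint(new THREE.Vector3(0, 5, 0));           // signed distance: 3
plane.normalize(); // rescales normal and constant together so |normal| = 1
plane.negate();    // flips orientation: both normal and constant change sign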
+THREE.Math={generateUUID:function(){var a="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz".split(""),b=Array(36),c=0,d;return function(){for(var e=0;36>e;e++)8==e||13==e||18==e||23==e?b[e]="-":14==e?b[e]="4":(2>=c&&(c=33554432+16777216*Math.random()|0),d=c&15,c>>=4,b[e]=a[19==e?d&3|8:d]);return b.join("")}}(),clamp:function(a,b,c){return a<b?b:a>c?c:a},clampBottom:function(a,b){return a<b?b:a},mapLinear:function(a,b,c,d,e){return d+(a-b)*(e-d)/(c-b)},smoothstep:function(a [...]
+b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*(3-2*a)},smootherstep:function(a,b,c){if(a<=b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*a*(a*(6*a-15)+10)},random16:function(){return(65280*Math.random()+255*Math.random())/65535},randInt:function(a,b){return Math.floor(this.randFloat(a,b))},randFloat:function(a,b){return a+Math.random()*(b-a)},randFloatSpread:function(a){return a*(.5-Math.random())},degToRad:function(){var a=Math.PI/180;return function(b){return b*a}}(),radTo [...]
+180/Math.PI;return function(b){return b*a}}(),isPowerOfTwo:function(a){return 0===(a&a-1)&&0!==a},nextPowerOfTwo:function(a){a--;a|=a>>1;a|=a>>2;a|=a>>4;a|=a>>8;a|=a>>16;a++;return a}};
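// Editorial sketch, not part of the upstream diff: the THREE.Math helpers above.
THREE.Math.clamp(1.7, 0, 1);             // -> 1
THREE.Math.mapLinear(5, 0, 10, 0, 100);  // -> 50 (linear remap between ranges)
THREE.Math.degToRad(180);                // -> Math.PI (cached PI/180 factor)
THREE.Math.isPowerOfTwo(512);            // -> true, via (a & (a - 1)) === 0 && a !== 0
THREE.Math.nextPowerOfTwo(300);          // -> 512 (bit-smearing: a |= a >> 1 ... a >> 16)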
+THREE.Spline=function(a){function b(a,b,c,d,e,f,g){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*g+(-3*(b-c)-2*a-d)*f+a*e+b}this.points=a;var c=[],d={x:0,y:0,z:0},e,f,g,h,k,l,p,q,n;this.initFromArray=function(a){this.points=[];for(var b=0;b<a.length;b++)this.points[b]={x:a[b][0],y:a[b][1],z:a[b][2]}};this.getPoint=function(a){e=(this.points.length-1)*a;f=Math.floor(e);g=e-f;c[0]=0===f?f:f-1;c[1]=f;c[2]=f>this.points.length-2?this.points.length-1:f+1;c[3]=f>this.points.length-3?this.points.le [...]
+2;l=this.points[c[0]];p=this.points[c[1]];q=this.points[c[2]];n=this.points[c[3]];h=g*g;k=g*h;d.x=b(l.x,p.x,q.x,n.x,g,h,k);d.y=b(l.y,p.y,q.y,n.y,g,h,k);d.z=b(l.z,p.z,q.z,n.z,g,h,k);return d};this.getControlPointsArray=function(){var a,b,c=this.points.length,d=[];for(a=0;a<c;a++)b=this.points[a],d[a]=[b.x,b.y,b.z];return d};this.getLength=function(a){var b,c,d,e=b=b=0,f=new THREE.Vector3,g=new THREE.Vector3,h=[],k=0;h[0]=0;a||(a=100);c=this.points.length*a;f.copy(this.points[0]);for(a=1;a [...]
+a/c,d=this.getPoint(b),g.copy(d),k+=g.distanceTo(f),f.copy(d),b*=this.points.length-1,b=Math.floor(b),b!=e&&(h[b]=k,e=b);h[h.length]=k;return{chunks:h,total:k}};this.reparametrizeByArcLength=function(a){var b,c,d,e,f,g,h=[],k=new THREE.Vector3,n=this.getLength();h.push(k.copy(this.points[0]).clone());for(b=1;b<this.points.length;b++){c=n.chunks[b]-n.chunks[b-1];g=Math.ceil(a*c/n.total);e=(b-1)/(this.points.length-1);f=b/(this.points.length-1);for(c=1;c<g-1;c++)d=e+1/g*c*(f-e),d=this.getP [...]
+h.push(k.copy(this.points[b]).clone())}this.points=h}};THREE.Triangle=function(a,b,c){this.a=void 0!==a?a:new THREE.Vector3;this.b=void 0!==b?b:new THREE.Vector3;this.c=void 0!==c?c:new THREE.Vector3};THREE.Triangle.normal=function(){var a=new THREE.Vector3;return function(b,c,d,e){e=e||new THREE.Vector3;e.subVectors(d,c);a.subVectors(b,c);e.cross(a);b=e.lengthSq();return 0<b?e.multiplyScalar(1/Math.sqrt(b)):e.set(0,0,0)}}();
+THREE.Triangle.barycoordFromPoint=function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3;return function(d,e,f,g,h){a.subVectors(g,e);b.subVectors(f,e);c.subVectors(d,e);d=a.dot(a);e=a.dot(b);f=a.dot(c);var k=b.dot(b);g=b.dot(c);var l=d*k-e*e;h=h||new THREE.Vector3;if(0==l)return h.set(-2,-1,-1);l=1/l;k=(k*f-e*g)*l;d=(d*g-e*f)*l;return h.set(1-k-d,d,k)}}();
+THREE.Triangle.containsPoint=function(){var a=new THREE.Vector3;return function(b,c,d,e){b=THREE.Triangle.barycoordFromPoint(b,c,d,e,a);return 0<=b.x&&0<=b.y&&1>=b.x+b.y}}();
+THREE.Triangle.prototype={constructor:THREE.Triangle,set:function(a,b,c){this.a.copy(a);this.b.copy(b);this.c.copy(c);return this},setFromPointsAndIndices:function(a,b,c,d){this.a.copy(a[b]);this.b.copy(a[c]);this.c.copy(a[d]);return this},copy:function(a){this.a.copy(a.a);this.b.copy(a.b);this.c.copy(a.c);return this},area:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(){a.subVectors(this.c,this.b);b.subVectors(this.a,this.b);return.5*a.cross(b).length()}}(),midp [...]
+new THREE.Vector3).addVectors(this.a,this.b).add(this.c).multiplyScalar(1/3)},normal:function(a){return THREE.Triangle.normal(this.a,this.b,this.c,a)},plane:function(a){return(a||new THREE.Plane).setFromCoplanarPoints(this.a,this.b,this.c)},barycoordFromPoint:function(a,b){return THREE.Triangle.barycoordFromPoint(a,this.a,this.b,this.c,b)},containsPoint:function(a){return THREE.Triangle.containsPoint(a,this.a,this.b,this.c)},equals:function(a){return a.a.equals(this.a)&&a.b.equals(this.b [...]
+clone:function(){return(new THREE.Triangle).copy(this)}};THREE.Clock=function(a){this.autoStart=void 0!==a?a:!0;this.elapsedTime=this.oldTime=this.startTime=0;this.running=!1};
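// Editorial sketch, not part of the upstream diff: THREE.Triangle above.
// "a", "b", "c", and "p" are hypothetical THREE.Vector3 values.
var tri = new THREE.Triangle(a, b, c);
tri.area();            // 0.5 * |(c - b) x (a - b)|
tri.normal();          // unit normal, or (0,0,0) for a degenerate triangle
tri.containsPoint(p);  // barycentric-coordinate test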
+THREE.Clock.prototype={constructor:THREE.Clock,start:function(){this.oldTime=this.startTime=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now();this.running=!0},stop:function(){this.getElapsedTime();this.running=!1},getElapsedTime:function(){this.getDelta();return this.elapsedTime},getDelta:function(){var a=0;this.autoStart&&!this.running&&this.start();if(this.running){var b=void 0!==self.performance&&void 0!==self.performance.now?self.performance.n [...]
+a=.001*(b-this.oldTime);this.oldTime=b;this.elapsedTime+=a}return a}};THREE.EventDispatcher=function(){};
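// Editorial sketch, not part of the upstream diff: THREE.Clock above reports seconds.
var clock = new THREE.Clock();  // autoStart defaults to true
function tick() {
  var dt = clock.getDelta();    // 0.001 * (now - oldTime); first call also starts the clock
  // ... advance the simulation by dt ...
}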
+THREE.EventDispatcher.prototype={constructor:THREE.EventDispatcher,apply:function(a){a.addEventListener=THREE.EventDispatcher.prototype.addEventListener;a.hasEventListener=THREE.EventDispatcher.prototype.hasEventListener;a.removeEventListener=THREE.EventDispatcher.prototype.removeEventListener;a.dispatchEvent=THREE.EventDispatcher.prototype.dispatchEvent},addEventListener:function(a,b){void 0===this._listeners&&(this._listeners={});var c=this._listeners;void 0===c[a]&&(c[a]=[]);-1===c[a] [...]
+c[a].push(b)},hasEventListener:function(a,b){if(void 0===this._listeners)return!1;var c=this._listeners;return void 0!==c[a]&&-1!==c[a].indexOf(b)?!0:!1},removeEventListener:function(a,b){if(void 0!==this._listeners){var c=this._listeners[a];if(void 0!==c){var d=c.indexOf(b);-1!==d&&c.splice(d,1)}}},dispatchEvent:function(a){if(void 0!==this._listeners){var b=this._listeners[a.type];if(void 0!==b){a.target=this;for(var c=[],d=b.length,e=0;e<d;e++)c[e]=b[e];for(e=0;e<d;e++)c[e].call(this,a)}}}};
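// Editorial sketch, not part of the upstream diff: THREE.EventDispatcher is a mixin;
// apply() grafts the four listener methods onto any plain object.
var emitter = {};
THREE.EventDispatcher.prototype.apply(emitter);
emitter.addEventListener("ready", function (ev) {
  // dispatchEvent sets ev.target = emitter; extra fields ride along on the event object
});
emitter.dispatchEvent({ type: "ready" });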
+(function(a){a.Raycaster=function(b,c,f,g){this.ray=new a.Ray(b,c);this.near=f||0;this.far=g||Infinity;this.params={Sprite:{},Mesh:{},PointCloud:{threshold:1},LOD:{},Line:{}}};var b=function(a,b){return a.distance-b.distance},c=function(a,b,f,g){a.raycast(b,f);if(!0===g){a=a.children;g=0;for(var h=a.length;g<h;g++)c(a[g],b,f,!0)}};a.Raycaster.prototype={constructor:a.Raycaster,precision:1E-4,linePrecision:1,set:function(a,b){this.ray.set(a,b)},setFromCamera:function(b,c){c instanceof a.P [...]
+(this.ray.origin.copy(c.position),this.ray.direction.set(b.x,b.y,.5).unproject(c).sub(c.position).normalize()):c instanceof a.OrthographicCamera?(this.ray.origin.set(b.x,b.y,-1).unproject(c),this.ray.direction.set(0,0,-1).transformDirection(c.matrixWorld)):a.error("THREE.Raycaster: Unsupported camera type.")},intersectObject:function(a,e){var f=[];c(a,this,f,e);f.sort(b);return f},intersectObjects:function(d,e){var f=[];if(!1===d instanceof Array)return a.warn("THREE.Raycaster.intersectO [...]
+f;for(var g=0,h=d.length;g<h;g++)c(d[g],this,f,e);f.sort(b);return f}}})(THREE);
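// Editorial sketch, not part of the upstream diff: THREE.Raycaster above.
// "ndc" ({x, y} in [-1, 1]), "camera", and "scene" are hypothetical.
var caster = new THREE.Raycaster();
caster.setFromCamera(ndc, camera);                         // builds the pick ray from the camera
var hits = caster.intersectObjects(scene.children, true);  // recursive; sorted by ascending distance
if (hits.length) { /* hits[0] is the nearest intersection */ }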
+THREE.Object3D=function(){Object.defineProperty(this,"id",{value:THREE.Object3DIdCount++});this.uuid=THREE.Math.generateUUID();this.name="";this.type="Object3D";this.parent=void 0;this.children=[];this.up=THREE.Object3D.DefaultUp.clone();var a=new THREE.Vector3,b=new THREE.Euler,c=new THREE.Quaternion,d=new THREE.Vector3(1,1,1);b.onChange(function(){c.setFromEuler(b,!1)});c.onChange(function(){b.setFromQuaternion(c,void 0,!1)});Object.defineProperties(this,{position:{enumerable:!0,value: [...]
+value:b},quaternion:{enumerable:!0,value:c},scale:{enumerable:!0,value:d}});this.rotationAutoUpdate=!0;this.matrix=new THREE.Matrix4;this.matrixWorld=new THREE.Matrix4;this.matrixAutoUpdate=!0;this.matrixWorldNeedsUpdate=!1;this.visible=!0;this.receiveShadow=this.castShadow=!1;this.frustumCulled=!0;this.renderOrder=0;this.userData={}};THREE.Object3D.DefaultUp=new THREE.Vector3(0,1,0);
+THREE.Object3D.prototype={constructor:THREE.Object3D,get eulerOrder(){THREE.warn("THREE.Object3D: .eulerOrder has been moved to .rotation.order.");return this.rotation.order},set eulerOrder(a){THREE.warn("THREE.Object3D: .eulerOrder has been moved to .rotation.order.");this.rotation.order=a},get useQuaternion(){THREE.warn("THREE.Object3D: .useQuaternion has been removed. The library now uses quaternions by default.")},set useQuaternion(a){THREE.warn("THREE.Object3D: .useQuaternion has be [...]
+applyMatrix:function(a){this.matrix.multiplyMatrices(a,this.matrix);this.matrix.decompose(this.position,this.quaternion,this.scale)},setRotationFromAxisAngle:function(a,b){this.quaternion.setFromAxisAngle(a,b)},setRotationFromEuler:function(a){this.quaternion.setFromEuler(a,!0)},setRotationFromMatrix:function(a){this.quaternion.setFromRotationMatrix(a)},setRotationFromQuaternion:function(a){this.quaternion.copy(a)},rotateOnAxis:function(){var a=new THREE.Quaternion;return function(b,c){a [...]
+c);this.quaternion.multiply(a);return this}}(),rotateX:function(){var a=new THREE.Vector3(1,0,0);return function(b){return this.rotateOnAxis(a,b)}}(),rotateY:function(){var a=new THREE.Vector3(0,1,0);return function(b){return this.rotateOnAxis(a,b)}}(),rotateZ:function(){var a=new THREE.Vector3(0,0,1);return function(b){return this.rotateOnAxis(a,b)}}(),translateOnAxis:function(){var a=new THREE.Vector3;return function(b,c){a.copy(b).applyQuaternion(this.quaternion);this.position.add(a.m [...]
+return this}}(),translate:function(a,b){THREE.warn("THREE.Object3D: .translate() has been removed. Use .translateOnAxis( axis, distance ) instead.");return this.translateOnAxis(b,a)},translateX:function(){var a=new THREE.Vector3(1,0,0);return function(b){return this.translateOnAxis(a,b)}}(),translateY:function(){var a=new THREE.Vector3(0,1,0);return function(b){return this.translateOnAxis(a,b)}}(),translateZ:function(){var a=new THREE.Vector3(0,0,1);return function(b){return this.transla [...]
+b)}}(),localToWorld:function(a){return a.applyMatrix4(this.matrixWorld)},worldToLocal:function(){var a=new THREE.Matrix4;return function(b){return b.applyMatrix4(a.getInverse(this.matrixWorld))}}(),lookAt:function(){var a=new THREE.Matrix4;return function(b){a.lookAt(b,this.position,this.up);this.quaternion.setFromRotationMatrix(a)}}(),add:function(a){if(1<arguments.length){for(var b=0;b<arguments.length;b++)this.add(arguments[b]);return this}if(a===this)return THREE.error("THREE.Object3 [...]
+a),this;a instanceof THREE.Object3D?(void 0!==a.parent&&a.parent.remove(a),a.parent=this,a.dispatchEvent({type:"added"}),this.children.push(a)):THREE.error("THREE.Object3D.add: object not an instance of THREE.Object3D.",a);return this},remove:function(a){if(1<arguments.length)for(var b=0;b<arguments.length;b++)this.remove(arguments[b]);b=this.children.indexOf(a);-1!==b&&(a.parent=void 0,a.dispatchEvent({type:"removed"}),this.children.splice(b,1))},getChildByName:function(a){THREE.warn("T [...]
+return this.getObjectByName(a)},getObjectById:function(a){return this.getObjectByProperty("id",a)},getObjectByName:function(a){return this.getObjectByProperty("name",a)},getObjectByProperty:function(a,b){if(this[a]===b)return this;for(var c=0,d=this.children.length;c<d;c++){var e=this.children[c].getObjectByProperty(a,b);if(void 0!==e)return e}},getWorldPosition:function(a){a=a||new THREE.Vector3;this.updateMatrixWorld(!0);return a.setFromMatrixPosition(this.matrixWorld)},getWorldQuatern [...]
+new THREE.Vector3,b=new THREE.Vector3;return function(c){c=c||new THREE.Quaternion;this.updateMatrixWorld(!0);this.matrixWorld.decompose(a,c,b);return c}}(),getWorldRotation:function(){var a=new THREE.Quaternion;return function(b){b=b||new THREE.Euler;this.getWorldQuaternion(a);return b.setFromQuaternion(a,this.rotation.order,!1)}}(),getWorldScale:function(){var a=new THREE.Vector3,b=new THREE.Quaternion;return function(c){c=c||new THREE.Vector3;this.updateMatrixWorld(!0);this.matrixWorl [...]
+b,c);return c}}(),getWorldDirection:function(){var a=new THREE.Quaternion;return function(b){b=b||new THREE.Vector3;this.getWorldQuaternion(a);return b.set(0,0,1).applyQuaternion(a)}}(),raycast:function(){},traverse:function(a){a(this);for(var b=0,c=this.children.length;b<c;b++)this.children[b].traverse(a)},traverseVisible:function(a){if(!1!==this.visible){a(this);for(var b=0,c=this.children.length;b<c;b++)this.children[b].traverseVisible(a)}},traverseAncestors:function(a){this.parent&&( [...]
+this.parent.traverseAncestors(a))},updateMatrix:function(){this.matrix.compose(this.position,this.quaternion,this.scale);this.matrixWorldNeedsUpdate=!0},updateMatrixWorld:function(a){!0===this.matrixAutoUpdate&&this.updateMatrix();if(!0===this.matrixWorldNeedsUpdate||!0===a)void 0===this.parent?this.matrixWorld.copy(this.matrix):this.matrixWorld.multiplyMatrices(this.parent.matrixWorld,this.matrix),this.matrixWorldNeedsUpdate=!1,a=!0;for(var b=0,c=this.children.length;b<c;b++)this.childr [...]
+toJSON:function(){var a={metadata:{version:4.3,type:"Object",generator:"ObjectExporter"}},b={},c={},d=function(b){void 0===a.materials&&(a.materials=[]);if(void 0===c[b.uuid]){var d=b.toJSON();delete d.metadata;c[b.uuid]=d;a.materials.push(d)}return b.uuid},e=function(c){var g={};g.uuid=c.uuid;g.type=c.type;""!==c.name&&(g.name=c.name);"{}"!==JSON.stringify(c.userData)&&(g.userData=c.userData);!0!==c.visible&&(g.visible=c.visible);if(c instanceof THREE.PerspectiveCamera)g.fov=c.fov,g.asp [...]
+g.near=c.near,g.far=c.far;else if(c instanceof THREE.OrthographicCamera)g.left=c.left,g.right=c.right,g.top=c.top,g.bottom=c.bottom,g.near=c.near,g.far=c.far;else if(c instanceof THREE.AmbientLight)g.color=c.color.getHex();else if(c instanceof THREE.DirectionalLight)g.color=c.color.getHex(),g.intensity=c.intensity;else if(c instanceof THREE.PointLight)g.color=c.color.getHex(),g.intensity=c.intensity,g.distance=c.distance,g.decay=c.decay;else if(c instanceof THREE.SpotLight)g.color=c.colo [...]
+g.intensity=c.intensity,g.distance=c.distance,g.angle=c.angle,g.exponent=c.exponent,g.decay=c.decay;else if(c instanceof THREE.HemisphereLight)g.color=c.color.getHex(),g.groundColor=c.groundColor.getHex();else if(c instanceof THREE.Mesh||c instanceof THREE.Line||c instanceof THREE.PointCloud){var h=c.geometry;void 0===a.geometries&&(a.geometries=[]);if(void 0===b[h.uuid]){var k=h.toJSON();delete k.metadata;b[h.uuid]=k;a.geometries.push(k)}g.geometry=h.uuid;g.material=d(c.material);c inst [...]
+(g.mode=c.mode)}else c instanceof THREE.Sprite&&(g.material=d(c.material));g.matrix=c.matrix.toArray();if(0<c.children.length)for(g.children=[],h=0;h<c.children.length;h++)g.children.push(e(c.children[h]));return g};a.object=e(this);return a},clone:function(a,b){void 0===a&&(a=new THREE.Object3D);void 0===b&&(b=!0);a.name=this.name;a.up.copy(this.up);a.position.copy(this.position);a.quaternion.copy(this.quaternion);a.scale.copy(this.scale);a.rotationAutoUpdate=this.rotationAutoUpdate;a.m [...]
+a.matrixWorld.copy(this.matrixWorld);a.matrixAutoUpdate=this.matrixAutoUpdate;a.matrixWorldNeedsUpdate=this.matrixWorldNeedsUpdate;a.visible=this.visible;a.castShadow=this.castShadow;a.receiveShadow=this.receiveShadow;a.frustumCulled=this.frustumCulled;a.userData=JSON.parse(JSON.stringify(this.userData));if(!0===b)for(var c=0;c<this.children.length;c++)a.add(this.children[c].clone());return a}};THREE.EventDispatcher.prototype.apply(THREE.Object3D.prototype);THREE.Object3DIdCount=0;
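// Editorial sketch, not part of the upstream diff: the THREE.Object3D scene-graph API above.
var root = new THREE.Object3D();
var child = new THREE.Object3D();
child.name = "probe";
root.add(child);               // re-parents if needed and dispatches an "added" event
child.rotateY(Math.PI / 2);    // rotates about the local Y axis via the quaternion
root.updateMatrixWorld(true);  // recomputes matrixWorld through the whole subtree
root.getObjectByName("probe"); // -> child (depth-first property match)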
+THREE.Face3=function(a,b,c,d,e,f){this.a=a;this.b=b;this.c=c;this.normal=d instanceof THREE.Vector3?d:new THREE.Vector3;this.vertexNormals=d instanceof Array?d:[];this.color=e instanceof THREE.Color?e:new THREE.Color;this.vertexColors=e instanceof Array?e:[];this.vertexTangents=[];this.materialIndex=void 0!==f?f:0};
+THREE.Face3.prototype={constructor:THREE.Face3,clone:function(){var a=new THREE.Face3(this.a,this.b,this.c);a.normal.copy(this.normal);a.color.copy(this.color);a.materialIndex=this.materialIndex;for(var b=0,c=this.vertexNormals.length;b<c;b++)a.vertexNormals[b]=this.vertexNormals[b].clone();b=0;for(c=this.vertexColors.length;b<c;b++)a.vertexColors[b]=this.vertexColors[b].clone();b=0;for(c=this.vertexTangents.length;b<c;b++)a.vertexTangents[b]=this.vertexTangents[b].clone();return a}};
+THREE.Face4=function(a,b,c,d,e,f,g){THREE.warn("THREE.Face4 has been removed. A THREE.Face3 will be created instead.");return new THREE.Face3(a,b,c,e,f,g)};THREE.BufferAttribute=function(a,b){this.array=a;this.itemSize=b;this.needsUpdate=!1};
+THREE.BufferAttribute.prototype={constructor:THREE.BufferAttribute,get length(){return this.array.length},copyAt:function(a,b,c){a*=this.itemSize;c*=b.itemSize;for(var d=0,e=this.itemSize;d<e;d++)this.array[a+d]=b.array[c+d];return this},set:function(a,b){void 0===b&&(b=0);this.array.set(a,b);return this},setX:function(a,b){this.array[a*this.itemSize]=b;return this},setY:function(a,b){this.array[a*this.itemSize+1]=b;return this},setZ:function(a,b){this.array[a*this.itemSize+2]=b;return t [...]
+b,c){a*=this.itemSize;this.array[a]=b;this.array[a+1]=c;return this},setXYZ:function(a,b,c,d){a*=this.itemSize;this.array[a]=b;this.array[a+1]=c;this.array[a+2]=d;return this},setXYZW:function(a,b,c,d,e){a*=this.itemSize;this.array[a]=b;this.array[a+1]=c;this.array[a+2]=d;this.array[a+3]=e;return this},clone:function(){return new THREE.BufferAttribute(new this.array.constructor(this.array),this.itemSize)}};
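// Editorial sketch, not part of the upstream diff: THREE.BufferAttribute above.
var positions = new THREE.BufferAttribute(new Float32Array(3 * 3), 3); // 3 vertices, itemSize 3
positions.setXYZ(0, 0, 0, 0);
positions.setXYZ(1, 1, 0, 0);
positions.setXYZ(2, 0, 1, 0);
positions.needsUpdate = true; // flag a consumer can poll after in-place edits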
+THREE.Int8Attribute=function(a,b){THREE.warn("THREE.Int8Attribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};THREE.Uint8Attribute=function(a,b){THREE.warn("THREE.Uint8Attribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};
+THREE.Uint8ClampedAttribute=function(a,b){THREE.warn("THREE.Uint8ClampedAttribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};THREE.Int16Attribute=function(a,b){THREE.warn("THREE.Int16Attribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};
+THREE.Uint16Attribute=function(a,b){THREE.warn("THREE.Uint16Attribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};THREE.Int32Attribute=function(a,b){THREE.warn("THREE.Int32Attribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};
+THREE.Uint32Attribute=function(a,b){THREE.warn("THREE.Uint32Attribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};THREE.Float32Attribute=function(a,b){THREE.warn("THREE.Float32Attribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};
+THREE.Float64Attribute=function(a,b){THREE.warn("THREE.Float64Attribute has been removed. Use THREE.BufferAttribute( array, itemSize ) instead.");return new THREE.BufferAttribute(a,b)};THREE.DynamicBufferAttribute=function(a,b){THREE.BufferAttribute.call(this,a,b);this.updateRange={offset:0,count:-1}};THREE.DynamicBufferAttribute.prototype=Object.create(THREE.BufferAttribute.prototype);THREE.DynamicBufferAttribute.prototype.constructor=THREE.DynamicBufferAttribute;
+THREE.DynamicBufferAttribute.prototype.clone=function(){return new THREE.DynamicBufferAttribute(new this.array.constructor(this.array),this.itemSize)};THREE.BufferGeometry=function(){Object.defineProperty(this,"id",{value:THREE.GeometryIdCount++});this.uuid=THREE.Math.generateUUID();this.name="";this.type="BufferGeometry";this.attributes={};this.attributesKeys=[];this.offsets=this.drawcalls=[];this.boundingSphere=this.boundingBox=null};
+THREE.BufferGeometry.prototype={constructor:THREE.BufferGeometry,addAttribute:function(a,b,c){!1===b instanceof THREE.BufferAttribute?(THREE.warn("THREE.BufferGeometry: .addAttribute() now expects ( name, attribute )."),this.attributes[a]={array:b,itemSize:c}):(this.attributes[a]=b,this.attributesKeys=Object.keys(this.attributes))},getAttribute:function(a){return this.attributes[a]},addDrawCall:function(a,b,c){this.drawcalls.push({start:a,count:b,index:void 0!==c?c:0})},applyMatrix:funct [...]
+this.attributes.position;void 0!==b&&(a.applyToVector3Array(b.array),b.needsUpdate=!0);b=this.attributes.normal;void 0!==b&&((new THREE.Matrix3).getNormalMatrix(a).applyToVector3Array(b.array),b.needsUpdate=!0);null!==this.boundingBox&&this.computeBoundingBox();null!==this.boundingSphere&&this.computeBoundingSphere()},center:function(){this.computeBoundingBox();var a=this.boundingBox.center().negate();this.applyMatrix((new THREE.Matrix4).setPosition(a));return a},fromGeometry:function(a, [...]
+var c=a.vertices,d=a.faces,e=a.faceVertexUvs,f=b.vertexColors,g=0<e[0].length,h=3==d[0].vertexNormals.length,k=new Float32Array(9*d.length);this.addAttribute("position",new THREE.BufferAttribute(k,3));var l=new Float32Array(9*d.length);this.addAttribute("normal",new THREE.BufferAttribute(l,3));if(f!==THREE.NoColors){var p=new Float32Array(9*d.length);this.addAttribute("color",new THREE.BufferAttribute(p,3))}if(!0===g){var q=new Float32Array(6*d.length);this.addAttribute("uv",new THREE.Bu [...]
+2))}for(var n=0,t=0,r=0;n<d.length;n++,t+=6,r+=9){var s=d[n],u=c[s.a],v=c[s.b],x=c[s.c];k[r]=u.x;k[r+1]=u.y;k[r+2]=u.z;k[r+3]=v.x;k[r+4]=v.y;k[r+5]=v.z;k[r+6]=x.x;k[r+7]=x.y;k[r+8]=x.z;!0===h?(u=s.vertexNormals[0],v=s.vertexNormals[1],x=s.vertexNormals[2],l[r]=u.x,l[r+1]=u.y,l[r+2]=u.z,l[r+3]=v.x,l[r+4]=v.y,l[r+5]=v.z,l[r+6]=x.x,l[r+7]=x.y,l[r+8]=x.z):(u=s.normal,l[r]=u.x,l[r+1]=u.y,l[r+2]=u.z,l[r+3]=u.x,l[r+4]=u.y,l[r+5]=u.z,l[r+6]=u.x,l[r+7]=u.y,l[r+8]=u.z);f===THREE.FaceColors?(s=s.co [...]
+s.r,p[r+1]=s.g,p[r+2]=s.b,p[r+3]=s.r,p[r+4]=s.g,p[r+5]=s.b,p[r+6]=s.r,p[r+7]=s.g,p[r+8]=s.b):f===THREE.VertexColors&&(u=s.vertexColors[0],v=s.vertexColors[1],s=s.vertexColors[2],p[r]=u.r,p[r+1]=u.g,p[r+2]=u.b,p[r+3]=v.r,p[r+4]=v.g,p[r+5]=v.b,p[r+6]=s.r,p[r+7]=s.g,p[r+8]=s.b);!0===g&&(s=e[0][n][0],u=e[0][n][1],v=e[0][n][2],q[t]=s.x,q[t+1]=s.y,q[t+2]=u.x,q[t+3]=u.y,q[t+4]=v.x,q[t+5]=v.y)}this.computeBoundingSphere();return this},computeBoundingBox:function(){var a=new THREE.Vector3;return  [...]
+this.boundingBox&&(this.boundingBox=new THREE.Box3);var b=this.attributes.position.array;if(b){var c=this.boundingBox;c.makeEmpty();for(var d=0,e=b.length;d<e;d+=3)a.set(b[d],b[d+1],b[d+2]),c.expandByPoint(a)}if(void 0===b||0===b.length)this.boundingBox.min.set(0,0,0),this.boundingBox.max.set(0,0,0);(isNaN(this.boundingBox.min.x)||isNaN(this.boundingBox.min.y)||isNaN(this.boundingBox.min.z))&&THREE.error('THREE.BufferGeometry.computeBoundingBox: Computed min/max have NaN values. The "pos [...]
+computeBoundingSphere:function(){var a=new THREE.Box3,b=new THREE.Vector3;return function(){null===this.boundingSphere&&(this.boundingSphere=new THREE.Sphere);var c=this.attributes.position.array;if(c){a.makeEmpty();for(var d=this.boundingSphere.center,e=0,f=c.length;e<f;e+=3)b.set(c[e],c[e+1],c[e+2]),a.expandByPoint(b);a.center(d);for(var g=0,e=0,f=c.length;e<f;e+=3)b.set(c[e],c[e+1],c[e+2]),g=Math.max(g,d.distanceToSquared(b));this.boundingSphere.radius=Math.sqrt(g);isNaN(this.bounding [...]
+THREE.error('THREE.BufferGeometry.computeBoundingSphere(): Computed radius is NaN. The "position" attribute is likely to have NaN values.')}}}(),computeFaceNormals:function(){},computeVertexNormals:function(){var a=this.attributes;if(a.position){var b=a.position.array;if(void 0===a.normal)this.addAttribute("normal",new THREE.BufferAttribute(new Float32Array(b.length),3));else for(var c=a.normal.array,d=0,e=c.length;d<e;d++)c[d]=0;var c=a.normal.array,f,g,h,k=new THREE.Vector3,l=new THREE [...]
+p=new THREE.Vector3,q=new THREE.Vector3,n=new THREE.Vector3;if(a.index)for(var t=a.index.array,r=0<this.offsets.length?this.offsets:[{start:0,count:t.length,index:0}],s=0,u=r.length;s<u;++s){e=r[s].start;f=r[s].count;for(var v=r[s].index,d=e,e=e+f;d<e;d+=3)f=3*(v+t[d]),g=3*(v+t[d+1]),h=3*(v+t[d+2]),k.fromArray(b,f),l.fromArray(b,g),p.fromArray(b,h),q.subVectors(p,l),n.subVectors(k,l),q.cross(n),c[f]+=q.x,c[f+1]+=q.y,c[f+2]+=q.z,c[g]+=q.x,c[g+1]+=q.y,c[g+2]+=q.z,c[h]+=q.x,c[h+1]+=q.y,c[h+ [...]
+0,e=b.length;d<e;d+=9)k.fromArray(b,d),l.fromArray(b,d+3),p.fromArray(b,d+6),q.subVectors(p,l),n.subVectors(k,l),q.cross(n),c[d]=q.x,c[d+1]=q.y,c[d+2]=q.z,c[d+3]=q.x,c[d+4]=q.y,c[d+5]=q.z,c[d+6]=q.x,c[d+7]=q.y,c[d+8]=q.z;this.normalizeNormals();a.normal.needsUpdate=!0}},computeTangents:function(){function a(a,b,c){q.fromArray(d,3*a);n.fromArray(d,3*b);t.fromArray(d,3*c);r.fromArray(f,2*a);s.fromArray(f,2*b);u.fromArray(f,2*c);v=n.x-q.x;x=t.x-q.x;D=n.y-q.y;w=t.y-q.y;y=n.z-q.z;A=t.z-q.z;E= [...]
+u.x-r.x;F=s.y-r.y;z=u.y-r.y;I=1/(E*z-G*F);U.set((z*v-F*x)*I,(z*D-F*w)*I,(z*y-F*A)*I);M.set((E*x-G*v)*I,(E*w-G*D)*I,(E*A-G*y)*I);k[a].add(U);k[b].add(U);k[c].add(U);l[a].add(M);l[b].add(M);l[c].add(M)}function b(a){ha.fromArray(e,3*a);O.copy(ha);ba=k[a];oa.copy(ba);oa.sub(ha.multiplyScalar(ha.dot(ba))).normalize();ja.crossVectors(O,ba);qa=ja.dot(l[a]);ca=0>qa?-1:1;h[4*a]=oa.x;h[4*a+1]=oa.y;h[4*a+2]=oa.z;h[4*a+3]=ca}if(void 0===this.attributes.index||void 0===this.attributes.position||void [...]
+void 0===this.attributes.uv)THREE.warn("THREE.BufferGeometry: Missing required attributes (index, position, normal or uv) in BufferGeometry.computeTangents()");else{var c=this.attributes.index.array,d=this.attributes.position.array,e=this.attributes.normal.array,f=this.attributes.uv.array,g=d.length/3;void 0===this.attributes.tangent&&this.addAttribute("tangent",new THREE.BufferAttribute(new Float32Array(4*g),4));for(var h=this.attributes.tangent.array,k=[],l=[],p=0;p<g;p++)k[p]=new THRE [...]
+l[p]=new THREE.Vector3;var q=new THREE.Vector3,n=new THREE.Vector3,t=new THREE.Vector3,r=new THREE.Vector2,s=new THREE.Vector2,u=new THREE.Vector2,v,x,D,w,y,A,E,G,F,z,I,U=new THREE.Vector3,M=new THREE.Vector3,H,L,P,N,R;0===this.drawcalls.length&&this.addDrawCall(0,c.length,0);var V=this.drawcalls,p=0;for(L=V.length;p<L;++p){H=V[p].start;P=V[p].count;var J=V[p].index,g=H;for(H+=P;g<H;g+=3)P=J+c[g],N=J+c[g+1],R=J+c[g+2],a(P,N,R)}var oa=new THREE.Vector3,ja=new THREE.Vector3,ha=new THREE.Ve [...]
+ca,ba,qa,p=0;for(L=V.length;p<L;++p)for(H=V[p].start,P=V[p].count,J=V[p].index,g=H,H+=P;g<H;g+=3)P=J+c[g],N=J+c[g+1],R=J+c[g+2],b(P),b(N),b(R)}},computeOffsets:function(a){void 0===a&&(a=65535);for(var b=this.attributes.index.array,c=this.attributes.position.array,d=b.length/3,e=new Uint16Array(b.length),f=0,g=0,h=[{start:0,count:0,index:0}],k=h[0],l=0,p=0,q=new Int32Array(6),n=new Int32Array(c.length),t=new Int32Array(c.length),r=0;r<c.length;r++)n[r]=-1,t[r]=-1;for(c=0;c<d;c++){for(var [...]
+s;s++)r=b[3*c+s],-1==n[r]?(q[2*s]=r,q[2*s+1]=-1,p++):n[r]<k.index?(q[2*s]=r,q[2*s+1]=-1,l++):(q[2*s]=r,q[2*s+1]=n[r]);if(g+p>k.index+a)for(k={start:f,count:0,index:g},h.push(k),p=0;6>p;p+=2)s=q[p+1],-1<s&&s<k.index&&(q[p+1]=-1);for(p=0;6>p;p+=2)r=q[p],s=q[p+1],-1===s&&(s=g++),n[r]=s,t[s]=r,e[f++]=s-k.index,k.count++}this.reorderBuffers(e,t,g);return this.drawcalls=this.offsets=h},merge:function(a,b){if(!1===a instanceof THREE.BufferGeometry)THREE.error("THREE.BufferGeometry.merge(): geom [...]
+a);else{void 0===b&&(b=0);var c=this.attributes,d;for(d in c)if(void 0!==a.attributes[d])for(var e=c[d].array,f=a.attributes[d],g=f.array,h=0,f=f.itemSize*b;h<g.length;h++,f++)e[f]=g[h];return this}},normalizeNormals:function(){for(var a=this.attributes.normal.array,b,c,d,e=0,f=a.length;e<f;e+=3)b=a[e],c=a[e+1],d=a[e+2],b=1/Math.sqrt(b*b+c*c+d*d),a[e]*=b,a[e+1]*=b,a[e+2]*=b},reorderBuffers:function(a,b,c){var d={},e;for(e in this.attributes)"index"!=e&&(d[e]=new this.attributes[e].array. [...]
+c));for(var f=0;f<c;f++){var g=b[f];for(e in this.attributes)if("index"!=e)for(var h=this.attributes[e].array,k=this.attributes[e].itemSize,l=d[e],p=0;p<k;p++)l[f*k+p]=h[g*k+p]}this.attributes.index.array=a;for(e in this.attributes)"index"!=e&&(this.attributes[e].array=d[e],this.attributes[e].numItems=this.attributes[e].itemSize*c)},toJSON:function(){var a={metadata:{version:4,type:"BufferGeometry",generator:"BufferGeometryExporter"},uuid:this.uuid,type:this.type,data:{attributes:{}}},b= [...]
+c=this.offsets,d=this.boundingSphere,e;for(e in b){var f=b[e],g=Array.prototype.slice.call(f.array);a.data.attributes[e]={itemSize:f.itemSize,type:f.array.constructor.name,array:g}}0<c.length&&(a.data.offsets=JSON.parse(JSON.stringify(c)));null!==d&&(a.data.boundingSphere={center:d.center.toArray(),radius:d.radius});return a},clone:function(){var a=new THREE.BufferGeometry,b;for(b in this.attributes)a.addAttribute(b,this.attributes[b].clone());b=0;for(var c=this.offsets.length;b<c;b++){v [...]
+a.offsets.push({start:d.start,index:d.index,count:d.count})}return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.BufferGeometry.prototype);
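The block above defines the r69-era THREE.BufferGeometry API bundled here: vertex data lives in flat typed-array attributes registered via addAttribute(). A minimal usage sketch, assuming this build is loaded as the global THREE (the triangle data is illustrative):

    // One triangle stored as a flat Float32Array, three floats per vertex.
    var positions = new Float32Array([
        0, 0, 0,   // vertex A
        1, 0, 0,   // vertex B
        0, 1, 0    // vertex C
    ]);
    var geometry = new THREE.BufferGeometry();
    geometry.addAttribute('position', new THREE.BufferAttribute(positions, 3));
    geometry.computeVertexNormals();   // fills in a 'normal' attribute
    geometry.computeBoundingSphere();  // used later for frustum culling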
+THREE.Geometry=function(){Object.defineProperty(this,"id",{value:THREE.GeometryIdCount++});this.uuid=THREE.Math.generateUUID();this.name="";this.type="Geometry";this.vertices=[];this.colors=[];this.faces=[];this.faceVertexUvs=[[]];this.morphTargets=[];this.morphColors=[];this.morphNormals=[];this.skinWeights=[];this.skinIndices=[];this.lineDistances=[];this.boundingSphere=this.boundingBox=null;this.hasTangents=!1;this.dynamic=!0;this.groupsNeedUpdate=this.lineDistancesNeedUpdate=this.col [...]
+this.tangentsNeedUpdate=this.normalsNeedUpdate=this.uvsNeedUpdate=this.elementsNeedUpdate=this.verticesNeedUpdate=!1};
+THREE.Geometry.prototype={constructor:THREE.Geometry,applyMatrix:function(a){for(var b=(new THREE.Matrix3).getNormalMatrix(a),c=0,d=this.vertices.length;c<d;c++)this.vertices[c].applyMatrix4(a);c=0;for(d=this.faces.length;c<d;c++){a=this.faces[c];a.normal.applyMatrix3(b).normalize();for(var e=0,f=a.vertexNormals.length;e<f;e++)a.vertexNormals[e].applyMatrix3(b).normalize()}null!==this.boundingBox&&this.computeBoundingBox();null!==this.boundingSphere&&this.computeBoundingSphere();this.nor [...]
+this.verticesNeedUpdate=!0},fromBufferGeometry:function(a){for(var b=this,c=a.attributes,d=c.position.array,e=void 0!==c.index?c.index.array:void 0,f=void 0!==c.normal?c.normal.array:void 0,g=void 0!==c.color?c.color.array:void 0,h=void 0!==c.uv?c.uv.array:void 0,k=[],l=[],p=c=0;c<d.length;c+=3,p+=2)b.vertices.push(new THREE.Vector3(d[c],d[c+1],d[c+2])),void 0!==f&&k.push(new THREE.Vector3(f[c],f[c+1],f[c+2])),void 0!==g&&b.colors.push(new THREE.Color(g[c],g[c+1],g[c+2])),void 0!==h&&l.p [...]
+h[p+1]));var q=function(a,c,d){var e=void 0!==f?[k[a].clone(),k[c].clone(),k[d].clone()]:[],n=void 0!==g?[b.colors[a].clone(),b.colors[c].clone(),b.colors[d].clone()]:[];b.faces.push(new THREE.Face3(a,c,d,e,n));void 0!==h&&b.faceVertexUvs[0].push([l[a].clone(),l[c].clone(),l[d].clone()])};if(void 0!==e)if(d=a.drawcalls,0<d.length)for(c=0;c<d.length;c++)for(var p=d[c],n=p.start,t=p.count,r=p.index,p=n,n=n+t;p<n;p+=3)q(r+e[p],r+e[p+1],r+e[p+2]);else for(c=0;c<e.length;c+=3)q(e[c],e[c+1],e[ [...]
+0;c<d.length/3;c+=3)q(c,c+1,c+2);this.computeFaceNormals();null!==a.boundingBox&&(this.boundingBox=a.boundingBox.clone());null!==a.boundingSphere&&(this.boundingSphere=a.boundingSphere.clone());return this},center:function(){this.computeBoundingBox();var a=this.boundingBox.center().negate();this.applyMatrix((new THREE.Matrix4).setPosition(a));return a},computeFaceNormals:function(){for(var a=new THREE.Vector3,b=new THREE.Vector3,c=0,d=this.faces.length;c<d;c++){var e=this.faces[c],f=this [...]
+g=this.vertices[e.b];a.subVectors(this.vertices[e.c],g);b.subVectors(f,g);a.cross(b);a.normalize();e.normal.copy(a)}},computeVertexNormals:function(a){var b,c,d;d=Array(this.vertices.length);b=0;for(c=this.vertices.length;b<c;b++)d[b]=new THREE.Vector3;if(a){var e,f,g,h=new THREE.Vector3,k=new THREE.Vector3;a=0;for(b=this.faces.length;a<b;a++)c=this.faces[a],e=this.vertices[c.a],f=this.vertices[c.b],g=this.vertices[c.c],h.subVectors(g,f),k.subVectors(e,f),h.cross(k),d[c.a].add(h),d[c.b]. [...]
+0,b=this.faces.length;a<b;a++)c=this.faces[a],d[c.a].add(c.normal),d[c.b].add(c.normal),d[c.c].add(c.normal);b=0;for(c=this.vertices.length;b<c;b++)d[b].normalize();a=0;for(b=this.faces.length;a<b;a++)c=this.faces[a],c.vertexNormals[0]=d[c.a].clone(),c.vertexNormals[1]=d[c.b].clone(),c.vertexNormals[2]=d[c.c].clone()},computeMorphNormals:function(){var a,b,c,d,e;c=0;for(d=this.faces.length;c<d;c++)for(e=this.faces[c],e.__originalFaceNormal?e.__originalFaceNormal.copy(e.normal):e.__origin [...]
+e.normal.clone(),e.__originalVertexNormals||(e.__originalVertexNormals=[]),a=0,b=e.vertexNormals.length;a<b;a++)e.__originalVertexNormals[a]?e.__originalVertexNormals[a].copy(e.vertexNormals[a]):e.__originalVertexNormals[a]=e.vertexNormals[a].clone();var f=new THREE.Geometry;f.faces=this.faces;a=0;for(b=this.morphTargets.length;a<b;a++){if(!this.morphNormals[a]){this.morphNormals[a]={};this.morphNormals[a].faceNormals=[];this.morphNormals[a].vertexNormals=[];e=this.morphNormals[a].faceNo [...]
+this.morphNormals[a].vertexNormals,h,k;c=0;for(d=this.faces.length;c<d;c++)h=new THREE.Vector3,k={a:new THREE.Vector3,b:new THREE.Vector3,c:new THREE.Vector3},e.push(h),g.push(k)}g=this.morphNormals[a];f.vertices=this.morphTargets[a].vertices;f.computeFaceNormals();f.computeVertexNormals();c=0;for(d=this.faces.length;c<d;c++)e=this.faces[c],h=g.faceNormals[c],k=g.vertexNormals[c],h.copy(e.normal),k.a.copy(e.vertexNormals[0]),k.b.copy(e.vertexNormals[1]),k.c.copy(e.vertexNormals[2])}c=0;f [...]
+d;c++)e=this.faces[c],e.normal=e.__originalFaceNormal,e.vertexNormals=e.__originalVertexNormals},computeTangents:function(){var a,b,c,d,e,f,g,h,k,l,p,q,n,t,r,s,u,v=[],x=[];c=new THREE.Vector3;var D=new THREE.Vector3,w=new THREE.Vector3,y=new THREE.Vector3,A=new THREE.Vector3;a=0;for(b=this.vertices.length;a<b;a++)v[a]=new THREE.Vector3,x[a]=new THREE.Vector3;a=0;for(b=this.faces.length;a<b;a++)e=this.faces[a],f=this.faceVertexUvs[0][a],d=e.a,u=e.b,e=e.c,g=this.vertices[d],h=this.vertices [...]
+l=f[0],p=f[1],q=f[2],f=h.x-g.x,n=k.x-g.x,t=h.y-g.y,r=k.y-g.y,h=h.z-g.z,g=k.z-g.z,k=p.x-l.x,s=q.x-l.x,p=p.y-l.y,l=q.y-l.y,q=1/(k*l-s*p),c.set((l*f-p*n)*q,(l*t-p*r)*q,(l*h-p*g)*q),D.set((k*n-s*f)*q,(k*r-s*t)*q,(k*g-s*h)*q),v[d].add(c),v[u].add(c),v[e].add(c),x[d].add(D),x[u].add(D),x[e].add(D);D=["a","b","c","d"];a=0;for(b=this.faces.length;a<b;a++)for(e=this.faces[a],c=0;c<Math.min(e.vertexNormals.length,3);c++)A.copy(e.vertexNormals[c]),d=e[D[c]],u=v[d],w.copy(u),w.sub(A.multiplyScalar(A [...]
+y.crossVectors(e.vertexNormals[c],u),d=y.dot(x[d]),d=0>d?-1:1,e.vertexTangents[c]=new THREE.Vector4(w.x,w.y,w.z,d);this.hasTangents=!0},computeLineDistances:function(){for(var a=0,b=this.vertices,c=0,d=b.length;c<d;c++)0<c&&(a+=b[c].distanceTo(b[c-1])),this.lineDistances[c]=a},computeBoundingBox:function(){null===this.boundingBox&&(this.boundingBox=new THREE.Box3);this.boundingBox.setFromPoints(this.vertices)},computeBoundingSphere:function(){null===this.boundingSphere&&(this.boundingSph [...]
+this.boundingSphere.setFromPoints(this.vertices)},merge:function(a,b,c){if(!1===a instanceof THREE.Geometry)THREE.error("THREE.Geometry.merge(): geometry not an instance of THREE.Geometry.",a);else{var d,e=this.vertices.length,f=this.vertices,g=a.vertices,h=this.faces,k=a.faces,l=this.faceVertexUvs[0];a=a.faceVertexUvs[0];void 0===c&&(c=0);void 0!==b&&(d=(new THREE.Matrix3).getNormalMatrix(b));for(var p=0,q=g.length;p<q;p++){var n=g[p].clone();void 0!==b&&n.applyMatrix4(b);f.push(n)}p=0; [...]
+q;p++){var g=k[p],t,r=g.vertexNormals,s=g.vertexColors,n=new THREE.Face3(g.a+e,g.b+e,g.c+e);n.normal.copy(g.normal);void 0!==d&&n.normal.applyMatrix3(d).normalize();b=0;for(f=r.length;b<f;b++)t=r[b].clone(),void 0!==d&&t.applyMatrix3(d).normalize(),n.vertexNormals.push(t);n.color.copy(g.color);b=0;for(f=s.length;b<f;b++)t=s[b],n.vertexColors.push(t.clone());n.materialIndex=g.materialIndex+c;h.push(n)}p=0;for(q=a.length;p<q;p++)if(c=a[p],d=[],void 0!==c){b=0;for(f=c.length;b<f;b++)d.push( [...]
+l.push(d)}}},mergeMesh:function(a){!1===a instanceof THREE.Mesh?THREE.error("THREE.Geometry.mergeMesh(): mesh not an instance of THREE.Mesh.",a):(a.matrixAutoUpdate&&a.updateMatrix(),this.merge(a.geometry,a.matrix))},mergeVertices:function(){var a={},b=[],c=[],d,e=Math.pow(10,4),f,g;f=0;for(g=this.vertices.length;f<g;f++)d=this.vertices[f],d=Math.round(d.x*e)+"_"+Math.round(d.y*e)+"_"+Math.round(d.z*e),void 0===a[d]?(a[d]=f,b.push(this.vertices[f]),c[f]=b.length-1):c[f]=c[a[d]];a=[];f=0; [...]
+g;f++)for(e=this.faces[f],e.a=c[e.a],e.b=c[e.b],e.c=c[e.c],e=[e.a,e.b,e.c],d=0;3>d;d++)if(e[d]==e[(d+1)%3]){a.push(f);break}for(f=a.length-1;0<=f;f--)for(e=a[f],this.faces.splice(e,1),c=0,g=this.faceVertexUvs.length;c<g;c++)this.faceVertexUvs[c].splice(e,1);f=this.vertices.length-b.length;this.vertices=b;return f},toJSON:function(){function a(a,b,c){return c?a|1<<b:a&~(1<<b)}function b(a){var b=a.x.toString()+a.y.toString()+a.z.toString();if(void 0!==l[b])return l[b];l[b]=k.length/3;k.pu [...]
+a.z);return l[b]}function c(a){var b=a.r.toString()+a.g.toString()+a.b.toString();if(void 0!==q[b])return q[b];q[b]=p.length;p.push(a.getHex());return q[b]}function d(a){var b=a.x.toString()+a.y.toString();if(void 0!==t[b])return t[b];t[b]=n.length/2;n.push(a.x,a.y);return t[b]}var e={metadata:{version:4,type:"BufferGeometry",generator:"BufferGeometryExporter"},uuid:this.uuid,type:this.type};""!==this.name&&(e.name=this.name);if(void 0!==this.parameters){var f=this.parameters,g;for(g in  [...]
+f[g]&&(e[g]=f[g]);return e}f=[];for(g=0;g<this.vertices.length;g++){var h=this.vertices[g];f.push(h.x,h.y,h.z)}var h=[],k=[],l={},p=[],q={},n=[],t={};for(g=0;g<this.faces.length;g++){var r=this.faces[g],s=void 0!==this.faceVertexUvs[0][g],u=0<r.normal.length(),v=0<r.vertexNormals.length,x=1!==r.color.r||1!==r.color.g||1!==r.color.b,D=0<r.vertexColors.length,w=0,w=a(w,0,0),w=a(w,1,!1),w=a(w,2,!1),w=a(w,3,s),w=a(w,4,u),w=a(w,5,v),w=a(w,6,x),w=a(w,7,D);h.push(w);h.push(r.a,r.b,r.c);s&&(s=th [...]
+h.push(d(s[0]),d(s[1]),d(s[2])));u&&h.push(b(r.normal));v&&(u=r.vertexNormals,h.push(b(u[0]),b(u[1]),b(u[2])));x&&h.push(c(r.color));D&&(r=r.vertexColors,h.push(c(r[0]),c(r[1]),c(r[2])))}e.data={};e.data.vertices=f;e.data.normals=k;0<p.length&&(e.data.colors=p);0<n.length&&(e.data.uvs=[n]);e.data.faces=h;return e},clone:function(){for(var a=new THREE.Geometry,b=this.vertices,c=0,d=b.length;c<d;c++)a.vertices.push(b[c].clone());b=this.faces;c=0;for(d=b.length;c<d;c++)a.faces.push(b[c].clo [...]
+for(d=this.faceVertexUvs.length;c<d;c++){b=this.faceVertexUvs[c];void 0===a.faceVertexUvs[c]&&(a.faceVertexUvs[c]=[]);for(var e=0,f=b.length;e<f;e++){for(var g=b[e],h=[],k=0,l=g.length;k<l;k++)h.push(g[k].clone());a.faceVertexUvs[c].push(h)}}return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Geometry.prototype);THREE.GeometryIdCount=0;
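THREE.Geometry, defined above, is the object-based counterpart: explicit Vector3 vertices plus Face3 index triples. A minimal sketch under the same assumptions:

    var geom = new THREE.Geometry();
    geom.vertices.push(
        new THREE.Vector3(0, 0, 0),
        new THREE.Vector3(1, 0, 0),
        new THREE.Vector3(0, 1, 0)
    );
    geom.faces.push(new THREE.Face3(0, 1, 2)); // indices into geom.vertices
    geom.computeFaceNormals();
    geom.computeBoundingSphere();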
+THREE.Camera=function(){THREE.Object3D.call(this);this.type="Camera";this.matrixWorldInverse=new THREE.Matrix4;this.projectionMatrix=new THREE.Matrix4};THREE.Camera.prototype=Object.create(THREE.Object3D.prototype);THREE.Camera.prototype.constructor=THREE.Camera;THREE.Camera.prototype.getWorldDirection=function(){var a=new THREE.Quaternion;return function(b){b=b||new THREE.Vector3;this.getWorldQuaternion(a);return b.set(0,0,-1).applyQuaternion(a)}}();
+THREE.Camera.prototype.lookAt=function(){var a=new THREE.Matrix4;return function(b){a.lookAt(this.position,b,this.up);this.quaternion.setFromRotationMatrix(a)}}();THREE.Camera.prototype.clone=function(a){void 0===a&&(a=new THREE.Camera);THREE.Object3D.prototype.clone.call(this,a);a.matrixWorldInverse.copy(this.matrixWorldInverse);a.projectionMatrix.copy(this.projectionMatrix);return a};
+THREE.CubeCamera=function(a,b,c){THREE.Object3D.call(this);this.type="CubeCamera";var d=new THREE.PerspectiveCamera(90,1,a,b);d.up.set(0,-1,0);d.lookAt(new THREE.Vector3(1,0,0));this.add(d);var e=new THREE.PerspectiveCamera(90,1,a,b);e.up.set(0,-1,0);e.lookAt(new THREE.Vector3(-1,0,0));this.add(e);var f=new THREE.PerspectiveCamera(90,1,a,b);f.up.set(0,0,1);f.lookAt(new THREE.Vector3(0,1,0));this.add(f);var g=new THREE.PerspectiveCamera(90,1,a,b);g.up.set(0,0,-1);g.lookAt(new THREE.Vector [...]
+this.add(g);var h=new THREE.PerspectiveCamera(90,1,a,b);h.up.set(0,-1,0);h.lookAt(new THREE.Vector3(0,0,1));this.add(h);var k=new THREE.PerspectiveCamera(90,1,a,b);k.up.set(0,-1,0);k.lookAt(new THREE.Vector3(0,0,-1));this.add(k);this.renderTarget=new THREE.WebGLRenderTargetCube(c,c,{format:THREE.RGBFormat,magFilter:THREE.LinearFilter,minFilter:THREE.LinearFilter});this.updateCubeMap=function(a,b){var c=this.renderTarget,n=c.generateMipmaps;c.generateMipmaps=!1;c.activeCubeFace=0;a.render [...]
+1;a.render(b,e,c);c.activeCubeFace=2;a.render(b,f,c);c.activeCubeFace=3;a.render(b,g,c);c.activeCubeFace=4;a.render(b,h,c);c.generateMipmaps=n;c.activeCubeFace=5;a.render(b,k,c)}};THREE.CubeCamera.prototype=Object.create(THREE.Object3D.prototype);THREE.CubeCamera.prototype.constructor=THREE.CubeCamera;
+THREE.OrthographicCamera=function(a,b,c,d,e,f){THREE.Camera.call(this);this.type="OrthographicCamera";this.zoom=1;this.left=a;this.right=b;this.top=c;this.bottom=d;this.near=void 0!==e?e:.1;this.far=void 0!==f?f:2E3;this.updateProjectionMatrix()};THREE.OrthographicCamera.prototype=Object.create(THREE.Camera.prototype);THREE.OrthographicCamera.prototype.constructor=THREE.OrthographicCamera;
+THREE.OrthographicCamera.prototype.updateProjectionMatrix=function(){var a=(this.right-this.left)/(2*this.zoom),b=(this.top-this.bottom)/(2*this.zoom),c=(this.right+this.left)/2,d=(this.top+this.bottom)/2;this.projectionMatrix.makeOrthographic(c-a,c+a,d+b,d-b,this.near,this.far)};
+THREE.OrthographicCamera.prototype.clone=function(){var a=new THREE.OrthographicCamera;THREE.Camera.prototype.clone.call(this,a);a.zoom=this.zoom;a.left=this.left;a.right=this.right;a.top=this.top;a.bottom=this.bottom;a.near=this.near;a.far=this.far;a.projectionMatrix.copy(this.projectionMatrix);return a};
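The camera classes above share Camera.lookAt(), which orients the camera through its quaternion. A sketch for the orthographic variant (the frustum bounds and position are illustrative):

    // (left, right, top, bottom, near, far); near/far default to 0.1 and 2000
    var camera = new THREE.OrthographicCamera(-10, 10, 10, -10, 0.1, 100);
    camera.position.set(0, 0, 50);
    camera.lookAt(new THREE.Vector3(0, 0, 0));
    camera.zoom = 2;                   // folded into the frustum on the next line
    camera.updateProjectionMatrix();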
+THREE.PerspectiveCamera=function(a,b,c,d){THREE.Camera.call(this);this.type="PerspectiveCamera";this.zoom=1;this.fov=void 0!==a?a:50;this.aspect=void 0!==b?b:1;this.near=void 0!==c?c:.1;this.far=void 0!==d?d:2E3;this.updateProjectionMatrix()};THREE.PerspectiveCamera.prototype=Object.create(THREE.Camera.prototype);THREE.PerspectiveCamera.prototype.constructor=THREE.PerspectiveCamera;
+THREE.PerspectiveCamera.prototype.setLens=function(a,b){void 0===b&&(b=24);this.fov=2*THREE.Math.radToDeg(Math.atan(b/(2*a)));this.updateProjectionMatrix()};THREE.PerspectiveCamera.prototype.setViewOffset=function(a,b,c,d,e,f){this.fullWidth=a;this.fullHeight=b;this.x=c;this.y=d;this.width=e;this.height=f;this.updateProjectionMatrix()};
+THREE.PerspectiveCamera.prototype.updateProjectionMatrix=function(){var a=THREE.Math.radToDeg(2*Math.atan(Math.tan(.5*THREE.Math.degToRad(this.fov))/this.zoom));if(this.fullWidth){var b=this.fullWidth/this.fullHeight,a=Math.tan(THREE.Math.degToRad(.5*a))*this.near,c=-a,d=b*c,b=Math.abs(b*a-d),c=Math.abs(a-c);this.projectionMatrix.makeFrustum(d+this.x*b/this.fullWidth,d+(this.x+this.width)*b/this.fullWidth,a-(this.y+this.height)*c/this.fullHeight,a-this.y*c/this.fullHeight,this.near,this. [...]
+this.aspect,this.near,this.far)};THREE.PerspectiveCamera.prototype.clone=function(){var a=new THREE.PerspectiveCamera;THREE.Camera.prototype.clone.call(this,a);a.zoom=this.zoom;a.fov=this.fov;a.aspect=this.aspect;a.near=this.near;a.far=this.far;a.projectionMatrix.copy(this.projectionMatrix);return a};THREE.Light=function(a){THREE.Object3D.call(this);this.type="Light";this.color=new THREE.Color(a)};THREE.Light.prototype=Object.create(THREE.Object3D.prototype);THREE.Light.prototype.constru [...]
+THREE.Light.prototype.clone=function(a){void 0===a&&(a=new THREE.Light);THREE.Object3D.prototype.clone.call(this,a);a.color.copy(this.color);return a};THREE.AmbientLight=function(a){THREE.Light.call(this,a);this.type="AmbientLight"};THREE.AmbientLight.prototype=Object.create(THREE.Light.prototype);THREE.AmbientLight.prototype.constructor=THREE.AmbientLight;THREE.AmbientLight.prototype.clone=function(){var a=new THREE.AmbientLight;THREE.Light.prototype.clone.call(this,a);return a};
+THREE.AreaLight=function(a,b){THREE.Light.call(this,a);this.type="AreaLight";this.normal=new THREE.Vector3(0,-1,0);this.right=new THREE.Vector3(1,0,0);this.intensity=void 0!==b?b:1;this.height=this.width=1;this.constantAttenuation=1.5;this.linearAttenuation=.5;this.quadraticAttenuation=.1};THREE.AreaLight.prototype=Object.create(THREE.Light.prototype);THREE.AreaLight.prototype.constructor=THREE.AreaLight;
+THREE.DirectionalLight=function(a,b){THREE.Light.call(this,a);this.type="DirectionalLight";this.position.set(0,1,0);this.target=new THREE.Object3D;this.intensity=void 0!==b?b:1;this.onlyShadow=this.castShadow=!1;this.shadowCameraNear=50;this.shadowCameraFar=5E3;this.shadowCameraLeft=-500;this.shadowCameraTop=this.shadowCameraRight=500;this.shadowCameraBottom=-500;this.shadowCameraVisible=!1;this.shadowBias=0;this.shadowDarkness=.5;this.shadowMapHeight=this.shadowMapWidth=512;this.shadowC [...]
+this.shadowCascadeOffset=new THREE.Vector3(0,0,-1E3);this.shadowCascadeCount=2;this.shadowCascadeBias=[0,0,0];this.shadowCascadeWidth=[512,512,512];this.shadowCascadeHeight=[512,512,512];this.shadowCascadeNearZ=[-1,.99,.998];this.shadowCascadeFarZ=[.99,.998,1];this.shadowCascadeArray=[];this.shadowMatrix=this.shadowCamera=this.shadowMapSize=this.shadowMap=null};THREE.DirectionalLight.prototype=Object.create(THREE.Light.prototype);THREE.DirectionalLight.prototype.constructor=THREE.Directi [...]
+THREE.DirectionalLight.prototype.clone=function(){var a=new THREE.DirectionalLight;THREE.Light.prototype.clone.call(this,a);a.target=this.target.clone();a.intensity=this.intensity;a.castShadow=this.castShadow;a.onlyShadow=this.onlyShadow;a.shadowCameraNear=this.shadowCameraNear;a.shadowCameraFar=this.shadowCameraFar;a.shadowCameraLeft=this.shadowCameraLeft;a.shadowCameraRight=this.shadowCameraRight;a.shadowCameraTop=this.shadowCameraTop;a.shadowCameraBottom=this.shadowCameraBottom;a.shad [...]
+this.shadowCameraVisible;a.shadowBias=this.shadowBias;a.shadowDarkness=this.shadowDarkness;a.shadowMapWidth=this.shadowMapWidth;a.shadowMapHeight=this.shadowMapHeight;a.shadowCascade=this.shadowCascade;a.shadowCascadeOffset.copy(this.shadowCascadeOffset);a.shadowCascadeCount=this.shadowCascadeCount;a.shadowCascadeBias=this.shadowCascadeBias.slice(0);a.shadowCascadeWidth=this.shadowCascadeWidth.slice(0);a.shadowCascadeHeight=this.shadowCascadeHeight.slice(0);a.shadowCascadeNearZ=this.shad [...]
+a.shadowCascadeFarZ=this.shadowCascadeFarZ.slice(0);return a};THREE.HemisphereLight=function(a,b,c){THREE.Light.call(this,a);this.type="HemisphereLight";this.position.set(0,100,0);this.groundColor=new THREE.Color(b);this.intensity=void 0!==c?c:1};THREE.HemisphereLight.prototype=Object.create(THREE.Light.prototype);THREE.HemisphereLight.prototype.constructor=THREE.HemisphereLight;
+THREE.HemisphereLight.prototype.clone=function(){var a=new THREE.HemisphereLight;THREE.Light.prototype.clone.call(this,a);a.groundColor.copy(this.groundColor);a.intensity=this.intensity;return a};THREE.PointLight=function(a,b,c,d){THREE.Light.call(this,a);this.type="PointLight";this.intensity=void 0!==b?b:1;this.distance=void 0!==c?c:0;this.decay=void 0!==d?d:1};THREE.PointLight.prototype=Object.create(THREE.Light.prototype);THREE.PointLight.prototype.constructor=THREE.PointLight;
+THREE.PointLight.prototype.clone=function(){var a=new THREE.PointLight;THREE.Light.prototype.clone.call(this,a);a.intensity=this.intensity;a.distance=this.distance;a.decay=this.decay;return a};
+THREE.SpotLight=function(a,b,c,d,e,f){THREE.Light.call(this,a);this.type="SpotLight";this.position.set(0,1,0);this.target=new THREE.Object3D;this.intensity=void 0!==b?b:1;this.distance=void 0!==c?c:0;this.angle=void 0!==d?d:Math.PI/3;this.exponent=void 0!==e?e:10;this.decay=void 0!==f?f:1;this.onlyShadow=this.castShadow=!1;this.shadowCameraNear=50;this.shadowCameraFar=5E3;this.shadowCameraFov=50;this.shadowCameraVisible=!1;this.shadowBias=0;this.shadowDarkness=.5;this.shadowMapHeight=thi [...]
+512;this.shadowMatrix=this.shadowCamera=this.shadowMapSize=this.shadowMap=null};THREE.SpotLight.prototype=Object.create(THREE.Light.prototype);THREE.SpotLight.prototype.constructor=THREE.SpotLight;
+THREE.SpotLight.prototype.clone=function(){var a=new THREE.SpotLight;THREE.Light.prototype.clone.call(this,a);a.target=this.target.clone();a.intensity=this.intensity;a.distance=this.distance;a.angle=this.angle;a.exponent=this.exponent;a.decay=this.decay;a.castShadow=this.castShadow;a.onlyShadow=this.onlyShadow;a.shadowCameraNear=this.shadowCameraNear;a.shadowCameraFar=this.shadowCameraFar;a.shadowCameraFov=this.shadowCameraFov;a.shadowCameraVisible=this.shadowCameraVisible;a.shadowBias=t [...]
+a.shadowDarkness=this.shadowDarkness;a.shadowMapWidth=this.shadowMapWidth;a.shadowMapHeight=this.shadowMapHeight;return a};THREE.Cache={files:{},add:function(a,b){this.files[a]=b},get:function(a){return this.files[a]},remove:function(a){delete this.files[a]},clear:function(){this.files={}}};
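Each light constructor above takes a color first, then type-specific parameters with the defaults visible in the source (intensity 1, distance 0, decay 1, and so on). An illustrative combination:

    var ambient = new THREE.AmbientLight(0x404040);          // un-directional fill
    var sun     = new THREE.DirectionalLight(0xffffff, 0.8); // (color, intensity)
    sun.position.set(5, 10, 7);                              // shines toward sun.target
    var bulb    = new THREE.PointLight(0xffcc88, 1, 50, 1);  // (color, intensity, distance, decay)
    var spot    = new THREE.SpotLight(0xffffff, 1, 0, Math.PI / 4);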
+THREE.Loader=function(a){this.statusDomElement=(this.showStatus=a)?THREE.Loader.prototype.addStatusElement():null;this.imageLoader=new THREE.ImageLoader;this.onLoadStart=function(){};this.onLoadProgress=function(){};this.onLoadComplete=function(){}};
+THREE.Loader.prototype={constructor:THREE.Loader,crossOrigin:void 0,addStatusElement:function(){var a=document.createElement("div");a.style.position="absolute";a.style.right="0px";a.style.top="0px";a.style.fontSize="0.8em";a.style.textAlign="left";a.style.background="rgba(0,0,0,0.25)";a.style.color="#fff";a.style.width="120px";a.style.padding="0.5em 0.5em 0.5em 0.5em";a.style.zIndex=1E3;a.innerHTML="Loading ...";return a},updateProgress:function(a){var b="Loaded ",b=a.total?b+((100*a.loa [...]
+"%"):b+((a.loaded/1024).toFixed(2)+" KB");this.statusDomElement.innerHTML=b},extractUrlBase:function(a){a=a.split("/");if(1===a.length)return"./";a.pop();return a.join("/")+"/"},initMaterials:function(a,b){for(var c=[],d=0;d<a.length;++d)c[d]=this.createMaterial(a[d],b);return c},needsTangents:function(a){for(var b=0,c=a.length;b<c;b++)if(a[b]instanceof THREE.ShaderMaterial)return!0;return!1},createMaterial:function(a,b){function c(a){a=Math.log(a)/Math.LN2;return Math.pow(2,Math.round(a [...]
+d,e,g,h,k,s){var u=b+e,v,x=THREE.Loader.Handlers.get(u);null!==x?v=x.load(u):(v=new THREE.Texture,x=f.imageLoader,x.crossOrigin=f.crossOrigin,x.load(u,function(a){if(!1===THREE.Math.isPowerOfTwo(a.width)||!1===THREE.Math.isPowerOfTwo(a.height)){var b=c(a.width),d=c(a.height),e=document.createElement("canvas");e.width=b;e.height=d;e.getContext("2d").drawImage(a,0,0,b,d);v.image=e}else v.image=a;v.needsUpdate=!0}));v.sourceFile=e;g&&(v.repeat.set(g[0],g[1]),1!==g[0]&&(v.wrapS=THREE.RepeatW [...]
+1!==g[1]&&(v.wrapT=THREE.RepeatWrapping));h&&v.offset.set(h[0],h[1]);k&&(e={repeat:THREE.RepeatWrapping,mirror:THREE.MirroredRepeatWrapping},void 0!==e[k[0]]&&(v.wrapS=e[k[0]]),void 0!==e[k[1]]&&(v.wrapT=e[k[1]]));s&&(v.anisotropy=s);a[d]=v}function e(a){return(255*a[0]<<16)+(255*a[1]<<8)+255*a[2]}var f=this,g="MeshLambertMaterial",h={color:15658734,opacity:1,map:null,lightMap:null,normalMap:null,bumpMap:null,wireframe:!1};if(a.shading){var k=a.shading.toLowerCase();"phong"===k?g="MeshPh [...]
+"basic"===k&&(g="MeshBasicMaterial")}void 0!==a.blending&&void 0!==THREE[a.blending]&&(h.blending=THREE[a.blending]);void 0!==a.transparent&&(h.transparent=a.transparent);void 0!==a.opacity&&1>a.opacity&&(h.transparent=!0);void 0!==a.depthTest&&(h.depthTest=a.depthTest);void 0!==a.depthWrite&&(h.depthWrite=a.depthWrite);void 0!==a.visible&&(h.visible=a.visible);void 0!==a.flipSided&&(h.side=THREE.BackSide);void 0!==a.doubleSided&&(h.side=THREE.DoubleSide);void 0!==a.wireframe&&(h.wirefra [...]
+void 0!==a.vertexColors&&("face"===a.vertexColors?h.vertexColors=THREE.FaceColors:a.vertexColors&&(h.vertexColors=THREE.VertexColors));a.colorDiffuse?h.color=e(a.colorDiffuse):a.DbgColor&&(h.color=a.DbgColor);a.colorSpecular&&(h.specular=e(a.colorSpecular));a.colorEmissive&&(h.emissive=e(a.colorEmissive));void 0!==a.transparency&&(console.warn("THREE.Loader: transparency has been renamed to opacity"),a.opacity=a.transparency);void 0!==a.opacity&&(h.opacity=a.opacity);a.specularCoef&&(h.s [...]
+a.specularCoef);a.mapDiffuse&&b&&d(h,"map",a.mapDiffuse,a.mapDiffuseRepeat,a.mapDiffuseOffset,a.mapDiffuseWrap,a.mapDiffuseAnisotropy);a.mapLight&&b&&d(h,"lightMap",a.mapLight,a.mapLightRepeat,a.mapLightOffset,a.mapLightWrap,a.mapLightAnisotropy);a.mapBump&&b&&d(h,"bumpMap",a.mapBump,a.mapBumpRepeat,a.mapBumpOffset,a.mapBumpWrap,a.mapBumpAnisotropy);a.mapNormal&&b&&d(h,"normalMap",a.mapNormal,a.mapNormalRepeat,a.mapNormalOffset,a.mapNormalWrap,a.mapNormalAnisotropy);a.mapSpecular&&b&&d(h [...]
+a.mapSpecular,a.mapSpecularRepeat,a.mapSpecularOffset,a.mapSpecularWrap,a.mapSpecularAnisotropy);a.mapAlpha&&b&&d(h,"alphaMap",a.mapAlpha,a.mapAlphaRepeat,a.mapAlphaOffset,a.mapAlphaWrap,a.mapAlphaAnisotropy);a.mapBumpScale&&(h.bumpScale=a.mapBumpScale);a.mapNormalFactor&&(h.normalScale=new THREE.Vector2(a.mapNormalFactor,a.mapNormalFactor));g=new THREE[g](h);void 0!==a.DbgName&&(g.name=a.DbgName);return g}};
+THREE.Loader.Handlers={handlers:[],add:function(a,b){this.handlers.push(a,b)},get:function(a){for(var b=0,c=this.handlers.length;b<c;b+=2){var d=this.handlers[b+1];if(this.handlers[b].test(a))return d}return null}};THREE.XHRLoader=function(a){this.manager=void 0!==a?a:THREE.DefaultLoadingManager};
+THREE.XHRLoader.prototype={constructor:THREE.XHRLoader,load:function(a,b,c,d){var e=this,f=THREE.Cache.get(a);void 0!==f?b&&b(f):(f=new XMLHttpRequest,f.open("GET",a,!0),f.addEventListener("load",function(c){THREE.Cache.add(a,this.response);b&&b(this.response);e.manager.itemEnd(a)},!1),void 0!==c&&f.addEventListener("progress",function(a){c(a)},!1),void 0!==d&&f.addEventListener("error",function(a){d(a)},!1),void 0!==this.crossOrigin&&(f.crossOrigin=this.crossOrigin),void 0!==this.respon [...]
+this.responseType),f.send(null),e.manager.itemStart(a))},setResponseType:function(a){this.responseType=a},setCrossOrigin:function(a){this.crossOrigin=a}};THREE.ImageLoader=function(a){this.manager=void 0!==a?a:THREE.DefaultLoadingManager};
+THREE.ImageLoader.prototype={constructor:THREE.ImageLoader,load:function(a,b,c,d){var e=this,f=THREE.Cache.get(a);if(void 0!==f)b(f);else return f=document.createElement("img"),f.addEventListener("load",function(c){THREE.Cache.add(a,this);b&&b(this);e.manager.itemEnd(a)},!1),void 0!==c&&f.addEventListener("progress",function(a){c(a)},!1),void 0!==d&&f.addEventListener("error",function(a){d(a)},!1),void 0!==this.crossOrigin&&(f.crossOrigin=this.crossOrigin),f.src=a,e.manager.itemStart(a), [...]
+a}};THREE.JSONLoader=function(a){THREE.Loader.call(this,a);this.withCredentials=!1};THREE.JSONLoader.prototype=Object.create(THREE.Loader.prototype);THREE.JSONLoader.prototype.constructor=THREE.JSONLoader;THREE.JSONLoader.prototype.load=function(a,b,c){c=c&&"string"===typeof c?c:this.extractUrlBase(a);this.onLoadStart();this.loadAjaxJSON(this,a,b,c)};
+THREE.JSONLoader.prototype.loadAjaxJSON=function(a,b,c,d,e){var f=new XMLHttpRequest,g=0;f.onreadystatechange=function(){if(f.readyState===f.DONE)if(200===f.status||0===f.status){if(f.responseText){var h=JSON.parse(f.responseText),k=h.metadata;if(void 0!==k){if("object"===k.type){THREE.error("THREE.JSONLoader: "+b+" should be loaded with THREE.ObjectLoader instead.");return}if("scene"===k.type){THREE.error("THREE.JSONLoader: "+b+" seems to be a Scene. Use THREE.SceneLoader instead.");ret [...]
+d);c(h.geometry,h.materials)}else THREE.error("THREE.JSONLoader: "+b+" seems to be unreachable or the file is empty.");a.onLoadComplete()}else THREE.error("THREE.JSONLoader: Couldn't load "+b+" ("+f.status+")");else f.readyState===f.LOADING?e&&(0===g&&(g=f.getResponseHeader("Content-Length")),e({total:g,loaded:f.responseText.length})):f.readyState===f.HEADERS_RECEIVED&&void 0!==e&&(g=f.getResponseHeader("Content-Length"))};f.open("GET",b,!0);f.withCredentials=this.withCredentials;f.send(null)};
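THREE.JSONLoader.load() above fetches a model over XHR and, as loadAjaxJSON shows, invokes the callback with (geometry, materials). A sketch; the URL and the `scene` variable are illustrative, not part of this tree:

    var loader = new THREE.JSONLoader();
    loader.load('models/example.js', function (geometry, materials) {
        var mesh = new THREE.Mesh(geometry, new THREE.MeshFaceMaterial(materials));
        scene.add(mesh);  // assumes an existing THREE.Scene named scene
    });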
+THREE.JSONLoader.prototype.parse=function(a,b){var c=new THREE.Geometry,d=void 0!==a.scale?1/a.scale:1;(function(b){var d,g,h,k,l,p,q,n,t,r,s,u,v,x=a.faces;p=a.vertices;var D=a.normals,w=a.colors,y=0;if(void 0!==a.uvs){for(d=0;d<a.uvs.length;d++)a.uvs[d].length&&y++;for(d=0;d<y;d++)c.faceVertexUvs[d]=[]}k=0;for(l=p.length;k<l;)d=new THREE.Vector3,d.x=p[k++]*b,d.y=p[k++]*b,d.z=p[k++]*b,c.vertices.push(d);k=0;for(l=x.length;k<l;)if(b=x[k++],t=b&1,h=b&2,d=b&8,q=b&16,r=b&32,p=b&64,b&=128,t){ [...]
+t.a=x[k];t.b=x[k+1];t.c=x[k+3];s=new THREE.Face3;s.a=x[k+1];s.b=x[k+2];s.c=x[k+3];k+=4;h&&(h=x[k++],t.materialIndex=h,s.materialIndex=h);h=c.faces.length;if(d)for(d=0;d<y;d++)for(u=a.uvs[d],c.faceVertexUvs[d][h]=[],c.faceVertexUvs[d][h+1]=[],g=0;4>g;g++)n=x[k++],v=u[2*n],n=u[2*n+1],v=new THREE.Vector2(v,n),2!==g&&c.faceVertexUvs[d][h].push(v),0!==g&&c.faceVertexUvs[d][h+1].push(v);q&&(q=3*x[k++],t.normal.set(D[q++],D[q++],D[q]),s.normal.copy(t.normal));if(r)for(d=0;4>d;d++)q=3*x[k++],r=n [...]
+D[q++],D[q]),2!==d&&t.vertexNormals.push(r),0!==d&&s.vertexNormals.push(r);p&&(p=x[k++],p=w[p],t.color.setHex(p),s.color.setHex(p));if(b)for(d=0;4>d;d++)p=x[k++],p=w[p],2!==d&&t.vertexColors.push(new THREE.Color(p)),0!==d&&s.vertexColors.push(new THREE.Color(p));c.faces.push(t);c.faces.push(s)}else{t=new THREE.Face3;t.a=x[k++];t.b=x[k++];t.c=x[k++];h&&(h=x[k++],t.materialIndex=h);h=c.faces.length;if(d)for(d=0;d<y;d++)for(u=a.uvs[d],c.faceVertexUvs[d][h]=[],g=0;3>g;g++)n=x[k++],v=u[2*n],n [...]
+v=new THREE.Vector2(v,n),c.faceVertexUvs[d][h].push(v);q&&(q=3*x[k++],t.normal.set(D[q++],D[q++],D[q]));if(r)for(d=0;3>d;d++)q=3*x[k++],r=new THREE.Vector3(D[q++],D[q++],D[q]),t.vertexNormals.push(r);p&&(p=x[k++],t.color.setHex(w[p]));if(b)for(d=0;3>d;d++)p=x[k++],t.vertexColors.push(new THREE.Color(w[p]));c.faces.push(t)}})(d);(function(){var b=void 0!==a.influencesPerVertex?a.influencesPerVertex:2;if(a.skinWeights)for(var d=0,g=a.skinWeights.length;d<g;d+=b)c.skinWeights.push(new THREE [...]
+1<b?a.skinWeights[d+1]:0,2<b?a.skinWeights[d+2]:0,3<b?a.skinWeights[d+3]:0));if(a.skinIndices)for(d=0,g=a.skinIndices.length;d<g;d+=b)c.skinIndices.push(new THREE.Vector4(a.skinIndices[d],1<b?a.skinIndices[d+1]:0,2<b?a.skinIndices[d+2]:0,3<b?a.skinIndices[d+3]:0));c.bones=a.bones;c.bones&&0<c.bones.length&&(c.skinWeights.length!==c.skinIndices.length||c.skinIndices.length!==c.vertices.length)&&THREE.warn("THREE.JSONLoader: When skinning, number of vertices ("+c.vertices.length+"), skinIn [...]
+c.skinIndices.length+"), and skinWeights ("+c.skinWeights.length+") should match.");c.animation=a.animation;c.animations=a.animations})();(function(b){if(void 0!==a.morphTargets){var d,g,h,k,l,p;d=0;for(g=a.morphTargets.length;d<g;d++)for(c.morphTargets[d]={},c.morphTargets[d].name=a.morphTargets[d].name,c.morphTargets[d].vertices=[],l=c.morphTargets[d].vertices,p=a.morphTargets[d].vertices,h=0,k=p.length;h<k;h+=3){var q=new THREE.Vector3;q.x=p[h]*b;q.y=p[h+1]*b;q.z=p[h+2]*b;l.push(q)}}i [...]
+a.morphColors)for(d=0,g=a.morphColors.length;d<g;d++)for(c.morphColors[d]={},c.morphColors[d].name=a.morphColors[d].name,c.morphColors[d].colors=[],k=c.morphColors[d].colors,l=a.morphColors[d].colors,b=0,h=l.length;b<h;b+=3)p=new THREE.Color(16755200),p.setRGB(l[b],l[b+1],l[b+2]),k.push(p)})(d);c.computeFaceNormals();c.computeBoundingSphere();if(void 0===a.materials||0===a.materials.length)return{geometry:c};d=this.initMaterials(a.materials,b);this.needsTangents(d)&&c.computeTangents();r [...]
+materials:d}};THREE.LoadingManager=function(a,b,c){var d=this,e=0,f=0;this.onLoad=a;this.onProgress=b;this.onError=c;this.itemStart=function(a){f++};this.itemEnd=function(a){e++;if(void 0!==d.onProgress)d.onProgress(a,e,f);if(e===f&&void 0!==d.onLoad)d.onLoad()}};THREE.DefaultLoadingManager=new THREE.LoadingManager;THREE.BufferGeometryLoader=function(a){this.manager=void 0!==a?a:THREE.DefaultLoadingManager};
+THREE.BufferGeometryLoader.prototype={constructor:THREE.BufferGeometryLoader,load:function(a,b,c,d){var e=this,f=new THREE.XHRLoader(e.manager);f.setCrossOrigin(this.crossOrigin);f.load(a,function(a){b(e.parse(JSON.parse(a)))},c,d)},setCrossOrigin:function(a){this.crossOrigin=a},parse:function(a){var b=new THREE.BufferGeometry,c=a.data.attributes,d;for(d in c){var e=c[d],f=new self[e.type](e.array);b.addAttribute(d,new THREE.BufferAttribute(f,e.itemSize))}c=a.data.offsets;void 0!==c&&(b. [...]
+a=a.data.boundingSphere;void 0!==a&&(c=new THREE.Vector3,void 0!==a.center&&c.fromArray(a.center),b.boundingSphere=new THREE.Sphere(c,a.radius));return b}};THREE.MaterialLoader=function(a){this.manager=void 0!==a?a:THREE.DefaultLoadingManager};
+THREE.MaterialLoader.prototype={constructor:THREE.MaterialLoader,load:function(a,b,c,d){var e=this,f=new THREE.XHRLoader(e.manager);f.setCrossOrigin(this.crossOrigin);f.load(a,function(a){b(e.parse(JSON.parse(a)))},c,d)},setCrossOrigin:function(a){this.crossOrigin=a},parse:function(a){var b=new THREE[a.type];void 0!==a.color&&b.color.setHex(a.color);void 0!==a.emissive&&b.emissive.setHex(a.emissive);void 0!==a.specular&&b.specular.setHex(a.specular);void 0!==a.shininess&&(b.shininess=a.s [...]
+void 0!==a.uniforms&&(b.uniforms=a.uniforms);void 0!==a.vertexShader&&(b.vertexShader=a.vertexShader);void 0!==a.fragmentShader&&(b.fragmentShader=a.fragmentShader);void 0!==a.vertexColors&&(b.vertexColors=a.vertexColors);void 0!==a.shading&&(b.shading=a.shading);void 0!==a.blending&&(b.blending=a.blending);void 0!==a.side&&(b.side=a.side);void 0!==a.opacity&&(b.opacity=a.opacity);void 0!==a.transparent&&(b.transparent=a.transparent);void 0!==a.wireframe&&(b.wireframe=a.wireframe);void 0 [...]
+(b.size=a.size);void 0!==a.sizeAttenuation&&(b.sizeAttenuation=a.sizeAttenuation);if(void 0!==a.materials)for(var c=0,d=a.materials.length;c<d;c++)b.materials.push(this.parse(a.materials[c]));return b}};THREE.ObjectLoader=function(a){this.manager=void 0!==a?a:THREE.DefaultLoadingManager;this.texturePath=""};
+THREE.ObjectLoader.prototype={constructor:THREE.ObjectLoader,load:function(a,b,c,d){""===this.texturePath&&(this.texturePath=a.substring(0,a.lastIndexOf("/")+1));var e=this,f=new THREE.XHRLoader(e.manager);f.setCrossOrigin(this.crossOrigin);f.load(a,function(a){e.parse(JSON.parse(a),b)},c,d)},setTexturePath:function(a){this.texturePath=a},setCrossOrigin:function(a){this.crossOrigin=a},parse:function(a,b){var c=this.parseGeometries(a.geometries),d=this.parseImages(a.images,function(){void [...]
+d=this.parseTextures(a.textures,d),d=this.parseMaterials(a.materials,d),e=this.parseObject(a.object,c,d);void 0!==a.images&&0!==a.images.length||void 0===b||b(e);return e},parseGeometries:function(a){var b={};if(void 0!==a)for(var c=new THREE.JSONLoader,d=new THREE.BufferGeometryLoader,e=0,f=a.length;e<f;e++){var g,h=a[e];switch(h.type){case "PlaneGeometry":case "PlaneBufferGeometry":g=new THREE[h.type](h.width,h.height,h.widthSegments,h.heightSegments);break;case "BoxGeometry":case "Cub [...]
+new THREE.BoxGeometry(h.width,h.height,h.depth,h.widthSegments,h.heightSegments,h.depthSegments);break;case "CircleGeometry":g=new THREE.CircleGeometry(h.radius,h.segments);break;case "CylinderGeometry":g=new THREE.CylinderGeometry(h.radiusTop,h.radiusBottom,h.height,h.radialSegments,h.heightSegments,h.openEnded);break;case "SphereGeometry":g=new THREE.SphereGeometry(h.radius,h.widthSegments,h.heightSegments,h.phiStart,h.phiLength,h.thetaStart,h.thetaLength);break;case "IcosahedronGeomet [...]
+h.detail);break;case "TorusGeometry":g=new THREE.TorusGeometry(h.radius,h.tube,h.radialSegments,h.tubularSegments,h.arc);break;case "TorusKnotGeometry":g=new THREE.TorusKnotGeometry(h.radius,h.tube,h.radialSegments,h.tubularSegments,h.p,h.q,h.heightScale);break;case "BufferGeometry":g=d.parse(h);break;case "Geometry":g=c.parse(h.data).geometry}g.uuid=h.uuid;void 0!==h.name&&(g.name=h.name);b[h.uuid]=g}return b},parseMaterials:function(a,b){var c={};if(void 0!==a)for(var d=function(a){voi [...]
+THREE.warn("THREE.ObjectLoader: Undefined texture",a);return b[a]},e=new THREE.MaterialLoader,f=0,g=a.length;f<g;f++){var h=a[f],k=e.parse(h);k.uuid=h.uuid;void 0!==h.name&&(k.name=h.name);void 0!==h.map&&(k.map=d(h.map));void 0!==h.bumpMap&&(k.bumpMap=d(h.bumpMap),h.bumpScale&&(k.bumpScale=new THREE.Vector2(h.bumpScale,h.bumpScale)));void 0!==h.alphaMap&&(k.alphaMap=d(h.alphaMap));void 0!==h.envMap&&(k.envMap=d(h.envMap));void 0!==h.normalMap&&(k.normalMap=d(h.normalMap),h.normalScale&& [...]
+new THREE.Vector2(h.normalScale,h.normalScale)));void 0!==h.lightMap&&(k.lightMap=d(h.lightMap));void 0!==h.specularMap&&(k.specularMap=d(h.specularMap));c[h.uuid]=k}return c},parseImages:function(a,b){var c=this,d={};if(void 0!==a&&0<a.length){var e=new THREE.LoadingManager(b),f=new THREE.ImageLoader(e);f.setCrossOrigin(this.crossOrigin);for(var e=function(a){c.manager.itemStart(a);return f.load(a,function(){c.manager.itemEnd(a)})},g=0,h=a.length;g<h;g++){var k=a[g],l=/^(\/\/)|([a-z]+:( [...]
+k.url:c.texturePath+k.url;d[k.uuid]=e(l)}}return d},parseTextures:function(a,b){var c={};if(void 0!==a)for(var d=0,e=a.length;d<e;d++){var f=a[d];void 0===f.image&&THREE.warn('THREE.ObjectLoader: No "image" specified for',f.uuid);void 0===b[f.image]&&THREE.warn("THREE.ObjectLoader: Undefined image",f.image);var g=new THREE.Texture(b[f.image]);g.needsUpdate=!0;g.uuid=f.uuid;void 0!==f.name&&(g.name=f.name);void 0!==f.repeat&&(g.repeat=new THREE.Vector2(f.repeat[0],f.repeat[1]));void 0!==f [...]
+(g.minFilter=THREE[f.minFilter]);void 0!==f.magFilter&&(g.magFilter=THREE[f.magFilter]);void 0!==f.anisotropy&&(g.anisotropy=f.anisotropy);f.wrap instanceof Array&&(g.wrapS=THREE[f.wrap[0]],g.wrapT=THREE[f.wrap[1]]);c[f.uuid]=g}return c},parseObject:function(){var a=new THREE.Matrix4;return function(b,c,d){var e;e=function(a){void 0===c[a]&&THREE.warn("THREE.ObjectLoader: Undefined geometry",a);return c[a]};var f=function(a){void 0===d[a]&&THREE.warn("THREE.ObjectLoader: Undefined materi [...]
+switch(b.type){case "Scene":e=new THREE.Scene;break;case "PerspectiveCamera":e=new THREE.PerspectiveCamera(b.fov,b.aspect,b.near,b.far);break;case "OrthographicCamera":e=new THREE.OrthographicCamera(b.left,b.right,b.top,b.bottom,b.near,b.far);break;case "AmbientLight":e=new THREE.AmbientLight(b.color);break;case "DirectionalLight":e=new THREE.DirectionalLight(b.color,b.intensity);break;case "PointLight":e=new THREE.PointLight(b.color,b.intensity,b.distance,b.decay);break;case "SpotLight" [...]
+b.intensity,b.distance,b.angle,b.exponent,b.decay);break;case "HemisphereLight":e=new THREE.HemisphereLight(b.color,b.groundColor,b.intensity);break;case "Mesh":e=new THREE.Mesh(e(b.geometry),f(b.material));break;case "Line":e=new THREE.Line(e(b.geometry),f(b.material),b.mode);break;case "PointCloud":e=new THREE.PointCloud(e(b.geometry),f(b.material));break;case "Sprite":e=new THREE.Sprite(f(b.material));break;case "Group":e=new THREE.Group;break;default:e=new THREE.Object3D}e.uuid=b.uui [...]
+b.name&&(e.name=b.name);void 0!==b.matrix?(a.fromArray(b.matrix),a.decompose(e.position,e.quaternion,e.scale)):(void 0!==b.position&&e.position.fromArray(b.position),void 0!==b.rotation&&e.rotation.fromArray(b.rotation),void 0!==b.scale&&e.scale.fromArray(b.scale));void 0!==b.visible&&(e.visible=b.visible);void 0!==b.userData&&(e.userData=b.userData);if(void 0!==b.children)for(var g in b.children)e.add(this.parseObject(b.children[g],c,d));return e}}()};
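THREE.ObjectLoader.parse() above rebuilds a whole serialized scene graph, in order: geometries, images, textures, materials, then the object tree, and load() hands the reconstructed root to its callback once any referenced images have finished loading. A sketch (URL and `scene` are illustrative):

    var loader = new THREE.ObjectLoader();
    loader.load('scene.json', function (root) {
        scene.add(root);  // root is the reconstructed THREE.Object3D hierarchy
    });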
+THREE.TextureLoader=function(a){this.manager=void 0!==a?a:THREE.DefaultLoadingManager};THREE.TextureLoader.prototype={constructor:THREE.TextureLoader,load:function(a,b,c,d){var e=new THREE.ImageLoader(this.manager);e.setCrossOrigin(this.crossOrigin);e.load(a,function(a){a=new THREE.Texture(a);a.needsUpdate=!0;void 0!==b&&b(a)},c,d)},setCrossOrigin:function(a){this.crossOrigin=a}};THREE.DataTextureLoader=THREE.BinaryTextureLoader=function(){this._parser=null};
+THREE.BinaryTextureLoader.prototype={constructor:THREE.BinaryTextureLoader,load:function(a,b,c,d){var e=this,f=new THREE.DataTexture,g=new THREE.XHRLoader;g.setResponseType("arraybuffer");g.load(a,function(a){if(a=e._parser(a))void 0!==a.image?f.image=a.image:void 0!==a.data&&(f.image.width=a.width,f.image.height=a.height,f.image.data=a.data),f.wrapS=void 0!==a.wrapS?a.wrapS:THREE.ClampToEdgeWrapping,f.wrapT=void 0!==a.wrapT?a.wrapT:THREE.ClampToEdgeWrapping,f.magFilter=void 0!==a.magFil [...]
+THREE.LinearFilter,f.minFilter=void 0!==a.minFilter?a.minFilter:THREE.LinearMipMapLinearFilter,f.anisotropy=void 0!==a.anisotropy?a.anisotropy:1,void 0!==a.format&&(f.format=a.format),void 0!==a.type&&(f.type=a.type),void 0!==a.mipmaps&&(f.mipmaps=a.mipmaps),1===a.mipmapCount&&(f.minFilter=THREE.LinearFilter),f.needsUpdate=!0,b&&b(f,a)},c,d);return f}};THREE.CompressedTextureLoader=function(){this._parser=null};
+THREE.CompressedTextureLoader.prototype={constructor:THREE.CompressedTextureLoader,load:function(a,b,c){var d=this,e=[],f=new THREE.CompressedTexture;f.image=e;var g=new THREE.XHRLoader;g.setResponseType("arraybuffer");if(a instanceof Array){var h=0;c=function(c){g.load(a[c],function(a){a=d._parser(a,!0);e[c]={width:a.width,height:a.height,format:a.format,mipmaps:a.mipmaps};h+=1;6===h&&(1==a.mipmapCount&&(f.minFilter=THREE.LinearFilter),f.format=a.format,f.needsUpdate=!0,b&&b(f))})};for( [...]
+a.length;k<l;++k)c(k)}else g.load(a,function(a){a=d._parser(a,!0);if(a.isCubemap)for(var c=a.mipmaps.length/a.mipmapCount,g=0;g<c;g++){e[g]={mipmaps:[]};for(var h=0;h<a.mipmapCount;h++)e[g].mipmaps.push(a.mipmaps[g*a.mipmapCount+h]),e[g].format=a.format,e[g].width=a.width,e[g].height=a.height}else f.image.width=a.width,f.image.height=a.height,f.mipmaps=a.mipmaps;1===a.mipmapCount&&(f.minFilter=THREE.LinearFilter);f.format=a.format;f.needsUpdate=!0;b&&b(f)});return f}};
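THREE.TextureLoader above wraps ImageLoader and marks the resulting texture needsUpdate before invoking the callback. A sketch; the path and `material` variable are illustrative:

    var loader = new THREE.TextureLoader();
    loader.load('textures/example.jpg', function (texture) {
        material.map = texture;       // assumes an existing mesh material
        material.needsUpdate = true;  // dispatches the 'update' event defined on THREE.Material below
    });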
+THREE.Material=function(){Object.defineProperty(this,"id",{value:THREE.MaterialIdCount++});this.uuid=THREE.Math.generateUUID();this.name="";this.type="Material";this.side=THREE.FrontSide;this.opacity=1;this.transparent=!1;this.blending=THREE.NormalBlending;this.blendSrc=THREE.SrcAlphaFactor;this.blendDst=THREE.OneMinusSrcAlphaFactor;this.blendEquation=THREE.AddEquation;this.blendEquationAlpha=this.blendDstAlpha=this.blendSrcAlpha=null;this.colorWrite=this.depthWrite=this.depthTest=!0;thi [...]
+!1;this.overdraw=this.alphaTest=this.polygonOffsetUnits=this.polygonOffsetFactor=0;this._needsUpdate=this.visible=!0};
+THREE.Material.prototype={constructor:THREE.Material,get needsUpdate(){return this._needsUpdate},set needsUpdate(a){!0===a&&this.update();this._needsUpdate=a},setValues:function(a){if(void 0!==a)for(var b in a){var c=a[b];if(void 0===c)THREE.warn("THREE.Material: '"+b+"' parameter is undefined.");else if(b in this){var d=this[b];d instanceof THREE.Color?d.set(c):d instanceof THREE.Vector3&&c instanceof THREE.Vector3?d.copy(c):this[b]="overdraw"==b?Number(c):c}}},toJSON:function(){var a={ [...]
+type:"material",generator:"MaterialExporter"},uuid:this.uuid,type:this.type};""!==this.name&&(a.name=this.name);this instanceof THREE.MeshBasicMaterial?(a.color=this.color.getHex(),this.vertexColors!==THREE.NoColors&&(a.vertexColors=this.vertexColors),this.blending!==THREE.NormalBlending&&(a.blending=this.blending),this.side!==THREE.FrontSide&&(a.side=this.side)):this instanceof THREE.MeshLambertMaterial?(a.color=this.color.getHex(),a.emissive=this.emissive.getHex(),this.vertexColors!==T [...]
+(a.vertexColors=this.vertexColors),this.shading!==THREE.SmoothShading&&(a.shading=this.shading),this.blending!==THREE.NormalBlending&&(a.blending=this.blending),this.side!==THREE.FrontSide&&(a.side=this.side)):this instanceof THREE.MeshPhongMaterial?(a.color=this.color.getHex(),a.emissive=this.emissive.getHex(),a.specular=this.specular.getHex(),a.shininess=this.shininess,this.vertexColors!==THREE.NoColors&&(a.vertexColors=this.vertexColors),this.shading!==THREE.SmoothShading&&(a.shading= [...]
+this.blending!==THREE.NormalBlending&&(a.blending=this.blending),this.side!==THREE.FrontSide&&(a.side=this.side)):this instanceof THREE.MeshNormalMaterial?(this.blending!==THREE.NormalBlending&&(a.blending=this.blending),this.side!==THREE.FrontSide&&(a.side=this.side)):this instanceof THREE.MeshDepthMaterial?(this.blending!==THREE.NormalBlending&&(a.blending=this.blending),this.side!==THREE.FrontSide&&(a.side=this.side)):this instanceof THREE.PointCloudMaterial?(a.size=this.size,a.sizeAt [...]
+this.sizeAttenuation,a.color=this.color.getHex(),this.vertexColors!==THREE.NoColors&&(a.vertexColors=this.vertexColors),this.blending!==THREE.NormalBlending&&(a.blending=this.blending)):this instanceof THREE.ShaderMaterial?(a.uniforms=this.uniforms,a.vertexShader=this.vertexShader,a.fragmentShader=this.fragmentShader):this instanceof THREE.SpriteMaterial&&(a.color=this.color.getHex());1>this.opacity&&(a.opacity=this.opacity);!1!==this.transparent&&(a.transparent=this.transparent);!1!==th [...]
+(a.wireframe=this.wireframe);return a},clone:function(a){void 0===a&&(a=new THREE.Material);a.name=this.name;a.side=this.side;a.opacity=this.opacity;a.transparent=this.transparent;a.blending=this.blending;a.blendSrc=this.blendSrc;a.blendDst=this.blendDst;a.blendEquation=this.blendEquation;a.blendSrcAlpha=this.blendSrcAlpha;a.blendDstAlpha=this.blendDstAlpha;a.blendEquationAlpha=this.blendEquationAlpha;a.depthTest=this.depthTest;a.depthWrite=this.depthWrite;a.polygonOffset=this.polygonOff [...]
+this.polygonOffsetFactor;a.polygonOffsetUnits=this.polygonOffsetUnits;a.alphaTest=this.alphaTest;a.overdraw=this.overdraw;a.visible=this.visible;return a},update:function(){this.dispatchEvent({type:"update"})},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Material.prototype);THREE.MaterialIdCount=0;
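The base THREE.Material above establishes two conventions the subclasses below rely on: setValues() copies a plain options object onto the material (silently skipping keys the material does not define, warning on undefined values), and the needsUpdate setter dispatches an 'update' event through EventDispatcher. A minimal sketch:

    var mat = new THREE.Material();
    mat.setValues({ opacity: 0.5, transparent: true });
    mat.needsUpdate = true;  // invokes update(), which dispatches { type: 'update' }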
+THREE.LineBasicMaterial=function(a){THREE.Material.call(this);this.type="LineBasicMaterial";this.color=new THREE.Color(16777215);this.linewidth=1;this.linejoin=this.linecap="round";this.vertexColors=THREE.NoColors;this.fog=!0;this.setValues(a)};THREE.LineBasicMaterial.prototype=Object.create(THREE.Material.prototype);THREE.LineBasicMaterial.prototype.constructor=THREE.LineBasicMaterial;
+THREE.LineBasicMaterial.prototype.clone=function(){var a=new THREE.LineBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.linecap=this.linecap;a.linejoin=this.linejoin;a.vertexColors=this.vertexColors;a.fog=this.fog;return a};
+THREE.LineDashedMaterial=function(a){THREE.Material.call(this);this.type="LineDashedMaterial";this.color=new THREE.Color(16777215);this.scale=this.linewidth=1;this.dashSize=3;this.gapSize=1;this.vertexColors=!1;this.fog=!0;this.setValues(a)};THREE.LineDashedMaterial.prototype=Object.create(THREE.Material.prototype);THREE.LineDashedMaterial.prototype.constructor=THREE.LineDashedMaterial;
+THREE.LineDashedMaterial.prototype.clone=function(){var a=new THREE.LineDashedMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.scale=this.scale;a.dashSize=this.dashSize;a.gapSize=this.gapSize;a.vertexColors=this.vertexColors;a.fog=this.fog;return a};
+THREE.MeshBasicMaterial=function(a){THREE.Material.call(this);this.type="MeshBasicMaterial";this.color=new THREE.Color(16777215);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphTargets=this.skinning=!1; [...]
+THREE.MeshBasicMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshBasicMaterial.prototype.constructor=THREE.MeshBasicMaterial;
+THREE.MeshBasicMaterial.prototype.clone=function(){var a=new THREE.MeshBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinec [...]
+a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;return a};
+THREE.MeshLambertMaterial=function(a){THREE.Material.call(this);this.type="MeshLambertMaterial";this.color=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.wrapAround=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this [...]
+"round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshLambertMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshLambertMaterial.prototype.constructor=THREE.MeshLambertMaterial;
+THREE.MeshLambertMaterial.prototype.clone=function(){var a=new THREE.MeshLambertMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.emissive.copy(this.emissive);a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.sha [...]
+this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a};
+THREE.MeshPhongMaterial=function(a){THREE.Material.call(this);this.type="MeshPhongMaterial";this.color=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.specular=new THREE.Color(1118481);this.shininess=30;this.wrapAround=this.metal=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.bumpMap=this.lightMap=this.map=null;this.bumpScale=1;this.normalMap=null;this.normalScale=new THREE.Vector2(1,1);this.envMap=this.alphaMap=this.specularMap=null;this.combine=THREE.MultiplyOperation;th [...]
+1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshPhongMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshPhongMaterial.prototype.constructor=THREE.MeshPhongMaterial;
+THREE.MeshPhongMaterial.prototype.clone=function(){var a=new THREE.MeshPhongMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.emissive.copy(this.emissive);a.specular.copy(this.specular);a.shininess=this.shininess;a.metal=this.metal;a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.bumpMap=this.bumpMap;a.bumpScale=this.bumpScale;a.normalMap=this.normalMap;a.normalScale.copy(this.normalScale);a.specularMap=this [...]
+a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a};
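MeshPhongMaterial, like the other concrete materials here, forwards its constructor argument to setValues(), so any property initialized above can be supplied as an option. Illustrative values:

    var shiny = new THREE.MeshPhongMaterial({
        color:     0x156289,
        emissive:  0x072534,
        specular:  0x111111,
        shininess: 30
    });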
+THREE.MeshDepthMaterial=function(a){THREE.Material.call(this);this.type="MeshDepthMaterial";this.wireframe=this.morphTargets=!1;this.wireframeLinewidth=1;this.setValues(a)};THREE.MeshDepthMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshDepthMaterial.prototype.constructor=THREE.MeshDepthMaterial;
+THREE.MeshDepthMaterial.prototype.clone=function(){var a=new THREE.MeshDepthMaterial;THREE.Material.prototype.clone.call(this,a);a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a};THREE.MeshNormalMaterial=function(a){THREE.Material.call(this,a);this.type="MeshNormalMaterial";this.wireframe=!1;this.wireframeLinewidth=1;this.morphTargets=!1;this.setValues(a)};THREE.MeshNormalMaterial.prototype=Object.create(THREE.Material.prototype);
+THREE.MeshNormalMaterial.prototype.constructor=THREE.MeshNormalMaterial;THREE.MeshNormalMaterial.prototype.clone=function(){var a=new THREE.MeshNormalMaterial;THREE.Material.prototype.clone.call(this,a);a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a};THREE.MeshFaceMaterial=function(a){this.uuid=THREE.Math.generateUUID();this.type="MeshFaceMaterial";this.materials=a instanceof Array?a:[]};
+THREE.MeshFaceMaterial.prototype={constructor:THREE.MeshFaceMaterial,toJSON:function(){for(var a={metadata:{version:4.2,type:"material",generator:"MaterialExporter"},uuid:this.uuid,type:this.type,materials:[]},b=0,c=this.materials.length;b<c;b++)a.materials.push(this.materials[b].toJSON());return a},clone:function(){for(var a=new THREE.MeshFaceMaterial,b=0;b<this.materials.length;b++)a.materials.push(this.materials[b].clone());return a}};
+THREE.PointCloudMaterial=function(a){THREE.Material.call(this);this.type="PointCloudMaterial";this.color=new THREE.Color(16777215);this.map=null;this.size=1;this.sizeAttenuation=!0;this.vertexColors=THREE.NoColors;this.fog=!0;this.setValues(a)};THREE.PointCloudMaterial.prototype=Object.create(THREE.Material.prototype);THREE.PointCloudMaterial.prototype.constructor=THREE.PointCloudMaterial;
+THREE.PointCloudMaterial.prototype.clone=function(){var a=new THREE.PointCloudMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.size=this.size;a.sizeAttenuation=this.sizeAttenuation;a.vertexColors=this.vertexColors;a.fog=this.fog;return a};THREE.ParticleBasicMaterial=function(a){THREE.warn("THREE.ParticleBasicMaterial has been renamed to THREE.PointCloudMaterial.");return new THREE.PointCloudMaterial(a)};
+THREE.ParticleSystemMaterial=function(a){THREE.warn("THREE.ParticleSystemMaterial has been renamed to THREE.PointCloudMaterial.");return new THREE.PointCloudMaterial(a)};
+THREE.ShaderMaterial=function(a){THREE.Material.call(this);this.type="ShaderMaterial";this.defines={};this.uniforms={};this.attributes=null;this.vertexShader="void main() {\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n}";this.fragmentShader="void main() {\n\tgl_FragColor = vec4( 1.0, 0.0, 0.0, 1.0 );\n}";this.shading=THREE.SmoothShading;this.linewidth=1;this.wireframe=!1;this.wireframeLinewidth=1;this.lights=this.fog=!1;this.vertexColors=THREE.NoColors;th [...]
+this.morphTargets=this.skinning=!1;this.defaultAttributeValues={color:[1,1,1],uv:[0,0],uv2:[0,0]};this.index0AttributeName=void 0;this.setValues(a)};THREE.ShaderMaterial.prototype=Object.create(THREE.Material.prototype);THREE.ShaderMaterial.prototype.constructor=THREE.ShaderMaterial;
+THREE.ShaderMaterial.prototype.clone=function(){var a=new THREE.ShaderMaterial;THREE.Material.prototype.clone.call(this,a);a.fragmentShader=this.fragmentShader;a.vertexShader=this.vertexShader;a.uniforms=THREE.UniformsUtils.clone(this.uniforms);a.attributes=this.attributes;a.defines=this.defines;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.fog=this.fog;a.lights=this.lights;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morp [...]
+this.morphTargets;a.morphNormals=this.morphNormals;return a};THREE.RawShaderMaterial=function(a){THREE.ShaderMaterial.call(this,a);this.type="RawShaderMaterial"};THREE.RawShaderMaterial.prototype=Object.create(THREE.ShaderMaterial.prototype);THREE.RawShaderMaterial.prototype.constructor=THREE.RawShaderMaterial;THREE.RawShaderMaterial.prototype.clone=function(){var a=new THREE.RawShaderMaterial;THREE.ShaderMaterial.prototype.clone.call(this,a);return a};
+THREE.SpriteMaterial=function(a){THREE.Material.call(this);this.type="SpriteMaterial";this.color=new THREE.Color(16777215);this.map=null;this.rotation=0;this.fog=!1;this.setValues(a)};THREE.SpriteMaterial.prototype=Object.create(THREE.Material.prototype);THREE.SpriteMaterial.prototype.constructor=THREE.SpriteMaterial;
+THREE.SpriteMaterial.prototype.clone=function(){var a=new THREE.SpriteMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.rotation=this.rotation;a.fog=this.fog;return a};
+THREE.Texture=function(a,b,c,d,e,f,g,h,k){Object.defineProperty(this,"id",{value:THREE.TextureIdCount++});this.uuid=THREE.Math.generateUUID();this.sourceFile=this.name="";this.image=void 0!==a?a:THREE.Texture.DEFAULT_IMAGE;this.mipmaps=[];this.mapping=void 0!==b?b:THREE.Texture.DEFAULT_MAPPING;this.wrapS=void 0!==c?c:THREE.ClampToEdgeWrapping;this.wrapT=void 0!==d?d:THREE.ClampToEdgeWrapping;this.magFilter=void 0!==e?e:THREE.LinearFilter;this.minFilter=void 0!==f?f:THREE.LinearMipMapLine [...]
+this.anisotropy=void 0!==k?k:1;this.format=void 0!==g?g:THREE.RGBAFormat;this.type=void 0!==h?h:THREE.UnsignedByteType;this.offset=new THREE.Vector2(0,0);this.repeat=new THREE.Vector2(1,1);this.generateMipmaps=!0;this.premultiplyAlpha=!1;this.flipY=!0;this.unpackAlignment=4;this._needsUpdate=!1;this.onUpdate=null};THREE.Texture.DEFAULT_IMAGE=void 0;THREE.Texture.DEFAULT_MAPPING=THREE.UVMapping;
+THREE.Texture.prototype={constructor:THREE.Texture,get needsUpdate(){return this._needsUpdate},set needsUpdate(a){!0===a&&this.update();this._needsUpdate=a},clone:function(a){void 0===a&&(a=new THREE.Texture);a.image=this.image;a.mipmaps=this.mipmaps.slice(0);a.mapping=this.mapping;a.wrapS=this.wrapS;a.wrapT=this.wrapT;a.magFilter=this.magFilter;a.minFilter=this.minFilter;a.anisotropy=this.anisotropy;a.format=this.format;a.type=this.type;a.offset.copy(this.offset);a.repeat.copy(this.repe [...]
+this.generateMipmaps;a.premultiplyAlpha=this.premultiplyAlpha;a.flipY=this.flipY;a.unpackAlignment=this.unpackAlignment;return a},update:function(){this.dispatchEvent({type:"update"})},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Texture.prototype);THREE.TextureIdCount=0;THREE.CubeTexture=function(a,b,c,d,e,f,g,h,k){b=void 0!==b?b:THREE.CubeReflectionMapping;THREE.Texture.call(this,a,b,c,d,e,f,g,h,k);this.images=a};
+THREE.CubeTexture.prototype=Object.create(THREE.Texture.prototype);THREE.CubeTexture.prototype.constructor=THREE.CubeTexture;THREE.CubeTexture.clone=function(a){void 0===a&&(a=new THREE.CubeTexture);THREE.Texture.prototype.clone.call(this,a);a.images=this.images;return a};THREE.CompressedTexture=function(a,b,c,d,e,f,g,h,k,l,p){THREE.Texture.call(this,null,f,g,h,k,l,d,e,p);this.image={width:b,height:c};this.mipmaps=a;this.generateMipmaps=this.flipY=!1};THREE.CompressedTexture.prototype=Ob [...]
+THREE.CompressedTexture.prototype.constructor=THREE.CompressedTexture;THREE.CompressedTexture.prototype.clone=function(){var a=new THREE.CompressedTexture;THREE.Texture.prototype.clone.call(this,a);return a};THREE.DataTexture=function(a,b,c,d,e,f,g,h,k,l,p){THREE.Texture.call(this,null,f,g,h,k,l,d,e,p);this.image={data:a,width:b,height:c}};THREE.DataTexture.prototype=Object.create(THREE.Texture.prototype);THREE.DataTexture.prototype.constructor=THREE.DataTexture;
+THREE.DataTexture.prototype.clone=function(){var a=new THREE.DataTexture;THREE.Texture.prototype.clone.call(this,a);return a};THREE.VideoTexture=function(a,b,c,d,e,f,g,h,k){THREE.Texture.call(this,a,b,c,d,e,f,g,h,k);this.generateMipmaps=!1;var l=this,p=function(){requestAnimationFrame(p);a.readyState===a.HAVE_ENOUGH_DATA&&(l.needsUpdate=!0)};p()};THREE.VideoTexture.prototype=Object.create(THREE.Texture.prototype);THREE.VideoTexture.prototype.constructor=THREE.VideoTexture;
+THREE.Group=function(){THREE.Object3D.call(this);this.type="Group"};THREE.Group.prototype=Object.create(THREE.Object3D.prototype);THREE.Group.prototype.constructor=THREE.Group;THREE.PointCloud=function(a,b){THREE.Object3D.call(this);this.type="PointCloud";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.PointCloudMaterial({color:16777215*Math.random()})};THREE.PointCloud.prototype=Object.create(THREE.Object3D.prototype);THREE.PointCloud.prototype.constru [...]
+THREE.PointCloud.prototype.raycast=function(){var a=new THREE.Matrix4,b=new THREE.Ray;return function(c,d){var e=this,f=e.geometry,g=c.params.PointCloud.threshold;a.getInverse(this.matrixWorld);b.copy(c.ray).applyMatrix4(a);if(null===f.boundingBox||!1!==b.isIntersectionBox(f.boundingBox)){var h=g/((this.scale.x+this.scale.y+this.scale.z)/3),k=new THREE.Vector3,g=function(a,f){var g=b.distanceToPoint(a);if(g<h){var k=b.closestPointToPoint(a);k.applyMatrix4(e.matrixWorld);var n=c.ray.origi [...]
+d.push({distance:n,distanceToRay:g,point:k.clone(),index:f,face:null,object:e})}};if(f instanceof THREE.BufferGeometry){var l=f.attributes,p=l.position.array;if(void 0!==l.index){var l=l.index.array,q=f.offsets;0===q.length&&(q=[{start:0,count:l.length,index:0}]);for(var n=0,t=q.length;n<t;++n)for(var r=q[n].start,s=q[n].index,f=r,r=r+q[n].count;f<r;f++){var u=s+l[f];k.fromArray(p,3*u);g(k,u)}}else for(l=p.length/3,f=0;f<l;f++)k.set(p[3*f],p[3*f+1],p[3*f+2]),g(k,f)}else for(k=this.geomet [...]
+f=0;f<k.length;f++)g(k[f],f)}}}();THREE.PointCloud.prototype.clone=function(a){void 0===a&&(a=new THREE.PointCloud(this.geometry,this.material));THREE.Object3D.prototype.clone.call(this,a);return a};THREE.ParticleSystem=function(a,b){THREE.warn("THREE.ParticleSystem has been renamed to THREE.PointCloud.");return new THREE.PointCloud(a,b)};
+THREE.Line=function(a,b,c){THREE.Object3D.call(this);this.type="Line";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.LineBasicMaterial({color:16777215*Math.random()});this.mode=void 0!==c?c:THREE.LineStrip};THREE.LineStrip=0;THREE.LinePieces=1;THREE.Line.prototype=Object.create(THREE.Object3D.prototype);THREE.Line.prototype.constructor=THREE.Line;
+THREE.Line.prototype.raycast=function(){var a=new THREE.Matrix4,b=new THREE.Ray,c=new THREE.Sphere;return function(d,e){var f=d.linePrecision,f=f*f,g=this.geometry;null===g.boundingSphere&&g.computeBoundingSphere();c.copy(g.boundingSphere);c.applyMatrix4(this.matrixWorld);if(!1!==d.ray.isIntersectionSphere(c)){a.getInverse(this.matrixWorld);b.copy(d.ray).applyMatrix4(a);var h=new THREE.Vector3,k=new THREE.Vector3,l=new THREE.Vector3,p=new THREE.Vector3,q=this.mode===THREE.LineStrip?1:2;i [...]
+THREE.BufferGeometry){var n=g.attributes;if(void 0!==n.index){var t=n.index.array,n=n.position.array,r=g.offsets;0===r.length&&(r=[{start:0,count:t.length,index:0}]);for(var s=0;s<r.length;s++)for(var u=r[s].start,v=r[s].count,x=r[s].index,g=u;g<u+v-1;g+=q){var D=x+t[g+1];h.fromArray(n,3*(x+t[g]));k.fromArray(n,3*D);D=b.distanceSqToSegment(h,k,p,l);D>f||(D=b.origin.distanceTo(p),D<d.near||D>d.far||e.push({distance:D,point:l.clone().applyMatrix4(this.matrixWorld),index:g,offsetIndex:s,fac [...]
+object:this}))}}else for(n=n.position.array,g=0;g<n.length/3-1;g+=q)h.fromArray(n,3*g),k.fromArray(n,3*g+3),D=b.distanceSqToSegment(h,k,p,l),D>f||(D=b.origin.distanceTo(p),D<d.near||D>d.far||e.push({distance:D,point:l.clone().applyMatrix4(this.matrixWorld),index:g,face:null,faceIndex:null,object:this}))}else if(g instanceof THREE.Geometry)for(h=g.vertices,k=h.length,g=0;g<k-1;g+=q)D=b.distanceSqToSegment(h[g],h[g+1],p,l),D>f||(D=b.origin.distanceTo(p),D<d.near||D>d.far||e.push({distance: [...]
+index:g,face:null,faceIndex:null,object:this}))}}}();THREE.Line.prototype.clone=function(a){void 0===a&&(a=new THREE.Line(this.geometry,this.material,this.mode));THREE.Object3D.prototype.clone.call(this,a);return a};THREE.Mesh=function(a,b){THREE.Object3D.call(this);this.type="Mesh";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.MeshBasicMaterial({color:16777215*Math.random()});this.updateMorphTargets()};THREE.Mesh.prototype=Object.create(THREE.Object3 [...]
+THREE.Mesh.prototype.constructor=THREE.Mesh;THREE.Mesh.prototype.updateMorphTargets=function(){if(void 0!==this.geometry.morphTargets&&0<this.geometry.morphTargets.length){this.morphTargetBase=-1;this.morphTargetForcedOrder=[];this.morphTargetInfluences=[];this.morphTargetDictionary={};for(var a=0,b=this.geometry.morphTargets.length;a<b;a++)this.morphTargetInfluences.push(0),this.morphTargetDictionary[this.geometry.morphTargets[a].name]=a}};
+THREE.Mesh.prototype.getMorphTargetIndexByName=function(a){if(void 0!==this.morphTargetDictionary[a])return this.morphTargetDictionary[a];THREE.warn("THREE.Mesh.getMorphTargetIndexByName: morph target "+a+" does not exist. Returning 0.");return 0};
+THREE.Mesh.prototype.raycast=function(){var a=new THREE.Matrix4,b=new THREE.Ray,c=new THREE.Sphere,d=new THREE.Vector3,e=new THREE.Vector3,f=new THREE.Vector3;return function(g,h){var k=this.geometry;null===k.boundingSphere&&k.computeBoundingSphere();c.copy(k.boundingSphere);c.applyMatrix4(this.matrixWorld);if(!1!==g.ray.isIntersectionSphere(c)&&(a.getInverse(this.matrixWorld),b.copy(g.ray).applyMatrix4(a),null===k.boundingBox||!1!==b.isIntersectionBox(k.boundingBox)))if(k instanceof THR [...]
+this.material;if(void 0!==l){var p=k.attributes,q,n,t=g.precision;if(void 0!==p.index){var r=p.index.array,s=p.position.array,u=k.offsets;0===u.length&&(u=[{start:0,count:r.length,index:0}]);for(var v=0,x=u.length;v<x;++v)for(var p=u[v].start,D=u[v].index,k=p,w=p+u[v].count;k<w;k+=3){p=D+r[k];q=D+r[k+1];n=D+r[k+2];d.fromArray(s,3*p);e.fromArray(s,3*q);f.fromArray(s,3*n);var y=l.side===THREE.BackSide?b.intersectTriangle(f,e,d,!0):b.intersectTriangle(d,e,f,l.side!==THREE.DoubleSide);if(nul [...]
+var A=g.ray.origin.distanceTo(y);A<t||A<g.near||A>g.far||h.push({distance:A,point:y,face:new THREE.Face3(p,q,n,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this})}}}else for(s=p.position.array,r=k=0,w=s.length;k<w;k+=3,r+=9)p=k,q=k+1,n=k+2,d.fromArray(s,r),e.fromArray(s,r+3),f.fromArray(s,r+6),y=l.side===THREE.BackSide?b.intersectTriangle(f,e,d,!0):b.intersectTriangle(d,e,f,l.side!==THREE.DoubleSide),null!==y&&(y.applyMatrix4(this.matrixWorld),A=g.ray.origin.distanceTo(y),A<t||A<g [...]
+g.far||h.push({distance:A,point:y,face:new THREE.Face3(p,q,n,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this}))}}else if(k instanceof THREE.Geometry)for(r=this.material instanceof THREE.MeshFaceMaterial,s=!0===r?this.material.materials:null,t=g.precision,u=k.vertices,v=0,x=k.faces.length;v<x;v++)if(D=k.faces[v],l=!0===r?s[D.materialIndex]:this.material,void 0!==l){p=u[D.a];q=u[D.b];n=u[D.c];if(!0===l.morphTargets){y=k.morphTargets;A=this.morphTargetInfluences;d.set(0,0,0);e.set( [...]
+0,0);for(var w=0,E=y.length;w<E;w++){var G=A[w];if(0!==G){var F=y[w].vertices;d.x+=(F[D.a].x-p.x)*G;d.y+=(F[D.a].y-p.y)*G;d.z+=(F[D.a].z-p.z)*G;e.x+=(F[D.b].x-q.x)*G;e.y+=(F[D.b].y-q.y)*G;e.z+=(F[D.b].z-q.z)*G;f.x+=(F[D.c].x-n.x)*G;f.y+=(F[D.c].y-n.y)*G;f.z+=(F[D.c].z-n.z)*G}}d.add(p);e.add(q);f.add(n);p=d;q=e;n=f}y=l.side===THREE.BackSide?b.intersectTriangle(n,q,p,!0):b.intersectTriangle(p,q,n,l.side!==THREE.DoubleSide);null!==y&&(y.applyMatrix4(this.matrixWorld),A=g.ray.origin.distance [...]
+A<g.near||A>g.far||h.push({distance:A,point:y,face:D,faceIndex:v,object:this}))}}}();THREE.Mesh.prototype.clone=function(a,b){void 0===a&&(a=new THREE.Mesh(this.geometry,this.material));THREE.Object3D.prototype.clone.call(this,a,b);return a};THREE.Bone=function(a){THREE.Object3D.call(this);this.type="Bone";this.skin=a};THREE.Bone.prototype=Object.create(THREE.Object3D.prototype);THREE.Bone.prototype.constructor=THREE.Bone;
+THREE.Skeleton=function(a,b,c){this.useVertexTexture=void 0!==c?c:!0;this.identityMatrix=new THREE.Matrix4;a=a||[];this.bones=a.slice(0);this.useVertexTexture?(this.boneTextureHeight=this.boneTextureWidth=a=256<this.bones.length?64:64<this.bones.length?32:16<this.bones.length?16:8,this.boneMatrices=new Float32Array(this.boneTextureWidth*this.boneTextureHeight*4),this.boneTexture=new THREE.DataTexture(this.boneMatrices,this.boneTextureWidth,this.boneTextureHeight,THREE.RGBAFormat,THREE.Fl [...]
+this.boneTexture.minFilter=THREE.NearestFilter,this.boneTexture.magFilter=THREE.NearestFilter,this.boneTexture.generateMipmaps=!1,this.boneTexture.flipY=!1):this.boneMatrices=new Float32Array(16*this.bones.length);if(void 0===b)this.calculateInverses();else if(this.bones.length===b.length)this.boneInverses=b.slice(0);else for(THREE.warn("THREE.Skeleton boneInverses is the wrong length."),this.boneInverses=[],b=0,a=this.bones.length;b<a;b++)this.boneInverses.push(new THREE.Matrix4)};
+THREE.Skeleton.prototype.calculateInverses=function(){this.boneInverses=[];for(var a=0,b=this.bones.length;a<b;a++){var c=new THREE.Matrix4;this.bones[a]&&c.getInverse(this.bones[a].matrixWorld);this.boneInverses.push(c)}};
+THREE.Skeleton.prototype.pose=function(){for(var a,b=0,c=this.bones.length;b<c;b++)(a=this.bones[b])&&a.matrixWorld.getInverse(this.boneInverses[b]);b=0;for(c=this.bones.length;b<c;b++)if(a=this.bones[b])a.parent?(a.matrix.getInverse(a.parent.matrixWorld),a.matrix.multiply(a.matrixWorld)):a.matrix.copy(a.matrixWorld),a.matrix.decompose(a.position,a.quaternion,a.scale)};
+THREE.Skeleton.prototype.update=function(){var a=new THREE.Matrix4;return function(){for(var b=0,c=this.bones.length;b<c;b++)a.multiplyMatrices(this.bones[b]?this.bones[b].matrixWorld:this.identityMatrix,this.boneInverses[b]),a.flattenToArrayOffset(this.boneMatrices,16*b);this.useVertexTexture&&(this.boneTexture.needsUpdate=!0)}}();
+THREE.SkinnedMesh=function(a,b,c){THREE.Mesh.call(this,a,b);this.type="SkinnedMesh";this.bindMode="attached";this.bindMatrix=new THREE.Matrix4;this.bindMatrixInverse=new THREE.Matrix4;a=[];if(this.geometry&&void 0!==this.geometry.bones){for(var d,e,f,g,h=0,k=this.geometry.bones.length;h<k;++h)d=this.geometry.bones[h],e=d.pos,f=d.rotq,g=d.scl,b=new THREE.Bone(this),a.push(b),b.name=d.name,b.position.set(e[0],e[1],e[2]),b.quaternion.set(f[0],f[1],f[2],f[3]),void 0!==g?b.scale.set(g[0],g[1] [...]
+1,1);h=0;for(k=this.geometry.bones.length;h<k;++h)d=this.geometry.bones[h],-1!==d.parent?a[d.parent].add(a[h]):this.add(a[h])}this.normalizeSkinWeights();this.updateMatrixWorld(!0);this.bind(new THREE.Skeleton(a,void 0,c))};THREE.SkinnedMesh.prototype=Object.create(THREE.Mesh.prototype);THREE.SkinnedMesh.prototype.constructor=THREE.SkinnedMesh;THREE.SkinnedMesh.prototype.bind=function(a,b){this.skeleton=a;void 0===b&&(this.updateMatrixWorld(!0),b=this.matrixWorld);this.bindMatrix.copy(b) [...]
+THREE.SkinnedMesh.prototype.pose=function(){this.skeleton.pose()};THREE.SkinnedMesh.prototype.normalizeSkinWeights=function(){if(this.geometry instanceof THREE.Geometry)for(var a=0;a<this.geometry.skinIndices.length;a++){var b=this.geometry.skinWeights[a],c=1/b.lengthManhattan();Infinity!==c?b.multiplyScalar(c):b.set(1)}};
+THREE.SkinnedMesh.prototype.updateMatrixWorld=function(a){THREE.Mesh.prototype.updateMatrixWorld.call(this,!0);"attached"===this.bindMode?this.bindMatrixInverse.getInverse(this.matrixWorld):"detached"===this.bindMode?this.bindMatrixInverse.getInverse(this.bindMatrix):THREE.warn("THREE.SkinnedMesh unrecognized bindMode: "+this.bindMode)};
+THREE.SkinnedMesh.prototype.clone=function(a){void 0===a&&(a=new THREE.SkinnedMesh(this.geometry,this.material,this.useVertexTexture));THREE.Mesh.prototype.clone.call(this,a);return a};THREE.MorphAnimMesh=function(a,b){THREE.Mesh.call(this,a,b);this.type="MorphAnimMesh";this.duration=1E3;this.mirroredLoop=!1;this.currentKeyframe=this.lastKeyframe=this.time=0;this.direction=1;this.directionBackwards=!1;this.setFrameRange(0,this.geometry.morphTargets.length-1)};THREE.MorphAnimMesh.prototyp [...]
+THREE.MorphAnimMesh.prototype.constructor=THREE.MorphAnimMesh;THREE.MorphAnimMesh.prototype.setFrameRange=function(a,b){this.startKeyframe=a;this.endKeyframe=b;this.length=this.endKeyframe-this.startKeyframe+1};THREE.MorphAnimMesh.prototype.setDirectionForward=function(){this.direction=1;this.directionBackwards=!1};THREE.MorphAnimMesh.prototype.setDirectionBackward=function(){this.direction=-1;this.directionBackwards=!0};
+THREE.MorphAnimMesh.prototype.parseAnimations=function(){var a=this.geometry;a.animations||(a.animations={});for(var b,c=a.animations,d=/([a-z]+)_?(\d+)/,e=0,f=a.morphTargets.length;e<f;e++){var g=a.morphTargets[e].name.match(d);if(g&&1<g.length){g=g[1];c[g]||(c[g]={start:Infinity,end:-Infinity});var h=c[g];e<h.start&&(h.start=e);e>h.end&&(h.end=e);b||(b=g)}}a.firstAnimation=b};
+THREE.MorphAnimMesh.prototype.setAnimationLabel=function(a,b,c){this.geometry.animations||(this.geometry.animations={});this.geometry.animations[a]={start:b,end:c}};THREE.MorphAnimMesh.prototype.playAnimation=function(a,b){var c=this.geometry.animations[a];c?(this.setFrameRange(c.start,c.end),this.duration=(c.end-c.start)/b*1E3,this.time=0):THREE.warn("THREE.MorphAnimMesh: animation["+a+"] undefined in .playAnimation()")};
+THREE.MorphAnimMesh.prototype.updateAnimation=function(a){var b=this.duration/this.length;this.time+=this.direction*a;if(this.mirroredLoop){if(this.time>this.duration||0>this.time)this.direction*=-1,this.time>this.duration&&(this.time=this.duration,this.directionBackwards=!0),0>this.time&&(this.time=0,this.directionBackwards=!1)}else this.time%=this.duration,0>this.time&&(this.time+=this.duration);a=this.startKeyframe+THREE.Math.clamp(Math.floor(this.time/b),0,this.length-1);a!==this.cur [...]
+(this.morphTargetInfluences[this.lastKeyframe]=0,this.morphTargetInfluences[this.currentKeyframe]=1,this.morphTargetInfluences[a]=0,this.lastKeyframe=this.currentKeyframe,this.currentKeyframe=a);b=this.time%b/b;this.directionBackwards&&(b=1-b);this.morphTargetInfluences[this.currentKeyframe]=b;this.morphTargetInfluences[this.lastKeyframe]=1-b};
+THREE.MorphAnimMesh.prototype.interpolateTargets=function(a,b,c){for(var d=this.morphTargetInfluences,e=0,f=d.length;e<f;e++)d[e]=0;-1<a&&(d[a]=1-c);-1<b&&(d[b]=c)};
+THREE.MorphAnimMesh.prototype.clone=function(a){void 0===a&&(a=new THREE.MorphAnimMesh(this.geometry,this.material));a.duration=this.duration;a.mirroredLoop=this.mirroredLoop;a.time=this.time;a.lastKeyframe=this.lastKeyframe;a.currentKeyframe=this.currentKeyframe;a.direction=this.direction;a.directionBackwards=this.directionBackwards;THREE.Mesh.prototype.clone.call(this,a);return a};THREE.LOD=function(){THREE.Object3D.call(this);this.objects=[]};THREE.LOD.prototype=Object.create(THREE.Ob [...]
+THREE.LOD.prototype.constructor=THREE.LOD;THREE.LOD.prototype.addLevel=function(a,b){void 0===b&&(b=0);b=Math.abs(b);for(var c=0;c<this.objects.length&&!(b<this.objects[c].distance);c++);this.objects.splice(c,0,{distance:b,object:a});this.add(a)};THREE.LOD.prototype.getObjectForDistance=function(a){for(var b=1,c=this.objects.length;b<c&&!(a<this.objects[b].distance);b++);return this.objects[b-1].object};
+THREE.LOD.prototype.raycast=function(){var a=new THREE.Vector3;return function(b,c){a.setFromMatrixPosition(this.matrixWorld);var d=b.ray.origin.distanceTo(a);this.getObjectForDistance(d).raycast(b,c)}}();
+THREE.LOD.prototype.update=function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c){if(1<this.objects.length){a.setFromMatrixPosition(c.matrixWorld);b.setFromMatrixPosition(this.matrixWorld);c=a.distanceTo(b);this.objects[0].object.visible=!0;for(var d=1,e=this.objects.length;d<e;d++)if(c>=this.objects[d].distance)this.objects[d-1].object.visible=!1,this.objects[d].object.visible=!0;else break;for(;d<e;d++)this.objects[d].object.visible=!1}}}();
+THREE.LOD.prototype.clone=function(a){void 0===a&&(a=new THREE.LOD);THREE.Object3D.prototype.clone.call(this,a);for(var b=0,c=this.objects.length;b<c;b++){var d=this.objects[b].object.clone();d.visible=0===b;a.addLevel(d,this.objects[b].distance)}return a};
+THREE.Sprite=function(){var a=new Uint16Array([0,1,2,0,2,3]),b=new Float32Array([-.5,-.5,0,.5,-.5,0,.5,.5,0,-.5,.5,0]),c=new Float32Array([0,0,1,0,1,1,0,1]),d=new THREE.BufferGeometry;d.addAttribute("index",new THREE.BufferAttribute(a,1));d.addAttribute("position",new THREE.BufferAttribute(b,3));d.addAttribute("uv",new THREE.BufferAttribute(c,2));return function(a){THREE.Object3D.call(this);this.type="Sprite";this.geometry=d;this.material=void 0!==a?a:new THREE.SpriteMaterial}}();
+THREE.Sprite.prototype=Object.create(THREE.Object3D.prototype);THREE.Sprite.prototype.constructor=THREE.Sprite;THREE.Sprite.prototype.raycast=function(){var a=new THREE.Vector3;return function(b,c){a.setFromMatrixPosition(this.matrixWorld);var d=b.ray.distanceToPoint(a);d>this.scale.x||c.push({distance:d,point:this.position,face:null,object:this})}}();THREE.Sprite.prototype.clone=function(a){void 0===a&&(a=new THREE.Sprite(this.material));THREE.Object3D.prototype.clone.call(this,a);return a};
+THREE.Particle=THREE.Sprite;THREE.LensFlare=function(a,b,c,d,e){THREE.Object3D.call(this);this.lensFlares=[];this.positionScreen=new THREE.Vector3;this.customUpdateCallback=void 0;void 0!==a&&this.add(a,b,c,d,e)};THREE.LensFlare.prototype=Object.create(THREE.Object3D.prototype);THREE.LensFlare.prototype.constructor=THREE.LensFlare;
+THREE.LensFlare.prototype.add=function(a,b,c,d,e,f){void 0===b&&(b=-1);void 0===c&&(c=0);void 0===f&&(f=1);void 0===e&&(e=new THREE.Color(16777215));void 0===d&&(d=THREE.NormalBlending);c=Math.min(c,Math.max(0,c));this.lensFlares.push({texture:a,size:b,distance:c,x:0,y:0,z:0,scale:1,rotation:1,opacity:f,color:e,blending:d})};
+THREE.LensFlare.prototype.updateLensFlares=function(){var a,b=this.lensFlares.length,c,d=2*-this.positionScreen.x,e=2*-this.positionScreen.y;for(a=0;a<b;a++)c=this.lensFlares[a],c.x=this.positionScreen.x+d*c.distance,c.y=this.positionScreen.y+e*c.distance,c.wantedRotation=c.x*Math.PI*.25,c.rotation+=.25*(c.wantedRotation-c.rotation)};THREE.Scene=function(){THREE.Object3D.call(this);this.type="Scene";this.overrideMaterial=this.fog=null;this.autoUpdate=!0};THREE.Scene.prototype=Object.crea [...]
+THREE.Scene.prototype.constructor=THREE.Scene;THREE.Scene.prototype.clone=function(a){void 0===a&&(a=new THREE.Scene);THREE.Object3D.prototype.clone.call(this,a);null!==this.fog&&(a.fog=this.fog.clone());null!==this.overrideMaterial&&(a.overrideMaterial=this.overrideMaterial.clone());a.autoUpdate=this.autoUpdate;a.matrixAutoUpdate=this.matrixAutoUpdate;return a};THREE.Fog=function(a,b,c){this.name="";this.color=new THREE.Color(a);this.near=void 0!==b?b:1;this.far=void 0!==c?c:1E3};
+THREE.Fog.prototype.clone=function(){return new THREE.Fog(this.color.getHex(),this.near,this.far)};THREE.FogExp2=function(a,b){this.name="";this.color=new THREE.Color(a);this.density=void 0!==b?b:2.5E-4};THREE.FogExp2.prototype.clone=function(){return new THREE.FogExp2(this.color.getHex(),this.density)};THREE.ShaderChunk={};THREE.ShaderChunk.common="#define PI 3.14159\n#define PI2 6.28318\n#define RECIPROCAL_PI2 0.15915494\n#define LOG2 1.442695\n#define EPSILON 1e-6\n\nfloat square( in  [...]
+THREE.ShaderChunk.alphatest_fragment="#ifdef ALPHATEST\n\n\tif ( diffuseColor.a < ALPHATEST ) discard;\n\n#endif\n";THREE.ShaderChunk.lights_lambert_vertex="vLightFront = vec3( 0.0 );\n\n#ifdef DOUBLE_SIDED\n\n\tvLightBack = vec3( 0.0 );\n\n#endif\n\ntransformedNormal = normalize( transformedNormal );\n\n#if MAX_DIR_LIGHTS > 0\n\nfor( int i = 0; i < MAX_DIR_LIGHTS; i ++ ) {\n\n\tvec3 dirVector = transformDirection( directionalLightDirection[ i ], viewMatrix );\n\n\tfloat dotProduct = dot [...]
+THREE.ShaderChunk.map_particle_pars_fragment="#ifdef USE_MAP\n\n\tuniform vec4 offsetRepeat;\n\tuniform sampler2D map;\n\n#endif\n";THREE.ShaderChunk.default_vertex="#ifdef USE_SKINNING\n\n\tvec4 mvPosition = modelViewMatrix * skinned;\n\n#elif defined( USE_MORPHTARGETS )\n\n\tvec4 mvPosition = modelViewMatrix * vec4( morphed, 1.0 );\n\n#else\n\n\tvec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );\n\n#endif\n\ngl_Position = projectionMatrix * mvPosition;\n";
+THREE.ShaderChunk.map_pars_fragment="#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP )\n\n\tvarying vec2 vUv;\n\n#endif\n\n#ifdef USE_MAP\n\n\tuniform sampler2D map;\n\n#endif";THREE.ShaderChunk.skinnormal_vertex="#ifdef USE_SKINNING\n\n\tmat4 skinMatrix = mat4( 0.0 );\n\tskinMatrix += skinWeight.x * boneMatX;\n\tskinMatrix += skinWeight.y * boneMatY;\n\tskinMatrix += skinWeight.z * boneMatZ;\n\tskinMatri [...]
+THREE.ShaderChunk.logdepthbuf_pars_vertex="#ifdef USE_LOGDEPTHBUF\n\n\t#ifdef USE_LOGDEPTHBUF_EXT\n\n\t\tvarying float vFragDepth;\n\n\t#endif\n\n\tuniform float logDepthBufFC;\n\n#endif";THREE.ShaderChunk.lightmap_pars_vertex="#ifdef USE_LIGHTMAP\n\n\tvarying vec2 vUv2;\n\n#endif";THREE.ShaderChunk.lights_phong_fragment="#ifndef FLAT_SHADED\n\n\tvec3 normal = normalize( vNormal );\n\n\t#ifdef DOUBLE_SIDED\n\n\t\tnormal = normal * ( -1.0 + 2.0 * float( gl_FrontFacing ) );\n\n\t#endif\n\n [...]
+THREE.ShaderChunk.fog_pars_fragment="#ifdef USE_FOG\n\n\tuniform vec3 fogColor;\n\n\t#ifdef FOG_EXP2\n\n\t\tuniform float fogDensity;\n\n\t#else\n\n\t\tuniform float fogNear;\n\t\tuniform float fogFar;\n\t#endif\n\n#endif";THREE.ShaderChunk.morphnormal_vertex="#ifdef USE_MORPHNORMALS\n\n\tvec3 morphedNormal = vec3( 0.0 );\n\n\tmorphedNormal += ( morphNormal0 - normal ) * morphTargetInfluences[ 0 ];\n\tmorphedNormal += ( morphNormal1 - normal ) * morphTargetInfluences[ 1 ];\n\tmorphedNorm [...]
+THREE.ShaderChunk.envmap_pars_fragment="#ifdef USE_ENVMAP\n\n\tuniform float reflectivity;\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tuniform samplerCube envMap;\n\t#else\n\t\tuniform sampler2D envMap;\n\t#endif\n\tuniform float flipEnvMap;\n\n\t#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )\n\n\t\tuniform float refractionRatio;\n\n\t#else\n\n\t\tvarying vec3 vReflect;\n\n\t#endif\n\n#endif\n";THREE.ShaderChunk.logdepthbuf_fragment="#if defined(USE_LOGDEPTHBUF) && define [...]
+THREE.ShaderChunk.normalmap_pars_fragment="#ifdef USE_NORMALMAP\n\n\tuniform sampler2D normalMap;\n\tuniform vec2 normalScale;\n\n\t// Per-Pixel Tangent Space Normal Mapping\n\t// http://hacksoflife.blogspot.ch/2009/11/per-pixel-tangent-space-normal-mapping.html\n\n\tvec3 perturbNormal2Arb( vec3 eye_pos, vec3 surf_norm ) {\n\n\t\tvec3 q0 = dFdx( eye_pos.xyz );\n\t\tvec3 q1 = dFdy( eye_pos.xyz );\n\t\tvec2 st0 = dFdx( vUv.st );\n\t\tvec2 st1 = dFdy( vUv.st );\n\n\t\tvec3 S = normalize( q0 [...]
+THREE.ShaderChunk.lights_phong_pars_vertex="#if MAX_SPOT_LIGHTS > 0 || defined( USE_BUMPMAP ) || defined( USE_ENVMAP )\n\n\tvarying vec3 vWorldPosition;\n\n#endif\n";THREE.ShaderChunk.lightmap_pars_fragment="#ifdef USE_LIGHTMAP\n\n\tvarying vec2 vUv2;\n\tuniform sampler2D lightMap;\n\n#endif";THREE.ShaderChunk.shadowmap_vertex="#ifdef USE_SHADOWMAP\n\n\tfor( int i = 0; i < MAX_SHADOWS; i ++ ) {\n\n\t\tvShadowCoord[ i ] = shadowMatrix[ i ] * worldPosition;\n\n\t}\n\n#endif";
+THREE.ShaderChunk.lights_phong_vertex="#if MAX_SPOT_LIGHTS > 0 || defined( USE_BUMPMAP ) || defined( USE_ENVMAP )\n\n\tvWorldPosition = worldPosition.xyz;\n\n#endif";THREE.ShaderChunk.map_fragment="#ifdef USE_MAP\n\n\tvec4 texelColor = texture2D( map, vUv );\n\n\ttexelColor.xyz = inputToLinear( texelColor.xyz );\n\n\tdiffuseColor *= texelColor;\n\n#endif";THREE.ShaderChunk.lightmap_vertex="#ifdef USE_LIGHTMAP\n\n\tvUv2 = uv2;\n\n#endif";THREE.ShaderChunk.map_particle_fragment="#ifdef USE [...]
+THREE.ShaderChunk.color_pars_fragment="#ifdef USE_COLOR\n\n\tvarying vec3 vColor;\n\n#endif\n";THREE.ShaderChunk.color_vertex="#ifdef USE_COLOR\n\n\tvColor.xyz = inputToLinear( color.xyz );\n\n#endif";THREE.ShaderChunk.skinning_vertex="#ifdef USE_SKINNING\n\n\t#ifdef USE_MORPHTARGETS\n\n\tvec4 skinVertex = bindMatrix * vec4( morphed, 1.0 );\n\n\t#else\n\n\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\n\t#endif\n\n\tvec4 skinned = vec4( 0.0 );\n\tskinned += boneMatX * skinVerte [...]
+THREE.ShaderChunk.envmap_pars_vertex="#if defined( USE_ENVMAP ) && ! defined( USE_BUMPMAP ) && ! defined( USE_NORMALMAP ) && ! defined( PHONG )\n\n\tvarying vec3 vReflect;\n\n\tuniform float refractionRatio;\n\n#endif\n";THREE.ShaderChunk.linear_to_gamma_fragment="\n\toutgoingLight = linearToOutput( outgoingLight );\n";THREE.ShaderChunk.color_pars_vertex="#ifdef USE_COLOR\n\n\tvarying vec3 vColor;\n\n#endif";THREE.ShaderChunk.lights_lambert_pars_vertex="uniform vec3 ambientLightColor;\n\ [...]
+THREE.ShaderChunk.map_pars_vertex="#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP )\n\n\tvarying vec2 vUv;\n\tuniform vec4 offsetRepeat;\n\n#endif\n";THREE.ShaderChunk.envmap_fragment="#ifdef USE_ENVMAP\n\n\t#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )\n\n\t\tvec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );\n\n\t\t// Transforming Normal Vectors with the In [...]
+THREE.ShaderChunk.specularmap_pars_fragment="#ifdef USE_SPECULARMAP\n\n\tuniform sampler2D specularMap;\n\n#endif";THREE.ShaderChunk.logdepthbuf_vertex="#ifdef USE_LOGDEPTHBUF\n\n\tgl_Position.z = log2(max( EPSILON, gl_Position.w + 1.0 )) * logDepthBufFC;\n\n\t#ifdef USE_LOGDEPTHBUF_EXT\n\n\t\tvFragDepth = 1.0 + gl_Position.w;\n\n#else\n\n\t\tgl_Position.z = (gl_Position.z - 1.0) * gl_Position.w;\n\n\t#endif\n\n#endif";THREE.ShaderChunk.morphtarget_pars_vertex="#ifdef USE_MORPHTARGETS\n\ [...]
+THREE.ShaderChunk.specularmap_fragment="float specularStrength;\n\n#ifdef USE_SPECULARMAP\n\n\tvec4 texelSpecular = texture2D( specularMap, vUv );\n\tspecularStrength = texelSpecular.r;\n\n#else\n\n\tspecularStrength = 1.0;\n\n#endif";THREE.ShaderChunk.fog_fragment="#ifdef USE_FOG\n\n\t#ifdef USE_LOGDEPTHBUF_EXT\n\n\t\tfloat depth = gl_FragDepthEXT / gl_FragCoord.w;\n\n\t#else\n\n\t\tfloat depth = gl_FragCoord.z / gl_FragCoord.w;\n\n\t#endif\n\n\t#ifdef FOG_EXP2\n\n\t\tfloat fogFactor =  [...]
+THREE.ShaderChunk.bumpmap_pars_fragment="#ifdef USE_BUMPMAP\n\n\tuniform sampler2D bumpMap;\n\tuniform float bumpScale;\n\n\t// Derivative maps - bump mapping unparametrized surfaces by Morten Mikkelsen\n\t// http://mmikkelsen3d.blogspot.sk/2011/07/derivative-maps.html\n\n\t// Evaluate the derivative of the height w.r.t. screen-space using forward differencing (listing 2)\n\n\tvec2 dHdxy_fwd() {\n\n\t\tvec2 dSTdx = dFdx( vUv );\n\t\tvec2 dSTdy = dFdy( vUv );\n\n\t\tfloat Hll = bumpScale  [...]
+THREE.ShaderChunk.defaultnormal_vertex="#ifdef USE_SKINNING\n\n\tvec3 objectNormal = skinnedNormal.xyz;\n\n#elif defined( USE_MORPHNORMALS )\n\n\tvec3 objectNormal = morphedNormal;\n\n#else\n\n\tvec3 objectNormal = normal;\n\n#endif\n\n#ifdef FLIP_SIDED\n\n\tobjectNormal = -objectNormal;\n\n#endif\n\nvec3 transformedNormal = normalMatrix * objectNormal;\n";THREE.ShaderChunk.lights_phong_pars_fragment="uniform vec3 ambientLightColor;\n\n#if MAX_DIR_LIGHTS > 0\n\n\tuniform vec3 directional [...]
+THREE.ShaderChunk.skinbase_vertex="#ifdef USE_SKINNING\n\n\tmat4 boneMatX = getBoneMatrix( skinIndex.x );\n\tmat4 boneMatY = getBoneMatrix( skinIndex.y );\n\tmat4 boneMatZ = getBoneMatrix( skinIndex.z );\n\tmat4 boneMatW = getBoneMatrix( skinIndex.w );\n\n#endif";THREE.ShaderChunk.map_vertex="#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP )\n\n\tvUv = uv * offsetRepeat.zw + offsetRepeat.xy;\n\n#endif";
+THREE.ShaderChunk.lightmap_fragment="#ifdef USE_LIGHTMAP\n\n\toutgoingLight *= diffuseColor.xyz * texture2D( lightMap, vUv2 ).xyz;\n\n#endif";THREE.ShaderChunk.shadowmap_pars_vertex="#ifdef USE_SHADOWMAP\n\n\tvarying vec4 vShadowCoord[ MAX_SHADOWS ];\n\tuniform mat4 shadowMatrix[ MAX_SHADOWS ];\n\n#endif";THREE.ShaderChunk.color_fragment="#ifdef USE_COLOR\n\n\tdiffuseColor.rgb *= vColor;\n\n#endif";THREE.ShaderChunk.morphtarget_vertex="#ifdef USE_MORPHTARGETS\n\n\tvec3 morphed = vec3( 0. [...]
+THREE.ShaderChunk.envmap_vertex="#if defined( USE_ENVMAP ) && ! defined( USE_BUMPMAP ) && ! defined( USE_NORMALMAP ) && ! defined( PHONG )\n\n\tvec3 worldNormal = transformDirection( objectNormal, modelMatrix );\n\n\tvec3 cameraToVertex = normalize( worldPosition.xyz - cameraPosition );\n\n\t#ifdef ENVMAP_MODE_REFLECTION\n\n\t\tvReflect = reflect( cameraToVertex, worldNormal );\n\n\t#else\n\n\t\tvReflect = refract( cameraToVertex, worldNormal, refractionRatio );\n\n\t#endif\n\n#endif\n";
+THREE.ShaderChunk.shadowmap_fragment="#ifdef USE_SHADOWMAP\n\n\t#ifdef SHADOWMAP_DEBUG\n\n\t\tvec3 frustumColors[3];\n\t\tfrustumColors[0] = vec3( 1.0, 0.5, 0.0 );\n\t\tfrustumColors[1] = vec3( 0.0, 1.0, 0.8 );\n\t\tfrustumColors[2] = vec3( 0.0, 0.5, 1.0 );\n\n\t#endif\n\n\t#ifdef SHADOWMAP_CASCADE\n\n\t\tint inFrustumCount = 0;\n\n\t#endif\n\n\tfloat fDepth;\n\tvec3 shadowColor = vec3( 1.0 );\n\n\tfor( int i = 0; i < MAX_SHADOWS; i ++ ) {\n\n\t\tvec3 shadowCoord = vShadowCoord[ i ].xyz  [...]
+THREE.ShaderChunk.worldpos_vertex="#if defined( USE_ENVMAP ) || defined( PHONG ) || defined( LAMBERT ) || defined ( USE_SHADOWMAP )\n\n\t#ifdef USE_SKINNING\n\n\t\tvec4 worldPosition = modelMatrix * skinned;\n\n\t#elif defined( USE_MORPHTARGETS )\n\n\t\tvec4 worldPosition = modelMatrix * vec4( morphed, 1.0 );\n\n\t#else\n\n\t\tvec4 worldPosition = modelMatrix * vec4( position, 1.0 );\n\n\t#endif\n\n#endif\n";THREE.ShaderChunk.shadowmap_pars_fragment="#ifdef USE_SHADOWMAP\n\n\tuniform sam [...]
+THREE.ShaderChunk.skinning_pars_vertex="#ifdef USE_SKINNING\n\n\tuniform mat4 bindMatrix;\n\tuniform mat4 bindMatrixInverse;\n\n\t#ifdef BONE_TEXTURE\n\n\t\tuniform sampler2D boneTexture;\n\t\tuniform int boneTextureWidth;\n\t\tuniform int boneTextureHeight;\n\n\t\tmat4 getBoneMatrix( const in float i ) {\n\n\t\t\tfloat j = i * 4.0;\n\t\t\tfloat x = mod( j, float( boneTextureWidth ) );\n\t\t\tfloat y = floor( j / float( boneTextureWidth ) );\n\n\t\t\tfloat dx = 1.0 / float( boneTextureWi [...]
+THREE.ShaderChunk.logdepthbuf_pars_fragment="#ifdef USE_LOGDEPTHBUF\n\n\tuniform float logDepthBufFC;\n\n\t#ifdef USE_LOGDEPTHBUF_EXT\n\n\t\t#extension GL_EXT_frag_depth : enable\n\t\tvarying float vFragDepth;\n\n\t#endif\n\n#endif";THREE.ShaderChunk.alphamap_fragment="#ifdef USE_ALPHAMAP\n\n\tdiffuseColor.a *= texture2D( alphaMap, vUv ).g;\n\n#endif\n";THREE.ShaderChunk.alphamap_pars_fragment="#ifdef USE_ALPHAMAP\n\n\tuniform sampler2D alphaMap;\n\n#endif\n";
+THREE.UniformsUtils={merge:function(a){for(var b={},c=0;c<a.length;c++){var d=this.clone(a[c]),e;for(e in d)b[e]=d[e]}return b},clone:function(a){var b={},c;for(c in a){b[c]={};for(var d in a[c]){var e=a[c][d];b[c][d]=e instanceof THREE.Color||e instanceof THREE.Vector2||e instanceof THREE.Vector3||e instanceof THREE.Vector4||e instanceof THREE.Matrix4||e instanceof THREE.Texture?e.clone():e instanceof Array?e.slice():e}}return b}};
+THREE.UniformsLib={common:{diffuse:{type:"c",value:new THREE.Color(15658734)},opacity:{type:"f",value:1},map:{type:"t",value:null},offsetRepeat:{type:"v4",value:new THREE.Vector4(0,0,1,1)},lightMap:{type:"t",value:null},specularMap:{type:"t",value:null},alphaMap:{type:"t",value:null},envMap:{type:"t",value:null},flipEnvMap:{type:"f",value:-1},reflectivity:{type:"f",value:1},refractionRatio:{type:"f",value:.98},morphTargetInfluences:{type:"f",value:0}},bump:{bumpMap:{type:"t",value:null}, [...]
+value:1}},normalmap:{normalMap:{type:"t",value:null},normalScale:{type:"v2",value:new THREE.Vector2(1,1)}},fog:{fogDensity:{type:"f",value:2.5E-4},fogNear:{type:"f",value:1},fogFar:{type:"f",value:2E3},fogColor:{type:"c",value:new THREE.Color(16777215)}},lights:{ambientLightColor:{type:"fv",value:[]},directionalLightDirection:{type:"fv",value:[]},directionalLightColor:{type:"fv",value:[]},hemisphereLightDirection:{type:"fv",value:[]},hemisphereLightSkyColor:{type:"fv",value:[]},hemispher [...]
+value:[]},pointLightColor:{type:"fv",value:[]},pointLightPosition:{type:"fv",value:[]},pointLightDistance:{type:"fv1",value:[]},pointLightDecay:{type:"fv1",value:[]},spotLightColor:{type:"fv",value:[]},spotLightPosition:{type:"fv",value:[]},spotLightDirection:{type:"fv",value:[]},spotLightDistance:{type:"fv1",value:[]},spotLightAngleCos:{type:"fv1",value:[]},spotLightExponent:{type:"fv1",value:[]},spotLightDecay:{type:"fv1",value:[]}},particle:{psColor:{type:"c",value:new THREE.Color(156 [...]
+value:1},size:{type:"f",value:1},scale:{type:"f",value:1},map:{type:"t",value:null},offsetRepeat:{type:"v4",value:new THREE.Vector4(0,0,1,1)},fogDensity:{type:"f",value:2.5E-4},fogNear:{type:"f",value:1},fogFar:{type:"f",value:2E3},fogColor:{type:"c",value:new THREE.Color(16777215)}},shadowmap:{shadowMap:{type:"tv",value:[]},shadowMapSize:{type:"v2v",value:[]},shadowBias:{type:"fv1",value:[]},shadowDarkness:{type:"fv1",value:[]},shadowMatrix:{type:"m4v",value:[]}}};
+THREE.ShaderLib={basic:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.common,THREE.UniformsLib.fog,THREE.UniformsLib.shadowmap]),vertexShader:[THREE.ShaderChunk.common,THREE.ShaderChunk.map_pars_vertex,THREE.ShaderChunk.lightmap_pars_vertex,THREE.ShaderChunk.envmap_pars_vertex,THREE.ShaderChunk.color_pars_vertex,THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void m [...]
+THREE.ShaderChunk.map_vertex,THREE.ShaderChunk.lightmap_vertex,THREE.ShaderChunk.color_vertex,THREE.ShaderChunk.skinbase_vertex,"\t#ifdef USE_ENVMAP",THREE.ShaderChunk.morphnormal_vertex,THREE.ShaderChunk.skinnormal_vertex,THREE.ShaderChunk.defaultnormal_vertex,"\t#endif",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,THREE.ShaderChunk.worldpos_vertex,THREE.ShaderChunk.envmap_vertex,THREE.Shader [...]
+"}"].join("\n"),fragmentShader:["uniform vec3 diffuse;\nuniform float opacity;",THREE.ShaderChunk.common,THREE.ShaderChunk.color_pars_fragment,THREE.ShaderChunk.map_pars_fragment,THREE.ShaderChunk.alphamap_pars_fragment,THREE.ShaderChunk.lightmap_pars_fragment,THREE.ShaderChunk.envmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.specularmap_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tvec3 out [...]
+THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.map_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.alphamap_fragment,THREE.ShaderChunk.alphatest_fragment,THREE.ShaderChunk.specularmap_fragment,"\toutgoingLight = diffuseColor.rgb;",THREE.ShaderChunk.lightmap_fragment,THREE.ShaderChunk.envmap_fragment,THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_gamma_fragment,THREE.ShaderChunk.fog_fragment,"\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n [...]
+lambert:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.common,THREE.UniformsLib.fog,THREE.UniformsLib.lights,THREE.UniformsLib.shadowmap,{emissive:{type:"c",value:new THREE.Color(0)},wrapRGB:{type:"v3",value:new THREE.Vector3(1,1,1)}}]),vertexShader:["#define LAMBERT\nvarying vec3 vLightFront;\n#ifdef DOUBLE_SIDED\n\tvarying vec3 vLightBack;\n#endif",THREE.ShaderChunk.common,THREE.ShaderChunk.map_pars_vertex,THREE.ShaderChunk.lightmap_pars_vertex,THREE.ShaderChunk.envmap_pars_ver [...]
+THREE.ShaderChunk.color_pars_vertex,THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.map_vertex,THREE.ShaderChunk.lightmap_vertex,THREE.ShaderChunk.color_vertex,THREE.ShaderChunk.morphnormal_vertex,THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.skinnormal_vertex,THREE.ShaderChunk.defaultnormal_vertex,THREE.ShaderChunk.morphtarget_vertex, [...]
+THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,THREE.ShaderChunk.worldpos_vertex,THREE.ShaderChunk.envmap_vertex,THREE.ShaderChunk.lights_lambert_vertex,THREE.ShaderChunk.shadowmap_vertex,"}"].join("\n"),fragmentShader:["uniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\nvarying vec3 vLightFront;\n#ifdef DOUBLE_SIDED\n\tvarying vec3 vLightBack;\n#endif",THREE.ShaderChunk.common,THREE.ShaderChunk.color_pars_fragment,THREE.ShaderChunk.map_pars_fra [...]
+THREE.ShaderChunk.lightmap_pars_fragment,THREE.ShaderChunk.envmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.specularmap_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.map_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.alphamap_fragment,THREE [...]
+THREE.ShaderChunk.specularmap_fragment,"\t#ifdef DOUBLE_SIDED\n\t\tif ( gl_FrontFacing )\n\t\t\toutgoingLight += diffuseColor.rgb * vLightFront + emissive;\n\t\telse\n\t\t\toutgoingLight += diffuseColor.rgb * vLightBack + emissive;\n\t#else\n\t\toutgoingLight += diffuseColor.rgb * vLightFront + emissive;\n\t#endif",THREE.ShaderChunk.lightmap_fragment,THREE.ShaderChunk.envmap_fragment,THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_gamma_fragment,THREE.ShaderChunk.fog_fra [...]
+phong:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.common,THREE.UniformsLib.bump,THREE.UniformsLib.normalmap,THREE.UniformsLib.fog,THREE.UniformsLib.lights,THREE.UniformsLib.shadowmap,{emissive:{type:"c",value:new THREE.Color(0)},specular:{type:"c",value:new THREE.Color(1118481)},shininess:{type:"f",value:30},wrapRGB:{type:"v3",value:new THREE.Vector3(1,1,1)}}]),vertexShader:["#define PHONG\nvarying vec3 vViewPosition;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif",THRE [...]
+THREE.ShaderChunk.map_pars_vertex,THREE.ShaderChunk.lightmap_pars_vertex,THREE.ShaderChunk.envmap_pars_vertex,THREE.ShaderChunk.lights_phong_pars_vertex,THREE.ShaderChunk.color_pars_vertex,THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.map_vertex,THREE.ShaderChunk.lightmap_vertex,THREE.ShaderChunk.color_vertex,THREE.ShaderChunk.morphnormal_ [...]
+THREE.ShaderChunk.skinnormal_vertex,THREE.ShaderChunk.defaultnormal_vertex,"#ifndef FLAT_SHADED\n\tvNormal = normalize( transformedNormal );\n#endif",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"\tvViewPosition = -mvPosition.xyz;",THREE.ShaderChunk.worldpos_vertex,THREE.ShaderChunk.envmap_vertex,THREE.ShaderChunk.lights_phong_vertex,THREE.ShaderChunk.shadowmap_vertex,"}"].join("\n"),fragmentS [...]
+THREE.ShaderChunk.common,THREE.ShaderChunk.color_pars_fragment,THREE.ShaderChunk.map_pars_fragment,THREE.ShaderChunk.alphamap_pars_fragment,THREE.ShaderChunk.lightmap_pars_fragment,THREE.ShaderChunk.envmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.lights_phong_pars_fragment,THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.bumpmap_pars_fragment,THREE.ShaderChunk.normalmap_pars_fragment,THREE.ShaderChunk.specularmap_pars_fragment,THREE.ShaderChunk.logde [...]
+"void main() {\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.map_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.alphamap_fragment,THREE.ShaderChunk.alphatest_fragment,THREE.ShaderChunk.specularmap_fragment,THREE.ShaderChunk.lights_phong_fragment,THREE.ShaderChunk.lightmap_fragment,THREE.ShaderChunk.envmap_fragment,THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_ [...]
+THREE.ShaderChunk.fog_fragment,"\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n}"].join("\n")},particle_basic:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.particle,THREE.UniformsLib.shadowmap]),vertexShader:["uniform float size;\nuniform float scale;",THREE.ShaderChunk.common,THREE.ShaderChunk.color_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.color_vertex,"\tvec4 mvPosition = modelView [...]
+THREE.ShaderChunk.logdepthbuf_vertex,THREE.ShaderChunk.worldpos_vertex,THREE.ShaderChunk.shadowmap_vertex,"}"].join("\n"),fragmentShader:["uniform vec3 psColor;\nuniform float opacity;",THREE.ShaderChunk.common,THREE.ShaderChunk.color_pars_fragment,THREE.ShaderChunk.map_particle_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = v [...]
+THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.map_particle_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.alphatest_fragment,"\toutgoingLight = diffuseColor.rgb;",THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.fog_fragment,"\tgl_FragColor = vec4( outgoingLight, diffuseColor.a );\n}"].join("\n")},dashed:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.common,THREE.UniformsLib.fog,{scale:{type:"f",value:1},dashSize:{type:"f",value:1},totalSize:{type:"f [...]
+vertexShader:["uniform float scale;\nattribute float lineDistance;\nvarying float vLineDistance;",THREE.ShaderChunk.common,THREE.ShaderChunk.color_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.color_vertex,"\tvLineDistance = scale * lineDistance;\n\tvec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );\n\tgl_Position = projectionMatrix * mvPosition;",THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform vec3 diffuse;\ [...]
+THREE.ShaderChunk.common,THREE.ShaderChunk.color_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tif ( mod( vLineDistance, totalSize ) > dashSize ) {\n\t\tdiscard;\n\t}\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.color_fragment,"\toutgoingLight = diffuseColor.rgb;",THREE.ShaderChunk.fog_fragment,"\tgl_FragColor = vec4( outgoing [...]
+depth:{uniforms:{mNear:{type:"f",value:1},mFar:{type:"f",value:2E3},opacity:{type:"f",value:1}},vertexShader:[THREE.ShaderChunk.common,THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float mNear;\nuniform float mFar;\nuniform float opacity;",THREE.ShaderChunk.common,THREE.ShaderChunk.logdept [...]
+"void main() {",THREE.ShaderChunk.logdepthbuf_fragment,"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tfloat depth = gl_FragDepthEXT / gl_FragCoord.w;\n\t#else\n\t\tfloat depth = gl_FragCoord.z / gl_FragCoord.w;\n\t#endif\n\tfloat color = 1.0 - smoothstep( mNear, mFar, depth );\n\tgl_FragColor = vec4( vec3( color ), opacity );\n}"].join("\n")},normal:{uniforms:{opacity:{type:"f",value:1}},vertexShader:["varying vec3 vNormal;",THREE.ShaderChunk.common,THREE.ShaderChunk.morphtarget_pars_vertex,THREE.Sh [...]
+"void main() {\n\tvNormal = normalize( normalMatrix * normal );",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float opacity;\nvarying vec3 vNormal;",THREE.ShaderChunk.common,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = vec4( 0.5 * normalize( vNormal ) + 0.5, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},cube:{uniforms:{tCube:{type:"t [...]
+tFlip:{type:"f",value:-1}},vertexShader:["varying vec3 vWorldPosition;",THREE.ShaderChunk.common,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvWorldPosition = transformDirection( position, modelMatrix );\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform samplerCube tCube;\nuniform float tFlip;\nvarying vec3 vWorldPosition;",THREE.ShaderChunk.common,THREE.ShaderChunk.lo [...]
+"void main() {\n\tgl_FragColor = textureCube( tCube, vec3( tFlip * vWorldPosition.x, vWorldPosition.yz ) );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},equirect:{uniforms:{tEquirect:{type:"t",value:null},tFlip:{type:"f",value:-1}},vertexShader:["varying vec3 vWorldPosition;",THREE.ShaderChunk.common,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvWorldPosition = transformDirection( position, modelMatrix );\n\tgl_Position = projectionMatrix * modelViewMatrix * ve [...]
+THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform sampler2D tEquirect;\nuniform float tFlip;\nvarying vec3 vWorldPosition;",THREE.ShaderChunk.common,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\nvec3 direction = normalize( vWorldPosition );\nvec2 sampleUV;\nsampleUV.y = saturate( tFlip * direction.y * -0.5 + 0.5 );\nsampleUV.x = atan( direction.z, direction.x ) * RECIPROCAL_PI2 + 0.5;\ngl_FragColor = texture2D( tEquirect, sampleUV );",THREE.Shade [...]
+"}"].join("\n")},depthRGBA:{uniforms:{},vertexShader:[THREE.ShaderChunk.common,THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:[THREE.ShaderChunk.common,THREE.ShaderChunk.logdepthbuf_pars_fragmen [...]
+THREE.ShaderChunk.logdepthbuf_fragment,"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragDepthEXT );\n\t#else\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragCoord.z );\n\t#endif\n}"].join("\n")}};
+THREE.WebGLRenderer=function(a){function b(a){var b=a.geometry;a=a.material;var c=b.vertices.length;if(a.attributes){void 0===b.__webglCustomAttributesList&&(b.__webglCustomAttributesList=[]);for(var d in a.attributes){var e=a.attributes[d];if(!e.__webglInitialized||e.createUniqueBuffers){e.__webglInitialized=!0;var f=1;"v2"===e.type?f=2:"v3"===e.type?f=3:"v4"===e.type?f=4:"c"===e.type&&(f=3);e.size=f;e.array=new Float32Array(c*f);e.buffer=m.createBuffer();e.buffer.belongsToAttribute=d;e [...]
+!0}b.__webglCustomAttributesList.push(e)}}}function c(a,b){return a.material instanceof THREE.MeshFaceMaterial?a.material.materials[b.materialIndex]:a.material}function d(a,b,c,d){c=c.attributes;var e=b.attributes;b=b.attributesKeys;for(var f=0,g=b.length;f<g;f++){var h=b[f],k=e[h];if(0<=k){var n=c[h];void 0!==n?(h=n.itemSize,m.bindBuffer(m.ARRAY_BUFFER,n.buffer),W.enableAttribute(k),m.vertexAttribPointer(k,h,m.FLOAT,!1,0,d*h*4)):void 0!==a.defaultAttributeValues&&(2===a.defaultAttribute [...]
+m.vertexAttrib2fv(k,a.defaultAttributeValues[h]):3===a.defaultAttributeValues[h].length&&m.vertexAttrib3fv(k,a.defaultAttributeValues[h]))}}W.disableUnusedAttributes()}function e(a,b){return a.object.renderOrder!==b.object.renderOrder?a.object.renderOrder-b.object.renderOrder:a.material.id!==b.material.id?a.material.id-b.material.id:a.z!==b.z?a.z-b.z:a.id-b.id}function f(a,b){return a.object.renderOrder!==b.object.renderOrder?a.object.renderOrder-b.object.renderOrder:a.z!==b.z?b.z-a.z:a. [...]
+b){return b[0]-a[0]}function h(a){if(!1!==a.visible){if(!(a instanceof THREE.Scene||a instanceof THREE.Group)){void 0===a.__webglInit&&(a.__webglInit=!0,a._modelViewMatrix=new THREE.Matrix4,a._normalMatrix=new THREE.Matrix3,a.addEventListener("removed",wb));var c=a.geometry;if(void 0!==c&&void 0===c.__webglInit)if(c.__webglInit=!0,c.addEventListener("dispose",jb),c instanceof THREE.BufferGeometry)B.info.memory.geometries++;else if(a instanceof THREE.Mesh)q(a,c);else if(a instanceof THREE [...]
+c.__webglVertexBuffer){c.__webglVertexBuffer=m.createBuffer();c.__webglColorBuffer=m.createBuffer();c.__webglLineDistanceBuffer=m.createBuffer();B.info.memory.geometries++;var d=c.vertices.length;c.__vertexArray=new Float32Array(3*d);c.__colorArray=new Float32Array(3*d);c.__lineDistanceArray=new Float32Array(1*d);c.__webglLineCount=d;b(a);c.verticesNeedUpdate=!0;c.colorsNeedUpdate=!0;c.lineDistancesNeedUpdate=!0}}else a instanceof THREE.PointCloud&&void 0===c.__webglVertexBuffer&&(c.__we [...]
+m.createBuffer(),c.__webglColorBuffer=m.createBuffer(),B.info.memory.geometries++,d=c.vertices.length,c.__vertexArray=new Float32Array(3*d),c.__colorArray=new Float32Array(3*d),c.__webglParticleCount=d,b(a),c.verticesNeedUpdate=!0,c.colorsNeedUpdate=!0);if(void 0===a.__webglActive)if(a.__webglActive=!0,a instanceof THREE.Mesh)if(c instanceof THREE.BufferGeometry)n(ba,c,a);else{if(c instanceof THREE.Geometry)for(var c=Ua[c.id],d=0,e=c.length;d<e;d++)n(ba,c[d],a)}else a instanceof THREE.Li [...]
+THREE.PointCloud?n(ba,c,a):(a instanceof THREE.ImmediateRenderObject||a.immediateRenderCallback)&&qa.push({id:null,object:a,opaque:null,transparent:null,z:0});if(a instanceof THREE.Light)ca.push(a);else if(a instanceof THREE.Sprite)Xa.push(a);else if(a instanceof THREE.LensFlare)Ya.push(a);else if((c=ba[a.id])&&(!1===a.frustumCulled||!0===cb.intersectsObject(a)))for(d=0,e=c.length;d<e;d++){var f=c[d],g=f,k=g.object,l=g.buffer,p=k.geometry,k=k.material;k instanceof THREE.MeshFaceMaterial? [...]
+THREE.BufferGeometry?0:l.materialIndex],g.material=k,k.transparent?Qa.push(g):Ka.push(g)):k&&(g.material=k,k.transparent?Qa.push(g):Ka.push(g));f.render=!0;!0===B.sortObjects&&(wa.setFromMatrixPosition(a.matrixWorld),wa.applyProjection(db),f.z=wa.z)}}d=0;for(e=a.children.length;d<e;d++)h(a.children[d])}}function k(a,b,c,d,e){for(var f,g=0,h=a.length;g<h;g++){f=a[g];var k=f.object,m=f.buffer;w(k,b);if(e)f=e;else{f=f.material;if(!f)continue;u(f)}B.setMaterialFaces(f);m instanceof THREE.Buf [...]
+B.renderBufferDirect(b,c,d,f,m,k):B.renderBuffer(b,c,d,f,m,k)}}function l(a,b,c,d,e,f){for(var g,h=0,k=a.length;h<k;h++){g=a[h];var m=g.object;if(m.visible){if(f)g=f;else{g=g[b];if(!g)continue;u(g)}B.renderImmediateObject(c,d,e,g,m)}}}function p(a){var b=a.object.material;b.transparent?(a.transparent=b,a.opaque=null):(a.opaque=b,a.transparent=null)}function q(a,b){var d=a.material,e=!1;if(void 0===Ua[b.id]||!0===b.groupsNeedUpdate){delete ba[a.id];for(var f=Ua,g=b.id,d=d instanceof THREE [...]
+h=da.get("OES_element_index_uint")?4294967296:65535,k,e={},l=b.morphTargets.length,p=b.morphNormals.length,q,s={},t=[],r=0,w=b.faces.length;r<w;r++){k=b.faces[r];var u=d?k.materialIndex:0;u in e||(e[u]={hash:u,counter:0});k=e[u].hash+"_"+e[u].counter;k in s||(q={id:Qb++,faces3:[],materialIndex:u,vertices:0,numMorphTargets:l,numMorphNormals:p},s[k]=q,t.push(q));s[k].vertices+3>h&&(e[u].counter+=1,k=e[u].hash+"_"+e[u].counter,k in s||(q={id:Qb++,faces3:[],materialIndex:u,vertices:0,numMorp [...]
+numMorphNormals:p},s[k]=q,t.push(q)));s[k].faces3.push(r);s[k].vertices+=3}f[g]=t;b.groupsNeedUpdate=!1}f=Ua[b.id];g=0;for(d=f.length;g<d;g++){h=f[g];if(void 0===h.__webglVertexBuffer){e=h;e.__webglVertexBuffer=m.createBuffer();e.__webglNormalBuffer=m.createBuffer();e.__webglTangentBuffer=m.createBuffer();e.__webglColorBuffer=m.createBuffer();e.__webglUVBuffer=m.createBuffer();e.__webglUV2Buffer=m.createBuffer();e.__webglSkinIndicesBuffer=m.createBuffer();e.__webglSkinWeightsBuffer=m.cre [...]
+e.__webglFaceBuffer=m.createBuffer();e.__webglLineBuffer=m.createBuffer();if(p=e.numMorphTargets)for(e.__webglMorphTargetsBuffers=[],l=0;l<p;l++)e.__webglMorphTargetsBuffers.push(m.createBuffer());if(p=e.numMorphNormals)for(e.__webglMorphNormalsBuffers=[],l=0;l<p;l++)e.__webglMorphNormalsBuffers.push(m.createBuffer());B.info.memory.geometries++;e=h;r=a;w=r.geometry;p=e.faces3;l=3*p.length;s=1*p.length;t=3*p.length;p=c(r,e);e.__vertexArray=new Float32Array(3*l);e.__normalArray=new Float32 [...]
+e.__colorArray=new Float32Array(3*l);e.__uvArray=new Float32Array(2*l);1<w.faceVertexUvs.length&&(e.__uv2Array=new Float32Array(2*l));w.hasTangents&&(e.__tangentArray=new Float32Array(4*l));r.geometry.skinWeights.length&&r.geometry.skinIndices.length&&(e.__skinIndexArray=new Float32Array(4*l),e.__skinWeightArray=new Float32Array(4*l));r=null!==da.get("OES_element_index_uint")&&21845<s?Uint32Array:Uint16Array;e.__typeArray=r;e.__faceArray=new r(3*s);e.__lineArray=new r(2*t);if(w=e.numMorp [...]
+[],r=0;r<w;r++)e.__morphTargetsArrays.push(new Float32Array(3*l));if(w=e.numMorphNormals)for(e.__morphNormalsArrays=[],r=0;r<w;r++)e.__morphNormalsArrays.push(new Float32Array(3*l));e.__webglFaceCount=3*s;e.__webglLineCount=2*t;if(p.attributes)for(s in void 0===e.__webglCustomAttributesList&&(e.__webglCustomAttributesList=[]),s=void 0,p.attributes){var t=p.attributes[s],r={},v;for(v in t)r[v]=t[v];if(!r.__webglInitialized||r.createUniqueBuffers)r.__webglInitialized=!0,w=1,"v2"===r.type?w [...]
+r.type?w=3:"v4"===r.type?w=4:"c"===r.type&&(w=3),r.size=w,r.array=new Float32Array(l*w),r.buffer=m.createBuffer(),r.buffer.belongsToAttribute=s,t.needsUpdate=!0,r.__original=t;e.__webglCustomAttributesList.push(r)}e.__inittedArrays=!0;b.verticesNeedUpdate=!0;b.morphTargetsNeedUpdate=!0;b.elementsNeedUpdate=!0;b.uvsNeedUpdate=!0;b.normalsNeedUpdate=!0;b.tangentsNeedUpdate=!0;e=b.colorsNeedUpdate=!0}else e=!1;(e||void 0===a.__webglActive)&&n(ba,h,a)}a.__webglActive=!0}function n(a,b,c){var [...]
+a[d]||[];a[d].push({id:d,buffer:b,object:c,material:null,z:0})}function t(a){var b=a.geometry;if(b instanceof THREE.BufferGeometry)for(var d=b.attributes,e=b.attributesKeys,f=0,g=e.length;f<g;f++){var h=e[f],k=d[h],n="index"===h?m.ELEMENT_ARRAY_BUFFER:m.ARRAY_BUFFER;void 0===k.buffer?(k.buffer=m.createBuffer(),m.bindBuffer(n,k.buffer),m.bufferData(n,k.array,k instanceof THREE.DynamicBufferAttribute?m.DYNAMIC_DRAW:m.STATIC_DRAW),k.needsUpdate=!1):!0===k.needsUpdate&&(m.bindBuffer(n,k.buff [...]
+k.updateRange||-1===k.updateRange.count?m.bufferSubData(n,0,k.array):0===k.updateRange.count?console.error("THREE.WebGLRenderer.updateObject: using updateRange for THREE.DynamicBufferAttribute and marked as needsUpdate but count is 0, ensure you are using set methods or updating manually."):(m.bufferSubData(n,k.updateRange.offset*k.array.BYTES_PER_ELEMENT,k.array.subarray(k.updateRange.offset,k.updateRange.offset+k.updateRange.count)),k.updateRange.count=0),k.needsUpdate=!1)}else if(a in [...]
+b.groupsNeedUpdate&&q(a,b);for(var l=Ua[b.id],f=0,p=l.length;f<p;f++){var t=l[f],w=c(a,t),u=w.attributes&&r(w);if(b.verticesNeedUpdate||b.morphTargetsNeedUpdate||b.elementsNeedUpdate||b.uvsNeedUpdate||b.normalsNeedUpdate||b.colorsNeedUpdate||b.tangentsNeedUpdate||u){var v=t,x=a,D=m.DYNAMIC_DRAW,A=!b.dynamic,E=w;if(v.__inittedArrays){var G=!1===E instanceof THREE.MeshPhongMaterial&&E.shading===THREE.FlatShading,y=void 0,z=void 0,F=void 0,B=void 0,I=void 0,H=void 0,M=void 0,R=void 0,P=void [...]
+O=void 0,J=void 0,L=void 0,N=void 0,Ka=void 0,V=void 0,W=void 0,Qa=void 0,Ya=void 0,Xa=void 0,da=void 0,ba=void 0,ja=void 0,Pa=void 0,ka=void 0,Q=void 0,ha=void 0,ia=void 0,ob=void 0,Y=void 0,ub=void 0,pa=void 0,ab=void 0,oa=void 0,ca=void 0,qa=void 0,Ca=void 0,ta=void 0,na=void 0,wa=void 0,La=0,Ma=0,kb=0,yb=0,zb=0,Ra=0,Aa=0,eb=0,Ha=0,la=0,ra=0,K=0,za=void 0,Sa=v.__vertexArray,Ab=v.__uvArray,lb=v.__uv2Array,Na=v.__normalArray,sa=v.__tangentArray,Da=v.__colorArray,Ea=v.__skinIndexArray,Fa [...]
+Gb=v.__morphTargetsArrays,Bb=v.__morphNormalsArrays,mb=v.__webglCustomAttributesList,C=void 0,Va=v.__faceArray,Ta=v.__lineArray,ea=x.geometry,fb=ea.elementsNeedUpdate,vb=ea.uvsNeedUpdate,Mb=ea.normalsNeedUpdate,Ob=ea.tangentsNeedUpdate,ib=ea.colorsNeedUpdate,sb=ea.morphTargetsNeedUpdate,Cb=ea.vertices,$=v.faces3,xa=ea.faces,Hb=ea.faceVertexUvs[0],Oa=ea.faceVertexUvs[1],$a=ea.skinIndices,Ga=ea.skinWeights,nb=ea.morphTargets,bb=ea.morphNormals;if(ea.verticesNeedUpdate){y=0;for(z=$.length;y [...]
+xa[$[y]],J=Cb[B.a],L=Cb[B.b],N=Cb[B.c],Sa[Ma]=J.x,Sa[Ma+1]=J.y,Sa[Ma+2]=J.z,Sa[Ma+3]=L.x,Sa[Ma+4]=L.y,Sa[Ma+5]=L.z,Sa[Ma+6]=N.x,Sa[Ma+7]=N.y,Sa[Ma+8]=N.z,Ma+=9;m.bindBuffer(m.ARRAY_BUFFER,v.__webglVertexBuffer);m.bufferData(m.ARRAY_BUFFER,Sa,D)}if(sb)for(ca=0,qa=nb.length;ca<qa;ca++){y=ra=0;for(z=$.length;y<z;y++)na=$[y],B=xa[na],J=nb[ca].vertices[B.a],L=nb[ca].vertices[B.b],N=nb[ca].vertices[B.c],Ca=Gb[ca],Ca[ra]=J.x,Ca[ra+1]=J.y,Ca[ra+2]=J.z,Ca[ra+3]=L.x,Ca[ra+4]=L.y,Ca[ra+5]=L.z,Ca[ra [...]
+7]=N.y,Ca[ra+8]=N.z,E.morphNormals&&(G?Xa=Ya=Qa=bb[ca].faceNormals[na]:(wa=bb[ca].vertexNormals[na],Qa=wa.a,Ya=wa.b,Xa=wa.c),ta=Bb[ca],ta[ra]=Qa.x,ta[ra+1]=Qa.y,ta[ra+2]=Qa.z,ta[ra+3]=Ya.x,ta[ra+4]=Ya.y,ta[ra+5]=Ya.z,ta[ra+6]=Xa.x,ta[ra+7]=Xa.y,ta[ra+8]=Xa.z),ra+=9;m.bindBuffer(m.ARRAY_BUFFER,v.__webglMorphTargetsBuffers[ca]);m.bufferData(m.ARRAY_BUFFER,Gb[ca],D);E.morphNormals&&(m.bindBuffer(m.ARRAY_BUFFER,v.__webglMorphNormalsBuffers[ca]),m.bufferData(m.ARRAY_BUFFER,Bb[ca],D))}if(Ga.le [...]
+for(z=$.length;y<z;y++)B=xa[$[y]],Pa=Ga[B.a],ka=Ga[B.b],Q=Ga[B.c],Fa[la]=Pa.x,Fa[la+1]=Pa.y,Fa[la+2]=Pa.z,Fa[la+3]=Pa.w,Fa[la+4]=ka.x,Fa[la+5]=ka.y,Fa[la+6]=ka.z,Fa[la+7]=ka.w,Fa[la+8]=Q.x,Fa[la+9]=Q.y,Fa[la+10]=Q.z,Fa[la+11]=Q.w,ha=$a[B.a],ia=$a[B.b],ob=$a[B.c],Ea[la]=ha.x,Ea[la+1]=ha.y,Ea[la+2]=ha.z,Ea[la+3]=ha.w,Ea[la+4]=ia.x,Ea[la+5]=ia.y,Ea[la+6]=ia.z,Ea[la+7]=ia.w,Ea[la+8]=ob.x,Ea[la+9]=ob.y,Ea[la+10]=ob.z,Ea[la+11]=ob.w,la+=12;0<la&&(m.bindBuffer(m.ARRAY_BUFFER,v.__webglSkinIndice [...]
+m.bufferData(m.ARRAY_BUFFER,Ea,D),m.bindBuffer(m.ARRAY_BUFFER,v.__webglSkinWeightsBuffer),m.bufferData(m.ARRAY_BUFFER,Fa,D))}if(ib){y=0;for(z=$.length;y<z;y++)B=xa[$[y]],M=B.vertexColors,R=B.color,3===M.length&&E.vertexColors===THREE.VertexColors?(da=M[0],ba=M[1],ja=M[2]):ja=ba=da=R,Da[Ha]=da.r,Da[Ha+1]=da.g,Da[Ha+2]=da.b,Da[Ha+3]=ba.r,Da[Ha+4]=ba.g,Da[Ha+5]=ba.b,Da[Ha+6]=ja.r,Da[Ha+7]=ja.g,Da[Ha+8]=ja.b,Ha+=9;0<Ha&&(m.bindBuffer(m.ARRAY_BUFFER,v.__webglColorBuffer),m.bufferData(m.ARRAY_ [...]
+D))}if(Ob&&ea.hasTangents){y=0;for(z=$.length;y<z;y++)B=xa[$[y]],P=B.vertexTangents,Ka=P[0],V=P[1],W=P[2],sa[Aa]=Ka.x,sa[Aa+1]=Ka.y,sa[Aa+2]=Ka.z,sa[Aa+3]=Ka.w,sa[Aa+4]=V.x,sa[Aa+5]=V.y,sa[Aa+6]=V.z,sa[Aa+7]=V.w,sa[Aa+8]=W.x,sa[Aa+9]=W.y,sa[Aa+10]=W.z,sa[Aa+11]=W.w,Aa+=12;m.bindBuffer(m.ARRAY_BUFFER,v.__webglTangentBuffer);m.bufferData(m.ARRAY_BUFFER,sa,D)}if(Mb){y=0;for(z=$.length;y<z;y++)if(B=xa[$[y]],I=B.vertexNormals,H=B.normal,3===I.length&&!1===G)for(Y=0;3>Y;Y++)pa=I[Y],Na[Ra]=pa.x [...]
+pa.y,Na[Ra+2]=pa.z,Ra+=3;else for(Y=0;3>Y;Y++)Na[Ra]=H.x,Na[Ra+1]=H.y,Na[Ra+2]=H.z,Ra+=3;m.bindBuffer(m.ARRAY_BUFFER,v.__webglNormalBuffer);m.bufferData(m.ARRAY_BUFFER,Na,D)}if(vb&&Hb){y=0;for(z=$.length;y<z;y++)if(F=$[y],U=Hb[F],void 0!==U)for(Y=0;3>Y;Y++)ab=U[Y],Ab[kb]=ab.x,Ab[kb+1]=ab.y,kb+=2;0<kb&&(m.bindBuffer(m.ARRAY_BUFFER,v.__webglUVBuffer),m.bufferData(m.ARRAY_BUFFER,Ab,D))}if(vb&&Oa){y=0;for(z=$.length;y<z;y++)if(F=$[y],O=Oa[F],void 0!==O)for(Y=0;3>Y;Y++)oa=O[Y],lb[yb]=oa.x,lb[ [...]
+yb+=2;0<yb&&(m.bindBuffer(m.ARRAY_BUFFER,v.__webglUV2Buffer),m.bufferData(m.ARRAY_BUFFER,lb,D))}if(fb){y=0;for(z=$.length;y<z;y++)Va[zb]=La,Va[zb+1]=La+1,Va[zb+2]=La+2,zb+=3,Ta[eb]=La,Ta[eb+1]=La+1,Ta[eb+2]=La,Ta[eb+3]=La+2,Ta[eb+4]=La+1,Ta[eb+5]=La+2,eb+=6,La+=3;m.bindBuffer(m.ELEMENT_ARRAY_BUFFER,v.__webglFaceBuffer);m.bufferData(m.ELEMENT_ARRAY_BUFFER,Va,D);m.bindBuffer(m.ELEMENT_ARRAY_BUFFER,v.__webglLineBuffer);m.bufferData(m.ELEMENT_ARRAY_BUFFER,Ta,D)}if(mb)for(Y=0,ub=mb.length;Y<u [...]
+mb[Y],C.__original.needsUpdate){K=0;if(1===C.size)if(void 0===C.boundTo||"vertices"===C.boundTo)for(y=0,z=$.length;y<z;y++)B=xa[$[y]],C.array[K]=C.value[B.a],C.array[K+1]=C.value[B.b],C.array[K+2]=C.value[B.c],K+=3;else{if("faces"===C.boundTo)for(y=0,z=$.length;y<z;y++)za=C.value[$[y]],C.array[K]=za,C.array[K+1]=za,C.array[K+2]=za,K+=3}else if(2===C.size)if(void 0===C.boundTo||"vertices"===C.boundTo)for(y=0,z=$.length;y<z;y++)B=xa[$[y]],J=C.value[B.a],L=C.value[B.b],N=C.value[B.c],C.arra [...]
+C.array[K+1]=J.y,C.array[K+2]=L.x,C.array[K+3]=L.y,C.array[K+4]=N.x,C.array[K+5]=N.y,K+=6;else{if("faces"===C.boundTo)for(y=0,z=$.length;y<z;y++)N=L=J=za=C.value[$[y]],C.array[K]=J.x,C.array[K+1]=J.y,C.array[K+2]=L.x,C.array[K+3]=L.y,C.array[K+4]=N.x,C.array[K+5]=N.y,K+=6}else if(3===C.size){var T;T="c"===C.type?["r","g","b"]:["x","y","z"];if(void 0===C.boundTo||"vertices"===C.boundTo)for(y=0,z=$.length;y<z;y++)B=xa[$[y]],J=C.value[B.a],L=C.value[B.b],N=C.value[B.c],C.array[K]=J[T[0]],C. [...]
+J[T[1]],C.array[K+2]=J[T[2]],C.array[K+3]=L[T[0]],C.array[K+4]=L[T[1]],C.array[K+5]=L[T[2]],C.array[K+6]=N[T[0]],C.array[K+7]=N[T[1]],C.array[K+8]=N[T[2]],K+=9;else if("faces"===C.boundTo)for(y=0,z=$.length;y<z;y++)N=L=J=za=C.value[$[y]],C.array[K]=J[T[0]],C.array[K+1]=J[T[1]],C.array[K+2]=J[T[2]],C.array[K+3]=L[T[0]],C.array[K+4]=L[T[1]],C.array[K+5]=L[T[2]],C.array[K+6]=N[T[0]],C.array[K+7]=N[T[1]],C.array[K+8]=N[T[2]],K+=9;else if("faceVertices"===C.boundTo)for(y=0,z=$.length;y<z;y++) [...]
+J=za[0],L=za[1],N=za[2],C.array[K]=J[T[0]],C.array[K+1]=J[T[1]],C.array[K+2]=J[T[2]],C.array[K+3]=L[T[0]],C.array[K+4]=L[T[1]],C.array[K+5]=L[T[2]],C.array[K+6]=N[T[0]],C.array[K+7]=N[T[1]],C.array[K+8]=N[T[2]],K+=9}else if(4===C.size)if(void 0===C.boundTo||"vertices"===C.boundTo)for(y=0,z=$.length;y<z;y++)B=xa[$[y]],J=C.value[B.a],L=C.value[B.b],N=C.value[B.c],C.array[K]=J.x,C.array[K+1]=J.y,C.array[K+2]=J.z,C.array[K+3]=J.w,C.array[K+4]=L.x,C.array[K+5]=L.y,C.array[K+6]=L.z,C.array[K+7 [...]
+8]=N.x,C.array[K+9]=N.y,C.array[K+10]=N.z,C.array[K+11]=N.w,K+=12;else if("faces"===C.boundTo)for(y=0,z=$.length;y<z;y++)N=L=J=za=C.value[$[y]],C.array[K]=J.x,C.array[K+1]=J.y,C.array[K+2]=J.z,C.array[K+3]=J.w,C.array[K+4]=L.x,C.array[K+5]=L.y,C.array[K+6]=L.z,C.array[K+7]=L.w,C.array[K+8]=N.x,C.array[K+9]=N.y,C.array[K+10]=N.z,C.array[K+11]=N.w,K+=12;else if("faceVertices"===C.boundTo)for(y=0,z=$.length;y<z;y++)za=C.value[$[y]],J=za[0],L=za[1],N=za[2],C.array[K]=J.x,C.array[K+1]=J.y,C.a [...]
+J.z,C.array[K+3]=J.w,C.array[K+4]=L.x,C.array[K+5]=L.y,C.array[K+6]=L.z,C.array[K+7]=L.w,C.array[K+8]=N.x,C.array[K+9]=N.y,C.array[K+10]=N.z,C.array[K+11]=N.w,K+=12;m.bindBuffer(m.ARRAY_BUFFER,C.buffer);m.bufferData(m.ARRAY_BUFFER,C.array,D)}A&&(delete v.__inittedArrays,delete v.__colorArray,delete v.__normalArray,delete v.__tangentArray,delete v.__uvArray,delete v.__uv2Array,delete v.__faceArray,delete v.__vertexArray,delete v.__lineArray,delete v.__skinIndexArray,delete v.__skinWeightA [...]
+!1;b.morphTargetsNeedUpdate=!1;b.elementsNeedUpdate=!1;b.uvsNeedUpdate=!1;b.normalsNeedUpdate=!1;b.colorsNeedUpdate=!1;b.tangentsNeedUpdate=!1;w.attributes&&s(w)}else if(a instanceof THREE.Line){w=c(a,b);u=w.attributes&&r(w);if(b.verticesNeedUpdate||b.colorsNeedUpdate||b.lineDistancesNeedUpdate||u){var Db=m.DYNAMIC_DRAW,S,aa,Z,Ba,X,Eb,Rb=b.vertices,Ib=b.colors,gb=b.lineDistances,ya=Rb.length,pb=Ib.length,qb=gb.length,Wa=b.__vertexArray,tb=b.__colorArray,hb=b.__lineDistanceArray,$b=b.colo [...]
+Fb=b.lineDistancesNeedUpdate,Sb=b.__webglCustomAttributesList,Jb,cb,ua,Kb,Ia,fa;if(b.verticesNeedUpdate){for(S=0;S<ya;S++)Ba=Rb[S],X=3*S,Wa[X]=Ba.x,Wa[X+1]=Ba.y,Wa[X+2]=Ba.z;m.bindBuffer(m.ARRAY_BUFFER,b.__webglVertexBuffer);m.bufferData(m.ARRAY_BUFFER,Wa,Db)}if($b){for(aa=0;aa<pb;aa++)Eb=Ib[aa],X=3*aa,tb[X]=Eb.r,tb[X+1]=Eb.g,tb[X+2]=Eb.b;m.bindBuffer(m.ARRAY_BUFFER,b.__webglColorBuffer);m.bufferData(m.ARRAY_BUFFER,tb,Db)}if(Fb){for(Z=0;Z<qb;Z++)hb[Z]=gb[Z];m.bindBuffer(m.ARRAY_BUFFER,b. [...]
+m.bufferData(m.ARRAY_BUFFER,hb,Db)}if(Sb)for(Jb=0,cb=Sb.length;Jb<cb;Jb++)if(fa=Sb[Jb],fa.needsUpdate&&(void 0===fa.boundTo||"vertices"===fa.boundTo)){X=0;Kb=fa.value.length;if(1===fa.size)for(ua=0;ua<Kb;ua++)fa.array[ua]=fa.value[ua];else if(2===fa.size)for(ua=0;ua<Kb;ua++)Ia=fa.value[ua],fa.array[X]=Ia.x,fa.array[X+1]=Ia.y,X+=2;else if(3===fa.size)if("c"===fa.type)for(ua=0;ua<Kb;ua++)Ia=fa.value[ua],fa.array[X]=Ia.r,fa.array[X+1]=Ia.g,fa.array[X+2]=Ia.b,X+=3;else for(ua=0;ua<Kb;ua++)Ia [...]
+fa.array[X]=Ia.x,fa.array[X+1]=Ia.y,fa.array[X+2]=Ia.z,X+=3;else if(4===fa.size)for(ua=0;ua<Kb;ua++)Ia=fa.value[ua],fa.array[X]=Ia.x,fa.array[X+1]=Ia.y,fa.array[X+2]=Ia.z,fa.array[X+3]=Ia.w,X+=4;m.bindBuffer(m.ARRAY_BUFFER,fa.buffer);m.bufferData(m.ARRAY_BUFFER,fa.array,Db);fa.needsUpdate=!1}}b.verticesNeedUpdate=!1;b.colorsNeedUpdate=!1;b.lineDistancesNeedUpdate=!1;w.attributes&&s(w)}else if(a instanceof THREE.PointCloud){w=c(a,b);u=w.attributes&&r(w);if(b.verticesNeedUpdate||b.colorsNe [...]
+u){var db=m.DYNAMIC_DRAW,Tb,Ub,ac,ma,bc,Nb=b.vertices,Vb=Nb.length,Pb=b.colors,rb=Pb.length,cc=b.__vertexArray,dc=b.__colorArray,wb=b.colorsNeedUpdate,gc=b.__webglCustomAttributesList,ec,jb,va,Lb,Ja,ga;if(b.verticesNeedUpdate){for(Tb=0;Tb<Vb;Tb++)ac=Nb[Tb],ma=3*Tb,cc[ma]=ac.x,cc[ma+1]=ac.y,cc[ma+2]=ac.z;m.bindBuffer(m.ARRAY_BUFFER,b.__webglVertexBuffer);m.bufferData(m.ARRAY_BUFFER,cc,db)}if(wb){for(Ub=0;Ub<rb;Ub++)bc=Pb[Ub],ma=3*Ub,dc[ma]=bc.r,dc[ma+1]=bc.g,dc[ma+2]=bc.b;m.bindBuffer(m.A [...]
+b.__webglColorBuffer);m.bufferData(m.ARRAY_BUFFER,dc,db)}if(gc)for(ec=0,jb=gc.length;ec<jb;ec++){ga=gc[ec];if(ga.needsUpdate&&(void 0===ga.boundTo||"vertices"===ga.boundTo))if(Lb=ga.value.length,ma=0,1===ga.size)for(va=0;va<Lb;va++)ga.array[va]=ga.value[va];else if(2===ga.size)for(va=0;va<Lb;va++)Ja=ga.value[va],ga.array[ma]=Ja.x,ga.array[ma+1]=Ja.y,ma+=2;else if(3===ga.size)if("c"===ga.type)for(va=0;va<Lb;va++)Ja=ga.value[va],ga.array[ma]=Ja.r,ga.array[ma+1]=Ja.g,ga.array[ma+2]=Ja.b,ma+ [...]
+0;va<Lb;va++)Ja=ga.value[va],ga.array[ma]=Ja.x,ga.array[ma+1]=Ja.y,ga.array[ma+2]=Ja.z,ma+=3;else if(4===ga.size)for(va=0;va<Lb;va++)Ja=ga.value[va],ga.array[ma]=Ja.x,ga.array[ma+1]=Ja.y,ga.array[ma+2]=Ja.z,ga.array[ma+3]=Ja.w,ma+=4;m.bindBuffer(m.ARRAY_BUFFER,ga.buffer);m.bufferData(m.ARRAY_BUFFER,ga.array,db);ga.needsUpdate=!1}}b.verticesNeedUpdate=!1;b.colorsNeedUpdate=!1;w.attributes&&s(w)}}function r(a){for(var b in a.attributes)if(a.attributes[b].needsUpdate)return!0;return!1}funct [...]
+!1}function u(a){!0===a.transparent?W.setBlending(a.blending,a.blendEquation,a.blendSrc,a.blendDst,a.blendEquationAlpha,a.blendSrcAlpha,a.blendDstAlpha):W.setBlending(THREE.NoBlending);W.setDepthTest(a.depthTest);W.setDepthWrite(a.depthWrite);W.setColorWrite(a.colorWrite);W.setPolygonOffset(a.polygonOffset,a.polygonOffsetFactor,a.polygonOffsetUnits)}function v(a,b,c,d,e){var f,g,h,k;Mb=0;if(d.needsUpdate){d.program&&hc(d);d.addEventListener("dispose",ic);var n=pc[d.type];if(n){var l=THRE [...]
+d.__webglShader={uniforms:THREE.UniformsUtils.clone(l.uniforms),vertexShader:l.vertexShader,fragmentShader:l.fragmentShader}}else d.__webglShader={uniforms:d.uniforms,vertexShader:d.vertexShader,fragmentShader:d.fragmentShader};for(var p=0,q=0,r=0,s=0,t=0,w=b.length;t<w;t++){var v=b[t];v.onlyShadow||!1===v.visible||(v instanceof THREE.DirectionalLight&&p++,v instanceof THREE.PointLight&&q++,v instanceof THREE.SpotLight&&r++,v instanceof THREE.HemisphereLight&&s++)}f=p;g=q;h=r;k=s;for(var [...]
+0,F=b.length;G<F;G++){var J=b[G];J.castShadow&&(J instanceof THREE.SpotLight&&z++,J instanceof THREE.DirectionalLight&&!J.shadowCascade&&z++)}u=z;var H;if(Nb&&e&&e.skeleton&&e.skeleton.useVertexTexture)H=1024;else{var N=m.getParameter(m.MAX_VERTEX_UNIFORM_VECTORS),M=Math.floor((N-20)/4);void 0!==e&&e instanceof THREE.SkinnedMesh&&(M=Math.min(e.skeleton.bones.length,M),M<e.skeleton.bones.length&&THREE.warn("WebGLRenderer: too many bones - "+e.skeleton.bones.length+", this GPU supports jus [...]
+H=M}var P={precision:L,supportsVertexTextures:Vb,map:!!d.map,envMap:!!d.envMap,envMapMode:d.envMap&&d.envMap.mapping,lightMap:!!d.lightMap,bumpMap:!!d.bumpMap,normalMap:!!d.normalMap,specularMap:!!d.specularMap,alphaMap:!!d.alphaMap,combine:d.combine,vertexColors:d.vertexColors,fog:c,useFog:d.fog,fogExp:c instanceof THREE.FogExp2,flatShading:d.shading===THREE.FlatShading,sizeAttenuation:d.sizeAttenuation,logarithmicDepthBuffer:ja,skinning:d.skinning,maxBones:H,useVertexTexture:Nb&&e&&e.s [...]
+e.skeleton.useVertexTexture,morphTargets:d.morphTargets,morphNormals:d.morphNormals,maxMorphTargets:B.maxMorphTargets,maxMorphNormals:B.maxMorphNormals,maxDirLights:f,maxPointLights:g,maxSpotLights:h,maxHemiLights:k,maxShadows:u,shadowMapEnabled:B.shadowMapEnabled&&e.receiveShadow&&0<u,shadowMapType:B.shadowMapType,shadowMapDebug:B.shadowMapDebug,shadowMapCascade:B.shadowMapCascade,alphaTest:d.alphaTest,metal:d.metal,wrapAround:d.wrapAround,doubleSided:d.side===THREE.DoubleSide,flipSided [...]
+THREE.BackSide},R=[];n?R.push(n):(R.push(d.fragmentShader),R.push(d.vertexShader));if(void 0!==d.defines)for(var O in d.defines)R.push(O),R.push(d.defines[O]);for(O in P)R.push(O),R.push(P[O]);for(var Ka=R.join(),V,W=0,Qa=Pa.length;W<Qa;W++){var Ya=Pa[W];if(Ya.code===Ka){V=Ya;V.usedTimes++;break}}void 0===V&&(V=new THREE.WebGLProgram(B,Ka,d,P),Pa.push(V),B.info.memory.programs=Pa.length);d.program=V;var Xa=V.attributes;if(d.morphTargets){d.numSupportedMorphTargets=0;for(var ca,da="morphT [...]
+0;ba<B.maxMorphTargets;ba++)ca=da+ba,0<=Xa[ca]&&d.numSupportedMorphTargets++}if(d.morphNormals)for(d.numSupportedMorphNormals=0,da="morphNormal",ba=0;ba<B.maxMorphNormals;ba++)ca=da+ba,0<=Xa[ca]&&d.numSupportedMorphNormals++;d.uniformsList=[];for(var ha in d.__webglShader.uniforms){var ta=d.program.uniforms[ha];ta&&d.uniformsList.push([d.__webglShader.uniforms[ha],ta])}d.needsUpdate=!1}d.morphTargets&&!e.__webglMorphTargetInfluences&&(e.__webglMorphTargetInfluences=new Float32Array(B.max [...]
+var ab=!1,oa=!1,qa=!1,Ua=d.program,ka=Ua.uniforms,Q=d.__webglShader.uniforms;Ua.id!==ob&&(m.useProgram(Ua.program),ob=Ua.id,qa=oa=ab=!0);d.id!==ub&&(-1===ub&&(qa=!0),ub=d.id,oa=!0);if(ab||a!==vb)m.uniformMatrix4fv(ka.projectionMatrix,!1,a.projectionMatrix.elements),ja&&m.uniform1f(ka.logDepthBufFC,2/(Math.log(a.far+1)/Math.LN2)),a!==vb&&(vb=a),(d instanceof THREE.ShaderMaterial||d instanceof THREE.MeshPhongMaterial||d.envMap)&&null!==ka.cameraPosition&&(wa.setFromMatrixPosition(a.matrixW [...]
+wa.x,wa.y,wa.z)),(d instanceof THREE.MeshPhongMaterial||d instanceof THREE.MeshLambertMaterial||d instanceof THREE.MeshBasicMaterial||d instanceof THREE.ShaderMaterial||d.skinning)&&null!==ka.viewMatrix&&m.uniformMatrix4fv(ka.viewMatrix,!1,a.matrixWorldInverse.elements);if(d.skinning)if(e.bindMatrix&&null!==ka.bindMatrix&&m.uniformMatrix4fv(ka.bindMatrix,!1,e.bindMatrix.elements),e.bindMatrixInverse&&null!==ka.bindMatrixInverse&&m.uniformMatrix4fv(ka.bindMatrixInverse,!1,e.bindMatrixInve [...]
+Nb&&e.skeleton&&e.skeleton.useVertexTexture){if(null!==ka.boneTexture){var db=D();m.uniform1i(ka.boneTexture,db);B.setTexture(e.skeleton.boneTexture,db)}null!==ka.boneTextureWidth&&m.uniform1i(ka.boneTextureWidth,e.skeleton.boneTextureWidth);null!==ka.boneTextureHeight&&m.uniform1i(ka.boneTextureHeight,e.skeleton.boneTextureHeight)}else e.skeleton&&e.skeleton.boneMatrices&&null!==ka.boneGlobalMatrices&&m.uniformMatrix4fv(ka.boneGlobalMatrices,!1,e.skeleton.boneMatrices);if(oa){c&&d.fog&& [...]
+c.color,c instanceof THREE.Fog?(Q.fogNear.value=c.near,Q.fogFar.value=c.far):c instanceof THREE.FogExp2&&(Q.fogDensity.value=c.density));if(d instanceof THREE.MeshPhongMaterial||d instanceof THREE.MeshLambertMaterial||d.lights){if(Ob){var qa=!0,ia,Za,Y,bb=0,cb=0,ib=0,xb,pb,qb,Ca,jb,na=jc,rb=na.directional.colors,La=na.directional.positions,Ma=na.point.colors,kb=na.point.positions,yb=na.point.distances,zb=na.point.decays,Ra=na.spot.colors,Aa=na.spot.positions,eb=na.spot.distances,Ha=na.sp [...]
+la=na.spot.anglesCos,ra=na.spot.exponents,K=na.spot.decays,za=na.hemi.skyColors,Sa=na.hemi.groundColors,Ab=na.hemi.positions,lb=0,Na=0,sa=0,Da=0,Ea=0,Fa=0,Gb=0,Bb=0,mb=0,C=0,Va=0,Ta=0;ia=0;for(Za=b.length;ia<Za;ia++)Y=b[ia],Y.onlyShadow||(xb=Y.color,Ca=Y.intensity,jb=Y.distance,Y instanceof THREE.AmbientLight?Y.visible&&(bb+=xb.r,cb+=xb.g,ib+=xb.b):Y instanceof THREE.DirectionalLight?(Ea+=1,Y.visible&&(pa.setFromMatrixPosition(Y.matrixWorld),wa.setFromMatrixPosition(Y.target.matrixWorld) [...]
+pa.normalize(),mb=3*lb,La[mb]=pa.x,La[mb+1]=pa.y,La[mb+2]=pa.z,y(rb,mb,xb,Ca),lb+=1)):Y instanceof THREE.PointLight?(Fa+=1,Y.visible&&(C=3*Na,y(Ma,C,xb,Ca),wa.setFromMatrixPosition(Y.matrixWorld),kb[C]=wa.x,kb[C+1]=wa.y,kb[C+2]=wa.z,yb[Na]=jb,zb[Na]=0===Y.distance?0:Y.decay,Na+=1)):Y instanceof THREE.SpotLight?(Gb+=1,Y.visible&&(Va=3*sa,y(Ra,Va,xb,Ca),pa.setFromMatrixPosition(Y.matrixWorld),Aa[Va]=pa.x,Aa[Va+1]=pa.y,Aa[Va+2]=pa.z,eb[sa]=jb,wa.setFromMatrixPosition(Y.target.matrixWorld),p [...]
+pa.normalize(),Ha[Va]=pa.x,Ha[Va+1]=pa.y,Ha[Va+2]=pa.z,la[sa]=Math.cos(Y.angle),ra[sa]=Y.exponent,K[sa]=0===Y.distance?0:Y.decay,sa+=1)):Y instanceof THREE.HemisphereLight&&(Bb+=1,Y.visible&&(pa.setFromMatrixPosition(Y.matrixWorld),pa.normalize(),Ta=3*Da,Ab[Ta]=pa.x,Ab[Ta+1]=pa.y,Ab[Ta+2]=pa.z,pb=Y.color,qb=Y.groundColor,y(za,Ta,pb,Ca),y(Sa,Ta,qb,Ca),Da+=1)));ia=3*lb;for(Za=Math.max(rb.length,3*Ea);ia<Za;ia++)rb[ia]=0;ia=3*Na;for(Za=Math.max(Ma.length,3*Fa);ia<Za;ia++)Ma[ia]=0;ia=3*sa;fo [...]
+3*Gb);ia<Za;ia++)Ra[ia]=0;ia=3*Da;for(Za=Math.max(za.length,3*Bb);ia<Za;ia++)za[ia]=0;ia=3*Da;for(Za=Math.max(Sa.length,3*Bb);ia<Za;ia++)Sa[ia]=0;na.directional.length=lb;na.point.length=Na;na.spot.length=sa;na.hemi.length=Da;na.ambient[0]=bb;na.ambient[1]=cb;na.ambient[2]=ib;Ob=!1}if(qa){var ea=jc;Q.ambientLightColor.value=ea.ambient;Q.directionalLightColor.value=ea.directional.colors;Q.directionalLightDirection.value=ea.directional.positions;Q.pointLightColor.value=ea.point.colors;Q.po [...]
+ea.point.positions;Q.pointLightDistance.value=ea.point.distances;Q.pointLightDecay.value=ea.point.decays;Q.spotLightColor.value=ea.spot.colors;Q.spotLightPosition.value=ea.spot.positions;Q.spotLightDistance.value=ea.spot.distances;Q.spotLightDirection.value=ea.spot.directions;Q.spotLightAngleCos.value=ea.spot.anglesCos;Q.spotLightExponent.value=ea.spot.exponents;Q.spotLightDecay.value=ea.spot.decays;Q.hemisphereLightSkyColor.value=ea.hemi.skyColors;Q.hemisphereLightGroundColor.value=ea.h [...]
+Q.hemisphereLightDirection.value=ea.hemi.positions;x(Q,!0)}else x(Q,!1)}if(d instanceof THREE.MeshBasicMaterial||d instanceof THREE.MeshLambertMaterial||d instanceof THREE.MeshPhongMaterial){Q.opacity.value=d.opacity;Q.diffuse.value=d.color;Q.map.value=d.map;Q.lightMap.value=d.lightMap;Q.specularMap.value=d.specularMap;Q.alphaMap.value=d.alphaMap;d.bumpMap&&(Q.bumpMap.value=d.bumpMap,Q.bumpScale.value=d.bumpScale);d.normalMap&&(Q.normalMap.value=d.normalMap,Q.normalScale.value.copy(d.nor [...]
+var fb;d.map?fb=d.map:d.specularMap?fb=d.specularMap:d.normalMap?fb=d.normalMap:d.bumpMap?fb=d.bumpMap:d.alphaMap&&(fb=d.alphaMap);if(void 0!==fb){var wb=fb.offset,Qb=fb.repeat;Q.offsetRepeat.value.set(wb.x,wb.y,Qb.x,Qb.y)}Q.envMap.value=d.envMap;Q.flipEnvMap.value=d.envMap instanceof THREE.WebGLRenderTargetCube?1:-1;Q.reflectivity.value=d.reflectivity;Q.refractionRatio.value=d.refractionRatio}if(d instanceof THREE.LineBasicMaterial)Q.diffuse.value=d.color,Q.opacity.value=d.opacity;else  [...]
+THREE.LineDashedMaterial)Q.diffuse.value=d.color,Q.opacity.value=d.opacity,Q.dashSize.value=d.dashSize,Q.totalSize.value=d.dashSize+d.gapSize,Q.scale.value=d.scale;else if(d instanceof THREE.PointCloudMaterial){if(Q.psColor.value=d.color,Q.opacity.value=d.opacity,Q.size.value=d.size,Q.scale.value=U.height/2,Q.map.value=d.map,null!==d.map){var Wb=d.map.offset,Xb=d.map.repeat;Q.offsetRepeat.value.set(Wb.x,Wb.y,Xb.x,Xb.y)}}else d instanceof THREE.MeshPhongMaterial?(Q.shininess.value=d.shini [...]
+d.emissive,Q.specular.value=d.specular,d.wrapAround&&Q.wrapRGB.value.copy(d.wrapRGB)):d instanceof THREE.MeshLambertMaterial?(Q.emissive.value=d.emissive,d.wrapAround&&Q.wrapRGB.value.copy(d.wrapRGB)):d instanceof THREE.MeshDepthMaterial?(Q.mNear.value=a.near,Q.mFar.value=a.far,Q.opacity.value=d.opacity):d instanceof THREE.MeshNormalMaterial&&(Q.opacity.value=d.opacity);if(e.receiveShadow&&!d._shadowPass&&Q.shadowMatrix)for(var sb=0,Cb=0,$=b.length;Cb<$;Cb++){var xa=b[Cb];xa.castShadow&& [...]
+THREE.SpotLight||xa instanceof THREE.DirectionalLight&&!xa.shadowCascade)&&(Q.shadowMap.value[sb]=xa.shadowMap,Q.shadowMapSize.value[sb]=xa.shadowMapSize,Q.shadowMatrix.value[sb]=xa.shadowMatrix,Q.shadowDarkness.value[sb]=xa.shadowDarkness,Q.shadowBias.value[sb]=xa.shadowBias,sb++)}for(var Hb=d.uniformsList,Oa,$a,Ga,nb=0,fc=Hb.length;nb<fc;nb++){var T=Hb[nb][0];if(!1!==T.needsUpdate){var Db=T.type,S=T.value,aa=Hb[nb][1];switch(Db){case "1i":m.uniform1i(aa,S);break;case "1f":m.uniform1f(a [...]
+case "2f":m.uniform2f(aa,S[0],S[1]);break;case "3f":m.uniform3f(aa,S[0],S[1],S[2]);break;case "4f":m.uniform4f(aa,S[0],S[1],S[2],S[3]);break;case "1iv":m.uniform1iv(aa,S);break;case "3iv":m.uniform3iv(aa,S);break;case "1fv":m.uniform1fv(aa,S);break;case "2fv":m.uniform2fv(aa,S);break;case "3fv":m.uniform3fv(aa,S);break;case "4fv":m.uniform4fv(aa,S);break;case "Matrix3fv":m.uniformMatrix3fv(aa,!1,S);break;case "Matrix4fv":m.uniformMatrix4fv(aa,!1,S);break;case "i":m.uniform1i(aa,S);break; [...]
+S);break;case "v2":m.uniform2f(aa,S.x,S.y);break;case "v3":m.uniform3f(aa,S.x,S.y,S.z);break;case "v4":m.uniform4f(aa,S.x,S.y,S.z,S.w);break;case "c":m.uniform3f(aa,S.r,S.g,S.b);break;case "iv1":m.uniform1iv(aa,S);break;case "iv":m.uniform3iv(aa,S);break;case "fv1":m.uniform1fv(aa,S);break;case "fv":m.uniform3fv(aa,S);break;case "v2v":void 0===T._array&&(T._array=new Float32Array(2*S.length));for(var Z=0,Ba=S.length;Z<Ba;Z++)Ga=2*Z,T._array[Ga]=S[Z].x,T._array[Ga+1]=S[Z].y;m.uniform2fv(a [...]
+break;case "v3v":void 0===T._array&&(T._array=new Float32Array(3*S.length));Z=0;for(Ba=S.length;Z<Ba;Z++)Ga=3*Z,T._array[Ga]=S[Z].x,T._array[Ga+1]=S[Z].y,T._array[Ga+2]=S[Z].z;m.uniform3fv(aa,T._array);break;case "v4v":void 0===T._array&&(T._array=new Float32Array(4*S.length));Z=0;for(Ba=S.length;Z<Ba;Z++)Ga=4*Z,T._array[Ga]=S[Z].x,T._array[Ga+1]=S[Z].y,T._array[Ga+2]=S[Z].z,T._array[Ga+3]=S[Z].w;m.uniform4fv(aa,T._array);break;case "m3":m.uniformMatrix3fv(aa,!1,S.elements);break;case "m [...]
+T._array&&(T._array=new Float32Array(9*S.length));Z=0;for(Ba=S.length;Z<Ba;Z++)S[Z].flattenToArrayOffset(T._array,9*Z);m.uniformMatrix3fv(aa,!1,T._array);break;case "m4":m.uniformMatrix4fv(aa,!1,S.elements);break;case "m4v":void 0===T._array&&(T._array=new Float32Array(16*S.length));Z=0;for(Ba=S.length;Z<Ba;Z++)S[Z].flattenToArrayOffset(T._array,16*Z);m.uniformMatrix4fv(aa,!1,T._array);break;case "t":Oa=S;$a=D();m.uniform1i(aa,$a);if(!Oa)continue;if(Oa instanceof THREE.CubeTexture||Oa.im [...]
+Array&&6===Oa.image.length){var X=Oa,Eb=$a;if(6===X.image.length)if(X.needsUpdate){X.image.__webglTextureCube||(X.addEventListener("dispose",Pb),X.image.__webglTextureCube=m.createTexture(),B.info.memory.textures++);m.activeTexture(m.TEXTURE0+Eb);m.bindTexture(m.TEXTURE_CUBE_MAP,X.image.__webglTextureCube);m.pixelStorei(m.UNPACK_FLIP_Y_WEBGL,X.flipY);for(var Rb=X instanceof THREE.CompressedTexture,Ib=X.image[0]instanceof THREE.DataTexture,gb=[],ya=0;6>ya;ya++)gb[ya]=!B.autoScaleCubemaps| [...]
+X.image[ya].image:X.image[ya]:E(X.image[ya],qc);var Yb=gb[0],Zb=THREE.Math.isPowerOfTwo(Yb.width)&&THREE.Math.isPowerOfTwo(Yb.height),Wa=I(X.format),tb=I(X.type);A(m.TEXTURE_CUBE_MAP,X,Zb);for(ya=0;6>ya;ya++)if(Rb)for(var hb,$b=gb[ya].mipmaps,Fb=0,Sb=$b.length;Fb<Sb;Fb++)hb=$b[Fb],X.format!==THREE.RGBAFormat&&X.format!==THREE.RGBFormat?-1<kc().indexOf(Wa)?m.compressedTexImage2D(m.TEXTURE_CUBE_MAP_POSITIVE_X+ya,Fb,Wa,hb.width,hb.height,0,hb.data):THREE.warn("THREE.WebGLRenderer: Attempt t [...]
+m.texImage2D(m.TEXTURE_CUBE_MAP_POSITIVE_X+ya,Fb,Wa,hb.width,hb.height,0,Wa,tb,hb.data);else Ib?m.texImage2D(m.TEXTURE_CUBE_MAP_POSITIVE_X+ya,0,Wa,gb[ya].width,gb[ya].height,0,Wa,tb,gb[ya].data):m.texImage2D(m.TEXTURE_CUBE_MAP_POSITIVE_X+ya,0,Wa,Wa,tb,gb[ya]);X.generateMipmaps&&Zb&&m.generateMipmap(m.TEXTURE_CUBE_MAP);X.needsUpdate=!1;if(X.onUpdate)X.onUpdate()}else m.activeTexture(m.TEXTURE0+Eb),m.bindTexture(m.TEXTURE_CUBE_MAP,X.image.__webglTextureCube)}else if(Oa instanceof THREE.Web [...]
+Oa;m.activeTexture(m.TEXTURE0+$a);m.bindTexture(m.TEXTURE_CUBE_MAP,Jb.__webglTexture)}else B.setTexture(Oa,$a);break;case "tv":void 0===T._array&&(T._array=[]);Z=0;for(Ba=T.value.length;Z<Ba;Z++)T._array[Z]=D();m.uniform1iv(aa,T._array);Z=0;for(Ba=T.value.length;Z<Ba;Z++)Oa=T.value[Z],$a=T._array[Z],Oa&&B.setTexture(Oa,$a);break;default:THREE.warn("THREE.WebGLRenderer: Unknown uniform type: "+Db)}}}}m.uniformMatrix4fv(ka.modelViewMatrix,!1,e._modelViewMatrix.elements);ka.normalMatrix&&m. [...]
+!1,e._normalMatrix.elements);null!==ka.modelMatrix&&m.uniformMatrix4fv(ka.modelMatrix,!1,e.matrixWorld.elements);return Ua}function x(a,b){a.ambientLightColor.needsUpdate=b;a.directionalLightColor.needsUpdate=b;a.directionalLightDirection.needsUpdate=b;a.pointLightColor.needsUpdate=b;a.pointLightPosition.needsUpdate=b;a.pointLightDistance.needsUpdate=b;a.pointLightDecay.needsUpdate=b;a.spotLightColor.needsUpdate=b;a.spotLightPosition.needsUpdate=b;a.spotLightDistance.needsUpdate=b;a.spot [...]
+b;a.spotLightAngleCos.needsUpdate=b;a.spotLightExponent.needsUpdate=b;a.spotLightDecay.needsUpdate=b;a.hemisphereLightSkyColor.needsUpdate=b;a.hemisphereLightGroundColor.needsUpdate=b;a.hemisphereLightDirection.needsUpdate=b}function D(){var a=Mb;a>=Wb&&THREE.warn("WebGLRenderer: trying to use "+a+" texture units while this GPU supports only "+Wb);Mb+=1;return a}function w(a,b){a._modelViewMatrix.multiplyMatrices(b.matrixWorldInverse,a.matrixWorld);a._normalMatrix.getNormalMatrix(a._mode [...]
+function y(a,b,c,d){a[b]=c.r*d;a[b+1]=c.g*d;a[b+2]=c.b*d}function A(a,b,c){c?(m.texParameteri(a,m.TEXTURE_WRAP_S,I(b.wrapS)),m.texParameteri(a,m.TEXTURE_WRAP_T,I(b.wrapT)),m.texParameteri(a,m.TEXTURE_MAG_FILTER,I(b.magFilter)),m.texParameteri(a,m.TEXTURE_MIN_FILTER,I(b.minFilter))):(m.texParameteri(a,m.TEXTURE_WRAP_S,m.CLAMP_TO_EDGE),m.texParameteri(a,m.TEXTURE_WRAP_T,m.CLAMP_TO_EDGE),b.wrapS===THREE.ClampToEdgeWrapping&&b.wrapT===THREE.ClampToEdgeWrapping||THREE.warn("THREE.WebGLRendere [...]
+b.sourceFile+" )"),m.texParameteri(a,m.TEXTURE_MAG_FILTER,z(b.magFilter)),m.texParameteri(a,m.TEXTURE_MIN_FILTER,z(b.minFilter)),b.minFilter!==THREE.NearestFilter&&b.minFilter!==THREE.LinearFilter&&THREE.warn("THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter should be set to THREE.NearestFilter or THREE.LinearFilter. ( "+b.sourceFile+" )"));(c=da.get("EXT_texture_filter_anisotropic"))&&b.type!==THREE.FloatType&&b.type!==THREE.HalfFloatType&&(1<b.anisotropy||b.__current [...]
+(m.texParameterf(a,c.TEXTURE_MAX_ANISOTROPY_EXT,Math.min(b.anisotropy,B.getMaxAnisotropy())),b.__currentAnisotropy=b.anisotropy)}function E(a,b){if(a.width>b||a.height>b){var c=b/Math.max(a.width,a.height),d=document.createElement("canvas");d.width=Math.floor(a.width*c);d.height=Math.floor(a.height*c);d.getContext("2d").drawImage(a,0,0,a.width,a.height,0,0,d.width,d.height);THREE.warn("THREE.WebGLRenderer: image is too big ("+a.width+"x"+a.height+"). Resized to "+d.width+"x"+d.height,a); [...]
+function G(a,b){m.bindRenderbuffer(m.RENDERBUFFER,a);b.depthBuffer&&!b.stencilBuffer?(m.renderbufferStorage(m.RENDERBUFFER,m.DEPTH_COMPONENT16,b.width,b.height),m.framebufferRenderbuffer(m.FRAMEBUFFER,m.DEPTH_ATTACHMENT,m.RENDERBUFFER,a)):b.depthBuffer&&b.stencilBuffer?(m.renderbufferStorage(m.RENDERBUFFER,m.DEPTH_STENCIL,b.width,b.height),m.framebufferRenderbuffer(m.FRAMEBUFFER,m.DEPTH_STENCIL_ATTACHMENT,m.RENDERBUFFER,a)):m.renderbufferStorage(m.RENDERBUFFER,m.RGBA4,b.width,b.height)}f [...]
+THREE.WebGLRenderTargetCube?(m.bindTexture(m.TEXTURE_CUBE_MAP,a.__webglTexture),m.generateMipmap(m.TEXTURE_CUBE_MAP),m.bindTexture(m.TEXTURE_CUBE_MAP,null)):(m.bindTexture(m.TEXTURE_2D,a.__webglTexture),m.generateMipmap(m.TEXTURE_2D),m.bindTexture(m.TEXTURE_2D,null))}function z(a){return a===THREE.NearestFilter||a===THREE.NearestMipMapNearestFilter||a===THREE.NearestMipMapLinearFilter?m.NEAREST:m.LINEAR}function I(a){var b;if(a===THREE.RepeatWrapping)return m.REPEAT;if(a===THREE.ClampToE [...]
+if(a===THREE.MirroredRepeatWrapping)return m.MIRRORED_REPEAT;if(a===THREE.NearestFilter)return m.NEAREST;if(a===THREE.NearestMipMapNearestFilter)return m.NEAREST_MIPMAP_NEAREST;if(a===THREE.NearestMipMapLinearFilter)return m.NEAREST_MIPMAP_LINEAR;if(a===THREE.LinearFilter)return m.LINEAR;if(a===THREE.LinearMipMapNearestFilter)return m.LINEAR_MIPMAP_NEAREST;if(a===THREE.LinearMipMapLinearFilter)return m.LINEAR_MIPMAP_LINEAR;if(a===THREE.UnsignedByteType)return m.UNSIGNED_BYTE;if(a===THREE [...]
+if(a===THREE.UnsignedShort5551Type)return m.UNSIGNED_SHORT_5_5_5_1;if(a===THREE.UnsignedShort565Type)return m.UNSIGNED_SHORT_5_6_5;if(a===THREE.ByteType)return m.BYTE;if(a===THREE.ShortType)return m.SHORT;if(a===THREE.UnsignedShortType)return m.UNSIGNED_SHORT;if(a===THREE.IntType)return m.INT;if(a===THREE.UnsignedIntType)return m.UNSIGNED_INT;if(a===THREE.FloatType)return m.FLOAT;b=da.get("OES_texture_half_float");if(null!==b&&a===THREE.HalfFloatType)return b.HALF_FLOAT_OES;if(a===THREE. [...]
+if(a===THREE.RGBFormat)return m.RGB;if(a===THREE.RGBAFormat)return m.RGBA;if(a===THREE.LuminanceFormat)return m.LUMINANCE;if(a===THREE.LuminanceAlphaFormat)return m.LUMINANCE_ALPHA;if(a===THREE.AddEquation)return m.FUNC_ADD;if(a===THREE.SubtractEquation)return m.FUNC_SUBTRACT;if(a===THREE.ReverseSubtractEquation)return m.FUNC_REVERSE_SUBTRACT;if(a===THREE.ZeroFactor)return m.ZERO;if(a===THREE.OneFactor)return m.ONE;if(a===THREE.SrcColorFactor)return m.SRC_COLOR;if(a===THREE.OneMinusSrcCo [...]
+if(a===THREE.SrcAlphaFactor)return m.SRC_ALPHA;if(a===THREE.OneMinusSrcAlphaFactor)return m.ONE_MINUS_SRC_ALPHA;if(a===THREE.DstAlphaFactor)return m.DST_ALPHA;if(a===THREE.OneMinusDstAlphaFactor)return m.ONE_MINUS_DST_ALPHA;if(a===THREE.DstColorFactor)return m.DST_COLOR;if(a===THREE.OneMinusDstColorFactor)return m.ONE_MINUS_DST_COLOR;if(a===THREE.SrcAlphaSaturateFactor)return m.SRC_ALPHA_SATURATE;b=da.get("WEBGL_compressed_texture_s3tc");if(null!==b){if(a===THREE.RGB_S3TC_DXT1_Format)ret [...]
+if(a===THREE.RGBA_S3TC_DXT1_Format)return b.COMPRESSED_RGBA_S3TC_DXT1_EXT;if(a===THREE.RGBA_S3TC_DXT3_Format)return b.COMPRESSED_RGBA_S3TC_DXT3_EXT;if(a===THREE.RGBA_S3TC_DXT5_Format)return b.COMPRESSED_RGBA_S3TC_DXT5_EXT}b=da.get("WEBGL_compressed_texture_pvrtc");if(null!==b){if(a===THREE.RGB_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;if(a===THREE.RGB_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;if(a===THREE.RGBA_PVRTC_4BPPV1_Format)return b.COMPRESSED_ [...]
+if(a===THREE.RGBA_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG}b=da.get("EXT_blend_minmax");if(null!==b){if(a===THREE.MinEquation)return b.MIN_EXT;if(a===THREE.MaxEquation)return b.MAX_EXT}return 0}console.log("THREE.WebGLRenderer",THREE.REVISION);a=a||{};var U=void 0!==a.canvas?a.canvas:document.createElement("canvas"),M=void 0!==a.context?a.context:null,H=1,L=void 0!==a.precision?a.precision:"highp",P=void 0!==a.alpha?a.alpha:!1,N=void 0!==a.depth?a.depth:!0,R=void 0!= [...]
+a.stencil:!0,V=void 0!==a.antialias?a.antialias:!1,J=void 0!==a.premultipliedAlpha?a.premultipliedAlpha:!0,oa=void 0!==a.preserveDrawingBuffer?a.preserveDrawingBuffer:!1,ja=void 0!==a.logarithmicDepthBuffer?a.logarithmicDepthBuffer:!1,ha=new THREE.Color(0),O=0,ca=[],ba={},qa=[],Ka=[],Qa=[],Xa=[],Ya=[];this.domElement=U;this.context=null;this.sortObjects=this.autoClearStencil=this.autoClearDepth=this.autoClearColor=this.autoClear=!0;this.gammaFactor=2;this.shadowMapEnabled=this.gammaOutpu [...]
+!1;this.shadowMapType=THREE.PCFShadowMap;this.shadowMapCullFace=THREE.CullFaceFront;this.shadowMapCascade=this.shadowMapDebug=!1;this.maxMorphTargets=8;this.maxMorphNormals=4;this.autoScaleCubemaps=!0;this.info={memory:{programs:0,geometries:0,textures:0},render:{calls:0,vertices:0,faces:0,points:0}};var B=this,Pa=[],ob=null,ab=null,ub=-1,ta="",vb=null,Mb=0,ib=0,bb=0,pb=U.width,qb=U.height,Xb=0,fc=0,cb=new THREE.Frustum,db=new THREE.Matrix4,wa=new THREE.Vector3,pa=new THREE.Vector3,Ob=!0 [...]
+0,0],directional:{length:0,colors:[],positions:[]},point:{length:0,colors:[],positions:[],distances:[],decays:[]},spot:{length:0,colors:[],positions:[],distances:[],directions:[],anglesCos:[],exponents:[],decays:[]},hemi:{length:0,skyColors:[],groundColors:[],positions:[]}},m;try{var Yb={alpha:P,depth:N,stencil:R,antialias:V,premultipliedAlpha:J,preserveDrawingBuffer:oa};m=M||U.getContext("webgl",Yb)||U.getContext("experimental-webgl",Yb);if(null===m){if(null!==U.getContext("webgl"))thro [...]
+throw"Error creating WebGL context.";}U.addEventListener("webglcontextlost",function(a){a.preventDefault();Zb();lc();ba={}},!1)}catch(rc){THREE.error("THREE.WebGLRenderer: "+rc)}var W=new THREE.WebGLState(m,I);void 0===m.getShaderPrecisionFormat&&(m.getShaderPrecisionFormat=function(){return{rangeMin:1,rangeMax:1,precision:1}});var da=new THREE.WebGLExtensions(m);da.get("OES_texture_float");da.get("OES_texture_float_linear");da.get("OES_texture_half_float");da.get("OES_texture_half_float [...]
+da.get("OES_standard_derivatives");ja&&da.get("EXT_frag_depth");var rb=function(a,b,c,d){!0===J&&(a*=d,b*=d,c*=d);m.clearColor(a,b,c,d)},lc=function(){m.clearColor(0,0,0,1);m.clearDepth(1);m.clearStencil(0);m.enable(m.DEPTH_TEST);m.depthFunc(m.LEQUAL);m.frontFace(m.CCW);m.cullFace(m.BACK);m.enable(m.CULL_FACE);m.enable(m.BLEND);m.blendEquation(m.FUNC_ADD);m.blendFunc(m.SRC_ALPHA,m.ONE_MINUS_SRC_ALPHA);m.viewport(ib,bb,pb,qb);rb(ha.r,ha.g,ha.b,O)},Zb=function(){vb=ob=null;ta="";ub=-1;Ob=! [...]
+lc();this.context=m;this.state=W;var Wb=m.getParameter(m.MAX_TEXTURE_IMAGE_UNITS),sc=m.getParameter(m.MAX_VERTEX_TEXTURE_IMAGE_UNITS),tc=m.getParameter(m.MAX_TEXTURE_SIZE),qc=m.getParameter(m.MAX_CUBE_MAP_TEXTURE_SIZE),Vb=0<sc,Nb=Vb&&da.get("OES_texture_float"),uc=m.getShaderPrecisionFormat(m.VERTEX_SHADER,m.HIGH_FLOAT),vc=m.getShaderPrecisionFormat(m.VERTEX_SHADER,m.MEDIUM_FLOAT),wc=m.getShaderPrecisionFormat(m.FRAGMENT_SHADER,m.HIGH_FLOAT),xc=m.getShaderPrecisionFormat(m.FRAGMENT_SHADE [...]
+kc=function(){var a;return function(){if(void 0!==a)return a;a=[];if(da.get("WEBGL_compressed_texture_pvrtc")||da.get("WEBGL_compressed_texture_s3tc"))for(var b=m.getParameter(m.COMPRESSED_TEXTURE_FORMATS),c=0;c<b.length;c++)a.push(b[c]);return a}}(),yc=0<uc.precision&&0<wc.precision,mc=0<vc.precision&&0<xc.precision;"highp"!==L||yc||(mc?(L="mediump",THREE.warn("THREE.WebGLRenderer: highp not supported, using mediump.")):(L="lowp",THREE.warn("THREE.WebGLRenderer: highp and mediump not su [...]
+"mediump"!==L||mc||(L="lowp",THREE.warn("THREE.WebGLRenderer: mediump not supported, using lowp."));var zc=new THREE.ShadowMapPlugin(this,ca,ba,qa),Ac=new THREE.SpritePlugin(this,Xa),Bc=new THREE.LensFlarePlugin(this,Ya);this.getContext=function(){return m};this.forceContextLoss=function(){da.get("WEBGL_lose_context").loseContext()};this.supportsVertexTextures=function(){return Vb};this.supportsFloatTextures=function(){return da.get("OES_texture_float")};this.supportsHalfFloatTextures=fu [...]
+this.supportsStandardDerivatives=function(){return da.get("OES_standard_derivatives")};this.supportsCompressedTextureS3TC=function(){return da.get("WEBGL_compressed_texture_s3tc")};this.supportsCompressedTexturePVRTC=function(){return da.get("WEBGL_compressed_texture_pvrtc")};this.supportsBlendMinMax=function(){return da.get("EXT_blend_minmax")};this.getMaxAnisotropy=function(){var a;return function(){if(void 0!==a)return a;var b=da.get("EXT_texture_filter_anisotropic");return a=null!==b [...]
+0}}();this.getPrecision=function(){return L};this.getPixelRatio=function(){return H};this.setPixelRatio=function(a){H=a};this.setSize=function(a,b,c){U.width=a*H;U.height=b*H;!1!==c&&(U.style.width=a+"px",U.style.height=b+"px");this.setViewport(0,0,a,b)};this.setViewport=function(a,b,c,d){ib=a*H;bb=b*H;pb=c*H;qb=d*H;m.viewport(ib,bb,pb,qb)};this.setScissor=function(a,b,c,d){m.scissor(a*H,b*H,c*H,d*H)};this.enableScissorTest=function(a){a?m.enable(m.SCISSOR_TEST):m.disable(m.SCISSOR_TEST) [...]
+function(){return ha};this.setClearColor=function(a,b){ha.set(a);O=void 0!==b?b:1;rb(ha.r,ha.g,ha.b,O)};this.getClearAlpha=function(){return O};this.setClearAlpha=function(a){O=a;rb(ha.r,ha.g,ha.b,O)};this.clear=function(a,b,c){var d=0;if(void 0===a||a)d|=m.COLOR_BUFFER_BIT;if(void 0===b||b)d|=m.DEPTH_BUFFER_BIT;if(void 0===c||c)d|=m.STENCIL_BUFFER_BIT;m.clear(d)};this.clearColor=function(){m.clear(m.COLOR_BUFFER_BIT)};this.clearDepth=function(){m.clear(m.DEPTH_BUFFER_BIT)};this.clearSte [...]
+this.clearTarget=function(a,b,c,d){this.setRenderTarget(a);this.clear(b,c,d)};this.resetGLState=Zb;var wb=function(a){a.target.traverse(function(a){a.removeEventListener("remove",wb);if(a instanceof THREE.Mesh||a instanceof THREE.PointCloud||a instanceof THREE.Line)delete ba[a.id];else if(a instanceof THREE.ImmediateRenderObject||a.immediateRenderCallback)for(var b=qa,c=b.length-1;0<=c;c--)b[c].object===a&&b.splice(c,1);delete a.__webglInit;delete a._modelViewMatrix;delete a._normalMatri [...]
+jb=function(a){a=a.target;a.removeEventListener("dispose",jb);delete a.__webglInit;if(a instanceof THREE.BufferGeometry){for(var b in a.attributes){var c=a.attributes[b];void 0!==c.buffer&&(m.deleteBuffer(c.buffer),delete c.buffer)}B.info.memory.geometries--}else if(b=Ua[a.id],void 0!==b){for(var c=0,d=b.length;c<d;c++){var e=b[c];if(void 0!==e.numMorphTargets){for(var f=0,g=e.numMorphTargets;f<g;f++)m.deleteBuffer(e.__webglMorphTargetsBuffers[f]);delete e.__webglMorphTargetsBuffers}if(v [...]
+0;for(g=e.numMorphNormals;f<g;f++)m.deleteBuffer(e.__webglMorphNormalsBuffers[f]);delete e.__webglMorphNormalsBuffers}nc(e)}delete Ua[a.id]}else nc(a);ta=""},Pb=function(a){a=a.target;a.removeEventListener("dispose",Pb);a.image&&a.image.__webglTextureCube?(m.deleteTexture(a.image.__webglTextureCube),delete a.image.__webglTextureCube):void 0!==a.__webglInit&&(m.deleteTexture(a.__webglTexture),delete a.__webglTexture,delete a.__webglInit);B.info.memory.textures--},oc=function(a){a=a.target [...]
+oc);if(a&&void 0!==a.__webglTexture){m.deleteTexture(a.__webglTexture);delete a.__webglTexture;if(a instanceof THREE.WebGLRenderTargetCube)for(var b=0;6>b;b++)m.deleteFramebuffer(a.__webglFramebuffer[b]),m.deleteRenderbuffer(a.__webglRenderbuffer[b]);else m.deleteFramebuffer(a.__webglFramebuffer),m.deleteRenderbuffer(a.__webglRenderbuffer);delete a.__webglFramebuffer;delete a.__webglRenderbuffer}B.info.memory.textures--},ic=function(a){a=a.target;a.removeEventListener("dispose",ic);hc(a) [...]
+"__webglVertexBuffer __webglNormalBuffer __webglTangentBuffer __webglColorBuffer __webglUVBuffer __webglUV2Buffer __webglSkinIndicesBuffer __webglSkinWeightsBuffer __webglFaceBuffer __webglLineBuffer __webglLineDistanceBuffer".split(" "),c=0,d=b.length;c<d;c++){var e=b[c];void 0!==a[e]&&(m.deleteBuffer(a[e]),delete a[e])}if(void 0!==a.__webglCustomAttributesList){for(e in a.__webglCustomAttributesList)m.deleteBuffer(a.__webglCustomAttributesList[e].buffer);delete a.__webglCustomAttribute [...]
+hc=function(a){var b=a.program.program;if(void 0!==b){a.program=void 0;var c,d,e=!1;a=0;for(c=Pa.length;a<c;a++)if(d=Pa[a],d.program===b){d.usedTimes--;0===d.usedTimes&&(e=!0);break}if(!0===e){e=[];a=0;for(c=Pa.length;a<c;a++)d=Pa[a],d.program!==b&&e.push(d);Pa=e;m.deleteProgram(b);B.info.memory.programs--}}};this.renderBufferImmediate=function(a,b,c){W.initAttributes();a.hasPositions&&!a.__webglVertexBuffer&&(a.__webglVertexBuffer=m.createBuffer());a.hasNormals&&!a.__webglNormalBuffer&& [...]
+m.createBuffer());a.hasUvs&&!a.__webglUvBuffer&&(a.__webglUvBuffer=m.createBuffer());a.hasColors&&!a.__webglColorBuffer&&(a.__webglColorBuffer=m.createBuffer());a.hasPositions&&(m.bindBuffer(m.ARRAY_BUFFER,a.__webglVertexBuffer),m.bufferData(m.ARRAY_BUFFER,a.positionArray,m.DYNAMIC_DRAW),W.enableAttribute(b.attributes.position),m.vertexAttribPointer(b.attributes.position,3,m.FLOAT,!1,0,0));if(a.hasNormals){m.bindBuffer(m.ARRAY_BUFFER,a.__webglNormalBuffer);if(!1===c instanceof THREE.Mesh [...]
+c.shading===THREE.FlatShading){var d,e,f,g,h,k,n,l,p,q,r,s=3*a.count;for(r=0;r<s;r+=9)q=a.normalArray,d=q[r],e=q[r+1],f=q[r+2],g=q[r+3],k=q[r+4],l=q[r+5],h=q[r+6],n=q[r+7],p=q[r+8],d=(d+g+h)/3,e=(e+k+n)/3,f=(f+l+p)/3,q[r]=d,q[r+1]=e,q[r+2]=f,q[r+3]=d,q[r+4]=e,q[r+5]=f,q[r+6]=d,q[r+7]=e,q[r+8]=f}m.bufferData(m.ARRAY_BUFFER,a.normalArray,m.DYNAMIC_DRAW);W.enableAttribute(b.attributes.normal);m.vertexAttribPointer(b.attributes.normal,3,m.FLOAT,!1,0,0)}a.hasUvs&&c.map&&(m.bindBuffer(m.ARRAY_ [...]
+m.bufferData(m.ARRAY_BUFFER,a.uvArray,m.DYNAMIC_DRAW),W.enableAttribute(b.attributes.uv),m.vertexAttribPointer(b.attributes.uv,2,m.FLOAT,!1,0,0));a.hasColors&&c.vertexColors!==THREE.NoColors&&(m.bindBuffer(m.ARRAY_BUFFER,a.__webglColorBuffer),m.bufferData(m.ARRAY_BUFFER,a.colorArray,m.DYNAMIC_DRAW),W.enableAttribute(b.attributes.color),m.vertexAttribPointer(b.attributes.color,3,m.FLOAT,!1,0,0));W.disableUnusedAttributes();m.drawArrays(m.TRIANGLES,0,a.count);a.count=0};this.renderBufferDi [...]
+b,c,e,f,g){if(!1!==e.visible)if(t(g),a=v(a,b,c,e,g),b=!1,c="direct_"+f.id+"_"+a.id+"_"+(e.wireframe?1:0),c!==ta&&(ta=c,b=!0),b&&W.initAttributes(),g instanceof THREE.Mesh){g=!0===e.wireframe?m.LINES:m.TRIANGLES;var h=f.attributes.index;if(h){var k,n;h.array instanceof Uint32Array&&da.get("OES_element_index_uint")?(k=m.UNSIGNED_INT,n=4):(k=m.UNSIGNED_SHORT,n=2);c=f.offsets;if(0===c.length)b&&(d(e,a,f,0),m.bindBuffer(m.ELEMENT_ARRAY_BUFFER,h.buffer)),m.drawElements(g,h.array.length,k,0),B. [...]
+B.info.render.vertices+=h.array.length,B.info.render.faces+=h.array.length/3;else{b=!0;for(var l=0,p=c.length;l<p;l++){var q=c[l].index;b&&(d(e,a,f,q),m.bindBuffer(m.ELEMENT_ARRAY_BUFFER,h.buffer));m.drawElements(g,c[l].count,k,c[l].start*n);B.info.render.calls++;B.info.render.vertices+=c[l].count;B.info.render.faces+=c[l].count/3}}}else b&&d(e,a,f,0),e=f.attributes.position,m.drawArrays(g,0,e.array.length/e.itemSize),B.info.render.calls++,B.info.render.vertices+=e.array.length/e.itemSiz [...]
+e.array.length/(3*e.itemSize)}else if(g instanceof THREE.PointCloud)if(g=m.POINTS,h=f.attributes.index)if(h.array instanceof Uint32Array&&da.get("OES_element_index_uint")?(k=m.UNSIGNED_INT,n=4):(k=m.UNSIGNED_SHORT,n=2),c=f.offsets,0===c.length)b&&(d(e,a,f,0),m.bindBuffer(m.ELEMENT_ARRAY_BUFFER,h.buffer)),m.drawElements(g,h.array.length,k,0),B.info.render.calls++,B.info.render.points+=h.array.length;else for(1<c.length&&(b=!0),l=0,p=c.length;l<p;l++)q=c[l].index,b&&(d(e,a,f,q),m.bindBuffe [...]
+h.buffer)),m.drawElements(g,c[l].count,k,c[l].start*n),B.info.render.calls++,B.info.render.points+=c[l].count;else if(b&&d(e,a,f,0),e=f.attributes.position,c=f.offsets,0===c.length)m.drawArrays(g,0,e.array.length/3),B.info.render.calls++,B.info.render.points+=e.array.length/3;else for(l=0,p=c.length;l<p;l++)m.drawArrays(g,c[l].index,c[l].count),B.info.render.calls++,B.info.render.points+=c[l].count;else if(g instanceof THREE.Line)if(g=g.mode===THREE.LineStrip?m.LINE_STRIP:m.LINES,W.setLi [...]
+H),h=f.attributes.index)if(h.array instanceof Uint32Array?(k=m.UNSIGNED_INT,n=4):(k=m.UNSIGNED_SHORT,n=2),c=f.offsets,0===c.length)b&&(d(e,a,f,0),m.bindBuffer(m.ELEMENT_ARRAY_BUFFER,h.buffer)),m.drawElements(g,h.array.length,k,0),B.info.render.calls++,B.info.render.vertices+=h.array.length;else for(1<c.length&&(b=!0),l=0,p=c.length;l<p;l++)q=c[l].index,b&&(d(e,a,f,q),m.bindBuffer(m.ELEMENT_ARRAY_BUFFER,h.buffer)),m.drawElements(g,c[l].count,k,c[l].start*n),B.info.render.calls++,B.info.re [...]
+c[l].count;else if(b&&d(e,a,f,0),e=f.attributes.position,c=f.offsets,0===c.length)m.drawArrays(g,0,e.array.length/3),B.info.render.calls++,B.info.render.vertices+=e.array.length/3;else for(l=0,p=c.length;l<p;l++)m.drawArrays(g,c[l].index,c[l].count),B.info.render.calls++,B.info.render.vertices+=c[l].count};this.renderBuffer=function(a,b,c,d,e,f){if(!1!==d.visible){t(f);c=v(a,b,c,d,f);b=c.attributes;a=!1;c=e.id+"_"+c.id+"_"+(d.wireframe?1:0);c!==ta&&(ta=c,a=!0);a&&W.initAttributes();if(!d [...]
+0<=b.position)a&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglVertexBuffer),W.enableAttribute(b.position),m.vertexAttribPointer(b.position,3,m.FLOAT,!1,0,0));else if(f.morphTargetBase){c=d.program.attributes;-1!==f.morphTargetBase&&0<=c.position?(m.bindBuffer(m.ARRAY_BUFFER,e.__webglMorphTargetsBuffers[f.morphTargetBase]),W.enableAttribute(c.position),m.vertexAttribPointer(c.position,3,m.FLOAT,!1,0,0)):0<=c.position&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglVertexBuffer),W.enableAttribute(c.positi [...]
+3,m.FLOAT,!1,0,0));if(f.morphTargetForcedOrder.length)for(var h=0,k=f.morphTargetForcedOrder,n=f.morphTargetInfluences,l;h<d.numSupportedMorphTargets&&h<k.length;)l=c["morphTarget"+h],0<=l&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglMorphTargetsBuffers[k[h]]),W.enableAttribute(l),m.vertexAttribPointer(l,3,m.FLOAT,!1,0,0)),l=c["morphNormal"+h],0<=l&&d.morphNormals&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglMorphNormalsBuffers[k[h]]),W.enableAttribute(l),m.vertexAttribPointer(l,3,m.FLOAT,!1,0,0)),f [...]
+n[k[h]],h++;else{k=[];n=f.morphTargetInfluences;h=f.geometry.morphTargets;n.length>h.length&&(console.warn("THREE.WebGLRenderer: Influences array is bigger than morphTargets array."),n.length=h.length);h=0;for(l=n.length;h<l;h++)k.push([n[h],h]);k.length>d.numSupportedMorphTargets?(k.sort(g),k.length=d.numSupportedMorphTargets):k.length>d.numSupportedMorphNormals?k.sort(g):0===k.length&&k.push([0,0]);for(var h=0,p=d.numSupportedMorphTargets;h<p;h++)if(k[h]){var q=k[h][1];l=c["morphTarget [...]
+(m.bindBuffer(m.ARRAY_BUFFER,e.__webglMorphTargetsBuffers[q]),W.enableAttribute(l),m.vertexAttribPointer(l,3,m.FLOAT,!1,0,0));l=c["morphNormal"+h];0<=l&&d.morphNormals&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglMorphNormalsBuffers[q]),W.enableAttribute(l),m.vertexAttribPointer(l,3,m.FLOAT,!1,0,0));f.__webglMorphTargetInfluences[h]=n[q]}else f.__webglMorphTargetInfluences[h]=0}null!==d.program.uniforms.morphTargetInfluences&&m.uniform1fv(d.program.uniforms.morphTargetInfluences,f.__webglMorph [...]
+0,n=e.__webglCustomAttributesList.length;c<n;c++)k=e.__webglCustomAttributesList[c],0<=b[k.buffer.belongsToAttribute]&&(m.bindBuffer(m.ARRAY_BUFFER,k.buffer),W.enableAttribute(b[k.buffer.belongsToAttribute]),m.vertexAttribPointer(b[k.buffer.belongsToAttribute],k.size,m.FLOAT,!1,0,0));0<=b.color&&(0<f.geometry.colors.length||0<f.geometry.faces.length?(m.bindBuffer(m.ARRAY_BUFFER,e.__webglColorBuffer),W.enableAttribute(b.color),m.vertexAttribPointer(b.color,3,m.FLOAT,!1,0,0)):void 0!==d.de [...]
+m.vertexAttrib3fv(b.color,d.defaultAttributeValues.color));0<=b.normal&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglNormalBuffer),W.enableAttribute(b.normal),m.vertexAttribPointer(b.normal,3,m.FLOAT,!1,0,0));0<=b.tangent&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglTangentBuffer),W.enableAttribute(b.tangent),m.vertexAttribPointer(b.tangent,4,m.FLOAT,!1,0,0));0<=b.uv&&(f.geometry.faceVertexUvs[0]?(m.bindBuffer(m.ARRAY_BUFFER,e.__webglUVBuffer),W.enableAttribute(b.uv),m.vertexAttribPointer(b.uv,2,m.FL [...]
+0)):void 0!==d.defaultAttributeValues&&m.vertexAttrib2fv(b.uv,d.defaultAttributeValues.uv));0<=b.uv2&&(f.geometry.faceVertexUvs[1]?(m.bindBuffer(m.ARRAY_BUFFER,e.__webglUV2Buffer),W.enableAttribute(b.uv2),m.vertexAttribPointer(b.uv2,2,m.FLOAT,!1,0,0)):void 0!==d.defaultAttributeValues&&m.vertexAttrib2fv(b.uv2,d.defaultAttributeValues.uv2));d.skinning&&0<=b.skinIndex&&0<=b.skinWeight&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglSkinIndicesBuffer),W.enableAttribute(b.skinIndex),m.vertexAttribPoi [...]
+4,m.FLOAT,!1,0,0),m.bindBuffer(m.ARRAY_BUFFER,e.__webglSkinWeightsBuffer),W.enableAttribute(b.skinWeight),m.vertexAttribPointer(b.skinWeight,4,m.FLOAT,!1,0,0));0<=b.lineDistance&&(m.bindBuffer(m.ARRAY_BUFFER,e.__webglLineDistanceBuffer),W.enableAttribute(b.lineDistance),m.vertexAttribPointer(b.lineDistance,1,m.FLOAT,!1,0,0))}W.disableUnusedAttributes();f instanceof THREE.Mesh?(f=e.__typeArray===Uint32Array?m.UNSIGNED_INT:m.UNSIGNED_SHORT,d.wireframe?(W.setLineWidth(d.wireframeLinewidth*H [...]
+e.__webglLineBuffer),m.drawElements(m.LINES,e.__webglLineCount,f,0)):(a&&m.bindBuffer(m.ELEMENT_ARRAY_BUFFER,e.__webglFaceBuffer),m.drawElements(m.TRIANGLES,e.__webglFaceCount,f,0)),B.info.render.calls++,B.info.render.vertices+=e.__webglFaceCount,B.info.render.faces+=e.__webglFaceCount/3):f instanceof THREE.Line?(f=f.mode===THREE.LineStrip?m.LINE_STRIP:m.LINES,W.setLineWidth(d.linewidth*H),m.drawArrays(f,0,e.__webglLineCount),B.info.render.calls++):f instanceof THREE.PointCloud&&(m.drawA [...]
+0,e.__webglParticleCount),B.info.render.calls++,B.info.render.points+=e.__webglParticleCount)}};this.render=function(a,b,c,d){if(!1===b instanceof THREE.Camera)THREE.error("THREE.WebGLRenderer.render: camera is not an instance of THREE.Camera.");else{var g=a.fog;ta="";ub=-1;vb=null;Ob=!0;!0===a.autoUpdate&&a.updateMatrixWorld();void 0===b.parent&&b.updateMatrixWorld();a.traverse(function(a){a instanceof THREE.SkinnedMesh&&a.skeleton.update()});b.matrixWorldInverse.getInverse(b.matrixWorl [...]
+b.matrixWorldInverse);cb.setFromMatrix(db);ca.length=0;Ka.length=0;Qa.length=0;Xa.length=0;Ya.length=0;h(a);!0===B.sortObjects&&(Ka.sort(e),Qa.sort(f));zc.render(a,b);B.info.render.calls=0;B.info.render.vertices=0;B.info.render.faces=0;B.info.render.points=0;this.setRenderTarget(c);(this.autoClear||d)&&this.clear(this.autoClearColor,this.autoClearDepth,this.autoClearStencil);d=0;for(var n=qa.length;d<n;d++){var m=qa[d],q=m.object;q.visible&&(w(q,b),p(m))}a.overrideMaterial?(d=a.overrideM [...]
+k(Ka,b,ca,g,d),k(Qa,b,ca,g,d),l(qa,"",b,ca,g,d)):(W.setBlending(THREE.NoBlending),k(Ka,b,ca,g,null),l(qa,"opaque",b,ca,g,null),k(Qa,b,ca,g,null),l(qa,"transparent",b,ca,g,null));Ac.render(a,b);Bc.render(a,b,Xb,fc);c&&c.generateMipmaps&&c.minFilter!==THREE.NearestFilter&&c.minFilter!==THREE.LinearFilter&&F(c);W.setDepthTest(!0);W.setDepthWrite(!0);W.setColorWrite(!0)}};this.renderImmediateObject=function(a,b,c,d,e){var f=v(a,b,c,d,e);ta="";B.setMaterialFaces(d);e.immediateRenderCallback?e [...]
+m,cb):e.render(function(a){B.renderBufferImmediate(a,f,d)})};var Ua={},Qb=0,pc={MeshDepthMaterial:"depth",MeshNormalMaterial:"normal",MeshBasicMaterial:"basic",MeshLambertMaterial:"lambert",MeshPhongMaterial:"phong",LineBasicMaterial:"basic",LineDashedMaterial:"dashed",PointCloudMaterial:"particle_basic"};this.setFaceCulling=function(a,b){a===THREE.CullFaceNone?m.disable(m.CULL_FACE):(b===THREE.FrontFaceDirectionCW?m.frontFace(m.CW):m.frontFace(m.CCW),a===THREE.CullFaceBack?m.cullFace(m. [...]
+THREE.CullFaceFront?m.cullFace(m.FRONT):m.cullFace(m.FRONT_AND_BACK),m.enable(m.CULL_FACE))};this.setMaterialFaces=function(a){W.setDoubleSided(a.side===THREE.DoubleSide);W.setFlipSided(a.side===THREE.BackSide)};this.uploadTexture=function(a){void 0===a.__webglInit&&(a.__webglInit=!0,a.addEventListener("dispose",Pb),a.__webglTexture=m.createTexture(),B.info.memory.textures++);m.bindTexture(m.TEXTURE_2D,a.__webglTexture);m.pixelStorei(m.UNPACK_FLIP_Y_WEBGL,a.flipY);m.pixelStorei(m.UNPACK_ [...]
+a.premultiplyAlpha);m.pixelStorei(m.UNPACK_ALIGNMENT,a.unpackAlignment);a.image=E(a.image,tc);var b=a.image,c=THREE.Math.isPowerOfTwo(b.width)&&THREE.Math.isPowerOfTwo(b.height),d=I(a.format),e=I(a.type);A(m.TEXTURE_2D,a,c);var f=a.mipmaps;if(a instanceof THREE.DataTexture)if(0<f.length&&c){for(var g=0,h=f.length;g<h;g++)b=f[g],m.texImage2D(m.TEXTURE_2D,g,d,b.width,b.height,0,d,e,b.data);a.generateMipmaps=!1}else m.texImage2D(m.TEXTURE_2D,0,d,b.width,b.height,0,d,e,b.data);else if(a inst [...]
+0,h=f.length;g<h;g++)b=f[g],a.format!==THREE.RGBAFormat&&a.format!==THREE.RGBFormat?-1<kc().indexOf(d)?m.compressedTexImage2D(m.TEXTURE_2D,g,d,b.width,b.height,0,b.data):THREE.warn("THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .uploadTexture()"):m.texImage2D(m.TEXTURE_2D,g,d,b.width,b.height,0,d,e,b.data);else if(0<f.length&&c){g=0;for(h=f.length;g<h;g++)b=f[g],m.texImage2D(m.TEXTURE_2D,g,d,d,e,b);a.generateMipmaps=!1}else m.texImage2D(m.TEXTURE_2D,0,d,d, [...]
+a.generateMipmaps&&c&&m.generateMipmap(m.TEXTURE_2D);a.needsUpdate=!1;if(a.onUpdate)a.onUpdate()};this.setTexture=function(a,b){m.activeTexture(m.TEXTURE0+b);a.needsUpdate?B.uploadTexture(a):m.bindTexture(m.TEXTURE_2D,a.__webglTexture)};this.setRenderTarget=function(a){var b=a instanceof THREE.WebGLRenderTargetCube;if(a&&void 0===a.__webglFramebuffer){void 0===a.depthBuffer&&(a.depthBuffer=!0);void 0===a.stencilBuffer&&(a.stencilBuffer=!0);a.addEventListener("dispose",oc);a.__webglTextur [...]
+B.info.memory.textures++;var c=THREE.Math.isPowerOfTwo(a.width)&&THREE.Math.isPowerOfTwo(a.height),d=I(a.format),e=I(a.type);if(b){a.__webglFramebuffer=[];a.__webglRenderbuffer=[];m.bindTexture(m.TEXTURE_CUBE_MAP,a.__webglTexture);A(m.TEXTURE_CUBE_MAP,a,c);for(var f=0;6>f;f++){a.__webglFramebuffer[f]=m.createFramebuffer();a.__webglRenderbuffer[f]=m.createRenderbuffer();m.texImage2D(m.TEXTURE_CUBE_MAP_POSITIVE_X+f,0,d,a.width,a.height,0,d,e,null);var g=a,h=m.TEXTURE_CUBE_MAP_POSITIVE_X+f; [...]
+a.__webglFramebuffer[f]);m.framebufferTexture2D(m.FRAMEBUFFER,m.COLOR_ATTACHMENT0,h,g.__webglTexture,0);G(a.__webglRenderbuffer[f],a)}c&&m.generateMipmap(m.TEXTURE_CUBE_MAP)}else a.__webglFramebuffer=m.createFramebuffer(),a.__webglRenderbuffer=a.shareDepthFrom?a.shareDepthFrom.__webglRenderbuffer:m.createRenderbuffer(),m.bindTexture(m.TEXTURE_2D,a.__webglTexture),A(m.TEXTURE_2D,a,c),m.texImage2D(m.TEXTURE_2D,0,d,a.width,a.height,0,d,e,null),d=m.TEXTURE_2D,m.bindFramebuffer(m.FRAMEBUFFER, [...]
+m.framebufferTexture2D(m.FRAMEBUFFER,m.COLOR_ATTACHMENT0,d,a.__webglTexture,0),a.shareDepthFrom?a.depthBuffer&&!a.stencilBuffer?m.framebufferRenderbuffer(m.FRAMEBUFFER,m.DEPTH_ATTACHMENT,m.RENDERBUFFER,a.__webglRenderbuffer):a.depthBuffer&&a.stencilBuffer&&m.framebufferRenderbuffer(m.FRAMEBUFFER,m.DEPTH_STENCIL_ATTACHMENT,m.RENDERBUFFER,a.__webglRenderbuffer):G(a.__webglRenderbuffer,a),c&&m.generateMipmap(m.TEXTURE_2D);b?m.bindTexture(m.TEXTURE_CUBE_MAP,null):m.bindTexture(m.TEXTURE_2D,n [...]
+null);m.bindFramebuffer(m.FRAMEBUFFER,null)}a?(b=b?a.__webglFramebuffer[a.activeCubeFace]:a.__webglFramebuffer,c=a.width,a=a.height,e=d=0):(b=null,c=pb,a=qb,d=ib,e=bb);b!==ab&&(m.bindFramebuffer(m.FRAMEBUFFER,b),m.viewport(d,e,c,a),ab=b);Xb=c;fc=a};this.readRenderTargetPixels=function(a,b,c,d,e,f){if(!(a instanceof THREE.WebGLRenderTarget))console.error("THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not THREE.WebGLRenderTarget.");else if(a.__webglFramebuffer)if(a.format!==T [...]
+else{var g=!1;a.__webglFramebuffer!==ab&&(m.bindFramebuffer(m.FRAMEBUFFER,a.__webglFramebuffer),g=!0);m.checkFramebufferStatus(m.FRAMEBUFFER)===m.FRAMEBUFFER_COMPLETE?m.readPixels(b,c,d,e,m.RGBA,m.UNSIGNED_BYTE,f):console.error("THREE.WebGLRenderer.readRenderTargetPixels: readPixels from renderTarget failed. Framebuffer not complete.");g&&m.bindFramebuffer(m.FRAMEBUFFER,ab)}};this.initMaterial=function(){THREE.warn("THREE.WebGLRenderer: .initMaterial() has been removed.")};this.addPrePlu [...]
+this.addPostPlugin=function(){THREE.warn("THREE.WebGLRenderer: .addPostPlugin() has been removed.")};this.updateShadowMap=function(){THREE.warn("THREE.WebGLRenderer: .updateShadowMap() has been removed.")}};
+THREE.WebGLRenderTarget=function(a,b,c){this.width=a;this.height=b;c=c||{};this.wrapS=void 0!==c.wrapS?c.wrapS:THREE.ClampToEdgeWrapping;this.wrapT=void 0!==c.wrapT?c.wrapT:THREE.ClampToEdgeWrapping;this.magFilter=void 0!==c.magFilter?c.magFilter:THREE.LinearFilter;this.minFilter=void 0!==c.minFilter?c.minFilter:THREE.LinearMipMapLinearFilter;this.anisotropy=void 0!==c.anisotropy?c.anisotropy:1;this.offset=new THREE.Vector2(0,0);this.repeat=new THREE.Vector2(1,1);this.format=void 0!==c.f [...]
+THREE.RGBAFormat;this.type=void 0!==c.type?c.type:THREE.UnsignedByteType;this.depthBuffer=void 0!==c.depthBuffer?c.depthBuffer:!0;this.stencilBuffer=void 0!==c.stencilBuffer?c.stencilBuffer:!0;this.generateMipmaps=!0;this.shareDepthFrom=void 0!==c.shareDepthFrom?c.shareDepthFrom:null};
+THREE.WebGLRenderTarget.prototype={constructor:THREE.WebGLRenderTarget,setSize:function(a,b){this.width=a;this.height=b},clone:function(){var a=new THREE.WebGLRenderTarget(this.width,this.height);a.wrapS=this.wrapS;a.wrapT=this.wrapT;a.magFilter=this.magFilter;a.minFilter=this.minFilter;a.anisotropy=this.anisotropy;a.offset.copy(this.offset);a.repeat.copy(this.repeat);a.format=this.format;a.type=this.type;a.depthBuffer=this.depthBuffer;a.stencilBuffer=this.stencilBuffer;a.generateMipmaps [...]
+a.shareDepthFrom=this.shareDepthFrom;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.WebGLRenderTarget.prototype);THREE.WebGLRenderTargetCube=function(a,b,c){THREE.WebGLRenderTarget.call(this,a,b,c);this.activeCubeFace=0};THREE.WebGLRenderTargetCube.prototype=Object.create(THREE.WebGLRenderTarget.prototype);THREE.WebGLRenderTargetCube.prototype.constructor=THREE.WebGLRenderTargetCube;
+THREE.WebGLExtensions=function(a){var b={};this.get=function(c){if(void 0!==b[c])return b[c];var d;switch(c){case "EXT_texture_filter_anisotropic":d=a.getExtension("EXT_texture_filter_anisotropic")||a.getExtension("MOZ_EXT_texture_filter_anisotropic")||a.getExtension("WEBKIT_EXT_texture_filter_anisotropic");break;case "WEBGL_compressed_texture_s3tc":d=a.getExtension("WEBGL_compressed_texture_s3tc")||a.getExtension("MOZ_WEBGL_compressed_texture_s3tc")||a.getExtension("WEBKIT_WEBGL_compres [...]
+break;case "WEBGL_compressed_texture_pvrtc":d=a.getExtension("WEBGL_compressed_texture_pvrtc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_pvrtc");break;default:d=a.getExtension(c)}null===d&&THREE.warn("THREE.WebGLRenderer: "+c+" extension not supported.");return b[c]=d}};
+THREE.WebGLProgram=function(){var a=0;return function(b,c,d,e){var f=b.context,g=d.defines,h=d.__webglShader.uniforms,k=d.attributes,l=d.__webglShader.vertexShader,p=d.__webglShader.fragmentShader,q=d.index0AttributeName;void 0===q&&!0===e.morphTargets&&(q="position");var n="SHADOWMAP_TYPE_BASIC";e.shadowMapType===THREE.PCFShadowMap?n="SHADOWMAP_TYPE_PCF":e.shadowMapType===THREE.PCFSoftShadowMap&&(n="SHADOWMAP_TYPE_PCF_SOFT");var t="ENVMAP_TYPE_CUBE",r="ENVMAP_MODE_REFLECTION",s="ENVMAP_ [...]
+if(e.envMap){switch(d.envMap.mapping){case THREE.CubeReflectionMapping:case THREE.CubeRefractionMapping:t="ENVMAP_TYPE_CUBE";break;case THREE.EquirectangularReflectionMapping:case THREE.EquirectangularRefractionMapping:t="ENVMAP_TYPE_EQUIREC";break;case THREE.SphericalReflectionMapping:t="ENVMAP_TYPE_SPHERE"}switch(d.envMap.mapping){case THREE.CubeRefractionMapping:case THREE.EquirectangularRefractionMapping:r="ENVMAP_MODE_REFRACTION"}switch(d.combine){case THREE.MultiplyOperation:s="ENV [...]
+break;case THREE.MixOperation:s="ENVMAP_BLENDING_MIX";break;case THREE.AddOperation:s="ENVMAP_BLENDING_ADD"}}var u=0<b.gammaFactor?b.gammaFactor:1,v,x;v=[];for(var D in g)x=g[D],!1!==x&&(x="#define "+D+" "+x,v.push(x));v=v.join("\n");g=f.createProgram();d instanceof THREE.RawShaderMaterial?b=d="":(d=["precision "+e.precision+" float;","precision "+e.precision+" int;",v,e.supportsVertexTextures?"#define VERTEX_TEXTURES":"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA [...]
+"","#define GAMMA_FACTOR "+u,"#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,"#define MAX_BONES "+e.maxBones,e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.envMap?"#define "+r:"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_ [...]
+"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.flatShading?"#define FLAT_SHADED":"",e.skinning?"#define USE_SKINNING":"",e.useVertexTexture?"#define BONE_TEXTURE":"",e.morphTargets?"#define USE_MORPHTARGETS":"",e.morphNormals?"#define USE_MORPHNORMALS":"",e.wrapAround?"#define WRAP_AROUND":"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP":"",e.shadowMapEnabled?"#define "+n:"",e.shadow [...]
+"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.sizeAttenuation?"#define USE_SIZEATTENUATION":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 modelMatrix;\nuniform mat4 modelViewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat4 viewMatrix;\nuniform mat3 normalMatrix;\nuniform vec3 cameraPosition;\nattribute vec3 position;\nattribute vec3 normal;\nattribute vec2 uv;\nattribute vec2 uv2;\n#ifdef USE_COLOR\n\tattribute vec3 color;\ [...]
+b=["precision "+e.precision+" float;","precision "+e.precision+" int;",e.bumpMap||e.normalMap||e.flatShading?"#extension GL_OES_standard_derivatives : enable":"",v,"#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,e.alphaTest?"#define ALPHATEST "+e.alphaTest:"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPU [...]
+u,e.useFog&&e.fog?"#define USE_FOG":"",e.useFog&&e.fogExp?"#define FOG_EXP2":"",e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.envMap?"#define "+t:"",e.envMap?"#define "+r:"",e.envMap?"#define "+s:"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.flatShading?"#define FLAT_SHADED":"",e.metal?" [...]
+"",e.wrapAround?"#define WRAP_AROUND":"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP":"",e.shadowMapEnabled?"#define "+n:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 viewMatrix;\nuniform vec3 cameraPosition;\n"].join("\n"));l=new THREE.WebGLShader(f,f.VERTEX_SHADER,d+l);p=new THREE.WebGLShader(f [...]
+b+p);f.attachShader(g,l);f.attachShader(g,p);void 0!==q&&f.bindAttribLocation(g,0,q);f.linkProgram(g);q=f.getProgramInfoLog(g);!1===f.getProgramParameter(g,f.LINK_STATUS)&&THREE.error("THREE.WebGLProgram: shader error: "+f.getError(),"gl.VALIDATE_STATUS",f.getProgramParameter(g,f.VALIDATE_STATUS),"gl.getProgramInfoLog",q);""!==q&&THREE.warn("THREE.WebGLProgram: gl.getProgramInfoLog()"+q);f.deleteShader(l);f.deleteShader(p);q="viewMatrix modelViewMatrix projectionMatrix normalMatrix model [...]
+e.useVertexTexture?(q.push("boneTexture"),q.push("boneTextureWidth"),q.push("boneTextureHeight")):q.push("boneGlobalMatrices");e.logarithmicDepthBuffer&&q.push("logDepthBufFC");for(var w in h)q.push(w);h=q;w={};q=0;for(b=h.length;q<b;q++)n=h[q],w[n]=f.getUniformLocation(g,n);this.uniforms=w;q="position normal uv uv2 tangent color skinIndex skinWeight lineDistance".split(" ");for(h=0;h<e.maxMorphTargets;h++)q.push("morphTarget"+h);for(h=0;h<e.maxMorphNormals;h++)q.push("morphNormal"+h);fo [...]
+e=q;k={};y=0;for(h=e.length;y<h;y++)w=e[y],k[w]=f.getAttribLocation(g,w);this.attributes=k;this.attributesKeys=Object.keys(this.attributes);this.id=a++;this.code=c;this.usedTimes=1;this.program=g;this.vertexShader=l;this.fragmentShader=p;return this}}();
+THREE.WebGLShader=function(){var a=function(a){a=a.split("\n");for(var c=0;c<a.length;c++)a[c]=c+1+": "+a[c];return a.join("\n")};return function(b,c,d){c=b.createShader(c);b.shaderSource(c,d);b.compileShader(c);!1===b.getShaderParameter(c,b.COMPILE_STATUS)&&THREE.error("THREE.WebGLShader: Shader couldn't compile.");""!==b.getShaderInfoLog(c)&&THREE.warn("THREE.WebGLShader: gl.getShaderInfoLog()",b.getShaderInfoLog(c),a(d));return c}}();
+THREE.WebGLState=function(a,b){var c=new Uint8Array(16),d=new Uint8Array(16),e=null,f=null,g=null,h=null,k=null,l=null,p=null,q=null,n=null,t=null,r=null,s=null,u=null,v=null,x=null,D=null;this.initAttributes=function(){for(var a=0,b=c.length;a<b;a++)c[a]=0};this.enableAttribute=function(b){c[b]=1;0===d[b]&&(a.enableVertexAttribArray(b),d[b]=1)};this.disableUnusedAttributes=function(){for(var b=0,e=d.length;b<e;b++)d[b]!==c[b]&&(a.disableVertexAttribArray(b),d[b]=0)};this.setBlending=fun [...]
+n,q,r,s,t){c!==e&&(c===THREE.NoBlending?a.disable(a.BLEND):c===THREE.AdditiveBlending?(a.enable(a.BLEND),a.blendEquation(a.FUNC_ADD),a.blendFunc(a.SRC_ALPHA,a.ONE)):c===THREE.SubtractiveBlending?(a.enable(a.BLEND),a.blendEquation(a.FUNC_ADD),a.blendFunc(a.ZERO,a.ONE_MINUS_SRC_COLOR)):c===THREE.MultiplyBlending?(a.enable(a.BLEND),a.blendEquation(a.FUNC_ADD),a.blendFunc(a.ZERO,a.SRC_COLOR)):c===THREE.CustomBlending?a.enable(a.BLEND):(a.enable(a.BLEND),a.blendEquationSeparate(a.FUNC_ADD,a.F [...]
+a.blendFuncSeparate(a.SRC_ALPHA,a.ONE_MINUS_SRC_ALPHA,a.ONE,a.ONE_MINUS_SRC_ALPHA)),e=c);if(c===THREE.CustomBlending){r=r||d;s=s||n;t=t||q;if(d!==f||r!==k)a.blendEquationSeparate(b(d),b(r)),f=d,k=r;if(n!==g||q!==h||s!==l||t!==p)a.blendFuncSeparate(b(n),b(q),b(s),b(t)),g=n,h=q,l=s,p=t}else p=l=k=h=g=f=null};this.setDepthTest=function(b){q!==b&&(b?a.enable(a.DEPTH_TEST):a.disable(a.DEPTH_TEST),q=b)};this.setDepthWrite=function(b){n!==b&&(a.depthMask(b),n=b)};this.setColorWrite=function(b){ [...]
+b,b,b),t=b)};this.setDoubleSided=function(b){r!==b&&(b?a.disable(a.CULL_FACE):a.enable(a.CULL_FACE),r=b)};this.setFlipSided=function(b){s!==b&&(b?a.frontFace(a.CW):a.frontFace(a.CCW),s=b)};this.setLineWidth=function(b){b!==u&&(a.lineWidth(b),u=b)};this.setPolygonOffset=function(b,c,d){v!==b&&(b?a.enable(a.POLYGON_OFFSET_FILL):a.disable(a.POLYGON_OFFSET_FILL),v=b);!b||x===c&&D===d||(a.polygonOffset(c,d),x=c,D=d)};this.reset=function(){for(var a=0;a<d.length;a++)d[a]=0;s=r=t=n=q=e=null}};
+THREE.LensFlarePlugin=function(a,b){var c,d,e,f,g,h,k,l,p,q,n=a.context,t,r,s,u,v,x;this.render=function(D,w,y,A){if(0!==b.length){D=new THREE.Vector3;var E=A/y,G=.5*y,F=.5*A,z=16/A,I=new THREE.Vector2(z*E,z),U=new THREE.Vector3(1,1,0),M=new THREE.Vector2(1,1);if(void 0===s){var z=new Float32Array([-1,-1,0,0,1,-1,1,0,1,1,1,1,-1,1,0,1]),H=new Uint16Array([0,1,2,0,2,3]);t=n.createBuffer();r=n.createBuffer();n.bindBuffer(n.ARRAY_BUFFER,t);n.bufferData(n.ARRAY_BUFFER,z,n.STATIC_DRAW);n.bindB [...]
+r);n.bufferData(n.ELEMENT_ARRAY_BUFFER,H,n.STATIC_DRAW);v=n.createTexture();x=n.createTexture();n.bindTexture(n.TEXTURE_2D,v);n.texImage2D(n.TEXTURE_2D,0,n.RGB,16,16,0,n.RGB,n.UNSIGNED_BYTE,null);n.texParameteri(n.TEXTURE_2D,n.TEXTURE_WRAP_S,n.CLAMP_TO_EDGE);n.texParameteri(n.TEXTURE_2D,n.TEXTURE_WRAP_T,n.CLAMP_TO_EDGE);n.texParameteri(n.TEXTURE_2D,n.TEXTURE_MAG_FILTER,n.NEAREST);n.texParameteri(n.TEXTURE_2D,n.TEXTURE_MIN_FILTER,n.NEAREST);n.bindTexture(n.TEXTURE_2D,x);n.texImage2D(n.TEX [...]
+n.RGBA,16,16,0,n.RGBA,n.UNSIGNED_BYTE,null);n.texParameteri(n.TEXTURE_2D,n.TEXTURE_WRAP_S,n.CLAMP_TO_EDGE);n.texParameteri(n.TEXTURE_2D,n.TEXTURE_WRAP_T,n.CLAMP_TO_EDGE);n.texParameteri(n.TEXTURE_2D,n.TEXTURE_MAG_FILTER,n.NEAREST);n.texParameteri(n.TEXTURE_2D,n.TEXTURE_MIN_FILTER,n.NEAREST);var z=(u=0<n.getParameter(n.MAX_VERTEX_TEXTURE_IMAGE_UNITS))?{vertexShader:"uniform lowp int renderType;\nuniform vec3 screenPosition;\nuniform vec2 scale;\nuniform float rotation;\nuniform sampler2D  [...]
+fragmentShader:"uniform lowp int renderType;\nuniform sampler2D map;\nuniform float opacity;\nuniform vec3 color;\nvarying vec2 vUV;\nvarying float vVisibility;\nvoid main() {\nif( renderType == 0 ) {\ngl_FragColor = vec4( 1.0, 0.0, 1.0, 0.0 );\n} else if( renderType == 1 ) {\ngl_FragColor = texture2D( map, vUV );\n} else {\nvec4 texture = texture2D( map, vUV );\ntexture.a *= opacity * vVisibility;\ngl_FragColor = texture;\ngl_FragColor.rgb *= color;\n}\n}"}:{vertexShader:"uniform lowp i [...]
+fragmentShader:"precision mediump float;\nuniform lowp int renderType;\nuniform sampler2D map;\nuniform sampler2D occlusionMap;\nuniform float opacity;\nuniform vec3 color;\nvarying vec2 vUV;\nvoid main() {\nif( renderType == 0 ) {\ngl_FragColor = vec4( texture2D( map, vUV ).rgb, 0.0 );\n} else if( renderType == 1 ) {\ngl_FragColor = texture2D( map, vUV );\n} else {\nfloat visibility = texture2D( occlusionMap, vec2( 0.5, 0.1 ) ).a;\nvisibility += texture2D( occlusionMap, vec2( 0.9, 0.5 ) [...]
+H=n.createProgram(),L=n.createShader(n.FRAGMENT_SHADER),P=n.createShader(n.VERTEX_SHADER),N="precision "+a.getPrecision()+" float;\n";n.shaderSource(L,N+z.fragmentShader);n.shaderSource(P,N+z.vertexShader);n.compileShader(L);n.compileShader(P);n.attachShader(H,L);n.attachShader(H,P);n.linkProgram(H);s=H;p=n.getAttribLocation(s,"position");q=n.getAttribLocation(s,"uv");c=n.getUniformLocation(s,"renderType");d=n.getUniformLocation(s,"map");e=n.getUniformLocation(s,"occlusionMap");f=n.getUn [...]
+"opacity");g=n.getUniformLocation(s,"color");h=n.getUniformLocation(s,"scale");k=n.getUniformLocation(s,"rotation");l=n.getUniformLocation(s,"screenPosition")}n.useProgram(s);n.enableVertexAttribArray(p);n.enableVertexAttribArray(q);n.uniform1i(e,0);n.uniform1i(d,1);n.bindBuffer(n.ARRAY_BUFFER,t);n.vertexAttribPointer(p,2,n.FLOAT,!1,16,0);n.vertexAttribPointer(q,2,n.FLOAT,!1,16,8);n.bindBuffer(n.ELEMENT_ARRAY_BUFFER,r);n.disable(n.CULL_FACE);n.depthMask(!1);H=0;for(L=b.length;H<L;H++)if( [...]
+E,z),P=b[H],D.set(P.matrixWorld.elements[12],P.matrixWorld.elements[13],P.matrixWorld.elements[14]),D.applyMatrix4(w.matrixWorldInverse),D.applyProjection(w.projectionMatrix),U.copy(D),M.x=U.x*G+G,M.y=U.y*F+F,u||0<M.x&&M.x<y&&0<M.y&&M.y<A){n.activeTexture(n.TEXTURE1);n.bindTexture(n.TEXTURE_2D,v);n.copyTexImage2D(n.TEXTURE_2D,0,n.RGB,M.x-8,M.y-8,16,16,0);n.uniform1i(c,0);n.uniform2f(h,I.x,I.y);n.uniform3f(l,U.x,U.y,U.z);n.disable(n.BLEND);n.enable(n.DEPTH_TEST);n.drawElements(n.TRIANGLES [...]
+0);n.activeTexture(n.TEXTURE0);n.bindTexture(n.TEXTURE_2D,x);n.copyTexImage2D(n.TEXTURE_2D,0,n.RGBA,M.x-8,M.y-8,16,16,0);n.uniform1i(c,1);n.disable(n.DEPTH_TEST);n.activeTexture(n.TEXTURE1);n.bindTexture(n.TEXTURE_2D,v);n.drawElements(n.TRIANGLES,6,n.UNSIGNED_SHORT,0);P.positionScreen.copy(U);P.customUpdateCallback?P.customUpdateCallback(P):P.updateLensFlares();n.uniform1i(c,2);n.enable(n.BLEND);for(var N=0,R=P.lensFlares.length;N<R;N++){var V=P.lensFlares[N];.001<V.opacity&&.001<V.scale [...]
+U.y=V.y,U.z=V.z,z=V.size*V.scale/A,I.x=z*E,I.y=z,n.uniform3f(l,U.x,U.y,U.z),n.uniform2f(h,I.x,I.y),n.uniform1f(k,V.rotation),n.uniform1f(f,V.opacity),n.uniform3f(g,V.color.r,V.color.g,V.color.b),a.state.setBlending(V.blending,V.blendEquation,V.blendSrc,V.blendDst),a.setTexture(V.texture,1),n.drawElements(n.TRIANGLES,6,n.UNSIGNED_SHORT,0))}}n.enable(n.CULL_FACE);n.enable(n.DEPTH_TEST);n.depthMask(!0);a.resetGLState()}}};
+THREE.ShadowMapPlugin=function(a,b,c,d){function e(a,b,d){if(b.visible){var f=c[b.id];if(f&&b.castShadow&&(!1===b.frustumCulled||!0===p.intersectsObject(b)))for(var g=0,h=f.length;g<h;g++){var k=f[g];b._modelViewMatrix.multiplyMatrices(d.matrixWorldInverse,b.matrixWorld);s.push(k)}g=0;for(h=b.children.length;g<h;g++)e(a,b.children[g],d)}}var f=a.context,g,h,k,l,p=new THREE.Frustum,q=new THREE.Matrix4,n=new THREE.Vector3,t=new THREE.Vector3,r=new THREE.Vector3,s=[],u=THREE.ShaderLib.depth [...]
+g=new THREE.ShaderMaterial({uniforms:v,vertexShader:u.vertexShader,fragmentShader:u.fragmentShader});h=new THREE.ShaderMaterial({uniforms:v,vertexShader:u.vertexShader,fragmentShader:u.fragmentShader,morphTargets:!0});k=new THREE.ShaderMaterial({uniforms:v,vertexShader:u.vertexShader,fragmentShader:u.fragmentShader,skinning:!0});l=new THREE.ShaderMaterial({uniforms:v,vertexShader:u.vertexShader,fragmentShader:u.fragmentShader,morphTargets:!0,skinning:!0});g._shadowPass=!0;h._shadowPass=! [...]
+!0;l._shadowPass=!0;this.render=function(c,v){if(!1!==a.shadowMapEnabled){var u,y,A,E,G,F,z,I,U=[];E=0;f.clearColor(1,1,1,1);f.disable(f.BLEND);f.enable(f.CULL_FACE);f.frontFace(f.CCW);a.shadowMapCullFace===THREE.CullFaceFront?f.cullFace(f.FRONT):f.cullFace(f.BACK);a.state.setDepthTest(!0);u=0;for(y=b.length;u<y;u++)if(A=b[u],A.castShadow)if(A instanceof THREE.DirectionalLight&&A.shadowCascade)for(G=0;G<A.shadowCascadeCount;G++){var M;if(A.shadowCascadeArray[G])M=A.shadowCascadeArray[G]; [...]
+var H=G;M=new THREE.DirectionalLight;M.isVirtual=!0;M.onlyShadow=!0;M.castShadow=!0;M.shadowCameraNear=z.shadowCameraNear;M.shadowCameraFar=z.shadowCameraFar;M.shadowCameraLeft=z.shadowCameraLeft;M.shadowCameraRight=z.shadowCameraRight;M.shadowCameraBottom=z.shadowCameraBottom;M.shadowCameraTop=z.shadowCameraTop;M.shadowCameraVisible=z.shadowCameraVisible;M.shadowDarkness=z.shadowDarkness;M.shadowBias=z.shadowCascadeBias[H];M.shadowMapWidth=z.shadowCascadeWidth[H];M.shadowMapHeight=z.sha [...]
+M.pointsWorld=[];M.pointsFrustum=[];I=M.pointsWorld;F=M.pointsFrustum;for(var L=0;8>L;L++)I[L]=new THREE.Vector3,F[L]=new THREE.Vector3;I=z.shadowCascadeNearZ[H];z=z.shadowCascadeFarZ[H];F[0].set(-1,-1,I);F[1].set(1,-1,I);F[2].set(-1,1,I);F[3].set(1,1,I);F[4].set(-1,-1,z);F[5].set(1,-1,z);F[6].set(-1,1,z);F[7].set(1,1,z);M.originalCamera=v;F=new THREE.Gyroscope;F.position.copy(A.shadowCascadeOffset);F.add(M);F.add(M.target);v.add(F);A.shadowCascadeArray[G]=M}H=A;I=G;z=H.shadowCascadeArra [...]
+z.target.position.copy(H.target.position);z.lookAt(z.target);z.shadowCameraVisible=H.shadowCameraVisible;z.shadowDarkness=H.shadowDarkness;z.shadowBias=H.shadowCascadeBias[I];F=H.shadowCascadeNearZ[I];H=H.shadowCascadeFarZ[I];z=z.pointsFrustum;z[0].z=F;z[1].z=F;z[2].z=F;z[3].z=F;z[4].z=H;z[5].z=H;z[6].z=H;z[7].z=H;U[E]=M;E++}else U[E]=A,E++;u=0;for(y=U.length;u<y;u++){A=U[u];A.shadowMap||(G=THREE.LinearFilter,a.shadowMapType===THREE.PCFSoftShadowMap&&(G=THREE.NearestFilter),A.shadowMap=n [...]
+A.shadowMapHeight,{minFilter:G,magFilter:G,format:THREE.RGBAFormat}),A.shadowMapSize=new THREE.Vector2(A.shadowMapWidth,A.shadowMapHeight),A.shadowMatrix=new THREE.Matrix4);if(!A.shadowCamera){if(A instanceof THREE.SpotLight)A.shadowCamera=new THREE.PerspectiveCamera(A.shadowCameraFov,A.shadowMapWidth/A.shadowMapHeight,A.shadowCameraNear,A.shadowCameraFar);else if(A instanceof THREE.DirectionalLight)A.shadowCamera=new THREE.OrthographicCamera(A.shadowCameraLeft,A.shadowCameraRight,A.shad [...]
+A.shadowCameraBottom,A.shadowCameraNear,A.shadowCameraFar);else{THREE.error("THREE.ShadowMapPlugin: Unsupported light type for shadow",A);continue}c.add(A.shadowCamera);!0===c.autoUpdate&&c.updateMatrixWorld()}A.shadowCameraVisible&&!A.cameraHelper&&(A.cameraHelper=new THREE.CameraHelper(A.shadowCamera),c.add(A.cameraHelper));if(A.isVirtual&&M.originalCamera==v){G=v;E=A.shadowCamera;F=A.pointsFrustum;z=A.pointsWorld;n.set(Infinity,Infinity,Infinity);t.set(-Infinity,-Infinity,-Infinity);f [...]
+H;H++)I=z[H],I.copy(F[H]),I.unproject(G),I.applyMatrix4(E.matrixWorldInverse),I.x<n.x&&(n.x=I.x),I.x>t.x&&(t.x=I.x),I.y<n.y&&(n.y=I.y),I.y>t.y&&(t.y=I.y),I.z<n.z&&(n.z=I.z),I.z>t.z&&(t.z=I.z);E.left=n.x;E.right=t.x;E.top=t.y;E.bottom=n.y;E.updateProjectionMatrix()}E=A.shadowMap;F=A.shadowMatrix;G=A.shadowCamera;G.position.setFromMatrixPosition(A.matrixWorld);r.setFromMatrixPosition(A.target.matrixWorld);G.lookAt(r);G.updateMatrixWorld();G.matrixWorldInverse.getInverse(G.matrixWorld);A.ca [...]
+(A.cameraHelper.visible=A.shadowCameraVisible);A.shadowCameraVisible&&A.cameraHelper.update();F.set(.5,0,0,.5,0,.5,0,.5,0,0,.5,.5,0,0,0,1);F.multiply(G.projectionMatrix);F.multiply(G.matrixWorldInverse);q.multiplyMatrices(G.projectionMatrix,G.matrixWorldInverse);p.setFromMatrix(q);a.setRenderTarget(E);a.clear();s.length=0;e(c,c,G);A=0;for(E=s.length;A<E;A++)z=s[A],F=z.object,z=z.buffer,H=F.material instanceof THREE.MeshFaceMaterial?F.material.materials[0]:F.material,I=void 0!==F.geometry [...]
+0<F.geometry.morphTargets.length&&H.morphTargets,L=F instanceof THREE.SkinnedMesh&&H.skinning,I=F.customDepthMaterial?F.customDepthMaterial:L?I?l:k:I?h:g,a.setMaterialFaces(H),z instanceof THREE.BufferGeometry?a.renderBufferDirect(G,b,null,I,z,F):a.renderBuffer(G,b,null,I,z,F);A=0;for(E=d.length;A<E;A++)z=d[A],F=z.object,F.visible&&F.castShadow&&(F._modelViewMatrix.multiplyMatrices(G.matrixWorldInverse,F.matrixWorld),a.renderImmediateObject(G,b,null,g,F))}u=a.getClearColor();y=a.getClear [...]
+u.g,u.b,y);f.enable(f.BLEND);a.shadowMapCullFace===THREE.CullFaceFront&&f.cullFace(f.BACK);a.resetGLState()}}};
+THREE.SpritePlugin=function(a,b){var c,d,e,f,g,h,k,l,p,q,n,t,r,s,u,v,x;function D(a,b){return a.z!==b.z?b.z-a.z:b.id-a.id}var w=a.context,y,A,E,G,F=new THREE.Vector3,z=new THREE.Quaternion,I=new THREE.Vector3;this.render=function(U,M){if(0!==b.length){if(void 0===E){var H=new Float32Array([-.5,-.5,0,0,.5,-.5,1,0,.5,.5,1,1,-.5,.5,0,1]),L=new Uint16Array([0,1,2,0,2,3]);y=w.createBuffer();A=w.createBuffer();w.bindBuffer(w.ARRAY_BUFFER,y);w.bufferData(w.ARRAY_BUFFER,H,w.STATIC_DRAW);w.bindBu [...]
+A);w.bufferData(w.ELEMENT_ARRAY_BUFFER,L,w.STATIC_DRAW);var H=w.createProgram(),L=w.createShader(w.VERTEX_SHADER),P=w.createShader(w.FRAGMENT_SHADER);w.shaderSource(L,["precision "+a.getPrecision()+" float;","uniform mat4 modelViewMatrix;\nuniform mat4 projectionMatrix;\nuniform float rotation;\nuniform vec2 scale;\nuniform vec2 uvOffset;\nuniform vec2 uvScale;\nattribute vec2 position;\nattribute vec2 uv;\nvarying vec2 vUV;\nvoid main() {\nvUV = uvOffset + uv * uvScale;\nvec2 alignedPos [...]
+w.shaderSource(P,["precision "+a.getPrecision()+" float;","uniform vec3 color;\nuniform sampler2D map;\nuniform float opacity;\nuniform int fogType;\nuniform vec3 fogColor;\nuniform float fogDensity;\nuniform float fogNear;\nuniform float fogFar;\nuniform float alphaTest;\nvarying vec2 vUV;\nvoid main() {\nvec4 texture = texture2D( map, vUV );\nif ( texture.a < alphaTest ) discard;\ngl_FragColor = vec4( color * texture.xyz, texture.a * opacity );\nif ( fogType > 0 ) {\nfloat depth = gl_F [...]
+w.compileShader(L);w.compileShader(P);w.attachShader(H,L);w.attachShader(H,P);w.linkProgram(H);E=H;v=w.getAttribLocation(E,"position");x=w.getAttribLocation(E,"uv");c=w.getUniformLocation(E,"uvOffset");d=w.getUniformLocation(E,"uvScale");e=w.getUniformLocation(E,"rotation");f=w.getUniformLocation(E,"scale");g=w.getUniformLocation(E,"color");h=w.getUniformLocation(E,"map");k=w.getUniformLocation(E,"opacity");l=w.getUniformLocation(E,"modelViewMatrix");p=w.getUniformLocation(E,"projectionM [...]
+w.getUniformLocation(E,"fogType");n=w.getUniformLocation(E,"fogDensity");t=w.getUniformLocation(E,"fogNear");r=w.getUniformLocation(E,"fogFar");s=w.getUniformLocation(E,"fogColor");u=w.getUniformLocation(E,"alphaTest");H=document.createElement("canvas");H.width=8;H.height=8;L=H.getContext("2d");L.fillStyle="white";L.fillRect(0,0,8,8);G=new THREE.Texture(H);G.needsUpdate=!0}w.useProgram(E);w.enableVertexAttribArray(v);w.enableVertexAttribArray(x);w.disable(w.CULL_FACE);w.enable(w.BLEND);w [...]
+y);w.vertexAttribPointer(v,2,w.FLOAT,!1,16,0);w.vertexAttribPointer(x,2,w.FLOAT,!1,16,8);w.bindBuffer(w.ELEMENT_ARRAY_BUFFER,A);w.uniformMatrix4fv(p,!1,M.projectionMatrix.elements);w.activeTexture(w.TEXTURE0);w.uniform1i(h,0);L=H=0;(P=U.fog)?(w.uniform3f(s,P.color.r,P.color.g,P.color.b),P instanceof THREE.Fog?(w.uniform1f(t,P.near),w.uniform1f(r,P.far),w.uniform1i(q,1),L=H=1):P instanceof THREE.FogExp2&&(w.uniform1f(n,P.density),w.uniform1i(q,2),L=H=2)):(w.uniform1i(q,0),L=H=0);for(var P [...]
+N;P++){var R=b[P];R._modelViewMatrix.multiplyMatrices(M.matrixWorldInverse,R.matrixWorld);R.z=-R._modelViewMatrix.elements[14]}b.sort(D);for(var V=[],P=0,N=b.length;P<N;P++){var R=b[P],J=R.material;w.uniform1f(u,J.alphaTest);w.uniformMatrix4fv(l,!1,R._modelViewMatrix.elements);R.matrixWorld.decompose(F,z,I);V[0]=I.x;V[1]=I.y;R=0;U.fog&&J.fog&&(R=L);H!==R&&(w.uniform1i(q,R),H=R);null!==J.map?(w.uniform2f(c,J.map.offset.x,J.map.offset.y),w.uniform2f(d,J.map.repeat.x,J.map.repeat.y)):(w.uni [...]
+0,0),w.uniform2f(d,1,1));w.uniform1f(k,J.opacity);w.uniform3f(g,J.color.r,J.color.g,J.color.b);w.uniform1f(e,J.rotation);w.uniform2fv(f,V);a.state.setBlending(J.blending,J.blendEquation,J.blendSrc,J.blendDst);a.state.setDepthTest(J.depthTest);a.state.setDepthWrite(J.depthWrite);J.map&&J.map.image&&J.map.image.width?a.setTexture(J.map,0):a.setTexture(G,0);w.drawElements(w.TRIANGLES,6,w.UNSIGNED_SHORT,0)}w.enable(w.CULL_FACE);a.resetGLState()}}};
+THREE.GeometryUtils={merge:function(a,b,c){THREE.warn("THREE.GeometryUtils: .merge() has been moved to Geometry. Use geometry.merge( geometry2, matrix, materialIndexOffset ) instead.");var d;b instanceof THREE.Mesh&&(b.matrixAutoUpdate&&b.updateMatrix(),d=b.matrix,b=b.geometry);a.merge(b,d,c)},center:function(a){THREE.warn("THREE.GeometryUtils: .center() has been moved to Geometry. Use geometry.center() instead.");return a.center()}};
+THREE.ImageUtils={crossOrigin:void 0,loadTexture:function(a,b,c,d){var e=new THREE.ImageLoader;e.crossOrigin=this.crossOrigin;var f=new THREE.Texture(void 0,b);e.load(a,function(a){f.image=a;f.needsUpdate=!0;c&&c(f)},void 0,function(a){d&&d(a)});f.sourceFile=a;return f},loadTextureCube:function(a,b,c,d){var e=new THREE.ImageLoader;e.crossOrigin=this.crossOrigin;var f=new THREE.CubeTexture([],b);f.flipY=!1;var g=0;b=function(b){e.load(a[b],function(a){f.images[b]=a;g+=1;6===g&&(f.needsUpd [...]
+c(f))},void 0,d)};for(var h=0,k=a.length;h<k;++h)b(h);return f},loadCompressedTexture:function(){THREE.error("THREE.ImageUtils.loadCompressedTexture has been removed. Use THREE.DDSLoader instead.")},loadCompressedTextureCube:function(){THREE.error("THREE.ImageUtils.loadCompressedTextureCube has been removed. Use THREE.DDSLoader instead.")},getNormalMap:function(a,b){var c=function(a){var b=Math.sqrt(a[0]*a[0]+a[1]*a[1]+a[2]*a[2]);return[a[0]/b,a[1]/b,a[2]/b]};b|=1;var d=a.width,e=a.heigh [...]
+f.width=d;f.height=e;var g=f.getContext("2d");g.drawImage(a,0,0);for(var h=g.getImageData(0,0,d,e).data,k=g.createImageData(d,e),l=k.data,p=0;p<d;p++)for(var q=0;q<e;q++){var n=0>q-1?0:q-1,t=q+1>e-1?e-1:q+1,r=0>p-1?0:p-1,s=p+1>d-1?d-1:p+1,u=[],v=[0,0,h[4*(q*d+p)]/255*b];u.push([-1,0,h[4*(q*d+r)]/255*b]);u.push([-1,-1,h[4*(n*d+r)]/255*b]);u.push([0,-1,h[4*(n*d+p)]/255*b]);u.push([1,-1,h[4*(n*d+s)]/255*b]);u.push([1,0,h[4*(q*d+s)]/255*b]);u.push([1,1,h[4*(t*d+s)]/255*b]);u.push([0,1,h[4*(t [...]
+b]);u.push([-1,1,h[4*(t*d+r)]/255*b]);n=[];r=u.length;for(t=0;t<r;t++){var s=u[t],x=u[(t+1)%r],s=[s[0]-v[0],s[1]-v[1],s[2]-v[2]],x=[x[0]-v[0],x[1]-v[1],x[2]-v[2]];n.push(c([s[1]*x[2]-s[2]*x[1],s[2]*x[0]-s[0]*x[2],s[0]*x[1]-s[1]*x[0]]))}u=[0,0,0];for(t=0;t<n.length;t++)u[0]+=n[t][0],u[1]+=n[t][1],u[2]+=n[t][2];u[0]/=n.length;u[1]/=n.length;u[2]/=n.length;v=4*(q*d+p);l[v]=(u[0]+1)/2*255|0;l[v+1]=(u[1]+1)/2*255|0;l[v+2]=255*u[2]|0;l[v+3]=255}g.putImageData(k,0,0);return f},generateDataTextu [...]
+b,c){var d=a*b,e=new Uint8Array(3*d),f=Math.floor(255*c.r),g=Math.floor(255*c.g);c=Math.floor(255*c.b);for(var h=0;h<d;h++)e[3*h]=f,e[3*h+1]=g,e[3*h+2]=c;a=new THREE.DataTexture(e,a,b,THREE.RGBFormat);a.needsUpdate=!0;return a}};
+THREE.SceneUtils={createMultiMaterialObject:function(a,b){for(var c=new THREE.Object3D,d=0,e=b.length;d<e;d++)c.add(new THREE.Mesh(a,b[d]));return c},detach:function(a,b,c){a.applyMatrix(b.matrixWorld);b.remove(a);c.add(a)},attach:function(a,b,c){var d=new THREE.Matrix4;d.getInverse(c.matrixWorld);a.applyMatrix(d);b.remove(a);c.add(a)}};
+THREE.FontUtils={faces:{},face:"helvetiker",weight:"normal",style:"normal",size:150,divisions:10,getFace:function(){try{return this.faces[this.face][this.weight][this.style]}catch(a){throw"The font "+this.face+" with "+this.weight+" weight and "+this.style+" style is missing.";}},loadFace:function(a){var b=a.familyName.toLowerCase();this.faces[b]=this.faces[b]||{};this.faces[b][a.cssFontWeight]=this.faces[b][a.cssFontWeight]||{};this.faces[b][a.cssFontWeight][a.cssFontStyle]=a;return thi [...]
+a},drawText:function(a){var b=this.getFace(),c=this.size/b.resolution,d=0,e=String(a).split(""),f=e.length,g=[];for(a=0;a<f;a++){var h=new THREE.Path,h=this.extractGlyphPoints(e[a],b,c,d,h),d=d+h.offset;g.push(h.path)}return{paths:g,offset:d/2}},extractGlyphPoints:function(a,b,c,d,e){var f=[],g,h,k,l,p,q,n,t,r,s,u,v=b.glyphs[a]||b.glyphs["?"];if(v){if(v.o)for(b=v._cachedOutline||(v._cachedOutline=v.o.split(" ")),l=b.length,a=0;a<l;)switch(k=b[a++],k){case "m":k=b[a++]*c+d;p=b[a++]*c;e.mo [...]
+break;case "l":k=b[a++]*c+d;p=b[a++]*c;e.lineTo(k,p);break;case "q":k=b[a++]*c+d;p=b[a++]*c;t=b[a++]*c+d;r=b[a++]*c;e.quadraticCurveTo(t,r,k,p);if(g=f[f.length-1])for(q=g.x,n=g.y,g=1,h=this.divisions;g<=h;g++){var x=g/h;THREE.Shape.Utils.b2(x,q,t,k);THREE.Shape.Utils.b2(x,n,r,p)}break;case "b":if(k=b[a++]*c+d,p=b[a++]*c,t=b[a++]*c+d,r=b[a++]*c,s=b[a++]*c+d,u=b[a++]*c,e.bezierCurveTo(t,r,s,u,k,p),g=f[f.length-1])for(q=g.x,n=g.y,g=1,h=this.divisions;g<=h;g++)x=g/h,THREE.Shape.Utils.b3(x,q, [...]
+n,r,u,p)}return{offset:v.ha*c,path:e}}}};
+THREE.FontUtils.generateShapes=function(a,b){b=b||{};var c=void 0!==b.curveSegments?b.curveSegments:4,d=void 0!==b.font?b.font:"helvetiker",e=void 0!==b.weight?b.weight:"normal",f=void 0!==b.style?b.style:"normal";THREE.FontUtils.size=void 0!==b.size?b.size:100;THREE.FontUtils.divisions=c;THREE.FontUtils.face=d;THREE.FontUtils.weight=e;THREE.FontUtils.style=f;c=THREE.FontUtils.drawText(a).paths;d=[];e=0;for(f=c.length;e<f;e++)Array.prototype.push.apply(d,c[e].toShapes());return d};
+(function(a){var b=function(a){for(var b=a.length,e=0,f=b-1,g=0;g<b;f=g++)e+=a[f].x*a[g].y-a[g].x*a[f].y;return.5*e};a.Triangulate=function(a,d){var e=a.length;if(3>e)return null;var f=[],g=[],h=[],k,l,p;if(0<b(a))for(l=0;l<e;l++)g[l]=l;else for(l=0;l<e;l++)g[l]=e-1-l;var q=2*e;for(l=e-1;2<e;){if(0>=q--){THREE.warn("THREE.FontUtils: Warning, unable to triangulate polygon! in Triangulate.process()");break}k=l;e<=k&&(k=0);l=k+1;e<=l&&(l=0);p=l+1;e<=p&&(p=0);var n;a:{var t=n=void 0,r=void 0 [...]
+u=void 0,v=void 0,x=void 0,D=void 0,w=void 0,t=a[g[k]].x,r=a[g[k]].y,s=a[g[l]].x,u=a[g[l]].y,v=a[g[p]].x,x=a[g[p]].y;if(1E-10>(s-t)*(x-r)-(u-r)*(v-t))n=!1;else{var y=void 0,A=void 0,E=void 0,G=void 0,F=void 0,z=void 0,I=void 0,U=void 0,M=void 0,H=void 0,M=U=I=w=D=void 0,y=v-s,A=x-u,E=t-v,G=r-x,F=s-t,z=u-r;for(n=0;n<e;n++)if(D=a[g[n]].x,w=a[g[n]].y,!(D===t&&w===r||D===s&&w===u||D===v&&w===x)&&(I=D-t,U=w-r,M=D-s,H=w-u,D-=v,w-=x,M=y*H-A*M,I=F*U-z*I,U=E*w-G*D,-1E-10<=M&&-1E-10<=U&&-1E-10<=I) [...]
+!0}}if(n){f.push([a[g[k]],a[g[l]],a[g[p]]]);h.push([g[k],g[l],g[p]]);k=l;for(p=l+1;p<e;k++,p++)g[k]=g[p];e--;q=2*e}}return d?h:f};a.Triangulate.area=b;return a})(THREE.FontUtils);self._typeface_js={faces:THREE.FontUtils.faces,loadFace:THREE.FontUtils.loadFace};THREE.typeface_js=self._typeface_js;
+THREE.Audio=function(a){THREE.Object3D.call(this);this.type="Audio";this.context=a.context;this.source=this.context.createBufferSource();this.source.onended=this.onEnded.bind(this);this.gain=this.context.createGain();this.gain.connect(this.context.destination);this.panner=this.context.createPanner();this.panner.connect(this.gain);this.autoplay=!1;this.startTime=0;this.isPlaying=!1};THREE.Audio.prototype=Object.create(THREE.Object3D.prototype);THREE.Audio.prototype.constructor=THREE.Audio;
+THREE.Audio.prototype.load=function(a){var b=this,c=new XMLHttpRequest;c.open("GET",a,!0);c.responseType="arraybuffer";c.onload=function(a){b.context.decodeAudioData(this.response,function(a){b.source.buffer=a;b.autoplay&&b.play()})};c.send();return this};
+THREE.Audio.prototype.play=function(){if(!0===this.isPlaying)THREE.warn("THREE.Audio: Audio is already playing.");else{var a=this.context.createBufferSource();a.buffer=this.source.buffer;a.loop=this.source.loop;a.onended=this.source.onended;a.connect(this.panner);a.start(0,this.startTime);this.isPlaying=!0;this.source=a}};THREE.Audio.prototype.pause=function(){this.source.stop();this.startTime=this.context.currentTime};THREE.Audio.prototype.stop=function(){this.source.stop();this.startTime=0};
+THREE.Audio.prototype.onEnded=function(){this.isPlaying=!1};THREE.Audio.prototype.setLoop=function(a){this.source.loop=a};THREE.Audio.prototype.setRefDistance=function(a){this.panner.refDistance=a};THREE.Audio.prototype.setRolloffFactor=function(a){this.panner.rolloffFactor=a};THREE.Audio.prototype.setVolume=function(a){this.gain.gain.value=a};
+THREE.Audio.prototype.updateMatrixWorld=function(){var a=new THREE.Vector3;return function(b){THREE.Object3D.prototype.updateMatrixWorld.call(this,b);a.setFromMatrixPosition(this.matrixWorld);this.panner.setPosition(a.x,a.y,a.z)}}();THREE.AudioListener=function(){THREE.Object3D.call(this);this.type="AudioListener";this.context=new (window.AudioContext||window.webkitAudioContext)};THREE.AudioListener.prototype=Object.create(THREE.Object3D.prototype);THREE.AudioListener.prototype.construct [...]
+THREE.AudioListener.prototype.updateMatrixWorld=function(){var a=new THREE.Vector3,b=new THREE.Quaternion,c=new THREE.Vector3,d=new THREE.Vector3,e=new THREE.Vector3,f=new THREE.Vector3;return function(g){THREE.Object3D.prototype.updateMatrixWorld.call(this,g);g=this.context.listener;var h=this.up;this.matrixWorld.decompose(a,b,c);d.set(0,0,-1).applyQuaternion(b);e.subVectors(a,f);g.setPosition(a.x,a.y,a.z);g.setOrientation(d.x,d.y,d.z,h.x,h.y,h.z);g.setVelocity(e.x,e.y,e.z);f.copy(a)}}();
+THREE.Curve=function(){};THREE.Curve.prototype.getPoint=function(a){THREE.warn("THREE.Curve: Warning, getPoint() not implemented!");return null};THREE.Curve.prototype.getPointAt=function(a){a=this.getUtoTmapping(a);return this.getPoint(a)};THREE.Curve.prototype.getPoints=function(a){a||(a=5);var b,c=[];for(b=0;b<=a;b++)c.push(this.getPoint(b/a));return c};THREE.Curve.prototype.getSpacedPoints=function(a){a||(a=5);var b,c=[];for(b=0;b<=a;b++)c.push(this.getPointAt(b/a));return c};
+THREE.Curve.prototype.getLength=function(){var a=this.getLengths();return a[a.length-1]};THREE.Curve.prototype.getLengths=function(a){a||(a=this.__arcLengthDivisions?this.__arcLengthDivisions:200);if(this.cacheArcLengths&&this.cacheArcLengths.length==a+1&&!this.needsUpdate)return this.cacheArcLengths;this.needsUpdate=!1;var b=[],c,d=this.getPoint(0),e,f=0;b.push(0);for(e=1;e<=a;e++)c=this.getPoint(e/a),f+=c.distanceTo(d),b.push(f),d=c;return this.cacheArcLengths=b};
+THREE.Curve.prototype.updateArcLengths=function(){this.needsUpdate=!0;this.getLengths()};THREE.Curve.prototype.getUtoTmapping=function(a,b){var c=this.getLengths(),d=0,e=c.length,f;f=b?b:a*c[e-1];for(var g=0,h=e-1,k;g<=h;)if(d=Math.floor(g+(h-g)/2),k=c[d]-f,0>k)g=d+1;else if(0<k)h=d-1;else{h=d;break}d=h;if(c[d]==f)return d/(e-1);g=c[d];return c=(d+(f-g)/(c[d+1]-g))/(e-1)};THREE.Curve.prototype.getTangent=function(a){var b=a-1E-4;a+=1E-4;0>b&&(b=0);1<a&&(a=1);b=this.getPoint(b);return thi [...]
+THREE.Curve.prototype.getTangentAt=function(a){a=this.getUtoTmapping(a);return this.getTangent(a)};
+THREE.Curve.Utils={tangentQuadraticBezier:function(a,b,c,d){return 2*(1-a)*(c-b)+2*a*(d-c)},tangentCubicBezier:function(a,b,c,d,e){return-3*b*(1-a)*(1-a)+3*c*(1-a)*(1-a)-6*a*c*(1-a)+6*a*d*(1-a)-3*a*a*d+3*a*a*e},tangentSpline:function(a,b,c,d,e){return 6*a*a-6*a+(3*a*a-4*a+1)+(-6*a*a+6*a)+(3*a*a-2*a)},interpolate:function(a,b,c,d,e){a=.5*(c-a);d=.5*(d-b);var f=e*e;return(2*b-2*c+a+d)*e*f+(-3*b+3*c-2*a-d)*f+a*e+b}};
+THREE.Curve.create=function(a,b){a.prototype=Object.create(THREE.Curve.prototype);a.prototype.constructor=a;a.prototype.getPoint=b;return a};THREE.CurvePath=function(){this.curves=[];this.bends=[];this.autoClose=!1};THREE.CurvePath.prototype=Object.create(THREE.Curve.prototype);THREE.CurvePath.prototype.constructor=THREE.CurvePath;THREE.CurvePath.prototype.add=function(a){this.curves.push(a)};THREE.CurvePath.prototype.checkConnection=function(){};
+THREE.CurvePath.prototype.closePath=function(){var a=this.curves[0].getPoint(0),b=this.curves[this.curves.length-1].getPoint(1);a.equals(b)||this.curves.push(new THREE.LineCurve(b,a))};THREE.CurvePath.prototype.getPoint=function(a){var b=a*this.getLength(),c=this.getCurveLengths();for(a=0;a<c.length;){if(c[a]>=b)return b=c[a]-b,a=this.curves[a],b=1-b/a.getLength(),a.getPointAt(b);a++}return null};THREE.CurvePath.prototype.getLength=function(){var a=this.getCurveLengths();return a[a.length-1]};
+THREE.CurvePath.prototype.getCurveLengths=function(){if(this.cacheLengths&&this.cacheLengths.length==this.curves.length)return this.cacheLengths;var a=[],b=0,c,d=this.curves.length;for(c=0;c<d;c++)b+=this.curves[c].getLength(),a.push(b);return this.cacheLengths=a};
+THREE.CurvePath.prototype.getBoundingBox=function(){var a=this.getPoints(),b,c,d,e,f,g;b=c=Number.NEGATIVE_INFINITY;e=f=Number.POSITIVE_INFINITY;var h,k,l,p,q=a[0]instanceof THREE.Vector3;p=q?new THREE.Vector3:new THREE.Vector2;k=0;for(l=a.length;k<l;k++)h=a[k],h.x>b?b=h.x:h.x<e&&(e=h.x),h.y>c?c=h.y:h.y<f&&(f=h.y),q&&(h.z>d?d=h.z:h.z<g&&(g=h.z)),p.add(h);a={minX:e,minY:f,maxX:b,maxY:c};q&&(a.maxZ=d,a.minZ=g);return a};
+THREE.CurvePath.prototype.createPointsGeometry=function(a){a=this.getPoints(a,!0);return this.createGeometry(a)};THREE.CurvePath.prototype.createSpacedPointsGeometry=function(a){a=this.getSpacedPoints(a,!0);return this.createGeometry(a)};THREE.CurvePath.prototype.createGeometry=function(a){for(var b=new THREE.Geometry,c=0;c<a.length;c++)b.vertices.push(new THREE.Vector3(a[c].x,a[c].y,a[c].z||0));return b};THREE.CurvePath.prototype.addWrapPath=function(a){this.bends.push(a)};
+THREE.CurvePath.prototype.getTransformedPoints=function(a,b){var c=this.getPoints(a),d,e;b||(b=this.bends);d=0;for(e=b.length;d<e;d++)c=this.getWrapPoints(c,b[d]);return c};THREE.CurvePath.prototype.getTransformedSpacedPoints=function(a,b){var c=this.getSpacedPoints(a),d,e;b||(b=this.bends);d=0;for(e=b.length;d<e;d++)c=this.getWrapPoints(c,b[d]);return c};
+THREE.CurvePath.prototype.getWrapPoints=function(a,b){var c=this.getBoundingBox(),d,e,f,g,h,k;d=0;for(e=a.length;d<e;d++)f=a[d],g=f.x,h=f.y,k=g/c.maxX,k=b.getUtoTmapping(k,g),g=b.getPoint(k),k=b.getTangent(k),k.set(-k.y,k.x).multiplyScalar(h),f.x=g.x+k.x,f.y=g.y+k.y;return a};THREE.Gyroscope=function(){THREE.Object3D.call(this)};THREE.Gyroscope.prototype=Object.create(THREE.Object3D.prototype);THREE.Gyroscope.prototype.constructor=THREE.Gyroscope;
+THREE.Gyroscope.prototype.updateMatrixWorld=function(){var a=new THREE.Vector3,b=new THREE.Quaternion,c=new THREE.Vector3,d=new THREE.Vector3,e=new THREE.Quaternion,f=new THREE.Vector3;return function(g){this.matrixAutoUpdate&&this.updateMatrix();if(this.matrixWorldNeedsUpdate||g)this.parent?(this.matrixWorld.multiplyMatrices(this.parent.matrixWorld,this.matrix),this.matrixWorld.decompose(d,e,f),this.matrix.decompose(a,b,c),this.matrixWorld.compose(d,b,f)):this.matrixWorld.copy(this.matr [...]
+!1,g=!0;for(var h=0,k=this.children.length;h<k;h++)this.children[h].updateMatrixWorld(g)}}();THREE.Path=function(a){THREE.CurvePath.call(this);this.actions=[];a&&this.fromPoints(a)};THREE.Path.prototype=Object.create(THREE.CurvePath.prototype);THREE.Path.prototype.constructor=THREE.Path;THREE.PathActions={MOVE_TO:"moveTo",LINE_TO:"lineTo",QUADRATIC_CURVE_TO:"quadraticCurveTo",BEZIER_CURVE_TO:"bezierCurveTo",CSPLINE_THRU:"splineThru",ARC:"arc",ELLIPSE:"ellipse"};
+THREE.Path.prototype.fromPoints=function(a){this.moveTo(a[0].x,a[0].y);for(var b=1,c=a.length;b<c;b++)this.lineTo(a[b].x,a[b].y)};THREE.Path.prototype.moveTo=function(a,b){var c=Array.prototype.slice.call(arguments);this.actions.push({action:THREE.PathActions.MOVE_TO,args:c})};
+THREE.Path.prototype.lineTo=function(a,b){var c=Array.prototype.slice.call(arguments),d=this.actions[this.actions.length-1].args,d=new THREE.LineCurve(new THREE.Vector2(d[d.length-2],d[d.length-1]),new THREE.Vector2(a,b));this.curves.push(d);this.actions.push({action:THREE.PathActions.LINE_TO,args:c})};
+THREE.Path.prototype.quadraticCurveTo=function(a,b,c,d){var e=Array.prototype.slice.call(arguments),f=this.actions[this.actions.length-1].args,f=new THREE.QuadraticBezierCurve(new THREE.Vector2(f[f.length-2],f[f.length-1]),new THREE.Vector2(a,b),new THREE.Vector2(c,d));this.curves.push(f);this.actions.push({action:THREE.PathActions.QUADRATIC_CURVE_TO,args:e})};
+THREE.Path.prototype.bezierCurveTo=function(a,b,c,d,e,f){var g=Array.prototype.slice.call(arguments),h=this.actions[this.actions.length-1].args,h=new THREE.CubicBezierCurve(new THREE.Vector2(h[h.length-2],h[h.length-1]),new THREE.Vector2(a,b),new THREE.Vector2(c,d),new THREE.Vector2(e,f));this.curves.push(h);this.actions.push({action:THREE.PathActions.BEZIER_CURVE_TO,args:g})};
+THREE.Path.prototype.splineThru=function(a){var b=Array.prototype.slice.call(arguments),c=this.actions[this.actions.length-1].args,c=[new THREE.Vector2(c[c.length-2],c[c.length-1])];Array.prototype.push.apply(c,a);c=new THREE.SplineCurve(c);this.curves.push(c);this.actions.push({action:THREE.PathActions.CSPLINE_THRU,args:b})};THREE.Path.prototype.arc=function(a,b,c,d,e,f){var g=this.actions[this.actions.length-1].args;this.absarc(a+g[g.length-2],b+g[g.length-1],c,d,e,f)};
+THREE.Path.prototype.absarc=function(a,b,c,d,e,f){this.absellipse(a,b,c,c,d,e,f)};THREE.Path.prototype.ellipse=function(a,b,c,d,e,f,g){var h=this.actions[this.actions.length-1].args;this.absellipse(a+h[h.length-2],b+h[h.length-1],c,d,e,f,g)};THREE.Path.prototype.absellipse=function(a,b,c,d,e,f,g){var h=Array.prototype.slice.call(arguments),k=new THREE.EllipseCurve(a,b,c,d,e,f,g);this.curves.push(k);k=k.getPoint(1);h.push(k.x);h.push(k.y);this.actions.push({action:THREE.PathActions.ELLIPS [...]
+THREE.Path.prototype.getSpacedPoints=function(a,b){a||(a=40);for(var c=[],d=0;d<a;d++)c.push(this.getPoint(d/a));return c};
+THREE.Path.prototype.getPoints=function(a,b){if(this.useSpacedPoints)return this.getSpacedPoints(a,b);a=a||12;var c=[],d,e,f,g,h,k,l,p,q,n,t,r,s;d=0;for(e=this.actions.length;d<e;d++)switch(f=this.actions[d],g=f.action,f=f.args,g){case THREE.PathActions.MOVE_TO:c.push(new THREE.Vector2(f[0],f[1]));break;case THREE.PathActions.LINE_TO:c.push(new THREE.Vector2(f[0],f[1]));break;case THREE.PathActions.QUADRATIC_CURVE_TO:h=f[2];k=f[3];q=f[0];n=f[1];0<c.length?(g=c[c.lengt [...]
+r=g.y):(g=this.actions[d-1].args,t=g[g.length-2],r=g[g.length-1]);for(f=1;f<=a;f++)s=f/a,g=THREE.Shape.Utils.b2(s,t,q,h),s=THREE.Shape.Utils.b2(s,r,n,k),c.push(new THREE.Vector2(g,s));break;case THREE.PathActions.BEZIER_CURVE_TO:h=f[4];k=f[5];q=f[0];n=f[1];l=f[2];p=f[3];0<c.length?(g=c[c.length-1],t=g.x,r=g.y):(g=this.actions[d-1].args,t=g[g.length-2],r=g[g.length-1]);for(f=1;f<=a;f++)s=f/a,g=THREE.Shape.Utils.b3(s,t,q,l,h),s=THREE.Shape.Utils.b3(s,r,n,p,k),c.push(new THREE.Vector2(g,s)) [...]
+this.actions[d-1].args;s=[new THREE.Vector2(g[g.length-2],g[g.length-1])];g=a*f[0].length;s=s.concat(f[0]);s=new THREE.SplineCurve(s);for(f=1;f<=g;f++)c.push(s.getPointAt(f/g));break;case THREE.PathActions.ARC:h=f[0];k=f[1];n=f[2];l=f[3];g=f[4];q=!!f[5];t=g-l;r=2*a;for(f=1;f<=r;f++)s=f/r,q||(s=1-s),s=l+s*t,g=h+n*Math.cos(s),s=k+n*Math.sin(s),c.push(new THREE.Vector2(g,s));break;case THREE.PathActions.ELLIPSE:for(h=f[0],k=f[1],n=f[2],p=f[3],l=f[4],g=f[5],q=!!f[6],t=g-l,r=2*a,f=1;f<=r;f++) [...]
+(s=1-s),s=l+s*t,g=h+n*Math.cos(s),s=k+p*Math.sin(s),c.push(new THREE.Vector2(g,s))}d=c[c.length-1];1E-10>Math.abs(d.x-c[0].x)&&1E-10>Math.abs(d.y-c[0].y)&&c.splice(c.length-1,1);b&&c.push(c[0]);return c};
+THREE.Path.prototype.toShapes=function(a,b){function c(a){for(var b=[],c=0,d=a.length;c<d;c++){var e=a[c],f=new THREE.Shape;f.actions=e.actions;f.curves=e.curves;b.push(f)}return b}function d(a,b){for(var c=b.length,d=!1,e=c-1,f=0;f<c;e=f++){var g=b[e],h=b[f],k=h.x-g.x,n=h.y-g.y;if(1E-10<Math.abs(n)){if(0>n&&(g=b[f],k=-k,h=b[e],n=-n),!(a.y<g.y||a.y>h.y))if(a.y==g.y){if(a.x==g.x)return!0}else{e=n*(a.x-g.x)-k*(a.y-g.y);if(0==e)return!0;0>e||(d=!d)}}else if(a.y==g.y&&(h.x<=a.x&&a.x<=g.x||g. [...]
+h.x))return!0}return d}var e=function(a){var b,c,d,e,f=[],g=new THREE.Path;b=0;for(c=a.length;b<c;b++)d=a[b],e=d.args,d=d.action,d==THREE.PathActions.MOVE_TO&&0!=g.actions.length&&(f.push(g),g=new THREE.Path),g[d].apply(g,e);0!=g.actions.length&&f.push(g);return f}(this.actions);if(0==e.length)return[];if(!0===b)return c(e);var f,g,h,k=[];if(1==e.length)return g=e[0],h=new THREE.Shape,h.actions=g.actions,h.curves=g.curves,k.push(h),k;var l=!THREE.Shape.Utils.isClockWise(e[0].getPoints()) [...]
+h=[];var p=[],q=[],n=0,t;p[n]=void 0;q[n]=[];var r,s;r=0;for(s=e.length;r<s;r++)g=e[r],t=g.getPoints(),f=THREE.Shape.Utils.isClockWise(t),(f=a?!f:f)?(!l&&p[n]&&n++,p[n]={s:new THREE.Shape,p:t},p[n].s.actions=g.actions,p[n].s.curves=g.curves,l&&n++,q[n]=[]):q[n].push({h:g,p:t[0]});if(!p[0])return c(e);if(1<p.length){r=!1;s=[];g=0;for(e=p.length;g<e;g++)h[g]=[];g=0;for(e=p.length;g<e;g++)for(f=q[g],l=0;l<f.length;l++){n=f[l];t=!0;for(var u=0;u<p.length;u++)d(n.p,p[u].p)&&(g!=u&&s.push({fro [...]
+hole:l}),t?(t=!1,h[u].push(n)):r=!0);t&&h[g].push(n)}0<s.length&&(r||(q=h))}r=0;for(s=p.length;r<s;r++)for(h=p[r].s,k.push(h),g=q[r],e=0,f=g.length;e<f;e++)h.holes.push(g[e].h);return k};THREE.Shape=function(){THREE.Path.apply(this,arguments);this.holes=[]};THREE.Shape.prototype=Object.create(THREE.Path.prototype);THREE.Shape.prototype.constructor=THREE.Shape;THREE.Shape.prototype.extrude=function(a){return new THREE.ExtrudeGeometry(this,a)};
+THREE.Shape.prototype.makeGeometry=function(a){return new THREE.ShapeGeometry(this,a)};THREE.Shape.prototype.getPointsHoles=function(a){var b,c=this.holes.length,d=[];for(b=0;b<c;b++)d[b]=this.holes[b].getTransformedPoints(a,this.bends);return d};THREE.Shape.prototype.getSpacedPointsHoles=function(a){var b,c=this.holes.length,d=[];for(b=0;b<c;b++)d[b]=this.holes[b].getTransformedSpacedPoints(a,this.bends);return d};
+THREE.Shape.prototype.extractAllPoints=function(a){return{shape:this.getTransformedPoints(a),holes:this.getPointsHoles(a)}};THREE.Shape.prototype.extractPoints=function(a){return this.useSpacedPoints?this.extractAllSpacedPoints(a):this.extractAllPoints(a)};THREE.Shape.prototype.extractAllSpacedPoints=function(a){return{shape:this.getTransformedSpacedPoints(a),holes:this.getSpacedPointsHoles(a)}};
+THREE.Shape.Utils={triangulateShape:function(a,b){function c(a,b,c){return a.x!=b.x?a.x<b.x?a.x<=c.x&&c.x<=b.x:b.x<=c.x&&c.x<=a.x:a.y<b.y?a.y<=c.y&&c.y<=b.y:b.y<=c.y&&c.y<=a.y}function d(a,b,d,e,f){var g=b.x-a.x,h=b.y-a.y,k=e.x-d.x,l=e.y-d.y,p=a.x-d.x,q=a.y-d.y,E=h*k-g*l,G=h*p-g*q;if(1E-10<Math.abs(E)){if(0<E){if(0>G||G>E)return[];k=l*p-k*q;if(0>k||k>E)return[]}else{if(0<G||G<E)return[];k=l*p-k*q;if(0<k||k<E)return[]}if(0==k)return!f||0!=G&&G!=E?[a]:[];if(k==E)return!f||0!=G&&G!=E?[b]:[] [...]
+if(G==E)return[e];f=k/E;return[{x:a.x+f*g,y:a.y+f*h}]}if(0!=G||l*p!=k*q)return[];h=0==g&&0==h;k=0==k&&0==l;if(h&&k)return a.x!=d.x||a.y!=d.y?[]:[a];if(h)return c(d,e,a)?[a]:[];if(k)return c(a,b,d)?[d]:[];0!=g?(a.x<b.x?(g=a,k=a.x,h=b,a=b.x):(g=b,k=b.x,h=a,a=a.x),d.x<e.x?(b=d,E=d.x,l=e,d=e.x):(b=e,E=e.x,l=d,d=d.x)):(a.y<b.y?(g=a,k=a.y,h=b,a=b.y):(g=b,k=b.y,h=a,a=a.y),d.y<e.y?(b=d,E=d.y,l=e,d=e.y):(b=e,E=e.y,l=d,d=d.y));return k<=E?a<E?[]:a==E?f?[]:[b]:a<=d?[b,h]:[b,l]:k>d?[]:k==d?f?[]:[g]: [...]
+[g,l]}function e(a,b,c,d){var e=b.x-a.x,f=b.y-a.y;b=c.x-a.x;c=c.y-a.y;var g=d.x-a.x;d=d.y-a.y;a=e*c-f*b;e=e*d-f*g;return 1E-10<Math.abs(a)?(b=g*c-d*b,0<a?0<=e&&0<=b:0<=e||0<=b):0<e}var f,g,h,k,l,p={};h=a.concat();f=0;for(g=b.length;f<g;f++)Array.prototype.push.apply(h,b[f]);f=0;for(g=h.length;f<g;f++)l=h[f].x+":"+h[f].y,void 0!==p[l]&&THREE.warn("THREE.Shape: Duplicate point",l),p[l]=f;f=function(a,b){function c(a,b){var d=h.length-1,f=a-1;0>f&&(f=d);var g=a+1;g>d&&(g=0);d=e(h[a],h[f],h[ [...]
+if(!d)return!1;d=k.length-1;f=b-1;0>f&&(f=d);g=b+1;g>d&&(g=0);return(d=e(k[b],k[f],k[g],h[a]))?!0:!1}function f(a,b){var c,e;for(c=0;c<h.length;c++)if(e=c+1,e%=h.length,e=d(a,b,h[c],h[e],!0),0<e.length)return!0;return!1}function g(a,c){var e,f,h,k;for(e=0;e<l.length;e++)for(f=b[l[e]],h=0;h<f.length;h++)if(k=h+1,k%=f.length,k=d(a,c,f[h],f[k],!0),0<k.length)return!0;return!1}var h=a.concat(),k,l=[],p,q,A,E,G,F=[],z,I,U,M=0;for(p=b.length;M<p;M++)l.push(M);z=0;for(var H=2*l.length;0<l.lengt [...]
+H){console.log("Infinite Loop! Holes left:"+l.length+", Probably Hole outside Shape!");break}for(q=z;q<h.length;q++){A=h[q];p=-1;for(M=0;M<l.length;M++)if(E=l[M],G=A.x+":"+A.y+":"+E,void 0===F[G]){k=b[E];for(I=0;I<k.length;I++)if(E=k[I],c(q,I)&&!f(A,E)&&!g(A,E)){p=I;l.splice(M,1);z=h.slice(0,q+1);E=h.slice(q);I=k.slice(p);U=k.slice(0,p+1);h=z.concat(I).concat(U).concat(E);z=q;break}if(0<=p)break;F[G]=!0}if(0<=p)break}}return h}(a,b);var q=THREE.FontUtils.Triangulate(f,!1);f=0;for(g=q.len [...]
+q[f],h=0;3>h;h++)l=k[h].x+":"+k[h].y,l=p[l],void 0!==l&&(k[h]=l);return q.concat()},isClockWise:function(a){return 0>THREE.FontUtils.Triangulate.area(a)},b2p0:function(a,b){var c=1-a;return c*c*b},b2p1:function(a,b){return 2*(1-a)*a*b},b2p2:function(a,b){return a*a*b},b2:function(a,b,c,d){return this.b2p0(a,b)+this.b2p1(a,c)+this.b2p2(a,d)},b3p0:function(a,b){var c=1-a;return c*c*c*b},b3p1:function(a,b){var c=1-a;return 3*c*c*a*b},b3p2:function(a,b){return 3*(1-a)*a*a*b},b3p3:function(a, [...]
+a*a*b},b3:function(a,b,c,d,e){return this.b3p0(a,b)+this.b3p1(a,c)+this.b3p2(a,d)+this.b3p3(a,e)}};THREE.LineCurve=function(a,b){this.v1=a;this.v2=b};THREE.LineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.LineCurve.prototype.constructor=THREE.LineCurve;THREE.LineCurve.prototype.getPoint=function(a){var b=this.v2.clone().sub(this.v1);b.multiplyScalar(a).add(this.v1);return b};THREE.LineCurve.prototype.getPointAt=function(a){return this.getPoint(a)};
+THREE.LineCurve.prototype.getTangent=function(a){return this.v2.clone().sub(this.v1).normalize()};THREE.QuadraticBezierCurve=function(a,b,c){this.v0=a;this.v1=b;this.v2=c};THREE.QuadraticBezierCurve.prototype=Object.create(THREE.Curve.prototype);THREE.QuadraticBezierCurve.prototype.constructor=THREE.QuadraticBezierCurve;
+THREE.QuadraticBezierCurve.prototype.getPoint=function(a){var b=new THREE.Vector2;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);return b};THREE.QuadraticBezierCurve.prototype.getTangent=function(a){var b=new THREE.Vector2;b.x=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.y,this.v1.y,this.v2.y);return b.normalize()};
+THREE.CubicBezierCurve=function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d};THREE.CubicBezierCurve.prototype=Object.create(THREE.Curve.prototype);THREE.CubicBezierCurve.prototype.constructor=THREE.CubicBezierCurve;THREE.CubicBezierCurve.prototype.getPoint=function(a){var b;b=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);return new THREE.Vector2(b,a)};
+THREE.CubicBezierCurve.prototype.getTangent=function(a){var b;b=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b=new THREE.Vector2(b,a);b.normalize();return b};THREE.SplineCurve=function(a){this.points=void 0==a?[]:a};THREE.SplineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.SplineCurve.prototype.constructor=THREE.SplineCurve;
+THREE.SplineCurve.prototype.getPoint=function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector2;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);return c};THREE.EllipseCurve=function(a,b,c,d,e,f,g){this.aX=a;this.aY=b;this.xRadius=c;this.yRadius=d;this.aStartAngle=e;this.aEndAngle=f;this.aClockwise=g};
+THREE.EllipseCurve.prototype=Object.create(THREE.Curve.prototype);THREE.EllipseCurve.prototype.constructor=THREE.EllipseCurve;THREE.EllipseCurve.prototype.getPoint=function(a){var b=this.aEndAngle-this.aStartAngle;0>b&&(b+=2*Math.PI);b>2*Math.PI&&(b-=2*Math.PI);a=!0===this.aClockwise?this.aEndAngle+(1-a)*(2*Math.PI-b):this.aStartAngle+a*b;b=new THREE.Vector2;b.x=this.aX+this.xRadius*Math.cos(a);b.y=this.aY+this.yRadius*Math.sin(a);return b};
+THREE.ArcCurve=function(a,b,c,d,e,f){THREE.EllipseCurve.call(this,a,b,c,c,d,e,f)};THREE.ArcCurve.prototype=Object.create(THREE.EllipseCurve.prototype);THREE.ArcCurve.prototype.constructor=THREE.ArcCurve;THREE.LineCurve3=THREE.Curve.create(function(a,b){this.v1=a;this.v2=b},function(a){var b=new THREE.Vector3;b.subVectors(this.v2,this.v1);b.multiplyScalar(a);b.add(this.v1);return b});
+THREE.QuadraticBezierCurve3=THREE.Curve.create(function(a,b,c){this.v0=a;this.v1=b;this.v2=c},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);b.z=THREE.Shape.Utils.b2(a,this.v0.z,this.v1.z,this.v2.z);return b});
+THREE.CubicBezierCurve3=THREE.Curve.create(function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);b.y=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b.z=THREE.Shape.Utils.b3(a,this.v0.z,this.v1.z,this.v2.z,this.v3.z);return b});
+THREE.SplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector3;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);c.z=THREE.Curve.Utils.interpolate(d.z,e.z,f.z,b.z,a);return c});
+THREE.ClosedSplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-0;var c=Math.floor(a);a-=c;var c=c+(0<c?0:(Math.floor(Math.abs(c)/b.length)+1)*b.length),d=b[(c-1)%b.length],e=b[c%b.length],f=b[(c+1)%b.length],b=b[(c+2)%b.length],c=new THREE.Vector3;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);c.z=THREE.Curve.Utils.interpolate(d.z,e.z,f.z,b.z,a);return c});
+THREE.AnimationHandler={LINEAR:0,CATMULLROM:1,CATMULLROM_FORWARD:2,add:function(){THREE.warn("THREE.AnimationHandler.add() has been deprecated.")},get:function(){THREE.warn("THREE.AnimationHandler.get() has been deprecated.")},remove:function(){THREE.warn("THREE.AnimationHandler.remove() has been deprecated.")},animations:[],init:function(a){if(!0===a.initialized)return a;for(var b=0;b<a.hierarchy.length;b++){for(var c=0;c<a.hierarchy[b].keys.length;c++)if(0>a.hierarchy[b].keys[c].time&& [...]
+0),void 0!==a.hierarchy[b].keys[c].rot&&!(a.hierarchy[b].keys[c].rot instanceof THREE.Quaternion)){var d=a.hierarchy[b].keys[c].rot;a.hierarchy[b].keys[c].rot=(new THREE.Quaternion).fromArray(d)}if(a.hierarchy[b].keys.length&&void 0!==a.hierarchy[b].keys[0].morphTargets){d={};for(c=0;c<a.hierarchy[b].keys.length;c++)for(var e=0;e<a.hierarchy[b].keys[c].morphTargets.length;e++){var f=a.hierarchy[b].keys[c].morphTargets[e];d[f]=-1}a.hierarchy[b].usedMorphTargets=d;for(c=0;c<a.hierarchy[b]. [...]
+{};for(f in d){for(e=0;e<a.hierarchy[b].keys[c].morphTargets.length;e++)if(a.hierarchy[b].keys[c].morphTargets[e]===f){g[f]=a.hierarchy[b].keys[c].morphTargetsInfluences[e];break}e===a.hierarchy[b].keys[c].morphTargets.length&&(g[f]=0)}a.hierarchy[b].keys[c].morphTargetsInfluences=g}}for(c=1;c<a.hierarchy[b].keys.length;c++)a.hierarchy[b].keys[c].time===a.hierarchy[b].keys[c-1].time&&(a.hierarchy[b].keys.splice(c,1),c--);for(c=0;c<a.hierarchy[b].keys.length;c++)a.hierarchy[b].keys[c].ind [...]
+!0;return a},parse:function(a){var b=function(a,c){c.push(a);for(var d=0;d<a.children.length;d++)b(a.children[d],c)},c=[];if(a instanceof THREE.SkinnedMesh)for(var d=0;d<a.skeleton.bones.length;d++)c.push(a.skeleton.bones[d]);else b(a,c);return c},play:function(a){-1===this.animations.indexOf(a)&&this.animations.push(a)},stop:function(a){a=this.animations.indexOf(a);-1!==a&&this.animations.splice(a,1)},update:function(a){for(var b=0;b<this.animations.length;b++)this.animations[b].resetBl [...]
+for(b=0;b<this.animations.length;b++)this.animations[b].update(a)}};THREE.Animation=function(a,b){this.root=a;this.data=THREE.AnimationHandler.init(b);this.hierarchy=THREE.AnimationHandler.parse(a);this.currentTime=0;this.timeScale=1;this.isPlaying=!1;this.loop=!0;this.weight=0;this.interpolationType=THREE.AnimationHandler.LINEAR};
+THREE.Animation.prototype={constructor:THREE.Animation,keyTypes:["pos","rot","scl"],play:function(a,b){this.currentTime=void 0!==a?a:0;this.weight=void 0!==b?b:1;this.isPlaying=!0;this.reset();THREE.AnimationHandler.play(this)},stop:function(){this.isPlaying=!1;THREE.AnimationHandler.stop(this)},reset:function(){for(var a=0,b=this.hierarchy.length;a<b;a++){var c=this.hierarchy[a];void 0===c.animationCache&&(c.animationCache={animations:{},blending:{positionWeight:0,quaternionWeight:0,sca [...]
+var d=this.data.name,e=c.animationCache.animations,f=e[d];void 0===f&&(f={prevKey:{pos:0,rot:0,scl:0},nextKey:{pos:0,rot:0,scl:0},originalMatrix:c.matrix},e[d]=f);for(c=0;3>c;c++){for(var d=this.keyTypes[c],e=this.data.hierarchy[a].keys[0],g=this.getNextKeyWith(d,a,1);g.time<this.currentTime&&g.index>e.index;)e=g,g=this.getNextKeyWith(d,a,g.index+1);f.prevKey[d]=e;f.nextKey[d]=g}}},resetBlendWeights:function(){for(var a=0,b=this.hierarchy.length;a<b;a++){var c=this.hierarchy[a].animation [...]
+c&&(c=c.blending,c.positionWeight=0,c.quaternionWeight=0,c.scaleWeight=0)}},update:function(){var a=[],b=new THREE.Vector3,c=new THREE.Vector3,d=new THREE.Quaternion,e=function(a,b){var c=[],d=[],e,q,n,t,r,s;e=(a.length-1)*b;q=Math.floor(e);e-=q;c[0]=0===q?q:q-1;c[1]=q;c[2]=q>a.length-2?q:q+1;c[3]=q>a.length-3?q:q+2;q=a[c[0]];t=a[c[1]];r=a[c[2]];s=a[c[3]];c=e*e;n=e*c;d[0]=f(q[0],t[0],r[0],s[0],e,c,n);d[1]=f(q[1],t[1],r[1],s[1],e,c,n);d[2]=f(q[2],t[2],r[2],s[2],e,c,n);return d},f=function [...]
+e,f,n){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*n+(-3*(b-c)-2*a-d)*f+a*e+b};return function(f){if(!1!==this.isPlaying&&(this.currentTime+=f*this.timeScale,0!==this.weight)){f=this.data.length;if(this.currentTime>f||0>this.currentTime)this.loop?(this.currentTime%=f,0>this.currentTime&&(this.currentTime+=f),this.reset()):this.stop();f=0;for(var h=this.hierarchy.length;f<h;f++)for(var k=this.hierarchy[f],l=k.animationCache.animations[this.data.name],p=k.animationCache.blending,q=0;3>q;q++) [...]
+t=l.prevKey[n],r=l.nextKey[n];if(0<this.timeScale&&r.time<=this.currentTime||0>this.timeScale&&t.time>=this.currentTime){t=this.data.hierarchy[f].keys[0];for(r=this.getNextKeyWith(n,f,1);r.time<this.currentTime&&r.index>t.index;)t=r,r=this.getNextKeyWith(n,f,r.index+1);l.prevKey[n]=t;l.nextKey[n]=r}var s=(this.currentTime-t.time)/(r.time-t.time),u=t[n],v=r[n];0>s&&(s=0);1<s&&(s=1);if("pos"===n)if(this.interpolationType===THREE.AnimationHandler.LINEAR)c.x=u[0]+(v[0]-u[0])*s,c.y=u[1]+(v[1] [...]
+c.z=u[2]+(v[2]-u[2])*s,t=this.weight/(this.weight+p.positionWeight),k.position.lerp(c,t),p.positionWeight+=this.weight;else{if(this.interpolationType===THREE.AnimationHandler.CATMULLROM||this.interpolationType===THREE.AnimationHandler.CATMULLROM_FORWARD)a[0]=this.getPrevKeyWith("pos",f,t.index-1).pos,a[1]=u,a[2]=v,a[3]=this.getNextKeyWith("pos",f,r.index+1).pos,s=.33*s+.33,r=e(a,s),t=this.weight/(this.weight+p.positionWeight),p.positionWeight+=this.weight,n=k.position,n.x+=(r[0]-n.x)*t,n [...]
+n.y)*t,n.z+=(r[2]-n.z)*t,this.interpolationType===THREE.AnimationHandler.CATMULLROM_FORWARD&&(s=e(a,1.01*s),b.set(s[0],s[1],s[2]),b.sub(n),b.y=0,b.normalize(),s=Math.atan2(b.x,b.z),k.rotation.set(0,s,0))}else"rot"===n?(THREE.Quaternion.slerp(u,v,d,s),0===p.quaternionWeight?(k.quaternion.copy(d),p.quaternionWeight=this.weight):(t=this.weight/(this.weight+p.quaternionWeight),THREE.Quaternion.slerp(k.quaternion,d,k.quaternion,t),p.quaternionWeight+=this.weight)):"scl"===n&&(c.x=u[0]+(v[0]-u [...]
+u[1]+(v[1]-u[1])*s,c.z=u[2]+(v[2]-u[2])*s,t=this.weight/(this.weight+p.scaleWeight),k.scale.lerp(c,t),p.scaleWeight+=this.weight)}return!0}}}(),getNextKeyWith:function(a,b,c){var d=this.data.hierarchy[b].keys;for(c=this.interpolationType===THREE.AnimationHandler.CATMULLROM||this.interpolationType===THREE.AnimationHandler.CATMULLROM_FORWARD?c<d.length-1?c:d.length-1:c%d.length;c<d.length;c++)if(void 0!==d[c][a])return d[c];return this.data.hierarchy[b].keys[0]},getPrevKeyWith:function(a,b [...]
+this.data.hierarchy[b].keys;for(c=this.interpolationType===THREE.AnimationHandler.CATMULLROM||this.interpolationType===THREE.AnimationHandler.CATMULLROM_FORWARD?0<c?c:0:0<=c?c:c+d.length;0<=c;c--)if(void 0!==d[c][a])return d[c];return this.data.hierarchy[b].keys[d.length-1]}};
+THREE.KeyFrameAnimation=function(a){this.root=a.node;this.data=THREE.AnimationHandler.init(a);this.hierarchy=THREE.AnimationHandler.parse(this.root);this.currentTime=0;this.timeScale=.001;this.isPlaying=!1;this.loop=this.isPaused=!0;a=0;for(var b=this.hierarchy.length;a<b;a++){var c=this.data.hierarchy[a].sids,d=this.hierarchy[a];if(this.data.hierarchy[a].keys.length&&c){for(var e=0;e<c.length;e++){var f=c[e],g=this.getNextKeyWith(f,a,0);g&&g.apply(f)}d.matrixAutoUpdate=!1;this.data.hier [...]
+d.matrixWorldNeedsUpdate=!0}}};
+THREE.KeyFrameAnimation.prototype={constructor:THREE.KeyFrameAnimation,play:function(a){this.currentTime=void 0!==a?a:0;if(!1===this.isPlaying){this.isPlaying=!0;var b=this.hierarchy.length,c,d;for(a=0;a<b;a++)c=this.hierarchy[a],d=this.data.hierarchy[a],void 0===d.animationCache&&(d.animationCache={},d.animationCache.prevKey=null,d.animationCache.nextKey=null,d.animationCache.originalMatrix=c.matrix),c=this.data.hierarchy[a].keys,c.length&&(d.animationCache.prevKey=c[0],d.animationCache [...]
+c[1],this.startTime=Math.min(c[0].time,this.startTime),this.endTime=Math.max(c[c.length-1].time,this.endTime));this.update(0)}this.isPaused=!1;THREE.AnimationHandler.play(this)},stop:function(){this.isPaused=this.isPlaying=!1;THREE.AnimationHandler.stop(this);for(var a=0;a<this.data.hierarchy.length;a++){var b=this.hierarchy[a],c=this.data.hierarchy[a];if(void 0!==c.animationCache){var d=c.animationCache.originalMatrix;d.copy(b.matrix);b.matrix=d;delete c.animationCache}}},update:functio [...]
+this.isPlaying){this.currentTime+=a*this.timeScale;a=this.data.length;!0===this.loop&&this.currentTime>a&&(this.currentTime%=a);this.currentTime=Math.min(this.currentTime,a);a=0;for(var b=this.hierarchy.length;a<b;a++){var c=this.hierarchy[a],d=this.data.hierarchy[a],e=d.keys,d=d.animationCache;if(e.length){var f=d.prevKey,g=d.nextKey;if(g.time<=this.currentTime){for(;g.time<this.currentTime&&g.index>f.index;)f=g,g=e[f.index+1];d.prevKey=f;d.nextKey=g}g.time>=this.currentTime?f.interpola [...]
+f.interpolate(g,g.time);this.data.hierarchy[a].node.updateMatrix();c.matrixWorldNeedsUpdate=!0}}}},getNextKeyWith:function(a,b,c){b=this.data.hierarchy[b].keys;for(c%=b.length;c<b.length;c++)if(b[c].hasTarget(a))return b[c];return b[0]},getPrevKeyWith:function(a,b,c){b=this.data.hierarchy[b].keys;for(c=0<=c?c:c+b.length;0<=c;c--)if(b[c].hasTarget(a))return b[c];return b[b.length-1]}};
+THREE.MorphAnimation=function(a){this.mesh=a;this.frames=a.morphTargetInfluences.length;this.currentTime=0;this.duration=1E3;this.loop=!0;this.currentFrame=this.lastFrame=0;this.isPlaying=!1};
+THREE.MorphAnimation.prototype={constructor:THREE.MorphAnimation,play:function(){this.isPlaying=!0},pause:function(){this.isPlaying=!1},update:function(a){if(!1!==this.isPlaying){this.currentTime+=a;!0===this.loop&&this.currentTime>this.duration&&(this.currentTime%=this.duration);this.currentTime=Math.min(this.currentTime,this.duration);a=this.duration/this.frames;var b=Math.floor(this.currentTime/a),c=this.mesh.morphTargetInfluences;b!=this.currentFrame&&(c[this.lastFrame]=0,c[this.curr [...]
+1,c[b]=0,this.lastFrame=this.currentFrame,this.currentFrame=b);c[b]=this.currentTime%a/a;c[this.lastFrame]=1-c[b]}}};
+THREE.BoxGeometry=function(a,b,c,d,e,f){function g(a,b,c,d,e,f,g,s){var u,v=h.widthSegments,x=h.heightSegments,D=e/2,w=f/2,y=h.vertices.length;if("x"===a&&"y"===b||"y"===a&&"x"===b)u="z";else if("x"===a&&"z"===b||"z"===a&&"x"===b)u="y",x=h.depthSegments;else if("z"===a&&"y"===b||"y"===a&&"z"===b)u="x",v=h.depthSegments;var A=v+1,E=x+1,G=e/v,F=f/x,z=new THREE.Vector3;z[u]=0<g?1:-1;for(e=0;e<E;e++)for(f=0;f<A;f++){var I=new THREE.Vector3;I[a]=(f*G-D)*c;I[b]=(e*F-w)*d;I[u]=g;h.vertices.push [...]
+0;e<x;e++)for(f=0;f<v;f++)w=f+A*e,a=f+A*(e+1),b=f+1+A*(e+1),c=f+1+A*e,d=new THREE.Vector2(f/v,1-e/x),g=new THREE.Vector2(f/v,1-(e+1)/x),u=new THREE.Vector2((f+1)/v,1-(e+1)/x),D=new THREE.Vector2((f+1)/v,1-e/x),w=new THREE.Face3(w+y,a+y,c+y),w.normal.copy(z),w.vertexNormals.push(z.clone(),z.clone(),z.clone()),w.materialIndex=s,h.faces.push(w),h.faceVertexUvs[0].push([d,g,D]),w=new THREE.Face3(a+y,b+y,c+y),w.normal.copy(z),w.vertexNormals.push(z.clone(),z.clone(),z.clone()),w.materialIndex [...]
+h.faceVertexUvs[0].push([g.clone(),u,D.clone()])}THREE.Geometry.call(this);this.type="BoxGeometry";this.parameters={width:a,height:b,depth:c,widthSegments:d,heightSegments:e,depthSegments:f};this.widthSegments=d||1;this.heightSegments=e||1;this.depthSegments=f||1;var h=this;d=a/2;e=b/2;f=c/2;g("z","y",-1,-1,c,b,d,0);g("z","y",1,-1,c,b,-d,1);g("x","z",1,1,a,c,e,2);g("x","z",1,-1,a,c,-e,3);g("x","y",1,-1,a,b,f,4);g("x","y",-1,-1,a,b,-f,5);this.mergeVertices()};THREE.BoxGeometry.prototype=O [...]
+THREE.BoxGeometry.prototype.constructor=THREE.BoxGeometry;
+THREE.CircleGeometry=function(a,b,c,d){THREE.Geometry.call(this);this.type="CircleGeometry";this.parameters={radius:a,segments:b,thetaStart:c,thetaLength:d};a=a||50;b=void 0!==b?Math.max(3,b):8;c=void 0!==c?c:0;d=void 0!==d?d:2*Math.PI;var e,f=[];e=new THREE.Vector3;var g=new THREE.Vector2(.5,.5);this.vertices.push(e);f.push(g);for(e=0;e<=b;e++){var h=new THREE.Vector3,k=c+e/b*d;h.x=a*Math.cos(k);h.y=a*Math.sin(k);this.vertices.push(h);f.push(new THREE.Vector2((h.x/a+1)/2,(h.y/a+1)/2))}c [...]
+0,1);for(e=1;e<=b;e++)this.faces.push(new THREE.Face3(e,e+1,0,[c.clone(),c.clone(),c.clone()])),this.faceVertexUvs[0].push([f[e].clone(),f[e+1].clone(),g.clone()]);this.computeFaceNormals();this.boundingSphere=new THREE.Sphere(new THREE.Vector3,a)};THREE.CircleGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.CircleGeometry.prototype.constructor=THREE.CircleGeometry;
+THREE.CubeGeometry=function(a,b,c,d,e,f){THREE.warn("THREE.CubeGeometry has been renamed to THREE.BoxGeometry.");return new THREE.BoxGeometry(a,b,c,d,e,f)};
+THREE.CylinderGeometry=function(a,b,c,d,e,f,g,h){THREE.Geometry.call(this);this.type="CylinderGeometry";this.parameters={radiusTop:a,radiusBottom:b,height:c,radialSegments:d,heightSegments:e,openEnded:f,thetaStart:g,thetaLength:h};a=void 0!==a?a:20;b=void 0!==b?b:20;c=void 0!==c?c:100;d=d||8;e=e||1;f=void 0!==f?f:!1;g=void 0!==g?g:0;h=void 0!==h?h:2*Math.PI;var k=c/2,l,p,q=[],n=[];for(p=0;p<=e;p++){var t=[],r=[],s=p/e,u=s*(b-a)+a;for(l=0;l<=d;l++){var v=l/d,x=new THREE.Vector3;x.x=u*Math [...]
+g);x.y=-s*c+k;x.z=u*Math.cos(v*h+g);this.vertices.push(x);t.push(this.vertices.length-1);r.push(new THREE.Vector2(v,1-s))}q.push(t);n.push(r)}c=(b-a)/c;for(l=0;l<d;l++)for(0!==a?(g=this.vertices[q[0][l]].clone(),h=this.vertices[q[0][l+1]].clone()):(g=this.vertices[q[1][l]].clone(),h=this.vertices[q[1][l+1]].clone()),g.setY(Math.sqrt(g.x*g.x+g.z*g.z)*c).normalize(),h.setY(Math.sqrt(h.x*h.x+h.z*h.z)*c).normalize(),p=0;p<e;p++){var t=q[p][l],r=q[p+1][l],s=q[p+1][l+1],u=q[p][l+1],v=g.clone() [...]
+D=h.clone(),w=h.clone(),y=n[p][l].clone(),A=n[p+1][l].clone(),E=n[p+1][l+1].clone(),G=n[p][l+1].clone();this.faces.push(new THREE.Face3(t,r,u,[v,x,w]));this.faceVertexUvs[0].push([y,A,G]);this.faces.push(new THREE.Face3(r,s,u,[x.clone(),D,w.clone()]));this.faceVertexUvs[0].push([A.clone(),E,G.clone()])}if(!1===f&&0<a)for(this.vertices.push(new THREE.Vector3(0,k,0)),l=0;l<d;l++)t=q[0][l],r=q[0][l+1],s=this.vertices.length-1,v=new THREE.Vector3(0,1,0),x=new THREE.Vector3(0,1,0),D=new THREE [...]
+1,0),y=n[0][l].clone(),A=n[0][l+1].clone(),E=new THREE.Vector2(A.x,0),this.faces.push(new THREE.Face3(t,r,s,[v,x,D])),this.faceVertexUvs[0].push([y,A,E]);if(!1===f&&0<b)for(this.vertices.push(new THREE.Vector3(0,-k,0)),l=0;l<d;l++)t=q[e][l+1],r=q[e][l],s=this.vertices.length-1,v=new THREE.Vector3(0,-1,0),x=new THREE.Vector3(0,-1,0),D=new THREE.Vector3(0,-1,0),y=n[e][l+1].clone(),A=n[e][l].clone(),E=new THREE.Vector2(A.x,1),this.faces.push(new THREE.Face3(t,r,s,[v,x,D])),this.faceVertexUv [...]
+A,E]);this.computeFaceNormals()};THREE.CylinderGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.CylinderGeometry.prototype.constructor=THREE.CylinderGeometry;THREE.ExtrudeGeometry=function(a,b){"undefined"!==typeof a&&(THREE.Geometry.call(this),this.type="ExtrudeGeometry",a=a instanceof Array?a:[a],this.addShapeList(a,b),this.computeFaceNormals())};THREE.ExtrudeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ExtrudeGeometry.prototype.constructor=THREE.Ext [...]
+THREE.ExtrudeGeometry.prototype.addShapeList=function(a,b){for(var c=a.length,d=0;d<c;d++)this.addShape(a[d],b)};
+THREE.ExtrudeGeometry.prototype.addShape=function(a,b){function c(a,b,c){b||THREE.error("THREE.ExtrudeGeometry: vec does not exist");return b.clone().multiplyScalar(c).add(a)}function d(a,b,c){var d=1,d=a.x-b.x,e=a.y-b.y,f=c.x-a.x,g=c.y-a.y,h=d*d+e*e;if(1E-10<Math.abs(d*g-e*f)){var k=Math.sqrt(h),l=Math.sqrt(f*f+g*g),h=b.x-e/k;b=b.y+d/k;f=((c.x-g/l-h)*g-(c.y+f/l-b)*f)/(d*g-e*f);c=h+d*f-a.x;a=b+e*f-a.y;d=c*c+a*a;if(2>=d)return new THREE.Vector2(c,a);d=Math.sqrt(d/2)}else a=!1,1E-10<d?1E-1 [...]
+-1E-10>d?-1E-10>f&&(a=!0):Math.sign(e)==Math.sign(g)&&(a=!0),a?(c=-e,a=d,d=Math.sqrt(h)):(c=d,a=e,d=Math.sqrt(h/2));return new THREE.Vector2(c/d,a/d)}function e(a,b){var c,d;for(O=a.length;0<=--O;){c=O;d=O-1;0>d&&(d=a.length-1);for(var e=0,f=t+2*p,e=0;e<f;e++){var g=oa*e,h=oa*(e+1),k=b+c+g,g=b+d+g,l=b+d+h,h=b+c+h,k=k+U,g=g+U,l=l+U,h=h+U;I.faces.push(new THREE.Face3(k,g,h,null,null,x));I.faces.push(new THREE.Face3(g,l,h,null,null,x));k=D.generateSideWallUV(I,k,g,l,h);I.faceVertexUvs[0].pu [...]
+k[1],k[3]]);I.faceVertexUvs[0].push([k[1],k[2],k[3]])}}}function f(a,b,c){I.vertices.push(new THREE.Vector3(a,b,c))}function g(a,b,c){a+=U;b+=U;c+=U;I.faces.push(new THREE.Face3(a,b,c,null,null,v));a=D.generateTopUV(I,a,b,c);I.faceVertexUvs[0].push(a)}var h=void 0!==b.amount?b.amount:100,k=void 0!==b.bevelThickness?b.bevelThickness:6,l=void 0!==b.bevelSize?b.bevelSize:k-2,p=void 0!==b.bevelSegments?b.bevelSegments:3,q=void 0!==b.bevelEnabled?b.bevelEnabled:!0,n=void 0!==b.curveSegments?b [...]
+12,t=void 0!==b.steps?b.steps:1,r=b.extrudePath,s,u=!1,v=b.material,x=b.extrudeMaterial,D=void 0!==b.UVGenerator?b.UVGenerator:THREE.ExtrudeGeometry.WorldUVGenerator,w,y,A,E;r&&(s=r.getSpacedPoints(t),u=!0,q=!1,w=void 0!==b.frames?b.frames:new THREE.TubeGeometry.FrenetFrames(r,t,!1),y=new THREE.Vector3,A=new THREE.Vector3,E=new THREE.Vector3);q||(l=k=p=0);var G,F,z,I=this,U=this.vertices.length,r=a.extractPoints(n),n=r.shape,M=r.holes;if(r=!THREE.Shape.Utils.isClockWise(n)){n=n.reverse() [...]
+M.length;F<z;F++)G=M[F],THREE.Shape.Utils.isClockWise(G)&&(M[F]=G.reverse());r=!1}var H=THREE.Shape.Utils.triangulateShape(n,M),L=n;F=0;for(z=M.length;F<z;F++)G=M[F],n=n.concat(G);var P,N,R,V,J,oa=n.length,ja,ha=H.length,r=[],O=0;R=L.length;P=R-1;for(N=O+1;O<R;O++,P++,N++)P===R&&(P=0),N===R&&(N=0),r[O]=d(L[O],L[P],L[N]);var ca=[],ba,qa=r.concat();F=0;for(z=M.length;F<z;F++){G=M[F];ba=[];O=0;R=G.length;P=R-1;for(N=O+1;O<R;O++,P++,N++)P===R&&(P=0),N===R&&(N=0),ba[O]=d(G[O],G[P],G[N]);ca.pu [...]
+qa.concat(ba)}for(P=0;P<p;P++){R=P/p;V=k*(1-R);N=l*Math.sin(R*Math.PI/2);O=0;for(R=L.length;O<R;O++)J=c(L[O],r[O],N),f(J.x,J.y,-V);F=0;for(z=M.length;F<z;F++)for(G=M[F],ba=ca[F],O=0,R=G.length;O<R;O++)J=c(G[O],ba[O],N),f(J.x,J.y,-V)}N=l;for(O=0;O<oa;O++)J=q?c(n[O],qa[O],N):n[O],u?(A.copy(w.normals[0]).multiplyScalar(J.x),y.copy(w.binormals[0]).multiplyScalar(J.y),E.copy(s[0]).add(A).add(y),f(E.x,E.y,E.z)):f(J.x,J.y,0);for(R=1;R<=t;R++)for(O=0;O<oa;O++)J=q?c(n[O],qa[O],N):n[O],u?(A.copy(w [...]
+y.copy(w.binormals[R]).multiplyScalar(J.y),E.copy(s[R]).add(A).add(y),f(E.x,E.y,E.z)):f(J.x,J.y,h/t*R);for(P=p-1;0<=P;P--){R=P/p;V=k*(1-R);N=l*Math.sin(R*Math.PI/2);O=0;for(R=L.length;O<R;O++)J=c(L[O],r[O],N),f(J.x,J.y,h+V);F=0;for(z=M.length;F<z;F++)for(G=M[F],ba=ca[F],O=0,R=G.length;O<R;O++)J=c(G[O],ba[O],N),u?f(J.x,J.y+s[t-1].y,s[t-1].x+V):f(J.x,J.y,h+V)}(function(){if(q){var a;a=0*oa;for(O=0;O<ha;O++)ja=H[O],g(ja[2]+a,ja[1]+a,ja[0]+a);a=t+2*p;a*=oa;for(O=0;O<ha;O++)ja=H[O],g(ja[0]+a, [...]
+a)}else{for(O=0;O<ha;O++)ja=H[O],g(ja[2],ja[1],ja[0]);for(O=0;O<ha;O++)ja=H[O],g(ja[0]+oa*t,ja[1]+oa*t,ja[2]+oa*t)}})();(function(){var a=0;e(L,a);a+=L.length;F=0;for(z=M.length;F<z;F++)G=M[F],e(G,a),a+=G.length})()};
+THREE.ExtrudeGeometry.WorldUVGenerator={generateTopUV:function(a,b,c,d){a=a.vertices;b=a[b];c=a[c];d=a[d];return[new THREE.Vector2(b.x,b.y),new THREE.Vector2(c.x,c.y),new THREE.Vector2(d.x,d.y)]},generateSideWallUV:function(a,b,c,d,e){a=a.vertices;b=a[b];c=a[c];d=a[d];e=a[e];return.01>Math.abs(b.y-c.y)?[new THREE.Vector2(b.x,1-b.z),new THREE.Vector2(c.x,1-c.z),new THREE.Vector2(d.x,1-d.z),new THREE.Vector2(e.x,1-e.z)]:[new THREE.Vector2(b.y,1-b.z),new THREE.Vector2(c.y,1-c.z),new THREE.V [...]
+1-d.z),new THREE.Vector2(e.y,1-e.z)]}};THREE.ShapeGeometry=function(a,b){THREE.Geometry.call(this);this.type="ShapeGeometry";!1===a instanceof Array&&(a=[a]);this.addShapeList(a,b);this.computeFaceNormals()};THREE.ShapeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ShapeGeometry.prototype.constructor=THREE.ShapeGeometry;THREE.ShapeGeometry.prototype.addShapeList=function(a,b){for(var c=0,d=a.length;c<d;c++)this.addShape(a[c],b);return this};
+THREE.ShapeGeometry.prototype.addShape=function(a,b){void 0===b&&(b={});var c=b.material,d=void 0===b.UVGenerator?THREE.ExtrudeGeometry.WorldUVGenerator:b.UVGenerator,e,f,g,h=this.vertices.length;e=a.extractPoints(void 0!==b.curveSegments?b.curveSegments:12);var k=e.shape,l=e.holes;if(!THREE.Shape.Utils.isClockWise(k))for(k=k.reverse(),e=0,f=l.length;e<f;e++)g=l[e],THREE.Shape.Utils.isClockWise(g)&&(l[e]=g.reverse());var p=THREE.Shape.Utils.triangulateShape(k,l);e=0;for(f=l.length;e<f;e+ [...]
+k=k.concat(g);l=k.length;f=p.length;for(e=0;e<l;e++)g=k[e],this.vertices.push(new THREE.Vector3(g.x,g.y,0));for(e=0;e<f;e++)l=p[e],k=l[0]+h,g=l[1]+h,l=l[2]+h,this.faces.push(new THREE.Face3(k,g,l,null,null,c)),this.faceVertexUvs[0].push(d.generateTopUV(this,k,g,l))};
+THREE.LatheGeometry=function(a,b,c,d){THREE.Geometry.call(this);this.type="LatheGeometry";this.parameters={points:a,segments:b,phiStart:c,phiLength:d};b=b||12;c=c||0;d=d||2*Math.PI;for(var e=1/(a.length-1),f=1/b,g=0,h=b;g<=h;g++)for(var k=c+g*f*d,l=Math.cos(k),p=Math.sin(k),k=0,q=a.length;k<q;k++){var n=a[k],t=new THREE.Vector3;t.x=l*n.x-p*n.y;t.y=p*n.x+l*n.y;t.z=n.z;this.vertices.push(t)}c=a.length;g=0;for(h=b;g<h;g++)for(k=0,q=a.length-1;k<q;k++){b=p=k+c*g;d=p+c;var l=p+1+c,p=p+1,n=g*f [...]
+n+f,s=t+e;this.faces.push(new THREE.Face3(b,d,p));this.faceVertexUvs[0].push([new THREE.Vector2(n,t),new THREE.Vector2(r,t),new THREE.Vector2(n,s)]);this.faces.push(new THREE.Face3(d,l,p));this.faceVertexUvs[0].push([new THREE.Vector2(r,t),new THREE.Vector2(r,s),new THREE.Vector2(n,s)])}this.mergeVertices();this.computeFaceNormals();this.computeVertexNormals()};THREE.LatheGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.LatheGeometry.prototype.constructor=THREE.LatheGeometry;
+THREE.PlaneGeometry=function(a,b,c,d){console.info("THREE.PlaneGeometry: Consider using THREE.PlaneBufferGeometry for lower memory footprint.");THREE.Geometry.call(this);this.type="PlaneGeometry";this.parameters={width:a,height:b,widthSegments:c,heightSegments:d};this.fromBufferGeometry(new THREE.PlaneBufferGeometry(a,b,c,d))};THREE.PlaneGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.PlaneGeometry.prototype.constructor=THREE.PlaneGeometry;
+THREE.PlaneBufferGeometry=function(a,b,c,d){THREE.BufferGeometry.call(this);this.type="PlaneBufferGeometry";this.parameters={width:a,height:b,widthSegments:c,heightSegments:d};var e=a/2,f=b/2;c=c||1;d=d||1;var g=c+1,h=d+1,k=a/c,l=b/d;b=new Float32Array(g*h*3);a=new Float32Array(g*h*3);for(var p=new Float32Array(g*h*2),q=0,n=0,t=0;t<h;t++)for(var r=t*l-f,s=0;s<g;s++)b[q]=s*k-e,b[q+1]=-r,a[q+2]=1,p[n]=s/c,p[n+1]=1-t/d,q+=3,n+=2;q=0;e=new (65535<b.length/3?Uint32Array:Uint16Array)(c*d*6);fo [...]
+0;s<c;s++)f=s+g*(t+1),h=s+1+g*(t+1),k=s+1+g*t,e[q]=s+g*t,e[q+1]=f,e[q+2]=k,e[q+3]=f,e[q+4]=h,e[q+5]=k,q+=6;this.addAttribute("index",new THREE.BufferAttribute(e,1));this.addAttribute("position",new THREE.BufferAttribute(b,3));this.addAttribute("normal",new THREE.BufferAttribute(a,3));this.addAttribute("uv",new THREE.BufferAttribute(p,2))};THREE.PlaneBufferGeometry.prototype=Object.create(THREE.BufferGeometry.prototype);THREE.PlaneBufferGeometry.prototype.constructor=THREE.PlaneBufferGeometry;
+THREE.RingGeometry=function(a,b,c,d,e,f){THREE.Geometry.call(this);this.type="RingGeometry";this.parameters={innerRadius:a,outerRadius:b,thetaSegments:c,phiSegments:d,thetaStart:e,thetaLength:f};a=a||0;b=b||50;e=void 0!==e?e:0;f=void 0!==f?f:2*Math.PI;c=void 0!==c?Math.max(3,c):8;d=void 0!==d?Math.max(1,d):8;var g,h=[],k=a,l=(b-a)/d;for(a=0;a<d+1;a++){for(g=0;g<c+1;g++){var p=new THREE.Vector3,q=e+g/c*f;p.x=k*Math.cos(q);p.y=k*Math.sin(q);this.vertices.push(p);h.push(new THREE.Vector2((p [...]
+(p.y/b+1)/2))}k+=l}b=new THREE.Vector3(0,0,1);for(a=0;a<d;a++)for(e=a*(c+1),g=0;g<c;g++)f=q=g+e,l=q+c+1,p=q+c+2,this.faces.push(new THREE.Face3(f,l,p,[b.clone(),b.clone(),b.clone()])),this.faceVertexUvs[0].push([h[f].clone(),h[l].clone(),h[p].clone()]),f=q,l=q+c+2,p=q+1,this.faces.push(new THREE.Face3(f,l,p,[b.clone(),b.clone(),b.clone()])),this.faceVertexUvs[0].push([h[f].clone(),h[l].clone(),h[p].clone()]);this.computeFaceNormals();this.boundingSphere=new THREE.Sphere(new THREE.Vector3,k)};
+THREE.RingGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.RingGeometry.prototype.constructor=THREE.RingGeometry;
+THREE.SphereGeometry=function(a,b,c,d,e,f,g){THREE.Geometry.call(this);this.type="SphereGeometry";this.parameters={radius:a,widthSegments:b,heightSegments:c,phiStart:d,phiLength:e,thetaStart:f,thetaLength:g};a=a||50;b=Math.max(3,Math.floor(b)||8);c=Math.max(2,Math.floor(c)||6);d=void 0!==d?d:0;e=void 0!==e?e:2*Math.PI;f=void 0!==f?f:0;g=void 0!==g?g:Math.PI;var h,k,l=[],p=[];for(k=0;k<=c;k++){var q=[],n=[];for(h=0;h<=b;h++){var t=h/b,r=k/c,s=new THREE.Vector3;s.x=-a*Math.cos(d+t*e)*Math. [...]
+s.y=a*Math.cos(f+r*g);s.z=a*Math.sin(d+t*e)*Math.sin(f+r*g);this.vertices.push(s);q.push(this.vertices.length-1);n.push(new THREE.Vector2(t,1-r))}l.push(q);p.push(n)}for(k=0;k<c;k++)for(h=0;h<b;h++){d=l[k][h+1];e=l[k][h];f=l[k+1][h];g=l[k+1][h+1];var q=this.vertices[d].clone().normalize(),n=this.vertices[e].clone().normalize(),t=this.vertices[f].clone().normalize(),r=this.vertices[g].clone().normalize(),s=p[k][h+1].clone(),u=p[k][h].clone(),v=p[k+1][h].clone(),x=p[k+1][h+1].clone();Math. [...]
+a?(s.x=(s.x+u.x)/2,this.faces.push(new THREE.Face3(d,f,g,[q,t,r])),this.faceVertexUvs[0].push([s,v,x])):Math.abs(this.vertices[f].y)===a?(v.x=(v.x+x.x)/2,this.faces.push(new THREE.Face3(d,e,f,[q,n,t])),this.faceVertexUvs[0].push([s,u,v])):(this.faces.push(new THREE.Face3(d,e,g,[q,n,r])),this.faceVertexUvs[0].push([s,u,x]),this.faces.push(new THREE.Face3(e,f,g,[n.clone(),t,r.clone()])),this.faceVertexUvs[0].push([u.clone(),v,x.clone()]))}this.computeFaceNormals();this.boundingSphere=new T [...]
+a)};THREE.SphereGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.SphereGeometry.prototype.constructor=THREE.SphereGeometry;THREE.TextGeometry=function(a,b){b=b||{};var c=THREE.FontUtils.generateShapes(a,b);b.amount=void 0!==b.height?b.height:50;void 0===b.bevelThickness&&(b.bevelThickness=10);void 0===b.bevelSize&&(b.bevelSize=8);void 0===b.bevelEnabled&&(b.bevelEnabled=!1);THREE.ExtrudeGeometry.call(this,c,b);this.type="TextGeometry"};THREE.TextGeometry.prototype=Object. [...]
+THREE.TextGeometry.prototype.constructor=THREE.TextGeometry;
+THREE.TorusGeometry=function(a,b,c,d,e){THREE.Geometry.call(this);this.type="TorusGeometry";this.parameters={radius:a,tube:b,radialSegments:c,tubularSegments:d,arc:e};a=a||100;b=b||40;c=c||8;d=d||6;e=e||2*Math.PI;for(var f=new THREE.Vector3,g=[],h=[],k=0;k<=c;k++)for(var l=0;l<=d;l++){var p=l/d*e,q=k/c*Math.PI*2;f.x=a*Math.cos(p);f.y=a*Math.sin(p);var n=new THREE.Vector3;n.x=(a+b*Math.cos(q))*Math.cos(p);n.y=(a+b*Math.cos(q))*Math.sin(p);n.z=b*Math.sin(q);this.vertices.push(n);g.push(new [...]
+d,k/c));h.push(n.clone().sub(f).normalize())}for(k=1;k<=c;k++)for(l=1;l<=d;l++)a=(d+1)*k+l-1,b=(d+1)*(k-1)+l-1,e=(d+1)*(k-1)+l,f=(d+1)*k+l,p=new THREE.Face3(a,b,f,[h[a].clone(),h[b].clone(),h[f].clone()]),this.faces.push(p),this.faceVertexUvs[0].push([g[a].clone(),g[b].clone(),g[f].clone()]),p=new THREE.Face3(b,e,f,[h[b].clone(),h[e].clone(),h[f].clone()]),this.faces.push(p),this.faceVertexUvs[0].push([g[b].clone(),g[e].clone(),g[f].clone()]);this.computeFaceNormals()};
+THREE.TorusGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.TorusGeometry.prototype.constructor=THREE.TorusGeometry;
+THREE.TorusKnotGeometry=function(a,b,c,d,e,f,g){function h(a,b,c,d,e){var f=Math.cos(a),g=Math.sin(a);a*=b/c;b=Math.cos(a);f*=d*(2+b)*.5;g=d*(2+b)*g*.5;d=e*d*Math.sin(a)*.5;return new THREE.Vector3(f,g,d)}THREE.Geometry.call(this);this.type="TorusKnotGeometry";this.parameters={radius:a,tube:b,radialSegments:c,tubularSegments:d,p:e,q:f,heightScale:g};a=a||100;b=b||40;c=c||64;d=d||8;e=e||2;f=f||3;g=g||1;for(var k=Array(c),l=new THREE.Vector3,p=new THREE.Vector3,q=new THREE.Vector3,n=0;n<c; [...]
+Array(d);var t=n/c*2*e*Math.PI,r=h(t,f,e,a,g),t=h(t+.01,f,e,a,g);l.subVectors(t,r);p.addVectors(t,r);q.crossVectors(l,p);p.crossVectors(q,l);q.normalize();p.normalize();for(t=0;t<d;++t){var s=t/d*2*Math.PI,u=-b*Math.cos(s),s=b*Math.sin(s),v=new THREE.Vector3;v.x=r.x+u*p.x+s*q.x;v.y=r.y+u*p.y+s*q.y;v.z=r.z+u*p.z+s*q.z;k[n][t]=this.vertices.push(v)-1}}for(n=0;n<c;++n)for(t=0;t<d;++t)e=(n+1)%c,f=(t+1)%d,a=k[n][t],b=k[e][t],e=k[e][f],f=k[n][f],g=new THREE.Vector2(n/c,t/d),l=new THREE.Vector2 [...]
+t/d),p=new THREE.Vector2((n+1)/c,(t+1)/d),q=new THREE.Vector2(n/c,(t+1)/d),this.faces.push(new THREE.Face3(a,b,f)),this.faceVertexUvs[0].push([g,l,q]),this.faces.push(new THREE.Face3(b,e,f)),this.faceVertexUvs[0].push([l.clone(),p,q.clone()]);this.computeFaceNormals();this.computeVertexNormals()};THREE.TorusKnotGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.TorusKnotGeometry.prototype.constructor=THREE.TorusKnotGeometry;
+THREE.TubeGeometry=function(a,b,c,d,e,f){THREE.Geometry.call(this);this.type="TubeGeometry";this.parameters={path:a,segments:b,radius:c,radialSegments:d,closed:e};b=b||64;c=c||1;d=d||8;e=e||!1;f=f||THREE.TubeGeometry.NoTaper;var g=[],h,k,l=b+1,p,q,n,t,r,s=new THREE.Vector3,u,v,x;u=new THREE.TubeGeometry.FrenetFrames(a,b,e);v=u.normals;x=u.binormals;this.tangents=u.tangents;this.normals=v;this.binormals=x;for(u=0;u<l;u++)for(g[u]=[],p=u/(l-1),r=a.getPointAt(p),h=v[u],k=x[u],n=c*f(p),p=0;p [...]
+p/d*2*Math.PI,t=-n*Math.cos(q),q=n*Math.sin(q),s.copy(r),s.x+=t*h.x+q*k.x,s.y+=t*h.y+q*k.y,s.z+=t*h.z+q*k.z,g[u][p]=this.vertices.push(new THREE.Vector3(s.x,s.y,s.z))-1;for(u=0;u<b;u++)for(p=0;p<d;p++)f=e?(u+1)%b:u+1,l=(p+1)%d,a=g[u][p],c=g[f][p],f=g[f][l],l=g[u][l],s=new THREE.Vector2(u/b,p/d),v=new THREE.Vector2((u+1)/b,p/d),x=new THREE.Vector2((u+1)/b,(p+1)/d),h=new THREE.Vector2(u/b,(p+1)/d),this.faces.push(new THREE.Face3(a,c,l)),this.faceVertexUvs[0].push([s,v,h]),this.faces.push(n [...]
+f,l)),this.faceVertexUvs[0].push([v.clone(),x,h.clone()]);this.computeFaceNormals();this.computeVertexNormals()};THREE.TubeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.TubeGeometry.prototype.constructor=THREE.TubeGeometry;THREE.TubeGeometry.NoTaper=function(a){return 1};THREE.TubeGeometry.SinusoidalTaper=function(a){return Math.sin(Math.PI*a)};
+THREE.TubeGeometry.FrenetFrames=function(a,b,c){var d=new THREE.Vector3,e=[],f=[],g=[],h=new THREE.Vector3,k=new THREE.Matrix4;b+=1;var l,p,q;this.tangents=e;this.normals=f;this.binormals=g;for(l=0;l<b;l++)p=l/(b-1),e[l]=a.getTangentAt(p),e[l].normalize();f[0]=new THREE.Vector3;g[0]=new THREE.Vector3;a=Number.MAX_VALUE;l=Math.abs(e[0].x);p=Math.abs(e[0].y);q=Math.abs(e[0].z);l<=a&&(a=l,d.set(1,0,0));p<=a&&(a=p,d.set(0,1,0));q<=a&&d.set(0,0,1);h.crossVectors(e[0],d).normalize();f[0].cross [...]
+h);g[0].crossVectors(e[0],f[0]);for(l=1;l<b;l++)f[l]=f[l-1].clone(),g[l]=g[l-1].clone(),h.crossVectors(e[l-1],e[l]),1E-4<h.length()&&(h.normalize(),d=Math.acos(THREE.Math.clamp(e[l-1].dot(e[l]),-1,1)),f[l].applyMatrix4(k.makeRotationAxis(h,d))),g[l].crossVectors(e[l],f[l]);if(c)for(d=Math.acos(THREE.Math.clamp(f[0].dot(f[b-1]),-1,1)),d/=b-1,0<e[0].dot(h.crossVectors(f[0],f[b-1]))&&(d=-d),l=1;l<b;l++)f[l].applyMatrix4(k.makeRotationAxis(e[l],d*l)),g[l].crossVectors(e[l],f[l])};
+THREE.PolyhedronGeometry=function(a,b,c,d){function e(a){var b=a.normalize().clone();b.index=k.vertices.push(b)-1;var c=Math.atan2(a.z,-a.x)/2/Math.PI+.5;a=Math.atan2(-a.y,Math.sqrt(a.x*a.x+a.z*a.z))/Math.PI+.5;b.uv=new THREE.Vector2(c,1-a);return b}function f(a,b,c){var d=new THREE.Face3(a.index,b.index,c.index,[a.clone(),b.clone(),c.clone()]);k.faces.push(d);u.copy(a).add(b).add(c).divideScalar(3);d=Math.atan2(u.z,-u.x);k.faceVertexUvs[0].push([h(a.uv,a,d),h(b.uv,b,d),h(c.uv,c,d)])}fun [...]
+b){for(var c=Math.pow(2,b),d=e(k.vertices[a.a]),g=e(k.vertices[a.b]),h=e(k.vertices[a.c]),l=[],n=0;n<=c;n++){l[n]=[];for(var p=e(d.clone().lerp(h,n/c)),q=e(g.clone().lerp(h,n/c)),s=c-n,r=0;r<=s;r++)l[n][r]=0==r&&n==c?p:e(p.clone().lerp(q,r/s))}for(n=0;n<c;n++)for(r=0;r<2*(c-n)-1;r++)d=Math.floor(r/2),0==r%2?f(l[n][d+1],l[n+1][d],l[n][d]):f(l[n][d+1],l[n+1][d+1],l[n+1][d])}function h(a,b,c){0>c&&1===a.x&&(a=new THREE.Vector2(a.x-1,a.y));0===b.x&&0===b.z&&(a=new THREE.Vector2(c/2/Math.PI+. [...]
+THREE.Geometry.call(this);this.type="PolyhedronGeometry";this.parameters={vertices:a,indices:b,radius:c,detail:d};c=c||1;d=d||0;for(var k=this,l=0,p=a.length;l<p;l+=3)e(new THREE.Vector3(a[l],a[l+1],a[l+2]));a=this.vertices;for(var q=[],n=l=0,p=b.length;l<p;l+=3,n++){var t=a[b[l]],r=a[b[l+1]],s=a[b[l+2]];q[n]=new THREE.Face3(t.index,r.index,s.index,[t.clone(),r.clone(),s.clone()])}for(var u=new THREE.Vector3,l=0,p=q.length;l<p;l++)g(q[l],d);l=0;for(p=this.faceVertexUvs[0].length;l<p;l++) [...]
+d=b[0].x,a=b[1].x,q=b[2].x,n=Math.max(d,Math.max(a,q)),t=Math.min(d,Math.min(a,q)),.9<n&&.1>t&&(.2>d&&(b[0].x+=1),.2>a&&(b[1].x+=1),.2>q&&(b[2].x+=1));l=0;for(p=this.vertices.length;l<p;l++)this.vertices[l].multiplyScalar(c);this.mergeVertices();this.computeFaceNormals();this.boundingSphere=new THREE.Sphere(new THREE.Vector3,c)};THREE.PolyhedronGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.PolyhedronGeometry.prototype.constructor=THREE.PolyhedronGeometry;
+THREE.DodecahedronGeometry=function(a,b){this.parameters={radius:a,detail:b};var c=(1+Math.sqrt(5))/2,d=1/c;THREE.PolyhedronGeometry.call(this,[-1,-1,-1,-1,-1,1,-1,1,-1,-1,1,1,1,-1,-1,1,-1,1,1,1,-1,1,1,1,0,-d,-c,0,-d,c,0,d,-c,0,d,c,-d,-c,0,-d,c,0,d,-c,0,d,c,0,-c,0,-d,c,0,-d,-c,0,d,c,0,d],[3,11,7,3,7,15,3,15,13,7,19,17,7,17,6,7,6,15,17,4,8,17,8,10,17,10,6,8,0,16,8,16,2,8,2,10,0,12,1,0,1,18,0,18,16,6,10,2,6,2,13,6,13,15,2,16,18,2,18,3,2,3,13,18,1,9,18,9,11,18,11,3,4,14,12,4,12,0,4,0,8,11,9 [...]
+11,19,7,19,5,14,19,14,4,19,4,17,1,12,14,1,14,5,1,5,9],a,b)};THREE.DodecahedronGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.DodecahedronGeometry.prototype.constructor=THREE.DodecahedronGeometry;
+THREE.IcosahedronGeometry=function(a,b){var c=(1+Math.sqrt(5))/2;THREE.PolyhedronGeometry.call(this,[-1,c,0,1,c,0,-1,-c,0,1,-c,0,0,-1,c,0,1,c,0,-1,-c,0,1,-c,c,0,-1,c,0,1,-c,0,-1,-c,0,1],[0,11,5,0,5,1,0,1,7,0,7,10,0,10,11,1,5,9,5,11,4,11,10,2,10,7,6,7,1,8,3,9,4,3,4,2,3,2,6,3,6,8,3,8,9,4,9,5,2,4,11,6,2,10,8,6,7,9,8,1],a,b);this.type="IcosahedronGeometry";this.parameters={radius:a,detail:b}};THREE.IcosahedronGeometry.prototype=Object.create(THREE.Geometry.prototype);
+THREE.IcosahedronGeometry.prototype.constructor=THREE.IcosahedronGeometry;THREE.OctahedronGeometry=function(a,b){this.parameters={radius:a,detail:b};THREE.PolyhedronGeometry.call(this,[1,0,0,-1,0,0,0,1,0,0,-1,0,0,0,1,0,0,-1],[0,2,4,0,4,3,0,3,5,0,5,2,1,2,5,1,5,3,1,3,4,1,4,2],a,b);this.type="OctahedronGeometry";this.parameters={radius:a,detail:b}};THREE.OctahedronGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.OctahedronGeometry.prototype.constructor=THREE.OctahedronGeometry;
+THREE.TetrahedronGeometry=function(a,b){THREE.PolyhedronGeometry.call(this,[1,1,1,-1,-1,1,-1,1,-1,1,-1,-1],[2,1,0,0,3,2,1,3,0,2,3,1],a,b);this.type="TetrahedronGeometry";this.parameters={radius:a,detail:b}};THREE.TetrahedronGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.TetrahedronGeometry.prototype.constructor=THREE.TetrahedronGeometry;
+THREE.ParametricGeometry=function(a,b,c){THREE.Geometry.call(this);this.type="ParametricGeometry";this.parameters={func:a,slices:b,stacks:c};var d=this.vertices,e=this.faces,f=this.faceVertexUvs[0],g,h,k,l,p=b+1;for(g=0;g<=c;g++)for(l=g/c,h=0;h<=b;h++)k=h/b,k=a(k,l),d.push(k);var q,n,t,r;for(g=0;g<c;g++)for(h=0;h<b;h++)a=g*p+h,d=g*p+h+1,l=(g+1)*p+h+1,k=(g+1)*p+h,q=new THREE.Vector2(h/b,g/c),n=new THREE.Vector2((h+1)/b,g/c),t=new THREE.Vector2((h+1)/b,(g+1)/c),r=new THREE.Vector2(h/b,(g+1 [...]
+d,k)),f.push([q,n,r]),e.push(new THREE.Face3(d,l,k)),f.push([n.clone(),t,r.clone()]);this.computeFaceNormals();this.computeVertexNormals()};THREE.ParametricGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ParametricGeometry.prototype.constructor=THREE.ParametricGeometry;
+THREE.AxisHelper=function(a){a=a||1;var b=new Float32Array([0,0,0,a,0,0,0,0,0,0,a,0,0,0,0,0,0,a]),c=new Float32Array([1,0,0,1,.6,0,0,1,0,.6,1,0,0,0,1,0,.6,1]);a=new THREE.BufferGeometry;a.addAttribute("position",new THREE.BufferAttribute(b,3));a.addAttribute("color",new THREE.BufferAttribute(c,3));b=new THREE.LineBasicMaterial({vertexColors:THREE.VertexColors});THREE.Line.call(this,a,b,THREE.LinePieces)};THREE.AxisHelper.prototype=Object.create(THREE.Line.prototype);
+THREE.AxisHelper.prototype.constructor=THREE.AxisHelper;
+THREE.ArrowHelper=function(){var a=new THREE.Geometry;a.vertices.push(new THREE.Vector3(0,0,0),new THREE.Vector3(0,1,0));var b=new THREE.CylinderGeometry(0,.5,1,5,1);b.applyMatrix((new THREE.Matrix4).makeTranslation(0,-.5,0));return function(c,d,e,f,g,h){THREE.Object3D.call(this);void 0===f&&(f=16776960);void 0===e&&(e=1);void 0===g&&(g=.2*e);void 0===h&&(h=.2*g);this.position.copy(d);this.line=new THREE.Line(a,new THREE.LineBasicMaterial({color:f}));this.line.matrixAutoUpdate=!1;this.ad [...]
+this.cone=new THREE.Mesh(b,new THREE.MeshBasicMaterial({color:f}));this.cone.matrixAutoUpdate=!1;this.add(this.cone);this.setDirection(c);this.setLength(e,g,h)}}();THREE.ArrowHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.ArrowHelper.prototype.constructor=THREE.ArrowHelper;
+THREE.ArrowHelper.prototype.setDirection=function(){var a=new THREE.Vector3,b;return function(c){.99999<c.y?this.quaternion.set(0,0,0,1):-.99999>c.y?this.quaternion.set(1,0,0,0):(a.set(c.z,0,-c.x).normalize(),b=Math.acos(c.y),this.quaternion.setFromAxisAngle(a,b))}}();THREE.ArrowHelper.prototype.setLength=function(a,b,c){void 0===b&&(b=.2*a);void 0===c&&(c=.2*b);this.line.scale.set(1,a-b,1);this.line.updateMatrix();this.cone.scale.set(c,b,c);this.cone.position.y=a;this.cone.updateMatrix()};
+THREE.ArrowHelper.prototype.setColor=function(a){this.line.material.color.set(a);this.cone.material.color.set(a)};THREE.BoxHelper=function(a){var b=new THREE.BufferGeometry;b.addAttribute("position",new THREE.BufferAttribute(new Float32Array(72),3));THREE.Line.call(this,b,new THREE.LineBasicMaterial({color:16776960}),THREE.LinePieces);void 0!==a&&this.update(a)};THREE.BoxHelper.prototype=Object.create(THREE.Line.prototype);THREE.BoxHelper.prototype.constructor=THREE.BoxHelper;
+THREE.BoxHelper.prototype.update=function(a){var b=a.geometry;null===b.boundingBox&&b.computeBoundingBox();var c=b.boundingBox.min,b=b.boundingBox.max,d=this.geometry.attributes.position.array;d[0]=b.x;d[1]=b.y;d[2]=b.z;d[3]=c.x;d[4]=b.y;d[5]=b.z;d[6]=c.x;d[7]=b.y;d[8]=b.z;d[9]=c.x;d[10]=c.y;d[11]=b.z;d[12]=c.x;d[13]=c.y;d[14]=b.z;d[15]=b.x;d[16]=c.y;d[17]=b.z;d[18]=b.x;d[19]=c.y;d[20]=b.z;d[21]=b.x;d[22]=b.y;d[23]=b.z;d[24]=b.x;d[25]=b.y;d[26]=c.z;d[27]=c.x;d[28]=b.y;d[29]=c.z;d[30]=c.x [...]
+d[32]=c.z;d[33]=c.x;d[34]=c.y;d[35]=c.z;d[36]=c.x;d[37]=c.y;d[38]=c.z;d[39]=b.x;d[40]=c.y;d[41]=c.z;d[42]=b.x;d[43]=c.y;d[44]=c.z;d[45]=b.x;d[46]=b.y;d[47]=c.z;d[48]=b.x;d[49]=b.y;d[50]=b.z;d[51]=b.x;d[52]=b.y;d[53]=c.z;d[54]=c.x;d[55]=b.y;d[56]=b.z;d[57]=c.x;d[58]=b.y;d[59]=c.z;d[60]=c.x;d[61]=c.y;d[62]=b.z;d[63]=c.x;d[64]=c.y;d[65]=c.z;d[66]=b.x;d[67]=c.y;d[68]=b.z;d[69]=b.x;d[70]=c.y;d[71]=c.z;this.geometry.attributes.position.needsUpdate=!0;this.geometry.computeBoundingSphere();this. [...]
+this.matrixAutoUpdate=!1};THREE.BoundingBoxHelper=function(a,b){var c=void 0!==b?b:8947848;this.object=a;this.box=new THREE.Box3;THREE.Mesh.call(this,new THREE.BoxGeometry(1,1,1),new THREE.MeshBasicMaterial({color:c,wireframe:!0}))};THREE.BoundingBoxHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.BoundingBoxHelper.prototype.constructor=THREE.BoundingBoxHelper;THREE.BoundingBoxHelper.prototype.update=function(){this.box.setFromObject(this.object);this.box.size(this.scale);this. [...]
+THREE.CameraHelper=function(a){function b(a,b,d){c(a,d);c(b,d)}function c(a,b){d.vertices.push(new THREE.Vector3);d.colors.push(new THREE.Color(b));void 0===f[a]&&(f[a]=[]);f[a].push(d.vertices.length-1)}var d=new THREE.Geometry,e=new THREE.LineBasicMaterial({color:16777215,vertexColors:THREE.FaceColors}),f={};b("n1","n2",16755200);b("n2","n4",16755200);b("n4","n3",16755200);b("n3","n1",16755200);b("f1","f2",16755200);b("f2","f4",16755200);b("f4","f3",16755200);b("f3","f1",16755200);b("n [...]
+b("n2","f2",16755200);b("n3","f3",16755200);b("n4","f4",16755200);b("p","n1",16711680);b("p","n2",16711680);b("p","n3",16711680);b("p","n4",16711680);b("u1","u2",43775);b("u2","u3",43775);b("u3","u1",43775);b("c","t",16777215);b("p","c",3355443);b("cn1","cn2",3355443);b("cn3","cn4",3355443);b("cf1","cf2",3355443);b("cf3","cf4",3355443);THREE.Line.call(this,d,e,THREE.LinePieces);this.camera=a;this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;this.pointMap=f;this.update()};
+THREE.CameraHelper.prototype=Object.create(THREE.Line.prototype);THREE.CameraHelper.prototype.constructor=THREE.CameraHelper;
+THREE.CameraHelper.prototype.update=function(){var a,b,c=new THREE.Vector3,d=new THREE.Camera,e=function(e,g,h,k){c.set(g,h,k).unproject(d);e=b[e];if(void 0!==e)for(g=0,h=e.length;g<h;g++)a.vertices[e[g]].copy(c)};return function(){a=this.geometry;b=this.pointMap;d.projectionMatrix.copy(this.camera.projectionMatrix);e("c",0,0,-1);e("t",0,0,1);e("n1",-1,-1,-1);e("n2",1,-1,-1);e("n3",-1,1,-1);e("n4",1,1,-1);e("f1",-1,-1,1);e("f2",1,-1,1);e("f3",-1,1,1);e("f4",1,1,1);e("u1",.7,1.1,-1);e("u2 [...]
+-1);e("u3",0,2,-1);e("cf1",-1,0,1);e("cf2",1,0,1);e("cf3",0,-1,1);e("cf4",0,1,1);e("cn1",-1,0,-1);e("cn2",1,0,-1);e("cn3",0,-1,-1);e("cn4",0,1,-1);a.verticesNeedUpdate=!0}}();
+THREE.DirectionalLightHelper=function(a,b){THREE.Object3D.call(this);this.light=a;this.light.updateMatrixWorld();this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;b=b||1;var c=new THREE.Geometry;c.vertices.push(new THREE.Vector3(-b,b,0),new THREE.Vector3(b,b,0),new THREE.Vector3(b,-b,0),new THREE.Vector3(-b,-b,0),new THREE.Vector3(-b,b,0));var d=new THREE.LineBasicMaterial({fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);this.lightPlane=new THREE.Line(c,d);t [...]
+c=new THREE.Geometry;c.vertices.push(new THREE.Vector3,new THREE.Vector3);d=new THREE.LineBasicMaterial({fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);this.targetLine=new THREE.Line(c,d);this.add(this.targetLine);this.update()};THREE.DirectionalLightHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.DirectionalLightHelper.prototype.constructor=THREE.DirectionalLightHelper;
+THREE.DirectionalLightHelper.prototype.dispose=function(){this.lightPlane.geometry.dispose();this.lightPlane.material.dispose();this.targetLine.geometry.dispose();this.targetLine.material.dispose()};
+THREE.DirectionalLightHelper.prototype.update=function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3;return function(){a.setFromMatrixPosition(this.light.matrixWorld);b.setFromMatrixPosition(this.light.target.matrixWorld);c.subVectors(b,a);this.lightPlane.lookAt(c);this.lightPlane.material.color.copy(this.light.color).multiplyScalar(this.light.intensity);this.targetLine.geometry.vertices[1].copy(c);this.targetLine.geometry.verticesNeedUpdate=!0;this.targetLine.materia [...]
+THREE.EdgesHelper=function(a,b,c){b=void 0!==b?b:16777215;c=Math.cos(THREE.Math.degToRad(void 0!==c?c:1));var d=[0,0],e={},f=function(a,b){return a-b},g=["a","b","c"],h=new THREE.BufferGeometry,k;a.geometry instanceof THREE.BufferGeometry?(k=new THREE.Geometry,k.fromBufferGeometry(a.geometry)):k=a.geometry.clone();k.mergeVertices();k.computeFaceNormals();var l=k.vertices;k=k.faces;for(var p=0,q=0,n=k.length;q<n;q++)for(var t=k[q],r=0;3>r;r++){d[0]=t[g[r]];d[1]=t[g[(r+1)%3]];d.sort(f);var [...]
+void 0===e[s]?(e[s]={vert1:d[0],vert2:d[1],face1:q,face2:void 0},p++):e[s].face2=q}d=new Float32Array(6*p);f=0;for(s in e)if(g=e[s],void 0===g.face2||k[g.face1].normal.dot(k[g.face2].normal)<=c)p=l[g.vert1],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z,p=l[g.vert2],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3));THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:b}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};
+THREE.EdgesHelper.prototype=Object.create(THREE.Line.prototype);THREE.EdgesHelper.prototype.constructor=THREE.EdgesHelper;
+THREE.FaceNormalsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;a=void 0!==c?c:16776960;d=void 0!==d?d:1;b=new THREE.Geometry;c=0;for(var e=this.object.geometry.faces.length;c<e;c++)b.vertices.push(new THREE.Vector3,new THREE.Vector3);THREE.Line.call(this,b,new THREE.LineBasicMaterial({color:a,linewidth:d}),THREE.LinePieces);this.matrixAutoUpdate=!1;this.normalMatrix=new THREE.Matrix3;this.update()};THREE.FaceNormalsHelper.prototype=Object.create(THREE.Line.prototype);
+THREE.FaceNormalsHelper.prototype.constructor=THREE.FaceNormalsHelper;
+THREE.FaceNormalsHelper.prototype.update=function(){var a=this.geometry.vertices,b=this.object,c=b.geometry.vertices,d=b.geometry.faces,e=b.matrixWorld;b.updateMatrixWorld(!0);this.normalMatrix.getNormalMatrix(e);for(var f=b=0,g=d.length;b<g;b++,f+=2){var h=d[b];a[f].copy(c[h.a]).add(c[h.b]).add(c[h.c]).divideScalar(3).applyMatrix4(e);a[f+1].copy(h.normal).applyMatrix3(this.normalMatrix).normalize().multiplyScalar(this.size).add(a[f])}this.geometry.verticesNeedUpdate=!0;return this};
+THREE.GridHelper=function(a,b){var c=new THREE.Geometry,d=new THREE.LineBasicMaterial({vertexColors:THREE.VertexColors});this.color1=new THREE.Color(4473924);this.color2=new THREE.Color(8947848);for(var e=-a;e<=a;e+=b){c.vertices.push(new THREE.Vector3(-a,0,e),new THREE.Vector3(a,0,e),new THREE.Vector3(e,0,-a),new THREE.Vector3(e,0,a));var f=0===e?this.color1:this.color2;c.colors.push(f,f,f,f)}THREE.Line.call(this,c,d,THREE.LinePieces)};THREE.GridHelper.prototype=Object.create(THREE.Line [...]
+THREE.GridHelper.prototype.constructor=THREE.GridHelper;THREE.GridHelper.prototype.setColors=function(a,b){this.color1.set(a);this.color2.set(b);this.geometry.colorsNeedUpdate=!0};
+THREE.HemisphereLightHelper=function(a,b){THREE.Object3D.call(this);this.light=a;this.light.updateMatrixWorld();this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;this.colors=[new THREE.Color,new THREE.Color];var c=new THREE.SphereGeometry(b,4,2);c.applyMatrix((new THREE.Matrix4).makeRotationX(-Math.PI/2));for(var d=0;8>d;d++)c.faces[d].color=this.colors[4>d?0:1];d=new THREE.MeshBasicMaterial({vertexColors:THREE.FaceColors,wireframe:!0});this.lightSphere=new THREE.Mesh(c,d);this.add(this [...]
+this.update()};THREE.HemisphereLightHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.HemisphereLightHelper.prototype.constructor=THREE.HemisphereLightHelper;THREE.HemisphereLightHelper.prototype.dispose=function(){this.lightSphere.geometry.dispose();this.lightSphere.material.dispose()};
+THREE.HemisphereLightHelper.prototype.update=function(){var a=new THREE.Vector3;return function(){this.colors[0].copy(this.light.color).multiplyScalar(this.light.intensity);this.colors[1].copy(this.light.groundColor).multiplyScalar(this.light.intensity);this.lightSphere.lookAt(a.setFromMatrixPosition(this.light.matrixWorld).negate());this.lightSphere.geometry.colorsNeedUpdate=!0}}();
+THREE.PointLightHelper=function(a,b){this.light=a;this.light.updateMatrixWorld();var c=new THREE.SphereGeometry(b,4,2),d=new THREE.MeshBasicMaterial({wireframe:!0,fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);THREE.Mesh.call(this,c,d);this.matrix=this.light.matrixWorld;this.matrixAutoUpdate=!1};THREE.PointLightHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.PointLightHelper.prototype.constructor=THREE.PointLightHelper;
+THREE.PointLightHelper.prototype.dispose=function(){this.geometry.dispose();this.material.dispose()};THREE.PointLightHelper.prototype.update=function(){this.material.color.copy(this.light.color).multiplyScalar(this.light.intensity)};
+THREE.SkeletonHelper=function(a){this.bones=this.getBoneList(a);for(var b=new THREE.Geometry,c=0;c<this.bones.length;c++)this.bones[c].parent instanceof THREE.Bone&&(b.vertices.push(new THREE.Vector3),b.vertices.push(new THREE.Vector3),b.colors.push(new THREE.Color(0,0,1)),b.colors.push(new THREE.Color(0,1,0)));c=new THREE.LineBasicMaterial({vertexColors:THREE.VertexColors,depthTest:!1,depthWrite:!1,transparent:!0});THREE.Line.call(this,b,c,THREE.LinePieces);this.root=a;this.matrix=a.mat [...]
+this.matrixAutoUpdate=!1;this.update()};THREE.SkeletonHelper.prototype=Object.create(THREE.Line.prototype);THREE.SkeletonHelper.prototype.constructor=THREE.SkeletonHelper;THREE.SkeletonHelper.prototype.getBoneList=function(a){var b=[];a instanceof THREE.Bone&&b.push(a);for(var c=0;c<a.children.length;c++)b.push.apply(b,this.getBoneList(a.children[c]));return b};
+THREE.SkeletonHelper.prototype.update=function(){for(var a=this.geometry,b=(new THREE.Matrix4).getInverse(this.root.matrixWorld),c=new THREE.Matrix4,d=0,e=0;e<this.bones.length;e++){var f=this.bones[e];f.parent instanceof THREE.Bone&&(c.multiplyMatrices(b,f.matrixWorld),a.vertices[d].setFromMatrixPosition(c),c.multiplyMatrices(b,f.parent.matrixWorld),a.vertices[d+1].setFromMatrixPosition(c),d+=2)}a.verticesNeedUpdate=!0;a.computeBoundingSphere()};
+THREE.SpotLightHelper=function(a){THREE.Object3D.call(this);this.light=a;this.light.updateMatrixWorld();this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;a=new THREE.CylinderGeometry(0,1,1,8,1,!0);a.applyMatrix((new THREE.Matrix4).makeTranslation(0,-.5,0));a.applyMatrix((new THREE.Matrix4).makeRotationX(-Math.PI/2));var b=new THREE.MeshBasicMaterial({wireframe:!0,fog:!1});this.cone=new THREE.Mesh(a,b);this.add(this.cone);this.update()};THREE.SpotLightHelper.prototype=Object.create(THREE [...]
+THREE.SpotLightHelper.prototype.constructor=THREE.SpotLightHelper;THREE.SpotLightHelper.prototype.dispose=function(){this.cone.geometry.dispose();this.cone.material.dispose()};
+THREE.SpotLightHelper.prototype.update=function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(){var c=this.light.distance?this.light.distance:1E4,d=c*Math.tan(this.light.angle);this.cone.scale.set(d,d,c);a.setFromMatrixPosition(this.light.matrixWorld);b.setFromMatrixPosition(this.light.target.matrixWorld);this.cone.lookAt(b.sub(a));this.cone.material.color.copy(this.light.color).multiplyScalar(this.light.intensity)}}();
+THREE.VertexNormalsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;b=void 0!==c?c:16711680;d=void 0!==d?d:1;c=new THREE.Geometry;a=a.geometry.faces;for(var e=0,f=a.length;e<f;e++)for(var g=0,h=a[e].vertexNormals.length;g<h;g++)c.vertices.push(new THREE.Vector3,new THREE.Vector3);THREE.Line.call(this,c,new THREE.LineBasicMaterial({color:b,linewidth:d}),THREE.LinePieces);this.matrixAutoUpdate=!1;this.normalMatrix=new THREE.Matrix3;this.update()};THREE.VertexNormalsHelper.pr [...]
+THREE.VertexNormalsHelper.prototype.constructor=THREE.VertexNormalsHelper;
+THREE.VertexNormalsHelper.prototype.update=function(a){var b=new THREE.Vector3;return function(a){a=["a","b","c","d"];this.object.updateMatrixWorld(!0);this.normalMatrix.getNormalMatrix(this.object.matrixWorld);for(var d=this.geometry.vertices,e=this.object.geometry.vertices,f=this.object.geometry.faces,g=this.object.matrixWorld,h=0,k=0,l=f.length;k<l;k++)for(var p=f[k],q=0,n=p.vertexNormals.length;q<n;q++){var t=p.vertexNormals[q];d[h].copy(e[p[a[q]]]).applyMatrix4(g);b.copy(t).applyMat [...]
+b.add(d[h]);h+=1;d[h].copy(b);h+=1}this.geometry.verticesNeedUpdate=!0;return this}}();
+THREE.VertexTangentsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;b=void 0!==c?c:255;d=void 0!==d?d:1;c=new THREE.Geometry;a=a.geometry.faces;for(var e=0,f=a.length;e<f;e++)for(var g=0,h=a[e].vertexTangents.length;g<h;g++)c.vertices.push(new THREE.Vector3),c.vertices.push(new THREE.Vector3);THREE.Line.call(this,c,new THREE.LineBasicMaterial({color:b,linewidth:d}),THREE.LinePieces);this.matrixAutoUpdate=!1;this.update()};THREE.VertexTangentsHelper.prototype=Object.create [...]
+THREE.VertexTangentsHelper.prototype.constructor=THREE.VertexTangentsHelper;
+THREE.VertexTangentsHelper.prototype.update=function(a){var b=new THREE.Vector3;return function(a){a=["a","b","c","d"];this.object.updateMatrixWorld(!0);for(var d=this.geometry.vertices,e=this.object.geometry.vertices,f=this.object.geometry.faces,g=this.object.matrixWorld,h=0,k=0,l=f.length;k<l;k++)for(var p=f[k],q=0,n=p.vertexTangents.length;q<n;q++){var t=p.vertexTangents[q];d[h].copy(e[p[a[q]]]).applyMatrix4(g);b.copy(t).transformDirection(g).multiplyScalar(this.size);b.add(d[h]);h+=1 [...]
+h+=1}this.geometry.verticesNeedUpdate=!0;return this}}();
+THREE.WireframeHelper=function(a,b){var c=void 0!==b?b:16777215,d=[0,0],e={},f=function(a,b){return a-b},g=["a","b","c"],h=new THREE.BufferGeometry;if(a.geometry instanceof THREE.Geometry){for(var k=a.geometry.vertices,l=a.geometry.faces,p=0,q=new Uint32Array(6*l.length),n=0,t=l.length;n<t;n++)for(var r=l[n],s=0;3>s;s++){d[0]=r[g[s]];d[1]=r[g[(s+1)%3]];d.sort(f);var u=d.toString();void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++)}d=new Float32Array(6*p);n=0;for(t=p;n<t;n++)for(s=0;2 [...]
+k[q[2*n+s]],g=6*n+3*s,d[g+0]=p.x,d[g+1]=p.y,d[g+2]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3))}else if(a.geometry instanceof THREE.BufferGeometry){if(void 0!==a.geometry.attributes.index){k=a.geometry.attributes.position.array;t=a.geometry.attributes.index.array;l=a.geometry.drawcalls;p=0;0===l.length&&(l=[{count:t.length,index:0,start:0}]);for(var q=new Uint32Array(2*t.length),r=0,v=l.length;r<v;++r)for(var s=l[r].start,u=l[r].count,g=l[r].index,n=s,x=s+u;n<x;n+=3)for( [...]
+g+t[n+s],d[1]=g+t[n+(s+1)%3],d.sort(f),u=d.toString(),void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++);d=new Float32Array(6*p);n=0;for(t=p;n<t;n++)for(s=0;2>s;s++)g=6*n+3*s,p=3*q[2*n+s],d[g+0]=k[p],d[g+1]=k[p+1],d[g+2]=k[p+2]}else for(k=a.geometry.attributes.position.array,p=k.length/3,q=p/3,d=new Float32Array(6*p),n=0,t=q;n<t;n++)for(s=0;3>s;s++)g=18*n+6*s,q=9*n+3*s,d[g+0]=k[q],d[g+1]=k[q+1],d[g+2]=k[q+2],p=9*n+(s+1)%3*3,d[g+3]=k[p],d[g+4]=k[p+1],d[g+5]=k[p+2];h.addAttribute("posi [...]
+3))}THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.WireframeHelper.prototype=Object.create(THREE.Line.prototype);THREE.WireframeHelper.prototype.constructor=THREE.WireframeHelper;THREE.ImmediateRenderObject=function(){THREE.Object3D.call(this);this.render=function(a){}};THREE.ImmediateRenderObject.prototype=Object.create(THREE.Object3D.prototype);THREE.ImmediateRenderObject.prototype.constructor=TH [...]
+THREE.MorphBlendMesh=function(a,b){THREE.Mesh.call(this,a,b);this.animationsMap={};this.animationsList=[];var c=this.geometry.morphTargets.length;this.createAnimation("__default",0,c-1,c/1);this.setAnimationWeight("__default",1)};THREE.MorphBlendMesh.prototype=Object.create(THREE.Mesh.prototype);THREE.MorphBlendMesh.prototype.constructor=THREE.MorphBlendMesh;
+THREE.MorphBlendMesh.prototype.createAnimation=function(a,b,c,d){b={startFrame:b,endFrame:c,length:c-b+1,fps:d,duration:(c-b)/d,lastFrame:0,currentFrame:0,active:!1,time:0,direction:1,weight:1,directionBackwards:!1,mirroredLoop:!1};this.animationsMap[a]=b;this.animationsList.push(b)};
+THREE.MorphBlendMesh.prototype.autoCreateAnimations=function(a){for(var b=/([a-z]+)_?(\d+)/,c,d={},e=this.geometry,f=0,g=e.morphTargets.length;f<g;f++){var h=e.morphTargets[f].name.match(b);if(h&&1<h.length){var k=h[1];d[k]||(d[k]={start:Infinity,end:-Infinity});h=d[k];f<h.start&&(h.start=f);f>h.end&&(h.end=f);c||(c=k)}}for(k in d)h=d[k],this.createAnimation(k,h.start,h.end,a);this.firstAnimation=c};
+THREE.MorphBlendMesh.prototype.setAnimationDirectionForward=function(a){if(a=this.animationsMap[a])a.direction=1,a.directionBackwards=!1};THREE.MorphBlendMesh.prototype.setAnimationDirectionBackward=function(a){if(a=this.animationsMap[a])a.direction=-1,a.directionBackwards=!0};THREE.MorphBlendMesh.prototype.setAnimationFPS=function(a,b){var c=this.animationsMap[a];c&&(c.fps=b,c.duration=(c.end-c.start)/c.fps)};
+THREE.MorphBlendMesh.prototype.setAnimationDuration=function(a,b){var c=this.animationsMap[a];c&&(c.duration=b,c.fps=(c.end-c.start)/c.duration)};THREE.MorphBlendMesh.prototype.setAnimationWeight=function(a,b){var c=this.animationsMap[a];c&&(c.weight=b)};THREE.MorphBlendMesh.prototype.setAnimationTime=function(a,b){var c=this.animationsMap[a];c&&(c.time=b)};THREE.MorphBlendMesh.prototype.getAnimationTime=function(a){var b=0;if(a=this.animationsMap[a])b=a.time;return b};
+THREE.MorphBlendMesh.prototype.getAnimationDuration=function(a){var b=-1;if(a=this.animationsMap[a])b=a.duration;return b};THREE.MorphBlendMesh.prototype.playAnimation=function(a){var b=this.animationsMap[a];b?(b.time=0,b.active=!0):THREE.warn("THREE.MorphBlendMesh: animation["+a+"] undefined in .playAnimation()")};THREE.MorphBlendMesh.prototype.stopAnimation=function(a){if(a=this.animationsMap[a])a.active=!1};
+THREE.MorphBlendMesh.prototype.update=function(a){for(var b=0,c=this.animationsList.length;b<c;b++){var d=this.animationsList[b];if(d.active){var e=d.duration/d.length;d.time+=d.direction*a;if(d.mirroredLoop){if(d.time>d.duration||0>d.time)d.direction*=-1,d.time>d.duration&&(d.time=d.duration,d.directionBackwards=!0),0>d.time&&(d.time=0,d.directionBackwards=!1)}else d.time%=d.duration,0>d.time&&(d.time+=d.duration);var f=d.startFrame+THREE.Math.clamp(Math.floor(d.time/e),0,d.length-1),g= [...]
+f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}};
diff --git a/config/plugins/visualizations/csg/templates/csg.mako b/config/plugins/visualizations/csg/templates/csg.mako
new file mode 100644
index 0000000..d986a7f
--- /dev/null
+++ b/config/plugins/visualizations/csg/templates/csg.mako
@@ -0,0 +1,278 @@
+<%
+    root = h.url_for( "/" )
+    app_root = root + "plugins/visualizations/csg/static/"
+%>
+
+<!DOCTYPE HTML>
+<html>
+    <head>
+        <!-- CSG Viewer is a web application for 3D shape visualization. -->
+        <title>${hda.name} | ${visualization_name}</title>
+        ${h.javascript_link( app_root + 'dat.gui.min.js' )}
+        ${h.javascript_link( app_root + 'three.min.js' )}
+        ${h.javascript_link( app_root + 'Detector.js' )}
+        ${h.javascript_link( app_root + 'OrbitControls.js' )}
+        ${h.javascript_link( app_root + 'PLYLoader.js' )}
+        ${h.javascript_link( app_root + 'VTKLoader.js' )}
+    </head>
+    <body>
+        <!-- Div which will hold the Output -->
+        <div id="WebGL-output"></div>
+        <script type="text/javascript">
+
+            // Global variables
+            var container;
+            var scene = new THREE.Scene();
+            var renderer;
+            var controls;
+            var bbHelper;
+            var defaultBackgroundColor = 0x4d576b;
+
+            // Camera
+            var screenWidth = window.innerWidth;
+            var screenHeight = window.innerHeight;
+            var VIEW_ANGLE = 40;
+            var aspect = screenWidth / screenHeight;
+            var near = 1;
+            var far = 10000;
+            var camera = new THREE.PerspectiveCamera(VIEW_ANGLE, aspect, near, far);
+
+            init();
+            //animate();
+
+            function init() {
+                window.addEventListener('resize', onWindowResize, false);
+                // Color, near, far
+                scene.fog = new THREE.Fog(0x111111, 0.1, 1000);
+                // Data format and loader
+                var hdaExt  = '${hda.ext}';
+                if (hdaExt == 'plyascii' || hdaExt == 'plybinary') {
+                    // This returns THREE.Geometry()
+                    var loader = new THREE.PLYLoader();
+                } else {
+                    // This returns THREE.BufferGeometry()
+                    var loader = new THREE.VTKLoader();
+                }
+                loader.load("${h.url_for( controller='/datasets', action='index')}/${trans.security.encode_id( hda.id )}/display",
+                function (geometry) {
+                    var surface = new THREE.MeshPhongMaterial({shading: THREE.SmoothShading,
+                                                                side: THREE.DoubleSide,
+                                                                shininess: 100,
+                                                                emissive: 0x000000,
+                                                                specular: 0x111111,
+                                                                metal: false});
+                    var edges = new THREE.MeshBasicMaterial({color: 0x111111});
+                    geometry.receiveShadow = true;
+                    geometry.computeFaceNormals();
+                    // Normals may or may not have been set
+                    if ( geometry.type == "BufferGeometry" && ! geometry.getAttribute( 'normal' ) ) {
+                         geometry.computeVertexNormals();
+                    }
+                    var geometryHasColor = false;
+                    if ( geometry.type == "BufferGeometry" && geometry.getAttribute( 'color' ) ) {
+                        geometryHasColor = true;
+                        // Color vertices
+                        surface[ 'vertexColors' ] = THREE.VertexColors;
+                    } else if ( geometry.type == "Geometry" ) { 
+                        // A geometry implies colors
+                        geometryHasColor = true;
+                        // Color Faces
+                        surface[ 'vertexColors' ] = THREE.FaceColors;
+                    } else {
+                        // No color, use gui input
+                        surface[ 'color' ] = new THREE.Color( 0xAAAAAA );
+                        surface[ 'vertexColors' ] = THREE.NoColors;
+                    }
+                    var meshSurface = new THREE.Mesh(geometry, surface);
+                    scene.add(meshSurface);
+                    var mesh = new THREE.Mesh(geometry, surface);
+                    // Added to the scene on request (GUI "edges" toggle)
+                    var meshEdges = new THREE.EdgesHelper(mesh, 0x111111);
+                    // Define the BoundingBox
+                    bbHelper = new THREE.BoundingBoxHelper(meshSurface, 0x333333);
+                    bbHelper.update();
+
+                    // Determine box boundaries based on geometry.
+                    var xmin = bbHelper.box.min.x;
+                    var xmax = bbHelper.box.max.x;
+                    var xmid = 0.5*(xmin + xmax);
+                    var xlen = xmax - xmin;
+
+                    var ymin = bbHelper.box.min.y;
+                    var ymax = bbHelper.box.max.y;
+                    var ymid = 0.5*(ymin + ymax);
+                    var ylen = ymax - ymin;
+
+                    var zmin = bbHelper.box.min.z;
+                    var zmax = bbHelper.box.max.z;
+                    var zmid = 0.5*(zmin + zmax);
+                    var zlen = zmax - zmin;
+
+                    var lightX = xmid + 1*xlen;
+                    var lightY = ymid + 2*ylen;
+                    var lightZ = zmid + 5*zlen;
+
+                    // Camera
+                    var camDist = 3*Math.max(xmax - xmin, ymax - ymin, zmax - zmin);
+                    camera.position.set(xmid, ymid, zmax + camDist);
+
+                    // Renderer
+                    renderer = new THREE.WebGLRenderer({antialias: false});
+                    renderer.shadowMapEnabled = true;
+                    renderer.setClearColor(new THREE.Color(defaultBackgroundColor, 1.0));
+                    renderer.setSize(screenWidth, screenHeight);
+
+                    // Add the output of the renderer to the html element
+                    container = document.getElementById("WebGL-output");
+                    container.appendChild(renderer.domElement);
+
+                    // Controls
+                    controls = new THREE.OrbitControls(camera, renderer.domElement);
+                    // This sets the camera target position; attempting to use camera.lookAt
+                    // will not work, as THREE.OrbitControls overrides the camera target position
+                    controls.target = new THREE.Vector3(xmid, ymid, zmid);
+
+                    // Light
+                    var light = new THREE.SpotLight(0xBBBBBB);
+                    light.castShadow = true;
+                    light.position.set(xmid + 5*xlen, ymid + 5*ylen, zmid + 5*zlen);
+                    light.target.position.set(xmid, ymid, zmid);
+                    light.exponent = 1;
+                    light.angle = 60 * Math.PI / 180;
+                    scene.add(light);
+  
+                    // Ambient light
+                    var lightAmbient = new THREE.AmbientLight(0xffffff);
+                    scene.add(lightAmbient);
+
+                    // Axes
+                    var origin = new THREE.Vector3(xmin, ymin, zmin);
+                    var ex = new THREE.Vector3(xmax, 0, 0);
+                    var ey = new THREE.Vector3(0, ymax, 0);
+                    var ez = new THREE.Vector3(0, 0, zmax);
+                    var xAxis = new THREE.ArrowHelper(ex, origin, xlen, 0xff0000);
+                    var yAxis = new THREE.ArrowHelper(ey, origin, ylen, 0x00ff00);
+                    var zAxis = new THREE.ArrowHelper(ez, origin, zlen, 0x0000ff);
+                    scene.add(xAxis);
+                    scene.add(yAxis);
+                    scene.add(zAxis);
+
+                    // Planes
+                    var el = 5; // length of the planes in xlen, ylen, and zlen units
+                    var eps = 1.e-3 * Math.max(xlen, ylen, zlen);
+                    var xPlaneGeo = new THREE.PlaneBufferGeometry(el*zlen, el*ylen);
+                    var yPlaneGeo = new THREE.PlaneBufferGeometry(el*xlen, el*zlen);
+                    var zPlaneGeo = new THREE.PlaneBufferGeometry(el*xlen, el*ylen);
+                    var xPlaneMat = new THREE.MeshLambertMaterial( {color: 0x550000, 
+                                                                    side: THREE.DoubleSide,
+                                                                    transparent: true,
+                                                                    opacity: 0.5} );
+                    var yPlaneMat = new THREE.MeshLambertMaterial( {color: 0x005500, 
+                                                                    side: THREE.DoubleSide,
+                                                                    transparent: true,
+                                                                    opacity: 0.5} );
+                    var zPlaneMat = new THREE.MeshLambertMaterial( {color: 0x000055, 
+                                                                    side: THREE.DoubleSide, 
+                                                                    transparent: true,
+                                                                    opacity: 0.5} );
+                    var xPlane = new THREE.Mesh(xPlaneGeo, xPlaneMat);
+                    xPlane.rotation.y = - Math.PI/2;
+                    xPlane.position.x = xmin - eps; 
+                    xPlane.position.y = ymin + el*ylen/2; 
+                    xPlane.position.z = zmin + el*zlen/2;
+                    var yPlane = new THREE.Mesh(yPlaneGeo, yPlaneMat);
+                    yPlane.rotation.x = Math.PI/2;
+                    yPlane.position.x = xmin + el*xlen/2;
+                    yPlane.position.y = ymin - eps;
+                    yPlane.position.z = zmin + el*zlen/2;
+                    var zPlane = new THREE.Mesh(zPlaneGeo, zPlaneMat);
+                    zPlane.position.x = xmin + el*xlen/2;
+                    zPlane.position.y = ymin + el*ylen/2;
+                    zPlane.position.z = zmin - eps;
+
+                    // GUI
+                    gui = new dat.GUI();
+                    parameters = {'background': '#4d576b',
+                                  'shininess': 100,
+                                  'color': '#aaaaaa',
+                                  'emissive': '#000000',
+                                  'specular': '#111111',
+                                  'edges': false,
+                                  'lightX': lightX,
+                                  'lightY': lightY,
+                                  'lightZ': lightZ,
+                                  'planes': false,
+                                  'bounding box': false};
+
+                    var sceneFolder = gui.addFolder('scene');
+                    var backgroundGui = sceneFolder.addColor(parameters, 'background').name('background').listen();
+                    backgroundGui.onChange( function(value) {renderer.setClearColor(value);} );
+                    var lightXGui = sceneFolder.add(parameters, 'lightX' ).min(xmid-10*xlen).max(xmid+10*xlen).step(xlen/10.).name('light x').listen();
+                    lightXGui.onChange( function(value) {light.position.x = value} );
+                    var lightYGui = sceneFolder.add(parameters, 'lightY' ).min(ymid-10*ylen).max(ymid+10*ylen).step(ylen/10.).name('light y').listen();
+                    lightYGui.onChange( function(value) {light.position.y = value} );
+                    var lightZGui = sceneFolder.add(parameters, 'lightZ' ).min(zmid-10*zlen).max(zmid+10*zlen).step(zlen/10.).name('light z').listen();
+                    lightZGui.onChange( function(value) {light.position.z = value} );
+                    var scenePlanesGui = sceneFolder.add(parameters, 'planes').listen();
+                    scenePlanesGui.onChange( function(value) {
+                        if (value) {
+                            scene.add(xPlane);
+                            scene.add(yPlane);
+                            scene.add(zPlane) 
+                        } else {
+                            scene.remove(xPlane);
+                            scene.remove(yPlane);
+                            scene.remove(zPlane)
+                        } 
+                    } );
+
+                    var sceneBBoxGui = sceneFolder.add(parameters, 'bounding box').listen();
+                    sceneBBoxGui.onChange( function(value) {
+                       if (value) {
+                           scene.add(bbHelper);
+                       } else {
+                           scene.remove(bbHelper);
+                       }
+                    } );
+
+                    var materialFolder = gui.addFolder('material');
+                    var materialShininessGui = materialFolder.add(parameters, 'shininess').min(0).max(100).step(5).listen();
+                    materialShininessGui.onChange( function(value) {surface.shininess = value} );
+
+                    if (! geometryHasColor) {
+                        var materialColorGui = materialFolder.addColor(parameters, 'color').name('ambient color').listen();
+                        materialColorGui.onChange( function(value) {surface.color.setHex(value.replace('#', '0x'));} );
+                    }
+
+                    var materialEmissiveGui = materialFolder.addColor(parameters, 'emissive').name('emissive color').listen();
+                    materialEmissiveGui.onChange( function(value) {surface.emissive.setHex(value.replace('#', '0x'));} );
+                    var materialSpecularGui = materialFolder.addColor(parameters, 'specular').name('specular color').listen();
+                    materialSpecularGui.onChange( function(value) {surface.specular.setHex(value.replace('#', '0x'));} );
+                    var materialEdgesGui = materialFolder.add(parameters, 'edges').listen();
+                    materialEdgesGui.onChange( function(value) {if (value) {scene.add(meshEdges);} else {scene.remove(meshEdges);} } );
+
+                    // Animate
+                    animate();
+                });
+            }
+
+            function animate() {
+                requestAnimationFrame(animate);
+                render();
+                controls.update();
+            }
+
+            function render() {
+                renderer.render(scene, camera);
+            }
+
+            function onWindowResize() {
+                camera.aspect = window.innerWidth / window.innerHeight;
+                camera.updateProjectionMatrix();
+                renderer.setSize(window.innerWidth, window.innerHeight);
+                if (controls.handleResize) { controls.handleResize(); } // OrbitControls has no handleResize; guard to avoid a TypeError
+                render();
+            }
+        </script>
+    </body>
+</html>
diff --git a/config/plugins/visualizations/graphviz/README.md b/config/plugins/visualizations/graphviz/README.md
new file mode 100644
index 0000000..c851f8a
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/README.md
@@ -0,0 +1,71 @@
+# Galaxy Graph Visualization Framework  #
+
+The framework visualizes graphs and provides functionality to easily manipulate them within the Galaxy platform.
+The [Cytoscape.js](http://js.cytoscape.org/) library is used for graph construction and visualization.
+
+### Input formats ###
+
+Files in json or txt format can be used as input.
+
+Examples: 
+
+* json file 
+   
+```
+#!json
+
+{
+  "nodes": [
+    { "id": 1 },
+    { "id": 2 }
+  ],
+  "links": [
+    { "source": 1, "target": 2 }
+  ]
+}
+
+```
+
+* txt file with matrix notation
+
+
+```
+#!text
+
+1	3 4 5
+2	4 6
+3	1 5 6 2
+4	3
+5	
+6
+```
+Line "1	 3 4 5" can be read as: source: node "1" , target: node "3" ("4","5")
+
+
+### General Description ###
+
+The framework consists of three parts. On the left and right sides of the screen are two sliding panels, hidden by default; each can be opened by clicking the button on its upper part. The left panel is the Tool Panel, where the user can choose different features to manipulate the graph. The right panel displays all available node information (metadata).
+Between these panels is the main screen, where the visualization is displayed.
+
+
+### Features ###
+
+| Feature      | Description                    |
+| ------------- | ------------------------------ |
+| Lazy loading | If the graph contains more than 50 nodes, lazy loading is performed. At first the framework loads only the root nodes (nodes that have no incoming edges) and their child nodes. Child nodes that can be expanded (contain other nodes) are drawn larger. To load the rest of the graph, use the *Expand* feature on the expandable nodes. Lazy loading works only if the graph has at least one root node; see the sketch after this table |
+| Delete | Deletes the selected nodes and edges. Can be performed with the corresponding button or the shortcut key 'd' |
+| Restore | Restores the nodes and edges removed by the most recent deletion |
+| Collapse | Can be performed on a selected node that has outgoing edges; it hides all outgoing (child) nodes and edges of that node. Useful for a compact view. Can be performed with the corresponding button or the shortcut key 'c'. To uncollapse the node, use *Expand* or *Restore Structure* |
+| Expand | Loads more nodes when lazy loading was used, or uncollapses collapsed nodes. Can be performed with the corresponding button or the shortcut key 'e' |
+| Metadata | Node metadata is always displayed on the right panel |
+| Child/parent dependencies | To see the incoming/outgoing nodes and edges of the selected node, enable (check) the corresponding option on the left panel |
+| Labels | Node and edge labels can be shown/hidden by enabling/disabling the corresponding option on the left panel |
+| Export PNG | Exports the graph visualization as a PNG image. Click the corresponding button to perform the action |
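+
+For reference, the sketch below shows the lazy-loading decision described above, following the logic in `static/js/graphVis.js` (included later in this commit); `data` is assumed to be an already-parsed Cytoscape.js element list and `cy` the visible instance.
+
+```
+#!javascript
+
+// Load the full graph into a headless instance first, then decide:
+// graphs with more than 50 nodes and at least one root are loaded lazily.
+var allcy = cytoscape({ headless: true });
+allcy.load(data);
+if (allcy.nodes().length > 50 && allcy.nodes().roots().length !== 0) {
+    // roots() selects nodes with no incoming edges; closedNeighborhood()
+    // adds their immediate child nodes and the connecting edges.
+    var toAdd = allcy.nodes().roots().closedNeighborhood();
+    cy.add(toAdd);   // the visible instance gets only roots + children
+} else {
+    cy.add(data);    // small graph: load everything at once
+}
+```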
\ No newline at end of file
diff --git a/config/plugins/visualizations/graphviz/config/graphviz.xml b/config/plugins/visualizations/graphviz/config/graphviz.xml
new file mode 100644
index 0000000..ab0fea5
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/config/graphviz.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="Graph Visualization">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test test_attr="ext" result_type="datatype">json</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test test_attr="ext" result_type="datatype">txt</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="has_dataprovider" test_attr="datatype">node-edge</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <template>graphviz.mako</template>
+</visualization>
diff --git a/config/plugins/visualizations/graphviz/static/css/style.css b/config/plugins/visualizations/graphviz/static/css/style.css
new file mode 100644
index 0000000..61d7eab
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/css/style.css
@@ -0,0 +1,197 @@
+
+
+#cy {
+  height: 100%;
+  width: 100%;
+  position: absolute; 
+  top: 0;
+}
+p {
+	text-align: center;
+}
+
+
+.panel {
+
+	width:245px;
+	float:left;
+	height: 650px;
+	background:#d9dada;
+	position:relative;
+	left:-250px;
+	top: -10px;
+	font-family: Arial, "Helvetica Neue", Helvetica, sans-serif;
+	padding-left:5px;
+	 font-size: 14px;
+	
+}
+.slider-arrow {
+	top: -10px;
+	padding:5px;
+	width:10px;
+	float:left;
+	background:#d9dada;
+	font:700 16px  Arial, "Helvetica Neue", Helvetica, sans-serif;
+	color:#3498db;
+	text-decoration:none;
+	position:relative;
+	left:-250px;
+	
+}
+
+
+.btn  {
+  background: #3498db;
+  background-image: -webkit-linear-gradient(top, #3498db, #2980b9);
+  background-image: -moz-linear-gradient(top, #3498db, #2980b9);
+  background-image: -ms-linear-gradient(top, #3498db, #2980b9);
+  background-image: -o-linear-gradient(top, #3498db, #2980b9);
+  background-image: linear-gradient(to bottom, #3498db, #2980b9);
+  -webkit-border-radius: 28;
+  -moz-border-radius: 28;
+  border-radius: 6px;
+  font-family:  Arial, "Helvetica Neue", Helvetica, sans-serif;
+  color: #ffffff;
+  font-size: 14px;
+  padding: 8px 15px 8px 15px;
+  text-decoration: none;
+  width: 200px;
+}
+
+.btn:hover {
+  background: #3cb0fd;
+  background-image: -webkit-linear-gradient(top, #3cb0fd, #3498db);
+  background-image: -moz-linear-gradient(top, #3cb0fd, #3498db);
+  background-image: -ms-linear-gradient(top, #3cb0fd, #3498db);
+  background-image: -o-linear-gradient(top, #3cb0fd, #3498db);
+  background-image: linear-gradient(to bottom, #3cb0fd, #3498db);
+  text-decoration: none;
+  font-family:  Arial, "Helvetica Neue", Helvetica, sans-serif;
+}
+
+.btn:disabled {
+    background: #A4A4A4;
+}
+
+
+input[type=checkbox].css-checkbox {
+    display:none;
+}
+
+input[type=checkbox].css-checkbox + label.css-label {
+    padding-left:20px;
+    height:15px;
+    display:inline-block;
+    line-height:15px;
+    background-repeat:no-repeat;
+    background-position:0 0;
+    font-size:14px;
+    vertical-align:middle;
+    cursor:pointer;
+	font-family:  Arial, "Helvetica Neue", Helvetica, sans-serif;
+}
+
+input[type=checkbox].css-checkbox:checked + label.css-label {
+    background-position:0 -15px;
+}
+
+.css-label {
+    background-image: url(../img/lite-blue-check.png);
+}
+  
+
+.left {
+float: left;
+}
+
+
+#mainselection select {
+   border: 0;
+   color: #EEE;
+   background: transparent;
+   font-size: 14px;
+   font-family:  Arial, "Helvetica Neue", Helvetica, sans-serif;
+   width: 200px;
+   
+   -moz-appearance: none; /* Removes Default Firefox style*/
+  	-webkit-appearance: none;
+   padding: 5px 5px 5px 5px;
+}
+
+#mainselection {
+   overflow:hidden;
+   width:200px;
+   -moz-border-radius: 28px;
+   -webkit-border-radius: 28px;
+   border-radius: 9px 9px 9px 9px;
+   box-shadow: 1px 1px 1px #330033;
+   background: #3498db url(../img/15xvbd5.png) no-repeat
+	scroll 170px center;
+   
+}
+
+#nodeInfoDiv {
+  	width:250px;
+	float:right;
+	height: 650px;
+	background:#d9dada;
+	position:relative;
+	right:-255px;
+	top: -10px;
+	font-family: Arial, "Helvetica Neue", Helvetica, sans-serif;
+	
+	padding-right:5px;
+	font-size: 14px;
+    
+     
+}
+.slider-arrow-forNode {
+	top: -10px;
+	padding:5px;
+	width:10px;
+	float:right;
+	background:#d9dada;
+	font:700 16px  Arial, "Helvetica Neue", Helvetica, sans-serif;
+	color:#3498db;
+	text-decoration:none;
+	position:relative;
+	right:-255px;
+	
+}
+
+#linkDiv {
+	width: 245px;
+	margin: 8px;
+}
+
+
+.CSSTableGenerator {
+	margin:8px;
+	padding:0;
+	width:240px;
+	box-shadow: 3px 3px 1px #888888;
+	border:2px solid #000000;
+	table-layout: fixed;
+	
+}
+.CSSTableGenerator table {
+   
+    border-spacing: 0;
+	width:240px;
+	height:100%;
+	margin:0px;padding:0px;
+}
+.CSSTableGenerator tr:nth-child(odd){ background-color:#aad4ff; }
+.CSSTableGenerator tr:nth-child(even) { background-color:#ffffff; }
+.CSSTableGenerator td {
+	vertical-align:middle;	
+	border:1px solid #000000;
+	border-width:0px 1px 1px 0px;
+	text-align:left;
+	padding:3px;
+	font-size:14px;
+	font-family:Arial;
+	font-weight:normal;
+	color:#000000;
+	overflow:hidden;
+}
diff --git a/config/plugins/visualizations/graphviz/static/img/15xvbd5.png b/config/plugins/visualizations/graphviz/static/img/15xvbd5.png
new file mode 100644
index 0000000..7ebb1fb
Binary files /dev/null and b/config/plugins/visualizations/graphviz/static/img/15xvbd5.png differ
diff --git a/config/plugins/visualizations/graphviz/static/img/lite-blue-check.png b/config/plugins/visualizations/graphviz/static/img/lite-blue-check.png
new file mode 100644
index 0000000..80dd9d1
Binary files /dev/null and b/config/plugins/visualizations/graphviz/static/img/lite-blue-check.png differ
diff --git a/config/plugins/visualizations/graphviz/static/js/collapse.js b/config/plugins/visualizations/graphviz/static/js/collapse.js
new file mode 100644
index 0000000..ba5f932
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/js/collapse.js
@@ -0,0 +1,215 @@
+var collapseOrder = 0;   // the order of collapsed nodes
+var collapseNodeCount = 0;  // the number of collapsed nodes 
+var num = 0;
+
+function colNode() { 	//// collapse node
+
+
+	var selectedNode = cy.nodes(':selected');
+	var connectedEdges = selectedNode.connectedEdges(function() {
+		return !this.target().anySame(selectedNode);
+	});
+	var connectedNodes = connectedEdges.targets();
+
+	collapseNodeCount = connectedNodes.length;
+	num = selectedNode.data('colNum', collapseNodeCount);
+	connectedNodes.addClass('collapsedNode' + collapseOrder);
+	connectedEdges.addClass('collapsedNode' + collapseOrder);
+	selectedNode.addClass('superNode');
+
+	addCollapsedEdges(selectedNode, collapseOrder);    
+
+// hide collapsed nodes
+	cy.style()
+		.selector('.collapsedNode' + collapseOrder)
+		.css({
+		'visibility': 'hidden'
+	})
+		.update();
+// change the size of superNode
+	cy.style()
+		.selector('.superNode')
+		.css({
+		'opacity': 0.8,
+		'width': 50,
+		'height': 50
+	})
+		.update();
+
+	cy.fit();
+
+
+	collapseOrder = collapseOrder + 1;
+
+	//}
+	$('.btn.colNode').prop('disabled', true);
+	console.log("shape = " + shape);
+
+}
+
+/// uncollapse(expand) superNode
+
+function unColNode() { 
+
+	var selectedNode = cy.nodes(':selected');
+	var connectedEdges = selectedNode.connectedEdges(function() {
+		return !this.target().anySame(selectedNode);
+	});
+	var connectedNodes = connectedEdges.targets();
+	cy.style()
+		.selector('.superNode')
+		.update();
+
+	selectedNode.removeClass('superNode');
+
+	for (var i = collapseOrder; i >= 0; i--) {
+
+		if (connectedNodes.hasClass('collapsedNode' + i) || connectedEdges.hasClass('collapsedNode' + i)) {
+
+			connectedNodes.removeClass('collapsedNode' + i);
+			connectedEdges.removeClass('collapsedNode' + i);
+			removeCollapsedEdges(selectedNode, i);
+
+			cy.style()
+				.update(); // remove invisibility
+			cy.fit();
+		}
+
+	}
+
+	collapseOrder--;
+	
+	$('.btn.colNode').prop('disabled', true);  // disable collapse button
+
+}
+
+/// creating edges from the superNode to the child nodes of the collapsed nodes
+function addCollapsedEdges(selectedNode, collapseOrder) {
+
+	var connectedEdges = selectedNode.connectedEdges(function() {
+		return !this.target().anySame(selectedNode);
+	});
+
+	var connectedNodes = connectedEdges.targets();
+	var newTargetEdges = connectedNodes.connectedEdges(function() {
+		return !this.target().anySame(connectedNodes);
+	});
+	var newSourceEdges = connectedNodes.connectedEdges(function() {
+		return !this.source().anySame(connectedNodes);
+	});
+
+	var newTargetNodes = newTargetEdges.targets();
+	var newSourceNodes = newSourceEdges.sources();
+
+
+	newTargetNodes.each(function(i, ele) {
+
+		if (ele.edgesTo(selectedNode).sources().data('id') == selectedNode.data('id') || ele.data('id') == selectedNode.data('id')) { // preventing duplicate edges 
+
+			ele = ele + 1;
+		} else {
+
+
+			cy.add({
+				group: "edges",
+				data: {
+					source: selectedNode.data('id'),
+					target: ele.data('id')
+				}
+			})
+				.addClass('virtualEdges' + collapseOrder)
+		}
+	});
+
+
+
+
+	newSourceNodes.each(function(i, ele) {
+
+		if (ele.edgesTo(selectedNode).targets().data('id') == selectedNode.data('id') || ele.data('id') == selectedNode.data('id')) {
+
+			ele = ele + 1;
+		} else {
+
+			cy.add({
+				group: "edges",
+				data: {
+					source: ele.data('id'),
+					target: selectedNode.data('id')
+				}
+			})
+				.addClass('virtualEdges' + collapseOrder)
+		}
+	});
+
+
+
+}
+
+// removes the edges created by addCollapsedEdges
+function removeCollapsedEdges(selectedNode, collapseOrder) {
+
+	selectedNode.connectedEdges().each(function(i, ele) {
+
+		if (ele.hasClass('virtualEdges' + collapseOrder)) {
+
+			cy.remove(ele);
+		}
+	});
+
+}
+
+// resetting all superNodes
+function resetCollapse() {
+
+	for (var i = collapseOrder; i >= 0; i--) {
+
+		cy.style()
+			.selector('.collapsedNode' + i)
+			.css({
+			'opacity': 0.8,
+			'background-color': '#888888',
+			'line-color': '#ddd',
+			'target-arrow-color': '#ddd'
+		})
+			.update();
+
+		cy.style()
+			.selector('.superNode')
+			.css({
+			'opacity': 0.8,
+			'background-color': '#888888', //
+			'border-width': 0
+		})
+			.update();
+
+		cy.remove('.virtualEdges' + i);
+
+		cy.nodes().removeClass('superNode');
+		cy.nodes().removeClass('collapsedNode' + i);
+		cy.edges().removeClass('collapsedNode' + i);
+		cy.edges().removeClass('virtualEdges' + i);
+
+	}
+}
+
+
+
+// counts the number of collapsed nodes for the given superNode
+function countCollapse(nd) {
+
+	if (nd.hasClass('superNode')) {
+		Tip('contains ' + nd.data().colNum + ' node(s)', PADDING, 10);
+	} else if (nd.hasClass('toBeExpaned')) {
+
+		var eles = allcy.nodes();
+		var selectedNodeId = nd.id();
+		selectedNodeId = selectedNodeId.replace(/[^0-9\.]+/g, "");
+
+		var nodeCount = eles[selectedNodeId].outdegree();
+		Tip('contains ' + nodeCount + ' node(s)', PADDING, 10);
+
+	} else {
+		Tip('contains 0 node(s)', PADDING, 10);
+	}
+}
diff --git a/config/plugins/visualizations/graphviz/static/js/cytoscape.min.js b/config/plugins/visualizations/graphviz/static/js/cytoscape.min.js
new file mode 100644
index 0000000..c285cf7
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/js/cytoscape.min.js
@@ -0,0 +1,27 @@
+/*!
+ * This file is part of Cytoscape.js 2.4.0.
+ * 
+ * Cytoscape.js is free software: you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as published by the Free
+ * Software Foundation, either version 3 of the License, or (at your option) any
+ * later version.
+ * 
+ * Cytoscape.js is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+ * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+ * details.
+ * 
+ * You should have received a copy of the GNU Lesser General Public License along with
+ * Cytoscape.js. If not, see <http://www.gnu.org/licenses/>.
+ */
+var cytoscape;!function(e){"use strict";var t=cytoscape=function(){return cytoscape.init.apply(cytoscape,arguments)};t.version="2.4.0",t.init=function(e){return void 0===e&&(e={}),t.is.plainObject(e)?new t.Core(e):t.is.string(e)?t.extension.apply(t.extension,arguments):void 0},t.fn={},"undefined"!=typeof module&&module.exports&&(module.exports=cytoscape),"undefined"!=typeof define&&define.amd&&define("cytoscape",function(){return cytoscape}),e&&(e.cytoscape=cytoscape)}("undefined"==typeo [...]
+return"undefined"!=typeof ActiveXObject||!1},windows:function(){return"undefined"!=typeof navigator&&navigator.appVersion.match(/Win/i)},mac:function(){return"undefined"!=typeof navigator&&navigator.appVersion.match(/Mac/i)},linux:function(){return"undefined"!=typeof navigator&&navigator.appVersion.match(/Linux/i)},unix:function(){return"undefined"!=typeof navigator&&navigator.appVersion.match(/X11/i)}}}(cytoscape,"undefined"==typeof window?null:window),function(e,t){"use strict";e.util= [...]
+var r={};return t=e.util.extend({},r,t),function(){var e=this,t=void 0!==e.length,r=t?e:[e],i=this._private.cy||this;if(!i.styleEnabled())return this;for(var n=0;n<r.length;n++){var a=r[n];a._private.animation.queue=[]}return this}},delay:function(t){var r={};return t=e.util.extend({},r,t),function(e,t){var r=this._private.cy||this;return r.styleEnabled()?(this.animate({delay:e},{duration:e,complete:t}),this):this}},animate:function(t){var r={};return t=e.util.extend({},r,t),function(e,t [...]
+
+break;case void 0:break;default:return!1}return u?(a.bypassed=d?c.bypassed:c,o[a.name]=a):d?c.bypassed=a:o[a.name]=a,!0},e.styfn.update=function(){var e=this._private.cy,t=e.elements();t.updateStyle()},e.styfn.updateMappers=function(t){for(var r=0;r<t.length;r++){for(var i=t[r],n=i._private.style,a=0;a<e.style.properties.length;a++){var o=e.style.properties[a],s=n[o.name];if(s&&s.mapping){var l=s.mapping;this.applyParsedProperty(i,l)}}this.updateStyleHints(i)}},e.styfn.updateTransitions= [...]
+}),this},fit:function(e,t){var r=this.getFitViewport(e,t);if(r){var i=this._private;i.zoom=r.zoom,i.pan=r.pan,this.trigger("pan zoom viewport"),this.notify({type:"viewport"})}return this},getFitViewport:function(t,r){if(e.is.number(t)&&void 0===r&&(r=t,t=void 0),this._private.panningEnabled&&this._private.zoomingEnabled){var i;if(e.is.string(t)){var n=t;t=this.$(n)}else if(e.is.boundingBox(t)){var a=t;i={x1:a.x1,y1:a.y1,x2:a.x2,y2:a.y2},i.w=i.x2-i.x1,i.h=i.y2-i.y1}else e.is.elementOrColl [...]
+allowBinding:!0,allowSetting:!0,settingEvent:"position",settingTriggersEvent:!0,triggerFnName:"rtrigger",allowGetting:!0,validKeys:["x","y"],onSet:function(e){var t=e.updateCompoundBounds();t.rtrigger("position")},canSet:function(e){return!e.locked()}}),silentPosition:e.define.data({field:"position",bindingEvent:"position",allowBinding:!1,allowSetting:!0,settingEvent:"position",settingTriggersEvent:!1,triggerFnName:"trigger",allowGetting:!0,validKeys:["x","y"],onSet:function(e){e.updateC [...]
+collide:function(t,r,n,a,o,s,l,u){var c=i.arrow._points;return e.math.pointInsidePolygon(t,r,c,n,a,o,s,l,u)},roughCollide:n,draw:function(e,t,r,n){for(var o=i.arrow._points,s=0;s<o.length/2;s++){var l=a(o[2*s],o[2*s+1],t,r,n);e.lineTo(l.x,l.y)}},spacing:function(e){return 0},gap:function(e){return 2*e._private.style.width.pxValue}},i.triangle=i.arrow,i["triangle-backcurve"]={_ctrlPt:[0,-.15],collide:function(t,r,n,a,o,s,l,u){var c=i.triangle._points;return e.math.pointInsidePolygon(t,r,c [...]
+var o=r._private.style["text-halign"].strValue,s=r._private.style["text-valign"].strValue,l=r._private.rscratch;if(e.is.number(l.labelX)&&e.is.number(l.labelY)){switch(o){case"left":t.textAlign="right";break;case"right":t.textAlign="left";break;default:t.textAlign="center"}switch(s){case"top":t.textBaseline="bottom";break;case"bottom":t.textBaseline="top";break;default:t.textBaseline="middle"}this.drawText(t,r,l.labelX,l.labelY)}}}},i.getFontCache=function(e){var t;this.fontCaches=this.f [...]
+t.touchData.capture=!0,t.data.bgActivePosistion=void 0;var n=t.data.cy,o=t.getCachedNodes(),s=t.getCachedEdges(),l=t.touchData.now,u=t.touchData.earlier,c=t.data.canvasNeedsRedraw;if(i.touches[0]){var k=t.projectIntoViewport(i.touches[0].clientX,i.touches[0].clientY);l[0]=k[0],l[1]=k[1]}if(i.touches[1]){var k=t.projectIntoViewport(i.touches[1].clientX,i.touches[1].clientY);l[2]=k[0],l[3]=k[1]}if(i.touches[2]){var k=t.projectIntoViewport(i.touches[2].clientX,i.touches[2].clientY);l[4]=k[0 [...]
+d.temperature=d.temperature*t.coolingFactor,d.temperature<t.minTemp?!1:!0)},p=function(){u(d,i,t),!0===t.fit&&i.fit(t.padding);var e=new Date;console.info("Layout took "+(e-o)+" ms"),a.one("layoutstop",t.stop),a.trigger({type:"layoutstop",layout:a})};if(t.animate){var v=0,f=function(){for(var r,n=0;n<t.refresh&&v<t.numIter;){var r=h(v);if(r===!1)break;n++,v++}u(d,i,t),t.fit&&i.fit(t.padding),r!==!1&&v+1<t.numIter?e.util.requestAnimationFrame(f):p()};e.util.requestAnimationFrame(f)}else{f [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/graphviz/static/js/graphVis.js b/config/plugins/visualizations/graphviz/static/js/graphVis.js
new file mode 100644
index 0000000..e22814c
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/js/graphVis.js
@@ -0,0 +1,186 @@
+var showNodeLabel;
+var showEdgeLabel;
+var showOut;
+var showIn;
+var demoNodes = [];
+var demoEdges = [];
+var shape;
+var allcy;
+var cy;
+// The key to hold down when brush/box selecting: 16 === shift
+var BRUSH_KEY = 16;
+
+$(function() {
+    // remove horizontal scrollbar
+    $("body").css("overflow-x", "hidden");
+
+    // set up a key that, when held down, allows box selection and turns panning off
+    $( document )
+        .on( 'keydown', function( ev ){
+            if( cy && ev.keyCode === BRUSH_KEY ){
+                cy.panningEnabled( false );
+                cy.boxSelectionEnabled( true );
+            }
+        })
+        .on( 'keyup', function( ev ){
+            if( cy && ev.keyCode === BRUSH_KEY ){
+                cy.panningEnabled( true );
+                cy.boxSelectionEnabled( false );
+            }
+        });
+});
+
+// shape of the graph
+$("select option:selected").each(function() {
+    shape = $(this).val();
+});
+
+///////// create graph
+function createGraph(data) {
+    window.graphData = data;
+
+ 	allcy = cytoscape({
+ 		headless: true,
+ 	});
+
+ 	cy = cytoscape({
+ 		container: document.getElementById('cy'),
+
+ 		layout: {
+ 			name: shape
+ 			//avoidOverlap: true,
+ 			//padding: 10
+ 		},
+ 		hideEdgesOnViewport: true,
+ 		hideLabelsOnViewport: true,
+ 		motionBlur: true,
+ 		textureOnViewport: true,
+
+        // initially have click and drag pan the viewport
+        panningEnabled: true,
+        boxSelectionEnabled: false,
+
+ 		ready: function() {
+
+ 			window.cy = this;
+ 			cy.nodes().on("click", function(e) {
+ 				showNodeInfo(e.cyTarget);
+ 			});
+ 		},
+
+ 		style: cytoscape.stylesheet()
+ 			.selector('node')
+ 			.css({
+     			'content': showNodeLabel,
+     			'text-valign': 'center',
+     			'background-color': '#888888',
+     			'opacity': 0.8
+            })
+
+ 			.selector('edge')
+ 			.css({
+     			'curve-style': 'unbundled-bezier',
+     			'target-arrow-shape': 'triangle',
+     			'width': 2,
+     			'line-color': '#ddd',
+     			'target-arrow-color': '#ddd',
+     			'content': showEdgeLabel
+            })
+
+ 			.selector(':selected')
+ 			.css({
+     			'background-color': '#FE2E64',
+     			'line-color': '#FE2E64',
+     			'target-arrow-color': '#FE2E64',
+     			'source-arrow-color': '#FE2E64',
+     			'opacity': 1
+     		})
+
+            .selector('core')
+            .css({
+                'outside-texture-bg-color': 'white'
+     		})
+ 	});
+    window.cy = cy;
+ 	allcy.load(data);
+
+    // if the graph contains more than 50 nodes, load only the root nodes and their children
+ 	if (allcy.nodes().length > 50 && allcy.nodes().roots().length !== 0 ) {
+		var toAdd = allcy.nodes().roots().closedNeighborhood();
+		allcy.nodes().roots().addClass("roots");
+
+		showNodesToExpand(toAdd);
+ 		cy.add(toAdd);
+		cy.load(cy.elements('*').jsons());
+
+ 		cy.style()
+ 			.selector('.toBeExpaned')
+ 			.css({
+     			'width': 50,
+     			'height': 50
+     		})
+ 			.update();
+
+ 	} else {
+        cy.add(data);
+        cy.load(cy.elements('*').jsons());
+ 	}
+ 	checkBoxes();
+
+} // END create graph
+
+
+//// parse function for matrix data
+function parseTextMatrix(data) {
+    var nodes = [],
+        edges = [];
+
+    data.split( '\n' ).forEach( function( line ){
+        var columns = line.split( /,?\s+/ ), // split by comma or space
+            sourceId = columns[0];
+
+        nodes.push({
+            data: { id: sourceId }
+        });
+
+        for( var i = 1; i < columns.length; i++ ){
+            edges.push({
+                data: {
+                    source  : sourceId,
+                    target  : columns[i]
+                }
+            });
+        }
+    });
+
+	createGraph({
+        nodes: nodes,
+        edges: edges
+    });
+}
+
+///// parsing function for JSON: handles the link/edge naming and id normalization issues
+function parseJson( data ) {
+
+    if( data.hasOwnProperty( 'links' ) ){
+        data.edges = data.links;
+        delete data.links;
+    }
+	data.nodes = data.nodes.map( function _processNode( node ){
+        return {
+            data : $.extend( {}, node.data, {
+                id : node.id + ''
+            })
+        };
+    });
+    data.edges = data.edges.map( function _processEdge( edge ){
+        return {
+            data : $.extend( {}, edge.data, {
+                id      : edge.id || undefined,
+                source  : data.nodes[ edge.source ].data.id,
+                target  : data.nodes[ edge.target ].data.id
+            })
+        };
+    });
+	createGraph( data );
+}
\ No newline at end of file
diff --git a/config/plugins/visualizations/graphviz/static/js/jquery.qtip.js b/config/plugins/visualizations/graphviz/static/js/jquery.qtip.js
new file mode 100644
index 0000000..67473e1
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/js/jquery.qtip.js
@@ -0,0 +1,3451 @@
+/*
+ * qTip2 - Pretty powerful tooltips - v2.2.1
+ * http://qtip2.com
+ *
+ * Copyright (c) 2014 
+ * Released under the MIT licenses
+ * http://jquery.org/license
+ *
+ * Date: Sat Sep 6 2014 11:12 GMT+0100+0100
+ * Plugins: tips modal viewport svg imagemap ie6
+ * Styles: core basic css3
+ */
+/*global window: false, jQuery: false, console: false, define: false */
+
+/* Cache window, document, undefined */
+(function( window, document, undefined ) {
+
+// Uses AMD or browser globals to create a jQuery plugin.
+(function( factory ) {
+	"use strict";
+	if(typeof define === 'function' && define.amd) {
+		define(['jquery'], factory);
+	}
+	else if(jQuery && !jQuery.fn.qtip) {
+		factory(jQuery);
+	}
+}
+(function($) {
+	"use strict"; // Enable ECMAScript "strict" operation for this function. See more: http://ejohn.org/blog/ecmascript-5-strict-mode-json-and-more/
+;// Munge the primitives - Paul Irish tip
+var TRUE = true,
+FALSE = false,
+NULL = null,
+
+// Common variables
+X = 'x', Y = 'y',
+WIDTH = 'width',
+HEIGHT = 'height',
+
+// Positioning sides
+TOP = 'top',
+LEFT = 'left',
+BOTTOM = 'bottom',
+RIGHT = 'right',
+CENTER = 'center',
+
+// Position adjustment types
+FLIP = 'flip',
+FLIPINVERT = 'flipinvert',
+SHIFT = 'shift',
+
+// Shortcut vars
+QTIP, PROTOTYPE, CORNER, CHECKS,
+PLUGINS = {},
+NAMESPACE = 'qtip',
+ATTR_HAS = 'data-hasqtip',
+ATTR_ID = 'data-qtip-id',
+WIDGET = ['ui-widget', 'ui-tooltip'],
+SELECTOR = '.'+NAMESPACE,
+INACTIVE_EVENTS = 'click dblclick mousedown mouseup mousemove mouseleave mouseenter'.split(' '),
+
+CLASS_FIXED = NAMESPACE+'-fixed',
+CLASS_DEFAULT = NAMESPACE + '-default',
+CLASS_FOCUS = NAMESPACE + '-focus',
+CLASS_HOVER = NAMESPACE + '-hover',
+CLASS_DISABLED = NAMESPACE+'-disabled',
+
+replaceSuffix = '_replacedByqTip',
+oldtitle = 'oldtitle',
+trackingBound,
+
+// Browser detection
+BROWSER = {
+	/*
+	 * IE version detection
+	 *
+	 * Adapted from: http://ajaxian.com/archives/attack-of-the-ie-conditional-comment
+	 * Credit to James Padolsey for the original implementation!
+	 */
+	ie: (function(){
+		for (
+			var v = 4, i = document.createElement("div");
+			(i.innerHTML = "<!--[if gt IE " + v + "]><i></i><![endif]-->") && i.getElementsByTagName("i")[0];
+			v+=1
+		) {}
+		return v > 4 ? v : NaN;
+	}()),
+
+	/*
+	 * iOS version detection
+	 */
+	iOS: parseFloat(
+		('' + (/CPU.*OS ([0-9_]{1,5})|(CPU like).*AppleWebKit.*Mobile/i.exec(navigator.userAgent) || [0,''])[1])
+		.replace('undefined', '3_2').replace('_', '.').replace('_', '')
+	) || FALSE
+};
+;function QTip(target, options, id, attr) {
+	// Elements and ID
+	this.id = id;
+	this.target = target;
+	this.tooltip = NULL;
+	this.elements = { target: target };
+
+	// Internal constructs
+	this._id = NAMESPACE + '-' + id;
+	this.timers = { img: {} };
+	this.options = options;
+	this.plugins = {};
+
+	// Cache object
+	this.cache = {
+		event: {},
+		target: $(),
+		disabled: FALSE,
+		attr: attr,
+		onTooltip: FALSE,
+		lastClass: ''
+	};
+
+	// Set the initial flags
+	this.rendered = this.destroyed = this.disabled = this.waiting =
+		this.hiddenDuringWait = this.positioning = this.triggering = FALSE;
+}
+PROTOTYPE = QTip.prototype;
+
+PROTOTYPE._when = function(deferreds) {
+	return $.when.apply($, deferreds);
+};
+
+PROTOTYPE.render = function(show) {
+	if(this.rendered || this.destroyed) { return this; } // If tooltip has already been rendered, exit
+
+	var self = this,
+		options = this.options,
+		cache = this.cache,
+		elements = this.elements,
+		text = options.content.text,
+		title = options.content.title,
+		button = options.content.button,
+		posOptions = options.position,
+		namespace = '.'+this._id+' ',
+		deferreds = [],
+		tooltip;
+
+	// Add ARIA attributes to target
+	$.attr(this.target[0], 'aria-describedby', this._id);
+
+	// Create public position object that tracks current position corners
+	cache.posClass = this._createPosClass(
+		(this.position = { my: posOptions.my, at: posOptions.at }).my
+	);
+
+	// Create tooltip element
+	this.tooltip = elements.tooltip = tooltip = $('<div/>', {
+		'id': this._id,
+		'class': [ NAMESPACE, CLASS_DEFAULT, options.style.classes, cache.posClass ].join(' '),
+		'width': options.style.width || '',
+		'height': options.style.height || '',
+		'tracking': posOptions.target === 'mouse' && posOptions.adjust.mouse,
+
+		/* ARIA specific attributes */
+		'role': 'alert',
+		'aria-live': 'polite',
+		'aria-atomic': FALSE,
+		'aria-describedby': this._id + '-content',
+		'aria-hidden': TRUE
+	})
+	.toggleClass(CLASS_DISABLED, this.disabled)
+	.attr(ATTR_ID, this.id)
+	.data(NAMESPACE, this)
+	.appendTo(posOptions.container)
+	.append(
+		// Create content element
+		elements.content = $('<div />', {
+			'class': NAMESPACE + '-content',
+			'id': this._id + '-content',
+			'aria-atomic': TRUE
+		})
+	);
+
+	// Set rendered flag and prevent redundant reposition calls for now
+	this.rendered = -1;
+	this.positioning = TRUE;
+
+	// Create title...
+	if(title) {
+		this._createTitle();
+
+		// Update title only if it's not a callback (called in toggle if so)
+		if(!$.isFunction(title)) {
+			deferreds.push( this._updateTitle(title, FALSE) );
+		}
+	}
+
+	// Create button
+	if(button) { this._createButton(); }
+
+	// Set proper rendered flag and update content if not a callback function (called in toggle)
+	if(!$.isFunction(text)) {
+		deferreds.push( this._updateContent(text, FALSE) );
+	}
+	this.rendered = TRUE;
+
+	// Setup widget classes
+	this._setWidget();
+
+	// Initialize 'render' plugins
+	$.each(PLUGINS, function(name) {
+		var instance;
+		if(this.initialize === 'render' && (instance = this(self))) {
+			self.plugins[name] = instance;
+		}
+	});
+
+	// Unassign initial events and assign proper events
+	this._unassignEvents();
+	this._assignEvents();
+
+	// When deferreds have completed
+	this._when(deferreds).then(function() {
+		// tooltiprender event
+		self._trigger('render');
+
+		// Reset flags
+		self.positioning = FALSE;
+
+		// Show tooltip if not hidden during wait period
+		if(!self.hiddenDuringWait && (options.show.ready || show)) {
+			self.toggle(TRUE, cache.event, FALSE);
+		}
+		self.hiddenDuringWait = FALSE;
+	});
+
+	// Expose API
+	QTIP.api[this.id] = this;
+
+	return this;
+};
+
+PROTOTYPE.destroy = function(immediate) {
+	// Set a flag to signify to plugins that destroy is taking place
+	// and ensure it only gets destroyed once!
+	if(this.destroyed) { return this.target; }
+
+	function process() {
+		if(this.destroyed) { return; }
+		this.destroyed = TRUE;
+
+		var target = this.target,
+			title = target.attr(oldtitle),
+			timer;
+
+		// Destroy tooltip if rendered
+		if(this.rendered) {
+			this.tooltip.stop(1,0).find('*').remove().end().remove();
+		}
+
+		// Destroy all plugins
+		$.each(this.plugins, function(name) {
+			this.destroy && this.destroy();
+		});
+
+		// Clear timers
+		for(timer in this.timers) {
+			clearTimeout(this.timers[timer]);
+		}
+
+		// Remove api object and ARIA attributes
+		target.removeData(NAMESPACE)
+			.removeAttr(ATTR_ID)
+			.removeAttr(ATTR_HAS)
+			.removeAttr('aria-describedby');
+
+		// Reset old title attribute if removed
+		if(this.options.suppress && title) {
+			target.attr('title', title).removeAttr(oldtitle);
+		}
+
+		// Remove qTip events associated with this API
+		this._unassignEvents();
+
+		// Remove ID from used id objects, and delete object references
+		// for better garbage collection and leak protection
+		this.options = this.elements = this.cache = this.timers =
+			this.plugins = this.mouse = NULL;
+
+		// Delete exposed API object
+		delete QTIP.api[this.id];
+	}
+
+	// If an immediate destroy is needed
+	if((immediate !== TRUE || this.triggering === 'hide') && this.rendered) {
+		this.tooltip.one('tooltiphidden', $.proxy(process, this));
+		!this.triggering && this.hide();
+	}
+
+	// If we're not in the process of hiding... process
+	else { process.call(this); }
+
+	return this.target;
+};
+;function invalidOpt(a) {
+	return a === NULL || $.type(a) !== 'object';
+}
+
+function invalidContent(c) {
+	return !( $.isFunction(c) || (c && c.attr) || c.length || ($.type(c) === 'object' && (c.jquery || c.then) ));
+}
+
+// Option object sanitizer
+function sanitizeOptions(opts) {
+	var content, text, ajax, once;
+
+	if(invalidOpt(opts)) { return FALSE; }
+
+	if(invalidOpt(opts.metadata)) {
+		opts.metadata = { type: opts.metadata };
+	}
+
+	if('content' in opts) {
+		content = opts.content;
+
+		if(invalidOpt(content) || content.jquery || content.done) {
+			content = opts.content = {
+				text: (text = invalidContent(content) ? FALSE : content)
+			};
+		}
+		else { text = content.text; }
+
+		// DEPRECATED - Old content.ajax plugin functionality
+		// Converts it into the proper Deferred syntax
+		if('ajax' in content) {
+			ajax = content.ajax;
+			once = ajax && ajax.once !== FALSE;
+			delete content.ajax;
+
+			content.text = function(event, api) {
+				var loading = text || $(this).attr(api.options.content.attr) || 'Loading...',
+
+				deferred = $.ajax(
+					$.extend({}, ajax, { context: api })
+				)
+				.then(ajax.success, NULL, ajax.error)
+				.then(function(content) {
+					if(content && once) { api.set('content.text', content); }
+					return content;
+				},
+				function(xhr, status, error) {
+					if(api.destroyed || xhr.status === 0) { return; }
+					api.set('content.text', status + ': ' + error);
+				});
+
+				return !once ? (api.set('content.text', loading), deferred) : loading;
+			};
+		}
+
+		if('title' in content) {
+			if($.isPlainObject(content.title)) {
+				content.button = content.title.button;
+				content.title = content.title.text;
+			}
+
+			if(invalidContent(content.title || FALSE)) {
+				content.title = FALSE;
+			}
+		}
+	}
+
+	if('position' in opts && invalidOpt(opts.position)) {
+		opts.position = { my: opts.position, at: opts.position };
+	}
+
+	if('show' in opts && invalidOpt(opts.show)) {
+		opts.show = opts.show.jquery ? { target: opts.show } :
+			opts.show === TRUE ? { ready: TRUE } : { event: opts.show };
+	}
+
+	if('hide' in opts && invalidOpt(opts.hide)) {
+		opts.hide = opts.hide.jquery ? { target: opts.hide } : { event: opts.hide };
+	}
+
+	if('style' in opts && invalidOpt(opts.style)) {
+		opts.style = { classes: opts.style };
+	}
+
+	// Sanitize plugin options
+	$.each(PLUGINS, function() {
+		this.sanitize && this.sanitize(opts);
+	});
+
+	return opts;
+}
+
+// Setup builtin .set() option checks
+CHECKS = PROTOTYPE.checks = {
+	builtin: {
+		// Core checks
+		'^id$': function(obj, o, v, prev) {
+			var id = v === TRUE ? QTIP.nextid : v,
+				new_id = NAMESPACE + '-' + id;
+
+			if(id !== FALSE && id.length > 0 && !$('#'+new_id).length) {
+				this._id = new_id;
+
+				if(this.rendered) {
+					this.tooltip[0].id = this._id;
+					this.elements.content[0].id = this._id + '-content';
+					this.elements.title[0].id = this._id + '-title';
+				}
+			}
+			else { obj[o] = prev; }
+		},
+		'^prerender': function(obj, o, v) {
+			v && !this.rendered && this.render(this.options.show.ready);
+		},
+
+		// Content checks
+		'^content.text$': function(obj, o, v) {
+			this._updateContent(v);
+		},
+		'^content.attr$': function(obj, o, v, prev) {
+			if(this.options.content.text === this.target.attr(prev)) {
+				this._updateContent( this.target.attr(v) );
+			}
+		},
+		'^content.title$': function(obj, o, v) {
+			// Remove title if content is null
+			if(!v) { return this._removeTitle(); }
+
+			// If title isn't already created, create it now and update
+			v && !this.elements.title && this._createTitle();
+			this._updateTitle(v);
+		},
+		'^content.button$': function(obj, o, v) {
+			this._updateButton(v);
+		},
+		'^content.title.(text|button)$': function(obj, o, v) {
+			this.set('content.'+o, v); // Backwards title.text/button compat
+		},
+
+		// Position checks
+		'^position.(my|at)$': function(obj, o, v){
+			'string' === typeof v && (this.position[o] = obj[o] = new CORNER(v, o === 'at'));
+		},
+		'^position.container$': function(obj, o, v){
+			this.rendered && this.tooltip.appendTo(v);
+		},
+
+		// Show checks
+		'^show.ready$': function(obj, o, v) {
+			v && (!this.rendered && this.render(TRUE) || this.toggle(TRUE));
+		},
+
+		// Style checks
+		'^style.classes$': function(obj, o, v, p) {
+			this.rendered && this.tooltip.removeClass(p).addClass(v);
+		},
+		'^style.(width|height)': function(obj, o, v) {
+			this.rendered && this.tooltip.css(o, v);
+		},
+		'^style.widget|content.title': function() {
+			this.rendered && this._setWidget();
+		},
+		'^style.def': function(obj, o, v) {
+			this.rendered && this.tooltip.toggleClass(CLASS_DEFAULT, !!v);
+		},
+
+		// Events check
+		'^events.(render|show|move|hide|focus|blur)$': function(obj, o, v) {
+			this.rendered && this.tooltip[($.isFunction(v) ? '' : 'un') + 'bind']('tooltip'+o, v);
+		},
+
+		// Properties which require event reassignment
+		'^(show|hide|position).(event|target|fixed|inactive|leave|distance|viewport|adjust)': function() {
+			if(!this.rendered) { return; }
+
+			// Set tracking flag
+			var posOptions = this.options.position;
+			this.tooltip.attr('tracking', posOptions.target === 'mouse' && posOptions.adjust.mouse);
+
+			// Reassign events
+			this._unassignEvents();
+			this._assignEvents();
+		}
+	}
+};
+
+// Dot notation converter
+function convertNotation(options, notation) {
+	var i = 0, obj, option = options,
+
+	// Split notation into array
+	levels = notation.split('.');
+
+	// Loop through
+	while( option = option[ levels[i++] ] ) {
+		if(i < levels.length) { obj = option; }
+	}
+
+	return [obj || options, levels.pop()];
+}
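+
+// For example, given options = { content: { title: { text: 'Hi' } } },
+// convertNotation(options, 'content.title.text') returns
+// [options.content.title, 'text']: the innermost object plus the final key,
+// which get()/set() below use to read or write the value.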
+
+PROTOTYPE.get = function(notation) {
+	if(this.destroyed) { return this; }
+
+	var o = convertNotation(this.options, notation.toLowerCase()),
+		result = o[0][ o[1] ];
+
+	return result.precedance ? result.string() : result;
+};
+
+function setCallback(notation, args) {
+	var category, rule, match;
+
+	for(category in this.checks) {
+		for(rule in this.checks[category]) {
+			if(match = (new RegExp(rule, 'i')).exec(notation)) {
+				args.push(match);
+
+				if(category === 'builtin' || this.plugins[category]) {
+					this.checks[category][rule].apply(
+						this.plugins[category] || this, args
+					);
+				}
+			}
+		}
+	}
+}
+
+var rmove = /^position\.(my|at|adjust|target|container|viewport)|style|content|show\.ready/i,
+	rrender = /^prerender|show\.ready/i;
+
+PROTOTYPE.set = function(option, value) {
+	if(this.destroyed) { return this; }
+
+	var rendered = this.rendered,
+		reposition = FALSE,
+		options = this.options,
+		checks = this.checks,
+		name;
+
+	// Convert singular option/value pair into object form
+	if('string' === typeof option) {
+		name = option; option = {}; option[name] = value;
+	}
+	else { option = $.extend({}, option); }
+
+	// Set all of the defined options to their new values
+	$.each(option, function(notation, value) {
+		if(rendered && rrender.test(notation)) {
+			delete option[notation]; return;
+		}
+
+		// Set new obj value
+		var obj = convertNotation(options, notation.toLowerCase()), previous;
+		previous = obj[0][ obj[1] ];
+		obj[0][ obj[1] ] = value && value.nodeType ? $(value) : value;
+
+		// Also check if we need to reposition
+		reposition = rmove.test(notation) || reposition;
+
+		// Set the new params for the callback
+		option[notation] = [obj[0], obj[1], value, previous];
+	});
+
+	// Re-sanitize options
+	sanitizeOptions(options);
+
+	/*
+	 * Execute any valid callbacks for the set options
+	 * Also set positioning flag so we don't get loads of redundant repositioning calls.
+	 */
+	this.positioning = TRUE;
+	$.each(option, $.proxy(setCallback, this));
+	this.positioning = FALSE;
+
+	// Update position if needed
+	if(this.rendered && this.tooltip[0].offsetWidth > 0 && reposition) {
+		this.reposition( options.position.target === 'mouse' ? NULL : this.cache.event );
+	}
+
+	return this;
+};
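+
+// Usage sketch (illustrative): given an API reference, e.g. from
+// $('.selector').qtip('api'), options are read and written in dot notation:
+//
+//   api.get('position.my');              // 'top left' (a Corner, via .string())
+//   api.set('content.text', 'Updated');  // fires the '^content.text$' check
+//   api.set({ 'hide.delay': 200, 'style.classes': 'qtip-dark' });
+//
+// set() re-sanitizes the options and repositions the tooltip whenever a
+// changed option matches the rmove regex above.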
+;PROTOTYPE._update = function(content, element, reposition) {
+	var self = this,
+		cache = this.cache;
+
+	// Make sure the tooltip is rendered and content is defined; if not, return FALSE
+	if(!this.rendered || !content) { return FALSE; }
+
+	// Use function to parse content
+	if($.isFunction(content)) {
+		content = content.call(this.elements.target, cache.event, this) || '';
+	}
+
+	// Handle deferred content
+	if($.isFunction(content.then)) {
+		cache.waiting = TRUE;
+		return content.then(function(c) {
+			cache.waiting = FALSE;
+			return self._update(c, element);
+		}, NULL, function(e) {
+			return self._update(e, element);
+		});
+	}
+
+	// If content is null... return false
+	if(content === FALSE || (!content && content !== '')) { return FALSE; }
+
+	// Append new content if it's a DOM array and show it if hidden
+	if(content.jquery && content.length > 0) {
+		element.empty().append(
+			content.css({ display: 'block', visibility: 'visible' })
+		);
+	}
+
+	// Content is a regular string, insert the new content
+	else { element.html(content); }
+
+	// Wait for content to be loaded, and reposition
+	return this._waitForContent(element).then(function(images) {
+		if(self.rendered && self.tooltip[0].offsetWidth > 0) {
+			self.reposition(cache.event, !images.length);
+		}
+	});
+};
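+
+// Deferred content sketch (illustrative; the URL is hypothetical): because
+// _update() recognises thenables, content.text may return a promise and the
+// tooltip fills in once it resolves:
+//
+//   content: { text: function(event, api) { return $.get('/tooltip.html'); } }
+//
+// While the promise is pending, cache.waiting stays TRUE so other code can
+// detect the in-flight state.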
+
+PROTOTYPE._waitForContent = function(element) {
+	var cache = this.cache;
+
+	// Set flag
+	cache.waiting = TRUE;
+
+	// If imagesLoaded is included, ensure images have loaded and return promise
+	return ( $.fn.imagesLoaded ? element.imagesLoaded() : $.Deferred().resolve([]) )
+		.done(function() { cache.waiting = FALSE; })
+		.promise();
+};
+
+PROTOTYPE._updateContent = function(content, reposition) {
+	this._update(content, this.elements.content, reposition);
+};
+
+PROTOTYPE._updateTitle = function(content, reposition) {
+	if(this._update(content, this.elements.title, reposition) === FALSE) {
+		this._removeTitle(FALSE);
+	}
+};
+
+PROTOTYPE._createTitle = function()
+{
+	var elements = this.elements,
+		id = this._id+'-title';
+
+	// Destroy previous title element, if present
+	if(elements.titlebar) { this._removeTitle(); }
+
+	// Create title bar and title elements
+	elements.titlebar = $('<div />', {
+		'class': NAMESPACE + '-titlebar ' + (this.options.style.widget ? createWidgetClass('header') : '')
+	})
+	.append(
+		elements.title = $('<div />', {
+			'id': id,
+			'class': NAMESPACE + '-title',
+			'aria-atomic': TRUE
+		})
+	)
+	.insertBefore(elements.content)
+
+	// Button-specific events
+	.delegate('.qtip-close', 'mousedown keydown mouseup keyup mouseout', function(event) {
+		$(this).toggleClass('ui-state-active ui-state-focus', event.type.substr(-4) === 'down');
+	})
+	.delegate('.qtip-close', 'mouseover mouseout', function(event){
+		$(this).toggleClass('ui-state-hover', event.type === 'mouseover');
+	});
+
+	// Create button if enabled
+	if(this.options.content.button) { this._createButton(); }
+};
+
+PROTOTYPE._removeTitle = function(reposition)
+{
+	var elements = this.elements;
+
+	if(elements.title) {
+		elements.titlebar.remove();
+		elements.titlebar = elements.title = elements.button = NULL;
+
+		// Reposition if enabled
+		if(reposition !== FALSE) { this.reposition(); }
+	}
+};
+;PROTOTYPE._createPosClass = function(my) {
+	return NAMESPACE + '-pos-' + (my || this.options.position.my).abbrev();
+};
+
+PROTOTYPE.reposition = function(event, effect) {
+	if(!this.rendered || this.positioning || this.destroyed) { return this; }
+
+	// Set positioning flag
+	this.positioning = TRUE;
+
+	var cache = this.cache,
+		tooltip = this.tooltip,
+		posOptions = this.options.position,
+		target = posOptions.target,
+		my = posOptions.my,
+		at = posOptions.at,
+		viewport = posOptions.viewport,
+		container = posOptions.container,
+		adjust = posOptions.adjust,
+		method = adjust.method.split(' '),
+		tooltipWidth = tooltip.outerWidth(FALSE),
+		tooltipHeight = tooltip.outerHeight(FALSE),
+		targetWidth = 0,
+		targetHeight = 0,
+		type = tooltip.css('position'),
+		position = { left: 0, top: 0 },
+		visible = tooltip[0].offsetWidth > 0,
+		isScroll = event && event.type === 'scroll',
+		win = $(window),
+		doc = container[0].ownerDocument,
+		mouse = this.mouse,
+		pluginCalculations, offset, adjusted, newClass;
+
+	// Check if absolute position was passed
+	if($.isArray(target) && target.length === 2) {
+		// Force left top and set position
+		at = { x: LEFT, y: TOP };
+		position = { left: target[0], top: target[1] };
+	}
+
+	// Check if mouse was the target
+	else if(target === 'mouse') {
+		// Force left top to allow flipping
+		at = { x: LEFT, y: TOP };
+
+		// Use the mouse origin that caused the show event if mouse adjustment is disabled or distance hiding is enabled
+		if((!adjust.mouse || this.options.hide.distance) && cache.origin && cache.origin.pageX) {
+			event =  cache.origin;
+		}
+
+		// Use cached event for resize/scroll events
+		else if(!event || (event && (event.type === 'resize' || event.type === 'scroll'))) {
+			event = cache.event;
+		}
+
+		// Otherwise, use the cached mouse coordinates if available
+		else if(mouse && mouse.pageX) {
+			event = mouse;
+		}
+
+		// Calculate body and container offset and take them into account below
+		if(type !== 'static') { position = container.offset(); }
+		if(doc.body.offsetWidth !== (window.innerWidth || doc.documentElement.clientWidth)) {
+			offset = $(document.body).offset();
+		}
+
+		// Use event coordinates for position
+		position = {
+			left: event.pageX - position.left + (offset && offset.left || 0),
+			top: event.pageY - position.top + (offset && offset.top || 0)
+		};
+
+		// Scroll events are a pain in some browsers; adjust for any scrolling since the mouse coords were cached
+		if(adjust.mouse && isScroll && mouse) {
+			position.left -= (mouse.scrollX || 0) - win.scrollLeft();
+			position.top -= (mouse.scrollY || 0) - win.scrollTop();
+		}
+	}
+
+	// Target wasn't mouse or absolute...
+	else {
+		// Check if event targeting is being used
+		if(target === 'event') {
+			if(event && event.target && event.type !== 'scroll' && event.type !== 'resize') {
+				cache.target = $(event.target);
+			}
+			else if(!event.target) {
+				cache.target = this.elements.target;
+			}
+		}
+		else if(target !== 'event'){
+			cache.target = $(target.jquery ? target : this.elements.target);
+		}
+		target = cache.target;
+
+		// Parse the target into a jQuery object and make sure there's an element present
+		target = $(target).eq(0);
+		if(target.length === 0) { return this; }
+
+		// Check if window or document is the target
+		else if(target[0] === document || target[0] === window) {
+			targetWidth = BROWSER.iOS ? window.innerWidth : target.width();
+			targetHeight = BROWSER.iOS ? window.innerHeight : target.height();
+
+			if(target[0] === window) {
+				position = {
+					top: (viewport || target).scrollTop(),
+					left: (viewport || target).scrollLeft()
+				};
+			}
+		}
+
+		// Check if the target is an <AREA> element
+		else if(PLUGINS.imagemap && target.is('area')) {
+			pluginCalculations = PLUGINS.imagemap(this, target, at, PLUGINS.viewport ? method : FALSE);
+		}
+
+		// Check if the target is an SVG element
+		else if(PLUGINS.svg && target && target[0].ownerSVGElement) {
+			pluginCalculations = PLUGINS.svg(this, target, at, PLUGINS.viewport ? method : FALSE);
+		}
+
+		// Otherwise use regular jQuery methods
+		else {
+			targetWidth = target.outerWidth(FALSE);
+			targetHeight = target.outerHeight(FALSE);
+			position = target.offset();
+		}
+
+		// Parse returned plugin values into proper variables
+		if(pluginCalculations) {
+			targetWidth = pluginCalculations.width;
+			targetHeight = pluginCalculations.height;
+			offset = pluginCalculations.offset;
+			position = pluginCalculations.position;
+		}
+
+		// Adjust position to take into account offset parents
+		position = this.reposition.offset(target, position, container);
+
+		// Adjust for position.fixed tooltips (and also iOS scroll bug in v3.2-4.0 & v4.3-4.3.2)
+		if((BROWSER.iOS > 3.1 && BROWSER.iOS < 4.1) ||
+			(BROWSER.iOS >= 4.3 && BROWSER.iOS < 4.33) ||
+			(!BROWSER.iOS && type === 'fixed')
+		){
+			position.left -= win.scrollLeft();
+			position.top -= win.scrollTop();
+		}
+
+		// Adjust position relative to target
+		if(!pluginCalculations || (pluginCalculations && pluginCalculations.adjustable !== FALSE)) {
+			position.left += at.x === RIGHT ? targetWidth : at.x === CENTER ? targetWidth / 2 : 0;
+			position.top += at.y === BOTTOM ? targetHeight : at.y === CENTER ? targetHeight / 2 : 0;
+		}
+	}
+
+	// Adjust position relative to tooltip
+	position.left += adjust.x + (my.x === RIGHT ? -tooltipWidth : my.x === CENTER ? -tooltipWidth / 2 : 0);
+	position.top += adjust.y + (my.y === BOTTOM ? -tooltipHeight : my.y === CENTER ? -tooltipHeight / 2 : 0);
+
+	// Use viewport adjustment plugin if enabled
+	if(PLUGINS.viewport) {
+		adjusted = position.adjusted = PLUGINS.viewport(
+			this, position, posOptions, targetWidth, targetHeight, tooltipWidth, tooltipHeight
+		);
+
+		// Apply offsets supplied by positioning plugin (if used)
+		if(offset && adjusted.left) { position.left += offset.left; }
+		if(offset && adjusted.top) {  position.top += offset.top; }
+
+		// Apply any new 'my' position
+		if(adjusted.my) { this.position.my = adjusted.my; }
+	}
+
+	// Viewport adjustment is disabled, set values to zero
+	else { position.adjusted = { left: 0, top: 0 }; }
+
+	// Set tooltip position class if it's changed
+	if(cache.posClass !== (newClass = this._createPosClass(this.position.my))) {
+		tooltip.removeClass(cache.posClass).addClass( (cache.posClass = newClass) );
+	}
+
+	// tooltipmove event
+	if(!this._trigger('move', [position, viewport.elem || viewport], event)) { return this; }
+	delete position.adjusted;
+
+	// If the effect is disabled, the target is the mouse, no animation is defined, or positioning produced NaN, set the CSS directly
+	if(effect === FALSE || !visible || isNaN(position.left) || isNaN(position.top) || target === 'mouse' || !$.isFunction(posOptions.effect)) {
+		tooltip.css(position);
+	}
+
+	// Use custom function if provided
+	else if($.isFunction(posOptions.effect)) {
+		posOptions.effect.call(tooltip, this, $.extend({}, position));
+		tooltip.queue(function(next) {
+			// Reset attributes to avoid cross-browser rendering bugs
+			$(this).css({ opacity: '', height: '' });
+			if(BROWSER.ie) { this.style.removeAttribute('filter'); }
+
+			next();
+		});
+	}
+
+	// Reset positioning flag
+	this.positioning = FALSE;
+
+	return this;
+};
+
+// Custom (more correct for qTip!) offset calculator
+PROTOTYPE.reposition.offset = function(elem, pos, container) {
+	if(!container[0]) { return pos; }
+
+	var ownerDocument = $(elem[0].ownerDocument),
+		quirks = !!BROWSER.ie && document.compatMode !== 'CSS1Compat',
+		parent = container[0],
+		scrolled, position, parentOffset, overflow;
+
+	function scroll(e, i) {
+		pos.left += i * e.scrollLeft();
+		pos.top += i * e.scrollTop();
+	}
+
+	// Compensate for a non-static container's offset
+	do {
+		if((position = $.css(parent, 'position')) !== 'static') {
+			if(position === 'fixed') {
+				parentOffset = parent.getBoundingClientRect();
+				scroll(ownerDocument, -1);
+			}
+			else {
+				parentOffset = $(parent).position();
+				parentOffset.left += (parseFloat($.css(parent, 'borderLeftWidth')) || 0);
+				parentOffset.top += (parseFloat($.css(parent, 'borderTopWidth')) || 0);
+			}
+
+			pos.left -= parentOffset.left + (parseFloat($.css(parent, 'marginLeft')) || 0);
+			pos.top -= parentOffset.top + (parseFloat($.css(parent, 'marginTop')) || 0);
+
+			// If this is the first parent element with an overflow of "scroll" or "auto", store it
+			if(!scrolled && (overflow = $.css(parent, 'overflow')) !== 'hidden' && overflow !== 'visible') { scrolled = $(parent); }
+		}
+	}
+	while((parent = parent.offsetParent));
+
+	// Compensate for the container's scroll if it also has an offsetParent (or in IE quirks mode)
+	if(scrolled && (scrolled[0] !== ownerDocument[0] || quirks)) {
+		scroll(scrolled, 1);
+	}
+
+	return pos;
+};
+
+// Corner class
+var C = (CORNER = PROTOTYPE.reposition.Corner = function(corner, forceY) {
+	corner = ('' + corner).replace(/([A-Z])/, ' $1').replace(/middle/gi, CENTER).toLowerCase();
+	this.x = (corner.match(/left|right/i) || corner.match(/center/) || ['inherit'])[0].toLowerCase();
+	this.y = (corner.match(/top|bottom|center/i) || ['inherit'])[0].toLowerCase();
+	this.forceY = !!forceY;
+
+	var f = corner.charAt(0);
+	this.precedance = (f === 't' || f === 'b' ? Y : X);
+}).prototype;
+
+C.invert = function(z, center) {
+	this[z] = this[z] === LEFT ? RIGHT : this[z] === RIGHT ? LEFT : center || this[z];
+};
+
+C.string = function(join) {
+	var x = this.x, y = this.y;
+
+	var result = x !== y ?
+		(x === 'center' || y !== 'center' && (this.precedance === Y || this.forceY) ? 
+			[y,x] : [x,y]
+		) :
+	[x];
+
+	return join !== false ? result.join(' ') : result;
+};
+
+C.abbrev = function() {
+	var result = this.string(false);
+	return result[0].charAt(0) + (result[1] && result[1].charAt(0) || '');
+};
+
+C.clone = function() {
+	return new CORNER( this.string(), this.forceY );
+};
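+
+// Corner sketch (illustrative): new CORNER('topMiddle') normalizes to
+// y: 'top', x: 'center', so .string() gives 'top center' and .abbrev() 'tc'.
+// invert('x') flips left/right on that axis, which adjustment code can use
+// when a tooltip has to be mirrored to stay inside the viewport.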
+
+;
+PROTOTYPE.toggle = function(state, event) {
+	var cache = this.cache,
+		options = this.options,
+		tooltip = this.tooltip;
+
+	// Try to prevent flickering when tooltip overlaps show element
+	if(event) {
+		if((/over|enter/).test(event.type) && cache.event && (/out|leave/).test(cache.event.type) &&
+			options.show.target.add(event.target).length === options.show.target.length &&
+			tooltip.has(event.relatedTarget).length) {
+			return this;
+		}
+
+		// Cache event
+		cache.event = $.event.fix(event);
+	}
+
+	// If we're currently waiting and we've just hidden... stop it
+	this.waiting && !state && (this.hiddenDuringWait = TRUE);
+
+	// Render the tooltip if showing and it isn't already
+	if(!this.rendered) { return state ? this.render(1) : this; }
+	else if(this.destroyed || this.disabled) { return this; }
+
+	var type = state ? 'show' : 'hide',
+		opts = this.options[type],
+		otherOpts = this.options[ !state ? 'show' : 'hide' ],
+		posOptions = this.options.position,
+		contentOptions = this.options.content,
+		width = this.tooltip.css('width'),
+		visible = this.tooltip.is(':visible'),
+		animate = state || opts.target.length === 1,
+		sameTarget = !event || opts.target.length < 2 || cache.target[0] === event.target,
+		identicalState, allow, showEvent, delay, after;
+
+	// Detect state if a valid one isn't provided; .search() returns 0 (falsy) when state is already a boolean/number
+	if((typeof state).search('boolean|number')) { state = !visible; }
+
+	// Check if the tooltip is in an identical state to the new would-be state
+	identicalState = !tooltip.is(':animated') && visible === state && sameTarget;
+
+	// Fire tooltip(show/hide) event and check if destroyed
+	allow = !identicalState ? !!this._trigger(type, [90]) : NULL;
+
+	// Check to make sure the tooltip wasn't destroyed in the callback
+	if(this.destroyed) { return this; }
+
+	// If the user didn't stop the method prematurely and we're showing the tooltip, focus it
+	if(allow !== FALSE && state) { this.focus(event); }
+
+	// If the state hasn't changed or the user stopped it, return early
+	if(!allow || identicalState) { return this; }
+
+	// Set ARIA hidden attribute
+	$.attr(tooltip[0], 'aria-hidden', !state);
+
+	// Execute state specific properties
+	if(state) {
+		// Store show origin coordinates
+		this.mouse && (cache.origin = $.event.fix(this.mouse));
+
+		// Update tooltip content & title if it's a dynamic function
+		if($.isFunction(contentOptions.text)) { this._updateContent(contentOptions.text, FALSE); }
+		if($.isFunction(contentOptions.title)) { this._updateTitle(contentOptions.title, FALSE); }
+
+		// Cache mousemove events for positioning purposes (if not already tracking)
+		if(!trackingBound && posOptions.target === 'mouse' && posOptions.adjust.mouse) {
+			$(document).bind('mousemove.'+NAMESPACE, this._storeMouse);
+			trackingBound = TRUE;
+		}
+
+		// Update the tooltip position (set width first to prevent viewport/max-width issues)
+		if(!width) { tooltip.css('width', tooltip.outerWidth(FALSE)); }
+		this.reposition(event, arguments[2]);
+		if(!width) { tooltip.css('width', ''); }
+
+		// Hide other tooltips if tooltip is solo
+		if(opts.solo) {
+			(typeof opts.solo === 'string' ? $(opts.solo) : $(SELECTOR, opts.solo))
+				.not(tooltip).not(opts.target).qtip('hide', $.Event('tooltipsolo'));
+		}
+	}
+	else {
+		// Clear show timer if we're hiding
+		clearTimeout(this.timers.show);
+
+		// Remove cached origin on hide
+		delete cache.origin;
+
+		// Remove mouse tracking event if not needed (all tracking qTips are hidden)
+		if(trackingBound && !$(SELECTOR+'[tracking="true"]:visible', opts.solo).not(tooltip).length) {
+			$(document).unbind('mousemove.'+NAMESPACE);
+			trackingBound = FALSE;
+		}
+
+		// Blur the tooltip
+		this.blur(event);
+	}
+
+	// Define post-animation, state specific properties
+	after = $.proxy(function() {
+		if(state) {
+			// Prevent antialias from disappearing in IE by removing filter
+			if(BROWSER.ie) { tooltip[0].style.removeAttribute('filter'); }
+
+			// Remove overflow setting to prevent tip bugs
+			tooltip.css('overflow', '');
+
+			// Autofocus elements if enabled
+			if('string' === typeof opts.autofocus) {
+				$(this.options.show.autofocus, tooltip).focus();
+			}
+
+			// If set, hide tooltip when inactive for delay period
+			this.options.show.target.trigger('qtip-'+this.id+'-inactive');
+		}
+		else {
+			// Reset CSS states
+			tooltip.css({
+				display: '',
+				visibility: '',
+				opacity: '',
+				left: '',
+				top: ''
+			});
+		}
+
+		// tooltipvisible/tooltiphidden events
+		this._trigger(state ? 'visible' : 'hidden');
+	}, this);
+
+	// If no effect type is supplied, use a simple toggle
+	if(opts.effect === FALSE || animate === FALSE) {
+		tooltip[ type ]();
+		after();
+	}
+
+	// Use custom function if provided
+	else if($.isFunction(opts.effect)) {
+		tooltip.stop(1, 1);
+		opts.effect.call(tooltip, this);
+		tooltip.queue('fx', function(n) {
+			after(); n();
+		});
+	}
+
+	// Use basic fade function by default
+	else { tooltip.fadeTo(90, state ? 1 : 0, after); }
+
+	// If inactive hide method is set, activate it
+	if(state) { opts.target.trigger('qtip-'+this.id+'-inactive'); }
+
+	return this;
+};
+
+PROTOTYPE.show = function(event) { return this.toggle(TRUE, event); };
+
+PROTOTYPE.hide = function(event) { return this.toggle(FALSE, event); };
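+
+// E.g. (illustrative): $('.selector').qtip('show') and .qtip('hide') reach
+// these wrappers through the $.fn.qtip command dispatcher defined further down.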
+;PROTOTYPE.focus = function(event) {
+	if(!this.rendered || this.destroyed) { return this; }
+
+	var qtips = $(SELECTOR),
+		tooltip = this.tooltip,
+		curIndex = parseInt(tooltip[0].style.zIndex, 10),
+		newIndex = QTIP.zindex + qtips.length,
+		focusedElem;
+
+	// Only update the z-index if it has changed and tooltip is not already focused
+	if(!tooltip.hasClass(CLASS_FOCUS)) {
+		// tooltipfocus event
+		if(this._trigger('focus', [newIndex], event)) {
+			// Only update z-indexes if they've changed
+			if(curIndex !== newIndex) {
+				// Reduce our z-indexes and keep them properly ordered
+				qtips.each(function() {
+					if(this.style.zIndex > curIndex) {
+						this.style.zIndex = this.style.zIndex - 1;
+					}
+				});
+
+				// Fire blur event for focused tooltip
+				qtips.filter('.' + CLASS_FOCUS).qtip('blur', event);
+			}
+
+			// Set the new z-index
+			tooltip.addClass(CLASS_FOCUS)[0].style.zIndex = newIndex;
+		}
+	}
+
+	return this;
+};
+
+PROTOTYPE.blur = function(event) {
+	if(!this.rendered || this.destroyed) { return this; }
+
+	// Set focused status to FALSE
+	this.tooltip.removeClass(CLASS_FOCUS);
+
+	// tooltipblur event
+	this._trigger('blur', [ this.tooltip.css('zIndex') ], event);
+
+	return this;
+};
+;PROTOTYPE.disable = function(state) {
+	if(this.destroyed) { return this; }
+
+	// If 'toggle' is passed, toggle the current state
+	if(state === 'toggle') {
+		state = !(this.rendered ? this.tooltip.hasClass(CLASS_DISABLED) : this.disabled);
+	}
+
+	// Disable if no state passed
+	else if('boolean' !== typeof state) {
+		state = TRUE;
+	}
+
+	if(this.rendered) {
+		this.tooltip.toggleClass(CLASS_DISABLED, state)
+			.attr('aria-disabled', state);
+	}
+
+	this.disabled = !!state;
+
+	return this;
+};
+
+PROTOTYPE.enable = function() { return this.disable(FALSE); };
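+
+// E.g. (illustrative): api.disable() disables the tooltip, api.disable('toggle')
+// flips the current state, and api.enable() re-enables it; the CLASS_DISABLED
+// class set here is what the showMethod/hideMethod handlers below check
+// before acting.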
+;PROTOTYPE._createButton = function()
+{
+	var self = this,
+		elements = this.elements,
+		tooltip = elements.tooltip,
+		button = this.options.content.button,
+		isString = typeof button === 'string',
+		close = isString ? button : 'Close tooltip';
+
+	if(elements.button) { elements.button.remove(); }
+
+	// Use custom button if one was supplied by user, else use default
+	if(button.jquery) {
+		elements.button = button;
+	}
+	else {
+		elements.button = $('<a />', {
+			'class': 'qtip-close ' + (this.options.style.widget ? '' : NAMESPACE+'-icon'),
+			'title': close,
+			'aria-label': close
+		})
+		.prepend(
+			$('<span />', {
+				'class': 'ui-icon ui-icon-close',
+				'html': '×'
+			})
+		);
+	}
+
+	// Create button and setup attributes
+	elements.button.appendTo(elements.titlebar || tooltip)
+		.attr('role', 'button')
+		.click(function(event) {
+			if(!tooltip.hasClass(CLASS_DISABLED)) { self.hide(event); }
+			return FALSE;
+		});
+};
+
+PROTOTYPE._updateButton = function(button)
+{
+	// Make sure tooltip is rendered and if not, return
+	if(!this.rendered) { return FALSE; }
+
+	var elem = this.elements.button;
+	if(button) { this._createButton(); }
+	else { elem.remove(); }
+};
+;// Widget class creator
+function createWidgetClass(cls) {
+	return WIDGET.concat('').join(cls ? '-'+cls+' ' : ' ');
+}
+
+// Widget class setter method
+PROTOTYPE._setWidget = function()
+{
+	var on = this.options.style.widget,
+		elements = this.elements,
+		tooltip = elements.tooltip,
+		disabled = tooltip.hasClass(CLASS_DISABLED);
+
+	tooltip.removeClass(CLASS_DISABLED);
+	CLASS_DISABLED = on ? 'ui-state-disabled' : 'qtip-disabled';
+	tooltip.toggleClass(CLASS_DISABLED, disabled);
+
+	tooltip.toggleClass('ui-helper-reset '+createWidgetClass(), on).toggleClass(CLASS_DEFAULT, this.options.style.def && !on);
+
+	if(elements.content) {
+		elements.content.toggleClass( createWidgetClass('content'), on);
+	}
+	if(elements.titlebar) {
+		elements.titlebar.toggleClass( createWidgetClass('header'), on);
+	}
+	if(elements.button) {
+		elements.button.toggleClass(NAMESPACE+'-icon', !on);
+	}
+};
+;function delay(callback, duration) {
+	// Delay the callback if a duration was given, otherwise run it immediately
+	if(duration > 0) {
+		return setTimeout(
+			$.proxy(callback, this), duration
+		);
+	}
+	else { callback.call(this); }
+}
+
+function showMethod(event) {
+	if(this.tooltip.hasClass(CLASS_DISABLED)) { return; }
+
+	// Clear hide timers
+	clearTimeout(this.timers.show);
+	clearTimeout(this.timers.hide);
+
+	// Start show timer
+	this.timers.show = delay.call(this,
+		function() { this.toggle(TRUE, event); },
+		this.options.show.delay
+	);
+}
+
+function hideMethod(event) {
+	if(this.tooltip.hasClass(CLASS_DISABLED) || this.destroyed) { return; }
+
+	// Check if new target was actually the tooltip element
+	var relatedTarget = $(event.relatedTarget),
+		ontoTooltip = relatedTarget.closest(SELECTOR)[0] === this.tooltip[0],
+		ontoTarget = relatedTarget[0] === this.options.show.target[0];
+
+	// Clear timers and stop animation queue
+	clearTimeout(this.timers.show);
+	clearTimeout(this.timers.hide);
+
+	// Prevent hiding if mouse positioning is enabled and the cursor momentarily overlaps the tooltip,
+	// or if the tooltip is fixed and the mouse moved onto the tooltip or show target
+	if(this !== relatedTarget[0] &&
+		(this.options.position.target === 'mouse' && ontoTooltip) ||
+		(this.options.hide.fixed && (
+			(/mouse(out|leave|move)/).test(event.type) && (ontoTooltip || ontoTarget))
+		))
+	{
+		try {
+			event.preventDefault();
+			event.stopImmediatePropagation();
+		} catch(e) {}
+
+		return;
+	}
+
+	// If tooltip has displayed, start hide timer
+	this.timers.hide = delay.call(this,
+		function() { this.toggle(FALSE, event); },
+		this.options.hide.delay,
+		this
+	);
+}
+
+function inactiveMethod(event) {
+	if(this.tooltip.hasClass(CLASS_DISABLED) || !this.options.hide.inactive) { return; }
+
+	// Clear timer
+	clearTimeout(this.timers.inactive);
+
+	this.timers.inactive = delay.call(this,
+		function(){ this.hide(event); },
+		this.options.hide.inactive
+	);
+}
+
+function repositionMethod(event) {
+	if(this.rendered && this.tooltip[0].offsetWidth > 0) { this.reposition(event); }
+}
+
+// Store mouse coordinates
+PROTOTYPE._storeMouse = function(event) {
+	(this.mouse = $.event.fix(event)).type = 'mousemove';
+	return this;
+};
+
+// Bind events
+PROTOTYPE._bind = function(targets, events, method, suffix, context) {
+	if(!targets || !method || !events.length) { return; }
+	var ns = '.' + this._id + (suffix ? '-'+suffix : '');
+	$(targets).bind(
+		(events.split ? events : events.join(ns + ' ')) + ns,
+		$.proxy(method, context || this)
+	);
+	return this;
+};
+PROTOTYPE._unbind = function(targets, suffix) {
+	targets && $(targets).unbind('.' + this._id + (suffix ? '-'+suffix : ''));
+	return this;
+};
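+
+// Namespacing sketch (illustrative, assuming this._id is 'qtip-0'):
+// this._bind($(document), 'mousemove', handler, 'tracking') binds
+// 'mousemove.qtip-0-tracking', so this._unbind($(document), 'tracking')
+// later removes only that group without touching other qTip handlers.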
+
+// Global delegation helper
+function delegate(selector, events, method) {
+	$(document.body).delegate(selector,
+		(events.split ? events : events.join('.'+NAMESPACE + ' ')) + '.'+NAMESPACE,
+		function() {
+			var api = QTIP.api[ $.attr(this, ATTR_ID) ];
+			api && !api.disabled && method.apply(api, arguments);
+		}
+	);
+}
+// Event trigger
+PROTOTYPE._trigger = function(type, args, event) {
+	var callback = $.Event('tooltip'+type);
+	callback.originalEvent = (event && $.extend({}, event)) || this.cache.event || NULL;
+
+	this.triggering = type;
+	this.tooltip.trigger(callback, [this].concat(args || []));
+	this.triggering = FALSE;
+
+	return !callback.isDefaultPrevented();
+};
+
+PROTOTYPE._bindEvents = function(showEvents, hideEvents, showTargets, hideTargets, showMethod, hideMethod) {
+	// Get targets that lie within both the show and hide target sets
+	var similarTargets = showTargets.filter( hideTargets ).add( hideTargets.filter(showTargets) ),
+		toggleEvents = [];
+
+	// If hide and show targets are the same...
+	if(similarTargets.length) {
+
+		// Filter identical show/hide events
+		$.each(hideEvents, function(i, type) {
+			var showIndex = $.inArray(type, showEvents);
+
+			// Both events are identical, remove from both hide and show events
+			// and append to toggleEvents
+			showIndex > -1 && toggleEvents.push( showEvents.splice( showIndex, 1 )[0] );
+		});
+
+		// Toggle events are a special case of identical show/hide events, which happen in sequence
+		if(toggleEvents.length) {
+			// Bind toggle events to the similar targets
+			this._bind(similarTargets, toggleEvents, function(event) {
+				var state = this.rendered ? this.tooltip[0].offsetWidth > 0 : false;
+				(state ? hideMethod : showMethod).call(this, event);
+			});
+
+			// Remove the similar targets from the regular show/hide bindings
+			showTargets = showTargets.not(similarTargets);
+			hideTargets = hideTargets.not(similarTargets);
+		}
+	}
+
+	// Apply show/hide/toggle events
+	this._bind(showTargets, showEvents, showMethod);
+	this._bind(hideTargets, hideEvents, hideMethod);
+};
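+
+// Illustrative example: with show.event and hide.event both set to 'click' on
+// the same target, 'click' is spliced into toggleEvents and a single handler
+// shows or hides based on current visibility, giving click-to-toggle behaviour.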
+
+PROTOTYPE._assignInitialEvents = function(event) {
+	var options = this.options,
+		showTarget = options.show.target,
+		hideTarget = options.hide.target,
+		showEvents = options.show.event ? $.trim('' + options.show.event).split(' ') : [],
+		hideEvents = options.hide.event ? $.trim('' + options.hide.event).split(' ') : [];
+
+	// Catch remove/removeqtip events on target element to destroy redundant tooltips
+	this._bind(this.elements.target, ['remove', 'removeqtip'], function(event) {
+		this.destroy(true);
+	}, 'destroy');
+
+	/*
+	 * Make sure hoverIntent functions properly by using mouseleave as a hide event if
+	 * mouseover/mouseenter is used for show.event, even if it isn't in the user's options.
+	 */
+	if(/mouse(over|enter)/i.test(options.show.event) && !/mouse(out|leave)/i.test(options.hide.event)) {
+		hideEvents.push('mouseleave');
+	}
+
+	/*
+	 * Also make sure initial mouse targeting works correctly by caching mousemove coords
+	 * on show targets before the tooltip has rendered. Also set onTarget when triggered to
+	 * keep mouse tracking working.
+	 */
+	this._bind(showTarget, 'mousemove', function(event) {
+		this._storeMouse(event);
+		this.cache.onTarget = TRUE;
+	});
+
+	// Define hoverIntent function
+	function hoverIntent(event) {
+		// Only continue if tooltip isn't disabled
+		if(this.disabled || this.destroyed) { return FALSE; }
+
+		// Cache the event data
+		this.cache.event = event && $.event.fix(event);
+		this.cache.target = event && $(event.target);
+
+		// Start the event sequence
+		clearTimeout(this.timers.show);
+		this.timers.show = delay.call(this,
+			function() { this.render(typeof event === 'object' || options.show.ready); },
+			options.prerender ? 0 : options.show.delay
+		);
+	}
+
+	// Filter and bind events
+	this._bindEvents(showEvents, hideEvents, showTarget, hideTarget, hoverIntent, function() {
+		if(!this.timers) { return FALSE; }
+		clearTimeout(this.timers.show);
+	});
+
+	// If prerendering or show.ready is enabled, create the tooltip now
+	if(options.show.ready || options.prerender) { hoverIntent.call(this, event); }
+};
+
+// Event assignment method
+PROTOTYPE._assignEvents = function() {
+	var self = this,
+		options = this.options,
+		posOptions = options.position,
+
+		tooltip = this.tooltip,
+		showTarget = options.show.target,
+		hideTarget = options.hide.target,
+		containerTarget = posOptions.container,
+		viewportTarget = posOptions.viewport,
+		documentTarget = $(document),
+		bodyTarget = $(document.body),
+		windowTarget = $(window),
+
+		showEvents = options.show.event ? $.trim('' + options.show.event).split(' ') : [],
+		hideEvents = options.hide.event ? $.trim('' + options.hide.event).split(' ') : [];
+
+
+	// Assign passed event callbacks
+	$.each(options.events, function(name, callback) {
+		self._bind(tooltip, name === 'toggle' ? ['tooltipshow','tooltiphide'] : ['tooltip'+name], callback, null, tooltip);
+	});
+
+	// Hide tooltips when leaving current window/frame (but not select/option elements)
+	if(/mouse(out|leave)/i.test(options.hide.event) && options.hide.leave === 'window') {
+		this._bind(documentTarget, ['mouseout', 'blur'], function(event) {
+			if(!/select|option/.test(event.target.nodeName) && !event.relatedTarget) {
+				this.hide(event);
+			}
+		});
+	}
+
+	// Enable hide.fixed by adding appropriate class
+	if(options.hide.fixed) {
+		hideTarget = hideTarget.add( tooltip.addClass(CLASS_FIXED) );
+	}
+
+	/*
+	 * Make sure hoverIntent functions properly by using mouseleave to clear show timer if
+	 * mouseover/mouseenter is used for show.event, even if it isn't in the user's options.
+	 */
+	else if(/mouse(over|enter)/i.test(options.show.event)) {
+		this._bind(hideTarget, 'mouseleave', function() {
+			clearTimeout(this.timers.show);
+		});
+	}
+
+	// Hide tooltip on document mousedown if unfocus events are enabled
+	if(('' + options.hide.event).indexOf('unfocus') > -1) {
+		this._bind(containerTarget.closest('html'), ['mousedown', 'touchstart'], function(event) {
+			var elem = $(event.target),
+				enabled = this.rendered && !this.tooltip.hasClass(CLASS_DISABLED) && this.tooltip[0].offsetWidth > 0,
+				isAncestor = elem.parents(SELECTOR).filter(this.tooltip[0]).length > 0;
+
+			if(elem[0] !== this.target[0] && elem[0] !== this.tooltip[0] && !isAncestor &&
+				!this.target.has(elem[0]).length && enabled
+			) {
+				this.hide(event);
+			}
+		});
+	}
+
+	// Check if the tooltip hides when inactive
+	if('number' === typeof options.hide.inactive) {
+		// Bind inactive method to show target(s) as a custom event
+		this._bind(showTarget, 'qtip-'+this.id+'-inactive', inactiveMethod, 'inactive');
+
+		// Define events which reset the 'inactive' event handler
+		this._bind(hideTarget.add(tooltip), QTIP.inactiveEvents, inactiveMethod);
+	}
+
+	// Filter and bind events
+	this._bindEvents(showEvents, hideEvents, showTarget, hideTarget, showMethod, hideMethod);
+
+	// Mouse movement bindings
+	this._bind(showTarget.add(tooltip), 'mousemove', function(event) {
+		// Check if the tooltip hides when mouse is moved a certain distance
+		if('number' === typeof options.hide.distance) {
+			var origin = this.cache.origin || {},
+				limit = this.options.hide.distance,
+				abs = Math.abs;
+
+			// Check if the movement has gone beyond the limit, and hide it if so
+			if(abs(event.pageX - origin.pageX) >= limit || abs(event.pageY - origin.pageY) >= limit) {
+				this.hide(event);
+			}
+		}
+
+		// Cache mousemove coords on show targets
+		this._storeMouse(event);
+	});
+
+	// Mouse positioning events
+	if(posOptions.target === 'mouse') {
+		// If mouse adjustment is on...
+		if(posOptions.adjust.mouse) {
+			// Apply a mouseleave event so we don't get problems with overlapping
+			if(options.hide.event) {
+				// Track if we're on the target or not
+				this._bind(showTarget, ['mouseenter', 'mouseleave'], function(event) {
+					if(!this.cache) { return FALSE; }
+					this.cache.onTarget = event.type === 'mouseenter';
+				});
+			}
+
+			// Update tooltip position on mousemove
+			this._bind(documentTarget, 'mousemove', function(event) {
+				// Update the tooltip position only if the tooltip is visible and adjustment is enabled
+				if(this.rendered && this.cache.onTarget && !this.tooltip.hasClass(CLASS_DISABLED) && this.tooltip[0].offsetWidth > 0) {
+					this.reposition(event);
+				}
+			});
+		}
+	}
+
+	// Adjust positions of the tooltip on window resize if enabled
+	if(posOptions.adjust.resize || viewportTarget.length) {
+		this._bind( $.event.special.resize ? viewportTarget : windowTarget, 'resize', repositionMethod );
+	}
+
+	// Adjust tooltip position on scroll of the window or viewport element if present
+	if(posOptions.adjust.scroll) {
+		this._bind( windowTarget.add(posOptions.container), 'scroll', repositionMethod );
+	}
+};
+
+// Un-assignment method
+PROTOTYPE._unassignEvents = function() {
+	var options = this.options,
+		showTargets = options.show.target,
+		hideTargets = options.hide.target,
+		targets = $.grep([
+			this.elements.target[0],
+			this.rendered && this.tooltip[0],
+			options.position.container[0],
+			options.position.viewport[0],
+			options.position.container.closest('html')[0], // unfocus
+			window,
+			document
+		], function(i) {
+			return typeof i === 'object';
+		});
+
+	// Add show and hide targets if they're valid
+	if(showTargets && showTargets.toArray) {
+		targets = targets.concat(showTargets.toArray());
+	}
+	if(hideTargets && hideTargets.toArray) {
+		targets = targets.concat(hideTargets.toArray());
+	}
+
+	// Unbind the events
+	this._unbind(targets)
+		._unbind(targets, 'destroy')
+		._unbind(targets, 'inactive');
+};
+
+// Apply common event handlers using delegate (avoids excessive .bind calls!)
+$(function() {
+	delegate(SELECTOR, ['mouseenter', 'mouseleave'], function(event) {
+		var state = event.type === 'mouseenter',
+			tooltip = $(event.currentTarget),
+			target = $(event.relatedTarget || event.target),
+			options = this.options;
+
+		// On mouseenter...
+		if(state) {
+			// Focus the tooltip on mouseenter (z-index stacking)
+			this.focus(event);
+
+			// Clear hide timer on tooltip hover to prevent it from closing
+			tooltip.hasClass(CLASS_FIXED) && !tooltip.hasClass(CLASS_DISABLED) && clearTimeout(this.timers.hide);
+		}
+
+		// On mouseleave...
+		else {
+			// When mouse tracking is enabled, hide when the cursor leaves the tooltip without moving onto the show target (if a hide event is set)
+			if(options.position.target === 'mouse' && options.position.adjust.mouse &&
+				options.hide.event && options.show.target && !target.closest(options.show.target[0]).length) {
+				this.hide(event);
+			}
+		}
+
+		// Add hover class
+		tooltip.toggleClass(CLASS_HOVER, state);
+	});
+
+	// Define events which reset the 'inactive' event handler
+	delegate('['+ATTR_ID+']', INACTIVE_EVENTS, inactiveMethod);
+});
+;// Initialization method
+function init(elem, id, opts) {
+	var obj, posOptions, attr, config, title,
+
+	// Setup element references
+	docBody = $(document.body),
+
+	// Use document body instead of document element if needed
+	newTarget = elem[0] === document ? docBody : elem,
+
+	// Grab metadata from element if plugin is present
+	metadata = (elem.metadata) ? elem.metadata(opts.metadata) : NULL,
+
+	// If metadata type is HTML5, grab 'name' from the object instead, or use the regular data object otherwise
+	metadata5 = opts.metadata.type === 'html5' && metadata ? metadata[opts.metadata.name] : NULL,
+
+	// Grab data from metadata.name (or data-qtipopts as fallback) using .data() method,
+	html5 = elem.data(opts.metadata.name || 'qtipopts');
+
+	// If we didn't get an object back, attempt to parse the string as JSON, ignoring any errors
+	try { html5 = typeof html5 === 'string' ? $.parseJSON(html5) : html5; } catch(e) {}
+
+	// Merge in and sanitize metadata
+	config = $.extend(TRUE, {}, QTIP.defaults, opts,
+		typeof html5 === 'object' ? sanitizeOptions(html5) : NULL,
+		sanitizeOptions(metadata5 || metadata));
+
+	// Re-grab our positioning options now we've merged our metadata and set id to passed value
+	posOptions = config.position;
+	config.id = id;
+
+	// Setup missing content if none is detected
+	if('boolean' === typeof config.content.text) {
+		attr = elem.attr(config.content.attr);
+
+		// Grab from supplied attribute if available
+		if(config.content.attr !== FALSE && attr) { config.content.text = attr; }
+
+		// No valid content was found, abort render
+		else { return FALSE; }
+	}
+
+	// Setup target options
+	if(!posOptions.container.length) { posOptions.container = docBody; }
+	if(posOptions.target === FALSE) { posOptions.target = newTarget; }
+	if(config.show.target === FALSE) { config.show.target = newTarget; }
+	if(config.show.solo === TRUE) { config.show.solo = posOptions.container.closest('body'); }
+	if(config.hide.target === FALSE) { config.hide.target = newTarget; }
+	if(config.position.viewport === TRUE) { config.position.viewport = posOptions.container; }
+
+	// Ensure we only use a single container
+	posOptions.container = posOptions.container.eq(0);
+
+	// Convert position corner values into x and y strings
+	posOptions.at = new CORNER(posOptions.at, TRUE);
+	posOptions.my = new CORNER(posOptions.my);
+
+	// Destroy previous tooltip if overwrite is enabled, or skip element if not
+	if(elem.data(NAMESPACE)) {
+		if(config.overwrite) {
+			elem.qtip('destroy', true);
+		}
+		else if(config.overwrite === FALSE) {
+			return FALSE;
+		}
+	}
+
+	// Add has-qtip attribute
+	elem.attr(ATTR_HAS, id);
+
+	// Remove title attribute and store it if present
+	if(config.suppress && (title = elem.attr('title'))) {
+		// Final attr call fixes event delegation and stops IE's default tooltip from showing
+		elem.removeAttr('title').attr(oldtitle, title).attr('title', '');
+	}
+
+	// Initialize the tooltip and add API reference
+	obj = new QTip(elem, config, id, !!attr);
+	elem.data(NAMESPACE, obj);
+
+	return obj;
+}
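+
+// E.g. (illustrative): with the defaults further down (content.text: TRUE,
+// content.attr: 'title'), $('a[title]').qtip({}) turns each link's title
+// attribute into its tooltip text; an element without one makes init()
+// return FALSE and that element is skipped.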
+
+// jQuery $.fn extension method
+QTIP = $.fn.qtip = function(options, notation, newValue)
+{
+	var command = ('' + options).toLowerCase(), // Parse command
+		returned = NULL,
+		args = $.makeArray(arguments).slice(1),
+		event = args[args.length - 1],
+		opts = this[0] ? $.data(this[0], NAMESPACE) : NULL;
+
+	// Check for API request
+	if((!arguments.length && opts) || command === 'api') {
+		return opts;
+	}
+
+	// Execute API command if present
+	else if('string' === typeof options) {
+		this.each(function() {
+			var api = $.data(this, NAMESPACE);
+			if(!api) { return TRUE; }
+
+			// Cache the event if possible
+			if(event && event.timeStamp) { api.cache.event = event; }
+
+			// Check for specific API commands
+			if(notation && (command === 'option' || command === 'options')) {
+				if(newValue !== undefined || $.isPlainObject(notation)) {
+					api.set(notation, newValue);
+				}
+				else {
+					returned = api.get(notation);
+					return FALSE;
+				}
+			}
+
+			// Execute API command
+			else if(api[command]) {
+				api[command].apply(api, args);
+			}
+		});
+
+		return returned !== NULL ? returned : this;
+	}
+
+	// No API command given; validate the provided options and set up the qTips
+	else if('object' === typeof options || !arguments.length) {
+		// Sanitize options first
+		opts = sanitizeOptions($.extend(TRUE, {}, options));
+
+		return this.each(function(i) {
+			var api, id;
+
+			// Find next available ID, or use custom ID if provided
+			id = $.isArray(opts.id) ? opts.id[i] : opts.id;
+			id = !id || id === FALSE || id.length < 1 || QTIP.api[id] ? QTIP.nextid++ : id;
+
+			// Initialize the qTip and re-grab newly sanitized options
+			api = init($(this), id, opts);
+			if(api === FALSE) { return TRUE; }
+			else { QTIP.api[id] = api; }
+
+			// Initialize plugins
+			$.each(PLUGINS, function() {
+				if(this.initialize === 'initialize') { this(api); }
+			});
+
+			// Assign initial pre-render events
+			api._assignInitialEvents(event);
+		});
+	}
+};
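+
+// Usage sketch (illustrative; the selector is hypothetical):
+//
+//   $('#save-btn').qtip({                                // create a tooltip
+//       content: { text: 'Saves your work' },
+//       position: { my: 'top left', at: 'bottom right' }
+//   });
+//   $('#save-btn').qtip('option', 'hide.delay', 100);    // API command form
+//   var api = $('#save-btn').qtip('api');                // grab the QTip instance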
+
+// Expose class
+$.qtip = QTip;
+
+// Populated in render method
+QTIP.api = {};
+;$.each({
+	/* Allow other plugins to successfully retrieve the title of an element with a qTip applied */
+	attr: function(attr, val) {
+		if(this.length) {
+			var self = this[0],
+				title = 'title',
+				api = $.data(self, 'qtip');
+
+			if(attr === title && api && 'object' === typeof api && api.options.suppress) {
+				if(arguments.length < 2) {
+					return $.attr(self, oldtitle);
+				}
+
+				// If qTip is rendered and title was originally used as content, update it
+				if(api && api.options.content.attr === title && api.cache.attr) {
+					api.set('content.text', val);
+				}
+
+				// Use the regular attr method to set, then cache the result
+				return this.attr(oldtitle, val);
+			}
+		}
+
+		return $.fn['attr'+replaceSuffix].apply(this, arguments);
+	},
+
+	/* Allow clone to correctly retrieve cached title attributes */
+	clone: function(keepData) {
+		var titles = $([]), title = 'title',
+
+		// Clone our element using the real clone method
+		elems = $.fn['clone'+replaceSuffix].apply(this, arguments);
+
+		// Grab all elements with an oldtitle set, and change it to regular title attribute, if keepData is false
+		if(!keepData) {
+			elems.filter('['+oldtitle+']').attr('title', function() {
+				return $.attr(this, oldtitle);
+			})
+			.removeAttr(oldtitle);
+		}
+
+		return elems;
+	}
+}, function(name, func) {
+	if(!func || $.fn[name+replaceSuffix]) { return TRUE; }
+
+	var old = $.fn[name+replaceSuffix] = $.fn[name];
+	$.fn[name] = function() {
+		return func.apply(this, arguments) || old.apply(this, arguments);
+	};
+});
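+
+// E.g. (illustrative): after a qTip with suppress enabled consumes an element's
+// title, $(elem).attr('title') still returns the original text because the
+// patched attr() above reads it back from the oldtitle attribute.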
+
+/* Fire off 'removeqtip' handler in $.cleanData if jQuery UI not present (it already does similar).
+ * This snippet is taken directly from jQuery UI source code found here:
+ *     http://code.jquery.com/ui/jquery-ui-git.js
+ */
+if(!$.ui) {
+	$['cleanData'+replaceSuffix] = $.cleanData;
+	$.cleanData = function( elems ) {
+		for(var i = 0, elem; (elem = $( elems[i] )).length; i++) {
+			if(elem.attr(ATTR_HAS)) {
+				try { elem.triggerHandler('removeqtip'); }
+				catch( e ) {}
+			}
+		}
+		$['cleanData'+replaceSuffix].apply(this, arguments);
+	};
+}
+;// qTip version
+QTIP.version = '2.2.1';
+
+// Base ID for all qTips
+QTIP.nextid = 0;
+
+// Inactive events array
+QTIP.inactiveEvents = INACTIVE_EVENTS;
+
+// Base z-index for all qTips
+QTIP.zindex = 15000;
+
+// Define configuration defaults
+QTIP.defaults = {
+	prerender: FALSE,
+	id: FALSE,
+	overwrite: TRUE,
+	suppress: TRUE,
+	content: {
+		text: TRUE,
+		attr: 'title',
+		title: FALSE,
+		button: FALSE
+	},
+	position: {
+		my: 'top left',
+		at: 'bottom right',
+		target: FALSE,
+		container: FALSE,
+		viewport: FALSE,
+		adjust: {
+			x: 0, y: 0,
+			mouse: TRUE,
+			scroll: TRUE,
+			resize: TRUE,
+			method: 'flipinvert flipinvert'
+		},
+		effect: function(api, pos, viewport) {
+			$(this).animate(pos, {
+				duration: 200,
+				queue: FALSE
+			});
+		}
+	},
+	show: {
+		target: FALSE,
+		event: 'mouseenter',
+		effect: TRUE,
+		delay: 90,
+		solo: FALSE,
+		ready: FALSE,
+		autofocus: FALSE
+	},
+	hide: {
+		target: FALSE,
+		event: 'mouseleave',
+		effect: TRUE,
+		delay: 0,
+		fixed: FALSE,
+		inactive: FALSE,
+		leave: 'window',
+		distance: FALSE
+	},
+	style: {
+		classes: '',
+		widget: FALSE,
+		width: FALSE,
+		height: FALSE,
+		def: TRUE
+	},
+	events: {
+		render: NULL,
+		move: NULL,
+		show: NULL,
+		hide: NULL,
+		toggle: NULL,
+		visible: NULL,
+		hidden: NULL,
+		focus: NULL,
+		blur: NULL
+	}
+};
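+
+// Illustrative: since init() merges these defaults with per-call options via
+// a deep $.extend, site-wide behaviour can be changed in one place, e.g.
+//
+//   $.extend(true, QTIP.defaults, {
+//       style: { classes: 'qtip-light qtip-shadow' },
+//       position: { viewport: $(window) }
+//   });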
+;var TIP,
+
+// .bind()/.on() namespace
+TIPNS = '.qtip-tip',
+
+// Common CSS strings
+MARGIN = 'margin',
+BORDER = 'border',
+COLOR = 'color',
+BG_COLOR = 'background-color',
+TRANSPARENT = 'transparent',
+IMPORTANT = ' !important',
+
+// Check if the browser supports <canvas/> elements
+HASCANVAS = !!document.createElement('canvas').getContext,
+
+// Invalid colour values used in parseColours()
+INVALID = /rgba?\(0, 0, 0(, 0)?\)|transparent|#123456/i;
+
+// Camel-case method, taken from jQuery source
+// http://code.jquery.com/jquery-1.8.0.js
+function camel(s) { return s.charAt(0).toUpperCase() + s.slice(1); }
+
+/*
+ * Modified from Modernizr's testPropsAll()
+ * http://modernizr.com/downloads/modernizr-latest.js
+ */
+var cssProps = {}, cssPrefixes = ["Webkit", "O", "Moz", "ms"];
+function vendorCss(elem, prop) {
+	var ucProp = prop.charAt(0).toUpperCase() + prop.slice(1),
+		props = (prop + ' ' + cssPrefixes.join(ucProp + ' ') + ucProp).split(' '),
+		cur, val, i = 0;
+
+	// If the property has already been mapped...
+	if(cssProps[prop]) { return elem.css(cssProps[prop]); }
+
+	while((cur = props[i++])) {
+		if((val = elem.css(cur)) !== undefined) {
+			return cssProps[prop] = cur, val;
+		}
+	}
+}
+
+// Parse a given element's CSS property into an int
+function intCss(elem, prop) {
+	return Math.ceil(parseFloat(vendorCss(elem, prop)));
+}
+
+
+// VML creation (for IE only)
+if(!HASCANVAS) {
+	var createVML = function(tag, props, style) {
+		return '<qtipvml:'+tag+' xmlns="urn:schemas-microsoft-com:vml" class="qtip-vml" '+(props||'')+
+			' style="behavior: url(#default#VML); '+(style||'')+ '" />';
+	};
+}
+
+// Canvas only definitions
+else {
+	var PIXEL_RATIO = window.devicePixelRatio || 1,
+		BACKING_STORE_RATIO = (function() {
+			var context = document.createElement('canvas').getContext('2d');
+			return context.backingStorePixelRatio || context.webkitBackingStorePixelRatio || context.mozBackingStorePixelRatio ||
+					context.msBackingStorePixelRatio || context.oBackingStorePixelRatio || 1;
+		}()),
+		SCALE = PIXEL_RATIO / BACKING_STORE_RATIO;
+}
+
+
+function Tip(qtip, options) {
+	this._ns = 'tip';
+	this.options = options;
+	this.offset = options.offset;
+	this.size = [ options.width, options.height ];
+
+	// Initialize
+	this.init( (this.qtip = qtip) );
+}
+
+$.extend(Tip.prototype, {
+	init: function(qtip) {
+		var context, tip;
+
+		// Create tip element and prepend to the tooltip
+		tip = this.element = qtip.elements.tip = $('<div />', { 'class': NAMESPACE+'-tip' }).prependTo(qtip.tooltip);
+
+		// Create tip drawing element(s)
+		if(HASCANVAS) {
+			// save() as soon as we create the canvas element so FF2 doesn't bork on our first restore()!
+			context = $('<canvas />').appendTo(this.element)[0].getContext('2d');
+
+			// Setup constant parameters
+			context.lineJoin = 'miter';
+			context.miterLimit = 100000;
+			context.save();
+		}
+		else {
+			context = createVML('shape', 'coordorigin="0,0"', 'position:absolute;');
+			// Insert two identical VML shapes (one is used for the tip's fill, the other for its border)
+			this.element.html(context + context);
+
+			// Prevent mousing down on the tip since it causes problems with .live() handling in IE due to VML
+			qtip._bind( $('*', tip).add(tip), ['click', 'mousedown'], function(event) { event.stopPropagation(); }, this._ns);
+		}
+
+		// Bind update events
+		qtip._bind(qtip.tooltip, 'tooltipmove', this.reposition, this._ns, this);
+
+		// Create it
+		this.create();
+	},
+
+	_swapDimensions: function() {
+		this.size[0] = this.options.height;
+		this.size[1] = this.options.width;
+	},
+	_resetDimensions: function() {
+		this.size[0] = this.options.width;
+		this.size[1] = this.options.height;
+	},
+
+	_useTitle: function(corner) {
+		var titlebar = this.qtip.elements.titlebar;
+		return titlebar && (
+			corner.y === TOP || (corner.y === CENTER && this.element.position().top + (this.size[1] / 2) + this.options.offset < titlebar.outerHeight(TRUE))
+		);
+	},
+
+	_parseCorner: function(corner) {
+		var my = this.qtip.options.position.my;
+
+		// Detect corner and mimic properties
+		if(corner === FALSE || my === FALSE) {
+			corner = FALSE;
+		}
+		else if(corner === TRUE) {
+			corner = new CORNER( my.string() );
+		}
+		else if(!corner.string) {
+			corner = new CORNER(corner);
+			corner.fixed = TRUE;
+		}
+
+		return corner;
+	},
+
+	_parseWidth: function(corner, side, use) {
+		var elements = this.qtip.elements,
+			prop = BORDER + camel(side) + 'Width';
+
+		return (use ? intCss(use, prop) : (
+			intCss(elements.content, prop) ||
+			intCss(this._useTitle(corner) && elements.titlebar || elements.content, prop) ||
+			intCss(elements.tooltip, prop)
+		)) || 0;
+	},
+
+	_parseRadius: function(corner) {
+		var elements = this.qtip.elements,
+			prop = BORDER + camel(corner.y) + camel(corner.x) + 'Radius';
+
+		return BROWSER.ie < 9 ? 0 :
+			intCss(this._useTitle(corner) && elements.titlebar || elements.content, prop) ||
+			intCss(elements.tooltip, prop) || 0;
+	},
+
+	_invalidColour: function(elem, prop, compare) {
+		var val = elem.css(prop);
+		return !val || (compare && val === elem.css(compare)) || INVALID.test(val) ? FALSE : val;
+	},
+
+	_parseColours: function(corner) {
+		var elements = this.qtip.elements,
+			tip = this.element.css('cssText', ''),
+			borderSide = BORDER + camel(corner[ corner.precedance ]) + camel(COLOR),
+			colorElem = this._useTitle(corner) && elements.titlebar || elements.content,
+			css = this._invalidColour, color = [];
+
+		// Attempt to detect the background colour from various elements, left-to-right precedance
+		color[0] = css(tip, BG_COLOR) || css(colorElem, BG_COLOR) || css(elements.content, BG_COLOR) ||
+			css(elements.tooltip, BG_COLOR) || tip.css(BG_COLOR);
+
+		// Attempt to detect the correct border side colour from various elements, left-to-right precedance
+		color[1] = css(tip, borderSide, COLOR) || css(colorElem, borderSide, COLOR) ||
+			css(elements.content, borderSide, COLOR) || css(elements.tooltip, borderSide, COLOR) || elements.tooltip.css(borderSide);
+
+		// Reset background and border colours
+		$('*', tip).add(tip).css('cssText', BG_COLOR+':'+TRANSPARENT+IMPORTANT+';'+BORDER+':0'+IMPORTANT+';');
+
+		return color;
+	},
+
+	_calculateSize: function(corner) {
+		var y = corner.precedance === Y,
+			width = this.options['width'],
+			height = this.options['height'],
+			isCenter = corner.abbrev() === 'c',
+			base = (y ? width: height) * (isCenter ? 0.5 : 1),
+			pow = Math.pow,
+			round = Math.round,
+			bigHyp, ratio, result,
+
+		smallHyp = Math.sqrt( pow(base, 2) + pow(height, 2) ),
+		hyp = [ (this.border / base) * smallHyp, (this.border / height) * smallHyp ];
+
+		hyp[2] = Math.sqrt( pow(hyp[0], 2) - pow(this.border, 2) );
+		hyp[3] = Math.sqrt( pow(hyp[1], 2) - pow(this.border, 2) );
+
+		bigHyp = smallHyp + hyp[2] + hyp[3] + (isCenter ? 0 : hyp[0]);
+		ratio = bigHyp / smallHyp;
+
+		result = [ round(ratio * width), round(ratio * height) ];
+		return y ? result : result.reverse();
+	},
+
+	// Tip coordinates calculator
+	_calculateTip: function(corner, size, scale) {
+		scale = scale || 1;
+		size = size || this.size;
+
+		var width = size[0] * scale,
+			height = size[1] * scale,
+			width2 = Math.ceil(width / 2), height2 = Math.ceil(height / 2),
+
+		// Define tip coordinates in terms of height and width values
+		tips = {
+			br:	[0,0,		width,height,	width,0],
+			bl:	[0,0,		width,0,		0,height],
+			tr:	[0,height,	width,0,		width,height],
+			tl:	[0,0,		0,height,		width,height],
+			tc:	[0,height,	width2,0,		width,height],
+			bc:	[0,0,		width,0,		width2,height],
+			rc:	[0,0,		width,height2,	0,height],
+			lc:	[width,0,	width,height,	0,height2]
+		};
+
+		// Set common side shapes
+		tips.lt = tips.br; tips.rt = tips.bl;
+		tips.lb = tips.tr; tips.rb = tips.tl;
+
+		return tips[ corner.abbrev() ];
+	},
+
+	// Tip coordinates drawer (canvas)
+	_drawCoords: function(context, coords) {
+		context.beginPath();
+		context.moveTo(coords[0], coords[1]);
+		context.lineTo(coords[2], coords[3]);
+		context.lineTo(coords[4], coords[5]);
+		context.closePath();
+	},
+
+	create: function() {
+		// Determine tip corner
+		var c = this.corner = (HASCANVAS || BROWSER.ie) && this._parseCorner(this.options.corner);
+
+		// If we have a tip corner...
+		if( (this.enabled = !!this.corner && this.corner.abbrev() !== 'c') ) {
+			// Cache it
+			this.qtip.cache.corner = c.clone();
+
+			// Create it
+			this.update();
+		}
+
+		// Toggle tip element
+		this.element.toggle(this.enabled);
+
+		return this.corner;
+	},
+
+	update: function(corner, position) {
+		if(!this.enabled) { return this; }
+
+		var elements = this.qtip.elements,
+			tip = this.element,
+			inner = tip.children(),
+			options = this.options,
+			curSize = this.size,
+			mimic = options.mimic,
+			round = Math.round,
+			color, precedance, context,
+			coords, bigCoords, translate, newSize, border, BACKING_STORE_RATIO;
+
+		// Re-determine tip if not already set
+		if(!corner) { corner = this.qtip.cache.corner || this.corner; }
+
+		// Use corner property if we detect an invalid mimic value
+		if(mimic === FALSE) { mimic = corner; }
+
+		// Otherwise inherit mimic properties from the corner object as necessary
+		else {
+			mimic = new CORNER(mimic);
+			mimic.precedance = corner.precedance;
+
+			if(mimic.x === 'inherit') { mimic.x = corner.x; }
+			else if(mimic.y === 'inherit') { mimic.y = corner.y; }
+			else if(mimic.x === mimic.y) {
+				mimic[ corner.precedance ] = corner[ corner.precedance ];
+			}
+		}
+		precedance = mimic.precedance;
+
+		// Ensure the tip width/height are relative to the tip position
+		if(corner.precedance === X) { this._swapDimensions(); }
+		else { this._resetDimensions(); }
+
+		// Update our colours
+		color = this.color = this._parseColours(corner);
+
+		// Detect border width, taking into account colours
+		if(color[1] !== TRANSPARENT) {
+			// Grab border width
+			border = this.border = this._parseWidth(corner, corner[corner.precedance]);
+
+			// If border width isn't zero, use border color as fill if it's not invalid (1.0 style tips)
+			if(options.border && border < 1 && !INVALID.test(color[1])) { color[0] = color[1]; }
+
+			// Set border width (use detected border width if options.border is true)
+			this.border = border = options.border !== TRUE ? options.border : border;
+		}
+
+		// Border colour was invalid, set border to zero
+		else { this.border = border = 0; }
+
+		// Determine tip size
+		newSize = this.size = this._calculateSize(corner);
+		tip.css({
+			width: newSize[0],
+			height: newSize[1],
+			lineHeight: newSize[1]+'px'
+		});
+
+		// Calculate tip translation
+		if(corner.precedance === Y) {
+			translate = [
+				round(mimic.x === LEFT ? border : mimic.x === RIGHT ? newSize[0] - curSize[0] - border : (newSize[0] - curSize[0]) / 2),
+				round(mimic.y === TOP ? newSize[1] - curSize[1] : 0)
+			];
+		}
+		else {
+			translate = [
+				round(mimic.x === LEFT ? newSize[0] - curSize[0] : 0),
+				round(mimic.y === TOP ? border : mimic.y === BOTTOM ? newSize[1] - curSize[1] - border : (newSize[1] - curSize[1]) / 2)
+			];
+		}
+
+		// Canvas drawing implementation
+		if(HASCANVAS) {
+			// Grab canvas context and clear/save it
+			context = inner[0].getContext('2d');
+			context.restore(); context.save();
+			context.clearRect(0,0,6000,6000);
+
+			// Calculate coordinates
+			coords = this._calculateTip(mimic, curSize, SCALE);
+			bigCoords = this._calculateTip(mimic, this.size, SCALE);
+
+			// Set the canvas size using calculated size
+			inner.attr(WIDTH, newSize[0] * SCALE).attr(HEIGHT, newSize[1] * SCALE);
+			inner.css(WIDTH, newSize[0]).css(HEIGHT, newSize[1]);
+
+			// Draw the outer-stroke tip
+			this._drawCoords(context, bigCoords);
+			context.fillStyle = color[1];
+			context.fill();
+
+			// Draw the actual tip
+			context.translate(translate[0] * SCALE, translate[1] * SCALE);
+			this._drawCoords(context, coords);
+			context.fillStyle = color[0];
+			context.fill();
+		}
+
+		// VML (IE Proprietary implementation)
+		else {
+			// Calculate coordinates
+			coords = this._calculateTip(mimic);
+
+			// Setup coordinates string
+			coords = 'm' + coords[0] + ',' + coords[1] + ' l' + coords[2] +
+				',' + coords[3] + ' ' + coords[4] + ',' + coords[5] + ' xe';
+
+			// Setup VML-specific offset for pixel-perfection
+			translate[2] = border && /^(r|b)/i.test(corner.string()) ?
+				BROWSER.ie === 8 ? 2 : 1 : 0;
+
+			// Set initial CSS
+			inner.css({
+				coordsize: (newSize[0]+border) + ' ' + (newSize[1]+border),
+				antialias: ''+(mimic.string().indexOf(CENTER) > -1),
+				left: translate[0] - (translate[2] * Number(precedance === X)),
+				top: translate[1] - (translate[2] * Number(precedance === Y)),
+				width: newSize[0] + border,
+				height: newSize[1] + border
+			})
+			.each(function(i) {
+				var $this = $(this);
+
+				// Set shape specific attributes
+				$this[ $this.prop ? 'prop' : 'attr' ]({
+					coordsize: (newSize[0]+border) + ' ' + (newSize[1]+border),
+					path: coords,
+					fillcolor: color[0],
+					filled: !!i,
+					stroked: !i
+				})
+				.toggle(!!(border || i));
+
+				// Check if border is enabled and add stroke element
+				!i && $this.html( createVML(
+					'stroke', 'weight="'+(border*2)+'px" color="'+color[1]+'" miterlimit="1000" joinstyle="miter"'
+				) );
+			});
+		}
+
+		// Opera bug #357 - Incorrect tip position
+		// https://github.com/Craga89/qTip2/issues/367
+		window.opera && setTimeout(function() {
+			elements.tip.css({
+				display: 'inline-block',
+				visibility: 'visible'
+			});
+		}, 1);
+
+		// Position if needed
+		if(position !== FALSE) { this.calculate(corner, newSize); }
+	},
+
+	calculate: function(corner, size) {
+		if(!this.enabled) { return FALSE; }
+
+		var self = this,
+			elements = this.qtip.elements,
+			tip = this.element,
+			userOffset = this.options.offset,
+			isWidget = elements.tooltip.hasClass('ui-widget'),
+			position = {  },
+			precedance, corners;
+
+		// Inherit corner if not provided
+		corner = corner || this.corner;
+		precedance = corner.precedance;
+
+		// Determine which tip dimension to use for adjustment
+		size = size || this._calculateSize(corner);
+
+		// Setup corners and offset array
+		corners = [ corner.x, corner.y ];
+		if(precedance === X) { corners.reverse(); }
+
+		// Calculate tip position
+		$.each(corners, function(i, side) {
+			var b, bc, br;
+
+			if(side === CENTER) {
+				b = precedance === Y ? LEFT : TOP;
+				position[ b ] = '50%';
+				position[MARGIN+'-' + b] = -Math.round(size[ precedance === Y ? 0 : 1 ] / 2) + userOffset;
+			}
+			else {
+				b = self._parseWidth(corner, side, elements.tooltip);
+				bc = self._parseWidth(corner, side, elements.content);
+				br = self._parseRadius(corner);
+
+				position[ side ] = Math.max(-self.border, i ? bc : (userOffset + (br > b ? br : -b)));
+			}
+		});
+
+		// Adjust for tip size
+		position[ corner[precedance] ] -= size[ precedance === X ? 0 : 1 ];
+
+		// Set and return new position
+		tip.css({ margin: '', top: '', bottom: '', left: '', right: '' }).css(position);
+		return position;
+	},
+
+	reposition: function(event, api, pos, viewport) {
+		if(!this.enabled) { return; }
+
+		var cache = api.cache,
+			newCorner = this.corner.clone(),
+			adjust = pos.adjusted,
+			method = api.options.position.adjust.method.split(' '),
+			horizontal = method[0],
+			vertical = method[1] || method[0],
+			shift = { left: FALSE, top: FALSE, x: 0, y: 0 },
+			offset, css = {}, props;
+
+		function shiftflip(direction, precedance, popposite, side, opposite) {
+			// Horizontal - Shift or flip method
+			if(direction === SHIFT && newCorner.precedance === precedance && adjust[side] && newCorner[popposite] !== CENTER) {
+				newCorner.precedance = newCorner.precedance === X ? Y : X;
+			}
+			else if(direction !== SHIFT && adjust[side]){
+				newCorner[precedance] = newCorner[precedance] === CENTER ?
+					(adjust[side] > 0 ? side : opposite) : (newCorner[precedance] === side ? opposite : side);
+			}
+		}
+
+		function shiftonly(xy, side, opposite) {
+			if(newCorner[xy] === CENTER) {
+				css[MARGIN+'-'+side] = shift[xy] = offset[MARGIN+'-'+side] - adjust[side];
+			}
+			else {
+				props = offset[opposite] !== undefined ?
+					[ adjust[side], -offset[side] ] : [ -adjust[side], offset[side] ];
+
+				if( (shift[xy] = Math.max(props[0], props[1])) > props[0] ) {
+					pos[side] -= adjust[side];
+					shift[side] = FALSE;
+				}
+
+				css[ offset[opposite] !== undefined ? opposite : side ] = shift[xy];
+			}
+		}
+
+		// If our tip corner isn't fixed, i.e. it's allowed to adjust with the viewport...
+		if(this.corner.fixed !== TRUE) {
+			// Perform shift/flip adjustments
+			shiftflip(horizontal, X, Y, LEFT, RIGHT);
+			shiftflip(vertical, Y, X, TOP, BOTTOM);
+
+			// Update and redraw the tip if needed (check cached details of last drawn tip)
+			if(newCorner.string() !== cache.corner.string() || cache.cornerTop !== adjust.top || cache.cornerLeft !== adjust.left) {
+				this.update(newCorner, FALSE);
+			}
+		}
+
+		// Setup tip offset properties
+		offset = this.calculate(newCorner);
+
+		// Readjust offset object to make it left/top
+		if(offset.right !== undefined) { offset.left = -offset.right; }
+		if(offset.bottom !== undefined) { offset.top = -offset.bottom; }
+		offset.user = this.offset;
+
+		// Perform shift adjustments
+		if(shift.left = (horizontal === SHIFT && !!adjust.left)) { shiftonly(X, LEFT, RIGHT); }
+		if(shift.top = (vertical === SHIFT && !!adjust.top)) { shiftonly(Y, TOP, BOTTOM); }
+
+		/*
+		* If the tip is adjusted in both dimensions, or in a
+		* direction that would cause it to be anywhere but the
+		* outer border, hide it!
+		*/
+		this.element.css(css).toggle(
+			!((shift.x && shift.y) || (newCorner.x === CENTER && shift.y) || (newCorner.y === CENTER && shift.x))
+		);
+
+		// Adjust position to accommodate tip dimensions
+		pos.left -= offset.left.charAt ? offset.user :
+			horizontal !== SHIFT || shift.top || !shift.left && !shift.top ? offset.left + this.border : 0;
+		pos.top -= offset.top.charAt ? offset.user :
+			vertical !== SHIFT || shift.left || !shift.left && !shift.top ? offset.top + this.border : 0;
+
+		// Cache details
+		cache.cornerLeft = adjust.left; cache.cornerTop = adjust.top;
+		cache.corner = newCorner.clone();
+	},
+
+	destroy: function() {
+		// Unbind events
+		this.qtip._unbind(this.qtip.tooltip, this._ns);
+
+		// Remove the tip element(s)
+		if(this.qtip.elements.tip) {
+			this.qtip.elements.tip.find('*')
+				.remove().end().remove();
+		}
+	}
+});
+
+TIP = PLUGINS.tip = function(api) {
+	return new Tip(api, api.options.style.tip);
+};
+
+// Initialize tip on render
+TIP.initialize = 'render';
+
+// Setup plugin sanitization options
+TIP.sanitize = function(options) {
+	if(options.style && 'tip' in options.style) {
+		var opts = options.style.tip;
+		if(typeof opts !== 'object') { opts = options.style.tip = { corner: opts }; }
+		if(!(/string|boolean/i).test(typeof opts.corner)) { opts.corner = TRUE; }
+	}
+};
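+// For example, sanitize() rewrites { style: { tip: 'top left' } } into
+// { style: { tip: { corner: 'top left' } } }, and replaces a corner value that
+// is neither a string nor a boolean with TRUE (auto corner).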
+
+// Add new option checks for the plugin
+CHECKS.tip = {
+	'^position.my|style.tip.(corner|mimic|border)$': function() {
+		// Make sure a tip can be drawn
+		this.create();
+
+		// Reposition the tooltip
+		this.qtip.reposition();
+	},
+	'^style.tip.(height|width)$': function(obj) {
+		// Re-set dimensions and redraw the tip
+		this.size = [ obj.width, obj.height ];
+		this.update();
+
+		// Reposition the tooltip
+		this.qtip.reposition();
+	},
+	'^content.title|style.(classes|widget)$': function() {
+		this.update();
+	}
+};
+
+// Extend original qTip defaults
+$.extend(TRUE, QTIP.defaults, {
+	style: {
+		tip: {
+			corner: TRUE,
+			mimic: FALSE,
+			width: 6,
+			height: 6,
+			border: TRUE,
+			offset: 0
+		}
+	}
+});
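+// Illustrative usage of these defaults (the '#btn' selector and content are
+// hypothetical, not part of this file):
+//
+//   $('#btn').qtip({
+//       content: 'Hello',
+//       style: { tip: { corner: 'bottom center', width: 8, height: 8 } }
+//   });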
+;var MODAL, OVERLAY,
+	MODALCLASS = 'qtip-modal',
+	MODALSELECTOR = '.'+MODALCLASS;
+
+OVERLAY = function()
+{
+	var self = this,
+		focusableElems = {},
+		current, onLast,
+		prevState, elem;
+
+	// Modified code from jQuery UI 1.10.0 source
+	// http://code.jquery.com/ui/1.10.0/jquery-ui.js
+	function focusable(element) {
+		// Use the defined focusable checker when possible
+		if($.expr[':'].focusable) { return $.expr[':'].focusable; }
+
+		var isTabIndexNotNaN = !isNaN($.attr(element, 'tabindex')),
+			nodeName = element.nodeName && element.nodeName.toLowerCase(),
+			map, mapName, img;
+
+		if('area' === nodeName) {
+			map = element.parentNode;
+			mapName = map.name;
+			if(!element.href || !mapName || map.nodeName.toLowerCase() !== 'map') {
+				return false;
+			}
+			img = $('img[usemap=#' + mapName + ']')[0];
+			return !!img && img.is(':visible');
+		}
+		return (/input|select|textarea|button|object/.test( nodeName ) ?
+				!element.disabled :
+				'a' === nodeName ?
+					element.href || isTabIndexNotNaN :
+					isTabIndexNotNaN
+			);
+	}
+
+	// Focus inputs using cached focusable elements (see update())
+	function focusInputs(blurElems) {
+		// Blurring body element in IE causes window.open windows to unfocus!
+		if(focusableElems.length < 1 && blurElems.length) { blurElems.not('body').blur(); }
+
+		// Focus the inputs
+		else { focusableElems.first().focus(); }
+	}
+
+	// Steal focus from elements outside tooltip
+	function stealFocus(event) {
+		if(!elem.is(':visible')) { return; }
+
+		var target = $(event.target),
+			tooltip = current.tooltip,
+			container = target.closest(SELECTOR),
+			targetOnTop;
+
+		// Determine if input container target is above this
+		targetOnTop = container.length < 1 ? FALSE :
+			(parseInt(container[0].style.zIndex, 10) > parseInt(tooltip[0].style.zIndex, 10));
+
+		// If we're showing a modal, but focus has landed on an input below
+		// this modal, divert focus to the first visible input in this modal
+		// or if we can't find one... the tooltip itself
+		if(!targetOnTop && target.closest(SELECTOR)[0] !== tooltip[0]) {
+			focusInputs(target);
+		}
+
+		// Detect when we leave the last focusable element...
+		onLast = event.target === focusableElems[focusableElems.length - 1];
+	}
+
+	$.extend(self, {
+		init: function() {
+			// Create document overlay
+			elem = self.elem = $('<div />', {
+				id: 'qtip-overlay',
+				html: '<div></div>',
+				mousedown: function() { return FALSE; }
+			})
+			.hide();
+
+			// Make sure we can't focus anything outside the tooltip
+			$(document.body).bind('focusin'+MODALSELECTOR, stealFocus);
+
+			// Apply keyboard "Escape key" close handler
+			$(document).bind('keydown'+MODALSELECTOR, function(event) {
+				if(current && current.options.show.modal.escape && event.keyCode === 27) {
+					current.hide(event);
+				}
+			});
+
+			// Apply click handler for blur option
+			elem.bind('click'+MODALSELECTOR, function(event) {
+				if(current && current.options.show.modal.blur) {
+					current.hide(event);
+				}
+			});
+
+			return self;
+		},
+
+		update: function(api) {
+			// Update current API reference
+			current = api;
+
+			// Update focusable elements if enabled
+			if(api.options.show.modal.stealfocus !== FALSE) {
+				focusableElems = api.tooltip.find('*').filter(function() {
+					return focusable(this);
+				});
+			}
+			else { focusableElems = []; }
+		},
+
+		toggle: function(api, state, duration) {
+			var docBody = $(document.body),
+				tooltip = api.tooltip,
+				options = api.options.show.modal,
+				effect = options.effect,
+				type = state ? 'show': 'hide',
+				visible = elem.is(':visible'),
+				visibleModals = $(MODALSELECTOR).filter(':visible:not(:animated)').not(tooltip),
+				zindex;
+
+			// Set active tooltip API reference
+			self.update(api);
+
+			// If the modal can steal the focus...
+			// Blur the current item and focus anything in the modal we can
+			if(state && options.stealfocus !== FALSE) {
+				focusInputs( $(':focus') );
+			}
+
+			// Toggle backdrop cursor style on show
+			elem.toggleClass('blurs', options.blur);
+
+			// Append to body on show
+			if(state) {
+				elem.appendTo(document.body);
+			}
+
+			// Prevent modal from conflicting with show.solo, and don't hide the backdrop if other modals are visible
+			if((elem.is(':animated') && visible === state && prevState !== FALSE) || (!state && visibleModals.length)) {
+				return self;
+			}
+
+			// Stop all animations
+			elem.stop(TRUE, FALSE);
+
+			// Use custom function if provided
+			if($.isFunction(effect)) {
+				effect.call(elem, state);
+			}
+
+			// If no effect type is supplied, use a simple toggle
+			else if(effect === FALSE) {
+				elem[ type ]();
+			}
+
+			// Use basic fade function
+			else {
+				elem.fadeTo( parseInt(duration, 10) || 90, state ? 1 : 0, function() {
+					if(!state) { elem.hide(); }
+				});
+			}
+
+			// Reset position and detach from body on hide
+			if(!state) {
+				elem.queue(function(next) {
+					elem.css({ left: '', top: '' });
+					if(!$(MODALSELECTOR).length) { elem.detach(); }
+					next();
+				});
+			}
+
+			// Cache the state
+			prevState = state;
+
+			// If the tooltip is destroyed, set reference to null
+			if(current.destroyed) { current = NULL; }
+
+			return self;
+		}
+	});
+
+	self.init();
+};
+OVERLAY = new OVERLAY();
+
+function Modal(api, options) {
+	this.options = options;
+	this._ns = '-modal';
+
+	this.init( (this.qtip = api) );
+}
+
+$.extend(Modal.prototype, {
+	init: function(qtip) {
+		var tooltip = qtip.tooltip;
+
+		// If modal is disabled... return
+		if(!this.options.on) { return this; }
+
+		// Set overlay reference
+		qtip.elements.overlay = OVERLAY.elem;
+
+		// Add unique attribute so we can grab modal tooltips easily via a SELECTOR, and set z-index
+		tooltip.addClass(MODALCLASS).css('z-index', QTIP.modal_zindex + $(MODALSELECTOR).length);
+
+		// Apply our show/hide/focus modal events
+		qtip._bind(tooltip, ['tooltipshow', 'tooltiphide'], function(event, api, duration) {
+			var oEvent = event.originalEvent;
+
+			// Make sure mouseout doesn't trigger a hide when showing the modal and mousing onto backdrop
+			if(event.target === tooltip[0]) {
+				if(oEvent && event.type === 'tooltiphide' && /mouse(leave|enter)/.test(oEvent.type) && $(oEvent.relatedTarget).closest(OVERLAY.elem[0]).length) {
+					try { event.preventDefault(); } catch(e) {}
+				}
+				else if(!oEvent || (oEvent && oEvent.type !== 'tooltipsolo')) {
+					this.toggle(event, event.type === 'tooltipshow', duration);
+				}
+			}
+		}, this._ns, this);
+
+		// Adjust modal z-index on tooltip focus
+		qtip._bind(tooltip, 'tooltipfocus', function(event, api) {
+			// If focus was cancelled before it reached us, don't do anything
+			if(event.isDefaultPrevented() || event.target !== tooltip[0]) { return; }
+
+			var qtips = $(MODALSELECTOR),
+
+			// Keep modal z-indexes lower than those of other, regular qtips
+			newIndex = QTIP.modal_zindex + qtips.length,
+			curIndex = parseInt(tooltip[0].style.zIndex, 10);
+
+			// Set overlay z-index
+			OVERLAY.elem[0].style.zIndex = newIndex - 1;
+
+			// Reduce modal z-index's and keep them properly ordered
+			qtips.each(function() {
+				if(this.style.zIndex > curIndex) {
+					this.style.zIndex -= 1;
+				}
+			});
+
+			// Fire blur event for focused tooltip
+			qtips.filter('.' + CLASS_FOCUS).qtip('blur', event.originalEvent);
+
+			// Set the new z-index
+			tooltip.addClass(CLASS_FOCUS)[0].style.zIndex = newIndex;
+
+			// Set current
+			OVERLAY.update(api);
+
+			// Prevent default handling
+			try { event.preventDefault(); } catch(e) {}
+		}, this._ns, this);
+
+		// Focus any other visible modals when this one hides
+		qtip._bind(tooltip, 'tooltiphide', function(event) {
+			if(event.target === tooltip[0]) {
+				$(MODALSELECTOR).filter(':visible').not(tooltip).last().qtip('focus', event);
+			}
+		}, this._ns, this);
+	},
+
+	toggle: function(event, state, duration) {
+		// Make sure default event hasn't been prevented
+		if(event && event.isDefaultPrevented()) { return this; }
+
+		// Toggle it
+		OVERLAY.toggle(this.qtip, !!state, duration);
+	},
+
+	destroy: function() {
+		// Remove modal class
+		this.qtip.tooltip.removeClass(MODALCLASS);
+
+		// Remove bound events
+		this.qtip._unbind(this.qtip.tooltip, this._ns);
+
+		// Delete element reference
+		OVERLAY.toggle(this.qtip, FALSE);
+		delete this.qtip.elements.overlay;
+	}
+});
+
+
+MODAL = PLUGINS.modal = function(api) {
+	return new Modal(api, api.options.show.modal);
+};
+
+// Setup sanitization rules
+MODAL.sanitize = function(opts) {
+	if(opts.show) {
+		if(typeof opts.show.modal !== 'object') { opts.show.modal = { on: !!opts.show.modal }; }
+		else if(typeof opts.show.modal.on === 'undefined') { opts.show.modal.on = TRUE; }
+	}
+};
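+// For example, show: { modal: true } is rewritten to show: { modal: { on: true } },
+// and a modal object supplied without an 'on' key is given on: true by default.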
+
+// Base z-index for all modal tooltips (use qTip core z-index as a base)
+QTIP.modal_zindex = QTIP.zindex - 200;
+
+// Plugin needs to be initialized on render
+MODAL.initialize = 'render';
+
+// Setup option set checks
+CHECKS.modal = {
+	'^show.modal.(on|blur)$': function() {
+		// Initialise
+		this.destroy();
+		this.init();
+
+		// Show the modal if not visible already and tooltip is visible
+		this.qtip.elements.overlay.toggle(
+			this.qtip.tooltip[0].offsetWidth > 0
+		);
+	}
+};
+
+// Extend original api defaults
+$.extend(TRUE, QTIP.defaults, {
+	show: {
+		modal: {
+			on: FALSE,
+			effect: TRUE,
+			blur: TRUE,
+			stealfocus: TRUE,
+			escape: TRUE
+		}
+	}
+});
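+// Illustrative usage (the '#help' selector and content are hypothetical):
+//
+//   $('#help').qtip({
+//       content: 'Modal content',
+//       show: { event: 'click', modal: { on: true } }
+//   });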
+;PLUGINS.viewport = function(api, position, posOptions, targetWidth, targetHeight, elemWidth, elemHeight)
+{
+	var target = posOptions.target,
+		tooltip = api.elements.tooltip,
+		my = posOptions.my,
+		at = posOptions.at,
+		adjust = posOptions.adjust,
+		method = adjust.method.split(' '),
+		methodX = method[0],
+		methodY = method[1] || method[0],
+		viewport = posOptions.viewport,
+		container = posOptions.container,
+		cache = api.cache,
+		adjusted = { left: 0, top: 0 },
+		fixed, newMy, containerOffset, containerStatic,
+		viewportWidth, viewportHeight, viewportScroll, viewportOffset;
+
+	// If viewport is not a jQuery element, the target is the window/document body, or no adjustment method is used... return
+	if(!viewport.jquery || target[0] === window || target[0] === document.body || adjust.method === 'none') {
+		return adjusted;
+	}
+
+	// Cache container details
+	containerOffset = container.offset() || adjusted;
+	containerStatic = container.css('position') === 'static';
+
+	// Cache our viewport details
+	fixed = tooltip.css('position') === 'fixed';
+	viewportWidth = viewport[0] === window ? viewport.width() : viewport.outerWidth(FALSE);
+	viewportHeight = viewport[0] === window ? viewport.height() : viewport.outerHeight(FALSE);
+	viewportScroll = { left: fixed ? 0 : viewport.scrollLeft(), top: fixed ? 0 : viewport.scrollTop() };
+	viewportOffset = viewport.offset() || adjusted;
+
+	// Generic calculation method
+	function calculate(side, otherSide, type, adjust, side1, side2, lengthName, targetLength, elemLength) {
+		var initialPos = position[side1],
+			mySide = my[side],
+			atSide = at[side],
+			isShift = type === SHIFT,
+			myLength = mySide === side1 ? elemLength : mySide === side2 ? -elemLength : -elemLength / 2,
+			atLength = atSide === side1 ? targetLength : atSide === side2 ? -targetLength : -targetLength / 2,
+			sideOffset = viewportScroll[side1] + viewportOffset[side1] - (containerStatic ? 0 : containerOffset[side1]),
+			overflow1 = sideOffset - initialPos,
+			overflow2 = initialPos + elemLength - (lengthName === WIDTH ? viewportWidth : viewportHeight) - sideOffset,
+			offset = myLength - (my.precedance === side || mySide === my[otherSide] ? atLength : 0) - (atSide === CENTER ? targetLength / 2 : 0);
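+		// overflow1/overflow2 measure how far the element spills past the
+		// viewport's near and far edges respectively; positive values mean an
+		// overflow that the shift/flip logic below has to compensate for.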
+
+		// shift
+		if(isShift) {
+			offset = (mySide === side1 ? 1 : -1) * myLength;
+
+			// Adjust position but keep it within viewport dimensions
+			position[side1] += overflow1 > 0 ? overflow1 : overflow2 > 0 ? -overflow2 : 0;
+			position[side1] = Math.max(
+				-containerOffset[side1] + viewportOffset[side1],
+				initialPos - offset,
+				Math.min(
+					Math.max(
+						-containerOffset[side1] + viewportOffset[side1] + (lengthName === WIDTH ? viewportWidth : viewportHeight),
+						initialPos + offset
+					),
+					position[side1],
+
+					// Make sure we don't adjust completely off the element when using 'center'
+					mySide === 'center' ? initialPos - myLength : 1E9
+				)
+			);
+
+		}
+
+		// flip/flipinvert
+		else {
+			// Update adjustment amount depending on if using flipinvert or flip
+			adjust *= (type === FLIPINVERT ? 2 : 0);
+
+			// Check for overflow on the left/top
+			if(overflow1 > 0 && (mySide !== side1 || overflow2 > 0)) {
+				position[side1] -= offset + adjust;
+				newMy.invert(side, side1);
+			}
+
+			// Check for overflow on the bottom/right
+			else if(overflow2 > 0 && (mySide !== side2 || overflow1 > 0)  ) {
+				position[side1] -= (mySide === CENTER ? -offset : offset) + adjust;
+				newMy.invert(side, side2);
+			}
+
+			// Make sure we haven't made things worse with the adjustment and reset if so
+			if(position[side1] < viewportScroll[side1] && -position[side1] > overflow2) {
+				position[side1] = initialPos; newMy = my.clone();
+			}
+		}
+
+		return position[side1] - initialPos;
+	}
+
+	// Set newMy if using flip or flipinvert methods
+	if(methodX !== 'shift' || methodY !== 'shift') { newMy = my.clone(); }
+
+	// Adjust position based on viewport and adjustment options
+	adjusted = {
+		left: methodX !== 'none' ? calculate( X, Y, methodX, adjust.x, LEFT, RIGHT, WIDTH, targetWidth, elemWidth ) : 0,
+		top: methodY !== 'none' ? calculate( Y, X, methodY, adjust.y, TOP, BOTTOM, HEIGHT, targetHeight, elemHeight ) : 0,
+		my: newMy
+	};
+
+	return adjusted;
+};
+;PLUGINS.polys = {
+	// POLY area coordinate calculator
+	//	Special thanks to Ed Cradock for helping out with this.
+	//	Uses a binary search algorithm to find suitable coordinates.
+	polygon: function(baseCoords, corner) {
+		var result = {
+			width: 0, height: 0,
+			position: {
+				top: 1e10, right: 0,
+				bottom: 0, left: 1e10
+			},
+			adjustable: FALSE
+		},
+		i = 0, next,
+		coords = [],
+		compareX = 1, compareY = 1,
+		realX = 0, realY = 0,
+		newWidth, newHeight;
+
+		// First pass, sanitize coords and determine outer edges
+		i = baseCoords.length; while(i--) {
+			next = [ parseInt(baseCoords[--i], 10), parseInt(baseCoords[i+1], 10) ];
+
+			if(next[0] > result.position.right){ result.position.right = next[0]; }
+			if(next[0] < result.position.left){ result.position.left = next[0]; }
+			if(next[1] > result.position.bottom){ result.position.bottom = next[1]; }
+			if(next[1] < result.position.top){ result.position.top = next[1]; }
+
+			coords.push(next);
+		}
+
+		// Calculate height and width from outer edges
+		newWidth = result.width = Math.abs(result.position.right - result.position.left);
+		newHeight = result.height = Math.abs(result.position.bottom - result.position.top);
+
+		// If it's the center corner...
+		if(corner.abbrev() === 'c') {
+			result.position = {
+				left: result.position.left + (result.width / 2),
+				top: result.position.top + (result.height / 2)
+			};
+		}
+		else {
+			// Second pass, use a binary search algorithm to locate most suitable coordinate
+			while(newWidth > 0 && newHeight > 0 && compareX > 0 && compareY > 0)
+			{
+				newWidth = Math.floor(newWidth / 2);
+				newHeight = Math.floor(newHeight / 2);
+
+				if(corner.x === LEFT){ compareX = newWidth; }
+				else if(corner.x === RIGHT){ compareX = result.width - newWidth; }
+				else{ compareX += Math.floor(newWidth / 2); }
+
+				if(corner.y === TOP){ compareY = newHeight; }
+				else if(corner.y === BOTTOM){ compareY = result.height - newHeight; }
+				else{ compareY += Math.floor(newHeight / 2); }
+
+				i = coords.length; while(i--)
+				{
+					if(coords.length < 2){ break; }
+
+					realX = coords[i][0] - result.position.left;
+					realY = coords[i][1] - result.position.top;
+
+					if((corner.x === LEFT && realX >= compareX) ||
+					(corner.x === RIGHT && realX <= compareX) ||
+					(corner.x === CENTER && (realX < compareX || realX > (result.width - compareX))) ||
+					(corner.y === TOP && realY >= compareY) ||
+					(corner.y === BOTTOM && realY <= compareY) ||
+					(corner.y === CENTER && (realY < compareY || realY > (result.height - compareY)))) {
+						coords.splice(i, 1);
+					}
+				}
+			}
+			result.position = { left: coords[0][0], top: coords[0][1] };
+		}
+
+		return result;
+	},
+
+	rect: function(ax, ay, bx, by) {
+		return {
+			width: Math.abs(bx - ax),
+			height: Math.abs(by - ay),
+			position: {
+				left: Math.min(ax, bx),
+				top: Math.min(ay, by)
+			}
+		};
+	},
+
+	_angles: {
+		tc: 3 / 2, tr: 7 / 4, tl: 5 / 4,
+		bc: 1 / 2, br: 1 / 4, bl: 3 / 4,
+		rc: 2, lc: 1, c: 0
+	},
+	ellipse: function(cx, cy, rx, ry, corner) {
+		var c = PLUGINS.polys._angles[ corner.abbrev() ],
+			rxc = c === 0 ? 0 : rx * Math.cos( c * Math.PI ),
+			rys = ry * Math.sin( c * Math.PI );
+
+		return {
+			width: (rx * 2) - Math.abs(rxc),
+			height: (ry * 2) - Math.abs(rys),
+			position: {
+				left: cx + rxc,
+				top: cy + rys
+			},
+			adjustable: FALSE
+		};
+	},
+	circle: function(cx, cy, r, corner) {
+		return PLUGINS.polys.ellipse(cx, cy, r, r, corner);
+	}
+};
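+// For example, PLUGINS.polys.rect(10, 10, 110, 60) yields
+// { width: 100, height: 50, position: { left: 10, top: 10 } }, and circle()
+// simply delegates to ellipse() with rx = ry = r.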
+;PLUGINS.svg = function(api, svg, corner)
+{
+	var doc = $(document),
+		elem = svg[0],
+		root = $(elem.ownerSVGElement),
+		ownerDocument = elem.ownerDocument,
+		strokeWidth2 = (parseInt(svg.css('stroke-width'), 10) || 0) / 2,
+		frameOffset, mtx, transformed, viewBox,
+		len, next, i, points,
+		result, position, dimensions;
+
+	// Ascend the parentNode chain until we find an element with getBBox()
+	while(!elem.getBBox) { elem = elem.parentNode; }
+	if(!elem.getBBox || !elem.parentNode) { return FALSE; }
+
+	// Determine which shape calculation to use
+	switch(elem.nodeName) {
+		case 'ellipse':
+		case 'circle':
+			result = PLUGINS.polys.ellipse(
+				elem.cx.baseVal.value,
+				elem.cy.baseVal.value,
+				(elem.rx || elem.r).baseVal.value + strokeWidth2,
+				(elem.ry || elem.r).baseVal.value + strokeWidth2,
+				corner
+			);
+		break;
+
+		case 'line':
+		case 'polygon':
+		case 'polyline':
+			// Determine points object (line has none, so mimic using array)
+			points = elem.points || [
+				{ x: elem.x1.baseVal.value, y: elem.y1.baseVal.value },
+				{ x: elem.x2.baseVal.value, y: elem.y2.baseVal.value }
+			];
+
+			for(result = [], i = -1, len = points.numberOfItems || points.length; ++i < len;) {
+				next = points.getItem ? points.getItem(i) : points[i];
+				result.push.apply(result, [next.x, next.y]);
+			}
+
+			result = PLUGINS.polys.polygon(result, corner);
+		break;
+
+		// Unknown shape or rectangle? Use bounding box
+		default:
+			result = elem.getBBox();
+			result = {
+				width: result.width,
+				height: result.height,
+				position: {
+					left: result.x,
+					top: result.y
+				}
+			};
+		break;
+	}
+
+	// Shortcut assignments
+	position = result.position;
+	root = root[0];
+
+	// Convert position into a pixel value
+	if(root.createSVGPoint) {
+		mtx = elem.getScreenCTM();
+		points = root.createSVGPoint();
+
+		points.x = position.left;
+		points.y = position.top;
+		transformed = points.matrixTransform( mtx );
+		position.left = transformed.x;
+		position.top = transformed.y;
+	}
+
+	// Check whether the element is in a child document, and if so, adjust for the frame element's offset
+	if(ownerDocument !== document && api.position.target !== 'mouse') {
+		frameOffset = $((ownerDocument.defaultView || ownerDocument.parentWindow).frameElement).offset();
+		if(frameOffset) {
+			position.left += frameOffset.left;
+			position.top += frameOffset.top;
+		}
+	}
+
+	// Adjust by scroll offset of owner document
+	ownerDocument = $(ownerDocument);
+	position.left += ownerDocument.scrollLeft();
+	position.top += ownerDocument.scrollTop();
+
+	return result;
+};
+;PLUGINS.imagemap = function(api, area, corner, adjustMethod)
+{
+	if(!area.jquery) { area = $(area); }
+
+	var shape = (area.attr('shape') || 'rect').toLowerCase().replace('poly', 'polygon'),
+		image = $('img[usemap="#'+area.parent('map').attr('name')+'"]'),
+		coordsString = $.trim(area.attr('coords')),
+		coordsArray = coordsString.replace(/,$/, '').split(','),
+		imageOffset, coords, i, next, result, len;
+
+	// If we can't find the image using the map...
+	if(!image.length) { return FALSE; }
+
+	// Pass coordinates string if polygon
+	if(shape === 'polygon') {
+		result = PLUGINS.polys.polygon(coordsArray, corner);
+	}
+
+	// Otherwise parse the coordinates and pass them as arguments
+	else if(PLUGINS.polys[shape]) {
+		for(i = -1, len = coordsArray.length, coords = []; ++i < len;) {
+			coords.push( parseInt(coordsArray[i], 10) );
+		}
+
+		result = PLUGINS.polys[shape].apply(
+			this, coords.concat(corner)
+		);
+	}
+
+	// If no shape calculation method was found, return false
+	else { return FALSE; }
+
+	// Make sure we account for padding and borders on the image
+	imageOffset = image.offset();
+	imageOffset.left += Math.ceil((image.outerWidth(FALSE) - image.width()) / 2);
+	imageOffset.top += Math.ceil((image.outerHeight(FALSE) - image.height()) / 2);
+
+	// Add image position to offset coordinates
+	result.position.left += imageOffset.left;
+	result.position.top += imageOffset.top;
+
+	return result;
+};
+;var IE6,
+
+/*
+ * BGIFrame adaptation (http://plugins.jquery.com/project/bgiframe)
+ * Special thanks to Brandon Aaron
+ */
+BGIFRAME = '<iframe class="qtip-bgiframe" frameborder="0" tabindex="-1" src="javascript:\'\';" ' +
+	' style="display:block; position:absolute; z-index:-1; filter:alpha(opacity=0); ' +
+		'-ms-filter:"progid:DXImageTransform.Microsoft.Alpha(Opacity=0)";"></iframe>';
+
+function Ie6(api, qtip) {
+	this._ns = 'ie6';
+	this.init( (this.qtip = api) );
+}
+
+$.extend(Ie6.prototype, {
+	_scroll : function() {
+		var overlay = this.qtip.elements.overlay;
+		overlay && (overlay[0].style.top = $(window).scrollTop() + 'px');
+	},
+
+	init: function(qtip) {
+		var tooltip = qtip.tooltip,
+			scroll;
+
+		// Create the BGIFrame element if needed
+		if($('select, object').length < 1) {
+			this.bgiframe = qtip.elements.bgiframe = $(BGIFRAME).appendTo(tooltip);
+
+			// Update BGIFrame on tooltip move
+			qtip._bind(tooltip, 'tooltipmove', this.adjustBGIFrame, this._ns, this);
+		}
+
+		// redraw() container for width/height calculations
+		this.redrawContainer = $('<div/>', { id: NAMESPACE+'-rcontainer' })
+			.appendTo(document.body);
+
+		// Fixup modal plugin if present too
+		if( qtip.elements.overlay && qtip.elements.overlay.addClass('qtipmodal-ie6fix') ) {
+			qtip._bind(window, ['scroll', 'resize'], this._scroll, this._ns, this);
+			qtip._bind(tooltip, ['tooltipshow'], this._scroll, this._ns, this);
+		}
+
+		// Set dimensions
+		this.redraw();
+	},
+
+	adjustBGIFrame: function() {
+		var tooltip = this.qtip.tooltip,
+			dimensions = {
+				height: tooltip.outerHeight(FALSE),
+				width: tooltip.outerWidth(FALSE)
+			},
+			plugin = this.qtip.plugins.tip,
+			tip = this.qtip.elements.tip,
+			tipAdjust, offset;
+
+		// Adjust border offset
+		offset = parseInt(tooltip.css('borderLeftWidth'), 10) || 0;
+		offset = { left: -offset, top: -offset };
+
+		// Adjust for tips plugin
+		if(plugin && tip) {
+			tipAdjust = (plugin.corner.precedance === 'x') ? [WIDTH, LEFT] : [HEIGHT, TOP];
+			offset[ tipAdjust[1] ] -= tip[ tipAdjust[0] ]();
+		}
+
+		// Update bgiframe
+		this.bgiframe.css(offset).css(dimensions);
+	},
+
+	// Max/min width simulator function
+	redraw: function() {
+		if(this.qtip.rendered < 1 || this.drawing) { return this; }
+
+		var tooltip = this.qtip.tooltip,
+			style = this.qtip.options.style,
+			container = this.qtip.options.position.container,
+			perc, width, max, min;
+
+		// Set drawing flag
+		this.qtip.drawing = 1;
+
+		// If tooltip has a set height/width, just set it... like a boss!
+		if(style.height) { tooltip.css(HEIGHT, style.height); }
+		if(style.width) { tooltip.css(WIDTH, style.width); }
+
+		// Simulate max/min width if no set width is present...
+		else {
+			// Reset width and add fluid class
+			tooltip.css(WIDTH, '').appendTo(this.redrawContainer);
+
+			// Grab our tooltip width (make it odd so we don't get wrapping problems.. huzzah!)
+			width = tooltip.width();
+			if(width % 2 < 1) { width += 1; }
+
+			// Grab our max/min properties
+			max = tooltip.css('maxWidth') || '';
+			min = tooltip.css('minWidth') || '';
+
+			// Parse into proper pixel values
+			perc = (max + min).indexOf('%') > -1 ? container.width() / 100 : 0;
+			max = ((max.indexOf('%') > -1 ? perc : 1) * parseInt(max, 10)) || width;
+			min = ((min.indexOf('%') > -1 ? perc : 1) * parseInt(min, 10)) || 0;
+
+			// Determine new dimension size based on max/min/current values
+			width = max + min ? Math.min(Math.max(width, min), max) : width;
+
+			// Set the newly calculated width and remove fluid class
+			tooltip.css(WIDTH, Math.round(width)).appendTo(container);
+		}
+
+		// Clear the drawing flag
+		this.drawing = 0;
+
+		return this;
+	},
+
+	destroy: function() {
+		// Remove iframe
+		this.bgiframe && this.bgiframe.remove();
+
+		// Remove bound events
+		this.qtip._unbind([window, this.qtip.tooltip], this._ns);
+	}
+});
+
+IE6 = PLUGINS.ie6 = function(api) {
+	// Proceed only if the browser is IE6
+	return BROWSER.ie === 6 ? new Ie6(api) : FALSE;
+};
+
+IE6.initialize = 'render';
+
+CHECKS.ie6 = {
+	'^content|style$': function() {
+		this.redraw();
+	}
+};
+;}));
+}( window, document ));
\ No newline at end of file
diff --git a/config/plugins/visualizations/graphviz/static/js/tip_centerwindow.js b/config/plugins/visualizations/graphviz/static/js/tip_centerwindow.js
new file mode 100644
index 0000000..9c998d8
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/js/tip_centerwindow.js
@@ -0,0 +1,104 @@
+/*
+tip_centerwindow.js  v. 1.21
+
+The latest version is available at
+http://www.walterzorn.com
+or http://www.devira.com
+or http://www.walterzorn.de
+
+Initial author: Walter Zorn
+Last modified: 3.6.2008
+
+Extension for the tooltip library wz_tooltip.js.
+Centers a sticky tooltip in the window's visible client area,
+optionally even if the window is being scrolled or resized.
+*/
+
+// Make sure that the core file wz_tooltip.js is included first
+if(typeof config == "undefined")
+	alert("Error:\nThe core tooltip script file 'wz_tooltip.js' must be included first, before the plugin files!");
+
+// Here we define new global configuration variable(s) (as members of the
+// predefined "config." class).
+// From each of these config variables, wz_tooltip.js will automatically derive
+// a command which can be passed to Tip() or TagToTip() in order to customize
+// tooltips individually. These command names are just the config variable
+// name(s) translated to uppercase,
+// e.g. from config. CenterWindow a command CENTERWINDOW will automatically be
+// created.
+
+//===================  GLOBAL TOOLTIP CONFIGURATION  =========================//
+config. CenterWindow = false	// true or false - set to true if you want this to be the default behaviour
+config. CenterAlways = false	// true or false - recenter if window is resized or scrolled
+//=======  END OF TOOLTIP CONFIG, DO NOT CHANGE ANYTHING BELOW  ==============//
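+// Illustrative per-tooltip usage (CENTERWINDOW requires a sticky tip, as
+// enforced in OnLoadConfig below; the link markup is hypothetical):
+// <a onmouseover="Tip('Centered text', STICKY, true, CENTERWINDOW, true)"
+//    onmouseout="UnTip()" href="#">link</a>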
+
+
+// Create a new tt_Extension object (make sure that the name of that object,
+// here ctrwnd, is unique amongst the extensions available for
+// wz_tooltips.js):
+var ctrwnd = new tt_Extension();
+
+// Implement the extension event handlers to which our extension should react
+ctrwnd.OnLoadConfig = function()
+{
+	if(tt_aV[CENTERWINDOW])
+	{
+		// Permit CENTERWINDOW only if the tooltip is sticky
+		if(tt_aV[STICKY])
+		{
+			if(tt_aV[CENTERALWAYS])
+			{
+				// IE doesn't support style.position "fixed"
+				if(tt_ie)
+					tt_AddEvtFnc(window, "scroll", Ctrwnd_DoCenter);
+				else
+					tt_aElt[0].style.position = "fixed";
+				tt_AddEvtFnc(window, "resize", Ctrwnd_DoCenter);
+			}
+			return true;
+		}
+		tt_aV[CENTERWINDOW] = false;
+	}
+	return false;
+};
+// We react to the first OnMouseMove event to center the tip on that occasion
+ctrwnd.OnMoveBefore = Ctrwnd_DoCenter;
+ctrwnd.OnKill = function()
+{
+	if(tt_aV[CENTERWINDOW] && tt_aV[CENTERALWAYS])
+	{
+		tt_RemEvtFnc(window, "resize", Ctrwnd_DoCenter);
+		if(tt_ie)
+			tt_RemEvtFnc(window, "scroll", Ctrwnd_DoCenter);
+		else
+			tt_aElt[0].style.position = "absolute";
+	}
+	return false;
+};
+// Helper function
+function Ctrwnd_DoCenter()
+{
+	if(tt_aV[CENTERWINDOW])
+	{
+		var x, y, dx, dy;
+
+		// Here we use some functions and variables (tt_w, tt_h) which the
+		// extension API of wz_tooltip.js provides for us
+		if(tt_ie || !tt_aV[CENTERALWAYS])
+		{
+			dx = tt_GetScrollX();
+			dy = tt_GetScrollY();
+		}
+		else
+		{
+			dx = 0;
+			dy = 0;
+		}
+		// Position the tip, offset from the center by OFFSETX and OFFSETY
+		x = (tt_GetClientW() - tt_w) / 2 + dx + tt_aV[OFFSETX];
+		y = (tt_GetClientH() - tt_h) / 2 + dy + tt_aV[OFFSETY];
+		tt_SetTipPos(x, y);
+		return true;
+	}
+	return false;
+}
diff --git a/config/plugins/visualizations/graphviz/static/js/tip_followscroll.js b/config/plugins/visualizations/graphviz/static/js/tip_followscroll.js
new file mode 100644
index 0000000..060d48b
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/js/tip_followscroll.js
@@ -0,0 +1,88 @@
+/*
+tip_followscroll.js	v. 1.11
+
+The latest version is available at
+http://www.walterzorn.com
+or http://www.devira.com
+or http://www.walterzorn.de
+
+Initial author: Walter Zorn
+Last modified: 3.6.2008
+
+Extension for the tooltip library wz_tooltip.js.
+Lets a "sticky" tooltip keep its position inside the clientarea if the window
+is scrolled.
+*/
+
+// Make sure that the core file wz_tooltip.js is included first
+if(typeof config == "undefined")
+	alert("Error:\nThe core tooltip script file 'wz_tooltip.js' must be included first, before the plugin files!");
+
+// Here we define new global configuration variable(s) (as members of the
+// predefined "config." class).
+// From each of these config variables, wz_tooltip.js will automatically derive
+// a command which can be passed to Tip() or TagToTip() in order to customize
+// tooltips individually. These command names are just the config variable
+// name(s) translated to uppercase,
+// e.g. from config. FollowScroll a command FOLLOWSCROLL will automatically be
+// created.
+
+//===================	GLOBAL TOOLTIP CONFIGURATION	======================//
+config. FollowScroll = false		// true or false - set to true if you want this to be the default behaviour
+//=======	END OF TOOLTIP CONFIG, DO NOT CHANGE ANYTHING BELOW	==============//
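+// Illustrative per-tooltip usage (FOLLOWSCROLL requires a sticky tip, as
+// enforced in OnShow below; the link markup is hypothetical):
+// <a onmouseover="Tip('Scroll-following text', STICKY, true, FOLLOWSCROLL, true)"
+//    onmouseout="UnTip()" href="#">link</a>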
+
+
+// Create a new tt_Extension object (make sure that the name of that object,
+// here fscrl, is unique amongst the extensions available for
+// wz_tooltips.js):
+var fscrl = new tt_Extension();
+
+// Implement the extension event handlers to which our extension should react
+fscrl.OnShow = function()
+{
+	if(tt_aV[FOLLOWSCROLL])
+	{
+		// Permit FOLLOWSCROLL only if the tooltip is sticky
+		if(tt_aV[STICKY])
+		{
+			var x = tt_x - tt_GetScrollX(), y = tt_y - tt_GetScrollY();
+
+			if(tt_ie)
+			{
+				fscrl.MoveOnScrl.offX = x;
+				fscrl.MoveOnScrl.offY = y;
+				fscrl.AddRemEvtFncs(tt_AddEvtFnc);
+			}
+			else
+			{
+				tt_SetTipPos(x, y);
+				tt_aElt[0].style.position = "fixed";
+			}
+			return true;
+		}
+		tt_aV[FOLLOWSCROLL] = false;
+	}
+	return false;
+};
+fscrl.OnHide = function()
+{
+	if(tt_aV[FOLLOWSCROLL])
+	{
+		if(tt_ie)
+			fscrl.AddRemEvtFncs(tt_RemEvtFnc);
+		else
+			tt_aElt[0].style.position = "absolute";
+	}
+};
+// Helper functions (encapsulate in the class to avoid conflicts with other
+// extensions)
+fscrl.MoveOnScrl = function()
+{
+	tt_SetTipPos(fscrl.MoveOnScrl.offX + tt_GetScrollX(), fscrl.MoveOnScrl.offY + tt_GetScrollY());
+};
+fscrl.AddRemEvtFncs = function(PAddRem)
+{
+	PAddRem(window, "resize", fscrl.MoveOnScrl);
+	PAddRem(window, "scroll", fscrl.MoveOnScrl);
+};
+
diff --git a/config/plugins/visualizations/graphviz/static/js/toolPanelFunctions.js b/config/plugins/visualizations/graphviz/static/js/toolPanelFunctions.js
new file mode 100644
index 0000000..1108c67
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/js/toolPanelFunctions.js
@@ -0,0 +1,609 @@
+$(function() { // on dom ready
+
+
+	///// show/hide Labels
+	$('#nodeLabelCheck').change(function() {
+		if ($(this).is(":checked")) {
+
+			showNodeLabel = 'data(id)'; // change to 'data(label)' or 'data(name)' if needed
+		} else {
+			showNodeLabel = "";
+		}
+
+		cy.style()
+			.selector('node')
+			.css({
+			'content': showNodeLabel
+		})
+			.update();
+	});
+
+	$('#linkLabelCheck').change(function() {
+		if ($(this).is(":checked")) {
+			showEdgeLabel = "data(id)"; // change to label or name if needed
+		} else {
+			showEdgeLabel = "";
+		}
+
+		cy.style()
+			.selector('edge')
+			.css({
+			'content': showEdgeLabel
+		})
+			.update();
+	});
+
+
+	//////// change the shape of the graph
+	$('#selectShape').change(function() {
+
+		$("select option:selected").each(function() {
+			shape = $(this).val();
+		});
+		cy.makeLayout({
+			'name': shape
+		})
+			.run();
+
+		$('#selectShape').blur();
+	});
+
+	////////  highlighting outgoing nodes
+
+	$('#showOutNode').change(function() {
+		if ($('#showOutNode').is(":checked")) {
+			showOut = true;
+			cy.nodes().on("tapend", highlightOut);
+
+		} else {
+			showOut = false;
+			cy.nodes().off("tapend", highlightOut);
+
+			resetHighlightOut(showIn, showOut);
+		}
+	});
+
+
+	////////  highlighting incoming nodes
+
+	$('#showInNode').change(function() {
+
+		if ($('#showInNode').is(":checked")) {
+			showIn = true;
+			cy.nodes().on("tapend", highlightIn);
+
+
+		} else {
+			showIn = false;
+			cy.nodes().off("tapend", highlightIn);
+			resetHighlightIn(showIn, showOut);
+
+			cy.nodes().removeClass('connectedNodeIn');
+		}
+
+	});
+
+	//// show number of contained nodes for a collapsed node
+	$('#collapseCount').change(function() {
+		checkBoxes();
+	});
+
+	////expand(Load) nodes
+	$('#expandNode').click(function() {
+		var selectedNode = cy.nodes(':selected');
+
+		if (selectedNode.hasClass('superNode')) {
+			unColNode();
+		} else {
+			expandNodes(selectedNode);
+			$('.btn.expNode').prop('disabled', true);
+		}
+
+	});
+
+	//// disable/enable buttons
+	$(document).on('click', function() {
+
+		var selectedNode = cy.nodes(':selected');
+
+		if (selectedNode.outgoers().length === 0) {
+
+			$('.btn.colNode').prop('disabled', true);
+
+
+		} else if (selectedNode.hasClass('superNode')) {
+
+			$('.btn.colNode').prop('disabled', true);
+
+		} else {
+
+			$('.btn.colNode').prop('disabled', false);
+
+		}
+
+		if (selectedNode.hasClass('toBeExpaned') || selectedNode.hasClass('superNode')) {
+
+			$('.btn.expNode').prop('disabled', false);
+
+		} else {
+
+			$('.btn.expNode').prop('disabled', true);
+
+		}
+
+		if (cy.nodes(":selected").length > 0) {
+
+			$('.btn.delNode').prop('disabled', false);
+		} else {
+
+			$('.btn.delNode').prop('disabled', true);
+		}
+
+
+	});
+
+
+	///// keyboard commands
+
+	$(document).keypress(function(e) {
+
+		var selectedNode = cy.nodes(':selected');
+
+		if (e.which === 101 && selectedNode.size() !== 0) { // 101 for 'e' = expand
+
+			if (selectedNode.hasClass('toBeExpaned')) {
+
+				expandNodes(selectedNode);
+
+			} else if (selectedNode.hasClass('superNode')) {
+
+				unColNode();
+			}
+
+
+		}
+		if (e.which === 99 && selectedNode.size() !== 0 && !selectedNode.hasClass('superNode') && selectedNode.outgoers().length !== 0) { // 99 for 'c' = collapse
+
+			colNode();
+
+		}
+
+		if (e.which === 100 && selectedNode.size() !== 0) { // 100 for 'd' = delete
+			deleteSelectedNodes();
+		}
+
+
+	});
+
+	/////////////// tool Panel movement
+
+	$(document).on('click', '.slider-arrow.show', function() {
+		$(".slider-arrow, .panel").animate({
+			left: "+=243"
+		}, 700, function() {
+			// Animation complete.
+		});
+		$(this).html('«').removeClass('show').addClass('hide');
+
+		document.getElementById('cy').style.left = "243px";
+		//document.getElementById('cy').style.width="80%";
+
+		cy.resize();
+
+
+	});
+
+	$(document).on('click', '.slider-arrow.hide', function() {
+		$(".slider-arrow, .panel").animate({
+			left: "-=243"
+		}, 700, function() {
+			// Animation complete.
+		});
+		$(this).html('»').removeClass('hide').addClass('show');
+		document.getElementById('cy').style.left = "0px";
+		// document.getElementById('cy').style.width="100%";
+		cy.resize();
+	});
+
+	//////////////////panel for Node info
+
+	$(document).on('click', '.slider-arrow-forNode.show', function() {
+		$(".slider-arrow-forNode, .nodePanel").animate({
+			right: "+=248"
+		}, 700, function() {
+			// Animation complete.
+		});
+		$(this).html('»').removeClass('show').addClass('hide');
+
+		document.getElementById('cy').style.right = "248px";
+		//document.getElementById('cy').style.width="80%";
+
+		cy.resize();
+
+
+	});
+
+	$(document).on('click', '.slider-arrow-forNode.hide', function() {
+		$(".slider-arrow-forNode, .nodePanel").animate({
+			right: "-=248"
+		}, 700, function() {
+			// Animation complete.
+		});
+		$(this).html('«').removeClass('hide').addClass('show');
+		document.getElementById('cy').style.right = "0px";
+		// document.getElementById('cy').style.width="100%";
+		cy.resize();
+	});
+}); // on dom ready END
+
+///////// finding outgoing Nodes
+function highlightOut() {
+
+	var selectedNode = this;
+
+	var connectedNodes = selectedNode.outgoers();
+
+	if (selectedNode.hasClass('selectedNodeOut')) {
+
+		connectedNodes.removeClass('connectedNodeOut');
+		selectedNode.removeClass('selectedNodeOut');
+
+		cy.style()
+			.selector('.connectedNodeOut')
+			.css({
+			'opacity': 0.8
+		})
+			.update();
+	} else {
+
+		cy.nodes().not(this).removeClass('connectedNodeOut');
+		cy.nodes().not(this).removeClass('selectedNodeOut');
+		cy.edges().not(this).removeClass('connectedNodeOut');
+
+		connectedNodes.addClass('connectedNodeOut');
+		selectedNode.addClass('selectedNodeOut');
+		cy.style()
+			.selector('.connectedNodeOut')
+			.css({
+			'background-color': '#FE2E64',
+			'line-color': '#FE2E64',
+			'target-arrow-color': '#FE2E64',
+			'source-arrow-color': '#FE2E64',
+			'opacity': 0.8
+		})
+			.update();
+
+		cy.style()
+			.selector('.selectedNodeOut')
+			.css({
+			'background-color': '#FE2E64',
+			'line-color': '#FE2E64',
+			'target-arrow-color': '#FE2E64',
+			'source-arrow-color': '#FE2E64',
+			'opacity': 0.8
+		})
+			.update();
+	}
+}
+
+/////// reset highlighting of outgoing nodes
+function resetHighlightOut(showIn, showOut) {
+
+	cy.style()
+		.selector('.connectedNodeOut')
+		.css({
+		'opacity': 0.8,
+		'background-color': '#888888',
+		'line-color': '#ddd',
+		'target-arrow-color': '#ddd'
+	})
+		.update();
+
+
+	if (!showIn && !showOut) {
+		cy.style()
+			.selector('.selectedNodeOut')
+			.css({
+			'opacity': 0.8,
+			'background-color': '#888888', //
+			'border-width': 0
+		})
+			.update();
+
+	}
+	cy.nodes().removeClass('selectedNodeOut');
+	cy.nodes().removeClass('connectedNodeOut');
+	cy.edges().removeClass('connectedNodeOut');
+}
+
+//////////// find inComing nodes
+
+function highlightIn() {
+
+	var selectedNode = this;
+	var connectedNodes = selectedNode.incomers();
+
+	if (selectedNode.hasClass('selectedNodeIn')) {
+
+		cy.style()
+			.selector('.connectedNodeIn')
+			.css({
+			'opacity': 0.8
+		})
+			.update();
+		connectedNodes.removeClass('connectedNodeIn');
+		selectedNode.removeClass('selectedNodeIn');
+
+	} else {
+
+		cy.nodes().not(this).removeClass('connectedNodeIn');
+		cy.nodes().not(this).removeClass('selectedNodeIn');
+		cy.edges().not(this).removeClass('connectedNodeIn');
+
+		connectedNodes.addClass('connectedNodeIn');
+		selectedNode.addClass('selectedNodeIn');
+		cy.style()
+			.selector('.connectedNodeIn')
+			.css({
+			'background-color': '#FE2E64',
+			'line-color': '#FE2E64',
+			'target-arrow-color': '#FE2E64',
+			'source-arrow-color': '#FE2E64',
+			'opacity': 0.8
+		})
+			.update();
+		cy.style()
+			.selector('.selectedNodeIn')
+			.css({
+			'background-color': '#FE2E64',
+			'line-color': '#FE2E64',
+			'target-arrow-color': '#FE2E64',
+			'source-arrow-color': '#FE2E64',
+			'opacity': 0.8
+		})
+			.update();
+	}
+}
+
+
+/////// reset highlighting of incoming nodes
+function resetHighlightIn(showIn, showOut) {
+
+	cy.style()
+		.selector('.connectedNodeIn')
+		.css({
+		'opacity': 0.8,
+		'background-color': '#888888',
+		'line-color': '#ddd',
+		'target-arrow-color': '#ddd'
+	})
+		.update();
+
+	if (!showIn && !showOut) {
+		cy.style()
+			.selector('.selectedNodeIn')
+			.css({
+			'opacity': 0.8,
+			'background-color': '#888888', //
+			'border-width': 0
+		})
+			.update();
+	}
+
+	cy.nodes().removeClass('selectedNodeIn');
+	cy.nodes().removeClass('connectedNodeIn');
+	cy.edges().removeClass('connectedNodeIn');
+
+}
+
+
+////// export PNG
+function exportFunction() {
+
+	var pngPic = cy.png();
+	downloadURI(pngPic, "graph"); //// tested on Chrome and Firefox
+}
+
+function downloadURI(uri, fname) {
+
+	var link = document.createElement("a");
+	link.download = fname;
+
+	link.href = uri;
+	document.body.appendChild(link);
+	link.click();
+	// Cleanup the DOM
+	document.body.removeChild(link);
+}
+
+///// restore Graph structure
+function restorGraphStructure() {
+
+	resetCollapse();
+
+	var shape = "";
+	$("select option:selected").each(function() {
+		shape = $(this).val();
+	});
+	cy.makeLayout({
+		'name': shape
+	})
+		.run();
+
+}
+
+// show node info on the right panel
+function showNodeInfo(node) {
+
+	var str = "";
+	var nodeContent = node.data();
+
+	var fieldName = [];
+	var i = 0;
+	var values = [];
+	var j = 0;
+
+	for (var key in nodeContent) {
+		if (key == 'group' || key == 'data') {
+			continue;
+		}
+
+		fieldName[i] = key;
+		i++;
+		values[j] = nodeContent[key];
+		j++;
+	}
+
+	var childNodes = node.outgoers().nodes();
+	var childNum = childNodes.length;
+
+	var parentNodes = node.incomers().nodes();
+	var parentNum = parentNodes.length;
+	var degree = node.degree();
+
+	var table = createTable(fieldName, values, degree, parentNum, childNum);
+
+	$('#nodeInfoDiv').html("<p><strong>Node Description </p>" + table);
+
+	// Render URL-valued fields as embedded iframes below the table
+	for (var strIndex = 0; strIndex < fieldName.length; strIndex++) {
+
+		if (isValidUrl(values[strIndex])) {
+			var e = document.createElement("div");
+			e.id = "linkDiv";
+			$('#nodeInfoDiv').append(e);
+
+			var url = values[strIndex];
+			e.innerHTML += "<br>" + fieldName[strIndex] + "<br>";
+			$('<iframe id="iframeId" width = "240"/>').appendTo(e).prop('src', url);
+		}
+	}
+}
+
+function createTable(fieldName, values, degree, parentNum, childNum) {
+	var table = "<table class='CSSTableGenerator'>";
+	for (var i = 0; i < fieldName.length; i++) {
+		if (isValidUrl(values[i])) {
+			continue; // URL values are rendered as iframes by showNodeInfo(), not as table rows
+		}
+		table += "<tr>" + "<td>" + fieldName[i] + "</td>" + "<td>" + values[i] + "</td>" + "</tr>";
+	}
+	table += "<tr>" + "<td>" + "degree" + "</td>" + "<td>" + degree + "</td>" + "</tr>";
+	table += "<tr>" + "<td>" + "number of child nodes" + "</td>" + "<td>" + childNum + "</td>" + "</tr>";
+	table += "<tr>" + "<td>" + "number of parent nodes" + "</td>" + "<td>" + parentNum + "</td>" + "</tr>";
+	table += "</table>";
+	return table;
+}
+
+function isValidUrl(str) {
+	var pattern = new RegExp(/((([A-Za-z]{3,9}:(?:\/\/)?)(?:[\-;:&=\+\$,\w]+@)?[A-Za-z0-9\.\-]+|(?:www\.|[\-;:&=\+\$,\w]+@)[A-Za-z0-9\.\-]+)((?:\/[\+~%\/\.\w\-_]*)?\??(?:[\-\+=&;%@\.\w_]*)#?(?:[\.\!\/\\\w]*))?)/); // fragment locater
+	return pattern.test(str);
+}
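+// e.g. isValidUrl('http://example.org/page') and isValidUrl('www.example.org')
+// return true, while a plain value such as 'degree' does not match.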
+
+
+
+/////// delete selected nodes
+
+var nodesToRemove;
+var edgesToRemove;
+
+function deleteSelectedNodes() {
+
+	nodesToRemove = cy.nodes(':selected');
+	edgesToRemove = nodesToRemove.connectedEdges();
+
+	cy.remove(nodesToRemove);
+	cy.remove(edgesToRemove);
+
+}
+/////// restore Deleted Nodes
+function restoreDeletedNodes() {
+	nodesToRemove.restore();
+	edgesToRemove.restore();
+}
+
+// expand(load) nodes
+function expandNodes(selectedNode) {
+
+	cy.nodes().unbind("tapend");
+	var selectedNodeId = selectedNode.id();
+	selectedNodeId = selectedNodeId.replace(/[^0-9\.]+/g, ""); // strip non-numeric characters from the id
+
+	var eles = allcy.nodes();
+
+	nodesToAdd = eles[selectedNodeId].outgoers();
+
+	showNodesToExpand(nodesToAdd);
+	cy.add(eles[selectedNodeId].outgoers());
+
+	selectedNode.removeClass('toBeExpaned');
+
+	$("select option:selected").each(function() {
+		shape = $(this).val();
+	});
+
+	cy.layout({
+		name: shape
+	});
+
+	checkBoxes();
+	cy.nodes().on("tapend", function(e) {
+
+		showNodeInfo(e.cyTarget);
+
+	});
+}
+
+function showNodesToExpand(toAdd) {
+
+	toAdd.nodes().forEach(function(ele) {
+
+		if (ele.outdegree() > 0 && !ele.hasClass('roots')) {
+			ele.addClass('toBeExpaned');
+		}
+	});
+
+}
+
+
+// Apply the left-panel checkbox options: (un)bind the highlight handlers and
+// the collapsed-node-count tooltip.
+
+function checkBoxes() {
+
+
+	if ($('#showInNode').is(":checked")) {
+		showIn = true;
+		cy.nodes().on("tapend", highlightIn);
+
+	} else {
+		showIn = false;
+		cy.nodes().off("tapend", highlightIn);
+		resetHighlightIn(showIn, showOut);
+		cy.nodes().removeClass('connectedNodeIn');
+	}
+
+	if ($('#showOutNode').is(":checked")) {
+		showOut = true;
+		cy.nodes().on("tapend", highlightOut);
+	} else {
+		showOut = false;
+		cy.nodes().off("tapend", highlightOut);
+		resetHighlightOut(showIn, showOut);
+	}
+
+	if ($('#collapseCount').is(":checked")) {
+
+		cy.nodes().on("mouseover", function(event) {
+			var nd = event.cyTarget;
+			countCollapse(nd);
+		});
+		cy.nodes().on("mouseout", function(event) {
+			var nd = event.cyTarget;
+			UnTip();
+		});
+	} else {
+		cy.nodes().off("mouseover");
+	}
+
+}
\ No newline at end of file
diff --git a/config/plugins/visualizations/graphviz/static/js/wz_tooltip.js b/config/plugins/visualizations/graphviz/static/js/wz_tooltip.js
new file mode 100644
index 0000000..01f55f2
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/static/js/wz_tooltip.js
@@ -0,0 +1,1301 @@
+/* This notice must be untouched at all times.
+Copyright (c) 2002-2008 Walter Zorn. All rights reserved.
+
+wz_tooltip.js	 v. 5.31
+
+The latest version is available at
+http://www.walterzorn.com
+or http://www.devira.com
+or http://www.walterzorn.de
+
+Created 1.12.2002 by Walter Zorn (Web: http://www.walterzorn.com )
+Last modified: 7.11.2008
+
+Easy-to-use cross-browser tooltips.
+Just include the script at the beginning of the <body> section, and invoke
+Tip('Tooltip text') to show and UnTip() to hide the tooltip, from the desired
+HTML eventhandlers. Example:
+<a onmouseover="Tip('Some text')" onmouseout="UnTip()" href="index.htm">My home page</a>
+No container DIV required.
+By default, width and height of tooltips are automatically adapted to content.
+Is even capable of dynamically converting arbitrary HTML elements to tooltips
+by calling TagToTip('ID_of_HTML_element_to_be_converted') instead of Tip(),
+which means you can put important, search-engine-relevant stuff into tooltips.
+Appearance & behaviour of tooltips can be individually configured
+via commands passed to Tip() or TagToTip().
+
+Tab Width: 4
+LICENSE: LGPL
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License (LGPL) as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+For more details on the GNU Lesser General Public License,
+see http://www.gnu.org/copyleft/lesser.html
+*/
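+// Editor's sketch (not part of the original library): typical calls, using the
+// command constants (uppercased config names) defined below.
+//   <a onmouseover="Tip('Plain tip', BGCOLOR, '#FFFFCC')" onmouseout="UnTip()">...</a>
+//   <a onmouseover="TagToTip('HiddenDivId', STICKY, true)" onmouseout="UnTip()">...</a>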
+
+var config = new Object();
+
+
+//===================  GLOBAL TOOLTIP CONFIGURATION  =========================//
+var tt_Debug	= true		// false or true - recommended: false once you release your page to the public
+var tt_Enabled	= true		// Allows to (temporarily) suppress tooltips, e.g. by providing the user with a button that sets this global variable to false
+var TagsToTip	= true		// false or true - if true, HTML elements to be converted to tooltips via TagToTip() are automatically hidden;
+							// if false, you should hide those HTML elements yourself
+
+// For each of the following config variables there exists a command, which is
+// just the variablename in uppercase, to be passed to Tip() or TagToTip() to
+// configure tooltips individually. Individual commands override global
+// configuration. Order of commands is arbitrary.
+// Example: onmouseover="Tip('Tooltip text', LEFT, true, BGCOLOR, '#FF9900', FADEIN, 400)"
+
+config. Above			= false		// false or true - tooltip above mousepointer
+config. BgColor			= '#E2E7FF'	// Background colour (HTML colour value, in quotes)
+config. BgImg			= ''		// Path to background image, none if empty string ''
+config. BorderColor		= '#003099'
+config. BorderStyle		= 'solid'	// Any permitted CSS value, but I recommend 'solid', 'dotted' or 'dashed'
+config. BorderWidth		= 1
+config. CenterMouse		= false		// false or true - center the tip horizontally below (or above) the mousepointer
+config. ClickClose		= false		// false or true - close tooltip if the user clicks somewhere
+config. ClickSticky		= false		// false or true - make tooltip sticky if user left-clicks on the hovered element while the tooltip is active
+config. CloseBtn		= false		// false or true - closebutton in titlebar
+config. CloseBtnColors	= ['#990000', '#FFFFFF', '#DD3333', '#FFFFFF']	// [Background, text, hovered background, hovered text] - use empty strings '' to inherit title colours
+config. CloseBtnText	= ' X '	// Close button text (may also be an image tag)
+config. CopyContent		= true		// When converting a HTML element to a tooltip, copy only the element's content, rather than converting the element by its own
+config. Delay			= 400		// Time span in ms until tooltip shows up
+config. Duration		= 0			// Time span in ms after which the tooltip disappears; 0 for infinite duration, < 0 for delay in ms _after_ the onmouseout until the tooltip disappears
+config. Exclusive		= false		// false or true - no other tooltip can appear until the current one has actively been closed
+config. FadeIn			= 100		// Fade-in duration in ms, e.g. 400; 0 for no animation
+config. FadeOut			= 100
+config. FadeInterval	= 30		// Duration of each fade step in ms (recommended: 30) - shorter is smoother but causes more CPU-load
+config. Fix				= null		// Fixated position, two modes. Mode 1: x- an y-coordinates in brackets, e.g. [210, 480]. Mode 2: Show tooltip at a position related to an HTML element: [ID of HTML element, x-offset, y-offset from HTML element], e.g. ['SomeID', 10, 30]. Value null (default) for no fixated positioning.
+config. FollowMouse		= true		// false or true - tooltip follows the mouse
+config. FontColor		= '#000044'
+config. FontFace		= 'Verdana,Geneva,sans-serif'
+config. FontSize		= '8pt'		// E.g. '9pt' or '12px' - unit is mandatory
+config. FontWeight		= 'normal'	// 'normal' or 'bold';
+config. Height			= 0			// Tooltip height; 0 for automatic adaption to tooltip content, < 0 (e.g. -100) for a maximum for automatic adaption
+config. JumpHorz		= false		// false or true - jump horizontally to other side of mouse if tooltip would extend past clientarea boundary
+config. JumpVert		= true		// false or true - jump vertically		"
+config. Left			= false		// false or true - tooltip on the left of the mouse
+config. OffsetX			= 14		// Horizontal offset of left-top corner from mousepointer
+config. OffsetY			= 8			// Vertical offset
+config. Opacity			= 100		// Integer between 0 and 100 - opacity of tooltip in percent
+config. Padding			= 3			// Spacing between border and content
+config. Shadow			= false		// false or true
+config. ShadowColor		= '#C0C0C0'
+config. ShadowWidth		= 5
+config. Sticky			= false		// false or true - fixate tip, ie. don't follow the mouse and don't hide on mouseout
+config. TextAlign		= 'left'	// 'left', 'right' or 'justify'
+config. Title			= ''		// Default title text applied to all tips (no default title: empty string '')
+config. TitleAlign		= 'left'	// 'left' or 'right' - text alignment inside the title bar
+config. TitleBgColor	= ''		// If empty string '', BorderColor will be used
+config. TitleFontColor	= '#FFFFFF'	// Color of title text - if '', BgColor (of tooltip body) will be used
+config. TitleFontFace	= ''		// If '' use FontFace (boldified)
+config. TitleFontSize	= ''		// If '' use FontSize
+config. TitlePadding	= 2
+config. Width			= 0			// Tooltip width; 0 for automatic adaption to tooltip content; < -1 (e.g. -240) for a maximum width for that automatic adaption;
+									// -1: tooltip width confined to the width required for the titlebar
+//=======  END OF TOOLTIP CONFIG, DO NOT CHANGE ANYTHING BELOW  ==============//
+
+
+
+
+//=====================  PUBLIC  =============================================//
+function Tip()
+{
+	tt_Tip(arguments, null);
+}
+function TagToTip()
+{
+	var t2t = tt_GetElt(arguments[0]);
+	if(t2t)
+		tt_Tip(arguments, t2t);
+}
+function UnTip()
+{
+	tt_OpReHref();
+	if(tt_aV[DURATION] < 0 && (tt_iState & 0x2))
+		tt_tDurt.Timer("tt_HideInit()", -tt_aV[DURATION], true);
+	else if(!(tt_aV[STICKY] && (tt_iState & 0x2)))
+		tt_HideInit();
+}
+
+//==================  PUBLIC PLUGIN API	 =====================================//
+// Extension eventhandlers currently supported:
+// OnLoadConfig, OnCreateContentString, OnSubDivsCreated, OnShow, OnMoveBefore,
+// OnMoveAfter, OnHideInit, OnHide, OnKill
+
+var tt_aElt = new Array(10), // Container DIV, outer title & body DIVs, inner title & body TDs, closebutton SPAN, shadow DIVs, and IFRAME to cover windowed elements in IE
+tt_aV = new Array(),	// Caches and enumerates config data for currently active tooltip
+tt_sContent,			// Inner tooltip text or HTML
+tt_t2t, tt_t2tDad,		// Tag converted to tip, and its DOM parent element
+tt_musX, tt_musY,
+tt_over,
+tt_x, tt_y, tt_w, tt_h; // Position, width and height of currently displayed tooltip
+
+function tt_Extension()
+{
+	tt_ExtCmdEnum();
+	tt_aExt[tt_aExt.length] = this;
+	return this;
+}
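+// Editor's sketch of a minimal extension (not in the original source): instantiate
+// tt_Extension() and attach any of the "On<Event>" handlers listed above; returning
+// false from a handler leaves the default behaviour in place.
+//   var ttLogExt = new tt_Extension();
+//   ttLogExt.OnShow = function() { if (window.console) console.log("tip shown"); return false; };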
+function tt_SetTipPos(x, y)
+{
+	var css = tt_aElt[0].style;
+
+	tt_x = x;
+	tt_y = y;
+	css.left = x + "px";
+	css.top = y + "px";
+	if(tt_ie56)
+	{
+		var ifrm = tt_aElt[tt_aElt.length - 1];
+		if(ifrm)
+		{
+			ifrm.style.left = css.left;
+			ifrm.style.top = css.top;
+		}
+	}
+}
+function tt_HideInit()
+{
+	if(tt_iState)
+	{
+		tt_ExtCallFncs(0, "HideInit");
+		tt_iState &= ~(0x4 | 0x8);
+		if(tt_flagOpa && tt_aV[FADEOUT])
+		{
+			tt_tFade.EndTimer();
+			if(tt_opa)
+			{
+				var n = Math.round(tt_aV[FADEOUT] / (tt_aV[FADEINTERVAL] * (tt_aV[OPACITY] / tt_opa)));
+				tt_Fade(tt_opa, tt_opa, 0, n);
+				return;
+			}
+		}
+		tt_tHide.Timer("tt_Hide();", 1, false);
+	}
+}
+function tt_Hide()
+{
+	if(tt_db && tt_iState)
+	{
+		tt_OpReHref();
+		if(tt_iState & 0x2)
+		{
+			tt_aElt[0].style.visibility = "hidden";
+			tt_ExtCallFncs(0, "Hide");
+		}
+		tt_tShow.EndTimer();
+		tt_tHide.EndTimer();
+		tt_tDurt.EndTimer();
+		tt_tFade.EndTimer();
+		if(!tt_op && !tt_ie)
+		{
+			tt_tWaitMov.EndTimer();
+			tt_bWait = false;
+		}
+		if(tt_aV[CLICKCLOSE] || tt_aV[CLICKSTICKY])
+			tt_RemEvtFnc(document, "mouseup", tt_OnLClick);
+		tt_ExtCallFncs(0, "Kill");
+		// In case of a TagToTip tip, hide converted DOM node and
+		// re-insert it into DOM
+		if(tt_t2t && !tt_aV[COPYCONTENT])
+			tt_UnEl2Tip();
+		tt_iState = 0;
+		tt_over = null;
+		tt_ResetMainDiv();
+		if(tt_aElt[tt_aElt.length - 1])
+			tt_aElt[tt_aElt.length - 1].style.display = "none";
+	}
+}
+function tt_GetElt(id)
+{
+	return(document.getElementById ? document.getElementById(id)
+			: document.all ? document.all[id]
+			: null);
+}
+function tt_GetDivW(el)
+{
+	return(el ? (el.offsetWidth || el.style.pixelWidth || 0) : 0);
+}
+function tt_GetDivH(el)
+{
+	return(el ? (el.offsetHeight || el.style.pixelHeight || 0) : 0);
+}
+function tt_GetScrollX()
+{
+	return(window.pageXOffset || (tt_db ? (tt_db.scrollLeft || 0) : 0));
+}
+function tt_GetScrollY()
+{
+	return(window.pageYOffset || (tt_db ? (tt_db.scrollTop || 0) : 0));
+}
+function tt_GetClientW()
+{
+	return tt_GetWndCliSiz("Width");
+}
+function tt_GetClientH()
+{
+	return tt_GetWndCliSiz("Height");
+}
+function tt_GetEvtX(e)
+{
+	return (e ? ((typeof(e.pageX) != tt_u) ? e.pageX : (e.clientX + tt_GetScrollX())) : 0);
+}
+function tt_GetEvtY(e)
+{
+	return (e ? ((typeof(e.pageY) != tt_u) ? e.pageY : (e.clientY + tt_GetScrollY())) : 0);
+}
+function tt_AddEvtFnc(el, sEvt, PFnc)
+{
+	if(el)
+	{
+		if(el.addEventListener)
+			el.addEventListener(sEvt, PFnc, false);
+		else
+			el.attachEvent("on" + sEvt, PFnc);
+	}
+}
+function tt_RemEvtFnc(el, sEvt, PFnc)
+{
+	if(el)
+	{
+		if(el.removeEventListener)
+			el.removeEventListener(sEvt, PFnc, false);
+		else
+			el.detachEvent("on" + sEvt, PFnc);
+	}
+}
+function tt_GetDad(el)
+{
+	return(el.parentNode || el.parentElement || el.offsetParent);
+}
+function tt_MovDomNode(el, dadFrom, dadTo)
+{
+	if(dadFrom)
+		dadFrom.removeChild(el);
+	if(dadTo)
+		dadTo.appendChild(el);
+}
+
+//======================  PRIVATE  ===========================================//
+var tt_aExt = new Array(),	// Array of extension objects
+
+tt_db, tt_op, tt_ie, tt_ie56, tt_bBoxOld,	// Browser flags
+tt_body,
+tt_ovr_,				// HTML element the mouse is currently over
+tt_flagOpa,				// Opacity support: 1=IE, 2=Khtml, 3=KHTML, 4=Moz, 5=W3C
+tt_maxPosX, tt_maxPosY,
+tt_iState = 0,			// Tooltip active |= 1, shown |= 2, move with mouse |= 4, exclusive |= 8
+tt_opa,					// Currently applied opacity
+tt_bJmpVert, tt_bJmpHorz,// Tip temporarily on other side of mouse
+tt_elDeHref,			// The tag from which we've removed the href attribute
+// Timer
+tt_tShow = new Number(0), tt_tHide = new Number(0), tt_tDurt = new Number(0),
+tt_tFade = new Number(0), tt_tWaitMov = new Number(0),
+tt_bWait = false,
+tt_u = "undefined";
+
+
+function tt_Init()
+{
+	tt_MkCmdEnum();
+	// Send old browsers instantly to hell
+	if(!tt_Browser() || !tt_MkMainDiv())
+		return;
+	tt_IsW3cBox();
+	tt_OpaSupport();
+	tt_AddEvtFnc(document, "mousemove", tt_Move);
+	// In Debug mode we search for TagToTip() calls in order to notify
+	// the user if they've forgotten to set the TagsToTip config flag
+	if(TagsToTip || tt_Debug)
+		tt_SetOnloadFnc();
+	// Ensure the tip be hidden when the page unloads
+	tt_AddEvtFnc(window, "unload", tt_Hide);
+}
+// Creates command names by translating config variable names to upper case
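+// (e.g. config.BgColor becomes the global command constant BGCOLOR)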
+function tt_MkCmdEnum()
+{
+	var n = 0;
+	for(var i in config)
+		eval("window." + i.toString().toUpperCase() + " = " + n++);
+	tt_aV.length = n;
+}
+function tt_Browser()
+{
+	var n, nv, n6, w3c;
+
+	n = navigator.userAgent.toLowerCase(),
+	nv = navigator.appVersion;
+	tt_op = (document.defaultView && typeof(eval("w" + "indow" + "." + "o" + "p" + "er" + "a")) != tt_u);
+	tt_ie = n.indexOf("msie") != -1 && document.all && !tt_op;
+	if(tt_ie)
+	{
+		var ieOld = (!document.compatMode || document.compatMode == "BackCompat");
+		tt_db = !ieOld ? document.documentElement : (document.body || null);
+		if(tt_db)
+			tt_ie56 = parseFloat(nv.substring(nv.indexOf("MSIE") + 5)) >= 5.5
+					&& typeof document.body.style.maxHeight == tt_u;
+	}
+	else
+	{
+		tt_db = document.documentElement || document.body ||
+				(document.getElementsByTagName ? document.getElementsByTagName("body")[0]
+				: null);
+		if(!tt_op)
+		{
+			n6 = document.defaultView && typeof document.defaultView.getComputedStyle != tt_u;
+			w3c = !n6 && document.getElementById;
+		}
+	}
+	tt_body = (document.getElementsByTagName ? document.getElementsByTagName("body")[0]
+				: (document.body || null));
+	if(tt_ie || n6 || tt_op || w3c)
+	{
+		if(tt_body && tt_db)
+		{
+			if(document.attachEvent || document.addEventListener)
+				return true;
+		}
+		else
+			tt_Err("wz_tooltip.js must be included INSIDE the body section,"
+					+ " immediately after the opening <body> tag.", false);
+	}
+	tt_db = null;
+	return false;
+}
+function tt_MkMainDiv()
+{
+	// Create the tooltip DIV
+	if(tt_body.insertAdjacentHTML)
+		tt_body.insertAdjacentHTML("afterBegin", tt_MkMainDivHtm());
+	else if(typeof tt_body.innerHTML != tt_u && document.createElement && tt_body.appendChild)
+		tt_body.appendChild(tt_MkMainDivDom());
+	if(window.tt_GetMainDivRefs /* FireFox Alzheimer */ && tt_GetMainDivRefs())
+		return true;
+	tt_db = null;
+	return false;
+}
+function tt_MkMainDivHtm()
+{
+	return(
+		'<div id="WzTtDiV"></div>' +
+		(tt_ie56 ? ('<iframe id="WzTtIfRm" src="javascript:false" scrolling="no" frameborder="0" style="filter:Alpha(opacity=0);position:absolute;top:0px;left:0px;display:none;"></iframe>')
+		: '')
+	);
+}
+function tt_MkMainDivDom()
+{
+	var el = document.createElement("div");
+	if(el)
+		el.id = "WzTtDiV";
+	return el;
+}
+function tt_GetMainDivRefs()
+{
+	tt_aElt[0] = tt_GetElt("WzTtDiV");
+	if(tt_ie56 && tt_aElt[0])
+	{
+		tt_aElt[tt_aElt.length - 1] = tt_GetElt("WzTtIfRm");
+		if(!tt_aElt[tt_aElt.length - 1])
+			tt_aElt[0] = null;
+	}
+	if(tt_aElt[0])
+	{
+		var css = tt_aElt[0].style;
+
+		css.visibility = "hidden";
+		css.position = "absolute";
+		css.overflow = "hidden";
+		return true;
+	}
+	return false;
+}
+function tt_ResetMainDiv()
+{
+	tt_SetTipPos(0, 0);
+	tt_aElt[0].innerHTML = "";
+	tt_aElt[0].style.width = "0px";
+	tt_h = 0;
+}
+function tt_IsW3cBox()
+{
+	var css = tt_aElt[0].style;
+
+	css.padding = "10px";
+	css.width = "40px";
+	tt_bBoxOld = (tt_GetDivW(tt_aElt[0]) == 40);
+	css.padding = "0px";
+	tt_ResetMainDiv();
+}
+function tt_OpaSupport()
+{
+	var css = tt_body.style;
+
+	tt_flagOpa = (typeof(css.KhtmlOpacity) != tt_u) ? 2
+				: (typeof(css.KHTMLOpacity) != tt_u) ? 3
+				: (typeof(css.MozOpacity) != tt_u) ? 4
+				: (typeof(css.opacity) != tt_u) ? 5
+				: (typeof(css.filter) != tt_u) ? 1
+				: 0;
+}
+// Ported from http://dean.edwards.name/weblog/2006/06/again/
+// (Dean Edwards et al.)
+function tt_SetOnloadFnc()
+{
+	tt_AddEvtFnc(document, "DOMContentLoaded", tt_HideSrcTags);
+	tt_AddEvtFnc(window, "load", tt_HideSrcTags);
+	if(tt_body.attachEvent)
+		tt_body.attachEvent("onreadystatechange",
+			function() {
+				if(tt_body.readyState == "complete")
+					tt_HideSrcTags();
+			} );
+	if(/WebKit|KHTML/i.test(navigator.userAgent))
+	{
+		var t = setInterval(function() {
+					if(/loaded|complete/.test(document.readyState))
+					{
+						clearInterval(t);
+						tt_HideSrcTags();
+					}
+				}, 10);
+	}
+}
+function tt_HideSrcTags()
+{
+	if(!window.tt_HideSrcTags || window.tt_HideSrcTags.done)
+		return;
+	window.tt_HideSrcTags.done = true;
+	if(!tt_HideSrcTagsRecurs(tt_body))
+		tt_Err("There are HTML elements to be converted to tooltips.\nIf you"
+				+ " want these HTML elements to be automatically hidden, you"
+				+ " must edit wz_tooltip.js, and set TagsToTip in the global"
+				+ " tooltip configuration to true.", true);
+}
+function tt_HideSrcTagsRecurs(dad)
+{
+	var ovr, asT2t;
+	// Walk the DOM tree for tags that have an onmouseover or onclick attribute
+	// containing a TagToTip('...') call.
+	// (.childNodes first since .children is bugous in Safari)
+	var a = dad.childNodes || dad.children || null;
+
+	for(var i = a ? a.length : 0; i;)
+	{--i;
+		if(!tt_HideSrcTagsRecurs(a[i]))
+			return false;
+		ovr = a[i].getAttribute ? (a[i].getAttribute("onmouseover") || a[i].getAttribute("onclick"))
+				: (typeof a[i].onmouseover == "function") ? (a[i].onmouseover || a[i].onclick)
+				: null;
+		if(ovr)
+		{
+			asT2t = ovr.toString().match(/TagToTip\s*\(\s*'[^'.]+'\s*[\),]/);
+			if(asT2t && asT2t.length)
+			{
+				if(!tt_HideSrcTag(asT2t[0]))
+					return false;
+			}
+		}
+	}
+	return true;
+}
+function tt_HideSrcTag(sT2t)
+{
+	var id, el;
+
+	// The ID passed to the found TagToTip() call identifies an HTML element
+	// to be converted to a tooltip, so hide that element
+	id = sT2t.replace(/.+'([^'.]+)'.+/, "$1");
+	el = tt_GetElt(id);
+	if(el)
+	{
+		if(tt_Debug && !TagsToTip)
+			return false;
+		else
+			el.style.display = "none";
+	}
+	else
+		tt_Err("Invalid ID\n'" + id + "'\npassed to TagToTip()."
+				+ " There exists no HTML element with that ID.", true);
+	return true;
+}
+function tt_Tip(arg, t2t)
+{
+	if(!tt_db || (tt_iState & 0x8))
+		return;
+	if(tt_iState)
+		tt_Hide();
+	if(!tt_Enabled)
+		return;
+	tt_t2t = t2t;
+	if(!tt_ReadCmds(arg))
+		return;
+	tt_iState = 0x1 | 0x4;
+	tt_AdaptConfig1();
+	tt_MkTipContent(arg);
+	tt_MkTipSubDivs();
+	tt_FormatTip();
+	tt_bJmpVert = false;
+	tt_bJmpHorz = false;
+	tt_maxPosX = tt_GetClientW() + tt_GetScrollX() - tt_w - 1;
+	tt_maxPosY = tt_GetClientH() + tt_GetScrollY() - tt_h - 1;
+	tt_AdaptConfig2();
+	// Ensure the tip be shown and positioned before the first onmousemove
+	tt_OverInit();
+	tt_ShowInit();
+	tt_Move();
+}
+function tt_ReadCmds(a)
+{
+	var i;
+
+	// First load the global config values, to initialize also values
+	// for which no command is passed
+	i = 0;
+	for(var j in config)
+		tt_aV[i++] = config[j];
+	// Then replace each cached config value for which a command is
+	// passed (ensure the # of command args plus value args be even)
+	if(a.length & 1)
+	{
+		for(i = a.length - 1; i > 0; i -= 2)
+			tt_aV[a[i - 1]] = a[i];
+		return true;
+	}
+	tt_Err("Incorrect call of Tip() or TagToTip().\n"
+			+ "Each command must be followed by a value.", true);
+	return false;
+}
+function tt_AdaptConfig1()
+{
+	tt_ExtCallFncs(0, "LoadConfig");
+	// Inherit unspecified title formattings from body
+	if(!tt_aV[TITLEBGCOLOR].length)
+		tt_aV[TITLEBGCOLOR] = tt_aV[BORDERCOLOR];
+	if(!tt_aV[TITLEFONTCOLOR].length)
+		tt_aV[TITLEFONTCOLOR] = tt_aV[BGCOLOR];
+	if(!tt_aV[TITLEFONTFACE].length)
+		tt_aV[TITLEFONTFACE] = tt_aV[FONTFACE];
+	if(!tt_aV[TITLEFONTSIZE].length)
+		tt_aV[TITLEFONTSIZE] = tt_aV[FONTSIZE];
+	if(tt_aV[CLOSEBTN])
+	{
+		// Use title colours for non-specified closebutton colours
+		if(!tt_aV[CLOSEBTNCOLORS])
+			tt_aV[CLOSEBTNCOLORS] = new Array("", "", "", "");
+		for(var i = 4; i;)
+		{--i;
+			if(!tt_aV[CLOSEBTNCOLORS][i].length)
+				tt_aV[CLOSEBTNCOLORS][i] = (i & 1) ? tt_aV[TITLEFONTCOLOR] : tt_aV[TITLEBGCOLOR];
+		}
+		// Enforce titlebar be shown
+		if(!tt_aV[TITLE].length)
+			tt_aV[TITLE] = " ";
+	}
+	// Circumvents broken display of images and fade-in flicker in Geckos < 1.8
+	if(tt_aV[OPACITY] == 100 && typeof tt_aElt[0].style.MozOpacity != tt_u && !Array.every)
+		tt_aV[OPACITY] = 99;
+	// Smartly shorten the delay for fade-in tooltips
+	if(tt_aV[FADEIN] && tt_flagOpa && tt_aV[DELAY] > 100)
+		tt_aV[DELAY] = Math.max(tt_aV[DELAY] - tt_aV[FADEIN], 100);
+}
+function tt_AdaptConfig2()
+{
+	if(tt_aV[CENTERMOUSE])
+	{
+		tt_aV[OFFSETX] -= ((tt_w - (tt_aV[SHADOW] ? tt_aV[SHADOWWIDTH] : 0)) >> 1);
+		tt_aV[JUMPHORZ] = false;
+	}
+}
+// Expose content globally so extensions can modify it
+function tt_MkTipContent(a)
+{
+	if(tt_t2t)
+	{
+		if(tt_aV[COPYCONTENT])
+			tt_sContent = tt_t2t.innerHTML;
+		else
+			tt_sContent = "";
+	}
+	else
+		tt_sContent = a[0];
+	tt_ExtCallFncs(0, "CreateContentString");
+}
+function tt_MkTipSubDivs()
+{
+	var sCss = 'position:relative;margin:0px;padding:0px;border-width:0px;left:0px;top:0px;line-height:normal;width:auto;',
+	sTbTrTd = ' cellspacing="0" cellpadding="0" border="0" style="' + sCss + '"><tbody style="' + sCss + '"><tr><td ';
+
+	tt_aElt[0].style.width = tt_GetClientW() + "px";
+	tt_aElt[0].innerHTML =
+		(''
+		+ (tt_aV[TITLE].length ?
+			('<div id="WzTiTl" style="position:relative;z-index:1;">'
+			+ '<table id="WzTiTlTb"' + sTbTrTd + 'id="WzTiTlI" style="' + sCss + '">'
+			+ tt_aV[TITLE]
+			+ '</td>'
+			+ (tt_aV[CLOSEBTN] ?
+				('<td align="right" style="' + sCss
+				+ 'text-align:right;">'
+				+ '<span id="WzClOsE" style="position:relative;left:2px;padding-left:2px;padding-right:2px;'
+				+ 'cursor:' + (tt_ie ? 'hand' : 'pointer')
+				+ ';" onmouseover="tt_OnCloseBtnOver(1)" onmouseout="tt_OnCloseBtnOver(0)" onclick="tt_HideInit()">'
+				+ tt_aV[CLOSEBTNTEXT]
+				+ '</span></td>')
+				: '')
+			+ '</tr></tbody></table></div>')
+			: '')
+		+ '<div id="WzBoDy" style="position:relative;z-index:0;">'
+		+ '<table' + sTbTrTd + 'id="WzBoDyI" style="' + sCss + '">'
+		+ tt_sContent
+		+ '</td></tr></tbody></table></div>'
+		+ (tt_aV[SHADOW]
+			? ('<div id="WzTtShDwR" style="position:absolute;overflow:hidden;"></div>'
+				+ '<div id="WzTtShDwB" style="position:relative;overflow:hidden;"></div>')
+			: '')
+		);
+	tt_GetSubDivRefs();
+	// Convert DOM node to tip
+	if(tt_t2t && !tt_aV[COPYCONTENT])
+		tt_El2Tip();
+	tt_ExtCallFncs(0, "SubDivsCreated");
+}
+function tt_GetSubDivRefs()
+{
+	var aId = new Array("WzTiTl", "WzTiTlTb", "WzTiTlI", "WzClOsE", "WzBoDy", "WzBoDyI", "WzTtShDwB", "WzTtShDwR");
+
+	for(var i = aId.length; i; --i)
+		tt_aElt[i] = tt_GetElt(aId[i - 1]);
+}
+function tt_FormatTip()
+{
+	var css, w, h, pad = tt_aV[PADDING], padT, wBrd = tt_aV[BORDERWIDTH],
+	iOffY, iOffSh, iAdd = (pad + wBrd) << 1;
+
+	//--------- Title DIV ----------
+	if(tt_aV[TITLE].length)
+	{
+		padT = tt_aV[TITLEPADDING];
+		css = tt_aElt[1].style;
+		css.background = tt_aV[TITLEBGCOLOR];
+		css.paddingTop = css.paddingBottom = padT + "px";
+		css.paddingLeft = css.paddingRight = (padT + 2) + "px";
+		css = tt_aElt[3].style;
+		css.color = tt_aV[TITLEFONTCOLOR];
+		if(tt_aV[WIDTH] == -1)
+			css.whiteSpace = "nowrap";
+		css.fontFamily = tt_aV[TITLEFONTFACE];
+		css.fontSize = tt_aV[TITLEFONTSIZE];
+		css.fontWeight = "bold";
+		css.textAlign = tt_aV[TITLEALIGN];
+		// Close button DIV
+		if(tt_aElt[4])
+		{
+			css = tt_aElt[4].style;
+			css.background = tt_aV[CLOSEBTNCOLORS][0];
+			css.color = tt_aV[CLOSEBTNCOLORS][1];
+			css.fontFamily = tt_aV[TITLEFONTFACE];
+			css.fontSize = tt_aV[TITLEFONTSIZE];
+			css.fontWeight = "bold";
+		}
+		if(tt_aV[WIDTH] > 0)
+			tt_w = tt_aV[WIDTH];
+		else
+		{
+			tt_w = tt_GetDivW(tt_aElt[3]) + tt_GetDivW(tt_aElt[4]);
+			// Some spacing between title DIV and closebutton
+			if(tt_aElt[4])
+				tt_w += pad;
+			// Restrict auto width to max width
+			if(tt_aV[WIDTH] < -1 && tt_w > -tt_aV[WIDTH])
+				tt_w = -tt_aV[WIDTH];
+		}
+		// Ensure the top border of the body DIV be covered by the title DIV
+		iOffY = -wBrd;
+	}
+	else
+	{
+		tt_w = 0;
+		iOffY = 0;
+	}
+
+	//-------- Body DIV ------------
+	css = tt_aElt[5].style;
+	css.top = iOffY + "px";
+	if(wBrd)
+	{
+		css.borderColor = tt_aV[BORDERCOLOR];
+		css.borderStyle = tt_aV[BORDERSTYLE];
+		css.borderWidth = wBrd + "px";
+	}
+	if(tt_aV[BGCOLOR].length)
+		css.background = tt_aV[BGCOLOR];
+	if(tt_aV[BGIMG].length)
+		css.backgroundImage = "url(" + tt_aV[BGIMG] + ")";
+	css.padding = pad + "px";
+	css.textAlign = tt_aV[TEXTALIGN];
+	if(tt_aV[HEIGHT])
+	{
+		css.overflow = "auto";
+		if(tt_aV[HEIGHT] > 0)
+			css.height = (tt_aV[HEIGHT] + iAdd) + "px";
+		else
+			tt_h = iAdd - tt_aV[HEIGHT];
+	}
+	// TD inside body DIV
+	css = tt_aElt[6].style;
+	css.color = tt_aV[FONTCOLOR];
+	css.fontFamily = tt_aV[FONTFACE];
+	css.fontSize = tt_aV[FONTSIZE];
+	css.fontWeight = tt_aV[FONTWEIGHT];
+	css.textAlign = tt_aV[TEXTALIGN];
+	if(tt_aV[WIDTH] > 0)
+		w = tt_aV[WIDTH];
+	// Width like title (if existent)
+	else if(tt_aV[WIDTH] == -1 && tt_w)
+		w = tt_w;
+	else
+	{
+		// Measure width of the body's inner TD, as some browsers would expand
+		// the container and outer body DIV to 100%
+		w = tt_GetDivW(tt_aElt[6]);
+		// Restrict auto width to max width
+		if(tt_aV[WIDTH] < -1 && w > -tt_aV[WIDTH])
+			w = -tt_aV[WIDTH];
+	}
+	if(w > tt_w)
+		tt_w = w;
+	tt_w += iAdd;
+
+	//--------- Shadow DIVs ------------
+	if(tt_aV[SHADOW])
+	{
+		tt_w += tt_aV[SHADOWWIDTH];
+		iOffSh = Math.floor((tt_aV[SHADOWWIDTH] * 4) / 3);
+		// Bottom shadow
+		css = tt_aElt[7].style;
+		css.top = iOffY + "px";
+		css.left = iOffSh + "px";
+		css.width = (tt_w - iOffSh - tt_aV[SHADOWWIDTH]) + "px";
+		css.height = tt_aV[SHADOWWIDTH] + "px";
+		css.background = tt_aV[SHADOWCOLOR];
+		// Right shadow
+		css = tt_aElt[8].style;
+		css.top = iOffSh + "px";
+		css.left = (tt_w - tt_aV[SHADOWWIDTH]) + "px";
+		css.width = tt_aV[SHADOWWIDTH] + "px";
+		css.background = tt_aV[SHADOWCOLOR];
+	}
+	else
+		iOffSh = 0;
+
+	//-------- Container DIV -------
+	tt_SetTipOpa(tt_aV[FADEIN] ? 0 : tt_aV[OPACITY]);
+	tt_FixSize(iOffY, iOffSh);
+}
+// Fixate the size so it can't dynamically change while the tooltip is moving.
+function tt_FixSize(iOffY, iOffSh)
+{
+	var wIn, wOut, h, add, pad = tt_aV[PADDING], wBrd = tt_aV[BORDERWIDTH], i;
+
+	tt_aElt[0].style.width = tt_w + "px";
+	tt_aElt[0].style.pixelWidth = tt_w;
+	wOut = tt_w - ((tt_aV[SHADOW]) ? tt_aV[SHADOWWIDTH] : 0);
+	// Body
+	wIn = wOut;
+	if(!tt_bBoxOld)
+		wIn -= (pad + wBrd) << 1;
+	tt_aElt[5].style.width = wIn + "px";
+	// Title
+	if(tt_aElt[1])
+	{
+		wIn = wOut - ((tt_aV[TITLEPADDING] + 2) << 1);
+		if(!tt_bBoxOld)
+			wOut = wIn;
+		tt_aElt[1].style.width = wOut + "px";
+		tt_aElt[2].style.width = wIn + "px";
+	}
+	// Max height specified
+	if(tt_h)
+	{
+		h = tt_GetDivH(tt_aElt[5]);
+		if(h > tt_h)
+		{
+			if(!tt_bBoxOld)
+				tt_h -= (pad + wBrd) << 1;
+			tt_aElt[5].style.height = tt_h + "px";
+		}
+	}
+	tt_h = tt_GetDivH(tt_aElt[0]) + iOffY;
+	// Right shadow
+	if(tt_aElt[8])
+		tt_aElt[8].style.height = (tt_h - iOffSh) + "px";
+	i = tt_aElt.length - 1;
+	if(tt_aElt[i])
+	{
+		tt_aElt[i].style.width = tt_w + "px";
+		tt_aElt[i].style.height = tt_h + "px";
+	}
+}
+function tt_DeAlt(el)
+{
+	var aKid;
+
+	if(el)
+	{
+		if(el.alt)
+			el.alt = "";
+		if(el.title)
+			el.title = "";
+		aKid = el.childNodes || el.children || null;
+		if(aKid)
+		{
+			for(var i = aKid.length; i;)
+				tt_DeAlt(aKid[--i]);
+		}
+	}
+}
+// This hack removes the native tooltips over links in Opera
+function tt_OpDeHref(el)
+{
+	if(!tt_op)
+		return;
+	if(tt_elDeHref)
+		tt_OpReHref();
+	while(el)
+	{
+		if(el.hasAttribute && el.hasAttribute("href"))
+		{
+			el.t_href = el.getAttribute("href");
+			el.t_stats = window.status;
+			el.removeAttribute("href");
+			el.style.cursor = "hand";
+			tt_AddEvtFnc(el, "mousedown", tt_OpReHref);
+			window.status = el.t_href;
+			tt_elDeHref = el;
+			break;
+		}
+		el = tt_GetDad(el);
+	}
+}
+function tt_OpReHref()
+{
+	if(tt_elDeHref)
+	{
+		tt_elDeHref.setAttribute("href", tt_elDeHref.t_href);
+		tt_RemEvtFnc(tt_elDeHref, "mousedown", tt_OpReHref);
+		window.status = tt_elDeHref.t_stats;
+		tt_elDeHref = null;
+	}
+}
+function tt_El2Tip()
+{
+	var css = tt_t2t.style;
+
+	// Store previous positioning
+	tt_t2t.t_cp = css.position;
+	tt_t2t.t_cl = css.left;
+	tt_t2t.t_ct = css.top;
+	tt_t2t.t_cd = css.display;
+	// Store the tag's parent element so we can restore that DOM branch
+	// when the tooltip is being hidden
+	tt_t2tDad = tt_GetDad(tt_t2t);
+	tt_MovDomNode(tt_t2t, tt_t2tDad, tt_aElt[6]);
+	css.display = "block";
+	css.position = "static";
+	css.left = css.top = css.marginLeft = css.marginTop = "0px";
+}
+function tt_UnEl2Tip()
+{
+	// Restore positioning and display
+	var css = tt_t2t.style;
+
+	css.display = tt_t2t.t_cd;
+	tt_MovDomNode(tt_t2t, tt_GetDad(tt_t2t), tt_t2tDad);
+	css.position = tt_t2t.t_cp;
+	css.left = tt_t2t.t_cl;
+	css.top = tt_t2t.t_ct;
+	tt_t2tDad = null;
+}
+function tt_OverInit()
+{
+	if(window.event)
+		tt_over = window.event.target || window.event.srcElement;
+	else
+		tt_over = tt_ovr_;
+	tt_DeAlt(tt_over);
+	tt_OpDeHref(tt_over);
+}
+function tt_ShowInit()
+{
+	tt_tShow.Timer("tt_Show()", tt_aV[DELAY], true);
+	if(tt_aV[CLICKCLOSE] || tt_aV[CLICKSTICKY])
+		tt_AddEvtFnc(document, "mouseup", tt_OnLClick);
+}
+function tt_Show()
+{
+	var css = tt_aElt[0].style;
+
+	// Override the z-index of the topmost wz_dragdrop.js D&D item
+	css.zIndex = Math.max((window.dd && dd.z) ? (dd.z + 2) : 0, 1010);
+	if(tt_aV[STICKY] || !tt_aV[FOLLOWMOUSE])
+		tt_iState &= ~0x4;
+	if(tt_aV[EXCLUSIVE])
+		tt_iState |= 0x8;
+	if(tt_aV[DURATION] > 0)
+		tt_tDurt.Timer("tt_HideInit()", tt_aV[DURATION], true);
+	tt_ExtCallFncs(0, "Show")
+	css.visibility = "visible";
+	tt_iState |= 0x2;
+	if(tt_aV[FADEIN])
+		tt_Fade(0, 0, tt_aV[OPACITY], Math.round(tt_aV[FADEIN] / tt_aV[FADEINTERVAL]));
+	tt_ShowIfrm();
+}
+function tt_ShowIfrm()
+{
+	if(tt_ie56)
+	{
+		var ifrm = tt_aElt[tt_aElt.length - 1];
+		if(ifrm)
+		{
+			var css = ifrm.style;
+			css.zIndex = tt_aElt[0].style.zIndex - 1;
+			css.display = "block";
+		}
+	}
+}
+function tt_Move(e)
+{
+	if(e)
+		tt_ovr_ = e.target || e.srcElement;
+	e = e || window.event;
+	if(e)
+	{
+		tt_musX = tt_GetEvtX(e);
+		tt_musY = tt_GetEvtY(e);
+	}
+	if(tt_iState & 0x4)
+	{
+		// Prevent jam of mousemove events
+		if(!tt_op && !tt_ie)
+		{
+			if(tt_bWait)
+				return;
+			tt_bWait = true;
+			tt_tWaitMov.Timer("tt_bWait = false;", 1, true);
+		}
+		if(tt_aV[FIX])
+		{
+			tt_iState &= ~0x4;
+			tt_PosFix();
+		}
+		else if(!tt_ExtCallFncs(e, "MoveBefore"))
+			tt_SetTipPos(tt_Pos(0), tt_Pos(1));
+		tt_ExtCallFncs([tt_musX, tt_musY], "MoveAfter");
+	}
+}
+function tt_Pos(iDim)
+{
+	var iX, bJmpMod, cmdAlt, cmdOff, cx, iMax, iScrl, iMus, bJmp;
+
+	// Map values according to dimension to calculate
+	if(iDim)
+	{
+		bJmpMod = tt_aV[JUMPVERT];
+		cmdAlt = ABOVE;
+		cmdOff = OFFSETY;
+		cx = tt_h;
+		iMax = tt_maxPosY;
+		iScrl = tt_GetScrollY();
+		iMus = tt_musY;
+		bJmp = tt_bJmpVert;
+	}
+	else
+	{
+		bJmpMod = tt_aV[JUMPHORZ];
+		cmdAlt = LEFT;
+		cmdOff = OFFSETX;
+		cx = tt_w;
+		iMax = tt_maxPosX;
+		iScrl = tt_GetScrollX();
+		iMus = tt_musX;
+		bJmp = tt_bJmpHorz;
+	}
+	if(bJmpMod)
+	{
+		if(tt_aV[cmdAlt] && (!bJmp || tt_CalcPosAlt(iDim) >= iScrl + 16))
+			iX = tt_PosAlt(iDim);
+		else if(!tt_aV[cmdAlt] && bJmp && tt_CalcPosDef(iDim) > iMax - 16)
+			iX = tt_PosAlt(iDim);
+		else
+			iX = tt_PosDef(iDim);
+	}
+	else
+	{
+		iX = iMus;
+		if(tt_aV[cmdAlt])
+			iX -= cx + tt_aV[cmdOff] - (tt_aV[SHADOW] ? tt_aV[SHADOWWIDTH] : 0);
+		else
+			iX += tt_aV[cmdOff];
+	}
+	// Prevent tip from extending past clientarea boundary
+	if(iX > iMax)
+		iX = bJmpMod ? tt_PosAlt(iDim) : iMax;
+	// In case of insufficient space on both sides, ensure the left/upper part
+	// of the tip be visible
+	if(iX < iScrl)
+		iX = bJmpMod ? tt_PosDef(iDim) : iScrl;
+	return iX;
+}
+function tt_PosDef(iDim)
+{
+	if(iDim)
+		tt_bJmpVert = tt_aV[ABOVE];
+	else
+		tt_bJmpHorz = tt_aV[LEFT];
+	return tt_CalcPosDef(iDim);
+}
+function tt_PosAlt(iDim)
+{
+	if(iDim)
+		tt_bJmpVert = !tt_aV[ABOVE];
+	else
+		tt_bJmpHorz = !tt_aV[LEFT];
+	return tt_CalcPosAlt(iDim);
+}
+function tt_CalcPosDef(iDim)
+{
+	return iDim ? (tt_musY + tt_aV[OFFSETY]) : (tt_musX + tt_aV[OFFSETX]);
+}
+function tt_CalcPosAlt(iDim)
+{
+	var cmdOff = iDim ? OFFSETY : OFFSETX;
+	var dx = tt_aV[cmdOff] - (tt_aV[SHADOW] ? tt_aV[SHADOWWIDTH] : 0);
+	if(tt_aV[cmdOff] > 0 && dx <= 0)
+		dx = 1;
+	return((iDim ? (tt_musY - tt_h) : (tt_musX - tt_w)) - dx);
+}
+function tt_PosFix()
+{
+	var iX, iY, el;
+
+	if(typeof(tt_aV[FIX][0]) == "number")
+	{
+		iX = tt_aV[FIX][0];
+		iY = tt_aV[FIX][1];
+	}
+	else
+	{
+		if(typeof(tt_aV[FIX][0]) == "string")
+			el = tt_GetElt(tt_aV[FIX][0]);
+		// First slot in array is direct reference to HTML element
+		else
+			el = tt_aV[FIX][0];
+		iX = tt_aV[FIX][1];
+		iY = tt_aV[FIX][2];
+		// By default, vert pos is related to bottom edge of HTML element
+		if(!tt_aV[ABOVE] && el)
+			iY += tt_GetDivH(el);
+		for(; el; el = el.offsetParent)
+		{
+			iX += el.offsetLeft || 0;
+			iY += el.offsetTop || 0;
+		}
+	}
+	// For a fixed tip positioned above the mouse, use the bottom edge as anchor
+	// (recommended by Christophe Rebeschini, 31.1.2008)
+	if(tt_aV[ABOVE])
+		iY -= tt_h;
+	tt_SetTipPos(iX, iY);
+}
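+// Step opacity 'now' towards target 'z' in n ticks of FADEINTERVAL ms; when a
+// fade-out reaches zero, the tip is hidden.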
+function tt_Fade(a, now, z, n)
+{
+	if(n)
+	{
+		now += Math.round((z - now) / n);
+		if((z > a) ? (now >= z) : (now <= z))
+			now = z;
+		else
+			tt_tFade.Timer(
+				"tt_Fade("
+				+ a + "," + now + "," + z + "," + (n - 1)
+				+ ")",
+				tt_aV[FADEINTERVAL],
+				true
+			);
+	}
+	now ? tt_SetTipOpa(now) : tt_Hide();
+}
+function tt_SetTipOpa(opa)
+{
+	// To circumvent the opacity nesting flaws of IE, we set the opacity
+	// for each sub-DIV separately, rather than for the container DIV.
+	tt_SetOpa(tt_aElt[5], opa);
+	if(tt_aElt[1])
+		tt_SetOpa(tt_aElt[1], opa);
+	if(tt_aV[SHADOW])
+	{
+		opa = Math.round(opa * 0.8);
+		tt_SetOpa(tt_aElt[7], opa);
+		tt_SetOpa(tt_aElt[8], opa);
+	}
+}
+function tt_OnCloseBtnOver(iOver)
+{
+	var css = tt_aElt[4].style;
+
+	iOver <<= 1;
+	css.background = tt_aV[CLOSEBTNCOLORS][iOver];
+	css.color = tt_aV[CLOSEBTNCOLORS][iOver + 1];
+}
+function tt_OnLClick(e)
+{
+	//  Ignore right-clicks
+	e = e || window.event;
+	if(!((e.button && e.button & 2) || (e.which && e.which == 3)))
+	{
+		if(tt_aV[CLICKSTICKY] && (tt_iState & 0x4))
+		{
+			tt_aV[STICKY] = true;
+			tt_iState &= ~0x4;
+		}
+		else if(tt_aV[CLICKCLOSE])
+			tt_HideInit();
+	}
+}
+function tt_Int(x)
+{
+	var y;
+
+	return(isNaN(y = parseInt(x)) ? 0 : y);
+}
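+// The timer handles above (tt_tShow etc.) are Number instances so that each can
+// carry its own setTimeout id in the .value property used here.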
+Number.prototype.Timer = function(s, iT, bUrge)
+{
+	if(!this.value || bUrge)
+		this.value = window.setTimeout(s, iT);
+}
+Number.prototype.EndTimer = function()
+{
+	if(this.value)
+	{
+		window.clearTimeout(this.value);
+		this.value = 0;
+	}
+}
+function tt_GetWndCliSiz(s)
+{
+	var db, y = window["inner" + s], sC = "client" + s, sN = "number";
+	if(typeof y == sN)
+	{
+		var y2;
+		return(
+			// Gecko or Opera with scrollbar
+			// ... quirks mode
+			((db = document.body) && typeof(y2 = db[sC]) == sN && y2 &&  y2 <= y) ? y2 
+			// ... strict mode
+			: ((db = document.documentElement) && typeof(y2 = db[sC]) == sN && y2 && y2 <= y) ? y2
+			// No scrollbar, or clientarea size == 0, or other browser (KHTML etc.)
+			: y
+		);
+	}
+	// IE
+	return(
+		// document.documentElement.client+s functional, returns > 0
+		((db = document.documentElement) && (y = db[sC])) ? y
+		// ... not functional, in which case document.body.client+s 
+		// is the clientarea size, fortunately
+		: document.body[sC]
+	);
+}
+function tt_SetOpa(el, opa)
+{
+	var css = el.style;
+
+	tt_opa = opa;
+	if(tt_flagOpa == 1)
+	{
+		if(opa < 100)
+		{
+			// Hacks for bugs of IE:
+			// 1.) Once a CSS filter has been applied, fonts are no longer
+			// anti-aliased, so we store the previous 'non-filter' to be
+			// able to restore it
+			if(typeof(el.filtNo) == tt_u)
+				el.filtNo = css.filter;
+			// 2.) A DIV cannot be made visible in a single step if an
+			// opacity < 100 has been applied while the DIV was hidden
+			var bVis = css.visibility != "hidden";
+			// 3.) In IE6, applying an opacity < 100 has no effect if the
+			//	   element has no layout (position, size, zoom, ...)
+			css.zoom = "100%";
+			if(!bVis)
+				css.visibility = "visible";
+			css.filter = "alpha(opacity=" + opa + ")";
+			if(!bVis)
+				css.visibility = "hidden";
+		}
+		else if(typeof(el.filtNo) != tt_u)
+			// Restore 'non-filter'
+			css.filter = el.filtNo;
+	}
+	else
+	{
+		opa /= 100.0;
+		switch(tt_flagOpa)
+		{
+		case 2:
+			css.KhtmlOpacity = opa; break;
+		case 3:
+			css.KHTMLOpacity = opa; break;
+		case 4:
+			css.MozOpacity = opa; break;
+		case 5:
+			css.opacity = opa; break;
+		}
+	}
+}
+function tt_Err(sErr, bIfDebug)
+{
+	if(tt_Debug || !bIfDebug)
+		alert("Tooltip Script Error Message:\n\n" + sErr);
+}
+
+//============  EXTENSION (PLUGIN) MANAGER  ===============//
+function tt_ExtCmdEnum()
+{
+	var s;
+
+	// Add new command(s) to the commands enum
+	for(var i in config)
+	{
+		s = "window." + i.toString().toUpperCase();
+		if(eval("typeof(" + s + ") == tt_u"))
+		{
+			eval(s + " = " + tt_aV.length);
+			tt_aV[tt_aV.length] = null;
+		}
+	}
+}
+function tt_ExtCallFncs(arg, sFnc)
+{
+	var b = false;
+	for(var i = tt_aExt.length; i;)
+	{--i;
+		var fnc = tt_aExt[i]["On" + sFnc];
+		// Call the method the extension has defined for this event
+		if(fnc && fnc(arg))
+			b = true;
+	}
+	return b;
+}
+
+tt_Init();
diff --git a/config/plugins/visualizations/graphviz/templates/graphviz.mako b/config/plugins/visualizations/graphviz/templates/graphviz.mako
new file mode 100644
index 0000000..6f6d689
--- /dev/null
+++ b/config/plugins/visualizations/graphviz/templates/graphviz.mako
@@ -0,0 +1,173 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
+
+    <title>${hda.name} | ${visualization_name}</title>
+    <%
+        root = h.url_for( '/' )
+    %>
+
+    <script type="text/javascript" src="/static/scripts/libs/jquery/jquery.js"></script>
+
+    ${h.stylesheet_link( root + 'plugins/visualizations/graphviz/static/css/style.css' )}
+
+    ${h.javascript_link( root + 'plugins/visualizations/graphviz/static/js/jquery.qtip.js' )}
+    ${h.javascript_link( root + 'plugins/visualizations/graphviz/static/js/cytoscape.min.js' )}
+    ${h.javascript_link( root + 'plugins/visualizations/graphviz/static/js/collapse.js' )}
+    ${h.javascript_link( root + 'plugins/visualizations/graphviz/static/js/toolPanelFunctions.js' )}
+    ${h.javascript_link( root + 'plugins/visualizations/graphviz/static/js/graphVis.js' )}
+
+</head>
+
+## ----------------------------------------------------------------------------
+<body>
+    ${h.javascript_link( root + 'plugins/visualizations/graphviz/static/js/wz_tooltip.js' )}
+
+    <script>
+        function parseNodeEdge( data ){
+            data = data.data[0];
+            parseJson( data );
+        }
+
+        $(document).ready(function() {
+
+            var hdaName = '${ hda.name | h }',
+                hdaId = '${trans.security.encode_id( hda.id )}',
+                hdaExt = '${hda.ext}',
+                rawUrl = '${h.url_for( controller="/datasets", action="index" )}',
+                apiUrl = '${h.url_for( "/" ) + "api/datasets"}',
+                dataUrl;
+
+            function errorHandler( xhr, status, message ){
+                console.error( xhr, status, message );
+                alert( "error loading data:\n" + message );
+            }
+
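+            // Choose a data URL and parser by dataset extension: txt is fetched raw and
+            // parsed as a text matrix, json is parsed directly, and anything else goes
+            // through the datasets API 'node-edge' provider.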
+            switch( hdaExt ){
+                case 'txt':
+                    dataUrl = rawUrl + '/' + hdaId + '/display?to_ext=txt';
+                    $.ajax(dataUrl, {
+                        dataType    : 'text',
+                        success     : parseTextMatrix,
+                        error       : errorHandler
+                    });
+                    break;
+
+                case 'json':
+                    dataUrl = rawUrl + '/' + hdaId + '/display?to_ext=json';
+                    $.ajax(dataUrl, {
+                        dataType    : 'json',
+                        success     : parseJson,
+                        error       : errorHandler
+                    });
+                    break;
+
+                default:
+                    dataUrl = apiUrl + '/' + hdaId;
+                    $.ajax(dataUrl, {
+                        dataType    : 'json',
+                        success     : parseNodeEdge,
+                        error       : errorHandler,
+                        data : {
+                            data_type : 'raw_data',
+                            provider  : 'node-edge'
+                        }
+                    });
+            }
+        });
+
+    </script>
+    <div id="cy"></div>
+
+    <!-- left control panel for rendering controls, hiding nodes, etc. - initially hidden -->
+    <div class="panel">
+        <br>
+        <div id="mainselection">
+            <select name="select" id="selectShape">
+                <option value="random" selected>Choose the shape</option>
+                <option value="random">random</option>
+                <option value="circle">circle</option>
+                <option value="grid">grid</option>
+                <option value="concentric">concentric</option>
+                <option value="breadthfirst">breadthfirst</option>
+            </select>
+        </div>
+        <br>
+
+        <input type="checkbox" class="css-checkbox" name="nlabelSelection" id="nodeLabelCheck" />
+        <label for="nodeLabelCheck" name="nodeLabelCheck" class="css-label">
+            show node label
+        </label>
+        <br>
+        <br>
+
+        <input type="checkbox" class="css-checkbox" name="elabelSelection" id="linkLabelCheck" />
+        <label for="linkLabelCheck" name="nodeLabelCheck" class="css-label">
+            show edge label
+        </label>
+        <br>
+        <br>
+
+        <input type="checkbox" class="css-checkbox" name="showOutgoing" id="showOutNode">
+        <label for="showOutNode" name="nodeLabelCheck" class="css-label">
+            highlight outgoing nodes
+        </label>
+        <br>
+        <br>
+
+        <input type="checkbox" class="css-checkbox" name="showInComing" id="showInNode">
+        <label for="showInNode" name="nodeLabelCheck" class="css-label">
+            highlight incoming nodes
+        </label>
+        <br>
+        <br>
+
+        <input type="checkbox" class="css-checkbox" name="showCollapsedNodeNum" id="collapseCount">
+        <label for="collapseCount" name="nodeLabelCheck" class="css-label">
+            show the number of collapsed nodes
+        </label>
+        <br>
+        <br>
+
+        <input type="button" class="btn colNode" name="collapseNode" id="collapseNode" value="Collapse Selected Node " onclick="colNode()" disabled="disabled">
+        <br>
+        <br>
+
+        <input type="button" class="btn expNode" name="expandNode" id="expandNode" value="Expand Selected Node " disabled="disabled">
+        <br>
+        <br>
+
+        <input type="button" class="btn delNode" name="deleteNodes" id="deleteNodes" value="Delete Selected Nodes " onclick="deleteSelectedNodes()" disabled="disabled">
+        <br>
+        <br>
+
+        <input type="button" class="btn" name="restoreNodes" id="restoreNodes" value="Restore Deleted Nodes" onclick="restoreDeletedNodes()">
+        <br>
+        <br>
+        <input type="button" class="btn" name="restore" id="restore" value="Restore The Structure  " onclick="restorGraphStructure()">
+        <br>
+        <br>
+        <input type="button" class="btn" name="export" id="export" value="Export PNG" onclick="exportFunction()">
+        <br>
+        <br>
+        <input type="button" class="btn" name="manual" id="manual" value="More Info" onclick="window.open('https://github.com/eteriSokhoyan/GraphVis')">
+        <br>
+        <br>
+    </div>
+    <!-- button to show above panel -->
+    <a href="javascript:void(0);" class="slider-arrow show">»</a>
+
+    <!-- right control panel for displaying node data - initially hidden -->
+    <div id="nodeInfoDiv" class="nodePanel">
+        <p> <strong>Node Description </strong></p>
+        <br><br>
+        <p> Please select a node </p>
+    </div>
+    <!-- button to show above panel -->
+    <a href="javascript:void(0);" class="slider-arrow-forNode show">«</a>
+</body>
+
+</html>
diff --git a/config/plugins/visualizations/phyloviz/config/phyloviz.xml b/config/plugins/visualizations/phyloviz/config/phyloviz.xml
new file mode 100644
index 0000000..4fd3230
--- /dev/null
+++ b/config/plugins/visualizations/phyloviz/config/phyloviz.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="Phyloviz">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">data.Newick</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">data.Nexus</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">xml.Phyloxml</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+        <param type="integer" default="0">tree_index</param>
+    </params>
+    <entry_point entry_point_type="mako">phyloviz.mako</entry_point>
+    <render_target>_top</render_target>
+</visualization>
diff --git a/config/plugins/visualizations/scatterplot/Gruntfile.js b/config/plugins/visualizations/scatterplot/Gruntfile.js
new file mode 100644
index 0000000..821ca92
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/Gruntfile.js
@@ -0,0 +1,46 @@
+// NOTE: use 'sudo npm install .', then 'grunt' to use this file
+
+module.exports = function(grunt) {
+
+    grunt.initConfig({
+        pkg: grunt.file.readJSON( 'package.json' ),
+
+        concat: {
+            // concat any js files in the src dir into a single file in the build dir
+            options: {
+                separator: ';\n'
+            },
+            dist: {
+                src : [ 'src/**/*.js' ],
+                dest: 'build/scatterplot-concat.js'
+            }
+        },
+
+        uglify: {
+            // uglify the concat single file directly into the static dir
+            options: {
+                // uncomment these to allow better source mapping during development
+                //mangle      : false,
+                //beautify    : true
+            },
+            dist: {
+                src : 'build/scatterplot-concat.js',
+                dest: 'static/scatterplot-edit.js'
+            }
+        },
+
+        watch: {
+            // watch for changes in the src dir
+            files: [ 'src/**.js' ],
+            tasks: [ 'default' ]
+        }
+    });
+
+    grunt.loadNpmTasks( 'grunt-contrib-concat' );
+    grunt.loadNpmTasks( 'grunt-contrib-uglify' );
+    grunt.loadNpmTasks( 'grunt-contrib-watch' );
+
+    grunt.registerTask( 'default', [ 'concat', 'uglify' ]);
+    // you can run grunt watch directly:
+    //  grunt watch
+};
diff --git a/config/plugins/visualizations/scatterplot/config/scatterplot.xml b/config/plugins/visualizations/scatterplot/config/scatterplot.xml
new file mode 100644
index 0000000..d264918
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/config/scatterplot.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="Scatterplot">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.CSV</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <entry_point entry_point_type="mako">scatterplot.mako</entry_point>
+</visualization>
diff --git a/config/plugins/visualizations/scatterplot/package.json b/config/plugins/visualizations/scatterplot/package.json
new file mode 100644
index 0000000..1adcb96
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/package.json
@@ -0,0 +1,23 @@
+{
+  "name": "galaxy-scatterplot",
+  "version": "0.0.0",
+  "description": "Scatterplot visualization plugin for the Galaxy informatics framework",
+  "main": " ",
+  "scripts": {
+    "test": "test"
+  },
+  "keywords": [
+    "galaxy",
+    "visualization",
+    "d3"
+  ],
+  "author": "Carl Eberhard",
+  "license": "BSD",
+  "devDependencies": {
+    "grunt": "~0.4.1",
+    "grunt-cli": "~0.1.9",
+    "grunt-contrib-concat": "~0.3.0",
+    "grunt-contrib-uglify": "~0.2.2",
+    "grunt-contrib-watch": "~0.5.3"
+  }
+}
diff --git a/config/plugins/visualizations/scatterplot/src/scatterplot-config-editor.js b/config/plugins/visualizations/scatterplot/src/scatterplot-config-editor.js
new file mode 100644
index 0000000..19d743d
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/src/scatterplot-config-editor.js
@@ -0,0 +1,387 @@
+/* =============================================================================
+todo:
+    localize
+    import button(display), func(model) - when user doesn't match
+    Move margins into wid/hi calcs (so final svg dims are w/h)
+    Better separation of AJAX in scatterplot.js (maybe pass in function?)
+    Labels should auto fill in chart control when dataset has column_names
+    Allow column selection/config using the peek output as a base for UI
+    Allow setting perPage in chart controls
+    Allow option to auto set width/height based on screen real estate avail.
+    Handle large number of pages better (Known genes hg19)
+    Use d3.nest to allow grouping, pagination/filtration by group (e.g. chromCol)
+    Semantic HTML (figure, caption)
+    Save as SVG/png
+    Does it work w/ Galaxy.Frame?
+    Embedding
+    Small multiples
+    Drag & Drop other splots onto current (redraw with new axis and differentiate the datasets)
+    Remove 'chart' names
+    Somehow link out from info box?
+
+    Subclass on specific datatypes? (vcf, cuffdiff, etc.)
+    What can be common/useful to other visualizations?
+
+============================================================================= */
+/**
+ *  Scatterplot config control UI as a backbone view
+ *      handles:
+ *          configuring which data will be used
+ *          configuring the plot display
+ */
+var ScatterplotConfigEditor = Backbone.View.extend({
+    //TODO: !should be a view on a visualization model
+    //logger      : console,
+    className   : 'scatterplot-control-form',
+
+    /** initialize requires a configuration Object containing a dataset Object */
+    initialize : function( attributes ){
+        if( !this.model ){
+            this.model = new Visualization({ type: 'scatterplot' });
+        }
+        //this.log( this + '.initialize, attributes:', attributes );
+
+        if( !attributes || !attributes.dataset ){
+            throw new Error( "ScatterplotConfigEditor requires a dataset" );
+        }
+        this.dataset = attributes.dataset;
+        //this.log( 'dataset:', this.dataset );
+
+        this.display = new ScatterplotDisplay({
+            dataset : attributes.dataset,
+            model   : this.model
+        });
+    },
+
+    // ------------------------------------------------------------------------- CONTROLS RENDERING
+    render : function(){
+        //console.log( this + '.render' );
+        // render the tab controls, areas and loading indicator
+        this.$el.empty().append( ScatterplotConfigEditor.templates.mainLayout({}));
+        if( this.model.id ){
+            this.$el.find( '.copy-btn' ).show();
+            this.$el.find( '.save-btn' ).text( 'Update saved' );
+        }
+        this.$el.find( '[title]' ).tooltip();
+
+        // render the tab content
+        this._render_dataControl();
+        this._render_chartControls();
+        this._render_chartDisplay();
+
+        // set up behaviours
+
+        // auto render if given both x, y column choices
+        var config = this.model.get( 'config' );
+        if( this.model.id && _.isFinite( config.xColumn ) && _.isFinite( config.yColumn ) ){
+            this.renderChart();
+        }
+        return this;
+    },
+
+    /** get an object with arrays keyed with possible column types (numeric, text, all)
+     *      and if metadata_column_types is set on the dataset, add the indices of each
+     *      column into the appropriate array.
+     *  Used to disable certain columns from being selected for x, y axes.
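+     *  Example (editor's illustration): metadata_column_types of [ 'str', 'int', 'float' ]
+     *      yields { numeric: [ 1, 2 ], text: [ 0 ], all: [ 0, 1, 2 ] }.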
+     */
+    _getColumnIndecesByType : function(){
+        //TODO: not sure these constraints are necessary now
+        var types = {
+            numeric : [],
+            text    : [],
+            all     : []
+        };
+        _.each( this.dataset.metadata_column_types || [], function( type, i ){
+            if( type === 'int' || type === 'float' ){
+                types.numeric.push( i );
+            } else if( type === 'str' || type === 'list' ){
+                types.text.push( i );
+            }
+            types.all.push( i );
+        });
+        if( types.numeric.length < 2 ){
+            types.numeric = [];
+        }
+        //console.log( 'types:', JSON.stringify( types ) );
+        return types;
+    },
+
+    /** controls for which columns are used to plot datapoints (and ids/additional info to attach if desired) */
+    _render_dataControl : function( $where ){
+        $where = $where || this.$el;
+        var editor  = this,
+            //column_names = dataset.metadata_column_names || [],
+            config  = this.model.get( 'config' ),
+            columnTypes = this._getColumnIndecesByType();
+
+        // render the html
+        var $dataControl = $where.find( '.tab-pane#data-control' );
+        $dataControl.html( ScatterplotConfigEditor.templates.dataControl({
+            peek : this.dataset.peek
+        }));
+
+        $dataControl.find( '.peek' ).peekColumnSelector({
+            controls : [
+                { label: 'X Column',  id: 'xColumn',  selected: config.xColumn, disabled: columnTypes.text },
+                { label: 'Y Column',  id: 'yColumn',  selected: config.yColumn, disabled: columnTypes.text },
+                { label: 'ID Column', id: 'idColumn', selected: config.idColumn }
+            ]
+            //renameColumns       : true
+
+        }).on( 'peek-column-selector.change', function( ev, data ){
+            //console.info( 'new selection:', data );
+            editor.model.set( 'config', data );
+
+        }).on( 'peek-column-selector.rename', function( ev, data ){
+            //console.info( 'new column names', data );
+        });
+
+        $dataControl.find( '[title]' ).tooltip();
+        return $dataControl;
+    },
+
+    /** tab content to control how the chart is rendered (data glyph size, chart size, etc.) */
+    _render_chartControls : function( $where ){
+//TODO: as controls on actual chart
+        $where = $where || this.$el;
+        var editor = this,
+            config = this.model.get( 'config' ),
+            $chartControls = $where.find( '#chart-control' );
+
+        // ---- skeleton/form for controls
+        $chartControls.html( ScatterplotConfigEditor.templates.chartControl( config ) );
+        //console.debug( '$chartControl:', $chartControls );
+
+        // ---- slider controls
+        // limits for controls (by control/chartConfig id)
+        var controlRanges = {
+                'datapointSize' : { min: 2, max: 10, step: 1 },
+                'width'         : { min: 200, max: 800, step: 20 },
+                'height'        : { min: 200, max: 800, step: 20 }
+            };
+
+        function onSliderChange(){
+            // set the model config when changed and update the slider output text
+            var $this = $( this ),
+                //note: returns a number nicely enough
+                newVal = $this.slider( 'value' );
+            // parent of slide event target has html5 attr data-config-key
+            editor.model.set( 'config', _.object([[ $this.parent().data( 'config-key' ), newVal ]]) );
+            $this.siblings( '.slider-output' ).text( newVal );
+        }
+
+        //console.debug( 'numeric sliders:', $chartControls.find( '.numeric-slider-input' ) );
+        $chartControls.find( '.numeric-slider-input' ).each( function(){
+            // set up the slider with control ranges, change event; set output text to initial value
+            var $this = $( this ),
+                configKey = $this.attr( 'data-config-key' ),
+                sliderSettings = _.extend( controlRanges[ configKey ], {
+                    value   : config[ configKey ],
+                    change  : onSliderChange,
+                    slide   : onSliderChange
+                });
+            //console.debug( configKey + ' slider settings:', sliderSettings );
+            $this.find( '.slider' ).slider( sliderSettings );
+            $this.children( '.slider-output' ).text( config[ configKey ] );
+        });
+
+        // ---- axes labels
+        var columnNames = this.dataset.metadata_column_names || [];
+        var xLabel = config.xLabel || columnNames[ config.xColumn ] || 'X';
+        var yLabel = config.yLabel || columnNames[ config.yColumn ] || 'Y';
+        // set label inputs to current x, y metadata_column_names (if any)
+        $chartControls.find( 'input[name="X-axis-label"]' ).val( xLabel )
+            .on( 'change', function(){
+                editor.model.set( 'config', { xLabel: $( this ).val() });
+            });
+        $chartControls.find( 'input[name="Y-axis-label"]' ).val( yLabel )
+            .on( 'change', function(){
+                editor.model.set( 'config', { yLabel: $( this ).val() });
+            });
+
+        //console.debug( '$chartControls:', $chartControls );
+        $chartControls.find( '[title]' ).tooltip();
+        return $chartControls;
+    },
+
+    /** render the tab content where the chart is displayed (but not the chart itself) */
+    _render_chartDisplay : function( $where ){
+        $where = $where || this.$el;
+        var $chartDisplay = $where.find( '.tab-pane#chart-display' );
+        this.display.setElement( $chartDisplay );
+        this.display.render();
+
+        $chartDisplay.find( '[title]' ).tooltip();
+        return $chartDisplay;
+    },
+
+    // ------------------------------------------------------------------------- EVENTS
+    events : {
+        'change #include-id-checkbox'          : 'toggleThirdColumnSelector',
+        'click #data-control .render-button'   : 'renderChart',
+        'click #chart-control .render-button'  : 'renderChart',
+        'click .save-btn'                      : 'saveVisualization',
+        //'click .copy-btn'                       : function(e){ this.model.save(); }
+    },
+
+    saveVisualization : function(){
+        var editor = this;
+        this.model.save()
+            .fail( function( xhr, status, message ){
+                console.error( xhr, status, message );
+                editor.trigger( 'save:error', editor );
+                alert( 'Error saving visualization:\n' + xhr.responseText );
+            })
+            .then( function(){
+                editor.display.render();
+            });
+    },
+
+    toggleThirdColumnSelector : function(){
+        // show/hide the id selector on the data settings panel
+        this.$el.find( 'select[name="idColumn"]' ).parent().toggle();
+    },
+
+    // ------------------------------------------------------------------------- CHART/STATS RENDERING
+    renderChart : function(){
+        //console.log( this + '.renderChart' );
+        // fetch the data, (re-)render the chart
+        this.$el.find( '.nav li.disabled' ).removeClass( 'disabled' );
+        this.$el.find( 'ul.nav' ).find( 'a[href="#chart-display"]' ).tab( 'show' );
+        this.display.fetchData();
+        //console.debug( this.display.$el );
+    },
+
+    toString : function(){
+        return 'ScatterplotConfigEditor(' + (( this.dataset )?( this.dataset.id ):( '' )) + ')';
+    }
+});
+
+ScatterplotConfigEditor.templates = {
+    // tabbed, main layout for the editor (not used for scatterplot-display)
+    mainLayout : _.template([
+        '<div class="scatterplot-editor tabbable tabs-left">',
+            // tab buttons/headers using Bootstrap
+            '<ul class="nav nav-tabs">',
+                // start with the data controls as the displayed tab
+                '<li class="active">',
+                    '<a title="Use this tab to change which data are used"',
+                       'href="#data-control" data-toggle="tab">Data Controls</a>',
+                '</li>',
+                '<li>',
+                    '<a title="Use this tab to change how the chart is drawn"',
+                       'href="#chart-control" data-toggle="tab" >Chart Controls</a>',
+                '</li>',
+                // chart starts as disabled since there's no info yet
+                '<li class="disabled">',
+                    '<a title="This tab will display the chart"',
+                       'href="#chart-display" data-toggle="tab">Chart</a>',
+                '</li>',
+                // button for saving the visualization config on the server
+                '<li class="file-controls">',
+                    '<button class="save-btn btn btn-default">Save</button>',
+                '</li>',
+            '</ul>',
+
+            // data form, chart config form, chart all get their own tab
+            '<div class="tab-content">',
+                // tab for data settings form
+                '<div id="data-control" class="scatterplot-config-control tab-pane active"></div>',
+
+                // tab for chart graphics control form
+                '<div id="chart-control" class="scatterplot-config-control tab-pane"></div>',
+
+                // tab for actual chart
+                '<div id="chart-display" class="scatterplot-display tab-pane"></div>',
+
+            '</div>',
+        '</div>',
+    ].join('')),
+
+    // the controls for data selection (this is mostly done with column selector now)
+    // TODO: this could be moved to the main template above
+    // TODO: localize
+    dataControl : _.template([
+        '<p class="help-text">',
+            'Use the following control to change which columns are used by the chart. Click any cell ',
+            'from the last three rows of the table to select the column for the appropriate data. ',
+            'Use the \'Draw\' button to render (or re-render) the chart with the current settings. ',
+        '</p>',
+
+        '<ul class="help-text" style="margin-left: 8px">',
+            '<li><b>X Column</b>: which column values will be used for the x axis of the chart.</li>',
+            '<li><b>Y Column</b>: which column values will be used for the y axis of the chart.</li>',
+            '<li><b>ID Column</b>: an additional column value displayed when the user hovers over a data point. ',
+            'It may be useful to select unique or categorical identifiers here (such as gene ids).',
+            '</li>',
+        '</ul>',
+
+        '<div class="column-selection">',
+            // the only dynamic thing
+            '<pre class="peek"><%= peek %></pre>',
+        '</div>',
+
+        '<p class="help-text help-text-small">',
+            '<b>Note</b>: If it can be determined from the dataset\'s filetype that a column is not numeric, ',
+            'that column choice may be disabled for either the x or y axis.',
+        '</p>',
+
+        '<button class="render-button btn btn-primary active">Draw</button>',
+    ].join('')),
+
+    chartControl : _.template([
+        '<p class="help-text">',
+            'Use the following controls to change how the chart is displayed. The slide controls can be moved ',
+            'by the mouse or, if the \'handle\' is in focus, your keyboard\'s arrow keys. ',
+            'Move the focus between controls by using the tab or shift+tab keys on your keyboard. ',
+            'Use the \'Draw\' button to render (or re-render) the chart with the current settings. ',
+        '</p>',
+
+        '<div data-config-key="datapointSize" class="form-input numeric-slider-input">',
+            '<label for="datapointSize">Size of data point: </label>',
+            '<div class="slider-output"><%- datapointSize %></div>',
+            '<div class="slider"></div>',
+            '<p class="form-help help-text-small">',
+                'Size of the graphic representation of each data point',
+            '</p>',
+        '</div>',
+
+        '<div data-config-key="width" class="form-input numeric-slider-input">',
+            '<label for="width">Chart width: </label>',
+            '<div class="slider-output"><%- width %></div>',
+            '<div class="slider"></div>',
+            '<p class="form-help help-text-small">',
+                '(not including chart margins and axes)',
+            '</p>',
+        '</div>',
+
+        '<div data-config-key="height" class="form-input numeric-slider-input">',
+            '<label for="height">Chart height: </label>',
+            '<div class="slider-output"><%- height %></div>',
+            '<div class="slider"></div>',
+            '<p class="form-help help-text-small">',
+                '(not including chart margins and axes)',
+            '</p>',
+        '</div>',
+
+        '<div data-config-key="X-axis-label"class="text-input form-input">',
+            '<label for="X-axis-label">Re-label the X axis: </label>',
+            '<input type="text" name="X-axis-label" id="X-axis-label" value="<%- xLabel %>" />',
+            '<p class="form-help help-text-small"></p>',
+        '</div>',
+
+        '<div data-config-key="Y-axis-label" class="text-input form-input">',
+            '<label for="Y-axis-label">Re-label the Y axis: </label>',
+            '<input type="text" name="Y-axis-label" id="Y-axis-label" value="<%- yLabel %>" />',
+            '<p class="form-help help-text-small"></p>',
+        '</div>',
+
+        '<button class="render-button btn btn-primary active">Draw</button>',
+        ].join('')),
+
+    // mainLayout      : scatterplot.editor,
+    // dataControl     : scatterplot.datacontrol,
+    // chartControl    : scatterplot.chartcontrol
+};
+
+//==============================================================================
diff --git a/config/plugins/visualizations/scatterplot/src/scatterplot-display.js b/config/plugins/visualizations/scatterplot/src/scatterplot-display.js
new file mode 100644
index 0000000..1f5c2f9
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/src/scatterplot-display.js
@@ -0,0 +1,214 @@
+// =============================================================================
+/**
+ *  Scatterplot display control UI as a backbone view
+ *      handles:
+ *          fetching the data (if needed)
+ *          computing and displaying data stats
+ *          controls for pagination of data (if needed)
+ */
+var ScatterplotDisplay = Backbone.View.extend({
+
+    initialize : function( attributes ){
+        this.data = null;
+        this.dataset = attributes.dataset;
+        this.lineCount = this.dataset.metadata_data_lines || null;
+    },
+
+    fetchData : function(){
+        this.showLoadingIndicator();
+        //console.debug( 'currPage', this.config.pagination.currPage );
+        var view = this,
+            config = this.model.get( 'config' ),
+            //TODO: very tied to datasets - should be generalized eventually
+            baseUrl = ( window.parent && parent.galaxy_config )? parent.galaxy_config.root : '/',
+            xhr = jQuery.getJSON( baseUrl + 'api/datasets/' + this.dataset.id, {
+                data_type   : 'raw_data',
+                provider    : 'dataset-column',
+                limit       : config.pagination.perPage,
+                offset      : ( config.pagination.currPage * config.pagination.perPage )
+            });
+        xhr.done( function( data ){
+            // no need to hide loading indicator, line info will write over that
+            view.data = data.data;
+            view.trigger( 'data:fetched', view );
+            view.renderData();
+        });
+        xhr.fail( function( xhr, status, message ){
+            console.error( xhr, status, message );
+            view.trigger( 'data:error', view );
+            alert( 'Error loading data:\n' + xhr.responseText );
+        });
+        return xhr;
+    },
+
+    showLoadingIndicator : function(){
+        // display the loading indicator over the tab panels if hidden, update message (if passed)
+        this.$el.find( '.scatterplot-data-info' ).html([
+            '<div class="loading-indicator">',
+                '<span class="fa fa-spinner fa-spin"></span>',
+                '<span class="loading-indicator-message">loading...</span>',
+            '</div>'
+        ].join( '' ));
+    },
+
+    template : function(){
+        var html = [
+            '<div class="controls clear">',
+                '<div class="right">',
+                    '<p class="scatterplot-data-info"></p>',
+                    '<button class="stats-toggle-btn">Stats</button>',
+                    '<button class="rerender-btn">Redraw</button>',
+                '</div>',
+                '<div class="left">',
+                    '<div class="page-control"></div>',
+                '</div>',
+            '</div>',
+            '<svg/>', //TODO: id
+            '<div class="stats-display"></div>'
+        ].join( '' );
+        return html;
+    },
+
+    render : function(){
+        this.$el.addClass( 'scatterplot-display' ).html( this.template() );
+        if( this.data ){
+            this.renderData();
+        }
+        return this;
+    },
+
+    renderData : function(){
+        this.renderLeftControls();
+        this.renderRightControls();
+        this.renderPlot( this.data );
+        this.getStats();
+    },
+
+    renderLeftControls : function(){
+        var display = this,
+            config = this.model.get( 'config' );
+
+        this.$el.find( '.controls .left .page-control' ).pagination({
+            startingPage : config.pagination.currPage,
+            perPage      : config.pagination.perPage,
+            totalDataSize: this.lineCount,
+            currDataSize : this.data.length
+
+        //TODO: move to named function and remove only named
+        }).off().on( 'pagination.page-change', function( event, page ){
+            //console.debug( 'pagination:page-change', page );
+            config.pagination.currPage = page;
+            display.model.set( 'config', { pagination: config.pagination });
+            //console.debug( pagination, display.model.get( 'config' ).pagination );
+            display.resetZoom();
+            display.fetchData();
+        });
+        return this;
+    },
+
+    renderRightControls : function(){
+        var view = this;
+        this.setLineInfo( this.data );
+        // clear prev. handlers due to closure around data
+        this.$el.find( '.stats-toggle-btn' )
+            .off().click( function(){
+                view.toggleStats();
+            });
+        this.$el.find( '.rerender-btn' )
+            .off().click( function(){
+                view.resetZoom();
+                view.renderPlot();
+            });
+    },
+
+    /** render and show the d3 plot into the svg node of the view */
+    renderPlot : function(){
+        var view = this,
+            $svg = this.$el.find( 'svg' );
+        // turn off stats, clear previous svg, and make it visible
+        this.toggleStats( false );
+        $svg.off().empty().show()
+            // set up listeners for events from plot
+            .on( 'zoom.scatterplot', function( ev, zoom ){
+                //TODO: possibly throttle this
+                //console.debug( 'zoom.scatterplot', zoom.scale, zoom.translate );
+                view.model.set( 'config', zoom );
+            });
+        //TODO: may not be necessary to off/on this more than the initial on
+        // call the sep. d3 function to generate the plot
+        scatterplot( $svg.get( 0 ), this.model.get( 'config' ), this.data );
+    },
+
+    setLineInfo : function( data, contents ){
+        if( data ){
+            var config = this.model.get( 'config' ),
+                totalLines = this.lineCount || 'an unknown total',
+                lineStart  = config.pagination.currPage * config.pagination.perPage,
+                lineEnd    = lineStart + data.length;
+            this.$el.find( '.controls p.scatterplot-data-info' )
+               .text([ lineStart + 1, 'to', lineEnd, 'of', totalLines ].join( ' ' ));
+        } else {
+            this.$el.find( '.controls p.scatterplot-data-info' ).html( contents || '' );
+        }
+
+        return this;
+    },
+
+    resetZoom : function( scale, translate ){
+        scale = ( scale !== undefined )?( scale ):( 1 );
+        translate = ( translate !== undefined )?( translate ):( [ 0, 0 ] );
+        this.model.set( 'config', { scale: scale, translate: translate } );
+        return this;
+    },
+
+    // ------------------------------------------------------------------------ statistics display
+    /** create a webworker to calc stats for data given */
+    getStats : function(){
+        if( !this.data ){ return; }
+        var view = this,
+            config = this.model.get( 'config' ),
+            meanWorker = new Worker( '/plugins/visualizations/scatterplot/static/worker-stats.js' );
+        meanWorker.postMessage({
+            data    : this.data,
+            keys    : [ config.xColumn, config.yColumn ]
+        });
+        meanWorker.onerror = function( event ){
+            meanWorker.terminate();
+        };
+        meanWorker.onmessage = function( event ){
+            view.renderStats( event.data );
+        };
+    },
+
+    renderStats : function( stats, error ){
+        //console.debug( 'renderStats:', stats, error );
+        //console.debug( JSON.stringify( stats, null, '  ' ) );
+        var config = this.model.get( 'config' ),
+            $statsTable = this.$el.find( '.stats-display' ),
+            xLabel = config.xLabel, yLabel = config.yLabel,
+            $table = $( '<table/>' ).addClass( 'table' )
+                .append([ '<thead><tr><th></th><th>', xLabel, '</th><th>', yLabel, '</th></tr></thead>' ].join( '' ))
+                .append( _.map( stats, function( stat, key ){
+                    return $([ '<tr><td>', key, '</td><td>', stat[0], '</td><td>', stat[1], '</td></tr>' ].join( '' ));
+                }));
+        $statsTable.empty().append( $table );
+    },
+
+    toggleStats : function( showStats ){
+        var $statsDisplay = this.$el.find( '.stats-display' );
+        showStats = ( showStats === undefined )?( $statsDisplay.is( ':hidden' ) ):( showStats );
+        if( showStats ){
+            this.$el.find( 'svg' ).hide();
+            $statsDisplay.show();
+            this.$el.find( '.controls .stats-toggle-btn' ).text( 'Plot' );
+        } else {
+            $statsDisplay.hide();
+            this.$el.find( 'svg' ).show();
+            this.$el.find( '.controls .stats-toggle-btn' ).text( 'Stats' );
+        }
+    },
+
+    toString : function(){
+        return 'ScatterplotDisplay()';
+    }
+});
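+
+// Example usage (a sketch; mirrors the embedding in templates/scatterplot.mako --
+// the model/dataset values here are hypothetical placeholders):
+//   var display = new ScatterplotDisplay({
+//       el      : $( '.scatterplot-display' ),
+//       model   : new ScatterplotModel(),
+//       dataset : { id: 'abc123', metadata_data_lines: 1000 }
+//   }).render();
+//   display.fetchData();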
diff --git a/config/plugins/visualizations/scatterplot/src/scatterplot-model.js b/config/plugins/visualizations/scatterplot/src/scatterplot-model.js
new file mode 100644
index 0000000..d5a294d
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/src/scatterplot-model.js
@@ -0,0 +1,39 @@
+var ScatterplotModel = Visualization.extend({
+
+    defaults : {
+        type    : 'scatterplot',
+
+        config  : {
+            // shouldn't be needed for properly saved scatterplots - also often incorrect
+            //xColumn : 0,
+            //yColumn : 1,
+            
+            pagination  : {
+                currPage    : 0,
+                perPage     : 3000
+            },
+
+            // graph style
+            width   : 400,
+            height  : 400,
+
+            margin : {
+                top     : 16,
+                right   : 16,
+                bottom  : 40,
+                left    : 54
+            },
+
+            xTicks   : 10,
+            xLabel   : 'X',
+            yTicks   : 10,
+            yLabel   : 'Y',
+
+            datapointSize   : 4,
+            animDuration    : 500,
+
+            scale       : 1,
+            translate   : [ 0, 0 ]
+        }
+    }
+});
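+
+// A minimal sketch of reading the defaults (standard Backbone behavior):
+//   var model = new ScatterplotModel();
+//   model.get( 'config' ).width;          // -> 400
+//   model.get( 'config' ).pagination;     // -> { currPage: 0, perPage: 3000 }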
diff --git a/config/plugins/visualizations/scatterplot/src/scatterplot.js b/config/plugins/visualizations/scatterplot/src/scatterplot.js
new file mode 100644
index 0000000..78a9f0d
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/src/scatterplot.js
@@ -0,0 +1,287 @@
+// =============================================================================
+/**
+ *  Two-variable scatterplot visualization using d3
+ *      Uses semi-transparent circles to show the density of data in the x, y grid
+ *      usage :
+ *          scatterplot( $( 'svg' ).get(0), config, data )
+ */
+function scatterplot( renderTo, config, data ){
+    //console.log( 'scatterplot', config );
+
+    var translateStr = function( x, y ){
+            return 'translate(' + x + ',' + y + ')';
+        },
+        rotateStr = function( d, x, y ){
+            return 'rotate(' + d + ',' + x + ',' + y + ')';
+        },
+        getX = function( d, i ){
+            //console.debug( d[ config.xColumn ] );
+            return d[ config.xColumn ];
+        },
+        getY = function( d, i ){
+            //console.debug( d[ config.yColumn ] );
+            return d[ config.yColumn ];
+        };
+
+    // .................................................................... scales
+    var stats = {
+            x    : { extent: d3.extent( data, getX ) },
+            y    : { extent: d3.extent( data, getY ) }
+        };
+
+    //TODO: set pan/zoom limits
+    //  from http://stackoverflow.com/questions/10422738/limiting-domain-when-zooming-or-panning-in-d3-js
+    //self.x.domain([Math.max(self.x.domain()[0], self.options.xmin), Math.min(self.x.domain()[1], self.options.xmax)]);
+    //self.y.domain([Math.max(self.y.domain()[0], self.options.ymin), Math.min(self.y.domain()[1], self.options.ymax)]);
+    var interpolaterFns = {
+        x : d3.scale.linear()
+            .domain( stats.x.extent )
+            .range([ 0, config.width ]),
+        y : d3.scale.linear()
+            .domain( stats.y.extent )
+            .range([ config.height, 0 ])
+    };
+
+    // .................................................................... main components
+    var zoom = d3.behavior.zoom()
+        .x( interpolaterFns.x )
+        .y( interpolaterFns.y )
+        .scaleExtent([ 1, 30 ])
+        .scale( config.scale || 1 )
+        .translate( config.translate || [ 0, 0 ] );
+
+    //console.debug( renderTo );
+    var svg = d3.select( renderTo )
+        .attr( "class", "scatterplot" )
+        //.attr( "width",  config.width  + ( config.margin.right + config.margin.left ) )
+        .attr( "width",  '100%' )
+        .attr( "height", config.height + ( config.margin.top + config.margin.bottom ) );
+
+    var content = svg.append( "g" )
+        .attr( "class", "content" )
+        .attr( "transform", translateStr( config.margin.left, config.margin.top ) )
+        .call( zoom );
+
+    // a BIG gotcha - zoom (or any mouse/touch event in SVG?) requires the pointer to be over an object
+    //  create a transparent rect to be that object here
+    content.append( 'rect' )
+        .attr( "class", "zoom-rect" )
+        .attr( "width", config.width ).attr( "height", config.height )
+        .style( "fill", "transparent" );
+
+    //console.log( 'svg:', svg, 'content:', content );
+
+    // .................................................................... axes
+    var axis = { x : {}, y : {} };
+    //console.log( 'xTicks:', config.xTicks );
+    //console.log( 'yTicks:', config.yTicks );
+    axis.x.fn = d3.svg.axis()
+        .orient( 'bottom' )
+        .scale( interpolaterFns.x )
+        .ticks( config.xTicks )
+        // this will convert thousands -> k, millions -> M, etc.
+        .tickFormat( d3.format( 's' ) );
+
+    axis.y.fn = d3.svg.axis()
+        .orient( 'left' )
+        .scale( interpolaterFns.y )
+        .ticks( config.yTicks )
+        .tickFormat( d3.format( 's' ) );
+
+    axis.x.g = content.append( 'g' )
+        .attr( 'class', 'x axis' )
+        .attr( 'transform', translateStr( 0, config.height ) )
+        .call( axis.x.fn );
+    //console.log( 'axis.x.g:', axis.x.g );
+
+    axis.y.g = content.append( 'g' )
+        .attr( 'class', 'y axis' )
+        .call( axis.y.fn );
+    //console.log( 'axis.y.g:', axis.y.g );
+
+    // ................................ axis labels
+    var padding = 6;
+    // x-axis label
+    axis.x.label = svg.append( 'text' )
+        .attr( 'id', 'x-axis-label' )
+        .attr( 'class', 'axis-label' )
+        .text( config.xLabel )
+        // align to the top-middle
+        .attr( 'text-anchor', 'middle' )
+        .attr( 'dominant-baseline', 'text-after-edge' )
+        .attr( 'x', ( config.width / 2 ) + config.margin.left )
+        // place 4 pixels below the axis bounds
+        .attr( 'y', ( config.height + config.margin.bottom + config.margin.top ) - padding );
+    //console.log( 'axis.x.label:', axis.x.label );
+
+//TODO: anchor to left of x margin/graph
+    // y-axis label
+    // place 4 pixels left of the axis.y.g left edge
+    axis.y.label = svg.append( 'text' )
+        .attr( 'id', 'y-axis-label' )
+        .attr( 'class', 'axis-label' )
+        .text( config.yLabel )
+        // align to bottom-middle
+        .attr( 'text-anchor', 'middle' )
+        .attr( 'dominant-baseline', 'text-before-edge' )
+        .attr( 'x', padding )
+        .attr( 'y', config.height / 2 )
+        // rotate around the alignment point
+        .attr( 'transform', rotateStr( -90, padding, config.height / 2 ) );
+    //console.log( 'axis.y.label:', axis.y.label );
+
+    axis.redraw = function _redrawAxis(){
+        svg.select( ".x.axis" ).call( axis.x.fn );
+        svg.select( ".y.axis" ).call( axis.y.fn );
+    };
+
+    // .................................................................... grid
+    function renderGrid(){
+        var grid = { v : {}, h: {} };
+        // vertical
+        grid.v.lines = content.selectAll( 'line.v-grid-line' )
+            // data are the axis ticks; enter, update, exit
+            .data( interpolaterFns.x.ticks( axis.x.fn.ticks()[0] ) );
+        // enter: append any extra lines needed (more ticks)
+        grid.v.lines.enter()
+            .append( 'svg:line' )
+            .classed( 'grid-line v-grid-line', true );
+        // update: set coords
+        grid.v.lines
+            .attr( 'x1', interpolaterFns.x )
+            .attr( 'x2', interpolaterFns.x )
+            .attr( 'y1', 0 )
+            .attr( 'y2', config.height );
+        // exit: just remove them
+        grid.v.lines.exit().remove();
+        //console.log( 'grid.v.lines:', grid.v.lines );
+
+        // horizontal
+        grid.h.lines = content.selectAll( 'line.h-grid-line' )
+            .data( interpolaterFns.y.ticks( axis.y.fn.ticks()[0] ) );
+        grid.h.lines.enter()
+            .append( 'svg:line' )
+            .classed( 'grid-line h-grid-line', true );
+        grid.h.lines
+            .attr( 'x1', 0 )
+            .attr( 'x2', config.width )
+            .attr( 'y1', interpolaterFns.y )
+            .attr( 'y2', interpolaterFns.y );
+        grid.h.lines.exit().remove();
+        //console.log( 'grid.h.lines:', grid.h.lines );
+        return grid;
+    }
+    var grid = renderGrid();
+
+    //// .................................................................... datapoints
+    var datapoints = content.selectAll( '.glyph' ).data( data )
+        // enter - NEW data to be added as glyphs
+        .enter().append( 'svg:circle' )
+            .classed( "glyph", true )
+            .attr( "cx", function( d, i ){ return interpolaterFns.x( getX( d, i ) ); })
+            .attr( "cy", function( d, i ){ return interpolaterFns.y( getY( d, i ) ); })
+            .attr( "r",  0 );
+
+    // for all EXISTING glyphs and those that need to be added: transition anim to final state
+    datapoints.transition().duration( config.animDuration )
+        .attr( "r", config.datapointSize );
+    //console.log( 'datapoints:', datapoints );
+
+    function _redrawDatapointsClipped(){
+        return datapoints
+            //TODO: interpolates twice
+            .attr( "cx", function( d, i ){ return interpolaterFns.x( getX( d, i ) ); })
+            .attr( "cy", function( d, i ){ return interpolaterFns.y( getY( d, i ) ); })
+            .style( 'display', 'block' )
+            // filter out points now outside the graph content area and hide them
+            .filter( function( d, i ){
+                var cx = d3.select( this ).attr( "cx" ),
+                    cy = d3.select( this ).attr( "cy" );
+                if( cx < 0 || cx > config.width  ){ return true; }
+                if( cy < 0 || cy > config.height ){ return true; }
+                return false;
+            }).style( 'display', 'none' );
+    }
+    _redrawDatapointsClipped();
+
+    // .................................................................... behaviors
+    function zoomed( scale, translateX, translateY ){
+        //console.debug( 'zoom', this, zoom.scale(), zoom.translate() );
+
+        // re-render axis, grid, and datapoints
+        $( '.chart-info-box' ).remove();
+        axis.redraw();
+        _redrawDatapointsClipped();
+        grid = renderGrid();
+
+        $( svg.node() ).trigger( 'zoom.scatterplot', {
+            scale       : zoom.scale(),
+            translate   : zoom.translate()
+        });
+    }
+    //TODO: programmatically set zoom/pan and save in config
+    //TODO: set pan/zoom limits
+    zoom.on( "zoom", zoomed );
+
+    function infoBox( top, left, d ){
+        // create an abs pos. element containing datapoint data (d) near the point (top, left)
+        //  with added padding to clear the mouse pointer
+        left += 8;
+        return $([
+            '<div class="chart-info-box" style="position: absolute">',
+                (( config.idColumn !== undefined )?( '<div>' + d[ config.idColumn ] + '</div>' ):( '' )),
+                '<div>', getX( d ), '</div>',
+                '<div>', getY( d ), '</div>',
+            '</div>'
+        ].join( '' ) ).css({ top: top, left: left, 'z-index': 2 });
+    }
+
+    datapoints.on( 'mouseover', function( d, i ){
+        var datapoint = d3.select( this );
+        datapoint
+            .classed( 'highlight', true )
+            .style( 'fill', 'red' )
+            .style( 'fill-opacity', 1 );
+
+        // create horiz line to axis
+        content.append( 'line' )
+            .attr( 'stroke', 'red' )
+            .attr( 'stroke-width', 1 )
+            // start not at center, but at the edge of the circle - to prevent mouseover thrashing
+            .attr( 'x1', datapoint.attr( 'cx' ) - config.datapointSize )
+            .attr( 'y1', datapoint.attr( 'cy' ) )
+            .attr( 'x2', 0 )
+            .attr( 'y2', datapoint.attr( 'cy' ) )
+            .classed( 'hoverline', true );
+
+        // create vertical line to axis - if not on the x axis
+        if( datapoint.attr( 'cy' ) < config.height ){
+            content.append( 'line' )
+                .attr( 'stroke', 'red' )
+                .attr( 'stroke-width', 1 )
+                .attr( 'x1', datapoint.attr( 'cx' ) )
+                // attributes are strings so (according to js) '3' - 1 = 2 but '3' + 1 = '31': coerce
+                .attr( 'y1', +datapoint.attr( 'cy' ) + config.datapointSize )
+                .attr( 'x2', datapoint.attr( 'cx' ) )
+                .attr( 'y2', config.height )
+                .classed( 'hoverline', true );
+        }
+
+        // show the info box and trigger an event
+        var bbox = this.getBoundingClientRect();
+        $( 'body' ).append( infoBox( bbox.top, bbox.right, d ) );
+        $( svg.node() ).trigger( 'mouseover-datapoint.scatterplot', [ this, d, i ] );
+    });
+
+    datapoints.on( 'mouseout', function(){
+        // return the point to normal, remove hoverlines and info box
+        d3.select( this )
+            .classed( 'highlight', false )
+            .style( 'fill', 'black' )
+            .style( 'fill-opacity', 0.2 );
+        content.selectAll( '.hoverline' ).remove();
+        $( '.chart-info-box' ).remove();
+    });
+}
+
+//==============================================================================
diff --git a/config/plugins/visualizations/scatterplot/static/numeric-column-stats.js b/config/plugins/visualizations/scatterplot/static/numeric-column-stats.js
new file mode 100644
index 0000000..8610c34
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/static/numeric-column-stats.js
@@ -0,0 +1,89 @@
+var numericColumnStats = function( data, keys ){
+    // use the worker global's console if present ('var console = console || ...'
+    // always shadows the global due to hoisting, so go through self)
+    var console = self.console || { debug: function(){} },
+        stats = {};
+
+    // precondition: keys is an array of keys accessible on each data in data
+    if( !keys || !keys.length ){
+        throw new Error( 'keys is a required parameter and must be an array:' + keys );
+    }
+    if( !data || !data.length ){
+        return stats;
+    }
+
+    function parseVal( val ){
+        if( val === null ){
+            throw new Error( 'Null value' );
+        }
+        val = Number( val );
+        if( isNaN( val ) || !isFinite( val ) ){
+            throw new Error( 'NaN or non-finite number' );
+        }
+        return val;
+    }
+
+    // does this affect the data in the other thread?
+    [ 'min', 'max', 'sum', 'mean', 'median', 'count' ].forEach( function( name, i ){
+        stats[ name ] = new Array( keys.length );
+    });
+
+    var keyIndex = 0,
+        separatedCols = new Array( keys.length );
+    keys.forEach( function( key, keyIndex ){
+        // seed min/max so the first parsed value always wins (seeding with 0 would skew
+        // all-positive or all-negative columns)
+        stats.min[ keyIndex ] = Number.POSITIVE_INFINITY;
+        stats.max[ keyIndex ] = Number.NEGATIVE_INFINITY;
+        stats.sum[ keyIndex ] = 0;
+        separatedCols[ keyIndex ] = [];
+    });
+
+    // work backwards to prevent co-modification problems while splitting up data into columns
+    for( var dataIndex=( data.length - 1 ); dataIndex>=0; dataIndex-=1 ){
+        // pop() takes no arguments - it always removes the last element
+        var datum = data.pop();
+
+        for( keyIndex=0; keyIndex<keys.length; keyIndex+=1 ){
+            var key = keys[ keyIndex ],
+                datumColVal = datum[ key ];
+
+            try {
+                //NOTE: parsing value as number
+                datumColVal = parseVal( datumColVal );
+            } catch( e ){
+                continue;
+            }
+
+            // separate the columns
+            separatedCols[ keyIndex ].unshift( datumColVal );
+            // get the other stats
+            stats.min[ keyIndex ] = Math.min( stats.min[ keyIndex ], datumColVal );
+            stats.max[ keyIndex ] = Math.max( stats.max[ keyIndex ], datumColVal );
+            stats.sum[ keyIndex ] += datumColVal;
+        }
+    }
+
+    // get counts, mean, median
+    function comparator( a, b ){
+        if( a < b ){ return -1; }
+        if( a > b ){ return 1; }
+        return 0;
+    }
+
+    for( keyIndex=0; keyIndex<keys.length; keyIndex+=1 ){
+        var count = separatedCols[ keyIndex ].length,
+            sum = stats.sum[ keyIndex ];
+        stats.count[ keyIndex ] = count;
+        stats.mean[ keyIndex ] = ( sum / count );
+
+        // sort columns for median
+        separatedCols[ keyIndex ].sort( comparator );
+
+        // odd count -> straightforward median
+        var middleDataIndex = Math.floor( count / 2 );
+        if( count % 2 === 1 ){
+            stats.median[ keyIndex ] = separatedCols[ keyIndex ][ middleDataIndex ];
+        } else {
+            // even count -> average the two middle values
+            var middleValA = separatedCols[ keyIndex ][ middleDataIndex - 1 ],
+                middleValB = separatedCols[ keyIndex ][ middleDataIndex ];
+            stats.median[ keyIndex ] = ( middleValA + middleValB ) / 2;
+        }
+    }
+    return stats;
+};
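+
+// Example (sketch): calling the function directly, outside a worker. Note that it
+// consumes `data` destructively via pop():
+//   var stats = numericColumnStats( [ [ 1, 4 ], [ 2, 5 ], [ 3, 6 ] ], [ 0, 1 ] );
+//   // stats.count -> [ 3, 3 ], stats.mean -> [ 2, 5 ], stats.median -> [ 2, 5 ]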
diff --git a/config/plugins/visualizations/scatterplot/static/scatterplot-edit.js b/config/plugins/visualizations/scatterplot/static/scatterplot-edit.js
new file mode 100644
index 0000000..476d23f
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/static/scatterplot-edit.js
@@ -0,0 +1 @@
+function scatterplot(a,b,c){function d(){var a={v:{},h:{}};return a.v.lines=p.selectAll("line.v-grid-line").data(m.x.ticks(q.x.fn.ticks()[0])),a.v.lines.enter().append("svg:line").classed("grid-line v-grid-line",!0),a.v.lines.attr("x1",m.x).attr("x2",m.x).attr("y1",0).attr("y2",b.height),a.v.lines.exit().remove(),a.h.lines=p.selectAll("line.h-grid-line").data(m.y.ticks(q.y.fn.ticks()[0])),a.h.lines.enter().append("svg:line").classed("grid-line h-grid-line",!0),a.h.lines.attr("x1",0).attr [...]
\ No newline at end of file
diff --git a/config/plugins/visualizations/scatterplot/static/scatterplot.css b/config/plugins/visualizations/scatterplot/static/scatterplot.css
new file mode 100644
index 0000000..670156b
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/static/scatterplot.css
@@ -0,0 +1,225 @@
+* { margin: 0; padding: 0; }
+
+/* -------------------------------------------- header */
+.chart-header {
+    margin-bottom: 16px;
+    overflow: auto;
+    background-color: #ebd9b2;
+    padding : 12px 12px 4px 12px;
+}
+
+.chart-header h2 {
+    margin-top: 0px;
+    margin-bottom: 4px;
+}
+
+.chart-header h2:hover {
+    text-decoration: underline;
+    cursor: pointer;
+}
+
+.chart-header p {
+    color: #555;
+    font-size: small;
+}
+
+figcaption {
+    margin-bottom: 8px;
+    font-size: 120%;
+}
+figcaption .title {
+    display: block;
+    font-weight: bold;
+}
+figcaption .title-info {
+    display: block;
+    font-style: italic;
+}
+
+/* -------------------------------------------- main layout */
+.scatterplot-editor > * { margin: 0; padding: 0; }
+.scatterplot-editor .tab-pane {
+    padding-top: 8px;
+}
+
+.scatterplot-editor .help-text,
+.scatterplot-editor .help-text-small {
+    color: grey;
+    margin-bottom: 8px;
+}
+
+.scatterplot-editor .help-text {
+    margin-top: 4px;
+}
+
+.scatterplot-editor .help-text-small {
+    font-size: smaller;
+}
+
+.scatterplot-editor .file-controls {
+    float: right;
+    /*margin-right: 16px;*/
+}
+
+.file-controls .btn {
+    height: 24px;
+    padding: 0px 10px 0px 10px;
+    line-height: 20px;
+}
+
+.file-controls .copy-btn {
+    display: none;
+}
+
+
+/* ============================================ config controls */
+.scatterplot-config-control {
+    max-width: 768px;
+}
+
+.scatterplot-config-control input[type="button"],
+.scatterplot-config-control button,
+.scatterplot-config-control select {
+    width: 100%;
+    max-width: 256px;
+    margin-bottom: 8px;
+}
+.scatterplot-config-control input[type="text"] {
+    width: 100%;
+    max-width: 512px;
+    border: 1px solid lightgrey;
+}
+
+.scatterplot-config-control input[type="checkbox"] {
+    display: inline-block;
+    margin-top: -3px;
+}
+
+.scatterplot-config-control .render-button {
+    display: block;
+    margin-top: 16px;
+}
+
+.scatterplot-config-control .form-input {
+    margin-bottom: 8px;
+}
+
+.scatterplot-config-control .slider {
+    height: 8px;
+    display: block;
+    margin-bottom: 8px;
+}
+
+.scatterplot-config-control .slider-output {
+    float: right;
+}
+
+/* ============================================ plot display */
+/* -------------------------------------------- load indicators */
+.scatterplot-display .loading-indicator {
+    color: grey;
+}
+
+.scatterplot-display .loading-indicator .loading-indicator-message {
+    display: inline-block;
+    margin-left: 4px;
+    font-style: italic;
+}
+
+/* -------------------------------------------- display controls */
+.scatterplot-display .controls {
+    width: 100%;
+    margin-bottom: 4px;
+    border: 1px solid lightgrey;
+    border-radius: 3px;
+    padding: 8px 8px 4px 8px;
+    overflow: auto;
+}
+
+.scatterplot-display .controls button {
+    display: inline-block;
+    min-width: 0;
+    width: auto;
+    margin: 0px;
+}
+
+.scatterplot-display .pagination-scroll-container {
+    max-width: 440px;
+    display: inline-block;
+}
+
+.scatterplot-display .controls .left {
+    /*no shrinkwrap, but collapses well (with scroll)*/
+    /*display: inline;*/
+    /*will shrinkwrap*/
+    display: inline-block;
+    /*display: table-cell;*/
+}
+
+.scatterplot-display .controls .right {
+    float: right;
+    margin-bottom: 4px;
+}
+
+.scatterplot-display .controls p {
+    display: inline-block;
+    margin: 0px 4px 0px 4px;
+    /*to better match the buttons*/
+    vertical-align: middle;
+    line-height: 24px;
+}
+
+.scatterplot-display button.rerender-btn {
+    margin-left: 4px;
+}
+
+.scatterplot-display button.stats-toggle-btn {
+    margin-left: 4px;
+}
+
+
+/* -------------------------------------------- chart area */
+svg.scatterplot {
+    border: 1px solid lightgrey;
+    border-radius: 3px;
+    /* doesn't work */
+    overflow: auto;
+}
+
+svg.scatterplot .grid-line {
+    fill: none;
+    stroke: lightgrey;
+    stroke-opacity: 0.2;
+    shape-rendering: crispEdges;
+}
+
+svg.scatterplot .axis path,
+svg.scatterplot .axis line {
+    fill: none;
+    stroke: black;
+    shape-rendering: crispEdges;
+}
+
+svg.scatterplot .axis text {
+    font-size: 9px;
+}
+
+svg.scatterplot .axis-label {
+    font-family: sans-serif;
+    font-size: 13px;
+}
+
+svg.scatterplot .glyph {
+    stroke: none;
+    fill: black;
+    fill-opacity: 0.2;
+}
+
+/* -------------------------------------------- info box */
+.chart-info-box {
+    border-radius: 4px;
+    padding: 4px;
+    background-color: white;
+    border: 1px solid black;
+}
+
diff --git a/config/plugins/visualizations/scatterplot/static/worker-stats.js b/config/plugins/visualizations/scatterplot/static/worker-stats.js
new file mode 100644
index 0000000..3153746
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/static/worker-stats.js
@@ -0,0 +1,5 @@
+onmessage = function( event ){
+    importScripts( 'numeric-column-stats.js' );
+    postMessage( numericColumnStats( event.data.data, event.data.keys ) );
+    self.close();
+};
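+
+// The editor side posts messages shaped like { data: <rows>, keys: [ xColumn, yColumn ] }
+// (see ScatterplotDisplay.getStats above) and receives the computed stats object back.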
diff --git a/config/plugins/visualizations/scatterplot/templates/scatterplot.mako b/config/plugins/visualizations/scatterplot/templates/scatterplot.mako
new file mode 100644
index 0000000..036ffc1
--- /dev/null
+++ b/config/plugins/visualizations/scatterplot/templates/scatterplot.mako
@@ -0,0 +1,113 @@
+<%
+    default_title = "Scatterplot of '" + hda.name + "'"
+    info = hda.name
+    if hda.info:
+        info += ' : ' + hda.info
+
+    # optionally bootstrap data from dprov
+    ##data = list( hda.datatype.dataset_column_dataprovider( hda, limit=10000 ) )
+
+    # Use root for resource loading.
+    root = h.url_for( '/' )
+%>
+## ----------------------------------------------------------------------------
+
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<title>${title or default_title} | ${visualization_display_name}</title>
+
+## ----------------------------------------------------------------------------
+${h.css( 'base', 'jquery-ui/smoothness/jquery-ui')}
+${h.stylesheet_link( root + 'plugins/visualizations/scatterplot/static/scatterplot.css' )}
+
+## ----------------------------------------------------------------------------
+<script type="text/javascript">
+// TODO: blah
+window.Galaxy = { root: '${ root }' };
+</script>
+${h.js( 'libs/jquery/jquery',
+        'libs/jquery/jquery.migrate',
+        'libs/jquery/jquery-ui',
+        'libs/bootstrap',
+        'libs/underscore',
+        'libs/backbone',
+        'libs/d3',
+        'ui/peek-column-selector',
+        'ui/pagination',
+        'mvc/visualization/visualization-model' )}
+
+${h.javascript_link( root + 'plugins/visualizations/scatterplot/static/scatterplot-edit.js' )}
+
+<script type="text/javascript">
+function getModel(){
+    return new ScatterplotModel({
+        id      : ${h.dumps( visualization_id )} || undefined,
+        title   : "${title or default_title}",
+        config  : ${h.dumps( config, indent=2 )}
+    });
+}
+function getHDAJSON(){
+    return ${h.dumps( trans.security.encode_dict_ids( hda.to_dict() ), indent=2 )};
+}
+</script>
+
+</head>
+
+## ----------------------------------------------------------------------------
+<body>
+    %if embedded and saved_visualization:
+        <figcaption>
+            <span class="title">${title}</span>
+            <span class="title-info">${info}</span>
+        </figcaption>
+        <figure class="scatterplot-display"></figure>
+
+        <script type="text/javascript">
+        $(function(){
+            var display = new ScatterplotDisplay({
+                    el      : $( '.scatterplot-display' ).attr( 'id', 'scatterplot-display-' + '${visualization_id}' ),
+                    model   : getModel(),
+                    dataset : getHDAJSON(),
+                    embedded: "${embedded}"
+                }).render();
+            display.fetchData();
+            //window.model = model;
+            //window.display = display;
+        });
+        </script>
+
+    %else:
+        <div class="chart-header">
+            <h2>${title or default_title}</h2>
+            <p>${info}</p>
+        </div>
+
+        <div class="scatterplot-editor"></div>
+        <script type="text/javascript">
+        $(function(){
+            var model = getModel(),
+                hdaJSON = getHDAJSON(),
+                editor  = new ScatterplotConfigEditor({
+                    el      : $( '.scatterplot-editor' ).attr( 'id', 'scatterplot-editor-hda-' + hdaJSON.id ),
+                    model   : model,
+                    dataset : hdaJSON
+                }).render();
+            window.editor = editor;
+
+            $( '.chart-header h2' ).click( function(){
+                var returned = prompt( 'Enter a new title:' );
+                if( returned ){
+                    model.set( 'title', returned );
+                }
+            });
+            model.on( 'change:title', function(){
+                $( '.chart-header h2' ).text( model.get( 'title' ) );
+                document.title = model.get( 'title' ) + ' | ' + '${visualization_display_name}';
+            })
+        });
+        </script>
+    %endif
+
+</body>
diff --git a/config/plugins/visualizations/sweepster/config/sweepster.xml b/config/plugins/visualizations/sweepster/config/sweepster.xml
new file mode 100644
index 0000000..647e2bc
--- /dev/null
+++ b/config/plugins/visualizations/sweepster/config/sweepster.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="Sweepster" disabled="true">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+            <to_param param_attr="id">dataset_id</to_param>
+            <to_param assign="hda">hda_ldda</to_param>
+        </data_source>
+        <data_source>
+            <model_class>LibraryDatasetDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+            <to_param param_attr="id">dataset_id</to_param>
+            <to_param assign="ldda">hda_ldda</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="visualization" var_name_in_template="viz">visualization</param>
+        <param type="hda_or_ldda" var_name_in_template="dataset">dataset_id</param>
+        <param_modifier type="string" modifies="dataset_id">hda_ldda</param_modifier>
+    </params>
+    <entry_point entry_point_type="mako">sweepster.mako</entry_point>
+    <render_target>_top</render_target>
+</visualization>
diff --git a/config/plugins/visualizations/trackster/config/trackster.xml b/config/plugins/visualizations/trackster/config/trackster.xml
new file mode 100644
index 0000000..2d63a12
--- /dev/null
+++ b/config/plugins/visualizations/trackster/config/trackster.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="Trackster">
+    <!--not tested yet -->
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="not_eq" test_attr="datatype.track_type">None</test>
+            <to_param param_attr="id">dataset_id</to_param>
+            <to_param assign="hda">hda_ldda</to_param>
+            <to_param param_attr="dbkey">dbkey</to_param>
+        </data_source>
+        <data_source>
+            <model_class>LibraryDatasetDatasetAssociation</model_class>
+            <test type="not_eq" test_attr="datatype.track_type">None</test>
+            <to_param param_attr="id">dataset_id</to_param>
+            <to_param assign="ldda">hda_ldda</to_param>
+            <to_param param_attr="dbkey">dbkey</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="visualization">id</param>
+        <param type="dataset">dataset_id</param>
+        <param type="genome_region">genome_region</param>
+        <param type="dbkey">dbkey</param>
+    </params>
+    <entry_point entry_point_type="mako">browser.mako</entry_point>
+    <render_target>_top</render_target>
+</visualization>
diff --git a/config/plugins/visualizations/visualization.dtd b/config/plugins/visualizations/visualization.dtd
new file mode 100644
index 0000000..e01aa7c
--- /dev/null
+++ b/config/plugins/visualizations/visualization.dtd
@@ -0,0 +1,163 @@
+<!-- each visualization must have an entry_point (all other elements are optional) -->
+<!ELEMENT visualization (
+  description*,
+  data_sources*,
+  params*,
+  template_root*,
+  entry_point,
+  render_target*
+)>
+<!-- visualization
+        name: the title/display name of the visualization (e.g. 'Trackster', 'Fastq Stats', etc.) REQUIRED
+        disabled: if included (value does not matter), this attribute will prevent the visualization from being loaded
+        embeddable: if included (value does not matter), indicates that this visualization can be rendered as a DOM
+                fragment and won't render to a full page when passed the variable 'embedded' in the query string.
+                DEFAULT false.
+-->
+<!ATTLIST visualization
+    name        CDATA #REQUIRED
+    disabled    CDATA #IMPLIED
+    embeddable  CDATA #IMPLIED
+>
+
+<!ELEMENT interactive_environment (
+  description*,
+  data_sources*,
+  params*,
+  template_root*,
+  entry_point,
+  render_target*
+)>
+<!ATTLIST interactive_environment
+    name        CDATA #REQUIRED
+    disabled    CDATA #IMPLIED
+    embeddable  CDATA #IMPLIED
+>
+
+
+<!ELEMENT description (#PCDATA)>
+<!-- a text description of what the visualization does -->
+
+<!ELEMENT data_sources (data_source*)>
+<!-- data sources are elements that describe what objects (HDAs, LDDAs, Job, User, etc.)
+        are applicable to a visualization. Often these are used to fetch applicable links
+        to the visualizations that use them.
+-->
+  <!ELEMENT data_source (model_class,(test|to_param)*)>
+    <!ELEMENT model_class (#PCDATA)>
+    <!-- model_class is currently the class name of the object you want to make a visualization
+          applicable to (e.g. HistoryDatasetAssociation). Currently only classes in galaxy.model
+          can be used.
+          REQUIRED and currently limited to: 'HistoryDatasetAssociation', 'LibraryDatasetDatasetAssociation'
+    -->
+    <!ELEMENT test (#PCDATA)>
+    <!-- tests define the conditions under which the visualization can be applied to the model_class/target.
+          Currently, all tests are OR'd and there is no logical grouping. Tests are run in order.
+        (text): the text of this element is what the given target will be compared to (REQUIRED)
+        type: what type of test to run (e.g. when the target is an HDA, the test will often be of type 'isinstance'
+                and check whether the HDA's datatype is an instance of a class).
+                See lib/galaxy/visualizations/registry.py, DataSourceParser.parse_tests for test type options.
+                DEFAULT: string comparison.
+        test_attr: what attribute of the target object should be used in the test. For instance, 'datatype'
+                will attempt to get the HDA.datatype from a target HDA. If the given object doesn't have
+                that attribute the test will fail (with no error). test_attr can be dot separated attributes,
+                looking up each in turn. For example, if the target was a history, one could access the
+                history.user.email by setting test_attr to 'user.email' (why you would want that, I don't know)
+                DEFAULT: compare the object itself (and not any of its attributes)
+        result_type: if the result (the text of the element mentioned above) needs to be parsed into
+                something other than a string, result_type will tell the registry how to do this. E.g.
+                if result_type is 'datatype' the registry will assume the text is a datatype class name
+                and parse it into the proper class before the test (often 'isinstance') is run.
+                DEFAULT: no parsing (result should be a string)
+    -->
+    <!ATTLIST test
+        type        CDATA #IMPLIED
+        test_attr   CDATA #IMPLIED
+        result_type CDATA #IMPLIED
+    >
+
+    <!ELEMENT to_param (#PCDATA)>
+    <!-- to_param tells the registry how to parse the data_source into a query string param.
+          For example, HDA data_sources can set the to_param text to 'dataset_id' and param_attr to 'id', and
+            the target HDA (if it passes the tests) will be passed as "dataset_id=HDA.id"
+        (text): the query string param key this source will be parsed into (e.g. dataset_id)
+                REQUIRED
+        param_attr: the attribute of the data_source object to use as the value in the query string param.
+                E.g. param_attr='id' for an HDA data_source would use the (encoded) id.
+                NOTE: a to_param MUST have either a param_attr or assign
+        assign: you can use this to directly assign a value to a query string's param. E.g. if the
+                data_source is a LDDA we can set 'hda_or_ldda=ldda' using assign='ldda'.
+                NOTE: a to_param MUST have either a param_attr or assign
+    -->
+    <!ATTLIST to_param
+        param_attr  CDATA #IMPLIED
+        assign      CDATA #IMPLIED
+    >
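+    <!-- Example (taken from sweepster.xml above): an HDA data_source with one test and two to_params:
+          <data_source>
+              <model_class>HistoryDatasetAssociation</model_class>
+              <test type="isinstance" test_attr="datatype" result_type="datatype">data.Data</test>
+              <to_param param_attr="id">dataset_id</to_param>
+              <to_param assign="hda">hda_ldda</to_param>
+          </data_source>
+    -->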
+
+<!ELEMENT params ((param|param_modifier)*)>
+<!-- params describe what data will be sent to a visualization template and
+      how to convert them from a query string in a URL into variables usable in a template.
+      For example, "dataset_id=NNN" in a query string can be parsed back into the dataset it refers to.
+     param_modifiers are a special class of parameters that modify other params
+      (e.g. hda_ldda can be 'hda' or 'ldda' and modifies/informs dataset_id to fetch an HDA or LDDA)
+-->
+  <!ELEMENT param (#PCDATA)>
+  <!-- param tells the registry how to parse the query string param back into a resource/data_source.
+        For example, if a query string has "dataset_id=NNN" and the type is 'dataset', the registry
+        will attempt to fetch the hda with id of NNN from the database and pass it to the template.
+      (text): the query string param key this source will be parsed from (e.g. dataset_id)
+              REQUIRED
+      type: the type of the resource.
+              Can be: str (DEFAULT), bool, int, float, json, visualization, dbkey, dataset, or hda_ldda.
+      default: if a param is not passed on the query string (and is not required) OR the given param
+              fails to parse, this value is used instead.
+              DEFAULT: None
+      required: set this to true if the param is required for the template. Rendering will fail with an error
+              if the param hasn't been sent.
+              DEFAULT: false
+      csv: set this to true if the param is a comma separated list. The registry will attempt to
+              parse each value as the given type and send the result as a list to the template.
+              DEFAULT: false
+      constrain_to: (currently unused) constrain a param to a set of values; error if the value is not valid.
+              DEFAULT: don't constrain
+      var_name_in_template: a new name for the resource/variable to use in the template. E.g. an initial
+              query string param key might be 'dataset_id' in the URL, the registry parses it into an HDA,
+              and if var_name_in_template is set to 'hda', the template will be able to access the HDA
+              with the variable name 'hda' (as in hda.title).
+              DEFAULT: keep the original query string name
+  -->
+  <!ATTLIST param
+      type        CDATA #IMPLIED
+      default     CDATA #IMPLIED
+      required    CDATA #IMPLIED
+      csv         CDATA #IMPLIED
+      constrain_to          CDATA #IMPLIED
+      var_name_in_template  CDATA #IMPLIED
+  >
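+  <!-- As an illustration (the attribute values are hypothetical), a param that parses
+        "dataset_id=..." back into an HDA exposed to the template as the variable 'hda':
+          <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+  -->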
+  <!-- param_modifiers are the same as param but have a REQUIRED 'modifies' attribute.
+        'modifies' must point to the param name (the text part of param element) that it will modify.
+        E.g. <param_modifier modifies="dataset_id">hda_ldda</param_modifier>
+  -->
+  <!ELEMENT param_modifier (#PCDATA)>
+  <!ATTLIST param_modifier
+      modifies    CDATA #REQUIRED
+      type        CDATA #IMPLIED
+      default     CDATA #IMPLIED
+      required    CDATA #IMPLIED
+      csv         CDATA #IMPLIED
+      constrain_to          CDATA #IMPLIED
+      var_name_in_template  CDATA #IMPLIED
+  >
+
+<!-- template: the template used to render the visualization. DEPRECATED -->
+<!-- <!ELEMENT template (#PCDATA)> -->
+<!-- entry_point: the method the registry will use to begin rendering the visualization - a loading point. REQUIRED -->
+<!ELEMENT entry_point (#PCDATA)>
+<!ATTLIST entry_point
+    entry_point_type  CDATA #REQUIRED
+>
+<!-- render_target: used as the target attribute of the link to the visualization.
+      Can be 'galaxy_main', '_top', '_blank'. DEFAULT: 'galaxy_main'
+-->
+<!-- TODO: rename -> render_target -->
+<!ELEMENT render_target (#PCDATA)>
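+
+<!-- Putting the pieces together, a minimal sketch of a config this DTD describes.
+      The plugin name, datatype, and template file below are illustrative assumptions,
+      not values mandated by the DTD:
+
+  <visualization name="example_scatterplot">
+      <data_sources>
+          <data_source>
+              <model_class>HistoryDatasetAssociation</model_class>
+              <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+              <to_param param_attr="id">dataset_id</to_param>
+          </data_source>
+      </data_sources>
+      <params>
+          <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+      </params>
+      <entry_point entry_point_type="mako">scatterplot.mako</entry_point>
+  </visualization>
+-->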
diff --git a/config/plugins/webhooks/demo/history_test1/config/history_test1.yml b/config/plugins/webhooks/demo/history_test1/config/history_test1.yml
new file mode 100644
index 0000000..46d362d
--- /dev/null
+++ b/config/plugins/webhooks/demo/history_test1/config/history_test1.yml
@@ -0,0 +1,5 @@
+name: history_test1
+title: History Menu Webhook Item 1
+type: 
+  - history-menu
+activate: true
diff --git a/config/plugins/webhooks/demo/history_test2/config/history_test2.yml b/config/plugins/webhooks/demo/history_test2/config/history_test2.yml
new file mode 100644
index 0000000..7b071e4
--- /dev/null
+++ b/config/plugins/webhooks/demo/history_test2/config/history_test2.yml
@@ -0,0 +1,5 @@
+name: history_test2
+title: History Menu Webhook Item 2
+type: 
+  - history-menu
+activate: true
diff --git a/config/plugins/webhooks/demo/masthead_test/config/masthead_test.yml b/config/plugins/webhooks/demo/masthead_test/config/masthead_test.yml
new file mode 100644
index 0000000..935da0e
--- /dev/null
+++ b/config/plugins/webhooks/demo/masthead_test/config/masthead_test.yml
@@ -0,0 +1,10 @@
+name: masthead_test
+type: 
+  - masthead
+activate: true
+
+icon: fa-star-half-o
+tooltip: Biostars Galaxy
+
+function: >
+  window.open('https://biostar.usegalaxy.org', '_blank');
diff --git a/config/plugins/webhooks/demo/phdcomics/config/phdcomics.yaml b/config/plugins/webhooks/demo/phdcomics/config/phdcomics.yaml
new file mode 100644
index 0000000..f66a29a
--- /dev/null
+++ b/config/plugins/webhooks/demo/phdcomics/config/phdcomics.yaml
@@ -0,0 +1,5 @@
+name: phdcomics
+type:
+  - tool
+  - workflow
+activate: true
diff --git a/config/plugins/webhooks/demo/phdcomics/helper/__init__.py b/config/plugins/webhooks/demo/phdcomics/helper/__init__.py
new file mode 100644
index 0000000..94073b9
--- /dev/null
+++ b/config/plugins/webhooks/demo/phdcomics/helper/__init__.py
@@ -0,0 +1,43 @@
+import urllib
+import re
+import random
+import logging
+
+log = logging.getLogger(__name__)
+
+
+def main(trans, webhook):
+    error = ''
+    data = {}
+
+    try:
+        # Third-party dependencies
+        try:
+            from bs4 import BeautifulSoup
+        except ImportError as e:
+            log.exception(e)
+            return {}
+
+        # Get the id of the latest comic from the RSS feed and cache it
+        if 'latest_id' not in webhook.config:
+            url = 'http://phdcomics.com/gradfeed.php'
+            content = urllib.urlopen(url).read()
+            soup = BeautifulSoup(content, 'html.parser')
+            pattern = r'(?:http://www\.phdcomics\.com/comics\.php\?f=)(\d+)'
+            webhook.config['latest_id'] = max([
+                int(re.search(pattern, link.text).group(1))
+                for link in soup.find_all('link', text=re.compile(pattern))
+            ])
+
+        # Pick a random comic and scrape its image URL from the archive page
+        random_id = random.randint(1, webhook.config['latest_id'])
+        url = 'http://www.phdcomics.com/comics/archive.php?comicid=%d' % \
+            random_id
+        content = urllib.urlopen(url).read()
+        soup = BeautifulSoup(content, 'html.parser')
+        comics_src = soup.find_all('img', id='comic')[0].attrs.get('src')
+        data = {'src': comics_src}
+
+    except Exception as e:
+        error = str(e)
+
+    return {'success': not error, 'error': error, 'data': data}
diff --git a/config/plugins/webhooks/demo/phdcomics/static/script.js b/config/plugins/webhooks/demo/phdcomics/static/script.js
new file mode 100644
index 0000000..f8c44e5
--- /dev/null
+++ b/config/plugins/webhooks/demo/phdcomics/static/script.js
@@ -0,0 +1,55 @@
+$(document).ready(function() {
+
+    var galaxyRoot = typeof Galaxy != 'undefined' ? Galaxy.root : '/';
+
+    var PHDComicsAppView = Backbone.View.extend({
+        el: '#phdcomics',
+
+        appTemplate: _.template(
+            '<div id="phdcomics-header">' +
+                '<div id="phdcomics-name">PHD Comics</div>' +
+                '<button id="phdcomics-random">Random</button>' +
+            '</div>' +
+            '<div id="phdcomics-img"></div>'
+        ),
+
+        imgTemplate: _.template('<img src="<%= src %>">'),
+
+        events: {
+            'click #phdcomics-random': 'getRandomPHDComics'
+        },
+
+        initialize: function() {
+            this.render();
+        },
+
+        render: function() {
+            this.$el.html(this.appTemplate());
+            this.phdComicsImg = this.$('#phdcomics-img');
+            this.getRandomPHDComics();
+            return this;
+        },
+
+        getRandomPHDComics: function() {
+            var me = this,
+                url = galaxyRoot + 'api/webhooks/phdcomics/get_data';
+
+            this.phdComicsImg.html($('<div/>', {id: 'phdcomics-loader'}));
+            $.getJSON(url, function(data) {
+                if (data.success) {
+                    me.phdComics = {src: data.data.src};
+                    me.renderImg();
+                } else {
+                    console.log('[ERROR] "' + url + '":\n' + data.error);
+                }
+            });
+        },
+
+        renderImg: function() {
+            this.phdComicsImg.html(this.imgTemplate({src: this.phdComics.src}));
+        }
+    });
+
+    var PHDComicsApp = new PHDComicsAppView;
+
+});
diff --git a/config/plugins/webhooks/demo/phdcomics/static/styles.css b/config/plugins/webhooks/demo/phdcomics/static/styles.css
new file mode 100644
index 0000000..6b52e57
--- /dev/null
+++ b/config/plugins/webhooks/demo/phdcomics/static/styles.css
@@ -0,0 +1,66 @@
+#phdcomics {
+    border: 1px solid #52697d;
+    text-align: center;
+    border-radius: 3px;
+    overflow: hidden;
+}
+
+#phdcomics-header {
+    background: #52697d;
+    border-bottom: 1px solid #52697d;
+    padding: 15px 0;
+}
+
+#phdcomics-name {
+    color: #fff;
+    padding-bottom: 10px;
+}
+
+#phdcomics-header button {
+    color: #fff;
+    font-size: 14px;
+    background-color: #768fa5;
+    border: none;
+    border-radius: 7px;
+    box-shadow: 0 5px #5c768c;
+    padding: 5px 10px;
+}
+
+#phdcomics-header button:focus {
+    outline: 0;
+}
+
+#phdcomics-header button:hover {
+    background-color: #67839b;
+}
+
+#phdcomics-header button:active {
+    background-color: #67839b;
+    box-shadow: 0 0 #5c768c;
+    transform: translateY(5px);
+}
+
+#phdcomics-img {
+    background: #fff;
+}
+
+#phdcomics-img img {
+    padding: 10px;
+    max-width: 100%;
+    margin-bottom: -4px;
+}
+
+#phdcomics-loader {
+    border: 5px solid #f3f3f3;
+    border-top: 5px solid #52697d;
+    border-radius: 50%;
+    width: 25px;
+    height: 25px;
+    animation: spin 1.5s linear infinite;
+    margin: 15px auto;
+}
+
+@keyframes spin {
+    0% { transform: rotate(0deg); }
+    100% { transform: rotate(360deg); }
+}
diff --git a/config/plugins/webhooks/demo/trans_object/config/trans_object.yaml b/config/plugins/webhooks/demo/trans_object/config/trans_object.yaml
new file mode 100644
index 0000000..ec8575b
--- /dev/null
+++ b/config/plugins/webhooks/demo/trans_object/config/trans_object.yaml
@@ -0,0 +1,12 @@
+name: trans_object
+type: 
+  - masthead
+activate: true
+
+icon: fa-user
+tooltip: Show Username
+
+function: >
+  $.getJSON("/api/webhooks/trans_object/get_data", function(data) {
+    alert('Username: ' + data.username);
+  });
diff --git a/config/plugins/webhooks/demo/trans_object/helper/__init__.py b/config/plugins/webhooks/demo/trans_object/helper/__init__.py
new file mode 100644
index 0000000..cbc8528
--- /dev/null
+++ b/config/plugins/webhooks/demo/trans_object/helper/__init__.py
@@ -0,0 +1,6 @@
+def main(trans, webhook):
+    if trans.user:
+        user = trans.user.username
+    else:
+        user = 'No user is logged in.'
+    return {'username': user}
diff --git a/config/plugins/webhooks/demo/xkcd/config/xkcd.yml b/config/plugins/webhooks/demo/xkcd/config/xkcd.yml
new file mode 100644
index 0000000..c8ce604
--- /dev/null
+++ b/config/plugins/webhooks/demo/xkcd/config/xkcd.yml
@@ -0,0 +1,5 @@
+name: xkcd
+type:
+  - tool
+  - workflow
+activate: true
diff --git a/config/plugins/webhooks/demo/xkcd/static/script.js b/config/plugins/webhooks/demo/xkcd/static/script.js
new file mode 100644
index 0000000..3621397
--- /dev/null
+++ b/config/plugins/webhooks/demo/xkcd/static/script.js
@@ -0,0 +1,56 @@
+$(document).ready(function() {
+
+    var XkcdAppView = Backbone.View.extend({
+        el: '#xkcd',
+
+        appTemplate: _.template(
+            '<div id="xkcd-header">' +
+                '<div id="xkcd-name">xkcd</div>' +
+                '<button id="xkcd-random">Random</button>' +
+            '</div>' +
+            '<div id="xkcd-img"></div>'
+        ),
+
+        imgTemplate: _.template('<img src="<%= img %>" alt="<%= alt %>" title="<%= title %>">'),
+
+        events: {
+            'click #xkcd-random': 'getRandomXkcd'
+        },
+
+        initialize: function() {
+            var me = this;
+
+            this.render();
+
+            // Get id of the last xkcd
+            $.getJSON('http://dynamic.xkcd.com/api-0/jsonp/comic?callback=?', function(data) {
+                me.latestXkcdId = data.num;
+                me.getRandomXkcd();
+            });
+        },
+
+        render: function() {
+            this.$el.html(this.appTemplate());
+            this.xkcdImg = this.$('#xkcd-img');
+            return this;
+        },
+
+        getRandomXkcd: function() {
+            var me = this,
+                randomId = Math.floor(Math.random() * this.latestXkcdId) + 1;
+
+            this.xkcdImg.html($('<div/>', {id: 'xkcd-loader'}));
+            $.getJSON('http://dynamic.xkcd.com/api-0/jsonp/comic/' + randomId + '?callback=?', function(data) {
+                me.xkcd = {img: data.img, alt: data.alt, title: data.title};
+                me.renderImg();
+            });
+        },
+
+        renderImg: function() {
+            this.xkcdImg.html(this.imgTemplate({img: this.xkcd.img, alt: this.xkcd.alt, title: this.xkcd.title}));
+        }
+    });
+
+    var XkcdApp = new XkcdAppView;
+
+});
diff --git a/config/plugins/webhooks/demo/xkcd/static/styles.css b/config/plugins/webhooks/demo/xkcd/static/styles.css
new file mode 100644
index 0000000..f20a4fa
--- /dev/null
+++ b/config/plugins/webhooks/demo/xkcd/static/styles.css
@@ -0,0 +1,66 @@
+#xkcd {
+    border: 1px solid #96a8c8;
+    text-align: center;
+    border-radius: 3px;
+    overflow: hidden;
+}
+
+#xkcd-header {
+    background: #96a8c8;
+    border-bottom: 1px solid #96a8c8;
+    padding: 15px 0;
+}
+
+#xkcd-name {
+    color: #fff;
+    padding-bottom: 10px;
+}
+
+#xkcd-header button {
+    color: #fff;
+    font-size: 14px;
+    background-color: #859abf;
+    border: none;
+    border-radius: 7px;
+    box-shadow: 0 5px #647eae;
+    padding: 5px 10px;
+}
+
+#xkcd-header button:focus {
+    outline: 0;
+}
+
+#xkcd-header button:hover {
+    background-color: #758cb6;
+}
+
+#xkcd-header button:active {
+    background-color: #758cb6;
+    box-shadow: 0 0 #647eae;
+    transform: translateY(5px);
+}
+
+#xkcd-img {
+    background: #fff;
+}
+
+#xkcd-img img {
+    padding: 10px;
+    max-width: 100%;
+    margin-bottom: -4px;
+}
+
+#xkcd-loader {
+    border: 5px solid #f3f3f3;
+    border-top: 5px solid #96a8c8;
+    border-radius: 50%;
+    width: 25px;
+    height: 25px;
+    animation: spin 1.5s linear infinite;
+    margin: 15px auto;
+}
+
+@keyframes spin {
+    0% { transform: rotate(0deg); }
+    100% { transform: rotate(360deg); }
+}
diff --git a/config/reports.ini.sample b/config/reports.ini.sample
new file mode 100644
index 0000000..d9227d0
--- /dev/null
+++ b/config/reports.ini.sample
@@ -0,0 +1,91 @@
+# ---- HTTP Server ----------------------------------------------------------
+
+[server:main]
+
+use = egg:Paste#http
+port = 9001
+host = 127.0.0.1
+use_threadpool = true
+threadpool_workers = 10
+
+# ---- Filters --------------------------------------------------------------
+
+# Filters sit between Galaxy and the HTTP server.
+
+# These filters are disabled by default.  They can be enabled with
+# 'filter-with' in the [app:main] section below.
+
+# Define the proxy-prefix filter.
+[filter:proxy-prefix]
+use = egg:PasteDeploy#prefix
+prefix = /reports
+
+# ---- Galaxy Reports  ------------------------------------------------------
+
+[app:main]
+
+# -- Application and filtering
+
+# If running behind a proxy server and Galaxy is served from a subdirectory,
+# enable the proxy-prefix filter and set the prefix in the
+# [filter:proxy-prefix] section above.
+#filter-with = proxy-prefix
+
+# If proxy-prefix is enabled and you're running more than one Galaxy instance
+# behind one hostname, you will want to set this to the same path as the prefix
+# in the filter above.  This value becomes the "path" attribute set in the
+# cookie so the cookies from each instance will not clobber each other.
+#cookie_path = None
+
+# Specifies the factory for the universe WSGI application
+paste.app_factory = galaxy.webapps.reports.buildapp:app_factory
+
+# Verbosity of console log messages.  Acceptable values can be found here:
+# https://docs.python.org/2/library/logging.html#logging-levels
+#log_level = DEBUG
+
+# Database connection
+# Galaxy reports are intended for production Galaxy instances, so sqlite (and the default value
+# below) is not supported. An SQLAlchemy connection string should be used to specify an external
+# database.
+#database_connection = sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE
+
+# Where dataset files are saved
+#file_path = database/files
+# Temporary storage for additional datasets; this should be shared through the cluster
+#new_file_path = database/tmp
+
+# Mako templates are compiled as needed and cached for reuse; this directory is
+# used for the cache.
+#template_cache_path = database/compiled_templates/reports
+
+# Configuration for debugging middleware
+#debug = False
+
+# Check for WSGI compliance.
+#use_lint = False
+
+# NEVER enable this on a public site (even test or QA)
+#use_interactive = true
+
+# Write thread status periodically to 'heartbeat.log' (careful, uses disk space rapidly!)
+#use_heartbeat = True
+
+# Profiling middleware (cProfile based)
+#use_profile = True
+
+# Mail
+#smtp_server = yourserver@yourfacility.edu
+#error_email_to = your_bugs@bx.psu.edu
+
+# Serving static files (needed if running standalone)
+# static_enabled = True
+# static_cache_time = 360
+# static_dir = %(here)s/static/
+# static_images_dir = %(here)s/static/images
+# static_favicon_dir = %(here)s/static/favicon.ico
+# static_scripts_dir = %(here)s/static/scripts/
+# static_style_dir = %(here)s/static/june_2007_style/blue
+
+# Sentry (getsentry.com) DSN for catching bugs.
+#sentry_dsn = None
diff --git a/config/shed_data_manager_conf.xml.sample b/config/shed_data_manager_conf.xml.sample
new file mode 100644
index 0000000..04b4637
--- /dev/null
+++ b/config/shed_data_manager_conf.xml.sample
@@ -0,0 +1,3 @@
+<?xml version="1.0"?>
+<data_managers>
+</data_managers>
diff --git a/config/shed_tool_conf.xml.sample b/config/shed_tool_conf.xml.sample
new file mode 100644
index 0000000..31ca79b
--- /dev/null
+++ b/config/shed_tool_conf.xml.sample
@@ -0,0 +1,3 @@
+<?xml version="1.0"?>
+<toolbox tool_path="../shed_tools">
+</toolbox>
diff --git a/config/shed_tool_data_table_conf.xml.sample b/config/shed_tool_data_table_conf.xml.sample
new file mode 100644
index 0000000..fd376e4
--- /dev/null
+++ b/config/shed_tool_data_table_conf.xml.sample
@@ -0,0 +1,3 @@
+<?xml version="1.0"?>
+<tables>
+</tables>
diff --git a/config/tool_conf.xml.main b/config/tool_conf.xml.main
new file mode 100644
index 0000000..4575f63
--- /dev/null
+++ b/config/tool_conf.xml.main
@@ -0,0 +1,134 @@
+<?xml version='1.0' encoding='utf-8'?>
+<toolbox>
+  <section id="getext" name="Get Data">
+    <tool file="data_source/upload.xml" />
+    <tool file="data_source/ucsc_tablebrowser.xml" />
+    <tool file="data_source/ucsc_tablebrowser_archaea.xml" />
+    <tool file="data_source/ebi_sra.xml" />
+    <tool file="data_source/biomart.xml" />
+    <tool file="data_source/gramene_mart.xml" />
+    <tool file="data_source/flymine.xml" />
+    <tool file="data_source/fly_modencode.xml" />
+    <tool file="data_source/modmine.xml" />
+    <tool file="data_source/mousemine.xml" />
+    <tool file="data_source/ratmine.xml" />
+    <tool file="data_source/yeastmine.xml" />
+    <tool file="data_source/worm_modencode.xml" />
+    <tool file="data_source/wormbase.xml" />
+    <tool file="data_source/zebrafishmine.xml" />
+    <tool file="data_source/eupathdb.xml" />
+    <tool file="genomespace/genomespace_file_browser_prod.xml" />
+    <tool file="genomespace/genomespace_importer.xml" />
+  </section>
+  <section id="send" name="Send Data">
+    <tool file="genomespace/genomespace_exporter.xml" />
+  </section>
+  <section id="liftOver" name="Lift-Over">
+    <tool file="extract/liftOver_wrapper.xml" />
+  </section>
+  <section id="textutil" name="Text Manipulation">
+    <tool file="filters/fixedValueColumn.xml" />
+    <tool file="filters/catWrapper.xml" hidden="True" />
+    <tool file="filters/condense_characters.xml" />
+    <tool file="filters/convert_characters.xml" />
+    <tool file="filters/mergeCols.xml" />
+    <tool file="filters/CreateInterval.xml" />
+    <tool file="filters/cutWrapper.xml" hidden="True" />
+    <tool file="filters/changeCase.xml" />
+    <tool file="filters/pasteWrapper.xml" />
+    <tool file="filters/remove_beginning.xml" />
+    <tool file="filters/randomlines.xml" />
+    <tool file="filters/headWrapper.xml" hidden="True" />
+    <tool file="filters/tailWrapper.xml" hidden="True" />
+    <tool file="filters/trimmer.xml" />
+    <tool file="filters/wc_gnu.xml" />
+    <tool file="filters/secure_hash_message_digest.xml" />
+  </section>
+  <section id="convert" name="Convert Formats">
+    <tool file="filters/bed2gff.xml" />
+    <tool file="filters/gff2bed.xml" />
+    <tool file="maf/maf_to_bed.xml" />
+    <tool file="maf/maf_to_interval.xml" />
+    <tool file="maf/maf_to_fasta.xml" />
+    <tool file="filters/sff_extractor.xml" />
+    <tool file="filters/wig_to_bigwig.xml" />
+    <tool file="filters/bed_to_bigbed.xml" />
+  </section>
+  <section id="filter" name="Filter and Sort">
+    <tool file="stats/filtering.xml" />
+    <tool file="filters/sorter.xml" />
+    <tool file="filters/grep.xml" />
+    <label id="gff" text="GFF" />
+        <tool file="filters/gff/extract_GFF_Features.xml" />
+        <tool file="filters/gff/gff_filter_by_attribute.xml" />
+        <tool file="filters/gff/gff_filter_by_feature_count.xml" />
+        <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" />
+  </section>
+  <section id="group" name="Join, Subtract and Group">
+    <tool file="filters/joiner.xml" />
+    <tool file="filters/compare.xml" />
+    <tool file="stats/grouping.xml" />
+  </section>
+  <section id="features" name="Extract Features">
+    <tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
+  </section>
+  <section id="fetchSeq" name="Fetch Sequences">
+    <tool file="extract/extract_genomic_dna.xml" />
+  </section>
+  <section id="fetchAlign" name="Fetch Alignments">
+    <tool file="maf/interval2maf_pairwise.xml" />
+    <tool file="maf/interval2maf.xml" />
+    <tool file="maf/interval_maf_to_merged_fasta.xml" />
+    <tool file="maf/genebed_maf_to_fasta.xml" />
+    <tool file="maf/maf_stats.xml" />
+    <tool file="maf/maf_thread_for_species.xml" />
+    <tool file="maf/maf_limit_to_species.xml" />
+    <tool file="maf/maf_limit_size.xml" />
+    <tool file="maf/maf_by_block_number.xml" />
+    <tool file="maf/maf_filter.xml" />
+    <tool file="maf/maf_reverse_complement.xml" />
+  </section>
+  <section id="scores" name="Get Genomic Scores">
+    <tool file="filters/wiggle_to_simple.xml" />
+    <tool file="stats/aggregate_binned_scores_in_intervals.xml" />
+  </section>
+  <section id="stats" name="Statistics">
+    <tool file="stats/gsummary.xml" />
+    <tool file="filters/uniq.xml" />
+  </section>
+  <section id="plots" name="Graph/Display Data">
+    <tool file="plotting/boxplot.xml" />
+    <tool file="maf/vcf_to_maf_customtrack.xml" />
+  </section>
+  <section id="hgv" name="Phenotype Association">
+    <tool file="evolution/codingSnps.xml" />
+    <tool file="evolution/add_scores.xml" />
+    <tool file="phenotype_association/sift.xml" />
+    <tool file="phenotype_association/linkToGProfile.xml" />
+    <tool file="phenotype_association/linkToDavid.xml" />
+    <tool file="phenotype_association/ldtools.xml" />
+    <tool file="phenotype_association/pass.xml" />
+    <tool file="phenotype_association/gpass.xml" />
+    <tool file="phenotype_association/beam.xml" />
+    <tool file="phenotype_association/lps.xml" />
+    <tool file="phenotype_association/master2pg.xml" />
+  </section>
+  <label id="ngs" text="NGS Toolbox Beta" />
+  <section id="cshl_library_information" name="NGS: QC and manipulation">
+    <label id="illumina" text="Illumina data" />
+    <label id="454" text="Roche-454 data" />
+    <label id="solid" text="AB-SOLiD data" />
+        <tool file="next_gen_conversion/solid2fastq.xml" />
+        <tool file="solid_tools/solid_qual_stats.xml" />
+        <tool file="solid_tools/solid_qual_boxplot.xml" />
+    <label id="generic_fastq" text="Generic FASTQ manipulation" />
+    <label id="fastx_toolkit_fastq" text="FASTX-Toolkit for FASTQ data" />
+  </section>
+  <section id="ngs_mapping" name="NGS: Mapping">
+    <label id="illumina" text="Illumina" />
+    <label id="roche_454" text="Roche-454" />
+    <label id="ab_solid" text="AB-SOLiD" />
+  </section>
+  <section id="samtools" name="NGS: SAM Tools">
+  </section>
+</toolbox>
diff --git a/config/tool_conf.xml.sample b/config/tool_conf.xml.sample
new file mode 100644
index 0000000..3e19d92
--- /dev/null
+++ b/config/tool_conf.xml.sample
@@ -0,0 +1,122 @@
+<?xml version='1.0' encoding='utf-8'?>
+<toolbox monitor="true">
+  <section id="getext" name="Get Data">
+    <tool file="data_source/upload.xml" />
+    <tool file="data_source/ucsc_tablebrowser.xml" />
+    <tool file="data_source/ucsc_tablebrowser_test.xml" />
+    <tool file="data_source/ucsc_tablebrowser_archaea.xml" />
+    <tool file="data_source/ebi_sra.xml" />
+    <tool file="data_source/microbial_import.xml" />
+    <tool file="data_source/biomart.xml" />
+    <tool file="data_source/cbi_rice_mart.xml" />
+    <tool file="data_source/gramene_mart.xml" />
+    <tool file="data_source/fly_modencode.xml" />
+    <tool file="data_source/flymine.xml" />
+    <tool file="data_source/flymine_test.xml" />
+    <tool file="data_source/modmine.xml" />
+    <tool file="data_source/mousemine.xml" />
+    <tool file="data_source/ratmine.xml" />
+    <tool file="data_source/yeastmine.xml" />
+    <tool file="data_source/metabolicmine.xml" />
+    <tool file="data_source/worm_modencode.xml" />
+    <tool file="data_source/wormbase.xml" />
+    <tool file="data_source/wormbase_test.xml" />
+    <tool file="data_source/zebrafishmine.xml" />
+    <tool file="data_source/eupathdb.xml" />
+    <tool file="data_source/hbvar.xml" />
+    <tool file="genomespace/genomespace_file_browser_prod.xml" />
+    <tool file="genomespace/genomespace_importer.xml" />
+  </section>
+  <section id="send" name="Send Data">
+    <tool file="genomespace/genomespace_exporter.xml" />
+  </section>
+  <section id="collection_operations" name="Collection Operations">
+    <tool file="${model_tools_path}/unzip_collection.xml" />
+    <tool file="${model_tools_path}/zip_collection.xml" />
+    <tool file="${model_tools_path}/filter_failed_collection.xml" />
+    <tool file="${model_tools_path}/flatten_collection.xml" />
+    <tool file="${model_tools_path}/merge_collection.xml" />
+  </section>
+  <section id="liftOver" name="Lift-Over">
+    <tool file="extract/liftOver_wrapper.xml" />
+  </section>
+  <section id="textutil" name="Text Manipulation">
+    <tool file="filters/fixedValueColumn.xml" />
+    <tool file="filters/catWrapper.xml" />
+    <tool file="filters/cutWrapper.xml" />
+    <tool file="filters/mergeCols.xml" />
+    <tool file="filters/convert_characters.xml" />
+    <tool file="filters/CreateInterval.xml" />
+    <tool file="filters/cutWrapper.xml" />
+    <tool file="filters/changeCase.xml" />
+    <tool file="filters/pasteWrapper.xml" />
+    <tool file="filters/remove_beginning.xml" />
+    <tool file="filters/randomlines.xml" />
+    <tool file="filters/headWrapper.xml" />
+    <tool file="filters/tailWrapper.xml" />
+    <tool file="filters/trimmer.xml" />
+    <tool file="filters/wc_gnu.xml" />
+    <tool file="filters/secure_hash_message_digest.xml" />
+  </section>
+  <section id="filter" name="Filter and Sort">
+    <tool file="stats/filtering.xml" />
+    <tool file="filters/sorter.xml" />
+    <tool file="filters/grep.xml" />
+    
+    <label id="gff" text="GFF" />
+    <tool file="filters/gff/extract_GFF_Features.xml" />
+    <tool file="filters/gff/gff_filter_by_attribute.xml" />
+    <tool file="filters/gff/gff_filter_by_feature_count.xml" />
+    <tool file="filters/gff/gtf_filter_by_attribute_values_list.xml" />
+  </section>
+  <section id="group" name="Join, Subtract and Group">
+    <tool file="filters/joiner.xml" />
+    <tool file="filters/compare.xml" />
+    <tool file="stats/grouping.xml" />
+  </section>
+  <section id="convert" name="Convert Formats">
+    <tool file="filters/axt_to_concat_fasta.xml" />
+    <tool file="filters/axt_to_fasta.xml" />
+    <tool file="filters/axt_to_lav.xml" />
+    <tool file="filters/bed2gff.xml" />
+    <tool file="filters/gff2bed.xml" />
+    <tool file="filters/lav_to_bed.xml" />
+    <tool file="maf/maf_to_bed.xml" />
+    <tool file="maf/maf_to_interval.xml" />
+    <tool file="maf/maf_to_fasta.xml" />
+    <tool file="filters/wiggle_to_simple.xml" />
+    <tool file="filters/sff_extractor.xml" />
+    <tool file="filters/gtf2bedgraph.xml" />
+    <tool file="filters/wig_to_bigwig.xml" />
+    <tool file="filters/bed_to_bigbed.xml" />
+  </section>
+  <section id="features" name="Extract Features">
+    <tool file="filters/ucsc_gene_bed_to_exon_bed.xml" />
+  </section>
+  <section id="fetchSeq" name="Fetch Sequences">
+    <tool file="extract/extract_genomic_dna.xml" />
+  </section>
+  <section id="fetchAlign" name="Fetch Alignments">
+    <tool file="maf/interval2maf_pairwise.xml" />
+    <tool file="maf/interval2maf.xml" />
+    <tool file="maf/maf_split_by_species.xml" />
+    <tool file="maf/interval_maf_to_merged_fasta.xml" />
+    <tool file="maf/genebed_maf_to_fasta.xml" />
+    <tool file="maf/maf_stats.xml" />
+    <tool file="maf/maf_thread_for_species.xml" />
+    <tool file="maf/maf_limit_to_species.xml" />
+    <tool file="maf/maf_limit_size.xml" />
+    <tool file="maf/maf_by_block_number.xml" />
+    <tool file="maf/maf_reverse_complement.xml" />
+    <tool file="maf/maf_filter.xml" />
+  </section>
+  <section id="stats" name="Statistics">
+    <tool file="stats/gsummary.xml" />
+    <tool file="filters/uniq.xml" />
+  </section>
+  <section id="plots" name="Graph/Display Data">
+    <tool file="plotting/bar_chart.xml" />
+    <tool file="plotting/boxplot.xml" />
+    <tool file="maf/vcf_to_maf_customtrack.xml" />
+  </section>
+</toolbox>
diff --git a/config/tool_data_table_conf.xml.sample b/config/tool_data_table_conf.xml.sample
new file mode 100644
index 0000000..9017eba
--- /dev/null
+++ b/config/tool_data_table_conf.xml.sample
@@ -0,0 +1,103 @@
+<!-- Use the file tool_data_table_conf.xml.oldlocstyle if you don't want to update your loc files as changed in revision 4550:535d276c92bc-->
+<tables>
+    <!-- Locations of all fasta files under genome directory -->
+    <table name="all_fasta" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/all_fasta.loc" />
+    </table>
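+    <!-- A corresponding line in tool-data/all_fasta.loc is tab-separated in the
+          column order above; e.g. (the build and path are hypothetical):
+          hg19<TAB>hg19<TAB>Human (hg19)<TAB>/data/genomes/hg19/hg19.fa -->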
+    <!-- Locations of indexes in the BFAST mapper format -->
+    <table name="bfast_indexes" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, dbkey, formats, name, path</columns>
+        <file path="tool-data/bfast_indexes.loc" />
+    </table>
+    <!-- Locations of nucleotide BLAST databases -->
+    <table name="blastdb" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, name, path</columns>
+        <file path="tool-data/blastdb.loc" />
+    </table>
+    <!-- Locations of protein BLAST databases -->
+    <table name="blastdb_p" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, name, path</columns>
+        <file path="tool-data/blastdb_p.loc" />
+    </table>
+    <!-- Locations of protein domain BLAST databases -->
+    <table name="blastdb_d" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, name, path</columns>
+        <file path="tool-data/blastdb_d.loc" />
+    </table>
+    <!-- Locations of indexes in the BWA mapper format -->
+    <table name="bwa_indexes" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/bwa_index.loc" />
+    </table>
+    <!-- Locations of indexes in the BWA color-space mapper format -->
+    <table name="bwa_indexes_color" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/bwa_index_color.loc" />
+    </table>
+    <!-- Locations of MAF files that have been indexed with bx-python -->
+    <table name="indexed_maf_files">
+        <columns>name, value, dbkey, species</columns>
+        <file path="tool-data/maf_index.loc" />
+    </table>
+    <!-- Locations of fasta files appropriate for NGS simulation -->
+    <table name="ngs_sim_fasta" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/ngs_sim_fasta.loc" />
+    </table>
+    <!-- Locations of PerM base index files -->
+    <table name="perm_base_indexes" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, name, path</columns>
+        <file path="tool-data/perm_base_index.loc" />
+    </table>
+    <!-- Locations of PerM color-space index files -->
+    <table name="perm_color_indexes" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, name, path</columns>
+        <file path="tool-data/perm_color_index.loc" />
+    </table>
+    <!-- Location of Picard dict file and other files -->
+    <table name="picard_indexes" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/picard_index.loc" />
+    </table>
+    <!-- Location of SRMA dict file and other files -->
+    <table name="srma_indexes" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/picard_index.loc" />
+    </table>
+    <!-- Location of Mosaik files -->
+    <table name="mosaik_indexes" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/mosaik_index.loc" />
+    </table>
+    <!-- Locations of indexes in the 2bit format -->
+    <table name="twobit" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, path</columns>
+        <file path="tool-data/twobit.loc" />
+    </table>
+    <!-- Available IGV builds, loaded from URL -->
+    <table name="igv_broad_genomes" comment_char="#" allow_duplicate_entries="False">
+        <columns>name, url, value</columns>
+        <file url="http://igv.broadinstitute.org/genomes/genomes.txt" />
+    </table>
+    <!-- Available liftOver chain file -->
+    <table name="liftOver" comment_char="#" allow_duplicate_entries="False">
+        <columns>dbkey, name, value</columns>
+        <file path="tool-data/liftOver.loc" />
+    </table>
+    <!-- iobio bam servers -->
+    <table name="bam_iobio" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, name, url</columns>
+        <file path="tool-data/bam_iobio.loc" />
+    </table>
+    <!-- iobio vcf servers -->
+    <table name="vcf_iobio" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, name, url</columns>
+        <file path="tool-data/vcf_iobio.loc" />
+    </table>
+    <!-- simple biom servers -->
+    <table name="biom_simple_display" comment_char="#" allow_duplicate_entries="False">
+        <columns>value, name, url</columns>
+        <file path="tool-data/biom_simple_display.loc" />
+    </table>
+</tables>
diff --git a/config/tool_destinations.yml.sample b/config/tool_destinations.yml.sample
new file mode 100644
index 0000000..296a14f
--- /dev/null
+++ b/config/tool_destinations.yml.sample
@@ -0,0 +1,311 @@
+# Dynamic Tool Destinations config has four main levels (or key properties) - tools,
+# default_destination, users, and verbose.
+#
+# Under tools is where you put the IDs of the tools. Tool IDs must be
+# distinct. Each tool contains a list of rules, each of which carries rule-specific
+# parameters. Dynamic Tool Destination is set up to tolerate and fix a few light errors
+# in the config, but it's best to follow the specified template.
+#
+# default_destination, the second key property, specifies which global destination
+# to default to in case none of the rules apply.
+#
+# users, the third key property, can be used to assign a priority to a user. Three priorities
+# are available: low, med, and high. By default, all users are assumed to have med priority. If
+# a rule has priority destinations, the user will be sent to the destination that matches their
+# priority; if their priority isn't available, they will be matched to the next closest one.
+#
+# A fourth key property is verbose. When this is set to True, Dynamic Tool Destination
+# gives much more descriptive output regarding the steps it's taking in mapping your tools to
+# the appropriate destinations, including config validation and any potential errors found in the
+# config.
+#
+# Note: Configurations can be validated with the following command:
+#
+#    python lib/galaxy/jobs/dynamic_tool_destination.py -c
+#
+# or
+#
+#    python lib/galaxy/jobs/dynamic_tool_destination.py -c /path/to/tool_destinations.yml
+#
+# The general template is as follows (note that this template
+# does not use quote symbols), using spades and smalt as an example
+# (spades for showing what each field is for, and smalt
+# to give a fairly real-world example):
+#
+# Ex:
+#
+# tools:
+#   spades:
+#     rules:
+#       - rule_type: what kind of rule is it?
+#         nice_value: what kind of priority does this rule have over others?
+#         destination: how should this tool be run?
+#         lower_bound: what's the minimum file size?
+#         upper_bound: what's the maximum file size?
+#         users: #(optional) which users does this rule apply to
+#           - user@example.com
+#     default_destination: this tool-specific field is optional
+#   smalt_map:
+#     rules:
+#       - rule_type: file_size
+#         nice_value: 0
+#         lower_bound: 0
+#         upper_bound: 2 GB
+#         destination: cluster_low_4
+#       - rule_type: file_size
+#         nice_value: 0
+#         lower_bound: 2 GB
+#         upper_bound: 4 GB
+#         destination: cluster_low_8
+#       - rule_type: file_size
+#         nice_value: 0
+#         lower_bound: 4 GB
+#         upper_bound: Infinity
+#         destination: cluster_low_16
+#     default_destination: cluster_default
+# users:
+#   user_low@example.com:
+#     priority: low
+#   user_med@example.com:
+#     priority: med
+#   user_high@example.com:
+#     priority: high
+# default_destination: this global field is mandatory
+# verbose: True
+#
+#
+#
+# Looking at this example, some things must be clarified: each entry in the list of
+# rules per tool is specified by '-'. Per rule, regardless of rule type,
+# the following fields are mandatory:
+# rule_type, nice_value, and destination.
+#
+# Some of the other fields are mandatory only for specific rule types, which will be
+# further discussed below.
+#
+# Starting with rule_type, there are currently 3 rule types: file_size, records,
+# and arguments.
+#
+# file_size and records rules are based on how large the files are: if they fall
+# within specified limits, then the rule is satisfied, and the tool may proceed
+# with the appropriate destination.
+#
+# file_size and records rules have the following required parameters on top of the base
+# mandatory parameters:
+# upper_bound
+# lower_bound
+#
+# Bounds are allowed to be specified in bytes (48000 for example) or a higher size unit,
+# including the unit abbreviation (4 GB or 10 TB for example). Additionally, upper_bound
+# is allowed to be unbounded; simply specify Infinity in order to do so.
+#
+# **A rule matches sizes from the lower_bound up to, but not including, the upper_bound
+#   (e.g. lower_bound: 0 and upper_bound: 2 GB matches 0 <= size < 2 GB).
+#
+# The third rule_type is arguments, which has arguments as a mandatory parameter on top of
+# the base mandatory parameters. The arguments parameter is specified using the following
+# template:
+#
+#
+# arguments:
+#   argument_name: the_argument
+#
+#
+# A real world example is shown below:
+#
+#
+# tools:
+#   spades:
+#     rules:
+#       - rule_type: arguments
+#         nice_value: -19
+#         destination: fail
+#         fail_message: Don't do that
+#         arguments:
+#           careful: true
+#     default_destination: cluster_low
+# default_destination: cluster
+# verbose: False
+#
+#
+# Next up, nice_value is used for prioritizing rules over others in case two rules
+# match. nice_value basically translates to, "the higher the nice_value, the 'nicer'
+# the rule is about being picked last". So a rule with a nice value of -5 is
+# guaranteed to be picked over a rule with a nice value of 10. nice_value is allowed
+# to range from -20 to 20. If two rules have the same nice value and both are
+# satisfied, the first rule in the config file will be picked. In summary: first-come,
+# first-served, unless nice_value overrides that.
+#
+#
+# Finally, destination simply refers to the specific way the tool will run. Each
+# destination ID refers to a specific configuration to run the tool with.
+#
+# Some rules may call for the job to fail if certain conditions are encountered. In
+# this case, destination simply refers to 'fail'.
+#
+# For example, the following rule is set to fail the job if a file that is too large
+# (more than 4GB) is encountered:
+#
+#
+# tools:
+#   spades:
+#     rules:
+#       - rule_type: file_size
+#         nice_value: 0
+#         destination: fail
+#         fail_message: Data too large
+#         lower_bound: 4 GB
+#         upper_bound: Infinity
+#
+#
+# As shown above, a rule with 'fail' as the destination requires an additional
+# parameter, 'fail_message', which DynamicToolDestination uses to print a helpful error
+# message to the user indicating why the job failed (showing up inside the job log in
+# Galaxy's history panel).
+
+
+tools:
+  spades:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 10 MB
+        fail_message: Too few reads for spades to work
+        destination: fail
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 10 MB
+        upper_bound: 2 GB
+        destination:
+          priority:
+            low: slurm_low_24
+            med: slurm_med_24
+            high: slurm_high_24
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 2 GB
+        upper_bound: 4 GB
+        destination:
+          priority:
+            low: slurm_low_48
+            med: slurm_med_48
+            high: slurm_high_48
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 4 GB
+        upper_bound: Infinity
+        fail_message: Too much data, shouldn't run
+        destination: fail
+      - rule_type: arguments
+        nice_value: 0
+        arguments:
+          careful: true
+        destination:
+          priority:
+            low: slurm_low_48
+            med: slurm_med_48
+            high: slurm_high_48
+    default_destination:
+      priority:
+        low: slurm_low_16
+        med: slurm_med_16
+        high: slurm_high_16
+
+  srst2:
+    rules:
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 100
+        destination:
+          priority:
+            low: slurm_low_4
+            med: slurm_med_4
+            high: slurm_high_4
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 100
+        upper_bound: 1000
+        destination:
+          priority:
+            low: slurm_low_8
+            med: slurm_med_8
+            high: slurm_high_8
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 1000
+        upper_bound: 10000
+        destination:
+          priority:
+            low: slurm_low_8
+            med: slurm_med_8
+            high: slurm_high_8
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 1000000
+        upper_bound: Infinity
+        fail_message: Using the wrong tool
+        destination: fail
+
+  smalt_map:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 2 GB
+        destination:
+          priority:
+            low: slurm_low_4
+            med: slurm_med_4
+            high: slurm_high_4
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 2 GB
+        upper_bound: 4 GB
+        destination:
+          priority:
+            low: slurm_low_8
+            med: slurm_med_8
+            high: slurm_high_8
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 4 GB
+        upper_bound: Infinity
+        destination:
+          priority:
+            low: slurm_low_16
+            med: slurm_med_16
+            high: slurm_high_16
+
+  fastqc:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 400 MB
+        fail_message: File size too small
+        destination: fail
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 400 MB
+        upper_bound: Infinity
+        destination:
+          priority:
+            low: slurm_low_4
+            med: slurm_med_4
+            high: slurm_high_4
+    default_destination:
+      priority:
+        low: slurm_low_16
+        med: slurm_med_16
+        high: slurm_high_16
+
+default_destination:
+  priority:
+    low: slurm_low_default
+    med: slurm_med_default
+    high: slurm_high_default
+users:
+  user@email.com:
+    priority: high
+verbose: True
diff --git a/config/tool_shed.ini.sample b/config/tool_shed.ini.sample
new file mode 100644
index 0000000..b8e4982
--- /dev/null
+++ b/config/tool_shed.ini.sample
@@ -0,0 +1,181 @@
+# ---- HTTP Server ----------------------------------------------------------
+
+[server:main]
+
+use = egg:Paste#http
+port = 9009
+
+# The address on which to listen. By default, only listen to localhost
+# (the Tool Shed will not be accessible over the network).
+# Use '0.0.0.0' to listen on all available network interfaces.
+#host = 0.0.0.0
+host = 127.0.0.1
+
+use_threadpool = true
+threadpool_workers = 10
+# Set the number of seconds a thread can work before you should kill it
+# (assuming it will never finish) to 3 hours.
+threadpool_kill_thread_limit = 10800
+
+# ---- Galaxy Tool Shed -----------------------------------------------------
+
+[app:main]
+
+# Specifies the factory for the universe WSGI application
+paste.app_factory = galaxy.webapps.tool_shed.buildapp:app_factory
+
+# Verbosity of console log messages.  Acceptable values can be found here:
+# https://docs.python.org/2/library/logging.html#logging-levels
+#log_level = DEBUG
+
+# By default, the Tool Shed uses a SQLite database at 'database/community.sqlite'.  You
+# may use a SQLAlchemy connection string to specify an external database
+# instead.  This string takes many options which are explained in detail in the
+# config file documentation.
+#database_connection = sqlite:///./database/community.sqlite?isolation_level=IMMEDIATE
+
+# Where the hgweb.config file is stored.
+# The default is the Galaxy installation directory.
+#hgweb_config_dir = None
+
+# Where tool shed repositories are stored.
+#file_path = database/community_files
+
+# Temporary storage for additional datasets,
+# this should be shared through the cluster
+#new_file_path = database/tmp
+
+# File containing old-style genome builds
+#builds_file_path = tool-data/shared/ucsc/builds.txt
+
+# Format string used when showing date and time information.
+# The string may contain:
+# - the directives used by Python time.strftime() function (see
+#   https://docs.python.org/2/library/time.html#time.strftime ),
+# - $locale (complete format string for the server locale),
+# - $iso8601 (complete format string as specified by ISO 8601 international
+#   standard).
+#pretty_datetime_format = $locale (UTC)
+
+# -- Repository and Tool search
+# Using the script located at scripts/build_ts_whoosh_index.py
+# you can generate the search index and allow full-text API searching over
+# the repositories and tools within the Tool Shed, given that you specify
+# the following two config options.
+#toolshed_search_on = True
+#whoosh_index_dir = database/toolshed_whoosh_indexes
+
+# The following boosts are used to customize this instance's TS search.
+# The higher the boost, the more importance the scoring algorithm gives to the
+# given field.
+
+# For searching repositories at /api/repositories:
+#repo_name_boost = 0.9
+#repo_description_boost = 0.6
+#repo_long_description_boost = 0.5
+#repo_homepage_url_boost = 0.3
+#repo_remote_repository_url_boost = 0.2
+#repo_owner_username_boost = 0.3
+
+# For searching tools at /api/tools
+#tool_name_boost = 1.2
+#tool_description_boost = 0.6
+#tool_help_boost = 0.4
+#tool_repo_owner_username = 0.3
+
+# -- Analytics
+
+# You can enter tracking code here to track visitors' behavior
+# through your Google Analytics account. Example: UA-XXXXXXXX-Y
+#ga_code = None
+
+# -- Users and Security
+
+# The Tool Shed encodes various internal values when these values are output in
+# some format (for example, in a URL or cookie).  You should set a key to be
+# used by the algorithm that encodes and decodes these values.  It can be any
+# string.  If left unchanged, anyone could construct a cookie that would grant
+# them access to others' sessions.
+# One simple way to generate a value for this is with the shell command:
+#   python -c 'import time; print time.time()' | md5sum | cut -f 1 -d ' '
+#id_secret = changethisinproductiontoo
+
+# User authentication can be delegated to an upstream proxy server (usually
+# Apache).  The upstream proxy should set a REMOTE_USER header in the request.
+# Enabling remote user disables regular logins.  For more information, see:
+# https://wiki.galaxyproject.org/Admin/Config/ApacheProxy
+#use_remote_user = False
+
+# If use_remote_user is enabled, anyone who can log in to the Galaxy host may
+# impersonate any other user by simply sending the appropriate header. Thus a
+# secret shared between the upstream proxy server and Galaxy is required.
+# If anyone other than the Galaxy user is using the server, then apache/nginx
+# should pass a value in the header 'GX_SECRET' that is identical to the one below.
+#remote_user_secret = changethisinproductiontoo
+
+# Configuration for debugging middleware
+#debug = False
+
+# Check for WSGI compliance.
+#use_lint = False
+
+# Intercept print statements and show them on the returned page.
+#use_printdebug = True
+
+# NEVER enable this on a public site (even test or QA)
+#use_interactive = true
+
+# Administrative users - set this to a comma-separated list of valid Tool Shed
+# users (email addresses).  These users will have access to the Admin section
+# of the server, and will have access to create users, groups, roles,
+# libraries, and more.
+#admin_users = None
+
+# Force everyone to log in (disable anonymous access)
+#require_login = False
+
+# For use by email messages sent from the tool shed
+#smtp_server = smtp.your_tool_shed_server
+#email_from = your_tool_shed_email@server
+
+# If your SMTP server requires a username and password, you can provide them
+# here (password in cleartext here, but if your server supports STARTTLS it
+# will be sent over the network encrypted).
+#smtp_username = None
+#smtp_password = None
+
+# If your SMTP server requires SSL from the beginning of the connection
+#smtp_ssl = False
+
+# The URL linked by the "Support" link in the "Help" menu.
+#support_url = https://wiki.galaxyproject.org/Support
+
+# Address to join mailing list
+#mailing_join_addr = galaxy-announce-join@bx.psu.edu
+
+# Write thread status periodically to 'heartbeat.log' (careful, uses disk
+#  space rapidly!)
+#use_heartbeat = True
+
+# Profiling middleware (cProfile based)
+#use_profile = True
+
+# Enable creation of Galaxy flavor Docker Image
+#enable_galaxy_flavor_docker_image = False
+
+# Show a message box under the masthead.
+#message_box_visible = False
+#message_box_content = None
+#message_box_class = info
+
+# Serving static files (needed if running standalone)
+#static_enabled = True
+#static_cache_time = 360
+#static_dir = static/
+#static_images_dir = static/images
+#static_favicon_dir = static/favicon.ico
+#static_scripts_dir = static/scripts/
+#static_style_dir = static/style/blue
+
+# Sentry (getsentry.com) DSN for catching bugs.
+#sentry_dsn = None
diff --git a/config/tool_sheds_conf.xml.sample b/config/tool_sheds_conf.xml.sample
new file mode 100644
index 0000000..197cd63
--- /dev/null
+++ b/config/tool_sheds_conf.xml.sample
@@ -0,0 +1,7 @@
+<?xml version="1.0"?>
+<tool_sheds>
+    <tool_shed name="Galaxy Main Tool Shed" url="https://toolshed.g2.bx.psu.edu/"/>
+<!-- Test Tool Shed should be used only for testing purposes.
+    <tool_shed name="Galaxy Test Tool Shed" url="https://testtoolshed.g2.bx.psu.edu/"/> 
+-->
+</tool_sheds>
diff --git a/config/workflow_schedulers_conf.xml.sample b/config/workflow_schedulers_conf.xml.sample
new file mode 100644
index 0000000..c7572d9
--- /dev/null
+++ b/config/workflow_schedulers_conf.xml.sample
@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+<!-- If workflow_schedulers_conf.xml exists it defines the workflow scheduling
+     plugins to load and how to configure them. Currently only the core workflow
+     scheduling is available.
+-->
+<workflow_schedulers default="core">
+  <!-- Each element in this file corresponds to a workflow scheduling plugin
+       in lib/galaxy/workflow/schedulers. -->
+
+  <!-- Core plugin schedules whole workflow at outset inside Galaxy and doesn't
+       require any external dependencies. -->
+  <core id="core" />
+
+</workflow_schedulers>
diff --git a/contrib/README b/contrib/README
new file mode 100644
index 0000000..1bfe774
--- /dev/null
+++ b/contrib/README
@@ -0,0 +1,37 @@
+CONTENTS
+--------
+
+edu.psu.galaxy.GalaxyServer.plist:
+
+    This is the LaunchDaemon definition for OS X (pre-Snow Leopard?). Assuming
+    that Galaxy is installed in the home directory of the user 'galaxy', run:
+        mkdir /Users/galaxy/galaxy_dist/log
+        sudo cp /Users/galaxy/galaxy_dist/contrib/edu.psu.galaxy.GalaxyServer.plist /Library/LaunchDaemons/
+        sudo chown root:wheel /Library/LaunchDaemons/edu.psu.galaxy.GalaxyServer.plist
+    Launch Galaxy with:
+        sudo launchctl load /Library/LaunchDaemons/edu.psu.galaxy.GalaxyServer.plist
+    Galaxy will now be started every time the computer starts.
+    Written and submitted by James Casbon.
+
+galaxy.debian-init:
+
+    init script for Debian/Ubuntu Linux.  Copy to /etc/init.d/galaxy, modify
+    paths, and configure for start at boot with `update-rc.d galaxy defaults`.
+    Also written and submitted by James Casbon.
+
+galaxy.fedora-init:
+
+    init script for Fedora/RedHat/Scientific Linux/CentOS.  Copy to
+    /etc/init.d/galaxy, modify paths, and configure for start at boot with
+    `chkconfig galaxy on`.  Written and submitted by Brad Chapman.
+
+galaxy.solaris-smf.xml:
+
+    SMF Manifest for Solaris 10 and OpenSolaris.  Import with `svccfg import
+    galaxy.solaris-smf.xml`.
+
+gls.pl:
+
+    "Galaxy ls", for sites where Galaxy logins match system logins, this script
+    can be used to list the filesystem paths to a user's history datasets.
+    Requires site modifications.  Written and submitted by Simon McGowan.
diff --git a/contrib/collect_sge_job_timings.sh b/contrib/collect_sge_job_timings.sh
new file mode 100644
index 0000000..60ac843
--- /dev/null
+++ b/contrib/collect_sge_job_timings.sh
@@ -0,0 +1,126 @@
+#!/bin/sh
+
+##
+## CHANGE ME to galaxy's database name
+## 
+DATABASE=galaxyprod
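+
+##
+## Usage sketch (assumptions: qacct and psql are on the PATH, and the current
+## user can read the SGE accounting data and connect to the database named above):
+##     sh collect_sge_job_timings.sh > sge_job_timings.tsv
+##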
+
+##
+## AWK script to extract the relevant fields of SGE's qacct report
+##   and write them all in one line.
+AWKSCRIPT='
+$1=="jobnumber" { job_number = $2 }
+$1=="qsub_time" { qsub_time = $2 }
+$1=="start_time" { start_time = $2 }
+$1=="end_time" { end_time = $2 
+        print job_number, qsub_time, start_time, end_time
+}
+'
+
+FIFO=$(mktemp -u) || exit 1
+mkfifo "$FIFO" || exit 1
+
+##
+## Write the SGE/QACCT job report into a pipe
+## (later will be loaded into a temporary table)
+qacct -j |
+    egrep "jobnumber|qsub_time|start_time|end_time" |
+    sed 's/  */\t/'  |
+    awk -v FS="\t" -v OFS="\t" "$AWKSCRIPT" |
+    grep -v -- "-/-" > "$FIFO" &
+
+##
+##  The SQL to generate the report
+##
+SQL="
+--
+-- Temporary table which contains the qsub/start/end times, based on SGE's qacct report.
+--
+CREATE TEMPORARY TABLE sge_times (
+  sge_job_id INTEGER PRIMARY KEY,
+  qsub_time TIMESTAMP WITHOUT TIME ZONE,
+  start_time TIMESTAMP WITHOUT TIME ZONE,
+  end_time TIMESTAMP WITHOUT TIME ZONE
+);
+
+COPY sge_times FROM '$FIFO' ;
+
+--
+-- Temporary table which contains a unified view of all galaxy jobs.
+-- for each job:
+--   the user name, total input size (bytes), and input file types, DBKEY
+--   creation time, update time, SGE job runner parameters
+-- If a job had more than one input file, then some parameters might not be accurate (e.g. DBKEY)
+-- as one will be chosen arbitrarily
+CREATE TEMPORARY TABLE job_input_sizes AS
+SELECT
+ job.job_runner_external_id as job_runner_external_id,
+ min(job.id) as job_id,
+ min(job.create_time) as job_create_time,
+ min(job.update_time) as job_update_time,
+ min(galaxy_user.email) as email,
+ min(job.tool_id) as tool_name,
+-- This hack requires a custom user-defined aggregate function, so it is commented out for now
+-- textcat_all(hda.extension || ' ') as file_types,
+ sum(dataset.file_size) as total_input_size,
+ count(dataset.file_size) as input_dataset_count,
+ min(job.job_runner_name) as job_runner_name,
+-- This hack tries to extract the DBKEY attribute from the metadata JSON string
+ min(substring(encode(metadata,'escape') from '\"dbkey\": \\\\[\"(.*?)\"\\\\]')) as dbkey
+FROM
+ job,
+ galaxy_user,
+ job_to_input_dataset,
+ history_dataset_association hda,
+ dataset
+WHERE
+ job.user_id = galaxy_user.id
+ AND
+ job.id = job_to_input_dataset.job_id
+ AND
+ hda.id = job_to_input_dataset.dataset_id
+ AND
+ dataset.id = hda.dataset_id
+ AND
+ job.job_runner_external_id is not NULL
+GROUP BY
+ job.job_runner_external_id;
+
+
+--
+-- Join the two temporary tables, create a nice report
+--
+SELECT
+ job_input_sizes.job_runner_external_id as sge_job_id,
+ job_input_sizes.job_id as galaxy_job_id,
+ job_input_sizes.email,
+ job_input_sizes.tool_name,
+-- ## SEE previous query for commented-out filetypes field
+-- job_input_sizes.file_types,
+ job_input_sizes.job_runner_name as sge_params,
+ job_input_sizes.dbkey,
+ job_input_sizes.total_input_size,
+ job_input_sizes.input_dataset_count,
+ job_input_sizes.job_update_time - job_input_sizes.job_create_time as galaxy_total_time,
+ sge_times.end_time - sge_times.qsub_time as sge_total_time,
+ sge_times.start_time - sge_times.qsub_time as sge_waiting_time,
+ sge_times.end_time - sge_times.start_time as sge_running_time,
+ job_input_sizes.job_create_time as galaxy_job_create_time
+-- ## no need to show the exact times, the deltas (above) are informative enough
+-- job_input_sizes.job_update_time as galaxy_job_update_time,
+-- sge_times.qsub_time as sge_qsub_time,
+-- sge_times.start_time as sge_start_time,
+-- sge_times.end_time as sge_end_time
+FROM
+ job_input_sizes
+LEFT OUTER JOIN
+ SGE_TIMES
+ON (job_input_sizes.job_runner_external_id = sge_times.sge_job_id)
+ORDER BY
+ galaxy_job_create_time
+ 
+"
+
+echo "$SQL" | psql --pset "footer=off" -F"  " -A --quiet "$DATABASE"
+
+
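A minimal usage sketch for the timing report above, assuming the script runs on the SGE submission host as a user with access to both `qacct` and the Galaxy PostgreSQL database (DATABASE must be edited in the script first; the output filename is illustrative):

    # assumed invocation; edit DATABASE in the script beforehand
    sh contrib/collect_sge_job_timings.sh > sge_job_timings.tsv
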
diff --git a/contrib/edu.psu.galaxy.GalaxyServer.plist b/contrib/edu.psu.galaxy.GalaxyServer.plist
new file mode 100644
index 0000000..b09de75
--- /dev/null
+++ b/contrib/edu.psu.galaxy.GalaxyServer.plist
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>Label</key>
+	<string>edu.psu.galaxy.GalaxyServer</string>
+	<key>OnDemand</key>
+	<false/>
+	<key>Program</key>
+	<string>/Users/galaxy/galaxy_dist/run.sh</string>
+	<key>RunAtLoad</key>
+	<true/>
+	<key>ServiceDescription</key>
+	<string>Galaxy</string>
+	<key>StandardErrorPath</key>
+	<string>/Users/galaxy/galaxy_dist/log/galaxy.stderr</string>
+	<key>StandardOutPath</key>
+	<string>/Users/galaxy/galaxy_dist/log/galaxy.stdout</string>
+	<key>UserName</key>
+	<string>galaxy</string>
+</dict>
+</plist>
diff --git a/contrib/galaxy.debian-init b/contrib/galaxy.debian-init
new file mode 100644
index 0000000..edb9412
--- /dev/null
+++ b/contrib/galaxy.debian-init
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+# Author: James Casbon, 2009
+
+### BEGIN INIT INFO
+# Provides:             galaxy
+# Required-Start:       $network $local_fs $mysql
+# Required-Stop:
+# Default-Start:        2 3 4 5
+# Default-Stop:         0 1 6
+# Short-Description:    Galaxy
+### END INIT INFO
+
+. /lib/lsb/init-functions
+
+USER="galaxy"
+GROUP="nogroup"
+GALAXY_DIR="/home/galaxy/galaxy_dist/"
+# Galaxy releases >= 16.01 installs dependencies by default into a virtualenv in <GALAXY_DIR>/.venv
+# A simple way to activate this virtualenv is to use the python interpreter in <GALAXY_DIR>/.venv
+# See https://wiki.galaxyproject.org/News/2016_01_GalaxyRelease and
+# https://github.com/galaxyproject/galaxy/blob/dev/doc/source/admin/framework_dependencies.rst
+PYTHON="/home/galaxy/galaxy_dist/.venv/bin/python"
+OPTS="./scripts/paster.py serve --log-file /home/galaxy/galaxy.log config/galaxy.ini"
+PIDFILE="/var/run/galaxy.pid"
+
+case "${1:-''}" in
+  'start')
+           log_daemon_msg "Starting Galaxy"
+           if start-stop-daemon --chuid $USER --group $GROUP --start --make-pidfile \
+	             --pidfile $PIDFILE --background --chdir $GALAXY_DIR --exec $PYTHON -- $OPTS; then
+             log_end_msg 0
+           else
+             log_end_msg 1
+	   fi
+
+        ;;
+  'stop')
+           log_daemon_msg "Stopping Galaxy" 
+	   if start-stop-daemon --stop --pidfile $PIDFILE; then
+	     log_end_msg 0
+	   else 
+	     log_end_msg 1
+	   fi
+        ;;
+  'restart')
+           # restart commands here
+	   $0 stop
+	   $0 start
+			   
+        ;;
+  *)      # no parameter specified
+        echo "Usage: $SELF start|stop|restart|reload|force-reload|status"
+        exit 1
+        ;;
+esac
+
+
+
diff --git a/contrib/galaxy.fedora-init b/contrib/galaxy.fedora-init
new file mode 100644
index 0000000..2bbc7db
--- /dev/null
+++ b/contrib/galaxy.fedora-init
@@ -0,0 +1,107 @@
+#!/bin/bash
+#
+# Init file for Galaxy (http://galaxyproject.org/)
+#   Suitable for use on Fedora and derivatives (RedHat Enterprise Linux, Scientific Linux, CentOS)
+#
+# Contributed by Brad Chapman
+#
+# chkconfig: 2345 98 20
+# description: Galaxy http://galaxyproject.org/
+
+#--- loading functions
+. /etc/init.d/functions
+#--- config
+
+SERVICE_NAME="galaxy"
+RUN_AS="galaxy"
+RUN_IN="/path/to/galaxy-dist"
+
+#--- main actions
+
+start() {
+	echo "Starting $SERVICE_NAME... "
+	cmd="cd $RUN_IN && sh run.sh --daemon"
+	case "$(id -un)" in
+		$RUN_AS)
+			eval "$cmd"
+			;;
+		root)
+			su - $RUN_AS -c "$cmd"
+			;;
+		*)
+			echo "*** ERROR *** must be $RUN_AS or root in order to control this service" >&2
+			exit 1
+	esac
+	echo "...done."
+}
+
+stop() {
+	echo -n "Stopping $SERVICE_NAME... "
+	
+	cmd="cd $RUN_IN && sh run.sh --stop-daemon"
+
+	case "$(id -un)" in
+		$RUN_AS)
+			eval "$cmd"
+			;;
+		root)
+			su - $RUN_AS -c "$cmd"
+			;;
+		*)
+			echo "*** ERROR *** must be $RUN_AS or root in order to control this service" >&2
+			exit 1
+	esac
+	
+	echo "done."
+}
+
+galaxy_status() {
+	if [[ $(grep '\[server:' $RUN_IN/config/galaxy.ini|awk -F'(:)|(])' '{ print $2 }') == 'main' ]]
+	then
+		echo -n "$SERVICE_NAME status: "
+		status -p $RUN_IN/paster.pid galaxy
+	else
+		for proc in $(grep '\[server:' $RUN_IN/config/galaxy.ini|awk -F'(:)|(])' '{ print $2 }')
+		do
+			status -p $RUN_IN/${proc}.pid ${proc}
+		done
+	fi
+}
+
+notsupported() {
+	echo "*** ERROR*** $SERVICE_NAME: operation [$1] not supported"
+}
+
+usage() {
+	echo "Usage: $SERVICE_NAME start|stop|restart|status"
+}
+
+
+#---
+
+case "$1" in
+	start)
+		start "$@"
+		;;
+	stop)
+		stop
+		;;
+	restart|reload)
+		stop
+		start
+		;;
+	status)
+		set +e
+		galaxy_status
+		exit $?
+		;;
+	'')
+		usage >&2
+		exit 1
+		;;
+	*)
+		notsupported "$1" >&2
+		usage >&2
+		exit 1
+		;;
+esac
diff --git a/contrib/galaxy.solaris-smf.xml b/contrib/galaxy.solaris-smf.xml
new file mode 100644
index 0000000..c44d73d
--- /dev/null
+++ b/contrib/galaxy.solaris-smf.xml
@@ -0,0 +1,75 @@
+<?xml version="1.0"?>
+<!DOCTYPE service_bundle SYSTEM "/usr/share/lib/xml/dtd/service_bundle.dtd.1">
+
+<service_bundle type='manifest' name='galaxy'>
+
+<service name='application/galaxy' type='service' version='0'>
+
+    <dependency name='net'
+        grouping='require_all'
+        restart_on='none'
+        type='service'>
+        <service_fmri
+            value='svc:/network/loopback'/>
+    </dependency>
+
+    <dependency name='filesystem-local'
+        grouping='require_all'
+        restart_on='none'
+        type='service'>
+        <service_fmri
+            value='svc:/system/filesystem/local:default'/>
+    </dependency>
+
+    <instance name='main' enabled='false'>
+
+        <exec_method name='start'
+            type='method'
+            exec='python -ES ./scripts/paster.py serve universe_wsgi.webapp.ini --log-file=log/main.log --daemon'
+            timeout_seconds='60'>
+            <method_context
+                working_directory='/galaxy'>
+                <method_credential user='galaxy' group='galaxy' />
+                <method_environment>
+                    <envvar name="PATH" value="/opt/local/bin:/bin:/usr/bin" />
+                    <envvar name="TEMP" value="/galaxy/database/tmp" />
+                </method_environment>
+            </method_context>
+
+        </exec_method>
+
+        <exec_method name='stop'
+            type='method'
+            exec=':kill'
+            timeout_seconds='60'>
+            <method_context>
+                <method_credential user='galaxy' group='galaxy' />
+            </method_context>
+        </exec_method>
+
+        <property_group name='general' type='framework'>
+            <propval name='action_authorization' type='astring'
+                value='solaris.smf.manage.galaxy' />
+            <propval name='value_authorization' type='astring'
+                value='solaris.smf.manage.galaxy' />
+        </property_group>
+
+    </instance>
+
+    <stability value='Evolving' />
+
+    <template>
+        <common_name>
+            <loctext xml:lang='C'>
+                Galaxy
+            </loctext>
+        </common_name>
+        <documentation>
+            <doc_link name='galaxyproject.org'
+                uri='http://galaxyproject.org' />
+        </documentation>
+    </template>
+
+</service>
+
+</service_bundle> 
diff --git a/contrib/galaxy_config_merger.py b/contrib/galaxy_config_merger.py
new file mode 100644
index 0000000..38b5d3c
--- /dev/null
+++ b/contrib/galaxy_config_merger.py
@@ -0,0 +1,92 @@
+#! /usr/bin/env python
+'''
+galaxy_config_merger.py
+
+Created by Anne Pajon on 31 Jan 2012
+
+Copyright (c) 2012 Cancer Research UK - Cambridge Research Institute.
+
+This source file is licensed under the Academic Free License version
+3.0 available at http://www.opensource.org/licenses/AFL-3.0.
+
+Permission is hereby granted to reproduce, translate, adapt, alter,
+transform, modify, or arrange this source file (the "Original Work");
+to distribute or communicate copies of it under any license of your
+choice that does not contradict the terms and conditions; to perform
+or display the Original Work publicly.
+
+THE ORIGINAL WORK IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS
+AND WITHOUT WARRANTY, EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT
+LIMITATION, THE WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY OR
+FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF
+THE ORIGINAL WORK IS WITH YOU.
+
+Script for merging specific local Galaxy config galaxy.ini.cri with default Galaxy galaxy.ini.sample
+'''
+from __future__ import print_function
+
+import logging
+import optparse
+import sys
+
+from six.moves import configparser
+
+
+def main():
+    # logging configuration
+    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
+
+    # get the options
+    parser = optparse.OptionParser()
+    parser.add_option("-s", "--sample", dest="sample", action="store", help="path to Galaxy galaxy.ini.sample file")
+    parser.add_option("-c", "--config", dest="config", action="store", help="path to your own galaxy.ini file")
+    parser.add_option("-o", "--output", dest="output", action="store", help="path to the new merged galaxy.ini.new file")
+    (options, args) = parser.parse_args()
+
+    for option in ['sample', 'config']:
+        if getattr(options, option) is None:
+            print("Please supply a --%s parameter.\n" % (option))
+            parser.print_help()
+            sys.exit()
+
+    config_sample = configparser.RawConfigParser()
+    config_sample.read(options.sample)
+    config_sample_content = open(options.sample, 'r').read()
+
+    config = configparser.RawConfigParser()
+    config.read(options.config)
+
+    logging.info("Merging your own config file %s into the sample one %s." % (options.config, options.sample))
+    logging.info("---------- DIFFERENCE ANALYSIS BEGIN ----------")
+    for section in config.sections():
+        if not config_sample.has_section(section):
+            logging.warning("-MISSING- section [%s] not found in sample file. It will be ignored." % section)
+        else:
+            for (name, value) in config.items(section):
+                if not config_sample.has_option(section, name):
+                    if not "#%s" % name in config_sample_content:
+                        logging.warning("-MISSING- section [%s] option '%s' not found in sample file. It will be ignored." % (section, name))
+                    else:
+                        logging.info("-notset- section [%s] option '%s' not set in sample file. It will be added." % (section, name))
+                        config_sample.set(section, name, value)
+                else:
+                    if not config_sample.get(section, name) == value:
+                        logging.info("- diff - section [%s] option '%s' has different value ('%s':'%s'). It will be modified." % (section, name, config_sample.get(section, name), value))
+                        config_sample.set(section, name, value)
+    logging.info("---------- DIFFERENCE ANALYSIS END   ----------")
+
+    if options.output:
+        outputfile = open(options.output, 'w')
+        config_sample.write(outputfile)
+        outputfile.close()
+    else:
+        # print "----------"
+        # config_sample.write(sys.stdout)
+        # print "----------"
+        logging.info("use -o OUTPUT to write the merged configuration into a file.")
+
+    logging.info("read Galaxy galaxy.ini.sample for detailed information.")
+
+
+if __name__ == '__main__':
+    main()
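A usage sketch for galaxy_config_merger.py with the options defined above (the file paths are illustrative):

    python contrib/galaxy_config_merger.py \
        -s config/galaxy.ini.sample \
        -c config/galaxy.ini \
        -o config/galaxy.ini.new
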
diff --git a/contrib/galaxy_reports.fedora-init b/contrib/galaxy_reports.fedora-init
new file mode 100644
index 0000000..3b25c23
--- /dev/null
+++ b/contrib/galaxy_reports.fedora-init
@@ -0,0 +1,95 @@
+#!/bin/bash
+#
+# Init file for Galaxy (http://galaxyproject.org/)
+#   Suitable for use on Fedora and derivatives (RedHat Enterprise Linux, Scientific Linux, CentOS)
+#
+# Contributed by Brad Chapman
+#
+# chkconfig: 2345 98 20
+# description: Galaxy http://galaxyproject.org/
+
+#--- loading functions
+. /etc/init.d/functions
+#--- config
+
+SERVICE_NAME="galaxy-reports"
+RUN_AS="galaxy"
+RUN_IN="/path/to/galaxy-dist"
+
+#--- main actions
+
+start() {
+	echo "Starting $SERVICE_NAME... "
+	cmd="cd $RUN_IN && sh run_reports.sh --daemon"
+	case "$(id -un)" in
+		$RUN_AS)
+			eval "$cmd"
+			;;
+		root)
+			su - $RUN_AS -c "$cmd"
+			;;
+		*)
+			echo "*** ERROR *** must be $RUN_AS or root in order to control this service" >&2
+			exit 1
+	esac
+	echo "...done."
+}
+
+stop() {
+	echo -n "Stopping $SERVICE_NAME... "
+	
+	cmd="cd $RUN_IN && sh run_reports.sh --stop-daemon"
+
+	case "$(id -un)" in
+		$RUN_AS)
+			eval "$cmd"
+			;;
+		root)
+			su - $RUN_AS -c "$cmd"
+			;;
+		*)
+			echo "*** ERROR *** must be $RUN_AS or root in order to control this service" >&2
+			exit 1
+	esac
+	
+	echo "done."
+}
+
+notsupported() {
+	echo "*** ERROR*** $SERVICE_NAME: operation [$1] not supported"
+}
+
+usage() {
+	echo "Usage: $SERVICE_NAME start|stop|restart|status"
+}
+
+
+#---
+
+case "$1" in
+	start)
+		start "$@"
+		;;
+	stop)
+		stop
+		;;
+	restart|reload)
+		stop
+		start
+		;;
+	status)
+		set +e
+		echo -n "$SERVICE_NAME status: "
+		status -p $RUN_IN/reports_webapp.pid $SERVICE_NAME
+		exit $?
+		;;
+	'')
+		usage >&2
+		exit 1
+		;;
+	*)
+		notsupported "$1" >&2
+		usage >&2
+		exit 1
+		;;
+esac
diff --git a/contrib/galaxy_supervisor.conf b/contrib/galaxy_supervisor.conf
new file mode 100644
index 0000000..e739365
--- /dev/null
+++ b/contrib/galaxy_supervisor.conf
@@ -0,0 +1,45 @@
+# This is a sample supervisor config file.
+# In this configuration, 2 handlers and 1 uWSGI instance with 2 processes and 2 threads will be started (uWSGI is more performant than the default Paste server).
+# In order for this to work, you will need a [uwsgi] section in galaxy.ini, like the following:
+#
+# [uwsgi]
+# master = True
+#
+# Further, this assumes uwsgi has been installed through pip into a virtualenv (/home/galaxy/galaxy/.venv/, in this example). 
+# If this is not the case, adjust the path to uwsgi accordingly.
+# If uwsgi was not installed through pip, include "--plugin python" in the uwsgi command.
+# This configuration has been tested with galaxy release_16.01 and uWSGI==2.0.12.
+# This assumes galaxy is installed in /home/galaxy/galaxy, so change occurrences of /home/galaxy/galaxy accordingly.
+# If you want to run galaxy under a different username, change "user = galaxy" to "user = <your user>".
+# You will probably want to proxy uwsgi with nginx or apache (a browser cannot connect directly to uwsgi on port 4001);
+# see https://wiki.galaxyproject.org/Admin/Config/Performance/Scaling#uWSGI-1
+# or https://wiki.galaxyproject.org/Admin/Config/Performance/Scaling#uWSGI-2 .
+
+[program:galaxy_web]
+command         = /home/galaxy/galaxy/.venv/bin/uwsgi --virtualenv /home/galaxy/galaxy/.venv --ini-paste /home/galaxy/galaxy/config/galaxy.ini --logdate --master --processes 2 --threads 2 --logto /home/galaxy/galaxy/uwsgi.log --socket 127.0.0.1:4001 --pythonpath lib --stats 127.0.0.1:9191
+directory       = /home/galaxy/galaxy
+umask           = 022
+autostart       = true
+autorestart     = true
+startsecs       = 20
+user            = galaxy
+environment     = PATH=/home/galaxy/galaxy/.venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin,PYTHONHOME=/home/galaxy/galaxy/.venv
+numprocs        = 1
+stopsignal      = INT
+startretries    = 15
+
+[program:handler]
+command         = /home/galaxy/galaxy/.venv/bin/python ./lib/galaxy/main.py -c /home/galaxy/galaxy/config/galaxy.ini --server-name=handler%(process_num)s --log-file=/home/galaxy/galaxy/handler%(process_num)s.log
+directory       = /home/galaxy/galaxy
+process_name    = handler%(process_num)s
+numprocs        = 2
+umask           = 022
+autostart       = true
+autorestart     = true
+startsecs       = 20
+user            = galaxy
+environment     = PYTHONHOME=/home/galaxy/galaxy/.venv
+startretries    = 15
+
+[group:galaxy]
+programs = handler, galaxy_web
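Once the configuration above is installed (for example as /etc/supervisor/conf.d/galaxy.conf; the exact path varies by distribution and is an assumption here), the group can be managed with the standard supervisorctl commands:

    supervisorctl reread
    supervisorctl update
    supervisorctl status galaxy:
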
diff --git a/contrib/gls.pl b/contrib/gls.pl
new file mode 100644
index 0000000..75995da
--- /dev/null
+++ b/contrib/gls.pl
@@ -0,0 +1,204 @@
+#!/usr/bin/env perl
+
+=head1 NAME
+
+ gls
+
+=head1 DESCRIPTION
+
+ Display the files generated by the current user within the local instance of Galaxy.
+ Information is grouped by user's Galaxy histories and ordered by date/time
+ 
+=head1 OPTIONS
+
+	- i|info        = show more info about each file [default = no]
+	- e|error       = show error files [default = no]
+	- d|dirname     = show files from this dirname (i.e., history name) only (NOTE: if the history name contains spaces, it should be quoted)
+	- n|nocontent   = show empty files [default = no]
+	- a|altuser     = supply an alternative username (NB: admin only)
+	- h|help        = help
+	- m|man         = man
+
+=head1 AUTHOR
+
+ Simon McGowan, CBRG [Computational Biology Research Group, Oxford University, UK]
+
+=head1 Update Record
+
+ 23/09/2010  001  S.McGowan  first written
+
+=cut
+
+
+
+use strict;
+use warnings;
+use Data::Dumper;
+use DBI;
+use Getopt::Long;
+use Pod::Usage;
+
+my $show_file_info = 0;
+my $show_error_files = 0;
+my $show_empty_files = 0;
+my $help = 0;
+my $man = 0;
+my $alt_user;
+my $selected_dir;
+
+GetOptions(
+	'h|help'=>\$help,
+	'm|man'=>\$man,
+	'i|info'=>\$show_file_info,
+	'e|error'=>\$show_error_files,
+	'a|altuser=s'=>\$alt_user,
+	'n|nocontent'=>\$show_empty_files,
+	'd|dirname=s'=>\$selected_dir
+); 
+
+pod2usage(1) if $help;
+pod2usage(-verbose=>2) if $man;
+
+my %history_data;
+my %file_data;
+
+
+############  CONFIG  ########################################
+# list of admin usernames:
+my %admin;
+$admin{simonmcg} = '';
+$admin{stevetay} = '';
+
+# institute email domain
+my $email_domain = '@molbiol.ox.ac.uk';
+
+# mysql db
+my $mysql_database = 'galaxy';
+my $mysql_host = 'xxxxxxxx';
+my $mysql_username = 'xxxxxxxx';
+my $mysql_password = 'xxxxxxxx';
+
+# file path
+my $db_root_dir = '/wwwdata/galaxy-prod/database/files/';
+##############################################################
+
+#-----------------------------------------------------------------------------------
+
+my $current_user = getlogin();  
+
+# allow admin to list any user's galaxy files:
+if (exists($admin{$current_user}))
+{
+	if ($alt_user)  {$current_user = $alt_user;}
+} 
+
+&get_data;
+
+&print_galaxy_data;
+
+exit();
+
+#-----------------------------------------------------------------------------------
+
+sub get_data
+{
+	my $dbh = DBI->connect("DBI:mysql:database=$mysql_database;host=" . $mysql_host, $mysql_username, $mysql_password, {'RaiseError' => 1});
+	
+	my $sql = "SELECT h.id, h.name, h.create_time, hda.dataset_id, hda.update_time, hda.name, hda.info, hda.blurb, hda.extension, d.file_size
+	FROM history h, history_dataset_association hda, galaxy_user g, dataset d
+	WHERE g.email = '$current_user$email_domain'
+	AND g.id = h.user_id
+	AND h.id = hda.history_id
+	AND hda.dataset_id = d.id";	
+	
+	my $sth = $dbh->prepare($sql) or die("Failed to prepare statement $sql\n"); 
+	$sth->execute() or die("Can't perform SQL $sql : $DBI::errstr\n");	
+	while (my $ref = $sth->fetch)
+	{
+		my ($history_id, $history_name, $create_time, $dataset_id, $dataset_time, $dataset_name, $info, $blurb, $ext, $file_size) = @{$ref};
+		
+		#print "$history_id, $history_name, $create_time, $dataset_id, $dataset_time, $dataset_name, $info, $blurb, $ext, $file_size\n\n";
+		
+		$history_data{$history_id}{create_time} = $create_time;
+		$history_data{$history_id}{history_name} = $history_name;
+		
+		$file_data{$history_id}{$dataset_id}{dataset_update_time} = $dataset_time;
+		$file_data{$history_id}{$dataset_id}{dataset_name} = $dataset_name;
+		$file_data{$history_id}{$dataset_id}{info} = $info;
+		$file_data{$history_id}{$dataset_id}{blurb} = $blurb;		
+		$file_data{$history_id}{$dataset_id}{file_size} = $file_size;		
+		$file_data{$history_id}{$dataset_id}{ext} = $ext;
+	}	
+	$sth->finish;
+}
+
+sub print_galaxy_data
+{
+	foreach my $hist_id (sort numerically keys %history_data)
+	{
+		my $hist_name = $history_data{$hist_id}{history_name};
+		if ($selected_dir)
+		{
+			# if the user has opted to see just one dir...
+			unless($hist_name eq $selected_dir) {next;}
+		}
+		
+		my $hist_date = $history_data{$hist_id}{create_time};
+		
+		print "\n";
+		print "$hist_date - $hist_name\n";
+		
+		foreach my $dataset_id (sort numerically keys %{$file_data{$hist_id}})
+		{
+			my $dataset_time = $file_data{$hist_id}{$dataset_id}{dataset_update_time};
+			my $dataset_name = $file_data{$hist_id}{$dataset_id}{dataset_name};
+			my $info = $file_data{$hist_id}{$dataset_id}{info};
+			my $blurb = $file_data{$hist_id}{$dataset_id}{blurb};
+			my $file_size = $file_data{$hist_id}{$dataset_id}{file_size};
+			my $ext = $file_data{$hist_id}{$dataset_id}{ext};
+			my $file_path = &derive_file_path($dataset_id);
+			
+			if (($blurb) and ($blurb eq 'empty'))
+			{
+				unless ($show_empty_files) {next;}
+			}
+			if (($blurb) and ($blurb eq 'error'))
+			{
+				unless ($show_error_files) {next;}
+			}
+			
+			print "\t$dataset_time - $dataset_name";
+			print " $file_path";
+			if ($show_file_info)
+			{
+				print " [size:$file_size; type:$ext;";
+				if (($info) and ($blurb)) { print " $info; $blurb"; }
+				elsif ($info) { print " $info"; }
+				elsif ($blurb) { print " $blurb"; }
+				print ']';
+			}
+			print "\n";
+		}
+	}
+}
+
+sub derive_file_path
+{
+	my ($dataset_id) = @_;
+	my $dir = sprintf("%06d", $dataset_id);
+	$dir =~ s/\d\d\d$//;
+	my $full_path = $db_root_dir . $dir . '/dataset_' . $dataset_id . '.dat';
+	return ($full_path);	
+}
+
+sub numerically
+{
+	$a <=> $b;
+}
+
+
+
+
+
+
+
+
diff --git a/contrib/nagios/README b/contrib/nagios/README
new file mode 100644
index 0000000..e68af07
--- /dev/null
+++ b/contrib/nagios/README
@@ -0,0 +1 @@
+Nagios checks for Galaxy.  check_galaxy is used to call check_galaxy.py.
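A minimal sketch of a Nagios command definition wrapping the check above; the installation path and credentials are assumptions, not part of this distribution:

    define command {
        command_name  check_galaxy
        command_line  /path/to/galaxy/contrib/nagios/check_galaxy $HOSTADDRESS$ nagios-user password
    }
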
diff --git a/contrib/nagios/check_galaxy b/contrib/nagios/check_galaxy
new file mode 100755
index 0000000..ce7931a
--- /dev/null
+++ b/contrib/nagios/check_galaxy
@@ -0,0 +1,40 @@
+#!/bin/sh
+
+if [ -z "$3" ]; then
+    echo "usage: check_galaxy <server> <username> <password>"
+    exit 3
+fi
+
+here=`dirname $0`
+var="$HOME/.check_galaxy/$1"
+
+mkdir -p "$var"
+touch "$var/iterations"
+iterations=`cat $var/iterations`
+if [ -z "$iterations" ]; then
+    iterations=0
+fi
+
+new_history=''
+if [ $iterations -gt 96 ]; then
+    new_history='-n'
+    echo 0 > $var/iterations
+else
+    echo `expr $iterations + 1` > $var/iterations
+fi
+
+date >> $var/log
+status=`python $here/check_galaxy.py $new_history $1 $2 $3 2>&1 | tee -a $var/log | tail -n 1`
+
+echo "$status"
+
+case "$status" in
+    "Exception: Tool never finished")
+        exit 1
+        ;;
+    "OK")
+        exit 0
+        ;;
+    *)
+        exit 2
+        ;;
+esac
diff --git a/contrib/nagios/check_galaxy.py b/contrib/nagios/check_galaxy.py
new file mode 100755
index 0000000..dd45f98
--- /dev/null
+++ b/contrib/nagios/check_galaxy.py
@@ -0,0 +1,394 @@
+#!/usr/bin/env python
+"""
+check_galaxy can be run by hand, although it is meant to run from cron
+via the check_galaxy.sh script in Galaxy's cron/ directory.
+"""
+from __future__ import print_function
+
+import filecmp
+import formatter
+import getopt
+import htmllib
+import json
+import os
+import socket
+import sys
+import tempfile
+import time
+import warnings
+
+with warnings.catch_warnings():
+    warnings.simplefilter('ignore')
+    import twill
+    import twill.commands as tc
+
+# options
+if "DEBUG" in os.environ:
+    debug = os.environ["DEBUG"]
+else:
+    debug = False
+
+test_data_dir = os.path.join( os.path.dirname( __file__ ), 'check_galaxy_data' )
+# what tools to run - not so pretty
+tools = {
+    "Extract+genomic+DNA+1":
+    [
+        {
+            "inputs":
+            (
+                {
+                    "file_path": os.path.join( test_data_dir, "1.bed" ),
+                    "dbkey": "hg17",
+                },
+
+            )
+        },
+        { "check_file": os.path.join( test_data_dir, "extract_genomic_dna_out1.fasta" ) },
+        {
+            "tool_run_options":
+            {
+                "input": "1.bed",
+                "interpret_features": "yes",
+                "index_source": "cached",
+                "out_format": "fasta"
+            }
+        }
+    ]
+}
+
+
+# handle arg(s)
+def usage():
+    sys.exit("usage: check_galaxy.py <server> <username> <password>")
+
+
+try:
+    opts, args = getopt.getopt( sys.argv[1:], 'n' )
+except getopt.GetoptError as e:
+    print(str(e))
+    usage()
+if len( args ) < 1:
+    usage()
+server = args[0]
+username = args[1]
+password = args[2]
+
+new_history = False
+for o, a in opts:
+    if o == "-n":
+        if debug:
+            print("Specified -n, will create a new history")
+        new_history = True
+    else:
+        usage()
+
+# state information
+var_dir = os.path.join( os.path.expanduser('~'), ".check_galaxy", server )
+if not os.access( var_dir, os.F_OK ):
+    os.makedirs( var_dir, 0o700 )
+
+# default timeout for twill browser is never
+socket.setdefaulttimeout(300)
+
+# user-agent
+tc.agent("Mozilla/5.0 (compatible; check_galaxy/0.1)")
+tc.config('use_tidy', 0)
+
+
+class Browser:
+    def __init__(self):
+        self.server = server
+        self.tool = None
+        self.tool_opts = None
+        self._hda_id = None
+        self._hda_state = None
+        self._history_id = None
+        self.check_file = None
+        self.cookie_jar = os.path.join( var_dir, "cookie_jar" )
+        dprint("cookie jar path: %s" % self.cookie_jar)
+        if not os.access(self.cookie_jar, os.R_OK):
+            dprint("no cookie jar at above path, creating")
+            tc.save_cookies(self.cookie_jar)
+        tc.load_cookies(self.cookie_jar)
+
+    def get(self, path):
+        tc.go("http://%s%s" % (self.server, path))
+        tc.code(200)
+
+    def reset(self):
+        self.tool = None
+        self.tool_opts = None
+        self._hda_id = None
+        self._hda_state = None
+        self._history_id = None
+        self.check_file = None
+        if new_history:
+            self.get("/history/delete_current")
+            tc.save_cookies(self.cookie_jar)
+        self.delete_datasets()
+
+    def check_redir(self, url):
+        try:
+            tc.get_browser()._browser.set_handle_redirect(False)
+            tc.go(url)
+            tc.code(302)
+            tc.get_browser()._browser.set_handle_redirect(True)
+            dprint( "%s is returning redirect (302)" % url )
+            return(True)
+        except twill.errors.TwillAssertionError as e:
+            tc.get_browser()._browser.set_handle_redirect(True)
+            dprint( "%s is not returning redirect (302): %s" % (url, e) )
+            code = tc.browser.get_code()
+            if code == 502:
+                sys.exit("Galaxy is down (code 502)")
+            return False
+
+    def login(self, user, pw):
+        self.get("/user/login")
+        tc.fv("1", "email", user)
+        tc.fv("1", "password", pw)
+        tc.submit("Login")
+        tc.code(200)
+        if len(tc.get_browser().get_all_forms()) > 0:
+            # uh ohs, fail
+            p = userParser()
+            p.feed(tc.browser.get_html())
+            if p.no_user:
+                dprint("user does not exist, will try creating")
+                self.create_user(user, pw)
+            elif p.bad_pw:
+                raise Exception("Password is incorrect")
+            else:
+                raise Exception("Unknown error logging in")
+        tc.save_cookies(self.cookie_jar)
+
+    def create_user(self, user, pw):
+        self.get("/user/create")
+        tc.fv("1", "email", user)
+        tc.fv("1", "password", pw)
+        tc.fv("1", "confirm", pw)
+        tc.submit("Submit")
+        tc.code(200)
+        if len(tc.get_browser().get_all_forms()) > 0:
+            p = userParser()
+            p.feed(tc.browser.get_html())
+            if p.already_exists:
+                raise Exception('The user you were trying to create already exists')
+
+    def upload(self, input):
+        self.get("/tool_runner/index?tool_id=upload1")
+        tc.fv("1", "file_type", "bed")
+        tc.fv("1", "dbkey", input.get('dbkey', '?'))
+        tc.formfile("1", "file_data", input['file_path'])
+        tc.submit("runtool_btn")
+        tc.code(200)
+
+    def runtool(self):
+        self.get("/tool_runner/index?tool_id=%s" % self.tool)
+        for k, v in self.tool_opts.items():
+            tc.fv("1", k, v)
+        tc.submit("runtool_btn")
+        tc.code(200)
+
+    @property
+    def history_id(self):
+        if self._history_id is None:
+            self.get('/api/histories')
+            self._history_id = json.loads(tc.browser.get_html())[0]['id']
+        return self._history_id
+
+    @property
+    def history_contents(self):
+        self.get('/api/histories/%s/contents' % self.history_id)
+        return json.loads(tc.browser.get_html())
+
+    @property
+    def hda_id(self):
+        if self._hda_id is None:
+            self.set_top_hda()
+        return self._hda_id
+
+    @property
+    def hda_state(self):
+        if self._hda_state is None:
+            self.set_top_hda()
+        return self._hda_state
+
+    def set_top_hda(self):
+        self.get(self.history_contents[-1]['url'])
+        hda = json.loads(tc.browser.get_html())
+        self._hda_id = hda['id']
+        self._hda_state = hda['state']
+
+    @property
+    def undeleted_hdas(self):
+        rval = []
+        for item in self.history_contents:
+            self.get(item['url'])
+            hda = json.loads(tc.browser.get_html())
+            if hda['deleted'] is False:
+                rval.append(hda)
+        return rval
+
+    @property
+    def history_state(self):
+        self.get('/api/histories/%s' % self.history_id)
+        return json.loads(tc.browser.get_html())['state']
+
+    @property
+    def history_state_terminal(self):
+        if self.history_state not in ['queued', 'running', 'paused']:
+            return True
+        return False
+
+    def wait(self):
+        sleep_amount = 1
+        count = 0
+        maxiter = 16
+        while count < maxiter:
+            count += 1
+            if not self.history_state_terminal:
+                time.sleep( sleep_amount )
+                sleep_amount += 1
+            else:
+                break
+        if count == maxiter:
+            raise Exception("Tool never finished")
+
+    def check_state(self):
+        if self.hda_state != "ok":
+            self.get("/datasets/%s/stderr" % self.hda_id)
+            print(tc.browser.get_html())
+            raise Exception("HDA %s NOT OK: %s" % (self.hda_id, self.hda_state))
+
+    def diff(self):
+        self.get("/datasets/%s/display?to_ext=%s" % (self.hda_id, self.tool_opts.get('out_format', 'fasta')))
+        data = tc.browser.get_html()
+        tmp = tempfile.mkstemp()
+        dprint("tmp file: %s" % tmp[1])
+        tmpfh = os.fdopen(tmp[0], 'w')
+        tmpfh.write(data)
+        tmpfh.close()
+        if filecmp.cmp(tmp[1], self.check_file):
+            dprint("Tool output is as expected")
+        else:
+            if not debug:
+                os.remove(tmp[1])
+            raise Exception("Tool output differs from expected")
+        if not debug:
+            os.remove(tmp[1])
+
+    def delete_datasets(self):
+        for hda in self.undeleted_hdas:
+            self.get('/datasets/%s/delete' % hda['id'])
+        hdas = [hda['id'] for hda in self.undeleted_hdas]
+        if hdas:
+            print("Remaining datasets ids:", " ".join(hdas))
+            raise Exception("History still contains datasets after attempting to delete them")
+
+    def check_if_logged_in(self):
+        self.get("/user?cntrller=user")
+        p = loggedinParser()
+        p.feed(tc.browser.get_html())
+        return p.logged_in
+
+
+class userParser(htmllib.HTMLParser):
+    def __init__(self):
+        htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+        self.in_span = False
+        self.in_div = False
+        self.no_user = False
+        self.bad_pw = False
+        self.already_exists = False
+
+    def start_span(self, attrs):
+        self.in_span = True
+
+    def start_div(self, attrs):
+        self.in_div = True
+
+    def end_span(self):
+        self.in_span = False
+
+    def end_div(self):
+        self.in_div = False
+
+    def handle_data(self, data):
+        if self.in_span or self.in_div:
+            if data == "No such user (please note that login is case sensitive)":
+                self.no_user = True
+            elif data == "Invalid password":
+                self.bad_pw = True
+            elif data == "User with that email already exists":
+                self.already_exists = True
+
+
+class loggedinParser(htmllib.HTMLParser):
+    def __init__(self):
+        htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+        self.in_p = False
+        self.logged_in = False
+
+    def start_p(self, attrs):
+        self.in_p = True
+
+    def end_p(self):
+        self.in_p = False
+
+    def handle_data(self, data):
+        if self.in_p:
+            if data == "You are currently not logged in.":
+                self.logged_in = False
+            elif data.startswith( "You are currently logged in as " ):
+                self.logged_in = True
+
+
+def dprint(msg):
+    if debug:
+        print(msg)
+
+
+if __name__ == "__main__":
+
+    dprint("checking %s" % server)
+
+    b = Browser()
+
+    # login (or not)
+    if b.check_if_logged_in():
+        dprint("we are already logged in (via cookies), hooray!")
+    else:
+        dprint("not logged in... logging in")
+        b.login(username, password)
+
+    for tool, params in tools.items():
+
+        check_file = ""
+
+        # make sure history and state is clean
+        b.reset()
+        b.tool = tool
+
+        # get all the tool run conditions
+        for param_dict in params:
+            for k, v in param_dict.items():
+                if k == 'inputs':
+                    for tool_input in v:
+                        b.upload(tool_input)
+                    b.wait()
+                elif k == 'check_file':
+                    b.check_file = v
+                elif k == 'tool_run_options':
+                    b.tool_opts = v
+                else:
+                    raise Exception("Unknown key in tools dict: %s" % k)
+
+        b.runtool()
+        b.wait()
+        b.check_state()
+        b.diff()
+        b.delete_datasets()
+
+    print("OK")
+    sys.exit(0)
diff --git a/create_db.sh b/create_db.sh
new file mode 100755
index 0000000..976d279
--- /dev/null
+++ b/create_db.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+: ${GALAXY_VIRTUAL_ENV:=.venv}
+
+if [ -d "$GALAXY_VIRTUAL_ENV" ];
+then
+    printf "Activating virtualenv at $GALAXY_VIRTUAL_ENV\n"
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+
+cd `dirname $0`
+python ./scripts/create_db.py "$@"
diff --git a/cron/README.txt b/cron/README.txt
new file mode 100644
index 0000000..8c8c2e9
--- /dev/null
+++ b/cron/README.txt
@@ -0,0 +1,15 @@
+This folder holds cron jobs that support Galaxy.
+
+updateucsc.sh is a shell script to facilitate the updates from UCSC.
+Galaxy stores several files locally to speed up operations that depend
+on information from UCSC.  These files can all be found in the
+static/ucsc folder.
+
+Before adding updateucsc.sh to the crontab, it is important to note
+two things.  First, updateucsc.sh must be edited to point towards the
+root galaxy directory.  At the top of the file there is a variable
+"GALAXY" that should be edited.  Second, the updates come from UCSC
+via their table browser.  While the tendency is typically to run cron
+jobs late at night, UCSC, like most sites, tends to take down its
+servers at odd hours for maintenance.  The update scripts for UCSC are
+not CPU intensive and should be scheduled during normal hours.
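For example, a crontab entry along these lines runs the update weekly during working hours (the installation path and schedule are assumptions to adapt to your site):

    # min hour dom mon dow  command
    30 14 * * 1  /home/galaxy/galaxy/cron/updateucsc.sh >> /home/galaxy/galaxy/updateucsc.log 2>&1
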
diff --git a/cron/add_manual_builds.py b/cron/add_manual_builds.py
new file mode 100644
index 0000000..263a2fc
--- /dev/null
+++ b/cron/add_manual_builds.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+
+"""
+Adds Manually created builds and chrom info to Galaxy's info tables
+
+Usage:
+python add_manual_builds.py input_file builds.txt chrom_length_dir
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+
+def add_manual_builds(input_file, build_file, chr_dir):
+    # determine existing builds, so as to not overwrite
+    existing_builds = []
+    for line in open(build_file):
+        try:
+            if line.startswith("#"):
+                continue
+            existing_builds.append(line.replace("\n", "").replace("\r", "").split("\t")[0])
+        except:
+            continue
+    build_file_out = open(build_file, 'a')
+    for line in open(input_file):
+        try:
+            fields = line.replace("\n", "").replace("\r", "").split("\t")
+            build = fields.pop(0)
+            if build in existing_builds:
+                continue  # if build exists, leave alone
+            name = fields.pop(0)
+            try:  # get chrom lens if included in file, otherwise still add build
+                chrs = fields.pop(0).split(",")
+            except:
+                chrs = []
+            print(build + "\t" + name + " (" + build + ")", file=build_file_out)
+            if chrs:  # create len file if provided chrom lens
+                chr_len_out = open(os.path.join(chr_dir, build + ".len"), 'w')
+                for chr in chrs:
+                    print(chr.replace("=", "\t"), file=chr_len_out)
+                chr_len_out.close()
+        except:
+            continue
+    build_file_out.close()
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 4:
+        sys.exit("USAGE: python add_manual_builds.py input_file builds.txt chrom_length_dir")
+    input_file = sys.argv[1]
+    build_file = sys.argv[2]
+    chr_dir = sys.argv[3]
+    add_manual_builds(input_file, build_file, chr_dir)
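Based on the parsing above, each input_file line is tab-separated: the build key, a display name, and an optional comma-separated list of chrom=length pairs. An illustrative line (all values made up):

    myBuild1	My Organism Jan. 2012	chr1=1000000,chr2=800000

This appends "myBuild1	My Organism Jan. 2012 (myBuild1)" to builds.txt and writes the two chromosome lengths to myBuild1.len in the chrom length directory.
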
diff --git a/cron/build_chrom_db.py b/cron/build_chrom_db.py
new file mode 100644
index 0000000..ce472d2
--- /dev/null
+++ b/cron/build_chrom_db.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+"""
+Connects to a UCSC table browser and scrapes chrominfo for every build
+specified by an input file (such as one output by parse_builds.py).
+If not input file specified, it will connect using parse_builds.py to
+retrieve a list of available builds.
+
+All chromInfo is placed in a path with the convention
+{dbpath}/buildname.len
+
+Usage:
+python build_chrom_db.py dbpath/ [builds_file]
+"""
+from __future__ import print_function
+
+import fileinput
+import os
+import sys
+
+from six.moves.urllib.parse import urlencode
+from six.moves.urllib.request import urlopen
+
+import parse_builds
+
+
+def getchrominfo(url, db):
+    tableURL = "http://genome-test.cse.ucsc.edu/cgi-bin/hgTables?"
+    URL = tableURL + urlencode({
+        "clade": "",
+        "org": "",
+        "db": db,
+        "hgta_outputType": "primaryTable",
+        "hgta_group": "allTables",
+        "hgta_table": "chromInfo",
+        "hgta_track": db,
+        "hgta_regionType": "",
+        "position": "",
+        "hgta_doTopSubmit": "get info"})
+    page = urlopen(URL)
+    for line in page:
+        line = line.rstrip( "\r\n" )
+        if line.startswith("#"):
+            continue
+        fields = line.split("\t")
+        if len(fields) > 1 and len(fields[0]) > 0 and int(fields[1]) > 0:
+            yield [fields[0], fields[1]]
+        else:
+            raise Exception("Problem parsing line '%s'" % line)
+
+
+if __name__ == "__main__":
+    if len(sys.argv) == 1:
+        sys.exit("Path to place chromInfo tables must be specified.")
+    dbpath = sys.argv[1]
+    builds = []
+    if len(sys.argv) > 2:
+        try:
+            buildfile = fileinput.FileInput(sys.argv[2])
+            for line in buildfile:
+                if line.startswith("#"):
+                    continue
+                builds.append(line.split("\t")[0])
+        except:
+            sys.exit("Bad input file.")
+    else:
+        try:
+            for build in parse_builds.getbuilds("http://genome.cse.ucsc.edu/cgi-bin/das/dsn"):
+                builds.append(build[0])
+        except:
+            sys.exit("Unable to retrieve builds.")
+    for build in builds:
+        if build == "?":
+            continue  # no lengths for unspecified chrom
+        print("Retrieving " + build)
+        outfile_name = os.path.join(dbpath, build + ".len")
+        try:
+            with open(outfile_name, "w") as outfile:
+                for chrominfo in getchrominfo("http://genome-test.cse.ucsc.edu/cgi-bin/hgTables?", build):
+                    print("\t".join(chrominfo), file=outfile)
+        except Exception as e:
+            print("Failed to retrieve %s: %s" % (build, e))
+            os.remove(outfile_name)
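Each resulting {dbpath}/buildname.len file holds two tab-separated columns, chromosome name and length, as yielded by getchrominfo() above. For illustration (hg19 values):

    chr1	249250621
    chr2	243199373
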
diff --git a/cron/check_galaxy.sh b/cron/check_galaxy.sh
new file mode 100755
index 0000000..62190b5
--- /dev/null
+++ b/cron/check_galaxy.sh
@@ -0,0 +1,222 @@
+#!/bin/sh
+#set -xv
+#
+# Runs the scripts/check_galaxy.py script in a way that's easy to handle from cron
+#
+
+# defaults (note: default sleep is below since it depends on debug)
+DEBUG=0
+STAGGER=0
+INTERVAL=3
+MAIL=
+PAGE=
+NEWHIST=
+BADARG=0
+# get commandline opts
+while getopts dsi:l:m:p:n optname
+do
+    case $optname in
+        d)  DEBUG=1 ;;
+        s)  STAGGER=1 ;;
+        i)  INTERVAL=$OPTARG ;;
+        l)  SLEEP=$OPTARG ;;
+        m)  MAIL="$MAIL $OPTARG" ;;
+        p)  PAGE="$PAGE $OPTARG" ;;
+        n)  NEWHIST="-n" ;;
+        *)  BADARG=1 ;;
+    esac
+done
+shift `expr $OPTIND - 1`
+
+if [ -z "$1" -o "$BADARG" ]; then
+    cat <<EOF
+usage: `basename $0` [-ds] [-i interval] [-m email_address]+ [-p pager_address]+ <galaxy_host>
+  -d            Print debugging information.
+  -s            Stagger mailing the pagers/emails, instead of all at once when
+                there's a problem.  Useful for running check_galaxy at night.
+  -i <interval> The number of times this wrapper should execute before mailing
+                the next address, when staggering is enabled.  Mail is sent
+                every <interval> runs of the program, so the actual time
+                between emails is:
+                  time = (<interval>) * (how often wrapper runs from cron)
+  -l <seconds>  This wrapper runs check_galaxy a second time if the first check
+                fails, in case the problem is intermittent.  <seconds> is how
+                many seconds to sleep between checks.
+  -m <address>  Email addresses to send the full check_galaxy output to, if
+                Galaxy is down.  Use multiple -m options to specify multiple
+                addresses.  When staggering, email will be sent in the order
+                which you specify -m options on the command line.
+  -p <address>  Like -m, but sends just the last line of check_galaxy's output.
+                Useful for pagers.  When staggering is enabled and both -m and
+                -p options are present, the first -m address and the first -p
+                address are mailed simultaneously, followed by the second -m
+                and second -p, and so on.
+  -n            Create a new history (passes the -n option to check_galaxy.py).
+  <galaxy_host> The hostname of the Galaxy server to check.  Use a : if running
+                on a non-80 port (e.g. galaxy.example.com:8080).
+EOF
+    exit 1
+fi
+
+if [ -z "$SLEEP" ]; then
+    if [ $DEBUG = 1 ]; then
+        SLEEP=2
+    else
+        SLEEP=60
+    fi
+fi
+
+# globals
+CRON_DIR=`dirname $0`
+SCRIPTS_DIR="$CRON_DIR/../scripts"
+CHECK_GALAXY="$SCRIPTS_DIR/check_galaxy.py"
+VAR="$HOME/.check_galaxy"
+
+# sanity
+if [ ! -f $CHECK_GALAXY ]; then
+    [ $DEBUG = 1 ] && echo "$CHECK_GALAXY is missing"
+    exit 0
+fi
+
+# Do any other systems' default ps not take BSD ps args?
+case `uname -s` in
+    SunOS)  PS="/usr/ucb/ps" ;;
+    *)      PS="ps" ;;
+esac
+
+NOTIFIED_MAIL="$VAR/$1/mail"
+NOTIFIED_PAGE="$VAR/$1/page"
+MUTEX="$VAR/$1/wrap.mutex"
+COUNT="$VAR/$1/wrap.count"
+STAGGER_FILE="$VAR/$1/wrap.stagger"
+for dir in $VAR/$1 $NOTIFIED_MAIL $NOTIFIED_PAGE; do
+    if [ ! -d $dir ]; then
+        mkdir -p -m 0700 $dir
+        if [ $? -ne 0 ]; then
+            [ $DEBUG = 1 ] && echo "unable to create dir: $dir"
+            exit 0
+        fi
+    fi
+done
+
+if [ ! -f "$VAR/$1/login" ]; then
+    [ $DEBUG = 1 ] && cat <<EOF
+Please create the file:
+  $VAR/$1/login
+This should contain a username and password to log in to
+Galaxy with, on one line, separated by whitespace, e.g.:
+
+check_galaxy at example.com password
+
+If the user does not exist, check_galaxy will create it
+for you.
+EOF
+    exit 0
+fi
+
+if [ $STAGGER = 1 ]; then
+    if [ -f "$STAGGER_FILE" ]; then
+        STAGGER_COUNT=`cat $STAGGER_FILE`
+    else
+        STAGGER_COUNT=$INTERVAL
+    fi
+fi
+
+# only run one at once
+if [ -f $MUTEX ]; then
+    pid=`cat $MUTEX`
+    $PS p $pid >/dev/null 2>&1
+    if [ $? -eq 0 ]; then
+        if [ -f $COUNT ]; then
+            count=`cat $COUNT`
+        else
+            count=0
+        fi
+        if [ "$count" -eq 3 ]; then
+            echo "A check_galaxy process for $1 has been running for an unusually long time.  Something is broken." \
+                | mail -s "$1 problems" $MAIL
+        fi
+        expr $count + 1 > $COUNT
+        exit 0
+    else
+        # stale mutex
+        rm -f $MUTEX
+    fi
+fi
+
+rm -f $COUNT
+echo $$ > $MUTEX
+
+[ $DEBUG = 1 ] && echo "running first check"
+first_try=`$CHECK_GALAXY $NEWHIST $1 2>&1`
+
+if [ $? -ne 0 ]; then
+    # if failure, wait and try again
+    [ $DEBUG = 1 ] && echo "first check failed, sleeping $SLEEP seconds for second run"
+    sleep $SLEEP
+else
+    # if successful
+    [ $DEBUG = 1 ] && echo "first check succeeded"
+    for file in $NOTIFIED_MAIL/* $NOTIFIED_PAGE/*; do
+    	recip=`basename $file`
+    	# the literal string including the * will be passed if the dir is empty
+	[ "$recip" = '*' ] && continue
+        echo "$1 is now okay" | mail -s "$1 OK" $recip
+        rm -f $file
+        [ $DEBUG = 1 ] && echo "up: mailed $recip"
+    done
+    rm -f $MUTEX $STAGGER_FILE
+    exit 0
+fi
+
+[ $DEBUG = 1 ] && echo "running second check"
+second_try=`$CHECK_GALAXY $NEWHIST $1 2>&1`
+
+if [ $? -ne 0 ]; then
+    [ $DEBUG = 1 ] && echo "second check failed"
+    if [ $STAGGER = 1 ]; then
+        if [ "$STAGGER_COUNT" -eq "$INTERVAL" ]; then
+            # send notification this run
+            echo 1 > $STAGGER_FILE
+        else
+            # don't send notification this run
+	    [ $DEBUG = 1 ] && echo "$1 is down, but it's not time to send an email.  STAGGER_COUNT was $STAGGER_COUNT"
+            expr $STAGGER_COUNT + 1 > $STAGGER_FILE
+            rm -f $MUTEX
+            exit 0
+        fi
+    fi
+    for recip in $MAIL; do
+        if [ ! -f "$NOTIFIED_MAIL/$recip" ]; then
+            cat <<HERE | mail -s "$1 problems" $recip
+$second_try
+HERE
+            touch "$NOTIFIED_MAIL/$recip"
+            [ $DEBUG = 1 ] && echo "dn: mailed $recip"
+            [ $STAGGER = 1 ] && break
+        fi
+    done
+    for recip in $PAGE; do
+        if [ ! -f "$NOTIFIED_PAGE/$recip" ]; then
+            cat <<HERE | tail -1 | mail -s "$1 problems" $recip
+$second_try
+HERE
+            touch "$NOTIFIED_PAGE/$recip"
+            [ $DEBUG = 1 ] && echo "dn: mailed $recip"
+            [ $STAGGER = 1 ] && break
+        fi
+    done
+else
+    [ $DEBUG = 1 ] && echo "second check succeeded"
+    for file in $NOTIFIED_MAIL/* $NOTIFIED_PAGE/*; do
+    	recip=`basename $file`
+	[ "$recip" = '*' ] && continue
+        echo "$1 is now okay" | mail -s "$1 OK" $recip
+        rm -f $file
+        [ $DEBUG = 1 ] && echo "up: mailed $recip"
+    done
+    rm -f $STAGGER_FILE
+fi
+
+rm -f $MUTEX
+exit 0
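A crontab sketch using the options documented in the usage text above (the addresses and host are placeholders):

    */15 * * * *  $HOME/galaxy/cron/check_galaxy.sh -s -i 4 -m admin@example.com -p pager@example.com galaxy.example.com
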
diff --git a/cron/cleanup_datasets.py b/cron/cleanup_datasets.py
new file mode 100644
index 0000000..184be12
--- /dev/null
+++ b/cron/cleanup_datasets.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+
+import sys
+
+sys.exit("This script has been deprecated, replaced by the set of scripts in <galaxy_distribution>/scripts/cleanup_datsets/."
+         "See https://wiki.galaxyproject.org/Admin/Config/Performance/Purge%20Histories%20and%20Datasets for more information.")
diff --git a/cron/parse_builds.py b/cron/parse_builds.py
new file mode 100644
index 0000000..c301971
--- /dev/null
+++ b/cron/parse_builds.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+"""
+Connects to the URL specified and outputs builds available at that
+DSN in tabular format.  UCSC Main gateway is used as default.
+build   description
+"""
+from __future__ import print_function
+
+import sys
+import xml.etree.ElementTree as ElementTree
+
+from six.moves.urllib.request import urlopen
+
+
+def getbuilds(url):
+    try:
+        page = urlopen(url)
+    except:
+        print("#Unable to open " + url)
+        print("?\tunspecified (?)")
+        sys.exit(1)
+
+    text = page.read()
+    try:
+        tree = ElementTree.fromstring(text)
+    except:
+        print("#Invalid xml passed back from " + url)
+        print("?\tunspecified (?)")
+        sys.exit(1)
+
+    print("#Harvested from " + url)
+    print("?\tunspecified (?)")
+    for dsn in tree:
+        build = dsn.find("SOURCE").attrib['id']
+        description = dsn.find("DESCRIPTION").text.replace(" - Genome at UCSC", "").replace(" Genome at UCSC", "")
+
+        fields = description.split(" ")
+        temp = fields[0]
+        for i in range(len(fields) - 1):
+            if temp == fields[i + 1]:
+                fields.pop(i + 1)
+            else:
+                temp = fields[i + 1]
+        description = " ".join(fields)
+        yield [build, description]
+
+
+if __name__ == "__main__":
+    if len(sys.argv) > 1:
+        URL = sys.argv[1]
+    else:
+        URL = "http://genome.cse.ucsc.edu/cgi-bin/das/dsn"
+    for build in getbuilds(URL):
+        print(build[0] + "\t" + build[1] + " (" + build[0] + ")")
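Illustrative output of the script above (the exact descriptions depend on what the DAS server returns):

    #Harvested from http://genome.cse.ucsc.edu/cgi-bin/das/dsn
    ?	unspecified (?)
    hg19	Human Feb. 2009 (GRCh37/hg19) (hg19)
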
diff --git a/cron/parse_builds_3_sites.py b/cron/parse_builds_3_sites.py
new file mode 100644
index 0000000..b22cd44
--- /dev/null
+++ b/cron/parse_builds_3_sites.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+"""
+Connects to sites and determines which builds are available at each.
+"""
+from __future__ import print_function
+
+import xml.etree.ElementTree as ElementTree
+
+from six.moves.urllib.request import urlopen
+
+sites = ['http://genome.ucsc.edu/cgi-bin/',
+         'http://archaea.ucsc.edu/cgi-bin/',
+         'http://genome-test.cse.ucsc.edu/cgi-bin/']
+names = ['main', 'archaea', 'test']
+
+
+def main():
+    for i in range(len(sites)):
+        site = sites[i] + "das/dsn"
+        trackurl = sites[i] + "hgTracks?"
+        builds = []
+        try:
+            page = urlopen(site)
+        except:
+            print("#Unable to connect to " + site)
+            continue
+        text = page.read()
+        try:
+            tree = ElementTree.fromstring(text)
+        except:
+            print("#Invalid xml passed back from " + site)
+            continue
+        print("#Harvested from", site)
+
+        for dsn in tree:
+            build = dsn.find("SOURCE").attrib['id']
+            builds.append(build)
+        # Deduplicate the build list
+        build_dict = {}
+        for build in builds:
+            build_dict[build] = 0
+        builds = list(build_dict.keys())
+        yield [names[i], trackurl, builds]
+
+
+if __name__ == "__main__":
+    for site in main():
+        print(site[0] + "\t" + site[1] + "\t" + ",".join(site[2]))
diff --git a/cron/updateucsc.sh.sample b/cron/updateucsc.sh.sample
new file mode 100644
index 0000000..4524a86
--- /dev/null
+++ b/cron/updateucsc.sh.sample
@@ -0,0 +1,90 @@
+#!/bin/sh 
+#
+# Script to update UCSC shared data tables.  The idea is to update, but if
+# the update fails, not replace current data/tables with error
+# messages.
+
+## Uncomment and edit this line to refer to galaxy's path:
+#GALAXY=/galaxy/path
+PYTHONPATH=${GALAXY}/lib
+export PYTHONPATH
+
+# setup directories
+echo "Creating required directories."
+DIRS="
+${GALAXY}/tool-data/shared/ucsc/new
+${GALAXY}/tool-data/shared/ucsc/chrom
+${GALAXY}/tool-data/shared/ucsc/chrom/new
+"
+for dir in $DIRS; do
+    if [ ! -d $dir ]; then
+        echo "Creating $dir"
+        mkdir $dir
+    else
+        echo "$dir already exists, continuing."
+    fi
+done
+
+date
+echo "Updating UCSC shared data tables."
+
+# Try to build "builds.txt"
+echo "Updating builds.txt"
+python ${GALAXY}/cron/parse_builds.py > ${GALAXY}/tool-data/shared/ucsc/new/builds.txt
+if [ $? -eq 0 ]
+then
+    diff ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt > /dev/null 2>&1
+    if [ $? -ne 0 ]
+    then
+        cp -f ${GALAXY}/tool-data/shared/ucsc/new/builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt
+    fi
+else
+    echo "Failed to update builds.txt" >&2
+fi
+
+# Try to build ucsc_build_sites.txt
+echo "Updating ucsc_build_sites.txt"
+python ${GALAXY}/cron/parse_builds_3_sites.py > ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt
+if [ $? -eq 0 ]
+then
+    diff ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt > /dev/null 2>&1
+    if [ $? -ne 0 ]
+    then
+        cp -f ${GALAXY}/tool-data/shared/ucsc/new/ucsc_build_sites.txt ${GALAXY}/tool-data/shared/ucsc/ucsc_build_sites.txt
+    fi
+else
+    echo "Failed to update builds.txt" >&2
+fi
+
+# Try to build chromInfo tables
+echo "Building chromInfo tables."
+python ${GALAXY}/cron/build_chrom_db.py ${GALAXY}/tool-data/shared/ucsc/chrom/new/ ${GALAXY}/tool-data/shared/ucsc/builds.txt
+if [ $? -eq 0 ]
+then
+    for src in ${GALAXY}/tool-data/shared/ucsc/chrom/new/*.len
+    do
+        dst=${GALAXY}/tool-data/shared/ucsc/chrom/$(basename "$src")
+        diff "$src" "$dst" > /dev/null 2>&1
+        if [ $? -ne 0 ]
+        then
+            echo "cp -f $src $dst"
+            cp -f "$src" "$dst"
+        fi
+    done
+else
+    echo "Failed to update chromInfo tables." >&2
+fi
+
+rm -rf ${GALAXY}/tool-data/shared/ucsc/new
+rm -rf ${GALAXY}/tool-data/shared/ucsc/chrom/new
+echo "Update complete."
+
+# Perform manual additions here
+echo "Adding Manual Builds."
+python ${GALAXY}/cron/add_manual_builds.py ${GALAXY}/tool-data/shared/ucsc/manual_builds.txt ${GALAXY}/tool-data/shared/ucsc/builds.txt ${GALAXY}/tool-data/shared/ucsc/chrom/
+if [ $? -eq 0 ]
+then
+    echo "Manual addition was successful."
+else
+    echo "Manual addition failed" >&2
+fi
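
The .sample suffix means this script is meant to be copied and edited before use; a possible crontab entry, assuming the edited copy lives at /galaxy/path/cron/updateucsc.sh (both paths are assumptions), would run it weekly:

    # m h dom mon dow   command
    0 3 * * 0   /galaxy/path/cron/updateucsc.sh >> /var/log/updateucsc.log 2>&1
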
diff --git a/database/info.txt b/database/info.txt
new file mode 100644
index 0000000..5b316a0
--- /dev/null
+++ b/database/info.txt
@@ -0,0 +1 @@
+This folder contains the data.
\ No newline at end of file
diff --git a/display_applications/biom/biom_simple.xml b/display_applications/biom/biom_simple.xml
new file mode 100644
index 0000000..9cf1b52
--- /dev/null
+++ b/display_applications/biom/biom_simple.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0"?>
+<display id="biom_simple" version="1.0.0" name="view biom at">
+    <dynamic_links from_data_table="biom_simple_display" skip_startswith="#" id="value" name="name">
+        <url>${ url % { 'biom_file_url_qp': $biom_file.qp } }</url>
+        <param type="data" name="biom_file" url="galaxy_${DATASET_HASH}.biom" />
+    </dynamic_links>
+</display>
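
Entries for the biom_simple_display data table are supplied by the deployer; its url column is %-interpolated with the quoted dataset URL (the 'biom_file_url_qp' key above). Assuming a tab-separated value/name/url layout, a purely hypothetical line could be:

    #value	name	url
    biom_example	View at example.org	https://biom.example.org/view?src=%(biom_file_url_qp)s
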
diff --git a/display_applications/ensembl/ensembl_bam.xml b/display_applications/ensembl/ensembl_bam.xml
new file mode 100644
index 0000000..2f726e1
--- /dev/null
+++ b/display_applications/ensembl/ensembl_bam.xml
@@ -0,0 +1,21 @@
+<display id="ensembl_bam" version="1.0.0" name="display at Ensembl">
+    <!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method -->
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}${site_organism}/Location/View?contigviewbottom=bam:${bam_file.qp}=normal</url>
+        <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" strip_https="True" />
+        <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" strip_https="True" />
+        <param type="template" name="site_organism" strip="True" >
+            $site_organisms[ $site_dbkeys.index( $bam_file.dbkey ) ]
+        </param>
+    </dynamic_links>
+</display>
diff --git a/display_applications/ensembl/ensembl_gff.xml b/display_applications/ensembl/ensembl_gff.xml
new file mode 100644
index 0000000..41dfb81
--- /dev/null
+++ b/display_applications/ensembl/ensembl_gff.xml
@@ -0,0 +1,69 @@
+<display id="ensembl_gff" version="1.0.0" name="display at Ensembl">
+    <!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method -->
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}${site_organism}/Location/View?r=${position};contigviewbottom=url:${gff_file.qp}=normal</url>
+        <param type="data" name="gff_file" url="galaxy_${DATASET_HASH}.gff" />
+        <param type="template" name="site_organism" strip="True" >
+            $site_organisms[ $site_dbkeys.index( $gff_file.dbkey ) ]
+        </param>
+        <param type="template" name="position" strip="True" >
+#set chrom, start, end = $gff_file.datatype.get_estimated_display_viewport( $gff_file )
+#if $chrom is not None:
+##Ensembl itself handles the difference between 'chr1' and '1', except for the viewport, where we need to provide e.g. '1' instead of 'chr1'
+##This is rather naive; it would be more ideal to have actual mappings
+#if $chrom == 'chrM':
+    #set $chrom = 'MT'
+#end if
+#if $chrom.startswith( 'chr' ):
+    #set $chrom = $chrom[3:]
+#end if
+${chrom}:${start}-${end}
+#else:
+##default view is of '1'
+1
+#end if
+        </param>
+    </dynamic_links>
+    <!-- Old Ensembl method of attaching user data via URL -->
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}${site_organism}/contigview?data_URL=${gff_file.qp}${position}</url>
+        <param type="data" name="gff_file" url="galaxy_${DATASET_HASH}.gff" />
+        <param type="template" name="site_organism" strip="True" >
+            $site_organisms[ $site_dbkeys.index( $gff_file.dbkey ) ]
+        </param>
+        <param type="template" name="position" strip="True" >
+            #set chrom, start, end = $gff_file.datatype.get_estimated_display_viewport( $gff_file )
+            #if $chrom is not None:
+            ##Ensembl itself handles the difference between 'chr1' and '1', except for the viewport, where we need to provide e.g. '1' instead of 'chr1'
+            ##This is rather naive; it would be more ideal to have actual mappings
+            #if $chrom == 'chrM':
+                #set $chrom = 'MT'
+            #end if
+            #if $chrom.startswith( 'chr' ):
+                #set $chrom = $chrom[3:]
+            #end if
+            &chr=${chrom}&start=${start}&end=${end}
+            #end if
+        </param>
+    </dynamic_links>
+</display>
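
The viewport templates above normalize UCSC-style chromosome names to Ensembl's conventions; the same logic as a small Python sketch (the function name is ours, not Galaxy's):

    def ucsc_to_ensembl_chrom(chrom):
        """Map UCSC chromosome names ('chrM', 'chr1') to Ensembl ('MT', '1')."""
        if chrom == 'chrM':
            return 'MT'
        if chrom.startswith('chr'):
            return chrom[3:]
        return chrom
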
diff --git a/display_applications/ensembl/ensembl_interval_as_bed.xml b/display_applications/ensembl/ensembl_interval_as_bed.xml
new file mode 100644
index 0000000..52871ee
--- /dev/null
+++ b/display_applications/ensembl/ensembl_interval_as_bed.xml
@@ -0,0 +1,69 @@
+<display id="ensembl_interval" version="1.0.0" name="display at Ensembl">
+    <!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method -->
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}${site_organism}/Location/View?r=${position};contigviewbottom=url:${bed_file.qp}=normal</url>
+        <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/>
+        <param type="template" name="site_organism" strip="True" >
+            $site_organisms[ $site_dbkeys.index( $bed_file.dbkey ) ]
+        </param>
+        <param type="template" name="position" strip="True" >
+#set chrom, start, end = $bed_file.datatype.get_estimated_display_viewport( $bed_file )
+#if $chrom is not None:
+##Ensembl itself handles the difference between 'chr1' and '1', except for the viewport, where we need to provide e.g. '1' instead of 'chr1'
+##This is rather naive; it would be more ideal to have actual mappings
+#if $chrom == 'chrM':
+    #set $chrom = 'MT'
+#end if
+#if $chrom.startswith( 'chr' ):
+    #set $chrom = $chrom[3:]
+#end if
+${chrom}:${int( start ) + 1}-${end}
+#else:
+##default view is of '1'
+1
+#end if
+        </param>
+    </dynamic_links>
+    <!-- Old Ensembl method of attaching user data via URL -->
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}${site_organism}/contigview?data_URL=${bed_file.qp}${position}</url>
+        <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/>
+        <param type="template" name="site_organism" strip="True" >
+            $site_organisms[ $site_dbkeys.index( $bed_file.dbkey ) ]
+        </param>
+        <param type="template" name="position" strip="True" >
+            #set chrom, start, end = $bed_file.datatype.get_estimated_display_viewport( $bed_file )
+            #if $chrom is not None:
+            ##Ensembl itself handles the difference between 'chr1' and '1', except for the viewport, where we need to provide e.g. '1' instead of 'chr1'
+            ##This is rather naive; it would be more ideal to have actual mappings
+            #if $chrom == 'chrM':
+                #set $chrom = 'MT'
+            #end if
+            #if $chrom.startswith( 'chr' ):
+                #set $chrom = $chrom[3:]
+            #end if
+            &chr=${chrom}&start=${int( start ) + 1}&end=${end}
+            #end if
+        </param>
+    </dynamic_links>
+</display>
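
Unlike the GFF version, the BED viewport uses ${int( start ) + 1}: BED intervals are 0-based and half-open while Ensembl positions are 1-based and inclusive, so only the start needs shifting. In Python terms:

    # A BED line 'chr7 999 2000' covers 1-based bases 1000..2000.
    bed_start, bed_end = 999, 2000
    ensembl_position = "%d-%d" % (bed_start + 1, bed_end)  # '1000-2000'
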
diff --git a/display_applications/gbrowse/gbrowse_gff.xml b/display_applications/gbrowse/gbrowse_gff.xml
new file mode 100644
index 0000000..a564b4c
--- /dev/null
+++ b/display_applications/gbrowse/gbrowse_gff.xml
@@ -0,0 +1,29 @@
+<display id="gbrowse_gff" version="1.0.0" name="display at GBrowse">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter>
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}${site_organism}/?${position}eurl=${gff_file.qp}</url>
+        <param type="data" name="gff_file" url="galaxy_${DATASET_HASH}.gff" />
+        <param type="template" name="site_organism" strip="True" >
+            $site_organisms[ $site_dbkeys.index( $gff_file.dbkey ) ]
+        </param>
+        <param type="template" name="position" strip="True" >
+#set chrom, start, end = $gff_file.datatype.get_estimated_display_viewport( $gff_file )
+#if $chrom is not None:
+#if $chrom.startswith( 'chr' ):
+    #set $chrom = $chrom[3:]
+#end if
+q=${chrom}:${start}..${end}&
+#end if
+        </param>
+    </dynamic_links>
+</display>
diff --git a/display_applications/gbrowse/gbrowse_interval_as_bed.xml b/display_applications/gbrowse/gbrowse_interval_as_bed.xml
new file mode 100644
index 0000000..67e2f44
--- /dev/null
+++ b/display_applications/gbrowse/gbrowse_interval_as_bed.xml
@@ -0,0 +1,29 @@
+<display id="gbrowse_interval_as_bed" version="1.0.0" name="display at GBrowse">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter>
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}${site_organism}/?${position}eurl=${bed_file.qp}</url>
+        <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/> <!-- Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, gbrowse does not(?): force use of converter which will make strict BED6+ file -->
+        <param type="template" name="site_organism" strip="True" >
+            $site_organisms[ $site_dbkeys.index( $bed_file.dbkey ) ]
+        </param>
+        <param type="template" name="position" strip="True" >
+#set chrom, start, end = $bed_file.datatype.get_estimated_display_viewport( $bed_file )
+#if $chrom is not None:
+#if $chrom.startswith( 'chr' ):
+    #set $chrom = $chrom[3:]
+#end if
+q=${chrom}:${start}..${end}&
+#end if
+        </param>
+    </dynamic_links>
+</display>
diff --git a/display_applications/gbrowse/gbrowse_wig.xml b/display_applications/gbrowse/gbrowse_wig.xml
new file mode 100644
index 0000000..5ee6daf
--- /dev/null
+++ b/display_applications/gbrowse/gbrowse_wig.xml
@@ -0,0 +1,29 @@
+<display id="gbrowse_wig" version="1.0.0" name="display at GBrowse">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter>
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}${site_organism}/?${position}eurl=${wig_file.qp}</url>
+        <param type="data" name="wig_file" url="galaxy_${DATASET_HASH}.wig" format="wig"/>
+        <param type="template" name="site_organism" strip="True" >
+            $site_organisms[ $site_dbkeys.index( $wig_file.dbkey ) ]
+        </param>
+        <param type="template" name="position" strip="True" >
+#set chrom, start, end = $wig_file.datatype.get_estimated_display_viewport( $wig_file )
+#if $chrom is not None:
+#if $chrom.startswith( 'chr' ):
+    #set $chrom = $chrom[3:]
+#end if
+q=${chrom}:${start}..${end}&
+#end if
+        </param>
+    </dynamic_links>
+</display>
diff --git a/display_applications/igb/bam.xml b/display_applications/igb/bam.xml
new file mode 100644
index 0000000..b9e9ce3
--- /dev/null
+++ b/display_applications/igb/bam.xml
@@ -0,0 +1,13 @@
+<display id="igb_bam" version="0.0.0" name="display in IGB">
+    <link id="View" name="View">
+        <url>http://bioviz.org/igb/galaxy.html?version=${bam_file.dbkey}&feature_url_0=${bam_file.url}&sym_name_0=${niceName}&sym_method_0=${bam_file.url}&query_url=${bam_file.url}&server_url=galaxy</url>
+        <param type="data" name="bam_file_for_name" viewable="False"/>
+        <param type="template" name="niceName" viewable="False" strip="True">
+            #import re
+            #set nm=$bam_file_for_name.name
+            ${re.sub(r'\W',"_",nm)}
+        </param>
+        <param type="data" name="bai_file" url="${niceName}.bam.bai" metadata="bam_index" />
+        <param type="data" name="bam_file" url="${niceName}.bam" />
+    </link>
+</display>
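
The niceName template simply replaces every non-word character in the dataset name with an underscore so the name is safe to use in the generated URLs and filenames; for example:

    import re
    re.sub(r'\W', '_', 'My reads (sample #1).bam')  # -> 'My_reads__sample__1__bam'
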
diff --git a/display_applications/igb/bb.xml b/display_applications/igb/bb.xml
new file mode 100644
index 0000000..aa4d97d
--- /dev/null
+++ b/display_applications/igb/bb.xml
@@ -0,0 +1,12 @@
+<display id="igb_bb" version="1.0.0" name="display in IGB">
+    <link id="View" name="View">
+        <url>http://bioviz.org/igb/galaxy.html?version=${bigbed_file.dbkey}&loadresidues=false&feature_url_0=${bigbed_file.url}&sym_name_0=${niceName}&sym_method_0=${bigbed_file.url}&query_url=${bigbed_file.url}&server_url=galaxy</url>
+        <param type="data" name="bigbed_file_for_name" viewable="False"/>
+        <param type="template" name="niceName" viewable="False" strip="True">
+            #import re
+            #set nm=$bigbed_file_for_name.name
+            ${re.sub(r'\W',"_",nm)}
+        </param>
+        <param type="data" name="bigbed_file" url="${niceName}.bigbed" />
+    </link>
+</display>
diff --git a/display_applications/igb/bed.xml b/display_applications/igb/bed.xml
new file mode 100644
index 0000000..3a8ce1c
--- /dev/null
+++ b/display_applications/igb/bed.xml
@@ -0,0 +1,15 @@
+<display id="igb_bed" version="1.0.0" name="display in IGB">
+    <link id="View" name="View">
+        <url>$bioviz</url>
+        <param type="data" name="bed_file_for_name" viewable="False"/>
+        <param type="template" name="niceName" viewable="False" strip="True">
+            #import re
+            #set nm=$bed_file_for_name.name
+            ${re.sub(r'\W',"_",nm)}
+        </param>
+        <param type="data" name="bed_file" url="${niceName}.bed" />
+        <param type="template" name="bioviz" strip="True" >
+            http://bioviz.org/igb/galaxy.html?version=${bed_file.dbkey}&loadresidues=false&feature_url_0=${bed_file.url}&sym_name_0=${niceName}&sym_method_0=${bed_file.url}&query_url=${bed_file.url}&server_url=galaxy
+        </param>
+    </link>
+</display>
diff --git a/display_applications/igb/bedgraph.xml b/display_applications/igb/bedgraph.xml
new file mode 100644
index 0000000..c22fdf9
--- /dev/null
+++ b/display_applications/igb/bedgraph.xml
@@ -0,0 +1,15 @@
+<display id="igb_bedgraph" version="1.0.0" name="display in IGB">
+    <link id="View" name="View">
+        <url>$bioviz</url>
+        <param type="data" name="bedgraph_file_for_name" viewable="False"/>
+        <param type="template" name="niceName" viewable="False" strip="True">
+            #import re
+            #set nm=$bedgraph_file_for_name.name
+            ${re.sub(r'\W',"_",nm)}
+        </param>
+        <param type="data" name="bedgraph_file" url="${niceName}.bed.bedgraph" />
+        <param type="template" name="bioviz" strip="True" >
+            http://bioviz.org/igb/galaxy.html?version=${bedgraph_file.dbkey}&loadresidues=false&feature_url_0=${bedgraph_file.url}&sym_name_0=${niceName}&sym_method_0=${bedgraph_file.url}&query_url=${bedgraph_file.url}&server_url=galaxy
+        </param>
+    </link>
+</display>
diff --git a/display_applications/igb/bigwig.xml b/display_applications/igb/bigwig.xml
new file mode 100644
index 0000000..de8f78b
--- /dev/null
+++ b/display_applications/igb/bigwig.xml
@@ -0,0 +1,20 @@
+<display id="igb_bigwig" version="1.0.0" name="display in IGB">
+    <link id="View" name="View">
+        <url>http://bioviz.org/igb/galaxy.html?version=${bigwig_file.dbkey}&loadresidues=false&feature_url_0=${bigwig_file.url}&sym_name_0=${niceName}&sym_method_0=${bigwig_file.url}&query_url=${bigwig_file.url}&server_url=galaxy</url>
+        <param type="data" name="bigwig_file_for_name" viewable="False"/>
+        <param type="template" name="niceName" viewable="False" strip="True">
+            #import re
+            #set nm=$bigwig_file_for_name.name
+            ${re.sub(r'\W',"_",nm)}
+        </param>
+        <param type="data" name="bigwig_file" url="${niceName}.bigwig" />
+        <!--<param type="template" name="position" strip="True" >
+            #set chrom, start, end = $bigwig_file.datatype.get_estimated_display_viewport( $bigwig_file )
+            #if $chrom is not None:
+                seqid=${chrom}&start=${start}&end=${int(end) + 1}
+            #else:
+                seqid=&start=&end=
+            #end if
+        </param>-->
+    </link>
+</display>
diff --git a/display_applications/igb/gtf.xml b/display_applications/igb/gtf.xml
new file mode 100644
index 0000000..15c898d
--- /dev/null
+++ b/display_applications/igb/gtf.xml
@@ -0,0 +1,15 @@
+<display id="igb_gtf" version="1.0.0" name="display in IGB">
+    <link id="View" name="View">
+        <url>$bioviz</url>
+        <param type="data" name="gtf_file_for_name" viewable="False"/>
+        <param type="template" name="niceName" viewable="False" strip="True">
+            #import re
+            #set nm=$gtf_file_for_name.name
+            ${re.sub(r'\W',"_",nm)}
+        </param>
+        <param type="data" name="gtf_file" url="${niceName}.gtf" />
+        <param type="template" name="bioviz" strip="True" >
+             http://bioviz.org/igb/galaxy.html?version=${gtf_file.dbkey}&loadresidues=false&feature_url_0=${gtf_file.url}&sym_name_0=${niceName}&sym_method_0=${gtf_file.url}&query_url=${gtf_file.url}&server_url=galaxy
+        </param>
+    </link>
+</display>
diff --git a/display_applications/igb/wig.xml b/display_applications/igb/wig.xml
new file mode 100644
index 0000000..99c9b6f
--- /dev/null
+++ b/display_applications/igb/wig.xml
@@ -0,0 +1,20 @@
+<display id="igb_wig" version="1.0.0" name="display in IGB">
+    <link id="View" name="View">
+        <url>http://bioviz.org/igb/galaxy.html?version=${wig_file.dbkey}&${position}&loadresidues=false&feature_url_0=${wig_file.url}&sym_name_0=${niceName}&sym_method_0=${wig_file.url}&query_url=${wig_file.url}&server_url=galaxy</url>
+        <param type="data" name="wig_file_for_name" viewable="False"/>
+        <param type="template" name="niceName" viewable="False" strip="True">
+            #import re
+            #set nm=$wig_file_for_name.name
+            ${re.sub(r'\W',"_",nm)}
+        </param>
+        <param type="data" name="wig_file" url="${niceName}.wig" />
+        <param type="template" name="position" strip="True" >
+            #set chrom, start, end = $wig_file.datatype.get_estimated_display_viewport( $wig_file )
+            #if $chrom is not None:
+                seqid=${chrom}&start=${start}&end=${int(end) + 1}
+            #else:
+                seqid=&start=&end=
+            #end if
+        </param>
+    </link>
+</display>
diff --git a/display_applications/igv/bam.xml b/display_applications/igv/bam.xml
new file mode 100644
index 0000000..3dcb923
--- /dev/null
+++ b/display_applications/igv/bam.xml
@@ -0,0 +1,94 @@
+<?xml version="1.0"?>
+<display id="igv_bam" version="1.0.0" name="display with IGV">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${$site_id.startswith( 'local_' ) or $dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${redirect_url}</url>
+        <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" />
+        <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" />
+        <param type="template" name="site_organism" strip="True" >
+            #if ($dataset.dbkey in $site_dbkeys)
+                $site_organisms[ $site_dbkeys.index( $bam_file.dbkey ) ]
+            #else:
+                $bam_file.dbkey
+            #end if
+        </param>
+        <param type="template" name="jnlp" url="galaxy_${DATASET_HASH}.jnlp" viewable="True" mimetype="application/x-java-jnlp-file"><![CDATA[
+<?xml version="1.0" encoding="utf-8"?>
+<jnlp
+  spec="1.0+"
+  codebase="${site_link}">
+  <information>
+    <title>IGV 1.5</title>
+    <vendor>The Broad Institute</vendor>
+    <homepage href="http://www.broadinstitute.org/igv"/>
+    <description>IGV Software</description>
+    <description kind="short">IGV</description>
+  </information>
+  <security>
+      <all-permissions/>
+  </security>
+  <resources>
+<j2se version="1.5+" initial-heap-size="256m" max-heap-size="1100m"/>
+    <jar href="igv.jar" download="eager" main="true"/>
+    <jar href="batik-codec.jar" download="eager"/>
+    <property name="apple.laf.useScreenMenuBar" value="true"/>
+    <property name="com.apple.mrj.application.growbox.intrudes" value="false"/>
+    <property name="com.apple.mrj.application.live-resize" value="true"/>
+    <property name="com.apple.macos.smallTabs" value="true"/>
+  </resources>
+    <resources os="Mac" arch="i386">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macintel.jar"/>
+    </resources>
+    <resources os="Mac" arch="ppc">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Mac" arch="PowerPC">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Windows">
+        <property name="sun.java2d.noddraw" value="true"/>
+        <nativelib href="hdfnative-win.jar"/>
+    </resources>
+    <resources os="Linux">
+        <nativelib href="hdfnative-linux64.jar"/>
+    </resources>
+  <application-desc main-class="org.broad.igv.ui.IGVMainFrame">
+     <argument>-g</argument>
+     <argument>${site_organism}</argument>
+     <argument>${bam_file.url}</argument>
+  </application-desc>
+</jnlp>
+]]>
+        </param>
+        <param type="template" name="redirect_url" strip="True" >
+            #if $site_id.startswith( 'local_' )
+                ${site_link}?file=${bam_file.qp}&genome=${site_organism}&merge=true&name=${qp( ( $bam_file.name or $DATASET_HASH ).replace( ',', ';' ) )}
+            #elif $site_id.startswith( 'web_link_' ):
+                ${site_link}?sessionURL=${bam_file.qp}&genome=${site_organism}&merge=true&name=${qp( ( $bam_file.name or $DATASET_HASH ).replace( ',', ';' ) )}
+            #else:
+                ${jnlp.url}
+            #end if
+        </param>
+    </dynamic_links>
+    <dynamic_links from_data_table="igv_broad_genomes" skip_startswith="#" id="value" name="name">
+        <!-- Our input data table is one line per dbkey -->
+        <filter>${ $dataset.dbkey == $value }</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>http://www.broadinstitute.org/igv/projects/current/igv.php?sessionURL=${bam_file.qp}&genome=${bam_file.dbkey}&merge=true&name=${qp( ( $bam_file.name or $DATASET_HASH ).replace( ',', ';' ) )}</url>
+        <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" />
+        <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" />
+    </dynamic_links>
+</display>
+<!-- Dan Blankenberg -->
diff --git a/display_applications/igv/bigwig.xml b/display_applications/igv/bigwig.xml
new file mode 100644
index 0000000..e867226
--- /dev/null
+++ b/display_applications/igv/bigwig.xml
@@ -0,0 +1,76 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<display id="igv_bigwig" version="1.0.0" name="display with IGV">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator=","/>
+        <dynamic_param name="site_organisms" value="4" split="True" separator=","/>
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+<!--        <filter>${dataset.dbkey in $site_dbkeys}</filter> -->
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${redirect_url}</url>
+        <param type="data" name="bigwig_file" url="galaxy_${DATASET_HASH}.bw" format="bigwig"/>
+        <param type="template" name="jnlp" url="galaxy_${DATASET_HASH}.jnlp" viewable="True" mimetype="application/x-java-jnlp-file"><![CDATA[
+<?xml version="1.0" encoding="utf-8"?>
+<jnlp
+  spec="1.0+"
+  codebase="${site_link}">
+  <information>
+    <title>IGV 1.5</title>
+    <vendor>The Broad Institute</vendor>
+    <homepage href="http://www.broadinstitute.org/igv"/>
+    <description>IGV Software</description>
+    <description kind="short">IGV</description>
+  </information>
+  <security>
+      <all-permissions/>
+  </security>
+  <resources>
+<j2se version="1.5+" initial-heap-size="256m" max-heap-size="1100m"/>
+    <jar href="igv.jar" download="eager" main="true"/>
+    <jar href="batik-codec.jar" download="eager"/>
+    <property name="apple.laf.useScreenMenuBar" value="true"/>
+    <property name="com.apple.mrj.application.growbox.intrudes" value="false"/>
+    <property name="com.apple.mrj.application.live-resize" value="true"/>
+    <property name="com.apple.macos.smallTabs" value="true"/>
+  </resources>
+    <resources os="Mac" arch="i386">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macintel.jar"/>
+    </resources>
+    <resources os="Mac" arch="ppc">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Mac" arch="PowerPC">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Windows">
+        <property name="sun.java2d.noddraw" value="true"/>
+        <nativelib href="hdfnative-win.jar"/>
+    </resources>
+    <resources os="Linux">
+        <nativelib href="hdfnative-linux64.jar"/>
+    </resources>
+  <application-desc main-class="org.broad.igv.ui.IGVMainFrame">
+     <argument>${bigwig_file.url}</argument>
+  </application-desc>
+</jnlp>
+]]>
+        </param>
+        <param type="template" name="redirect_url" strip="True">
+            #if $site_id.startswith( 'local_' )
+                ${site_link}?file=${bigwig_file.qp}&merge=true&name=${qp( $bigwig_file.name )}
+            #elif $site_id.startswith( 'web_link_' ):
+                ${site_link}?sessionURL=${bigwig_file.qp}&merge=true&name=${qp( $bigwig_file.name )}
+            #else:
+                ${jnlp.url}
+            #end if
+        </param>
+    </dynamic_links>
+</display>
+<!-- Ann Black-Ziegelbein based on Dan Blankenberg -->
diff --git a/display_applications/igv/gff.xml b/display_applications/igv/gff.xml
new file mode 100644
index 0000000..4eabd5e
--- /dev/null
+++ b/display_applications/igv/gff.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0"?>
+<display id="igv_gff" version="1.0.0" name="display with IGV">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${$site_id.startswith( 'local_' ) or $dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${redirect_url}</url>
+        <param type="data" name="gff_file" url="galaxy_${DATASET_HASH}.${dataset.ext}" />
+        <param type="template" name="site_organism" strip="True" >
+            #if ($dataset.dbkey in $site_dbkeys)
+                $site_organisms[ $site_dbkeys.index( $gff_file.dbkey ) ]
+            #else:
+                $gff_file.dbkey
+            #end if
+        </param>
+        <param type="template" name="jnlp" url="galaxy_${DATASET_HASH}.jnlp" viewable="True" mimetype="application/x-java-jnlp-file"><![CDATA[
+<?xml version="1.0" encoding="utf-8"?>
+<jnlp
+  spec="1.0+"
+  codebase="${site_link}">
+  <information>
+    <title>IGV 1.5</title>
+    <vendor>The Broad Institute</vendor>
+    <homepage href="http://www.broadinstitute.org/igv"/>
+    <description>IGV Software</description>
+    <description kind="short">IGV</description>
+  </information>
+  <security>
+      <all-permissions/>
+  </security>
+  <resources>
+<j2se version="1.5+" initial-heap-size="256m" max-heap-size="1100m"/>
+    <jar href="igv.jar" download="eager" main="true"/>
+    <jar href="batik-codec.jar" download="eager"/>
+    <property name="apple.laf.useScreenMenuBar" value="true"/>
+    <property name="com.apple.mrj.application.growbox.intrudes" value="false"/>
+    <property name="com.apple.mrj.application.live-resize" value="true"/>
+    <property name="com.apple.macos.smallTabs" value="true"/>
+  </resources>
+    <resources os="Mac" arch="i386">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macintel.jar"/>
+    </resources>
+    <resources os="Mac" arch="ppc">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Mac" arch="PowerPC">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Windows">
+        <property name="sun.java2d.noddraw" value="true"/>
+        <nativelib href="hdfnative-win.jar"/>
+    </resources>
+    <resources os="Linux">
+        <nativelib href="hdfnative-linux64.jar"/>
+    </resources>
+  <application-desc main-class="org.broad.igv.ui.IGVMainFrame">
+     <argument>-g</argument>
+     <argument>${site_organism}</argument>
+     <argument>${gff_file.url}</argument>
+  </application-desc>
+</jnlp>
+]]>
+        </param>
+        <param type="template" name="redirect_url" strip="True" >
+            #if $site_id.startswith( 'local_' )
+                ${site_link}?file=${gff_file.qp}&genome=${site_organism}&merge=true&name=${qp( ( $gff_file.name or $DATASET_HASH ).replace( ',', ';' ) )}
+            #elif $site_id.startswith( 'web_link_' ):
+                ${site_link}?sessionURL=${gff_file.qp}&genome=${site_organism}&merge=true&name=${qp( ( $gff_file.name or $DATASET_HASH ).replace( ',', ';' ) )}
+            #else:
+                ${jnlp.url}
+            #end if
+        </param>
+    </dynamic_links>
+    <dynamic_links from_data_table="igv_broad_genomes" skip_startswith="#" id="value" name="name">
+        <!-- Our input data table is one line per dbkey -->
+        <filter>${ $dataset.dbkey == $value }</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>http://www.broadinstitute.org/igv/projects/current/igv.php?sessionURL=${gff_file.qp}&genome=${gff_file.dbkey}&merge=true&name=${qp( ( $gff_file.name or $DATASET_HASH ).replace( ',', ';' ) )}</url>
+        <param type="data" name="gff_file" url="galaxy_${DATASET_HASH}.${dataset.ext}" />
+    </dynamic_links>
+</display>
+<!-- Dan Blankenberg -->
diff --git a/display_applications/igv/interval_as_bed.xml b/display_applications/igv/interval_as_bed.xml
new file mode 100644
index 0000000..4fd6a98
--- /dev/null
+++ b/display_applications/igv/interval_as_bed.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0"?>
+<display id="igv_interval_as_bed" version="1.0.0" name="display with IGV">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${$site_id.startswith( 'local_' ) or $dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${redirect_url}</url>
+        <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict" />
+        <param type="template" name="site_organism" strip="True" >
+            #if ($dataset.dbkey in $site_dbkeys)
+                $site_organisms[ $site_dbkeys.index( $bed_file.dbkey ) ]
+            #else:
+                $bed_file.dbkey
+            #end if
+        </param>
+        <param type="template" name="jnlp" url="galaxy_${DATASET_HASH}.jnlp" viewable="True" mimetype="application/x-java-jnlp-file"><![CDATA[
+<?xml version="1.0" encoding="utf-8"?>
+<jnlp
+  spec="1.0+"
+  codebase="${site_link}">
+  <information>
+    <title>IGV 1.5</title>
+    <vendor>The Broad Institute</vendor>
+    <homepage href="http://www.broadinstitute.org/igv"/>
+    <description>IGV Software</description>
+    <description kind="short">IGV</description>
+  </information>
+  <security>
+      <all-permissions/>
+  </security>
+  <resources>
+<j2se version="1.5+" initial-heap-size="256m" max-heap-size="1100m"/>
+    <jar href="igv.jar" download="eager" main="true"/>
+    <jar href="batik-codec.jar" download="eager"/>
+    <property name="apple.laf.useScreenMenuBar" value="true"/>
+    <property name="com.apple.mrj.application.growbox.intrudes" value="false"/>
+    <property name="com.apple.mrj.application.live-resize" value="true"/>
+    <property name="com.apple.macos.smallTabs" value="true"/>
+  </resources>
+    <resources os="Mac" arch="i386">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macintel.jar"/>
+    </resources>
+    <resources os="Mac" arch="ppc">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Mac" arch="PowerPC">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Windows">
+        <property name="sun.java2d.noddraw" value="true"/>
+        <nativelib href="hdfnative-win.jar"/>
+    </resources>
+    <resources os="Linux">
+        <nativelib href="hdfnative-linux64.jar"/>
+    </resources>
+  <application-desc main-class="org.broad.igv.ui.IGVMainFrame">
+     <argument>-g</argument>
+     <argument>${site_organism}</argument>
+     <argument>${bed_file.url}</argument>
+  </application-desc>
+</jnlp>
+]]>
+        </param>
+        <param type="template" name="redirect_url" strip="True" >
+            #if $site_id.startswith( 'local_' )
+                ${site_link}?file=${bed_file.qp}&genome=${site_organism}&merge=true&name=${qp( ( $bed_file.name or $DATASET_HASH ).replace( ',', ';' ) )}
+            #elif $site_id.startswith( 'web_link_' ):
+                ${site_link}?sessionURL=${bed_file.qp}&genome=${site_organism}&merge=true&name=${qp( ( $bed_file.name or $DATASET_HASH ).replace( ',', ';' ) )}
+            #else:
+                ${jnlp.url}
+            #end if
+        </param>
+    </dynamic_links>
+    <dynamic_links from_data_table="igv_broad_genomes" skip_startswith="#" id="value" name="name">
+        <!-- Our input data table is one line per dbkey -->
+        <filter>${ $dataset.dbkey == $value }</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>http://www.broadinstitute.org/igv/projects/current/igv.php?sessionURL=${bed_file.qp}&genome=${bed_file.dbkey}&merge=true&name=${qp( ( $bed_file.name or $DATASET_HASH ).replace( ',', ';' ) )}</url>
+        <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict" />
+    </dynamic_links>
+</display>
+<!-- Dan Blankenberg -->
diff --git a/display_applications/igv/vcf.xml b/display_applications/igv/vcf.xml
new file mode 100644
index 0000000..8a0f319
--- /dev/null
+++ b/display_applications/igv/vcf.xml
@@ -0,0 +1,94 @@
+<?xml version="1.0"?>
+<display id="igv_vcf" version="1.0.0" name="display with IGV">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_organisms" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${$site_id.startswith( 'local_' ) or $dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${redirect_url}</url>
+        <param type="data" name="bgzip_file" url="galaxy_${DATASET_HASH}.vcf.gz" format="vcf_bgzip" />
+        <param type="data" name="tabix_file" dataset="bgzip_file" url="galaxy_${DATASET_HASH}.vcf.gz.tbi" format="tabix" />
+        <param type="template" name="site_organism" strip="True" >
+            #if ($dataset.dbkey in $site_dbkeys)
+                $site_organisms[ $site_dbkeys.index( $bgzip_file.dbkey ) ]
+            #else:
+                $bgzip_file.dbkey
+            #end if
+        </param>
+        <param type="template" name="jnlp" url="galaxy_${DATASET_HASH}.jnlp" viewable="True" mimetype="application/x-java-jnlp-file"><![CDATA[
+<?xml version="1.0" encoding="utf-8"?>
+<jnlp
+  spec="1.0+"
+  codebase="${site_link}">
+  <information>
+    <title>IGV 1.5</title>
+    <vendor>The Broad Institute</vendor>
+    <homepage href="http://www.broadinstitute.org/igv"/>
+    <description>IGV Software</description>
+    <description kind="short">IGV</description>
+  </information>
+  <security>
+      <all-permissions/>
+  </security>
+  <resources>
+<j2se version="1.5+" initial-heap-size="256m" max-heap-size="1100m"/>
+    <jar href="igv.jar" download="eager" main="true"/>
+    <jar href="batik-codec.jar" download="eager"/>
+    <property name="apple.laf.useScreenMenuBar" value="true"/>
+    <property name="com.apple.mrj.application.growbox.intrudes" value="false"/>
+    <property name="com.apple.mrj.application.live-resize" value="true"/>
+    <property name="com.apple.macos.smallTabs" value="true"/>
+  </resources>
+    <resources os="Mac" arch="i386">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macintel.jar"/>
+    </resources>
+    <resources os="Mac" arch="ppc">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Mac" arch="PowerPC">
+        <property name="apple.awt.graphics.UseQuartz" value="false"/>
+        <nativelib href="hdfnative-macppc.jar"/>
+    </resources>
+    <resources os="Windows">
+        <property name="sun.java2d.noddraw" value="true"/>
+        <nativelib href="hdfnative-win.jar"/>
+    </resources>
+    <resources os="Linux">
+        <nativelib href="hdfnative-linux64.jar"/>
+    </resources>
+  <application-desc main-class="org.broad.igv.ui.IGVMainFrame">
+     <argument>-g</argument>
+     <argument>${site_organism}</argument>
+     <argument>${bgzip_file.url}</argument>
+  </application-desc>
+</jnlp>
+]]>
+        </param>
+        <param type="template" name="redirect_url" strip="True" >
+            #if $site_id.startswith( 'local_' )
+                ${site_link}?file=${bgzip_file.qp}&genome=${site_organism}&merge=true&name=${qp( ( $bgzip_file.name or $DATASET_HASH ).replace( ',', ';' ) )}
+            #elif $site_id.startswith( 'web_link_' ):
+                ${site_link}?sessionURL=${bgzip_file.qp}&genome=${site_organism}&merge=true&name=${qp( ( $bgzip_file.name or $DATASET_HASH ).replace( ',', ';' ) )}
+            #else:
+                ${jnlp.url}
+            #end if
+        </param>
+    </dynamic_links>
+    <dynamic_links from_data_table="igv_broad_genomes" skip_startswith="#" id="value" name="name">
+        <!-- Our input data table is one line per dbkey -->
+        <filter>${ $dataset.dbkey == $value }</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>http://www.broadinstitute.org/igv/projects/current/igv.php?sessionURL=${bgzip_file.qp}&genome=$bgzip_file.dbkey&merge=true&name=${qp( ( $bgzip_file.name or $DATASET_HASH ).replace( ',', ';' ) )}</url>
+        <param type="data" name="bgzip_file" url="galaxy_${DATASET_HASH}.vcf.gz" format="vcf_bgzip" />
+        <param type="data" name="tabix_file" dataset="bgzip_file" url="galaxy_${DATASET_HASH}.vcf.gz.tbi" format="tabix" />
+    </dynamic_links>
+</display>
+<!-- Dan Blankenberg -->
diff --git a/display_applications/iobio/bam.xml b/display_applications/iobio/bam.xml
new file mode 100644
index 0000000..69cdabf
--- /dev/null
+++ b/display_applications/iobio/bam.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0"?>
+<display id="iobio_bam" version="1.0.0" name="display at bam.iobio">
+    <dynamic_links from_data_table="bam_iobio" skip_startswith="#" id="value" name="name">
+        <url>${url}?bam=${bam_file.qp}</url>
+        <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" />
+        <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" />
+    </dynamic_links>
+</display>
diff --git a/display_applications/iobio/vcf.xml b/display_applications/iobio/vcf.xml
new file mode 100644
index 0000000..60be251
--- /dev/null
+++ b/display_applications/iobio/vcf.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0"?>
+<display id="iobio_vcf" version="1.0.0" name="display at vcf.iobio">
+    <dynamic_links from_data_table="vcf_iobio" skip_startswith="#" id="value" name="name">
+        <url>${url}?vcf=${bgzip_file.qp}</url>
+        <param type="data" name="bgzip_file" url="galaxy_${DATASET_HASH}.vcf.gz" format="vcf_bgzip" />
+        <param type="data" name="tabix_file" dataset="bgzip_file" url="galaxy_${DATASET_HASH}.vcf.gz.tbi" format="tabix" />
+    </dynamic_links>
+</display>
diff --git a/display_applications/rviewer/bed.xml b/display_applications/rviewer/bed.xml
new file mode 100644
index 0000000..d810202
--- /dev/null
+++ b/display_applications/rviewer/bed.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<display id="rviewer_interval" version="1.0.0" name="display at RViewer">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_genome_versions" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}/application/service/datasets/add?datasetName=${qp($bed_file.name)}&genomeVersion=${qp( $site_genome_versions[ $site_dbkeys.index( $bed_file.dbkey ) ] )}&dataFormat=bed&dataUrl=${bed_file.qp}</url>
+        <param type="data" name="bed_file" format="bed" url="galaxy.bed" />
+    </dynamic_links>
+</display>
diff --git a/display_applications/rviewer/vcf.xml b/display_applications/rviewer/vcf.xml
new file mode 100644
index 0000000..fe446c0
--- /dev/null
+++ b/display_applications/rviewer/vcf.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<display id="rviewer_vcf" version="1.0.0" name="display at RViewer">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="site_name" value="1"/>
+        <dynamic_param name="site_link" value="2"/>
+        <dynamic_param name="site_dbkeys" value="3" split="True" separator="," />
+        <dynamic_param name="site_genome_versions" value="4" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_dbkeys to dataset dbkey -->
+        <filter>${dataset.dbkey in $site_dbkeys}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${site_link}/application/service/datasets/add?datasetName=${qp($vcf_file.name)}&genomeVersion=${qp( $site_genome_versions[ $site_dbkeys.index( $vcf_file.dbkey ) ] )}&dataFormat=vcf&dataUrl=${vcf_file.qp}</url>
+        <param type="data" name="vcf_file" url="galaxy.vcf" />
+    </dynamic_links>
+</display>
diff --git a/display_applications/ucsc/bam.xml b/display_applications/ucsc/bam.xml
new file mode 100644
index 0000000..29fd167
--- /dev/null
+++ b/display_applications/ucsc/bam.xml
@@ -0,0 +1,17 @@
+<display id="ucsc_bam" version="1.0.0" name="display at UCSC">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${ucsc_link}db=${qp($bam_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
+        <param type="data" name="bam_file" url="galaxy_${DATASET_HASH}.bam" />
+        <param type="data" name="bai_file" url="galaxy_${DATASET_HASH}.bam.bai" metadata="bam_index" /><!-- UCSC expects index file to exist as bam_file_name.bai -->
+        <param type="template" name="track" viewable="True">track type="bam" name="${bam_file.name.replace( '\\', '\\\\' ).replace( '"', '\\"' )}" bigDataUrl="${bam_file.url}" db="${bam_file.dbkey}" pairEndsByName="."</param>
+    </dynamic_links>
+</display>
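
The custom-track template escapes backslashes and double quotes in the dataset name so the generated track line stays parseable; equivalently in Python:

    name = 'My "deep" reads'
    escaped = name.replace('\\', '\\\\').replace('"', '\\"')
    track = 'track type="bam" name="%s"' % escaped  # name="My \"deep\" reads"
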
diff --git a/display_applications/ucsc/bigbed.xml b/display_applications/ucsc/bigbed.xml
new file mode 100644
index 0000000..4f5a284
--- /dev/null
+++ b/display_applications/ucsc/bigbed.xml
@@ -0,0 +1,16 @@
+<display id="ucsc_bigbed" version="1.0.0" name="display at UCSC">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
+        <url>${ucsc_link}db=${qp($bigbed_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
+        <param type="data" name="bigbed_file" url="galaxy_${DATASET_HASH}.bigbed" />
+        <param type="template" name="track" viewable="True">track type="bigBed" name="${bigbed_file.name.replace( '\\', '\\\\' ).replace( '"', '\\"' )}" bigDataUrl="${bigbed_file.url}" db="${bigbed_file.dbkey}"</param>
+    </dynamic_links>
+</display>
diff --git a/display_applications/ucsc/bigwig.xml b/display_applications/ucsc/bigwig.xml
new file mode 100644
index 0000000..779448a
--- /dev/null
+++ b/display_applications/ucsc/bigwig.xml
@@ -0,0 +1,16 @@
+<display id="ucsc_bigwig" version="1.0.0" name="display at UCSC">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by the specified name -->
+        <url>${ucsc_link}db=${qp($bigwig_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
+        <param type="data" name="bigwig_file" url="galaxy_${DATASET_HASH}.bigwig" />
+        <param type="template" name="track" viewable="True">track type="bigWig" name="${bigwig_file.name.replace( '\\', '\\\\' ).replace( '"', '\\"' )}" bigDataUrl="${bigwig_file.url}" db="${bigwig_file.dbkey}"</param>
+    </dynamic_links>
+</display>
diff --git a/display_applications/ucsc/interval_as_bed.xml b/display_applications/ucsc/interval_as_bed.xml
new file mode 100644
index 0000000..655bdf1
--- /dev/null
+++ b/display_applications/ucsc/interval_as_bed.xml
@@ -0,0 +1,23 @@
+<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC" inherit="True">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by the specified name -->
+        <url>${ucsc_link}db=${qp($bed_file.dbkey)}&position=${position.qp}&hgt.customText=${bed_file.qp}</url>
+        <param type="data" name="bed_file" url="galaxy_${DATASET_HASH}.bed" format="bedstrict"/> <!-- Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, UCSC does not: force use of converter which will make strict BED6+ file -->
+        <param type="template" name="position" strip="True" >
+#set chrom, start, end = $bed_file.datatype.get_estimated_display_viewport( $bed_file )
+#if $chrom is not None:
+${chrom}:${start}-${int( end ) + 1}
+#else:
+:-
+#end if
+        </param>
+    </dynamic_links>
+</display>
diff --git a/display_applications/ucsc/maf_customtrack.xml b/display_applications/ucsc/maf_customtrack.xml
new file mode 100644
index 0000000..0961a2f
--- /dev/null
+++ b/display_applications/ucsc/maf_customtrack.xml
@@ -0,0 +1,15 @@
+<display id="ucsc_maf_customtrack" version="1.0.0" name="display at UCSC" inherit="True">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by the specified name -->
+        <url>${ucsc_link}db=${qp($maf_customtrack_file.dbkey)}&position=${maf_customtrack_file.metadata.vp_chromosome}:${maf_customtrack_file.metadata.vp_start}-${maf_customtrack_file.metadata.vp_end}&hgt.customText=${maf_customtrack_file.qp}</url>
+        <param type="data" name="maf_customtrack_file" url="galaxy_${DATASET_HASH}.maf" format="mafcustomtrack"/>
+    </dynamic_links>
+</display>
diff --git a/display_applications/ucsc/trackhub.xml b/display_applications/ucsc/trackhub.xml
new file mode 100755
index 0000000..45c34a0
--- /dev/null
+++ b/display_applications/ucsc/trackhub.xml
@@ -0,0 +1,6 @@
+<display id="ucsc_trackhub" version="1.0.0" name="display at Track Hub UCSC">
+    <link id="main" name="main">
+        <url>https://genome.ucsc.edu/cgi-bin/hgHubConnect?hubUrl=${qp($hub_file.url + '/myHub/hub.txt')}&hgHub_do_firstDb=on&hgHub_do_redirect=on&hgHubConnect.remakeTrackHub=on</url>
+        <param type="data" name="hub_file" url="galaxy_${DATASET_HASH}" allow_extra_files_access="True" />
+    </link>
+</display>
diff --git a/display_applications/ucsc/vcf.xml b/display_applications/ucsc/vcf.xml
new file mode 100644
index 0000000..d8f499e
--- /dev/null
+++ b/display_applications/ucsc/vcf.xml
@@ -0,0 +1,17 @@
+<display id="ucsc_vcf" version="1.0.0" name="display at UCSC">
+    <!-- Load links from file: one line to one link -->
+    <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0">
+        <!-- Define parameters by column from file, allow splitting on builds -->
+        <dynamic_param name="site_id" value="0"/>
+        <dynamic_param name="ucsc_link" value="1"/>
+        <dynamic_param name="builds" value="2" split="True" separator="," />
+        <!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
+        <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter>
+        <filter>${dataset.dbkey in $builds}</filter>
+        <!-- We define url and params as normal, but values defined in dynamic_param are available by the specified name -->
+        <url>${ucsc_link}db=${qp($bgzip_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
+        <param type="data" name="bgzip_file" url="galaxy_${DATASET_HASH}.vcf.gz" format="vcf_bgzip" />
+        <param type="data" name="tabix_file" dataset="bgzip_file" url="galaxy_${DATASET_HASH}.vcf.gz.tbi" format="tabix" />
+        <param type="template" name="track" viewable="True">track type="vcfTabix" name="${bgzip_file.name.replace( '\\', '\\\\' ).replace( '"', '\\"' )}" bigDataUrl="${bgzip_file.url}" db="${bgzip_file.dbkey}"</param>
+    </dynamic_links>
+</display>
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..c2348cb
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,185 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = build
+UPDATEWORKDIR = /tmp/galaxySphinxUpdate
+UPDATEWORKSOURCELIB = $(UPDATEWORKDIR)/source/lib
+SPHINXAPIDOC  = sphinx-apidoc
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+GENERATED_RST = source/dev/schema.rst
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man texinfo info changes linkcheck doctest gettext updaterst
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+	@echo "  updaterst  to update sphinx rst to reflect code structure changes"
+
+schema.md: parse_gx_xsd.py schema_template.md ../lib/galaxy/tools/xsd/galaxy.xsd ## Build GitHub-flavored Markdown from the Galaxy Tool XSD (expects lxml in the environment)
+	python parse_gx_xsd.py schema_template.md ../lib/galaxy/tools/xsd/galaxy.xsd > $@
+
+source/dev/schema.rst: schema.md ## Convert Galaxy Tool XSD Markdown docs into reStructuredText (expects pandoc in environment)
+	pandoc schema.md -f markdown_github-hard_line_breaks -s -o $@
+	./fix_schema_rst.sh $@
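+# In short, the schema docs pipeline is:
+#   galaxy.xsd --(parse_gx_xsd.py)--> schema.md --(pandoc + fix_schema_rst.sh)--> source/dev/schema.rst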
+
+# might also want to do
+# cd source/lib; hg revert; rm *.rst.orig;  or not.
+clean:
+	-rm -rf $(BUILDDIR)/* $(UPDATEWORKDIR) schema.md $(GENERATED_RST)
+
+html: $(GENERATED_RST)
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml: $(GENERATED_RST)
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml: $(GENERATED_RST)
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle: $(GENERATED_RST)
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json: $(GENERATED_RST)
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp: $(GENERATED_RST)
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp: $(GENERATED_RST)
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Galaxy.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Galaxy.qhc"
+
+devhelp: $(GENERATED_RST)
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/Galaxy"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Galaxy"
+	@echo "# devhelp"
+
+epub: $(GENERATED_RST)
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex: $(GENERATED_RST)
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf: $(GENERATED_RST)
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text: $(GENERATED_RST)
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man: $(GENERATED_RST)
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo: $(GENERATED_RST)
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info: $(GENERATED_RST)
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext: $(GENERATED_RST)
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes: $(GENERATED_RST)
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck: $(GENERATED_RST)
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest: $(GENERATED_RST)
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+# Targets for updating the structure of the Sphinx RST doc for lib/
+
+$(UPDATEWORKSOURCELIB):
+	mkdir -p $(UPDATEWORKSOURCELIB)
+
+# Create a fresh version of the RST files for the lib, and then create a
+# unified patch file (ignoring all emacs leftovers).
+# Feed that to our custom version of patch.py, which applies patches that
+# are only adds, and reports everything else to the user to deal with manually.
+#
+# Note: this is still a very rough process. The run of patch.py produces some
+# errors that don't mean anything to us, and the manual process is not fun.
+updaterst: $(UPDATEWORKSOURCELIB)
+	$(SPHINXAPIDOC) -o $(UPDATEWORKSOURCELIB) ../lib
+	-diff -x '*.rst~' -ru source/lib $(UPDATEWORKSOURCELIB) >  $(UPDATEWORKDIR)/alldifs.patch
+	./patch.py $(UPDATEWORKDIR)/alldifs.patch
diff --git a/doc/fix_schema_rst.sh b/doc/fix_schema_rst.sh
new file mode 100755
index 0000000..f743dec
--- /dev/null
+++ b/doc/fix_schema_rst.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+sed -i.bak -e 's|.. code:: xml|.. code-block:: xml|g' "$1"
+# Insert table of contents
+sed -i.bak -e '/^``tool``$/i\
+.. contents:: Table of contents\
+\   :local:\
+\   :depth: 1\
+..\
+\
+' "$1"
+rm -f "$1.bak"
diff --git a/doc/parse_gx_xsd.py b/doc/parse_gx_xsd.py
new file mode 100644
index 0000000..cf72dfd
--- /dev/null
+++ b/doc/parse_gx_xsd.py
@@ -0,0 +1,171 @@
+# coding: utf-8
+# TODO: Add examples, tables and best practice links to command
+# TODO: Examples of truevalue, falsevalue
+# TODO: Test param extra_file
+# Things dropped from schema_template.md (still documented inside schema).
+#  - request_parameter_translation
+from __future__ import print_function
+
+import sys
+
+from lxml import etree
+from six import StringIO
+
+with open(sys.argv[1], "r") as f:
+    MARKDOWN_TEMPLATE = f.read()
+
+with open(sys.argv[2], "r") as f:
+    xmlschema_doc = etree.parse(f)
+
+markdown_buffer = StringIO()
+
+
+def main():
+    """Entry point for the function that builds Markdown help for the Galaxy XSD."""
+    for line in MARKDOWN_TEMPLATE.splitlines():
+        if line.startswith("$tag:"):
+            print(Tag(line).build_help())
+        else:
+            print(line)
+
+
+class Tag(object):
+
+    def __init__(self, line):
+        assert line.startswith("$tag:")
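+        # A template line is, schematically (illustrative example, not taken from
+        # schema_template.md):
+        #   $tag:tool|command:/element[@name='tool']//element[@name='command'] hide_attributes
+        # i.e. a display title with "|" separators, an xpath, and optional flags.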
+        line_parts = line.split(" ")
+        first_part = line_parts[0]
+        hide_attributes = False
+        if len(line_parts) > 1:
+            if "hide_attributes" in line_parts[1]:
+                hide_attributes = True
+        _, title, xpath = first_part.split(":")
+        xpath = xpath.replace("/element", "/{http://www.w3.org/2001/XMLSchema}element")
+        xpath = xpath.replace("/group", "/{http://www.w3.org/2001/XMLSchema}group")
+        xpath = xpath.replace("/complexType", "/{http://www.w3.org/2001/XMLSchema}complexType")
+        self.xpath = xpath
+        self.hide_attributes = hide_attributes
+        self.title = title
+
+    def build_help(self):
+        tag = xmlschema_doc.find(self.xpath)
+        if tag is None:
+            raise Exception("Could not find xpath for %s" % self.xpath)
+
+        title = self.title
+        tag_help = StringIO()
+        tag_help.write("## " + " > ".join(["``%s``" % p for p in title.split("|")]))
+        tag_help.write("\n")
+        tag_help.write(_build_tag(tag, self.hide_attributes))
+        tag_help.write("\n\n")
+        return tag_help.getvalue()
+
+
+def _build_tag(tag, hide_attributes):
+    tag_el = _find_tag_el(tag)
+    attributes = _find_attributes(tag)
+    tag_help = StringIO()
+    annotation_el = tag_el.find("{http://www.w3.org/2001/XMLSchema}annotation")
+    text = annotation_el.find("{http://www.w3.org/2001/XMLSchema}documentation").text
+    for line in text.splitlines():
+        if line.startswith("$attribute_list:"):
+            attributes_str, header_level = line.split(":")[1:3]
+            attribute_names = attributes_str.split(",")
+            header_level = int(header_level)
+            text = text.replace(line, _build_attributes_table(tag, attributes, attribute_names=attribute_names, header_level=header_level))
+        if line.startswith("$assertions"):
+            assertions_tag = xmlschema_doc.find("//{http://www.w3.org/2001/XMLSchema}complexType[@name='TestAssertions']")
+            assertion_tag = xmlschema_doc.find("//{http://www.w3.org/2001/XMLSchema}group[@name='TestAssertion']")
+            assertions_buffer = StringIO()
+            assertions_buffer.write(_doc_or_none(assertions_tag))
+            assertions_buffer.write("\n\n")
+            assertions_buffer.write("Child Element/Assertion | Details \n")
+            assertions_buffer.write("--- | ---\n")
+            elements = assertion_tag.findall("{http://www.w3.org/2001/XMLSchema}choice/{http://www.w3.org/2001/XMLSchema}element")
+            for element in elements:
+                doc = _doc_or_none(element).strip()
+                assertions_buffer.write("``%s`` | %s\n" % (element.attrib["name"], doc))
+            text = text.replace(line, assertions_buffer.getvalue())
+    tag_help.write(text)
+    best_practices = _get_bp_link(annotation_el)
+    if best_practices:
+        tag_help.write("\n\n### Best Practices\n")
+        tag_help.write("""
+Find the Intergalactic Utilities Commission's suggested best practices for this
+element [here](%s).""" % best_practices)
+    tag_help.write(_build_attributes_table(tag, attributes, hide_attributes))
+
+    return tag_help.getvalue()
+
+
+def _get_bp_link(annotation_el):
+    anchor = annotation_el.attrib.get("{http://galaxyproject.org/xml/1.0}best_practices", None)
+    link = None
+    if anchor:
+        link = "https://planemo.readthedocs.io/en/latest/standards/docs/best_practices/tool_xml.html#%s" % anchor
+    return link
+
+
+def _build_attributes_table(tag, attributes, hide_attributes=False, attribute_names=None, header_level=3):
+    attribute_table = StringIO()
+    attribute_table.write("\n\n")
+    if attributes and not hide_attributes:
+        header_prefix = '#' * header_level
+        attribute_table.write("\n%s Attributes\n" % header_prefix)
+        attribute_table.write("Attribute | Details | Required\n")
+        attribute_table.write("--- | --- | ---\n")
+        for attribute in attributes:
+            name = attribute.attrib["name"]
+            if attribute_names and name not in attribute_names:
+                continue
+            details = _doc_or_none(attribute)
+            if details is None:
+                type_el = _type_el(attribute)
+                details = _doc_or_none(type_el)
+                annotation_el = type_el.find("{http://www.w3.org/2001/XMLSchema}annotation")
+            else:
+                annotation_el = attribute.find("{http://www.w3.org/2001/XMLSchema}annotation")
+
+            use = attribute.attrib.get("use", "optional") == "required"
+            if "|" in details:
+                raise Exception("Cannot build Markdown table")
+            details = details.replace("\n", " ").strip()
+            best_practices = _get_bp_link(annotation_el)
+            if best_practices:
+                details += """ Find the Intergalactic Utilities Commission's suggested best practices for this element [here](%s).""" % best_practices
+
+            attribute_table.write("``%s`` | %s | %s\n" % (name, details, use))
+    return attribute_table.getvalue()
+
+
+def _find_attributes(tag):
+    return tag.findall("{http://www.w3.org/2001/XMLSchema}attribute") or \
+        tag.findall("{http://www.w3.org/2001/XMLSchema}complexType/{http://www.w3.org/2001/XMLSchema}attribute") or \
+        tag.findall("{http://www.w3.org/2001/XMLSchema}complexContent/{http://www.w3.org/2001/XMLSchema}extension/{http://www.w3.org/2001/XMLSchema}attribute") or \
+        tag.findall("{http://www.w3.org/2001/XMLSchema}simpleContent/{http://www.w3.org/2001/XMLSchema}extension/{http://www.w3.org/2001/XMLSchema}attribute")
+
+
+def _find_tag_el(tag):
+    if _doc_or_none(tag) is not None:
+        return tag
+
+    return _type_el(tag)
+
+
+def _type_el(tag):
+    element_type = tag.attrib["type"]
+    # use explicit None checks: lxml elements without children evaluate as falsy,
+    # so chaining the finds with `or` could skip a valid match
+    type_el = xmlschema_doc.find("//{http://www.w3.org/2001/XMLSchema}complexType/[@name='%s']" % element_type)
+    if type_el is None:
+        type_el = xmlschema_doc.find("//{http://www.w3.org/2001/XMLSchema}simpleType/[@name='%s']" % element_type)
+    return type_el
+
+
+def _doc_or_none(tag):
+    doc_el = tag.find("{http://www.w3.org/2001/XMLSchema}annotation/{http://www.w3.org/2001/XMLSchema}documentation")
+    if doc_el is None:
+        return None
+    else:
+        return doc_el.text
+
+
+if __name__ == '__main__':
+    main()
diff --git a/doc/patch.py b/doc/patch.py
new file mode 100755
index 0000000..efcfaf2
--- /dev/null
+++ b/doc/patch.py
@@ -0,0 +1,1076 @@
+#!/usr/bin/env python
+""" Patch utility to apply unified diffs
+
+    Brute-force line-by-line non-recursive parsing
+
+    Copyright (c) 2008-2012 anatoly techtonik
+    Available under the terms of MIT license
+
+    Project home: http://code.google.com/p/python-patch/
+
+
+    $Id: patch.py 181 2012-11-23 16:03:05Z techtonik $
+    $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
+
+    This program needs further tweaking for how we use it at Galaxy.
+"""
+from __future__ import print_function
+
+import logging
+import os
+import re
+import shutil
+import sys
+from optparse import OptionParser
+from os.path import abspath, exists, isfile
+
+from six import Iterator, StringIO
+from six.moves.urllib.request import urlopen
+
+__author__ = "anatoly techtonik <techtonik at gmail.com>"
+__version__ = "1.12.11"
+
+# -----------------------------------------------
+# Logging is controlled by logger named after the
+# module name (e.g. 'patch' for patch.py module)
+
+debugmode = False
+
+logger = logging.getLogger(__name__)
+
+debug = logger.debug
+info = logger.info
+warning = logger.warning
+
+
+class NullHandler(logging.Handler):
+    """ Copied from Python 2.7 to avoid getting
+        `No handlers could be found for logger "patch"`
+        http://bugs.python.org/issue16539
+    """
+
+    def handle(self, record):
+        pass
+
+    def emit(self, record):
+        pass
+
+    def createLock(self):
+        self.lock = None
+
+
+logger.addHandler(NullHandler())
+
+# -----------------------------------------------
+# Constants for Patch/PatchSet types
+
+DIFF = PLAIN = "plain"
+GIT = "git"
+HG = MERCURIAL = "mercurial"
+SVN = SUBVERSION = "svn"
+# the mixed type only applies when a PatchSet contains
+# Patches of different types
+MIXED = "mixed"
+
+
+# -----------------------------------------------
+# Helpers (these could come with Python stdlib)
+
+# The x...() functions are used to work with paths in
+# a cross-platform manner - all paths use forward
+# slashes, even on Windows.
+
+def xisabs(filename):
+    """ Cross-platform version of `os.path.isabs()`
+        Returns True if `filename` is absolute on
+        Linux, OS X or Windows.
+    """
+    if filename.startswith('/'):     # Linux/Unix
+        return True
+    elif filename.startswith('\\'):  # Windows
+        return True
+    elif re.match(r'\w:[\\/]', filename):  # Windows
+        return True
+    return False
+
+
+def xnormpath(path):
+    """ Cross-platform version of os.path.normpath """
+    return os.path.normpath(path).replace(os.sep, '/')
+
+
+def xstrip(filename):
+    """ Make relative path out of absolute by stripping
+        prefixes used on Linux, OS X and Windows.
+
+        This function is critical for security.
+    """
+    while xisabs(filename):
+        # strip windows drive with all slashes
+        if re.match(r'\w:[\\/]', filename):
+            filename = re.sub(r'^\w+:[\\/]+', '', filename)
+        # strip all slashes
+        elif re.match(r'[\\/]', filename):
+            filename = re.sub(r'^[\\/]+', '', filename)
+    return filename
+
+
+# ----------------------------------------------
+# Main API functions
+
+def fromfile(filename):
+    """ Parse patch file. If successful, returns
+        PatchSet() object. Otherwise returns False.
+    """
+    patchset = PatchSet()
+    debug("reading %s" % filename)
+    fp = open(filename, "rb")
+    res = patchset.parse(fp)
+    fp.close()
+    if res is True:
+        return patchset
+    return False
+
+
+def fromstring(s):
+    """ Parse text string and return PatchSet()
+        object (or False if parsing fails)
+    """
+    ps = PatchSet( StringIO(s) )
+    if ps.errors == 0:
+        return ps
+    return False
+
+
+def fromurl(url):
+    """ Parse patch from an URL, return False
+        if an error occured. Note that this also
+        can throw urlopen() exceptions.
+    """
+    ps = PatchSet( urlopen(url) )
+    if ps.errors == 0:
+        return ps
+    return False
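+
+# Typical use (an illustrative sketch):
+#   ps = fromfile("changes.diff")   # PatchSet instance, or False on parse failure
+#   if ps:
+#       ps.apply(strip=1)           # analogous to `patch -p1`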
+
+
+# --- Utility functions ---
+# [ ] reuse more universal pathsplit()
+def pathstrip(path, n):
+    """ Strip n leading components from the given path """
+    pathlist = [path]
+    while os.path.dirname(pathlist[0]) != '':
+        pathlist[0:1] = os.path.split(pathlist[0])
+    return '/'.join(pathlist[n:])
+# --- /Utility function ---
+
+
+class Hunk(object):
+    """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
+
+    def __init__(self):
+        self.startsrc = None  # line count starts with 1
+        self.linessrc = None
+        self.starttgt = None
+        self.linestgt = None
+        self.invalid = False
+        self.hasplus = False          # True if any "+" lines in hunk
+        self.hasminus = False         # True if any "-" lines in hunk
+        self.text = []
+
+    def originalText(self):
+
+        return("@@ -" + str(self.startsrc) +
+               "," + str(self.linessrc) +
+               " +" + str(self.starttgt) +
+               "," + str(self.linestgt) +
+               "\n" +
+               self.printableText())
+
+    def printableText(self):
+        """Reformat text into printable text"""
+
+        # yeah, there must be a better way to do this.
+        printable = ""
+        for line in self.text:
+            printable += line
+
+        return printable
+
+
+#  def apply(self, estream):
+#    """ write hunk data into enumerable stream
+#        return strings one by one until hunk is
+#        over
+#
+#        enumerable stream are tuples (lineno, line)
+#        where lineno starts with 0
+#    """
+#    pass
+
+
+class Patch(object):
+    """ Patch for a single file """
+
+    def __init__(self):
+        self.source = None
+        self.target = None
+        self.hunks = []
+        self.hunkends = []
+        self.header = []
+
+        self.type = None
+
+
+class PatchSet(Iterator):
+
+    def __init__(self, stream=None):
+        # --- API accessible fields ---
+
+        # name of the PatchSet (filename or ...)
+        self.name = None
+        # patch set type - one of constants
+        self.type = None
+
+        # list of Patch objects
+        self.items = []
+
+        self.errors = 0    # fatal parsing errors
+        self.warnings = 0  # non-critical warnings
+        # --- /API ---
+
+        if stream:
+            self.parse(stream)
+
+    def __len__(self):
+        return len(self.items)
+
+    def parse(self, stream):
+        """ parse unified diff
+            return True on success
+        """
+        lineends = dict(lf=0, crlf=0, cr=0)
+        nexthunkno = 0    #: even if the index starts with 0, user messages number hunks from 1
+
+        p = None
+        hunk = None
+        # hunkactual variable is used to calculate hunk lines for comparison
+        hunkactual = dict(linessrc=None, linestgt=None)
+
+        class wrapumerate(enumerate):
+            """Enumerate wrapper that uses boolean end of stream status instead of
+            StopIteration exception, and properties to access line information.
+            """
+
+            def __init__(self, *args, **kwargs):
+                # we don't call parent, it is magically created by __new__ method
+                self._exhausted = False
+                self._lineno = False     # after end of stream, equals the number of lines
+                self._line = False       # will be reset to False after end of stream
+
+            def __next__(self):
+                """Try to read the next line and return True if it is available,
+                   False if end of stream is reached."""
+                if self._exhausted:
+                    return False
+
+                try:
+                    self._lineno, self._line = next(super(wrapumerate, self))
+                except StopIteration:
+                    self._exhausted = True
+                    self._line = False
+                    return False
+                return True
+
+            @property
+            def is_empty(self):
+                return self._exhausted
+
+            @property
+            def line(self):
+                return self._line
+
+            @property
+            def lineno(self):
+                return self._lineno
+
+        # define states (possible file regions) that direct parse flow
+        headscan = True  # start with scanning header
+        filenames = False  # lines starting with --- and +++
+
+        hunkhead = False  # @@ -R +R @@ sequence
+        hunkbody = False  #
+        hunkskip = False  # skipping invalid hunk mode
+
+        hunkparsed = False  # state after successfully parsed hunk
+
+        # regexp to match start of hunk, used groups - 1,3,4,6
+        re_hunk_start = re.compile(r"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?")
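+        # e.g. matches "@@ -3,7 +4,8 @@", yielding startsrc=3, linessrc=7, starttgt=4, linestgt=8 below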
+
+        self.errors = 0
+        # temp buffers for header and filenames info
+        header = []
+        srcname = None
+        tgtname = None
+
+        # start of main cycle
+        # each parsing block already has line available in fe.line
+        fe = wrapumerate(stream)
+        while next(fe):
+
+            # -- deciders: these only switch state to decide who should process
+            # --           line fetched at the start of this cycle
+            if hunkparsed:
+                hunkparsed = False
+                if re_hunk_start.match(fe.line):
+                    hunkhead = True
+                elif fe.line.startswith("--- "):
+                    filenames = True
+                else:
+                    headscan = True
+            # -- ------------------------------------
+
+            # read out header
+            if headscan:
+                while not fe.is_empty and not fe.line.startswith("--- "):
+                    header.append(fe.line)
+                    next(fe)
+                if fe.is_empty:
+                    if p is None:
+                        debug("no patch data found")  # error is shown later
+                        self.errors += 1
+                    else:
+                        info("%d unparsed bytes left at the end of stream" % len(''.join(header)))
+                        self.warnings += 1
+                        # TODO check for \No new line at the end..
+                        # TODO test for unparsed bytes
+                        # otherwise error += 1
+                    # this is actually a loop exit
+                    continue
+
+                headscan = False
+                # switch to filenames state
+                filenames = True
+
+            line = fe.line
+            lineno = fe.lineno
+
+            # hunkskip and hunkbody code skipped until definition of hunkhead is parsed
+            if hunkbody:
+                # [x] treat empty lines inside hunks as containing a single space
+                #     (this happens when a diff is saved by copy/pasting into an
+                #      editor that strips trailing whitespace)
+                if line.strip("\r\n") == "":
+                    debug("expanding empty line in a middle of hunk body")
+                    self.warnings += 1
+                    line = ' ' + line
+
+                # process line first
+                if re.match(r"^[- \+\\]", line):
+                    # gather stats about line endings
+                    if line.endswith("\r\n"):
+                        p.hunkends["crlf"] += 1
+                    elif line.endswith("\n"):
+                        p.hunkends["lf"] += 1
+                    elif line.endswith("\r"):
+                        p.hunkends["cr"] += 1
+
+                    if line.startswith("-"):
+                        hunkactual["linessrc"] += 1
+                        hunk.hasminus = True
+                    elif line.startswith("+"):
+                        hunkactual["linestgt"] += 1
+                        hunk.hasplus = True
+                    elif not line.startswith("\\"):
+                        hunkactual["linessrc"] += 1
+                        hunkactual["linestgt"] += 1
+                    hunk.text.append(line)
+                    # todo: handle \ No newline cases
+                else:
+                    warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno + 1, p.target))
+                    # add hunk status node
+                    hunk.invalid = True
+                    p.hunks.append(hunk)
+                    self.errors += 1
+                    # switch to hunkskip state
+                    hunkbody = False
+                    hunkskip = True
+
+                # check exit conditions
+                if hunkactual["linessrc"] > hunk.linessrc or hunkactual["linestgt"] > hunk.linestgt:
+                    warning("extra lines for hunk no.%d at %d for target %s" % (nexthunkno, lineno + 1, p.target))
+                    # add hunk status node
+                    hunk.invalid = True
+                    p.hunks.append(hunk)
+                    self.errors += 1
+                    # switch to hunkskip state
+                    hunkbody = False
+                    hunkskip = True
+                elif hunk.linessrc == hunkactual["linessrc"] and hunk.linestgt == hunkactual["linestgt"]:
+                    # hunk parsed successfully
+                    p.hunks.append(hunk)
+                    # switch to hunkparsed state
+                    hunkbody = False
+                    hunkparsed = True
+
+                    # detect mixed window/unix line ends
+                    ends = p.hunkends
+                    if ((ends["cr"] != 0) + (ends["crlf"] != 0) + (ends["lf"] != 0)) > 1:
+                        warning("inconsistent line ends in patch hunks for %s" % p.source)
+                        self.warnings += 1
+                    if debugmode:
+                        debuglines = dict(ends)
+                        debuglines.update(file=p.target, hunk=nexthunkno)
+                        debug("crlf: %(crlf)d  lf: %(lf)d  cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
+                    # fetch next line
+                    continue
+
+            if hunkskip:
+                if re_hunk_start.match(line):
+                    # switch to hunkhead state
+                    hunkskip = False
+                    hunkhead = True
+                elif line.startswith("--- "):
+                    # switch to filenames state
+                    hunkskip = False
+                    filenames = True
+                    if debugmode and len(self.items) > 0:
+                        debug("- %2d hunks for %s" % (len(p.hunks), p.source))
+
+            if filenames:
+                if line.startswith("--- "):
+                    if srcname is not None:
+                        # XXX testcase
+                        warning("skipping false patch for %s" % srcname)
+                        srcname = None
+                        # XXX header += srcname
+                        # double source filename line is encountered
+                        # attempt to restart from this second line
+                    re_filename = "^--- ([^\t]+)"
+                    match = re.match(re_filename, line)
+                    # todo: support spaces in filenames
+                    if match:
+                        srcname = match.group(1).strip()
+                    else:
+                        warning("skipping invalid filename at line %d" % lineno)
+                        self.errors += 1
+                        # XXX p.header += line
+                        # switch back to headscan state
+                        filenames = False
+                        headscan = True
+                elif not line.startswith("+++ "):
+                    if srcname is not None:
+                        warning("skipping invalid patch with no target for %s" % srcname)
+                        self.errors += 1
+                        srcname = None
+                        # XXX header += srcname
+                        # XXX header += line
+                    else:
+                        # this should be unreachable
+                        warning("skipping invalid target patch")
+                    filenames = False
+                    headscan = True
+                else:
+                    if tgtname is not None:
+                        # XXX seems to be a dead branch
+                        warning("skipping invalid patch - double target at line %d" % lineno)
+                        self.errors += 1
+                        srcname = None
+                        tgtname = None
+                        # XXX header += srcname
+                        # XXX header += tgtname
+                        # XXX header += line
+                        # double target filename line is encountered
+                        # switch back to headscan state
+                        filenames = False
+                        headscan = True
+                    else:
+                        re_filename = "^\+\+\+ ([^\t]+)"
+                        match = re.match(re_filename, line)
+                        if not match:
+                            warning("skipping invalid patch - no target filename at line %d" % lineno)
+                            self.errors += 1
+                            srcname = None
+                            # switch back to headscan state
+                            filenames = False
+                            headscan = True
+                        else:
+                            if p:  # for the first run p is None
+                                self.items.append(p)
+                            p = Patch()
+                            p.source = srcname
+                            srcname = None
+                            p.target = match.group(1).strip()
+                            p.header = header
+                            header = []
+                            # switch to hunkhead state
+                            filenames = False
+                            hunkhead = True
+                            nexthunkno = 0
+                            p.hunkends = lineends.copy()
+                            continue
+
+            if hunkhead:
+                match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+                if not match:
+                    if not p.hunks:
+                        warning("skipping invalid patch with no hunks for file %s" % p.source)
+                        self.errors += 1
+                        # XXX review switch
+                        # switch to headscan state
+                        hunkhead = False
+                        headscan = True
+                        continue
+                    else:
+                        # TODO review condition case
+                        # switch to headscan state
+                        hunkhead = False
+                        headscan = True
+                else:
+                    hunk = Hunk()
+                    hunk.startsrc = int(match.group(1))
+                    hunk.linessrc = 1
+                    if match.group(3):
+                        hunk.linessrc = int(match.group(3))
+                    hunk.starttgt = int(match.group(4))
+                    hunk.linestgt = 1
+                    if match.group(6):
+                        hunk.linestgt = int(match.group(6))
+                    hunk.invalid = False
+                    hunk.text = []
+
+                    hunkactual["linessrc"] = hunkactual["linestgt"] = 0
+
+                    # switch to hunkbody state
+                    hunkhead = False
+                    hunkbody = True
+                    nexthunkno += 1
+                    continue
+
+        # /while fe.next()
+
+        if p:
+            self.items.append(p)
+
+        if not hunkparsed:
+            if hunkskip:
+                warning("warning: finished with errors, some hunks may be invalid")
+            elif headscan:
+                if len(self.items) == 0:
+                    warning("error: no patch data found!")
+                    return False
+                else:  # extra data at the end of file
+                    pass
+            else:
+                warning("error: patch stream is incomplete!")
+                self.errors += 1
+                if len(self.items) == 0:
+                    return False
+
+        if debugmode and len(self.items) > 0:
+            debug("- %2d hunks for %s" % (len(p.hunks), p.source))
+
+        # XXX fix total hunks calculation
+        debug("total files: %d  total hunks: %d" % (len(self.items),
+            sum(len(p.hunks) for p in self.items)))
+
+        # ---- detect patch and patchset types ----
+        for idx, p in enumerate(self.items):
+            self.items[idx].type = self._detect_type(p)
+
+        types = set([p.type for p in self.items])
+        if len(types) > 1:
+            self.type = MIXED
+        else:
+            self.type = types.pop()
+        # --------
+
+        self._normalize_filenames()
+
+        return (self.errors == 0)
+
+    def _detect_type(self, p):
+        """ detect and return type for the specified Patch object
+            analyzes header and filenames info
+
+            NOTE: must be run before filenames are normalized
+        """
+
+        # check for SVN
+        #  - header starts with Index:
+        #  - next line is ===... delimiter
+        #  - filename is followed by revision number
+        # TODO add SVN revision
+        if (len(p.header) > 1 and p.header[-2].startswith("Index: ")
+                and p.header[-1].startswith("=" * 67)):
+            return SVN
+
+        # common checks for both HG and GIT
+        DVCS = ((p.source.startswith('a/') or p.source == '/dev/null')
+            and (p.target.startswith('b/') or p.target == '/dev/null'))
+
+        # GIT type check
+        #  - header[-2] is like "diff --git a/oldname b/newname"
+        #  - header[-1] is like "index <hash>..<hash> <mode>"
+        # TODO add git rename diffs and add/remove diffs
+        #      add git diff with spaced filename
+        # TODO http://www.kernel.org/pub/software/scm/git/docs/git-diff.html
+
+        # detect the start of diff header - there might be some comments before
+        if len(p.header) > 1:
+            for idx in reversed(range(len(p.header))):
+                if p.header[idx].startswith("diff --git"):
+                    break
+            if re.match(r'diff --git a/[\w/.]+ b/[\w/.]+', p.header[idx]):
+                if (idx + 1 < len(p.header)
+                        and re.match(r'index \w{7}..\w{7} \d{6}', p.header[idx + 1])):
+                    if DVCS:
+                        return GIT
+
+        # HG check
+        #
+        #  - for plain HG format header is like "diff -r b2d9961ff1f5 filename"
+        #  - for Git-style HG patches it is "diff --git a/oldname b/newname"
+        #  - filename starts with a/, b/ or is equal to /dev/null
+        #  - exported changesets also contain the header
+        #    # HG changeset patch
+        #    # User name at example.com
+        #    ...
+        # TODO add MQ
+        # TODO add revision info
+        if len(p.header) > 0:
+            if DVCS and re.match(r'diff -r \w{12} .*', p.header[-1]):
+                return HG
+            if DVCS and p.header[-1].startswith('diff --git a/'):
+                if len(p.header) == 1:  # native Git patch header len is 2
+                    return HG
+                elif p.header[0].startswith('# HG changeset patch'):
+                    return HG
+
+        return PLAIN
+
+    def _normalize_filenames(self):
+        """ sanitize filenames, normalizing paths, i.e.:
+            1. strip a/ and b/ prefixes from GIT and HG style patches
+            2. remove all references to parent directories (with warning)
+            3. translate any absolute paths to relative (with warning)
+
+            [x] always use forward slashes to be cross-platform
+                (diff/patch were born as a unix utility after all)
+
+            return None
+        """
+        for i, p in enumerate(self.items):
+            if p.type in (HG, GIT):
+                # TODO: figure out how to deal with /dev/null entries
+                debug("stripping a/ and b/ prefixes")
+                if p.source != '/dev/null':
+                    if not p.source.startswith("a/"):
+                        warning("invalid source filename")
+                    else:
+                        p.source = p.source[2:]
+                if p.target != '/dev/null':
+                    if not p.target.startswith("b/"):
+                        warning("invalid target filename")
+                    else:
+                        p.target = p.target[2:]
+
+            p.source = xnormpath(p.source)
+            p.target = xnormpath(p.target)
+
+            sep = '/'  # sep value can be hardcoded, but it looks nice this way
+
+            # references to parent are not allowed
+            if p.source.startswith(".." + sep):
+                warning("error: stripping parent path for source file patch no.%d" % (i + 1))
+                self.warnings += 1
+                while p.source.startswith(".." + sep):
+                    p.source = p.source.partition(sep)[2]
+            if p.target.startswith(".." + sep):
+                warning("error: stripping parent path for target file patch no.%d" % (i + 1))
+                self.warnings += 1
+                while p.target.startswith(".." + sep):
+                    p.target = p.target.partition(sep)[2]
+            # absolute paths are not allowed
+            if xisabs(p.source) or xisabs(p.target):
+                warning("error: absolute paths are not allowed - file no.%d" % (i + 1))
+                self.warnings += 1
+                if xisabs(p.source):
+                    warning("stripping absolute path from source name '%s'" % p.source)
+                    p.source = xstrip(p.source)
+                if xisabs(p.target):
+                    warning("stripping absolute path from target name '%s'" % p.target)
+                    p.target = xstrip(p.target)
+
+            self.items[i].source = p.source
+            self.items[i].target = p.target
+
+    def diffstat(self):
+        """ calculate diffstat and return as a string
+            Notes:
+              - the original diffstat outputs the target filename
+              - single + or - shouldn't escape histogram
+        """
+        names = []
+        insert = []
+        delete = []
+        namelen = 0
+        # max number of changes for single file (for histogram width
+        # calculation)
+        maxdiff = 0
+        for patch in self.items:
+            i, d = 0, 0
+            for hunk in patch.hunks:
+                for line in hunk.text:
+                    if line.startswith('+'):
+                        i += 1
+                    elif line.startswith('-'):
+                        d += 1
+            names.append(patch.target)
+            insert.append(i)
+            delete.append(d)
+            namelen = max(namelen, len(patch.target))
+            maxdiff = max(maxdiff, i + d)
+        output = ''
+        statlen = len(str(maxdiff))  # stats column width
+        for i, n in enumerate(names):
+            # %-19s | %-4d %s
+            format = " %-" + str(namelen) + "s | %" + str(statlen) + "s %s\n"
+
+            hist = ''
+            # -- calculating histogram --
+            width = len(format % ('', '', ''))
+            histwidth = max(2, 80 - width)
+            if maxdiff < histwidth:
+                hist = "+" * insert[i] + "-" * delete[i]
+            else:
+                iratio = (float(insert[i]) / maxdiff) * histwidth
+                dratio = (float(delete[i]) / maxdiff) * histwidth
+
+                # make sure every entry gets at least one + or -
+                iwidth = 1 if 0 < iratio < 1 else int(iratio)
+                dwidth = 1 if 0 < dratio < 1 else int(dratio)
+                hist = "+" * int(iwidth) + "-" * int(dwidth)
+            # -- /calculating +- histogram --
+            output += (format % (names[i], insert[i] + delete[i], hist))
+
+        output += (" %d files changed, %d insertions(+), %d deletions(-)"
+                   % (len(names), sum(insert), sum(delete)))
+        return output
+
+    def apply(self, strip=0):
+        """ apply parsed patch
+            return True on success
+        """
+
+        total = len(self.items)
+        errors = 0
+        if strip:
+            # [ ] test strip level exceeds nesting level
+            #   [ ] test the same only for selected files
+            #     [ ] test if files end up being on the same level
+            try:
+                strip = int(strip)
+            except ValueError:
+                errors += 1
+                warning("error: strip parameter '%s' must be an integer" % strip)
+                strip = 0
+
+        for i, p in enumerate(self.items):
+            f2patch = p.source
+            if strip:
+                debug("stripping %s leading component from '%s'" % (strip, f2patch))
+                f2patch = pathstrip(f2patch, strip)
+            if not exists(f2patch):
+                f2patch = p.target
+                if strip:
+                    debug("stripping %s leading component from '%s'" % (strip, f2patch))
+                    f2patch = pathstrip(f2patch, strip)
+                if not exists(f2patch):
+                    warning("source/target file does not exist\n--- %s\n+++ %s" % (p.source, f2patch))
+                    errors += 1
+                    continue
+            if not isfile(f2patch):
+                warning("not a file - %s" % f2patch)
+                errors += 1
+                continue
+            filename = f2patch
+
+            debug("processing %d/%d:\t %s" % (i + 1, total, filename))
+
+            # validate before patching
+            f2fp = open(filename)
+            hunkno = 0
+            hunk = p.hunks[hunkno]
+            hunkfind = []
+            validhunks = 0
+            canpatch = False
+            for lineno, line in enumerate(f2fp):
+                if lineno + 1 < hunk.startsrc:
+                    continue
+                elif lineno + 1 == hunk.startsrc:
+                    hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
+                    hunklineno = 0
+
+                    # todo \ No newline at end of file
+
+                # check hunks in source file
+                if lineno + 1 < hunk.startsrc + len(hunkfind) - 1:
+                    if line.rstrip("\r\n") == hunkfind[hunklineno]:
+                        hunklineno += 1
+                    else:
+                        info("file %d/%d:\t %s" % (i + 1, total, filename))
+                        info(" hunk no.%d doesn't match source file at line %d" % (hunkno + 1, lineno))
+                        info("  expected: %s" % hunkfind[hunklineno])
+                        info("  actual  : %s" % line.rstrip("\r\n"))
+                        # not counting this as an error, because the file may already be
+                        # patched; whether the file is already patched is checked after
+                        # the number of invalid hunks is known
+                        # TODO: check hunks against source/target file in one pass
+                        #   API - check(stream, srchunks, tgthunks)
+                        #           return tuple (srcerrs, tgterrs)
+
+                        # continue to check other hunks for completeness
+                        hunkno += 1
+                        if hunkno < len(p.hunks):
+                            hunk = p.hunks[hunkno]
+                            continue
+                        else:
+                            break
+
+                # check if processed line is the last line
+                if lineno + 1 == hunk.startsrc + len(hunkfind) - 1:
+                    debug(" hunk no.%d for file %s  -- is ready to be patched" % (hunkno + 1, filename))
+                    hunkno += 1
+                    validhunks += 1
+                    if hunkno < len(p.hunks):
+                        hunk = p.hunks[hunkno]
+                    else:
+                        if validhunks == len(p.hunks):
+                            # patch file
+                            canpatch = True
+                            break
+            else:
+                if hunkno < len(p.hunks):
+                    warning("premature end of source file %s at hunk %d" % (filename, hunkno + 1))
+                    errors += 1
+
+            f2fp.close()
+
+            if validhunks < len(p.hunks):
+                if self._match_file_hunks(filename, p.hunks):
+                    warning("already patched  %s" % filename)
+                else:
+                    warning("source file is different - %s" % filename)
+                    errors += 1
+            if canpatch:
+                backupname = filename + ".orig"
+                if exists(backupname):
+                    warning("can't backup original file to %s - aborting" % backupname)
+                else:
+                    shutil.move(filename, backupname)
+                    if self.write_hunks(backupname, filename, p.hunks):
+                        info("successfully patched %d/%d:\t %s" % (i + 1, total, filename))
+                        os.unlink(backupname)
+                    else:
+                        errors += 1
+                        warning("error patching file %s" % filename)
+                        shutil.copy(filename, filename + ".invalid")
+                        warning("invalid version is saved to %s" % filename + ".invalid")
+                        # todo: proper rejects
+                        shutil.move(backupname, filename)
+
+        # todo: check for premature eof
+        return (errors == 0)
+
+    def can_patch(self, filename):
+        """ Check if specified filename can be patched. Returns None if file can
+        not be found among source filenames. False if patch can not be applied
+        clearly. True otherwise.
+
+        :returns: True, False or None
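+
+        A minimal usage sketch (file names are hypothetical; ``fromfile``
+        and ``apply`` are the same module-level loader and PatchSet method
+        used by the ``__main__`` block below)::
+
+            ps = fromfile("fix.patch")
+            if ps.can_patch("lib/module.py"):
+                ps.apply(0)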
+        """
+        filename = abspath(filename)
+        for p in self.items:
+            if filename == abspath(p.source):
+                return self._match_file_hunks(filename, p.hunks)
+        return None
+
+    def _match_file_hunks(self, filepath, hunks):
+        matched = True
+        fp = open(abspath(filepath))
+
+        class NoMatch(Exception):
+            pass
+
+        lineno = 1
+        line = fp.readline()
+        hno = None
+        try:
+            for hno, h in enumerate(hunks):
+                # skip to first line of the hunk
+                while lineno < h.starttgt:
+                    if not len(line):  # eof
+                        debug("check failed - premature eof before hunk: %d" % (hno + 1))
+                        raise NoMatch
+                    line = fp.readline()
+                    lineno += 1
+                for hline in h.text:
+                    if hline.startswith("-"):
+                        continue
+                    if not len(line):
+                        debug("check failed - premature eof on hunk: %d" % (hno + 1))
+                        # todo: \ No newline at the end of file
+                        raise NoMatch
+                    if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
+                        debug("file is not patched - failed hunk: %d" % (hno + 1))
+                        raise NoMatch
+                    line = fp.readline()
+                    lineno += 1
+
+        except NoMatch:
+            matched = False
+            # todo: display failed hunk, i.e. expected/found
+
+        fp.close()
+        return matched
+
+    def patch_stream(self, instream, hunks):
+        """ Generator that yields stream patched with hunks iterable
+
+            Converts lineends in hunk lines to the best suitable format
+            autodetected from input
+        """
+
+        # todo: At the moment substituted lineends may not be the same
+        #       at the start and at the end of patching. Also issue a
+        #       warning/throw about mixed lineends (is it really needed?)
+
+        hunks = iter(hunks)
+
+        srclineno = 1
+
+        lineends = {'\n': 0, '\r\n': 0, '\r': 0}
+
+        def get_line():
+            """
+            local utility function - return line from source stream
+            collecting line end statistics on the way
+            """
+            line = instream.readline()
+            # 'U' mode works only with text files
+            if line.endswith("\r\n"):
+                lineends["\r\n"] += 1
+            elif line.endswith("\n"):
+                lineends["\n"] += 1
+            elif line.endswith("\r"):
+                lineends["\r"] += 1
+            return line
+
+        for hno, h in enumerate(hunks):
+            debug("hunk %d" % (hno + 1))
+            if h.hasminus:
+                warning("Change removes/replaces some text; INVESTIGATE AND APPLY (OR NOT) MANUALLY")
+                warning("Change:")
+                changeText = h.originalText()
+                if len(changeText) > 1000:
+                    changeText = changeText[0:999] + "...\n"
+                warning(changeText)
+            else:
+                # skip to line just before hunk starts
+                while srclineno < h.startsrc:
+                    yield get_line()
+                    srclineno += 1
+
+                for hline in h.text:
+                    # todo: check \ No newline at the end of file
+                    if hline.startswith("-") or hline.startswith("\\"):
+                        get_line()
+                        srclineno += 1
+                        continue
+                    else:
+                        if not hline.startswith("+"):
+                            get_line()
+                            srclineno += 1
+                        line2write = hline[1:]
+                        # detect if line ends are consistent in source file
+                        if sum([bool(lineends[x]) for x in lineends]) == 1:
+                            newline = [x for x in lineends if lineends[x] != 0][0]
+                            yield line2write.rstrip("\r\n") + newline
+                        else:  # newlines are mixed
+                            yield line2write
+
+        for line in instream:
+            yield line
+
+    def write_hunks(self, srcname, tgtname, hunks):
+        src = open(srcname, "rb")
+        tgt = open(tgtname, "wb")
+
+        debug("processing target file %s" % tgtname)
+
+        tgt.writelines(self.patch_stream(src, hunks))
+
+        tgt.close()
+        src.close()
+        # [ ] TODO: add test for permission copy
+        shutil.copymode(srcname, tgtname)
+        return True
+
+
+if __name__ == "__main__":
+    opt = OptionParser(usage="1. %prog [options] unified.diff\n"
+                       "       2. %prog [options] http://host/patch\n"
+                       "       3. %prog [options] -- < unified.diff",
+                       version="python-patch %s" % __version__)
+    opt.add_option("-q", "--quiet", action="store_const", dest="verbosity",
+                   const=0, help="print only warnings and errors", default=1)
+    opt.add_option("-v", "--verbose", action="store_const", dest="verbosity",
+                   const=2, help="be verbose")
+    opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
+    opt.add_option("--diffstat", action="store_true", dest="diffstat",
+                   help="print diffstat and exit")
+    opt.add_option("-p", "--strip", type="int", metavar='N', default=0,
+                   help="strip N path components from filenames")
+    (options, args) = opt.parse_args()
+
+    if not args and sys.argv[-1:] != ['--']:
+        opt.print_version()
+        opt.print_help()
+        sys.exit()
+    readstdin = (sys.argv[-1:] == ['--'] and not args)
+
+    debugmode = options.debugmode
+
+    verbosity_levels = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
+    loglevel = verbosity_levels[options.verbosity]
+    logformat = "%(message)s"
+    if debugmode:
+        loglevel = logging.DEBUG
+        logformat = "%(levelname)8s %(message)s"
+    logger.setLevel(loglevel)
+    loghandler = logging.StreamHandler()
+    loghandler.setFormatter(logging.Formatter(logformat))
+    logger.addHandler(loghandler)
+
+    if readstdin:
+        patch = PatchSet(sys.stdin)
+    else:
+        patchfile = args[0]
+        urltest = patchfile.split(':')[0]
+        if (':' in patchfile and urltest.isalpha()
+                and len(urltest) > 1):  # a single char before ':' is a Windows drive letter, not a URL scheme
+            patch = fromurl(patchfile)
+        else:
+            if not exists(patchfile) or not isfile(patchfile):
+                sys.exit("patch file does not exist - %s" % patchfile)
+            patch = fromfile(patchfile)
+
+    if options.diffstat:
+        print(patch.diffstat())
+        sys.exit(0)
+
+    patch.apply(options.strip) or sys.exit(-1)
+
+    # todo: document and test line ends handling logic - patch.py detects proper line-endings
+    #       for inserted hunks and issues a warning if the patched file has inconsistent line ends
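+
+    # Example command-line usage (hypothetical patch file name and script
+    # invocation), matching the options wired up above:
+    #
+    #   python patch.py --diffstat fix.patch   # print diffstat and exit
+    #   python patch.py -p 1 fix.patch         # strip one path component, then apply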
diff --git a/doc/schema_template.md b/doc/schema_template.md
new file mode 100644
index 0000000..77d077d
--- /dev/null
+++ b/doc/schema_template.md
@@ -0,0 +1,79 @@
+# Galaxy Tool XML File
+
+The XML File for a Galaxy tool, generally referred to as the "tool config
+file" or "wrapper", serves a number of purposes. First, it lays out the user
+interface for the tool (e.g. form fields, text, help, etc.). Second, it
+provides the glue that links your tool to Galaxy by telling Galaxy how to
+invoke it, what options to pass, and what files it will produce as output.
+
+If you find a bug please report it [here](https://github.com/galaxyproject/galaxy/issues/new).
+
+This document serves as reference documentation. If you would like to learn
+how to build tools for Galaxy,
+[Planemo](https://planemo.readthedocs.io/en/latest/writing.html) features a
+number of tutorials on building Galaxy tools that would better serve that purpose.
+
+$tag:tool://element[@name='tool']
+$tag:tool|description://element[@name='tool']//element[@name='description']
+$tag:tool|version_command://complexType[@name='VersionCommand']
+$tag:tool|command://element[@name='tool']//element[@name='command'] hide_attributes
+$tag:tool|inputs://complexType[@name='Inputs']
+$tag:tool|inputs|section://complexType[@name='Section']
+$tag:tool|inputs|repeat://complexType[@name='Repeat']
+$tag:tool|inputs|conditional://complexType[@name='Conditional']
+$tag:tool|inputs|conditional|when://complexType[@name='ConditionalWhen']
+$tag:tool|inputs|param://complexType[@name='Param']
+$tag:tool|inputs|param|validator://complexType[@name='Validator']
+$tag:tool|inputs|param|option://complexType[@name='ParamOption']
+$tag:tool|inputs|param|options://complexType[@name='ParamOptions']
+$tag:tool|inputs|param|options|column://complexType[@name='Column']
+$tag:tool|inputs|param|options|filter://complexType[@name='Filter']
+$tag:tool|inputs|param|sanitizer://complexType[@name='Sanitizer']
+$tag:tool|inputs|param|sanitizer|valid://complexType[@name='SanitizerValid']
+$tag:tool|inputs|param|sanitizer|valid|add://complexType[@name='SanitizerValidAdd']
+$tag:tool|inputs|param|sanitizer|valid|remove://complexType[@name='SanitizerValidRemove']
+$tag:tool|inputs|param|sanitizer|mapping://complexType[@name='SanitizerMapping']
+$tag:tool|inputs|param|sanitizer|mapping|add://complexType[@name='SanitizerMappingAdd']
+$tag:tool|inputs|param|sanitizer|mapping|remove://complexType[@name='SanitizerMappingRemove']
+$tag:tool|configfiles://complexType[@name='ConfigFiles']
+$tag:tool|configfiles|configfile://complexType[@name='ConfigFile']
+$tag:tool|configfiles|inputs://complexType[@name='ConfigInputs']
+$tag:tool|environment_variables://complexType[@name='EnvironmentVariables']
+$tag:tool|environment_variables|environment_variable://complexType[@name='EnvironmentVariable']
+$tag:tool|outputs://complexType[@name='Outputs']
+$tag:tool|outputs|data://complexType[@name='Data']
+$tag:tool|outputs|data|filter://complexType[@name='OutputFilter']
+$tag:tool|outputs|data|change_format://complexType[@name='ChangeFormat']
+$tag:tool|outputs|data|change_format|when://complexType[@name='ChangeFormatWhen']
+$tag:tool|outputs|data|actions://complexType[@name='Actions']
+$tag:tool|outputs|data|actions|conditional://complexType[@name='ActionsConditional']
+$tag:tool|outputs|data|actions|conditional|when://complexType[@name='ActionsConditionalWhen']
+$tag:tool|outputs|data|actions|action://complexType[@name='Action']
+$tag:tool|outputs|data|discover_datasets://complexType[@name='OutputDiscoverDatasets']
+$tag:tool|outputs|collection://complexType[@name='Collection']
+$tag:tool|outputs|collection|filter://complexType[@name='OutputFilter']
+$tag:tool|outputs|collection|discover_datasets://complexType[@name='OutputCollectionDiscoverDatasets']
+$tag:tool|tests://complexType[@name='Tests']
+$tag:tool|tests|test://complexType[@name='Test']
+$tag:tool|tests|test|param://complexType[@name='TestParam']
+$tag:tool|tests|test|repeat://complexType[@name='TestRepeat']
+$tag:tool|tests|test|section://complexType[@name='TestSection']
+$tag:tool|tests|test|conditional://complexType[@name='TestConditional']
+$tag:tool|tests|test|output://complexType[@name='TestOutput']
+$tag:tool|tests|test|output|discover_dataset://complexType[@name='TestDiscoveredDataset']
+$tag:tool|tests|test|output|metadata://complexType[@name='TestOutputMetadata']
+$tag:tool|tests|test|output|assert_contents://group[@name='TestOutputElement']//element[@name='assert_contents']
+$tag:tool|tests|test|output_collection://complexType[@name='TestOutputCollection']
+$tag:tool|tests|test|assert_command://group[@name='TestParamElement']//element[@name='assert_command']
+$tag:tool|tests|test|assert_stdout://group[@name='TestParamElement']//element[@name='assert_stdout']
+$tag:tool|tests|test|assert_stderr://group[@name='TestParamElement']//element[@name='assert_stderr']
+$tag:tool|code://complexType[@name='Code']
+$tag:tool|requirements://complexType[@name='Requirements']
+$tag:tool|requirements|requirement://complexType[@name='Requirement']
+$tag:tool|requirements|container://complexType[@name='Container']
+$tag:tool|stdio://complexType[@name='Stdio']
+$tag:tool|stdio|exit_code://complexType[@name='ExitCode'] hide_attributes
+$tag:tool|stdio|regex://complexType[@name='Regex'] hide_attributes
+$tag:tool|help://element[@name='tool']//element[@name='help']
+$tag:tool|citations://complexType[@name='Citations']
+$tag:tool|citations|citation://complexType[@name='Citation']
diff --git a/doc/source/_static/style.css b/doc/source/_static/style.css
new file mode 100644
index 0000000..ae29519
--- /dev/null
+++ b/doc/source/_static/style.css
@@ -0,0 +1,3 @@
+div.floatright {
+    float: right;
+}
diff --git a/doc/source/_templates/layout.html b/doc/source/_templates/layout.html
new file mode 100644
index 0000000..324da72
--- /dev/null
+++ b/doc/source/_templates/layout.html
@@ -0,0 +1,5 @@
+{# layout.html #}
+{# Import the theme's layout. #}
+{% extends "!layout.html" %}
+
+{% set css_files = css_files + ['_static/style.css'] %}
diff --git a/doc/source/admin/chat-0.png b/doc/source/admin/chat-0.png
new file mode 100644
index 0000000..98ef2b5
Binary files /dev/null and b/doc/source/admin/chat-0.png differ
diff --git a/doc/source/admin/chat-1.png b/doc/source/admin/chat-1.png
new file mode 100644
index 0000000..70a09ed
Binary files /dev/null and b/doc/source/admin/chat-1.png differ
diff --git a/doc/source/admin/chat-2.png b/doc/source/admin/chat-2.png
new file mode 100644
index 0000000..45aa7e7
Binary files /dev/null and b/doc/source/admin/chat-2.png differ
diff --git a/doc/source/admin/chat-3.png b/doc/source/admin/chat-3.png
new file mode 100644
index 0000000..8dc815a
Binary files /dev/null and b/doc/source/admin/chat-3.png differ
diff --git a/doc/source/admin/chat-4.png b/doc/source/admin/chat-4.png
new file mode 100644
index 0000000..428151e
Binary files /dev/null and b/doc/source/admin/chat-4.png differ
diff --git a/doc/source/admin/chat.rst b/doc/source/admin/chat.rst
new file mode 100644
index 0000000..fb249a4
--- /dev/null
+++ b/doc/source/admin/chat.rst
@@ -0,0 +1,137 @@
+Galaxy Communication Server
+===========================
+
+A problem faced by administrators, especially those of smaller Galaxy sites, is how to connect users with documentation on workflows and analysis procedures, and then help them through running these steps. For sites where the admins and the tool developers are the same group, finding time to develop the required training resources and to provide the necessary level of hands-on guidance can be an impossible challenge.
+
+To help solve this, a simple chat server has been added to Galaxy to provide real-time communication between users. The hope is that this will decrease the burden on administrators:
+
+- *Troubleshooting*: When users have trouble with a tool, they can ask in chat if
+  anyone can help, rather than coming to administrators and tool developers
+  first.
+- *Sharing*: Your Galaxy users can easily talk to coworkers and perhaps learn
+  about new analyses they've each developed.
+- *Dedicated Discussion*: The chat implementation provides the ability to
+  designate some fixed chat rooms which will always be visible to users; these
+  can be used to help guide user discussion to topics like NGS.
+
+.. image:: communication_server.png
+
+Pre-requisites
+--------------
+
+This service will need to be deployed on a host that has:
+
+- Access to the Galaxy database
+- A port exposed
+- A non-default ``id_secret`` set in your ``galaxy.ini``
+
+We say "access to galaxy database", this service could run on an NFS mounted
+copy of Galaxy, if the host it is deployed on can access the database of the
+main galaxy instance. This is required, because the chat service validates
+connecting users to prevent impersonation.
+
+Deploying the Server
+--------------------
+
+The chat server is a very new feature in Galaxy and has been disabled by
+default at a number of levels, as not all users may wish to use this feature.
+
+Configuration is managed in ``galaxy.ini``:
+
+.. code-block:: ini
+
+    # Galaxy real time communication server settings
+    enable_communication_server = True
+    communication_server_host = http://localhost
+    communication_server_port = 7070
+    # persistent_communication_rooms is a comma-separated list of rooms that should be always available.
+    persistent_communication_rooms = NGS,RNA-Seq,Genome Annotation
+
+As you can see in this configuration, it is pointing at a service running on
+the same host as Galaxy, listening on port 7070.
+
+Once you have configured this portion, you will want to install the required
+dependencies and launch the chat server. This runs as a separate Flask-based
+service.
+
+Installing the Dependencies
+---------------------------
+
+Most admins will find it convenient to activate the Galaxy virtualenv, and run
+the following command:
+
+.. code-block:: console
+
+    (.venv)$ pip install flask flask-login flask-socketio eventlet
+
+You can then launch the server:
+
+.. code-block:: console
+
+    (.venv)$ python ./scripts/communication/communication_server.py --port 7070 --host localhost
+
+Next we'll configure user-level access.
+
+Enabling the Chat Interface
+---------------------------
+
+Users who wish to enable the chat can do so in their user preferences.
+
+.. image:: chat-0.png
+
+There is a new element at the top of the list which controls chat preferences:
+
+.. image:: chat-1.png
+
+And a short dialog to enable it:
+
+.. image:: chat-2.png
+
+After that, a new icon becomes visible next to "Analyse Data" in your Galaxy
+header.
+
+.. image:: chat-3.png
+
+On opening the chat interface, note that chat defaults to an offline
+state. Clicking the red chat bubble icon will turn it green and allow the user
+to engage with others.
+
+.. image:: chat-4.png
+
+Finally, users can connect with one another within Galaxy.
+
+.. image:: communication_server_rooms.png
+
+Security Considerations
+-----------------------
+
+This service does:
+
+- Ensure authenticated access by decoding the user's Galaxy ``galaxysession``
+  cookie.
+- Prevent user spoofing
+
+This service does not:
+
+- Enforce rate-limiting of messages
+- Run any sort of text/obscenity filtering rules
+- Prevent user spoofing if the user is an administrator impersonating
+  another user
+
+Keeping the Chat Server Running
+-------------------------------
+
+Here is an example supervisord configuration for keeping the chat server running:
+
+.. code-block:: ini
+
+    [program:galaxy_chat_server]
+    directory       = GALAXY_ROOT
+    command         = python ./scripts/communication/communication_server.py --port 7070 --host localhost
+    autostart       = true
+    autorestart     = unexpected
+    user            = GALAXY_USER
+    startsecs       = 2
+    redirect_stderr = true
+
+Configuration with virtualenvs will be slightly different.
diff --git a/doc/source/admin/communication_server.png b/doc/source/admin/communication_server.png
new file mode 100644
index 0000000..0f755c9
Binary files /dev/null and b/doc/source/admin/communication_server.png differ
diff --git a/doc/source/admin/communication_server_rooms.png b/doc/source/admin/communication_server_rooms.png
new file mode 100644
index 0000000..f6cf58a
Binary files /dev/null and b/doc/source/admin/communication_server_rooms.png differ
diff --git a/doc/source/admin/conda_faq.rst b/doc/source/admin/conda_faq.rst
new file mode 100644
index 0000000..4fe67d1
--- /dev/null
+++ b/doc/source/admin/conda_faq.rst
@@ -0,0 +1,344 @@
+===========================
+Conda for Tool Dependencies
+===========================
+
+Galaxy tools (also called wrappers) traditionally use Tool Shed package
+recipes to install their dependencies. At the tool's installation time
+the recipe is downloaded and executed in order to provide the underlying
+software executables. The introduction of these Galaxy-specific recipes was
+a necessary step at the time; however, nowadays there are other, more
+mature and stable options to install software in a similar manner. The
+Galaxy community has taken steps to improve the tool dependency system
+in order to enable new features and expand its reach. This document aims
+to describe these steps and answer frequently asked questions.
+
+Galaxy has adopted a new standard for tool dependencies: Conda packages!
+
+Not only do Conda packages make tool dependencies more reliable and
+stable, they are also easier to test and faster to develop than the
+traditional Tool Shed package recipes.
+
+Conda is a package manager like ``apt-get``, ``yum``, ``pip``, ``brew`` or
+``guix``. We don't want to argue about the relative merits of various package
+managers here; in fact, Galaxy supports multiple package managers and we welcome
+community contributions (such as implementing a Guix package manager or
+enhancing the existing brew support to bring it on par with Conda).
+
+As a community, we have decided that Conda is the one that best fulfills the
+community's needs. The following are some of the crucial Conda features that led
+to this decision:
+
+-  Installation of packages does not require *root* privileges
+   (installation at any location the Galaxy user has write access to)
+-  Multiple versions of software can be installed on the system
+-  HPC-ready
+-  Faster and more robust package installations through pre-compiled
+   packages (no build environment complications)
+-  Independent of programming language (R, Perl, Python, Julia, Java,
+   pre-compiled binaries, and more)
+-  Easy to write recipes (1 YAML description file + 1 Bash install
+   script)
+-  An active, large and growing community (with more and more software
+   authors managing their own recipes)
+-  Extensive documentation: `Conda documentation`_ and `Conda quick-start`_
+
+Below we answer some common questions (collected by Lance Parsons):
+
+
+1. How do I enable Conda dependency resolution for Galaxy jobs?
+***************************************************************
+
+Galaxy's job dependency resolution is managed via the
+``dependency_resolvers_conf.xml`` configuration file. Most Galaxy administrators
+should be using Galaxy's default dependency resolvers configuration file
+(``dependency_resolvers_conf.xml.sample``). Since
+release 16.04, Galaxy enables Conda dependency resolution by default when
+Conda is already installed on the system. Having Conda enabled in
+``dependency_resolvers_conf.xml`` means that Galaxy can look for job
+dependencies using the Conda system when it attempts to run tools.
+
+Note that the order of resolvers in the file matters and the ``<tool_shed_packages />``
+entry should remain first. This means that tools that have specified Tool Shed packages
+as their dependencies will work without a change.
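+
+For reference, enabling Conda alongside the other resolvers looks like the
+following sketch of ``dependency_resolvers_conf.xml`` (mirroring the default
+configuration described in the dependency resolvers documentation):
+
+.. code-block:: xml
+
+   <dependency_resolvers>
+     <tool_shed_packages />
+     <galaxy_packages />
+     <galaxy_packages versionless="true" />
+     <conda />
+     <conda versionless="true" />
+   </dependency_resolvers>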
+
+The most common configuration settings related to Conda are listed in Table 1.
+See `galaxy.ini.sample`_ for the complete list.
+
++--------------------------+--------------------------+---------------------------+
+| Setting                  | Default setting          | Meaning                   |
++--------------------------+--------------------------+---------------------------+
+| ``conda_prefix``         | <tool\_dependency\_dir>/ | the location              |
+|                          | \_conda                  | on the                    |
+|                          |                          | filesystem where Conda    |
+|                          |                          | packages and              |
+|                          |                          | environments are          |
+|                          |                          | installed                 |
+|                          |                          |                           |
+|                          |                          | IMPORTANT : Due to a      |
+|                          |                          | current limitation in     |
+|                          |                          | Conda, the total length   |
+|                          |                          | of the                    |
+|                          |                          |                           |
+|                          |                          | ``conda_prefix`` and the  |
+|                          |                          | ``job_working_directory`` |
+|                          |                          | path should be less       |
+|                          |                          | than 50 characters!       |
++--------------------------+--------------------------+---------------------------+
+| ``conda_auto_init``      | False                    | Set to True to instruct   |
+|                          |                          | Galaxy to install Conda   |
+|                          |                          | (the package manager)     |
+|                          |                          | automatically if it       |
+|                          |                          | cannot find a local copy  |
+|                          |                          | already on the system.    |
++--------------------------+--------------------------+---------------------------+
+| ``conda_auto_install``   | False                    | Set to True to instruct   |
+|                          |                          | Galaxy to look for and    |
+|                          |                          | install Conda packages    |
+|                          |                          | for missing tool          |
+|                          |                          | dependencies before       |
+|                          |                          | running a job.            |
++--------------------------+--------------------------+---------------------------+
+
+*Table 1: Commonly used configuration options for Conda in Galaxy.*
+
+
+2. How do Conda dependencies work? Where do things get installed?
+*****************************************************************
+
+In contrast to the Tool Shed (TS) dependency system, which was used exclusively by Galaxy,
+Conda is a pre-existing, independent project. With Conda, it is possible for an
+admin to install and manage packages without touching Galaxy at all. Galaxy can
+handle these dependencies for you, but admins are not required to use Galaxy for
+dependency management.
+
+There are a few new config options in the ``galaxy.ini`` file (see Table 1 or
+`galaxy.ini.sample`_ for more information), but by default Galaxy will install
+Conda (the package manager) and the required packages in the
+``<tool_dependency_dir>/_conda/`` directory. In this directory, Galaxy will
+create an ``envs`` folder with all of the environments managed by Galaxy. Each
+environment contains a ``lib``, ``bin``, ``share``, and ``include``
+subdirectory, depending on the tool, and is sufficient to get a Galaxy tool up
+and running. Galaxy simply activates this environment via Conda and makes
+everything available before the tool is executed on your system.
+
+To summarize, there are four ways to manage Conda dependencies for use
+with Galaxy. For all of these options, Conda dependency management must
+be configured in the ``dependency_resolvers_conf.xml`` and the ``galaxy.ini`` file.
+
+#. Manual Install - Conda dependencies may be installed by
+   administrators from the command line. Conda (and thus the Conda
+   environments) should be installed in the location specified by the
+   ``conda_prefix`` path (defined in ``galaxy.ini`` and by default
+   ``<tool_dependency_dir>/_conda/`` directory). Galaxy will search
+   these environments for required packages when tools are run. Conda
+   environment names have to follow a specific naming pattern. As an
+   example, to install samtools in version 0.1.19, the administrator can
+   run the command:
+
+   .. code-block:: bash
+
+      $ conda create --name __samtools@0.1.19 samtools==0.1.19 --channel bioconda
+
+   Tools that require samtools version 0.1.19 will then be able to find
+   and use the installed Conda package.
+#. Galaxy Admin Interface (>= 16.07) - Galaxy will install Conda tool
+   dependencies when tools are installed from the Tool Shed if the
+   option “When available, install externally managed dependencies (e.g.
+   Conda)? Beta” is checked. Admins may also view and manage Conda
+   dependencies via the Admin interface.
+#. Automatically at tool run time - When a tool is run and a dependency
+   is not found, Galaxy will attempt to install the dependency using
+   Conda if ``conda_auto_install`` is activated in the configuration.
+#. Via the API (>= 16.07) - The Galaxy community maintains an `ansible role`_
+   that uses BioBlend_ and the Galaxy API to install tools.
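+
+Whichever method is used, the environments that the Galaxy-managed Conda
+knows about can be inspected with the standard ``conda env list`` command
+(the prefix below is the default location and may differ on your system):
+
+.. code-block:: console
+
+   $ <tool_dependency_dir>/_conda/bin/conda env list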
+
+
+3. What is required to make use of this? Any specific packages, Galaxy revision, OS version, etc.?
+**************************************************************************************************
+
+The minimum required version of Galaxy to use Conda is 16.01; however,
+version 16.07 or greater is recommended. The 16.07 release of Galaxy has
+a graphical user interface to manage packages, but this is not
+required to have Conda dependencies managed and used by Galaxy.
+
+Conda packages should work on all compatible operating systems with
+*glibc* version 2.5 or newer (this includes CentOS 5). We will most
+likely switch soon to *glibc* version 2.12 as a minimum requirement (this
+includes CentOS 6). So all packages will run on any \*nix operating
+system released after 2007.
+
+
+4. If I have Conda enabled, what do I need to do to install tools using it? For example, how can I install the latest Trinity? And how will I know the dependencies are installed?
+**********************************************************************************************************************************************************************************
+
+This depends on your ``galaxy.ini`` setting. Starting with release 16.07, Galaxy
+can automatically install the Conda package manager for you if you have enabled
+``conda_auto_init``. Galaxy can then install Trinity along with its dependencies
+using one of the methods listed in question 2 above. In particular, if
+``conda_auto_install`` is True and Trinity is not installed yet, Galaxy will try
+to install it via Conda when a Trinity job is launched.
+
+With release 16.07 you can see which dependencies are being used
+in the “Manage installed tools” section of the Admin panel and you can select
+whether or not to install Conda packages or Tool Shed package recipes when you
+install new tools there, even if ``conda_auto_install`` is disabled.
+
+During a tool installation, the Galaxy admin has control over which systems will be used to
+install the tool requirements. The default settings will trigger installation
+of both TS and Conda packages (if Conda is present); which of the two is
+actually used during tool execution then depends on the dependency resolvers
+configuration.
+
+To check if Galaxy has created a Trinity environment, have a look at the folders under
+``<tool_dependency_dir>/_conda/envs/`` (or ``<conda_prefix>/envs`` if you have changed ``conda_prefix`` in your ``galaxy.ini`` file).
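+
+For example (the environment name depends on the exact Trinity version that
+was installed; the one shown here is hypothetical):
+
+.. code-block:: console
+
+   $ ls <tool_dependency_dir>/_conda/envs/
+   __trinity@2.2.0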
+
+We recommend using Conda on a tool-by-tool basis, by unchecking the checkbox
+for TS dependencies during the tool installation, and for tools that have no
+TS dependencies available.
+
+
+5. Can I mix traditional Galaxy packages and Conda packages?
+************************************************************
+
+Yes, the way this works is that Galaxy goes through the list of
+requirements for a tool, and then determines for each requirement if it
+can be satisfied by any of the active resolver systems.
+
+The order in which resolvers are tried is listed in the
+``dependency_resolvers_conf.xml`` file. The default order is
+
+-  Tool Shed packages
+-  Packages manually installed by administrators
+-  Conda packages
+
+The first system that satisfies a requirement will be used. See
+`resolver docs`_ for detailed documentation.
+
+
+6. How do I know what system is being used by a given tool?
+***********************************************************
+
+The Galaxy log will show which dependency resolution system is used
+to satisfy each tool dependency and you can specify priorities using the
+``dependency_resolvers_conf.xml`` file (see question 5 above). Starting from Galaxy
+release 16.07, you can see which dependency will be used (“resolved”) in the
+Admin panel.
+
+
+7. How do I go about specifying Conda dependencies for a tool? All the docs still seem to recommend (or exclusively discuss) the ``tool_dependencies.xml`` method.
+******************************************************************************************************************************************************************
+
+The simple answer is: you don't need to do much to make Conda work for a tool.
+
+The ``<requirement>`` tag in the tool XML file is enough. The name and the
+version should correspond to a Conda package in the ``default``, ``r``,
+``bioconda`` or ``iuc`` Conda channel (you can extend this list if you
+like in your ``galaxy.ini``). If this is the case, you are ready to go. Read
+more about `Conda channels`_ and browse a channel's packages at
+https://anaconda.org/ followed by the channel name
+(e.g. `https://anaconda.org/bioconda <https://anaconda.org/bioconda>`__).
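+
+For example, a tool that depends on samtools needs nothing more than a
+requirement tag like the following (the version is illustrative and should
+match an existing Conda package):
+
+.. code-block:: xml
+
+   <requirement type="package" version="1.3.1">samtools</requirement>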
+
+We will gradually adjust the documentation about ``tool_dependencies.xml`` and
+deprecate it everywhere.
+
+
+8. During tool installation what if there is no Conda package available for a given requirement? What if the requirement is resolved in a different software than the original wrapper author meant to use?
+***********************************************************************************************************************************************************************************************************
+
+If there is no Conda package available during tool installation, the tool
+will still install, and it can be used if its dependencies are
+satisfied by another dependency system such as Tool Shed package
+recipes, Docker containers or Environment Modules.
+
+If there is a package with the correct name and version, it will be used. There
+is no equivalent of the “owner” concept used for Galaxy packages
+installed from the Tool Shed.
+
+
+9. Where can I find a list of existing Conda packages that I can point to, so I don't have to reinvent the wheel for common dependencies?
+*****************************************************************************************************************************************
+
+With the Conda package manager installed on your system, run:
+
+.. code-block:: bash
+
+   $ conda search <package_name> -c bioconda -c iuc
+
+This will search in all channels that are activated by default in
+Galaxy. If you find your package, you are ready to go. If not, please
+`create a Conda package`_ and submit_ it to BioConda_ or get in `contact with the IUC`_.
+
+
+10. How can I create a new Conda package for a dependency?
+**********************************************************
+
+Adding a package to the BioConda or IUC Conda channels will make it
+available for Galaxy tools to use as a dependency. To learn how, get in
+touch with the awesome BioConda community. They have great documentation
+and assist with all development. You will also find a few of us at that
+project to help get you started :)
+
+Don't be scared! Conda recipes are really simple to write. Conda also
+offers so-called ``skeleton`` generators that generate recipes from
+PyPI, CRAN, or CPAN for you (mostly) automatically.
+
+
+11. Is there a way to convert traditional Tool Shed package recipes that are not yet in a Conda channel?
+********************************************************************************************************
+
+First, you do not need to do anything to your wrapper as long as the
+package name in the requirement tag matches the name of the correct
+Conda package. (You may want to mention in the README or a comment which
+Conda channel contains the package.)
+
+If you want to migrate some recipes from XML to Conda, IUC is happy to
+give you a hand. We are trying to get all new versions under Conda and
+leave the old versions as they are – simply because of time.
+
+
+12. What is the recommendation for existing installations? Will I continue to maintain both systems or migrate to the new Conda system eventually?
+**************************************************************************************************************************************************
+
+Old tools will use the traditional installation system; this system will
+stay and will be supported for installing old tools to guarantee sustainability
+and reproducibility. New tools from the IUC may be Conda-only.
+
+
+13. What can I do if Conda doesn't work for me?
+***********************************************
+
+There is currently a limitation in the way Conda packages are being
+built. This limitation will be addressed shortly by the Conda community,
+but this requires all packages to be rebuilt.
+
+To work around this limitation, please make sure that the total length
+of the ``conda_prefix`` and ``job_working_directory`` path is less than 50
+characters long.
+
+If this is your problem, you should see a warning similar to the
+following in your Galaxy log files:
+
+.. code-block:: bash
+
+   ERROR: placeholder '/home/ray/r_3_3_1-x64-3.5/envs/_build_placehold_placehold_placehold_placehold_pl' too short
+
+In rare cases Conda may not have been properly installed by Galaxy.
+A symptom of this is a missing ``activate`` script in the
+``<conda_prefix>/bin`` folder. In that case you can delete the ``conda_prefix`` folder
+and restart Galaxy, which will attempt to install Conda again.
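+
+For example, assuming the default prefix location (double-check the path
+before removing anything):
+
+.. code-block:: console
+
+   $ rm -rf <tool_dependency_dir>/_conda
+   $ sh run.sh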
+
+If this does not solve your problem or you have any trouble following
+the instructions, please ask on the Galaxy mailing list or the Galaxy
+IRC channel.
+
+.. _Conda documentation: http://conda.pydata.org/docs/building/build.html
+.. _Conda quick-start: http://conda.pydata.org/docs/get-started.html
+.. _ansible role: https://github.com/galaxyproject/ansible-galaxy-tools
+.. _BioBlend: https://github.com/galaxyproject/bioblend
+.. _resolver docs: https://docs.galaxyproject.org/en/master/admin/dependency_resolvers.html
+.. _Conda channels: http://conda.pydata.org/docs/custom-channels.html
+.. _create a Conda package: http://conda.pydata.org/docs/building/recipe.html#conda-recipe-files-overview
+.. _submit: https://bioconda.github.io/#step-4-join-the-team
+.. _BioConda: https://bioconda.github.io
+.. _contact with the IUC: https://gitter.im/galaxy-iuc/iuc
+.. _galaxy.ini.sample: https://github.com/galaxyproject/galaxy/blob/dev/config/galaxy.ini.sample
diff --git a/doc/source/admin/dependency_resolvers.rst b/doc/source/admin/dependency_resolvers.rst
new file mode 100644
index 0000000..6ec8e9f
--- /dev/null
+++ b/doc/source/admin/dependency_resolvers.rst
@@ -0,0 +1,194 @@
+Dependency Resolvers in Galaxy
+==============================
+
+There are two parts to building a link between Galaxy and command line bioinformatics tools: the tool XML, which
+specifies a mapping between the Galaxy web user interface and the tool command line, and the tool dependencies, which
+specify how to source the actual packages that implement the tool’s commands. The final script that Galaxy submits to run a
+job includes commands, such as changes to the ``PATH`` environment variable, that are generated by *dependency
+resolvers*. There is a default dependency resolver configuration, but administrators can provide their own configuration
+using the ``dependency_resolvers_conf.xml`` configuration file in the Galaxy ``config/`` directory.
+
+The binding between tool XML and the tools they need to run is specified in the tool XML using *requirements*
+tags, for example
+
+.. code-block:: xml
+
+    <requirement type="package" version="0.7.10.039ea20639">bwa</requirement>
+
+In some cases these requirement tags can be specified without a version
+
+.. code-block:: xml
+
+    <requirement type="package">bedtools</requirement>
+
+The requirements turn into inputs to the dependency resolvers. Each dependency resolver is thus given one or
+two inputs: the name of the dependency to resolve and, in most cases, the version string of the
+dependency.
+
+Default Dependency Resolvers
+----------------------------
+
+The default configuration of dependency resolvers is equivalent to the following ``dependency_resolvers_conf.xml``
+
+.. code-block:: xml
+
+  <dependency_resolvers>
+  <!-- the default configuration, first look for dependencies installed from the toolshed -->
+    <tool_shed_packages />
+  <!-- then look for env.sh files laid out according to the "galaxy packages" schema -->
+    <galaxy_packages />
+    <galaxy_packages versionless="true" />
+    <conda />
+    <conda versionless="true" />
+  </dependency_resolvers>
+
+In this default configuration, the *tool shed dependency resolver* is tried first. Next, the *Galaxy packages
+dependency resolver* is used, first looking for packages by name and version string and then looking for the
+package just by name. Finally, the *Conda dependency resolver* is tried, again first with and then without the
+version string. The default configuration thus prefers packages installed from the Galaxy Tool Shed, then tries
+to find a "Galaxy package" satisfying the specific version the dependency requires, and only then falls back to
+less specific matches. If any of the dependency resolvers succeeds, a dependency resolution object is returned
+and no more resolvers are called. This dependency resolution object provides shell commands to prepend to the
+shell script that runs the tool.
+
+Tool Shed Dependency Resolver
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``tool_shed_packages`` dependency resolver works with packages installed from the Galaxy Tool Shed. When a package
+is installed from the Tool Shed it creates a directory structure under the directory that is specified as the
+``tool_dependency_dir`` in Galaxy's configuration. This directory structure contains references to the tool's name,
+owner (in the Tool Shed) and version string (amongst other things) and ultimately contains a file named ``env.sh``
+that contains commands to make the dependency runnable. This structure is installed, along with the tool itself, by the
+tool package and doesn't require any configuration by the Galaxy administrator.
+
+The Tool Shed dependency resolver is not able to resolve package requirements that do not have a version string,
+like the ``bedtools`` example above.
+
+Galaxy Packages Dependency Resolver
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``galaxy_packages`` dependency resolver allows Galaxy admins to specify how Galaxy should load manually
+installed packages. This resolver can be configured either to use the version string or in *versionless* mode.
+
+The Galaxy Packages dependency resolver takes a ``base_path`` argument that specifies the path under which
+it starts looking for the files it requires. The default value for this ``base_path`` is the
+``tool_dependency_dir`` configured in Galaxy's ``config/galaxy.ini``. Below the base path, the Galaxy Packages
+resolver looks for directories named after tools, e.g. ``bedtools``. As mentioned before, this resolver
+works in versioned and versionless mode. The default mode is versioned, where the dependency resolver looks for a
+directory named after the dependency's version string. For example, if the Galaxy tool specifies that it
+needs ``bedtools`` version 2.20.1, the dependency resolver will look for a directory ``bedtools/2.20.1``.
+
+If the Galaxy Package dependency resolver finds a ``bin`` directory in this directory, it adds it to the ``PATH``
+used by the scripts Galaxy uses to run tools. If, however, it finds an ``env.sh`` script, it sources this
+script before running the tool that requires this dependency. This can be used to set up the environment
+needed for the tool to run. For example, this ``env.sh`` uses `Environment Modules <http://modules.sourceforge.net/>`_
+to set up the environment for ``bedtools``
+
+.. code-block:: bash
+
+    #!/bin/sh
+
+    if [ -z "$MODULEPATH" ] ; then
+      . /etc/profile.d/module.sh
+    fi
+
+    module add bedtools/bedtools-2.20.1
+
+The Galaxy Package dependency resolver operates quite similarly when used in versionless mode. Instead of looking
+for a directory named after a version, it looks for a directory ending in ``default``, for example
+``bedtools/default``. It then looks for a ``bin`` subdirectory or an ``env.sh`` script and incorporates these in the tool
+script that finally gets run. This versionless (i.e. default) lookup is also used if the package requirement
+does not specify a version string.
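+
+Putting this together, a manually managed layout under the base path might
+look like the following sketch (paths are illustrative; ``default`` may be a
+plain directory or a symlink to the preferred version):
+
+.. code-block:: console
+
+    <tool_dependency_dir>/
+        bedtools/
+            2.20.1/
+                env.sh
+            default -> 2.20.1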
+
+Environment Modules Dependency Resolver
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The example above used Environment Modules to set the ``PATH`` (and other settings) for ``bedtools``. With
+the ``modules`` dependency resolver it is possible to use Environment Modules directly. This resolver
+takes these parameters:
+
+modulecmd
+    path to Environment Modules' ``modulecmd`` tool
+
+modulepath
+    value used for the ``MODULEPATH`` environment variable, used to locate modules
+
+versionless
+    whether to resolve tools using a version string or not (default: *false*)
+
+find_by
+    whether to use the ``DirectoryModuleChecker`` or ``AvailModuleChecker`` (permissible values are "directory" or "avail",
+    default is "avail")
+
+prefetch
+    in the AvailModuleChecker prefetch module info with ``module avail`` (default: true)
+
+default_indicator
+    what indicates to the ``AvailModuleChecker`` that a module is the default version (default: "(default)"). Note
+    that the first module found is considered the default when no version is used by the resolver, so
+    the sort order of modules matters.
+
+The Environment Modules dependency resolver can work in two modes. The ``AvailModuleChecker`` searches the results
+of the ``module avail`` command for the name of the dependency. If it is configured in versionless mode,
+or is looking for a package with no version specified, it accepts any matching module whose name is a bare word,
+or else the first module whose name matched. For this reason, the default version of the module should be the first one
+listed, something that can be achieved by tagging it with a word that appears first in sort order, for example the
+string "(default)" (yielding a module name like ``bedtools/(default)``). So when looking for ``bedtools`` in
+versionless mode the search would match the first module called ``bedtools``, and in versioned mode the search would
+only match if a module named ``bedtools/2.20.1`` was present (assuming you're looking for ``bedtools/2.20.1``).
+
+The ``DirectoryModuleChecker`` looks for files or directories in the path specified by ``MODULEPATH`` or
+``MODULESHOME`` that match the dependency being resolved. In versionless mode a match on simply
+the dependency name is needed, and in versioned mode a match on the dependency name and
+version string is needed.
+
+If a module matching the dependency is found, code to execute ``modulecmd sh load`` with the name of the dependency
+is added to the script that runs the tool, e.g. ``modulecmd sh load bedtools``. If version strings are being
+used, they'll be used in the ``load`` command, e.g. ``modulecmd sh load bwa/0.7.10.039ea20639``.
+
+
+Homebrew Dependency Resolver
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This dependency resolver uses Homebrew packages to resolve requirements.
+
+
+Brew Tool Shed Package Resolver
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This dependency resolver resolves Tool Shed packages that have been
+automatically converted to Homebrew recipes. It is highly experimental, undocumented,
+and will almost certainly be removed from the code base.
+
+
+Conda Dependency Resolver
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The conda XML tag can be used to configure a conda dependency resolver.
+This resolver can be configured with the following options.
+
+prefix
+    The conda_prefix used to locate dependencies in (default: ``<tool_dependency_dir>/_conda``).
+
+exec
+    The conda executable to use; it will default to the one on the
+    PATH (if available) and then to ``<conda_prefix>/bin/conda``.
+
+versionless
+    whether to resolve tools using a version string or not (default: *false*)
+
+debug
+    Pass debug flag to conda commands (default: false).
+
+ensure_channels
+    conda channels to enable by default. See
+    http://conda.pydata.org/docs/custom-channels.html for more
+    information about channels. (default: conda-forge,r,bioconda,iuc).
+
+auto_install
+    Set to True to instruct Galaxy to look for and install missing tool
+    dependencies before each job runs. (default: False)
+
+auto_init
+    Set to True to instruct Galaxy to install conda from the web
+    automatically if it cannot find a local copy and conda_exec is not
+    configured.
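+
+Putting some of these options together, a Conda resolver entry in
+``dependency_resolvers_conf.xml`` might look like the following sketch
+(values are illustrative):
+
+.. code-block:: xml
+
+    <conda prefix="/srv/galaxy/deps/_conda" auto_install="True" versionless="false" />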
diff --git a/doc/source/admin/framework_dependencies.rst b/doc/source/admin/framework_dependencies.rst
new file mode 100644
index 0000000..4c4c54d
--- /dev/null
+++ b/doc/source/admin/framework_dependencies.rst
@@ -0,0 +1,641 @@
+.. _framework-dependencies:
+
+Galaxy Framework Dependencies
+=============================
+
+Galaxy is a large Python application with a long list of `Python module
+dependencies`_. As a result, the Galaxy developers have made significant effort
+to provide these dependencies in as simple a method as possible. Prior to the
+16.01 release, this was done by distributing dependencies in Python's old
+standard packaging format (Egg) in a non-standard way. As of the 16.01 release,
+Galaxy now distributes dependencies in Python's new standard packaging format
+(Wheel), albeit still in a non-standard way. Thankfully, the new distribution
+method is far more compatible with the standard package distribution tooling
+(i.e. `pip`_) than the old method.
+
+.. _Python module dependencies: https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/dependencies/requirements.txt
+.. _pip: https://pip.pypa.io/
+.. _wheel: https://wheel.readthedocs.org/
+
+How it works
+------------
+
+Upon startup (with ``run.sh``), the startup scripts will:
+
+1. Create a Python `virtualenv`_ in the directory ``.venv``.
+
+2. Unset the ``$PYTHONPATH`` environment variable (if set) as this can
+   interfere with installing `Galaxy pip`_ and dependencies.
+
+3. Replace that virtualenv's pip with `Galaxy pip`_.
+
+4. If applicable, create a ``binary-compatibility.cfg`` (see the `Galaxy pip
+   and wheel`_ section for an explanation of this file).
+
+5. Download and install wheels from the Galaxy project wheel server,
+   `wheels.galaxyproject.org`_, using pip.
+
+6. Start Galaxy using ``.venv/bin/python``.
+
+.. _virtualenv: https://virtualenv.readthedocs.org/
+.. _wheels.galaxyproject.org: https://wheels.galaxyproject.org/
+
+Options
+-------
+
+A variety of options to ``run.sh`` are available to control the above behavior:
+
+- ``--skip-venv``: Do not create or use a virtualenv, and do not replace pip
+  with Galaxy pip.
+- ``--skip-wheels``: Do not install wheels.
+- ``--no-create-venv``: Do not create a virtualenv, but use one if it exists at
+  ``.venv`` or if ``$VIRTUAL_ENV`` is set (this variable is set by virtualenv's
+  ``activate``).
+- ``--replace-pip/--no-replace-pip``: Do/do not replace pip with Galaxy pip.
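+
+For example, to start Galaxy against an existing virtualenv without fetching
+wheels (the same combination used in the manual dependency management
+instructions below):
+
+.. code-block:: console
+
+    $ sh run.sh --no-create-venv --skip-wheels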
+
+Managing dependencies manually
+------------------------------
+
+Create a virtualenv
+^^^^^^^^^^^^^^^^^^^
+
+Using a `virtualenv`_ in ``.venv`` under the Galaxy source tree is not
+required. More complicated Galaxy setups may choose to use a virtualenv
+external to the Galaxy source tree, which can be done either by not using
+``run.sh`` directly (an example of this can be found under the `Supervisor`_
+section) or using the ``--no-create-venv`` option, explained in the `Options`_
+section. It is also possible to force Galaxy to start without a virtualenv at
+all, but you should not do this unless you know what you're doing.
+
+To manually create a virtualenv, you will first need to obtain virtualenv.
+There are a variety of ways to do this:
+
+- ``pip install virtualenv``
+- ``brew install virtualenv``
+- Install your Linux distribution's virtualenv package from the system package
+  manager (e.g. ``apt-get install python-virtualenv``).
+- Download the `virtualenv source from PyPI
+  <https://pypi.python.org/pypi/virtualenv>`_, untar, and run the
+  ``virtualenv.py`` script contained within as ``python virtualenv.py
+  /path/to/galaxy/virtualenv``
+
+Once this is done, create a virtualenv. In our example, the virtualenv will
+live in ``/srv/galaxy/venv`` and the Galaxy source code has been cloned to
+``/srv/galaxy/server``.
+
+.. code-block:: console
+    
+    $ virtualenv /srv/galaxy/venv
+    New python executable in /srv/galaxy/venv/bin/python
+    Installing setuptools, pip, wheel...done.
+    $ . /srv/galaxy/venv/bin/activate
+    (venv)$
+
+Install dependencies
+^^^^^^^^^^^^^^^^^^^^
+
+Normally, ``run.sh`` calls `common_startup.sh`_, which creates the virtualenv,
+installs Galaxy pip, and installs dependencies. You can call this script
+yourself to set up Galaxy pip and the dependencies without creating a
+virtualenv using the ``--no-create-venv`` option:
+
+.. code-block:: console
+    
+    (venv)$ PYTHONPATH= sh /srv/galaxy/server/scripts/common_startup.sh --no-create-venv
+    Ignoring indexes: https://pypi.python.org/simple
+    Collecting pip
+      Downloading https://wheels.galaxyproject.org/packages/pip-8.0.0+gx1-py2.py3-none-any.whl (1.2MB)
+        100% |████████████████████████████████| 1.2MB 37.8MB/s 
+    Installing collected packages: pip
+      Found existing installation: pip 7.1.2
+        Uninstalling pip-7.1.2:
+          Successfully uninstalled pip-7.1.2
+    Successfully installed pip-8.0.0+gx1
+    Collecting bx-python==0.7.3 (from -r requirements.txt (line 2))
+      Downloading https://wheels.galaxyproject.org/packages/bx_python-0.7.3-cp27-cp27mu-linux_x86_64.whl (1.7MB)
+        100% |████████████████████████████████| 1.7MB 25.4MB/s 
+
+    ...
+
+    Collecting pysam==0.8.3+gx1 (from -r requirements.txt (line 69))
+      Downloading https://wheels.galaxyproject.org/packages/pysam-0.8.3+gx1-cp27-cp27mu-linux_x86_64.whl (7.4MB)
+        100% |████████████████████████████████| 7.4MB 15.1MB/s 
+    Installing collected packages: bx-python, MarkupSafe, PyYAML, SQLAlchemy,
+      mercurial, numpy, pycrypto, six, Paste, PasteDeploy, docutils, wchartype,
+      repoze.lru, Routes, WebOb, WebHelpers, Mako, pytz, Babel, Beaker,
+      Markdown, Cheetah, requests, requests-toolbelt, boto, bioblend, amqp,
+      anyjson, kombu, pbr, sqlparse, decorator, Tempita, sqlalchemy-migrate,
+      Parsley, nose, svgwrite, ecdsa, paramiko, Fabric, Whoosh, pysam
+    Successfully installed Babel-2.0 Beaker-1.7.0 Cheetah-2.4.4 Fabric-1.10.2
+      Mako-1.0.2 Markdown-2.6.3 MarkupSafe-0.23 Parsley-1.3 Paste-2.0.2
+      PasteDeploy-1.5.2 PyYAML-3.11 Routes-2.2 SQLAlchemy-1.0.8 svgwrite-1.1.6
+      Tempita-0.5.3.dev0 WebHelpers-1.3 WebOb-1.4.1 Whoosh-2.4.1+gx1 amqp-1.4.8
+      anyjson-0.3.3 bioblend-0.6.1 boto-2.38.0 bx-python-0.7.3 decorator-4.0.2
+      docutils-0.12 ecdsa-0.13 kombu-3.0.30 mercurial-3.4.2 nose-1.3.7
+      numpy-1.9.2 paramiko-1.15.2 pbr-1.8.0 pycrypto-2.6.1 pysam-0.8.3+gx1
+      pytz-2015.4 repoze.lru-0.6 requests-2.8.1 requests-toolbelt-0.4.0
+      six-1.9.0 sqlalchemy-migrate-0.10.0 sqlparse-0.1.16 wchartype-0.1
+
+**Warning:** If your ``$PYTHONPATH`` is set, it may interfere with the
+dependency installation process (this will almost certainly be the case if you
+use `virtualenv-burrito`_). Without ``--no-create-venv`` the ``$PYTHONPATH``
+variable will be automatically unset, but we assume you know what you're doing
+and may want it left intact if you are using ``--no-create-venv``. If you
+encounter problems, try unsetting ``$PYTHONPATH`` as shown in the example
+above.
+
+.. _common_startup.sh: https://github.com/galaxyproject/galaxy/blob/dev/scripts/common_startup.sh
+.. _virtualenv-burrito: https://github.com/brainsik/virtualenv-burrito
+
+Installing unpinned dependencies
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Galaxy's dependencies can be installed either "pinned" (they will be installed
+at exact versions specified for your Galaxy release) or "unpinned" (the latest
+versions of all dependencies will be installed unless there are known
+incompatibilities with new versions). By default, the release branch(es) of
+Galaxy use pinned versions for three reasons:
+
+1. Using pinned versions ensures that the prebuilt wheels on
+   `wheels.galaxyproject.org`_ will be installed, and no compilation will be
+   necessary.
+
+2. Galaxy releases are tested with the pinned versions, which allows us to
+   give as much assurance as possible that the pinned versions will work with
+   the given Galaxy release (especially as time progresses and newer dependency
+   versions are released while the Galaxy release receives fewer updates).
+
+3. Pinning furthers Galaxy's goal of reproducibility as differing dependency
+   versions could result in non-reproducible behavior.
+
+To use unpinned dependencies, install them using the
+`unpinned requirements file`_, and then instruct Galaxy to start without
+attempting to fetch wheels:
+
+.. code-block:: console
+
+    (venv)$ pip install --index-url=https://wheels.galaxyproject.org/simple/ -r lib/galaxy/dependencies/requirements.txt
+    (venv)$ deactivate
+    $ sh run.sh --no-create-venv --skip-wheels
+
+Including ``--index-url=https://wheels.galaxyproject.org/simple/`` is important
+as two dependencies (pysam, Whoosh) include modifications specific to Galaxy
+which are only available on `wheels.galaxyproject.org`_.
+
+.. _unpinned requirements file: https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/dependencies/requirements.txt
+
+Wheel interaction with other software
+-------------------------------------
+
+Galaxy job handlers
+^^^^^^^^^^^^^^^^^^^
+
+All Galaxy jobs run a metadata detection step on the job outputs upon
+completion of the tool. The metadata detection step requires many of Galaxy's
+dependencies. Because of this, it's necessary to make sure the metadata
+detection step runs in Galaxy's virtualenv. If you run a relatively simple
+Galaxy setup (e.g. single process, or multiple Python Paste processes started
+using ``run.sh``) then this is assured for you automatically. In more
+complicated setups (e.g. supervisor, the "headless" Galaxy handler, or a
+virtualenv that is not on a filesystem shared with the cluster) it may be
+necessary to make sure the handlers know where the virtualenv (or a virtualenv
+containing Galaxy's dependencies) can be found.
+
+If your jobs are failing due to Python ``ImportError`` exceptions, this is most
+likely the problem. If so, you can use the ``<env>`` tag in ``job_conf.xml`` to
+source the virtualenv. For example:
+
+.. code-block:: xml
+
+    <job_conf>
+        <plugins>
+            ...
+        </plugins>
+        <destinations default="cluster">
+            <destination id="cluster" runner="drmaa">
+                <param id="nativeSpecification"> ...cluster options... </param>
+
+                <env file="/galaxy/server/.venv/bin/activate" />
+
+            </destination>
+        </destinations>
+    </job_conf>
+
+If your Galaxy server's virtualenv isn't available on the cluster you can
+create one manually using the instructions under `Managing dependencies
+manually`_.
+
+Pulsar
+^^^^^^
+
+If using `Pulsar`_'s option to set metadata on the remote server, the same
+conditions as with `Galaxy job handlers`_ apply. You should create a virtualenv
+on the remote resource, install Galaxy's dependencies into it, and set an
+``<env>`` tag pointing to the virtualenv's ``activate`` as in the `Galaxy job
+handlers`_ section. Instructions on how to create a virtualenv can be found
+under the `Managing dependencies manually`_ section.
+
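+For example, a minimal sketch of preparing such a virtualenv on the remote
+resource might look like the following (the paths ``/srv/pulsar/galaxy_venv``
+and ``/srv/pulsar/galaxy`` are assumptions for illustration; adjust them to
+your deployment):
+
+.. code-block:: console
+
+    $ virtualenv /srv/pulsar/galaxy_venv
+    $ . /srv/pulsar/galaxy_venv/bin/activate
+    (galaxy_venv)$ pip install --index-url=https://wheels.galaxyproject.org/simple/ \
+        -r /srv/pulsar/galaxy/lib/galaxy/dependencies/requirements.txt
+
+The ``<env>`` tag would then point to
+``/srv/pulsar/galaxy_venv/bin/activate``.
+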
+.. _Pulsar: http://pulsar.readthedocs.org/
+
+Conda
+^^^^^
+
+`Conda`_ and `virtualenv`_ are incompatible. However, Conda provides its own
+environment separation functionality in the form of `Conda environments`_.
+Starting Galaxy with Conda Python will cause ``--skip-venv`` to be implicitly
+set, and the currently active Conda environment will be used to install Galaxy
+framework dependencies instead. Be sure to create and activate a Conda
+environment for Galaxy prior to installing packages and/or starting Galaxy.
+
+You may choose to install Galaxy's dependencies either at their `pinned`_
+versions using pip or `unpinned`_ using a combination of conda and pip. When
+running under Conda, pip is not replaced with Galaxy pip, so installing pinned
+dependencies will require compilation, will be slower, and will require those
+dependencies' build-time dependencies to be installed, but it has benefits as
+explained under the `Installing unpinned dependencies`_ section. Installing
+unpinned dependencies allows you to use Conda's binary packages for quick and
+easy installation.
+
+Pinned dependencies will be installed by default when running ``run.sh``. To
+install unpinned dependencies, the process is similar to installing unpinned
+versions without Conda, with the extra step of installing as much as possible
+from Conda/Bioconda before installing from pip. Begin by adding the `Bioconda`_
+channel as explained in the `Bioconda instructions`_, then create a new Conda
+environment using the provided Conda environment file. Finally, install the
+remaining dependencies using pip and start Galaxy, instructing it to skip the
+automatic fetching of pinned dependencies:
+
+.. code-block:: console
+
+    $ conda config --add channels r
+    $ conda config --add channels bioconda
+    $ conda create --name galaxy --file lib/galaxy/dependencies/conda-environment.txt
+    Fetching package metadata: ........
+    Solving package specifications: ............................................
+    Package plan for installation in environment /home/nate/conda/envs/galaxy:
+
+    The following packages will be downloaded:
+
+        package                    |            build
+        ---------------------------|-----------------
+        boto-2.38.0                |           py27_0         1.3 MB
+        cheetah-2.4.4              |           py27_0         267 KB
+        decorator-4.0.6            |           py27_0          11 KB
+        docutils-0.12              |           py27_0         636 KB
+        ecdsa-0.11                 |           py27_0          73 KB
+        markupsafe-0.23            |           py27_0          30 KB
+        mercurial-3.4.2            |           py27_0         2.9 MB
+        nose-1.3.7                 |           py27_0         194 KB
+        paste-1.7.5.1              |           py27_0         490 KB
+        pytz-2015.7                |           py27_0         174 KB
+        repoze.lru-0.6             |           py27_0          15 KB
+        requests-2.9.1             |           py27_0         605 KB
+        six-1.10.0                 |           py27_0          16 KB
+        sqlalchemy-1.0.11          |           py27_0         1.3 MB
+        sqlparse-0.1.18            |           py27_0          51 KB
+        webob-1.4.1                |           py27_0         108 KB
+        babel-2.1.1                |           py27_0         2.3 MB
+        bx-python-0.7.3            |      np110py27_1         2.1 MB
+        mako-1.0.3                 |           py27_0         105 KB
+        paramiko-1.15.2            |           py27_0         197 KB
+        pastedeploy-1.5.2          |           py27_1          23 KB
+        requests-toolbelt-0.5.0    |           py27_0          83 KB
+        routes-2.2                 |           py27_0          48 KB
+        bioblend-0.7.0             |           py27_0         181 KB
+        fabric-1.10.2              |           py27_0         108 KB
+        ------------------------------------------------------------
+                                               Total:        13.2 MB
+
+    The following NEW packages will be INSTALLED:
+
+        babel:             2.1.1-py27_0     
+        bioblend:          0.7.0-py27_0     
+        boto:              2.38.0-py27_0    
+        bx-python:         0.7.3-np110py27_1
+        cheetah:           2.4.4-py27_0     
+        decorator:         4.0.6-py27_0     
+        docutils:          0.12-py27_0      
+        ecdsa:             0.11-py27_0      
+        fabric:            1.10.2-py27_0    
+        libgfortran:       1.0-0            
+        mako:              1.0.3-py27_0     
+        markupsafe:        0.23-py27_0      
+        mercurial:         3.4.2-py27_0     
+        nose:              1.3.7-py27_0     
+        numpy:             1.10.2-py27_0    
+        openblas:          0.2.14-3         
+        openssl:           1.0.2e-0         
+        paramiko:          1.15.2-py27_0    
+        paste:             1.7.5.1-py27_0   
+        pastedeploy:       1.5.2-py27_1     
+        pip:               7.1.2-py27_0     
+        pycrypto:          2.6.1-py27_0     
+        python:            2.7.11-0         
+        pytz:              2015.7-py27_0    
+        pyyaml:            3.11-py27_1      
+        readline:          6.2-2            
+        repoze.lru:        0.6-py27_0       
+        requests:          2.9.1-py27_0     
+        requests-toolbelt: 0.5.0-py27_0     
+        routes:            2.2-py27_0       
+        setuptools:        19.2-py27_0      
+        six:               1.10.0-py27_0    
+        sqlalchemy:        1.0.11-py27_0    
+        sqlite:            3.9.2-0          
+        sqlparse:          0.1.18-py27_0    
+        tk:                8.5.18-0         
+        webob:             1.4.1-py27_0     
+        wheel:             0.26.0-py27_1    
+        yaml:              0.1.6-0          
+        zlib:              1.2.8-0          
+
+    Proceed ([y]/n)? 
+
+    Fetching packages ...
+    boto-2.38.0-py 100% |############################################| Time: 0:00:00   3.27 MB/s
+    cheetah-2.4.4- 100% |############################################| Time: 0:00:00   1.65 MB/s
+    decorator-4.0. 100% |############################################| Time: 0:00:00  20.38 MB/s
+    docutils-0.12- 100% |############################################| Time: 0:00:00   2.21 MB/s
+    ecdsa-0.11-py2 100% |############################################| Time: 0:00:00 762.58 kB/s
+    markupsafe-0.2 100% |############################################| Time: 0:00:00 931.23 kB/s
+    mercurial-3.4. 100% |############################################| Time: 0:00:00   5.36 MB/s
+    nose-1.3.7-py2 100% |############################################| Time: 0:00:00   1.12 MB/s
+    paste-1.7.5.1- 100% |############################################| Time: 0:00:00   1.91 MB/s
+    pytz-2015.7-py 100% |############################################| Time: 0:00:00   1.08 MB/s
+    repoze.lru-0.6 100% |############################################| Time: 0:00:00 465.26 kB/s
+    requests-2.9.1 100% |############################################| Time: 0:00:00   2.28 MB/s
+    six-1.10.0-py2 100% |############################################| Time: 0:00:00 477.04 kB/s
+    sqlalchemy-1.0 100% |############################################| Time: 0:00:00   4.25 MB/s
+    sqlparse-0.1.1 100% |############################################| Time: 0:00:00 774.57 kB/s
+    webob-1.4.1-py 100% |############################################| Time: 0:00:00 819.13 kB/s
+    babel-2.1.1-py 100% |############################################| Time: 0:00:00   5.53 MB/s
+    bx-python-0.7. 100% |############################################| Time: 0:00:00   5.11 MB/s
+    mako-1.0.3-py2 100% |############################################| Time: 0:00:00 813.04 kB/s
+    paramiko-1.15. 100% |############################################| Time: 0:00:00   1.23 MB/s
+    pastedeploy-1. 100% |############################################| Time: 0:00:00 721.20 kB/s
+    requests-toolb 100% |############################################| Time: 0:00:00 856.06 kB/s
+    routes-2.2-py2 100% |############################################| Time: 0:00:00 666.70 kB/s
+    bioblend-0.7.0 100% |############################################| Time: 0:00:00   1.15 MB/s
+    fabric-1.10.2- 100% |############################################| Time: 0:00:00 843.81 kB/s
+    Extracting packages ...
+    [      COMPLETE      ]|###############################################################| 100%
+    Linking packages ...
+    [      COMPLETE      ]|###############################################################| 100%
+    #
+    # To activate this environment, use:
+    # $ source activate galaxy
+    #
+    # To deactivate this environment, use:
+    # $ source deactivate
+    #
+    $ source activate galaxy
+    discarding /home/nate/conda/bin from PATH
+    prepending /home/nate/conda/envs/galaxy/bin to PATH
+    $ pip install --index-url=https://wheels.galaxyproject.org/simple/ -r lib/galaxy/dependencies/requirements.txt
+    Requirement already satisfied (use --upgrade to upgrade): numpy in /home/nate/conda/envs/galaxy/lib/python2.7/site-packages (from -r lib/galaxy/dependencies/requirements.txt (line 1))
+
+      ...
+
+    Collecting WebHelpers (from -r lib/galaxy/dependencies/requirements.txt (line 15))
+      Downloading https://wheels.galaxyproject.org/packages/WebHelpers-1.3-py2-none-any.whl (149kB)
+        100% |████████████████████████████████| 151kB 55.7MB/s 
+
+      ...
+
+    Building wheels for collected packages: pysam
+      Running setup.py bdist_wheel for pysam
+
+    $ sh run.sh --skip-wheels
+
+.. _Conda: http://conda.pydata.org/
+.. _Conda environments: http://conda.pydata.org/docs/using/envs.html
+.. _Bioconda: https://bioconda.github.io/
+.. _Bioconda instructions: Bioconda_
+.. _pinned: `Installing unpinned dependencies`_
+.. _unpinned: pinned_
+
+uWSGI
+^^^^^
+
+The simplest way to use uWSGI with the wheel-based dependencies is to install
+uWSGI into Galaxy's virtualenv (by default, ``.venv``) using pip, e.g.:
+
+.. code-block:: console
+
+    $ . ./.venv/bin/activate
+    (.venv)$ pip install uwsgi
+    Collecting uwsgi
+      Downloading uwsgi-2.0.12.tar.gz (784kB)
+        100% |████████████████████████████████| 786kB 981kB/s 
+    Building wheels for collected packages: uwsgi
+      Running setup.py bdist_wheel for uwsgi
+      Stored in directory: /home/nate/.cache/pip/wheels/a4/7b/7c/8cbe2fe2c2b963173361cc18aa726f165dc4803effbb8195fc
+    Successfully built uwsgi
+    Installing collected packages: uwsgi
+    Successfully installed uwsgi-2.0.12
+
+Because uWSGI is installed in the virtualenv, Galaxy's dependencies will be
+found upon startup.
+
+If uWSGI is installed outside of the virtualenv (e.g. from apt) you will need
+to pass the ``-H`` option (or one of `its many aliases
+<http://uwsgi-docs.readthedocs.org/en/latest/Options.html#home>`_) on the uWSGI
+command line:
+
+.. code-block:: console
+
+    $ uwsgi --ini /srv/galaxy/config/uwsgi.ini -H /srv/galaxy/venv
+
+Or in the uWSGI config file:
+
+.. code-block:: ini
+
+    [uwsgi]
+    processes = 8
+    threads = 4
+    socket = /srv/galaxy/var/uwsgi.sock
+    logto = /srv/galaxy/var/uwsgi.log
+    master = True
+    pythonpath = /srv/galaxy/server/lib
+    pythonhome = /srv/galaxy/venv
+    module = galaxy.webapps.galaxy.buildapp:uwsgi_app_factory()
+    set = galaxy_config_file=/srv/galaxy/config/galaxy.ini
+    set = galaxy_root=/srv/galaxy/server
+
+Supervisor
+^^^^^^^^^^
+
+Many production sites use `supervisord`_ to manage their Galaxy processes
+rather than relying on ``run.sh`` or other means. There's no simple way to
+activate a virtualenv when using supervisor, but you can simulate the effects
+by setting ``$PATH`` and ``$VIRTUAL_ENV`` in your supervisor config:
+
+.. code-block:: ini
+
+    [program:galaxy_uwsgi]
+    command         = /srv/galaxy/venv/bin/uwsgi --ini /srv/galaxy/config/uwsgi.ini
+    directory       = /srv/galaxy/server
+    environment     = VIRTUAL_ENV="/srv/galaxy/venv",PATH="/srv/galaxy/venv/bin:%(ENV_PATH)s"
+    numprocs        = 1
+
+    [program:galaxy_handler]
+    command         = /srv/galaxy/venv/bin/python ./scripts/galaxy-main -c /srv/galaxy/config/galaxy.ini --server-name=handler%(process_num)s
+    directory       = /srv/galaxy/server
+    process_name    = handler%(process_num)s
+    numprocs        = 4
+    environment     = VIRTUAL_ENV="/srv/galaxy/venv",PATH="/srv/galaxy/venv/bin:%(ENV_PATH)s"
+
+With supervisor < 3.0 you cannot use the ``%(ENV_PATH)s`` template variable and
+must instead specify the full desired ``$PATH``.
+
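+A sketch of what this might look like (the exact system paths are an
+assumption; substitute whatever ``$PATH`` is appropriate for your hosts):
+
+.. code-block:: ini
+
+    environment     = VIRTUAL_ENV="/srv/galaxy/venv",PATH="/srv/galaxy/venv/bin:/usr/local/bin:/usr/bin:/bin"
+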
+.. _supervisord: http://supervisord.org/
+
+Custom pip/wheel rationale
+--------------------------
+
+We chose to use a modified version of the `pip`_ and `wheel`_ packages in order
+to make Galaxy easy to use. People wishing to run Galaxy (especially only for
+tool development) may not be systems or command line experts. Unfortunately,
+Python modules with C extensions may not always compile out of the box
+(typically due to missing compilers, headers, or other system packages) and the
+failure messages generated are typically only decipherable to people
+experienced with software compilation and almost never indicate how to fix the
+problem. In addition, even when compilation does succeed, compiling all of
+Galaxy's C extension dependencies can take a very long time. As a result, we
+want to precompile Galaxy's dependencies. However, the egg format was never
+designed for distributing precompiled extensions on any platform, and wheels
+could not do it on Linux because there is no ABI compatibility between Linux
+distributions or versions.
+
+As a benefit of using the standard tooling (pip), if you choose not to use
+Galaxy pip, all of Galaxy's dependencies should still be installable using
+standard pip. You will still need to point pip at `wheels.galaxyproject.org`_
+in order to fetch some modified packages and ones that aren't available on
+PyPI, but this can be done with the unmodified version of pip.
+
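+For example, a single modified dependency could be fetched with an unmodified
+pip like so (``Whoosh`` is used here purely for illustration):
+
+.. code-block:: console
+
+    $ pip install --index-url=https://wheels.galaxyproject.org/simple/ Whoosh
+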
+A good early discussion of these problems can be found in Armin Ronacher's
+`blog post on wheels <http://lucumr.pocoo.org/2014/1/27/python-on-wheels/>`_.
+One of the problems Armin discusses, Python interpreter ABI incompatibilities
+depending on build-time options (UCS2 vs. UCS4), has been fixed by us and
+accepted into pip >= 8.0 in `pip pull request #3075`_. The other major problem
+(the non-portability of wheels between Linux distributions) remains. `Galaxy
+pip`_ provides one solution to this problem.
+
+More recently, `PEP 513`_ has proposed a different solution to the
+cross-distro problem. PEP 513 also contains a very detailed technical
+explanation of the problem.
+
+.. _PEP 513: https://www.python.org/dev/peps/pep-0513/
+.. _pip pull request #3075: https://github.com/pypa/pip/pull/3075
+
+Galaxy pip and wheel
+--------------------
+.. _Galaxy pip:
+.. _Galaxy wheel: `Galaxy pip and wheel`_
+
+`Galaxy pip is a fork <https://github.com/natefoo/pip/tree/linux-wheels>`_ of
+`pip`_ in which we have added support for installing wheels containing C
+extensions (wheels that have compiled binary code) on Linux.  `Galaxy wheel is
+a fork <https://bitbucket.org/natefoo/wheel>`_ of `wheel`_ in which we have
+added support for building wheels installable with Galaxy pip.
+
+Two different types of wheels can be created:
+
+1. "Simple" wheels with very few dependencies outside of libc and libm, built
+   on a "suitably old" platform (currently Debian Squeeze) such that they
+   should work on all newer systems (e.g. RHEL 6+, Ubuntu 12.04+). These
+   wheels carry the unmodified ``linux_{arch}`` platform tag (e.g.
+   ``linux_x86_64``) as specified in `PEP 425`_, the same tag found on wheels
+   built with an unmodified `wheel`_.
+
+2. Wheels with specific external dependencies (for example, ``libpq.so``, the
+   PostgreSQL library, used by `psycopg2`_) can be built on each supported
+   Linux distribution and tagged more specifically for each distribution. These
+   wheels carry a ``linux_{arch}_{distro}_{version}`` platform tag (e.g.
+   ``linux_x86_64_ubuntu_14_04``) and can be created using `Galaxy wheel`_.
+
+The `manylinux`_ project implements the "Simple" wheels in a more clearly
+defined way and allows for the inclusion of "non-standard" external
+dependencies directly into the wheel. Galaxy will officially support any
+standard which allows for Linux wheels in PyPI once such a standard is
+complete.
+
+.. _PEP 425: https://www.python.org/dev/peps/pep-0425/
+.. _manylinux: https://github.com/manylinux/manylinux/
+.. _psycopg2: http://initd.org/psycopg/
+
+Wheel platform compatibility
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Galaxy pip and Galaxy wheel also include support for the proposed
+`binary-compatibility.cfg`_ file. This file allows distributions that are
+binary compatible (e.g. Red Hat Enterprise Linux 6 and CentOS 6) to use the
+same wheels.
+
+This is a JSON format file which can be installed in ``/etc/python`` or the
+root of a virtualenv (`common_startup.sh`_ creates it in the latter location)
+and provides a mapping between `PEP 425`_ platform tags. For example, the
+following ``binary-compatibility.cfg`` indicates that wheels built on the
+platform ``linux_x86_64_centos_6_7`` will have their platform tag overridden
+to ``linux_x86_64_rhel_6``. In addition, wheels tagged with
+``linux_x86_64_rhel_6_7`` and ``linux_x86_64_rhel_6`` will be installable on a
+``linux_x86_64_centos_6_7`` system:
+
+.. code-block:: json
+
+    {
+        "linux_x86_64_centos_6_7": {
+            "build": "linux_x86_64_rhel_6",
+            "install": ["linux_x86_64_rhel_6_7", "linux_x86_64_rhel_6"]
+        }
+    }
+
+Currently, Scientific Linux, CentOS, and Red Hat Enterprise Linux will be set
+as binary compatible by `common_startup.sh`_.
+
+.. _binary-compatibility.cfg: https://mail.python.org/pipermail/distutils-sig/2015-July/026617.html
+
+Adding additional wheels as Galaxy dependencies
+-----------------------------------------------
+
+New wheels can be added to Galaxy, or the versions of existing wheels can be
+updated, using `Galaxy Starforge`_, Galaxy's Docker-based build system.
+
+The process is still under development and will be streamlined and automated
+over time. For the time being, please use the following process to add new
+wheels:
+
+1. Install `Starforge`_ (e.g. with ``pip install starforge`` or ``python
+   setup.py install`` from the source). You will also need to have Docker
+   installed on your system.
+
+2. Obtain `wheels.yml`_ (this file will most likely be moved in to Galaxy in
+   the future) and add/modify the wheel definition.
+
+3. Use ``starforge wheel --wheels-config=wheels.yml <wheel-name>`` to build the
+   wheel. If the wheel includes C extensions, you will probably want to also
+   use the ``--no-qemu`` flag to prevent Starforge from attempting to build on
+   Mac OS X using QEMU/KVM.
+
+4. If the wheel build is successful, submit a pull request to `Starforge`_ with
+   your changes to `wheels.yml`_.
+
+5. A `Galaxy Committers group`_ member will need to trigger an automated build
+   of the wheel changes in your pull request. Galaxy's Jenkins_ service will
+   build these changes using Starforge.
+
+6. If the pull request is merged, submit a pull request to Galaxy modifying the
+   files in `lib/galaxy/dependencies`_ as appropriate.
+
+You may attempt to skip directly to step 4 and let the Starforge wheel PR
+builder build your wheels for you. This is especially useful if you are simply
+updating an existing wheel's version. However, if you are adding a new C
+extension wheel that is not simple to build, you may need to go through many
+iterations of updating the PR and having a `Galaxy Committers group`_ member
+trigger builds before wheels are successfully built. You can avoid this
+cycle by performing steps 1-3 locally, as shown below.
+
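+A sketch of what steps 1-3 might look like on the command line (the wheel name
+``mywheel`` is a placeholder, and the raw URL for `wheels.yml`_ is an
+assumption based on its repository location):
+
+.. code-block:: console
+
+    $ pip install starforge
+    $ wget https://raw.githubusercontent.com/galaxyproject/starforge/master/wheels/build/wheels.yml
+    $ # edit wheels.yml to add or modify the wheel definition
+    $ starforge wheel --wheels-config=wheels.yml --no-qemu mywheel
+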
+.. _Starforge:
+.. _Galaxy Starforge: https://github.com/galaxyproject/starforge/
+.. _wheels.yml: https://github.com/galaxyproject/starforge/blob/master/wheels/build/wheels.yml
+.. _Galaxy Committers group: https://github.com/galaxyproject/galaxy/blob/dev/doc/source/project/organization.rst#committers
+.. _Jenkins: https://jenkins.galaxyproject.org/
+.. _lib/galaxy/dependencies: https://github.com/galaxyproject/galaxy/tree/dev/lib/galaxy/dependencies
diff --git a/doc/source/admin/grt.rst b/doc/source/admin/grt.rst
new file mode 100644
index 0000000..cde12d7
--- /dev/null
+++ b/doc/source/admin/grt.rst
@@ -0,0 +1,41 @@
+Galactic Radio Telescope
+========================
+
+This is an opt-in service which Galaxy admins can configure to contribute their
+job run data back to the community. We hope that by collecting this information
+we can build accurate models of tool CPU/memory/time requirements. In turn,
+admins will be able to use this analyzed data to optimize their job
+distribution across highly heterogeneous clusters.
+
+Registration
+------------
+
+You will need to register your Galaxy instance with the Galactic Radio
+Telescope (GRT). This can be done at `https://radio-telescope.galaxyproject.org
+<https://radio-telescope.galaxyproject.org>`__.
+
+Submitting Data
+---------------
+
+Once you've registered your Galaxy instance, you'll receive an instance ID and
+an API key, which are used to run ``scripts/grt.py``. The tool itself is very
+simple to run. It collects the last 7 days (by default) of data from your
+Galaxy server and sends them to the GRT for processing and display.
+Additionally, it collects the total number of users and the number of users
+who ran jobs in the last N days.
+
+Running the tool is simple:
+
+.. code-block:: shell
+
+    python scripts/grt.py \
+        <INSTANCE_UUID> \
+        <API_KEY> \
+        -c config/galaxy.ini \
+        --grt-url https://radio-telescope.galaxyproject.org/api/v1/upload/ \
+        --days 7
+
+The only required parameters are the instance ID and API key. As you can see in
+the example command, the GRT URL is configurable. If you do not wish to
+participate in the public version of this experiment you can host your own
+radio telescope to collect Galactic information.
diff --git a/doc/source/admin/images_webhooks/history-menu.png b/doc/source/admin/images_webhooks/history-menu.png
new file mode 100644
index 0000000..9c746a4
Binary files /dev/null and b/doc/source/admin/images_webhooks/history-menu.png differ
diff --git a/doc/source/admin/images_webhooks/masthead.png b/doc/source/admin/images_webhooks/masthead.png
new file mode 100644
index 0000000..4fdcd40
Binary files /dev/null and b/doc/source/admin/images_webhooks/masthead.png differ
diff --git a/doc/source/admin/images_webhooks/masthead_trans_object.png b/doc/source/admin/images_webhooks/masthead_trans_object.png
new file mode 100644
index 0000000..cd47611
Binary files /dev/null and b/doc/source/admin/images_webhooks/masthead_trans_object.png differ
diff --git a/doc/source/admin/images_webhooks/tool.png b/doc/source/admin/images_webhooks/tool.png
new file mode 100644
index 0000000..b02deff
Binary files /dev/null and b/doc/source/admin/images_webhooks/tool.png differ
diff --git a/doc/source/admin/images_webhooks/workflow.png b/doc/source/admin/images_webhooks/workflow.png
new file mode 100644
index 0000000..235ef96
Binary files /dev/null and b/doc/source/admin/images_webhooks/workflow.png differ
diff --git a/doc/source/admin/index.rst b/doc/source/admin/index.rst
new file mode 100644
index 0000000..8bbb19a
--- /dev/null
+++ b/doc/source/admin/index.rst
@@ -0,0 +1,27 @@
+Special Topics in Galaxy Administration & Deployment Documentation
+==================================================================
+
+This documentation is intended to function as a version-specific supplement
+to the `wiki <https://wiki.galaxyproject.org/Admin/>`__, not as the primary
+admin documentation. These resources should be used together.
+
+.. toctree::
+   :maxdepth: 3
+
+   dependency_resolvers.rst
+
+   conda_faq.rst
+
+   mulled_containers.rst
+
+   interactive_environments.rst
+
+   framework_dependencies.rst
+
+   useful_scripts.rst
+
+   grt.rst
+
+   chat.rst
+
+   webhooks.rst
diff --git a/doc/source/admin/interactive_environments.png b/doc/source/admin/interactive_environments.png
new file mode 100644
index 0000000..49a3164
Binary files /dev/null and b/doc/source/admin/interactive_environments.png differ
diff --git a/doc/source/admin/interactive_environments.rst b/doc/source/admin/interactive_environments.rst
new file mode 100644
index 0000000..d215e36
--- /dev/null
+++ b/doc/source/admin/interactive_environments.rst
@@ -0,0 +1,259 @@
+Galaxy Interactive Environments (GIEs)
+======================================
+
+GIEs were introduced in Galaxy 15.05, debuting with the release of the
+IPython IE. They were presented at GCC2015, and the RStudio IE was released as
+part of 15.07. The IPython IE was superseded by a Project Jupyter IE in
+Galaxy XXXXX.
+
+A GIE is a Docker container, launched by Galaxy, proxied by Galaxy, with some
+extra sugar inside the container to allow users to interact easily with their
+Galaxy histories.
+
+How GIEs Work
+-------------
+
+A GIE is primarily composed of a Docker container and the Galaxy visualization
+component. Galaxy visualization plugins are rendered using Mako templates,
+which in turn can run Python code. GIEs build upon visualization plugins,
+adding features to allow for container management and proxying. The Python code
+in the Mako templates is used to launch the Docker container within which a GIE
+runs. Once this container is launched, a proxy built into Galaxy is notified,
+which helps coordinate a 1:1 mapping of users and their Docker containers.
+
+Here's a simple diagram recapping the above:
+
+.. image:: interactive_environments.png
+
+Deploying GIEs
+--------------
+
+Deploying GIEs is not a trivial operation. They have complex interactions with
+numerous services, and you'll need to be a fairly competent sysadmin to debug
+all of the possible problems that can occur during deployment. After the
+initial hurdle, most find that GIEs require little to no maintenance.
+
+An `Ansible <http://www.ansible.com/>`__ role for installing and managing GIEs
+can be found on
+`Github <https://github.com/galaxyproject/ansible-interactive-environments>`__
+and `Ansible Galaxy <https://galaxy.ansible.com/detail#/role/6056>`__.
+
+Setting up the Proxy
+^^^^^^^^^^^^^^^^^^^^
+
+Currently the Galaxy proxy is a NodeJS+Sqlite3 proxy.
+
+- Node.js has been upgraded recently, and our proxy is pinned to an old
+  version of sqlite3. As such, you'll currently need to have an older version
+  of Node available (0.10.X - 0.11.X vintage).
+- We're working on solutions in this space to provide a better deployment
+  mechanism and fewer dependencies.
+- Please note that if you have NodeJS installed under Ubuntu, it often
+  installs to ``/usr/bin/nodejs``, whereas ``npm`` expects it to be
+  ``/usr/bin/node``. You will need to create that symlink yourself (see the
+  example below).
+
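+On such Ubuntu systems, creating the symlink might look like this (assuming
+the standard Ubuntu install locations):
+
+.. code-block:: console
+
+    $ sudo ln -s /usr/bin/nodejs /usr/bin/node
+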
+Once Node and npm are ready to go, you'll need to install the dependencies:
+
+.. code-block:: console
+
+    $ cd $GALAXY_ROOT/lib/galaxy/web/proxy/js
+    $ npm install
+
+Running ``node lib/main.js --help`` should produce some useful help text:
+
+.. code-block:: console
+
+    Usage: main [options]
+
+    Options:
+
+    -h, --help             output usage information
+    -V, --version          output the version number
+    --ip <n>               Public-facing IP of the proxy
+    --port <n>             Public-facing port of the proxy
+    --cookie <cookiename>  Cookie proving authentication
+    --sessions <file>      Routes file to monitor
+    --verbose
+
+There are two ways to handle actually running the proxy. The first is to have
+Galaxy automatically launch the proxy as needed. This has been the default
+configuration since 2014. Alternatively, the proxy can be started manually or
+via a system such as Supervisord. Assuming that the ``$GALAXY_ROOT`` environment
+variable refers to the location of the Galaxy installation, the command for
+launching the proxy is:
+
+.. code-block:: console
+
+    $ node $GALAXY_ROOT/lib/galaxy/web/proxy/js/lib/main.js --ip 0.0.0.0 \
+        --port 8800 --sessions $GALAXY_ROOT/database/session_map.sqlite \
+        --cookie galaxysession --verbose
+
+And this can be configured in your supervisord config by adding:
+
+.. code-block:: ini
+
+    [program:galaxy_nodejs_proxy]
+    directory       = GALAXY_ROOT
+    command         = GALAXY_ROOT/lib/galaxy/web/proxy/js/lib/main.js --sessions database/session_map.sqlite --ip 0.0.0.0 --port 8800
+    autostart       = true
+    autorestart     = unexpected
+    user            = GALAXY_USER
+    startsecs       = 5
+    redirect_stderr = true
+
+where ``GALAXY_ROOT`` is the location of your Galaxy installation and ``GALAXY_USER`` is the username of the user that
+Galaxy runs as.
+
+Configuring the Proxy
+^^^^^^^^^^^^^^^^^^^^^
+
+Configuration is all managed in ``galaxy.ini``. The default arguments used
+for the proxy are:
+
+.. code-block:: ini
+
+    dynamic_proxy_manage=True
+    dynamic_proxy_session_map=database/session_map.sqlite
+    dynamic_proxy_bind_port=8800
+    dynamic_proxy_bind_ip=0.0.0.0
+    dynamic_proxy_debug=True
+
+As you can see, most of these variables map directly to the command line
+arguments to the NodeJS script. There are a few extra parameters which will
+be needed if you run Galaxy behind an upstream proxy like nginx or
+Apache:
+
+.. code-block:: ini
+
+    dynamic_proxy_external_proxy=True
+    dynamic_proxy_prefix=gie_proxy
+
+The first option says that you have Galaxy and the Galaxy NodeJS proxy wrapped
+in an upstream proxy like Apache or NGINX. This will cause Galaxy to connect
+users to the same port as Galaxy is being served on (so 80/443), rather than
+directing them to port 8800.
+
+The second option is closely intertwined with the first option. When Galaxy is
+accessed, it sets a cookie called ``galaxysession``. This cookie generally cannot be sent with requests
+to different domains and different ports, so Galaxy and the dynamic proxy must
+be accessible on the same port and protocol. In addition, the cookie is only
+accessible to URLs that share the same prefix as the Galaxy URL. For example,
+if you're running Galaxy under a URL like ``https://f.q.d.n/galaxy/``, the cookie
+is only accessible to URLs that look like ``https://f.q.d.n/galaxy/*``. The
+second (``dynamic_proxy_prefix``) option sets the URL path that's used to
+differentiate requests that should go through the proxy to those that should go
+to Galaxy. You will need to add special upstream proxy configuration to handle
+this, and you'll need to use the same ``dynamic_proxy_prefix`` in your
+``galaxy.ini`` that you use in your URL routes.
+
+In the examples below, we assume that your Galaxy installation is available
+at a URL such as ``https://f.q.d.n/galaxy``. If instead it is available at a
+URL like ``https://f.q.d.n``, remove the ``/galaxy`` prefix from the examples.
+For example ``/galaxy/gie_proxy`` would become ``/gie_proxy``. Remember that
+``gie_proxy`` is the value you use for the ``dynamic_proxy_prefix`` option. If
+you use a different value in that option you should change the examples
+accordingly.
+
+**Apache**
+
+.. code-block:: apache
+
+    # Project Jupyter specific. Other IEs may require their own routes.
+    ProxyPass        /galaxy/gie_proxy/jupyter/ipython/api/kernels ws://localhost:8800/galaxy/gie_proxy/jupyter/ipython/api/kernels
+
+    # Global GIE configuration
+    ProxyPass        /galaxy/gie_proxy http://localhost:8800/galaxy/gie_proxy
+    ProxyPassReverse /galaxy/gie_proxy http://localhost:8800/galaxy/gie_proxy
+
+    # Normal Galaxy configuration
+    ProxyPass        /galaxy http://localhost:8000/galaxy
+    ProxyPassReverse /galaxy http://localhost:8000/galaxy
+
+Please note you will need to be using Apache 2.4 with ``mod_proxy_wstunnel``.
+
+**Nginx**
+
+.. code-block:: nginx
+
+    # Global GIE configuration
+    location /galaxy/gie_proxy {
+        proxy_pass http://localhost:8800/galaxy/gie_proxy;
+        proxy_redirect off;
+    }
+
+    # Project Jupyter / IPython specific. Other IEs may require their own routes.
+    location ~ ^/galaxy/gie_proxy/jupyter/(?<nbtype>[^/]+)/api/kernels(?<rest>.*?)$ {
+        proxy_pass http://localhost:8800/galaxy/gie_proxy/jupyter/$nbtype/api/kernels$rest;
+        proxy_redirect off;
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection "upgrade";
+    }
+
+If you proxy static content, you may find the following rule useful for
+proxying to GIE and other visualization plugin static content.
+
+.. code-block:: nginx
+
+    location ~ ^/plugins/(?<plug_type>.+?)/(?<vis_name>.+?)/static/(?<static_file>.*?)$ {
+        alias /path/to/galaxy-dist/config/plugins/$plug_type/$vis_name/static/$static_file;
+    }
+
+Docker on Another Host
+^^^^^^^^^^^^^^^^^^^^^^
+
+You might want to run your IEs on a host different from the one that hosts your
+Galaxy webserver, since IEs on the same host as the webserver compete with it
+for resources and introduce some security considerations which can be
+mitigated by moving containers to a separate host. This feature has been
+available since 15.07 and is used in production at the University of Freiburg.
+
+First you need to configure a second host to be Docker-enabled. In the
+following we call this host ``gx-docker``. You need to start the Docker daemon
+and bind it to a TCP port, not to a Unix socket as is the default. For example,
+you can start the daemon with:
+
+.. code-block:: console
+
+    $ docker -H 0.0.0.0:4243 -d
+
+On your client, the Galaxy webserver, you can now install a Docker client. This
+can also be done on older systems like Scientific Linux or CentOS 6, which do
+not have Docker support by default. The client just talks to the Docker daemon
+on host ``gx-docker`` and does not run anything locally itself. You can test
+your configuration, for example, by starting busybox from your client on the
+Docker host with:
+
+.. code-block:: console
+
+    $ docker -H tcp://gx-docker:4243 run -it busybox sh
+
+So far so good! Now we need to configure Galaxy to use our new Docker host to
+start the Interactive Environments. For that we need to edit the IPython GIE
+configuration, ``ipython.ini``, to use our custom Docker host:
+
+.. code-block:: ini
+
+    [main]
+
+    [docker]
+    command = docker -H tcp://gx-docker:4243 {docker_args}
+    image = bgruening/docker-ipython-notebook:dev
+    docker_hostname = gx-docker
+
+Please adapt your ``command`` and the ``image`` as needed.
+
+As a next step we need to configure a shared mount point between the Docker
+host and Galaxy. Unfortunately, this cannot be an NFS mount; Docker does not
+work well with NFS yet. You could, for example, use an sshfs mount with the
+following script:
+
+.. code-block:: bash
+
+    # Mount the shared directory from the Docker host via sshfs if it is not
+    # already mounted.
+    if mount | grep -q '^gx-docker:/var/tmp/gx-docker'; then
+        echo '/var/tmp/gx-docker already mounted.'
+    else
+        echo 'Mounting ...'
+        sshfs gx-docker:/var/tmp/gx-docker /var/tmp/gx-docker
+    fi
+
+This will let Galaxy and the Docker host share temporary files.
diff --git a/doc/source/admin/interactive_environments.svg b/doc/source/admin/interactive_environments.svg
new file mode 100644
index 0000000..564913e
--- /dev/null
+++ b/doc/source/admin/interactive_environments.svg
@@ -0,0 +1,2536 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:xlink="http://www.w3.org/1999/xlink"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="242.76199"
+   height="360.90646"
+   id="svg2"
+   version="1.1"
+   inkscape:version="0.48.5 r10040"
+   sodipodi:docname="diagram.svg">
+  <title
+     id="title5754">Galaxy IPython Graphic</title>
+  <defs
+     id="defs4">
+    <marker
+       inkscape:stockid="Arrow1Mstart"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mstart"
+       style="overflow:visible">
+      <path
+         id="path4885"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(0.4,0,0,0.4,4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Lstart"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Lstart"
+       style="overflow:visible">
+      <path
+         id="path4879"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(0.8,0,0,0.8,10,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="SquareL"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="SquareL"
+       style="overflow:visible">
+      <path
+         id="path4494"
+         d="M -5,-5 -5,5 5,5 5,-5 -5,-5 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="scale(0.8,0.8)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="EmptyDiamondL"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="EmptyDiamondL"
+       style="overflow:visible">
+      <path
+         id="path4530"
+         d="M 0,-7.0710768 -7.0710894,0 0,7.0710589 7.0710462,0 0,-7.0710768 z"
+         style="fill:#ffffff;fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="scale(0.8,0.8)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="Tail"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Tail"
+       style="overflow:visible">
+      <g
+         id="g4460"
+         transform="scale(-1.2,-1.2)">
+        <path
+           id="path4462"
+           d="M -3.8048674,-3.9585227 0.54352094,0"
+           style="fill:none;stroke:#000000;stroke-width:0.80000001;stroke-linecap:round"
+           inkscape:connector-curvature="0" />
+        <path
+           id="path4464"
+           d="M -1.2866832,-3.9585227 3.0617053,0"
+           style="fill:none;stroke:#000000;stroke-width:0.80000001;stroke-linecap:round"
+           inkscape:connector-curvature="0" />
+        <path
+           id="path4466"
+           d="M 1.3053582,-3.9585227 5.6537466,0"
+           style="fill:none;stroke:#000000;stroke-width:0.80000001;stroke-linecap:round"
+           inkscape:connector-curvature="0" />
+        <path
+           id="path4468"
+           d="M -3.8048674,4.1775838 0.54352094,0.21974226"
+           style="fill:none;stroke:#000000;stroke-width:0.80000001;stroke-linecap:round"
+           inkscape:connector-curvature="0" />
+        <path
+           id="path4470"
+           d="M -1.2866832,4.1775838 3.0617053,0.21974226"
+           style="fill:none;stroke:#000000;stroke-width:0.80000001;stroke-linecap:round"
+           inkscape:connector-curvature="0" />
+        <path
+           id="path4472"
+           d="M 1.3053582,4.1775838 5.6537466,0.21974226"
+           style="fill:none;stroke:#000000;stroke-width:0.80000001;stroke-linecap:round"
+           inkscape:connector-curvature="0" />
+      </g>
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend"
+       style="overflow:visible">
+      <path
+         id="path4433"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Lend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Lend"
+       style="overflow:visible">
+      <path
+         id="path4427"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.8,0,0,-0.8,-10,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath4075">
+      <path
+         id="path4077"
+         d="m 110,287.375 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.677813,0.67781 -1.274434,1.421 -1.8125,2.21875 -0.538066,0.79775 -0.996484,1.63336 -1.375,2.53125 -0.378516,0.89789 -0.67584,1.86553 -0.875,2.84375 C 95.11334,300.32197 95,301.33625 95,302.375 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.496484,1. [...]
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath4075-7">
+      <path
+         id="path4077-1"
+         d="m 110,287.375 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.677813,0.67781 -1.274434,1.421 -1.8125,2.21875 -0.538066,0.79775 -0.996484,1.63336 -1.375,2.53125 -0.378516,0.89789 -0.67584,1.86553 -0.875,2.84375 C 95.11334,300.32197 95,301.33625 95,302.375 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.496484,1. [...]
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-7"
+       style="overflow:visible">
+      <path
+         inkscape:connector-curvature="0"
+         id="path4433-5"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-8"
+       style="overflow:visible">
+      <path
+         inkscape:connector-curvature="0"
+         id="path4433-7"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-2"
+       style="overflow:visible">
+      <path
+         inkscape:connector-curvature="0"
+         id="path4433-58"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-88"
+       style="overflow:visible">
+      <path
+         inkscape:connector-curvature="0"
+         id="path4433-1"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-5"
+       style="overflow:visible">
+      <path
+         inkscape:connector-curvature="0"
+         id="path4433-9"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-3"
+       style="overflow:visible">
+      <path
+         inkscape:connector-curvature="0"
+         id="path4433-19"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)" />
+    </marker>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath3769">
+      <path
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         d="m -91,326.36219 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.67781,0.67781 -1.27443,1.421 -1.8125,2.21875 -0.53807,0.79775 -0.99648,1.63336 -1.375,2.53125 -0.37852,0.89789 -0.67584,1.86553 -0.875,2.84375 -0.19916,0.97822 -0.3125,1.9925 -0.3125,3.03125 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.49648,1.9 [...]
+         id="path3771"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath3839">
+      <path
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         d="m -104.42,345.58218 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.49648,1.91461 0.875,2.8125 0.37852,0.89789 0.83693,1.7335 1.375,2.53125 0.53807,0.79775 1.13469,1.54094 1.8125,2.21875 0.677812,0.67781 1.421002,1.30568 2.218752,1.84375 0.79775,0.53807 1.66461,0.99648 2.5625,1.375 0.89789,0.37852 1.83428,0.67584 2.8125,0.875 0.97822,0.19916 1.9925,0.3125 3.03125,0.3125 l 100,0 c 1.03875,0 2.05303,-0.11334 3.03125,-0.3125 0.97822,-0.19916 1.91461,-0.49648 2. [...]
+         id="path3841"
+         inkscape:connector-curvature="0"
+         sodipodi:nodetypes="csssssssssssssssssscc" />
+    </clipPath>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath4075-5">
+      <path
+         id="path4077-8"
+         d="m 110,287.375 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.677813,0.67781 -1.274434,1.421 -1.8125,2.21875 -0.538066,0.79775 -0.996484,1.63336 -1.375,2.53125 -0.378516,0.89789 -0.67584,1.86553 -0.875,2.84375 C 95.11334,300.32197 95,301.33625 95,302.375 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.496484,1. [...]
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath3250">
+      <path
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         d="m -105.42,344.58218 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.49648,1.91461 0.875,2.8125 0.37852,0.89789 0.83693,1.7335 1.375,2.53125 0.53807,0.79775 1.13469,1.54094 1.8125,2.21875 0.67781,0.67781 1.421002,1.30568 2.218752,1.84375 0.79775,0.53807 1.66461,0.99648 2.5625,1.375 0.89789,0.37852 1.83428,0.67584 2.8125,0.875 0.97822,0.19916 1.9925,0.3125 3.03125,0.3125 61,0 52,0 59,0 l 0,-55 c 0,0 -67.5,0 -74.000002,0 z"
+         id="path3252"
+         inkscape:connector-curvature="0"
+         sodipodi:nodetypes="csssssssscccc" />
+    </clipPath>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-9"
+       style="overflow:visible">
+      <path
+         id="path4433-4"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath3326">
+      <path
+         inkscape:connector-curvature="0"
+         style="fill:none;stroke:#000000;stroke-width:3.08463287;stroke-miterlimit:4;stroke-dasharray:none"
+         d="m 190.07552,477.03785 c -1.41457,0 -2.79583,0.14336 -4.12799,0.49125 -1.33217,0.3479 -2.60734,0.86724 -3.8301,1.52841 -1.22277,0.66119 -2.40327,1.46191 -3.48966,2.40177 -1.08638,0.93989 -2.09847,2.0366 -3.02152,3.22056 -0.92306,1.18396 -1.73555,2.48212 -2.46829,3.87559 -0.73275,1.39346 -1.35703,2.85307 -1.87249,4.42144 -0.51547,1.56839 -0.92037,3.25861 -1.19159,4.9673 -0.27122,1.7087 -0.42557,3.48039 -0.42557,5.29482 l 0,69.8697 c 0,1.81443 0.15435,3.58611 0.42557,5.29482 0.2 [...]
+         id="path3328" />
+    </clipPath>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath4075-6">
+      <path
+         id="path4077-5"
+         d="m 110,287.375 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.677813,0.67781 -1.274434,1.421 -1.8125,2.21875 -0.538066,0.79775 -0.996484,1.63336 -1.375,2.53125 -0.378516,0.89789 -0.67584,1.86553 -0.875,2.84375 C 95.11334,300.32197 95,301.33625 95,302.375 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.496484,1. [...]
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath4075-6-4">
+      <path
+         id="path4077-5-7"
+         d="m 110,287.375 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.677813,0.67781 -1.274434,1.421 -1.8125,2.21875 -0.538066,0.79775 -0.996484,1.63336 -1.375,2.53125 -0.378516,0.89789 -0.67584,1.86553 -0.875,2.84375 C 95.11334,300.32197 95,301.33625 95,302.375 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.496484,1. [...]
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath4075-6-3">
+      <path
+         id="path4077-5-4"
+         d="m 110,287.375 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.677813,0.67781 -1.274434,1.421 -1.8125,2.21875 -0.538066,0.79775 -0.996484,1.63336 -1.375,2.53125 -0.378516,0.89789 -0.67584,1.86553 -0.875,2.84375 C 95.11334,300.32197 95,301.33625 95,302.375 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.496484,1. [...]
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-0"
+       style="overflow:visible">
+      <path
+         id="path4433-59"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-04"
+       style="overflow:visible">
+      <path
+         id="path4433-42"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath4075-2">
+      <path
+         id="path4077-82"
+         d="m 110,287.375 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.677813,0.67781 -1.274434,1.421 -1.8125,2.21875 -0.538066,0.79775 -0.996484,1.63336 -1.375,2.53125 -0.378516,0.89789 -0.67584,1.86553 -0.875,2.84375 C 95.11334,300.32197 95,301.33625 95,302.375 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.496484,1. [...]
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <clipPath
+       clipPathUnits="userSpaceOnUse"
+       id="clipPath4075-2-1">
+      <path
+         id="path4077-82-0"
+         d="m 110,287.375 c -1.03875,0 -2.05303,0.0821 -3.03125,0.28125 -0.97822,0.19916 -1.91461,0.49648 -2.8125,0.875 -0.89789,0.37852 -1.76475,0.83693 -2.5625,1.375 -0.79775,0.53807 -1.54094,1.16594 -2.21875,1.84375 -0.677813,0.67781 -1.274434,1.421 -1.8125,2.21875 -0.538066,0.79775 -0.996484,1.63336 -1.375,2.53125 -0.378516,0.89789 -0.67584,1.86553 -0.875,2.84375 C 95.11334,300.32197 95,301.33625 95,302.375 l 0,40 c 0,1.03875 0.11334,2.05303 0.3125,3.03125 0.19916,0.97822 0.496484,1. [...]
+         style="fill:none;stroke:#000000;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none"
+         inkscape:connector-curvature="0" />
+    </clipPath>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-6"
+       style="overflow:visible">
+      <path
+         id="path4433-6"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-25"
+       style="overflow:visible">
+      <path
+         id="path4433-43"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend-4"
+       style="overflow:visible">
+      <path
+         id="path4433-75"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="2.6924229"
+     inkscape:cx="161.42482"
+     inkscape:cy="253.42584"
+     inkscape:document-units="px"
+     inkscape:current-layer="layer1"
+     showgrid="false"
+     inkscape:snap-global="true"
+     inkscape:window-width="1366"
+     inkscape:window-height="702"
+     inkscape:window-x="0"
+     inkscape:window-y="27"
+     inkscape:window-maximized="1"
+     inkscape:snap-bbox="true"
+     fit-margin-top="5"
+     fit-margin-left="5"
+     fit-margin-right="5"
+     fit-margin-bottom="5"
+     showguides="true"
+     inkscape:guide-bbox="true"
+     inkscape:snap-grids="false"
+     inkscape:snap-to-guides="true">
+    <inkscape:grid
+       type="xygrid"
+       id="grid2985"
+       empspacing="5"
+       visible="true"
+       enabled="true"
+       snapvisiblegridlinesonly="true"
+       originx="-88.499996px"
+       originy="-415.22101px" />
+    <inkscape:grid
+       type="axonomgrid"
+       id="grid2987"
+       units="mm"
+       empspacing="5"
+       visible="true"
+       enabled="false"
+       snapvisiblegridlinesonly="true"
+       originx="-24.976666mm"
+       originy="-117.18458mm" />
+  </sodipodi:namedview>
+  <metadata
+     id="metadata7">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title>Galaxy IPython Graphic</dc:title>
+        <dc:creator>
+          <cc:Agent>
+            <dc:title>Björn Grüning, Torsten Houwaart, Eric Rasche</dc:title>
+          </cc:Agent>
+        </dc:creator>
+        <dc:language>en_US</dc:language>
+        <cc:license
+           rdf:resource="http://creativecommons.org/licenses/by-sa/3.0/" />
+      </cc:Work>
+      <cc:License
+         rdf:about="http://creativecommons.org/licenses/by-sa/3.0/">
+        <cc:permits
+           rdf:resource="http://creativecommons.org/ns#Reproduction" />
+        <cc:permits
+           rdf:resource="http://creativecommons.org/ns#Distribution" />
+        <cc:requires
+           rdf:resource="http://creativecommons.org/ns#Notice" />
+        <cc:requires
+           rdf:resource="http://creativecommons.org/ns#Attribution" />
+        <cc:permits
+           rdf:resource="http://creativecommons.org/ns#DerivativeWorks" />
+        <cc:requires
+           rdf:resource="http://creativecommons.org/ns#ShareAlike" />
+      </cc:License>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Layer 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-88.499996,-276.23475)">
+    <text
+       xml:space="preserve"
+       style="font-size:10px;font-style:normal;font-weight:normal;text-align:center;line-height:125%;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans"
+       x="142.18896"
+       y="291.41959"
+       id="text3803"
+       sodipodi:linespacing="125%"><tspan
+         sodipodi:role="line"
+         id="tspan3805"
+         x="142.18896"
+         y="291.41959">Launch IPython</tspan><tspan
+         sodipodi:role="line"
+         x="142.18896"
+         y="303.91959"
+         id="tspan3686">notebook</tspan></text>
+    <rect
+       style="fill:#ffffff;stroke:#000000;stroke-width:0.93200964;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25"
+       id="rect5068"
+       width="96.567993"
+       height="62.924458"
+       x="93.966003"
+       y="340.32208"
+       ry="0" />
+    <rect
+       style="fill:#c8b7b7;stroke:#000000;stroke-width:0.93270081;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25"
+       id="rect5068-4"
+       width="96.567299"
+       height="15.067299"
+       x="93.966347"
+       y="325.31851"
+       ry="0" />
+    <text
+       xml:space="preserve"
+       style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       x="105.97406"
+       y="335.23715"
+       id="text5088"
+       sodipodi:linespacing="125%"><tspan
+         sodipodi:role="line"
+         id="tspan5090"
+         x="105.97406"
+         y="335.23715">IE Entry Point</tspan></text>
+    <rect
+       ry="0"
+       y="343.80939"
+       x="97.25"
+       height="20"
+       width="90"
+       id="rect5110"
+       style="fill:#8080ff;stroke:none;fill-opacity:1" />
+    <flowRoot
+       xml:space="preserve"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       id="flowRoot5120"
+       transform="translate(-10.876465,-35.715541)"><flowRegion
+         id="flowRegion5122"><use
+           x="0"
+           y="0"
+           xlink:href="#rect5110"
+           id="use5124"
+           width="744.09448"
+           height="1052.3622"
+           transform="translate(14.072074,38.206398)" /></flowRegion><flowPara
+         id="flowPara5128"
+         style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans">Configure Docker Container</flowPara><flowPara
+         style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans"
+         id="flowPara3698">Run Docker Container </flowPara></flowRoot>    <rect
+       ry="0"
+       y="376.39136"
+       x="97.25"
+       height="20"
+       width="90"
+       id="rect5110-0"
+       style="fill:#8080ff;stroke:none;fill-opacity:1" />
+    <flowRoot
+       xml:space="preserve"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       id="flowRoot5120-4"
+       transform="translate(-2.839332,82.785557)"><flowRegion
+         id="flowRegion5122-2"><use
+           x="0"
+           y="0"
+           xlink:href="#rect5110"
+           id="use5124-3"
+           width="744.09448"
+           height="1052.3622"
+           transform="translate(5.9346795,-48.117229)" /></flowRegion><flowPara
+         id="flowPara5128-4"
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans">authenticate with <flowSpan
+   style="font-size:6px;font-family:Courier;-inkscape-font-specification:Courier"
+   id="flowSpan5549">Password</flowSpan></flowPara><flowPara
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans"
+         id="flowPara6258">load default notebook</flowPara><flowPara
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans"
+         id="flowPara6260" /><flowPara
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans"
+         id="flowPara5543" /><flowPara
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans"
+         id="flowPara6262" /></flowRoot>    <path
+       style="fill:none;stroke:#000000;stroke-width:1.10000002;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25;marker-end:url(#Arrow1Mend)"
+       d="m 142.25103,307.14835 c 0.0812,12.81108 -0.002,0.66115 0,13.47438"
+       id="path4418"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <text
+       xml:space="preserve"
+       style="font-size:4px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans"
+       x="147.73999"
+       y="316.12985"
+       id="text4093-1"
+       sodipodi:linespacing="125%"><tspan
+         sodipodi:role="line"
+         id="tspan4095-9"
+         x="147.73999"
+         y="316.12985"
+         style="font-size:6px">call</tspan></text>
+    <g
+       id="g5706"
+       transform="translate(-36.596011,-42.688734)">
+      <path
+         sodipodi:nodetypes="cccc"
+         inkscape:connector-curvature="0"
+         id="path4418-2-9-5"
+         d="m 204.92518,401.36218 28.70083,-3e-4 2.8e-4,-33.349 10.07895,0"
+         style="fill:none;stroke:#666666;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:2, 1;stroke-dashoffset:0;marker-end:url(#Arrow1Mend)" />
+      <path
+         sodipodi:nodetypes="ccc"
+         inkscape:connector-curvature="0"
+         id="path4418-2-9-5-8"
+         d="m 233.62629,401.36188 -2.8e-4,3.807 10.07923,0"
+         style="fill:none;stroke:#666666;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:2, 1;stroke-dashoffset:0;marker-end:url(#Arrow1Mend)" />
+    </g>
+    <g
+       id="g3481" />
+    <path
+       style="fill:none;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25;marker-end:url(#Arrow1Mend)"
+       d="m 170.12147,392.3091 50.60653,0 0,22.01393"
+       id="path4418-2-7"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="ccc" />
+    <flowRoot
+       xml:space="preserve"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       id="flowRoot5120-2"
+       transform="translate(-12.095552,-14.384445)"><flowRegion
+         id="flowRegion5122-4"><use
+           x="0"
+           y="0"
+           xlink:href="#rect5110"
+           id="use5124-4"
+           width="744.09448"
+           height="1052.3622"
+           transform="translate(14.324028,23.298349)" /></flowRegion><flowPara
+         style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans"
+         id="flowPara5697" /></flowRoot>    <path
+       style="fill:none;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25;marker-end:url(#Arrow1Mend)"
+       d="m 185.67613,379.54889 21.4331,0"
+       id="path4418-2"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <rect
+       style="fill:#ffffff;stroke:#000000;stroke-width:0.62447745;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25"
+       id="rect5068-6-8"
+       width="109.07153"
+       height="19.028866"
+       x="213.2879"
+       y="382.33243"
+       ry="0" />
+    <rect
+       style="fill:#c8b7b7;stroke:#000000;stroke-width:0.625;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25"
+       id="rect5068-4-0-1"
+       width="109.15531"
+       height="10.153492"
+       x="213.24542"
+       y="358.26517"
+       ry="0" />
+    <text
+       xml:space="preserve"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       x="214.56726"
+       y="364.25571"
+       id="text5088-0-4"
+       sodipodi:linespacing="125%"
+       transform="scale(0.99892576,1.0010754)"><tspan
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+         sodipodi:role="line"
+         id="tspan5090-9-4"
+         x="214.56726"
+         y="364.25571">ipython_galaxy_notebook.ipynb</tspan></text>
+    <rect
+       style="fill:#ffffff;stroke:#000000;stroke-width:0.625;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25"
+       id="rect5068-6"
+       width="109.25714"
+       height="40.198853"
+       x="213.19452"
+       y="314.77185"
+       ry="0" />
+    <rect
+       style="fill:#c8b7b7;stroke:#000000;stroke-width:0.625;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25"
+       id="rect5068-4-0"
+       width="109.15437"
+       height="10.154368"
+       x="213.24591"
+       y="314.82321"
+       ry="0" />
+    <text
+       xml:space="preserve"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       x="216.90891"
+       y="321.15759"
+       id="text5088-0"
+       sodipodi:linespacing="125%"><tspan
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+         sodipodi:role="line"
+         id="tspan5090-9"
+         x="216.90891"
+         y="321.15759">config.yaml</tspan></text>
+    <text
+       transform="scale(0.98530662,1.0149125)"
+       xml:space="preserve"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       x="221.76077"
+       y="328.12482"
+       id="text5201"
+       sodipodi:linespacing="125%"><tspan
+         sodipodi:role="line"
+         id="tspan5203"
+         x="221.76077"
+         y="328.12482">- History_ID</tspan><tspan
+         sodipodi:role="line"
+         x="221.76077"
+         y="335.62482"
+         id="tspan5205">- API_Key</tspan><tspan
+         sodipodi:role="line"
+         x="221.76077"
+         y="343.12482"
+         id="tspan5207">- Password</tspan></text>
+    <rect
+       ry="0"
+       y="281.771"
+       x="209.92075"
+       height="121.47555"
+       width="115.80465"
+       id="rect5068-6-3"
+       style="fill:none;stroke:#000000;stroke-width:1.01499999;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+    <rect
+       style="fill:#8080ff;stroke:#000000;stroke-width:1.01499999;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0"
+       id="rect5248"
+       width="115.86283"
+       height="30.362831"
+       x="209.89166"
+       y="281.74225"
+       ry="0" />
+    <text
+       sodipodi:linespacing="125%"
+       id="text5300"
+       y="306.04703"
+       x="211.61868"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       xml:space="preserve"><tspan
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+         y="306.04703"
+         x="211.61868"
+         id="tspan5302"
+         sodipodi:role="line">IPython + scipy stack installed</tspan></text>
+    <rect
+       ry="0"
+       y="281.74686"
+       x="209.89627"
+       height="15.353636"
+       width="115.85364"
+       id="rect5322"
+       style="fill:#000080;stroke:#000000;stroke-width:1.01499999;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+    <text
+       xml:space="preserve"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#ffffff;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       x="223.7728"
+       y="293.1517"
+       id="text4321-8"
+       sodipodi:linespacing="125%"><tspan
+         sodipodi:role="line"
+         id="tspan4323-9"
+         x="223.7728"
+         y="293.1517"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans">Docker Container</tspan></text>
+    <rect
+       style="fill:#c8b7b7;fill-opacity:1;stroke:#000000;stroke-width:0.625;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0.25"
+       id="rect5068-4-0-1-3"
+       width="109.07056"
+       height="10.223566"
+       x="213.28781"
+       y="372.23035"
+       ry="0" />
+    <text
+       sodipodi:linespacing="125%"
+       id="text5201-0"
+       y="378.99741"
+       x="215.05746"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       xml:space="preserve"><tspan
+         id="tspan5207-0"
+         y="378.99741"
+         x="215.05746"
+         sodipodi:role="line"
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;fill:#000000;stroke:none;font-family:Sans;-inkscape-font-specification:Sans">IPython Webservice </tspan></text>
+    <text
+       xml:space="preserve"
+       style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+       x="237.43539"
+       y="393.45691"
+       id="text4093-1-7-5-6"
+       sodipodi:linespacing="125%"><tspan
+         sodipodi:role="line"
+         x="237.43539"
+         y="393.45691"
+         style="font-size:6px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:start;line-height:125%;writing-mode:lr-tb;text-anchor:start;font-family:Courier 10 Pitch;-inkscape-font-specification:Courier 10 Pitch"
+         id="tspan5748"><tspan
+           style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:Sans;-inkscape-font-specification:Sans"
+           id="tspan5820">While connection active</tspan></tspan></text>
+    <g
+       id="g5814"
+       transform="translate(2.0196691,0)">
+      <path
+         sodipodi:nodetypes="csssc"
+         inkscape:connector-curvature="0"
+         id="path4656"
+         d="m 225.26446,392.85184 c -0.72915,1.49657 -1.07585,3.25496 -3.6562,4.017 -2.71038,0.80042 -5.55338,-0.95445 -6.28483,-3.68426 -0.73145,-2.72982 0.88854,-5.53572 3.61836,-6.26717 2.72981,-0.73146 5.53572,0.88853 6.26717,3.61835"
+         style="fill:none;stroke:#646464;stroke-width:0.99813855;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0;marker-start:none;marker-mid:none" />
+      <path
+         inkscape:connector-curvature="0"
+         id="path5575"
+         d="m 222.41814,394.35001 3.38134,-2.46214 0.75505,4.03791 z"
+         style="fill:#646464;fill-opacity:1;stroke:#646464;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+    </g>
+    <g
+       id="g5835">
+      <rect
+         ry="0"
+         rx="0"
+         y="416.37634"
+         x="94.02726"
+         height="14.945478"
+         width="231.70747"
+         id="rect5699"
+         style="fill:#373748;fill-opacity:1;stroke:#000000;stroke-width:1.05452132;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <text
+         sodipodi:linespacing="125%"
+         id="text5088-6"
+         y="426.60788"
+         x="192.43471"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans;-inkscape-font-specification:Sans"
+         xml:space="preserve"><tspan
+           style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;fill:#ffffff;font-family:Sans;-inkscape-font-specification:Sans"
+           y="426.60788"
+           x="192.43471"
+           id="tspan5090-3"
+           sodipodi:role="line">Galaxy</tspan></text>
+      <image
+         width="213.14897"
+         height="199.60252"
+         xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAwMAAALSCAYAAACBNw0dAAAABHNCSVQICAgIfAhkiAAAIABJREFU
+tqSr9WCRvi0ZK419Z0BFKX8P032jCPbd6Ze7WuN5QBs7F9clbNE10n3qhGEM4GCgqpF3il7zL+7A
+Nj02qrer05K1m0a4Fp93f26fbt/tYgro6mdtive/BggOjqTvfGXN36GjGbopm8iAI1D2sVnUoMRg
+ZvDnOZI2JRVvXaiI/Yl6OpZxPUB+L26nVG0SKwUD5tdZnPwzr1Md/Xw8mtFrR3Msq59F5VgwAbBa
+/a0bDIjX0Ux5yyyfqDB6ETUpy04BAhxowFbLbduy9tK02o7h5KqftskXdKPUyGw9GyYiEjJP0cJO
+jmXrRN6KZrwVod7Ks9HHFKOwRJfG6292PoUx9FVbme6c55jtlC4QFVx6X+8GSQC9ejjLYDKqoqR1
+PXQfcQNAaDCPrpi65XplBQNmyGVdX2CBn3qYwQARZR95hfz15dH76JglPtZqfsbYdwaM/Nl2+r5E
+/7VEZvPHSmPBABEVXqUFejLXenEvqavDwQBjOrxMqKrhEIIBnWvofgY/3Ui35RgAwDEE3UK0P+zQ
+ZkhjlF4+bIew1zZj5zuddNugIV+7fYcU9QYswN5d76OdW3lvTUrg3PYj7Nn4Clo6lj7r0OwlrDj4
+EwZ4F5uRTd0XsOHo9xhR35hpuaD19D9x5PsoeFrd8qxVZ8C+8RTM7qy3vUz6TSTkEwDr1q+scszH
+evW3JuJ1tK68tg2nYvmi7sV5AQDs0HLGH9g6u762AGz54BfcKAIsse2KtJdb6/GIcin+/dSYfggo
+tVTEujqRuEbgzW0XsGFWV3gbu8CrJ+bv3YYPhndDoN5h83VpHPPyUSLu9ymYfVy7XCsC78ztCncJ
+YNdoKhYN1S4suYOvX/wU50vsRiSFV5cJ6KS34U3wyCEIrugGOBXGdLms6wsepR82Dec2b+PzAc5G
+z1kie+X4mXp4ae6IEn0DAJzavY1tf05CsF7/tURmq7WRbSgmfTxKV7+kn17FYv1lRwxjAhKiCi5Q
+ZKowhILE/3Dq5EmcvhQP+DRAk3bd0bmRu9k7WQh593Hl9GmcOXsFCZLaCG3WFl06hMJdLCNlJmIO
+b8Puk9eRmCtFjTrh6NirJyL8HSp5L2Qr1VmRgGOb/sLB6Aew8Q1Dp8EDEaG/cNNa9SuvHLOxXptb
+jTLraE15Vci4uhdb957FfZsGaNulC9qGeKC8ZeNm2bbV28sY1m9DVdYNHN62B2duJCBLWgMBT3VG
+v17N4S06YbZMl5WXTzkIiVjZyRfPHQGAenj7wn+Y38SuvFSVjzlyVYZtPTI/bAXMlb1S/IyA3NiD
+2Pj3EdzMdkBgm74Y0i0EbmLNYom+H+c2Yp4YOBhgGIZhHm8Kr2Be43C8fx1A4Cycjf4Yze3LTVX5
+VFW5GIZh9HhkNwsZhmEYpiKoUk9j5+EYXP5zHuZpvrsSOGIkwh7xhLuqysUwDGMMDgYYhmGYx5LC
+6z9i3OBlxXuyy9ri9YnheNRz7qoqF8MwjDH4BWKGYRjmsUQid4QTAKl7EFr2fBHfHtmKl4Mf2ccF
+qrxcDMMwxuB3BhiGYRiGYRimmsJPBhiGYRiGYRimmsLBAMMwDMMwDMNUUzgYYBiGYRiGYZhqCgcD
+DMMwDMMwDFNN4WCAYRiGYRiGYaopHAwwDMMwDMMwTDWFgwGGYRiGYRiGqaZwMMAwDMMwDMMw1RQO
+BhiGYRiGYRimmsLBAMMwDMMwDMNUUzgYYBiGYRiGYZhqCgcDDMMwDMMwDFNN4WCAYRiGYRiGYaop
+HAwwDMMwDMMwTDWFgwGGYRiGYRiGqaZwMPCooSwcml4PLu2+wnVF8eH883PxlFtrfBZd+OhkY8xE
+QPzySDSYchx5IldQzgWs+HYL7hVVvLScvSNRp8t6pJFp1+uXnXdsEhp0XIEEoaJSPNw6V2ae4giI
+X94RodNPIb8yss/8Fz1tJJDIbWFrq/1zRatF/0FRfuoqS+7hFxDUeRWSTbTPEmTvwXDfVlhyR2V1
+ubRkbukFzw5q+ZR3VmFsZAuEussR/NYFFFRaqZVNIa6v+gQrLov1RhXurxqMllP3I7NUu+ThzDuN
+IJGEYeE1teUJaXvxTu9QeDk5wNHRHQ37fYh9KXptkn8ec9t2wRcxIpaatQevjF1tnp9R3sbSTjVg
+a2sLG5kEEokEEhtb2NrawXfELmSbkdWj49H4xbLQjhep2Q+3XMupHB1Wqt4VMfhuYAgCIz/E0a1q
+fd85VAE/WCHK158+HAw8aiSuiPxgBZ6Ln4eXV92BEgAU17Fs6rewmfkzpoXaPWoJGStCOeex4rut
+uG8FR+TY5lPsXhGFGpKHX7Y5VEa5j6oulYZLW/xyrwAKhULzl4VTs8Ng+6jlqibIA0bjt8NHsXpC
+EGwetTAVohDXV32K5ZfzYHTukXMCCz9Kxrg3IuFm4DfyznyCN083QZiD/glHhL+0ElfS85D34Aze
+kX6Fka8fLJ6QOzTBlFcdseSd7cZvSri2wiDX7diXZEY0IK+LyQczoFAoUBC7GC09+mBrmgIKRSES
+1vWAi+k5VWketg/TjhduuU+O77REh5Wq95yL+PNME/y07V207Wze+Pyo4WCgCiCp0Rnzfx6G6Nn/
+w99JCtxf9yoWZE7BzzMaw16VjD3z+iLMzxe+tYMQOW0tbhaoo/y6UZuRCQDIx6npoWi/9C5K3UdT
+JmL7nJ5oWLsWvLxD0G/+AaSpAChisXZaBwR6ecHD1R31+32MU1mEvGOT8VS3hVgwqiMa13aGe5OX
+sOGesrTQIulFyxM7LmTizHej0Kx2TdT0CULr4YtwKE2AkLYP7/UMgoe7O9y8m2PC6ttQAKLHS5KB
+HSMDEfmjRh8F5zArNBSzzxcAZuoz6/AEhHZbhHl9AxHw3D7kGBZVcAOrJ7eBn0dN+If1xHt7U9Vl
+GtNP6jUseWkujsT8hnGjv0dMjogO9VHdxY8daiHqjxT14F4Ug4+bBWDs3izknX4bUS/sQEahCfko
+YkqWrQBUGSfw6TNtEebrAo9mk7AxXinaHpVdZ9F2NSZPQnSpuugjZsNibZx9bDLC2r+O/w1sjWB3
+d4SO/Bw/vtkXLeu7w9GvN764lA+AUHD9F4xrFQRfD0806DsfB9NUovrKLc9uTEXEXsvN36hchbj9
+YxSC+qzAHSWQf34eIkImYme6UKY/KFs3uYhf3hmNx72N59rWR21Pb4QN+hTHHgjlyGLC5JAycXzh
+ILSs5wFH7054/3iWug8o72Ldi21RPzAAgYEBCG77In65pnlmk70Hw70DMeSFZzGwRwTqBXYpTod8
+RP84CuG1aiEwpC0mrLwN0+YDKqTs+xADw+vAv44//Br0wJydyaV9rdUowLWfR+Opmt4ICGmHZ0aF
+wrWD9s6iuCx5p99Dr9adMW1PKo692Q2tIlqh+5vHkauXc8bhr/C3xzgMDpSXLDLvLD55/QyGLRoB
+f71TUs+2GDkkArXsJIBdADo87Ye8uCQU6NyLFLV6TED9w19iS6KxNnVFq8Gu2L4/CQLE+6ZZiPQJ
+Q0R9ijFbf3BH1M+abL+VPRboo4jG/CZ1MfFoHiAk4veOMsi7rEGSAOTsfw7+jRfgzN6SPiL59NuI
+GvMtFr6oV26BKXV7gsZTw3Ew3zRbEp3v6KOMw6+TZ+PQ/S2YOuJjnDisGZ+1l1niB82dp4npzxSI
+qRqoUmnbOH/y7f8q9andgP53JIsEEiht62jyD3uDDqSrSMi7Sl938qGOS2LpwZ4RFNjzH8ogIqI8
+OvlKCLVbcoeUJTOlpD+HUJ2I+XQ2R0WKhM00LjCU5lwooPyL8ymqz6d0PkcgUsTS4jZ1aPS+bMo9
+OpF87FrQ/LPZJCiTaNMwP4r4NpaKDMQ1nj5TpLw8UTlyjs+g5l0+oVPpRSQU3KddM0Kp7sQDdHVJ
+a/Ib9g+lqATKObuIBg9fTDGFSoozery0OjN3j6G67ZdQnJIo5+gUCm62gKILzddn5qHx5C1vQJPW
+nKP4PJVBKUUUuySSarZeQGeyVKRM3UOv1gX5TD5GaSL6VSX8Qh0bTqXjuWI6zDYoQ0n3fu5Ivn3+
+pBSBqDB6ATUNnkpHc4iy94+hwC7r6N4FU/KhEmXnHp1IPrbNaP65HBJUybRpqB+1WXqHMo22x2HK
+qdQ6Z4i2q3H7OExZenkaImbDYm2cdXQi1a7Ri365rSBV6r803N2O2nxxlfKFLDo8LZhCZp+lmz+3
+Iqlzb/rpegEJiju0/tnaVGfSIUoUkS+pTLsxIGMz9ZCDYOdADg7qP6fA52lftri9ZpSTv5jechSx
+9GNUMPVftpXmtQ2nl/c+IIHEbTHXRN1IHHvQ0mv5JCju0h+j/CnotROUfGg81eu0km4dK8+mjJC1
+m4a52VOnxTeoUMiny/OfIvdemyidiEiVQ3euJ1C+oOkfv3Yj3+5rKUnQpKvhRgP+SCQVKSluSSvy
+0qQrur2E2nm0pi/+KyAqukO/9XIkabuV6nRERJRPZ15vQCGzz1O+fr9JXE8D63Wlb6/kkkAC5Z6b
+R00CX6CDpbuYHgV06aN25OPuTu76f15P0bQjZdaclHHLKNKzNX1+NZ9IGU9rB7iQTCNn+bJk0pbe
+XhS5JoWEUjnn0rEpdamhQf2IcunMex2o6+fRVJixmXq4hNGC6NJOVcg+QbPDfGjQuoSS40zRDfqi
+qRcN3JZpvEIZu+jl51ZTvEq8bxqOL8W6WEwtPfrQVl3W4n2iZB7iY4WYDxLzs6J9qUR5D2Ms0Cef
+zrwRQuFzL1NB5k56NrAWufmMpF2Z+XT+rVAKnXWOUg18hHa8SI4vLte0uj1J46n+OGiqLZnRPml/
+0NMBo2hfdvH4HHfQcj9o3jztOl0X0Z+RYbIU8vLDBeahIPVEz08W4+nQAdg3bC9Wt3OBBHm4tuko
+ar6wE+3cpZAgDGNebYQPvjuDrHdMyTQXF9ccQ+2JX6OJkxQyp75YdrkTFPZ2sJe/hb+WXcSx7cvx
+6cm9WHslC4FFBNgBjuFjMbKxMyQyWzRsXBOUqyj1yNm+sbH0OSLlKXHsQ2PHgRvv/o2r/9lhUo91
+kACggnxIghNg27QH6syfhdGTz2LIwKH4eVU43OVATnPjxw1xbf0ink6cjX/vDkfoD9vh/eJeBNvm
+45QF+rRvMhWznmkG31LlZOPchmiEz9yE5i5SSBCJyS81xj93xfRTUoumXAPI4NtrAkLe+wVHHvRF
+ww3rUPTML2juBCjNyqc0jo3HYWS4EyRSOYIbeULIzUbMJmPtcQ85AuAkraw6S+FhtF0LcaEMecqs
+mwk2rI9Do6Ho7m8DqbIuwuqEIrxXMOwlMgQ85Q1cVYAgg0+f1zG8vh0kqIN+r4/AnOf+wS5HcfnE
+7cYILm3xy9UjeN5H/2FtHo6J2Wt4WfkX4oaY3mQd8PzS2VjXuC9WjN+P811qQCLaLmp/YIpuvKNe
+x7MN7SGBP/rOGIa3x21DbP9yZNHZlFijhGFIVCBsJXIEtA6D3V/3kaUC3CUqpO6dhwkjjyEVciAr
+Bik23fFACXgDgH0wukd4QgoZPBrUhU2qOp0sehti/Iahf7AdIPdH73Et4Px1+U2Te3Et9t27gXsT
+o7BOAkDIR55tHdzPFgBnsQrYIXzOUSTMKT9/Q/L+24Jov+EYUN8ekPmg+5jmcPqyIrJoKUJKzAO4
+DPcosRQq79wneHlXFH7YGwLbwuvGkyrisGrCSPz99C84+IwPZPrn5J6o616IVXGZUMG15DkAcGuF
+QS6vYH/SCAyE+X2zJPniY+LkuvDQXScT8SkARHyQcT9biGsm2e/DGAtKpEDIwEjkzTyE231uIjp0
+Jv6XsRzbo28jaG8ROn/ZEPaqkj6i9NNJ0/vmkzOe6mOqLVk+xhZjmR8syy+X7kdpOLvLuP5MgYOB
+KoTUqw36hLghsU8L3XpOAoASa84kgCCAStihgILsAhibHwlKARK5TJMFoTA9AanuTlCcnI5Ok26g
+68svYPCYdxBw4TDWa+VwcIVDmeMK4cGeaUbTGy/PR+R4HQgqOeq9sBpHFzSFHQBVZixuZHmjXp0R
+OHp9Ck7+uxGbVj6Pp97qiHWHv0THNh8ZP+5qsDDPJQITuqdixm9r4HcwGFMWBkCOAov0KXNwE9GH
+BFKpRC879ctuWv30EtFveTo0ROoThQmh7+HXfafQdIMUo1Y3gj20zl2dT5fJ5edTKt9S7Uyi7VFT
+d13l1NnZaLsuhLOoPBvMrJshJdtYIpVBqquUFPJy5lUSiRRSmRwSQUS+uB1l2I3plGWv4vmX3Y6F
+2cnIgC1y4+4iRwDcZGXboiW6kQhKCFS+LGUjg61copNBQgQQUPjf5xg9Lw1zj5/EiDq2yNk/Gg2n
+qfT6sBRyrcASKaBJJ4EEEpkNZBK1HqU6f1Q+diFTsXrfm2ho8gsFhbj0YUd0/TQaJbaAkNXGmH/P
+YHEHp3LSa/tYaQnNl0WLFLbONlDmFelNvpVIOr4X16Nj0Dt4KSDkID47Cwef7gbV9l14p4k9oLyP
+Pyf1xDybBdjzVRS8DNuNFMhXALZOtiLrjt3QarALph9IQv9AU/pm2ZTpw/WOG/cpXyD81CtGbd24
+nxVMtN+HMxbo49R4GFomfYbN/+bCoffP6J/yKyb/sxmX0ttiVrgjcKEsH6Eu1+S++QSNpyVTlSG/
+pPgqS/I2LMl8P1h2uaX7kZj+TIPfGajSOCB0YFukLF+GYw8EUN4V/P71ZYQOawk3W0cI8RdwJ5+g
+Sj6AX7bfN7L+1RHhw1rgzg+/4mKugKKk3ZjVdRB+uJmD+wf2o7D/R1j4vxGI9L6DA/9lQakyNdJV
+iKR3EClPLnJcivr9OiPv90+x+a4CQu5/WD6qM17adg8n/9cQIbNuIGTIK/jwyzlon34cVzNzcdro
+cWNhkBOaj++FhHmv4mCTl9HbR2oFfRrijObDG+Hyp8twLocgpB/G0h8vIl9UP2r9kqoIqnKuKYHU
+Gz0mhOLEezPxu+M4PNtA/7VSM/LRlS2GvUh7pOkFmpVR5zyRdrUtU56y61IaiY0lbaxFhcStX2DD
+rUJQ0T1s/nw9hKgh6D6kPH1pk2chLuYW0hTm3E0St1fD2LckZbRj/gV8NmElItYfwOz0dzF9Q4J5
+tiiim+QdX2DdjUJQ0X1s+XIdFF16o55dObJodJJaaN42G0JuKgpcGyLM2xagDJxcsw+JJiw7dwjr
+g9DkXTiVJgCUiXNbrpZYTw/I4FLLARnX7yFfTySnJiPQOvFHfLc/RW1vRam4cOAsUss0Pjs0fvck
+UrKykKX/9yC63EDAMawvQu//gX9jCwFVPHavOqeTs3xZpLB3lSP7XgZKq8QewZH+SD0dp7dbkhz1
+phxEanoi7t+/j/vRq9DDJQxz9+3RBAKJ2DwtCrPz38HO5SMRYCwAKbiDU0neaBPqWjwRMbB3t1aD
+4Lz1AMx5j9g4pvaJfBGfUiBu60b9rCk+Eai0saCsPuLaEiPCLmLRd2loF1kXIT0jEP/NJ7gcPgLN
+ncvWorpcU+sGPFHjqS6NqbZUUf8IWOYHzS1XTH+mwcFAlUYCj55f4uehF/FiWC3UrNsH68K/xK8T
+guDW/GW87LcUbX2DETHiTwQOaYzSQ4wMtYctw+LI7RhSzwueIZMQ+/wyzGzihgZj30Crnf1QP6Ql
+np64HX59gnHs1Vk4nGtEjFLYiaR/C7f6GCvPSUQOe7hELsSa6Xl4r6U3avg+jR/9F2LF8yFoOWMB
+uh0fibqe7vAMfxvKV77CCD9nNDV6vNSDaQCAY+MR6FnLE72mPQ0viTX0aYgcAWN+xpfN/0L/gJqo
+0/YjKIc+jZpSMf3MwlFJA0Q4/YWRQ36AMNLUNpCiZrcJCL91Gj4ThqDku3/iZRnmI3XWlv0Nrons
+BGi8PQL0HiFWRp3nInuS8XYVk8fWhLoY4mhRG+tSI3xoCDYPCYGvT0u8XzgDq95tgVrl6ksNpf+L
+UU174fsb5mxhIW6v5d0UNq43L1xa9AJWNv0a86Pa4OUfJuH+7FexKVFeQX9gA98IP2wbHgLfWk3x
+duY0rPywLbTzEbE2lKX/i1FNeuArsW0pRXBo/gbmtd2BZztHYcDAcfi5MAg1TbhLLg94DsvmuuGz
+Pl3Rp/9ofBNvB9sSg74NgsbMw6Ab4xEaGIwWkw8gB4C01lCsWD8Od2Y0R2DduqhTtzUmrYxBXiVt
+FSgLeA4/LQrCj52DUL/xM9jgVBuOcjmkJsnihIhXJ6LGd10R3qQ5urxxTC/gsUFA/5GodXIjrpiy
+1yCAwitfY/oPV3Bz88to6uUKV1dX1Or8DfTNWHFrC3ZL+uLZRg66Y6Xs3a01Brlsw4HkikYDpvYJ
+B5GxwrEMWzfuZ8v3iUBljQWH7pbRRyQeaPdMfWQUhaNHfQc4PtUf4UWZCBnWuswdbPTHgfjWpvkv
+4MkZT4vr/y1SuphiS6aPsWVhvh80t1wx/Zkmn4SIKsmlMcyjJf/cHLR8JhPLL32Hto6PWhqGeRIR
+EL+8Ezqf+gwXlrSFed1MQPy6GdjU/FNMNX/NyxMMoSivCHJHW0iQh3NzWmFk4e+48FkL2Fc0ayEJ
+G4Z3w8ZxB7Cqn6fJy6TEycGRae3wTqN/sGtqPV53bHWqTh/h8bSyqBptzH2XeQIRkPzHYDSdeB09
+Fu9ABDsuhql65F3Fjviu6D+EA4GS5OH0nG4Yty0HdnIVUHcUfvy1WcUDAQCQ1sLArxbi3s67yCVP
+OFc0GihMwN3gt/HDCxwIVApVoo/weFqpVIk25icDDMMwDMMwDFNt4XcGGIZhGIZhGKaawsEAwzAM
+wzAMw1RTOBhgGIZhGIZhmGoKBwMMwzAMwzAMU03hYIBhGIZhGIZhqikcDFQBKOcCVny7BfeKgLxj
+k9Cg4wokVPhLjRUj9/ALCOq8CrGH1P8mW23PqUJcX/UJVlwW++qNCvdXDUbLqfuRaVBm0Y3PENFo
+Js4VlE4lpO/DzPYBqBMcgqaDl+Gm3kdxKGU1Ij174t/M4mPKO6swNrIFQt3lCH7rAoxk+ZhQnj4t
+Syeq6/zzmNu2C74w9UNRyttY2qkGbG1tYSNTfx5s7p4QAAAgAElEQVRdYmMLW1s7+I7YhSSr2ruA
++OWRaDDlOMS0od/XrEVF8szZOxJ1uqzHnUPi/a3y/YOA+OUdETr9lMlfq6wsDPVh3O+Iy6v1W6b4
+K21ZaY/5fnrm1NkSxGyuPP1VRl9D1h68Mna16fZfjv/JtqJo1YHy29R6vuTBn90Q9PxBlPVdr0qx
+MQMqu3+ZI8OtPWX3OVN0JgYHA1UAyjmPFd9txf1KNOiqQyGur/oUyy/nwag955zAwo+SMe6NSLiZ
+sQd27tnvsVI5B0djruHCXxMRrLdlr8Q9Cj/s+AaRep9olweMxm+Hj2K1CV9zrdqUo09rp3Nogimv
+OmLJO9tNm0TJ62LywQwoFAoUxC5GS48+2JqmgEJRiIR1PeBilswVpzL6WkXydGzzKXaviCrzi6HV
+yT+Yoo/HsawnkfL0Vyl269oKg1y3Y1+SidFAFfM/jztVzRdVNXkqm8r0WRwMPGoUMVjy0lwcifkN
+40Z/jxgFoMo4gU+faYswXxd4NJuEjfFKQMjEme9GoVntmqjpE4TWwxfhUJqAvGOT8VS3hVgwqiMa
+13aGe5OXsOGesnQ5ykRsn9MTDWvXgpd3CPrNP4A0FQBFLNZO64BALy94uLqjfr+PcSqrnFle9k4M
+9vRD1KBe6NG9A5o2G4TPTmepJ5XZezDctxWW3FEBUN89qh3xPeKUQN7p99CrdWdM25OKY292Q6uI
+Vuj+5vESUWzG4a/wt8c4DA4s/QkbidwF3rXcYKPfEfLP4P3ebdBp4r9IPDsf/dpEoMPoXxBbBAD5
+OPdhX7Rp2RxNI6fjcI6pjaJCyr4PMTC8Dvzr+MOvQQ/M2ZkMlbZ+3oEY8sKzGNgjAvUCu+D945q6
+K+9i3YttUT8wAIGBAQhu+yJ+uZYPQIW7S9vAM6Q9Wvp7oNGQcegV7gOv1vNwsaCc8sqgLH0q763C
+kHpt8NGFPAAK3FjcA3W7fYuYQtPawaiuAQBS1OoxAfUPf4ktida5PW2OvZei4AZWT24DP4+a8A/r
+iff2pqr1ZsyuU6+V7Gs5xm1fSNuH93oGwcPdHW7ezTFh9W0oAOMyJUSX6r86FNGY36QuJh7NA4RE
+/N5RBnmXNUgSgJz9z8G/8QKcPfo2ol7YgQyxLvew/AMIBdd/wbhWQfD18ESDvvNxME1lejsY0/eD
+O/ixQy1E/ZGi7h9FMfi4WQDG7s0S9Tt5pw30IeqfROTVYoLc2rLij5qiowzsGBmIyB/vqu2r4Bxm
+hYZi9vkCQJWMPfP6IszPF761gxA5bS1uFqh9X92ozVA/kMzHqemhaL/0LrIOT0Bot0WY1zcQAc/t
+Qym3VHQf/7zRGUG+fqjXdABmTGmGpu9dRmF5vtroeQVu/xiFoD4rcEcJ5J+fh4iQidiZni5eHyMY
+szmt/tJTjfQXQ7vNN66j3BK6+At/liuTK1oNdsX2/UkQADPsWwSRtjNE1CeYa/cm9aeHZ2ui+jNW
+TpZBmxaI1UWkb4rpurw2oByc+6Qr6nVeYGDvptlYKapg/9IhNk/TtpfWPxaVfZ2ozsqCmEeOKuEX
+6thwKh3PJco9OpF8bJvR/HM5JKiSadNQP2qz9A5lHp9Bzbt8QqfSi0gouE+7ZoRS3YmHKfnoRPKx
+a0Hzz2aToEyiTcP8KOLbWCoqWQIl/TmE6kTMp7M5KlIkbKZxgaE050IB5V+cT1F9PqXzOQKRIpYW
+t6lDo/dlU86h8VSv00q6dVD9b5Kgl13WDhrk7ES918STkgRK2zqK/Jt8RFcLiShrNw3ziaDFcUoi
+IsreM4J8W35Ht3UCZdKW3l4UuSaFBDIkl45NqUsNZ5+nfDN1mLmtH9XsYCCnBiF5FXXw6EGbMwzP
+5NOZ1xtQiEF5qsT1NLBeV/r2Si4JJFDuuXnUJPAFOpitqV8NNxrwRyKpSElxS1qRV69NlE5EpMqh
+O9cTKF8gIlLSvV+7kW/3tZQkKOnOklbkO2wHJV96jxp6DaZtydG0qGULWhBdWHZ55ddcRJ8qSts1
+hcKemk5b9r5PrYNH0Pr7ShPSmUDRDfqiqRcN3JZpVjJl3GJq6dGHtuolM9fec0oKQrFLIqlm6wV0
+JktFytQ99GpdkM/kY5QmYtf6fc247WdQ3JLW5DfsH0pRCZRzdhENHr6YYgqJckRkytLLsyT5dOaN
+EAqfe5kKMnfSs4G1yM1nJO3KzKfzb4VS6KxzlLJ/DAV2WUdxB8X728PwD/d/bkVS59700/UCEhR3
+aP2ztanOpEOUaFI7iOvy3s8dybfPn5QiEBVGL6CmwVPpaI7Y9dmUbaCPuAvGrssUl1frt46VL7e2
+rDtHTNERUebuMVS3/RKKUxLlHJ1Cwc0WUHShQGlbR5N/2Bt0IF1FQt5V+rqTD3VcEksP9oygwJ7/
+kNrt5NHJV0Ko3ZI7lHloPHnLG9CkNecoPk9lUIqKEtcPoDrtP6VLuQIpk7bQBB9Q8LuXKKMcX21c
+V9lEilj6MSqY+i/bSvPahtPLex+QIFqf0v1WzOYy9o+hwC6r6exi4/2l2G7FdZRhoAuTZMrYRS8/
+t5riVRrZTGg7ImP+R1yukumVoj7BXLsX8yGG/enh2JqY/m5Rkkg5hXq+yHhdDlKM0b55kOKM5ile
+VvKGrlRvzBY6+mVPqhf5AR3PKD1SmWJjhr6uqvUvfdnE5mlaGW7uG0OBXdbQ1Q3Gr0s3QWdi8BfE
+qyCOjcdhZLgTJFI5ght5QsjNRsymv3H1PztM6rEOEgBUkA9J8D3kCIBj+FiMbOwMicwWDRvXBOUq
+DJZ+5OLimmOoPfFrNHGSQubUF8sud4LC3g728rfw17KLOLZ9OT49uRdrr2QhsIhQ7rflHcPQv703
+ZJDAPaIfAuOW4GLW2wizq0jNi5AS8wAuwz0e6dKd3Itrse/eDdybGIV1EgBCPvJs6+B+tgA4A7AP
+RvcIT0ghg0eDurBJvY8sFeAuUSF17zxMGHkMqZADWTFIsemOB0rAEXK4+XnAwa02PD3S4eHoBl+n
+fJzNViH3TlnlWfrwTgqP7p9h1bB2aN0tG5P3nMXQ2jLrKEjuibruhVgVlwkVXFHRXM21dyedSrJx
+bkM0wmduQnMXKSSIxOSXGuOfu4B9YxG71sP4NVJ4NO+BOvNnYfTksxgycCh+XhUOd3khLpQhk3Hs
+ETIwEnkzD+F2n5uIDp2J/2Usx/bo2wjaW4TOXzaEvYmvXlREX+X7BwCQwafP6xhe3w4S1EG/10dg
+znP/YJfj/9m77/ia7v+B4697s5cEEWLGyBDEjog9S6wWCa0qqjZV2qp+Vbev+mmVNmq0iupAtb72
+3nvUiC1GxQghITu5yb3n90eCJO7KIuT9fDz6eFTO+nzen88597zP+ZxzzGkHw7F07zQY748Xse9+
+F7xWLCOt9yLqOxhpH+scETTYjvrKu4mrwQCpXDKr/2SJqRkxKuH/Fm1uT2Tt9RB85m3E7a3tVLdO
+5sj/9lPmzc00LalGRU1eH+vL56H/EPeR4Ta09RvJhN71cH/iOJvAid8O4zHyR2rZq1DZt2Fo72rs
+NxYLSxOxsvJgwNyJLKvThYWDdnKitQsqg/XRX94n+5wGXWa/KVW/A5X+m3N/gce7RTIXDMWodo5Y
+mFMm58a87DSGnXf60MPMttPPSLmGe1Dq0XwWBo4JQK76fSoXzOyXT6evZbbtE/GL5byh7bz8cClj
++5i+ffN/bEzRt869HPE0sK1ROu6tG0bQ8ju4fzANP6Pjhs1ty6K5f2UwfJ7GQfPm04TnJmbZyTCh
+IkhtVwK7bC2joNNaUvXN39l/9ChHjx7l8L4dbAjtShm1vvmfpEvXobK0QJW5vtSYSO4ma7m/bTT+
+rSawKsKRxq9/xIRmzpjVfRQt6Vrl4crRKQo6BVCBSgVK5iRFl475g0nUWDtakZ6Ulsvx7wXPxnsk
+v+/Yw549e9iz7yjhF1fS1/1hkNVYqjOjpFJnVFaB1LPf0O+zaAavPMyxo0fZPb8bZVTaR7FQW6hR
+qS1Qqy1Qo8ZCpUOrM2d7eZR+n/On7+NcJpmzp+5QYMMqFQ3JGrB2sC6QA0hu+/tjKtRqVZb+mvGA
+IChm9GvD8zg2+ZL94Vv5vGMp/v11ALUavMOeOJ2ZZcrOoU4wDe/8xZq1+7DrHES39hbsXr2GVTEB
+BNe2fyrxMuf4kJNKpUZtYYlKZ06dDcdSXe4lBvucZPGOIyxfoea1Ab7YmtU+xterv7xWWKhMx8O8
+mOrh1IjB7e+x+Jc/CN1dnRE9K2MJGceqbIVSgU73aL/PoCMlPuXRsdDCztng9rTp2mzrU6lVqEzG
+wvj09PgoHmBN4rXrj5NXA/XJbXz07y/Zj+DGYpQtFmaVyZnGrzixcVfGUKG89G9zymW6jrpc93uz
+++VT6mugP36mt2N+XR7um2qV/nU+PG/Qty115aFsODwD17nD+eGC8Ssn5rZlUdy/HtJ/nvbkGZSx
++XITs6wkGSgiFG2akTHittTo2oqkJdNZc12DLvEcP7/WiiEbos080bandnADIuYtJixRR9qdrUxo
++zLzLidwc9dOUrt9ydRxfWjuFsGuc3GPT/KNSTrNH+uuoiGdW1t+J7xiB+o6q8CyFB6O0ZyNTAVS
+uLLrBDHZKqbGtoQl8Tce8OTITluqN6/IvaPXntLbfSxwKmvHg/AbJGepsoNfH/xv/0jozrsZbZJ2
+j5O7jnHPxCB+XeI9Ukp4UdPNGpQHHP5jB7fNGL5qzvaSw2YyYtg0Dj4x/s9QPFM4M/NVJiZM5sCR
+2ZSbGcInB+KyJFnG2sGElAiO3HGjiU+JxwdLbRzXLl4hWlMQaZy5/d2R+iG+nJ4+n+MJCrqYvcz9
+MYxkNEb7dca+ZmieJI6O88J7wiW8e47hi28nERhzkLOx1kbLZHD/LdGQPjXD+Co0mqbNPfDu2Ihb
+3/0fp2v3ob6jvgX0K9zjA4CW2+tnsOJKKkraDdZ8sxzdSz1p39Oc9RqJt9qNDoN9OPTx+yyxH8ir
+ntbG5zd3vXrLG4SHdUHFQx8H6g/qRORnY9ntN4rO5dSAHT49Arj783wO3NehJJ1hyazT+AQ3xNna
+Ht2tk0QkK2ijdrFo400zEnIH/ILrcWn2L5xOUtBGbWfen5fRmIyZkenJJ/l68K80Wr6LiTGTeXtF
+ZGZf0lef3Eo2sL9kRDqj3xqOUYknMjsDZcpxfHFu/DKO63dh7nPE+plbLkN1TMllv89Nv3wafS1v
+ccloU2N10bdv9qJTsL51Nsf/ZUPbUlOqbivq1B3CnHc0TBu5mH/1/Fjlro8Vgf0rsy/fS815vDN0
+npbzhN7YfObFTB9JBooAtaMnjRxW0rfndxhK5JyaT+WPt5P4uKEbLu5t+LHiVBYOMJ1pZrCgfPB8
+fmi+kZ5VXSntPYyrA+bzvp8znm+8R+PNXanh3ZA2QzdSIag6B8ZOYK+pd1PZlsdt2xvUre5Bg0/V
+vLdwLDWtAdvaDP+wMVtDAmjbfQAzrznhmG0ciQONxg7FJbQttf3q0/q9A1keXLWicre+lD38N2dy
++6ZMPZQH2xjdqhGN20/kYMxOxrRuhH+HiRx6tG4rqr3+GS9fGoRPleo0GL6LBEBdthcLlw8kYnx9
+qnh4UMnDn2G/XiTJxHmuXf33+CxgE6+2eonuPQayILUaZcwY72TO9jT/rmHe/JVceKIQ+uKpELtn
+EiGzKzFz4Zt4Vn6Z0AUtWdlvNKvvaI0sZx7NlXVsVXXhVV+7R39TYtbyWt1OzL5UMPcfzOvvllR+
+fQHf1l9Jt8plqBTwJem92lBGbWOwX+9XPdzX5qHrq2+eT4gf9l/aHeyLR+mSlK79H9LHzKRPBQuD
+ZbI2tv+qStG0dw0epNWmQw077Gt1o3ZaLN7B/ma/EaLwjw8A9tTu5c2ant64l2vIp6nj+W1yA8qa
+tV7D8d6bqKZMu8HUvnKUcoN7kvFeAGPzm7te/eV9eK8l//EwEKU6fehYtjSdRrfBVQWgolTHb1nQ
+K4y3apaljEcQy2p/y+LB1XCuP4pRFeYS4F6dRn3+okrPOjiY3IIFFV+dz8x6K+hUqTw+Pf6gZIsa
+lHAqgbfRmBmK1Wi+n/wmv9adxZSXmjBq3jBuThzL/25rDdQnt+yoO17//vK4337P3db6Y6Tv8Kiv
+TE8cX5z9edlpA7ui8pMNGG677OUyVEf7XPb73PXLwu9ruY+LTZZj0S1/Q3XRt282pKLedVanrMk2
+sKHW2HkMujmZcX9HZrsokvs+9uz3LyVmLa/5dWDmE6/nNnSeZpuH+QzHzCCzny4Q4qG4TcrLZZsq
+iyKffCAp37S3lT971VJeXXMv9w+2iqcgXtk7qrbSevYVvQ/pCfEiSzr2H6VmtVHKgSceFi8oOiVm
+15fKuNknlASdoigpF5Tvmnkor23N3cP65ir8+uReUSzTsyBxKAxFYf/SKjeXjlVmX9AUyjbzSh4g
+FkWLuiw9Zk7lxubrJCqlcZR3gBctqZFcr/4f5r1ZNd9XWYV4fuiI+vMV6g4Np8MPm2iUt8c9zKDC
+uVZznKeG4D1dTUl7S8oFzWJRyxIFvJ2nVZ/cKIplehYkDoWnCOxfSWfZdKst3XoWrS8cqRQl5+MV
+QgghhBBCiOJAnhkQQgghhBCimJJkQAghhBBCiGJKkgEhhBBCCCGKKUkGhBBCCCGEKKYkGRBCCCGE
+EKKYkmSgCFASTrLw+3XcSIOkA8PwbLGQyDx8SyVx75tUa/UbUS/0+6F03Pq5BT5vHyHZzCWyxjfr
+/xcGQ+svqDYurPInbO9LpdbLiTbQdwyVOdfl0VwktIc3VZp/QViWz0wXVHxIOsSoql58cCIFYtfS
+0UqFytIKa2srLNS2lG04mIUXDH/fusDKYYCpOOepfQ3EVAghhDCHJANFgJJwgoWh67lZSCeoxV3W
++BZ2rA2tv6C2W1jlt28yna0LXzL7q7h5Lk9CGH/948dPGyaT9YOJhdYujo358VoKGk0aWs01ljTf
+xdi3fuW6gU8yFnb/MBXnPG3fQEyFEEIIc0gy8KxpLjJnyCfsu/gLA/vN5qIGtA8OMb13ADXdnShV
+bxh/30oHXSz/hL5GvfJlKFOuGv4hX7En2sAlS81Vlo5uRhVXV0qVKEmNrtM4EqeQuHcwPu2+4rMu
+VajcfwcJaTdZ/V4rqrlXoGrd7owfUY+6H58m1ZxtGVg2Juc2DK3LwN+TDgynVrup/Pe1FtQp70hJ
+vyGsuJGeo4IKKeGLGNi4Gu6lSuPZZQq7o7UkbO+Lx0triAUgmSNv+xD43TZCH8Y3eCzjBmSJdXIU
+2z7rQs0K7riXr0bz0Uu5nIJ5ZdAX43sXnmjLAm3jnOvRW34dt35uRZ2B/6F/QA3Kl3aj5svTOXA/
+e/vl7AtRR//DS29u4oHGQJ9AT5n/Pau/vgBaPWVLuMbi4RPZc3MdI/tM41SKgXoVxD6gj2VZmvft
+gPPVzfz0ShWa/3g94zPtKceZ4OPDxMNh5pVDX93M7DdJmXG+tV/PvFdyxDNFf32ztd2ri5idNaZx
++vd90m+zcVJHvMqXxdXNm65TdhGtJf8xFUII8fx71p9AFoqijVyktPAaqRxMVJTE/UOVctb1lCnH
+ExSdNkr5X68KSpO5EUrswfFK/db/pxyJSVN0KTeVLeN9FI+he5WELOtJ2DNIqdryV+XaySnKS0HT
+lRMJOkXRXFV+aFJJ6bcjXknYM0hxs/RUhv1xXLmVlKbcXt5dqRQ4XTmVqFPS76xTBpdDqT75lBJt
+cltaw8tm24ZWSTCwrjsG/h61f6hSzqaBMuVYvKJLv6P8L7iC0uj7q0palm3fXNBYUTt2Vn4KT1F0
+mghl+avllUrD9iiR2/ooVTquVh4oiqIoScrhMd5K0zkRiiZLfB/HWqdEr++nVKz5nrIrRqvoks4q
+s1qWU1rMuarEmiyDoiSH6Y9x1rYsjDY2Xf7LyrUFjRWVfQdl7oVkRae5rvz5WkWl2juHlKxFSsjR
+TvE7X1eqtP5DObNMf7vGGCizRm99Dcc2LfpPpU3l15Qd8YUTHyXxoDLSw1OZcDxZUR6sUTq4NFZ+
+vJGeUaqU68qaER6KU4sflVMbX1c8Auco19IVJWH/CKV6vf8q51PNKcc1JSof/SYjzsuUiH36503N
+sn2D+06OtlOyxFR/v4xV7vzVU6nUaIpyLEGraCLXKAOr+CiTTqYY3Ea2mAohhHihWT7rZEQ8yb7O
+QPrWdkCltqS6b2l0ifFc/N8qzp6zYViHZagAJSUZVfUbJOjAIcf9Hds6H7JyfhgHNv7M9MPbWXom
+jippCliCrd9IJvSuh7tlHJt+O4zHyB+pZa9CZd+Gob2rsZ9ULpncVgIn9C6buf1H20jlpN51XebE
+34a3YV/7DfrWcURlYY1XnTIoiRqyD7G2oFzQu4TUsEFFJbq+24dJ/TdxtUduopzMhf/tp8ybm2la
+Uo2Kmrw+1pfPQ/8hrq7pMhiM8VNqY6Plf90Ct5fe5VUvW1RUpMv4YP4zcANXNf7Uss5Sh0ftBAmA
+qXZ9sswa9F9DNlK2kKcVn0wJRxhSzYm3LRS0aWpcG73B3N/6U7uMJ21uT2Tt9RB85m3E7a3tVLd+
+cvEnyxHLuXz0m2zrNjqv8f0wa9tlfXhGf79MIOyPA5QfOgs/BzUWDl2Yf7olGlu4NDkPMRVCCPFC
+kWSgCFLblcAu2w+xgk5rSdU3f2f/f+tiA2hjr3Ipzo0yT/xgK9zfNpp2wy/RdtSbvPL6R1Q+uZfl
+mVMt7JwfrVubroUsY5dVahUqM7elf9mc2zC0rhIkfP9f/du4sV1P/Y1TqdSoLaywyDYOW0dKfIqB
+k9WHkSJbHUAFOh2Koq8Nsi95f9toWg7TH2Nz5K+NTZc/219ValS6dHQ5/p61LzxkrF1z0y6Gy2be
+QwkFER8g45mB0wd4q4JFjgmNGNz+HuN/+YMKu6szYmplLOGJ/qKvznnvN6bXnXUrBut7bZPetjPW
+L3XpOlSWFpnFVkiNieReyUp5i6kQQogXihzyiwhFm4aBZxoBW2p0bUXSkumsua5Bl3iOn19rxZAN
+0XpOdjXc3LWT1G5fMnVcH5q7RbDrXBzp2pzXJx3wC67Hpdm/cDpJQRu1nXl/XkZj1rYMLWtuuROp
+ZnZ99NFye/0MVlxJRUm7wZpvlqN7KYiqDvbobp0kIllBG7WLRRtv8vA5zKzxzfh/O3x6BHD35/kc
+uK9DSTrDklmn8QluSAmT56vGY2yoLQuqjU2XX0vUphksu5SKknaTdd8uQ9O6M1VtTNXL3HY1Va+8
+xbbg9gFzOFB/UCciPxvLbr9RdC73+FBovBz56Tfmydh+XuprqF/aUTu4ARHzFhOWqCPtzlYmtH2Z
+eZfVBRxTIYQQzyNJBooAtaMnjRxW0rfnd1wwcObl1Hwqf7ydxMcN3XBxb8OPFaeycEBlPbd2bPB8
+4z0ab+5KDe+GtBm6kQpB1TkwdgJ7E7POZ0HFV+czs94KOlUqj0+PPyjZogYlnGxwNrktw8vm7FCG
+yl3S7ProY0/tXt6s6emNe7mGfJo6nt8mN8C1/ihGVZhLgHt1GvX5iyo96+CQI77h1g///3vutv6W
+Bb3CeKtmWcp4BLGs9rcsHlwNK5PbNxzj/Sr9bVlQbfx4PcbKb4V7owpsCPHGvWxd/hM7ml+/CMDR
+ZL3Mb1fj9VJRqmPuYluw+4B57Ov0oWPZ0nQa3QZXlbnlyH3dciPr9m/557a+hvrlh1wJms8PzTfS
+s6orpb2HcXXAfN73sy3wmAohhHj+qBQl56ACUTwo3N/9X7443ZUvRtTFQXOR79u9xMHPTvJbuxKF
+uKwoXDpu/dySVke+5uScAOxztWzxatfk45No2DuWn0+FEpC7QAkhhBAvDLkAVGypcK7VHOepIXhP
+V1PS3pJyQbNY1NKck778LCuKruLSrjqi/nyFukPD6fDDJhpJIiCEEKIYkzsDQgghhBBCFFPyzIAQ
+QgghhBDFlCQDQgghhBBCFFOSDAghhBBCCFFMSTIghBBCCCFEMSXJgBBCCCGEEMWUJANCCCGEEEIU
+U5IMCCGEEEIIUUxJMiCEEEIIIUQxJcmAEEIIIYQQxZQkA0IIIYQQQhRTkgwIIYQQQghRTEkyIIQQ
+QgghRDElyYAQQgghhBDFlOWzLkBxoygKt2/f5v79+2g0mmddHCGEEEII8QJRqVRYWVnh5uaGq6ur
+yfklGXjKIiMjSUxMpGLFilhbWz/r4gghhBBCiBeIoihoNBoiIyNRqVSULl3a6PwyTOgpi4mJwd3d
+XRIBIYQQQghR4FQqFTY2NpQrV467d++anF+SgacsPT0dS0u5ISOEEEIIIQqPra0tKSkpJueTZEAI
+IYQQQohiSpIBIYQQQgghiilJBoQQQgghhCimJBkQQgghhBCimJJkoIhJiZpO1wVNGXo5BuVZF0YI
+IYQQQrzQinYykHST8XWm4WT3GSrVZ9g4fUXV/leJz/eKtZz7YjZWJWfTrO8RLmZ++0sXfZWPOofi
+6jAFe/tpeHXdzY67j0/JtTfP8larOZSz+oZhB9LyXQp9dOnXCLt3ggsp6c82GVCi2HH0Mz4/doAY
+yUqEEEIIIV5IRTsZsK/AjFMfELU3AA+3OmyKnMjVJVVxKpCVq6nUtwdbljbG69Er/62oPaQnZ2Im
+kXR/KB+pD9L33WuPkg+LCr78tCWYMTUL79Wg9uVDifggmV213J5t4+jusPPoV0w9LsmAEEIIIcSL
+qmgnA8bEXyXEbSY93/yLHh3mU7XKIj49mJqvq+nq0hXp27M8ZW0AG2eatXEi6VoCKWauNPnUYcaM
+2s+huDxsXHuBaT/aYTct479WZ6LQAZDE4c1VcZodSJ2ZpfFdPoC2s+ywnzucTck6UqIm4T3Njqp/
+9qbxt3bYfe3La8fPk5xRIsLPjKbl9w7YTbPDfeEg5kXFZ8ZIy43jTbGb5k6ntT2o/Y0ddtOq8lZE
+HJFhzbH72p//iwXuTaLOdDvsQt/hiKmbIRmPw0AAACAASURBVPE76FfJiz7D3iC4SyA+Xh358nBc
+xvbid9DPoxnzr2sBSNjZn6qBc4lI13LjxxaU92tN0+ru1O8zhO4Nq1Ch+RROpQDxWwkpX41uId3p
+EtQG/yYhzDyWsc6EfaPwqjOJk5mv0FXur6dv1QC+DTdRUO0VFq72p/I3GbF2+r4Fo8/9S8YNojiO
+HelLvZkZ00rMasTQ8Idtkde6G66DEEIIIcSz9PwmAwBpKWiDOrFyyxB2fZjG7M/+5UEBrVpJiOSn
+H5NoP8qDUirzltFcPc/sH85yPikPp3kWHrzZ8yDbg7rh/MREHek6H0Y19uLq1UPUbTmW6rGrWH4v
++dEct6Mr8mHIX0xwvcbKreNZk6Qj9e43BK9dwNnSE/ip+xRaxC/lnT8/5nC2c+UHHIzx5eOQfewM
+ns4rJWxw9VrIof6/0N8BKDGSZW/s52Df96llZUY90uPQdfqaZev2sOX9JOZO2UWsyYUUbOp8xJrV
+I9Hui2P0pi2M161lY0Tm+K20OCyC57N6/XY2fW5P6PA5XNCAY6OR9Lf6kzlH4gEddzaGcshnDH2r
+mSqompLug5jZezeHBq3jk5JhLNj0BYc0oCRvZuKOVdyuPIfdgw+xofNQ/O3MvBNkrO4G6iCEEEII
+8Sw935/CtS1F+0Z2qFFRytMFq3txxGmhpEU+16uJ5bfBK1jV5mV293bE3NU5d38DXZ4v99pQpnRd
+HFLdsNYzVeVQn0B3DU6qmwRUCeSs6geiNVqwyZju6TecrhW8aNqgPv+34Qx7HiTQJHINFyjLsDYT
+6OduQePYX1izaxMbH6TSpMzDpi9JSNuPCKnoADTI/JsnfmVTcLcErCrgW7Y+NcxNG22q0qZBKdRY
+ULJGFazu3SJOCy5GF7LEuXwpbJ3dKVXqPqXsnCnnkMKJhIy7CNh5ExRQBgtUuDQMonLEfE7HT8Cn
+tDcDRlSkzfc7iWlSh79/uEizSV0oZ6LBFG0cUXeXMWP/RCJSNJlX6MP5N01HC5vqNHKBPeETGKrp
+yks1ejPYo6R5WbOxuhusg5mZphBCCCFEIXi+7wygwlKdeTKlUoEC+R57kR7PX8OW8JlVOzbOrI5r
+kYmQKqOOmf+LoqDLUtn09DQUFNLS0w0vnyl7iOxxsSrISqqztIkaFCVjg5nFVzI3rujSsw29UVuo
+UaktUKstUKPGQqVD+3AGRUu6NnNBXTo6RclMuiyp+Mo7NDgeyl+7FvNzTHfebumC8dNrLTfOjmTs
+qUNY1/qJHW8eZLG3C6AlXVHAsj5fDjjBqk7jaGt7kV+29SJgzUqizOpXBuputA5CCCGEEM9OkTnV
+LRLSE1gzegkTk1uy+efaVDZnWEwWyWEHGTFsHwfz8MyAkh7JhbunOBNzjzQgMfYsp6POcDXV0Ml9
+dlePf8K3YaF8fPAkqGvRwsWRcpW74sUdftv1Nb+fC+XjIxfAoSOdXGxMr1Blh4sVEH+QXbfCOB1z
+m2STCxlhWYoqjjGcv50KpHB1Txj3TQ7Ez5R8luUb/kVDOpHblnGpQjvqlMg46Va7tuWdjleYNGgO
+qv5DqWdvamUKOl06YItHxVY0LBHLkcgsg8vSz7HkxB7SXLswJPBD+jlDUsxFYswtax7qIIQQQgjx
+rBTtYUJJNxnf5Fd+vJRCQgq85H6R8i/3IazA3iiUXeqZQ7w97y7X7NdR13UdAHb127JvaxNqmJEY
+aP69yLz5GgI+CyQglyd6yVFT6LBkAXcz//1gT2ea7AH/rueYZsbylaqUYO32ifyTXo2e7WfQzV6N
+jf27LA+6xZs7pjJ4tUIJ19583fVzmpiT5Kir8EqTvvyyZSmjf1sDDsPYPWwmjXOZID1i68uQ9xvS
+o18rOtX1prKzEw5mDz1yx23HYPy/v0GMyo9xi0fh82gslSMN3+qJy+8HGPpaDb1DrLKzpJLv//HB
+hTeYvqoK7k71qG+TNTlK4/a1mUzcfZkHOrB0CGBUuzeokd+hZ0brIIQQQgjxbKgURSmGgxW0nPti
+Pl1udeP0nIqYvJicleYeUxr9RsS8kcxrmvsz4xMnTuDj45Pr5QxJiZpE3YUzcGl/nn0NqxTx7C4P
+4rcSUvtLuh/ezutl9WUPGsJntaHDrvc4suIVyhTFe10m6yCEEEIIUfDOnz9PvXr1jM5TTM9MVFi5
+2KPbtIa2wYfNfquL9uYZBrVdwaIH9pS2kSEez1zCHkYH1KTdj5WY9FXnopkICCGEEEIUYcX0zsCz
+U9B3BoQQQgghhNBH7gwIIYQQQgghDJJkoIhJiZpO1wVNGXo5Rr5QK4QQQgghCpUkA0WMLv0aYfdO
+cCEl/dkmA0oUO45+xufHDhDzVAqSyqWlM/jlTJLZS6RfX8rgtgHUdXfEd3IYKQWwPXPWqdxbRtvy
+XVmf9dPK8TvoV7UlC29pc1WKwlAYcSlKTNev6NdBCCGEKCokGShi7MuHEvFBMrtquT3bxtHdYefR
+r5h6/OkmA4vPJpudBFlW6suC7TtZPKAquX+vk/7tmbNOlUsHQtd+QzPHXG/0qSiMuBQlputX9Osg
+hBBCFBWSDBQV2gtM+9EOu2kZ/7U6E5X5hd4kDm+uitPsQOrMLI3v8gG0nWWH/dzhbErWkRI1Ce9p
+dlT9szeNv7XD7mtfXjt+PvMDYcmEnxlNy+8dsJtmh/vCQcyLis88QdJy43hT7Ka502ltD2p/Y4fd
+tKq8FRFHZFhz7L725/9igXuTqDPdDrvQdziSZrQC3PixBeX9WtO0ujv1+wyhe8MqVGg+hVMpQPoN
+VoxoSS0vT7y8PPFtOYIlFzNKmXTsc7o378i4HdEcmtSZZoHNCJp0mETIeC1n+Wp0C+lOl6A2+DcJ
+YeaxODNO8tK4ueo92np7UK26BzUCh/FreIrp7RmVzMmpr9AisAn+bd9lX0LOEESxccJLNKjpQcVa
+r/DNkVjT5TRSv4R9o/CqM4mTmZe+lfvr6Vs1gG/D04pWXLRXWLjan8rfZPRdp+9bMPrcv2S8pCuO
+Y0f6Um9mxrQSsxoxNDwKk99wy2P98t62QgghRPEkyUBRYeHBmz0Psj2oG85PTNSRrvNhVGMvrl49
+RN2WY6keu4rl9x5/E/h2dEU+DPmLCa7XWLl1PGuSdKTe/YbgtQs4W3oCP3WfQov4pbzz58ccznZS
+/4CDMb58HLKPncHTeaWEDa5eCznU/xf6OwAlRrLsjf0c7Ps+tUxeZlawqfMRa1aPRLsvjtGbtjBe
+t5aNERpQlyTgvT/550I4Fy+eZ+uwa3w8bh13FbBv8DGr927m2zalaTJlA/v272P9FH8cHq42LQ6L
+4PmsXr+dTZ/bEzp8jsnXwSrR63l75GYCl5zi8uVzrA4+xftDlxCRbsb2DLKj7ocr2b3+S/z1fZwi
+NZLbgXM5cO4yRz6xYNbQWZxJNblSg/VzbDSS/lZ/MudIPKDjzsZQDvmMoW81qyIWFzUl3Qcxs/du
+Dg1axyclw1iw6QsOaUBJ3szEHau4XXkOuwcfYkPnofjbmfk1jDzUL+91EEIIIYonSQaKDBvKlK5L
+3dJuer+iq3KoT6B7dZxU7gRUCaS8KpFozePx6Z5+w+laoRPDG9QH3Rn2PEjgdsQaLlCW19tMoF/N
+0Xzp7w0Jm9j4IOsZaklC2n5ESKUGNKnWmy4uNljZeuJX1gd3S8C6Ar5l61PXtYIZH2ezxLl8KWyd
+3SlVqgKl7Jwp55BCVIIW0BK9cwq9mzUhILAFPb86yr3If7mfbkZo7LwJCiiDBSpcGgZROWILp+ON
+XyNOvryN086d6eXnhAo7PLv2osKFTZwvzGHktjUJ7uyBDRaUa9cXr1vbCXtg8hq44frZeDNgREW2
+fL+TGE0Ef/9wkWZju1DOwsRyRhRGXBRtHFF3l/Gf/7UnYGEXJt/UQGo4/6bpUFlVp5ELxIZPYOjW
+mayJq0grt5LmHXjyUD8hhBBC5I4kA88NFahUj/4XRUGXZdBEenoaCgpp6YbOrh9/JC376ZQ9LlYF
+1w3UFmpUagvUagvUqLFQ6dDqIPXcLAZOiWbg8j0c3L+frbODcFVpMesrF4qWdG3mjLp0dIqCzpzl
+VAb/8RRkbE+lMmO7ButnScVX3qHB8VD+2rWYn2O683ZLl8c1KRJx0XLj7EjGnjqEda2f2PHmQRZ7
+uwBa0hUFLOvz5YATrOo0jra2F/llWy8C1qwkqjDbXQghhBBmk2SgiFDSI7lw9xRnYu6RBiTGnuV0
+1Bmupppz6RyuHv+Eb8NC+fjgSVDXooWLI+Uqd8WLO/y262t+PxfKx0cugENHOrnYmF6hyg4XKyD+
+ILtuhXE65jbJJhcyTJcUTUoJT7zLWIMSy9Hlu7iTrWpqbJ0sSbj5gCdqnHyW5Rv+RUM6kduWcalC
+O+qUeHgSa4FjWTseXLpJSpYTRbtq7aj9YCMrzySgkMqVDX9z07Mj3o9ubxjZnoF1mpRyjr82R6BB
+S9TOZYRXaI+f8+OT7eRTobw9+hsOx+VYqZH6qV3b8k7HK0waNAdV/6HUszdvucKJiz4KOl06YItH
+xVY0LBHLkcgHjyenn2PJiT2kuXZhSOCH9HOGpJiLxJhxwyQv9ctbHYQQQojiS5KBIiI5agodfvan
+5bpVPADO7OlMk4WNGHgpxvTDlkClKiVYu30iy5Kq0bP9DLrZq7Ep8y7LgwbiFTWVwav/ww773nwd
+/DlNzHnFjLoKrzTpi4+yhtG/NaHx719x2ugDxMbZ1X2Hyf5bGNChG72Dh7AotSqu2crhQIPRg3Ge
+04mGjQPoOPHQ44c+bdxx2zEYf19vAr5UM27+KHwejaWyouqrk+l+ZSh+Xr4EjNlDAqByDWLW7Pbs
+fc0Pb29fuvzuw1fz+1PF0oztGVinEruDcR0CaRb0EYfv72b8S4E07/IRRx4OsbEuR+kdb9G0tjeN
+J2sYM28MvlnyLs21dfy0YBUXk3OcvRqtnyMN3+qJS6IXQ1+rkX0I2VOPiz6WVPL9Pz6o7My2VVVw
+X/ABJ62zJptp3L42k7d+b0L9hb2Zmx7AqNZvUMPC4ArzVb+81UEIIYQovlSKYtZADVFATpw4gY+P
+T4GtLyVqEnUXzsCl/Xn2NayCmY9mPj/itxJS+0u6H97O62VfwNzVZP00hM9qQ4dd73FkxSuUUZu7
+3HPuRa+fEEII8RScP3+eevXqGZ3nhTt3FOKFkbCH0e0HsjqhMZP/7vw4ERBCCCGEKCByZ+ApK+g7
+A0IIIYQQQuhjzp0BudYohBBCCCFEMSXJgBBCCCGEEMWUJANPmaWlJekGvwUghBBCCCFE/qWkpGBr
+a2tyPkkGnrJSpUoRGRlJWlo+3tMphBBCCCGEAampqdy+fZsyZcqYnFceIH7KFEXh9u3bREdHyx0C
+IYQQQghR4KysrChbtiyurq4m55VkQAghhBBCiGJKhgkJIYQQQghRTEkyIIQQQgghRDElyYAQQggh
+hBDFlCQDQgghhBBCFFOSDAghhBBCCFFMSTIghBBCCCFEMSXJgBBCCCGEEMWUJANCCCGEEEIUU5IM
+CCGEEEIIUUxZPusCFDeKonD79m3u37+PRqN51sURQgghhBAvEJVKhZWVFW5ubri6upqcX5KBpywy
+MpLExEQqVqyItbX1sy6OEEIIIYR4gSiKgkajITIyEpVKRenSpY3OL8OEnrKYmBjc3d0lERBCCCGE
+EAVOpVJhY2NDuXLluHv3rsn5JRl4ytLT07G0lBsyQgghhBCi8Nja2pKSkmJyPkkGhBBCCCGEKKYk
+GRBCCCGEEKKYkmRACCGEEEKIYkqSASGEEEIIIYopSQaEEEIIIYQopop2MpB0k/F1puFk9xkq1WfY
+OH1F1f5Xic/3irWc+2I2ViVn06zvES4+8e2vNP75aDYq1WymXtA+XurmWd5qNYdyVt8w7EBavkth
+TOyG7pRvs5S7ijlzp3Jp6Qx+OZNUqGV6FtKvL2Vw2wDqujviOzkM08/E596Tsc5rPE0tp+XW0hCa
+jt1NrFntmh/5KUsSx/fXx25WPabfz7lzGJiWHMYXLTvyXXjhfEjP3P3BnP6i3FtG2/JdWR+buzIY
+Xu7p739KxEKOd5tOYurjv2nDpnI0aDoJBncSDclrF3AnPNnsdRYK5S5LW5ej81rDDZDXNioakjl+
+qAcNf3TGblZp+kTk/xerKCm8Y7KG81Pr4eRenzb95/HoUJJ2hHd+ssNuthtuc9xwm1uXsbcSHy2l
+vfU3Izo0popTVUYfyrEPFvJxSYgXRdFOBuwrMOPUB0TtDcDDrQ6bIidydUlVnApk5Woq9e3BlqWN
+8crxyv+kf/bxwdGy1LTL/neLCr78tCWYMTWL2qtBM05GFp9NptDPMZ8yy0p9WbB9J4sHVMXqqW01
+r/E0sVzCEaZ/dZf+7wTirCqQghZKWZKiZjApqjY+lk8W0uA0u9oMHW3P/E83E/MMO6E5/UXl0oHQ
+td/QzDF36za8XNHY/yxqDaBC6fVE7Io2UI6MZOB2eGGf7edfXtuoaLCkXKW3mfPKWt5zsX3WhSlw
+hXtMtqRi8DzWLRmGZ9bfZQtfJve9QdSIKKKGn2RWeYfHk8r3ZM66XxnpY/Pk6orIcUmIoq5oJwPG
+xF8lxG0mPd/8ix4d5lO1yiI+PZia/x/jpEj+791Igr+qTcVcnvMnnzrMmFH7ORSXlw0nc+HnATSs
+XBkvv5YM/+Ma6Q8npd9gxYiW1PLyxMvLE9+WI1hyMePqXtKxz+nevCPjdkRzaFJnmgU2I2jSYRJN
+LFco4nfQr5IXfYa9QXCXQHy8OvLl4biMNonfSkj5anQL6U6XoDb4Nwlh5rG4fLZXGjdXvUdbbw+q
+VfegRuAwfg3PvE5ltO6GY20snuk3l9LHpwVfhSUBGi7P64J35x8ITzXRDpli93/PmpL96VHZ3I6V
+wsVFA2lQsRKefq14dUBd3B5dHddyb9dUghvWoHqN6lSr3YVPtt5Fa6IOJsuSdoIZu4/Ts3kwFXIe
+HYxNQ41bu4FU3/89G+7ozKyfMUb2ByN1N7XOk1NfoUVgE/zbvsu+hMfru/FjC8r7taZpdXfq9xlC
+94ZVqNB8CqdSjC1nKtYmynl/Je3LBvDR5Fdp17IZ/vUCGLM3y8qNsbDHurQTZM3JLNwpM7oZiXOW
+kZTjQqj2zPecDhnA5QMPiP9mEMd7B3Pqm5PZY6ZvnXmm5c76CbSvWQNv35p412zJB4cTn5wt/Tbr
+xgXg99oiMm5YGI61yWOIwXgaOE4kHeZt3wBmXs56l1dH1LKXqBaygVhz6/AEK9zLt8O/ZGlsCirp
+j99BP49mzL+e0WIJO/tTNXAuEelgPNbG+6Au9gihAwLwrOqJt48vzUev4Y4WwzEzKa8xKywFfVwS
+4sX0/CYDAGkpaIM6sXLLEHZ9mMbsz/7lQf5WyLFpG9jTvSODquY+NJqr55n9w1nOJ+X+FDc94leG
+fXSFgVvCuXjsd7rF3iDx4WrUJQl470/+uRDOxYvn2TrsGh+PW8ddBewbfMzqvZv5tk1pmkzZwL79
++1g/xR8HE8sZlsqZaa3xcHfHPet/FRsy/qAZB/X0OHSdvmbZuj1seT+JuVN28ehOf1ocFsHzWb1+
+O5s+tyd0+Bwu5OPurRK9nrdHbiZwySkuXz7H6uBTvD90ScYPpJG6G4u1sXhaVghh7g8NWDboYzbu
+ms7A70ox9edheNqYaAcAkriw7h/smzeitIV59dNe/50Rky4zcGs44ceX0SspkoddSxe1ipHDd9P2
+1zAuXbrE6d+bsWHUZA4m5KcsSZz4Zzz7qk/lDaecCYuxaRlUzn60dzvDqjAzT2iNMNZGxupunB11
+P1zJ7vVf4m+fc5qCTZ2PWLN6JNp9cYzetIXxurVsjNAYXc5YrM0qZ8JZ9rl9zNrd+zh8bCf/bWxp
+1v6nqtCHWouG45DtrqYKy4ZvUd7qL64fzD6+xqLWGGovX0z1pi44vbuQ+iv+pM67dcna/PrXmUdp
+11j22TJcZxzl/NlznD+2gjHe2a+SK5prLB/WmU+SJ7Bu0UA87cB4G2H6GPJEPB0MHyesvXjJ9x47
+zsSjS4kk7GQEiUoyF7dcw6OzH44G65DPY2RBMxJro31QecC2d0KYZfsRuy+Gc+HcPywdWhM7lYlj
+a57Kks+Yac/yzV/VcZ9bnaYbZnIg1fwT+4I8Lgnxoipq411yx7YU7RvZoUZFKU8XrO7FEaeFkmae
+bOWUdHwfo7bUYN720linRud6eefub6DL46XupAubCa/Qi6BqNmBZgZf618Mx9OFULdE7pzC8/yHu
+YQHxl7hn1Zb76VDG6H3avCxnQ60PdvLvB3mrBzZVadOgFGosKFmjClb3bhGnBRcAO2+CAspggQqX
+hkFUjpjP6fgJ+JTO2+Wz5MvbOO3cmY/8nFABnl17UWH6Js4nDaGyo+G62xiNtTFqSradyqKerWne
+OZ4hGw7ysru5nS2Nu+EPcOxVyuxb60nnN3KhQm+6VLcFi7K0fbUuDt9lTEs8tZxdNy9zY1Q3VqgA
+XTJJ1hW5laADR1OJrP6yJN2dwdiIDoT29MJaeyl7WYxMe8SyNFVcNPwREYeWEuRxN8zYnpE2yl/d
+DbHEuXwpbJ3dKVXqPqXsnCnnkMKJBNP3Gwwxq5wO9Rgc7I0dgNoWJxvyt/9ZVqHsSD9uzl5FcuAb
+2D2rI7ylKw2au/DV5KG8d6INAc07E9TcLcsMiRwY35ZdcU346XQPKpmbgJg6huiJZ9JJQ8eJN6nb
+qRJXtoVz12E2Qf1i+e6frwg/7UTrca5YWKYaqINF/tqooBmJtdE+qD7P//Y6ELyiPe5WAHZU9KsB
+QJKxY2uJvJQlHzGzrMl7ITeYVqI0lqlhLNjUmd7banIm6KWM3xWTyxfccUmIF9XznQygwlKd+SOg
+UoECeR93ouPOwauEn4+mc/WjoNNwKz6V3W1+QbuxPx/5FW6oVKhQWVhiocr4l9rS4tHd+tRzsxg4
+JZpJu/fQu6I1CbsHUmecFsVEXfO2XCqnp7aj07cXyXbR3sKdV//ez6ymDoYWzKTO0iZqUJTHbaJo
+Sdc+vLybjk5R8pw8PaLS/w9jdTcWa5PSH3DhzH1KuKZx7vQd0lo5o2ekqh5qrB0t0San5bKLqjLL
+9mQJrb2GsXjTu3jmeuCuvrKkExW5k0sx4fRY9CMoiURq4tj7V2e0L6+ir8Fp65jomnnFV9GQrAFr
+B+snbzmmJXA/ToutizN2Zvwam2qjvNfdMLWFGpXaArXaAjVqLFQ6tPkcWWCynBZ2ONtljVZ+9z8V
+ls1GUHbmaG4c74ln4wIadJ/L9kNVguZfH+HkmX3s3LmRXwf6892YvWx7x4uM835LKvZbyvcW/2HQ
+0EUE/j2YquYkBKaOIU/E82F59P3DgjJNWmKzeCfb1A/o+Yaaddv3EplQj/942IDKxkAdqnAxX21E
+HuKZ+ROXWVdFl86jrmkk1mCkDyYBKIaPRwaOrcbLaags+YiZypGKJTL7sY0ffesG8snOg9zQvoSL
+ObEzdlwSQgDP+zChAqWm6ohB3It5j5s3x3PzfE86OLnyyY43zE4EksMOMmLYPg7m4ZkBO5+X8I7a
+zj8xOlBiObHh3KMx3rqkaFJKeOJdxhqUWI4u38WdbLdr1dg6WZJw8wFZ/2x6OX1sqP3hXm5ERRGV
+9b/Ik+b9yBmTfJblG/5FQzqR25ZxqUI76pR4/COTfCqUt0d/w+G4nD9PFjiWtePBpZukZJlkV60d
+tR9sZOWZBBRSubLhb256dsTb3njdjcU6g/54QgpnQ9/go8QP2bVvFmVDX+eLQ1mfezC0HIAt1QIr
+cu+fCLPfvmHv0wnvm3+x/t9U0EayfenJR+V0qBNM49s/M3f3vYzxv2nRhO05QfSjC9m5LYslHn5b
+uTn8GlfeusKVNxbSztqHSb02MNHV0ci0LEM/Uq5zNKoMjb2dnjh1SDj4Nr4VWxF62bxxYcbayHTd
+9feXwqM/1qbLqU8B7H/WNSg/rBr3Z28kNdu21KgdLNHdyf2zOrltP3QJRIRH41irDcGjP+XzfuW4
+fvJ2lhNBGyo3bUzb//zC+MRPef3bMMx6F5OJY4g+xo4TNlXbUvfOT3x3qhFvDOxI5HezuFCtMzXt
+jdUh/22U63halqKKYwznb6cCKVzdE8b9h9mAkVgb7YN2PrzcLJEVc7ZlHhs13A2/RrzOeMwyGNjH
+CiFmWk04ZxKSMq71pf/LprOHoXQDypt7id/IcUkIkaFoJwOZrxZ1a36Qf6NO8ZJ7Qb1atHBo/r3I
+vPnnuJCHZwYsK73G7Ekl+LZHJ17uNYgfIm2wzjxy2dV9h8n+WxjQoRu9g4ewKLUqrtmu8jjQYPRg
+nOd0omHjADpOPESiWcs9ZTbuuO0YjL+vNwFfqhk3fxQ+Wa4Gaq6t46cFq7iYnDN+VlR9dTLdrwzF
+z8uXgDF7SABUrkHMmt2eva/54e3tS5ffffhqfn+qWBqvu7FYZ9AXT4XYfZ/w+tyKfD1/ADUqdWPm
+3OasHjiOtVFaI8s9rkOloGDcjq7inJlvoLSo9BpzvqzKwg41qdXoVf62d8fe0hI1oHZ7mfm/9ef6
+B03w8vamhndzRv8RzuOuV7BlMYfm6gZ2qDrTxyfHa7jQEnf5MimVO9K6gnnjQYy1kem66+8vSuwO
+xnUIpFnQRxy+v5vxLwXSvMtHHDERA9PL6Y+16XIWFjVWbUbhemcBN89lfWGAHY79Q7D4bSD/vNyL
+sP87YcZD15CX9kMbw54p3anr6YW3Zy2Ctwcy5aMmPHGfwtaHkYu+xHX263yyJxadqVibOIboY+w4
+gYMvnapGE1WnM7VqtKFFagSlOtbPeMOWuXV4QiqnDzen4vzmTLkfw+rV1Sj78wC2Pzrvz0M8bX0Z
+8n5DtvdrRafebxEa4YTDw19vI+U02gdVLrSb9SdvJ35BCx8fatdpTK9ZYSQqJmIGGNrH8h4zw9Lj
+1/H+0kqUnO2K6/xApqQN5bd2QZQy+IQZ+wAAIABJREFUc3nDxyUhxEMqRTE12ORFpOXcF/Ppcqsb
+p+dURN9zagZp7jGl0W9EzBvJvKa5P7M+ceIEPj4+uV7uuRe/lZDaX9L98HZeL1u0c9BCo4tiZb/O
+/K//FhYFlTLjKpVCWlIalvbWqEji5CfN6Z/6M4e/qke+X1iY67KYksCBca351GcF64Z5ZB9/qMSw
+qldzfuu/k6WvuBXxKxAvCi2pK97k5NY+1P8hCKv8BL2otN+LcgwpKvEscjScnxrIK5E/8M93/rn8
+Xb7AtGY9uB56jNAmWZc0clwSopg4f/489erVMzpPMT0OqbBysUe3aQ1tgw+b/UYb7c0zDGq7gkUP
+7CldYO+ME8WG2o2uX39O47tZ3hRlVBLHPmlP3boNadwwkLdOBfPDBL/8JwJ5KosJqbe5UW0CoQP0
+/OCqStHj77MslxOfp8gCm96L8Z+bz0QApP0KmsTTABVWzq7otoyk02tz9HwMVD/trRUM7dSfJQ9K
+Uyrn77Kx45IQ4pFiemfg2Sm2dwaEEEIIIcRTJXcGhBBCCCGEEAZJMiCEEEIIIUQxJcnAcyD9+lIG
+tw2grrsjvpPD9LyaMpVLS2fwy5mCeTWM6e0ZY6osWm4tDaHp2N3E5nOAWv7KmUfJYXzRsiPfhefj
+08lCCCGEEEWEJAPPActKfVmwfSeLB1Q18PXajBPwxWeT8/7NtVxtzxgTZUk4wvSv7tL/ncCM1/c9
+s3LmkV1tho62Z/6nm4mRp22EEEII8ZyTZKCoiN9KSPlqdAvpTpegNvg3CWHmMdMfB0o69jndm3dk
+3I5oDk3qTLPAZgRNOpzjI1oFtz1I4+aq92jr7UG16h7UCBzGr+EpZpcldv/3rCnZnx6VH7/bIXbr
+G1QPmMXltIx/6+4sJcgjiKW3dZB+gxUjWlLLyxMvL098W45gycWs7043VL8d9PNoxvzrGW9ST9jZ
+n6qBc4lIB9Byb9dUghvWoHqN6lSr3YVPtt7NfOe6ljvrJ9C+Zg28fWviXbMlHxzOWgM1bu0GUn3/
+92y4k8/P0wohhBBCPGOSDBQlaXFYBM9n9frtbPrcntDhc0y+9tS+wces3ruZb9uUpsmUDezbv4/1
+U/wx6zuYedieEr2et0duJnDJKS5fPsfq4FO8P3QJEenmlCWJC+v+wb55I0pn+Xqkc5MhtIv5jb+v
+aAAttzct4HyDobQvqwZ1SQLe+5N/LoRz8eJ5tg67xsfj1nE3H1fldVGrGDl8N21/DePSpUuc/r0Z
+G0ZN5mACkHaNZZ8tw3XGUc6fPcf5YysY4539ZZ4qZz/au51hVVhC3gshhBBCCFEESDJQlNh5ExRQ
+BgtUuDQMonLEFk7HF+JYlDxsL/nyNk47d6aXnxMq7PDs2osKFzZx3qzHFdK4G/4AR49S2Yf1ODVg
+cKd4lv55CY32FusXXqXJkNaUVgFoid45hd7NmhAQ2IKeXx3lXuS/3E/Pa6Uh8dRydt28zJJR3Wjf
+rj3dxq4lyTqGWwk6sHSlQXMXdk8eyntT57DiaBIlS+T47r1laaq4aLgREWfmF1yFEEIIIYom+Q5H
+UaJoSddmnozr0tEpCrrCHJee1+2pDP7DBDXWjpZok9NyDEdywG9QdzSDlnK6dzkW32zJ581cUAGp
+52YxcEo0k3bvoXdFaxJ2D6TOOC0mv46hApWKR/MpunSyDuqx9hrG4k3v4vnEwwYlaP71EU6e2cfO
+nRv5daA/343Zy7Z3vLB+OIuiIVkD1g7Wkk0LIYQQ4rkm5zJFSfJZlm/4Fw3pRG5bxqUK7ahT4uHJ
+tgWOZe14cOkmKU+cCKuxdbIk4eYD9F0wTz4Vytujv+FwXI4F87A9u2rtqP1gIyvPJKCQypUNf3PT
+syPej74Ab6wstlQLrMi9fyKeePOPne/r9FQt57///ZF7Hd6icYmMv+uSokkp4Yl3GWtQYjm6fBd3
+sq3YQFwsS1HFMYbzt1OBFK7uCeN+ZjbgUCeYxrd/Zu7uexlX9tOiCdtzgmgtoEsgIjwax1ptCB79
+KZ/3K8f1k7fJNnoq5TpHo8rQ2NspV6mQEEIIIURRI8lAUWLjjtuOwfj7ehPwpZpx80fh8+hytBVV
+X51M9ytD8fPyJWDMHh6PWHegwejBOM/pRMPGAXSceCjbQ7uaa+v4acEqLibnSAbysD2VaxCzZrdn
+72t+eHv70uV3H76a358qj+4xGSuLFZWCgnE7uopzOYcV2Xjzal9b1v0ZR5dB9XHM/LNd3XeY7L+F
+AR260Tt4CItSq+Ka7Wq+gbjY+jLk/YZs79eKTr3fIjTCCYfM3q52e5n5v/Xn+gdN8PL2poZ3c0b/
+EU6SAmhj2DOlO3U9vfD2rEXw9kCmfNTkUXkANFc3sEPVmT4+dqZaVAghhBCiSFMpiskBF6IAnThx
+Ah8fnycnxG8lpPaXdD+8ndfLPoUc7Wlv7yFdFCv7deZ//bewKKjUc3hlPYED41rzqc8K1g3zkHF2
+QgghhCiyzp8/T7169YzOI3cGxNOldqPr15/T+O4NEp/HNDT1NjeqTSB0gCQCQgghhHj+yZ2Bp8zg
+nQEhhBBCCCEKkNwZEEIIIYQQQhgkyYAQQgghhBDFlCQDQgghhBBCFFOSDDwH0q8vZXDbAOq6O+I7
+OeyJd/RDKpeWzuCXM2Z9BviZ0t3fxYetPanh64d/yAKupD2eptxbRtvyXVkfm9u1arm1NISmY3cT
+m+MJGCViIce7TScxNS+l1ZK6YiCHh68nLcsXy5TYQ1x5rS2HXwri2JjlpGT97sH9dZxs+hbR8XnZ
+3vPNYKyTw/iiZUe+C9foXU4IIYQQz468EOU5YFmpLwu2v8yJiY0ZoHeOjGRgscUA+teyL9Kv60w8
+Ppc/0j9g76m3qGiRfZrKpQOha+tRwVH/sgYlHGH6V3fpvzIQ54KsfPI5bi64g+v0NlhlSZu1Z38n
+Kn0Y9df3wSZHHSjRjBo/1cTWHvGQXW2Gjran3aebef3XrpQqyh1UCCGEKGbkzkBREb+VkPLV6BbS
+nS5BbfBvEsLMY3GYetVT0rHP6d68I+N2RHNoUmeaBTYjaNLhbB8d00/LnfUTaF+zBt6+NfGu2ZIP
+DmculX6DFSNaUsvLEy8vT3xbjmDJxWQAYre+QfWAWVzOvKKvu7OUII8glt7WAVru7ZpKcMMaVK9R
+nWq1u/DJ1rsZX/lNPs6XPVrQYdQG7pyYRs8WgbQZuIR/0wCSOTn1FVoENsG/7bvsS8hazhQuLhpI
+g4qV8PRrxasD6uLWZil3swQmdv/3rCnZnx6V9eS2FvZYl3Yi9xmSjrQdodxzG0yFmpkfF0s5y7Vh
+fTj1yS7Szs3jTJ/enHx/ZeadgRQS5wznRO+eHO/3Xx5ku0mTStJf7/NPYDMOB73K2fe6cOC1taQp
+QOJBzrUMJjJSm9Eqh97lUO/fSdU+rNxmTvr34uqMsZzsG8yxrr24dDQJ0JJ2aA5nu7fhUJvWHOo0
+mH/3xZjsLyTu42zT1pweM5RTb77GsZ5juHkmM+CJBznXrB1nJ73H2cG9OdJuANdOPmwMI3UwGWs1
+bu0GUn3/92y4o8s5UQghhBDPkCQDRUlaHBbB81m9fjubPrcndPgcLpgYWWHf4GNW793Mt21K02TK
+Bvbt38f6Kf44mNzWNZZ9tgzXGUc5f/Yc54+tYIy3bcY0dUkC3vuTfy6Ec/HiebYOu8bH49ZxVwHn
+JkNoF/Mbf1/RAFpub1rA+QZDaV9WjS5qFSOH76btr2FcunSJ0783Y8OoyRxMAOzq89GqPWye1Q5X
+/y9Yt28/Oxb1x8MKwI66H65k9/ov8c9xRV17/XdGTLrMwK3hhB9fRq+kyIwvBT+SxIV1/2DfvBGl
+c16lB1QV+lBr0XAcrJ+cZpTmErfmXabkqE6Pr/7b+lJl3jLqTA7Eym88df5cQd3pr2BrCWCLw4i5
+1Pv5XZxyfJhYiVxN+DcRlFuyHf8131EmJQotuZAUTpzrGOos/ZMGq3/Hw88OJXor4ZMP4zJjPU12
+7KDRzIbEfDKDOHNGiqUloOo8hdo//4bfO7b/z959h0dVpn0c/86ZkkJCIJSIAgICARUEV6TaUQFX
+7Ky+CroqYF1F17YWLNgVdQErltV1X0VdX0VULAh2AQvSVaRISaGlJ5OZOe8fh5M5GQIECJkzye9z
+XbmSmTkzOYdJwn0/z30/D+tue5VSu1wrXAzH3EL351+n5+gy1j/5HaFaXsPO/q09GT0Z3Hox7/xc
+vP2DIiIiEjdKBtwkJZth/VrhxUOzPw2j/ZqPWVS0j7aB8LXk8EHN+Pz2Mfz9/qd4c34pzZvaUW+Y
+TbPv5eyBfek34CjOfGA+GzesYksISD+cS4YU8dobvxEMr+f9F1fSd/SxtPBAycJpzFm3gleuPJXB
+Jwzm1GveozSwmfXFez4aXLrsQ5YfcDanHJQM3iyOP++wmESnkvxft5LWIRP/Hn+XWCahr58mN3kk
+bQ/f3Zql7YVXfE5p1lAyD0wCb0uan9qdGvKWHUs5mP2GdrJ+WY0kfAEP4eUzKMhZQ+740Sy4YCSL
+7p5FxF9AsKQW/9bJHcnsnYkHD74ex5G0/ktK7B3gAu1odkgzPHjxdTgAY3Me4XAdXIOvBQc2C7J2
+TeHuJUIiIiKyT6lnwE3MMKHwtqAsEiJimkT21ZZwnqYMemQeCxZ/xezZH/Lvi47kn1d/yafXdsVc
++gQX3buJWz//grPbBij+/CJ6jAtjbU/XhJ5/HU7wr6+x6Oz9+Ne6o7l7YLOqypBA17H8a+b1dKm7
+yBzwbHv9mmp9DAJpPsJllbsukamt0Bpypyyg6dXjSamz35AdXIPH+rC3/jMjoe2vw5uML3n7vN3T
+8Ty6/evS3T9HM4JpR+SREKZpEv2mHjz2t/IY0RPb2TXU6nsGKQtCoElAIxAiIiIuov+X3aRsCdM+
+WEWQEBs+fZ3fDjiBHk3twMtLWlYKW39bR/l20aJBcrqP4nVbCcU+BJQtnMzfrnqUuYWOJ0aKWfPr
+JtIOOY5zrrqTu8/fjz8W5BAEIqWbKG/ahexWATALmD9tDrmOF045+ALO9EzjvvueY+OJl9KnqXV/
+kx7n0CfnBZ7+fKM1+lu5iZ+/+IlNezEUnNptCNnr3uL9VRUQ3sCs1xbE9EMk02lAWzZ+v6aGVZZ2
+LrL8ZX67cypFxc5/UJPQ98+xvvIs2vXL2PMTd/AedDSpuR+yaW0QwnlseW9pdHTcm0FyagFl+UGg
+gvJ5ywjVIqvxZg8jPf8N1s/dYsXxoa0Uz1tCyPFvXfP1ARW/kjdnLRHCBL+eQdl+A2iStvMAf6fX
+UBvlfzA/rxV9stNd3eAuIiLS2CgZcJOkNrT+7BKOPDibfhMMxj17Jd2q6q/9dDzvdob/PoaeXQ+m
+39VfEK2+bsLhV11CxlND+FOffpx083fVAubg6hlMff4dfilzBIXhzXxx73AO69KV7C6HcM6sAdx7
+W1/SgJTDruX2Iz/mwhNP5exzRvNSRUdaOkf6k7I579xkZrxRyCl/7Y1dSGO0Pp1nXx3JHzf1pWt2
+Np2zB3HV//4aU+O/PbPgM8adOICBw25j7pbPue7kAQw65TbmlYK33f/w1ISOvHhidw454jz+m9qG
+VJ/P8YPrp92wc2g9/x2W7ubKqpF1n7Fh2ieUOrOr8Abyp3xFk8v+Quru9BkUfcuKUWfz48WPUlQw
+l98vOpsfL5lIURl42gyny3VtyR11EvNOv5aNya3xer1WUJzUhTZjDmXruPNYeOUtrF+fhrcW0bKn
+xYl0fewMKh48g7mDBzN38Ah+m76qWoBe4/UBBFoT+PZmfjz5BH6Y7KHdvSNJ3cVMzk6voRaCKz/g
+M89Q/tItZdcHi4iISL3xmKa5rwpRpAY//fQT3bp12/6Bok8YcegEhs+dxQVZytGiTCpLK/GlBvBQ
+yoLxgxhZ8QJzH+hFsn1IJI+3zx/K/438mJeGZe7VyHP45wf48WaDbm/dSFqdxa0mZlkIT4ofKKPk
+8b+wNPgAh994cP1n4yVfsWTIFFq8/W+yWu7Od9+bayjmm3HHcme3N5kxtoNqE0VEROrJsmXL6NWr
+106P0f/L4nKl/DB+KKM/KiHJF4YD/8KTU3tGEwEAozV/fuRu1n2ylhIzk11UvOyUt+fNHPH+3p5z
+rDKKn7iY5V+UYngjcMApdLm/W4JNy+3FNVTksLbTjUy+UImAiIiI22hmoJ7tcGZARERERKQO1WZm
+ILEGJ0VEREREpM4oGRARERERaaSUDIiIiIiINFJKBhJA6I/XuOT4fhzWJo2Db/+5hvX0K/jttYm8
+vHg319bckaLPOL/DQJ79Y1/sFRtm/Wsj6H/N5xQ4u1XKfuaeo0/in78G98H3FBEREZGaKBlIAL52
+5/L8rNn868KO1LwcvJUM/GtJWd3twruvFM/j4QfyGXntADKcq/6kHMqYq1J59s6P2Oz6ixARERFp
+GJQMuEXRJ4zYvxOnjhjOKcOO48i+I3j8h8JdBvelP9zN8EEnMe6zTXx361AGDhjIsFvnxuzSW5Mw
+ue/fyODunck+uDvZ3Y/mprmOZ5mFzH14BP27tSGz3WAmzLXPpZJ17/yd47M70OmgDnQeMJZ//1oO
+pXP528H9eHxFpeN7RMh7/WQ6jfiAgm33FHw9ienNR3Ja+9hFJg1an3ARB309iQ9yI7s8exERERHZ
+e0oG3KSyEO85z/Lu+7OYeXcqky97iuW7qJpJPfwO3v3yIx47rgV97/2Ar77+ivfvPZImu/xeq3n9
+rtdpOXE+y5YsZdkPb3J1tmP1/oq1rD70YeYsXcmXV29hyr1zKADMTe/ztys+YsArC1mxYinvnrOQ
+G8a8wppAV04+eCOfLS4iUr6BnxesocQs45ePV9NhaM9tuxSXsnzG96QOOoIW3u1PyZPRk8GtF/PO
+z8XbPygiIiIidU7JgJukZDOsXyu8eGj2p2G0X/Mxi4r2Uc2MryWHD2rG57eP4e/3P8Wb80tp3tQR
+oSdnc9qJ7Ql4kml3RDZJG9dTGIayFZ+yKGMoZ/VMx0MKXf58Fgcsn8my0nQOG9KO3z/9lfyvbmLY
+yVczc/0avlqUzrFHtsR65Uryf91KWofMmsudfC04sFmQtWsK2RfdCiIiIiJSnZIBNzHDhMLbgv9I
+iIhpEtlX9fOepgx6ZB4LXrqcfumr+PdFRzLkiV+ITkR4Cfison6P4cVjmlTVLFXb4ddTdXyrvkeT
+9MNsPn1vK2eO8jBj1pfMKe7F8R2Sth1jEEjzES6rrLn8yQxSFoRAk4B+MEVERETqgWIuNylbwrQP
+VhEkxIZPX+e3A06gR9NosJ2WlcLW39ZRvl0kbZCc7qN43VZCNb3swsn87apHmVvoeGKkmDW/biLt
+kOM456o7ufv8/fhjQQ67WssnpdMJHLr1Q95eXIxJBb9/8F/WdTmJ7FRI6ng8h+VO5Z8Lj2DURSex
+4Z9PsLzTULqn2s9OptOAtmz8fk0NKyIB5X8wP68VfbLTq+cbIiIiIrJPKBlwk6Q2tP7sEo48OJt+
+EwzGPXsl3QL2g346nnc7w38fQ8+uB9Pv6i+IVtY34fCrLiHjqSH8qU8/Trr5u2oNxMHVM5j6/Dv8
+UuZIBsKb+eLe4RzWpSvZXQ7hnFkDuPe2vttq+3fM03IYT0wZzJf/05Ps7IM55T/deODZkRzoA5oc
+zJCOm8jrMZRDOh/HURVryDypt2PVID/thp1D6/nvsLSGVVCDKz/gM89Q/tItZXf/5URERERkD3hM
+09RCjvXop59+olu3bts/UPQJIw6dwPC5s7ggqwHnaJE83j5/KP838mNeGpbpmAEo5ptxx3JntzeZ
+MbYDsWsNiYiIiMjuWbZsGb169drpMQ046hRXMlrz50fupk/+WkqcaWhFDms73cjkC5UIiIiIiNQX
+zQzUsx3ODIiIiIiI1CHNDIiIiIiIyA4pGRARERERaaSUDNQzn89HKFTTAqAiIiIiInWjvLyc5OTk
+XR6nZKCeZWZmsmHDBiorK+N9KiIiIiLSAFVUVJCTk0OrVq12eawaiOuZaZrk5OSwadMmzRCIiIiI
+SJ3z+/1kZWXRsmXLXR6rZEBEREREpJFSmZCIiIiISCOlZEBEREREpJFSMiAiIiIi0kgpGRARERER
+aaSUDIiIiIiINFJKBkREREREGilfvE9Adszeh8Dns96mSCRCJBLB4/Hg9XoBa9+CcDhc7T4RERER
+kdrQPgMJwjRNPB5P1e1IJIJhGDs9RkRERERkZ1Qm5FL5+fk899xzTJs2rWrkf/ny5fzzn/9k9uzZ
+VYnAhg0bmDRpEjNnzlQiICIiIiK7RcmAy9gTNfn5+SxdupR77rmHUChEbm4ukydPZuvWrbz66qvM
+nz8fgNtvv52ysjKmTZvG9OnTAQiHw3E7fxERERFJHOoZcBl7dL979+5MnDiRc889l6SkJObMmUOz
+Zs244447eO+993jjjTfIyMggEAhw44038vHHH/PRRx9x6qmnVs0a2D0GIiK1ZfcoiYhI46C/+i7l
+8XgoLy+ntLQUgE2bNpGcnEw4HCY9PZ2CggLWr19P+/btCYVCpKen4/P5KCsrIyUlBQDDMLbrKxCR
+hiESsT4Mw/oQERHZE0oGXMo0TSKRSNVne8bAMAxM08QwDHw+H+FwGMMwqkqDfD5fVSPxkiVLmDt3
+LoZhaIZARHYpMzOT4cOHx/s0RESkHikZcCmPx0NqaiqBQADDMOjevTv//e9/qxqJDzroIA455BCe
+eOIJDMPgt99+w+/34/f7KS8vJzk5malTp5KUlESHDh0axEpD9jV8+eWX9O3bF7/fH+9TqjOlpaX8
++OOPDBw4sEG9V2vWrKGwsJBDDz20QVyXvYrXDz/8wAEHHEBWVla9X5dpWjMBq1bBjBnQp4/1YZqw
+p6dhX8Pll1/OqaeemvDvk4iI1J6SAZexg43Fixczfvx4vvnmG0aMGMGECRM48MADGTx4MJ07d+bB
+Bx8kIyODgQMHMmzYMFq1asX48eOBaN9BUlISo0aNonv37vG8pDrXsmVLzjrrrHifRp17++23OeOM
+M+J9GnVq7dq1bNiwgT59+sT7VOrUZ599xuGHH05GRkbczuGGG+C002DsWDjggLp5zZdffrlBJG0i
+IlJ7SgZcxq7xz87O5tlnnyUQCFBZWUlGRgbjxo3joosuIi0tjUAgAMA111zDyJEjSU1NJTk5udpr
+hEIh1q9fT/fu3SkrK0v4TcnsIGXLli0UFBSQkpKS8IGLff5bt25ly5YtBINBwuFwg3mvNmzYQF5e
+XoO5LvsaNm7cyLp16+rtZ9A0rY/iYpg0CYJB+OYbeO012G8/qKjY81kBiA5CBIPBhP59EhGR3adk
+wKV8Ph+ZmZnb3V+b+5z7yNkrgwQCgYQPxOyg6+STTyYtLQ2v19tgkoFmzZpx4oknEggEatxQLtHY
+19W1a1c6dOjQYK7LvoZBgwaRnp5OIBCo859Bu9zHuR2kXRo0bRp8/TWMGgWnnw5t21qPBQJ1kwxY
+3yuxf6dERGT3KBmQhGEHKO3atdvuvkRln38gEKi6rkQPmCF6Xc4ymoZwXfY1tGnTpuq+fZEIWK8b
+vd/jgTvvhPfegxdegJ49o8fbj+9Nz4CIiDReSgYkYdgjlvbqSh6PJ+FHMe3zN02zapWoRL8mqH5d
+QIN4r2Df/Aw6ZwKc+wX6fLB1q/X10qVWWdA//wk9ekBlZXRJUSUCIiKyN5QMSMJwLq8ae1+iss/f
+Diyd9yWymq6lIV1XXf4MOmcCnPt93XUXvPEGZGZCQQFccw0MGGDtLWAfV9MsgoiIyO5QMiAiUs+c
+5T2RiHU7GIQ5c6CoyLr95pvw/vvQvn30eZGI9ZzYngIREZE9pWRARKSexNb42+U9hmHNAvzv/1r9
+AEVFcO211kpBoZD1HLskSGVBIiJSl5QMiIjUI48n2hvg9cLKlfDhh/DMM/D009CvX/XjnTMASgRE
+RKSuKRkQEdnH7ADeDubtVX5NE6ZOhV9+gdtugz/9KfocZ0mQkxIBERGpS0oGRET2IedIfjhsBfmb
+N1tLhK5cCRs3wpNPWiVB4XD1UiJnb4CSABER2ReUDIiI1LHYAD4cjgb3fj/85z/w448wfDhkZ1uJ
+QCRS81KhSgJERGRfUjIgIlLHYjcMs8uCFi6Eiy+29g+YNg1697buD4ejx8Q+X0REZF9SMiAiUsec
+MwMlJbB6tbV06OTJcM45cNllkJJSffMwlQOJiEg8KBkQEakDNS0bahjWcqFPPGE1B1dWwqWXQtOm
+0bIgERGReFIyICJSR2pq+F2/HkaOhBtvjPYC2IkCaEZARETiS+NSIiJ7wbkPgP21Xfrj8UAgAD6f
+NRMQDFZPCKDm5UNFRETqi2YGRET2QuyyoX4/zJ4NX38N7drBH39A167RoF8bh4mIiJsoGRAR2U3O
+2QCbb9tf04ICmDgROneGn3+GBQugV6/qMwBKBERExC1UJiQispucgb09yv/VV/DXv8KwYdZMwMSJ
+cPTR0eNFRETcSDMDIiK14Kzxj0SsD1swCPffD8ccAxdeCN27W2VDRUUqCRIREXdTMiAiUgvOmQC7
+QRhg6lR47DFo3x5uuCF6vGlGG4e1hKiIiLiVkgERkZ1wrv4TiVg7Ba9eDb//Dhs3wttvw3PPwYAB
+EApFkwZ7R+Ga+gtERETcQsmAiEgNnOU9zkSgvBwmTLA+p6bC4MHRRMCeAXA2CqtMSERE3EzJgIhI
+DGcAH4lYXxsGzJsHr71mBf6PPw5paZCUZB1vzwTEvo6SARERcTNVsoqIxLBH9O0g3zCskqAxY6w+
+gMsugxYtoomA/ZzYoN9uNlYyICIibqWZARFp1GJ3EA6Ho7d9Pvj8c5g5E1asgNNOgzvvtB5zBvnO
+HYVjKRkQERE3UzIgIo3ajlYJsu978knIyoIhQ6wPe0nRmmYB7Oc4n68yIRERcTMlAwnGNE0i26IR
+wzDweDyEtw1l2rdFpPacZT7NxBNzAAAgAElEQVSzZsEtt1i9AB6PVRp06KHwxBN79to7mzEQERFx
+AyUDCcbj8eB1dCpGIpFqt+1jRKRmzqVCw+Ho7YoKePZZuPxyOPFEKCmx+gVat7aOC4etsiHD2L3R
+fu0zICIibqZkIMHk5uYyc+ZMkpKSOPnkk2nWrBkzZ85k/fr1DBs2jKysrKqZAxHZnrPO3+f4C/jo
+o9ZOwhdeuH2gbzcSO2cRavu9NDMgIiJupmQggYTDYW699Va6devG2rVrWbNmDYcffjjTpk2jefPm
+rFixgptuuomUlJSq55jbIhHTNDFNE4/HU/VZpDGyR/XLy+Gbb6C01Bq5f/ll+OAD67FwuPpo/p4G
+9W7dZ8D5t8C+bX+IiEjjosnrBGIYBjk5OVx//fWce+65lJaW8uyzz3LdddfxyCOPkJeXx08//YTP
+MdwZCAQA8Pl8VQmAEgFpTOz41m7mtW/PmgX33ANffGElAWPHQvv20UZiqL5caE1Lh9b2e7vtV875
+t8AuPfR4PBiqZxIRaXQ0M5BAPB4Pp512Gn379iUcDvPEE0+wcOFCUlJSCIfDeL1etm7dWu0569at
+Iy8vj4qKCrKysggEApoZkEbF/lEPh636fXupz59+guHD4corrceSk6v3E9TFr4hb9xlwzgxs2bKF
+wsJC/H4/wWAw3qcmIiL1TMlAAsnNzeXTTz/l888/Z/ny5bzxxhusWbOGpKSkqpWEYoP8n3/+GcMw
+ME2T448/nszMTCUD0qA5A3onX8xfu5wcOOUU8PutD6e6+vVwa5mQ0/Lly1m+fDmpqamUlpbG+3RE
+RKSeKRlIAHbwXlpaSjgcJjk5mZYtW1JaWsqBBx7Ib7/9xv777095eTnt2rWr9tzhw4fTp0+favep
+FEAaMmfg7QzGP/gAXnnF+towYPZsuOGG6nsBOPcKqIsA3q37DDjLhPr370///v0BePTRR+N5WiIi
+EgdKBhKAPZ3fsWNH+vbtW9UvMGrUKI466ij+9re/cd9993HmmWdy6KGHEgwGq3oFioqKAAgGg/j9
+fjUQS4PknA2IDe4rKqyVgO68Ey6+GNq0gVDIWkK0bdvqPQG2upwZcHuZkGmahEIhvF5v1Z4lIiLS
+eCgZSBB28P73v/+d3NxcvF4vLVu2BOC5556jtLSUrKysapuQAVV7ENgNgs7XEmkodtTke9NNsGSJ
+1RPQoYPVJOy0rxt83VomFNtA7PP5tGmhiEgjpWQgAWVlZVV9bZomTZs2pWnTplX36T90aUycAX0k
+AmvWwObNsHYtfPwxvP22NRPQtm20gdg+3q6Y21cBu/YZEBERt1MykOBqCvy1Vrg0Bs4A3t7ld9Uq
+qw+gVStrH4FrroEDD6z+nNhGYth3I/du7RkQERGxKRkQkYRmmtbIv88Hv/xifX3PPdZjLVtWnwlw
+jtTXV4CuZEBERNxMyYCIJBxngG0YkJRkfV1SAvvvb80MRCLRDcScx9dnYO7WBmIRERGbkgERcT1n
+5ZtpRjcQMwyrP+C//4XSUmu50OOPtx6rrIRti2rFjZIBERFxOy04LyKuYgf+zmVCnQzD2iQsKcn6
+/Oqr8PrrVjJw4okwYkT0GFu8gnH1DIiIiNtpZkBEXGVX6/2//z5MmgRNm1qj/ytWwIsvwuGHVz/O
+DXvrxS4tqt5+ERFxGyUDIhJ3scuD2gF0eTls2hRdBrS4GF56Cf78Zzj2WNiyBbKyoGNHCAat1/D5
+tu8TiJfYPoV4n4+IiEgsJQMiEjfOnYLt2/aIfjgMV1wRfczvh7w8a6nQ0aN33A9Q36sF7YxpVu8Z
+DiQ9Pb1qDwI38ng8TJ8+ndTUVE4//XTWr1/PvHnzuPzyy7nkkktYsmQJv/32G2vXruXCCy/khBNO
+wO/3s2LFiqpeAjeyewUqKiro2bMnv/32G8FgkNNPP53zzjuPWbNmsWLFClJTUznnnHOqrr2iosLV
+v1siIrJnlAw0QNbmXJGqJUdbtWrFxo0bAXeOWO6IaZqUl5djGAZlZWWYpolpmgQCAcrLyykoKCA1
+NbWq0TE9PZ0tW7bE+7R3yU7InnnmGY455piqplPTNPF6vZSUlFBYWFi1hn3r1q0pLi6O81nv2pYt
+W5gyZQqLFi2ibdu2lJSUVO0FYa+WZL9PkUiE/fbbr+rn0o3++OMPli1bxvjx4ykvL6dFixYEg0GA
+atdkL+trmiatW7d27TXZSUDnzp3Zb7/9eP7557n11ltZu3Zt1c9fIBCgoqKCrVu3kp6eXvU7l5qa
+ytatW+N8BSIisi9omKcBMgwDr9dLaWkphmFQUlJCixYtgMTaUMhexcRe4cTj8VStvOP3+0lNTa1a
+FcUwDMrLy6uWsHTzWumGYTBx4kTS0tIYNmwYr7zyChBd3tFeGraoqAjDMCgsLGT//feP5ynXSvPm
+zXnhhRd4//33efPNN6tWovF4PHi9XpKTk/F6vVUJXkFBAU2bNgXc9X7Z5/LOO+/w4Ycfkpuby5w5
+cyguLqayshKg6ppSUlIwDIOKioqqJUfta3LL9djs8+nUqRPPPPMMAPfffz+ffPIJGRkZeDwewuEw
+Pp+PJk2aVF0TWOVtbnyvRERk7ykZaGCsXXoN2rZtyzfffMOxxx7LV199xSOPPALg+mUPbeFwmKKi
+IjZv3swff/xB165d8fv9LF68mO+++47DDjuMTp06sWnTJr799ls2btyIx+MhPT2dUCjkynIG+72Z
+MmUKX3/9NZMnT6awsJCjjz6aL774gtatW7Np0yZ69epFWVkZjz32GEOHDmXevHnccsst8T79nSos
+LGTLli0kJyezdOlS2rZti9/vZ/r06bRu3ZoWLVpw8MEH8+WXXzJjxgz2228/Vq5cSY8ePQB3Bc72
+uYwYMYIBAwawdetWVq9ezfHHH0+TJk346KOPSEpK4oADDqBz585kZGTw4YcfMnz4cNavX0+XLl3i
+fAU7V1BQQFFREYFAgA0bNjBgwAAWLFjAkiVL+OKLL+jduzedO3cmJyeHn376iV9//ZVAIEBKSopr
+f7dERGTPaWnRBqq8vJwHHniAb7/9lpEjR3L++efH+5R2S15eHjfffDPLli2jadOm3HPPPVXXdOCB
+B3L//feTkZHBd999x8MPP0x6ejp33XUX7du3d/XIZUVFBRdffDFlZWWkpqbSs2dPrr76au655x5+
+/PFHLrzwQs4991wikQgTJ05k5syZjBgxgtGjR8f71Hdqw4YN3HnnnWzevJkDDzyQO+64g/Lycq67
+7jpKS0u5/fbb6d27N3l5edxzzz2sWrWK66+/nmOPPTbep14rjz76KKNGjSIUCnHttdcSiUS4++67
+6d69O2vXruWee+4hNzeXm2++mX79+sX7dHfqq6++YuLEifj9fgYOHMjVV1/NBx98wOTJkznooIO4
+7777SEtL48svv2TixIm0aNGCu+66i/3339/Vv1siIrJnlAyIiOyA/edRAbCIiDRUmu9twCKRCKZp
+YhhGQgYz9vlDtLzJ3rnXvm2a5nb3uZ29/CtQdd41vVeJ9v7Z5+t8L+xrde6gnEjX5XwvGso12b8z
+EP35q+n3KBF/t0REZPdpZkBEREREpJHScI+IiIiISCOlZEBEREREpJFSMiAiIiIi0kgpGRARERER
+aaSUDIiIiIiINFJKBkREREREGiklAyIiIiIijZSSARERERGRRkrJgIiIiIhII+Xb2xcwTZPly5ez
+atUqSkpK0IbGIiIiIiLu4fF4SEtLIzs7mw4dOlR7bK+TgWXLlpGfn8/AgQNJS0vD4/Hs7UuKiIiI
+iEgdMU2TwsJC5s2bh2EYtG/fvuqxvS4T+v333+nTpw/p6elKBEREREREXMbj8ZCRkcERRxzBL7/8
+Uu2xvU4GysrKSElJ2duXERERERGRfSgjI4OCgoJq96mBWERERESkEfB4PNv19+51z4DsHtM0ycnJ
+YcuWLQSDwXifjoiIiIg0IB6PB7/fT+vWrWnZsuUuj1cyUM82bNhASUkJbdu2JRAIxPt0RERERKQB
+MU2TYDDIhg0b8Hg8tGjRYqfHq0yonm3evJk2bdooERARERGROufxeEhKSmK//fYjPz9/l8crGahn
+oVAIn08TMiIiIiKy7yQnJ1NeXr7L45QMiIiIiIg0UkoGREREREQaKSUDIiIiIiKNlJIBlynPe5g/
+P9+fMSs2Y+76cBERERGRPebuZKB0Hdf1eJD0lLvweO4iKf0BOo5cSdFev3CYpfdMwd98CgPPnccv
+Vcv9h/j+rlc5tM0EPJ77OPPj6vsAhNct4dJjnmI//6OM/aZyr8+iJpHQan7e+BPLy0PxTQbMPD6b
+fxd3//ANm5WViIiIiDRI7k4GUg9g4sKbyPuyHx1a92DmhptZ+UpH0uvkxQ3anXsaH7/Wh66B6H1t
+Bvdn6scjubnL9iv+eA84mKkfn8PV3ffdakCp+09mzU1lzDmkdXzfnEgus+c/wP0/KhkQERERaajc
+nQzsTNFKRrR+nDMvfovTTnyWjge+xJ3fVuzlaLrB/gM70a9LKkl78C9TtnAuV1/5Nd8V7sG3Di/n
+wedSSHnQ+jhmcR4RAEqZ+1FH0qcMoMfjLTh42oUc/0QKqU9fxsyyCOV5t5L9YAod3zibPo+lkPLI
+wfzPj8sos86IXxdfxdGTmpDyYAptXvwrz+QVbfs3CrP2x/6kPNiGIe+dxqGPppDyYEcuXVPIhp8H
+kfLIkTxUAGy8lR4Pp5Ay+Vrm1WIyJFIwj8kX9qNLxy5kdzuYQVdNJzdsfb/c929kcPfOZB/cnezu
+R3PT3BIACj4ZxUH9nmDFtteP5L7GsA7DeC0nsgf/kCIiIiJSW4mbDABUlhMeNoS3Px7NnFsqmXLX
+KrbG8XSCK5cx5cklLCvdg5TE24GLz/yWWcNOJWO7ByOEIt24sk9XVq78jsOOvoaDCt5h2sayqiNy
+NrXllhFvcWPL1bz9yXVML41Qkf8o57z3PEta3MjU4fdyVNFrXPvGHcytFtRv5dvNB3PHiK+Yfc7D
+nNE0iZZdX+S7kS8zsgnQ9ApeH/U13557A4f4d3EN5lY+vXYETyTfxue//Mrypd/z2pjupHiAytW8
+ftfrtJw4n2VLlrLshze5OjsZgIy+ozlh86v89/cgECZn5vMsO3wMg7MS+8dTRERExO0Se/er5EwG
+H5GCgYfMLs3wbyykMAzNvfE5nYzho4js8dREEq1aHEaTitbUtDexp0lvBrQJku5ZR78DB7DE8ySb
+gmFIsh7v0vMy/nxAV/of3puHPljMF1uL6bthOsvJYuxxN3J+Gy99Cl5m+pyZfLi1gr6t7Le+OSOO
+v40RbZsAh2+7rws9s8pp4wP8B3BwVm861yYuL1vG/33ZhHPeHEwbP0AKbXt23nYBLTl8UDMeuH0M
+f//pOPoNGsqwQa2tx9IP55L/Z+++w6K60geOf6dQhiIqimJFpImK2ADBELuAdRVMjEHNutFoNNEU
+s781JsbEaNY0S6IxMeqqiRo2rpXYFewtiKIgVgQLAoogfWZ+f4AGEIYBu76f5+F5dC7nznsLM+c9
+59xzAjIY89sZ3v4/azYuOo/3xE7YKqp2JoUQQgghhHGe8qZXBWplUY1RoQA9PLtT8CgKj7Hon+j1
+6IodbEFBPnr05BcUlF++SMlTZEF1kwd5G+jLvgSKanT88hDHFo/Gx/oCy4Z7ETDrNIWPaFvi8Vpf
+8sJWcCJ+HUuS/BnpVx3JBYQQQgghHq6nPBl4smRH72f0qD3sr8IzA/qCK8RdP05MWgr5wO30k5xI
+juF8bnmV+5LO//kx30TP5aP9x0DZnBeqW1G3UW9cuMbyXV/yy6m5fHQoDix7EFDdrOIdKjRUNwEy
+9rPrcjQn0q6SXVEZjRv9/W4TNm8b1woA8rgef5EMHaDLJCE+FavmnQkZO4WpQ+py6dhV7szXpHF/
+lQGKVXz++Y+kdP8H7asZddhCCCGEEOI+PNnJQNHUonYd93Mh+Tg97R/U1KLl0XL80x+pXWsBn8Rl
+s7r3V9g0/i9bjXzDvAun+WHBKeKq8MxAdvI0uv/shf+GNdwEYiID8V7UjuFn0jDmMdqGjauxfvs/
+WZnlyIBuX9PHQolZ7XdZFTQcl+TpjFj7L3ZYBPNlyFS8Kxr7D6BszN+8X8ZNv46xy71p/8sMTlT0
+ALGiOl1n/cZbtz/lBTc3WrRsz8BZ0dzWA9o0Iqf1pZWzC67OzQnZ7su0D72xulPWzJXBL5uz4bdb
+9Hqt9V+vCyGEEEKIh0ah1+vva2BNWFgYwcHBDyqeR0TLqU8X0OtyH07Ma4BFZYrmpTCt3XISfhjD
+Dx2MqVWXFBUVhZubW6XLlScneRKtFn1N9W6x7Gnb+Cl/CEQIIYQQQjwosbGxeHp6lnitdN39ye4Z
+eGgUmFS3QLdpHV1CDhKXV3EJAG1SDK91CWPxTQtszWREuxBCCCGEeLo9pw3JSpzGDePCuMqVUtVv
+zqLdzR9OSFVkbjeNuA+mPe4whBBCCCHEU+g57RkQQgghhBBCSDIghBBCCCHEc0qSASGEEEIIIZ5T
+kgwIIYQQQgjxnJJkQAghhBBCiOeUJANCCCGEEEI8pyQZEEIIIYQQ4jklyYAQQgghhBDPKUkGhBBC
+CCGEeE5JMiCEEEIIIcRzSpIBIYQQQgghnlOSDAghhBBCCPGckmRACCGEEEKI55QkA0IIIYQQQjyn
+JBkQQgghhBDiOSXJgBBCCCGEEM8pSQaEEEIIIYR4Tkky8JQruLSCEV18aGVvhfvkaHIed0BVUHBr
+BSOW+dBqlhXuu8o+Bn32SrrM6s3G3MrsOZczJ7/mP9ezytmu5fKKQXR4O4J0fbGXs6P51L8Hs+Pz
+KvNmQgghhBBPHUkGnnLqhi+zcPtOlgxrgsnjDqaK1NVeZuGrO1nSsvxjUJh1Z+5LX+FnWpk953Im
+5muWpGSjL2tz5iFmzrhO6HhfbBTFXte0YORYCxZM2UxamQWFEEIIIZ4Nkgw8IdK3DqWpzyzO5hf+
+X3dtBUEOQay4qoOCRMJG+9PcxRkXF2fc/Uez9HR2xTvN2MEQBz8WXNICkLkzlCa+80koANCSsms6
+IW2daOrUFMcWvfh463UKf1PLtY0T6dbMCVf3Zrg28+eDg7cfzoFXKJtje//GC0u88Vr2LntKNNZr
+uXZmIt3mO+H6QzNc5/vzweXCOLOuTqXvkh5MuJjKgZ2B+C32I2jnQYofRfreOayrEUq/RupS76nE
+rutwmu6dQ/g13cM9PCGEEEKIx0iSgSeEjffrdE1bzu/n8gAtVzctJLbNSLrVUYKyBj7v/caRuHhO
+n45l66iLfDRhA9fvo9Val7yGMW9E0GVZNGfOnOHEL36EvzmZ/ZlA/kVWfrKSWl8fJvbkKWKPhjHO
+1byCPeYS80UnHOztsS/+06At7+y/n0RCQyvf1US8/BlepbsNdBdZGbmSWt0PEzvqFLH/CGOcbWGc
+FnU/Yu2wzXzT2BbvTuHsGb6HjZ28sLxbOIu4DUew6NgOW9W976qw8aCbXQxrojPvI3YhhBBCiCdb
+6SZR8bhYt2FEQAZjfjvD2/9nzcZF5/Ge2AlbBaDXkrpzGm+EHiAFFWScIcWkCzcKoHYVxwbdPr6K
+XUlnSXyzD2EKQJdNlmkDLmfqwLIWbTpWZ8bkkbwX1RmfjoEEdbSrYI9mNP9gJxc+qFo8VaKsRZuG
+1ZmxayTvXeuMT8NAghpWFOcd+VyPv4nVwJplD01S29K4eh6/JtxCSzXKyBeEEEIIIZ560jPwxLDE
+47W+5IWt4ET8OpYk+TPSrzoKIPfULIZPS2X4qkj2793L1u+CqKXQoq+oZ0ABCgV3f0+vK6D4oBdT
+l1Es2bSNbdu2sW3HXk4cX0VIXSUoqtHxy0McWzwaH+sLLBvuRcCs0xh+nDaXE9M70sDODrviP/at
+eHvfwxpiVI2O3Q5xrM9ofEwvsGytFwEHK4rzDiWmVmq02fllP0+gzyM7D0wtTeWPRAghhBDPLKnn
+PEE07q8yQLGKzz//kZTu/6B9tcLXdVmp5FRzxrW2KejTObxqF9cKipdUYVVHw80zSeQUr9mqa9LY
+Ko3Yq7lADucjo7lRlA1Ytgyh/dWfmR+RUvicQH4q0ZFRpGoBXSYJ8alYNe9MyNgpTB1Sl0vHrlZQ
+yTajxf/tJjE5meTiP1eOMauDpcGSd4/BSsPNtCTjZ0TSZ5KQlopV7c6EtJvC1BZ1uXSteJxKzE3V
+ZGbcpOCewuY4+jYg5UhC2e+Xc4nDybVp72qNoqztQgghhBDPABkm9CQxc2Xwy+a0nnKLNyNbY1X0
+sqbVeCZ7DWFY9yM42mmwqtGEWiXGtpjQZPBk+q4YiYeLNfUCfmDrnBewMnfn9ffb0m/IiwS0cqWR
+jTWWRemf0q4/C5bHM+Zdb1wy1CjyldQLmMryDp7Y6tKInDaQT/ako1AUoK/Tg2lLvO/G83CY0KT5
+ZPrGjMTje2vqNf2BrT1fwDJ3B+/8dxIHcq8TnXONd3715XNNF74Z8BntVWlE7h7IJ4npKChAb9mD
+aX2Lx2lJm/YjsFkTQNsTttR1mMXqLt5Fzw2Y0DAoBLuFaziV9SLtLUpGk3c+nB2KQOa7aR7qUQsh
+hBBCPE4Kvb7CwSYGhYWFERwc/KDieeZFRUXh5ub2uMMQALpkVg8J5H+hW1gcVLNYD0Am+yZ0Yopb
+GBtGOUjGLIQQQoinUmxsLJ6eniVeK113l2FC4vmltKP3l1Npfz2R28VT4tyrJDpOZO4wSQSEEEII
+8WyTuo54rpnU78XYYaVeNHMiZJzTY4lHCCGEEOJRkp4BIYQQQgghnlOSDDxFcpJn0nthB0aeTSt7
+OkwhhBBCCCEqQZKBp4iu4CLRKVHE5RSUTAb0yew4/AlTj+4jrdgGbeYi/L7Q4Lj1CNmPOtgq0XJ5
+xSA6vB1B+kPPdnI5s+Jr/hOTVYVYsvhzb2s0szyZeaP0hKvlbMuO5lP/HsyON24VhPsu96Bl7GCI
+gx8LLmkf2lukh/elXucVXNdDwaUVjOjiQyt7K9wnRxs/3awQQgghKkWSgaeIRb25JHyQza7mdiUv
+nO4aOw/PYPqfJZOBp07mIWbOuE7oeF9sHvrk/oXJwJKT2WX3shiIJSv5ayYlt8BNfW+Q5W7TtGDk
+WAsWTNlcuWtU1XJPOXXDl1m4fSdLhjUpe4VoIYQQQjwQkgw8CfJ38/osCzr8mchf7a5prP7VGptV
+67ihjeOLHzVovij8eTEmuWglYR1Xojui+dKLf6cDKZNoOVODZu54DuX/tfvs69/Sd64FmpnNGRGb
+VMYCXPcERNLp9+gyzwHH7xxwWjyKZWlFbbN5Wxk0y5E+v/el14rOeC0axLdXbxVVqLWkJEwnZKET
+Tb9viuOCXnx8/nrhMeXtYMhsF17aOJSQlb64zevBZ5dvlaiIp++dw7oaofRrZOxz7TmcXjycNg0a
+4uzxIoOHtcKuqGUZtKTsmk5IWyeaOjXFsUUvPt5aGEvW0an07diDCTtSOTApED9fP4ImHaT4Osnl
+xpIfxdcRfzKgYwj1S//1GNqGEruuw2m6dw7h13SlNxpQhXLacyxa60WjrwrvF+s5LzD21IWixdhu
+cfTQy3h+W7it2qx2jIxPxqg9629xcOYgOrjZU7NhNz47WHT9ChIJG+1PcxdnXFyccfcfzdLTRX1R
+GTsY0tCFl0YNJaSXL24uPf4qRzZxPw+jbaNGuHj488avF424N8HQtb0j+8T3vDP+Ww7deo4yKCGE
+EKIKJBl4Eqgd6VhDyaUrCdy6uYbvDq8iJvsSR9N1NKrvhKXKgb8P2M/2oD7YlCiopJbLIg6E/odQ
+S6DaGFYO3cv+l9+nebHm1JvXcwny/xcvqM7xy56VnK+g5qfP3shb4Zvx7Xecs2+eYq37cd7fuJSE
+O+V0t1A1W8Dal7ez6UUL5obPI04Luqw1jNkYQZd+0ZwZc4YT/f0I/2My+++McNHfQtf0S1a+FMkW
+nyzm79lF+t13zSJuwxEsOrbDVmXcadNe+oXRk84yfGs88X+uZGDWFbKK6n665DWMeSOCLsuiOXPm
+DCd+8SP8zcnskv4TvAAAIABJREFUzwSLNh+xdvdmvulsi/e0cPbs3cPGaV78tU5yebFkEXXkHfY0
+nc5Q69IJi6FthRQ2HnSzi2FNdKZxB1jlckpq2L/Gt8ERHHhtAx/XiGbhpk85kAf67M38c8carjaa
+R8SIA4QHjsRLY2TylZvIxRYz2XXqPLvH3eC7aUXXT1kDn/d+40hcPKdPx7J11EU+mrChKCkDCm6h
+C/iSlRsi2fJ+FvOLyhUkLGPUh+cYviWe00d/oU96qSley2Ho2t6Rd2Ed83/4nbhsSQaEEEIIQ2Rq
+0SeBogbu9rVJvxzNoeg5vLcvkxE1vuB8poZm9epjihm1bVthmWuHaamiJubOeNTJwV4NmNTHvU5r
+nIpSvDstpbVd3mFUy/pYHp1G5O0rZOgwmAZm39jGCfNAPrSzRgE4Ow2k/r5NxOa/TiMFoHYlqH5t
+VCiobh9Eo/QFnMibSP3kVezKOEviH30IUwD6bLJUDbicrwMTQNWEznVrokRFjZqNMcm6zC09VFcA
+5HM9/iZWA2saPSwkK/YP4uoH06upOajq0GVwKyxnF267fXwVu5LOkvhmUSy6bLJMG3A5UwdWFeXA
+ZceSdf1r3k7oztwBLphqz5SMxcC2u9S2NK6ex68Jt9BSDSNznkqX02tvkXx9JV/v/ScJOXlFrfDx
+XMjX8YJZU9pVh8j4iYzM601Pp2BGONQwrlXA3JV+3RthqlDTsJ0rZmsuc0sL1RVaUndO443QA6Sg
+gowzpJh04UYB1AYwa0LnNkXX3akxJimF5ZRxm4mvP5AgRzNQ16dnqCdWcysOw5hra9M7nKyn40EZ
+IYQQ4rGSZOCJYE6TBk6oTm1l/YV8vB2qcSgunBt6B8bWsLjvvavVJihQGl/5vEep8e96LQV3Fq7W
+F6BDf3eYiantKJa88i7OpWuXeQBK1Io7+1KCXg/6O7tXYmqlRpudX8mZkhRF0d07ft/UZRRLNr2L
+c6UHnZcVSwHJV3ZyJi2efot/BP1truTdYvd/A9H2X8PL5W7bwD9rmRfuQp9Hdh6YWppWrkuuUuW0
+JJ4cw9vHj+Dc9j/saOXCxT0BDIsrumbq1nw2LIpOsb+z6fwG/rNtID9eXM6pAQOwq/A5DRWmRc9C
+KJQqFEXXLzd2FsOnpTIpIpLgBqZkRgyn5QQtf61trkStLNq54q/rrkCBQqVGpQBQoFSryriKZav6
+tRVCCCFEcTJM6ImgwMrWm7rZG1h6oyVvtPHh+snfuGTqQWsLNfqCK8RdP05MWgr5wO30k5xIjuF8
+btEIa4WG6iZAxn52XY7mRNrV+5o9SFOjKy1y/mB1SiZ6cjl39neSavbA9U7Fq+Akq85cII8Crpxf
+yRnrrrQ0VWBpF0L7zJ+Zn5BS2CuhSyX6UhSpRtXuzXH0bUDKkQSjZ46xcAvANem/bLyQC9orbF9x
+7O64f8uWIbS/+jPzI4piyU8lOjKK1LsDy5WYW6vJTLpZxjj1smJR4+CxlaQ3LnLuH+c4N3QRXU3d
+mDQwnH/WsjKwzfyv3eZc4nBybdq7Whtd6a18OT06XQFgjkODF2lbLZ1DV27+tbngFEujIsmv1YvX
+ff+PITaQlXaatMo8xlCKLiuVnGrOuNY2BX06h1ft4poRg/81bj1xTd7OkTQd6NOJCj9V4rkNUGFV
+R8PNM0nkFLuHKr62kH18Lm+N/YqD8syAEEIIYZAkA08IcxsfXFWQY9ODDvUCcNXrobofjiaQnTyN
+7j974b9hDTeBmMhAvBe1Y/iZtMIWeWVj/ub9Mm76dYxd7k37X2ZwIr+CNzRAoQliVmA3dq/2wHWe
+O71OuDEjKJTGd+4WlT12F0fg9YMrPruVTAh6EzcVKC36s6B/KJe2e+MyzxWneR0ZGxN/dxy/YSY0
+DArB7vAaTpU322cpqoavMO+zJizq3ozm7Qbzu4U9Fmo1SkBp158Fy0O59IE3Lq6uOLl2ZOyvxWOx
+pM3YEdjMC6Btex96/PNAsYpo5WMxRt75cHYoAnnJTfMQy6lp6P5vPmhkw7Y1jbFf+AHHTM2Kbc/n
+6sVv+ccv3rReFMz8Ah/e7DQUp6p3G6FpNZ7JXlsY1r0PwSGvszi3CbWMaLFXN3yF7yZV45t+AfQf
++BrfXzHDtES2Y0KTwZPpe24kHi7u+IyLJBNjri3kXdzATwvXcFqeGRBCCCEMUuj1+vv6tgwLCyM4
+OPhBxfPMi4qKws3N7XGHUXV5Wxn0w2f0/ft2XrV8wLmkLpnVQwL5X+gWFgfVNKoVPD8rH7WFKQqy
+OPZxR0Jzf+bgDE/MKyz7oGOpSCb7JnRiilsYG0Y5VGJ8XlXLCSGEEOJ5Fxsbi6enZ4nXStfdpW4h
+nhxKO3p/OZWkrYnc1tfEqsIaeBZHPw7k9c23MVNrofFLfP+Tx/0nAlWKpQK5V0l0nMjcYZWs0Fe1
+nBBCCCGEEaRn4BF76nsGhBBCCCHEU8GYngF5ZkAIIYQQQojnlCQDQgghhBBCPKckGRDPNG2cmv39
+TImKuP9HgIUQQgghnjWSDDzX9KQnTaHLbEs0X2jQfDuciLzHHdMDlqMg/YySzJtUcjGzxy8z8u/Y
+ayyp26AB9Zu0ZcCHa7mQW/J39Ckr6VKvNxvTK7PnXM6s+Jr/xJScNzXrwFgaazTUadCQBvb1cOs5
+kTWXHuwNkR7el3qdV3D9gVwMLZdXDKLD2xGkG7O/jB0McfBjwSVtxb9bRcWPr+DSCkZ08aGVvRXu
+k6ONXj/jYcRiiDFxVu0+M1Su7HvwYdInLOLPPjO5XexvSBs9ncNBM8ks9+Lkkb1+Idfiy165pax9
+PnkMn+uqXtsnn5br2z4myK0e9erVo2mnd1mTWDjndvbhd3BuOJyIzMLfzImehHuDIezIMFBOm8TS
+Pg7Y2dlRu4YGjUaDTW077Owa0uXbOPIyIxhur0FTozZ2dnbY2dlh3/odDlRwi9/eO4r6Gmt6LLuM
+FtBeWkCHuneuR/nHYLBcRbFkR/Opfw9mxz9rX/iiqiQZeK5lExezmH3ZTZgQvJeDodNo+4yt6Kpq
+lU/PmBw69tU/gOlBHz3TRmPYGJ9IwpHv8dkzin6fRZVYUE5RvTtz13+Fn1Vl9lpYOVhyMvueBEld
+7++sOX2JxEtHmGH3C6+P3UDKfSxI9lBlHmLmjOuEjvfF5gm8uOqGL7Nw+06WDGvCk/xnZUycVbvP
+DJUr/x58lFTNh1HfdiMJu1LLiaMwGbga/0TX9itg+FxX9do+6fTpO5n496XU/yaaxKRYlnfYzBtj
+VnOtgs+zcssp6hO67gLJyclc2vUOTWwH8L9zySQnX2L7eFdMAUwaM257AsnJySQnJ3Plz6/xtqg4
+VpVCxZE5v3CmVN28omMorxwVxaJpwcixFiyYspm0p62VTDwUkgw8p3S3l+H/hS0v/nkNOMs3Yb54
+LZ3EkXyAbOJjxuI/p7DHwH7Ra/yQnFH0RaIl8c8OaL6wJ2B9P1p8pUHzRRP+kZBZ8ZveWE23Oj58
+OHkwXf398PL0YdzuTCCfpDXv0cXVAcemDjj5jmJZfA5kHeQtdx++PVt8BTUdySt74jgoHIMNWfkK
+4nubs6554c/utYq7X4T6ZBURzc0Jf8mUCF9z1nmY8ecfCqMqJbr0Q8wd5oNzE2dc3dzpOHYd17SF
+5+Xaxol0a+aEq3szXJv588HBwmXM0rcOpanPLO4chu7aCoIcglhx1fhatqqmN29M7kbqL4s4ngWQ
+zbHpf+MFX2+8urzLnlKnv7w4s45OpW/HHkzYkcqBSYH4+foRNOlgqZV/AbU9Lw5wIz/+GFfyC48v
+Zdd0Qto60dSpKY4tevHx1uuFKwAXJBI22p/mLs64uDjj7j+apafvpCzZxP08jLaNGuHi4c8bv14s
+tuKzgX2CgfulUPreOayrEUq/RpWYdFV/i4MzB9HBzZ6aDbvx2cFbhdfd0DFk7GBIQxdeGjWUkF6+
+uLn0+KucweMzpIJjr5L7ONcG9ln2faYl8ccXqOfRiQ5N7Wn90uv0bduY+h2ncTzHULmK7sH7uycM
+UllgamtNiVYBlT21x/pxe95KskpVqLQxczgxaBhn990k46vX+DM4hONfHSt5zsraZ5Xlc+m/4/B1
+bIKbpx8DQjvS0Gcu5wu4p1crc2coTXznk1BQWK7Mz08qOtcGPkMM3vM5nF48nDYNGuLs8SKDh7XC
+zsjevkf5+Zl98r/spAvD/GuhVFSj9SsDqHnwV45kGI6xquXuh4nD3wg1/5Ufo7JKfA9VFEt55Sqm
+xK7rcJrunUN4GdlR9onveWf8txySFdyfG5IMPKeUmt78/Np25jlZA82YNGgP+4Z8ShsTyL3+FSHr
+F3LSdiI/9Z3GCxkrGP/bRxwssarxTfanufPRoD3sDJnJ36oZ2faZeZI9dh+xPmIPB4/u5PP2luhT
+N/LWmM34Lj3O2bOnWBtynPdHLiXB1IWe7insiMlAl3OF6GMJ3NZnc3rLRRwCPTDYkKXW03h2Hn6f
+actt7SxI1+EyL596VgoSv1dxu6JanP4m28YPYpb5h0Scjifu1BFWjGyGRgHkX2TlJyup9fVhYk+e
+IvZoGONcC1c8sPF+na5py/n9XB6g5eqmhcS2GUm3OpX78zOv1xzbGzEk5gBoaPV/q4nY+BlepVue
+DMRp0eYj1u7ezDedbfGeFs6evXvYOM0Ly9JvlnuRLavisGrnS0NT0CWvYcwbEXRZFs2ZM2c48Ysf
+4W9OZn8moKyBz3u/cSQuntOnY9k66iIfTdhQOFQmYRmjPjzH8C3xnD76C33SE7ld9P1icJ93lHG/
+FMoibsMRLDq2w7YyqyfnJnKxxUx2nTrP7nE3+G7arsKk0sAxAFBwC13Al6zcEMmW97OYX1TO0PEZ
+YtSxV9J9n+syGbjP0GPW8kPWrR2Dds8txm7awju69fyRkGewnKF7sGr3hJqYLzrhYG+PffGfBm15
+Z/9faa6i/ks0X/wGlqbFo1GgbvsP6pn8l0v7SzYvqJqPo8WqJTTtUB3rdxfROuw3Wr7biuK3W9n7
+rBp96h+8M34PQatjiD26njFWl0gzIjss9/OzoKK/d0PXlnLvee2lXxg96SzDt8YT/+dKBmZdKVr9
+O9fwdXjEn595189yy9qBmkVtBWpbB6rnXeHiTcMf9FUtB4XHMae7491jb9p3CZeMaR1QN6D/OBe2
+zt7JjWJ18wpjKaecMbEobDzoZhfDmuh7PwTyLqxj/g+/EycruD83ZB2j55WyOk52Hly3MAWscLDz
+xNNSCRRwMWEdcdRhVOeJDLFX0T79P6zbtYk/bubiXfvOLVODQV0+ZFADS6CN8e9r6cmIEFc0AEpz
+rM0g69g2TtgE8qGHNQrAufdA6s/cRGzW32kV0JBz2+K5bvkdQUPSmX1kBvEnrOk0oRYG64AKMHXU
+YZOhKrfRzsxXR+0WenLrweVUBdoCDP9FZMfyv92WhIR1w94EQEMDD6ei96tFm47VmTF5JO9Fdcan
+YyBBHe0Kt1m3YURABmN+O8Pb/2fNxkXn8Z7YCdtKtyYqAH3FLUCG4qxAweWf6dU0jIKbtzDx+5TN
+q7tTXQEZx1exK+ksiW/2IUwB6LLJMm3A5UwdWGhJ3TmNN0IPkIIKMs6QYtKFGwVgFreZ+PoDCXI0
+A3V9eoZ6YjW38L1uG9qnVdEXfRn3S6F8rsffxGpgzcoNwTF3pV/3Rpgq1DRs54rZmsvc0kJ1RfnH
+UBvArAmd29REiYoaTo0xSSkspzRwfIYYdeyVlHW/57rS1NjUq4m5jT01a96gpsaGupY5RGVWvX+j
+qvdE8w92cuGDqh5GY+qM8SDpuzVk+w5F85i+FbPPbiW6Wg8muVqA0gLvQR2p9acx5cr7/HydRtXu
+I6By7nlV7B/E1Q+mV1NzUNWhy+BWWM4GMDN8HbL2P/rPTwWlem30GFxZ6X57eEwaM27jn/y7taaS
+BZXU7PIWL34+lTVJfR5MuYpiUdvSuHoevybcQku1Et+nNr3DySr7MRnxjJJkQFTgr0/Hkp+hFlQ3
+qUIlQqXBRlNGOUVZ/1FR29sfsyU72aa8yYChSjZs382VTE/+5WB27z4qSXHn7q/UF0A5lXFFNTp+
+eYhjMXvYufMPlg33Yva43Wwb74Iplni81pe811ZwIrguS5L8mepXvdLfO7mXT5BaozkNjVpi2Yik
+oQzqen9nw/HvaHsrnHe7jeHfm19l+cC6AJi6jGLJpndxLlX7zo2ZxfBpqUyKiCS4gSmZEcNpOUGL
+Xg8KFChUalQKAAVKdcnkrLx93lXe/YISUys12uz8Sh6nClN1YQQKpQqFXg96yI0t/xjuvJ9aWRS5
+QglF5So6PkMqPPb8TG7c0mJe3QaNEb0f932uq0CpUqJQqlAqVShRolLo0N7nMyaVvydyOTG9KwHf
+nKbESB+VPYN/38usDvf0e5WiQO03mjrfjiXxzwE4t39Ag+cref0M/s0qQKHg7v2o1xWgK7W9nP/c
+h7Lv+TvvobjnvSq4Dq3gUX5+mtZuSrWMBG7kA6ZQkHqBmyZ1aVRdjSLdHBNFsVj0ehQmGkwUYFrb
+kWoZF0krAMyKypnWw6H6w60uKSzb8kbITYYtjb87FM1gLEnllzOKPo/sPDC1NJUhIkLuAVGamrqN
+euPCNZbv+pJfTs3lo0NxYNmDgOr3XwEvi8axKy1u/sHqmEz05HIu/HeSnHvgagFmTbrQ6tpPzD7e
+jqHDe3Bl9iziHANpVtFDWTkKMk8ryLhY+P2lvaIgI05B1v2M+9S40d/vNmHztnGtACCP6/EXydAB
+ukwS4lOxat6ZkLFTmDqkLpeOXb37pahxf5UBilV8/vmPpHT/B+1LtdhlH5/LW2O/4mA5YzQL0vbz
+/adbqfnya7Ss6NgNxQmAEnNrNZlJN8sd325SN4BP5/Tg0AdTiEjXY9kyhPZXf2Z+RErhF05+KtGR
+UaRqQZeVSk41Z1xrm4I+ncOrdhW9L2jceuKavJ0jaTrQpxMVfuru8wmG9lkxcxx9G5ByJOGBzNJj
+6BgMMXR8hVRY1dFw80wSOcUurTHHnrn/LdwbvMjcs8bN+HF/57rsOB+esu/Bqt0TZrT4v90kFj0o
+effnyjEjEoEipk7UG+XIje/+ILfEeylRWqrRXbtV6eS60tevaVda3trKxtPZoLvJ4bA9pNyJRV2T
+xlZpxF7NBXI4Hxl9d1iIoc/PO8dQ0d97ZVi4BeCa9F82XsgF7RW2rzhWdJ9VcB0e8eenxj2YTmxn
++b4b6PSZnAj7H6ltBtHWGkzretAo/yQHzmehJ48rhw9zo64n9cxA4z4Qf/12lu5ORafP4NjK1dzw
+Gkwb6wdw8gwyxSV0JDb/+w8nc+4cgzGx3FvOKDmXOJxcm/au1vckVhV9H4lnjyQD4h5mtd9lVdBw
+XJKnM2Ltv9hhEcyXIVPxfkhToihqBTHru27sfsUDV1d3ev3ixowFoTRWA5buBDRJJbllIM2dOvNC
+bgI1e7SucPYYbZyaPX8zI/L/VOQDt2absmuAGUd3GPegcNmBVqfrrN946/anvODmRouW7Rk4K7pw
+XLY2jchpfWnl7IKrc3NCtvsy7UPvv55rMHNl8MvmbPjtFr1ea33P8w55Fzfw08I1nC41RjMvYS49
+HO1p6Dmafb7zWDu5NRpAn76DCd198Qv6kIM3Ininpy8de33IoawK4gTAkjZjR2AzL4C27X3o8c8D
+9z5AjAKbFz7kw6ZreX9uDHl2/VmwPJRLH3jj4uqKk2tHxv4aT5YeNK3GM9lrC8O69yE45HUW5zah
+VtG9om74Ct9NqsY3/QLoP/A1vr9ihmnRtVMa2GfFTGgYFILd4TWcegCzUxo6BkMMHd+dOJsMnkzf
+cyPxcHHHZ1wkmRhz7FpunT1LTqMedKpv3ID0+zvXZcdp8D4zoOJyZd+D93dP3A8lJp3fpNa1hSSd
+Kj4+QoNV6CBUy4dzpP9Aov8dZWTra+Wvn8I2iG+/8WF9v2a4tenF7Jv1qXGnR8Hcndffb8v2IS8S
+EPwP5iZYY1n07W3w8xMo71xX9dqqGr7CvM+asKh7M5q3G8zvFvZYqNUVVyYe8eenwuZF/v3TK5wf
+25JG9erRcWEzvpwXgr0KlHX6M2dmO9b2d6JB/SZ0/qEuk+cPo4kaFDadmfnTYC6Ma0H9+q4M3t2N
+77/vj1GPeOVfZE6XRnen87RzHMS6SkzXo6rfj7f9ldzJ2oyNpXQ5Y2LJOx/ODkUgL7ndO4yovO8j
+8exS6PUGR9BVKCwsjODg4AcVzzMvKioKNze3xx2GEE8/XTKrhwTyv9AtLA6q+VROHVsufRprBnZk
+eehOVvzNTlptHgktuWF/59jWl2j9fRBVGQV51wO4frf3jqL1uy3ZFDmWJk/UgF49+Vn5qC1MUZDF
+sY87Epr7MwdneGLUCMbHoODKOt4OeIfrH+zgl1cayPhoMtk3oRNT3MLYMMpBzsczLjY2Fk9PzxKv
+la67yz0ghHg6Ke3o/eVUkrYmcltfE6tnKRtQ1KTf7yfp97jjeK6oMAtegteDaNt6pq9fFkc/DuT1
+zbcxU2uh8Ut8/5PHE5sIAKjt+zA7sh03zepKpQcg9yqJjhOZO0wSAVFI7gMhxFPLpH4vxg573FEI
+8eBZ+v7A6X2PO4qyWOI9M4LomY87jspRVbPH9nG8cd4F1i5cy4X8ezep63UhdGALrB91Q4aZEyHj
+jJthTjwfJBkQQgghhHgYTB3oO/qtxx2FEAbJUFTxSOUkz6T3wg6MPJtW9Qd5hRBCCCHEAyHJgHik
+dAUXiU6JIi6noFQyoCc9aQpdZlui+UKD5tvhRBg3I99DkEfsdE+s7VvTOfQH4kvFoU9YxJ99ZnI7
+t9hrV9YTN2QgR3xacujrWO5zqvVCuutce8mdyJbt2Ne+feGP/0iSkwvnM9FnHCfp/YEc8PVln48v
+R8b/Rk6xruiy4gTQXv6d0d3b09i6CWMPVG4qHn3KSrrU683G9Ip/tyzp4X2p13nFXyv73sc+yy+X
+y5kVX/OfmPKOTcvlFYPo8HYE6cVvwuxoPvXvwezSF1wIIYR4hskwIfFIWdSbS8IHZS3Rmk1czGL2
+ZTdhQvBSBle3w/EhTWVqHDUNQr5nw2wvKprWH0Bh3xvX5d3J/Hd/4h5oHBZYfbIRzwF2JWfL0aaQ
++q9RJFp8jOfOnpipb5N97AIqI9J7Vb0BzNvQnC/8+nGpktEoqndn7npP6j+gdZnuZ5/llytMBpao
+hhHa3OLeWYYyDzFzxnVCV/uWnKJW04KRYy3oOmUzry7rTc1n6YFkIYQQohzSMyAeDW0cX/yoKWz1
+/0LDizHJd1vPdbeX4f+FLS/+eQ04yzdhvngtncSRMh64useN1XSr48OHkwfT1d8PL08fxu3OBPJJ
+WvMeXVwdcGzqgJPvKJbF50DWQd5y9+Hbs8V3riN5ZU8cB4VjVOO0ygJTW+tKLPRZQO7W6Rzr5s+B
+Tv4cDJ7EtQt3mutzyfrv+xzx9eNg0GBOvteLfa+sJ7+CMVT61EgS99nRYGw3zEwBpSWa1s0xKb7S
+aaXjNCSbY9P/xgu+3nh1eZc9mcW3abm2cSLdmjnh6t4M12b+fHDw9t1ycT8Po22jRrh4+PPGrxeL
+TYVtaJ85nF48nDYNGuLs8SKDh7XC7m6PQvnlso5OpW/HHkzYkcqBSYH4+foRNOlgiXUU0vfOYV2N
+UPo1Kt0WosSu63Ca7p1D+LV7+3ayT3zPO+O/5ZAsxCOEEOIZIsmAeDRUDvx9wH62B/XBptQmpaY3
+P7+2nXlO1kAzJg3aw74hn9LG2J6BzJPssfuI9RF7OHh0J5+3t0SfupG3xmzGd+lxzp49xdqQ47w/
+cikJpi70dE9hR0wGupwrRB9L4LY+m9NbLuIQ6HHPYjZlUdR/ieaL38DSuLWE4OZOzn4USbWvwvHe
+uZnmQXGc+9dqcrWgv7KW+K8SqLt0O17rZlM7J7nUokZZZH7Sm/0+Puzz8eHgmI3k6UB/7SR56kZY
+1FSV86ZViNMgDa3+bzURGz/Dq3RXSf5FVn6yklpfHyb25Clij4YxzrVwosGChGWM+vAcw7fEc/ro
+L/RJTyy2+Fn5+9Re+oXRk84yfGs88X+uZGDWlWILT5VfzqLNR6zdvZlvOtviPS2cPXv3sHGaF3+t
+Q5tF3IYjWHRsh20Zp05h40E3uxjWRGfesy3vwjrm//A7cbIQjxBCiGeIJAPiETGjtm0rWtnacU/d
+VFkdJzsPXC1MASsc7DzxrN3Q+HnjLT0ZEeKKBkBpjrWZguyz2zhhE8hAD2sUaHDuPZD6cZuIzbKm
+VUBDzm2L5/qeDwjqOY5NlxPYc8KaTl61KL9qXXXai3vJtH6R2m6WgDkWXQIwOx9JVjZoz0aQVSeQ
+mo3NQFWLGn2alYrBAquP1+Ozfz8d9u/H6/sgTJ+0v1p1Ldp0rE7E5JG8N30eYYezqFGt8Ciy4jYT
+X38gQY5moK5Pz1BPoxKurNg/iKsfTK+m5qCqQ5fBrYpV6O9HPtfjb2LlUJMyc021LY2r55GYcOue
+lWZteoeTlR3Bq0YtRSqEEEI8HeRbTTz9VBpsNGXcyoqy/qOitrc/Zkd3sm39TQYMVbBh+252ZXrS
+xcHsEQR7T2CAougV48fzKOo0w7Qggay00lXWx0BRjY5fHuLY4tH4WF9g2XAvAmadJo/CI1Oo1KgU
+AAqUalVljrLS56ViSkyt1Giz88uezUqfR3YemFqayoejEEKI54J834lHQl9whbjrx4lJSyEfuJ1+
+khPJMZzPLaiwbFVoHLvS4uYfrI7JRE8u58J/J8m5B64WYNakC62u/cTs4+0YOrwHV2bPIs4xkGbG
+PClskApVLXPyL15DV6ymqWrki1VGBCnxWUAe2bs2kevwAhoNqJr6Y3HtD1IT80CbzI31p+5pkS6L
+wtafBh2SSfxuG7n5gD6LnGMnyX8AuUH28bm8NfYrDho7Nl6XSUJ8KlbNOxMydgpTh9Tl0rGr5AEa
+t564Jm9aAeVRAAAgAElEQVTnSJoO9OlEhZ8qMX6/PBZuAbgm/ZeNF3JBe4XtK44ZVa6QEnNrNZlJ
+N7n37jLH0bcBKUcSyCmraM4lDifXpr2r9T0pSKXPixBCCPEUkGRAPBLZydPo/rMX/hvWcBOIiQzE
+e1E7hp9JezDTcJaiqBXErO+6sfsVD1xd3en1ixszFoTSWA1YuhPQJJXkloE0d+rMC7kJ1OzRuuTM
+MlWixrzPWGpd+heHu/bk6CeHCiv2NTrR9BM/0scHcbBbD06sbUqTaf0xV4HCvi/O7zTg2tAeHOo/
+nhRzO1QqI1rPVbWw/Xw+DQt+IMrfl32+3YlZfBLtAziZeRc38NPCNZwuNjZen76DCd198Qv6kIM3
+Ininpy8de33IoSxAm0bktL60cnbB1bk5Idt9mfahN1aAuuErfDepGt/0C6D/wNf4/ooZpoqK96lq
++ArzPmvCou7NaN5uML9b2GOhVqOsKBYALGkzdgQ28wJo296HHv88UCyRMKFhUAh2h9dwqoyZR/PO
+h7NDEchLbhqjzosQQgjxtFPo9fr7+mYLCwsjODj4QcXzzIuKisLNze1xhyEMyiN2ui9/u/I9R4yc
+WrTq9OizC1BoTIBsbn/7EqfyZtBmovvDzdTz4gqnFp17lLneD/cIq0ZPflY+agtTFGRx7OOOhOb+
+zMEZnpjf7651yaweEsj/QrewOKhmscQrk30TOjHFLYwNoxxk3mUhhBBPvdjYWDw9PUu8VrruLt93
+QtxDgYlNLXT/GUPAKyP4afFoXB7IjDxlySZz1t+Ji8xCqdJB/V44T3d7qImA9nIYo1/9N3tv2jLA
+7EmdTD+Lox8H8vrm25iptdD4Jb7/yeP+EwEApR29v5xK0tZEbutr/vWgeu5VEh0nMneYJAJCCCGe
+H9Iz8IhJz4AQQgghhHgUjOkZkGcGhBBCCCGEeE5JMiCEEEIIIcRzSpIBIYQQQgghnlOSDAhRTMGl
+FYzo4kMreyvcJ0eXPRe9ID28L/U6r+B6iSeOtFxeMYgOb0eQ/lhn38wjdron1vat6Rz6A/F5Jbfq
+ExbxZ5+Z3M4t9tqV9cQNGcgRn5Yc+jr2wUx3q7vOtZfciWzZjn3t2xf++I8kOblwMQh9xnGS3h/I
+AV9f9vn4cmT8b+TkG44TQHv5d0Z3b09j6yaMPVDG/KjlyY7mU/8ezC59QoQQQjzXZNIMIYpRN3yZ
+hdv7E/XP9gx73ME8bTIPMXPGdUJX+z6ANRvul5oGId+zwcipYRX2vXFd3p3Mf/cn7oHGYYHVJxvx
+HGBXcu0IbQqp/xpFosXHeO7siZn6NtnHLqAyonlGVW8A8zY0L5watjKhaFowcqwFXads5tVlvan5
+2K+REEKIJ4H0DIhnUvrWoTT1mcXZopZW3bUVBDkEseKqDgoSCRvtT3MXZ1xcnHH3H83S09kV7zRj
+B0Mc/FhwqbBlN3NnKE1855NQAKAlZdd0Qto60dSpKY4tevHx1utFqwlrubZxIt2aOeHq3gzXZv58
+cNCI9XQNxZmxlUH1HOkzqC+9gjrj5T2Ib4/eQl/RNkNxZuxgSEMXXho1lJBevri59OCzg3fKZRP3
+8zDaNmqEi4c/b/x68Z7VfdP3zmFdjVD6NapEG8ON1XSr48OHkwfT1d8PL08fxu3OBPJJWvMeXVwd
+cGzqgJPvKJbF50DWQd5y9+Hbs8Wa0NGRvLInjoPCSTfmPVUWmNpaU/HKbncUkLt1Ose6+XOgkz8H
+gydx7cKd5vpcsv77Pkd8/TgYNJiT7/Vi3yvrya+gZ0SfGkniPjsajO2GmSmgtETTujkmqvuJsyJK
+7LoOp+neOYRfexhL/QkhhHgaSTIgnkk23q/TNW05v5/LA7Rc3bSQ2DYj6VZHCcoa+Lz3G0fi4jl9
+Opatoy7y0YQNpYa8VI4ueQ1j3oigy7Jozpw5w4lf/Ah/czL7M4H8i6z8ZCW1vj5M7MlTxB4NY5yr
+ETPmVxRn/i1UIQtYu3E7m6ZaMPeNecTlGd5mME6AglvoAr5k5YZItryfxfxpu0gHChKWMerDcwzf
+Es/po7/QJz2R2yXOVxZxG45g0bEdtsUrtMbIPMkeu49YH7GHg0d38nl7S/SpG3lrzGZ8lx7n7NlT
+rA05zvsjl5Jg6kJP9xR2xGSgy7lC9LEEbuuzOb3lIg6BHlgZ8XaK+i/RfPEbWBq7dsTNnZz9KJJq
+X4XjvXMzzYPiOPev1eRqQX9lLfFfJVB36Xa81s2mdk5yUQL413nJ/KQ3+3182Ofjw8ExG8nTgf7a
+SfLUjbCoWf7JqnScRlDYeNDNLoY10ZkV/7IQQojnggwTEs8m6zaMCMhgzG9nePv/rNm46DzeEzth
+qwD0WlJ3TuON0AOkoIKMM6SYdOFGAdQ2qdrb3T6+il1JZ0l8sw9hCkCXTZZpAy5n6sCyFm06VmfG
+5JG8F9UZn46BBHW0M2KvBuIE0LgS5FMbFQqqtw2iUcICTmRMxM20/G31DcYJmDWhc5uaKFFRw6kx
+JimXuaUFZdxm4usPJMjRDNT16RnqidXc4rHmcz3+JlYDa1LpU2jpyYgQVzQASnOszSDr2DZO2ATy
+oYc1CsC590Dqz9xEbNbfaRXQkHPb4rlu+R1BQ9KZfWQG8Ses6TShFpXNQ4yhvbiXTOsXaeRmCYBF
+lwDMfowkK/tlVGcjyKoTSM3GZqCqRY0+zVAtKV7aAquP198zTOixtcurbWlcPY9fE26hpdpDOV9C
+CCGeLpIMiGeUJR6v9SXvtRWcCK7LkiR/pvpVRwHknprF8GmpTIqIJLiBKZkRw2k5QUuFy+8pQKHg
+7u/pdQUlKnWmLqNYsuldnO+pDVej45eHOBazh507/2DZcC9mj9vNtvEuGGr0rTBOvZYCbdF/dAXo
+9Hp0RmwrN84MACVqZVG1VaEsPFg9KFCgUKlRKQpPhFKtKjV6RYmplRptdj6V7mBRabDRlNFJqSjr
+Pypqe/tjtmQn25Q3GTBUyYbtu7mS6cm/HMwq+85VVHrcjqLoFePH8yjqNMO04BBZaVpqWD7CKrk+
+j+w8MLU0lW5hIYQQgAwTEs8wjfurDFCs4vPPfySl+z9oX63wdV1WKjnVnHGtbQr6dA6v2sW1EgPg
+VVjV0XDzTBI5xWu26po0tkoj9moukMP5yGhuFGUDli1DaH/1Z+ZHpBQOE8lPJToyilQtoMskIT4V
+q+adCRk7halD6nLp2FWKz+mSfXwub439ioO3/nrDCuPMPsmq8AvkUcCVbSs5U78rLaspDG4zGKeh
+c+nWE9fk7RxJ04E+najwU5R86sEcR98GpBxJeCAzMGkcu9Li5h+sjslETy7nwn8nybkHrhZg1qQL
+ra79xOzj7Rg6vAdXZs8izjGQZsY8KWyQClUtc/IvXvsrqQJUjXyxyoggJT4LyCN71yZyHV5AowFV
+U38srv1BamIeaJO5sf4UFZxKABS2/jTokEzid9vIzQf0WeQcO0m+MYXvR84lDifXpr2r9YN7FEEI
+IcRTTXoGxLPLzJXBL5vTesot3oxsfXc8uabVeCZ7DWFY9yM42mmwqtGEWiVayU1oMngyfVeMxMPF
+mnoBP7B1zgtYmbvz+vtt6TfkRQJaudLIxhrLonRaadefBcvjGfOuNy4ZahT5SuoFTGV5B09sdWlE
+ThvIJ3vSUSgK0NfpwbQl3iXGt+dd3MBPC2/jNXkCXkUV+grjNLPHbscIvOYkkqbwYMKSNwuHCOWW
+v01hKE4Dp1Ld8BW+mxTJP/oFsNTeCpMCM0xL1CZNaBgUgt3CNZzKepH291kxV9QKYtZ3EQx9xQNX
+FOhqdGbGklAaqwFLdwKapLLDPZDmTla8kPtPrvdo/QBmMFJj3mcstdb/i8NdrTB98TNafdweVY1O
+NP3kILHjgzgIYNOBJjP7Y64C7Pvi/M5BYof24LJNPazc7FCpSvealEFVC9vP55P3ySdE+U9BhwJT
+nwk0/7d7yYeIH7C88+HsUAQy303z8N5ECCHEU0Wh11c4OMKgsLAwgoODH1Q8z7yoqCjc3Nwedxji
+aZexlUEtPqPvwe28Wkdp/LaHSZfM6iGB/C90C4uDaj7Gluc8Yqf78rcr33PEyKlFq06PPrsAhcYE
+yOb2ty9xKm8GbSa6P9xu17y4wqlF5x5lrrexR5jJvgmdmOIWxoZRDtISJIQQz4HY2Fg8PT1LvFa6
+7i7DhIQQD4bSjt5fTqX99dIzDT1qCkxsaqHbMoaAV+Zx+qGusZVN5qxQDvfqy9G+IcTFBeI80u2h
+frBqL4cxMiCUpTdtqWlWiZQr9yqJjhOZO0wSASGEEH+RnoFHTHoGhBBCCCHEoyA9A0IIIYQQQohy
+STIghBBCCCHEc0qSASGEEEIIIZ5Tkgw85QourWBEFx9a2VvhPjn6gczx/uTJ5cyKr/lPTFY527Vc
+XjGIDm9HkF78CZjsaD7178Hs+If6BKkQQgghxFNLkoGnnLrhyyzcvpMlw5pwz8K3z4zCZGDJyeyy
+V7fNPMTMGdcJHe9bcq55TQtGjrVgwZTNpD3W2W2EEEIIIZ5Mkgw8IdK3DqWpzyzO5hf+X3dtBUEO
+Qay4qoOCRMJG+9PcxRkXF2fc/Uez9HR2xTvN2MEQBz8WXCpc1jRzZyhNfOeTUACgJWXXdELaOtHU
+qSmOLXrx8dbrRaunarm2cSLdmjnh6t4M12b+fHDwdvnvc5ehcvkkrXmPLq4OODZ1wMl3FMvii/Vj
+3FhNtzo+fDh5MF39/fDy9GHc7kyyjk6lb8ceTNiRyoFJgfj5+hE06WCJ1W/T985hXY1Q+jUqPWGi
+Eruuw2m6dw7h13RGxC+EEEII8XyR6aafEDber9M17V1+Pzea911VXN20kNg2b9KtjhL0NfB57zeO
+fF8Hc4WWy8v74DdhAwHrg6ldxZWddMlrGPNGBF1+j2ZVMw3Z0dPpFDKZHkfm42d2kZWfrKTW18fY
+ElgdspNJyjOveKf55ZfTp27krTGb8V19nG1eak5905WuI5fiv+V17tbhM0+yx24RGyOaodHlkJFv
+hoXZR6zdPZ4/+rszc0g4W0NsSy1mlUXchiNYdPwA2zJWblXYeNDNLoY10ZkMqVutaidLCCGEEOIZ
+JT0DTwrrNowIyGDFb2fI015m46LzeL/eCVsFgJbUndMI9vPGx/cFBsw4TMqVC9woqPrb3T6+il1J
+Z1n6Zh+6de1Gn7fXk2WaxuVMHahr0aZjdSImj+S96fMIO5xFjWpl1LRLM1Au++w2TtgEMtDDGgUa
+nHsPpH7cJmKLPwZg6cmIEFc0AEpzrI1aUCmf6/E3sXKoWfYwKbUtjavnkZhwq6jXQwghhBBC3CE9
+A08MSzxe60veays4EVyXJUn+TPWrjgLIPTWL4dNSmRQRSXADUzIjhtNygpYKl4tTgELB3d/T6woo
+PljG1GUUSza9i/M9tehqdPzyEMdi9rBz5x8sG+7F7HG72TbeBVOD71d+uTvxlAiuNJUGG01l81Ml
+plZqtNn5ZT9PoM8jOw9MLU0l8xVCCCGEKEXqR08QjfurDFCs4vPPfySl+z9oXzSqRZeVSk41Z1xr
+m4I+ncOrdnGtRK+ACqs6Gm6eSSKneI1YXZPGVmnEXs0FcjgfGc2NomzAsmUI7a/+zPyIlMIW8/xU
+oiOjSNUCukwS4lOxat6ZkLFTmDqkLpeOXaX4nDzZx+fy1tivOHir2BsaKKdx7EqLm3+wOiYTPbmc
+C/+dJOceuFoYc2aUmFuryUy6yb2dIeY4+jYg5UhC2TMp5VzicHJt2rtal5V+CCGEEEI816Rn4Eli
+5srgl81pPeUWb0a2xqroZU2r8Uz2GsKw7kdwtNNgVaMJtUq05pvQZPBk+q4YiYeLNfUCfmDrnBew
+Mnfn9ffb0m/IiwS0cqWRjTWWRemf0q4/C5bHM+Zdb1wy1CjyldQLmMryDp7Y6tKInDaQT/ako1AU
+oK/Tg2lLvO/GA5B3cQM/LbyN1+QJeFUrqmZrDZSrFcSs7yIY+ooHrijQ1ejMjCWhNDbqDrSkzdgR
+2AwNoO1yW+p2ncXqGd5YFh17w6AQ7Bau4VTWi7QvlVzknQ9nhyKQ+W6aSl4MIYQQQohnn0Kvr3Cw
+iUFhYWEEBwc/qHieeVFRUbi5uT3uMJ4tumRWDwnkf6FbWBxUs1gPQCb7JnRiilsYG0Y5SOYrhBBC
+iOdKbGwsnp6eJV4rXXeXYULi6af8//buPS6qOn3g+GeY4Q5eADHyEiIwiIqGCQhGKmiCSv0SKDPS
+1lUrofSXWbtmmSvlb7tslq2tm5uu1qKxuboKeVe84yXFGwiaIChXU7kMDMzM7w9QAVEQUEd93q+X
+f3jOnHOeOWd8+TznPOf7dWTkp3PoX5BNae3StiKXbJcZLBgnhYAQQgghREMkRxIPBNNOI4geV2+h
+uSsRMa73JB4hhBBCiPuBPBkQQgghhBDiISXFgBBCCCGEEA8pKQbuA1Xn4pgwxI8+TjZ4zkppYAjN
+CjLiPuefx8sa2PpOHO9WGotFx/m4SAa8mcTlFr263tI4m0mTwp8Ch/FlurbxzwohhBBCGDkpBu4D
+qi4vsHjLNpaO69bwLLs1CfjSE5qGJ95q9ePdSiOxlOznk3kFRE31p20LB/5vWZzNZNmLSdFWLJq9
+gYutcbKFEEIIIe4hKQaMRfEmIh91YVRkGCNCB+PjG8kXh640mtyXHZpD2MBhTNtaxL6ZIQT4BxA6
+M5nSO3Q8qCRn9XSGqJ1x6e6Mq/9klqeXNzmWy7u/4r/to3im6/V31y9vepnufvM5XVn9d31eHKHO
+ocTl6qEqm/jXAunp7oa7uxuega+x7JSm0Sgp3spY5wAWndMBULItim7+35BVBaCjcPvHRPRzpbtr
+d1x6jeCDTQXVk6+hIy9hBsE9XFF79kDdI5B3kmt/AxMcg8bTffdXJObp6x9VCCGEEOK+IsWAMam8
+gjJiEWsStrB+jhULXl1IWiPdKFbe77Nm5wb+Mtge39hEdu3eRUKsT82EXK1/PENRAm+8vgH/ZUc5
+ffokayKO8vakZWRVNSWWMtLWHcRq4BPYK6/vs63vRIIufs9PZ7SAjtz1i0n1nkRwRxMwaY/f9B85
+mJbOqVOpbJqcyfvT1lHQgrvy+vzVvP5qEkOWp5CRkcGxHwJInDKLvSVAZSYrPlyBw+cHSD1xktRD
+8cSoLepsr2jrRbDjcVanlDQ/CCGEEEIIIyDFgDGxVBPq1wElCtr1C6Vr1kaOFd/BXpRmHE9zejPH
+2oYw2ssWBZa4jRxNp7T1pDbpdYVKCtIvYeNsV7etx9abCcOLifsxA63uPAnf/YrvxEHYKwB0FG2L
+JTzAFz//J3lu3gEKL5zlt6rmfmkoPbqS7TmnWTZlFMFBwYx6cy1lZhc5X6IHlQPeA9uRNGsS0z9e
+SPyBMtq3Udbdgcqex9ppyc66UvM0QQghhBDi/iTzDBgTg44qXU0yrq9CbzCgv5N96c09nuKmf2mE
+CWY2KnSaynrtSNZ4vRKG9pU4joU/wtKcQOYEtEMBVJycz/jYImYm7SC8sxklSePpPU1Ho/NmK0Ch
+4NrnDPoqajf1mLlPZun6t3C74WWDNgz8dD9Hju9i27afWT7ehy9jdrJ5qjtmVz9i0KLRgpm1mVTT
+QgghhLivSS5jTDQnWJl4Fi1VXNi8goxOQfRuczXZVmLT0ZJLGTmU35AIm2Bhq6Ik5xIN3TDXHF3A
+G9GfkXyl3obNOJ6lSxC9Lv3MquMlGKjgTOJP5LgNQ23VlFgscPHvTOHBrBtG/rH0fInnFCv56KO/
+Uzj09/RvU71cX1ZEeRs31B3MwHCZAyu3k1dnxzc5Lyo7HrO5SGpuBVDOrztS+K2mGrDuHUH/3H/w
+TVJh9Z39yiJSdhymSAfoS8hKL8Km52AiomczZ+wjnDuSS53uqfJzHMjvQH+17W2VQkIIIYQQxkaK
+AWNi7oTj1gn4eKrxm2vCtEVT8Lh2O9qUbmNmEXZmEl7unvjF7OB6x7o13tETaLtwOP36+zHs3X11
+XtrVZq7j28WrOaWpVww043gKh1Dmfx3Mzhe9UKs9GfGDB/MWRfHYtWdMt4rFlC6hETgeWM3J+m1F
+5mrGvGDBuh+vMOKVx7GpWWzZZyqzfDYybugowiMmsqSiGw517ubf5LxYeDLx7X5sGfsUw8N/z4Is
+W6xrfu0mjs+y6Psozr3ji7tajat6INH/SqfMAOgusiM2jD5u7qjdehKxxZ/Y93yvxQOg/TWRrYoQ
+nvewbOyKCiGEEEIYNYXB0GjDxS3Fx8cTHh7eWvE88A4fPoyHh8eNK4o3EdlrLmHJW3ip412o0e72
+8a7S57NqbAj/idrIklC7+/DOegl7pg1itkc86yY7S5+dEEIIIYxWamoqffv2rbOsfu4uTwbE3WXi
+yMhP59C/IJvS+3Gc/opcsl1msGCcFAJCCCGEuP9JPmMsbINZmRn84B6vFtNOI4ged08O3XLmrkTE
+uN7rKIQQQgghWoU8GRBCCCGEEOIhJcWAEEIIIYQQDykpBoQQQgghhHhISTFwH6g6F8eEIX70cbLB
+c1bKDWP0QwUZcZ/zz+NNmgb4ntL/tp0/DHLD1dMLn8jFnKm8vs5QuIIhj44k4fLt7lXH+bhIBryZ
+xOV6LyUbsr7jl1GfUFrRnGh1VMSPJ/nVBCr1YLiwlrSxozno15v9n6fWmcSs2fQF5D3vyY7eT7Cn
+f//qP4GTyM+vntvYUHyUnLdHs8/fnz1+/hyc+iPllbfeZcvi1KJZu5i8dE1zv1Gruun106Twp8Bh
+fJmubXA7IYQQQjSNvEB8H1B1eYHFW57l8Lv9afi92+piYKlyHFE9rYx6uM7SX77hX1XvsPPo7+ms
+rLtO0W4oC9b2pZNNw9veVMl+PplXQNQqf9q25pfXnCRncR4OnwzG1ARwGon6+6GU/PlZ0lrxMGCF
+zYcJ9H3Ose610xVS9MfJZFt9QN9tT2OuKkVz5CzKRkp4RYvirC4Gck1G09HNiOdRsOzFpGgrgmZv
+4KXlI7Ez5h+9EEIIYcTkyYCxKN5E5KMujIoMY0ToYHx8I/ni0BUaG32z7NAcwgYOY9rWIvbNDCHA
+P4DQmcl1Jh1rmI68hBkE93BF7dkDdY9A3kmu2aoqm/jXAunp7oa7uxuega+x7FT1neLLm16mu998
+TtfcndbnxRHqHEpcrh7QUbj9YyL6udLdtTsuvUbwwaaC6ll+Nb8w95knGTolkbzD/8dzT/ozePwy
+zlYCaDjy8f/wpL8vPkPeYldJ7TjLObVkPN6du+Dm9RRjxvXBcXAcBbVOzOXdX/Hf9lE807WB2lZp
+hZm9LbdfIemp3LqAQscJdOrRlKS4iopNH3MkOJB9gwJJDp9J3tmrt7MrKPv32xz0DyA5dAwnpo9g
+z4trqWzk4hqKdpC9x5HO0cGYmwEm1lg+3hNT5a23a26cuuNfcSxyHKf3XKL4s1f4JTyCo58dqb5+
+t1K6ixMDBnEsZhJHf/cih56LIed4zUUs3cvJgCBOzJzOiQnh7A8aR+aRqxe4CeflptfPBMeg8XTf
+/RWJea3yjEYIIYR4KEkxYEwqr6CMWMSahC2sn2PFglcXktZIF4SV9/us2bmBvwy2xzc2kV27d5EQ
+64N1o8fKZMWHK3D4/ACpJ06SeiieGLVF9TqT9vhN/5GDaemcOpXKpsmZvD9tHQUGaOs7kaCL3/PT
+GS2gI3f9YlK9JxHc0QR9/mpefzWJIctTyMjI4NgPASROmcXeEsDycd5bvYMN84Nw8PkT63btZuuS
+KJxNASzp84dVJCXMxceqbpi6cz/w2szTjN+UTvovKxhddqF6puBrykhbdxCrgU9g30CSrOj0PD2X
+vIq12Y3rbkmbwfm/nab9lOGYNyX5vrSN0+/voM1nifhu20DP0DTO/HEVFTowXFhD+mdZPLJsCz7/
+/ZIO5fn1EuwySj4cyV4/P/b4+ZH8egJaPRjyTqBVdcXKrkXZf5PjVPaModfKpXQf0A7bt77j8fgf
+6f1WH5p09MoSFCGx9PrH93hNtSDnve8pu9rOpCuBp/5Aj8Ur8Jqo4fxf91FFU87Lra+foq0XwY7H
+WZ1ScuNKIYQQQjSJFAPGxFJNqF8HlCho1y+UrlkbOVZ8h2bmUjngPbAdSbMmMf3jhcQfKKN9m6tp
+n46ibbGEB/ji5/8kz807QOGFs/xWBdh6M2F4MXE/ZqDVnSfhu1/xnTgIewWUHl3J9pzTLJsyiuCg
+YEa9uZYys4ucL2n+nduy1J9J6xTOiO4WoOzIkDF96hU6lRSkX8LG2Q7TZh+lPgNVu78hzyKKzt5N
+61nSZe6mxPYpOnhYAxZYDRmO+a87KNOA7nQSZR1DsHvMHJQOtB/Vo16CbYXNB2vx27uXAXv34vPX
+UMzu0L/MW8XZIhbdsHvcDgUKVL0HY35+J6VXZ5Uz60K7nu1QoETl3AmTi/nodE05L41Q2fNYOy3Z
+WVcaf3ohhBBCiAbJOwPGxKCjSleTQOmr0BsM6O/ULL2KNgz8dD9Hju9i27afWT7ehy9jdrJ5qjuG
+k/MZH1vEzKQdhHc2oyRpPL2n6TAYAKzxeiUM7StxHAt/hKU5gcwJaHeti8PMfTJL17+FW+tl5oCi
+Zv8N9fqYYGajQqepbLSlqsmqssj7+ghtYj7Astn/QurHeqvvcJM9dOyBWdV+yi7qaG/dik8H6sXV
+Kgx6DFczcn0VBoOB6xdEgeJqcaMwoeaHdH1dc+MwaNFowczaTO5qCCGEEM0k/4caE80JViaeRUsV
+FzavIKNTEL3bXE2SlNh0tORSRg7lN2S9JljYqijJuURVQ7s9uoA3oj8j+UqtDfUlZKUXYdNzMBHR
+s5kz9hHOHclFC+jLiihv44a6gxkYLnNg5Xbyau3Y0vMlnlOs5KOP/k7h0N/Tv031cuveEfTP/Qff
+JBVW36mtLCJlx2GKWnDb1spjOOqcf5NwtgJ0F9gSd6Te+xAWuPh3pvBgVgOjLN2aPu2fZMz+luKS
+2iq1Iy4AAAk0SURBVCfUQNXBv3O+cjRd/No2sJUSpYMFlZl5dQo1ZVd/bIqTKEwvA7Rotq+nwvlJ
+LC1B2T0Qq7yfKcrWgi6f39aebNKdbIV9IJ0H5JP99WYqKgFDGeVHTlDZpPN5+3FWM8HEWoU+r+H3
+VRo+Z0BFOvnbs9GjQ7t7HZpH/LG2uXWC39zzck35OQ7kd6C/2taoX5oXQgghjJkUA8bE3AnHrRPw
+8VTjN9eEaYum4HGtV9qUbmNmEXZmEl7unvjF7OB6p7Q13tETaLtwOP36+zHs3X11EmZt5jq+Xbya
+U5paCZzuIjtiw+jj5o7arScRW/yJfc8XG8Cyz1Rm+Wxk3NBRhEdMZElFNxxq3+k3VzPmBQvW/XiF
+Ea88ztVGGhPHZ1n0fRTn3vHFXa3GVT2Q6H+l1+vxv5Hh8lamDfUnIPQ9kn9L4n+f9mfgiPfYXwbK
+Li+ycG43vhvag55PjOEnKyesVKpaP1xTuoRG4HhgNSdvc2RVfc5WLqzcRFnt6kp3gYKvd2H96vNY
+NfiegQqLUdE4nPsjB4Ke5tCH+6sT2PaD6P5hAJenhpIcPIxja7rTLfZZLJSgcArD7X87k/fyMPY/
+O5VCC0eUSmXjCazSAfuPvqFL1d84HOjPHv+hHF9yAl2Tuq5uP85qlthERaL8fjwHnx1Nyp8P10nQ
+GzxnAGaOmO19l1+eDuLQAgVdYqOwauTpULPPSw3tr4lsVYTwvIcRj3okhBBCGDmFwWBoUXdFfHw8
+4eHhrRXPA+/w4cN4eHjcuKJ4E5G95hKWvIWXOkqNdp2ByrJKVFZmKCjjyAcDiar4B8nz+mJx9SP6
+fFaNDeE/URtZEmrXorvEupR5/PKuCR7/noFNq+WYBgyaKhSWpoCG0i+e56R2Ht4zPB+Marx0FyeG
+f439quV0dLidb9SS81LCnmmDmO0Rz7rJztLvKIQQQjQgNTWVvn371llWP3eX/0OFkSvj0AchTNxQ
+irlKB489z1+/9bpeCACYODLy0znkbMqm1GBHI90pt6T0epcnEloac30aSub/jrQdZZgo9dBpBG4f
+ezwYhUCLtOC8VOSS7TKDBeOkEBBCCCFaQp4M3GU3fTIgRFNVZlO0cjPlDbwgonAcgONwd1TSRC+E
+EEI89OTJgBAPItPO2I9teC5qIYQQQojbIZ0KQgghhBBCPKSkGLgPVJ2LY8IQP/o42eA5K6WBITQr
+yIj7nH8ev83hdG6meCtjnQNYdO5OTOWk43xcJAPeTOJy7QY1TQp/ChzGl+mNTLkshBBCCCFajRQD
+9wFVlxdYvGUbS8d1u8ksu9XFwNITmtabeOtOKdnPJ/MKiJrqT9vafe2WvZgUbcWi2Ru4aPRfQggh
+hBDiwSDFgLEo3kTkoy6MigxjROhgfHwj+eJQwxM/1VZ2aA5hA4cxbWsR+2aGEOAfQOjM5HoTczVE
+R17CDIJ7uKL27IG6RyDvJNfaynCF5E8iGeDhhF2XYOYmX42lkpzV0xmidsaluzOu/pNZnl4OZcm8
+4enHF6crax1DT/6Kp3GJTORyzZLLu7/iv+2jeKZr/ddVTHAMGk/33V+RmNekgfSFEEIIIUQLSTFg
+TCqvoIxYxJqELayfY8WCVxeS1kjXjJX3+6zZuYG/DLbHNzaRXbt3kRDrg3Wjx8pkxYcrcPj8AKkn
+TpJ6KJ4Yda0BOyuyyez1CdtP/srOmN/4OnY7lwFDUQJvvL4B/2VHOX36JGsijvL2pGVkmbnztGch
+W48Xoy+/QMqRLEoNGk5tzMQ5xKtmYrIy0tYdxGrgE9grbwxJ0daLYMfjrE4puXGlEEIIIYRodVIM
+GBNLNaF+HVCioF2/ULpmbeRY8R3qmVE54D2wHUmzJjH944XEHyijfZtaGbqFmmeGdsVMYUGXJ9SY
+F57nig40pzdzrG0Io71sUWCJ28jRdEpbT2qZLX2Gd+HM5nQKdr1D6NMxrD+fxa5jtgzycaB6z5UU
+pF/Cxtmu4XYnlT2PtdOSnXWFO/G2ghBCCCGEqEuKAWNi0FGlq0n+9VXoDQb0d6p/XtGGgZ/u58iS
+1/CzPcvy8T4Mn3+K6w8ilJjVDFavMFGiMBi41rNUZwx7xbXPd/ANxPzQNjavvcRzLytYt2Un20v6
+MsTZvOYzJpjZqNBpKhtufzJo0WjBzNpMfphCCCGEEHeB5FzGRHOClYln0VLFhc0ryOgURO8215Nt
+m46WXMrIofyGTNoEC1sVJTmXaGAeKjRHF/BG9GckX6m1ob6ErPQibHoOJiJ6NnPGPsK5I7k0NpaP
+pUsQvS79zKrjJRio4EziT+S4DUNtBebdhtAn71u+PPoEL48fxoUv55PmEkIPq6tbW+Di35nCg1kN
+jIgElJ/jQH4H+qttkTmzhBBCCCHuPCkGjIm5E45bJ+DjqcZvrgnTFk3Bw+zqSlO6jZlF2JlJeLl7
+4hezg+ud9dZ4R0+g7cLh9Ovvx7B399V5gVibuY5vF6/mlKZWMaC7yI7YMPq4uaN260nEFn9i3/Ot
+6e2/OYVDKPO/Dmbni16o1Z6M+MGDeYuieEwFWHsyvFsR+b1D6Ok6mCcrsrAb9nitUYNM6RIageOB
+1ZxsYBRU7a+JbFWE8LyH5e2eOSGEEEII0QwKg8HQokaU+lMai1s7fPgwHh4eN64o3kRkr7mEJW/h
+pY4PcI2mz2fV2BD+E7WRJaF2tZ4AlLBn2iBme8SzbrKzTI0thBBCCNFCqamp9O3bt86y+rn7A5x1
+CqNk4sjIT+fQvyCb0tplaEUu2S4zWDBOCgEhhBBCiLtF8i5jYRvMyszgex3FXWHaaQTR4+otNHcl
+Isb1nsQjhBBCCPGwkicDQgghhBBCPKSkGBBCCCGEEOIhJcWAEEIIIYQQDykpBu4ylUpFVVVDswEI
+IYQQQgjROsrLy7GwsGj0c1IM3GV2dnZcuHCBysrKex2KEEIIIYR4AFVUVJCbm0uHDh0a/ayMJnSX
+OTk5kZubS2ZmpjwhEEIIIYQQrc7U1JSOHTtib2/f6GelGLjLFAoFTk5OODk53etQhBBCCCHEQ07a
+hIQQQgghhHhIqQ4ePNiiHXTr1o2W7kPcP1QqFUqlkkcffRQ7O7t7HY4QQgghhGgBlbe3972OQdxH
+dDodxcXFZGVlYWZmho2Nzb0OSQghhBBCNNP/A9rHjJzONPeVAAAAAElFTkSuQmCC
+"
+         id="image5832"
+         x="94.614235"
+         y="432.10336" />
+      <rect
+         ry="0"
+         y="447.40503"
+         x="308.0752"
+         height="183.98033"
+         width="17.45915"
+         id="rect5756"
+         style="fill:#aff1af;fill-opacity:1;stroke:#000000;stroke-width:0.53941393;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <rect
+         y="432.16876"
+         x="308.07794"
+         height="15.661235"
+         width="17.453625"
+         id="rect5760"
+         style="fill:#dfe5f9;fill-opacity:1;stroke:#000000;stroke-width:0.54565936;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <rect
+         y="431.65961"
+         x="94"
+         height="199.9816"
+         width="231.76151"
+         id="rect5577"
+         style="fill:none;stroke:#000000;stroke-width:1;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+    </g>
+  </g>
+  <g
+     inkscape:groupmode="layer"
+     id="layer2"
+     inkscape:label="Layer"
+     style="display:inline"
+     transform="translate(-88.499996,-276.23475)" />
+</svg>
diff --git a/doc/source/admin/mulled_containers.rst b/doc/source/admin/mulled_containers.rst
new file mode 100644
index 0000000..e0a5b91
--- /dev/null
+++ b/doc/source/admin/mulled_containers.rst
@@ -0,0 +1,128 @@
+================================
+Containers for Tool Dependencies
+================================
+
+Galaxy tools (also called wrappers) are able to use Conda packages
+(see more information in our `Galaxy Conda documentation`_) and Docker containers as dependency resolvers.
+The IUC_ recommends using Conda packages as the primary dependency resolver, mainly because Docker is not
+available on every (HPC) system. Conda, on the other hand, can be installed by Galaxy and maintained
+entirely in user space. Nevertheless, Docker (and containers in general) has some unique features, and
+there are many use cases in the Galaxy community that make containerized systems very appealing.
+
+Since 2014 Galaxy has supported running tools in Docker containers via a special `container annotation`_ inside the
+requirements field.
+
+.. code-block:: xml
+
+    <requirements>
+        <!-- Container based dependency handling -->
+        <container type="docker">busybox:ubuntu-14.04</container>
+        <!-- Conda based dependency handling -->
+        <requirement type="package" version="8.22">gnu_coreutils</requirement>
+    </requirements>
+
+
+This approach has two limitations that slowed down its adoption by tool developers.
+First, every tool needs to be annotated with a container name (as shown above), and this container needs
+to be created beforehand, usually manually. Second, a Galaxy tool aims to be deployable everywhere,
+independent of the underlying system, meaning that if Docker is not available Galaxy should fall back to Conda packages.
+This puts an additional burden on tool developers, who need to take care of two dependency resolvers. It can also cause
+a tool to produce different results depending on the resolver, because the Conda package and the Docker container are
+usually not created from the same recipe and may have been compiled differently, use different sources, and so on.
+
+Not an ideal solution, and something we wanted to solve.
+
+Here we demonstrate a solution that can create containers out of Conda packages automatically.
+It can be used either to support communities like BioContainers_ in creating containers
+before a Galaxy tool is deployed, or by Galaxy itself to create containers on demand and on the fly if one
+is not already available.
+
+
+Automatic build of Linux containers
+-----------------------------------
+
+We utilize `mulled <https://github.com/mulled/mulled>`_ with `involucro <https://github.com/involucro/involucro>`_
+in an automatic way. This is used, for example, to convert all packages in bioconda_ into Linux containers
+(Docker and rkt at the moment), which are made available at the `BioContainers Quay.io account`_.
+
+We have developed small utilities around this technology stack, which are currently included in galaxy-lib_.
+Here is a short introduction:
+
+Search for containers
+^^^^^^^^^^^^^^^^^^^^^
+
+This will search for containers in the biocontainers organisation.
+
+.. code-block:: bash
+
+   $ mulled-search -s vsearch -o biocontainers
+
+
+Build all packages from bioconda from the last 24h
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The BioConda community is building a container for every package they create with a command similar to this.
+
+.. code-block:: bash
+
+   $ mulled-build-channel --channel bioconda --namespace biocontainers \
+      --involucro-path ./involucro --recipes-dir ./bioconda-recipes --diff-hours 25 build
+
+
+Building Docker containers for local Conda packages
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Conda packages can be tested by creating a busybox-based container for a particular package in the following way.
+This also demonstrates how you can build a container locally and on the fly.
+
+  Note: we modified the samtools package to version 3.0 to make clear we are using a local version.
+
+1) build your recipe
+
+.. code-block:: bash
+   
+   $ conda build recipes/samtools
+
+2) index your local builds
+
+.. code-block:: bash
+   
+   $ conda index /home/bag/miniconda2/conda-bld/linux-64/
+
+
+3) build a container for your local package
+
+.. code-block:: bash
+   
+   $ mulled-build build-and-test 'samtools=3.0--0' \
+      --extra-channel file://home/bag/miniconda2/conda-bld/ --test 'samtools --help'
+
+The ``--0`` indicates the build version of the Conda package. It is recommended to specify this number, otherwise
+you will overwrite already existing images. For Python Conda packages this suffix might look like ``--py35_1``.
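+
+For instance, pinning such a Python build explicitly would follow the same pattern as above (the package name and version here are placeholders, not a real recipe):
+
+.. code-block:: bash
+
+   $ mulled-build build-and-test 'mypackage=1.0--py35_1' --test 'mypackage --help'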
+
+Build, test and push a conda-forge package to biocontainers
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+  Note: you need to have write access to the biocontainers repository.
+
+You can build packages from other Conda channels as well, not only from BioConda. ``pandoc`` is available from the
+conda-forge channel, and conda-forge is also enabled by default in Galaxy. To build ``pandoc`` and push it to biocontainers,
+you could do something along these lines:
+
+
+.. code-block:: bash
+
+   $ mulled-build build-and-test 'pandoc=1.17.2--0' --test 'pandoc --help' -n biocontainers
+
+.. code-block:: bash
+  
+   $ mulled-build push 'pandoc=1.17.2--0' --test 'pandoc --help' -n biocontainers
+
+
+.. _Galaxy Conda documentation: ./conda_faq.rst
+.. _IUC: https://wiki.galaxyproject.org/IUC
+.. _container annotation:  https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/catDocker.xml#L4
+.. _BioContainers: https://github.com/biocontainers
+.. _bioconda: https://github.com/bioconda/bioconda-recipes
+.. _BioContainers Quay.io account: https://quay.io/organization/biocontainers
+.. _galaxy-lib: https://github.com/galaxyproject/galaxy-lib
diff --git a/doc/source/admin/useful_scripts.rst b/doc/source/admin/useful_scripts.rst
new file mode 100644
index 0000000..5c17ddb
--- /dev/null
+++ b/doc/source/admin/useful_scripts.rst
@@ -0,0 +1,19 @@
+Useful Scripts and Administration Tricks
+========================================
+
+This page aims to ease the burden of administration with some easy-to-use scripts and documentation on what is available for admins to use.
+
+Uploading a directory into a Data Library
+-----------------------------------------
+
+Data libraries can really ease the use of Galaxy for your administrators and end users. They provide a form of shared folder from which users can copy datasets into their history.
+
+This script was developed to be as general as possible, allowing you to pipe the output of a much more complex ``find`` command to it, uploading all of the files into a data library:
+
+.. code-block:: console
+
+    $ find /path/to/sequencing-data/ -name '*.fastq' -or -name '*.fa' | python $GALAXY_ROOT/scripts/api/library_upload_dir.py
+
+``find`` has an extremely expressive command line for selecting specific files that are of interest to you. These will then be recursively uploaded into Galaxy, maintaining the folder hierarchy; this is a useful feature when moving legacy data into Galaxy. For a complete description of the options of this script, you can run ``python $GALAXY_ROOT/scripts/api/library_upload_dir.py --help``.
+
+This tool will not overwrite or re-upload already uploaded datasets. As a result, one can imagine running this on a cron job to keep an "incoming sequencing data" directory synced with a data library, as sketched below.
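+
+As a rough sketch of that idea (the schedule and the absolute script path are assumptions; adjust them for your installation), such a crontab entry might look like this:
+
+.. code-block:: text
+
+    # Hypothetical crontab entry: sync new sequencing data into a data
+    # library every night at 2am.
+    0 2 * * * find /path/to/sequencing-data/ -name '*.fastq' -or -name '*.fa' | python /srv/galaxy/scripts/api/library_upload_dir.py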
diff --git a/doc/source/admin/webhooks.rst b/doc/source/admin/webhooks.rst
new file mode 100644
index 0000000..5463fbb
--- /dev/null
+++ b/doc/source/admin/webhooks.rst
@@ -0,0 +1,186 @@
+Galaxy webhooks
+===============
+
+Tiny plugin interface to extend the Galaxy client.
+
+Galaxy webhooks provide a simple way of inserting icons, links, or other HTML elements into predefined locations.
+For this, Galaxy provides some entry points which can be used to extend the client with content. This content
+can consist of simple HTML, JS, or content generated dynamically by a Python function.
+
+  Please note that the webhooks interface is new and can change in the coming releases. Consider it beta, as we don't
+  make promises to keep the API stable at the moment.
+
+Plugin activation
+-----------------
+All webhooks that are included in the main Galaxy distribution are located in the ``config/plugins/webhooks/demo`` folder
+and are deactivated by default.
+To activate these demo webhooks, make sure this path is added to ``webhooks_dir`` in your ``galaxy.ini``; you can add as many
+webhook folders as you like, as a comma-separated list (sketched below).
+Webhooks support one additional layer of activation/deactivation via the ``activate: true`` setting in each webhook's config.
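+
+For example, assuming a hypothetical site-local folder ``/srv/galaxy/my_webhooks`` alongside the bundled demos, the setting might look like this:
+
+.. code-block:: ini
+
+   # galaxy.ini -- the second path is a made-up example
+   webhooks_dir = config/plugins/webhooks/demo,/srv/galaxy/my_webhooks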
+
+
+Entry points
+------------
+
+Currently there are four entry points (types) available:
+
+- tool (after tool execution)
+- workflow (after workflow execution)
+- masthead (at the top level masthead)
+- history-menu (inside History Panel menu)
+
+For each type there is an example provided:
+
+- Tool and workflow: A comic strip can be shown when a tool or a workflow is running. Right now PhD_ and XKCD_ comics are provided.
+
+  .. image:: images_webhooks/tool.png
+     :scale: 50 %
+
+  .. image:: images_webhooks/workflow.png
+     :scale: 50 %
+
+.. _PhD: http://phdcomics.com
+.. _XKCD: http://xkcd.com/
+
+- Additional functionality can be added to the top menu. Two dummy buttons are implemented to show the idea:
+
+  - A button that links to biostars
+
+    .. image:: images_webhooks/masthead.png
+       :scale: 50 %
+
+  - A button that shows a pop-up with information about a user.
+
+    .. image:: images_webhooks/masthead_trans_object.png
+       :scale: 50 %
+
+- The history menu can be extended. In this case we use two dummy entries 'History Menu Webhook Item 1' and 'History Menu Webhook Item 2'.
+
+  .. image:: images_webhooks/history-menu.png
+     :scale: 25 %
+
+Plugin structure
+----------------
+
+Each plugin has the following folder structure:
+
+.. code-block:: text
+
+   - plugin_name
+      - config
+         - plugin_name.yaml (mandatory)
+      - helper
+         - __init__.py (optional)
+      - static
+         - script.js (optional)
+         - styles.css (optional)
+
+
+config
+------
+
+The configuration file is just a .yml (or .yaml) file with a few options. The following options are mandatory:
+
+- **name** - must be the same as the plugin's root directory name
+- **type** (see Entry points) - can be combined with others
+- **activate** - *true* or *false* - whether to show the plugin on a page or not
+
+All other options can be anything used by the plugin and accessed later via *webhook.config['...']*.
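+
+For illustration, a minimal config for a hypothetical plugin named ``my_plugin`` might look like this (only the mandatory keys are shown; here ``type`` combines two entry points):
+
+.. code-block:: yaml
+
+   # config/my_plugin.yaml -- a hypothetical example
+   name: my_plugin
+   type:
+     - tool
+     - workflow
+   activate: true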
+
+
+helper/__init__.py
+------------------
+
+*__init__.py* has to have a **main()** function with the following (or similar) structure:
+
+.. code-block:: python
+
+   import logging
+   log = logging.getLogger(__name__)
+
+   def main(trans, webhook):
+       error = ''
+       data = {}
+       try:
+           # Third-party dependencies
+           try:
+               from bs4 import BeautifulSoup
+           except ImportError as e:
+               log.exception(e)
+               return {}
+           # some processing...
+       except Exception as e:
+           error = str(e)
+       return {'success': not error, 'error': error, 'data': data}
+
+As an example please take a look at the *phdcomics* example plugin: https://github.com/bgruening/galaxy/blob/feature/plugin-system/config/plugins/webhooks/phdcomics/helper/__init__.py
+
+
+static
+------
+
+The *static* folder contains only two files with the names specified above (otherwise they won't be read when Galaxy runs).
+
+- script.js - all JavaScript code (with all third-party dependencies) must be here
+- styles.css - all CSS styles, used by the plugin
+
+
+Plugin dependencies
+-------------------
+
+Some plugins might have additional dependencies that need to be installed into the Galaxy environment.
+For example, the PhD-Comic plugin requires the library beautifulsoup4. If these dependencies are not present,
+the plugin should deactivate itself and issue an error into the Galaxy log.
+
+To install these additional dependencies, do the following:
+
+.. code-block:: bash
+
+  . GALAXY_ROOT/.venv/bin/activate  # activate Galaxy's virtualenv
+  pip install beautifulsoup4        # install the requirements
+
+
+Issues
+------
+
+tool/workflow
+^^^^^^^^^^^^^
+
+If a tool or a workflow plugin has script.js and/or styles.css, the content of these files will be read as two strings, sent to the client, and appended to the DOM's ``<head>``.
+
+Such an approach is a possible bottleneck if the two files are big (however, this shouldn't ever happen because plugins are supposed to be small and simple).
+
+masthead
+^^^^^^^^
+
+Topbar buttons are hard coded, so they're rendered only after *make client*.
+
+The plugin system, by contrast, is entirely dynamic: all plugins are detected during Galaxy startup and their configs and statics are saved, so every plugin must be shown/rendered dynamically.
+
+I found a somewhat suboptimal way to add buttons to the topbar (masthead):
+
+.. code-block:: javascript
+
+  $(document).ready(function() {
+     Galaxy.page.masthead.collection.add({
+          id      : ... ,
+          icon    : ... ,
+          url     : ... ,
+          tooltip : ... ,
+          onclick : function() { ... }
+      });
+  });
+
+history-menu
+^^^^^^^^^^^^
+
+History Panel items are again hard coded, but in the current implementation they're rendered as HTML elements (so they're not even stored in a collection or any other object).
+
+To add new menu items, I do the following:
+
+.. code-block:: javascript
+
+  menu.push({
+    html : _l( ... ),
+    anon : true,
+    func : function() { ... }
+  });
+
+But in order to fetch all plugin menu items before rendering, I get them via the API in a synchronous manner. The problem is that the History Panel may now take a bit longer to load.
diff --git a/doc/source/api/guidelines.rst b/doc/source/api/guidelines.rst
new file mode 100644
index 0000000..b019c91
--- /dev/null
+++ b/doc/source/api/guidelines.rst
@@ -0,0 +1,70 @@
+API Design Guidelines
+=====================
+
+The following section outlines guidelines related to extending and/or modifying
+the Galaxy API. The Galaxy API has grown in an ad-hoc fashion over time through
+the work of many contributors, and so clients SHOULD NOT expect the API to
+conform to these guidelines - but developers contributing to the Galaxy API
+SHOULD follow these guidelines.
+
+    - API functionality should include docstring documentation for consumption
+      at docs.galaxyproject.org.
+    - Developers should familiarize themselves with the HTTP status code definitions
+      http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html. The API responses
+      should properly set the status code according to the result - in particular
+      2XX responses should be used for successful requests, 4XX for various
+      kinds of client errors, and 5XX for the errors on the server side.
+    - If there is an error processing some part of the request (one item in a list
+      for instance), the status code should be set to reflect the error and the
+      partial result may or may not be returned depending on the controller -
+      this behavior should be documented.
+    - API methods should throw a finite number of exceptions
+      (defined in :doc:`galaxy.exceptions`) and these should subclass
+      `MessageException` and not paste/wsgi HTTP exceptions. When possible,
+      the framework itself should be responsible for catching these exceptions,
+      setting the status code, and building an error response (a minimal
+      sketch follows this list).
+    - Error responses should not consist of plain text strings - they should be
+      dictionaries describing the error and containing the following::
+
+          {
+            "status_code": 400,
+            "err_code": 400007,
+            "err_msg": "Request contained invalid parameter, action could not be completed.",
+            "type": "error",
+            "extra_error_info": "Extra information."
+          }
+
+      Various error conditions (once a format has been chosen and framework to
+      enforce it in place) should be spelled out in this document.
+    - Backward compatibility is important and should be maintained when possible.
+      If changing behavior in a non-backward-compatible way, please ensure one
+      of the following holds - there is a strong reason to believe no consumers
+      depend on a behavior, the behavior is effectively broken, or the API
+      method being modified has not been part of a tagged dist release.
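+
+As a minimal sketch of the exception guideline above (the controller method and
+lookup helper here are hypothetical; only `galaxy.exceptions` is real), a
+handler might look like the following::
+
+    from galaxy import exceptions
+
+    def show(self, trans, id, **kwd):
+        # Hypothetical lookup helper; returns None when nothing matches.
+        dataset = self._get_dataset(trans, id)
+        if dataset is None:
+            # ObjectNotFound subclasses MessageException; the framework maps it
+            # to a 4XX response with a structured error body.
+            raise exceptions.ObjectNotFound("No dataset found with id '%s'." % id)
+        return dataset.to_dict()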
+
+The following bullet points represent good practices more than guidelines, please
+consider them when modifying the API.
+
+    - Functionality should not be copied and pasted between controllers -
+      consider refactoring functionality into associated classes or short of
+      that into Mixins (http://en.wikipedia.org/wiki/Composition_over_inheritance)
+      or into Managers (:doc:`galaxy.managers`).
+    - API additions are more permanent changes to Galaxy than many other potential
+      changes and so a second opinion on API changes should be sought.
+    - New API functionality should include functional tests. These functional
+      tests should be implemented in Python and placed in
+      `test/functional/api` (a bare-bones sketch follows this list).
+    - Changes to reflect modifications to the API should be pushed upstream to
+      the BioBlend project if possible.
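+
+For illustration, a bare-bones functional test might exercise an endpoint end
+to end. This is a sketch using plain `requests` against an assumed local
+instance, with the API key supplied via the environment; Galaxy's own test
+framework provides richer helpers::
+
+    import os
+
+    import requests
+
+    GALAXY_URL = os.environ.get("GALAXY_URL", "http://localhost:8080")
+    API_KEY = os.environ["GALAXY_API_KEY"]  # assumed to be set by the test runner
+
+    def test_histories_index():
+        # A successful index request should return 200 and a JSON list.
+        response = requests.get("%s/api/histories" % GALAXY_URL, params={"key": API_KEY})
+        assert response.status_code == 200
+        assert isinstance(response.json(), list)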
+
+Longer-term goals/notes:
+
+    - It would be advantageous to have a clearer separation of anonymous and
+      admin handling functionality.
+    - If, at some point in the future, functionality needs to be added that
+      breaks backward compatibility in a significant way for a component used by
+      the community, a "dev" variant of the API will be established, and
+      the community should be alerted and given a timeframe for when the old
+      behavior will be replaced with the new behavior.
+    - Consistent standards for range-based requests, batch requests, filtered
+      requests, etc... should be established and documented here.
diff --git a/doc/source/api/quickstart.rst b/doc/source/api/quickstart.rst
new file mode 100644
index 0000000..8e9043d
--- /dev/null
+++ b/doc/source/api/quickstart.rst
@@ -0,0 +1,185 @@
+Quickstart
+==========
+
+Log in as your user, navigate to the API Keys page in the User menu, and
+generate a new API key.  Make a note of the API key, and then pull up a
+terminal.  Now we'll use the display.py script in your galaxy/scripts/api
+directory for a short example::
+
+        % ./display.py my_key http://localhost:4096/api/histories
+        Collection Members
+        ------------------
+        #1: /api/histories/8c49be448cfe29bc
+          name: Unnamed history
+          id: 8c49be448cfe29bc
+        #2: /api/histories/33b43b4e7093c91f
+          name: output test
+          id: 33b43b4e7093c91f
+
+The result is a Collection of the histories of the user specified by the API
+key (you).  To look at the details of a particular history, say #1 above, do
+the following::
+
+        % ./display.py my_key http://localhost:4096/api/histories/8c49be448cfe29bc
+        Member Information
+        ------------------
+        state_details: {'ok': 1, 'failed_metadata': 0, 'upload': 0, 'discarded': 0, 'running': 0, 'setting_metadata': 0, 'error': 0, 'new': 0, 'queued': 0, 'empty': 0}
+        state: ok
+        contents_url: /api/histories/8c49be448cfe29bc/contents
+        id: 8c49be448cfe29bc
+        name: Unnamed history
+
+This gives detailed information about the specific member in question, in this
+case the History.  To view history contents, do the following::
+
+        % ./display.py my_key http://localhost:4096/api/histories/8c49be448cfe29bc/contents
+        Collection Members
+        ------------------
+        #1: /api/histories/8c49be448cfe29bc/contents/6f91353f3eb0fa4a
+          name: Pasted Entry
+          type: file
+          id: 6f91353f3eb0fa4a
+
+What we have here is another Collection of items containing all of the datasets
+in this particular history.  Finally, to view details of a particular dataset
+in this collection, execute the following::
+
+        % ./display.py my_key http://localhost:4096/api/histories/8c49be448cfe29bc/contents/6f91353f3eb0fa4a
+        Member Information
+        ------------------
+        misc_blurb: 1 line
+        name: Pasted Entry
+        data_type: txt
+        deleted: False
+        file_name: /Users/yoplait/work/galaxy-stock/database/files/000/dataset_82.dat
+        state: ok
+        download_url: /datasets/6f91353f3eb0fa4a/display?to_ext=txt
+        visible: True
+        genome_build: ?
+        model_class: HistoryDatasetAssociation
+        file_size: 17
+        metadata_data_lines: 1
+        id: 6f91353f3eb0fa4a
+        misc_info: uploaded txt file
+        metadata_dbkey: ?
+
+And now you've successfully used the API to request and select a history,
+browse the contents of that history, and then look at detailed information
+about a particular dataset.
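+
+The display.py script itself is only a thin wrapper around authenticated HTTP
+GET requests, so the same calls can be made directly; for example, with the
+Python ``requests`` library (a sketch - adjust the URL and key to your
+instance)::
+
+    import requests
+
+    # Equivalent of: ./display.py my_key http://localhost:4096/api/histories
+    histories = requests.get(
+        'http://localhost:4096/api/histories',
+        params={'key': 'my_key'},  # the API key authenticates the request
+    ).json()
+    for history in histories:
+        print history['name'], history['id']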
+
+For a more comprehensive Data Library example, set the following options in
+your galaxy.ini as well, and restart Galaxy::
+
+        admin_users = you at example.org
+        library_import_dir = /path/to/some/directory
+
+In the directory you specified for 'library_import_dir', create some
+subdirectories, and put (or symlink) files to import into Galaxy into those
+subdirectories.
+
+In Galaxy, create an account that matches the address you put in 'admin_users',
+then browse to that user's preferences and generate a new API Key.  Copy the
+key to your clipboard and then use these scripts::
+
+        % ./display.py my_key http://localhost:4096/api/libraries
+        Collection Members
+        ------------------
+
+        0 elements in collection
+
+        % ./library_create_library.py my_key http://localhost:4096/api/libraries api_test 'API Test Library'
+        Response
+        --------
+        /api/libraries/f3f73e481f432006
+          name: api_test
+          id: f3f73e481f432006
+
+        % ./display.py my_key http://localhost:4096/api/libraries
+        Collection Members
+        ------------------
+        /api/libraries/f3f73e481f432006
+          name: api_test
+          id: f3f73e481f432006
+
+        % ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006
+        Member Information
+        ------------------
+        synopsis: None
+        contents_url: /api/libraries/f3f73e481f432006/contents
+        description: API Test Library
+        name: api_test
+
+        % ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents
+        Collection Members
+        ------------------
+        /api/libraries/f3f73e481f432006/contents/28202595c0d2591f61ddda595d2c3670
+          name: /
+          type: folder
+          id: 28202595c0d2591f61ddda595d2c3670
+
+        % ./library_create_folder.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 28202595c0d2591f61ddda595d2c3670 api_test_folder1 'API Test Folder 1'
+        Response
+        --------
+        /api/libraries/f3f73e481f432006/contents/28202595c0d2591fa4f9089d2303fd89
+          name: api_test_folder1
+          id: 28202595c0d2591fa4f9089d2303fd89
+
+        % ./library_upload_from_import_dir.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 28202595c0d2591fa4f9089d2303fd89 bed bed hg19
+        Response
+        --------
+        /api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+          name: 2.bed
+          id: e9ef7fdb2db87d7b
+        /api/libraries/f3f73e481f432006/contents/3b7f6a31f80a5018
+          name: 3.bed
+          id: 3b7f6a31f80a5018
+
+        % ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 
+        Collection Members
+        ------------------
+        /api/libraries/f3f73e481f432006/contents/28202595c0d2591f61ddda595d2c3670
+          name: /
+          type: folder
+          id: 28202595c0d2591f61ddda595d2c3670
+        /api/libraries/f3f73e481f432006/contents/28202595c0d2591fa4f9089d2303fd89
+          name: /api_test_folder1
+          type: folder
+          id: 28202595c0d2591fa4f9089d2303fd89
+        /api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+          name: /api_test_folder1/2.bed
+          type: file
+          id: e9ef7fdb2db87d7b
+        /api/libraries/f3f73e481f432006/contents/3b7f6a31f80a5018
+          name: /api_test_folder1/3.bed
+          type: file
+          id: 3b7f6a31f80a5018
+
+        % ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+        Member Information
+        ------------------
+        misc_blurb: 68 regions
+        metadata_endCol: 3
+        data_type: bed
+        metadata_columns: 6
+        metadata_nameCol: 4
+        uploaded_by: nate at ...
+        metadata_strandCol: 6
+        name: 2.bed
+        genome_build: hg19
+        metadata_comment_lines: None
+        metadata_startCol: 2
+        metadata_chromCol: 1
+        file_size: 4272
+        metadata_data_lines: 68
+        message:
+        metadata_dbkey: hg19
+        misc_info: uploaded bed file
+        date_uploaded: 2010-06-22T17:01:51.266119
+        metadata_column_types: str, int, int, str, int, str
+
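+The helper scripts used above similarly map onto single HTTP calls; for
+example, the library-creation step is equivalent to one authenticated POST
+(again a sketch with the Python ``requests`` library)::
+
+    import requests
+
+    response = requests.post(
+        'http://localhost:4096/api/libraries',
+        params={'key': 'my_key'},
+        json={'name': 'api_test', 'description': 'API Test Library'},
+    )
+    print response.json()
+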
+Other parameters are also valid when uploading; they are the same parameters
+used in the web form, such as 'link_data_only'.
+
+The request and response formats should be considered alpha and are subject to change.
+
diff --git a/doc/source/api_doc.rst b/doc/source/api_doc.rst
new file mode 100644
index 0000000..b8acafa
--- /dev/null
+++ b/doc/source/api_doc.rst
@@ -0,0 +1,29 @@
+Galaxy API Documentation
+========================
+
+In addition to being accessible through a web interface, Galaxy can also be
+accessed programmatically, through shell scripts and other programs. The web
+interface is appropriate for things like exploratory analysis, visualization,
+construction of workflows, and rerunning workflows on new datasets.
+
+The web interface is less suitable for things like:
+    - Connecting a Galaxy instance directly to your sequencer and running
+      workflows whenever data is ready.
+    - Running a workflow against multiple datasets (which can be done with the
+      web interface, but is tedious).
+    - Analyses that involve complex control flow, such as looping and
+      branching.
+
+The Galaxy API addresses these and other situations by exposing Galaxy
+internals programmatically through an additional interface, the
+Application Programming Interface (API).
+
+
+.. toctree::
+   :maxdepth: 3
+
+    Galaxy API Quickstart <api/quickstart>
+
+    Galaxy API Guidelines <api/guidelines>
+
+    Galaxy API Documentation <lib/galaxy.webapps.galaxy.api>
diff --git a/doc/source/conf.py b/doc/source/conf.py
new file mode 100644
index 0000000..c861f39
--- /dev/null
+++ b/doc/source/conf.py
@@ -0,0 +1,294 @@
+# -*- coding: utf-8 -*-
+#
+# Galaxy documentation build configuration file, created by
+# sphinx-quickstart on Tue Mar  6 10:44:44 2012.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import datetime
+import os
+import sys
+
+# Library used to parse Markdown (.md) files
+from recommonmark.parser import CommonMarkParser
+
+source_parsers = {
+    '.md': CommonMarkParser,
+}
+
+# REQUIRED GALAXY INCLUDES
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# Configure default autodoc's action
+autodoc_default_flags = ['members', 'undoc-members', 'show-inheritance']
+
+
+def dont_skip_init(app, what, name, obj, skip, options):
+    if name == "__init__":
+        return False
+    return skip
+
+
+def setup(app):
+    app.connect("autodoc-skip-member", dont_skip_init)
+
+
+# The suffix of source filenames.
+source_suffix = ['.rst', '.md']
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Galaxy Code'
+copyright = str(datetime.datetime.now().year) + u', Galaxy Team'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+# version = '1'
+# The full version, including alpha/beta/rc tags.
+# release = '1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Galaxydoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+    ('index', 'Galaxy.tex', u'Galaxy Code Documentation',
+     u'Galaxy Team', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'galaxy', u'Galaxy Code Documentation',
+     [u'Galaxy Team'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    ('index', 'Galaxy', u'Galaxy Code Documentation',
+     u'Galaxy Team', 'Galaxy', 'Data intensive biology for everyone.',
+     'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+
+# -- ReadTheDocs.org Settings ------------------------------------------------
+# Mock out modules that cannot be installed on ReadTheDocs so that autodoc
+# can still import Galaxy code that depends on them.
+class Mock(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __call__(self, *args, **kwargs):
+        return Mock()
+
+    @classmethod
+    def __getattr__(cls, name):
+        if name in ('__file__', '__path__'):
+            return '/dev/null'
+        elif name[0] == name[0].upper():
+            mockType = type(name, (), {})
+            mockType.__module__ = __name__
+            return mockType
+        else:
+            return Mock()
+
+# adding pbs_python, DRMAA_python, markupsafe, and drmaa here had no effect.
+MOCK_MODULES = ['tables', 'decorator']
+for mod_name in MOCK_MODULES:
+    sys.modules[mod_name] = Mock()
diff --git a/doc/source/dev/build_a_job_runner.rst b/doc/source/dev/build_a_job_runner.rst
new file mode 100644
index 0000000..9e8577c
--- /dev/null
+++ b/doc/source/dev/build_a_job_runner.rst
@@ -0,0 +1,350 @@
+Build a job runner
+==================
+
+A walk through the steps of building a runner for Galaxy
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In this tutorial, we will build the runner block by block, dividing it
+into components based on their function.
+
+We assume you are familiar with setting up and managing a local installation of Galaxy.
+
+To learn more about the basics, please refer to:
+https://wiki.galaxyproject.org/Admin/GetGalaxy
+
+To explore existing runners, please refer to:
+https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/jobs/runners
+
+What is required to make a runner for Galaxy?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`galaxy.jobs.runners.\_\_init\_\_.py <https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/jobs/runners/__init__.py>`__
+has the base runner implementation. To create a new runner, inherit from
+that base runner; only certain methods need to be overridden with your
+logic.
+
+The following methods need to be implemented:
+
+1. \_\_init\_\_(app, nworkers, \*\*kwargs)
+
+2. queue\_job(job\_wrapper)
+
+3. check\_watched\_item(job\_state)
+
+4. stop\_job(job)
+
+5. recover(job, job\_wrapper)
+
+The big picture
+---------------
+
+The above methods are invoked at various stages of a job's execution in
+Galaxy. They act as mediators between the Galaxy framework and the
+external executor framework. To understand when and how these methods are
+invoked, we will look at the implementation of the parent class and the
+process lifecycle of the runner.
+
+Implementation of parent class (galaxy.jobs.runners.\_\_init\_\_.py)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+-  .. rubric:: Class Inheritance structure
+      :name: class-inheritance-structure
+
+   .. image:: inherit.png
+
+-  .. rubric:: The big picture!
+      :name: the-big-picture-1
+
+   .. image:: runner_diag.png
+
+The whole process is divided into different stages for ease of
+understanding.
+
+Runner Methods in detail
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+1. \_\_init\_\_ method - STAGE 1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Input params:
+
+1. app
+
+2. nworkers (Number of threads specified in job\_conf)
+
+3. \*\*kwargs (Variable length argument)
+
+Output params: NA
+
+The input params are read from job\_conf.xml and passed to the runner by
+the Galaxy framework. Both where jobs run and the external runner's
+configuration are specified in the job\_conf.xml file. More information
+about job\_conf.xml is available
+`here <https://wiki.galaxyproject.org/Admin/Config/Jobs>`__.
+
+Have a look at the sample job\_conf.xml:
+
+::
+
+    <job_conf>
+        <plugins>
+            <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner" workers="4"/>
+            <plugin id="godocker" type="runner" load="galaxy.jobs.runners.godocker:GodockerJobRunner">
+                <param id="user">gosc</param>
+                <param id="key">HELLOWORLD</param>
+            </plugin>
+        </plugins>
+        <handlers>
+            <handler id="main"/>
+        </handlers>
+        <destinations default="god">
+            <destination id="local" runner="local"/>
+            <destination id="god" runner="godocker">
+                <param id="docker_cpu">1</param>
+                <param id="docker_memory">2</param>
+            </destination>
+        </destinations>
+    </job_conf>
+
+The following steps are followed to manipulate the data in job\_conf.xml:
+
+A: Define the structure of the data under the plugin tag of
+job\_conf.xml as a dictionary.
+
+::
+
+    runner_param_specs = dict(user=dict(map=str), key=dict(map=str))
+
+B: Update the dictionary structure in kwargs.
+
+::
+
+    kwargs['runner_param_specs'].update(runner_param_specs)
+
+C: Now call the parent constructor to assign the values.
+
+::
+
+    super(GodockerJobRunner, self).__init__(app, nworkers, **kwargs)
+
+D: The assigned values can be accessed in the runner in the following way.
+
+::
+
+    print self.runner_params["user"]
+    print self.runner_params["key"]
+
+The output will be:
+
+::
+
+    gosc
+    HELLOWORLD
+
+E: Invoke the external API for initialization, using the values obtained
+above.
+
+Finally, the worker and monitor threads are started so that Galaxy can
+listen for incoming tool submissions.
+
+::
+
+    self._init_monitor_thread()
+    self._init_worker_threads()
+
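+Putting steps A through E together, the constructor of our hypothetical
+GodockerJobRunner could look like the following sketch (the external API
+initialization of step E is elided)::
+
+    def __init__(self, app, nworkers, **kwargs):
+        # Step A: declare the structure of the plugin parameters
+        runner_param_specs = dict(user=dict(map=str), key=dict(map=str))
+        # Step B: merge that structure into kwargs
+        if 'runner_param_specs' not in kwargs:
+            kwargs['runner_param_specs'] = dict()
+        kwargs['runner_param_specs'].update(runner_param_specs)
+        # Step C: the parent constructor assigns the values
+        super(GodockerJobRunner, self).__init__(app, nworkers, **kwargs)
+        # Step E: initialize your external API client here, using
+        # self.runner_params["user"] and self.runner_params["key"]
+        self._init_monitor_thread()
+        self._init_worker_threads()
+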
+2. queue\_job method - STAGE 2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Input params: job\_wrapper (Object of
+`galaxy.jobs.JobWrapper <https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/jobs/__init__.py#L743>`__)
+
+Output params: None
+
+galaxy.jobs.JobWrapper is a Wrapper around 'model.Job' with convenience
+methods for running processes and state management.
+
+-  Functioning of the queue\_job method.
+
+   A. The prepare\_job() method is invoked to perform sanity checks that all
+   runners' queue\_job() methods are likely to want, and to build the runner
+   command line for that job. The initial state and configuration of the job
+   are set, and all data is associated with **job\_wrapper**.
+
+B. Submit the job to the external runner and return the job id. Job data
+(the tool submission from the Galaxy web framework) is accessed purely
+through job\_wrapper, e.g. job\_wrapper.get\_state() gives the state of a
+job (queued/running/failed/success/...).
+
+Let us look at how to access the external runner's configuration
+present under the destination tag of job\_conf.xml in the above example.
+
+::
+
+    job_destination = job_wrapper.job_destination
+    docker_cpu = int(job_destination.params["docker_cpu"])
+    docker_ram = int(job_destination.params["docker_memory"])
+
+A special case (user story): a docker-based external runner is present, and a
+default docker image for execution is set in job\_conf.xml. A tool can
+also specify the docker image for its execution, and the tool's specification
+takes priority over the default. To achieve this functionality, we can use
+the following statement:
+
+::
+
+    docker_image = self._find_container(job_wrapper).container_id
+
+Note: This pre-written method is only for getting the external
+image/container/OS.
+
+C. After successful submission of the job to the external runner, submit the
+job to the Galaxy framework. To do that, make an object of
+AsynchronousJobState and put it in monitor\_queue.
+
+::
+
+    ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper, job_id=job_id, job_destination=job_destination)
+    self.monitor_queue.put(ajs)
+
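+Putting steps A through C together, a skeletal queue\_job could look like the
+following sketch (submit\_job\_to\_external\_runner is a hypothetical
+user-defined helper)::
+
+    def queue_job(self, job_wrapper):
+        # Step A: sanity checks and runner command-line construction
+        if not self.prepare_job(job_wrapper):
+            return
+
+        # Step B: submit the job to the external runner
+        job_destination = job_wrapper.job_destination
+        job_id = submit_job_to_external_runner(job_wrapper, job_destination.params)
+
+        # Step C: hand the job over to Galaxy's monitor queue
+        ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory,
+                                   job_wrapper=job_wrapper, job_id=job_id,
+                                   job_destination=job_destination)
+        self.monitor_queue.put(ajs)
+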
+3. check\_watched\_item method - STAGE 3
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Input params: job\_state (Object of
+`galaxy.jobs.runners.AsynchronousJobState <https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/jobs/runners/__init__.py#L400>`__)
+
+Output params: AsynchronousJobState object
+
+Without going into much detail, assume there is a queue to track the status of every job, e.g.:
+
+.. image:: queue.png
+    :align: center
+
+The Galaxy framework updates the status of a job by iterating through the
+queue. During the iteration, it calls the check\_watched\_item method with
+the job. Your responsibility is to get the job's execution status from the
+external runner and return the updated status, and also to copy the output
+files of completed jobs.
+
+Updated result after an iteration (after invocation of check\_watched\_item 6 times):
+
+.. image:: queue_b.png
+    :align: center
+
+
+Note: Iterating through the queue is already taken care of by the framework.
+
+To inform Galaxy about the status of the job:
+
+-  Get the job status from the external runner using the job\_id.
+
+-  Check if the job is queued/running/completed, etc. A general structure is provided below.
+
+-  Call self.mark\_as\_finished(job\_state), if the job has been successfully executed.
+
+-  Call self.mark\_as\_failed(job\_state), if the job has failed during execution.
+
+-  To change the state of a job, set job\_state.running and call job\_state.job\_wrapper.change\_state()
+
+::
+
+    def check_watched_item(self, job_state):
+        # User-defined helper: query the external runner for the job's status.
+        job_status = get_task_from_external_runner(job_state.job_id)
+        if job_status == "over_with_success":
+            job_state.running = False
+            job_state.job_wrapper.change_state(model.Job.states.OK)
+            create_log_file()  # user-defined helper
+            self.mark_as_finished(job_state)
+            return None
+
+        elif job_status == "running":
+            job_state.running = True
+            job_state.job_wrapper.change_state(model.Job.states.RUNNING)
+            return job_state
+
+        elif job_status == "pending":
+            return job_state
+
+        elif job_status == "over_with_error":
+            job_state.running = False
+            job_state.job_wrapper.change_state(model.Job.states.ERROR)
+            create_log_file()  # user-defined helper
+            self.mark_as_failed(job_state)
+            return None
+
+Note:
+
+-  get\_task\_from\_external\_runner and create\_log\_file are user-defined
+   helper methods.
+
+-  The return value is job\_state for running/pending jobs and None for all
+   other job states.
+
+create\_log\_file() simply copies the files (error\_file,
+output\_file, exit\_code\_file) from the external runner's directory to the
+working directory of Galaxy.
+
+The source of the files is the output directory of your external
+runner. The destinations of the files will be:
+
+-  output file -> job\_state.output\_file.
+
+-  error file -> job\_state.error\_file.
+
+-  exit code file -> job\_state.exit\_code\_file.
+
+4. stop\_job method - STAGE 4
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Input params: job (Object of
+`galaxy.model.Job <https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/model/__init__.py#L344>`__)
+
+Output params: None
+
+Functionality: Attempts to delete a dispatched executing job in the external
+runner.
+
+When a user requests to stop the execution of a job in the Galaxy framework,
+a call is made to the external runner to stop the job execution.
+
+The job\_id of the job to be deleted is accessed by
+
+::
+
+    job.id
+
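+A minimal stop\_job could therefore look like the following sketch
+(delete\_task\_from\_external\_runner is a hypothetical user-defined
+helper)::
+
+    def stop_job(self, job):
+        # Ask the external runner to cancel the task backing this Galaxy job
+        delete_task_from_external_runner(job.id)
+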
+5. recover method - STAGE 5
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Input params:
+
+-  job (Object of `galaxy.model.Job <https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/model/__init__.py#L344>`__).
+
+-  job\_wrapper (Object of `galaxy.jobs.JobWrapper <https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/jobs/__init__.py#L743>`__).
+
+
+Output params: None
+
+Functionality: Recovers jobs stuck in the queued/running state when
+Galaxy is restarted.
+
+This method is invoked by Galaxy at the time of startup. Jobs in Running
+& Queued status in Galaxy are put in the monitor\_queue by creating an
+AsynchronousJobState object.
+
+The following is a generic code snippet for the recover method.
+
+::
+
+    ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper)
+    ajs.job_id = str(job_wrapper.job_id)
+    ajs.job_destination = job_wrapper.job_destination
+    job_wrapper.command_line = job.command_line
+    ajs.job_wrapper = job_wrapper
+    if job.state == model.Job.states.RUNNING:
+        ajs.old_state = 'R'
+        ajs.running = True
+        self.monitor_queue.put(ajs)
+
+    elif job.state == model.Job.states.QUEUED:
+        ajs.old_state = 'Q'
+        ajs.running = False
+        self.monitor_queue.put(ajs)
diff --git a/doc/source/dev/faq.rst b/doc/source/dev/faq.rst
new file mode 100644
index 0000000..596084d
--- /dev/null
+++ b/doc/source/dev/faq.rst
@@ -0,0 +1,28 @@
+How Do I...
+===========
+
+This section contains a number of smaller topics with links and examples meant
+to provide relatively concrete answers for specific Galaxy development scenarios.
+
+... interact with the Galaxy database interactively?
+----------------------------------------------------
+
+This can be done with either IPython/Jupyter or a plain python console, depending on your preferences::
+
+    python -i scripts/db_shell.py
+
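+For example, assuming the script exposes an SQLAlchemy session named
+``sa_session`` together with the model classes (check ``scripts/db_shell.py``
+for the exact names), you could inspect the most recent job::
+
+    >>> sa_session.query(Job).order_by(Job.id.desc()).first()
+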
+... build the Galaxy JavaScript frontend client?
+------------------------------------------------
+
+We've added a makefile which will let you do this. You can simply run::
+
+    make client
+
+If you prefer Docker and aren't primarily a JS developer, you can run::
+
+    make grunt-docker
+
+Please see the ``Makefile`` itself for details and other options. There is also a readme at
+``client/README.md``.
+
+
diff --git a/doc/source/dev/index.rst b/doc/source/dev/index.rst
new file mode 100644
index 0000000..504b996
--- /dev/null
+++ b/doc/source/dev/index.rst
@@ -0,0 +1,18 @@
+Developer Documentation
+=======================
+
+.. toctree::
+  :maxdepth: 1
+
+  schema
+  interactive_environments
+  build_a_job_runner
+  faq
+
+Other primary areas of documentation that developers will be interested in:
+
+- `Codebase Documentation`_ for developers of Galaxy
+- `API Documentation`_ for developers of third party tools interacting with Galaxy
+
+.. _Codebase Documentation: ../lib/modules.html
+.. _API Documentation: ../api_doc.html
diff --git a/doc/source/dev/inherit.png b/doc/source/dev/inherit.png
new file mode 100644
index 0000000..671a8d4
Binary files /dev/null and b/doc/source/dev/inherit.png differ
diff --git a/doc/source/dev/interactive_environments.rst b/doc/source/dev/interactive_environments.rst
new file mode 100644
index 0000000..740bb2c
--- /dev/null
+++ b/doc/source/dev/interactive_environments.rst
@@ -0,0 +1,479 @@
+Interactive Environments in Detail (and How to Build Your Own)
+==============================================================
+
+Unfortunately building a GIE isn't completely straightforward, and it's
+certainly not as simple as picking out an existing container and plugging it
+in. Here we'll go through building a "Hello, World" GIE which just displays a file
+from a user's history.
+
+Directory Layout
+----------------
+
+The GIE directory layout is identical to that of normal visualization
+plugins, for those familiar with developing them:
+
+.. code-block:: console
+
+    $ tree $GALAXY_ROOT/config/plugins/interactive_environments/ipython/
+    config/plugins/interactive_environments/ipython/
+    ├── config
+    │   ├── ipython.ini
+    │   ├── ipython.ini.sample
+    │   └── ipython.xml
+    ├── static
+    │   └── js
+    │       └── ipython.js
+    └── templates
+        ├── ipython.mako
+        └── notebook.ipynb
+
+We'll use the variable ``{gie}`` to stand in for the name of your GIE. It
+should match ``[a-z]+``, like ``ipython`` or ``rstudio``. Here you can see the
+``config/`` directory with a ``{gie}.ini.sample`` providing docker and image
+configuration, and then ``{gie}.xml`` which declares that it is a GIE.
+
+The static directory can hold resources such as javascript and css files. If
+you are actively developing a GIE, you'll need to restart Galaxy after adding
+any resources to that directory before they can be accessed in the browser.
+
+Lastly, and most importantly, there's the templates folder. This normally just
+contains ``{gie}.mako``; however, the IPython GIE needs an extra template file.
+
+First Steps, Configuration
+--------------------------
+
+We will name our GIE "helloworld", but you are free to name yours differently.
+We'll first need to create the directory structure and set up our
+configuration
+
+.. code-block:: console
+
+    $ mkdir -p $GALAXY_ROOT/config/plugins/interactive_environments/helloworld/{config,static,templates}
+    $ cd $GALAXY_ROOT/config/plugins/interactive_environments/helloworld/
+
+Next, you'll need to create the GIE plugin XML file ``config/helloworld.xml``
+
+.. code-block:: xml
+
+    <?xml version="1.0" encoding="UTF-8"?>
+    <!DOCTYPE interactive_environment SYSTEM "../../interactive_environments.dtd">
+    <!-- This is the name which will show up in the User's Browser -->
+    <interactive_environment name="HelloWorld">
+        <data_sources>
+            <data_source>
+                <model_class>HistoryDatasetAssociation</model_class>
+
+                <!-- here you filter out which types of datasets are
+                     appropriate for this GIE -->
+                <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+                <test type="isinstance" test_attr="datatype" result_type="datatype">data.Text</test>
+                <to_param param_attr="id">dataset_id</to_param>
+            </data_source>
+        </data_sources>
+        <params>
+            <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+        </params>
+        <!-- Be sure that your entrypoint name is correct! -->
+        <entry_point entry_point_type="mako">helloworld.mako</entry_point>
+    </interactive_environment>
+
+Once this is done, we can set up our INI file, ``config/helloworld.ini.sample``, which controls Docker interaction
+
+.. code-block:: ini
+
+    [main]
+    # Unused
+
+    [docker]
+    # Command to execute docker. For example `sudo docker` or `docker-lxc`.
+    #command = docker {docker_args}
+
+    # The docker image name that should be started.
+    image = hello-ie
+
+    # Additional arguments that are passed to the `docker run` command.
+    #command_inject = --sig-proxy=true -e DEBUG=false
+
+    # URL to access the Galaxy API with from the spawned Docker container, if empty
+    # this falls back to galaxy.ini's galaxy_infrastructure_url and finally to the
+    # Docker host of the spawned container if that is also not set.
+    #galaxy_url =
+
+    # The Docker hostname. It can be useful to run the Docker daemon on a different
+    # host than Galaxy.
+    #docker_hostname = localhost
+
+We've named our image ``hello-ie``; we'll get to creating it in a minute.
+
+Mako Templates
+--------------
+
+Mako templates are very easy to use, and they allow significantly more
+flexibility than many other templating languages. It's because of this
+flexibility (and the ability to write plain python code in them) that GIEs
+were easy to develop.
+
+In our ``templates/helloworld.mako``, we'll add the following
+
+.. code-block:: html+mako
+
+    <%namespace name="ie" file="ie.mako" />
+
+This line says to inherit from the ``ie.mako`` file that's available in
+``$GALAXY_ROOT/config/plugins/interactive_environments/common/templates/ie.mako``.
+Next we'll add the following
+
+.. code-block:: html+mako
+
+    <%
+    # Sets ID and sets up a lot of other variables
+    ie_request.load_deploy_config()
+
+    # Define a volume that will be mounted into the container.
+    # This is a useful way to provide access to large files in the container,
+    # if the user knows ahead of time that they will need it.
+    user_file = ie_request.volume(
+        hda.file_name, '/import/file.dat', how='ro')
+
+    # Launch the IE. This builds and runs the docker command in the background.
+    ie_request.launch(
+        volumes=[user_file],
+        env_override={
+            'custom': '42'
+        }
+    )
+
+    # Only once the container is launched can we template our URLs. The ie_request
+    # doesn't have all of the information needed until the container is running.
+    notebook_access_url = ie_request.url_template('${PROXY_URL}/helloworld/')
+    %>
+
+That mako snippet loaded the configuration from the INI files, launched the
+docker container, and then built a URL to the correct endpoint, through the
+Galaxy NodeJS proxy. Additionally, we've set an environment variable named
+``CUSTOM`` with the value ``42`` to be passed to the container, and we've
+attached the dataset the user selected (available in ``hda``) to the
+container as a read-only volume.
+
+We'll continue appending to our ``helloworld.mako`` the HTML code that's
+actually displayed to the user when this template is rendered
+
+.. code-block:: html+mako
+
+    <html>
+    <head>
+    <!-- Loads some necessary javascript libraries. Specifically jquery,
+         toastr, and requirejs -->
+    ${ ie.load_default_js() }
+    </head>
+    <body>
+
+    <script type="text/javascript">
+    // see $GALAXY_ROOT/config/plugins/interactive_environments/common/templates/ie.mako to learn what this does
+    ${ ie.default_javascript_variables() }
+    var notebook_login_url = 'unused';
+    var notebook_access_url = '${ notebook_access_url }';
+
+    // Load code with require.js
+    ${ ie.plugin_require_config() }
+
+    // Load notebook
+    // This will load code from static/helloworld.js, often used to handle
+    // things like Login. The load_notebook function will eventually append
+    // an IFrame to the <div id="main" /> below.
+    requirejs(['interactive_environments', 'plugin/helloworld'], function(){
+        load_notebook(notebook_access_url);
+    });
+    </script>
+    <div id="main" width="100%" height="100%">
+    </div>
+    </body>
+    </html>
+
+We've glossed over some of the features of this file, but most IEs do a significant amount of "magic" in the top half of the mako template. For instance, the IPython notebook:
+
+- If the user is trying to run the IPython GIE Visualization on an existing notebook in their history, then that gets loaded into the docker container via the temp directory and set as the default notebook
+- Otherwise a default notebook is built for the user.
+
+The RStudio notebook:
+
+- Generates a random password and configures the image to use this password.
+- Copies in an RData file if the user has loaded one.
+- Sets some custom environment variables.
+
+
+Connecting the User to the Container via Javascript
+---------------------------------------------------
+
+With the mako template above finished, if you were to load this in your
+browser, not a lot would happen because we haven't built the hello-ie image,
+and we haven't used Javascript to connect the user with the container. In the
+tail end of the template, we set a variable ``notebook_access_url``. This is
+partially a legacy of how things used to be done, and you're welcome to clean
+up your code as you see fit. Galaxy's NodeJS proxy handles the
+authentication of users, so you don't have to worry about it, and can just
+assume that only the correct user will have access to a given notebook.
+
+In the ``static/`` directory, we generally create a ``js/`` directory below that, and create a ``{gie}.js`` (so, ``static/js/helloworld.js``) file in there. That file will have a function, ``load_notebook`` which will check if the GIE is available, and when it is, display it to the user.
+
+We start by writing the load notebook function, which is pretty generic
+
+.. code-block:: javascript
+
+    // Load an interactive environment (IE) from a remote URL
+    // @param {String} notebook_access_url: the URL embedded in the page and loaded
+    function load_notebook(notebook_access_url){
+        // When the page has completely loaded...
+        $( document ).ready(function() {
+            // Test if we can access the GIE, and if so, execute the function
+            // to load the GIE for the user.
+            test_ie_availability(notebook_access_url, function(){
+                _handle_notebook_loading(notebook_access_url);
+            });
+        });
+    }
+
+This function will display a spinner to the user to indicate progress, and then make multiple requests to ``notebook_access_url``. That URL MUST return a 200 OK for the ``_handle_notebook_loading`` function to ever be called. 302s do not count!
+
+With that, we've almost completed the Javascript portion; we just need to implement the function to display the GIE to the user in an iframe
+
+.. code-block:: javascript
+
+    function _handle_notebook_loading(notebook_access_url){
+        append_notebook(notebook_access_url);
+    }
+
+
+This function is very short. Historically, the GIE process involved a complex dance of:
+
+- generating a random password in the mako template
+- setting it as a javascript variable
+- passing it to the docker container
+- once the container was available, have the javascript automatically log a
+  user in (something browsers try to prevent since that's otherwise an XSS
+  vulnerability.)
+- hope everything worked
+
+Since the NodeJS proxy takes care of authentication/authorization, we can
+reduce the helloworld ``_handle_notebook_loading`` function to a simple
+``append_notebook`` call. You may wish to look at the IPython and RStudio GIEs
+for examples of the complex things that can be done at every step.
+
+The GIE Container
+-----------------
+
+We'll build a simple container that just displays the dataset the user
+selected. Remember when we attached a volume to the container? We'll
+make use of that now.
+
+GIE Containers (often) consist of:
+
+- Dockerfile
+- NGINX Proxy configuration
+- A custom startup script/entrypoint
+- A script to monitor traffic and kill unused containers
+
+We have to monitor the container's traffic and kill off unused containers,
+because no one else is watching them. The user launches the container in
+Galaxy, and Galaxy immediately forgets the container exists. Thus, we say
+that if a container has no open TCP connections to itself, it should
+terminate itself by killing its root process.
+
+Here's an example Dockerfile for our helloworld container
+
+.. code-block:: dockerfile
+
+    FROM ubuntu:14.04
+    # These environment variables are passed from Galaxy to the container
+    # and help you enable connectivity to Galaxy from within the container.
+    # This means your user can import/export data from/to Galaxy.
+    ENV DEBIAN_FRONTEND=noninteractive \
+        API_KEY=none \
+        DEBUG=false \
+        PROXY_PREFIX=none \
+        GALAXY_URL=none \
+        GALAXY_WEB_PORT=10000 \
+        HISTORY_ID=none \
+        REMOTE_HOST=none
+
+    RUN apt-get -qq update && \
+        apt-get install --no-install-recommends -y \
+        wget procps python python-pip net-tools nginx
+
+    # Our very important scripts. Make sure you've run `chmod +x startup.sh
+    # monitor_traffic.sh` outside of the container!
+    ADD ./startup.sh /startup.sh
+    ADD ./monitor_traffic.sh /monitor_traffic.sh
+
+    # /import will be the universal mount-point for the GIE
+    # The Galaxy instance can copy in data that needs to be present to the
+    # container
+    RUN mkdir /import
+
+    # Nginx configuration
+    COPY ./proxy.conf /proxy.conf
+
+    VOLUME ["/import"]
+    WORKDIR /import/
+
+    # EXTREMELY IMPORTANT! You must expose a SINGLE port on your container.
+    EXPOSE 80
+    CMD /startup.sh
+
+If you have questions on this, please feel free to contact us on IRC
+(`irc.freenode.net#galaxyproject <https://webchat.freenode.net/?channels=galaxyproject>`__).
+
+The proxy configuration is interesting: here we'll point NGINX to reverse proxy
+a service running on ``:8000`` inside the container. That port will be hosting
+a python process which serves up the directory contents of ``/import``, i.e.
+the file the user selected which was mounted as a volume into
+``/import/file.dat``
+
+.. code-block:: nginx
+
+    server {
+        listen 80;
+        server_name localhost;
+        access_log /var/log/nginx/localhost.access.log;
+
+        # Note the trailing slash used everywhere!
+        location PROXY_PREFIX/helloworld/ {
+            proxy_buffering off;
+            proxy_pass         http://127.0.0.1:8000/;
+            proxy_redirect     http://127.0.0.1:8000/ PROXY_PREFIX/helloworld/;
+        }
+    }
+
+
+And here we'll run that service in our ``startup.sh`` file
+
+.. code-block:: bash
+
+    #!/bin/bash
+    # First, replace the PROXY_PREFIX value in /proxy.conf with the value from
+    # the environment variable.
+    sed -i "s|PROXY_PREFIX|${PROXY_PREFIX}|" /proxy.conf;
+    # Then copy into the default location for ubuntu+nginx
+    cp /proxy.conf /etc/nginx/sites-enabled/default;
+
+    # Here you would normally start whatever service you want to start. In our
+    # example we start a simple directory listing service on port 8000
+    cd /import/ && python -mSimpleHTTPServer &
+
+    # Launch traffic monitor which will automatically kill the container if
+    # traffic stops
+    /monitor_traffic.sh &
+    # And finally launch nginx in foreground mode. This will make debugging
+    # easier as logs will be available from `docker logs ...`
+    nginx -g 'daemon off;'
+
+Lastly, our ``monitor_traffic.sh`` file is often re-used between containers; the only adjustment is the port that is watched
+
+.. code-block:: bash
+
+    #!/bin/bash
+    while true; do
+        sleep 60
+        if [ `netstat -t | grep -v CLOSE_WAIT | grep ':80' | wc -l` -lt 3 ]
+        then
+            pkill nginx
+        fi
+    done
+
+With those files, ``monitor_traffic.sh``, ``Dockerfile``, ``startup.sh``, and ``proxy.conf``, you should be able to build your ``hello-ie`` container
+
+.. code-block:: bash
+
+    $ cd hello-ie
+    $ docker build -t hello-ie .
+
+Now, if everything went smoothly, you should be able to restart Galaxy and try out your new GIE on a tabular or text file!
+
+Debugging
+---------
+
+When you launch your new GIE in Galaxy, your Galaxy logs should show something like the following:
+
+.. code-block:: console
+
+    Starting docker container for IE helloworld with command [docker run --sig-proxy=true -e DEBUG=false -e "GALAXY_URL=http://localhost/galaxy/" -e "CORS_ORIGIN=http://localhost" -e "GALAXY_WEB_PORT=8000" -e "HISTORY_ID=f2db41e1fa331b3e" -e "CUSTOM=42" -e "GALAXY_PASTER_PORT=8000" -e "PROXY_PREFIX=/galaxy/gie_proxy" -e "API_KEY=1712364174a0ff79b34e9a78fee3ca1c" -e "REMOTE_HOST=127.0.0.1" -e "USER_EMAIL=hxr at local.host" -d -P -v "/home/hxr/work/galaxy/database/tmp/tmp5HaqZy:/import/" -v " [...]
+
+Here's the docker command written out in a more readable manner:
+
+.. code-block:: console
+
+    $ docker run --sig-proxy=true \
+        -d -P \
+        -e "API_KEY=1712364174a0ff79b34e9a78fee3ca1c" \
+        -e "CORS_ORIGIN=http://localhost" \
+        -e "CUSTOM=42" \
+        -e "DEBUG=false" \
+        -e "GALAXY_PASTER_PORT=8000" \
+        -e "GALAXY_URL=http://localhost/galaxy/" \
+        -e "GALAXY_WEB_PORT=8000" \
+        -e "HISTORY_ID=f2db41e1fa331b3e" \
+        -e "PROXY_PREFIX=/galaxy/gie_proxy" \
+        -e "REMOTE_HOST=127.0.0.1" \
+        -e "USER_EMAIL=hxr at local.host" \
+        -v "/home/hxr/work/galaxy/database/tmp/tmp5HaqZy:/import/" \
+        -v "/home/hxr/work/galaxy/database/files/000/dataset_68.dat:/import/file.dat:ro" \
+      hello-ie
+
+As you can see, a LOT is going on! We'll break it down further:
+
+- ``-d`` runs the container in daemon mode: it launches in the background and
+  the client that submitted it returns immediately
+- ``-P`` randomly assigns an unused port to the container for each ``EXPOSE``d
+  port from our ``Dockerfile``. This is why you must expose a port, and only
+  one port.
+- A large number of environment variables are set:
+
+    - The user's API key is provided, allowing you to access datasets and
+      submit jobs on their behalf. If you have an environment like
+      IPython/RStudio, it is **highly recommended** that you provide some magic
+      by which the user can use their API key without embedding it in the
+      notebook. If you do embed it somehow in a document that gets saved to
+      their history, anyone can impersonate that user if they get a hold of it.
+      In the IPython GIE we have a variable that just runs
+      ``os.environ.get('API_KEY')`` to avoid embedding it in their notebook.
+    - A CORS Origin is provided for very strict servers, but it may be easier
+      to simply relax CORS requirements within the nginx proxy in your
+      container.
+    - Custom variables specified in your ``launch()`` command are available.
+    - A ``DEBUG`` environment variable should be used to help admins debug
+      existing containers. You should use it to increase logging, to skip
+      cleaning up temporary files, and so on.
+    - ``GALAXY_PASTER_PORT`` (deprecated) and ``GALAXY_WEB_PORT`` are the raw
+      port that Galaxy is listening on. You can use this to help decide how to
+      talk to Galaxy.
+    - ``GALAXY_URL`` is the URL that Galaxy should be accessible at. For
+      various reasons this may not be accurate. We recommend looking at our
+      implementation of `galaxy.py
+      <https://github.com/bgruening/docker-ipython-notebook/blob/15.07/galaxy.py>`__
+      which is a small utility script to provide API access to Galaxy to get
+      and fetch data, based on those environment variables.
+    - The ``HISTORY_ID`` of the current history the user is on is provided. In
+      the IPython/RStudio containers, we provide a dead simple method for users
+      to download datasets from their current history which will be visible to
+      them on the right hand side of their screen.
+    - A ``PROXY_PREFIX`` is provided which should be used in the nginx conf.
+    - ``REMOTE_HOST`` is another component used to test for a possible Galaxy
+      access path.
+    - The user's email is made available, which is very convenient for web
+      services like Entrez that require an email address. You can pre-fill it
+      for them, making their life easier.
+    - Two volumes are mounted, one a temporary directory from Galaxy (rw), and one
+      the dataset the user selected (ro).
+
+- and finally the image is specified.
+
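+Inside the container, these variables are all you need to talk back to
+Galaxy. For example, a script in the image could list the datasets in the
+user's current history using nothing but the injected environment (a sketch
+using the Python ``requests`` library):
+
+.. code-block:: python
+
+    import os
+
+    import requests
+
+    # Variables injected by Galaxy when the container is launched
+    galaxy_url = os.environ['GALAXY_URL'].rstrip('/')
+    api_key = os.environ['API_KEY']
+    history_id = os.environ['HISTORY_ID']
+
+    # List the datasets in the user's current history
+    contents = requests.get(
+        '%s/api/histories/%s/contents' % (galaxy_url, history_id),
+        params={'key': api_key},
+    ).json()
+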
+Most of this information is usually required to build friendly, easy-to-use
+GIEs. One of the strong points of GIEs is their magic interaction with Galaxy.
+Here we've mounted a volume read-only, but in real life you may wish to provide
+connectivity like IPython and RStudio provide, allowing the user to load
+datasets on demand for interactive analysis, and then to store analysis
+artefacts (and a log of what was done inside the container, à la IPython's
+"notebooks") back to their current history.
+
+If everything went well, at this point you should see a directory listing show up:
+
+.. image:: interactive_environments_success.png
+
+If you find yourself encountering difficulties, the "Hello, World" IE is
+available in a `GitHub repo <https://github.com/erasche/hello-world-interactive-environment/releases/tag/v15.10>`__, and there are people on the IRC channel who can help debug.
diff --git a/doc/source/dev/interactive_environments_success.png b/doc/source/dev/interactive_environments_success.png
new file mode 100644
index 0000000..2f88190
Binary files /dev/null and b/doc/source/dev/interactive_environments_success.png differ
diff --git a/doc/source/dev/queue.png b/doc/source/dev/queue.png
new file mode 100644
index 0000000..c4557aa
Binary files /dev/null and b/doc/source/dev/queue.png differ
diff --git a/doc/source/dev/queue_b.png b/doc/source/dev/queue_b.png
new file mode 100644
index 0000000..1d2c003
Binary files /dev/null and b/doc/source/dev/queue_b.png differ
diff --git a/doc/source/dev/runner_diag.png b/doc/source/dev/runner_diag.png
new file mode 100644
index 0000000..812b452
Binary files /dev/null and b/doc/source/dev/runner_diag.png differ
diff --git a/doc/source/index.rst b/doc/source/index.rst
new file mode 100644
index 0000000..cfee8c4
--- /dev/null
+++ b/doc/source/index.rst
@@ -0,0 +1,63 @@
+Galaxy Code Documentation
+*************************
+
+Galaxy_ is an open, web-based platform for accessible, reproducible, and
+transparent computational biomedical research.
+
+- *Accessible:* Users without programming experience can easily specify parameters and run tools and workflows.
+- *Reproducible:* Galaxy captures information so that any user can repeat and understand a complete computational analysis.
+- *Transparent:* Users share and publish analyses via the web and create Pages, which are interactive, web-based documents that describe a complete analysis.
+
+Things to know:
+
+- There are multiple choices_ when it comes to using Galaxy.
+- You can explore the `current code in the development branch`_ on GitHub.
+- This documentation is hosted at readthedocs_.
+
+For more information on the Galaxy Project, please visit the `project home page`_.
+
+.. _Galaxy: http://galaxyproject.org
+.. _choices: https://wiki.galaxyproject.org/BigPicture/Choices
+.. _current code in the development branch: https://github.com/galaxyproject/galaxy
+.. _readthedocs: http://galaxy.readthedocs.org
+.. _project home page: http://galaxyproject.org
+
+
+Contents
+========
+.. toctree::
+   :maxdepth: 5
+
+   Galaxy API Documentation <api_doc>
+
+   Tool Shed API Documentation <ts_api_doc>
+
+   Application Documentation <lib/modules>
+
+   Releases <releases/index>
+
+   Developer Documentation <dev/index>
+
+   Special topics in Administration <admin/index>
+
+   Project Governance <project/organization>
+
+   Issue Management <project/issues>
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
+Building this Documentation
+===========================
+
+If you have your own copy of the Galaxy source code, you can also generate your own version of this documentation:
+
+::
+
+    $ make -C doc/ html
+
+The generated documentation will be in ``doc/build/html/`` and can be viewed with a web browser.  Note that you will need to install Sphinx and a fair number of module dependencies before this will produce output.
diff --git a/doc/source/lib/galaxy.actions.rst b/doc/source/lib/galaxy.actions.rst
new file mode 100644
index 0000000..bd4c480
--- /dev/null
+++ b/doc/source/lib/galaxy.actions.rst
@@ -0,0 +1,20 @@
+galaxy.actions package
+======================
+
+.. automodule:: galaxy.actions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.actions.admin module
+---------------------------
+
+.. automodule:: galaxy.actions.admin
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.auth.providers.rst b/doc/source/lib/galaxy.auth.providers.rst
new file mode 100644
index 0000000..d2d4bfb
--- /dev/null
+++ b/doc/source/lib/galaxy.auth.providers.rst
@@ -0,0 +1,44 @@
+galaxy.auth.providers package
+=============================
+
+.. automodule:: galaxy.auth.providers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.auth.providers.alwaysreject module
+-----------------------------------------
+
+.. automodule:: galaxy.auth.providers.alwaysreject
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.auth.providers.ldap_ad module
+------------------------------------
+
+.. automodule:: galaxy.auth.providers.ldap_ad
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.auth.providers.localdb module
+------------------------------------
+
+.. automodule:: galaxy.auth.providers.localdb
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.auth.providers.pam_auth module
+-------------------------------------
+
+.. automodule:: galaxy.auth.providers.pam_auth
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.auth.rst b/doc/source/lib/galaxy.auth.rst
new file mode 100644
index 0000000..eefcbc6
--- /dev/null
+++ b/doc/source/lib/galaxy.auth.rst
@@ -0,0 +1,15 @@
+galaxy.auth package
+===================
+
+.. automodule:: galaxy.auth
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.auth.providers
+
diff --git a/doc/source/lib/galaxy.dataset_collections.rst b/doc/source/lib/galaxy.dataset_collections.rst
new file mode 100644
index 0000000..e065d30
--- /dev/null
+++ b/doc/source/lib/galaxy.dataset_collections.rst
@@ -0,0 +1,67 @@
+galaxy.dataset_collections package
+==================================
+
+.. automodule:: galaxy.dataset_collections
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.dataset_collections.types
+
+Submodules
+----------
+
+galaxy.dataset_collections.builder module
+-----------------------------------------
+
+.. automodule:: galaxy.dataset_collections.builder
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.dataset_collections.matching module
+------------------------------------------
+
+.. automodule:: galaxy.dataset_collections.matching
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.dataset_collections.registry module
+------------------------------------------
+
+.. automodule:: galaxy.dataset_collections.registry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.dataset_collections.structure module
+-------------------------------------------
+
+.. automodule:: galaxy.dataset_collections.structure
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.dataset_collections.subcollections module
+------------------------------------------------
+
+.. automodule:: galaxy.dataset_collections.subcollections
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.dataset_collections.type_description module
+--------------------------------------------------
+
+.. automodule:: galaxy.dataset_collections.type_description
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.dataset_collections.types.rst b/doc/source/lib/galaxy.dataset_collections.types.rst
new file mode 100644
index 0000000..639fbf2
--- /dev/null
+++ b/doc/source/lib/galaxy.dataset_collections.types.rst
@@ -0,0 +1,28 @@
+galaxy.dataset_collections.types package
+========================================
+
+.. automodule:: galaxy.dataset_collections.types
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.dataset_collections.types.list module
+--------------------------------------------
+
+.. automodule:: galaxy.dataset_collections.types.list
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.dataset_collections.types.paired module
+----------------------------------------------
+
+.. automodule:: galaxy.dataset_collections.types.paired
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.datatypes.converters.rst b/doc/source/lib/galaxy.datatypes.converters.rst
new file mode 100644
index 0000000..3cbddee
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.converters.rst
@@ -0,0 +1,258 @@
+galaxy.datatypes.converters package
+===================================
+
+.. automodule:: galaxy.datatypes.converters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.datatypes.converters.bed_to_gff_converter module
+-------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.bed_to_gff_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.bedgraph_to_array_tree_converter module
+-------------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.bedgraph_to_array_tree_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.bgzip module
+----------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.bgzip
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.fasta_to_len module
+-----------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.fasta_to_len
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.fasta_to_tabular_converter module
+-------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.fasta_to_tabular_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.fastq_to_fqtoc module
+-------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.fastq_to_fqtoc
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.fastqsolexa_to_fasta_converter module
+-----------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.fastqsolexa_to_fasta_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.fastqsolexa_to_qual_converter module
+----------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.fastqsolexa_to_qual_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.gff_to_bed_converter module
+-------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.gff_to_bed_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.gff_to_interval_index_converter module
+------------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.gff_to_interval_index_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.interval_to_bed_converter module
+------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_bed_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.interval_to_bedstrict_converter module
+------------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_bedstrict_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.interval_to_coverage module
+-------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_coverage
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.interval_to_fli module
+--------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_fli
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.interval_to_interval_index_converter module
+-----------------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_interval_index_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.interval_to_tabix_converter module
+--------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.interval_to_tabix_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.lped_to_fped_converter module
+---------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.lped_to_fped_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.lped_to_pbed_converter module
+---------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.lped_to_pbed_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.maf_to_fasta_converter module
+---------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.maf_to_fasta_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.maf_to_interval_converter module
+------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.maf_to_interval_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.pbed_ldreduced_converter module
+-----------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.pbed_ldreduced_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.pbed_to_lped_converter module
+---------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.pbed_to_lped_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.picard_interval_list_to_bed6_converter module
+-------------------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.picard_interval_list_to_bed6_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.pileup_to_interval_index_converter module
+---------------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.pileup_to_interval_index_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.sam_to_bam module
+---------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.sam_to_bam
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.vcf_to_interval_index_converter module
+------------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.vcf_to_interval_index_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.vcf_to_vcf_bgzip module
+---------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.vcf_to_vcf_bgzip
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.bcf_bgzip_to_bcf_converter module
+-------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.bcf_bgzip_to_bcf_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.bcf_to_bcf_bgzip_converter module
+-------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.bcf_to_bcf_bgzip_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.wiggle_to_array_tree_converter module
+-----------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.wiggle_to_array_tree_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.converters.wiggle_to_simple_converter module
+-------------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.converters.wiggle_to_simple_converter
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/lib/galaxy.datatypes.dataproviders.rst b/doc/source/lib/galaxy.datatypes.dataproviders.rst
new file mode 100644
index 0000000..cd85ef1
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.dataproviders.rst
@@ -0,0 +1,84 @@
+galaxy.datatypes.dataproviders package
+======================================
+
+.. automodule:: galaxy.datatypes.dataproviders
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.datatypes.dataproviders.base module
+------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.base
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.dataproviders.chunk module
+-------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.chunk
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.dataproviders.column module
+--------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.column
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.dataproviders.dataset module
+---------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.dataset
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.dataproviders.decorators module
+------------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.decorators
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.dataproviders.exceptions module
+------------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.exceptions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.dataproviders.external module
+----------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.external
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.dataproviders.hierarchy module
+-----------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.hierarchy
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.dataproviders.line module
+------------------------------------------
+
+.. automodule:: galaxy.datatypes.dataproviders.line
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.datatypes.display_applications.rst b/doc/source/lib/galaxy.datatypes.display_applications.rst
new file mode 100644
index 0000000..a6f3e5e
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.display_applications.rst
@@ -0,0 +1,36 @@
+galaxy.datatypes.display_applications package
+=============================================
+
+.. automodule:: galaxy.datatypes.display_applications
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.datatypes.display_applications.application module
+--------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.display_applications.application
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.display_applications.parameters module
+-------------------------------------------------------
+
+.. automodule:: galaxy.datatypes.display_applications.parameters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.display_applications.util module
+-------------------------------------------------
+
+.. automodule:: galaxy.datatypes.display_applications.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.datatypes.rst b/doc/source/lib/galaxy.datatypes.rst
new file mode 100644
index 0000000..bd76357
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.rst
@@ -0,0 +1,206 @@
+galaxy.datatypes package
+========================
+
+.. automodule:: galaxy.datatypes
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.datatypes.converters
+    galaxy.datatypes.dataproviders
+    galaxy.datatypes.display_applications
+    galaxy.datatypes.util
+
+Submodules
+----------
+
+galaxy.datatypes.assembly module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.assembly
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.binary module
+------------------------------
+
+.. automodule:: galaxy.datatypes.binary
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.checkers module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.checkers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.chrominfo module
+---------------------------------
+
+.. automodule:: galaxy.datatypes.chrominfo
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.constructive_solid_geometry module
+---------------------------------------------------
+
+.. automodule:: galaxy.datatypes.constructive_solid_geometry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.coverage module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.coverage
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.data module
+----------------------------
+
+.. automodule:: galaxy.datatypes.data
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.genetics module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.genetics
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.graph module
+-----------------------------
+
+.. automodule:: galaxy.datatypes.graph
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.images module
+------------------------------
+
+.. automodule:: galaxy.datatypes.images
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.interval module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.interval
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.metadata module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.metadata
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.msa module
+---------------------------
+
+.. automodule:: galaxy.datatypes.msa
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.ngsindex module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.ngsindex
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.proteomics module
+----------------------------------
+
+.. automodule:: galaxy.datatypes.proteomics
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.qualityscore module
+------------------------------------
+
+.. automodule:: galaxy.datatypes.qualityscore
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.registry module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.registry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.sequence module
+--------------------------------
+
+.. automodule:: galaxy.datatypes.sequence
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.sniff module
+-----------------------------
+
+.. automodule:: galaxy.datatypes.sniff
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.tabular module
+-------------------------------
+
+.. automodule:: galaxy.datatypes.tabular
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.text module
+----------------------------
+
+.. automodule:: galaxy.datatypes.text
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.tracks module
+------------------------------
+
+.. automodule:: galaxy.datatypes.tracks
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.xml module
+---------------------------
+
+.. automodule:: galaxy.datatypes.xml
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.datatypes.util.rst b/doc/source/lib/galaxy.datatypes.util.rst
new file mode 100644
index 0000000..6b9466b
--- /dev/null
+++ b/doc/source/lib/galaxy.datatypes.util.rst
@@ -0,0 +1,36 @@
+galaxy.datatypes.util package
+=============================
+
+.. automodule:: galaxy.datatypes.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.datatypes.util.generic_util module
+-----------------------------------------
+
+.. automodule:: galaxy.datatypes.util.generic_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.util.gff_util module
+-------------------------------------
+
+.. automodule:: galaxy.datatypes.util.gff_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.datatypes.util.image_util module
+---------------------------------------
+
+.. automodule:: galaxy.datatypes.util.image_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.dependencies.rst b/doc/source/lib/galaxy.dependencies.rst
new file mode 100644
index 0000000..16ce685
--- /dev/null
+++ b/doc/source/lib/galaxy.dependencies.rst
@@ -0,0 +1,8 @@
+galaxy.dependencies package
+===========================
+
+.. automodule:: galaxy.dependencies
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.eggs.rst b/doc/source/lib/galaxy.eggs.rst
new file mode 100644
index 0000000..6331aa3
--- /dev/null
+++ b/doc/source/lib/galaxy.eggs.rst
@@ -0,0 +1,8 @@
+galaxy.eggs package
+===================
+
+.. automodule:: galaxy.eggs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.exceptions.rst b/doc/source/lib/galaxy.exceptions.rst
new file mode 100644
index 0000000..dd83fb8
--- /dev/null
+++ b/doc/source/lib/galaxy.exceptions.rst
@@ -0,0 +1,20 @@
+galaxy.exceptions package
+=========================
+
+.. automodule:: galaxy.exceptions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.exceptions.error_codes module
+------------------------------------
+
+.. automodule:: galaxy.exceptions.error_codes
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.external_services.result_handlers.rst b/doc/source/lib/galaxy.external_services.result_handlers.rst
new file mode 100644
index 0000000..749bfb7
--- /dev/null
+++ b/doc/source/lib/galaxy.external_services.result_handlers.rst
@@ -0,0 +1,20 @@
+galaxy.external_services.result_handlers package
+================================================
+
+.. automodule:: galaxy.external_services.result_handlers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.external_services.result_handlers.basic module
+-----------------------------------------------------
+
+.. automodule:: galaxy.external_services.result_handlers.basic
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.external_services.rst b/doc/source/lib/galaxy.external_services.rst
new file mode 100644
index 0000000..b08913d
--- /dev/null
+++ b/doc/source/lib/galaxy.external_services.rst
@@ -0,0 +1,43 @@
+galaxy.external_services package
+================================
+
+.. automodule:: galaxy.external_services
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.external_services.result_handlers
+
+Submodules
+----------
+
+galaxy.external_services.actions module
+---------------------------------------
+
+.. automodule:: galaxy.external_services.actions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.external_services.parameters module
+------------------------------------------
+
+.. automodule:: galaxy.external_services.parameters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.external_services.service module
+---------------------------------------
+
+.. automodule:: galaxy.external_services.service
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.forms.rst b/doc/source/lib/galaxy.forms.rst
new file mode 100644
index 0000000..088d52b
--- /dev/null
+++ b/doc/source/lib/galaxy.forms.rst
@@ -0,0 +1,20 @@
+galaxy.forms package
+====================
+
+.. automodule:: galaxy.forms
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.forms.forms module
+-------------------------
+
+.. automodule:: galaxy.forms.forms
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.actions.rst b/doc/source/lib/galaxy.jobs.actions.rst
new file mode 100644
index 0000000..7ec9a93
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.actions.rst
@@ -0,0 +1,20 @@
+galaxy.jobs.actions package
+===========================
+
+.. automodule:: galaxy.jobs.actions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.jobs.actions.post module
+-------------------------------
+
+.. automodule:: galaxy.jobs.actions.post
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.deferred.rst b/doc/source/lib/galaxy.jobs.deferred.rst
new file mode 100644
index 0000000..371a172
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.deferred.rst
@@ -0,0 +1,36 @@
+galaxy.jobs.deferred package
+============================
+
+.. automodule:: galaxy.jobs.deferred
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.jobs.deferred.data_transfer module
+-----------------------------------------
+
+.. automodule:: galaxy.jobs.deferred.data_transfer
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.deferred.manual_data_transfer module
+------------------------------------------------
+
+.. automodule:: galaxy.jobs.deferred.manual_data_transfer
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.deferred.pacific_biosciences_smrt_portal module
+-----------------------------------------------------------
+
+.. automodule:: galaxy.jobs.deferred.pacific_biosciences_smrt_portal
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.metrics.collectl.rst b/doc/source/lib/galaxy.jobs.metrics.collectl.rst
new file mode 100644
index 0000000..01120a3
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.metrics.collectl.rst
@@ -0,0 +1,44 @@
+galaxy.jobs.metrics.collectl package
+====================================
+
+.. automodule:: galaxy.jobs.metrics.collectl
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.jobs.metrics.collectl.cli module
+---------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.collectl.cli
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.metrics.collectl.processes module
+---------------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.collectl.processes
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.metrics.collectl.stats module
+-----------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.collectl.stats
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.metrics.collectl.subsystems module
+----------------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.collectl.subsystems
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.metrics.instrumenters.rst b/doc/source/lib/galaxy.jobs.metrics.instrumenters.rst
new file mode 100644
index 0000000..71b1a86
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.metrics.instrumenters.rst
@@ -0,0 +1,60 @@
+galaxy.jobs.metrics.instrumenters package
+=========================================
+
+.. automodule:: galaxy.jobs.metrics.instrumenters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.jobs.metrics.instrumenters.collectl module
+-------------------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.instrumenters.collectl
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.metrics.instrumenters.core module
+---------------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.instrumenters.core
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.metrics.instrumenters.cpuinfo module
+------------------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.instrumenters.cpuinfo
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.metrics.instrumenters.env module
+--------------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.instrumenters.env
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.metrics.instrumenters.meminfo module
+------------------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.instrumenters.meminfo
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.metrics.instrumenters.uname module
+----------------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.instrumenters.uname
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.metrics.rst b/doc/source/lib/galaxy.jobs.metrics.rst
new file mode 100644
index 0000000..b050d0f
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.metrics.rst
@@ -0,0 +1,28 @@
+galaxy.jobs.metrics package
+===========================
+
+.. automodule:: galaxy.jobs.metrics
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.jobs.metrics.collectl
+    galaxy.jobs.metrics.instrumenters
+
+Submodules
+----------
+
+galaxy.jobs.metrics.formatting module
+-------------------------------------
+
+.. automodule:: galaxy.jobs.metrics.formatting
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.rst b/doc/source/lib/galaxy.jobs.rst
new file mode 100644
index 0000000..8905e46
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.rst
@@ -0,0 +1,103 @@
+galaxy.jobs package
+===================
+
+.. automodule:: galaxy.jobs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.jobs.actions
+    galaxy.jobs.deferred
+    galaxy.jobs.metrics
+    galaxy.jobs.runners
+    galaxy.jobs.splitters
+
+Submodules
+----------
+
+galaxy.jobs.command_factory module
+----------------------------------
+
+.. automodule:: galaxy.jobs.command_factory
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.datasets module
+---------------------------
+
+.. automodule:: galaxy.jobs.datasets
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.error_level module
+------------------------------
+
+.. automodule:: galaxy.jobs.error_level
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.handler module
+--------------------------
+
+.. automodule:: galaxy.jobs.handler
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.manager module
+--------------------------
+
+.. automodule:: galaxy.jobs.manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.mapper module
+-------------------------
+
+.. automodule:: galaxy.jobs.mapper
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.output_checker module
+---------------------------------
+
+.. automodule:: galaxy.jobs.output_checker
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.rule_helper module
+------------------------------
+
+.. automodule:: galaxy.jobs.rule_helper
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.stock_rules module
+------------------------------
+
+.. automodule:: galaxy.jobs.stock_rules
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.transfer_manager module
+-----------------------------------
+
+.. automodule:: galaxy.jobs.transfer_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.runners.rst b/doc/source/lib/galaxy.jobs.runners.rst
new file mode 100644
index 0000000..cbeb6de
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.rst
@@ -0,0 +1,92 @@
+galaxy.jobs.runners package
+===========================
+
+.. automodule:: galaxy.jobs.runners
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.jobs.runners.state_handlers
+    galaxy.jobs.runners.util
+
+Submodules
+----------
+
+galaxy.jobs.runners.cli module
+------------------------------
+
+.. automodule:: galaxy.jobs.runners.cli
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.condor module
+---------------------------------
+
+.. automodule:: galaxy.jobs.runners.condor
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.drmaa module
+--------------------------------
+
+.. automodule:: galaxy.jobs.runners.drmaa
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.local module
+--------------------------------
+
+.. automodule:: galaxy.jobs.runners.local
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.pbs module
+------------------------------
+
+.. automodule:: galaxy.jobs.runners.pbs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.pulsar module
+---------------------------------
+
+.. automodule:: galaxy.jobs.runners.pulsar
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.slurm module
+--------------------------------
+
+.. automodule:: galaxy.jobs.runners.slurm
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.state_handler_factory module
+------------------------------------------------
+
+.. automodule:: galaxy.jobs.runners.state_handler_factory
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.tasks module
+--------------------------------
+
+.. automodule:: galaxy.jobs.runners.tasks
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.runners.state_handlers.rst b/doc/source/lib/galaxy.jobs.runners.state_handlers.rst
new file mode 100644
index 0000000..06b15dd
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.state_handlers.rst
@@ -0,0 +1,20 @@
+galaxy.jobs.runners.state_handlers package
+==========================================
+
+.. automodule:: galaxy.jobs.runners.state_handlers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.jobs.runners.state_handlers.resubmit module
+--------------------------------------------------
+
+.. automodule:: galaxy.jobs.runners.state_handlers.resubmit
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.runners.util.cli.job.rst b/doc/source/lib/galaxy.jobs.runners.util.cli.job.rst
new file mode 100644
index 0000000..ec1149a
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.util.cli.job.rst
@@ -0,0 +1,36 @@
+galaxy.jobs.runners.util.cli.job package
+========================================
+
+.. automodule:: galaxy.jobs.runners.util.cli.job
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.jobs.runners.util.cli.job.slurm module
+---------------------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.cli.job.slurm
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.util.cli.job.slurm_torque module
+----------------------------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.cli.job.slurm_torque
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.util.cli.job.torque module
+----------------------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.cli.job.torque
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.runners.util.cli.rst b/doc/source/lib/galaxy.jobs.runners.util.cli.rst
new file mode 100644
index 0000000..dadd626
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.util.cli.rst
@@ -0,0 +1,28 @@
+galaxy.jobs.runners.util.cli package
+====================================
+
+.. automodule:: galaxy.jobs.runners.util.cli
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.jobs.runners.util.cli.job
+    galaxy.jobs.runners.util.cli.shell
+
+Submodules
+----------
+
+galaxy.jobs.runners.util.cli.factory module
+-------------------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.cli.factory
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.runners.util.cli.shell.rst b/doc/source/lib/galaxy.jobs.runners.util.cli.shell.rst
new file mode 100644
index 0000000..42e5c7f
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.util.cli.shell.rst
@@ -0,0 +1,28 @@
+galaxy.jobs.runners.util.cli.shell package
+==========================================
+
+.. automodule:: galaxy.jobs.runners.util.cli.shell
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.jobs.runners.util.cli.shell.local module
+-----------------------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.cli.shell.local
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.util.cli.shell.rsh module
+---------------------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.cli.shell.rsh
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.runners.util.condor.rst b/doc/source/lib/galaxy.jobs.runners.util.condor.rst
new file mode 100644
index 0000000..823ea51
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.util.condor.rst
@@ -0,0 +1,8 @@
+galaxy.jobs.runners.util.condor package
+=======================================
+
+.. automodule:: galaxy.jobs.runners.util.condor
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.jobs.runners.util.job_script.rst b/doc/source/lib/galaxy.jobs.runners.util.job_script.rst
new file mode 100644
index 0000000..bcc2b16
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.util.job_script.rst
@@ -0,0 +1,8 @@
+galaxy.jobs.runners.util.job_script package
+===========================================
+
+.. automodule:: galaxy.jobs.runners.util.job_script
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.jobs.runners.util.rst b/doc/source/lib/galaxy.jobs.runners.util.rst
new file mode 100644
index 0000000..8bdd25f
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.runners.util.rst
@@ -0,0 +1,45 @@
+galaxy.jobs.runners.util package
+================================
+
+.. automodule:: galaxy.jobs.runners.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.jobs.runners.util.cli
+    galaxy.jobs.runners.util.condor
+    galaxy.jobs.runners.util.job_script
+
+Submodules
+----------
+
+galaxy.jobs.runners.util.env module
+-----------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.env
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.util.external module
+----------------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.external
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.runners.util.kill module
+------------------------------------
+
+.. automodule:: galaxy.jobs.runners.util.kill
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.jobs.splitters.rst b/doc/source/lib/galaxy.jobs.splitters.rst
new file mode 100644
index 0000000..d8af934
--- /dev/null
+++ b/doc/source/lib/galaxy.jobs.splitters.rst
@@ -0,0 +1,28 @@
+galaxy.jobs.splitters package
+=============================
+
+.. automodule:: galaxy.jobs.splitters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.jobs.splitters.basic module
+----------------------------------
+
+.. automodule:: galaxy.jobs.splitters.basic
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.jobs.splitters.multi module
+----------------------------------
+
+.. automodule:: galaxy.jobs.splitters.multi
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.managers.rst b/doc/source/lib/galaxy.managers.rst
new file mode 100644
index 0000000..5fab924
--- /dev/null
+++ b/doc/source/lib/galaxy.managers.rst
@@ -0,0 +1,228 @@
+galaxy.managers package
+=======================
+
+.. automodule:: galaxy.managers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.managers.annotatable module
+----------------------------------
+
+.. automodule:: galaxy.managers.annotatable
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.api_keys module
+-------------------------------
+
+.. automodule:: galaxy.managers.api_keys
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.base module
+---------------------------
+
+.. automodule:: galaxy.managers.base
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.citations module
+--------------------------------
+
+.. automodule:: galaxy.managers.citations
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.collections module
+----------------------------------
+
+.. automodule:: galaxy.managers.collections
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.collections_util module
+---------------------------------------
+
+.. automodule:: galaxy.managers.collections_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.configuration module
+------------------------------------
+
+.. automodule:: galaxy.managers.configuration
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.containers module
+---------------------------------
+
+.. automodule:: galaxy.managers.containers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.context module
+------------------------------
+
+.. automodule:: galaxy.managers.context
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.datasets module
+-------------------------------
+
+.. automodule:: galaxy.managers.datasets
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.deletable module
+--------------------------------
+
+.. automodule:: galaxy.managers.deletable
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.folders module
+------------------------------
+
+.. automodule:: galaxy.managers.folders
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.hdas module
+---------------------------
+
+.. automodule:: galaxy.managers.hdas
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.histories module
+--------------------------------
+
+.. automodule:: galaxy.managers.histories
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.lddas module
+----------------------------
+
+.. automodule:: galaxy.managers.lddas
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.libraries module
+--------------------------------
+
+.. automodule:: galaxy.managers.libraries
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.pages module
+----------------------------
+
+.. automodule:: galaxy.managers.pages
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.ratable module
+------------------------------
+
+.. automodule:: galaxy.managers.ratable
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.rbac_secured module
+-----------------------------------
+
+.. automodule:: galaxy.managers.rbac_secured
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.roles module
+----------------------------
+
+.. automodule:: galaxy.managers.roles
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.secured module
+------------------------------
+
+.. automodule:: galaxy.managers.secured
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.sharable module
+-------------------------------
+
+.. automodule:: galaxy.managers.sharable
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.taggable module
+-------------------------------
+
+.. automodule:: galaxy.managers.taggable
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.tags module
+---------------------------
+
+.. automodule:: galaxy.managers.tags
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.users module
+----------------------------
+
+.. automodule:: galaxy.managers.users
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.visualizations module
+-------------------------------------
+
+.. automodule:: galaxy.managers.visualizations
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.managers.workflows module
+--------------------------------
+
+.. automodule:: galaxy.managers.workflows
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.model.migrate.rst b/doc/source/lib/galaxy.model.migrate.rst
new file mode 100644
index 0000000..60857c8
--- /dev/null
+++ b/doc/source/lib/galaxy.model.migrate.rst
@@ -0,0 +1,20 @@
+galaxy.model.migrate package
+============================
+
+.. automodule:: galaxy.model.migrate
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.model.migrate.check module
+---------------------------------
+
+.. automodule:: galaxy.model.migrate.check
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.model.orm.rst b/doc/source/lib/galaxy.model.orm.rst
new file mode 100644
index 0000000..b689686
--- /dev/null
+++ b/doc/source/lib/galaxy.model.orm.rst
@@ -0,0 +1,44 @@
+galaxy.model.orm package
+========================
+
+.. automodule:: galaxy.model.orm
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.model.orm.engine_factory module
+--------------------------------------
+
+.. automodule:: galaxy.model.orm.engine_factory
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.model.orm.logging_connection_proxy module
+------------------------------------------------
+
+.. automodule:: galaxy.model.orm.logging_connection_proxy
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.model.orm.now module
+---------------------------
+
+.. automodule:: galaxy.model.orm.now
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.model.orm.scripts module
+-------------------------------
+
+.. automodule:: galaxy.model.orm.scripts
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.model.rst b/doc/source/lib/galaxy.model.rst
new file mode 100644
index 0000000..748a7ba
--- /dev/null
+++ b/doc/source/lib/galaxy.model.rst
@@ -0,0 +1,69 @@
+galaxy.model package
+====================
+
+.. automodule:: galaxy.model
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.model.migrate
+    galaxy.model.orm
+    galaxy.model.tool_shed_install
+
+Submodules
+----------
+
+galaxy.model.base module
+------------------------
+
+.. automodule:: galaxy.model.base
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.model.custom_types module
+--------------------------------
+
+.. automodule:: galaxy.model.custom_types
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.model.item_attrs module
+------------------------------
+
+.. automodule:: galaxy.model.item_attrs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.model.mapping module
+---------------------------
+
+.. automodule:: galaxy.model.mapping
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.model.search module
+--------------------------
+
+.. automodule:: galaxy.model.search
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.model.util module
+------------------------
+
+.. automodule:: galaxy.model.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.model.tool_shed_install.migrate.rst b/doc/source/lib/galaxy.model.tool_shed_install.migrate.rst
new file mode 100644
index 0000000..56b44de
--- /dev/null
+++ b/doc/source/lib/galaxy.model.tool_shed_install.migrate.rst
@@ -0,0 +1,20 @@
+galaxy.model.tool_shed_install.migrate package
+==============================================
+
+.. automodule:: galaxy.model.tool_shed_install.migrate
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.model.tool_shed_install.migrate.check module
+---------------------------------------------------
+
+.. automodule:: galaxy.model.tool_shed_install.migrate.check
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.model.tool_shed_install.rst b/doc/source/lib/galaxy.model.tool_shed_install.rst
new file mode 100644
index 0000000..24e6b47
--- /dev/null
+++ b/doc/source/lib/galaxy.model.tool_shed_install.rst
@@ -0,0 +1,27 @@
+galaxy.model.tool_shed_install package
+======================================
+
+.. automodule:: galaxy.model.tool_shed_install
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.model.tool_shed_install.migrate
+
+Submodules
+----------
+
+galaxy.model.tool_shed_install.mapping module
+---------------------------------------------
+
+.. automodule:: galaxy.model.tool_shed_install.mapping
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.objectstore.rst b/doc/source/lib/galaxy.objectstore.rst
new file mode 100644
index 0000000..52a3442
--- /dev/null
+++ b/doc/source/lib/galaxy.objectstore.rst
@@ -0,0 +1,52 @@
+galaxy.objectstore package
+==========================
+
+.. automodule:: galaxy.objectstore
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.objectstore.pulsar module
+--------------------------------
+
+.. automodule:: galaxy.objectstore.pulsar
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.objectstore.rods module
+------------------------------
+
+.. automodule:: galaxy.objectstore.rods
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.objectstore.s3 module
+----------------------------
+
+.. automodule:: galaxy.objectstore.s3
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.objectstore.s3_multipart_upload module
+---------------------------------------------
+
+.. automodule:: galaxy.objectstore.s3_multipart_upload
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.objectstore.azure_blob module
+------------------------------------
+
+.. automodule:: galaxy.objectstore.azure_blob
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.openid.rst b/doc/source/lib/galaxy.openid.rst
new file mode 100644
index 0000000..ccd8057
--- /dev/null
+++ b/doc/source/lib/galaxy.openid.rst
@@ -0,0 +1,20 @@
+galaxy.openid package
+=====================
+
+.. automodule:: galaxy.openid
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.openid.providers module
+------------------------------
+
+.. automodule:: galaxy.openid.providers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.quota.rst b/doc/source/lib/galaxy.quota.rst
new file mode 100644
index 0000000..3680203
--- /dev/null
+++ b/doc/source/lib/galaxy.quota.rst
@@ -0,0 +1,8 @@
+galaxy.quota package
+====================
+
+.. automodule:: galaxy.quota
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.rst b/doc/source/lib/galaxy.rst
new file mode 100644
index 0000000..aa8c5fe
--- /dev/null
+++ b/doc/source/lib/galaxy.rst
@@ -0,0 +1,91 @@
+galaxy package
+==============
+
+.. automodule:: galaxy
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.actions
+    galaxy.auth
+    galaxy.dataset_collections
+    galaxy.datatypes
+    galaxy.dependencies
+    galaxy.eggs
+    galaxy.exceptions
+    galaxy.external_services
+    galaxy.forms
+    galaxy.jobs
+    galaxy.managers
+    galaxy.model
+    galaxy.objectstore
+    galaxy.openid
+    galaxy.quota
+    galaxy.sample_tracking
+    galaxy.security
+    galaxy.tags
+    galaxy.tools
+    galaxy.util
+    galaxy.visualization
+    galaxy.web
+    galaxy.webapps
+    galaxy.work
+    galaxy.workflow
+
+Submodules
+----------
+
+galaxy.app module
+-----------------
+
+.. automodule:: galaxy.app
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.config module
+--------------------
+
+.. automodule:: galaxy.config
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.main module
+------------------
+
+.. automodule:: galaxy.main
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.queue_worker module
+--------------------------
+
+.. automodule:: galaxy.queue_worker
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.queues module
+--------------------
+
+.. automodule:: galaxy.queues
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.version module
+---------------------
+
+.. automodule:: galaxy.version
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.sample_tracking.rst b/doc/source/lib/galaxy.sample_tracking.rst
new file mode 100644
index 0000000..dbef4b7
--- /dev/null
+++ b/doc/source/lib/galaxy.sample_tracking.rst
@@ -0,0 +1,44 @@
+galaxy.sample_tracking package
+==============================
+
+.. automodule:: galaxy.sample_tracking
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.sample_tracking.data_transfer module
+-------------------------------------------
+
+.. automodule:: galaxy.sample_tracking.data_transfer
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.sample_tracking.external_service_types module
+----------------------------------------------------
+
+.. automodule:: galaxy.sample_tracking.external_service_types
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.sample_tracking.request_types module
+-------------------------------------------
+
+.. automodule:: galaxy.sample_tracking.request_types
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.sample_tracking.sample module
+------------------------------------
+
+.. automodule:: galaxy.sample_tracking.sample
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.security.rst b/doc/source/lib/galaxy.security.rst
new file mode 100644
index 0000000..5f1ab5f
--- /dev/null
+++ b/doc/source/lib/galaxy.security.rst
@@ -0,0 +1,28 @@
+galaxy.security package
+=======================
+
+.. automodule:: galaxy.security
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.security.passwords module
+--------------------------------
+
+.. automodule:: galaxy.security.passwords
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.security.validate_user_input module
+------------------------------------------
+
+.. automodule:: galaxy.security.validate_user_input
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tags.rst b/doc/source/lib/galaxy.tags.rst
new file mode 100644
index 0000000..d61869f
--- /dev/null
+++ b/doc/source/lib/galaxy.tags.rst
@@ -0,0 +1,8 @@
+galaxy.tags package
+===================
+
+.. automodule:: galaxy.tags
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.tools.actions.rst b/doc/source/lib/galaxy.tools.actions.rst
new file mode 100644
index 0000000..1cf4863
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.actions.rst
@@ -0,0 +1,60 @@
+galaxy.tools.actions package
+============================
+
+.. automodule:: galaxy.tools.actions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.tools.actions.data_manager module
+----------------------------------------
+
+.. automodule:: galaxy.tools.actions.data_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.actions.data_source module
+---------------------------------------
+
+.. automodule:: galaxy.tools.actions.data_source
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.actions.history_imp_exp module
+-------------------------------------------
+
+.. automodule:: galaxy.tools.actions.history_imp_exp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.actions.metadata module
+------------------------------------
+
+.. automodule:: galaxy.tools.actions.metadata
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.actions.upload module
+----------------------------------
+
+.. automodule:: galaxy.tools.actions.upload
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.actions.upload_common module
+-----------------------------------------
+
+.. automodule:: galaxy.tools.actions.upload_common
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tools.data.rst b/doc/source/lib/galaxy.tools.data.rst
new file mode 100644
index 0000000..4435ba5
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.data.rst
@@ -0,0 +1,8 @@
+galaxy.tools.data package
+=========================
+
+.. automodule:: galaxy.tools.data
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.tools.data_manager.rst b/doc/source/lib/galaxy.tools.data_manager.rst
new file mode 100644
index 0000000..02c75fa
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.data_manager.rst
@@ -0,0 +1,20 @@
+galaxy.tools.data_manager package
+=================================
+
+.. automodule:: galaxy.tools.data_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.tools.data_manager.manager module
+----------------------------------------
+
+.. automodule:: galaxy.tools.data_manager.manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
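Every automodule stanza in these stubs relies on autodoc's docstring
extraction: :members: documents each public object that carries a
docstring, :undoc-members: lists the ones that do not, and
:show-inheritance: appends the base classes of documented classes. A
hypothetical module, not part of Galaxy, illustrating what the directive
picks up:

    # example_module.py -- hypothetical, for illustration only.
    def resolve(name, default=None):
        """One-line summary shown beneath the signature by :members:.

        The rest of the docstring becomes the body of the entry; a
        function with no docstring at all would still be listed, because
        the stubs also pass :undoc-members:.
        """
        return name if name is not None else default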
diff --git a/doc/source/lib/galaxy.tools.deps.resolvers.rst b/doc/source/lib/galaxy.tools.deps.resolvers.rst
new file mode 100644
index 0000000..0ff6842
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.deps.resolvers.rst
@@ -0,0 +1,68 @@
+galaxy.tools.deps.resolvers package
+===================================
+
+.. automodule:: galaxy.tools.deps.resolvers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.tools.deps.resolvers.brewed_tool_shed_packages module
+------------------------------------------------------------
+
+.. automodule:: galaxy.tools.deps.resolvers.brewed_tool_shed_packages
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.resolvers.galaxy_packages module
+--------------------------------------------------
+
+.. automodule:: galaxy.tools.deps.resolvers.galaxy_packages
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.resolvers.homebrew module
+-------------------------------------------
+
+.. automodule:: galaxy.tools.deps.resolvers.homebrew
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.resolvers.modules module
+------------------------------------------
+
+.. automodule:: galaxy.tools.deps.resolvers.modules
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.resolvers.resolver_mixins module
+--------------------------------------------------
+
+.. automodule:: galaxy.tools.deps.resolvers.resolver_mixins
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.resolvers.tool_shed_packages module
+-----------------------------------------------------
+
+.. automodule:: galaxy.tools.deps.resolvers.tool_shed_packages
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.resolvers.unlinked_tool_shed_packages module
+--------------------------------------------------------------
+
+.. automodule:: galaxy.tools.deps.resolvers.unlinked_tool_shed_packages
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tools.deps.rst b/doc/source/lib/galaxy.tools.deps.rst
new file mode 100644
index 0000000..f64c423
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.deps.rst
@@ -0,0 +1,83 @@
+galaxy.tools.deps package
+=========================
+
+.. automodule:: galaxy.tools.deps
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.tools.deps.resolvers
+
+Submodules
+----------
+
+galaxy.tools.deps.brew_exts module
+----------------------------------
+
+.. automodule:: galaxy.tools.deps.brew_exts
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.brew_util module
+----------------------------------
+
+.. automodule:: galaxy.tools.deps.brew_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.commands module
+---------------------------------
+
+.. automodule:: galaxy.tools.deps.commands
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.containers module
+-----------------------------------
+
+.. automodule:: galaxy.tools.deps.containers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.dependencies module
+-------------------------------------
+
+.. automodule:: galaxy.tools.deps.dependencies
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.docker_util module
+------------------------------------
+
+.. automodule:: galaxy.tools.deps.docker_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.dockerfiles module
+------------------------------------
+
+.. automodule:: galaxy.tools.deps.dockerfiles
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.deps.requirements module
+-------------------------------------
+
+.. automodule:: galaxy.tools.deps.requirements
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tools.filters.rst b/doc/source/lib/galaxy.tools.filters.rst
new file mode 100644
index 0000000..bbea7ec
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.filters.rst
@@ -0,0 +1,8 @@
+galaxy.tools.filters package
+============================
+
+.. automodule:: galaxy.tools.filters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.tools.imp_exp.rst b/doc/source/lib/galaxy.tools.imp_exp.rst
new file mode 100644
index 0000000..e466f86
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.imp_exp.rst
@@ -0,0 +1,28 @@
+galaxy.tools.imp_exp package
+============================
+
+.. automodule:: galaxy.tools.imp_exp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.tools.imp_exp.export_history module
+------------------------------------------
+
+.. automodule:: galaxy.tools.imp_exp.export_history
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.imp_exp.unpack_tar_gz_archive module
+-------------------------------------------------
+
+.. automodule:: galaxy.tools.imp_exp.unpack_tar_gz_archive
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tools.linters.rst b/doc/source/lib/galaxy.tools.linters.rst
new file mode 100644
index 0000000..3463dc9
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.linters.rst
@@ -0,0 +1,82 @@
+galaxy.tools.linters package
+============================
+
+.. automodule:: galaxy.tools.linters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.tools.linters.citations module
+-------------------------------------
+
+.. automodule:: galaxy.tools.linters.citations
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.linters.command module
+-----------------------------------
+
+.. automodule:: galaxy.tools.linters.command
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.linters.general module
+-----------------------------------
+
+.. automodule:: galaxy.tools.linters.general
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.linters.help module
+--------------------------------
+
+.. automodule:: galaxy.tools.linters.help
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.linters.inputs module
+----------------------------------
+
+.. automodule:: galaxy.tools.linters.inputs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.linters.outputs module
+-----------------------------------
+
+.. automodule:: galaxy.tools.linters.outputs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.linters.stdio module
+---------------------------------
+
+.. automodule:: galaxy.tools.linters.stdio
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.linters.tests module
+---------------------------------
+
+.. automodule:: galaxy.tools.linters.tests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.linters.xml_order module
+-------------------------------------
+
+.. automodule:: galaxy.tools.linters.xml_order
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/lib/galaxy.tools.parameters.rst b/doc/source/lib/galaxy.tools.parameters.rst
new file mode 100644
index 0000000..8bfd839
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.parameters.rst
@@ -0,0 +1,106 @@
+galaxy.tools.parameters package
+===============================
+
+.. automodule:: galaxy.tools.parameters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.tools.parameters.basic module
+------------------------------------
+
+.. automodule:: galaxy.tools.parameters.basic
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.dataset_matcher module
+----------------------------------------------
+
+.. automodule:: galaxy.tools.parameters.dataset_matcher
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.dynamic_options module
+----------------------------------------------
+
+.. automodule:: galaxy.tools.parameters.dynamic_options
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.grouping module
+---------------------------------------
+
+.. automodule:: galaxy.tools.parameters.grouping
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.history_query module
+--------------------------------------------
+
+.. automodule:: galaxy.tools.parameters.history_query
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.input_translation module
+------------------------------------------------
+
+.. automodule:: galaxy.tools.parameters.input_translation
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.meta module
+-----------------------------------
+
+.. automodule:: galaxy.tools.parameters.meta
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.output_collect module
+---------------------------------------------
+
+.. automodule:: galaxy.tools.parameters.output_collect
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.sanitize module
+---------------------------------------
+
+.. automodule:: galaxy.tools.parameters.sanitize
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.validation module
+-----------------------------------------
+
+.. automodule:: galaxy.tools.parameters.validation
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.wrapped module
+--------------------------------------
+
+.. automodule:: galaxy.tools.parameters.wrapped
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parameters.wrapped_json module
+-------------------------------------------
+
+.. automodule:: galaxy.tools.parameters.wrapped_json
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/lib/galaxy.tools.parser.rst b/doc/source/lib/galaxy.tools.parser.rst
new file mode 100644
index 0000000..ecdc2b1
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.parser.rst
@@ -0,0 +1,52 @@
+galaxy.tools.parser package
+===========================
+
+.. automodule:: galaxy.tools.parser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.tools.parser.factory module
+----------------------------------
+
+.. automodule:: galaxy.tools.parser.factory
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parser.interface module
+------------------------------------
+
+.. automodule:: galaxy.tools.parser.interface
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parser.util module
+-------------------------------
+
+.. automodule:: galaxy.tools.parser.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parser.xml module
+------------------------------
+
+.. automodule:: galaxy.tools.parser.xml
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.parser.yaml module
+-------------------------------
+
+.. automodule:: galaxy.tools.parser.yaml
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tools.rst b/doc/source/lib/galaxy.tools.rst
new file mode 100644
index 0000000..01add8d
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.rst
@@ -0,0 +1,102 @@
+galaxy.tools package
+====================
+
+.. automodule:: galaxy.tools
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.tools.actions
+    galaxy.tools.data
+    galaxy.tools.data_manager
+    galaxy.tools.deps
+    galaxy.tools.filters
+    galaxy.tools.imp_exp
+    galaxy.tools.linters
+    galaxy.tools.parameters
+    galaxy.tools.parser
+    galaxy.tools.search
+    galaxy.tools.toolbox
+    galaxy.tools.util
+
+Submodules
+----------
+
+galaxy.tools.errors module
+--------------------------
+
+.. automodule:: galaxy.tools.errors
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.evaluation module
+------------------------------
+
+.. automodule:: galaxy.tools.evaluation
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.exception_handling module
+--------------------------------------
+
+.. automodule:: galaxy.tools.exception_handling
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.execute module
+---------------------------
+
+.. automodule:: galaxy.tools.execute
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.lint module
+------------------------
+
+.. automodule:: galaxy.tools.lint
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.loader module
+--------------------------
+
+.. automodule:: galaxy.tools.loader
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.loader_directory module
+------------------------------------
+
+.. automodule:: galaxy.tools.loader_directory
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.test module
+------------------------
+
+.. automodule:: galaxy.tools.test
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.wrappers module
+----------------------------
+
+.. automodule:: galaxy.tools.wrappers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tools.search.rst b/doc/source/lib/galaxy.tools.search.rst
new file mode 100644
index 0000000..edbb5bf
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.search.rst
@@ -0,0 +1,8 @@
+galaxy.tools.search package
+===========================
+
+.. automodule:: galaxy.tools.search
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.tools.toolbox.filters.rst b/doc/source/lib/galaxy.tools.toolbox.filters.rst
new file mode 100644
index 0000000..a5a81d2
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.toolbox.filters.rst
@@ -0,0 +1,8 @@
+galaxy.tools.toolbox.filters package
+====================================
+
+.. automodule:: galaxy.tools.toolbox.filters
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.tools.toolbox.lineages.rst b/doc/source/lib/galaxy.tools.toolbox.lineages.rst
new file mode 100644
index 0000000..3159c8d
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.toolbox.lineages.rst
@@ -0,0 +1,44 @@
+galaxy.tools.toolbox.lineages package
+=====================================
+
+.. automodule:: galaxy.tools.toolbox.lineages
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.tools.toolbox.lineages.factory module
+--------------------------------------------
+
+.. automodule:: galaxy.tools.toolbox.lineages.factory
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.toolbox.lineages.interface module
+----------------------------------------------
+
+.. automodule:: galaxy.tools.toolbox.lineages.interface
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.toolbox.lineages.stock module
+------------------------------------------
+
+.. automodule:: galaxy.tools.toolbox.lineages.stock
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.toolbox.lineages.tool_shed module
+----------------------------------------------
+
+.. automodule:: galaxy.tools.toolbox.lineages.tool_shed
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tools.toolbox.rst b/doc/source/lib/galaxy.tools.toolbox.rst
new file mode 100644
index 0000000..7528cb7
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.toolbox.rst
@@ -0,0 +1,60 @@
+galaxy.tools.toolbox package
+============================
+
+.. automodule:: galaxy.tools.toolbox
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.tools.toolbox.filters
+    galaxy.tools.toolbox.lineages
+
+Submodules
+----------
+
+galaxy.tools.toolbox.base module
+--------------------------------
+
+.. automodule:: galaxy.tools.toolbox.base
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.toolbox.integrated_panel module
+--------------------------------------------
+
+.. automodule:: galaxy.tools.toolbox.integrated_panel
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.toolbox.panel module
+---------------------------------
+
+.. automodule:: galaxy.tools.toolbox.panel
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.toolbox.tags module
+--------------------------------
+
+.. automodule:: galaxy.tools.toolbox.tags
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.tools.toolbox.watcher module
+-----------------------------------
+
+.. automodule:: galaxy.tools.toolbox.watcher
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.tools.util.galaxyops.rst b/doc/source/lib/galaxy.tools.util.galaxyops.rst
new file mode 100644
index 0000000..4ea949a
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.util.galaxyops.rst
@@ -0,0 +1,8 @@
+galaxy.tools.util.galaxyops package
+===================================
+
+.. automodule:: galaxy.tools.util.galaxyops
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.tools.util.rst b/doc/source/lib/galaxy.tools.util.rst
new file mode 100644
index 0000000..f7b6b6f
--- /dev/null
+++ b/doc/source/lib/galaxy.tools.util.rst
@@ -0,0 +1,27 @@
+galaxy.tools.util package
+=========================
+
+.. automodule:: galaxy.tools.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.tools.util.galaxyops
+
+Submodules
+----------
+
+galaxy.tools.util.maf_utilities module
+--------------------------------------
+
+.. automodule:: galaxy.tools.util.maf_utilities
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.util.backports.rst b/doc/source/lib/galaxy.util.backports.rst
new file mode 100644
index 0000000..ca3788e
--- /dev/null
+++ b/doc/source/lib/galaxy.util.backports.rst
@@ -0,0 +1,7 @@
+galaxy.util.backports package
+=============================
+
+.. automodule:: galaxy.util.backports
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/lib/galaxy.util.log.rst b/doc/source/lib/galaxy.util.log.rst
new file mode 100644
index 0000000..0f81bc0
--- /dev/null
+++ b/doc/source/lib/galaxy.util.log.rst
@@ -0,0 +1,20 @@
+galaxy.util.log package
+=======================
+
+.. automodule:: galaxy.util.log
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.util.log.fluent_log module
+---------------------------------
+
+.. automodule:: galaxy.util.log.fluent_log
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.util.pastescript.rst b/doc/source/lib/galaxy.util.pastescript.rst
new file mode 100644
index 0000000..0a861eb
--- /dev/null
+++ b/doc/source/lib/galaxy.util.pastescript.rst
@@ -0,0 +1,28 @@
+galaxy.util.pastescript package
+===============================
+
+.. automodule:: galaxy.util.pastescript
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.util.pastescript.loadwsgi module
+---------------------------------------
+
+.. automodule:: galaxy.util.pastescript.loadwsgi
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.pastescript.serve module
+------------------------------------
+
+.. automodule:: galaxy.util.pastescript.serve
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.util.rst b/doc/source/lib/galaxy.util.rst
new file mode 100644
index 0000000..b059a02
--- /dev/null
+++ b/doc/source/lib/galaxy.util.rst
@@ -0,0 +1,309 @@
+galaxy.util package
+===================
+
+.. automodule:: galaxy.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.util.backports
+    galaxy.util.log
+    galaxy.util.pastescript
+
+Submodules
+----------
+
+galaxy.util.aliaspickler module
+-------------------------------
+
+.. automodule:: galaxy.util.aliaspickler
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.biostar module
+--------------------------
+
+.. automodule:: galaxy.util.biostar
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.bunch module
+------------------------
+
+.. automodule:: galaxy.util.bunch
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.checkers module
+---------------------------
+
+.. automodule:: galaxy.util.checkers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.dbkeys module
+-------------------------
+
+.. automodule:: galaxy.util.dbkeys
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.dictifiable module
+------------------------------
+
+.. automodule:: galaxy.util.dictifiable
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.expressions module
+------------------------------
+
+.. automodule:: galaxy.util.expressions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.filelock module
+---------------------------
+
+.. automodule:: galaxy.util.filelock
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.hash_util module
+----------------------------
+
+.. automodule:: galaxy.util.hash_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.heartbeat module
+----------------------------
+
+.. automodule:: galaxy.util.heartbeat
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.image_util module
+-----------------------------
+
+.. automodule:: galaxy.util.image_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.inflection module
+-----------------------------
+
+.. automodule:: galaxy.util.inflection
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.json module
+-----------------------
+
+.. automodule:: galaxy.util.json
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.jstree module
+-------------------------
+
+.. automodule:: galaxy.util.jstree
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.lazy_process module
+-------------------------------
+
+.. automodule:: galaxy.util.lazy_process
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.multi_byte module
+-----------------------------
+
+.. automodule:: galaxy.util.multi_byte
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.none_like module
+----------------------------
+
+.. automodule:: galaxy.util.none_like
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.object_wrapper module
+---------------------------------
+
+.. automodule:: galaxy.util.object_wrapper
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.odict module
+------------------------
+
+.. automodule:: galaxy.util.odict
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.permutations module
+-------------------------------
+
+.. automodule:: galaxy.util.permutations
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.plugin_config module
+--------------------------------
+
+.. automodule:: galaxy.util.plugin_config
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.postfork module
+---------------------------
+
+.. automodule:: galaxy.util.postfork
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.properties module
+-----------------------------
+
+.. automodule:: galaxy.util.properties
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.sanitize_html module
+--------------------------------
+
+.. automodule:: galaxy.util.sanitize_html
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.simplegraph module
+------------------------------
+
+.. automodule:: galaxy.util.simplegraph
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.sleeper module
+--------------------------
+
+.. automodule:: galaxy.util.sleeper
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.sockets module
+--------------------------
+
+.. automodule:: galaxy.util.sockets
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.specs module
+------------------------
+
+.. automodule:: galaxy.util.specs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.sqlite module
+-------------------------
+
+.. automodule:: galaxy.util.sqlite
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.streamball module
+-----------------------------
+
+.. automodule:: galaxy.util.streamball
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.submodules module
+-----------------------------
+
+.. automodule:: galaxy.util.submodules
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.template module
+---------------------------
+
+.. automodule:: galaxy.util.template
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.topsort module
+--------------------------
+
+.. automodule:: galaxy.util.topsort
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.ucsc module
+-----------------------
+
+.. automodule:: galaxy.util.ucsc
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.validation module
+-----------------------------
+
+.. automodule:: galaxy.util.validation
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.util.xml_macros module
+-----------------------------
+
+.. automodule:: galaxy.util.xml_macros
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
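None of these directives render unless autodoc is enabled in the Sphinx
configuration; automodule and its :members:, :undoc-members:, and
:show-inheritance: options all come from that one extension. An assumed
excerpt of what doc/source/conf.py needs at minimum for the pages above:

    # doc/source/conf.py (assumed excerpt): sphinx.ext.autodoc provides
    # the automodule directive used throughout doc/source/lib.
    extensions = [
        "sphinx.ext.autodoc",
    ]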
diff --git a/doc/source/lib/galaxy.visualization.data_providers.phyloviz.rst b/doc/source/lib/galaxy.visualization.data_providers.phyloviz.rst
new file mode 100644
index 0000000..26b7cb9
--- /dev/null
+++ b/doc/source/lib/galaxy.visualization.data_providers.phyloviz.rst
@@ -0,0 +1,44 @@
+galaxy.visualization.data_providers.phyloviz package
+====================================================
+
+.. automodule:: galaxy.visualization.data_providers.phyloviz
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.visualization.data_providers.phyloviz.baseparser module
+--------------------------------------------------------------
+
+.. automodule:: galaxy.visualization.data_providers.phyloviz.baseparser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.data_providers.phyloviz.newickparser module
+----------------------------------------------------------------
+
+.. automodule:: galaxy.visualization.data_providers.phyloviz.newickparser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.data_providers.phyloviz.nexusparser module
+---------------------------------------------------------------
+
+.. automodule:: galaxy.visualization.data_providers.phyloviz.nexusparser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.data_providers.phyloviz.phyloxmlparser module
+------------------------------------------------------------------
+
+.. automodule:: galaxy.visualization.data_providers.phyloviz.phyloxmlparser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.visualization.data_providers.rst b/doc/source/lib/galaxy.visualization.data_providers.rst
new file mode 100644
index 0000000..3180ac7
--- /dev/null
+++ b/doc/source/lib/galaxy.visualization.data_providers.rst
@@ -0,0 +1,51 @@
+galaxy.visualization.data_providers package
+===========================================
+
+.. automodule:: galaxy.visualization.data_providers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.visualization.data_providers.phyloviz
+
+Submodules
+----------
+
+galaxy.visualization.data_providers.basic module
+------------------------------------------------
+
+.. automodule:: galaxy.visualization.data_providers.basic
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.data_providers.cigar module
+------------------------------------------------
+
+.. automodule:: galaxy.visualization.data_providers.cigar
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.data_providers.genome module
+-------------------------------------------------
+
+.. automodule:: galaxy.visualization.data_providers.genome
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.data_providers.registry module
+---------------------------------------------------
+
+.. automodule:: galaxy.visualization.data_providers.registry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.visualization.genome.rst b/doc/source/lib/galaxy.visualization.genome.rst
new file mode 100644
index 0000000..9097529
--- /dev/null
+++ b/doc/source/lib/galaxy.visualization.genome.rst
@@ -0,0 +1,8 @@
+galaxy.visualization.genome package
+===================================
+
+.. automodule:: galaxy.visualization.genome
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.visualization.plugins.rst b/doc/source/lib/galaxy.visualization.plugins.rst
new file mode 100644
index 0000000..b8ded7d
--- /dev/null
+++ b/doc/source/lib/galaxy.visualization.plugins.rst
@@ -0,0 +1,52 @@
+galaxy.visualization.plugins package
+====================================
+
+.. automodule:: galaxy.visualization.plugins
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.visualization.plugins.config_parser module
+-------------------------------------------------
+
+.. automodule:: galaxy.visualization.plugins.config_parser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.plugins.plugin module
+------------------------------------------
+
+.. automodule:: galaxy.visualization.plugins.plugin
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.plugins.registry module
+--------------------------------------------
+
+.. automodule:: galaxy.visualization.plugins.registry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.plugins.resource_parser module
+---------------------------------------------------
+
+.. automodule:: galaxy.visualization.plugins.resource_parser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.visualization.plugins.utils module
+-----------------------------------------
+
+.. automodule:: galaxy.visualization.plugins.utils
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.visualization.rst b/doc/source/lib/galaxy.visualization.rst
new file mode 100644
index 0000000..1f33449
--- /dev/null
+++ b/doc/source/lib/galaxy.visualization.rst
@@ -0,0 +1,30 @@
+galaxy.visualization package
+============================
+
+.. automodule:: galaxy.visualization
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.visualization.data_providers
+    galaxy.visualization.genome
+    galaxy.visualization.plugins
+    galaxy.visualization.tracks
+
+Submodules
+----------
+
+galaxy.visualization.genomes module
+-----------------------------------
+
+.. automodule:: galaxy.visualization.genomes
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.visualization.tracks.rst b/doc/source/lib/galaxy.visualization.tracks.rst
new file mode 100644
index 0000000..2d27939
--- /dev/null
+++ b/doc/source/lib/galaxy.visualization.tracks.rst
@@ -0,0 +1,8 @@
+galaxy.visualization.tracks package
+===================================
+
+.. automodule:: galaxy.visualization.tracks
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.web.base.controllers.rst b/doc/source/lib/galaxy.web.base.controllers.rst
new file mode 100644
index 0000000..6c153a1
--- /dev/null
+++ b/doc/source/lib/galaxy.web.base.controllers.rst
@@ -0,0 +1,20 @@
+galaxy.web.base.controllers package
+===================================
+
+.. automodule:: galaxy.web.base.controllers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.web.base.controllers.admin module
+----------------------------------------
+
+.. automodule:: galaxy.web.base.controllers.admin
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.web.base.rst b/doc/source/lib/galaxy.web.base.rst
new file mode 100644
index 0000000..3ac9072
--- /dev/null
+++ b/doc/source/lib/galaxy.web.base.rst
@@ -0,0 +1,43 @@
+galaxy.web.base package
+=======================
+
+.. automodule:: galaxy.web.base
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.web.base.controllers
+
+Submodules
+----------
+
+galaxy.web.base.controller module
+---------------------------------
+
+.. automodule:: galaxy.web.base.controller
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.base.interactive_environments module
+-----------------------------------------------
+
+.. automodule:: galaxy.web.base.interactive_environments
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.base.pluginframework module
+--------------------------------------
+
+.. automodule:: galaxy.web.base.pluginframework
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.web.framework.helpers.rst b/doc/source/lib/galaxy.web.framework.helpers.rst
new file mode 100644
index 0000000..d191693
--- /dev/null
+++ b/doc/source/lib/galaxy.web.framework.helpers.rst
@@ -0,0 +1,20 @@
+galaxy.web.framework.helpers package
+====================================
+
+.. automodule:: galaxy.web.framework.helpers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.web.framework.helpers.grids module
+-----------------------------------------
+
+.. automodule:: galaxy.web.framework.helpers.grids
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.web.framework.middleware.rst b/doc/source/lib/galaxy.web.framework.middleware.rst
new file mode 100644
index 0000000..20724c7
--- /dev/null
+++ b/doc/source/lib/galaxy.web.framework.middleware.rst
@@ -0,0 +1,84 @@
+galaxy.web.framework.middleware package
+=======================================
+
+.. automodule:: galaxy.web.framework.middleware
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.web.framework.middleware.error module
+--------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.error
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.middleware.profile module
+----------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.profile
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.middleware.remoteuser module
+-------------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.remoteuser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.middleware.request_id module
+-------------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.request_id
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.middleware.sentry module
+---------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.sentry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.middleware.static module
+---------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.static
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.middleware.statsd module
+---------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.statsd
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.middleware.translogger module
+--------------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.translogger
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.middleware.xforwardedhost module
+-----------------------------------------------------
+
+.. automodule:: galaxy.web.framework.middleware.xforwardedhost
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.web.framework.rst b/doc/source/lib/galaxy.web.framework.rst
new file mode 100644
index 0000000..f6357d0
--- /dev/null
+++ b/doc/source/lib/galaxy.web.framework.rst
@@ -0,0 +1,60 @@
+galaxy.web.framework package
+============================
+
+.. automodule:: galaxy.web.framework
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.web.framework.helpers
+    galaxy.web.framework.middleware
+
+Submodules
+----------
+
+galaxy.web.framework.base module
+--------------------------------
+
+.. automodule:: galaxy.web.framework.base
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.decorators module
+--------------------------------------
+
+.. automodule:: galaxy.web.framework.decorators
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.formbuilder module
+---------------------------------------
+
+.. automodule:: galaxy.web.framework.formbuilder
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.openid_manager module
+------------------------------------------
+
+.. automodule:: galaxy.web.framework.openid_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.framework.webapp module
+----------------------------------
+
+.. automodule:: galaxy.web.framework.webapp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.web.proxy.rst b/doc/source/lib/galaxy.web.proxy.rst
new file mode 100644
index 0000000..53c7c4b
--- /dev/null
+++ b/doc/source/lib/galaxy.web.proxy.rst
@@ -0,0 +1,20 @@
+galaxy.web.proxy package
+========================
+
+.. automodule:: galaxy.web.proxy
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.web.proxy.filelock module
+--------------------------------
+
+.. automodule:: galaxy.web.proxy.filelock
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.web.rst b/doc/source/lib/galaxy.web.rst
new file mode 100644
index 0000000..762db65
--- /dev/null
+++ b/doc/source/lib/galaxy.web.rst
@@ -0,0 +1,54 @@
+galaxy.web package
+==================
+
+.. automodule:: galaxy.web
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.web.base
+    galaxy.web.framework
+    galaxy.web.proxy
+    galaxy.web.security
+
+Submodules
+----------
+
+galaxy.web.buildapp module
+--------------------------
+
+.. automodule:: galaxy.web.buildapp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.form_builder module
+------------------------------
+
+.. automodule:: galaxy.web.form_builder
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.formatting module
+----------------------------
+
+.. automodule:: galaxy.web.formatting
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.web.params module
+------------------------
+
+.. automodule:: galaxy.web.params
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.web.security.rst b/doc/source/lib/galaxy.web.security.rst
new file mode 100644
index 0000000..276b543
--- /dev/null
+++ b/doc/source/lib/galaxy.web.security.rst
@@ -0,0 +1,8 @@
+galaxy.web.security package
+===========================
+
+.. automodule:: galaxy.web.security
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.webapps.galaxy.api.rst b/doc/source/lib/galaxy.webapps.galaxy.api.rst
new file mode 100644
index 0000000..657035e
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.galaxy.api.rst
@@ -0,0 +1,324 @@
+galaxy.webapps.galaxy.api package
+=================================
+
+.. automodule:: galaxy.webapps.galaxy.api
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.galaxy.api.annotations module
+--------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.annotations
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.authenticate module
+---------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.authenticate
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.configuration module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.configuration
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.dataset_collections module
+----------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.dataset_collections
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.datasets module
+-----------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.datasets
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.datatypes module
+------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.datatypes
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.extended_metadata module
+--------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.extended_metadata
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.folder_contents module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.folder_contents
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.folders module
+----------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.folders
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.forms module
+--------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.forms
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.genomes module
+----------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.genomes
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.group_roles module
+--------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.group_roles
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.group_users module
+--------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.group_users
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.groups module
+---------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.groups
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.histories module
+------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.histories
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.history_contents module
+-------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.history_contents
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.item_tags module
+------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.item_tags
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.job_files module
+------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.job_files
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.jobs module
+-------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.jobs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.lda_datasets module
+---------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.lda_datasets
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.libraries module
+------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.libraries
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.library_contents module
+-------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.library_contents
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.metrics module
+----------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.metrics
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.page_revisions module
+-----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.page_revisions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.pages module
+--------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.pages
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.provenance module
+-------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.provenance
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.quotas module
+---------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.quotas
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.remote_files module
+---------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.remote_files
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.request_types module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.request_types
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.requests module
+-----------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.requests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.roles module
+--------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.roles
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.samples module
+----------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.samples
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.search module
+---------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.search
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.tool_data module
+------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.tool_data
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.tool_shed_repositories module
+-------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.tool_shed_repositories
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.tools module
+--------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.tools
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.users module
+--------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.users
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.visualizations module
+-----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.visualizations
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.api.workflows module
+------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.workflows
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
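The toctree blocks link pages by docname, so each entry such as
galaxy.webapps.galaxy.api must have a matching doc/source/lib/<name>.rst
file; a missing stub makes Sphinx warn about a reference to a
nonexisting document. A small sketch, assuming the layout above, that
checks every toctree entry resolves to a stub:

    # Sketch (assumed doc/source/lib layout): report toctree entries
    # that have no corresponding .rst stub file.
    import os
    import re

    DOC_DIR = "doc/source/lib"
    # Toctree entries in these stubs are four-space-indented docnames.
    ENTRY = re.compile(r"^    (galaxy[\w.]*)\s*$")

    for rst in sorted(os.listdir(DOC_DIR)):
        if not rst.endswith(".rst"):
            continue
        with open(os.path.join(DOC_DIR, rst)) as handle:
            for line in handle:
                match = ENTRY.match(line)
                if match:
                    target = os.path.join(DOC_DIR, match.group(1) + ".rst")
                    if not os.path.exists(target):
                        print(rst, "-> missing stub:", match.group(1))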
diff --git a/doc/source/lib/galaxy.webapps.galaxy.controllers.rst b/doc/source/lib/galaxy.webapps.galaxy.controllers.rst
new file mode 100644
index 0000000..fa807bd
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.galaxy.controllers.rst
@@ -0,0 +1,236 @@
+galaxy.webapps.galaxy.controllers package
+=========================================
+
+.. automodule:: galaxy.webapps.galaxy.controllers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.galaxy.controllers.admin module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.admin
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.admin_toolshed module
+-------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.admin_toolshed
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.async module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.async
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.biostar module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.biostar
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.data_manager module
+-----------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.data_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.dataset module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.dataset
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.error module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.error
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.external_service module
+---------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.external_service
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.external_services module
+----------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.external_services
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.forms module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.forms
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.history module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.history
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.library module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.library
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.library_admin module
+------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.library_admin
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.library_common module
+-------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.library_common
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.mobile module
+-----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.mobile
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.page module
+---------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.page
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.request_type module
+-----------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.request_type
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.requests module
+-------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.requests
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.requests_admin module
+-------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.requests_admin
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.requests_common module
+--------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.requests_common
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.root module
+---------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.root
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.search module
+-----------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.search
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.tag module
+--------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.tag
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.tool_runner module
+----------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.tool_runner
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.user module
+---------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.user
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.userskeys module
+--------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.userskeys
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.visualization module
+------------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.visualization
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.galaxy.controllers.workflow module
+-------------------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.controllers.workflow
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.galaxy.rst b/doc/source/lib/galaxy.webapps.galaxy.rst
new file mode 100644
index 0000000..5256155
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.galaxy.rst
@@ -0,0 +1,28 @@
+galaxy.webapps.galaxy package
+=============================
+
+.. automodule:: galaxy.webapps.galaxy
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.webapps.galaxy.api
+    galaxy.webapps.galaxy.controllers
+
+Submodules
+----------
+
+galaxy.webapps.galaxy.buildapp module
+-------------------------------------
+
+.. automodule:: galaxy.webapps.galaxy.buildapp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.reports.controllers.rst b/doc/source/lib/galaxy.webapps.reports.controllers.rst
new file mode 100644
index 0000000..e5aee39
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.reports.controllers.rst
@@ -0,0 +1,76 @@
+galaxy.webapps.reports.controllers package
+==========================================
+
+.. automodule:: galaxy.webapps.reports.controllers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.reports.controllers.home module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.home
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.controllers.jobs module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.jobs
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.controllers.query module
+-----------------------------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.query
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.controllers.root module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.root
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.controllers.sample_tracking module
+---------------------------------------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.sample_tracking
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.controllers.system module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.system
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.controllers.users module
+-----------------------------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.users
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.controllers.workflows module
+---------------------------------------------------
+
+.. automodule:: galaxy.webapps.reports.controllers.workflows
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.reports.rst b/doc/source/lib/galaxy.webapps.reports.rst
new file mode 100644
index 0000000..8e9fff6
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.reports.rst
@@ -0,0 +1,43 @@
+galaxy.webapps.reports package
+==============================
+
+.. automodule:: galaxy.webapps.reports
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.webapps.reports.controllers
+
+Submodules
+----------
+
+galaxy.webapps.reports.app module
+---------------------------------
+
+.. automodule:: galaxy.webapps.reports.app
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.buildapp module
+--------------------------------------
+
+.. automodule:: galaxy.webapps.reports.buildapp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.reports.config module
+------------------------------------
+
+.. automodule:: galaxy.webapps.reports.config
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.rst b/doc/source/lib/galaxy.webapps.rst
new file mode 100644
index 0000000..ddb0cd6
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.rst
@@ -0,0 +1,29 @@
+galaxy.webapps package
+======================
+
+.. automodule:: galaxy.webapps
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.webapps.galaxy
+    galaxy.webapps.reports
+    galaxy.webapps.tool_shed
+
+Submodules
+----------
+
+galaxy.webapps.util module
+--------------------------
+
+.. automodule:: galaxy.webapps.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.api.rst b/doc/source/lib/galaxy.webapps.tool_shed.api.rst
new file mode 100644
index 0000000..16a5464
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.api.rst
@@ -0,0 +1,76 @@
+galaxy.webapps.tool_shed.api package
+====================================
+
+.. automodule:: galaxy.webapps.tool_shed.api
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.tool_shed.api.authenticate module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.authenticate
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.api.categories module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.categories
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.api.configuration module
+-------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.configuration
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.api.groups module
+------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.groups
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.api.repositories module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.repositories
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.api.repository_revisions module
+--------------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.repository_revisions
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.api.tools module
+-----------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.tools
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.api.users module
+-----------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.api.users
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.controllers.rst b/doc/source/lib/galaxy.webapps.tool_shed.controllers.rst
new file mode 100644
index 0000000..6e9e791
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.controllers.rst
@@ -0,0 +1,68 @@
+galaxy.webapps.tool_shed.controllers package
+============================================
+
+.. automodule:: galaxy.webapps.tool_shed.controllers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.tool_shed.controllers.admin module
+-------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.admin
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.controllers.groups module
+--------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.groups
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.controllers.hg module
+----------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.hg
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.controllers.repository module
+------------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.repository
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.controllers.repository_review module
+-------------------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.repository_review
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.controllers.upload module
+--------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.upload
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.controllers.user module
+------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.controllers.user
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.framework.middleware.rst b/doc/source/lib/galaxy.webapps.tool_shed.framework.middleware.rst
new file mode 100644
index 0000000..361baef
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.framework.middleware.rst
@@ -0,0 +1,28 @@
+galaxy.webapps.tool_shed.framework.middleware package
+=====================================================
+
+.. automodule:: galaxy.webapps.tool_shed.framework.middleware
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.tool_shed.framework.middleware.hg module
+-------------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.framework.middleware.hg
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.framework.middleware.remoteuser module
+---------------------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.framework.middleware.remoteuser
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.framework.rst b/doc/source/lib/galaxy.webapps.tool_shed.framework.rst
new file mode 100644
index 0000000..0276dcb
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.framework.rst
@@ -0,0 +1,15 @@
+galaxy.webapps.tool_shed.framework package
+==========================================
+
+.. automodule:: galaxy.webapps.tool_shed.framework
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.webapps.tool_shed.framework.middleware
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.model.migrate.rst b/doc/source/lib/galaxy.webapps.tool_shed.model.migrate.rst
new file mode 100644
index 0000000..9fd4bd5
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.model.migrate.rst
@@ -0,0 +1,20 @@
+galaxy.webapps.tool_shed.model.migrate package
+==============================================
+
+.. automodule:: galaxy.webapps.tool_shed.model.migrate
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.tool_shed.model.migrate.check module
+---------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.model.migrate.check
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.model.rst b/doc/source/lib/galaxy.webapps.tool_shed.model.rst
new file mode 100644
index 0000000..bc60bd7
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.model.rst
@@ -0,0 +1,27 @@
+galaxy.webapps.tool_shed.model package
+======================================
+
+.. automodule:: galaxy.webapps.tool_shed.model
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.webapps.tool_shed.model.migrate
+
+Submodules
+----------
+
+galaxy.webapps.tool_shed.model.mapping module
+---------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.model.mapping
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.rst b/doc/source/lib/galaxy.webapps.tool_shed.rst
new file mode 100644
index 0000000..836aa57
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.rst
@@ -0,0 +1,49 @@
+galaxy.webapps.tool_shed package
+================================
+
+.. automodule:: galaxy.webapps.tool_shed
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.webapps.tool_shed.api
+    galaxy.webapps.tool_shed.controllers
+    galaxy.webapps.tool_shed.framework
+    galaxy.webapps.tool_shed.model
+    galaxy.webapps.tool_shed.search
+    galaxy.webapps.tool_shed.security
+    galaxy.webapps.tool_shed.util
+
+Submodules
+----------
+
+galaxy.webapps.tool_shed.app module
+-----------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.app
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.buildapp module
+----------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.buildapp
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.config module
+--------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.config
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.search.rst b/doc/source/lib/galaxy.webapps.tool_shed.search.rst
new file mode 100644
index 0000000..9daeff1
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.search.rst
@@ -0,0 +1,28 @@
+galaxy.webapps.tool_shed.search package
+=======================================
+
+.. automodule:: galaxy.webapps.tool_shed.search
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.tool_shed.search.repo_search module
+--------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.search.repo_search
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.search.tool_search module
+--------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.search.tool_search
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.security.rst b/doc/source/lib/galaxy.webapps.tool_shed.security.rst
new file mode 100644
index 0000000..4e39d99
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.security.rst
@@ -0,0 +1,8 @@
+galaxy.webapps.tool_shed.security package
+=========================================
+
+.. automodule:: galaxy.webapps.tool_shed.security
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/galaxy.webapps.tool_shed.util.rst b/doc/source/lib/galaxy.webapps.tool_shed.util.rst
new file mode 100644
index 0000000..6ee1d6c
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.tool_shed.util.rst
@@ -0,0 +1,36 @@
+galaxy.webapps.tool_shed.util package
+=====================================
+
+.. automodule:: galaxy.webapps.tool_shed.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.webapps.tool_shed.util.hgweb_config module
+-------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.util.hgweb_config
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.util.ratings_util module
+-------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.util.ratings_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.webapps.tool_shed.util.shed_statistics module
+----------------------------------------------------
+
+.. automodule:: galaxy.webapps.tool_shed.util.shed_statistics
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.work.rst b/doc/source/lib/galaxy.work.rst
new file mode 100644
index 0000000..415a5b8
--- /dev/null
+++ b/doc/source/lib/galaxy.work.rst
@@ -0,0 +1,20 @@
+galaxy.work package
+===================
+
+.. automodule:: galaxy.work
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.work.context module
+--------------------------
+
+.. automodule:: galaxy.work.context
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.workflow.rst b/doc/source/lib/galaxy.workflow.rst
new file mode 100644
index 0000000..8d0a2c4
--- /dev/null
+++ b/doc/source/lib/galaxy.workflow.rst
@@ -0,0 +1,75 @@
+galaxy.workflow package
+=======================
+
+.. automodule:: galaxy.workflow
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy.workflow.schedulers
+
+Submodules
+----------
+
+galaxy.workflow.extract module
+------------------------------
+
+.. automodule:: galaxy.workflow.extract
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.workflow.modules module
+------------------------------
+
+.. automodule:: galaxy.workflow.modules
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.workflow.render module
+-----------------------------
+
+.. automodule:: galaxy.workflow.render
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.workflow.run module
+--------------------------
+
+.. automodule:: galaxy.workflow.run
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.workflow.run_request module
+----------------------------------
+
+.. automodule:: galaxy.workflow.run_request
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.workflow.scheduling_manager module
+-----------------------------------------
+
+.. automodule:: galaxy.workflow.scheduling_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy.workflow.steps module
+----------------------------
+
+.. automodule:: galaxy.workflow.steps
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy.workflow.schedulers.rst b/doc/source/lib/galaxy.workflow.schedulers.rst
new file mode 100644
index 0000000..4380119
--- /dev/null
+++ b/doc/source/lib/galaxy.workflow.schedulers.rst
@@ -0,0 +1,20 @@
+galaxy.workflow.schedulers package
+==================================
+
+.. automodule:: galaxy.workflow.schedulers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy.workflow.schedulers.core module
+--------------------------------------
+
+.. automodule:: galaxy.workflow.schedulers.core
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy_ext.metadata.rst b/doc/source/lib/galaxy_ext.metadata.rst
new file mode 100644
index 0000000..489f529
--- /dev/null
+++ b/doc/source/lib/galaxy_ext.metadata.rst
@@ -0,0 +1,20 @@
+galaxy_ext.metadata package
+===========================
+
+.. automodule:: galaxy_ext.metadata
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy_ext.metadata.set_metadata module
+---------------------------------------
+
+.. automodule:: galaxy_ext.metadata.set_metadata
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/galaxy_ext.rst b/doc/source/lib/galaxy_ext.rst
new file mode 100644
index 0000000..040ba0b
--- /dev/null
+++ b/doc/source/lib/galaxy_ext.rst
@@ -0,0 +1,15 @@
+galaxy_ext package
+==================
+
+.. automodule:: galaxy_ext
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy_ext.metadata
+
diff --git a/doc/source/lib/galaxy_utils.rst b/doc/source/lib/galaxy_utils.rst
new file mode 100644
index 0000000..c17a719
--- /dev/null
+++ b/doc/source/lib/galaxy_utils.rst
@@ -0,0 +1,15 @@
+galaxy_utils package
+====================
+
+.. automodule:: galaxy_utils
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    galaxy_utils.sequence
+
diff --git a/doc/source/lib/galaxy_utils.sequence.rst b/doc/source/lib/galaxy_utils.sequence.rst
new file mode 100644
index 0000000..f047ab6
--- /dev/null
+++ b/doc/source/lib/galaxy_utils.sequence.rst
@@ -0,0 +1,52 @@
+galaxy_utils.sequence package
+=============================
+
+.. automodule:: galaxy_utils.sequence
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+galaxy_utils.sequence.fasta module
+----------------------------------
+
+.. automodule:: galaxy_utils.sequence.fasta
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy_utils.sequence.fastq module
+----------------------------------
+
+.. automodule:: galaxy_utils.sequence.fastq
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy_utils.sequence.sequence module
+-------------------------------------
+
+.. automodule:: galaxy_utils.sequence.sequence
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy_utils.sequence.transform module
+--------------------------------------
+
+.. automodule:: galaxy_utils.sequence.transform
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+galaxy_utils.sequence.vcf module
+--------------------------------
+
+.. automodule:: galaxy_utils.sequence.vcf
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/log_tempfile.rst b/doc/source/lib/log_tempfile.rst
new file mode 100644
index 0000000..51b9049
--- /dev/null
+++ b/doc/source/lib/log_tempfile.rst
@@ -0,0 +1,7 @@
+log_tempfile module
+===================
+
+.. automodule:: log_tempfile
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/lib/mimeparse.rst b/doc/source/lib/mimeparse.rst
new file mode 100644
index 0000000..afcdfa0
--- /dev/null
+++ b/doc/source/lib/mimeparse.rst
@@ -0,0 +1,7 @@
+mimeparse module
+================
+
+.. automodule:: mimeparse
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/lib/modules.rst b/doc/source/lib/modules.rst
new file mode 100644
index 0000000..a1f2316
--- /dev/null
+++ b/doc/source/lib/modules.rst
@@ -0,0 +1,13 @@
+lib
+===
+
+.. toctree::
+   :maxdepth: 4
+
+   galaxy
+   galaxy_ext
+   galaxy_utils
+   log_tempfile
+   mimeparse
+   psyco_full
+   tool_shed
diff --git a/doc/source/lib/psyco_full.rst b/doc/source/lib/psyco_full.rst
new file mode 100644
index 0000000..370be05
--- /dev/null
+++ b/doc/source/lib/psyco_full.rst
@@ -0,0 +1,7 @@
+psyco_full module
+=================
+
+.. automodule:: psyco_full
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/source/lib/tool_shed.capsule.rst b/doc/source/lib/tool_shed.capsule.rst
new file mode 100644
index 0000000..3865f87
--- /dev/null
+++ b/doc/source/lib/tool_shed.capsule.rst
@@ -0,0 +1,20 @@
+tool_shed.capsule package
+=========================
+
+.. automodule:: tool_shed.capsule
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.capsule.capsule_manager module
+----------------------------------------
+
+.. automodule:: tool_shed.capsule.capsule_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.dependencies.repository.rst b/doc/source/lib/tool_shed.dependencies.repository.rst
new file mode 100644
index 0000000..bd6356b
--- /dev/null
+++ b/doc/source/lib/tool_shed.dependencies.repository.rst
@@ -0,0 +1,20 @@
+tool_shed.dependencies.repository package
+=========================================
+
+.. automodule:: tool_shed.dependencies.repository
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.dependencies.repository.relation_builder module
+---------------------------------------------------------
+
+.. automodule:: tool_shed.dependencies.repository.relation_builder
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.dependencies.rst b/doc/source/lib/tool_shed.dependencies.rst
new file mode 100644
index 0000000..563583e
--- /dev/null
+++ b/doc/source/lib/tool_shed.dependencies.rst
@@ -0,0 +1,28 @@
+tool_shed.dependencies package
+==============================
+
+.. automodule:: tool_shed.dependencies
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    tool_shed.dependencies.repository
+    tool_shed.dependencies.tool
+
+Submodules
+----------
+
+tool_shed.dependencies.attribute_handlers module
+------------------------------------------------
+
+.. automodule:: tool_shed.dependencies.attribute_handlers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.dependencies.tool.rst b/doc/source/lib/tool_shed.dependencies.tool.rst
new file mode 100644
index 0000000..3062aa2
--- /dev/null
+++ b/doc/source/lib/tool_shed.dependencies.tool.rst
@@ -0,0 +1,20 @@
+tool_shed.dependencies.tool package
+===================================
+
+.. automodule:: tool_shed.dependencies.tool
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.dependencies.tool.tag_attribute_handler module
+--------------------------------------------------------
+
+.. automodule:: tool_shed.dependencies.tool.tag_attribute_handler
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.datatypes.rst b/doc/source/lib/tool_shed.galaxy_install.datatypes.rst
new file mode 100644
index 0000000..bab6f24
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.datatypes.rst
@@ -0,0 +1,20 @@
+tool_shed.galaxy_install.datatypes package
+==========================================
+
+.. automodule:: tool_shed.galaxy_install.datatypes
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.galaxy_install.datatypes.custom_datatype_manager module
+-----------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.datatypes.custom_datatype_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.grids.rst b/doc/source/lib/tool_shed.galaxy_install.grids.rst
new file mode 100644
index 0000000..b01741c
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.grids.rst
@@ -0,0 +1,20 @@
+tool_shed.galaxy_install.grids package
+======================================
+
+.. automodule:: tool_shed.galaxy_install.grids
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.galaxy_install.grids.admin_toolshed_grids module
+----------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.grids.admin_toolshed_grids
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.metadata.rst b/doc/source/lib/tool_shed.galaxy_install.metadata.rst
new file mode 100644
index 0000000..8a63d06
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.metadata.rst
@@ -0,0 +1,20 @@
+tool_shed.galaxy_install.metadata package
+=========================================
+
+.. automodule:: tool_shed.galaxy_install.metadata
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.galaxy_install.metadata.installed_repository_metadata_manager module
+------------------------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.metadata.installed_repository_metadata_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.migrate.rst b/doc/source/lib/tool_shed.galaxy_install.migrate.rst
new file mode 100644
index 0000000..4e3291d
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.migrate.rst
@@ -0,0 +1,28 @@
+tool_shed.galaxy_install.migrate package
+========================================
+
+.. automodule:: tool_shed.galaxy_install.migrate
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.galaxy_install.migrate.check module
+---------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.migrate.check
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.migrate.common module
+----------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.migrate.common
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.repository_dependencies.rst b/doc/source/lib/tool_shed.galaxy_install.repository_dependencies.rst
new file mode 100644
index 0000000..5110d9f
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.repository_dependencies.rst
@@ -0,0 +1,20 @@
+tool_shed.galaxy_install.repository_dependencies package
+========================================================
+
+.. automodule:: tool_shed.galaxy_install.repository_dependencies
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager module
+-------------------------------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.rst b/doc/source/lib/tool_shed.galaxy_install.rst
new file mode 100644
index 0000000..f4510a2
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.rst
@@ -0,0 +1,74 @@
+tool_shed.galaxy_install package
+================================
+
+.. automodule:: tool_shed.galaxy_install
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    tool_shed.galaxy_install.datatypes
+    tool_shed.galaxy_install.grids
+    tool_shed.galaxy_install.metadata
+    tool_shed.galaxy_install.migrate
+    tool_shed.galaxy_install.repository_dependencies
+    tool_shed.galaxy_install.tool_dependencies
+    tool_shed.galaxy_install.tools
+    tool_shed.galaxy_install.utility_containers
+
+Submodules
+----------
+
+tool_shed.galaxy_install.dependency_display module
+--------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.dependency_display
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.install_manager module
+-----------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.install_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.installed_repository_manager module
+------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.installed_repository_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.repair_repository_manager module
+---------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.repair_repository_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.tool_migration_manager module
+------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_migration_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.update_repository_manager module
+---------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.update_repository_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.tool_dependencies.recipe.rst b/doc/source/lib/tool_shed.galaxy_install.tool_dependencies.recipe.rst
new file mode 100644
index 0000000..29d7652
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.tool_dependencies.recipe.rst
@@ -0,0 +1,60 @@
+tool_shed.galaxy_install.tool_dependencies.recipe package
+=========================================================
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.recipe
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.galaxy_install.tool_dependencies.recipe.asynchronous_reader module
+----------------------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.recipe.asynchronous_reader
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder module
+-------------------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.tool_dependencies.recipe.install_environment module
+----------------------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.recipe.install_environment
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager module
+-----------------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.tool_dependencies.recipe.step_handler module
+---------------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.recipe.step_handler
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.tool_dependencies.recipe.tag_handler module
+--------------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.recipe.tag_handler
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.tool_dependencies.rst b/doc/source/lib/tool_shed.galaxy_install.tool_dependencies.rst
new file mode 100644
index 0000000..5f67bad
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.tool_dependencies.rst
@@ -0,0 +1,27 @@
+tool_shed.galaxy_install.tool_dependencies package
+==================================================
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    tool_shed.galaxy_install.tool_dependencies.recipe
+
+Submodules
+----------
+
+tool_shed.galaxy_install.tool_dependencies.env_manager module
+-------------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tool_dependencies.env_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.tools.rst b/doc/source/lib/tool_shed.galaxy_install.tools.rst
new file mode 100644
index 0000000..1d1d0f3
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.tools.rst
@@ -0,0 +1,28 @@
+tool_shed.galaxy_install.tools package
+======================================
+
+.. automodule:: tool_shed.galaxy_install.tools
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.galaxy_install.tools.data_manager module
+--------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tools.data_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.galaxy_install.tools.tool_panel_manager module
+--------------------------------------------------------
+
+.. automodule:: tool_shed.galaxy_install.tools.tool_panel_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.galaxy_install.utility_containers.rst b/doc/source/lib/tool_shed.galaxy_install.utility_containers.rst
new file mode 100644
index 0000000..347d731
--- /dev/null
+++ b/doc/source/lib/tool_shed.galaxy_install.utility_containers.rst
@@ -0,0 +1,8 @@
+tool_shed.galaxy_install.utility_containers package
+===================================================
+
+.. automodule:: tool_shed.galaxy_install.utility_containers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
diff --git a/doc/source/lib/tool_shed.grids.rst b/doc/source/lib/tool_shed.grids.rst
new file mode 100644
index 0000000..7ee5fb0
--- /dev/null
+++ b/doc/source/lib/tool_shed.grids.rst
@@ -0,0 +1,52 @@
+tool_shed.grids package
+=======================
+
+.. automodule:: tool_shed.grids
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.grids.admin_grids module
+----------------------------------
+
+.. automodule:: tool_shed.grids.admin_grids
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.grids.repository_grid_filter_manager module
+-----------------------------------------------------
+
+.. automodule:: tool_shed.grids.repository_grid_filter_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.grids.repository_grids module
+---------------------------------------
+
+.. automodule:: tool_shed.grids.repository_grids
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.grids.repository_review_grids module
+----------------------------------------------
+
+.. automodule:: tool_shed.grids.repository_review_grids
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.grids.util module
+---------------------------
+
+.. automodule:: tool_shed.grids.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.managers.rst b/doc/source/lib/tool_shed.managers.rst
new file mode 100644
index 0000000..04de3ce
--- /dev/null
+++ b/doc/source/lib/tool_shed.managers.rst
@@ -0,0 +1,28 @@
+tool_shed.managers package
+==========================
+
+.. automodule:: tool_shed.managers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.managers.groups module
+--------------------------------
+
+.. automodule:: tool_shed.managers.groups
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.managers.repositories module
+--------------------------------------
+
+.. automodule:: tool_shed.managers.repositories
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.metadata.rst b/doc/source/lib/tool_shed.metadata.rst
new file mode 100644
index 0000000..7e2ba80
--- /dev/null
+++ b/doc/source/lib/tool_shed.metadata.rst
@@ -0,0 +1,28 @@
+tool_shed.metadata package
+==========================
+
+.. automodule:: tool_shed.metadata
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.metadata.metadata_generator module
+--------------------------------------------
+
+.. automodule:: tool_shed.metadata.metadata_generator
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.metadata.repository_metadata_manager module
+-----------------------------------------------------
+
+.. automodule:: tool_shed.metadata.repository_metadata_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.repository_types.rst b/doc/source/lib/tool_shed.repository_types.rst
new file mode 100644
index 0000000..5e5cff1
--- /dev/null
+++ b/doc/source/lib/tool_shed.repository_types.rst
@@ -0,0 +1,60 @@
+tool_shed.repository_types package
+==================================
+
+.. automodule:: tool_shed.repository_types
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.repository_types.metadata module
+------------------------------------------
+
+.. automodule:: tool_shed.repository_types.metadata
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.repository_types.registry module
+------------------------------------------
+
+.. automodule:: tool_shed.repository_types.registry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.repository_types.repository_suite_definition module
+-------------------------------------------------------------
+
+.. automodule:: tool_shed.repository_types.repository_suite_definition
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.repository_types.tool_dependency_definition module
+------------------------------------------------------------
+
+.. automodule:: tool_shed.repository_types.tool_dependency_definition
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.repository_types.unrestricted module
+----------------------------------------------
+
+.. automodule:: tool_shed.repository_types.unrestricted
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.repository_types.util module
+--------------------------------------
+
+.. automodule:: tool_shed.repository_types.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.rst b/doc/source/lib/tool_shed.rst
new file mode 100644
index 0000000..9ea2997
--- /dev/null
+++ b/doc/source/lib/tool_shed.rst
@@ -0,0 +1,44 @@
+tool_shed package
+=================
+
+.. automodule:: tool_shed
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Subpackages
+-----------
+
+.. toctree::
+
+    tool_shed.capsule
+    tool_shed.dependencies
+    tool_shed.galaxy_install
+    tool_shed.grids
+    tool_shed.managers
+    tool_shed.metadata
+    tool_shed.repository_types
+    tool_shed.tools
+    tool_shed.util
+    tool_shed.utility_containers
+
+Submodules
+----------
+
+tool_shed.repository_registry module
+------------------------------------
+
+.. automodule:: tool_shed.repository_registry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.tool_shed_registry module
+-----------------------------------
+
+.. automodule:: tool_shed.tool_shed_registry
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.tools.rst b/doc/source/lib/tool_shed.tools.rst
new file mode 100644
index 0000000..bfc71f2
--- /dev/null
+++ b/doc/source/lib/tool_shed.tools.rst
@@ -0,0 +1,36 @@
+tool_shed.tools package
+=======================
+
+.. automodule:: tool_shed.tools
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.tools.data_table_manager module
+-----------------------------------------
+
+.. automodule:: tool_shed.tools.data_table_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.tools.tool_validator module
+-------------------------------------
+
+.. automodule:: tool_shed.tools.tool_validator
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.tools.tool_version_manager module
+-------------------------------------------
+
+.. automodule:: tool_shed.tools.tool_version_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.util.rst b/doc/source/lib/tool_shed.util.rst
new file mode 100644
index 0000000..47b221e
--- /dev/null
+++ b/doc/source/lib/tool_shed.util.rst
@@ -0,0 +1,156 @@
+tool_shed.util package
+======================
+
+.. automodule:: tool_shed.util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.util.basic_util module
+--------------------------------
+
+.. automodule:: tool_shed.util.basic_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.commit_util module
+---------------------------------
+
+.. automodule:: tool_shed.util.commit_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.common_util module
+---------------------------------
+
+.. automodule:: tool_shed.util.common_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.container_util module
+------------------------------------
+
+.. automodule:: tool_shed.util.container_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.encoding_util module
+-----------------------------------
+
+.. automodule:: tool_shed.util.encoding_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.hg_util module
+-----------------------------
+
+.. automodule:: tool_shed.util.hg_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.metadata_util module
+-----------------------------------
+
+.. automodule:: tool_shed.util.metadata_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.readme_util module
+---------------------------------
+
+.. automodule:: tool_shed.util.readme_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.repository_content_util module
+---------------------------------------------
+
+.. automodule:: tool_shed.util.repository_content_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.repository_util module
+-------------------------------------
+
+.. automodule:: tool_shed.util.repository_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.review_util module
+---------------------------------
+
+.. automodule:: tool_shed.util.review_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.search_util module
+---------------------------------
+
+.. automodule:: tool_shed.util.search_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.shed_util_common module
+--------------------------------------
+
+.. automodule:: tool_shed.util.shed_util_common
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.tool_dependency_util module
+------------------------------------------
+
+.. automodule:: tool_shed.util.tool_dependency_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.tool_util module
+-------------------------------
+
+.. automodule:: tool_shed.util.tool_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.web_util module
+------------------------------
+
+.. automodule:: tool_shed.util.web_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.workflow_util module
+-----------------------------------
+
+.. automodule:: tool_shed.util.workflow_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+tool_shed.util.xml_util module
+------------------------------
+
+.. automodule:: tool_shed.util.xml_util
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/lib/tool_shed.utility_containers.rst b/doc/source/lib/tool_shed.utility_containers.rst
new file mode 100644
index 0000000..bfa484d
--- /dev/null
+++ b/doc/source/lib/tool_shed.utility_containers.rst
@@ -0,0 +1,20 @@
+tool_shed.utility_containers package
+====================================
+
+.. automodule:: tool_shed.utility_containers
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+Submodules
+----------
+
+tool_shed.utility_containers.utility_container_manager module
+-------------------------------------------------------------
+
+.. automodule:: tool_shed.utility_containers.utility_container_manager
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/source/project/issues.rst b/doc/source/project/issues.rst
new file mode 100644
index 0000000..cdee91d
--- /dev/null
+++ b/doc/source/project/issues.rst
@@ -0,0 +1,234 @@
+=======================
+Galaxy Issue Management
+=======================
+
+The purpose of this document is to formalize how we manage the full
+cycle (reporting, tracking progress, resolution) of both
+feature/enhancement ideas and bugs, as well as providing a few general
+guidelines for longer-term planning for various Galaxy-related projects.
+Some inspiration is taken from the way the
+`Docker <https://github.com/docker/docker>`__ project labels issues.
+
+Milestones
+==========
+
+Every pull request should, prior to merge, be assigned to the milestone
+corresponding to the Galaxy release it will first appear in (15.10,
+16.01, and so on). This, along with the tags applied, will be used to
+generate the release notes.
+
+Any non-PR issue assigned to a milestone *must* be resolved or
+reassigned prior to publishing that release. This is the primary
+mechanism by which we force reconciliation (issue/bug fixed, closed, or
+intentionally postponed) prior to release, and prevent things falling
+through the cracks. In practice, bugs should almost always be tagged
+with a milestone which forces the reconciliation date. Issues *may* be,
+but they don't necessarily have to be -- this is subjective and it
+depends on whether or not the issue should be revisited prior to the
+corresponding release.
+
+Effective use of milestones should prevent bugs from falling through the
+cracks, and will provide a mechanism for forcing the revisitation (and
+thus progress or even potential discard) of ideas for enhancements or
+features.
+
+Deferring Issues
+----------------
+
+To prevent the review of issues attached to milestones from becoming too
+cumbersome, and to encourage active review and handling of issues, any
+contributor can choose to 'defer' an issue attached to an upcoming
+release milestone to a later one. To do this, simply reassign the issue
+to the new milestone and leave a comment so that others notice,
+something like 'Issue deferred to target\_milestone\_reference, does not
+block release current\_milestone\_reference'.
+
+Once deferred, an issue can't simply be reattached back to the earlier
+milestone -- this requires a PR. The intent here is to make it such that
+if a contributor wants to force an issue to be handled in a release,
+they need to put the work forward to do so or convince someone else to.
+
+Labeling Structure
+==================
+
+To allow for easy search, filtering, and general issue management every
+issue or PR (not tagged ``procedures`` or ``planning``) is *required* to
+have three labels which indicate the type, status, and focus area of the
+issue. Any issue without these three tags will automatically have a
+``triage`` label applied indicating that it needs human intervention to
+be correctly tagged. These ``triage`` tagged issues will be regularly
+reviewed and tagged as appropriate.
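+
+For illustration, the check such a bot performs amounts to something like
+the following sketch (the function and its inputs are hypothetical, not
+the actual bot code; the three label sets are detailed below):
+
+.. code-block:: python
+
+    REQUIRED_PREFIXES = ("kind/", "status/", "area/")
+    EXEMPT = {"procedures", "planning"}
+
+    def needs_triage(labels):
+        # Exempt issues skip the kind/status/area requirement entirely.
+        if EXEMPT.intersection(labels):
+            return False
+        # Otherwise at least one label from each of the three sets is required.
+        return not all(
+            any(label.startswith(prefix) for label in labels)
+            for prefix in REQUIRED_PREFIXES
+        )
+
+    # needs_triage(["kind/bug", "area/API"]) -> True (missing a status/* label)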
+
+Kind Labels
+-----------
+
+The 'kind' label set is used for classifying the type of contribution or
+request/report to separate enhancements and new features from bugs, etc.
+
+-  ``kind/bug`` - something is broken, and it needs fixing
+-  ``kind/enhancement`` - polish to an existing feature or interface
+-  ``kind/feature`` - something brand new
+-  ``kind/refactoring`` - refactoring of existing code, no functional
+   changes
+-  ``kind/testing`` - issues related to tests and the testing framework
+
+Status Labels
+-------------
+
+The ``status`` of an issue or PR should be tracked using the following
+labels:
+
+-  ``status/planning`` - the issue is in the planning phase; it may
+   potentially need more information (or just more thinking) to proceed
+   to a work in progress
+-  ``status/WIP`` - this issue or PR is currently being worked on; in
+   the case of a PR, it should not be merged until this tag is removed
+-  ``status/review`` - PR is complete and ready for review, or when
+   applied to an issue it is thought to be resolved but needs
+   verification
+
+We use the same set of status tags for PRs and issues to keep things
+simple, but not every PR or issue needs to go through every state. For
+example, it'll be common for a PR to be submitted with the label
+'status/review', and get merged without needing to go through the rest
+of the states.
+
+Note that there are no ``status/complete``, ``status/wontfix``,
+``status/duplicate``, or other terminal status indicators. This is
+intentional to keep the tail end of bookkeeping from getting onerous.
+These sorts of terminal states *and their justifications* (e.g. the
+reason why it's a wontfix, or a reference to the duplicate issue) should
+be indicated in the closing comment by the issue closer.
+
+Area Labels
+-----------
+
+The 'area' label is used for tagging issues and pull requests to a
+particular focus area. This allows for easy searching within that
+particular domain, as well as more organized release notes.
+
+-  ``area/admin`` - Changes to admin functionality of the Galaxy webapp.
+-  ``area/API``
+-  ``area/cleanup`` - General code cleanup.
+-  ``area/database`` - Change requires a modification to Galaxy's database.
+-  ``area/dataset-collections``
+-  ``area/datatypes`` - Changes to Galaxy's datatypes
+-  ``area/datatype-framework`` - Changes to Galaxy's datatype and metadata framework
+-  ``area/documentation``
+-  ``area/framework``
+-  ``area/GIEs``
+-  ``area/histories``
+-  ``area/jobs``
+-  ``area/performance``
+-  ``area/reports``
+-  ``area/system`` - Changes to scripts used to run or manage Galaxy.
+-  ``area/tools`` - Changes to specific tools in Galaxy.
+-  ``area/tool-framework``
+-  ``area/toolshed`` - Changes to the tool shed client or server.
+-  ``area/UI-UX``
+-  ``area/util``
+-  ``area/visualizations``
+-  ``area/workflows``
+
+New labels should be proposed by opening a pull request against this document
+in the dev branch of Galaxy.
+
+Other Useful Labels
+-------------------
+
+While the three label sets indicating kind, status, and area are
+required, there are several other labels that are useful and/or have a
+special purpose.
+
+-  ``procedures`` is a special tag that indicates that the issue is
+   related to project governance. It overrides the need for the trio of
+   kind/status/area tags, and such issues are never auto-flagged for
+   triage. More details are available in the ORGANIZATION_ document.
+
+-  ``planning`` is also a special tag that indicates the issue is
+   related to larger-scale issue planning. These issues are typically
+   meta-issues containing checklists and references to other issues
+   which are subcomponents and stepping stones necessary for issue
+   resolution. These *can* utilize the ``area/*`` tags but are not
+   required to. Status and kind make little sense here.
+
+-  ``roadmap`` is a reserved tag for the primary project roadmap. This
+   is a meta-issue that is not expected to be completed, but rather
+   serves as an entry point to the high level development of the
+   project.
+
+-  ``friendliness/beginner`` can be used to indicate a nice entry-level
+   issue that only requires limited understanding of the larger Galaxy
+   framework and ecosystem. This is useful for encouraging new
+   contributors.
+
+-  ``friendliness/intermediate`` can be used to indicate a more advanced
+   issue that requires a decent understanding of the larger Galaxy
+   framework and system.
+
+-  ``friendliness/unfriendly`` can be used to mark issues that require
+   deep understanding of the framework and/or exquisite programming
+   abilities.
+
+-  ``minor`` is a special tag used to generate release notes. It should
+   only be applied to pull requests made by committers that fix
+   functionality modified during the same release cycle. Such fixes are
+   unimportant for release notes. No pull request issued by someone
+   outside the committers group should have this tag applied because
+   these pull requests must be highlighted in the release notes.
+
+-  ``major`` is a special tag used to generate release notes. In practice
+   this should be applied to at most a couple dozen pull requests each
+   release and is used to prioritize important items of note for the
+   top of release notes sections.
+   
+-  ``merge`` is a tag used to indicate a PR that only merges a change that
+   was previously added. It is used to filter such merges out of release notes.
+
+-  ``feature-request`` is used to indicate a request for a change or feature.
+
+-  ``triage`` is a tag added automatically by GalaxyBot to indicate that
+   the issue needs to be evaluated and properly tagged.
+
+-  ``hacktoberfest`` is a tag that encourages contributions to the Galaxy
+   codebase by including the tagged issues in the `Hacktoberfest <https://hacktoberfest.digitalocean.com/>`__ event.
+   It is otherwise similar to the ``friendliness/beginner`` tag.
+
+
+The Roadmap
+===========
+
+We will maintain a single ``roadmap`` tagged meta-issue which will
+describe (at a very high level) the *current* major areas of focus for
+the project. This is similar to our PRIORITIES 2014/15 cards on Trello.
+Using `Task
+Lists <https://github.com/blog/1375-task-lists-in-gfm-issues-pulls-comments>`__,
+this issue will link to sub-issues which go into much more detail and
+may have their own checklists linking to even more subcomponent cards,
+and so on.
+
+This ``roadmap`` issue will be assigned to every release milestone,
+forcing periodic review of the roadmap.
+
+Voting
+======
+
+Users can vote for issues by commenting with a +1. It's possible to sort
+the issue list by 'most commented', which is a good indicator of
+which issues are 'hot', though this doesn't necessarily indicate a high
+vote count. This may be good enough, and in some ways it is potentially
+more useful for finding 'hot' issues than a flat vote count.
+
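+As an illustrative sketch only - not part of any Galaxy tooling, and
+assuming the public GitHub search API plus the third-party ``requests``
+library - the 'most commented' ordering could be retrieved like so:
+
+.. code-block:: python
+
+    import requests
+
+    def hottest_issues(repo="galaxyproject/galaxy", count=10):
+        # Ask GitHub's search API for open issues, ordered by comment
+        # count. Unauthenticated requests work but are rate-limited.
+        response = requests.get(
+            "https://api.github.com/search/issues",
+            params={
+                "q": "repo:%s is:issue is:open" % repo,
+                "sort": "comments",
+                "order": "desc",
+                "per_page": count,
+            },
+        )
+        response.raise_for_status()
+        items = response.json()["items"]
+        return [(i["number"], i["comments"], i["title"]) for i in items]
+
+    for number, comments, title in hottest_issues():
+        print("#%s (%s comments): %s" % (number, comments, title))
+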
+Automation
+==========
+
+For now, we will rely on a few simple automation rules:
+
+-  All issues, unless tagged ``procedures`` or ``planning``, will
+   automatically be tagged ``triage``, indicating that they require
+   attention.
+
+-  All PRs that are not assigned to a milestone will be tagged
+   ``triage`` to indicate that they require attention prior to merge.
+
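+A minimal sketch of the first rule, assuming a GitHub personal access
+token and the third-party ``requests`` library (the actual GalaxyBot
+implementation may differ entirely):
+
+.. code-block:: python
+
+    import requests
+
+    API = "https://api.github.com/repos/galaxyproject/galaxy/issues"
+    EXEMPT = {"procedures", "planning"}
+
+    def maybe_flag_for_triage(issue_number, token):
+        # Fetch the issue's current labels and add ``triage`` unless the
+        # issue carries an exempt label or is already flagged.
+        headers = {"Authorization": "token %s" % token}
+        issue = requests.get("%s/%d" % (API, issue_number), headers=headers)
+        issue.raise_for_status()
+        labels = set(label["name"] for label in issue.json().get("labels", []))
+        if not labels & EXEMPT and "triage" not in labels:
+            requests.post(
+                "%s/%d/labels" % (API, issue_number),
+                headers=headers,
+                json=["triage"],  # the v3 API accepts a list of label names
+            ).raise_for_status()
+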
+.. _ORGANIZATION: https://github.com/galaxyproject/galaxy/blob/dev/doc/source/project/organization.rst
diff --git a/doc/source/project/organization.rst b/doc/source/project/organization.rst
new file mode 100644
index 0000000..38ab231
--- /dev/null
+++ b/doc/source/project/organization.rst
@@ -0,0 +1,199 @@
+==================================
+Galaxy Core Governance
+==================================
+
+This document informally outlines the organizational structure governing the
+Galaxy core code base hosted at https://github.com/galaxyproject/galaxy. This
+governance extends to code-related activities of this repository such as
+releases and packaging. This governance does not include infrastructure such
+as Galaxy's Trello board, the Galaxy mailing lists, etc., or other
+Galaxy-related projects belonging to the ``galaxyproject`` organization on GitHub.
+
+Committers
+==========
+
+The *committers* group is the group of trusted developers and advocates who
+manage the core Galaxy code base. They assume many roles required to achieve
+the project's goals, especially those that require a high level of trust.
+
+Galaxy Project *committers* are the only individuals who may commit to the
+core Galaxy code base. All commits must be made in accordance with procedures
+outlined below. In particular, in most cases direct commit access is not
+allowed; this access is restricted to merging pull requests issued by others.
+
+Committers may participate in formal votes - these votes typically include
+votes to modify team membership, merge pull requests, and modify or clarify
+the procedures outlined in this document and CODE_OF_CONDUCT_.
+
+Members
+-------
+
+- Enis Afgan (@afgane)
+- Dannon Baker (@dannon)
+- Daniel Blankenberg (@blankenberg)
+- Dave Bouvier (@davebx)
+- Martin Čech (@martenson)
+- John Chilton (@jmchilton)
+- Dave Clements (@tnabtaf)
+- Nate Coraor (@natefoo)
+- Carl Eberhard (@carlfeberhard)
+- Jeremy Goecks (@jgoecks)
+- Björn Grüning (@bgruening)
+- Aysam Guerler (@guerler)
+- Jennifer Hillman Jackson (@jennaj)
+- Anton Nekrutenko (@nekrut)
+- Eric Rasche (@erasche)
+- Nicola Soranzo (@nsoranzo)
+- James Taylor (@jxtx)
+- Nitesh Turaga (@nitesh1989)
+- Marius van den Beek (@mvdbeek)
+
+
+Membership
+----------
+
+The *committers* group was seeded with the group of active developers and
+advocates with commit access to the repository as of May 2015. This group
+subsequently voted in new members.
+
+Any member of the *committers* group may nominate an individual for membership
+to the *committers* group. Such individuals must have demonstrated:
+
+- A good grasp of the design of the Galaxy core project.
+- A solid track record of being constructive and helpful.
+- Significant contributions to the project.
+- Willingness to dedicate some time to improving Galaxy.
+
+The above list of people is the canonical source used to determine
+membership in the *committers* group - as such, new members may be added to
+this group by opening a pull request adding a qualified person to this list.
+Pull requests modifying the membership of this list are subject to the normal
+rules for pull requests that modify governance procedures outlined below - with
+one exception - a *committer* may not vote
+against their own removal from the group (for obvious reasons).
+
+Given the responsibilities and power vested in this group, it is important
+that individuals who are no longer actively working on Galaxy in some
+fashion be removed from the group. If individuals in this group intend to
+change jobs or reallocate volunteer activities and will no longer be active
+in the Galaxy community, they should withdraw from membership of this group.
+Periodically, active members may review this group and request that
+inactive members be removed - this
+should not be interpreted as a condemnation of these inactive members but
+merely as a reflection of a desire to keep this group focused enough to remain
+effective.
+
+Handling Pull Requests
+======================
+
+Everyone is encouraged to express opinions and issue non-binding votes on pull
+requests, but only members of the *committers* group may issue binding votes
+on pull requests.
+
+Votes on pull requests should take the form of
+`+1, 0, -1, and fractions <http://www.apache.org/foundation/voting.html>`_
+as outlined by the Apache Foundation.
+
+Pull requests modifying pre-existing releases should be restricted to bug fixes
+and require at least two *+1* binding votes from committers other than the
+author of the pull request, with no *-1* binding votes.
+
+Pull requests changing or clarifying the procedures governing this repository:
+
+- Must be made to the ``dev`` branch of this repository.
+- Must remain open for at least 192 hours (unless every qualified committer has
+  voted).
+- Require binding *+1* votes from at least 25% of qualified *committers* with no
+  *-1* binding votes.
+- Should be titled with the prefix *[PROCEDURES]* and tagged with
+  the *procedures* tag in GitHub.
+- Should not be modified once open. If changes are needed, the pull request
+  should be closed, re-opened with modifications, and votes reset.
+- Should be restricted to just modifying the procedures and generally should not
+  contain code modifications.
+- If the pull request adds or removes *committers*, there must be a separate
+  pull request for each person added or removed.
+
+Any other pull request requires at least 1 *+1* binding vote from someone other
+than the author of the pull request. A member of the *committers* group merging
+a pull request is considered an implicit +1.
+
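+The voting thresholds above can be summarized as a quick sketch (vote
+values are assumed to have been tallied separately; this is not an
+official tool):
+
+.. code-block:: python
+
+    def procedures_pr_passes(binding_votes, committer_count, hours_open):
+        # ``binding_votes`` holds the numeric votes cast by committers,
+        # e.g. [1, 1, 0.5, 0] -- Apache-style +1/0/-1 and fractions.
+        if any(vote < 0 for vote in binding_votes):
+            return False  # a single binding -1 is a veto
+        everyone_voted = len(binding_votes) == committer_count
+        if hours_open < 192 and not everyone_voted:
+            return False  # must stay open at least 192 hours
+        plus_ones = sum(1 for vote in binding_votes if vote >= 1)
+        return plus_ones >= 0.25 * committer_count
+
+    def ordinary_pr_passes(non_author_binding_votes):
+        # Any other pull request: at least one binding +1 from someone
+        # other than the author (a committer's merge is an implicit +1),
+        # and no binding -1 veto.
+        if any(vote < 0 for vote in non_author_binding_votes):
+            return False
+        return any(vote >= 1 for vote in non_author_binding_votes)
+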
+Pull requests marked *[WIP]* (i.e. work in progress) in the title by the
+author(s), or tagged WIP via GitHub tags, may *not* be merged until the
+removal of that tag has been coordinated with the pull request author(s)
+and the tag has been removed from wherever it appears in the open pull
+request.
+
+Timelines
+---------
+
+Except in the case of pull requests modifying governance procedures, there are
+generally no objective guidelines defining how long pull requests must remain
+open for comment. Subjectively speaking though - larger and more potentially
+controversial pull requests containing enhancements should remain open for at
+least a few days to give everyone the opportunity to weigh in.
+
+Vetoes
+------
+
+A note on vetoes (*-1* votes) taken verbatim from the
+`Apache Foundation <http://www.apache.org/foundation/voting.html>`_:
+
+  "A code-modification proposal may be stopped dead in its tracks by a *-1* vote
+  by a qualified voter. This constitutes a veto, and it cannot be overruled nor
+  overridden by anyone. Vetoes stand until and unless withdrawn by their casters.
+  
+  To prevent vetoes from being used capriciously, they must be accompanied by a
+  technical justification showing why the change is bad (opens a security
+  exposure, negatively affects performance, etc. ). A veto without a
+  justification is invalid and has no weight."
+
+For votes regarding non-coding issues such as procedure changes, the requirement
+that a veto is accompanied by a *technical* justification is relaxed somewhat,
+though a well-reasoned justification must still be included.
+
+Reversions
+----------
+
+A *-1* vote on any recently merged pull request requires an immediate
+reversion of the merged pull request. The backout of such a pull request
+invokes a mandatory review period of at least 72 hours.
+
+- Recently merged pull requests are defined as those merged within the past
+  168 hours (7 days), so as not to prevent forward progress, while allowing for
+  reversions of things merged without proper review and consensus.
+- The person issuing the *-1* vote will, upon commenting *-1* with technical
+  justification per the vetoes section, immediately open a pull request to
+  revert the original merge in question. If any committer other than the *-1*
+  issuer deems the justification technical - regardless of whether they agree
+  with the justification - that committer must then merge the pull request to
+  revert.
+
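+A trivial sketch of the 168-hour window check (names and times here are
+illustrative only):
+
+.. code-block:: python
+
+    from datetime import datetime, timedelta
+
+    def within_reversion_window(merged_at, now=None):
+        # A merge is "recent" -- and therefore subject to immediate
+        # reversion on a -1 vote -- within 168 hours (7 days) of merging.
+        now = now or datetime.utcnow()
+        return now - merged_at <= timedelta(hours=168)
+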
+Direct Commit Access
+--------------------
+
+The Galaxy *committers* group may commit directly to Galaxy (i.e. outside
+of a pull request and without following the procedures described here) only
+the following two categories of patches:
+
+* Patches for serious security vulnerabilities.
+* Cherry-picking and/or merging of existing approved commits to other branches.
+
+Labeling and Milestones
+-----------------------
+
+Pull request handling, labeling, and milestone usage follows the procedures
+described in ISSUES_.
+
+
+Issue Reporting
+===============
+
+Issues (bugs, feature requests, etc.) should be reported at ISSUE_REPORT_, and
+handling of issues follows the procedures described in ISSUES_.
+
+
+.. _LICENSE: https://github.com/galaxyproject/galaxy/blob/dev/LICENSE.txt
+.. _CODE_OF_CONDUCT: https://github.com/galaxyproject/galaxy/blob/dev/CODE_OF_CONDUCT.md
+.. _ISSUES: https://github.com/galaxyproject/galaxy/blob/dev/doc/source/project/issues.rst
+.. _ISSUE_REPORT: https://github.com/galaxyproject/galaxy/issues/
diff --git a/doc/source/releases/13.01_announce.rst b/doc/source/releases/13.01_announce.rst
new file mode 100644
index 0000000..1f6ad49
--- /dev/null
+++ b/doc/source/releases/13.01_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+January 2013 Galaxy Release (v 13.01)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2013_01_11_DistributionNewsBrief
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/13.02_announce.rst b/doc/source/releases/13.02_announce.rst
new file mode 100644
index 0000000..09a67d5
--- /dev/null
+++ b/doc/source/releases/13.02_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+February 2013 Galaxy Release (v 13.02)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2013_02_08_GalaxyNewsBrief
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/13.04_announce.rst b/doc/source/releases/13.04_announce.rst
new file mode 100644
index 0000000..e2f9378
--- /dev/null
+++ b/doc/source/releases/13.04_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+April 2013 Galaxy Release (v 13.04)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2013_04_01_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/13.06_announce.rst b/doc/source/releases/13.06_announce.rst
new file mode 100644
index 0000000..58afa38
--- /dev/null
+++ b/doc/source/releases/13.06_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+June 2013 Galaxy Release (v 13.06)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2013_06_03_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/13.08_announce.rst b/doc/source/releases/13.08_announce.rst
new file mode 100644
index 0000000..44c8ad1
--- /dev/null
+++ b/doc/source/releases/13.08_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+August 2013 Galaxy Release (v 13.08)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2013_08_12_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/13.11_announce.rst b/doc/source/releases/13.11_announce.rst
new file mode 100644
index 0000000..4fb4c7c
--- /dev/null
+++ b/doc/source/releases/13.11_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+November 2013 Galaxy Release (v 13.11)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2013_11_04_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/14.02_announce.rst b/doc/source/releases/14.02_announce.rst
new file mode 100644
index 0000000..94462d7
--- /dev/null
+++ b/doc/source/releases/14.02_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+February 2014 Galaxy Release (v 14.02)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2014_02_10_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/14.04_announce.rst b/doc/source/releases/14.04_announce.rst
new file mode 100644
index 0000000..22e02c8
--- /dev/null
+++ b/doc/source/releases/14.04_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+April 2014 Galaxy Release (v 14.04)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2014_04_14_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/14.06_announce.rst b/doc/source/releases/14.06_announce.rst
new file mode 100644
index 0000000..5b690ef
--- /dev/null
+++ b/doc/source/releases/14.06_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+June 2014 Galaxy Release (v 14.06)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2014_06_02_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/14.08_announce.rst b/doc/source/releases/14.08_announce.rst
new file mode 100644
index 0000000..7052e85
--- /dev/null
+++ b/doc/source/releases/14.08_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+August 2014 Galaxy Release (v 14.08)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2014_08_11_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/14.10_announce.rst b/doc/source/releases/14.10_announce.rst
new file mode 100644
index 0000000..e6aaf56
--- /dev/null
+++ b/doc/source/releases/14.10_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+October 2014 Galaxy Release (v 14.10)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2014_10_06_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/15.01_announce.rst b/doc/source/releases/15.01_announce.rst
new file mode 100644
index 0000000..f670f3c
--- /dev/null
+++ b/doc/source/releases/15.01_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+January 2015 Galaxy Release (v 15.01)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2015_01_13_Galaxy_Distribution
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/15.03_announce.rst b/doc/source/releases/15.03_announce.rst
new file mode 100644
index 0000000..7206175
--- /dev/null
+++ b/doc/source/releases/15.03_announce.rst
@@ -0,0 +1,11 @@
+===========================================================
+March 2015 Galaxy Release (v 15.03)
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcement and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/News/2015_03_GalaxyRelease
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/15.05.rst b/doc/source/releases/15.05.rst
new file mode 100644
index 0000000..8e68f3f
--- /dev/null
+++ b/doc/source/releases/15.05.rst
@@ -0,0 +1,356 @@
+.. to_doc
+
+-------------------------------
+15.05
+-------------------------------
+
+.. enhancements
+
+Enhancements
+-------------------------------
+
+* Pluggable framework for custom authentication (including new LDAP/Active
+  Directory integration). Thanks to many including Andrew Robinson,
+  Nicola Soranzo, and David Trudgian. `Pull Request 1`_, `Pull Request 33`_,
+  `Pull Request 51`_, `Pull Request 75`_, `Pull Request 98`_,
+  `Pull Request 216`_
+* Implement a new ``section`` tag for tool parameters. `Pull Request 35`_,
+  `Trello <https://trello.com/c/KxlQK0FB>`__
+* New UI widgets allowing much more flexibility when creating simple dataset
+  pair and list collections. `Pull Request 134`_,
+  `Trello <https://trello.com/c/xCdFQPBW>`__
+* Improved JavaScript build system for client code and libraries (now
+  using uglify_ and featuring `Source Maps`_). 72c876c_, 9a7f5fc_, 648a623_,
+  22f280f_, `Trello <https://trello.com/c/7midE7Bx>`__
+* Add an `External Display Application`_ for viewing GFF/GTF files with IGV_.
+  `Pull Request 70`_, `Trello <https://trello.com/c/Hfg3gYsL>`__
+* Use TravisCI_ and Tox_ for continuous integration testing.
+  `Pull Request 40`_, `Pull Request 62`_, `Pull Request 97`_,
+  `Pull Request 99`_, `Pull Request 123`_, `Pull Request 222`_,
+  `Pull Request 235`_
+* Infrastructure for improved toolbox and Tool Shed searching.
+  `Pull Request 9`_, `Pull Request 116`_, `Pull Request 142`_,
+  `Pull Request 226`_, c2eb74c_, 2bf52fe_, ec549db_,
+  `Trello <https://trello.com/c/YJW1dCkB>`__, `Trello <https://trello.com/c/AG3qYRZe>`__
+* Enhance UI to allow renaming dataset collections. 21d1d6b_
+* Improve highlighting of current/active content in the history panel.
+  `Pull Request 126`_
+* Improvements to UI and API for histories and collections. e36e51e_,
+  1e55206_, 0c79680_
+* Update history dataset API to account for job re-submission. b4cf49a_
+* Allow recalculating user disk usage from the admin interface. 964e081_
+* Collect significantly more metadata for BAM files. `Pull Request 107`_,
+  `Pull Request 108`_
+* Implement ``detect_errors`` attribute on the ``command`` tag of tool XML.
+  `Pull Request 117`_
+* Allow setting ``auto_format="True"`` on tool ``output`` tags.
+  `Pull Request 130`_
+* Allow testing tool outputs based on MD5 hashes. `Pull Request 125`_
+* Improved Cheetah type casting for int/float values. `Pull Request 121`_
+* Add option to pass arbitrary parameters to ``gem install`` as part of
+  the ``setup_ruby_environment`` Tool Shed install action -
+  thanks to Björn Grüning. `Pull Request 118`_
+* Add ``argument`` attribute to tool parameters. `Pull Request 8`_
+* Improve link and message that appears after workflows are run.
+  `Pull Request 143`_
+* Add NCBI SRA datatype - thanks to Matt Shirley. `Pull Request 87`_
+* Stronger toolbox filtering. `Pull Request 119`_
+* Allow updating Tool Shed repositories via the API - thanks to Eric Rasche.
+  `Pull Request 30`_
+* Expose category list in show call for Tool Shed repositories - thanks to
+  Eric Rasche. `Pull Request 29`_
+* Add API endpoint to create Tool Shed repositories. `Pull Request 2`_
+* Do not configure Galaxy to use the test Tool Shed by default.
+  `Pull Request 38`_
+* Add fields and improve display of Tool Shed repositories.
+  a24e206_, d6d61bc_, `Trello <https://trello.com/c/g1dt8WhA>`__
+* Enhance multi-selection widgets to allow key combinations ``Ctrl-A``
+  and ``Ctrl-X``. e8564d7_, `Trello <https://trello.com/c/3QhD5l5h>`__
+* New, consistent button for displaying citation BibTeX. `Pull Request 19`_
+* Improved ``README`` reflecting move to Github - thanks in part to Eric
+  Rasche. `PR #2 (old repo)
+  <https://github.com/galaxyproject/galaxy-beta1/pull/2>`__,
+  226e826_, 2650d09_, 7d5dde8_
+* Update application to use new logo. 2748f9d_, `Pull Request 187`_,
+  `Pull Request 206`_
+* Update many documentation links to use https sites - thanks to
+  Nicola Soranzo. 8254cab_
+* Sync report options config with ``galaxy.ini`` - thanks to Björn Grüning.
+  `Pull Request 12`_
+* Eliminate need to use API key to list tools via API. cd7abe8_
+* Restore function necessary for splitting sequence datatypes - thanks to
+  Roberto Alonso. `Pull Request 5`_
+* Suppress filenames in SAM merge using ``egrep`` - thanks to Peter Cock
+  and Roberto Alonso. `Pull Request 4`_
+* Option to sort counts in ``Count1`` tool (``tools/filters/uniq.xml``) -
+  thanks to Peter Cock. `Pull Request 16`_
+* Preserve spaces in ``Count1`` tool (``tools/filters/uniq.xml``) - thanks to
+  Peter Cock. `Pull Request 13`_
+* `Interactive Environments`_ improvements and fixes from multiple
+  developers including Eric Rasche and Björn Grüning. `Pull Request 69`_,
+  `Pull Request 73`_, `Pull Request 131`_, `Pull Request 135`_,
+  `Pull Request 152`_, `Pull Request 197`_
+* Enable multi-part upload for exporting files with the GenomeSpace export 
+  tool. `Pull Request 74`_, `Trello <https://trello.com/c/28O46iln>`__
+* Large refactoring, expansion, and increase in test coverage for "managers".
+  `Pull Request 76`_
+* Improved display of headers in tool help. 157eba6_, 
+  `Biostar <https://biostar.usegalaxy.org/p/11211/>`__
+* Uniform configuration of "From" field for sent emails - thanks to Nicola
+  Soranzo. `Pull Request 23`_
+* Allow setting ``job_conf.xml`` params via environment variables &
+  ``galaxy.ini``. dde2fc9_
+* Allow a tool data table to declare that duplicate entries are not
+  allowed. `Pull Request 245`_
+* Add verbose test error flag option in ``run_tests.sh``. 62f0495_
+* Update ``.gitignore`` to include ``run_api_tests.html``. b52cc98_
+* Add experimental options to run tests in Docker. e99adb5_
+* Improve ``run_tests.sh --help`` documentation to detail running specific
+  tests. `Pull Request 86`_
+* Remove older, redundant history tests. `Pull Request 120`_,
+  `Trello <https://trello.com/c/p6oOVhGp>`__
+* Add test tool demonstrating citing a Github repository. 65def71_
+* Add option to track all automated changes to the integrated tool panel.
+  10bb492_
+* Make tool version explicit in all distribution tools - thanks to Peter Cock.
+  `Pull Request 14`_
+* Relocate the external metadata setting script. `Pull Request 7`_
+* Parameterize script used to pull new builds from the UCSC Browser.
+  e4e5df0_
+* Enhance jobs and workflow logging to report timings. 06346a4_
+* Add debug message for dynamic options exceptions. `Pull Request 91`_
+* Remove demo sequencer app. 3af3bf5_
+* Tweaks to Pulsar's handling of async messages. `Pull Request 109`_
+* Return more specific API authentication errors. 71a64ca_
+* Upgrade Python dependency sqlalchemy to 1.0.0. d725aab_, `Pull Request 129`_
+* Upgrade Python dependency amqp to 1.4.6. `Pull Request 128`_
+* Upgrade Python dependency kombu to 3.0.24. `Pull Request 128`_
+* Upgrade JavaScript dependency raven.js to 1.1.17. bcd1701_
+
+.. fixes
+
+Fixes
+-------------------------------
+
+* During the 15.05 development cycle dozens of fixes were pushed to the
+  ``release_15.03`` branch of Galaxy. These are all included in 15.05 and
+  summarized `here
+  <https://github.com/galaxyproject/galaxy/compare/v15.03...release_15.03>`__
+  (with special thanks to Björn Grüning and Marius van den Beek).
+* Fix race condition that would occasionally prevent Galaxy from starting
+  properly. `Pull Request 198`_, `Trello <https://trello.com/c/WVlaLsOh>`__
+* Fix scatter plot API communications for certain proxied Galaxy instances -
+  thanks to @yhoogstrate. `Pull Request 89`_
+* Fix bug in collectl_ job metrics plugin - thanks to Carrie Ganote. 
+  `Pull Request 231`_
+* Fix late validation of tool parameters. `Pull Request 115`_
+* Fix ``fasta_to_tabular_converter.py`` (for implicit conversion) - thanks to
+  Peter Cock. `Pull Request 11`_
+* Fix to eliminate race condition by collecting extra files before declaring
+  datasets OK. `Pull Request 48`_
+* Fix setting current history for certain proxied Galaxy instances - thanks
+  to @wezen. 6946e46_.
+* Fix typo in tool failure testing example - thanks to Peter Cock.
+  `Pull Request 18`_.
+* Fix Galaxy to default to using SSL for communicating with Tool Sheds.
+  0b037a2_
+* Fix data source tools to open in ``_top`` window.
+  `Pull Request 17`_
+* Fix to fall back to name for tool parameters without labels.
+  `Pull Request 189`_, `Trello <https://trello.com/c/Y2xbXqzZ>`__
+* Fix to remove redundant version ids in tool version selector.
+  `Pull Request 244`_
+* Fix for downloading metadata files. `Pull Request 234`_
+* Fix for history failing to render if it contains more exotic dataset 
+  collection types. `Pull Request 196`_
+* Fixes for BaseURLToolParameter. `Pull Request 247`_
+* Fix to suppress pysam binary incompatibility warning when using datatypes
+  in ``binary.py``. `Pull Request 252`_
+* Fix for library UI duplication bug. `Pull Request 179`_
+* Fix for `Backbone.js`_ loading as AMD_. 4e5218f_
+* Other small Tool Shed fixes. 815f86f_, 76e0915_
+* Fix file closing in ``lped_to_pbed_converter``. 182b67f_
+* Fix undefined variables in Tool Shed ``add_repository_entry`` API script.
+  47e6f08_
+* Fix user registration to respect use_panels when in the Galaxy app.
+  7ac8631_, `Trello <https://trello.com/c/lA1mdDrP>`__
+* Fix bug in scramble exception - incorrect reference to ``source_path``. 79d50d8_
+* Fix error handling in ``pbed_to_lped``. 7aecd7a_
+* Fix error handling in Tool Shed step handler for ``chmod`` action. 1454396_
+* Fix ``__safe_string_wrapper`` in tool evaluation object_wrapper. ab6f13e_
+* Fixes for data types and data providers. c1d2d1f_, 8da70bb_, 0b83b1e_
+* Fixes for Tool Shed commit and mercurial handling modules. 6102edf_,
+  b639bc0_, debea9d_
+* Fix to clean working directory during job re-submission. `Pull Request 236`_
+* Fix bug when task splitting jobs fail. `Pull Request 214`_
+* Fix some minor typos in comment docs in ``config/galaxy.ini.sample``.
+  `Pull Request 210`_
+* Fix admin disk usage message. `Pull Request 205`_,
+  `Trello <https://trello.com/c/2pdw2dK8>`__
+* Fix to sessionStorage Model to suppress QUOTA DOMExceptions when Safari
+  users are in private browsing mode. 0c94f04_
+
+.. _IGV: https://www.broadinstitute.org/igv/
+.. _External Display Application: https://wiki.galaxyproject.org/Admin/Tools/External%20Display%20Applications%20Tutorial
+.. _Interactive Environments: https://wiki.galaxyproject.org/Admin/IEs
+.. _TravisCI: https://travis-ci.org/
+.. _Tox: https://testrun.org/tox/latest/
+.. _Source Maps: https://developer.chrome.com/devtools/docs/javascript-debugging#source-maps
+.. _uglify: https://github.com/mishoo/UglifyJS2
+.. _collectl: http://collectl.sourceforge.net/
+.. _Backbone.js: http://backbonejs.org/
+.. _AMD: http://requirejs.org/docs/whyamd.html
+
+.. github_links
+.. _Pull Request 129: https://github.com/galaxyproject/galaxy/pull/129
+.. _Pull Request 128: https://github.com/galaxyproject/galaxy/pull/128
+.. _Pull Request 2: https://github.com/galaxyproject/galaxy/pull/2
+.. _Pull Request 247: https://github.com/galaxyproject/galaxy/pull/247
+.. _Pull Request 252: https://github.com/galaxyproject/galaxy/pull/252
+.. _Pull Request 245: https://github.com/galaxyproject/galaxy/pull/245
+.. _Pull Request 244: https://github.com/galaxyproject/galaxy/pull/244
+.. _Pull Request 236: https://github.com/galaxyproject/galaxy/pull/236
+.. _Pull Request 235: https://github.com/galaxyproject/galaxy/pull/235
+.. _Pull Request 222: https://github.com/galaxyproject/galaxy/pull/222
+.. _Pull Request 234: https://github.com/galaxyproject/galaxy/pull/234
+.. _Pull Request 231: https://github.com/galaxyproject/galaxy/pull/231
+.. _Pull Request 226: https://github.com/galaxyproject/galaxy/pull/226
+.. _Pull Request 216: https://github.com/galaxyproject/galaxy/pull/216
+.. _Pull Request 215: https://github.com/galaxyproject/galaxy/pull/215
+.. _Pull Request 214: https://github.com/galaxyproject/galaxy/pull/214
+.. _Pull Request 198: https://github.com/galaxyproject/galaxy/pull/198
+.. _Pull Request 210: https://github.com/galaxyproject/galaxy/pull/210
+.. _Pull Request 206: https://github.com/galaxyproject/galaxy/pull/206
+.. _Pull Request 205: https://github.com/galaxyproject/galaxy/pull/205
+.. _Pull Request 197: https://github.com/galaxyproject/galaxy/pull/197
+.. _Pull Request 196: https://github.com/galaxyproject/galaxy/pull/196
+.. _Pull Request 189: https://github.com/galaxyproject/galaxy/pull/189
+.. _Pull Request 187: https://github.com/galaxyproject/galaxy/pull/187
+.. _Pull Request 179: https://github.com/galaxyproject/galaxy/pull/179
+.. _Pull Request 153: https://github.com/galaxyproject/galaxy/pull/153
+.. _Pull Request 152: https://github.com/galaxyproject/galaxy/pull/152
+.. _5abb8ad: https://github.com/galaxyproject/galaxy/commit/5abb8ad
+.. _Pull Request 130: https://github.com/galaxyproject/galaxy/pull/130
+.. _Pull Request 146: https://github.com/galaxyproject/galaxy/pull/146
+.. _Pull Request 135: https://github.com/galaxyproject/galaxy/pull/135
+.. _Pull Request 143: https://github.com/galaxyproject/galaxy/pull/143
+.. _Pull Request 142: https://github.com/galaxyproject/galaxy/pull/142
+.. _Pull Request 131: https://github.com/galaxyproject/galaxy/pull/131
+.. _d725aab: https://github.com/galaxyproject/galaxy/commit/d725aab
+.. _Pull Request 126: https://github.com/galaxyproject/galaxy/pull/126
+.. _e09761e: https://github.com/galaxyproject/galaxy/commit/e09761e
+.. _8d3c531: https://github.com/galaxyproject/galaxy/commit/8d3c531
+.. _Pull Request 125: https://github.com/galaxyproject/galaxy/pull/125
+.. _Pull Request 123: https://github.com/galaxyproject/galaxy/pull/123
+.. _Pull Request 121: https://github.com/galaxyproject/galaxy/pull/121
+.. _Pull Request 120: https://github.com/galaxyproject/galaxy/pull/120
+.. _Pull Request 119: https://github.com/galaxyproject/galaxy/pull/119
+.. _Pull Request 117: https://github.com/galaxyproject/galaxy/pull/117
+.. _Pull Request 118: https://github.com/galaxyproject/galaxy/pull/118
+.. _Pull Request 134: https://github.com/galaxyproject/galaxy/pull/134
+.. _Pull Request 116: https://github.com/galaxyproject/galaxy/pull/116
+.. _Pull Request 109: https://github.com/galaxyproject/galaxy/pull/109
+.. _647cf55: https://github.com/galaxyproject/galaxy/commit/647cf55
+.. _Pull Request 108: https://github.com/galaxyproject/galaxy/pull/108
+.. _Pull Request 107: https://github.com/galaxyproject/galaxy/pull/107
+.. _8254cab: https://github.com/galaxyproject/galaxy/commit/8254cab
+.. _Pull Request 99: https://github.com/galaxyproject/galaxy/pull/99
+.. _Pull Request 98: https://github.com/galaxyproject/galaxy/pull/98
+.. _Pull Request 115: https://github.com/galaxyproject/galaxy/pull/115
+.. _Pull Request 97: https://github.com/galaxyproject/galaxy/pull/97
+.. _Pull Request 91: https://github.com/galaxyproject/galaxy/pull/91
+.. _Pull Request 89: https://github.com/galaxyproject/galaxy/pull/89
+.. _Pull Request 86: https://github.com/galaxyproject/galaxy/pull/86
+.. _Pull Request 87: https://github.com/galaxyproject/galaxy/pull/87
+.. _Pull Request 73: https://github.com/galaxyproject/galaxy/pull/73
+.. _Pull Request 74: https://github.com/galaxyproject/galaxy/pull/74
+.. _Pull Request 75: https://github.com/galaxyproject/galaxy/pull/75
+.. _Pull Request 70: https://github.com/galaxyproject/galaxy/pull/70
+.. _Pull Request 69: https://github.com/galaxyproject/galaxy/pull/69
+.. _Pull Request 62: https://github.com/galaxyproject/galaxy/pull/62
+.. _Pull Request 51: https://github.com/galaxyproject/galaxy/pull/51
+.. _Pull Request 76: https://github.com/galaxyproject/galaxy/pull/76
+.. _2650d09: https://github.com/galaxyproject/galaxy/commit/2650d09
+.. _7d5dde8: https://github.com/galaxyproject/galaxy/commit/7d5dde8
+.. _2748f9d: https://github.com/galaxyproject/galaxy/commit/2748f9d
+.. _d6d61bc: https://github.com/galaxyproject/galaxy/commit/d6d61bc
+.. _815f86f: https://github.com/galaxyproject/galaxy/commit/815f86f
+.. _76e0915: https://github.com/galaxyproject/galaxy/commit/76e0915
+.. _bce8171: https://github.com/galaxyproject/galaxy/commit/bce8171
+.. _06346a4: https://github.com/galaxyproject/galaxy/commit/06346a4
+.. _b4cf49a: https://github.com/galaxyproject/galaxy/commit/b4cf49a
+.. _Pull Request 40: https://github.com/galaxyproject/galaxy/pull/40
+.. _Pull Request 38: https://github.com/galaxyproject/galaxy/pull/38
+.. _a24e206: https://github.com/galaxyproject/galaxy/commit/a24e206
+.. _Pull Request 35: https://github.com/galaxyproject/galaxy/pull/35
+.. _e36e51e: https://github.com/galaxyproject/galaxy/commit/e36e51e
+.. _1e55206: https://github.com/galaxyproject/galaxy/commit/1e55206
+.. _0c79680: https://github.com/galaxyproject/galaxy/commit/0c79680
+.. _Pull Request 1: https://github.com/galaxyproject/galaxy/pull/1
+.. _Pull Request 33: https://github.com/galaxyproject/galaxy/pull/33
+.. _Pull Request 48: https://github.com/galaxyproject/galaxy/pull/48
+.. _21d1d6b: https://github.com/galaxyproject/galaxy/commit/21d1d6b
+.. _Pull Request 30: https://github.com/galaxyproject/galaxy/pull/30
+.. _Pull Request 29: https://github.com/galaxyproject/galaxy/pull/29
+.. _c0e5509: https://github.com/galaxyproject/galaxy/commit/c0e5509
+.. _157eba6: https://github.com/galaxyproject/galaxy/commit/157eba6
+.. _72c876c: https://github.com/galaxyproject/galaxy/commit/72c876c
+.. _9a7f5fc: https://github.com/galaxyproject/galaxy/commit/9a7f5fc
+.. _648a623: https://github.com/galaxyproject/galaxy/commit/648a623
+.. _59028c0: https://github.com/galaxyproject/galaxy/commit/59028c0
+.. _bcd1701: https://github.com/galaxyproject/galaxy/commit/bcd1701
+.. _22f280f: https://github.com/galaxyproject/galaxy/commit/22f280f
+.. _6946e46: https://github.com/galaxyproject/galaxy/commit/6946e46
+.. _65def71: https://github.com/galaxyproject/galaxy/commit/65def71
+.. _4e5218f: https://github.com/galaxyproject/galaxy/commit/4e5218f
+.. _Pull Request 16: https://github.com/galaxyproject/galaxy/pull/16
+.. _Pull Request 13: https://github.com/galaxyproject/galaxy/pull/13
+.. _e8564d7: https://github.com/galaxyproject/galaxy/commit/e8564d7
+.. _Pull Request 23: https://github.com/galaxyproject/galaxy/pull/23
+.. _Pull Request 22: https://github.com/galaxyproject/galaxy/pull/22
+.. _10bb492: https://github.com/galaxyproject/galaxy/commit/10bb492
+.. _Pull Request 19: https://github.com/galaxyproject/galaxy/pull/19
+.. _Pull Request 18: https://github.com/galaxyproject/galaxy/pull/18
+.. _0b037a2: https://github.com/galaxyproject/galaxy/commit/0b037a2
+.. _Pull Request 17: https://github.com/galaxyproject/galaxy/pull/17
+.. _b29a5e9: https://github.com/galaxyproject/galaxy/commit/b29a5e9
+.. _Pull Request 14: https://github.com/galaxyproject/galaxy/pull/14
+.. _7aecd7a: https://github.com/galaxyproject/galaxy/commit/7aecd7a
+.. _Pull Request 12: https://github.com/galaxyproject/galaxy/pull/12
+.. _cd7abe8: https://github.com/galaxyproject/galaxy/commit/cd7abe8
+.. _62f0495: https://github.com/galaxyproject/galaxy/commit/62f0495
+.. _Pull Request 11: https://github.com/galaxyproject/galaxy/pull/11
+.. _Pull Request 9: https://github.com/galaxyproject/galaxy/pull/9
+.. _632ec4e: https://github.com/galaxyproject/galaxy/commit/632ec4e
+.. _Pull Request 8: https://github.com/galaxyproject/galaxy/pull/8
+.. _Pull Request 7: https://github.com/galaxyproject/galaxy/pull/7
+.. _b52cc98: https://github.com/galaxyproject/galaxy/commit/b52cc98
+.. _1454396: https://github.com/galaxyproject/galaxy/commit/1454396
+.. _8da70bb: https://github.com/galaxyproject/galaxy/commit/8da70bb
+.. _b639bc0: https://github.com/galaxyproject/galaxy/commit/b639bc0
+.. _ab6f13e: https://github.com/galaxyproject/galaxy/commit/ab6f13e
+.. _debea9d: https://github.com/galaxyproject/galaxy/commit/debea9d
+.. _6102edf: https://github.com/galaxyproject/galaxy/commit/6102edf
+.. _c1d2d1f: https://github.com/galaxyproject/galaxy/commit/c1d2d1f
+.. _0b83b1e: https://github.com/galaxyproject/galaxy/commit/0b83b1e
+.. _216fb95: https://github.com/galaxyproject/galaxy/commit/216fb95
+.. _182b67f: https://github.com/galaxyproject/galaxy/commit/182b67f
+.. _47e6f08: https://github.com/galaxyproject/galaxy/commit/47e6f08
+.. _7ac8631: https://github.com/galaxyproject/galaxy/commit/7ac8631
+.. _2bf52fe: https://github.com/galaxyproject/galaxy/commit/2bf52fe
+.. _e4e5df0: https://github.com/galaxyproject/galaxy/commit/e4e5df0
+.. _6e17bf4: https://github.com/galaxyproject/galaxy/commit/6e17bf4
+.. _0c94f04: https://github.com/galaxyproject/galaxy/commit/0c94f04
+.. _ec549db: https://github.com/galaxyproject/galaxy/commit/ec549db
+.. _226e826: https://github.com/galaxyproject/galaxy/commit/226e826
+.. _79d50d8: https://github.com/galaxyproject/galaxy/commit/79d50d8
+.. _964e081: https://github.com/galaxyproject/galaxy/commit/964e081
+.. _Pull Request 5: https://github.com/galaxyproject/galaxy/pull/5
+.. _1f1bb29: https://github.com/galaxyproject/galaxy/commit/1f1bb29
+.. _Pull Request 4: https://github.com/galaxyproject/galaxy/pull/4
+.. _dde2fc9: https://github.com/galaxyproject/galaxy/commit/dde2fc9
+.. _c2eb74c: https://github.com/galaxyproject/galaxy/commit/c2eb74c
+.. _71a64ca: https://github.com/galaxyproject/galaxy/commit/71a64ca
+.. _3af3bf5: https://github.com/galaxyproject/galaxy/commit/3af3bf5
+.. _e99adb5: https://github.com/galaxyproject/galaxy/commit/e99adb5
diff --git a/doc/source/releases/15.05_announce.rst b/doc/source/releases/15.05_announce.rst
new file mode 100644
index 0000000..db98dc3
--- /dev/null
+++ b/doc/source/releases/15.05_announce.rst
@@ -0,0 +1,58 @@
+===========================================================
+May 2015 Galaxy Release (v 15.05)
+===========================================================
+
+.. include:: _header.rst
+
+Highlights
+===========================================================
+
+**Authentication Plugins**
+  Galaxy now has native support for LDAP and Active Directory via a new
+  community-developed authentication plugin system.
+
+**Tool Sections**
+  Tool parameters may now be grouped into collapsible sections.
+
+**Collection Creators**
+  New widgets have been added that allow much more flexibility when creating
+  simple dataset pair and list collections.
+
+`GitHub <https://github.com/galaxyproject/galaxy>`__
+===========================================================
+
+New
+  .. code-block:: shell
+  
+      % git clone -b master https://github.com/galaxyproject/galaxy.git
+
+Update to latest stable release
+  .. code-block:: shell
+  
+      % git checkout master && git pull --ff-only origin master
+
+Update to exact version
+  .. code-block:: shell
+  
+      % git checkout v15.05
+
+
+`BitBucket <https://bitbucket.org/galaxy/galaxy-dist>`__
+===========================================================
+
+Upgrade
+  .. code-block:: shell
+  
+      % hg pull
+      % hg update latest_15.05
+
+
+See `our wiki <https://wiki.galaxyproject.org/Develop/SourceCode>`__ for additional details regarding the source code locations.
+
+Release Notes
+===========================================================
+
+.. include:: 15.05.rst
+   :start-after: enhancements
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/15.07.rst b/doc/source/releases/15.07.rst
new file mode 100644
index 0000000..b273226
--- /dev/null
+++ b/doc/source/releases/15.07.rst
@@ -0,0 +1,548 @@
+
+.. to_doc
+
+-------------------------------
+15.07
+-------------------------------
+
+Enhancements
+-------------------------------
+
+
+.. enhancements
+
+* Interactive environment (IE) framework updates including a new IE for RStudio.
+  (Thanks to a team effort spearheaded by `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 229`_, `Pull Request 230`_
+* Rework workflow editor to use newer tool form components and allow construction
+  of workflows with tools explicitly producing output collections.
+  `Pull Request 286`_, `Pull Request 328`_, `Pull Request 311`_,
+  `Pull Request 397`_, `Pull Request 233`_, `Pull Request 625`_,
+  `Pull Request 602`_
+* Implement policies for committers and pull requests.
+  `Pull Request 295`_, `Pull Request 314`_
+* Allow exporting a library folder to a history. (Thanks to `@ericenns
+  <https://github.com/ericenns>`__.) `Pull Request 426`_
+* Fix and update documentation available on ReadTheDocs with particular
+  focus on the API documentation. (Thanks to `@nsoranzo
+  <https://github.com/nsoranzo>`__.)
+  `Pull Request 399`_, `Pull Request 405`_
+* Add EDAM IDs to Galaxy datatypes. (Thanks to a team effort initiated by `@erasche
+  <https://github.com/erasche>`__.)
+  `Pull Request 80`_, `Pull Request 342`_
+* Improved reports app sorting and bug fixes thanks to
+  Daniel Bouchard (`@Airistotal <https://github.com/Airistotal>`__). Even more
+  reports enhancements will be included in the next release of Galaxy, including
+  a new landing page, pagination, and sparkline graphs.
+  `Pull Request 303`_, `Pull Request 330`_, `Pull Request 338`_
+* Significant speedup of tool panel/homepage loading. (Thanks to `@abretaud
+  <https://github.com/abretaud>`__.) `Pull Request 159`_
+* Allow parameterization of tool XML macros. `Pull Request 362`_
+* Allow use of tool-style XML macros in ``job_conf.xml``. `Pull Request 362`_
+* Update and clarify admin menu. `Pull Request 302`_
+* Add usage information to python cleanup scripts. (Thanks to `@dpryan79
+  <https://github.com/dpryan79>`__.) `Pull Request 274`_
+* Added default indexing for bcf files. `Pull Request 163`_
+* Implement merging of BAM files for the parallelism framework. (Thanks to
+  `@roalva1 <https://github.com/roalva1>`__.) `Pull Request 149`_
+* Send signal to reload data tables when adding an entry. (Thanks to `@abretaud
+  <https://github.com/abretaud>`__.) `Pull Request 250`_
+* Add display for Interval files at IGV via bedstrict. `Pull Request 259`_
+* Include more information in default ``welcome.html`` file. `Pull Request 190`_
+* Implement a command line script for collecting stats on job runtimes. 
+  `Pull Request 151`_
+* Implement an authentication provider for PAM (Pluggable Authentication Modules).
+  (Thanks to `@pvanheus <https://github.com/pvanheus>`__.) `Pull Request 458`_
+* Re-implement library data and FTP file parameters for new tool form.
+  `Pull Request 132`_, `Pull Request 546`_
+* Adding tool error tracking. (Thanks to `@kellrott
+  <https://github.com/kellrott>`__.) `Pull Request 322`_
+* Implement ``<filter>`` tag for output collections. `Pull Request 455`_
+* Allow environment variable definitions in tool XML. `Pull Request 395`_
+* Add new ``exclude_min`` and ``exclude_max`` attributes to ``InRangeValidator``
+  for numeric tool parameters. (Thanks to `@gregvonkuster
+  <https://github.com/gregvonkuster>`__.) `Pull Request 339`_
+* Added unlinked Tool Shed dependency resolver. (Thanks to `@Christian-B
+  <https://github.com/Christian-B>`__.) `Pull Request 441`_
+* Add EMBOSS datatypes (previously migrated to Tool Shed). (Thanks to `@bgruening
+  <https://github.com/bgruening>`__.) `Pull Request 148`_
+* Add new Hmmer & Stockholm datatypes. (Thanks to `@erasche
+  <https://github.com/erasche>`__.) `Pull Request 218`_
+* Add an RData datatype. (Thanks to `@erasche
+  <https://github.com/erasche>`__.) `Pull Request 239`_
+* Add Mauve XMFA datatype. (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 270`_
+* Add ConnectivityTable (\*.ct) (RNA 2D structure format) datatype.
+  (Thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__.)
+  `Pull Request 439`_
+* Add Illumina methylation datatype (IDAT). `Pull Request 335`_
+* Add Dot-Bracket notation (dbn) datatype. (Thanks to `@yhoogstrate
+  <https://github.com/yhoogstrate>`__.) `Pull Request 366`_
+* Add proteomics datatypes. (Thanks to `@iracooke
+  <https://github.com/iracooke>`__.) `Pull Request 389`_
+* Add new MzSQlite datatype for aggregating MS-based protein identification
+  information. (Thanks to `@jj-umn <https://github.com/jj-umn>`__.)
+  `Pull Request 427`_
+* Allow ``display_in_upload`` by default for h5 files. `Pull Request 470`_
+* Accept history id in tool state construction API. `Pull Request 268`_
+* Adding dataset uuid information to job and workflow invocation calls.
+  (Thanks to `@kellrott <https://github.com/kellrott>`__.) `Pull Request 181`_
+* Allow workflows executed via the API to optionally run with tool upgrades.
+  `Pull Request 402`_
+* Various PEP-8 fixes, more to come next release. (Thanks to efforts lead by
+  `@nsoranzo <https://github.com/nsoranzo>`__,
+  `@remimarenco <https://github.com/remimarenco>`__, and
+  `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 421`_, `Pull Request 422`_, `Pull Request 424`_,
+  `Pull Request 433`_, `Pull Request 449`_, `Pull Request 350`_,
+  `Pull Request 453`_, `Pull Request 394`_, `Pull Request 204`_
+* Nose update with ``--debug`` option added to ``run_tests.sh``. `Pull Request 172`_
+* Mention ``-installed`` option in ``run_tests.sh`` help. `Pull Request 323`_
+* Allow config of shed tool configuration file used with ``run_tests.sh
+  -installed``. `Pull Request 267`_
+* Allow running tests from outer directories. (Thanks to `@yhoogstrate
+  <https://github.com/yhoogstrate>`__.) `Pull Request 137`_
+* Enhancement allowing use of DRMAA when Galaxy instance isn't on a shared
+  volume with compute nodes. (Thanks to `@vavrusa <https://github.com/vavrusa>`__.) 
+  `Pull Request 160`_
+* Enhancements for Condor. 
+  (Thanks to `@dpryan79 <https://github.com/dpryan79>`__.)
+  `Pull Request 289`_
+* Update ``gsummary`` dependencies. (Thanks to `@bgruening
+  <https://github.com/bgruening>`__.) `Pull Request 182`_
+* Refactor visualization plugins to promote implementation to a class.
+  `Pull Request 186`_
+* Switch visualizations registry to entry_point config definition for 
+  built-in visualizations. `Pull Request 243`_
+* Set default value for IE sig proxy. (Thanks to `@erasche
+  <https://github.com/erasche>`__.) `Pull Request 197`_
+* Other smaller interactive environment updates. (Thanks to
+  `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 412`_
+* Refuse to load interactive debugging middleware under uwsgi.
+  `Pull Request 200`_
+* Move stylesheet source files from ``static/style/src`` to
+  ``client/galaxy/style``. `Pull Request 208`_
+* Fix sprite paths by using hardcoded path in task. `Pull Request 279`_
+* Improve various focus handling behavior in the UI. `Pull Request 280`_
+* Fixes and updates to various JavaScript-based unit tests. `Pull Request 219`_
+* Update various client libs (e.g. jQuery, Raven, d3, requirejs).
+  `Pull Request 221`_, `Pull Request 241`_
+* Ignore sample files from toolshed installed tools. (Thanks to `@Unode
+  <https://github.com/Unode>`__.) `Pull Request 248`_
+* Add test option for disabling truncation of test diffs.
+  `Pull Request 249`_
+* Documentation and wording improvements for various files.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 277`_
+* Allow external display applications optional access to files within a
+  dataset's ``extra_files_path``. `Pull Request 284`_
+* Improve the data manager JSON datatype's class to be more specific.
+  `Pull Request 285`_
+* Allow setting connection options for LDAP authentication. (Thanks to `@Unode
+  <https://github.com/Unode>`__.) `Pull Request 287`_
+* Fix for gzip being imported twice. (Thanks to `@bgruening
+  <https://github.com/bgruening>`__.) `Pull Request 296`_
+* Export environment variables from all dependent repositories.
+  (Thanks to `@bgruening <https://github.com/bgruening>`__.) `Pull Request 297`_
+* Improve ``auth_conf.xml.sample`` documentation. (Thanks to `@nsoranzo
+  <https://github.com/nsoranzo>`__.) `Pull Request 299`_
+* Preserve unix permissions, if any, when extracting .zip files during tool shed
+  installations. `Pull Request 308`_
+* Update mailing lists to use lists.galaxyproject.org addresses.
+  `Pull Request 316`_
+* Add new argument to inject volumes in ``docker_cmd()`` and ``launch()``.
+  (Thanks to `@bgruening <https://github.com/bgruening>`__.) `Pull Request 318`_
+* Create a separate application entry point to be used by uWSGI without
+  ``--ini-paste`` and PasteDeploy. `Pull Request 325`_
+* Implement groups in Tool Shed models and UI. `Pull Request 326`_,
+  `Pull Request 374`_
+* Improved datatype registry debugging. `Pull Request 329`_
+* Add app option to client process to allow building Tool Shed scripts. 
+  `Pull Request 332`_
+* Add grunt command ``grunt decompress`` to copy decompressed artifacts.
+  `Pull Request 173`_
+* Improvements to Tool Shed search API.
+  `Pull Request 334`_, `Pull Request 376`_, `Pull Request 420`_, `Pull Request 563`_
+* Restore link to mercurial browsing of repositories. `Pull Request 340`_
+* Enhance tool testing to only cleanup successful jobs. `Pull Request 341`_
+* Implement metadata size limiting. `Pull Request 345`_, `Pull Request 416`_
+* Add tool parameter argument to parameter help.
+  `Pull Request 346`_, `Pull Request 351`_
+* Refactor Tool Shed JavaScript files to be separated from Galaxy JavaScript.
+  `Pull Request 347`_
+* Fixed object store path - ``job_work``. (Thanks to `@charz
+  <https://github.com/charz>`__.) `Pull Request 354`_
+* Make debugging output nicer for IEs. (Thanks to `@bgruening
+  <https://github.com/bgruening>`__.) `Pull Request 355`_
+* Add configurable Docker hostname (Thanks to `@bgruening
+  <https://github.com/bgruening>`__.) `Pull Request 356`_
+* Improve XML styling of display applications. 
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 364`_
+* README improvements, including adding shields for Biostar, IRC, and Travis.
+  `Pull Request 368`_, `Pull Request 369`_
+* Use passed-in URL for the getbuilds method of ``parse_builds.py``. (Thanks to
+  `@dctrud <https://github.com/dctrud>`__.) `Pull Request 384`_
+* Add error message for bad dependency declaration (Thanks to `@kaktus42
+  <https://github.com/kaktus42>`__.) `Pull Request 419`_
+* Refactor JavaScript set up code for tagging toward reuse. `Pull Request 429`_
+* Improved md5sum/sha256 in Tool Shed actions. (Thanks to `@yhoogstrate
+  <https://github.com/yhoogstrate>`__.) `Pull Request 444`_
+* Include owner annotations of HDAs in published history display.
+  `Pull Request 490`_
+* Added log error for config file not found. (Thanks to `@Christian-B
+  <https://github.com/Christian-B>`__.) `Pull Request 443`_
+* Fix for installing dependencies from source when download of precompiled
+  binaries has failed. `Pull Request 531`_
+* Use a password field for deprecated cloudlaunch's secret key field.
+  `Pull Request 551`_
+* Normalize ``datatypes_by_ext`` upon entry. `Pull Request 567`_
+* Stop creating unneeded ``database/message.mako.py``. `Pull Request 575`_
+
+
+Fixes
+-------------------------------
+
+.. fixes
+
+* During the 15.07 development cycle dozens of fixes were pushed to the
+  ``release_15.05`` branch of Galaxy. These are all included in 15.07 and
+  summarized `here
+  <https://github.com/galaxyproject/galaxy/compare/v15.05...release_15.05>`__
+  (with special thanks to `@bgruening <https://github.com/bgruening>`__,
+  `@mvdbeek <https://github.com/mvdbeek>`__,
+  `@nsoranzo <https://github.com/nsoranzo>`__,
+  and `@abretaud <https://github.com/abretaud>`__).
+* Stop Trackster location wrapping in Chrome. (Thanks to `@dctrud
+  <https://github.com/dctrud>`__.) `Pull Request 440`_
+* Fix to return integer values when 'round to nearest integer' is selected in
+  the Group tool. `Pull Request 452`_
+* Fix external set_meta under sqlalchemy 1.0.0. `Pull Request 150`_
+* Fix ``base.util`` to get the changeset from git. `Pull Request 161`_
+* Remove ``biomart_test`` tool, http://test.biomart.org/ is down. (Thanks to
+  `@nsoranzo <https://github.com/nsoranzo>`__.) `Pull Request 183`_
+* Fix broken browser tests. `Pull Request 202`_
+* Fix assertions in ``test_VisualizationsRegistry.py``. `Pull Request 223`_
+* Fix ``run_tests.sh`` exit code. `Pull Request 225`_
+* Use ``unittest2`` to allow Python 2.7 assertions under Python 2.6.
+  `Pull Request 246`_
+* Have ``trans.user_ftp_dir`` return ``None`` instead of raising an error if
+  the upload dir is unset. `Pull Request 257`_
+* Allow IGV external displays to use a name with a comma in it. `Pull Request 261`_
+* Fix bug in history sharing. (Thanks to `@bgruening
+  <https://github.com/bgruening>`__.) `Pull Request 263`_
+* Convert tools directory to only use unix newline characters.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.) `Pull Request 264`_
+* Remove spurious numpy requirement on ``maf_stats.xml`` tool.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.) `Pull Request 264`_
+* Use more actionable ``package`` requirement type on some tools.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.) `Pull Request 264`_
+* Small doc fix for ``config/plugins/visualizations/README.txt``.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.) `Pull Request 264`_
+* Fix function names for dynamic tool test functions. `Pull Request 266`_
+* Fixes to dataset chooser. `Pull Request 272`_
+* Fix Trackster load order of require and d3. `Pull Request 291`_
+* Remove bad HTML tags from login template. (Thanks to `@Unode
+  <https://github.com/Unode>`__.) `Pull Request 294`_
+* Exclude curly braces from API key to fix Tool Shed bootstrapping. (Thanks to
+  `@mvdbeek <https://github.com/mvdbeek>`__.) `Pull Request 301`_
+* Fix repeated error messages with identical text. `Pull Request 306`_
+* Fix shared mako templates that would not work properly with the Tool Shed.
+  `Pull Request 307`_
+* Fix json sniffer for large files. `Pull Request 309`_
+* Fix a typo error in ``visualization/plugins/resource_parser.py``. (Thanks to
+  `@remimarenco <https://github.com/remimarenco>`__.) `Pull Request 312`_
+* Fix for generating metadata on Tool Shed tool tests under certain
+  conditions. `Pull Request 319`_
+* Fix Tool Shed metadata generation bug. (Thanks to `@bgruening
+  <https://github.com/bgruening>`__.) `Pull Request 333`_
+* Fix string appending error in library dictification. (Thanks to `@kellrott
+  <https://github.com/kellrott>`__.) `Pull Request 336`_
+* Hide constantly broken Tool Shed test results. `Pull Request 337`_
+* Fix for the earlier fix for handling of unix permissions in ``.zip`` files.
+  `Pull Request 344`_
+* Fix common API scripts so they don't fail when response has no URL param.
+  `Pull Request 352`_
+* Fix for dynamic test data fetching. `Pull Request 371`_
+* Fix bug in ``Text.split()`` method. (Thanks to `@nsoranzo
+  <https://github.com/nsoranzo>`__.) `Pull Request 373`_
+* Fix and update serializer call to non-``trans`` form for histories.
+  `Pull Request 390`_
+* Fix getting admin key when bootstrapping toolshed. (Thanks to
+  `@galaxyproject <https://github.com/galaxyproject>`__.) `Pull Request 392`_
+* Fix error when displaying job params for tools containing a section.
+  (Thanks to `@abretaud <https://github.com/abretaud>`__.) `Pull Request 398`_
+* Fix for setting environment variable paths for installed tools. (Thanks to
+  `@gregvonkuster <https://github.com/gregvonkuster>`__.) `Pull Request 403`_
+* Quota calculation fixes for managers. `Pull Request 406`_
+* Fix paging issue with whoosh due to missing ``str`` to ``int`` conversion.
+  `Pull Request 407`_
+* Use ``web.url_for( '/', qualified=True )`` instead of ``'/'`` to generate
+  URLs (for correctness). (Thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 414`_
+* Fix for async datasources. `Pull Request 423`_
+* Fix UI bug related to ID collisions for display of certain collections.
+  `Pull Request 431`_
+* Fix ``run_tests.sh`` example API test command; see the sketch after this
+  list. `Pull Request 432`_
+* Fixed typo in Tool Shed docstring. (Thanks to `@peterjc
+  <https://github.com/peterjc>`__.) `Pull Request 438`_
+* Various Trackster fixes. `Pull Request 446`_
+* Fix job query to allow admins to view other histories. `Pull Request 465`_
+* Fix ``dbkey`` filtering of multiple input targets. `Pull Request 534`_
+* Fixes for ``DeleteIntermediatesAction``. `Pull Request 547`_
+* Fix overriding the name variable in Tool Shed index code. `Pull Request 566`_
+* Fix batch mode input in new tool form UI. (Thanks to `@ericenns
+  <https://github.com/ericenns>`__.) `Pull Request 568`_
+* CSV/SAM sniff order fix. `Pull Request 571`_
+* Show field for entering remote/home URL when managing a Tool Shed repository.
+  `Pull Request 580`_
+* Fix regenerating repository metadata when coming from batch interface.
+  `Pull Request 600`_
+
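+As a usage illustration for the ``run_tests.sh`` fix above, an API test run
+takes roughly the following shape (a minimal sketch; the single-module path
+is hypothetical and may differ between checkouts):
+
+.. code-block:: shell
+
+    % # run the full API test suite
+    % ./run_tests.sh -api
+    % # or a single module (hypothetical path)
+    % ./run_tests.sh -api test/api/test_histories.py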
+
+.. github_links
+.. _Pull Request 80: https://github.com/galaxyproject/galaxy/pull/80
+.. _Pull Request 132: https://github.com/galaxyproject/galaxy/pull/132
+.. _Pull Request 137: https://github.com/galaxyproject/galaxy/pull/137
+.. _Pull Request 148: https://github.com/galaxyproject/galaxy/pull/148
+.. _Pull Request 149: https://github.com/galaxyproject/galaxy/pull/149
+.. _Pull Request 150: https://github.com/galaxyproject/galaxy/pull/150
+.. _Pull Request 151: https://github.com/galaxyproject/galaxy/pull/151
+.. _Pull Request 159: https://github.com/galaxyproject/galaxy/pull/159
+.. _Pull Request 160: https://github.com/galaxyproject/galaxy/pull/160
+.. _Pull Request 161: https://github.com/galaxyproject/galaxy/pull/161
+.. _Pull Request 163: https://github.com/galaxyproject/galaxy/pull/163
+.. _Pull Request 172: https://github.com/galaxyproject/galaxy/pull/172
+.. _Pull Request 173: https://github.com/galaxyproject/galaxy/pull/173
+.. _Pull Request 181: https://github.com/galaxyproject/galaxy/pull/181
+.. _Pull Request 182: https://github.com/galaxyproject/galaxy/pull/182
+.. _Pull Request 183: https://github.com/galaxyproject/galaxy/pull/183
+.. _Pull Request 186: https://github.com/galaxyproject/galaxy/pull/186
+.. _Pull Request 190: https://github.com/galaxyproject/galaxy/pull/190
+.. _Pull Request 191: https://github.com/galaxyproject/galaxy/pull/191
+.. _Pull Request 197: https://github.com/galaxyproject/galaxy/pull/197
+.. _Pull Request 200: https://github.com/galaxyproject/galaxy/pull/200
+.. _Pull Request 202: https://github.com/galaxyproject/galaxy/pull/202
+.. _Pull Request 204: https://github.com/galaxyproject/galaxy/pull/204
+.. _Pull Request 208: https://github.com/galaxyproject/galaxy/pull/208
+.. _Pull Request 218: https://github.com/galaxyproject/galaxy/pull/218
+.. _Pull Request 219: https://github.com/galaxyproject/galaxy/pull/219
+.. _Pull Request 221: https://github.com/galaxyproject/galaxy/pull/221
+.. _Pull Request 223: https://github.com/galaxyproject/galaxy/pull/223
+.. _Pull Request 225: https://github.com/galaxyproject/galaxy/pull/225
+.. _Pull Request 229: https://github.com/galaxyproject/galaxy/pull/229
+.. _Pull Request 230: https://github.com/galaxyproject/galaxy/pull/230
+.. _Pull Request 233: https://github.com/galaxyproject/galaxy/pull/233
+.. _Pull Request 239: https://github.com/galaxyproject/galaxy/pull/239
+.. _Pull Request 240: https://github.com/galaxyproject/galaxy/pull/240
+.. _Pull Request 241: https://github.com/galaxyproject/galaxy/pull/241
+.. _Pull Request 243: https://github.com/galaxyproject/galaxy/pull/243
+.. _Pull Request 246: https://github.com/galaxyproject/galaxy/pull/246
+.. _Pull Request 248: https://github.com/galaxyproject/galaxy/pull/248
+.. _Pull Request 249: https://github.com/galaxyproject/galaxy/pull/249
+.. _Pull Request 250: https://github.com/galaxyproject/galaxy/pull/250
+.. _Pull Request 257: https://github.com/galaxyproject/galaxy/pull/257
+.. _Pull Request 259: https://github.com/galaxyproject/galaxy/pull/259
+.. _Pull Request 261: https://github.com/galaxyproject/galaxy/pull/261
+.. _Pull Request 263: https://github.com/galaxyproject/galaxy/pull/263
+.. _Pull Request 264: https://github.com/galaxyproject/galaxy/pull/264
+.. _Pull Request 266: https://github.com/galaxyproject/galaxy/pull/266
+.. _Pull Request 267: https://github.com/galaxyproject/galaxy/pull/267
+.. _Pull Request 268: https://github.com/galaxyproject/galaxy/pull/268
+.. _Pull Request 269: https://github.com/galaxyproject/galaxy/pull/269
+.. _Pull Request 270: https://github.com/galaxyproject/galaxy/pull/270
+.. _Pull Request 272: https://github.com/galaxyproject/galaxy/pull/272
+.. _Pull Request 274: https://github.com/galaxyproject/galaxy/pull/274
+.. _Pull Request 277: https://github.com/galaxyproject/galaxy/pull/277
+.. _Pull Request 279: https://github.com/galaxyproject/galaxy/pull/279
+.. _Pull Request 280: https://github.com/galaxyproject/galaxy/pull/280
+.. _Pull Request 284: https://github.com/galaxyproject/galaxy/pull/284
+.. _Pull Request 285: https://github.com/galaxyproject/galaxy/pull/285
+.. _Pull Request 286: https://github.com/galaxyproject/galaxy/pull/286
+.. _Pull Request 287: https://github.com/galaxyproject/galaxy/pull/287
+.. _Pull Request 289: https://github.com/galaxyproject/galaxy/pull/289
+.. _Pull Request 290: https://github.com/galaxyproject/galaxy/pull/290
+.. _Pull Request 291: https://github.com/galaxyproject/galaxy/pull/291
+.. _Pull Request 294: https://github.com/galaxyproject/galaxy/pull/294
+.. _Pull Request 295: https://github.com/galaxyproject/galaxy/pull/295
+.. _Pull Request 296: https://github.com/galaxyproject/galaxy/pull/296
+.. _Pull Request 297: https://github.com/galaxyproject/galaxy/pull/297
+.. _Pull Request 299: https://github.com/galaxyproject/galaxy/pull/299
+.. _Pull Request 301: https://github.com/galaxyproject/galaxy/pull/301
+.. _Pull Request 302: https://github.com/galaxyproject/galaxy/pull/302
+.. _Pull Request 303: https://github.com/galaxyproject/galaxy/pull/303
+.. _Pull Request 306: https://github.com/galaxyproject/galaxy/pull/306
+.. _Pull Request 307: https://github.com/galaxyproject/galaxy/pull/307
+.. _Pull Request 308: https://github.com/galaxyproject/galaxy/pull/308
+.. _Pull Request 309: https://github.com/galaxyproject/galaxy/pull/309
+.. _Pull Request 311: https://github.com/galaxyproject/galaxy/pull/311
+.. _Pull Request 312: https://github.com/galaxyproject/galaxy/pull/312
+.. _Pull Request 314: https://github.com/galaxyproject/galaxy/pull/314
+.. _Pull Request 316: https://github.com/galaxyproject/galaxy/pull/316
+.. _Pull Request 318: https://github.com/galaxyproject/galaxy/pull/318
+.. _Pull Request 319: https://github.com/galaxyproject/galaxy/pull/319
+.. _Pull Request 322: https://github.com/galaxyproject/galaxy/pull/322
+.. _Pull Request 323: https://github.com/galaxyproject/galaxy/pull/323
+.. _Pull Request 325: https://github.com/galaxyproject/galaxy/pull/325
+.. _Pull Request 326: https://github.com/galaxyproject/galaxy/pull/326
+.. _Pull Request 328: https://github.com/galaxyproject/galaxy/pull/328
+.. _Pull Request 329: https://github.com/galaxyproject/galaxy/pull/329
+.. _Pull Request 330: https://github.com/galaxyproject/galaxy/pull/330
+.. _Pull Request 332: https://github.com/galaxyproject/galaxy/pull/332
+.. _Pull Request 333: https://github.com/galaxyproject/galaxy/pull/333
+.. _Pull Request 334: https://github.com/galaxyproject/galaxy/pull/334
+.. _Pull Request 335: https://github.com/galaxyproject/galaxy/pull/335
+.. _Pull Request 336: https://github.com/galaxyproject/galaxy/pull/336
+.. _Pull Request 337: https://github.com/galaxyproject/galaxy/pull/337
+.. _Pull Request 338: https://github.com/galaxyproject/galaxy/pull/338
+.. _Pull Request 339: https://github.com/galaxyproject/galaxy/pull/339
+.. _Pull Request 340: https://github.com/galaxyproject/galaxy/pull/340
+.. _Pull Request 341: https://github.com/galaxyproject/galaxy/pull/341
+.. _Pull Request 342: https://github.com/galaxyproject/galaxy/pull/342
+.. _Pull Request 344: https://github.com/galaxyproject/galaxy/pull/344
+.. _Pull Request 345: https://github.com/galaxyproject/galaxy/pull/345
+.. _Pull Request 346: https://github.com/galaxyproject/galaxy/pull/346
+.. _Pull Request 347: https://github.com/galaxyproject/galaxy/pull/347
+.. _Pull Request 349: https://github.com/galaxyproject/galaxy/pull/349
+.. _Pull Request 350: https://github.com/galaxyproject/galaxy/pull/350
+.. _Pull Request 351: https://github.com/galaxyproject/galaxy/pull/351
+.. _Pull Request 352: https://github.com/galaxyproject/galaxy/pull/352
+.. _Pull Request 354: https://github.com/galaxyproject/galaxy/pull/354
+.. _Pull Request 355: https://github.com/galaxyproject/galaxy/pull/355
+.. _Pull Request 356: https://github.com/galaxyproject/galaxy/pull/356
+.. _Pull Request 362: https://github.com/galaxyproject/galaxy/pull/362
+.. _Pull Request 364: https://github.com/galaxyproject/galaxy/pull/364
+.. _Pull Request 366: https://github.com/galaxyproject/galaxy/pull/366
+.. _Pull Request 368: https://github.com/galaxyproject/galaxy/pull/368
+.. _Pull Request 369: https://github.com/galaxyproject/galaxy/pull/369
+.. _Pull Request 370: https://github.com/galaxyproject/galaxy/pull/370
+.. _Pull Request 371: https://github.com/galaxyproject/galaxy/pull/371
+.. _Pull Request 373: https://github.com/galaxyproject/galaxy/pull/373
+.. _Pull Request 374: https://github.com/galaxyproject/galaxy/pull/374
+.. _Pull Request 376: https://github.com/galaxyproject/galaxy/pull/376
+.. _Pull Request 381: https://github.com/galaxyproject/galaxy/pull/381
+.. _Pull Request 383: https://github.com/galaxyproject/galaxy/pull/383
+.. _Pull Request 384: https://github.com/galaxyproject/galaxy/pull/384
+.. _Pull Request 386: https://github.com/galaxyproject/galaxy/pull/386
+.. _Pull Request 389: https://github.com/galaxyproject/galaxy/pull/389
+.. _Pull Request 390: https://github.com/galaxyproject/galaxy/pull/390
+.. _Pull Request 392: https://github.com/galaxyproject/galaxy/pull/392
+.. _Pull Request 394: https://github.com/galaxyproject/galaxy/pull/394
+.. _Pull Request 395: https://github.com/galaxyproject/galaxy/pull/395
+.. _Pull Request 397: https://github.com/galaxyproject/galaxy/pull/397
+.. _Pull Request 398: https://github.com/galaxyproject/galaxy/pull/398
+.. _Pull Request 399: https://github.com/galaxyproject/galaxy/pull/399
+.. _Pull Request 402: https://github.com/galaxyproject/galaxy/pull/402
+.. _Pull Request 403: https://github.com/galaxyproject/galaxy/pull/403
+.. _Pull Request 405: https://github.com/galaxyproject/galaxy/pull/405
+.. _Pull Request 406: https://github.com/galaxyproject/galaxy/pull/406
+.. _Pull Request 407: https://github.com/galaxyproject/galaxy/pull/407
+.. _Pull Request 410: https://github.com/galaxyproject/galaxy/pull/410
+.. _Pull Request 412: https://github.com/galaxyproject/galaxy/pull/412
+.. _Pull Request 414: https://github.com/galaxyproject/galaxy/pull/414
+.. _Pull Request 416: https://github.com/galaxyproject/galaxy/pull/416
+.. _Pull Request 419: https://github.com/galaxyproject/galaxy/pull/419
+.. _Pull Request 420: https://github.com/galaxyproject/galaxy/pull/420
+.. _Pull Request 421: https://github.com/galaxyproject/galaxy/pull/421
+.. _Pull Request 422: https://github.com/galaxyproject/galaxy/pull/422
+.. _Pull Request 423: https://github.com/galaxyproject/galaxy/pull/423
+.. _Pull Request 424: https://github.com/galaxyproject/galaxy/pull/424
+.. _Pull Request 426: https://github.com/galaxyproject/galaxy/pull/426
+.. _Pull Request 427: https://github.com/galaxyproject/galaxy/pull/427
+.. _Pull Request 429: https://github.com/galaxyproject/galaxy/pull/429
+.. _Pull Request 431: https://github.com/galaxyproject/galaxy/pull/431
+.. _Pull Request 432: https://github.com/galaxyproject/galaxy/pull/432
+.. _Pull Request 433: https://github.com/galaxyproject/galaxy/pull/433
+.. _Pull Request 438: https://github.com/galaxyproject/galaxy/pull/438
+.. _Pull Request 439: https://github.com/galaxyproject/galaxy/pull/439
+.. _Pull Request 440: https://github.com/galaxyproject/galaxy/pull/440
+.. _Pull Request 441: https://github.com/galaxyproject/galaxy/pull/441
+.. _Pull Request 443: https://github.com/galaxyproject/galaxy/pull/443
+.. _Pull Request 444: https://github.com/galaxyproject/galaxy/pull/444
+.. _Pull Request 446: https://github.com/galaxyproject/galaxy/pull/446
+.. _Pull Request 449: https://github.com/galaxyproject/galaxy/pull/449
+.. _Pull Request 452: https://github.com/galaxyproject/galaxy/pull/452
+.. _Pull Request 453: https://github.com/galaxyproject/galaxy/pull/453
+.. _Pull Request 454: https://github.com/galaxyproject/galaxy/pull/454
+.. _Pull Request 455: https://github.com/galaxyproject/galaxy/pull/455
+.. _Pull Request 458: https://github.com/galaxyproject/galaxy/pull/458
+.. _Pull Request 459: https://github.com/galaxyproject/galaxy/pull/459
+.. _Pull Request 462: https://github.com/galaxyproject/galaxy/pull/462
+.. _Pull Request 463: https://github.com/galaxyproject/galaxy/pull/463
+.. _Pull Request 465: https://github.com/galaxyproject/galaxy/pull/465
+.. _Pull Request 470: https://github.com/galaxyproject/galaxy/pull/470
+.. _Pull Request 472: https://github.com/galaxyproject/galaxy/pull/472
+.. _Pull Request 473: https://github.com/galaxyproject/galaxy/pull/473
+.. _Pull Request 478: https://github.com/galaxyproject/galaxy/pull/478
+.. _Pull Request 479: https://github.com/galaxyproject/galaxy/pull/479
+.. _Pull Request 480: https://github.com/galaxyproject/galaxy/pull/480
+.. _Pull Request 481: https://github.com/galaxyproject/galaxy/pull/481
+.. _Pull Request 482: https://github.com/galaxyproject/galaxy/pull/482
+.. _Pull Request 484: https://github.com/galaxyproject/galaxy/pull/484
+.. _Pull Request 485: https://github.com/galaxyproject/galaxy/pull/485
+.. _Pull Request 486: https://github.com/galaxyproject/galaxy/pull/486
+.. _Pull Request 487: https://github.com/galaxyproject/galaxy/pull/487
+.. _Pull Request 488: https://github.com/galaxyproject/galaxy/pull/488
+.. _Pull Request 490: https://github.com/galaxyproject/galaxy/pull/490
+.. _Pull Request 491: https://github.com/galaxyproject/galaxy/pull/491
+.. _Pull Request 492: https://github.com/galaxyproject/galaxy/pull/492
+.. _Pull Request 493: https://github.com/galaxyproject/galaxy/pull/493
+.. _Pull Request 494: https://github.com/galaxyproject/galaxy/pull/494
+.. _Pull Request 495: https://github.com/galaxyproject/galaxy/pull/495
+.. _Pull Request 496: https://github.com/galaxyproject/galaxy/pull/496
+.. _Pull Request 499: https://github.com/galaxyproject/galaxy/pull/499
+.. _Pull Request 500: https://github.com/galaxyproject/galaxy/pull/500
+.. _Pull Request 509: https://github.com/galaxyproject/galaxy/pull/509
+.. _Pull Request 510: https://github.com/galaxyproject/galaxy/pull/510
+.. _Pull Request 511: https://github.com/galaxyproject/galaxy/pull/511
+.. _Pull Request 512: https://github.com/galaxyproject/galaxy/pull/512
+.. _Pull Request 514: https://github.com/galaxyproject/galaxy/pull/514
+.. _Pull Request 515: https://github.com/galaxyproject/galaxy/pull/515
+.. _Pull Request 516: https://github.com/galaxyproject/galaxy/pull/516
+.. _Pull Request 517: https://github.com/galaxyproject/galaxy/pull/517
+.. _Pull Request 526: https://github.com/galaxyproject/galaxy/pull/526
+.. _Pull Request 527: https://github.com/galaxyproject/galaxy/pull/527
+.. _Pull Request 529: https://github.com/galaxyproject/galaxy/pull/529
+.. _Pull Request 530: https://github.com/galaxyproject/galaxy/pull/530
+.. _Pull Request 531: https://github.com/galaxyproject/galaxy/pull/531
+.. _Pull Request 532: https://github.com/galaxyproject/galaxy/pull/532
+.. _Pull Request 533: https://github.com/galaxyproject/galaxy/pull/533
+.. _Pull Request 534: https://github.com/galaxyproject/galaxy/pull/534
+.. _Pull Request 536: https://github.com/galaxyproject/galaxy/pull/536
+.. _Pull Request 537: https://github.com/galaxyproject/galaxy/pull/537
+.. _Pull Request 538: https://github.com/galaxyproject/galaxy/pull/538
+.. _Pull Request 539: https://github.com/galaxyproject/galaxy/pull/539
+.. _Pull Request 540: https://github.com/galaxyproject/galaxy/pull/540
+.. _Pull Request 543: https://github.com/galaxyproject/galaxy/pull/543
+.. _Pull Request 544: https://github.com/galaxyproject/galaxy/pull/544
+.. _Pull Request 545: https://github.com/galaxyproject/galaxy/pull/545
+.. _Pull Request 546: https://github.com/galaxyproject/galaxy/pull/546
+.. _Pull Request 547: https://github.com/galaxyproject/galaxy/pull/547
+.. _Pull Request 550: https://github.com/galaxyproject/galaxy/pull/550
+.. _Pull Request 551: https://github.com/galaxyproject/galaxy/pull/551
+.. _Pull Request 553: https://github.com/galaxyproject/galaxy/pull/553
+.. _Pull Request 561: https://github.com/galaxyproject/galaxy/pull/561
+.. _Pull Request 563: https://github.com/galaxyproject/galaxy/pull/563
+.. _Pull Request 565: https://github.com/galaxyproject/galaxy/pull/565
+.. _Pull Request 566: https://github.com/galaxyproject/galaxy/pull/566
+.. _Pull Request 567: https://github.com/galaxyproject/galaxy/pull/567
+.. _Pull Request 568: https://github.com/galaxyproject/galaxy/pull/568
+.. _Pull Request 569: https://github.com/galaxyproject/galaxy/pull/569
+.. _Pull Request 571: https://github.com/galaxyproject/galaxy/pull/571
+.. _Pull Request 575: https://github.com/galaxyproject/galaxy/pull/575
+.. _Pull Request 577: https://github.com/galaxyproject/galaxy/pull/577
+.. _Pull Request 580: https://github.com/galaxyproject/galaxy/pull/580
+.. _Pull Request 581: https://github.com/galaxyproject/galaxy/pull/581
+.. _Pull Request 583: https://github.com/galaxyproject/galaxy/pull/583
+.. _Pull Request 584: https://github.com/galaxyproject/galaxy/pull/584
+.. _Pull Request 585: https://github.com/galaxyproject/galaxy/pull/585
+.. _Pull Request 587: https://github.com/galaxyproject/galaxy/pull/587
+.. _Pull Request 589: https://github.com/galaxyproject/galaxy/pull/589
+.. _Pull Request 591: https://github.com/galaxyproject/galaxy/pull/591
+.. _Pull Request 592: https://github.com/galaxyproject/galaxy/pull/592
+.. _Pull Request 594: https://github.com/galaxyproject/galaxy/pull/594
+.. _Pull Request 600: https://github.com/galaxyproject/galaxy/pull/600
+.. _Pull Request 602: https://github.com/galaxyproject/galaxy/pull/602
+.. _Pull Request 625: https://github.com/galaxyproject/galaxy/pull/625
diff --git a/doc/source/releases/15.07_announce.rst b/doc/source/releases/15.07_announce.rst
new file mode 100644
index 0000000..f9cafcb
--- /dev/null
+++ b/doc/source/releases/15.07_announce.rst
@@ -0,0 +1,70 @@
+===========================================================
+July 2015 Galaxy Release (v 15.07)
+===========================================================
+
+
+.. include:: _header.rst
+
+Highlights
+===========================================================
+
+**Interactive Environments**
+  The interactive environments (IE) framework features several updates this
+  release, and Galaxy is now distributed with a new IE for RStudio
+  implemented by Eric Rasche.
+
+**Workflow Editor Enhancements**
+  The workflow editor has been significantly revamped: it now uses newer tool
+  form components and allows construction of workflows with tools explicitly
+  producing output collections.
+
+**Policies for Committers and Pull Requests**
+  The process for adding committers to the project and fielding pull requests
+  has been formalized and documented in the source code. Three new committers
+  have been added to the project: Björn Grüning, Nicola Soranzo, and Eric Rasche.
+
+
+`GitHub <https://github.com/galaxyproject/galaxy>`__
+===========================================================
+
+New
+  .. code-block:: shell
+  
+      % git clone -b master https://github.com/galaxyproject/galaxy.git
+
+Update to latest stable release
+  .. code-block:: shell
+  
+      % git checkout master && git pull --ff-only origin master
+
+Update to exact version
+  .. code-block:: shell
+  
+      % git checkout v15.07
+
+
+`BitBucket <https://bitbucket.org/galaxy/galaxy-dist>`__
+===========================================================
+
+Upgrade
+  .. code-block:: shell
+  
+      % hg pull 
+      % hg update latest_15.07
+
+
+See `our wiki <https://wiki.galaxyproject.org/Develop/SourceCode>`__ for additional details regarding the source code locations.
+
+Deprecation Notice
+===========================================================
+
+Galaxy's built-in cloudlaunch functionality is being replaced by `CloudLaunch
+<https://github.com/galaxyproject/cloudlaunch>`__ and will be removed from the
+core codebase in the next release.
+
+Release Notes
+===========================================================
+
+.. include:: 15.07.rst
+   :start-after: enhancements
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/15.10.rst b/doc/source/releases/15.10.rst
new file mode 100644
index 0000000..3ff0949
--- /dev/null
+++ b/doc/source/releases/15.10.rst
@@ -0,0 +1,641 @@
+
+.. to_doc
+
+-------------------------------
+15.10
+-------------------------------
+
+Enhancements
+-------------------------------
+
+.. enhancements
+
+* Greatly enhance the reports application - including improved styling,
+  new landing page, pagination, and sparklines. Huge thanks to
+  `@Airistotal <https://github.com/Airistotal>`__.
+  `Pull Request 370`_, `Pull Request 410`_, `Pull Request 609`_,
+  `Pull Request 472`_, `Pull Request 576`_, `Pull Request 678`_,
+  `Pull Request 675`_, `Pull Request 584`_
+* Enhanced upload tool with support for composite types and improved styling.
+  `Pull Request 605`_, `Pull Request 620`_, `Pull Request 577`_,
+  `Pull Request 628`_
+* Improved API and UI for data libraries - including new support for library 
+  folder management and search.
+  `Pull Request 806`_, `Pull Request 820`_
+* Expose more history panel options for collection datasets.
+  `Pull Request 854`_
+* Improve UI encapsulation for panel rendering, iframes and tool forms.
+  `Pull Request 706`_, `Pull Request 739`_, `Pull Request 786`_
+* Implement whitelist for tools that generate HTML.
+  `Pull Request 510`_
+* Add ``regex_replace`` tool dependency install action.
+  `Pull Request 457`_
+* Document Galaxy interactive environments (GIEs).
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 803`_
+* Explicit project procedure language on reversions and WIP. (Thanks to `@erasche
+  <https://github.com/erasche>`__.)
+  `Pull Request 556`_
+* README improvements: GitHub issues, security email contact, and CI testing.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 574`_
+* Remove history controller methods.
+  `Pull Request 578`_
+* Remove older references to ``get_disk_size`` and associated methods in histories code.
+  `Pull Request 582`_
+* Cleanup of the Tool Shed's URL handling methods.
+  `Pull Request 586`_
+* Remove old cloudlaunch in favor of new
+  `web application <https://github.com/galaxyproject/cloudlaunch>`__.
+  `Pull Request 597`_
+* Enable a Grunt develop mode.
+  `Pull Request 601`_
+* Allow tools to explicitly produce nested collections.
+  `Pull Request 538`_
+* Update output actions code for some collection operations.
+  `Pull Request 544`_
+* Allow ``discover_datasets`` tag to use ``format`` attribute (instead of ``ext``).
+  `Pull Request 764`_
+* Implement ``min`` and ``max`` attributes for multiple data input parameters.
+  `Pull Request 765`_
+* Style fixes and test fixes for all tool shed code. (Thanks to
+  `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 650`_
+* Style fixes for ``lib/galaxy_utils/``. (Thanks to `@nsoranzo
+  <https://github.com/nsoranzo>`__.)
+  `Pull Request 604`_
+* Style fixes for ``test/``. (Thanks to `@nsoranzo
+  <https://github.com/nsoranzo>`__.)
+  `Pull Request 653`_
+* Style fixes for ``contrib/`` and ``lib/tool_shed/``. (Thanks to `@nsoranzo
+  <https://github.com/nsoranzo>`__.)
+  `Pull Request 708`_
+* Fix style violations introduced when merging from ``release_15.07``. (Thanks to `@nsoranzo
+  <https://github.com/nsoranzo>`__.)
+  `Pull Request 688`_
+* Style fixes for tool shed database migrations. (Thanks to
+  `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 770`_
+* Changed refresh time on reports homepage. (Thanks to `@Airistotal
+  <https://github.com/Airistotal>`__.)
+  `Pull Request 609`_
+* Remove unused ``fastx_clipper_sequences.txt`` file.
+  (Thanks to `@lparsons <https://github.com/lparsons>`__.)
+  `Pull Request 611`_
+* Move ``manual_builds.txt`` to ``manual_builds.txt.sample``.
+  (Thanks to `@lparsons <https://github.com/lparsons>`__.)
+  `Pull Request 612`_
+* Expose tool shed version through API.
+  `Pull Request 613`_
+* Improvements to history API allowing fetching of histories shared with a given user.
+  `Pull Request 614`_
+* Allow deserializing dataset permissions via the API.
+  `Pull Request 640`_, `Pull Request 496`_
+* Merge nearly-identical methods in ``shed_util_common`` module.
+  `Pull Request 651`_
+* Expose tool config file path to admins via the API.
+  `Pull Request 652`_
+* Add Galaxy developer documentation section to docs.
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 654`_
+* Password reset improvements.
+  `Pull Request 655`_
+* Allow accessing repeat and conditional elements in workflow post job action
+  parameters.
+  `Pull Request 662`_
+* Use new path variable syntax available since Routes 1.9.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 665`_
+* Lazy-load histories into multi-history view.
+  `Pull Request 676`_
+* Use standard Galaxy message box styles on tool help texts that use reST
+  admonitions.
+  `Pull Request 685`_
+* Set a default value for copy history dialog radio input (to copy *all*).
+  `Pull Request 693`_
+* Remove various code duplication.
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 694`_
+* Use ``setup.py`` for all Python installations during dependency installation.
+  (Thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 698`_
+* Reorganize procedures documentation content. (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 702`_
+* Upgrade sqlalchemy to 1.0.8.
+  `Pull Request 709`_
+* Enhance GenomeSpace tools to report the GS tool name.
+  `Pull Request 711`_
+* Add Oxli datatypes. (Thanks to `@dib-lab <https://github.com/dib-lab>`__.)
+  `Pull Request 721`_
+* Extend root controller to allow specifying a history id.
+  `Pull Request 725`_
+* Fix multi-history copy/new to set the history as current.
+  `Pull Request 727`_
+* Fix workflow post job actions when rerunning a job.
+  `Pull Request 730`_
+* Trigger iframe event for history.
+  `Pull Request 732`_
+* Add testing environment variable ``GALAXY_TEST_LOGGING_CONFIG``.
+  `Pull Request 738`_
+* Do not explicitly require pexpect Python dependency.
+  `Pull Request 741`_
+* API and UI enhancements for history copying.
+  `Pull Request 742`_
+* Remove deprecated objectstore options from sample documentation.
+  `Pull Request 748`_
+* Improved jobs API documentation.
+  `Pull Request 751`_
+* Improved state tracking of collections in history panel.
+  `Pull Request 755`_, `Pull Request 819`_
+* Updates and fixes for dockerized Galaxy testing.
+  `Pull Request 759`_
+* When submitting tool error reports, include 'old' tool id as part of subject
+  line.
+  `Pull Request 761`_
+* Eliminate use of bare exceptions from code.
+  `Pull Request 763`_
+* Add encoded IDs to job error reports and also include a link to ``show_params``.
+  `Pull Request 771`_
+* In the job metrics collection log, print the id of the ``has_metrics`` object.
+  `Pull Request 772`_
+* Add docker command wrapper. (Thanks to `@scholtalbers
+  <https://github.com/scholtalbers>`__.)
+  `Pull Request 777`_
+* Unify the docker command, command_inject, and command_wrapper. (Thanks to
+  `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 782`_
+* Test case to prevent regression of `#753
+  <https://github.com/galaxyproject/galaxy/issues/753>`__.
+  `Pull Request 784`_
+* Refactor interactive environment launching to use Docker's ``-P`` option.
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 790`_
+* Print ``has_metrics`` class name instead of object memory address. (Thanks to
+  `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 791`_
+* Improved state tracking of datasets in the history panel.
+  `Pull Request 793`_
+* Add sentry configuration option and doc to ``galaxy.ini.sample``.
+  `Pull Request 799`_
+* Small tweaks to tool loading.
+  `Pull Request 813`_
+* Implement request timing middleware via statsd.
+  `Pull Request 821`_
+* Various small ``run_tests.sh`` fixes/enhancements.
+  `Pull Request 825`_
+* Remove test broken after `#786
+  <https://github.com/galaxyproject/galaxy/issues/786>`__. (Thanks to
+  `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 828`_
+* Improved file handling in local runner.
+  `Pull Request 830`_
+* Swap debug mode to be off by default in ``galaxy.ini.sample``.
+  `Pull Request 840`_
+* Update documentation for the ``gff_filter_by_attribute`` tool. (Thanks to
+  `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 842`_
+* Allow reuse of parent ``/tmp`` directory in dockerized testing.
+  `Pull Request 843`_
+* More small adjustments to test script ``run_tests.sh``.
+  `Pull Request 844`_
+* UCSC tool cleanup - remove unused, deprecated code and add citations to existing tools.
+  `Pull Request 846`_
+* Add more distribution tool citations.
+  `Pull Request 847`_
+* Remove unused ``echo`` tool.
+  `Pull Request 848`_
+* Update interactive environments for this release. (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 849`_
+* Enhancements for IPython interactive environment allowing it to run in
+  certain deployments.
+  (Thanks to `@scholtalbers <https://github.com/scholtalbers>`__.)
+  `Pull Request 462`_
+* Update style tools ``grunt-contrib-less``, ``grunt-spritesmith``.
+  `Pull Request 473`_
+* Preserve tags and annotations when copying datasets between histories.
+  `Pull Request 482`_
+* Implement grunt task for compiling handlebars templates.
+  `Pull Request 484`_
+* Improve shortcut logic in Python job code.
+  `Pull Request 488`_
+* Do not render dataset selector when there are no datasets to select in history.
+  `Pull Request 491`_
+* Clean up the formatting of ``mapping.py``.
+  `Pull Request 492`_
+* Allow interactive environments to have their own static base url.
+  `Pull Request 500`_
+* Improve history dataset manager testing to verify set order of tags.
+  `Pull Request 493`_
+* Enhance directory handling for tool shed installations.
+  (Thanks to `@gregvonkuster <https://github.com/gregvonkuster>`__.)
+  `Pull Request 511`_
+* Improved tag handling for anonymous users.
+  `Pull Request 516`_
+* Allow uninstalling multiple toolshed repositories at once.
+  `Pull Request 517`_
+* Allow the user to add the dataset for Trackster visualization to an existing
+  visualization.
+  `Pull Request 526`_
+* Update the ``extract_genomic_dna`` tool to support 2bit files from local data.
+  (Thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__.)
+  `Pull Request 529`_
+* Remove unused serializer ``_get_history_data``.
+  `Pull Request 530`_
+* Render ``nice_size`` on the client.
+  `Pull Request 532`_
+* Swap ``track_jobs_in_database`` to be ``True`` by default.
+  `Pull Request 533`_
+* Update client libs: farbtastic, jqtouch, bib2json.
+  `Pull Request 536`_
+* Standardize ``nice_size()`` import.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 537`_
+* Remove older, unused history CSS rules.
+  `Pull Request 539`_
+* Allow ordering of the history index list using an ``order`` query
+  parameter; see the sketch after this list.
+  `Pull Request 540`_
+* Improve job config error message when no handler elements are defined.
+  `Pull Request 543`_
+* Allow use of History ``disk_size`` as a column in queries.
+  `Pull Request 550`_
+* Implement API for dealing with converted datasets.
+  `Pull Request 553`_
+* Include tool shed API information in Galaxy's documentation.
+  `Pull Request 569`_
+* Stop creating unneeded ``database/message.mako.py`` file.
+  `Pull Request 575`_
+* Paginate Saved Histories at 10 histories per page.
+  `Pull Request 581`_
+* Remove some unused files in ``/static``.
+  `Pull Request 583`_
+* Minor cleanup of some Python iterator usages.
+  `Pull Request 589`_
+* Minor tool shed code style cleanup.
+  `Pull Request 591`_
+* Include ``valid_tools`` as part of metadata returned with the repository
+  install information.
+  `Pull Request 592`_
+* New API for published histories.
+  `Pull Request 594`_
+
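+As a sketch of the new history index ordering mentioned above (the instance
+URL, API key placeholder, and the ``update_time`` order key are assumptions
+made for illustration):
+
+.. code-block:: shell
+
+    % # list histories ordered by an assumed update_time key
+    % curl 'http://localhost:8080/api/histories?order=update_time&key=<api-key>'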
+
+
+Fixes
+-------------------------------
+
+
+.. fixes
+
+* Fix batch mode input in new tool form UI.
+  (Thanks to `@ericenns <https://github.com/ericenns>`__.)
+  `Pull Request 568`_
+* Tool lineage fixes.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 596`_
+* Fix bug in reload API where id wasn't recognised.
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 619`_
+* Correct typo in tool shed action handler code.
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 561`_
+* Small fixes for UI functional tests.
+  `Pull Request 485`_, `Pull Request 487`_
+* Bug fixes and related style adjustments.
+  (Thanks in part to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 478`_, `Pull Request 479`_, `Pull Request 480`_, `Pull Request 481`_
+* Fixes to make Sweepster functional.
+  `Pull Request 486`_
+* Add ``client`` to the ignore list so that running tox locally doesn't try
+  to test JavaScript.
+  `Pull Request 494`_
+* Fix for server error when installing certain repositories.
+  `Pull Request 495`_
+* Fix BBI data provider underflow bug when computing standard deviation.
+  `Pull Request 499`_
+* Add numpad support to integer inputs.
+  `Pull Request 509`_
+* Fix for ``rdata`` datatype.
+  `Pull Request 512`_
+* Fix for the Tool Shed escape method when an error/log message contains
+  non-ASCII characters.
+  `Pull Request 514`_
+* Restore line wrongly removed in fa7c5c5 and again in 9805294.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 515`_
+* Ensure file exists before attempting ``chmod`` in tool shed install code.
+  `Pull Request 545`_
+* Filter deleted library datasets when using the library data parameter.
+  `Pull Request 546`_
+* Correct examples in workflow invocation documentation.
+  `Pull Request 565`_
+* Fix the CSS class for reports brand.
+  `Pull Request 585`_
+* Retab ``.loc.sample`` files.
+  `Pull Request 587`_
+* Fix ``liftOver.loc`` not being accessible in local data.
+  (Thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__.)
+  `Pull Request 588`_
+* Ensure removal of temporary tool shed files.
+  `Pull Request 607`_
+* Bug fix in Reports.
+  (Thanks to `@Airistotal <https://github.com/Airistotal>`__.)
+  `Pull Request 626`_
+* Cleaner commit history for `#629
+  <https://github.com/galaxyproject/galaxy/issues/629>`__ and array syntax
+  for pathspec.
+  (Thanks to `@chambm <https://github.com/chambm>`__.)
+  `Pull Request 632`_
+* Add missing JS build.
+  `Pull Request 648`_
+* Small bug fixes in Reports.
+  (Thanks to `@Airistotal <https://github.com/Airistotal>`__.)
+  `Pull Request 656`_
+* Fix tool image paths.
+  `Pull Request 660`_
+* Remove reports style that affected the whole application.
+  `Pull Request 661`_
+* Fix centering bug in Firefox.
+  (Thanks to `@Airistotal <https://github.com/Airistotal>`__.)
+  `Pull Request 669`_
+* Don't overwrite $R_LIBS path during installation.
+  (Thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 671`_
+* Fix tool shed installation issue
+  (`#525 <https://github.com/galaxyproject/galaxy/issues/525>`__).
+  (Thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 681`_
+* Return copied collection from manager copy method.
+  `Pull Request 682`_
+* Fix multi-history fetch flag to allow re-fetching after collection sort.
+  `Pull Request 695`_
+* Small fixes for LDAP auth.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 696`_
+* Fix regression introduced in tool shed code this cycle.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 701`_
+* Fix workflow editor post job action argument display.
+  `Pull Request 703`_
+* Fix to get custom tabular datatypes.
+  (Thanks to `@richard-burhans <https://github.com/richard-burhans>`__.)
+  `Pull Request 705`_
+* Fix for logging in using OpenID when the OpenID isn't associated.
+  `Pull Request 712`_
+* Misc fixes.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 720`_
+* Fix history option flags.
+  `Pull Request 726`_
+* Fix ``Galaxy.user`` in client code to be available when not using a require.
+  `Pull Request 728`_
+* Remove no-op ``__init__`` methods from ``lib/galaxy/datatypes/binary.py``.
+  (Thanks to `@mr-c <https://github.com/mr-c>`__.)
+  `Pull Request 729`_
+* Fix more API tests.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 731`_
+* Fixes for framework test cases.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 752`_, `Pull Request 757`_
+* Bug fix in test error report handling code.
+  `Pull Request 760`_
+* Tool form error messages and rerun fixes.
+  `Pull Request 766`_
+* Quote value to fix configparser error in interactive environments.
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 775`_
+* Fix upload event binding.
+  `Pull Request 778`_
+* Fix ``_JAVA_OPTIONS`` example in ``job_conf.xml.sample_advanced``.
+  `Pull Request 796`_
+* Fix errors when starting ``./run_tests.sh -with_framework_test_tools -api``;
+  see the sketch after this list.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 800`_
+* Tweak password reset email template to truly be plain text.
+  `Pull Request 812`_
+* Fix returned tuple from ``creating_job``.
+  `Pull Request 817`_
+* Fix database initialization when ``galaxy.ini`` doesn't exist.
+  `Pull Request 822`_
+* Fix default value for optional select fields.
+  `Pull Request 826`_
+* Use dependency handling in ``lib/galaxy/datatypes/``.
+  (Thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 838`_
+* Expose all API keys to admins under ``REMOTE_USER``.
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 873`_
+* Various fixes for data libraries.
+  `Pull Request 878`_
+* Minor fixes to the history UI.
+  `Pull Request 910`_
+* Fix command quoting and remove size from text input for Cut tool.
+  `Pull Request 913`_
+
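+For reference, the corrected invocation from the ``run_tests.sh`` fix above
+is the following (flag spelling exactly as given in the pull request):
+
+.. code-block:: shell
+
+    % ./run_tests.sh -with_framework_test_tools -api
+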
+.. github_links
+.. _Pull Request 370: https://github.com/galaxyproject/galaxy/pull/370
+.. _Pull Request 410: https://github.com/galaxyproject/galaxy/pull/410
+.. _Pull Request 457: https://github.com/galaxyproject/galaxy/pull/457
+.. _Pull Request 462: https://github.com/galaxyproject/galaxy/pull/462
+.. _Pull Request 472: https://github.com/galaxyproject/galaxy/pull/472
+.. _Pull Request 473: https://github.com/galaxyproject/galaxy/pull/473
+.. _Pull Request 478: https://github.com/galaxyproject/galaxy/pull/478
+.. _Pull Request 479: https://github.com/galaxyproject/galaxy/pull/479
+.. _Pull Request 480: https://github.com/galaxyproject/galaxy/pull/480
+.. _Pull Request 481: https://github.com/galaxyproject/galaxy/pull/481
+.. _Pull Request 482: https://github.com/galaxyproject/galaxy/pull/482
+.. _Pull Request 484: https://github.com/galaxyproject/galaxy/pull/484
+.. _Pull Request 485: https://github.com/galaxyproject/galaxy/pull/485
+.. _Pull Request 486: https://github.com/galaxyproject/galaxy/pull/486
+.. _Pull Request 487: https://github.com/galaxyproject/galaxy/pull/487
+.. _Pull Request 488: https://github.com/galaxyproject/galaxy/pull/488
+.. _Pull Request 491: https://github.com/galaxyproject/galaxy/pull/491
+.. _Pull Request 492: https://github.com/galaxyproject/galaxy/pull/492
+.. _Pull Request 493: https://github.com/galaxyproject/galaxy/pull/493
+.. _Pull Request 494: https://github.com/galaxyproject/galaxy/pull/494
+.. _Pull Request 495: https://github.com/galaxyproject/galaxy/pull/495
+.. _Pull Request 496: https://github.com/galaxyproject/galaxy/pull/496
+.. _Pull Request 499: https://github.com/galaxyproject/galaxy/pull/499
+.. _Pull Request 500: https://github.com/galaxyproject/galaxy/pull/500
+.. _Pull Request 509: https://github.com/galaxyproject/galaxy/pull/509
+.. _Pull Request 510: https://github.com/galaxyproject/galaxy/pull/510
+.. _Pull Request 511: https://github.com/galaxyproject/galaxy/pull/511
+.. _Pull Request 512: https://github.com/galaxyproject/galaxy/pull/512
+.. _Pull Request 514: https://github.com/galaxyproject/galaxy/pull/514
+.. _Pull Request 515: https://github.com/galaxyproject/galaxy/pull/515
+.. _Pull Request 516: https://github.com/galaxyproject/galaxy/pull/516
+.. _Pull Request 517: https://github.com/galaxyproject/galaxy/pull/517
+.. _Pull Request 526: https://github.com/galaxyproject/galaxy/pull/526
+.. _Pull Request 527: https://github.com/galaxyproject/galaxy/pull/527
+.. _Pull Request 529: https://github.com/galaxyproject/galaxy/pull/529
+.. _Pull Request 530: https://github.com/galaxyproject/galaxy/pull/530
+.. _Pull Request 532: https://github.com/galaxyproject/galaxy/pull/532
+.. _Pull Request 533: https://github.com/galaxyproject/galaxy/pull/533
+.. _Pull Request 536: https://github.com/galaxyproject/galaxy/pull/536
+.. _Pull Request 537: https://github.com/galaxyproject/galaxy/pull/537
+.. _Pull Request 538: https://github.com/galaxyproject/galaxy/pull/538
+.. _Pull Request 539: https://github.com/galaxyproject/galaxy/pull/539
+.. _Pull Request 540: https://github.com/galaxyproject/galaxy/pull/540
+.. _Pull Request 543: https://github.com/galaxyproject/galaxy/pull/543
+.. _Pull Request 544: https://github.com/galaxyproject/galaxy/pull/544
+.. _Pull Request 545: https://github.com/galaxyproject/galaxy/pull/545
+.. _Pull Request 546: https://github.com/galaxyproject/galaxy/pull/546
+.. _Pull Request 550: https://github.com/galaxyproject/galaxy/pull/550
+.. _Pull Request 553: https://github.com/galaxyproject/galaxy/pull/553
+.. _Pull Request 556: https://github.com/galaxyproject/galaxy/pull/556
+.. _Pull Request 561: https://github.com/galaxyproject/galaxy/pull/561
+.. _Pull Request 565: https://github.com/galaxyproject/galaxy/pull/565
+.. _Pull Request 568: https://github.com/galaxyproject/galaxy/pull/568
+.. _Pull Request 569: https://github.com/galaxyproject/galaxy/pull/569
+.. _Pull Request 574: https://github.com/galaxyproject/galaxy/pull/574
+.. _Pull Request 575: https://github.com/galaxyproject/galaxy/pull/575
+.. _Pull Request 576: https://github.com/galaxyproject/galaxy/pull/576
+.. _Pull Request 577: https://github.com/galaxyproject/galaxy/pull/577
+.. _Pull Request 578: https://github.com/galaxyproject/galaxy/pull/578
+.. _Pull Request 581: https://github.com/galaxyproject/galaxy/pull/581
+.. _Pull Request 582: https://github.com/galaxyproject/galaxy/pull/582
+.. _Pull Request 583: https://github.com/galaxyproject/galaxy/pull/583
+.. _Pull Request 584: https://github.com/galaxyproject/galaxy/pull/584
+.. _Pull Request 585: https://github.com/galaxyproject/galaxy/pull/585
+.. _Pull Request 586: https://github.com/galaxyproject/galaxy/pull/586
+.. _Pull Request 587: https://github.com/galaxyproject/galaxy/pull/587
+.. _Pull Request 588: https://github.com/galaxyproject/galaxy/pull/588
+.. _Pull Request 589: https://github.com/galaxyproject/galaxy/pull/589
+.. _Pull Request 591: https://github.com/galaxyproject/galaxy/pull/591
+.. _Pull Request 592: https://github.com/galaxyproject/galaxy/pull/592
+.. _Pull Request 594: https://github.com/galaxyproject/galaxy/pull/594
+.. _Pull Request 596: https://github.com/galaxyproject/galaxy/pull/596
+.. _Pull Request 597: https://github.com/galaxyproject/galaxy/pull/597
+.. _Pull Request 601: https://github.com/galaxyproject/galaxy/pull/601
+.. _Pull Request 604: https://github.com/galaxyproject/galaxy/pull/604
+.. _Pull Request 605: https://github.com/galaxyproject/galaxy/pull/605
+.. _Pull Request 607: https://github.com/galaxyproject/galaxy/pull/607
+.. _Pull Request 609: https://github.com/galaxyproject/galaxy/pull/609
+.. _Pull Request 611: https://github.com/galaxyproject/galaxy/pull/611
+.. _Pull Request 612: https://github.com/galaxyproject/galaxy/pull/612
+.. _Pull Request 613: https://github.com/galaxyproject/galaxy/pull/613
+.. _Pull Request 614: https://github.com/galaxyproject/galaxy/pull/614
+.. _Pull Request 617: https://github.com/galaxyproject/galaxy/pull/617
+.. _Pull Request 619: https://github.com/galaxyproject/galaxy/pull/619
+.. _Pull Request 620: https://github.com/galaxyproject/galaxy/pull/620
+.. _Pull Request 626: https://github.com/galaxyproject/galaxy/pull/626
+.. _Pull Request 628: https://github.com/galaxyproject/galaxy/pull/628
+.. _Pull Request 632: https://github.com/galaxyproject/galaxy/pull/632
+.. _Pull Request 640: https://github.com/galaxyproject/galaxy/pull/640
+.. _Pull Request 646: https://github.com/galaxyproject/galaxy/pull/646
+.. _Pull Request 648: https://github.com/galaxyproject/galaxy/pull/648
+.. _Pull Request 650: https://github.com/galaxyproject/galaxy/pull/650
+.. _Pull Request 651: https://github.com/galaxyproject/galaxy/pull/651
+.. _Pull Request 652: https://github.com/galaxyproject/galaxy/pull/652
+.. _Pull Request 653: https://github.com/galaxyproject/galaxy/pull/653
+.. _Pull Request 654: https://github.com/galaxyproject/galaxy/pull/654
+.. _Pull Request 655: https://github.com/galaxyproject/galaxy/pull/655
+.. _Pull Request 656: https://github.com/galaxyproject/galaxy/pull/656
+.. _Pull Request 660: https://github.com/galaxyproject/galaxy/pull/660
+.. _Pull Request 661: https://github.com/galaxyproject/galaxy/pull/661
+.. _Pull Request 662: https://github.com/galaxyproject/galaxy/pull/662
+.. _Pull Request 665: https://github.com/galaxyproject/galaxy/pull/665
+.. _Pull Request 669: https://github.com/galaxyproject/galaxy/pull/669
+.. _Pull Request 671: https://github.com/galaxyproject/galaxy/pull/671
+.. _Pull Request 675: https://github.com/galaxyproject/galaxy/pull/675
+.. _Pull Request 676: https://github.com/galaxyproject/galaxy/pull/676
+.. _Pull Request 678: https://github.com/galaxyproject/galaxy/pull/678
+.. _Pull Request 681: https://github.com/galaxyproject/galaxy/pull/681
+.. _Pull Request 682: https://github.com/galaxyproject/galaxy/pull/682
+.. _Pull Request 684: https://github.com/galaxyproject/galaxy/pull/684
+.. _Pull Request 685: https://github.com/galaxyproject/galaxy/pull/685
+.. _Pull Request 688: https://github.com/galaxyproject/galaxy/pull/688
+.. _Pull Request 690: https://github.com/galaxyproject/galaxy/pull/690
+.. _Pull Request 693: https://github.com/galaxyproject/galaxy/pull/693
+.. _Pull Request 694: https://github.com/galaxyproject/galaxy/pull/694
+.. _Pull Request 695: https://github.com/galaxyproject/galaxy/pull/695
+.. _Pull Request 696: https://github.com/galaxyproject/galaxy/pull/696
+.. _Pull Request 698: https://github.com/galaxyproject/galaxy/pull/698
+.. _Pull Request 701: https://github.com/galaxyproject/galaxy/pull/701
+.. _Pull Request 702: https://github.com/galaxyproject/galaxy/pull/702
+.. _Pull Request 703: https://github.com/galaxyproject/galaxy/pull/703
+.. _Pull Request 705: https://github.com/galaxyproject/galaxy/pull/705
+.. _Pull Request 706: https://github.com/galaxyproject/galaxy/pull/706
+.. _Pull Request 708: https://github.com/galaxyproject/galaxy/pull/708
+.. _Pull Request 709: https://github.com/galaxyproject/galaxy/pull/709
+.. _Pull Request 710: https://github.com/galaxyproject/galaxy/pull/710
+.. _Pull Request 711: https://github.com/galaxyproject/galaxy/pull/711
+.. _Pull Request 712: https://github.com/galaxyproject/galaxy/pull/712
+.. _Pull Request 720: https://github.com/galaxyproject/galaxy/pull/720
+.. _Pull Request 721: https://github.com/galaxyproject/galaxy/pull/721
+.. _Pull Request 722: https://github.com/galaxyproject/galaxy/pull/722
+.. _Pull Request 725: https://github.com/galaxyproject/galaxy/pull/725
+.. _Pull Request 726: https://github.com/galaxyproject/galaxy/pull/726
+.. _Pull Request 727: https://github.com/galaxyproject/galaxy/pull/727
+.. _Pull Request 728: https://github.com/galaxyproject/galaxy/pull/728
+.. _Pull Request 729: https://github.com/galaxyproject/galaxy/pull/729
+.. _Pull Request 730: https://github.com/galaxyproject/galaxy/pull/730
+.. _Pull Request 731: https://github.com/galaxyproject/galaxy/pull/731
+.. _Pull Request 732: https://github.com/galaxyproject/galaxy/pull/732
+.. _Pull Request 735: https://github.com/galaxyproject/galaxy/pull/735
+.. _Pull Request 736: https://github.com/galaxyproject/galaxy/pull/736
+.. _Pull Request 738: https://github.com/galaxyproject/galaxy/pull/738
+.. _Pull Request 739: https://github.com/galaxyproject/galaxy/pull/739
+.. _Pull Request 741: https://github.com/galaxyproject/galaxy/pull/741
+.. _Pull Request 742: https://github.com/galaxyproject/galaxy/pull/742
+.. _Pull Request 748: https://github.com/galaxyproject/galaxy/pull/748
+.. _Pull Request 751: https://github.com/galaxyproject/galaxy/pull/751
+.. _Pull Request 752: https://github.com/galaxyproject/galaxy/pull/752
+.. _Pull Request 755: https://github.com/galaxyproject/galaxy/pull/755
+.. _Pull Request 757: https://github.com/galaxyproject/galaxy/pull/757
+.. _Pull Request 758: https://github.com/galaxyproject/galaxy/pull/758
+.. _Pull Request 759: https://github.com/galaxyproject/galaxy/pull/759
+.. _Pull Request 760: https://github.com/galaxyproject/galaxy/pull/760
+.. _Pull Request 761: https://github.com/galaxyproject/galaxy/pull/761
+.. _Pull Request 763: https://github.com/galaxyproject/galaxy/pull/763
+.. _Pull Request 764: https://github.com/galaxyproject/galaxy/pull/764
+.. _Pull Request 765: https://github.com/galaxyproject/galaxy/pull/765
+.. _Pull Request 766: https://github.com/galaxyproject/galaxy/pull/766
+.. _Pull Request 770: https://github.com/galaxyproject/galaxy/pull/770
+.. _Pull Request 771: https://github.com/galaxyproject/galaxy/pull/771
+.. _Pull Request 772: https://github.com/galaxyproject/galaxy/pull/772
+.. _Pull Request 775: https://github.com/galaxyproject/galaxy/pull/775
+.. _Pull Request 777: https://github.com/galaxyproject/galaxy/pull/777
+.. _Pull Request 778: https://github.com/galaxyproject/galaxy/pull/778
+.. _Pull Request 782: https://github.com/galaxyproject/galaxy/pull/782
+.. _Pull Request 784: https://github.com/galaxyproject/galaxy/pull/784
+.. _Pull Request 786: https://github.com/galaxyproject/galaxy/pull/786
+.. _Pull Request 790: https://github.com/galaxyproject/galaxy/pull/790
+.. _Pull Request 791: https://github.com/galaxyproject/galaxy/pull/791
+.. _Pull Request 793: https://github.com/galaxyproject/galaxy/pull/793
+.. _Pull Request 796: https://github.com/galaxyproject/galaxy/pull/796
+.. _Pull Request 799: https://github.com/galaxyproject/galaxy/pull/799
+.. _Pull Request 800: https://github.com/galaxyproject/galaxy/pull/800
+.. _Pull Request 803: https://github.com/galaxyproject/galaxy/pull/803
+.. _Pull Request 806: https://github.com/galaxyproject/galaxy/pull/806
+.. _Pull Request 807: https://github.com/galaxyproject/galaxy/pull/807
+.. _Pull Request 812: https://github.com/galaxyproject/galaxy/pull/812
+.. _Pull Request 813: https://github.com/galaxyproject/galaxy/pull/813
+.. _Pull Request 817: https://github.com/galaxyproject/galaxy/pull/817
+.. _Pull Request 819: https://github.com/galaxyproject/galaxy/pull/819
+.. _Pull Request 820: https://github.com/galaxyproject/galaxy/pull/820
+.. _Pull Request 821: https://github.com/galaxyproject/galaxy/pull/821
+.. _Pull Request 822: https://github.com/galaxyproject/galaxy/pull/822
+.. _Pull Request 824: https://github.com/galaxyproject/galaxy/pull/824
+.. _Pull Request 825: https://github.com/galaxyproject/galaxy/pull/825
+.. _Pull Request 826: https://github.com/galaxyproject/galaxy/pull/826
+.. _Pull Request 828: https://github.com/galaxyproject/galaxy/pull/828
+.. _Pull Request 830: https://github.com/galaxyproject/galaxy/pull/830
+.. _Pull Request 838: https://github.com/galaxyproject/galaxy/pull/838
+.. _Pull Request 839: https://github.com/galaxyproject/galaxy/pull/839
+.. _Pull Request 840: https://github.com/galaxyproject/galaxy/pull/840
+.. _Pull Request 842: https://github.com/galaxyproject/galaxy/pull/842
+.. _Pull Request 843: https://github.com/galaxyproject/galaxy/pull/843
+.. _Pull Request 844: https://github.com/galaxyproject/galaxy/pull/844
+.. _Pull Request 846: https://github.com/galaxyproject/galaxy/pull/846
+.. _Pull Request 847: https://github.com/galaxyproject/galaxy/pull/847
+.. _Pull Request 848: https://github.com/galaxyproject/galaxy/pull/848
+.. _Pull Request 849: https://github.com/galaxyproject/galaxy/pull/849
+.. _Pull Request 854: https://github.com/galaxyproject/galaxy/pull/854
+.. _Pull Request 873: https://github.com/galaxyproject/galaxy/pull/873
+.. _Pull Request 878: https://github.com/galaxyproject/galaxy/pull/878
+.. _Pull Request 879: https://github.com/galaxyproject/galaxy/pull/879
+.. _Pull Request 910: https://github.com/galaxyproject/galaxy/pull/910
+.. _Pull Request 913: https://github.com/galaxyproject/galaxy/pull/913
diff --git a/doc/source/releases/15.10_announce.rst b/doc/source/releases/15.10_announce.rst
new file mode 100644
index 0000000..9869f3b
--- /dev/null
+++ b/doc/source/releases/15.10_announce.rst
@@ -0,0 +1,85 @@
+
+===========================================================
+October 2015 Galaxy Release (v 15.10)
+===========================================================
+
+.. include:: _header.rst
+
+Highlights
+===========================================================
+
+**Reports Application**
+  The reports web application has been greatly enhanced - including
+  improved styling, new landing page, more reports, pagination, and
+  sparklines. Huge thanks to Daniel Bouchard
+  (`@Airistotal <https://github.com/Airistotal>`__) for these
+  enhancements.
+
+**Upload**
+  The Galaxy upload widget now features support for composite datatypes and
+  improved styling.
+
+**Data Libraries**
+  Improved API and UI for data libraries - including support for library
+  folder management and search.
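+
+  As a rough sketch of the API side (``$GALAXY_URL`` and ``$API_KEY`` are
+  placeholders for your instance URL and a user's API key), data libraries
+  can be listed with a plain HTTP call:
+
+  .. code-block:: shell
+
+      % curl "$GALAXY_URL/api/libraries?key=$API_KEY"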
+
+`GitHub <https://github.com/galaxyproject/galaxy>`__
+===========================================================
+
+New
+  .. code-block:: shell
+
+      % git clone -b master https://github.com/galaxyproject/galaxy.git
+
+Update to latest stable release
+  .. code-block:: shell
+
+      % git checkout master && git pull --ff-only origin master
+
+Update to exact version
+  .. code-block:: shell
+
+      % git checkout v15.10
+
+
+`BitBucket <https://bitbucket.org/galaxy/galaxy-dist>`__
+===========================================================
+
+Upgrade
+  .. code-block:: shell
+
+      % hg pull
+      % hg update latest_15.10
+
+
+See `our wiki <https://wiki.galaxyproject.org/Develop/SourceCode>`__ for additional details regarding the source code locations.
+
+
+Deprecation Notices
+===========================================================
+
+The Mercurial repository at https://bitbucket.org/galaxy/galaxy-dist is deprecated.
+**We recommend that deployers switch their instances to git** and follow the
+`master` branch of the GitHub repository at https://github.com/galaxyproject/galaxy.
+Details are available at https://wiki.galaxyproject.org/Develop/SourceCode.
+*The next few releases will still be available on Bitbucket, but they may be
+less up to date than the corresponding GitHub branches.*
+
+The **old UI of Data Libraries is deprecated** and will be removed in the
+next release of Galaxy. The current Data Libraries Beta will replace it.
+The Data Library menu items were renamed accordingly.
+
+The **graphview visualization is deprecated** and will be replaced in the next
+release of Galaxy.
+
+Direct access to **Tool Shed repositories through the Mercurial API is deprecated**.
+It will be removed in a future release. Please use Planemo for uploading to the
+Tool Shed instead.
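+
+As an illustration (a sketch assuming Planemo is installed and configured with
+your Tool Shed API key; ``--shed_target toolshed`` targets the main Tool Shed),
+an update that previously used ``hg push`` becomes:
+
+.. code-block:: shell
+
+    % # Run from the root of the tool repository working directory.
+    % planemo shed_update --shed_target toolshed .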
+
+Release Notes
+===========================================================
+
+.. include:: 15.10.rst
+   :start-after: enhancements
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/16.01.rst b/doc/source/releases/16.01.rst
new file mode 100644
index 0000000..1fd79ee
--- /dev/null
+++ b/doc/source/releases/16.01.rst
@@ -0,0 +1,782 @@
+
+.. to_doc
+
+-------------------------------
+16.01
+-------------------------------
+
+.. announce_start
+
+Enhancements
+-------------------------------
+
+.. major_feature
+
+* Interactive tours (with significant help from
+  `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1422`_, `Pull Request 1464`_
+* Replace Galaxy eggs dependency management with wheels.
+  `Pull Request 428`_, `Pull Request 989`_, `Pull Request 988`_,
+  `Pull Request 1389`_, `Pull Request 1485`_, `Pull Request 995`_,
+  `Pull Request 996`_, `Pull Request 1006`_, `Pull Request 1017`_,
+  `Pull Request 1037`_, `Pull Request 1495`_
+* Implement nested workflows.
+  `Pull Request 1306`_
+
+.. feature
+
+* Use Webpack to greatly optimize JavaScript and other assets served by Galaxy.
+  `Pull Request 1144`_
+* Overhaul Galaxy analysis view toward a more modular JavaScript architecture.
+  `Pull Request 706`_, `Pull Request 1184`_
+* Add BAM iobio_ visualization as a Galaxy Interactive Environment
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1131`_
+* Add Jupyter Interactive Environment
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1400`_
+* Procedures for bug and issue handling as well as roadmap management.
+  `Pull Request 902`_, `Pull Request 1020`_
+* Implement a conda dependency resolver (with significant help from
+  `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1345`_, `Pull Request 1527`_, `Pull Request 1421`_,
+  `Pull Request 1409`_
+* Many ToolBox improvements, including support for labels on individual
+  tools, monitoring and automatic reloading of ``tool_conf.xml`` files, and
+  specification of such files in JSON/YAML.
+  `Pull Request 1012`_, `Pull Request 1398`_
+* Configurable client side logging.
+  `Pull Request 1011`_
+* Allow input collections to specify multiple ``collection_type``\ s.
+  `Pull Request 1308`_
+* Allow multiple collections to be supplied to a multiple data parameter.
+  `Pull Request 805`_
+* Implement ``type_source`` on output collections.
+  `Pull Request 1153`_
+* Allow tools to request a special configuration file containing tool parameters
+  dumped to JSON.
+  `Pull Request 1405`_
+* Add search on username/any for API calls to user and use Select2 for sharing
+  workflows/pages/histories
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1111`_
+* Improve masthead and scratchbook UI and architecture.
+  `Pull Request 1407`_, `Pull Request 1424`_
+* Add support for constructive solid geometry datatypes ``ply`` and ``vtk``.
+  (Thanks to `@gregvonkuster <https://github.com/gregvonkuster>`__.)
+  `Pull Request 905`_, `Pull Request 1211`_
+* Add searchgui datatype
+  (thanks to `@jj-umn <https://github.com/jj-umn>`__.)
+  `Pull Request 914`_
+* Add BIOM v1 datatype
+  (thanks to `@fescudie <https://github.com/fescudie>`__.)
+  `Pull Request 950`_, `Pull Request 1198`_
+* Add CRAM datatype 
+  (thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__.)
+  `Pull Request 1108`_, `Pull Request 1182`_
+* Add UniProtXML datatype
+  (thanks to `@jj-umn <https://github.com/jj-umn>`__.)
+  `Pull Request 1004`_
+* Avoid confusion by adding the PDF datatype to the list of uploadable file
+  types.
+  `Pull Request 901`_
+* Add idpDB and HDF5 sniffers and fix MzSQlite sniffer
+  (thanks to `@chambm <https://github.com/chambm>`__.)
+  `Pull Request 1209`_
+* Improve tool shed installation API for installation of repositories if they
+  are already installed
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1095`_
+* Prevent GIEs from being closed without user confirmation
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1178`_
+* Add the constructive solid geometry (CSG) viewer visualization plugin
+  (thanks to `@gregvonkuster <https://github.com/gregvonkuster>`__.)
+  `Pull Request 1254`_
+* Add Google analytics tracking to new data libraries.
+  `Pull Request 959`_
+* Extend users API with delete action
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1063`_
+* Finish swapping unencoded ids with order_index in workflows API.
+  `Pull Request 1137`_
+
+.. enhancement
+
+* Add documentation and extra options to Environment Modules resolver
+  (thanks to `@pvanheus <https://github.com/pvanheus>`__.)
+  `Pull Request 1221`_
+* Documentation for dependency resolvers
+  (thanks to `@pvanheus <https://github.com/pvanheus>`__.)
+  `Pull Request 1296`_
+* Populate env.sh files created via tool shed installations with ``X_ROOT_DIR``
+  environment variables.
+  `Pull Request 564`_, `Pull Request 889`_
+* Reorganize history menu
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 985`_
+* Look up installed tools from different toolshed(s)
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 818`_, `Pull Request 1286`_
+* Documentation updates for ObjectStore
+  (thanks to `@mr-c <https://github.com/mr-c>`__.)
+  `Pull Request 853`_
+* Update JavaScript dependency Dynatree.
+  `Pull Request 856`_
+* Eliminate LWR from Galaxy (you will need to upgrade to Pulsar).
+  `Pull Request 857`_
+* Isolate tool commands by default.
+  `Pull Request 1412`_, `Pull Request 1494`_
+* Allow override of job shell (for conda resolver).
+  `Pull Request 1473`_
+* Slight tweak to sidebar section naming from 'security' to 'user management'.
+  `Pull Request 877`_
+* Fix the issue where too many datasets are linked to a certain role
+  (thanks to `@scholtalbers <https://github.com/scholtalbers>`__.)
+  `Pull Request 881`_
+* Fix R dependency installation to be more portable
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 893`_
+* Reorder workflow inputs to top
+  (thanks to `@scholtalbers <https://github.com/scholtalbers>`__.)
+  `Pull Request 912`_
+* Add an API script to upload directory as a data library
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 918`_
+* Enable dockerized toolshed tests.
+  `Pull Request 942`_
+* Improvements and fixes for abstract tool interface.
+  `Pull Request 955`_
+* Remove remaining references to galaxy.eggs and flake8 some tools
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 990`_
+* Apply stricter flake8 rules for the directories shared with Pulsar
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1010`_
+* Updates to default welcome page.
+  `Pull Request 1013`_
+* Revise deferred queue, use jQuery promises.
+  `Pull Request 1018`_
+* Keep select field open in tool form multi select fields to ease selecting many
+  options quickly.
+  `Pull Request 1019`_
+* Allow resizing of regular multi-select boxes.
+  `Pull Request 1025`_
+* Add error message for wrongly formatted data in wig_to_bigwig
+  (thanks to `@scholtalbers <https://github.com/scholtalbers>`__.)
+  `Pull Request 1033`_
+* Verify len files during download from UCSC
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1066`_
+* Add biocrusoe to contributors
+  `Pull Request 1067`_
+* Outline "Format 2" workflow definitions.
+  `Pull Request 1096`_
+* Improve the API attributes display on parameters page.
+  `Pull Request 1098`_
+* Update run_tests.sh help for recent changes
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1101`_
+* Show welcome page with required login.
+  `Pull Request 1105`_
+* Allow accessing collection elements in format_source.
+  (with significant help from `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1113`_, `Pull Request 1155`_
+* Add nginx config for GIE proxy to documentation.
+  `Pull Request 1123`_
+* Enforce client build deps are up-to-date.
+  `Pull Request 1130`_
+* Menu onclick addition.
+  `Pull Request 1142`_
+* Ensure confirmation when leaving GIE windows.
+  `Pull Request 1157`_
+* Add local grunt-cli dependency for qunit tests.
+  `Pull Request 1159`_
+* Tighten up GG sniffing
+  `Pull Request 864`_
+* Use common exceptions in tools.
+  `Pull Request 874`_
+* Refactor the remote user middleware to reduce complexity
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 875`_
+* Allowed for capitalized package names such as R in unlinked tool shed
+  dependency resolver
+  (thanks to `@Christian-B <https://github.com/Christian-B>`__.)
+  `Pull Request 1160`_
+* A slew of tool execution performance optimizations including a huge
+  fix from `@ericenns <https://github.com/ericenns>`__.
+  `Pull Request 1166`_, `Pull Request 1163`_, `Pull Request 1199`_,
+  `Pull Request 1212`_
+* Do not check for tool migrations when running tests.
+  `Pull Request 1176`_
+* Lazy load HDA/LDDA metadata to speed up history loading
+  (thanks to `@abretaud <https://github.com/abretaud>`__.)
+  `Pull Request 1179`_
+* Add debug statement in output checker for why job is failing.
+  `Pull Request 1213`_
+* Small enhancements in workflow inputs and outputs.
+  `Pull Request 1214`_
+* More small workflow tweaks.
+  `Pull Request 1216`_
+* Add Workflow editor UI for step labels.
+  `Pull Request 1251`_
+* Ease the grunt uglify process
+  (Thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1238`_
+* Add beta run workflow form based on the newer tool form code.
+  `Pull Request 1249`_
+* During tool migrations, do not mess with galaxy_config_file if provided
+  (thanks to `@openlangrid <https://github.com/openlangrid>`__ and `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1230`_, `Pull Request 1253`_
+* In the users API, properly return a boolean from has_requests
+  `Pull Request 1262`_
+* Extend the logging of tool dependency status changes
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1263`_
+* Remove history_options and options.mako (history options as a page).
+  `Pull Request 1271`_
+* Minor mail config cleanup
+  `Pull Request 1299`_
+* Replace uses of history.imp and history.copy web methods for API create
+  `Pull Request 1303`_
+* More tightening up of parameter validation during workflow handling.
+  `Pull Request 1319`_
+* Show a more user-friendly error when the webserver port is blocked
+  (thanks to `@shano <https://github.com/shano>`__.)
+  `Pull Request 1320`_
+* Upgrade Paste to 2.0.2.
+  `Pull Request 1344`_
+* A series of small refactorings enabling use of certain Galaxy modules as
+  a stand-alone Python library with minimal dependencies and Python 3
+  compatibility.
+  `Pull Request 1350`_, `Pull Request 1351`_, `Pull Request 1352`_,
+  `Pull Request 1359`_, `Pull Request 1362`_, `Pull Request 1376`_,
+  `Pull Request 1413`_, `Pull Request 1427`_, `Pull Request 1363`_,
+  `Pull Request 1367`_, `Pull Request 1377`_, `Pull Request 1388`_,
+  `Pull Request 1448`_
+* Adding UUID support to directory_hash_id
+  (thanks to `@kellrott <https://github.com/kellrott>`__.)
+  `Pull Request 1397`_
+* Unify and abstract code for checking if a file looks like a tool definition.
+  `Pull Request 1368`_
+* Added hashes to pip requirements file
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1393`_
+* Interactive environment updates
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1399`_
+* Change scratchbook close icon.
+  `Pull Request 1425`_
+* Rename reports_wsgi.ini to reports.ini
+  (thanks to `@souravsingh <https://github.com/souravsingh>`__.)
+  `Pull Request 1181`_
+
+.. small_enhancement
+
+* Attempt to fix more transiently failing API tests.
+  `Pull Request 859`_, `Pull Request 865`_  
+* Fix data library test case.
+  `Pull Request 898`_
+* Remove ``test_map_over_two_collections_legacy`` test case as it is obsolete.
+  `Pull Request 924`_
+* Version the testing-base docker image.
+  `Pull Request 938`_
+* Update casperjs functional tests.
+  `Pull Request 944`_
+* Fix the docker db client_encoding to not be ascii (default).
+  `Pull Request 952`_
+* Use the database temp directory to store the Mako template cache when
+  running framework tests.
+  `Pull Request 956`_
+* Rename ``lib/galaxy/main.py`` to ``scripts/galaxy-main``.
+  `Pull Request 994`_
+* Fix deferred dom removal.
+  `Pull Request 997`_
+* Options to more easily test esoteric tooling options.
+  `Pull Request 1014`_
+* Fix log statements and link to logger.
+  `Pull Request 1040`_
+* Add more data libraries API tests.
+  `Pull Request 1074`_
+* Remove upload unused function parameter
+  (thanks to `@einon <https://github.com/einon>`__.)
+  `Pull Request 1078`_
+* Update testing docker image.
+  `Pull Request 1083`_
+* Small tool and workflow refactorings.
+  `Pull Request 1097`_
+* Revise and fix waiting for tool tests.
+  `Pull Request 1119`_
+* Small Tool and Workflow Refactoring and Fixes
+  `Pull Request 1202`_
+* Wait on jobs and history in certain API test cases.
+  `Pull Request 1226`_
+* Fix qunit shim to match base_panels.mako shim.
+  `Pull Request 1233`_
+* Improved logging related to tool test timeouts.
+  `Pull Request 1243`_
+* Refactor generic side workflow editor panel toward backbone.
+  `Pull Request 1247`_
+* Attempt to fix transiently failing tool test on Jenkins.
+  `Pull Request 1248`_
+* Set client_encoding for TS dockerized test db
+  `Pull Request 1276`_
+* Check for sessionStorage using a more cross-browser way.
+  `Pull Request 1279`_
+* Small API test improvements.
+  `Pull Request 1285`_
+* Enforce metrics related to moving toward modern client infrastructure.
+  `Pull Request 1292`_
+* Refactor tool stuff for generic model actions.
+  `Pull Request 1307`_
+* Remove workflow casperjs API test duplicating test coverage of API tests.
+  `Pull Request 1316`_
+* Refactor tool parsing handling toward reuse outside Galaxy.
+  `Pull Request 1349`_, `Pull Request 1353`_
+* Lint with Python 3 several modules.
+  `Pull Request 1354`_
+* Fix and potential fix for transiently failing tests.
+  `Pull Request 1401`_
+* Fix CasperJS tests.
+  `Pull Request 1438`_, `Pull Request 1439`_
+* Add a log.warn() if an extracted file does not exist when changing permissions.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 939`_
+* Add a safe_relpath util function for ensuring a path does not reference an absolute or parent directory.
+  `Commit f540a16`_
+
+Security
+-------------------------------
+
+.. security
+
+* Security fixes for history imports.
+  `Commit bf1c77d`_
+* Security fixes for object store paths.
+  `Commit 5da91bd`_
+* Remove sample tracking manual external service transfer due to security concerns.
+  `Commit cd8b965`_
+* Security fixes for tool shed repository browsing.
+  `Commit e4a1d57`_
+* Security fixes for tool shed hg push and capsule/tarball uploads.
+  `Commit e845d64`_
+
+Fixes
+-------------------------------
+
+.. major_bug
+
+.. bug
+
+* Add check for ``HTTP_GX_SECRET``
+  (thanks to `@golharam <https://github.com/golharam>`__.)
+  `Pull Request 827`_
+* Test if a parameter is present in the value to avoid an index error in
+  tool state handling
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 863`_
+* Bump version of sqlite3 in nodejs proxy
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 866`_
+* Expose API keys to admins under ``REMOTE_USER``
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 872`_
+* Bug fix for tool shed repository API
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 891`_
+* Add a workaround to return a proper error code during R
+  package installation
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 892`_
+* Various Quota bug fixes.
+  `Pull Request 907`_
+* Always set 'tests' for a visualization plugin to avoid an attribute
+  error.
+  `Pull Request 908`_
+* Bug fix for condor runner changes made this release cycle
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 909`_
+* Change user preference datatype to text.
+  `Pull Request 916`_
+* Fix syntax of error parameter of ``tryCatch`` in ``setup_r_environment action``
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 929`_
+* Revise tool URL building.
+  `Pull Request 947`_
+* Fix for code checking if a file seems to be a tool definition file
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 949`_
+* Fix base panels to include scripts as the last element of the body.
+  `Pull Request 969`_
+* Grid batch operation fixes.
+  `Pull Request 971`_
+* Fix extra files path URL problem.
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 991`_
+* Fixes broken env-var declarations for tools with weird chars in their names
+  (thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__.)
+  `Pull Request 1028`_
+* Added gitignore rules to ignore custom tool-data
+  (thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__.)
+  `Pull Request 1048`_
+* Fix import of history datasets into library.
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1049`_
+* Fix for reloading tools that have non-standard tool_ids/versions.
+  `Pull Request 1050`_
+* Improved encoding handling for Jobs.
+  `Pull Request 1052`_
+* Fix lped report output from converter.
+  `Pull Request 1069`_, `Pull Request 1070`_, `Pull Request 1072`_
+* Trivial: Replace unnecessary duplicated var check with 'else if'
+  (thanks to `@einon <https://github.com/einon>`__.)
+  `Pull Request 1073`_
+* Fix a bug in IEs when proxying the proxy.
+  `Pull Request 1076`_
+* Fix 500 error when attempting to update installed repository.
+  `Pull Request 1082`_
+* Resolve conflicting label CSS class for trackster.
+  `Pull Request 1086`_
+* Fix bug with referrer attribute type change in WebOb.
+  `Pull Request 1091`_
+* Fix API TS installation
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1094`_
+* Better error when tool shed repository directory is missing
+  (thanks to `@lparsons <https://github.com/lparsons>`__.)
+  `Pull Request 1107`_
+* Don't let ``$input`` hang Cheetah evaluation.
+  `Pull Request 1117`_
+* Fix for re-installing an uninstalled TS repository with a dependency
+  (Thanks to `@gregvonkuster <https://github.com/gregvonkuster>`__.)
+  `Pull Request 1154`_
+* Fix interface and usage of ``WorkflowModule.get_runtime_inputs``.
+  `Pull Request 1174`_
+* Add enhancements to the Galaxy repository install process
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1193`_
+* Tool shed fixes
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1200`_
+* Fix for updating tool parameter dicts when a new parameter has been added to
+  a section.
+  `Pull Request 1215`_
+* Replace the defunct readthedocs badge.
+  `Pull Request 1229`_
+* Export ``GALAXY_TEST_DBURI`` as ``GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION``
+  before installing wheels.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1231`_
+* Fix passing of nginx_upload_path and ftp_upload_site.
+  `Pull Request 1250`_
+* Fixed indentation errors for reports app
+  (thanks to `@markiskander <https://github.com/markiskander>`__.)
+  `Pull Request 1259`_
+* Open select2 drop down on caret click.
+  `Pull Request 1298`_
+* Improved validation of tools during workflow execution.
+  `Pull Request 1302`_
+* Properly remove datasets from the filtered lists when pairing datasets
+  for the paired dataset list creator.
+  `Pull Request 1310`_
+* Update Kombu and AMQP wheels to fix problems with El Capitan's System
+  Integrity Protection.
+  `Pull Request 1327`_
+* Fix for creating workflow outputs on initial workflow upload.
+  `Pull Request 1330`_
+* Don't query on unencoded IDs for error form
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1340`_
+* If ``GALAXY_SLOTS`` is defined in the environment, use it for the local runner.
+  `Pull Request 1346`_
+* Use both ``SLURM_NTASKS`` and ``SLURM_CPUS_PER_TASK`` to set ``GALAXY_SLOTS``
+  (thanks to `@lparsons <https://github.com/lparsons>`__.)
+  `Pull Request 1347`_
+* Fix for loading workflows that have tool version / step upgrade messages.
+  `Pull Request 1348`_
+* Allow installation of different repositories with the same name in a single
+  request
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1366`_
+* Copy workflow objects when importing them.
+  `Pull Request 1474`_
+* Undo user icon in masthead.
+  `Pull Request 1493`_
+* Fix mime type when previewing certain tabular data.
+  `Pull Request 1498`_
+* Fix disabled CSS.
+  `Pull Request 1501`_
+* Catch ``Exception`` and properly log errors.
+  `Pull Request 1511`_
+* Fix for workflow validation problem introduced in 15.10.
+  `Pull Request 1536`_,
+  `Issue #1514 <https://github.com/galaxyproject/galaxy/issues/1514>`__
+* Keep track of hidden datasets.
+  `Pull Request 1551`_
+* Force ``--skip-venv`` if we can detect that Python is Conda Python.
+  `Pull Request 1554`_
+* Fix installation of Tool Shed repositories containing non-ASCII characters
+  in the description.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1759`_
+* Fix pretty_print_time_interval for MySQL.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1761`_
+
+.. _iobio: http://iobio.io/
+
+.. github_links
+
+.. _Pull Request 428: https://github.com/galaxyproject/galaxy/pull/428
+.. _Pull Request 564: https://github.com/galaxyproject/galaxy/pull/564
+.. _Pull Request 706: https://github.com/galaxyproject/galaxy/pull/706
+.. _Pull Request 805: https://github.com/galaxyproject/galaxy/pull/805
+.. _Pull Request 818: https://github.com/galaxyproject/galaxy/pull/818
+.. _Pull Request 827: https://github.com/galaxyproject/galaxy/pull/827
+.. _Pull Request 853: https://github.com/galaxyproject/galaxy/pull/853
+.. _Pull Request 856: https://github.com/galaxyproject/galaxy/pull/856
+.. _Pull Request 857: https://github.com/galaxyproject/galaxy/pull/857
+.. _Pull Request 859: https://github.com/galaxyproject/galaxy/pull/859
+.. _Pull Request 863: https://github.com/galaxyproject/galaxy/pull/863
+.. _Pull Request 864: https://github.com/galaxyproject/galaxy/pull/864
+.. _Pull Request 865: https://github.com/galaxyproject/galaxy/pull/865
+.. _Pull Request 866: https://github.com/galaxyproject/galaxy/pull/866
+.. _Pull Request 872: https://github.com/galaxyproject/galaxy/pull/872
+.. _Pull Request 874: https://github.com/galaxyproject/galaxy/pull/874
+.. _Pull Request 875: https://github.com/galaxyproject/galaxy/pull/875
+.. _Pull Request 876: https://github.com/galaxyproject/galaxy/pull/876
+.. _Pull Request 877: https://github.com/galaxyproject/galaxy/pull/877
+.. _Pull Request 881: https://github.com/galaxyproject/galaxy/pull/881
+.. _Pull Request 889: https://github.com/galaxyproject/galaxy/pull/889
+.. _Pull Request 891: https://github.com/galaxyproject/galaxy/pull/891
+.. _Pull Request 892: https://github.com/galaxyproject/galaxy/pull/892
+.. _Pull Request 893: https://github.com/galaxyproject/galaxy/pull/893
+.. _Pull Request 898: https://github.com/galaxyproject/galaxy/pull/898
+.. _Pull Request 901: https://github.com/galaxyproject/galaxy/pull/901
+.. _Pull Request 902: https://github.com/galaxyproject/galaxy/pull/902
+.. _Pull Request 905: https://github.com/galaxyproject/galaxy/pull/905
+.. _Pull Request 907: https://github.com/galaxyproject/galaxy/pull/907
+.. _Pull Request 908: https://github.com/galaxyproject/galaxy/pull/908
+.. _Pull Request 909: https://github.com/galaxyproject/galaxy/pull/909
+.. _Pull Request 912: https://github.com/galaxyproject/galaxy/pull/912
+.. _Pull Request 914: https://github.com/galaxyproject/galaxy/pull/914
+.. _Pull Request 916: https://github.com/galaxyproject/galaxy/pull/916
+.. _Pull Request 918: https://github.com/galaxyproject/galaxy/pull/918
+.. _Pull Request 924: https://github.com/galaxyproject/galaxy/pull/924
+.. _Pull Request 929: https://github.com/galaxyproject/galaxy/pull/929
+.. _Pull Request 938: https://github.com/galaxyproject/galaxy/pull/938
+.. _Pull Request 939: https://github.com/galaxyproject/galaxy/pull/939
+.. _Pull Request 942: https://github.com/galaxyproject/galaxy/pull/942
+.. _Pull Request 944: https://github.com/galaxyproject/galaxy/pull/944
+.. _Pull Request 947: https://github.com/galaxyproject/galaxy/pull/947
+.. _Pull Request 949: https://github.com/galaxyproject/galaxy/pull/949
+.. _Pull Request 950: https://github.com/galaxyproject/galaxy/pull/950
+.. _Pull Request 952: https://github.com/galaxyproject/galaxy/pull/952
+.. _Pull Request 955: https://github.com/galaxyproject/galaxy/pull/955
+.. _Pull Request 956: https://github.com/galaxyproject/galaxy/pull/956
+.. _Pull Request 959: https://github.com/galaxyproject/galaxy/pull/959
+.. _Pull Request 960: https://github.com/galaxyproject/galaxy/pull/960
+.. _Pull Request 969: https://github.com/galaxyproject/galaxy/pull/969
+.. _Pull Request 971: https://github.com/galaxyproject/galaxy/pull/971
+.. _Pull Request 985: https://github.com/galaxyproject/galaxy/pull/985
+.. _Pull Request 988: https://github.com/galaxyproject/galaxy/pull/988
+.. _Pull Request 989: https://github.com/galaxyproject/galaxy/pull/989
+.. _Pull Request 990: https://github.com/galaxyproject/galaxy/pull/990
+.. _Pull Request 991: https://github.com/galaxyproject/galaxy/pull/991
+.. _Pull Request 994: https://github.com/galaxyproject/galaxy/pull/994
+.. _Pull Request 995: https://github.com/galaxyproject/galaxy/pull/995
+.. _Pull Request 996: https://github.com/galaxyproject/galaxy/pull/996
+.. _Pull Request 997: https://github.com/galaxyproject/galaxy/pull/997
+.. _Pull Request 1004: https://github.com/galaxyproject/galaxy/pull/1004
+.. _Pull Request 1006: https://github.com/galaxyproject/galaxy/pull/1006
+.. _Pull Request 1010: https://github.com/galaxyproject/galaxy/pull/1010
+.. _Pull Request 1011: https://github.com/galaxyproject/galaxy/pull/1011
+.. _Pull Request 1012: https://github.com/galaxyproject/galaxy/pull/1012
+.. _Pull Request 1013: https://github.com/galaxyproject/galaxy/pull/1013
+.. _Pull Request 1014: https://github.com/galaxyproject/galaxy/pull/1014
+.. _Pull Request 1017: https://github.com/galaxyproject/galaxy/pull/1017
+.. _Pull Request 1018: https://github.com/galaxyproject/galaxy/pull/1018
+.. _Pull Request 1019: https://github.com/galaxyproject/galaxy/pull/1019
+.. _Pull Request 1020: https://github.com/galaxyproject/galaxy/pull/1020
+.. _Pull Request 1025: https://github.com/galaxyproject/galaxy/pull/1025
+.. _Pull Request 1028: https://github.com/galaxyproject/galaxy/pull/1028
+.. _Pull Request 1033: https://github.com/galaxyproject/galaxy/pull/1033
+.. _Pull Request 1037: https://github.com/galaxyproject/galaxy/pull/1037
+.. _Pull Request 1040: https://github.com/galaxyproject/galaxy/pull/1040
+.. _Pull Request 1048: https://github.com/galaxyproject/galaxy/pull/1048
+.. _Pull Request 1049: https://github.com/galaxyproject/galaxy/pull/1049
+.. _Pull Request 1050: https://github.com/galaxyproject/galaxy/pull/1050
+.. _Pull Request 1052: https://github.com/galaxyproject/galaxy/pull/1052
+.. _Pull Request 1063: https://github.com/galaxyproject/galaxy/pull/1063
+.. _Pull Request 1066: https://github.com/galaxyproject/galaxy/pull/1066
+.. _Pull Request 1067: https://github.com/galaxyproject/galaxy/pull/1067
+.. _Pull Request 1069: https://github.com/galaxyproject/galaxy/pull/1069
+.. _Pull Request 1070: https://github.com/galaxyproject/galaxy/pull/1070
+.. _Pull Request 1072: https://github.com/galaxyproject/galaxy/pull/1072
+.. _Pull Request 1073: https://github.com/galaxyproject/galaxy/pull/1073
+.. _Pull Request 1074: https://github.com/galaxyproject/galaxy/pull/1074
+.. _Pull Request 1076: https://github.com/galaxyproject/galaxy/pull/1076
+.. _Pull Request 1078: https://github.com/galaxyproject/galaxy/pull/1078
+.. _Pull Request 1082: https://github.com/galaxyproject/galaxy/pull/1082
+.. _Pull Request 1083: https://github.com/galaxyproject/galaxy/pull/1083
+.. _Pull Request 1086: https://github.com/galaxyproject/galaxy/pull/1086
+.. _Pull Request 1091: https://github.com/galaxyproject/galaxy/pull/1091
+.. _Pull Request 1094: https://github.com/galaxyproject/galaxy/pull/1094
+.. _Pull Request 1095: https://github.com/galaxyproject/galaxy/pull/1095
+.. _Pull Request 1096: https://github.com/galaxyproject/galaxy/pull/1096
+.. _Pull Request 1097: https://github.com/galaxyproject/galaxy/pull/1097
+.. _Pull Request 1098: https://github.com/galaxyproject/galaxy/pull/1098
+.. _Pull Request 1101: https://github.com/galaxyproject/galaxy/pull/1101
+.. _Pull Request 1105: https://github.com/galaxyproject/galaxy/pull/1105
+.. _Pull Request 1107: https://github.com/galaxyproject/galaxy/pull/1107
+.. _Pull Request 1108: https://github.com/galaxyproject/galaxy/pull/1108
+.. _Pull Request 1111: https://github.com/galaxyproject/galaxy/pull/1111
+.. _Pull Request 1113: https://github.com/galaxyproject/galaxy/pull/1113
+.. _Pull Request 1117: https://github.com/galaxyproject/galaxy/pull/1117
+.. _Pull Request 1119: https://github.com/galaxyproject/galaxy/pull/1119
+.. _Pull Request 1123: https://github.com/galaxyproject/galaxy/pull/1123
+.. _Pull Request 1126: https://github.com/galaxyproject/galaxy/pull/1126
+.. _Pull Request 1130: https://github.com/galaxyproject/galaxy/pull/1130
+.. _Pull Request 1131: https://github.com/galaxyproject/galaxy/pull/1131
+.. _Pull Request 1137: https://github.com/galaxyproject/galaxy/pull/1137
+.. _Pull Request 1142: https://github.com/galaxyproject/galaxy/pull/1142
+.. _Pull Request 1144: https://github.com/galaxyproject/galaxy/pull/1144
+.. _Pull Request 1153: https://github.com/galaxyproject/galaxy/pull/1153
+.. _Pull Request 1154: https://github.com/galaxyproject/galaxy/pull/1154
+.. _Pull Request 1155: https://github.com/galaxyproject/galaxy/pull/1155
+.. _Pull Request 1157: https://github.com/galaxyproject/galaxy/pull/1157
+.. _Pull Request 1159: https://github.com/galaxyproject/galaxy/pull/1159
+.. _Pull Request 1160: https://github.com/galaxyproject/galaxy/pull/1160
+.. _Pull Request 1163: https://github.com/galaxyproject/galaxy/pull/1163
+.. _Pull Request 1166: https://github.com/galaxyproject/galaxy/pull/1166
+.. _Pull Request 1174: https://github.com/galaxyproject/galaxy/pull/1174
+.. _Pull Request 1176: https://github.com/galaxyproject/galaxy/pull/1176
+.. _Pull Request 1178: https://github.com/galaxyproject/galaxy/pull/1178
+.. _Pull Request 1179: https://github.com/galaxyproject/galaxy/pull/1179
+.. _Pull Request 1181: https://github.com/galaxyproject/galaxy/pull/1181
+.. _Pull Request 1182: https://github.com/galaxyproject/galaxy/pull/1182
+.. _Pull Request 1184: https://github.com/galaxyproject/galaxy/pull/1184
+.. _Pull Request 1193: https://github.com/galaxyproject/galaxy/pull/1193
+.. _Pull Request 1198: https://github.com/galaxyproject/galaxy/pull/1198
+.. _Pull Request 1199: https://github.com/galaxyproject/galaxy/pull/1199
+.. _Pull Request 1200: https://github.com/galaxyproject/galaxy/pull/1200
+.. _Pull Request 1202: https://github.com/galaxyproject/galaxy/pull/1202
+.. _Pull Request 1209: https://github.com/galaxyproject/galaxy/pull/1209
+.. _Pull Request 1211: https://github.com/galaxyproject/galaxy/pull/1211
+.. _Pull Request 1212: https://github.com/galaxyproject/galaxy/pull/1212
+.. _Pull Request 1213: https://github.com/galaxyproject/galaxy/pull/1213
+.. _Pull Request 1214: https://github.com/galaxyproject/galaxy/pull/1214
+.. _Pull Request 1215: https://github.com/galaxyproject/galaxy/pull/1215
+.. _Pull Request 1216: https://github.com/galaxyproject/galaxy/pull/1216
+.. _Pull Request 1217: https://github.com/galaxyproject/galaxy/pull/1217
+.. _Pull Request 1221: https://github.com/galaxyproject/galaxy/pull/1221
+.. _Pull Request 1226: https://github.com/galaxyproject/galaxy/pull/1226
+.. _Pull Request 1229: https://github.com/galaxyproject/galaxy/pull/1229
+.. _Pull Request 1230: https://github.com/galaxyproject/galaxy/pull/1230
+.. _Pull Request 1231: https://github.com/galaxyproject/galaxy/pull/1231
+.. _Pull Request 1233: https://github.com/galaxyproject/galaxy/pull/1233
+.. _Pull Request 1238: https://github.com/galaxyproject/galaxy/pull/1238
+.. _Pull Request 1243: https://github.com/galaxyproject/galaxy/pull/1243
+.. _Pull Request 1247: https://github.com/galaxyproject/galaxy/pull/1247
+.. _Pull Request 1248: https://github.com/galaxyproject/galaxy/pull/1248
+.. _Pull Request 1249: https://github.com/galaxyproject/galaxy/pull/1249
+.. _Pull Request 1250: https://github.com/galaxyproject/galaxy/pull/1250
+.. _Pull Request 1251: https://github.com/galaxyproject/galaxy/pull/1251
+.. _Pull Request 1253: https://github.com/galaxyproject/galaxy/pull/1253
+.. _Pull Request 1254: https://github.com/galaxyproject/galaxy/pull/1254
+.. _Pull Request 1259: https://github.com/galaxyproject/galaxy/pull/1259
+.. _Pull Request 1261: https://github.com/galaxyproject/galaxy/pull/1261
+.. _Pull Request 1262: https://github.com/galaxyproject/galaxy/pull/1262
+.. _Pull Request 1263: https://github.com/galaxyproject/galaxy/pull/1263
+.. _Pull Request 1271: https://github.com/galaxyproject/galaxy/pull/1271
+.. _Pull Request 1276: https://github.com/galaxyproject/galaxy/pull/1276
+.. _Pull Request 1277: https://github.com/galaxyproject/galaxy/pull/1277
+.. _Pull Request 1279: https://github.com/galaxyproject/galaxy/pull/1279
+.. _Pull Request 1285: https://github.com/galaxyproject/galaxy/pull/1285
+.. _Pull Request 1286: https://github.com/galaxyproject/galaxy/pull/1286
+.. _Pull Request 1292: https://github.com/galaxyproject/galaxy/pull/1292
+.. _Pull Request 1296: https://github.com/galaxyproject/galaxy/pull/1296
+.. _Pull Request 1298: https://github.com/galaxyproject/galaxy/pull/1298
+.. _Pull Request 1299: https://github.com/galaxyproject/galaxy/pull/1299
+.. _Pull Request 1302: https://github.com/galaxyproject/galaxy/pull/1302
+.. _Pull Request 1303: https://github.com/galaxyproject/galaxy/pull/1303
+.. _Pull Request 1306: https://github.com/galaxyproject/galaxy/pull/1306
+.. _Pull Request 1307: https://github.com/galaxyproject/galaxy/pull/1307
+.. _Pull Request 1308: https://github.com/galaxyproject/galaxy/pull/1308
+.. _Pull Request 1310: https://github.com/galaxyproject/galaxy/pull/1310
+.. _Pull Request 1316: https://github.com/galaxyproject/galaxy/pull/1316
+.. _Pull Request 1319: https://github.com/galaxyproject/galaxy/pull/1319
+.. _Pull Request 1320: https://github.com/galaxyproject/galaxy/pull/1320
+.. _Pull Request 1327: https://github.com/galaxyproject/galaxy/pull/1327
+.. _Pull Request 1330: https://github.com/galaxyproject/galaxy/pull/1330
+.. _Pull Request 1340: https://github.com/galaxyproject/galaxy/pull/1340
+.. _Pull Request 1343: https://github.com/galaxyproject/galaxy/pull/1343
+.. _Pull Request 1344: https://github.com/galaxyproject/galaxy/pull/1344
+.. _Pull Request 1345: https://github.com/galaxyproject/galaxy/pull/1345
+.. _Pull Request 1346: https://github.com/galaxyproject/galaxy/pull/1346
+.. _Pull Request 1347: https://github.com/galaxyproject/galaxy/pull/1347
+.. _Pull Request 1348: https://github.com/galaxyproject/galaxy/pull/1348
+.. _Pull Request 1349: https://github.com/galaxyproject/galaxy/pull/1349
+.. _Pull Request 1350: https://github.com/galaxyproject/galaxy/pull/1350
+.. _Pull Request 1351: https://github.com/galaxyproject/galaxy/pull/1351
+.. _Pull Request 1352: https://github.com/galaxyproject/galaxy/pull/1352
+.. _Pull Request 1353: https://github.com/galaxyproject/galaxy/pull/1353
+.. _Pull Request 1354: https://github.com/galaxyproject/galaxy/pull/1354
+.. _Pull Request 1359: https://github.com/galaxyproject/galaxy/pull/1359
+.. _Pull Request 1362: https://github.com/galaxyproject/galaxy/pull/1362
+.. _Pull Request 1363: https://github.com/galaxyproject/galaxy/pull/1363
+.. _Pull Request 1366: https://github.com/galaxyproject/galaxy/pull/1366
+.. _Pull Request 1367: https://github.com/galaxyproject/galaxy/pull/1367
+.. _Pull Request 1368: https://github.com/galaxyproject/galaxy/pull/1368
+.. _Pull Request 1376: https://github.com/galaxyproject/galaxy/pull/1376
+.. _Pull Request 1377: https://github.com/galaxyproject/galaxy/pull/1377
+.. _Pull Request 1385: https://github.com/galaxyproject/galaxy/pull/1385
+.. _Pull Request 1388: https://github.com/galaxyproject/galaxy/pull/1388
+.. _Pull Request 1389: https://github.com/galaxyproject/galaxy/pull/1389
+.. _Pull Request 1393: https://github.com/galaxyproject/galaxy/pull/1393
+.. _Pull Request 1397: https://github.com/galaxyproject/galaxy/pull/1397
+.. _Pull Request 1398: https://github.com/galaxyproject/galaxy/pull/1398
+.. _Pull Request 1399: https://github.com/galaxyproject/galaxy/pull/1399
+.. _Pull Request 1400: https://github.com/galaxyproject/galaxy/pull/1400
+.. _Pull Request 1401: https://github.com/galaxyproject/galaxy/pull/1401
+.. _Pull Request 1405: https://github.com/galaxyproject/galaxy/pull/1405
+.. _Pull Request 1407: https://github.com/galaxyproject/galaxy/pull/1407
+.. _Pull Request 1409: https://github.com/galaxyproject/galaxy/pull/1409
+.. _Pull Request 1412: https://github.com/galaxyproject/galaxy/pull/1412
+.. _Pull Request 1413: https://github.com/galaxyproject/galaxy/pull/1413
+.. _Pull Request 1414: https://github.com/galaxyproject/galaxy/pull/1414
+.. _Pull Request 1415: https://github.com/galaxyproject/galaxy/pull/1415
+.. _Pull Request 1421: https://github.com/galaxyproject/galaxy/pull/1421
+.. _Pull Request 1422: https://github.com/galaxyproject/galaxy/pull/1422
+.. _Pull Request 1424: https://github.com/galaxyproject/galaxy/pull/1424
+.. _Pull Request 1425: https://github.com/galaxyproject/galaxy/pull/1425
+.. _Pull Request 1427: https://github.com/galaxyproject/galaxy/pull/1427
+.. _Pull Request 1430: https://github.com/galaxyproject/galaxy/pull/1430
+.. _Pull Request 1438: https://github.com/galaxyproject/galaxy/pull/1438
+.. _Pull Request 1439: https://github.com/galaxyproject/galaxy/pull/1439
+.. _Pull Request 1448: https://github.com/galaxyproject/galaxy/pull/1448
+.. _Pull Request 1464: https://github.com/galaxyproject/galaxy/pull/1464
+.. _Pull Request 1473: https://github.com/galaxyproject/galaxy/pull/1473
+.. _Pull Request 1474: https://github.com/galaxyproject/galaxy/pull/1474
+.. _Pull Request 1485: https://github.com/galaxyproject/galaxy/pull/1485
+.. _Pull Request 1487: https://github.com/galaxyproject/galaxy/pull/1487
+.. _Pull Request 1493: https://github.com/galaxyproject/galaxy/pull/1493
+.. _Pull Request 1494: https://github.com/galaxyproject/galaxy/pull/1494
+.. _Pull Request 1495: https://github.com/galaxyproject/galaxy/pull/1495
+.. _Pull Request 1498: https://github.com/galaxyproject/galaxy/pull/1498
+.. _Pull Request 1501: https://github.com/galaxyproject/galaxy/pull/1501
+.. _Pull Request 1511: https://github.com/galaxyproject/galaxy/pull/1511
+.. _Pull Request 1527: https://github.com/galaxyproject/galaxy/pull/1527
+.. _Pull Request 1536: https://github.com/galaxyproject/galaxy/pull/1536
+.. _Pull Request 1551: https://github.com/galaxyproject/galaxy/pull/1551
+.. _Pull Request 1554: https://github.com/galaxyproject/galaxy/pull/1554
+.. _Pull Request 1558: https://github.com/galaxyproject/galaxy/pull/1558
+.. _Pull Request 1759: https://github.com/galaxyproject/galaxy/pull/1759
+.. _Pull Request 1761: https://github.com/galaxyproject/galaxy/pull/1761
+
+.. _Commit f540a16: https://github.com/galaxyproject/galaxy/commit/f540a16768307995ea49c5d241948537ebbfa540
+.. _Commit bf1c77d: https://github.com/galaxyproject/galaxy/commit/bf1c77d171f079f42d481ad465dbaef3bac8b4d4
+.. _Commit 5da91bd: https://github.com/galaxyproject/galaxy/commit/5da91bddcda3ad3a4942e752d9b4bb3e7872046c
+.. _Commit cd8b965: https://github.com/galaxyproject/galaxy/commit/cd8b96553f673145ee64a86d32960ba42643baed
+.. _Commit e4a1d57: https://github.com/galaxyproject/galaxy/commit/e4a1d5727805168a9fd15aca1cdd21630ada2bbc
+.. _Commit e845d64: https://github.com/galaxyproject/galaxy/commit/e845d649c672c51cc2336da0d880c9ff74ea6b5f
diff --git a/doc/source/releases/16.01_announce.rst b/doc/source/releases/16.01_announce.rst
new file mode 100644
index 0000000..60ea986
--- /dev/null
+++ b/doc/source/releases/16.01_announce.rst
@@ -0,0 +1,162 @@
+
+===========================================================
+January 2016 Galaxy Release (v 16.01)
+===========================================================
+
+.. include:: _header.rst
+
+Highlights
+===========================================================
+
+**Interactive Tours**
+  The interactive tours framework allows developers and deployers to build
+  interactive tutorials for users superimposed on the actual Galaxy web front
+  end. Unlike video tutorials, these will not become stale and are truly 
+  interactive (allowing users to actually navigate and interact with Galaxy).
+  Galaxy 16.01 ships with two example tours and new ones can easily be added by
+  creating a small YAML file describing the tour. Try the `Galaxy UI tour
+  <https://usegalaxy.org/?tour_id=core.galaxy_ui>`__  on Main.
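+
+  A tour itself is just a small YAML file. A hypothetical minimal example
+  (the ``config/plugins/tours/`` directory and the step keys mirror the
+  bundled example tours) could be added like this:
+
+  .. code-block:: shell
+
+      % # Create a two-step tour; keys shown are the common ones
+      % # (name, description, steps with title/content/element).
+      % cat > config/plugins/tours/my_first_tour.yaml <<'EOF'
+      name: My First Tour
+      description: A two-step example tour
+      steps:
+          - title: "Welcome"
+            content: "This tour walks through the Galaxy UI."
+          - title: "Tools"
+            element: "#left"
+            content: "Runnable tools are listed in this panel."
+      EOF
+      % # Then open Galaxy with ?tour_id=my_first_tour appended to the URL.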
+
+**Wheels**
+  Galaxy's Python dependencies have traditionally been distributed as eggs_ using
+  custom dependency management code to enable Galaxy to distribute binary 
+  dependencies (enabling quick downloads and minimal system requirements). With 
+  this release all of that infrastructure has been replaced with a modern Python
+  infrastructure based on pip_ and wheels_.
+  Work done as part of this to enable binary dependencies on Linux has been 
+  included with the recently released pip_ 8.
+
+  Detailed documentation on these changes and their impact under a variety of
+  Galaxy deployment scenarios can be found in the :ref:`framework-dependencies`
+  section of the Admin documentation.
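+
+  As a concrete sketch (Galaxy's startup scripts normally do this for you;
+  the index URL below is the project's public wheel server):
+
+  .. code-block:: shell
+
+      % # Install Galaxy's pinned dependencies as pre-built wheels.
+      % pip install --index-url https://wheels.galaxyproject.org/simple/ \
+            -r requirements.txt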
+
+**Nested Workflows**
+  Workflows may now run other workflows as a single abstract step in the parent
+  workflow. This allows for reusing subworkflows in your analyses.
+
+`GitHub <https://github.com/galaxyproject/galaxy>`__
+===========================================================
+
+New
+  .. code-block:: shell
+
+      % git clone -b master https://github.com/galaxyproject/galaxy.git
+
+Update to latest stable release
+  .. code-block:: shell
+
+      % git checkout master && git pull --ff-only origin master
+
+Update to exact version
+  .. code-block:: shell
+
+      % git checkout v16.01
+
+
+`BitBucket <https://bitbucket.org/galaxy/galaxy-dist>`__
+===========================================================
+
+Upgrade
+  .. code-block:: shell
+
+      % hg pull
+      % hg update latest_16.01
+
+
+See `our wiki <https://wiki.galaxyproject.org/Develop/SourceCode>`__ for additional details regarding the source code locations.
+
+Deprecation Notices
+===========================================================
+
+Barring a strong outcry from deployers, 16.01 will be the last release of Galaxy to
+support Python 2.6. For more information, see Galaxy Github
+`Issue #1596 <https://github.com/galaxyproject/galaxy/issues/1596>`_.
+
+.. _eggs: https://pythonhosted.org/setuptools/formats.html
+.. _wheels: http://pythonwheels.com/
+.. _pip: https://pip.pypa.io/en/stable/
+
+Security
+===========================================================
+
+Multiple security vulnerabilities were identified during this release cycle and
+fixed concurrently with the release. In addition, the fixes have been
+backported to older releases.
+
+The Galaxy Committers would like to thank `Youri Hoogstrate
+<https://github.com/yhoogstrate>`_ at the Erasmus MC, Rotterdam, who initially
+reported the ``hg push`` vulnerability. Through additional auditing based on
+this attack vector, we discovered the other vulnerabilities.
+
+**Galaxy**
+  Multiple security vulnerabilities were discovered in Galaxy that allow
+  malicious actors to read and write files on the Galaxy server. Additionally,
+  Galaxy servers on which a rarely used feature has been enabled are vulnerable
+  to an arbitrary code execution exploit.
+
+  1. A write vulnerability exists in the history import mechanism. It is
+     possible to create a history tar archive that contains files with parent
+     directory components in the file path (e.g. ``foo/../../bar`` would
+     extract to ``../bar``), and these archive members would be written if the
+     user running the Galaxy server had write permission to the given path
+     (a minimal detection sketch follows this list).
+
+  2. A read vulnerability exists in the object store path composition code.
+     Galaxy allows clients to add elements to the end of a path to "extra"
+     files associated with a dataset (as is the case with composite datatypes).
+     These elements were not being checked to ensure they did not contain
+     relative parent references (``..``) or did not start with an absolute path
+     character (``/``). Because of this, the dataset display methods could be
+     manipulated to return the contents of any files for which the Galaxy
+     server user had read permission.
+
+  3. An arbitrary code execution vulnerability exists in the Galaxy sample
+     tracking system. The sample tracking system included a feature which
+     allowed administrators to browse remote "external services" (such as
+     sequencers) to choose files to transfer to the Galaxy server. This
+     browsing code used a shell invocation which did not sanitize user input.
+     However, this code is only reachable if at least one external service has
+     ever been defined. 
+
+  Fixes for all three issues have been applied to Galaxy releases back to
+  v14.10.
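+
+  As a minimal detection sketch for the write vulnerability above (an
+  illustration only, not the project's actual fix), archive members can be
+  screened for absolute paths or ``..`` components before extraction:
+
+  .. code-block:: shell
+
+      % # Flag any member starting with '/' or containing '..' as a path
+      % # component; a match means the archive is unsafe to import.
+      % tar -tf history.tar | grep -E '(^/|(^|/)\.\.(/|$))' \
+            && echo "unsafe archive"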
+
+**Tool Shed**
+  Multiple security vulnerabilities were discovered in the Tool Shed that allow
+  malicious actors to read and write files on the Tool Shed server outside of
+  normal Tool Shed repository directories.
+
+  1. A write vulnerability exists in the Tool Shed tarball and capsule upload
+     functionality. It is possible to create a tar archive that contains files
+     with parent directory components in the file path (e.g.  ``foo/../../bar``
+     would extract to ``../bar``), and these archive members would be written
+     if the user running the Tool Shed had write permission to the given path.
+     The Tool Shed tarball handling code checked for invalid characters (``/``
+     or ``..``) at the beginning of the path but not for ``..`` in the middle
+     of a path.
+
+  2. A read vulnerability exists in multiple places. The first is in the (now
+     deprecated) ``hg push`` functionality for updating Tool Shed repositories.
+     This method allows malicious actors to push symlinks whose targets are
+     outside the repository (either via an absolute or relative path). The
+     contents of the targets would then be visible in the Tool Shed repository
+     contents viewer, if the Tool Shed user has read permission on the target.
+
+  3. A second read vulnerability exists in the Tool Shed repository contents
+     viewer. The viewer would allow a malicious actor to specify a path outside
+     the repository, and if the Tool Shed system user had read permissions on
+     that path, it would be displayed. The viewer also did not check to ensure
+     that the targets of symlinks in a repository did not point outside the
+     repository (a detection sketch follows below).
+
+  The repository contents viewer read vulnerability also exists in Galaxy, but
+  is only reachable/exploitable by admin users. Fixes for vulnerability #3 have
+  been applied to Galaxy/Tool Shed releases back to v14.10, and issues #1 and
+  #2 have been applied to releases back to v15.01.
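+
+  A detection sketch for the symlink read vulnerabilities (again only an
+  illustration): symlinks in a repository working directory whose targets
+  are absolute or contain ``..`` can be listed with ``find``:
+
+  .. code-block:: shell
+
+      % # From the repository root, list symlinks pointing outside it.
+      % find . -type l \( -lname '/*' -o -lname '*..*' \)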
+
+Release Notes
+===========================================================
+
+.. include:: 16.01.rst
+   :start-after: announce_start
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/16.04.rst b/doc/source/releases/16.04.rst
new file mode 100644
index 0000000..c3bc54c
--- /dev/null
+++ b/doc/source/releases/16.04.rst
@@ -0,0 +1,867 @@
+
+.. to_doc
+
+16.04
+===============================
+
+.. announce_start
+
+Enhancements
+-------------------------------
+
+.. major_feature
+
+* Overhaul of Tools and Jobs
+  `Pull Request 1688`_
+* Implement an Embedded Pulsar Job Runner
+  `Pull Request 2057`_
+* Use the API to install repositories instead of loading the
+  Tool Shed in an iframe. (Beta)
+  `Pull Request 1392`_
+
+.. feature
+
+* Phinch interactive environment
+  (thanks to `@shiltemann <https://github.com/shiltemann>`__.)
+  `Pull Request 1647`_
+* Add iobio external display applications for BAM and VCF.
+  `Pull Request 1926`_
+* add chemical datatypes
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1941`_
+* Scratchbook tour
+  `Pull Request 1463`_
+* Work toward automating release management.
+  `Pull Request 1613`_
+* Basic tool error Sentry reporting.
+  `Pull Request 1900`_
+* disable 'hg push' to TS repositories
+  `Pull Request 2033`_
+* Generic GIE Launcher, GIE Image Chooser, multiple datasets
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1403`_
+* The entire Python source code is now continuously checked with flake8 for
+  PEP-8 style consistency and common errors. The last 3500 errors were fixed in
+  this release
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1489`_, `Pull Request 1713`_, `Pull Request 1755`_
+
+.. enhancement
+
+* Implement new GIE proxy
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1807`_
+* Workflow editor overview click navigation
+  `Pull Request 1843`_
+* API, history contents: allow filters, limit/offset, and ordering
+  `Pull Request 1602`_
+* datalibs: introduce folder management; fix various glitches; refactor
+  `Pull Request 1562`_
+* Add makefile target for release process
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1433`_
+* libraries: provide select all/none options when importing
+  `Pull Request 1970`_
+* libraries: add 'create history & import to it' feature
+  `Pull Request 2017`_
+* Bcf.gz to bcf (and reverse) converters + bcf_bgzip data type
+  (thanks to `@markiskander <https://github.com/markiskander>`__.)
+  `Pull Request 1148`_
+* Update SnpEffDb and SnpSiftDbNSFP for SnpEff v4.1
+  (thanks to `@jj-umn <https://github.com/jj-umn>`__.)
+  `Pull Request 1280`_
+* Docker Testing - Newer versions of phantomjs and casperjs, in container
+  wheels.
+  `Pull Request 1449`_
+* Add run_toolshed_tests.html to .gitignore.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1456`_
+* Allow sorting of discover_datasets elements.
+  `Pull Request 1512`_
+* Cleanup common ui elements, add test cases
+  `Pull Request 1519`_
+* add minor styling to tour popover
+  `Pull Request 1539`_
+* Code design and legacy cleanup of datatypes and metadata.
+  `Pull Request 1556`_
+* show_params input walking
+  `Pull Request 1561`_
+* Add API endpoint for fetching all metadata for a repository.
+  `Pull Request 1565`_
+* Fix parameter validation
+  `Pull Request 1572`_
+* Remove recovery hack for REALLY old jobs.
+  `Pull Request 1574`_
+* Clarify area github tags.
+  `Pull Request 1579`_
+* update pinned-requirements.txt
+  (thanks to `@matthdsm <https://github.com/matthdsm>`__.)
+  `Pull Request 1605`_
+* Improve dbkey handling for discovered datasets.
+  `Pull Request 1610`_
+* Various improvements to the heartbeat
+  `Pull Request 1614`_
+* Visualization plugins: add routes that more closely match potential static assets.
+  `Pull Request 1615`_
+* Disable individual file metadata changes in upload
+  `Pull Request 1625`_
+* added resolver dependency display to the tool view page
+  (thanks to `@zipho <https://github.com/zipho>`__.)
+  `Pull Request 1632`_
+* Cherry-picked changes from pull request `#1392
+  <https://github.com/galaxyproject/galaxy/issues/1392>`__.
+  `Pull Request 1821`_
+* Export toolshed repository information for workflow portability
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1636`_
+* Lint optional handling on conditional test parameters.
+  `Pull Request 1639`_
+* Updated hashes for bioblend v0.7.0
+  (thanks to `@matthdsm <https://github.com/matthdsm>`__.)
+  `Pull Request 1646`_
+* Add tox startup (run.sh) test and test on OS X and linux
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1649`_
+* Optionally pass dataset lists as a single file within tool wrappers
+  (thanks to `@einon <https://github.com/einon>`__.)
+  `Pull Request 1658`_
+* add a zip datatype which won't be extracted
+  (thanks to `@lecorguille <https://github.com/lecorguille>`__.)
+  `Pull Request 1665`_
+* Tour improvements and bugfixes
+  `Pull Request 1671`_
+* Add Marius van den Beek to Galaxy committer group.
+  `Pull Request 1699`_
+* Revise tool parameter handling, remove redundancies
+  `Pull Request 1711`_
+* Python 2/3 string handling
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1731`_
+* workaround for error message on tool lookup when the original toolshed has
+  been disabled
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1738`_
+* Make api/tools return tool_shed_repository information.
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1752`_
+* Use visit inputs for check_and_update_param_values
+  `Pull Request 1764`_
+* Update makefile and add the script I used to apply security patches for
+  16.01
+  `Pull Request 1793`_
+* Implement dataset empty input validator.
+  `Pull Request 1808`_
+* Makefile help target + formatting
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1824`_
+* Add license and repository information to package.json.
+  `Pull Request 1829`_
+* Improved logging during tool executions.
+  `Pull Request 1832`_
+* Do not access the history when loading the workflow editor
+  `Pull Request 1861`_
+* Tones down the warning message about tool changes
+  (thanks to `@bwlang <https://github.com/bwlang>`__.)
+  `Pull Request 1893`_
+* Use shared value serialization for tool model
+  `Pull Request 1901`_
+* adding contrib init script for reports and modifying the status check to report output per process when running multiple processes
+  (thanks to `@miloaec <https://github.com/miloaec>`__.)
+  `Pull Request 1911`_
+* Retrieve authentication also for long URLs
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1912`_
+* Modify run.sh to add --restart option, for a more consistent use of parameters.
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__.)
+  `Pull Request 1914`_
+* Rename tool_shed_get to url_get.
+  `Pull Request 1918`_
+* Allow testing element counts in tool output collections.
+  `Pull Request 1942`_
+* Use database query rather than dataset iteration to compute dataset counts in saved histories grid.
+  `Pull Request 1948`_
+* Add "new" state to list of dataset states in saved history list.
+  `Pull Request 1949`_
+* Add IUC as a new default bioconda channel
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1950`_
+* More job related timing logging.
+  `Pull Request 1956`_
+* Improvements to workflow scaling performance testing scripts.
+  `Pull Request 1958`_
+* Optimization for discovering datasets for dynamic output collection.
+  `Pull Request 1959`_
+* Improve tabular datatypes.
+  (thanks to `@brenninc <https://github.com/brenninc>`__ and `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1968`_, `Pull Request 2317`_
+* Optimize adding datasets to large histories.
+  `Pull Request 1983`_
+* Much more friendly route mapping.
+  `Pull Request 2002`_
+* Revise content selection, add test cases
+  `Pull Request 2011`_
+* Don't setup a default tool_data_table_config_path in the shed.
+  `Pull Request 2025`_
+* data_column to json config file
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2026`_
+* Revise input wrapper, add test cases
+  `Pull Request 2027`_
+* enable obfs datatype during upload
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2028`_
+* Better element_identifier handling in repeat sections
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2029`_
+* Pulsar-As-A-Dependency
+  `Pull Request 2052`_
+* Simplify Workflow sharing menu
+  (thanks to `@nturaga <https://github.com/nturaga>`__.)
+  `Pull Request 2060`_
+* Add Phinch as an external display application.
+  `Pull Request 2069`_
+* For non-shed tool data - read loc from sample if available.
+  `Pull Request 2070`_
+* tool lineage for versionless toolshed tool_ids
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2082`_
+* make new beta TS tools installation via API configurable
+  `Pull Request 2088`_
+* Patch in visual separation of section parameters
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2091`_
+* Add sample supervisor config
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2096`_
+* Fix cli runner: use embed_metadata_in_job parameter
+  (thanks to `@ThomasWollmann <https://github.com/ThomasWollmann>`__.)
+  `Pull Request 2107`_
+* Switch Dockerized commands to use sh instead of bash.
+  `Pull Request 2282`_
+
+.. small_enhancement
+
+* Remove IPython IE, which was replaced by Jupyter IE.
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 1402`_
+* Added py34-lint tox target but forgot to update .travis.yml.
+  `Pull Request 1457`_
+* fix old jobparam hack for importing files to library
+  `Pull Request 1488`_
+* An attempt to add limit/offset to history contents
+  `Pull Request 1490`_
+* Revise label handling in form
+  `Pull Request 1496`_
+* History UI: clean up.
+  `Pull Request 1522`_
+* Client-build, Webpack: add tasks to grunt for the common webpack tasks,
+  update readme
+  `Pull Request 1523`_
+* Remove handlebars and rely solely on underscore templates
+  `Pull Request 1537`_
+* disable email notifications from travis
+  `Pull Request 1592`_
+* Remove unnecessary variable and assignment of job command
+  (thanks to `@einon <https://github.com/einon>`__.)
+  `Pull Request 1616`_
+* Implement the ratable mixin
+  `Pull Request 1618`_
+* Tour cleanup, remove a few globals.
+  `Pull Request 1621`_
+* Refactor workflow loading.
+  `Pull Request 1735`_
+* Swapping from svgfig to svgwrite
+  `Pull Request 1747`_
+* various libraries refactoring and bugfixes
+  `Pull Request 1751`_
+* Refactor the Html Datatype Class into text.py instead of images.py
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__.)
+  `Pull Request 1760`_
+* Drop python2.6 (deprecated, will not be supported in 16.04) from testing.
+  `Pull Request 1785`_
+* Rework history updating
+  `Pull Request 1788`_
+* Tests for some of the 16.01 security vulnerabilities
+  `Pull Request 1794`_
+* Reduce the use of mutable types in toolshed test framework's method
+  definitions.
+  `Pull Request 1813`_
+* Remove decryption/encryption of tool states
+  `Pull Request 1838`_
+* Do not import dumps and loads from ``galaxy.util.json``.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1840`_
+* Replace get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision with get_repository_for_dependency_relationship.
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1868`_
+* Cosmetic comma fixes :)
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__.)
+  `Pull Request 1874`_
+* More debugging for transiently failing tool shed test.
+  `Pull Request 1939`_
+* Improve logging and retry for another TS test...
+  `Pull Request 1961`_
+* Add page layout test cases
+  `Pull Request 1991`_
+* Fix inconsistency in update state handling for tool modules
+  `Pull Request 1993`_
+* Functional Test Drivers Overhaul
+  `Pull Request 2016`_
+* Remove install and test code.
+  `Pull Request 2018`_
+* Remove twill functional tests we don't actively run.
+  `Pull Request 2019`_
+* Fix transiently failing tool shed tests.
+  `Pull Request 2030`_
+* Libraries: faster, refactored, cleaned
+  `Pull Request 2031`_
+* Adjust testing directories so no python root is ever "tool_shed".
+  `Pull Request 2067`_
+* Fix pbs runner file touch.
+  `Pull Request 2074`_
+* Move scripts out of lib/tool_shed.
+  `Pull Request 2093`_
+* Consolidate duplicated method in tool_shed/model.
+  `Pull Request 2099`_
+* Remove redundant ui divs, move masthead into #everything
+  `Pull Request 2182`_
+* Remove SLURM memory limit warning from stderr if the job was successful
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2309`_
+
+
+Fixes
+-------------------------------
+
+.. major_bug
+
+* upgrade mercurial wheel to latest (fixing CVE issues)
+  `Pull Request 2045`_
+* Add changeset_revision to tool attributes, avoid self.tool_shed_repository.
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1802`_
+* Do not convert spaces to tabs when importing datasets into a library
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 1985`_
+
+.. bug
+
+* Fix that typo in relation_builder.
+  `Pull Request 1453`_
+* URL generation tweaks for utils.js
+  `Pull Request 1478`_
+* Fix project linting for new pep8
+  `Pull Request 1483`_
+* Fix Python 3 problem causing Travis failure of dev.
+  `Pull Request 1505`_
+* catch Exception and properly log errors
+  `Pull Request 1510`_
+* Change python print() format to be backward compatible with older versions.
+  (thanks to `@einon <https://github.com/einon>`__.)
+  `Pull Request 1520`_
+* Add js for mako based masthead
+  `Pull Request 1533`_
+* restrict blue popover to tours
+  `Pull Request 1577`_
+* small spelling error
+  (thanks to `@matthdsm <https://github.com/matthdsm>`__.)
+  `Pull Request 1582`_
+* Fix installation of repository suites outside of tool panel section
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1601`_
+* Fixes ``<param argument="--set" />`` not working when ``help=""`` is not set
+  (thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__.)
+  `Pull Request 1650`_
+* Fix validation for data source tools
+  `Pull Request 1654`_
+* Better fix for missing element identifier
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1693`_
+* Update david identifier types
+  (thanks to `@pavanvidem <https://github.com/pavanvidem>`__.)
+  `Pull Request 1696`_
+* Drop Ross from the committers group.
+  `Pull Request 1698`_
+* Wrap conditional test parameters
+  `Pull Request 1714`_
+* Remove len(stderr), breaks on recent docker versions
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1769`_
+* Strip URL of download_file and download_by_url install actions.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1775`_
+* Fix tool form rendering of sections
+  `Pull Request 1783`_
+* Fix unused import.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 1796`_
+* Fix tool shed tests for commit e4a1d5727805168a9fd15aca1cdd21630ada2bbc
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 1798`_
+* Checks for api_key before checking for header from SSO.
+  (thanks to `@MatthewRalston <https://github.com/MatthewRalston>`__.)
+  `Pull Request 1801`_
+* Ensure tool.changeset_revision is set...
+  `Pull Request 1806`_
+* Change many job mapped properties to lazy loads
+  `Pull Request 1809`_
+* Whitelist logging tweaks
+  `Pull Request 1819`_
+* Fix upload tool routing
+  `Pull Request 1827`_
+* Fix the error raised by 'grunt style' when using node 5.7.
+  `Pull Request 1841`_
+* Do not create text values for failed inputs
+  `Pull Request 1844`_
+* Prevent tours from kicking off within iframes
+  `Pull Request 1846`_
+* Fix repeat prefix
+  `Pull Request 1848`_
+* also update rrda when repairing or updating a repository
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 1850`_
+* Fix regex validator
+  `Pull Request 1862`_
+* Tour routing overhaul
+  `Pull Request 1870`_
+* Add dedicated client endpoint to the root controller.
+  `Pull Request 1879`_
+* Update check_python.py now dropping Python 2.6
+  (thanks to `@peterjc <https://github.com/peterjc>`__.)
+  `Pull Request 1883`_
+* Fix citation-model to fail silently/gracefully
+  `Pull Request 1884`_
+* Change to sentry middleware to work with modern raven clients.
+  `Pull Request 1895`_
+* svgfig->svgwrite in unpinned requirements
+  `Pull Request 1896`_
+* Fix icon sizes
+  `Pull Request 1934`_
+* Fix tool downloads in tool form
+  `Pull Request 1935`_
+* Fixing error in run.sh script
+  (thanks to `@kellrott <https://github.com/kellrott>`__.)
+  `Pull Request 1954`_
+* Fix typo: send-->sent
+  `Pull Request 1965`_
+* Fix farbtastic's use of deprecated jquery fns by loading jq-migrate in ``galaxy.panels.mako``.
+  `Pull Request 1972`_
+* Remove duplicate help target
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 1980`_
+* Fix booleans in workflow editor
+  `Pull Request 1990`_
+* libraries: change default size of fa in iconspans
+  `Pull Request 2000`_
+* libraries: move text out of icon spans for library dataset view
+  `Pull Request 2008`_
+* Fix unbound error that is possible if using tool bursting.
+  `Pull Request 2009`_
+* Fix multiple flag for workflow dataset inputs
+  `Pull Request 2021`_
+* fix duplication of select2 entries
+  `Pull Request 2022`_
+* Fix fluent query log serialization when datetime types are in use.
+  `Pull Request 2039`_
+* Workflow import fix when tools are missing.
+  `Pull Request 2048`_
+* Managers: remove UTC zone when parsing dates
+  (thanks also to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2042`_, `Pull Request 2062`_, `Pull Request 2065`_
+* Adjust the user disk usage pgcalc function to make its dry-run slightly safer.
+  `Pull Request 2063`_
+* Allow tool confs with a tool_path to not be interpreted as shed confs.
+  `Pull Request 2066`_
+* Fix deps.command.download_command on Mac OS X.
+  `Pull Request 2075`_
+* Show sections in workflow run
+  `Pull Request 2087`_
+* Workflow section fix backport
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2092`_
+* Run external local set_metadata jobs in the job's working directory
+  `Pull Request 2094`_
+* make dependencies browsable again
+  `Pull Request 2101`_
+* Convert the DRMAA runner to use Pulsar's DRMAA session wrapper
+  `Pull Request 2102`_
+* Updated to Dependency change in b167a741a444c3988447b0d63a1ba3dc5e4e62f5
+  (thanks to `@Christian-B <https://github.com/Christian-B>`__.)
+  `Pull Request 2104`_
+* Fix datatype list in workflow editor
+  `Pull Request 2105`_
+* Workaround for the toolshed's hgweb.
+  `Pull Request 2106`_
+* Update debian init script
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2109`_
+* Rev Pulsar to 0.7.0.dev3.
+  `Pull Request 2122`_
+* change run_tool_shed.py to see the file in the correct location
+  (thanks to `@nturaga <https://github.com/nturaga>`__.)
+  `Pull Request 2131`_
+* Fixes due to `#2093 <https://github.com/galaxyproject/galaxy/issues/2093>`__
+  and `#2018 <https://github.com/galaxyproject/galaxy/issues/2018>`__
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2133`_
+* markupsafe.escape() in Python2 does not work on str containing non-ASCII
+  characters
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2138`_
+* load options from config, not from options
+  `Pull Request 2139`_
+* fix/replace biomart data source
+  `Pull Request 2149`_
+* Add tool parameters back to parameters and expose for certain tools
+  `Pull Request 2156`_
+* Fix wrapper issue, overload __ne__
+  `Pull Request 2161`_
+* Fix grouping tool and enhance performance when removing lines.
+  `Pull Request 2166`_
+* libraries: always fetch new permissions as we are reusing the view
+  `Pull Request 2176`_
+* Fix for redirecting a non-user when a tool has require_login=True.
+  `Pull Request 2180`_
+* Fix: truncate long titles of published histories
+  `Pull Request 2189`_
+* fix a bug with missing history_id in libraries dataset import
+  `Pull Request 2190`_
+* Fix message in legacy panel mako
+  `Pull Request 2191`_
+* Prepass on remote user header based on maildomain and normalize config.
+  `Pull Request 2195`_
+* Extend to_json for dataset tool parameters
+  `Pull Request 2196`_
+* Pages parser fix
+  `Pull Request 2197`_
+* Translate data source tool parameters on parameter expansion
+  `Pull Request 2201`_
+* Sync job_script module with Pulsar to fix doctest.
+  `Pull Request 2203`_
+* Improve error message for data source tools, executed through the
+  tool_runner controller
+  `Pull Request 2204`_
+* Add fixed validation check for data_source tools URL parameter
+  `Pull Request 2208`_
+* Fix tool action redirect url for non-default tools
+  `Pull Request 2211`_
+* Browse library date handling for non-ASCII month abbreviations
+  `Pull Request 2214`_
+* Avoid reset of cursor position for input fields during manual entry
+  `Pull Request 2218`_
+* Add conditional statsd requirement
+  `Pull Request 2227`_
+* Fix js value validator
+  `Pull Request 2234`_
+* Encode collection reduce in serializable fashion
+  `Pull Request 2238`_
+* Update ``_condarc`` automatically
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2255`_
+* Truncate job name in DRMAA runner by default for PBSPro.
+  `Pull Request 2265`_
+* Cherrypick of encoding fix
+  `Pull Request 2266`_
+* If a Slurm post-mortem determines that a job is in a non-terminal state,
+  return the job to the monitor queue
+  `Pull Request 2311`_
+* Unicodify pretty datetimes which can contain non-ASCII chars
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2312`_
+* Relax default value acceptance condition
+  `Pull Request 2316`_
+* Fixes the proxy naming
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2333`_
+* Re-fix the bug with multiple preceding '/' characters in the GIE proxy
+  prefix
+  `Pull Request 2339`_
+* Do not pollute param_dict with a non JSONifiable dict
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2345`_
+
+.. github_links
+.. _Pull Request 1148: https://github.com/galaxyproject/galaxy/pull/1148
+.. _Pull Request 1280: https://github.com/galaxyproject/galaxy/pull/1280
+.. _Pull Request 1331: https://github.com/galaxyproject/galaxy/pull/1331
+.. _Pull Request 1357: https://github.com/galaxyproject/galaxy/pull/1357
+.. _Pull Request 1392: https://github.com/galaxyproject/galaxy/pull/1392
+.. _Pull Request 1402: https://github.com/galaxyproject/galaxy/pull/1402
+.. _Pull Request 1403: https://github.com/galaxyproject/galaxy/pull/1403
+.. _Pull Request 1431: https://github.com/galaxyproject/galaxy/pull/1431
+.. _Pull Request 1433: https://github.com/galaxyproject/galaxy/pull/1433
+.. _Pull Request 1449: https://github.com/galaxyproject/galaxy/pull/1449
+.. _Pull Request 1453: https://github.com/galaxyproject/galaxy/pull/1453
+.. _Pull Request 1454: https://github.com/galaxyproject/galaxy/pull/1454
+.. _Pull Request 1456: https://github.com/galaxyproject/galaxy/pull/1456
+.. _Pull Request 1457: https://github.com/galaxyproject/galaxy/pull/1457
+.. _Pull Request 1463: https://github.com/galaxyproject/galaxy/pull/1463
+.. _Pull Request 1478: https://github.com/galaxyproject/galaxy/pull/1478
+.. _Pull Request 1483: https://github.com/galaxyproject/galaxy/pull/1483
+.. _Pull Request 1488: https://github.com/galaxyproject/galaxy/pull/1488
+.. _Pull Request 1489: https://github.com/galaxyproject/galaxy/pull/1489
+.. _Pull Request 1490: https://github.com/galaxyproject/galaxy/pull/1490
+.. _Pull Request 1496: https://github.com/galaxyproject/galaxy/pull/1496
+.. _Pull Request 1505: https://github.com/galaxyproject/galaxy/pull/1505
+.. _Pull Request 1510: https://github.com/galaxyproject/galaxy/pull/1510
+.. _Pull Request 1512: https://github.com/galaxyproject/galaxy/pull/1512
+.. _Pull Request 1519: https://github.com/galaxyproject/galaxy/pull/1519
+.. _Pull Request 1520: https://github.com/galaxyproject/galaxy/pull/1520
+.. _Pull Request 1522: https://github.com/galaxyproject/galaxy/pull/1522
+.. _Pull Request 1523: https://github.com/galaxyproject/galaxy/pull/1523
+.. _Pull Request 1533: https://github.com/galaxyproject/galaxy/pull/1533
+.. _Pull Request 1537: https://github.com/galaxyproject/galaxy/pull/1537
+.. _Pull Request 1539: https://github.com/galaxyproject/galaxy/pull/1539
+.. _Pull Request 1556: https://github.com/galaxyproject/galaxy/pull/1556
+.. _Pull Request 1561: https://github.com/galaxyproject/galaxy/pull/1561
+.. _Pull Request 1562: https://github.com/galaxyproject/galaxy/pull/1562
+.. _Pull Request 1565: https://github.com/galaxyproject/galaxy/pull/1565
+.. _Pull Request 1566: https://github.com/galaxyproject/galaxy/pull/1566
+.. _Pull Request 1572: https://github.com/galaxyproject/galaxy/pull/1572
+.. _Pull Request 1574: https://github.com/galaxyproject/galaxy/pull/1574
+.. _Pull Request 1577: https://github.com/galaxyproject/galaxy/pull/1577
+.. _Pull Request 1579: https://github.com/galaxyproject/galaxy/pull/1579
+.. _Pull Request 1582: https://github.com/galaxyproject/galaxy/pull/1582
+.. _Pull Request 1583: https://github.com/galaxyproject/galaxy/pull/1583
+.. _Pull Request 1591: https://github.com/galaxyproject/galaxy/pull/1591
+.. _Pull Request 1592: https://github.com/galaxyproject/galaxy/pull/1592
+.. _Pull Request 1601: https://github.com/galaxyproject/galaxy/pull/1601
+.. _Pull Request 1602: https://github.com/galaxyproject/galaxy/pull/1602
+.. _Pull Request 1605: https://github.com/galaxyproject/galaxy/pull/1605
+.. _Pull Request 1610: https://github.com/galaxyproject/galaxy/pull/1610
+.. _Pull Request 1613: https://github.com/galaxyproject/galaxy/pull/1613
+.. _Pull Request 1614: https://github.com/galaxyproject/galaxy/pull/1614
+.. _Pull Request 1615: https://github.com/galaxyproject/galaxy/pull/1615
+.. _Pull Request 1616: https://github.com/galaxyproject/galaxy/pull/1616
+.. _Pull Request 1618: https://github.com/galaxyproject/galaxy/pull/1618
+.. _Pull Request 1621: https://github.com/galaxyproject/galaxy/pull/1621
+.. _Pull Request 1625: https://github.com/galaxyproject/galaxy/pull/1625
+.. _Pull Request 1632: https://github.com/galaxyproject/galaxy/pull/1632
+.. _Pull Request 1636: https://github.com/galaxyproject/galaxy/pull/1636
+.. _Pull Request 1639: https://github.com/galaxyproject/galaxy/pull/1639
+.. _Pull Request 1646: https://github.com/galaxyproject/galaxy/pull/1646
+.. _Pull Request 1647: https://github.com/galaxyproject/galaxy/pull/1647
+.. _Pull Request 1649: https://github.com/galaxyproject/galaxy/pull/1649
+.. _Pull Request 1650: https://github.com/galaxyproject/galaxy/pull/1650
+.. _Pull Request 1654: https://github.com/galaxyproject/galaxy/pull/1654
+.. _Pull Request 1658: https://github.com/galaxyproject/galaxy/pull/1658
+.. _Pull Request 1665: https://github.com/galaxyproject/galaxy/pull/1665
+.. _Pull Request 1670: https://github.com/galaxyproject/galaxy/pull/1670
+.. _Pull Request 1671: https://github.com/galaxyproject/galaxy/pull/1671
+.. _Pull Request 1688: https://github.com/galaxyproject/galaxy/pull/1688
+.. _Pull Request 1693: https://github.com/galaxyproject/galaxy/pull/1693
+.. _Pull Request 1696: https://github.com/galaxyproject/galaxy/pull/1696
+.. _Pull Request 1698: https://github.com/galaxyproject/galaxy/pull/1698
+.. _Pull Request 1699: https://github.com/galaxyproject/galaxy/pull/1699
+.. _Pull Request 1711: https://github.com/galaxyproject/galaxy/pull/1711
+.. _Pull Request 1713: https://github.com/galaxyproject/galaxy/pull/1713
+.. _Pull Request 1714: https://github.com/galaxyproject/galaxy/pull/1714
+.. _Pull Request 1731: https://github.com/galaxyproject/galaxy/pull/1731
+.. _Pull Request 1735: https://github.com/galaxyproject/galaxy/pull/1735
+.. _Pull Request 1738: https://github.com/galaxyproject/galaxy/pull/1738
+.. _Pull Request 1742: https://github.com/galaxyproject/galaxy/pull/1742
+.. _Pull Request 1747: https://github.com/galaxyproject/galaxy/pull/1747
+.. _Pull Request 1751: https://github.com/galaxyproject/galaxy/pull/1751
+.. _Pull Request 1752: https://github.com/galaxyproject/galaxy/pull/1752
+.. _Pull Request 1755: https://github.com/galaxyproject/galaxy/pull/1755
+.. _Pull Request 1756: https://github.com/galaxyproject/galaxy/pull/1756
+.. _Pull Request 1760: https://github.com/galaxyproject/galaxy/pull/1760
+.. _Pull Request 1764: https://github.com/galaxyproject/galaxy/pull/1764
+.. _Pull Request 1769: https://github.com/galaxyproject/galaxy/pull/1769
+.. _Pull Request 1770: https://github.com/galaxyproject/galaxy/pull/1770
+.. _Pull Request 1775: https://github.com/galaxyproject/galaxy/pull/1775
+.. _Pull Request 1783: https://github.com/galaxyproject/galaxy/pull/1783
+.. _Pull Request 1785: https://github.com/galaxyproject/galaxy/pull/1785
+.. _Pull Request 1788: https://github.com/galaxyproject/galaxy/pull/1788
+.. _Pull Request 1793: https://github.com/galaxyproject/galaxy/pull/1793
+.. _Pull Request 1794: https://github.com/galaxyproject/galaxy/pull/1794
+.. _Pull Request 1796: https://github.com/galaxyproject/galaxy/pull/1796
+.. _Pull Request 1798: https://github.com/galaxyproject/galaxy/pull/1798
+.. _Pull Request 1800: https://github.com/galaxyproject/galaxy/pull/1800
+.. _Pull Request 1801: https://github.com/galaxyproject/galaxy/pull/1801
+.. _Pull Request 1802: https://github.com/galaxyproject/galaxy/pull/1802
+.. _Pull Request 1806: https://github.com/galaxyproject/galaxy/pull/1806
+.. _Pull Request 1807: https://github.com/galaxyproject/galaxy/pull/1807
+.. _Pull Request 1808: https://github.com/galaxyproject/galaxy/pull/1808
+.. _Pull Request 1809: https://github.com/galaxyproject/galaxy/pull/1809
+.. _Pull Request 1813: https://github.com/galaxyproject/galaxy/pull/1813
+.. _Pull Request 1819: https://github.com/galaxyproject/galaxy/pull/1819
+.. _Pull Request 1821: https://github.com/galaxyproject/galaxy/pull/1821
+.. _Pull Request 1824: https://github.com/galaxyproject/galaxy/pull/1824
+.. _Pull Request 1827: https://github.com/galaxyproject/galaxy/pull/1827
+.. _Pull Request 1829: https://github.com/galaxyproject/galaxy/pull/1829
+.. _Pull Request 1832: https://github.com/galaxyproject/galaxy/pull/1832
+.. _Pull Request 1835: https://github.com/galaxyproject/galaxy/pull/1835
+.. _Pull Request 1838: https://github.com/galaxyproject/galaxy/pull/1838
+.. _Pull Request 1840: https://github.com/galaxyproject/galaxy/pull/1840
+.. _Pull Request 1841: https://github.com/galaxyproject/galaxy/pull/1841
+.. _Pull Request 1843: https://github.com/galaxyproject/galaxy/pull/1843
+.. _Pull Request 1844: https://github.com/galaxyproject/galaxy/pull/1844
+.. _Pull Request 1846: https://github.com/galaxyproject/galaxy/pull/1846
+.. _Pull Request 1848: https://github.com/galaxyproject/galaxy/pull/1848
+.. _Pull Request 1850: https://github.com/galaxyproject/galaxy/pull/1850
+.. _Pull Request 1853: https://github.com/galaxyproject/galaxy/pull/1853
+.. _Pull Request 1861: https://github.com/galaxyproject/galaxy/pull/1861
+.. _Pull Request 1862: https://github.com/galaxyproject/galaxy/pull/1862
+.. _Pull Request 1868: https://github.com/galaxyproject/galaxy/pull/1868
+.. _Pull Request 1870: https://github.com/galaxyproject/galaxy/pull/1870
+.. _Pull Request 1874: https://github.com/galaxyproject/galaxy/pull/1874
+.. _Pull Request 1876: https://github.com/galaxyproject/galaxy/pull/1876
+.. _Pull Request 1879: https://github.com/galaxyproject/galaxy/pull/1879
+.. _Pull Request 1883: https://github.com/galaxyproject/galaxy/pull/1883
+.. _Pull Request 1884: https://github.com/galaxyproject/galaxy/pull/1884
+.. _Pull Request 1893: https://github.com/galaxyproject/galaxy/pull/1893
+.. _Pull Request 1895: https://github.com/galaxyproject/galaxy/pull/1895
+.. _Pull Request 1896: https://github.com/galaxyproject/galaxy/pull/1896
+.. _Pull Request 1900: https://github.com/galaxyproject/galaxy/pull/1900
+.. _Pull Request 1901: https://github.com/galaxyproject/galaxy/pull/1901
+.. _Pull Request 1910: https://github.com/galaxyproject/galaxy/pull/1910
+.. _Pull Request 1911: https://github.com/galaxyproject/galaxy/pull/1911
+.. _Pull Request 1912: https://github.com/galaxyproject/galaxy/pull/1912
+.. _Pull Request 1914: https://github.com/galaxyproject/galaxy/pull/1914
+.. _Pull Request 1918: https://github.com/galaxyproject/galaxy/pull/1918
+.. _Pull Request 1926: https://github.com/galaxyproject/galaxy/pull/1926
+.. _Pull Request 1934: https://github.com/galaxyproject/galaxy/pull/1934
+.. _Pull Request 1935: https://github.com/galaxyproject/galaxy/pull/1935
+.. _Pull Request 1936: https://github.com/galaxyproject/galaxy/pull/1936
+.. _Pull Request 1939: https://github.com/galaxyproject/galaxy/pull/1939
+.. _Pull Request 1941: https://github.com/galaxyproject/galaxy/pull/1941
+.. _Pull Request 1942: https://github.com/galaxyproject/galaxy/pull/1942
+.. _Pull Request 1943: https://github.com/galaxyproject/galaxy/pull/1943
+.. _Pull Request 1948: https://github.com/galaxyproject/galaxy/pull/1948
+.. _Pull Request 1949: https://github.com/galaxyproject/galaxy/pull/1949
+.. _Pull Request 1950: https://github.com/galaxyproject/galaxy/pull/1950
+.. _Pull Request 1953: https://github.com/galaxyproject/galaxy/pull/1953
+.. _Pull Request 1954: https://github.com/galaxyproject/galaxy/pull/1954
+.. _Pull Request 1956: https://github.com/galaxyproject/galaxy/pull/1956
+.. _Pull Request 1958: https://github.com/galaxyproject/galaxy/pull/1958
+.. _Pull Request 1959: https://github.com/galaxyproject/galaxy/pull/1959
+.. _Pull Request 1961: https://github.com/galaxyproject/galaxy/pull/1961
+.. _Pull Request 1962: https://github.com/galaxyproject/galaxy/pull/1962
+.. _Pull Request 1963: https://github.com/galaxyproject/galaxy/pull/1963
+.. _Pull Request 1965: https://github.com/galaxyproject/galaxy/pull/1965
+.. _Pull Request 1968: https://github.com/galaxyproject/galaxy/pull/1968
+.. _Pull Request 1969: https://github.com/galaxyproject/galaxy/pull/1969
+.. _Pull Request 1970: https://github.com/galaxyproject/galaxy/pull/1970
+.. _Pull Request 1971: https://github.com/galaxyproject/galaxy/pull/1971
+.. _Pull Request 1972: https://github.com/galaxyproject/galaxy/pull/1972
+.. _Pull Request 1974: https://github.com/galaxyproject/galaxy/pull/1974
+.. _Pull Request 1980: https://github.com/galaxyproject/galaxy/pull/1980
+.. _Pull Request 1983: https://github.com/galaxyproject/galaxy/pull/1983
+.. _Pull Request 1985: https://github.com/galaxyproject/galaxy/pull/1985
+.. _Pull Request 1990: https://github.com/galaxyproject/galaxy/pull/1990
+.. _Pull Request 1991: https://github.com/galaxyproject/galaxy/pull/1991
+.. _Pull Request 1993: https://github.com/galaxyproject/galaxy/pull/1993
+.. _Pull Request 1996: https://github.com/galaxyproject/galaxy/pull/1996
+.. _Pull Request 2000: https://github.com/galaxyproject/galaxy/pull/2000
+.. _Pull Request 2002: https://github.com/galaxyproject/galaxy/pull/2002
+.. _Pull Request 2004: https://github.com/galaxyproject/galaxy/pull/2004
+.. _Pull Request 2008: https://github.com/galaxyproject/galaxy/pull/2008
+.. _Pull Request 2009: https://github.com/galaxyproject/galaxy/pull/2009
+.. _Pull Request 2010: https://github.com/galaxyproject/galaxy/pull/2010
+.. _Pull Request 2011: https://github.com/galaxyproject/galaxy/pull/2011
+.. _Pull Request 2015: https://github.com/galaxyproject/galaxy/pull/2015
+.. _Pull Request 2016: https://github.com/galaxyproject/galaxy/pull/2016
+.. _Pull Request 2017: https://github.com/galaxyproject/galaxy/pull/2017
+.. _Pull Request 2018: https://github.com/galaxyproject/galaxy/pull/2018
+.. _Pull Request 2019: https://github.com/galaxyproject/galaxy/pull/2019
+.. _Pull Request 2020: https://github.com/galaxyproject/galaxy/pull/2020
+.. _Pull Request 2021: https://github.com/galaxyproject/galaxy/pull/2021
+.. _Pull Request 2022: https://github.com/galaxyproject/galaxy/pull/2022
+.. _Pull Request 2025: https://github.com/galaxyproject/galaxy/pull/2025
+.. _Pull Request 2026: https://github.com/galaxyproject/galaxy/pull/2026
+.. _Pull Request 2027: https://github.com/galaxyproject/galaxy/pull/2027
+.. _Pull Request 2028: https://github.com/galaxyproject/galaxy/pull/2028
+.. _Pull Request 2029: https://github.com/galaxyproject/galaxy/pull/2029
+.. _Pull Request 2030: https://github.com/galaxyproject/galaxy/pull/2030
+.. _Pull Request 2031: https://github.com/galaxyproject/galaxy/pull/2031
+.. _Pull Request 2033: https://github.com/galaxyproject/galaxy/pull/2033
+.. _Pull Request 2039: https://github.com/galaxyproject/galaxy/pull/2039
+.. _Pull Request 2040: https://github.com/galaxyproject/galaxy/pull/2040
+.. _Pull Request 2042: https://github.com/galaxyproject/galaxy/pull/2042
+.. _Pull Request 2044: https://github.com/galaxyproject/galaxy/pull/2044
+.. _Pull Request 2045: https://github.com/galaxyproject/galaxy/pull/2045
+.. _Pull Request 2048: https://github.com/galaxyproject/galaxy/pull/2048
+.. _Pull Request 2052: https://github.com/galaxyproject/galaxy/pull/2052
+.. _Pull Request 2055: https://github.com/galaxyproject/galaxy/pull/2055
+.. _Pull Request 2057: https://github.com/galaxyproject/galaxy/pull/2057
+.. _Pull Request 2060: https://github.com/galaxyproject/galaxy/pull/2060
+.. _Pull Request 2061: https://github.com/galaxyproject/galaxy/pull/2061
+.. _Pull Request 2062: https://github.com/galaxyproject/galaxy/pull/2062
+.. _Pull Request 2063: https://github.com/galaxyproject/galaxy/pull/2063
+.. _Pull Request 2065: https://github.com/galaxyproject/galaxy/pull/2065
+.. _Pull Request 2066: https://github.com/galaxyproject/galaxy/pull/2066
+.. _Pull Request 2067: https://github.com/galaxyproject/galaxy/pull/2067
+.. _Pull Request 2069: https://github.com/galaxyproject/galaxy/pull/2069
+.. _Pull Request 2070: https://github.com/galaxyproject/galaxy/pull/2070
+.. _Pull Request 2071: https://github.com/galaxyproject/galaxy/pull/2071
+.. _Pull Request 2074: https://github.com/galaxyproject/galaxy/pull/2074
+.. _Pull Request 2075: https://github.com/galaxyproject/galaxy/pull/2075
+.. _Pull Request 2078: https://github.com/galaxyproject/galaxy/pull/2078
+.. _Pull Request 2082: https://github.com/galaxyproject/galaxy/pull/2082
+.. _Pull Request 2087: https://github.com/galaxyproject/galaxy/pull/2087
+.. _Pull Request 2088: https://github.com/galaxyproject/galaxy/pull/2088
+.. _Pull Request 2089: https://github.com/galaxyproject/galaxy/pull/2089
+.. _Pull Request 2091: https://github.com/galaxyproject/galaxy/pull/2091
+.. _Pull Request 2092: https://github.com/galaxyproject/galaxy/pull/2092
+.. _Pull Request 2093: https://github.com/galaxyproject/galaxy/pull/2093
+.. _Pull Request 2094: https://github.com/galaxyproject/galaxy/pull/2094
+.. _Pull Request 2096: https://github.com/galaxyproject/galaxy/pull/2096
+.. _Pull Request 2099: https://github.com/galaxyproject/galaxy/pull/2099
+.. _Pull Request 2101: https://github.com/galaxyproject/galaxy/pull/2101
+.. _Pull Request 2102: https://github.com/galaxyproject/galaxy/pull/2102
+.. _Pull Request 2104: https://github.com/galaxyproject/galaxy/pull/2104
+.. _Pull Request 2105: https://github.com/galaxyproject/galaxy/pull/2105
+.. _Pull Request 2106: https://github.com/galaxyproject/galaxy/pull/2106
+.. _Pull Request 2107: https://github.com/galaxyproject/galaxy/pull/2107
+.. _Pull Request 2109: https://github.com/galaxyproject/galaxy/pull/2109
+.. _Pull Request 2118: https://github.com/galaxyproject/galaxy/pull/2118
+.. _Pull Request 2122: https://github.com/galaxyproject/galaxy/pull/2122
+.. _Pull Request 2131: https://github.com/galaxyproject/galaxy/pull/2131
+.. _Pull Request 2133: https://github.com/galaxyproject/galaxy/pull/2133
+.. _Pull Request 2138: https://github.com/galaxyproject/galaxy/pull/2138
+.. _Pull Request 2139: https://github.com/galaxyproject/galaxy/pull/2139
+.. _Pull Request 2141: https://github.com/galaxyproject/galaxy/pull/2141
+.. _Pull Request 2149: https://github.com/galaxyproject/galaxy/pull/2149
+.. _Pull Request 2150: https://github.com/galaxyproject/galaxy/pull/2150
+.. _Pull Request 2154: https://github.com/galaxyproject/galaxy/pull/2154
+.. _Pull Request 2156: https://github.com/galaxyproject/galaxy/pull/2156
+.. _Pull Request 2161: https://github.com/galaxyproject/galaxy/pull/2161
+.. _Pull Request 2164: https://github.com/galaxyproject/galaxy/pull/2164
+.. _Pull Request 2166: https://github.com/galaxyproject/galaxy/pull/2166
+.. _Pull Request 2176: https://github.com/galaxyproject/galaxy/pull/2176
+.. _Pull Request 2180: https://github.com/galaxyproject/galaxy/pull/2180
+.. _Pull Request 2182: https://github.com/galaxyproject/galaxy/pull/2182
+.. _Pull Request 2184: https://github.com/galaxyproject/galaxy/pull/2184
+.. _Pull Request 2186: https://github.com/galaxyproject/galaxy/pull/2186
+.. _Pull Request 2189: https://github.com/galaxyproject/galaxy/pull/2189
+.. _Pull Request 2190: https://github.com/galaxyproject/galaxy/pull/2190
+.. _Pull Request 2191: https://github.com/galaxyproject/galaxy/pull/2191
+.. _Pull Request 2195: https://github.com/galaxyproject/galaxy/pull/2195
+.. _Pull Request 2196: https://github.com/galaxyproject/galaxy/pull/2196
+.. _Pull Request 2197: https://github.com/galaxyproject/galaxy/pull/2197
+.. _Pull Request 2201: https://github.com/galaxyproject/galaxy/pull/2201
+.. _Pull Request 2203: https://github.com/galaxyproject/galaxy/pull/2203
+.. _Pull Request 2204: https://github.com/galaxyproject/galaxy/pull/2204
+.. _Pull Request 2208: https://github.com/galaxyproject/galaxy/pull/2208
+.. _Pull Request 2211: https://github.com/galaxyproject/galaxy/pull/2211
+.. _Pull Request 2214: https://github.com/galaxyproject/galaxy/pull/2214
+.. _Pull Request 2218: https://github.com/galaxyproject/galaxy/pull/2218
+.. _Pull Request 2220: https://github.com/galaxyproject/galaxy/pull/2220
+.. _Pull Request 2227: https://github.com/galaxyproject/galaxy/pull/2227
+.. _Pull Request 2234: https://github.com/galaxyproject/galaxy/pull/2234
+.. _Pull Request 2238: https://github.com/galaxyproject/galaxy/pull/2238
+.. _Pull Request 2255: https://github.com/galaxyproject/galaxy/pull/2255
+.. _Pull Request 2265: https://github.com/galaxyproject/galaxy/pull/2265
+.. _Pull Request 2266: https://github.com/galaxyproject/galaxy/pull/2266
+.. _Pull Request 2282: https://github.com/galaxyproject/galaxy/pull/2282
+.. _Pull Request 2284: https://github.com/galaxyproject/galaxy/pull/2284
+.. _Pull Request 2309: https://github.com/galaxyproject/galaxy/pull/2309
+.. _Pull Request 2311: https://github.com/galaxyproject/galaxy/pull/2311
+.. _Pull Request 2312: https://github.com/galaxyproject/galaxy/pull/2312
+.. _Pull Request 2316: https://github.com/galaxyproject/galaxy/pull/2316
+.. _Pull Request 2317: https://github.com/galaxyproject/galaxy/pull/2317
+.. _Pull Request 2333: https://github.com/galaxyproject/galaxy/pull/2333
+.. _Pull Request 2339: https://github.com/galaxyproject/galaxy/pull/2339
+.. _Pull Request 2345: https://github.com/galaxyproject/galaxy/pull/2345
+
diff --git a/doc/source/releases/16.04_announce.rst b/doc/source/releases/16.04_announce.rst
new file mode 100644
index 0000000..e615714
--- /dev/null
+++ b/doc/source/releases/16.04_announce.rst
@@ -0,0 +1,120 @@
+
+===========================================================
+April 2016 Galaxy Release (v 16.04)
+===========================================================
+
+.. include:: _header.rst
+
+Highlights
+===========================================================
+
+**Tool Profile Versions**
+  Tools may now `declare which version <http://planemo.readthedocs.io/en/latest/galaxy_changelog.html#tool-profile-version-pr-1688>`__
+  of Galaxy they require. Tools requiring 16.04 or newer will
+  have new default behaviors (such as using exit code for error detection) that should simplify tool development.
+  See `PR #1688 <https://github.com/galaxyproject/galaxy/pull/1688>`__.
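+
+  For illustration, a tool opts in by declaring the attribute on its root
+  element (a minimal sketch; the tool id, name, and version shown here are
+  hypothetical):
+
+  .. code-block:: xml
+
+      <tool id="my_tool" name="My Tool" version="0.1.0" profile="16.04">
+          <!-- with profile >= 16.04, error detection defaults to the
+               tool's exit code rather than stderr output -->
+          ...
+      </tool>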
+
+**Embedded Pulsar Job Runner**
+  Galaxy can now start a Pulsar application embedded within
+  the Galaxy process itself. This allows using Pulsar's
+  job staging and isolation without requiring a RESTful
+  web service or a message queue. This is enabling
+  `usegalaxy.org <https://usegalaxy.org/>`__ to run jobs
+  on the new `JetStream cloud <http://jetstream-cloud.org/>`__.
+  See `PR #2057 <https://github.com/galaxyproject/galaxy/pull/2057>`__.
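+
+  As a hedged sketch of what enabling this can look like in ``job_conf.xml``
+  (the plugin and destination ids are arbitrary example names, not prescribed
+  values):
+
+  .. code-block:: xml
+
+      <plugins>
+          <plugin id="pulsar_embed" type="runner"
+                  load="galaxy.jobs.runners.pulsar:PulsarEmbeddedJobRunner"/>
+      </plugins>
+      <destinations default="pulsar_embed">
+          <!-- jobs routed here are staged and isolated by the in-process
+               Pulsar application, with no web service or message queue -->
+          <destination id="pulsar_embed" runner="pulsar_embed"/>
+      </destinations>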
+
+**New chemical datatypes**
+  Galaxy now detects and supports many molecular datatypes. See `Pull Request 1941`_.
+  Thanks to `Björn Grüning (@bgruening) <https://github.com/bgruening>`__.
+
+
+.. _Pull Request 1941: https://github.com/galaxyproject/galaxy/pull/1941
+
+`Github <https://github.com/galaxyproject/galaxy>`__
+===========================================================
+
+New
+  .. code-block:: shell
+
+      % git clone -b master https://github.com/galaxyproject/galaxy.git
+
+Update to latest stable release
+  .. code-block:: shell
+
+      % git checkout master && git pull --ff-only origin master
+
+Update to exact version
+  .. code-block:: shell
+
+      % git checkout v16.04
+
+
+`BitBucket <https://bitbucket.org/galaxy/galaxy-dist>`__
+===========================================================
+
+**Note**: Version 16.04 will be the *final* release to be pushed to Bitbucket. More details can be found in the `Deprecation Notices`_ below.
+
+Upgrade
+  .. code-block:: shell
+
+      % hg pull
+      % hg update latest_16.04
+
+
+See `our wiki <https://wiki.galaxyproject.org/Develop/SourceCode>`__ for additional details regarding the source code locations.
+
+Security
+===========================================================
+
+TL;DR: **Only Tool Sheds newer than 16.01 should be deployed from now on**
+(i.e. with commit 449098d8b14b45269be106f6410c0b9145c51d50 from Mar 30 present).
+
+Due to the security fixes on the Mercurial_ side, we had to update the hg version that
+both Galaxy and the Tool Shed depend on, because the fixes have not been backported to
+older versions. However, this has broken the Tool Shed's ``hg push`` functionality, as
+Mercurial changed its bundle format in a non-compatible manner. Given that we deprecated
+the ``hg push`` API functionality back in 15.10, we decided to disable it completely
+from 16.01 (retroactively).
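+
+To check whether a deployment's Git checkout already contains the fix, commit
+ancestry can be tested (a minimal sketch; the checkout path is a placeholder):
+
+.. code-block:: shell
+
+    % cd /srv/toolshed/galaxy
+    % git merge-base --is-ancestor 449098d8b14b45269be106f6410c0b9145c51d50 HEAD && echo "fix present"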
+
+.. _Mercurial: https://www.mercurial-scm.org/wiki/WhatsNew#Mercurial_3.7.3_.282016-3-29.29
+
+Deprecation Notices
+===========================================================
+
+API deprecations
+----------------
+
+API for history contents, index:
+  * **types**: no longer a valid parameter, but accessible using ``?q=history_content_type&qv=[dataset | dataset_collection]``
+  * **ids**: no longer a valid URL parameter, but accessible using ``?q=type_id-in&qv=<e.g. dataset-abcdef123,dataset_collection-987fedcba, ...>``
+  * **deleted** and **visible**: no longer parameters, but still accessible using ``q=deleted&qv=[True | False]&q=visible&qv=[True | False]``
+
+API for history contents, show:
+  * **api_type**: removed and unavailable (was :code:`file` and constant across HDAs)
+  * **display_apps**, **display_types**, **visualization**: removed from the default :code:`detailed` view but still available by calling url with ``?keys=display_apps,display_types,visualization``
+
+API histories (removed from the available serialized data on all calls):
+  * **state, state_details, state_ids** - can be replaced by specifically requesting a single array of contents, each containing :code:`{ id, state, deleted, visible }`
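+
+As an illustration of the index changes above, the removed ``types`` parameter
+maps onto the generic filter-query parameters roughly as follows (a hedged
+sketch; the host, history id, and API key are placeholders):
+
+.. code-block:: shell
+
+    # before (removed): GET /api/histories/<history_id>/contents?types=dataset
+    % curl "https://galaxy.example.org/api/histories/<history_id>/contents?q=history_content_type&qv=dataset&key=<api_key>"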
+
+Galaxy no longer on Bitbucket
+-----------------------------
+
+Galaxy moved its code and development activities from Bitbucket to GitHub in early 2015. Since then, releases have been mirrored back to Bitbucket; after this release, however, no new changes will be pushed there. Anyone still receiving updates to a Galaxy server via Bitbucket and Mercurial should switch to GitHub and Git. This can be done using the following process (a condensed shell sketch follows the list):
+
+1. Backup everything.
+2. Find what branch and commit your Mercurial Galaxy is at using :code:`hg log -b $(hg branch)`
+3. :code:`git clone https://github.com/galaxyproject/galaxy` in a temporary directory.
+4. Find the corresponding commit in the cloned Git repository on the corresponding branch (Bitbucket default->GitHub dev; Bitbucket stable->GitHub master).
+5. Checkout the GitHub repository at the commit you found in the previous step.
+6. Backup your .hg/ folder.
+7. Replace your .hg/ folder with the .git/ folder from the new checkout.
+8. Your Galaxy is now switched to Git. Unless you have local changes, :code:`git status` should report no modifications.
+9. You can now update to the latest Git revision using :code:`git pull`
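+
+A condensed shell sketch of the steps above (the paths and the matching commit
+are placeholders to adapt, not prescribed values):
+
+.. code-block:: shell
+
+    % cd /srv/galaxy                                # your Mercurial checkout
+    % hg log -b "$(hg branch)" -l 1                 # step 2: note the commit
+    % git clone https://github.com/galaxyproject/galaxy /tmp/galaxy-git
+    % cd /tmp/galaxy-git && git checkout <matching-commit>    # steps 4-5
+    % cd /srv/galaxy && mv .hg /backups/galaxy.hg   # step 6: back up .hg/
+    % cp -a /tmp/galaxy-git/.git .                  # step 7: swap in .git/
+    % git status                                    # step 8: should be clean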
+
+Release Notes
+===========================================================
+
+.. include:: 16.04.rst
+   :start-after: announce_start
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/16.07.rst b/doc/source/releases/16.07.rst
new file mode 100644
index 0000000..911f6d0
--- /dev/null
+++ b/doc/source/releases/16.07.rst
@@ -0,0 +1,787 @@
+
+.. to_doc
+
+16.07
+===============================
+
+.. announce_start
+
+Highlighted Enhancements
+-------------------------------
+
+* Implement an Azure backend for the Galaxy objectstore
+  (thanks to `@zfrenchee <https://github.com/zfrenchee>`__.)
+  `Pull Request 2621`_
+* HTCondor Docker Integration
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2278`_
+* Implement a Kubernetes job runner
+  (thanks to `@pcm32 <https://github.com/pcm32>`__.)
+  `Pull Request 2314`_, `Pull Request 2498`_, `Pull Request 2559`_,
+  `Pull Request 2578`_, `Pull Request 2616`_, `Pull Request 2528`_
+* Implement collection operations - tool-like entities allowing users
+  to zip, unzip, flatten, and filter failed datasets out of collections.
+  (You may need to sync your ``tool_conf.xml`` against sample.)
+  `Pull Request 2434`_
+* Allow history panel to display list of lists dataset collections.
+  `Pull Request 2613`_
+* Add a Tool Shed API endpoint method to check the current user.
+  `Pull Request 2243`_
+* Add a Galaxy API endpoint to update a user object.
+  `Pull Request 2595`_
+* Improvements to toolbox search.
+  `Pull Request 2273`_
+* New pages and general improvements to the Reports application
+  (thanks to `@BalthazarPavot <https://github.com/BalthazarPavot>`__.)
+  `Pull Request 2664`_
+* Nicer bug report emails
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2143`_
+* Tools with auto_format='true' now connectable in workflow editor
+  (thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__.)
+  `Pull Request 2144`_
+* Drop "Galaxy pip" in favor of manylinux1 wheels (which are supported in pip
+  >= 8.1).
+  `Pull Request 2383`_
+* Allow tool form to listen to history
+  (thanks to `@nturaga <https://github.com/nturaga>`__.)
+  `Pull Request 2476`_
+* Add `EDAM <http://edamontology.org/page>`__ operations to tools and expose in API
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__, `@nsoranzo <https://github.com/nsoranzo>`__ and `@odoppelt <https://github.com/odoppelt>`__.)
+  `Pull Request 2379`_
+* Add support to PDBQT molecule datatype
+  (thanks to `@leobiscassi <https://github.com/leobiscassi>`__.)
+  `Pull Request 2452`_
+* Add the trackhub composite datatype
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__.)
+  `Pull Request 2348`_, `Pull Request 2646`_, `Pull Request 2701`_
+* Add the Mothur datatypes
+  (thanks to `@shiltemann <https://github.com/shiltemann>`__.)
+  `Pull Request 2038`_, `Pull Request 2429`_
+* Add the dynamic tool destination tool to Galaxy for defining finer grained
+  tool destination rules.
+  (thanks to `@ericenns <https://github.com/ericenns>`__.)
+  `Pull Request 2579`_
+* Galaxy can now install Conda dependencies in addition to tool shed
+  dependencies during repository installations
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__
+  and `@bwlang <https://github.com/bwlang>`__.)
+  `Pull Request 2554`_
+* Implement the Galaxy Communication Server (a simple in-Galaxy chat server)
+  (thanks to `@anuprulez <https://github.com/anuprulez>`__
+  and `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2515`_
+
+Enhancements
+-------------------------------
+
+* Install Conda at startup; enable conda_auto_init in sample conf
+  `Pull Request 2759`_
+* Add ``filter_failed_datasets_from_collection.py`` script to ``scripts/``
+  (thanks to `@chambm <https://github.com/chambm>`__.)
+  `Pull Request 2358`_
+* Enhanced workflow run form using newer client forms architecture.
+  `Pull Request 2077`_
+* Tweaks to various library functions for galaxy-lib.
+  `Pull Request 2110`_, `Pull Request 2494`_, `Pull Request 2354`_,
+  `Pull Request 2414`_, `Pull Request 2436`_
+* Automate and clarify more of the release process.
+  `Pull Request 2117`_,  `Pull Request 2366`_, `Pull Request 2372`_,
+  `Pull Request 2388`_
+* Update template to include the release of the Docker container
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2378`_
+* Add ``server_starttime`` to configuration API.
+  `Pull Request 2124`_
+* Show line breaks in commit log in the Tool Shed
+  (thanks to `@nturaga <https://github.com/nturaga>`__.)
+  `Pull Request 2142`_
+* Replacing the ``_getIndex`` method by UnderscoreJS method ``_.findIndex()``
+  (thanks to `@anuprulez <https://github.com/anuprulez>`__.)
+  `Pull Request 2157`_
+* Small enhancements to user activation process.
+  `Pull Request 2158`_
+* Implement ``single_user = <email>`` config option.
+  `Pull Request 2165`_
+* More documentation and flexibility for FTP imports.
+  `Pull Request 2168`_
+* Make stats prefix configurable and add a conditional wheel requirement.
+  `Pull Request 2171`_
+* Fix running tool tests if remote user middleware is enabled.
+  `Pull Request 2173`_
+* Improve handling of a missing R environment during Tool Shed dependency
+  installations.
+  `Pull Request 2215`_
+* Quote some parameters in the trim tool command
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2248`_
+* Remove overly chatty tool dependency resolution logging statement.
+  `Pull Request 2254`_
+* Update the list of contributors.
+  `Pull Request 2256`_, `Pull Request 2257`_, `Pull Request 2258`_
+* Add grunt watcher on .less files
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__.)
+  `Pull Request 2274`_
+* Remove dependency on custom Whoosh (requirements update).
+  `Pull Request 2276`_
+* Various enhancements for library functions related to tools.
+  `Pull Request 2279`_
+* Shift ``asbool`` handling in datatypes API so that an appropriate error message is now returned.
+  `Pull Request 2286`_
+* More small fixes from downstream projects.
+  `Pull Request 2292`_
+* For published workflow display, provide a better error when the workflow or user cannot be found
+  `Pull Request 2302`_
+* Bump sqlite3 version
+  (thanks to `@matthdsm <https://github.com/matthdsm>`__.)
+  `Pull Request 2307`_
+* Show collection counts in collection and history collection views.
+  `Pull Request 2326`_
+* Various client library updates.
+  `Pull Request 2328`_
+* Revise form data visitor.
+  `Pull Request 2330`_
+* Update using xhr date header.
+  `Pull Request 2335`_
+* Implement an API for managing history/sharable model user sharing.
+  `Pull Request 2340`_
+* Revise portlet view, integrate collapsible option.
+  `Pull Request 2360`_
+* Fix issue with lowercasing in datatypes/registry.py
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__.)
+  `Pull Request 2361`_
+* Improve composite output tool test.
+  `Pull Request 2363`_
+* Upgrade shared ui libraries, add test cases
+  `Pull Request 2368`_
+* Throughout backend use open() instead of file() for Python 3
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2370`_
+* Remove backported importlib and ordereddict.
+  `Pull Request 2380`_
+* Note on updating tool testing
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2381`_
+* Docker swarm support for Galaxy Interactive Environments (GIEs)
+  `Pull Request 2386`_, `Pull Request 2392`_
+* Add support for EDAM data annotations to datatypes
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__, `@bgruening <https://github.com/bgruening>`__, `@hmenager <https://github.com/hmenager>`__ and `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2387`_
+* Add a new workflow building mode variation which does access the history
+  `Pull Request 2390`_
+* Add conda search function to ``conda_util`` library function
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2395`_
+* Add history navigation options for dataset frames to scratch book
+  `Pull Request 2396`_
+* Add EDAM topics to tools and expose in API
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2397`_
+* Update SLURM runner so when scontrol fails, Galaxy attempts to get job
+  state with sacct
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2403`_
+* Enhancements to tool test output verification.
+  `Pull Request 2409`_
+* Show history dataset id (hid) in dataset 'view details' page.
+  `Pull Request 2411`_
+* Updates and fixes for the run workflow UI elements.
+  `Pull Request 2416`_
+* Raise ``Exception`` if ``watch_tools`` has an unrecognized value
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2418`_
+* Use ``log.warning()`` instead of deprecated ``log.warn()`` (for Python 3)
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2420`_
+* Minor updates to documentation.
+  `Pull Request 2427`_
+* Correct various EDAM data/format annotations in datatypes
+  (thanks to `@hmenager <https://github.com/hmenager>`__.)
+  `Pull Request 2428`_
+* Fix ``$defaults`` according to `#2430
+  <https://github.com/galaxyproject/galaxy/issues/2430>`__
+  (thanks to `@ThomasWollmann <https://github.com/ThomasWollmann>`__.)
+  `Pull Request 2438`_
+* Properly list subworkflows in run workflow form.
+  `Pull Request 2440`_
+* Make some files compatible with Python3
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2453`_
+* Add parameter expansion caller and test cases as preparation for workflow
+  API extension.
+  `Pull Request 2464`_
+* Set a default for ``tool_dependencies_dir``.
+  `Pull Request 2480`_
+* Add a password expiration config option (resolves `#2485
+  <https://github.com/galaxyproject/galaxy/issues/2485>`__)
+  (thanks to `@benfulton <https://github.com/benfulton>`__.)
+  `Pull Request 2486`_
+* Display TIFF format in upload
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2516`_
+* Use ``self.decode_id`` instead of ``trans.security.decode_id``
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2529`_
+* Charts revision.
+  `Pull Request 2531`_
+* Add ``Makefile`` target for fetching updated bower dependencies.
+  `Pull Request 2534`_
+* Make conda autoinstall of packages safer.
+  (thanks to `@pvanheus <https://github.com/pvanheus>`__.)
+  `Pull Request 2538`_
+* Updates to Interactive Environment docs
+  (thanks to `@pvanheus <https://github.com/pvanheus>`__.)
+  `Pull Request 2543`_
+* Allow displaying bigWig files in IGV
+  (thanks to `@dpryan79 <https://github.com/dpryan79>`__.)
+  `Pull Request 2545`_
+* Allow users to purge datasets by default.
+  `Pull Request 2553`_
+* Add Jupyter IE to the allowed visualizations for BAM datasets
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2572`_
+* Python3: port 215 files
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__, `@tshtatland <https://github.com/tshtatland>`__, `@xiaoqian1984 <https://github.com/xiaoqian1984>`__, `@alanryanwang <https://github.com/alanryanwang>`__ and `@ericenns <https://github.com/ericenns>`__.)
+  `Pull Request 2574`_
+* Add python-ldap module to conditional requirements
+  (thanks to `@abretaud <https://github.com/abretaud>`__.)
+  `Pull Request 2576`_
+* Update gsummary.xml
+  (thanks to `@lecorguille <https://github.com/lecorguille>`__.)
+  `Pull Request 2590`_
+* Add Warning when Conda is installed
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__.)
+  `Pull Request 2596`_
+* Added the neostore (neo4j db) datatype for the neo4jdb dataset
+  (thanks to `@zipho <https://github.com/zipho>`__.)
+  `Pull Request 2605`_
+* Fix smart_str for Python 3. Add doctests
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2608`_
+* Added Neo4j IE
+  (thanks to `@thobalose <https://github.com/thobalose>`__.)
+  `Pull Request 2610`_
+* Committing format changes to the user impersonation dropdown.
+  (thanks to `@sszakony <https://github.com/sszakony>`__.)
+  `Pull Request 2612`_
+* Use pagination and filtering for select2 fields
+  `Pull Request 2615`_
+* Update workflow share dropdown width
+  (thanks to `@sszakony <https://github.com/sszakony>`__.)
+  `Pull Request 2617`_
+* Graph visualization: add parsers, change datatypes
+  `Pull Request 2620`_
+* Fix certain combos of dynamic deeply nested collections in workflows.
+  `Pull Request 2634`_
+* add tar to datatypes_conf.xml
+  (thanks to `@mariabernard <https://github.com/mariabernard>`__.)
+  `Pull Request 2636`_
+* Optimize get_file_peek()
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2637`_
+* Move workflow run and history options to top, add loading indicator
+  `Pull Request 2643`_
+* Enhance workflow run button to indicate workflow loading progress, keep
+  header fixed
+  `Pull Request 2647`_
+* Allow uploading strict BED datatypes.
+  `Pull Request 2648`_
+* Reduce server-side refresh requests for changes in data-associated input
+  fields
+  `Pull Request 2652`_
+* Add import order linting using tox and flake8-import-order.
+  `Pull Request 2661`_
+* Add hg38 to main for ucsc_build_sites.txt.sample
+  `Pull Request 2670`_
+* Extend the list of files tested with flake8-import-order to all files
+  already passing
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2682`_
+* List resolver Tool Shed packages
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2750`_
+
+
+.. small_enhancement
+
+* Remove buildbot remnants
+  `Pull Request 2125`_
+* Try pushing again when we get unexpected exception (test fix)
+  `Pull Request 2130`_
+* Refactor checking tool files toward reuse with workflows in Planemo.
+  `Pull Request 2183`_
+* Rename/refactor Utils.validate() to Utils.isEmpty()
+  `Pull Request 2239`_
+* Cleanup of the install and test framework.
+  `Pull Request 2249`_
+* Clean up TS and Reports configs
+  `Pull Request 2267`_
+* Move specific methods from shed_util_common.py to specific_util.py
+  `Pull Request 2269`_
+* Revise selection, options and drilldown wrappers, add tests
+  `Pull Request 2305`_
+* Visualizations: remove graphview
+  `Pull Request 2329`_
+* Update qunit and sinon
+  `Pull Request 2338`_
+* Build job success messages within tool form view
+  `Pull Request 2355`_
+* Cleanup of E203 primarily, and some other misc formatting
+  `Pull Request 2364`_
+* Isolate the data libraries css to affect only the libraries app
+  `Pull Request 2444`_
+* Re-add get_updated_changeset_revisions_from_tool_shed() method
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2472`_
+* Workaround for circular import. Refactor.
+  `Pull Request 2477`_
+* run_tests.sh: Clarify when/where the report file is created.
+  (thanks to `@NickSto <https://github.com/NickSto>`__.)
+  `Pull Request 2564`_
+* Reroute consistent workflow endpoint to internal API
+  `Pull Request 2622`_
+* Add tool test for what happens when output filter throws exception.
+  `Pull Request 2631`_
+* Ignore .tox and eggs directories for flake8
+  `Pull Request 2650`_
+* Reorder base templates meta tags
+  `Pull Request 2651`_
+* Import pyparsing in testing so twill's older version doesn't get loaded.
+  `Pull Request 2656`_
+* Change default refresh mode for form views
+  `Pull Request 2667`_
+* Fix unit tests
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2691`_
+* Create a uWSGI postfork function registry and start the tool conf watcher
+  thread post-fork
+  `Pull Request 2774`_
+
+
+Fixes
+-------------------------------
+
+.. major_bug
+
+* Do not encode dict items that start with ``metadata_`` in return value of
+  api/tools/create.
+  `Pull Request 2459`_
+* Generate metadata in separate conda environment
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2253`_
+* Fix to use "$NCPUS" if defined to set GALAXY_SLOTS
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2145`_
+
+.. bug
+
+* Fix blocking issue check
+  `Pull Request 2113`_
+* Correctly check set_environment declarations when updating repository.
+  `Pull Request 2116`_
+* Hide hidden data parameters in workflow editor
+  `Pull Request 2120`_
+* Place $headers before integrity_check
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2146`_
+* Fix bug when determining locale
+  (thanks to `@xiaojay <https://github.com/xiaojay>`__.)
+  `Pull Request 2213`_
+* Add pyparsing, a dependency of svgwrite. Unpin mercurial.
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2223`_
+* Sanitize poorly escaped input value in run workflow form
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2231`_
+* Fix multi-data collection reductions in repeat blocks.
+  `Pull Request 2233`_
+* Encode collection reduce in serializable fashion
+  `Pull Request 2241`_
+* Cast everything to a string
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2252`_
+* Use standard model.orm.now for toolshed's password reset token datetime math
+  `Pull Request 2261`_
+* Fix UI bugs in tours
+  (thanks to `@ThomasWollmann <https://github.com/ThomasWollmann>`__.)
+  `Pull Request 2262`_
+* Preliminary fix for displaying raw text/html
+  `Pull Request 2263`_
+* Correctly use defaults specified in galaxy.ini.sample for statsd.
+  `Pull Request 2268`_
+* Fix encoding issues in test/unit/test_objectstore.py.
+  `Pull Request 2280`_
+* Workflow sharing slug edit fix.
+  `Pull Request 2293`_
+* Always remove the working directory after creating the capsule archive.
+  `Pull Request 2294`_
+* Update check for Conda env.
+  (thanks to `@matthdsm <https://github.com/matthdsm>`__.)
+  `Pull Request 2295`_
+* Make grunt watch (for scripts) work again; settings were being overwritten by the new watch-style command
+  `Pull Request 2296`_
+* Monkeypatch Whoosh to use mkdtemp rather than a fixed temporary directory
+  `Pull Request 2310`_
+* Build: remove redundant webpack step in makefile
+  `Pull Request 2327`_
+* ``unicodify`` has different semantics than ``str``, causing a bug reporter bug
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2336`_
+* Wrap statsd timing middleware more closely around the core application
+  `Pull Request 2342`_
+* UI & API: fix display of inaccessible datasets
+  `Pull Request 2351`_
+* Update README to remove Python 2.6
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__.)
+  `Pull Request 2367`_
+* Revise asserts triggering false exceptions for dynamic parameters
+  `Pull Request 2374`_
+* Fix the installed conda version to 3.19.3.
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2394`_
+* Check variable's presence before calling str methods
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2408`_
+* UI, history: correctly handle index API defaults
+  `Pull Request 2413`_
+* Fix to 4a61a4c: add HIDs for dataset inputs
+  `Pull Request 2415`_
+* Wrap bare string raises with a generic ``Exception()``
+  `Pull Request 2421`_
+* Fix raven.js import order in galaxy.panels.mako
+  `Pull Request 2445`_
+* Fix usage of super() in the mothur datatypes.
+  (thanks to `@bgruening <https://github.com/bgruening>`__.)
+  `Pull Request 2457`_
+* Make pykube an optional dependency since we don't have wheels for it.
+  `Pull Request 2468`_
+* Fix markup for button links on the sharing template
+  `Pull Request 2481`_
+* Fix image generation for workflows with missing tools
+  `Pull Request 2488`_
+* Mothur datatypes: don't generate an error for pairwise distance matrices
+  (thanks to `@shiltemann <https://github.com/shiltemann>`__.)
+  `Pull Request 2489`_
+* Toolbox filter fixes
+  `Pull Request 2499`_
+* Fix Post Job Action problem
+  `Pull Request 2505`_
+* Copy PJAs when importing a workflow.
+  `Pull Request 2508`_
+* Fix two errors in sff_extract.py detected by flake8
+  `Pull Request 2509`_
+* Fix HTML errors on 2 templates
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__.)
+  `Pull Request 2524`_
+* Fix tabular display chunking
+  `Pull Request 2527`_
+* Fix dataset matcher when no tool available.
+  (thanks to `@zipho <https://github.com/zipho>`__.)
+  `Pull Request 2540`_
+* Initialize objectstore for fix_set_dataset_sizes script
+  `Pull Request 2544`_
+* Fix old bug in non-allowed_images.yml GIEs
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2548`_
+* More defensive GIE Proxying
+  (thanks to `@erasche <https://github.com/erasche>`__.)
+  `Pull Request 2552`_
+* Fixes for showing min/max labels in Trackster.
+  `Pull Request 2555`_
+* Fix tag_autocomplete issue in workflow editor.
+  `Pull Request 2569`_
+* Fix element_identifier unavailable with data_collection input param
+  (thanks to `@abretaud <https://github.com/abretaud>`__.)
+  `Pull Request 2570`_
+* Ignore W503 (line break before binary operator) as PEP 8 has reversed its
+  position on this.
+  `Pull Request 2594`_
+* Fix hex vs float parsing in request body
+  `Pull Request 2597`_
+* Fixes for workflow extraction of mapping collection jobs.
+  `Pull Request 2625`_
+* Workflow SVG generation fix
+  `Pull Request 2628`_
+* Properly initialize datasets when workflow is loaded for execution
+  `Pull Request 2632`_
+* Fix typo in summary statistics tool
+  (thanks to `@nturaga <https://github.com/nturaga>`__.)
+  `Pull Request 2639`_
+* Set allow_duplicate_entries="False" for built-in Tool Data Tables.
+  `Pull Request 2644`_
+* Ensure Galaxy's python binary is used for metadata generation.
+  `Pull Request 2660`_
+* Fix an error with getting the size of an object from Azure
+  `Pull Request 2678`_
+* Limit creation of multitudes of default, empty histories.
+  `Pull Request 2684`_
+* Fix the sanitization of ``None`` values in utils
+  `Pull Request 2693`_
+* Fix metadata collection on workdir outputs for Pulsar.
+  `Pull Request 2721`_
+* Allow link buttons to trigger regular click events
+  `Pull Request 2743`_
+* Fix unused href parameter when adding scratchbook frames
+  `Pull Request 2744`_
+* Update common_startup.sh to require python2.7 in .venv
+  (thanks to `@BenjaminHCCarr <https://github.com/BenjaminHCCarr>`__.)
+  `Pull Request 2756`_
+* Separate styles of new and old libraries, fixing various UI bugs
+  `Pull Request 2780`_
+* Make conda install and resolver status more robust
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__.)
+  `Pull Request 2789`_
+* Smart string handling on the WSGI response; this fixes uWSGI blanking out when handling endpoints that return Unicode
+  `Pull Request 2630`_
+* Do not instantiate the raven (sentry) client or tool conf watchdog threads
+  until uWSGI postfork
+  `Pull Request 2792`_
+* API, history contents: allow ``source`` in payload
+  `Pull Request 2809`_
+
+.. github_links
+.. _Pull Request 2038: https://github.com/galaxyproject/galaxy/pull/2038
+.. _Pull Request 2077: https://github.com/galaxyproject/galaxy/pull/2077
+.. _Pull Request 2079: https://github.com/galaxyproject/galaxy/pull/2079
+.. _Pull Request 2110: https://github.com/galaxyproject/galaxy/pull/2110
+.. _Pull Request 2113: https://github.com/galaxyproject/galaxy/pull/2113
+.. _Pull Request 2116: https://github.com/galaxyproject/galaxy/pull/2116
+.. _Pull Request 2117: https://github.com/galaxyproject/galaxy/pull/2117
+.. _Pull Request 2119: https://github.com/galaxyproject/galaxy/pull/2119
+.. _Pull Request 2120: https://github.com/galaxyproject/galaxy/pull/2120
+.. _Pull Request 2121: https://github.com/galaxyproject/galaxy/pull/2121
+.. _Pull Request 2124: https://github.com/galaxyproject/galaxy/pull/2124
+.. _Pull Request 2125: https://github.com/galaxyproject/galaxy/pull/2125
+.. _Pull Request 2130: https://github.com/galaxyproject/galaxy/pull/2130
+.. _Pull Request 2142: https://github.com/galaxyproject/galaxy/pull/2142
+.. _Pull Request 2143: https://github.com/galaxyproject/galaxy/pull/2143
+.. _Pull Request 2144: https://github.com/galaxyproject/galaxy/pull/2144
+.. _Pull Request 2145: https://github.com/galaxyproject/galaxy/pull/2145
+.. _Pull Request 2146: https://github.com/galaxyproject/galaxy/pull/2146
+.. _Pull Request 2157: https://github.com/galaxyproject/galaxy/pull/2157
+.. _Pull Request 2158: https://github.com/galaxyproject/galaxy/pull/2158
+.. _Pull Request 2165: https://github.com/galaxyproject/galaxy/pull/2165
+.. _Pull Request 2168: https://github.com/galaxyproject/galaxy/pull/2168
+.. _Pull Request 2171: https://github.com/galaxyproject/galaxy/pull/2171
+.. _Pull Request 2173: https://github.com/galaxyproject/galaxy/pull/2173
+.. _Pull Request 2183: https://github.com/galaxyproject/galaxy/pull/2183
+.. _Pull Request 2213: https://github.com/galaxyproject/galaxy/pull/2213
+.. _Pull Request 2215: https://github.com/galaxyproject/galaxy/pull/2215
+.. _Pull Request 2223: https://github.com/galaxyproject/galaxy/pull/2223
+.. _Pull Request 2231: https://github.com/galaxyproject/galaxy/pull/2231
+.. _Pull Request 2233: https://github.com/galaxyproject/galaxy/pull/2233
+.. _Pull Request 2239: https://github.com/galaxyproject/galaxy/pull/2239
+.. _Pull Request 2241: https://github.com/galaxyproject/galaxy/pull/2241
+.. _Pull Request 2243: https://github.com/galaxyproject/galaxy/pull/2243
+.. _Pull Request 2248: https://github.com/galaxyproject/galaxy/pull/2248
+.. _Pull Request 2249: https://github.com/galaxyproject/galaxy/pull/2249
+.. _Pull Request 2252: https://github.com/galaxyproject/galaxy/pull/2252
+.. _Pull Request 2253: https://github.com/galaxyproject/galaxy/pull/2253
+.. _Pull Request 2254: https://github.com/galaxyproject/galaxy/pull/2254
+.. _Pull Request 2256: https://github.com/galaxyproject/galaxy/pull/2256
+.. _Pull Request 2257: https://github.com/galaxyproject/galaxy/pull/2257
+.. _Pull Request 2258: https://github.com/galaxyproject/galaxy/pull/2258
+.. _Pull Request 2261: https://github.com/galaxyproject/galaxy/pull/2261
+.. _Pull Request 2262: https://github.com/galaxyproject/galaxy/pull/2262
+.. _Pull Request 2263: https://github.com/galaxyproject/galaxy/pull/2263
+.. _Pull Request 2267: https://github.com/galaxyproject/galaxy/pull/2267
+.. _Pull Request 2268: https://github.com/galaxyproject/galaxy/pull/2268
+.. _Pull Request 2269: https://github.com/galaxyproject/galaxy/pull/2269
+.. _Pull Request 2271: https://github.com/galaxyproject/galaxy/pull/2271
+.. _Pull Request 2273: https://github.com/galaxyproject/galaxy/pull/2273
+.. _Pull Request 2274: https://github.com/galaxyproject/galaxy/pull/2274
+.. _Pull Request 2276: https://github.com/galaxyproject/galaxy/pull/2276
+.. _Pull Request 2278: https://github.com/galaxyproject/galaxy/pull/2278
+.. _Pull Request 2279: https://github.com/galaxyproject/galaxy/pull/2279
+.. _Pull Request 2280: https://github.com/galaxyproject/galaxy/pull/2280
+.. _Pull Request 2286: https://github.com/galaxyproject/galaxy/pull/2286
+.. _Pull Request 2289: https://github.com/galaxyproject/galaxy/pull/2289
+.. _Pull Request 2292: https://github.com/galaxyproject/galaxy/pull/2292
+.. _Pull Request 2293: https://github.com/galaxyproject/galaxy/pull/2293
+.. _Pull Request 2294: https://github.com/galaxyproject/galaxy/pull/2294
+.. _Pull Request 2295: https://github.com/galaxyproject/galaxy/pull/2295
+.. _Pull Request 2296: https://github.com/galaxyproject/galaxy/pull/2296
+.. _Pull Request 2302: https://github.com/galaxyproject/galaxy/pull/2302
+.. _Pull Request 2305: https://github.com/galaxyproject/galaxy/pull/2305
+.. _Pull Request 2307: https://github.com/galaxyproject/galaxy/pull/2307
+.. _Pull Request 2308: https://github.com/galaxyproject/galaxy/pull/2308
+.. _Pull Request 2310: https://github.com/galaxyproject/galaxy/pull/2310
+.. _Pull Request 2314: https://github.com/galaxyproject/galaxy/pull/2314
+.. _Pull Request 2326: https://github.com/galaxyproject/galaxy/pull/2326
+.. _Pull Request 2327: https://github.com/galaxyproject/galaxy/pull/2327
+.. _Pull Request 2328: https://github.com/galaxyproject/galaxy/pull/2328
+.. _Pull Request 2329: https://github.com/galaxyproject/galaxy/pull/2329
+.. _Pull Request 2330: https://github.com/galaxyproject/galaxy/pull/2330
+.. _Pull Request 2335: https://github.com/galaxyproject/galaxy/pull/2335
+.. _Pull Request 2336: https://github.com/galaxyproject/galaxy/pull/2336
+.. _Pull Request 2338: https://github.com/galaxyproject/galaxy/pull/2338
+.. _Pull Request 2340: https://github.com/galaxyproject/galaxy/pull/2340
+.. _Pull Request 2342: https://github.com/galaxyproject/galaxy/pull/2342
+.. _Pull Request 2348: https://github.com/galaxyproject/galaxy/pull/2348
+.. _Pull Request 2351: https://github.com/galaxyproject/galaxy/pull/2351
+.. _Pull Request 2354: https://github.com/galaxyproject/galaxy/pull/2354
+.. _Pull Request 2355: https://github.com/galaxyproject/galaxy/pull/2355
+.. _Pull Request 2358: https://github.com/galaxyproject/galaxy/pull/2358
+.. _Pull Request 2360: https://github.com/galaxyproject/galaxy/pull/2360
+.. _Pull Request 2361: https://github.com/galaxyproject/galaxy/pull/2361
+.. _Pull Request 2363: https://github.com/galaxyproject/galaxy/pull/2363
+.. _Pull Request 2364: https://github.com/galaxyproject/galaxy/pull/2364
+.. _Pull Request 2366: https://github.com/galaxyproject/galaxy/pull/2366
+.. _Pull Request 2367: https://github.com/galaxyproject/galaxy/pull/2367
+.. _Pull Request 2368: https://github.com/galaxyproject/galaxy/pull/2368
+.. _Pull Request 2370: https://github.com/galaxyproject/galaxy/pull/2370
+.. _Pull Request 2372: https://github.com/galaxyproject/galaxy/pull/2372
+.. _Pull Request 2374: https://github.com/galaxyproject/galaxy/pull/2374
+.. _Pull Request 2378: https://github.com/galaxyproject/galaxy/pull/2378
+.. _Pull Request 2379: https://github.com/galaxyproject/galaxy/pull/2379
+.. _Pull Request 2380: https://github.com/galaxyproject/galaxy/pull/2380
+.. _Pull Request 2381: https://github.com/galaxyproject/galaxy/pull/2381
+.. _Pull Request 2383: https://github.com/galaxyproject/galaxy/pull/2383
+.. _Pull Request 2386: https://github.com/galaxyproject/galaxy/pull/2386
+.. _Pull Request 2387: https://github.com/galaxyproject/galaxy/pull/2387
+.. _Pull Request 2388: https://github.com/galaxyproject/galaxy/pull/2388
+.. _Pull Request 2390: https://github.com/galaxyproject/galaxy/pull/2390
+.. _Pull Request 2391: https://github.com/galaxyproject/galaxy/pull/2391
+.. _Pull Request 2392: https://github.com/galaxyproject/galaxy/pull/2392
+.. _Pull Request 2394: https://github.com/galaxyproject/galaxy/pull/2394
+.. _Pull Request 2395: https://github.com/galaxyproject/galaxy/pull/2395
+.. _Pull Request 2396: https://github.com/galaxyproject/galaxy/pull/2396
+.. _Pull Request 2397: https://github.com/galaxyproject/galaxy/pull/2397
+.. _Pull Request 2403: https://github.com/galaxyproject/galaxy/pull/2403
+.. _Pull Request 2408: https://github.com/galaxyproject/galaxy/pull/2408
+.. _Pull Request 2409: https://github.com/galaxyproject/galaxy/pull/2409
+.. _Pull Request 2410: https://github.com/galaxyproject/galaxy/pull/2410
+.. _Pull Request 2411: https://github.com/galaxyproject/galaxy/pull/2411
+.. _Pull Request 2412: https://github.com/galaxyproject/galaxy/pull/2412
+.. _Pull Request 2413: https://github.com/galaxyproject/galaxy/pull/2413
+.. _Pull Request 2414: https://github.com/galaxyproject/galaxy/pull/2414
+.. _Pull Request 2415: https://github.com/galaxyproject/galaxy/pull/2415
+.. _Pull Request 2416: https://github.com/galaxyproject/galaxy/pull/2416
+.. _Pull Request 2418: https://github.com/galaxyproject/galaxy/pull/2418
+.. _Pull Request 2420: https://github.com/galaxyproject/galaxy/pull/2420
+.. _Pull Request 2421: https://github.com/galaxyproject/galaxy/pull/2421
+.. _Pull Request 2427: https://github.com/galaxyproject/galaxy/pull/2427
+.. _Pull Request 2428: https://github.com/galaxyproject/galaxy/pull/2428
+.. _Pull Request 2429: https://github.com/galaxyproject/galaxy/pull/2429
+.. _Pull Request 2434: https://github.com/galaxyproject/galaxy/pull/2434
+.. _Pull Request 2436: https://github.com/galaxyproject/galaxy/pull/2436
+.. _Pull Request 2438: https://github.com/galaxyproject/galaxy/pull/2438
+.. _Pull Request 2440: https://github.com/galaxyproject/galaxy/pull/2440
+.. _Pull Request 2444: https://github.com/galaxyproject/galaxy/pull/2444
+.. _Pull Request 2445: https://github.com/galaxyproject/galaxy/pull/2445
+.. _Pull Request 2452: https://github.com/galaxyproject/galaxy/pull/2452
+.. _Pull Request 2453: https://github.com/galaxyproject/galaxy/pull/2453
+.. _Pull Request 2455: https://github.com/galaxyproject/galaxy/pull/2455
+.. _Pull Request 2457: https://github.com/galaxyproject/galaxy/pull/2457
+.. _Pull Request 2459: https://github.com/galaxyproject/galaxy/pull/2459
+.. _Pull Request 2464: https://github.com/galaxyproject/galaxy/pull/2464
+.. _Pull Request 2468: https://github.com/galaxyproject/galaxy/pull/2468
+.. _Pull Request 2469: https://github.com/galaxyproject/galaxy/pull/2469
+.. _Pull Request 2472: https://github.com/galaxyproject/galaxy/pull/2472
+.. _Pull Request 2476: https://github.com/galaxyproject/galaxy/pull/2476
+.. _Pull Request 2477: https://github.com/galaxyproject/galaxy/pull/2477
+.. _Pull Request 2480: https://github.com/galaxyproject/galaxy/pull/2480
+.. _Pull Request 2481: https://github.com/galaxyproject/galaxy/pull/2481
+.. _Pull Request 2486: https://github.com/galaxyproject/galaxy/pull/2486
+.. _Pull Request 2488: https://github.com/galaxyproject/galaxy/pull/2488
+.. _Pull Request 2489: https://github.com/galaxyproject/galaxy/pull/2489
+.. _Pull Request 2494: https://github.com/galaxyproject/galaxy/pull/2494
+.. _Pull Request 2498: https://github.com/galaxyproject/galaxy/pull/2498
+.. _Pull Request 2499: https://github.com/galaxyproject/galaxy/pull/2499
+.. _Pull Request 2505: https://github.com/galaxyproject/galaxy/pull/2505
+.. _Pull Request 2508: https://github.com/galaxyproject/galaxy/pull/2508
+.. _Pull Request 2509: https://github.com/galaxyproject/galaxy/pull/2509
+.. _Pull Request 2515: https://github.com/galaxyproject/galaxy/pull/2515
+.. _Pull Request 2516: https://github.com/galaxyproject/galaxy/pull/2516
+.. _Pull Request 2524: https://github.com/galaxyproject/galaxy/pull/2524
+.. _Pull Request 2527: https://github.com/galaxyproject/galaxy/pull/2527
+.. _Pull Request 2528: https://github.com/galaxyproject/galaxy/pull/2528
+.. _Pull Request 2529: https://github.com/galaxyproject/galaxy/pull/2529
+.. _Pull Request 2531: https://github.com/galaxyproject/galaxy/pull/2531
+.. _Pull Request 2534: https://github.com/galaxyproject/galaxy/pull/2534
+.. _Pull Request 2538: https://github.com/galaxyproject/galaxy/pull/2538
+.. _Pull Request 2540: https://github.com/galaxyproject/galaxy/pull/2540
+.. _Pull Request 2543: https://github.com/galaxyproject/galaxy/pull/2543
+.. _Pull Request 2544: https://github.com/galaxyproject/galaxy/pull/2544
+.. _Pull Request 2545: https://github.com/galaxyproject/galaxy/pull/2545
+.. _Pull Request 2548: https://github.com/galaxyproject/galaxy/pull/2548
+.. _Pull Request 2552: https://github.com/galaxyproject/galaxy/pull/2552
+.. _Pull Request 2553: https://github.com/galaxyproject/galaxy/pull/2553
+.. _Pull Request 2554: https://github.com/galaxyproject/galaxy/pull/2554
+.. _Pull Request 2555: https://github.com/galaxyproject/galaxy/pull/2555
+.. _Pull Request 2559: https://github.com/galaxyproject/galaxy/pull/2559
+.. _Pull Request 2564: https://github.com/galaxyproject/galaxy/pull/2564
+.. _Pull Request 2569: https://github.com/galaxyproject/galaxy/pull/2569
+.. _Pull Request 2570: https://github.com/galaxyproject/galaxy/pull/2570
+.. _Pull Request 2572: https://github.com/galaxyproject/galaxy/pull/2572
+.. _Pull Request 2574: https://github.com/galaxyproject/galaxy/pull/2574
+.. _Pull Request 2576: https://github.com/galaxyproject/galaxy/pull/2576
+.. _Pull Request 2578: https://github.com/galaxyproject/galaxy/pull/2578
+.. _Pull Request 2579: https://github.com/galaxyproject/galaxy/pull/2579
+.. _Pull Request 2590: https://github.com/galaxyproject/galaxy/pull/2590
+.. _Pull Request 2594: https://github.com/galaxyproject/galaxy/pull/2594
+.. _Pull Request 2595: https://github.com/galaxyproject/galaxy/pull/2595
+.. _Pull Request 2596: https://github.com/galaxyproject/galaxy/pull/2596
+.. _Pull Request 2597: https://github.com/galaxyproject/galaxy/pull/2597
+.. _Pull Request 2599: https://github.com/galaxyproject/galaxy/pull/2599
+.. _Pull Request 2605: https://github.com/galaxyproject/galaxy/pull/2605
+.. _Pull Request 2608: https://github.com/galaxyproject/galaxy/pull/2608
+.. _Pull Request 2610: https://github.com/galaxyproject/galaxy/pull/2610
+.. _Pull Request 2612: https://github.com/galaxyproject/galaxy/pull/2612
+.. _Pull Request 2613: https://github.com/galaxyproject/galaxy/pull/2613
+.. _Pull Request 2615: https://github.com/galaxyproject/galaxy/pull/2615
+.. _Pull Request 2616: https://github.com/galaxyproject/galaxy/pull/2616
+.. _Pull Request 2617: https://github.com/galaxyproject/galaxy/pull/2617
+.. _Pull Request 2620: https://github.com/galaxyproject/galaxy/pull/2620
+.. _Pull Request 2621: https://github.com/galaxyproject/galaxy/pull/2621
+.. _Pull Request 2622: https://github.com/galaxyproject/galaxy/pull/2622
+.. _Pull Request 2625: https://github.com/galaxyproject/galaxy/pull/2625
+.. _Pull Request 2628: https://github.com/galaxyproject/galaxy/pull/2628
+.. _Pull Request 2630: https://github.com/galaxyproject/galaxy/pull/2630
+.. _Pull Request 2631: https://github.com/galaxyproject/galaxy/pull/2631
+.. _Pull Request 2632: https://github.com/galaxyproject/galaxy/pull/2632
+.. _Pull Request 2634: https://github.com/galaxyproject/galaxy/pull/2634
+.. _Pull Request 2636: https://github.com/galaxyproject/galaxy/pull/2636
+.. _Pull Request 2637: https://github.com/galaxyproject/galaxy/pull/2637
+.. _Pull Request 2639: https://github.com/galaxyproject/galaxy/pull/2639
+.. _Pull Request 2643: https://github.com/galaxyproject/galaxy/pull/2643
+.. _Pull Request 2644: https://github.com/galaxyproject/galaxy/pull/2644
+.. _Pull Request 2646: https://github.com/galaxyproject/galaxy/pull/2646
+.. _Pull Request 2647: https://github.com/galaxyproject/galaxy/pull/2647
+.. _Pull Request 2648: https://github.com/galaxyproject/galaxy/pull/2648
+.. _Pull Request 2650: https://github.com/galaxyproject/galaxy/pull/2650
+.. _Pull Request 2651: https://github.com/galaxyproject/galaxy/pull/2651
+.. _Pull Request 2652: https://github.com/galaxyproject/galaxy/pull/2652
+.. _Pull Request 2656: https://github.com/galaxyproject/galaxy/pull/2656
+.. _Pull Request 2660: https://github.com/galaxyproject/galaxy/pull/2660
+.. _Pull Request 2661: https://github.com/galaxyproject/galaxy/pull/2661
+.. _Pull Request 2664: https://github.com/galaxyproject/galaxy/pull/2664
+.. _Pull Request 2667: https://github.com/galaxyproject/galaxy/pull/2667
+.. _Pull Request 2670: https://github.com/galaxyproject/galaxy/pull/2670
+.. _Pull Request 2678: https://github.com/galaxyproject/galaxy/pull/2678
+.. _Pull Request 2680: https://github.com/galaxyproject/galaxy/pull/2680
+.. _Pull Request 2681: https://github.com/galaxyproject/galaxy/pull/2681
+.. _Pull Request 2682: https://github.com/galaxyproject/galaxy/pull/2682
+.. _Pull Request 2684: https://github.com/galaxyproject/galaxy/pull/2684
+.. _Pull Request 2685: https://github.com/galaxyproject/galaxy/pull/2685
+.. _Pull Request 2691: https://github.com/galaxyproject/galaxy/pull/2691
+.. _Pull Request 2693: https://github.com/galaxyproject/galaxy/pull/2693
+.. _Pull Request 2701: https://github.com/galaxyproject/galaxy/pull/2701
+.. _Pull Request 2721: https://github.com/galaxyproject/galaxy/pull/2721
+.. _Pull Request 2723: https://github.com/galaxyproject/galaxy/pull/2723
+.. _Pull Request 2737: https://github.com/galaxyproject/galaxy/pull/2737
+.. _Pull Request 2743: https://github.com/galaxyproject/galaxy/pull/2743
+.. _Pull Request 2744: https://github.com/galaxyproject/galaxy/pull/2744
+.. _Pull Request 2749: https://github.com/galaxyproject/galaxy/pull/2749
+.. _Pull Request 2750: https://github.com/galaxyproject/galaxy/pull/2750
+.. _Pull Request 2756: https://github.com/galaxyproject/galaxy/pull/2756
+.. _Pull Request 2759: https://github.com/galaxyproject/galaxy/pull/2759
+.. _Pull Request 2773: https://github.com/galaxyproject/galaxy/pull/2773
+.. _Pull Request 2774: https://github.com/galaxyproject/galaxy/pull/2774
+.. _Pull Request 2780: https://github.com/galaxyproject/galaxy/pull/2780
+.. _Pull Request 2789: https://github.com/galaxyproject/galaxy/pull/2789
+.. _Pull Request 2792: https://github.com/galaxyproject/galaxy/pull/2792
+.. _Pull Request 2809: https://github.com/galaxyproject/galaxy/pull/2809
+.. _Pull Request 2810: https://github.com/galaxyproject/galaxy/pull/2810
+
diff --git a/doc/source/releases/16.07_announce.rst b/doc/source/releases/16.07_announce.rst
new file mode 100644
index 0000000..131609e
--- /dev/null
+++ b/doc/source/releases/16.07_announce.rst
@@ -0,0 +1,59 @@
+
+===========================================================
+July 2016 Galaxy Release (v 16.07)
+===========================================================
+
+.. include:: _header.rst
+
+Highlights
+===========================================================
+
+**Shift of Galaxy tool dependencies to Conda**
+  Galaxy admins can now install Galaxy tools' dependencies using the
+  Conda package manager. This is a Beta feature, and we encourage
+  interested deployers to opt in by modifying their configuration.
+  Documentation that explains this switch and answers frequently asked
+  questions is `available <https://docs.galaxyproject.org/en/master/admin/conda_faq.html>`__.
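+
+  A minimal sketch of opting in, assuming the Conda option names shipped
+  in the sample config of this era (verify against
+  ``config/galaxy.ini.sample`` before editing):
+
+  .. code-block:: shell
+
+      # enable the (assumed) Conda options in config/galaxy.ini
+      $ sed -i -e 's|^#conda_auto_install = False|conda_auto_install = True|' \
+               -e 's|^#conda_auto_init = False|conda_auto_init = True|' \
+               config/galaxy.ini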
+
+**Dynamic tool destinations**
+  Our friends at Canada's National Microbiology Laboratory enhanced
+  Galaxy with a feature that allows dynamic mapping of tools to
+  destinations based on finer-grained, admin-specified rules. Please see
+  the `wiki <https://wiki.galaxyproject.org/Admin/Config/Jobs#Dynamic_Destination_Mapping>`__.
+  Implemented in `PR #2579 <https://github.com/galaxyproject/galaxy/pull/2579>`__
+
+**Galaxy chat**
+  Admins can now plug in the included communication server to enable
+  users of their instance to use real-time chat within the Galaxy interface.
+  Please see the `documentation <https://docs.galaxyproject.org/en/master/admin/chat.html>`__
+  to learn how to activate and use this feature.
+  Implemented in `PR #2515 <https://github.com/galaxyproject/galaxy/pull/2515>`__
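+
+  A hypothetical sketch of what enabling it involves; the option names
+  below are assumptions, so follow the linked documentation for the
+  actual steps:
+
+  .. code-block:: shell
+
+      # assumed galaxy.ini keys for the communication server:
+      $ grep communication config/galaxy.ini
+      enable_communication_server = True
+      communication_server_host = http://localhost
+      communication_server_port = 7070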
+
+`GitHub <https://github.com/galaxyproject/galaxy>`__
+===========================================================
+
+New Galaxy repository
+  .. code-block:: shell
+
+      $ git clone -b release_16.07 https://github.com/galaxyproject/galaxy.git
+
+Update of existing Galaxy repository
+  .. code-block:: shell
+
+      $ git checkout release_16.07 && git pull --ff-only origin release_16.07
+
+See `our wiki <https://wiki.galaxyproject.org/Develop/SourceCode>`__ for additional details regarding the source code locations.
+
+Deprecation Notices
+===========================================================
+
+The following URLs will be removed and should be considered deprecated:
+ * ``<galaxy>/history/as_xml``
+ * ``<galaxy>/history/list_as_xml``
+
+Release Notes
+===========================================================
+
+.. include:: 16.07.rst
+   :start-after: announce_start
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/16.10.rst b/doc/source/releases/16.10.rst
new file mode 100644
index 0000000..8d96aa2
--- /dev/null
+++ b/doc/source/releases/16.10.rst
@@ -0,0 +1,693 @@
+
+.. to_doc
+
+16.10
+===============================
+
+.. announce_start
+
+Highlighted Enhancements
+------------------------
+
+.. major_feature
+
+* Overhaul of the charts visualization: more visualizations, more options, and a better user interface.
+  `Pull Request 2875`_, `Pull Request 2820`_
+* Paginate contents of large histories.
+  `Pull Request 2588`_
+* Implement a collection operation tool for merging collections
+  (thanks to `@Takadonet <https://github.com/Takadonet>`__).
+  `Pull Request 2771`_
+* Replace `reference documentation <https://docs.galaxyproject.org/en/latest/dev/schema.html>`__
+  for tool XML files with documentation automatically generated
+  from the now-official Galaxy XSD (with help from many contributors).
+  `Pull Request 2923`_, `Pull Request 2936`_, `Pull Request 3086`_,
+  `Pull Request 2932`_, `Pull Request 2903`_, `Pull Request 3020`_,
+  `Pull Request 3072`_
+* Add a password strength evaluation bar
+  (thanks to `@benfulton <https://github.com/benfulton>`__).
+  `Pull Request 2687`_
+* Implement a GoDocker job runner
+  (thanks to `@varunshankar <https://github.com/varunshankar>`__).
+  `Pull Request 2791`_, `Pull Request 2653`_
+* Support for API batch requests; a sketch follows this list.
+  `Pull Request 1768`_
+* Allow JSONP to be returned from API endpoints; a sketch follows this
+  list.
+  `Pull Request 2937`_
+* Add "Save as" as an option in the workflow editor
+  (thanks to `@tmcgowan <https://github.com/tmcgowan>`__).
+  `Pull Request 3035`_
+* Allow naming input datasets and collections during workflow extraction.
+  `Pull Request 2943`_
+* Various enhancements for API-driven installation of Tool Shed repositories.
+  `Pull Request 2769`_
+
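+A minimal sketch of a batch request. The ``/api/batch`` route and payload
+shape here are illustrative assumptions based on the feature description,
+not a documented contract:
+
+.. code-block:: shell
+
+    # hypothetical: submit two API calls in a single round trip
+    $ curl -X POST -H 'Content-Type: application/json' \
+          -d '{"batch": [{"url": "/api/histories"}, {"url": "/api/workflows"}]}' \
+          "https://galaxy.example.org/api/batch?key=$API_KEY"
+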
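+A minimal JSONP sketch, assuming the conventional ``callback`` query
+parameter (the exact parameter name is unverified here):
+
+.. code-block:: shell
+
+    # hypothetical: wrap the JSON response in a named callback function
+    $ curl "https://galaxy.example.org/api/histories?key=$API_KEY&callback=loadHistories"
+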
+Security
+-------------------------------
+
+.. security
+
+* Fix an arbitrary code execution (ACE) vulnerability with the GFF filter
+  tools (thanks to David Wyde).
+  `Commit c1e3087`_
+* Escape filenames in the upload dialog, default and composite (thanks to David Wyde).
+  `Pull Request 3278`_
+
+Enhancements
+-------------------------------
+
+* Add "Save as" as an option in the workflow editor
+  (thanks to `@tmcgowan <https://github.com/tmcgowan>`__).
+  `Pull Request 3035`_
+* Allow '.' and '_' for public names
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2947`_
+* Allow naming input datasets and collections during workflow extraction.
+  `Pull Request 2943`_
+* Various enhancements for API-driven installation of Tool Shed repositories.
+  `Pull Request 2769`_
+* Implement beta support for automatically discovering BioContainers Docker
+  containers for tool dependencies.
+  `Pull Request 2986`_
+* Improved API support for downloading datasets and relevant files.
+  `Pull Request 2741`_, `Pull Request 2885`_
+* Make tool search field stay at top of window when tool list scrolls.
+  (thanks to `@sszakony <https://github.com/sszakony>`__).
+  `Pull Request 2730`_, `Issue 2375`_
+* Scratchbook enhancement to include info and view details links for datasets.
+  `Pull Request 2733`_
+* Add the implicit conversion's target extension to the HDA name when
+  listed as a selection option in the tool form.
+  `Pull Request 2734`_
+* Add column names to BED datatype so that column names are displayed in
+  the dataset viewer.
+  `Pull Request 2784`_
+* Add "All" and "None" buttons to copy dataset view.
+  `Pull Request 2957`_
+* Lazy load workflow invocation steps to vastly improve scheduling performance of large
+  collection workflows
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2978`_
+* Add CEL datatype
+  (thanks to `@nturaga <https://github.com/nturaga>`__).
+  `Pull Request 3073`_
+* Add a netcdf datatype for metabolomics spectral data
+  (thanks to `@lecorguille <https://github.com/lecorguille>`__).
+  `Pull Request 2706`_
+* Return BLAST datatypes to Galaxy core
+  (thanks to `@peterjc <https://github.com/peterjc>`__).
+  `Pull Request 2696`_
+* Add HDT format (with turtle format sniffer improvements)
+  (thanks to `@jjkoehorst <https://github.com/jjkoehorst>`__).
+  `Pull Request 2787`_
+* Allow more configurability for containerized jobs.
+  `Pull Request 2790`_
+* Allow admins to switch line-ending conversion on/off for library dataset
+  import
+  (thanks to `@SANBI-SA <https://github.com/SANBI-SA>`__).
+  `Pull Request 2819`_
+* Add a tutorial on building job runners to the documentation
+  (thanks to `@varunshankar <https://github.com/varunshankar>`__).
+  `Pull Request 2700`_
+* Allow viewing tool help from history datasets
+  (thanks to `@tmcgowan <https://github.com/tmcgowan>`__).
+  `Pull Request 3051`_
+* Prevent TS from creating sessions for repository status checks.
+  `Pull Request 2732`_
+* Shorten Conda environment names
+  (thanks to `@bgruening <https://github.com/bgruening>`__).
+  `Pull Request 2794`_
+* Various release process improvements (including an enhancement from
+  `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2857`_, `Pull Request 2858`_, `Pull Request 2689`_,
+  `Pull Request 2865`_, `Pull Request 2952`_, `Pull Request 2695`_,
+  `Pull Request 2991`_, `Pull Request 3232`_
+* Improve samtools usage with ``--version-only`` check
+  (thanks to `@remimarenco <https://github.com/remimarenco>`__).
+  `Pull Request 2705`_
+* All-around improvements to toolbox loading/reloading
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 2901`_
+* Refactor shed tool lineage registration
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 2909`_
+* Log the resolved dependency type
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 2917`_, `Pull Request 3036`_
+* Fixes for Python 3 and new import linting for dozens of packages
+  (largely thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2731`_, `Pull Request 3177`_, `Pull Request 2983`_,
+  `Pull Request 2889`_, `Pull Request 3031`_, `Pull Request 3170`_,
+  `Pull Request 3154`_
+* Change every occurrence of ``__all__`` in the code from a list to a tuple
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 3041`_, `Pull Request 3071`_
+* Docstring linting and fixes.
+  `Pull Request 3003`_, `Pull Request 3004`_
+* Update python-ldap module
+  (thanks to `@abretaud <https://github.com/abretaud>`__).
+  `Pull Request 2738`_
+* Update sqlalchemy to 1.0.15 (from 1.0.8).
+  `Pull Request 2979`_
+* Slightly revise auto configuration of loggers.
+  `Pull Request 2735`_
+* Various ``Makefile`` improvements and fixes.
+  `Pull Request 2757`_, `Pull Request 2688`_, `Pull Request 3167`_
+* Improve startup scripts
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2765`_
+* Remove ``enable_beta_tool_command_isolation`` config option
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2830`_
+* Regularize comments in configuration samples.
+  `Pull Request 2824`_, `Pull Request 2841`_, `Pull Request 2814`_
+* Skip whoami check for LDAP servers not supporting it
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2832`_
+* Filter disposable mail blacklist on the first domain level only.
+  `Pull Request 2839`_
+* Refactor duplicated back-end code to create a new ``trans`` method for
+  getting the most recently touched history.
+  `Pull Request 2870`_
+* Document setting column names in Galaxy tool XSD.
+  `Pull Request 2965`_
+* Functional test case for testing column names as metadata
+  (thanks to `@yhoogstrate <https://github.com/yhoogstrate>`__).
+  `Pull Request 2970`_
+* Improve ``version_command`` tag.
+  `Pull Request 2961`_
+* Add ``rgb`` option for color picker tool parameter.
+  `Pull Request 3006`_
+* Improved docs and testing for color parameters.
+  `Pull Request 3016`_
+* Add documentation on optional ``multiple="true"`` data parameters.
+  `Pull Request 3024`_
+* In Trackster, read numerical data directly from bigWig files.
+  `Pull Request 2880`_
+* Improve wording of tool installation forms
+  (thanks to `@nturaga <https://github.com/nturaga>`__).
+  `Pull Request 2893`_
+* Add ``database/dependencies`` to ``.gitignore``
+  (thanks to `@nturaga <https://github.com/nturaga>`__).
+  `Pull Request 2904`_
+* Allow admin to serialize/deserialize dataset permissions via the API.
+  `Pull Request 2912`_
+* Allow API access to genome fasta index and sequence
+  (thanks to `@jj-umn <https://github.com/jj-umn>`__).
+  `Pull Request 2939`_
+* Extend ``PUT /api/workflows/{id}`` with additional workflow properties;
+  see the sketch after this list
+  (thanks to `@tmcgowan <https://github.com/tmcgowan>`__).
+  `Pull Request 3080`_
+* Remove unnecessary use of binascii
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2946`_
+* Change wording to be more clear about how workflow output cleanup works.
+  `Pull Request 2966`_
+* Optimized workflow invocation step update.
+  `Pull Request 2973`_
+* Update issues.rst to bring it in line with actual usage on GitHub.
+  `Pull Request 2976`_
+* Improve Conda FAQ documentation.
+  `Pull Request 2998`_, `Pull Request 2891`_, `Pull Request 2871`_
+* Add documentation for ``conda_copy_dependencies`` option
+  (thanks to `@abretaud <https://github.com/abretaud>`__).
+  `Pull Request 3105`_
+* Slight clarification of run.sh messaging when using Conda.
+  `Pull Request 3188`_
+* Display tool requirements for conda-only tools
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3262`_
+* Pass dependency shell command errors to log
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3129`_
+* Increase min number of columns for pileup sniffing to 5
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 3033`_
+* Remove outdated warning on ID_SECRET
+  (thanks to `@erasche <https://github.com/erasche>`__).
+  `Pull Request 3046`_
+* Add Galaxy architecture slides documentation to project.
+  `Pull Request 3060`_, `Pull Request 3091`_
+* Add tool XML reload to the tool run page (admins only)
+  (thanks to `@tmcgowan <https://github.com/tmcgowan>`__).
+  `Pull Request 3070`_, `Issue 943 <https://github.com/galaxyproject/galaxy/issues/943>`__
+* Move thumbnail list view to shared UI elements, revise tabs, add test cases
+  `Pull Request 3079`_
+* Include filenames in .loc file DEBUG lines
+  (thanks to `@peterjc <https://github.com/peterjc>`__).
+  `Pull Request 3095`_
+* Log loading of resources from .loc files
+  (thanks to `@peterjc <https://github.com/peterjc>`__).
+  `Pull Request 3099`_
+* Eager load tags and annotations in history display.
+  `Pull Request 3110`_
+* Display an error message when toolbox filtering fails.
+  `Pull Request 3123`_
+* Bring in various updates from galaxy-lib.
+  `Pull Request 3169`_, `Pull Request 2888`_
+* Add links to local tours to sample welcome.
+  `Pull Request 3176`_
+* Fix dataset selector update when deleting history items.
+  `Pull Request 2677`_
+* Refactor upload dialog to use local CSS classes and avoid model listeners.
+  `Pull Request 2711`_
+* Rework Tool Shed middleware
+  `Pull Request 2782`_, `Pull Request 2753`_
+* Refactor - move filelock.py to galaxy.util.
+  `Pull Request 2879`_
+* Use requests in driver_util for better proxy handling
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3013`_
+* Add ``download_to_file`` function to ``galaxy.util``
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 3100`_
+* Quote all paths in converter tool XML files
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3165`_
+* Add routes to get genomes indexes and sequences
+  (thanks to `@jj-umn <https://github.com/jj-umn>`__).
+  `Pull Request 3109`_
+* Admin installation UX wording fixes.
+  `Pull Request 3226`_
+
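+A minimal sketch of the extended workflow update; the exact set of
+updatable properties is not spelled out here, so the JSON body below is
+an illustrative assumption:
+
+.. code-block:: shell
+
+    # hypothetical: rename a workflow and update its annotation in place
+    $ curl -X PUT -H 'Content-Type: application/json' \
+          -d '{"name": "QC pipeline v2", "annotation": "adds a trimming step"}' \
+          "https://galaxy.example.org/api/workflows/$WORKFLOW_ID?key=$API_KEY"
+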
+Fixes
+-------------------------------
+
+* Fix for bioconda recipes depending on conda-forge.
+  `Pull Request 3023`_
+* Fix history deletion bug when using impersonation.
+  `Pull Request 2654`_
+* Fix workflow extraction API tests on PostgreSQL
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 3039`_
+* Fix link target in admin view of tool lineages.
+  `Pull Request 2862`_
+* Avoid pointless lambda function
+  (thanks to `@peterjc <https://github.com/peterjc>`__).
+  `Pull Request 3097`_
+* Remove unnecessary ``set_output_history`` parameter
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 3155`_
+* Fix BLAST database *.loc files inconsistency
+  (thanks to `@peterjc <https://github.com/peterjc>`__).
+  `Pull Request 3098`_
+* Log invalid XML filename
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3124`_
+* Various code fixes for object store and docs
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 3119`_
+* Use default job history on ``build_for_rerun`` if current history is unavailable.
+  `Pull Request 2668`_
+* Always display text
+  `Pull Request 3052`_
+* Fix button width on the composite upload dialog
+  (thanks to `@sszakony <https://github.com/sszakony>`__).
+  `Pull Request 2703`_, `Issue 2591 <https://github.com/galaxyproject/galaxy/issues/2591>`__
+* Fix for splitting dataset collections
+  (thanks to `@gregvonkuster <https://github.com/gregvonkuster>`__).
+  `Pull Request 2708`_
+* Various datatypes fixes and enhancements
+  (thanks to `@nsoranzo <https://github.com/nsoranzo>`__).
+  `Pull Request 2690`_
+* Various fixes and improvements for galaxy.json.
+  `Pull Request 2697`_
+* Fix tours pasting content into text area.
+  `Pull Request 2715`_
+* Slight styling tweak to prevent overflow on the right edge in composite upload dialog.
+  `Pull Request 2716`_
+* Workflow toolbox style updates.
+  `Pull Request 2740`_
+* Fix docker command_list
+  (thanks to `@manabuishii <https://github.com/manabuishii>`__).
+  `Pull Request 2748`_
+* Fix ``list:list`` history display in the GUI.
+  `Pull Request 2758`_
+* Parse internal parameters such as job resource selections when data
+  converter tools are executed implicitly.
+  `Pull Request 2761`_
+* Fix broken check when creating default Docker volumes for jobs
+  (thanks to `@manabuishii <https://github.com/manabuishii>`__).
+  `Pull Request 2763`_
+* Fix to prevent login form from appearing in scratchbook window
+  (thanks to `@sszakony <https://github.com/sszakony>`__).
+  `Pull Request 2808`_
+* Allow NodeJS proxy to do reverse proxying
+  (thanks to `@erasche <https://github.com/erasche>`__).
+  `Pull Request 2817`_
+* Fixes for the RStudio interactive environment
+  (thanks to `@erasche <https://github.com/erasche>`__).
+  `Pull Request 2818`_
+* Acquire lock before attempting to install conda
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 2826`_
+* Fix Galaxy when pyuwsgi is installed in Galaxy's virtual environment.
+  `Pull Request 2836`_
+* Fix tabular renderer display when ``total_cols > line_cols > 1``.
+  `Pull Request 2845`_
+* Remove empty file in toolbox module.
+  `Pull Request 2877`_
+* Fix for workflow execution when using collections.
+  `Pull Request 2898`_, `Issue 2806 <https://github.com/galaxyproject/galaxy/issues/2806>`__
+* Implement ``split`` on a validator.
+  `Pull Request 2921`_
+* Catch failure to import grp in util (for Pulsar on Windows)
+  `Pull Request 2928`_
+* Bump pysam, remove ``already_compressed`` usage due to new wheel.
+  `Pull Request 2953`_
+* Re-build DOM on render for modal in the front-end.
+  `Pull Request 2955`_
+* Fix optional column form values when editing dataset properties.
+  `Pull Request 2960`_
+* Fix optional flag for library datasets in legacy library UI.
+  `Pull Request 2962`_
+* Reload metadata and lib tools on toolbox reload
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 2992`_
+* Update some old information in API guideline docs.
+  `Pull Request 3011`_
+* Fix for rerunning tool with workflow resume that is part of a collection.
+  `Pull Request 3019`_
+* Fix missing double quotation mark in tool XSD example
+  (thanks to `@ramezrawas <https://github.com/ramezrawas>`__).
+  `Pull Request 3027`_
+* Include base classes when searching datatypes by name.
+  `Pull Request 3028`_
+* Image detection fixes.
+  `Pull Request 3042`_
+* Fix tabular filtering tool broken in `#2983
+  <https://github.com/galaxyproject/galaxy/issues/2983>`__
+  `Pull Request 3043`_
+* Fix GFF attribute filtering tool broken in `#2983
+  <https://github.com/galaxyproject/galaxy/issues/2983>`__
+  `Pull Request 3044`_
+* Fix for displaying parameters to the user that have a ``False`` value.
+  `Pull Request 3045`_
+* Fix downloading tool tarball with boolean test input values.
+  `Pull Request 3047`_
+* Sort bedtools output in ``bam_to_bigwig`` conversion
+  (thanks to `@lparsons <https://github.com/lparsons>`__).
+  `Pull Request 3049`_
+* Remove incorrect communication server check.
+  `Pull Request 3053`_
+* Fix tool XSD to accept a help attribute for ``section`` elements
+  (thanks to `@joachimwolff <https://github.com/joachimwolff>`__).
+  `Pull Request 3131`_
+* Fix import orders for updates to flake8_import_order.
+  `Pull Request 3059`_
+* Dataset metadata should not be cached when building chart visualizations.
+  `Pull Request 3062`_
+* Fix libraries prefix.
+  `Pull Request 3074`_
+* Fix qunit local testing install; now runs on modern node (v6.9.1).
+  `Pull Request 3089`_
+* Escape section name when writing to XML
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3122`_
+* Fix tools placed outside of panel section
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3125`_
+* Fix XSD for ``exclude_min`` and ``exclude_max``
+  (thanks to `@gregvonkuster <https://github.com/gregvonkuster>`__).
+  `Pull Request 3108`_
+* Update galaxy.xsd
+  (thanks to `@lecorguille <https://github.com/lecorguille>`__).
+  `Pull Request 3132`_
+* Fix to treat the value of ``message_box_visible`` in ``galaxy.ini`` as a boolean.
+  `Pull Request 3139`_
+* Ensure a From: address is set for user activation emails.
+  `Pull Request 3140`_
+* Explicitly specify attributes which should be copied to converted datasets.
+  `Pull Request 3149`_
+* Prefer an existing .venv over Conda when both are available.
+  `Pull Request 3180`_
+* Validate cycles and step size before building model for workflow execution.
+  `Pull Request 3183`_
+* Fix a typo in the intro tour.
+  `Pull Request 3184`_
+* Make the SAM to BAM converter tool compatible with samtools >= 1.3
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3187`_
+* Remove unused (and unintended) config override.
+  `Pull Request 3198`_
+* Fixes for deleting histories.
+  `Pull Request 3203`_
+* Change 'History Actions' to 'Current History' in history options menu.
+  `Pull Request 3205`_
+* Preserve custom event handler for form inputs.
+  `Pull Request 3210`_
+* Backport `#3106 <https://github.com/galaxyproject/galaxy/issues/3106>`__ and
+  `#3222 <https://github.com/galaxyproject/galaxy/issues/3222>`__: Cached
+  conda environments and API to manage them
+  (thanks to `@mvdbeek <https://github.com/mvdbeek>`__).
+  `Pull Request 3227`_
+* Update the copyright year in the LICENSE.txt.
+  `Pull Request 2926`_
+* Fix for Tool Shed install when the copied sample data target exists but is
+  a broken symlink.
+  `Pull Request 3279`_
+
+.. _Issue 2375: https://github.com/galaxyproject/galaxy/issues/2375
+.. github_links
+.. _Pull Request 1768: https://github.com/galaxyproject/galaxy/pull/1768
+.. _Pull Request 2588: https://github.com/galaxyproject/galaxy/pull/2588
+.. _Pull Request 2653: https://github.com/galaxyproject/galaxy/pull/2653
+.. _Pull Request 2654: https://github.com/galaxyproject/galaxy/pull/2654
+.. _Pull Request 2668: https://github.com/galaxyproject/galaxy/pull/2668
+.. _Pull Request 2669: https://github.com/galaxyproject/galaxy/pull/2669
+.. _Pull Request 2672: https://github.com/galaxyproject/galaxy/pull/2672
+.. _Pull Request 2674: https://github.com/galaxyproject/galaxy/pull/2674
+.. _Pull Request 2677: https://github.com/galaxyproject/galaxy/pull/2677
+.. _Pull Request 2686: https://github.com/galaxyproject/galaxy/pull/2686
+.. _Pull Request 2687: https://github.com/galaxyproject/galaxy/pull/2687
+.. _Pull Request 2688: https://github.com/galaxyproject/galaxy/pull/2688
+.. _Pull Request 2689: https://github.com/galaxyproject/galaxy/pull/2689
+.. _Pull Request 2690: https://github.com/galaxyproject/galaxy/pull/2690
+.. _Pull Request 2695: https://github.com/galaxyproject/galaxy/pull/2695
+.. _Pull Request 2696: https://github.com/galaxyproject/galaxy/pull/2696
+.. _Pull Request 2697: https://github.com/galaxyproject/galaxy/pull/2697
+.. _Pull Request 2700: https://github.com/galaxyproject/galaxy/pull/2700
+.. _Pull Request 2703: https://github.com/galaxyproject/galaxy/pull/2703
+.. _Pull Request 2705: https://github.com/galaxyproject/galaxy/pull/2705
+.. _Pull Request 2706: https://github.com/galaxyproject/galaxy/pull/2706
+.. _Pull Request 2708: https://github.com/galaxyproject/galaxy/pull/2708
+.. _Pull Request 2711: https://github.com/galaxyproject/galaxy/pull/2711
+.. _Pull Request 2715: https://github.com/galaxyproject/galaxy/pull/2715
+.. _Pull Request 2716: https://github.com/galaxyproject/galaxy/pull/2716
+.. _Pull Request 2720: https://github.com/galaxyproject/galaxy/pull/2720
+.. _Pull Request 2730: https://github.com/galaxyproject/galaxy/pull/2730
+.. _Pull Request 2731: https://github.com/galaxyproject/galaxy/pull/2731
+.. _Pull Request 2732: https://github.com/galaxyproject/galaxy/pull/2732
+.. _Pull Request 2733: https://github.com/galaxyproject/galaxy/pull/2733
+.. _Pull Request 2734: https://github.com/galaxyproject/galaxy/pull/2734
+.. _Pull Request 2735: https://github.com/galaxyproject/galaxy/pull/2735
+.. _Pull Request 2738: https://github.com/galaxyproject/galaxy/pull/2738
+.. _Pull Request 2740: https://github.com/galaxyproject/galaxy/pull/2740
+.. _Pull Request 2741: https://github.com/galaxyproject/galaxy/pull/2741
+.. _Pull Request 2748: https://github.com/galaxyproject/galaxy/pull/2748
+.. _Pull Request 2753: https://github.com/galaxyproject/galaxy/pull/2753
+.. _Pull Request 2757: https://github.com/galaxyproject/galaxy/pull/2757
+.. _Pull Request 2758: https://github.com/galaxyproject/galaxy/pull/2758
+.. _Pull Request 2761: https://github.com/galaxyproject/galaxy/pull/2761
+.. _Pull Request 2762: https://github.com/galaxyproject/galaxy/pull/2762
+.. _Pull Request 2763: https://github.com/galaxyproject/galaxy/pull/2763
+.. _Pull Request 2765: https://github.com/galaxyproject/galaxy/pull/2765
+.. _Pull Request 2766: https://github.com/galaxyproject/galaxy/pull/2766
+.. _Pull Request 2769: https://github.com/galaxyproject/galaxy/pull/2769
+.. _Pull Request 2770: https://github.com/galaxyproject/galaxy/pull/2770
+.. _Pull Request 2771: https://github.com/galaxyproject/galaxy/pull/2771
+.. _Pull Request 2776: https://github.com/galaxyproject/galaxy/pull/2776
+.. _Pull Request 2777: https://github.com/galaxyproject/galaxy/pull/2777
+.. _Pull Request 2782: https://github.com/galaxyproject/galaxy/pull/2782
+.. _Pull Request 2784: https://github.com/galaxyproject/galaxy/pull/2784
+.. _Pull Request 2787: https://github.com/galaxyproject/galaxy/pull/2787
+.. _Pull Request 2790: https://github.com/galaxyproject/galaxy/pull/2790
+.. _Pull Request 2791: https://github.com/galaxyproject/galaxy/pull/2791
+.. _Pull Request 2794: https://github.com/galaxyproject/galaxy/pull/2794
+.. _Pull Request 2799: https://github.com/galaxyproject/galaxy/pull/2799
+.. _Pull Request 2808: https://github.com/galaxyproject/galaxy/pull/2808
+.. _Pull Request 2814: https://github.com/galaxyproject/galaxy/pull/2814
+.. _Pull Request 2816: https://github.com/galaxyproject/galaxy/pull/2816
+.. _Pull Request 2817: https://github.com/galaxyproject/galaxy/pull/2817
+.. _Pull Request 2818: https://github.com/galaxyproject/galaxy/pull/2818
+.. _Pull Request 2819: https://github.com/galaxyproject/galaxy/pull/2819
+.. _Pull Request 2820: https://github.com/galaxyproject/galaxy/pull/2820
+.. _Pull Request 2824: https://github.com/galaxyproject/galaxy/pull/2824
+.. _Pull Request 2826: https://github.com/galaxyproject/galaxy/pull/2826
+.. _Pull Request 2830: https://github.com/galaxyproject/galaxy/pull/2830
+.. _Pull Request 2832: https://github.com/galaxyproject/galaxy/pull/2832
+.. _Pull Request 2836: https://github.com/galaxyproject/galaxy/pull/2836
+.. _Pull Request 2839: https://github.com/galaxyproject/galaxy/pull/2839
+.. _Pull Request 2840: https://github.com/galaxyproject/galaxy/pull/2840
+.. _Pull Request 2841: https://github.com/galaxyproject/galaxy/pull/2841
+.. _Pull Request 2845: https://github.com/galaxyproject/galaxy/pull/2845
+.. _Pull Request 2848: https://github.com/galaxyproject/galaxy/pull/2848
+.. _Pull Request 2857: https://github.com/galaxyproject/galaxy/pull/2857
+.. _Pull Request 2858: https://github.com/galaxyproject/galaxy/pull/2858
+.. _Pull Request 2862: https://github.com/galaxyproject/galaxy/pull/2862
+.. _Pull Request 2865: https://github.com/galaxyproject/galaxy/pull/2865
+.. _Pull Request 2870: https://github.com/galaxyproject/galaxy/pull/2870
+.. _Pull Request 2871: https://github.com/galaxyproject/galaxy/pull/2871
+.. _Pull Request 2872: https://github.com/galaxyproject/galaxy/pull/2872
+.. _Pull Request 2875: https://github.com/galaxyproject/galaxy/pull/2875
+.. _Pull Request 2877: https://github.com/galaxyproject/galaxy/pull/2877
+.. _Pull Request 2879: https://github.com/galaxyproject/galaxy/pull/2879
+.. _Pull Request 2880: https://github.com/galaxyproject/galaxy/pull/2880
+.. _Pull Request 2885: https://github.com/galaxyproject/galaxy/pull/2885
+.. _Pull Request 2887: https://github.com/galaxyproject/galaxy/pull/2887
+.. _Pull Request 2888: https://github.com/galaxyproject/galaxy/pull/2888
+.. _Pull Request 2889: https://github.com/galaxyproject/galaxy/pull/2889
+.. _Pull Request 2891: https://github.com/galaxyproject/galaxy/pull/2891
+.. _Pull Request 2893: https://github.com/galaxyproject/galaxy/pull/2893
+.. _Pull Request 2894: https://github.com/galaxyproject/galaxy/pull/2894
+.. _Pull Request 2895: https://github.com/galaxyproject/galaxy/pull/2895
+.. _Pull Request 2898: https://github.com/galaxyproject/galaxy/pull/2898
+.. _Pull Request 2901: https://github.com/galaxyproject/galaxy/pull/2901
+.. _Pull Request 2902: https://github.com/galaxyproject/galaxy/pull/2902
+.. _Pull Request 2903: https://github.com/galaxyproject/galaxy/pull/2903
+.. _Pull Request 2904: https://github.com/galaxyproject/galaxy/pull/2904
+.. _Pull Request 2905: https://github.com/galaxyproject/galaxy/pull/2905
+.. _Pull Request 2909: https://github.com/galaxyproject/galaxy/pull/2909
+.. _Pull Request 2912: https://github.com/galaxyproject/galaxy/pull/2912
+.. _Pull Request 2917: https://github.com/galaxyproject/galaxy/pull/2917
+.. _Pull Request 2920: https://github.com/galaxyproject/galaxy/pull/2920
+.. _Pull Request 2921: https://github.com/galaxyproject/galaxy/pull/2921
+.. _Pull Request 2923: https://github.com/galaxyproject/galaxy/pull/2923
+.. _Pull Request 2926: https://github.com/galaxyproject/galaxy/pull/2926
+.. _Pull Request 2928: https://github.com/galaxyproject/galaxy/pull/2928
+.. _Pull Request 2932: https://github.com/galaxyproject/galaxy/pull/2932
+.. _Pull Request 2935: https://github.com/galaxyproject/galaxy/pull/2935
+.. _Pull Request 2936: https://github.com/galaxyproject/galaxy/pull/2936
+.. _Pull Request 2937: https://github.com/galaxyproject/galaxy/pull/2937
+.. _Pull Request 2939: https://github.com/galaxyproject/galaxy/pull/2939
+.. _Pull Request 2943: https://github.com/galaxyproject/galaxy/pull/2943
+.. _Pull Request 2946: https://github.com/galaxyproject/galaxy/pull/2946
+.. _Pull Request 2947: https://github.com/galaxyproject/galaxy/pull/2947
+.. _Pull Request 2952: https://github.com/galaxyproject/galaxy/pull/2952
+.. _Pull Request 2953: https://github.com/galaxyproject/galaxy/pull/2953
+.. _Pull Request 2955: https://github.com/galaxyproject/galaxy/pull/2955
+.. _Pull Request 2957: https://github.com/galaxyproject/galaxy/pull/2957
+.. _Pull Request 2960: https://github.com/galaxyproject/galaxy/pull/2960
+.. _Pull Request 2961: https://github.com/galaxyproject/galaxy/pull/2961
+.. _Pull Request 2962: https://github.com/galaxyproject/galaxy/pull/2962
+.. _Pull Request 2965: https://github.com/galaxyproject/galaxy/pull/2965
+.. _Pull Request 2966: https://github.com/galaxyproject/galaxy/pull/2966
+.. _Pull Request 2969: https://github.com/galaxyproject/galaxy/pull/2969
+.. _Pull Request 2970: https://github.com/galaxyproject/galaxy/pull/2970
+.. _Pull Request 2971: https://github.com/galaxyproject/galaxy/pull/2971
+.. _Pull Request 2972: https://github.com/galaxyproject/galaxy/pull/2972
+.. _Pull Request 2973: https://github.com/galaxyproject/galaxy/pull/2973
+.. _Pull Request 2974: https://github.com/galaxyproject/galaxy/pull/2974
+.. _Pull Request 2975: https://github.com/galaxyproject/galaxy/pull/2975
+.. _Pull Request 2976: https://github.com/galaxyproject/galaxy/pull/2976
+.. _Pull Request 2978: https://github.com/galaxyproject/galaxy/pull/2978
+.. _Pull Request 2979: https://github.com/galaxyproject/galaxy/pull/2979
+.. _Pull Request 2983: https://github.com/galaxyproject/galaxy/pull/2983
+.. _Pull Request 2986: https://github.com/galaxyproject/galaxy/pull/2986
+.. _Pull Request 2990: https://github.com/galaxyproject/galaxy/pull/2990
+.. _Pull Request 2991: https://github.com/galaxyproject/galaxy/pull/2991
+.. _Pull Request 2992: https://github.com/galaxyproject/galaxy/pull/2992
+.. _Pull Request 2998: https://github.com/galaxyproject/galaxy/pull/2998
+.. _Pull Request 3003: https://github.com/galaxyproject/galaxy/pull/3003
+.. _Pull Request 3004: https://github.com/galaxyproject/galaxy/pull/3004
+.. _Pull Request 3005: https://github.com/galaxyproject/galaxy/pull/3005
+.. _Pull Request 3006: https://github.com/galaxyproject/galaxy/pull/3006
+.. _Pull Request 3007: https://github.com/galaxyproject/galaxy/pull/3007
+.. _Pull Request 3010: https://github.com/galaxyproject/galaxy/pull/3010
+.. _Pull Request 3011: https://github.com/galaxyproject/galaxy/pull/3011
+.. _Pull Request 3013: https://github.com/galaxyproject/galaxy/pull/3013
+.. _Pull Request 3016: https://github.com/galaxyproject/galaxy/pull/3016
+.. _Pull Request 3019: https://github.com/galaxyproject/galaxy/pull/3019
+.. _Pull Request 3020: https://github.com/galaxyproject/galaxy/pull/3020
+.. _Pull Request 3023: https://github.com/galaxyproject/galaxy/pull/3023
+.. _Pull Request 3024: https://github.com/galaxyproject/galaxy/pull/3024
+.. _Pull Request 3025: https://github.com/galaxyproject/galaxy/pull/3025
+.. _Pull Request 3027: https://github.com/galaxyproject/galaxy/pull/3027
+.. _Pull Request 3028: https://github.com/galaxyproject/galaxy/pull/3028
+.. _Pull Request 3031: https://github.com/galaxyproject/galaxy/pull/3031
+.. _Pull Request 3032: https://github.com/galaxyproject/galaxy/pull/3032
+.. _Pull Request 3033: https://github.com/galaxyproject/galaxy/pull/3033
+.. _Pull Request 3035: https://github.com/galaxyproject/galaxy/pull/3035
+.. _Pull Request 3036: https://github.com/galaxyproject/galaxy/pull/3036
+.. _Pull Request 3037: https://github.com/galaxyproject/galaxy/pull/3037
+.. _Pull Request 3039: https://github.com/galaxyproject/galaxy/pull/3039
+.. _Pull Request 3040: https://github.com/galaxyproject/galaxy/pull/3040
+.. _Pull Request 3041: https://github.com/galaxyproject/galaxy/pull/3041
+.. _Pull Request 3042: https://github.com/galaxyproject/galaxy/pull/3042
+.. _Pull Request 3043: https://github.com/galaxyproject/galaxy/pull/3043
+.. _Pull Request 3044: https://github.com/galaxyproject/galaxy/pull/3044
+.. _Pull Request 3045: https://github.com/galaxyproject/galaxy/pull/3045
+.. _Pull Request 3046: https://github.com/galaxyproject/galaxy/pull/3046
+.. _Pull Request 3047: https://github.com/galaxyproject/galaxy/pull/3047
+.. _Pull Request 3049: https://github.com/galaxyproject/galaxy/pull/3049
+.. _Pull Request 3051: https://github.com/galaxyproject/galaxy/pull/3051
+.. _Pull Request 3052: https://github.com/galaxyproject/galaxy/pull/3052
+.. _Pull Request 3053: https://github.com/galaxyproject/galaxy/pull/3053
+.. _Pull Request 3056: https://github.com/galaxyproject/galaxy/pull/3056
+.. _Pull Request 3059: https://github.com/galaxyproject/galaxy/pull/3059
+.. _Pull Request 3060: https://github.com/galaxyproject/galaxy/pull/3060
+.. _Pull Request 3062: https://github.com/galaxyproject/galaxy/pull/3062
+.. _Pull Request 3070: https://github.com/galaxyproject/galaxy/pull/3070
+.. _Pull Request 3071: https://github.com/galaxyproject/galaxy/pull/3071
+.. _Pull Request 3072: https://github.com/galaxyproject/galaxy/pull/3072
+.. _Pull Request 3073: https://github.com/galaxyproject/galaxy/pull/3073
+.. _Pull Request 3074: https://github.com/galaxyproject/galaxy/pull/3074
+.. _Pull Request 3077: https://github.com/galaxyproject/galaxy/pull/3077
+.. _Pull Request 3079: https://github.com/galaxyproject/galaxy/pull/3079
+.. _Pull Request 3080: https://github.com/galaxyproject/galaxy/pull/3080
+.. _Pull Request 3082: https://github.com/galaxyproject/galaxy/pull/3082
+.. _Pull Request 3086: https://github.com/galaxyproject/galaxy/pull/3086
+.. _Pull Request 3087: https://github.com/galaxyproject/galaxy/pull/3087
+.. _Pull Request 3089: https://github.com/galaxyproject/galaxy/pull/3089
+.. _Pull Request 3091: https://github.com/galaxyproject/galaxy/pull/3091
+.. _Pull Request 3095: https://github.com/galaxyproject/galaxy/pull/3095
+.. _Pull Request 3097: https://github.com/galaxyproject/galaxy/pull/3097
+.. _Pull Request 3098: https://github.com/galaxyproject/galaxy/pull/3098
+.. _Pull Request 3099: https://github.com/galaxyproject/galaxy/pull/3099
+.. _Pull Request 3100: https://github.com/galaxyproject/galaxy/pull/3100
+.. _Pull Request 3102: https://github.com/galaxyproject/galaxy/pull/3102
+.. _Pull Request 3103: https://github.com/galaxyproject/galaxy/pull/3103
+.. _Pull Request 3105: https://github.com/galaxyproject/galaxy/pull/3105
+.. _Pull Request 3108: https://github.com/galaxyproject/galaxy/pull/3108
+.. _Pull Request 3109: https://github.com/galaxyproject/galaxy/pull/3109
+.. _Pull Request 3110: https://github.com/galaxyproject/galaxy/pull/3110
+.. _Pull Request 3113: https://github.com/galaxyproject/galaxy/pull/3113
+.. _Pull Request 3116: https://github.com/galaxyproject/galaxy/pull/3116
+.. _Pull Request 3119: https://github.com/galaxyproject/galaxy/pull/3119
+.. _Pull Request 3122: https://github.com/galaxyproject/galaxy/pull/3122
+.. _Pull Request 3123: https://github.com/galaxyproject/galaxy/pull/3123
+.. _Pull Request 3124: https://github.com/galaxyproject/galaxy/pull/3124
+.. _Pull Request 3125: https://github.com/galaxyproject/galaxy/pull/3125
+.. _Pull Request 3129: https://github.com/galaxyproject/galaxy/pull/3129
+.. _Pull Request 3130: https://github.com/galaxyproject/galaxy/pull/3130
+.. _Pull Request 3131: https://github.com/galaxyproject/galaxy/pull/3131
+.. _Pull Request 3132: https://github.com/galaxyproject/galaxy/pull/3132
+.. _Pull Request 3135: https://github.com/galaxyproject/galaxy/pull/3135
+.. _Pull Request 3139: https://github.com/galaxyproject/galaxy/pull/3139
+.. _Pull Request 3140: https://github.com/galaxyproject/galaxy/pull/3140
+.. _Pull Request 3141: https://github.com/galaxyproject/galaxy/pull/3141
+.. _Pull Request 3149: https://github.com/galaxyproject/galaxy/pull/3149
+.. _Pull Request 3154: https://github.com/galaxyproject/galaxy/pull/3154
+.. _Pull Request 3155: https://github.com/galaxyproject/galaxy/pull/3155
+.. _Pull Request 3165: https://github.com/galaxyproject/galaxy/pull/3165
+.. _Pull Request 3167: https://github.com/galaxyproject/galaxy/pull/3167
+.. _Pull Request 3169: https://github.com/galaxyproject/galaxy/pull/3169
+.. _Pull Request 3170: https://github.com/galaxyproject/galaxy/pull/3170
+.. _Pull Request 3176: https://github.com/galaxyproject/galaxy/pull/3176
+.. _Pull Request 3177: https://github.com/galaxyproject/galaxy/pull/3177
+.. _Pull Request 3180: https://github.com/galaxyproject/galaxy/pull/3180
+.. _Pull Request 3183: https://github.com/galaxyproject/galaxy/pull/3183
+.. _Pull Request 3184: https://github.com/galaxyproject/galaxy/pull/3184
+.. _Pull Request 3187: https://github.com/galaxyproject/galaxy/pull/3187
+.. _Pull Request 3188: https://github.com/galaxyproject/galaxy/pull/3188
+.. _Pull Request 3191: https://github.com/galaxyproject/galaxy/pull/3191
+.. _Pull Request 3198: https://github.com/galaxyproject/galaxy/pull/3198
+.. _Pull Request 3203: https://github.com/galaxyproject/galaxy/pull/3203
+.. _Pull Request 3205: https://github.com/galaxyproject/galaxy/pull/3205
+.. _Pull Request 3210: https://github.com/galaxyproject/galaxy/pull/3210
+.. _Pull Request 3226: https://github.com/galaxyproject/galaxy/pull/3226
+.. _Pull Request 3227: https://github.com/galaxyproject/galaxy/pull/3227
+.. _Pull Request 3232: https://github.com/galaxyproject/galaxy/pull/3232
+.. _Pull Request 3262: https://github.com/galaxyproject/galaxy/pull/3262
+.. _Pull Request 3279: https://github.com/galaxyproject/galaxy/pull/3279
+
+.. _Commit c1e3087: https://github.com/galaxyproject/galaxy/commit/c1e3087ca35dbca1b0328954fe4769d666d3f934
+.. _Pull Request 3278: https://github.com/galaxyproject/galaxy/pull/3278
diff --git a/doc/source/releases/16.10_announce.rst b/doc/source/releases/16.10_announce.rst
new file mode 100644
index 0000000..de1e4a3
--- /dev/null
+++ b/doc/source/releases/16.10_announce.rst
@@ -0,0 +1,84 @@
+
+===========================================================
+October 2016 Galaxy Release (v 16.10)
+===========================================================
+
+.. include:: _header.rst
+
+Highlights
+==========
+
+**Galaxy UI plugins - Webhooks**
+  We introduce Galaxy Webhooks - optional plugins for the web UI that allow for better customization of your instance. See the `documentation <https://docs.galaxyproject.org/en/master/admin/webhooks.html>`__.
+  Includes work from `@bgruening <https://github.com/bgruening>`__, `@anatskiy <https://github.com/anatskiy>`__, and Joachim Wolff `@joachimwolff <https://github.com/joachimwolff>`__.
+  Implemented in `Pull Request 3040`_.
+
+**Workflow run form replaced**
+  The workflow run form has been replaced by one backed by the new tool form and the API. Nicer, faster, standardized.
+  Implemented in `Pull Request 2669`_, `Pull Request 2720`_, `Pull Request 2766`_,
+  `Pull Request 2776`_, `Pull Request 2770`_, `Pull Request 2895`_,
+  `Pull Request 2935`_, `Pull Request 3007`_, `Pull Request 2969`_,
+  `Pull Request 2799`_, `Pull Request 3082`_, `Pull Request 2672`_,
+  `Pull Request 2920`_, `Pull Request 2777`_, `Pull Request 2902`_.
+
+**Automatic tool reload after installation**
+  Galaxy no longer needs to be restarted after tool installation. This provides a smoother experience for users. Yay!
+  Thanks to `@mvdbeek <https://github.com/mvdbeek>`__.
+  Implemented in `Pull Request 2840`_, `Pull Request 3025`_.
+
+Get Galaxy
+==========
+
+The code lives at `GitHub <https://github.com/galaxyproject/galaxy>`__ and you will need `Git <https://git-scm.com/>`__ to obtain it.
+
+To get a new Galaxy repository run:
+  .. code-block:: shell
+
+      $ git clone -b release_16.10 https://github.com/galaxyproject/galaxy.git
+
+To update an existing Galaxy repository run:
+  .. code-block:: shell
+
+      $ git checkout release_16.10 && git pull --ff-only origin release_16.10
+
+See `our wiki <https://wiki.galaxyproject.org/Develop/SourceCode>`__ for additional details regarding the source code location.
+
+Deprecation Notices
+===================
+
+* API-like endpoints (returning JSON) that start with ``<galaxy>/root/`` in the URL are now deprecated and will be removed in future releases. Please use the proper API (e.g. ``/api/histories`` or ``/api/tools``) to replace your implementations; a minimal example follows below.
+* The ``jsonp`` parameter for search API endpoints in the Tool Shed is deprecated and will be removed in the future. Instead, the presence of a ``callback`` argument will trigger a JSONP-formatted response.
+
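+For illustration only, here is a minimal sketch of replacing a ``/root/``-style
+call with the documented histories API using the Python ``requests`` library
+(the server URL and API key below are placeholders, not real values):
+
+.. code-block:: python
+
+    import requests
+
+    GALAXY_URL = "https://galaxy.example.org"  # placeholder instance URL
+    API_KEY = "your-api-key"                   # placeholder API key
+
+    # Fetch the current user's histories as JSON via the documented API.
+    response = requests.get(GALAXY_URL + "/api/histories",
+                            params={"key": API_KEY})
+    response.raise_for_status()
+    for history in response.json():
+        print(history["name"])
+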
+Security
+========
+
+An arbitrary code execution vulnerability in two tools and an XSS vulnerability
+with the upload tool were identified this release cycle and have been fixed
+concurrently with the release. In addition, the fixes have been backported to
+older releases.
+
+The Galaxy Committers would like to thank David Wyde for disclosing these
+vulnerabilities. Details follow:
+
+1. The vulnerable tools are "Filter GFF data by attribute" and "Filter GFF data
+   by feature count", both of which are provided with and enabled by default in
+   the Galaxy server. These two tools share code with each other and the more
+   general "Filter data on any column using simple expressions" tool. The
+   latter was fixed in a previous security disclosure but these GFF variants of
+   the tool were missed when updating the Filter tool. These tools use the
+   Python ``eval`` and ``exec`` functions and do not properly sanitize input to
+   these functions. The fix for this issue has been applied to Galaxy releases
+   back to v14.10 and can be found in `Commit c1e3087`_.
+
+2. An uploaded file's name was not properly sanitized, and so a specially
+   crafted filename uploaded to the Galaxy server could be used as an XSS
+   attack vector. The fix for this issue has been applied to Galaxy releases
+   back to v16.07 and can be found in `Pull Request 3278`_.
+
+Release Notes
+=============
+
+.. include:: 16.10.rst
+   :start-after: announce_start
+
+.. include:: _thanks.rst
diff --git a/doc/source/releases/17.01_announce.rst b/doc/source/releases/17.01_announce.rst
new file mode 100644
index 0000000..943de58
--- /dev/null
+++ b/doc/source/releases/17.01_announce.rst
@@ -0,0 +1,10 @@
+
+===========================================================
+January 2017 Galaxy Release (v 17.01)
+===========================================================
+
+
+Schedule
+===========================================================
+ * Planned Freeze Date: 2017-01-02
+ * Planned Release Date: 2017-01-23
diff --git a/doc/source/releases/_header.rst b/doc/source/releases/_header.rst
new file mode 100644
index 0000000..ddaf966
--- /dev/null
+++ b/doc/source/releases/_header.rst
@@ -0,0 +1,3 @@
+.. image:: https://wiki.galaxyproject.org/Images/GalaxyLogos?action=AttachFile&do=get&target=galaxy_logo_25percent_transparent.png
+   :alt: Get the Galaxy Release Your Way
+   :target: http://getgalaxy.org
diff --git a/doc/source/releases/_thanks.rst b/doc/source/releases/_thanks.rst
new file mode 100644
index 0000000..388a44e
--- /dev/null
+++ b/doc/source/releases/_thanks.rst
@@ -0,0 +1,7 @@
+To stay up to date with Galaxy's progress watch our `screencasts <https://vimeo.com/galaxyproject>`__, 
+read our `wiki <https://wiki.galaxyproject.org/>`__, and follow
+`@galaxyproject <https://twitter.com/galaxyproject>`__ on Twitter.
+
+*Thanks for using Galaxy!*
+
+`The Galaxy Team <https://wiki.galaxyproject.org/GalaxyTeam>`__
diff --git a/doc/source/releases/index.rst b/doc/source/releases/index.rst
new file mode 100644
index 0000000..5d3859f
--- /dev/null
+++ b/doc/source/releases/index.rst
@@ -0,0 +1,27 @@
+Releases
+========
+
+.. toctree::
+   :maxdepth: 1
+
+   16.10_announce
+   16.07_announce
+   16.04_announce
+   16.01_announce
+   15.10_announce
+   15.07_announce
+   15.05_announce
+   15.03_announce
+   15.01_announce
+   14.10_announce
+   14.08_announce
+   14.06_announce
+   14.04_announce
+   14.02_announce
+   13.11_announce
+   13.08_announce
+   13.06_announce
+   13.04_announce
+   13.02_announce
+   13.01_announce
+   older_releases
diff --git a/doc/source/releases/older_releases.rst b/doc/source/releases/older_releases.rst
new file mode 100644
index 0000000..98aebbe
--- /dev/null
+++ b/doc/source/releases/older_releases.rst
@@ -0,0 +1,11 @@
+===========================================================
+Galaxy Releases older than v 13.01
+===========================================================
+
+.. include:: _header.rst
+
+Please see the `Galaxy wiki`_ for announcements and release notes.
+
+.. _Galaxy wiki: https://wiki.galaxyproject.org/DevNewsBriefs
+
+.. include:: _thanks.rst
diff --git a/doc/source/slideshow/architecture/galaxy_architecture.md b/doc/source/slideshow/architecture/galaxy_architecture.md
new file mode 100644
index 0000000..fc40e6f
--- /dev/null
+++ b/doc/source/slideshow/architecture/galaxy_architecture.md
@@ -0,0 +1,1341 @@
+layout: true
+class: inverse, middle, large
+
+---
+class: special
+# Galaxy Architecture
+
+Nate, James, John, Rémi
+
+.footnote[\#usegalaxy / @galaxyproject]
+
+---
+
+class: larger
+
+### Please Interrupt!
+
+We're here to answer your questions about Galaxy architecture!
+
+---
+
+## Getting involved in Galaxy
+
+---
+
+class: larger
+
+**IRC:** irc.freenode.net#galaxyproject
+
+**GitHub:** github.com/galaxyproject
+
+**Twitter:** #usegalaxy, @galaxyproject
+
+---
+
+### Contributing
+
+All Galaxy development happens on GitHub
+
+Contribution guidelines: http://bit.ly/gx-CONTRIBUTING-md
+
+---
+
+## The **/galaxyproject** projects
+
+---
+
+github.com/galaxyproject/**galaxy**
+
+The main Galaxy application. Web interface, database model, job running, etc. It also includes other web applications, such as the **ToolShed** and **Reports**
+
+---
+
+github.com/galaxyproject/**cloudman**
+
+Galaxy CloudMan - a web application which manages a Galaxy cluster in
+the cloud.
+
+github.com/galaxyproject/**cloudlaunch**
+
+CloudLaunch, a web application that makes it easy to launch images on a cloud; it drives *https://launch.usegalaxy.org*
+
+---
+
+github.com/galaxyproject/**tools-iuc**
+
+Galaxy tools maintained by *iuc* (the "Intergalactic Utilities Commission").
+
+A variety of tools, generally of high quality, including many of the core tools for Galaxy main.
+
+Demonstrates *current tool development best practices* - development on
+GitHub, then deployment to the test/main ToolSheds
+
+github.com/galaxyproject/**tools-devteam**
+
+Many older tools appearing on usegalaxy.org.
+
+
+---
+
+### Tools Aside - More Repositories
+
+Other repositories with high quality tools:
+
+ * [Björn Grüning's repo](https://github.com/bgruening/galaxytools)
+ * Peter Cock's repos:
+   * [blast repo](https://github.com/peterjc/galaxy_blast)
+   * [pico repo](https://github.com/peterjc/pico_galaxy)
+   * [mira repo](https://github.com/peterjc/galaxy_mira)
+ * [ENCODE tools](https://github.com/modENCODE-DCC/Galaxy)
+ * [Biopython repo](https://github.com/biopython/galaxy_packages)
+ * [Galaxy Proteomics repo](https://github.com/galaxyproteomics/tools-galaxyp)
+ * [Colibread Galaxy Tools](https://github.com/genouest/tools-colibread)
+ * [Greg von Kuster's repo](https://github.com/gregvonkuster/galaxy-csg)
+ * [TGAC repo](https://github.com/TGAC/tgac-galaxytools)
+ * [AAFC-MBB Canada repo](https://github.com/AAFC-MBB/Galaxy/tree/master/wrappers)
+ * [Mark Einon's repo](https://github.com/einon/galaxy-tools)
+
+
+---
+
+github.com/galaxyproject/**starforge**
+
+Build Galaxy Tool dependencies for the ToolShed in Docker containers
+
+Build Galaxy framework dependencies as Python wheels
+
+---
+
+github.com/galaxyproject/**planemo**
+
+Command-line utilities to assist in the development of Galaxy tools.
+Linting, testing, deploying to ToolSheds... *The best practice approach
+for Galaxy tool development!*
+
+github.com/galaxyproject/**planemo-machine**
+
+Builds Galaxy environments for Galaxy tool development, including Docker
+containers, virtual machines, and Google Compute Engine images
+
+---
+
+github.com/galaxyproject/**{ansible-\*, \*-playbook}**
+
+Ansible components to automate almost every aspect of Galaxy installation and maintenance.
+
+Ansible is an advanced configuration management system.
+
+These playbooks are used to maintain Galaxy main, cloud images, virtual machines, ...
+
+---
+
+github.com/galaxyproject/**pulsar**
+
+Distributed job execution engine for Galaxy.
+
+Stages data, scripts, configuration.
+
+Can run jobs on Windows machines.
+
+Can act as its own queuing system or access an existing cluster DRM.
+
+---
+
+github.com/galaxyproject/**bioblend**
+
+Official Python client for the Galaxy, ToolShed, and CloudMan APIs.
+
+Best documented path to scripting the Galaxy API.
+
+---
+
+- github.com/galaxyproject/**blend4php**
+- github.com/**jmchilton/blend4j**
+- github.com/**chapmanb/clj-blend**
+
+Galaxy API bindings for other languages.
+
+---
+
+github.com/**bgruening/docker-galaxy-stable**
+
+High quality Docker containers for stable Galaxy environments.
+
+Releases corresponding to each new version of Galaxy.
+
+Many flavors available.
+
+---
+
+class: white
+![Docker](images/docker-chart.png)
+
+---
+
+## Principles
+
+---
+
+### Aspirational Principles of Galaxy Architecture
+
+Whereas the architecture of the frontend (Web UI) aims for consistency and is
+highly opinionated, the backend (Python server) is guided by flexibility and is meant to be driven by plugins whenever possible.
+
+???
+
+Though an imperfect abstraction... it may be beneficial to think of the organizational
+principles that guide frontend and backend development of Galaxy as
+diametrically opposed.
+
+The frontend architecture is guided by the principle that the end user experience
+should be as simple and consistent as possible. The backend has been deployed at
+so many different sites, targeting so many different technologies, that
+flexibility is paramount.
+
+---
+
+### An Opinionated Frontend
+
+- The target audience is a *bench scientist* - no knowledge of programming, paths, or command lines should be assumed.
+- Consistent colors, fonts, themes, etc...
+- Reusable components for presenting common widgets - from the generic (forms and grids) to the specific (tools and histories).
+- Tied to specific technologies:
+  - JavaScript driven
+  - Backbone for MVC
+  - webpack & RequireJS for modules
+
+---
+
+### A Plugin Driven Backend
+
+Galaxy's backend is in many ways driven by *pluggable interfaces* and
+can be adapted to many different technologies.
+
+- SQLAlchemy allows using SQLite, PostgreSQL, or MySQL as the database.
+- Many different cluster backends or job managers are supported.
+- Different frontend proxies (e.g. nginx) are supported as well as web
+  application containers (e.g. uWSGI).
+- Different storage strategies and technologies are supported (e.g. S3).
+- Tool definitions, job metrics, stats middleware, tool dependency resolution, workflow modules,
+  and datatype definitions are all plugin driven.
+
+???
+
+If the chief architectural principle guiding the frontend is a fast and accessible
+experience for the bench scientist, perhaps for the backend it is allowing
+deployment on many different platforms and at many different scales.
+
+---
+
+### A Plugin Driven Backend but...
+
+Galaxy has long been guided by the principle that cloning it and calling
+`run.sh` should "just work" and should work quickly.
+
+So by default Galaxy does not require:
+
+ - Compilation - it fetches *binary wheels*.
+ - A job manager - Galaxy can act as one.
+ - An external database server - Galaxy can use an SQLite database.
+ - A web proxy or external Python web server.
+
+---
+
+## Web Frameworks
+
+---
+
+![Client-Server Communications](images/server_client.plantuml.svg)
+
+???
+
+Workflow, Data Libraries, Visualization, History, Tool Menu,
+Many Grids
+
+---
+
+class: white
+
+![Backbone MVC](images/backbone-model-view.svg)
+
+### Backbone MVC
+
+---
+
+![Client-Server Communications (old)](images/server_client_old.plantuml.svg)
+
+???
+
+User management and admin things, Reports and Tool Shed
+Webapp
+
+---
+
+![WSGI Application](images/wsgi_app.svg)
+
+### Galaxy WSGI
+
+---
+
+### WSGI
+
+- Python interface for web servers defined by PEP 333 - https://www.python.org/dev/peps/pep-0333/.
+- Galaxy is moving from Paster to uWSGI to host the application.
+  - http://pythonpaste.org/
+  - https://uwsgi-docs.readthedocs.io/
+
+---
+
+![WSGI Request](images/wsgi_request.svg)
+
+---
+
+### Galaxy WSGI Middleware
+
+A WSGI function:
+
+`def app(environ, start_response):`
+
+- Middleware components act as filters: they modify the `environ` and then pass the request through to the next app (see the sketch below).
+- Galaxy uses several middleware components defined in the `wrap_in_middleware`
+  function of `galaxy.webapps.galaxy.buildapp`.
+
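+A minimal sketch of the pattern (a hypothetical timing middleware, not one of
+Galaxy's):
+
+```python
+import time
+
+def timing_middleware(app):
+    """Wrap a WSGI app; the wrapper is itself a WSGI app."""
+    def wrapped(environ, start_response):
+        environ['example.start_time'] = time.time()  # annotate the request
+        return app(environ, start_response)          # pass through
+    return wrapped
+```
+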
+---
+
+class: normal
+
+### Galaxy's WSGI Middleware
+
+Middleware configured in `galaxy.webapps.galaxy.buildapp#wrap_in_middleware`.
+
+- `paste.httpexceptions#make_middleware`
+- `galaxy.web.framework.middleware.remoteuser#RemoteUser` (if configured)
+- `paste.recursive#RecursiveMiddleware`
+- `galaxy.web.framework.middleware.sentry#Sentry` (if configured)
+- Various debugging middleware (linting, interactive exceptions, etc...)
+- `galaxy.web.framework.middleware.statsd#StatsdMiddleware` (if configured)
+- `galaxy.web.framework.middleware.xforwardedhost#XForwardedHostMiddleware`
+- `galaxy.web.framework.middleware.request_id#RequestIDMiddleware`
+
+---
+
+background-image: url(images/webapp.plantuml.svg)
+
+---
+
+class: normal
+
+### Routes
+
+Set up on `webapp` in `galaxy.webapps.galaxy.buildapp`.
+
+```python
+webapp.add_route(
+    '/datasets/:dataset_id/display/{filename:.+?}',
+    controller='dataset', action='display',
+    dataset_id=None, filename=None
+)
+```
+
+URL `/datasets/278043/display` matches this route, so `handle_request` will
+
+- look up the controller named `dataset`
+- look for a method named `display` that is exposed
+- call it, passing `dataset_id` and `filename` as keyword arguments
+
+Uses the popular Routes library (https://pypi.python.org/pypi/Routes).
+
+---
+
+class: normal
+
+Simplified `handle_request` from `lib/galaxy/web/framework/base.py`.
+
+```python
+def handle_request(self, environ, start_response):
+    path_info = environ.get( 'PATH_INFO', '' )
+    map = self.mapper.match( path_info, environ )
+    if path_info.startswith('/api'):
+        controllers = self.api_controllers
+    else:
+        controllers = self.controllers
+
+    trans = self.transaction_factory( environ )
+
+    controller_name = map.pop( 'controller', None )
+    controller = controllers.get( controller_name, None )
+
+    # Resolve action method on controller
+    action = map.pop( 'action', 'index' )
+    method = getattr( controller, action, None )
+
+    kwargs = trans.request.params.mixed()
+    # Read controller arguments from mapper match
+    kwargs.update( map )
+
+    body = method( trans, **kwargs )
+    # Body may be a file, string, etc... respond with it.
+```
+
+---
+
+### API Controllers
+
+- `lib/galaxy/webapps/galaxy/controllers/api/`
+- Exposed methods take `trans` and request parameters and return a JSON response.
+- Ideally these are *thin* (see the sketch below)
+  - Focused on "web things" - adapting parameters and responses - and moving
+    "business logic" to components not bound to web functionality.
+
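+A hedged sketch of the shape only - the controller name, route, and payload
+here are invented, not a real Galaxy controller:
+
+```python
+from galaxy.web import expose_api
+from galaxy.web.base.controller import BaseAPIController
+
+class ExamplesAPIController(BaseAPIController):
+
+    @expose_api
+    def index(self, trans, **kwd):
+        # Thin: adapt web parameters, delegate real work to a
+        # manager, and return a JSON-serializable structure.
+        return [{'id': trans.security.encode_id(1), 'name': 'example'}]
+```
+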
+---
+
+### Legacy Controllers
+
+- `lib/galaxy/webapps/galaxy/controllers/`
+- Return arbitrary content - JSON, HTML, etc...
+- Render HTML components using [mako](http://www.makotemplates.org/) templates (see `templates/`)
+- The usage of these should decrease over time.
+
+---
+
+## Application Components
+
+---
+
+### Galaxy Models
+
+- Database interactions powered by SQLAlchemy - http://www.sqlalchemy.org/.
+- Galaxy doesn't think in terms of "rows" but of "objects" (see the sketch below).
+- Classes for Galaxy model objects in `lib/galaxy/model/__init__.py`.
+- Classes mapped to tables in `lib/galaxy/model/mapping.py`
+  - Describes table definitions and relationships.
+
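+A toy sketch of the classical mapping style - the table, class, and column
+names here are invented, not Galaxy's actual model:
+
+```python
+from sqlalchemy import Column, Integer, MetaData, String, Table
+from sqlalchemy.orm import mapper
+
+metadata = MetaData()
+
+# Table definition, in the style of lib/galaxy/model/mapping.py.
+example_table = Table('example', metadata,
+    Column('id', Integer, primary_key=True),
+    Column('name', String(255)))
+
+class Example(object):
+    """Plain class; the mapper attaches it to the table."""
+    def __init__(self, name):
+        self.name = name
+
+mapper(Example, example_table)  # rows now load as Example objects
+```
+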
+---
+
+class: white, widen_image
+
+![SQLAlchemy Architecture](images/sqla_arch_small.png)
+
+---
+
+### Galaxy Model Migrations
+
+- A migration describes one "diff" in a linear list of database changes that
+  ends with the current Galaxy model.
+- Migrations allow the schema to be migrated forward automatically (see the
+  sketch below).
+- Powered by sqlalchemy-migrate - https://sqlalchemy-migrate.readthedocs.io/en/latest/.
+- Each file in `lib/galaxy/model/migrate/versions/`
+  - `0124_job_state_history.py`
+  - `0125_workflow_step_tracking.py`
+  - `0126_password_reset.py`
+
+---
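+A sketch of what one version script looks like - the table here is invented,
+but the `upgrade`/`downgrade` shape follows the real scripts:
+
+```python
+from sqlalchemy import Column, DateTime, Integer, MetaData, Table
+
+metadata = MetaData()
+
+# Hypothetical new table introduced by this migration step.
+ExampleTable = Table('example_token', metadata,
+    Column('id', Integer, primary_key=True),
+    Column('expiration_time', DateTime))
+
+def upgrade(migrate_engine):
+    # Bind to the live database and apply this step's "diff".
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    ExampleTable.create()
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    ExampleTable.drop()
+```
+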
+
+class: white, narrow_image
+![Galaxy Schema](images/galaxy_schema.png)
+
+### Database Diagram
+
+https://wiki.galaxyproject.org/Admin/Internals/DataModel
+
+---
+
+![HDA](images/hda.svg)
+
+---
+
+![HDA Dataset](images/hda_dataset.plantuml.svg)
+
+---
+
+### Metadata
+
+- Typed key-value pairs attached to HDA.
+- Keys and types defined at the datatype level.
+- Can be used by tools to dynamically control the tool form (see the sketch below).
+
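+A hedged sketch of the declaration style (assumes a Galaxy checkout on the
+path; the datatype and element names below are invented):
+
+```python
+from galaxy.datatypes.data import Text
+from galaxy.datatypes.metadata import MetadataElement
+
+class ExampleTabular(Text):
+    """Hypothetical datatype declaring typed metadata elements."""
+    # Typed key-value pairs attached to every HDA of this datatype.
+    MetadataElement(name="columns", default=0, desc="Number of columns",
+                    readonly=True, visible=False)
+    MetadataElement(name="comment_lines", default=0,
+                    desc="Number of comment lines", optional=True)
+```
+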
+???
+
+Slides for datatypes, example of meta data definitions...
+
+---
+
+![HDAs and HDCAs](images/hda_hdca.plantuml.svg)
+
+---
+
+![Workflows](images/workflow_definition.svg)
+
+---
+
+![Workflow Running](images/workflow_run.svg)
+
+---
+
+![Libraries](images/libraries.svg)
+
+---
+
+![Library Permissions](images/library_permissions.svg)
+
+---
+
+![Data Managers](images/data_managers.svg)
+
+---
+
+### Job Components
+
+- A job is placed into the database and picked up by the job handler.
+- The job handler (`JobHandler`) watches the job and transitions its state - common startup and finishing.
+- The job mapper (`JobRunnerMapper`) decides the "destination" for a job (see the sketch below).
+- The job runner (e.g. `DRMAAJobRunner`) actually runs the job and provides an interface for checking status.
+
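+A sketch of a dynamic mapping rule - a plain Python function the job
+configuration can point a dynamic destination at; the destination ids and
+size threshold here are invented:
+
+```python
+def route_by_input_size(job):
+    """Pick a destination id based on total input size (illustrative)."""
+    total = sum(assoc.dataset.get_size() for assoc in job.input_datasets)
+    if total > 10 * 1024 ** 3:   # inputs over 10 GB: big-memory cluster
+        return 'cluster_himem'   # invented destination id
+    return 'cluster_default'     # invented destination id
+```
+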
+---
+
+class: normal
+
+### Object Store
+
+.strike[```
+>>> fh = open( dataset.file_path, 'w' )
+>>> fh.write( 'foo' )
+>>> fh.close()
+>>> fh = open( dataset.file_path, 'r' )
+>>> fh.read()
+```]
+
+```
+>>> update_from_file( dataset, file_name='foo.txt' )
+>>> get_data( dataset )
+>>> get_data( dataset, start=42, count=4096 )
+```
+
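+A simplified sketch of the pluggable interface behind those calls; backends
+(disk, S3, ...) implement these methods so callers never touch file paths:
+
+```python
+class ObjectStore(object):
+    """Illustrative subset of the backend interface."""
+    def exists(self, obj):
+        raise NotImplementedError()
+    def update_from_file(self, obj, file_name=None):
+        raise NotImplementedError()
+    def get_data(self, obj, start=0, count=-1):
+        raise NotImplementedError()
+```
+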
+---
+
+background-image: url(images/objectstore.plantuml.svg)
+
+---
+
+### Visualization Plugins
+
+Adding new visualizations to a Galaxy instance
+
+- Configuration file (XML)
+- Base template (Mako or JavaScript)
+- Additional static data if needed (CSS, JS, …)
+
+---
+
+class: smaller
+
+```xml
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="Charts">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.CSV</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <entry_point entry_point_type="mako">charts.mako</entry_point>
+</visualization>
+```
+
+---
+
+### Visualization Examples
+
+All in `config/plugins/visualizations`:
+
+- `csg` - Chemical structure viewer
+- `graphviz` - Visualize graph data using [cytoscape.js](http://www.cytoscape.org/)
+- `charts` - A more elaborate example that builds on more Galaxy abstractions.
+- `trackster` - Genome browser, deeply tied to Galaxy internals.
+
+---
+
+### Data Providers
+
+Provide efficient access to data for visualizations & the API
+
+The framework provides a direct link to read the raw dataset,
+or data providers can adapt it
+
+In the config, assert that a visualization requires a given type of data provider
+
+Data providers process data before sending it to the browser - slice, filter,
+reformat, ... (see the sketch below)
+
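+A toy sketch of the idea only - not the real data provider API, just the
+slice/filter pattern it implements:
+
+```python
+def column_provider(dataset_path, indices, limit=None):
+    """Stream selected columns from a tabular dataset, up to `limit` rows."""
+    with open(dataset_path) as fh:
+        for i, line in enumerate(fh):
+            if limit is not None and i >= limit:
+                break
+            fields = line.rstrip('\n').split('\t')
+            yield [fields[j] for j in indices]
+```
+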
+---
+
+### Interactive Environments
+
+Similar to visualizations: a config file and a template
+
+Within the base template, launch a Docker container running a web-accessible
+process
+
+Build a UI that accesses that process through a proxy
+
+---
+
+### Interactive Environments - Examples
+
+All in `config/plugins/interactive_environments`:
+
+- `jupyter`
+- `rstudio`
+- `phinch`
+- `bam_iobio`
+
+---
+
+### Managers
+
+High-level business logic that ties all of these components together.
+
+Controllers should ideally be thin wrappers around actions defined in managers.
+
+Whenever a model operation requires more than just the database, it should be
+defined in a manager instead of on the model (see the sketch below).
+
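+A minimal sketch of the pattern (invented names; not Galaxy's actual
+HistoryManager):
+
+```python
+class HistoryManager(object):
+    def __init__(self, sa_session, object_store):
+        self.sa_session = sa_session
+        self.object_store = object_store
+
+    def purge(self, history):
+        # More than just the database: flag the rows, then delete
+        # the backing files through the object store.
+        for hda in history.datasets:
+            hda.purged = True
+            self.object_store.delete(hda.dataset)
+        history.purged = True
+        self.sa_session.flush()
+```
+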
+---
+
+## Client Architecture
+
+---
+
+### Client Directories
+
+- Source stylesheets and JavaScript in `client/galaxy/{style|scripts}`
+- "Packed" scripts served by Galaxy stored in `static/{style|scripts}`
+  - webpack builds these "compiled" artifacts
+
+Upshot - modify files in `client` and rebuild with `make client` before
+deployment.
+
+---
+
+class: normal
+
+### Building the Client - Makefile Targets
+
+```
+client: grunt style ## Rebuild all client-side artifacts
+
+grunt: npm-deps ## Calls out to Grunt to build client
+  cd client && node_modules/grunt-cli/bin/grunt
+
+style: npm-deps ## Calls the style task of Grunt
+  cd client && node_modules/grunt-cli/bin/grunt style
+
+npm-deps: ## Install NodeJS dependencies.
+  cd client && npm install
+```
+
+---
+
+### grunt
+
+Build tool for node/JavaScript, tasks in `client/Gruntfile.js`. Default task is
+
+.smaller[```grunt.registerTask( 'default', [ 'check-modules', 'uglify', 'webpack' ] );```]
+
+- `check-modules` Verifies node dependencies are correct and exact.
+- [`uglify`](https://github.com/mishoo/UglifyJS) Compresses JavaScript modules in `client`, moves them to `static`, and creates source maps.
+   - JavaScript loads much faster but is difficult to debug by default
+   - Source maps re-enable proper stack traces.
+- `webpack` Bundles modules together into a single JavaScript file - quickly loadable.
+
+---
+
+### JavaScript Modules - The Problem
+
+From http://requirejs.org/docs/why.html:
+
+- Web sites are turning into Web apps
+- Code complexity grows as the site gets bigger
+- Assembly gets harder
+- Developer wants discrete JS files/modules
+- Deployment wants optimized code in just one or a few HTTP calls
+
+---
+
+### JavaScript Modules - The Solution
+
+From http://requirejs.org/docs/why.html:
+
+ - Some sort of #include/import/require
+ - Ability to load nested dependencies
+ - Ease of use for developer but then backed by an optimization tool that helps deployment
+
+RequireJS is an implementation of AMD.
+
+
+---
+
+class: normal
+
+### JavaScript Modules - Galaxy AMD Example
+
+```javascript
+/**
+    This is the workflow tool form.
+*/
+define(['utils/utils', 'mvc/tool/tool-form-base'],
+    function(Utils, ToolFormBase) {
+
+    // create form view
+    var View = ToolFormBase.extend({
+      ...
+    });
+
+    return {
+        View: View
+    };
+});
+```
+
+---
+
+class: white
+![What is Webpack](images/what-is-webpack.svg)
+
+---
+
+### webpack in Galaxy
+
+- Turns Galaxy modules into an "app".
+- Builds two bundles currently - a common set of libraries and an analysis "app".
+- https://github.com/galaxyproject/galaxy/issues/1041
+- https://github.com/galaxyproject/galaxy/pull/1144
+
+---
+
+class: white
+![Webpack in Action](images/jsload.png)
+
+---
+
+### Stylesheets
+
+- Galaxy uses the less CSS preprocessor - http://lesscss.org/
+- Rebuild style with `make style`
+- Less files in `client/galaxy/style/less`
+- Build happens with grunt recipe in `client/grunt-tasks/style.js`
+
+---
+
+## Dependencies
+
+---
+
+### Dependencies - Python
+
+`scripts/common_startup.sh` sets up a `virtualenv` with required dependencies in `$GALAXY_ROOT/.venv` (or `$GALAXY_VIRTUAL_ENV` if set).
+
+- Check for an existing virtual environment; if it doesn't exist, check for `virtualenv`.
+- If `virtualenv` exists, use it. Otherwise download it as a script and set up a virtual environment using it.
+- `. "$GALAXY_VIRTUAL_ENV/bin/activate"`
+- Upgrade to latest `pip` to allow use of binary wheels.
+- `pip install -r requirements.txt --index-url https://wheels.galaxyproject.org/simple`
+- Install dozens of dependencies.
+
+---
+
+### Dependencies - JavaScript
+
+These come bundled with Galaxy, so do not need to be fetched at runtime.
+
+- Dependencies are defined in `client/bower.json`.
+- Bower (https://bower.io/) is used to re-fetch these.
+- `cd client; grunt install-libs`
+
+---
+
+## Galaxy Startup Process
+
+---
+
+class: normal
+
+### Cloning Galaxy
+
+```
+$ git clone https://github.com/galaxyproject/galaxy.git galaxy
+Cloning into 'galaxy'...
+remote: Counting objects: 173809, done.
+remote: Total 173809 (delta 0), reused 0 (delta 0), pack-reused 173809
+Receiving objects: 100% (173809/173809), 55.18 MiB | 11.08 MiB/s, done.
+Resolving deltas: 100% (137885/137885), done.
+Checking connectivity... done.
+$ cd galaxy
+$ git checkout -b master origin/master
+Branch master set up to track remote branch master from origin.
+Switched to a new branch 'master'
+$ sh run.sh
+```
+
+---
+
+class: normal
+
+### Copying Configs
+
+```
+$ sh run.sh
+Initializing config/migrated_tools_conf.xml from migrated_tools_conf.xml.sample
+Initializing config/shed_tool_conf.xml from shed_tool_conf.xml.sample
+Initializing config/shed_tool_data_table_conf.xml from shed_tool_data_table_conf.xml.sample
+Initializing config/shed_data_manager_conf.xml from shed_data_manager_conf.xml.sample
+Initializing tool-data/shared/ucsc/builds.txt from builds.txt.sample
+Initializing tool-data/shared/ucsc/manual_builds.txt from manual_builds.txt.sample
+Initializing tool-data/shared/ucsc/ucsc_build_sites.txt from ucsc_build_sites.txt.sample
+Initializing tool-data/shared/igv/igv_build_sites.txt from igv_build_sites.txt.sample
+Initializing tool-data/shared/rviewer/rviewer_build_sites.txt from rviewer_build_sites.txt.sample
+Initializing static/welcome.html from welcome.html.sample
+```
+
+---
+
+### Setting up `.venv` and `pip`
+
+```
+Using real prefix '/usr'
+New python executable in .venv/bin/python
+Installing setuptools, pip, wheel...done.
+Activating virtualenv at .venv
+Collecting pip>=8.1
+  Using cached pip-8.1.2-py2.py3-none-any.whl
+Installing collected packages: pip
+  Found existing installation: pip 7.1.2
+    Uninstalling pip-7.1.2:
+      Successfully uninstalled pip-7.1.2
+Successfully installed pip-8.1.2
+```
+
+---
+
+class: normal
+
+### Installing Dependencies
+
+.code[```
+Collecting bx-python==0.7.3 (from -r requirements.txt (line 2))
+  Downloading https://wheels.galaxyproject.org/packages/bx_python-0.7.3-cp27-cp27mu-manylinux1_x86_64.whl (2.1MB)
+Collecting MarkupSafe==0.23 (from -r requirements.txt (line 3))
+  Downloading https://wheels.galaxyproject.org/packages/MarkupSafe-0.23-cp27-cp27mu-manylinux1_x86_64.whl
+Collecting PyYAML==3.11 (from -r requirements.txt (line 4))
+  Downloading https://wheels.galaxyproject.org/packages/PyYAML-3.11-cp27-cp27mu-manylinux1_x86_64.whl (367kB)
+Collecting SQLAlchemy==1.0.8 (from -r requirements.txt (line 5))
+  Downloading https://wheels.galaxyproject.org/packages/SQLAlchemy-1.0.8-cp27-cp27mu-manylinux1_x86_64.whl (1.0MB)
+Collecting mercurial==3.7.3 (from -r requirements.txt (line 6))
+  Downloading https://wheels.galaxyproject.org/packages/mercurial-3.7.3-cp27-cp27mu-manylinux1_x86_64.whl (1.5MB)
+...
+...
+Building wheels for collected packages: repoze.lru
+  Running setup.py bdist_wheel for repoze.lru: started
+  Running setup.py bdist_wheel for repoze.lru: finished with status 'done'
+  Stored in directory: /home/john/.cache/pip/wheels/b2/cd/b3/7e24400bff83325a01d492940eff6e9579f553f33348323d79
+Successfully built repoze.lru
+Installing collected packages: bx-python, MarkupSafe, PyYAML, SQLAlchemy, mercurial, numpy, pycrypto, six, Paste, PasteDeploy, docutils, wchartype, repoze.lru, Routes, WebOb, WebHelpers, Mako, pytz, Babel, Beaker, dictobj, nose, Parsley, Whoosh, Markdown, Cheetah, requests, boto, requests-toolbelt, bioblend, anyjson, amqp, kombu, psutil, PasteScript, pulsar-galaxy-lib, sqlparse, pbr, decorator, Tempita, sqlalchemy-migrate, pyparsing, svgwrite, ecdsa, paramiko, Fabric, pysam
+Successfully installed Babel-2.0 Beaker-1.7.0 Cheetah-2.4.4 Fabric-1.10.2 Mako-1.0.2 Markdown-2.6.3 MarkupSafe-0.23 Parsley-1.3 Paste-2.0.2 PasteDeploy-1.5.2 PasteScript-2.0.2 PyYAML-3.11 Routes-2.2 SQLAlchemy-1.0.8 Tempita-0.5.3.dev0 WebHelpers-1.3 WebOb-1.4.1 Whoosh-2.7.4 amqp-1.4.8 anyjson-0.3.3 bioblend-0.7.0 boto-2.38.0 bx-python-0.7.3 decorator-4.0.2 dictobj-0.3.1 docutils-0.12 ecdsa-0.13 kombu-3.0.30 mercurial-3.7.3 nose-1.3.7 numpy-1.9.2 paramiko-1.15.2 pbr-1.8.0 psutil-4.1.0 pul [...]
+```]
+
+---
+
+class: smaller
+
+### Initial Debugging as App Starts
+
+.code[```
+Activating virtualenv at .venv
+DEBUG:galaxy.app:python path is: /home/john/workspace/galaxy-clean/scripts,
+/home/john/workspace/galaxy-clean/lib, /home/john/workspace/galaxy-clean/.venv/lib/python2.7,/home/john/workspace/galaxy-clean/.venv/lib/python2.7/plat-x86_64-linux-gnu,
+/home/john/workspace/galaxy-clean/.venv/lib/python2.7/lib-tk, /home/john/workspace/galaxy-clean/.venv/lib/python2.7/lib-old, /home/john/workspace/galaxy-clean/.venv/lib/python2.7/lib-dynload,
+/usr/lib/python2.7, /usr/lib/python2.7/plat-x86_64-linux-gnu, /usr/lib/python2.7/lib-tk, /home/john/workspace/galaxy-clean/.venv/local/lib/python2.7/site-packages
+INFO:galaxy.config:Logging at '10' level to 'stdout'
+galaxy.queue_worker INFO 2016-06-23 19:11:51,925 Initializing main Galaxy Queue Worker on sqlalchemy+sqlite:///./database/control.sqlite?isolation_level=IMMEDIATE
+tool_shed.tool_shed_registry DEBUG 2016-06-23 19:11:51,951 Loading references to tool sheds from ./config/tool_sheds_conf.xml.sample
+tool_shed.tool_shed_registry DEBUG 2016-06-23 19:11:51,951 Loaded reference to tool shed: Galaxy Main Tool Shed
+galaxy.app DEBUG 2016-06-23 19:11:51,956 Using "galaxy.ini" config file:
+/home/john/workspace/galaxy-clean/config/galaxy.ini.sample
+```]
+
+---
+
+class: normal
+
+### Database Migrations
+
+.code[```
+migrate.versioning.repository DEBUG 2016-06-23 19:11:51,993 Loading repository lib/galaxy/model/migrate...
+migrate.versioning.script.base DEBUG 2016-06-23 19:11:51,994 Loading script lib/galaxy/model/migrate/versions/0001_initial_tables.py...
+migrate.versioning.script.base DEBUG 2016-06-23 19:11:51,994 Script lib/galaxy/model/migrate/versions/0001_initial_tables.py loaded successfully
+migrate.versioning.script.base DEBUG 2016-06-23 19:11:51,994 Loading script lib/galaxy/model/migrate/versions/0002_metadata_file_table.py...
+migrate.versioning.script.base DEBUG 2016-06-23 19:11:52,009 Loading script lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py...
+...
+galaxy.model.migrate.check INFO 2016-06-23 19:13:32,812 Migrating 128 -> 129... 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:33,436 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:33,437 Migration script to allow invalidation of job external output metadata temp files
+galaxy.model.migrate.check INFO 2016-06-23 19:13:33,437 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:33,437 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:33,437 Migrating 129 -> 130... 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:34,325 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:34,325 Migration script to change the value column of user_preference from varchar to text.
+galaxy.model.migrate.check INFO 2016-06-23 19:13:34,325 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:34,325 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:34,326 Migrating 130 -> 131... 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:35,633 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:35,633 Migration script to support subworkflows and workflow request input parameters
+galaxy.model.migrate.check INFO 2016-06-23 19:13:35,633 
+galaxy.model.migrate.check INFO 2016-06-23 19:13:35,633 
+```]
+
+---
+
+Everything after this point happens on every startup
+
+---
+
+class: smaller
+
+.code[```
+migrate.versioning.repository DEBUG 2016-06-23 19:13:35,635 Loading repository lib/tool_shed/galaxy_install/migrate...
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,635 Loading script lib/tool_shed/galaxy_install/migrate/versions/0001_tools.py...
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Script lib/tool_shed/galaxy_install/migrate/versions/0001_tools.py loaded successfully
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Loading script lib/tool_shed/galaxy_install/migrate/versions/0002_tools.py...
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Script lib/tool_shed/galaxy_install/migrate/versions/0002_tools.py loaded successfully
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Loading script lib/tool_shed/galaxy_install/migrate/versions/0003_tools.py...
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Script lib/tool_shed/galaxy_install/migrate/versions/0003_tools.py loaded successfully
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Loading script lib/tool_shed/galaxy_install/migrate/versions/0004_tools.py...
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Script lib/tool_shed/galaxy_install/migrate/versions/0004_tools.py loaded successfully
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Loading script lib/tool_shed/galaxy_install/migrate/versions/0005_tools.py...
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,636 Script lib/tool_shed/galaxy_install/migrate/versions/0005_tools.py loaded successfully
+...
+migrate.versioning.script.base DEBUG 2016-06-23 19:13:35,637 Script lib/tool_shed/galaxy_install/migrate/versions/0012_tools.py loaded successfully
+migrate.versioning.repository DEBUG 2016-06-23 19:13:35,637 Repository lib/tool_shed/galaxy_install/migrate loaded successfully
+tool_shed.galaxy_install.migrate.check DEBUG 2016-06-23 19:13:35,660 The main Galaxy tool shed is not currently available, so skipped tool migration 1 until next server startup
+galaxy.config INFO 2016-06-23 19:13:35,679 Install database targetting Galaxy's database configuration.
+```]
+
+---
+
+class: smaller
+
+.code[```
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,748 Loading datatypes from ./config/datatypes_conf.xml.sample
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,749 Retrieved datatype module galaxy.datatypes.binary:Ab1 from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,750 Retrieved datatype module galaxy.datatypes.assembly:Amos from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,751 Retrieved datatype module galaxy.datatypes.text:Arff from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,751 Retrieved datatype module galaxy.datatypes.data:GenericAsn1 from the datatype registry.
+...
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,822 Retrieved datatype module galaxy.datatypes.mothur:SquareDistanceMatrix from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,822 Retrieved datatype module galaxy.datatypes.mothur:LowerTriangleDistanceMatrix from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,822 Retrieved datatype module galaxy.datatypes.mothur:RefTaxonomy from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,822 Retrieved datatype module galaxy.datatypes.mothur:RefTaxonomy from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,823 Retrieved datatype module galaxy.datatypes.mothur:RefTaxonomy from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,823 Retrieved datatype module galaxy.datatypes.mothur:ConsensusTaxonomy from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,823 Retrieved datatype module galaxy.datatypes.mothur:TaxonomySummary from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,823 Retrieved datatype module galaxy.datatypes.mothur:Frequency from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,823 Retrieved datatype module galaxy.datatypes.mothur:Quantile from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,823 Retrieved datatype module galaxy.datatypes.mothur:Quantile from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,823 Retrieved datatype module galaxy.datatypes.mothur:Quantile from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,823 Retrieved datatype module galaxy.datatypes.mothur:Quantile from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Retrieved datatype module galaxy.datatypes.mothur:Axes from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Retrieved datatype module galaxy.datatypes.mothur:SffFlow from the datatype registry.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Retrieved datatype module galaxy.datatypes.mothur:CountTable from the datatype registry.
+```]
+
+---
+
+class: smaller
+
+.code[```
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Loaded sniffer for datatype 'galaxy.datatypes.mothur:Sabund'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Loaded sniffer for datatype 'galaxy.datatypes.mothur:Otu'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Loaded sniffer for datatype 'galaxy.datatypes.mothur:GroupAbund'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Loaded sniffer for datatype 'galaxy.datatypes.mothur:SecondaryStructureMap'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Loaded sniffer for datatype 'galaxy.datatypes.mothur:LowerTriangleDistanceMatrix'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,824 Loaded sniffer for datatype 'galaxy.datatypes.mothur:SquareDistanceMatrix'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.mothur:PairwiseDistanceMatrix'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.mothur:Oligos'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.mothur:Quantile'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.mothur:Frequency'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.mothur:LaneMask'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.mothur:RefTaxonomy'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.mothur:Axes'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.constructive_solid_geometry:PlyAscii'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.constructive_solid_geometry:PlyBinary'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.constructive_solid_geometry:VtkAscii'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.constructive_solid_geometry:VtkBinary'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.interval:ScIdx'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,825 Loaded sniffer for datatype 'galaxy.datatypes.tabular:Vcf'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,826 Loaded sniffer for datatype 'galaxy.datatypes.binary:TwoBit'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,826 Loaded sniffer for datatype 'galaxy.datatypes.binary:GeminiSQLite'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,826 Loaded sniffer for datatype 'galaxy.datatypes.binary:MzSQlite'
+...
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,832 Loaded sniffer for datatype 'galaxy.datatypes.images:Psd'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,832 Loaded sniffer for datatype 'galaxy.datatypes.images:Xbm'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,832 Loaded sniffer for datatype 'galaxy.datatypes.images:Rgb'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,832 Loaded sniffer for datatype 'galaxy.datatypes.images:Pbm'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,832 Loaded sniffer for datatype 'galaxy.datatypes.images:Pgm'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,832 Loaded sniffer for datatype 'galaxy.datatypes.images:Xpm'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded sniffer for datatype 'galaxy.datatypes.images:Eps'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded sniffer for datatype 'galaxy.datatypes.images:Rast'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded sniffer for datatype 'galaxy.datatypes.binary:OxliCountGraph'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded sniffer for datatype 'galaxy.datatypes.binary:OxliNodeGraph'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded sniffer for datatype 'galaxy.datatypes.binary:OxliTagSet'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded sniffer for datatype 'galaxy.datatypes.binary:OxliStopTags'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded sniffer for datatype 'galaxy.datatypes.binary:OxliSubset'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded sniffer for datatype 'galaxy.datatypes.binary:OxliGraphLabels'
+```
+]
+
+---
+
+class: smaller
+
+.code[```
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,833 Loaded build site 'ucsc': tool-data/shared/ucsc/ucsc_build_sites.txt with display sites: main,test,archaea,ucla
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,834 Loaded build site 'gbrowse': tool-data/shared/gbrowse/gbrowse_build_sites.txt with display sites: modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,834 Loaded build site 'ensembl': tool-data/shared/ensembl/ensembl_sites.txt
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,834 Loaded build site 'ensembl_data_url': tool-data/shared/ensembl/ensembl_sites_data_URL.txt
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,834 Loaded build site 'igv': tool-data/shared/igv/igv_build_sites.txt
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:35,834 Loaded build site 'rviewer': tool-data/shared/rviewer/rviewer_build_sites.txt
+```]
+
+---
+
+class: smaller
+
+.code[```
+galaxy.tools.data INFO 2016-06-23 19:13:35,871 Could not find tool data tool-data/all_fasta.loc, reading sample
+galaxy.tools.data DEBUG 2016-06-23 19:13:35,871 Loaded tool data table 'all_fasta'
+galaxy.tools.data INFO 2016-06-23 19:13:35,871 Could not find tool data tool-data/bfast_indexes.loc, reading sample
+galaxy.tools.data DEBUG 2016-06-23 19:13:35,871 Loaded tool data table 'bfast_indexes'
+galaxy.tools.data WARNING 2016-06-23 19:13:35,871 Cannot find index file 'tool-data/blastdb_p.loc' for tool data table 'blastdb_p'
+...
+galaxy.tools.data DEBUG 2016-06-23 19:13:36,210 Loaded tool data table 'vcf_iobio'
+galaxy.tools.data INFO 2016-06-23 19:13:36,211 Could not find tool data tool-data/biom_simple_display.loc, reading sample
+galaxy.tools.data DEBUG 2016-06-23 19:13:36,211 Loaded tool data table 'biom_simple_display'
+
+```]
+
+---
+
+class: normal
+
+### Job Configuration, Citation Cache
+
+.code[```
+galaxy.jobs DEBUG 2016-06-23 19:13:36,233 Loading job configuration from /home/john/workspace/galaxy-clean/config/galaxy.ini.sample
+galaxy.jobs DEBUG 2016-06-23 19:13:36,233 Done loading job configuration
+beaker.container DEBUG 2016-06-23 19:13:36,278 data file ./database/citations/data/container_file/4/48/48e563f148dc04d8b31c94878c138019862e580d.cache
+```]
+
+---
+
+class: smaller
+
+### Load Toolbox
+
+.code[```
+galaxy.tools.toolbox.base INFO 2016-06-23 19:13:36,279 Parsing the tool configuration ./config/tool_conf.xml.sample
+galaxy.tools.toolbox.base DEBUG 2016-06-23 19:13:36,291 Loaded tool id: upload1, version: 1.1.4 into tool panel..
+galaxy.tools.toolbox.base DEBUG 2016-06-23 19:13:36,294 Loaded tool id: ucsc_table_direct1, version: 1.0.0 into tool panel..
+galaxy.tools.toolbox.base DEBUG 2016-06-23 19:13:36,296 Loaded tool id: ucsc_table_direct_test1, version: 1.0.0 into tool panel..
+galaxy.tools.toolbox.base DEBUG 2016-06-23 19:13:36,298 Loaded tool id: ucsc_table_direct_archaea1, version: 1.0.0 into tool panel..
+...
+galaxy.tools.toolbox.base DEBUG 2016-06-23 19:13:36,496 Loaded tool id: vcf_to_maf_customtrack1, version: 1.0.0 into tool panel..
+galaxy.tools.toolbox.base INFO 2016-06-23 19:13:36,497 Parsing the tool configuration ./config/shed_tool_conf.xml
+galaxy.tools.toolbox.base INFO 2016-06-23 19:13:36,497 Parsing the tool configuration ./config/migrated_tools_conf.xml
+```]
+
+
+---
+
+class: smaller
+
+### Tool Dependency Resolution and Indexing 
+
+.code[```
+galaxy.tools.deps WARNING 2016-06-23 19:13:36,498 Path './database/dependencies' does not exist, ignoring
+galaxy.tools.deps WARNING 2016-06-23 19:13:36,498 Path './database/dependencies' is not directory, ignoring
+galaxy.tools.deps DEBUG 2016-06-23 19:13:36,503 Unable to find config file './dependency_resolvers_conf.xml'
+galaxy.tools.search DEBUG 2016-06-23 19:13:36,560 Starting to build toolbox index.
+galaxy.tools.search DEBUG 2016-06-23 19:13:37,789 Toolbox index finished. It took: 0:00:01.229406
+```]
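+
+When the file is absent, Galaxy falls back to a default resolver chain. A minimal
+sketch of what a `dependency_resolvers_conf.xml` could contain (ordering illustrative,
+not taken from this log):
+
+.code[```
+<dependency_resolvers>
+  <!-- resolve via Tool Shed-installed packages first, then manual installs -->
+  <tool_shed_packages />
+  <galaxy_packages />
+</dependency_resolvers>
+```]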
+
+---
+
+class: smaller
+
+### Display Applications
+
+.code[```
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:37,795 Loaded display application 'ucsc_bam' for datatype 'bam', inherit=False.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:37,797 Loaded display application 'ensembl_bam' for datatype 'bam', inherit=False.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:37,833 Loaded display application 'igv_bam' for datatype 'bam', inherit=False.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:37,834 Loaded display application 'igb_bam' for datatype 'bam', inherit=False.
+...
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,002 Loaded display application 'igv_vcf' for datatype 'vcf_bgzip', inherit=False.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,003 Loaded display application 'biom_simple' for datatype 'biom1', inherit=False.
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,003 Adding inherited display application 'ensembl_gff' to datatype 'gtf'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,004 Adding inherited display application 'igv_gff' to datatype 'gtf'
+...
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,007 Adding inherited display application 'gbrowse_interval_as_bed' to datatype 'bed6'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,007 Adding inherited display application 'rviewer_interval' to datatype 'bed6'
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,007 Adding inherited display application 'igv_interval_as_bed' to datatype 'bed6'
+```]
+
+---
+
+class: smaller
+
+### Datatype Converters
+
+.code[```
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,010 Loaded converter: CONVERTER_Bam_Bai_0
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,011 Loaded converter: CONVERTER_bam_to_bigwig_0
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,012 Loaded converter: CONVERTER_bed_to_gff_0
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,012 Loaded converter: CONVERTER_bed_to_bgzip_0
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,013 Loaded converter: CONVERTER_bed_to_tabix_0
+...
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,096 Loaded converter: CONVERTER_cml_to_inchi
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,097 Loaded converter: CONVERTER_cml_to_sdf
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,098 Loaded converter: CONVERTER_cml_to_mol2
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,099 Loaded converter: CONVERTER_ref_to_seq_taxomony
+```]
+
+---
+
+class: normal
+
+### Special Tools
+
+.code[```
+galaxy.datatypes.registry DEBUG 2016-06-23 19:13:38,101 Loaded external metadata tool: __SET_METADATA__
+galaxy.tools.special_tools DEBUG 2016-06-23 19:13:38,107 Loaded history import tool: __IMPORT_HISTORY__
+galaxy.tools.special_tools DEBUG 2016-06-23 19:13:38,108 Loaded history export tool: __EXPORT_HISTORY__
+```]
+
+---
+
+class: normal
+
+### Visualization Plugins
+
+.code[```
+galaxy.web.base.pluginframework INFO 2016-06-23 19:13:38,109 VisualizationsRegistry, loaded plugin: charts
+galaxy.visualization.plugins.config_parser INFO 2016-06-23 19:13:38,110 Visualizations plugin disabled: Circster. Skipping...
+galaxy.visualization.plugins.config_parser INFO 2016-06-23 19:13:38,111 template syntax is deprecated: use entry_point instead
+galaxy.web.base.pluginframework INFO 2016-06-23 19:13:38,111 VisualizationsRegistry, loaded plugin: csg
+galaxy.visualization.plugins.config_parser INFO 2016-06-23 19:13:38,111 template syntax is deprecated: use entry_point instead
+galaxy.web.base.pluginframework INFO 2016-06-23 19:13:38,112 VisualizationsRegistry, loaded plugin: graphviz
+galaxy.web.base.pluginframework INFO 2016-06-23 19:13:38,112 VisualizationsRegistry, loaded plugin: phyloviz
+galaxy.web.base.pluginframework INFO 2016-06-23 19:13:38,113 VisualizationsRegistry, loaded plugin: scatterplot
+galaxy.visualization.plugins.config_parser INFO 2016-06-23 19:13:38,113 Visualizations plugin disabled: Sweepster. Skipping...
+galaxy.web.base.pluginframework INFO 2016-06-23 19:13:38,114 VisualizationsRegistry, loaded plugin: trackster
+```]
+
+---
+
+class: normal
+
+### Tours
+
+.code[```
+galaxy.tours INFO 2016-06-23 19:13:38,125 Loaded tour 'core.scratchbook'
+galaxy.tours INFO 2016-06-23 19:13:38,158 Loaded tour 'core.galaxy_ui'
+galaxy.tours INFO 2016-06-23 19:13:38,183 Loaded tour 'core.history'
+```]
+
+---
+
+class: normal
+
+### Job Handler and Runners
+
+.code[```
+galaxy.jobs.manager DEBUG 2016-06-23 19:13:38,196 Starting job handler
+galaxy.jobs INFO 2016-06-23 19:13:38,196 Handler 'main' will load all configured runner plugins
+galaxy.jobs.runners.state_handler_factory DEBUG 2016-06-23 19:13:38,198 Loaded 'failure' state handler from module galaxy.jobs.runners.state_handlers.resubmit
+galaxy.jobs.runners DEBUG 2016-06-23 19:13:38,198 Starting 5 LocalRunner workers
+galaxy.jobs DEBUG 2016-06-23 19:13:38,200 Loaded job runner 'galaxy.jobs.runners.local:LocalJobRunner' as 'local'
+galaxy.jobs DEBUG 2016-06-23 19:13:38,200 Legacy destination with id 'local:///', url 'local:///' converted, got params:
+galaxy.jobs.handler DEBUG 2016-06-23 19:13:38,200 Loaded job runners plugins: local
+galaxy.jobs.handler INFO 2016-06-23 19:13:38,200 job handler stop queue started
+galaxy.jobs.handler INFO 2016-06-23 19:13:38,222 job handler queue started
+```]
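+
+The wiring above (runner plugin, handler, destination) can also be spelled out in a
+`job_conf.xml`; a hedged sketch matching this local-only setup:
+
+.code[```
+<job_conf>
+    <plugins>
+        <!-- five workers matches the "Starting 5 LocalRunner workers" line above -->
+        <plugin id="local" type="runner"
+                load="galaxy.jobs.runners.local:LocalJobRunner" workers="5"/>
+    </plugins>
+    <handlers>
+        <handler id="main"/>
+    </handlers>
+    <destinations default="local">
+        <destination id="local" runner="local"/>
+    </destinations>
+</job_conf>
+```]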
+
+---
+
+class: normal
+
+### Ignore this...
+
+.code[```
+galaxy.sample_tracking.external_service_types DEBUG 2016-06-23 19:13:38,229 Loaded external_service_type: Simple unknown sequencer 1.0.0
+galaxy.sample_tracking.external_service_types DEBUG 2016-06-23 19:13:38,230 Loaded external_service_type: Applied Biosystems SOLiD 1.0.0
+```]
+
+---
+
+class: normal
+
+### Workflow Scheduler
+
+.code[```
+galaxy.workflow.scheduling_manager DEBUG 2016-06-23 19:13:38,254 Starting workflow schedulers
+```]
+
+---
+
+class: normal
+
+### Controllers
+
+.code[```
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,347 Enabling 'external_service' controller, class: ExternalService
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,347 Enabling 'requests_common' controller, class: RequestsCommon
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,375 Enabling 'library_common' controller, class: LibraryCommon
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,390 Enabling 'visualization' controller, class: VisualizationController
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,398 Enabling 'page' controller, class: PageController
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,400 Enabling 'async' controller, class: ASync
+...
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,728 Enabling 'history_content_tags' API controller, class: HistoryContentTagsController
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,728 Enabling 'history_tags' API controller, class: HistoryTagsController
+galaxy.web.framework.base DEBUG 2016-06-23 19:13:38,728 Enabling 'workflow_tags' API controller, class: WorkflowTagsController
+```]
+
+---
+
+class: normal
+
+### Middleware
+
+.code[```
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,036 Enabling 'httpexceptions' middleware
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,037 Enabling 'recursive' middleware
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,042 Enabling 'error' middleware
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,043 Enabling 'trans logger' middleware
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,044 Enabling 'x-forwarded-host' middleware
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,044 Enabling 'Request ID' middleware
+```]
+
+---
+
+class: normal
+
+### Static Paths for Viz
+
+.code[```
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,048 added url, path to static middleware: /plugins/visualizations/charts/static, ./config/plugins/visualizations/charts/static
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,048 added url, path to static middleware: /plugins/visualizations/csg/static, ./config/plugins/visualizations/csg/static
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,049 added url, path to static middleware: /plugins/visualizations/graphviz/static, ./config/plugins/visualizations/graphviz/static
+galaxy.webapps.galaxy.buildapp DEBUG 2016-06-23 19:13:39,049 added url, path to static middleware: /plugins/visualizations/scatterplot/static, ./config/plugins/visualizations/scatterplot/static
+```]
+
+---
+
+### It is Up!
+
+.code[```
+galaxy.queue_worker INFO 2016-06-23 19:13:39,049 Binding and starting galaxy control worker for main
+Starting server in PID 21102.
+serving on http://127.0.0.1:8080
+```]
+
+---
+
+## Production Galaxy - usegalaxy.org
+
+---
+
+class: centered
+
+.pull-left[
+#### Default
+
+SQLite
+
+Paste#http
+
+Single process
+
+Single host
+
+Local jobs
+
+]
+
+.pull-right[
+#### Production
+
+PostgreSQL
+
+uWSGI / nginx
+
+Multiple processes
+
+Multiple hosts
+
+Jobs across many clusters
+]
+
+*http://usegalaxy.org/production*
+
+---
+
+### postgres
+
+- Database server that scales well beyond the default SQLite (see the sketch below)
+- https://www.postgresql.org/
+- github.com/galaxyproject/usegalaxy-playbook -> `roles/galaxyprojectdotorg.postgresql`
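+
+Switching is a single `galaxy.ini` setting; a hedged sketch (user, password, host, and
+database name are placeholders):
+
+.code[```
+# SQLAlchemy connection string; credentials here are illustrative only
+database_connection = postgresql://galaxy:secret@localhost:5432/galaxy
+```]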
+
+---
+
+### nginx (or Apache)
+
+- Optimized servers for static content (see the stanza below)
+- https://www.nginx.com/resources/wiki/
+- github.com/galaxyproject/usegalaxy-playbook -> `templates/nginx/usegalaxy.j2`
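+
+A minimal illustrative stanza for Galaxy's static content (the paths are assumptions,
+not taken from the playbook):
+
+.code[```
+location /static {
+    # serve static assets directly, bypassing the Galaxy app
+    alias /srv/galaxy/static;
+    expires 24h;
+}
+```]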
+
+---
+
+### Multiple Processes
+
+Threads in Python are limited by the [GIL](https://wiki.python.org/moin/GlobalInterpreterLock).
+
+Running multiple Galaxy processes, with separate processes for web handling
+and job processing, works around this.
+
+This used to be an important detail, but uWSGI makes things much easier.
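+
+For example, a hedged `[uwsgi]` sketch for `galaxy.ini` (values illustrative):
+
+.code[```
+[uwsgi]
+# master process supervises the workers
+master = True
+# separate OS processes sidestep the GIL
+processes = 4
+threads = 2
+# nginx proxies requests to this socket
+socket = 127.0.0.1:4001
+```]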
+
+---
+
+![Cluster Support](images/cluster_support.svg)
+
+### Cluster Support
+
+---
+
+![usegalaxy.org web servers](images/usegalaxy_webservers.svg)
+
+---
+
+![usegalaxy.org servers](images/usegalaxyorg.svg)
+
+---
+
+## Q & A
+
+---
diff --git a/doc/source/slideshow/architecture/images/backbone-model-view.svg b/doc/source/slideshow/architecture/images/backbone-model-view.svg
new file mode 100644
index 0000000..38aa264
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/backbone-model-view.svg
@@ -0,0 +1,152 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Generator: Adobe Illustrator 14.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 43363)  -->
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
+	 width="890.447px" height="167.398px" viewBox="0 0 890.447 167.398" enable-background="new 0 0 890.447 167.398"
+	 xml:space="preserve">
+<path fill="#5C8BEE" d="M108.421,75.665h80.791v-10h-80.791c-9.799,0-17.771,7.971-17.771,17.771
+	c0,9.797,7.972,17.771,17.771,17.771h39.413v5.438l24.834-10.439l-24.834-10.438v5.438h-39.413c-4.285,0-7.771-3.479-7.771-7.771
+	C100.65,79.151,104.136,75.665,108.421,75.665z"/>
+<polygon fill="#5C8BEE" points="328.457,76.665 632.869,76.665 632.869,66.665 328.457,66.665 328.457,61.222 303.625,71.665 
+	328.457,82.106 "/>
+<rect x="478.624" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<rect x="503.624" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<rect x="578.623" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<polygon fill="#5C8BEE" points="603.037,88.222 603.037,93.665 602.623,93.665 602.623,103.664 603.037,103.664 603.037,109.105 
+	627.869,98.664 "/>
+<rect x="553.623" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<rect x="453.625" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<rect x="528.624" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<rect x="428.625" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<rect x="403.625" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<rect x="378.625" y="93.664" fill="#5C8BEE" width="15" height="10"/>
+<polygon fill="#5C8BEE" points="822.016,67.828 785.604,67.828 785.604,62.385 760.771,72.828 785.604,83.27 785.604,77.828 
+	822.016,77.828 "/>
+<g>
+	<path d="M684.365,113.559l-9.545-53.396h8.439l6.693,41.621l6.75-41.621h8.445l-9.717,53.396H684.365L684.365,113.559z"/>
+</g>
+<g>
+	<path d="M231.792,113.559h-8.055V60.12h11.559l7.506,38.386l7.205-38.386h11.104v53.438h-8.054v-33.58l-6.988,33.58h-6.477
+		l-7.799-33.494L231.792,113.559L231.792,113.559z"/>
+</g>
+<path d="M689.988,146.9c-33.771,0-61.229-27.465-61.229-61.229c0-33.759,27.467-61.225,61.229-61.225
+	c33.762,0,61.227,27.465,61.227,61.225C751.215,119.438,723.748,146.9,689.988,146.9L689.988,146.9z M689.988,32.452
+	c-29.354,0-53.229,23.876-53.229,53.224c0,29.354,23.877,53.229,53.229,53.229c29.35,0,53.227-23.877,53.227-53.229
+	C743.215,56.329,719.338,32.452,689.988,32.452L689.988,32.452z"/>
+<path d="M242.424,146.9c-33.76,0-61.225-27.465-61.225-61.229c0-33.759,27.465-61.225,61.225-61.225
+	c33.76,0,61.225,27.465,61.225,61.225C303.649,119.438,276.184,146.9,242.424,146.9L242.424,146.9z M242.424,32.452
+	c-29.349,0-53.225,23.876-53.225,53.224c0,29.354,23.876,53.229,53.225,53.229c29.349,0,53.225-23.877,53.225-53.229
+	C295.649,56.329,271.773,32.452,242.424,32.452L242.424,32.452z"/>
+<g>
+	<path d="M38.009,99.439c0,2.115-0.819,3.922-2.458,5.422c-1.639,1.5-3.616,2.25-5.933,2.25h-8.39V75.049h8.391
+		c2.335,0,4.317,0.75,5.947,2.25c1.629,1.5,2.444,3.309,2.444,5.396L38.009,99.439L38.009,99.439z M26.567,102.33h3.221
+		c0.848,0,1.563-0.271,2.147-0.831c0.583-0.554,0.876-1.205,0.876-1.955V82.772c0-0.771-0.297-1.422-0.89-1.979
+		c-0.593-0.546-1.304-0.812-2.133-0.812h-3.221V102.33z"/>
+	<path d="M41.682,107.111V75.049h7.995c2.109,0,4.012,0.503,5.707,1.518c1.45,0.854,2.486,2.174,3.108,3.963
+		C58.831,81.482,59,82.617,59,83.923c0,2.146-0.631,3.869-1.893,5.165c-0.528,0.545-1.149,0.963-1.865,1.252
+		c1.187,0.408,2.193,1.229,3.023,2.438c0.545,0.812,0.951,1.875,1.215,3.17c0.131,0.666,0.197,1.396,0.197,2.227
+		c0,2.027-0.424,3.75-1.271,5.165c-0.66,1.106-1.593,1.995-2.797,2.659c-1.356,0.75-2.703,1.125-4.04,1.125L41.682,107.111
+		L41.682,107.111L41.682,107.111z M47.445,88.139h2.232c1.657,0,2.769-0.73,3.333-2.199c0.207-0.545,0.311-1.219,0.311-2.021
+		c0-1.33-0.358-2.335-1.074-3.018c-0.66-0.631-1.516-0.946-2.571-0.946h-2.232L47.445,88.139L47.445,88.139z M47.445,102.33h2.232
+		c1.808,0,3.061-0.844,3.757-2.53c0.245-0.569,0.367-1.233,0.367-1.983c0-1.791-0.377-3.104-1.13-3.912
+		c-0.678-0.75-1.677-1.125-2.995-1.125h-2.232L47.445,102.33L47.445,102.33z"/>
+</g>
+<path fill="#231F20" d="M39.585,40.734C18.575,40.734,5,46.154,5,54.541v49.711c0,7.872,13.223,16.368,34.585,16.368
+	s34.584-8.496,34.584-16.368V54.541C74.169,46.154,60.596,40.734,39.585,40.734z M39.591,46.734c20.195,0,28.578,5.084,28.578,7.807
+	c0,2.726-8.384,7.812-28.584,7.812S11,57.265,11,54.541C11,51.819,19.385,46.734,39.591,46.734z M39.585,114.62
+	C21.089,114.62,11,107.77,11,104.252V62.95c5.911,3.42,15.886,5.403,28.585,5.403c12.698,0,22.673-1.983,28.584-5.403v41.302
+	C68.169,107.77,58.08,114.62,39.585,114.62z"/>
+<path fill="#5C8BEE" d="M303.081,115.44l8.812,4.725c3.835-8.158,6.291-17.09,7.024-26.5H308.89
+	C308.203,101.376,306.181,108.706,303.081,115.44z"/>
+<path fill="#5C8BEE" d="M322.146,125.657l11.023,5.908c5.629-11.577,9.105-24.383,9.909-37.9h-12.524
+	C329.788,105.052,326.844,115.852,322.146,125.657z"/>
+<path fill="#5C8BEE" d="M343.369,137.03l13.229,7.088c7.619-15.333,12.24-32.406,13.081-50.453h-15.016
+	C353.841,109.154,349.863,123.818,343.369,137.03z"/>
+<path d="M864.363,86.665c0,0,8.65-9.795,8.65-27.766c0-10.746-8.844-19.493-19.488-19.493c-10.844,0-19.496,8.844-19.496,19.493
+	c0,17.971,8.652,27.766,8.652,27.766c0,13.501-21.68,9.318-21.68,24.342v4.374c0,1.236,0.949,2.188,2.188,2.188h60.76
+	c1.235,0,2.186-0.952,2.186-2.188v-4.374C886.041,95.793,864.363,100.166,864.363,86.665z"/>
+<g>
+	<path fill="#BFBFBF" d="M123.738,44.126c0.02,1.304-0.318,2.416-1.014,3.341c-0.458,0.626-1.104,1.069-1.939,1.328
+		c-0.447,0.14-0.969,0.208-1.566,0.208c-1.104,0-2.019-0.272-2.744-0.819c-0.607-0.447-1.086-1.057-1.439-1.827
+		c-0.353-0.771-0.559-1.653-0.619-2.647l2.685-0.193c0.119,1.09,0.407,1.879,0.865,2.368c0.338,0.37,0.726,0.546,1.164,0.525
+		c0.616-0.02,1.108-0.323,1.477-0.911c0.188-0.289,0.283-0.701,0.283-1.239c0-0.775-0.353-1.548-1.059-2.313
+		c-0.557-0.527-1.392-1.318-2.505-2.374c-0.935-0.906-1.596-1.717-1.984-2.434c-0.417-0.807-0.626-1.683-0.626-2.628
+		c0-1.701,0.572-2.99,1.715-3.866c0.706-0.527,1.581-0.791,2.625-0.791c1.004,0,1.864,0.224,2.58,0.671
+		c0.557,0.348,1.007,0.835,1.35,1.462c0.343,0.626,0.549,1.347,0.619,2.163l-2.699,0.492c-0.08-0.767-0.298-1.362-0.656-1.79
+		c-0.259-0.309-0.632-0.462-1.119-0.462c-0.517,0-0.91,0.229-1.178,0.686c-0.219,0.368-0.328,0.825-0.328,1.372
+		c0,0.854,0.368,1.725,1.104,2.61c0.278,0.338,0.696,0.735,1.253,1.192c0.656,0.547,1.089,0.931,1.297,1.148
+		c0.696,0.695,1.233,1.382,1.611,2.058c0.179,0.318,0.323,0.612,0.433,0.88C123.589,43.002,123.729,43.599,123.738,44.126z"/>
+	<path fill="#BFBFBF" d="M128.481,40.949l-3.773-10.873h3.103l2.088,6.711l2.073-6.711h3.117L131.3,40.949v7.83h-2.819V40.949z"/>
+	<path fill="#BFBFBF" d="M140.505,30.076l3.741,12.57v-12.57h2.819v18.703h-3.028l-3.877-11.978v11.978h-2.819V30.076H140.505z"/>
+	<path fill="#BFBFBF" d="M154.015,49.018c-1.243,0-2.299-0.436-3.169-1.306c-0.87-0.87-1.305-1.921-1.305-3.153V34.357
+		c0-1.243,0.438-2.299,1.312-3.169s1.929-1.306,3.162-1.306c1.243,0,2.297,0.438,3.162,1.312c0.865,0.875,1.297,1.929,1.297,3.162
+		v2.133h-2.923v-2.193c0-0.446-0.159-0.83-0.477-1.147c-0.318-0.318-0.701-0.478-1.148-0.478c-0.448,0-0.828,0.159-1.141,0.478
+		c-0.313,0.317-0.47,0.701-0.47,1.147v10.232c0,0.447,0.157,0.828,0.47,1.141c0.313,0.313,0.693,0.471,1.141,0.471
+		c0.447,0,0.83-0.156,1.148-0.471c0.318-0.312,0.477-0.692,0.477-1.141v-2.581h2.923v2.61c0,1.242-0.438,2.297-1.312,3.161
+		C156.287,48.585,155.238,49.018,154.015,49.018z"/>
+</g>
+<g>
+	<path fill="#BFBFBF" d="M593.451,44.304c0,1.232-0.433,2.286-1.297,3.161c-0.865,0.875-1.909,1.312-3.133,1.312h-4.431V30.076
+		h4.431c1.232,0,2.279,0.438,3.141,1.312c0.859,0.875,1.289,1.924,1.289,3.146V44.304z M587.412,45.99h1.699
+		c0.446,0,0.825-0.162,1.134-0.485c0.308-0.322,0.462-0.703,0.462-1.141V34.58c0-0.448-0.156-0.83-0.469-1.147
+		c-0.314-0.318-0.689-0.479-1.127-0.479h-1.699V45.99L587.412,45.99z"/>
+	<path fill="#BFBFBF" d="M599.01,44.484l-0.684,4.295h-2.937l3.178-18.688h3.878l3.132,18.688h-2.964l-0.66-4.295H599.01z
+		 M600.491,34.371l-1.044,7.368h2.088L600.491,34.371z"/>
+	<path fill="#BFBFBF" d="M608.307,32.925h-2.998v-2.833h8.801v2.833h-2.983v15.854h-2.818L608.307,32.925L608.307,32.925z"/>
+	<path fill="#BFBFBF" d="M617.43,44.484l-0.684,4.295h-2.937l3.178-18.688h3.878l3.132,18.688h-2.964l-0.66-4.295H617.43z
+		 M618.911,34.371l-1.044,7.368h2.088L618.911,34.371z"/>
+</g>
+<g>
+	<path fill="#BFBFBF" d="M768.561,48.779h-2.819V30.091h2.819V48.779z"/>
+	<path fill="#BFBFBF" d="M774.245,30.076l3.741,12.57v-12.57h2.817v18.703h-3.026l-3.878-11.978v11.978h-2.819V30.076H774.245z"/>
+	<path fill="#BFBFBF" d="M787.77,30.091c1.372,0,2.467,0.433,3.281,1.298c0.756,0.824,1.133,1.879,1.133,3.161v2.715
+		c0,1.232-0.43,2.286-1.289,3.161s-1.901,1.312-3.125,1.312h-1.625v7.04h-2.819V30.091H787.77z M789.455,34.594
+		c0-0.486-0.146-0.88-0.439-1.178c-0.294-0.298-0.685-0.447-1.172-0.447h-1.699v5.981h1.699c0.447,0,0.828-0.159,1.142-0.478
+		s0.471-0.701,0.471-1.148L789.455,34.594L789.455,34.594z"/>
+	<path fill="#BFBFBF" d="M798.881,48.988c-1.243,0-2.297-0.433-3.162-1.298s-1.298-1.914-1.298-3.147V30.091h2.759v14.423
+		c0,0.446,0.16,0.827,0.479,1.141c0.316,0.312,0.7,0.47,1.147,0.47s0.827-0.157,1.142-0.47c0.312-0.313,0.47-0.694,0.47-1.141
+		V30.091h2.938v14.452c0,1.253-0.438,2.308-1.312,3.162C801.17,48.56,800.113,48.988,798.881,48.988z"/>
+	<path fill="#BFBFBF" d="M808.604,32.925h-2.997v-2.833h8.8v2.833h-2.982v15.854h-2.818V32.925H808.604z"/>
+</g>
+<g>
+	<path fill="#BFBFBF" d="M384.73,140.802c-1.243,0-2.3-0.435-3.17-1.305c-0.869-0.87-1.305-1.922-1.305-3.154v-10.2
+		c0-1.244,0.438-2.301,1.312-3.17c0.874-0.87,1.929-1.306,3.162-1.306c1.242,0,2.297,0.438,3.161,1.312
+		c0.865,0.875,1.298,1.929,1.298,3.162v2.133h-2.924v-2.193c0-0.447-0.158-0.83-0.477-1.147c-0.318-0.318-0.701-0.478-1.148-0.478
+		s-0.828,0.158-1.141,0.478c-0.312,0.317-0.47,0.7-0.47,1.147v10.232c0,0.446,0.157,0.827,0.47,1.141
+		c0.312,0.312,0.693,0.469,1.141,0.469s0.83-0.155,1.148-0.469c0.318-0.312,0.477-0.693,0.477-1.141v-2.581h2.924v2.609
+		c0,1.242-0.438,2.298-1.312,3.162C387.003,140.369,385.954,140.802,384.73,140.802z"/>
+	<path fill="#BFBFBF" d="M394.32,132.644v7.92h-2.819v-18.688h2.819v7.935h3.399v-7.935h2.819v18.688h-2.819v-7.92H394.32z"/>
+	<path fill="#BFBFBF" d="M406.516,136.269l-0.685,4.295h-2.937l3.178-18.688h3.877l3.132,18.688h-2.964l-0.66-4.295H406.516z
+		 M407.997,126.156l-1.044,7.369h2.088L407.997,126.156z"/>
+	<path fill="#BFBFBF" d="M418.619,121.861l3.741,12.57v-12.57h2.818v18.703h-3.027l-3.877-11.978v11.978h-2.819v-18.703H418.619
+		L418.619,121.861z"/>
+	<path fill="#BFBFBF" d="M432.128,140.802c-1.243,0-2.299-0.438-3.169-1.312s-1.305-1.924-1.305-3.146v-10.2
+		c0-1.244,0.438-2.301,1.312-3.17c0.874-0.87,1.929-1.306,3.162-1.306c1.243,0,2.297,0.438,3.162,1.312
+		c0.865,0.875,1.297,1.929,1.297,3.162v2.133h-2.923v-2.193c0-0.447-0.159-0.83-0.478-1.147c-0.317-0.318-0.7-0.478-1.147-0.478
+		c-0.448,0-0.828,0.158-1.141,0.478c-0.313,0.317-0.471,0.7-0.471,1.147v10.232c0,0.446,0.157,0.827,0.471,1.141
+		c0.312,0.312,0.692,0.469,1.141,0.469c0.447,0,0.83-0.154,1.147-0.468c0.318-0.312,0.478-0.69,0.478-1.138v-3.674h-1.566v-2.834
+		h4.489v6.532c0,1.242-0.438,2.298-1.312,3.162C434.401,140.37,433.352,140.802,432.128,140.802z"/>
+	<path fill="#BFBFBF" d="M439.033,140.564v-18.703h8.023v2.834h-5.205v5.101h3.804v2.834h-3.804v5.102h5.205v2.834L439.033,140.564
+		L439.033,140.564z"/>
+</g>
+<g>
+	<path fill="#BFBFBF" d="M563.609,123.877c1.569,0,2.724,0.433,3.46,1.297c0.646,0.757,0.97,1.811,0.97,3.162v2.715
+		c0,1.322-0.503,2.44-1.507,3.355l2.088,8.158h-3.048l-1.71-7.039c-0.079,0-0.164,0-0.253,0h-1.626v7.039h-2.819v-18.688
+		L563.609,123.877L563.609,123.877z M565.295,128.381c0-1.084-0.537-1.626-1.611-1.626h-1.699v5.981h1.699
+		c0.447,0,0.828-0.16,1.142-0.479c0.312-0.316,0.471-0.7,0.471-1.147L565.295,128.381L565.295,128.381z"/>
+	<path fill="#BFBFBF" d="M571.035,142.564v-18.703h8.024v2.834h-5.204v5.101h3.803v2.834h-3.803v5.102h5.204v2.834L571.035,142.564
+		L571.035,142.564z"/>
+	<path fill="#BFBFBF" d="M584.164,123.861l3.74,12.57v-12.57h2.818v18.703h-3.027l-3.877-11.978v11.978h-2.819v-18.703H584.164
+		L584.164,123.861z"/>
+	<path fill="#BFBFBF" d="M602.104,138.09c0,1.233-0.433,2.287-1.298,3.162c-0.864,0.875-1.908,1.312-3.132,1.312h-4.43v-18.703h4.43
+		c1.232,0,2.279,0.438,3.139,1.312c0.86,0.875,1.291,1.925,1.291,3.146V138.09z M596.062,139.775h1.7
+		c0.447,0,0.824-0.162,1.134-0.484c0.309-0.323,0.463-0.703,0.463-1.141v-9.785c0-0.447-0.157-0.83-0.472-1.148
+		c-0.312-0.317-0.688-0.477-1.125-0.477h-1.7V139.775z"/>
+	<path fill="#BFBFBF" d="M604.578,142.564v-18.703h8.023v2.834h-5.205v5.101h3.805v2.834h-3.805v5.102h5.205v2.834L604.578,142.564
+		L604.578,142.564z"/>
+	<path fill="#BFBFBF" d="M618.986,123.877c1.569,0,2.724,0.433,3.46,1.297c0.646,0.757,0.97,1.811,0.97,3.162v2.715
+		c0,1.322-0.503,2.44-1.507,3.355l2.088,8.158h-3.048l-1.71-7.039c-0.079,0-0.164,0-0.253,0h-1.626v7.039h-2.819v-18.688
+		L618.986,123.877L618.986,123.877z M620.672,128.381c0-1.084-0.537-1.626-1.611-1.626h-1.699v5.981h1.699
+		c0.447,0,0.828-0.16,1.142-0.479c0.312-0.316,0.471-0.7,0.471-1.147L620.672,128.381L620.672,128.381z"/>
+</g>
+</svg>
diff --git a/doc/source/slideshow/architecture/images/cluster_support.svg b/doc/source/slideshow/architecture/images/cluster_support.svg
new file mode 100644
index 0000000..f23d42c
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/cluster_support.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_0_420.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_0_420.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m561 [...]
+
diff --git a/doc/source/slideshow/architecture/images/data_managers.svg b/doc/source/slideshow/architecture/images/data_managers.svg
new file mode 100644
index 0000000..fd473b4
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/data_managers.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_5_124.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_5_124.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m201 [...]
+
diff --git a/doc/source/slideshow/architecture/images/docker-chart.png b/doc/source/slideshow/architecture/images/docker-chart.png
new file mode 100644
index 0000000..12ff77f
Binary files /dev/null and b/doc/source/slideshow/architecture/images/docker-chart.png differ
diff --git a/doc/source/slideshow/architecture/images/family/team.png b/doc/source/slideshow/architecture/images/family/team.png
new file mode 100644
index 0000000..e0a350b
Binary files /dev/null and b/doc/source/slideshow/architecture/images/family/team.png differ
diff --git a/doc/source/slideshow/architecture/images/galaxy_schema.png b/doc/source/slideshow/architecture/images/galaxy_schema.png
new file mode 100644
index 0000000..d38a9fa
Binary files /dev/null and b/doc/source/slideshow/architecture/images/galaxy_schema.png differ
diff --git a/doc/source/slideshow/architecture/images/hda.svg b/doc/source/slideshow/architecture/images/hda.svg
new file mode 100644
index 0000000..a10277a
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/hda.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_1_29.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_1_29.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m372.0 [...]
+
diff --git a/doc/source/slideshow/architecture/images/hda_dataset.plantuml.svg b/doc/source/slideshow/architecture/images/hda_dataset.plantuml.svg
new file mode 100644
index 0000000..5743cbb
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/hda_dataset.plantuml.svg
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="142px" style="width:401px;height:142px;" version="1.1" viewBox="0 0 401 142" width="401px"><defs><filter height="300%" id="f1" width="300%" x="-1" y="-1"><feGaussianBlur result="blurOut" stdDeviation="2.0"/><feColorMatrix in="blurOut" result="blurOut2" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .4 0"/><feOffset dx="4.0" dy="4.0"  [...]
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/hda_dataset.plantuml.txt b/doc/source/slideshow/architecture/images/hda_dataset.plantuml.txt
new file mode 100644
index 0000000..92d56b5
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/hda_dataset.plantuml.txt
@@ -0,0 +1,23 @@
+@startuml
+
+!include plantuml_options.txt
+
+class HistoryDatasetAssociation {
+    hid: integer
+    history_id: integer
+    dataset_id: integer
+    state: string
+    name: string
+    info: string
+}
+
+class Dataset {
+    object_store_id: string
+    external_filename: string
+    _extra_files_path: string
+    file_size: integer
+    total_size: integer
+}
+
+HistoryDatasetAssociation "*" -> "1" Dataset
+@enduml
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/hda_hdca.plantuml.svg b/doc/source/slideshow/architecture/images/hda_hdca.plantuml.svg
new file mode 100644
index 0000000..64d3077
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/hda_hdca.plantuml.svg
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="455px" style="width:502px;height:455px;" version="1.1" viewBox="0 0 502 455" width="502px"><defs><filter height="300%" id="f1" width="300%" x="-1" y="-1"><feGaussianBlur result="blurOut" stdDeviation="2.0"/><feColorMatrix in="blurOut" result="blurOut2" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .4 0"/><feOffset dx="4.0" dy="4.0"  [...]
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/hda_hdca.plantuml.txt b/doc/source/slideshow/architecture/images/hda_hdca.plantuml.txt
new file mode 100644
index 0000000..facb8bf
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/hda_hdca.plantuml.txt
@@ -0,0 +1,29 @@
+@startuml
+
+!include plantuml_options.txt
+
+class History
+class HistoryDatasetAssociation {
+    history_content_type = 'dataset'
+    hid
+}
+class HistoryDatasetCollectionAssociation {
+    history_content_type = 'dataset_collection'
+    hid
+}
+class DatasetCollection {
+    collection_type
+}
+class DatasetCollectionElement {
+    element_index
+    element_identifier    
+}
+
+History "1" -- "*" HistoryDatasetAssociation
+History "1" -- "*" HistoryDatasetCollectionAssociation
+HistoryDatasetCollectionAssociation "1" -- "1" DatasetCollection
+DatasetCollection "1" -- "0..1" DatasetCollectionElement
+DatasetCollectionElement "*" -- "1" DatasetCollection
+HistoryDatasetAssociation "1" -- "0..1" DatasetCollectionElement
+
+@enduml
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/jsload.png b/doc/source/slideshow/architecture/images/jsload.png
new file mode 100644
index 0000000..f84c80d
Binary files /dev/null and b/doc/source/slideshow/architecture/images/jsload.png differ
diff --git a/doc/source/slideshow/architecture/images/libraries.svg b/doc/source/slideshow/architecture/images/libraries.svg
new file mode 100644
index 0000000..b0008c0
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/libraries.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_1_103.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_1_103.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m372 [...]
+
diff --git a/doc/source/slideshow/architecture/images/library_permissions.svg b/doc/source/slideshow/architecture/images/library_permissions.svg
new file mode 100644
index 0000000..85755e2
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/library_permissions.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_2_9.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_2_9.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m372.0 3 [...]
+
diff --git a/doc/source/slideshow/architecture/images/objectstore.plantuml.svg b/doc/source/slideshow/architecture/images/objectstore.plantuml.svg
new file mode 100644
index 0000000..ff5a09c
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/objectstore.plantuml.svg
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="397px" style="width:583px;height:397px;" version="1.1" viewBox="0 0 583 397" width="583px"><defs><filter height="300%" id="f1" width="300%" x="-1" y="-1"><feGaussianBlur result="blurOut" stdDeviation="2.0"/><feColorMatrix in="blurOut" result="blurOut2" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .4 0"/><feOffset dx="4.0" dy="4.0"  [...]
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/objectstore.plantuml.txt b/doc/source/slideshow/architecture/images/objectstore.plantuml.txt
new file mode 100644
index 0000000..4843842
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/objectstore.plantuml.txt
@@ -0,0 +1,28 @@
+@startuml
+
+!include plantuml_options.txt
+
+abstract class ObjectStore {
+    exists(obj)
+    file_ready(obj)
+    create(obj)
+    size(obj)
+    delete(obj)
+    get_data(obj)
+    get_filename(obj)
+    update_from_file(obj)
+    get_store_usage_percent()
+}
+
+class DiskObjectStore
+abstract class NestedObjectStore
+
+ObjectStore <|-- DiskObjectStore
+ObjectStore <|-- NestedObjectStore
+ObjectStore <|-- S3ObjectStore
+DiskObjectStore <|-- IRODSObjectStore
+NestedObjectStore <|-- DistributedObjectStore
+NestedObjectStore <|-- HierarchicalObjectStore
+
+
+@enduml
diff --git a/doc/source/slideshow/architecture/images/objectstore_diagram.svg b/doc/source/slideshow/architecture/images/objectstore_diagram.svg
new file mode 100644
index 0000000..7dd067a
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/objectstore_diagram.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_5_78.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_5_78.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m176.0 [...]
+
diff --git a/doc/source/slideshow/architecture/images/plantuml_options.txt b/doc/source/slideshow/architecture/images/plantuml_options.txt
new file mode 100644
index 0000000..bb8c72e
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/plantuml_options.txt
@@ -0,0 +1,34 @@
+skinparam handwritten true
+' skinparam roundcorner 20
+
+skinparam class {
+    ArrowFontColor #FFEFD5
+    BackgroundColor #FFEFD5
+    ArrowColor Orange
+    BorderColor DarkOrange
+}
+
+skinparam object {
+    ArrowFontColor #FFEFD5
+    BackgroundColor #FFEFD5
+    ArrowColor Orange
+    BorderColor DarkOrange
+}
+
+skinparam note {
+    BackgroundColor #FFEFD5
+    BorderColor #BF5700
+}
+
+skinparam sequence {
+    ArrowColor Orange
+    ArrowFontColor #FFEFD5
+    ActorBorderColor DarkOrange
+    ActorBackgroundColor #FFEFD5
+
+    ParticipantBorderColor DarkOrange
+    ParticipantBackgroundColor #FFEFD5
+
+    LifeLineBorderColor DarkOrange
+    LifeLineBackgroundColor #FFEFD5
+}
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/sequence.json b/doc/source/slideshow/architecture/images/sequence.json
new file mode 100644
index 0000000..406e708
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/sequence.json
@@ -0,0 +1,3 @@
+{
+  "mirrorActors": false
+}
diff --git a/doc/source/slideshow/architecture/images/server_client.plantuml.svg b/doc/source/slideshow/architecture/images/server_client.plantuml.svg
new file mode 100644
index 0000000..3ac4c63
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/server_client.plantuml.svg
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="430px" style="width:605px;height:430px;" version="1.1" viewBox="0 0 605 430" width="605px"><defs><filter height="300%" id="f1" width="300%" x="-1" y="-1"><feGaussianBlur result="blurOut" stdDeviation="2.0"/><feColorMatrix in="blurOut" result="blurOut2" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .4 0"/><feOffset dx="4.0" dy="4.0"  [...]
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/server_client.plantuml.txt b/doc/source/slideshow/architecture/images/server_client.plantuml.txt
new file mode 100644
index 0000000..eadc622
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/server_client.plantuml.txt
@@ -0,0 +1,20 @@
+@startuml
+
+!include plantuml_options.txt
+
+note over Browser, Server: HTTP
+
+Browser -> Server: Page Request
+activate Server
+Server -->Browser: Static Content (HTML+JS+CSS)
+deactivate Server
+
+note left of Browser: MVC with \nbackbone.js
+Browser -> Server: API Request (JSON)
+activate Server
+note right of Server: Build JSON response\nin Galaxy "API" controllers
+Server --> Browser: API Response (JSON)
+deactivate Server
+
+note left of Browser: HTML rendered from\nclient-side templates\n(in Backbone views).
+@enduml
diff --git a/doc/source/slideshow/architecture/images/server_client_old.plantuml.svg b/doc/source/slideshow/architecture/images/server_client_old.plantuml.svg
new file mode 100644
index 0000000..ea5a96f
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/server_client_old.plantuml.svg
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="318px" style="width:647px;height:318px;" version="1.1" viewBox="0 0 647 318" width="647px"><defs><filter height="300%" id="f1" width="300%" x="-1" y="-1"><feGaussianBlur result="blurOut" stdDeviation="2.0"/><feColorMatrix in="blurOut" result="blurOut2" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .4 0"/><feOffset dx="4.0" dy="4.0"  [...]
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/server_client_old.plantuml.txt b/doc/source/slideshow/architecture/images/server_client_old.plantuml.txt
new file mode 100644
index 0000000..2cc5c1e
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/server_client_old.plantuml.txt
@@ -0,0 +1,14 @@
+@startuml
+
+!include plantuml_options.txt
+
+note over Browser, Server: HTTP
+
+Browser -> Server: Page Request
+activate Server
+note right of Server: Build HTML fragments\nusing Mako Python library\nin Galaxy "web" controllers
+Server --> Browser: Static Content (HTML+JS+CSS)
+deactivate Server
+note left of Browser: Render HTML fragments \nwith JavaScript
+
+@enduml
diff --git a/doc/source/slideshow/architecture/images/sqla_arch_small.png b/doc/source/slideshow/architecture/images/sqla_arch_small.png
new file mode 100644
index 0000000..a1c0958
Binary files /dev/null and b/doc/source/slideshow/architecture/images/sqla_arch_small.png differ
diff --git a/doc/source/slideshow/architecture/images/usegalaxy_webservers.svg b/doc/source/slideshow/architecture/images/usegalaxy_webservers.svg
new file mode 100644
index 0000000..b51720a
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/usegalaxy_webservers.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_0_80.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_0_80.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" d="m79.049866 241.42723l0 0c [...]
+
diff --git a/doc/source/slideshow/architecture/images/usegalaxyorg.svg b/doc/source/slideshow/architecture/images/usegalaxyorg.svg
new file mode 100644
index 0000000..5026e49
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/usegalaxyorg.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="gb64e9d740_0_0.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#gb64e9d740_0_0.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m367.122 [...]
+
diff --git a/doc/source/slideshow/architecture/images/webapp.plantuml.svg b/doc/source/slideshow/architecture/images/webapp.plantuml.svg
new file mode 100644
index 0000000..8c158a2
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/webapp.plantuml.svg
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="195px" style="width:372px;height:195px;" version="1.1" viewBox="0 0 372 195" width="372px"><defs><filter height="300%" id="f1" width="300%" x="-1" y="-1"><feGaussianBlur result="blurOut" stdDeviation="2.0"/><feColorMatrix in="blurOut" result="blurOut2" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .4 0"/><feOffset dx="4.0" dy="4.0"  [...]
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/images/webapp.plantuml.txt b/doc/source/slideshow/architecture/images/webapp.plantuml.txt
new file mode 100644
index 0000000..1822c93
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/webapp.plantuml.txt
@@ -0,0 +1,25 @@
+@startuml
+
+!include plantuml_options.txt
+
+object webapp {
+    controllers : dict
+    api_controllers : dict
+    mapper : routes.Mapper
+    handle_request: (environ, start_response) -> ()
+    transaction_factory: (environ) -> GalaxyWebTransaction
+}
+
+object app {
+
+}
+
+object trans {
+
+}
+
+webapp -> "app" app
+app "app" <-- trans
+webapp *-- trans
+
+@enduml
diff --git a/doc/source/slideshow/architecture/images/what-is-webpack.svg b/doc/source/slideshow/architecture/images/what-is-webpack.svg
new file mode 100644
index 0000000..6576a30
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/what-is-webpack.svg
@@ -0,0 +1,529 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:xlink="http://www.w3.org/1999/xlink"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="779.52753"
+   height="389.76376"
+   id="svg2"
+   version="1.1"
+   inkscape:version="0.48.5 r10040"
+   sodipodi:docname="what-is-webpack.svg"
+   inkscape:export-filename="C:\Users\kop\Repos\node_modules\webpack.github.com\assets\what-is-webpack.png"
+   inkscape:export-xdpi="300"
+   inkscape:export-ydpi="300">
+  <defs
+     id="defs4">
+    <marker
+       inkscape:stockid="Arrow2Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow2Mend"
+       style="overflow:visible">
+      <path
+         id="path3864"
+         style="font-size:12px;fill-rule:evenodd;stroke-width:0.625;stroke-linejoin:round"
+         d="M 8.7185878,4.0337352 -2.2072895,0.01601326 8.7185884,-4.0017078 c -1.7454984,2.3720609 -1.7354408,5.6174519 -6e-7,8.035443 z"
+         transform="scale(-0.6,-0.6)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="DotM"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="DotM"
+       style="overflow:visible">
+      <path
+         id="path3902"
+         d="m -2.5,-1 c 0,2.76 -2.24,5 -5,5 -2.76,0 -5,-2.24 -5,-5 0,-2.76 2.24,-5 5,-5 2.76,0 5,2.24 5,5 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt;marker-start:none;marker-end:none"
+         transform="matrix(0.4,0,0,0.4,2.96,0.4)"
+         inkscape:connector-curvature="0" />
+    </marker>
+    <marker
+       inkscape:stockid="Arrow1Mend"
+       orient="auto"
+       refY="0"
+       refX="0"
+       id="Arrow1Mend"
+       style="overflow:visible">
+      <path
+         id="path3846"
+         d="M 0,0 5,-5 -12.5,0 5,5 0,0 z"
+         style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt;marker-start:none"
+         transform="matrix(-0.4,0,0,-0.4,-4,0)"
+         inkscape:connector-curvature="0" />
+    </marker>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="1.4"
+     inkscape:cx="935.05791"
+     inkscape:cy="89.116221"
+     inkscape:document-units="px"
+     inkscape:current-layer="g4748"
+     showgrid="true"
+     units="cm"
+     inkscape:window-width="1920"
+     inkscape:window-height="1003"
+     inkscape:window-x="-9"
+     inkscape:window-y="-9"
+     inkscape:window-maximized="1">
+    <inkscape:grid
+       type="xygrid"
+       id="grid2985"
+       empspacing="10"
+       visible="true"
+       enabled="true"
+       snapvisiblegridlinesonly="true"
+       units="cm"
+       spacingx="0.1cm"
+       spacingy="0.1cm" />
+  </sodipodi:namedview>
+  <metadata
+     id="metadata7">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Ebene 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(0,-662.59867)">
+    <g
+       id="g3761"
+       transform="translate(-2.7033287e-7,106.29922)">
+      <rect
+         y="591.73254"
+         x="35.433071"
+         height="35.433071"
+         width="35.433071"
+         id="rect2987"
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <text
+         sodipodi:linespacing="120%"
+         id="text3757"
+         y="612.99237"
+         x="53.149605"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         xml:space="preserve"><tspan
+           y="612.99237"
+           x="53.149605"
+           id="tspan3759"
+           sodipodi:role="line">.js</tspan></text>
+    </g>
+    <g
+       id="g3766"
+       transform="translate(70.866141,141.7323)">
+      <rect
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0"
+         id="rect3768"
+         width="35.433071"
+         height="35.433071"
+         x="35.433071"
+         y="591.73254" />
+      <text
+         xml:space="preserve"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         x="53.149605"
+         y="612.99237"
+         id="text3770"
+         sodipodi:linespacing="120%"><tspan
+           sodipodi:role="line"
+           id="tspan3772"
+           x="53.149605"
+           y="612.99237">.js</tspan></text>
+    </g>
+    <g
+       id="g4713"
+       transform="translate(-2.7033287e-7,106.29922)">
+      <rect
+         y="662.59869"
+         x="35.433067"
+         height="35.433071"
+         width="35.433071"
+         id="rect3776"
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <text
+         sodipodi:linespacing="120%"
+         id="text3778"
+         y="683.85852"
+         x="53.149601"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         xml:space="preserve"><tspan
+           y="683.85852"
+           x="53.149601"
+           id="tspan3780"
+           sodipodi:role="line">.jade</tspan></text>
+    </g>
+    <path
+       inkscape:connector-curvature="0"
+       id="path4691"
+       d="m 248.0315,804.33099 0,35.43307 248.03149,0 0,35.43307 53.14961,-53.14961 -53.14961,-53.14961 0,35.43308 z"
+       style="fill:#646464;fill-opacity:1;stroke:#c8c8c8;stroke-width:3.54330707;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" />
+    <image
+       width="253.23659"
+       height="283.31219"
+       xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAA0oAAAOuCAYAAAA5D+d5AAAABHNCSVQICAgIfAhkiAAAIABJREFU eJzsvVm3LUd15/uPyL2PWkCYxipjsKHABRQYjADpSIISRgh01EBRpfNwa4w7xn25/hr6Gr4vd4y6 o+6DdMsFao5ACCNj9YgeAzYUYJoynekkJJ2zdkbch4wZOWPGjMhce++j083fGKA8KzMjIyJz7TX/ OZtwMAzDMIwzzdG7LsHl47UAgCuGx3D38efPcI8MwzCMCxx3pjtgGIZhXMDceafH4299N4I/CgQA HgiI2AlP4Og3n8add4Yz3UXDMAzjwsSEkmEYhnFm+OAn3gq/9354XEQaqSDgJMLO5/HZj33zTHTP MAzDuLAxoWQYhmG8uNx41+vg/QfgcEUWSOQ3IrHEhVPEbxDC5/DQ8R++2F01DMMwLl [...]
+       id="image4684"
+       x="248.03149"
+       y="733.46484" />
+    <g
+       id="g3782"
+       transform="translate(70.866141,212.59844)">
+      <rect
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0"
+         id="rect3784"
+         width="35.433071"
+         height="35.433071"
+         x="35.433071"
+         y="591.73254" />
+      <text
+         xml:space="preserve"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         x="53.149605"
+         y="612.99237"
+         id="text3786"
+         sodipodi:linespacing="120%"><tspan
+           sodipodi:role="line"
+           id="tspan3788"
+           x="53.149605"
+           y="612.99237">.coffee</tspan></text>
+    </g>
+    <g
+       transform="translate(-5.4066575e-7,248.03151)"
+       id="g3790">
+      <rect
+         y="591.73254"
+         x="35.433071"
+         height="35.433071"
+         width="35.433071"
+         id="rect3792"
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <text
+         sodipodi:linespacing="120%"
+         id="text3794"
+         y="612.99237"
+         x="53.149605"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         xml:space="preserve"><tspan
+           y="612.99237"
+           x="53.149605"
+           id="tspan3796"
+           sodipodi:role="line">.css</tspan></text>
+    </g>
+    <g
+       id="g3798"
+       transform="translate(70.866141,283.46458)">
+      <rect
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0"
+         id="rect3800"
+         width="35.433071"
+         height="35.433071"
+         x="35.433071"
+         y="591.73254" />
+      <text
+         xml:space="preserve"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         x="53.149605"
+         y="612.99237"
+         id="text3802"
+         sodipodi:linespacing="120%"><tspan
+           sodipodi:role="line"
+           id="tspan3804"
+           x="53.149605"
+           y="612.99237">.less</tspan></text>
+    </g>
+    <g
+       transform="translate(-8.1099862e-7,318.89765)"
+       id="g3806">
+      <rect
+         y="591.73254"
+         x="35.433071"
+         height="35.433071"
+         width="35.433071"
+         id="rect3808"
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <text
+         sodipodi:linespacing="120%"
+         id="text3810"
+         y="612.99237"
+         x="53.149605"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         xml:space="preserve"><tspan
+           y="612.99237"
+           x="53.149605"
+           id="tspan3812"
+           sodipodi:role="line">.less</tspan></text>
+    </g>
+    <g
+       id="g3814"
+       transform="translate(141.73228,248.03151)">
+      <rect
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0"
+         id="rect3816"
+         width="35.433071"
+         height="35.433071"
+         x="35.433071"
+         y="591.73254" />
+      <text
+         xml:space="preserve"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         x="53.149605"
+         y="612.99237"
+         id="text3818"
+         sodipodi:linespacing="120%"><tspan
+           sodipodi:role="line"
+           id="tspan3820"
+           x="53.149605"
+           y="612.99237">.png</tspan></text>
+    </g>
+    <g
+       transform="translate(141.73228,177.16537)"
+       id="g3822">
+      <rect
+         y="591.73254"
+         x="35.433071"
+         height="35.433071"
+         width="35.433071"
+         id="rect3824"
+         style="fill:#c8c8c8;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:1.77165353;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <text
+         sodipodi:linespacing="120%"
+         id="text3826"
+         y="612.99237"
+         x="53.149605"
+         style="font-size:10px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         xml:space="preserve"><tspan
+           y="612.99237"
+           x="53.149605"
+           id="tspan3828"
+           sodipodi:role="line">.coffee</tspan></text>
+    </g>
+    <path
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)"
+       d="m 70.866142,726.37823 35.433068,14.17323"
+       id="path3832"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <path
+       sodipodi:nodetypes="cc"
+       inkscape:connector-curvature="0"
+       id="path4652"
+       d="m 53.149606,733.46484 0,35.43307"
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)" />
+    <path
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)"
+       d="m 141.73228,761.8113 35.43307,14.17323"
+       id="path4654"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <path
+       sodipodi:nodetypes="cc"
+       inkscape:connector-curvature="0"
+       id="path4656"
+       d="m 124.01575,768.89791 0,35.43308"
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)" />
+    <path
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)"
+       d="M 70.866142,797.24437 106.29921,811.4176"
+       id="path4658"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <path
+       sodipodi:nodetypes="cc"
+       inkscape:connector-curvature="0"
+       id="path4660"
+       d="m 141.73228,811.4176 35.43307,-14.17323"
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)" />
+    <path
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)"
+       d="m 141.73228,832.67744 35.43307,14.17323"
+       id="path4662"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <path
+       sodipodi:nodetypes="cc"
+       inkscape:connector-curvature="0"
+       id="path4664"
+       d="m 194.88189,804.33099 0,35.43307"
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)" />
+    <path
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)"
+       d="M 113.38583,768.89791 63.779528,839.76406"
+       id="path4666"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <path
+       sodipodi:nodetypes="cc"
+       inkscape:connector-curvature="0"
+       id="path4668"
+       d="m 63.779528,804.33099 49.606302,70.86614"
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)" />
+    <path
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)"
+       d="m 70.866142,857.48059 106.299208,0"
+       id="path4670"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <path
+       sodipodi:nodetypes="cc"
+       inkscape:connector-curvature="0"
+       id="path4672"
+       d="m 53.149606,910.6302 0,-35.43307"
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)" />
+    <path
+       style="fill:none;stroke:#000000;stroke-width:1.0629921;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;marker-start:url(#DotM);marker-end:url(#Arrow2Mend)"
+       d="M 106.29921,903.54358 70.866142,917.71681"
+       id="path4674"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="cc" />
+    <g
+       id="g4727"
+       transform="translate(14.173224,113.38584)">
+      <rect
+         style="fill:#e6e6e6;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:3.54330707;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0"
+         id="rect4693"
+         width="70.866142"
+         height="70.866142"
+         x="566.92914"
+         y="627.16565"
+         ry="7.0866175"
+         rx="7.0866189" />
+      <text
+         xml:space="preserve"
+         style="font-size:24px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         x="602.36218"
+         y="669.6853"
+         id="text3757-1"
+         sodipodi:linespacing="120%"><tspan
+           sodipodi:role="line"
+           id="tspan3759-7"
+           x="602.36218"
+           y="669.6853">.js</tspan></text>
+    </g>
+    <g
+       id="g4732"
+       transform="translate(106.2992,113.38581)">
+      <rect
+         rx="7.0866189"
+         ry="7.0866175"
+         y="627.16565"
+         x="566.92914"
+         height="70.866142"
+         width="70.866142"
+         id="rect4734"
+         style="fill:#e6e6e6;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:3.54330707;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <text
+         sodipodi:linespacing="120%"
+         id="text4736"
+         y="669.6853"
+         x="602.36218"
+         style="font-size:24px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         xml:space="preserve"><tspan
+           y="669.6853"
+           x="602.36218"
+           id="tspan4738"
+           sodipodi:role="line">.js</tspan></text>
+    </g>
+    <g
+       transform="translate(14.173219,205.51179)"
+       id="g4740">
+      <rect
+         style="fill:#e6e6e6;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:3.54330707;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0"
+         id="rect4742"
+         width="70.866142"
+         height="70.866142"
+         x="566.92914"
+         y="627.16565"
+         ry="7.0866175"
+         rx="7.0866189" />
+      <text
+         xml:space="preserve"
+         style="font-size:24px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         x="602.36218"
+         y="669.6853"
+         id="text4744"
+         sodipodi:linespacing="120%"><tspan
+           sodipodi:role="line"
+           id="tspan4746"
+           x="602.36218"
+           y="669.6853">.png</tspan></text>
+    </g>
+    <g
+       id="g4748"
+       transform="translate(106.2992,205.51179)">
+      <rect
+         rx="7.0866189"
+         ry="7.0866175"
+         y="627.16565"
+         x="566.92914"
+         height="70.866142"
+         width="70.866142"
+         id="rect4750"
+         style="fill:#e6e6e6;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:3.54330707;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0" />
+      <text
+         sodipodi:linespacing="120%"
+         id="text4752"
+         y="669.6853"
+         x="602.36218"
+         style="font-size:24px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+         xml:space="preserve"><tspan
+           y="669.6853"
+           x="602.36218"
+           id="tspan4754"
+           sodipodi:role="line">.css</tspan></text>
+    </g>
+    <text
+       xml:space="preserve"
+       style="font-size:24px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+       x="134.64568"
+       y="985.03961"
+       id="text4756"
+       sodipodi:linespacing="120%"><tspan
+         sodipodi:role="line"
+         id="tspan4758"
+         x="134.64568"
+         y="985.03961">modules</tspan><tspan
+         sodipodi:role="line"
+         x="134.64568"
+         y="1013.8396"
+         id="tspan4760">with dependencies</tspan></text>
+    <text
+       sodipodi:linespacing="120%"
+       id="text4762"
+       y="985.03961"
+       x="673.22833"
+       style="font-size:24px;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;text-align:center;line-height:120.00000477%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;font-family:Arial;-inkscape-font-specification:Arial"
+       xml:space="preserve"><tspan
+         id="tspan4766"
+         y="985.03961"
+         x="673.22833"
+         sodipodi:role="line">static</tspan><tspan
+         y="1013.8396"
+         x="673.22833"
+         sodipodi:role="line"
+         id="tspan4770">assets</tspan></text>
+  </g>
+</svg>
diff --git a/doc/source/slideshow/architecture/images/workflow_definition.svg b/doc/source/slideshow/architecture/images/workflow_definition.svg
new file mode 100644
index 0000000..567f556
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/workflow_definition.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_1_48.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_1_48.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m372.0 [...]
+
diff --git a/doc/source/slideshow/architecture/images/workflow_run.svg b/doc/source/slideshow/architecture/images/workflow_run.svg
new file mode 100644
index 0000000..14efb1d
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/workflow_run.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_1_66.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_1_66.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m372.0 [...]
+
diff --git a/doc/source/slideshow/architecture/images/wsgi_app.svg b/doc/source/slideshow/architecture/images/wsgi_app.svg
new file mode 100644
index 0000000..1dfe26c
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/wsgi_app.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="gb64e9d740_0_14.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#gb64e9d740_0_14.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m118.8 [...]
+
diff --git a/doc/source/slideshow/architecture/images/wsgi_request.svg b/doc/source/slideshow/architecture/images/wsgi_request.svg
new file mode 100644
index 0000000..a40c21f
--- /dev/null
+++ b/doc/source/slideshow/architecture/images/wsgi_request.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 960.0 540.0" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="ga2f5d01c8_2_157.0"><path d="m0 0l960.0 0l0 540.0l-960.0 0l0 -540.0z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#ga2f5d01c8_2_157.0)"><path fill="#000000" d="m0 0l960.0 0l0 540.0l-960.0 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m51. [...]
+
diff --git a/doc/source/slideshow/architecture/remark-latest.min.js b/doc/source/slideshow/architecture/remark-latest.min.js
new file mode 100644
index 0000000..d453316
--- /dev/null
+++ b/doc/source/slideshow/architecture/remark-latest.min.js
@@ -0,0 +1,14 @@
+require=function(e,t,r){function a(r,n){if(!t[r]){if(!e[r]){var s="function"==typeof require&&require;if(!n&&s)return s(r,!0);if(i)return i(r,!0);throw new Error("Cannot find module '"+r+"'")}var l=t[r]={exports:{}};e[r][0].call(l.exports,function(t){var i=e[r][1][t];return a(i?i:t)},l,l.exports)}return t[r].exports}for(var i="function"==typeof require&&require,n=0;n<r.length;n++)a(r[n]);return a}({"components/printing":[function(e,t){t.exports=e("yoGRCZ")},{}],yoGRCZ:[function(e,t){func [...]
+}},{name:"applescript",create:function(e){var t=e.inherit(e.QUOTE_STRING_MODE,{illegal:""}),r={className:"params",begin:"\\(",end:"\\)",contains:["self",e.C_NUMBER_MODE,t]},a=e.COMMENT("--","$"),i=e.COMMENT("\\(\\*","\\*\\)",{contains:["self",a]}),n=[a,i,e.HASH_COMMENT_MODE];return{aliases:["osascript"],keywords:{keyword:"about above after against and around as at back before beginning behind below beneath beside between but by considering contain contains continue copy div does eighth e [...]
+return{case_insensitive:!0,illegal:/\/\*/,keywords:{keyword:t,built_in:a,literal:r},contains:[i,n,s,l,o,c,d]}}},{name:"avrasm",create:function(e){return{case_insensitive:!0,lexemes:"\\.?"+e.IDENT_RE,keywords:{keyword:"adc add adiw and andi asr bclr bld brbc brbs brcc brcs break breq brge brhc brhs brid brie brlo brlt brmi brne brpl brsh brtc brts brvc brvs bset bst call cbi cbr clc clh cli cln clr cls clt clv clz com cp cpc cpi cpse dec eicall eijmp elpm eor fmul fmuls fmulsu icall ijmp  [...]
+}},{name:"django",create:function(e){var t={begin:/\|[A-Za-z]+:?/,keywords:{name:"truncatewords removetags linebreaksbr yesno get_digit timesince random striptags filesizeformat escape linebreaks length_is ljust rjust cut urlize fix_ampersands title floatformat capfirst pprint divisibleby add make_list unordered_list urlencode timeuntil urlizetrunc wordcount stringformat linenumbers slice date dictsort dictsortreversed default_if_none pluralize lower join center default truncatewords_htm [...]
+}},{name:"haskell",create:function(e){var t={variants:[e.COMMENT("--","$"),e.COMMENT("{-","-}",{contains:["self"]})]},r={className:"meta",begin:"{-#",end:"#-}"},a={className:"meta",begin:"^#",end:"$"},i={className:"type",begin:"\\b[A-Z][\\w']*",relevance:0},n={begin:"\\(",end:"\\)",illegal:'"',contains:[r,a,{className:"type",begin:"\\b[A-Z][\\w]*(\\((\\.\\.|,|\\w+)\\))?"},e.inherit(e.TITLE_MODE,{begin:"[_a-z][\\w']*"}),t]},s={begin:"{",end:"}",contains:n.contains};return{aliases:["hs"],k [...]
+}},{name:"livescript",create:function(e){var t={keyword:"in if for while finally new do return else break catch instanceof throw try this switch continue typeof delete debugger case default function var with then unless until loop of by when and or is isnt not it that otherwise from to til fallthrough super case default function var void const let enum export import native __hasProp __extends __slice __bind __indexOf",literal:"true false null undefined yes no on off it that void",built_i [...]
+}},{name:"matlab",create:function(e){var t=[e.C_NUMBER_MODE,{className:"string",begin:"'",end:"'",contains:[e.BACKSLASH_ESCAPE,{begin:"''"}]}],r={relevance:0,contains:[{begin:/'['\.]*/}]};return{keywords:{keyword:"break case catch classdef continue else elseif end enumerated events for function global if methods otherwise parfor persistent properties return spmd switch try while",built_in:"sin sind sinh asin asind asinh cos cosd cosh acos acosd acosh tan tand tanh atan atand atan2 atanh  [...]
+}},{name:"objectivec",create:function(e){var t={className:"built_in",begin:"(AV|CA|CF|CG|CI|MK|MP|NS|UI|XC)\\w+"},r={keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak co [...]
+s.contains=c,o.contains=c;var d="[>?]>",m="[\\w#]+\\(\\w+\\):\\d+:\\d+>",u="(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>",h=[{begin:/^\s*=>/,starts:{end:"$",contains:c}},{className:"meta",begin:"^("+d+"|"+m+"|"+u+")",starts:{end:"$",contains:c}}];return{aliases:["rb","gemspec","podspec","thor","irb"],keywords:r,illegal:/\/\*/,contains:n.concat(h).concat(c)}}},{name:"ruleslanguage",create:function(e){return{keywords:{keyword:"BILL_PERIOD BILL_START BILL_STOP RS_EFFECTIVE_START RS_EFFECTIVE_STO [...]
+});s=s.concat(i);var l={className:"string",relevance:0,variants:[{begin:'"',end:'"',contains:[{begin:'""'}]},{begin:"'",end:"'",contains:[{begin:"''"}]}]},o={className:"number",begin:e.NUMBER_RE,relevance:0},c={className:"string",variants:[e.QUOTE_STRING_MODE,{begin:"'\\\\?.",end:"'",illegal:"."}]},d={className:"meta",begin:"#",end:"$",keywords:{"meta-keyword":"if else elif endif define undef warning error line pragma ifdef ifndef"},contains:[{begin:/\\\n/,relevance:0},{beginKeywords:"in [...]
+return{aliases:["styl"],case_insensitive:!1,illegal:"("+o.join("|")+")",keywords:"if else for in",contains:[e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,r,{begin:"\\.[a-zA-Z][a-zA-Z0-9_-]*"+s,returnBegin:!0,contains:[{className:"selector-class",begin:"\\.[a-zA-Z][a-zA-Z0-9_-]*"}]},{begin:"\\#[a-zA-Z][a-zA-Z0-9_-]*"+s,returnBegin:!0,contains:[{className:"selector-id",begin:"\\#[a-zA-Z][a-zA-Z0-9_-]*"}]},{begin:"\\b("+n.join("|")+")"+s,returnBegin:!0, [...]
+}},{name:"xl",create:function(e){var t="ObjectLoader Animate MovieCredits Slides Filters Shading Materials LensFlare Mapping VLCAudioVideo StereoDecoder PointCloud NetworkAccess RemoteControl RegExp ChromaKey Snowfall NodeJS Speech Charts",r={keyword:"if then else do while until for loop import with is as where when by data constant integer real text name boolean symbol infix prefix postfix block tree",literal:"true false nil",built_in:"in mod rem and or xor not abs sign floor ceil sqrt  [...]
+}()},{}],8:[function(e,t,r){r.addClass=function(e,t){e.className=r.getClasses(e).concat([t]).join(" ")},r.removeClass=function(e,t){e.className=r.getClasses(e).filter(function(e){return e!==t}).join(" ")},r.toggleClass=function(e,t){var a=r.getClasses(e),i=a.indexOf(t);-1!==i?a.splice(i,1):a.push(t),e.className=a.join(" ")},r.getClasses=function(e){return e.className.split(" ").filter(function(e){return""!==e})},r.hasClass=function(e,t){return-1!==r.getClasses(e).indexOf(t)},r.getPrefixe [...]
+},e.prototype.token=function(e,t){for(var r,a,i,n,s,l,o,c,d,e=e.replace(/^ +$/gm,"");e;)if((i=this.rules.newline.exec(e))&&(e=e.substring(i[0].length),i[0].length>1&&this.tokens.push({type:"space"})),i=this.rules.code.exec(e))e=e.substring(i[0].length),i=i[0].replace(/^ {4}/gm,""),this.tokens.push({type:"code",text:this.options.pedantic?i:i.replace(/\n+$/,"")});else if(i=this.rules.fences.exec(e))e=e.substring(i[0].length),this.tokens.push({type:"code",lang:i[2],text:i[3]});else if(i=thi [...]
\ No newline at end of file
diff --git a/doc/source/slideshow/architecture/style.css b/doc/source/slideshow/architecture/style.css
new file mode 100644
index 0000000..15197f4
--- /dev/null
+++ b/doc/source/slideshow/architecture/style.css
@@ -0,0 +1,210 @@
+@import url(https://fonts.googleapis.com/css?family=Oxygen);
+@import url(https://fonts.googleapis.com/css?family=Consolas);
+@import url(https://fonts.googleapis.com/css?family=Droid+Serif);
+@import url(https://fonts.googleapis.com/css?family=Yanone+Kaffeesatz);
+@import url(https://fonts.googleapis.com/css?family=Ubuntu+Mono:400,700,400italic);
+
+body {
+    font-family: 'Oxygen', 'PT Sans', Serif;
+}
+h1, h2, h3 {
+    font-weight: bold;
+}
+h1 {
+    font-size: 3em;
+    color: #ff9300;
+}
+h2 { font-size: 2em; }
+
+.title h1, h2 {
+    margin-bottom: 0px;
+    margin-top: 0px;
+    text-align: center;
+}
+
+.title p {
+    margin-top: 50px;
+    text-align: center;
+}
+
+.centered {
+    text-align: center;
+}
+
+img {
+    max-width: 100%;
+}
+
+.widen_image img {
+    max-width: 100%;
+    width: 100%;
+}
+
+.narrow_image img {
+    height: 500px;
+}
+
+h3 {
+    font-size: 1.8em;
+    position: absolute;
+    top: .5em;
+}
+
+.footnote {
+    position: absolute;
+    bottom: 3em;
+}
+
+.large {
+    font-size: 1.6em;
+}
+
+.larger {
+    font-size: 2em;
+}
+
+
+li p { line-height: 1.25em; }
+
+.normal {
+    font-size: 1em;
+}
+
+.smaller {
+    font-size: .8em;
+}
+
+a, a > code {
+    color: #ff9300;
+    text-decoration: none;
+}
+
+.code > pre, .code > code {
+    white-space: pre-wrap;
+}
+.remark-code, .remark-inline-code {
+    font-family: 'Consolas';
+    color: #ff9300;
+    font-size: 1.1em;
+}
+.slightly-smaller {
+    font-size: .9em;
+}
+    
+.remark-code-line-highlighted     {
+    background-color: #373832;
+}
+.pull-left {
+    float: left;
+    width: 47%;
+}
+.pull-right {
+    float: right;
+    width: 47%;
+}
+.pull-right ~ p {
+    clear: both;
+}
+#slideshow .slide .content code {
+}
+#slideshow .slide .content pre code {
+}
+
+.inverse {
+    background-color: #000;
+    background-repeat: no-repeat;
+    background-position: center;
+    background-size: contain;
+    color: #ffffff;
+}
+
+i, em, b, strong {
+    color: #ff9300;
+}
+
+.inverse h1, .inverse h2 {
+    color: #f3f3f3;
+    line-height: 0.8em;
+}
+
+.white {
+    background-color: #ffffff;
+    background-repeat: no-repeat;
+    background-position: center;
+    background-size: contain;
+    color: #000;
+}
+
+.strike {
+    text-decoration: line-through;
+}
+
+/* Slide-specific styling */
+#slide-inverse .footnote {
+    bottom: 12px;
+    left: 20px;
+}
+#slide-how .slides {
+    font-size: 0.9em;
+    position: absolute;
+    top:  151px;
+    right: 140px;
+}
+#slide-how .slides h3 {
+    margin-top: 0.2em;
+}
+#slide-how .slides .first, #slide-how .slides .second {
+    padding: 1px 20px;
+    height: 90px;
+    width: 120px;
+    -moz-box-shadow: 0 0 10px #777;
+    -webkit-box-shadow: 0 0 10px #777;
+    box-shadow: 0 0 10px #777;
+}
+#slide-how .slides .first {
+    background: #fff;
+    position: absolute;
+    top: 20%;
+    left: 20%;
+    z-index: 1;
+}
+#slide-how .slides .second {
+    position: relative;
+    background: #fff;
+    z-index: 0;
+}
+
+/* Two-column layout */
+.left-column {
+    color: #777;
+    width: 20%;
+    height: 92%;
+    float: left;
+}
+.left-column h2:last-of-type, .left-column h3:last-child {
+    color: #000;
+}
+.right-column {
+    width: 75%;
+    float: right;
+    padding-top: 1em;
+}
+
+/*First slide h1 orange, h2 white*/
+.special h1 {
+    color: #ff8800;
+}
+
+.special p {
+    color: #cbcbcb;
+}
+
+.special {
+    color: #ff8800;
+}
+
+/* Emphasis 2 */
+.special2 {
+    color: #00f900;
+}
+
diff --git a/doc/source/ts_api_doc.rst b/doc/source/ts_api_doc.rst
new file mode 100644
index 0000000..99b6c8c
--- /dev/null
+++ b/doc/source/ts_api_doc.rst
@@ -0,0 +1,17 @@
+Tool Shed API Documentation
+===========================
+
+The Galaxy Tool Shed can be accessed programmatically, via shell scripts
+and other programs, using the API described here.
+To interact with the API, first log in as your user, navigate
+to the API Keys page in the User menu, and generate a new API key. This
+key must be attached to every request as a parameter. Example:
+
+::
+
+    GET https://toolshed.g2.bx.psu.edu/api/repositories?q=fastq&key=SOME_KEY_HERE
+
+.. toctree::
+   :maxdepth: 3
+
+   Tool Shed API Documentation <lib/galaxy.webapps.tool_shed.api>
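The request above can be issued from any HTTP client. A minimal sketch,
assuming the third-party "requests" library and the placeholder key from
the example (SOME_KEY_HERE is not a real credential):

    import requests

    TOOL_SHED = "https://toolshed.g2.bx.psu.edu"
    # the API key is attached to every request as an ordinary parameter
    params = {"q": "fastq", "key": "SOME_KEY_HERE"}
    response = requests.get(TOOL_SHED + "/api/repositories", params=params)
    print(response.json())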
diff --git a/external_service_types/454_life_sciences.xml b/external_service_types/454_life_sciences.xml
new file mode 100644
index 0000000..13c71ba
--- /dev/null
+++ b/external_service_types/454_life_sciences.xml
@@ -0,0 +1,26 @@
+<sequencer_type id="454_life_sciences" name="454 Life Sciences" version="1.0.0">
+    <description></description>
+    <version>1</version>
+ 
+    <data_transfer_settings>
+        <data_transfer protocol='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location' />
+    </data_transfer_settings>
+ 
+    <run_details>
+        <results>
+            <dataset name="dataset1_name" datatype="dataset1_datatype" />
+        </results>
+    </run_details>
+ 
+    <form type="external_service" name="454 Life Sciences Form" description="">
+        <fields>
+            <field name="host" type="text" label="Hostname or IP address" description="" value="" required="True" />
+            <field name="user_name" type="text" label="User name" description="User name to log into the sequencer." value="" required="True" />
+            <field name="password" type="password" label="Password" description="" value="" required="True" />
+            <field name="data_location" type="text" label="Data directory" description="" value="" required="False" />
+            <field name="dataset1_name" type="text" label="Sample run output 1" description="" value="SFF file" required="True" />
+            <field name="dataset1_datatype" type="text" label="Sample run datatype 1" description="" value="sff" required="True" />
+        </fields>
+    </form>
+
+</sequencer_type>
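Definitions such as the one above are plain XML, so they can be inspected
with the standard library. A minimal sketch, illustrative only and not part
of Galaxy's own loader (the file path is taken from this diff):

    import xml.etree.ElementTree as ET

    tree = ET.parse("external_service_types/454_life_sciences.xml")
    root = tree.getroot()
    print(root.get("id"), root.get("name"), root.get("version"))

    # transfer settings and form fields mirror the XML structure above
    transfer = root.find("data_transfer_settings/data_transfer")
    print("protocol:", transfer.get("protocol"))
    for field in root.findall("form/fields/field"):
        print(field.get("name"), "required:", field.get("required"))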
diff --git a/external_service_types/applied_biosystems_solid.xml b/external_service_types/applied_biosystems_solid.xml
new file mode 100644
index 0000000..a2c70ef
--- /dev/null
+++ b/external_service_types/applied_biosystems_solid.xml
@@ -0,0 +1,38 @@
+<sequencer_type id="applied_biosystems_solid" name="Applied Biosystems SOLiD" version="1.0.0">
+    <description></description>
+    <version>3</version>
+ 
+    <data_transfer_settings>
+        <data_transfer protocol='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location' rename_dataset='rename_dataset' />
+    </data_transfer_settings>
+ 
+    <run_details>
+        <results>
+            <dataset name="dataset1_name" datatype="dataset1_datatype" />
+            <dataset name="dataset2_name" datatype="dataset2_datatype" />
+            <dataset name="dataset3_name" datatype="dataset3_datatype" />
+        </results>
+    </run_details>
+ 
+    <form type="external_service" name="Applied Biosystems SOLiD Form" description="">
+        <fields>
+            <field name="host" type="text" label="Hostname or IP address" description="" value="" required="True" />
+            <field name="user_name" type="text" label="User name" description="User name to log into the sequencer." value="" required="True" />
+            <field name="password" type="password" label="Password" description="" value="" required="True" />
+            <field name="data_location" type="text" label="Data directory" description="" value="" required="False" />
+            <field name="rename_dataset" type="select" label="Prepend the experiment name and sample name to the dataset name?" description="Galaxy datasets are renamed by prepending the experiment name and sample name to the dataset name, ensuring dataset names remain unique in Galaxy even when multiple datasets have the same name on the sequencer." value="">
+	            <option value="Do not rename" />
+                <option value="Prepend sample name" />
+                <option value="Prepend experiment name" />
+                <option value="Prepend experiment and sample name" />
+  	        </field>
+  	        <field name="dataset1_name" type="text" label="Sample run output 1" description="" value="Colorspace FASTA file" required="True" />
+  	        <field name="dataset1_datatype" type="text" label="Sample run datatype 1" description="" value="csfasta" required="True" />
+            <field name="dataset2_name" type="text" label="Sample run output 2" description="" value="Quality file" required="True" />
+            <field name="dataset2_datatype" type="text" label="Sample run datatype 2" description="" value="qual" required="True" />
+            <field name="dataset3_name" type="text" label="Sample run output 3" description="" value="STATS file" required="True" />
+            <field name="dataset3_datatype" type="text" label="Sample run datatype 3" description="" value="txt" required="True" />
+        </fields>
+    </form>
+
+</sequencer_type>
diff --git a/external_service_types/pacific_biosciences_smrt_portal.xml b/external_service_types/pacific_biosciences_smrt_portal.xml
new file mode 100644
index 0000000..afe1fa0
--- /dev/null
+++ b/external_service_types/pacific_biosciences_smrt_portal.xml
@@ -0,0 +1,134 @@
+<external_service id="pacific_biosciences_smrt_portal" name="Pacific Biosciences SMRT Portal" version="1.1.0">
+    <description></description>
+    <version>1</version>
+    <data_transfer_settings>
+        <data_transfer protocol='http' automatic_transfer='True' />
+    </data_transfer_settings>
+    <run_details>
+        <results>
+            <dataset name="dataset2_name" datatype="dataset2_datatype"
+                     url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/filtered_subreads.fa" />
+            <dataset name="dataset3_name" datatype="dataset3_datatype"
+                     url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/aligned_reads.bam" />
+            <dataset name="dataset4_name" datatype="dataset4_datatype"
+                     url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/alignment_summary.gff" />
+            <dataset name="dataset5_name" datatype="dataset5_datatype"
+                     url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/coverage.bed" />
+            <dataset name="dataset6_name" datatype="dataset6_datatype"
+                     url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/variants.bed" />
+            <dataset name="dataset7_name" datatype="dataset7_datatype"
+                     url="http://${host}/smrtportal/api/Jobs/${secondary_analysis_job_id}/Contents/data/variants.gff.gz" />
+        </results>
+    </run_details>
+    <form type="external_service" name="Pacific Biosciences SMRT Portal Form" description="">
+        <fields>
+            <field name="host" type="text" label="Hostname or IP address" description="" value="192.168.56.101" required="True" />
+            <field name="user_name" type="text" label="User name" description="User name to log into the sequencer." value="administrator" required="True" />
+            <field name="password" type="password" label="Password" description="" value="galaxy" required="True" />
+            <field name="data_location" type="text" label="Data directory" description="" value="" required="False" />
+            
+            <field name="dataset2_name" type="text" label="Filtered reads" description="" value="Filtered reads" required="True" />
+            <field name="dataset2_datatype" type="text" label="Format" description="" value="fasta" required="True" />
+            
+            <field name="dataset3_name" type="text" label="Aligned reads bam" description="" value="Aligned reads" required="True" />
+            <field name="dataset3_datatype" type="text" label="Format" description="" value="bam" required="True" />
+            
+            <field name="dataset4_name" type="text" label="Coverage gff" description="" value="Coverage (gff)" required="True" />
+            <field name="dataset4_datatype" type="text" label="Format" description="" value="gff" required="True" />
+            
+            <field name="dataset5_name" type="text" label="Coverage bed" description="" value="Coverage (bed)" required="True" />
+            <field name="dataset5_datatype" type="text" label="Format" description="" value="bed" required="True" />
+            
+            <field name="dataset6_name" type="text" label="Variants bed" description="" value="Variants (bed)" required="True" />
+            <field name="dataset6_datatype" type="text" label="Format" description="" value="bed" required="True" />
+
+            <field name="dataset7_name" type="text" label="Variants gff" description="" value="Variants (gff)" required="True" />
+            <field name="dataset7_datatype" type="text" label="Format" description="" value="gff" required="True" />
+        </fields>
+    </form>
+    
+    <actions>
+        <param name="api_url" type="template">http://${fields.host}/smrtportal/api</param>
+        <param name="web_url" type="template">http://${fields.host}/smrtportal/#</param>
+        <section name="jobs" label="Job Service">
+        
+            <param name="jobs_url" type="template">${api_url}/Jobs</param>
+            <param name="monitor_jobs_url" type="template">${web_url}/MonitorJobs</param>
+            <!--
+            <action type="web_api" name="jobs_list" label="List Jobs" description="">
+                <request target="galaxy_main" method="post">
+                    <url>${jobs.jobs_url}</url>
+                </request>
+                <result_handler type="jquery_grid"></result_handler>
+            </action>
+            -->
+            <conditional name="job_sample" label="Sample to Job Service" ref="item">
+                <when type="item_type" name="sample" value="sample">
+                    <param name="secondary_analysis_job_id" type="template">${item.run_details.run.info.content.get( 'secondary_analysis_job_id' )}</param>
+                    <conditional name="valid_job_sample" ref="jobs.job_sample.sample.secondary_analysis_job_id">
+                        <when type="boolean" name="valid">
+                            <param name="job_sample_url" type="template">${jobs.jobs_url}/${jobs.job_sample.sample.secondary_analysis_job_id}</param>
+                            <param name="monitor_job_url" type="template">${jobs.monitor_jobs_url}/DetailsOfJob/${jobs.job_sample.sample.secondary_analysis_job_id}</param>
+
+                            <action type="web" name="monitor_job" label="View Job in SMRT Portal" description="">
+                                <request target="_blank" method="get">
+                                    <url>${jobs.job_sample.sample.valid_job_sample.valid.monitor_job_url}</url>
+                                </request>
+                            </action>
+
+                            <action type="web_api" name="job_status" label="Job status" description="">
+                                <request target="galaxy_main" method="post">
+                                    <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Status</url>
+                                </request>
+                                <result_handler type="json_display"></result_handler>
+                            </action>
+
+                            <action type="web_api" name="job_history" label="Job history" description="">
+                                <request target="galaxy_main" method="post">
+                                    <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/History</url>
+                                </request>
+                                <result_handler type="jquery_grid"></result_handler>
+                            </action>
+                            
+                            <action type="web_api" name="job_log" label="Job log" description="">
+                                <request target="galaxy_main" method="post">
+                                    <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Log</url>
+                                </request>
+                                <result_handler type="display"></result_handler>
+                            </action>
+                            
+                            <action type="web_api" name="job_contents" label="Job contents" description="">
+                                <request target="galaxy_main" method="post">
+                                    <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Contents</url>
+                                </request>
+                                <result_handler type="json_display"></result_handler>
+                            </action>
+                            <!--
+                            <action type="web_api" name="job_protocol" label="Job protocol" description="">
+                                <request target="galaxy_main" method="post">
+                                    <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Protocol</url>
+                                </request>
+                                <result_handler type="display"></result_handler>
+                            </action>
+                            -->
+                            <action type="web_api" name="job_inputs" label="Job inputs" description="">
+                                <request target="galaxy_main" method="post">
+                                    <url>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Inputs</url>
+                                </request>
+                                <result_handler type="jquery_grid"></result_handler>
+                            </action>
+                            
+                            <action type="template" name="job_smrt_view" label="Job SMRT View" description="">
+                                <template>${jobs.job_sample.sample.valid_job_sample.valid.job_sample_url}/Contents/vis.jnlp</template>
+                                <result_handler type="web_redirect"></result_handler>
+                            </action>
+
+                        </when>
+                    </conditional>
+                </when>
+            </conditional>
+            
+        </section>
+    </actions>
+    
+</external_service>
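The ${...} placeholders in the <actions> section above resolve against the
form fields and run details, with dotted names walking a nested context. A
minimal sketch of that substitution rule (the fill() helper is hypothetical,
not Galaxy's own template engine), using the default host from the form:

    import re

    def fill(template, context):
        # resolve ${a.b.c} by walking nested dictionaries
        def lookup(match):
            value = context
            for part in match.group(1).split('.'):
                value = value[part]
            return str(value)
        return re.sub(r'\$\{([^}]+)\}', lookup, template)

    context = {'fields': {'host': '192.168.56.101'}}
    print(fill("http://${fields.host}/smrtportal/api", context))
    # -> http://192.168.56.101/smrtportal/api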
diff --git a/external_service_types/simple_unknown_sequencer.xml b/external_service_types/simple_unknown_sequencer.xml
new file mode 100644
index 0000000..4064d37
--- /dev/null
+++ b/external_service_types/simple_unknown_sequencer.xml
@@ -0,0 +1,26 @@
+<!--
+DEPRECATED
+This sequencer type should not be used to create new sequencers.
+It exists only for backward compatibility.
+-->
+<sequencer_type id="simple_unknown_sequencer" name="Simple unknown sequencer" version="1.0.0">
+    <description></description>
+    <version></version>
+    <data_transfer_settings>
+        <data_transfer protocol='scp' automatic_transfer='False' host='host' user_name='user_name' password='password' data_location='data_location'/>
+    </data_transfer_settings>
+    <form type="external_service" name="Simple unknown sequencer form" description="">
+        <fields>
+            <field name="host" type="text" label="Hostname or IP address" description="" value="" required="True" />
+            <field name="user_name" type="text" label="User name" description="User name to log into the sequencer." value="" required="True" />
+            <field name="password" type="password" label="Password" description="" value="" required="True" />
+            <field name="data_location" type="text" label="Data directory" description="" value="" required="False" />
+            <field name="rename_dataset" type="select" label="Prepend the experiment name and sample name to the dataset name?" description="Galaxy datasets are renamed by prepending the experiment name and sample name to the dataset name, ensuring dataset names remain unique in Galaxy even when multiple datasets have the same name on the sequencer." value="">
+                <option value="Do not rename" />
+                <option value="Prepend sample name" />
+                <option value="Prepend experiment name" />
+                <option value="Prepend experiment and sample name" />
+            </field>
+        </fields>
+    </form>
+</sequencer_type>
\ No newline at end of file
diff --git a/extract_dataset_parts.sh b/extract_dataset_parts.sh
new file mode 100755
index 0000000..26f915c
--- /dev/null
+++ b/extract_dataset_parts.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+cd "$(dirname "$0")"
+for file in "$1"/split_info*.json
+do
+    # extract the dataset part described by each split_info JSON file
+    python ./scripts/extract_dataset_part.py "$file"
+done
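The same loop can be driven directly from Python when the shell wrapper is
not available. A minimal sketch under the script's own assumptions (the
split_info*.json files live under the directory given as the first argument,
and the helper path matches the script above):

    import glob
    import subprocess
    import sys

    for path in sorted(glob.glob(sys.argv[1] + "/split_info*.json")):
        subprocess.check_call(
            [sys.executable, "scripts/extract_dataset_part.py", path])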
diff --git a/lib/galaxy/__init__.py b/lib/galaxy/__init__.py
new file mode 100644
index 0000000..07a3f6c
--- /dev/null
+++ b/lib/galaxy/__init__.py
@@ -0,0 +1,20 @@
+"""
+Galaxy root package -- this is a namespace package.
+"""
+
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
+
+
+# compat: BadZipFile introduced in Python 2.7
+import zipfile
+if not hasattr( zipfile, 'BadZipFile' ):
+    zipfile.BadZipFile = zipfile.error
+
+# compat: patch to add the NullHandler class to logging
+import logging
+if not hasattr( logging, 'NullHandler' ):
+    class NullHandler( logging.Handler ):
+        def emit( self, record ):
+            pass
+    logging.NullHandler = NullHandler
diff --git a/lib/galaxy/actions/__init__.py b/lib/galaxy/actions/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/actions/admin.py b/lib/galaxy/actions/admin.py
new file mode 100644
index 0000000..458332b
--- /dev/null
+++ b/lib/galaxy/actions/admin.py
@@ -0,0 +1,200 @@
+"""
+Contains administrative functions
+"""
+
+import logging
+from galaxy import util
+from galaxy.exceptions import ActionInputError
+
+log = logging.getLogger( __name__ )
+
+
+class AdminActions( object ):
+    """
+    Mixin for controllers that provide administrative functionality.
+    """
+    def _create_quota( self, params ):
+        if params.amount.lower() in ( 'unlimited', 'none', 'no limit' ):
+            create_amount = None
+        else:
+            try:
+                create_amount = util.size_to_bytes( params.amount )
+            except AssertionError:
+                create_amount = False
+        if not params.name or not params.description:
+            raise ActionInputError( "Enter a valid name and a description." )
+        elif self.sa_session.query( self.app.model.Quota ).filter( self.app.model.Quota.table.c.name == params.name ).first():
+            raise ActionInputError( "Quota names must be unique and a quota with that name already exists, so choose another name." )
+        elif not params.get( 'amount', None ):
+            raise ActionInputError( "Enter a valid quota amount." )
+        elif create_amount is False:
+            raise ActionInputError( "Unable to parse the provided amount." )
+        elif params.operation not in self.app.model.Quota.valid_operations:
+            raise ActionInputError( "Enter a valid operation." )
+        elif params.default != 'no' and params.default not in self.app.model.DefaultQuotaAssociation.types.__dict__.values():
+            raise ActionInputError( "Enter a valid default type." )
+        elif params.default != 'no' and params.operation != '=':
+            raise ActionInputError( "Operation for a default quota must be '='." )
+        elif create_amount is None and params.operation != '=':
+            raise ActionInputError( "Operation for an unlimited quota must be '='." )
+        else:
+            # Create the quota
+            quota = self.app.model.Quota( name=params.name, description=params.description, amount=create_amount, operation=params.operation )
+            self.sa_session.add( quota )
+            # If this is a default quota, create the DefaultQuotaAssociation
+            if params.default != 'no':
+                self.app.quota_agent.set_default_quota( params.default, quota )
+            else:
+                # Create the UserQuotaAssociations
+                for user in [ self.sa_session.query( self.app.model.User ).get( x ) for x in params.in_users ]:
+                    uqa = self.app.model.UserQuotaAssociation( user, quota )
+                    self.sa_session.add( uqa )
+                # Create the GroupQuotaAssociations
+                for group in [ self.sa_session.query( self.app.model.Group ).get( x ) for x in params.in_groups ]:
+                    gqa = self.app.model.GroupQuotaAssociation( group, quota )
+                    self.sa_session.add( gqa )
+            self.sa_session.flush()
+            message = "Quota '%s' has been created with %d associated users and %d associated groups." % \
+                      ( quota.name, len( params.in_users ), len( params.in_groups ) )
+            return quota, message
+
+    def _rename_quota( self, quota, params ):
+        if not params.name:
+            raise ActionInputError( 'Enter a valid name' )
+        elif params.name != quota.name and self.sa_session.query( self.app.model.Quota ).filter( self.app.model.Quota.table.c.name == params.name ).first():
+            raise ActionInputError( 'A quota with that name already exists' )
+        else:
+            old_name = quota.name
+            quota.name = params.name
+            quota.description = params.description
+            self.sa_session.add( quota )
+            self.sa_session.flush()
+            message = "Quota '%s' has been renamed to '%s'" % ( old_name, params.name )
+            return message
+
+    def _manage_users_and_groups_for_quota( self, quota, params ):
+        if quota.default:
+            raise ActionInputError( 'Default quotas cannot be associated with specific users and groups' )
+        else:
+            in_users = [ self.sa_session.query( self.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
+            in_groups = [ self.sa_session.query( self.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
+            self.app.quota_agent.set_entity_quota_associations( quotas=[ quota ], users=in_users, groups=in_groups )
+            self.sa_session.refresh( quota )
+            message = "Quota '%s' has been updated with %d associated users and %d associated groups" % ( quota.name, len( in_users ), len( in_groups ) )
+            return message
+
+    def _edit_quota( self, quota, params ):
+        if params.amount.lower() in ( 'unlimited', 'none', 'no limit' ):
+            new_amount = None
+        else:
+            try:
+                new_amount = util.size_to_bytes( params.amount )
+            except AssertionError:
+                new_amount = False
+        if not params.amount:
+            raise ActionInputError( 'Enter a valid amount' )
+        elif new_amount is False:
+            raise ActionInputError( 'Unable to parse the provided amount' )
+        elif params.operation not in self.app.model.Quota.valid_operations:
+            raise ActionInputError( 'Enter a valid operation' )
+        else:
+            quota.amount = new_amount
+            quota.operation = params.operation
+            self.sa_session.add( quota )
+            self.sa_session.flush()
+            message = "Quota '%s' is now '%s'" % ( quota.name, quota.operation + quota.display_amount )
+            return message
+
+    def _set_quota_default( self, quota, params ):
+        if params.default != 'no' and params.default not in self.app.model.DefaultQuotaAssociation.types.__dict__.values():
+            raise ActionInputError( 'Enter a valid default type.' )
+        else:
+            if params.default != 'no':
+                self.app.quota_agent.set_default_quota( params.default, quota )
+                message = "Quota '%s' is now the default for %s users" % ( quota.name, params.default )
+            else:
+                if quota.default:
+                    message = "Quota '%s' is no longer the default for %s users." % ( quota.name, quota.default[0].type )
+                    for dqa in quota.default:
+                        self.sa_session.delete( dqa )
+                    self.sa_session.flush()
+                else:
+                    message = "Quota '%s' is not a default." % quota.name
+            return message
+
+    def _unset_quota_default( self, quota, params ):
+        if not quota.default:
+            raise ActionInputError( "Quota '%s' is not a default." % quota.name )
+        else:
+            message = "Quota '%s' is no longer the default for %s users." % ( quota.name, quota.default[0].type )
+            for dqa in quota.default:
+                self.sa_session.delete( dqa )
+            self.sa_session.flush()
+            return message
+
+    def _mark_quota_deleted( self, quota, params ):
+        quotas = util.listify( quota )
+        names = []
+        for q in quotas:
+            if q.default:
+                names.append( q.name )
+        if len( names ) == 1:
+            raise ActionInputError( "Quota '%s' is a default, please unset it as a default before deleting it" % ( names[0] ) )
+        elif len( names ) > 1:
+            raise ActionInputError( "Quotas are defaults, please unset them as defaults before deleting them: " + ', '.join( names ) )
+        message = "Deleted %d quotas: " % len( quotas )
+        for q in quotas:
+            q.deleted = True
+            self.sa_session.add( q )
+            names.append( q.name )
+        self.sa_session.flush()
+        message += ', '.join( names )
+        return message
+
+    def _undelete_quota( self, quota, params=None):
+        quotas = util.listify( quota )
+        names = []
+        for q in quotas:
+            if not q.deleted:
+                names.append( q.name )
+        if len( names ) == 1:
+            raise ActionInputError( "Quota '%s' has not been deleted, so it cannot be undeleted." % ( names[0] ) )
+        elif len( names ) > 1:
+            raise ActionInputError( "Quotas have not been deleted so they cannot be undeleted: " + ', '.join( names ) )
+        message = "Undeleted %d quotas: " % len( quotas )
+        for q in quotas:
+            q.deleted = False
+            self.sa_session.add( q )
+            names.append( q.name )
+        self.sa_session.flush()
+        message += ', '.join( names )
+        return message
+
+    def _purge_quota( self, quota, params ):
+        """
+        This method should only be called for a Quota that has previously been deleted.
+        Purging a deleted Quota deletes all of the following from the database:
+        - UserQuotaAssociations where quota_id == Quota.id
+        - GroupQuotaAssociations where quota_id == Quota.id
+        """
+        quotas = util.listify( quota )
+        names = []
+        for q in quotas:
+            if not q.deleted:
+                names.append( q.name )
+        if len( names ) == 1:
+            raise ActionInputError( "Quota '%s' has not been deleted, so it cannot be purged." % ( names[0] ) )
+        elif len( names ) > 1:
+            raise ActionInputError( "Quotas have not been deleted so they cannot be undeleted: " + ', '.join( names ) )
+        message = "Purged %d quotas: " % len( quotas )
+        for q in quotas:
+            # Delete UserQuotaAssociations
+            for uqa in q.users:
+                self.sa_session.delete( uqa )
+            # Delete GroupQuotaAssociations
+            for gqa in q.groups:
+                self.sa_session.delete( gqa )
+            names.append( q.name )
+        self.sa_session.flush()
+        message += ', '.join( names )
+        return message
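Both _create_quota and _edit_quota above share the same amount-parsing rule:
the literal strings 'unlimited', 'none', and 'no limit' mean no limit
(stored as None), anything else is delegated to util.size_to_bytes, and a
parse failure is flagged as False so the caller can raise ActionInputError.
A minimal sketch of that rule (parse_amount is a hypothetical name, not a
method of AdminActions):

    from galaxy import util

    def parse_amount(amount):
        if amount.lower() in ('unlimited', 'none', 'no limit'):
            return None  # unlimited quota
        try:
            return util.size_to_bytes(amount)  # delegate to Galaxy's size parser
        except AssertionError:
            return False  # sentinel: caller raises ActionInputError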
diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py
new file mode 100644
index 0000000..d5f73b9
--- /dev/null
+++ b/lib/galaxy/app.py
@@ -0,0 +1,222 @@
+from __future__ import absolute_import
+import logging
+import signal
+import sys
+import time
+import os
+
+from galaxy import config, jobs
+import galaxy.model
+import galaxy.security
+import galaxy.queues
+from galaxy.managers.collections import DatasetCollectionManager
+import galaxy.quota
+from galaxy.managers.tags import GalaxyTagManager
+from galaxy.visualization.genomes import Genomes
+from galaxy.visualization.data_providers.registry import DataProviderRegistry
+from galaxy.visualization.plugins.registry import VisualizationsRegistry
+from galaxy.tools.special_tools import load_lib_tools
+from galaxy.tours import ToursRegistry
+from galaxy.webhooks import WebhooksRegistry
+from galaxy.sample_tracking import external_service_types
+from galaxy.openid.providers import OpenIDProviders
+from galaxy.tools.data_manager.manager import DataManagers
+from galaxy.jobs import metrics as job_metrics
+from galaxy.web.proxy import ProxyManager
+from galaxy.queue_worker import GalaxyQueueWorker
+from galaxy.util import heartbeat
+from galaxy.util.postfork import register_postfork_function
+from tool_shed.galaxy_install import update_repository_manager
+
+
+log = logging.getLogger( __name__ )
+app = None
+
+
+class UniverseApplication( object, config.ConfiguresGalaxyMixin ):
+    """Encapsulates the state of a Universe application"""
+    def __init__( self, **kwargs ):
+        if not log.handlers:
+            # Paste didn't handle it, so we need a temporary basic log
+            # configured.  The handler added here gets dumped and replaced with
+            # an appropriately configured logger in configure_logging below.
+            logging.basicConfig(level=logging.DEBUG)
+        log.debug( "python path is: %s", ", ".join( sys.path ) )
+        self.name = 'galaxy'
+        self.new_installation = False
+        # Read config file and check for errors
+        self.config = config.Configuration( **kwargs )
+        self.config.check()
+        config.configure_logging( self.config )
+        self.configure_fluent_log()
+        self.config.reload_sanitize_whitelist(explicit='sanitize_whitelist_file' in kwargs)
+        self.amqp_internal_connection_obj = galaxy.queues.connection_from_config(self.config)
+        # control_worker *can* be initialized with a queue, but here we don't
+        # want to and we'll allow postfork to bind and start it.
+        self.control_worker = GalaxyQueueWorker(self)
+
+        self._configure_tool_shed_registry()
+        self._configure_object_store( fsmon=True )
+        # Setup the database engine and ORM
+        config_file = kwargs.get( 'global_conf', {} ).get( '__file__', None )
+        if config_file:
+            log.debug( 'Using "galaxy.ini" config file: %s', config_file )
+        check_migrate_tools = self.config.check_migrate_tools
+        self._configure_models( check_migrate_databases=True, check_migrate_tools=check_migrate_tools, config_file=config_file )
+
+        # Manage installed tool shed repositories.
+        from tool_shed.galaxy_install import installed_repository_manager
+        self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager( self )
+
+        self._configure_datatypes_registry( self.installed_repository_manager )
+        galaxy.model.set_datatypes_registry( self.datatypes_registry )
+
+        # Security helper
+        self._configure_security()
+        # Tag handler
+        self.tag_handler = GalaxyTagManager( self )
+        # Dataset Collection Plugins
+        self.dataset_collections_service = DatasetCollectionManager(self)
+
+        # Tool Data Tables
+        self._configure_tool_data_tables( from_shed_config=False )
+        # Load dbkey / genome build manager
+        self._configure_genome_builds( data_table_name="__dbkeys__", load_old_style=True )
+
+        # Genomes
+        self.genomes = Genomes( self )
+        # Data providers registry.
+        self.data_provider_registry = DataProviderRegistry()
+
+        # Initialize job metrics manager, needs to be in place before
+        # config so per-destination modifications can be made.
+        self.job_metrics = job_metrics.JobMetrics( self.config.job_metrics_config_file, app=self )
+
+        # Initialize the job management configuration
+        self.job_config = jobs.JobConfiguration(self)
+
+        self._configure_toolbox()
+
+        # Load Data Manager
+        self.data_managers = DataManagers( self )
+        # Load the update repository manager.
+        self.update_repository_manager = update_repository_manager.UpdateRepositoryManager( self )
+        # Load proprietary datatype converters and display applications.
+        self.installed_repository_manager.load_proprietary_converters_and_display_applications()
+        # Load datatype display applications defined in local datatypes_conf.xml
+        self.datatypes_registry.load_display_applications( self )
+        # Load datatype converters defined in local datatypes_conf.xml
+        self.datatypes_registry.load_datatype_converters( self.toolbox )
+        # Load external metadata tool
+        self.datatypes_registry.load_external_metadata_tool( self.toolbox )
+        # Load history import/export tools.
+        load_lib_tools( self.toolbox )
+        # visualizations registry: associates resources with visualizations, controls how to render
+        self.visualizations_registry = VisualizationsRegistry(
+            self,
+            directories_setting=self.config.visualization_plugins_directory,
+            template_cache_dir=self.config.template_cache )
+        # Tours registry
+        self.tour_registry = ToursRegistry(self.config.tour_config_dir)
+        # Webhooks registry
+        self.webhooks_registry = WebhooksRegistry(self.config.webhooks_dirs)
+        # Load security policy.
+        self.security_agent = self.model.security_agent
+        self.host_security_agent = galaxy.security.HostAgent(
+            model=self.security_agent.model,
+            permitted_actions=self.security_agent.permitted_actions )
+        # Load quota management.
+        if self.config.enable_quotas:
+            self.quota_agent = galaxy.quota.QuotaAgent( self.model )
+        else:
+            self.quota_agent = galaxy.quota.NoQuotaAgent( self.model )
+        # Heartbeat for thread profiling
+        self.heartbeat = None
+        # Container for OpenID authentication routines
+        if self.config.enable_openid:
+            from galaxy.web.framework import openid_manager
+            self.openid_manager = openid_manager.OpenIDManager( self.config.openid_consumer_cache_path )
+            self.openid_providers = OpenIDProviders.from_file( self.config.openid_config_file )
+        else:
+            self.openid_providers = OpenIDProviders()
+        from galaxy import auth
+        self.auth_manager = auth.AuthManager( self )
+        # Start the heartbeat process if configured and available (wait until
+        # postfork if using uWSGI)
+        if self.config.use_heartbeat:
+            if heartbeat.Heartbeat:
+                self.heartbeat = heartbeat.Heartbeat(
+                    self.config,
+                    period=self.config.heartbeat_interval,
+                    fname=self.config.heartbeat_log
+                )
+                self.heartbeat.daemon = True
+                register_postfork_function(self.heartbeat.start)
+        self.sentry_client = None
+        if self.config.sentry_dsn:
+
+            def postfork_sentry_client():
+                import raven
+                self.sentry_client = raven.Client(self.config.sentry_dsn)
+
+            register_postfork_function(postfork_sentry_client)
+
+        # Transfer manager client
+        if self.config.get_bool( 'enable_beta_job_managers', False ):
+            from galaxy.jobs import transfer_manager
+            self.transfer_manager = transfer_manager.TransferManager( self )
+        # Start the job manager
+        from galaxy.jobs import manager
+        self.job_manager = manager.JobManager( self )
+        self.job_manager.start()
+        # FIXME: These are exposed directly for backward compatibility
+        self.job_queue = self.job_manager.job_queue
+        self.job_stop_queue = self.job_manager.job_stop_queue
+        self.proxy_manager = ProxyManager( self.config )
+        # Initialize the external service types
+        self.external_service_types = external_service_types.ExternalServiceTypesCollection(
+            self.config.external_service_type_config_file,
+            self.config.external_service_type_path, self )
+
+        from galaxy.workflow import scheduling_manager
+        # Must be initialized after job_config.
+        self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager( self )
+
+        # Configure handling of signals
+        handlers = {}
+        if self.heartbeat:
+            handlers[signal.SIGUSR1] = self.heartbeat.dump_signal_handler
+        self._configure_signal_handlers( handlers )
+
+        self.model.engine.dispose()
+        self.server_starttime = int(time.time())  # used for cachebusting
+
+    def shutdown( self ):
+        self.workflow_scheduling_manager.shutdown()
+        self.job_manager.shutdown()
+        self.object_store.shutdown()
+        if self.heartbeat:
+            self.heartbeat.shutdown()
+        self.update_repository_manager.shutdown()
+        try:
+            self.control_worker.shutdown()
+        except AttributeError:
+            # There is no control_worker
+            pass
+        try:
+            # If the datatypes registry was persisted, attempt to
+            # remove the temporary file in which it was written.
+            if self.datatypes_registry.integrated_datatypes_configs is not None:
+                os.unlink( self.datatypes_registry.integrated_datatypes_configs )
+        except Exception:
+            pass
+
+    def configure_fluent_log( self ):
+        if self.config.fluent_log:
+            from galaxy.util.log.fluent_log import FluentTraceLogger
+            self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+        else:
+            self.trace_logger = None
+
+    def is_job_handler( self ):
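+        # True when jobs are tracked in the database and this server_name is
+        # configured as a handler, or when jobs are not tracked in the
+        # database at all (each process then handles its own jobs).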
+        return (self.config.track_jobs_in_database and self.job_config.is_handler(self.config.server_name)) or not self.config.track_jobs_in_database
diff --git a/lib/galaxy/auth/__init__.py b/lib/galaxy/auth/__init__.py
new file mode 100644
index 0000000..387a952
--- /dev/null
+++ b/lib/galaxy/auth/__init__.py
@@ -0,0 +1,174 @@
+"""
+Contains implementations of the authentication logic.
+"""
+
+import logging
+import xml.etree.ElementTree
+from collections import namedtuple
+
+from galaxy.security.validate_user_input import validate_publicname
+from galaxy.util import plugin_config, string_as_bool, string_as_bool_or_none
+
+log = logging.getLogger(__name__)
+
+
+class AuthManager(object):
+
+    def __init__(self, app):
+        self.__app = app
+        import galaxy.auth.providers
+        self.__plugins_dict = plugin_config.plugins_dict(galaxy.auth.providers, 'plugin_type' )
+        auth_config_file = app.config.auth_config_file
+        # parse XML
+        ct = xml.etree.ElementTree.parse(auth_config_file)
+        conf_root = ct.getroot()
+
+        authenticators = []
+        # process authenticators
+        for auth_elem in conf_root.getchildren():
+            type_elem = auth_elem.find('type')
+            plugin = self.__plugins_dict.get(type_elem.text)()
+
+            # check filterelem
+            filter_elem = auth_elem.find('filter')
+            if filter_elem is not None:
+                filter_template = str(filter_elem.text)
+            else:
+                filter_template = None
+
+            # extract options
+            options_elem = auth_elem.find('options')
+            options = {}
+            if options_elem is not None:
+                for opt in options_elem:
+                    options[opt.tag] = opt.text
+            authenticator = Authenticator(
+                plugin=plugin,
+                filter_template=filter_template,
+                options=options,
+            )
+            authenticators.append(authenticator)
+        self.authenticators = authenticators
+
+    def check_registration_allowed(self, email, username, password):
+        """Checks if the provided email/username is allowed to register."""
+        message = ''
+        status = 'done'
+        for provider, options in self.active_authenticators(email, username, password):
+            allow_reg = _get_tri_state(options, 'allow-register', True)
+            if allow_reg is None:  # i.e. challenge
+                auth_result, msg = provider.authenticate(email, username, password, options)
+                if auth_result is True:
+                    break
+                if auth_result is None:
+                    message = 'Invalid email address/username or password'
+                    status = 'error'
+                    break
+            elif allow_reg is True:
+                break
+            elif allow_reg is False:
+                message = 'Account registration not required for your account. Please simply log in.'
+                status = 'error'
+                break
+        return message, status
+
+    def check_auto_registration(self, trans, login, password):
+        """
+        Checks the username/email & password using auth providers in order.
+        If a match is found, returns the 'auto-register' option for that provider.
+        """
+        if '@' in login:
+            email = login
+            username = None
+        else:
+            email = None
+            username = login
+        for provider, options in self.active_authenticators(email, username, password):
+            if provider is None:
+                log.debug( "Unable to find module: %s" % options )
+            else:
+                auth_result, auto_email, auto_username = provider.authenticate(email, username, password, options)
+                auto_email = str(auto_email).lower()
+                auto_username = str(auto_username).lower()
+                if auth_result is True:
+                    # make username unique
+                    if validate_publicname( trans, auto_username ) != '':
+                        i = 1
+                        while i <= 10:  # stop after 10 tries
+                            if validate_publicname( trans, "%s-%i" % (auto_username, i) ) == '':
+                                auto_username = "%s-%i" % (auto_username, i)
+                                break
+                            i += 1
+                        else:
+                            break  # end for loop if we can't make a unique username
+                    log.debug( "Email: %s, auto-register with username: %s" % (auto_email, auto_username) )
+                    return (_get_bool(options, 'auto-register', False), auto_email, auto_username)
+                elif auth_result is None:
+                    log.debug( "Email: %s, Username %s, stopping due to failed non-continue" % (auto_email, auto_username) )
+                    break  # end authentication (skip rest)
+        return (False, '', '')
+
+    def check_password(self, user, password):
+        """Checks the username/email and password using auth providers."""
+        for provider, options in self.active_authenticators(user.email, user.username, password):
+            if provider is None:
+                log.debug( "Unable to find module: %s" % options )
+            else:
+                auth_result = provider.authenticate_user(user, password, options)
+                if auth_result is True:
+                    return True  # accept user
+                elif auth_result is None:
+                    break  # end authentication (skip rest)
+        return False
+
+    def check_change_password(self, user, current_password):
+        """Checks that auth provider allows password changes and current_password
+        matches.
+        """
+        for provider, options in self.active_authenticators(user.email, user.username, current_password):
+            if provider is None:
+                log.debug( "Unable to find module: %s" % options )
+            else:
+                auth_result = provider.authenticate_user(user, current_password, options)
+                if auth_result is True:
+                    if _get_bool(options, "allow-password-change", False):
+                        return (True, '')  # accept user
+                    else:
+                        return (False, 'Password change not supported')
+                elif auth_result is None:
+                    break  # end authentication (skip rest)
+        return (False, 'Invalid current password')
+
+    def active_authenticators(self, email, username, password):
+        """Yields AuthProvider instances for the provided configfile that match the
+        filters.
+        """
+        try:
+            for authenticator in self.authenticators:
+                filter_template = authenticator.filter_template
+                if filter_template:
+                    filter_str = filter_template.format(email=email, username=username, password=password)
+                    passed_filter = eval(filter_str, {"__builtins__": None}, {'str': str})
+                    if not passed_filter:
+                        continue  # skip to next
+                yield authenticator.plugin, authenticator.options
+        except Exception:
+            log.exception( "Active Authenticators Failure" )
+            raise
+
+
+Authenticator = namedtuple('Authenticator', ['plugin', 'filter_template', 'options'])
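+
+
+# Filter templates are plain Python expressions: active_authenticators() first
+# substitutes {email}, {username} and {password} via str.format and then
+# evaluates the result with no builtins other than str. A hypothetical filter:
+#
+#     <filter>'{email}'.endswith('@example.org')</filter>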
+
+
+def _get_bool(d, k, o):
+    if k in d:
+        return string_as_bool(d[k])
+    else:
+        return o
+
+
+def _get_tri_state(d, k, o):
+    if k in d:
+        return string_as_bool_or_none(d[k])
+    else:
+        return o
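+
+
+# Usage sketch: both helpers fall back to the supplied default when the key is
+# absent, e.g. _get_bool({'auto-register': 'True'}, 'auto-register', False)
+# returns True, while _get_tri_state({}, 'allow-register', True) returns the
+# default True.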
diff --git a/lib/galaxy/auth/providers/__init__.py b/lib/galaxy/auth/providers/__init__.py
new file mode 100644
index 0000000..054bb45
--- /dev/null
+++ b/lib/galaxy/auth/providers/__init__.py
@@ -0,0 +1,58 @@
+"""
+Created on 15/07/2014
+
+@author: Andrew Robinson
+"""
+
+import abc
+
+
+class AuthProvider(object):
+    """A base class for all Auth Providers."""
+    __metaclass__ = abc.ABCMeta
+
+    @abc.abstractproperty
+    def plugin_type(self):
+        """ Short string providing labelling this plugin """
+
+    @abc.abstractmethod
+    def authenticate(self, email, username, password, options):
+        """
+        Check that the user credentials are correct.
+
+        NOTE: Used within auto-registration to check it is ok to register this
+        user.
+
+        :param  email: the user's email address
+        :type   email: str
+        :param  username: the user's username
+        :type   username: str
+        :param  password: the plain text password they typed
+        :type   password: str
+        :param  options: options provided in auth_config_file
+        :type   options: dict
+        :returns:   True: accept user, False: reject user, None: reject user
+            and don't try any other providers.  The two strings are the email
+            and username to register with if accepting.
+        :rtype:     (bool, str, str)
+        """
+
+    @abc.abstractmethod
+    def authenticate_user(self, user, password, options):
+        """
+        Same as the authenticate() method, except a User object is provided instead
+        of a username.
+
+        NOTE: used on normal login to check authentication and update user
+        details if required.
+
+        :param  user: the user to authenticate
+        :type   user: galaxy.model.User
+        :param  password: the plain text password they typed
+        :type   password: str
+        :param  options: options provided in auth_config_file
+        :type   options: dict
+        :returns:   True: accept user, False: reject user and None: reject user
+            and don't try any other providers
+        :rtype:     bool
+        """
diff --git a/lib/galaxy/auth/providers/alwaysreject.py b/lib/galaxy/auth/providers/alwaysreject.py
new file mode 100644
index 0000000..706681e
--- /dev/null
+++ b/lib/galaxy/auth/providers/alwaysreject.py
@@ -0,0 +1,33 @@
+"""
+Created on 16/07/2014
+
+@author: Andrew Robinson
+"""
+import logging
+
+from ..providers import AuthProvider
+
+log = logging.getLogger(__name__)
+
+
+class AlwaysReject(AuthProvider):
+    """A simple authenticator that just accepts users (does not care about their
+    password).
+    """
+    plugin_type = 'alwaysreject'
+
+    def authenticate(self, email, username, password, options):
+        """
+        See abstract method documentation.
+        """
+        return (None, '', '')
+
+    def authenticate_user(self, user, password, options):
+        """
+        See abstract method documentation.
+        """
+        log.debug("User: %s, ALWAYSREJECT: None" % (user.email))
+        return None
+
+
+__all__ = ('AlwaysReject', )
diff --git a/lib/galaxy/auth/providers/ldap_ad.py b/lib/galaxy/auth/providers/ldap_ad.py
new file mode 100644
index 0000000..69ffa9c
--- /dev/null
+++ b/lib/galaxy/auth/providers/ldap_ad.py
@@ -0,0 +1,202 @@
+"""
+Created on 15/07/2014
+
+@author: Andrew Robinson
+"""
+
+import logging
+
+from galaxy.auth import _get_bool
+from galaxy.exceptions import ConfigurationError
+
+from ..providers import AuthProvider
+
+log = logging.getLogger(__name__)
+
+
+def _get_subs(d, k, params):
+    if k not in d:
+        raise ConfigurationError("Missing '%s' parameter in LDAP options" % k)
+    return str(d[k]).format(**params)
+
+
+def _parse_ldap_options(ldap, options_unparsed):
+    # Tag is defined in the XML but is empty
+    if not options_unparsed:
+        return []
+
+    if "=" not in options_unparsed:
+        log.error("LDAP authenticate: Invalid syntax in <ldap-options>. Syntax should be option1=value1,option2=value2")
+        return []
+
+    ldap_options = []
+
+    # Valid options must start with this prefix. See help(ldap)
+    prefix = "OPT_"
+
+    for opt in options_unparsed.split(","):
+        key, value = opt.split("=")
+
+        try:
+            pair = []
+            for n in (key, value):
+                if not n.startswith(prefix):
+                    raise ValueError
+
+                name = getattr(ldap, n)
+                pair.append(name)
+
+        except ValueError:
+            log.warning("LDAP authenticate: Invalid parameter pair %s=%s. '%s' doesn't start with prefix %s", key, value, n, prefix)
+            continue
+
+        except AttributeError:
+            log.warning("LDAP authenticate: Invalid parameter pair %s=%s. '%s' is not available in module ldap", key, value, n)
+            continue
+
+        else:
+            log.debug("LDAP authenticate: Valid LDAP option pair %s=%s -> %s=%s", key, value, *pair)
+            ldap_options.append(pair)
+
+    return ldap_options
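+
+# Example (assuming the python-ldap module is importable): the option string
+#     OPT_X_TLS_REQUIRE_CERT=OPT_X_TLS_ALLOW
+# parses to [(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW)], while any
+# pair whose names do not start with OPT_ is skipped with a warning.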
+
+
+class LDAP(AuthProvider):
+
+    """
+    Attempts to authenticate users against an LDAP server.
+
+    If options include search-fields then it will attempt to search LDAP for
+    those fields first.  After that it will bind to LDAP with the username
+    (formatted as specified).
+    """
+    plugin_type = 'ldap'
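+
+    # A hedged configuration sketch: the option tags are exactly the keys read
+    # below, the values are placeholders, and {uid} exists only because uid is
+    # listed in search-fields (search results are folded into the format
+    # params):
+    #
+    # <authenticator>
+    #     <type>ldap</type>
+    #     <options>
+    #         <server>ldap://ldap.example.com</server>
+    #         <search-base>dc=example,dc=com</search-base>
+    #         <search-filter>(mail={email})</search-filter>
+    #         <search-fields>uid</search-fields>
+    #         <bind-user>{dn}</bind-user>
+    #         <bind-password>{password}</bind-password>
+    #         <auto-register-email>{email}</auto-register-email>
+    #         <auto-register-username>{uid}</auto-register-username>
+    #     </options>
+    # </authenticator>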
+
+    def authenticate(self, email, username, password, options):
+        """
+        See abstract method documentation.
+        """
+        log.debug("LDAP authenticate: email is %s" % email)
+        log.debug("LDAP authenticate: username is %s" % username)
+        log.debug("LDAP authenticate: options are %s" % options)
+
+        failure_mode = False  # reject but continue
+        if options.get('continue-on-failure', 'False') == 'False':
+            failure_mode = None  # reject and do not continue
+
+        if _get_bool(options, 'login-use-username', False):
+            if username is None:
+                log.debug('LDAP authenticate: username must be used to login, cannot be None')
+                return (failure_mode, '', '')
+        else:
+            if email is None:
+                log.debug('LDAP authenticate: email must be used to login, cannot be None')
+                return (failure_mode, '', '')
+
+        try:
+            import ldap
+        except ImportError:
+            log.debug('LDAP authenticate: could not load ldap module')
+            return (failure_mode, '', '')
+
+        # do LDAP search (if required)
+        params = {'email': email, 'username': username, 'password': password}
+
+        try:
+            ldap_options_raw = _get_subs(options, 'ldap-options', params)
+        except ConfigurationError:
+            ldap_options = ()
+        else:
+            ldap_options = _parse_ldap_options(ldap, ldap_options_raw)
+
+        if 'search-fields' in options:
+            try:
+                # setup connection
+                ldap.set_option(ldap.OPT_REFERRALS, 0)
+
+                for opt in ldap_options:
+                    ldap.set_option(*opt)
+
+                l = ldap.initialize(_get_subs(options, 'server', params))
+                l.protocol_version = 3
+
+                if 'search-user' in options:
+                    l.simple_bind_s(_get_subs(options, 'search-user', params),
+                                    _get_subs(options, 'search-password', params))
+                else:
+                    l.simple_bind_s()
+
+                # setup search
+                attributes = [_.strip().format(**params)
+                              for _ in options['search-fields'].split(',')]
+                suser = l.search_ext_s(_get_subs(options, 'search-base', params),
+                    ldap.SCOPE_SUBTREE,
+                    _get_subs(options, 'search-filter', params), attributes,
+                    timeout=60, sizelimit=1)
+
+                # parse results
+                if suser is None or len(suser) == 0:
+                    log.warning('LDAP authenticate: search returned no results')
+                    return (failure_mode, '', '')
+                dn, attrs = suser[0]
+                log.debug(("LDAP authenticate: dn is %s" % dn))
+                log.debug(("LDAP authenticate: search attributes are %s" % attrs))
+                if hasattr(attrs, 'has_key'):
+                    for attr in attributes:
+                        if attr in attrs:
+                            params[attr] = str(attrs[attr][0])
+                        else:
+                            params[attr] = ""
+                params['dn'] = dn
+            except Exception:
+                log.exception('LDAP authenticate: search exception')
+                return (failure_mode, '', '')
+        # end search
+
+        # bind as user to check their credentials
+        try:
+            # setup connection
+            ldap.set_option(ldap.OPT_REFERRALS, 0)
+
+            for opt in ldap_options:
+                ldap.set_option(*opt)
+
+            l = ldap.initialize(_get_subs(options, 'server', params))
+            l.protocol_version = 3
+            bind_password = _get_subs(options, 'bind-password', params)
+            if not bind_password:
+                raise RuntimeError('LDAP authenticate: empty password')
+            l.simple_bind_s(_get_subs(
+                options, 'bind-user', params), bind_password)
+            try:
+                whoami = l.whoami_s()
+            except ldap.PROTOCOL_ERROR:
+                # The "Who am I?" extended operation is not supported by this LDAP server
+                pass
+            else:
+                log.debug("LDAP authenticate: whoami is %s", whoami)
+                if whoami is None:
+                    raise RuntimeError('LDAP authenticate: anonymous bind')
+        except Exception:
+            log.warning('LDAP authenticate: bind exception', exc_info=True)
+            return (failure_mode, '', '')
+
+        log.debug('LDAP authentication successful')
+        return (True,
+                _get_subs(options, 'auto-register-email', params),
+                _get_subs(options, 'auto-register-username', params))
+
+    def authenticate_user(self, user, password, options):
+        """
+        See abstract method documentation.
+        """
+        return self.authenticate(user.email, user.username, password, options)[0]
+
+
+class ActiveDirectory(LDAP):
+    """ Effectively just an alias for LDAP auth, but may contain active directory specific
+    logic in the future. """
+    plugin_type = 'activedirectory'
+
+
+__all__ = ('LDAP', 'ActiveDirectory')
diff --git a/lib/galaxy/auth/providers/localdb.py b/lib/galaxy/auth/providers/localdb.py
new file mode 100644
index 0000000..c853f84
--- /dev/null
+++ b/lib/galaxy/auth/providers/localdb.py
@@ -0,0 +1,32 @@
+"""
+Created on 16/07/2014
+
+@author: Andrew Robinson
+"""
+import logging
+
+from ..providers import AuthProvider
+
+log = logging.getLogger(__name__)
+
+
+class LocalDB(AuthProvider):
+    """Authenticate users against the local Galaxy database (as per usual)."""
+    plugin_type = 'localdb'
+
+    def authenticate(self, email, username, password, options):
+        """
+        See abstract method documentation.
+        """
+        return (False, '', '')  # it can never auto-create based on localdb (chicken-and-egg)
+
+    def authenticate_user(self, user, password, options):
+        """
+        See abstract method documentation.
+        """
+        user_ok = user.check_password(password)
+        log.debug("User: %s, LOCALDB: %s" % (user.email, user_ok))
+        return user_ok
+
+
+__all__ = ('LocalDB', )
diff --git a/lib/galaxy/auth/providers/pam_auth.py b/lib/galaxy/auth/providers/pam_auth.py
new file mode 100644
index 0000000..8763a18
--- /dev/null
+++ b/lib/galaxy/auth/providers/pam_auth.py
@@ -0,0 +1,152 @@
+"""
+Created on 13/07/2015
+
+Author Peter van Heusden (pvh@sanbi.ac.za)
+"""
+import logging
+import shlex
+from subprocess import PIPE, Popen
+
+from galaxy.auth import _get_bool
+
+from ..providers import AuthProvider
+
+log = logging.getLogger(__name__)
+
+"""
+This module provides an AuthProvider for PAM (pluggable authentication module) authentication.
+PAM is the Pluggable Authentication Module system (http://www.linux-pam.org/)
+It relies on python-pam (https://pypi.python.org/pypi/python-pam)
+
+Configuration is via config/auth_conf.xml and the following options are supported:
+  - auto-register: True/False: automatically register an account for an unknown user. Default: False
+  - maildomain: string: all valid users fall within the specified mail domain. Default: None
+  - login-use-email: True/False: Parse the email address to get login details. Default: False
+  - login-use-username: True/False: Use the username argument for login details. Default: False
+                                    Technical note: when a user is not found in the database,
+                                    their username is the user part of a user@host email
+                                    address. After user creation, however, the username is
+                                    the user's public name.
+  - pam-service: string: The service name to use for PAM authentication. Default: galaxy
+  - use-external-helper: True/False: Run an external helper script as root with sudo to do
+                                     authentication. If False authentication is done
+                                     by the module directly. Default: False
+                                     Technical note: some PAM modules (e.g. pam_unix.so)
+                                     require to be run as root to authenticate users.
+  - authentication-helper-script: string: Absolute path to helper script to run for authentication. Default: None
+                                          There needs to be a config (in /etc/sudoers or /etc/sudoers.d)
+                                          that allows the galaxy user to run this as root with no password check
+                                          For example:
+galaxy	ALL=(root) NOPASSWD: /opt/galaxy/scripts/auth/pam_auth_helper.py
+
+
+Configuration example (for internal authentication, use email for user details):
+<authenticator>
+  <type>PAM</type>
+  <options>
+          <auto-register>True</auto-register>
+          <maildomain>example.com</maildomain>
+          <login-use-email>True</login-use-email>
+          <pam-service>ssh</pam-service>
+  </options>
+</authenticator>
+"""
+
+
+class PAM(AuthProvider):
+
+    plugin_type = 'PAM'
+
+    def authenticate(self, email, username, password, options):
+        pam_username = None
+        auto_register_username = None
+        auto_register_email = None
+        force_fail = False
+        log.debug("use username: {} use email {} email {} username {}".format(options.get('login-use-username'), options.get('login-use-email', False), email, username))
+        # check email based login first because if email exists in Galaxy DB
+        # we will be given the "public name" as username
+        if _get_bool(options, 'login-use-email', False) and email is not None:
+            if '@' in email:
+                (email_user, email_domain) = email.split('@')
+                pam_username = email_user
+                if email_domain == options.get('maildomain', None):
+                    auto_register_email = email
+                    if username is not None:
+                        auto_register_username = username
+                    else:
+                        auto_register_username = email_user
+                else:
+                    log.debug('PAM authenticate: warning: email does not match configured PAM maildomain')
+                    # no need to fail: if auto-register is not enabled, this
+                    # might still be a valid user
+            else:
+                log.debug('PAM authenticate: email must be used to login, but no valid email found')
+                force_fail = True
+        elif _get_bool(options, 'login-use-username', False):
+            # if we get here via authenticate_user then
+            # user will be "public name" and
+            # email address will be as per registered user
+            if username is not None:
+                pam_username = username
+                if email is not None:
+                    auto_register_email = email
+                elif options.get('maildomain', None) is not None:
+                    # we can register a user with this username and mail domain
+                    # if auto registration is enabled
+                    auto_register_email = '{}@{}'.format(username, options['maildomain'])
+                auto_register_username = username
+            else:
+                log.debug('PAM authenticate: username login selected but no username provided')
+                force_fail = True
+        else:
+            log.debug('PAM authenticate: could not find username for PAM')
+            force_fail = True
+
+        if force_fail:
+            return None, '', ''
+
+        pam_service = options.get('pam-service', 'galaxy')
+        use_helper = _get_bool(options, 'use-external-helper', False)
+        log.debug("PAM auth: will use external helper: {}".format(use_helper))
+        authenticated = False
+        if use_helper:
+            authentication_helper = options.get('authentication-helper-script', '/bin/false').strip()
+            log.debug("PAM auth: external helper script: {}".format(authentication_helper))
+            if not authentication_helper.startswith('/'):
+                # don't accept relative path
+                authenticated = False
+            else:
+                auth_cmd = shlex.split('/usr/bin/sudo -n {}'.format(authentication_helper))
+                log.debug("PAM auth: external helper cmd: {}".format(auth_cmd))
+                proc = Popen(auth_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
+                message = '{}\n{}\n{}\n'.format(pam_service, pam_username, password)
+                (output, error) = proc.communicate(message)
+                status = proc.wait()
+                if status != 0 and error != '':
+                    log.debug("PAM auth: external authentication script had errors: status {} error {}".format(status, error))
+                if output.strip() == 'True':
+                    authenticated = True
+                else:
+                    authenticated = False
+        else:
+            try:
+                import pam
+            except ImportError:
+                log.debug('PAM authenticate: could not load pam module, PAM authentication disabled')
+                return None, '', ''
+
+            p_auth = pam.pam()
+            authenticated = p_auth.authenticate(pam_username, password, service=pam_service)
+
+        if authenticated:
+            log.debug('PAM authentication successful for {}'.format(pam_username))
+            return True, auto_register_email, auto_register_username
+        else:
+            log.debug('PAM authentication failed for {}'.format(pam_username))
+            return False, '', ''
+
+    def authenticate_user(self, user, password, options):
+        return self.authenticate(user.email, user.username, password, options)[0]
+
+
+__all__ = ('PAM', )
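+
+# Protocol sketch for the external helper, inferred from the Popen call above:
+# the script (run as root via sudo -n) receives three newline-terminated lines
+# on stdin -- PAM service, username, password -- and must print the literal
+# string 'True' for success. A minimal hypothetical implementation:
+#
+#     #!/usr/bin/env python
+#     import sys
+#     import pam
+#     service, username, password = sys.stdin.read().splitlines()[:3]
+#     sys.stdout.write(str(pam.pam().authenticate(username, password, service=service)))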
diff --git a/lib/galaxy/config.py b/lib/galaxy/config.py
new file mode 100644
index 0000000..49f49bd
--- /dev/null
+++ b/lib/galaxy/config.py
@@ -0,0 +1,982 @@
+"""
+Universe configuration builder.
+"""
+# absolute_import needed for tool_shed package.
+from __future__ import absolute_import
+
+import logging
+import logging.config
+import os
+import re
+import signal
+import socket
+import string
+import sys
+import tempfile
+import threading
+import time
+
+from datetime import timedelta
+
+from six import string_types
+from six.moves import configparser
+
+from galaxy.exceptions import ConfigurationError
+from galaxy.util import listify
+from galaxy.util import string_as_bool
+from galaxy.util.dbkeys import GenomeBuilds
+from galaxy.util.postfork import register_postfork_function
+from galaxy.web.formatting import expand_pretty_datetime_format
+from .version import VERSION_MAJOR
+
+log = logging.getLogger( __name__ )
+
+
+def resolve_path( path, root ):
+    """If 'path' is relative make absolute by prepending 'root'"""
+    if not os.path.isabs( path ):
+        path = os.path.join( root, path )
+    return path
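+
+
+# For example, resolve_path( 'config/tool_conf.xml', '/srv/galaxy' ) returns
+# '/srv/galaxy/config/tool_conf.xml' (illustrative root), while an absolute
+# path is returned unchanged.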
+
+
+class Configuration( object ):
+    deprecated_options = ( 'database_file', )
+
+    def __init__( self, **kwargs ):
+        self.config_dict = kwargs
+        self.root = kwargs.get( 'root_dir', '.' )
+
+        # Resolve paths of other config files
+        self.__parse_config_file_options( kwargs )
+
+        # Collect the umask and primary gid from the environment
+        self.umask = os.umask( 0o77 )  # get the current umask
+        os.umask( self.umask )  # can't get w/o set, so set it back
+        self.gid = os.getgid()  # if running under newgrp(1) we'll need to fix the group of data created on the cluster
+
+        self.version_major = VERSION_MAJOR
+        # Database related configuration
+        self.database = resolve_path( kwargs.get( "database_file", "database/universe.sqlite" ), self.root )
+        self.database_connection = kwargs.get( "database_connection", False )
+        self.database_engine_options = get_database_engine_options( kwargs )
+        self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
+        self.database_query_profiling_proxy = string_as_bool( kwargs.get( "database_query_profiling_proxy", "False" ) )
+
+        # Don't set this to true for production databases, but probably should
+        # default to True for sqlite databases.
+        self.database_auto_migrate = string_as_bool( kwargs.get( "database_auto_migrate", "False" ) )
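+
+        # Typical database_connection values (illustrative only):
+        #     sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE
+        #     postgresql://galaxy:secret@localhost/galaxy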
+
+        # Install database related configuration (if different).
+        self.install_database_connection = kwargs.get( "install_database_connection", None )
+        self.install_database_engine_options = get_database_engine_options( kwargs, model_prefix="install_" )
+
+        # Where dataset files are stored
+        self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
+        self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
+        tempfile.tempdir = self.new_file_path
+        self.openid_consumer_cache_path = resolve_path( kwargs.get( "openid_consumer_cache_path", "database/openid_consumer_cache" ), self.root )
+        self.cookie_path = kwargs.get( "cookie_path", "/" )
+        # Galaxy OpenID settings
+        self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
+        self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
+        self.enable_unique_workflow_defaults = string_as_bool( kwargs.get( 'enable_unique_workflow_defaults', False ) )
+        self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
+        self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
+        self.builds_file_path = resolve_path( kwargs.get( "builds_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'builds.txt') ), self.root )
+        self.len_file_path = resolve_path( kwargs.get( "len_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'chrom') ), self.root )
+        # The value of migrated_tools_config is the file reserved for containing only those tools that have been eliminated from the distribution
+        # and moved to the tool shed.
+        self.integrated_tool_panel_config = resolve_path( kwargs.get( 'integrated_tool_panel_config', 'integrated_tool_panel.xml' ), self.root )
+        integrated_tool_panel_tracking_directory = kwargs.get( 'integrated_tool_panel_tracking_directory', None )
+        if integrated_tool_panel_tracking_directory:
+            self.integrated_tool_panel_tracking_directory = resolve_path( integrated_tool_panel_tracking_directory, self.root )
+        else:
+            self.integrated_tool_panel_tracking_directory = None
+        self.toolbox_filter_base_modules = listify( kwargs.get( "toolbox_filter_base_modules", "galaxy.tools.filters,galaxy.tools.toolbox.filters" ) )
+        self.tool_filters = listify( kwargs.get( "tool_filters", [] ), do_strip=True )
+        self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ), do_strip=True )
+        self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ), do_strip=True )
+
+        self.user_tool_filters = listify( kwargs.get( "user_tool_filters", [] ), do_strip=True )
+        self.user_label_filters = listify( kwargs.get( "user_tool_label_filters", [] ), do_strip=True )
+        self.user_section_filters = listify( kwargs.get( "user_tool_section_filters", [] ), do_strip=True )
+
+        self.tour_config_dir = resolve_path( kwargs.get("tour_config_dir", "config/plugins/tours"), self.root)
+        self.webhooks_dirs = resolve_path( kwargs.get("webhooks_dir", "config/plugins/webhooks"), self.root)
+
+        self.expose_user_name = kwargs.get( "expose_user_name", False )
+        self.expose_user_email = kwargs.get( "expose_user_email", False )
+        self.password_expiration_period = timedelta( days=int( kwargs.get( "password_expiration_period", 0 ) ) )
+
+        # Check for tools defined in the above non-shed tool configs (i.e., tool_conf.xml) that have
+        # been migrated from the Galaxy code distribution to the Tool Shed.
+        self.check_migrate_tools = string_as_bool( kwargs.get( 'check_migrate_tools', True ) )
+        self.shed_tool_data_path = kwargs.get( "shed_tool_data_path", None )
+        self.x_frame_options = kwargs.get( "x_frame_options", "SAMEORIGIN" )
+        if self.shed_tool_data_path:
+            self.shed_tool_data_path = resolve_path( self.shed_tool_data_path, self.root )
+        else:
+            self.shed_tool_data_path = self.tool_data_path
+        self.manage_dependency_relationships = string_as_bool( kwargs.get( 'manage_dependency_relationships', False ) )
+        self.running_functional_tests = string_as_bool( kwargs.get( 'running_functional_tests', False ) )
+        self.hours_between_check = kwargs.get( 'hours_between_check', 12 )
+        self.enable_tool_shed_check = string_as_bool( kwargs.get( 'enable_tool_shed_check', False ) )
+        if isinstance( self.hours_between_check, string_types ):
+            self.hours_between_check = float( self.hours_between_check )
+        try:
+            if isinstance( self.hours_between_check, int ):
+                if self.hours_between_check < 1 or self.hours_between_check > 24:
+                    self.hours_between_check = 12
+            elif isinstance( self.hours_between_check, float ):
+                # If we're running functional tests, the minimum hours between check should be reduced to 0.001, or 3.6 seconds.
+                if self.running_functional_tests:
+                    if self.hours_between_check < 0.001 or self.hours_between_check > 24.0:
+                        self.hours_between_check = 12.0
+                else:
+                    if self.hours_between_check < 1.0 or self.hours_between_check > 24.0:
+                        self.hours_between_check = 12.0
+            else:
+                self.hours_between_check = 12
+        except Exception:
+            self.hours_between_check = 12
+        self.update_integrated_tool_panel = kwargs.get( "update_integrated_tool_panel", True )
+        self.enable_data_manager_user_view = string_as_bool( kwargs.get( "enable_data_manager_user_view", "False" ) )
+        self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_path', self.tool_data_path )
+        self.tool_secret = kwargs.get( "tool_secret", "" )
+        self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
+        self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
+        self.max_metadata_value_size = int( kwargs.get( "max_metadata_value_size", 5242880 ) )
+        self.single_user = kwargs.get( "single_user", None )
+        self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) ) or self.single_user
+        self.normalize_remote_user_email = string_as_bool( kwargs.get( "normalize_remote_user_email", "False" ) )
+        self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
+        self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' )
+        self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
+        self.remote_user_secret = kwargs.get( "remote_user_secret", None )
+        self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
+        self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
+        self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
+        self.allow_user_dataset_purge = string_as_bool( kwargs.get( "allow_user_dataset_purge", "True" ) )
+        self.allow_user_impersonation = string_as_bool( kwargs.get( "allow_user_impersonation", "False" ) )
+        self.new_user_dataset_access_role_default_private = string_as_bool( kwargs.get( "new_user_dataset_access_role_default_private", "False" ) )
+        self.collect_outputs_from = [ x.strip() for x in kwargs.get( 'collect_outputs_from', 'new_file_path,job_working_directory' ).lower().split(',') ]
+        self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
+        self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates" ), self.root )
+        self.local_job_queue_workers = int( kwargs.get( "local_job_queue_workers", "5" ) )
+        self.cluster_job_queue_workers = int( kwargs.get( "cluster_job_queue_workers", "3" ) )
+        self.job_queue_cleanup_interval = int( kwargs.get("job_queue_cleanup_interval", "5") )
+        self.cluster_files_directory = os.path.abspath( kwargs.get( "cluster_files_directory", "database/pbs" ) )
+
+        # Fall back to legacy job_working_directory config variable if set.
+        default_jobs_directory = kwargs.get( "job_working_directory", "database/jobs_directory" )
+        self.jobs_directory = resolve_path( kwargs.get( "jobs_directory", default_jobs_directory ), self.root )
+        self.default_job_shell = kwargs.get( "default_job_shell", "/bin/bash" )
+        self.cleanup_job = kwargs.get( "cleanup_job", "always" )
+        self.container_image_cache_path = self.resolve_path( kwargs.get( "container_image_cache_path", "database/container_images" ) )
+        self.outputs_to_working_directory = string_as_bool( kwargs.get( 'outputs_to_working_directory', False ) )
+        self.output_size_limit = int( kwargs.get( 'output_size_limit', 0 ) )
+        self.retry_job_output_collection = int( kwargs.get( 'retry_job_output_collection', 0 ) )
+        self.check_job_script_integrity = string_as_bool( kwargs.get( "check_job_script_integrity", True ) )
+        self.job_walltime = kwargs.get( 'job_walltime', None )
+        self.job_walltime_delta = None
+        if self.job_walltime is not None:
+            h, m, s = [ int( v ) for v in self.job_walltime.split( ':' ) ]
+            self.job_walltime_delta = timedelta( 0, s, 0, 0, m, h )
+        self.admin_users = kwargs.get( "admin_users", "" )
+        self.admin_users_list = [u.strip() for u in self.admin_users.split(',') if u]
+        self.mailing_join_addr = kwargs.get('mailing_join_addr', 'galaxy-announce-join@bx.psu.edu')
+        self.error_email_to = kwargs.get( 'error_email_to', None )
+        # activation_email was used until release_15.03
+        activation_email = kwargs.get( 'activation_email', None )
+        self.email_from = kwargs.get( 'email_from', activation_email )
+        self.user_activation_on = string_as_bool( kwargs.get( 'user_activation_on', False ) )
+        self.activation_grace_period = int( kwargs.get( 'activation_grace_period', 3 ) )
+        default_inactivity_box_content = ( "Your account has not been activated yet. Feel free to browse around and see what's available, but"
+                                           " you won't be able to upload data or run jobs until you have verified your email address." )
+        self.inactivity_box_content = kwargs.get( 'inactivity_box_content', default_inactivity_box_content )
+        self.terms_url = kwargs.get( 'terms_url', None )
+        self.instance_resource_url = kwargs.get( 'instance_resource_url', None )
+        self.registration_warning_message = kwargs.get( 'registration_warning_message', None )
+        self.ga_code = kwargs.get( 'ga_code', None )
+        self.session_duration = int(kwargs.get( 'session_duration', 0 ))
+        #  Get the disposable email domains blacklist file and its contents
+        self.blacklist_location = kwargs.get( 'blacklist_file', None )
+        self.blacklist_content = None
+        if self.blacklist_location is not None:
+            self.blacklist_file = resolve_path( kwargs.get( 'blacklist_file', None ), self.root )
+            try:
+                with open( self.blacklist_file ) as blacklist:
+                    self.blacklist_content = [ line.rstrip() for line in blacklist.readlines() ]
+            except IOError:
+                log.error( "CONFIGURATION ERROR: Can't open supplied blacklist file from path: " + str( self.blacklist_file ) )
+        self.smtp_server = kwargs.get( 'smtp_server', None )
+        self.smtp_username = kwargs.get( 'smtp_username', None )
+        self.smtp_password = kwargs.get( 'smtp_password', None )
+        self.smtp_ssl = kwargs.get( 'smtp_ssl', None )
+        self.track_jobs_in_database = string_as_bool( kwargs.get( 'track_jobs_in_database', 'True') )
+        self.start_job_runners = listify(kwargs.get( 'start_job_runners', '' ))
+        self.expose_dataset_path = string_as_bool( kwargs.get( 'expose_dataset_path', 'False' ) )
+        self.enable_communication_server = string_as_bool( kwargs.get( 'enable_communication_server', 'False' ) )
+        self.communication_server_host = kwargs.get( 'communication_server_host', 'http://localhost' )
+        self.communication_server_port = int( kwargs.get( 'communication_server_port', '7070' ) )
+        self.persistent_communication_rooms = listify( kwargs.get( "persistent_communication_rooms", [] ), do_strip=True )
+        # External Service types used in sample tracking
+        self.external_service_type_path = resolve_path( kwargs.get( 'external_service_type_path', 'external_service_types' ), self.root )
+        # Tasked job runner.
+        self.use_tasked_jobs = string_as_bool( kwargs.get( 'use_tasked_jobs', False ) )
+        self.local_task_queue_workers = int(kwargs.get("local_task_queue_workers", 2))
+        self.tool_submission_burst_threads = int( kwargs.get( 'tool_submission_burst_threads', '1' ) )
+        self.tool_submission_burst_at = int( kwargs.get( 'tool_submission_burst_at', '10' ) )
+
+        # Enable new interface for API installations from TS.
+        # Admin menu will list both if enabled.
+        self.enable_beta_ts_api_install = string_as_bool( kwargs.get( 'enable_beta_ts_api_install', 'False' ) )
+        # The transfer manager and deferred job queue
+        self.enable_beta_job_managers = string_as_bool( kwargs.get( 'enable_beta_job_managers', 'False' ) )
+        # These workflow modules should not be considered part of Galaxy's
+        # public API yet - the module state definitions may change and
+        # workflows built using these modules may not function in the
+        # future.
+        self.enable_beta_workflow_modules = string_as_bool( kwargs.get( 'enable_beta_workflow_modules', 'False' ) )
+        # These are not even beta - just experiments - don't use them unless
+        # you want your tools to be broken in the future.
+        self.enable_beta_tool_formats = string_as_bool( kwargs.get( 'enable_beta_tool_formats', 'False' ) )
+
+        # Certain modules such as the pause module will automatically cause
+        # workflows to be scheduled in job handlers the way all workflows will
+        # be someday - the following two properties can also be used to force this
+        # behavior in under conditions - namely for workflows that have a minimum
+        # number of steps or that consume collections.
+        self.force_beta_workflow_scheduled_min_steps = int( kwargs.get( 'force_beta_workflow_scheduled_min_steps', '250' ) )
+        self.force_beta_workflow_scheduled_for_collections = string_as_bool( kwargs.get( 'force_beta_workflow_scheduled_for_collections', 'False' ) )
+
+        # Per-user Job concurrency limitations
+        self.cache_user_job_count = string_as_bool( kwargs.get( 'cache_user_job_count', False ) )
+        self.user_job_limit = int( kwargs.get( 'user_job_limit', 0 ) )
+        self.registered_user_job_limit = int( kwargs.get( 'registered_user_job_limit', self.user_job_limit ) )
+        self.anonymous_user_job_limit = int( kwargs.get( 'anonymous_user_job_limit', self.user_job_limit ) )
+        self.default_cluster_job_runner = kwargs.get( 'default_cluster_job_runner', 'local:///' )
+        self.pbs_application_server = kwargs.get('pbs_application_server', "" )
+        self.pbs_dataset_server = kwargs.get('pbs_dataset_server', "" )
+        self.pbs_dataset_path = kwargs.get('pbs_dataset_path', "" )
+        self.pbs_stage_path = kwargs.get('pbs_stage_path', "" )
+        self.drmaa_external_runjob_script = kwargs.get('drmaa_external_runjob_script', None )
+        self.drmaa_external_killjob_script = kwargs.get('drmaa_external_killjob_script', None)
+        self.external_chown_script = kwargs.get('external_chown_script', None)
+        self.environment_setup_file = kwargs.get( 'environment_setup_file', None )
+        self.use_heartbeat = string_as_bool( kwargs.get( 'use_heartbeat', 'False' ) )
+        self.heartbeat_interval = int( kwargs.get( 'heartbeat_interval', 20 ) )
+        self.heartbeat_log = kwargs.get( 'heartbeat_log', None )
+        self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
+        self.log_events = string_as_bool( kwargs.get( 'log_events', 'False' ) )
+        self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
+        self.sanitize_whitelist_file = resolve_path( kwargs.get( 'sanitize_whitelist_file', "config/sanitize_whitelist.txt" ), self.root )
+        self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
+        self.allowed_origin_hostnames = self._parse_allowed_origin_hostnames( kwargs )
+        self.trust_ipython_notebook_conversion = string_as_bool( kwargs.get( 'trust_ipython_notebook_conversion', False ) )
+        self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
+        self.brand = kwargs.get( 'brand', None )
+        self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
+        self.show_welcome_with_login = string_as_bool( kwargs.get( "show_welcome_with_login", "False" ) )
+        # Configuration for the message box directly below the masthead.
+        self.message_box_visible = string_as_bool( kwargs.get( 'message_box_visible', False ) )
+        self.message_box_content = kwargs.get( 'message_box_content', None )
+        self.message_box_class = kwargs.get( 'message_box_class', 'info' )
+        self.support_url = kwargs.get( 'support_url', 'https://wiki.galaxyproject.org/Support' )
+        self.wiki_url = kwargs.get( 'wiki_url', 'https://wiki.galaxyproject.org/' )
+        self.blog_url = kwargs.get( 'blog_url', None )
+        self.screencasts_url = kwargs.get( 'screencasts_url', None )
+        self.library_import_dir = kwargs.get( 'library_import_dir', None )
+        self.user_library_import_dir = kwargs.get( 'user_library_import_dir', None )
+        # Searching data libraries
+        self.enable_lucene_library_search = string_as_bool( kwargs.get( 'enable_lucene_library_search', False ) )
+        self.enable_whoosh_library_search = string_as_bool( kwargs.get( 'enable_whoosh_library_search', False ) )
+        self.whoosh_index_dir = resolve_path( kwargs.get( "whoosh_index_dir", "database/whoosh_indexes" ), self.root )
+        self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
+        self.ftp_upload_dir_identifier = kwargs.get( 'ftp_upload_dir_identifier', 'email' )  # attribute on user - email, username, id, etc...
+        self.ftp_upload_dir_template = kwargs.get( 'ftp_upload_dir_template', '${ftp_upload_dir}%s${ftp_upload_dir_identifier}' % os.path.sep )
+        self.ftp_upload_purge = string_as_bool( kwargs.get( 'ftp_upload_purge', 'True' ) )
+        self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
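+        # With the default template and identifier above, a user's FTP uploads
+        # are staged under ${ftp_upload_dir}/<email>; e.g. (hypothetical
+        # address) files for alice@example.org land in
+        # <ftp_upload_dir>/alice@example.org.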
+        self.allow_library_path_paste = string_as_bool( kwargs.get( 'allow_library_path_paste', False ) )
+        self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
+        self.watch_tools = kwargs.get( 'watch_tools', 'false' )
+        # One can mildly speed up Galaxy startup time by disabling the indexing of tool help -
+        # not needed on production systems, but useful when running many functional tests.
+        self.index_tool_help = string_as_bool( kwargs.get( "index_tool_help", True ) )
+        self.tool_name_boost = kwargs.get( "tool_name_boost", 9 )
+        self.tool_section_boost = kwargs.get( "tool_section_boost", 3 )
+        self.tool_description_boost = kwargs.get( "tool_description_boost", 2 )
+        self.tool_labels_boost = kwargs.get( "tool_labels_boost", 1 )
+        self.tool_stub_boost = kwargs.get( "tool_stub_boost", 5 )
+        self.tool_help_boost = kwargs.get( "tool_help_boost", 0.5 )
+        self.tool_search_limit = kwargs.get( "tool_search_limit", 20 )
+        # Location for tool dependencies.
+        tool_dependency_dir = kwargs.get( "tool_dependency_dir", "database/dependencies" )
+        if tool_dependency_dir.lower() == "none":
+            tool_dependency_dir = None
+
+        if tool_dependency_dir is not None:
+            self.tool_dependency_dir = resolve_path( tool_dependency_dir, self.root )
+            # Setting the following flag to true will ultimately cause tool dependencies
+            # to be located in the shell environment and used by the job that is executing
+            # the tool.
+            self.use_tool_dependencies = True
+        else:
+            self.tool_dependency_dir = None
+            self.use_tool_dependencies = os.path.exists(self.dependency_resolvers_config_file)
+        self.use_cached_dependency_manager = string_as_bool(kwargs.get("use_cached_dependency_manager", 'False'))
+        # Avoid joining against None when tool dependencies are disabled.
+        if self.tool_dependency_dir is not None:
+            default_tool_dependency_cache_dir = os.path.join( self.tool_dependency_dir, '_cache' )
+        else:
+            default_tool_dependency_cache_dir = None
+        self.tool_dependency_cache_dir = kwargs.get( 'tool_dependency_cache_dir', default_tool_dependency_cache_dir )
+
+        self.enable_beta_mulled_containers = string_as_bool( kwargs.get( 'enable_beta_mulled_containers', 'False' ) )
+        containers_resolvers_config_file = kwargs.get( 'containers_resolvers_config_file', None )
+        if containers_resolvers_config_file:
+            containers_resolvers_config_file = resolve_path(containers_resolvers_config_file, self.root)
+        self.containers_resolvers_config_file = containers_resolvers_config_file
+
+        involucro_path = kwargs.get('involucro_path', None)
+        if involucro_path is None:
+            # Guard against a disabled ("none") tool_dependency_dir; fall back
+            # to the default dependency location in that case.
+            involucro_path = os.path.join(tool_dependency_dir or "database/dependencies", "involucro")
+        self.involucro_path = resolve_path(involucro_path, self.root)
+        self.involucro_auto_init = string_as_bool(kwargs.get( 'involucro_auto_init', True))
+
+        # Configuration options for taking advantage of nginx features
+        self.upstream_gzip = string_as_bool( kwargs.get( 'upstream_gzip', False ) )
+        self.apache_xsendfile = string_as_bool( kwargs.get( 'apache_xsendfile', False ) )
+        self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
+        self.nginx_x_archive_files_base = kwargs.get( 'nginx_x_archive_files_base', False )
+        self.nginx_upload_store = kwargs.get( 'nginx_upload_store', False )
+        self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
+        self.nginx_upload_job_files_store = kwargs.get( 'nginx_upload_job_files_store', False )
+        self.nginx_upload_job_files_path = kwargs.get( 'nginx_upload_job_files_path', False )
+        if self.nginx_upload_store:
+            self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
+        self.object_store = kwargs.get( 'object_store', 'disk' )
+        self.object_store_check_old_style = string_as_bool( kwargs.get( 'object_store_check_old_style', False ) )
+        self.object_store_cache_path = resolve_path( kwargs.get( "object_store_cache_path", "database/object_store_cache" ), self.root )
+        # Handle AWS-specific config options for backward compatibility
+        if kwargs.get( 'aws_access_key', None) is not None:
+            self.os_access_key = kwargs.get( 'aws_access_key', None )
+            self.os_secret_key = kwargs.get( 'aws_secret_key', None )
+            self.os_bucket_name = kwargs.get( 's3_bucket', None )
+            self.os_use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
+        else:
+            self.os_access_key = kwargs.get( 'os_access_key', None )
+            self.os_secret_key = kwargs.get( 'os_secret_key', None )
+            self.os_bucket_name = kwargs.get( 'os_bucket_name', None )
+            self.os_use_reduced_redundancy = kwargs.get( 'os_use_reduced_redundancy', False )
+        self.os_host = kwargs.get( 'os_host', None )
+        self.os_port = kwargs.get( 'os_port', None )
+        self.os_is_secure = string_as_bool( kwargs.get( 'os_is_secure', True ) )
+        self.os_conn_path = kwargs.get( 'os_conn_path', '/' )
+        self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
+        self.distributed_object_store_config_file = kwargs.get( 'distributed_object_store_config_file', None )
+        if self.distributed_object_store_config_file is not None:
+            self.distributed_object_store_config_file = resolve_path( self.distributed_object_store_config_file, self.root )
+        self.irods_root_collection_path = kwargs.get( 'irods_root_collection_path', None )
+        self.irods_default_resource = kwargs.get( 'irods_default_resource', None )
+        # Parse global_conf and save the parser
+        global_conf = kwargs.get( 'global_conf', None )
+        global_conf_parser = configparser.ConfigParser()
+        self.config_file = None
+        self.global_conf_parser = global_conf_parser
+        if global_conf and "__file__" in global_conf:
+            self.config_file = global_conf['__file__']
+            global_conf_parser.read(global_conf['__file__'])
+        # Heartbeat log file name override
+        if global_conf is not None and 'heartbeat_log' in global_conf:
+            self.heartbeat_log = global_conf['heartbeat_log']
+        if self.heartbeat_log is None:
+            self.heartbeat_log = 'heartbeat_{server_name}.log'
+        # Determine which 'server:' this is
+        self.server_name = 'main'
+        for arg in sys.argv:
+            # Crummy, but PasteScript does not give you a way to determine this
+            if arg.lower().startswith('--server-name='):
+                self.server_name = arg.split('=', 1)[-1]
+        # Allow explicit override of server name in config params
+        if "server_name" in kwargs:
+            self.server_name = kwargs.get("server_name")
+        # Store all configured server names
+        self.server_names = []
+        for section in global_conf_parser.sections():
+            if section.startswith('server:'):
+                self.server_names.append(section.replace('server:', '', 1))
+
+        # Default URL (with scheme http/https) of the Galaxy instance within the
+        # local network - used to remotely communicate with the Galaxy API.
+        web_port = kwargs.get("galaxy_infrastructure_web_port", None)
+        self.galaxy_infrastructure_web_port = web_port
+        galaxy_infrastructure_url = kwargs.get( 'galaxy_infrastructure_url', None )
+        galaxy_infrastructure_url_set = True
+        if galaxy_infrastructure_url is None:
+            # Still provide a default but indicate it was not explicitly set
+            # so depending on the context a better default can be used (
+            # request url in a web thread, Docker parent in IE stuff, etc...)
+            galaxy_infrastructure_url = "http://localhost"
+            web_port = self.galaxy_infrastructure_web_port or self.guess_galaxy_port()
+            if web_port:
+                galaxy_infrastructure_url += ":%s" % (web_port)
+            galaxy_infrastructure_url_set = False
+        if "HOST_IP" in galaxy_infrastructure_url:
+            galaxy_infrastructure_url = string.Template(galaxy_infrastructure_url).safe_substitute({
+                'HOST_IP': socket.gethostbyname(socket.gethostname())
+            })
+        self.galaxy_infrastructure_url = galaxy_infrastructure_url
+        self.galaxy_infrastructure_url_set = galaxy_infrastructure_url_set
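+        # e.g. (illustrative) setting galaxy_infrastructure_url to
+        # http://HOST_IP:8080 substitutes this host's resolved IP address for
+        # HOST_IP at startup.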
+
+        # Store advanced job management config
+        self.job_manager = kwargs.get('job_manager', self.server_name).strip()
+        self.job_handlers = [ x.strip() for x in kwargs.get('job_handlers', self.server_name).split(',') ]
+        self.default_job_handlers = [ x.strip() for x in kwargs.get('default_job_handlers', ','.join( self.job_handlers ) ).split(',') ]
+        # Store per-tool runner configs
+        self.tool_handlers = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_handlers', 'name' )
+        self.tool_runners = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_runners', 'url' )
+        # Galaxy messaging (AMQP) configuration options
+        self.amqp = {}
+        try:
+            amqp_config = global_conf_parser.items("galaxy_amqp")
+        except configparser.NoSectionError:
+            amqp_config = {}
+        for k, v in amqp_config:
+            self.amqp[k] = v
+        # Galaxy internal control queue configuration.
+        # If specified in the config file, use it; otherwise we use whatever 'real'
+        # database is specified.  Lastly, we create and use a new sqlite database
+        # (to minimize locking) as a final option.
+        if 'amqp_internal_connection' in kwargs:
+            self.amqp_internal_connection = kwargs.get('amqp_internal_connection')
+            # TODO Get extra amqp args as necessary for ssl
+        elif 'database_connection' in kwargs:
+            self.amqp_internal_connection = "sqlalchemy+" + self.database_connection
+        else:
+            self.amqp_internal_connection = "sqlalchemy+sqlite:///%s?isolation_level=IMMEDIATE" % resolve_path( "database/control.sqlite", self.root )
+        self.biostar_url = kwargs.get( 'biostar_url', None )
+        self.biostar_key_name = kwargs.get( 'biostar_key_name', None )
+        self.biostar_key = kwargs.get( 'biostar_key', None )
+        self.biostar_enable_bug_reports = string_as_bool( kwargs.get( 'biostar_enable_bug_reports', True ) )
+        self.biostar_never_authenticate = string_as_bool( kwargs.get( 'biostar_never_authenticate', False ) )
+        self.pretty_datetime_format = expand_pretty_datetime_format( kwargs.get( 'pretty_datetime_format', '$locale (UTC)' ) )
+        self.master_api_key = kwargs.get( 'master_api_key', None )
+        if self.master_api_key == "changethis":  # default in sample config file
+            raise ConfigurationError("Insecure configuration, please change master_api_key to something other than default (changethis)")
+
+        # Experimental: This will not be enabled by default and will hide
+        # nonproduction code.
+        # api_folders controls whether the API exposes the /folders section.
+        self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
+        # This is for testing new library browsing capabilities.
+        self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
+        # Error logging with sentry
+        self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
+        # Statistics and profiling with statsd
+        self.statsd_host = kwargs.get( 'statsd_host', '')
+        self.statsd_port = int( kwargs.get( 'statsd_port', 8125 ) )
+        self.statsd_prefix = kwargs.get( 'statsd_prefix', 'galaxy' )
+        # Logging with fluentd
+        self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
+        self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
+        self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
+        # directory where the visualization registry searches for plugins
+        self.visualization_plugins_directory = kwargs.get(
+            'visualization_plugins_directory', 'config/plugins/visualizations' )
+        ie_dirs = kwargs.get( 'interactive_environment_plugins_directory', None )
+        self.gie_dirs = [d.strip() for d in (ie_dirs.split(",") if ie_dirs else [])]
+        if ie_dirs and not self.visualization_plugins_directory:
+            self.visualization_plugins_directory = ie_dirs
+        elif ie_dirs:
+            self.visualization_plugins_directory += ",%s" % ie_dirs
+
+        self.proxy_session_map = self.resolve_path( kwargs.get( "dynamic_proxy_session_map", "database/session_map.sqlite" ) )
+        self.manage_dynamic_proxy = string_as_bool( kwargs.get( "dynamic_proxy_manage", "True" ) )  # Set to false if being launched externally
+        self.dynamic_proxy_debug = string_as_bool( kwargs.get( "dynamic_proxy_debug", "False" ) )
+        self.dynamic_proxy_bind_port = int( kwargs.get( "dynamic_proxy_bind_port", "8800" ) )
+        self.dynamic_proxy_bind_ip = kwargs.get( "dynamic_proxy_bind_ip", "0.0.0.0" )
+        self.dynamic_proxy_external_proxy = string_as_bool( kwargs.get( "dynamic_proxy_external_proxy", "False" ) )
+        self.dynamic_proxy_prefix = kwargs.get( "dynamic_proxy_prefix", "gie_proxy" )
+
+        self.dynamic_proxy = kwargs.get( "dynamic_proxy", "node" )
+        self.dynamic_proxy_golang_noaccess = kwargs.get( "dynamic_proxy_golang_noaccess", 60 )
+        self.dynamic_proxy_golang_clean_interval = kwargs.get( "dynamic_proxy_golang_clean_interval", 10 )
+        self.dynamic_proxy_golang_docker_address = kwargs.get( "dynamic_proxy_golang_docker_address", "unix:///var/run/docker.sock" )
+        self.dynamic_proxy_golang_api_key = kwargs.get( "dynamic_proxy_golang_api_key", None )
+
+        # Default chunk size for chunkable datatypes -- 64k
+        self.display_chunk_size = int( kwargs.get( 'display_chunk_size', 65536) )
+
+        self.citation_cache_type = kwargs.get( "citation_cache_type", "file" )
+        self.citation_cache_data_dir = self.resolve_path( kwargs.get( "citation_cache_data_dir", "database/citations/data" ) )
+        self.citation_cache_lock_dir = self.resolve_path( kwargs.get( "citation_cache_lock_dir", "database/citations/locks" ) )
+
+    @property
+    def sentry_dsn_public( self ):
+        """
+        Sentry URL with private key removed for use in client side scripts;
+        the Sentry server will need to be configured to accept events.
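+
+        For example (illustrative DSN, not a real project), a configured
+        value of ``https://public:secret@sentry.example.org/1`` would be
+        returned as ``https://public@sentry.example.org/1``.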
+        """
+        if self.sentry_dsn:
+            return re.sub( r"^([^:/?#]+:)?//(\w+):(\w+)", r"\1//\2", self.sentry_dsn )
+        else:
+            return None
+
+    def reload_sanitize_whitelist( self, explicit=True ):
+        self.sanitize_whitelist = []
+        try:
+            with open(self.sanitize_whitelist_file, 'rt') as f:
+                for line in f.readlines():
+                    if not line.startswith("#"):
+                        self.sanitize_whitelist.append(line.strip())
+        except IOError:
+            if explicit:
+                log.warning("Sanitize log file explicitly specified as '%s' but does not exist, continuing with no tools whitelisted.", self.sanitize_whitelist_file)
+
+    def __parse_config_file_options( self, kwargs ):
+        """
+        Backwards compatibility for config files moved to the config/ dir.
+        """
+        defaults = dict(
+            auth_config_file=[ 'config/auth_conf.xml', 'config/auth_conf.xml.sample' ],
+            data_manager_config_file=[ 'config/data_manager_conf.xml', 'data_manager_conf.xml', 'config/data_manager_conf.xml.sample' ],
+            datatypes_config_file=[ 'config/datatypes_conf.xml', 'datatypes_conf.xml', 'config/datatypes_conf.xml.sample' ],
+            external_service_type_config_file=[ 'config/external_service_types_conf.xml', 'external_service_types_conf.xml', 'config/external_service_types_conf.xml.sample' ],
+            job_config_file=[ 'config/job_conf.xml', 'job_conf.xml' ],
+            tool_destinations_config_file=[ 'config/tool_destinations.yml', 'config/tool_destinations.yml.sample' ],
+            job_metrics_config_file=[ 'config/job_metrics_conf.xml', 'job_metrics_conf.xml', 'config/job_metrics_conf.xml.sample' ],
+            dependency_resolvers_config_file=[ 'config/dependency_resolvers_conf.xml', 'dependency_resolvers_conf.xml' ],
+            job_resource_params_file=[ 'config/job_resource_params_conf.xml', 'job_resource_params_conf.xml' ],
+            migrated_tools_config=[ 'migrated_tools_conf.xml', 'config/migrated_tools_conf.xml' ],
+            object_store_config_file=[ 'config/object_store_conf.xml', 'object_store_conf.xml' ],
+            openid_config_file=[ 'config/openid_conf.xml', 'openid_conf.xml', 'config/openid_conf.xml.sample' ],
+            shed_data_manager_config_file=[ 'shed_data_manager_conf.xml', 'config/shed_data_manager_conf.xml' ],
+            shed_tool_data_table_config=[ 'shed_tool_data_table_conf.xml', 'config/shed_tool_data_table_conf.xml' ],
+            tool_sheds_config_file=[ 'config/tool_sheds_conf.xml', 'tool_sheds_conf.xml', 'config/tool_sheds_conf.xml.sample' ],
+            workflow_schedulers_config_file=['config/workflow_schedulers_conf.xml', 'config/workflow_schedulers_conf.xml.sample'],
+        )
+
+        listify_defaults = dict(
+            tool_data_table_config_path=[ 'config/tool_data_table_conf.xml', 'tool_data_table_conf.xml', 'config/tool_data_table_conf.xml.sample' ],
+            # rationale:
+            # [0]: user has explicitly created config/tool_conf.xml but did not
+            #      move their existing shed_tool_conf.xml, don't use
+            #      config/shed_tool_conf.xml, which is probably the empty
+            #      version copied from the sample, or else their shed tools
+            #      will disappear
+            # [1]: user has created config/tool_conf.xml and, having passed
+            #      [0], probably moved their shed_tool_conf.xml as well
+            # [2]: user has done nothing, use the old files
+            # [3]: fresh install
+            tool_config_file=[ 'config/tool_conf.xml,shed_tool_conf.xml',
+                               'config/tool_conf.xml,config/shed_tool_conf.xml',
+                               'tool_conf.xml,shed_tool_conf.xml',
+                               'config/tool_conf.xml.sample,config/shed_tool_conf.xml' ]
+        )
+
+        for var, var_defaults in defaults.items():
+            if kwargs.get( var, None ) is not None:
+                path = kwargs.get( var )
+            else:
+                for default in var_defaults:
+                    if os.path.exists( resolve_path( default, self.root ) ):
+                        path = default
+                        break
+                else:
+                    path = var_defaults[-1]
+            setattr( self, var, resolve_path( path, self.root ) )
+
+        for var, defaults in listify_defaults.items():
+            paths = []
+            if kwargs.get( var, None ) is not None:
+                paths = listify( kwargs.get( var ) )
+            else:
+                for default in defaults:
+                    for path in listify( default ):
+                        if not os.path.exists( resolve_path( path, self.root ) ):
+                            break
+                    else:
+                        paths = listify( default )
+                        break
+                else:
+                    paths = listify( defaults[-1] )
+            setattr( self, var, [ resolve_path( x, self.root ) for x in paths ] )
+
+        # Backwards compatibility for names used in too many places to fix
+        self.datatypes_config = self.datatypes_config_file
+        self.tool_configs = self.tool_config_file
+
+    def __read_tool_job_config( self, global_conf_parser, section, key ):
+        try:
+            tool_runners_config = global_conf_parser.items( section )
+
+            # Process config to group multiple configs for the same tool.
+            rval = {}
+            for entry in tool_runners_config:
+                tool_config, val = entry
+                tool = None
+                runner_dict = {}
+                if tool_config.find("[") != -1:
+                    # Found tool with additional params; put params in dict.
+                    tool, params = tool_config[:-1].split( "[" )
+                    param_dict = {}
+                    for param in params.split( "," ):
+                        name, value = param.split( "@" )
+                        param_dict[ name ] = value
+                    runner_dict[ 'params' ] = param_dict
+                else:
+                    tool = tool_config
+
+                # Add runner URL.
+                runner_dict[ key ] = val
+
+                # Create tool entry if necessary.
+                if tool not in rval:
+                    rval[ tool ] = []
+
+                # Add entry to runners.
+                rval[ tool ].append( runner_dict )
+
+            return rval
+        except configparser.NoSectionError:
+            return {}
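+    # Illustrative fragment parsed by __read_tool_job_config (hypothetical tool
+    # ids and runner URLs):
+    #
+    #     [galaxy:tool_runners]
+    #     bowtie2 = pbs:///
+    #     bowtie2[source@trimmed] = drmaa:///
+    #
+    # yields:
+    #
+    #     { 'bowtie2': [ { 'url': 'pbs:///' },
+    #                    { 'url': 'drmaa:///', 'params': { 'source': 'trimmed' } } ] }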
+
+    def get( self, key, default ):
+        return self.config_dict.get( key, default )
+
+    def get_bool( self, key, default ):
+        if key in self.config_dict:
+            return string_as_bool( self.config_dict[key] )
+        else:
+            return default
+
+    def ensure_tempdir( self ):
+        self._ensure_directory( self.new_file_path )
+
+    def _ensure_directory( self, path ):
+        if path not in [ None, False ] and not os.path.isdir( path ):
+            try:
+                os.makedirs( path )
+            except Exception as e:
+                raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
+
+    def check( self ):
+        paths_to_check = [ self.root, self.tool_path, self.tool_data_path, self.template_path ]
+        # Check that required directories exist
+        for path in paths_to_check:
+            if path not in [ None, False ] and not os.path.isdir( path ):
+                try:
+                    os.makedirs( path )
+                except Exception as e:
+                    raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
+        # Create the directories that it makes sense to create
+        for path in (self.new_file_path, self.template_cache, self.ftp_upload_dir,
+                     self.library_import_dir, self.user_library_import_dir,
+                     self.nginx_upload_store, self.whoosh_index_dir,
+                     self.object_store_cache_path):
+            self._ensure_directory( path )
+        # Check that required files exist
+        tool_configs = self.tool_configs
+        if self.migrated_tools_config not in tool_configs:
+            tool_configs.append( self.migrated_tools_config )
+        for path in tool_configs:
+            if not os.path.exists( path ):
+                raise ConfigurationError("Tool config file not found: %s" % path )
+        for datatypes_config in listify( self.datatypes_config ):
+            if not os.path.isfile( datatypes_config ):
+                raise ConfigurationError("Datatypes config file not found: %s" % datatypes_config )
+        # Check for deprecated options.
+        for key in self.config_dict.keys():
+            if key in self.deprecated_options:
+                log.warning( "Config option '%s' is deprecated and will be removed in a future release.  Please consult the latest version of the sample configuration file." % key )
+
+    def is_admin_user( self, user ):
+        """
+        Determine if the provided user is listed in `admin_users`.
+
+        NOTE: This is temporary, admin users will likely be specified in the
+              database in the future.
+        """
+        admin_users = [ x.strip() for x in self.get( "admin_users", "" ).split( "," ) ]
+        return user is not None and user.email in admin_users
+
+    def resolve_path( self, path ):
+        """ Resolve a path relative to Galaxy's root.
+        """
+        return resolve_path( path, self.root )
+
+    def guess_galaxy_port(self):
+        # Code derived from IPython work ie.mako
+        config = configparser.SafeConfigParser({'port': '8080'})
+        if self.config_file:
+            config.read( self.config_file )
+
+        try:
+            port = config.getint('server:%s' % self.server_name, 'port')
+        except Exception:
+            # uWSGI Galaxy installations don't use Paste and only speak the
+            # uwsgi protocol, not HTTP, so there may be no port to read.
+            port = None
+        return port
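+    # e.g. (illustrative) a Paste-style server section such as
+    #
+    #     [server:main]
+    #     port = 8080
+    #
+    # yields 8080; uWSGI-only deployments without such a section yield None.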
+
+    def _parse_allowed_origin_hostnames( self, kwargs ):
+        """
+        Parse a CSV list of strings/regexp of hostnames that should be allowed
+        to use CORS and will be sent the Access-Control-Allow-Origin header.
+        """
+        allowed_origin_hostnames = listify( kwargs.get( 'allowed_origin_hostnames', None ) )
+        if not allowed_origin_hostnames:
+            return None
+
+        def parse( string ):
+            # a string enclosed in fwd slashes will be parsed as a regexp: e.g. /<some val>/
+            if string[0] == '/' and string[-1] == '/':
+                string = string[1:-1]
+                return re.compile( string, flags=( re.UNICODE | re.LOCALE ) )
+            return string
+
+        return [ parse( v ) for v in allowed_origin_hostnames if v ]
+
+
+def get_database_engine_options( kwargs, model_prefix='' ):
+    """
+    Allow options for the SQLAlchemy database engine to be passed by using
+    the prefix "database_engine_option".
+    """
+    conversions = {
+        'convert_unicode': string_as_bool,
+        'pool_timeout': int,
+        'echo': string_as_bool,
+        'echo_pool': string_as_bool,
+        'pool_recycle': int,
+        'pool_size': int,
+        'max_overflow': int,
+        'pool_threadlocal': string_as_bool,
+        'server_side_cursors': string_as_bool
+    }
+    prefix = "%sdatabase_engine_option_" % model_prefix
+    prefix_len = len( prefix )
+    rval = {}
+    for key, value in kwargs.items():
+        if key.startswith( prefix ):
+            key = key[prefix_len:]
+            if key in conversions:
+                value = conversions[key](value)
+            rval[ key ] = value
+    return rval
+
+
+def configure_logging( config ):
+    """
+    Allow some basic logging configuration to be read from ini file.
+    """
+    # Get root logger
+    root = logging.getLogger()
+    # PasteScript will have already configured the logger if the
+    # 'loggers' section was found in the config file, otherwise we do
+    # some simple setup using the 'log_*' values from the config.
+    paste_configures_logging = config.global_conf_parser.has_section( "loggers" )
+    auto_configure_logging = not paste_configures_logging and string_as_bool( config.get( "auto_configure_logging", "True" ) )
+    if auto_configure_logging:
+        format = config.get( "log_format", "%(name)s %(levelname)s %(asctime)s %(message)s" )
+        level = logging._levelNames[ config.get( "log_level", "DEBUG" ) ]
+        destination = config.get( "log_destination", "stdout" )
+        log.info( "Logging at '%s' level to '%s'" % ( level, destination ) )
+        # Set level
+        root.setLevel( level )
+
+        disable_chatty_loggers = string_as_bool( config.get( "auto_configure_logging_disable_chatty", "True" ) )
+        if disable_chatty_loggers:
+            # Turn down paste httpserver logging
+            if level <= logging.DEBUG:
+                for chatty_logger in ["paste.httpserver.ThreadPool", "routes.middleware"]:
+                    logging.getLogger( chatty_logger ).setLevel( logging.WARN )
+
+        # Remove old handlers
+        for h in root.handlers[:]:
+            root.removeHandler(h)
+        # Create handler
+        if destination == "stdout":
+            handler = logging.StreamHandler( sys.stdout )
+        else:
+            handler = logging.FileHandler( destination )
+        # Create formatter
+        formatter = logging.Formatter( format )
+        # Hook everything up
+        handler.setFormatter( formatter )
+        root.addHandler( handler )
+    # If sentry is configured, also log to it
+    if config.sentry_dsn:
+        from raven.handlers.logging import SentryHandler
+        sentry_handler = SentryHandler( config.sentry_dsn )
+        sentry_handler.setLevel( logging.WARN )
+        register_postfork_function(root.addHandler, sentry_handler)
+
+
+class ConfiguresGalaxyMixin:
+    """ Shared code for configuring Galaxy-like app objects.
+    """
+
+    def _configure_genome_builds( self, data_table_name="__dbkeys__", load_old_style=True ):
+        self.genome_builds = GenomeBuilds( self, data_table_name=data_table_name, load_old_style=load_old_style )
+
+    def wait_for_toolbox_reload(self, old_toolbox):
+        while True:
+            # Wait until toolbox reload has been triggered
+            # and make sure the toolbox has finished reloading
+            if self.toolbox.has_reloaded(old_toolbox):
+                break
+
+            time.sleep(1)
+
+    def reload_toolbox(self):
+        # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
+
+        tool_configs = self.config.tool_configs
+        if self.config.migrated_tools_config not in tool_configs:
+            tool_configs.append( self.config.migrated_tools_config )
+
+        from galaxy import tools
+        old_toolbox = self.toolbox
+        self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
+        self.reindex_tool_search()
+        if old_toolbox:
+            old_toolbox.shutdown()
+
+    def _configure_toolbox( self ):
+        from galaxy.managers.citations import CitationsManager
+        self.citations_manager = CitationsManager( self )
+
+        from galaxy.tools.toolbox.cache import ToolCache
+        from galaxy.tools.toolbox.lineages.tool_shed import ToolVersionCache
+        self.tool_cache = ToolCache()
+        self.tool_version_cache = ToolVersionCache(self)
+
+        self._toolbox_lock = threading.RLock()
+        self.toolbox = None
+        self.reload_toolbox()
+
+        from galaxy.tools.deps import containers
+        galaxy_root_dir = os.path.abspath(self.config.root)
+        file_path = os.path.abspath(getattr(self.config, "file_path"))
+        app_info = containers.AppInfo(
+            galaxy_root_dir=galaxy_root_dir,
+            default_file_path=file_path,
+            outputs_to_working_directory=self.config.outputs_to_working_directory,
+            container_image_cache_path=self.config.container_image_cache_path,
+            library_import_dir=self.config.library_import_dir,
+            enable_beta_mulled_containers=self.config.enable_beta_mulled_containers,
+            containers_resolvers_config_file=self.config.containers_resolvers_config_file,
+            involucro_path=self.config.involucro_path,
+            involucro_auto_init=self.config.involucro_auto_init,
+        )
+        self.container_finder = containers.ContainerFinder(app_info)
+
+    def reindex_tool_search( self, toolbox=None ):
+        # Call this when tools are added or removed.
+        import galaxy.tools.search
+        index_help = getattr( self.config, "index_tool_help", True )
+        if not toolbox:
+            toolbox = self.toolbox
+        self.toolbox_search = galaxy.tools.search.ToolBoxSearch( toolbox, index_help )
+
+    def _configure_tool_data_tables( self, from_shed_config ):
+        from galaxy.tools.data import ToolDataTableManager
+
+        # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
+        self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path,
+                                                      config_filename=self.config.tool_data_table_config_path )
+        # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
+        self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
+                                                     tool_data_path=self.tool_data_tables.tool_data_path,
+                                                     from_shed_config=from_shed_config )
+
+    def _configure_datatypes_registry( self, installed_repository_manager=None ):
+        from galaxy.datatypes import registry
+        # Create an empty datatypes registry.
+        self.datatypes_registry = registry.Registry()
+        if installed_repository_manager:
+            # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories.  We
+            # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
+            # generic sniffers (e.g. text, xml) which catch anything, so it's impossible for proprietary sniffers to be used.
+            # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
+            # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence.  If there is a conflict
+            # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
+            installed_repository_manager.load_proprietary_datatypes()
+        # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
+        datatypes_configs = self.config.datatypes_config
+        for datatypes_config in listify( datatypes_configs ):
+            # Setting override=False would make earlier files take
+            # precedence - but then they wouldn't override tool shed
+            # datatypes.
+            self.datatypes_registry.load_datatypes( self.config.root, datatypes_config, override=True )
+
+    def _configure_object_store( self, **kwds ):
+        from galaxy.objectstore import build_object_store_from_config
+        self.object_store = build_object_store_from_config( self.config, **kwds )
+
+    def _configure_security( self ):
+        from galaxy.web import security
+        self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+
+    def _configure_tool_shed_registry( self ):
+        import tool_shed.tool_shed_registry
+
+        # Set up the tool sheds registry
+        if os.path.isfile( self.config.tool_sheds_config_file ):
+            self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config_file )
+        else:
+            self.tool_shed_registry = None
+
+    def _configure_models( self, check_migrate_databases=False, check_migrate_tools=False, config_file=None ):
+        """
+        Preconditions: object_store must be set on self.
+        """
+        if self.config.database_connection:
+            db_url = self.config.database_connection
+        else:
+            db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+        install_db_url = self.config.install_database_connection
+        # TODO: Consider more aggressive check here that this is not the same
+        # database file under the hood.
+        combined_install_database = not( install_db_url and install_db_url != db_url )
+        install_db_url = install_db_url or db_url
+
+        if check_migrate_databases:
+            # Initialize database / check for appropriate schema version.  If this
+            # is a new installation, we'll restrict the tool migration messaging.
+            from galaxy.model.migrate.check import create_or_verify_database
+            create_or_verify_database( db_url, config_file, self.config.database_engine_options, app=self )
+            if not combined_install_database:
+                from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
+                tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )
+
+        if check_migrate_tools:
+            # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
+            from tool_shed.galaxy_install.migrate.check import verify_tools
+            if combined_install_database:
+                install_database_options = self.config.database_engine_options
+            else:
+                install_database_options = self.config.install_database_engine_options
+            verify_tools( self, install_db_url, config_file, install_database_options )
+
+        from galaxy.model import mapping
+        self.model = mapping.init( self.config.file_path,
+                                   db_url,
+                                   self.config.database_engine_options,
+                                   map_install_models=combined_install_database,
+                                   database_query_profiling_proxy=self.config.database_query_profiling_proxy,
+                                   object_store=self.object_store,
+                                   trace_logger=getattr(self, "trace_logger", None),
+                                   use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
+
+        if combined_install_database:
+            log.info("Install database targetting Galaxy's database configuration.")
+            self.install_model = self.model
+        else:
+            from galaxy.model.tool_shed_install import mapping as install_mapping
+            install_db_url = self.config.install_database_connection
+            log.info("Install database using its own connection %s" % install_db_url)
+            install_db_engine_options = self.config.install_database_engine_options
+            self.install_model = install_mapping.init( install_db_url,
+                                                       install_db_engine_options )
+
+    def _configure_signal_handlers( self, handlers ):
+        for sig, handler in handlers.items():
+            signal.signal( sig, handler )
diff --git a/lib/galaxy/dataset_collections/__init__.py b/lib/galaxy/dataset_collections/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/dataset_collections/builder.py b/lib/galaxy/dataset_collections/builder.py
new file mode 100644
index 0000000..48c160c
--- /dev/null
+++ b/lib/galaxy/dataset_collections/builder.py
@@ -0,0 +1,90 @@
+from galaxy import model
+from galaxy.util.odict import odict
+
+
+def build_collection( type, dataset_instances ):
+    """
+    Build a DatasetCollection with populated DatasetCollectionElement objects
+    corresponding to the supplied dataset instances, or raise an exception if
+    this is not a valid collection of the specified type.
+    """
+    dataset_collection = model.DatasetCollection( )
+    set_collection_elements( dataset_collection, type, dataset_instances )
+    return dataset_collection
+
+
+def set_collection_elements( dataset_collection, type, dataset_instances ):
+    elements = []
+    for element_index, element in enumerate( type.generate_elements( dataset_instances ) ):
+        element.element_index = element_index
+        element.collection = dataset_collection
+        elements.append( element )
+
+    dataset_collection.elements = elements
+    return dataset_collection
+
+
+class CollectionBuilder(object):
+    """ Purely functional builder pattern for building a dataset collection. """
+
+    def __init__(self, collection_type_description):
+        self._collection_type_description = collection_type_description
+        self._current_elements = odict()
+
+    def get_level(self, identifier):
+        if not self._nested_collection:
+            message_template = "Cannot add nested collection to collection of type [%s]"
+            message = message_template % (self._collection_type_description)
+            raise AssertionError(message)
+        if identifier not in self._current_elements:
+            subcollection_builder = CollectionBuilder(
+                self._subcollection_type_description
+            )
+            self._current_elements[identifier] = subcollection_builder
+
+        return self._current_elements[identifier]
+
+    def add_dataset(self, identifier, dataset_instance):
+        self._current_elements[ identifier ] = dataset_instance
+
+    def build_elements(self):
+        elements = self._current_elements
+        if self._nested_collection:
+            new_elements = odict()
+            for identifier, element in elements.items():
+                new_elements[identifier] = element.build()
+            elements = new_elements
+        return elements
+
+    def build(self):
+        type_plugin = self._collection_type_description.rank_type_plugin()
+        collection = build_collection( type_plugin, self.build_elements() )
+        collection.collection_type = self._collection_type_description.collection_type
+        return collection
+
+    @property
+    def _subcollection_type_description(self):
+        return self._collection_type_description.subcollection_type_description()
+
+    @property
+    def _nested_collection(self):
+        return self._collection_type_description.has_subcollections()
+
+
+class BoundCollectionBuilder( CollectionBuilder ):
+    """ More stateful builder that is bound to a particular model object. """
+
+    def __init__( self, dataset_collection, collection_type_description ):
+        self.dataset_collection = dataset_collection
+        if dataset_collection.populated:
+            raise Exception("Cannot reset elements of an already populated dataset collection.")
+        super( BoundCollectionBuilder, self ).__init__( collection_type_description )
+
+    def populate( self ):
+        elements = self.build_elements()
+        type_plugin = self._collection_type_description.rank_type_plugin()
+        set_collection_elements( self.dataset_collection, type_plugin, elements )
+        self.dataset_collection.mark_as_populated()
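+
+
+# A minimal usage sketch (editor's illustration, not part of the module):
+# assuming 'list_paired' is a CollectionTypeDescription for "list:paired" and
+# d1/d2 are dataset instances, a nested collection can be built like so:
+#
+#     builder = CollectionBuilder( list_paired )
+#     pair = builder.get_level( "sample1" )
+#     pair.add_dataset( "forward", d1 )
+#     pair.add_dataset( "reverse", d2 )
+#     collection = builder.build()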
diff --git a/lib/galaxy/dataset_collections/matching.py b/lib/galaxy/dataset_collections/matching.py
new file mode 100644
index 0000000..3398aec
--- /dev/null
+++ b/lib/galaxy/dataset_collections/matching.py
@@ -0,0 +1,85 @@
+from galaxy.util import bunch
+from galaxy import exceptions
+from .structure import get_structure, leaf
+
+CANNOT_MATCH_ERROR_MESSAGE = "Cannot match collection types."
+
+
+class CollectionsToMatch( object ):
+    """ Structure representing a set of collections that need to be matched up
+    when running tools (possibly workflows in the future as well).
+    """
+
+    def __init__( self ):
+        self.collections = {}
+
+    def add( self, input_name, hdca, subcollection_type=None, linked=True ):
+        self.collections[ input_name ] = bunch.Bunch(
+            hdca=hdca,
+            subcollection_type=subcollection_type,
+            linked=linked,
+        )
+
+    def has_collections( self ):
+        return len( self.collections ) > 0
+
+    def iteritems( self ):
+        return self.collections.iteritems()
+
+
+class MatchingCollections( object ):
+    """ Structure holding the result of matching a list of collections
+    together. Keeping this class distinct from the class above and
+    creating it in the dataset_collections_service layer may seem like
+    overkill, but I suspect in the future plugins will be subtypable, for
+    instance, so matching collections will need to make heavy use of the
+    dataset collection type registry managed by the dataset collections
+    service - hence the complexity now.
+    """
+
+    def __init__( self ):
+        self.linked_structure = None
+        self.unlinked_structures = []
+        self.collections = {}
+
+    def __attempt_add_to_linked_match( self, input_name, hdca, collection_type_description, subcollection_type ):
+        structure = get_structure( hdca, collection_type_description, leaf_subcollection_type=subcollection_type )
+        if not self.linked_structure:
+            self.linked_structure = structure
+            self.collections[ input_name ] = hdca
+        else:
+            if not self.linked_structure.can_match( structure ):
+                raise exceptions.MessageException( CANNOT_MATCH_ERROR_MESSAGE )
+            self.collections[ input_name ] = hdca
+
+    def slice_collections( self ):
+        return self.linked_structure.walk_collections( self.collections )
+
+    @property
+    def structure( self ):
+        """Yield cross product of all unlinked datasets to linked dataset."""
+        effective_structure = leaf
+        for unlinked_structure in self.unlinked_structures:
+            effective_structure = effective_structure.multiply( unlinked_structure )
+        linked_structure = self.linked_structure or leaf
+        effective_structure = effective_structure.multiply( linked_structure )
+        return None if effective_structure.is_leaf else effective_structure
+
+    @staticmethod
+    def for_collections( collections_to_match, collection_type_descriptions ):
+        if not collections_to_match.has_collections():
+            return None
+
+        matching_collections = MatchingCollections()
+        for input_key, to_match in collections_to_match.iteritems():
+            hdca = to_match.hdca
+            collection_type_description = collection_type_descriptions.for_collection_type( hdca.collection.collection_type )
+            subcollection_type = to_match.subcollection_type
+
+            if to_match.linked:
+                matching_collections.__attempt_add_to_linked_match( input_key, hdca, collection_type_description, subcollection_type )
+            else:
+                structure = get_structure( hdca, collection_type_description, leaf_subcollection_type=subcollection_type )
+                matching_collections.unlinked_structures.append( structure )
+
+        return matching_collections
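+
+
+# A minimal usage sketch (editor's illustration, hypothetical hdca objects and
+# a collection_type_descriptions registry):
+#
+#     to_match = CollectionsToMatch()
+#     to_match.add( "input1", hdca1 )
+#     to_match.add( "input2", hdca2 )
+#     matching = MatchingCollections.for_collections( to_match, collection_type_descriptions )
+#     for matched_elements in matching.slice_collections():
+#         pass  # one dict mapping input names to matched elements per slice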
diff --git a/lib/galaxy/dataset_collections/registry.py b/lib/galaxy/dataset_collections/registry.py
new file mode 100644
index 0000000..d688d3a
--- /dev/null
+++ b/lib/galaxy/dataset_collections/registry.py
@@ -0,0 +1,24 @@
+from .types import list
+from .types import paired
+from galaxy import model
+
+PLUGIN_CLASSES = [list.ListDatasetCollectionType, paired.PairedDatasetCollectionType]
+
+
+class DatasetCollectionTypesRegistry(object):
+
+    def __init__( self, app ):
+        self.__plugins = dict( [ ( p.collection_type, p() ) for p in PLUGIN_CLASSES ] )
+
+    def get( self, plugin_type ):
+        return self.__plugins[ plugin_type ]
+
+    def prototype( self, plugin_type ):
+        plugin_type_object = self.get( plugin_type )
+        if not hasattr( plugin_type_object, 'prototype_elements' ):
+            raise Exception( "Cannot pre-determine structure for collection of type %s" % plugin_type )
+
+        dataset_collection = model.DatasetCollection()
+        elements = [ e for e in plugin_type_object.prototype_elements() ]
+        dataset_collection.elements = elements
+        return dataset_collection
diff --git a/lib/galaxy/dataset_collections/structure.py b/lib/galaxy/dataset_collections/structure.py
new file mode 100644
index 0000000..d2e7cc3
--- /dev/null
+++ b/lib/galaxy/dataset_collections/structure.py
@@ -0,0 +1,134 @@
+""" Module for reasoning about structure of and matching hierarchical collections of data.
+"""
+import logging
+log = logging.getLogger( __name__ )
+
+from .type_description import map_over_collection_type
+
+
+class Leaf( object ):
+
+    def __len__( self ):
+        return 1
+
+    @property
+    def is_leaf( self ):
+        return True
+
+    def clone( self ):
+        return self
+
+    def multiply( self, other_structure ):
+        return other_structure.clone()
+
+
+leaf = Leaf()
+
+
+class Tree( object ):
+
+    def __init__( self, children, collection_type_description ):
+        self.children = children
+        self.collection_type_description = collection_type_description
+
+    @staticmethod
+    def for_dataset_collection( dataset_collection, collection_type_description ):
+        children = []
+        for element in dataset_collection.elements:
+            if collection_type_description.has_subcollections():
+                child_collection = element.child_collection
+                subcollection_type_description = collection_type_description.subcollection_type_description()  # Type description of children
+                tree = Tree.for_dataset_collection( child_collection, collection_type_description=subcollection_type_description )
+                children.append( ( element.element_identifier, tree ) )
+            else:
+                children.append( ( element.element_identifier, leaf ) )
+        return Tree( children, collection_type_description )
+
+    def walk_collections( self, hdca_dict ):
+        return self._walk_collections( dict_map( lambda hdca: hdca.collection, hdca_dict ) )
+
+    def _walk_collections( self, collection_dict ):
+        for index, ( identifier, substructure ) in enumerate( self.children ):
+            def element( collection ):
+                return collection[ index ]
+
+            if substructure.is_leaf:
+                yield dict_map( element, collection_dict )
+            else:
+                sub_collections = dict_map( lambda collection: element( collection ).child_collection, collection_dict )
+                for sub_element in substructure._walk_collections( sub_collections ):
+                    yield sub_element
+
+    @property
+    def is_leaf( self ):
+        return False
+
+    def can_match( self, other_structure ):
+        if not self.collection_type_description.can_match_type( other_structure.collection_type_description ):
+            return False
+
+        if len( self.children ) != len( other_structure.children ):
+            return False
+
+        for my_child, other_child in zip( self.children, other_structure.children ):
+            # At least one is nested collection...
+            if my_child[ 1 ].is_leaf != other_child[ 1 ].is_leaf:
+                return False
+
+            if not my_child[ 1 ].is_leaf and not my_child[ 1 ].can_match( other_child[ 1 ]):
+                return False
+
+        return True
+
+    def __len__( self ):
+        return sum( [ len( c[ 1 ] ) for c in self.children ] )
+
+    def element_identifiers_for_outputs( self, trans, outputs ):
+        element_identifiers = []
+        elements_collection_type = None
+        for identifier, child in self.children:
+            if isinstance( child, Tree ):
+                child_identifiers = child.element_identifiers_for_outputs( trans, outputs[ 0:len( child ) ] )
+                child_identifiers[ "name" ] = identifier
+                element_identifiers.append( child_identifiers )
+                elements_collection_type = child_identifiers[ "collection_type" ]
+            else:
+                output_object = outputs[ 0 ]
+                element_identifiers.append( dict( name=identifier, __object__=output_object ) )
+                if hasattr( output_object, "collection_type" ):
+                    elements_collection_type = output_object.collection_type
+
+            outputs = outputs[ len( child ): ]
+
+        collection_type = map_over_collection_type( self.collection_type_description.rank_collection_type(), elements_collection_type )
+        return dict(
+            src="new_collection",
+            collection_type=collection_type,
+            element_identifiers=element_identifiers,
+        )
+
+    def multiply( self, other_structure ):
+        if other_structure.is_leaf:
+            return self.clone()
+
+        new_collection_type = self.collection_type_description.multiply( other_structure.collection_type_description )
+        new_children = []
+        for (identifier, structure) in self.children:
+            new_children.append( (identifier, structure.multiply( other_structure ) ) )
+
+        return Tree( new_children, new_collection_type )
+
+    def clone( self ):
+        cloned_children = [ ( identifier, structure.clone() ) for identifier, structure in self.children ]
+        return Tree( cloned_children, self.collection_type_description )
+
+
+def dict_map( func, input_dict ):
+    return dict( [ ( k, func(v) ) for k, v in input_dict.items() ] )
+
+
+def get_structure( dataset_collection_instance, collection_type_description, leaf_subcollection_type=None ):
+    if leaf_subcollection_type:
+        collection_type_description = collection_type_description.effective_collection_type_description( leaf_subcollection_type )
+
+    return Tree.for_dataset_collection( dataset_collection_instance.collection, collection_type_description )
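+
+
+# Behavioral note (editor's illustration): 'leaf' is the shared terminal node,
+# so for any structure s, leaf.multiply( s ) returns a clone of s while
+# s.multiply( leaf ) returns a clone of s - multiply() therefore computes the
+# cross product of two structures, as used by MatchingCollections.structure.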
diff --git a/lib/galaxy/dataset_collections/subcollections.py b/lib/galaxy/dataset_collections/subcollections.py
new file mode 100644
index 0000000..6a18d1a
--- /dev/null
+++ b/lib/galaxy/dataset_collections/subcollections.py
@@ -0,0 +1,25 @@
+from galaxy import exceptions
+
+
+def split_dataset_collection_instance( dataset_collection_instance, collection_type ):
+    """ Split up collection into collection.
+    """
+    return _split_dataset_collection( dataset_collection_instance.collection, collection_type )
+
+
+def _split_dataset_collection( dataset_collection, collection_type ):
+    this_collection_type = dataset_collection.collection_type
+    if not this_collection_type.endswith( collection_type ) or this_collection_type == collection_type:
+        raise exceptions.MessageException( "Cannot split collection in desired fashion." )
+
+    split_elements = []
+    for element in dataset_collection.elements:
+        child_collection = element.child_collection
+        if child_collection is None:
+            raise exceptions.MessageException( "Cannot split collection in desired fashion." )
+        if child_collection.collection_type == collection_type:
+            split_elements.append( element )
+        else:
+            split_elements.extend( _split_dataset_collection( element.child_collection, element.child_collection.collection_type ) )
+
+    return split_elements
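+
+
+# Example (editor's illustration): given an hdca whose collection is of type
+# "list:paired", the call
+#
+#     elements = split_dataset_collection_instance( hdca, "paired" )
+#
+# returns the list's elements, each of whose child_collection is one of the
+# "paired" subcollections.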
diff --git a/lib/galaxy/dataset_collections/type_description.py b/lib/galaxy/dataset_collections/type_description.py
new file mode 100644
index 0000000..84d2636
--- /dev/null
+++ b/lib/galaxy/dataset_collections/type_description.py
@@ -0,0 +1,127 @@
+
+
+class CollectionTypeDescriptionFactory( object ):
+
+    def __init__( self, type_registry ):
+        # Take in type_registry even though it is not used yet - I think we
+        # will need it someday.
+        self.type_registry = type_registry
+
+    def for_collection_type( self, collection_type ):
+        return CollectionTypeDescription( collection_type, self )
+
+
+class CollectionTypeDescription( object ):
+    """ Abstraction over dataset collection type that ties together string
+    reprentation in database/model with type registry.
+
+    >>> factory = CollectionTypeDescriptionFactory( None )
+    >>> nested_type_description = factory.for_collection_type( "list:paired" )
+    >>> paired_type_description = factory.for_collection_type( "paired" )
+    >>> nested_type_description.has_subcollections_of_type( "list" )
+    False
+    >>> nested_type_description.has_subcollections_of_type( "list:paired" )
+    False
+    >>> nested_type_description.has_subcollections_of_type( "paired" )
+    True
+    >>> nested_type_description.has_subcollections_of_type( paired_type_description )
+    True
+    >>> nested_type_description.has_subcollections( )
+    True
+    >>> paired_type_description.has_subcollections( )
+    False
+    >>> paired_type_description.rank_collection_type()
+    'paired'
+    >>> nested_type_description.rank_collection_type()
+    'list'
+    >>> nested_type_description.effective_collection_type( paired_type_description )
+    'list'
+    >>> nested_type_description.effective_collection_type_description( paired_type_description ).collection_type
+    'list'
+    """
+
+    def __init__( self, collection_type, collection_type_description_factory ):
+        self.collection_type = collection_type
+        self.collection_type_description_factory = collection_type_description_factory
+        self.__has_subcollections = self.collection_type.find( ":" ) > 0
+
+    def effective_collection_type_description( self, subcollection_type ):
+        effective_collection_type = self.effective_collection_type( subcollection_type )
+        return self.collection_type_description_factory.for_collection_type( effective_collection_type )
+
+    def effective_collection_type( self, subcollection_type ):
+        if hasattr( subcollection_type, 'collection_type' ):
+            subcollection_type = subcollection_type.collection_type
+
+        if not self.has_subcollections_of_type( subcollection_type ):
+            raise ValueError( "Cannot compute effective subcollection type of %s over %s" % ( subcollection_type, self ) )
+
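+        # Strip the trailing ":<subcollection_type>" suffix, e.g.
+        # "list:paired" over "paired" yields "list".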
+        return self.collection_type[ :-( len( subcollection_type ) + 1 ) ]
+
+    def has_subcollections_of_type( self, other_collection_type ):
+        """ Take in another type (either flat string or another
+        CollectionTypeDescription) and determine if this collection contains
+        subcollections matching that type.
+
+        The way this is used in map/reduce it seems to make the most sense
+        for this to return True if these subtypes are proper (i.e. a type
+        is not considered to have subcollections of its own type).
+        """
+        if hasattr( other_collection_type, 'collection_type' ):
+            other_collection_type = other_collection_type.collection_type
+        collection_type = self.collection_type
+        return collection_type.endswith( other_collection_type ) and collection_type != other_collection_type
+
+    def is_subcollection_of_type( self, other_collection_type ):
+        if not hasattr( other_collection_type, 'collection_type' ):
+            other_collection_type = self.collection_type_description_factory.for_collection_type( other_collection_type )
+        return other_collection_type.has_subcollections_of_type( self )
+
+    def can_match_type( self, other_collection_type ):
+        if hasattr( other_collection_type, 'collection_type' ):
+            other_collection_type = other_collection_type.collection_type
+        collection_type = self.collection_type
+        return other_collection_type == collection_type
+
+    def subcollection_type_description( self ):
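+        # e.g. the subcollection type description of "list:paired" describes "paired"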
+        if not self.__has_subcollections:
+            raise ValueError( "Cannot generate subcollection type description for flat type %s" % self.collection_type )
+        subcollection_type = self.collection_type.split( ":", 1 )[ 1 ]
+        return self.collection_type_description_factory.for_collection_type( subcollection_type )
+
+    def has_subcollections( self ):
+        return self.__has_subcollections
+
+    def rank_collection_type( self ):
+        """ Return the top-level collection type corresponding to this
+        collection type. For instance the "rank" type of a list of paired
+        data ("list:paired") is "list".
+        """
+        return self.collection_type.split( ":" )[ 0 ]
+
+    def rank_type_plugin( self ):
+        return self.collection_type_description_factory.type_registry.get( self.rank_collection_type() )
+
+    @property
+    def dimension( self ):
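+        # Number of rank components plus one for the dataset level itself,
+        # e.g. "list:paired" has dimension 3.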
+        return len(self.collection_type.split(":")) + 1
+
+    def multiply( self, other_collection_type ):
+        collection_type = map_over_collection_type( self, other_collection_type )
+        return self.collection_type_description_factory.for_collection_type( collection_type )
+
+    def __str__( self ):
+        return "CollectionTypeDescription[%s]" % self.collection_type
+
+
+def map_over_collection_type( mapped_over_collection_type, target_collection_type ):
+    if hasattr( mapped_over_collection_type, 'collection_type' ):
+        mapped_over_collection_type = mapped_over_collection_type.collection_type
+
+    if not target_collection_type:
+        return mapped_over_collection_type
+    else:
+        if hasattr( target_collection_type, 'collection_type' ):
+            target_collection_type = target_collection_type.collection_type
+
+        return "%s:%s" % (mapped_over_collection_type, target_collection_type)
diff --git a/lib/galaxy/dataset_collections/types/__init__.py b/lib/galaxy/dataset_collections/types/__init__.py
new file mode 100644
index 0000000..239b070
--- /dev/null
+++ b/lib/galaxy/dataset_collections/types/__init__.py
@@ -0,0 +1,23 @@
+from galaxy import exceptions
+from abc import ABCMeta
+from abc import abstractmethod
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class DatasetCollectionType(object):
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def generate_elements( self, dataset_instances ):
+        """ Generate DatasetCollectionElements with corresponding
+        to the supplied dataset instances or throw exception if
+        this is not a valid collection of the specified type.
+        """
+
+
+class BaseDatasetCollectionType( DatasetCollectionType ):
+
+    def _validation_failed( self, message ):
+        raise exceptions.ObjectAttributeInvalidException( message )
diff --git a/lib/galaxy/dataset_collections/types/list.py b/lib/galaxy/dataset_collections/types/list.py
new file mode 100644
index 0000000..37dbee2
--- /dev/null
+++ b/lib/galaxy/dataset_collections/types/list.py
@@ -0,0 +1,20 @@
+from ..types import BaseDatasetCollectionType
+
+from galaxy.model import DatasetCollectionElement
+
+
+class ListDatasetCollectionType( BaseDatasetCollectionType ):
+    """ A flat list of named elements.
+    """
+    collection_type = "list"
+
+    def __init__( self ):
+        pass
+
+    def generate_elements( self, elements ):
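+        # 'elements' maps element identifier -> dataset instance; each
+        # entry becomes one DatasetCollectionElement of the list.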
+        for identifier, element in elements.iteritems():
+            association = DatasetCollectionElement(
+                element=element,
+                element_identifier=identifier,
+            )
+            yield association
diff --git a/lib/galaxy/dataset_collections/types/paired.py b/lib/galaxy/dataset_collections/types/paired.py
new file mode 100644
index 0000000..378e3ec
--- /dev/null
+++ b/lib/galaxy/dataset_collections/types/paired.py
@@ -0,0 +1,46 @@
+from ..types import BaseDatasetCollectionType
+
+from galaxy.model import DatasetCollectionElement, HistoryDatasetAssociation
+
+FORWARD_IDENTIFIER = "forward"
+REVERSE_IDENTIFIER = "reverse"
+
+INVALID_IDENTIFIERS_MESSAGE = "Paired instance must define '%s' and '%s' datasets." % ( FORWARD_IDENTIFIER, REVERSE_IDENTIFIER )
+
+
+class PairedDatasetCollectionType( BaseDatasetCollectionType ):
+    """
+    Paired (left/right) datasets.
+    """
+    collection_type = "paired"
+
+    def __init__( self ):
+        pass
+
+    def generate_elements( self, elements ):
+        forward_dataset = elements.get( FORWARD_IDENTIFIER, None )
+        reverse_dataset = elements.get( REVERSE_IDENTIFIER, None )
+        if not forward_dataset or not reverse_dataset:
+            self._validation_failed( INVALID_IDENTIFIERS_MESSAGE )
+        left_association = DatasetCollectionElement(
+            element=forward_dataset,
+            element_identifier=FORWARD_IDENTIFIER,
+        )
+        right_association = DatasetCollectionElement(
+            element=reverse_dataset,
+            element_identifier=REVERSE_IDENTIFIER,
+        )
+        yield left_association
+        yield right_association
+
+    def prototype_elements( self ):
+        left_association = DatasetCollectionElement(
+            element=HistoryDatasetAssociation(),
+            element_identifier=FORWARD_IDENTIFIER,
+        )
+        right_association = DatasetCollectionElement(
+            element=HistoryDatasetAssociation(),
+            element_identifier=REVERSE_IDENTIFIER,
+        )
+        yield left_association
+        yield right_association
diff --git a/lib/galaxy/datatypes/__init__.py b/lib/galaxy/datatypes/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/datatypes/assembly.py b/lib/galaxy/datatypes/assembly.py
new file mode 100644
index 0000000..015d5f8
--- /dev/null
+++ b/lib/galaxy/datatypes/assembly.py
@@ -0,0 +1,235 @@
+"""
+velvet datatypes
+James E Johnson - University of Minnesota
+for the velvet assembler tool in Galaxy
+"""
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+import sys
+
+from galaxy.datatypes import data
+from galaxy.datatypes import sequence
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes.text import Html
+
+log = logging.getLogger(__name__)
+
+
+class Amos( data.Text ):
+    """Class describing the AMOS assembly file """
+    edam_data = "data_0925"
+    edam_format = "format_3582"
+    file_ext = 'afg'
+
+    def sniff( self, filename ):
+        # FIXME: this method will read the entire file.
+        # It should call get_headers() like other sniff methods.
+        """
+        Determines whether the file is in the AMOS assembly file format.
+        Example::
+
+          {CTG
+          iid:1
+          eid:1
+          seq:
+          CCTCTCCTGTAGAGTTCAACCGA-GCCGGTAGAGTTTTATCA
+          .
+          qlt:
+          DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD
+          .
+          {TLE
+          src:1027
+          off:0
+          clr:618,0
+          gap:
+          250 612
+          .
+          }
+          }
+        """
+        isAmos = False
+        try:
+            fh = open( filename )
+            while not isAmos:
+                line = fh.readline()
+                if not line:
+                    break  # EOF
+                line = line.strip()
+                if line:  # first non-empty line
+                    if line.startswith( '{' ):
+                        if re.match(r'{(RED|CTG|TLE)$', line):
+                            isAmos = True
+            fh.close()
+        except:
+            pass
+        return isAmos
+
+
+class Sequences( sequence.Fasta ):
+    """Class describing the Sequences file generated by velveth """
+    edam_data = "data_0925"
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is a velveth-produced fasta file.
+        The id line has 3 tab-separated fields: sequence_name, sequence_index, and category::
+
+          >SEQUENCE_0_length_35   1       1
+          GGATATAGGGCCAACCCAACTCAACGGCCTGTCTT
+          >SEQUENCE_1_length_35   2       1
+          CGACGAATGACAGGTCACGAATTTGGCGGGGATTA
+        """
+
+        try:
+            fh = open( filename )
+            while True:
+                line = fh.readline()
+                if not line:
+                    break  # EOF
+                line = line.strip()
+                if line:  # first non-empty line
+                    if line.startswith( '>' ):
+                        if not re.match(r'>[^\t]+\t\d+\t\d+$', line):
+                            break
+                        # The next line.strip() must not be '', nor start with '>'
+                        line = fh.readline().strip()
+                        if line == '' or line.startswith( '>' ):
+                            break
+                        return True
+                    else:
+                        break  # we found a non-empty line, but it's not a fasta header
+            fh.close()
+        except:
+            pass
+        return False
+
+
+class Roadmaps( data.Text ):
+    """Class describing the Sequences file generated by velveth """
+    edam_format = "format_2561"
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is a velveth-produced RoadMap::
+
+          142858  21      1
+          ROADMAP 1
+          ROADMAP 2
+          ...
+        """
+
+        try:
+            fh = open( filename )
+            while True:
+                line = fh.readline()
+                if not line:
+                    break  # EOF
+                line = line.strip()
+                if line:  # first non-empty line
+                    if not re.match(r'\d+\t\d+\t\d+$', line):
+                        break
+                    # The next line.strip() should be 'ROADMAP 1'
+                    line = fh.readline().strip()
+                    if not re.match(r'ROADMAP \d+$', line):
+                        break
+                    return True
+                else:
+                    break  # blank line before any roadmap data - not a velveth RoadMap file
+            fh.close()
+        except:
+            pass
+        return False
+
+
+class Velvet( Html ):
+    MetadataElement( name="base_name", desc="base name for velveth dataset", default="velvet", readonly=True, set_in_upload=True)
+    MetadataElement( name="paired_end_reads", desc="has paired-end reads", default="False", readonly=False, set_in_upload=True)
+    MetadataElement( name="long_reads", desc="has long reads", default="False", readonly=False, set_in_upload=True)
+    MetadataElement( name="short2_reads", desc="has 2nd short reads", default="False", readonly=False, set_in_upload=True)
+    composite_type = 'auto_primary_file'
+    allow_datatype_change = False
+    file_ext = 'velvet'
+
+    def __init__( self, **kwd ):
+        Html.__init__( self, **kwd )
+        self.add_composite_file( 'Sequences', mimetype='text/html', description='Sequences', substitute_name_with_metadata=None, is_binary=False )
+        self.add_composite_file( 'Roadmaps', mimetype='text/html', description='Roadmaps', substitute_name_with_metadata=None, is_binary=False )
+        self.add_composite_file( 'Log', mimetype='text/html', description='Log', optional='True', substitute_name_with_metadata=None, is_binary=False )
+
+    def generate_primary_file( self, dataset=None ):
+        log.debug( "Velvet log info  %s %s" % ('JJ generate_primary_file', dataset))
+        rval = ['<html><head><title>Velvet Galaxy Composite Dataset </title></head><p/>']
+        rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
+        for composite_name, composite_file in self.get_composite_files( dataset=dataset ).items():
+            fn = composite_name
+            log.debug( "Velvet log info  %s %s %s" % ('JJ generate_primary_file', fn, composite_file))
+            opt_text = ''
+            if composite_file.optional:
+                opt_text = ' (optional)'
+            if composite_file.get('description'):
+                rval.append( '<li><a href="%s" type="text/plain">%s (%s)</a>%s</li>' % ( fn, fn, composite_file.get('description'), opt_text ) )
+            else:
+                rval.append( '<li><a href="%s" type="text/plain">%s</a>%s</li>' % ( fn, fn, opt_text ) )
+        rval.append( '</ul></div></html>' )
+        return "\n".join( rval )
+
+    def regenerate_primary_file(self, dataset):
+        """
+        Regenerate the composite dataset's primary (HTML) file; this cannot be done until metadata is being set.
+        """
+        log.debug( "Velvet log info  %s" % 'JJ regenerate_primary_file')
+        gen_msg = ''
+        try:
+            efp = dataset.extra_files_path
+            log_path = os.path.join(efp, 'Log')
+            f = open(log_path, 'r')
+            log_content = f.read(1000)
+            f.close()
+            log_msg = re.sub('/\S*/', '', log_content)
+            log.debug( "Velveth log info  %s" % log_msg)
+            paired_end_reads = re.search('-(short|long)Paired', log_msg) is not None
+            dataset.metadata.paired_end_reads = paired_end_reads
+            long_reads = re.search('-long', log_msg) is not None
+            dataset.metadata.long_reads = long_reads
+            short2_reads = re.search('-short(Paired)?2', log_msg) is not None
+            dataset.metadata.short2_reads = short2_reads
+            dataset.info = re.sub('.*velveth \S+', 'hash_length', re.sub('\n', ' ', log_msg))
+            if paired_end_reads:
+                gen_msg = gen_msg + ' Paired-End Reads'
+            if long_reads:
+                gen_msg = gen_msg + ' Long Reads'
+            if len(gen_msg) > 0:
+                gen_msg = 'Uses: ' + gen_msg
+        except:
+            log.debug( "Velveth could not read Log file in %s" % efp)
+        log.debug( "Velveth log info  %s" % gen_msg)
+        rval = ['<html><head><title>Velvet Galaxy Composite Dataset </title></head><p/>']
+        # rval.append('<div>Generated:<p/><code> %s </code></div>' %(re.sub('\n','<br>',log_msg)))
+        rval.append('<div>Generated:<p/> %s </div>' % (gen_msg))
+        rval.append('<div>Velveth dataset:<p/><ul>')
+        for composite_name, composite_file in self.get_composite_files( dataset=dataset ).items():
+            fn = composite_name
+            log.debug( "Velvet log info  %s %s %s" % ('JJ regenerate_primary_file', fn, composite_file))
+            if re.search('Log', fn) is None:
+                opt_text = ''
+                if composite_file.optional:
+                    opt_text = ' (optional)'
+                if composite_file.get('description'):
+                    rval.append( '<li><a href="%s" type="text/plain">%s (%s)</a>%s</li>' % ( fn, fn, composite_file.get('description'), opt_text ) )
+                else:
+                    rval.append( '<li><a href="%s" type="text/plain">%s</a>%s</li>' % ( fn, fn, opt_text ) )
+        rval.append( '</ul></div></html>' )
+        with open(dataset.file_name, 'w') as f:
+            f.write("\n".join( rval ))
+            f.write('\n')
+
+    def set_meta( self, dataset, **kwd ):
+        Html.set_meta( self, dataset, **kwd )
+        self.regenerate_primary_file(dataset)
+
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod(sys.modules[__name__])
diff --git a/lib/galaxy/datatypes/binary.py b/lib/galaxy/datatypes/binary.py
new file mode 100644
index 0000000..8788210
--- /dev/null
+++ b/lib/galaxy/datatypes/binary.py
@@ -0,0 +1,1438 @@
+"""Binary classes"""
+from __future__ import print_function
+
+import binascii
+import gzip
+import logging
+import os
+import shutil
+import struct
+import subprocess
+import tempfile
+import zipfile
+
+import pysam
+from bx.seq.twobit import TWOBIT_MAGIC_NUMBER, TWOBIT_MAGIC_NUMBER_SWAP, TWOBIT_MAGIC_SIZE
+
+from galaxy.datatypes import metadata
+from galaxy.datatypes.metadata import DictParameter, ListParameter, MetadataElement, MetadataParameter
+from galaxy.util import FILENAME_VALID_CHARS, nice_size, sqlite, which
+from . import data, dataproviders
+
+
+log = logging.getLogger(__name__)
+
+# Currently these supported binary data types must be manually set on upload
+
+
+class Binary( data.Data ):
+    """Binary data"""
+    edam_format = "format_2333"
+    sniffable_binary_formats = []
+    unsniffable_binary_formats = []
+
+    @staticmethod
+    def register_sniffable_binary_format(data_type, ext, type_class):
+        Binary.sniffable_binary_formats.append({"type": data_type, "ext": ext.lower(), "class": type_class})
+
+    @staticmethod
+    def register_unsniffable_binary_ext(ext):
+        Binary.unsniffable_binary_formats.append(ext.lower())
+
+    @staticmethod
+    def is_sniffable_binary( filename ):
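+        # Try each registered binary sniffer in registration order and
+        # return a ( data_type, ext ) tuple for the first match, else None.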
+        format_information = None
+        for format in Binary.sniffable_binary_formats:
+            format_instance = format[ "class" ]()
+            try:
+                if format_instance.sniff(filename):
+                    format_information = ( format["type"], format[ "ext" ] )
+                    break
+            except Exception:
+                # Sniffer raised exception, could be any number of
+                # reasons for this so there is not much to do besides
+                # trying next sniffer.
+                pass
+        return format_information
+
+    @staticmethod
+    def is_ext_unsniffable(ext):
+        return ext in Binary.unsniffable_binary_formats
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = 'binary data'
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def get_mime( self ):
+        """Returns the mime type of the datatype"""
+        return 'application/octet-stream'
+
+    def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, **kwd):
+        trans.response.set_content_type(dataset.get_mime())
+        trans.log_event( "Display dataset id: %s" % str( dataset.id ) )
+        trans.response.headers['Content-Length'] = int( os.stat( dataset.file_name ).st_size )
+        to_ext = dataset.extension
+        fname = ''.join(c in FILENAME_VALID_CHARS and c or '_' for c in dataset.name)[0:150]
+        trans.response.set_content_type( "application/octet-stream" )  # force octet-stream so Safari doesn't append mime extensions to filename
+        trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (dataset.hid, fname, to_ext)
+        return open( dataset.file_name )
+
+
+class Ab1( Binary ):
+    """Class describing an ab1 binary sequence file"""
+    file_ext = "ab1"
+    edam_format = "format_3000"
+    edam_data = "data_0924"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary ab1 sequence file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary ab1 sequence file (%s)" % ( nice_size( dataset.get_size() ) )
+
+
+Binary.register_unsniffable_binary_ext("ab1")
+
+
+class Idat( Binary ):
+    """Binary data in idat format"""
+    file_ext = "idat"
+    edam_format = "format_2058"
+    edam_data = "data_2603"
+
+    def sniff( self, filename ):
+        try:
+            header = open( filename, 'rb' ).read(4)
+            if header == b'IDAT':
+                return True
+            return False
+        except:
+            return False
+
+
+Binary.register_sniffable_binary_format("idat", "idat", Idat)
+
+
+class Cel( Binary ):
+
+    """Binary data in CEL format."""
+    file_ext = "cel"
+    edam_format = "format_1638"
+    edam_data = "data_3110"
+
+    def sniff( self, filename ):
+        """
+        Try to guess if the file is a CEL file.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('test.CEL')
+        >>> Cel().sniff(fname)
+        True
+
+        >>> fname = get_test_fname('drugbank_drugs.mz5')
+        >>> Cel().sniff(fname)
+        False
+        """
+        try:
+            header = open( filename, 'rb' ).read(4)
+            if header == b';\x01\x00\x00':
+                return True
+            return False
+        except:
+            return False
+
+
+Binary.register_sniffable_binary_format("cel", "cel", Cel)
+
+
+class CompressedArchive( Binary ):
+    """
+        Class describing a compressed binary file.
+        This class can be subclassed to implement archive filetypes that will not be unpacked by upload.py.
+    """
+    file_ext = "compressed_archive"
+    compressed = True
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Compressed binary file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Compressed binary file (%s)" % ( nice_size( dataset.get_size() ) )
+
+
+Binary.register_unsniffable_binary_ext("compressed_archive")
+
+
+class CompressedZipArchive( CompressedArchive ):
+    """
+        Class describing a compressed zip archive.
+        This class can be subclassed to implement archive filetypes that will not be unpacked by upload.py.
+    """
+    file_ext = "zip"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Compressed zip file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Compressed zip file (%s)" % ( nice_size( dataset.get_size() ) )
+
+
+Binary.register_unsniffable_binary_ext("zip")
+
+
+class GenericAsn1Binary( Binary ):
+    """Class for generic ASN.1 binary format"""
+    file_ext = "asn1-binary"
+    edam_format = "format_1966"
+    edam_data = "data_0849"
+
+
+Binary.register_unsniffable_binary_ext("asn1-binary")
+
+
+ at dataproviders.decorators.has_dataproviders
+class Bam( Binary ):
+    """Class describing a BAM binary file"""
+    edam_format = "format_2572"
+    edam_data = "data_0863"
+    file_ext = "bam"
+    track_type = "ReadTrack"
+    data_sources = { "data": "bai", "index": "bigwig" }
+
+    MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, file_ext="bai", readonly=True, no_value=None, visible=False, optional=True )
+    MetadataElement( name="bam_version", default=None, desc="BAM Version", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value=None )
+    MetadataElement( name="sort_order", default=None, desc="Sort Order", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value=None )
+    MetadataElement( name="read_groups", default=[], desc="Read Groups", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value=[] )
+    MetadataElement( name="reference_names", default=[], desc="Chromosome Names", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value=[] )
+    MetadataElement( name="reference_lengths", default=[], desc="Chromosome Lengths", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value=[] )
+    MetadataElement( name="bam_header", default={}, desc="Dictionary of BAM Headers", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value={} )
+
+    def _get_samtools_version( self ):
+        version = '0.0.0'
+        samtools_exec = which('samtools')
+        if not samtools_exec:
+            message = 'Attempting to use functionality requiring samtools, but it cannot be located on Galaxy\'s PATH.'
+            raise Exception(message)
+
+        # Get the version of samtools via --version-only, if available
+        p = subprocess.Popen( ['samtools', '--version-only'],
+                              stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE)
+        output, error = p.communicate()
+
+        # --version-only is available
+        # Format is <version x.y.z>+htslib-<a.b.c>
+        if p.returncode == 0:
+            version = output.split('+')[0]
+            return version
+
+        output = subprocess.Popen( [ 'samtools' ], stderr=subprocess.PIPE, stdout=subprocess.PIPE ).communicate()[1]
+        lines = output.split( '\n' )
+        for line in lines:
+            if line.lower().startswith( 'version' ):
+                # Assuming line looks something like: version: 0.1.12a (r862)
+                version = line.split()[1]
+                break
+        return version
+
+    @staticmethod
+    def merge(split_files, output_file):
+
+        tmp_dir = tempfile.mkdtemp()
+        stderr_name = tempfile.NamedTemporaryFile(dir=tmp_dir, prefix="bam_merge_stderr").name
+        command = ["samtools", "merge", "-f", output_file] + split_files
+        proc = subprocess.Popen( args=command, stderr=open( stderr_name, 'wb' ) )
+        exit_code = proc.wait()
+        # Did merge succeed?
+        stderr = open(stderr_name).read().strip()
+        if stderr:
+            if exit_code != 0:
+                shutil.rmtree(tmp_dir)  # clean up
+                raise Exception( "Error merging BAM files: %s" % stderr )
+            else:
+                print(stderr)
+        os.unlink(stderr_name)
+        os.rmdir(tmp_dir)
+
+    def _is_coordinate_sorted( self, file_name ):
+        """See if the input BAM file is sorted from the header information."""
+        params = [ "samtools", "view", "-H", file_name ]
+        output = subprocess.Popen( params, stderr=subprocess.PIPE, stdout=subprocess.PIPE ).communicate()[0]
+        # find returns -1 if string is not found
+        return output.find( "SO:coordinate" ) != -1 or output.find( "SO:sorted" ) != -1
+
+    def dataset_content_needs_grooming( self, file_name ):
+        """See if file_name is a sorted BAM file"""
+        version = self._get_samtools_version()
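+        # NB: lexicographic string comparison - adequate for the samtools
+        # version strings ('0.1.x', '1.x') handled here.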
+        if version < '0.1.13':
+            return not self._is_coordinate_sorted( file_name )
+        else:
+            # Samtools version 0.1.13 or newer produces an error condition when attempting to index an
+            # unsorted bam file - see http://biostar.stackexchange.com/questions/5273/is-my-bam-file-sorted.
+            # So when using a newer version of samtools, we'll first check if the input BAM file is sorted
+            # from the header information.  If the header is present and sorted, we do nothing by returning False.
+            # If it's present and unsorted or if it's missing, we'll index the bam file to see if it produces the
+            # error.  If it does, sorting is needed so we return True (otherwise False).
+            #
+            # TODO: we're creating an index file here and throwing it away.  We then create it again when
+            # the set_meta() method below is called later in the job process.  We need to enhance this overall
+            # process so we don't create an index twice.  In order to make it worth the time to implement the
+            # upload tool / framework to allow setting metadata from directly within the tool itself, it should be
+            # done generically so that all tools will have the ability.  In testing, a 6.6 gb BAM file took 128
+            # seconds to index with samtools, and 45 minutes to sort, so indexing is relatively inexpensive.
+            if self._is_coordinate_sorted( file_name ):
+                return False
+            index_name = tempfile.NamedTemporaryFile( prefix="bam_index" ).name
+            stderr_name = tempfile.NamedTemporaryFile( prefix="bam_index_stderr" ).name
+            command = 'samtools index %s %s' % ( file_name, index_name )
+            proc = subprocess.Popen( args=command, shell=True, stderr=open( stderr_name, 'wb' ) )
+            proc.wait()
+            stderr = open( stderr_name ).read().strip()
+            if stderr:
+                try:
+                    os.unlink( index_name )
+                except OSError:
+                    pass
+                try:
+                    os.unlink( stderr_name )
+                except OSError:
+                    pass
+                # Return True if unsorted error condition is found (find returns -1 if string is not found).
+                return stderr.find( "[bam_index_core] the alignment is not sorted" ) != -1
+            try:
+                os.unlink( index_name )
+            except OSError:
+                pass
+            try:
+                os.unlink( stderr_name )
+            except OSError:
+                pass
+            return False
+
+    def groom_dataset_content( self, file_name ):
+        """
+        Ensures that the Bam file contents are sorted.  This function is called
+        on an output dataset after the content is initially generated.
+        """
+        # Use samtools to sort the Bam file
+        # $ samtools sort
+        # Usage: samtools sort [-on] [-m <maxMem>] <in.bam> <out.prefix>
+        # Sort alignments by leftmost coordinates. File <out.prefix>.bam will be created.
+        # This command may also create temporary files <out.prefix>.%d.bam when the
+        # whole alignment cannot be fitted into memory ( controlled by option -m ).
+        # do this in a unique temp directory, because of possible <out.prefix>.%d.bam temp files
+        if not self.dataset_content_needs_grooming( file_name ):
+            # Don't re-sort if already sorted
+            return
+        tmp_dir = tempfile.mkdtemp()
+        tmp_sorted_dataset_file_name_prefix = os.path.join( tmp_dir, 'sorted' )
+        stderr_name = tempfile.NamedTemporaryFile( dir=tmp_dir, prefix="bam_sort_stderr" ).name
+        samtools_created_sorted_file_name = "%s.bam" % tmp_sorted_dataset_file_name_prefix  # samtools accepts a prefix, not a filename, it always adds .bam to the prefix
+        command = "samtools sort %s %s" % ( file_name, tmp_sorted_dataset_file_name_prefix )
+        proc = subprocess.Popen( args=command, shell=True, cwd=tmp_dir, stderr=open( stderr_name, 'wb' ) )
+        exit_code = proc.wait()
+        # Did sort succeed?
+        stderr = open( stderr_name ).read().strip()
+        if stderr:
+            if exit_code != 0:
+                shutil.rmtree( tmp_dir)  # clean up
+                raise Exception( "Error Grooming BAM file contents: %s" % stderr )
+            else:
+                print(stderr)
+        # Move samtools_created_sorted_file_name to our output dataset location
+        shutil.move( samtools_created_sorted_file_name, file_name )
+        # Remove temp file and empty temporary directory
+        os.unlink( stderr_name )
+        os.rmdir( tmp_dir )
+
+    def init_meta( self, dataset, copy_from=None ):
+        Binary.init_meta( self, dataset, copy_from=copy_from )
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        """ Creates the index for the BAM file. """
+        # These metadata values are not accessible by users, always overwrite
+        index_file = dataset.metadata.bam_index
+        if not index_file:
+            index_file = dataset.metadata.spec['bam_index'].param.new_file( dataset=dataset )
+        # Create the Bam index
+        # $ samtools index
+        # Usage: samtools index <in.bam> [<out.index>]
+        stderr_name = tempfile.NamedTemporaryFile( prefix="bam_index_stderr" ).name
+        command = [ 'samtools', 'index', dataset.file_name, index_file.file_name ]
+        exit_code = subprocess.call( args=command, stderr=open( stderr_name, 'wb' ) )
+        # Did index succeed?
+        if exit_code == -6:
+            # SIGABRT, most likely samtools 1.0+ which does not accept the index name parameter.
+            dataset_symlink = os.path.join( os.path.dirname( index_file.file_name ),
+                                            '__dataset_%d_%s' % ( dataset.id, os.path.basename( index_file.file_name ) ) )
+            os.symlink( dataset.file_name, dataset_symlink )
+            try:
+                command = [ 'samtools', 'index', dataset_symlink ]
+                exit_code = subprocess.call( args=command, stderr=open( stderr_name, 'wb' ) )
+                shutil.move( dataset_symlink + '.bai', index_file.file_name )
+            except Exception as e:
+                open( stderr_name, 'ab+' ).write( 'Galaxy attempted to build the BAM index with samtools 1.0+ but failed: %s\n' % e)
+                exit_code = 1  # Make sure an exception raised by shutil.move() is re-raised below
+            finally:
+                os.unlink( dataset_symlink )
+        stderr = open( stderr_name ).read().strip()
+        if stderr:
+            if exit_code != 0:
+                os.unlink( stderr_name )  # clean up
+                raise Exception( "Error Setting BAM Metadata: %s" % stderr )
+            else:
+                print(stderr)
+        dataset.metadata.bam_index = index_file
+        # Remove temp file
+        os.unlink( stderr_name )
+        # Now use pysam with BAI index to determine additional metadata
+        try:
+            bam_file = pysam.AlignmentFile( dataset.file_name, mode='rb', index_filename=index_file.file_name )
+            dataset.metadata.reference_names = list( bam_file.references )
+            dataset.metadata.reference_lengths = list( bam_file.lengths )
+            dataset.metadata.bam_header = bam_file.header
+            dataset.metadata.read_groups = [ read_group['ID'] for read_group in dataset.metadata.bam_header.get( 'RG', [] ) if 'ID' in read_group ]
+            dataset.metadata.sort_order = dataset.metadata.bam_header.get( 'HD', {} ).get( 'SO', None )
+            dataset.metadata.bam_version = dataset.metadata.bam_header.get( 'HD', {} ).get( 'VN', None )
+        except:
+            # Per Dan, don't log here because doing so will cause datasets that
+            # fail metadata to end in the error state
+            pass
+
+    def sniff( self, filename ):
+        # BAM is compressed in the BGZF format, and must not be uncompressed in Galaxy.
+        # The first 4 bytes of any bam file are 'BAM\1', and the file is binary.
+        try:
+            header = gzip.open( filename ).read(4)
+            if header == b'BAM\1':
+                return True
+            return False
+        except:
+            return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary bam alignments file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary bam alignments file (%s)" % ( nice_size( dataset.get_size() ) )
+
+    # ------------- Dataproviders
+    # pipe through samtools view
+    # ALSO: (as Sam)
+    # bam does not use '#' to indicate comments/headers - we need to strip out those headers from the std. providers
+    # TODO:?? seems like there should be an easier way to do/inherit this - metadata.comment_char?
+    # TODO: incorporate samtools options to control output: regions first, then flags, etc.
+    @dataproviders.decorators.dataprovider_factory( 'line', dataproviders.line.FilteredLineDataProvider.settings )
+    def line_dataprovider( self, dataset, **settings ):
+        samtools_source = dataproviders.dataset.SamtoolsDataProvider( dataset )
+        settings[ 'comment_char' ] = '@'
+        return dataproviders.line.FilteredLineDataProvider( samtools_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'regex-line', dataproviders.line.RegexLineDataProvider.settings )
+    def regex_line_dataprovider( self, dataset, **settings ):
+        samtools_source = dataproviders.dataset.SamtoolsDataProvider( dataset )
+        settings[ 'comment_char' ] = '@'
+        return dataproviders.line.RegexLineDataProvider( samtools_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'column', dataproviders.column.ColumnarDataProvider.settings )
+    def column_dataprovider( self, dataset, **settings ):
+        samtools_source = dataproviders.dataset.SamtoolsDataProvider( dataset )
+        settings[ 'comment_char' ] = '@'
+        return dataproviders.column.ColumnarDataProvider( samtools_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'dict', dataproviders.column.DictDataProvider.settings )
+    def dict_dataprovider( self, dataset, **settings ):
+        samtools_source = dataproviders.dataset.SamtoolsDataProvider( dataset )
+        settings[ 'comment_char' ] = '@'
+        return dataproviders.column.DictDataProvider( samtools_source, **settings )
+
+    # these can't be used directly - may need BamColumn, BamDict (Bam metadata -> column/dict)
+    # OR - see genomic_region_dataprovider
+    # @dataproviders.decorators.dataprovider_factory( 'dataset-column', dataproviders.column.ColumnarDataProvider.settings )
+    # def dataset_column_dataprovider( self, dataset, **settings ):
+    #    settings[ 'comment_char' ] = '@'
+    #    return super( Sam, self ).dataset_column_dataprovider( dataset, **settings )
+
+    # @dataproviders.decorators.dataprovider_factory( 'dataset-dict', dataproviders.column.DictDataProvider.settings )
+    # def dataset_dict_dataprovider( self, dataset, **settings ):
+    #    settings[ 'comment_char' ] = '@'
+    #    return super( Sam, self ).dataset_dict_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'header', dataproviders.line.RegexLineDataProvider.settings )
+    def header_dataprovider( self, dataset, **settings ):
+        # in this case we can use an option of samtools view to provide just what we need (w/o regex)
+        samtools_source = dataproviders.dataset.SamtoolsDataProvider( dataset, '-H' )
+        return dataproviders.line.RegexLineDataProvider( samtools_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'id-seq-qual', dataproviders.column.DictDataProvider.settings )
+    def id_seq_qual_dataprovider( self, dataset, **settings ):
+        settings[ 'indeces' ] = [ 0, 9, 10 ]
+        settings[ 'column_types' ] = [ 'str', 'str', 'str' ]
+        settings[ 'column_names' ] = [ 'id', 'seq', 'qual' ]
+        return self.dict_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region', dataproviders.column.ColumnarDataProvider.settings )
+    def genomic_region_dataprovider( self, dataset, **settings ):
+        # GenomicRegionDataProvider currently requires a dataset as its source - this may not be necessary.
+        # TODO:?? consider (at least) the possible use of a kwarg: metadata_source (defaulting to source.dataset),
+        #   or remove altogether...
+        # samtools_source = dataproviders.dataset.SamtoolsDataProvider( dataset )
+        # return dataproviders.dataset.GenomicRegionDataProvider( samtools_source, metadata_source=dataset,
+        #                                                        2, 3, 3, **settings )
+
+        # instead, set manually and use in-class column gen
+        settings[ 'indeces' ] = [ 2, 3, 3 ]
+        settings[ 'column_types' ] = [ 'str', 'int', 'int' ]
+        return self.column_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region-dict', dataproviders.column.DictDataProvider.settings )
+    def genomic_region_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'indeces' ] = [ 2, 3, 3 ]
+        settings[ 'column_types' ] = [ 'str', 'int', 'int' ]
+        settings[ 'column_names' ] = [ 'chrom', 'start', 'end' ]
+        return self.dict_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'samtools' )
+    def samtools_dataprovider( self, dataset, **settings ):
+        """Generic samtools interface - all options available through settings."""
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.dataset.SamtoolsDataProvider( dataset_source, **settings )
+
+
+Binary.register_sniffable_binary_format("bam", "bam", Bam)
+
+
+class CRAM( Binary ):
+    file_ext = "cram"
+    edam_format = "format_3462"
+    edam_data = "format_0863"
+
+    MetadataElement( name="cram_version", default=None, desc="CRAM Version", param=MetadataParameter, readonly=True, visible=False, optional=False, no_value=None )
+    MetadataElement( name="cram_index", desc="CRAM Index File", param=metadata.FileParameter, file_ext="crai", readonly=True, no_value=None, visible=False, optional=True )
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        major_version, minor_version = self.get_cram_version( dataset.file_name )
+        if major_version != -1:
+            dataset.metadata.cram_version = str(major_version) + "." + str(minor_version)
+
+        if not dataset.metadata.cram_index:
+            index_file = dataset.metadata.spec['cram_index'].param.new_file( dataset=dataset )
+            if self.set_index_file(dataset, index_file):
+                dataset.metadata.cram_index = index_file
+
+    def get_cram_version( self, filename):
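+        # A CRAM file begins with the magic bytes 'CRAM' followed by
+        # one-byte major and minor version numbers (bytes 4 and 5).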
+        try:
+            with open( filename, "rb") as fh:
+                header = fh.read(6)
+            return ord( header[4] ), ord( header[5] )
+        except Exception as exc:
+            log.warning( '%s, get_cram_version Exception: %s', self, exc )
+            return -1, -1
+
+    def set_index_file(self, dataset, index_file):
+        try:
+            # @todo when pysam 1.2.1 or pysam 1.3.0 gets released and becomes
+            # a dependency of galaxy, use pysam.index(alignment, target_idx)
+            # This currently gives coredump in the current release but is
+            # fixed in the dev branch:
+            # xref: https://github.com/samtools/samtools/issues/199
+
+            dataset_symlink = os.path.join( os.path.dirname( index_file.file_name ), '__dataset_%d_%s' % ( dataset.id, os.path.basename( index_file.file_name ) ) )
+            os.symlink( dataset.file_name, dataset_symlink )
+            pysam.index( dataset_symlink )
+
+            tmp_index = dataset_symlink + ".crai"
+            if os.path.isfile( tmp_index ):
+                shutil.move( tmp_index, index_file.file_name )
+                return index_file.file_name
+            else:
+                os.unlink( dataset_symlink )
+                log.warning( '%s, expected crai index not created for: %s', self, dataset.file_name )
+                return False
+        except Exception as exc:
+            log.warning( '%s, set_index_file Exception: %s', self, exc )
+            return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = 'CRAM binary alignment file'
+            dataset.blurb = 'binary data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        try:
+            header = open( filename, 'rb' ).read(4)
+            if header == b"CRAM":
+                return True
+            return False
+        except:
+            return False
+
+
+Binary.register_sniffable_binary_format('cram', 'cram', CRAM)
+
+
+class Bcf( Binary):
+    """Class describing a BCF file"""
+    edam_format = "format_3020"
+    edam_data = "data_3498"
+    file_ext = "bcf"
+
+    MetadataElement( name="bcf_index", desc="BCF Index File", param=metadata.FileParameter, file_ext="csi", readonly=True, no_value=None, visible=False, optional=True )
+
+    def sniff( self, filename ):
+        # BCF is compressed in the BGZF format, and must not be uncompressed in Galaxy.
+        # The first 3 bytes of any bcf file are 'BCF', and the file is binary.
+        try:
+            header = gzip.open( filename ).read(3)
+            if header == b'BCF':
+                return True
+            return False
+        except:
+            return False
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        """ Creates the index for the BCF file. """
+        # These metadata values are not accessible by users, always overwrite
+        index_file = dataset.metadata.bcf_index
+        if not index_file:
+            index_file = dataset.metadata.spec['bcf_index'].param.new_file( dataset=dataset )
+        # Create the bcf index
+        # $ bcftools index
+        # Usage: bcftools index <in.bcf>
+
+        dataset_symlink = os.path.join( os.path.dirname( index_file.file_name ),
+                                        '__dataset_%d_%s' % ( dataset.id, os.path.basename( index_file.file_name ) ) )
+        os.symlink( dataset.file_name, dataset_symlink )
+
+        stderr_name = tempfile.NamedTemporaryFile( prefix="bcf_index_stderr" ).name
+        command = [ 'bcftools', 'index', dataset_symlink ]
+        try:
+            subprocess.check_call( args=command, stderr=open( stderr_name, 'wb' ) )
+            shutil.move( dataset_symlink + '.csi', index_file.file_name )  # this will fail if bcftools < 1.0 is used, because it creates a .bci index file instead of .csi
+        except Exception as e:
+            stderr = open( stderr_name ).read().strip()
+            raise Exception('Error setting BCF metadata: %s' % (stderr or str(e)))
+        finally:
+            # Remove temp file and symlink
+            os.remove( stderr_name )
+            os.remove( dataset_symlink )
+        dataset.metadata.bcf_index = index_file
+
+
+Binary.register_sniffable_binary_format("bcf", "bcf", Bcf)
+
+
+class H5( Binary ):
+    """
+    Class describing an HDF5 file
+
+    >>> from galaxy.datatypes.sniff import get_test_fname
+    >>> fname = get_test_fname( 'test.mz5' )
+    >>> H5().sniff( fname )
+    True
+    >>> fname = get_test_fname( 'interval.interval' )
+    >>> H5().sniff( fname )
+    False
+    """
+    file_ext = "h5"
+    edam_format = "format_3590"
+
+    def __init__( self, **kwd ):
+        Binary.__init__( self, **kwd )
+        self._magic = binascii.unhexlify("894844460d0a1a0a")
+
+    def sniff( self, filename ):
+        # The first 8 bytes of any hdf5 file are 0x894844460d0a1a0a
+        try:
+            header = open( filename, 'rb' ).read(8)
+            if header == self._magic:
+                return True
+            return False
+        except:
+            return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary HDF5 file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary HDF5 file (%s)" % ( nice_size( dataset.get_size() ) )
+
+
+Binary.register_sniffable_binary_format("h5", "h5", H5)
+
+
+class Scf( Binary ):
+    """Class describing an scf binary sequence file"""
+    edam_format = "format_1632"
+    edam_data = "data_0924"
+    file_ext = "scf"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary scf sequence file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary scf sequence file (%s)" % ( nice_size( dataset.get_size() ) )
+
+
+Binary.register_unsniffable_binary_ext("scf")
+
+
+class Sff( Binary ):
+    """ Standard Flowgram Format (SFF) """
+    edam_format = "format_3284"
+    edam_data = "data_0924"
+    file_ext = "sff"
+
+    def sniff( self, filename ):
+        # The first 4 bytes of any sff file are '.sff', and the file is binary. For details
+        # about the format, see http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?cmd=show&f=formats&m=doc&s=format
+        try:
+            header = open( filename, 'rb' ).read(4)
+            if header == b'.sff':
+                return True
+            return False
+        except:
+            return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary sff file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary sff file (%s)" % ( nice_size( dataset.get_size() ) )
+
+
+Binary.register_sniffable_binary_format("sff", "sff", Sff)
+
+
+class BigWig(Binary):
+    """
+    Accessing binary BigWig files from UCSC.
+    The supplemental info in the paper has the binary details:
+    http://bioinformatics.oxfordjournals.org/cgi/content/abstract/btq351v1
+    """
+    edam_format = "format_3006"
+    edam_data = "data_3002"
+    track_type = "LineTrack"
+    data_sources = { "data_standalone": "bigwig" }
+
+    def __init__( self, **kwd ):
+        Binary.__init__( self, **kwd )
+        self._magic = 0x888FFC26
+        self._name = "BigWig"
+
+    def _unpack( self, pattern, handle ):
+        return struct.unpack( pattern, handle.read( struct.calcsize( pattern ) ) )
+
+    def sniff( self, filename ):
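+        # Unpack the first 4 bytes as a native-order unsigned int and
+        # compare it against this type's expected magic number.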
+        try:
+            magic = self._unpack( "I", open( filename, 'rb' ) )
+            return magic[0] == self._magic
+        except:
+            return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary UCSC %s file" % self._name
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary UCSC %s file (%s)" % ( self._name, nice_size( dataset.get_size() ) )
+
+
+Binary.register_sniffable_binary_format("bigwig", "bigwig", BigWig)
+
+
+class BigBed(BigWig):
+    """BigBed support from UCSC."""
+    edam_format = "format_3004"
+    edam_data = "data_3002"
+    data_sources = { "data_standalone": "bigbed" }
+
+    def __init__( self, **kwd ):
+        Binary.__init__( self, **kwd )
+        self._magic = 0x8789F2EB
+        self._name = "BigBed"
+
+
+Binary.register_sniffable_binary_format("bigbed", "bigbed", BigBed)
+
+
+class TwoBit (Binary):
+    """Class describing a TwoBit format nucleotide file"""
+    edam_format = "format_3009"
+    edam_data = "data_0848"
+    file_ext = "twobit"
+
+    def sniff(self, filename):
+        try:
+            # All twobit files start with a 16-byte header. If the file is smaller than 16 bytes, it's obviously not a valid twobit file.
+            if os.path.getsize(filename) < 16:
+                return False
+            header = open(filename, 'rb').read(TWOBIT_MAGIC_SIZE)
+            magic = struct.unpack(">L", header)[0]
+            if magic == TWOBIT_MAGIC_NUMBER or magic == TWOBIT_MAGIC_NUMBER_SWAP:
+                return True
+            return False
+        except IOError:
+            return False
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary TwoBit format nucleotide file"
+            dataset.blurb = nice_size(dataset.get_size())
+        else:
+            return super(TwoBit, self).set_peek(dataset, is_multi_byte)
+
+    def display_peek(self, dataset):
+        try:
+            return dataset.peek
+        except:
+            return "Binary TwoBit format nucleotide file (%s)" % (nice_size(dataset.get_size()))
+
+
+Binary.register_sniffable_binary_format("twobit", "twobit", TwoBit)
+
+
+ at dataproviders.decorators.has_dataproviders
+class SQlite ( Binary ):
+    """Class describing a Sqlite database """
+    MetadataElement( name="tables", default=[], param=ListParameter, desc="Database Tables", readonly=True, visible=True, no_value=[] )
+    MetadataElement( name="table_columns", default={}, param=DictParameter, desc="Database Table Columns", readonly=True, visible=True, no_value={} )
+    MetadataElement( name="table_row_count", default={}, param=DictParameter, desc="Database Table Row Count", readonly=True, visible=True, no_value={} )
+    file_ext = "sqlite"
+    edam_format = "format_3621"
+
+    def init_meta( self, dataset, copy_from=None ):
+        Binary.init_meta( self, dataset, copy_from=copy_from )
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        try:
+            tables = []
+            columns = dict()
+            rowcounts = dict()
+            conn = sqlite.connect(dataset.file_name)
+            c = conn.cursor()
+            tables_query = "SELECT name,sql FROM sqlite_master WHERE type='table' ORDER BY name"
+            rslt = c.execute(tables_query).fetchall()
+            for table, sql in rslt:
+                tables.append(table)
+                try:
+                    col_query = 'SELECT * FROM %s LIMIT 0' % table
+                    cur = conn.cursor().execute(col_query)
+                    cols = [col[0] for col in cur.description]
+                    columns[table] = cols
+                except Exception as exc:
+                    log.warning( '%s, set_meta Exception: %s', self, exc )
+            for table in tables:
+                try:
+                    row_query = "SELECT count(*) FROM %s" % table
+                    rowcounts[table] = c.execute(row_query).fetchone()[0]
+                except Exception as exc:
+                    log.warning( '%s, set_meta Exception: %s', self, exc )
+            dataset.metadata.tables = tables
+            dataset.metadata.table_columns = columns
+            dataset.metadata.table_row_count = rowcounts
+        except Exception as exc:
+            log.warning( '%s, set_meta Exception: %s', self, exc )
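+
+    # Illustrative example (assumption, not from upstream) of what set_meta()
+    # above records for a database containing a single table created with
+    #     CREATE TABLE reads (id INTEGER, seq TEXT)
+    # before any rows are inserted:
+    #     dataset.metadata.tables          == ['reads']
+    #     dataset.metadata.table_columns   == {'reads': ['id', 'seq']}
+    #     dataset.metadata.table_row_count == {'reads': 0}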
+
+    def sniff( self, filename ):
+        # The first 16 bytes of any SQLite3 database file are 'SQLite format 3\0', and the file is binary. For details
+        # about the format, see http://www.sqlite.org/fileformat.html
+        try:
+            header = open(filename, 'rb').read(16)
+            if header == b'SQLite format 3\0':
+                return True
+            return False
+        except:
+            return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            lines = ['SQLite Database']
+            if dataset.metadata.tables:
+                for table in dataset.metadata.tables:
+                    try:
+                        lines.append('%s [%s]' % (table, dataset.metadata.table_row_count[table]))
+                    except:
+                        continue
+            dataset.peek = '\n'.join(lines)
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "SQLite Database (%s)" % ( nice_size( dataset.get_size() ) )
+
+    @dataproviders.decorators.dataprovider_factory( 'sqlite', dataproviders.dataset.SQliteDataProvider.settings )
+    def sqlite_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.dataset.SQliteDataProvider( dataset_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'sqlite-table', dataproviders.dataset.SQliteDataTableProvider.settings )
+    def sqlite_datatableprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.dataset.SQliteDataTableProvider( dataset_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'sqlite-dict', dataproviders.dataset.SQliteDataDictProvider.settings )
+    def sqlite_datadictprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.dataset.SQliteDataDictProvider( dataset_source, **settings )
+
+
+# Binary.register_sniffable_binary_format("sqlite", "sqlite", SQlite)
+
+
+class GeminiSQLite( SQlite ):
+    """Class describing a Gemini Sqlite database """
+    MetadataElement( name="gemini_version", default='0.10.0', param=MetadataParameter, desc="Gemini Version",
+                     readonly=True, visible=True, no_value='0.10.0' )
+    file_ext = "gemini.sqlite"
+    edam_format = "format_3622"
+    edam_data = "data_3498"
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        super( GeminiSQLite, self ).set_meta( dataset, overwrite=overwrite, **kwd )
+        try:
+            conn = sqlite.connect( dataset.file_name )
+            c = conn.cursor()
+            tables_query = "SELECT version FROM version"
+            result = c.execute( tables_query ).fetchall()
+            for version, in result:
+                dataset.metadata.gemini_version = version
+            # TODO: Can/should we detect even more attributes, such as whether a PED file was used or what the input annotation type was?
+        except Exception as e:
+            log.warning( '%s, set_meta Exception: %s', self, e )
+
+    def sniff( self, filename ):
+        if super( GeminiSQLite, self ).sniff( filename ):
+            gemini_table_names = [ "gene_detailed", "gene_summary", "resources", "sample_genotype_counts", "sample_genotypes", "samples",
+                                   "variant_impacts", "variants", "version" ]
+            try:
+                conn = sqlite.connect( filename )
+                c = conn.cursor()
+                tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
+                result = c.execute( tables_query ).fetchall()
+                result = [_[0] for _ in result]
+                for table_name in gemini_table_names:
+                    if table_name not in result:
+                        return False
+                return True
+            except Exception as e:
+                log.warning( '%s, sniff Exception: %s', self, e )
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Gemini SQLite Database, version %s" % ( dataset.metadata.gemini_version or 'unknown' )
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Gemini SQLite Database, version %s" % ( dataset.metadata.gemini_version or 'unknown' )
+
+
+class MzSQlite( SQlite ):
+    """Class describing a Proteomics Sqlite database """
+    file_ext = "mz.sqlite"
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        super( MzSQlite, self ).set_meta( dataset, overwrite=overwrite, **kwd )
+
+    def sniff( self, filename ):
+        if super( MzSQlite, self ).sniff( filename ):
+            mz_table_names = ["DBSequence", "Modification", "Peaks", "Peptide", "PeptideEvidence", "Score", "SearchDatabase", "Source", "SpectraData", "Spectrum", "SpectrumIdentification"]
+            try:
+                conn = sqlite.connect( filename )
+                c = conn.cursor()
+                tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
+                result = c.execute( tables_query ).fetchall()
+                result = [_[0] for _ in result]
+                for table_name in mz_table_names:
+                    if table_name not in result:
+                        return False
+                return True
+            except Exception as e:
+                log.warning( '%s, sniff Exception: %s', self, e )
+        return False
+
+
+class IdpDB( SQlite ):
+    """
+    Class describing an IDPicker 3 idpDB (sqlite) database
+
+    >>> from galaxy.datatypes.sniff import get_test_fname
+    >>> fname = get_test_fname( 'test.idpDB' )
+    >>> IdpDB().sniff( fname )
+    True
+    >>> fname = get_test_fname( 'interval.interval' )
+    >>> IdpDB().sniff( fname )
+    False
+    """
+    file_ext = "idpdb"
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        super( IdpDB, self ).set_meta( dataset, overwrite=overwrite, **kwd )
+
+    def sniff( self, filename ):
+        if super( IdpDB, self ).sniff( filename ):
+            mz_table_names = ["About", "Analysis", "AnalysisParameter", "PeptideSpectrumMatch", "Spectrum", "SpectrumSource"]
+            try:
+                conn = sqlite.connect( filename )
+                c = conn.cursor()
+                tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
+                result = c.execute( tables_query ).fetchall()
+                result = [_[0] for _ in result]
+                for table_name in mz_table_names:
+                    if table_name not in result:
+                        return False
+                return True
+            except Exception as e:
+                log.warning( '%s, sniff Exception: %s', self, e )
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "IDPickerDB SQLite file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "IDPickerDB SQLite file (%s)" % ( nice_size( dataset.get_size() ) )
+
+
+Binary.register_sniffable_binary_format( "gemini.sqlite", "gemini.sqlite", GeminiSQLite )
+Binary.register_sniffable_binary_format( "idpdb", "idpdb", IdpDB )
+Binary.register_sniffable_binary_format( "mz.sqlite", "mz.sqlite", MzSQlite )
+# FIXME: We need to register the specialized SQLite formats before the generic sqlite format because
+# register_sniffable_binary_format and is_sniffable_binary, as called in upload.py, ignore the sniff order declared in datatypes_conf.xml
+Binary.register_sniffable_binary_format("sqlite", "sqlite", SQlite)
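+# Why the order above matters, illustrated: a Gemini database is itself a valid
+# SQLite database, so its first 16 bytes already satisfy SQlite.sniff(). If the
+# generic format were registered first, GeminiSQLite.sniff() would never get the
+# chance to check for the Gemini-specific tables.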
+
+
+class Xlsx(Binary):
+    """Class for Excel 2007 (xlsx) files"""
+    file_ext = "xlsx"
+
+    def sniff( self, filename ):
+        # Xlsx is compressed in zip format and must not be uncompressed in Galaxy.
+        try:
+            if zipfile.is_zipfile( filename ):
+                tempzip = zipfile.ZipFile( filename )
+                if "[Content_Types].xml" in tempzip.namelist() and tempzip.read("[Content_Types].xml").find(b'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml') != -1:
+                    return True
+            return False
+        except:
+            return False
+
+
+Binary.register_sniffable_binary_format("xlsx", "xlsx", Xlsx)
+
+
+class Sra( Binary ):
+    """ Sequence Read Archive (SRA) datatype originally from mdshw5/sra-tools-galaxy"""
+    file_ext = 'sra'
+
+    def sniff( self, filename ):
+        """ The first 8 bytes of any NCBI sra file are 'NCBI.sra', and the file is binary.
+        For details about the format, see http://www.ncbi.nlm.nih.gov/books/n/helpsra/SRA_Overview_BK/#SRA_Overview_BK.4_SRA_Data_Structure
+        """
+        try:
+            header = open(filename, 'rb').read(8)
+            if header == b'NCBI.sra':
+                return True
+            else:
+                return False
+        except:
+            return False
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = 'Binary sra file'
+            dataset.blurb = nice_size(dataset.get_size())
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek(self, dataset):
+        try:
+            return dataset.peek
+        except:
+            return 'Binary sra file (%s)' % (nice_size(dataset.get_size()))
+
+
+Binary.register_sniffable_binary_format('sra', 'sra', Sra)
+
+
+class RData( Binary ):
+    """Generic R Data file datatype implementation"""
+    file_ext = 'RData'
+
+    def sniff( self, filename ):
+        rdata_header = b'RDX2\nX\n'
+        try:
+            header = open(filename, 'rb').read(7)
+            if header == rdata_header:
+                return True
+
+            header = gzip.open( filename ).read(7)
+            if header == rdata_header:
+                return True
+            return False
+        except:
+            return False
+
+
+Binary.register_sniffable_binary_format('RData', 'RData', RData)
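+# A hypothetical way (not from upstream docs) to produce a matching file from R:
+# R's save() writes this header, gzip-compressed by default, which is why sniff()
+# above checks both the raw and the gzip-decompressed first seven bytes:
+#     R -e 'x <- 1:10; save(x, file="test.RData")'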
+
+
+class OxliBinary(Binary):
+
+    @staticmethod
+    def _sniff(filename, oxlitype):
+        try:
+            with open(filename, 'rb') as fileobj:
+                header = fileobj.read(4)
+                if header == b'OXLI':
+                    fileobj.read(1)  # skip the version number
+                    ftype = fileobj.read(1)
+                    if binascii.hexlify(ftype) == oxlitype:
+                        return True
+            return False
+        except IOError:
+            return False
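+
+# Sketch (assumption, not upstream code) of the on-disk header that
+# OxliBinary._sniff() above inspects: four magic bytes, one version byte, then
+# one file-type byte whose hex value distinguishes the subclasses below.
+#
+#     import binascii
+#     header = b'OXLI' + b'\x04' + b'\x01'   # hypothetical version/type bytes
+#     assert binascii.hexlify(header[5:6]) == b'01'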
+
+
+class OxliCountGraph(OxliBinary):
+    """
+    OxliCountGraph starts with "OXLI" + one byte version number +
+    8-bit binary '1'
+    Test file generated via::
+
+        load-into-counting.py --n_tables 1 --max-tablesize 1 \\
+            oxli_countgraph.oxlicg khmer/tests/test-data/100-reads.fq.bz2
+
+    using khmer 2.0
+
+    >>> from galaxy.datatypes.sniff import get_test_fname
+    >>> fname = get_test_fname( 'sequence.csfasta' )
+    >>> OxliCountGraph().sniff( fname )
+    False
+    >>> fname = get_test_fname( "oxli_countgraph.oxlicg" )
+    >>> OxliCountGraph().sniff( fname )
+    True
+    """
+
+    def sniff(self, filename):
+        return OxliBinary._sniff(filename, b"01")
+
+
+Binary.register_sniffable_binary_format("oxli.countgraph", "oxlicg",
+                                        OxliCountGraph)
+
+
+class OxliNodeGraph(OxliBinary):
+    """
+    OxliNodeGraph starts with "OXLI" + one byte version number +
+    8-bit binary '2'
+    Test file generated via::
+
+        load-graph.py --n_tables 1 --max-tablesize 1 oxli_nodegraph.oxling \\
+            khmer/tests/test-data/100-reads.fq.bz2
+
+    using khmer 2.0
+
+    >>> from galaxy.datatypes.sniff import get_test_fname
+    >>> fname = get_test_fname( 'sequence.csfasta' )
+    >>> OxliNodeGraph().sniff( fname )
+    False
+    >>> fname = get_test_fname( "oxli_nodegraph.oxling" )
+    >>> OxliNodeGraph().sniff( fname )
+    True
+    """
+
+    def sniff(self, filename):
+        return OxliBinary._sniff(filename, b"02")
+
+
+Binary.register_sniffable_binary_format("oxli.nodegraph", "oxling",
+                                        OxliNodeGraph)
+
+
+class OxliTagSet(OxliBinary):
+    """
+    OxliTagSet starts with "OXLI" + one byte version number +
+    8-bit binary '3'
+    Test file generated via::
+
+        load-graph.py --n_tables 1 --max-tablesize 1 oxli_nodegraph.oxling \\
+            khmer/tests/test-data/100-reads.fq.bz2;
+        mv oxli_nodegraph.oxling.tagset oxli_tagset.oxlits
+
+    using khmer 2.0
+
+    >>> from galaxy.datatypes.sniff import get_test_fname
+    >>> fname = get_test_fname( 'sequence.csfasta' )
+    >>> OxliTagSet().sniff( fname )
+    False
+    >>> fname = get_test_fname( "oxli_tagset.oxlits" )
+    >>> OxliTagSet().sniff( fname )
+    True
+    """
+
+    def sniff(self, filename):
+        return OxliBinary._sniff(filename, b"03")
+
+
+Binary.register_sniffable_binary_format("oxli.tagset", "oxlits", OxliTagSet)
+
+
+class OxliStopTags(OxliBinary):
+    """
+    OxliStopTags starts with "OXLI" + one byte version number +
+    8-bit binary '4'
+    Test file adapted from khmer 2.0's
+    "khmer/tests/test-data/goodversion-k32.stoptags"
+
+    >>> from galaxy.datatypes.sniff import get_test_fname
+    >>> fname = get_test_fname( 'sequence.csfasta' )
+    >>> OxliStopTags().sniff( fname )
+    False
+    >>> fname = get_test_fname( "oxli_stoptags.oxlist" )
+    >>> OxliStopTags().sniff( fname )
+    True
+    """
+
+    def sniff(self, filename):
+        return OxliBinary._sniff(filename, b"04")
+
+
+Binary.register_sniffable_binary_format("oxli.stoptags", "oxlist",
+                                        OxliStopTags)
+
+
+class OxliSubset(OxliBinary):
+    """
+    OxliSubset starts with "OXLI" + one byte version number +
+    8-bit binary '5'
+    Test file generated via::
+
+        load-graph.py -k 20 example tests/test-data/random-20-a.fa;
+        partition-graph.py example;
+        mv example.subset.0.pmap oxli_subset.oxliss
+
+    using khmer 2.0
+
+    >>> from galaxy.datatypes.sniff import get_test_fname
+    >>> fname = get_test_fname( 'sequence.csfasta' )
+    >>> OxliSubset().sniff( fname )
+    False
+    >>> fname = get_test_fname( "oxli_subset.oxliss" )
+    >>> OxliSubset().sniff( fname )
+    True
+    """
+
+    def sniff(self, filename):
+        return OxliBinary._sniff(filename, b"05")
+
+
+Binary.register_sniffable_binary_format("oxli.subset", "oxliss", OxliSubset)
+
+
+class OxliGraphLabels(OxliBinary):
+    """
+    OxliGraphLabels starts with "OXLI" + one byte version number +
+    8-bit binary '6'
+    Test file generated via::
+
+        python -c "from khmer import GraphLabels; \\
+            gl = GraphLabels(20, 1e7, 4); \\
+            gl.consume_fasta_and_tag_with_labels('tests/test-data/test-labels.fa'); \\
+            gl.save_labels_and_tags('oxli_graphlabels.oxligl')"
+
+    using khmer 2.0
+
+    >>> from galaxy.datatypes.sniff import get_test_fname
+    >>> fname = get_test_fname( 'sequence.csfasta' )
+    >>> OxliGraphLabels().sniff( fname )
+    False
+    >>> fname = get_test_fname( "oxli_graphlabels.oxligl" )
+    >>> OxliGraphLabels().sniff( fname )
+    True
+    """
+
+    def sniff(self, filename):
+        return OxliBinary._sniff(filename, b"06")
+
+
+Binary.register_sniffable_binary_format("oxli.graphlabels", "oxligl",
+                                        OxliGraphLabels)
+
+
+class SearchGuiArchive ( CompressedArchive ):
+    """Class describing a SearchGUI archive """
+    MetadataElement( name="searchgui_version", default='1.28.0', param=MetadataParameter, desc="SearchGui Version",
+                     readonly=True, visible=True, no_value=None )
+    MetadataElement( name="searchgui_major_version", default='1', param=MetadataParameter, desc="SearchGui Major Version",
+                     readonly=True, visible=True, no_value=None )
+    file_ext = "searchgui_archive"
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        super( SearchGuiArchive, self ).set_meta( dataset, overwrite=overwrite, **kwd )
+        try:
+            if dataset and zipfile.is_zipfile( dataset.file_name ):
+                tempzip = zipfile.ZipFile( dataset.file_name )
+                if 'searchgui.properties' in tempzip.namelist():
+                    fh = tempzip.open('searchgui.properties')
+                    for line in fh:
+                        if line.startswith('searchgui.version'):
+                            version = line.split('=')[1].strip()
+                            dataset.metadata.searchgui_version = version
+                            dataset.metadata.searchgui_major_version = version.split('.')[0]
+                    fh.close()
+                tempzip.close()
+        except Exception as e:
+            log.warning( '%s, set_meta Exception: %s', self, e )
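+
+    # Illustrative (hypothetical) entry in searchgui.properties that the loop in
+    # set_meta() above parses via line.split('='):
+    #     searchgui.version=1.28.0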
+
+    def sniff( self, filename ):
+        try:
+            if filename and zipfile.is_zipfile( filename ):
+                tempzip = zipfile.ZipFile( filename, 'r' )
+                is_searchgui = 'searchgui.properties' in tempzip.namelist()
+                tempzip.close()
+                return is_searchgui
+        except Exception as e:
+            log.warning( '%s, sniff Exception: %s', self, e )
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "SearchGUI Archive, version %s" % ( dataset.metadata.searchgui_version or 'unknown' )
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "SearchGUI Archive, version %s" % ( dataset.metadata.searchgui_version or 'unknown' )
+
+
+Binary.register_sniffable_binary_format("searchgui_archive", "searchgui_archive", SearchGuiArchive)
+
+
+class NetCDF( Binary ):
+    """Binary data in netCDF format"""
+    file_ext = "netcdf"
+    edam_format = "format_3650"
+    edam_data = "data_0943"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary netCDF file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Binary netCDF file (%s)" % ( nice_size( dataset.get_size() ) )
+
+    def sniff( self, filename ):
+        try:
+            with open( filename, 'rb' ) as f:
+                header = f.read(3)
+            if header == b'CDF':
+                return True
+            return False
+        except:
+            return False
+
+
+Binary.register_sniffable_binary_format("netcdf", "netcdf", NetCDF)
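+# Note (an observation, not from upstream): classic netCDF files start with 'CDF'
+# plus a version byte (\x01 classic, \x02 64-bit offset), while netCDF-4 files
+# are HDF5 containers beginning with \x89HDF and are therefore not matched by
+# the sniffer above.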
diff --git a/lib/galaxy/datatypes/blast.py b/lib/galaxy/datatypes/blast.py
new file mode 100644
index 0000000..2d68cb3
--- /dev/null
+++ b/lib/galaxy/datatypes/blast.py
@@ -0,0 +1,311 @@
+# This file is now part of the Galaxy Project, but for historical reasons,
+# reflecting the time it was developed outside of the Galaxy Project, this
+# file is under the MIT license.
+#
+# The MIT License (MIT)
+# Copyright (c) 2012,2013,2014,2015,2016 Peter Cock
+# Copyright (c) 2012 Edward Kirton
+# Copyright (c) 2013 Nicola Soranzo
+# Copyright (c) 2014 Bjoern Gruening
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+#
+"""NCBI BLAST datatypes.
+
+Covers the ``blastxml`` format and the BLAST databases.
+"""
+
+import logging
+import os
+from time import sleep
+
+from galaxy.datatypes.data import get_file_peek
+from galaxy.datatypes.data import Data, Text
+from galaxy.datatypes.xml import GenericXml
+
+log = logging.getLogger(__name__)
+
+
+class BlastXml(GenericXml):
+    """NCBI Blast XML Output data"""
+    file_ext = "blastxml"
+    edam_format = "format_3331"
+    edam_data = "data_0857"
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = 'NCBI Blast XML data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff(self, filename):
+        """Determines whether the file is blastxml
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('megablast_xml_parser_test1.blastxml')
+        >>> BlastXml().sniff(fname)
+        True
+        >>> fname = get_test_fname('tblastn_four_human_vs_rhodopsin.xml')
+        >>> BlastXml().sniff(fname)
+        True
+        >>> fname = get_test_fname('interval.interval')
+        >>> BlastXml().sniff(fname)
+        False
+        """
+        # TODO - Use a context manager on Python 2.5+ to close handle
+        handle = open(filename)
+        line = handle.readline()
+        if line.strip() != '<?xml version="1.0"?>':
+            handle.close()
+            return False
+        line = handle.readline()
+        if line.strip() not in ['<!DOCTYPE BlastOutput PUBLIC "-//NCBI//NCBI BlastOutput/EN" "http://www.ncbi.nlm.nih.gov/dtd/NCBI_BlastOutput.dtd">',
+                                '<!DOCTYPE BlastOutput PUBLIC "-//NCBI//NCBI BlastOutput/EN" "NCBI_BlastOutput.dtd">']:
+            handle.close()
+            return False
+        line = handle.readline()
+        if line.strip() != '<BlastOutput>':
+            handle.close()
+            return False
+        handle.close()
+        return True
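+
+    # Putting the three checks together, the start of a sniffable file looks like
+    # (the DOCTYPE line may also use the full http://www.ncbi.nlm.nih.gov DTD URL):
+    #     <?xml version="1.0"?>
+    #     <!DOCTYPE BlastOutput PUBLIC "-//NCBI//NCBI BlastOutput/EN" "NCBI_BlastOutput.dtd">
+    #     <BlastOutput>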
+
+    def merge(split_files, output_file):
+        """Merge multiple BLAST XML files into a single valid BLAST XML file."""
+        if len(split_files) == 1:
+            # For one file only, use base class method (move/copy)
+            return Text.merge(split_files, output_file)
+        if not split_files:
+            raise ValueError("Given no BLAST XML files, %r, to merge into %s"
+                             % (split_files, output_file))
+        out = open(output_file, "w")
+        h = None
+        for f in split_files:
+            if not os.path.isfile(f):
+                log.warning("BLAST XML file %s missing, retry in 1s..." % f)
+                sleep(1)
+            if not os.path.isfile(f):
+                log.error("BLAST XML file %s missing" % f)
+                raise ValueError("BLAST XML file %s missing" % f)
+            h = open(f)
+            header = h.readline()
+            if not header:
+                out.close()
+                h.close()
+                # Retry, could be transient error with networked file system...
+                log.warning("BLAST XML file %s empty, retry in 1s..." % f)
+                sleep(1)
+                h = open(f)
+                header = h.readline()
+                if not header:
+                    log.error("BLAST XML file %s was empty" % f)
+                    raise ValueError("BLAST XML file %s was empty" % f)
+            if header.strip() != '<?xml version="1.0"?>':
+                out.write(header)  # for diagnosis
+                out.close()
+                h.close()
+                raise ValueError("%s is not an XML file!" % f)
+            line = h.readline()
+            header += line
+            if line.strip() not in ['<!DOCTYPE BlastOutput PUBLIC "-//NCBI//NCBI BlastOutput/EN" "http://www.ncbi.nlm.nih.gov/dtd/NCBI_BlastOutput.dtd">',
+                                    '<!DOCTYPE BlastOutput PUBLIC "-//NCBI//NCBI BlastOutput/EN" "NCBI_BlastOutput.dtd">']:
+                out.write(header)  # for diagnosis
+                out.close()
+                h.close()
+                raise ValueError("%s is not a BLAST XML file!" % f)
+            while True:
+                line = h.readline()
+                if not line:
+                    out.write(header)  # for diagnosis
+                    out.close()
+                    h.close()
+                    raise ValueError("BLAST XML file %s ended prematurely" % f)
+                header += line
+                if "<Iteration>" in line:
+                    break
+                if len(header) > 10000:
+                    # Something has gone wrong, don't load too much into memory!
+                    # Write what we have to the merged file for diagnostics
+                    out.write(header)
+                    out.close()
+                    h.close()
+                    raise ValueError("The header in BLAST XML file %s is too long" % f)
+            if "<BlastOutput>" not in header:
+                out.close()
+                h.close()
+                raise ValueError("%s is not a BLAST XML file:\n%s\n..." % (f, header))
+            if f == split_files[0]:
+                out.write(header)
+                old_header = header
+            elif old_header[:300] != header[:300]:
+                # Enough to check <BlastOutput_program> and <BlastOutput_version> match
+                out.close()
+                h.close()
+                raise ValueError("BLAST XML headers don't match for %s and %s - have:\n%s\n...\n\nAnd:\n%s\n...\n"
+                                 % (split_files[0], f, old_header[:300], header[:300]))
+            else:
+                out.write("    <Iteration>\n")
+            for line in h:
+                if "</BlastOutput_iterations>" in line:
+                    break
+                # TODO - Increment <Iteration_iter-num> and if required automatic query names
+                # like <Iteration_query-ID>Query_3</Iteration_query-ID> to be increasing?
+                out.write(line)
+            h.close()
+        out.write("  </BlastOutput_iterations>\n")
+        out.write("</BlastOutput>\n")
+        out.close()
+    merge = staticmethod(merge)
+
+
+class _BlastDb(object):
+    """Base class for BLAST database datatype."""
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        """Set the peek and blurb text."""
+        if not dataset.dataset.purged:
+            dataset.peek = "BLAST database (multiple files)"
+            dataset.blurb = "BLAST database (multiple files)"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek(self, dataset):
+        """Create HTML content, used for displaying peek."""
+        try:
+            return dataset.peek
+        except Exception:
+            return "BLAST database (multiple files)"
+
+    def display_data(self, trans, data, preview=False, filename=None,
+                     to_ext=None, size=None, offset=None, **kwd):
+        """Documented as an old display method, but still gets called via tests etc.
+
+        This allows us to format the data shown in the central pane via the "eye" icon.
+        """
+        if filename is not None and filename != "index":
+            # Change nothing - important for the unit tests to access child files:
+            return Data.display_data(self, trans, data, preview, filename,
+                                     to_ext, size, offset, **kwd)
+        if self.file_ext == "blastdbn":
+            title = "This is a nucleotide BLAST database"
+        elif self.file_ext == "blastdbp":
+            title = "This is a protein BLAST database"
+        elif self.file_ext == "blastdbd":
+            title = "This is a domain BLAST database"
+        else:
+            # Error?
+            title = "This is a BLAST database."
+        msg = ""
+        try:
+            # Try to use any text recorded in the dummy index file:
+            handle = open(data.file_name, "rU")
+            msg = handle.read().strip()
+            handle.close()
+        except Exception:
+            pass
+        if not msg:
+            msg = title
+        # Galaxy assumes HTML for the display of composite datatypes,
+        return "<html><head><title>%s</title></head><body><pre>%s</pre></body></html>" % (title, msg)
+
+    def merge(split_files, output_file):
+        """Merge BLAST databases (not implemented for now)."""
+        raise NotImplementedError("Merging BLAST databases is non-trivial (do this via makeblastdb?)")
+    merge = staticmethod(merge)
+
+    def split(cls, input_datasets, subdir_generator_function, split_params):
+        """Split a BLAST database (not implemented for now)."""
+        if split_params is None:
+            return None
+        raise NotImplementedError("Can't split BLAST databases")
+    split = classmethod(split)
+
+
+class BlastNucDb(_BlastDb, Data):
+    """Class for nucleotide BLAST database files."""
+    file_ext = 'blastdbn'
+    allow_datatype_change = False
+    composite_type = 'basic'
+
+    def __init__(self, **kwd):
+        Data.__init__(self, **kwd)
+        self.add_composite_file('blastdb.nhr', is_binary=True)  # sequence headers
+        self.add_composite_file('blastdb.nin', is_binary=True)  # index file
+        self.add_composite_file('blastdb.nsq', is_binary=True)  # nucleotide sequences
+        self.add_composite_file('blastdb.nal', is_binary=False, optional=True)  # alias ( -gi_mask option of makeblastdb)
+        self.add_composite_file('blastdb.nhd', is_binary=True, optional=True)  # sorted sequence hash values ( -hash_index option of makeblastdb)
+        self.add_composite_file('blastdb.nhi', is_binary=True, optional=True)  # index of sequence hash values ( -hash_index option of makeblastdb)
+        self.add_composite_file('blastdb.nnd', is_binary=True, optional=True)  # sorted GI values ( -parse_seqids option of makeblastdb and gi present in the description lines)
+        self.add_composite_file('blastdb.nni', is_binary=True, optional=True)  # index of GI values ( -parse_seqids option of makeblastdb and gi present in the description lines)
+        self.add_composite_file('blastdb.nog', is_binary=True, optional=True)  # OID->GI lookup file ( -hash_index or -parse_seqids option of makeblastdb)
+        self.add_composite_file('blastdb.nsd', is_binary=True, optional=True)  # sorted sequence accession values ( -hash_index or -parse_seqids option of makeblastdb)
+        self.add_composite_file('blastdb.nsi', is_binary=True, optional=True)  # index of sequence accession values ( -hash_index or -parse_seqids option of makeblastdb)
+#        self.add_composite_file('blastdb.00.idx', is_binary=True, optional=True)  # first volume of the MegaBLAST index generated by makembindex
+# The previous line should be repeated for each index volume, with filename extensions like '.01.idx', '.02.idx', etc.
+        self.add_composite_file('blastdb.shd', is_binary=True, optional=True)  # MegaBLAST index superheader (-old_style_index false option of makembindex)
+#        self.add_composite_file('blastdb.naa', is_binary=True, optional=True)  # index of a WriteDB column for e.g. mask data
+#        self.add_composite_file('blastdb.nab', is_binary=True, optional=True)  # data of a WriteDB column
+#        self.add_composite_file('blastdb.nac', is_binary=True, optional=True)  # multiple byte order for a WriteDB column
+# The previous 3 lines should be repeated for each WriteDB column, with filename extensions like ('.nba', '.nbb', '.nbc'), ('.nca', '.ncb', '.ncc'), etc.
+
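+# An illustrative makeblastdb invocation (not from upstream) that produces the
+# required .nhr/.nin/.nsq files listed above from a nucleotide FASTA input:
+#     makeblastdb -in sequences.fasta -dbtype nucl -out blastdb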
+
+class BlastProtDb(_BlastDb, Data):
+    """Class for protein BLAST database files."""
+    file_ext = 'blastdbp'
+    allow_datatype_change = False
+    composite_type = 'basic'
+
+    def __init__(self, **kwd):
+        Data.__init__(self, **kwd)
+# Component file comments are as in BlastNucDb except where noted
+        self.add_composite_file('blastdb.phr', is_binary=True)
+        self.add_composite_file('blastdb.pin', is_binary=True)
+        self.add_composite_file('blastdb.psq', is_binary=True)  # protein sequences
+        self.add_composite_file('blastdb.phd', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.phi', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.pnd', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.pni', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.pog', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.psd', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.psi', is_binary=True, optional=True)
+#        self.add_composite_file('blastdb.paa', is_binary=True, optional=True)
+#        self.add_composite_file('blastdb.pab', is_binary=True, optional=True)
+#        self.add_composite_file('blastdb.pac', is_binary=True, optional=True)
+# The last 3 lines should be repeated for each WriteDB column, with filename extensions like ('.pba', '.pbb', '.pbc'), ('.pca', '.pcb', '.pcc'), etc.
+
+
+class BlastDomainDb(_BlastDb, Data):
+    """Class for domain BLAST database files."""
+    file_ext = 'blastdbd'
+    allow_datatype_change = False
+    composite_type = 'basic'
+
+    def __init__(self, **kwd):
+        Data.__init__(self, **kwd)
+        self.add_composite_file('blastdb.phr', is_binary=True)
+        self.add_composite_file('blastdb.pin', is_binary=True)
+        self.add_composite_file('blastdb.psq', is_binary=True)
+        self.add_composite_file('blastdb.freq', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.loo', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.psd', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.psi', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.rps', is_binary=True, optional=True)
+        self.add_composite_file('blastdb.aux', is_binary=True, optional=True)
diff --git a/lib/galaxy/datatypes/checkers.py b/lib/galaxy/datatypes/checkers.py
new file mode 100644
index 0000000..ab2fce2
--- /dev/null
+++ b/lib/galaxy/datatypes/checkers.py
@@ -0,0 +1,25 @@
+"""Module proxies :mod:`galaxy.util.checkers` for backward compatibility.
+
+External datatypes may make use of these functions.
+"""
+from galaxy.util.checkers import (
+    check_binary,
+    check_bz2,
+    check_gzip,
+    check_html,
+    check_image,
+    check_zip,
+    is_bz2,
+    is_gzip,
+)
+
+__all__ = (
+    'check_binary',
+    'check_bz2',
+    'check_gzip',
+    'check_html',
+    'check_image',
+    'check_zip',
+    'is_gzip',
+    'is_bz2',
+)
diff --git a/lib/galaxy/datatypes/chrominfo.py b/lib/galaxy/datatypes/chrominfo.py
new file mode 100644
index 0000000..73ec75a
--- /dev/null
+++ b/lib/galaxy/datatypes/chrominfo.py
@@ -0,0 +1,11 @@
+from __future__ import absolute_import
+
+import galaxy.datatypes.tabular
+import galaxy.datatypes.metadata
+from galaxy.datatypes.metadata import MetadataElement
+
+
+class ChromInfo( galaxy.datatypes.tabular.Tabular ):
+    file_ext = "len"
+    MetadataElement( name="chrom", default=1, desc="Chrom column", param=galaxy.datatypes.metadata.ColumnParameter )
+    MetadataElement( name="length", default=2, desc="Length column", param=galaxy.datatypes.metadata.ColumnParameter )
diff --git a/lib/galaxy/datatypes/constructive_solid_geometry.py b/lib/galaxy/datatypes/constructive_solid_geometry.py
new file mode 100644
index 0000000..83b5403
--- /dev/null
+++ b/lib/galaxy/datatypes/constructive_solid_geometry.py
@@ -0,0 +1,467 @@
+"""
+Constructive Solid Geometry file formats.
+"""
+import abc
+
+from galaxy import util
+from galaxy.datatypes import data
+from galaxy.datatypes.binary import Binary
+from galaxy.datatypes.data import get_file_peek
+from galaxy.datatypes.data import nice_size
+from galaxy.datatypes.metadata import MetadataElement
+
+MAX_HEADER_LINES = 500
+MAX_LINE_LEN = 2000
+COLOR_OPTS = ['COLOR_SCALARS', 'red', 'green', 'blue']
+
+
+class Ply(object):
+    """
+    The PLY format describes an object as a collection of vertices,
+    faces and other elements, along with properties such as color and
+    normal direction that can be attached to these elements.  A PLY
+    file contains the description of exactly one object.
+    """
+    subtype = ''
+    # Add metadata elements.
+    MetadataElement(name="file_format", default=None, desc="File format",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="vertex", default=None, desc="Vertex",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="face", default=None, desc="Face",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="other_elements", default=[], desc="Other elements",
+                    readonly=True, optional=True, visible=True, no_value=[])
+
+    @abc.abstractmethod
+    def __init__(self, **kwd):
+        raise NotImplementedError
+
+    def sniff(self, filename):
+        """
+        The structure of a typical PLY file:
+        Header, Vertex List, Face List, (lists of other elements)
+        """
+        with open(filename, "r") as fh:
+            return self._is_ply_header(fh, self.subtype)
+
+    def _is_ply_header(self, fh, subtype):
+        """
+        The header is a series of carriage-return terminated lines of
+        text that describe the remainder of the file.
+        """
+        valid_header_items = ['comment', 'obj_info', 'element', 'property']
+        # Line 1: ply
+        line = get_next_line(fh)
+        if line != 'ply':
+            return False
+        # Line 2: format ascii 1.0
+        line = get_next_line(fh)
+        if line.find(subtype) < 0:
+            return False
+        stop_index = 0
+        while True:
+            line = get_next_line(fh)
+            stop_index += 1
+            if line == 'end_header':
+                return True
+            items = line.split()
+            if items[0] not in valid_header_items:
+                return False
+            if stop_index > MAX_HEADER_LINES:
+                # If this is a PLY file, there must be an unusually
+                # large number of comments.
+                break
+        return False
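+
+    # A minimal ASCII header that _is_ply_header() accepts (illustrative, not a
+    # test fixture):
+    #     ply
+    #     format ascii 1.0
+    #     element vertex 8
+    #     property float x
+    #     end_header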
+
+    def set_meta(self, dataset, **kwd):
+        if dataset.has_data():
+            with open(dataset.file_name) as fh:
+                for line in fh:
+                    line = line.strip()
+                    if not line:
+                        continue
+                    if line.startswith('format'):
+                        items = line.split()
+                        dataset.metadata.file_format = items[1]
+                    elif line == 'end_header':
+                        # Metadata is complete.
+                        break
+                    elif line.startswith('element'):
+                        items = line.split()
+                        if items[1] == 'face':
+                            dataset.metadata.face = int(items[2])
+                        elif items[1] == 'vertex':
+                            dataset.metadata.vertex = int(items[2])
+                        else:
+                            element_tuple = (items[1], int(items[2]))
+                            dataset.metadata.other_elements.append(element_tuple)
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = "Faces: %s, Vertices: %s" % (str(dataset.metadata.face), str(dataset.metadata.vertex))
+        else:
+            dataset.peek = 'File does not exist'
+            dataset.blurb = 'File purged from disc'
+
+    def display_peek(self, dataset):
+        try:
+            return dataset.peek
+        except:
+            return "Ply file (%s)" % (nice_size(dataset.get_size()))
+
+
+class PlyAscii(Ply, data.Text):
+    file_ext = "plyascii"
+    subtype = 'ascii'
+
+    def __init__(self, **kwd):
+        data.Text.__init__(self, **kwd)
+
+
+class PlyBinary(Ply, Binary):
+    file_ext = "plybinary"
+    subtype = 'binary'
+
+    def __init__(self, **kwd):
+        Binary.__init__(self, **kwd)
+
+
+Binary.register_sniffable_binary_format("plybinary", "plybinary", PlyBinary)
+
+
+class Vtk(object):
+    r"""
+    The Visualization Toolkit provides a number of source and writer objects to
+    read and write popular data file formats. The Visualization Toolkit also
+    provides some of its own file formats.
+
+    There are two different styles of file formats available in VTK. The simplest
+    are the legacy, serial formats that are easy to read and write either by hand
+    or programmatically. However, these formats are less flexible than the XML
+    based file formats which support random access, parallel I/O, and portable
+    data compression and are preferred to the serial VTK file formats whenever
+    possible.
+
+    All keyword phrases are written in ASCII form whether the file is binary or
+    ASCII. The binary section of the file (if in binary form) is the data proper;
+    i.e., the numbers that define points coordinates, scalars, cell indices, and
+    so forth.
+
+    Binary data must be placed into the file immediately after the newline
+    ('\\n') character from the previous ASCII keyword and parameter sequence.
+
+    TODO: only legacy formats are currently supported and support for XML formats
+    should be added.
+    """
+    subtype = ''
+    # Add metadata elements.
+    MetadataElement(name="vtk_version", default=None, desc="Vtk version",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="file_format", default=None, desc="File format",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="dataset_type", default=None, desc="Dataset type",
+                    readonly=True, optional=True, visible=True, no_value=None)
+
+    # STRUCTURED_GRID data_type.
+    MetadataElement(name="dimensions", default=[], desc="Dimensions",
+                    readonly=True, optional=True, visible=True, no_value=[])
+    MetadataElement(name="origin", default=[], desc="Origin",
+                    readonly=True, optional=True, visible=True, no_value=[])
+    MetadataElement(name="spacing", default=[], desc="Spacing",
+                    readonly=True, optional=True, visible=True, no_value=[])
+
+    # POLYDATA data_type (the Points element is also a component of UNSTRUCTURED_GRID).
+    MetadataElement(name="points", default=None, desc="Points",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="vertices", default=None, desc="Vertices",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="lines", default=None, desc="Lines",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="polygons", default=None, desc="Polygons",
+                    readonly=True, optional=True, visible=True, no_value=None)
+    MetadataElement(name="triangle_strips", default=None, desc="Triangle strips",
+                    readonly=True, optional=True, visible=True, no_value=None)
+
+    # UNSTRUCTURED_GRID data_type.
+    MetadataElement(name="cells", default=None, desc="Cells",
+                    readonly=True, optional=True, visible=True, no_value=None)
+
+    # Additional elements not categorized by data_type.
+    MetadataElement(name="field_names", default=[], desc="Field names",
+                    readonly=True, optional=True, visible=True, no_value=[])
+    # The keys in the field_components map to the list of field_names in the above element
+    # which ensures order for select list options that are built from it.
+    MetadataElement(name="field_components", default={}, desc="Field names and components",
+                    readonly=True, optional=True, visible=True, no_value={})
+
+    @abc.abstractmethod
+    def __init__(self, **kwd):
+        raise NotImplementedError
+
+    def sniff(self, filename):
+        """
+        VTK files can be either ASCII or binary, with two different
+        styles of file formats: legacy or XML.  We'll assume if the
+        file contains a valid VTK header, then it is a valid VTK file.
+        """
+        with open(filename, "r") as fh:
+            return self._is_vtk_header(fh, self.subtype)
+
+    def _is_vtk_header(self, fh, subtype):
+        """
+        The Header section consists of at least 4, but possibly
+        5 lines.  This is tricky because sometimes the 4th line
+        is blank (in which case the 5th line consists of the
+        data_kind) or the 4th line consists of the data_kind (in
+        which case the 5th line is blank).
+        """
+
+        data_kinds = ['STRUCTURED_GRID', 'POLYDATA', 'UNSTRUCTURED_GRID']
+
+        def check_data_kind(line):
+            for data_kind in data_kinds:
+                if line.find(data_kind) >= 0:
+                    return True
+            return False
+
+        # Line 1: vtk DataFile Version 3.0
+        line = get_next_line(fh)
+        if line.find('vtk') < 0:
+            return False
+        # Line 2: can be anything - skip it
+        line = get_next_line(fh)
+        # Line 3: ASCII or BINARY
+        line = get_next_line(fh)
+        if line.find(subtype) < 0:
+            return False
+        # Line 4:
+        line = get_next_line(fh)
+        if line:
+            return check_data_kind(line)
+        # line 5:
+        line = get_next_line(fh)
+        if line:
+            return check_data_kind(line)
+        return False
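+
+    # A minimal legacy header that _is_vtk_header() accepts (illustrative):
+    #     # vtk DataFile Version 3.0
+    #     Some descriptive title line
+    #     ASCII
+    #     DATASET POLYDATA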
+
+    def set_meta(self, dataset, **kwd):
+        if dataset.has_data():
+            dataset.metadata.field_names = []
+            dataset.metadata.field_components = {}
+            dataset_type = None
+            field_components = {}
+            dataset_structure_complete = False
+            processing_field_section = False
+            with open(dataset.file_name) as fh:
+                for i, line in enumerate(fh):
+                    line = line.strip()
+                    if not line:
+                        continue
+                    if i < 3:
+                        dataset = self.set_initial_metadata(i, line, dataset)
+                    elif dataset.metadata.file_format == 'ASCII' or not util.is_binary(line):
+                        if dataset_structure_complete:
+                            """
+                            The final part of legacy VTK files describes the dataset attributes.
+                            This part begins with the keywords POINT_DATA or CELL_DATA, followed
+                            by an integer number specifying the number of points or cells,
+                            respectively. Other keyword/data combinations then define the actual
+                            dataset attribute values (i.e., scalars, vectors, tensors, normals,
+                            texture coordinates, or field data).  Dataset attributes are supported
+                            for both points and cells.
+
+                            Each type of attribute data has a dataName associated with it. This is
+                            a character string (without embedded whitespace) used to identify a
+                            particular data.  The dataName is used by the VTK readers to extract
+                            data. As a result, more than one attribute data of the same type can be
+                            included in a file.  For example, two different scalar fields defined
+                            on the dataset points, pressure and temperature, can be contained in
+                            the same file.  If the appropriate dataName is not specified in the VTK
+                            reader, then the first data of that type is extracted from the file.
+                            """
+                            items = line.split()
+                            if items[0] == 'SCALARS':
+                                # Example: SCALARS surface_field double 3
+                                # Scalar definition includes specification of a lookup table. The
+                                # definition of a lookup table is optional. If not specified, the
+                                # default VTK table will be used, and tableName should be
+                                # "default". Also note that the numComp variable is optional.  By
+                                # default the number of components is equal to one.  The parameter
+                                # numComp must range between (1,4) inclusive; in versions of VTK
+                                # prior to vtk2.3 this parameter was not supported.
+                                field_name = items[1]
+                                dataset.metadata.field_names.append(field_name)
+                                try:
+                                    num_components = int(items[-1])
+                                except:
+                                    num_components = 1
+                                field_component_indexes = [str(i) for i in range(num_components)]
+                                field_components[field_name] = field_component_indexes
+                            elif items[0] == 'FIELD':
+                                # The dataset consists of CELL_DATA.
+                                # FIELD FieldData 2
+                                processing_field_section = True
+                                num_fields = int(items[-1])
+                                fields_processed = []
+                            elif processing_field_section:
+                                if len(fields_processed) == num_fields:
+                                    processing_field_section = False
+                                else:
+                                    try:
+                                        float(items[0])
+                                        # Don't process the cell data.
+                                        # 0.0123457 0.197531
+                                    except:
+                                        # Line consists of arrayName numComponents numTuples dataType.
+                                        # Example: surface_field1 1 12 double
+                                        field_name = items[0]
+                                        dataset.metadata.field_names.append(field_name)
+                                        num_components = int(items[1])
+                                        field_component_indexes = [str(i) for i in range(num_components)]
+                                        field_components[field_name] = field_component_indexes
+                                        fields_processed.append(field_name)
+                        elif line.startswith('CELL_DATA'):
+                            # CELL_DATA 3188
+                            dataset_structure_complete = True
+                            dataset.metadata.cells = int(line.split()[1])
+                        elif line.startswith('POINT_DATA'):
+                            # POINT_DATA 1876
+                            dataset_structure_complete = True
+                            dataset.metadata.points = int(line.split()[1])
+                        else:
+                            dataset, dataset_type = self.set_structure_metadata(line, dataset, dataset_type)
+            if len(field_components) > 0:
+                dataset.metadata.field_components = field_components
+
+    def set_initial_metadata(self, i, line, dataset):
+        if i == 0:
+            # The first part of legacy VTK files is the file version and
+            # identifier. This part contains the single line:
+            # # vtk DataFile Version X.Y
+            dataset.metadata.vtk_version = line.lower().split('version')[1]
+            # The second part of legacy VTK files is the header. The header
+            # consists of a character string terminated by end-of-line
+            # character \n. The header is 256 characters maximum. The header
+            # can be used to describe the data and include any other pertinent
+            # information.  We skip the header line...
+        elif i == 2:
+            # The third part of legacy VTK files is the file format.  The file
+            # format describes the type of file, either ASCII or binary. On
+            # this line the single word ASCII or BINARY must appear.
+            dataset.metadata.file_format = line
+        return dataset
+
+    def set_structure_metadata(self, line, dataset, dataset_type):
+        """
+        The fourth part of legacy VTK files is the dataset structure. The
+        geometry part describes the geometry and topology of the dataset.
+        This part begins with a line containing the keyword DATASET followed
+        by a keyword describing the type of dataset.  Then, depending upon
+        the type of dataset, other keyword/ data combinations define the
+        actual data.
+        """
+        if dataset_type is None and line.startswith('DATASET'):
+            dataset_type = line.split()[1]
+            dataset.metadata.dataset_type = dataset_type
+        if dataset_type == 'STRUCTURED_GRID':
+            # The STRUCTURED_GRID format supports 1D, 2D, and 3D structured
+            # grid datasets.  The dimensions nx, ny, nz must be greater
+            # than or equal to 1.  The point coordinates are defined by the
+            # data in the POINTS section. This consists of x-y-z data values
+            # for each point.
+            if line.startswith('DIMENSIONS'):
+                # DIMENSIONS 10 5 1
+                dataset.metadata.dimensions = [line.split()[1:]]
+            elif line.startswith('ORIGIN'):
+                # ORIGIN 0 0 0
+                dataset.metadata.origin = [line.split()[1:]]
+            elif line.startswith('SPACING'):
+                # SPACING 1 1 1
+                dataset.metadata.spacing = [line.split()[1:]]
+        elif dataset_type == 'POLYDATA':
+            # The polygonal dataset consists of arbitrary combinations
+            # of surface graphics primitives vertices, lines, polygons
+            # and triangle strips.  Polygonal data is defined by the POINTS,
+            # VERTICES, LINES, POLYGONS, or TRIANGLE_STRIPS sections.
+            if line.startswith('POINTS'):
+                # POINTS 18 float
+                dataset.metadata.points = int(line.split()[1])
+            elif line.startswith('VERTICES'):
+                dataset.metadata.vertices = int(line.split()[1])
+            elif line.startswith('LINES'):
+                # LINES 5 17
+                dataset.metadata.lines = int(line.split()[1])
+            elif line.startswith('POLYGONS'):
+                # POLYGONS 6 30
+                dataset.metadata.polygons = int(line.split()[1])
+            elif line.startswith('TRIANGLE_STRIPS'):
+                # TRIANGLE_STRIPS 2212 16158
+                dataset.metadata.triangle_strips = int(line.split()[1])
+        elif dataset_type == 'UNSTRUCTURED_GRID':
+            # The unstructured grid dataset consists of arbitrary combinations
+            # of any possible cell type. Unstructured grids are defined by points,
+            # cells, and cell types.
+            if line.startswith('POINTS'):
+                # POINTS 18 float
+                dataset.metadata.points = int(line.split()[1])
+            if line.startswith('CELLS'):
+                # CELLS 756 3024
+                dataset.metadata.cells = int(line.split()[1])
+        return dataset, dataset_type
+
+    def get_blurb(self, dataset):
+        blurb = ""
+        if dataset.metadata.vtk_version is not None:
+            blurb += 'VTK Version %s' % str(dataset.metadata.vtk_version)
+        if dataset.metadata.dataset_type is not None:
+            if blurb:
+                blurb += ' '
+            blurb += str(dataset.metadata.dataset_type)
+        return blurb or 'VTK data'
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = self.get_blurb(dataset)
+        else:
+            dataset.peek = 'File does not exist'
+            dataset.blurb = 'File purged from disk'
+
+    def display_peek(self, dataset):
+        try:
+            return dataset.peek
+        except Exception:
+            return "Vtk file (%s)" % (nice_size(dataset.get_size()))
+
+
+class VtkAscii(Vtk, data.Text):
+    file_ext = "vtkascii"
+    subtype = 'ASCII'
+
+    def __init__(self, **kwd):
+        data.Text.__init__(self, **kwd)
+
+
+class VtkBinary(Vtk, Binary):
+    file_ext = "vtkbinary"
+    subtype = 'BINARY'
+
+    def __init__(self, **kwd):
+        Binary.__init__(self, **kwd)
+
+
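+# Register the binary subtype so Galaxy's upload sniffer can auto-detect it.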
+Binary.register_sniffable_binary_format("vtkbinary", "vtkbinary", VtkBinary)
+
+
+# Utility functions
+def get_next_line(fh):
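+    # Read at most MAX_LINE_LEN bytes (defined earlier in this module) so a
+    # malformed or binary line cannot trigger an unbounded read.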
+    line = fh.readline(MAX_LINE_LEN)
+    return line.strip()
diff --git a/lib/galaxy/datatypes/converters/__init__.py b/lib/galaxy/datatypes/converters/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/datatypes/converters/bam_to_bai.xml b/lib/galaxy/datatypes/converters/bam_to_bai.xml
new file mode 100644
index 0000000..3f6d363
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bam_to_bai.xml
@@ -0,0 +1,16 @@
+<tool id="CONVERTER_Bam_Bai_0" name="Bam to Bai" hidden="true">
+  <requirements>
+      <requirement type="package">samtools</requirement>
+  </requirements>
+  <command>samtools index $input1 $output1</command>
+  <inputs>
+    <page>
+      <param format="bam" name="input1" type="data" label="Choose BAM"/>
+    </page>
+   </inputs>
+   <outputs>
+      <data format="bai" name="output1"/>
+   </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml b/lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml
new file mode 100644
index 0000000..6307bcf
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml
@@ -0,0 +1,27 @@
+<tool id="CONVERTER_bam_to_bigwig_0" name="Convert BAM to BigWig" version="1.0.0" hidden="true">
+    <!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+    <requirements>
+        <requirement type="package">ucsc_tools</requirement>
+        <requirement type="package">bedtools</requirement>
+    </requirements>
+    <command><![CDATA[
+        bedtools genomecov -bg -split -ibam '$input' -g '$chromInfo'
+
+        | LC_COLLATE=C sort -k1,1 -k2,2n
+
+        ## Streaming the bedgraph file to wigToBigWig is fast but very memory intensive; hence, this
+        ## should only be used on systems with large RAM.
+        ## | wigToBigWig stdin '$chromInfo' '$output'
+
+        ## This can be used anywhere.
+        > temp.bg && bedGraphToBigWig temp.bg '$chromInfo' '$output']]>
+    </command>
+    <inputs>
+        <param format="bam" name="input" type="data" label="Choose BAM file"/>
+    </inputs>
+    <outputs>
+        <data format="bigwig" name="output"/>
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bcf_bgzip_to_bcf_converter.py b/lib/galaxy/datatypes/converters/bcf_bgzip_to_bcf_converter.py
new file mode 100644
index 0000000..35dd32a
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bcf_bgzip_to_bcf_converter.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+"""
+Uses bcftools to extract a bcf from a bcf.gz
+
+usage: %prog in_file out_file
+"""
+import optparse
+import subprocess
+
+
+def main():
+    # Read options, args.
+    parser = optparse.OptionParser()
+    (options, args) = parser.parse_args()
+    input_fname, output_fname = args
+
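+    # "-O u" asks bcftools view for uncompressed BCF output.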
+    subprocess.call(["bcftools", "view", input_fname, "-o", output_fname, "-O", "u"])
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/bcf_bgzip_to_bcf_converter.xml b/lib/galaxy/datatypes/converters/bcf_bgzip_to_bcf_converter.xml
new file mode 100644
index 0000000..90b67ca
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bcf_bgzip_to_bcf_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_bcf_bgzip_to_bcf" name="Convert BCF_BGZIP to BCF" version="0.0.1" hidden="false">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">bcf_bgzip_to_bcf_converter.py '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="bcf_bgzip" name="input1" type="data" label="Choose bcf_bgzip file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bcf" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bcf_to_bcf_bgzip_converter.py b/lib/galaxy/datatypes/converters/bcf_to_bcf_bgzip_converter.py
new file mode 100644
index 0000000..ba52f01
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bcf_to_bcf_bgzip_converter.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+"""
+Uses bcftools to compress a bcf into a bcf.gz
+
+usage: %prog in_file out_file
+"""
+import optparse
+import subprocess
+
+
+def main():
+    # Read options, args.
+    parser = optparse.OptionParser()
+    (options, args) = parser.parse_args()
+    input_fname, output_fname = args
+
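+    # "-O b" asks bcftools view for compressed (binary) BCF output.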
+    subprocess.call(["bcftools", "view", input_fname, "-o", output_fname, "-O", "b"])
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/bcf_to_bcf_bgzip_converter.xml b/lib/galaxy/datatypes/converters/bcf_to_bcf_bgzip_converter.xml
new file mode 100644
index 0000000..5aea96f
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bcf_to_bcf_bgzip_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_bcf_to_bcf_bgzip" name="Convert BCF to BCF_BGZIP" version="0.0.1" hidden="false">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">bcf_to_bcf_bgzip_converter.py '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="bcf" name="input1" type="data" label="Choose bcf file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bcf_bgzip" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bed_gff_or_vcf_to_bigwig_converter.xml b/lib/galaxy/datatypes/converters/bed_gff_or_vcf_to_bigwig_converter.xml
new file mode 100644
index 0000000..a8fa556
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bed_gff_or_vcf_to_bigwig_converter.xml
@@ -0,0 +1,34 @@
+<tool id="CONVERTER_bed_gff_or_vcf_to_bigwig_0" name="Convert BED, GFF, or VCF to BigWig" version="1.0.0" hidden="true">
+    <!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+    <requirements>
+        <requirement type="package">ucsc_tools</requirement>
+        <requirement type="package">bedtools</requirement>
+    </requirements>
+    <command>
+        ## Remove comments and sort by chromosome.
+        grep -v '^#' $input | sort -k1,1 | 
+
+        ## Generate coverage bedgraph.
+        bedtools genomecov -bg -i stdin -g $chromInfo
+
+        ## Only use split option for bed and gff/gff3/gtf.
+        #if $input.ext in [ 'bed', 'gff', 'gff3', 'gtf' ]:
+            -split
+        #end if
+
+        ## Streaming the bedgraph file to wigToBigWig is fast but very memory intensive; hence, this
+        ## should only be used on systems with large RAM.
+        ## | wigToBigWig stdin $chromInfo $output
+
+        ## This can be used anywhere.
+        > temp.bg ; bedGraphToBigWig temp.bg $chromInfo $output
+    </command>
+    <inputs>
+        <param format="bed,gff,vcf" name="input" type="data" label="Choose input file"/>
+    </inputs>
+    <outputs>
+        <data format="bigwig" name="output"/>
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bed_to_bgzip_converter.xml b/lib/galaxy/datatypes/converters/bed_to_bgzip_converter.xml
new file mode 100644
index 0000000..97cdc6c
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bed_to_bgzip_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_bed_to_bgzip_0" name="Convert BED to BGZIP" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">bgzip.py -P bed $input1 $output1</command>
+  <inputs>
+    <page>
+        <param format="bed" name="input1" type="data" label="Choose BED file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bgzip" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bed_to_fli_converter.xml b/lib/galaxy/datatypes/converters/bed_to_fli_converter.xml
new file mode 100644
index 0000000..3bda5c9
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bed_to_fli_converter.xml
@@ -0,0 +1,13 @@
+<tool id="CONVERTER_bed_to_fli_0" name="Convert BED to Feature Location Index">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">interval_to_fli.py -F bed '$input1' '$output1'</command>
+  <inputs>
+    <param format="bed" name="input1" type="data" label="Choose BED file"/>
+  </inputs>
+  <outputs>
+    <data format="fli" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bed_to_gff_converter.py b/lib/galaxy/datatypes/converters/bed_to_gff_converter.py
new file mode 100644
index 0000000..8217154
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bed_to_gff_converter.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# This code exists in 2 places: ~/datatypes/converters and ~/tools/filters
+from __future__ import print_function
+
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    input_name = sys.argv[1]
+    output_name = sys.argv[2]
+    skipped_lines = 0
+    first_skipped_line = 0
+    out = open( output_name, 'w' )
+    out.write( "##gff-version 2\n" )
+    out.write( "##bed_to_gff_converter.py\n\n" )
+    i = 0
+    for i, line in enumerate( open( input_name ) ):
+        complete_bed = False
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( '#' ) and not line.startswith( 'track' ) and not line.startswith( 'browser' ):
+            try:
+                elems = line.split( '\t' )
+                if len( elems ) == 12:
+                    complete_bed = True
+                chrom = elems[0]
+                if complete_bed:
+                    feature = "mRNA"
+                else:
+                    try:
+                        feature = elems[3]
+                    except IndexError:
+                        feature = 'feature%d' % ( i + 1 )
+                start = int( elems[1] ) + 1
+                end = int( elems[2] )
+                try:
+                    score = elems[4]
+                except IndexError:
+                    score = '0'
+                try:
+                    strand = elems[5]
+                except IndexError:
+                    strand = '+'
+                try:
+                    group = elems[3]
+                except IndexError:
+                    group = 'group%d' % ( i + 1 )
+                if complete_bed:
+                    out.write( '%s\tbed2gff\t%s\t%d\t%d\t%s\t%s\t.\t%s %s;\n' % ( chrom, feature, start, end, score, strand, feature, group  ) )
+                else:
+                    out.write( '%s\tbed2gff\t%s\t%d\t%d\t%s\t%s\t.\t%s;\n' % ( chrom, feature, start, end, score, strand, group  ) )
+                if complete_bed:
+                    # We have all the info necessary to annotate exons for genes and mRNAs
+                    block_count = int( elems[9] )
+                    block_sizes = elems[10].split( ',' )
+                    block_starts = elems[11].split( ',' )
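+                    # BED blockStarts are 0-based offsets from chromStart;
+                    # "start" is already 1-based, so adding an offset gives
+                    # a 1-based exon start, with an inclusive end.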
+                    for j in range( block_count ):
+                        exon_start = int( start ) + int( block_starts[j] )
+                        exon_end = exon_start + int( block_sizes[j] ) - 1
+                        out.write( '%s\tbed2gff\texon\t%d\t%d\t%s\t%s\t.\texon %s;\n' % ( chrom, exon_start, exon_end, score, strand, group ) )
+            except Exception:
+                skipped_lines += 1
+                if not first_skipped_line:
+                    first_skipped_line = i + 1
+        else:
+            skipped_lines += 1
+            if not first_skipped_line:
+                first_skipped_line = i + 1
+    out.close()
+    info_msg = "%i lines converted to GFF version 2.  " % ( i + 1 - skipped_lines )
+    if skipped_lines > 0:
+        info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
+    print(info_msg)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/bed_to_gff_converter.xml b/lib/galaxy/datatypes/converters/bed_to_gff_converter.xml
new file mode 100644
index 0000000..1c4151f
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bed_to_gff_converter.xml
@@ -0,0 +1,13 @@
+<tool id="CONVERTER_bed_to_gff_0" name="Convert BED to GFF" version="2.0.0">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">bed_to_gff_converter.py '$input1' '$output1'</command>
+  <inputs>
+    <param format="bed" name="input1" type="data" label="Choose BED file"/>
+  </inputs>
+  <outputs>
+    <data format="gff" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bed_to_interval_index_converter.xml b/lib/galaxy/datatypes/converters/bed_to_interval_index_converter.xml
new file mode 100644
index 0000000..ba6bfe6
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bed_to_interval_index_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_bed_to_interval_index_0" name="Convert BED to Interval Index" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">interval_to_interval_index_converter.py $input1 $output1</command>
+  <inputs>
+    <page>
+        <param format="bed" name="input1" type="data" label="Choose BED file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="interval_index" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bed_to_tabix_converter.xml b/lib/galaxy/datatypes/converters/bed_to_tabix_converter.xml
new file mode 100644
index 0000000..2272958
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bed_to_tabix_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_bed_to_tabix_0" name="Convert BED to tabix" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">interval_to_tabix_converter.py -P bed '$input1' '$bgzip' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="bed" name="input1" type="data" label="Choose BED file"/>
+        <param format="bgzip" name="bgzip" type="data" label="BGZIP file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="tabix" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py b/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py
new file mode 100644
index 0000000..3fedd85
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+from __future__ import division
+
+import sys
+
+from bx.arrays.array_tree import array_tree_dict_from_reader, FileArrayTreeDict
+
+BLOCK_SIZE = 100
+
+
+class BedGraphReader:
+    def __init__( self, f ):
+        self.f = f
+
+    def __iter__( self ):
+        return self
+
+    def next( self ):
+        while True:
+            line = self.f.readline()
+            if not line:
+                raise StopIteration()
+            if line.isspace():
+                continue
+            if line[0] == "#":
+                continue
+            if line[0].isalpha():
+                if line.startswith( "track" ) or line.startswith( "browser" ):
+                    continue
+
+                feature = line.strip().split()
+                chrom = feature[0]
+                chrom_start = int(feature[1])
+                chrom_end = int(feature[2])
+                score = float(feature[3])
+                return chrom, chrom_start, chrom_end, None, score
+
+
+def main():
+    input_fname = sys.argv[1]
+    out_fname = sys.argv[2]
+
+    reader = BedGraphReader( open( input_fname ) )
+
+    # Fill array from reader
+    d = array_tree_dict_from_reader( reader, {}, block_size=BLOCK_SIZE )
+
+    for array_tree in d.itervalues():
+        array_tree.root.build_summary()
+
+    FileArrayTreeDict.dict_to_file( d, open( out_fname, "w" ) )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.xml b/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.xml
new file mode 100644
index 0000000..d1baafb
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_BedGraph_0" name="Index BedGraph for Track Viewer" hidden="true">
+  <!-- Used internally to generate track indexes -->
+  <command interpreter="python">bedgraph_to_array_tree_converter.py '$input' '$output'</command>
+  <inputs>
+    <page>
+      <param format="bedgraph" name="input" type="data" label="Choose BedGraph"/>
+    </page>
+   </inputs>
+   <outputs>
+      <data format="array_tree" name="output"/>
+   </outputs>
+  <help>
+  </help>
+</tool>
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/converters/bedgraph_to_bigwig_converter.xml b/lib/galaxy/datatypes/converters/bedgraph_to_bigwig_converter.xml
new file mode 100644
index 0000000..f91925d
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bedgraph_to_bigwig_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_bedgraph_to_bigwig" name="Convert BedGraph to BigWig" hidden="true">
+  <!-- Used internally to generate track indexes -->
+  <requirements>
+      <requirement type="package">ucsc_tools</requirement>
+  </requirements>
+  <command>grep -v "^track" '$input' | wigToBigWig -clip stdin $chromInfo '$output'</command>
+  <inputs>
+      <param format="bedgraph" name="input" type="data" label="Choose wiggle"/>
+   </inputs>
+   <outputs>
+      <data format="bigwig" name="output"/>
+   </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/bgzip.py b/lib/galaxy/datatypes/converters/bgzip.py
new file mode 100644
index 0000000..3cd6062
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/bgzip.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+"""
+Uses pysam to bgzip a file
+
+usage: %prog in_file out_file
+"""
+import optparse
+import subprocess
+import tempfile
+
+from pysam import ctabix
+
+
+def main():
+    # Read options, args.
+    parser = optparse.OptionParser()
+    parser.add_option( '-c', '--chr-col', type='int', dest='chrom_col' )
+    parser.add_option( '-s', '--start-col', type='int', dest='start_col' )
+    parser.add_option( '-e', '--end-col', type='int', dest='end_col' )
+    parser.add_option( '-P', '--preset', dest='preset' )
+    (options, args) = parser.parse_args()
+    input_fname, output_fname = args
+
+    tmpfile = tempfile.NamedTemporaryFile()
+    sort_params = None
+
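+    # Build sort keys from the 1-based column numbers; e.g. chrom_col=1,
+    # start_col=2, end_col=3 produces: sort -k1,1 -k2,2n -k3,3n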
+    if options.chrom_col and options.start_col and options.end_col:
+        sort_params = [
+            "sort",
+            "-k%(i)s,%(i)s" % { 'i': options.chrom_col },
+            "-k%(i)i,%(i)in" % { 'i': options.start_col },
+            "-k%(i)i,%(i)in" % { 'i': options.end_col }
+        ]
+    elif options.preset == "bed":
+        sort_params = ["sort", "-k1,1", "-k2,2n", "-k3,3n"]
+    elif options.preset == "vcf":
+        sort_params = ["sort", "-k1,1", "-k2,2n"]
+    elif options.preset == "gff":
+        sort_params = ["sort", "-s", "-k1,1", "-k4,4n"]  # stable sort on start column
+    # Skip any lines starting with "#" and "track"
+    grepped = subprocess.Popen(["grep", "-e", "^#", "-e", "^track", "-v", input_fname], stderr=subprocess.PIPE, stdout=subprocess.PIPE )
+    after_sort = subprocess.Popen(sort_params, stdin=grepped.stdout, stderr=subprocess.PIPE, stdout=tmpfile )
+    grepped.stdout.close()
+    output, err = after_sort.communicate()
+
+    ctabix.tabix_compress(tmpfile.name, output_fname, force=True)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/cml_to_inchi_converter.xml b/lib/galaxy/datatypes/converters/cml_to_inchi_converter.xml
new file mode 100644
index 0000000..36e3dec
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/cml_to_inchi_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_cml_to_inchi" name="CML to InChI" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -icml '${input}' -oinchi -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="cml" label="Molecules in CML-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="inchi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/cml_to_mol2_converter.xml b/lib/galaxy/datatypes/converters/cml_to_mol2_converter.xml
new file mode 100644
index 0000000..5f279f8
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/cml_to_mol2_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_cml_to_mol2" name="CML to mol2" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -icml '${input}' -omol2 -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="cml" label="Molecules in CML-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol2"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/cml_to_sdf_converter.xml b/lib/galaxy/datatypes/converters/cml_to_sdf_converter.xml
new file mode 100644
index 0000000..5aaee72
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/cml_to_sdf_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_cml_to_sdf" name="CML to SDF" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -icml '${input}' -osdf -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="cml" label="Molecules in CML-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="sdf"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/cml_to_smi_converter.xml b/lib/galaxy/datatypes/converters/cml_to_smi_converter.xml
new file mode 100644
index 0000000..d4f3554
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/cml_to_smi_converter.xml
@@ -0,0 +1,48 @@
+<tool id="CONVERTER_cml_to_smiles" name="CML to SMILES" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel
+            -icml '${input}'
+            #if $can:
+                -ocan
+            #else:
+                -osmi
+            #end if
+            -O '${output}'
+            -e
+            $remove_h
+            #if $iso_chi or $can or $exp_h:
+                -x$iso_chi$exp_h$can
+            #end if
+            #if $dative_bonds:
+                -b
+            #end if
+            #if int($ph) >= 0:
+                -p $ph
+            #end if
+
+            2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="cml" label="Molecules in CML-format"/>
+        <param name="iso_chi" type="boolean" label="Do not include isotopic or chiral markings (-xi)" truevalue="i" falsevalue="" checked="false" />
+        <param name="can" type="boolean" label="Output in canonical form (-xc)" truevalue="c" falsevalue="" checked="false" />
+        <param name="exp_h" type="boolean" label="Output explicit hydrogens as such (-xh)" truevalue="h" falsevalue="" checked="false" />
+        <param name="remove_h" type="boolean" label="Delete hydrogen atoms (-d)" truevalue="-d" falsevalue="" />
+        <param name="ph" type="float" value="-1" label="Add hydrogens appropriate for pH (-p)" help="-1 means deactivated"/>
+        <param name="dative_bonds" type="boolean" label="Convert dative bonds (e.g. [N+]([O-])=O to N(=O)=O) (-b)" truevalue="-b" falsevalue="" />
+    </inputs>
+    <outputs>
+        <data name="output" format="smi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/encodepeak_to_bgzip_converter.xml b/lib/galaxy/datatypes/converters/encodepeak_to_bgzip_converter.xml
new file mode 100644
index 0000000..194d5a4
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/encodepeak_to_bgzip_converter.xml
@@ -0,0 +1,19 @@
+<tool id="CONVERTER_encodepeak_to_bgzip_0" name="Convert ENCODEPeak to BGZIP" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">bgzip.py 
+                                  -c ${input1.metadata.chromCol} 
+                                  -s ${input1.metadata.startCol} 
+                                  -e ${input1.metadata.endCol} 
+                                  '$input1' '$output1'
+  </command>
+  <inputs>
+    <page>
+        <param format="ENCODEPeak" name="input1" type="data" label="Choose ENCODEPeak file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bgzip" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/encodepeak_to_tabix_converter.xml b/lib/galaxy/datatypes/converters/encodepeak_to_tabix_converter.xml
new file mode 100644
index 0000000..0524bfd
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/encodepeak_to_tabix_converter.xml
@@ -0,0 +1,20 @@
+<tool id="CONVERTER_encodepeak_to_tabix_0" name="Convert ENCODEPeak to tabix" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">interval_to_tabix_converter.py
+                                  -c ${input1.metadata.chromCol} 
+                                  -s ${input1.metadata.startCol} 
+                                  -e ${input1.metadata.endCol} 
+                                  '$input1' '$bgzip' '$output1'
+  </command>
+  <inputs>
+    <page>
+        <param format="encodepeak" name="input1" type="data" label="Choose ENCODEPeak file"/>
+        <param format="bgzip" name="bgzip" type="data" label="BGZIP file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="tabix" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/fasta_to_2bit.xml b/lib/galaxy/datatypes/converters/fasta_to_2bit.xml
new file mode 100644
index 0000000..3ccd7ab
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fasta_to_2bit.xml
@@ -0,0 +1,17 @@
+<tool id="CONVERTER_fasta_to_2bit" name="Convert FASTA to 2bit" version="1.0.0">
+    <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+    <!-- Used on the metadata edit page. -->
+    <requirements>
+        <requirement type="package" version="332">ucsc-fatotwobit</requirement>
+        <requirement type="package">ucsc_tools</requirement>
+    </requirements>
+    <command>faToTwoBit '$input' '$output'</command>
+    <inputs>
+        <param name="input" type="data" format="fasta" label="Fasta file"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="twobit"/>
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/fasta_to_bowtie_base_index_converter.xml b/lib/galaxy/datatypes/converters/fasta_to_bowtie_base_index_converter.xml
new file mode 100644
index 0000000..981e065
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fasta_to_bowtie_base_index_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_fasta_to_bowtie_base_index" name="Convert FASTA to Bowtie base space Index" version="1.0.0">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <requirements>
+      <requirement type="package">bowtie</requirement>
+  </requirements>
+  <command>
+  mkdir '${output.files_path}'
+  && bowtie-build --quiet
+  -f 
+  '$input'  '${output.files_path}/${output.metadata.base_name}'
+  </command>
+  <inputs>
+    <param name="input" type="data" format="fasta" label="Fasta file"/>
+  </inputs>
+  <outputs>
+    <data name="output" format="bowtie_base_index"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/fasta_to_bowtie_color_index_converter.xml b/lib/galaxy/datatypes/converters/fasta_to_bowtie_color_index_converter.xml
new file mode 100644
index 0000000..12df6ac
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fasta_to_bowtie_color_index_converter.xml
@@ -0,0 +1,23 @@
+<tool id="CONVERTER_fasta_to_bowtie_color_index" name="Convert FASTA to Bowtie color space Index" version="1.0.0">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <requirements>
+      <requirement type="package">bowtie</requirement>
+  </requirements>
+  <command>
+  mkdir '${output.files_path}'
+  && bowtie-build --quiet
+  --color
+  -f 
+  '$input'  '${output.files_path}/${output.metadata.base_name}'
+  </command>
+  <inputs>
+    <param name="input" type="data" format="fasta" label="Fasta file"/>
+  </inputs>
+  <outputs>
+    <data name="output" format="bowtie_color_index"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/fasta_to_len.py b/lib/galaxy/datatypes/converters/fasta_to_len.py
new file mode 100644
index 0000000..9d8fbf4
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fasta_to_len.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+"""
+Input: fasta, int
+Output: tabular
+Return titles with lengths of corresponding seq
+"""
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def compute_fasta_length( fasta_file, out_file, keep_first_char, keep_first_word=False ):
+
+    infile = fasta_file
+    out = open( out_file, 'w')
+    keep_first_char = int( keep_first_char )
+
+    fasta_title = ''
+    seq_len = 0
+
+    # number of char to keep in the title
+    if keep_first_char == 0:
+        keep_first_char = None
+    else:
+        keep_first_char += 1
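+        # The +1 compensates for the leading '>' kept in fasta_title, which
+        # the slice fasta_title[1:keep_first_char] drops below.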
+
+    first_entry = True
+
+    for line in open( infile ):
+        line = line.strip()
+        if not line or line.startswith( '#' ):
+            continue
+        if line[0] == '>':
+            if first_entry is False:
+                if keep_first_word:
+                    fasta_title = fasta_title.split()[0]
+                out.write( "%s\t%d\n" % ( fasta_title[ 1:keep_first_char ], seq_len ) )
+            else:
+                first_entry = False
+            fasta_title = line
+            seq_len = 0
+        else:
+            seq_len += len(line)
+
+    # last fasta-entry
+    if keep_first_word:
+        fasta_title = fasta_title.split()[0]
+    out.write( "%s\t%d\n" % ( fasta_title[ 1:keep_first_char ], seq_len ) )
+    out.close()
+
+
+if __name__ == "__main__" :
+    compute_fasta_length( sys.argv[1], sys.argv[2], sys.argv[3], True )
diff --git a/lib/galaxy/datatypes/converters/fasta_to_len.xml b/lib/galaxy/datatypes/converters/fasta_to_len.xml
new file mode 100644
index 0000000..159114f
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fasta_to_len.xml
@@ -0,0 +1,13 @@
+<tool id="CONVERTER_fasta_to_len" name="Convert FASTA to len file" version="1.0.0">
+    <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+    <!-- Used on the metadata edit page. -->
+    <command interpreter="python">fasta_to_len.py '$input' '$output' 0</command>
+    <inputs>
+        <param name="input" type="data" format="fasta" label="Fasta file"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="len"/>
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/fasta_to_tabular_converter.py b/lib/galaxy/datatypes/converters/fasta_to_tabular_converter.py
new file mode 100644
index 0000000..9914b7d
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fasta_to_tabular_converter.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+# Variants of this code exist in 2 places: this file,
+# lib/galaxy/datatypes/converters/fasta_to_tabular_converter.py, which has
+# no user-facing options and is called for implicit data conversion, and
+# the user-facing Galaxy tool of the same name, which has many options.
+# That version is now on GitHub and in the Galaxy Tool Shed:
+# https://github.com/galaxyproject/tools-devteam/tree/master/tools/fasta_to_tabular
+# https://toolshed.g2.bx.psu.edu/view/devteam/fasta_to_tabular
+"""
+Input: fasta
+Output: tabular
+"""
+
+import os
+import sys
+
+
+def __main__():
+    infile = sys.argv[1]
+    outfile = sys.argv[2]
+
+    if not os.path.isfile(infile):
+        sys.stderr.write("Input file %r not found\n" % infile)
+        sys.exit(1)
+
+    with open(infile) as inp:
+        with open(outfile, 'w') as out:
+            sequence = ''
+            for line in inp:
+                line = line.rstrip('\r\n')
+                if line.startswith('>'):
+                    if sequence:
+                        # Flush sequence from previous FASTA record,
+                        # removing any white space
+                        out.write("".join(sequence.split()) + '\n')
+                        sequence = ''
+                    # Strip off the leading '>' and remove any pre-existing
+                    # tabs which would trigger extra columns; write with
+                    # tab to separate this from the sequence column:
+                    out.write(line[1:].replace('\t', ' ') + '\t')
+                else:
+                    # Continuing sequence,
+                    sequence += line
+            # End of FASTA file, flush last sequence
+            if sequence:
+                out.write("".join(sequence.split()) + '\n')
+
+
+if __name__ == "__main__" :
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/fasta_to_tabular_converter.xml b/lib/galaxy/datatypes/converters/fasta_to_tabular_converter.xml
new file mode 100644
index 0000000..9769e8e
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fasta_to_tabular_converter.xml
@@ -0,0 +1,13 @@
+<tool id="CONVERTER_fasta_to_tabular" name="Convert FASTA to Tabular" version="1.0.1">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">fasta_to_tabular_converter.py '$input' '$output'</command>
+  <inputs>
+    <param name="input" type="data" format="fasta" label="Fasta file"/>
+  </inputs>
+  <outputs>
+    <data name="output" format="tabular"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
new file mode 100644
index 0000000..5a90201
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import sys
+
+from galaxy.util.checkers import is_gzip
+
+
+def main():
+    """
+    The format of the file is JSON::
+
+        { "sections" : [
+                { "start" : "x", "end" : "y", "sequences" : "z" },
+                ...
+        ]}
+
+    This works only for UNCOMPRESSED fastq files. The Python GzipFile does not provide seekable
+    offsets via tell(), so clients just have to split compressed files the slow way.
+    """
+    input_fname = sys.argv[1]
+    if is_gzip(input_fname):
+        print('Conversion is only possible for uncompressed files')
+        sys.exit(1)
+
+    out_file = open(sys.argv[2], 'w')
+
+    current_line = 0
+    sequences = 1000000
+    lines_per_chunk = 4 * sequences
+    chunk_begin = 0
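+    # Chunk boundaries are byte offsets obtained from in_file.tell(), taken
+    # after every lines_per_chunk lines (4 lines per FASTQ record).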
+
+    in_file = open(input_fname)
+
+    out_file.write('{"sections" : [')
+
+    for line in in_file:
+        current_line += 1
+        if 0 == current_line % lines_per_chunk:
+            chunk_end = in_file.tell()
+            out_file.write('{"start":"%s","end":"%s","sequences":"%s"},' % (chunk_begin, chunk_end, sequences))
+            chunk_begin = chunk_end
+
+    chunk_end = in_file.tell()
+    out_file.write('{"start":"%s","end":"%s","sequences":"%s"}' % (chunk_begin, chunk_end, (current_line % lines_per_chunk) / 4))
+    out_file.write(']}\n')
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/fastq_to_fqtoc.xml b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.xml
new file mode 100644
index 0000000..1bf4dc3
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.xml
@@ -0,0 +1,13 @@
+<tool id="CONVERTER_fastq_to_fqtoc0" name="Convert FASTQ files to seek locations" version="1.0.0" hidden="true">
+  <command interpreter="python">fastq_to_fqtoc.py '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="fastq" name="input1" type="data" label="Choose FASTQ file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="fqtoc" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.py b/lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.py
new file mode 100644
index 0000000..988fcb5
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+"""
+Convert a fastqsolexa file to separate sequence and quality files.
+
+Assumes each sequence and each quality score is contained on one line,
+in the following order:
+1st line: @title_of_seq
+2nd line: nucleotides
+3rd line: +title_of_qualityscore (might be skipped)
+4th line: quality scores
+(quality scores take one of three forms: a. digits, b. ASCII codes with the first char as the coding base, c. ASCII codes without the first char.)
+
+Usage:
+%python fastqsolexa_to_fasta_converter.py <your_fastqsolexa_filename> <output_seq_filename> <output_score_filename>
+"""
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s" % msg )
+    sys.exit()
+
+
+def __main__():
+    infile_name = sys.argv[1]
+    outfile = open( sys.argv[2], 'w' )
+    fastq_block_lines = 0
+    seq_title_startswith = ''
+
+    for i, line in enumerate( open( infile_name ) ):
+        line = line.rstrip()  # eliminate trailing space and new line characters
+        if not line or line.startswith( '#' ):
+            continue
+        fastq_block_lines = ( fastq_block_lines + 1 ) % 4
+        line_startswith = line[0:1]
+        if fastq_block_lines == 1:
+            # line 1 is sequence title
+            if not seq_title_startswith:
+                seq_title_startswith = line_startswith
+            if seq_title_startswith != line_startswith:
+                stop_err( 'Invalid fastqsolexa format at line %d: %s.' % ( i + 1, line ) )
+            outfile.write( '>%s\n' % line[1:] )
+        elif fastq_block_lines == 2:
+            # line 2 is nucleotides
+            outfile.write( '%s\n' % line )
+        else:
+            pass
+
+    outfile.close()
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.xml b/lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.xml
new file mode 100644
index 0000000..eaa652d
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.xml
@@ -0,0 +1,12 @@
+<tool id="CONVERTER_fastqsolexa_to_fasta_0" name="Convert Fastqsolexa to Fasta" version="1.0.0">
+  <description>converts Fastqsolexa file to Fasta format</description>
+  <command interpreter="python">fastqsolexa_to_fasta_converter.py '$input' '$output'</command>
+  <inputs>
+    <param name="input" type="data" format="fastqsolexa" label="Choose Fastqsolexa file"/>
+  </inputs>
+  <outputs>
+    <data name="output" format="fasta"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/fastqsolexa_to_qual_converter.py b/lib/galaxy/datatypes/converters/fastqsolexa_to_qual_converter.py
new file mode 100644
index 0000000..2ac1672
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fastqsolexa_to_qual_converter.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+"""
+Convert a fastqsolexa file to separate sequence and quality files.
+
+Assumes each sequence and each quality score is contained on one line,
+in the following order:
+1st line: @title_of_seq
+2nd line: nucleotides
+3rd line: +title_of_qualityscore (might be skipped)
+4th line: quality scores
+(quality scores take one of three forms: a. digits, b. ASCII codes with the first char as the coding base, c. ASCII codes without the first char.)
+
+Usage:
+%python fastqsolexa_to_qual_converter.py <your_fastqsolexa_filename> <output_seq_filename> <output_score_filename>
+"""
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s" % msg )
+    sys.exit()
+
+
+def __main__():
+    infile_name = sys.argv[1]
+    outfile_score = open( sys.argv[2], 'w' )
+    # datatype = sys.argv[3]
+    qual_title_startswith = ''
+    seq_title_startswith = ''
+    default_coding_value = 64
+    fastq_block_lines = 0
+
+    for i, line in enumerate( open( infile_name ) ):
+        line = line.rstrip()
+        if not line or line.startswith( '#' ):
+            continue
+        fastq_block_lines = ( fastq_block_lines + 1 ) % 4
+        line_startswith = line[0:1]
+        if fastq_block_lines == 1:
+            # first line is @title_of_seq
+            if not seq_title_startswith:
+                seq_title_startswith = line_startswith
+            if line_startswith != seq_title_startswith:
+                stop_err( 'Invalid fastqsolexa format at line %d: %s.' % ( i + 1, line ) )
+            read_title = line[1:]
+        elif fastq_block_lines == 2:
+            # second line is nucleotides
+            read_length = len( line )
+        elif fastq_block_lines == 3:
+            # third line is +title_of_qualityscore (might be skipped)
+            if not qual_title_startswith:
+                qual_title_startswith = line_startswith
+            if line_startswith != qual_title_startswith:
+                stop_err( 'Invalid fastqsolexa format at line %d: %s.' % ( i + 1, line ) )
+            quality_title = line[1:]
+            if quality_title and read_title != quality_title:
+                stop_err( 'Invalid fastqsolexa format at line %d: sequence title "%s" differs from score title "%s".' % ( i + 1, read_title, quality_title ) )
+            if not quality_title:
+                outfile_score.write( '>%s\n' % read_title )
+            else:
+                outfile_score.write( '>%s\n' % line[1:] )
+        else:
+            # fourth line is quality scores
+            qual = ''
+            fastq_integer = True
+            # peek: ascii or digits?
+            val = line.split()[0]
+
+            try:
+                int( val )
+                fastq_integer = True
+            except ValueError:
+                fastq_integer = False
+
+            if fastq_integer:  # digits
+                qual = line
+            else:
+                # ascii
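+                # If the line has one extra leading character, that character
+                # encodes the coding base; otherwise the default (64) is used.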
+                quality_score_length = len( line )
+                if quality_score_length == read_length + 1:
+                    quality_score_startswith = ord( line[0:1] )
+                    line = line[1:]
+                elif quality_score_length == read_length:
+                    quality_score_startswith = default_coding_value
+                else:
+                    stop_err( 'Invalid fastqsolexa format at line %d: the number of quality scores ( %d ) is not the same as bases ( %d ).' % ( i + 1, quality_score_length, read_length ) )
+                for j, char in enumerate( line ):
+                    score = ord( char ) - quality_score_startswith    # 64
+                    qual = "%s%s " % ( qual, str( score ) )
+            outfile_score.write( '%s\n' % qual )
+
+    outfile_score.close()
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/fastqsolexa_to_qual_converter.xml b/lib/galaxy/datatypes/converters/fastqsolexa_to_qual_converter.xml
new file mode 100644
index 0000000..a744009
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/fastqsolexa_to_qual_converter.xml
@@ -0,0 +1,11 @@
+<tool id="CONVERTER_fastqsolexa_to_qual_0" name="Convert Fastqsolexa to Qual">
+  <command interpreter="python">fastqsolexa_to_qual_converter.py '$input1' '$output1' ${input1.extension}</command>
+  <inputs>
+    <param format="fastqsolexa" name="input1" type="data" label="Choose Fastqsolexa file"/>
+  </inputs>
+  <outputs>
+    <data format="qualsolexa" name="output1" />
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/gff_to_bed_converter.py b/lib/galaxy/datatypes/converters/gff_to_bed_converter.py
new file mode 100644
index 0000000..7889318
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/gff_to_bed_converter.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    input_name = sys.argv[1]
+    output_name = sys.argv[2]
+    skipped_lines = 0
+    first_skipped_line = 0
+    out = open( output_name, 'w' )
+    i = 0
+    for i, line in enumerate( open( input_name ) ):
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( '#' ):
+            try:
+                elems = line.split( '\t' )
+                start = str( int( elems[3] ) - 1 )
+                strand = elems[6]
+                if strand not in ['+', '-']:
+                    strand = '+'
+                # GFF format: chrom, source, name, chromStart, chromEnd, score, strand
+                # Bed format: chrom, chromStart, chromEnd, name, score, strand
+                #
+                # Replace any spaces in the name with underscores so UCSC will not complain
+                name = elems[2].replace(" ", "_")
+                out.write( "%s\t%s\t%s\t%s\t0\t%s\n" % ( elems[0], start, elems[4], name, strand ) )
+            except:
+                skipped_lines += 1
+                if not first_skipped_line:
+                    first_skipped_line = i + 1
+        else:
+            skipped_lines += 1
+            if not first_skipped_line:
+                first_skipped_line = i + 1
+    out.close()
+    info_msg = "%i lines converted to BED.  " % ( i + 1 - skipped_lines )
+    if skipped_lines > 0:
+        info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
+    print(info_msg)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/gff_to_bed_converter.xml b/lib/galaxy/datatypes/converters/gff_to_bed_converter.xml
new file mode 100644
index 0000000..21cbf5a
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/gff_to_bed_converter.xml
@@ -0,0 +1,13 @@
+<tool id="CONVERTER_gff_to_bed_0" name="Convert GFF to BED">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">gff_to_bed_converter.py '$input1' '$output1'</command>
+  <inputs>
+    <param format="gff" name="input1" type="data" label="Choose GFF file"/>
+  </inputs>
+  <outputs>
+    <data format="bed" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/gff_to_bgzip_converter.xml b/lib/galaxy/datatypes/converters/gff_to_bgzip_converter.xml
new file mode 100644
index 0000000..eefdced
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/gff_to_bgzip_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_gff_to_bgzip_0" name="Convert GFF to BGZIP" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">bgzip.py -P gff '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="gff" name="input1" type="data" label="Choose GFF file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bgzip" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/gff_to_fli_converter.xml b/lib/galaxy/datatypes/converters/gff_to_fli_converter.xml
new file mode 100644
index 0000000..ecdd6f6
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/gff_to_fli_converter.xml
@@ -0,0 +1,13 @@
+<tool id="CONVERTER_gff_to_fli_0" name="Convert GFF to Feature Location Index">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">interval_to_fli.py -F $input1.extension '$input1' '$output1'</command>
+  <inputs>
+    <param format="gff" name="input1" type="data" label="Choose GFF file"/>
+  </inputs>
+  <outputs>
+    <data format="fli" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/gff_to_interval_index_converter.py b/lib/galaxy/datatypes/converters/gff_to_interval_index_converter.py
new file mode 100644
index 0000000..c6b081b
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/gff_to_interval_index_converter.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+"""
+Convert from GFF file to interval index file.
+
+usage:
+    python gff_to_interval_index_converter.py [input] [output]
+"""
+from __future__ import division
+
+import fileinput
+import sys
+
+from bx.interval_index_file import Indexes
+
+from galaxy.datatypes.util.gff_util import convert_gff_coords_to_bed, GenomicInterval, GFFReaderWrapper
+
+
+def main():
+    # Arguments
+    input_fname, out_fname = sys.argv[1:]
+
+    # Do conversion.
+    index = Indexes()
+    offset = 0
+    reader_wrapper = GFFReaderWrapper( fileinput.FileInput( input_fname ), fix_strand=True )
+    for feature in list( reader_wrapper ):
+        # Add feature; index expects BED coordinates.
+        if isinstance( feature, GenomicInterval ):
+            convert_gff_coords_to_bed( feature )
+            index.add( feature.chrom, feature.start, feature.end, offset )
+
+        # Always increment offset, even if feature is not an interval and hence
+        # not included in the index.
+        offset += feature.raw_size
+
+    index.write( open(out_fname, "w") )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/gff_to_interval_index_converter.xml b/lib/galaxy/datatypes/converters/gff_to_interval_index_converter.xml
new file mode 100644
index 0000000..63c8ba4
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/gff_to_interval_index_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_gff_to_interval_index_0" name="Convert GFF to Interval Index" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">gff_to_interval_index_converter.py '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="gff" name="input1" type="data" label="Choose GFF file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="interval_index" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/gff_to_tabix_converter.xml b/lib/galaxy/datatypes/converters/gff_to_tabix_converter.xml
new file mode 100644
index 0000000..fac5dc9
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/gff_to_tabix_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_gff_to_tabix_0" name="Convert GFF to tabix" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">interval_to_tabix_converter.py -P gff '$input1' '$bgzip' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="gff" name="input1" type="data" label="Choose GFF file"/>
+        <param format="bgzip" name="bgzip" type="data" label="BGZIP file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="tabix" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/inchi_to_cml_converter.xml b/lib/galaxy/datatypes/converters/inchi_to_cml_converter.xml
new file mode 100644
index 0000000..97d751a
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/inchi_to_cml_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_inchi_to_cml" name="InChI to CML" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -iinchi '${input}' -ocml -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="inchi" label="Molecules in InChI format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="cml"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/inchi_to_mol2_converter.xml b/lib/galaxy/datatypes/converters/inchi_to_mol2_converter.xml
new file mode 100644
index 0000000..6efe9d3
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/inchi_to_mol2_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_inchi_to_mol2" name="InChI to MOL2" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -iinchi '${input}' -omol2 -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="inchi" label="Molecules in InChI format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol2"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/inchi_to_mol_converter.xml b/lib/galaxy/datatypes/converters/inchi_to_mol_converter.xml
new file mode 100644
index 0000000..6d18217
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/inchi_to_mol_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_inchi_to_mol" name="InChI to MOL" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -iinchi '${input}' -omol -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="inchi" label="Molecules in InChI-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/inchi_to_sdf_converter.xml b/lib/galaxy/datatypes/converters/inchi_to_sdf_converter.xml
new file mode 100644
index 0000000..417fd12
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/inchi_to_sdf_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_inchi_to_sdf" name="InChI to SDF" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -iinchi '${input}' -osdf -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="inchi" label="Molecules in InChI format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="sdf"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/inchi_to_smi_converter.xml b/lib/galaxy/datatypes/converters/inchi_to_smi_converter.xml
new file mode 100644
index 0000000..b934c44
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/inchi_to_smi_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_inchi_to_smi" name="InChI to SMILES" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -iinchi '${input}' -osmi -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="inchi" label="Molecules in InChI format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="smi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_bed12_converter.xml b/lib/galaxy/datatypes/converters/interval_to_bed12_converter.xml
new file mode 100644
index 0000000..ff6a060
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bed12_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_interval_to_bed12_0" name="Convert Genomic Intervals To Strict BED12">
+  <!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">interval_to_bedstrict_converter.py '$output1' '$input1' ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol} ${input1.metadata.nameCol} ${input1.extension} 12</command>
+  <inputs>
+    <page>
+      <param format="interval" name="input1" type="data" label="Choose intervals"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bed12" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_bed6_converter.xml b/lib/galaxy/datatypes/converters/interval_to_bed6_converter.xml
new file mode 100644
index 0000000..c992a63
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bed6_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_interval_to_bed6_0" name="Convert Genomic Intervals To Strict BED6">
+  <!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">interval_to_bedstrict_converter.py '$output1' '$input1' ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol} ${input1.metadata.nameCol} ${input1.extension} 6</command>
+  <inputs>
+    <page>
+      <param format="interval" name="input1" type="data" label="Choose intervals"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bed6" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_bed_converter.py b/lib/galaxy/datatypes/converters/interval_to_bed_converter.py
new file mode 100644
index 0000000..36fec42
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bed_converter.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+import bx.intervals.io
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def __main__():
+    output_name = sys.argv[1]
+    input_name = sys.argv[2]
+    try:
+        chromCol = int( sys.argv[3] ) - 1
+    except:
+        stop_err( "'%s' is an invalid chrom column, correct the column settings before attempting to convert the data format." % str( sys.argv[3] ) )
+    try:
+        startCol = int( sys.argv[4] ) - 1
+    except:
+        stop_err( "'%s' is an invalid start column, correct the column settings before attempting to convert the data format." % str( sys.argv[4] ) )
+    try:
+        endCol = int( sys.argv[5] ) - 1
+    except:
+        stop_err( "'%s' is an invalid end column, correct the column settings before attempting to convert the data format." % str( sys.argv[5] ) )
+    try:
+        strandCol = int( sys.argv[6] ) - 1
+    except:
+        strandCol = -1
+    try:
+        nameCol = int( sys.argv[7] ) - 1
+    except:
+        nameCol = -1
+    skipped_lines = 0
+    first_skipped_line = 0
+    out = open( output_name, 'w' )
+    count = 0
+    for count, region in enumerate( bx.intervals.io.NiceReaderWrapper( open( input_name, 'r' ), chrom_col=chromCol, start_col=startCol, end_col=endCol, strand_col=strandCol, fix_strand=True, return_header=False, return_comments=False ) ):
+        try:
+            if nameCol >= 0:
+                name = region.fields[nameCol]
+            else:
+                raise IndexError
+        except:
+            name = "region_%i" % count
+        try:
+            out.write( "%s\t%i\t%i\t%s\t%i\t%s\n" % ( region.chrom, region.start, region.end, name, 0, region.strand ) )
+        except:
+            skipped_lines += 1
+            if not first_skipped_line:
+                first_skipped_line = count + 1
+    out.close()
+    print("%i regions converted to BED." % ( count + 1 - skipped_lines ))
+    if skipped_lines > 0:
+        print("Skipped %d blank or invalid lines starting with line # %d." % ( skipped_lines, first_skipped_line ))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/interval_to_bed_converter.xml b/lib/galaxy/datatypes/converters/interval_to_bed_converter.xml
new file mode 100644
index 0000000..1e46eef
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bed_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_interval_to_bed_0" name="Convert Genomic Intervals To BED">
+  <!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">interval_to_bed_converter.py '$output1' '$input1' ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol} ${input1.metadata.nameCol}</command>
+  <inputs>
+    <page>
+      <param format="interval" name="input1" type="data" label="Choose intervals"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bed" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py b/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py
new file mode 100644
index 0000000..8c34e38
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.py
@@ -0,0 +1,162 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+import bx.intervals.io
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def force_bed_field_count( fields, region_count, force_num_columns ):
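+    """Pad a BED line to force_num_columns using spec defaults: name,
+    score '0', strand '+', thickStart/thickEnd copied from start/end,
+    itemRgb '0', blockCount '0' and ',' placeholders for the block lists.
+    Illustrative: 3 fields forced to 6 become [..., 'region_N', '0', '+']."""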
+    if force_num_columns >= 4 and len( fields ) < 4:
+        fields.append( 'region_%i' % ( region_count ) )
+    if force_num_columns >= 5 and len( fields ) < 5:
+        fields.append( '0' )
+    if force_num_columns >= 6 and len( fields ) < 6:
+        fields.append( '+' )
+    if force_num_columns >= 7 and len( fields ) < 7:
+        fields.append( fields[1] )
+    if force_num_columns >= 8 and len( fields ) < 8:
+        fields.append( fields[2] )
+    if force_num_columns >= 9 and len( fields ) < 9:
+        fields.append( '0' )
+    if force_num_columns >= 10 and len( fields ) < 10:
+        fields.append( '0' )
+    if force_num_columns >= 11 and len( fields ) < 11:
+        fields.append( ',' )
+    if force_num_columns >= 12 and len( fields ) < 12:
+        fields.append( ',' )
+    return fields[:force_num_columns]
+
+
+def __main__():
+    output_name = sys.argv[1]
+    input_name = sys.argv[2]
+    try:
+        chromCol = int( sys.argv[3] ) - 1
+    except:
+        stop_err( "'%s' is an invalid chrom column, correct the column settings before attempting to convert the data format." % str( sys.argv[3] ) )
+    try:
+        startCol = int( sys.argv[4] ) - 1
+    except:
+        stop_err( "'%s' is an invalid start column, correct the column settings before attempting to convert the data format." % str( sys.argv[4] ) )
+    try:
+        endCol = int( sys.argv[5] ) - 1
+    except:
+        stop_err( "'%s' is an invalid end column, correct the column settings before attempting to convert the data format." % str( sys.argv[5] ) )
+    try:
+        strandCol = int( sys.argv[6] ) - 1
+    except:
+        strandCol = -1
+    try:
+        nameCol = int( sys.argv[7] ) - 1
+    except:
+        nameCol = -1
+    try:
+        extension = sys.argv[8]
+    except:
+        extension = 'interval'  # default extension
+    try:
+        force_num_columns = int( sys.argv[9] )
+    except:
+        force_num_columns = None
+
+    skipped_lines = 0
+    first_skipped_line = None
+    out = open( output_name, 'w' )
+    count = 0
+    # does file already conform to bed strict?
+    # if so, we want to keep extended columns, otherwise we'll create a generic 6 column bed file
+    strict_bed = True
+    if extension in [ 'bed', 'bedstrict', 'bed6', 'bed12' ] and ( chromCol, startCol, endCol) == ( 0, 1, 2) and ( nameCol < 0 or nameCol == 3 ) and ( strandCol < 0 or strandCol == 5 ):
+        for count, line in enumerate( open( input_name ) ):
+            line = line.rstrip( '\n\r' )
+            if line == "" or line.startswith("#"):
+                skipped_lines += 1
+                if first_skipped_line is None:
+                    first_skipped_line = count + 1
+                continue
+            fields = line.split('\t')
+            try:
+                assert len( fields ) >= 3, 'A BED file requires at least 3 columns'  # we can't fix this
+                if len(fields) > 12:
+                    strict_bed = False
+                    break
+                # name (fields[3]) can be anything, no verification needed
+                if len( fields ) > 4:
+                    float( fields[4] )  # score - A score between 0 and 1000. If the track line useScore attribute is set to 1 for this annotation data set, the score value will determine the level of gray in which this feature is displayed (higher numbers = darker gray).
+                    if len( fields ) > 5:
+                        assert fields[5] in [ '+', '-' ], 'Invalid strand'  # strand - Defines the strand - either '+' or '-'.
+                        if len( fields ) > 6:
+                            int( fields[6] )  # thickStart - The starting position at which the feature is drawn thickly (for example, the start codon in gene displays).
+                            if len( fields ) > 7:
+                                int( fields[7] )  # thickEnd - The ending position at which the feature is drawn thickly (for example, the stop codon in gene displays).
+                                if len( fields ) > 8:
+                                    if fields[8] != '0':  # itemRgb - An RGB value of the form R,G,B (e.g. 255,0,0). If the track line itemRgb attribute is set to "On", this RBG value will determine the display color of the data contained in this BED line. NOTE: It is recommended that a simple color scheme (eight colors or less) be used with this attribute to avoid overwhelming the color resources of the Genome Browser and your Internet browser.
+                                        fields2 = fields[8].split( ',' )
+                                        assert len( fields2 ) == 3, 'RGB value must be 0 or have length of 3'
+                                        for field in fields2:
+                                            int( field )  # rgb values are integers
+                                    if len( fields ) > 9:
+                                        int( fields[9] )  # blockCount - The number of blocks (exons) in the BED line.
+                                        if len( fields ) > 10:
+                                            if fields[10] != ',':  # blockSizes - A comma-separated list of the block sizes. The number of items in this list should correspond to blockCount.
+                                                fields2 = fields[10].rstrip( "," ).split( "," )  # remove trailing comma and split on comma
+                                                for field in fields2:
+                                                    int( field )
+                                            if len( fields ) > 11:
+                                                if fields[11] != ',':  # blockStarts - A comma-separated list of block starts. All of the blockStart positions should be calculated relative to chromStart. The number of items in this list should correspond to blockCount.
+                                                    fields2 = fields[11].rstrip( "," ).split( "," )  # remove trailing comma and split on comma
+                                                    for field in fields2:
+                                                        int( field )
+            except:
+                strict_bed = False
+                break
+            if force_num_columns is not None and len( fields ) != force_num_columns:
+                line = '\t'.join( force_bed_field_count( fields, count, force_num_columns ) )
+            out.write( "%s\n" % line )
+    else:
+        strict_bed = False
+    out.close()
+
+    if not strict_bed:
+        skipped_lines = 0
+        first_skipped_line = None
+        out = open( output_name, 'w' )
+        count = 0
+        for count, region in enumerate( bx.intervals.io.NiceReaderWrapper( open( input_name, 'r' ), chrom_col=chromCol, start_col=startCol, end_col=endCol, strand_col=strandCol, fix_strand=True, return_header=False, return_comments=False ) ):
+            try:
+                if nameCol >= 0:
+                    name = region.fields[nameCol]
+                else:
+                    raise IndexError
+            except:
+                name = "region_%i" % count
+            try:
+                fields = [str(item) for item in (region.chrom, region.start, region.end, name, 0, region.strand)]
+                if force_num_columns is not None and len( fields ) != force_num_columns:
+                    fields = force_bed_field_count( fields, count, force_num_columns )
+                out.write( "%s\n" % '\t'.join( fields ) )
+            except:
+                skipped_lines += 1
+                if first_skipped_line is None:
+                    first_skipped_line = count + 1
+        out.close()
+    print("%i regions converted to BED." % ( count + 1 - skipped_lines ))
+    if skipped_lines > 0:
+        print("Skipped %d blank or invalid lines starting with line # %d." % ( skipped_lines, first_skipped_line ))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.xml b/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.xml
new file mode 100644
index 0000000..79e919a
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bedstrict_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_interval_to_bedstrict_0" name="Convert Genomic Intervals To Strict BED">
+  <!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">interval_to_bedstrict_converter.py '$output1' '$input1' ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} ${input1.metadata.strandCol} ${input1.metadata.nameCol} ${input1.extension}</command>
+  <inputs>
+    <page>
+      <param format="interval" name="input1" type="data" label="Choose intervals"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bedstrict" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_bgzip_converter.xml b/lib/galaxy/datatypes/converters/interval_to_bgzip_converter.xml
new file mode 100644
index 0000000..11eb5a9
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bgzip_converter.xml
@@ -0,0 +1,19 @@
+<tool id="CONVERTER_interval_to_bgzip_0" name="Convert Interval to BGZIP" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">bgzip.py 
+                                  -c ${input1.metadata.chromCol} 
+                                  -s ${input1.metadata.startCol} 
+                                  -e ${input1.metadata.endCol} 
+                                  '$input1' '$output1'
+  </command>
+  <inputs>
+    <page>
+        <param format="interval" name="input1" type="data" label="Choose Interval file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bgzip" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_bigwig_converter.xml b/lib/galaxy/datatypes/converters/interval_to_bigwig_converter.xml
new file mode 100644
index 0000000..7ead71a
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_bigwig_converter.xml
@@ -0,0 +1,37 @@
+<tool id="CONVERTER_interval_to_bigwig_0" name="Convert Genomic Intervals To Coverage">
+  <!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+    <requirements>
+        <requirement type="package">ucsc_tools</requirement>
+        <requirement type="package">bedtools</requirement>
+    </requirements>
+  <command>
+
+    ## Remove comments and sort by chromosome.
+    grep -v '^#' '$input1' | sort -k${input1.metadata.chromCol},${input1.metadata.chromCol} |
+
+    ## Create simple BED by cutting chrom, start, and end columns.
+    awk -v OFS='	' '{print $${input1.metadata.chromCol},$${input1.metadata.startCol},$${input1.metadata.endCol} }' |
+
+    ## Generate coverage bedgraph.
+    bedtools genomecov -bg -split -i stdin -g $chromInfo 
+
+    ## Streaming the bedgraph file to wigToBigWig is fast but very memory intensive; hence, this
+    ## should only be used on systems with large RAM.
+    ## | wigToBigWig stdin $chromInfo '$output'
+
+    ## This can be used anywhere.
+    > temp.bg ; bedGraphToBigWig temp.bg $chromInfo '$output'
+
+  </command>
+  <inputs>
+    <page>
+      <param format="interval" name="input1" type="data" label="Choose intervals"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bigwig" name="output"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_coverage.py b/lib/galaxy/datatypes/converters/interval_to_coverage.py
new file mode 100644
index 0000000..e58b3ec
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_coverage.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+"""
+Converter to generate 3 (or 4) column base-pair coverage from an interval file.
+
+usage: %prog bed_file out_file
+    -1, --cols1=N,N,N,N: Columns for chrom, start, end, strand in interval file
+    -2, --cols2=N,N,N,N: Columns for chrom, start, end, strand in coverage file
+"""
+import subprocess
+import tempfile
+from bisect import bisect
+from os import environ
+
+from bx.cookbook import doc_optparse
+from bx.intervals import io
+
+INTERVAL_METADATA = ('chromCol',
+                     'startCol',
+                     'endCol',
+                     'strandCol',)
+
+COVERAGE_METADATA = ('chromCol',
+                     'positionCol',
+                     'forwardCol',
+                     'reverseCol',)
+
+
+def main( interval, coverage ):
+    """
+    Uses a sliding window of partitions to count coverages.
+    Every interval record adds its start and end to the partitions.  The result
+    is a list of partitions, or every position that has a (maybe) different
+    number of basepairs covered.  We don't worry about merging because we pop
+    as the sorted intervals are read in.  As the input start positions exceed
+    the partition positions in partitions, coverages are kicked out in bulk.
+    """
+    partitions = []
+    forward_covs = []
+    reverse_covs = []
+    chrom = None
+    lastchrom = None
+    for record in interval:
+        chrom = record.chrom
+        if lastchrom and not lastchrom == chrom and partitions:
+            for partition in range(0, len(partitions) - 1):
+                forward = forward_covs[partition]
+                reverse = reverse_covs[partition]
+                if forward + reverse > 0:
+                    coverage.write(chrom=chrom, position=range(partitions[partition], partitions[partition + 1]),
+                                   forward=forward, reverse=reverse)
+            partitions = []
+            forward_covs = []
+            reverse_covs = []
+
+        start_index = bisect(partitions, record.start)
+        forward = int(record.strand == "+")
+        reverse = int(record.strand == "-")
+        forward_base = 0
+        reverse_base = 0
+        if start_index > 0:
+            forward_base = forward_covs[start_index - 1]
+            reverse_base = reverse_covs[start_index - 1]
+        partitions.insert(start_index, record.start)
+        forward_covs.insert(start_index, forward_base)
+        reverse_covs.insert(start_index, reverse_base)
+        end_index = bisect(partitions, record.end)
+        for index in range(start_index, end_index):
+            forward_covs[index] += forward
+            reverse_covs[index] += reverse
+        partitions.insert(end_index, record.end)
+        forward_covs.insert(end_index, forward_covs[end_index - 1] - forward )
+        reverse_covs.insert(end_index, reverse_covs[end_index - 1] - reverse )
+
+        if partitions:
+            for partition in range(0, start_index):
+                forward = forward_covs[partition]
+                reverse = reverse_covs[partition]
+                if forward + reverse > 0:
+                    coverage.write(chrom=chrom, position=range(partitions[partition], partitions[partition + 1]),
+                                   forward=forward, reverse=reverse)
+            partitions = partitions[start_index:]
+            forward_covs = forward_covs[start_index:]
+            reverse_covs = reverse_covs[start_index:]
+
+        lastchrom = chrom
+
+    # Finish the last chromosome
+    if partitions:
+        for partition in range(0, len(partitions) - 1):
+            forward = forward_covs[partition]
+            reverse = reverse_covs[partition]
+            if forward + reverse > 0:
+                coverage.write(chrom=chrom, position=range(partitions[partition], partitions[partition + 1]),
+                               forward=forward, reverse=reverse)
+
+
+class CoverageWriter( object ):
+    def __init__( self, out_stream=None, chromCol=0, positionCol=1, forwardCol=2, reverseCol=3 ):
+        self.out_stream = out_stream
+        self.reverseCol = reverseCol
+        self.nlines = 0
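+        # Map column index -> format spec, then expand the positional
+        # template below so each value lands in the column requested by
+        # the metadata (e.g. chromCol=0 puts '%(chrom)s' first).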
+        positions = {str(chromCol): '%(chrom)s',
+                     str(positionCol): '%(position)d',
+                     str(forwardCol): '%(forward)d',
+                     str(reverseCol): '%(reverse)d'}
+        if reverseCol < 0:
+            self.template = "%(0)s\t%(1)s\t%(2)s\n" % positions
+        else:
+            self.template = "%(0)s\t%(1)s\t%(2)s\t%(3)s\n" % positions
+
+    def write(self, **kwargs ):
+        if self.reverseCol < 0:
+            kwargs['forward'] += kwargs['reverse']
+        posgen = kwargs['position']
+        for position in posgen:
+            kwargs['position'] = position
+            self.out_stream.write(self.template % kwargs)
+
+    def close(self):
+        self.out_stream.flush()
+        self.out_stream.close()
+
+
+if __name__ == "__main__":
+    options, args = doc_optparse.parse( __doc__ )
+    try:
+        chr_col_1, start_col_1, end_col_1, strand_col_1 = [int(x) - 1 for x in options.cols1.split(',')]
+        chr_col_2, position_col_2, forward_col_2, reverse_col_2 = [int(x) - 1 for x in options.cols2.split(',')]
+        in_fname, out_fname = args
+    except:
+        doc_optparse.exception()
+
+    # Sort through a tempfile first
+    temp_file = tempfile.NamedTemporaryFile(mode="r")
+    environ['LC_ALL'] = 'POSIX'
+    commandline = "sort -f -n -k %d -k %d -k %d -o %s %s" % (chr_col_1 + 1, start_col_1 + 1, end_col_1 + 1, temp_file.name, in_fname)
+    subprocess.check_call(commandline, shell=True)
+
+    coverage = CoverageWriter( out_stream=open(out_fname, "a"),
+                               chromCol=chr_col_2, positionCol=position_col_2,
+                               forwardCol=forward_col_2, reverseCol=reverse_col_2, )
+    temp_file.seek(0)
+    interval = io.NiceReaderWrapper( temp_file,
+                                     chrom_col=chr_col_1,
+                                     start_col=start_col_1,
+                                     end_col=end_col_1,
+                                     strand_col=strand_col_1,
+                                     fix_strand=True )
+    main( interval, coverage )
+    temp_file.close()
+    coverage.close()
diff --git a/lib/galaxy/datatypes/converters/interval_to_coverage.xml b/lib/galaxy/datatypes/converters/interval_to_coverage.xml
new file mode 100644
index 0000000..7bd21c1
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_coverage.xml
@@ -0,0 +1,18 @@
+<tool id="CONVERTER_interval_to_coverage_0" name="Convert Genomic Intervals To COVERAGE">
+  <!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">interval_to_coverage.py '$input1' '$output1'
+  -1 ${input1.metadata.chromCol},${input1.metadata.startCol},${input1.metadata.endCol},${input1.metadata.strandCol}
+  -2 ${output1.metadata.chromCol},${output1.metadata.positionCol},${output1.metadata.forwardCol},${output1.metadata.reverseCol}
+  </command>
+  <inputs>
+    <page>
+      <param format="interval" name="input1" type="data" label="Choose intervals"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="coverage" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_fli.py b/lib/galaxy/datatypes/converters/interval_to_fli.py
new file mode 100644
index 0000000..467fb90
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_fli.py
@@ -0,0 +1,113 @@
+'''
+Creates a feature location index (FLI) for a given BED/GFF file.
+FLI index has the form::
+
+    [line_length]
+    <symbol1_in_lowercase><tab><symbol1><tab><location>
+    <symbol2_in_lowercase><tab><symbol2><tab><location>
+    ...
+
+where location is formatted as::
+
+    contig:start-end
+
+and symbols are sorted in lexicographical order.
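+
+For example, a hypothetical two-gene index::
+
+    [line_length]
+    brca2<tab>BRCA2<tab>chr13:32889611-32973805
+    tp53<tab>TP53<tab>chr17:7571720-7590868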
+'''
+import optparse
+
+from bx.tabular.io import Comment
+
+from galaxy.datatypes.util.gff_util import convert_gff_coords_to_bed, GFFReaderWrapper, read_unordered_gtf
+
+
+def main():
+    # Process arguments.
+    parser = optparse.OptionParser()
+    parser.add_option( '-F', '--format', dest="input_format" )
+    (options, args) = parser.parse_args()
+    in_fname, out_fname = args
+    input_format = options.input_format.lower()
+
+    # Create dict of name-location pairings.
+    name_loc_dict = {}
+    if input_format in [ 'gff', 'gtf' ]:
+        # GTF/GFF format
+
+        # Create reader.
+        if input_format == 'gff':
+            in_reader = GFFReaderWrapper( open( in_fname, 'r' ) )
+        else:  # input_format == 'gtf'
+            in_reader = read_unordered_gtf( open( in_fname, 'r' ) )
+
+        for feature in in_reader:
+            if isinstance( feature, Comment ):
+                continue
+
+            for name in feature.attributes:
+                val = feature.attributes[ name ]
+                try:
+                    float( val )
+                    continue
+                except:
+                    convert_gff_coords_to_bed( feature )
+                    # Value is not a number, so it can be indexed.
+                    if val not in name_loc_dict:
+                        # Value is not in dictionary.
+                        name_loc_dict[ val ] = {
+                            'contig': feature.chrom,
+                            'start': feature.start,
+                            'end': feature.end
+                        }
+                    else:
+                        # Value already in dictionary, so update dictionary.
+                        loc = name_loc_dict[ val ]
+                        if feature.start < loc[ 'start' ]:
+                            loc[ 'start' ] = feature.start
+                        if feature.end > loc[ 'end' ]:
+                            loc[ 'end' ] = feature.end
+    elif input_format == 'bed':
+        # BED format.
+        for line in open( in_fname, 'r' ):
+            # Ignore track lines.
+            if line.startswith("track"):
+                continue
+
+            fields = line.split()
+
+            # Ignore lines with no feature name.
+            if len( fields ) < 4:
+                continue
+
+            # Process line
+            name_loc_dict[ fields[3] ] = {
+                'contig': fields[0],
+                'start': int( fields[1] ),
+                'end': int( fields[2] )
+            }
+
+    # Create sorted list of entries.
+    out = open( out_fname, 'w' )
+    max_len = 0
+    entries = []
+    for name in sorted( name_loc_dict ):
+        loc = name_loc_dict[ name ]
+        entry = '%s\t%s\t%s' % ( name.lower(), name, '%s:%i-%i' % ( loc[ 'contig' ], loc[ 'start' ], loc[ 'end' ] ) )
+        if len( entry ) > max_len:
+            max_len = len( entry )
+        entries.append( entry )
+
+    # Write padded entries.
+    out.write( str( max_len + 1 ).ljust( max_len ) + '\n' )
+    for entry in entries:
+        out.write( entry.ljust( max_len ) + '\n' )
+    out.close()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/lib/galaxy/datatypes/converters/interval_to_interval_index_converter.py b/lib/galaxy/datatypes/converters/interval_to_interval_index_converter.py
new file mode 100644
index 0000000..acdd493
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_interval_index_converter.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+
+"""
+Convert from interval file to interval index file.
+
+usage: %prog <options> in_file out_file
+    -c, --chr-col: chromosome column, default=1
+    -s, --start-col: start column, default=2
+    -e, --end-col: end column, default=3
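+
+example (hypothetical file names):
+    python interval_to_interval_index_converter.py -c 1 -s 2 -e 3 in.bed out.ii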
+"""
+from __future__ import division
+
+import optparse
+
+from bx.interval_index_file import Indexes
+
+
+def main():
+
+    # Read options, args.
+    parser = optparse.OptionParser()
+    parser.add_option( '-c', '--chr-col', type='int', dest='chrom_col', default=1 )
+    parser.add_option( '-s', '--start-col', type='int', dest='start_col', default=2 )
+    parser.add_option( '-e', '--end-col', type='int', dest='end_col', default=3 )
+    (options, args) = parser.parse_args()
+    input_fname, output_fname = args
+
+    # Make column indices 0-based.
+    options.chrom_col -= 1
+    options.start_col -= 1
+    options.end_col -= 1
+
+    # Do conversion.
+    index = Indexes()
+    offset = 0
+    for line in open(input_fname, "r"):
+        feature = line.strip().split()
+        if not feature or feature[0].startswith("track") or feature[0].startswith("#"):
+            offset += len(line)
+            continue
+        chrom = feature[ options.chrom_col ]
+        chrom_start = int( feature[ options.start_col ] )
+        chrom_end = int( feature[ options.end_col ] )
+        index.add( chrom, chrom_start, chrom_end, offset )
+        offset += len(line)
+
+    index.write( open(output_fname, "w") )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/interval_to_interval_index_converter.xml b/lib/galaxy/datatypes/converters/interval_to_interval_index_converter.xml
new file mode 100644
index 0000000..c69f337
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_interval_index_converter.xml
@@ -0,0 +1,19 @@
+<tool id="CONVERTER_interval_to_interval_index_0" name="Convert Interval to Interval Index" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">interval_to_interval_index_converter.py 
+                                -c ${input1.metadata.chromCol} 
+                                -s ${input1.metadata.startCol} 
+                                -e ${input1.metadata.endCol} 
+                                '$input1' '$output1'
+  </command>
+  <inputs>
+    <page>
+        <param format="interval" name="input1" type="data" label="Choose Interval file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="interval_index" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/interval_to_tabix_converter.py b/lib/galaxy/datatypes/converters/interval_to_tabix_converter.py
new file mode 100644
index 0000000..487e3e9
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_tabix_converter.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+"""
+Uses pysam to index a bgzipped interval file with tabix
+Supported presets: bed, gff, vcf
+
+usage: %prog [options] in_file index_file out_file
+"""
+import optparse
+import os
+import sys
+
+from pysam import ctabix
+
+
+def main():
+    # Read options, args.
+    parser = optparse.OptionParser()
+    parser.add_option( '-c', '--chr-col', type='int', dest='chrom_col' )
+    parser.add_option( '-s', '--start-col', type='int', dest='start_col' )
+    parser.add_option( '-e', '--end-col', type='int', dest='end_col' )
+    parser.add_option( '-P', '--preset', dest='preset' )
+    (options, args) = parser.parse_args()
+    input_fname, index_fname, out_fname = args
+
+    # Create index.
+    if options.preset:
+        # Preset type.
+        ctabix.tabix_index(filename=index_fname, preset=options.preset, keep_original=True,
+                           index_filename=out_fname)
+    else:
+        # For interval files; column indices are 0-based.
+        ctabix.tabix_index(filename=index_fname, seq_col=(options.chrom_col - 1),
+                           start_col=(options.start_col - 1), end_col=(options.end_col - 1),
+                           keep_original=True, index_filename=out_fname)
+    if os.path.getsize(index_fname) == 0:
+        sys.stderr.write("The converted tabix index file is empty, meaning the input data is invalid.")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/interval_to_tabix_converter.xml b/lib/galaxy/datatypes/converters/interval_to_tabix_converter.xml
new file mode 100644
index 0000000..1dc6dde
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/interval_to_tabix_converter.xml
@@ -0,0 +1,20 @@
+<tool id="CONVERTER_interval_to_tabix_0" name="Convert Interval to tabix" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">interval_to_tabix_converter.py
+                                  -c ${input1.metadata.chromCol} 
+                                  -s ${input1.metadata.startCol} 
+                                  -e ${input1.metadata.endCol} 
+                                  '$input1' '$bgzip' '$output1'
+  </command>
+  <inputs>
+    <page>
+        <param format="interval" name="input1" type="data" label="Choose Interval file"/>
+        <param format="bgzip" name="bgzip" type="data" label="BGZIP file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="tabix" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/len_to_linecount.xml b/lib/galaxy/datatypes/converters/len_to_linecount.xml
new file mode 100644
index 0000000..3398fa4
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/len_to_linecount.xml
@@ -0,0 +1,13 @@
+<tool id="CONVERTER_len_to_linecount" name="Convert Len file to Linecount" version="1.0.0">
+    <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+    <!-- Used on the metadata edit page. -->
+    <command>wc -l '$input' | awk '{print $1}' > '$output' </command>
+    <inputs>
+        <param name="input" type="data" format="len" label="Fasta file"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="linecount"/>
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/lped_to_fped_converter.py b/lib/galaxy/datatypes/converters/lped_to_fped_converter.py
new file mode 100644
index 0000000..1774482
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/lped_to_fped_converter.py
@@ -0,0 +1,112 @@
+# for rgenetics - lped to fbat
+# recode to numeric fbat version
+# much slower so best to always
+# use numeric alleles internally
+from __future__ import print_function
+
+import os
+import sys
+import time
+
+prog = os.path.split(sys.argv[0])[-1]
+myversion = 'Oct 10 2009'
+
+galhtmlprefix = """<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<meta name="generator" content="Galaxy %s tool output - see http://getgalaxy.org" />
+<title></title>
+<link rel="stylesheet" href="/static/style/base.css" type="text/css" />
+</head>
+<body>
+<div class="document">
+"""
+
+
+def timenow():
+    """return current time as a string
+    """
+    return time.strftime('%d/%m/%Y %H:%M:%S', time.localtime(time.time()))
+
+
+def rgConv(inpedfilepath, outhtmlname, outfilepath):
+    """convert linkage ped/map to fbat"""
+    recode = {'A': '1', 'C': '2', 'G': '3', 'T': '4', 'N': '0', '0': '0', '1': '1', '2': '2', '3': '3', '4': '4'}
+    basename = os.path.split(inpedfilepath)[-1]  # get basename
+    inmap = '%s.map' % inpedfilepath
+    inped = '%s.ped' % inpedfilepath
+    outf = '%s.ped' % basename  # note the fbat exe insists that this is the extension for the ped data
+    outfpath = os.path.join(outfilepath, outf)  # where to write the fbat format file to
+    try:
+        mf = open(inmap, 'r')
+    except:
+        sys.stderr.write('%s cannot open inmap file %s - do you have permission?\n' % (prog, inmap))
+        sys.exit(1)
+    try:
+        rsl = [x.split()[1] for x in mf]
+    except:
+        sys.stderr.write('## cannot parse %s' % inmap)
+        sys.exit(1)
+    try:
+        os.makedirs(outfilepath)
+    except:
+        pass  # already exists
+    head = ' '.join(rsl)  # list of rs numbers
+    # TODO add anno to rs but fbat will prolly barf?
+    pedf = open(inped, 'r')
+    o = open(outfpath, 'w', 2 ** 20)
+    o.write(head)
+    o.write('\n')
+    dorecode = 0  # only recode if the first row shows non numeric alleles
+    for i, row in enumerate(pedf):
+        if i == 0:
+            lrow = row.split()
+            try:
+                [int(x) for x in lrow[10:50]]  # look for non numeric codes
+            except:
+                dorecode = 1
+        if dorecode:
+            lrow = row.strip().split()
+            p = lrow[:6]
+            g = lrow[6:]
+            gc = [recode.get(z, '0') for z in g]
+            lrow = p + gc
+            row = '%s\n' % ' '.join(lrow)
+        o.write(row)
+    o.close()
+
+
+def main():
+    """call fbater
+    need to work with rgenetics composite datatypes
+    so in and out are html files with data in extrafiles path
+    <command interpreter="python">rg_convert_lped_fped.py '$input1/$input1.metadata.base_name'
+    '$output1' '$output1.extra_files_path'
+    </command>
+    """
+    nparm = 3
+    if len(sys.argv) < nparm:
+        sys.stderr.write('## %s called with %s - needs %d parameters \n' % (prog, sys.argv, nparm))
+        sys.exit(1)
+    inpedfilepath = sys.argv[1]
+    outhtmlname = sys.argv[2]
+    outfilepath = sys.argv[3]
+    try:
+        os.makedirs(outfilepath)
+    except:
+        pass
+    rgConv(inpedfilepath, outhtmlname, outfilepath)
+    flist = os.listdir(outfilepath)
+    with open(outhtmlname, 'w') as f:
+        f.write(galhtmlprefix % prog)
+        print('## Rgenetics: http://rgenetics.org Galaxy Tools %s %s' % (prog, timenow()))  # becomes info
+        f.write('<div>## Rgenetics: http://rgenetics.org Galaxy Tools %s %s\n<ol>' % (prog, timenow()))
+        for i, data in enumerate( flist ):
+            f.write('<li><a href="%s">%s</a></li>\n' % (os.path.split(data)[-1], os.path.split(data)[-1]))
+        f.write("</div></body></html>")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/lped_to_fped_converter.xml b/lib/galaxy/datatypes/converters/lped_to_fped_converter.xml
new file mode 100644
index 0000000..d359914
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/lped_to_fped_converter.xml
@@ -0,0 +1,15 @@
+<tool id="lped2fpedconvert" name="Convert lped to fped" version="0.01">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">
+   lped_to_fped_converter.py '$input1.extra_files_path/$input1.metadata.base_name' '$output1' '$output1.files_path'
+  </command>
+  <inputs>
+    <param format="lped" name="input1" type="data" label="Choose linkage pedigree file"/>
+  </inputs>
+  <outputs>
+    <data format="fped" name="output1" metadata_source="input1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py
new file mode 100644
index 0000000..fc7ef10
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.py
@@ -0,0 +1,118 @@
+# for rgenetics - lped to pbed
+# where to stop with converters
+# pbed might be central
+# eg lped/eigen/fbat/snpmatrix all to pbed
+# and pbed to lped/eigen/fbat/snpmatrix ?
+# that's a lot of converters
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+import time
+
+prog = os.path.split(sys.argv[0])[-1]
+myversion = 'Oct 10 2009'
+
+galhtmlprefix = """<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<meta name="generator" content="Galaxy %s tool output - see http://getgalaxy.org" />
+<title></title>
+<link rel="stylesheet" href="/static/style/base.css" type="text/css" />
+</head>
+<body>
+<div class="document">
+"""
+
+
+def timenow():
+    """return current time as a string
+    """
+    return time.strftime('%d/%m/%Y %H:%M:%S', time.localtime(time.time()))
+
+
+def getMissval(inped=''):
+    """
+    read some lines...ugly hack - try to guess missing value
+    should be N or 0 but might be . or -
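+    e.g. (illustrative) the ped line '1 1 0 0 1 1 A A G G 0 0' yields '0'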
+    """
+    commonmissvals = {'N': 'N', '0': '0', 'n': 'n', '9': '9', '-': '-', '.': '.'}
+    try:
+        f = open(inped, 'r')
+    except:
+        return None  # signal no in file
+    missval = None
+    while missval is None:  # doggedly continue until we solve the mystery
+        try:
+            l = f.readline()
+        except:
+            break
+        if not l:  # EOF: no missing-value code found, fall back to the default
+            break
+        ll = l.split()[6:]  # ignore pedigree stuff
+        for c in ll:
+            if commonmissvals.get(c, None):
+                missval = c
+                f.close()
+                return missval
+    if not missval:
+        missval = 'N'  # punt
+    f.close()
+    return missval
+
+
+def rgConv(inpedfilepath, outhtmlname, outfilepath, plink):
+    """
+    """
+    pedf = '%s.ped' % inpedfilepath
+    basename = os.path.split(inpedfilepath)[-1]  # get basename
+    outroot = os.path.join(outfilepath, basename)
+    missval = getMissval(inped=pedf)
+    if not missval:
+        print('### lped_to_pbed_converter.py cannot identify missing value in %s' % pedf)
+        missval = '0'
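+    # plink reads the .ped/.map pair named by --file and writes the binary
+    # .bed/.bim/.fam trio under --out; --noweb skips the version check.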
+    cl = '%s --noweb --file %s --make-bed --out %s --missing-genotype %s' % (plink, inpedfilepath, outroot, missval)
+    p = subprocess.Popen(cl, shell=True, cwd=outfilepath)
+    p.wait()  # run plink
+
+
+def main():
+    """
+    need to work with rgenetics composite datatypes
+    so in and out are html files with data in extrafiles path
+    <command interpreter="python">lped_to_pbed_converter.py '$input1/$input1.metadata.base_name'
+    '$output1' '$output1.extra_files_path' '${GALAXY_DATA_INDEX_DIR}/rg/bin/plink'
+    </command>
+    """
+    nparm = 4
+    if len(sys.argv) < nparm:
+        sys.stderr.write('## %s called with %s - needs %d parameters \n' % (prog, sys.argv, nparm))
+        sys.exit(1)
+    inpedfilepath = sys.argv[1]
+    outhtmlname = sys.argv[2]
+    outfilepath = sys.argv[3]
+    try:
+        os.makedirs(outfilepath)
+    except:
+        pass
+    plink = sys.argv[4]
+    rgConv(inpedfilepath, outhtmlname, outfilepath, plink)
+    flist = os.listdir(outfilepath)
+    with open(outhtmlname, 'w') as f:
+        f.write(galhtmlprefix % prog)
+        s = '## Rgenetics: http://rgenetics.org Galaxy Tools %s %s' % (prog, timenow())  # becomes info
+        print(s)
+        f.write('<div>%s\n<ol>' % (s))
+        for i, data in enumerate( flist ):
+            f.write('<li><a href="%s">%s</a></li>\n' % (os.path.split(data)[-1], os.path.split(data)[-1]))
+        f.write("</ol></div></div></body></html>")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/lped_to_pbed_converter.xml b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.xml
new file mode 100644
index 0000000..475db4b
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/lped_to_pbed_converter.xml
@@ -0,0 +1,16 @@
+<tool id="lped2pbedconvert" name="Convert lped to plink pbed" version="0.01">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">
+   lped_to_pbed_converter.py '$input1.extra_files_path/$input1.metadata.base_name'
+   '$output1' '$output1.files_path' 'plink'
+  </command>
+  <inputs>
+    <param format="lped" name="input1" type="data" label="Choose linkage pedigree file"/>
+  </inputs>
+  <outputs>
+    <data format="pbed" name="output1" metadata_source="input1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py
new file mode 100644
index 0000000..4a6fa47
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+import bx.align.maf
+
+from galaxy.tools.util import maf_utilities
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    output_name = sys.argv.pop(1)
+    input_name = sys.argv.pop(1)
+    out = open( output_name, 'w' )
+    count = 0
+    for count, block in enumerate( bx.align.maf.Reader( open( input_name, 'r' ) ) ):
+        spec_counts = {}
+        for c in block.components:
+            spec, chrom = maf_utilities.src_split( c.src )
+            if spec not in spec_counts:
+                spec_counts[ spec ] = 0
+            else:
+                spec_counts[ spec ] += 1
+            out.write( "%s\n" % maf_utilities.get_fasta_header( c, { 'block_index' : count, 'species' : spec, 'sequence_index' : spec_counts[ spec ] }, suffix="%s_%i_%i" % ( spec, count, spec_counts[ spec ] ) ) )
+            out.write( "%s\n" % c.text )
+        out.write( "\n" )
+    out.close()
+    print("%i MAF blocks converted to FASTA." % ( count ))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/maf_to_fasta_converter.xml b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.xml
new file mode 100644
index 0000000..fcb6ef2
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/maf_to_fasta_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_maf_to_fasta_0" name="Convert MAF to Fasta" version="1.0.1">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">maf_to_fasta_converter.py '$output1' '$input1'</command>
+  <inputs>
+    <page>
+        <param format="maf" name="input1" type="data" label="Choose MAF file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="fasta" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+<!--  <code file="maf_to_fasta_converter_code.py"/>-->
+</tool>
diff --git a/lib/galaxy/datatypes/converters/maf_to_interval_converter.py b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py
new file mode 100644
index 0000000..3e68f2a
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+import bx.align.maf
+
+from galaxy.tools.util import maf_utilities
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    output_name = sys.argv.pop(1)
+    input_name = sys.argv.pop(1)
+    species = sys.argv.pop(1)
+    out = open(output_name, 'w')
+    count = 0
+    # write interval header line
+    out.write( "#chrom\tstart\tend\tstrand\n" )
+    try:
+        for block in bx.align.maf.Reader( open( input_name, 'r' ) ):
+            for c in maf_utilities.iter_components_by_src_start( block, species ):
+                if c is not None:
+                    out.write( "%s\t%i\t%i\t%s\n" % ( maf_utilities.src_split( c.src )[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand ) )
+                    count += 1
+    except Exception as e:
+        print("There was a problem processing your input: %s" % e, file=sys.stderr)
+    out.close()
+    print("%i MAF blocks converted to Genomic Intervals for species %s." % ( count, species ))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml b/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml
new file mode 100644
index 0000000..6e85eaa
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/maf_to_interval_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_maf_to_interval_0" name="Convert MAF to Genomic Intervals" version="1.0.2">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">maf_to_interval_converter.py '$output1' '$input1' '${input1.metadata.dbkey}'</command>
+  <inputs>
+    <page>
+        <param format="maf" name="input1" type="data" label="Choose MAF file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="interval" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+<!--  <code file="maf_to_interval_converter_code.py"/> -->
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol2_to_cml_converter.xml b/lib/galaxy/datatypes/converters/mol2_to_cml_converter.xml
new file mode 100644
index 0000000..24b5f60
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol2_to_cml_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_mol2_to_cml" name="MOL2 to CML" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol2 '${input}' -ocml -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol2" label="Molecules in MOL2-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="cml"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol2_to_inchi_converter.xml b/lib/galaxy/datatypes/converters/mol2_to_inchi_converter.xml
new file mode 100644
index 0000000..be5daa3
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol2_to_inchi_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_mol2_to_inchi" name="MOL2 to InChI" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol2 '${input}' -oinchi -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol2" label="Molecules in MOL2-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="inchi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol2_to_mol_converter.xml b/lib/galaxy/datatypes/converters/mol2_to_mol_converter.xml
new file mode 100644
index 0000000..e121005
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol2_to_mol_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_mol2_to_mol" name="MOL2 to MOL" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol2 '${input}' -omol -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol2" label="Molecules in MOL2-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol2_to_sdf_converter.xml b/lib/galaxy/datatypes/converters/mol2_to_sdf_converter.xml
new file mode 100644
index 0000000..e72b17e
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol2_to_sdf_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_mol2_to_sdf" name="MOL2 to SDF" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol2 '${input}' -osdf -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol2" label="Molecules in MOL2-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="sdf"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol2_to_smi_converter.xml b/lib/galaxy/datatypes/converters/mol2_to_smi_converter.xml
new file mode 100644
index 0000000..c8f30d0
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol2_to_smi_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_mol2_to_smi" name="MOL2 to SMILES" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol2 '${input}' -osmi -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol2" label="Molecules in MOL2-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="smi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol_to_cml_converter.xml b/lib/galaxy/datatypes/converters/mol_to_cml_converter.xml
new file mode 100644
index 0000000..a13979d
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol_to_cml_converter.xml
@@ -0,0 +1,21 @@
+<tool id="CONVERTER_mol_to_cml" name="MOL to CML" version="1.0.0">
+    <description></description>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol '${input}' -ocml -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol" label="Molecules in MOL-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="cml"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol_to_inchi_converter.xml b/lib/galaxy/datatypes/converters/mol_to_inchi_converter.xml
new file mode 100644
index 0000000..e14263e
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol_to_inchi_converter.xml
@@ -0,0 +1,21 @@
+<tool id="CONVERTER_mol_to_mol2" name="MOL to MOL2" version="1.0.0">
+    <description></description>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol '${input}' -oinchi -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol" label="Molecules in MOL-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol2"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol_to_mol2_converter.xml b/lib/galaxy/datatypes/converters/mol_to_mol2_converter.xml
new file mode 100644
index 0000000..e14263e
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol_to_mol2_converter.xml
@@ -0,0 +1,21 @@
+<tool id="CONVERTER_mol_to_mol2" name="MOL to MOL2" version="1.0.0">
+    <description></description>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol '${input}' -omol2 -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol" label="Molecules in MOL-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol2"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/mol_to_smi_converter.xml b/lib/galaxy/datatypes/converters/mol_to_smi_converter.xml
new file mode 100644
index 0000000..acc7616
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/mol_to_smi_converter.xml
@@ -0,0 +1,21 @@
+<tool id="CONVERTER_mol_to_smi" name="MOL to SMILES" version="1.0.0">
+    <description></description>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -imol "${input}" -osmi  -O "${output}" -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="mol" label="Molecules in MOL-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="smi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
new file mode 100644
index 0000000..02a6541
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.py
@@ -0,0 +1,115 @@
+# converter for ldreduced rgenetics datatype
+# used for grr and eigenstrat - shellfish if we get around to it
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+import tempfile
+import time
+
+prog = "pbed_ldreduced_converter.py"
+
+galhtmlprefix = """<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<meta name="generator" content="Galaxy %s tool output - see http://getgalaxy.org" />
+<title></title>
+<link rel="stylesheet" href="/static/style/base.css" type="text/css" />
+</head>
+<body>
+<div class="document">
+"""
+
+plinke = 'plink'
+
+
+def timenow():
+    """return current time as a string
+    """
+    return time.strftime('%d/%m/%Y %H:%M:%S', time.localtime(time.time()))
+
+
+def pruneLD(plinktasks=None, cd='./', vclbase=None):
+    """Run each plink task in turn, collecting plink's log output."""
+    plinktasks = plinktasks or []
+    vclbase = vclbase or []
+    fplog, plog = tempfile.mkstemp()
+    alog = []
+    alog.append('## Rgenetics: http://rgenetics.org Galaxy Tools rgQC.py Plink pruneLD runner\n')
+    for task in plinktasks:  # each is a list
+        vcl = vclbase + task
+        with open(plog, 'w') as sto:
+            x = subprocess.Popen(' '.join(vcl), shell=True, stdout=sto, stderr=sto, cwd=cd)
+            x.wait()
+        try:
+            lplog = open(plog, 'r').readlines()
+            lplog = [elem for elem in lplog if elem.find('Pruning SNP') == -1]
+            alog += lplog
+            alog.append('\n')
+            os.unlink(plog)  # no longer needed
+        except Exception:
+            alog.append('### %s Strange - no std out from plink when running command line\n%s\n' % (timenow(), ' '.join(vcl)))
+    return alog
+
+
+def makeLDreduced(basename, infpath=None, outfpath=None, plinke='plink', forcerebuild=False, returnFname=False,
+                  winsize="60", winmove="40", r2thresh="0.1" ):
+    """ not there so make and leave in output dir for post job hook to copy back into input extra files path for next time
+    """
+    outbase = os.path.join(outfpath, basename)
+    inbase = infpath
+    vclbase = [plinke, '--noweb']
+    plinktasks = [['--bfile', inbase, '--indep-pairwise %s %s %s' % (winsize, winmove, r2thresh), '--out %s' % outbase],
+                  ['--bfile', inbase, '--extract %s.prune.in --make-bed --out %s' % (outbase, outbase)]]
+    pruneLD(plinktasks=plinktasks, cd=outfpath, vclbase=vclbase)
+
+
+def main():
+    """
+    need to work with rgenetics composite datatypes
+    so in and out are html files with data in extrafiles path
+
+    .. raw:: xml
+
+        <command interpreter="python">
+            pbed_ldreduced_converter.py '$input1.extra_files_path/$input1.metadata.base_name' '$winsize' '$winmove' '$r2thresh'
+            '$output1' '$output1.files_path' 'plink'
+        </command>
+
+    """
+    nparm = 7
+    if len(sys.argv) <= nparm:  # sys.argv[7] is used below, so nparm + 1 entries are needed
+        sys.stderr.write('## %s called with %s - needs %d parameters \n' % (prog, sys.argv, nparm))
+        sys.exit(1)
+    inpedfilepath = sys.argv[1]
+    base_name = os.path.split(inpedfilepath)[-1]
+    winsize = sys.argv[2]
+    winmove = sys.argv[3]
+    r2thresh = sys.argv[4]
+    outhtmlname = sys.argv[5]
+    outfilepath = sys.argv[6]
+    try:
+        os.makedirs(outfilepath)
+    except OSError:
+        pass
+    plink = sys.argv[7]
+    makeLDreduced(base_name, infpath=inpedfilepath, outfpath=outfilepath, plinke=plink, forcerebuild=False, returnFname=False,
+                  winsize=winsize, winmove=winmove, r2thresh=r2thresh)
+    flist = os.listdir(outfilepath)
+    with open(outhtmlname, 'w') as f:
+        f.write(galhtmlprefix % prog)
+        s1 = '## Rgenetics: http://rgenetics.org Galaxy Tools %s %s' % (prog, timenow())  # becomes info
+        s2 = 'Input %s, winsize=%s, winmove=%s, r2thresh=%s' % (base_name, winsize, winmove, r2thresh)
+        print('%s %s' % (s1, s2))
+        f.write('<div>%s\n%s\n<ol>' % (s1, s2))
+        for i, data in enumerate( flist ):
+            f.write('<li><a href="%s">%s</a></li>\n' % (os.path.split(data)[-1], os.path.split(data)[-1]))
+        f.write("</div></body></html>")
+
+
+if __name__ == "__main__":
+    main()
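
[Note: for reference, with the function defaults above (60, 40, 0.1), makeLDreduced() hands pruneLD() two plink command lines of roughly this shape; a sketch, with placeholder paths rather than real files:

    # Rough shape of the two plink invocations assembled by makeLDreduced():
    inbase, outbase = '/data/in/basename', '/work/out/basename'
    cmds = [
        'plink --noweb --bfile %s --indep-pairwise 60 40 0.1 --out %s' % (inbase, outbase),
        'plink --noweb --bfile %s --extract %s.prune.in --make-bed --out %s' % (inbase, outbase, outbase),
    ]
]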
diff --git a/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.xml b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.xml
new file mode 100644
index 0000000..a67dccb
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pbed_ldreduced_converter.xml
@@ -0,0 +1,18 @@
+<tool id="pbed2ldindepconvert" name="Convert plink pbed to ld reduced format" version="0.01">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">
+   pbed_ldreduced_converter.py '$input1.extra_files_path/$input1.metadata.base_name' '60' '55' '0.1' '$output1' '$output1.files_path' 'plink'
+  </command>
+  <inputs>
+   <page>
+     <param format="pbed" name="input1" type="data" label="Choose a compressed Plink binary format genotype file"/>
+   </page>
+  </inputs>
+  <outputs>
+    <data format="ldindep" name="output1" metadata_source="input1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py
new file mode 100644
index 0000000..ed45a20
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.py
@@ -0,0 +1,81 @@
+# for rgenetics - lped to pbed
+# where to stop with converters
+# pbed might be central
+# eg lped/eigen/fbat/snpmatrix all to pbed
+# and pbed to lped/eigen/fbat/snpmatrix ?
+# that's a lot of converters
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+import time
+
+
+prog = os.path.split(sys.argv[0])[-1]
+myversion = 'Oct 10 2009'
+
+galhtmlprefix = """<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<meta name="generator" content="Galaxy %s tool output - see http://getgalaxy.org" />
+<title></title>
+<link rel="stylesheet" href="/static/style/base.css" type="text/css" />
+</head>
+<body>
+<div class="document">
+"""
+
+
+def timenow():
+    """return current time as a string
+    """
+    return time.strftime('%d/%m/%Y %H:%M:%S', time.localtime(time.time()))
+
+
+def rgConv(inpedfilepath, outhtmlname, outfilepath, plink):
+    """
+    """
+    basename = os.path.split(inpedfilepath)[-1]  # get basename
+    outroot = os.path.join(outfilepath, basename)
+    cl = '%s --noweb --bfile %s --recode --out %s ' % (plink, inpedfilepath, outroot)
+    p = subprocess.Popen(cl, shell=True, cwd=outfilepath)
+    p.wait()  # run plink
+
+
+def main():
+    """
+    need to work with rgenetics composite datatypes
+    so in and out are html files with data in extrafiles path
+    <command interpreter="python">pbed_to_lped_converter.py '$input1/$input1.metadata.base_name'
+    '$output1' '$output1.extra_files_path' '${GALAXY_DATA_INDEX_DIR}/rg/bin/plink'
+    </command>
+    """
+    nparm = 4
+    if len(sys.argv) <= nparm:  # sys.argv[4] is used below, so nparm + 1 entries are needed
+        sys.stderr.write('PBED to LPED converter called with %s - needs %d parameters \n' % (sys.argv, nparm))
+        sys.exit(1)
+    inpedfilepath = sys.argv[1]
+    outhtmlname = sys.argv[2]
+    outfilepath = sys.argv[3]
+    try:
+        os.makedirs(outfilepath)
+    except OSError:
+        pass
+    plink = sys.argv[4]
+    rgConv(inpedfilepath, outhtmlname, outfilepath, plink)
+    flist = os.listdir(outfilepath)
+    with open(outhtmlname, 'w') as f:
+        f.write(galhtmlprefix % prog)
+        s = '## Rgenetics: http://bitbucket.org/rgalaxy Galaxy Tools %s %s' % (prog, timenow())  # becomes info
+        print(s)
+        f.write('<div>%s\n<ol>' % (s))
+        for i, data in enumerate( flist ):
+            f.write('<li><a href="%s">%s</a></li>\n' % (os.path.split(data)[-1], os.path.split(data)[-1]))
+        f.write("</ol></div></div></body></html>")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/pbed_to_lped_converter.xml b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.xml
new file mode 100644
index 0000000..84fb236
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pbed_to_lped_converter.xml
@@ -0,0 +1,16 @@
+<tool id="pbed2lpedconvert" name="Convert plink pbed to linkage lped" version="0.01">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">
+   pbed_to_lped_converter.py '$input1.extra_files_path/$input1.metadata.base_name'
+   '$output1' '$output1.files_path' 'plink'
+  </command>
+  <inputs>
+    <param format="pbed" name="input1" type="data" label="Choose compressed Plink binary format genotype file"/>
+  </inputs>
+  <outputs>
+    <data format="lped" name="output1" metadata_source="input1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
new file mode 100644
index 0000000..e9ebd89
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+assert sys.version_info[:2] >= ( 2, 5 )
+HEADER_STARTS_WITH = ( '@', )
+
+
+def __main__():
+    input_name = sys.argv[1]
+    output_name = sys.argv[2]
+    skipped_lines = 0
+    first_skipped_line = 0
+    header_lines = 0
+    out = open( output_name, 'w' )
+    i = 0
+    for i, line in enumerate( open( input_name ) ):
+        line = line.rstrip( '\r\n' )
+        if line:
+            if line.startswith( HEADER_STARTS_WITH ):
+                header_lines += 1
+            else:
+                try:
+                    elems = line.split( '\t' )
+                    out.write( '%s\t%s\t%s\t%s\t0\t%s\n' % ( elems[0], int(elems[1]) - 1, elems[2], elems[4], elems[3] ) )
+                except Exception as e:
+                    print(e)
+                    skipped_lines += 1
+                    if not first_skipped_line:
+                        first_skipped_line = i + 1
+        else:
+            skipped_lines += 1
+            if not first_skipped_line:
+                first_skipped_line = i + 1
+    out.close()
+    info_msg = "%i lines converted to BED.  " % ( i + 1 - skipped_lines )
+    if skipped_lines > 0:
+        info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
+    print(info_msg)
+
+
+if __name__ == "__main__":
+    __main__()
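
[Note: the only transformation is the 1-based to 0-based start shift plus a column reorder into BED6; a worked example on a hypothetical interval list record (chrom, start, end, strand, name):

    # 'chr1 101 200 + target_1' (1-based, inclusive) becomes
    # 'chr1 100 200 target_1 0 +' (0-based, half-open start).
    line = 'chr1\t101\t200\t+\ttarget_1'
    elems = line.split('\t')
    bed6 = '%s\t%s\t%s\t%s\t0\t%s' % (elems[0], int(elems[1]) - 1, elems[2], elems[4], elems[3])
    assert bed6 == 'chr1\t100\t200\ttarget_1\t0\t+'
]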
diff --git a/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.xml b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.xml
new file mode 100644
index 0000000..5ed6cac
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/picard_interval_list_to_bed6_converter.xml
@@ -0,0 +1,12 @@
+<tool id="CONVERTER_picard_interval_list_to_bed6" name="Convert Picard Interval List to BED6" version="1.0.0">
+    <description>converter</description>
+    <command interpreter="python">picard_interval_list_to_bed6_converter.py '$input' '$output'</command>
+    <inputs>
+        <param name="input" type="data" format="picard_interval_list" label="Picard Interval List file"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="bed6"/>
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.py b/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.py
new file mode 100644
index 0000000..a8f6271
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+"""
+Convert from pileup file to interval index file.
+
+usage: %prog <options> in_file out_file
+"""
+from __future__ import division
+
+import optparse
+
+from bx.interval_index_file import Indexes
+
+
+def main():
+
+    # Read options, args.
+    parser = optparse.OptionParser()
+    (options, args) = parser.parse_args()
+    input_fname, output_fname = args
+
+    # Do conversion.
+    index = Indexes()
+    offset = 0
+    for line in open( input_fname, "r" ):
+        chrom, start = line.split()[ 0:2 ]
+        # Pileup format is 1-based.
+        start = int( start ) - 1
+        index.add( chrom, start, start + 1, offset )
+        offset += len( line )
+
+    index.write( open(output_fname, "w") )
+
+
+if __name__ == "__main__":
+    main()
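
[Note: the interval index records, for each pileup position, the byte offset of the line it came from, so the line can be re-read later; a minimal sketch of that bookkeeping on two hypothetical pileup lines:

    # Each entry records (chrom, start, end, byte offset of the source line).
    lines = ['chr1\t5\tA\t10\n', 'chr1\t6\tC\t12\n']
    offset, entries = 0, []
    for line in lines:
        chrom, start = line.split()[0:2]
        start = int(start) - 1          # pileup positions are 1-based
        entries.append((chrom, start, start + 1, offset))
        offset += len(line)
    assert entries == [('chr1', 4, 5, 0), ('chr1', 5, 6, 12)]
]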
diff --git a/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.xml b/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.xml
new file mode 100644
index 0000000..5043671
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_pileup_to_interval_index_0" name="Convert Pileup to Interval Index" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">pileup_to_interval_index_converter.py '$input' '$output'
+  </command>
+  <inputs>
+    <page>
+        <param format="pileup" name="input" type="data" label="Choose Pileup file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="interval_index" name="output"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py b/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py
new file mode 100644
index 0000000..55266c6
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+"""
+Convert a ref.taxonomy file to a seq.taxonomy file.
+Usage:
+python ref_to_seq_taxonomy_converter.py <ref.taxonomy> <seq.taxonomy>
+"""
+
+import re
+import sys
+
+assert sys.version_info[:2] >= (2, 4)
+
+
+def __main__():
+    infile_name = sys.argv[1]
+    outfile = open(sys.argv[2], 'w')
+    for i, line in enumerate(open(infile_name)):
+        line = line.rstrip()
+        if line and not line.startswith('#'):
+            fields = line.split('\t')
+            # make sure the 2nd field (taxonomy) ends with a ;
+            outfile.write('%s\t%s;\n' % (fields[0], re.sub(';$', '', fields[1])))
+
+    outfile.close()
+
+
+if __name__ == "__main__":
+    __main__()
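
[Note: the re.sub() plus the appended ';' guarantee exactly one trailing semicolon whether or not the input taxonomy already had one; a quick check on hypothetical strings:

    import re
    for tax in ('Bacteria;Firmicutes', 'Bacteria;Firmicutes;'):
        # strip any existing trailing ';', then add exactly one back
        assert re.sub(';$', '', tax) + ';' == 'Bacteria;Firmicutes;'
]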
diff --git a/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.xml b/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.xml
new file mode 100644
index 0000000..2d1d9db
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/ref_to_seq_taxonomy_converter.xml
@@ -0,0 +1,12 @@
+<tool id="CONVERTER_ref_to_seq_taxomony" name="Convert Ref taxonomy to Seq Taxonomy" version="1.0.0">
+  <description>converts 2 or 3 column sequence taxonomy file to a 2 column mothur taxonomy_outline format</description>
+  <command> python '$__tool_directory__/ref_to_seq_taxonomy_converter.py' '$input' '$output'</command>
+  <inputs>
+    <param name="input" type="data" format="mothur.ref.taxonomy" label="a Sequence Taxomony file"/>
+  </inputs>
+  <outputs>
+    <data name="output" format="mothur.seq.taxonomy"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/converters/sam_to_bam.py b/lib/galaxy/datatypes/converters/sam_to_bam.py
new file mode 100644
index 0000000..6fe14fe
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/sam_to_bam.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+
+"""
+A wrapper script for converting SAM to BAM, with sorting.
+%prog input_filename.sam output_filename.bam
+"""
+import os
+import sys
+import optparse
+import tempfile
+import subprocess
+import shutil
+from distutils.version import LooseVersion
+
+CHUNK_SIZE = 2 ** 20  # 1mb
+
+
+def cleanup_before_exit( tmp_dir ):
+    if tmp_dir and os.path.exists( tmp_dir ):
+        shutil.rmtree( tmp_dir )
+
+
+def cmd_exists(cmd):
+    # http://stackoverflow.com/questions/5226958/which-equivalent-function-in-python
+    for path in os.environ["PATH"].split(":"):
+        if os.path.exists(os.path.join(path, cmd)):
+            return True
+    return False
+
+
+def _get_samtools_version():
+    version = '0.0.0'
+    if not cmd_exists('samtools'):
+        raise Exception('This tool needs samtools, but it is not on PATH.')
+    # Get the version of samtools via --version-only, if available
+    p = subprocess.Popen( ['samtools', '--version-only'],
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE)
+    output, error = p.communicate()
+
+    # --version-only is available
+    # Format is <version x.y.z>+htslib-<a.b.c>
+    if p.returncode == 0:
+        version = output.split('+')[0]
+        return version
+
+    output = subprocess.Popen( [ 'samtools' ], stderr=subprocess.PIPE, stdout=subprocess.PIPE ).communicate()[1]
+    lines = output.split( '\n' )
+    for line in lines:
+        if line.lower().startswith( 'version' ):
+            # Assuming line looks something like: version: 0.1.12a (r862)
+            version = line.split()[1]
+            break
+    return version
+
+
+def __main__():
+    # Parse Command Line
+    parser = optparse.OptionParser()
+    (options, args) = parser.parse_args()
+
+    assert len( args ) == 2, 'You must specify the input and output filenames'
+    input_filename, output_filename = args
+
+    tmp_dir = tempfile.mkdtemp( prefix='tmp-sam_to_bam_converter-' )
+
+    # convert SAM to BAM
+    unsorted_bam_filename = os.path.join( tmp_dir, 'unsorted.bam' )
+    unsorted_stderr_filename = os.path.join( tmp_dir, 'unsorted.stderr' )
+    cmd = "samtools view -bS '%s' > '%s'" % ( input_filename, unsorted_bam_filename )
+    proc = subprocess.Popen( args=cmd, stderr=open( unsorted_stderr_filename, 'wb' ), shell=True, cwd=tmp_dir )
+    return_code = proc.wait()
+    if return_code:
+        stderr_target = sys.stderr
+    else:
+        stderr_target = sys.stdout
+    stderr = open( unsorted_stderr_filename )
+    while True:
+        chunk = stderr.read( CHUNK_SIZE )
+        if chunk:
+            stderr_target.write( chunk )
+        else:
+            break
+    stderr.close()
+
+    # sort sam, so indexing will not fail
+    sorted_stderr_filename = os.path.join( tmp_dir, 'sorted.stderr' )
+    sorting_prefix = os.path.join( tmp_dir, 'sorted_bam' )
+    # samtools changed the sort command arguments in the 1.x series; versions below 1.0 use the old positional out-prefix form
+    samtools_version = LooseVersion(_get_samtools_version())
+    if samtools_version < LooseVersion('1.0'):
+        cmd = "samtools sort -o '%s' '%s' > '%s'" % ( unsorted_bam_filename, sorting_prefix, output_filename )
+    else:
+        cmd = "samtools sort -T '%s' '%s' > '%s'" % ( sorting_prefix, unsorted_bam_filename, output_filename )
+    proc = subprocess.Popen( args=cmd, stderr=open( sorted_stderr_filename, 'wb' ), shell=True, cwd=tmp_dir )
+    return_code = proc.wait()
+
+    if return_code:
+        stderr_target = sys.stderr
+    else:
+        stderr_target = sys.stdout
+    stderr = open( sorted_stderr_filename )
+    while True:
+        chunk = stderr.read( CHUNK_SIZE )
+        if chunk:
+            stderr_target.write( chunk )
+        else:
+            break
+    stderr.close()
+
+    cleanup_before_exit( tmp_dir )
+
+
+if __name__ == "__main__":
+    __main__()
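
[Note: the sort-command branch above keys off LooseVersion ordering; a quick sketch of how that comparison behaves for a few representative samtools versions:

    from distutils.version import LooseVersion
    # Versions below 1.0 get the old 'sort -o <in.bam> <prefix>' form;
    # 1.0 and later get 'sort -T <prefix> <in.bam>'.
    assert LooseVersion('0.1.19') < LooseVersion('1.0')
    assert not (LooseVersion('1.2') < LooseVersion('1.0'))
    assert not (LooseVersion('1.3.1') < LooseVersion('1.0'))
]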
diff --git a/lib/galaxy/datatypes/converters/sam_to_bam.xml b/lib/galaxy/datatypes/converters/sam_to_bam.xml
new file mode 100644
index 0000000..67636c5
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/sam_to_bam.xml
@@ -0,0 +1,20 @@
+<tool id="CONVERTER_sam_to_bam" name="Convert SAM to BAM" version="2.0.0">
+    <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+    <!-- Used on the metadata edit page. -->
+    <!-- FIXME: conversion will only work if headers for reference sequences are in input file.
+         To fix this: (a) merge sam_to_bam tool in tools with this conversion (like fasta_to_len 
+         conversion); and (b) define a datatype-specific way to set converter parameters.
+      -->
+    <requirements>
+        <requirement type="package">samtools</requirement>
+    </requirements>
+    <command interpreter="python">sam_to_bam.py '$input1' '$output'</command>
+    <inputs>
+        <param name="input1" type="data" format="sam" label="SAM file"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="bam"/>
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/sam_to_bigwig_converter.xml b/lib/galaxy/datatypes/converters/sam_to_bigwig_converter.xml
new file mode 100644
index 0000000..2465aea
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/sam_to_bigwig_converter.xml
@@ -0,0 +1,25 @@
+<tool id="CONVERTER_sam_to_bigwig_0" name="Convert SAM to BigWig" version="1.0.0" hidden="true">
+    <requirements>
+        <requirement type="package">ucsc_tools</requirement>
+        <requirement type="package">samtools</requirement>
+        <requirement type="package">bedtools</requirement>
+    </requirements>
+    <command>
+        samtools view -bh '$input' | bedtools genomecov -bg -split -ibam stdin -g $chromInfo
+
+        ## Streaming the bedgraph file to wigToBigWig is fast but very memory intensive; hence, this
+        ## should only be used on systems with large RAM.
+        ## | wigToBigWig stdin $chromInfo '$output'
+
+        ## This can be used anywhere.
+        > temp.bg ; bedGraphToBigWig temp.bg $chromInfo '$output'
+    </command>
+    <inputs>
+        <param format="bam" name="input" type="data" label="Choose BAM file"/>
+    </inputs>
+    <outputs>
+        <data format="bigwig" name="output"/>
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/sdf_to_cml_converter.xml b/lib/galaxy/datatypes/converters/sdf_to_cml_converter.xml
new file mode 100644
index 0000000..62cf307
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/sdf_to_cml_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_sdf_to_cml" name="SDF to CML" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -isdf '${input}' -ocml -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="sdf" label="Molecules in SDF-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="cml"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/sdf_to_inchi_converter.xml b/lib/galaxy/datatypes/converters/sdf_to_inchi_converter.xml
new file mode 100644
index 0000000..af58874
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/sdf_to_inchi_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_sdf_to_inchi" name="SDF to InChI" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -isdf '${input}' -oinchi -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="sdf" label="Molecules in SDF-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="inchi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/sdf_to_mol2_converter.xml b/lib/galaxy/datatypes/converters/sdf_to_mol2_converter.xml
new file mode 100644
index 0000000..3d0aac2
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/sdf_to_mol2_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_sdf_to_mol2" name="SDF to mol2" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -isdf '${input}' -omol2 -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="sdf" label="Molecules in SDF-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol2"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/sdf_to_smi_converter.xml b/lib/galaxy/datatypes/converters/sdf_to_smi_converter.xml
new file mode 100644
index 0000000..a2b5402
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/sdf_to_smi_converter.xml
@@ -0,0 +1,27 @@
+<tool id="CONVERTER_sdf_to_smiles" name="SDF to SMILES" version="1.0.1">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command >
+<![CDATA[
+        obabel
+            -isdf '${input}'
+            -ocan
+            -O '${output}'
+            -e
+        2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="sdf" label="Molecules in SDF-format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="smi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/smi_to_cml_converter.xml b/lib/galaxy/datatypes/converters/smi_to_cml_converter.xml
new file mode 100644
index 0000000..1a1302d
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/smi_to_cml_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_SMILES_to_cml" name="SMILES to CML" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -ismi '${input}' -ocml -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="smi" label="Molecules in SMILES format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="cml"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/smi_to_inchi_converter.xml b/lib/galaxy/datatypes/converters/smi_to_inchi_converter.xml
new file mode 100644
index 0000000..7a80c50
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/smi_to_inchi_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_SMILES_to_inchi" name="SMILES to InChI" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -ismi '${input}' -oinchi -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="smi" label="Molecules in SMILES format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="inchi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/smi_to_mol2_converter.xml b/lib/galaxy/datatypes/converters/smi_to_mol2_converter.xml
new file mode 100644
index 0000000..6542b42
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/smi_to_mol2_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_SMILES_to_MOL2" name="SMILES to MOL2" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -ismi '${input}' -omol2 -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="smi" label="Molecules in SMILES format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol2"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/smi_to_mol_converter.xml b/lib/galaxy/datatypes/converters/smi_to_mol_converter.xml
new file mode 100644
index 0000000..78ac5db
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/smi_to_mol_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_SMILES_to_MOL" name="SMILES to MOL" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+        obabel -ismi '${input}' -omol -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="smi" label="Molecules in SMILES format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="mol"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/smi_to_sdf_converter.xml b/lib/galaxy/datatypes/converters/smi_to_sdf_converter.xml
new file mode 100644
index 0000000..a61f255
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/smi_to_sdf_converter.xml
@@ -0,0 +1,22 @@
+<tool id="CONVERTER_SMILES_to_sdf" name="SMILES to SDF" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command>
+<![CDATA[
+    obabel -ismi '${input}' -osdf -O '${output}' -e 2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="smi" label="Molecules in SMILES format"/>
+    </inputs>
+    <outputs>
+        <data name="output" format="sdf"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/smi_to_smi_converter.xml b/lib/galaxy/datatypes/converters/smi_to_smi_converter.xml
new file mode 100644
index 0000000..d177053
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/smi_to_smi_converter.xml
@@ -0,0 +1,48 @@
+<tool id="CONVERTER_smiles_to_smiles" name="SMILES to SMILES" version="1.0.0">
+    <description></description>
+    <parallelism method="multi" split_inputs="input" split_mode="to_size" split_size="10000" shared_inputs="" merge_outputs="output"></parallelism>
+    <requirements>
+        <requirement type="package" version="2.3.2">openbabel</requirement>
+    </requirements>
+    <command >
+<![CDATA[
+        obabel
+            -ismi '${input}'
+            #if $can:
+                -ocan
+            #else:
+                -osmi
+            #end if
+            -O '${output}'
+            -e
+            $remove_h
+            #if $iso_chi or $can or $exp_h:
+                -x$iso_chi$exp_h$can
+            #end if
+            #if $dative_bonds:
+                -b
+            #end if
+            #if int($ph) >= 0:
+                -p $ph
+            #end if
+
+            2>&1
+]]>
+    </command>
+    <inputs>
+        <param name="input" type="data" format="smi" label="Molecules in SD-format"/>
+        <param name="iso_chi" type="boolean" label="Do not include isotopic or chiral markings (-xi)" truevalue="i" falsevalue="" checked="false" />
+        <param name="can" type="boolean" label="Output in canonical form (-xc)" truevalue="c" falsevalue="" checked="false" />
+        <param name="exp_h" type="boolean" label="Output explicit hydrogens as such (-xh)" truevalue="h" falsevalue="" checked="false" />
+        <param name="remove_h" type="boolean" label="Delete hydrogen atoms (-d)" truevalue="-d" falsevalue="" />
+        <param name="ph" type="float" value="-1" label="Add hydrogens appropriate for pH (-p)" help="-1 means deactivated"/>
+        <param name="dative_bonds" type="boolean" label="Convert dative bonds (e.g. [N+]([O-])=O to N(=O)=O) (-b)" truevalue="-b" falsevalue="" />
+    </inputs>
+    <outputs>
+        <data name="output" format="smi"/>
+    </outputs>
+    <help>
+<![CDATA[
+]]>
+    </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/tabular_to_dbnsfp.py b/lib/galaxy/datatypes/converters/tabular_to_dbnsfp.py
new file mode 100644
index 0000000..4e1f909
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/tabular_to_dbnsfp.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+"""
+Uses pysam to bgzip and tabix-index a tabular file
+
+usage: %prog in_file out_file
+"""
+
+import optparse
+import os.path
+
+from pysam import ctabix
+
+
+def main():
+    # Read options, args.
+    usage = "Usage: %prog [options] tabular_input_file bgzip_output_file"
+    parser = optparse.OptionParser(usage=usage)
+    parser.add_option( '-c', '--chr-col', type='int', default=0, dest='chrom_col' )
+    parser.add_option( '-s', '--start-col', type='int', default=1, dest='start_col' )
+    parser.add_option( '-e', '--end-col', type='int', default=1, dest='end_col' )
+    (options, args) = parser.parse_args()
+    if len(args) != 2:
+        parser.print_usage()
+        exit(1)
+    input_fname, output_fname = args
+    output_dir = os.path.dirname(output_fname)
+    if not os.path.exists(output_dir):
+        os.makedirs(output_dir)
+    ctabix.tabix_compress(input_fname, output_fname, force=True)
+    # Column indices are 0-based.
+    ctabix.tabix_index(output_fname, seq_col=options.chrom_col, start_col=options.start_col, end_col=options.end_col)
+
+
+if __name__ == "__main__":
+    main()
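
[Note: a minimal invocation sketch for the converter above, assuming a dbNSFP-style tabular file with the chromosome in column 0 and the position in column 1 (0-based, as noted above); file names are hypothetical:

    import subprocess
    subprocess.check_call([
        'python', 'tabular_to_dbnsfp.py',
        '-c', '0', '-s', '1', '-e', '1',
        'dbNSFP_variants.tabular',   # input (hypothetical)
        'extra_files/dbNSFP.gz',     # bgzipped, tabix-indexed output
    ])
]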
diff --git a/lib/galaxy/datatypes/converters/tabular_to_dbnsfp.xml b/lib/galaxy/datatypes/converters/tabular_to_dbnsfp.xml
new file mode 100644
index 0000000..4e12954
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/tabular_to_dbnsfp.xml
@@ -0,0 +1,12 @@
+<tool id="tabular_to_dbnsfp" name="Convert tabular to dbnsfp" version="1.0.0">
+  <description></description> 
+  <command interpreter="python">tabular_to_dbnsfp.py '$input' '$dbnsfp.extra_files_path/dbNSFP.gz'</command>
+  <inputs>
+      <param format="tabular" name="input" type="data" label="Choose a dbnsfp tabular file"/>
+   </inputs>
+  <outputs>
+    <data format="snpsiftdbnsfp" name="dbnsfp"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/vcf_bgzip_to_tabix_converter.xml b/lib/galaxy/datatypes/converters/vcf_bgzip_to_tabix_converter.xml
new file mode 100644
index 0000000..26b6ea4
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/vcf_bgzip_to_tabix_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_vcf_bgzip_to_tabix_0" name="Convert BGZ VCF to tabix" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">interval_to_tabix_converter.py -P 'vcf' '' '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="vcf_bgzip" name="input1" type="data" label="Choose BGZIP'd VCF file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="tabix" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/vcf_to_bgzip_converter.xml b/lib/galaxy/datatypes/converters/vcf_to_bgzip_converter.xml
new file mode 100644
index 0000000..944c4fa
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/vcf_to_bgzip_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_vcf_to_bgzip_0" name="Convert VCF to BGZIP" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">bgzip.py -P vcf '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="vcf" name="input1" type="data" label="Choose Vcf file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="bgzip" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/vcf_to_interval_index_converter.py b/lib/galaxy/datatypes/converters/vcf_to_interval_index_converter.py
new file mode 100644
index 0000000..e8d76b4
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/vcf_to_interval_index_converter.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+"""
+Convert from VCF file to interval index file.
+"""
+from __future__ import division
+
+import optparse
+
+from bx.interval_index_file import Indexes
+
+import galaxy_utils.sequence.vcf
+
+
+def main():
+    # Read options, args.
+    parser = optparse.OptionParser()
+    (options, args) = parser.parse_args()
+    in_file, out_file = args
+
+    # Do conversion.
+    index = Indexes()
+    reader = galaxy_utils.sequence.vcf.Reader( open( in_file ) )
+    offset = reader.metadata_len
+    for vcf_line in reader:
+        # VCF format provides a chrom and 1-based position for each variant.
+        # IntervalIndex expects 0-based coordinates.
+        index.add( vcf_line.chrom, vcf_line.pos - 1, vcf_line.pos, offset )
+        offset += len( vcf_line.raw_line )
+
+    index.write( open( out_file, "w" ) )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/vcf_to_interval_index_converter.xml b/lib/galaxy/datatypes/converters/vcf_to_interval_index_converter.xml
new file mode 100644
index 0000000..5cd831e
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/vcf_to_interval_index_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_vcf_to_interval_index_0" name="Convert VCF to Interval Index" version="1.0.0" hidden="true">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">vcf_to_interval_index_converter.py '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="vcf" name="input1" type="data" label="Choose VCF file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="interval_index" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/vcf_to_tabix_converter.xml b/lib/galaxy/datatypes/converters/vcf_to_tabix_converter.xml
new file mode 100644
index 0000000..9d37e83
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/vcf_to_tabix_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_vcf_to_tabix_0" name="Convert Vcf to tabix" version="1.0.0" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">interval_to_tabix_converter.py -P vcf '$input1' '$bgzip' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="vcf" name="input1" type="data" label="Choose Vcf file"/>
+        <param format="bgzip" name="bgzip" type="data" label="BGZIP file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="tabix" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/vcf_to_vcf_bgzip.py b/lib/galaxy/datatypes/converters/vcf_to_vcf_bgzip.py
new file mode 100644
index 0000000..6966ac9
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/vcf_to_vcf_bgzip.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+"""
+Uses pysam to bgzip a VCF file as-is.
+Headers, which are important, are kept.
+Original ordering, which may be specifically needed by tools or external display applications, is also maintained.
+
+usage: %prog in_file out_file
+"""
+import optparse
+
+from pysam import ctabix
+
+
+def main():
+    # Read options, args.
+    parser = optparse.OptionParser()
+    (options, args) = parser.parse_args()
+    input_fname, output_fname = args
+
+    ctabix.tabix_compress(input_fname, output_fname, force=True)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/vcf_to_vcf_bgzip_converter.xml b/lib/galaxy/datatypes/converters/vcf_to_vcf_bgzip_converter.xml
new file mode 100644
index 0000000..0e7d6ef
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/vcf_to_vcf_bgzip_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_vcf_to_vcf_bgzip_0" name="Convert VCF to VCF_BGZIP" version="1.0.1" hidden="true">
+<!--  <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <command interpreter="python">vcf_to_vcf_bgzip.py '$input1' '$output1'</command>
+  <inputs>
+    <page>
+        <param format="vcf" name="input1" type="data" label="Choose Vcf file"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="vcf_bgzip" name="output1"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/wig_to_bigwig_converter.xml b/lib/galaxy/datatypes/converters/wig_to_bigwig_converter.xml
new file mode 100644
index 0000000..f0d80a7
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/wig_to_bigwig_converter.xml
@@ -0,0 +1,20 @@
+<tool id="CONVERTER_wig_to_bigwig" name="Convert Wiggle to BigWig" hidden="true">
+  <!-- Used internally to generate track indexes -->
+  <requirements>
+      <requirement type="package">ucsc_tools</requirement>
+  </requirements>
+  <command>
+      grep -v "^track" '$input' | wigToBigWig -clip stdin $chromInfo '$output'
+      2>&1 || echo "Error running wiggle to bigwig converter." >&2
+  </command>
+  <inputs>
+    <page>
+      <param format="wig" name="input" type="data" label="Choose wiggle"/>
+    </page>
+   </inputs>
+   <outputs>
+      <data format="bigwig" name="output"/>
+   </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py
new file mode 100644
index 0000000..820ba7a
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+from __future__ import division
+
+import sys
+
+from bx.arrays.array_tree import array_tree_dict_from_reader, FileArrayTreeDict
+from bx.arrays.wiggle import WiggleReader
+
+BLOCK_SIZE = 100
+
+
+def main():
+
+    input_fname = sys.argv[1]
+    out_fname = sys.argv[2]
+
+    reader = WiggleReader( open( input_fname ) )
+
+    # Fill array from reader
+    d = array_tree_dict_from_reader( reader, {}, block_size=BLOCK_SIZE )
+
+    for array_tree in d.values():
+        array_tree.root.build_summary()
+
+    FileArrayTreeDict.dict_to_file( d, open( out_fname, "w" ) )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.xml b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.xml
new file mode 100644
index 0000000..30b316c
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.xml
@@ -0,0 +1,14 @@
+<tool id="CONVERTER_Wiggle_0" name="Index Wiggle for Track Viewer" hidden="true">
+  <!-- Used internally to generate track indexes -->
+  <command interpreter="python">wiggle_to_array_tree_converter.py '$input' '$output'</command>
+  <inputs>
+    <page>
+      <param format="wiggle" name="input" type="data" label="Choose wiggle"/>
+    </page>
+   </inputs>
+   <outputs>
+      <data format="array_tree" name="output"/>
+   </outputs>
+  <help>
+  </help>
+</tool>
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py b/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py
new file mode 100644
index 0000000..b615b97
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# code is same as ~/tools/stats/wiggle_to_simple.py
+"""
+Read a wiggle track and print out a series of lines containing
+"chrom position score". Ignores track lines, handles bed, variableStep
+and fixedStep wiggle lines.
+"""
+from __future__ import print_function
+
+import sys
+
+import bx.wiggle
+
+from galaxy.util.ucsc import UCSCOutWrapper, UCSCLimitException
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def main():
+    if len( sys.argv ) > 1:
+        in_file = open( sys.argv[1] )
+    else:
+        in_file = sys.stdin
+
+    if len( sys.argv ) > 2:
+        out_file = open( sys.argv[2], "w" )
+    else:
+        out_file = sys.stdout
+
+    try:
+        for fields in bx.wiggle.IntervalReader( UCSCOutWrapper( in_file ) ):
+            out_file.write( "%s\n" % "\t".join( map( str, fields ) ) )
+    except UCSCLimitException:
+        # Wiggle data was truncated, at the very least need to warn the user.
+        print('Encountered message from UCSC: "Reached output limit of 100000 data values", so be aware your data was truncated.')
+    except ValueError as e:
+        in_file.close()
+        out_file.close()
+        stop_err( str( e ) )
+
+    in_file.close()
+    out_file.close()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.xml b/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.xml
new file mode 100644
index 0000000..1f92874
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/wiggle_to_simple_converter.xml
@@ -0,0 +1,11 @@
+<tool id="CONVERTER_wiggle_to_interval_0" name="Wiggle to Interval">
+  <!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+  <!-- Used on the metadata edit page. -->
+  <command interpreter="python">wiggle_to_simple_converter.py '$input' '$out_file1' </command>
+  <inputs>
+    <param format="wig" name="input" type="data" label="Convert"/>
+  </inputs>
+  <outputs>
+    <data format="interval" name="out_file1" />
+  </outputs>
+</tool>
diff --git a/lib/galaxy/datatypes/coverage.py b/lib/galaxy/datatypes/coverage.py
new file mode 100644
index 0000000..5a3c7d6
--- /dev/null
+++ b/lib/galaxy/datatypes/coverage.py
@@ -0,0 +1,55 @@
+"""
+Coverage datatypes
+
+"""
+
+import logging
+import math
+
+from galaxy.datatypes import metadata
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes.tabular import Tabular
+
+log = logging.getLogger(__name__)
+
+
+class LastzCoverage( Tabular ):
+    file_ext = "coverage"
+
+    MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+    MetadataElement( name="positionCol", default=2, desc="Position column", param=metadata.ColumnParameter )
+    MetadataElement( name="forwardCol", default=3, desc="Forward or aggregate read column", param=metadata.ColumnParameter )
+    MetadataElement( name="reverseCol", desc="Optional reverse read column", param=metadata.ColumnParameter, optional=True, no_value=0 )
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
+
+    def get_track_window(self, dataset, data, start, end):
+        """
+        Assumes we have a numpy file.
+        """
+        # Maybe if we import here people will still be able to use Galaxy when numpy kills it
+        import numpy
+
+        span = end - start
+        # Determine appropriate resolution to plot ~1000 points
+        resolution = ( 10 ** math.ceil( math.log10( span / 1000 ) ) )
+        # Restrict to valid range
+        resolution = min( resolution, 10000 )
+        resolution = max( resolution, 1 )
+        # Memory map the array (don't load all the data)
+        data = numpy.load( data )
+        # Grab just what we need; slice indices must be ints
+        t_start = int( math.floor( start / resolution ) )
+        t_end = int( math.ceil( end / resolution ) )
+        x = numpy.arange( t_start, t_end ) * resolution
+        y = data[ t_start : t_end ]
+
+        return zip(x.tolist(), y.tolist())
+
+    def get_track_resolution( self, dataset, start, end):
+        span = end - start
+        # Determine appropriate resolution to plot ~1000 points
+        resolution = math.ceil( 10 ** math.ceil( math.log10( span / 1000 ) ) )
+        # Restrict to valid range
+        resolution = min( resolution, 10000 )
+        resolution = max( resolution, 1 )
+        return resolution
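
[Note: the resolution arithmetic above rounds the visible span up to the next power of ten so that roughly 1000 points get plotted, then clamps to [1, 10000]; a worked sketch (using a float divisor to avoid integer division under Python 2):

    import math
    def track_resolution(start, end):
        span = end - start
        resolution = math.ceil(10 ** math.ceil(math.log10(span / 1000.0)))
        return max(1, min(resolution, 10000))
    assert track_resolution(0, 500) == 1           # 0.5 -> 10**0
    assert track_resolution(0, 5000) == 10         # 5   -> 10**1
    assert track_resolution(0, 500000) == 1000     # 500 -> 10**3
    assert track_resolution(0, 10 ** 8) == 10000   # clamped from 10**5
]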
diff --git a/lib/galaxy/datatypes/data.py b/lib/galaxy/datatypes/data.py
new file mode 100644
index 0000000..2c6ff80
--- /dev/null
+++ b/lib/galaxy/datatypes/data.py
@@ -0,0 +1,1018 @@
+from __future__ import absolute_import
+
+import abc
+import logging
+import mimetypes
+import os
+import shutil
+import tempfile
+import zipfile
+from cgi import escape
+from inspect import isclass
+
+import paste
+import six
+
+from galaxy import util
+from galaxy.datatypes.metadata import MetadataElement  # import directly to maintain ease of use in Datatype class definitions
+from galaxy.util import FILENAME_VALID_CHARS
+from galaxy.util import inflector
+from galaxy.util import unicodify
+from galaxy.util.bunch import Bunch
+from galaxy.util.odict import odict
+from galaxy.util.sanitize_html import sanitize_html
+
+from . import dataproviders
+from . import metadata
+
+XSS_VULNERABLE_MIME_TYPES = [
+    'image/svg+xml',  # Unfiltered by Galaxy and may contain JS that would be executed by some browsers.
+    'application/xml',  # Some browsers will evalute SVG embedded JS in such XML documents.
+]
+DEFAULT_MIME_TYPE = 'text/plain'  # Vulnerable mime types will be replaced with this.
+
+log = logging.getLogger(__name__)
+
+# Valid first column and strand column values for bed and other formats
+col1_startswith = ['chr', 'chl', 'groupun', 'reftig_', 'scaffold', 'super_', 'vcho']
+valid_strand = ['+', '-', '.']
+
+
+class DataMeta( abc.ABCMeta ):
+    """
+    Metaclass for Data class.  Sets up metadata spec.
+    """
+    def __init__( cls, name, bases, dict_ ):
+        cls.metadata_spec = metadata.MetadataSpecCollection()
+        for base in bases:  # loop through bases (class/types) of cls
+            if hasattr( base, "metadata_spec" ):  # base of class Data (object) has no metadata
+                cls.metadata_spec.update( base.metadata_spec )  # add contents of metadata spec of base class to cls
+        metadata.Statement.process( cls )
+
+
+@six.add_metaclass(DataMeta)
+@dataproviders.decorators.has_dataproviders
+class Data( object ):
+    """
+    Base class for all datatypes.  Implements basic interfaces as well
+    as class methods for metadata.
+
+    >>> class DataTest( Data ):
+    ...     MetadataElement( name="test" )
+    ...
+    >>> DataTest.metadata_spec.test.name
+    'test'
+    >>> DataTest.metadata_spec.test.desc
+    'test'
+    >>> type( DataTest.metadata_spec.test.param )
+    <class 'galaxy.model.metadata.MetadataParameter'>
+    """
+    edam_data = "data_0006"
+    edam_format = "format_1915"
+    # Data is not chunkable by default.
+    CHUNKABLE = False
+
+    #: Dictionary of metadata fields for this datatype
+    metadata_spec = None
+
+    # Add metadata elements
+    MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" )
+    # Stores the set of display applications, and viewing methods, supported by this datatype
+    supported_display_apps = {}
+    # If False, the peek is regenerated whenever a dataset of this type is copied
+    copy_safe_peek = True
+    # The dataset contains binary data --> do not space_to_tab or convert newlines, etc.
+    # Allow binary file uploads of this type when True.
+    is_binary = True
+    # Allow user to change between this datatype and others. If False, this datatype
+    # cannot be changed from or into.
+    allow_datatype_change = True
+    # Composite datatypes
+    composite_type = None
+    composite_files = odict()
+    primary_file_name = 'index'
+    # A per datatype setting (inherited): max file size (in bytes) for setting optional metadata
+    _max_optional_metadata_filesize = None
+
+    # Trackster track type.
+    track_type = None
+
+    # Data sources.
+    data_sources = {}
+
+    def __init__(self, **kwd):
+        """Initialize the datatype"""
+        object.__init__(self, **kwd)
+        self.supported_display_apps = self.supported_display_apps.copy()
+        self.composite_files = self.composite_files.copy()
+        self.display_applications = odict()
+
+    def write_from_stream(self, dataset, stream):
+        """Writes data from a stream in 1 MB chunks"""
+        # os.write()/os.close() expect an integer descriptor, not a file
+        # object; use the file object's own methods instead
+        with open(dataset.file_name, 'wb') as f:
+            while True:
+                chunk = stream.read(1048576)
+                if not chunk:
+                    break
+                f.write(chunk)
+
+    def set_raw_data(self, dataset, data):
+        """Saves the data on disk"""
+        with open(dataset.file_name, 'wb') as f:
+            f.write(data)
+
+    def get_raw_data( self, dataset ):
+        """Returns the full data. To stream it open the file_name and read/write as needed"""
+        try:
+            return open(dataset.file_name, 'rb').read(-1)
+        except (IOError, OSError):
+            log.exception('%s tried to read a file that does not exist: %s' % (self.__class__.__name__, dataset.file_name))
+            return ''
+
+    def dataset_content_needs_grooming( self, file_name ):
+        """This function is called on an output dataset file after the content is initially generated."""
+        return False
+
+    def groom_dataset_content( self, file_name ):
+        """This function is called on an output dataset file if dataset_content_needs_grooming returns True."""
+        pass
+
+    def init_meta( self, dataset, copy_from=None ):
+        # Metadata should be left mostly uninitialized.  Dataset will
+        # handle returning default values when metadata is not set.
+        # copy_from allows metadata to be passed in that will be
+        # copied (although this seems ambiguous, see
+        # Dataset.set_metadata, which always copies the rhs in order to
+        # flag the object as modified for SQLAlchemy).
+        if copy_from:
+            dataset.metadata = copy_from.metadata
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        """Unimplemented method, allows guessing of metadata from contents of file"""
+        return True
+
+    def missing_meta( self, dataset, check=[], skip=[] ):
+        """
+        Checks for empty metadata values, Returns True if non-optional metadata is missing
+        Specifying a list of 'check' values will only check those names provided; when used, optionality is ignored
+        Specifying a list of 'skip' items will return True even when a named metadata value is missing
+        """
+        if check:
+            to_check = ( ( to_check, dataset.metadata.get( to_check ) ) for to_check in check )
+        else:
+            to_check = dataset.metadata.items()
+        for key, value in to_check:
+            if key in skip or ( not check and dataset.metadata.spec[key].get( "optional" ) ):
+                continue  # we skip check for optional and nonrequested values here
+            if not value:
+                return True
+        return False
+
+    def set_max_optional_metadata_filesize( self, max_value ):
+        try:
+            max_value = int( max_value )
+        except (TypeError, ValueError):
+            return
+        self.__class__._max_optional_metadata_filesize = max_value
+
+    def get_max_optional_metadata_filesize( self ):
+        rval = self.__class__._max_optional_metadata_filesize
+        if rval is None:
+            return -1
+        return rval
+
+    max_optional_metadata_filesize = property( get_max_optional_metadata_filesize, set_max_optional_metadata_filesize )
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = ''
+            dataset.blurb = 'data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek(self, dataset ):
+        """Create HTML table, used for displaying peek"""
+        out = ['<table cellspacing="0" cellpadding="3">']
+        try:
+            if not dataset.peek:
+                dataset.set_peek()
+            data = dataset.peek
+            lines = data.splitlines()
+            for line in lines:
+                line = line.strip()
+                if not line:
+                    continue
+                out.append( '<tr><td>%s</td></tr>' % escape( unicodify( line, 'utf-8' ) ) )
+            out.append( '</table>' )
+            out = "".join( out )
+        except Exception as exc:
+            out = "Can't create peek %s" % str( exc )
+        return out
+
+    def _archive_main_file(self, archive, display_name, data_filename):
+        """Called from _archive_composite_dataset to add central file to archive.
+
+        Unless subclassed, this will add the main dataset file (argument data_filename)
+        to the archive as an HTML file, with its filename derived from the dataset name
+        (argument display_name).
+
+        Returns a tuple of boolean, string, string: (error, msg, messagetype)
+        """
+        error, msg, messagetype = False, "", ""
+        archname = '%s.html' % display_name  # present the main file as an HTML file in the archive
+        try:
+            archive.add(data_filename, archname)
+        except IOError:
+            error = True
+            log.exception("Unable to add composite parent %s to temporary library download archive" % data_filename)
+            msg = "Unable to create archive for download, please report this error"
+            messagetype = "error"
+        return error, msg, messagetype
+
+    def _archive_composite_dataset( self, trans, data=None, **kwd ):
+        # save a composite object into a compressed archive for downloading
+        params = util.Params( kwd )
+        outfname = data.name[0:150]
+        outfname = ''.join(c if c in FILENAME_VALID_CHARS else '_' for c in outfname)
+        if params.do_action is None:
+            params.do_action = 'zip'  # default
+        msg = util.restore_text( params.get( 'msg', ''  ) )
+        if not data:
+            msg = "You must select at least one dataset"
+        else:
+            error = False
+            try:
+                if params.do_action == 'zip':
+                    # Can't use mkstemp - the file must not exist first
+                    tmpd = tempfile.mkdtemp()
+                    util.umask_fix_perms( tmpd, trans.app.config.umask, 0o777, trans.app.config.gid )
+                    tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
+                    archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+                    archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
+                elif params.do_action == 'tgz':
+                    archive = util.streamball.StreamBall( 'w|gz' )
+                elif params.do_action == 'tbz':
+                    archive = util.streamball.StreamBall( 'w|bz2' )
+            except (OSError, zipfile.BadZipfile):
+                error = True
+                log.exception( "Unable to create archive for download" )
+                msg = "Unable to create archive for %s for download, please report this error" % outfname
+            if not error:
+                ext = data.extension
+                path = data.file_name
+                fname = os.path.split(path)[-1]
+                efp = data.extra_files_path
+                # Add the main (central) file to the archive
+
+                display_name = os.path.splitext(outfname)[0]
+                if not display_name.endswith(ext):
+                    display_name = '%s_%s' % (display_name, ext)
+
+                error, msg = self._archive_main_file(archive, display_name, path)[:2]
+                if not error:
+                    # Add any child files to the archive
+                    for root, dirs, files in os.walk(efp):
+                        for fname in files:
+                            fpath = os.path.join(root, fname)
+                            rpath = os.path.relpath(fpath, efp)
+                            try:
+                                archive.add( fpath, rpath )
+                            except IOError:
+                                error = True
+                                log.exception( "Unable to add %s to temporary library download archive" % rpath)
+                                msg = "Unable to create archive for download, please report this error"
+                                continue
+                if not error:
+                    if params.do_action == 'zip':
+                        archive.close()
+                        tmpfh = open( tmpf, 'rb' )
+                        # Cannot clean up here: unlink/rmdir fails while the file handle is
+                        # still open for the response, so a cron job must clean up tmp
+                        trans.response.set_content_type( "application/x-zip-compressed" )
+                        trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.zip"' % outfname
+                        return tmpfh
+                    else:
+                        trans.response.set_content_type( "application/x-tar" )
+                        outext = 'tgz'
+                        if params.do_action == 'tbz':
+                            outext = 'tbz'
+                        trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (outfname, outext)
+                        archive.wsgi_status = trans.response.wsgi_status()
+                        archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                        return archive.stream
+        return trans.show_error_message( msg )
+
+    def _serve_raw(self, trans, dataset, to_ext):
+        trans.response.headers['Content-Length'] = int( os.stat( dataset.file_name ).st_size )
+        fname = ''.join(c if c in FILENAME_VALID_CHARS else '_' for c in dataset.name)[0:150]
+        trans.response.set_content_type( "application/octet-stream" )  # force octet-stream so Safari doesn't append mime extensions to filename
+        trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (dataset.hid, fname, to_ext)
+        return open( dataset.file_name )
+
+    def display_data(self, trans, data, preview=False, filename=None, to_ext=None, **kwd):
+        """ Old display method, for transition - though still used by API and
+        test framework. Datatypes should be very careful if overridding this
+        method and this interface between datatypes and Galaxy will likely
+        change.
+
+        TOOD: Document alternatives to overridding this method (data
+        providers?).
+        """
+        # Relocate all composite datatype display to a common location.
+        composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+        composite_extensions.append('html')  # for archiving composite datatypes
+        # Prevent IE8 from sniffing content type since we're explicit about it.  This prevents intentionally text/plain
+        # content from being rendered in the browser
+        trans.response.headers['X-Content-Type-Options'] = 'nosniff'
+        if isinstance( data, six.string_types ):
+            return data
+        if filename and filename != "index":
+            # For files in extra_files_path
+            file_path = trans.app.object_store.get_filename(data.dataset, extra_dir='dataset_%s_files' % data.dataset.id, alt_name=filename)
+            if os.path.exists( file_path ):
+                if os.path.isdir( file_path ):
+                    return trans.show_error_message( "Directory listing is not allowed." )  # TODO: Reconsider allowing listing of directories?
+                mime = mimetypes.guess_type( file_path )[0]
+                if not mime:
+                    try:
+                        mime = trans.app.datatypes_registry.get_mimetype_by_extension( file_path.split( "." )[-1] )
+                    except Exception:
+                        mime = "text/plain"
+                self._clean_and_set_mime_type( trans, mime )
+                return open( file_path )
+            else:
+                return paste.httpexceptions.HTTPNotFound( "Could not find '%s' on the extra files path %s." % ( filename, file_path ) )
+        self._clean_and_set_mime_type( trans, data.get_mime() )
+
+        trans.log_event( "Display dataset id: %s" % str( data.id ) )
+        from galaxy import datatypes  # DBTODO REMOVE THIS AT REFACTOR
+        if to_ext or isinstance(data.datatype, datatypes.binary.Binary):  # Saving the file, or binary file
+            if data.extension in composite_extensions:
+                return self._archive_composite_dataset( trans, data, **kwd )
+            else:
+                trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
+                if not to_ext:
+                    to_ext = data.extension
+                fname = ''.join(c if c in FILENAME_VALID_CHARS else '_' for c in data.name)[0:150]
+                trans.response.set_content_type( "application/octet-stream" )  # force octet-stream so Safari doesn't append mime extensions to filename
+                trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (data.hid, fname, to_ext)
+                return open( data.file_name )
+        if not os.path.exists( data.file_name ):
+            raise paste.httpexceptions.HTTPNotFound( "File Not Found (%s)." % data.file_name )
+        max_peek_size = 1000000  # 1 MB
+        if isinstance(data.datatype, datatypes.text.Html):
+            max_peek_size = 10000000  # 10 MB for html
+        preview = util.string_as_bool( preview )
+        if not preview or isinstance(data.datatype, datatypes.images.Image) or os.stat( data.file_name ).st_size < max_peek_size:
+            if trans.app.config.sanitize_all_html and trans.response.get_content_type() == "text/html":
+                # Sanitize anytime we respond with plain text/html content.
+                # Check to see if this dataset's parent job is whitelisted
+                # We cannot currently trust imported datasets for rendering.
+                if not data.creating_job.imported and data.creating_job.tool_id in trans.app.config.sanitize_whitelist:
+                    return open(data.file_name).read()
+                # This is returning to the browser, it needs to be encoded.
+                # TODO Ideally this happens a layer higher, but this is a bad
+                # issue affecting many tools
+                return sanitize_html(open( data.file_name ).read()).encode('utf-8')
+            return open( data.file_name )
+        else:
+            trans.response.set_content_type( "text/html" )
+            return trans.stream_template_mako( "/dataset/large_file.mako",
+                                               truncated_data=open( data.file_name ).read(max_peek_size),
+                                               data=data)
+
+    def display_name(self, dataset):
+        """Returns formatted html of dataset name"""
+        try:
+            return escape( unicodify( dataset.name, 'utf-8' ) )
+        except Exception:
+            return "name unavailable"
+
+    def display_info(self, dataset):
+        """Returns formatted html of dataset info"""
+        try:
+            # Change new line chars to html
+            info = escape( dataset.info )
+            if info.find( '\r\n' ) >= 0:
+                info = info.replace( '\r\n', '<br/>' )
+            if info.find( '\r' ) >= 0:
+                info = info.replace( '\r', '<br/>' )
+            if info.find( '\n' ) >= 0:
+                info = info.replace( '\n', '<br/>' )
+
+            info = unicodify( info, 'utf-8' )
+
+            return info
+        except Exception:
+            return "info unavailable"
+
+    def validate(self, dataset):
+        """Unimplemented validate, return no exceptions"""
+        return list()
+
+    def repair_methods(self, dataset):
+        """Unimplemented method, returns dict with method/option for repairing errors"""
+        return None
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'application/octet-stream'
+
+    def add_display_app( self, app_id, label, file_function, links_function ):
+        """
+        Adds a display app to the datatype.
+        app_id is a unique id
+        label is the primary display label, e.g., display at 'UCSC'
+        file_function is a string containing the name of the function that returns a properly formatted display
+        links_function is a string containing the name of the function that returns a list of (link_name,link)
+        """
+        self.supported_display_apps = self.supported_display_apps.copy()
+        self.supported_display_apps[app_id] = {'label': label, 'file_function': file_function, 'links_function': links_function}
+
+    def remove_display_app(self, app_id):
+        """Removes a display app from the datatype"""
+        self.supported_display_apps = self.supported_display_apps.copy()
+        try:
+            del self.supported_display_apps[app_id]
+        except KeyError:
+            log.exception('Tried to remove display app %s from datatype %s, but this display app is not declared.' % ( app_id, self.__class__.__name__ ) )
+
+    def clear_display_apps( self ):
+        self.supported_display_apps = {}
+
+    def add_display_application( self, display_application ):
+        """New style display applications"""
+        assert display_application.id not in self.display_applications, 'Attempted to add a display application twice'
+        self.display_applications[ display_application.id ] = display_application
+
+    def get_display_application( self, key, default=None ):
+        return self.display_applications.get( key, default )
+
+    def get_display_applications_by_dataset( self, dataset, trans ):
+        rval = odict()
+        for key, value in self.display_applications.items():
+            value = value.filter_by_dataset( dataset, trans )
+            if value.links:
+                rval[key] = value
+        return rval
+
+    def get_display_types(self):
+        """Returns display types available"""
+        return list(self.supported_display_apps.keys())
+
+    def get_display_label(self, type):
+        """Returns primary label for display app"""
+        try:
+            return self.supported_display_apps[type]['label']
+        except KeyError:
+            return 'unknown'
+
+    def as_display_type(self, dataset, type, **kwd):
+        """Returns modified file contents for a particular display type """
+        try:
+            if type in self.get_display_types():
+                return getattr(self, self.supported_display_apps[type]['file_function'])(dataset, **kwd)
+        except Exception:
+            log.exception('Function %s is referred to in datatype %s for displaying as type %s, but is not accessible' % (self.supported_display_apps[type]['file_function'], self.__class__.__name__, type) )
+        return "This display type (%s) is not implemented for this datatype (%s)." % ( type, dataset.ext)
+
+    def get_display_links( self, dataset, type, app, base_url, target_frame='_blank', **kwd ):
+        """
+        Returns a list of tuples of (name, link) for a particular display type.  No check on
+        'access' permissions is done here - if you can view the dataset, you can also save it
+        or send it to a destination outside of Galaxy, so Galaxy security restrictions do not
+        apply anyway.
+        """
+        try:
+            if app.config.enable_old_display_applications and type in self.get_display_types():
+                return target_frame, getattr( self, self.supported_display_apps[type]['links_function'] )( dataset, type, app, base_url, **kwd )
+        except Exception:
+            log.exception( 'Function %s is referred to in datatype %s for generating links for type %s, but is not accessible'
+                           % ( self.supported_display_apps[type]['links_function'], self.__class__.__name__, type ) )
+        return target_frame, []
+
+    def get_converter_types(self, original_dataset, datatypes_registry):
+        """Returns available converters by type for this dataset"""
+        return datatypes_registry.get_converters_by_datatype(original_dataset.ext)
+
+    def find_conversion_destination( self, dataset, accepted_formats, datatypes_registry, **kwd ):
+        """Returns ( target_ext, existing converted dataset )"""
+        return datatypes_registry.find_conversion_destination_for_dataset_by_extensions( dataset, accepted_formats, **kwd )
+
+    def convert_dataset(self, trans, original_dataset, target_type, return_output=False, visible=True, deps=None, target_context=None):
+        """This function adds a job to the queue to convert a dataset to another type. Returns a message about success/failure."""
+        converter = trans.app.datatypes_registry.get_converter_by_target_type( original_dataset.ext, target_type )
+
+        if converter is None:
+            raise Exception( "A converter does not exist for %s to %s." % ( original_dataset.ext, target_type ) )
+        # Generate parameter dictionary
+        params = {}
+        # determine input parameter name and add to params
+        input_name = 'input1'
+        for key, value in converter.inputs.items():
+            if deps and value.name in deps:
+                params[value.name] = deps[value.name]
+            elif value.type == 'data':
+                input_name = key
+        # add potentially required/common internal tool parameters e.g. '__job_resource'
+        if target_context:
+            for key, value in target_context.items():
+                if key.startswith( '__' ):
+                    params[ key ] = value
+        params[input_name] = original_dataset
+
+        # Run converter, job is dispatched through Queue
+        converted_dataset = converter.execute( trans, incoming=params, set_output_hid=visible )[1]
+        if len(params) > 0:
+            trans.log_event( "Converter params: %s" % (str(params)), tool_id=converter.id )
+        if not visible:
+            for value in converted_dataset.values():
+                value.visible = False
+        if return_output:
+            return converted_dataset
+        return "The file conversion of %s on data %s has been added to the Queue." % (converter.name, original_dataset.hid)
+
+    # We need to clear associated files before we set metadata so that, e.g.,
+    # implicitly converted datasets are deleted as soon as metadata starts to be
+    # set and are no longer available 'while' metadata is being set, not just after.
+    # We'll also clear after setting metadata, for backwards compatibility.
+    def after_setting_metadata( self, dataset ):
+        """This function is called on the dataset after metadata is set."""
+        dataset.clear_associated_files( metadata_safe=True )
+
+    def before_setting_metadata( self, dataset ):
+        """This function is called on the dataset before metadata is set."""
+        dataset.clear_associated_files( metadata_safe=True )
+
+    def __new_composite_file( self, name, optional=False, mimetype=None, description=None, substitute_name_with_metadata=None, is_binary=False, to_posix_lines=True, space_to_tab=False, **kwds ):
+        kwds[ 'name' ] = name
+        kwds[ 'optional' ] = optional
+        kwds[ 'mimetype' ] = mimetype
+        kwds[ 'description' ] = description
+        kwds[ 'substitute_name_with_metadata' ] = substitute_name_with_metadata
+        kwds[ 'is_binary' ] = is_binary
+        kwds[ 'to_posix_lines' ] = to_posix_lines
+        kwds[ 'space_to_tab' ] = space_to_tab
+        return Bunch( **kwds )
+
+    def add_composite_file( self, name, **kwds ):
+        # self.composite_files = self.composite_files.copy()
+        self.composite_files[ name ] = self.__new_composite_file( name, **kwds )
+
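
A hedged sketch of how a composite datatype might use this hook (the class, file
names, and metadata name are invented for illustration):

    class HtmlReport(Data):  # hypothetical composite datatype
        composite_type = 'basic'

        def __init__(self, **kwd):
            Data.__init__(self, **kwd)
            self.add_composite_file('report.html', mimetype='text/html')
            # '%s.log' is renamed using the dataset's 'base_name' metadata value
            self.add_composite_file('%s.log', substitute_name_with_metadata='base_name',
                                    optional=True)
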
+    def __substitute_composite_key( self, key, composite_file, dataset=None ):
+        if composite_file.substitute_name_with_metadata:
+            if dataset:
+                meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
+            else:
+                meta_value = self.metadata_spec[ composite_file.substitute_name_with_metadata ].default
+            return key % meta_value
+        return key
+
+    @property
+    def writable_files( self ):
+        files = odict()
+        if self.composite_type != 'auto_primary_file':
+            files[ self.primary_file_name ] = self.__new_composite_file( self.primary_file_name )
+        for key, value in self.get_composite_files().items():
+            files[ key ] = value
+        return files
+
+    def get_composite_files( self, dataset=None ):
+        def substitute_composite_key( key, composite_file ):
+            if composite_file.substitute_name_with_metadata:
+                if dataset:
+                    meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
+                else:
+                    meta_value = self.metadata_spec[ composite_file.substitute_name_with_metadata ].default
+                return key % meta_value
+            return key
+        files = odict()
+        for key, value in self.composite_files.items():
+            files[ substitute_composite_key( key, value ) ] = value
+        return files
+
+    def generate_auto_primary_file( self, dataset=None ):
+        raise Exception( "generate_auto_primary_file is not implemented for this datatype." )
+
+    @property
+    def has_resolution(self):
+        return False
+
+    def matches_any( self, target_datatypes ):
+        """
+        Check if this datatype is of any of the target_datatypes or is
+        a subtype thereof.
+        """
+        datatype_classes = tuple( [ datatype if isclass( datatype ) else datatype.__class__ for datatype in target_datatypes ] )
+        return isinstance( self, datatype_classes )
+
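
matches_any accepts a mix of classes and instances; for example:

    data = Data()
    print(data.matches_any([Data]))    # True: class given directly
    print(data.matches_any([Data()]))  # True: the instance's class is used
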
+    def merge( split_files, output_file ):
+        """
+            Merging files with shutil.copyfileobj() will not hit the
+            max argument limit of cat.  gz and bz2 files also work.
+        """
+        if not split_files:
+            raise ValueError('Asked to merge zero files as %s' % output_file)
+        elif len(split_files) == 1:
+            shutil.copyfileobj(open(split_files[0], 'rb'), open(output_file, 'wb'))
+        else:
+            fdst = open(output_file, 'wb')
+            for fsrc in split_files:
+                shutil.copyfileobj(open(fsrc, 'rb'), fdst)
+            fdst.close()
+
+    merge = staticmethod(merge)
+
+    def get_visualizations( self, dataset ):
+        """
+        Returns a list of visualizations for datatype.
+        """
+
+        if self.track_type:
+            return [ 'trackster', 'circster' ]
+        return []
+
+    # ------------- Dataproviders
+    def has_dataprovider( self, data_format ):
+        """
+        Returns True if `data_format` is available in `dataproviders`.
+        """
+        return data_format in self.dataproviders
+
+    def dataprovider( self, dataset, data_format, **settings ):
+        """
+        Base dataprovider factory for all datatypes that returns the proper provider
+        for the given `data_format` or raises a `NoProviderAvailable`.
+        """
+        if self.has_dataprovider( data_format ):
+            return self.dataproviders[ data_format ]( self, dataset, **settings )
+        raise dataproviders.exceptions.NoProviderAvailable( self, data_format )
+
+    @dataproviders.decorators.dataprovider_factory( 'base' )
+    def base_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.base.DataProvider( dataset_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'chunk', dataproviders.chunk.ChunkDataProvider.settings )
+    def chunk_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.chunk.ChunkDataProvider( dataset_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'chunk64', dataproviders.chunk.Base64ChunkDataProvider.settings )
+    def chunk64_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.chunk.Base64ChunkDataProvider( dataset_source, **settings )
+
+    def _clean_and_set_mime_type(self, trans, mime):
+        if mime.lower() in XSS_VULNERABLE_MIME_TYPES:
+            if not getattr( trans.app.config, "serve_xss_vulnerable_mimetypes", True ):
+                mime = DEFAULT_MIME_TYPE
+        trans.response.set_content_type( mime )
+
+
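
As a sketch of the dataprovider factory interface defined above (the dataset object
and the consumer are assumptions standing in for real Galaxy objects):

    datatype = Data()
    if datatype.has_dataprovider('chunk64'):
        # keyword settings are parsed per the provider's declared `settings` dict
        provider = datatype.dataprovider(dataset, 'chunk64',
                                         chunk_index=0, chunk_size=65536)
        for chunk in provider:
            consume(chunk)  # hypothetical consumer
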
+@dataproviders.decorators.has_dataproviders
+class Text( Data ):
+    edam_format = "format_2330"
+    file_ext = 'txt'
+    line_class = 'line'
+
+    # Add metadata elements
+    MetadataElement( name="data_lines", default=0, desc="Number of data lines", readonly=True, optional=True, visible=False, no_value=0 )
+
+    def write_from_stream(self, dataset, stream):
+        """Writes data from a stream"""
+        # the data is written twice: first to a temporary file, then
+        # rewritten to the dataset with unix newlines
+        fd, temp_name = tempfile.mkstemp()
+        while True:
+            chunk = stream.read(1048576)
+            if not chunk:
+                break
+            os.write(fd, chunk)
+        os.close(fd)
+        # rewrite the file with unix newlines
+        fp = open(dataset.file_name, 'w')
+        for line in open(temp_name, "U"):
+            line = line.strip() + '\n'
+            fp.write(line)
+        fp.close()
+        os.remove( temp_name )
+
+    def set_raw_data(self, dataset, data):
+        """Saves the data on the disc"""
+        fd, temp_name = tempfile.mkstemp()
+        os.write(fd, data)
+        os.close(fd)
+        # rewrite the file with unix newlines
+        fp = open(dataset.file_name, 'w')
+        for line in open(temp_name, "U"):
+            line = line.strip() + '\n'
+            fp.write(line)
+        fp.close()
+        os.remove( temp_name )
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'text/plain'
+
+    def set_meta( self, dataset, **kwd ):
+        """
+        Set the number of lines of data in dataset.
+        """
+        dataset.metadata.data_lines = self.count_data_lines(dataset)
+
+    def estimate_file_lines( self, dataset ):
+        """
+        Perform a rough estimate by extrapolating number of lines from a small read.
+        """
+        sample_size = 1048576
+        dataset_fh = open( dataset.file_name )
+        dataset_read = dataset_fh.read(sample_size)
+        dataset_fh.close()
+        sample_lines = dataset_read.count('\n')
+        est_lines = int(sample_lines * (float(dataset.get_size()) / float(sample_size)))
+        return est_lines
+
+    def count_data_lines(self, dataset):
+        """
+        Count the number of lines of data in dataset,
+        skipping all blank lines and comments.
+        """
+        data_lines = 0
+        for line in open( dataset.file_name ):
+            line = line.strip()
+            if line and not line.startswith( '#' ):
+                data_lines += 1
+        return data_lines
+
+    def set_peek( self, dataset, line_count=None, is_multi_byte=False, WIDTH=256, skipchars=None, line_wrap=True ):
+        """
+        Set the peek.  This method is used by various subclasses of Text.
+        """
+        if not dataset.dataset.purged:
+            # The file must exist on disk for the get_file_peek() method
+            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte, WIDTH=WIDTH, skipchars=skipchars, line_wrap=line_wrap )
+            if line_count is None:
+                # See if line_count is stored in the metadata
+                if dataset.metadata.data_lines:
+                    dataset.blurb = "%s %s" % ( util.commaify( str(dataset.metadata.data_lines) ), inflector.cond_plural(dataset.metadata.data_lines, self.line_class) )
+                else:
+                    # The number of lines is not known and auto-detect is needed to set metadata.
+                    # This can happen when the file is larger than max_optional_metadata_filesize.
+                    if int(dataset.get_size()) <= 1048576:
+                        # Small dataset, recount all lines and reset peek afterward.
+                        lc = self.count_data_lines(dataset)
+                        dataset.metadata.data_lines = lc
+                        dataset.blurb = "%s %s" % ( util.commaify( str(lc) ), inflector.cond_plural(lc, self.line_class) )
+                    else:
+                        est_lines = self.estimate_file_lines(dataset)
+                        dataset.blurb = "~%s %s" % ( util.commaify(util.roundify(str(est_lines))), inflector.cond_plural(est_lines, self.line_class) )
+            else:
+                dataset.blurb = "%s %s" % ( util.commaify( str(line_count) ), inflector.cond_plural(line_count, self.line_class) )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def split( cls, input_datasets, subdir_generator_function, split_params):
+        """
+        Split the input files by line.
+        """
+        if split_params is None:
+            return
+
+        if len(input_datasets) > 1:
+            raise Exception("Text file splitting does not support multiple files")
+        input_files = [ds.file_name for ds in input_datasets]
+
+        lines_per_file = None
+        chunk_size = None
+        if split_params['split_mode'] == 'number_of_parts':
+            lines_per_file = []
+
+            # Computing the length is expensive!
+            def _file_len(fname):
+                i = 0
+                f = open(fname)
+                for i, _ in enumerate(f):
+                    pass
+                f.close()
+                return i + 1
+            length = _file_len(input_files[0])
+            parts = int(split_params['split_size'])
+            if length < parts:
+                parts = length
+            len_each, remainder = divmod(length, parts)
+            while length > 0:
+                chunk = len_each
+                if remainder > 0:
+                    chunk += 1
+                lines_per_file.append(chunk)
+                remainder -= 1
+                length -= chunk
+        elif split_params['split_mode'] == 'to_size':
+            chunk_size = int(split_params['split_size'])
+        else:
+            raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+
+        f = open(input_files[0], 'r')
+        try:
+            chunk_idx = 0
+            file_done = False
+            part_file = None
+            while not file_done:
+                if lines_per_file is None:
+                    this_chunk_size = chunk_size
+                elif chunk_idx < len(lines_per_file):
+                    this_chunk_size = lines_per_file[chunk_idx]
+                    chunk_idx += 1
+                lines_remaining = this_chunk_size
+                part_file = None
+                while lines_remaining > 0:
+                    a_line = f.readline()
+                    if a_line == '':
+                        file_done = True
+                        break
+                    if part_file is None:
+                        part_dir = subdir_generator_function()
+                        part_path = os.path.join(part_dir, os.path.basename(input_files[0]))
+                        part_file = open(part_path, 'w')
+                    part_file.write(a_line)
+                    lines_remaining -= 1
+                if part_file is not None:
+                    part_file.close()
+        except Exception as e:
+            log.error('Unable to split files: %s' % str(e))
+            f.close()
+            if part_file is not None:
+                part_file.close()
+            raise
+        f.close()
+    split = classmethod(split)
+
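
A sketch of how Text.split is driven (the dataset and the directory-generator
callable are assumptions standing in for what the job-splitting machinery passes):

    # split one text dataset into four parts of roughly equal line counts
    Text.split([input_dataset], make_part_dir,
               {'split_mode': 'number_of_parts', 'split_size': 4})

    # or cap each part at 1000 lines
    Text.split([input_dataset], make_part_dir,
               {'split_mode': 'to_size', 'split_size': 1000})
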
+    # ------------- Dataproviders
+    @dataproviders.decorators.dataprovider_factory( 'line', dataproviders.line.FilteredLineDataProvider.settings )
+    def line_dataprovider( self, dataset, **settings ):
+        """
+        Returns an iterator over the dataset's lines (that have been stripped)
+        optionally excluding blank lines and lines that start with a comment character.
+        """
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.line.FilteredLineDataProvider( dataset_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'regex-line', dataproviders.line.RegexLineDataProvider.settings )
+    def regex_line_dataprovider( self, dataset, **settings ):
+        """
+        Returns an iterator over the dataset's lines
+        optionally including/excluding lines that match one or more regex filters.
+        """
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.line.RegexLineDataProvider( dataset_source, **settings )
+
+
+class GenericAsn1( Text ):
+    """Class for generic ASN.1 text format"""
+    edam_data = "data_0849"
+    edam_format = "format_1966"
+    file_ext = 'asn1'
+
+
+class LineCount( Text ):
+    """
+    Dataset contains a single line with a single integer that denotes the
+    line count for a related dataset. Used for custom builds.
+    """
+    pass
+
+
+class Newick( Text ):
+    """New Hampshire/Newick Format"""
+    edam_data = "data_0872"
+    edam_format = "format_1910"
+    file_ext = "nhx"
+
+    def __init__(self, **kwd):
+        """Initialize foobar datatype"""
+        Text.__init__( self, **kwd )
+
+    def init_meta( self, dataset, copy_from=None ):
+        Text.init_meta( self, dataset, copy_from=copy_from )
+
+    def sniff( self, filename ):
+        """ Returning false as the newick format is too general and cannot be sniffed."""
+        return False
+
+    def get_visualizations( self, dataset ):
+        """
+        Returns a list of visualizations for datatype.
+        """
+
+        return [ 'phyloviz' ]
+
+
+class Nexus( Text ):
+    """Nexus format as used By Paup, Mr Bayes, etc"""
+    edam_data = "data_0872"
+    edam_format = "format_1912"
+    file_ext = "nex"
+
+    def __init__(self, **kwd):
+        """Initialize foobar datatype"""
+        Text.__init__( self, **kwd )
+
+    def init_meta( self, dataset, copy_from=None ):
+        Text.init_meta( self, dataset, copy_from=copy_from )
+
+    def sniff( self, filename ):
+        """All Nexus Files Simply puts a '#NEXUS' in its first line"""
+        f = open( filename, "r" )
+        firstline = f.readline().upper()
+        f.close()
+
+        if "#NEXUS" in firstline:
+            return True
+        else:
+            return False
+
+    def get_visualizations( self, dataset ):
+        """
+        Returns a list of visualizations for datatype.
+        """
+
+        return [ 'phyloviz' ]
+
+
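
A quick check of the two sniffers above (the file path is hypothetical):

    with open('/tmp/example.nex', 'w') as f:
        f.write('#NEXUS\nbegin data;\nend;\n')
    print(Nexus().sniff('/tmp/example.nex'))   # True: first line starts with #NEXUS
    print(Newick().sniff('/tmp/example.nex'))  # False: Newick never sniffs
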
+# ------------- Utility methods --------------
+
+# nice_size used to be here, but to resolve cyclical dependencies it's been
+# moved to galaxy.util.  It belongs there anyway since it's used outside
+# datatypes.
+nice_size = util.nice_size
+
+
+def get_test_fname( fname ):
+    """Returns test data filename"""
+    path = os.path.dirname(__file__)
+    full_path = os.path.join( path, 'test', fname )
+    return full_path
+
+
+def get_file_peek( file_name, is_multi_byte=False, WIDTH=256, LINE_COUNT=5, skipchars=None, line_wrap=True ):
+    """
+    Returns the first LINE_COUNT lines wrapped to WIDTH
+
+    >>> fname = get_test_fname('4.bed')
+    >>> get_file_peek(fname, LINE_COUNT=1)
+    u'chr22\\t30128507\\t31828507\\tuc003bnx.1_cds_2_0_chr22_29227_f\\t0\\t+\\n'
+    """
+    # Set size for file.readline() to a negative number to force it to
+    # read until either a newline or EOF.  Needed for datasets with very
+    # long lines.
+    if WIDTH == 'unlimited':
+        WIDTH = -1
+    if skipchars is None:
+        skipchars = []
+    lines = []
+    count = 0
+    file_type = None
+    data_checked = False
+    temp = open( file_name, "U" )
+    while count < LINE_COUNT:
+        line = temp.readline( WIDTH )
+        if line and not is_multi_byte and not data_checked:
+            # See if we have a compressed or binary file
+            if line[0:2] == util.gzip_magic:
+                file_type = 'gzipped'
+            else:
+                for char in line:
+                    if ord( char ) > 128:
+                        file_type = 'binary'
+                        break
+            data_checked = True
+            if file_type in [ 'gzipped', 'binary' ]:
+                break
+        if not line_wrap:
+            if line.endswith('\n'):
+                line = line[:-1]
+            else:
+                while True:
+                    i = temp.read(1)
+                    if not i or i == '\n':
+                        break
+        skip_line = False
+        for skipchar in skipchars:
+            if line.startswith( skipchar ):
+                skip_line = True
+                break
+        if not skip_line:
+            lines.append( line )
+            count += 1
+    temp.close()
+    if file_type in [ 'gzipped', 'binary' ]:
+        text = "%s file" % file_type
+    else:
+        try:
+            text = util.unicodify( '\n'.join( lines ) )
+        except UnicodeDecodeError:
+            text = "binary/unknown file"
+    return text
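
A short usage sketch of get_file_peek (the file name is hypothetical):

    # peek at the first three lines, each truncated to 80 characters
    peek = get_file_peek('/tmp/example.bed', LINE_COUNT=3, WIDTH=80)
    print(peek)
    # gzipped or binary content is detected from the first line and reported
    # as 'gzipped file' / 'binary file' instead of raw bytes
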
diff --git a/lib/galaxy/datatypes/dataproviders/__init__.py b/lib/galaxy/datatypes/dataproviders/__init__.py
new file mode 100644
index 0000000..caf9a7e
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/__init__.py
@@ -0,0 +1,31 @@
+
+# TODO: ---- This is a work in progress ----
+"""
+Dataproviders are iterators with context managers that provide data to some
+consumer datum by datum.
+
+As well as subclassing and overriding to get the proper data, Dataproviders
+can be piped from one to the other.
+
+.. note:: be careful to NOT pipe providers into subclasses of those providers.
+    Subclasses provide all the functionality of their superclasses,
+    so there's generally no need.
+
+.. note:: when using piped providers that accept the same keywords in their
+    __init__ functions (such as limit or offset), be careful to pass those
+    keywords to the proper (often final) provider. The errors that result
+    otherwise can be hard to diagnose.
+"""
+from . import (
+    base,
+    chunk,
+    column,
+    dataset,
+    decorators,
+    exceptions,
+    external,
+    hierarchy,
+    line
+)
+
+__all__ = ('decorators', 'exceptions', 'base', 'chunk', 'line', 'hierarchy', 'column', 'external', 'dataset')
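
As a sketch of the piping described above (the file name is hypothetical; note that
limit/offset keywords go to the outermost, final provider):

    from galaxy.datatypes.dataproviders import base, line

    source = open('example.txt')
    # filter lines first (stripping, skipping comments per its settings),
    # then paginate the filtered stream
    lines = line.FilteredLineDataProvider(source)
    page = base.LimitedOffsetDataProvider(lines, offset=100, limit=50)
    for datum in page:
        print(datum)
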
diff --git a/lib/galaxy/datatypes/dataproviders/base.py b/lib/galaxy/datatypes/dataproviders/base.py
new file mode 100644
index 0000000..7e8bce0
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/base.py
@@ -0,0 +1,317 @@
+"""
+Base class(es) for all DataProviders.
+"""
+# there's a blurry line between functionality here and functionality in datatypes module
+# attempting to keep parsing to a minimum here and focus on chopping/pagination/reformat(/filtering-maybe?)
+#   and using as much pre-computed info/metadata from the datatypes module as possible
+# also, this shouldn't be a replacement/re-implementation of the tool layer
+#   (which provides traceability/versioning/reproducibility)
+
+import logging
+from collections import deque
+
+import six
+
+from . import exceptions
+
+log = logging.getLogger( __name__ )
+
+_TODO = """
+hooks into datatypes (define providers inside datatype modules) as factories
+capture tell() when provider is done
+    def stop( self ): self.endpoint = source.tell(); raise StopIteration()
+implement __len__ sensibly where it can be (would be good to have where we're giving some progress - '100 of 300')
+    seems like sniffed files would have this info
+unit tests
+add datum entry/exit point methods: possibly decode, encode
+    or create a class that pipes source through - how would decode work then?
+
+incorporate existing visualization/dataproviders
+some of the sources (esp. in datasets) don't need to be re-created
+YAGNI: InterleavingMultiSourceDataProvider, CombiningMultiSourceDataProvider
+
+datasets API entry point:
+    kwargs should be parsed from strings 2 layers up (in the DatasetsAPI) - that's the 'proper' place for that.
+    but how would it know how/what to parse if it doesn't have access to the classes used in the provider?
+        Building a giant list by sweeping all possible dprov classes doesn't make sense
+    For now - I'm burying them in the class __init__s - but I don't like that
+"""
+
+
+# ----------------------------------------------------------------------------- base classes
+class HasSettings( type ):
+    """
+    Metaclass for data providers that allows defining and inheriting
+    a dictionary named 'settings'.
+
+    Useful for allowing class level access to expected variable types
+    passed to class `__init__` functions so they can be parsed from a query string.
+    """
+    # yeah - this is all too acrobatic
+    def __new__( cls, name, base_classes, attributes ):
+        settings = {}
+        # get settings defined in base classes
+        for base_class in base_classes:
+            base_settings = getattr( base_class, 'settings', None )
+            if base_settings:
+                settings.update( base_settings )
+        # get settings defined in this class
+        new_settings = attributes.pop( 'settings', None )
+        if new_settings:
+            settings.update( new_settings )
+        attributes[ 'settings' ] = settings
+        return type.__new__( cls, name, base_classes, attributes )
+
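
A minimal sketch of the merge behavior (class names invented; assumes HasSettings is
importable):

    import six

    @six.add_metaclass(HasSettings)
    class Base(object):
        settings = {'limit': 'int'}

    class Child(Base):
        settings = {'offset': 'int'}

    # the metaclass folded the parent's settings into the child's:
    # Child.settings == {'limit': 'int', 'offset': 'int'}
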
+
+# ----------------------------------------------------------------------------- base classes
+@six.add_metaclass(HasSettings)
+class DataProvider( six.Iterator ):
+    """
+    Base class for all data providers. Data providers:
+        (a) have a source (which must be another file-like object)
+        (b) implement both the iterator and context manager interfaces
+        (c) do not allow write methods
+            (but otherwise implement the other file object interface methods)
+    """
+    # a definition of expected types for keyword arguments sent to __init__
+    #   useful for controlling how query string dictionaries can be parsed into correct types for __init__
+    #   empty in this base class
+    settings = {}
+
+    def __init__( self, source, **kwargs ):
+        """
+        :param source: the source that this iterator will loop over.
+            (Should implement the iterable interface and ideally have the
+            context manager interface as well)
+        """
+        self.source = self.validate_source( source )
+
+    def validate_source( self, source ):
+        """
+        Is this a valid source for this provider?
+
+        :raises InvalidDataProviderSource: if the source is considered invalid.
+
+        Meant to be overridden in subclasses.
+        """
+        if not source or not hasattr( source, '__iter__' ):
+            # that's by no means a thorough check
+            raise exceptions.InvalidDataProviderSource( source )
+        return source
+
+    # TODO: (this might cause problems later...)
+    # TODO: some providers (such as chunk's seek and read) rely on this... remove
+    def __getattr__( self, name ):
+        if name == 'source':
+            # if we're inside this fn, source hasn't been set - provide some safety just for this attr
+            return None
+        # otherwise, try to get the attr from the source - allows us to get things like provider.encoding, etc.
+        if hasattr( self.source, name ):
+            return getattr( self.source, name )
+        # raise the proper error
+        return self.__getattribute__( name )
+
+    # write methods should not be allowed
+    def truncate( self, size ):
+        raise NotImplementedError( 'Write methods are purposely disabled' )
+
+    def write( self, string ):
+        raise NotImplementedError( 'Write methods are purposely disabled' )
+
+    def writelines( self, sequence ):
+        raise NotImplementedError( 'Write methods are purposely disabled' )
+
+    # TODO: route read methods through next?
+    # def readline( self ):
+    #    return self.next()
+    def readlines( self ):
+        return [ line for line in self ]
+
+    # iterator interface
+    def __iter__( self ):
+        # it's generators all the way up, Timmy
+        with self:
+            for datum in self.source:
+                yield datum
+
+    def __next__( self ):
+        return next(self.source)
+
+    # context manager interface
+    def __enter__( self ):
+        # make the source's context manager interface optional
+        if hasattr( self.source, '__enter__' ):
+            self.source.__enter__()
+        return self
+
+    def __exit__( self, *args ):
+        # make the source's context manager interface optional, call on source if there
+        if hasattr( self.source, '__exit__' ):
+            self.source.__exit__( *args )
+        # alternately, call close()
+        elif hasattr( self.source, 'close' ):
+            self.source.close()
+
+    def __str__( self ):
+        """
+        String representation for easier debugging.
+
+        Will call `__str__` on its source so this will display piped dataproviders.
+        """
+        # we need to protect against recursion (in __getattr__) if self.source hasn't been set
+        source_str = str( self.source ) if hasattr( self, 'source' ) else ''
+        return '%s(%s)' % ( self.__class__.__name__, str( source_str ) )
+
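
A usage sketch (the file name is hypothetical); the provider handles the
context-manager bookkeeping itself:

    provider = DataProvider(open('example.txt'))
    for datum in provider:
        print(datum.rstrip())
    # __iter__ wraps iteration in `with self:`, so the underlying file is
    # closed when the loop finishes
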
+
+class FilteredDataProvider( DataProvider ):
+    """
+    Passes each datum through a filter function and yields it if that function
+    returns a non-`None` value.
+
+    Also maintains counters:
+        - `num_data_read`: how many data have been consumed from the source.
+        - `num_valid_data_read`: how many data have been returned from `filter`.
+        - `num_data_returned`: how many data this provider has yielded.
+    """
+    # not useful here - we don't want functions over the query string
+    # settings.update({ 'filter_fn': 'function' })
+
+    def __init__( self, source, filter_fn=None, **kwargs ):
+        """
+        :param filter_fn: a lambda or function that will be passed a datum and
+            return either the (optionally modified) datum or None.
+        """
+        super( FilteredDataProvider, self ).__init__( source, **kwargs )
+        self.filter_fn = filter_fn if hasattr( filter_fn, '__call__' ) else None
+        # count how many data we got from the source
+        self.num_data_read = 0
+        # how many valid data have we gotten from the source
+        #   i.e. data that passed the filter and were either provided OR skipped due to offset
+        self.num_valid_data_read = 0
+        # how many lines have been provided/output
+        self.num_data_returned = 0
+
+    def __iter__( self ):
+        parent_gen = super( FilteredDataProvider, self ).__iter__()
+        for datum in parent_gen:
+            self.num_data_read += 1
+            datum = self.filter( datum )
+            if datum is not None:
+                self.num_valid_data_read += 1
+                self.num_data_returned += 1
+                yield datum
+
+    # TODO: may want to squash this into DataProvider
+    def filter( self, datum ):
+        """
+        When given a datum from the provider's source, return None if the datum
+        'does not pass' the filter or is invalid. Return the datum if it's valid.
+
+        :param datum: the datum to check for validity.
+        :returns: the datum, a modified datum, or None
+
+        Meant to be overridden.
+        """
+        if self.filter_fn:
+            return self.filter_fn( datum )
+        # can also be overridden entirely
+        return datum
+
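
For example, a filter that keeps only comment lines might look like this (hedged
sketch; the file name is hypothetical):

    keep_comments = lambda datum: datum if datum.startswith('#') else None
    provider = FilteredDataProvider(open('example.txt'), filter_fn=keep_comments)
    comments = list(provider)
    # provider.num_data_read counts every line consumed;
    # provider.num_valid_data_read counts only those that passed the filter
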
+
+class LimitedOffsetDataProvider( FilteredDataProvider ):
+    """
+    A provider that uses the counters from FilteredDataProvider to limit the
+    number of data and/or skip `offset` number of data before providing.
+
+    Useful for grabbing sections from a source (e.g. pagination).
+    """
+    # define the expected types of these __init__ arguments so they can be parsed out from query strings
+    settings = {
+        'limit' : 'int',
+        'offset': 'int'
+    }
+
+    # TODO: may want to squash this into DataProvider
+    def __init__( self, source, offset=0, limit=None, **kwargs ):
+        """
+        :param offset:  the number of data to skip before providing.
+        :param limit:   the final number of data to provide.
+        """
+        super( LimitedOffsetDataProvider, self ).__init__( source, **kwargs )
+
+        # how many valid data to skip before we start outputting data - must be positive
+        #   (negative indices are not supported - the offset must be positive)
+        self.offset = max( offset, 0 )
+
+        # how many valid data to return - must be positive (None indicates no limit)
+        self.limit = limit
+        if self.limit is not None:
+            self.limit = max( self.limit, 0 )
+
+    def __iter__( self ):
+        """
+        Iterate over the source until `num_valid_data_read` is greater than
+        `offset`, begin providing data, and stop when `num_data_returned`
+        reaches `limit`.
+        """
+        if self.limit is not None and self.limit <= 0:
+            # the unreachable yield below makes this function an empty generator
+            return
+            yield
+
+        parent_gen = super( LimitedOffsetDataProvider, self ).__iter__()
+        for datum in parent_gen:
+            self.num_data_returned -= 1
+
+            if self.num_valid_data_read > self.offset:
+                self.num_data_returned += 1
+                yield datum
+
+            if self.limit is not None and self.num_data_returned >= self.limit:
+                break
+
+    # TODO: skipping lines is inefficient - somehow cache file position/line_num pair and allow provider
+    #   to seek to a pos/line and then begin providing lines
+    # the important catch here is that we need to have accurate pos/line pairs
+    #   in order to preserve the functionality of limit and offset
+    # if file_seek and len( file_seek ) == 2:
+    #    seek_pos, new_line_num = file_seek
+    #    self.seek_and_set_curr_line( seek_pos, new_line_num )
+
+    # def seek_and_set_curr_line( self, file_seek, new_curr_line_num ):
+    #    self.seek( file_seek, os.SEEK_SET )
+    #    self.curr_line_num = new_curr_line_num
+
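+# An editor's pagination sketch (hypothetical; assumes an iterable source is
+# acceptable to the base class): skip the first 20 valid data, provide the next 10.
+#
+#     page = LimitedOffsetDataProvider( iter( range( 100 ) ), offset=20, limit=10 )
+#     assert list( page ) == list( range( 20, 30 ) )
+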
+
+class MultiSourceDataProvider( DataProvider ):
+    """
+    A provider that iterates over a list of given sources and provides data
+    from one after another.
+
+    An iterator over iterators.
+    """
+    def __init__( self, source_list, **kwargs ):
+        """
+        :param source_list: an iterator of iterables
+        """
+        self.source_list = deque( source_list )
+
+    def __iter__( self ):
+        """
+        Iterate over the source_list, then iterate over the data in each source.
+
+        Skip a given source in `source_list` if it is `None` or invalid.
+        """
+        for source in self.source_list:
+            # just skip falsy sources
+            if not source:
+                continue
+            try:
+                self.source = self.validate_source( source )
+            except exceptions.InvalidDataProviderSource:
+                continue
+
+            parent_gen = super( MultiSourceDataProvider, self ).__iter__()
+            for datum in parent_gen:
+                yield datum
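+
+
+# An editor's sketch of chaining sources (hypothetical): falsy and invalid
+# sources are skipped; valid ones are drained in order.
+#
+#     multi = MultiSourceDataProvider( [ iter( 'ab' ), None, iter( 'cd' ) ] )
+#     assert list( multi ) == [ 'a', 'b', 'c', 'd' ]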
diff --git a/lib/galaxy/datatypes/dataproviders/chunk.py b/lib/galaxy/datatypes/dataproviders/chunk.py
new file mode 100644
index 0000000..435c70d
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/chunk.py
@@ -0,0 +1,79 @@
+"""
+Chunk (N bytes at offset M from a source's beginning) provider.
+
+Primarily for file sources, but usable by any source that has both
+`seek` and `read( N )`.
+"""
+import os
+import base64
+import base
+import exceptions
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# -----------------------------------------------------------------------------
+class ChunkDataProvider( base.DataProvider ):
+    """
+    Data provider that yields chunks of data from its file.
+
+    Note: this version does not account for lines and works with Binary datatypes.
+    """
+    MAX_CHUNK_SIZE = 2 ** 16
+    DEFAULT_CHUNK_SIZE = MAX_CHUNK_SIZE
+    settings = {
+        'chunk_index'   : 'int',
+        'chunk_size'    : 'int'
+    }
+
+    # TODO: subclass from LimitedOffsetDataProvider?
+    # see web/framework/base.iterate_file, util/__init__.file_reader, and datatypes.tabular
+    def __init__( self, source, chunk_index=0, chunk_size=DEFAULT_CHUNK_SIZE, **kwargs ):
+        """
+        :param chunk_index: if a source can be divided into N number of
+            `chunk_size` sections, this is the index of which section to
+            return.
+        :param chunk_size:  how large are the desired chunks to return
+            (generally in bytes).
+        """
+        super( ChunkDataProvider, self ).__init__( source, **kwargs )
+        self.chunk_size = int( chunk_size )
+        self.chunk_pos = int( chunk_index ) * self.chunk_size
+
+    def validate_source( self, source ):
+        """
+        Does the given source have both the methods `seek` and `read`?
+        :raises InvalidDataProviderSource: if not.
+        """
+        source = super( ChunkDataProvider, self ).validate_source( source )
+        if not hasattr( source, 'seek' ) or not hasattr( source, 'read' ):
+            raise exceptions.InvalidDataProviderSource( source )
+        return source
+
+    def __iter__( self ):
+        # not reeeally an iterator per se
+        self.__enter__()
+        self.source.seek( self.chunk_pos, os.SEEK_SET )
+        chunk = self.encode( self.source.read( self.chunk_size ) )
+        yield chunk
+        self.__exit__()
+
+    def encode( self, chunk ):
+        """
+        Called on the chunk before returning.
+
+        Override to modify, encode, or decode chunks.
+        """
+        return chunk
+
+
+class Base64ChunkDataProvider( ChunkDataProvider ):
+    """
+    Data provider that yields chunks of base64 encoded data from its file.
+    """
+    def encode( self, chunk ):
+        """
+        Return chunks encoded in base 64.
+        """
+        return base64.b64encode( chunk )
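+
+
+# An editor's sketch (hypothetical file name): `chunk_index` selects which
+# `chunk_size`-byte window is read, i.e. bytes [ index * size, ( index + 1 ) * size ).
+#
+#     with open( 'data.bin', 'rb' ) as f:
+#         ( chunk, ) = list( ChunkDataProvider( f, chunk_index=1, chunk_size=1024 ) )
+#         # chunk holds bytes 1024..2047 of the file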
diff --git a/lib/galaxy/datatypes/dataproviders/column.py b/lib/galaxy/datatypes/dataproviders/column.py
new file mode 100644
index 0000000..bc97fc6
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/column.py
@@ -0,0 +1,357 @@
+"""
+Providers that provide lists of lists, generally where each line of a source
+is further subdivided into multiple data (e.g. columns from a line).
+"""
+
+from six.moves.urllib.parse import unquote_plus
+import re
+
+from . import line
+
+_TODO = """
+move ColumnarDataProvider parsers to more sensible location
+
+TransposedColumnarDataProvider: provides each column as a single array
+    - see existing visualizations/dataprovider/basic.ColumnDataProvider
+"""
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# ----------------------------------------------------------------------------- base classes
+class ColumnarDataProvider( line.RegexLineDataProvider ):
+    """
+    Data provider that provides a list of columns from the lines of its source.
+
+    Columns are returned in the order given in indeces, so this provider can
+    re-arrange columns.
+
+    If any desired index is outside the actual number of columns
+    in the source, this provider will None-pad the output and you are guaranteed
+    the same number of columns as the number of indeces asked for (even if they
+    are filled with None).
+    """
+    settings = {
+        'indeces'       : 'list:int',
+        'column_count'  : 'int',
+        'column_types'  : 'list:str',
+        'parse_columns' : 'bool',
+        'deliminator'   : 'str',
+        'filters'       : 'list:str'
+    }
+
+    def __init__( self, source, indeces=None,
+                  column_count=None, column_types=None, parsers=None, parse_columns=True,
+                  deliminator='\t', filters=None, **kwargs ):
+        """
+        :param indeces: a list of indeces of columns to gather from each row.
+            Optional: will default to `None`.
+            If `None`, this provider will return all columns (even when a
+                particular row contains more/fewer than others).
+            If a row/line does not contain an element at a given index, the
+                provider will fill in a `None` value as that element.
+        :type indeces: list or None
+
+        :param column_count: an alternate means of defining indeces, use an int
+            here to effectively provide the first N columns.
+            Optional: will default to `None`.
+        :type column_count: int
+
+        :param column_types: a list of string names of types that the
+            provider will use to look up an appropriate parser for the column.
+            (e.g. 'int', 'float', 'str', 'bool')
+            Optional: will default to parsing all columns as strings.
+        :type column_types: list of strings
+
+        :param parsers: a dictionary keyed with column type strings
+            and with values that are functions to use when parsing those
+            types.
+            Optional: will default to using the function `_get_default_parsers`.
+        :type parsers: dictionary
+
+        :param parse_columns: attempt to parse columns?
+            Optional: defaults to `True`.
+        :type parse_columns: bool
+
+        :param deliminator: character(s) used to split each row/line of the source.
+            Optional: defaults to the tab character.
+        :type deliminator: str
+
+        .. note:: the subclass constructors are passed kwargs - so their
+            params (limit, offset, etc.) are also applicable here.
+        """
+        # TODO: other columnar formats: csv, etc.
+        super( ColumnarDataProvider, self ).__init__( source, **kwargs )
+
+        # IMPLICIT: if no indeces, column_count, or column_types passed: return all columns
+        self.selected_column_indeces = indeces
+        self.column_count = column_count
+        self.column_types = column_types or []
+        # if no column count given, try to infer from indeces or column_types
+        if not self.column_count:
+            if self.selected_column_indeces:
+                self.column_count = len( self.selected_column_indeces )
+            elif self.column_types:
+                self.column_count = len( self.column_types )
+        # if no indeces given, infer from column_count
+        if not self.selected_column_indeces and self.column_count:
+            self.selected_column_indeces = list( range( self.column_count ) )
+
+        self.deliminator = deliminator
+
+        # how/whether to parse each column value
+        self.parsers = {}
+        if parse_columns:
+            self.parsers = self.get_default_parsers()
+            # overwrite with user desired parsers
+            self.parsers.update( parsers or {} )
+
+        filters = filters or []
+        self.column_filters = []
+        for filter_ in filters:
+            parsed = self.parse_filter( filter_ )
+            # TODO: might be better to error on bad filter/None here
+            if callable( parsed ):
+                self.column_filters.append( parsed )
+
+    def parse_filter( self, filter_param_str ):
+        split = filter_param_str.split( '-', 2 )
+        if len( split ) < 3:
+            return None
+        column, op, val = split
+
+        # better checking v. len and indeces
+        column = int( column )
+        if column >= len( self.column_types ):
+            return None
+        if self.column_types[ column ] in ( 'float', 'int' ):
+            return self.create_numeric_filter( column, op, val )
+        if self.column_types[ column ] == 'str':
+            return self.create_string_filter( column, op, val )
+        if self.column_types[ column ] == 'list':
+            return self.create_list_filter( column, op, val )
+        return None
+
+    def create_numeric_filter( self, column, op, val ):
+        """
+        Return an anonymous filter function that will be passed the array
+        of parsed columns. Return None if no filter function can be
+        created for the given params.
+
+        The function will compare the column at index `column` against `val`
+        using the given op where op is one of:
+
+        - lt: less than
+        - le: less than or equal to
+        - eq: equal to
+        - ne: not equal to
+        - ge: greater than or equal to
+        - gt: greater than
+
+        `val` is cast to float here; None is returned if parsing fails.
+        """
+        try:
+            val = float( val )
+        except ValueError:
+            return None
+        if 'lt' == op:
+            return lambda d: d[column] < val
+        elif 'le' == op:
+            return lambda d: d[column] <= val
+        elif 'eq' == op:
+            return lambda d: d[column] == val
+        elif 'ne' == op:
+            return lambda d: d[column] != val
+        elif 'ge' == op:
+            return lambda d: d[column] >= val
+        elif 'gt' == op:
+            return lambda d: d[column] > val
+        return None
+
+    def create_string_filter( self, column, op, val ):
+        """
+        Return an anonymous filter function that will be passed the array
+        of parsed columns. Return None if no filter function can be
+        created for the given params.
+
+        The function will compare the column at index `column` against `val`
+        using the given op where op is one of:
+
+        - eq: exactly matches
+        - has: the column contains the substring `val`
+        - re: the column matches the regular expression in `val`
+        """
+        if 'eq' == op:
+            return lambda d: d[column] == val
+        elif 'has' == op:
+            return lambda d: val in d[column]
+        elif 're' == op:
+            val = unquote_plus( val )
+            val = re.compile( val )
+            return lambda d: val.match( d[column] ) is not None
+        return None
+
+    def create_list_filter( self, column, op, val ):
+        """
+        Return an anonymous filter function that will be passed the array
+        of parsed columns. Return None if no filter function can be
+        created for the given params.
+
+        The function will compare the column at index `column` against `val`
+        using the given op where op is one of:
+
+        - eq: the list `val` exactly matches the list in the column
+        - has: the list in the column contains the sublist `val`
+        """
+        if 'eq' == op:
+            val = self.parse_value( val, 'list' )
+            return lambda d: d[column] == val
+        elif 'has' == op:
+            return lambda d: val in d[column]
+        return None
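+
+    # An editor's sketch of the filter string format "<column>-<op>-<value>"
+    # parsed above (hypothetical `source`): keep rows whose column 1 parses as
+    # an int less than 100.
+    #
+    #     provider = ColumnarDataProvider( source, column_types=[ 'str', 'int' ],
+    #                                      filters=[ '1-lt-100' ] )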
+
+    def get_default_parsers( self ):
+        """
+        Return parser dictionary keyed for each columnar type
+        (as defined in datatypes).
+
+        .. note:: primitives only by default (str, int, float, boolean, None).
+            Other (more complex) types are retrieved as strings.
+
+        :returns: a dictionary of the form:
+            `{ <parser type name> : <function used to parse type> }`
+        """
+        # TODO: move to module level (or datatypes, util)
+        return {
+            # str is default and not needed here
+            'int'   : int,
+            'float' : float,
+            'bool'  : bool,
+
+            # unfortunately, 'list' is used in dataset metadata both for
+            #   query style maps (9th col gff) AND comma-sep strings.
+            #   (disabled for now)
+            # 'list'  : lambda v: v.split( ',' ),
+            # 'csv'   : lambda v: v.split( ',' ),
+            # i don't like how urlparse does sub-lists...
+            # 'querystr' : lambda v: dict([ ( p.split( '=', 1 ) if '=' in p else ( p, True ) )
+            #                              for p in v.split( ';', 1 ) ])
+
+            # 'scifloat': #floating point which may be in scientific notation
+
+            # always with the 1 base, biologists?
+            # 'int1'  : ( lambda i: int( i ) - 1 ),
+
+            # 'gffval': string or '.' for None
+            # 'gffint': # int or '.' for None
+            # 'gffphase': # 0, 1, 2, or '.' for None
+            # 'gffstrand': # -, +, ?, or '.' for None, etc.
+        }
+
+    def filter( self, line ):
+        line = super( ColumnarDataProvider, self ).filter( line )
+        if line is None:
+            return line
+        columns = self.parse_columns_from_line( line )
+        return self.filter_by_columns( columns )
+
+    def parse_columns_from_line( self, line ):
+        """
+        Returns a list of the desired, parsed columns.
+        :param line: the line to parse
+        :type line: str
+        """
+        # TODO: too much going on in this loop - the above should all be precomputed AMAP...
+        all_columns = line.split( self.deliminator )
+        # if no indeces were passed to init, return all columns
+        selected_indeces = self.selected_column_indeces or list( range( len( all_columns ) ) )
+        parsed_columns = []
+        for parser_index, column_index in enumerate( selected_indeces ):
+            parsed_columns.append( self.parse_column_at_index( all_columns, parser_index, column_index ) )
+        return parsed_columns
+
+    def parse_column_at_index( self, columns, parser_index, index ):
+        """
+        Return the parsed value of `columns[ index ]` using the parser chosen
+        by `parser_index`, or `None` if `index` is outside `columns`.
+        """
+        try:
+            return self.parse_value( columns[ index ], self.get_column_type( parser_index ) )
+        # if a selected index is not within columns, return None
+        except IndexError:
+            return None
+
+    def parse_value( self, val, type ):
+        """
+        Attempt to parse and return the given value based on the given type.
+
+        :param val: the column value to parse (often a string)
+        :param type: the string type 'name' used to find the appropriate parser
+        :returns: the parsed value
+            or `value` if no `type` found in `parsers`
+            or `None` if there was a parser error (ValueError)
+        """
+        if type == 'str' or type is None:
+            return val
+        try:
+            return self.parsers[ type ]( val )
+        except KeyError:
+            # no parser - return as string
+            pass
+        except ValueError:
+            # bad value - return None
+            return None
+        return val
+
+    def get_column_type( self, index ):
+        """
+        Get the column type for the parser from `self.column_types` or `None`
+        if the type is unavailable.
+        :param index: the column index
+        :returns: string name of type (e.g. 'float', 'int', etc.)
+        """
+        try:
+            return self.column_types[ index ]
+        except IndexError:
+            return None
+
+    def filter_by_columns( self, columns ):
+        for filter_fn in self.column_filters:
+            if not filter_fn( columns ):
+                return None
+        return columns
+
+
+class DictDataProvider( ColumnarDataProvider ):
+    """
+    Data provider that zips column_names and columns from the source's contents
+    into a dictionary.
+
+    A combination use of both `column_names` and `indeces` allows 'picking'
+    key/value pairs from the source.
+
+    .. note:: The subclass constructors are passed kwargs - so their
+        params (limit, offset, etc.) are also applicable here.
+    """
+    settings = {
+        'column_names'  : 'list:str',
+    }
+
+    def __init__( self, source, column_names=None, **kwargs ):
+        """
+        :param column_names: an ordered list of strings that will be used as the keys
+            for each column in the returned dictionaries.
+            The number of key/value pairs in each returned dictionary will
+            be at most the number of column names provided.
+        :type column_names: list of str
+        """
+        # TODO: allow passing in a map instead of name->index { 'name1': index1, ... }
+        super( DictDataProvider, self ).__init__( source, **kwargs )
+        self.column_names = column_names or []
+
+    def __iter__( self ):
+        parent_gen = super( DictDataProvider, self ).__iter__()
+        for column_values in parent_gen:
+            yield dict( zip( self.column_names, column_values ) )
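+
+
+# An editor's sketch for DictDataProvider (hypothetical; assumes the underlying
+# line provider strips newlines from each line):
+#
+#     from io import StringIO
+#     src = StringIO( u'a\t1\nb\t2\n' )
+#     rows = DictDataProvider( src, column_names=[ 'id', 'score' ],
+#                              column_types=[ 'str', 'int' ] )
+#     assert list( rows ) == [ { 'id': 'a', 'score': 1 },
+#                              { 'id': 'b', 'score': 2 } ]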
diff --git a/lib/galaxy/datatypes/dataproviders/dataset.py b/lib/galaxy/datatypes/dataproviders/dataset.py
new file mode 100644
index 0000000..2ed8314
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/dataset.py
@@ -0,0 +1,782 @@
+"""
+Dataproviders that use either:
+    - the file contents and/or metadata from a Galaxy DatasetInstance as
+        their source.
+    - or provide data in some way relevant to bioinformatic data
+        (e.g. parsing genomic regions from their source)
+"""
+
+
+import base
+import line
+import column
+import external
+from galaxy.util import sqlite
+import sys
+
+from bx import seq as bx_seq
+from bx import wiggle as bx_wig
+from bx import bbi as bx_bbi
+
+_TODO = """
+use bx as much as possible
+gff3 hierarchies
+
+change SamtoolsDataProvider to use pysam
+"""
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# ----------------------------------------------------------------------------- base for using a Glx dataset
+class DatasetDataProvider( base.DataProvider ):
+    """
+    Class that uses the file contents and/or metadata from a Galaxy DatasetInstance
+    as its source.
+
+    DatasetDataProvider can be seen as the intersection between a datatype's
+    metadata and a dataset's file contents. It (so far) mainly provides helper
+    and convenience methods for using dataset metadata to set up and control how
+    the data is provided.
+    """
+    def __init__( self, dataset, **kwargs ):
+        """
+        :param dataset: the Galaxy dataset whose file will be the source
+        :type dataset: model.DatasetInstance
+        """
+        # precondition: dataset is a galaxy.model.DatasetInstance
+        self.dataset = dataset
+        # this dataset file is obviously the source
+        # TODO: this might be a good place to interface with the object_store...
+        super( DatasetDataProvider, self ).__init__( open( dataset.file_name, 'rb' ) )
+
+    # TODO: this is a bit of a mess
+    @classmethod
+    def get_column_metadata_from_dataset( cls, dataset ):
+        """
+        Convenience class method to get column metadata from a dataset.
+
+        :returns: a dictionary of `column_count`, `column_types`, and `column_names`
+            if they're available, setting each to `None` if not.
+        """
+        # re-map keys to fit ColumnarProvider.__init__ kwargs
+        params = {}
+        params[ 'column_count' ] = dataset.metadata.columns
+        params[ 'column_types' ] = dataset.metadata.column_types
+        params[ 'column_names' ] = dataset.metadata.column_names or getattr( dataset.datatype, 'column_names', None )
+        return params
+
+    def get_metadata_column_types( self, indeces=None ):
+        """
+        Return the list of `column_types` for this dataset or `None` if unavailable.
+
+        :param indeces: the indeces for the columns of which to return the types.
+            Optional: defaults to None (return all types)
+        :type indeces: list of ints
+        """
+        metadata_column_types = ( self.dataset.metadata.column_types or
+                                  getattr( self.dataset.datatype, 'column_types', None ) or None )
+        if not metadata_column_types:
+            return metadata_column_types
+        if indeces:
+            column_types = []
+            for index in indeces:
+                column_type = metadata_column_types[ index ] if index < len( metadata_column_types ) else None
+                column_types.append( column_type )
+            return column_types
+        return metadata_column_types
+
+    def get_metadata_column_names( self, indeces=None ):
+        """
+        Return the list of `column_names` for this dataset or `None` if unavailable.
+
+        :param indeces: the indeces for the columns of which to return the names.
+            Optional: defaults to None (return all names)
+        :type indeces: list of ints
+        """
+        metadata_column_names = ( self.dataset.metadata.column_names or
+                                  getattr( self.dataset.datatype, 'column_names', None ) or None )
+        if not metadata_column_names:
+            return metadata_column_names
+        if indeces:
+            column_names = []
+            for index in indeces:
+                column_name = metadata_column_names[ index ] if index < len( metadata_column_names ) else None
+                column_names.append( column_name )
+            return column_names
+        return metadata_column_names
+
+    # TODO: merge the next two
+    def get_indeces_by_column_names( self, list_of_column_names ):
+        """
+        Return the list of column indeces when given a list of column_names.
+
+        :param list_of_column_names: the names of the columns of which to get indeces.
+        :type list_of_column_names: list of strs
+
+        :raises KeyError: if column_names are not found
+        :raises ValueError: if an entry in list_of_column_names is not in column_names
+        """
+        metadata_column_names = ( self.dataset.metadata.column_names or
+                                  getattr( self.dataset.datatype, 'column_names', None ) or None )
+        if not metadata_column_names:
+            raise KeyError( 'No column_names found for ' +
+                            'datatype: %s, dataset: %s' % ( str( self.dataset.datatype ), str( self.dataset ) ) )
+        indeces = []
+        for column_name in list_of_column_names:
+            indeces.append( metadata_column_names.index( column_name ) )
+        return indeces
+
+    def get_metadata_column_index_by_name( self, name ):
+        """
+        Return the 0-based index of the source's column with the given `name`
+        (the metadata stores these as 1-based indeces).
+        """
+        # metadata columns are 1-based indeces
+        column = getattr( self.dataset.metadata, name )
+        return ( column - 1 ) if ( isinstance( column, int ) and column > 0 ) else None
+
+    def get_genomic_region_indeces( self, check=False ):
+        """
+        Return a list of column indeces for 'chromCol', 'startCol', 'endCol' from
+        a source representing a genomic region.
+
+        :param check: if True will raise a ValueError if any were not found.
+        :type check: bool
+        :raises ValueError: if check is `True` and one or more indeces were not found.
+        :returns: list of column indeces for the named columns.
+        """
+        region_column_names = ( 'chromCol', 'startCol', 'endCol' )
+        region_indices = [ self.get_metadata_column_index_by_name( name ) for name in region_column_names ]
+        if check and not all( map( lambda i: i is not None, region_indices) ):
+            raise ValueError( "Could not determine proper column indices for chrom, start, end: %s" % ( str( region_indices ) ) )
+        return region_indices
+
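+# An editor's note (sketch with hypothetical metadata values): Galaxy column
+# metadata is stored 1-based, while the helpers above return 0-based indeces.
+#
+#     # e.g. with dataset.metadata chromCol=1, startCol=2, endCol=3:
+#     provider = DatasetDataProvider( dataset )
+#     assert provider.get_genomic_region_indeces() == [ 0, 1, 2 ]
+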
+
+class ConvertedDatasetDataProvider( DatasetDataProvider ):
+    """
+    Class that uses the file contents of a dataset after conversion to a different
+    format.
+    """
+    def __init__( self, dataset, **kwargs ):
+        raise NotImplementedError( 'Abstract class' )
+        self.original_dataset = dataset
+        self.converted_dataset = self.convert_dataset( dataset, **kwargs )
+        super( ConvertedDatasetDataProvider, self ).__init__( self.converted_dataset, **kwargs )
+        # NOTE: now self.converted_dataset == self.dataset
+
+    def convert_dataset( self, dataset, **kwargs ):
+        """
+        Convert the given dataset in some way.
+        """
+        return dataset
+
+
+# ----------------------------------------------------------------------------- uses metadata for settings
+class DatasetColumnarDataProvider( column.ColumnarDataProvider ):
+    """
+    Data provider that uses a DatasetDataProvider as its source and the
+    dataset's metadata to build settings for the ColumnarDataProvider it
+    inherits from.
+    """
+    def __init__( self, dataset, **kwargs ):
+        """
+        All kwargs are inherited from ColumnarDataProvider.
+        .. seealso:: column.ColumnarDataProvider
+
+        If no kwargs are given, this class will attempt to get those kwargs
+        from the dataset source's metadata.
+        If any kwarg is given, it will override and be used in place of
+        any metadata available.
+        """
+        dataset_source = DatasetDataProvider( dataset )
+        if not kwargs.get( 'column_types', None ):
+            indeces = kwargs.get( 'indeces', None )
+            kwargs[ 'column_types' ] = dataset_source.get_metadata_column_types( indeces=indeces )
+        super( DatasetColumnarDataProvider, self ).__init__( dataset_source, **kwargs )
+
+
+class DatasetDictDataProvider( column.DictDataProvider ):
+    """
+    Data provider that uses a DatasetDataProvider as its source and the
+    dataset's metadata to build settings for the DictDataProvider it
+    inherits from.
+    """
+    def __init__( self, dataset, **kwargs ):
+        """
+        All kwargs are inherited from DictDataProvider.
+        .. seealso:: column.DictDataProvider
+
+        If no kwargs are given, this class will attempt to get those kwargs
+        from the dataset source's metadata.
+        If any kwarg is given, it will override and be used in place of
+        any metadata available.
+
+        The relationship between column_names and indeces is more complex:
+
+        +-----------------+-------------------------------+-----------------------+
+        |                 | Indeces given                 | Indeces NOT given     |
+        +=================+===============================+=======================+
+        | Names given     | pull indeces, rename w/ names | pull by name          |
+        +-----------------+-------------------------------+-----------------------+
+        | Names NOT given | pull indeces, name w/ meta    | pull all, name w/meta |
+        +-----------------+-------------------------------+-----------------------+
+        """
+        dataset_source = DatasetDataProvider( dataset )
+
+        # TODO: getting too complicated - simplify at some lvl, somehow
+        # if no column_types given, get column_types from indeces (or all if indeces == None)
+        indeces = kwargs.get( 'indeces', None )
+        column_names = kwargs.get( 'column_names', None )
+
+        if not indeces and column_names:
+            # pull columns by name
+            indeces = kwargs[ 'indeces' ] = dataset_source.get_indeces_by_column_names( column_names )
+
+        elif indeces and not column_names:
+            # pull using indeces, name with meta
+            column_names = kwargs[ 'column_names' ] = dataset_source.get_metadata_column_names( indeces=indeces )
+
+        elif not indeces and not column_names:
+            # pull all indeces and name using metadata
+            column_names = kwargs[ 'column_names' ] = dataset_source.get_metadata_column_names( indeces=indeces )
+
+        # if no column_types given, use metadata column_types
+        if not kwargs.get( 'column_types', None ):
+            kwargs[ 'column_types' ] = dataset_source.get_metadata_column_types( indeces=indeces )
+
+        super( DatasetDictDataProvider, self ).__init__( dataset_source, **kwargs )
+
+
+# ----------------------------------------------------------------------------- provides a bio-relevant datum
+class GenomicRegionDataProvider( column.ColumnarDataProvider ):
+    """
+    Data provider that parses chromosome, start, and end data from a file
+    using the dataset's metadata settings.
+
+    Is a ColumnarDataProvider that uses a DatasetDataProvider as its source.
+
+    If `named_columns` is true, will return dictionaries with the keys
+    'chrom', 'start', 'end'.
+    """
+    # dictionary keys when named_columns=True
+    COLUMN_NAMES = [ 'chrom', 'start', 'end' ]
+    settings = {
+        'chrom_column'  : 'int',
+        'start_column'  : 'int',
+        'end_column'    : 'int',
+        'named_columns' : 'bool',
+    }
+
+    def __init__( self, dataset, chrom_column=None, start_column=None, end_column=None, named_columns=False, **kwargs ):
+        """
+        :param dataset: the Galaxy dataset whose file will be the source
+        :type dataset: model.DatasetInstance
+
+        :param chrom_column: optionally specify the chrom column index
+        :type chrom_column: int
+        :param start_column: optionally specify the start column index
+        :type start_column: int
+        :param end_column: optionally specify the end column index
+        :type end_column: int
+
+        :param named_columns: optionally return dictionaries keying each column
+            with 'chrom', 'start', or 'end'.
+            Optional: defaults to False
+        :type named_columns: bool
+        """
+        # TODO: allow passing in a string format e.g. "{chrom}:{start}-{end}"
+        dataset_source = DatasetDataProvider( dataset )
+
+        if chrom_column is None:
+            chrom_column = dataset_source.get_metadata_column_index_by_name( 'chromCol' )
+        if start_column is None:
+            start_column = dataset_source.get_metadata_column_index_by_name( 'startCol' )
+        if end_column is None:
+            end_column = dataset_source.get_metadata_column_index_by_name( 'endCol' )
+        indeces = [ chrom_column, start_column, end_column ]
+        if not all( map( lambda i: i is not None, indeces ) ):
+            raise ValueError( "Could not determine proper column indeces for" +
+                              " chrom, start, end: %s" % ( str( indeces ) ) )
+        kwargs.update({ 'indeces' : indeces })
+
+        if not kwargs.get( 'column_types', None ):
+            kwargs.update({ 'column_types' : dataset_source.get_metadata_column_types( indeces=indeces ) })
+
+        self.named_columns = named_columns
+        if self.named_columns:
+            self.column_names = self.COLUMN_NAMES
+
+        super( GenomicRegionDataProvider, self ).__init__( dataset_source, **kwargs )
+
+    def __iter__( self ):
+        parent_gen = super( GenomicRegionDataProvider, self ).__iter__()
+        for column_values in parent_gen:
+            if self.named_columns:
+                yield dict( zip( self.column_names, column_values ) )
+            else:
+                yield column_values
+
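+# An editor's sketch (hypothetical BED-like dataset): with named_columns=True
+# each datum is a dict rather than a list.
+#
+#     regions = GenomicRegionDataProvider( dataset, named_columns=True, limit=1 )
+#     # e.g. yields: { 'chrom': 'chr1', 'start': 100, 'end': 200 }
+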
+
+# TODO: this optionally provides the same data as the above and makes GenomicRegionDataProvider redundant
+#   GenomicRegionDataProvider is a better name, tho
+class IntervalDataProvider( column.ColumnarDataProvider ):
+    """
+    Data provider that parses chromosome, start, and end data (as well as strand
+    and name if set in the metadata) using the dataset's metadata settings.
+
+    If `named_columns` is true, will return dictionaries with the keys
+    'chrom', 'start', 'end' (and 'strand' and 'name' if available).
+    """
+    COLUMN_NAMES = [ 'chrom', 'start', 'end', 'strand', 'name' ]
+    settings = {
+        'chrom_column'  : 'int',
+        'start_column'  : 'int',
+        'end_column'    : 'int',
+        'strand_column' : 'int',
+        'name_column'   : 'int',
+        'named_columns' : 'bool',
+    }
+
+    def __init__( self, dataset, chrom_column=None, start_column=None, end_column=None,
+                  strand_column=None, name_column=None, named_columns=False, **kwargs ):
+        """
+        :param dataset: the Galaxy dataset whose file will be the source
+        :type dataset: model.DatasetInstance
+
+        :param named_columns: optionally return dictionaries keying each column
+            with 'chrom', 'start', 'end', 'strand', or 'name'.
+            Optional: defaults to False
+        :type named_columns: bool
+        """
+        # TODO: allow passing in a string format e.g. "{chrom}:{start}-{end}"
+        dataset_source = DatasetDataProvider( dataset )
+
+        # get genomic indeces and add strand and name
+        self.column_names = []
+        indeces = []
+        # TODO: this is sort of involved and ugly
+        if chrom_column is None:
+            chrom_column = dataset_source.get_metadata_column_index_by_name( 'chromCol' )
+            if chrom_column is not None:
+                self.column_names.append( 'chrom' )
+                indeces.append( chrom_column )
+        if start_column is None:
+            start_column = dataset_source.get_metadata_column_index_by_name( 'startCol' )
+            if start_column is not None:
+                self.column_names.append( 'start' )
+                indeces.append( start_column )
+        if end_column is None:
+            end_column = dataset_source.get_metadata_column_index_by_name( 'endCol' )
+            if end_column is not None:
+                self.column_names.append( 'end' )
+                indeces.append( end_column )
+        if strand_column is None:
+            strand_column = dataset_source.get_metadata_column_index_by_name( 'strandCol' )
+            if strand_column is not None:
+                self.column_names.append( 'strand' )
+                indeces.append( strand_column )
+        if name_column is None:
+            name_column = dataset_source.get_metadata_column_index_by_name( 'nameCol' )
+            if name_column is not None:
+                self.column_names.append( 'name' )
+                indeces.append( name_column )
+
+        kwargs.update({ 'indeces' : indeces })
+        if not kwargs.get( 'column_types', None ):
+            kwargs.update({ 'column_types' : dataset_source.get_metadata_column_types( indeces=indeces ) })
+
+        self.named_columns = named_columns
+
+        super( IntervalDataProvider, self ).__init__( dataset_source, **kwargs )
+
+    def __iter__( self ):
+        parent_gen = super( IntervalDataProvider, self ).__iter__()
+        for column_values in parent_gen:
+            if self.named_columns:
+                yield dict( zip( self.column_names, column_values ) )
+            else:
+                yield column_values
+
+
+# TODO: ideally with these next two - you'd allow pulling some region from the sequence
+#   WITHOUT reading the entire seq into memory - possibly apply some version of limit/offset
+class FastaDataProvider( base.FilteredDataProvider ):
+    """
+    Class that returns fasta format data as a series of dicts of the form::
+
+        {
+            id: <fasta header id>,
+            seq: <joined lines of nucleotide/amino data>
+        }
+    """
+    settings = {
+        'ids'  : 'list:str',
+    }
+
+    def __init__( self, source, ids=None, **kwargs ):
+        """
+        :param ids: optionally return only ids (and sequences) that are in this list.
+            Optional: defaults to None (provide all ids)
+        :type ids: list or None
+        """
+        source = bx_seq.fasta.FastaReader( source )
+        # TODO: validate is a fasta
+        super( FastaDataProvider, self ).__init__( source, **kwargs )
+        self.ids = ids
+        # how to do ids?
+
+    def __iter__( self ):
+        parent_gen = super( FastaDataProvider, self ).__iter__()
+        for fasta_record in parent_gen:
+            yield {
+                'id'  : fasta_record.name,
+                'seq' : fasta_record.text
+            }
+
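+# An editor's sketch (hypothetical file name; assumes bx-python's FastaReader
+# records expose `name` and `text` as used above):
+#
+#     with open( 'seqs.fasta' ) as f:
+#         for record in FastaDataProvider( f ):
+#             print record[ 'id' ], len( record[ 'seq' ] )
+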
+
+class TwoBitFastaDataProvider( DatasetDataProvider ):
+    """
+    Class that returns fasta format data as a series of dicts of the form::
+
+        {
+            id: <fasta header id>,
+            seq: <joined lines of nucleotide/amino data>
+        }
+    """
+    settings = {
+        'ids'  : 'list:str',
+    }
+
+    def __init__( self, source, ids=None, **kwargs ):
+        """
+        :param ids: optionally return only ids (and sequences) that are in this list.
+            Optional: defaults to None (provide all ids)
+        :type ids: list or None
+        """
+        source = bx_seq.twobit.TwoBitFile( source )
+        # TODO: validate is a 2bit
+        super( TwoBitFastaDataProvider, self ).__init__( source, **kwargs )
+        # could do in order provided with twobit
+        self.ids = ids or self.source.keys()
+
+    def __iter__( self ):
+        for id_ in self.ids:
+            yield {
+                'id': id_,
+                'seq': self.source[ id_ ]
+            }
+
+
+# TODO:
+class WiggleDataProvider( base.LimitedOffsetDataProvider ):
+    """
+    Class that returns chrom, pos, data from a wiggle source.
+    """
+    COLUMN_NAMES = [ 'chrom', 'pos', 'value' ]
+    settings = {
+        'named_columns' : 'bool',
+        'column_names'  : 'list:str',
+    }
+
+    def __init__( self, source, named_columns=False, column_names=None, **kwargs ):
+        """
+        :param named_columns: optionally return dictionaries keying each column
+            with 'chrom', 'pos', or 'value'.
+            Optional: defaults to False
+        :type named_columns: bool
+
+        :param column_names: an ordered list of strings that will be used as the keys
+            for each column in the returned dictionaries.
+            The number of key/value pairs in each returned dictionary will
+            be at most the number of column names provided.
+        :type column_names: list of str
+        """
+        # TODO: validate is a wig
+        # still good to maintain a ref to the raw source because the Reader won't
+        self.raw_source = source
+        self.parser = bx_wig.Reader( source )
+        super( WiggleDataProvider, self ).__init__( self.parser, **kwargs )
+
+        self.named_columns = named_columns
+        self.column_names = column_names or self.COLUMN_NAMES
+
+    def __iter__( self ):
+        parent_gen = super( WiggleDataProvider, self ).__iter__()
+        for three_tuple in parent_gen:
+            if self.named_columns:
+                yield dict( zip( self.column_names, three_tuple ) )
+            else:
+                # list is not strictly necessary - but consistent
+                yield list( three_tuple )
+
+
+class BigWigDataProvider( base.LimitedOffsetDataProvider ):
+    """
+    Class that returns chrom, pos, data from a bigwig source.
+    """
+    COLUMN_NAMES = [ 'chrom', 'pos', 'value' ]
+    settings = {
+        'named_columns' : 'bool',
+        'column_names'  : 'list:str',
+    }
+
+    def __init__( self, source, chrom, start, end, named_columns=False, column_names=None, **kwargs ):
+        """
+        :param chrom: which chromosome within the bigwig file to extract data for
+        :type chrom: str
+        :param start: the start of the region from which to extract data
+        :type start: int
+        :param end: the end of the region from which to extract data
+        :type end: int
+
+        :param named_columns: optionally return dictionaries keying each column
+            with 'chrom', 'pos', or 'value'.
+            Optional: defaults to False
+        :type named_columns: bool
+
+        :param column_names: an ordered list of strings that will be used as the keys
+            for each column in the returned dictionaries.
+            The number of key/value pairs in each returned dictionary will
+            be at most the number of column names provided.
+        :type column_names: list of str
+        """
+        raise NotImplementedError( 'Work in progress' )
+        # TODO: validate is a bigwig
+        # still good to maintain a ref to the raw source because the Reader won't
+        self.raw_source = source
+        self.parser = bx_bbi.bigwig_file.BigWigFile( source )
+        super( BigWigDataProvider, self ).__init__( self.parser, **kwargs )
+
+        self.named_columns = named_columns
+        self.column_names = column_names or self.COLUMN_NAMES
+
+    def __iter__( self ):
+        parent_gen = super( BigWigDataProvider, self ).__iter__()
+        for three_tuple in parent_gen:
+            if self.named_columns:
+                yield dict( zip( self.column_names, three_tuple ) )
+            else:
+                # list is not strictly necessary - but consistent
+                yield list( three_tuple )
+
+
+# ----------------------------------------------------------------------------- binary, external conversion or tool
+class DatasetSubprocessDataProvider( external.SubprocessDataProvider ):
+    """
+    Create a source from running a subprocess on a dataset's file.
+
+    Uses a subprocess as its source and has a dataset (generally as an input file
+    for the process).
+    """
+    # TODO: below should be a subclass of this and not RegexSubprocess
+    def __init__( self, dataset, *args, **kwargs ):
+        """
+        :param args: the list of strings used to build commands.
+        :type args: variadic function args
+        """
+        raise NotImplementedError( 'Abstract class' )
+        super( DatasetSubprocessDataProvider, self ).__init__( *args, **kwargs )
+        self.dataset = dataset
+
+
+class SamtoolsDataProvider( line.RegexLineDataProvider ):
+    """
+    Data provider that uses samtools on a Sam or Bam file as its source.
+
+    This can be piped through other providers (column, map, genome region, etc.).
+
+    .. note:: only the samtools 'view' command is currently implemented.
+    """
+    FLAGS_WO_ARGS = 'bhHSu1xXcB'
+    FLAGS_W_ARGS = 'fFqlrs'
+    VALID_FLAGS = FLAGS_WO_ARGS + FLAGS_W_ARGS
+
+    def __init__( self, dataset, options_string='', options_dict=None, regions=None, **kwargs ):
+        """
+        :param options_string: samtools options in string form (flags separated
+            by spaces)
+            Optional: defaults to ''
+        :type options_string: str
+        :param options_dict: dictionary of samtools options
+            Optional: defaults to None
+        :type options_dict: dict or None
+        :param regions: list of samtools regions strings
+            Optional: defaults to None
+        :type regions: list of str or None
+        """
+        # TODO: into validate_source
+
+        # precondition: dataset.datatype is a tabular.Sam or binary.Bam
+        self.dataset = dataset
+
+        options_dict = options_dict or {}
+        # ensure regions are strings
+        regions = [ str( r ) for r in regions ] if regions else []
+
+        # TODO: view only for now
+        # TODO: not properly using/overriding super's validate_opts, command here
+        subcommand = 'view'
+        # TODO:?? do we need a path to samtools?
+        subproc_args = self.build_command_list( subcommand, options_string, options_dict, regions )
+        # TODO: the composition/inheritance here doesn't make a lot of sense
+        subproc_provider = external.SubprocessDataProvider( *subproc_args )
+        super( SamtoolsDataProvider, self ).__init__( subproc_provider, **kwargs )
+
+    def build_command_list( self, subcommand, options_string, options_dict, regions ):
+        """
+        Convert all init args to list form.
+        """
+        command = [ 'samtools', subcommand ]
+        # add options and switches, input file, regions list (if any)
+        command.extend( self.to_options_list( options_string, options_dict ) )
+        command.append( self.dataset.file_name )
+        command.extend( regions )
+        return command
+
+    def to_options_list( self, options_string, options_dict ):
+        """
+        Convert both options_string and options_dict to list form
+        while filtering out non-'valid' options.
+        """
+        opt_list = []
+
+        # strip out any user supplied bash switch formatting -> string of option chars
+        #   then compress to single option string of unique, VALID flags with prefixed bash switch char '-'
+        options_string = options_string.strip( '- ' )
+        validated_flag_list = set([ flag for flag in options_string if flag in self.FLAGS_WO_ARGS ])
+
+        # if sam add -S
+        # TODO: not the best test in the world...
+        if( ( self.dataset.ext == 'sam' ) and
+                ( 'S' not in validated_flag_list ) ):
+            validated_flag_list.add( 'S' )
+
+        if validated_flag_list:
+            opt_list.append( '-' + ''.join( validated_flag_list ) )
+
+        for flag, arg in options_dict.items():
+            if flag in self.FLAGS_W_ARGS:
+                opt_list.extend([ '-' + flag, str( arg ) ])
+
+        return opt_list
+
+    @classmethod
+    def extract_options_from_dict( cls, dictionary ):
+        """
+        Separates valid samtools key/value pair options from a dictionary and
+        returns both as a 2-tuple.
+        """
+        # handy for extracting options from kwargs - but otherwise...
+        # TODO: could be abstracted to util.extract( dict, valid_keys_list )
+        options_dict = {}
+        new_kwargs = {}
+        for key, value in dictionary.items():
+            if key in cls.FLAGS_W_ARGS:
+                options_dict[ key ] = value
+            else:
+                new_kwargs[ key ] = value
+        return options_dict, new_kwargs
+
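+# An editor's sketch of the command SamtoolsDataProvider builds (hypothetical
+# dataset; flag order within the combined switch is not guaranteed since valid
+# flags are collected into a set):
+#
+#     provider = SamtoolsDataProvider( dataset, options_string='bh',
+#                                      options_dict={ 'q': 30 },
+#                                      regions=[ 'chr1:100-200' ] )
+#     # runs roughly: samtools view -bh -q 30 <dataset.file_name> chr1:100-200
+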
+
+class BcftoolsDataProvider( line.RegexLineDataProvider ):
+    """
+    Data provider that uses bcftools on a bcf (or vcf?) file as its source.
+
+    This can be piped through other providers (column, map, genome region, etc.).
+    """
+    def __init__( self, dataset, **kwargs ):
+        # TODO: as samtools
+        raise NotImplementedError()
+        super( BcftoolsDataProvider, self ).__init__( dataset, **kwargs )
+
+
+class BGzipTabixDataProvider( base.DataProvider ):
+    """
+    Data provider that uses g(un)zip on a file as its source.
+
+    This can be piped through other providers (column, map, genome region, etc.).
+    """
+    def __init__( self, dataset, **kwargs ):
+        # TODO: as samtools - need more info on output format
+        raise NotImplementedError()
+        super( BGzipTabixDataProvider, self ).__init__( dataset, **kwargs )
+
+
+class SQliteDataProvider( base.DataProvider ):
+    """
+    Data provider that uses a sqlite database file as its source.
+
+    Allows any query to be run and returns the resulting rows as sqlite3 row objects
+    """
+    settings = {
+        'query': 'str'
+    }
+
+    def __init__( self, source, query=None, **kwargs ):
+        self.query = query
+        self.connection = sqlite.connect(source.dataset.file_name)
+        super( SQliteDataProvider, self ).__init__( source, **kwargs )
+
+    def __iter__( self ):
+        if (self.query is not None) and sqlite.is_read_only_query(self.query):
+            for row in self.connection.cursor().execute(self.query):
+                yield row
+        else:
+            yield
+
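+# An editor's sketch (hypothetical table name): only read-only queries are
+# executed; anything else yields a single None.
+#
+#     rows = SQliteDataProvider( source, query='SELECT * FROM features LIMIT 10' )
+#     for row in rows:
+#         print row
+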
+
+class SQliteDataTableProvider( base.DataProvider ):
+    """
+    Data provider that uses a sqlite database file as its source.
+    Allows any query to be run and returns the resulting rows as arrays of arrays
+    """
+    settings = {
+        'query': 'str',
+        'headers': 'bool',
+        'limit': 'int'
+    }
+
+    def __init__( self, source, query=None, headers=False, limit=sys.maxsize, **kwargs ):
+        self.query = query
+        self.headers = headers
+        self.limit = limit
+        self.connection = sqlite.connect(source.dataset.file_name)
+        super( SQliteDataTableProvider, self ).__init__( source, **kwargs )
+
+    def __iter__( self ):
+        if (self.query is not None) and sqlite.is_read_only_query(self.query):
+            cur = self.connection.cursor()
+            results = cur.execute(self.query)
+            if self.headers:
+                yield [col[0] for col in cur.description]
+            for i, row in enumerate(results):
+                if i >= self.limit:
+                    break
+                yield [val for val in row]
+        else:
+            yield
+
+
+class SQliteDataDictProvider( base.DataProvider ):
+    """
+    Data provider that uses a sqlite database file as its source.
+    Allows any query to be run and returns the resulting rows as arrays of dicts
+    """
+    settings = {
+        'query': 'str'
+    }
+
+    def __init__( self, source, query=None, **kwargs ):
+        self.query = query
+        self.connection = sqlite.connect(source.dataset.file_name)
+        super( SQliteDataDictProvider, self ).__init__( source, **kwargs )
+
+    def __iter__( self ):
+        if (self.query is not None) and sqlite.is_read_only_query(self.query):
+            cur = self.connection.cursor()
+            for row in cur.execute(self.query):
+                yield [dict((cur.description[i][0], value) for i, value in enumerate(row))]
+        else:
+            yield
diff --git a/lib/galaxy/datatypes/dataproviders/decorators.py b/lib/galaxy/datatypes/dataproviders/decorators.py
new file mode 100644
index 0000000..99e01e1
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/decorators.py
@@ -0,0 +1,152 @@
+"""
+DataProvider related decorators.
+"""
+
+# I'd like to decorate the factory methods that give data_providers by the name they can be accessed from. e.g.:
+# @provides( 'id_seq' ) # where 'id_seq' is some 'data_format' string/alias
+# def get_id_seq_provider( dataset, **settings ):
+
+# then in some central dispatch (maybe data.Data), have it look up the proper method by the data_format string
+
+# also it would be good to have this decorator maintain a list of available providers (for a datatype)
+
+# i don't particularly want to cut up method names ( get_([\w_]*)_provider )
+# adapted from: http://stackoverflow.com
+#    /questions/14095616/python-can-i-programmatically-decorate-class-methods-from-a-class-instance
+
+from functools import wraps
+import urllib2
+import copy
+
+import logging
+log = logging.getLogger( __name__ )
+
+_DATAPROVIDER_CLASS_MAP_KEY = 'dataproviders'
+_DATAPROVIDER_METHOD_NAME_KEY = '_dataprovider_name'
+
+
+def has_dataproviders( cls ):
+    """
+    Wraps a class (generally a Datatype), finds methods within that have been
+    decorated with `@dataprovider` and adds them, by their name, to a map
+    in the class.
+
+    This allows a class to maintain a name -> method map, effectively
+    'registering' dataprovider factory methods::
+
+        @has_dataproviders
+        class MyDtype( data.Data ):
+
+            @dataprovider_factory( 'bler' )
+            def provide_some_bler( self, dataset, **settings ):
+                '''blerblerbler'''
+                dataset_source = providers.DatasetDataProvider( dataset )
+                # ... chain other, intermediate providers here
+                return providers.BlerDataProvider( dataset_source, **settings )
+
+        # use the base method in data.Data
+        provider = dataset.datatype.dataprovider( dataset, 'bler',
+                                                  my_setting='blah', ... )
+        # OR directly from the map
+        provider = dataset.datatype.dataproviders[ 'bler' ]( dataset,
+                                                             my_setting='blah', ... )
+    """
+    # init the class dataproviders map if necessary
+    if not hasattr( cls, _DATAPROVIDER_CLASS_MAP_KEY ):
+        setattr( cls, _DATAPROVIDER_CLASS_MAP_KEY, {} )
+    else:
+        # need to deepcopy or subclasses will modify super.dataproviders as well
+        existing_dataproviders = getattr( cls, _DATAPROVIDER_CLASS_MAP_KEY )
+        copied_dataproviders = copy.deepcopy( existing_dataproviders )
+        setattr( cls, _DATAPROVIDER_CLASS_MAP_KEY, copied_dataproviders )
+
+    dataproviders = getattr( cls, _DATAPROVIDER_CLASS_MAP_KEY )
+
+    # scan for methods with dataprovider names and add them to the map
+    # note: this has a 'cascading' effect
+    #       where it's possible to override a super's provider with a sub's
+    for attr_key, attr_value in cls.__dict__.iteritems():
+        # can't use isinstance( attr_value, MethodType ) bc of wrapping
+        if( ( callable( attr_value ) ) and
+                ( not attr_key.startswith( "__" ) ) and
+                ( getattr( attr_value, _DATAPROVIDER_METHOD_NAME_KEY, None ) ) ):
+            name = getattr( attr_value, _DATAPROVIDER_METHOD_NAME_KEY )
+            dataproviders[ name ] = attr_value
+    return cls
+
+
+def dataprovider_factory( name, settings=None ):
+    """
+    Wraps a class method, marks it as a dataprovider factory, and creates a
+    function to parse query strings into __init__ arguments, attaching it as the
+    `parse_query_string_settings` attribute of the factory function.
+
+    An example use of the `parse_query_string_settings`::
+
+        kwargs = dataset.datatype.dataproviders[ provider ].parse_query_string_settings( query_kwargs )
+        return list( dataset.datatype.dataprovider( dataset, provider, **kwargs ) )
+
+    :param name: what name/key to register the factory under in `cls.dataproviders`
+    :type name: any hashable var
+    :param settings: dictionary containing key/type pairs for parsing query strings
+        to __init__ arguments
+    :type settings: dictionary
+    """
+    # TODO:?? use *args for settings allowing multiple dictionaries
+    # make a function available through the name->provider dispatch to parse query strings
+    #   callable like:
+    # settings_dict = dataproviders[ provider_name ].parse_query_string_settings( query_kwargs )
+    # TODO: ugh - overly complicated but the best I could think of
+    def parse_query_string_settings( query_kwargs ):
+        return _parse_query_string_settings( query_kwargs, settings )
+
+    def named_dataprovider_factory( func ):
+        setattr( func, _DATAPROVIDER_METHOD_NAME_KEY, name )
+
+        setattr( func, 'parse_query_string_settings', parse_query_string_settings )
+        setattr( func, 'settings', settings )
+        # TODO: I want a way to inherit settings from the previous provider( this_name ) instead of defining over and over
+
+        @wraps( func )
+        def wrapped_dataprovider_factory( self, *args, **kwargs ):
+            return func( self, *args, **kwargs )
+        return wrapped_dataprovider_factory
+    return named_dataprovider_factory
+
+
+def _parse_query_string_settings( query_kwargs, settings=None ):
+    """
+    Parse the values in `query_kwargs` from strings to the proper types
+    listed in the same key in `settings`.
+    """
+    # TODO: this was a relatively late addition: review and re-think
+    def list_from_query_string( s ):
+        # assume csv
+        return s.split( ',' )
+
+    parsers = {
+        'int'   : int,
+        'float' : float,
+        'bool'  : bool,
+        'list:str'      : lambda s: list_from_query_string( s ),
+        'list:escaped'  : lambda s: [ urllib2.unquote( e ) for e in list_from_query_string( s ) ],
+        'list:int'      : lambda s: [ int( i ) for i in list_from_query_string( s ) ],
+    }
+    settings = settings or {}
+    # yay! yet another set of query string parsers! <-- sarcasm
+    # work through the keys in settings finding matching keys in query_kwargs
+    #   if found in both, get the expected/needed type from settings and store the new parsed value
+    #   if we can't parse it (no parser, bad value), delete the key from query_kwargs so the provider will use the defaults
+    for key in settings:
+        if key in query_kwargs:
+            # TODO: this would be the place to sanitize any strings
+            query_value = query_kwargs[ key ]
+            needed_type = settings[ key ]
+            if needed_type != 'str':
+                try:
+                    query_kwargs[ key ] = parsers[ needed_type ]( query_value )
+                except ( KeyError, ValueError ):
+                    del query_kwargs[ key ]
+
+        # TODO:?? do we want to remove query_kwarg entries NOT in settings?
+    return query_kwargs
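
A minimal sketch of how these decorators fit together (the `MyDatatype` class, the
'my_lines' provider name, and the trivial factory body are hypothetical; a real
factory would return a chained DataProvider as in the `has_dataproviders`
docstring above):

    from galaxy.datatypes.dataproviders import decorators

    @decorators.has_dataproviders
    class MyDatatype( object ):

        @decorators.dataprovider_factory( 'my_lines', settings={ 'limit' : 'int' } )
        def my_lines_provider( self, dataset, **settings ):
            # a real factory would wrap `dataset` in a provider chain here
            return iter( [ 'line1', 'line2' ] )

    # the factory is registered under its name in the class-level map
    factory = MyDatatype.dataproviders[ 'my_lines' ]
    # query-string values arrive as strings; the `settings` dict drives coercion
    kwargs = factory.parse_query_string_settings( { 'limit' : '10' } )
    # kwargs is now { 'limit' : 10 }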
diff --git a/lib/galaxy/datatypes/dataproviders/exceptions.py b/lib/galaxy/datatypes/dataproviders/exceptions.py
new file mode 100644
index 0000000..c9e8e1d
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/exceptions.py
@@ -0,0 +1,34 @@
+"""
+DataProvider related exceptions.
+"""
+
+
+class InvalidDataProviderSource( TypeError ):
+    """
+    Raised when an unusable source is passed to a provider.
+    """
+    def __init__( self, source=None, msg='' ):
+        msg = msg or 'Invalid source for provider: %s' % ( source )
+        super( InvalidDataProviderSource, self ).__init__( msg )
+
+
+class NoProviderAvailable( TypeError ):
+    """
+    Raised when no provider is found for the given `format_requested`.
+
+    :param factory_source:      the item that the provider was requested from
+    :param format_requested:    the format_requested (a hashable key to access
+        `factory_source.datatypes` with)
+
+    Both params are attached to this class and accessible to the try-catch
+    receiver.
+
+    Meant to be used within a class that builds dataproviders (e.g. a Datatype)
+    """
+    def __init__( self, factory_source, format_requested=None, msg='' ):
+        self.factory_source = factory_source
+        self.format_requested = format_requested
+        msg = msg or 'No provider available in factory_source "%s" for format requested' % ( str( factory_source ) )
+        if self.format_requested:
+            msg += ': "%s"' % ( self.format_requested )
+        super( NoProviderAvailable, self ).__init__( msg )
diff --git a/lib/galaxy/datatypes/dataproviders/external.py b/lib/galaxy/datatypes/dataproviders/external.py
new file mode 100644
index 0000000..4d5eac4
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/external.py
@@ -0,0 +1,163 @@
+"""
+Data providers that iterate over a source that is not in memory
+or not in a file.
+"""
+
+import base
+import gzip
+import line
+import subprocess
+import tempfile
+import urllib
+import urllib2
+
+_TODO = """
+YAGNI: ftp, image, cryptos, sockets
+job queue
+admin: admin server log rgx/stats, ps aux
+"""
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# ----------------------------------------------------------------------------- server subprocess / external prog
+class SubprocessDataProvider( base.DataProvider ):
+    """
+    Data provider that uses the output of an intermediate program, run as a
+    subprocess, as its data source.
+    """
+    # TODO: need better ways of checking returncode, stderr for errors and raising
+    def __init__( self, *args, **kwargs ):
+        """
+        :param args: the list of strings used to build commands.
+        :type args: variadic function args
+        """
+        self.exit_code = None
+        command_list = args
+        self.popen = self.subprocess( *command_list, **kwargs )
+        # TODO:?? not communicate()?
+        super( SubprocessDataProvider, self ).__init__( self.popen.stdout )
+        self.exit_code = self.popen.poll()
+
+    # NOTE: there's little protection here against sending a ';' and a dangerous command
+    # but...we're all adults here, right? ...RIGHT?!
+    def subprocess( self, *command_list, **kwargs ):
+        """
+        :param args: the list of strings used as commands.
+        :type args: variadic function args
+        """
+        try:
+            # how expensive is this?
+            popen = subprocess.Popen( command_list, stderr=subprocess.PIPE, stdout=subprocess.PIPE )
+            log.info( 'opened subprocess (%s), PID: %s' % ( str( command_list ), str( popen.pid ) ) )
+
+        except OSError as os_err:
+            command_str = ' '.join( command_list )
+            raise OSError( ' '.join([ str( os_err ), ':', command_str ]) )
+
+        return popen
+
+    def __exit__( self, *args ):
+        # poll the subprocess for an exit code
+        self.exit_code = self.popen.poll()
+        log.info( '%s.__exit__, exit_code: %s' % ( str( self ), str( self.exit_code ) ) )
+        return super( SubprocessDataProvider, self ).__exit__( *args )
+
+    def __str__( self ):
+        # provide the pid and current return code
+        source_str = ''
+        if hasattr( self, 'popen' ):
+            source_str = '%s:%s' % ( str( self.popen.pid ), str( self.popen.poll() ) )
+        return '%s(%s)' % ( self.__class__.__name__, str( source_str ) )
+
+
+class RegexSubprocessDataProvider( line.RegexLineDataProvider ):
+    """
+    RegexLineDataProvider that uses a SubprocessDataProvider as its data source.
+    """
+    # this is a convenience class and not really all that necessary...
+    def __init__( self, *args, **kwargs ):
+        # using subprocess as proxy data source in filtered line prov.
+        subproc_provider = SubprocessDataProvider( *args )
+        super( RegexSubprocessDataProvider, self ).__init__( subproc_provider, **kwargs )
+
+
+# ----------------------------------------------------------------------------- other apis
+class URLDataProvider( base.DataProvider ):
+    """
+    Data provider that uses the contents of a URL for its data source.
+
+    This can be piped through other providers (column, map, genome region, etc.).
+    """
+    VALID_METHODS = ( 'GET', 'POST' )
+
+    def __init__( self, url, method='GET', data=None, **kwargs ):
+        """
+        :param url: the base URL to open.
+        :param method: the HTTP method to use.
+            Optional: defaults to 'GET'
+        :param data: any data to pass (either in query for 'GET'
+            or as post data with 'POST')
+        :type data: dict
+        """
+        self.url = url
+        self.method = method
+
+        self.data = data or {}
+        encoded_data = urllib.urlencode( self.data )
+
+        if method == 'GET':
+            self.url += '?%s' % ( encoded_data )
+            opened = urllib2.urlopen( self.url )
+        elif method == 'POST':
+            opened = urllib2.urlopen( url, encoded_data )
+        else:
+            raise ValueError( 'Not a valid method: %s' % ( method ) )
+
+        super( URLDataProvider, self ).__init__( opened, **kwargs )
+        # NOTE: the request object is now accessible as self.source
+
+    def __enter__( self ):
+        return self
+
+    def __exit__( self, *args ):
+        self.source.close()
+
+
+# ----------------------------------------------------------------------------- generic compression
+class GzipDataProvider( base.DataProvider ):
+    """
+    Data provider that uses g(un)zip on a file as its source.
+
+    This can be piped through other providers (column, map, genome region, etc.).
+    """
+    def __init__( self, source, **kwargs ):
+        unzipped = gzip.GzipFile( source, 'rb' )
+        super( GzipDataProvider, self ).__init__( unzipped, **kwargs )
+        # NOTE: the GzipFile is now accessible in self.source
+
+
+# ----------------------------------------------------------------------------- intermediate tempfile
+class TempfileDataProvider( base.DataProvider ):
+    """
+    Writes the data from the given source to a temp file, allowing
+    it to be used as a source where a file_name is needed (e.g. as a parameter
+    to a command line tool: samtools view -t <this_provider.source.file_name>)
+    """
+    def __init__( self, source, **kwargs ):
+        # TODO:
+        raise NotImplementedError()
+        # write the file here
+        self.create_file()
+        super( TempfileDataProvider, self ).__init__( self.tmp_file, **kwargs )
+
+    def create_file( self ):
+        self.tmp_file = tempfile.NamedTemporaryFile()
+        return self.tmp_file
+
+    def write_to_file( self ):
+        parent_gen = super( TempfileDataProvider, self ).__iter__()
+        with open( self.tmp_file.name, 'w' ) as open_file:
+            for datum in parent_gen:
+                open_file.write( datum + '\n' )
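
A hedged usage sketch for the providers above (the command, regex, and file name
are illustrative; assumes a POSIX `ps` binary and a gzipped file on disk):

    # yield only the lines of `ps aux` output that match 'python'
    ps_provider = RegexSubprocessDataProvider( 'ps', 'aux', regex_list=[ 'python' ] )
    for line in ps_provider:
        print( line )

    # iterate over the decompressed lines of a gzipped file
    gz_provider = GzipDataProvider( 'data.tsv.gz' )
    for line in gz_provider:
        print( line )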
diff --git a/lib/galaxy/datatypes/dataproviders/hierarchy.py b/lib/galaxy/datatypes/dataproviders/hierarchy.py
new file mode 100644
index 0000000..3d75f17
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/hierarchy.py
@@ -0,0 +1,139 @@
+"""
+Dataproviders that iterate over hierarchical/tree data from their sources.
+"""
+
+import line
+from xml.etree.ElementTree import Element, iterparse
+
+_TODO = """
+"""
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# ----------------------------------------------------------------------------- hierarchal/tree data providers
+class HierarchalDataProvider( line.BlockDataProvider ):
+    """
+    Class for formats in which a datum may have parent or child
+    data.
+
+    e.g. XML, HTML, GFF3, Phylogenetic
+    """
+    def __init__( self, source, **kwargs ):
+        # TODO: (and defer to better (than I can write) parsers for each subtype)
+        super( HierarchalDataProvider, self ).__init__( source, **kwargs )
+
+
+# ----------------------------------------------------------------------------- xml
+class XMLDataProvider( HierarchalDataProvider ):
+    """
+    Data provider that converts selected XML elements to dictionaries.
+    """
+    # using xml.etree's iterparse method to keep memory use down
+    # TODO:   this, however (AFAIK), prevents the use of xpath
+    settings = {
+        'selector'  : 'str',  # urlencoded
+        'max_depth' : 'int',
+    }
+    ITERPARSE_ALL_EVENTS = ( 'start', 'end', 'start-ns', 'end-ns' )
+    # TODO: move what's appropriate into the superclass
+
+    def __init__( self, source, selector=None, max_depth=None, **kwargs ):
+        """
+        :param  selector:   some partial string in the desired tags to return
+        :param  max_depth:  the number of generations of descendants to return
+        """
+        self.selector = selector
+        self.max_depth = max_depth
+        self.namespaces = {}
+
+        super( XMLDataProvider, self ).__init__( source, **kwargs )
+
+    def matches_selector( self, element, selector=None ):
+        """
+        Returns true if the ``element`` matches the ``selector``.
+
+        :param  element:    an XML ``Element``
+        :param  selector:   some partial string in the desired tags to return
+
+        Change point for more sophisticated selectors.
+        """
+        # search for partial match of selector to the element tag
+        # TODO: add more flexibility here w/o re-implementing xpath
+        # TODO: fails with '#' - browser thinks it's an anchor - use urlencode
+        # TODO: need removal/replacement of etree namespacing here - then move to string match
+        return bool( ( selector is None ) or
+                     ( isinstance( element, Element ) and selector in element.tag ) )
+
+    def element_as_dict( self, element ):
+        """
+        Converts an XML element (its text, tag, and attributes) to dictionary form.
+
+        :param  element:    an XML ``Element``
+        """
+        # TODO: Key collision is unlikely here, but still should be better handled
+        return {
+            'tag'      : element.tag,
+            'text'     : element.text.strip() if element.text else None,
+            # needs shallow copy to protect v. element.clear()
+            'attrib'   : dict( element.attrib )
+        }
+
+    def get_children( self, element, max_depth=None ):
+        """
+        Yield all children of element (and their children - recursively)
+        in dictionary form.
+
+        :param  element:    an XML ``Element``
+        :param  max_depth:  the number of generations of descendants to return
+        """
+        if not isinstance( max_depth, int ) or max_depth >= 1:
+            for child in element.getchildren():
+                child_data = self.element_as_dict( child )
+
+                next_depth = max_depth - 1 if isinstance( max_depth, int ) else None
+                grand_children = list( self.get_children( child, next_depth ) )
+                if grand_children:
+                    child_data[ 'children' ] = grand_children
+
+                yield child_data
+
+    def __iter__( self ):
+        context = iterparse( self.source, events=self.ITERPARSE_ALL_EVENTS )
+        context = iter( context )
+
+        selected_element = None
+        for event, element in context:
+            if event == 'start-ns':
+                ns, uri = element
+                self.namespaces[ ns ] = uri
+
+            elif event == 'start':
+                if( ( selected_element is None ) and
+                        ( self.matches_selector( element, self.selector ) ) ):
+                    # start tag of selected element - wait for 'end' to emit/yield
+                    selected_element = element
+
+            elif event == 'end':
+                if( ( selected_element is not None ) and ( element == selected_element ) ):
+                    self.num_valid_data_read += 1
+
+                    # offset
+                    if self.num_valid_data_read > self.offset:
+                        # convert to dict and yield
+                        selected_element_dict = self.element_as_dict( selected_element )
+                        children = list( self.get_children( selected_element, self.max_depth ) )
+                        if children:
+                            selected_element_dict[ 'children' ] = children
+                        yield selected_element_dict
+
+                        # limit
+                        self.num_data_returned += 1
+                        if self.limit is not None and self.num_data_returned >= self.limit:
+                            break
+
+                    selected_element.clear()
+                    selected_element = None
+
+                self.num_data_read += 1
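
A minimal sketch of pulling selected elements out of an XML file (the file name
and 'tool' selector are hypothetical; assumes the wrapped source ends up usable
by `iterparse` as a file-like object):

    with open( 'toolbox.xml' ) as xml_file:
        provider = XMLDataProvider( xml_file, selector='tool', max_depth=1 )
        for element_dict in provider:
            # each datum is a dict with 'tag', 'text', 'attrib' (and possibly 'children')
            print( element_dict[ 'tag' ], element_dict[ 'attrib' ] )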
diff --git a/lib/galaxy/datatypes/dataproviders/line.py b/lib/galaxy/datatypes/dataproviders/line.py
new file mode 100644
index 0000000..0809773
--- /dev/null
+++ b/lib/galaxy/datatypes/dataproviders/line.py
@@ -0,0 +1,270 @@
+"""
+Dataproviders that iterate over lines from their sources.
+"""
+
+import collections
+import re
+from . import base
+
+import logging
+log = logging.getLogger( __name__ )
+
+_TODO = """
+line offsets (skip to some place in a file) needs to work more efficiently than simply iterating till we're there
+    capture tell() when provider is done
+        def stop( self ): self.endpoint = source.tell(); raise StopIteration()
+a lot of the hierarchy here could be flattened since we're implementing pipes
+"""
+
+
+class FilteredLineDataProvider( base.LimitedOffsetDataProvider ):
+    """
+    Data provider that yields lines of data from its source allowing
+    optional control over which line to start on and how many lines
+    to return.
+    """
+    DEFAULT_COMMENT_CHAR = '#'
+    settings = {
+        'strip_lines'   : 'bool',
+        'strip_newlines': 'bool',
+        'provide_blank' : 'bool',
+        'comment_char'  : 'str',
+    }
+
+    def __init__( self, source, strip_lines=True, strip_newlines=False, provide_blank=False,
+                  comment_char=DEFAULT_COMMENT_CHAR, **kwargs ):
+        """
+        :param strip_lines: remove whitespace from the beginning and end
+            of each line (or not).
+            Optional: defaults to True
+        :type strip_lines: bool
+
+        :param strip_newlines: remove newlines only
+            (only functions when ``strip_lines`` is false)
+            Optional: defaults to False
+        :type strip_newlines: bool
+
+        :param provide_blank: are empty lines considered valid and provided?
+            Optional: defaults to False
+        :type provide_blank: bool
+
+        :param comment_char: character(s) that indicate a line isn't data (a comment)
+            and should not be provided.
+            Optional: defaults to '#'
+        :type comment_char: str
+        """
+        super( FilteredLineDataProvider, self ).__init__( source, **kwargs )
+        self.strip_lines = strip_lines
+        self.strip_newlines = strip_newlines
+        self.provide_blank = provide_blank
+        self.comment_char = comment_char
+
+    def filter( self, line ):
+        """
+        Determines whether to provide line or not.
+
+        :param line: the incoming line from the source
+        :type line: str
+        :returns: a line or `None`
+        """
+        if line is not None:
+            # ??: shouldn't it strip newlines regardless; if not, why not use one of the base dataproviders
+            if self.strip_lines:
+                line = line.strip()
+            elif self.strip_newlines:
+                line = line.strip( '\n' )
+            if not self.provide_blank and line == '':
+                return None
+            elif self.comment_char and line.startswith( self.comment_char ):
+                return None
+
+        return super( FilteredLineDataProvider, self ).filter( line )
+
+
+class RegexLineDataProvider( FilteredLineDataProvider ):
+    """
+    Data provider that yields only those lines of data from its source
+    that do (or do not, when `invert` is True) match one or more of the given
+    list of regexes.
+
+    .. note:: the regex matches are effectively OR'd (if **any** regex matches
+        the line it is considered valid and will be provided).
+    """
+    settings = {
+        'regex_list'    : 'list:escaped',
+        'invert'        : 'bool',
+    }
+
+    def __init__( self, source, regex_list=None, invert=False, **kwargs ):
+        """
+        :param regex_list: list of strings or regular expression strings that will
+            be `match`ed against each line.
+            Optional: defaults to `None` (no matching)
+        :type regex_list: list (of str)
+
+        :param invert: if `True` will provide only lines that **do not match**.
+            Optional: defaults to False
+        :type invert: bool
+        """
+        super( RegexLineDataProvider, self ).__init__( source, **kwargs )
+
+        self.regex_list = regex_list if isinstance( regex_list, list ) else []
+        self.compiled_regex_list = [ re.compile( regex ) for regex in self.regex_list ]
+        self.invert = invert
+        # NOTE: no support for flags
+
+    def filter( self, line ):
+        # NOTE: filter_fn will occur BEFORE any matching
+        line = super( RegexLineDataProvider, self ).filter( line )
+        if line is not None and self.compiled_regex_list:
+            line = self.filter_by_regex( line )
+        return line
+
+    def filter_by_regex( self, line ):
+        matches = any([ regex.match( line ) for regex in self.compiled_regex_list ])
+        if self.invert:
+            return line if not matches else None
+        return line if matches else None
+
+
+# ============================================================================= MISCELLANEOUS OR UNIMPLEMENTED
+# ----------------------------------------------------------------------------- block data providers
+class BlockDataProvider( base.LimitedOffsetDataProvider ):
+    """
+    Class that uses formats where multiple lines combine to describe a single
+    datum. The data output will be a list of either map/dicts or sub-arrays.
+
+    Uses FilteredLineDataProvider as its source (kwargs **not** passed).
+
+    e.g. Fasta, GenBank, MAF, hg log
+    Note: memory intensive (gathers a list of lines before output)
+    """
+    def __init__( self, source, new_block_delim_fn=None, block_filter_fn=None, **kwargs ):
+        """
+        :param new_block_delim_fn: boolean function used to determine whether a
+            given line is the start of a new block.
+        :type new_block_delim_fn: function
+
+        :param block_filter_fn: function that determines if a block is valid and
+            will be provided.
+            Optional: defaults to `None` (no filtering)
+        :type block_filter_fn: function
+        """
+        # composition - not inheritance
+        # TODO: not a fan of this:
+        ( filter_fn, limit, offset ) = ( kwargs.pop( 'filter_fn', None ),
+                                         kwargs.pop( 'limit', None ), kwargs.pop( 'offset', 0 ) )
+        line_provider = FilteredLineDataProvider( source, **kwargs )
+        super( BlockDataProvider, self ).__init__( line_provider, filter_fn=filter_fn, limit=limit, offset=offset )
+
+        self.new_block_delim_fn = new_block_delim_fn
+        self.block_filter_fn = block_filter_fn
+        self.init_new_block()
+
+    def init_new_block( self ):
+        """
+        Set up internal data for next block.
+        """
+        # called in __init__ and after yielding the prev. block
+        self.block_lines = collections.deque([])
+
+    def __iter__( self ):
+        """
+        Overridden to provide last block.
+        """
+        parent_gen = super( BlockDataProvider, self ).__iter__()
+        for block in parent_gen:
+            yield block
+
+        last_block = self.handle_last_block()
+        if last_block is not None:
+            self.num_data_returned += 1
+            yield last_block
+
+    def filter( self, line ):
+        """
+        Line filter here being used to aggregate/assemble lines into a block
+        and determine whether the line indicates a new block.
+
+        :param line: the incoming line from the source
+        :type line: str
+        :returns: a block or `None`
+        """
+        line = super( BlockDataProvider, self ).filter( line )
+        # TODO: HACK
+        self.num_data_read -= 1
+        if line is None:
+            return None
+
+        block_to_return = None
+        if self.is_new_block( line ):
+            # if we're already in a block, return the prev. block and add the line to a new block
+            if self.block_lines:
+                block_to_return = self.assemble_current_block()
+                block_to_return = self.filter_block( block_to_return )
+                self.num_data_read += 1
+
+                self.init_new_block()
+
+        self.add_line_to_block( line )
+        return block_to_return
+
+    def is_new_block( self, line ):
+        """
+        Returns True if the given line indicates the start of a new block
+        (and the current block should be provided) or False if not.
+        """
+        if self.new_block_delim_fn:
+            return self.new_block_delim_fn( line )
+        return True
+
+    # NOTE:
+    #   some formats have one block attr per line
+    #   some formats rely on having access to multiple lines to make sensible data
+    # So, building the block from the lines can happen in either:
+    #   add_line_to_block AND/OR assemble_current_block
+    def add_line_to_block( self, line ):
+        """
+        Integrate the given line into the current block.
+
+        Called per line.
+        """
+        # here either:
+        #   consume the line (using it to add attrs to self.block)
+        #   save the line (appending to self.block_lines) for use in assemble_current_block
+        self.block_lines.append( line )
+
+    def assemble_current_block( self ):
+        """
+        Build the current data into a block.
+
+        Called per block (just before providing).
+        """
+        # empty block_lines and assemble block
+        return list( ( self.block_lines.popleft() for i in range( len( self.block_lines ) ) ) )
+
+    def filter_block( self, block ):
+        """
+        Determine whether the current block is a valid/desired datum.
+
+        Called per block (just before providing).
+        """
+        if self.block_filter_fn:
+            return self.block_filter_fn( block )
+        return block
+
+    def handle_last_block( self ):
+        """
+        Handle any blocks remaining after the main loop.
+        """
+        if self.limit is not None and self.num_data_returned >= self.limit:
+            return None
+
+        last_block = self.assemble_current_block()
+        self.num_data_read += 1
+
+        last_block = self.filter_block( last_block )
+        if last_block is not None:
+            self.num_valid_data_read += 1
+
+        return last_block
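
A hedged sketch of `BlockDataProvider` grouping FASTA-style records (the data and
delimiter function are illustrative; '>' header lines mark block starts):

    from StringIO import StringIO

    fasta = StringIO( '>seq1\nACGT\n>seq2\nTTGG\n' )
    provider = BlockDataProvider( fasta,
                                  new_block_delim_fn=lambda line: line.startswith( '>' ) )
    for block in provider:
        # each block is a list of stripped lines, e.g. [ '>seq1', 'ACGT' ]
        print( block )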
diff --git a/lib/galaxy/datatypes/display_applications/__init__.py b/lib/galaxy/datatypes/display_applications/__init__.py
new file mode 100644
index 0000000..61eaed1
--- /dev/null
+++ b/lib/galaxy/datatypes/display_applications/__init__.py
@@ -0,0 +1,3 @@
+"""
+Contains functionality of the newer XML-defined external display applications (not hardcoded into datatype classes).
+"""
diff --git a/lib/galaxy/datatypes/display_applications/application.py b/lib/galaxy/datatypes/display_applications/application.py
new file mode 100644
index 0000000..a8e75e9
--- /dev/null
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -0,0 +1,345 @@
+# Contains objects for using external display applications
+import logging
+import urllib
+from six import string_types
+from urllib import quote_plus
+from copy import deepcopy
+
+from galaxy.util import parse_xml, string_as_bool
+from galaxy.util.odict import odict
+from galaxy.util.template import fill_template
+from galaxy.web import url_for
+from parameters import DisplayApplicationParameter, DisplayApplicationDataParameter, DEFAULT_DATASET_NAME
+from util import encode_dataset_user
+
+log = logging.getLogger( __name__ )
+
+# Any basic functions that we want to provide as a basic part of parameter dict should be added to this dict
+BASE_PARAMS = { 'qp': quote_plus, 'url_for': url_for }
+
+
+class DisplayApplicationLink( object ):
+    @classmethod
+    def from_elem( cls, elem, display_application, other_values=None ):
+        rval = DisplayApplicationLink( display_application )
+        rval.id = elem.get( 'id', None )
+        assert rval.id, 'Link elements require an id.'
+        rval.name = elem.get( 'name', rval.id )
+        rval.url = elem.find( 'url' )
+        assert rval.url is not None, 'A url element must be provided for link elements.'
+        rval.other_values = other_values
+        rval.filters = elem.findall( 'filter' )
+        for param_elem in elem.findall( 'param' ):
+            param = DisplayApplicationParameter.from_elem( param_elem, rval )
+            assert param, 'Unable to load parameter from element: %s' % param_elem
+            rval.parameters[ param.name ] = param
+            rval.url_param_name_map[ param.url ] = param.name
+        return rval
+
+    def __init__( self, display_application ):
+        self.display_application = display_application
+        self.parameters = odict()  # parameters are populated in order, allowing lower listed ones to have values of higher listed ones
+        self.url_param_name_map = {}
+        self.url = None
+        self.id = None
+        self.name = None
+
+    def get_display_url( self, data, trans ):
+        dataset_hash, user_hash = encode_dataset_user( trans, data, None )
+        return url_for( controller='dataset',
+                        action="display_application",
+                        dataset_id=dataset_hash,
+                        user_id=user_hash,
+                        app_name=urllib.quote_plus( self.display_application.id ),
+                        link_name=urllib.quote_plus( self.id ),
+                        app_action=None )
+
+    def get_inital_values( self, data, trans ):
+        if self.other_values:
+            rval = odict( self.other_values )
+        else:
+            rval = odict()
+        rval.update( { 'BASE_URL': trans.request.base, 'APP': trans.app } )  # trans automatically appears as a response, need to add properties of trans that we want here
+        for key, value in BASE_PARAMS.iteritems():  # add helper functions/variables
+            rval[ key ] = value
+        rval[ DEFAULT_DATASET_NAME ] = data  # always have the display dataset name available
+        return rval
+
+    def build_parameter_dict( self, data, dataset_hash, user_hash, trans, app_kwds ):
+        other_values = self.get_inital_values( data, trans )
+        other_values[ 'DATASET_HASH' ] = dataset_hash
+        other_values[ 'USER_HASH' ] = user_hash
+        ready = True
+        for name, param in self.parameters.iteritems():
+            assert name not in other_values, "The display parameter '%s' has been defined more than once." % name
+            if param.ready( other_values ):
+                if name in app_kwds and param.allow_override:
+                    other_values[ name ] = app_kwds[ name ]
+                else:
+                    other_values[ name ] = param.get_value( other_values, dataset_hash, user_hash, trans )  # subsequent params can rely on this value
+            else:
+                ready = False
+                other_values[ name ] = param.get_value( other_values, dataset_hash, user_hash, trans )  # subsequent params can rely on this value
+                if other_values[ name ] is None:
+                    # Need to stop here; subsequent params may need this value to determine their own values
+                    return False, other_values
+        return ready, other_values
+
+    def filter_by_dataset( self, data, trans ):
+        context = self.get_inital_values( data, trans )
+        for filter_elem in self.filters:
+            if fill_template( filter_elem.text, context=context ) != filter_elem.get( 'value', 'True' ):
+                return False
+        return True
+
+
+class DynamicDisplayApplicationBuilder( object ):
+
+    def __init__( self, elem, display_application, build_sites ):
+        filename = None
+        data_table = None
+        if elem.get( 'site_type', None ) is not None:
+            filename = build_sites.get( elem.get( 'site_type' ) )
+        else:
+            filename = elem.get( 'from_file', None )
+        if filename is None:
+            data_table_name = elem.get( 'from_data_table', None )
+            if data_table_name:
+                data_table = display_application.app.tool_data_tables.get( data_table_name, None )
+                assert data_table is not None, 'Unable to find data table named "%s".' % data_table_name
+
+        assert filename is not None or data_table is not None, 'A filename or data table is required for dynamic_links.'
+        skip_startswith = elem.get( 'skip_startswith', None )
+        separator = elem.get( 'separator', '\t' )
+        id_col = elem.get( 'id', None )
+        try:
+            id_col = int( id_col )
+        except ( TypeError, ValueError ):
+            if data_table:
+                if id_col is None:
+                    id_col = data_table.columns.get( 'id', None )
+                if id_col is None:
+                    id_col = data_table.columns.get( 'value', None )
+                try:
+                    id_col = int( id_col )
+                except ( TypeError, ValueError ):
+                    # id is set to a string or None, use column by that name if available
+                    id_col = data_table.columns.get( id_col, None )
+                    id_col = int( id_col )
+        name_col = elem.get( 'name', None )
+        try:
+            name_col = int( name_col )
+        except ( TypeError, ValueError ):
+            if data_table:
+                if name_col is None:
+                    name_col = data_table.columns.get( 'name', None )
+                else:
+                    name_col = data_table.columns.get( name_col, None )
+            else:
+                name_col = None
+        if name_col is None:
+            name_col = id_col
+        max_col = max( id_col, name_col )
+        dynamic_params = {}
+        if data_table is not None:
+            max_col = max( [ max_col ] + data_table.columns.values() )
+            for key, value in data_table.columns.items():
+                dynamic_params[key] = { 'column': value, 'split': False, 'separator': ',' }
+        for dynamic_param in elem.findall( 'dynamic_param' ):
+            name = dynamic_param.get( 'name' )
+            value = int( dynamic_param.get( 'value' ) )
+            split = string_as_bool( dynamic_param.get( 'split', False ) )
+            param_separator = dynamic_param.get( 'separator', ',' )
+            max_col = max( max_col, value )
+            dynamic_params[name] = { 'column': value, 'split': split, 'separator': param_separator }
+        if filename:
+            data_iter = open( filename )
+        elif data_table:
+            version, data_iter = data_table.get_version_fields()
+            display_application.add_data_table_watch( data_table.name, version )
+        links = []
+        for line in data_iter:
+            if isinstance( line, string_types ):
+                if not skip_startswith or not line.startswith( skip_startswith ):
+                    line = line.rstrip( '\n\r' )
+                    if not line:
+                        continue
+                    fields = line.split( separator )
+                else:
+                    continue
+            else:
+                fields = line
+            if len( fields ) > max_col:
+                new_elem = deepcopy( elem )
+                new_elem.set( 'id', fields[id_col] )
+                new_elem.set( 'name', fields[name_col] )
+                dynamic_values = {}
+                for key, attributes in dynamic_params.iteritems():
+                    value = fields[ attributes[ 'column' ] ]
+                    if attributes['split']:
+                        value = value.split( attributes['separator'] )
+                    dynamic_values[key] = value
+                # now populate
+                links.append( DisplayApplicationLink.from_elem( new_elem, display_application, other_values=dynamic_values ) )
+            else:
+                log.warning( 'Invalid dynamic display application link specified in %s: "%s"' % ( filename, line ) )
+        self.links = links
+
+    def __iter__( self ):
+        return iter( self.links )
+
+
+class PopulatedDisplayApplicationLink( object ):
+    def __init__( self, display_application_link, data, dataset_hash, user_hash, trans, app_kwds ):
+        self.link = display_application_link
+        self.data = data
+        self.dataset_hash = dataset_hash
+        self.user_hash = user_hash
+        self.trans = trans
+        self.ready, self.parameters = self.link.build_parameter_dict( self.data, self.dataset_hash, self.user_hash, trans, app_kwds )
+
+    def display_ready( self ):
+        return self.ready
+
+    def get_param_value( self, name ):
+        value = None
+        if self.ready:
+            value = self.parameters.get( name, None )
+            assert value, 'Unknown parameter requested'
+        return value
+
+    def preparing_display( self ):
+        if not self.ready:
+            return self.link.parameters[ self.parameters.keys()[ -1 ] ].is_preparing( self.parameters )
+        return False
+
+    def prepare_display( self ):
+        rval = []
+        found_last = False
+        if not self.ready and not self.preparing_display():
+            other_values = self.parameters
+            for name, param in self.link.parameters.iteritems():
+                if found_last or other_values.keys()[ -1 ] == name:  # found last parameter to be populated
+                    found_last = True
+                    value = param.prepare( other_values, self.dataset_hash, self.user_hash, self.trans )
+                    rval.append( { 'name': name, 'value': value, 'param': param } )
+                    other_values[ name ] = value
+                    if value is None:
+                        # We can go no further until we have a value for this parameter
+                        return rval
+        return rval
+
+    def get_prepare_steps( self, datasets_only=True ):
+        rval = []
+        for name, param in self.link.parameters.iteritems():
+            if datasets_only and not isinstance( param, DisplayApplicationDataParameter ):
+                continue
+            value = self.parameters.get( name, None )
+            rval.append( { 'name': name, 'value': value, 'param': param, 'ready': param.ready( self.parameters ) } )
+        return rval
+
+    def display_url( self ):
+        assert self.display_ready(), 'Display is not yet ready, cannot generate display link'
+        return fill_template( self.link.url.text, context=self.parameters )
+
+    def get_param_name_by_url( self, url ):
+        for name, parameter in self.link.parameters.iteritems():
+            if parameter.build_url( self.parameters ) == url:
+                return name
+        raise ValueError( "Unknown URL parameter name provided: %s" % url )
+
+
+class DisplayApplication( object ):
+    @classmethod
+    def from_file( cls, filename, app ):
+        return cls.from_elem( parse_xml( filename ).getroot(), app, filename=filename )
+
+    @classmethod
+    def from_elem( cls, elem, app, filename=None ):
+        att_dict = cls._get_attributes_from_elem( elem )
+        rval = DisplayApplication( att_dict['id'], att_dict['name'], app, att_dict['version'], filename=filename, elem=elem )
+        rval._load_links_from_elem( elem )
+        return rval
+
+    @classmethod
+    def _get_attributes_from_elem( cls, elem ):
+        display_id = elem.get( 'id', None )
+        assert display_id, "ID tag is required for a Display Application"
+        name = elem.get( 'name', display_id )
+        version = elem.get( 'version', None )
+        return dict( id=display_id, name=name, version=version )
+
+    def __init__( self, display_id, name, app, version=None, filename=None, elem=None ):
+        self.id = display_id
+        self.name = name
+        self.app = app
+        if version is None:
+            version = "1.0.0"
+        self.version = version
+        self.links = odict()
+        self._filename = filename
+        self._elem = elem
+        self._data_table_versions = {}
+
+    def _load_links_from_elem( self, elem ):
+        for link_elem in elem.findall( 'link' ):
+            link = DisplayApplicationLink.from_elem( link_elem, self )
+            if link:
+                self.links[ link.id ] = link
+        try:
+            for dynamic_links in elem.findall( 'dynamic_links' ):
+                for link in DynamicDisplayApplicationBuilder( dynamic_links, self, self.app.datatypes_registry.build_sites ):
+                    self.links[ link.id ] = link
+        except Exception as e:
+            log.error( "Error loading a set of Dynamic Display Application links: %s", e )
+
+    def get_link( self, link_name, data, dataset_hash, user_hash, trans, app_kwds ):
+        # returns a link object with data knowledge to generate links
+        self._check_and_reload()
+        return PopulatedDisplayApplicationLink( self.links[ link_name ], data, dataset_hash, user_hash, trans, app_kwds )
+
+    def filter_by_dataset( self, data, trans ):
+        self._check_and_reload()
+        filtered = DisplayApplication( self.id, self.name, self.app, version=self.version )
+        for link_name, link_value in self.links.iteritems():
+            if link_value.filter_by_dataset( data, trans ):
+                filtered.links[link_name] = link_value
+        return filtered
+
+    def reload( self ):
+        if self._filename:
+            elem = parse_xml( self._filename ).getroot()
+        elif self._elem:
+            elem = self._elem
+        else:
+            raise Exception( "Unable to reload DisplayApplication %s." % ( self.name ) )
+        # All toolshed-specific attributes added by e.g. the registry will remain
+        attr_dict = self._get_attributes_from_elem( elem )
+        # We will not allow changing the id at this time (we'll need to fix several mappings upstream to handle this case)
+        assert attr_dict.get( 'id' ) == self.id, "You cannot reload a Display application where the ID has changed. You will need to restart the server instead."
+        # clear old links
+        for key in self.links.keys():
+            del self.links[key]
+        # clear data table versions:
+        for key in self._data_table_versions.keys():
+            del self._data_table_versions[ key ]
+        # Set new attributes
+        for key, value in attr_dict.iteritems():
+            setattr( self, key, value )
+        # Load new links
+        self._load_links_from_elem( elem )
+        return self
+
+    def add_data_table_watch( self, table_name, version=None ):
+        self._data_table_versions[ table_name ] = version
+
+    def _requires_reload( self ):
+        for key, value in self._data_table_versions.iteritems():
+            table = self.app.tool_data_tables.get( key, None )
+            if table and not table.is_current_version( value ):
+                return True
+        return False
+
+    def _check_and_reload( self ):
+        if self._requires_reload():
+            self.reload()
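
A minimal sketch of loading a display application definition and listing its
links (the XML path is hypothetical and `app` stands for the Galaxy application
instance; dynamic links additionally require `app.datatypes_registry.build_sites`):

    display_app = DisplayApplication.from_file( 'display_applications/example.xml', app )
    for link_id, link in display_app.links.iteritems():
        print( link_id, link.name )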
diff --git a/lib/galaxy/datatypes/display_applications/parameters.py b/lib/galaxy/datatypes/display_applications/parameters.py
new file mode 100644
index 0000000..b771aed
--- /dev/null
+++ b/lib/galaxy/datatypes/display_applications/parameters.py
@@ -0,0 +1,249 @@
+# Contains parameters that are used in Display Applications
+import urllib
+from galaxy.util import string_as_bool
+from galaxy.util.bunch import Bunch
+from galaxy.util.template import fill_template
+from galaxy.web import url_for
+import mimetypes
+
+DEFAULT_DATASET_NAME = 'dataset'
+
+
+class DisplayApplicationParameter( object ):
+    """ Abstract Class for Display Application Parameters """
+
+    type = None
+
+    @classmethod
+    def from_elem( cls, elem, link ):
+        param_type = elem.get( 'type', None )
+        assert param_type, 'DisplayApplicationParameter requires a type'
+        return parameter_type_to_class[ param_type ]( elem, link )
+
+    def __init__( self, elem, link ):
+        self.name = elem.get( 'name', None )
+        assert self.name, 'DisplayApplicationParameter requires a name'
+        self.link = link
+        self.url = elem.get( 'url', self.name )  # name used in url for display purposes defaults to name; e.g. want the form of file.ext, where a '.' is not allowed as python variable name/keyword
+        self.mime_type = elem.get( 'mimetype', None )
+        self.guess_mime_type = string_as_bool( elem.get( 'guess_mimetype', 'False' ) )
+        self.viewable = string_as_bool( elem.get( 'viewable', 'False' ) )  # only allow these to be viewed via direct url when explicitly set to viewable
+        self.strip = string_as_bool( elem.get( 'strip', 'False' ) )
+        self.strip_https = string_as_bool( elem.get( 'strip_https', 'False' ) )
+        self.allow_override = string_as_bool( elem.get( 'allow_override', 'False' ) )  # Passing query param app_<name>=<value> to dataset controller allows override if this is true.
+
+    def get_value( self, other_values, dataset_hash, user_hash, trans ):
+        raise Exception( 'get_value() is not implemented for the base DisplayApplicationParameter' )
+
+    def prepare( self, other_values, dataset_hash, user_hash, trans ):
+        return self.get_value( other_values, dataset_hash, user_hash, trans )
+
+    def ready( self, other_values ):
+        return True
+
+    def is_preparing( self, other_values ):
+        return False
+
+    def build_url( self, other_values ):
+        return fill_template( self.url, context=other_values )
+
+
+class DisplayApplicationDataParameter( DisplayApplicationParameter ):
+    """ Parameter that returns a file_name containing the requested content """
+
+    type = 'data'
+
+    def __init__( self, elem, link ):
+        DisplayApplicationParameter.__init__( self, elem, link )
+        self.extensions = elem.get( 'format', None )
+        if self.extensions:
+            self.extensions = self.extensions.split( "," )
+        self.metadata = elem.get( 'metadata', None )
+        self.allow_extra_files_access = string_as_bool( elem.get( 'allow_extra_files_access', 'False' ) )
+        self.dataset = elem.get( 'dataset', DEFAULT_DATASET_NAME )  # 'dataset' is default name assigned to dataset to be displayed
+        assert not ( self.extensions and self.metadata ), 'A format or metadata can be defined for a DisplayApplicationParameter, but not both.'
+        assert not ( self.allow_extra_files_access and self.metadata ), 'allow_extra_files_access or metadata can be defined for a DisplayApplicationParameter, but not both.'
+        self.viewable = string_as_bool( elem.get( 'viewable', 'True' ) )  # data params should be viewable
+        self.force_url_param = string_as_bool( elem.get( 'force_url_param', 'False' ) )
+        self.force_conversion = string_as_bool( elem.get( 'force_conversion', 'False' ) )
+
+    @property
+    def formats( self ):
+        if self.extensions:
+            return tuple( map( type, map( self.link.display_application.app.datatypes_registry.get_datatype_by_extension, self.extensions ) ) )
+        return None
+
+    def _get_dataset_like_object( self, other_values ):
+        # this returned object has file_name, state, and states attributes equivalent to a DatasetAssociation
+        data = other_values.get( self.dataset, None )
+        assert data, 'Base dataset could not be found in values provided to DisplayApplicationDataParameter'
+        if isinstance( data, DisplayDataValueWrapper ):
+            data = data.value
+        if self.metadata:
+            rval = getattr( data.metadata, self.metadata, None )
+            assert rval, 'Unknown metadata name (%s) provided for dataset type (%s).' % ( self.metadata, data.datatype.__class__.__name__ )
+            return Bunch( file_name=rval.file_name, state=data.state, states=data.states, extension='data' )
+        elif self.extensions and ( self.force_conversion or not isinstance( data.datatype, self.formats ) ):
+            for ext in self.extensions:
+                rval = data.get_converted_files_by_type( ext )
+                if rval:
+                    return rval
+            assert data.find_conversion_destination( self.formats )[0] is not None, "No conversion path found for data param: %s" % self.name
+            return None
+        return data
+
+    def get_value( self, other_values, dataset_hash, user_hash, trans ):
+        data = self._get_dataset_like_object( other_values )
+        if data:
+            return DisplayDataValueWrapper( data, self, other_values, dataset_hash, user_hash, trans )
+        return None
+
+    def prepare( self, other_values, dataset_hash, user_hash, trans ):
+        data = self._get_dataset_like_object( other_values )
+        if not data and self.formats:
+            data = other_values.get( self.dataset, None )
+            trans.sa_session.refresh( data )
+            # start conversion
+            # FIXME: Much of this is copied (more than once...); should be some abstract method elsewhere called from here
+            # find target ext
+            target_ext, converted_dataset = data.find_conversion_destination( self.formats, converter_safe=True )
+            if target_ext and not converted_dataset:
+                if isinstance( data, DisplayDataValueWrapper ):
+                    data = data.value
+                new_data = data.datatype.convert_dataset( trans, data, target_ext, return_output=True, visible=False ).values()[0]
+                new_data.hid = data.hid
+                new_data.name = data.name
+                trans.sa_session.add( new_data )
+                assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation( parent=data, file_type=target_ext, dataset=new_data, metadata_safe=False )
+                trans.sa_session.add( assoc )
+                trans.sa_session.flush()
+            elif converted_dataset and converted_dataset.state == converted_dataset.states.ERROR:
+                raise Exception( "Dataset conversion failed for data parameter: %s" % self.name )
+        return self.get_value( other_values, dataset_hash, user_hash, trans )
+
+    def is_preparing( self, other_values ):
+        value = self._get_dataset_like_object( other_values )
+        if value and value.state in ( value.states.NEW, value.states.UPLOAD, value.states.QUEUED, value.states.RUNNING ):
+            return True
+        return False
+
+    def ready( self, other_values ):
+        value = self._get_dataset_like_object( other_values )
+        if value:
+            if value.state == value.states.OK:
+                return True
+            elif value.state == value.states.ERROR:
+                raise Exception( 'A data display parameter is in the error state: %s' % ( self.name ) )
+        return False
+
+
+class DisplayApplicationTemplateParameter( DisplayApplicationParameter ):
+    """ Parameter that returns a string containing the requested content """
+
+    type = 'template'
+
+    def __init__( self, elem, link ):
+        DisplayApplicationParameter.__init__( self, elem, link )
+        self.text = elem.text or ''
+
+    def get_value( self, other_values, dataset_hash, user_hash, trans ):
+        value = fill_template( self.text, context=other_values )
+        if self.strip:
+            value = value.strip()
+        return DisplayParameterValueWrapper( value, self, other_values, dataset_hash, user_hash, trans )
+
+
+parameter_type_to_class = { DisplayApplicationDataParameter.type: DisplayApplicationDataParameter,
+                            DisplayApplicationTemplateParameter.type: DisplayApplicationTemplateParameter }
+
+
+class DisplayParameterValueWrapper( object ):
+    ACTION_NAME = 'param'
+
+    def __init__( self, value, parameter, other_values, dataset_hash, user_hash, trans ):
+        self.value = value
+        self.parameter = parameter
+        self.other_values = other_values
+        self.trans = trans
+        self._dataset_hash = dataset_hash
+        self._user_hash = user_hash
+        self._url = self.parameter.build_url( self.other_values )
+
+    def __str__( self ):
+        return str( self.value )
+
+    def mime_type( self, action_param_extra=None ):
+        if self.parameter.mime_type is not None:
+            return self.parameter.mime_type
+        if self.parameter.guess_mime_type:
+            mime, encoding = mimetypes.guess_type( self._url )
+            if not mime:
+                mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( self._url.split( "." )[ -1 ], None )
+            if mime:
+                return mime
+        return 'text/plain'
+
+    @property
+    def url( self ):
+        base_url = self.trans.request.base
+        if self.parameter.strip_https and base_url[ : 5].lower() == 'https':
+            base_url = "http%s" % base_url[ 5: ]
+        return "%s%s" % ( base_url,
+                          url_for( controller='dataset',
+                                   action="display_application",
+                                   dataset_id=self._dataset_hash,
+                                   user_id=self._user_hash,
+                                   app_name=urllib.quote_plus( self.parameter.link.display_application.id ),
+                                   link_name=urllib.quote_plus( self.parameter.link.id ),
+                                   app_action=self.action_name,
+                                   action_param=self._url ) )
+
+    @property
+    def action_name( self ):
+        return self.ACTION_NAME
+
+    @property
+    def qp( self ):
+        # returns quoted str contents
+        return self.other_values[ 'qp' ]( str( self ) )
+
+    def __getattr__( self, key ):
+        return getattr( self.value, key )
+
+
+class DisplayDataValueWrapper( DisplayParameterValueWrapper ):
+    ACTION_NAME = 'data'
+
+    def __str__( self ):
+        # string of data param is filename
+        return str( self.value.file_name )
+
+    def mime_type( self, action_param_extra=None ):
+        if self.parameter.mime_type is not None:
+            return self.parameter.mime_type
+        if self.parameter.guess_mime_type:
+            if action_param_extra:
+                mime, encoding = mimetypes.guess_type( action_param_extra )
+            else:
+                mime, encoding = mimetypes.guess_type( self._url )
+            if not mime:
+                if action_param_extra:
+                    mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( action_param_extra.split( "." )[ -1 ], None )
+                if not mime:
+                    mime = self.trans.app.datatypes_registry.get_mimetype_by_extension( self._url.split( "." )[ -1 ], None )
+            if mime:
+                return mime
+        if hasattr( self.value, 'get_mime' ):
+            return self.value.get_mime()
+        return self.other_values[ DEFAULT_DATASET_NAME ].get_mime()
+
+    @property
+    def action_name( self ):
+        if self.parameter.force_url_param:
+            return super( DisplayDataValueWrapper, self ).action_name
+        return self.ACTION_NAME
+
+    @property
+    def qp( self ):
+        # returns quoted url contents
+        return self.other_values[ 'qp' ]( self.url )
diff --git a/lib/galaxy/datatypes/display_applications/util.py b/lib/galaxy/datatypes/display_applications/util.py
new file mode 100644
index 0000000..2aec954
--- /dev/null
+++ b/lib/galaxy/datatypes/display_applications/util.py
@@ -0,0 +1,32 @@
+from Crypto.Cipher import Blowfish
+
+
+def encode_dataset_user( trans, dataset, user ):
+    # encode dataset id as usual
+    # encode user id using the dataset create time as the key
+    dataset_hash = trans.security.encode_id( dataset.id )
+    if user is None:
+        user_hash = 'None'
+    else:
+        user_hash = str( user.id )
+        # Pad to a multiple of 8 with leading "!"
+        user_hash = ( "!" * ( 8 - len( user_hash ) % 8 ) ) + user_hash
+        cipher = Blowfish.new( str( dataset.create_time ) )
+        user_hash = cipher.encrypt( user_hash ).encode( 'hex' )
+    return dataset_hash, user_hash
+
+
+def decode_dataset_user( trans, dataset_hash, user_hash ):
+    # decode dataset id as usual
+    # decode user id using the dataset create time as the key
+    dataset_id = trans.security.decode_id( dataset_hash )
+    dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id )
+    assert dataset, "Bad Dataset id provided to decode_dataset_user"
+    if user_hash in [ None, 'None' ]:
+        user = None
+    else:
+        cipher = Blowfish.new( str( dataset.create_time ) )
+        user_id = cipher.decrypt( user_hash.decode( 'hex' ) ).lstrip( "!" )
+        user = trans.sa_session.query( trans.app.model.User ).get( int( user_id ) )
+        assert user, "A Bad user id was passed to decode_dataset_user"
+    return dataset, user
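+
+
+# A minimal sketch (hypothetical create_time string and user id) of the
+# pad/encrypt round trip used above; PyCrypto's Blowfish defaults to ECB
+# mode, which is what encode/decode rely on:
+#
+#     cipher = Blowfish.new( '2016-10-01 12:00:00' )
+#     padded = ( "!" * ( 8 - len( '42' ) % 8 ) ) + '42'  # '!!!!!!42', 8 bytes
+#     token = cipher.encrypt( padded ).encode( 'hex' )
+#     assert cipher.decrypt( token.decode( 'hex' ) ).lstrip( "!" ) == '42'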
diff --git a/lib/galaxy/datatypes/genetics.py b/lib/galaxy/datatypes/genetics.py
new file mode 100644
index 0000000..6c2dec9
--- /dev/null
+++ b/lib/galaxy/datatypes/genetics.py
@@ -0,0 +1,821 @@
+"""
+rgenetics datatypes
+Use at your peril
+Ross Lazarus
+for the rgenetics and galaxy projects
+
+genome graphs datatypes derived from Interval datatypes
+genome graphs datasets have a header row with appropriate column names
+The first column is always the marker - eg column name = rs, first row = rs12345 if the rows are snps
+subsequent row values are all numeric! Parsing will fail if any value is non-numeric (eg '+' or 'NA')
+ross lazarus for rgenetics
+august 20 2007
+"""
+
+import logging
+import os
+import sys
+import urllib
+from cgi import escape
+
+from galaxy.datatypes import metadata
+from galaxy.datatypes.text import Html
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes.tabular import Tabular
+from galaxy.util import nice_size
+from galaxy.web import url_for
+
+gal_Log = logging.getLogger(__name__)
+verbose = False
+
+
+class GenomeGraphs( Tabular ):
+    """
+    Tab delimited data containing a marker id and any number of numeric values
+    """
+
+    MetadataElement( name="markerCol", default=1, desc="Marker ID column", param=metadata.ColumnParameter )
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
+    MetadataElement( name="column_types", default=[], desc="Column types", readonly=True, visible=False )
+    file_ext = 'gg'
+
+    def __init__(self, **kwd):
+        """
+        Initialize gg datatype, by adding UCSC display apps
+        """
+        Tabular.__init__(self, **kwd)
+        self.add_display_app( 'ucsc', 'Genome Graph', 'as_ucsc_display_file', 'ucsc_links' )
+
+    def set_meta(self, dataset, **kwd):
+        Tabular.set_meta( self, dataset, **kwd)
+        dataset.metadata.markerCol = 1
+        header = open(dataset.file_name, 'r').readline().strip().split('\t')
+        dataset.metadata.columns = len(header)
+        t = ['numeric' for x in header]
+        t[0] = 'string'
+        dataset.metadata.column_types = t
+        return True
+
+    def as_ucsc_display_file( self, dataset, **kwd ):
+        """
+        Returns file
+        """
+        return open(dataset.file_name, 'r')
+
+    def ucsc_links( self, dataset, type, app, base_url ):
+        """
+        from the ever-helpful angie hinrichs angie@soe.ucsc.edu
+        a genome graphs call looks like this
+
+        http://genome.ucsc.edu/cgi-bin/hgGenome?clade=mammal&org=Human&db=hg18&hgGenome_dataSetName=dname
+        &hgGenome_dataSetDescription=test&hgGenome_formatType=best%20guess&hgGenome_markerType=best%20guess
+        &hgGenome_columnLabels=best%20guess&hgGenome_maxVal=&hgGenome_labelVals=
+        &hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=http://galaxy.esphealth.org/datasets/333/display/index
+        &hgGenome_doSubmitUpload=submit
+
+        Galaxy gives this for an interval file
+
+        http://genome.ucsc.edu/cgi-bin/hgTracks?db=hg18&position=chr1:1-1000&hgt.customText=
+        http%3A%2F%2Fgalaxy.esphealth.org%2Fdisplay_as%3Fid%3D339%26display_app%3Ducsc
+
+        """
+        ret_val = []
+        if not dataset.dbkey:
+            dataset.dbkey = 'hg18'  # punt!
+        if dataset.has_data():
+            for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
+                if site_name in app.datatypes_registry.get_display_sites('ucsc'):
+                    site_url = site_url.replace('/hgTracks?', '/hgGenome?')  # for genome graphs
+                    internal_url = "%s" % url_for( controller='dataset',
+                                                   dataset_id=dataset.id,
+                                                   action='display_at',
+                                                   filename='ucsc_' + site_name )
+                    display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
+                    display_url = urllib.quote_plus( display_url )
+                    # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+                    # redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
+                    sl = ["%sdb=%s" % (site_url, dataset.dbkey ), ]
+                    # sl.append("&hgt.customText=%s")
+                    sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
+                    sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
+                    sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
+                    sl.append("&hgGenome_doSubmitUpload=submit")
+                    sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
+                    s = ''.join(sl)
+                    s = urllib.quote_plus(s)
+                    redirect_url = s
+                    link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+                    ret_val.append( (site_name, link) )
+        return ret_val
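+        # Note the double encoding above (hypothetical values): display_url is
+        # quote_plus()ed once so it survives as the hgGenome_uploadFile value,
+        # then the whole parameter string is quote_plus()ed again to travel
+        # inside redirect_url, e.g.
+        #     urllib.quote_plus( 'a b' )                      == 'a+b'
+        #     urllib.quote_plus( urllib.quote_plus( 'a b' ) ) == 'a%2Bb'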
+
+    def make_html_table( self, dataset, skipchars=[] ):
+        """
+        Create HTML table, used for displaying peek
+        """
+        out = ['<table cellspacing="0" cellpadding="3">']
+        f = open(dataset.file_name, 'r')
+        d = f.readlines()[:5]
+        if len(d) == 0:
+            out = "Cannot find anything to parse in %s" % dataset.name
+            return out
+        hasheader = 0
+        try:
+            [float(x) for x in d[0].split()[1:]]  # first is name - see if the rest are all numeric
+        except ValueError:
+            hasheader = 1
+        try:
+            # Generate column header
+            out.append( '<tr>' )
+            if hasheader:
+                for i, name in enumerate(d[0].split() ):
+                    out.append( '<th>%s.%s</th>' % ( i + 1, name ) )
+                d.pop(0)
+                out.append('</tr>')
+            for row in d:
+                out.append('<tr>')
+                out.append(''.join(['<td>%s</td>' % x for x in row.split()]))
+                out.append('</tr>')
+            out.append( '</table>' )
+            out = "".join( out )
+        except Exception as exc:
+            out = "Can't create peek %s" % exc
+        return out
+
+    def validate( self, dataset ):
+        """
+        Validate a gg file - all numeric after header row
+        """
+        errors = list()
+        infile = open(dataset.file_name, "r")
+        infile.next()  # header
+        for i, row in enumerate(infile):
+            ll = row.strip().split('\t')[1:]  # first is alpha feature identifier
+            badvals = []
+            for j, x in enumerate(ll):
+                try:
+                    x = float(x)
+                except ValueError:
+                    badvals.append('col%d:%s' % (j + 1, x))
+            if len(badvals) > 0:
+                errors.append('row %d: %s' % (i + 1, ' '.join(badvals)))
+        return errors
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in gg format
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'test_space.txt' )
+        >>> GenomeGraphs().sniff( fname )
+        False
+        >>> fname = get_test_fname( '1.gg' )
+        >>> GenomeGraphs().sniff( fname )
+        True
+        """
+        f = open(filename, 'r')
+        f.readline()  # header
+        rows = [f.readline().split()[1:] for x in range(3)]  # small sample, trimming first column
+        for row in rows:
+            if len(row) < 1:
+                # Must actually have at least one value
+                return False
+            try:
+                [float(x) for x in row]  # first col has been removed
+            except:
+                return False
+        return True
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'application/vnd.ms-excel'
+
+
+class rgTabList(Tabular):
+    """
+    for sampleid and for featureid lists of exclusions or inclusions in the clean tool
+    featureid subsets on statistical criteria -> specialized display such as gg
+    """
+    file_ext = "rgTList"
+
+    def __init__(self, **kwd):
+        """
+        Initialize featurelist datatype
+        """
+        Tabular.__init__( self, **kwd )
+        self.column_names = []
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return Tabular.make_html_table( self, dataset, column_names=self.column_names )
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'text/html'
+
+
+class rgSampleList(rgTabList):
+    """
+    for sampleid exclusions or inclusions in the clean tool
+    output from QC eg excess het, gender error, ibd pair member, eigen outlier, excess mendel errors, ...
+    since they can be uploaded, should be flexible
+    but they are persistent at least
+    same infrastructure for expression?
+    """
+    file_ext = "rgSList"
+
+    def __init__(self, **kwd):
+        """
+        Initialize samplelist datatype
+        """
+        rgTabList.__init__( self, **kwd )
+        self.column_names = ['FID', 'IID']
+        # this is what Plink wants as at 2009
+
+    def sniff(self, filename):
+        infile = open(filename, "r")
+        header = infile.next().strip().split()  # header
+        return len(header) >= 2 and header[0] == 'FID' and header[1] == 'IID'
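+        # For example (hypothetical file), a first line starting 'FID\tIID'
+        # sniffs True; any other header sniffs False.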
+
+
+class rgFeatureList( rgTabList ):
+    """
+    for featureid lists of exclusions or inclusions in the clean tool
+    output from QC eg low maf, high missingness, bad hwe in controls, excess mendel errors,...
+    featureid subsets on statistical criteria -> specialized display such as gg
+    same infrastructure for expression?
+    """
+    file_ext = "rgFList"
+
+    def __init__(self, **kwd):
+        """Initialize featurelist datatype"""
+        rgTabList.__init__( self, **kwd )
+        self.column_names = ['#FeatureId', 'Chr', 'Genpos', 'Mappos']
+
+
+class Rgenetics(Html):
+    """
+    base class to use for rgenetics datatypes
+    derived from html - composite datatype elements
+    stored in extra files path
+    """
+
+    MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default='RgeneticsData',
+                     readonly=True, set_in_upload=True)
+
+    composite_type = 'auto_primary_file'
+    allow_datatype_change = False
+    file_ext = 'rgenetics'
+
+    def generate_primary_file( self, dataset=None ):
+        rval = ['<html><head><title>Rgenetics Galaxy Composite Dataset</title></head><body><p/>']
+        rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
+        for composite_name, composite_file in self.get_composite_files( dataset=dataset ).iteritems():
+            fn = composite_name
+            opt_text = ''
+            if composite_file.optional:
+                opt_text = ' (optional)'
+            if composite_file.get('description'):
+                rval.append( '<li><a href="%s" type="application/binary">%s (%s)</a>%s</li>' % ( fn, fn, composite_file.get('description'), opt_text ) )
+            else:
+                rval.append( '<li><a href="%s" type="application/binary">%s</a>%s</li>' % ( fn, fn, opt_text ) )
+        rval.append( '</ul></div></body></html>' )
+        return "\n".join( rval )
+
+    def regenerate_primary_file(self, dataset):
+        """
+        cannot do this until we are setting metadata
+        """
+        efp = dataset.extra_files_path
+        flist = os.listdir(efp)
+        rval = ['<html><head><title>Files for Composite Dataset %s</title></head><body><p/>Composite %s contains:<p/><ul>' % (dataset.name, dataset.name)]
+        for i, fname in enumerate(flist):
+            sfname = os.path.split(fname)[-1]
+            f, e = os.path.splitext(fname)
+            rval.append( '<li><a href="%s">%s</a></li>' % ( sfname, sfname) )
+        rval.append( '</ul></body></html>' )
+        with open(dataset.file_name, 'w') as f:
+            f.write("\n".join( rval ))
+            f.write('\n')
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'text/html'
+
+    def set_meta( self, dataset, **kwd ):
+        """
+        for lped/pbed eg
+        """
+        Html.set_meta( self, dataset, **kwd )
+        if not kwd.get('overwrite'):
+            if verbose:
+                gal_Log.debug('@@@ rgenetics set_meta called with overwrite = False')
+            return True
+        try:
+            efp = dataset.extra_files_path
+        except:
+            if verbose:
+                gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
+            return False
+        try:
+            flist = os.listdir(efp)
+        except:
+            if verbose:
+                gal_Log.debug('@@@rgenetics set_meta failed %s - dataset %s has no efp ?' % (sys.exc_info()[0], dataset.name))
+            return False
+        if len(flist) == 0:
+            if verbose:
+                gal_Log.debug('@@@rgenetics set_meta failed - %s efp %s is empty?' % (dataset.name, efp))
+            return False
+        self.regenerate_primary_file(dataset)
+        if not dataset.info:
+            dataset.info = 'Galaxy genotype datatype object'
+        if not dataset.blurb:
+            dataset.blurb = 'Composite file - Rgenetics Galaxy toolkit'
+        return True
+
+
+class SNPMatrix(Rgenetics):
+    """
+    BioC SNPMatrix Rgenetics data collections
+    """
+    file_ext = "snpmatrix"
+
+    def set_peek( self, dataset, **kwd ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Binary RGenetics file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff(self, filename):
+        """
+        need to check the file header hex code
+        """
+        infile = open(filename, "rb")
+        head = infile.read(16)
+        head = [hex(ord(x)) for x in head]
+        # TODO: compare head against the expected SNPMatrix magic bytes;
+        # until then this sniffer never claims a file.
+        return False
+
+
+class Lped(Rgenetics):
+    """
+    linkage pedigree (ped,map) Rgenetics data collections
+    """
+    file_ext = "lped"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.ped',
+                                 description='Pedigree File',
+                                 substitute_name_with_metadata='base_name',
+                                 is_binary=False )
+        self.add_composite_file( '%s.map',
+                                 description='Map File',
+                                 substitute_name_with_metadata='base_name',
+                                 is_binary=False )
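+        # e.g. with the default base_name 'RgeneticsData', this composite
+        # expects RgeneticsData.ped and RgeneticsData.map in the dataset's
+        # extra files path.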
+
+
+class Pphe(Rgenetics):
+    """
+    Plink phenotype file - header must have FID\tIID... Rgenetics data collections
+    """
+    file_ext = "pphe"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.pphe',
+                                 description='Plink Phenotype File',
+                                 substitute_name_with_metadata='base_name',
+                                 is_binary=False )
+
+
+class Fphe(Rgenetics):
+    """
+    fbat pedigree file - mad format with ! as first char on header row
+    Rgenetics data collections
+    """
+    file_ext = "fphe"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.fphe',
+                                 description='FBAT Phenotype File',
+                                 substitute_name_with_metadata='base_name' )
+
+
+class Phe(Rgenetics):
+    """
+    Phenotype file
+    """
+    file_ext = "phe"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.phe',
+                                 description='Phenotype File',
+                                 substitute_name_with_metadata='base_name',
+                                 is_binary=False )
+
+
+class Fped(Rgenetics):
+    """
+    FBAT pedigree format - single file, map is header row of rs numbers. Strange.
+    Rgenetics data collections
+    """
+    file_ext = "fped"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.fped', description='FBAT format pedfile',
+                                 substitute_name_with_metadata='base_name',
+                                 is_binary=False )
+
+
+class Pbed(Rgenetics):
+    """
+    Plink Binary compressed 2bit/geno Rgenetics data collections
+    """
+    file_ext = "pbed"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.bim', substitute_name_with_metadata='base_name', is_binary=False )
+        self.add_composite_file( '%s.bed', substitute_name_with_metadata='base_name', is_binary=True )
+        self.add_composite_file( '%s.fam', substitute_name_with_metadata='base_name', is_binary=False )
+
+
+class ldIndep(Rgenetics):
+    """
+    LD (a good measure of redundancy of information) depleted Plink Binary compressed 2bit/geno
+    This is really a plink binary, but some tools work better with less redundancy so are constrained to
+    these files
+    """
+    file_ext = "ldreduced"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.bim', substitute_name_with_metadata='base_name', is_binary=False )
+        self.add_composite_file( '%s.bed', substitute_name_with_metadata='base_name', is_binary=True )
+        self.add_composite_file( '%s.fam', substitute_name_with_metadata='base_name', is_binary=False )
+
+
+class Eigenstratgeno(Rgenetics):
+    """
+    Eigenstrat format - may be able to get rid of this
+    if we move to shellfish
+    Rgenetics data collections
+    """
+    file_ext = "eigenstratgeno"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata='base_name', is_binary=False )
+        self.add_composite_file( '%s.ind', substitute_name_with_metadata='base_name', is_binary=False )
+        self.add_composite_file( '%s.map', substitute_name_with_metadata='base_name', is_binary=False )
+
+
+class Eigenstratpca(Rgenetics):
+    """
+    Eigenstrat PCA file for case control adjustment
+    Rgenetics data collections
+    """
+    file_ext = "eigenstratpca"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__(self, **kwd)
+        self.add_composite_file( '%s.eigenstratpca',
+                                 description='Eigenstrat PCA file', substitute_name_with_metadata='base_name' )
+
+
+class Snptest(Rgenetics):
+    """
+    BioC snptest Rgenetics data collections
+    """
+    file_ext = "snptest"
+
+
+class Pheno(Tabular):
+    """
+    base class for pheno files
+    """
+    file_ext = 'pheno'
+
+
+class RexpBase( Html ):
+    """
+    base class for BioC data structures in Galaxy
+    must be constructed with the pheno data in place since that
+    goes into the metadata for each instance
+    """
+    MetadataElement( name="columns", default=0, desc="Number of columns", visible=True )
+    MetadataElement( name="column_names", default=[], desc="Column names", visible=True )
+    MetadataElement(name="pheCols", default=[], desc="Select list for potentially interesting variables", visible=True)
+    MetadataElement( name="base_name",
+                     desc="base name for all transformed versions of this expression dataset", default='rexpression', set_in_upload=True)
+    MetadataElement( name="pheno_path", desc="Path to phenotype data for this experiment", default="rexpression.pheno", visible=True)
+    file_ext = 'rexpbase'
+    html_table = None
+    is_binary = True
+    composite_type = 'auto_primary_file'
+    allow_datatype_change = False
+
+    def __init__( self, **kwd ):
+        Html.__init__(self, **kwd)
+        self.add_composite_file( '%s.pheno', description='Phenodata tab text file',
+                                 substitute_name_with_metadata='base_name', is_binary=False)
+
+    def generate_primary_file( self, dataset=None ):
+        """
+        This is called only at upload to write the html file
+        cannot rename the datasets here - they come with the default unfortunately
+        """
+        return '<html><head></head><body>AutoGenerated Primary File for Composite Dataset</body></html>'
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'text/html'
+
+    def get_phecols(self, phenolist=[], maxConc=20):
+        """
+        sept 2009: cannot use whitespace to split - make a more complex structure here
+        and adjust the methods that rely on this structure
+        return interesting phenotype column names for an rexpression eset or affybatch
+        to use in array subsetting and so on. Returns a data structure for a
+        dynamic Galaxy select parameter.
+        A column with only 1 value doesn't change, so is not interesting for
+        analysis. A column with a different value in every row is equivalent to a unique
+        identifier so is also not interesting for anova or limma analysis - both these
+        are removed after the concordance (count of unique terms) is constructed for each
+        column. Then a complication - each remaining pair of columns is tested for
+        redundancy - if two columns are always paired, then only one is needed :)
+        """
+        for nrows, row in enumerate(phenolist):  # construct concordance
+            if len(row.strip()) == 0:
+                break
+            row = row.strip().split('\t')
+            if nrows == 0:  # set up from header
+                head = row
+                totcols = len(row)
+                concordance = [{} for x in head]  # list of dicts
+            else:
+                for col, code in enumerate(row):  # keep column order correct
+                    if col >= totcols:
+                        gal_Log.warning('### get_phecols error in pheno file - row %d col %d (%s) longer than header %s' % (nrows, col, row, head))
+                    else:
+                        concordance[col].setdefault(code, 0)  # first one is zero
+                        concordance[col][code] += 1
+        useCols = []
+        useConc = []  # columns of interest to keep
+        nrows = len(phenolist)
+        nrows -= 1  # drop head from count
+        for c, conc in enumerate(concordance):  # c is column number
+            if (len(conc) > 1) and (len(conc) < min(nrows, maxConc)):  # not all same and not all different!!
+                useConc.append(conc)  # keep concordance
+                useCols.append(c)  # keep column
+        nuse = len(useCols)
+        # now to check for pairs of concordant columns - drop one of these.
+        delme = []
+        p = phenolist[1:]  # drop header
+        plist = [x.strip().split('\t') for x in p]  # list of lists
+        phe = [[x[i] for i in useCols] for x in plist if len(x) >= totcols]  # strip unused data
+        for i in range(0, (nuse - 1)):  # for each interesting column
+            for j in range(i + 1, nuse):
+                kdict = {}
+                for row in phe:  # row is a list of lists
+                    k = '%s%s' % (row[i], row[j])  # composite key
+                    kdict[k] = k
+                if (len(kdict.keys()) == len(concordance[useCols[j]])):  # i and j are always matched
+                    delme.append(j)
+        delme = list(set(delme))  # remove dupes
+        listCol = []
+        delme.sort()
+        delme.reverse()  # must delete from far end!
+        for i in delme:
+            del useConc[i]  # get rid of concordance
+            del useCols[i]  # and usecols entry
+        for i, conc in enumerate(useConc):  # these are all unique columns for the design matrix
+            ccounts = sorted([(conc.get(code, 0), code) for code in conc.keys()])  # decorate
+            cc = [(x[1], x[0]) for x in ccounts]  # list of code count tuples
+            codeDetails = (head[useCols[i]], cc)  # ('foo',[('a',3),('b',11),..])
+            listCol.append(codeDetails)
+        if len(listCol) > 0:
+            res = listCol
+            # metadata.pheCols becomes [('bar;22,zot;113','foo'), ...]
+        else:
+            res = [('no usable phenotype columns found', [('?', 0), ]), ]
+        return res
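+        # Worked example (hypothetical pheno rows): given a header
+        # 'sample\tsex\tbatch' over rows (s1,M,b1), (s2,F,b2), (s3,M,b1),
+        # 'sample' is unique per row so it is dropped, 'sex' and 'batch' are
+        # always paired so one is dropped, leaving
+        # [('sex', [('F', 1), ('M', 2)])].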
+
+    def get_pheno(self, dataset):
+        """
+        expects a .pheno file in the extra_files_dir - ugh
+        note that R is weird and adds the row.name in
+        the header so the columns are all wrong - unless you tell it not to.
+        A file can be written as
+        write.table(file='foo.pheno',pData(foo),sep='\t',quote=F,row.names=F)
+        """
+        p = open(dataset.metadata.pheno_path, 'r').readlines()
+        if len(p) > 0:  # should only need to fix an R pheno file once
+            head = p[0].strip().split('\t')
+            line1 = p[1].strip().split('\t')
+            if len(head) < len(line1):
+                head.insert(0, 'ChipFileName')  # fix R write.table b0rken-ness
+                p[0] = '\t'.join(head)
+        else:
+            p = []
+        return '\n'.join(p)
+
+    def set_peek( self, dataset, **kwd ):
+        """
+        expects a .pheno file in the extra_files_dir - ugh
+        note that R is weird and does not include the row.name in
+        the header. why?"""
+        if not dataset.dataset.purged:
+            pp = os.path.join(dataset.extra_files_path, '%s.pheno' % dataset.metadata.base_name)
+            try:
+                p = open(pp, 'r').readlines()
+            except:
+                p = ['##failed to find %s' % pp, ]
+            dataset.peek = ''.join(p[:5])
+            dataset.blurb = 'Galaxy Rexpression composite file'
+        else:
+            dataset.peek = 'file does not exist\n'
+            dataset.blurb = 'file purged from disk'
+
+    def get_peek( self, dataset ):
+        """
+        expects a .pheno file in the extra_files_dir - ugh
+        """
+        pp = os.path.join(dataset.extra_files_path, '%s.pheno' % dataset.metadata.base_name)
+        try:
+            p = open(pp, 'r').readlines()
+        except:
+            p = ['##failed to find %s' % pp]
+        return ''.join(p[:5])
+
+    def get_file_peek(self, filename):
+        """
+        can't really peek at a filename - need the extra_files_path and such?
+        """
+        h = ['## rexpression get_file_peek: no file found']
+        try:
+            h = open(filename, 'r').readlines()
+        except:
+            pass
+        return ''.join(h[:5])
+
+    def regenerate_primary_file(self, dataset):
+        """
+        cannot do this until we are setting metadata
+        """
+        bn = dataset.metadata.base_name
+        flist = os.listdir(dataset.extra_files_path)
+        rval = ['<html><head><title>Files for Composite Dataset %s</title></head><body><p/>Comprises the following files:<p/><ul>' % (bn)]
+        for i, fname in enumerate(flist):
+            sfname = os.path.split(fname)[-1]
+            rval.append( '<li><a href="%s">%s</a></li>' % ( sfname, sfname ) )
+        rval.append( '</ul></body></html>' )
+        with open(dataset.file_name, 'w') as f:
+            f.write("\n".join( rval ))
+            f.write('\n')
+
+    def init_meta( self, dataset, copy_from=None ):
+        if copy_from:
+            dataset.metadata = copy_from.metadata
+
+    def set_meta( self, dataset, **kwd ):
+        """
+        NOTE we apply the tabular machinery to the phenodata extracted
+        from a BioC eSet or affybatch.
+        """
+        Html.set_meta(self, dataset, **kwd)
+        try:
+            flist = os.listdir(dataset.extra_files_path)
+        except:
+            if verbose:
+                gal_Log.debug('@@@rexpression set_meta failed - no dataset?')
+            return False
+        bn = dataset.metadata.base_name
+        if not bn:
+            for f in flist:
+                n = os.path.splitext(f)[0]
+                bn = n
+                dataset.metadata.base_name = bn
+        if not bn:
+            bn = '?'
+            dataset.metadata.base_name = bn
+        pn = '%s.pheno' % (bn)
+        pp = os.path.join(dataset.extra_files_path, pn)
+        dataset.metadata.pheno_path = pp
+        try:
+            pf = open(pp, 'r').readlines()  # read the basename.phenodata in the extra_files_path
+        except:
+            pf = None
+        if pf:
+            h = pf[0].strip()
+            h = h.split('\t')  # hope is header
+            h = [escape(x) for x in h]
+            dataset.metadata.column_names = h
+            dataset.metadata.columns = len(h)
+            dataset.peek = ''.join(pf[:5])
+        else:
+            dataset.metadata.column_names = []
+            dataset.metadata.columns = 0
+            dataset.peek = 'No pheno file found'
+        if pf and len(pf) > 1:
+            dataset.metadata.pheCols = self.get_phecols(phenolist=pf)
+        else:
+            dataset.metadata.pheCols = [('', 'No usable phenotypes found', False), ]
+        if not dataset.info:
+            dataset.info = 'Galaxy Expression datatype object'
+        if not dataset.blurb:
+            dataset.blurb = 'R loadable BioC expression object for the Rexpression Galaxy toolkit'
+        return True
+
+    def make_html_table( self, pp='nothing supplied from peek\n'):
+        """
+        Create HTML table, used for displaying peek
+        """
+        out = ['<table cellspacing="0" cellpadding="3">', ]
+        try:
+            # Generate column header
+            p = pp.split('\n')
+            for i, row in enumerate(p):
+                lrow = row.strip().split('\t')
+                if i == 0:
+                    orow = ['<th>%s</th>' % escape(x) for x in lrow]
+                    orow.insert(0, '<tr>')
+                    orow.append('</tr>')
+                else:
+                    orow = ['<td>%s</td>' % escape(x) for x in lrow]
+                    orow.insert(0, '<tr>')
+                    orow.append('</tr>')
+                out.append(''.join(orow))
+            out.append( '</table>' )
+            out = "\n".join( out )
+        except Exception as exc:
+            out = "Can't create html table %s" % str( exc )
+        return out
+
+    def display_peek( self, dataset ):
+        """
+        Returns formatted html of peek
+        """
+        out = self.make_html_table(dataset.peek)
+        return out
+
+
+class Affybatch( RexpBase ):
+    """
+    derived class for BioC data structures in Galaxy
+    """
+
+    file_ext = "affybatch"
+
+    def __init__( self, **kwd ):
+        RexpBase.__init__(self, **kwd)
+        self.add_composite_file( '%s.affybatch',
+                                 description='AffyBatch R object saved to file',
+                                 substitute_name_with_metadata='base_name', is_binary=True )
+
+
+class Eset( RexpBase ):
+    """
+    derived class for BioC data structures in Galaxy
+    """
+    file_ext = "eset"
+
+    def __init__( self, **kwd ):
+        RexpBase.__init__(self, **kwd)
+        self.add_composite_file( '%s.eset',
+                                 description='ESet R object saved to file',
+                                 substitute_name_with_metadata='base_name', is_binary=True )
+
+
+class MAlist( RexpBase ):
+    """
+    derived class for BioC data structures in Galaxy
+    """
+    file_ext = "malist"
+
+    def __init__( self, **kwd ):
+        RexpBase.__init__(self, **kwd)
+        self.add_composite_file( '%s.malist',
+                                 description='MAlist R object saved to file',
+                                 substitute_name_with_metadata='base_name', is_binary=True )
+
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod(sys.modules[__name__])
diff --git a/lib/galaxy/datatypes/graph.py b/lib/galaxy/datatypes/graph.py
new file mode 100644
index 0000000..4b4f808
--- /dev/null
+++ b/lib/galaxy/datatypes/graph.py
@@ -0,0 +1,160 @@
+"""
+Graph content classes.
+"""
+
+import data
+import tabular
+import xml
+
+import dataproviders
+from galaxy.util import simplegraph
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+@dataproviders.decorators.has_dataproviders
+class Xgmml( xml.GenericXml ):
+    """
+    XGMML graph format
+    (http://wiki.cytoscape.org/Cytoscape_User_Manual/Network_Formats).
+    """
+    file_ext = "xgmml"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """
+        Set the peek and blurb text
+        """
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'XGMML data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """
+        Always returns False; the XGMML format must be set manually by the user.
+        """
+        return False
+
+    @staticmethod
+    def merge( split_files, output_file ):
+        """
+        Merging multiple XML files is non-trivial and must be done in subclasses.
+        """
+        if len( split_files ) > 1:
+            raise NotImplementedError( "Merging multiple XML files is non-trivial " +
+                                       "and must be implemented for each XML type" )
+        # For one file only, use base class method (move/copy)
+        data.Text.merge( split_files, output_file )
+
+    @dataproviders.decorators.dataprovider_factory( 'node-edge', dataproviders.hierarchy.XMLDataProvider.settings )
+    def node_edge_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return XGMMLGraphDataProvider( dataset_source, **settings )
+
+
+@dataproviders.decorators.has_dataproviders
+class Sif( tabular.Tabular ):
+    """
+    SIF graph format
+    (http://wiki.cytoscape.org/Cytoscape_User_Manual/Network_Formats).
+
+    First column: node id
+    Second column: relationship type
+    Third to Nth column: target ids for link
+    """
+    file_ext = "sif"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """
+        Set the peek and blurb text
+        """
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'SIF data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """
+        Always returns False; the SIF format must be set manually by the user.
+        """
+        return False
+
+    @staticmethod
+    def merge( split_files, output_file ):
+        data.Text.merge( split_files, output_file )
+
+    @dataproviders.decorators.dataprovider_factory( 'node-edge', dataproviders.column.ColumnarDataProvider.settings )
+    def node_edge_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return SIFGraphDataProvider( dataset_source, **settings )
+
+
+# ----------------------------------------------------------------------------- graph specific data providers
+class XGMMLGraphDataProvider( dataproviders.hierarchy.XMLDataProvider ):
+    """
+    Provide two lists: nodes, edges::
+
+        'nodes': contains objects of the form:
+            { 'id' : <some string id>, 'data': <any extra data> }
+        'edges': contains objects of the form:
+            { 'source' : <an index into nodes>, 'target': <an index into nodes>, 'data': <any extra data> }
+    """
+    def __iter__( self ):
+        # use simple graph to store nodes and links, later providing them as a dict
+        #   essentially this is a form of aggregation
+        graph = simplegraph.SimpleGraph()
+
+        parent_gen = super( XGMMLGraphDataProvider, self ).__iter__()
+        for graph_elem in parent_gen:
+            if 'children' not in graph_elem:
+                continue
+            for elem in graph_elem[ 'children' ]:
+                # use endswith to work around Elementtree namespaces
+                if elem[ 'tag' ].endswith( 'node' ):
+                    node_id = elem[ 'attrib' ][ 'id' ]
+                    # pass the entire, parsed xml element as the data
+                    graph.add_node( node_id, **elem )
+
+                elif elem[ 'tag' ].endswith( 'edge' ):
+                    source_id = elem[ 'attrib' ][ 'source' ]
+                    target_id = elem[ 'attrib' ][ 'target' ]
+                    graph.add_edge( source_id, target_id, **elem )
+
+        yield graph.as_dict()
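+        # For example, children <node id="n1"/>, <node id="n2"/> and
+        # <edge source="n1" target="n2"/> (hypothetical XGMML) yield nodes
+        # 'n1' and 'n2' plus one n1->n2 edge, each carrying the parsed
+        # element dict as extra data.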
+
+
+class SIFGraphDataProvider( dataproviders.column.ColumnarDataProvider ):
+    """
+    Provide two lists: nodes, edges::
+
+        'nodes': contains objects of the form:
+            { 'id' : <some string id>, 'data': <any extra data> }
+        'edges': contains objects of the form:
+            { 'source' : <an index into nodes>, 'target': <an index into nodes>, 'data': <any extra data> }
+    """
+    def __iter__( self ):
+        # use simple graph to store nodes and links, later providing them as a dict
+        #   essentially this is a form of aggregation
+        graph = simplegraph.SimpleGraph()
+        # SIF is tabular with the source, link-type, and all targets in the columns
+        parent_gen = super( SIFGraphDataProvider, self ).__iter__()
+        for columns in parent_gen:
+            if columns:
+                source_id = columns[0]
+                # there's no extra data for nodes (or links) in the examples I've seen
+                graph.add_node( source_id )
+
+                # targets are the (variadic) remaining columns
+                if len( columns ) >= 3:
+                    relation = columns[1]
+                    targets = columns[2:]
+                    for target_id in targets:
+                        graph.add_node( target_id )
+                        graph.add_edge( source_id, target_id, type=relation )
+
+        yield graph.as_dict()
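+        # For example, a SIF line 'nodeA pp nodeB nodeC' (hypothetical) yields
+        # nodes 'nodeA', 'nodeB' and 'nodeC' plus edges nodeA->nodeB and
+        # nodeA->nodeC, each tagged with type='pp'.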
diff --git a/lib/galaxy/datatypes/images.py b/lib/galaxy/datatypes/images.py
new file mode 100644
index 0000000..eca6905
--- /dev/null
+++ b/lib/galaxy/datatypes/images.py
@@ -0,0 +1,272 @@
+"""
+Image classes
+"""
+import logging
+import zipfile
+from six.moves.urllib.parse import quote_plus
+
+from galaxy.datatypes.binary import Binary
+from galaxy.datatypes.sniff import get_headers
+from galaxy.datatypes.text import Html as HtmlFromText
+from galaxy.util import nice_size
+from galaxy.util.image_util import check_image_type
+from . import data
+
+log = logging.getLogger(__name__)
+
+# TODO: Uploading image files of various types is supported in Galaxy, but on
+# the main public instance, the display_in_upload is not set for these data
+# types in datatypes_conf.xml because we do not allow image files to be uploaded
+# there.  There is currently no API feature that allows uploading files outside
+# of a data library ( where it requires either the upload_paths or upload_directory
+# option to be enabled, which is not the case on the main public instance ).  Because
+# of this, we're currently safe, but when the api is enhanced to allow other uploads,
+# we need to ensure that the implementation is such that image files cannot be uploaded
+# to our main public instance.
+
+
+class Image( data.Data ):
+    """Class describing an image"""
+    edam_data = 'data_2968'
+    edam_format = "format_3547"
+    file_ext = ''
+
+    def __init__(self, **kwd):
+        super(Image, self).__init__(**kwd)
+        self.image_formats = [self.file_ext.upper()]
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = 'Image in %s format' % dataset.extension
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """Determine if the file is in this format"""
+        return check_image_type( filename, self.image_formats )
+
+
+class Jpg( Image ):
+    edam_format = "format_3579"
+    file_ext = "jpg"
+
+    def __init__(self, **kwd):
+        super(Jpg, self).__init__(**kwd)
+        self.image_formats = ['JPEG']
+
+
+class Png( Image ):
+    edam_format = "format_3603"
+    file_ext = "png"
+
+
+class Tiff( Image ):
+    edam_format = "format_3591"
+    file_ext = "tiff"
+
+
+class Bmp( Image ):
+    edam_format = "format_3592"
+    file_ext = "bmp"
+
+
+class Gif( Image ):
+    edam_format = "format_3467"
+    file_ext = "gif"
+
+
+class Im( Image ):
+    edam_format = "format_3593"
+    file_ext = "im"
+
+
+class Pcd( Image ):
+    edam_format = "format_3594"
+    file_ext = "pcd"
+
+
+class Pcx( Image ):
+    edam_format = "format_3595"
+    file_ext = "pcx"
+
+
+class Ppm( Image ):
+    edam_format = "format_3596"
+    file_ext = "ppm"
+
+
+class Psd( Image ):
+    edam_format = "format_3597"
+    file_ext = "psd"
+
+
+class Xbm( Image ):
+    edam_format = "format_3598"
+    file_ext = "xbm"
+
+
+class Xpm( Image ):
+    edam_format = "format_3599"
+    file_ext = "xpm"
+
+
+class Rgb( Image ):
+    edam_format = "format_3600"
+    file_ext = "rgb"
+
+
+class Pbm( Image ):
+    edam_format = "format_3601"
+    file_ext = "pbm"
+
+
+class Pgm( Image ):
+    edam_format = "format_3602"
+    file_ext = "pgm"
+
+
+class Eps( Image ):
+    edam_format = "format_3466"
+    file_ext = "eps"
+
+
+class Rast( Image ):
+    edam_format = "format_3605"
+    file_ext = "rast"
+
+
+class Pdf( Image ):
+    edam_format = "format_3508"
+    file_ext = "pdf"
+
+    def sniff(self, filename):
+        """Determine if the file is in pdf format."""
+        headers = get_headers(filename, None, 1)
+        try:
+            if headers[0][0].startswith("%PDF"):
+                return True
+            else:
+                return False
+        except IndexError:
+            return False
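+        # e.g. a file beginning '%PDF-1.4' (hypothetical) sniffs True.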
+
+
+Binary.register_sniffable_binary_format("pdf", "pdf", Pdf)
+
+
+def create_applet_tag_peek( class_name, archive, params ):
+    text = """
+<object classid="java:%s"
+      type="application/x-java-applet"
+      height="30" width="200" align="center" >
+      <param name="archive" value="%s"/>""" % ( class_name, archive )
+    for name, value in params.items():
+        text += """<param name="%s" value="%s"/>""" % ( name, value )
+    text += """
+<object classid="clsid:8AD9C840-044E-11D1-B3E9-00805F499D93"
+        height="30" width="200" >
+        <param name="code" value="%s" />
+        <param name="archive" value="%s"/>""" % ( class_name, archive )
+    for name, value in params.items():
+        text += """<param name="%s" value="%s"/>""" % ( name, value )
+    text += """<div class="errormessage">You must install and enable Java in your browser in order to access this applet.<div></object>
+</object>
+"""
+    return """<div><p align="center">%s</p></div>""" % text
+
+
+class Gmaj( data.Data ):
+    """Class describing a GMAJ Applet"""
+    edam_format = "format_3547"
+    file_ext = "gmaj.zip"
+    copy_safe_peek = False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            if hasattr( dataset, 'history_id' ):
+                params = {
+                    "bundle": "display?id=%s&tofile=yes&toext=.zip" % dataset.id,
+                    "buttonlabel": "Launch GMAJ",
+                    "nobutton": "false",
+                    "urlpause": "100",
+                    "debug": "false",
+                    "posturl": "history_add_to?%s" % "&".join( "%s=%s" % ( x[0], quote_plus( str( x[1] ) ) ) for x in [ ( 'copy_access_from', dataset.id), ( 'history_id', dataset.history_id ), ( 'ext', 'maf' ), ( 'name', 'GMAJ Output on data %s' % dataset.hid ), ( 'info', 'Added by GMAJ' ), ( 'dbkey', dataset.dbkey ) ] )
+                }
+                class_name = "edu.psu.bx.gmaj.MajApplet.class"
+                archive = "/static/gmaj/gmaj.jar"
+                dataset.peek = create_applet_tag_peek( class_name, archive, params )
+                dataset.blurb = 'GMAJ Multiple Alignment Viewer'
+            else:
+                dataset.peek = "After you add this item to your history, you will be able to launch the GMAJ applet."
+                dataset.blurb = 'GMAJ Multiple Alignment Viewer'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek(self, dataset):
+        try:
+            return dataset.peek
+        except:
+            return "peek unavailable"
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'application/zip'
+
+    def sniff(self, filename):
+        """
+        NOTE: the sniff.convert_newlines() call in the upload utility will keep Gmaj data types from being
+        correctly sniffed, but the files can be uploaded (they'll be sniffed as 'txt').  This sniff function
+        is here to provide an example of a sniffer for a zip file.
+        """
+        if not zipfile.is_zipfile( filename ):
+            return False
+        contains_gmaj_file = False
+        zip_file = zipfile.ZipFile(filename, "r")
+        for name in zip_file.namelist():
+            if name.split(".")[1].strip().lower() == 'gmaj':
+                contains_gmaj_file = True
+                break
+        zip_file.close()
+        if not contains_gmaj_file:
+            return False
+        return True
+
+
+class Html( HtmlFromText ):
+    """Deprecated class. This class should not be used anymore, but the galaxy.datatypes.text:Html one.
+    This is for backwards compatibilities only."""
+
+
+class Laj( data.Text ):
+    """Class describing a LAJ Applet"""
+    file_ext = "laj"
+    copy_safe_peek = False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            if hasattr( dataset, 'history_id' ):
+                params = {
+                    "alignfile1": "display?id=%s" % dataset.id,
+                    "buttonlabel": "Launch LAJ",
+                    "title": "LAJ in Galaxy",
+                    "posturl": quote_plus( "history_add_to?%s" % "&".join( "%s=%s" % ( key, value ) for key, value in { 'history_id': dataset.history_id, 'ext': 'lav', 'name': 'LAJ Output', 'info': 'Added by LAJ', 'dbkey': dataset.dbkey, 'copy_access_from': dataset.id }.items() ) ),
+                    "noseq": "true"
+                }
+                class_name = "edu.psu.cse.bio.laj.LajApplet.class"
+                archive = "/static/laj/laj.jar"
+                dataset.peek = create_applet_tag_peek( class_name, archive, params )
+                dataset.blurb = 'LAJ Multiple Alignment Viewer'
+            else:
+                dataset.peek = "After you add this item to your history, you will be able to launch the LAJ applet."
+                dataset.blurb = 'LAJ Multiple Alignment Viewer'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek(self, dataset):
+        try:
+            return dataset.peek
+        except:
+            return "peek unavailable"
diff --git a/lib/galaxy/datatypes/interval.py b/lib/galaxy/datatypes/interval.py
new file mode 100644
index 0000000..2305d3c
--- /dev/null
+++ b/lib/galaxy/datatypes/interval.py
@@ -0,0 +1,1572 @@
+"""
+Interval datatypes
+"""
+import logging
+import math
+import os
+import sys
+import tempfile
+import urllib
+
+import numpy
+from bx.intervals.io import GenomicIntervalReader, ParseError
+
+from galaxy import util
+from galaxy.datatypes import metadata
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes.sniff import get_headers
+from galaxy.datatypes.tabular import Tabular
+from galaxy.datatypes.util.gff_util import parse_gff_attributes
+from galaxy.web import url_for
+
+import data
+import dataproviders
+
+log = logging.getLogger(__name__)
+
+# Contains the meta columns and the words that map to it; list aliases on the
+# right side of the : in decreasing order of priority
+alias_spec = {
+    'chromCol'  : [ 'chrom', 'CHROMOSOME', 'CHROM', 'Chromosome Name' ],
+    'startCol'  : [ 'start', 'START', 'chromStart', 'txStart', 'Start Position (bp)' ],
+    'endCol'    : [ 'end', 'END', 'STOP', 'chromEnd', 'txEnd', 'End Position (bp)' ],
+    'strandCol' : [ 'strand', 'STRAND', 'Strand' ],
+    'nameCol'   : [ 'name', 'NAME', 'Name', 'name2', 'NAME2', 'Name2', 'Ensembl Gene ID', 'Ensembl Transcript ID', 'Ensembl Peptide ID' ]
+}
+
+# a little faster lookup
+alias_helper = {}
+for key, value in alias_spec.items():
+    for elem in value:
+        alias_helper[elem] = key
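+# e.g. a header token 'txStart' maps back to the 'startCol' metadata element:
+# alias_helper[ 'txStart' ] == 'startCol'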
+
+# Constants for configuring viewport generation: If a line is greater than
+# VIEWPORT_MAX_READS_PER_LINE * VIEWPORT_READLINE_BUFFER_SIZE bytes in size,
+# then we will not generate a viewport for that dataset
+VIEWPORT_READLINE_BUFFER_SIZE = 1048576  # 1MB
+VIEWPORT_MAX_READS_PER_LINE = 10
+
+
+@dataproviders.decorators.has_dataproviders
+class Interval( Tabular ):
+    """Tab delimited data containing interval information"""
+    edam_data = "data_3002"
+    edam_format = "format_3475"
+    file_ext = "interval"
+    line_class = "region"
+    track_type = "FeatureTrack"
+    data_sources = { "data": "tabix", "index": "bigwig" }
+
+    """Add metadata elements"""
+    MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+    MetadataElement( name="startCol", default=2, desc="Start column", param=metadata.ColumnParameter )
+    MetadataElement( name="endCol", default=3, desc="End column", param=metadata.ColumnParameter )
+    MetadataElement( name="strandCol", desc="Strand column (click box & select)", param=metadata.ColumnParameter, optional=True, no_value=0 )
+    MetadataElement( name="nameCol", desc="Name/Identifier column (click box & select)", param=metadata.ColumnParameter, optional=True, no_value=0 )
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
+
+    def __init__(self, **kwd):
+        """Initialize interval datatype, by adding UCSC display apps"""
+        Tabular.__init__(self, **kwd)
+        self.add_display_app( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
+
+    def init_meta( self, dataset, copy_from=None ):
+        Tabular.init_meta( self, dataset, copy_from=copy_from )
+
+    def set_meta( self, dataset, overwrite=True, first_line_is_header=False, **kwd ):
+        """Tries to guess from the line the location number of the column for the chromosome, region start-end and strand"""
+        Tabular.set_meta( self, dataset, overwrite=overwrite, skip=0 )
+        if dataset.has_data():
+            empty_line_count = 0
+            num_check_lines = 100  # only check up to this many non empty lines
+            for i, line in enumerate( open( dataset.file_name ) ):
+                line = line.rstrip( '\r\n' )
+                if line:
+                    if ( first_line_is_header or line[0] == '#' ):
+                        self.init_meta( dataset )
+                        line = line.strip( '#' )
+                        elems = line.split( '\t' )
+                        for meta_name, header_list in alias_spec.iteritems():
+                            for header_val in header_list:
+                                if header_val in elems:
+                                    # found highest priority header to meta_name
+                                    setattr( dataset.metadata, meta_name, elems.index( header_val ) + 1 )
+                                    break  # next meta_name
+                        break  # Our metadata is set, so break out of the outer loop
+                    else:
+                        # Header lines in Interval files are optional. For example, BED is Interval but has no header.
+                        # We'll make a best guess at the location of the metadata columns.
+                        metadata_is_set = False
+                        elems = line.split( '\t' )
+                        if len( elems ) > 2:
+                            for str in data.col1_startswith:
+                                if line.lower().startswith( str ):
+                                    if overwrite or not dataset.metadata.element_is_set( 'chromCol' ):
+                                        dataset.metadata.chromCol = 1
+                                    try:
+                                        int( elems[1] )
+                                        if overwrite or not dataset.metadata.element_is_set( 'startCol' ):
+                                            dataset.metadata.startCol = 2
+                                    except:
+                                        pass  # Metadata default will be used
+                                    try:
+                                        int( elems[2] )
+                                        if overwrite or not dataset.metadata.element_is_set( 'endCol' ):
+                                            dataset.metadata.endCol = 3
+                                    except:
+                                        pass  # Metadata default will be used
+                                    # we no longer want to guess that this column is the 'name', name must now be set manually for interval files
+                                    # we will still guess at the strand, as we can make a more educated guess
+                                    # if len( elems ) > 3:
+                                    #    try:
+                                    #        int( elems[3] )
+                                    #    except:
+                                    #        if overwrite or not dataset.metadata.element_is_set( 'nameCol' ):
+                                    #            dataset.metadata.nameCol = 4
+                                    if len( elems ) < 6 or elems[5] not in data.valid_strand:
+                                        if overwrite or not dataset.metadata.element_is_set( 'strandCol' ):
+                                            dataset.metadata.strandCol = 0
+                                    else:
+                                        if overwrite or not dataset.metadata.element_is_set( 'strandCol' ):
+                                            dataset.metadata.strandCol = 6
+                                    metadata_is_set = True
+                                    break
+                        if metadata_is_set or ( i - empty_line_count ) > num_check_lines:
+                            break  # Our metadata is set or we examined 100 non-empty lines, so break out of the outer loop
+                else:
+                    empty_line_count += 1
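+        # For example, a headerless line 'chr1\t100\t200' (hypothetical)
+        # matches a col1_startswith prefix and yields chromCol=1, startCol=2,
+        # endCol=3 and strandCol=0 (fewer than 6 columns, so no strand).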
+
+    def displayable( self, dataset ):
+        try:
+            return dataset.has_data() \
+                and dataset.state == dataset.states.OK \
+                and dataset.metadata.columns > 0 \
+                and dataset.metadata.data_lines != 0 \
+                and dataset.metadata.chromCol \
+                and dataset.metadata.startCol \
+                and dataset.metadata.endCol
+        except:
+            return False
+
+    def get_estimated_display_viewport( self, dataset, chrom_col=None, start_col=None, end_col=None ):
+        """Return a chrom, start, stop tuple for viewing a file."""
+        viewport_feature_count = 100  # viewport should check at least 100 features; excludes comment lines
+        max_line_count = max( viewport_feature_count, 500 )  # maximum number of lines to check; includes comment lines
+        if not self.displayable( dataset ):
+            return ( None, None, None )
+        try:
+            # If column indexes were not passed, determine from metadata
+            if chrom_col is None:
+                chrom_col = int( dataset.metadata.chromCol ) - 1
+            if start_col is None:
+                start_col = int( dataset.metadata.startCol ) - 1
+            if end_col is None:
+                end_col = int( dataset.metadata.endCol ) - 1
+            # Scan lines of file to find a reasonable chromosome and range
+            chrom = None
+            start = sys.maxsize
+            end = 0
+            max_col = max( chrom_col, start_col, end_col )
+            fh = open( dataset.file_name )
+            while True:
+                line = fh.readline( VIEWPORT_READLINE_BUFFER_SIZE )
+                # Stop if at end of file
+                if not line:
+                    break
+                # Skip comment lines
+                if not line.startswith( '#' ):
+                    try:
+                        fields = line.rstrip().split( '\t' )
+                        if len( fields ) > max_col:
+                            if chrom is None or chrom == fields[ chrom_col ]:
+                                start = min( start, int( fields[ start_col ] ) )
+                                end = max( end, int( fields[ end_col ] ) )
+                                # Set chrom last, in case start and end are not integers
+                                chrom = fields[ chrom_col ]
+                            viewport_feature_count -= 1
+                    except Exception:
+                        # Most likely a non-integer field has been encountered
+                        # for start / stop. Just ignore and make sure we finish
+                        # reading the line and decrementing the counters.
+                        pass
+                # Make sure we are at the next new line
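+                # (readline() with a size argument may stop mid-line; keep
+                # reading until a line terminator, or EOF, is reached)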
+                readline_count = VIEWPORT_MAX_READS_PER_LINE
+                while line.rstrip( '\n\r' ) == line:
+                    assert readline_count > 0, Exception( 'Viewport readline count exceeded for dataset %s.' % dataset.id )
+                    line = fh.readline( VIEWPORT_READLINE_BUFFER_SIZE )
+                    if not line:
+                        break  # EOF
+                    readline_count -= 1
+                max_line_count -= 1
+                if not viewport_feature_count or not max_line_count:
+                    # exceeded viewport or total line count to check
+                    break
+            if chrom is not None:
+                return ( chrom, str( start ), str( end ) )  # Necessary to return strings?
+        except Exception:
+            # Unexpected error, possibly missing metadata
+            log.exception( "Exception caught attempting to generate viewport for dataset '%d'", dataset.id )
+        return ( None, None, None )
+
+    def as_ucsc_display_file( self, dataset, **kwd ):
+        """Returns file contents with only the bed data"""
+        fd, temp_name = tempfile.mkstemp()
+        c, s, e, t, n = dataset.metadata.chromCol, dataset.metadata.startCol, dataset.metadata.endCol, dataset.metadata.strandCol or 0, dataset.metadata.nameCol or 0
+        c, s, e, t, n = int(c) - 1, int(s) - 1, int(e) - 1, int(t) - 1, int(n) - 1
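+        # Metadata columns are 1-based; after this shift a missing strand or
+        # name column (metadata value 0) becomes -1, which the checks below
+        # treat as "column absent".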
+        if t >= 0:  # strand column (should) exists
+            for i, elems in enumerate( util.file_iter(dataset.file_name) ):
+                strand = "+"
+                name = "region_%i" % i
+                if n >= 0 and n < len( elems ):
+                    name = elems[n]
+                if t < len(elems):
+                    strand = elems[t]
+                tmp = [ elems[c], elems[s], elems[e], name, '0', strand ]
+                os.write(fd, '%s\n' % '\t'.join(tmp) )
+        elif n >= 0:  # name column (should) exists
+            for i, elems in enumerate( util.file_iter(dataset.file_name) ):
+                name = "region_%i" % i
+                if n >= 0 and n < len( elems ):
+                    name = elems[n]
+                tmp = [ elems[c], elems[s], elems[e], name ]
+                os.write(fd, '%s\n' % '\t'.join(tmp) )
+        else:
+            for elems in util.file_iter(dataset.file_name):
+                tmp = [ elems[c], elems[s], elems[e] ]
+                os.write(fd, '%s\n' % '\t'.join(tmp) )
+        os.close(fd)
+        return open(temp_name)
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return Tabular.make_html_table( self, dataset, column_parameter_alias={'chromCol': 'Chrom', 'startCol': 'Start', 'endCol': 'End', 'strandCol': 'Strand', 'nameCol': 'Name'} )
+
+    def ucsc_links( self, dataset, type, app, base_url ):
+        """
+        Generate links to UCSC genome browser sites based on the dbkey
+        and content of dataset.
+        """
+        # Filter UCSC sites to only those that are supported by this build and
+        # enabled.
+        valid_sites = [ ( name, url )
+                        for name, url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey )
+                        if name in app.datatypes_registry.get_display_sites('ucsc') ]
+        if not valid_sites:
+            return []
+        # If there are any valid sites, we need to generate the estimated
+        # viewport
+        chrom, start, stop = self.get_estimated_display_viewport( dataset )
+        if chrom is None:
+            return []
+        # Accumulate links for valid sites
+        ret_val = []
+        for site_name, site_url in valid_sites:
+            internal_url = url_for( controller='dataset', dataset_id=dataset.id,
+                                    action='display_at', filename='ucsc_' + site_name )
+            display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at"
+                                             % (base_url, url_for( controller='root' ), dataset.id, type) )
+            redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s"
+                                              % (site_url, dataset.dbkey, chrom, start, stop ) )
+            link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+            ret_val.append( ( site_name, link ) )
+        return ret_val
+
+    def validate( self, dataset ):
+        """Validate an interval file using the bx GenomicIntervalReader"""
+        errors = list()
+        c, s, e, t = dataset.metadata.chromCol, dataset.metadata.startCol, dataset.metadata.endCol, dataset.metadata.strandCol
+        c, s, e, t = int(c) - 1, int(s) - 1, int(e) - 1, int(t) - 1
+        infile = open(dataset.file_name, "r")
+        reader = GenomicIntervalReader(
+            infile,
+            chrom_col=c,
+            start_col=s,
+            end_col=e,
+            strand_col=t)
+
+        while True:
+            try:
+                reader.next()
+            except ParseError as err:
+                errors.append(err)
+            except StopIteration:
+                infile.close()
+                return errors
+
+    def repair_methods( self, dataset ):
+        """Return options for removing errors along with a description"""
+        return [("lines", "Remove erroneous lines")]
+
+    def sniff( self, filename ):
+        """
+        Checks for 'intervalness'
+
+        This format is mostly used by Galaxy itself.  Valid interval files should include
+        a valid header comment, but this seems to be loosely regulated.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'test_space.txt' )
+        >>> Interval().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'interval.interval' )
+        >>> Interval().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, '\t' )
+        try:
+            """
+            If we got here, we already know the file is_column_based and is not bed,
+            so we'll just look for some valid data.
+            """
+            for hdr in headers:
+                if hdr and not hdr[0].startswith( '#' ):
+                    if len(hdr) < 3:
+                        return False
+                    try:
+                        # Assume chrom start and end are in column positions 1 and 2
+                        # respectively ( for 0 based columns )
+                        int( hdr[1] )
+                        int( hdr[2] )
+                    except:
+                        return False
+            return True
+        except:
+            return False
+
+    def get_track_window(self, dataset, data, start, end):
+        """
+        Assumes the incoming track data is sorted already.
+        """
+        window = list()
+        for record in data:
+            fields = record.rstrip("\n\r").split("\t")
+            record_chrom = fields[dataset.metadata.chromCol - 1]
+            record_start = int(fields[dataset.metadata.startCol - 1])
+            record_end = int(fields[dataset.metadata.endCol - 1])
+            if record_start < end and record_end > start:
+                window.append( (record_chrom, record_start, record_end) )  # Yes I did want to use a generator here, but it doesn't work downstream
+        return window
+
+    def get_track_resolution( self, dataset, start, end):
+        return None
+
+    # ------------- Dataproviders
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dataprovider( self, dataset, **settings ):
+        return dataproviders.dataset.GenomicRegionDataProvider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region-dict',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'named_columns' ] = True
+        return self.genomic_region_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'interval',
+                                                    dataproviders.dataset.IntervalDataProvider.settings )
+    def interval_dataprovider( self, dataset, **settings ):
+        return dataproviders.dataset.IntervalDataProvider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'interval-dict',
+                                                    dataproviders.dataset.IntervalDataProvider.settings )
+    def interval_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'named_columns' ] = True
+        return self.interval_dataprovider( dataset, **settings )
+
+
+class BedGraph( Interval ):
+    """Tab delimited chrom/start/end/datavalue dataset"""
+    edam_format = "format_3583"
+    file_ext = "bedgraph"
+    track_type = "LineTrack"
+    data_sources = { "data": "bigwig", "index": "bigwig" }
+
+    def as_ucsc_display_file( self, dataset, **kwd ):
+        """
+            Returns file contents as is with no modifications.
+            TODO: this is a functional stub; it will need to be enhanced to provide fuller bedgraph support.
+        """
+        return open( dataset.file_name )
+
+    def get_estimated_display_viewport( self, dataset, chrom_col=0, start_col=1, end_col=2 ):
+        """
+            Set viewport based on dataset's first 100 lines.
+        """
+        return Interval.get_estimated_display_viewport( self, dataset, chrom_col=chrom_col, start_col=start_col, end_col=end_col )
+
+
+class Bed( Interval ):
+    """Tab delimited data in BED format"""
+    edam_format = "format_3003"
+    file_ext = "bed"
+    data_sources = { "data": "tabix", "index": "bigwig", "feature_search": "fli" }
+    track_type = Interval.track_type
+
+    column_names = [ 'Chrom', 'Start', 'End', 'Name', 'Score', 'Strand', 'ThickStart', 'ThickEnd', 'ItemRGB', 'BlockCount', 'BlockSizes', 'BlockStarts' ]
+
+    """Add metadata elements"""
+    MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+    MetadataElement( name="startCol", default=2, desc="Start column", param=metadata.ColumnParameter )
+    MetadataElement( name="endCol", default=3, desc="End column", param=metadata.ColumnParameter )
+    MetadataElement( name="strandCol", desc="Strand column (click box & select)", param=metadata.ColumnParameter, optional=True, no_value=0 )
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
+    MetadataElement( name="viz_filter_cols", desc="Score column for visualization", default=[4], param=metadata.ColumnParameter, optional=True, multiple=True )
+    # do we need to repeat these? they are the same as should be inherited from interval type
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        """Sets the metadata information for datasets previously determined to be in bed format."""
+        i = 0
+        if dataset.has_data():
+            for i, line in enumerate( open(dataset.file_name) ):
+                metadata_set = False
+                line = line.rstrip('\r\n')
+                if line and not line.startswith('#'):
+                    elems = line.split('\t')
+                    if len(elems) > 2:
+                        for startswith in data.col1_startswith:
+                            if line.lower().startswith( startswith ):
+                                if len( elems ) > 3:
+                                    if overwrite or not dataset.metadata.element_is_set( 'nameCol' ):
+                                        dataset.metadata.nameCol = 4
+                                if len(elems) < 6:
+                                    if overwrite or not dataset.metadata.element_is_set( 'strandCol' ):
+                                        dataset.metadata.strandCol = 0
+                                else:
+                                    if overwrite or not dataset.metadata.element_is_set( 'strandCol' ):
+                                        dataset.metadata.strandCol = 6
+                                metadata_set = True
+                                break
+                if metadata_set:
+                    break
+            Tabular.set_meta( self, dataset, overwrite=overwrite, skip=i )
+
+    def as_ucsc_display_file( self, dataset, **kwd ):
+        """Returns file contents with only the bed data. If bed 6+, treat as interval."""
+        for line in open(dataset.file_name):
+            line = line.strip()
+            if line == "" or line.startswith("#"):
+                continue
+            fields = line.split('\t')
+            """check to see if this file doesn't conform to strict genome browser accepted bed"""
+            try:
+                if len(fields) > 12:
+                    return Interval.as_ucsc_display_file(self, dataset)  # too many fields
+                if len(fields) > 6:
+                    int(fields[6])
+                    if len(fields) > 7:
+                        int(fields[7])
+                        if len(fields) > 8:
+                            if int(fields[8]) != 0:
+                                return Interval.as_ucsc_display_file(self, dataset)
+                            if len(fields) > 9:
+                                int(fields[9])
+                                if len(fields) > 10:
+                                    fields2 = fields[10].rstrip(",").split(",")  # remove trailing comma and split on comma
+                                    for field in fields2:
+                                        int(field)
+                                    if len(fields) > 11:
+                                        fields2 = fields[11].rstrip(",").split(",")  # remove trailing comma and split on comma
+                                        for field in fields2:
+                                            int(field)
+            except:
+                return Interval.as_ucsc_display_file(self, dataset)
+            # only check first line for proper form
+            break
+
+        try:
+            return open(dataset.file_name)
+        except:
+            return "This item contains no content"
+
+    def sniff( self, filename ):
+        """
+        Checks for 'bedness'
+
+        BED lines have three required fields and nine additional optional fields.
+        The number of fields per line must be consistent throughout any single set of data in
+        an annotation track.  The order of the optional fields is binding: lower-numbered
+        fields must always be populated if higher-numbered fields are used.  The data type of
+        all 12 columns is:
+        1-str, 2-int, 3-int, 4-str, 5-int, 6-str, 7-int, 8-int, 9-int or list, 10-int, 11-list, 12-list
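+
+        For example, a complete 12-column BED line (tab-separated; adapted from
+        the UCSC FAQ example track) looks like:
+            chr22 1000 5000 cloneA 960 + 1000 5000 0 2 567,488, 0,3512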
+
+        For complete details see http://genome.ucsc.edu/FAQ/FAQformat#format1
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'test_tab.bed' )
+        >>> Bed().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'interval1.bed' )
+        >>> Bed().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'complete.bed' )
+        >>> Bed().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, '\t' )
+        try:
+            if not headers:
+                return False
+            for hdr in headers:
+                if (hdr[0] == '' or hdr[0].startswith( '#' )):
+                    continue
+                valid_col1 = False
+                if len(hdr) < 3 or len(hdr) > 12:
+                    return False
+                for prefix in data.col1_startswith:
+                    if hdr[0].lower().startswith(prefix):
+                        valid_col1 = True
+                        break
+                if valid_col1:
+                    try:
+                        int( hdr[1] )
+                        int( hdr[2] )
+                    except:
+                        return False
+                    if len( hdr ) > 4:
+                        # hdr[3] is a string, 'name', which defines the name of the BED line - difficult to test for this.
+                        # hdr[4] is an int, 'score', a score between 0 and 1000.
+                        try:
+                            if int( hdr[4] ) < 0 or int( hdr[4] ) > 1000:
+                                return False
+                        except:
+                            return False
+                    if len( hdr ) > 5:
+                        # hdr[5] is strand
+                        if hdr[5] not in data.valid_strand:
+                            return False
+                    if len( hdr ) > 6:
+                        # hdr[6] is thickStart, the starting position at which the feature is drawn thickly.
+                        try:
+                            int( hdr[6] )
+                        except:
+                            return False
+                    if len( hdr ) > 7:
+                        # hdr[7] is thickEnd, the ending position at which the feature is drawn thickly
+                        try:
+                            int( hdr[7] )
+                        except:
+                            return False
+                    if len( hdr ) > 8:
+                        # hdr[8] is itemRgb, an RGB value of the form R,G,B (e.g. 255,0,0).  However, this could also be an int (e.g., 0)
+                        try:
+                            int( hdr[8] )
+                        except:
+                            try:
+                                hdr[8].split(',')
+                            except:
+                                return False
+                    if len( hdr ) > 9:
+                        # hdr[9] is blockCount, the number of blocks (exons) in the BED line.
+                        try:
+                            block_count = int( hdr[9] )
+                        except:
+                            return False
+                    if len( hdr ) > 10:
+                        # hdr[10] is blockSizes - A comma-separated list of the block sizes.
+                        # Sometimes the block_sizes and block_starts lists end in extra commas
+                        try:
+                            block_sizes = hdr[10].rstrip(',').split(',')
+                        except:
+                            return False
+                    if len( hdr ) > 11:
+                        # hdr[11] is blockStarts - A comma-separated list of block starts.
+                        try:
+                            block_starts = hdr[11].rstrip(',').split(',')
+                        except:
+                            return False
+                        if len(block_sizes) != block_count or len(block_starts) != block_count:
+                            return False
+                else:
+                    return False
+            return True
+        except:
+            return False
+
+
+class BedStrict( Bed ):
+    """Tab delimited data in strict BED format - no non-standard columns allowed"""
+    edam_format = "format_3584"
+    file_ext = "bedstrict"
+
+    # no user change of datatype allowed
+    allow_datatype_change = False
+
+    # Read only metadata elements
+    MetadataElement( name="chromCol", default=1, desc="Chrom column", readonly=True, param=metadata.MetadataParameter )
+    MetadataElement( name="startCol", default=2, desc="Start column", readonly=True, param=metadata.MetadataParameter )  # TODO: start and end should be able to be set to these or the proper thick[start/end]?
+    MetadataElement( name="endCol", default=3, desc="End column", readonly=True, param=metadata.MetadataParameter )
+    MetadataElement( name="strandCol", desc="Strand column (click box & select)", readonly=True, param=metadata.MetadataParameter, no_value=0, optional=True )
+    MetadataElement( name="nameCol", desc="Name/Identifier column (click box & select)", readonly=True, param=metadata.MetadataParameter, no_value=0, optional=True )
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
+
+    def __init__( self, **kwd ):
+        Tabular.__init__( self, **kwd )
+        self.clear_display_apps()  # only new style display applications for this datatype
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        Tabular.set_meta( self, dataset, overwrite=overwrite, **kwd)  # need column count first
+        if dataset.metadata.columns >= 4:
+            dataset.metadata.nameCol = 4
+            if dataset.metadata.columns >= 6:
+                dataset.metadata.strandCol = 6
+
+    def sniff( self, filename ):
+        return False  # NOTE: This would require aggressively validating the entire file
+
+
+class Bed6( BedStrict ):
+    """Tab delimited data in strict BED format - no non-standard columns allowed; column count forced to 6"""
+    edam_format = "format_3585"
+    file_ext = "bed6"
+
+
+class Bed12( BedStrict ):
+    """Tab delimited data in strict BED format - no non-standard columns allowed; column count forced to 12"""
+    edam_format = "format_3586"
+    file_ext = "bed12"
+
+
+class _RemoteCallMixin:
+    def _get_remote_call_url( self, redirect_url, site_name, dataset, type, app, base_url ):
+        """Retrieve the URL to call out to an external site and retrieve data.
+        This routes our external URL through a local galaxy instance which makes
+        the data available, followed by redirecting to the remote site with a
+        link back to the available information.
+        """
+        internal_url = "%s" % url_for( controller='dataset', dataset_id=dataset.id, action='display_at', filename='%s_%s' % ( type, site_name ) )
+        base_url = app.config.get( "display_at_callback", base_url )
+        display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" %
+                                         ( base_url, url_for( controller='root' ), dataset.id, type ) )
+        link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
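+        # The assembled link therefore has the shape (illustrative):
+        #   <internal display_at URL>?redirect_url=<quoted remote URL>&display_url=<quoted callback URL>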
+        return link
+
+
+@dataproviders.decorators.has_dataproviders
+class Gff( Tabular, _RemoteCallMixin ):
+    """Tab delimited data in Gff format"""
+    edam_data = "data_1255"
+    edam_format = "format_2305"
+    file_ext = "gff"
+    column_names = [ 'Seqname', 'Source', 'Feature', 'Start', 'End', 'Score', 'Strand', 'Frame', 'Group' ]
+    data_sources = { "data": "interval_index", "index": "bigwig", "feature_search": "fli" }
+    track_type = Interval.track_type
+
+    """Add metadata elements"""
+    MetadataElement( name="columns", default=9, desc="Number of columns", readonly=True, visible=False )
+    MetadataElement( name="column_types", default=['str', 'str', 'str', 'int', 'int', 'int', 'str', 'str', 'str'],
+                     param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False )
+
+    MetadataElement( name="attributes", default=0, desc="Number of attributes", readonly=True, visible=False, no_value=0 )
+    MetadataElement( name="attribute_types", default={}, desc="Attribute types", param=metadata.DictParameter, readonly=True, visible=False, no_value=[] )
+
+    def __init__( self, **kwd ):
+        """Initialize datatype, by adding GBrowse display app"""
+        Tabular.__init__(self, **kwd)
+        self.add_display_app( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
+        self.add_display_app( 'gbrowse', 'display in Gbrowse', 'as_gbrowse_display_file', 'gbrowse_links' )
+
+    def set_attribute_metadata( self, dataset ):
+        """
+        Sets metadata elements for dataset's attributes.
+        """
+
+        # Use first N lines to set metadata for dataset attributes. Attributes
+        # not found in the first N lines will not have metadata.
+        num_lines = 200
+        attribute_types = {}
+        for i, line in enumerate( open( dataset.file_name ) ):
+            if line and not line.startswith( '#' ):
+                elems = line.split( '\t' )
+                if len( elems ) == 9:
+                    try:
+                        # Loop through attributes to set types.
+                        for name, value in parse_gff_attributes( elems[8] ).items():
+                            # Default type is string.
+                            value_type = "str"
+                            try:
+                                # Try int.
+                                int( value )
+                                value_type = "int"
+                            except:
+                                try:
+                                    # Try float.
+                                    float( value )
+                                    value_type = "float"
+                                except:
+                                    pass
+                            attribute_types[ name ] = value_type
+                    except:
+                        pass
+                if i + 1 == num_lines:
+                    break
+
+        # Set attribute metadata and then set additional metadata.
+        dataset.metadata.attribute_types = attribute_types
+        dataset.metadata.attributes = len( attribute_types )
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        self.set_attribute_metadata( dataset )
+
+        i = 0
+        for i, line in enumerate( open( dataset.file_name ) ):
+            line = line.rstrip('\r\n')
+            if line and not line.startswith( '#' ):
+                elems = line.split( '\t' )
+                if len(elems) == 9:
+                    try:
+                        int( elems[3] )
+                        int( elems[4] )
+                        break
+                    except:
+                        pass
+        Tabular.set_meta( self, dataset, overwrite=overwrite, skip=i )
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return Tabular.make_html_table( self, dataset, column_names=self.column_names )
+
+    def get_estimated_display_viewport( self, dataset ):
+        """
+        Return a chrom, start, stop tuple for viewing a file.  There are slight differences between gff 2 and gff 3
+        formats.  This function should correctly handle both...
+        """
+        viewport_feature_count = 100  # viewport should check at least 100 features; excludes comment lines
+        max_line_count = max( viewport_feature_count, 500 )  # maximum number of lines to check; includes comment lines
+        if self.displayable( dataset ):
+            try:
+                seqid = None
+                start = sys.maxsize
+                stop = 0
+                fh = open( dataset.file_name )
+                while True:
+                    line = fh.readline( VIEWPORT_READLINE_BUFFER_SIZE )
+                    if not line:
+                        break  # EOF
+                    try:
+                        if line.startswith( '##sequence-region' ):  # ##sequence-region IV 6000000 6030000
+                            elems = line.rstrip( '\n\r' ).split()
+                            if len( elems ) > 3:
+                                # line looks like:
+                                # sequence-region   ctg123 1 1497228
+                                seqid = elems[1]  # IV
+                                start = int( elems[2] )  # 6000000
+                                stop = int( elems[3] )  # 6030000
+                                break  # use location declared in file
+                            elif len( elems ) == 2 and elems[1].find( '..' ) > 0:
+                                # line looks like this:
+                                # sequence-region X:120000..140000
+                                elems = elems[1].split( ':' )
+                                seqid = elems[0]
+                                start = int( elems[1].split( '..' )[0] )
+                                stop = int( elems[1].split( '..' )[1] )
+                                break  # use location declared in file
+                            else:
+                                log.exception( "line (%s) uses an unsupported ##sequence-region definition." % str( line ) )
+                                # break #no break, if bad definition, we try another method
+                        elif line.startswith("browser position"):
+                            # Allow UCSC style browser and track info in the GFF file
+                            pos_info = line.split()[-1]
+                            seqid, startend = pos_info.split(":")
+                            start, stop = map( int, startend.split("-") )
+                            break  # use location declared in file
+                        elif True not in map( line.startswith, ( '#', 'track', 'browser' ) ):  # line.startswith() does not accept iterator in python2.4
+                            viewport_feature_count -= 1
+                            elems = line.rstrip( '\n\r' ).split( '\t' )
+                            if len( elems ) > 3:
+                                if not seqid:
+                                    # We can only set the viewport for a single chromosome
+                                    seqid = elems[0]
+                                if seqid == elems[0]:
+                                    # Make sure we have not spanned chromosomes
+                                    start = min( start, int( elems[3] ) )
+                                    stop = max( stop, int( elems[4] ) )
+                    except:
+                        # most likely start/stop is not an int or not enough fields
+                        pass
+                    # make sure we are at the next new line
+                    readline_count = VIEWPORT_MAX_READS_PER_LINE
+                    while line.rstrip( '\n\r' ) == line:
+                        assert readline_count > 0, Exception( 'Viewport readline count exceeded for dataset %s.' % dataset.id )
+                        line = fh.readline( VIEWPORT_READLINE_BUFFER_SIZE )
+                        if not line:
+                            break  # EOF
+                        readline_count -= 1
+                    max_line_count -= 1
+                    if not viewport_feature_count or not max_line_count:
+                        # exceeded viewport or total line count to check
+                        break
+                if seqid is not None:
+                    return ( seqid, str( start ), str( stop ) )  # Necessary to return strings?
+            except Exception as e:
+                # unexpected error
+                log.exception( str( e ) )
+        return ( None, None, None )  # could not determine viewport
+
+    def ucsc_links( self, dataset, type, app, base_url ):
+        ret_val = []
+        seqid, start, stop = self.get_estimated_display_viewport( dataset )
+        if seqid is not None:
+            for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
+                if site_name in app.datatypes_registry.get_display_sites('ucsc'):
+                    redirect_url = urllib.quote_plus(
+                        "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" %
+                        ( site_url, dataset.dbkey, seqid, start, stop ) )
+                    link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
+                    ret_val.append( ( site_name, link ) )
+        return ret_val
+
+    def gbrowse_links( self, dataset, type, app, base_url ):
+        ret_val = []
+        seqid, start, stop = self.get_estimated_display_viewport( dataset )
+        if seqid is not None:
+            for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
+                if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
+                    if seqid.startswith( 'chr' ) and len( seqid ) > 3:
+                        seqid = seqid[3:]
+                    redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, seqid, start, stop ) )
+                    link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
+                    ret_val.append( ( site_name, link ) )
+        return ret_val
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in gff format
+
+        GFF lines have nine required fields that must be tab-separated.
+
+        For complete details see http://genome.ucsc.edu/FAQ/FAQformat#format3
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'gff_version_3.gff' )
+        >>> Gff().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'test.gff' )
+        >>> Gff().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, '\t' )
+        try:
+            if len(headers) < 2:
+                return False
+            for hdr in headers:
+                if hdr and hdr[0].startswith( '##gff-version' ) and hdr[0].find( '2' ) < 0:
+                    return False
+                if hdr and hdr[0] and not hdr[0].startswith( '#' ):
+                    if len(hdr) != 9:
+                        return False
+                    try:
+                        int( hdr[3] )
+                        int( hdr[4] )
+                    except:
+                        return False
+                    if hdr[5] != '.':
+                        try:
+                            float( hdr[5] )
+                        except:
+                            return False
+                    if hdr[6] not in data.valid_strand:
+                        return False
+            return True
+        except:
+            return False
+
+    # ------------- Dataproviders
+    # redefine bc super is Tabular
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dataprovider( self, dataset, **settings ):
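+        # positional columns 0, 3 and 4 are the GFF seqid, start and end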
+        return dataproviders.dataset.GenomicRegionDataProvider( dataset, 0, 3, 4, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region-dict',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'named_columns' ] = True
+        return self.genomic_region_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'interval',
+                                                    dataproviders.dataset.IntervalDataProvider.settings )
+    def interval_dataprovider( self, dataset, **settings ):
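+        # 0, 3, 4, 6, 2 are assumed here to map the GFF seqid, start, end,
+        # strand and feature-type (name) columns described in sniff above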
+        return dataproviders.dataset.IntervalDataProvider( dataset, 0, 3, 4, 6, 2, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'interval-dict',
+                                                    dataproviders.dataset.IntervalDataProvider.settings )
+    def interval_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'named_columns' ] = True
+        return self.interval_dataprovider( dataset, **settings )
+
+
+class Gff3( Gff ):
+    """Tab delimited data in Gff3 format"""
+    edam_format = "format_1975"
+    file_ext = "gff3"
+    valid_gff3_strand = ['+', '-', '.', '?']
+    valid_gff3_phase = ['.', '0', '1', '2']
+    column_names = [ 'Seqid', 'Source', 'Type', 'Start', 'End', 'Score', 'Strand', 'Phase', 'Attributes' ]
+    track_type = Interval.track_type
+
+    """Add metadata elements"""
+    MetadataElement( name="column_types", default=['str', 'str', 'str', 'int', 'int', 'float', 'str', 'int', 'list'],
+                     param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False )
+
+    def __init__(self, **kwd):
+        """Initialize datatype, by adding GBrowse display app"""
+        Gff.__init__(self, **kwd)
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        self.set_attribute_metadata( dataset )
+
+        i = 0
+        for i, line in enumerate( open( dataset.file_name ) ):
+            line = line.rstrip('\r\n')
+            if line and not line.startswith( '#' ):
+                elems = line.split( '\t' )
+                valid_start = False
+                valid_end = False
+                if len( elems ) == 9:
+                    try:
+                        start = int( elems[3] )
+                        valid_start = True
+                    except:
+                        if elems[3] == '.':
+                            valid_start = True
+                    try:
+                        end = int( elems[4] )
+                        valid_end = True
+                    except:
+                        if elems[4] == '.':
+                            valid_end = True
+                    strand = elems[6]
+                    phase = elems[7]
+                    if valid_start and valid_end and start < end and strand in self.valid_gff3_strand and phase in self.valid_gff3_phase:
+                        break
+        Tabular.set_meta( self, dataset, overwrite=overwrite, skip=i )
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in gff version 3 format
+
+        GFF 3 format:
+
+        1) adds a mechanism for representing more than one level
+           of hierarchical grouping of features and subfeatures.
+        2) separates the ideas of group membership and feature name/id
+        3) constrains the feature type field to be taken from a controlled
+           vocabulary.
+        4) allows a single feature, such as an exon, to belong to more than
+           one group at a time.
+        5) provides an explicit convention for pairwise alignments
+        6) provides an explicit convention for features that occupy disjunct regions
+
+        The format consists of 9 columns, separated by tabs (NOT spaces).
+
+        Undefined fields are replaced with the "." character, as described in the original GFF spec.
+
+        For complete details see http://song.sourceforge.net/gff3.shtml
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'test.gff' )
+        >>> Gff3().sniff( fname )
+        False
+        >>> fname = get_test_fname('gff_version_3.gff')
+        >>> Gff3().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, '\t' )
+        try:
+            if len(headers) < 2:
+                return False
+            for hdr in headers:
+                if hdr and hdr[0].startswith( '##gff-version' ) and hdr[0].find( '3' ) >= 0:
+                    return True
+                elif hdr and hdr[0].startswith( '##gff-version' ) and hdr[0].find( '3' ) < 0:
+                    return False
+                # Header comments may have been stripped, so inspect the data
+                if hdr and hdr[0] and not hdr[0].startswith( '#' ):
+                    if len(hdr) != 9:
+                        return False
+                    try:
+                        int( hdr[3] )
+                    except:
+                        if hdr[3] != '.':
+                            return False
+                    try:
+                        int( hdr[4] )
+                    except:
+                        if hdr[4] != '.':
+                            return False
+                    if hdr[5] != '.':
+                        try:
+                            float( hdr[5] )
+                        except:
+                            return False
+                    if hdr[6] not in self.valid_gff3_strand:
+                        return False
+                    if hdr[7] not in self.valid_gff3_phase:
+                        return False
+            return True
+        except:
+            return False
+
+
+class Gtf( Gff ):
+    """Tab delimited data in Gtf format"""
+    edam_format = "format_2306"
+    file_ext = "gtf"
+    column_names = [ 'Seqname', 'Source', 'Feature', 'Start', 'End', 'Score', 'Strand', 'Frame', 'Attributes' ]
+    track_type = Interval.track_type
+
+    """Add metadata elements"""
+    MetadataElement( name="columns", default=9, desc="Number of columns", readonly=True, visible=False )
+    MetadataElement( name="column_types", default=['str', 'str', 'str', 'int', 'int', 'float', 'str', 'int', 'list'],
+                     param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False )
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in gtf format
+
+        GTF lines have nine required fields that must be tab-separated. The first eight GTF fields are the same as GFF.
+        The group field has been expanded into a list of attributes. Each attribute consists of a type/value pair.
+        Attributes must end in a semi-colon, and be separated from any following attribute by exactly one space.
+        The attribute list must begin with the two mandatory attributes:
+
+            gene_id value - A globally unique identifier for the genomic source of the sequence.
+            transcript_id value - A globally unique identifier for the predicted transcript.
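+
+        For example, an attribute list might read (identifiers illustrative):
+            gene_id "ENSG00000223972"; transcript_id "ENST00000456328";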
+
+        For complete details see http://genome.ucsc.edu/FAQ/FAQformat#format4
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( '1.bed' )
+        >>> Gtf().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'test.gff' )
+        >>> Gtf().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'test.gtf' )
+        >>> Gtf().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, '\t' )
+        try:
+            if len(headers) < 2:
+                return False
+            for hdr in headers:
+                if hdr and hdr[0].startswith( '##gff-version' ) and hdr[0].find( '2' ) < 0:
+                    return False
+                if hdr and hdr[0] and not hdr[0].startswith( '#' ):
+                    if len(hdr) != 9:
+                        return False
+                    try:
+                        int( hdr[3] )
+                        int( hdr[4] )
+                    except:
+                        return False
+                    if hdr[5] != '.':
+                        try:
+                            float( hdr[5] )
+                        except:
+                            return False
+                    if hdr[6] not in data.valid_strand:
+                        return False
+
+                    # Check attributes for gene_id, transcript_id
+                    attributes = parse_gff_attributes( hdr[8] )
+                    if len( attributes ) >= 2:
+                        if 'gene_id' not in attributes:
+                            return False
+                        if 'transcript_id' not in attributes:
+                            return False
+                    else:
+                        return False
+            return True
+        except:
+            return False
+
+
+@dataproviders.decorators.has_dataproviders
+class Wiggle( Tabular, _RemoteCallMixin ):
+    """Tab delimited data in wiggle format"""
+    edam_format = "format_3005"
+    file_ext = "wig"
+    track_type = "LineTrack"
+    data_sources = { "data": "bigwig", "index": "bigwig" }
+
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
+
+    def __init__( self, **kwd ):
+        Tabular.__init__( self, **kwd )
+        self.add_display_app( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
+        self.add_display_app( 'gbrowse', 'display in Gbrowse', 'as_gbrowse_display_file', 'gbrowse_links' )
+
+    def get_estimated_display_viewport( self, dataset ):
+        """Return a chrom, start, stop tuple for viewing a file."""
+        viewport_feature_count = 100  # viewport should check at least 100 features; excludes comment lines
+        max_line_count = max( viewport_feature_count, 500 )  # maximum number of lines to check; includes comment lines
+        if self.displayable( dataset ):
+            try:
+                chrom = None
+                start = sys.maxsize
+                end = 0
+                span = 1
+                step = None
+                fh = open( dataset.file_name )
+                while True:
+                    line = fh.readline( VIEWPORT_READLINE_BUFFER_SIZE )
+                    if not line:
+                        break  # EOF
+                    try:
+                        if line.startswith( "browser" ):
+                            chr_info = line.rstrip( '\n\r' ).split()[-1]
+                            chrom, coords = chr_info.split( ":" )
+                            start, end = map( int, coords.split( "-" ) )
+                            break  # use the browser line
+                        # variableStep chrom=chr20
+                        if line and ( line.lower().startswith( "variablestep" ) or line.lower().startswith( "fixedstep" ) ):
+                            if chrom is not None:
+                                break  # different chrom or different section of the chrom
+                            chrom = line.rstrip( '\n\r' ).split("chrom=")[1].split()[0]
+                            if 'span=' in line:
+                                span = int( line.rstrip( '\n\r' ).split("span=")[1].split()[0] )
+                            if 'step=' in line:
+                                step = int( line.rstrip( '\n\r' ).split("step=")[1].split()[0] )
+                                start = int( line.rstrip( '\n\r' ).split("start=")[1].split()[0] )
+                        else:
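+                            # data line: fixedStep advances by 'step' per
+                            # value; variableStep carries the start position
+                            # in the first field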
+                            fields = line.rstrip( '\n\r' ).split()
+                            if fields:
+                                if step is not None:
+                                    if not end:
+                                        end = start + span
+                                    else:
+                                        end += step
+                                else:
+                                    start = min( int( fields[0] ), start )
+                                    end = max( end, int( fields[0] ) + span )
+                                viewport_feature_count -= 1
+                    except:
+                        pass
+                    # make sure we are at the next new line
+                    readline_count = VIEWPORT_MAX_READS_PER_LINE
+                    while line.rstrip( '\n\r' ) == line:
+                        assert readline_count > 0, Exception( 'Viewport readline count exceeded for dataset %s.' % dataset.id )
+                        line = fh.readline( VIEWPORT_READLINE_BUFFER_SIZE )
+                        if not line:
+                            break  # EOF
+                        readline_count -= 1
+                    max_line_count -= 1
+                    if not viewport_feature_count or not max_line_count:
+                        # exceeded viewport or total line count to check
+                        break
+                if chrom is not None:
+                    return ( chrom, str( start ), str( end ) )  # Necessary to return strings?
+            except Exception as e:
+                # unexpected error
+                log.exception( str( e ) )
+        return ( None, None, None )  # could not determine viewport
+
+    def gbrowse_links( self, dataset, type, app, base_url ):
+        ret_val = []
+        chrom, start, stop = self.get_estimated_display_viewport( dataset )
+        if chrom is not None:
+            for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
+                if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
+                    if chrom.startswith( 'chr' ) and len( chrom ) > 3:
+                        chrom = chrom[3:]
+                    redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, chrom, start, stop ) )
+                    link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
+                    ret_val.append( ( site_name, link ) )
+        return ret_val
+
+    def ucsc_links( self, dataset, type, app, base_url ):
+        ret_val = []
+        chrom, start, stop = self.get_estimated_display_viewport( dataset )
+        if chrom is not None:
+            for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
+                if site_name in app.datatypes_registry.get_display_sites('ucsc'):
+                    redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % ( site_url, dataset.dbkey, chrom, start, stop ) )
+                    link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
+                    ret_val.append( ( site_name, link ) )
+        return ret_val
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return Tabular.make_html_table( self, dataset, skipchars=['track', '#'] )
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        max_data_lines = None
+        i = 0
+        for i, line in enumerate( open( dataset.file_name ) ):
+            line = line.rstrip('\r\n')
+            if line and not line.startswith( '#' ):
+                elems = line.split( '\t' )
+                try:
+                    float( elems[0] )  # "Wiggle track data values can be integer or real, positive or negative values"
+                    break
+                except:
+                    do_break = False
+                    for col_startswith in data.col1_startswith:
+                        if elems[0].lower().startswith( col_startswith ):
+                            do_break = True
+                            break
+                    if do_break:
+                        break
+        if self.max_optional_metadata_filesize >= 0 and dataset.get_size() > self.max_optional_metadata_filesize:
+            # we'll arbitrarily only use the first 100 data lines in this wig file to calculate tabular attributes (column types)
+            # this should be sufficient, except when we have mixed wig track types (bed, variable, fixed),
+            #    but those cases are not a single table that would have consistent column definitions
+            # optional metadata values set in Tabular class will be 'None'
+            max_data_lines = 100
+        Tabular.set_meta( self, dataset, overwrite=overwrite, skip=i, max_data_lines=max_data_lines )
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in wiggle format
+
+        The .wig format is line-oriented. Wiggle data is preceded by a track definition line,
+        which adds a number of options for controlling the default display of this track.
+        Following the track definition line is the track data, which can be entered in several
+        different formats.
+
+        The track definition line begins with the word 'track' followed by the track type.
+        The track type with version is REQUIRED, and it currently must be wiggle_0.  For example,
+        track type=wiggle_0...
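+
+        A minimal variableStep example (values illustrative):
+            variableStep chrom=chr19 span=150
+            59304701 10.0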
+
+        For complete details see http://genome.ucsc.edu/goldenPath/help/wiggle.html
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'interval1.bed' )
+        >>> Wiggle().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'wiggle.wig' )
+        >>> Wiggle().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, None )
+        try:
+            for hdr in headers:
+                if len(hdr) > 1 and hdr[0] == 'track' and hdr[1].startswith('type=wiggle'):
+                    return True
+            return False
+        except:
+            return False
+
+    def get_track_window(self, dataset, data, start, end):
+        """
+        Assumes we have a numpy file.
+        """
+        region_size = end - start
+        # Determine appropriate resolution to plot ~1000 points
+        resolution = ( 10 ** math.ceil( math.log10( region_size / 1000 ) ) )
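+        # e.g. a 50,000 bp window gives 10 ** ceil(log10(50)) == 100, i.e. one
+        # point per 100 bases (~500 points before clamping)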
+        # Restrict to valid range
+        resolution = min( resolution, 100000 )
+        resolution = max( resolution, 1 )
+        # Memory map the array (don't load all the data)
+        data = numpy.load( data )
+        # Grab just what we need
+        t_start = math.floor( start / resolution )
+        t_end = math.ceil( end / resolution )
+        x = numpy.arange( t_start, t_end ) * resolution
+        y = data[ t_start : t_end ]
+
+        return zip(x.tolist(), y.tolist())
+
+    def get_track_resolution( self, dataset, start, end):
+        region_size = end - start
+        # Determine appropriate resolution to plot ~1000 points
+        resolution = math.ceil( 10 ** math.ceil( math.log10( region_size / 1000 ) ) )
+        # Restrict to valid range
+        resolution = min( resolution, 100000 )
+        resolution = max( resolution, 1 )
+        return resolution
+
+    # ------------- Dataproviders
+    @dataproviders.decorators.dataprovider_factory( 'wiggle', dataproviders.dataset.WiggleDataProvider.settings )
+    def wiggle_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.dataset.WiggleDataProvider( dataset_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'wiggle-dict', dataproviders.dataset.WiggleDataProvider.settings )
+    def wiggle_dict_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        settings[ 'named_columns' ] = True
+        return dataproviders.dataset.WiggleDataProvider( dataset_source, **settings )
+
+
+class CustomTrack( Tabular ):
+    """UCSC CustomTrack"""
+    edam_format = "format_3588"
+    file_ext = "customtrack"
+
+    def __init__(self, **kwd):
+        """Initialize interval datatype, by adding UCSC display app"""
+        Tabular.__init__(self, **kwd)
+        self.add_display_app( 'ucsc', 'display at UCSC', 'as_ucsc_display_file', 'ucsc_links' )
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        Tabular.set_meta( self, dataset, overwrite=overwrite, skip=1 )
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return Tabular.make_html_table( self, dataset, skipchars=['track', '#'] )
+
+    def get_estimated_display_viewport( self, dataset, chrom_col=None, start_col=None, end_col=None ):
+        """Return a chrom, start, stop tuple for viewing a file."""
+        # FIXME: only BED and WIG custom tracks are currently supported
+        # As per previously existing behavior, the viewport will only cover the first interval
+        max_line_count = 100  # maximum number of lines to check; includes comment lines
+        variable_step_wig = False
+        chrom = None
+        span = 1
+        if self.displayable( dataset ):
+            try:
+                fh = open( dataset.file_name )
+                while True:
+                    line = fh.readline( VIEWPORT_READLINE_BUFFER_SIZE )
+                    if not line:
+                        break  # EOF
+                    if not line.startswith( '#' ):
+                        try:
+                            if variable_step_wig:
+                                fields = line.rstrip().split()
+                                if len( fields ) == 2:
+                                    start = int( fields[ 0 ] )
+                                    return ( chrom, str( start ), str( start + span ) )
+                            elif line and ( line.lower().startswith( "variablestep" ) or line.lower().startswith( "fixedstep" ) ):
+                                chrom = line.rstrip( '\n\r' ).split("chrom=")[1].split()[0]
+                                if 'span=' in line:
+                                    span = int( line.rstrip( '\n\r' ).split("span=")[1].split()[0] )
+                                if 'start=' in line:
+                                    start = int( line.rstrip( '\n\r' ).split("start=")[1].split()[0] )
+                                    return ( chrom, str( start ), str( start + span )  )
+                                else:
+                                    variable_step_wig = True
+                            else:
+                                fields = line.rstrip().split( '\t' )
+                                if len( fields ) >= 3:
+                                    chrom = fields[ 0 ]
+                                    start = int( fields[ 1 ] )
+                                    end = int( fields[ 2 ] )
+                                    return ( chrom, str( start ), str( end ) )
+                        except Exception:
+                            # most likely a non-integer field has been encountered for start / stop
+                            continue
+                    # make sure we are at the next new line
+                    readline_count = VIEWPORT_MAX_READS_PER_LINE
+                    while line.rstrip( '\n\r' ) == line:
+                        assert readline_count > 0, Exception( 'Viewport readline count exceeded for dataset %s.' % dataset.id )
+                        line = fh.readline( VIEWPORT_READLINE_BUFFER_SIZE )
+                        if not line:
+                            break  # EOF
+                        readline_count -= 1
+                    max_line_count -= 1
+                    if not max_line_count:
+                        # exceeded viewport or total line count to check
+                        break
+            except Exception as e:
+                # unexpected error
+                log.exception( str( e ) )
+        return ( None, None, None )  # could not determine viewport
+
+    def ucsc_links( self, dataset, type, app, base_url ):
+        ret_val = []
+        chrom, start, stop = self.get_estimated_display_viewport(dataset)
+        if chrom is not None:
+            for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
+                if site_name in app.datatypes_registry.get_display_sites('ucsc'):
+                    internal_url = "%s" % url_for( controller='dataset', dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
+                    display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
+                    redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop ) )
+                    link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+                    ret_val.append( (site_name, link) )
+        return ret_val
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in customtrack format.
+
+        CustomTrack files are built within Galaxy and are basically bed or interval files with the first line looking
+        something like this.
+
+        track name="User Track" description="User Supplied Track (from Galaxy)" color=0,0,0 visibility=1
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'complete.bed' )
+        >>> CustomTrack().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'ucsc.customtrack' )
+        >>> CustomTrack().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, None )
+        first_line = True
+        for hdr in headers:
+            if first_line:
+                first_line = False
+                try:
+                    if hdr[0].startswith('track'):
+                        color_found = False
+                        visibility_found = False
+                        for elem in hdr[1:]:
+                            if elem.startswith('color'):
+                                color_found = True
+                            if elem.startswith('visibility'):
+                                visibility_found = True
+                            if color_found and visibility_found:
+                                break
+                        if not color_found or not visibility_found:
+                            return False
+                    else:
+                        return False
+                except Exception:
+                    return False
+            else:
+                try:
+                    if hdr[0] and not hdr[0].startswith( '#' ):
+                        if len( hdr ) < 3:
+                            return False
+                        try:
+                            int( hdr[1] )
+                            int( hdr[2] )
+                        except ValueError:
+                            return False
+                except Exception:
+                    return False
+        return True
+
+
+class ENCODEPeak( Interval ):
+    '''
+    Human ENCODE peak format. There are both broad and narrow peak formats.
+    Formats are very similar; narrow peak has an additional column, though.
+
+    Broad peak ( http://genome.ucsc.edu/FAQ/FAQformat#format13 ):
+    This format is used to provide called regions of signal enrichment based
+    on pooled, normalized (interpreted) data. It is a BED 6+3 format.
+
+    Narrow peak ( http://genome.ucsc.edu/FAQ/FAQformat#format12 ):
+    This format is used to provide called peaks of signal enrichment based on
+    pooled, normalized (interpreted) data. It is a BED 6+4 format.
+    '''
+    edam_format = "format_3612"
+    file_ext = "encodepeak"
+    column_names = [ 'Chrom', 'Start', 'End', 'Name', 'Score', 'Strand', 'SignalValue', 'pValue', 'qValue', 'Peak' ]
+    data_sources = { "data": "tabix", "index": "bigwig" }
+
+    """Add metadata elements"""
+    MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+    MetadataElement( name="startCol", default=2, desc="Start column", param=metadata.ColumnParameter )
+    MetadataElement( name="endCol", default=3, desc="End column", param=metadata.ColumnParameter )
+    MetadataElement( name="strandCol", desc="Strand column (click box & select)", param=metadata.ColumnParameter, optional=True, no_value=0 )
+    MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
+
+    def sniff( self, filename ):
+        return False
+
+
+class ChromatinInteractions( Interval ):
+    '''
+    Chromatin interactions obtained from 3C/5C/Hi-C experiments.
+    '''
+    file_ext = "chrint"
+    track_type = "DiagonalHeatmapTrack"
+    data_sources = { "data": "tabix", "index": "bigwig" }
+    column_names = [ 'Chrom1', 'Start1', 'End1', 'Chrom2', 'Start2', 'End2', 'Value' ]
+
+    """Add metadata elements"""
+    MetadataElement( name="chrom1Col", default=1, desc="Chrom1 column", param=metadata.ColumnParameter )
+    MetadataElement( name="start1Col", default=2, desc="Start1 column", param=metadata.ColumnParameter )
+    MetadataElement( name="end1Col", default=3, desc="End1 column", param=metadata.ColumnParameter )
+    MetadataElement( name="chrom2Col", default=4, desc="Chrom2 column", param=metadata.ColumnParameter )
+    MetadataElement( name="start2Col", default=5, desc="Start2 column", param=metadata.ColumnParameter )
+    MetadataElement( name="end2Col", default=6, desc="End2 column", param=metadata.ColumnParameter )
+    MetadataElement( name="valueCol", default=7, desc="Value column", param=metadata.ColumnParameter )
+
+    MetadataElement( name="columns", default=7, desc="Number of columns", readonly=True, visible=False )
+
+    def sniff( self, filename ):
+        return False
+
+
+class ScIdx(Tabular):
+    """
+    ScIdx files are 1-based and consist of strand-specific coordinate counts.
+    The first line is a comment and the second line holds the column labels:
+    'chrom', 'index', 'forward', 'reverse', 'value'.
+    Each subsequent line consists of five columns of data:
+    chromosome name (type str), peak index (type int), forward strand peak
+    count (type int), reverse strand peak count (type int) and value (type int).
+    The value of the 5th 'value' column is the sum of the forward and reverse
+    peak count values.
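+
+    An illustrative (made-up) fragment::
+
+        # 2015-11-23 20:18:56.51;input.bam;READ1
+        chrom	index	forward	reverse	value
+        chr1	10	5	0	5
+        chr1	20	0	7	7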
+    """
+    file_ext = "scidx"
+
+    MetadataElement(name="columns", default=0, desc="Number of columns", readonly=True, visible=False)
+    MetadataElement(name="column_types", default=[], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False, no_value=[])
+
+    def __init__(self, **kwd):
+        """
+        Initialize scidx datatype.
+        """
+        Tabular.__init__(self, **kwd)
+        # The second line of the dataset also displays these labels;
+        # they are set here so tabular rendering knows the columns.
+        self.column_names = ['chrom', 'index', 'forward', 'reverse', 'value']
+
+    def sniff(self, filename):
+        """
+        Checks for 'scidx-ness.'
+        """
+        fh = None
+        try:
+            count = 0
+            fh = open(filename, "r")
+            while True:
+                line = fh.readline()
+                line = line.strip()
+                # The first line is always a comment like this:
+                # 2015-11-23 20:18:56.51;input.bam;READ1
+                if count == 0:
+                    if line.startswith('#'):
+                        count += 1
+                        continue
+                    else:
+                        return False
+                if not line:
+                    # EOF
+                    if count > 1:
+                        # The second line is always the labels:
+                        # chrom index forward reverse value
+                        # We need at least the column labels and a data line.
+                        return True
+                    return False
+                # Skip first line.
+                if count > 1:
+                    items = line.split('\t')
+                    if len(items) != 5:
+                        return False
+                    index = items[1]
+                    if not index.isdigit():
+                        return False
+                    forward = items[2]
+                    if not forward.isdigit():
+                        return False
+                    reverse = items[3]
+                    if not reverse.isdigit():
+                        return False
+                    value = items[4]
+                    if not value.isdigit():
+                        return False
+                    if int(forward) + int(reverse) != int(value):
+                        return False
+                if count == 100:
+                    return True
+                count += 1
+        except Exception:
+            return False
+        finally:
+            if fh:
+                fh.close()
+        return False
+
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod(sys.modules[__name__])
diff --git a/lib/galaxy/datatypes/metadata.py b/lib/galaxy/datatypes/metadata.py
new file mode 100644
index 0000000..bc6e06f
--- /dev/null
+++ b/lib/galaxy/datatypes/metadata.py
@@ -0,0 +1,46 @@
+""" Expose the model metadata module as a datatype module also,
+allowing it to live in galaxy.model means the model module doesn't
+have any dependencies on th datatypes module. This module will need
+to remain here for datatypes living in the tool shed so we might as
+well keep and use this interface from the datatypes module.
+"""
+
+from galaxy.model.metadata import (
+    ColumnParameter,
+    ColumnTypesParameter,
+    DBKeyParameter,
+    DictParameter,
+    FileParameter,
+    JobExternalOutputMetadataWrapper,
+    ListParameter,
+    MetadataCollection,
+    MetadataElement,
+    MetadataElementSpec,
+    MetadataParameter,
+    MetadataSpecCollection,
+    MetadataTempFile,
+    PythonObjectParameter,
+    RangeParameter,
+    SelectParameter,
+    Statement,
+)
+
+__all__ = (
+    "Statement",
+    "MetadataElement",
+    "MetadataCollection",
+    "MetadataSpecCollection",
+    "MetadataParameter",
+    "MetadataElementSpec",
+    "SelectParameter",
+    "DBKeyParameter",
+    "RangeParameter",
+    "ColumnParameter",
+    "ColumnTypesParameter",
+    "ListParameter",
+    "DictParameter",
+    "PythonObjectParameter",
+    "FileParameter",
+    "MetadataTempFile",
+    "JobExternalOutputMetadataWrapper",
+)
diff --git a/lib/galaxy/datatypes/molecules.py b/lib/galaxy/datatypes/molecules.py
new file mode 100644
index 0000000..8482ad1
--- /dev/null
+++ b/lib/galaxy/datatypes/molecules.py
@@ -0,0 +1,820 @@
+# -*- coding: utf-8 -*-
+
+from galaxy.datatypes import data
+import logging
+from galaxy.datatypes.sniff import get_headers
+from galaxy.datatypes.data import get_file_peek
+from galaxy.datatypes.tabular import Tabular
+from galaxy.datatypes.binary import Binary
+from galaxy.datatypes.xml import GenericXml
+import subprocess
+import os
+
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes import metadata
+
+log = logging.getLogger(__name__)
+
+
+def count_special_lines(word, filename, invert=False):
+    """
+        Search for special 'words' in a file using the grep tool;
+        grep is used to speed up the searching and counting.
+        The number of hits is returned.
+    """
+    try:
+        cmd = ["grep", "-c"]
+        if invert:
+            cmd.append('-v')
+        cmd.extend([word, filename])
+        out = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+        return int(out.communicate()[0].split()[0])
+    except Exception:
+        pass
+    return 0
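+
+# Example usage (illustrative): count_special_lines("^\$\$\$\$", "ligands.sdf")
+# counts SDF record terminators in a hypothetical file; with invert=True the
+# lines that do *not* match the pattern are counted instead (grep -c -v).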
+
+
+def count_lines(filename, non_empty=False):
+    """
+        Count the number of lines in the file 'filename'.
+    """
+    try:
+        if non_empty:
+            out = subprocess.Popen(['grep', '-cve', '^\s*$', filename], stdout=subprocess.PIPE)
+        else:
+            out = subprocess.Popen(['wc', '-l', filename], stdout=subprocess.PIPE)
+        return int(out.communicate()[0].split()[0])
+    except Exception:
+        pass
+    return 0
+
+
+class GenericMolFile(data.Text):
+    """
+        Abstract base class for most of the molecule file types.
+    """
+    MetadataElement(name="number_of_molecules", default=0, desc="Number of molecules", readonly=True, visible=True, optional=True, no_value=0)
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            if (dataset.metadata.number_of_molecules == 1):
+                dataset.blurb = "1 molecule"
+            else:
+                dataset.blurb = "%s molecules" % dataset.metadata.number_of_molecules
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def get_mime(self):
+        return 'text/plain'
+
+
+class MOL(GenericMolFile):
+    file_ext = "mol"
+
+    def set_meta(self, dataset, **kwd):
+        """
+        Set the number of molecules; in the case of MOL it is always one.
+        """
+        dataset.metadata.number_of_molecules = 1
+
+
+class SDF(GenericMolFile):
+    file_ext = "sdf"
+
+    def sniff(self, filename):
+        """
+        Try to guess if the file is an SDF file.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('drugbank_drugs.sdf')
+        >>> SDF().sniff(fname)
+        True
+
+        >>> fname = get_test_fname('drugbank_drugs.cml')
+        >>> SDF().sniff(fname)
+        False
+        """
+        counter = count_special_lines("^M\s*END", filename) + count_special_lines("^\$\$\$\$", filename)
+        if counter > 0 and counter % 2 == 0:
+            return True
+        else:
+            return False
+
+    def set_meta(self, dataset, **kwd):
+        """
+        Set the number of molecules in dataset.
+        """
+        dataset.metadata.number_of_molecules = count_special_lines("^\$\$\$\$", dataset.file_name)
+
+    def split(cls, input_datasets, subdir_generator_function, split_params):
+        """
+        Split the input files by molecule records.
+        """
+        if split_params is None:
+            return None
+
+        if len(input_datasets) > 1:
+            raise Exception("SD-file splitting does not support multiple files")
+        input_files = [ds.file_name for ds in input_datasets]
+
+        chunk_size = None
+        if split_params['split_mode'] == 'number_of_parts':
+            raise Exception('Split mode "%s" is currently not implemented for SD-files.' % split_params['split_mode'])
+        elif split_params['split_mode'] == 'to_size':
+            chunk_size = int(split_params['split_size'])
+        else:
+            raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+
+        def _read_sdf_records(filename):
+            lines = []
+            with open(filename) as handle:
+                for line in handle:
+                    lines.append(line)
+                    if line.startswith("$$$$"):
+                        yield lines
+                        lines = []
+
+        def _write_part_sdf_file(accumulated_lines):
+            part_dir = subdir_generator_function()
+            part_path = os.path.join(part_dir, os.path.basename(input_files[0]))
+            part_file = open(part_path, 'w')
+            part_file.writelines(accumulated_lines)
+            part_file.close()
+
+        try:
+            sdf_records = _read_sdf_records(input_files[0])
+            sdf_lines_accumulated = []
+            for counter, sdf_record in enumerate(sdf_records, start=1):
+                sdf_lines_accumulated.extend(sdf_record)
+                if counter % chunk_size == 0:
+                    _write_part_sdf_file(sdf_lines_accumulated)
+                    sdf_lines_accumulated = []
+            if sdf_lines_accumulated:
+                _write_part_sdf_file(sdf_lines_accumulated)
+        except Exception as e:
+            log.error('Unable to split files: %s' % str(e))
+            raise
+    split = classmethod(split)
+
+
+class MOL2(GenericMolFile):
+    file_ext = "mol2"
+
+    def sniff(self, filename):
+        """
+        Try to guess if the file is a MOL2 file.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('drugbank_drugs.mol2')
+        >>> MOL2().sniff(fname)
+        True
+
+        >>> fname = get_test_fname('drugbank_drugs.cml')
+        >>> MOL2().sniff(fname)
+        False
+        """
+        if count_special_lines("@<TRIPOS>MOLECULE", filename) > 0:
+            return True
+        else:
+            return False
+
+    def set_meta(self, dataset, **kwd):
+        """
+        Set the number of molecules in dataset.
+        """
+        dataset.metadata.number_of_molecules = count_special_lines("@<TRIPOS>MOLECULE", dataset.file_name)
+
+    def split(cls, input_datasets, subdir_generator_function, split_params):
+        """
+        Split the input files by molecule records.
+        """
+        if split_params is None:
+            return None
+
+        if len(input_datasets) > 1:
+            raise Exception("MOL2-file splitting does not support multiple files")
+        input_files = [ds.file_name for ds in input_datasets]
+
+        chunk_size = None
+        if split_params['split_mode'] == 'number_of_parts':
+            raise Exception('Split mode "%s" is currently not implemented for MOL2-files.' % split_params['split_mode'])
+        elif split_params['split_mode'] == 'to_size':
+            chunk_size = int(split_params['split_size'])
+        else:
+            raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+
+        def _read_mol2_records(filename):
+            lines = []
+            start = True
+            with open(filename) as handle:
+                for line in handle:
+                    if line.startswith("@<TRIPOS>MOLECULE"):
+                        if start:
+                            start = False
+                        else:
+                            yield lines
+                            lines = []
+                    lines.append(line)
+
+        def _write_part_mol2_file(accumulated_lines):
+            part_dir = subdir_generator_function()
+            part_path = os.path.join(part_dir, os.path.basename(input_files[0]))
+            part_file = open(part_path, 'w')
+            part_file.writelines(accumulated_lines)
+            part_file.close()
+
+        try:
+            mol2_records = _read_mol2_records(input_files[0])
+            mol2_lines_accumulated = []
+            for counter, mol2_record in enumerate(mol2_records, start=1):
+                mol2_lines_accumulated.extend(mol2_record)
+                if counter % chunk_size == 0:
+                    _write_part_mol2_file(mol2_lines_accumulated)
+                    mol2_lines_accumulated = []
+            if mol2_lines_accumulated:
+                _write_part_mol2_file(mol2_lines_accumulated)
+        except Exception as e:
+            log.error('Unable to split files: %s' % str(e))
+            raise
+    split = classmethod(split)
+
+
+class FPS(GenericMolFile):
+    """
+    chemfp fingerprint file: http://code.google.com/p/chem-fingerprints/wiki/FPS
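+
+    An FPS file starts with a '#FPS1' version line, usually followed by
+    '#'-prefixed metadata lines, and then one hex-encoded fingerprint plus
+    identifier per line, e.g. (illustrative, not real data)::
+
+        #FPS1
+        #num_bits=166
+        07de8d00200000000000000000000000008006000c	CHEMBL12345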
+    """
+    file_ext = "fps"
+
+    def sniff(self, filename):
+        """
+        Try to guess if the file is a FPS file.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('q.fps')
+        >>> FPS().sniff(fname)
+        True
+
+        >>> fname = get_test_fname('drugbank_drugs.cml')
+        >>> FPS().sniff(fname)
+        False
+        """
+        header = get_headers(filename, sep='\t', count=1)
+        if header[0][0].strip() == '#FPS1':
+            return True
+        else:
+            return False
+
+    def set_meta(self, dataset, **kwd):
+        """
+        Set the number of molecules in dataset.
+        """
+        dataset.metadata.number_of_molecules = count_special_lines('^#', dataset.file_name, invert=True)
+
+    def split(cls, input_datasets, subdir_generator_function, split_params):
+        """
+        Split the input files by fingerprint records.
+        """
+        if split_params is None:
+            return None
+
+        if len(input_datasets) > 1:
+            raise Exception("FPS-file splitting does not support multiple files")
+        input_files = [ds.file_name for ds in input_datasets]
+
+        chunk_size = None
+        if split_params['split_mode'] == 'number_of_parts':
+            raise Exception('Split mode "%s" is currently not implemented for MOL2-files.' % split_params['split_mode'])
+        elif split_params['split_mode'] == 'to_size':
+            chunk_size = int(split_params['split_size'])
+        else:
+            raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+
+        def _write_part_fingerprint_file(accumulated_lines):
+            part_dir = subdir_generator_function()
+            part_path = os.path.join(part_dir, os.path.basename(input_files[0]))
+            part_file = open(part_path, 'w')
+            part_file.writelines(accumulated_lines)
+            part_file.close()
+
+        try:
+            header_lines = []
+            lines_accumulated = []
+            fingerprint_counter = 0
+            for line in open(input_files[0]):
+                if not line.strip():
+                    continue
+                if line.startswith('#'):
+                    header_lines.append(line)
+                else:
+                    fingerprint_counter += 1
+                    lines_accumulated.append(line)
+                if fingerprint_counter != 0 and fingerprint_counter % chunk_size == 0:
+                    _write_part_fingerprint_file(header_lines + lines_accumulated)
+                    lines_accumulated = []
+            if lines_accumulated:
+                _write_part_fingerprint_file(header_lines + lines_accumulated)
+        except Exception as e:
+            log.error('Unable to split files: %s' % str(e))
+            raise
+    split = classmethod(split)
+
+    def merge(split_files, output_file):
+        """
+        Merging fps files requires merging the header manually.
+        We take the header from the first file.
+        """
+        if len(split_files) == 1:
+            # For one file only, use base class method (move/copy)
+            return data.Text.merge(split_files, output_file)
+        if not split_files:
+            raise ValueError("No fps files given, %r, to merge into %s"
+                             % (split_files, output_file))
+        out = open(output_file, "w")
+        first = True
+        for filename in split_files:
+            with open(filename) as handle:
+                for line in handle:
+                    if line.startswith('#'):
+                        if first:
+                            out.write(line)
+                    else:
+                        # Data line: the first file's header has been written,
+                        # so set 'first' to False to skip later files' headers.
+                        first = False
+                        out.write(line)
+        out.close()
+    merge = staticmethod(merge)
+
+
+class OBFS(Binary):
+    """OpenBabel Fastsearch format (fs)."""
+    file_ext = 'fs'
+    composite_type = 'basic'
+    allow_datatype_change = False
+
+    MetadataElement(name="base_name", default='OpenBabel Fastsearch Index',
+                    readonly=True, visible=True, optional=True,)
+
+    def __init__(self, **kwd):
+        """
+            A Fastsearch Index consists of a binary file with the fingerprints
+            and a pointer to the actual molecule file.
+        """
+        Binary.__init__(self, **kwd)
+        self.add_composite_file('molecule.fs', is_binary=True,
+                                description='OpenBabel Fastsearch Index')
+        self.add_composite_file('molecule.sdf', optional=True,
+                                is_binary=False, description='Molecule File')
+        self.add_composite_file('molecule.smi', optional=True,
+                                is_binary=False, description='Molecule File')
+        self.add_composite_file('molecule.inchi', optional=True,
+                                is_binary=False, description='Molecule File')
+        self.add_composite_file('molecule.mol2', optional=True,
+                                is_binary=False, description='Molecule File')
+        self.add_composite_file('molecule.cml', optional=True,
+                                is_binary=False, description='Molecule File')
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        """Set the peek and blurb text."""
+        if not dataset.dataset.purged:
+            dataset.peek = "OpenBabel Fastsearch Index"
+            dataset.blurb = "OpenBabel Fastsearch Index"
+        else:
+            dataset.peek = "file does not exist"
+            dataset.blurb = "file purged from disk"
+
+    def display_peek(self, dataset):
+        """Create HTML content, used for displaying peek."""
+        try:
+            return dataset.peek
+        except Exception:
+            return "OpenBabel Fastsearch Index"
+
+    def display_data(self, trans, data, preview=False, filename=None,
+                     to_ext=None, **kwd):
+        """Apparently an old display method, but still gets called.
+
+        This allows us to format the data shown in the central pane via the "eye" icon.
+        """
+        return "This is a OpenBabel Fastsearch format. You can speed up your similarity and substructure search with it."
+
+    def get_mime(self):
+        """Returns the mime type of the datatype (pretend it is text for peek)"""
+        return 'text/plain'
+
+    def merge(split_files, output_file, extra_merge_args):
+        """Merging Fastsearch indices is not supported."""
+        raise NotImplementedError("Merging Fastsearch indices is not supported.")
+
+    def split(cls, input_datasets, subdir_generator_function, split_params):
+        """Splitting Fastsearch indices is not supported."""
+        if split_params is None:
+            return None
+        raise NotImplementedError("Splitting Fastsearch indices is not possible.")
+
+
+class DRF(GenericMolFile):
+    file_ext = "drf"
+
+    def set_meta(self, dataset, **kwd):
+        """
+        Set the number of molecules in dataset.
+        """
+        dataset.metadata.number_of_molecules = count_special_lines('\"ligand id\"', dataset.file_name, invert=True)
+
+
+class PHAR(GenericMolFile):
+    """
+    Pharmacophore database format from silicos-it.
+    """
+    file_ext = "phar"
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = "pharmacophore"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class PDB(GenericMolFile):
+    """
+    Protein Data Bank format.
+    http://www.wwpdb.org/documentation/format33/v3.3.html
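+
+    The sniffer below checks for the standard header records (HEADER, TITLE,
+    COMPND, SOURCE, KEYWDS, EXPDTA); an entry typically begins like this
+    (illustrative example, not a real entry)::
+
+        HEADER    HYDROLASE                               01-JAN-15   1XYZ
+        TITLE     AN EXAMPLE STRUCTURE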
+    """
+    file_ext = "pdb"
+
+    def sniff(self, filename):
+        """
+        Try to guess if the file is a PDB file.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('5e5z.pdb')
+        >>> PDB().sniff(fname)
+        True
+
+        >>> fname = get_test_fname('drugbank_drugs.cml')
+        >>> PDB().sniff(fname)
+        False
+        """
+        headers = get_headers(filename, sep=' ', count=300)
+        h = t = c = s = k = e = False
+        for line in headers:
+            section_name = line[0].strip()
+            if section_name == 'HEADER':
+                h = True
+            elif section_name == 'TITLE':
+                t = True
+            elif section_name == 'COMPND':
+                c = True
+            elif section_name == 'SOURCE':
+                s = True
+            elif section_name == 'KEYWDS':
+                k = True
+            elif section_name == 'EXPDTA':
+                e = True
+
+        if h and t and c and s and k and e:
+            return True
+        else:
+            return False
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            atom_numbers = count_special_lines("^ATOM", dataset.file_name)
+            hetatm_numbers = count_special_lines("^HETATM", dataset.file_name)
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = "%s atoms and %s HET-atoms" % (atom_numbers, hetatm_numbers)
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class PDBQT(GenericMolFile):
+    """
+    PDBQT Autodock and Autodock Vina format
+    http://autodock.scripps.edu/faqs-help/faq/what-is-the-format-of-a-pdbqt-file
+    """
+    file_ext = "pdbqt"
+
+    def sniff(self, filename):
+        """
+        Try to guess if the file is a PDBQT file.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('NuBBE_1_obabel_3D.pdbqt')
+        >>> PDBQT().sniff(fname)
+        True
+
+        >>> fname = get_test_fname('drugbank_drugs.cml')
+        >>> PDBQT().sniff(fname)
+        False
+        """
+        headers = get_headers(filename, sep=' ', count=300)
+        h = t = c = s = k = False
+        for line in headers:
+            section_name = line[0].strip()
+            if section_name == 'REMARK':
+                h = True
+            elif section_name == 'ROOT':
+                t = True
+            elif section_name == 'ENDROOT':
+                c = True
+            elif section_name == 'BRANCH':
+                s = True
+            elif section_name == 'TORSDOF':
+                k = True
+
+        if h and t and c and s and k:
+            return True
+        else:
+            return False
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            root_numbers = count_special_lines("^ROOT", dataset.file_name)
+            branch_numbers = count_special_lines("^BRANCH", dataset.file_name)
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = "%s roots and %s branches" % (root_numbers, branch_numbers)
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class grd(data.Text):
+    file_ext = "grd"
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = "grids for docking"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class grdtgz(Binary):
+    file_ext = "grd.tgz"
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = 'binary data'
+            dataset.blurb = "compressed grids for docking"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class InChI(Tabular):
+    file_ext = "inchi"
+    column_names = ['InChI']
+    MetadataElement(name="columns", default=2, desc="Number of columns", readonly=True, visible=False)
+    MetadataElement(name="column_types", default=['str'], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False)
+    MetadataElement(name="number_of_molecules", default=0, desc="Number of molecules", readonly=True, visible=True, optional=True, no_value=0)
+
+    def set_meta(self, dataset, **kwd):
+        """
+        Set the number of molecules in dataset.
+        """
+        dataset.metadata.number_of_molecules = self.count_data_lines(dataset)
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            if (dataset.metadata.number_of_molecules == 1):
+                dataset.blurb = "1 molecule"
+            else:
+                dataset.blurb = "%s molecules" % dataset.metadata.number_of_molecules
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff(self, filename):
+        """
+        Try to guess if the file is an InChI file.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('drugbank_drugs.inchi')
+        >>> InChI().sniff(fname)
+        True
+
+        >>> fname = get_test_fname('drugbank_drugs.cml')
+        >>> InChI().sniff(fname)
+        False
+        """
+        inchi_lines = get_headers(filename, sep=' ', count=10)
+        if not inchi_lines:
+            return False
+        for inchi in inchi_lines:
+            if not inchi[0].startswith('InChI='):
+                return False
+        return True
+
+
+class SMILES(Tabular):
+    file_ext = "smi"
+    column_names = ['SMILES', 'TITLE']
+    MetadataElement(name="columns", default=2, desc="Number of columns", readonly=True, visible=False)
+    MetadataElement(name="column_types", default=['str', 'str'], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False)
+    MetadataElement(name="number_of_molecules", default=0, desc="Number of molecules", readonly=True, visible=True, optional=True, no_value=0)
+
+    def set_meta(self, dataset, **kwd):
+        """
+        Set the number of molecules in dataset.
+        """
+        dataset.metadata.number_of_molecules = self.count_data_lines(dataset)
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            if dataset.metadata.number_of_molecules == 1:
+                dataset.blurb = "1 molecule"
+            else:
+                dataset.blurb = "%s molecules" % dataset.metadata.number_of_molecules
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    '''
+    def sniff(self, filename):
+        """
+        It is hard or impossible to sniff a SMILES file. We could
+        try to import the first SMILES and check if it is a molecule, but
+        currently it is not possible to use external libraries in datatype definition files.
+        Moreover, it seems impossible to include OpenBabel as a Python library because OpenBabel
+        is GPL licensed.
+        """
+        self.molecule_number = count_lines(filename, non_empty = True)
+        word_count = count_lines(filename)
+
+        if self.molecule_number != word_count:
+            return False
+
+        if self.molecule_number > 0:
+            # test first 3 SMILES
+            smiles_lines = get_headers(filename, sep='\t', count=3)
+            for smiles_line in smiles_lines:
+                if len(smiles_line) > 2:
+                    return False
+                smiles = smiles_line[0]
+                try:
+                    # if we have atoms, we have a molecule
+                    if not len(pybel.readstring('smi', smiles).atoms) > 0:
+                        return False
+                except Exception:
+                    # if the conversion fails, it is not a SMILES string
+                    return False
+            return True
+        else:
+            return False
+    '''
+
+
+class CML(GenericXml):
+    """
+    Chemical Markup Language
+    http://cml.sourceforge.net/
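+
+    The sniffer below expects a document that starts like this minimal
+    (illustrative) example::
+
+        <?xml version="1.0"?>
+        <cml xmlns="http://www.xml-cml.org/schema">
+          <molecule id="m1"/>
+        </cml>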
+    """
+    file_ext = "cml"
+    MetadataElement(name="number_of_molecules", default=0, desc="Number of molecules", readonly=True, visible=True, optional=True, no_value=0)
+
+    def set_meta(self, dataset, **kwd):
+        """
+        Set the number of molecules in dataset.
+        """
+        dataset.metadata.number_of_molecules = count_special_lines('^\s*<molecule', dataset.file_name)
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            if (dataset.metadata.number_of_molecules == 1):
+                dataset.blurb = "1 molecule"
+            else:
+                dataset.blurb = "%s molecules" % dataset.metadata.number_of_molecules
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff(self, filename):
+        """
+        Try to guess if the file is a CML file.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname('interval.interval')
+        >>> CML().sniff(fname)
+        False
+
+        >>> fname = get_test_fname('drugbank_drugs.cml')
+        >>> CML().sniff(fname)
+        True
+        """
+        handle = open(filename)
+        line = handle.readline()
+        if line.strip() != '<?xml version="1.0"?>':
+            handle.close()
+            return False
+        line = handle.readline()
+        if line.strip().find('http://www.xml-cml.org/schema') == -1:
+            handle.close()
+            return False
+        handle.close()
+        return True
+
+    def split(cls, input_datasets, subdir_generator_function, split_params):
+        """
+        Split the input files by molecule records.
+        """
+        if split_params is None:
+            return None
+
+        if len(input_datasets) > 1:
+            raise Exception("CML-file splitting does not support multiple files")
+        input_files = [ds.file_name for ds in input_datasets]
+
+        chunk_size = None
+        if split_params['split_mode'] == 'number_of_parts':
+            raise Exception('Split mode "%s" is currently not implemented for CML-files.' % split_params['split_mode'])
+        elif split_params['split_mode'] == 'to_size':
+            chunk_size = int(split_params['split_size'])
+        else:
+            raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+
+        def _read_cml_records(filename):
+            lines = []
+            with open(filename) as handle:
+                for line in handle:
+                    if line.lstrip().startswith('<?xml version="1.0"?>') or \
+                       line.lstrip().startswith('<cml xmlns="http://www.xml-cml.org/schema') or \
+                       line.lstrip().startswith('</cml>'):
+                        continue
+                    lines.append(line)
+                    if line.lstrip().startswith('</molecule>'):
+                        yield lines
+                        lines = []
+
+        header_lines = ['<?xml version="1.0"?>\n', '<cml xmlns="http://www.xml-cml.org/schema">\n']
+        footer_line = ['</cml>\n']
+
+        def _write_part_cml_file(accumulated_lines):
+            part_dir = subdir_generator_function()
+            part_path = os.path.join(part_dir, os.path.basename(input_files[0]))
+            part_file = open(part_path, 'w')
+            part_file.writelines(header_lines)
+            part_file.writelines(accumulated_lines)
+            part_file.writelines(footer_line)
+            part_file.close()
+
+        try:
+            cml_records = _read_cml_records(input_files[0])
+            cml_lines_accumulated = []
+            for counter, cml_record in enumerate(cml_records, start=1):
+                cml_lines_accumulated.extend(cml_record)
+                if counter % chunk_size == 0:
+                    _write_part_cml_file(cml_lines_accumulated)
+                    cml_lines_accumulated = []
+            if cml_lines_accumulated:
+                _write_part_cml_file(cml_lines_accumulated)
+        except Exception as e:
+            log.error('Unable to split files: %s' % str(e))
+            raise
+    split = classmethod(split)
+
+    def merge(split_files, output_file):
+        """
+        Merging CML files.
+        """
+        if len(split_files) == 1:
+            # For one file only, use base class method (move/copy)
+            return data.Text.merge(split_files, output_file)
+        if not split_files:
+            raise ValueError("Given no CML files, %r, to merge into %s"
+                             % (split_files, output_file))
+        with open(output_file, "w") as out:
+            for filename in split_files:
+                with open(filename) as handle:
+                    header = handle.readline()
+                    if not header:
+                        raise ValueError("CML file %s was empty" % filename)
+                    if not header.lstrip().startswith('<?xml version="1.0"?>'):
+                        out.write(header)
+                        raise ValueError("%s is not a valid XML file!" % filename)
+                    line = handle.readline()
+                    header += line
+                    if not line.lstrip().startswith('<cml xmlns="http://www.xml-cml.org/schema'):
+                        out.write(header)
+                        raise ValueError("%s is not a CML file!" % filename)
+                    molecule_found = False
+                    for line in handle.readlines():
+                        # Both required header lines were found; copy from the
+                        # first <molecule> element on, skipping </cml> lines.
+                        if line.lstrip().startswith('</cml>'):
+                            continue
+                        if line.lstrip().startswith('<molecule'):
+                            molecule_found = True
+                        if molecule_found:
+                            out.write(line)
+            out.write("</cml>\n")
+    merge = staticmethod(merge)
diff --git a/lib/galaxy/datatypes/mothur.py b/lib/galaxy/datatypes/mothur.py
new file mode 100644
index 0000000..21d67d7
--- /dev/null
+++ b/lib/galaxy/datatypes/mothur.py
@@ -0,0 +1,935 @@
+"""
+Mothur Metagenomics Datatypes
+"""
+import logging
+import sys
+import re
+from galaxy.datatypes.sniff import get_headers
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes.data import Text
+from galaxy.datatypes.tabular import Tabular
+
+log = logging.getLogger(__name__)
+
+
+class Otu(Text):
+    file_ext = 'mothur.otu'
+    MetadataElement(name="columns", default=0, desc="Number of columns", readonly=True, visible=True, no_value=0)
+    MetadataElement(name="labels", default=[], desc="Label Names", readonly=True, visible=True, no_value=[])
+    MetadataElement(name="otulabels", default=[], desc="OTU Names", readonly=True, visible=True, no_value=[])
+
+    def __init__(self, **kwd):
+        super(Otu, self).__init__(**kwd)
+
+    def set_meta(self, dataset, overwrite=True, **kwd):
+        super(Otu, self).set_meta(dataset, overwrite=overwrite, **kwd)
+
+        if dataset.has_data():
+            label_names = set()
+            otulabel_names = set()
+            ncols = 0
+            data_lines = 0
+            comment_lines = 0
+
+            headers = get_headers(dataset.file_name, sep='\t', count=-1)
+            # set otulabels
+            if len(headers[0]) > 2:
+                otulabel_names = headers[0][2:]
+            # set label names and number of lines
+            for line in headers:
+                if len(line) >= 2 and not line[0].startswith('@'):
+                    data_lines += 1
+                    ncols = max(ncols, len(line))
+                    label_names.add(line[0])
+                else:
+                    comment_lines += 1
+            # Set the discovered metadata values for the dataset
+            dataset.metadata.data_lines = data_lines
+            dataset.metadata.columns = ncols
+            dataset.metadata.labels = list(label_names)
+            dataset.metadata.labels.sort()
+            dataset.metadata.otulabels = list(otulabel_names)
+            dataset.metadata.otulabels.sort()
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is in otu (operational taxonomic unit) format.
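+        The expected layout (inferred from the sniffer below) is
+        label<TAB>count[<TAB>otu(1..n)], e.g. (illustrative):
+        0.03	2	Otu01	Otu02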
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.otu' )
+        >>> Otu().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.otu' )
+        >>> Otu().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        count = 0
+        for line in headers:
+            if not line[0].startswith('@'):
+                if len(line) < 2:
+                    return False
+                if count >= 1:
+                    try:
+                        check = int(line[1])
+                        if check + 2 != len(line):
+                            return False
+                    except ValueError:
+                        return False
+                count += 1
+        if count > 2:
+            return True
+
+        return False
+
+
+class Sabund(Otu):
+    file_ext = 'mothur.sabund'
+
+    def __init__(self, **kwd):
+        """
+        http://www.mothur.org/wiki/Sabund_file
+        """
+        super(Sabund, self).__init__(**kwd)
+
+    def init_meta(self, dataset, copy_from=None):
+        super(Sabund, self).init_meta(dataset, copy_from=copy_from)
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is in sabund format:
+        label<TAB>count[<TAB>value(1..n)]
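+        e.g. (illustrative): 0.03	2	5	3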
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.sabund' )
+        >>> Sabund().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.sabund' )
+        >>> Sabund().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        count = 0
+        for line in headers:
+            if not line[0].startswith('@'):
+                if len(line) < 2:
+                    return False
+                try:
+                    check = int(line[1])
+                    if check + 2 != len(line):
+                        return False
+                    for i in range(2, len(line)):
+                        int(line[i])
+                except ValueError:
+                    return False
+                count += 1
+        if count > 0:
+            return True
+
+        return False
+
+
+class GroupAbund(Otu):
+    file_ext = 'mothur.shared'
+    MetadataElement(name="groups", default=[], desc="Group Names", readonly=True, visible=True, no_value=[])
+
+    def __init__(self, **kwd):
+        super(GroupAbund, self).__init__(**kwd)
+
+    def init_meta(self, dataset, copy_from=None):
+        super(GroupAbund, self).init_meta(dataset, copy_from=copy_from)
+
+    def set_meta(self, dataset, overwrite=True, skip=1, **kwd):
+        super(GroupAbund, self).set_meta(dataset, overwrite=overwrite, **kwd)
+
+        # See if file starts with header line
+        if dataset.has_data():
+            label_names = set()
+            group_names = set()
+            data_lines = 0
+            comment_lines = 0
+            ncols = 0
+
+            headers = get_headers(dataset.file_name, sep='\t', count=-1)
+            for line in headers:
+                if line[0] == 'label' and line[1] == 'Group':
+                    skip = 1
+                    comment_lines += 1
+                else:
+                    skip = 0
+                    data_lines += 1
+                    ncols = max(ncols, len(line))
+                    label_names.add(line[0])
+                    group_names.add(line[1])
+
+            # Set the discovered metadata values for the dataset
+            dataset.metadata.data_lines = data_lines
+            dataset.metadata.columns = ncols
+            dataset.metadata.labels = list(label_names)
+            dataset.metadata.labels.sort()
+            dataset.metadata.groups = list(group_names)
+            dataset.metadata.groups.sort()
+            dataset.metadata.skip = skip
+
+    def sniff(self, filename, vals_are_int=False):
+        """
+        Determines whether the file is in shared format (per-group OTU
+        abundances):
+        label<TAB>group<TAB>count[<TAB>value(1..n)]
+        The first line is column headings as of Mothur v 1.2
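+        e.g. (illustrative): 0.03	forest	2	10	5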
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.shared' )
+        >>> GroupAbund().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.shared' )
+        >>> GroupAbund().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        count = 0
+        for line in headers:
+            if not line[0].startswith('@'):
+                if len(line) < 3:
+                    return False
+                if count > 0 or line[0] != 'label':
+                    try:
+                        check = int(line[2])
+                        if check + 3 != len(line):
+                            return False
+                        for i in range(3, len(line)):
+                            if vals_are_int:
+                                int(line[i])
+                            else:
+                                float(line[i])
+                    except ValueError:
+                        return False
+                count += 1
+        if count > 1:
+            return True
+        return False
+
+
+class SecondaryStructureMap(Tabular):
+    file_ext = 'mothur.map'
+
+    def __init__(self, **kwd):
+        """Initialize secondary structure map datatype"""
+        super(SecondaryStructureMap, self).__init__(**kwd)
+        self.column_names = ['Map']
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is in secondary structure map format:
+        a single column of integers, where each value indicates the row that
+        the current row maps to. Check to make sure that if structMap[10] = 380
+        then structMap[380] = 10, and vice versa.
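+        For example (illustrative), the three-line map 2 / 1 / 0 pairs rows
+        1 and 2 and leaves row 3 unpaired.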
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.map' )
+        >>> SecondaryStructureMap().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.map' )
+        >>> SecondaryStructureMap().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        line_num = 0
+        rowidxmap = {}
+        for line in headers:
+            line_num += 1
+            if len(line) > 1:
+                return False
+            try:
+                pointer = int(line[0])
+                if pointer > line_num:
+                    rowidxmap[pointer] = line_num
+                elif pointer > 0 or line_num in rowidxmap:
+                    if rowidxmap[line_num] != pointer:
+                        return False
+            except (ValueError, KeyError):
+                return False
+        if line_num < 3:
+            return False
+        return True
+
+
+class AlignCheck(Tabular):
+    file_ext = 'mothur.align.check'
+
+    def __init__(self, **kwd):
+        """Initialize AlignCheck datatype"""
+        super(AlignCheck, self).__init__(**kwd)
+        self.column_names = ['name', 'pound', 'dash', 'plus', 'equal', 'loop', 'tilde', 'total']
+        self.column_types = ['str', 'int', 'int', 'int', 'int', 'int', 'int', 'int']
+        self.comment_lines = 1
+
+    def set_meta(self, dataset, overwrite=True, **kwd):
+        super(AlignCheck, self).set_meta(dataset, overwrite=overwrite, **kwd)
+
+        dataset.metadata.column_names = self.column_names
+        dataset.metadata.column_types = self.column_types
+        dataset.metadata.comment_lines = self.comment_lines
+        dataset.metadata.data_lines -= self.comment_lines
+
+
+class AlignReport(Tabular):
+    """
+    QueryName	QueryLength	TemplateName	TemplateLength	SearchMethod	SearchScore	AlignmentMethod	QueryStart	QueryEnd	TemplateStart	TemplateEnd	PairwiseAlignmentLength	GapsInQuery	GapsInTemplate	LongestInsert	SimBtwnQuery&Template
+    AY457915	501		82283		1525		kmer		89.07		needleman	5		501		1		499		499			2		0		0		97.6
+    """
+    file_ext = 'mothur.align.report'
+
+    def __init__(self, **kwd):
+        """Initialize AlignCheck datatype"""
+        super(AlignReport, self).__init__(**kwd)
+        self.column_names = ['QueryName', 'QueryLength', 'TemplateName', 'TemplateLength', 'SearchMethod', 'SearchScore',
+                             'AlignmentMethod', 'QueryStart', 'QueryEnd', 'TemplateStart', 'TemplateEnd',
+                             'PairwiseAlignmentLength', 'GapsInQuery', 'GapsInTemplate', 'LongestInsert', 'SimBtwnQuery&Template'
+                             ]
+
+
+class DistanceMatrix(Text):
+    file_ext = 'mothur.dist'
+    """Add metadata elements"""
+    MetadataElement(name="sequence_count", default=0, desc="Number of sequences", readonly=True, visible=True, optional=True, no_value='?')
+
+    def init_meta(self, dataset, copy_from=None):
+        super(DistanceMatrix, self).init_meta(dataset, copy_from=copy_from)
+
+    def set_meta(self, dataset, overwrite=True, skip=0, **kwd):
+        super(DistanceMatrix, self).set_meta(dataset, overwrite=overwrite, skip=skip, **kwd)
+
+        headers = get_headers(dataset.file_name, sep='\t')
+        for line in headers:
+            if not line[0].startswith('@'):
+                try:
+                    dataset.metadata.sequence_count = int(''.join(line))  # seq count sometimes preceded by tab
+                    break
+                except Exception as e:
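+                    # pairwise matrices carry no leading sequence-count line,
+                    # so a parse failure there is expected and not logged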
+                    if not isinstance(self, PairwiseDistanceMatrix):
+                        log.warning("DistanceMatrix set_meta %s" % e)
+
+
+class LowerTriangleDistanceMatrix(DistanceMatrix):
+    file_ext = 'mothur.lower.dist'
+
+    def __init__(self, **kwd):
+        """Initialize secondary structure map datatype"""
+        super(LowerTriangleDistanceMatrix, self).__init__(**kwd)
+
+    def init_meta(self, dataset, copy_from=None):
+        super(LowerTriangleDistanceMatrix, self).init_meta(dataset, copy_from=copy_from)
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is a lower-triangle distance matrix (phylip) format
+        The first line has the number of sequences in the matrix.
+        The remaining lines have the sequence name followed by a list of distances from all preceding sequences
+                5  # possibly but not always preceded by a tab :/
+                U68589
+                U68590	0.3371
+                U68591	0.3609	0.3782
+                U68592	0.4155	0.3197	0.4148
+                U68593	0.2872	0.1690	0.3361	0.2842
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.lower.dist' )
+        >>> LowerTriangleDistanceMatrix().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.lower.dist' )
+        >>> LowerTriangleDistanceMatrix().sniff( fname )
+        False
+        """
+        numlines = 300
+        headers = get_headers(filename, sep='\t', count=numlines)
+        line_num = 0
+        sequence_count = 0
+        for line in headers:
+            if not line[0].startswith('@'):
+                # first line should contain the number of sequences in the file
+                if line_num == 0:
+                    if len(line) > 2:
+                        return False
+                    else:
+                        try:
+                            sequence_count = int(''.join(line))
+                        except ValueError:
+                            return False
+                else:
+                    # number of fields should equal the line number
+                    if len(line) != (line_num):
+                        return False
+                    try:
+                        # Distances should be floats
+                        for column in line[1:]:
+                            float(column)
+                    except ValueError:
+                        return False
+                line_num += 1
+
+        # check if the number of lines in the file was as expected
+        if line_num == sequence_count + 1 or line_num == numlines:
+            return True
+
+        return False
+
+
+class SquareDistanceMatrix(DistanceMatrix):
+    file_ext = 'mothur.square.dist'
+
+    def __init__(self, **kwd):
+        super(SquareDistanceMatrix, self).__init__(**kwd)
+
+    def init_meta(self, dataset, copy_from=None):
+        super(SquareDistanceMatrix, self).init_meta(dataset, copy_from=copy_from)
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is a square distance matrix (phylip-formatted distance matrix) format
+        The first line has the number of sequences in the matrix.
+        The following lines have the sequence name in the first column plus a column for the distance to each sequence
+        in the row order in which they appear in the matrix.
+               3
+               U68589  0.0000  0.3371  0.3610
+               U68590  0.3371  0.0000  0.3783
+               U68591  0.3610  0.3783  0.0000
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.square.dist' )
+        >>> SquareDistanceMatrix().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.square.dist' )
+        >>> SquareDistanceMatrix().sniff( fname )
+        False
+        """
+        numlines = 300
+        headers = get_headers(filename, sep='\t', count=numlines)
+        line_num = 0
+        sequence_count = 0
+        for line in headers:
+            if not line[0].startswith('@'):
+                if line_num == 0:
+                    if len(line) > 2:
+                        return False
+                    else:
+                        try:
+                            sequence_count = int(''.join(line))
+                        except ValueError:
+                            return False
+                else:
+                    # number of fields should equal the number of sequences
+                    if len(line) != sequence_count + 1:
+                        return False
+                    try:
+                        # Distances should be floats
+                        for column in line[1:]:
+                            float(column)
+                    except ValueError:
+                        return False
+                line_num += 1
+
+        # check if the number of lines in the file was as expected
+        if line_num == sequence_count + 1 or line_num == numlines:
+            return True
+
+        return False
+
+
+class PairwiseDistanceMatrix(DistanceMatrix, Tabular):
+    file_ext = 'mothur.pair.dist'
+
+    def __init__(self, **kwd):
+        """Initialize secondary structure map datatype"""
+        super(PairwiseDistanceMatrix, self).__init__(**kwd)
+        self.column_names = ['Sequence', 'Sequence', 'Distance']
+        self.column_types = ['str', 'str', 'float']
+
+    def set_meta(self, dataset, overwrite=True, skip=None, **kwd):
+        super(PairwiseDistanceMatrix, self).set_meta(dataset, overwrite=overwrite, skip=skip, **kwd)
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is a pairwise distance matrix (Column-formatted distance matrix) format
+        The first and second columns have the sequence names and the third column is the distance between those sequences.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.pair.dist' )
+        >>> PairwiseDistanceMatrix().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.pair.dist' )
+        >>> PairwiseDistanceMatrix().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        count = 0
+        all_ints = True
+        for line in headers:
+            if not line[0].startswith('@'):
+                if len(line) != 3:
+                    return False
+                try:
+                    float(line[2])
+                    try:
+                        # See if it's also an integer
+                        int(line[2])
+                    except ValueError:
+                        # At least one value is not an integer
+                        all_ints = False
+                except ValueError:
+                    return False
+                count += 1
+
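+        # require at least one non-integer distance so integer-only tables
+        # are not mistaken for a pairwise distance matrix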
+        if count > 2:
+            return not all_ints
+
+        return False
+
+
+class Names(Tabular):
+    file_ext = 'mothur.names'
+
+    def __init__(self, **kwd):
+        """
+        http://www.mothur.org/wiki/Name_file
+        Name file shows the relationship between a representative sequence (col 1)
+        and the comma-separated sequences it represents (col 2)
+        """
+        super(Names, self).__init__(**kwd)
+        self.column_names = ['name', 'representatives']
+        self.columns = 2
+
+
+class Summary(Tabular):
+    file_ext = 'mothur.summary'
+
+    def __init__(self, **kwd):
+        """summarizes the quality of sequences in an unaligned or aligned fasta-formatted sequence file"""
+        super(Summary, self).__init__(**kwd)
+        self.column_names = ['seqname', 'start', 'end', 'nbases', 'ambigs', 'polymer']
+        self.columns = 6
+
+
+class Group(Tabular):
+    file_ext = 'mothur.groups'
+    MetadataElement(name="groups", default=[], desc="Group Names", readonly=True, visible=True, no_value=[])
+
+    def __init__(self, **kwd):
+        """
+        http://www.mothur.org/wiki/Groups_file
+        Group file assigns each sequence (col 1) to a group (col 2)
+        """
+        super(Group, self).__init__(**kwd)
+        self.column_names = ['name', 'group']
+        self.columns = 2
+
+    def set_meta(self, dataset, overwrite=True, skip=None, max_data_lines=None, **kwd):
+        super(Group, self).set_meta(dataset, overwrite, skip, max_data_lines)
+
+        group_names = set()
+        headers = get_headers(dataset.file_name, sep='\t', count=-1)
+        for line in headers:
+            if len(line) > 1:
+                group_names.add(line[1])
+        dataset.metadata.groups = list(group_names)
+
+
+class AccNos(Tabular):
+    file_ext = 'mothur.accnos'
+
+    def __init__(self, **kwd):
+        """A list of names"""
+        super(AccNos, self).__init__(**kwd)
+        self.column_names = ['name']
+        self.columns = 1
+
+
+class Oligos(Text):
+    file_ext = 'mothur.oligos'
+
+    def sniff(self, filename):
+        """
+        http://www.mothur.org/wiki/Oligos_File
+        Determines whether the file is in the mothur oligos format
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.oligos' )
+        >>> Oligos().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.oligos' )
+        >>> Oligos().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        count = 0
+        for line in headers:
+            if not line[0].startswith('@') and not line[0].startswith('#'):
+                if len(line) == 2 and line[0] in ['forward', 'reverse']:
+                    count += 1
+                    continue
+                elif len(line) == 3 and line[0] == 'barcode':
+                    count += 1
+                    continue
+                else:
+                    return False
+        if count > 0:
+            return True
+
+        return False
+
+
+class Frequency(Tabular):
+    file_ext = 'mothur.freq'
+
+    def __init__(self, **kwd):
+        """A list of names"""
+        super(Frequency, self).__init__(**kwd)
+        self.column_names = ['position', 'frequency']
+        self.column_types = ['int', 'float']
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is a frequency tabular format for chimera analysis
+        #1.14.0
+        0	0.000
+        1	0.000
+        ...
+        155	0.975
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.freq' )
+        >>> Frequency().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.freq' )
+        >>> Frequency().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        count = 0
+        for line in headers:
+            if not line[0].startswith('@'):
+                if count == 0:
+                    # first line should be #<version string>
+                    if not (line[0].startswith('#') and len(line) == 1):
+                        return False
+                else:
+                    # all other lines should be <int> <float>
+                    if len(line) != 2:
+                        return False
+                    try:
+                        int(line[0])
+                        float(line[1])
+                    except Exception:
+                        return False
+                count += 1
+        if count > 1:
+            return True
+
+        return False
+
+
+class Quantile(Tabular):
+    file_ext = 'mothur.quan'
+    MetadataElement(name="filtered", default=False, no_value=False, optional=True, desc="Quantiles calculated using a mask", readonly=True)
+    MetadataElement(name="masked", default=False, no_value=False, optional=True, desc="Quantiles calculated using a frequency filter", readonly=True)
+
+    def __init__(self, **kwd):
+        """Quantiles for chimera analysis"""
+        super(Quantile, self).__init__(**kwd)
+        self.column_names = ['num', 'ten', 'twentyfive', 'fifty', 'seventyfive', 'ninetyfive', 'ninetynine']
+        self.column_types = ['int', 'float', 'float', 'float', 'float', 'float', 'float']
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is a quantiles tabular format for chimera analysis
+        1	0	0	0	0	0	0
+        2       0.309198        0.309198        0.37161 0.37161 0.37161 0.37161
+        3       0.510982        0.563213        0.693529        0.858939        1.07442 1.20608
+        ...
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.quan' )
+        >>> Quantile().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.quan' )
+        >>> Quantile().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        count = 0
+        for line in headers:
+            if not line[0].startswith('@') and not line[0].startswith('#'):
+                if len(line) != 7:
+                    return False
+                try:
+                    int(line[0])
+                    float(line[1])
+                    float(line[2])
+                    float(line[3])
+                    float(line[4])
+                    float(line[5])
+                    float(line[6])
+                except Exception:
+                    return False
+                count += 1
+        if count > 0:
+            return True
+
+        return False
+
+
+class LaneMask(Text):
+    file_ext = 'mothur.filter'
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is a lane mask filter: a single line consisting of zeros and ones.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.filter' )
+        >>> LaneMask().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.filter' )
+        >>> LaneMask().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        if len(headers) != 1 or len(headers[0]) != 1:
+            return False
+
+        if not re.match('^[01]+$', headers[0][0]):
+            return False
+
+        return True
+
+
+class CountTable(Tabular):
+    MetadataElement(name="groups", default=[], desc="Group Names", readonly=True, visible=True, no_value=[])
+    file_ext = 'mothur.count_table'
+
+    def __init__(self, **kwd):
+        """
+        http://www.mothur.org/wiki/Count_File
+        A table whose first column holds sequence names and whose remaining columns hold integer counts
+        # Example 1:
+        Representative_Sequence total
+        U68630  1
+        U68595  1
+        U68600  1
+        # Example 2 (with group columns):
+        Representative_Sequence total   forest  pasture
+        U68630  1       1       0
+        U68595  1       1       0
+        U68600  1       1       0
+        U68591  1       1       0
+        U68647  1       0       1
+        """
+        super(CountTable, self).__init__(**kwd)
+        self.column_names = ['name', 'total']
+
+    def set_meta(self, dataset, overwrite=True, skip=1, max_data_lines=None, **kwd):
+        super(CountTable, self).set_meta(dataset, overwrite=overwrite, **kwd)
+
+        headers = get_headers(dataset.file_name, sep='\t', count=1)
+        colnames = headers[0]
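+        # first column is the sequence name; every remaining column is an integer count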
+        dataset.metadata.column_types = ['str'] + ['int'] * (len(colnames) - 1)
+        if len(colnames) > 1:
+            dataset.metadata.columns = len(colnames)
+        if len(colnames) > 2:
+            dataset.metadata.groups = colnames[2:]
+
+        dataset.metadata.comment_lines = 1
+        dataset.metadata.data_lines -= 1
+
+
+class RefTaxonomy(Tabular):
+    file_ext = 'mothur.ref.taxonomy'
+
+    def __init__(self, **kwd):
+        super(RefTaxonomy, self).__init__(**kwd)
+        self.column_names = ['name', 'taxonomy']
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is a Reference Taxonomy
+
+        http://www.mothur.org/wiki/Taxonomy_outline
+        A table with 2 or 3 columns:
+        - SequenceName
+        - Taxonomy (semicolon-separated taxonomy in descending order)
+        - an optional integer (see the 3-column example below)
+        Example: 2-column (http://www.mothur.org/wiki/Taxonomy_outline)
+          X56533.1        Eukaryota;Alveolata;Ciliophora;Intramacronucleata;Oligohymenophorea;Hymenostomatida;Tetrahymenina;Glaucomidae;Glaucoma;
+          X97975.1        Eukaryota;Parabasalidea;Trichomonada;Trichomonadida;unclassified_Trichomonadida;
+          AF052717.1      Eukaryota;Parabasalidea;
+        Example: 3-column (http://vamps.mbl.edu/resources/databases.php)
+          v3_AA008	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus	5
+          v3_AA016	Bacteria	120
+          v3_AA019	Archaea;Crenarchaeota;Marine_Group_I	1
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.ref.taxonomy' )
+        >>> RefTaxonomy().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.ref.taxonomy' )
+        >>> RefTaxonomy().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t', count=300)
+        count = 0
+        pat_prog = re.compile('^([^ \t\n\r\x0c\x0b;]+([(]\\d+[)])?(;[^ \t\n\r\x0c\x0b;]+([(]\\d+[)])?)*(;)?)$')
+        found_semicolons = False
+        for line in headers:
+            if not line[0].startswith('@') and not line[0].startswith('#'):
+                if not (2 <= len(line) <= 3):
+                    return False
+                if not pat_prog.match(line[1]):
+                    return False
+                if not found_semicolons and line[1].find(';') > -1:
+                    found_semicolons = True
+                if len(line) == 3:
+                    try:
+                        int(line[2])
+                    except Exception:
+                        return False
+                count += 1
+
+        if count > 0:
+            # Require that at least one entry has semicolons in the 2nd column
+            return found_semicolons
+
+        return False
+
+
+class ConsensusTaxonomy(Tabular):
+    file_ext = 'mothur.cons.taxonomy'
+
+    def __init__(self, **kwd):
+        """A list of names"""
+        super(ConsensusTaxonomy, self).__init__(**kwd)
+        self.column_names = ['OTU', 'count', 'taxonomy']
+
+
+class TaxonomySummary(Tabular):
+    file_ext = 'mothur.tax.summary'
+
+    def __init__(self, **kwd):
+        """A Summary of taxon classification"""
+        super(TaxonomySummary, self).__init__(**kwd)
+        self.column_names = ['taxlevel', 'rankID', 'taxon', 'daughterlevels', 'total']
+
+
+class Axes(Tabular):
+    file_ext = 'mothur.axes'
+
+    def __init__(self, **kwd):
+        """Initialize axes datatype"""
+        super(Axes, self).__init__(**kwd)
+
+    def sniff(self, filename):
+        """
+        Determines whether the file is an axes format
+        The first line may have column headings.
+        The following lines have the name in the first column plus float columns for each axis.
+        ==> 98_sq_phylip_amazon.fn.unique.pca.axes <==
+           group   axis1   axis2
+           forest  0.000000        0.145743
+           pasture 0.145743        0.000000
+
+        ==> 98_sq_phylip_amazon.nmds.axes <==
+                   axis1   axis2
+           U68589  0.262608        -0.077498
+           U68590  0.027118        0.195197
+           U68591  0.329854        0.014395
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'mothur_datatypetest_true.mothur.axes' )
+        >>> Axes().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'mothur_datatypetest_false.mothur.axes' )
+        >>> Axes().sniff( fname )
+        False
+        """
+        headers = get_headers(filename, sep='\t')
+        count = 0
+        col_cnt = None
+        all_integers = True
+        for line in headers:
+            if count != 0:
+                if col_cnt is None:
+                    col_cnt = len(line)
+                    if col_cnt < 2:
+                        return False
+                else:
+                    if len(line) != col_cnt:
+                        return False
+                    try:
+                        for i in range(1, col_cnt):
+                            check = float(line[i])
+                            # Check abs value is <= 1.0
+                            if abs(check) > 1.0:
+                                return False
+                            # Also test for whether value is an integer
+                            try:
+                                check = int(line[i])
+                            except ValueError:
+                                all_integers = False
+                    except ValueError:
+                        return False
+            count += 1
+
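+        # axis coordinates are floats in [-1, 1]; a table of pure integers is
+        # more likely some other tabular type, so reject it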
+        if count > 0:
+            return not all_integers
+
+        return False
+
+
+class SffFlow(Tabular):
+    MetadataElement(name="flow_values", default="", no_value="", optional=True, desc="Total number of flow values", readonly=True)
+    MetadataElement(name="flow_order", default="TACG", no_value="TACG", desc="Total number of flow values", readonly=False)
+    file_ext = 'mothur.sff.flow'
+    """
+        http://www.mothur.org/wiki/Flow_file
+        The first line is the total number of flow values - 800 for Titanium data. For GS FLX it would be 400.
+        Following lines contain:
+        - SequenceName
+        - the number of useable flows as defined by 454's software
+        - the flow intensity for each base going in the order of TACG.
+        Example:
+          800
+          GQY1XT001CQL4K 85 1.04 0.00 1.00 0.02 0.03 1.02 0.05 ...
+          GQY1XT001CQIRF 84 1.02 0.06 0.98 0.06 0.09 1.05 0.07 ...
+          GQY1XT001CF5YW 88 1.02 0.02 1.01 0.04 0.06 1.02 0.03 ...
+    """
+    def __init__(self, **kwd):
+        super(SffFlow, self).__init__(**kwd)
+
+    def set_meta(self, dataset, overwrite=True, skip=1, max_data_lines=None, **kwd):
+        super(SffFlow, self).set_meta(dataset, overwrite, 1, max_data_lines)
+
+        headers = get_headers(dataset.file_name, sep='\t', count=1)
+        try:
+            flow_values = int(headers[0][0])
+            dataset.metadata.flow_values = flow_values
+        except Exception as e:
+            log.warning("SffFlow set_meta %s" % e)
+
+    def make_html_table(self, dataset, skipchars=[]):
+        """Create HTML table, used for displaying peek"""
+        try:
+            out = '<table cellspacing="0" cellpadding="3">'
+
+            # Generate column header
+            out += '<tr>'
+            out += '<th>%d. Name</th>' % 1
+            out += '<th>%d. Flows</th>' % 2
+            for i in range(3, dataset.metadata.columns + 1):
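+                # columns 3.. hold flow intensities; (i + 1) % 4 maps each
+                # column to its base in flow_order (column 3 -> 'T' for TACG)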
+                base = dataset.metadata.flow_order[(i + 1) % 4]
+                out += '<th>%d. %s</th>' % (i - 2, base)
+            out += '</tr>'
+            out += self.make_html_peek_rows(dataset, skipchars=skipchars)
+            out += '</table>'
+        except Exception as exc:
+            out = "Can't create peek %s" % str(exc)
+        return out
+
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod(sys.modules[__name__])
diff --git a/lib/galaxy/datatypes/msa.py b/lib/galaxy/datatypes/msa.py
new file mode 100644
index 0000000..7b31ac8
--- /dev/null
+++ b/lib/galaxy/datatypes/msa.py
@@ -0,0 +1,206 @@
+import abc
+import logging
+import os
+
+from galaxy.datatypes.binary import Binary
+from galaxy.datatypes.data import get_file_peek, Text
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes.util import generic_util
+from galaxy.util import nice_size
+
+log = logging.getLogger(__name__)
+
+
+class Hmmer( Text ):
+    edam_data = "data_1364"
+    edam_format = "format_1370"
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = "HMMER Database"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disc'
+
+    def display_peek(self, dataset):
+        try:
+            return dataset.peek
+        except:
+            return "HMMER database (%s)" % ( nice_size( dataset.get_size() ) )
+
+    @abc.abstractmethod
+    def sniff(self, filename):
+        raise NotImplementedError
+
+
+class Hmmer2( Hmmer ):
+    edam_format = "format_3328"
+    file_ext = "hmm2"
+
+    def sniff(self, filename):
+        """HMMER2 files start with HMMER2.0"""
+        with open(filename, 'r') as handle:
+            return handle.read(8) == 'HMMER2.0'
+
+
+class Hmmer3( Hmmer ):
+    edam_format = "format_3329"
+    file_ext = "hmm3"
+
+    def sniff(self, filename):
+        """HMMER3 files start with HMMER3/f"""
+        with open(filename, 'r') as handle:
+            return handle.read(8) == 'HMMER3/f'
+
+
+class HmmerPress( Binary ):
+    """Class for hmmpress database files."""
+    file_ext = 'hmmpress'
+    allow_datatype_change = False
+    composite_type = 'basic'
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text."""
+        if not dataset.dataset.purged:
+            dataset.peek = "HMMER Binary database"
+            dataset.blurb = "HMMER Binary database"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        """Create HTML content, used for displaying peek."""
+        try:
+            return dataset.peek
+        except:
+            return "HMMER3 database (multiple files)"
+
+    def __init__(self, **kwd):
+        Binary.__init__(self, **kwd)
+        # Binary model
+        self.add_composite_file('model.hmm.h3m', is_binary=True)
+        # SSI index for binary model
+        self.add_composite_file('model.hmm.h3i', is_binary=True)
+        # Profiles (MSV part)
+        self.add_composite_file('model.hmm.h3f', is_binary=True)
+        # Profiles (remained)
+        self.add_composite_file('model.hmm.h3p', is_binary=True)
+
+
+Binary.register_unsniffable_binary_ext("hmmpress")
+
+
+class Stockholm_1_0( Text ):
+    edam_data = "data_0863"
+    edam_format = "format_1961"
+    file_ext = "stockholm"
+
+    MetadataElement( name="number_of_models", default=0, desc="Number of multiple alignments", readonly=True, visible=True, optional=True, no_value=0 )
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            if dataset.metadata.number_of_models == 1:
+                dataset.blurb = "1 alignment"
+            else:
+                dataset.blurb = "%s alignments" % dataset.metadata.number_of_models
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disc'
+
+    def sniff( self, filename ):
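+        # the Stockholm header line is '# STOCKHOLM 1.0'; the POSIX-style
+        # pattern below is matched per line by generic_util.count_special_lines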
+        return generic_util.count_special_lines('^#[[:space:]+]STOCKHOLM[[:space:]+]1.0', filename) > 0
+
+    def set_meta( self, dataset, **kwd ):
+        """
+        Set the number of models in dataset.
+        """
+        dataset.metadata.number_of_models = generic_util.count_special_lines('^#[[:space:]+]STOCKHOLM[[:space:]+]1.0', dataset.file_name)
+
+    def split( cls, input_datasets, subdir_generator_function, split_params):
+        """
+        Split the input files by model records.
+        """
+        if split_params is None:
+            return None
+
+        if len(input_datasets) > 1:
+            raise Exception("STOCKHOLM-file splitting does not support multiple files")
+        input_files = [ds.file_name for ds in input_datasets]
+
+        chunk_size = None
+        if split_params['split_mode'] == 'number_of_parts':
+            raise Exception('Split mode "%s" is currently not implemented for STOCKHOLM-files.' % split_params['split_mode'])
+        elif split_params['split_mode'] == 'to_size':
+            chunk_size = int(split_params['split_size'])
+        else:
+            raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+
+        def _read_stockholm_records( filename ):
+            lines = []
+            with open(filename) as handle:
+                for line in handle:
+                    lines.append( line )
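+                    # a bare '//' line closes one Stockholm alignment record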
+                    if line.strip() == '//':
+                        yield lines
+                        lines = []
+
+        def _write_part_stockholm_file( accumulated_lines ):
+            part_dir = subdir_generator_function()
+            part_path = os.path.join( part_dir, os.path.basename( input_files[0] ) )
+            with open( part_path, 'w' ) as part_file:
+                part_file.writelines( accumulated_lines )
+
+        try:
+            stockholm_records = _read_stockholm_records( input_files[0] )
+            stockholm_lines_accumulated = []
+            for counter, stockholm_record in enumerate( stockholm_records, start=1):
+                stockholm_lines_accumulated.extend( stockholm_record )
+                if counter % chunk_size == 0:
+                    _write_part_stockholm_file( stockholm_lines_accumulated )
+                    stockholm_lines_accumulated = []
+            if stockholm_lines_accumulated:
+                _write_part_stockholm_file( stockholm_lines_accumulated )
+        except Exception as e:
+            log.error('Unable to split files: %s' % str(e))
+            raise
+    split = classmethod(split)
+
+
+class MauveXmfa( Text ):
+    file_ext = "xmfa"
+
+    MetadataElement( name="number_of_models", default=0, desc="Number of alignmened sequences", readonly=True, visible=True, optional=True, no_value=0 )
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            if dataset.metadata.number_of_models == 1:
+                dataset.blurb = "1 alignment"
+            else:
+                dataset.blurb = "%s alignments" % dataset.metadata.number_of_models
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disc'
+
+    def sniff( self, filename ):
+        with open(filename, 'r') as handle:
+            return handle.read(21) == '#FormatVersion Mauve1'
+
+    def set_meta( self, dataset, **kwd ):
+        dataset.metadata.number_of_models = generic_util.count_special_lines('^#Sequence([[:digit:]]+)Entry', dataset.file_name)
diff --git a/lib/galaxy/datatypes/neo4j.py b/lib/galaxy/datatypes/neo4j.py
new file mode 100644
index 0000000..f3b97b9
--- /dev/null
+++ b/lib/galaxy/datatypes/neo4j.py
@@ -0,0 +1,134 @@
+"""
+Neo4j Composite Dataset
+"""
+import logging
+import sys
+import shutil
+import os
+
+from galaxy.datatypes.data import Data
+from galaxy.datatypes.images import Html
+from galaxy.util import FILENAME_VALID_CHARS
+
+gal_Log = logging.getLogger(__name__)
+verbose = True
+
+
+class Neo4j(Html):
+    """
+    base class to use for neostore datatypes
+    derived from html - composite datatype elements
+    stored in extra files path
+    """
+    def generate_primary_file(self, dataset=None):
+        """
+        This is called only at upload, to write the HTML file.
+        The datasets cannot be renamed here - they arrive with their default names, unfortunately.
+        """
+        rval = [
+            '<html><head><title>Files for Composite Dataset (%s)</title></head><p/>\
+            This composite dataset is composed of the following files:<p/><ul>' % (
+                self.file_ext)]
+        for composite_name, composite_file in self.get_composite_files(dataset=dataset).items():
+            opt_text = ''
+            if composite_file.optional:
+                opt_text = ' (optional)'
+            rval.append('<li><a href="%s">%s</a>%s' % (composite_name, composite_name, opt_text))
+        rval.append('</ul></html>')
+        return "\n".join(rval)
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'text/html'
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = 'Neo4j database (multiple files)'
+            dataset.blurb = 'Neo4j database (multiple files)'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek(self, dataset):
+        """Create HTML content, used for displaying peek."""
+        try:
+            return dataset.peek
+        except Exception:
+            return "NEO4J database (multiple files)"
+
+    def display_data(self, trans, data, preview=False, filename=None,
+                     to_ext=None, size=None, offset=None, **kwd):
+        """Documented as an old display method, but still gets called via tests etc
+        This allows us to format the data shown in the central pane via the "eye" icon.
+        """
+        if not preview:
+            trans.response.set_content_type(data.get_mime())
+            trans.log_event("Display dataset id: %s" % str(data.id))
+
+            # the target directory name
+            neo4j_dir_name = '/dataset_{}_files/neo4jdb'.format(data.dataset.id)
+            dir_name = str(os.path.dirname(trans.app.object_store.get_filename(data.dataset))) + neo4j_dir_name
+
+            # generate unique filename for this dataset
+            fname = ''.join(c in FILENAME_VALID_CHARS and c or '_' for c in data.name)[0:150]
+
+            # zip the target directory (dir_name) using the fname
+            shutil.make_archive(fname, 'zip', dir_name)
+            download_zip = fname + '.zip'
+
+            # setup headers for the download
+            trans.response.headers['Content-Length'] = int(os.stat(download_zip).st_size)
+            # force octet-stream so Safari doesn't append mime extensions to filename
+            trans.response.set_content_type("application/octet-stream")
+            trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % \
+                                                            (data.hid, download_zip, "zip")
+            return open(download_zip, 'rb')
+
+
+class Neo4jDB(Neo4j, Data):
+    """Class for neo4jDB database files."""
+    file_ext = 'neostore'
+    composite_type = 'auto_primary_file'
+    allow_datatype_change = False
+
+    def __init__(self, **kwd):
+        Data.__init__(self, **kwd)
+        self.add_composite_file('neostore', is_binary=True)
+        self.add_composite_file('neostore.id', is_binary=True)
+        self.add_composite_file('neostore.counts.db.a', is_binary=True)
+        self.add_composite_file('neostore.counts.db.b', is_binary=True)
+        self.add_composite_file('neostore.labeltokenstore.db', is_binary=True)
+        self.add_composite_file('neostore.labeltokenstore.db.id', is_binary=True)
+        self.add_composite_file('neostore.labeltokenstore.db.names', is_binary=True)
+        self.add_composite_file('neostore.labeltokenstore.db.names.id', is_binary=True)
+        self.add_composite_file('neostore.nodestore.db', is_binary=True)
+        self.add_composite_file('neostore.nodestore.db.id', is_binary=True)
+        self.add_composite_file('neostore.nodestore.db.labels', is_binary=True)
+        self.add_composite_file('neostore.nodestore.db.labels.id', is_binary=True)
+
+        self.add_composite_file('neostore.propertystore.db', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.id', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.arrays', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.arrays.id', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.index', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.index.id', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.index.keys', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.index.keys.id', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.strings', is_binary=True)
+        self.add_composite_file('neostore.propertystore.db.strings.id', is_binary=True)
+
+        self.add_composite_file('neostore.relationshipgroupstore.db', is_binary=True)
+        self.add_composite_file('neostore.relationshipgroupstore.db.id', is_binary=True)
+        self.add_composite_file('neostore.relationshipstore.db', is_binary=True)
+        self.add_composite_file('neostore.relationshipstore.db.id', is_binary=True)
+        self.add_composite_file('neostore.relationshiptypestore.db.names', is_binary=True)
+        self.add_composite_file('neostore.relationshiptypestore.db.names.id', is_binary=True)
+        self.add_composite_file('neostore.schemastore.db', is_binary=True)
+        self.add_composite_file('neostore.schemastore.db.id', is_binary=True)
+        self.add_composite_file('neostore.transaction.db.0', is_binary=True)
+
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod(sys.modules[__name__])
diff --git a/lib/galaxy/datatypes/ngsindex.py b/lib/galaxy/datatypes/ngsindex.py
new file mode 100644
index 0000000..0b65e7a
--- /dev/null
+++ b/lib/galaxy/datatypes/ngsindex.py
@@ -0,0 +1,77 @@
+"""
+NGS indexes
+"""
+import logging
+import os
+
+from .metadata import MetadataElement
+from .text import Html
+
+log = logging.getLogger(__name__)
+
+
+class BowtieIndex( Html ):
+    """
+    base class for BowtieIndex
+    is subclassed by BowtieColorIndex and BowtieBaseIndex
+    """
+    MetadataElement( name="base_name", desc="base name for this index set", default='galaxy_generated_bowtie_index', set_in_upload=True, readonly=True )
+    MetadataElement( name="sequence_space", desc="sequence_space for this index set", default='unknown', set_in_upload=True, readonly=True )
+
+    is_binary = True
+    composite_type = 'auto_primary_file'
+    allow_datatype_change = False
+
+    def generate_primary_file( self, dataset=None ):
+        """
+        This is called only at upload, to write the HTML file.
+        The datasets cannot be renamed here - they arrive with their default names, unfortunately.
+        """
+        return '<html><head></head><body>AutoGenerated Primary File for Composite Dataset</body></html>'
+
+    def regenerate_primary_file(self, dataset):
+        """
+        cannot do this until we are setting metadata
+        """
+        bn = dataset.metadata.base_name
+        flist = os.listdir(dataset.extra_files_path)
+        rval = ['<html><head><title>Files for Composite Dataset %s</title></head><p/>Comprises the following files:<p/><ul>' % (bn)]
+        for fname in flist:
+            sfname = os.path.split(fname)[-1]
+            rval.append( '<li><a href="%s">%s</a>' % ( sfname, sfname ) )
+        rval.append( '</ul></html>' )
+        with open(dataset.file_name, 'w') as f:
+            f.write("\n".join( rval ))
+            f.write('\n')
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Bowtie index file (%s)" % ( dataset.metadata.sequence_space )
+            dataset.blurb = "%s space" % ( dataset.metadata.sequence_space )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "Bowtie index file"
+
+
+class BowtieColorIndex( BowtieIndex ):
+    """
+    Bowtie color space index
+    """
+    MetadataElement( name="sequence_space", desc="sequence_space for this index set", default='color', set_in_upload=True, readonly=True )
+
+    file_ext = 'bowtie_color_index'
+
+
+class BowtieBaseIndex( BowtieIndex ):
+    """
+    Bowtie base space index
+    """
+    MetadataElement( name="sequence_space", desc="sequence_space for this index set", default='base', set_in_upload=True, readonly=True )
+
+    file_ext = 'bowtie_base_index'
diff --git a/lib/galaxy/datatypes/proteomics.py b/lib/galaxy/datatypes/proteomics.py
new file mode 100644
index 0000000..eb22d3b
--- /dev/null
+++ b/lib/galaxy/datatypes/proteomics.py
@@ -0,0 +1,419 @@
+"""
+Proteomics Datatypes
+"""
+import logging
+import re
+
+from galaxy.datatypes import data
+from galaxy.datatypes.binary import Binary
+from galaxy.datatypes.data import Text
+from galaxy.datatypes.tabular import Tabular
+from galaxy.datatypes.xml import GenericXml
+from galaxy.util import nice_size
+
+
+log = logging.getLogger(__name__)
+
+
+class Wiff(Binary):
+    """Class for wiff files."""
+    edam_data = "data_2536"
+    edam_format = "format_3710"
+    file_ext = 'wiff'
+    allow_datatype_change = False
+    composite_type = 'auto_primary_file'
+
+    def __init__(self, **kwd):
+        Binary.__init__(self, **kwd)
+
+        self.add_composite_file(
+            'wiff',
+            description='AB SCIEX files in .wiff format. This can contain all needed information or only metadata.',
+            is_binary=True)
+
+        self.add_composite_file(
+            'wiff_scan',
+            description='AB SCIEX spectra file (wiff.scan), if the corresponding .wiff file only contains metadata.',
+            optional='True', is_binary=True)
+
+    def generate_primary_file(self, dataset=None):
+        rval = ['<html><head><title>Wiff Composite Dataset </title></head><p/>']
+        rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
+        for composite_name, composite_file in self.get_composite_files(dataset=dataset).items():
+            fn = composite_name
+            opt_text = ''
+            if composite_file.optional:
+                opt_text = ' (optional)'
+            if composite_file.get('description'):
+                rval.append('<li><a href="%s" type="text/plain">%s (%s)</a>%s</li>' % (fn, fn, composite_file.get('description'), opt_text))
+            else:
+                rval.append('<li><a href="%s" type="text/plain">%s</a>%s</li>' % (fn, fn, opt_text))
+        rval.append('</ul></div></html>')
+        return "\n".join(rval)
+
+
+Binary.register_sniffable_binary_format("wiff", "wiff", Wiff )
+
+
+class PepXmlReport(Tabular):
+    """pepxml converted to tabular report"""
+    edam_data = "data_2536"
+    file_ext = "pepxml.tsv"
+
+    def __init__(self, **kwd):
+        Tabular.__init__(self, **kwd)
+        self.column_names = ['Protein', 'Peptide', 'Assumed Charge', 'Neutral Pep Mass (calculated)', 'Neutral Mass', 'Retention Time', 'Start Scan', 'End Scan', 'Search Engine', 'PeptideProphet Probability', 'Interprophet Probability']
+
+    def display_peek(self, dataset):
+        """Returns formated html of peek"""
+        return Tabular.make_html_table(self, dataset, column_names=self.column_names)
+
+
+class ProtXmlReport(Tabular):
+    """protxml converted to tabular report"""
+    edam_data = "data_2536"
+    file_ext = "protxml.tsv"
+    comment_lines = 1
+
+    def __init__(self, **kwd):
+        Tabular.__init__(self, **kwd)
+        self.column_names = [
+            "Entry Number", "Group Probability",
+            "Protein", "Protein Link", "Protein Probability",
+            "Percent Coverage", "Number of Unique Peptides",
+            "Total Independent Spectra", "Percent Share of Spectrum ID's",
+            "Description", "Protein Molecular Weight", "Protein Length",
+            "Is Nondegenerate Evidence", "Weight", "Precursor Ion Charge",
+            "Peptide sequence", "Peptide Link", "NSP Adjusted Probability",
+            "Initial Probability", "Number of Total Termini",
+            "Number of Sibling Peptides Bin", "Number of Instances",
+            "Peptide Group Designator", "Is Evidence?"]
+
+    def display_peek(self, dataset):
+        """Returns formated html of peek"""
+        return Tabular.make_html_table(self, dataset, column_names=self.column_names)
+
+
+class ProteomicsXml(GenericXml):
+    """ An enhanced XML datatype used to reuse code across several
+    proteomic/mass-spec datatypes. """
+    edam_data = "data_2536"
+    edam_format = "format_2032"
+
+    def sniff(self, filename):
+        """ Determines whether the file is the correct XML type. """
+        with open(filename, 'r') as contents:
+            while True:
+                line = contents.readline()
+                if not line or not line.startswith('<?'):
+                    break
+            # pattern match <root or <ns:root for any ns string
+            pattern = r'^<(\w*:)?%s' % self.root
+            return bool(line) and re.match(pattern, line) is not None
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = self.blurb
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class PepXml(ProteomicsXml):
+    """pepXML data"""
+    edam_format = "format_3655"
+    file_ext = "pepxml"
+    blurb = 'pepXML data'
+    root = "msms_pipeline_analysis"
+
+
+class MzML(ProteomicsXml):
+    """mzML data"""
+    edam_format = "format_3244"
+    file_ext = "mzml"
+    blurb = 'mzML Mass Spectrometry data'
+    root = "(mzML|indexedmzML)"
+
+
+class ProtXML(ProteomicsXml):
+    """protXML data"""
+    file_ext = "protxml"
+    blurb = 'prot XML Search Results'
+    root = "protein_summary"
+
+
+class MzXML(ProteomicsXml):
+    """mzXML data"""
+    edam_format = "format_3654"
+    file_ext = "mzxml"
+    blurb = "mzXML Mass Spectrometry data"
+    root = "mzXML"
+
+
+class MzIdentML(ProteomicsXml):
+    edam_format = "format_3247"
+    file_ext = "mzid"
+    blurb = "XML identified peptides and proteins."
+    root = "MzIdentML"
+
+
+class TraML(ProteomicsXml):
+    edam_format = "format_3246"
+    file_ext = "traml"
+    blurb = "TraML transition list"
+    root = "TraML"
+
+
+class MzQuantML(ProteomicsXml):
+    edam_format = "format_3248"
+    file_ext = "mzq"
+    blurb = "XML quantification data"
+    root = "MzQuantML"
+
+
+class ConsensusXML(ProteomicsXml):
+    file_ext = "consensusxml"
+    blurb = "OpenMS multiple LC-MS map alignment file"
+    root = "consensusXML"
+
+
+class FeatureXML(ProteomicsXml):
+    file_ext = "featurexml"
+    blurb = "OpenMS feature file"
+    root = "featureMap"
+
+
+class IdXML(ProteomicsXml):
+    file_ext = "idxml"
+    blurb = "OpenMS identification file"
+    root = "IdXML"
+
+
+class TandemXML(ProteomicsXml):
+    edam_format = "format_3711"
+    file_ext = "tandem"
+    blurb = "X!Tandem search results file"
+    root = "bioml"
+
+
+class UniProtXML(ProteomicsXml):
+    file_ext = "uniprotxml"
+    blurb = "UniProt Proteome file"
+    root = "uniprot"
+
+
+class Mgf(Text):
+    """Mascot Generic Format data"""
+    edam_data = "data_2536"
+    edam_format = "format_3651"
+    file_ext = "mgf"
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = 'mgf Mascot Generic Format'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff(self, filename):
+        mgf_begin_ions = "BEGIN IONS"
+        max_lines = 100
+
+        with open(filename) as handle:
+            for i, line in enumerate(handle):
+                line = line.rstrip()
+                if line == mgf_begin_ions:
+                    return True
+                if i > max_lines:
+                    return False
+        return False
+
+
+class MascotDat(Text):
+    """Mascot search results """
+    edam_data = "data_2536"
+    edam_format = "format_3713"
+    file_ext = "mascotdat"
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = 'mascotdat Mascot Search Results'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff(self, filename):
+        mime_version = "MIME-Version: 1.0 (Generated by Mascot version 1.0)"
+        max_lines = 10
+
+        with open(filename) as handle:
+            for i, line in enumerate(handle):
+                line = line.rstrip()
+                if line == mime_version:
+                    return True
+                if i > max_lines:
+                    return False
+        return False
+
+
+class ThermoRAW(Binary):
+    """Class describing a Thermo Finnigan binary RAW file"""
+    edam_data = "data_2536"
+    edam_format = "format_3712"
+    file_ext = "raw"
+
+    def sniff(self, filename):
+        # Thermo Finnigan RAW format is proprietary and hence not well documented.
+        # Files start with 2 bytes that seem to differ followed by F\0i\0n\0n\0i\0g\0a\0n
+        # This combination represents 17 bytes, but to play safe we read 20 bytes from
+        # the start of the file.
+        try:
+            with open(filename, 'rb') as handle:
+                header = handle.read(20)
+            finnigan = b'F\0i\0n\0n\0i\0g\0a\0n'
+            return header.find(finnigan) != -1
+        except Exception:
+            return False
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        if not dataset.dataset.purged:
+            dataset.peek = "Thermo Finnigan RAW file"
+            dataset.blurb = nice_size(dataset.get_size())
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek(self, dataset):
+        try:
+            return dataset.peek
+        except:
+            return "Thermo Finnigan RAW file (%s)" % (nice_size(dataset.get_size()))
+
+
+Binary.register_sniffable_binary_format("thermo.raw", "raw", ThermoRAW )
+
+
+class Msp(Text):
+    """ Output of NIST MS Search Program chemdata.nist.gov/mass-spc/ftp/mass-spc/PepLib.pdf """
+    file_ext = "msp"
+
+    @staticmethod
+    def next_line_starts_with(contents, prefix):
+        next_line = contents.readline()
+        return next_line is not None and next_line.startswith(prefix)
+
+    def sniff(self, filename):
+        """ Determines whether the file is a NIST MSP output file."""
+        with open(filename, 'r') as f:
+            begin_contents = f.read(1024)
+            if "\n" not in begin_contents:
+                return False
+            lines = begin_contents.splitlines()
+            if len(lines) < 2:
+                return False
+            return lines[0].startswith("Name:") and lines[1].startswith("MW:")
+
+
+class SPLibNoIndex( Text ):
+    """SPlib without index file """
+    file_ext = "splib_noindex"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'Spectral Library without index files'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class SPLib(Msp):
+    """SpectraST Spectral Library. Closely related to msp format"""
+    file_ext = "splib"
+    composite_type = 'auto_primary_file'
+
+    def __init__(self, **kwd):
+        Msp.__init__(self, **kwd)
+        self.add_composite_file('library.splib',
+                                description='Spectral Library. Contains actual library spectra',
+                                is_binary=False)
+        self.add_composite_file('library.spidx',
+                                description='Spectrum index', is_binary=False)
+        self.add_composite_file('library.pepidx',
+                                description='Peptide index', is_binary=False)
+
+    def generate_primary_file(self, dataset=None):
+        rval = ['<html><head><title>Spectral Library Composite Dataset </title></head><p/>']
+        rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
+        for composite_name, composite_file in self.get_composite_files(dataset=dataset).items():
+            fn = composite_name
+            opt_text = ''
+            if composite_file.optional:
+                opt_text = ' (optional)'
+            if composite_file.get('description'):
+                rval.append('<li><a href="%s" type="text/plain">%s (%s)</a>%s</li>' % (fn, fn, composite_file.get('description'), opt_text))
+            else:
+                rval.append('<li><a href="%s" type="text/plain">%s</a>%s</li>' % (fn, fn, opt_text))
+        rval.append('</ul></div></html>')
+        return "\n".join(rval)
+
+    def set_peek(self, dataset, is_multi_byte=False):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek(dataset.file_name, is_multi_byte=is_multi_byte)
+            dataset.blurb = 'splib Spectral Library Format'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff(self, filename):
+        """ Determines whether the file is a SpectraST generated file.
+        """
+        with open(filename, 'r') as contents:
+            return Msp.next_line_starts_with(contents, "Name:") and Msp.next_line_starts_with(contents, "LibID:")
+
+
+class Ms2(Text):
+    file_ext = "ms2"
+
+    def sniff(self, filename):
+        """ Determines whether the file is a valid ms2 file."""
+
+        with open(filename, 'r') as contents:
+            header_lines = []
+            while True:
+                line = contents.readline()
+                if not line:
+                    break
+                elif line.startswith('H\t'):
+                    header_lines.append(line)
+                else:
+                    break
+
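+        # a valid MS2 header must declare each of these fields on 'H' lines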
+        for header_field in ['CreationDate', 'Extractor', 'ExtractorVersion', 'ExtractorOptions']:
+            found_header = False
+            for header_line in header_lines:
+                if header_line.startswith('H\t%s' % (header_field)):
+                    found_header = True
+                    break
+            if not found_header:
+                return False
+
+        return True
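+    # Illustrative only (hypothetical values): a header block the sniffer above
+    # accepts, carrying the four required fields; '\t' marks a literal tab:
+    #
+    #     H\tCreationDate\t2016-10-01
+    #     H\tExtractor\tRawConverter
+    #     H\tExtractorVersion\t1.0
+    #     H\tExtractorOptions\tms2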
+
+
+# Unsniffable binary format; we should eventually do something about this.
+class XHunterAslFormat(Binary):
+    """ Annotated Spectra in the HLF format http://www.thegpm.org/HUNTER/format_2006_09_15.html """
+    file_ext = "hlf"
+
+
+class Sf3(Binary):
+    """Class describing a Scaffold SF3 files"""
+    file_ext = "sf3"
diff --git a/lib/galaxy/datatypes/qualityscore.py b/lib/galaxy/datatypes/qualityscore.py
new file mode 100644
index 0000000..9723d73
--- /dev/null
+++ b/lib/galaxy/datatypes/qualityscore.py
@@ -0,0 +1,132 @@
+"""
+Quality score datatype classes
+"""
+
+import data
+import logging
+log = logging.getLogger(__name__)
+
+
+class QualityScore ( data.Text ):
+    """
+    until we know more about quality score formats
+    """
+    edam_data = "data_2048"
+    edam_format = "format_3606"
+    file_ext = "qual"
+
+
+class QualityScoreSOLiD ( QualityScore ):
+    """
+    until we know more about quality score formats
+    """
+    edam_format = "format_3610"
+    file_ext = "qualsolid"
+
+    def sniff( self, filename ):
+        """
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'sequence.fasta' )
+        >>> QualityScoreSOLiD().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'sequence.qualsolid' )
+        >>> QualityScoreSOLiD().sniff( fname )
+        True
+        """
+        try:
+            fh = open( filename )
+            readlen = None
+            goodblock = 0
+            while True:
+                line = fh.readline()
+                if not line:
+                    if goodblock > 0:
+                        return True
+                    else:
+                        break  # EOF
+                line = line.strip()
+                if line and not line.startswith( '#' ):  # first non-empty non-comment line
+                    if line.startswith( '>' ):
+                        line = fh.readline().strip()
+                        if line == '' or line.startswith( '>' ):
+                            break
+                        try:
+                            [ int( x ) for x in line.split() ]
+                            if readlen is None:
+                                readlen = len( line.split() )
+                            assert len(line.split()) == readlen  # SOLiD reads should be of the same length
+                        except ( ValueError, AssertionError ):
+                            break
+                        goodblock += 1
+                        if goodblock > 10:
+                            return True
+                    else:
+                        break  # we found a non-empty line, but it's not a header
+            fh.close()
+        except Exception:
+            pass
+        return False
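+    # Illustrative only (hypothetical read): a qualsolid block the sniffer above
+    # accepts is a FASTA-style header followed by one integer score per base,
+    # with every read the same length, e.g.:
+    #
+    #     >read_1
+    #     25 25 24 22 20 18 17 15 14 12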
+
+    def set_meta( self, dataset, **kwd ):
+        if self.max_optional_metadata_filesize >= 0 and dataset.get_size() > self.max_optional_metadata_filesize:
+            dataset.metadata.data_lines = None
+            return
+        return QualityScore.set_meta( self, dataset, **kwd )
+
+
+class QualityScore454 ( QualityScore ):
+    """
+    until we know more about quality score formats
+    """
+    edam_format = "format_3611"
+    file_ext = "qual454"
+
+    def sniff( self, filename ):
+        """
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'sequence.fasta' )
+        >>> QualityScore454().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'sequence.qual454' )
+        >>> QualityScore454().sniff( fname )
+        True
+        """
+        try:
+            fh = open( filename )
+            while True:
+                line = fh.readline()
+                if not line:
+                    break  # EOF
+                line = line.strip()
+                if line and not line.startswith( '#' ):  # first non-empty non-comment line
+                    if line.startswith( '>' ):
+                        line = fh.readline().strip()
+                        if line == '' or line.startswith( '>' ):
+                            break
+                        try:
+                            [ int( x ) for x in line.split() ]
+                        except ValueError:
+                            break
+                        return True
+                    else:
+                        break  # we found a non-empty line, but it's not a header
+            fh.close()
+        except Exception:
+            pass
+        return False
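+    # Illustrative only (hypothetical read): qual454 looks similar on the surface
+    # (FASTA-style header plus integer scores), but read lengths may differ, so
+    # one well-formed header/score pair is enough for the sniffer above:
+    #
+    #     >read_a
+    #     28 28 27 30 30 32 32 31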
+
+
+class QualityScoreSolexa ( QualityScore ):
+    """
+    until we know more about quality score formats
+    """
+    edam_format = "format_3608"
+    file_ext = "qualsolexa"
+
+
+class QualityScoreIllumina ( QualityScore ):
+    """
+    until we know more about quality score formats
+    """
+    edam_format = "format_3609"
+    file_ext = "qualillumina"
diff --git a/lib/galaxy/datatypes/registry.py b/lib/galaxy/datatypes/registry.py
new file mode 100644
index 0000000..099c2bc
--- /dev/null
+++ b/lib/galaxy/datatypes/registry.py
@@ -0,0 +1,877 @@
+"""
+Provides mapping between extensions and datatypes, mime-types, etc.
+"""
+from __future__ import absolute_import
+
+import os
+import tempfile
+import logging
+import imp
+from . import data
+from . import tabular
+from . import interval
+from . import images
+from . import sequence
+from . import qualityscore
+from . import xml
+from . import coverage
+from . import tracks
+from . import binary
+from . import text
+import galaxy.util
+from galaxy.util.odict import odict
+from .display_applications.application import DisplayApplication
+
+
+class ConfigurationError( Exception ):
+    pass
+
+
+class Registry( object ):
+
+    def __init__( self ):
+        self.log = logging.getLogger(__name__)
+        self.log.addHandler( logging.NullHandler() )
+        self.datatypes_by_extension = {}
+        self.mimetypes_by_extension = {}
+        self.datatype_converters = odict()
+        # Converters defined in local datatypes_conf.xml
+        self.converters = []
+        # Converters defined in datatypes_conf.xml included in installed tool shed repositories.
+        self.proprietary_converters = []
+        self.converter_deps = {}
+        self.available_tracks = []
+        self.set_external_metadata_tool = None
+        self.sniff_order = []
+        self.upload_file_formats = []
+        # Datatype elements defined in local datatypes_conf.xml that contain display applications.
+        self.display_app_containers = []
+        # Datatype elements in datatypes_conf.xml included in installed
+        # tool shed repositories that contain display applications.
+        self.proprietary_display_app_containers = []
+        # Map a display application id to a display application
+        self.display_applications = odict()
+        # The following 2 attributes are used in the to_xml_file()
+        # method to persist the current state into an xml file.
+        self.display_path_attr = None
+        self.converters_path_attr = None
+        # The 'default' converters_path defined in local datatypes_conf.xml
+        self.converters_path = None
+        # The 'default' display_path defined in local datatypes_conf.xml
+        self.display_applications_path = None
+        self.inherit_display_application_by_class = []
+        # Keep a list of imported proprietary datatype class modules.
+        self.imported_modules = []
+        self.datatype_elems = []
+        self.sniffer_elems = []
+        self.xml_filename = None
+        # Build sites
+        self.build_sites = {}
+        self.display_sites = {}
+        self.legacy_build_sites = {}
+
+    def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
+        """
+        Parse a datatypes XML file located at root_dir/config (if processing the Galaxy distributed config) or contained within
+        an installed Tool Shed repository.  If deactivate is True, an installed Tool Shed repository that includes custom datatypes
+        is being deactivated or uninstalled, so appropriate loaded datatypes will be removed from the registry.  The value of
+        override will be False when a Tool Shed repository is being installed.  Since installation is occurring after the datatypes
+        registry has been initialized at server startup, its contents cannot be overridden by newly introduced conflicting data types.
+        """
+
+        def __import_module( full_path, datatype_module, datatype_class_name ):
+            open_file_obj, file_name, description = imp.find_module( datatype_module, [ full_path ] )
+            imported_module = imp.load_module( datatype_class_name, open_file_obj, file_name, description )
+            return imported_module
+
+        if root_dir and config:
+            # If handling_proprietary_datatypes is determined as True below, we'll have an elem that looks something like this:
+            # <datatype display_in_upload="true"
+            #           extension="blastxml"
+            #           mimetype="application/xml"
+            #           proprietary_datatype_module="blast"
+            #           proprietary_path="[cloned repository path]"
+            #           type="galaxy.datatypes.blast:BlastXml" />
+            handling_proprietary_datatypes = False
+            # Parse datatypes_conf.xml
+            tree = galaxy.util.parse_xml( config )
+            root = tree.getroot()
+            # Load datatypes and converters from config
+            if deactivate:
+                self.log.debug( 'Deactivating datatypes from %s' % config )
+            else:
+                self.log.debug( 'Loading datatypes from %s' % config )
+            registration = root.find( 'registration' )
+            # Set default paths defined in local datatypes_conf.xml.
+            if not self.converters_path:
+                self.converters_path_attr = registration.get( 'converters_path', 'lib/galaxy/datatypes/converters' )
+                self.converters_path = os.path.join( root_dir, self.converters_path_attr )
+                if not os.path.isdir( self.converters_path ):
+                    raise ConfigurationError( "Directory does not exist: %s" % self.converters_path )
+            if not self.display_applications_path:
+                self.display_path_attr = registration.get( 'display_path', 'display_applications' )
+                self.display_applications_path = os.path.join( root_dir, self.display_path_attr )
+            # Proprietary datatype's <registration> tag may have special attributes, proprietary_converter_path and proprietary_display_path.
+            proprietary_converter_path = registration.get( 'proprietary_converter_path', None )
+            proprietary_display_path = registration.get( 'proprietary_display_path', None )
+            if ( proprietary_converter_path is not None or proprietary_display_path is not None ) and not handling_proprietary_datatypes:
+                handling_proprietary_datatypes = True
+            for elem in registration.findall( 'datatype' ):
+                # Keep a status of the process steps to enable stopping the process of handling the datatype if necessary.
+                ok = True
+                extension = self.get_extension( elem )
+                dtype = elem.get( 'type', None )
+                type_extension = elem.get( 'type_extension', None )
+                mimetype = elem.get( 'mimetype', None )
+                display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
+                # If make_subclass is True, it does not necessarily imply that we are subclassing a datatype that is contained
+                # in the distribution.
+                make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
+                edam_format = elem.get( 'edam_format', None )
+                if edam_format and not make_subclass:
+                    self.log.warning("Cannot specify edam_format without setting subclass to True, skipping datatype.")
+                    continue
+                edam_data = elem.get( 'edam_data', None )
+                if edam_data and not make_subclass:
+                    self.log.warning("Cannot specify edam_data without setting subclass to True, skipping datatype.")
+                    continue
+                # Proprietary datatypes included in installed tool shed repositories will include two special attributes
+                # (proprietary_path and proprietary_datatype_module) if they depend on proprietary datatypes classes.
+                # The value of proprietary_path is the path to the cloned location of the tool shed repository's contained
+                # datatypes_conf.xml file.
+                proprietary_path = elem.get( 'proprietary_path', None )
+                proprietary_datatype_module = elem.get( 'proprietary_datatype_module', None )
+                if ( proprietary_path is not None or proprietary_datatype_module is not None ) and not handling_proprietary_datatypes:
+                    handling_proprietary_datatypes = True
+                if deactivate:
+                    # We are deactivating or uninstalling an installed tool shed repository, so eliminate the datatype
+                    # elem from the in-memory list of datatype elems.
+                    for in_memory_elem in self.datatype_elems:
+                        in_memory_extension = in_memory_elem.get( 'extension', None )
+                        if in_memory_extension == extension:
+                            in_memory_dtype = elem.get( 'type', None )
+                            in_memory_type_extension = elem.get( 'type_extension', None )
+                            in_memory_mimetype = elem.get( 'mimetype', None )
+                            in_memory_display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
+                            in_memory_make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
+                            if in_memory_dtype == dtype and \
+                                    in_memory_type_extension == type_extension and \
+                                    in_memory_mimetype == mimetype and \
+                                    in_memory_display_in_upload == display_in_upload and \
+                                    in_memory_make_subclass == make_subclass:
+                                self.datatype_elems.remove( in_memory_elem )
+                    if extension is not None and extension in self.datatypes_by_extension:
+                        # We are deactivating or uninstalling an installed tool shed repository, so eliminate the datatype
+                        # from the registry.  TODO: Handle deactivating datatype converters, etc before removing from
+                        # self.datatypes_by_extension.
+                        del self.datatypes_by_extension[ extension ]
+                        if extension in self.upload_file_formats:
+                            self.upload_file_formats.remove( extension )
+                        self.log.debug( "Removed datatype with extension '%s' from the registry." % extension )
+                else:
+                    # We are loading a new datatype, so make sure it is correctly defined before proceeding.
+                    can_process_datatype = False
+                    if extension is not None:
+                        if dtype is not None or type_extension is not None:
+                            if override or extension not in self.datatypes_by_extension:
+                                can_process_datatype = True
+                    if can_process_datatype:
+                        if dtype is not None:
+                            try:
+                                fields = dtype.split( ':' )
+                                datatype_module = fields[ 0 ]
+                                datatype_class_name = fields[ 1 ]
+                            except Exception as e:
+                                self.log.exception( 'Error parsing datatype definition for dtype %s: %s' % ( str( dtype ), str( e ) ) )
+                                ok = False
+                            if ok:
+                                datatype_class = None
+                                if proprietary_path and proprietary_datatype_module and datatype_class_name:
+                                    # TODO: earlier comments suggested this needs a lock because it modifies
+                                    # sys.path; that is probably true, but the previous lock did not do so.
+                                    try:
+                                        imported_module = __import_module( proprietary_path,
+                                                                           proprietary_datatype_module,
+                                                                           datatype_class_name )
+                                        if imported_module not in self.imported_modules:
+                                            self.imported_modules.append( imported_module )
+                                        if hasattr( imported_module, datatype_class_name ):
+                                            datatype_class = getattr( imported_module, datatype_class_name )
+                                    except Exception as e:
+                                        full_path = os.path.join( proprietary_path, proprietary_datatype_module )
+                                        self.log.debug( "Exception importing proprietary code file %s: %s" % ( str( full_path ), str( e ) ) )
+                                # Either the above exception was thrown because the proprietary_datatype_module is not derived from a class
+                                # in the repository, or we are loading Galaxy's datatypes. In either case we'll look in the registry.
+                                if datatype_class is None:
+                                    try:
+                                        # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
+                                        fields = datatype_module.split( '.' )[1:]
+                                        module = __import__( datatype_module )
+                                        for mod in fields:
+                                            module = getattr( module, mod )
+                                        datatype_class = getattr( module, datatype_class_name )
+                                        self.log.debug( 'Retrieved datatype module %s:%s from the datatype registry.' % ( str( datatype_module ), datatype_class_name ) )
+                                    except Exception as e:
+                                        self.log.exception( 'Error importing datatype module %s: %s' % ( str( datatype_module ), str( e ) ) )
+                                        ok = False
+                        elif type_extension is not None:
+                            try:
+                                datatype_class = self.datatypes_by_extension[ type_extension ].__class__
+                            except Exception as e:
+                                self.log.exception( 'Error determining datatype_class for type_extension %s: %s' % ( str( type_extension ), str( e ) ) )
+                                ok = False
+                        if ok:
+                            if not deactivate:
+                                # A new tool shed repository that contains custom datatypes is being installed, and since installation is
+                                # occurring after the datatypes registry has been initialized at server startup, its contents cannot be
+                                # overridden by newly introduced conflicting datatypes unless the value of override is True.
+                                if extension in self.datatypes_by_extension:
+                                    # Because of the way that the value of can_process_datatype was set above, we know that the value of
+                                    # override is True.
+                                    self.log.debug( "Overriding conflicting datatype with extension '%s', using datatype from %s." %
+                                                    ( str( extension ), str( config ) ) )
+                                if make_subclass:
+                                    datatype_class = type( datatype_class_name, ( datatype_class, ), {} )
+                                    if edam_format:
+                                        datatype_class.edam_format = edam_format
+                                    if edam_data:
+                                        datatype_class.edam_data = edam_data
+                                self.datatypes_by_extension[ extension ] = datatype_class()
+                                if mimetype is None:
+                                    # Use default mimetype per datatype specification.
+                                    mimetype = self.datatypes_by_extension[ extension ].get_mime()
+                                self.mimetypes_by_extension[ extension ] = mimetype
+                                if datatype_class.track_type:
+                                    self.available_tracks.append( extension )
+                                if display_in_upload and extension not in self.upload_file_formats:
+                                    self.upload_file_formats.append( extension )
+                                # Max file size cut off for setting optional metadata.
+                                self.datatypes_by_extension[ extension ].max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+                                for converter in elem.findall( 'converter' ):
+                                    # Build the list of datatype converters which will later be loaded into the calling app's toolbox.
+                                    converter_config = converter.get( 'file', None )
+                                    target_datatype = converter.get( 'target_datatype', None )
+                                    depends_on = converter.get( 'depends_on', None )
+                                    if depends_on is not None and target_datatype is not None:
+                                        if extension not in self.converter_deps:
+                                            self.converter_deps[ extension ] = {}
+                                        self.converter_deps[ extension ][ target_datatype ] = depends_on.split( ',' )
+                                    if converter_config and target_datatype:
+                                        if proprietary_converter_path:
+                                            self.proprietary_converters.append( ( converter_config, extension, target_datatype ) )
+                                        else:
+                                            self.converters.append( ( converter_config, extension, target_datatype ) )
+                                # Add composite files.
+                                for composite_file in elem.findall( 'composite_file' ):
+                                    name = composite_file.get( 'name', None )
+                                    if name is None:
+                                        self.log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
+                                    optional = composite_file.get( 'optional', False )
+                                    mimetype = composite_file.get( 'mimetype', None )
+                                    self.datatypes_by_extension[ extension ].add_composite_file( name, optional=optional, mimetype=mimetype )
+                                for display_app in elem.findall( 'display' ):
+                                    if proprietary_display_path:
+                                        if elem not in self.proprietary_display_app_containers:
+                                            self.proprietary_display_app_containers.append( elem )
+                                    else:
+                                        if elem not in self.display_app_containers:
+                                            self.display_app_containers.append( elem )
+                                # Processing the new datatype elem is now complete, so make sure the element defining it is retained by appending
+                                # the new datatype to the in-memory list of datatype elems to enable persistence.
+                                self.datatype_elems.append( elem )
+                    else:
+                        if extension is not None:
+                            if dtype is not None or type_extension is not None:
+                                if extension in self.datatypes_by_extension:
+                                    if not override:
+                                        # Do not load the datatype since it conflicts with an existing datatype which we are not supposed
+                                        # to override.
+                                        self.log.debug( "Ignoring conflicting datatype with extension '%s' from %s." % ( extension, config ) )
+            # Load datatype sniffers from the config - we'll do this even if one or more datatypes were not properly processed in the config
+            # since sniffers are not tightly coupled with datatypes.
+            self.load_datatype_sniffers( root,
+                                         deactivate=deactivate,
+                                         handling_proprietary_datatypes=handling_proprietary_datatypes,
+                                         override=override )
+            self.upload_file_formats.sort()
+            # Load build sites
+            self.load_build_sites( root )
+            # Persist the xml form of the registry into a temporary file so that it can be loaded from the command line by tools and
+            # set_metadata processing.
+            self.to_xml_file()
+        self.set_default_values()
+
+        def append_to_sniff_order():
+            # Just in case any supported data types are not included in the config's sniff_order section.
+            for ext in self.datatypes_by_extension:
+                datatype = self.datatypes_by_extension[ ext ]
+                included = False
+                for atype in self.sniff_order:
+                    if isinstance( atype, datatype.__class__ ):
+                        included = True
+                        break
+                if not included:
+                    self.sniff_order.append( datatype )
+        append_to_sniff_order()
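+    # Hedged usage sketch for load_datatypes (the paths are hypothetical; only
+    # the method names are defined in this module):
+    #
+    #     registry = Registry()
+    #     registry.load_datatypes( root_dir='/srv/galaxy', config='config/datatypes_conf.xml' )
+    #     registry.get_datatype_by_extension( 'fasta' )   # -> a sequence.Fasta instance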
+
+    def load_build_sites( self, root ):
+        if root.find( 'build_sites' ) is not None:
+            for elem in root.find( 'build_sites' ).findall( 'site' ):
+                if not (elem.get( 'type' ) and elem.get( 'file' )):
+                    self.log.exception( "Site is missing required 'type' and 'file' attributes: %s" )
+                else:
+                    site_type = elem.get( 'type' )
+                    file = elem.get( 'file' )
+                    self.build_sites[site_type] = file
+                    if site_type in ('ucsc', 'gbrowse'):
+                        self.legacy_build_sites[site_type] = galaxy.util.read_build_sites( file )
+                    if elem.get( 'display', None ):
+                        display = elem.get( 'display' )
+                        self.display_sites[site_type] = [ x.strip() for x in display.lower().split( ',' ) ]
+                        self.log.debug( "Loaded build site '%s': %s with display sites: %s", site_type, file, display )
+                    else:
+                        self.log.debug( "Loaded build site '%s': %s", site_type, file )
+
+    def get_legacy_sites_by_build( self, site_type, build ):
+        sites = []
+        for site in self.legacy_build_sites.get(site_type, []):
+            if build in site['builds']:
+                sites.append((site['name'], site['url']))
+        return sites
+
+    def get_display_sites( self, site_type ):
+        return self.display_sites.get( site_type, [] )
+
+    def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_datatypes=False, override=False ):
+        """
+        Process the sniffers element from a datatypes XML file located at root_dir/config (if processing the Galaxy
+        distributed config) or contained within an installed Tool Shed repository.  If deactivate is True, an installed Tool
+        Shed repository that includes custom sniffers is being deactivated or uninstalled, so appropriate loaded sniffers will
+        be removed from the registry.  The value of override will be False when a Tool Shed repository is being installed.
+        Since installation is occurring after the datatypes registry has been initialized at server startup, its contents
+        cannot be overridden by newly introduced conflicting sniffers.
+        """
+        sniffer_elem_classes = [ e.attrib[ 'type' ] for e in self.sniffer_elems ]
+        sniffers = root.find( 'sniffers' )
+        if sniffers is not None:
+            for elem in sniffers.findall( 'sniffer' ):
+                # Keep a status of the process steps to enable stopping the process of handling the sniffer if necessary.
+                ok = True
+                dtype = elem.get( 'type', None )
+                if dtype is not None:
+                    try:
+                        fields = dtype.split( ":" )
+                        datatype_module = fields[ 0 ]
+                        datatype_class_name = fields[ 1 ]
+                        module = None
+                    except Exception as e:
+                        self.log.exception( 'Error determining datatype class or module for dtype %s: %s' % ( str( dtype ), str( e ) ) )
+                        ok = False
+                    if ok:
+                        if handling_proprietary_datatypes:
+                            # See if one of the imported modules contains the datatype class name.
+                            for imported_module in self.imported_modules:
+                                if hasattr( imported_module, datatype_class_name ):
+                                    module = imported_module
+                                    break
+                        if module is None:
+                            try:
+                                # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
+                                module = __import__( datatype_module )
+                                for comp in datatype_module.split( '.' )[ 1: ]:
+                                    module = getattr( module, comp )
+                            except Exception as e:
+                                self.log.exception( "Error importing datatype class for '%s': %s" % ( str( dtype ), str( e ) ) )
+                                ok = False
+                        if ok:
+                            try:
+                                aclass = getattr( module, datatype_class_name )()
+                            except Exception as e:
+                                self.log.exception( 'Error instantiating datatype class %s from module %s: %s', str( datatype_class_name ), str( module ), str( e ) )
+                                ok = False
+                            if ok:
+                                if deactivate:
+                                    # We are deactivating or uninstalling an installed Tool Shed repository, so eliminate the appropriate sniffers.
+                                    sniffer_class = elem.get( 'type', None )
+                                    if sniffer_class is not None:
+                                        for index, s_e_c in enumerate( sniffer_elem_classes ):
+                                            if sniffer_class == s_e_c:
+                                                del self.sniffer_elems[ index ]
+                                                sniffer_elem_classes = [ elem.attrib[ 'type' ] for elem in self.sniffer_elems ]
+                                                self.log.debug( "Removed sniffer element for datatype '%s'" % str( dtype ) )
+                                                break
+                                        for sniffer_class in self.sniff_order:
+                                            if sniffer_class.__class__ == aclass.__class__:
+                                                self.sniff_order.remove( sniffer_class )
+                                                self.log.debug( "Removed sniffer class for datatype '%s' from sniff order" % str( dtype ) )
+                                                break
+                                else:
+                                    # We are loading a new sniffer, so check whether a conflicting sniffer is already loaded.
+                                    conflict = False
+                                    for conflict_loc, sniffer_class in enumerate( self.sniff_order ):
+                                        if sniffer_class.__class__ == aclass.__class__:
+                                            # We have a conflicting sniffer, so replace the one previously loaded.
+                                            conflict = True
+                                            if override:
+                                                del self.sniff_order[ conflict_loc ]
+                                                self.log.debug( "Removed conflicting sniffer for datatype '%s'" % dtype )
+                                            break
+                                    if conflict:
+                                        if override:
+                                            self.sniff_order.append( aclass )
+                                            self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
+                                    else:
+                                        self.sniff_order.append( aclass )
+                                        self.log.debug( "Loaded sniffer for datatype '%s'" % dtype )
+                                    # Processing the new sniffer elem is now complete, so make sure the element defining it is loaded if necessary.
+                                    sniffer_class = elem.get( 'type', None )
+                                    if sniffer_class is not None:
+                                        if sniffer_class not in sniffer_elem_classes:
+                                            self.sniffer_elems.append( elem )
+
+    def get_datatype_class_by_name( self, name ):
+        """
+        Return the datatype class where the datatype's `type` attribute
+        (as defined in the datatype_conf.xml file) contains `name`.
+        """
+        # TODO: obviously not ideal, but some of the base classes that are useful for testing datatypes
+        # are not loaded into the datatypes registry, so we need to test for them here.
+        if name == 'images.Image':
+            return images.Image
+
+        # TODO: too inefficient - would be better to generate this once as a map and store in this object
+        for ext, datatype_obj in self.datatypes_by_extension.items():
+            datatype_obj_class = datatype_obj.__class__
+            datatype_obj_class_str = str( datatype_obj_class )
+            if name in datatype_obj_class_str:
+                return datatype_obj_class
+        return None
+
+    def get_available_tracks( self ):
+        return self.available_tracks
+
+    def get_mimetype_by_extension( self, ext, default='application/octet-stream' ):
+        """Returns a mimetype based on an extension"""
+        try:
+            mimetype = self.mimetypes_by_extension[ ext ]
+        except KeyError:
+            # datatype was never declared
+            mimetype = default
+            self.log.warning( 'unknown mimetype for datatype extension %s' % str( ext ) )
+        return mimetype
+
+    def get_datatype_by_extension( self, ext ):
+        """Returns a datatype based on an extension"""
+        try:
+            builder = self.datatypes_by_extension[ ext ]
+        except KeyError:
+            builder = data.Text()
+        return builder
+
+    def change_datatype( self, data, ext ):
+        data.extension = ext
+        # call init_meta and copy metadata from itself.  The datatype
+        # being converted *to* will handle any metadata copying and
+        # initialization.
+        if data.has_data():
+            data.set_size()
+            data.init_meta( copy_from=data )
+        return data
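+    # Hedged usage sketch ('hda' is a hypothetical dataset instance with the
+    # attributes this method expects):
+    #
+    #     hda = registry.change_datatype( hda, 'tabular' )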
+
+    def load_datatype_converters( self, toolbox, installed_repository_dict=None, deactivate=False ):
+        """
+        If deactivate is False, add datatype converters from self.converters or self.proprietary_converters
+        to the calling app's toolbox.  If deactivate is True, eliminates relevant converters from the calling
+        app's toolbox.
+        """
+        if installed_repository_dict:
+            # Load converters defined by datatypes_conf.xml included in installed tool shed repository.
+            converters = self.proprietary_converters
+        else:
+            # Load converters defined by local datatypes_conf.xml.
+            converters = self.converters
+        for elem in converters:
+            tool_config = elem[ 0 ]
+            source_datatype = elem[ 1 ]
+            target_datatype = elem[ 2 ]
+            if installed_repository_dict:
+                converter_path = installed_repository_dict[ 'converter_path' ]
+            else:
+                converter_path = self.converters_path
+            try:
+                config_path = os.path.join( converter_path, tool_config )
+                converter = toolbox.load_tool( config_path )
+                if installed_repository_dict:
+                    # If the converter is included in an installed tool shed repository, set the tool
+                    # shed related tool attributes.
+                    converter.tool_shed = installed_repository_dict[ 'tool_shed' ]
+                    converter.repository_name = installed_repository_dict[ 'repository_name' ]
+                    converter.repository_owner = installed_repository_dict[ 'repository_owner' ]
+                    converter.installed_changeset_revision = installed_repository_dict[ 'installed_changeset_revision' ]
+                    converter.old_id = converter.id
+                    # The converter should be included in the list of tools defined in tool_dicts.
+                    tool_dicts = installed_repository_dict[ 'tool_dicts' ]
+                    for tool_dict in tool_dicts:
+                        if tool_dict[ 'id' ] == converter.id:
+                            converter.guid = tool_dict[ 'guid' ]
+                            converter.id = tool_dict[ 'guid' ]
+                            break
+                if deactivate:
+                    toolbox.remove_tool_by_id( converter.id, remove_from_panel=False )
+                    if source_datatype in self.datatype_converters:
+                        if target_datatype in self.datatype_converters[ source_datatype ]:
+                            del self.datatype_converters[ source_datatype ][ target_datatype ]
+                    self.log.debug( "Deactivated converter: %s", converter.id )
+                else:
+                    toolbox.register_tool( converter )
+                    if source_datatype not in self.datatype_converters:
+                        self.datatype_converters[ source_datatype ] = odict()
+                    self.datatype_converters[ source_datatype ][ target_datatype ] = converter
+                    self.log.debug( "Loaded converter: %s", converter.id )
+            except Exception as e:
+                if deactivate:
+                    self.log.exception( "Error deactivating converter from (%s): %s" % ( converter_path, str( e ) ) )
+                else:
+                    self.log.exception( "Error loading converter (%s): %s" % ( converter_path, str( e ) ) )
+
+    def load_display_applications( self, app, installed_repository_dict=None, deactivate=False ):
+        """
+        If deactivate is False, add display applications from self.display_app_containers or
+        self.proprietary_display_app_containers to appropriate datatypes.  If deactivate is
+        True, eliminates relevant display applications from appropriate datatypes.
+        """
+        if installed_repository_dict:
+            # Load display applications defined by datatypes_conf.xml included in installed tool shed repository.
+            datatype_elems = self.proprietary_display_app_containers
+        else:
+            # Load display applications defined by local datatypes_conf.xml.
+            datatype_elems = self.display_app_containers
+        for elem in datatype_elems:
+            extension = self.get_extension( elem )
+            for display_app in elem.findall( 'display' ):
+                display_file = display_app.get( 'file', None )
+                if installed_repository_dict:
+                    display_path = installed_repository_dict[ 'display_path' ]
+                    display_file_head, display_file_tail = os.path.split( display_file )
+                    config_path = os.path.join( display_path, display_file_tail )
+                else:
+                    config_path = os.path.join( self.display_applications_path, display_file )
+                try:
+                    inherit = galaxy.util.string_as_bool( display_app.get( 'inherit', 'False' ) )
+                    display_app = DisplayApplication.from_file( config_path, app )
+                    if display_app:
+                        if display_app.id in self.display_applications:
+                            if deactivate:
+                                del self.display_applications[ display_app.id ]
+                            else:
+                                # If we already loaded this display application, we'll use the first one loaded.
+                                display_app = self.display_applications[ display_app.id ]
+                        elif installed_repository_dict:
+                            # If the display application is included in an installed tool shed repository,
+                            # set the tool shed related tool attributes.
+                            display_app.tool_shed = installed_repository_dict[ 'tool_shed' ]
+                            display_app.repository_name = installed_repository_dict[ 'repository_name' ]
+                            display_app.repository_owner = installed_repository_dict[ 'repository_owner' ]
+                            display_app.installed_changeset_revision = installed_repository_dict[ 'installed_changeset_revision' ]
+                            display_app.old_id = display_app.id
+                            # The display application should be included in the list of tools defined in tool_dicts.
+                            tool_dicts = installed_repository_dict[ 'tool_dicts' ]
+                            for tool_dict in tool_dicts:
+                                if tool_dict[ 'id' ] == display_app.id:
+                                    display_app.guid = tool_dict[ 'guid' ]
+                                    display_app.id = tool_dict[ 'guid' ]
+                                    break
+                        if deactivate:
+                            if display_app.id in self.display_applications:
+                                del self.display_applications[ display_app.id ]
+                            if extension in self.datatypes_by_extension:
+                                if display_app.id in self.datatypes_by_extension[ extension ].display_applications:
+                                    del self.datatypes_by_extension[ extension ].display_applications[ display_app.id ]
+                            if inherit and ( self.datatypes_by_extension[ extension ], display_app ) in self.inherit_display_application_by_class:
+                                self.inherit_display_application_by_class.remove( ( self.datatypes_by_extension[ extension ], display_app ) )
+                            self.log.debug( "Deactivated display application '%s' for datatype '%s'." % ( display_app.id, extension ) )
+                        else:
+                            self.display_applications[ display_app.id ] = display_app
+                            self.datatypes_by_extension[ extension ].add_display_application( display_app )
+                            if inherit and ( self.datatypes_by_extension[ extension ], display_app ) not in self.inherit_display_application_by_class:
+                                self.inherit_display_application_by_class.append( ( self.datatypes_by_extension[ extension ], display_app ) )
+                            self.log.debug( "Loaded display application '%s' for datatype '%s', inherit=%s." % ( display_app.id, extension, inherit ) )
+                except Exception as e:
+                    if deactivate:
+                        self.log.exception( "Error deactivating display application (%s): %s" % ( config_path, str( e ) ) )
+                    else:
+                        self.log.exception( "Error loading display application (%s): %s" % ( config_path, str( e ) ) )
+        # Handle display_application subclass inheritance.
+        for extension, d_type1 in self.datatypes_by_extension.items():
+            for d_type2, display_app in self.inherit_display_application_by_class:
+                current_app = d_type1.get_display_application( display_app.id, None )
+                if current_app is None and isinstance( d_type1, type( d_type2 ) ):
+                    self.log.debug( "Adding inherited display application '%s' to datatype '%s'" % ( display_app.id, extension ) )
+                    d_type1.add_display_application( display_app )
+
+    def reload_display_applications( self, display_application_ids=None ):
+        """
+        Reloads display applications: by id, or all if no ids provided
+        Returns tuple( [reloaded_ids], [failed_ids] )
+        """
+        if not display_application_ids:
+            display_application_ids = self.display_applications.keys()
+        elif not isinstance( display_application_ids, list ):
+            display_application_ids = [ display_application_ids ]
+        reloaded = []
+        failed = []
+        for display_application_id in display_application_ids:
+            try:
+                self.display_applications[ display_application_id ].reload()
+                reloaded.append( display_application_id )
+            except Exception as e:
+                self.log.debug( 'Requested to reload display application "%s", but failed: %s.', display_application_id, e )
+                failed.append( display_application_id )
+        return ( reloaded, failed )
+
+    def load_external_metadata_tool( self, toolbox ):
+        """Adds a tool which is used to set external metadata"""
+        # We need to be able to add a job to the queue to set metadata. The queue will currently only accept jobs with an associated
+        # tool.  We'll load a special tool to be used for Auto-Detecting metadata; this is less than ideal, but effective
+        # Properly building a tool without parsing an XML file is nearly impossible, so we bundle one with Galaxy.
+        set_meta_tool = toolbox.load_hidden_lib_tool( "galaxy/datatypes/set_metadata_tool.xml" )
+        self.set_external_metadata_tool = set_meta_tool
+        self.log.debug( "Loaded external metadata tool: %s", self.set_external_metadata_tool.id )
+
+    def set_default_values( self ):
+        # Default values.
+        if not self.datatypes_by_extension:
+            self.datatypes_by_extension = {
+                'ab1'           : binary.Ab1(),
+                'axt'           : sequence.Axt(),
+                'bam'           : binary.Bam(),
+                'bed'           : interval.Bed(),
+                'coverage'      : coverage.LastzCoverage(),
+                'customtrack'   : interval.CustomTrack(),
+                'csfasta'       : sequence.csFasta(),
+                'db3'           : binary.SQlite(),
+                'fasta'         : sequence.Fasta(),
+                'eland'         : tabular.Eland(),
+                'fastq'         : sequence.Fastq(),
+                'fastqsanger'   : sequence.FastqSanger(),
+                'gemini.sqlite' : binary.GeminiSQLite(),
+                'gtf'           : interval.Gtf(),
+                'gff'           : interval.Gff(),
+                'gff3'          : interval.Gff3(),
+                'genetrack'     : tracks.GeneTrack(),
+                'h5'            : binary.H5(),
+                'idpdb'         : binary.IdpDB(),
+                'interval'      : interval.Interval(),
+                'laj'           : images.Laj(),
+                'lav'           : sequence.Lav(),
+                'maf'           : sequence.Maf(),
+                'mz.sqlite'     : binary.MzSQlite(),
+                'pileup'        : tabular.Pileup(),
+                'qualsolid'     : qualityscore.QualityScoreSOLiD(),
+                'qualsolexa'    : qualityscore.QualityScoreSolexa(),
+                'qual454'       : qualityscore.QualityScore454(),
+                'sam'           : tabular.Sam(),
+                'scf'           : binary.Scf(),
+                'sff'           : binary.Sff(),
+                'tabular'       : tabular.Tabular(),
+                'csv'           : tabular.CSV(),
+                'taxonomy'      : tabular.Taxonomy(),
+                'txt'           : data.Text(),
+                'wig'           : interval.Wiggle(),
+                'xml'           : xml.GenericXml(),
+            }
+            self.mimetypes_by_extension = {
+                'ab1'           : 'application/octet-stream',
+                'axt'           : 'text/plain',
+                'bam'           : 'application/octet-stream',
+                'bed'           : 'text/plain',
+                'customtrack'   : 'text/plain',
+                'csfasta'       : 'text/plain',
+                'db3'           : 'application/octet-stream',
+                'eland'         : 'application/octet-stream',
+                'fasta'         : 'text/plain',
+                'fastq'         : 'text/plain',
+                'fastqsanger'   : 'text/plain',
+                'gemini.sqlite' : 'application/octet-stream',
+                'gtf'           : 'text/plain',
+                'gff'           : 'text/plain',
+                'gff3'          : 'text/plain',
+                'h5'            : 'application/octet-stream',
+                'idpdb'         : 'application/octet-stream',
+                'interval'      : 'text/plain',
+                'laj'           : 'text/plain',
+                'lav'           : 'text/plain',
+                'maf'           : 'text/plain',
+                'memexml'       : 'application/xml',
+                'mz.sqlite'     : 'application/octet-stream',
+                'pileup'        : 'text/plain',
+                'qualsolid'     : 'text/plain',
+                'qualsolexa'    : 'text/plain',
+                'qual454'       : 'text/plain',
+                'sam'           : 'text/plain',
+                'scf'           : 'application/octet-stream',
+                'sff'           : 'application/octet-stream',
+                'tabular'       : 'text/plain',
+                'csv'           : 'text/plain',
+                'taxonomy'      : 'text/plain',
+                'txt'           : 'text/plain',
+                'wig'           : 'text/plain',
+                'xml'           : 'application/xml',
+            }
+        # Super-supertype fix for input steps in workflows: make sure the generic 'data' type is always registered.
+        if 'data' not in self.datatypes_by_extension:
+            self.datatypes_by_extension[ 'data' ] = data.Data()
+            self.mimetypes_by_extension[ 'data' ] = 'application/octet-stream'
+        # Default values - the order in which we attempt to determine data types is critical
+        # because some formats are much more flexibly defined than others.
+        if len( self.sniff_order ) < 1:
+            self.sniff_order = [
+                binary.Bam(),
+                binary.Sff(),
+                binary.H5(),
+                binary.GeminiSQLite(),
+                binary.MzSQlite(),
+                binary.IdpDB(),
+                binary.SQlite(),
+                xml.GenericXml(),
+                sequence.Maf(),
+                sequence.Lav(),
+                sequence.csFasta(),
+                qualityscore.QualityScoreSOLiD(),
+                qualityscore.QualityScore454(),
+                sequence.Fasta(),
+                sequence.Fastq(),
+                interval.Wiggle(),
+                text.Html(),
+                sequence.Axt(),
+                interval.Bed(),
+                interval.CustomTrack(),
+                interval.Gtf(),
+                interval.Gff(),
+                interval.Gff3(),
+                tabular.Pileup(),
+                interval.Interval(),
+                tabular.Sam(),
+                tabular.Eland(),
+                tabular.CSV()
+            ]
+
+    def get_converters_by_datatype( self, ext ):
+        """Returns available converters by source type"""
+        converters = odict()
+        source_datatype = type( self.get_datatype_by_extension( ext ) )
+        for ext2, converter_dict in self.datatype_converters.items():
+            converter_datatype = type( self.get_datatype_by_extension( ext2 ) )
+            if issubclass( source_datatype, converter_datatype ):
+                converters.update( converter_dict )
+        # Ensure ext-level converters are present
+        if ext in self.datatype_converters:
+            converters.update( self.datatype_converters[ ext ] )
+        return converters
+
+    def get_converter_by_target_type( self, source_ext, target_ext ):
+        """Returns a converter based on source and target datatypes"""
+        converters = self.get_converters_by_datatype( source_ext )
+        if target_ext in converters:
+            return converters[ target_ext ]
+        return None
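+    # Hedged usage sketch: look up a converter tool from 'bed' to 'gff'. The
+    # extension pair is illustrative; actual availability depends on the loaded
+    # datatypes_conf.xml:
+    #
+    #     converter = registry.get_converter_by_target_type( 'bed', 'gff' )
+    #     if converter is not None:
+    #         ...  # converter is a loaded Galaxy tool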
+
+    def find_conversion_destination_for_dataset_by_extensions( self, dataset, accepted_formats, converter_safe=True ):
+        """Returns ( target_ext, existing converted dataset )"""
+        for convert_ext in self.get_converters_by_datatype( dataset.ext ):
+            if self.get_datatype_by_extension( convert_ext ).matches_any( accepted_formats ):
+                converted_dataset = dataset.get_converted_files_by_type( convert_ext )
+                if converted_dataset:
+                    ret_data = converted_dataset
+                elif not converter_safe:
+                    continue
+                else:
+                    ret_data = None
+                return ( convert_ext, ret_data )
+        return ( None, None )
+
+    def get_composite_extensions( self ):
+        return [ ext for ( ext, d_type ) in self.datatypes_by_extension.items() if d_type.composite_type is not None ]
+
+    def get_upload_metadata_params( self, context, group, tool ):
+        """Returns dict of case value:inputs for metadata conditional for upload tool"""
+        rval = {}
+        for ext, d_type in self.datatypes_by_extension.items():
+            inputs = []
+            for meta_name, meta_spec in d_type.metadata_spec.items():
+                if meta_spec.set_in_upload:
+                    help_txt = meta_spec.desc
+                    if not help_txt or help_txt == meta_name:
+                        help_txt = ""
+                    inputs.append( '<param type="text" name="%s" label="Set metadata value for "%s"" value="%s" help="%s"/>' % ( meta_name, meta_name, meta_spec.default, help_txt ) )
+            rval[ ext ] = "\n".join( inputs )
+        if 'auto' not in rval and 'txt' in rval:  # need to manually add 'auto' datatype
+            rval[ 'auto' ] = rval[ 'txt' ]
+        return rval
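+    # Illustrative only: for a hypothetical metadata element named 'dbkey' with
+    # default '?', the generated fragment would resemble:
+    #
+    #     <param type="text" name="dbkey" label="Set metadata value for &quot;dbkey&quot;" value="?" help=""/>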
+
+    @property
+    def edam_formats( self ):
+        """
+        """
+        mapping = dict((k, v.edam_format) for k, v in self.datatypes_by_extension.items())
+        return mapping
+
+    @property
+    def edam_data( self ):
+        """
+        """
+        mapping = dict((k, v.edam_data) for k, v in self.datatypes_by_extension.items())
+        return mapping
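+
+    # e.g., assuming 'fasta' is registered to the Fasta datatype (see
+    # sequence.py, which sets edam_format = "format_1929"):
+    #   registry.edam_formats[ 'fasta' ]  # -> 'format_1929'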
+
+    @property
+    def integrated_datatypes_configs( self ):
+        if self.xml_filename and os.path.isfile( self.xml_filename ):
+            return self.xml_filename
+        self.to_xml_file()
+        return self.xml_filename
+
+    def to_xml_file( self ):
+        if self.xml_filename is not None:
+            # If persisted previously, attempt to remove the temporary file in which we were written.
+            try:
+                os.unlink( self.xml_filename )
+            except OSError:
+                # the file may already be gone; nothing to clean up
+                pass
+            self.xml_filename = None
+        fd, filename = tempfile.mkstemp()
+        self.xml_filename = os.path.abspath( filename )
+        if self.converters_path_attr:
+            converters_path_str = ' converters_path="%s"' % self.converters_path_attr
+        else:
+            converters_path_str = ''
+        if self.display_path_attr:
+            display_path_str = ' display_path="%s"' % self.display_path_attr
+        else:
+            display_path_str = ''
+        os.write( fd, '<?xml version="1.0"?>\n' )
+        os.write( fd, '<datatypes>\n' )
+        os.write( fd, '<registration%s%s>\n' % ( converters_path_str, display_path_str ) )
+        for elem in self.datatype_elems:
+            os.write( fd, '%s' % galaxy.util.xml_to_string( elem ) )
+        os.write( fd, '</registration>\n' )
+        os.write( fd, '<sniffers>\n' )
+        for elem in self.sniffer_elems:
+            os.write( fd, '%s' % galaxy.util.xml_to_string( elem ) )
+        os.write( fd, '</sniffers>\n' )
+        os.write( fd, '</datatypes>\n' )
+        os.close( fd )
+        os.chmod( self.xml_filename, 0o644 )
+
+    def get_extension( self, elem ):
+        """
+        Function which returns the extension lowercased
+        :param elem:
+        :return extension:
+        """
+        extension = elem.get('extension', None)
+        # If extension is not None and is uppercase or mixed case, we need to lowercase it
+        if extension is not None and not extension.islower():
+            self.log.debug( "%s is not lower case; this could cause problems in the future. \
+            Please change it to lower case." % extension )
+            extension = extension.lower()
+        return extension
diff --git a/lib/galaxy/datatypes/sequence.py b/lib/galaxy/datatypes/sequence.py
new file mode 100644
index 0000000..d166014
--- /dev/null
+++ b/lib/galaxy/datatypes/sequence.py
@@ -0,0 +1,1068 @@
+"""
+Sequence classes
+"""
+
+import gzip
+import json
+import logging
+import os
+import re
+import string
+import sys
+from cgi import escape
+
+import bx.align.maf
+
+from galaxy import util
+from galaxy.datatypes import metadata
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes.sniff import get_headers
+from galaxy.util import nice_size
+from galaxy.util.checkers import is_gzip
+from galaxy.util.image_util import check_image_type
+
+from . import data
+
+if sys.version_info > (3,):
+    long = int
+
+log = logging.getLogger(__name__)
+
+
+class SequenceSplitLocations( data.Text ):
+    """
+    Class storing information about a sequence file composed of multiple gzip files concatenated as
+    one OR an uncompressed file. In the GZIP case, each sub-file's location is stored in start and end.
+
+    The format of the file is JSON::
+
+      { "sections" : [
+              { "start" : "x", "end" : "y", "sequences" : "z" },
+              ...
+      ]}
+
+    """
+    file_ext = "fqtoc"
+
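+    # A minimal concrete FQTOC document under this schema (values assumed):
+    #   { "sections": [ { "start": "0", "end": "74", "sequences": "10" } ] }
+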
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            try:
+                parsed_data = json.load(open(dataset.file_name))
+                # dataset.peek = json.dumps(data, sort_keys=True, indent=4)
+                dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+                dataset.blurb = '%d sections' % len(parsed_data['sections'])
+            except Exception:
+                dataset.peek = 'Not FQTOC file'
+                dataset.blurb = 'Not FQTOC file'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        if os.path.getsize(filename) < 50000:
+            try:
+                parsed_data = json.load(open(filename))
+                sections = parsed_data['sections']
+                for section in sections:
+                    if 'start' not in section or 'end' not in section or 'sequences' not in section:
+                        return False
+                return True
+            except Exception:
+                pass
+        return False
+
+
+class Sequence( data.Text ):
+    """Class describing a sequence"""
+    edam_data = "data_2044"
+
+    """Add metadata elements"""
+    MetadataElement( name="sequences", default=0, desc="Number of sequences", readonly=True, visible=False, optional=True, no_value=0 )
+
+    def set_meta( self, dataset, **kwd ):
+        """
+        Set the number of sequences and the number of data lines in dataset.
+        """
+        data_lines = 0
+        sequences = 0
+        for line in open( dataset.file_name ):
+            line = line.strip()
+            if line and line.startswith( '#' ):
+                # We don't count comment lines for sequence data types
+                continue
+            if line and line.startswith( '>' ):
+                sequences += 1
+                data_lines += 1
+            else:
+                data_lines += 1
+        dataset.metadata.data_lines = data_lines
+        dataset.metadata.sequences = sequences
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            if dataset.metadata.sequences:
+                dataset.blurb = "%s sequences" % util.commaify( str( dataset.metadata.sequences ) )
+            else:
+                dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def get_sequences_per_file(total_sequences, split_params):
+        if split_params['split_mode'] == 'number_of_parts':
+            # legacy basic mode - split into a specified number of parts
+            parts = int(split_params['split_size'])
+            sequences_per_file = [total_sequences // parts for i in range(parts)]
+            for i in range(total_sequences % parts):
+                sequences_per_file[i] += 1
+        elif split_params['split_mode'] == 'to_size':
+            # loop through the sections and calculate the number of sequences
+            chunk_size = long(split_params['split_size'])
+            rem = total_sequences % chunk_size
+            sequences_per_file = [chunk_size for i in range(total_sequences // chunk_size)]
+            # TODO: Should we invest the time in a better way to handle small remainders?
+            if rem > 0:
+                sequences_per_file.append(rem)
+        else:
+            raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+        return sequences_per_file
+    get_sequences_per_file = staticmethod(get_sequences_per_file)
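+
+    # Worked examples of the two modes (parameter dicts assumed):
+    #   get_sequences_per_file(10, {'split_mode': 'number_of_parts', 'split_size': 3})
+    #     -> [4, 3, 3]
+    #   get_sequences_per_file(10, {'split_mode': 'to_size', 'split_size': 4})
+    #     -> [4, 4, 2]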
+
+    def do_slow_split( cls, input_datasets, subdir_generator_function, split_params):
+        # count the sequences so we can split
+        # TODO: if metadata is present, take the number of lines / 4
+        if input_datasets[0].metadata is not None and input_datasets[0].metadata.sequences is not None:
+            total_sequences = input_datasets[0].metadata.sequences
+        else:
+            input_file = input_datasets[0].file_name
+            compress = is_gzip(input_file)
+            if compress:
+                # gzip is really slow before python 2.7!
+                in_file = gzip.GzipFile(input_file, 'r')
+            else:
+                # TODO
+                # if a file is not compressed, seek locations can be calculated and stored
+                # ideally, this would be done in metadata
+                # TODO
+                # Add BufferedReader if python 2.7?
+                in_file = open(input_file, 'rt')
+            total_sequences = long(0)
+            for i, line in enumerate(in_file):
+                total_sequences += 1
+            in_file.close()
+            total_sequences //= 4
+
+        sequences_per_file = cls.get_sequences_per_file(total_sequences, split_params)
+        return cls.write_split_files(input_datasets, None, subdir_generator_function, sequences_per_file)
+    do_slow_split = classmethod(do_slow_split)
+
+    def do_fast_split( cls, input_datasets, toc_file_datasets, subdir_generator_function, split_params):
+        data = json.load(open(toc_file_datasets[0].file_name))
+        sections = data['sections']
+        total_sequences = long(0)
+        for section in sections:
+            total_sequences += long(section['sequences'])
+        sequences_per_file = cls.get_sequences_per_file(total_sequences, split_params)
+        return cls.write_split_files(input_datasets, toc_file_datasets, subdir_generator_function, sequences_per_file)
+    do_fast_split = classmethod(do_fast_split)
+
+    def write_split_files(cls, input_datasets, toc_file_datasets, subdir_generator_function, sequences_per_file):
+        directories = []
+
+        def get_subdir(idx):
+            if idx < len(directories):
+                return directories[idx]
+            dir = subdir_generator_function()
+            directories.append(dir)
+            return dir
+
+        # we know how many splits and how many sequences in each. What remains is to write out instructions for the
+        # splitting of all the input files. To decouple the format of those instructions from this code, the exact format of
+        # those instructions is delegated to scripts
+        start_sequence = 0
+        for part_no in range(len(sequences_per_file)):
+            dir = get_subdir(part_no)
+            for ds_no in range(len(input_datasets)):
+                ds = input_datasets[ds_no]
+                base_name = os.path.basename(ds.file_name)
+                part_path = os.path.join(dir, base_name)
+                split_data = dict(class_name='%s.%s' % (cls.__module__, cls.__name__),
+                                  output_name=part_path,
+                                  input_name=ds.file_name,
+                                  args=dict(start_sequence=start_sequence, num_sequences=sequences_per_file[part_no]))
+                if toc_file_datasets is not None:
+                    toc = toc_file_datasets[ds_no]
+                    split_data['args']['toc_file'] = toc.file_name
+                f = open(os.path.join(dir, 'split_info_%s.json' % base_name), 'w')
+                json.dump(split_data, f)
+                f.close()
+            start_sequence += sequences_per_file[part_no]
+        return directories
+    write_split_files = classmethod(write_split_files)
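+
+    # For illustration, one split_info_<base_name>.json written above could
+    # contain (paths and class name assumed):
+    #   {"class_name": "galaxy.datatypes.sequence.Fastq",
+    #    "output_name": "<subdir>/reads.fastq", "input_name": "reads.fastq",
+    #    "args": {"start_sequence": 0, "num_sequences": 4}}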
+
+    def split( cls, input_datasets, subdir_generator_function, split_params):
+        """Split a generic sequence file (not sensible or possible, see subclasses)."""
+        if split_params is None:
+            return None
+        raise NotImplementedError("Can't split generic sequence files")
+
+    def get_split_commands_with_toc(input_name, output_name, toc_file, start_sequence, sequence_count):
+        """
+        Uses a Table of Contents dict, parsed from an FQTOC file, to come up with a set of
+        shell commands that will extract the necessary parts
+        >>> three_sections=[dict(start=0, end=74, sequences=10), dict(start=74, end=148, sequences=10), dict(start=148, end=148+76, sequences=10)]
+        >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=0, sequence_count=10)
+        ['dd bs=1 skip=0 count=74 if=./input.gz 2> /dev/null >> ./output.gz']
+        >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=1, sequence_count=5)
+        ['(dd bs=1 skip=0 count=74 if=./input.gz 2> /dev/null )| zcat | ( tail -n +5 2> /dev/null) | head -20 | gzip -c >> ./output.gz']
+        >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=0, sequence_count=20)
+        ['dd bs=1 skip=0 count=148 if=./input.gz 2> /dev/null >> ./output.gz']
+        >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=5, sequence_count=10)
+        ['(dd bs=1 skip=0 count=74 if=./input.gz 2> /dev/null )| zcat | ( tail -n +21 2> /dev/null) | head -20 | gzip -c >> ./output.gz', '(dd bs=1 skip=74 count=74 if=./input.gz 2> /dev/null )| zcat | ( tail -n +1 2> /dev/null) | head -20 | gzip -c >> ./output.gz']
+        >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=10, sequence_count=10)
+        ['dd bs=1 skip=74 count=74 if=./input.gz 2> /dev/null >> ./output.gz']
+        >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=5, sequence_count=20)
+        ['(dd bs=1 skip=0 count=74 if=./input.gz 2> /dev/null )| zcat | ( tail -n +21 2> /dev/null) | head -20 | gzip -c >> ./output.gz', 'dd bs=1 skip=74 count=74 if=./input.gz 2> /dev/null >> ./output.gz', '(dd bs=1 skip=148 count=76 if=./input.gz 2> /dev/null )| zcat | ( tail -n +1 2> /dev/null) | head -20 | gzip -c >> ./output.gz']
+        """
+        sections = toc_file['sections']
+        result = []
+
+        current_sequence = long(0)
+        i = 0
+        # skip to the section that contains my starting sequence
+        while i < len(sections) and start_sequence >= current_sequence + long(sections[i]['sequences']):
+            current_sequence += long(sections[i]['sequences'])
+            i += 1
+        if i == len(sections):  # bad input data!
+            raise Exception('No FQTOC section contains starting sequence %s' % start_sequence)
+
+        # These two variables act as an accumulator for consecutive entire blocks that
+        # can be copied verbatim (without decompressing)
+        start_chunk = long(-1)
+        end_chunk = long(-1)
+        copy_chunk_cmd = 'dd bs=1 skip=%s count=%s if=%s 2> /dev/null >> %s'
+
+        while sequence_count > 0 and i < len(sections):
+            # we need to extract partial data. So, find the byte offsets of the chunks that contain the data we need
+            # use a combination of dd (to pull just the right sections out), tail (to skip lines) and head (to get the
+            # right number of lines)
+            sequences = long(sections[i]['sequences'])
+            skip_sequences = start_sequence - current_sequence
+            sequences_to_extract = min(sequence_count, sequences - skip_sequences)
+            start_copy = long(sections[i]['start'])
+            end_copy = long(sections[i]['end'])
+            if sequences_to_extract < sequences:
+                if start_chunk > -1:
+                    result.append(copy_chunk_cmd % (start_chunk, end_chunk - start_chunk, input_name, output_name))
+                    start_chunk = -1
+                # extract, unzip, trim, recompress
+                result.append('(dd bs=1 skip=%s count=%s if=%s 2> /dev/null )| zcat | ( tail -n +%s 2> /dev/null) | head -%s | gzip -c >> %s' %
+                              (start_copy, end_copy - start_copy, input_name, skip_sequences * 4 + 1, sequences_to_extract * 4, output_name))
+            else:  # whole section - add it to the start_chunk/end_chunk accumulator
+                if start_chunk == -1:
+                    start_chunk = start_copy
+                end_chunk = end_copy
+            sequence_count -= sequences_to_extract
+            start_sequence += sequences_to_extract
+            current_sequence += sequences
+            i += 1
+        if start_chunk > -1:
+            result.append(copy_chunk_cmd % (start_chunk, end_chunk - start_chunk, input_name, output_name))
+
+        if sequence_count > 0:
+            raise Exception('%s sequences not found in file' % sequence_count)
+
+        return result
+    get_split_commands_with_toc = staticmethod(get_split_commands_with_toc)
+
+    def get_split_commands_sequential(is_compressed, input_name, output_name, start_sequence, sequence_count):
+        """
+        Does a brain-dead sequential scan & extract of certain sequences
+        >>> Sequence.get_split_commands_sequential(True, './input.gz', './output.gz', start_sequence=0, sequence_count=10)
+        ['zcat "./input.gz" | ( tail -n +1 2> /dev/null) | head -40 | gzip -c > "./output.gz"']
+        >>> Sequence.get_split_commands_sequential(False, './input.fastq', './output.fastq', start_sequence=10, sequence_count=10)
+        ['tail -n +41 "./input.fastq" 2> /dev/null | head -40 > "./output.fastq"']
+        """
+        start_line = start_sequence * 4
+        line_count = sequence_count * 4
+        # TODO: verify that tail can handle 64-bit numbers
+        if is_compressed:
+            cmd = 'zcat "%s" | ( tail -n +%s 2> /dev/null) | head -%s | gzip -c' % (input_name, start_line + 1, line_count)
+        else:
+            cmd = 'tail -n +%s "%s" 2> /dev/null | head -%s' % (start_line + 1, input_name, line_count)
+        cmd += ' > "%s"' % output_name
+
+        return [cmd]
+    get_split_commands_sequential = staticmethod(get_split_commands_sequential)
+
+
+class Alignment( data.Text ):
+    """Class describing an alignment"""
+    edam_data = "data_0863"
+
+    """Add metadata elements"""
+    MetadataElement( name="species", desc="Species", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None )
+
+    def split( cls, input_datasets, subdir_generator_function, split_params):
+        """Split a generic alignment file (not sensible or possible, see subclasses)."""
+        if split_params is None:
+            return None
+        raise NotImplementedError("Can't split generic alignment files")
+
+
+class Fasta( Sequence ):
+    """Class representing a FASTA sequence"""
+    edam_format = "format_1929"
+    file_ext = "fasta"
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in fasta format
+
+        A sequence in FASTA format consists of a single-line description, followed by lines of sequence data.
+        The first character of the description line is a greater-than (">") symbol in the first column.
+        All lines should be shorter than 80 characters
+
+        For complete details see http://www.ncbi.nlm.nih.gov/blast/fasta.shtml
+
+        Rules for sniffing as True:
+
+            We don't care about line length (other than empty lines).
+
+            The first non-empty line must start with '>', and the very next line (stripped) must contain sequence data and not be another header.
+
+                'sequence data' here is loosely defined as non-empty lines which do not start with '>'
+
+                This will cause Color Space FASTA (csfasta) to be detected as True (they are, after all, still FASTA files - they have a header line followed by sequence data)
+
+                    Previously this method did some checking to determine if the sequence data had integers (presumably to differentiate between fasta and csfasta)
+
+                    This should be done through sniff order, where csfasta (which currently has a null sniff function) is detected first (stricter definition), followed sometime after by fasta
+
+            We will only check that the first purported sequence is correctly formatted.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'sequence.maf' )
+        >>> Fasta().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'sequence.fasta' )
+        >>> Fasta().sniff( fname )
+        True
+        """
+
+        try:
+            fh = open( filename )
+            while True:
+                line = fh.readline()
+                if not line:
+                    break  # EOF
+                line = line.strip()
+                if line:  # first non-empty line
+                    if line.startswith( '>' ):
+                        # The next line.strip() must not be '', nor start with '>'
+                        line = fh.readline().strip()
+                        if line == '' or line.startswith( '>' ):
+                            break
+
+                        # If there is a third line and it isn't a header line, it must not contain chars like '()[].'; otherwise it is most likely a DotBracket file
+                        line = fh.readline()
+                        if not line.startswith('>') and re.search("[\(\)\[\]\.]", line):
+                            break
+
+                        return True
+                    else:
+                        break  # we found a non-empty line, but it's not a fasta header
+            fh.close()
+        except Exception:
+            pass
+        return False
+
+    def split(cls, input_datasets, subdir_generator_function, split_params):
+        """Split a FASTA file sequence by sequence.
+
+        Note that even if split_mode="number_of_parts", the actual number of
+        sub-files produced may not match that requested by split_size.
+
+        If split_mode="to_size" then split_size is treated as the number of
+        FASTA records to put in each sub-file (not size in bytes).
+        """
+        if split_params is None:
+            return
+        if len(input_datasets) > 1:
+            raise Exception("FASTA file splitting does not support multiple files")
+        input_file = input_datasets[0].file_name
+
+        # Counting chunk size as number of sequences.
+        if 'split_mode' not in split_params:
+            raise Exception('Tool does not define a split mode')
+        elif split_params['split_mode'] == 'number_of_parts':
+            split_size = int(split_params['split_size'])
+            log.debug("Split %s into %i parts..." % (input_file, split_size))
+            # if split_mode = number_of_parts, and split_size = 10, and
+            # we know the number of sequences (say 1234), then divide
+            # by ten, giving ten files of approx 123 sequences each.
+            if input_datasets[0].metadata is not None and input_datasets[0].metadata.sequences:
+                # Galaxy has already counted/estimated the number
+                batch_size = 1 + input_datasets[0].metadata.sequences // split_size
+                cls._count_split(input_file, batch_size, subdir_generator_function)
+            else:
+                # OK, if Galaxy hasn't counted them, it may be a big file.
+                # We're not going to count the records which would be slow
+                # and a waste of disk IO time - instead we'll split using
+                # the file size.
+                chunk_size = os.path.getsize(input_file) // split_size
+                cls._size_split(input_file, chunk_size, subdir_generator_function)
+        elif split_params['split_mode'] == 'to_size':
+            # Split the input file into as many sub-files as required,
+            # each containing to_size many sequences
+            batch_size = int(split_params['split_size'])
+            log.debug("Split %s into batches of %i records..." % (input_file, batch_size))
+            cls._count_split(input_file, batch_size, subdir_generator_function)
+        else:
+            raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+    split = classmethod(split)
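+
+    # Usage sketch (dataset and subdir generator assumed):
+    #   Fasta.split( [hda], make_subdir, { 'split_mode': 'to_size', 'split_size': 100 } )
+    # would write sub-files of at most 100 FASTA records each via _count_split().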
+
+    def _size_split(cls, input_file, chunk_size, subdir_generator_function):
+        """Split a FASTA file into chunks based on size on disk.
+
+        This does of course preserve complete records - it only splits at the
+        start of a new FASTA sequence record.
+        """
+        log.debug("Attemping to split FASTA file %s into chunks of %i bytes" % (input_file, chunk_size))
+        f = open(input_file, "rU")
+        part_file = None
+        try:
+            # Note if the input FASTA file has no sequences, we will
+            # produce just one sub-file which will be a copy of it.
+            part_dir = subdir_generator_function()
+            part_path = os.path.join(part_dir, os.path.basename(input_file))
+            part_file = open(part_path, 'w')
+            log.debug("Writing %s part to %s" % (input_file, part_path))
+            start_offset = 0
+            while True:
+                offset = f.tell()
+                line = f.readline()
+                if not line:
+                    break
+                if line[0] == ">" and offset - start_offset >= chunk_size:
+                    # Start a new sub-file
+                    part_file.close()
+                    part_dir = subdir_generator_function()
+                    part_path = os.path.join(part_dir, os.path.basename(input_file))
+                    part_file = open(part_path, 'w')
+                    log.debug("Writing %s part to %s" % (input_file, part_path))
+                    start_offset = f.tell()
+                part_file.write(line)
+        except Exception as e:
+            log.error('Unable to size split FASTA file: %s' % str(e))
+            f.close()
+            if part_file is not None:
+                part_file.close()
+            raise
+        f.close()
+    _size_split = classmethod(_size_split)
+
+    def _count_split(cls, input_file, chunk_size, subdir_generator_function):
+        """Split a FASTA file into chunks based on counting records."""
+        log.debug("Attemping to split FASTA file %s into chunks of %i sequences" % (input_file, chunk_size))
+        f = open(input_file, "rU")
+        part_file = None
+        try:
+            # Note if the input FASTA file has no sequences, we will
+            # produce just one sub-file which will be a copy of it.
+            part_dir = subdir_generator_function()
+            part_path = os.path.join(part_dir, os.path.basename(input_file))
+            part_file = open(part_path, 'w')
+            log.debug("Writing %s part to %s" % (input_file, part_path))
+            rec_count = 0
+            while True:
+                line = f.readline()
+                if not line:
+                    break
+                if line[0] == ">":
+                    rec_count += 1
+                    if rec_count > chunk_size:
+                        # Start a new sub-file
+                        part_file.close()
+                        part_dir = subdir_generator_function()
+                        part_path = os.path.join(part_dir, os.path.basename(input_file))
+                        part_file = open(part_path, 'w')
+                        log.debug("Writing %s part to %s" % (input_file, part_path))
+                        rec_count = 1
+                part_file.write(line)
+            part_file.close()
+        except Exception as e:
+            log.error('Unable to count split FASTA file: %s' % str(e))
+            f.close()
+            if part_file is not None:
+                part_file.close()
+            raise
+        f.close()
+    _count_split = classmethod(_count_split)
+
+
+class csFasta( Sequence ):
+    """ Class representing the SOLID Color-Space sequence ( csfasta ) """
+    edam_format = "format_3589"
+    file_ext = "csfasta"
+
+    def sniff( self, filename ):
+        """
+        Color-space sequence:
+            >2_15_85_F3
+            T213021013012303002332212012112221222112212222
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'sequence.fasta' )
+        >>> csFasta().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'sequence.csfasta' )
+        >>> csFasta().sniff( fname )
+        True
+        """
+        try:
+            fh = open( filename )
+            while True:
+                line = fh.readline()
+                if not line:
+                    break  # EOF
+                line = line.strip()
+                if line and not line.startswith( '#' ):  # first non-empty non-comment line
+                    if line.startswith( '>' ):
+                        line = fh.readline().strip()
+                        if line == '' or line.startswith( '>' ):
+                            break
+                        elif line[0] not in string.ascii_uppercase:
+                            return False
+                        elif len( line ) > 1 and not re.search( '^[\d.]+$', line[1:] ):
+                            return False
+                        return True
+                    else:
+                        break  # we found a non-empty line, but it's not a header
+            fh.close()
+        except Exception:
+            pass
+        return False
+
+    def set_meta( self, dataset, **kwd ):
+        if self.max_optional_metadata_filesize >= 0 and dataset.get_size() > self.max_optional_metadata_filesize:
+            dataset.metadata.data_lines = None
+            dataset.metadata.sequences = None
+            return
+        return Sequence.set_meta( self, dataset, **kwd )
+
+
+class Fastq ( Sequence ):
+    """Class representing a generic FASTQ sequence"""
+    edam_format = "format_1930"
+    file_ext = "fastq"
+
+    def set_meta( self, dataset, **kwd ):
+        """
+        Set the number of sequences and the number of data lines
+        in dataset.
+        FIXME: This does not properly handle line wrapping
+        """
+        if self.max_optional_metadata_filesize >= 0 and dataset.get_size() > self.max_optional_metadata_filesize:
+            dataset.metadata.data_lines = None
+            dataset.metadata.sequences = None
+            return
+        data_lines = 0
+        sequences = 0
+        seq_counter = 0     # blocks should be 4 lines long
+        for line in open( dataset.file_name ):
+            line = line.strip()
+            if line and line.startswith( '#' ) and not data_lines:
+                # We don't count comment lines for sequence data types
+                continue
+            seq_counter += 1
+            data_lines += 1
+            if line and line.startswith( '@' ):
+                if seq_counter >= 4:
+                    # count previous block
+                    # blocks should be 4 lines long
+                    sequences += 1
+                    seq_counter = 1
+        if seq_counter >= 4:
+            # count final block
+            sequences += 1
+        dataset.metadata.data_lines = data_lines
+        dataset.metadata.sequences = sequences
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in generic fastq format.
+        For details, see http://maq.sourceforge.net/fastq.shtml
+
+        Note: There are three kinds of FASTQ files, known as "Sanger" (sometimes called "Standard"), Solexa, and Illumina
+              These differ in the representation of the quality scores
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( '1.fastqsanger' )
+        >>> Fastq().sniff( fname )
+        True
+        >>> fname = get_test_fname( '2.fastqsanger' )
+        >>> Fastq().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, None )
+        bases_regexp = re.compile( "^[NGTAC]*$" )
+        # check that first block looks like a fastq block
+        try:
+            if len( headers ) >= 4 and headers[0][0] and headers[0][0][0] == "@" and headers[2][0] and headers[2][0][0] == "+" and headers[1][0]:
+                # Check the sequence line, make sure it contains only G/C/A/T/N
+                if not bases_regexp.match( headers[1][0] ):
+                    return False
+                return True
+            return False
+        except Exception:
+            return False
+
+    def split( cls, input_datasets, subdir_generator_function, split_params):
+        """
+        FASTQ files are split on cluster boundaries, in increments of 4 lines
+        """
+        if split_params is None:
+            return None
+
+        # first, see if there are any associated FQTOC files that will give us the split locations
+        # if so, we don't need to read the files to do the splitting
+        toc_file_datasets = []
+        for ds in input_datasets:
+            tmp_ds = ds
+            fqtoc_file = None
+            while fqtoc_file is None and tmp_ds is not None:
+                fqtoc_file = tmp_ds.get_converted_files_by_type('fqtoc')
+                tmp_ds = tmp_ds.copied_from_library_dataset_dataset_association
+
+            if fqtoc_file is not None:
+                toc_file_datasets.append(fqtoc_file)
+
+        if len(toc_file_datasets) == len(input_datasets):
+            return cls.do_fast_split(input_datasets, toc_file_datasets, subdir_generator_function, split_params)
+        return cls.do_slow_split(input_datasets, subdir_generator_function, split_params)
+    split = classmethod(split)
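+
+    # Dispatch summary: when every input dataset has an FQTOC companion, the
+    # split works from byte offsets (do_fast_split); otherwise every line of
+    # the input must be read (do_slow_split).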
+
+    def process_split_file(data):
+        """
+        This is called in the context of an external process launched by a Task (possibly not on the Galaxy machine)
+        to create the input files for the Task. The parameters:
+        data - a dict containing the contents of the split file
+        """
+        args = data['args']
+        input_name = data['input_name']
+        output_name = data['output_name']
+        start_sequence = long(args['start_sequence'])
+        sequence_count = long(args['num_sequences'])
+
+        if 'toc_file' in args:
+            toc_file = json.load(open(args['toc_file'], 'r'))
+            commands = Sequence.get_split_commands_with_toc(input_name, output_name, toc_file, start_sequence, sequence_count)
+        else:
+            commands = Sequence.get_split_commands_sequential(is_gzip(input_name), input_name, output_name, start_sequence, sequence_count)
+        for cmd in commands:
+            if 0 != os.system(cmd):
+                raise Exception("Executing '%s' failed" % cmd)
+        return True
+    process_split_file = staticmethod(process_split_file)
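+
+    # Example of the dict this expects, mirroring write_split_files() above
+    # (paths assumed):
+    #   {"input_name": "reads.fastq.gz", "output_name": "part0/reads.fastq.gz",
+    #    "args": {"start_sequence": 0, "num_sequences": 10, "toc_file": "reads.fqtoc"}}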
+
+
+class FastqSanger( Fastq ):
+    """Class representing a FASTQ sequence ( the Sanger variant )"""
+    edam_format = "format_1932"
+    file_ext = "fastqsanger"
+
+
+class FastqSolexa( Fastq ):
+    """Class representing a FASTQ sequence ( the Solexa variant )"""
+    edam_format = "format_1933"
+    file_ext = "fastqsolexa"
+
+
+class FastqIllumina( Fastq ):
+    """Class representing a FASTQ sequence ( the Illumina 1.3+ variant )"""
+    edam_format = "format_1931"
+    file_ext = "fastqillumina"
+
+
+class FastqCSSanger( Fastq ):
+    """Class representing a Color Space FASTQ sequence ( e.g a SOLiD variant )"""
+    file_ext = "fastqcssanger"
+
+
+class Maf( Alignment ):
+    """Class describing a Maf alignment"""
+    edam_format = "format_3008"
+    file_ext = "maf"
+
+    # Readonly and optional, users can't unset it, but if it is not set, we are generally ok; if required use a metadata validator in the tool definition
+    MetadataElement( name="blocks", default=0, desc="Number of blocks", readonly=True, optional=True, visible=False, no_value=0 )
+    MetadataElement( name="species_chromosomes", desc="Species Chromosomes", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
+    MetadataElement( name="maf_index", desc="MAF Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
+
+    def init_meta( self, dataset, copy_from=None ):
+        Alignment.init_meta( self, dataset, copy_from=copy_from )
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        """
+        Parses and sets species, chromosomes, index from MAF file.
+        """
+        # these metadata values are not accessible by users, always overwrite
+        # Imported here to avoid circular dependency
+        from galaxy.tools.util.maf_utilities import build_maf_index_species_chromosomes
+        indexes, species, species_chromosomes, blocks = build_maf_index_species_chromosomes( dataset.file_name )
+        if indexes is None:
+            return  # this is not a MAF file
+        dataset.metadata.species = species
+        dataset.metadata.blocks = blocks
+
+        # write species chromosomes to a file
+        chrom_file = dataset.metadata.species_chromosomes
+        if not chrom_file:
+            chrom_file = dataset.metadata.spec['species_chromosomes'].param.new_file( dataset=dataset )
+        chrom_out = open( chrom_file.file_name, 'wb' )
+        for spec, chroms in species_chromosomes.items():
+            chrom_out.write( "%s\t%s\n" % ( spec, "\t".join( chroms ) ) )
+        chrom_out.close()
+        dataset.metadata.species_chromosomes = chrom_file
+
+        index_file = dataset.metadata.maf_index
+        if not index_file:
+            index_file = dataset.metadata.spec['maf_index'].param.new_file( dataset=dataset )
+        indexes.write( open( index_file.file_name, 'wb' ) )
+        dataset.metadata.maf_index = index_file
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            # The file must exist on disk for the get_file_peek() method
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            if dataset.metadata.blocks:
+                dataset.blurb = "%s blocks" % util.commaify( str( dataset.metadata.blocks ) )
+            else:
+                # Number of blocks is not known ( this should not happen ), and auto-detect is
+                # needed to set metadata
+                dataset.blurb = "? blocks"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return self.make_html_table( dataset )
+
+    def make_html_table( self, dataset, skipchars=[] ):
+        """Create HTML table, used for displaying peek"""
+        out = ['<table cellspacing="0" cellpadding="3">']
+        try:
+            out.append('<tr><th>Species: ')
+            for species in dataset.metadata.species:
+                out.append( '%s ' % species )
+            out.append( '</th></tr>' )
+            if not dataset.peek:
+                dataset.set_peek()
+            data = dataset.peek
+            lines = data.splitlines()
+            for line in lines:
+                line = line.strip()
+                if not line:
+                    continue
+                out.append( '<tr><td>%s</td></tr>' % escape( line ) )
+            out.append( '</table>' )
+            out = "".join( out )
+        except Exception as exc:
+            out = "Can't create peek %s" % exc
+        return out
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in maf format
+
+        The .maf format is line-oriented. Each multiple alignment ends with a blank line.
+        Each sequence in an alignment is on a single line, which can get quite long, but
+        there is no length limit. Words in a line are delimited by any white space.
+        Lines starting with # are considered to be comments. Lines starting with ## can
+        be ignored by most programs, but contain meta-data of one form or another.
+
+        The first line of a .maf file begins with ##maf. This word is followed by white-space-separated
+        variable=value pairs. There should be no white space surrounding the "=".
+
+        For complete details see http://genome.ucsc.edu/FAQ/FAQformat#format5
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'sequence.maf' )
+        >>> Maf().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'sequence.fasta' )
+        >>> Maf().sniff( fname )
+        False
+        """
+        headers = get_headers( filename, None )
+        try:
+            if len(headers) > 1 and headers[0][0] and headers[0][0] == "##maf":
+                return True
+            else:
+                return False
+        except Exception:
+            return False
+
+
+class MafCustomTrack( data.Text ):
+    file_ext = "mafcustomtrack"
+
+    MetadataElement( name="vp_chromosome", default='chr1', desc="Viewport Chromosome", readonly=True, optional=True, visible=False, no_value='' )
+    MetadataElement( name="vp_start", default='1', desc="Viewport Start", readonly=True, optional=True, visible=False, no_value='' )
+    MetadataElement( name="vp_end", default='100', desc="Viewport End", readonly=True, optional=True, visible=False, no_value='' )
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        """
+        Parses and sets viewport metadata from MAF file.
+        """
+        max_block_check = 10
+        chrom = None
+        forward_strand_start = float( 'inf' )
+        forward_strand_end = 0
+        try:
+            maf_file = open( dataset.file_name )
+            maf_file.readline()  # move past track line
+            for i, block in enumerate( bx.align.maf.Reader( maf_file ) ):
+                ref_comp = block.get_component_by_src_start( dataset.metadata.dbkey )
+                if ref_comp:
+                    ref_chrom = bx.align.maf.src_split( ref_comp.src )[-1]
+                    if chrom is None:
+                        chrom = ref_chrom
+                    if chrom == ref_chrom:
+                        forward_strand_start = min( forward_strand_start, ref_comp.forward_strand_start )
+                        forward_strand_end = max( forward_strand_end, ref_comp.forward_strand_end )
+                if i > max_block_check:
+                    break
+
+            if forward_strand_end > forward_strand_start:
+                dataset.metadata.vp_chromosome = chrom
+                dataset.metadata.vp_start = forward_strand_start
+                dataset.metadata.vp_end = forward_strand_end
+        except Exception:
+            pass
+
+
+class Axt( data.Text ):
+    """Class describing an axt alignment"""
+    # gvk- 11/19/09 - This is really an alignment, but we no longer have tools that use this data type, and it is
+    # here simply for backward compatibility ( although it is still in the datatypes registry ).  Subclassing
+    # from data.Text eliminates managing metadata elements inherited from the Alignment class.
+
+    edam_data = "data_0863"
+    edam_format = "format_3013"
+    file_ext = "axt"
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in axt format
+
+        axt alignment files are produced from Blastz, an alignment tool available from Webb Miller's lab
+        at Penn State University.
+
+        Each alignment block in an axt file contains three lines: a summary line and 2 sequence lines.
+        Blocks are separated from one another by blank lines.
+
+        The summary line contains chromosomal position and size information about the alignment. It
+        consists of 9 required fields.
+
+        The sequence lines contain the sequence of the primary assembly (line 2) and aligning assembly
+        (line 3) with inserts.  Repeats are indicated by lower-case letters.
+
+        For complete details see http://genome.ucsc.edu/goldenPath/help/axt.html
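+
+        An example summary line (taken from the UCSC axt documentation):
+        0 chr19 3001012 3001075 chr11 70568380 70568443 - 3500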
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'alignment.axt' )
+        >>> Axt().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'alignment.lav' )
+        >>> Axt().sniff( fname )
+        False
+        """
+        headers = get_headers( filename, None )
+        if len(headers) < 4:
+            return False
+        for hdr in headers:
+            if len(hdr) > 0 and hdr[0].startswith("##matrix=axt"):
+                return True
+            if len(hdr) > 0 and not hdr[0].startswith("#"):
+                if len(hdr) != 9:
+                    return False
+                try:
+                    for col in ( hdr[0], hdr[2], hdr[3], hdr[5], hdr[6], hdr[8] ):
+                        int( col )
+                except ValueError:
+                    return False
+                if hdr[7] not in data.valid_strand:
+                    return False
+                else:
+                    return True
+
+
+class Lav( data.Text ):
+    """Class describing a LAV alignment"""
+    # gvk- 11/19/09 - This is really an alignment, but we no longer have tools that use this data type, and it is
+    # here simply for backward compatibility ( although it is still in the datatypes registry ).  Subclassing
+    # from data.Text eliminates managing metadata elements inherited from the Alignment class.
+
+    edam_data = "data_0863"
+    edam_format = "format_3014"
+    file_ext = "lav"
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in lav format
+
+        LAV is an alignment format developed by Webb Miller's group. It is the primary output format for BLASTZ.
+        The first line of a .lav file begins with #:lav.
+
+        For complete details see http://www.bioperl.org/wiki/LAV_alignment_format
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'alignment.lav' )
+        >>> Lav().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'alignment.axt' )
+        >>> Lav().sniff( fname )
+        False
+        """
+        headers = get_headers( filename, None )
+        try:
+            if len(headers) > 1 and headers[0][0] and headers[0][0].startswith('#:lav'):
+                return True
+            else:
+                return False
+        except Exception:
+            return False
+
+
+class RNADotPlotMatrix( data.Data ):
+    edam_format = "format_3466"
+    file_ext = "rna_eps"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = 'RNA Dot Plot format (Postscript derivative)'
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff(self, filename):
+        """Determine if the file is in RNA dot plot format."""
+        if check_image_type( filename, ['EPS'] ):
+            seq = False
+            coor = False
+            pairs = False
+            with open( filename ) as handle:
+                for line in handle:
+                    line = line.strip()
+                    if line:
+                        if line.startswith('/sequence'):
+                            seq = True
+                        elif line.startswith('/coor'):
+                            coor = True
+                        elif line.startswith('/pairs'):
+                            pairs = True
+                    if seq and coor and pairs:
+                        return True
+        return False
+
+
+class DotBracket ( Sequence ):
+    edam_data = "data_0880"
+    edam_format = "format_1457"
+    file_ext = "dbn"
+
+    sequence_regexp = re.compile( "^[ACGTURYKMSWBDHVN]+$", re.I)
+    structure_regexp = re.compile( "^[\(\)\.\[\]{}]+$" )
+
+    def set_meta( self, dataset, **kwd ):
+        """
+        Set the number of sequences and the number of data lines
+        in dataset.
+        """
+        if self.max_optional_metadata_filesize >= 0 and dataset.get_size() > self.max_optional_metadata_filesize:
+            dataset.metadata.data_lines = None
+            dataset.metadata.sequences = None
+            dataset.metadata.secondary_structures = None
+            return
+
+        data_lines = 0
+        sequences = 0
+
+        for line in open( dataset.file_name ):
+            line = line.strip()
+            data_lines += 1
+
+            if line and line.startswith( '>' ):
+                sequences += 1
+
+        dataset.metadata.data_lines = data_lines
+        dataset.metadata.sequences = sequences
+
+    def sniff(self, filename):
+        """
+        Galaxy Dbn (Dot-Bracket notation) rules:
+
+        * The first non-empty line is a header line: no comment lines are allowed.
+
+          * A header line starts with a '>' symbol and continues with 0 or multiple symbols until the line ends.
+
+        * The second non-empty line is a sequence line.
+
+          * A sequence line may only include chars that match the FASTA format (https://en.wikipedia.org/wiki/FASTA_format#Sequence_representation) symbols for nucleotides: ACGTURYKMSWBDHVN, and may thus not include whitespaces.
+          * A sequence line has no prefix and no suffix.
+          * A sequence line is case insensitive.
+
+        * The third non-empty line is a structure (Dot-Bracket) line and only describes the 2D structure of the sequence above it.
+
+          * A structure line must consist of the following chars: '.{}[]()'.
+          * A structure line must be of the same length as the sequence line, and each char represents the structure of the nucleotide above it.
+          * A structure line has no prefix and no suffix.
+          * A nucleotide pairs with only 1 or 0 other nucleotides.
+
+            * In a structure line, the number of '(' symbols equals the number of ')' symbols, the number of '[' symbols equals the number of ']' symbols and the number of '{' symbols equals the number of '}' symbols.
+
+        * The format accepts multiple entries per file, given that each entry is provided as three lines: the header, sequence and structure line.
+
+            * Sniffing is only applied on the first entry.
+
+        * Empty lines are allowed.
+         """
+
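+        # A minimal entry satisfying the rules above (illustrative):
+        #   >seq1
+        #   ACGU
+        #   (..)
+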
+        state = 0
+
+        with open( filename, "r" ) as handle:
+            for line in handle:
+                line = line.strip()
+
+                if line:
+                    # header line
+                    if state == 0:
+                        if line[0] != '>':
+                            return False
+                        else:
+                            state = 1
+
+                    # sequence line
+                    elif state == 1:
+                        if not self.sequence_regexp.match(line):
+                            return False
+                        else:
+                            sequence_size = len(line)
+                            state = 2
+
+                    # dot-bracket structure line
+                    elif state == 2:
+                        if sequence_size != len(line) or not self.structure_regexp.match(line) or \
+                                line.count('(') != line.count(')') or \
+                                line.count('[') != line.count(']') or \
+                                line.count('{') != line.count('}'):
+                            return False
+                        else:
+                            return True
+
+        # Number of lines is less than 3
+        return False
diff --git a/lib/galaxy/datatypes/set_metadata_tool.xml b/lib/galaxy/datatypes/set_metadata_tool.xml
new file mode 100644
index 0000000..6d3a95b
--- /dev/null
+++ b/lib/galaxy/datatypes/set_metadata_tool.xml
@@ -0,0 +1,18 @@
+<tool id="__SET_METADATA__" name="Set External Metadata" version="1.0.1" tool_type="set_metadata">
+  <type class="SetMetadataTool" module="galaxy.tools"/>
+  <requirements>
+      <requirement type="package">samtools</requirement>
+  </requirements>
+  <action module="galaxy.tools.actions.metadata" class="SetMetadataToolAction"/>
+  <command>"\${GALAXY_PYTHON:-python}" "${set_metadata}" ${__SET_EXTERNAL_METADATA_COMMAND_LINE__}</command>
+  <inputs>
+    <param format="data" name="input1" type="data" label="File to set metadata on."/>
+    <param name="__ORIGINAL_DATASET_STATE__" type="hidden" value=""/>
+    <param name="__SET_EXTERNAL_METADATA_COMMAND_LINE__" type="hidden" value="">
+      <sanitizer sanitize="False"/>
+    </param>
+  </inputs>
+  <configfiles>
+    <configfile name="set_metadata">from galaxy_ext.metadata.set_metadata import set_metadata; set_metadata()</configfile>
+  </configfiles>
+</tool>
diff --git a/lib/galaxy/datatypes/sniff.py b/lib/galaxy/datatypes/sniff.py
new file mode 100644
index 0000000..2c79741
--- /dev/null
+++ b/lib/galaxy/datatypes/sniff.py
@@ -0,0 +1,484 @@
+"""
+File format detector
+"""
+from __future__ import absolute_import
+
+import gzip
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from encodings import search_function as encodings_search_function
+from six import text_type
+
+from galaxy import util
+from galaxy.util import multi_byte
+from galaxy.util import unicodify
+from galaxy.util.checkers import check_binary, check_html, is_gzip
+from galaxy.datatypes.binary import Binary
+
+log = logging.getLogger(__name__)
+
+
+def get_test_fname(fname):
+    """Returns test data filename"""
+    path, name = os.path.split(__file__)
+    full_path = os.path.join(path, 'test', fname)
+    return full_path
+
+
+def stream_to_open_named_file( stream, fd, filename, source_encoding=None, source_error='strict', target_encoding=None, target_error='strict' ):
+    """Writes a stream to the provided file descriptor, returns the file's name and bool( is_multi_byte ). Closes file descriptor"""
+    # signature and behavior are somewhat odd, due to backwards compatibility, but this can/should be done better
+    CHUNK_SIZE = 1048576
+    data_checked = False
+    is_compressed = False
+    is_binary = False
+    is_multi_byte = False
+    if not target_encoding or not encodings_search_function( target_encoding ):
+        target_encoding = util.DEFAULT_ENCODING  # utf-8
+    if not source_encoding:
+        source_encoding = util.DEFAULT_ENCODING  # sys.getdefaultencoding() would mimic old behavior (defaults to ascii)
+    while True:
+        chunk = stream.read( CHUNK_SIZE )
+        if not chunk:
+            break
+        if not data_checked:
+            # See if we're uploading a compressed file
+            if zipfile.is_zipfile( filename ):
+                is_compressed = True
+            else:
+                try:
+                    if text_type( chunk[:2] ) == text_type( util.gzip_magic ):
+                        is_compressed = True
+                except Exception:
+                    pass
+            if not is_compressed:
+                # See if we have a multi-byte character file
+                chars = chunk[:100]
+                is_multi_byte = multi_byte.is_multi_byte( chars )
+                if not is_multi_byte:
+                    is_binary = util.is_binary( chunk )
+            data_checked = True
+        if not is_compressed and not is_binary:
+            if not isinstance( chunk, text_type ):
+                chunk = chunk.decode( source_encoding, source_error )
+            os.write( fd, chunk.encode( target_encoding, target_error ) )
+        else:
+            # Compressed files must be encoded after they are uncompressed in the upload utility,
+            # while binary files should not be encoded at all.
+            os.write( fd, chunk )
+    os.close( fd )
+    return filename, is_multi_byte
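+
+# Usage sketch (upload path assumed):
+#   fd, tmp = tempfile.mkstemp()
+#   name, is_multi_byte = stream_to_open_named_file( open( 'upload.dat', 'rb' ), fd, tmp )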
+
+
+def stream_to_file( stream, suffix='', prefix='', dir=None, text=False, **kwd ):
+    """Writes a stream to a temporary file, returns the temporary file's name"""
+    fd, temp_name = tempfile.mkstemp( suffix=suffix, prefix=prefix, dir=dir, text=text )
+    return stream_to_open_named_file( stream, fd, temp_name, **kwd )
+
+
+def check_newlines( fname, bytes_to_read=52428800 ):
+    """
+    Determines if there are any non-POSIX newlines in the first
+    bytes_to_read (by default, 50MB) of the file.
+    """
+    CHUNK_SIZE = 2 ** 20
+    f = open( fname, 'r' )
+    while f.tell() < bytes_to_read:
+        chunk = f.read( CHUNK_SIZE )
+        if not chunk:
+            break
+        if chunk.count( '\r' ):
+            f.close()
+            return True
+    f.close()
+    return False
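+
+# e.g. check_newlines( 'dos_file.txt' ) returns True if a carriage return
+# appears in the inspected bytes (file name assumed).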
+
+
+def convert_newlines( fname, in_place=True, tmp_dir=None, tmp_prefix=None ):
+    """
+    Converts a file from universal line endings to POSIX
+    line endings (in place by default).
+
+    >>> fname = get_test_fname('temp.txt')
+    >>> open(fname, 'wt').write("1 2\\r3 4")
+    >>> convert_newlines(fname, tmp_prefix="gxtest", tmp_dir=tempfile.gettempdir())
+    (2, None)
+    >>> open(fname).read()
+    '1 2\\n3 4\\n'
+    """
+    fd, temp_name = tempfile.mkstemp( prefix=tmp_prefix, dir=tmp_dir )
+    fp = os.fdopen( fd, "wt" )
+    i = None
+    for i, line in enumerate( open( fname, "U" ) ):
+        fp.write( "%s\n" % line.rstrip( "\r\n" ) )
+    fp.close()
+    if i is None:
+        i = 0
+    else:
+        i += 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i, None )
+    else:
+        return ( i, temp_name )
+
+
+def sep2tabs( fname, in_place=True, patt="\\s+" ):
+    """
+    Transforms a 'sep'-separated file into a tab-separated one (in place by default)
+
+    >>> fname = get_test_fname('temp.txt')
+    >>> open(fname, 'wt').write("1 2\\n3 4\\n")
+    >>> sep2tabs(fname)
+    (2, None)
+    >>> open(fname).read()
+    '1\\t2\\n3\\t4\\n'
+    """
+    regexp = re.compile( patt )
+    fd, temp_name = tempfile.mkstemp()
+    fp = os.fdopen( fd, "wt" )
+    i = None
+    for i, line in enumerate( open( fname ) ):
+        line = line.rstrip( '\r\n' )
+        elems = regexp.split( line )
+        fp.write( "%s\n" % '\t'.join( elems ) )
+    fp.close()
+    if i is None:
+        i = 0
+    else:
+        i += 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i, None )
+    else:
+        return ( i, temp_name )
+
+
+def convert_newlines_sep2tabs( fname, in_place=True, patt="\\s+", tmp_dir=None, tmp_prefix=None ):
+    """
+    Combines the above methods, convert_newlines() and sep2tabs(),
+    so that the file does not need to be read twice
+
+    >>> fname = get_test_fname('temp.txt')
+    >>> open(fname, 'wt').write("1 2\\r3 4")
+    >>> convert_newlines_sep2tabs(fname, tmp_prefix="gxtest", tmp_dir=tempfile.gettempdir())
+    (2, None)
+    >>> open(fname).read()
+    '1\\t2\\n3\\t4\\n'
+    """
+    regexp = re.compile( patt )
+    fd, temp_name = tempfile.mkstemp( prefix=tmp_prefix, dir=tmp_dir )
+    fp = os.fdopen( fd, "wt" )
+    i = None
+    for i, line in enumerate( open( fname, "U" ) ):
+        line = line.rstrip( '\r\n' )
+        elems = regexp.split( line )
+        fp.write( "%s\n" % '\t'.join( elems ) )
+    fp.close()
+    if i is None:
+        i = 0
+    else:
+        i += 1
+    if in_place:
+        shutil.move( temp_name, fname )
+        # Return number of lines in file.
+        return ( i, None )
+    else:
+        return ( i, temp_name )
+
+
+def get_headers( fname, sep, count=60, is_multi_byte=False ):
+    """
+    Returns a list with the first 'count' lines split by 'sep'
+
+    >>> fname = get_test_fname('complete.bed')
+    >>> get_headers(fname,'\\t')
+    [['chr7', '127475281', '127491632', 'NM_000230', '0', '+', '127486022', '127488767', '0', '3', '29,172,3225,', '0,10713,13126,'], ['chr7', '127486011', '127488900', 'D49487', '0', '+', '127486022', '127488767', '0', '2', '155,490,', '0,2399']]
+    """
+    headers = []
+    for idx, line in enumerate(open(fname)):
+        line = line.rstrip('\n\r')
+        if is_multi_byte:
+            # TODO: fix this - sep is never found in line
+            line = unicodify( line, 'utf-8' )
+            sep = sep.encode( 'utf-8' )
+        headers.append( line.split(sep) )
+        if idx == count:
+            break
+    return headers
+
+
+def is_column_based( fname, sep='\t', skip=0, is_multi_byte=False ):
+    """
+    Checks whether the file is column based with respect to a separator
+    (defaults to tab separator).
+
+    >>> fname = get_test_fname('test.gff')
+    >>> is_column_based(fname)
+    True
+    >>> fname = get_test_fname('test_tab.bed')
+    >>> is_column_based(fname)
+    True
+    >>> is_column_based(fname, sep=' ')
+    False
+    >>> fname = get_test_fname('test_space.txt')
+    >>> is_column_based(fname)
+    False
+    >>> is_column_based(fname, sep=' ')
+    True
+    >>> fname = get_test_fname('test_ensembl.tab')
+    >>> is_column_based(fname)
+    True
+    >>> fname = get_test_fname('test_tab1.tabular')
+    >>> is_column_based(fname, sep=' ', skip=0)
+    False
+    >>> fname = get_test_fname('test_tab1.tabular')
+    >>> is_column_based(fname)
+    True
+    """
+    headers = get_headers( fname, sep, is_multi_byte=is_multi_byte )
+    count = 0
+    if not headers:
+        return False
+    for hdr in headers[skip:]:
+        if hdr and hdr[0] and not hdr[0].startswith('#'):
+            if len(hdr) > 1:
+                count = len(hdr)
+            break
+    if count < 2:
+        return False
+    for hdr in headers[skip:]:
+        if hdr and hdr[0] and not hdr[0].startswith('#'):
+            if len(hdr) != count:
+                return False
+    return True
+
+
+def guess_ext( fname, sniff_order, is_multi_byte=False ):
+    """
+    Returns an extension that can be used in the datatype factory to
+    generate a datatype for the 'fname' file
+
+    >>> from galaxy.datatypes import registry
+    >>> sample_conf = os.path.join(util.galaxy_directory(), "config", "datatypes_conf.xml.sample")
+    >>> datatypes_registry = registry.Registry()
+    >>> datatypes_registry.load_datatypes(root_dir=util.galaxy_directory(), config=sample_conf)
+    >>> sniff_order = datatypes_registry.sniff_order
+    >>> fname = get_test_fname('megablast_xml_parser_test1.blastxml')
+    >>> guess_ext(fname, sniff_order)
+    'blastxml'
+    >>> fname = get_test_fname('interval.interval')
+    >>> guess_ext(fname, sniff_order)
+    'interval'
+    >>> fname = get_test_fname('interval1.bed')
+    >>> guess_ext(fname, sniff_order)
+    'bed'
+    >>> fname = get_test_fname('test_tab.bed')
+    >>> guess_ext(fname, sniff_order)
+    'bed'
+    >>> fname = get_test_fname('sequence.maf')
+    >>> guess_ext(fname, sniff_order)
+    'maf'
+    >>> fname = get_test_fname('sequence.fasta')
+    >>> guess_ext(fname, sniff_order)
+    'fasta'
+    >>> fname = get_test_fname('file.html')
+    >>> guess_ext(fname, sniff_order)
+    'html'
+    >>> fname = get_test_fname('test.gtf')
+    >>> guess_ext(fname, sniff_order)
+    'gtf'
+    >>> fname = get_test_fname('test.gff')
+    >>> guess_ext(fname, sniff_order)
+    'gff'
+    >>> fname = get_test_fname('gff_version_3.gff')
+    >>> guess_ext(fname, sniff_order)
+    'gff3'
+    >>> fname = get_test_fname('temp.txt')
+    >>> open(fname, 'wt').write("a\\t2")
+    >>> guess_ext(fname, sniff_order)
+    'txt'
+    >>> fname = get_test_fname('temp.txt')
+    >>> open(fname, 'wt').write("a\\t2\\nc\\t1\\nd\\t0")
+    >>> guess_ext(fname, sniff_order)
+    'tabular'
+    >>> fname = get_test_fname('temp.txt')
+    >>> open(fname, 'wt').write("a 1 2 x\\nb 3 4 y\\nc 5 6 z")
+    >>> guess_ext(fname, sniff_order)
+    'txt'
+    >>> fname = get_test_fname('test_tab1.tabular')
+    >>> guess_ext(fname, sniff_order)
+    'tabular'
+    >>> fname = get_test_fname('alignment.lav')
+    >>> guess_ext(fname, sniff_order)
+    'lav'
+    >>> fname = get_test_fname('1.sff')
+    >>> guess_ext(fname, sniff_order)
+    'sff'
+    >>> fname = get_test_fname('1.bam')
+    >>> guess_ext(fname, sniff_order)
+    'bam'
+    >>> fname = get_test_fname('3unsorted.bam')
+    >>> guess_ext(fname, sniff_order)
+    'bam'
+    >>> fname = get_test_fname('test.idpDB')
+    >>> guess_ext(fname, sniff_order)
+    'idpdb'
+    >>> fname = get_test_fname('test.mz5')
+    >>> guess_ext(fname, sniff_order)
+    'h5'
+    >>> fname = get_test_fname('issue1818.tabular')
+    >>> guess_ext(fname, sniff_order)
+    'tabular'
+    >>> fname = get_test_fname('drugbank_drugs.cml')
+    >>> guess_ext(fname, sniff_order)
+    'cml'
+    >>> fname = get_test_fname('q.fps')
+    >>> guess_ext(fname, sniff_order)
+    'fps'
+    >>> fname = get_test_fname('drugbank_drugs.inchi')
+    >>> guess_ext(fname, sniff_order)
+    'inchi'
+    >>> fname = get_test_fname('drugbank_drugs.mol2')
+    >>> guess_ext(fname, sniff_order)
+    'mol2'
+    >>> fname = get_test_fname('drugbank_drugs.sdf')
+    >>> guess_ext(fname, sniff_order)
+    'sdf'
+    >>> fname = get_test_fname('5e5z.pdb')
+    >>> guess_ext(fname, sniff_order)
+    'pdb'
+    >>> fname = get_test_fname('mothur_datatypetest_true.mothur.otu')
+    >>> guess_ext(fname, sniff_order)
+    'mothur.otu'
+    """
+    file_ext = None
+    for datatype in sniff_order:
+        """
+        Some classes may not have a sniff function, which is ok.  In fact, the
+        Tabular and Text classes are 2 examples of classes that should never have
+        a sniff function.  Since these classes are default classes, they contain
+        few rules to filter out data of other formats, so they should be called
+        from this function after all other datatypes in sniff_order have not been
+        successfully discovered.
+        """
+        try:
+            if datatype.sniff( fname ):
+                file_ext = datatype.file_ext
+                break
+        except:
+            pass
+    # Ugly hack for tsv vs tabular sniffing: we want to prefer tabular
+    # to tsv, but tabular doesn't have a sniffer - if TSV was sniffed,
+    # just check whether the file is acceptable tabular and use that instead.
+    if file_ext == 'tsv':
+        if is_column_based( fname, '\t', 1, is_multi_byte=is_multi_byte ):
+            file_ext = 'tabular'
+    if file_ext is not None:
+        return file_ext
+
+    headers = get_headers( fname, None )
+    is_binary = False
+    if not is_multi_byte:
+        for hdr in headers:
+            for char in hdr:
+                # old behavior had 'char' possibly having length > 1,
+                # need to determine when/if this occurs
+                is_binary = util.is_binary( char )
+                if is_binary:
+                    break
+            if is_binary:
+                break
+    if is_binary:
+        return 'data'  # default binary data type file extension
+    if is_column_based( fname, '\t', 1, is_multi_byte=is_multi_byte ):
+        return 'tabular'  # default tabular data type file extension
+    return 'txt'  # default text data type file extension
+
+
+def handle_compressed_file( filename, datatypes_registry, ext='auto' ):
+    CHUNK_SIZE = 2 ** 20  # 1MB
+    is_compressed = False
+    compressed_type = None
+    keep_compressed = False
+    is_valid = False
+    for compressed_type, check_compressed_function in COMPRESSION_CHECK_FUNCTIONS:
+        is_compressed = check_compressed_function( filename )
+        if is_compressed:
+            break  # found compression type
+    if is_compressed:
+        if ext in AUTO_DETECT_EXTENSIONS:
+            check_exts = COMPRESSION_DATATYPES[ compressed_type ]
+        elif ext in COMPRESSED_EXTENSIONS:
+            check_exts = [ ext ]
+        else:
+            check_exts = []
+        for compressed_ext in check_exts:
+            compressed_datatype = datatypes_registry.get_datatype_by_extension( compressed_ext )
+            if compressed_datatype.sniff( filename ):
+                ext = compressed_ext
+                keep_compressed = True
+                is_valid = True
+                break
+
+    if not is_compressed:
+        is_valid = True
+    elif not keep_compressed:
+        is_valid = True
+        fd, uncompressed = tempfile.mkstemp()
+        compressed_file = DECOMPRESSION_FUNCTIONS[ compressed_type ]( filename )
+        while True:
+            try:
+                chunk = compressed_file.read( CHUNK_SIZE )
+            except IOError as e:
+                os.close( fd )
+                os.remove( uncompressed )
+                compressed_file.close()
+                raise IOError( 'Problem uncompressing %s data, please try retrieving the data uncompressed: %s' % ( compressed_type, e ))
+            if not chunk:
+                break
+            os.write( fd, chunk )
+        os.close( fd )
+        compressed_file.close()
+        # Replace the compressed file with the uncompressed file
+        shutil.move( uncompressed, filename )
+    return is_valid, ext
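+
+
+# A hedged sketch of the flow above (assumes a loaded registry, as in the
+# guess_ext doctest; `upload_path` is illustrative): a gzipped file that is
+# not a BAM fails the 'bam' sniff, is decompressed in place, and is reported
+# valid with ext unchanged.
+#
+#     is_valid, ext = handle_compressed_file( upload_path, datatypes_registry, ext='auto' )
+#     # -> ( True, 'auto' )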
+
+
+def handle_uploaded_dataset_file( filename, datatypes_registry, ext='auto', is_multi_byte=False ):
+    is_valid, ext = handle_compressed_file( filename, datatypes_registry, ext=ext )
+
+    if not is_valid:
+        raise InappropriateDatasetContentError( 'The compressed uploaded file contains inappropriate content.' )
+
+    if ext in AUTO_DETECT_EXTENSIONS:
+        ext = guess_ext( filename, sniff_order=datatypes_registry.sniff_order, is_multi_byte=is_multi_byte )
+
+    if check_binary( filename ):
+        if not Binary.is_ext_unsniffable(ext) and not datatypes_registry.get_datatype_by_extension( ext ).sniff( filename ):
+            raise InappropriateDatasetContentError( 'The binary uploaded file contains inappropriate content.' )
+    elif check_html( filename ):
+        raise InappropriateDatasetContentError( 'The uploaded file contains inappropriate HTML content.' )
+    return ext
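+
+
+# A hedged usage sketch (assumes a loaded registry, as in the guess_ext
+# doctest): detect the extension of an upload, or raise
+# InappropriateDatasetContentError (defined below) for disallowed binary or
+# HTML content.
+#
+#     ext = handle_uploaded_dataset_file( fname, datatypes_registry, ext='auto' )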
+
+
+AUTO_DETECT_EXTENSIONS = [ 'auto' ]  # should 'data' also cause auto detect?
+DECOMPRESSION_FUNCTIONS = dict( gzip=gzip.GzipFile )
+COMPRESSION_CHECK_FUNCTIONS = [ ( 'gzip', is_gzip ) ]
+COMPRESSION_DATATYPES = dict( gzip=[ 'bam' ] )
+COMPRESSED_EXTENSIONS = []
+for exts in COMPRESSION_DATATYPES.values():
+    COMPRESSED_EXTENSIONS.extend( exts )
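+# Note: with only gzip registered above, COMPRESSED_EXTENSIONS evaluates
+# to [ 'bam' ].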
+
+
+class InappropriateDatasetContentError( Exception ):
+    pass
+
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod(sys.modules[__name__])
diff --git a/lib/galaxy/datatypes/tabular.py b/lib/galaxy/datatypes/tabular.py
new file mode 100644
index 0000000..b52a1e4
--- /dev/null
+++ b/lib/galaxy/datatypes/tabular.py
@@ -0,0 +1,1139 @@
+"""
+Tabular datatype
+"""
+from __future__ import absolute_import
+
+import abc
+import csv
+import gzip
+import logging
+import os
+import re
+import subprocess
+import sys
+import tempfile
+from cgi import escape
+from json import dumps
+
+from galaxy import util
+from galaxy.datatypes import data, metadata
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes.sniff import get_headers
+from galaxy.util.checkers import is_gzip
+
+from . import dataproviders
+
+if sys.version_info > (3,):
+    long = int
+
+log = logging.getLogger(__name__)
+
+
+@dataproviders.decorators.has_dataproviders
+class TabularData( data.Text ):
+    """Generic tabular data"""
+    edam_format = "format_3475"
+    # All tabular data is chunkable.
+    CHUNKABLE = True
+
+    """Add metadata elements"""
+    MetadataElement( name="comment_lines", default=0, desc="Number of comment lines", readonly=False, optional=True, no_value=0 )
+    MetadataElement( name="data_lines", default=0, desc="Number of data lines", readonly=True, visible=False, optional=True, no_value=0 )
+    MetadataElement( name="columns", default=0, desc="Number of columns", readonly=True, visible=False, no_value=0 )
+    MetadataElement( name="column_types", default=[], desc="Column types", param=metadata.ColumnTypesParameter, readonly=True, visible=False, no_value=[] )
+    MetadataElement( name="column_names", default=[], desc="Column names", readonly=True, visible=False, optional=True, no_value=[] )
+    MetadataElement( name="delimiter", default='\t', desc="Data delimiter", readonly=True, visible=False, optional=True, no_value=[] )
+
+    @abc.abstractmethod
+    def set_meta( self, dataset, **kwd ):
+        raise NotImplementedError
+
+    def set_peek( self, dataset, line_count=None, is_multi_byte=False, WIDTH=256, skipchars=None ):
+        super(TabularData, self).set_peek( dataset, line_count=line_count, is_multi_byte=is_multi_byte, WIDTH=WIDTH, skipchars=skipchars, line_wrap=False )
+        if dataset.metadata.comment_lines:
+            dataset.blurb = "%s, %s comments" % ( dataset.blurb, util.commaify( str( dataset.metadata.comment_lines ) ) )
+
+    def displayable( self, dataset ):
+        try:
+            return dataset.has_data() \
+                and dataset.state == dataset.states.OK \
+                and dataset.metadata.columns > 0 \
+                and dataset.metadata.data_lines != 0
+        except:
+            return False
+
+    def get_chunk(self, trans, dataset, offset=0, ck_size=None):
+        with open(dataset.file_name) as f:
+            f.seek(offset)
+            ck_data = f.read(ck_size or trans.app.config.display_chunk_size)
+            if ck_data and ck_data[-1] != '\n':
+                cursor = f.read(1)
+                while cursor and cursor != '\n':
+                    ck_data += cursor
+                    cursor = f.read(1)
+            last_read = f.tell()
+        return dumps( { 'ck_data': util.unicodify( ck_data ),
+                        'offset': last_read } )
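+
+    # Shape of the payload returned above (a sketch; actual values vary):
+    #     { "ck_data": "<chunk text extended to the next newline>",
+    #       "offset": <file position to pass as `offset` on the next call> }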
+
+    def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, offset=None, ck_size=None, **kwd):
+        preview = util.string_as_bool( preview )
+        if offset is not None:
+            return self.get_chunk(trans, dataset, offset, ck_size)
+        elif to_ext or not preview:
+            to_ext = to_ext or dataset.extension
+            return self._serve_raw(trans, dataset, to_ext)
+        elif dataset.metadata.columns > 50:
+            # Fancy tabular display is only suitable for datasets without an incredibly large number of columns.
+            # We should add a new datatype 'matrix', with its own draw method, suitable for this kind of data.
+            # For now, default to the old behavior, ugly as it is.  Remove this after adding 'matrix'.
+            max_peek_size = 1000000  # 1 MB
+            if os.stat( dataset.file_name ).st_size < max_peek_size:
+                self._clean_and_set_mime_type( trans, dataset.get_mime() )
+                return open( dataset.file_name )
+            else:
+                trans.response.set_content_type( "text/html" )
+                return trans.stream_template_mako( "/dataset/large_file.mako",
+                                                   truncated_data=open( dataset.file_name ).read(max_peek_size),
+                                                   data=dataset)
+        else:
+            column_names = 'null'
+            if dataset.metadata.column_names:
+                column_names = dataset.metadata.column_names
+            elif hasattr(dataset.datatype, 'column_names'):
+                column_names = dataset.datatype.column_names
+            column_types = dataset.metadata.column_types
+            if not column_types:
+                column_types = []
+            column_number = dataset.metadata.columns
+            if column_number is None:
+                column_number = 'null'
+            return trans.fill_template( "/dataset/tabular_chunked.mako",
+                                        dataset=dataset,
+                                        chunk=self.get_chunk(trans, dataset, 0),
+                                        column_number=column_number,
+                                        column_names=column_names,
+                                        column_types=column_types )
+
+    def make_html_table( self, dataset, **kwargs ):
+        """Create HTML table, used for displaying peek"""
+        out = ['<table cellspacing="0" cellpadding="3">']
+        try:
+            out.append( self.make_html_peek_header( dataset, **kwargs ) )
+            out.append( self.make_html_peek_rows( dataset, **kwargs ) )
+            out.append( '</table>' )
+            out = "".join( out )
+        except Exception as exc:
+            out = "Can't create peek %s" % str( exc )
+        return out
+
+    def make_html_peek_header( self, dataset, skipchars=None, column_names=None, column_number_format='%s', column_parameter_alias=None, **kwargs ):
+        if skipchars is None:
+            skipchars = []
+        if column_names is None:
+            column_names = []
+        if column_parameter_alias is None:
+            column_parameter_alias = {}
+        out = []
+        try:
+            if not column_names and dataset.metadata.column_names:
+                column_names = dataset.metadata.column_names
+
+            columns = dataset.metadata.columns
+            if columns is None:
+                columns = dataset.metadata.spec.columns.no_value
+            column_headers = [None] * columns
+
+            # fill in empty headers with data from column_names
+            for i in range( min( columns, len( column_names ) ) ):
+                if column_headers[i] is None and column_names[i] is not None:
+                    column_headers[i] = column_names[i]
+
+            # fill in empty headers from ColumnParameters set in the metadata
+            for name, spec in dataset.metadata.spec.items():
+                if isinstance( spec.param, metadata.ColumnParameter ):
+                    try:
+                        i = int( getattr( dataset.metadata, name ) ) - 1
+                    except:
+                        i = -1
+                    if 0 <= i < columns and column_headers[i] is None:
+                        column_headers[i] = column_parameter_alias.get(name, name)
+
+            out.append( '<tr>' )
+            for i, header in enumerate( column_headers ):
+                out.append( '<th>' )
+                if header is None:
+                    out.append( column_number_format % str( i + 1 ) )
+                else:
+                    out.append( '%s.%s' % ( str( i + 1 ), escape( header ) ) )
+                out.append( '</th>' )
+            out.append( '</tr>' )
+        except Exception as exc:
+            log.exception( 'make_html_peek_header failed on HDA %s' % dataset.id )
+            raise Exception( "Can't create peek header %s" % str( exc ) )
+        return "".join( out )
+
+    def make_html_peek_rows( self, dataset, skipchars=None, **kwargs ):
+        if skipchars is None:
+            skipchars = []
+        out = []
+        try:
+            if not dataset.peek:
+                dataset.set_peek()
+            columns = dataset.metadata.columns
+            if columns is None:
+                columns = dataset.metadata.spec.columns.no_value
+            for line in dataset.peek.splitlines():
+                if line.startswith( tuple( skipchars ) ):
+                    out.append( '<tr><td colspan="100%%">%s</td></tr>' % escape( line ) )
+                elif line:
+                    elems = line.split( dataset.metadata.delimiter )
+                    # pad shortened elems, since lines could have been truncated by width
+                    if len( elems ) < columns:
+                        elems.extend( [''] * ( columns - len( elems ) ) )
+                    # we may have an invalid comment line or invalid data
+                    if len( elems ) != columns:
+                        out.append( '<tr><td colspan="100%%">%s</td></tr>' % escape( line ) )
+                    else:
+                        out.append( '<tr>' )
+                        for elem in elems:
+                            out.append( '<td>%s</td>' % escape( elem ) )
+                        out.append( '</tr>' )
+        except Exception as exc:
+            log.exception( 'make_html_peek_rows failed on HDA %s' % dataset.id )
+            raise Exception( "Can't create peek rows %s" % str( exc ) )
+        return "".join( out )
+
+    def display_peek( self, dataset ):
+        """Returns formatted html of peek"""
+        return self.make_html_table( dataset )
+
+    # ------------- Dataproviders
+    @dataproviders.decorators.dataprovider_factory( 'column', dataproviders.column.ColumnarDataProvider.settings )
+    def column_dataprovider( self, dataset, **settings ):
+        """Uses column settings that are passed in"""
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        delimiter = dataset.metadata.delimiter
+        return dataproviders.column.ColumnarDataProvider( dataset_source, deliminator=delimiter, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'dataset-column',
+                                                    dataproviders.column.ColumnarDataProvider.settings )
+    def dataset_column_dataprovider( self, dataset, **settings ):
+        """Attempts to get column settings from dataset.metadata"""
+        delimiter = dataset.metadata.delimiter
+        return dataproviders.dataset.DatasetColumnarDataProvider( dataset, deliminator=delimiter, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'dict', dataproviders.column.DictDataProvider.settings )
+    def dict_dataprovider( self, dataset, **settings ):
+        """Uses column settings that are passed in"""
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        delimiter = dataset.metadata.delimiter
+        return dataproviders.column.DictDataProvider( dataset_source, deliminator=delimiter, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'dataset-dict', dataproviders.column.DictDataProvider.settings )
+    def dataset_dict_dataprovider( self, dataset, **settings ):
+        """Attempts to get column settings from dataset.metadata"""
+        delimiter = dataset.metadata.delimiter
+        return dataproviders.dataset.DatasetDictDataProvider( dataset, deliminator=delimiter, **settings )
+
+
+@dataproviders.decorators.has_dataproviders
+class Tabular( TabularData ):
+    """Tab delimited data"""
+
+    def set_meta( self, dataset, overwrite=True, skip=None, max_data_lines=100000, max_guess_type_data_lines=None, **kwd ):
+        """
+        Tries to determine the number of columns as well as those columns that
+        contain numerical values in the dataset.  A skip parameter is used
+        because various tabular data types reuse this function, and their data
+        type classes are responsible for determining how many invalid comment
+        lines should be skipped. Using None for skip will cause skip to be
+        zero, but the first line will be processed as a header. A
+        max_data_lines parameter is used because various tabular data types
+        reuse this function, and their data type classes are responsible for
+        determining how many data lines should be processed to ensure that the
+        non-optional metadata parameters are properly set; if used, optional
+        metadata parameters will be set to None, unless the entire file has
+        already been read. Using None for max_data_lines will process all data
+        lines.
+
+        Items of interest:
+
+        1. We treat 'overwrite' as always True (we always want to set tabular metadata when called).
+        2. If a tabular file has no data, it will have one column of type 'str'.
+        3. We used to check only the first 100 lines when setting metadata and this class's
+           set_peek() method read the entire file to determine the number of lines in the file.
+           Since metadata can now be processed on cluster nodes, we've merged the line count portion
+           of the set_peek() processing here, and we now check the entire contents of the file.
+        """
+        # Store original skip value to check with later
+        requested_skip = skip
+        if skip is None:
+            skip = 0
+        column_type_set_order = [ 'int', 'float', 'list', 'str'  ]  # Order to set column types in
+        default_column_type = column_type_set_order[-1]  # Default column type is lowest in list
+        column_type_compare_order = list( column_type_set_order )  # Order to compare column types
+        column_type_compare_order.reverse()
+
+        def type_overrules_type( column_type1, column_type2 ):
+            if column_type1 is None or column_type1 == column_type2:
+                return False
+            if column_type2 is None:
+                return True
+            for column_type in column_type_compare_order:
+                if column_type1 == column_type:
+                    return True
+                if column_type2 == column_type:
+                    return False
+            # neither column type was found in our ordered list, this cannot happen
+            raise ValueError( "Tried to compare unknown column types: %s and %s" % ( column_type1, column_type2 ) )
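+        # Worked examples: type_overrules_type( 'str', 'int' ) is True (a text
+        # value demotes an int column to str), while
+        # type_overrules_type( 'int', 'float' ) is False (an int never demotes
+        # a float column).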
+
+        def is_int( column_text ):
+            try:
+                int( column_text )
+                return True
+            except:
+                return False
+
+        def is_float( column_text ):
+            try:
+                float( column_text )
+                return True
+            except:
+                if column_text.strip().lower() == 'na':
+                    return True  # na is special cased to be a float
+                return False
+
+        def is_list( column_text ):
+            return "," in column_text
+
+        def is_str( column_text ):
+            # anything, except an empty string, is True
+            if column_text == "":
+                return False
+            return True
+        is_column_type = {}  # Dict to store column type string to checking function
+        for column_type in column_type_set_order:
+            is_column_type[column_type] = locals()[ "is_%s" % ( column_type ) ]
+
+        def guess_column_type( column_text ):
+            for column_type in column_type_set_order:
+                if is_column_type[column_type]( column_text ):
+                    return column_type
+            return None
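+        # e.g. guess_column_type( '7' ) -> 'int', guess_column_type( 'na' ) -> 'float'
+        # (special-cased), guess_column_type( '1,2' ) -> 'list',
+        # guess_column_type( '' ) -> None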
+        data_lines = 0
+        comment_lines = 0
+        column_types = []
+        first_line_column_types = [default_column_type]  # default value is one column of type str
+        if dataset.has_data():
+            # NOTE: if skip > num_check_lines, we won't detect any metadata, and will use default
+            dataset_fh = open( dataset.file_name )
+            i = 0
+            while True:
+                line = dataset_fh.readline()
+                if not line:
+                    break
+                line = line.rstrip( '\r\n' )
+                if i < skip or not line or line.startswith( '#' ):
+                    # We'll call blank lines comments
+                    comment_lines += 1
+                else:
+                    data_lines += 1
+                    if max_guess_type_data_lines is None or data_lines <= max_guess_type_data_lines:
+                        fields = line.split( '\t' )
+                        for field_count, field in enumerate( fields ):
+                            if field_count >= len( column_types ):  # found a previously unknown column, we append None
+                                column_types.append( None )
+                            column_type = guess_column_type( field )
+                            if type_overrules_type( column_type, column_types[field_count] ):
+                                column_types[field_count] = column_type
+                    if i == 0 and requested_skip is None:
+                        # This is our first line, people seem to like to upload files that have a header line, but do not
+                        # start with '#' (i.e. all column types would then most likely be detected as str).  We will assume
+                        # that the first line is always a header (this was previous behavior - it was always skipped).  When
+                        # the requested skip is None, we only use the data from the first line if we have no other data for
+                        # a column.  This is far from perfect, as
+                        # 1,2,3	1.1	2.2	qwerty
+                        # 0	0		1,2,3
+                        # will be detected as
+                        # "column_types": ["int", "int", "float", "list"]
+                        # instead of
+                        # "column_types": ["list", "float", "float", "str"]  *** would seem to be the 'Truth' by manual
+                        # observation that the first line should be included as data.  The old method would have detected as
+                        # "column_types": ["int", "int", "str", "list"]
+                        first_line_column_types = column_types
+                        column_types = [ None for col in first_line_column_types ]
+                if max_data_lines is not None and data_lines >= max_data_lines:
+                    if dataset_fh.tell() != dataset.get_size():
+                        data_lines = None  # Clear optional data_lines metadata value
+                        comment_lines = None  # Clear optional comment_lines metadata value; additional comment lines could appear below this point
+                    break
+                i += 1
+            dataset_fh.close()
+
+        # we error on the larger number of columns
+        # first we pad our column_types by using data from first line
+        if len( first_line_column_types ) > len( column_types ):
+            for column_type in first_line_column_types[len( column_types ):]:
+                column_types.append( column_type )
+        # Now we fill any unknown (None) column_types with data from first line
+        for i in range( len( column_types ) ):
+            if column_types[i] is None:
+                if len( first_line_column_types ) <= i or first_line_column_types[i] is None:
+                    column_types[i] = default_column_type
+                else:
+                    column_types[i] = first_line_column_types[i]
+        # Set the discovered metadata values for the dataset
+        dataset.metadata.data_lines = data_lines
+        dataset.metadata.comment_lines = comment_lines
+        dataset.metadata.column_types = column_types
+        dataset.metadata.columns = len( column_types )
+        dataset.metadata.delimiter = '\t'
+
+    def as_gbrowse_display_file( self, dataset, **kwd ):
+        return open( dataset.file_name )
+
+    def as_ucsc_display_file( self, dataset, **kwd ):
+        return open( dataset.file_name )
+
+
+class Taxonomy( Tabular ):
+    def __init__(self, **kwd):
+        """Initialize taxonomy datatype"""
+        super(Taxonomy, self).__init__( **kwd )
+        self.column_names = ['Name', 'TaxId', 'Root', 'Superkingdom', 'Kingdom', 'Subkingdom',
+                             'Superphylum', 'Phylum', 'Subphylum', 'Superclass', 'Class', 'Subclass',
+                             'Superorder', 'Order', 'Suborder', 'Superfamily', 'Family', 'Subfamily',
+                             'Tribe', 'Subtribe', 'Genus', 'Subgenus', 'Species', 'Subspecies'
+                             ]
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return super(Taxonomy, self).make_html_table( dataset, column_names=self.column_names )
+
+
+@dataproviders.decorators.has_dataproviders
+class Sam( Tabular ):
+    edam_format = "format_2573"
+    edam_data = "data_0863"
+    file_ext = 'sam'
+    track_type = "ReadTrack"
+    data_sources = { "data": "bam", "index": "bigwig" }
+
+    def __init__(self, **kwd):
+        """Initialize taxonomy datatype"""
+        super( Sam, self ).__init__( **kwd )
+        self.column_names = ['QNAME', 'FLAG', 'RNAME', 'POS', 'MAPQ', 'CIGAR',
+                             'MRNM', 'MPOS', 'ISIZE', 'SEQ', 'QUAL', 'OPT'
+                             ]
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return super( Sam, self ).make_html_table( dataset, column_names=self.column_names )
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in SAM format
+
+        A file in SAM format consists of lines of tab-separated data.
+        The following header line may be the first line::
+
+          @QNAME  FLAG    RNAME   POS     MAPQ    CIGAR   MRNM    MPOS    ISIZE   SEQ     QUAL
+          or
+          @QNAME  FLAG    RNAME   POS     MAPQ    CIGAR   MRNM    MPOS    ISIZE   SEQ     QUAL    OPT
+
+        Data in the OPT column is optional and can consist of tab-separated data
+
+        For complete details see http://samtools.sourceforge.net/SAM1.pdf
+
+        Rules for sniffing as True::
+
+            There must be 11 or more columns of data on each line
+            Columns 2 (FLAG), 4 (POS), 5 (MAPQ), 8 (MPOS), and 9 (ISIZE) must be numbers (9 can be negative)
+            We will only check that up to the first 5 alignments are correctly formatted.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'sequence.maf' )
+        >>> Sam().sniff( fname )
+        False
+        >>> fname = get_test_fname( '1.sam' )
+        >>> Sam().sniff( fname )
+        True
+        """
+        try:
+            fh = open( filename )
+            count = 0
+            while True:
+                line = fh.readline()
+                line = line.strip()
+                if not line:
+                    break  # EOF
+                if line[0] != '@':
+                    line_pieces = line.split('\t')
+                    if len(line_pieces) < 11:
+                        fh.close()
+                        return False
+                    try:
+                        int(line_pieces[1])
+                        int(line_pieces[3])
+                        int(line_pieces[4])
+                        int(line_pieces[7])
+                        int(line_pieces[8])
+                    except ValueError:
+                        fh.close()
+                        return False
+                    count += 1
+                    if count == 5:
+                        fh.close()
+                        return True
+            fh.close()
+            if 0 < count < 5:
+                return True
+        except:
+            pass
+        return False
+
+    def set_meta( self, dataset, overwrite=True, skip=None, max_data_lines=5, **kwd ):
+        if dataset.has_data():
+            dataset_fh = open( dataset.file_name )
+            comment_lines = 0
+            if self.max_optional_metadata_filesize >= 0 and dataset.get_size() > self.max_optional_metadata_filesize:
+                # If the dataset is larger than optional_metadata, just count comment lines.
+                for i, l in enumerate(dataset_fh):
+                    if l.startswith('@'):
+                        comment_lines += 1
+                    else:
+                        # No more comments, and the file is too big to look at the whole thing.  Give up.
+                        dataset.metadata.data_lines = None
+                        break
+            else:
+                # Otherwise, read the whole thing and set num data lines.
+                for i, l in enumerate(dataset_fh):
+                    if l.startswith('@'):
+                        comment_lines += 1
+                dataset.metadata.data_lines = i + 1 - comment_lines
+            dataset_fh.close()
+            dataset.metadata.comment_lines = comment_lines
+            dataset.metadata.columns = 12
+            dataset.metadata.column_types = ['str', 'int', 'str', 'int', 'int', 'str', 'str', 'int', 'int', 'str', 'str', 'str']
+
+    def merge( split_files, output_file):
+        """
+        Multiple SAM files may each have headers. Since the headers should all be the same, remove
+        the headers from all files but the first, keeping them in the first file only
+        """
+        cmd = 'mv %s %s' % ( split_files[0], output_file )
+        result = os.system(cmd)
+        if result != 0:
+            raise Exception('Result %s from %s' % (result, cmd))
+        if len(split_files) > 1:
+            cmd = 'egrep -v -h "^@" %s >> %s' % ( ' '.join(split_files[1:]), output_file )
+            result = os.system(cmd)
+            if result != 0:
+                raise Exception('Result %s from %s' % (result, cmd))
+    merge = staticmethod(merge)
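+
+    # A minimal usage sketch (file names illustrative); only the first file's
+    # @-headers are kept:
+    #
+    #     Sam.merge( [ 'part1.sam', 'part2.sam' ], 'merged.sam' )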
+
+    # Dataproviders
+    # sam does not use '#' to indicate comments/headers - we need to strip out those headers from the std. providers
+    # TODO:?? seems like there should be an easier way to do this - metadata.comment_char?
+    @dataproviders.decorators.dataprovider_factory( 'line', dataproviders.line.FilteredLineDataProvider.settings )
+    def line_dataprovider( self, dataset, **settings ):
+        settings[ 'comment_char' ] = '@'
+        return super( Sam, self ).line_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'regex-line', dataproviders.line.RegexLineDataProvider.settings )
+    def regex_line_dataprovider( self, dataset, **settings ):
+        settings[ 'comment_char' ] = '@'
+        return super( Sam, self ).regex_line_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'column', dataproviders.column.ColumnarDataProvider.settings )
+    def column_dataprovider( self, dataset, **settings ):
+        settings[ 'comment_char' ] = '@'
+        return super( Sam, self ).column_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'dataset-column',
+                                                    dataproviders.column.ColumnarDataProvider.settings )
+    def dataset_column_dataprovider( self, dataset, **settings ):
+        settings[ 'comment_char' ] = '@'
+        return super( Sam, self ).dataset_column_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'dict', dataproviders.column.DictDataProvider.settings )
+    def dict_dataprovider( self, dataset, **settings ):
+        settings[ 'comment_char' ] = '@'
+        return super( Sam, self ).dict_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'dataset-dict', dataproviders.column.DictDataProvider.settings )
+    def dataset_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'comment_char' ] = '@'
+        return super( Sam, self ).dataset_dict_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'header', dataproviders.line.RegexLineDataProvider.settings )
+    def header_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        headers_source = dataproviders.line.RegexLineDataProvider( dataset_source, regex_list=[ '^@' ] )
+        return dataproviders.line.RegexLineDataProvider( headers_source, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'id-seq-qual', dict_dataprovider.settings )
+    def id_seq_qual_dataprovider( self, dataset, **settings ):
+        # provided as an example of a specified column dict (w/o metadata)
+        settings[ 'indeces' ] = [ 0, 9, 10 ]
+        settings[ 'column_names' ] = [ 'id', 'seq', 'qual' ]
+        return self.dict_dataprovider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dataprovider( self, dataset, **settings ):
+        settings[ 'comment_char' ] = '@'
+        return dataproviders.dataset.GenomicRegionDataProvider( dataset, 2, 3, 3, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region-dict',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'comment_char' ] = '@'
+        return dataproviders.dataset.GenomicRegionDataProvider( dataset, 2, 3, 3, True, **settings )
+
+    # @dataproviders.decorators.dataprovider_factory( 'samtools' )
+    # def samtools_dataprovider( self, dataset, **settings ):
+    #     dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+    #     return dataproviders.dataset.SamtoolsDataProvider( dataset_source, **settings )
+
+
+@dataproviders.decorators.has_dataproviders
+class Pileup( Tabular ):
+    """Tab delimited data in pileup (6- or 10-column) format"""
+    edam_format = "format_3015"
+    file_ext = "pileup"
+    line_class = "genomic coordinate"
+    data_sources = { "data": "tabix" }
+
+    """Add metadata elements"""
+    MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
+    MetadataElement( name="startCol", default=2, desc="Start column", param=metadata.ColumnParameter )
+    MetadataElement( name="endCol", default=2, desc="End column", param=metadata.ColumnParameter )
+    MetadataElement( name="baseCol", default=3, desc="Reference base column", param=metadata.ColumnParameter )
+
+    def init_meta( self, dataset, copy_from=None ):
+        super( Pileup, self ).init_meta( dataset, copy_from=copy_from )
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return super( Pileup, self ).make_html_table( dataset, column_parameter_alias={'chromCol': 'Chrom', 'startCol': 'Start', 'baseCol': 'Base'} )
+
+    def repair_methods( self, dataset ):
+        """Return options for removing errors along with a description"""
+        return [ ("lines", "Remove erroneous lines") ]
+
+    def sniff( self, filename ):
+        """
+        Checks for 'pileup-ness'
+
+        There are two main types of pileup: 6-column and 10-column. For both,
+        the first three and last two columns are the same. We only check the
+        first three to allow for some personalization of the format.
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'interval.interval' )
+        >>> Pileup().sniff( fname )
+        False
+        >>> fname = get_test_fname( '6col.pileup' )
+        >>> Pileup().sniff( fname )
+        True
+        >>> fname = get_test_fname( '10col.pileup' )
+        >>> Pileup().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, '\t' )
+        try:
+            for hdr in headers:
+                if hdr and not hdr[0].startswith( '#' ):
+                    if len( hdr ) < 5:
+                        return False
+                    try:
+                        # the start coordinate is in column 1 (with 0-based
+                        # columns) and the reference base is in column 2
+                        start = int( hdr[1] )
+                        assert start >= 0
+                        assert hdr[2] in [ 'A', 'C', 'G', 'T', 'N', 'a', 'c', 'g', 't', 'n' ]
+                    except:
+                        return False
+            return True
+        except:
+            return False
+
+    # Dataproviders
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dataprovider( self, dataset, **settings ):
+        return dataproviders.dataset.GenomicRegionDataProvider( dataset, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region-dict',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'named_columns' ] = True
+        return self.genomic_region_dataprovider( dataset, **settings )
+
+
+@dataproviders.decorators.has_dataproviders
+class Vcf( Tabular ):
+    """ Variant Call Format for describing SNPs and other simple genome variations. """
+    edam_format = "format_3016"
+    track_type = "VariantTrack"
+    data_sources = { "data": "tabix", "index": "bigwig" }
+
+    file_ext = 'vcf'
+    column_names = [ 'Chrom', 'Pos', 'ID', 'Ref', 'Alt', 'Qual', 'Filter', 'Info', 'Format', 'data' ]
+
+    MetadataElement( name="columns", default=10, desc="Number of columns", readonly=True, visible=False )
+    MetadataElement( name="column_types", default=['str', 'int', 'str', 'str', 'str', 'int', 'str', 'list', 'str', 'str'], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False )
+    MetadataElement( name="viz_filter_cols", desc="Score column for visualization", default=[5], param=metadata.ColumnParameter, optional=True, multiple=True, visible=False )
+    MetadataElement( name="sample_names", default=[], desc="Sample names", readonly=True, visible=False, optional=True, no_value=[] )
+
+    def sniff( self, filename ):
+        headers = get_headers( filename, '\n', count=1 )
+        return headers[0][0].startswith("##fileformat=VCF")
+
+    def display_peek( self, dataset ):
+        """Returns formated html of peek"""
+        return super( Vcf, self ).make_html_table( dataset, column_names=self.column_names )
+
+    def set_meta( self, dataset, **kwd ):
+        super( Vcf, self ).set_meta( dataset, **kwd )
+        source = open( dataset.file_name )
+
+        # Skip comments.
+        line = None
+        for line in source:
+            if not line.startswith( '##' ):
+                break
+
+        if line and line.startswith( '#' ):
+            # Found header line, get sample names.
+            dataset.metadata.sample_names = line.split()[ 9: ]
+
+    @staticmethod
+    def merge(split_files, output_file):
+        stderr_f = tempfile.NamedTemporaryFile(prefix="vcf_merge_stderr")
+        stderr_name = stderr_f.name
+        command = ["bcftools", "concat"] + split_files + ["-o", output_file]
+        log.info("Merging vcf files with command [%s]" % " ".join(command))
+        exit_code = subprocess.call( args=command, stderr=open( stderr_name, 'wb' ) )
+        with open(stderr_name, "rb") as f:
+            stderr = f.read().strip()
+        # Did merge succeed?
+        if exit_code != 0:
+            raise Exception("Error merging VCF files: %s" % stderr)
+
+    # Dataproviders
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dataprovider( self, dataset, **settings ):
+        return dataproviders.dataset.GenomicRegionDataProvider( dataset, 0, 1, 1, **settings )
+
+    @dataproviders.decorators.dataprovider_factory( 'genomic-region-dict',
+                                                    dataproviders.dataset.GenomicRegionDataProvider.settings )
+    def genomic_region_dict_dataprovider( self, dataset, **settings ):
+        settings[ 'named_columns' ] = True
+        return self.genomic_region_dataprovider( dataset, **settings )
+
+
+class Eland( Tabular ):
+    """Support for the export.txt.gz file used by Illumina's ELANDv2e aligner"""
+    file_ext = '_export.txt.gz'
+    MetadataElement( name="columns", default=0, desc="Number of columns", readonly=True, visible=False )
+    MetadataElement( name="column_types", default=[], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False, no_value=[] )
+    MetadataElement( name="comment_lines", default=0, desc="Number of comments", readonly=True, visible=False )
+    MetadataElement( name="tiles", default=[], param=metadata.ListParameter, desc="Set of tiles", readonly=True, visible=False, no_value=[] )
+    MetadataElement( name="reads", default=[], param=metadata.ListParameter, desc="Set of reads", readonly=True, visible=False, no_value=[] )
+    MetadataElement( name="lanes", default=[], param=metadata.ListParameter, desc="Set of lanes", readonly=True, visible=False, no_value=[] )
+    MetadataElement( name="barcodes", default=[], param=metadata.ListParameter, desc="Set of barcodes", readonly=True, visible=False, no_value=[] )
+
+    def __init__(self, **kwd):
+        """Initialize taxonomy datatype"""
+        super( Eland, self ).__init__( **kwd )
+        self.column_names = ['MACHINE', 'RUN_NO', 'LANE', 'TILE', 'X', 'Y',
+                             'INDEX', 'READ_NO', 'SEQ', 'QUAL', 'CHROM', 'CONTIG',
+                             'POSITION', 'STRAND', 'DESC', 'SRAS', 'PRAS', 'PART_CHROM',
+                             'PART_CONTIG', 'PART_OFFSET', 'PART_STRAND', 'FILT'
+                             ]
+
+    def make_html_table( self, dataset, skipchars=None ):
+        """Create HTML table, used for displaying peek"""
+        if skipchars is None:
+            skipchars = []
+        out = ['<table cellspacing="0" cellpadding="3">']
+        try:
+            # Generate column header
+            out.append( '<tr>' )
+            for i, name in enumerate( self.column_names ):
+                out.append( '<th>%s.%s</th>' % ( str( i + 1 ), name ) )
+            # This data type requires at least 11 columns in the data
+            if dataset.metadata.columns - len( self.column_names ) > 0:
+                for i in range( len( self.column_names ), dataset.metadata.columns ):
+                    out.append( '<th>%s</th>' % str( i + 1 ) )
+                out.append( '</tr>' )
+            out.append( self.make_html_peek_rows( dataset, skipchars=skipchars ) )
+            out.append( '</table>' )
+            out = "".join( out )
+        except Exception as exc:
+            out = "Can't create peek %s" % exc
+        return out
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in ELAND export format
+
+        A file in ELAND export format consists of lines of tab-separated data.
+        There is no header.
+
+        Rules for sniffing as True::
+
+            - There must be 22 columns on each line
+            - LANE, TILE, X, Y, INDEX, READ_NO, SEQ, QUAL, POSITION, *STRAND, FILT must be correct
+            - We will only check that up to the first 5 alignments are correctly formatted.
+        """
+        try:
+            compress = is_gzip(filename)
+            if compress:
+                fh = gzip.GzipFile(filename, 'r')
+            else:
+                fh = open( filename )
+            count = 0
+            while True:
+                line = fh.readline()
+                line = line.strip()
+                if not line:
+                    break  # EOF
+                if line:
+                    line_pieces = line.split('\t')
+                    if len(line_pieces) != 22:
+                        return False
+                    try:
+                        if long(line_pieces[1]) < 0:
+                            raise Exception('Out of range')
+                        if long(line_pieces[2]) < 0:
+                            raise Exception('Out of range')
+                        if long(line_pieces[3]) < 0:
+                            raise Exception('Out of range')
+                        int(line_pieces[4])
+                        int(line_pieces[5])
+                        # can get a lot more specific
+                    except ValueError:
+                        fh.close()
+                        return False
+                    count += 1
+                    if count == 5:
+                        break
+            fh.close()
+            if count > 0:
+                return True
+        except:
+            pass
+        return False
+
+    def set_meta( self, dataset, overwrite=True, skip=None, max_data_lines=5, **kwd ):
+        if dataset.has_data():
+            compress = is_gzip(dataset.file_name)
+            if compress:
+                dataset_fh = gzip.GzipFile(dataset.file_name, 'r')
+            else:
+                dataset_fh = open( dataset.file_name )
+            lanes = {}
+            tiles = {}
+            barcodes = {}
+            reads = {}
+            # Should always read the entire file (until we devise a more clever way to pass metadata on)
+            # if self.max_optional_metadata_filesize >= 0 and dataset.get_size() > self.max_optional_metadata_filesize:
+            # If the dataset is larger than optional_metadata, just count comment lines.
+            #     dataset.metadata.data_lines = None
+            # else:
+            # Otherwise, read the whole thing and set num data lines.
+            for i, line in enumerate(dataset_fh):
+                if line:
+                    line_pieces = line.split('\t')
+                    if len(line_pieces) != 22:
+                        raise Exception('%s:%d:Corrupt line!' % (dataset.file_name, i))
+                    lanes[line_pieces[2]] = 1
+                    tiles[line_pieces[3]] = 1
+                    barcodes[line_pieces[6]] = 1
+                    reads[line_pieces[7]] = 1
+            dataset.metadata.data_lines = i + 1
+            dataset_fh.close()
+            dataset.metadata.comment_lines = 0
+            dataset.metadata.columns = 21
+            dataset.metadata.column_types = ['str', 'int', 'int', 'int', 'int', 'int', 'str', 'int', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str', 'str']
+            dataset.metadata.lanes = list(lanes.keys())
+            dataset.metadata.tiles = ["%04d" % int(t) for t in tiles.keys()]
+            dataset.metadata.barcodes = [_ for _ in barcodes.keys() if _ != '0'] + ['NoIndex' for _ in barcodes.keys() if _ == '0']
+            dataset.metadata.reads = list(reads.keys())
+
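+# A hedged usage sketch (illustrative only, not part of upstream; it assumes
+# the enclosing datatype class above is named Eland): sniff() inspects at
+# most the first five 22-column export lines, so a quick manual check
+# against a hypothetical file could look like:
+#
+#     if Eland().sniff('/tmp/s_1_export.txt'):
+#         print('recognized as an Eland export file')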
+
+class ElandMulti( Tabular ):
+    file_ext = 'elandmulti'
+
+    def sniff( self, filename ):
+        return False
+
+
+class FeatureLocationIndex( Tabular ):
+    """
+    An index that stores feature locations in tabular format.
+    """
+    file_ext = 'fli'
+    MetadataElement( name="columns", default=2, desc="Number of columns", readonly=True, visible=False )
+    MetadataElement( name="column_types", default=['str', 'str'], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False, no_value=[] )
+
+
+@dataproviders.decorators.has_dataproviders
+class BaseCSV( TabularData ):
+    """
+    Delimiter-separated table data.
+    This includes CSV, TSV and other dialects understood by the
+    Python 'csv' module ( https://docs.python.org/2/library/csv.html ).
+    Subclasses must define the dialect to use, strict_width and file_ext.
+    See the csv module documentation for the available dialect settings.
+    """
+    delimiter = ','
+    peek_size = 1024  # File chunk used for sniffing CSV dialect
+    big_peek_size = 10240  # Large File chunk used for sniffing CSV dialect
+
+    def is_int( self, column_text ):
+        try:
+            int( column_text )
+            return True
+        except ValueError:
+            return False
+
+    def is_float( self, column_text ):
+        try:
+            float( column_text )
+            return True
+        except ValueError:
+            if column_text.strip().lower() == 'na':
+                return True  # 'na' is special-cased as a float
+            return False
+
+    def guess_type( self, text ):
+        if self.is_int(text):
+            return 'int'
+        if self.is_float(text):
+            return 'float'
+        else:
+            return 'str'
+
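+    # Hedged examples (illustrative, not upstream): because is_float above
+    # special-cases 'na', guess_type behaves like:
+    #
+    #     self.guess_type('3')   -> 'int'
+    #     self.guess_type('3.5') -> 'float'
+    #     self.guess_type('NA')  -> 'float'
+    #     self.guess_type('foo') -> 'str'
+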
+    def sniff( self, filename ):
+        """ Return True if if recognizes dialect and header. """
+        try:
+            # check the dialect works
+            reader = csv.reader(open(filename, 'r'), self.dialect)
+            # Check we can read header and get columns
+            header_row = next(reader)
+            if len(header_row) < 2:
+                # No columns so not separated by this dialect.
+                return False
+
+            # Check that there is a second row as it is used by set_meta and
+            # that all rows can be read
+            if self.strict_width:
+                num_columns = len(header_row)
+                found_second_line = False
+                for data_row in reader:
+                    found_second_line = True
+                    # All columns must be the same length
+                    if num_columns != len(data_row):
+                        return False
+                if not found_second_line:
+                    return False
+            else:
+                data_row = next(reader)
+                if len(data_row) < 2:
+                    # No columns so not separated by this dialect.
+                    return False
+                # ignore the length in the rest
+                for data_row in reader:
+                    pass
+
+            # Optional: Check Python's csv comes up with a similar dialect
+            auto_dialect = csv.Sniffer().sniff(open(filename, 'r').read(self.big_peek_size))
+            if (auto_dialect.delimiter != self.dialect.delimiter):
+                return False
+            if (auto_dialect.quotechar != self.dialect.quotechar):
+                return False
+            """
+            Not checking for other dialect options
+            They may be mis detected from just the sample.
+            Or not effect the read such as doublequote
+
+            Optional: Check for headers as in the past.
+            Note No way around Python's csv calling Sniffer.sniff again.
+            Note Without checking the dialect returned by sniff
+                  this test may be checking the wrong dialect.
+            """
+            if not csv.Sniffer().has_header(open(filename, 'r').read(self.big_peek_size)):
+                return False
+            return True
+        except Exception:
+            # Not readable by Python's csv using this dialect
+            return False
+
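+    # Hedged aside (illustrative, not upstream): the optional cross-check in
+    # sniff() above relies on csv.Sniffer, which deduces a dialect from a
+    # text sample, e.g.
+    #
+    #     dialect = csv.Sniffer().sniff('a,b,c\n1,2,3\n')
+    #     assert dialect.delimiter == ','
+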
+    def set_meta( self, dataset, **kwd ):
+        with open(dataset.file_name, 'r') as csvfile:
+            # Parse file with the correct dialect
+            reader = csv.reader(csvfile, self.dialect)
+            data_row = None
+            header_row = None
+            try:
+                header_row = next(reader)
+                data_row = next(reader)
+                for row in reader:
+                    pass
+            except StopIteration:
+                # Fewer than two rows: guess types from whatever was read
+                pass
+            except csv.Error as e:
+                raise Exception('CSV reader error - line %d: %s' % (reader.line_num, e))
+
+            # Guess column types from the first data row
+            column_types = []
+            for cell in data_row or []:
+                column_types.append(self.guess_type(cell))
+
+            # Set metadata
+            dataset.metadata.data_lines = max( reader.line_num - 1, 0 )
+            dataset.metadata.comment_lines = 1
+            dataset.metadata.column_types = column_types
+            dataset.metadata.columns = max( len( header_row or [] ), len( data_row or [] ) )
+            dataset.metadata.column_names = header_row
+            dataset.metadata.delimiter = reader.dialect.delimiter
+
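+# A hedged sketch (not part of upstream): per the BaseCSV docstring, a new
+# delimiter-separated datatype only needs a csv dialect plus strict_width
+# and file_ext, e.g. a hypothetical semicolon-separated variant:
+#
+#     class SSV( BaseCSV ):
+#         file_ext = 'ssv'       # hypothetical extension
+#         strict_width = True
+#         class dialect( csv.excel ):
+#             delimiter = ';'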
+
+@dataproviders.decorators.has_dataproviders
+class CSV( BaseCSV ):
+    """
+    Comma-separated table data.
+    Only sniffs comma-separated files with at least 2 rows and 2 columns.
+    """
+    file_ext = 'csv'
+    dialect = csv.excel  # This is the default
+    strict_width = False  # Previous csv type did not check column width
+
+
+@dataproviders.decorators.has_dataproviders
+class TSV( BaseCSV ):
+    """
+    Tab-separated table data.
+    Only sniff tab-separated files with at least 2 rows and 2 columns.
+
+    Note: Use of this datatype is optional as the general tabular datatype will
+    handle most tab-separated files. This datatype is only required for datasets
+    with tabs INSIDE double quotes.
+
+    This datatype currently does not support TSV files where the header has one
+    column less to indicate first column is row names. This kind of file is
+    handled fine by the tabular datatype.
+    """
+    file_ext = 'tsv'
+    dialect = csv.excel_tab
+    strict_width = True  # Leave files with different width to tabular
+
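+# Hedged illustration (not upstream code): TSV is only needed when a quoted
+# field contains a literal tab; csv.excel_tab parses such a field as one
+# value, while a naive line.split('\t') would not:
+#
+#     row = next(csv.reader(['"a\tb"\tc'], csv.excel_tab))
+#     assert row == ['a\tb', 'c']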
+
+class ConnectivityTable( Tabular ):
+    edam_format = "format_3309"
+    file_ext = "ct"
+
+    header_regexp = re.compile( "^[0-9]+" + "(?:\t|[ ]+)" + ".*?" + "(?:ENERGY|energy|dG)" + "[ \t].*?=")
+    structure_regexp = re.compile( "^[0-9]+" + "(?:\t|[ ]+)" + "[ACGTURYKMSWBDHVN]+" + "(?:\t|[ ]+)" + "[^\t]+" + "(?:\t|[ ]+)" + "[^\t]+" + "(?:\t|[ ]+)" + "[^\t]+" + "(?:\t|[ ]+)" + "[^\t]+")
+
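+    # Hedged illustration (not part of upstream): header_regexp accepts both
+    # tab- and space-separated CT headers, e.g.
+    #
+    #     assert ConnectivityTable.header_regexp.match('5\tenergy = -12.3\tname')
+    #     assert ConnectivityTable.header_regexp.match('10    ENERGY = -34.8  seqname')
+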
+    def __init__(self, **kwd):
+        super( ConnectivityTable, self ).__init__( **kwd )
+        self.columns = 6
+        self.column_names = ['base_index', 'base', 'neighbor_left', 'neighbor_right', 'partner', 'natural_numbering']
+        self.column_types = ['int', 'str', 'int', 'int', 'int', 'int']
+
+    def set_meta( self, dataset, **kwd ):
+        data_lines = 0
+
+        with open( dataset.file_name ) as handle:
+            for line in handle:
+                data_lines += 1
+
+        dataset.metadata.data_lines = data_lines
+
+    def sniff(self, filename):
+        """
+        The ConnectivityTable (CT) is a file format used for describing
+        RNA 2D structures by tools including MFOLD, UNAFOLD and
+        the RNAStructure package. The tabular file format is defined as
+        follows::
+
+            5	energy = -12.3	sequence name
+            1	G	0	2	0	1
+            2	A	1	3	0	2
+            3	A	2	4	0	3
+            4	A	3	5	0	4
+            5	C	4	6	1	5
+
+        The links given at the edam ontology page do not indicate what
+        type of separator is used (space or tab) while different
+        implementations exist. The implementation that uses spaces as
+        separator (implemented in RNAStructure) is as follows::
+
+            10    ENERGY = -34.8  seqname
+            1 G       0    2    9    1
+            2 G       1    3    8    2
+            3 G       2    4    7    3
+            4 a       3    5    0    4
+            5 a       4    6    0    5
+            6 a       5    7    0    6
+            7 C       6    8    3    7
+            8 C       7    9    2    8
+            9 C       8   10    1    9
+            10 a       9    0    0   10
+        """
+
+        i = 0
+        j = 1
+
+        try:
+            with open( filename ) as handle:
+                for line in handle:
+                    line = line.strip()
+
+                    if len(line) > 0:
+                        if i == 0:
+                            if not self.header_regexp.match(line):
+                                return False
+                            else:
+                                length = int(re.split('\W+', line, 1)[0])
+                        else:
+                            if not self.structure_regexp.match(line.upper()):
+                                return False
+                            else:
+                                if j != int(re.split('\W+', line, 1)[0]):
+                                    return False
+                                elif j == length:                       # Last line of the first sequence has been reached
+                                    return True
+                                else:
+                                    j += 1
+                        i += 1
+            return False
+        except Exception:
+            return False
+
+    def get_chunk(self, trans, dataset, chunk):
+        ck_index = int(chunk)
+        with open(dataset.file_name) as f:
+            f.seek(ck_index * trans.app.config.display_chunk_size)
+            # If we aren't at the start of the file, seek to next newline.  Do this better eventually.
+            if f.tell() != 0:
+                cursor = f.read(1)
+                while cursor and cursor != '\n':
+                    cursor = f.read(1)
+            ck_data = f.read(trans.app.config.display_chunk_size)
+            cursor = f.read(1)
+            while cursor and ck_data[-1] != '\n':
+                ck_data += cursor
+                cursor = f.read(1)
+
+        # The ConnectivityTable format has several derivatives, one of which is delimited by (multiple) spaces.
+        # By converting these spaces back to tabs, chunks can still be interpreted by tab-delimited file parsers.
+        ck_data_header, ck_data_body = ck_data.split('\n', 1)
+        ck_data_header = re.sub('^([0-9]+)[ ]+', r'\1\t', ck_data_header)
+        ck_data_body = re.sub('\n[ \t]+', '\n', ck_data_body)
+        ck_data_body = re.sub('[ ]+', '\t', ck_data_body)
+
+        return dumps( { 'ck_data': util.unicodify(ck_data_header + "\n" + ck_data_body ), 'ck_index': ck_index + 1 } )
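+
+# Hedged sketch (illustrative) of the space-to-tab normalization performed in
+# get_chunk above, applied to the space-separated header variant:
+#
+#     re.sub('^([0-9]+)[ ]+', r'\1\t', '10    ENERGY = -34.8  seqname')
+#     # -> '10\tENERGY = -34.8  seqname'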
diff --git a/lib/galaxy/datatypes/test/1.bam b/lib/galaxy/datatypes/test/1.bam
new file mode 100644
index 0000000..95c65de
Binary files /dev/null and b/lib/galaxy/datatypes/test/1.bam differ
diff --git a/lib/galaxy/datatypes/test/1.bed b/lib/galaxy/datatypes/test/1.bed
new file mode 100644
index 0000000..eb4c30e
--- /dev/null
+++ b/lib/galaxy/datatypes/test/1.bed
@@ -0,0 +1,65 @@
+chr1	147962192	147962580	CCDS989.1_cds_0_0_chr1_147962193_r	0	-
+chr1	147984545	147984630	CCDS990.1_cds_0_0_chr1_147984546_f	0	+
+chr1	148078400	148078582	CCDS993.1_cds_0_0_chr1_148078401_r	0	-
+chr1	148185136	148185276	CCDS996.1_cds_0_0_chr1_148185137_f	0	+
+chr10	55251623	55253124	CCDS7248.1_cds_0_0_chr10_55251624_r	0	-
+chr11	116124407	116124501	CCDS8374.1_cds_0_0_chr11_116124408_r	0	-
+chr11	116206508	116206563	CCDS8377.1_cds_0_0_chr11_116206509_f	0	+
+chr11	116211733	116212337	CCDS8378.1_cds_0_0_chr11_116211734_r	0	-
+chr11	1812377	1812407	CCDS7726.1_cds_0_0_chr11_1812378_f	0	+
+chr12	38440094	38440321	CCDS8736.1_cds_0_0_chr12_38440095_r	0	-
+chr13	112381694	112381953	CCDS9526.1_cds_0_0_chr13_112381695_f	0	+
+chr14	98710240	98712285	CCDS9949.1_cds_0_0_chr14_98710241_r	0	-
+chr15	41486872	41487060	CCDS10096.1_cds_0_0_chr15_41486873_r	0	-
+chr15	41673708	41673857	CCDS10097.1_cds_0_0_chr15_41673709_f	0	+
+chr15	41679161	41679250	CCDS10098.1_cds_0_0_chr15_41679162_r	0	-
+chr15	41826029	41826196	CCDS10101.1_cds_0_0_chr15_41826030_f	0	+
+chr16	142908	143003	CCDS10397.1_cds_0_0_chr16_142909_f	0	+
+chr16	179963	180135	CCDS10401.1_cds_0_0_chr16_179964_r	0	-
+chr16	244413	244681	CCDS10402.1_cds_0_0_chr16_244414_f	0	+
+chr16	259268	259383	CCDS10403.1_cds_0_0_chr16_259269_r	0	-
+chr18	23786114	23786321	CCDS11891.1_cds_0_0_chr18_23786115_r	0	-
+chr18	59406881	59407046	CCDS11985.1_cds_0_0_chr18_59406882_f	0	+
+chr18	59455932	59456337	CCDS11986.1_cds_0_0_chr18_59455933_r	0	-
+chr18	59600586	59600754	CCDS11988.1_cds_0_0_chr18_59600587_f	0	+
+chr19	59068595	59069564	CCDS12866.1_cds_0_0_chr19_59068596_f	0	+
+chr19	59236026	59236146	CCDS12872.1_cds_0_0_chr19_59236027_r	0	-
+chr19	59297998	59298008	CCDS12877.1_cds_0_0_chr19_59297999_f	0	+
+chr19	59302168	59302288	CCDS12878.1_cds_0_0_chr19_59302169_r	0	-
+chr2	118288583	118288668	CCDS2120.1_cds_0_0_chr2_118288584_f	0	+
+chr2	118394148	118394202	CCDS2121.1_cds_0_0_chr2_118394149_r	0	-
+chr2	220190202	220190242	CCDS2441.1_cds_0_0_chr2_220190203_f	0	+
+chr2	220229609	220230869	CCDS2443.1_cds_0_0_chr2_220229610_r	0	-
+chr20	33330413	33330423	CCDS13249.1_cds_0_0_chr20_33330414_r	0	-
+chr20	33513606	33513792	CCDS13255.1_cds_0_0_chr20_33513607_f	0	+
+chr20	33579500	33579527	CCDS13256.1_cds_0_0_chr20_33579501_r	0	-
+chr20	33593260	33593348	CCDS13257.1_cds_0_0_chr20_33593261_f	0	+
+chr21	32707032	32707192	CCDS13614.1_cds_0_0_chr21_32707033_f	0	+
+chr21	32869641	32870022	CCDS13615.1_cds_0_0_chr21_32869642_r	0	-
+chr21	33321040	33322012	CCDS13620.1_cds_0_0_chr21_33321041_f	0	+
+chr21	33744994	33745040	CCDS13625.1_cds_0_0_chr21_33744995_r	0	-
+chr22	30120223	30120265	CCDS13897.1_cds_0_0_chr22_30120224_f	0	+
+chr22	30160419	30160661	CCDS13898.1_cds_0_0_chr22_30160420_r	0	-
+chr22	30665273	30665360	CCDS13901.1_cds_0_0_chr22_30665274_f	0	+
+chr22	30939054	30939266	CCDS13903.1_cds_0_0_chr22_30939055_r	0	-
+chr5	131424298	131424460	CCDS4149.1_cds_0_0_chr5_131424299_f	0	+
+chr5	131556601	131556672	CCDS4151.1_cds_0_0_chr5_131556602_r	0	-
+chr5	131621326	131621419	CCDS4152.1_cds_0_0_chr5_131621327_f	0	+
+chr5	131847541	131847666	CCDS4155.1_cds_0_0_chr5_131847542_r	0	-
+chr6	108299600	108299744	CCDS5061.1_cds_0_0_chr6_108299601_r	0	-
+chr6	108594662	108594687	CCDS5063.1_cds_0_0_chr6_108594663_f	0	+
+chr6	108640045	108640151	CCDS5064.1_cds_0_0_chr6_108640046_r	0	-
+chr6	108722976	108723115	CCDS5067.1_cds_0_0_chr6_108722977_f	0	+
+chr7	113660517	113660685	CCDS5760.1_cds_0_0_chr7_113660518_f	0	+
+chr7	116512159	116512389	CCDS5771.1_cds_0_0_chr7_116512160_r	0	-
+chr7	116714099	116714152	CCDS5773.1_cds_0_0_chr7_116714100_f	0	+
+chr7	116945541	116945787	CCDS5774.1_cds_0_0_chr7_116945542_r	0	-
+chr8	118881131	118881317	CCDS6324.1_cds_0_0_chr8_118881132_r	0	-
+chr9	128764156	128764189	CCDS6914.1_cds_0_0_chr9_128764157_f	0	+
+chr9	128787519	128789136	CCDS6915.1_cds_0_0_chr9_128787520_r	0	-
+chr9	128882427	128882523	CCDS6917.1_cds_0_0_chr9_128882428_f	0	+
+chr9	128937229	128937445	CCDS6919.1_cds_0_0_chr9_128937230_r	0	-
+chrX	122745047	122745924	CCDS14606.1_cds_0_0_chrX_122745048_f	0	+
+chrX	152648964	152649196	CCDS14733.1_cds_0_0_chrX_152648965_r	0	-
+chrX	152691446	152691471	CCDS14735.1_cds_0_0_chrX_152691447_f	0	+
+chrX	152694029	152694263	CCDS14736.1_cds_0_0_chrX_152694030_r	0	-
diff --git a/lib/galaxy/datatypes/test/1.fastq b/lib/galaxy/datatypes/test/1.fastq
new file mode 100644
index 0000000..01ddd7b
--- /dev/null
+++ b/lib/galaxy/datatypes/test/1.fastq
@@ -0,0 +1,8 @@
+@HANNIBAL_1_FC302VTAAXX:2:1:228:167
+GAATTGATCAGGACATAGGACAACTGTAGGCACCAT
++HANNIBAL_1_FC302VTAAXX:2:1:228:167
+40 40 40 40 35 40 40 40 25 40 40 26 40 9 33 11 40 35 17 40 40 33 40 7 9 15 3 22 15 30 11 17 9 4 9 4
+@HANNIBAL_1_FC302VTAAXX:2:1:156:340
+GAGTTCTCGTCGCCTGTAGGCACCATCAATCGTATG
++HANNIBAL_1_FC302VTAAXX:2:1:156:340
+40 15 40 17 6 36 40 40 40 25 40 9 35 33 40 14 14 18 15 17 19 28 31 4 24 18 27 14 15 18 2 8 12 8 11 9
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/1.fastqsanger b/lib/galaxy/datatypes/test/1.fastqsanger
new file mode 100644
index 0000000..3a1f790
--- /dev/null
+++ b/lib/galaxy/datatypes/test/1.fastqsanger
@@ -0,0 +1,8 @@
+@1831_573_1004/1
+AATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
++
+><C&&9952+C>5<.?<79,=42<292:<(9/-7
+@1831_573_1050/1
+TTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
++
+;@@17?@=>7??@A8?==@4A?A4)&+.'&+'1,
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/1.fastqsolexa b/lib/galaxy/datatypes/test/1.fastqsolexa
new file mode 100644
index 0000000..01ddd7b
--- /dev/null
+++ b/lib/galaxy/datatypes/test/1.fastqsolexa
@@ -0,0 +1,8 @@
+@HANNIBAL_1_FC302VTAAXX:2:1:228:167
+GAATTGATCAGGACATAGGACAACTGTAGGCACCAT
++HANNIBAL_1_FC302VTAAXX:2:1:228:167
+40 40 40 40 35 40 40 40 25 40 40 26 40 9 33 11 40 35 17 40 40 33 40 7 9 15 3 22 15 30 11 17 9 4 9 4
+@HANNIBAL_1_FC302VTAAXX:2:1:156:340
+GAGTTCTCGTCGCCTGTAGGCACCATCAATCGTATG
++HANNIBAL_1_FC302VTAAXX:2:1:156:340
+40 15 40 17 6 36 40 40 40 25 40 9 35 33 40 14 14 18 15 17 19 28 31 4 24 18 27 14 15 18 2 8 12 8 11 9
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/1.gg b/lib/galaxy/datatypes/test/1.gg
new file mode 100644
index 0000000..24b16fa
--- /dev/null
+++ b/lib/galaxy/datatypes/test/1.gg
@@ -0,0 +1,4 @@
+chrX 100000 1.23
+chrX 100000 1.23
+chrX 100000 1.23
+chrX 100000 1.23
diff --git a/lib/galaxy/datatypes/test/1.sam b/lib/galaxy/datatypes/test/1.sam
new file mode 100644
index 0000000..8732751
--- /dev/null
+++ b/lib/galaxy/datatypes/test/1.sam
@@ -0,0 +1,97 @@
+@QNAME	FLAG	RNAME	POS	MAPQ	CIGAR	MRNM	MPOS	ISIZE	SEQ	QUAL	OPT
+1378_11_329	69	*	0	0	*	*	0	0	AGACCGGGCGGGGTGGCGTTCGGT	%##+'#######%###$#$##$(#
+1378_11_329	133	*	0	0	*	*	0	0	GTTCGTGGCCGGTGGGTGTTTGGG	###$$#$#$&#####$'$#$###$
+1378_17_1788	69	*	0	0	*	*	0	0	TGCCGTGTCTTGCTAACGCCGATT	#'#$$#$###%%##$$$$######
+1378_17_1788	133	*	0	0	*	*	0	0	TGGGTGGATGTGTTGTCGTTCATG	#$#$###$#$#######$#$####
+1378_25_2035	69	*	0	0	*	*	0	0	CTGCGTGTTGGTGTCTACTGGGGT	#%#'##$#$##&%#%$$$%#%#'#
+1378_25_2035	133	*	0	0	*	*	0	0	GTGCGTCGGGGAGGGTGCTGTCGG	######%#$%#$$###($###&&%
+1378_28_770	89	chr11.nib:1-134452384	72131356	37	17M1I5M	=	72131356	0	CACACTGTGACAGACAGCGCAGC	00/02!!0//1200210!!44/1	XT:A:U	CM:i:2	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_28_770	181	chr11.nib:1-134452384	72131356	0	24M	=	72131356	0	TTGGTGCGCGCGGTTGAGGGTTGG	$$(#%%#$%#%####$%%##$###
+1378_33_1945	113	chr2.nib:1-242951149	181247988	0	23M	chr12.nib:1-132349534	41710908	0	GAGAGAGAGAGAGAGAGAGAGAG	PQRVUMNXYRPUXYXWXSOSZ]M	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:163148	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_33_1945	177	chr12.nib:1-132349534	41710908	0	23M	chr2.nib:1-242951149	181247988	0	AGAGAGAGAGAGAGAGAGAGAGA	SQQWZYURVYWX]]YXTSY]]ZM	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:163148	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_34_789	69	*	0	0	*	*	0	0	ATGGTGGCTGACGCGTTTGACTGT	#$##%#$##$&$#%##$##$###$
+1378_34_789	133	*	0	0	*	*	0	0	GGGCTTGCGTTAGTGAGAGGTTGT	###%$%$%%###$####$###$#&
+1378_35_263	115	chr16.nib:1-88827254	19671878	0	23M	=	19671877	-1	AGAGAGAGAGAGAGAGAGAGTCT	77543:<55#"4!&=964518A>	XT:A:R	CM:i:2	SM:i:0	AM:i:0	X0:i:4	X1:i:137	XM:i:2	XO:i:0	XG:i:0	MD:Z:23
+1378_35_263	179	chr16.nib:1-88827254	19671877	0	23M	=	19671878	1	GAGAGAGAGAGAGAGAGAGAGTC	LE7402DD34FL:27AKE>;432	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:265	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_43_186	69	*	0	0	*	*	0	0	ATACTAGTTGGGACGCGTTGTGCT	#$(4%$########$#$###$$$#
+1378_43_186	133	*	0	0	*	*	0	0	GCTAGGGTTTGGGTTTGCGGTGGG	$%#$########%##%#$###'#'
+1378_51_1671	117	chr2.nib:1-242951149	190342418	0	24M	=	190342418	0	CTGGCGTTCTCGGCGTGGATGGGT	#####$$##$#%#%%###%$#$##
+1378_51_1671	153	chr2.nib:1-242951149	190342418	37	16M1I6M	=	190342418	0	TCTAACTTAGCCTCATAATAGCT	/<<!"0///////00/!!0121/	XT:A:U	CM:i:2	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_56_324	117	chr2.nib:1-242951149	80324999	0	24M	=	80324999	0	TCCAGTCGCGTTGTTAGGTTCGGA	#$#$$$#####%##%%###**#+/
+1378_56_324	153	chr2.nib:1-242951149	80324999	37	8M1I14M	=	80324999	0	TTTAGCCCGAAATGCCTAGAGCA	4;6//11!"11100110////00	XT:A:U	CM:i:2	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_56_773	69	*	0	0	*	*	0	0	TGTCGTGAGGTCACTTATCCCCAT	&%#%##%%#####&#$%##$%##$
+1378_56_773	133	*	0	0	*	*	0	0	TCTGGTCGGTTTCGGGGAGTGGAA	##%%#&$###$#$##%$####%%$
+1378_62_2027	69	*	0	0	*	*	0	0	CTTCCACGATCTGCTCGCTGTGGT	(#&&$##$$#$%#%$$$#$###'#
+1378_62_2027	133	*	0	0	*	*	0	0	GTTGGCCTGGCCTGCCGTGCTGCG	*##),/%##$)#%##1$#'%.#&#
+1378_62_2029	69	*	0	0	*	*	0	0	TCTGGGCTGTCTTCGGGTCGGTGT	$%$$####$##$$#)##%%#$###
+1378_62_2029	133	*	0	0	*	*	0	0	GGCGGTGTGTGGTGCGGCTGTGCG	/$$$=(####%####)$$%$-&%#
+1378_67_1795	81	chr16.nib:1-88827254	26739130	0	23M	chrY.nib:1-57772954	57401793	0	TGGCATTCCTGTAGGCAGAGAGG	AZWWZS]!"QNXZ]VQ]]]/2]]	XT:A:R	CM:i:2	SM:i:0	AM:i:0	X0:i:3	X1:i:0	XM:i:2	XO:i:0	XG:i:0	MD:Z:23
+1378_67_1795	161	chrY.nib:1-57772954	57401793	37	23M	chr16.nib:1-88827254	26739130	0	GATCACCCAGGTGATGTAACTCC	]WV]]]]WW]]]]]]]]]]PU]]	XT:A:U	CM:i:0	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_68_466	69	*	0	0	*	*	0	0	GTGATCGTCGGTGCCAGTCCCTGT	#(%)+##$#$#%#+$%##$#####
+1378_68_466	133	*	0	0	*	*	0	0	GTGTCATCTGAGGTAAAGCATTGT	/##$09#$#.=$#$76+$%1'###
+1378_68_1692	117	chr13.nib:1-114142980	36365609	0	24M	=	36365609	0	TTGAACCGGGCACGGGTCTTCTGG	#$#######%###$##%&'%)###
+1378_68_1692	153	chr13.nib:1-114142980	36365609	37	10M1D13M	=	36365609	0	CTGCACATACAGAATATTCATAG	0010/!"0/!!021/132231//	XT:A:U	CM:i:2	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:10^T13
+1378_80_664	69	*	0	0	*	*	0	0	CTGCTTTGATCCCCGGTGGAGCAC	7#%###$$6#######$##$$$##
+1378_80_664	133	*	0	0	*	*	0	0	TGTCTGCGTTGTATCTCTGGTGTA	%##%,%$$#&$$###$#$%##'%#
+1378_85_1786	69	*	0	0	*	*	0	0	ATACTATGTCGATCTGTAAAAAAA	)&.)#3%@$&%-,2#&+.-%0&./
+1378_85_1786	133	*	0	0	*	*	0	0	CCCTAGGAGCGTATACCGGACGAG	,'&/%/@,&1,&'/)&,6&&1)((
+1378_86_1011	69	*	0	0	*	*	0	0	CTACGTTATTGCTCTGTTTGTCCT	######$%##$$$%###%#$####
+1378_86_1011	133	*	0	0	*	*	0	0	AGGCGATGGGATATTATTTTACTT	:$###)%##$9$###1$$#$2###
+1378_86_1789	89	chr12.nib:1-132349534	39007065	37	23M	=	39007065	0	GCTTTCCATAGATGTGTAATTTC	J2K]]Z5!GN?@U]]]VX]UYYP	XT:A:U	CM:i:1	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:0	XG:i:0	MD:Z:23
+1378_86_1789	181	chr12.nib:1-132349534	39007065	0	24M	=	39007065	0	ACAACTTAAATAATCATGGACCGG	02,5$$0&6#%?*,$'#%&/15.1
+1378_91_1596	69	*	0	0	*	*	0	0	TTAGCGGTTGACTATCTGCTGACA	*&+'#9'(%*'#//,&<),/)'*#
+1378_91_1596	133	*	0	0	*	*	0	0	GCTTTTTCATTCGGTGCCTTTGGA	'>%/3%=()8'#.%?50$&5>%)%
+1378_94_1595	69	chr7.nib:1-158821424	127518258	0	24M	=	127518258	0	CGTGCGACAGCCCATGTTTTCAGA	-=..5,3826&*+.+#+#%%6;%#
+1378_94_1595	137	chr7.nib:1-158821424	127518258	37	23M	=	127518258	0	TGAGATAAACACCTAACATGCTC	M]]FN]]\V]]]Q>T]KIG:LVN	XT:A:U	CM:i:0	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_95_1039	69	*	0	0	*	*	0	0	CGGCGTCCATCTTCGCCTTGAGAT	$##.#$##$$#%$#$%%$###$)$
+1378_95_1039	133	*	0	0	*	*	0	0	GTTCTGTGCCAGGTGAGGTACGGA	&##,./#$&)6##+,'#$$0(##$
+1378_95_1767	65	chr11.nib:1-134452384	65333552	25	23M	chr3.nib:1-199501827	123725482	0	CAACTGGTGGCATCTGGACAAAC	W[[TZYY]]RO<BI7!!:!!>@2	XT:A:U	CM:i:2	SM:i:25	AM:i:25	X0:i:1	X1:i:0	XM:i:2	XO:i:0	XG:i:0	MD:Z:23
+1378_95_1767	129	chr3.nib:1-199501827	123725482	37	6M1I16M	chr11.nib:1-134452384	65333552	0	ATTTATCTGTCTCATTCATTATT	<AGB8B"!V]]UO/&JB4DE88E	XT:A:U	CM:i:2	SM:i:37	AM:i:25	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_96_1037	69	*	0	0	*	*	0	0	ATCCCCCAAGATGCCTGTTGATTG	$#$'##$$$#%$$#%###+##$#$
+1378_96_1037	133	*	0	0	*	*	0	0	CTGCTGGGCCATTTGACTTACTCA	'$#+#(##-%5##+*&###-.$$$
+1378_96_1764	81	chr15.nib:1-100338915	89251272	25	23M	chr7.nib:1-158821424	19412615	0	AGAAATGGTCGCACCCTCTGGTT	E*2ZEHX\SN]O>SYRL):LIOL	XT:A:U	CM:i:2	SM:i:25	AM:i:25	X0:i:1	X1:i:0	XM:i:2	XO:i:0	XG:i:0	MD:Z:23
+1378_96_1764	161	chr7.nib:1-158821424	19412615	37	23M	chr15.nib:1-100338915	89251272	0	GTATAGCCCACAACGCCTAATAT	ZMBS]UW]UYR\]QPZ[SMYL7C	XT:A:U	CM:i:0	SM:i:37	AM:i:25	X0:i:1	X1:i:0	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_98_1574	69	*	0	0	*	*	0	0	GTTCTGCCGGTGTCTGTGGCGGGC	$$#+&$$####%$$$###$%#%%#
+1378_98_1574	133	*	0	0	*	*	0	0	AGGCGAGTGTGGGGGTTGTTTGAG	+%%$#)##%##$####%###$%$#
+1378_107_1647	69	*	0	0	*	*	0	0	AGGCCTACTACGCGTCATTGATAG	&#$$#$(.#%#$$####&$%##($
+1378_107_1647	133	*	0	0	*	*	0	0	GGTCTGGTTCTATGTTGGTCGACT	###'$$#$$$(#%###(#$##$%#
+1378_111_829	69	chr9.nib:1-140273252	82506894	0	24M	=	82506894	0	TGCGGCACTTGCTTCTTCGTATTT	%#%##%#$%#$#%###$$##&#$$
+1378_111_829	137	chr9.nib:1-140273252	82506894	37	4M1I18M	=	82506894	0	GATGCGTAATCTAGTAAAATAAG	0/362//00/5516500210451	XT:A:U	CM:i:2	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_111_1900	69	*	0	0	*	*	0	0	TCCCCTCGCTCGGCTCTGTGCTGT	$&%*$#(#)##$#'##%(##$#$%
+1378_111_1900	133	*	0	0	*	*	0	0	GCACGCCTTTGGGCTAAGCCGTAA	)$)'#%$########$'#&%$#(#
+1378_112_1483	69	*	0	0	*	*	0	0	TGTCCAGCTATGCGGCTTCCTCCT	%#$+#%#&#$#####%####%$##
+1378_112_1483	133	*	0	0	*	*	0	0	TGGAGTGGTGTGTTTGCTGAGCCA	#$#)#############$#%#%'%
+1378_125_1287	69	*	0	0	*	*	0	0	TGTCTCTGGGGGGCCTGGTTAGGT	$##13$'%#$###$$###$$$#&#
+1378_125_1287	133	*	0	0	*	*	0	0	TGACGTGGGTTGTCCCGTGAGATT	##$%%#$###$##$$#&%##$(%%
+1378_126_468	117	chr11.nib:1-134452384	72541052	0	24M	=	72541052	0	TGCCTCTATACAGATTAGTCCTCT	)7,7..?97594@8=,=?813@>7
+1378_126_468	153	chr11.nib:1-134452384	72541052	0	23M	=	72541052	0	AGGCAAGACTCTGTCTCAAAAAA	PK5G]]PDT\]SEXY[]]]]]]]	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:4	X1:i:15713	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_127_664	69	*	0	0	*	*	0	0	AGAGGTTGGTGTCTTGTCGCAGCT	##'#$######$$%######$$$#
+1378_127_664	133	*	0	0	*	*	0	0	TCGCTTTGCCTATGTTTGTTCGGA	#%$%#&##$%#%%###$$###)-'
+1378_129_463	97	chr8.nib:1-146274826	29931771	37	23M	chr19.nib:1-63811651	5702213	0	GTAGCTCTGTTTCACATTAGGGG	J>AQ[G>C?NM:GD=)*PLORIF	XT:A:U	CM:i:1	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:0	XG:i:0	MD:Z:23
+1378_129_463	145	chr19.nib:1-63811651	5702213	0	23M	chr8.nib:1-146274826	29931771	0	AAAAAAAAAAAAAAAAAAAAAAA	JOI:AHGD==@KQB78HF>KA8>	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:583698	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_129_875	69	*	0	0	*	*	0	0	TTTCTATGGCTTACGCTGTCTGCC	#$($##%####%$#$#####$###
+1378_129_875	133	*	0	0	*	*	0	0	GACCTTTACGTATTGGGGGTTGGC	###)###+###$##$#&%##$,#$
+1378_140_1251	69	*	0	0	*	*	0	0	ATCCTAGCGCGGTGTCTTGGGGAC	#$%1#$$$##$##$#$#$##$%$$
+1378_140_1251	133	*	0	0	*	*	0	0	TTTCCTTCGTGTGCGTGCGGAGTG	#%#%$##$$$######.$$$%#%(
+1378_141_809	69	*	0	0	*	*	0	0	TGTCCTCCAGTGTCTGTTGGGTGT	%&,-##$$#(%###$#$$'###'#
+1378_141_809	133	*	0	0	*	*	0	0	TCTCGTGGTTTCTTTTTTATGTGT	##%)##$$#####%$#$#%%#'##
+1378_144_983	69	*	0	0	*	*	0	0	AGCGCCCGGTTGGTGCGGCTCGTC	-$(&%*$#*#))#$$$#%%$#$##
+1378_144_983	133	*	0	0	*	*	0	0	GTTCGTTCGTGGTGTACGAGGGTG	#(#%#####($#%##$$#%##%#)
+1378_153_270	69	*	0	0	*	*	0	0	AGTCCTTGTCCCCTGGGTTTTCCC	+''$#&%$%#$##&$$($#&#$$#
+1378_153_270	133	*	0	0	*	*	0	0	GGCCGTGTGCGGGTGTAGATTGGA	%$##($######&##$&$$$$%##
+1378_155_1689	65	chrX.nib:1-154913754	106941539	37	23M	=	106940385	-1154	ATCTCCTCTTCCTTCCATTCCAC	\]]]Y]]]]]UV]]]ZYZZ]]RV	XT:A:U	CM:i:0	SM:i:37	AM:i:37	X0:i:1	X1:i:0	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_155_1689	129	chrX.nib:1-154913754	106940385	37	23M	=	106941539	1154	GACTATGAGGTTTTCATTCAACA	]]]]\\]]]YW]]]WRZ]]WIOK	XT:A:U	CM:i:0	SM:i:37	AM:i:37	X0:i:1	X1:i:0	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_157_1580	69	*	0	0	*	*	0	0	TGGGCCTCGGTGCCCTTGGTCTGT	#%)$##'#$$$&#####%#$#$##
+1378_157_1580	133	*	0	0	*	*	0	0	GGGATTGAAGGGATGTATGCTAGG	#%$&%#$$'%$%#$##*#%$$$$#
+1378_161_317	69	*	0	0	*	*	0	0	TTGGCCGGCAACCCCGGTACCTAA	7<,<'@)@>.)2@/')'&(?/-<(
+1378_161_317	133	*	0	0	*	*	0	0	AATCCATACCCACAAAAGCAGGCC	.&%','(@''?7//+&)+2.+)0)
+1378_177_735	113	chr2.nib:1-242951149	222173182	25	23M	=	222173882	700	TTGTTCAGCGCCGATTGTCAATC	KPNICFMS]]]Z]]]]Y]]]]]]	XT:A:U	CM:i:2	SM:i:25	AM:i:25	X0:i:1	X1:i:0	XM:i:2	XO:i:0	XG:i:0	MD:Z:1G21
+1378_177_735	177	chr2.nib:1-242951149	222173882	37	23M	=	222173182	-700	AGAATTCCTAACAAAATGTGAAG	ES6-]]]]]]]]]]]]]]]]]]]	XT:A:U	CM:i:1	SM:i:37	AM:i:25	X0:i:1	X1:i:0	XM:i:1	XO:i:0	XG:i:0	MD:Z:23
+1378_181_1684	69	*	0	0	*	*	0	0	CGACTCCCGCATTCACGGTCAAGT	&*#,##$#&$*$$#$#$$$#%$##
+1378_181_1684	133	*	0	0	*	*	0	0	TTTCTGTTGTGGTTTTGTTGGGGT	$##'$%'##%##$%$#$$####$*
+1378_187_1407	69	*	0	0	*	*	0	0	TGGCGTCCACTCGTGGGTCTATCG	$#$'%#$%$%&$%#####$#$#%#
+1378_187_1407	133	*	0	0	*	*	0	0	TTGGGTGAAATCTTGTCGAGTGGA	####&##$$###$#####%##%%)
+1378_203_721	97	chr1.nib:1-247249719	245680524	25	23M	chr2.nib:1-242951149	213173999	0	GTAAAATTTGTGGAGATTTAAGT	]VEFFEZ]XPW]TOVINQ,;T!!	XT:A:U	CM:i:2	SM:i:25	AM:i:25	X0:i:1	X1:i:0	XM:i:2	XO:i:0	XG:i:0	MD:Z:23
+1378_203_721	145	chr2.nib:1-242951149	213173999	37	4M1I18M	chr1.nib:1-247249719	245680524	0	ACCTAACAAAATTGTTCAATATG	F>8AWT<AV]Q9B"+]O at IF=K]	XT:A:U	CM:i:2	SM:i:37	AM:i:25	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_206_2039	113	chr4.nib:1-191273063	103793427	0	23M	chr18.nib:1-76117153	57165542	0	ACACACACACACACACACACACA	NKWZVWZ]]XV[]]]]]]]]]]]	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:1292040	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_206_2039	177	chr18.nib:1-76117153	57165542	0	23M	chr4.nib:1-191273063	103793427	0	CACACACACACACACACACACAC	NAJ[SPT[]]]W[]]]]]]]]]]	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:1292040	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
diff --git a/lib/galaxy/datatypes/test/1.sff b/lib/galaxy/datatypes/test/1.sff
new file mode 100644
index 0000000..978d88d
Binary files /dev/null and b/lib/galaxy/datatypes/test/1.sff differ
diff --git a/lib/galaxy/datatypes/test/10col.pileup b/lib/galaxy/datatypes/test/10col.pileup
new file mode 100644
index 0000000..d9bb704
--- /dev/null
+++ b/lib/galaxy/datatypes/test/10col.pileup
@@ -0,0 +1,30 @@
+chrM	1	G	G	25	0	25	1	^:.	I
+chrM	2	T	T	25	0	25	1	.	I
+chrM	3	T	T	25	0	25	1	.	I
+chrM	4	A	A	36	0	25	3	.^:.^:.	II+
+chrM	5	A	A	36	0	25	3	...	III
+chrM	6	T	T	36	0	25	3	...	III
+chrM	7	G	G	42	0	25	5	...^:.^:.	IIIII
+chrM	8	T	T	45	0	25	6	.....^:.	IIIIII
+chrM	9	A	A	51	0	25	8	......^:.^:.	IIIIIIII
+chrM	10	G	G	54	0	25	9	........^:.	IIIIIIIII
+chrM	11	C	C	57	0	25	10	.........^:.	IIIIIIIIII
+chrM	12	T	T	60	0	25	11	..........^:.	IIIIIIIIIII
+chrM	13	T	T	78	0	25	17	...........^:.^:.^:.^:.^:.^:.	IIIIIIIIIIIIIIIII
+chrM	14	A	A	56	0	25	18	.......G.........^:.	BIIIIIII+IIIIIIIII
+chrM	15	A	A	87	0	25	20	..................^:.^:.	DIIIIIII(IIIIIIIIIII
+chrM	16	T	T	87	0	25	20	....................	IIIIIIIIIIIIIIIIIIII
+chrM	17	A	A	87	0	25	20	....................	9IIIIIIIIIIIIIIIIIII
+chrM	18	A	A	87	0	25	20	....................	@IIIIIIIIIIIIIIIIIII
+chrM	19	T	T	55	0	25	20	..................GG	IIIIIIIIIIIIIIIIII'A
+chrM	20	A	A	54	0	25	20	..................C.	IIIIIIIIIIIIIII2II#$
+chrM	21	T	T	87	0	25	20	....................	IIIIIIIIIIIIIIIIIIII
+chrM	22	A	A	87	0	25	20	....................	IIIIIIIIIIIIIIAIIIII
+chrM	23	A	A	87	0	25	20	....................	9IIIIIIIIII0IIIIIIII
+chrM	24	A	A	87	0	25	20	........N...........	IIIIIIII"IIIIICIIIII
+chrM	25	G	G	57	0	25	21	...A................^:.	A@.$IIIIIIIFIIIIIIIII
+chrM	26	C	C	57	0	25	21	.......A.............	IIHIDII&IIIIIIIIIIIII
+chrM	27	A	A	99	0	25	24	.....................^:.^:.^:.	IE8IFIII9IIIIIIIIIIIIIII
+chrM	28	A	A	99	0	25	24	........................	1FIIIIIIIIIIIIIIIIDEIIII
+chrM	29	G	G	55	0	25	24	..................NN....	;IIIIII+HII=IIIIII""IIII
+chrM	30	G	G	68	0	25	25	...C....................^:.	;I?&IAI0IIIIIIIIIIIIIIIII
diff --git a/lib/galaxy/datatypes/test/2.fastq b/lib/galaxy/datatypes/test/2.fastq
new file mode 100644
index 0000000..abb5f2f
--- /dev/null
+++ b/lib/galaxy/datatypes/test/2.fastq
@@ -0,0 +1,8 @@
+@seq1  
+GACAGCTTGGTTTTTAGTGAGTTGTTCCTTTCTTT  
++seq1  
+hhhhhhhhhhhhhhhhhhhhhhhhhhPW@hhhhhh  
+@seq2  
+GCAATGACGGCAGCAATAAACTCAACAGGTGCTGG  
++seq2  
+hhhhhhhhhhhhhhYhhahhhhWhAhFhSIJGChO
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/2.fastqsanger b/lib/galaxy/datatypes/test/2.fastqsanger
new file mode 100644
index 0000000..44caf2e
--- /dev/null
+++ b/lib/galaxy/datatypes/test/2.fastqsanger
@@ -0,0 +1,8 @@
+@1831_573_1004/1
+AATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
++
+29 27 34 5 5 24 24 20 17 10 34 29 20 27 13 30 27 22 24 11 28 19 17 27 17 24 17 25 27 7 24 14 12 22
+@1831_573_1050/1
+TTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
++
+26 31 31 16 22 30 31 28 29 22 30 30 31 32 23 30 28 28 31 19 32 30 32 19 8 5 10 13 6 5 10 6 16 11
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/2.fastqsolexa b/lib/galaxy/datatypes/test/2.fastqsolexa
new file mode 100644
index 0000000..abb5f2f
--- /dev/null
+++ b/lib/galaxy/datatypes/test/2.fastqsolexa
@@ -0,0 +1,8 @@
+@seq1  
+GACAGCTTGGTTTTTAGTGAGTTGTTCCTTTCTTT  
++seq1  
+hhhhhhhhhhhhhhhhhhhhhhhhhhPW@hhhhhh  
+@seq2  
+GCAATGACGGCAGCAATAAACTCAACAGGTGCTGG  
++seq2  
+hhhhhhhhhhhhhhYhhahhhhWhAhFhSIJGChO
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/3unsorted.bam b/lib/galaxy/datatypes/test/3unsorted.bam
new file mode 100644
index 0000000..e86c83a
Binary files /dev/null and b/lib/galaxy/datatypes/test/3unsorted.bam differ
diff --git a/lib/galaxy/datatypes/test/4.bed b/lib/galaxy/datatypes/test/4.bed
new file mode 100644
index 0000000..6f32a4f
--- /dev/null
+++ b/lib/galaxy/datatypes/test/4.bed
@@ -0,0 +1 @@
+chr22	30128507	31828507	uc003bnx.1_cds_2_0_chr22_29227_f	0	+
diff --git a/lib/galaxy/datatypes/test/5e5z.pdb b/lib/galaxy/datatypes/test/5e5z.pdb
new file mode 100644
index 0000000..395b1fe
--- /dev/null
+++ b/lib/galaxy/datatypes/test/5e5z.pdb
@@ -0,0 +1,357 @@
+HEADER    DE NOVO PROTEIN, MEMBRANE PROTEIN       09-OCT-15   5E5Z              
+TITLE     STRUCTURE OF THE AMYLOID FORMING PEPTIDE LVHSSN (RESIDUES             
+COMPND    MOL_ID: 1;                                                            
+COMPND   2 MOLECULE: LVHSSN (RESIDUES 16-21) FROM ISLET AMYLOID POLYPEPTIDE;    
+COMPND   3 CHAIN: A;                                                            
+COMPND   4 ENGINEERED: YES                                                      
+SOURCE    MOL_ID: 1;                                                            
+SOURCE   2 SYNTHETIC: YES;                                                      
+SOURCE   3 ORGANISM_SCIENTIFIC: HOMO SAPIENS;                                   
+SOURCE   4 ORGANISM_TAXID: 9606                                                 
+KEYWDS    AMYLOID-LIKE PROTOFIBRIL, DE NOVO PROTEIN, MEMBRANE PROTEIN, PROTEIN  
+KEYWDS   2 FIBRIL                                                               
+EXPDTA    X-RAY DIFFRACTION                                                     
+AUTHOR    A.B.SORIAGA,D.EISENBERG                                               
+REVDAT   2   20-JAN-16 5E5Z    1       JRNL                                     
+REVDAT   1   16-DEC-15 5E5Z    0                                                
+JRNL        AUTH   A.B.SORIAGA,S.SANGWAN,R.MACDONALD,M.R.SAWAYA,D.EISENBERG     
+JRNL        TITL   CRYSTAL STRUCTURES OF IAPP AMYLOIDOGENIC SEGMENTS REVEAL A   
+JRNL        TITL 2 NOVEL PACKING MOTIF OF OUT-OF-REGISTER BETA SHEETS.          
+JRNL        REF    J.PHYS.CHEM.B                              2016              
+JRNL        REFN                   ISSN 1089-5647                               
+JRNL        PMID   26629790                                                     
+JRNL        DOI    10.1021/ACS.JPCB.5B09981                                     
+REMARK   2                                                                      
+REMARK   2 RESOLUTION.    1.66 ANGSTROMS.                                       
+REMARK   3                                                                      
+REMARK   3 REFINEMENT.                                                          
+REMARK   3   PROGRAM     : PHENIX 1.6.4_486                                     
+REMARK   3   AUTHORS     : PAUL ADAMS,PAVEL AFONINE,VINCENT CHEN,IAN            
+REMARK   3               : DAVIS,KRESHNA GOPAL,RALF GROSSE-KUNSTLEVE,           
+REMARK   3               : LI-WEI HUNG,ROBERT IMMORMINO,TOM IOERGER,            
+REMARK   3               : AIRLIE MCCOY,ERIK MCKEE,NIGEL MORIARTY,              
+REMARK   3               : REETAL PAI,RANDY READ,JANE RICHARDSON,               
+REMARK   3               : DAVID RICHARDSON,TOD ROMO,JIM SACCHETTINI,           
+REMARK   3               : NICHOLAS SAUTER,JACOB SMITH,LAURENT                  
+REMARK   3               : STORONI,TOM TERWILLIGER,PETER ZWART                  
+REMARK   3                                                                      
+REMARK   3    REFINEMENT TARGET : LS_WUNIT_K1                                   
+REMARK   3                                                                      
+REMARK   3  DATA USED IN REFINEMENT.                                            
+REMARK   3   RESOLUTION RANGE HIGH (ANGSTROMS) : 1.66                           
+REMARK   3   RESOLUTION RANGE LOW  (ANGSTROMS) : 9.46                           
+REMARK   3   MIN(FOBS/SIGMA_FOBS)              : 0.000                          
+REMARK   3   COMPLETENESS FOR RANGE        (%) : 89.1                           
+REMARK   3   NUMBER OF REFLECTIONS             : 391                            
+REMARK   3                                                                      
+REMARK   3  FIT TO DATA USED IN REFINEMENT.                                     
+REMARK   3   R VALUE     (WORKING + TEST SET) : 0.170                           
+REMARK   3   R VALUE            (WORKING SET) : 0.167                           
+REMARK   3   FREE R VALUE                     : 0.198                           
+REMARK   3   FREE R VALUE TEST SET SIZE   (%) : 4.600                           
+REMARK   3   FREE R VALUE TEST SET COUNT      : 18                              
+REMARK   3                                                                      
+REMARK   3  FIT TO DATA USED IN REFINEMENT (IN BINS).                           
+REMARK   3   BIN  RESOLUTION RANGE  COMPL.    NWORK NFREE   RWORK  RFREE        
+REMARK   3     1  9.4587 -  1.6644    0.89      373    18  0.1673 0.1983        
+REMARK   3                                                                      
+REMARK   3  BULK SOLVENT MODELLING.                                             
+REMARK   3   METHOD USED        : FLAT BULK SOLVENT MODEL                       
+REMARK   3   SOLVENT RADIUS     : 0.00                                          
+REMARK   3   SHRINKAGE RADIUS   : 0.00                                          
+REMARK   3   K_SOL              : 0.60                                          
+REMARK   3   B_SOL              : 251.4                                         
+REMARK   3                                                                      
+REMARK   3  ERROR ESTIMATES.                                                    
+REMARK   3   COORDINATE ERROR (MAXIMUM-LIKELIHOOD BASED)     : 0.310            
+REMARK   3   PHASE ERROR (DEGREES, MAXIMUM-LIKELIHOOD BASED) : 18.270           
+REMARK   3                                                                      
+REMARK   3  B VALUES.                                                           
+REMARK   3   FROM WILSON PLOT           (A**2) : NULL                           
+REMARK   3   MEAN B VALUE      (OVERALL, A**2) : NULL                           
+REMARK   3   OVERALL ANISOTROPIC B VALUE.                                       
+REMARK   3    B11 (A**2) : 0.51090                                              
+REMARK   3    B22 (A**2) : -3.44720                                             
+REMARK   3    B33 (A**2) : -8.26450                                             
+REMARK   3    B12 (A**2) : 0.00000                                              
+REMARK   3    B13 (A**2) : 0.77970                                              
+REMARK   3    B23 (A**2) : 0.00000                                              
+REMARK   3                                                                      
+REMARK   3  TWINNING INFORMATION.                                               
+REMARK   3   FRACTION: NULL                                                     
+REMARK   3   OPERATOR: NULL                                                     
+REMARK   3                                                                      
+REMARK   3  DEVIATIONS FROM IDEAL VALUES.                                       
+REMARK   3                 RMSD          COUNT                                  
+REMARK   3   BOND      :  0.004             46                                  
+REMARK   3   ANGLE     :  0.975             62                                  
+REMARK   3   CHIRALITY :  0.056              8                                  
+REMARK   3   PLANARITY :  0.004              8                                  
+REMARK   3   DIHEDRAL  : 10.740             15                                  
+REMARK   3                                                                      
+REMARK   3  TLS DETAILS                                                         
+REMARK   3   NUMBER OF TLS GROUPS  : 1                                          
+REMARK   3   TLS GROUP : 1                                                      
+REMARK   3    SELECTION: ALL                                                    
+REMARK   3    ORIGIN FOR THE GROUP (A):   4.5323   0.1096   3.9760              
+REMARK   3    T TENSOR                                                          
+REMARK   3      T11:  -0.1260 T22:  -0.0788                                     
+REMARK   3      T33:  -0.0487 T12:   0.0821                                     
+REMARK   3      T13:  -0.0518 T23:   0.0723                                     
+REMARK   3    L TENSOR                                                          
+REMARK   3      L11:   0.1003 L22:   0.0184                                     
+REMARK   3      L33:   0.0647 L12:  -0.0319                                     
+REMARK   3      L13:   0.0506 L23:  -0.0233                                     
+REMARK   3    S TENSOR                                                          
+REMARK   3      S11:   0.0084 S12:  -0.0300 S13:  -0.0565                       
+REMARK   3      S21:   0.0231 S22:   0.0090 S23:   0.0127                       
+REMARK   3      S31:  -0.0046 S32:  -0.0049 S33:  -0.0009                       
+REMARK   3                                                                      
+REMARK   3  NCS DETAILS                                                         
+REMARK   3   NUMBER OF NCS GROUPS : NULL                                        
+REMARK   3                                                                      
+REMARK   3  OTHER REFINEMENT REMARKS: NULL                                      
+REMARK   4                                                                      
+REMARK   4 5E5Z COMPLIES WITH FORMAT V. 3.30, 13-JUL-11                         
+REMARK 100                                                                      
+REMARK 100 THIS ENTRY HAS BEEN PROCESSED BY RCSB ON 09-OCT-15.                  
+REMARK 100 THE DEPOSITION ID IS D_1000214421.                                   
+REMARK 200                                                                      
+REMARK 200 EXPERIMENTAL DETAILS                                                 
+REMARK 200  EXPERIMENT TYPE                : X-RAY DIFFRACTION                  
+REMARK 200  DATE OF DATA COLLECTION        : 10-MAR-10                          
+REMARK 200  TEMPERATURE           (KELVIN) : 291                                
+REMARK 200  PH                             : NULL                               
+REMARK 200  NUMBER OF CRYSTALS USED        : NULL                               
+REMARK 200                                                                      
+REMARK 200  SYNCHROTRON              (Y/N) : Y                                  
+REMARK 200  RADIATION SOURCE               : APS                                
+REMARK 200  BEAMLINE                       : 24-ID-E                            
+REMARK 200  X-RAY GENERATOR MODEL          : NULL                               
+REMARK 200  MONOCHROMATIC OR LAUE    (M/L) : M                                  
+REMARK 200  WAVELENGTH OR RANGE        (A) : 0.979                              
+REMARK 200  MONOCHROMATOR                  : NULL                               
+REMARK 200  OPTICS                         : NULL                               
+REMARK 200                                                                      
+REMARK 200  DETECTOR TYPE                  : CCD                                
+REMARK 200  DETECTOR MANUFACTURER          : ADSC QUANTUM 315                   
+REMARK 200  INTENSITY-INTEGRATION SOFTWARE : DENZO                              
+REMARK 200  DATA SCALING SOFTWARE          : NULL                               
+REMARK 200                                                                      
+REMARK 200  NUMBER OF UNIQUE REFLECTIONS   : 1136                               
+REMARK 200  RESOLUTION RANGE HIGH      (A) : 1.600                              
+REMARK 200  RESOLUTION RANGE LOW       (A) : 100.000                            
+REMARK 200  REJECTION CRITERIA  (SIGMA(I)) : NULL                               
+REMARK 200                                                                      
+REMARK 200 OVERALL.                                                             
+REMARK 200  COMPLETENESS FOR RANGE     (%) : 92.9                               
+REMARK 200  DATA REDUNDANCY                : 2.900                              
+REMARK 200  R MERGE                    (I) : 0.07600                            
+REMARK 200  R SYM                      (I) : NULL                               
+REMARK 200  <I/SIGMA(I)> FOR THE DATA SET  : 17.8600                            
+REMARK 200                                                                      
+REMARK 200 IN THE HIGHEST RESOLUTION SHELL.                                     
+REMARK 200  HIGHEST RESOLUTION SHELL, RANGE HIGH (A) : NULL                     
+REMARK 200  HIGHEST RESOLUTION SHELL, RANGE LOW  (A) : NULL                     
+REMARK 200  COMPLETENESS FOR SHELL     (%) : NULL                               
+REMARK 200  DATA REDUNDANCY IN SHELL       : NULL                               
+REMARK 200  R MERGE FOR SHELL          (I) : NULL                               
+REMARK 200  R SYM FOR SHELL            (I) : NULL                               
+REMARK 200  <I/SIGMA(I)> FOR SHELL         : NULL                               
+REMARK 200                                                                      
+REMARK 200 DIFFRACTION PROTOCOL: SINGLE WAVELENGTH                              
+REMARK 200 METHOD USED TO DETERMINE THE STRUCTURE: MOLECULAR REPLACEMENT        
+REMARK 200 SOFTWARE USED: PHASER                                                
+REMARK 200 STARTING MODEL: NULL                                                 
+REMARK 200                                                                      
+REMARK 200 REMARK: NULL                                                         
+REMARK 280                                                                      
+REMARK 280 CRYSTAL                                                              
+REMARK 280 SOLVENT CONTENT, VS   (%): 6.59                                      
+REMARK 280 MATTHEWS COEFFICIENT, VM (ANGSTROMS**3/DA): 1.32                     
+REMARK 280                                                                      
+REMARK 280 CRYSTALLIZATION CONDITIONS: 20 MG/ML IN WATER AND MIXED WITH 0.09    
+REMARK 280  M HEPES PH 7.5, 1.26M TRI-SODIUM CITRATE, AND 10% GLYCEROL,         
+REMARK 280  VAPOR DIFFUSION, HANGING DROP, TEMPERATURE 291K                     
+REMARK 290                                                                      
+REMARK 290 CRYSTALLOGRAPHIC SYMMETRY                                            
+REMARK 290 SYMMETRY OPERATORS FOR SPACE GROUP: P 1 21 1                         
+REMARK 290                                                                      
+REMARK 290      SYMOP   SYMMETRY                                                
+REMARK 290     NNNMMM   OPERATOR                                                
+REMARK 290       1555   X,Y,Z                                                   
+REMARK 290       2555   -X,Y+1/2,-Z                                             
+REMARK 290                                                                      
+REMARK 290     WHERE NNN -> OPERATOR NUMBER                                     
+REMARK 290           MMM -> TRANSLATION VECTOR                                  
+REMARK 290                                                                      
+REMARK 290 CRYSTALLOGRAPHIC SYMMETRY TRANSFORMATIONS                            
+REMARK 290 THE FOLLOWING TRANSFORMATIONS OPERATE ON THE ATOM/HETATM             
+REMARK 290 RECORDS IN THIS ENTRY TO PRODUCE CRYSTALLOGRAPHICALLY                
+REMARK 290 RELATED MOLECULES.                                                   
+REMARK 290   SMTRY1   1  1.000000  0.000000  0.000000        0.00000            
+REMARK 290   SMTRY2   1  0.000000  1.000000  0.000000        0.00000            
+REMARK 290   SMTRY3   1  0.000000  0.000000  1.000000        0.00000            
+REMARK 290   SMTRY1   2 -1.000000  0.000000  0.000000        0.00000            
+REMARK 290   SMTRY2   2  0.000000  1.000000  0.000000        4.80450            
+REMARK 290   SMTRY3   2  0.000000  0.000000 -1.000000        0.00000            
+REMARK 290                                                                      
+REMARK 290 REMARK: NULL                                                         
+REMARK 300                                                                      
+REMARK 300 BIOMOLECULE: 1                                                       
+REMARK 300 SEE REMARK 350 FOR THE AUTHOR PROVIDED AND/OR PROGRAM                
+REMARK 300 GENERATED ASSEMBLY INFORMATION FOR THE STRUCTURE IN                  
+REMARK 300 THIS ENTRY. THE REMARK MAY ALSO PROVIDE INFORMATION ON               
+REMARK 300 BURIED SURFACE AREA.                                                 
+REMARK 350                                                                      
+REMARK 350 COORDINATES FOR A COMPLETE MULTIMER REPRESENTING THE KNOWN           
+REMARK 350 BIOLOGICALLY SIGNIFICANT OLIGOMERIZATION STATE OF THE                
+REMARK 350 MOLECULE CAN BE GENERATED BY APPLYING BIOMT TRANSFORMATIONS          
+REMARK 350 GIVEN BELOW.  BOTH NON-CRYSTALLOGRAPHIC AND                          
+REMARK 350 CRYSTALLOGRAPHIC OPERATIONS ARE GIVEN.                               
+REMARK 350                                                                      
+REMARK 350 BIOMOLECULE: 1                                                       
+REMARK 350 AUTHOR DETERMINED BIOLOGICAL UNIT: DECAMERIC                         
+REMARK 350 APPLY THE FOLLOWING TO CHAINS: A                                     
+REMARK 350   BIOMT1   1  1.000000  0.000000  0.000000        0.00000            
+REMARK 350   BIOMT2   1  0.000000  1.000000  0.000000        0.00000            
+REMARK 350   BIOMT3   1  0.000000  0.000000  1.000000        0.00000            
+REMARK 350   BIOMT1   2  1.000000  0.000000  0.000000        0.00000            
+REMARK 350   BIOMT2   2  0.000000  1.000000  0.000000       -9.60900            
+REMARK 350   BIOMT3   2  0.000000  0.000000  1.000000        0.00000            
+REMARK 350   BIOMT1   3  1.000000  0.000000  0.000000        0.00000            
+REMARK 350   BIOMT2   3  0.000000  1.000000  0.000000        9.60900            
+REMARK 350   BIOMT3   3  0.000000  0.000000  1.000000        0.00000            
+REMARK 350   BIOMT1   4  1.000000  0.000000  0.000000        9.64300            
+REMARK 350   BIOMT2   4  0.000000  1.000000  0.000000        0.00000            
+REMARK 350   BIOMT3   4  0.000000  0.000000  1.000000        0.00000            
+REMARK 350   BIOMT1   5  1.000000  0.000000  0.000000        9.64300            
+REMARK 350   BIOMT2   5  0.000000  1.000000  0.000000       -9.60900            
+REMARK 350   BIOMT3   5  0.000000  0.000000  1.000000        0.00000            
+REMARK 350   BIOMT1   6  1.000000  0.000000  0.000000        9.64300            
+REMARK 350   BIOMT2   6  0.000000  1.000000  0.000000        9.60900            
+REMARK 350   BIOMT3   6  0.000000  0.000000  1.000000        0.00000            
+REMARK 350   BIOMT1   7 -1.000000  0.000000  0.000000        9.64300            
+REMARK 350   BIOMT2   7  0.000000  1.000000  0.000000       -4.80450            
+REMARK 350   BIOMT3   7  0.000000  0.000000 -1.000000        0.00000            
+REMARK 350   BIOMT1   8 -1.000000  0.000000  0.000000        9.64300            
+REMARK 350   BIOMT2   8  0.000000  1.000000  0.000000        4.80450            
+REMARK 350   BIOMT3   8  0.000000  0.000000 -1.000000        0.00000            
+REMARK 350   BIOMT1   9 -1.000000  0.000000  0.000000       19.28600            
+REMARK 350   BIOMT2   9  0.000000  1.000000  0.000000       -4.80450            
+REMARK 350   BIOMT3   9  0.000000  0.000000 -1.000000        0.00000            
+REMARK 350   BIOMT1  10 -1.000000  0.000000  0.000000       19.28600            
+REMARK 350   BIOMT2  10  0.000000  1.000000  0.000000        4.80450            
+REMARK 350   BIOMT3  10  0.000000  0.000000 -1.000000        0.00000            
+REMARK 900                                                                      
+REMARK 900 RELATED ENTRIES                                                      
+REMARK 900 RELATED ID: 5E5V   RELATED DB: PDB                                   
+REMARK 900 RELATED ID: 5E5X   RELATED DB: PDB                                   
+REMARK 900 RELATED ID: 5E61   RELATED DB: PDB                                   
+DBREF  5E5Z A    1     6  PDB    5E5Z     5E5Z             1      6             
+SEQRES   1 A    6  LEU VAL HIS SER SER ASN                                      
+FORMUL   2  HOH   *(H2 O)                                                       
+CRYST1    9.643    9.609   19.029  90.00 101.22  90.00 P 1 21 1      2          
+ORIGX1      1.000000  0.000000  0.000000        0.00000                         
+ORIGX2      0.000000  1.000000  0.000000        0.00000                         
+ORIGX3      0.000000  0.000000  1.000000        0.00000                         
+SCALE1      0.103702  0.000000  0.020579        0.00000                         
+SCALE2      0.000000  0.104069  0.000000        0.00000                         
+SCALE3      0.000000  0.000000  0.053576        0.00000                         
+ATOM      1  N   LEU A   1       6.078  -0.306  -5.753  1.00  0.00           N  
+ANISOU    1  N   LEU A   1        0      0      0      0      0      0       N  
+ATOM      2  CA  LEU A   1       5.166  -0.026  -4.647  1.00  2.42           C  
+ANISOU    2  CA  LEU A   1      307    307    307      0      0      0       C  
+ATOM      3  C   LEU A   1       5.682  -0.642  -3.356  1.00  3.48           C  
+ANISOU    3  C   LEU A   1      435    443    445      1      1      9       C  
+ATOM      4  O   LEU A   1       6.056  -1.814  -3.322  1.00  3.52           O  
+ANISOU    4  O   LEU A   1      436    449    454      2      2     16       O  
+ATOM      5  CB  LEU A   1       3.755  -0.555  -4.967  1.00  1.86           C  
+ANISOU    5  CB  LEU A   1      232    237    238      1      1      5       C  
+ATOM      6  CG  LEU A   1       2.596  -0.354  -3.975  1.00  6.87           C  
+ANISOU    6  CG  LEU A   1      861    873    877      2      2     14       C  
+ATOM      7  CD1 LEU A   1       2.753  -1.182  -2.704  1.00 11.83           C  
+ANISOU    7  CD1 LEU A   1     1481   1504   1512      4      4     27       C  
+ATOM      8  CD2 LEU A   1       2.404   1.122  -3.638  1.00  4.27           C  
+ANISOU    8  CD2 LEU A   1      537    543    544      1      2      7       C  
+ATOM      9  N   VAL A   2       5.715   0.161  -2.297  1.00  0.61           N  
+ANISOU    9  N   VAL A   2       71     80     82      2      2     11       N  
+ATOM     10  CA  VAL A   2       5.968  -0.352  -0.960  1.00  0.12           C  
+ANISOU   10  CA  VAL A   2        1     20     24      4      4     22       C  
+ATOM     11  C   VAL A   2       4.976   0.281   0.000  1.00  3.40           C  
+ANISOU   11  C   VAL A   2      413    437    440      5      5     27       C  
+ATOM     12  O   VAL A   2       4.746   1.489  -0.046  1.00  3.22           O  
+ANISOU   12  O   VAL A   2      395    414    414      4      5     20       O  
+ATOM     13  CB  VAL A   2       7.400  -0.027  -0.475  1.00  3.56           C  
+ANISOU   13  CB  VAL A   2      440    456    458      3      3     18       C  
+ATOM     14  CG1 VAL A   2       7.566  -0.421   0.993  1.00  7.93           C  
+ANISOU   14  CG1 VAL A   2      986   1012   1016      5      5     30       C  
+ATOM     15  CG2 VAL A   2       8.429  -0.722  -1.342  1.00  6.71           C  
+ANISOU   15  CG2 VAL A   2      841    853    856      2      2     14       C  
+ATOM     16  N   HIS A   3       4.367  -0.537   0.850  1.00  0.22           N  
+ANISOU   16  N   HIS A   3        1     38     44      7      8     41       N  
+ATOM     17  CA  HIS A   3       3.603  -0.011   1.971  1.00  1.73           C  
+ANISOU   17  CA  HIS A   3      189    233    237     10     10     48       C  
+ATOM     18  C   HIS A   3       4.003  -0.675   3.280  1.00  1.84           C  
+ANISOU   18  C   HIS A   3      194    250    255     12     12     61       C  
+ATOM     19  O   HIS A   3       4.208  -1.889   3.338  1.00  0.73           O  
+ANISOU   19  O   HIS A   3       47    109    120     11     12     69       O  
+ATOM     20  CB  HIS A   3       2.095  -0.177   1.781  1.00  2.62           C  
+ANISOU   20  CB  HIS A   3      296    346    351     11     11     54       C  
+ATOM     21  CG  HIS A   3       1.324   0.074   3.040  1.00  2.97           C  
+ANISOU   21  CG  HIS A   3      335    396    399     14     14     66       C  
+ATOM     22  ND1 HIS A   3       0.950  -0.937   3.900  1.00  4.29           N  
+ANISOU   22  ND1 HIS A   3      491    566    573     16     17     82       N  
+ATOM     23  CD2 HIS A   3       0.921   1.230   3.620  1.00  4.90           C  
+ANISOU   23  CD2 HIS A   3      581    642    639     16     16     64       C  
+ATOM     24  CE1 HIS A   3       0.321  -0.417   4.940  1.00  5.53           C  
+ANISOU   24  CE1 HIS A   3      644    727    729     20     20     89       C  
+ATOM     25  NE2 HIS A   3       0.290   0.896   4.794  1.00  6.02           N  
+ANISOU   25  NE2 HIS A   3      714    790    785     20     19     78       N  
+ATOM     26  N   SER A   4       4.099   0.141   4.326  1.00  0.34           N  
+ANISOU   26  N   SER A   4        3     63     63     14     14     62       N  
+ATOM     27  CA  SER A   4       4.357  -0.330   5.683  1.00  1.49           C  
+ANISOU   27  CA  SER A   4      141    213    213     16     16     75       C  
+ATOM     28  C   SER A   4       3.814   0.686   6.681  1.00  2.14           C  
+ANISOU   28  C   SER A   4      222    299    292     20     19     78       C  
+ATOM     29  O   SER A   4       4.008   1.889   6.507  1.00  3.47           O  
+ANISOU   29  O   SER A   4      397    465    454     19     18     68       O  
+ATOM     30  CB  SER A   4       5.858  -0.513   5.905  1.00  5.61           C  
+ANISOU   30  CB  SER A   4      665    734    734     15     15     72       C  
+ATOM     31  OG  SER A   4       6.132  -0.771   7.272  1.00  9.89           O  
+ANISOU   31  OG  SER A   4     1200   1280   1278     18     18     83       O  
+ATOM     32  N   SER A   5       3.138   0.213   7.725  1.00  2.34           N  
+ANISOU   32  N   SER A   5      239    330    322     24     23     93       N  
+ATOM     33  CA  SER A   5       2.651   1.119   8.765  1.00  0.66           C  
+ANISOU   33  CA  SER A   5       24    123    106     28     26     97       C  
+ATOM     34  C   SER A   5       3.677   1.311   9.885  1.00  2.66           C  
+ANISOU   34  C   SER A   5      275    378    356     30     27    100       C  
+ATOM     35  O   SER A   5       3.411   2.024  10.851  1.00  2.02           O  
+ANISOU   35  O   SER A   5      193    303    273     35     30    104       O  
+ATOM     36  CB  SER A   5       1.318   0.639   9.350  1.00  2.68           C  
+ANISOU   36  CB  SER A   5      269    383    365     32     29    113       C  
+ATOM     37  OG  SER A   5       1.478  -0.544  10.117  1.00  2.49           O  
+ANISOU   37  OG  SER A   5      236    363    349     33     31    128       O  
+ATOM     38  N   ASN A   6       4.838   0.672   9.758  1.00  2.94           N  
+ANISOU   38  N   ASN A   6      311    412    394     28     25     98       N  
+ATOM     39  CA  ASN A   6       5.912   0.838  10.741  1.00  4.68           C  
+ANISOU   39  CA  ASN A   6      530    634    613     29     26    100       C  
+ATOM     40  C   ASN A   6       6.574   2.203  10.638  1.00 10.84           C  
+ANISOU   40  C   ASN A   6     1320   1413   1387     28     24     87       C  
+ATOM     41  O   ASN A   6       7.335   2.594  11.519  1.00 13.68           O  
+ANISOU   41  O   ASN A   6     1680   1775   1745     30     26     88       O  
+ATOM     42  CB  ASN A   6       6.986  -0.243  10.589  1.00  5.08           C  
+ANISOU   42  CB  ASN A   6      579    682    668     27     25    102       C  
+ATOM     43  CG  ASN A   6       6.592  -1.558  11.236  1.00  8.08           C  
+ANISOU   43  CG  ASN A   6      948   1067   1057     28     27    120       C  
+ATOM     44  OD1 ASN A   6       5.576  -1.644  11.923  1.00  8.72           O  
+ANISOU   44  OD1 ASN A   6     1022   1152   1139     32     30    131       O  
+ATOM     45  ND2 ASN A   6       7.409  -2.588  11.030  1.00  9.89           N  
+ANISOU   45  ND2 ASN A   6     1174   1293   1290     25     25    122       N  
+ATOM     46  OXT ASN A   6       6.383   2.933   9.667  1.00 14.02           O  
+ANISOU   46  OXT ASN A   6     1730   1811   1787     25     22     75       O  
+TER      47      ASN A   6                                                      
+HETATM   48  O   HOH A 101       8.203   1.052  -4.564  1.00 12.67           O  
+ANISOU   48  O   HOH A 101     1605   1605   1605      0      0      0       O  
+MASTER      227    0    0    0    0    0    0    6   47    1    0    1          
+END                                                                             
diff --git a/lib/galaxy/datatypes/test/6col.pileup b/lib/galaxy/datatypes/test/6col.pileup
new file mode 100644
index 0000000..c8dc44a
--- /dev/null
+++ b/lib/galaxy/datatypes/test/6col.pileup
@@ -0,0 +1,30 @@
+chrM	42	C	1	^:.	I
+chrM	43	C	2	.^:.	II
+chrM	44	T	2	..	II
+chrM	45	A	3	..^:.	III
+chrM	46	G	4	...^:.	IIII
+chrM	47	A	5	....^:,	IIIII
+chrM	48	T	5	....,	IIIII
+chrM	49	G	5	....,	IIIII
+chrM	50	A	5	....,	IIIII
+chrM	51	G	5	....,	IIIII
+chrM	52	T	5	....,	IIIII
+chrM	53	A	5	....,	IIIII
+chrM	54	T	5	....,	IIIII
+chrM	55	T	5	....,	IIIII
+chrM	56	C	5	....,	IIIII
+chrM	57	T	5	....,	IIIII
+chrM	58	T	5	....,	IIIII
+chrM	59	A	5	....,	IIIII
+chrM	60	C	5	....,	IIIII
+chrM	61	T	5	....,	IIIII
+chrM	62	C	5	....,	IIIII
+chrM	63	C	5	....,	IIIII
+chrM	64	A	5	....,	IIIII
+chrM	65	T	5	....,	IIIII
+chrM	66	A	5	....,	IIIII
+chrM	67	A	5	....,	IIIII
+chrM	68	A	5	....,	IIICI
+chrM	69	C	5	....,	IIIII
+chrM	70	A	5	....,	IIIII
+chrM	71	C	5	....,	IIIII
diff --git a/lib/galaxy/datatypes/test/NuBBE_1_obabel_3D.pdbqt b/lib/galaxy/datatypes/test/NuBBE_1_obabel_3D.pdbqt
new file mode 100644
index 0000000..84e3256
--- /dev/null
+++ b/lib/galaxy/datatypes/test/NuBBE_1_obabel_3D.pdbqt
@@ -0,0 +1,55 @@
+REMARK  9 active torsions:
+REMARK  status: ('A' for Active; 'I' for Inactive)
+REMARK    1  A    between atoms: C_2  and  O_3 
+REMARK    2  A    between atoms: C_2  and  C_14 
+REMARK    3  A    between atoms: O_3  and  C_4 
+REMARK    4  A    between atoms: C_4  and  C_5 
+REMARK    5  A    between atoms: C_6  and  C_8 
+REMARK    6  A    between atoms: C_8  and  C_9 
+REMARK    7  A    between atoms: C_9  and  C_10 
+REMARK    8  A    between atoms: C_16  and  O_17 
+REMARK    9  A    between atoms: C_19  and  O_20 
+ROOT
+ATOM      1  O   LIG d   1      -0.947  -0.436  -3.210  0.00  0.00    -0.259 OA
+ATOM      2  C   LIG d   1      -1.207   0.245  -2.235  0.00  0.00     0.293 C 
+ENDROOT
+BRANCH   2   3
+ATOM      3  O   LIG d   1      -1.935  -0.114  -1.151  0.00  0.00    -0.314 OA
+BRANCH   3   4
+ATOM      4  C   LIG d   1      -2.338  -1.483  -1.065  0.00  0.00     0.206 C 
+BRANCH   4   5
+ATOM      5  C   LIG d   1      -1.745  -2.032   0.200  0.00  0.00     0.002 C 
+ATOM      6  C   LIG d   1      -0.550  -2.644   0.338  0.00  0.00    -0.085 C 
+ATOM      7  C   LIG d   1       0.405  -2.885  -0.801  0.00  0.00     0.043 C 
+BRANCH   6   8
+ATOM      8  C   LIG d   1      -0.086  -3.153   1.694  0.00  0.00     0.037 C 
+BRANCH   8   9
+ATOM      9  C   LIG d   1       0.730  -2.114   2.473  0.00  0.00     0.031 C 
+BRANCH   9  10
+ATOM     10  C   LIG d   1       1.187  -2.618   3.818  0.00  0.00    -0.024 C 
+ATOM     11  C   LIG d   1       2.352  -3.233   4.110  0.00  0.00    -0.091 C 
+ATOM     12  C   LIG d   1       3.401  -3.605   3.099  0.00  0.00     0.042 C 
+ATOM     13  C   LIG d   1       2.699  -3.614   5.527  0.00  0.00     0.042 C 
+ENDBRANCH   9  10
+ENDBRANCH   8   9
+ENDBRANCH   6   8
+ENDBRANCH   4   5
+ENDBRANCH   3   4
+ENDBRANCH   2   3
+BRANCH   2  14
+ATOM     14  C   LIG d   1      -0.778   1.657  -2.066  0.00  0.00     0.042 A 
+ATOM     15  C   LIG d   1      -0.640   2.224  -0.790  0.00  0.00     0.057 A 
+ATOM     16  C   LIG d   1      -0.249   3.551  -0.677  0.00  0.00     0.099 A 
+ATOM     17  C   LIG d   1       0.012   4.308  -1.816  0.00  0.00     0.098 A 
+ATOM     18  C   LIG d   1      -0.102   3.753  -3.083  0.00  0.00     0.040 A 
+ATOM     19  C   LIG d   1      -0.494   2.419  -3.212  0.00  0.00     0.020 A 
+BRANCH  16  20
+ATOM     20  O   LIG d   1      -0.104   4.159   0.540  0.00  0.00    -0.358 OA
+ATOM     21  HO  LIG d   1       0.164   5.067   0.617  1.00  0.00     0.217 HD
+ENDBRANCH  16  20
+BRANCH  17  22
+ATOM     22  O   LIG d   1       0.389   5.614  -1.691  0.00  0.00    -0.358 OA
+ATOM     23  HO  LIG d   1       0.567   6.131  -2.469  1.00  0.00     0.217 HD
+ENDBRANCH  17  22
+ENDBRANCH   2  14
+TORSDOF 9
diff --git a/lib/galaxy/datatypes/test/alignment.axt b/lib/galaxy/datatypes/test/alignment.axt
new file mode 100644
index 0000000..c3b0d67
--- /dev/null
+++ b/lib/galaxy/datatypes/test/alignment.axt
@@ -0,0 +1,36 @@
+0 chr7 127489831 127489867 chr6 121127148 121127184 - 2518
+GATGCTTTGCTTCAAATCCATCCAGAATAAAACGCAA
+GATGGCTTGCTTCAGATCCATCCAAAATAGAATGCAG
+
+1 chr7 127479653 127479689 chr6 29111645 29111681 + 2999
+TTACCTGAATTTAATTCACAGTAGCATACAAAAGACT
+TTATCTGAATTTAATTCATAGTAGCATACCAAAGACT
+
+2 chr7 127475993 127477967 chr6 29107919 29109820 + 36883
+GGAACTTCTAGCCAGGACTGCTAAATACGCGCTGTTGGCCCA---------CCAGGCTCACCTATAGCCTTCCTTCAGTCTGGGCTTGGTTTGGATTTCACTGTGGGTGCCATCGCCTTTACACTCCTGTTTCTATAGTTTAAAGATAGTGGTGCTTTGGGAAAGT---GACTCCTTAAATACAGTTAGGTCCAAGTGAG-ACAAGTGGCCTGGCTGTCATTTCAGAATAGCAGCTTCCAAGAGGTGATTAATTTCTGTTGGAAGGGTGAT-CTTTGGGGAGGTGGGTGAAGAGCAGAGACTTGGTGGTACCGTTCCAGGAGCACAGGCTCTCT-----TCCTTTGCA--GTGCAGAATGACCTCTGGCAGCCGGAGTTGTGTTTGTTCTGTAGGATTCTGAGGTGGGCCATGGGCAGCTGGAACTGGGGAA---TTTTGCCAATCTCTTTCATATTAGGATTGTCTGCAGAACCAGATATGGAGG------ [...]
+GGCATTTCTACCCAAGATGGTTAAATACACGTTGCCAGCCTGAGGTCCCTGCATGCCCCTCCTTCAGCTTTCTTTCTTGCTGGGGTTAGTATT-ACTGCATTGTGGGTATGGT-GCCCTTACACTACT----CTATGTCTTACTG-----GGTGCATTAGATATGCCACGATGCCCTGTGTACATTGCTGCTTGGGAGAGGACAA----------------------------------AGCAAGCCAGTCATTTCTATTGGGAGCATGGGACTCAGGAGAGGCAGGCAGAGGGCAGGGATTTGGTGGCACCATCCTGGGAGCATAGGCTCTCTGAGCACTCTTTGCAAGGTGGGAATGGAATTCCAGTAGTGAGAGTCATATTT--TTCGTAGAGTT-TGTGATGGGCAGTGGTCAGCTGGGGCTGGGGGAGAGCATCTCTGACCTATTGCTGATTTGTACGGTGCATGGAACCAAACTTGAAAGGAGGCA [...]
+
+3 chr7 127475282 127475955 chr6 29107216 29107870 + 22599
+GTAGGAATCGCAGCGCCAGCGGTTGCAAGGTAAGGCCCCGG-CGCGCTCCTTCCTCCTTCTCTGCTGGTCTTTCTTGGCAGGCCACAGGGCCCCACACAACTCTGGATCCCGGGGAAACTGAGTCAGGAGGGATGCAGGGCGGATGGCTTAGTTCTGGACTATGATAGCTTTGTACCGAG-----TTCTAGCCAGATAGAAGGTTACCGGGAGCTGGGGAGCGTTGGATTTGCTGCTGGGCTGTGCCGGTGCCCAGAAGGCA------GGACCTTGCAGAACCAGCCAGGTCCCTGGGAGACTGTCAGACCCACCAACCTGGTGGCATTCGCAGAGCTGAGATGCATTGGAAATTGCCTTGGGCACATCCCCAAAGATCAGGATGTCCCACCCCAGTCTGAAGGAGA---TAAAGTTGGGGGTAGGAGAGACGCAGATGCAAGTGATCAGTCTC---AGTCCCAGACATTGCCTTGCTCTGCGGGTAGGAAT [...]
+GGAGGGATCCCTGCTCCAGCAGCTGCAAGGTAAGGCCCGGGGCGCGCTACTTTCTCCTCC---ACCAGTCTTTCT--------AATAGCACCCCATCCAGCTCTGGAAATTAGAGAAACTGAGGCAAGAAGGAGGTCATGTGGACAGCTTGGTGTTGAATT-CAGTAGTTTTGCAGCGAGGGACTCTGCAGACAGAGGGAAGGCTGTTGGGAGCTGAAAGGCATTAAATTCCTGGCTGGGCCG-GCCTGTGCCCACAGGGCACCAACCTGTTCTTGCAAAGCCAGCCAGGGCTCCA--AGCCTGTC-----------TTTGGT-----CTGCAGACTTGAGATGGTTAGG---------------------------------TTTCCCACAGCGTTCAGATGGAAAAAGCAAGGCTGGACGT-GTAGTGAGGCAGCCACAGCTTTCCGCCTTCCTGAAGGCCAGATACAACCTTGCTTTGCTG-------T [...]
+
+4 chr7 127478290 127478549 chr6 29109821 29110062 + 10483
+agagctgaaacaggagtagaaacctatctg-tatctctgATGAGATCAGATCTTTCTGATGAACAGAAAGAATGTAACCCCTGTACTCACACCCTCTCTGCTGGTTACATATGTTAACACGATTTCTCAAATGAGGCTTTTGGTTGCAAATAAGAGAAAATCACTCACGCT-GGCCCTGTGTTTTTCAAATTGTTTATTGTGATCAACATTTGAAAAAAGAGCCGAGACTCTCAAGAGTGCATTACCCACGGTAAGGGTGAA
+agagttgatgcaggagttgaaatctatttgctttccctAATGGGA-----------------ATGAAAAGAATATAACCCCCTCACCCACACTGTGCCTACCAGCTAAGAATGTCAGCATGATTTCTCAAGCAAGGCCTTTGGTTGCAGAGAAGAGAAAATTGCTCCTGCTAAGATCAGTGATTTTCAAATTGTATACTGTGGCCAGCACTTGAAGAATGAGCTGAAATCCCCTAGAG---ATCACCTGCAGCAAGGGCAAA
+
+5 chr7 127483498 127484068 chr6 29113019 29113586 + 20706
+tgagagctggcacaggacacttctgcttatatttcactggccagaacttagtcacatggtcacacctagttgggagactctgagaagtaaa----gtatttattctagatggccatatccctacc-taagacttggagttttctatgactggggaagaacggaagacaagatattgggaaagactagcagcctctactaAAAGGGTGATCtgtgttgatgtgcgtgtgtgtgtg-atgtttgtatga-gcatgtgtgttatgtgttgtgtgtTGGTGGGGCA--GATTCTTGCGAGCACTTTGGTCTCAGATGGACCTGCTACCAGTTCTCTCTGCAGACCCCCATAGGTTTCTCCTAAACCTGGCCTCTCCTATTAGGCAGCCTTACTCAGCGGCAGCTTCTCAGCTCCATGTTTTCAAGGAACCACAATTTATTTCCAGCATCCACTGAAGCATATTATCAGTGGTGATAGAGGGGGCTTGTAAAACTGT [...]
+TGGGAACTGCCACATGGCCCTTCTGCTTTTATTTAATCCATCAGTATTTGGCTGCATAACTGTACCTAATGGT---------AGAAGCAAGCCTGGCAGTTATTCTCCACAGCCACACGCCTCCCATAGGACTTGGAATGTCTTCAAATTCGGACAGAAAGAAAAATAAGACACGGGGAGAAACTAGCAGTCTCCAC-AGCATGGCTATGTATCTTGATGTGTCTGTGTATGTGTGTGTTCATGTTACATAAGCATGTTACGTGCTGT-------TGGGGCATCGATCCGTAACATGGCTTTGATCTCAGATGGATCTGTGACCACTTCTTTCTGCAACCCCAGATGCGTGTCCCAGAAGCCTGGCCTATCATCTTCAGCATACTTCCTCAGCGACAGCATCTCAGTCCCATGTTTCCAGAGCACTGTGATTTATTGCCAGCATCCACTGAGGCATATTATCCATGTCGACAGAGAGGCCTTGTAAACCTGC [...]
+
+6 chr7 127480779 127481318 chr6 29112240 29112723 + 9044
+GCAGCTCCAGGGGCTCCACATTCTACTCTTCTCATTTCTTCTCCAGGGTACCCATGGCAAGGGATGAGGGT--AGAAGATGGGGCAGCCAGGCCTTGATTAAAGGAGAAGGAAGGCAGCCTGTGGAGAGG-GCAGCCCAGGGAGTGCAGAGAGAAGTGGGCCATGAGGGAGACAGCAGAGTGCAGGCTGCGTCCCAAATGAGCATACAGCCCACTGTGAGCCCACC--ATCTTCCTAGA-GACCCCTCTCCTCTCCAGGAGCTGCTTCAGTAGCACTCAGA-----GGAAAGAATGATGCTGTATCAACATTTCAGCAGCTCATCTTTTAACTCTAAGAAAATGGCAGCTCCTAAATGTTCAAAACTGCTTTGGAAACTTCTGGAGAGAGGTTTTGCAGCTCAGGCAGACAGCTGATCGCGGCCTTTCTTCCACCCCAACCCATGCTCTCCCCATGCTCTCCTGCCACAGCTGCAGCGGGCCCCTGGGTCCT [...]
+GCTGCCTCAGGGTCTCTCCTCTCTGTTCTTTTAA---CCTCTCTAGTGTGCCC-TGGCAGGGAGTCACAATGGAGAAGGTGAGGCA------CATGGGTAGATGGCAGGGGAAGGCAGCC---GGAGAGGAGCTTTCTAGTGAG-------ACAACCTGAACATAAAACAGA-AGTAGAGGGCAGG-------------GAGGTCGCCGCCCA----GTGCCCACCCTGTCCTCCCAGATGGTCCCTTTTCTTTCCAGGCTCTGCTGCAATAGTCCCCAGACGTGAGGAACACAGTGTACTGCATCAGTATTTTAGCAATTCCTCTGCCAGCTCCGAGGCTATAACAACTCGTAAGTGGTTAAACTCAATTCAGGAACTTCT--AGAAAGGTTTTTTTG--TGGCCGGGTAGCTGCCAGTAGCCTTGCTCCTTGCCCACCCACT----TCCTCACTCCCTGGAATCACAGCCATGGC--------------- [...]
+
+7 chr7 127484464 127490737 chr6 29114484 29119946 + 162824
+TAAATCCTTGAGGAG-----CTGGAG---GGGTGGGTGGCTCGCACTCCTGCTTTctgg---------atctgaatcctgactctgtcatggacctgtt-tgactttgggcaagttgactcctattcctgagccccatat-ttttctcttctgtgaaattcagattaaaaAAA-CATGGCTTTGATCAAACATTATAAATAATATATAGACAGACTGCTTGTTTTTATTGTATTGCCAG-AAATGAATCCTACTAATATTGCCATCTATGGACAGAAAATGTATTACCTGTCTTCATCAAGACCCAGACGAGGAAGAACACGAAAAGCGGAGATTAATTTTACTGCCATCTCCAGAACCGTCATCCTAATATTTACTTACAT-TTTATTATTATTTCAGGCTCATGCACATATACTTAGCATGGATCATTGGCCACAGACTCGCATACATTTAACTTTATTACCTTT-TGCCTCATGTATCTCATTAAAATT [...]
+taaagccttggagagtgaagctgaggatggggTGTGGGGGTGGCTGTACTTCCATCTGTCCTGCGCACatctcag-cctgacact---acttaccttttgtgactttggaaaagttggctcatctttctgagcctcatttgttttctcttctgtaaaattcagattaaaaagagcgtacctTTAAGCAAGCACTATAAATAATATATAAACAGACTGCTCGTTTTTTCCGTACTGTCAGCGGGTGAGTTATACTAATATTACCACCGATGGAGGACAGCTATGTTACCTGTTCCCACCAATACCCAGATGAGGAAGAACAGAAAAAGGAGAAGTTAGCTTTTCTGCCATCTCCATAATCTTCATCCTAATATTTATGAATGTATTTATTATT----CAAGCTCATA-GCACACCCTGAGCATGGATCACTGGCTACAGGCTTGCATACATTTAACTTTATTATCTTTCTGCCTCATGCATCTCATTAGG--C [...]
+
+8 chr7 127490738 127491632 chr6 29120095 29120873 + 8882
+cataagaccata--------------------------------------------------------------------acagc----------caacaggtggcaggac----------caggactatag------------cccaggtcctctg------------------atacccagAG-CATTACGTGAGCCAGGTAATGAG---GGACTGGAACCAGGG----AGACCGAGCGCTTTC-----------------TGGAAAAGAG-------GAGTTTCGAGGTAGAGTTTGAAGGAGGTGAGGGATGTGAATT-------GCCTGCAGAGAGAAGC-CTGTTTTGTTGGAAGGTTTGGTGTGTGGAGATGCAGAGGTAAAAGTGTGAGCAGTGAGTTACAGCG--------AGAGGCAGAGAAAGAAGAGACAGGAGGGCAAGGGCCATGCTGAAGGGACCTTGAAGGGTAAAGAAGTTTGAT [...]
+CATAATACCGTATAGACACTGCTTGAAGTGTAGTTTTATACAGTGTTTTAAATAACGTTGTATGCATGAAAGACGTTTTTACAGCATGAACCTGTCTACTCATGCCAGCACTCAAAAACCTTGGGGTTTTGGAGCAGTTTGGATCTTGGGTTTTCTGTTAAGAGATGGTTAGCTTATACCTAAAACCATAATGGCAAACAGGCTGCAGGACCAGACTGGATCCTCAGCCCTGAAGTGTGCCCTTCCAGCCAGGTCATACCCTGTGGAGGTGAGCGGGATCAGGTTTTGTGGTG----CTAAGAGAGGAGTTGGAGGTAGATTTTGGAGGATCTGAGGGGTGATGTGATGTTTTATTGGAC-ACTTGGTATGT-----TGAAGGGATGAAAGTCCAAACAGGAAGTGACAGGGAAGACTGAAGAGACCGGGAAAG-AGTGACAGGAAG----------TGCTGAGAGGACTTTATGGGCCACAAAAGTGGCTT [...]
+
diff --git a/lib/galaxy/datatypes/test/alignment.lav b/lib/galaxy/datatypes/test/alignment.lav
new file mode 100644
index 0000000..7944e2a
--- /dev/null
+++ b/lib/galaxy/datatypes/test/alignment.lav
@@ -0,0 +1,178 @@
+#:lav
+d {
+  "blastz out.seq1m seq2data Y=3400 H=0 W=8 B=2 K=3000 C=0 m=83886080 P=0
+     A    C    G    T
+    91 -114  -31 -123
+  -114  100 -125  -31
+   -31 -125  100 -114
+  -123  -31 -114   91
+  O = 400, E = 30, K = 3000, L = 3000, M = 50"
+}
+#:lav
+s {
+  "out.seq1m" 1 1680 0 1
+  "seq2data" 1 1680 0 1
+}
+h {
+   ">seqmask seq1data out.repeats"
+   ">seq2"
+}
+a {
+  s 161304
+  b 1 1
+  e 1680 1680
+  l 1 1 1680 1680 100
+}
+a {
+  s 15173
+  b 186 251
+  e 450 505
+  l 186 251 244 309 92
+  l 256 310 280 334 84
+  l 281 336 307 362 78
+  l 308 374 329 395 91
+  l 341 396 367 422 81
+  l 369 423 419 473 94
+  l 422 474 425 477 75
+  l 426 481 450 505 96
+}
+a {
+  s 12134
+  b 191 343
+  e 361 503
+  l 191 343 215 367 80
+  l 216 369 244 397 86
+  l 256 398 361 503 95
+}
+a {
+  s 4422
+  b 192 431
+  e 276 505
+  l 192 431 212 451 100
+  l 223 452 244 473 95
+  l 247 474 254 481 63
+  l 255 484 276 505 91
+}
+a {
+  s 6872
+  b 192 311
+  e 330 450
+  l 192 311 215 334 83
+  l 216 336 275 395 73
+  l 286 396 307 417 91
+  l 308 428 330 450 96
+}
+a {
+  s 14437
+  b 192 224
+  e 472 505
+  l 192 224 223 255 84
+  l 224 257 255 288 84
+  l 257 289 277 309 90
+  l 288 310 308 330 90
+  l 309 342 362 395 85
+  l 374 396 395 417 91
+  l 396 428 417 449 95
+  l 428 450 449 471 95
+  l 450 483 472 505 91
+}
+a {
+  s 7306
+  b 192 399
+  e 308 505
+  l 192 399 244 451 94
+  l 256 452 277 473 91
+  l 280 474 283 477 100
+  l 284 481 308 505 92
+}
+a {
+  s 11180
+  b 193 290
+  e 396 505
+  l 193 290 212 309 100
+  l 223 310 280 367 76
+  l 281 369 332 420 88
+  l 333 422 335 424 100
+  l 336 429 341 434 83
+  l 342 451 396 505 87
+}
+a {
+  s 14437
+  b 224 192
+  e 505 472
+  l 224 192 255 223 84
+  l 257 224 288 255 84
+  l 289 257 309 277 90
+  l 310 288 330 308 90
+  l 342 309 395 362 85
+  l 396 374 417 395 91
+  l 428 396 449 417 95
+  l 450 428 471 449 95
+  l 483 450 505 472 91
+}
+a {
+  s 15173
+  b 251 186
+  e 505 450
+  l 251 186 309 244 92
+  l 310 256 334 280 84
+  l 336 281 362 307 78
+  l 374 308 395 329 91
+  l 396 341 422 367 81
+  l 423 369 473 419 94
+  l 474 422 477 425 75
+  l 481 426 505 450 96
+}
+a {
+  s 11180
+  b 290 193
+  e 505 396
+  l 290 193 309 212 100
+  l 310 223 367 280 76
+  l 369 281 420 332 88
+  l 422 333 424 335 100
+  l 429 336 434 341 83
+  l 451 342 505 396 87
+}
+a {
+  s 6872
+  b 311 192
+  e 450 330
+  l 311 192 334 215 83
+  l 336 216 395 275 73
+  l 396 286 417 307 91
+  l 428 308 450 330 96
+}
+a {
+  s 12134
+  b 343 191
+  e 503 361
+  l 343 191 367 215 80
+  l 369 216 397 244 86
+  l 398 256 503 361 95
+}
+a {
+  s 7306
+  b 399 192
+  e 505 308
+  l 399 192 451 244 94
+  l 452 256 473 277 91
+  l 474 280 477 283 100
+  l 481 284 505 308 92
+}
+a {
+  s 4422
+  b 431 192
+  e 505 276
+  l 431 192 451 212 100
+  l 452 223 473 244 95
+  l 474 247 481 254 63
+  l 484 255 505 276 91
+}
+x {
+  n 0
+}
+m {
+  n 0
+}
+#:eof
diff --git a/lib/galaxy/datatypes/test/complete.bed b/lib/galaxy/datatypes/test/complete.bed
new file mode 100644
index 0000000..e056e3b
--- /dev/null
+++ b/lib/galaxy/datatypes/test/complete.bed
@@ -0,0 +1,2 @@
+chr7	127475281	127491632	NM_000230	0	+	127486022	127488767	0	3	29,172,3225,	0,10713,13126,
+chr7	127486011	127488900	D49487	0	+	127486022	127488767	0	2	155,490,	0,2399
diff --git a/lib/galaxy/datatypes/test/drugbank_drugs.cml b/lib/galaxy/datatypes/test/drugbank_drugs.cml
new file mode 100644
index 0000000..a807bd8
--- /dev/null
+++ b/lib/galaxy/datatypes/test/drugbank_drugs.cml
@@ -0,0 +1,385 @@
+<?xml version="1.0"?>
+<cml xmlns="http://www.xml-cml.org/schema">
+ <molecule id="Goserelin">
+  <atomArray>
+   <atom id="a1" elementType="O" x2="12.854800" y2="-2.638200"/>
+   <atom id="a2" elementType="O" x2="13.972600" y2="-2.522600"/>
+   <atom id="a3" elementType="O" x2="10.176600" y2="-3.932700"/>
+   <atom id="a4" elementType="O" x2="11.201900" y2="-0.796100"/>
+   <atom id="a5" elementType="O" x2="8.780000" y2="-1.306400"/>
+   <atom id="a6" elementType="O" x2="16.858900" y2="-3.242100"/>
+   <atom id="a7" elementType="O" x2="10.356200" y2="1.216300"/>
+   <atom id="a8" elementType="O" x2="3.270200" y2="4.834100"/>
+   <atom id="a9" elementType="O" x2="2.350000" y2="8.273400"/>
+   <atom id="a10" elementType="O" x2="3.821300" y2="4.220100"/>
+   <atom id="a11" elementType="O" x2="5.217800" y2="1.593800"/>
+   <atom id="a12" elementType="O" x2="7.896000" y2="2.888300"/>
+   <atom id="a13" elementType="O" x2="7.127100" y2="0.535800"/>
+   <atom id="a14" elementType="O" x2="12.483400" y2="3.124900"/>
+   <atom id="a15" elementType="N" x2="13.149500" y2="-4.036400"/>
+   <atom id="a16" elementType="N" x2="11.240200" y2="-2.978400"/>
+   <atom id="a17" elementType="N" x2="15.208900" y2="-3.239300"/>
+   <atom id="a18" elementType="N" x2="10.138300" y2="-1.750300"/>
+   <atom id="a19" elementType="N" x2="15.622600" y2="-2.525500"/>
+   <atom id="a20" elementType="N" x2="12.380600" y2="-6.389000"/>
+   <atom id="a21" elementType="N" x2="9.292600" y2="0.261900"/>
+   <atom id="a22" elementType="N" x2="3.148500" y2="7.039100"/>
+   <atom id="a23" elementType="N" x2="4.333800" y2="5.788400"/>
+   <atom id="a24" elementType="N" x2="16.861300" y2="-1.813200"/>
+   <atom id="a25" elementType="N" x2="8.190700" y2="1.490000"/>
+   <atom id="a26" elementType="N" x2="5.179500" y2="3.776100"/>
+   <atom id="a27" elementType="N" x2="6.281400" y2="2.548000"/>
+   <atom id="a28" elementType="N" x2="11.317000" y2="-7.343300"/>
+   <atom id="a29" elementType="N" x2="12.675300" y2="-7.787300"/>
+   <atom id="a30" elementType="N" x2="3.859600" y2="0.709800"/>
+   <atom id="a31" elementType="N" x2="6.733500" y2="6.382300"/>
+   <atom id="a32" elementType="N" x2="5.950700" y2="7.463600"/>
+   <atom id="a33" elementType="C" x2="13.970100" y2="-3.951600">
+    <atomParity atomRefs4="a33 a39 a34 a15">1</atomParity>
+   </atom>
+   <atom id="a34" elementType="C" x2="14.304300" y2="-4.705800"/>
+   <atom id="a35" elementType="C" x2="13.690300" y2="-5.256900"/>
+   <atom id="a36" elementType="C" x2="12.976600" y2="-4.843100"/>
+   <atom id="a37" elementType="C" x2="12.598400" y2="-3.422400"/>
+   <atom id="a38" elementType="C" x2="11.791200" y2="-3.592500">
+    <atomParity atomRefs4="a38 a16 a37 a40">1</atomParity>
+   </atom>
+   <atom id="a39" elementType="C" x2="14.383800" y2="-3.237800"/>
+   <atom id="a40" elementType="C" x2="11.534900" y2="-4.376700"/>
+   <atom id="a41" elementType="C" x2="12.085900" y2="-4.990700"/>
+   <atom id="a42" elementType="C" x2="9.881900" y2="-2.534500">
+    <atomParity atomRefs4="a42 a18 a43 a44">1</atomParity>
+   </atom>
+   <atom id="a43" elementType="C" x2="10.433000" y2="-3.148600"/>
+   <atom id="a44" elementType="C" x2="9.074700" y2="-2.704600"/>
+   <atom id="a45" elementType="C" x2="8.818400" y2="-3.488900"/>
+   <atom id="a46" elementType="C" x2="11.829500" y2="-5.775000"/>
+   <atom id="a47" elementType="C" x2="8.011200" y2="-3.659000"/>
+   <atom id="a48" elementType="C" x2="9.369400" y2="-4.102900"/>
+   <atom id="a49" elementType="C" x2="9.587300" y2="-1.136300"/>
+   <atom id="a50" elementType="C" x2="9.843600" y2="-0.352100">
+    <atomParity atomRefs4="a50 a21 a51 a49">1</atomParity>
+   </atom>
+   <atom id="a51" elementType="C" x2="10.650900" y2="-0.182000"/>
+   <atom id="a52" elementType="C" x2="16.447600" y2="-2.526900"/>
+   <atom id="a53" elementType="C" x2="2.975600" y2="6.232400">
+    <atomParity atomRefs4="a53 a64 a58 a22">1</atomParity>
+   </atom>
+   <atom id="a54" elementType="C" x2="12.009100" y2="-0.625900"/>
+   <atom id="a55" elementType="C" x2="8.997900" y2="1.660200">
+    <atomParity atomRefs4="a55 a25 a56 a62">1</atomParity>
+   </atom>
+   <atom id="a56" elementType="C" x2="9.548900" y2="1.046100"/>
+   <atom id="a57" elementType="C" x2="12.124200" y2="-7.173200"/>
+   <atom id="a58" elementType="C" x2="2.154900" y2="6.147500"/>
+   <atom id="a59" elementType="C" x2="4.884800" y2="5.174400">
+    <atomParity atomRefs4="a59 a23 a68 a66">1</atomParity>
+   </atom>
+   <atom id="a60" elementType="C" x2="4.923200" y2="2.992000">
+    <atomParity atomRefs4="a60 a26 a73 a63">1</atomParity>
+   </atom>
+   <atom id="a61" elementType="C" x2="1.820700" y2="6.901900"/>
+   <atom id="a62" elementType="C" x2="9.254200" y2="2.444400"/>
+   <atom id="a63" elementType="C" x2="4.115900" y2="2.821900"/>
+   <atom id="a64" elementType="C" x2="3.526600" y2="5.618300"/>
+   <atom id="a65" elementType="C" x2="2.434800" y2="7.452800"/>
+   <atom id="a66" elementType="C" x2="5.692100" y2="5.344500"/>
+   <atom id="a67" elementType="C" x2="6.832400" y2="1.934000">
+    <atomParity atomRefs4="a67 a27 a74 a80">1</atomParity>
+   </atom>
+   <atom id="a68" elementType="C" x2="4.628500" y2="4.390200"/>
+   <atom id="a69" elementType="C" x2="12.179300" y2="-1.433200"/>
+   <atom id="a70" elementType="C" x2="12.816400" y2="-0.455700"/>
+   <atom id="a71" elementType="C" x2="11.838900" y2="0.181400"/>
+   <atom id="a72" elementType="C" x2="3.859600" y2="2.037600"/>
+   <atom id="a73" elementType="C" x2="5.474200" y2="2.377900"/>
+   <atom id="a74" elementType="C" x2="7.639700" y2="2.104100"/>
+   <atom id="a75" elementType="C" x2="10.061500" y2="2.614500"/>
+   <atom id="a76" elementType="C" x2="3.079000" y2="1.786200"/>
+   <atom id="a77" elementType="C" x2="5.948400" y2="6.128700"/>
+   <atom id="a78" elementType="C" x2="4.341100" y2="1.373700"/>
+   <atom id="a79" elementType="C" x2="3.079000" y2="0.961200"/>
+   <atom id="a80" elementType="C" x2="6.576100" y2="1.149800"/>
+   <atom id="a81" elementType="C" x2="2.364500" y2="2.198700"/>
+   <atom id="a82" elementType="C" x2="10.317800" y2="3.398700"/>
+   <atom id="a83" elementType="C" x2="10.612500" y2="2.000500"/>
+   <atom id="a84" elementType="C" x2="2.364500" y2="0.548700"/>
+   <atom id="a85" elementType="C" x2="5.464600" y2="6.797000"/>
+   <atom id="a86" elementType="C" x2="1.650000" y2="1.786200"/>
+   <atom id="a87" elementType="C" x2="1.650000" y2="0.961200"/>
+   <atom id="a88" elementType="C" x2="11.125100" y2="3.568800"/>
+   <atom id="a89" elementType="C" x2="11.419800" y2="2.170600"/>
+   <atom id="a90" elementType="C" x2="6.734900" y2="7.207300"/>
+   <atom id="a91" elementType="C" x2="11.676100" y2="2.954800"/>
+  </atomArray>
+  <bondArray>
+   <bond atomRefs2="a1 a37" order="2"/>
+   <bond atomRefs2="a2 a39" order="2"/>
+   <bond atomRefs2="a3 a43" order="2"/>
+   <bond atomRefs2="a4 a51" order="1"/>
+   <bond atomRefs2="a4 a54" order="1"/>
+   <bond atomRefs2="a5 a49" order="2"/>
+   <bond atomRefs2="a6 a52" order="2"/>
+   <bond atomRefs2="a7 a56" order="2"/>
+   <bond atomRefs2="a8 a64" order="2"/>
+   <bond atomRefs2="a9 a65" order="2"/>
+   <bond atomRefs2="a10 a68" order="2"/>
+   <bond atomRefs2="a11 a73" order="2"/>
+   <bond atomRefs2="a12 a74" order="2"/>
+   <bond atomRefs2="a13 a80" order="1"/>
+   <bond atomRefs2="a14 a91" order="1"/>
+   <bond atomRefs2="a15 a33" order="1"/>
+   <bond atomRefs2="a15 a36" order="1"/>
+   <bond atomRefs2="a15 a37" order="1"/>
+   <bond atomRefs2="a38 a16" order="1"/>
+   <bond atomRefs2="a16 a43" order="1"/>
+   <bond atomRefs2="a17 a19" order="1"/>
+   <bond atomRefs2="a17 a39" order="1"/>
+   <bond atomRefs2="a42 a18" order="1"/>
+   <bond atomRefs2="a18 a49" order="1"/>
+   <bond atomRefs2="a19 a52" order="1"/>
+   <bond atomRefs2="a20 a46" order="1"/>
+   <bond atomRefs2="a20 a57" order="2"/>
+   <bond atomRefs2="a50 a21" order="1"/>
+   <bond atomRefs2="a21 a56" order="1"/>
+   <bond atomRefs2="a22 a53" order="1"/>
+   <bond atomRefs2="a22 a65" order="1"/>
+   <bond atomRefs2="a59 a23" order="1"/>
+   <bond atomRefs2="a23 a64" order="1"/>
+   <bond atomRefs2="a24 a52" order="1"/>
+   <bond atomRefs2="a55 a25" order="1"/>
+   <bond atomRefs2="a25 a74" order="1"/>
+   <bond atomRefs2="a60 a26" order="1"/>
+   <bond atomRefs2="a26 a68" order="1"/>
+   <bond atomRefs2="a67 a27" order="1"/>
+   <bond atomRefs2="a27 a73" order="1"/>
+   <bond atomRefs2="a28 a57" order="1"/>
+   <bond atomRefs2="a29 a57" order="1"/>
+   <bond atomRefs2="a30 a78" order="1"/>
+   <bond atomRefs2="a30 a79" order="1"/>
+   <bond atomRefs2="a31 a77" order="1"/>
+   <bond atomRefs2="a31 a90" order="1"/>
+   <bond atomRefs2="a32 a85" order="1"/>
+   <bond atomRefs2="a32 a90" order="2"/>
+   <bond atomRefs2="a33 a34" order="1"/>
+   <bond atomRefs2="a33 a39" order="1"/>
+   <bond atomRefs2="a34 a35" order="1"/>
+   <bond atomRefs2="a35 a36" order="1"/>
+   <bond atomRefs2="a37 a38" order="1"/>
+   <bond atomRefs2="a38 a40" order="1"/>
+   <bond atomRefs2="a40 a41" order="1"/>
+   <bond atomRefs2="a41 a46" order="1"/>
+   <bond atomRefs2="a42 a43" order="1"/>
+   <bond atomRefs2="a42 a44" order="1"/>
+   <bond atomRefs2="a44 a45" order="1"/>
+   <bond atomRefs2="a45 a47" order="1"/>
+   <bond atomRefs2="a45 a48" order="1"/>
+   <bond atomRefs2="a49 a50" order="1"/>
+   <bond atomRefs2="a50 a51" order="1"/>
+   <bond atomRefs2="a53 a58" order="1"/>
+   <bond atomRefs2="a53 a64" order="1"/>
+   <bond atomRefs2="a54 a69" order="1"/>
+   <bond atomRefs2="a54 a70" order="1"/>
+   <bond atomRefs2="a54 a71" order="1"/>
+   <bond atomRefs2="a55 a56" order="1"/>
+   <bond atomRefs2="a55 a62" order="1"/>
+   <bond atomRefs2="a58 a61" order="1"/>
+   <bond atomRefs2="a59 a66" order="1"/>
+   <bond atomRefs2="a59 a68" order="1"/>
+   <bond atomRefs2="a60 a63" order="1"/>
+   <bond atomRefs2="a60 a73" order="1"/>
+   <bond atomRefs2="a61 a65" order="1"/>
+   <bond atomRefs2="a62 a75" order="1"/>
+   <bond atomRefs2="a63 a72" order="1"/>
+   <bond atomRefs2="a66 a77" order="1"/>
+   <bond atomRefs2="a67 a74" order="1"/>
+   <bond atomRefs2="a67 a80" order="1"/>
+   <bond atomRefs2="a72 a76" order="1"/>
+   <bond atomRefs2="a72 a78" order="2"/>
+   <bond atomRefs2="a75 a82" order="2"/>
+   <bond atomRefs2="a75 a83" order="1"/>
+   <bond atomRefs2="a76 a79" order="1"/>
+   <bond atomRefs2="a76 a81" order="2"/>
+   <bond atomRefs2="a77 a85" order="2"/>
+   <bond atomRefs2="a79 a84" order="2"/>
+   <bond atomRefs2="a81 a86" order="1"/>
+   <bond atomRefs2="a82 a88" order="1"/>
+   <bond atomRefs2="a83 a89" order="2"/>
+   <bond atomRefs2="a84 a87" order="1"/>
+   <bond atomRefs2="a86 a87" order="2"/>
+   <bond atomRefs2="a88 a91" order="2"/>
+   <bond atomRefs2="a89 a91" order="1"/>
+  </bondArray>
+ </molecule>
+ <molecule id="Desmopressin">
+  <atomArray>
+   <atom id="a1" elementType="N" x2="0.000000" y2="-7.864600"/>
+   <atom id="a2" elementType="C" x2="0.674100" y2="-7.460100"/>
+   <atom id="a3" elementType="C" x2="1.393200" y2="-7.864600"/>
+   <atom id="a4" elementType="N" x2="2.112200" y2="-7.460100"/>
+   <atom id="a5" elementType="C" x2="2.831300" y2="-7.864600"/>
+   <atom id="a6" elementType="C" x2="3.550300" y2="-7.460100">
+    <atomParity atomRefs4="a6 a12 a7 a5">1</atomParity>
+   </atom>
+   <atom id="a7" elementType="N" x2="4.269300" y2="-7.864600"/>
+   <atom id="a8" elementType="C" x2="4.943500" y2="-7.460100"/>
+   <atom id="a9" elementType="O" x2="5.662500" y2="-7.864600"/>
+   <atom id="a10" elementType="O" x2="0.674100" y2="-6.651200"/>
+   <atom id="a11" elementType="O" x2="2.831300" y2="-8.718400"/>
+   <atom id="a12" elementType="C" x2="3.550300" y2="-6.651200"/>
+   <atom id="a13" elementType="C" x2="2.831300" y2="-6.246700"/>
+   <atom id="a14" elementType="C" x2="2.831300" y2="-5.437800"/>
+   <atom id="a15" elementType="N" x2="2.112200" y2="-5.033300"/>
+   <atom id="a16" elementType="C" x2="4.943500" y2="-6.651200">
+    <atomParity atomRefs4="a16 a8 a17 a18">1</atomParity>
+   </atom>
+   <atom id="a17" elementType="C" x2="4.314300" y2="-6.156800"/>
+   <atom id="a18" elementType="N" x2="5.617600" y2="-6.156800"/>
+   <atom id="a19" elementType="C" x2="4.539000" y2="-5.392900"/>
+   <atom id="a20" elementType="C" x2="5.347900" y2="-5.392900"/>
+   <atom id="a21" elementType="C" x2="6.336600" y2="-6.561300"/>
+   <atom id="a22" elementType="C" x2="7.055700" y2="-6.156800">
+    <atomParity atomRefs4="a22 a21 a28 a23">1</atomParity>
+   </atom>
+   <atom id="a23" elementType="N" x2="7.774700" y2="-6.561300"/>
+   <atom id="a24" elementType="C" x2="8.448800" y2="-6.156800"/>
+   <atom id="a25" elementType="C" x2="9.167800" y2="-6.561300">
+    <atomParity atomRefs4="a25 a32 a26 a24">1</atomParity>
+   </atom>
+   <atom id="a26" elementType="N" x2="9.886900" y2="-6.156800"/>
+   <atom id="a27" elementType="O" x2="6.336600" y2="-7.415200"/>
+   <atom id="a28" elementType="C" x2="7.055700" y2="-5.347900"/>
+   <atom id="a29" elementType="S" x2="6.336600" y2="-4.943500"/>
+   <atom id="a30" elementType="S" x2="6.336600" y2="-4.134500"/>
+   <atom id="a31" elementType="O" x2="8.448800" y2="-5.347900"/>
+   <atom id="a32" elementType="C" x2="9.167800" y2="-7.415200"/>
+   <atom id="a33" elementType="C" x2="9.886900" y2="-7.819700"/>
+   <atom id="a34" elementType="O" x2="9.886900" y2="-8.628600"/>
+   <atom id="a35" elementType="N" x2="10.606000" y2="-7.415200"/>
+   <atom id="a36" elementType="C" x2="9.886900" y2="-5.347900"/>
+   <atom id="a37" elementType="C" x2="10.606000" y2="-4.943500">
+    <atomParity atomRefs4="a37 a39 a44 a36">1</atomParity>
+   </atom>
+   <atom id="a38" elementType="O" x2="9.167800" y2="-4.943500"/>
+   <atom id="a39" elementType="C" x2="11.325000" y2="-5.347900"/>
+   <atom id="a40" elementType="C" x2="12.044100" y2="-4.943500"/>
+   <atom id="a41" elementType="C" x2="12.763100" y2="-5.347900"/>
+   <atom id="a42" elementType="N" x2="13.482200" y2="-4.943500"/>
+   <atom id="a43" elementType="O" x2="12.763100" y2="-6.201800"/>
+   <atom id="a44" elementType="N" x2="10.606000" y2="-4.134500"/>
+   <atom id="a45" elementType="C" x2="11.325000" y2="-2.876200"/>
+   <atom id="a46" elementType="C" x2="11.325000" y2="-3.730000"/>
+   <atom id="a47" elementType="C" x2="10.606000" y2="-2.471700"/>
+   <atom id="a48" elementType="C" x2="9.886900" y2="-2.876200">
+    <atomParity atomRefs4="a48 a47 a61 a49">1</atomParity>
+   </atom>
+   <atom id="a49" elementType="C" x2="9.886900" y2="-3.730000"/>
+   <atom id="a50" elementType="C" x2="12.763100" y2="-2.876200"/>
+   <atom id="a51" elementType="C" x2="12.763100" y2="-3.730000"/>
+   <atom id="a52" elementType="C" x2="12.044100" y2="-2.471700"/>
+   <atom id="a53" elementType="C" x2="12.044100" y2="-4.134500"/>
+   <atom id="a54" elementType="N" x2="7.055700" y2="-2.876200"/>
+   <atom id="a55" elementType="C" x2="6.336600" y2="-2.471700"/>
+   <atom id="a56" elementType="C" x2="5.617600" y2="-2.876200"/>
+   <atom id="a57" elementType="C" x2="5.617600" y2="-3.730000"/>
+   <atom id="a58" elementType="C" x2="8.448800" y2="-2.876200"/>
+   <atom id="a59" elementType="O" x2="8.448800" y2="-3.730000"/>
+   <atom id="a60" elementType="C" x2="7.774700" y2="-2.471700">
+    <atomParity atomRefs4="a60 a63 a54 a58">1</atomParity>
+   </atom>
+   <atom id="a61" elementType="N" x2="9.167800" y2="-2.471700"/>
+   <atom id="a62" elementType="O" x2="9.167800" y2="-4.134500"/>
+   <atom id="a63" elementType="C" x2="7.774700" y2="-1.662800"/>
+   <atom id="a64" elementType="C" x2="9.167800" y2="-1.662800"/>
+   <atom id="a65" elementType="C" x2="8.448800" y2="-1.258300"/>
+   <atom id="a66" elementType="C" x2="9.886900" y2="-1.258300"/>
+   <atom id="a67" elementType="C" x2="9.886900" y2="-0.404500"/>
+   <atom id="a68" elementType="C" x2="8.448800" y2="-0.404500"/>
+   <atom id="a69" elementType="C" x2="9.167800" y2="0.000000"/>
+   <atom id="a70" elementType="O" x2="6.336600" y2="-1.662800"/>
+   <atom id="a71" elementType="O" x2="10.606000" y2="0.000000"/>
+   <atom id="a72" elementType="C" x2="1.393200" y2="-5.437800"/>
+   <atom id="a73" elementType="N" x2="1.393200" y2="-6.246700"/>
+   <atom id="a74" elementType="N" x2="0.674100" y2="-5.033300"/>
+  </atomArray>
+  <bondArray>
+   <bond atomRefs2="a1 a2" order="1"/>
+   <bond atomRefs2="a2 a3" order="1"/>
+   <bond atomRefs2="a2 a10" order="2"/>
+   <bond atomRefs2="a3 a4" order="1"/>
+   <bond atomRefs2="a4 a5" order="1"/>
+   <bond atomRefs2="a5 a6" order="1"/>
+   <bond atomRefs2="a5 a11" order="2"/>
+   <bond atomRefs2="a6 a7" order="1"/>
+   <bond atomRefs2="a6 a12" order="1"/>
+   <bond atomRefs2="a7 a8" order="1"/>
+   <bond atomRefs2="a8 a9" order="2"/>
+   <bond atomRefs2="a16 a8" order="1"/>
+   <bond atomRefs2="a12 a13" order="1"/>
+   <bond atomRefs2="a13 a14" order="1"/>
+   <bond atomRefs2="a14 a15" order="1"/>
+   <bond atomRefs2="a15 a72" order="1"/>
+   <bond atomRefs2="a16 a17" order="1"/>
+   <bond atomRefs2="a16 a18" order="1"/>
+   <bond atomRefs2="a17 a19" order="1"/>
+   <bond atomRefs2="a18 a20" order="1"/>
+   <bond atomRefs2="a18 a21" order="1"/>
+   <bond atomRefs2="a19 a20" order="1"/>
+   <bond atomRefs2="a22 a21" order="1"/>
+   <bond atomRefs2="a21 a27" order="2"/>
+   <bond atomRefs2="a22 a23" order="1"/>
+   <bond atomRefs2="a22 a28" order="1"/>
+   <bond atomRefs2="a23 a24" order="1"/>
+   <bond atomRefs2="a24 a25" order="1"/>
+   <bond atomRefs2="a24 a31" order="2"/>
+   <bond atomRefs2="a25 a26" order="1"/>
+   <bond atomRefs2="a25 a32" order="1"/>
+   <bond atomRefs2="a26 a36" order="1"/>
+   <bond atomRefs2="a28 a29" order="1"/>
+   <bond atomRefs2="a29 a30" order="1"/>
+   <bond atomRefs2="a30 a57" order="1"/>
+   <bond atomRefs2="a32 a33" order="1"/>
+   <bond atomRefs2="a33 a34" order="2"/>
+   <bond atomRefs2="a33 a35" order="1"/>
+   <bond atomRefs2="a36 a37" order="1"/>
+   <bond atomRefs2="a36 a38" order="2"/>
+   <bond atomRefs2="a37 a39" order="1"/>
+   <bond atomRefs2="a37 a44" order="1"/>
+   <bond atomRefs2="a39 a40" order="1"/>
+   <bond atomRefs2="a40 a41" order="1"/>
+   <bond atomRefs2="a41 a42" order="1"/>
+   <bond atomRefs2="a41 a43" order="2"/>
+   <bond atomRefs2="a44 a49" order="1"/>
+   <bond atomRefs2="a45 a47" order="1"/>
+   <bond atomRefs2="a45 a52" order="1"/>
+   <bond atomRefs2="a45 a46" order="2"/>
+   <bond atomRefs2="a46 a53" order="1"/>
+   <bond atomRefs2="a48 a47" order="1"/>
+   <bond atomRefs2="a48 a61" order="1"/>
+   <bond atomRefs2="a48 a49" order="1"/>
+   <bond atomRefs2="a49 a62" order="2"/>
+   <bond atomRefs2="a50 a51" order="1"/>
+   <bond atomRefs2="a50 a52" order="2"/>
+   <bond atomRefs2="a51 a53" order="2"/>
+   <bond atomRefs2="a54 a55" order="1"/>
+   <bond atomRefs2="a54 a60" order="1"/>
+   <bond atomRefs2="a55 a56" order="1"/>
+   <bond atomRefs2="a55 a70" order="2"/>
+   <bond atomRefs2="a56 a57" order="1"/>
+   <bond atomRefs2="a58 a59" order="2"/>
+   <bond atomRefs2="a58 a60" order="1"/>
+   <bond atomRefs2="a58 a61" order="1"/>
+   <bond atomRefs2="a60 a63" order="1"/>
+   <bond atomRefs2="a63 a65" order="1"/>
+   <bond atomRefs2="a64 a66" order="2"/>
+   <bond atomRefs2="a64 a65" order="1"/>
+   <bond atomRefs2="a65 a68" order="2"/>
+   <bond atomRefs2="a66 a67" order="1"/>
+   <bond atomRefs2="a67 a69" order="2"/>
+   <bond atomRefs2="a67 a71" order="1"/>
+   <bond atomRefs2="a68 a69" order="1"/>
+   <bond atomRefs2="a72 a73" order="2"/>
+   <bond atomRefs2="a72 a74" order="1"/>
+  </bondArray>
+ </molecule>
+</cml>
diff --git a/lib/galaxy/datatypes/test/drugbank_drugs.inchi b/lib/galaxy/datatypes/test/drugbank_drugs.inchi
new file mode 100644
index 0000000..4a9224d
--- /dev/null
+++ b/lib/galaxy/datatypes/test/drugbank_drugs.inchi
@@ -0,0 +1,2 @@
+InChI=1S/C59H84N18O14/c1-31(2)22-40(49(82)68-39(12-8-20-64-57(60)61)56(89)77-21-9-13-46(77)55(88)75-76-58(62)90)69-54(87)45(29-91-59(3,4)5)74-50(83)41(23-32-14-16-35(79)17-15-32)70-53(86)44(28-78)73-51(84)42(24-33-26-65-37-11-7-6-10-36(33)37)71-52(85)43(25-34-27-63-30-66-34)72-48(81)38-18-19-47(80)67-38/h6-7,10-11,14-17,26-27,30-31,38-46,65,78-79H,8-9,12-13,18-25,28-29H2,1-5H3,(H,63,66)(H,67,80)(H,68,82)(H,69,87)(H,70,86)(H,71,85)(H,72,81)(H,73,84)(H,74,83)(H,75,88)(H4,60,61,64)(H3,62,76 [...]
+InChI=1S/C46H64N14O12S2/c47-35(62)15-14-29-40(67)58-32(22-36(48)63)43(70)59-33(45(72)60-18-5-9-34(60)44(71)56-28(8-4-17-52-46(50)51)39(66)53-23-37(49)64)24-74-73-19-16-38(65)54-30(21-26-10-12-27(61)13-11-26)41(68)57-31(42(69)55-29)20-25-6-2-1-3-7-25/h1-3,6-7,10-13,28-34,61H,4-5,8-9,14-24H2,(H2,47,62)(H2,48,63)(H2,49,64)(H,53,66)(H,54,65)(H,55,69)(H,56,71)(H,57,68)(H,58,67)(H,59,70)(H4,50,51,52)/t28-,29-,30-,31-,32-,33-,34-/m0/s1
diff --git a/lib/galaxy/datatypes/test/drugbank_drugs.mol2 b/lib/galaxy/datatypes/test/drugbank_drugs.mol2
new file mode 100644
index 0000000..356242d
--- /dev/null
+++ b/lib/galaxy/datatypes/test/drugbank_drugs.mol2
@@ -0,0 +1,354 @@
+@<TRIPOS>MOLECULE
+Goserelin
+ 91 96 0 0 0
+SMALL
+GASTEIGER
+
+@<TRIPOS>ATOM
+      1 O          12.8548   -2.6382    0.0000 O.2     4  UNK4       -0.2730
+      2 O          13.9726   -2.5226    0.0000 O.2     4  UNK4       -0.2699
+      3 O          10.1766   -3.9327    0.0000 O.2     4  UNK4       -0.2715
+      4 O          11.2019   -0.7961    0.0000 O.3     4  UNK4       -0.3562
+      5 O           8.7800   -1.3064    0.0000 O.2     4  UNK4       -0.2714
+      6 O          16.8589   -3.2421    0.0000 O.2     4  UNK4       -0.2457
+      7 O          10.3562    1.2163    0.0000 O.2     4  UNK4       -0.2715
+      8 O           3.2702    4.8341    0.0000 O.2     1  UNK1       -0.2715
+      9 O           2.3500    8.2734    0.0000 O.2     1  UNK1       -0.2733
+     10 O           3.8213    4.2201    0.0000 O.2     2  HIS2       -0.2715
+     11 O           5.2178    1.5938    0.0000 O.2     3  TRP3       -0.2715
+     12 O           7.8960    2.8883    0.0000 O.2     4  UNK4       -0.2714
+     13 O           7.1271    0.5358    0.0000 O.3     4  UNK4       -0.2179
+     14 O          12.4834    3.1249    0.0000 O.3     4  UNK4       -0.2866
+     15 N          13.1495   -4.0364    0.0000 N.am    4  UNK4       -0.2715
+     16 N          11.2402   -2.9784    0.0000 N.am    4  UNK4       -0.1964
+     17 N          15.2089   -3.2393    0.0000 N.am    4  UNK4       -0.0850
+     18 N          10.1383   -1.7503    0.0000 N.am    4  UNK4       -0.1963
+     19 N          15.6226   -2.5255    0.0000 N.am    4  UNK4       -0.0678
+     20 N          12.3806   -6.3890    0.0000 N.pl3   4  UNK4       -0.0865
+     21 N           9.2926    0.2619    0.0000 N.am    4  UNK4       -0.1937
+     22 N           3.1485    7.0391    0.0000 N.am    1  UNK1       -0.1978
+     23 N           4.3338    5.7884    0.0000 N.am    2  HIS2       -0.1959
+     24 N          16.8613   -1.8132    0.0000 N.am    4  UNK4       -0.0665
+     25 N           8.1907    1.4900    0.0000 N.am    4  UNK4       -0.1959
+     26 N           5.1795    3.7761    0.0000 N.am    3  TRP3       -0.1960
+     27 N           6.2814    2.5480    0.0000 N.am    4  UNK4       -0.1936
+     28 N          11.3170   -7.3433    0.0000 N.pl3   4  UNK4        0.1354
+     29 N          12.6753   -7.7873    0.0000 N.pl3   4  UNK4        0.1354
+     30 NE1         3.8596    0.7098    0.0000 N.ar    3  TRP3       -0.2442
+     31 ND1         6.7335    6.3823    0.0000 N.ar    2  HIS2       -0.2267
+     32 NE2         5.9507    7.4636    0.0000 N.ar    2  HIS2       -0.2212
+     33 C          13.9701   -3.9516    0.0000 C.3     4  UNK4        0.1552
+     34 C          14.3043   -4.7058    0.0000 C.3     4  UNK4        0.0311
+     35 C          13.6903   -5.2569    0.0000 C.3     4  UNK4        0.0237
+     36 C          12.9766   -4.8431    0.0000 C.3     4  UNK4        0.0939
+     37 C          12.5984   -3.4224    0.0000 C.2     4  UNK4        0.2458
+     38 C          11.7912   -3.5925    0.0000 C.3     4  UNK4        0.1714
+     39 C          14.3838   -3.2378    0.0000 C.2     4  UNK4        0.2788
+     40 C          11.5349   -4.3767    0.0000 C.3     4  UNK4        0.0347
+     41 C          12.0859   -4.9907    0.0000 C.3     4  UNK4        0.0492
+     42 C           9.8819   -2.5345    0.0000 C.3     4  UNK4        0.1728
+     43 C          10.4330   -3.1486    0.0000 C.2     4  UNK4        0.2616
+     44 C           9.0747   -2.7046    0.0000 C.3     4  UNK4        0.0311
+     45 C           8.8184   -3.4889    0.0000 C.3     4  UNK4        0.0022
+     46 C          11.8295   -5.7750    0.0000 C.3     4  UNK4        0.2205
+     47 C           8.0112   -3.6590    0.0000 C.3     4  UNK4        0.0001
+     48 C           9.3694   -4.1029    0.0000 C.3     4  UNK4        0.0001
+     49 C           9.5873   -1.1363    0.0000 C.2     4  UNK4        0.2642
+     50 C           9.8436   -0.3521    0.0000 C.3     4  UNK4        0.2021
+     51 C          10.6509   -0.1820    0.0000 C.3     4  UNK4        0.1729
+     52 C          16.4476   -2.5269    0.0000 C.2     4  UNK4        0.3786
+     53 C           2.9756    6.2324    0.0000 C.3     1  UNK1        0.1732
+     54 C          12.0091   -0.6259    0.0000 C.3     4  UNK4        0.0931
+     55 C           8.9979    1.6602    0.0000 C.3     4  UNK4        0.1771
+     56 C           9.5489    1.0461    0.0000 C.2     4  UNK4        0.2620
+     57 C          12.1242   -7.1732    0.0000 C.cat   4  UNK4        0.5346
+     58 C           2.1549    6.1475    0.0000 C.3     1  UNK1        0.0407
+     59 CA          4.8848    5.1744    0.0000 C.3     2  HIS2        0.1787
+     60 CA          4.9232    2.9920    0.0000 C.3     3  TRP3        0.1771
+     61 C           1.8207    6.9019    0.0000 C.3     1  UNK1        0.0891
+     62 C           9.2542    2.4444    0.0000 C.3     4  UNK4        0.0574
+     63 CB          4.1159    2.8219    0.0000 C.3     3  TRP3        0.0590
+     64 C           3.5266    5.6183    0.0000 C.2     1  UNK1        0.2616
+     65 C           2.4348    7.4528    0.0000 C.2     1  UNK1        0.2418
+     66 CB          5.6921    5.3445    0.0000 C.3     2  HIS2        0.0785
+     67 C           6.8324    1.9340    0.0000 C.3     4  UNK4        0.2055
+     68 C           4.6285    4.3902    0.0000 C.2     2  HIS2        0.2620
+     69 C          12.1793   -1.4332    0.0000 C.3     4  UNK4        0.0296
+     70 C          12.8164   -0.4557    0.0000 C.3     4  UNK4        0.0296
+     71 C          11.8389    0.1814    0.0000 C.3     4  UNK4        0.0296
+     72 CG          3.8596    2.0376    0.0000 C.ar    3  TRP3        0.0006
+     73 C           5.4742    2.3779    0.0000 C.2     3  TRP3        0.2620
+     74 C           7.6397    2.1041    0.0000 C.2     4  UNK4        0.2643
+     75 C          10.0615    2.6145    0.0000 C.ar    4  UNK4       -0.0198
+     76 CD2         3.0790    1.7862    0.0000 C.ar    3  TRP3        0.0152
+     77 CG          5.9484    6.1287    0.0000 C.ar    2  HIS2        0.0821
+     78 CD1         4.3411    1.3737    0.0000 C.ar    3  TRP3        0.0946
+     79 CE2         3.0790    0.9612    0.0000 C.ar    3  TRP3        0.0810
+     80 C           6.5761    1.1498    0.0000 C.3     4  UNK4        0.2130
+     81 CE3         2.3645    2.1987    0.0000 C.ar    3  TRP3        0.0012
+     82 C          10.3178    3.3987    0.0000 C.ar    4  UNK4       -0.0009
+     83 C          10.6125    2.0005    0.0000 C.ar    4  UNK4       -0.0009
+     84 CZ2         2.3645    0.5487    0.0000 C.ar    3  TRP3        0.0191
+     85 CD2         5.4646    6.7970    0.0000 C.ar    2  HIS2        0.1154
+     86 CZ3         1.6500    1.7862    0.0000 C.ar    3  TRP3        0.0001
+     87 CH2         1.6500    0.9612    0.0000 C.ar    3  TRP3        0.0015
+     88 C          11.1251    3.5688    0.0000 C.ar    4  UNK4        0.0417
+     89 C          11.4198    2.1706    0.0000 C.ar    4  UNK4        0.0417
+     90 CE1         6.7349    7.2073    0.0000 C.ar    2  HIS2        0.1986
+     91 C          11.6761    2.9548    0.0000 C.ar    4  UNK4        0.1957
+@<TRIPOS>BOND
+     1     1    37    2
+     2     2    39    2
+     3     3    43    2
+     4     4    51    1
+     5     4    54    1
+     6     5    49    2
+     7     6    52    2
+     8     7    56    2
+     9     8    64    2
+    10     9    65    2
+    11    10    68    2
+    12    11    73    2
+    13    12    74    2
+    14    13    80    1
+    15    14    91    1
+    16    15    33    1
+    17    15    36    1
+    18    15    37   am
+    19    38    16    1
+    20    16    43   am
+    21    17    19    1
+    22    17    39   am
+    23    42    18    1
+    24    18    49   am
+    25    19    52   am
+    26    20    46    1
+    27    20    57    2
+    28    50    21    1
+    29    21    56   am
+    30    22    53    1
+    31    22    65   am
+    32    59    23    1
+    33    23    64   am
+    34    24    52   am
+    35    55    25    1
+    36    25    74   am
+    37    60    26    1
+    38    26    68   am
+    39    67    27    1
+    40    27    73   am
+    41    28    57    1
+    42    29    57    1
+    43    30    78   ar
+    44    30    79   ar
+    45    31    77   ar
+    46    31    90   ar
+    47    32    85   ar
+    48    32    90   ar
+    49    33    34    1
+    50    33    39    1
+    51    34    35    1
+    52    35    36    1
+    53    37    38    1
+    54    38    40    1
+    55    40    41    1
+    56    41    46    1
+    57    42    43    1
+    58    42    44    1
+    59    44    45    1
+    60    45    47    1
+    61    45    48    1
+    62    49    50    1
+    63    50    51    1
+    64    53    58    1
+    65    53    64    1
+    66    54    69    1
+    67    54    70    1
+    68    54    71    1
+    69    55    56    1
+    70    55    62    1
+    71    58    61    1
+    72    59    66    1
+    73    59    68    1
+    74    60    63    1
+    75    60    73    1
+    76    61    65    1
+    77    62    75    1
+    78    63    72    1
+    79    66    77    1
+    80    67    74    1
+    81    67    80    1
+    82    72    76   ar
+    83    72    78   ar
+    84    75    82   ar
+    85    75    83   ar
+    86    76    79   ar
+    87    76    81   ar
+    88    77    85   ar
+    89    79    84   ar
+    90    81    86   ar
+    91    82    88   ar
+    92    83    89   ar
+    93    84    87   ar
+    94    86    87   ar
+    95    88    91   ar
+    96    89    91   ar
+@<TRIPOS>MOLECULE
+Desmopressin
+ 74 77 0 0 0
+SMALL
+GASTEIGER
+
+@<TRIPOS>ATOM
+      1 N           0.0000   -7.8646    0.0000 N.am    1  LIG1       -0.0862
+      2 C           0.6741   -7.4601    0.0000 C.2     1  LIG1        0.2828
+      3 C           1.3932   -7.8646    0.0000 C.3     1  LIG1        0.2031
+      4 N           2.1122   -7.4601    0.0000 N.am    1  LIG1       -0.1939
+      5 C           2.8313   -7.8646    0.0000 C.2     1  LIG1        0.2617
+      6 C           3.5503   -7.4601    0.0000 C.3     1  LIG1        0.1729
+      7 N           4.2693   -7.8646    0.0000 N.am    1  LIG1       -0.1964
+      8 C           4.9435   -7.4601    0.0000 C.2     1  LIG1        0.2598
+      9 O           5.6625   -7.8646    0.0000 O.2     1  LIG1       -0.2715
+     10 O           0.6741   -6.6512    0.0000 O.2     1  LIG1       -0.2697
+     11 O           2.8313   -8.7184    0.0000 O.2     1  LIG1       -0.2715
+     12 C           3.5503   -6.6512    0.0000 C.3     1  LIG1        0.0348
+     13 C           2.8313   -6.2467    0.0000 C.3     1  LIG1        0.0492
+     14 C           2.8313   -5.4378    0.0000 C.3     1  LIG1        0.2205
+     15 N           2.1122   -5.0333    0.0000 N.pl3   1  LIG1       -0.0865
+     16 C           4.9435   -6.6512    0.0000 C.3     1  LIG1        0.1536
+     17 C           4.3143   -6.1568    0.0000 C.3     1  LIG1        0.0310
+     18 N           5.6176   -6.1568    0.0000 N.am    1  LIG1       -0.2715
+     19 C           4.5390   -5.3929    0.0000 C.3     1  LIG1        0.0237
+     20 C           5.3479   -5.3929    0.0000 C.3     1  LIG1        0.0939
+     21 C           6.3366   -6.5613    0.0000 C.2     1  LIG1        0.2467
+     22 C           7.0557   -6.1568    0.0000 C.3     1  LIG1        0.1828
+     23 N           7.7747   -6.5613    0.0000 N.am    1  LIG1       -0.1954
+     24 C           8.4488   -6.1568    0.0000 C.2     1  LIG1        0.2621
+     25 C           9.1678   -6.5613    0.0000 C.3     1  LIG1        0.1819
+     26 N           9.8869   -6.1568    0.0000 N.am    1  LIG1       -0.1958
+     27 O           6.3366   -7.4152    0.0000 O.2     1  LIG1       -0.2730
+     28 C           7.0557   -5.3479    0.0000 C.3     1  LIG1        0.0996
+     29 S           6.3366   -4.9435    0.0000 S.3     1  LIG1       -0.0798
+     30 S           6.3366   -4.1345    0.0000 S.3     1  LIG1       -0.0816
+     31 O           8.4488   -5.3479    0.0000 O.2     1  LIG1       -0.2715
+     32 C           9.1678   -7.4152    0.0000 C.3     1  LIG1        0.1195
+     33 C           9.8869   -7.8197    0.0000 C.2     1  LIG1        0.2630
+     34 O           9.8869   -8.6286    0.0000 O.2     1  LIG1       -0.2716
+     35 N          10.6060   -7.4152    0.0000 N.am    1  LIG1       -0.0877
+     36 C           9.8869   -5.3479    0.0000 C.2     1  LIG1        0.2616
+     37 C          10.6060   -4.9435    0.0000 C.3     1  LIG1        0.1733
+     38 O           9.1678   -4.9435    0.0000 O.2     1  LIG1       -0.2715
+     39 C          11.3250   -5.3479    0.0000 C.3     1  LIG1        0.0408
+     40 C          12.0441   -4.9435    0.0000 C.3     1  LIG1        0.0908
+     41 C          12.7631   -5.3479    0.0000 C.2     1  LIG1        0.2608
+     42 N          13.4822   -4.9435    0.0000 N.am    1  LIG1       -0.0878
+     43 O          12.7631   -6.2018    0.0000 O.2     1  LIG1       -0.2717
+     44 N          10.6060   -4.1345    0.0000 N.am    1  LIG1       -0.1963
+     45 C          11.3250   -2.8762    0.0000 C.ar    1  LIG1       -0.0200
+     46 C          11.3250   -3.7300    0.0000 C.ar    1  LIG1       -0.0042
+     47 C          10.6060   -2.4717    0.0000 C.3     1  LIG1        0.0574
+     48 C           9.8869   -2.8762    0.0000 C.3     1  LIG1        0.1771
+     49 C           9.8869   -3.7300    0.0000 C.2     1  LIG1        0.2619
+     50 C          12.7631   -2.8762    0.0000 C.ar    1  LIG1       -0.0003
+     51 C          12.7631   -3.7300    0.0000 C.ar    1  LIG1       -0.0000
+     52 C          12.0441   -2.4717    0.0000 C.ar    1  LIG1       -0.0042
+     53 C          12.0441   -4.1345    0.0000 C.ar    1  LIG1       -0.0003
+     54 N           7.0557   -2.8762    0.0000 N.am    1  LIG1       -0.1974
+     55 C           6.3366   -2.4717    0.0000 C.2     1  LIG1        0.2427
+     56 C           5.6176   -2.8762    0.0000 C.3     1  LIG1        0.0993
+     57 C           5.6176   -3.7300    0.0000 C.3     1  LIG1        0.0783
+     58 C           8.4488   -2.8762    0.0000 C.2     1  LIG1        0.2620
+     59 O           8.4488   -3.7300    0.0000 O.2     1  LIG1       -0.2715
+     60 C           7.7747   -2.4717    0.0000 C.3     1  LIG1        0.1770
+     61 N           9.1678   -2.4717    0.0000 N.am    1  LIG1       -0.1960
+     62 O           9.1678   -4.1345    0.0000 O.2     1  LIG1       -0.2715
+     63 C           7.7747   -1.6628    0.0000 C.3     1  LIG1        0.0574
+     64 C           9.1678   -1.6628    0.0000 C.ar    1  LIG1       -0.0009
+     65 C           8.4488   -1.2583    0.0000 C.ar    1  LIG1       -0.0198
+     66 C           9.8869   -1.2583    0.0000 C.ar    1  LIG1        0.0417
+     67 C           9.8869   -0.4045    0.0000 C.ar    1  LIG1        0.1957
+     68 C           8.4488   -0.4045    0.0000 C.ar    1  LIG1       -0.0009
+     69 C           9.1678    0.0000    0.0000 C.ar    1  LIG1        0.0417
+     70 O           6.3366   -1.6628    0.0000 O.2     1  LIG1       -0.2733
+     71 O          10.6060    0.0000    0.0000 O.3     1  LIG1       -0.2866
+     72 C           1.3932   -5.4378    0.0000 C.cat   1  LIG1        0.5346
+     73 N           1.3932   -6.2467    0.0000 N.pl3   1  LIG1        0.1354
+     74 N           0.6741   -5.0333    0.0000 N.pl3   1  LIG1        0.1354
+@<TRIPOS>BOND
+     1     1     2   am
+     2     2     3    1
+     3     2    10    2
+     4     3     4    1
+     5     4     5   am
+     6     5     6    1
+     7     5    11    2
+     8     6     7    1
+     9     6    12    1
+    10     7     8   am
+    11     8     9    2
+    12    16     8    1
+    13    12    13    1
+    14    13    14    1
+    15    14    15    1
+    16    15    72    1
+    17    16    17    1
+    18    16    18    1
+    19    17    19    1
+    20    18    20    1
+    21    18    21   am
+    22    19    20    1
+    23    22    21    1
+    24    21    27    2
+    25    22    23    1
+    26    22    28    1
+    27    23    24   am
+    28    24    25    1
+    29    24    31    2
+    30    25    26    1
+    31    25    32    1
+    32    26    36   am
+    33    28    29    1
+    34    29    30    1
+    35    30    57    1
+    36    32    33    1
+    37    33    34    2
+    38    33    35   am
+    39    36    37    1
+    40    36    38    2
+    41    37    39    1
+    42    37    44    1
+    43    39    40    1
+    44    40    41    1
+    45    41    42   am
+    46    41    43    2
+    47    44    49   am
+    48    45    47    1
+    49    45    52   ar
+    50    45    46   ar
+    51    46    53   ar
+    52    48    47    1
+    53    48    61    1
+    54    48    49    1
+    55    49    62    2
+    56    50    51   ar
+    57    50    52   ar
+    58    51    53   ar
+    59    54    55   am
+    60    54    60    1
+    61    55    56    1
+    62    55    70    2
+    63    56    57    1
+    64    58    59    2
+    65    58    60    1
+    66    58    61   am
+    67    60    63    1
+    68    63    65    1
+    69    64    66   ar
+    70    64    65   ar
+    71    65    68   ar
+    72    66    67   ar
+    73    67    69   ar
+    74    67    71    1
+    75    68    69   ar
+    76    72    73    2
+    77    72    74    1
diff --git a/lib/galaxy/datatypes/test/drugbank_drugs.sdf b/lib/galaxy/datatypes/test/drugbank_drugs.sdf
new file mode 100644
index 0000000..b03703e
--- /dev/null
+++ b/lib/galaxy/datatypes/test/drugbank_drugs.sdf
@@ -0,0 +1,491 @@
+Goserelin
+  Mrv0541 04221219462D          
+
+ 91 96  0  0  1  0            999 V2000
+   12.8548   -2.6382    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   13.9726   -2.5226    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   10.1766   -3.9327    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   11.2019   -0.7961    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    8.7800   -1.3064    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   16.8589   -3.2421    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   10.3562    1.2163    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    3.2702    4.8341    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    2.3500    8.2734    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    3.8213    4.2201    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    5.2178    1.5938    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    7.8960    2.8883    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    7.1271    0.5358    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   12.4834    3.1249    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   13.1495   -4.0364    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   11.2402   -2.9784    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   15.2089   -3.2393    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   10.1383   -1.7503    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   15.6226   -2.5255    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   12.3806   -6.3890    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    9.2926    0.2619    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    3.1485    7.0391    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    4.3338    5.7884    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   16.8613   -1.8132    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    8.1907    1.4900    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    5.1795    3.7761    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    6.2814    2.5480    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   11.3170   -7.3433    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   12.6753   -7.7873    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    3.8596    0.7098    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    6.7335    6.3823    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    5.9507    7.4636    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   13.9701   -3.9516    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+   14.3043   -4.7058    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   13.6903   -5.2569    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.9766   -4.8431    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.5984   -3.4224    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   11.7912   -3.5925    0.0000 C   0  0  2  0  0  0  0  0  0  0  0  0
+   14.3838   -3.2378    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   11.5349   -4.3767    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.0859   -4.9907    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.8819   -2.5345    0.0000 C   0  0  2  0  0  0  0  0  0  0  0  0
+   10.4330   -3.1486    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.0747   -2.7046    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    8.8184   -3.4889    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   11.8295   -5.7750    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    8.0112   -3.6590    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.3694   -4.1029    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.5873   -1.1363    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.8436   -0.3521    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+   10.6509   -0.1820    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   16.4476   -2.5269    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.9756    6.2324    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+   12.0091   -0.6259    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    8.9979    1.6602    0.0000 C   0  0  2  0  0  0  0  0  0  0  0  0
+    9.5489    1.0461    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.1242   -7.1732    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.1549    6.1475    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    4.8848    5.1744    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+    4.9232    2.9920    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+    1.8207    6.9019    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.2542    2.4444    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    4.1159    2.8219    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    3.5266    5.6183    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.4348    7.4528    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.6921    5.3445    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    6.8324    1.9340    0.0000 C   0  0  2  0  0  0  0  0  0  0  0  0
+    4.6285    4.3902    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.1793   -1.4332    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.8164   -0.4557    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   11.8389    0.1814    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    3.8596    2.0376    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.4742    2.3779    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    7.6397    2.1041    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   10.0615    2.6145    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    3.0790    1.7862    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.9484    6.1287    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    4.3411    1.3737    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    3.0790    0.9612    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    6.5761    1.1498    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.3645    2.1987    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   10.3178    3.3987    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   10.6125    2.0005    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.3645    0.5487    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.4646    6.7970    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    1.6500    1.7862    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    1.6500    0.9612    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   11.1251    3.5688    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   11.4198    2.1706    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    6.7349    7.2073    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   11.6761    2.9548    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+  1 37  2  0  0  0  0
+  2 39  2  0  0  0  0
+  3 43  2  0  0  0  0
+  4 51  1  0  0  0  0
+  4 54  1  0  0  0  0
+  5 49  2  0  0  0  0
+  6 52  2  0  0  0  0
+  7 56  2  0  0  0  0
+  8 64  2  0  0  0  0
+  9 65  2  0  0  0  0
+ 10 68  2  0  0  0  0
+ 11 73  2  0  0  0  0
+ 12 74  2  0  0  0  0
+ 13 80  1  0  0  0  0
+ 14 91  1  0  0  0  0
+ 15 33  1  0  0  0  0
+ 15 36  1  0  0  0  0
+ 15 37  1  0  0  0  0
+ 38 16  1  6  0  0  0
+ 16 43  1  0  0  0  0
+ 17 19  1  0  0  0  0
+ 17 39  1  0  0  0  0
+ 42 18  1  6  0  0  0
+ 18 49  1  0  0  0  0
+ 19 52  1  0  0  0  0
+ 20 46  1  0  0  0  0
+ 20 57  2  0  0  0  0
+ 50 21  1  6  0  0  0
+ 21 56  1  0  0  0  0
+ 22 53  1  0  0  0  0
+ 22 65  1  0  0  0  0
+ 59 23  1  1  0  0  0
+ 23 64  1  0  0  0  0
+ 24 52  1  0  0  0  0
+ 55 25  1  1  0  0  0
+ 25 74  1  0  0  0  0
+ 60 26  1  6  0  0  0
+ 26 68  1  0  0  0  0
+ 67 27  1  6  0  0  0
+ 27 73  1  0  0  0  0
+ 28 57  1  0  0  0  0
+ 29 57  1  0  0  0  0
+ 30 78  1  0  0  0  0
+ 30 79  1  0  0  0  0
+ 31 77  1  0  0  0  0
+ 31 90  1  0  0  0  0
+ 32 85  1  0  0  0  0
+ 32 90  2  0  0  0  0
+ 33 34  1  0  0  0  0
+ 33 39  1  6  0  0  0
+ 34 35  1  0  0  0  0
+ 35 36  1  0  0  0  0
+ 37 38  1  0  0  0  0
+ 38 40  1  0  0  0  0
+ 40 41  1  0  0  0  0
+ 41 46  1  0  0  0  0
+ 42 43  1  0  0  0  0
+ 42 44  1  0  0  0  0
+ 44 45  1  0  0  0  0
+ 45 47  1  0  0  0  0
+ 45 48  1  0  0  0  0
+ 49 50  1  0  0  0  0
+ 50 51  1  0  0  0  0
+ 53 58  1  0  0  0  0
+ 53 64  1  6  0  0  0
+ 54 69  1  0  0  0  0
+ 54 70  1  0  0  0  0
+ 54 71  1  0  0  0  0
+ 55 56  1  0  0  0  0
+ 55 62  1  0  0  0  0
+ 58 61  1  0  0  0  0
+ 59 66  1  0  0  0  0
+ 59 68  1  0  0  0  0
+ 60 63  1  0  0  0  0
+ 60 73  1  0  0  0  0
+ 61 65  1  0  0  0  0
+ 62 75  1  0  0  0  0
+ 63 72  1  0  0  0  0
+ 66 77  1  0  0  0  0
+ 67 74  1  0  0  0  0
+ 67 80  1  0  0  0  0
+ 72 76  1  0  0  0  0
+ 72 78  2  0  0  0  0
+ 75 82  2  0  0  0  0
+ 75 83  1  0  0  0  0
+ 76 79  1  0  0  0  0
+ 76 81  2  0  0  0  0
+ 77 85  2  0  0  0  0
+ 79 84  2  0  0  0  0
+ 81 86  1  0  0  0  0
+ 82 88  1  0  0  0  0
+ 83 89  2  0  0  0  0
+ 84 87  1  0  0  0  0
+ 86 87  2  0  0  0  0
+ 88 91  2  0  0  0  0
+ 89 91  1  0  0  0  0
+M  END
+> <DRUGBANK_ID>
+DB00014
+
+> <DRUG_GROUPS>
+approved
+
+> <GENERIC_NAME>
+Goserelin
+
+> <SALTS>
+Goserelin acetate
+
+> <BRANDS>
+Zoladex
+
+> <CHEMICAL_FORMULA>
+C59H84N18O14
+
+> <MOLECULAR_WEIGHT>
+1269.4105
+
+> <EXACT_MASS>
+1268.641439486
+
+> <IUPAC_NAME>
+(2S)-1-[(2S)-2-[(2S)-2-[(2R)-3-(tert-butoxy)-2-[(2S)-2-[(2S)-3-hydroxy-2-[(2S)-2-[(2S)-3-(1H-imidazol-5-yl)-2-{[(2S)-5-oxopyrrolidin-2-yl]formamido}propanamido]-3-(1H-indol-3-yl)propanamido]propanamido]-3-(4-hydroxyphenyl)propanamido]propanamido]-4-methylpentanamido]-5-[(diaminomethylidene)amino]pentanoyl]-N-(carbamoylamino)pyrrolidine-2-carboxamide
+
+> <INCHI_IDENTIFIER>
+InChI=1S/C59H84N18O14/c1-31(2)22-40(49(82)68-39(12-8-20-64-57(60)61)56(89)77-21-9-13-46(77)55(88)75-76-58(62)90)69-54(87)45(29-91-59(3,4)5)74-50(83)41(23-32-14-16-35(79)17-15-32)70-53(86)44(28-78)73-51(84)42(24-33-26-65-37-11-7-6-10-36(33)37)71-52(85)43(25-34-27-63-30-66-34)72-48(81)38-18-19-47(80)67-38/h6-7,10-11,14-17,26-27,30-31,38-46,65,78-79H,8-9,12-13,18-25,28-29H2,1-5H3,(H,63,66)(H,67,80)(H,68,82)(H,69,87)(H,70,86)(H,71,85)(H,72,81)(H,73,84)(H,74,83)(H,75,88)(H4,60,61,64)(H3,62,76 [...]
+
+> <INCHI_KEY>
+InChIKey=BLCLNMBMMGCOAS-URPVMXJPSA-N
+
+> <SMILES>
+CC(C)C[C@H](NC(=O)[C@@H](COC(C)(C)C)NC(=O)[C@H](CC1=CC=C(O)C=C1)NC(=O)[C@H](CO)NC(=O)[C@H](CC1=CNC2=CC=CC=C12)NC(=O)[C@H](CC1=CN=CN1)NC(=O)[C@@H]1CCC(=O)N1)C(=O)N[C@@H](CCCN=C(N)N)C(=O)N1CCC[C@H]1C(=O)NNC(N)=O
+
+> <JCHEM_ACCEPTOR_COUNT>
+18
+
+> <JCHEM_DONOR_COUNT>
+17
+
+> <JCHEM_ACIDIC_PKA>
+9.82
+
+> <ALOGPS_LOGP>
+0.3
+
+> <JCHEM_LOGP>
+-5.2
+
+> <ALOGPS_LOGS>
+-4.7
+
+> <JCHEM_POLARIZABILITY>
+131.22
+
+> <JCHEM_POLAR_SURFACE_AREA>
+495.89
+
+> <JCHEM_REFRACTIVITY>
+325.84
+
+> <JCHEM_ROTATABLE_BOND_COUNT>
+33
+
+> <ALOGPS_SOLUBILITY>
+2.83e-02 g/l
+
+$$$$
+Desmopressin
+  Mrv0541 04221221522D          
+
+ 74 77  0  0  1  0            999 V2000
+    0.0000   -7.8646    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    0.6741   -7.4601    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    1.3932   -7.8646    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.1122   -7.4601    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    2.8313   -7.8646    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    3.5503   -7.4601    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+    4.2693   -7.8646    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    4.9435   -7.4601    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.6625   -7.8646    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    0.6741   -6.6512    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    2.8313   -8.7184    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    3.5503   -6.6512    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.8313   -6.2467    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.8313   -5.4378    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    2.1122   -5.0333    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    4.9435   -6.6512    0.0000 C   0  0  2  0  0  0  0  0  0  0  0  0
+    4.3143   -6.1568    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.6176   -6.1568    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    4.5390   -5.3929    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.3479   -5.3929    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    6.3366   -6.5613    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    7.0557   -6.1568    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+    7.7747   -6.5613    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    8.4488   -6.1568    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.1678   -6.5613    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+    9.8869   -6.1568    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    6.3366   -7.4152    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    7.0557   -5.3479    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    6.3366   -4.9435    0.0000 S   0  0  0  0  0  0  0  0  0  0  0  0
+    6.3366   -4.1345    0.0000 S   0  0  0  0  0  0  0  0  0  0  0  0
+    8.4488   -5.3479    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    9.1678   -7.4152    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.8869   -7.8197    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.8869   -8.6286    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   10.6060   -7.4152    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    9.8869   -5.3479    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   10.6060   -4.9435    0.0000 C   0  0  2  0  0  0  0  0  0  0  0  0
+    9.1678   -4.9435    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   11.3250   -5.3479    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.0441   -4.9435    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.7631   -5.3479    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   13.4822   -4.9435    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   12.7631   -6.2018    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   10.6060   -4.1345    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+   11.3250   -2.8762    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   11.3250   -3.7300    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   10.6060   -2.4717    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.8869   -2.8762    0.0000 C   0  0  1  0  0  0  0  0  0  0  0  0
+    9.8869   -3.7300    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.7631   -2.8762    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.7631   -3.7300    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.0441   -2.4717    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+   12.0441   -4.1345    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    7.0557   -2.8762    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    6.3366   -2.4717    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.6176   -2.8762    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    5.6176   -3.7300    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    8.4488   -2.8762    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    8.4488   -3.7300    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    7.7747   -2.4717    0.0000 C   0  0  2  0  0  0  0  0  0  0  0  0
+    9.1678   -2.4717    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    9.1678   -4.1345    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    7.7747   -1.6628    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.1678   -1.6628    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    8.4488   -1.2583    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.8869   -1.2583    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.8869   -0.4045    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    8.4488   -0.4045    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    9.1678    0.0000    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    6.3366   -1.6628    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+   10.6060    0.0000    0.0000 O   0  0  0  0  0  0  0  0  0  0  0  0
+    1.3932   -5.4378    0.0000 C   0  0  0  0  0  0  0  0  0  0  0  0
+    1.3932   -6.2467    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+    0.6741   -5.0333    0.0000 N   0  0  0  0  0  0  0  0  0  0  0  0
+  1  2  1  0  0  0  0
+  2  3  1  0  0  0  0
+  2 10  2  0  0  0  0
+  3  4  1  0  0  0  0
+  4  5  1  0  0  0  0
+  5  6  1  0  0  0  0
+  5 11  2  0  0  0  0
+  6  7  1  0  0  0  0
+  6 12  1  6  0  0  0
+  7  8  1  0  0  0  0
+  8  9  2  0  0  0  0
+ 16  8  1  6  0  0  0
+ 12 13  1  0  0  0  0
+ 13 14  1  0  0  0  0
+ 14 15  1  0  0  0  0
+ 15 72  1  0  0  0  0
+ 16 17  1  0  0  0  0
+ 16 18  1  0  0  0  0
+ 17 19  1  0  0  0  0
+ 18 20  1  0  0  0  0
+ 18 21  1  0  0  0  0
+ 19 20  1  0  0  0  0
+ 22 21  1  6  0  0  0
+ 21 27  2  0  0  0  0
+ 22 23  1  0  0  0  0
+ 22 28  1  0  0  0  0
+ 23 24  1  0  0  0  0
+ 24 25  1  0  0  0  0
+ 24 31  2  0  0  0  0
+ 25 26  1  0  0  0  0
+ 25 32  1  1  0  0  0
+ 26 36  1  0  0  0  0
+ 28 29  1  0  0  0  0
+ 29 30  1  0  0  0  0
+ 30 57  1  0  0  0  0
+ 32 33  1  0  0  0  0
+ 33 34  2  0  0  0  0
+ 33 35  1  0  0  0  0
+ 36 37  1  0  0  0  0
+ 36 38  2  0  0  0  0
+ 37 39  1  1  0  0  0
+ 37 44  1  0  0  0  0
+ 39 40  1  0  0  0  0
+ 40 41  1  0  0  0  0
+ 41 42  1  0  0  0  0
+ 41 43  2  0  0  0  0
+ 44 49  1  0  0  0  0
+ 45 47  1  0  0  0  0
+ 45 52  1  0  0  0  0
+ 45 46  2  0  0  0  0
+ 46 53  1  0  0  0  0
+ 48 47  1  1  0  0  0
+ 48 61  1  0  0  0  0
+ 48 49  1  0  0  0  0
+ 49 62  2  0  0  0  0
+ 50 51  1  0  0  0  0
+ 50 52  2  0  0  0  0
+ 51 53  2  0  0  0  0
+ 54 55  1  0  0  0  0
+ 54 60  1  0  0  0  0
+ 55 56  1  0  0  0  0
+ 55 70  2  0  0  0  0
+ 56 57  1  0  0  0  0
+ 58 59  2  0  0  0  0
+ 58 60  1  0  0  0  0
+ 58 61  1  0  0  0  0
+ 60 63  1  1  0  0  0
+ 63 65  1  0  0  0  0
+ 64 66  2  0  0  0  0
+ 64 65  1  0  0  0  0
+ 65 68  2  0  0  0  0
+ 66 67  1  0  0  0  0
+ 67 69  2  0  0  0  0
+ 67 71  1  0  0  0  0
+ 68 69  1  0  0  0  0
+ 72 73  2  3  0  0  0
+ 72 74  1  0  0  0  0
+M  END
+> <DRUGBANK_ID>
+DB00035
+
+> <DRUG_GROUPS>
+approved
+
+> <GENERIC_NAME>
+Desmopressin
+
+> <SYNONYMS>
+1-Desamino-8-D-arginine vasopressin; Desmopresina [INN-Spanish]; Desmopressine [INN-French]; Desmopressinum [INN-Latin]
+
+> <SALTS>
+Desmopressin acetate
+
+> <BRANDS>
+Adiuretin; Concentraid; DDAVP; Minirin; Stimate
+
+> <CHEMICAL_FORMULA>
+C46H64N14O12S2
+
+> <MOLECULAR_WEIGHT>
+1069.217
+
+> <EXACT_MASS>
+1068.426954962
+
+> <IUPAC_NAME>
+(2S)-2-{[(2S)-1-{[(4R,7S,10S,13S,16S)-13-benzyl-10-(2-carbamoylethyl)-7-(carbamoylmethyl)-16-[(4-hydroxyphenyl)methyl]-6,9,12,15,18-pentaoxo-1,2-dithia-5,8,11,14,17-pentaazacycloicosan-4-yl]carbonyl}pyrrolidin-2-yl]formamido}-5-carbamimidamido-N-(carbamoylmethyl)pentanamide
+
+> <INCHI_IDENTIFIER>
+InChI=1S/C46H64N14O12S2/c47-35(62)15-14-29-40(67)58-32(22-36(48)63)43(70)59-33(45(72)60-18-5-9-34(60)44(71)56-28(8-4-17-52-46(50)51)39(66)53-23-37(49)64)24-74-73-19-16-38(65)54-30(21-26-10-12-27(61)13-11-26)41(68)57-31(42(69)55-29)20-25-6-2-1-3-7-25/h1-3,6-7,10-13,28-34,61H,4-5,8-9,14-24H2,(H2,47,62)(H2,48,63)(H2,49,64)(H,53,66)(H,54,65)(H,55,69)(H,56,71)(H,57,68)(H,58,67)(H,59,70)(H4,50,51,52)/t28-,29-,30-,31-,32-,33-,34-/m0/s1
+
+> <INCHI_KEY>
+InChIKey=NFLWUMRGJYTJIN-NXBWRCJVSA-N
+
+> <SMILES>
+NC(=O)CC[C@@H]1NC(=O)[C@H](CC2=CC=CC=C2)NC(=O)[C@H](CC2=CC=C(O)C=C2)NC(=O)CCSSC[C@H](NC(=O)[C@H](CC(N)=O)NC1=O)C(=O)N1CCC[C@H]1C(=O)N[C@@H](CCCNC(N)=N)C(=O)NCC(N)=O
+
+> <JCHEM_ACCEPTOR_COUNT>
+15
+
+> <JCHEM_DONOR_COUNT>
+14
+
+> <JCHEM_ACIDIC_PKA>
+11.34
+
+> <ALOGPS_LOGP>
+-1
+
+> <JCHEM_LOGP>
+-6.1
+
+> <ALOGPS_LOGS>
+-4
+
+> <JCHEM_POLARIZABILITY>
+106.19
+
+> <JCHEM_POLAR_SURFACE_AREA>
+435.41
+
+> <JCHEM_REFRACTIVITY>
+279.78
+
+> <JCHEM_ROTATABLE_BOND_COUNT>
+19
+
+> <ALOGPS_SOLUBILITY>
+1.10e-01 g/l
+
+$$$$
diff --git a/lib/galaxy/datatypes/test/drugbank_drugs.smi b/lib/galaxy/datatypes/test/drugbank_drugs.smi
new file mode 100644
index 0000000..02a6867
--- /dev/null
+++ b/lib/galaxy/datatypes/test/drugbank_drugs.smi
@@ -0,0 +1,2 @@
+O=C(N1[C@@H](CCC1)C(=O)NNC(=O)N)[C@@H](NC(=O)[C@@H](NC(=O)[C@H](NC(=O)[C@@H](NC(=O)[C@@H](NC(=O)[C@@H](NC(=O)[C@@H](NC(=O)[C@H]1NC(=O)CC1)Cc1[nH]cnc1)Cc1c2c([nH]c1)cccc2)CO)Cc1ccc(O)cc1)COC(C)(C)C)CC(C)C)CCCN=C(N)N	Goserelin
+NC(=O)CNC(=O)[C@@H](NC(=O)[C@@H]1CCCN1C(=O)[C@H]1NC(=O)[C@@H](NC(=O)[C@H](CCC(=O)N)NC(=O)[C@H](Cc2ccccc2)NC(=O)[C@@H](NC(=O)CCSSC1)Cc1ccc(cc1)O)CC(=O)N)CCCNC(=N)N	Desmopressin
diff --git a/lib/galaxy/datatypes/test/file.html b/lib/galaxy/datatypes/test/file.html
new file mode 100644
index 0000000..6fa1288
--- /dev/null
+++ b/lib/galaxy/datatypes/test/file.html
@@ -0,0 +1,74 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2//EN">
+<HTML>
+<HEAD>
+
+	
+	<META HTTP-EQUIV="Content-Type" CONTENT="text/html;CHARSET=iso-8859-1">
+	<META http-equiv="Content-Script-Type" content="text/javascript">
+	<META HTTP-EQUIV="Pragma" CONTENT="no-cache">
+	<META HTTP-EQUIV="Expires" CONTENT="-1">
+	<TITLE>
+Hyperlinks to Genome Browser	</TITLE>
+	<LINK REL="STYLESHEET" HREF="/style/HGStyle.css">
+
+</HEAD>
+<BODY BGCOLOR="#FFF9D2" LINK="0000CC" VLINK="#330066" ALINK="#6600FF">
+<A NAME="TOP"></A>
+
+<TABLE BORDER=0 CELLPADDING=0 CELLSPACING=0 WIDTH="100%">
+
+<!-- +++++++++++++++++++++ HOTLINKS BAR +++++++++++++++++++ -->
+<TR><TD COLSPAN=3 HEIGHT=40 >
+<table bgcolor="#000000" cellpadding="1" cellspacing="1" width="100%%" height="27">
+<tr bgcolor="#2636D1"><td valign="middle">
+	<table BORDER=0 CELLSPACING=0 CELLPADDING=0 bgcolor="#2636D1" height="24"><TR>
+	 	<TD VALIGN="middle"><font color="#89A1DE"> 
+
+ <A HREF="/index.html?org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">
+           Home</A>    
+       <A HREF="/cgi-bin/hgGateway?org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">
+           Genomes</A>    
+       <A HREF="/cgi-bin/hgTracks?org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">
+           Genome Browser</A>    
+       <A HREF="/cgi-bin/hgBlat?command=start&org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">           Blat</A>    
+       <A HREF="/cgi-bin/hgTables?org=Bushbaby&db=otoGar1&hgsid=1118408&hgta_doMainPage=1" class="topbar">
+           Tables</A>    
+       <A HREF="/cgi-bin/hgNear?org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">
+           Gene Sorter</A>    
+<A HREF="/cgi-bin/hgSession?org=Bushbaby&db=otoGar1&hgsid=1118408&hgS_doMainPage=1" class="topbar">Session</A>   
+       <A HREF="/FAQ/" class="topbar">
+           FAQ</A>    
+
+       <A HREF="/goldenPath/help/hgTablesHelp.html"
+       class="topbar">
+           Help</A> 
+ </font></TD>
+       </TR></TABLE>
+</TD></TR></TABLE>
+</TD></TR>	
+
+
+<!-- +++++++++++++++++++++ CONTENT TABLES +++++++++++++++++++ -->
+<TR><TD COLSPAN=3>	
+  	<!--outer table is for border purposes-->
+  	<TABLE WIDTH="100%" BGCOLOR="#888888" BORDER="0" CELLSPACING="0" CELLPADDING="1"><TR><TD>	
+    <TABLE BGCOLOR="#FFFEE8" WIDTH="100%"  BORDER="0" CELLSPACING="0" CELLPADDING="0"><TR><TD>	
+	<TABLE BGCOLOR="#D9E4F8" BACKGROUND="/images/hr.gif" WIDTH="100%"><TR><TD>
+		<FONT SIZE="4"><b> 
+Hyperlinks to Genome Browser</b></FONT></TD></TR></TABLE>
+	<TABLE BGCOLOR="#FFFEE8" WIDTH="100%" CELLPADDING=0><TR><TH HEIGHT=10></TH></TR>
+	<TR><TD WIDTH=10> </TD><TD>
+	
+
+<A HREF="http://hgwdev.cse.ucsc.edu/cgi-bin/hgTracks?db=otoGar1&position=scaffold_0.1-193456:96554-98437&gold=pack" TARGET=_blank>scaffold_0.1-193456_25 at scaffold_0.1-193456:96554-98437</A><BR>
+<A HREF="http://hgwdev.cse.ucsc.edu/cgi-bin/hgTracks?db=otoGar1&position=scaffold_0.1-193456:100227-101729&gold=pack" TARGET=_blank>scaffold_0.1-193456_26 at scaffold_0.1-193456:100227-101729</A><BR>
+<A HREF="http://hgwdev.cse.ucsc.edu/cgi-bin/hgTracks?db=otoGar1&position=scaffold_0.1-193456:101830-103994&gold=pack" TARGET=_blank>scaffold_0.1-193456_27 at scaffold_0.1-193456:101830-103994</A><BR>
+<A HREF="http://hgwdev.cse.ucsc.edu/cgi-bin/hgTracks?db=otoGar1&position=scaffold_0.1-193456:105267-107614&gold=pack" TARGET=_blank>scaffold_0.1-193456_28 at scaffold_0.1-193456:105267-107614</A><BR>
+
+	</TD><TD WIDTH=15></TD></TR></TABLE>
+	<br></TD></TR></TABLE>
+	</TD></TR></TABLE>
+	
+</TD></TR></TABLE>
+
+</BODY></HTML>
diff --git a/lib/galaxy/datatypes/test/gff_version_3.gff b/lib/galaxy/datatypes/test/gff_version_3.gff
new file mode 100644
index 0000000..c6263c0
--- /dev/null
+++ b/lib/galaxy/datatypes/test/gff_version_3.gff
@@ -0,0 +1,150 @@
+##gff-version 3
+##date Tue Jun 26 10:48:17 2007
+##sequence-region ctgA 1 50000
+##source gbrowse GFFToGalaxyDumper plugin
+##NOTE: All features dumped.
+ctgA	example	my_feature	22132	24633	.	+	.	ID=My_feature:f15
+ctgA	example	my_feature	46990	48410	.	-	.	ID=My_feature:f11
+ctgA	example	my_feature	44705	47713	.	-	.	ID=My_feature:f01
+ctgA	example	my_feature	36649	40440	.	-	.	ID=My_feature:f03
+ctgA	example	my_feature	23072	23185	.	+	.	ID=My_feature:f14
+ctgA	example	my_feature	37242	38653	.	+	.	ID=My_feature:f04
+ctgA	example	motif	37497	40559	.	-	.	ID=Motif:m15;Note=7-transmembrane
+ctgA	example	my_feature	36034	38167	.	+	.	ID=My_feature:f09
+ctgA	example	motif	28332	30033	.	-	.	ID=Motif:m02;Note=HOX
+ctgA	example	my_feature	4715	5968	.	-	.	ID=My_feature:f05
+ctgA	example	motif	48253	48366	.	+	.	ID=Motif:m01;Note=WD40
+ctgA	example	BAC	1000	20000	.	.	.	ID=BAC:b101.2;Note=Fingerprinted+BAC+with+end+reads
+ctgA	example	right_end_read	19500	20000	.	-	.	Parent=BAC:b101.2
+ctgA	example	left_end_read	1000	1500	.	+	.	Parent=BAC:b101.2
+ctgA	example	motif	13801	14007	.	-	.	ID=Motif:m05;Note=helix+loop+helix
+ctgA	example	coding	1050	9000	.	+	.	ID=mRNA:EDEN.1;Gene=EDEN
+ctgA	example	CDS	1201	1500	.	+	0	Parent=mRNA:EDEN.1
+ctgA	example	CDS	3000	3902	.	+	0	Parent=mRNA:EDEN.1
+ctgA	example	CDS	5000	5500	.	+	0	Parent=mRNA:EDEN.1
+ctgA	example	CDS	7000	7608	.	+	0	Parent=mRNA:EDEN.1
+ctgA	example	processed_transcript	1050	9000	.	+	.	ID=mRNA:EDEN.1
+ctgA	example	5'-UTR	1050	1200	.	+	.	Parent=mRNA:EDEN.1
+ctgA	example	3'-UTR	7609	9000	.	+	.	Parent=mRNA:EDEN.1
+ctgA	est	match	5410	7503	.	-	.	ID=EST:agt830.3;Target=agt830.3+1+595
+ctgA	est	HSP	7000	7503	.	-	.	Parent=EST:agt830.3;Target=agt830.3+1+504
+ctgA	est	HSP	5410	5500	.	-	.	Parent=EST:agt830.3;Target=agt830.3+505+595
+ctgA	example	motif	46012	48851	.	+	.	ID=Motif:m09;Note=kinase
+ctgA	example	match	6885	8999	.	-	.	ID=Match:seg03
+ctgA	example	HSP	8306	8999	.	-	.	Parent=Match:seg03
+ctgA	example	HSP	8055	8080	.	-	.	Parent=Match:seg03
+ctgA	example	HSP	7410	7737	.	-	.	Parent=Match:seg03
+ctgA	example	HSP	6885	7241	.	-	.	Parent=Match:seg03
+ctgA	example	my_feature	13280	16394	.	+	.	ID=My_feature:f08
+ctgA	example	match	29771	32937	.	+	.	ID=Match:seg10
+ctgA	example	HSP	29771	29942	.	+	.	Parent=Match:seg10
+ctgA	example	HSP	30042	30340	.	+	.	Parent=Match:seg10
+ctgA	example	HSP	30810	31307	.	+	.	Parent=Match:seg10
+ctgA	example	HSP	31761	31984	.	+	.	Parent=Match:seg10
+ctgA	example	HSP	32374	32937	.	+	.	Parent=Match:seg10
+ctgA	example	match	36616	37227	.	-	.	ID=Match:seg09
+ctgA	example	HSP	37208	37227	.	-	.	Parent=Match:seg09
+ctgA	example	HSP	36616	37057	.	-	.	Parent=Match:seg09
+ctgA	example	motif	11911	15561	.	+	.	ID=Motif:m11;Note=kinase
+ctgA	est	match	1050	3202	.	+	.	ID=EST:agt830.5;Target=agt830.5+1+654
+ctgA	est	HSP	1050	1500	.	+	.	Parent=EST:agt830.5;Target=agt830.5+1+451
+ctgA	est	HSP	3000	3202	.	+	.	Parent=EST:agt830.5;Target=agt830.5+452+654
+ctgA	example	motif	15396	16159	.	+	.	ID=Motif:m03;Note=zinc+finger
+ctgA	est	match	1150	7200	.	+	.	ID=EST:agt767.5;Target=agt767.5+1+1153
+ctgA	est	HSP	1150	1500	.	+	.	Parent=EST:agt767.5;Target=agt767.5+1+351
+ctgA	est	HSP	5000	5500	.	+	.	Parent=EST:agt767.5;Target=agt767.5+352+852
+ctgA	est	HSP	7000	7200	.	+	.	Parent=EST:agt767.5;Target=agt767.5+853+1153
+ctgA	est	match	1050	7300	.	+	.	ID=EST:agt221.5;Target=agt221.5+1+1253
+ctgA	est	HSP	1050	1500	.	+	.	Parent=EST:agt221.5;Target=agt221.5+1+451
+ctgA	est	HSP	5000	5500	.	+	.	Parent=EST:agt221.5;Target=agt221.5+452+952
+ctgA	est	HSP	7000	7300	.	+	.	Parent=EST:agt221.5;Target=agt221.5+953+1253
+ctgA	example	my_feature	19157	22915	.	-	.	ID=My_feature:f13
+ctgA	est	match	8000	9000	.	-	.	ID=EST:agt767.3;Target=agt767.3+1+1001
+ctgA	est	HSP	8000	9000	.	-	.	Parent=EST:agt767.3;Target=agt767.3+1+1001
+ctgA	example	motif	28342	28447	.	-	.	ID=Motif:m10;Note=DEAD+box
+ctgA	example	motif	17667	17690	.	+	.	ID=Motif:m13;Note=DEAD+box
+ctgA	example	trace	44401	45925	.	+	.	ID=name:trace;trace=volvox_trace.scf
+ctgA	example	match	26122	34466	.	+	.	ID=Match:seg02
+ctgA	example	HSP	26122	26126	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	26497	26869	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	27201	27325	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	27372	27433	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	27565	27565	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	27813	28091	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	28093	28201	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	28329	28377	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	28829	29194	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	29517	29702	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	29713	30061	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	30329	30774	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	30808	31306	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	31516	31729	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	31753	32154	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	32595	32696	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	32892	32901	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	33127	33388	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	33439	33443	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	33759	34209	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	34401	34466	.	+	.	Parent=Match:seg02
+ctgA	example	contig	1	50000	.	.	.	ID=Contig:ctgA
+ctgA	example	match	41137	47829	.	-	.	ID=Match:seg14
+ctgA	example	HSP	47449	47829	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	46816	46992	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	46092	46318	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	45790	46022	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	45231	45488	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	44763	45030	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	44065	44556	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	43395	43811	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	42890	43270	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	42057	42474	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	41754	41948	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	41137	41318	.	-	.	Parent=Match:seg14
+ctgA	example	match	12531	15870	.	+	.	ID=Match:seg12
+ctgA	example	HSP	12531	12895	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	13122	13449	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	13452	13745	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	13908	13965	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	13998	14488	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	14564	14899	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	15185	15276	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	15639	15736	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	15745	15870	.	+	.	Parent=Match:seg12
+ctgA	est	match	7500	8000	.	-	.	ID=EST:agt221.3;Target=agt221.3+1+501
+ctgA	est	HSP	7500	8000	.	-	.	Parent=EST:agt221.3;Target=agt221.3+1+501
+ctgA	example	coding	1300	9000	.	+	.	ID=mRNA:EDEN.3;Gene=EDEN
+ctgA	example	CDS	3301	3902	.	+	0	Parent=mRNA:EDEN.3
+ctgA	example	CDS	5000	5500	.	+	1	Parent=mRNA:EDEN.3
+ctgA	example	CDS	7000	7600	.	+	1	Parent=mRNA:EDEN.3
+ctgA	example	processed_transcript	1300	9000	.	+	.	ID=mRNA:EDEN.3
+ctgA	example	5'-UTR	1300	1500	.	+	.	Parent=mRNA:EDEN.3
+ctgA	example	5'-UTR	3000	3300	.	+	.	Parent=mRNA:EDEN.3
+ctgA	example	3'-UTR	7601	9000	.	+	.	Parent=mRNA:EDEN.3
+ctgA	example	match	26503	35904	.	-	.	ID=Match:seg05
+ctgA	example	HSP	35642	35904	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	35333	35507	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	34605	34983	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	34244	34313	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	33438	33868	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	33053	33325	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	32208	32680	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	32010	32057	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	31421	31817	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	31232	31236	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	30465	30798	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	30108	30216	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	29513	29647	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	28777	29058	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	28225	28316	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	27887	28076	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	27448	27860	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	27172	27185	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	26503	26799	.	-	.	Parent=Match:seg05
+ctgA	example	match	49406	50000	.	+	.	ID=Match:seg13
+ctgA	example	HSP	49406	49476	.	+	.	Parent=Match:seg13
+ctgA	example	HSP	49762	50000	.	+	.	Parent=Match:seg13
+ctgA	example	gene	1050	9000	.	+	.	ID=Gene:EDEN;Note=protein+kinase
+ctgA	example	motif	33325	35791	.	+	.	ID=Motif:m04;Note=Ig-like
+ctgA	example	match	31785	32359	.	+	.	ID=Match:seg01
+ctgA	example	HSP	31785	31939	.	+	.	Parent=Match:seg01
+ctgA	example	HSP	32329	32359	.	+	.	Parent=Match:seg01
diff --git a/lib/galaxy/datatypes/test/interval.interval b/lib/galaxy/datatypes/test/interval.interval
new file mode 100644
index 0000000..3e6c671
--- /dev/null
+++ b/lib/galaxy/datatypes/test/interval.interval
@@ -0,0 +1,5 @@
+chr1	4348187	4348589	3.70	4.90	2.55	0.24	0.46
+chr1	4488177	4488442	4.03	5.77	1.92	-0.67	0.81
+chr1	4774091	4774440	8.07	8.33	7.82	0.85	-0.40
+chr1	4800122	4800409	6.40	7.35	5.44	1.19	-0.42
+chr1	4878925	4879277	2.18	0.28	4.93	-0.96	1.24
diff --git a/lib/galaxy/datatypes/test/interval1.bed b/lib/galaxy/datatypes/test/interval1.bed
new file mode 100644
index 0000000..a3bc1a7
--- /dev/null
+++ b/lib/galaxy/datatypes/test/interval1.bed
@@ -0,0 +1,11 @@
+#chrom	start	end	name	value	strand
+chr7	115444712	115444739	CCDS5763.1_cds_0_0_chr7_115444713_f	0	+
+chr7	115468538	115468624	CCDS5763.1_cds_1_0_chr7_115468539_f	0	+
+chr7	115483024	115483277	CCDS5763.1_cds_2_0_chr7_115483025_f	0	+
+chr7	115484165	115484501	CCDS5763.1_cds_3_0_chr7_115484166_f	0	+
+chr7	115485764	115485980	CCDS5763.1_cds_4_0_chr7_115485765_f	0	+
+chr7	115486322	115486481	CCDS5763.1_cds_5_0_chr7_115486323_f	0	+
+chr7	115491298	115491487	CCDS5763.1_cds_6_0_chr7_115491299_f	0	+
+chr7	115468538	115468624	CCDS5764.1_cds_0_0_chr7_115468539_f	0	+
+chr7	115483024	115483277	CCDS5764.1_cds_1_0_chr7_115483025_f	0	+
+chr7	115484165	115484501	CCDS5764.1_cds_2_0_chr7_115484166_f	0	+
diff --git a/lib/galaxy/datatypes/test/issue1818.tabular b/lib/galaxy/datatypes/test/issue1818.tabular
new file mode 100644
index 0000000..cd575eb
--- /dev/null
+++ b/lib/galaxy/datatypes/test/issue1818.tabular
@@ -0,0 +1,84 @@
+Name	Major	Score
+Ignatius	Engineering	83
+Austin	Life-Sciences	91
+Zackery	Engineering	54
+Marques	Arts	58
+Darren	Business	94
+Darius	Social-Sciences	51
+Thanh	Engineering	53
+Joe'Quann	Engineering	75
+Bryan	Arts	68
+Devin	Engineering	92
+Joseph	Social-Sciences	61
+Joshua	Life-Sciences	14
+Ja'Won	Social-Sciences	37
+Tyreque	Arts	74
+Sage	Arts	55
+Antonio	Engineering	88
+Michael	Engineering	39
+Randy	Social-Sciences	68
+Dilan	Health-Medicine	84
+Omar	Engineering	99
+Zachary	Arts	80
+Faison	Engineering	47
+Angel	Health-Medicine	100
+Gabriel	Health-Medicine	100
+John	Life-Sciences	70
+Leonard	Business	87
+Juan	Business	79
+Jonathan	Health-Medicine	100
+Christopher	Life-Sciences	59
+Brandon	Life-Sciences	72
+D'Angelo	Health-Medicine	90
+Justin	Social-Sciences	90
+Israel	Health-Medicine	81
+William	Arts	46
+David	Social-Sciences	69
+Drake	Social-Sciences	59
+Drake	Social-Sciences	76
+Nathan	Arts	71
+Trevon	Arts	74
+Aaron	Business	83
+Daniel	Health-Medicine	91
+Kevin	Health-Medicine	100
+Antonio	Engineering	56
+Donovan	Arts	75
+Kerris	Business	82
+Andre	Health-Medicine	72
+Dakota	Business	83
+Aaron	Life-Sciences	58
+Walter	Arts	75
+Isaiah	Arts	80
+Christian	Life-Sciences	67
+Dalton	Health-Medicine	100
+Jesse	Social-Sciences	32
+Diego	Health-Medicine	82
+Nathen	Life-Sciences	46
+Anthony	Life-Sciences	32
+Christian	Business	88
+David	Business	92
+Avery	Engineering	51
+Paul	Arts	63
+Derek	Arts	60
+Levi	Arts	76
+Lance	Social-Sciences	65
+Sonny	Engineering	50
+Shawn	Arts	65
+Leonardo	Engineering	78
+Yeng	Life-Sciences	39
+Leroy	Social-Sciences	74
+Gurnam	Life-Sciences	66
+Fernando	Arts	78
+Williams	Social-Sciences	62
+Roberto	Arts	65
+Teriuse	Business	94
+Nathaniel	Arts	88
+Chase	Social-Sciences	27
+Caleb	Business	87
+Tysza	Business	92
+Nico	Arts	59
+Manuel	Social-Sciences	61
+Patrick	Health-Medicine	92
+Peter	Health-Medicine	86
+Allen	Life-Sciences	50
+Joel	Social-Sciences	72
diff --git a/lib/galaxy/datatypes/test/megablast_xml_parser_test1.blastxml b/lib/galaxy/datatypes/test/megablast_xml_parser_test1.blastxml
new file mode 100644
index 0000000..356bb1a
--- /dev/null
+++ b/lib/galaxy/datatypes/test/megablast_xml_parser_test1.blastxml
@@ -0,0 +1,4117 @@
+<?xml version="1.0"?>
+<!DOCTYPE BlastOutput PUBLIC "-//NCBI//NCBI BlastOutput/EN" "http://www.ncbi.nlm.nih.gov/dtd/NCBI_BlastOutput.dtd">
+<BlastOutput>
+  <BlastOutput_program>blastn</BlastOutput_program>
+  <BlastOutput_version>blastn 2.2.17 [Aug-26-2007]</BlastOutput_version>
+  <BlastOutput_reference>~Reference: Altschul, Stephen F., Thomas L. Madden, Alejandro A. Schaffer, ~Jinghui Zhang, Zheng Zhang, Webb Miller, and David J. Lipman (1997), ~"Gapped BLAST and PSI-BLAST: a new generation of protein database search~programs",  Nucleic Acids Res. 25:3389-3402.</BlastOutput_reference>
+  <BlastOutput_db>Ecoli.fa</BlastOutput_db>
+  <BlastOutput_query-ID>lcl|1_0</BlastOutput_query-ID>
+  <BlastOutput_query-def>0_0.666667</BlastOutput_query-def>
+  <BlastOutput_query-len>30</BlastOutput_query-len>
+  <BlastOutput_param>
+    <Parameters>
+      <Parameters_expect>10</Parameters_expect>
+      <Parameters_sc-match>1</Parameters_sc-match>
+      <Parameters_sc-mismatch>-3</Parameters_sc-mismatch>
+      <Parameters_gap-open>0</Parameters_gap-open>
+      <Parameters_gap-extend>0</Parameters_gap-extend>
+      <Parameters_filter>F</Parameters_filter>
+    </Parameters>
+  </BlastOutput_param>
+  <BlastOutput_iterations>
+    <Iteration>
+      <Iteration_iter-num>1</Iteration_iter-num>
+      <Iteration_query-ID>lcl|1_0</Iteration_query-ID>
+      <Iteration_query-def>0_0.666667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>5436010</Hsp_hit-from>
+              <Hsp_hit-to>5436039</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGGACAGCGCCGCCACCAACAAAGCCACCA</Hsp_qseq>
+              <Hsp_hseq>CGGACAGCGCCGCCACCAACAAAGCCACCA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>2</Iteration_iter-num>
+      <Iteration_query-ID>lcl|2_0</Iteration_query-ID>
+      <Iteration_query-def>1_0.600000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>696993</Hsp_hit-from>
+              <Hsp_hit-to>697022</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AAAACACCGGATGCTCCGGCGCTGGCAGAT</Hsp_qseq>
+              <Hsp_hseq>AAAACACCGGATGCTCCGGCGCTGGCAGAT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>3</Iteration_iter-num>
+      <Iteration_query-ID>lcl|3_0</Iteration_query-ID>
+      <Iteration_query-def>2_0.400000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4100018</Hsp_hit-from>
+              <Hsp_hit-to>4100047</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTTGCTTTTAGTACACCGGATTCAGAACCA</Hsp_qseq>
+              <Hsp_hseq>TTTGCTTTTAGTACACCGGATTCAGAACCA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>4</Iteration_iter-num>
+      <Iteration_query-ID>lcl|4_0</Iteration_query-ID>
+      <Iteration_query-def>3_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>2305844</Hsp_hit-from>
+              <Hsp_hit-to>2305873</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CCGTCCAGAAAGGTGTATTCATGGGGACGG</Hsp_qseq>
+              <Hsp_hseq>CCGTCCAGAAAGGTGTATTCATGGGGACGG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>5</Iteration_iter-num>
+      <Iteration_query-ID>lcl|5_0</Iteration_query-ID>
+      <Iteration_query-def>4_0.766667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3457901</Hsp_hit-from>
+              <Hsp_hit-to>3457930</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CACGCTACGTGCGCCCCCGCCCAGAAGGCG</Hsp_qseq>
+              <Hsp_hseq>CACGCTACGTGCGCCCCCGCCCAGAAGGCG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>6</Iteration_iter-num>
+      <Iteration_query-ID>lcl|6_0</Iteration_query-ID>
+      <Iteration_query-def>5_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>1264131</Hsp_hit-from>
+              <Hsp_hit-to>1264160</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GCACTTAACCCGCTTCGGCGGGTTTTGTTT</Hsp_qseq>
+              <Hsp_hseq>GCACTTAACCCGCTTCGGCGGGTTTTGTTT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>2</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>1636485</Hsp_hit-from>
+              <Hsp_hit-to>1636514</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GCACTTAACCCGCTTCGGCGGGTTTTGTTT</Hsp_qseq>
+              <Hsp_hseq>GCACTTAACCCGCTTCGGCGGGTTTTGTTT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>3</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2318694</Hsp_hit-from>
+              <Hsp_hit-to>2318723</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AAACAAAACCCGCCGAAGCGGGTTAAGTGC</Hsp_qseq>
+              <Hsp_hseq>AAACAAAACCCGCCGAAGCGGGTTAAGTGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>4</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2777043</Hsp_hit-from>
+              <Hsp_hit-to>2777072</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AAACAAAACCCGCCGAAGCGGGTTAAGTGC</Hsp_qseq>
+              <Hsp_hseq>AAACAAAACCCGCCGAAGCGGGTTAAGTGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>7</Iteration_iter-num>
+      <Iteration_query-ID>lcl|7_0</Iteration_query-ID>
+      <Iteration_query-def>6_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1185124</Hsp_hit-from>
+              <Hsp_hit-to>1185153</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TAAGCCGTTACTGGCAGCAAGTGCAGGCAA</Hsp_qseq>
+              <Hsp_hseq>TAAGCCGTTACTGGCAGCAAGTGCAGGCAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>8</Iteration_iter-num>
+      <Iteration_query-ID>lcl|8_0</Iteration_query-ID>
+      <Iteration_query-def>7_0.400000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2458203</Hsp_hit-from>
+              <Hsp_hit-to>2458232</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGAATTTACCGTTATCTATCTTGCCTGCCT</Hsp_qseq>
+              <Hsp_hseq>TGAATTTACCGTTATCTATCTTGCCTGCCT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>10</Iteration_iter-num>
+      <Iteration_query-ID>lcl|10_0</Iteration_query-ID>
+      <Iteration_query-def>9_0.400000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>5286666</Hsp_hit-from>
+              <Hsp_hit-to>5286695</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GCGTTTTGCTAAACTTCTGCCGGAATATAA</Hsp_qseq>
+              <Hsp_hseq>GCGTTTTGCTAAACTTCTGCCGGAATATAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>11</Iteration_iter-num>
+      <Iteration_query-ID>lcl|11_0</Iteration_query-ID>
+      <Iteration_query-def>10_0.500000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2787672</Hsp_hit-from>
+              <Hsp_hit-to>2787701</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AAAGAGGCGAGCAGAGTAAAACGCAGGCAA</Hsp_qseq>
+              <Hsp_hseq>AAAGAGGCGAGCAGAGTAAAACGCAGGCAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>13</Iteration_iter-num>
+      <Iteration_query-ID>lcl|13_0</Iteration_query-ID>
+      <Iteration_query-def>12_0.700000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4116145</Hsp_hit-from>
+              <Hsp_hit-to>4116174</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GCGGACGATCTTCACGGTCGCCACGCGGAC</Hsp_qseq>
+              <Hsp_hseq>GCGGACGATCTTCACGGTCGCCACGCGGAC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>14</Iteration_iter-num>
+      <Iteration_query-ID>lcl|14_0</Iteration_query-ID>
+      <Iteration_query-def>13_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>5442351</Hsp_hit-from>
+              <Hsp_hit-to>5442380</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTCTTGTTGGATGGCATACTCCGGCAGCCA</Hsp_qseq>
+              <Hsp_hseq>TTCTTGTTGGATGGCATACTCCGGCAGCCA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>15</Iteration_iter-num>
+      <Iteration_query-ID>lcl|15_0</Iteration_query-ID>
+      <Iteration_query-def>14_0.666667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4626492</Hsp_hit-from>
+              <Hsp_hit-to>4626521</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ACCCCGATATCGTCGCAGGCGTTGCCGCAC</Hsp_qseq>
+              <Hsp_hseq>ACCCCGATATCGTCGCAGGCGTTGCCGCAC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>16</Iteration_iter-num>
+      <Iteration_query-ID>lcl|16_0</Iteration_query-ID>
+      <Iteration_query-def>15_0.666667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>346897</Hsp_hit-from>
+              <Hsp_hit-to>346926</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_qseq>
+              <Hsp_hseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>2</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>1466536</Hsp_hit-from>
+              <Hsp_hit-to>1466565</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GAAGCGCCTCTTCCAGCGGAGACAGCAGCC</Hsp_qseq>
+              <Hsp_hseq>GAAGCGCCTCTTCCAGCGGAGACAGCAGCC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>3</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1750132</Hsp_hit-from>
+              <Hsp_hit-to>1750161</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_qseq>
+              <Hsp_hseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>4</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1908046</Hsp_hit-from>
+              <Hsp_hit-to>1908075</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_qseq>
+              <Hsp_hseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>5</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2279107</Hsp_hit-from>
+              <Hsp_hit-to>2279136</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_qseq>
+              <Hsp_hseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>6</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2813407</Hsp_hit-from>
+              <Hsp_hit-to>2813436</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_qseq>
+              <Hsp_hseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>7</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3939162</Hsp_hit-from>
+              <Hsp_hit-to>3939191</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_qseq>
+              <Hsp_hseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>8</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4656510</Hsp_hit-from>
+              <Hsp_hit-to>4656539</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_qseq>
+              <Hsp_hseq>GGCTGCTGTCTCCGCTGGAAGAGGCGCTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>17</Iteration_iter-num>
+      <Iteration_query-ID>lcl|17_0</Iteration_query-ID>
+      <Iteration_query-def>16_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2595157</Hsp_hit-from>
+              <Hsp_hit-to>2595186</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ATGTCCTGATCGAGCGGCGTTTTACCGACC</Hsp_qseq>
+              <Hsp_hseq>ATGTCCTGATCGAGCGGCGTTTTACCGACC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>18</Iteration_iter-num>
+      <Iteration_query-ID>lcl|18_0</Iteration_query-ID>
+      <Iteration_query-def>17_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>717743</Hsp_hit-from>
+              <Hsp_hit-to>717772</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGTGTTGAGTGTCCAGGTAATACGCTCTCG</Hsp_qseq>
+              <Hsp_hseq>GGTGTTGAGTGTCCAGGTAATACGCTCTCG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>20</Iteration_iter-num>
+      <Iteration_query-ID>lcl|20_0</Iteration_query-ID>
+      <Iteration_query-def>19_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1237938</Hsp_hit-from>
+              <Hsp_hit-to>1237967</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GTTGTAAGCGTCAGAACCGATGCGGTCGGT</Hsp_qseq>
+              <Hsp_hseq>GTTGTAAGCGTCAGAACCGATGCGGTCGGT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>21</Iteration_iter-num>
+      <Iteration_query-ID>lcl|21_0</Iteration_query-ID>
+      <Iteration_query-def>20_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4792054</Hsp_hit-from>
+              <Hsp_hit-to>4792083</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGACATCCTGTAGGCTGGCTTCAATGCGAC</Hsp_qseq>
+              <Hsp_hseq>CGACATCCTGTAGGCTGGCTTCAATGCGAC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>22</Iteration_iter-num>
+      <Iteration_query-ID>lcl|22_0</Iteration_query-ID>
+      <Iteration_query-def>21_0.733333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>269757</Hsp_hit-from>
+              <Hsp_hit-to>269786</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGGAAGGCTGACGGGCGTCCACACCACGGC</Hsp_qseq>
+              <Hsp_hseq>GGGAAGGCTGACGGGCGTCCACACCACGGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>2</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>274007</Hsp_hit-from>
+              <Hsp_hit-to>274036</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGGAAGGCTGACGGGCGTCCACACCACGGC</Hsp_qseq>
+              <Hsp_hseq>GGGAAGGCTGACGGGCGTCCACACCACGGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>23</Iteration_iter-num>
+      <Iteration_query-ID>lcl|23_0</Iteration_query-ID>
+      <Iteration_query-def>22_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2706426</Hsp_hit-from>
+              <Hsp_hit-to>2706455</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGGTATTCCTCAGTTCTCGCTGCATGCCTG</Hsp_qseq>
+              <Hsp_hseq>CGGTATTCCTCAGTTCTCGCTGCATGCCTG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>24</Iteration_iter-num>
+      <Iteration_query-ID>lcl|24_0</Iteration_query-ID>
+      <Iteration_query-def>23_0.600000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1512107</Hsp_hit-from>
+              <Hsp_hit-to>1512136</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTGCCGTTACGCACCACGCCTTCAGTAGCG</Hsp_qseq>
+              <Hsp_hseq>TTGCCGTTACGCACCACGCCTTCAGTAGCG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>25</Iteration_iter-num>
+      <Iteration_query-ID>lcl|25_0</Iteration_query-ID>
+      <Iteration_query-def>24_0.733333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2032108</Hsp_hit-from>
+              <Hsp_hit-to>2032137</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGCGGGCGCACCACGTCGCTGCTGCTGTTC</Hsp_qseq>
+              <Hsp_hseq>CGCGGGCGCACCACGTCGCTGCTGCTGTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>26</Iteration_iter-num>
+      <Iteration_query-ID>lcl|26_0</Iteration_query-ID>
+      <Iteration_query-def>25_0.266667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1343811</Hsp_hit-from>
+              <Hsp_hit-to>1343840</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTTTGGGAAAACTAAATACGCATCAAAAAT</Hsp_qseq>
+              <Hsp_hseq>TTTTGGGAAAACTAAATACGCATCAAAAAT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>27</Iteration_iter-num>
+      <Iteration_query-ID>lcl|27_0</Iteration_query-ID>
+      <Iteration_query-def>26_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3112551</Hsp_hit-from>
+              <Hsp_hit-to>3112580</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CAACGAGGCGCTACCGAGTTGTTCAATGCG</Hsp_qseq>
+              <Hsp_hseq>CAACGAGGCGCTACCGAGTTGTTCAATGCG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>29</Iteration_iter-num>
+      <Iteration_query-ID>lcl|29_0</Iteration_query-ID>
+      <Iteration_query-def>28_0.666667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4405442</Hsp_hit-from>
+              <Hsp_hit-to>4405471</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGCTCAATCTGCTCGCGCCTGGCTGGCGTT</Hsp_qseq>
+              <Hsp_hseq>CGCTCAATCTGCTCGCGCCTGGCTGGCGTT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>30</Iteration_iter-num>
+      <Iteration_query-ID>lcl|30_0</Iteration_query-ID>
+      <Iteration_query-def>29_0.700000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3600547</Hsp_hit-from>
+              <Hsp_hit-to>3600576</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CTCGTCCGGCGGGCGGTTTTGCCGACAAGG</Hsp_qseq>
+              <Hsp_hseq>CTCGTCCGGCGGGCGGTTTTGCCGACAAGG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>33</Iteration_iter-num>
+      <Iteration_query-ID>lcl|33_0</Iteration_query-ID>
+      <Iteration_query-def>32_0.600000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1435670</Hsp_hit-from>
+              <Hsp_hit-to>1435699</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGAGGATGGTCCTCTGACTCTGCAGGCGCA</Hsp_qseq>
+              <Hsp_hseq>TGAGGATGGTCCTCTGACTCTGCAGGCGCA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>34</Iteration_iter-num>
+      <Iteration_query-ID>lcl|34_0</Iteration_query-ID>
+      <Iteration_query-def>33_0.300000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3788178</Hsp_hit-from>
+              <Hsp_hit-to>3788207</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TAGTAACCCTTTAATAAGATTGTCGATTAG</Hsp_qseq>
+              <Hsp_hseq>TAGTAACCCTTTAATAAGATTGTCGATTAG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>35</Iteration_iter-num>
+      <Iteration_query-ID>lcl|35_0</Iteration_query-ID>
+      <Iteration_query-def>34_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>484499</Hsp_hit-from>
+              <Hsp_hit-to>484528</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGATGTTAAATGCATGGCACCTGCCGGTGC</Hsp_qseq>
+              <Hsp_hseq>TGATGTTAAATGCATGGCACCTGCCGGTGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>36</Iteration_iter-num>
+      <Iteration_query-ID>lcl|36_0</Iteration_query-ID>
+      <Iteration_query-def>35_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2250064</Hsp_hit-from>
+              <Hsp_hit-to>2250093</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ACTGCTTTGCCGAGATATTCGAGGTTAACC</Hsp_qseq>
+              <Hsp_hseq>ACTGCTTTGCCGAGATATTCGAGGTTAACC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>37</Iteration_iter-num>
+      <Iteration_query-ID>lcl|37_0</Iteration_query-ID>
+      <Iteration_query-def>36_0.433333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3698178</Hsp_hit-from>
+              <Hsp_hit-to>3698207</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGTTGGCAACATGGCGAGCGTAATCAATTA</Hsp_qseq>
+              <Hsp_hseq>TGTTGGCAACATGGCGAGCGTAATCAATTA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>39</Iteration_iter-num>
+      <Iteration_query-ID>lcl|39_0</Iteration_query-ID>
+      <Iteration_query-def>38_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4242501</Hsp_hit-from>
+              <Hsp_hit-to>4242530</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GCAGCAGGATCGGATCGAACTCTGGTTTCT</Hsp_qseq>
+              <Hsp_hseq>GCAGCAGGATCGGATCGAACTCTGGTTTCT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>40</Iteration_iter-num>
+      <Iteration_query-ID>lcl|40_0</Iteration_query-ID>
+      <Iteration_query-def>39_0.400000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3683151</Hsp_hit-from>
+              <Hsp_hit-to>3683180</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TAATGCGGCATTCTCCTGATTTATTGTCAC</Hsp_qseq>
+              <Hsp_hseq>TAATGCGGCATTCTCCTGATTTATTGTCAC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>42</Iteration_iter-num>
+      <Iteration_query-ID>lcl|42_0</Iteration_query-ID>
+      <Iteration_query-def>41_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>789086</Hsp_hit-from>
+              <Hsp_hit-to>789115</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AGCTGACGGTCAGCAGGGATACTTCCTGCA</Hsp_qseq>
+              <Hsp_hseq>AGCTGACGGTCAGCAGGGATACTTCCTGCA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>43</Iteration_iter-num>
+      <Iteration_query-ID>lcl|43_0</Iteration_query-ID>
+      <Iteration_query-def>42_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1714771</Hsp_hit-from>
+              <Hsp_hit-to>1714800</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GCAGTGACTACATCCGCGAGGTGAATGTGG</Hsp_qseq>
+              <Hsp_hseq>GCAGTGACTACATCCGCGAGGTGAATGTGG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>44</Iteration_iter-num>
+      <Iteration_query-ID>lcl|44_0</Iteration_query-ID>
+      <Iteration_query-def>43_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4272582</Hsp_hit-from>
+              <Hsp_hit-to>4272611</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GCTTTTTCCAGCATCAACGCCACTGAACAA</Hsp_qseq>
+              <Hsp_hseq>GCTTTTTCCAGCATCAACGCCACTGAACAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>45</Iteration_iter-num>
+      <Iteration_query-ID>lcl|45_0</Iteration_query-ID>
+      <Iteration_query-def>44_0.433333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>5481023</Hsp_hit-from>
+              <Hsp_hit-to>5481052</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AAAGTGGTAGATAACGTGGTGCAGACTATG</Hsp_qseq>
+              <Hsp_hseq>AAAGTGGTAGATAACGTGGTGCAGACTATG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>46</Iteration_iter-num>
+      <Iteration_query-ID>lcl|46_0</Iteration_query-ID>
+      <Iteration_query-def>45_0.500000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>1271829</Hsp_hit-from>
+              <Hsp_hit-to>1271858</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ATCCGCGATAAGGTCATCCGGTATCAGGAA</Hsp_qseq>
+              <Hsp_hseq>ATCCGCGATAAGGTCATCCGGTATCAGGAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>2</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>1357952</Hsp_hit-from>
+              <Hsp_hit-to>1357981</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ATCCGCGATAAGGTCATCCGGTATCAGGAA</Hsp_qseq>
+              <Hsp_hseq>ATCCGCGATAAGGTCATCCGGTATCAGGAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>3</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2310674</Hsp_hit-from>
+              <Hsp_hit-to>2310703</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTCCTGATACCGGATGACCTTATCGCGGAT</Hsp_qseq>
+              <Hsp_hseq>TTCCTGATACCGGATGACCTTATCGCGGAT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>4</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2769319</Hsp_hit-from>
+              <Hsp_hit-to>2769348</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTCCTGATACCGGATGACCTTATCGCGGAT</Hsp_qseq>
+              <Hsp_hseq>TTCCTGATACCGGATGACCTTATCGCGGAT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>5</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2991293</Hsp_hit-from>
+              <Hsp_hit-to>2991322</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTCCTGATACCGGATGACCTTATCGCGGAT</Hsp_qseq>
+              <Hsp_hseq>TTCCTGATACCGGATGACCTTATCGCGGAT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>47</Iteration_iter-num>
+      <Iteration_query-ID>lcl|47_0</Iteration_query-ID>
+      <Iteration_query-def>46_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1080570</Hsp_hit-from>
+              <Hsp_hit-to>1080599</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ACCTGTACCCCAGGACCATGGTACATTTAT</Hsp_qseq>
+              <Hsp_hseq>ACCTGTACCCCAGGACCATGGTACATTTAT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>2</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1476177</Hsp_hit-from>
+              <Hsp_hit-to>1476206</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ACCTGTACCCCAGGACCATGGTACATTTAT</Hsp_qseq>
+              <Hsp_hseq>ACCTGTACCCCAGGACCATGGTACATTTAT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>48</Iteration_iter-num>
+      <Iteration_query-ID>lcl|48_0</Iteration_query-ID>
+      <Iteration_query-def>47_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2887871</Hsp_hit-from>
+              <Hsp_hit-to>2887900</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ACAATGCAACCAGACCAGCCCGGATCGATA</Hsp_qseq>
+              <Hsp_hseq>ACAATGCAACCAGACCAGCCCGGATCGATA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>50</Iteration_iter-num>
+      <Iteration_query-ID>lcl|50_0</Iteration_query-ID>
+      <Iteration_query-def>49_0.433333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4152853</Hsp_hit-from>
+              <Hsp_hit-to>4152882</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGTTATACGGAACAACATTTAACTCCAGCG</Hsp_qseq>
+              <Hsp_hseq>CGTTATACGGAACAACATTTAACTCCAGCG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>51</Iteration_iter-num>
+      <Iteration_query-ID>lcl|51_0</Iteration_query-ID>
+      <Iteration_query-def>50_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3568677</Hsp_hit-from>
+              <Hsp_hit-to>3568706</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTGCTGTTGCCATCGCTTTTCAGGACATAC</Hsp_qseq>
+              <Hsp_hseq>TTGCTGTTGCCATCGCTTTTCAGGACATAC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>52</Iteration_iter-num>
+      <Iteration_query-ID>lcl|52_0</Iteration_query-ID>
+      <Iteration_query-def>51_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3739788</Hsp_hit-from>
+              <Hsp_hit-to>3739817</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGACGGTATCGGTACGCAGATTGTGATGGA</Hsp_qseq>
+              <Hsp_hseq>CGACGGTATCGGTACGCAGATTGTGATGGA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>53</Iteration_iter-num>
+      <Iteration_query-ID>lcl|53_0</Iteration_query-ID>
+      <Iteration_query-def>52_0.366667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3789407</Hsp_hit-from>
+              <Hsp_hit-to>3789436</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ATGAGCATCAACATCGAAATCTCAAACCAA</Hsp_qseq>
+              <Hsp_hseq>ATGAGCATCAACATCGAAATCTCAAACCAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>55</Iteration_iter-num>
+      <Iteration_query-ID>lcl|55_0</Iteration_query-ID>
+      <Iteration_query-def>54_0.400000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3161991</Hsp_hit-from>
+              <Hsp_hit-to>3162020</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GTATTCTTCATTGATTTGTAAGCGGGTACC</Hsp_qseq>
+              <Hsp_hseq>GTATTCTTCATTGATTTGTAAGCGGGTACC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>56</Iteration_iter-num>
+      <Iteration_query-ID>lcl|56_0</Iteration_query-ID>
+      <Iteration_query-def>55_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4384753</Hsp_hit-from>
+              <Hsp_hit-to>4384782</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AAGCGCTGTTCAACATAAATTGGCTGACGG</Hsp_qseq>
+              <Hsp_hseq>AAGCGCTGTTCAACATAAATTGGCTGACGG</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>57</Iteration_iter-num>
+      <Iteration_query-ID>lcl|57_0</Iteration_query-ID>
+      <Iteration_query-def>56_0.433333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2915787</Hsp_hit-from>
+              <Hsp_hit-to>2915816</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AACCGTCAGCTCTTTACGCAATATTTTGCC</Hsp_qseq>
+              <Hsp_hseq>AACCGTCAGCTCTTTACGCAATATTTTGCC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>58</Iteration_iter-num>
+      <Iteration_query-ID>lcl|58_0</Iteration_query-ID>
+      <Iteration_query-def>57_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>294604</Hsp_hit-from>
+              <Hsp_hit-to>294633</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGCGAAATCCTCGACGTCCAGGCACGTATT</Hsp_qseq>
+              <Hsp_hseq>CGCGAAATCCTCGACGTCCAGGCACGTATT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>59</Iteration_iter-num>
+      <Iteration_query-ID>lcl|59_0</Iteration_query-ID>
+      <Iteration_query-def>58_0.300000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1553483</Hsp_hit-from>
+              <Hsp_hit-to>1553512</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GTTATCCATTAAAATAGATCGGATCGATAT</Hsp_qseq>
+              <Hsp_hseq>GTTATCCATTAAAATAGATCGGATCGATAT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>60</Iteration_iter-num>
+      <Iteration_query-ID>lcl|60_0</Iteration_query-ID>
+      <Iteration_query-def>59_0.300000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1351506</Hsp_hit-from>
+              <Hsp_hit-to>1351535</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AAAATCTGCATATCATGATAAGAGTGGTTA</Hsp_qseq>
+              <Hsp_hseq>AAAATCTGCATATCATGATAAGAGTGGTTA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>2</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>2997458</Hsp_hit-from>
+              <Hsp_hit-to>2997487</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TAACCACTCTTATCATGATATGCAGATTTT</Hsp_qseq>
+              <Hsp_hseq>TAACCACTCTTATCATGATATGCAGATTTT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>62</Iteration_iter-num>
+      <Iteration_query-ID>lcl|62_0</Iteration_query-ID>
+      <Iteration_query-def>61_0.366667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1987157</Hsp_hit-from>
+              <Hsp_hit-to>1987186</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TCAGTTTGAAAGCTTTGTCAGCCGTTTTTT</Hsp_qseq>
+              <Hsp_hseq>TCAGTTTGAAAGCTTTGTCAGCCGTTTTTT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>63</Iteration_iter-num>
+      <Iteration_query-ID>lcl|63_0</Iteration_query-ID>
+      <Iteration_query-def>62_0.366667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1667830</Hsp_hit-from>
+              <Hsp_hit-to>1667859</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AATCCTAAACCTAGGAATGCCAGAATATCT</Hsp_qseq>
+              <Hsp_hseq>AATCCTAAACCTAGGAATGCCAGAATATCT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>64</Iteration_iter-num>
+      <Iteration_query-ID>lcl|64_0</Iteration_query-ID>
+      <Iteration_query-def>63_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>231418</Hsp_hit-from>
+              <Hsp_hit-to>231447</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_qseq>
+              <Hsp_hseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>2</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3516862</Hsp_hit-from>
+              <Hsp_hit-to>3516891</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TCACGCTCGCAGTCAAGCTGGCTTATGCCA</Hsp_qseq>
+              <Hsp_hseq>TCACGCTCGCAGTCAAGCTGGCTTATGCCA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>3</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4226650</Hsp_hit-from>
+              <Hsp_hit-to>4226679</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TCACGCTCGCAGTCAAGCTGGCTTATGCCA</Hsp_qseq>
+              <Hsp_hseq>TCACGCTCGCAGTCAAGCTGGCTTATGCCA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>4</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>4808457</Hsp_hit-from>
+              <Hsp_hit-to>4808486</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_qseq>
+              <Hsp_hseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>5</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>4904735</Hsp_hit-from>
+              <Hsp_hit-to>4904764</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_qseq>
+              <Hsp_hseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>6</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>5048915</Hsp_hit-from>
+              <Hsp_hit-to>5048944</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_qseq>
+              <Hsp_hseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+            <Hsp>
+              <Hsp_num>7</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>30</Hsp_query-from>
+              <Hsp_query-to>1</Hsp_query-to>
+              <Hsp_hit-from>5089922</Hsp_hit-from>
+              <Hsp_hit-to>5089951</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>-1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_qseq>
+              <Hsp_hseq>TGGCATAAGCCAGCTTGACTGCGAGCGTGA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>65</Iteration_iter-num>
+      <Iteration_query-ID>lcl|65_0</Iteration_query-ID>
+      <Iteration_query-def>64_0.500000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>5440369</Hsp_hit-from>
+              <Hsp_hit-to>5440398</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GAGATGACGGTTGCAGAGTCATGCGTTTGA</Hsp_qseq>
+              <Hsp_hseq>GAGATGACGGTTGCAGAGTCATGCGTTTGA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>66</Iteration_iter-num>
+      <Iteration_query-ID>lcl|66_0</Iteration_query-ID>
+      <Iteration_query-def>65_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3963858</Hsp_hit-from>
+              <Hsp_hit-to>3963887</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TCTGGATCACGCGCAAACACTGGCTATCGT</Hsp_qseq>
+              <Hsp_hseq>TCTGGATCACGCGCAAACACTGGCTATCGT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>67</Iteration_iter-num>
+      <Iteration_query-ID>lcl|67_0</Iteration_query-ID>
+      <Iteration_query-def>66_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4142497</Hsp_hit-from>
+              <Hsp_hit-to>4142526</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TCTGCTTTAGCAAACAGAGTGTGGTCACGA</Hsp_qseq>
+              <Hsp_hseq>TCTGCTTTAGCAAACAGAGTGTGGTCACGA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>68</Iteration_iter-num>
+      <Iteration_query-ID>lcl|68_0</Iteration_query-ID>
+      <Iteration_query-def>67_0.400000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3532236</Hsp_hit-from>
+              <Hsp_hit-to>3532265</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GCCTTTTGTCTGATCATCCACAATAATGAC</Hsp_qseq>
+              <Hsp_hseq>GCCTTTTGTCTGATCATCCACAATAATGAC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>69</Iteration_iter-num>
+      <Iteration_query-ID>lcl|69_0</Iteration_query-ID>
+      <Iteration_query-def>68_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>779994</Hsp_hit-from>
+              <Hsp_hit-to>780023</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GAAGGATAGTTGGTCAGCAACACCAGCGGC</Hsp_qseq>
+              <Hsp_hseq>GAAGGATAGTTGGTCAGCAACACCAGCGGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>71</Iteration_iter-num>
+      <Iteration_query-ID>lcl|71_0</Iteration_query-ID>
+      <Iteration_query-def>70_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1317239</Hsp_hit-from>
+              <Hsp_hit-to>1317268</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGGTGTTCAGCATCTCAACGGTAATTCGCT</Hsp_qseq>
+              <Hsp_hseq>TGGTGTTCAGCATCTCAACGGTAATTCGCT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>72</Iteration_iter-num>
+      <Iteration_query-ID>lcl|72_0</Iteration_query-ID>
+      <Iteration_query-def>71_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>5080463</Hsp_hit-from>
+              <Hsp_hit-to>5080492</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CAGGATGCAAACTGCCGGGAGATCCAGTTA</Hsp_qseq>
+              <Hsp_hseq>CAGGATGCAAACTGCCGGGAGATCCAGTTA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>73</Iteration_iter-num>
+      <Iteration_query-ID>lcl|73_0</Iteration_query-ID>
+      <Iteration_query-def>72_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3605409</Hsp_hit-from>
+              <Hsp_hit-to>3605438</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AACTGGAAGGGCTTGGGATGACACAACAGC</Hsp_qseq>
+              <Hsp_hseq>AACTGGAAGGGCTTGGGATGACACAACAGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>74</Iteration_iter-num>
+      <Iteration_query-ID>lcl|74_0</Iteration_query-ID>
+      <Iteration_query-def>73_0.500000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3280815</Hsp_hit-from>
+              <Hsp_hit-to>3280844</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTTAAGCGCCAACCAGGCTTCTTTGGTTGC</Hsp_qseq>
+              <Hsp_hseq>TTTAAGCGCCAACCAGGCTTCTTTGGTTGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>76</Iteration_iter-num>
+      <Iteration_query-ID>lcl|76_0</Iteration_query-ID>
+      <Iteration_query-def>75_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2230266</Hsp_hit-from>
+              <Hsp_hit-to>2230295</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ATAACCCTCTGCAACCGCCGCTTCAGCAAA</Hsp_qseq>
+              <Hsp_hseq>ATAACCCTCTGCAACCGCCGCTTCAGCAAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>77</Iteration_iter-num>
+      <Iteration_query-ID>lcl|77_0</Iteration_query-ID>
+      <Iteration_query-def>76_0.600000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>462631</Hsp_hit-from>
+              <Hsp_hit-to>462660</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGAAGCCGTACAACGGGCGCTGGAATTCGC</Hsp_qseq>
+              <Hsp_hseq>TGAAGCCGTACAACGGGCGCTGGAATTCGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>78</Iteration_iter-num>
+      <Iteration_query-ID>lcl|78_0</Iteration_query-ID>
+      <Iteration_query-def>77_0.700000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>2939076</Hsp_hit-from>
+              <Hsp_hit-to>2939105</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GAGCTGCAACGCGGTCAGCCAGCTGGCGGT</Hsp_qseq>
+              <Hsp_hseq>GAGCTGCAACGCGGTCAGCCAGCTGGCGGT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>79</Iteration_iter-num>
+      <Iteration_query-ID>lcl|79_0</Iteration_query-ID>
+      <Iteration_query-def>78_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>5422053</Hsp_hit-from>
+              <Hsp_hit-to>5422082</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGGAGTATCCGTTCCCCAACGACAAGCATC</Hsp_qseq>
+              <Hsp_hseq>CGGAGTATCCGTTCCCCAACGACAAGCATC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>80</Iteration_iter-num>
+      <Iteration_query-ID>lcl|80_0</Iteration_query-ID>
+      <Iteration_query-def>79_0.500000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4387420</Hsp_hit-from>
+              <Hsp_hit-to>4387449</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AATACCGGGAAGAGACAACGGGGTCTCTTT</Hsp_qseq>
+              <Hsp_hseq>AATACCGGGAAGAGACAACGGGGTCTCTTT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>82</Iteration_iter-num>
+      <Iteration_query-ID>lcl|82_0</Iteration_query-ID>
+      <Iteration_query-def>81_0.433333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1037150</Hsp_hit-from>
+              <Hsp_hit-to>1037179</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ATTAATGTTGCCGGCACAACATAATAGGGC</Hsp_qseq>
+              <Hsp_hseq>ATTAATGTTGCCGGCACAACATAATAGGGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>83</Iteration_iter-num>
+      <Iteration_query-ID>lcl|83_0</Iteration_query-ID>
+      <Iteration_query-def>82_0.500000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1182188</Hsp_hit-from>
+              <Hsp_hit-to>1182217</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ACTGGGTTGCTCTGAACAAGAAAGGCGCTA</Hsp_qseq>
+              <Hsp_hseq>ACTGGGTTGCTCTGAACAAGAAAGGCGCTA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>84</Iteration_iter-num>
+      <Iteration_query-ID>lcl|84_0</Iteration_query-ID>
+      <Iteration_query-def>83_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>5430210</Hsp_hit-from>
+              <Hsp_hit-to>5430239</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGCCAGGGACGTATCGCGTCGATATCTATT</Hsp_qseq>
+              <Hsp_hseq>CGCCAGGGACGTATCGCGTCGATATCTATT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>85</Iteration_iter-num>
+      <Iteration_query-ID>lcl|85_0</Iteration_query-ID>
+      <Iteration_query-def>84_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>310995</Hsp_hit-from>
+              <Hsp_hit-to>311024</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGCTCGTTCCCGTCGTGATGAAGCTCGAAA</Hsp_qseq>
+              <Hsp_hseq>TGCTCGTTCCCGTCGTGATGAAGCTCGAAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>86</Iteration_iter-num>
+      <Iteration_query-ID>lcl|86_0</Iteration_query-ID>
+      <Iteration_query-def>85_0.500000</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4174173</Hsp_hit-from>
+              <Hsp_hit-to>4174202</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AGGAAAGCAAACAACACGACCACCATCAGC</Hsp_qseq>
+              <Hsp_hseq>AGGAAAGCAAACAACACGACCACCATCAGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>87</Iteration_iter-num>
+      <Iteration_query-ID>lcl|87_0</Iteration_query-ID>
+      <Iteration_query-def>86_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>163690</Hsp_hit-from>
+              <Hsp_hit-to>163719</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>GGCAACGCAGGCGCATGATTCTGCTTGGAA</Hsp_qseq>
+              <Hsp_hseq>GGCAACGCAGGCGCATGATTCTGCTTGGAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>89</Iteration_iter-num>
+      <Iteration_query-ID>lcl|89_0</Iteration_query-ID>
+      <Iteration_query-def>88_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3071785</Hsp_hit-from>
+              <Hsp_hit-to>3071814</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CGTACCGGGCTGAAAGTAGAAGAGCGTTTC</Hsp_qseq>
+              <Hsp_hseq>CGTACCGGGCTGAAAGTAGAAGAGCGTTTC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>91</Iteration_iter-num>
+      <Iteration_query-ID>lcl|91_0</Iteration_query-ID>
+      <Iteration_query-def>90_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>692131</Hsp_hit-from>
+              <Hsp_hit-to>692160</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>ATCACCGTTTCGCTAACCGGTACGTTTAAC</Hsp_qseq>
+              <Hsp_hseq>ATCACCGTTTCGCTAACCGGTACGTTTAAC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>92</Iteration_iter-num>
+      <Iteration_query-ID>lcl|92_0</Iteration_query-ID>
+      <Iteration_query-def>91_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3307050</Hsp_hit-from>
+              <Hsp_hit-to>3307079</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTCGCCCGGCAAGCTTACCCAACGCTTATC</Hsp_qseq>
+              <Hsp_hseq>TTCGCCCGGCAAGCTTACCCAACGCTTATC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>95</Iteration_iter-num>
+      <Iteration_query-ID>lcl|95_0</Iteration_query-ID>
+      <Iteration_query-def>94_0.466667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>1813009</Hsp_hit-from>
+              <Hsp_hit-to>1813038</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>CCACGGTGATATCTGGTGCCATACTGATAA</Hsp_qseq>
+              <Hsp_hseq>CCACGGTGATATCTGGTGCCATACTGATAA</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>97</Iteration_iter-num>
+      <Iteration_query-ID>lcl|97_0</Iteration_query-ID>
+      <Iteration_query-def>96_0.533333</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>573782</Hsp_hit-from>
+              <Hsp_hit-to>573811</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TTGCCGGGAAGAGAGATATCAATGGCAGGC</Hsp_qseq>
+              <Hsp_hseq>TTGCCGGGAAGAGAGATATCAATGGCAGGC</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>98</Iteration_iter-num>
+      <Iteration_query-ID>lcl|98_0</Iteration_query-ID>
+      <Iteration_query-def>97_0.566667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>3570563</Hsp_hit-from>
+              <Hsp_hit-to>3570592</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>TGCGCCGCCGGATTGTTGCTCAACATGCTT</Hsp_qseq>
+              <Hsp_hseq>TGCGCCGCCGGATTGTTGCTCAACATGCTT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+    <Iteration>
+      <Iteration_iter-num>99</Iteration_iter-num>
+      <Iteration_query-ID>lcl|99_0</Iteration_query-ID>
+      <Iteration_query-def>98_0.366667</Iteration_query-def>
+      <Iteration_query-len>30</Iteration_query-len>
+      <Iteration_hits>
+        <Hit>
+          <Hit_num>1</Hit_num>
+          <Hit_id>gnl|BL_ORD_ID|0</Hit_id>
+          <Hit_def>/depot/data2/galaxy/microbes/eschColi_O157H7EDL933_1/seq/chr.nib:1-5528445</Hit_def>
+          <Hit_accession>0</Hit_accession>
+          <Hit_len>5528445</Hit_len>
+          <Hit_hsps>
+            <Hsp>
+              <Hsp_num>1</Hsp_num>
+              <Hsp_bit-score>59.96</Hsp_bit-score>
+              <Hsp_score>30</Hsp_score>
+              <Hsp_evalue>8.38112e-11</Hsp_evalue>
+              <Hsp_query-from>1</Hsp_query-from>
+              <Hsp_query-to>30</Hsp_query-to>
+              <Hsp_hit-from>4545136</Hsp_hit-from>
+              <Hsp_hit-to>4545165</Hsp_hit-to>
+              <Hsp_query-frame>1</Hsp_query-frame>
+              <Hsp_hit-frame>1</Hsp_hit-frame>
+              <Hsp_identity>30</Hsp_identity>
+              <Hsp_positive>30</Hsp_positive>
+              <Hsp_align-len>30</Hsp_align-len>
+              <Hsp_qseq>AACGCGCTAACCGCCAATAATAACAAAATT</Hsp_qseq>
+              <Hsp_hseq>AACGCGCTAACCGCCAATAATAACAAAATT</Hsp_hseq>
+              <Hsp_midline>||||||||||||||||||||||||||||||</Hsp_midline>
+            </Hsp>
+          </Hit_hsps>
+        </Hit>
+      </Iteration_hits>
+      <Iteration_stat>
+        <Statistics>
+          <Statistics_db-num>1</Statistics_db-num>
+          <Statistics_db-len>5528445</Statistics_db-len>
+          <Statistics_hsp-len>0</Statistics_hsp-len>
+          <Statistics_eff-space>0</Statistics_eff-space>
+          <Statistics_kappa>0.711</Statistics_kappa>
+          <Statistics_lambda>1.374</Statistics_lambda>
+          <Statistics_entropy>1.31</Statistics_entropy>
+        </Statistics>
+      </Iteration_stat>
+    </Iteration>
+  </BlastOutput_iterations>
+</BlastOutput>
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.axes b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.axes
new file mode 100644
index 0000000..1e544a3
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.axes
@@ -0,0 +1,42 @@
+group	axis1	axis2	axis3	axis4	axis5	axis6	axis7	axis8	axis9	axis10	axis11	axis12	axis13	axis14	axis15	axis16	axis17	axis18	axis19	axis20	axis21	axis22	axis23	axis24	axis25	axis26	axis27	axis28	axis29	axis30	axis31	axis32	axis33	axis34	axis35	axis36	axis37	axis38	axis39	axis40	axis41	axis42	axis43	axis44	axis45	axis46	axis47	axis48	axis49	axis50	axis51	axis52	axis53	axis54	axis55	axis56	axis57	axis58	axis59	axis60	axis61	axis62	axis63	axis64	axis65	axis66	axis67	axis68	axis69	axis70	axis7 [...]
+U68589	0.064970	-0.093573	0.049365	-0.002984	0.124961	-0.108993	-0.094822	-0.002607	-0.153501	0.040014	-0.094989	-0.022746	-0.078404	0.001551	0.049195	-0.081796	0.070761	-0.020429	-0.020736	-0.145469	0.015975	0.059624	-0.026394	0.062380	0.023158	-0.081711	0.088897	-0.056857	0.001939	-0.034163	-0.009858	0.073623	-0.021051	-0.105845	-0.010399	-0.046578	-0.054367	0.004614	0.039577	-0.015874	0.047089	0.005236	0.000891	-0.028628	0.012761	0.023969	-0.005549	0.032011	-0.001403	0.007897	0.046736 [...]
+U68590	-0.065054	0.009551	-0.023692	0.056710	0.019710	0.002158	0.047565	0.013482	-0.020494	0.097522	0.136242	0.128859	0.048848	-0.094141	0.060139	-0.083202	-0.005604	-0.059574	0.010508	-0.025733	0.064555	0.092162	-0.004115	-0.042551	-0.024393	0.057596	0.092057	-0.042052	0.039559	0.020819	0.021999	-0.020096	-0.032630	0.023086	-0.066602	-0.007755	0.003435	-0.033439	0.000451	0.085474	-0.011204	0.010680	-0.027816	0.016128	0.010000	-0.062829	0.015542	0.023166	-0.011568	0.003526	-0.015250	-0.0 [...]
+U68591	0.100568	-0.081005	0.022725	0.178999	-0.018758	-0.140047	-0.089799	0.037543	0.173810	-0.060580	0.000954	-0.020918	-0.129375	-0.013041	0.054722	-0.098256	0.068154	-0.058943	-0.055626	0.066430	-0.085158	0.044632	0.044187	-0.040477	0.024711	0.053367	-0.014788	-0.013883	-0.003015	0.059725	-0.005835	0.038787	-0.007487	0.036995	0.098994	-0.012158	-0.004844	0.036558	-0.050647	0.030083	-0.010531	0.011635	0.014683	0.008616	0.005577	-0.004963	-0.050535	0.059924	-0.026765	0.014505	0.012198	0 [...]
+U68592	-0.151995	0.057494	0.162283	0.004289	-0.023613	-0.024830	-0.017908	-0.079047	0.082954	-0.019995	-0.006029	-0.068517	0.035500	-0.089014	-0.075431	-0.007438	-0.020781	0.052088	0.059126	0.021933	0.062302	-0.089185	-0.019640	0.042963	-0.022708	0.058816	0.069397	-0.008808	0.018097	0.046916	-0.011755	-0.010756	0.027169	0.043565	0.065153	-0.011666	0.071674	0.091492	0.021316	0.005553	-0.031093	0.026076	0.037299	-0.061657	0.011902	0.020311	-0.030727	0.026787	0.018004	-0.003713	-0.019660	-0 [...]
+U68593	-0.039480	-0.020067	-0.022333	0.105234	0.027505	-0.002196	0.004058	0.074365	-0.014630	0.116512	-0.011673	0.033273	0.049758	-0.038546	0.016626	-0.020054	-0.010343	0.048617	0.025593	-0.004945	0.027862	-0.040295	-0.058529	0.030829	0.015358	-0.040125	0.031521	-0.004194	-0.145991	0.027678	0.045035	-0.008249	-0.012399	0.020338	0.041654	-0.039516	-0.023230	0.100278	-0.027241	0.001047	-0.070158	0.025234	-0.067700	-0.092084	-0.002777	-0.068915	0.046873	-0.031381	-0.039335	0.004839	-0.01908 [...]
+U68594	0.044021	-0.055638	-0.011619	0.059544	0.043250	-0.010999	0.002471	-0.109754	-0.053780	-0.109806	-0.026834	0.033594	0.001853	-0.009407	-0.096803	0.115331	0.084247	0.016997	0.059545	0.019521	0.015426	-0.085112	0.021698	-0.065795	-0.013511	-0.132903	0.053275	0.073810	-0.060124	-0.027112	0.040044	0.061265	-0.000902	-0.055064	0.011682	0.034925	0.010117	-0.009220	-0.038044	0.067621	0.015119	0.086808	-0.039446	0.077465	0.050161	-0.008221	0.044043	0.032322	0.026014	0.013765	0.018811	-0.04 [...]
+U68595	-0.149434	0.038009	0.189550	-0.004502	-0.017025	0.064885	0.043002	0.042743	-0.004355	0.064262	-0.030819	0.029347	-0.080899	0.006686	0.039654	-0.016045	0.002212	-0.050907	0.031126	0.035999	-0.009112	-0.025360	-0.007245	-0.075190	-0.033080	-0.032696	-0.044220	0.026717	-0.005847	0.083093	0.048470	0.021545	-0.080002	0.058588	-0.025770	0.053055	-0.046365	-0.054288	0.042294	-0.030211	0.070826	0.037796	-0.025523	-0.049328	-0.004900	-0.081032	-0.037108	0.018754	-0.020466	-0.051339	0.02501 [...]
+U68596	0.133793	-0.021319	0.020175	-0.063384	-0.098256	-0.019684	-0.082036	0.042110	0.025666	-0.016072	0.012990	-0.002327	0.046691	0.003207	0.033494	0.035535	-0.000967	-0.027681	-0.044321	-0.003185	0.053516	0.004598	-0.063730	-0.053077	-0.053579	-0.004819	-0.028633	0.026852	0.027572	0.008949	0.034315	0.117495	-0.054493	-0.039230	0.024887	0.043023	-0.027191	0.039477	0.030567	0.035125	-0.055176	0.006389	0.043828	0.034964	0.047288	-0.046511	0.007923	0.032125	-0.036025	0.014156	0.028791	-0.0 [...]
+U68597	-0.014667	-0.029661	-0.033680	0.051144	0.005386	0.044414	0.042409	0.125591	-0.043771	-0.138852	-0.022479	0.009528	-0.024230	-0.029027	0.044269	0.040019	-0.006986	0.072400	0.068085	-0.067073	0.013327	-0.004733	-0.003824	0.017236	-0.026588	0.029154	0.055446	-0.008375	0.043667	0.010453	-0.112300	-0.027393	-0.025036	0.017038	-0.012668	0.006933	-0.025935	0.078785	0.010144	-0.079521	-0.024905	0.024615	0.032711	0.044572	-0.021944	-0.083347	0.050281	-0.012411	-0.073945	0.028697	-0.044205	 [...]
+U68598	-0.094469	0.002099	-0.088366	-0.009354	-0.013578	-0.014047	-0.032695	-0.014510	0.045069	-0.003314	-0.004789	0.030347	0.055915	-0.016497	0.035463	-0.006432	0.011798	-0.024252	0.015406	-0.001775	0.001608	-0.018669	0.012427	0.008361	-0.048570	-0.021833	-0.063773	-0.007716	0.053302	0.056045	-0.030314	-0.021273	0.071617	-0.010293	0.044386	0.000052	-0.041162	0.005751	-0.045227	-0.015133	0.068189	0.054064	-0.025536	-0.024755	0.024362	0.010679	0.012958	-0.061628	-0.040968	0.013426	0.03291 [...]
+U68599	0.090684	-0.033310	0.032898	-0.005209	0.042399	-0.063719	-0.053055	-0.021988	0.000910	0.071725	-0.105574	-0.017161	0.002157	-0.176170	-0.131226	-0.056418	-0.014522	0.002644	-0.005757	-0.220325	0.058290	-0.027293	0.092474	-0.037131	-0.054004	-0.012472	-0.093560	0.017697	0.052528	-0.012313	0.040171	-0.020857	0.048154	0.013215	0.015512	0.035525	0.028495	0.029550	-0.066418	-0.024968	0.026617	-0.014755	-0.018808	-0.006770	0.009482	-0.024425	0.006502	-0.022507	-0.035016	-0.030057	-0.026 [...]
+U68600	0.084271	-0.063564	0.059200	0.161046	0.021519	-0.122676	-0.077872	0.040280	0.163031	-0.044954	-0.004443	0.035418	-0.141332	-0.063820	0.040021	-0.091338	0.006898	0.009917	0.043013	0.087335	-0.044287	-0.045268	0.019336	0.037343	-0.016603	0.010605	0.071104	0.049708	-0.006785	-0.071378	0.032464	-0.048668	0.015178	-0.016127	-0.051606	0.011513	0.006575	-0.016728	0.050245	0.009067	0.012267	-0.032475	-0.045725	0.002669	-0.010948	0.023236	0.043500	-0.073121	0.017380	-0.034790	-0.019074	-0. [...]
+U68601	0.088357	-0.032853	0.025467	-0.116589	-0.082486	0.031488	-0.122925	0.034286	0.002519	-0.000401	-0.011656	0.038536	0.062432	-0.013998	-0.007548	0.007959	0.004253	0.017832	-0.013880	-0.042399	-0.061440	-0.056310	0.044259	0.045265	0.000718	-0.010832	-0.013965	-0.062846	-0.057648	-0.030878	0.009841	0.005448	-0.065887	0.012508	-0.020501	0.025877	-0.017055	0.001532	0.070578	0.042621	-0.011989	-0.113754	-0.025115	0.093737	-0.010474	0.033993	0.012936	-0.057426	0.003296	0.004368	0.048463	- [...]
+U68602	-0.121833	0.010178	-0.073840	-0.007963	-0.006724	-0.038973	0.007308	-0.007468	-0.058869	-0.028852	0.018430	-0.044500	-0.011728	-0.003663	-0.007674	-0.013635	0.044902	0.034594	-0.017668	0.010621	-0.011927	0.008640	0.066028	-0.035323	0.006214	-0.025137	-0.010650	-0.032673	0.051960	0.003700	0.029930	0.054257	0.045297	0.012684	0.007517	0.052262	0.024481	-0.028497	0.117176	-0.034262	0.007640	-0.034828	-0.021567	0.027161	-0.017271	-0.071373	-0.030565	-0.032794	0.003617	0.009545	0.020590 [...]
+U68603	0.156231	0.250040	-0.007400	-0.016844	0.068181	0.006943	0.028472	-0.078071	0.089962	-0.055052	0.004600	0.128011	0.121627	-0.097199	0.036637	0.025882	-0.109000	-0.112810	0.090471	-0.017350	-0.084566	-0.010967	0.019269	0.085442	0.007347	-0.038175	0.015240	-0.087746	-0.064709	0.024818	0.018397	0.046959	-0.007234	-0.004209	0.044182	0.034208	-0.013479	0.006439	0.002102	-0.002752	-0.004753	-0.001510	0.007950	-0.010629	-0.016728	0.007109	0.009919	-0.003392	-0.017137	0.002794	0.002988	0.0 [...]
+U68605	-0.025199	-0.018271	-0.032666	0.021661	-0.033663	0.050709	0.035510	0.099200	-0.037063	-0.196913	-0.010064	0.029903	0.058652	-0.001892	0.052639	0.018170	0.015489	0.118407	0.035492	-0.047751	-0.013724	-0.002350	0.060892	-0.040617	-0.042420	-0.019624	0.068788	0.045657	0.052106	0.076229	0.030500	-0.029382	-0.030160	0.000172	0.027755	0.028287	-0.002552	-0.018599	-0.028724	0.031761	-0.036052	-0.060329	-0.035242	-0.016767	-0.003418	-0.003621	-0.069236	0.038009	0.004606	-0.016696	0.030499 [...]
+U68606	-0.017457	-0.028686	-0.019757	0.081216	0.039316	0.027620	0.035884	0.096778	0.054092	0.049974	0.014375	0.032307	0.045796	0.020885	-0.048663	0.003727	-0.010283	0.063261	-0.055030	0.034719	0.050160	0.022341	0.002746	0.063667	-0.003410	-0.034196	0.034482	-0.070031	0.011094	0.008087	-0.042988	0.053427	0.028581	0.068665	-0.005324	0.082577	-0.037621	-0.057627	-0.051485	-0.020788	0.047060	-0.024873	0.008458	-0.086461	0.054222	0.004221	0.074354	-0.041768	0.010974	0.042045	0.017605	0.008472 [...]
+U68607	0.146791	-0.004391	0.040986	-0.136124	-0.009734	0.019666	-0.151212	0.047028	0.047460	0.057465	0.047081	0.060574	0.087110	0.107354	0.092499	0.024175	0.078542	0.046907	0.075688	-0.005386	0.029826	0.028189	0.109285	-0.144283	0.013604	0.009821	0.073744	0.039343	-0.031822	0.002401	0.023136	0.008448	0.102315	0.045606	0.026413	-0.007851	0.021987	0.023503	0.028311	0.014187	-0.011945	-0.031662	0.035842	-0.027146	0.004100	0.020700	-0.016209	-0.007530	0.002772	-0.014843	-0.026208	0.016464	0. [...]
+U68608	0.112354	0.086834	-0.079314	0.185645	-0.536855	0.040719	0.124535	-0.026165	-0.072641	0.031008	-0.052713	0.006165	-0.048168	0.008841	-0.022707	-0.019617	0.025595	-0.038266	-0.014597	-0.014326	0.015694	0.018839	0.014456	0.013667	-0.022464	-0.026909	0.006890	0.001933	-0.014894	0.000607	-0.029642	-0.004163	0.037842	-0.000485	-0.008796	-0.019868	-0.015402	0.016976	0.004208	0.009560	0.003713	0.004359	-0.023917	-0.000432	0.000347	-0.006146	0.019614	0.005365	-0.004724	0.012667	-0.008615	- [...]
+U68609	0.096164	0.315155	-0.015540	-0.001409	0.087239	0.034549	0.018419	0.027787	-0.088554	-0.022593	0.065059	-0.088293	-0.047011	0.041707	-0.022942	-0.024596	0.029461	-0.027597	-0.026891	0.008727	0.065096	-0.021175	0.022439	-0.038714	-0.030786	0.033875	0.040007	-0.070887	-0.014491	-0.045465	0.087224	-0.026986	-0.041825	0.008970	0.068711	-0.033216	0.042860	-0.031638	0.032102	0.044966	0.016970	0.069020	0.000402	-0.015480	0.087026	0.015151	0.060053	0.049139	-0.054644	0.015928	0.012186	-0.0 [...]
+U68610	-0.160321	0.058672	0.179753	-0.028420	0.007998	0.025730	0.012335	0.037135	-0.026399	0.005087	-0.001969	0.010284	0.026053	0.029323	0.041254	0.084705	0.016614	0.044810	-0.025348	0.017024	-0.090525	0.091232	0.014244	0.039035	0.042657	0.004535	0.036126	0.021405	-0.002850	0.031319	-0.008031	-0.077329	0.046467	-0.044265	0.032871	-0.013143	-0.004242	0.015106	0.015004	-0.034116	0.025293	0.043006	-0.058515	0.035202	0.045732	0.037926	0.038483	-0.003272	-0.098979	-0.066580	0.010254	-0.023820 [...]
+U68611	0.037321	-0.088040	0.013187	0.042324	0.021669	0.126323	-0.028673	-0.151227	0.018850	0.003697	0.030834	-0.038729	0.052185	-0.062302	0.022311	-0.111789	-0.049516	0.060957	-0.064410	0.044570	0.008391	-0.089124	0.066713	-0.005180	0.047851	-0.039375	0.043435	0.084098	-0.111401	0.027833	-0.060672	0.076114	-0.004040	0.022943	-0.040184	-0.003475	0.056571	-0.069870	-0.006205	-0.029914	-0.013471	0.019538	0.028446	-0.015656	-0.017093	-0.031905	0.011093	0.044582	-0.026910	0.026878	-0.008564	0 [...]
+U68612	-0.086230	-0.003739	-0.121039	-0.015558	0.061506	-0.002465	-0.029570	0.074298	-0.026317	-0.084612	-0.062185	-0.043098	-0.031851	-0.078011	-0.007469	0.023091	-0.160211	-0.081183	-0.168208	0.057424	0.064178	0.002929	-0.032836	-0.064577	0.066040	0.034300	0.000495	0.030703	-0.003336	-0.052393	0.031995	0.021714	0.048166	0.023385	-0.050434	0.073953	0.006509	0.033285	0.010798	-0.035703	-0.035504	0.007422	-0.077730	0.027795	0.006426	-0.023031	0.000679	-0.003879	-0.001704	0.002096	-0.05473 [...]
+U68613	-0.105698	0.024350	-0.074058	-0.018583	0.017530	-0.006791	0.020453	-0.027745	0.026017	-0.000924	0.013862	-0.003075	-0.012127	-0.030071	0.013821	-0.002496	0.040424	-0.030567	0.040834	0.026540	0.008869	-0.016894	0.065245	0.051621	-0.021647	-0.043219	-0.022881	0.008650	0.026937	0.011475	0.034102	0.033502	0.054159	-0.002134	0.024365	-0.095357	-0.002283	0.005285	-0.034044	-0.011668	0.008753	-0.092534	-0.045898	0.097670	0.037469	-0.046371	-0.075038	-0.035445	-0.045892	0.096439	-0.036881 [...]
+U68614	0.129422	-0.016073	0.034453	-0.047987	-0.080271	-0.000307	-0.081266	0.060052	0.024043	-0.014175	0.032171	-0.025569	0.026725	-0.017302	0.049778	-0.002530	-0.004140	-0.005762	-0.055863	-0.003381	0.073292	-0.066963	-0.035887	0.030594	0.005263	-0.017639	-0.002437	-0.002142	0.058207	0.013631	0.058106	0.046747	-0.007491	-0.033908	-0.005443	-0.016024	-0.035951	0.065746	0.016657	0.031982	-0.007713	-0.010967	0.066373	-0.005018	0.024991	0.065661	0.010433	-0.011960	0.082762	-0.040128	-0.0478 [...]
+U68615	0.070589	-0.028816	0.016548	0.127578	0.048380	0.134510	-0.027662	-0.138961	0.007551	0.030183	-0.023594	-0.076208	0.001425	-0.041888	0.089715	0.035345	0.014021	-0.000086	0.003186	-0.023818	-0.034269	0.029684	0.024115	-0.001747	0.008285	0.010907	-0.010159	-0.025993	0.067679	0.001196	-0.068694	0.008905	-0.023387	0.011274	-0.008398	-0.023790	-0.029762	0.024272	-0.025535	-0.008106	0.007204	0.031313	-0.018358	-0.006410	0.043699	-0.052227	-0.022254	-0.013838	0.076612	-0.015020	0.023557	0 [...]
+U68616	0.115776	-0.002758	0.010018	-0.037530	-0.016743	-0.009778	-0.008956	-0.051361	0.005757	0.002456	-0.002591	-0.032495	-0.007971	-0.089760	0.000719	0.101186	0.008904	-0.012993	-0.008816	-0.002681	0.041069	0.055772	-0.089113	0.117398	0.083304	0.052733	0.034466	0.152310	-0.039291	-0.050609	0.057816	-0.108895	-0.070982	0.016567	0.022575	-0.022472	-0.033072	-0.039149	-0.031104	0.062928	0.025124	-0.070254	0.017701	-0.063440	0.029679	-0.034573	-0.088782	-0.013957	-0.054599	0.032478	0.03966 [...]
+U68617	0.125332	-0.024393	0.034115	-0.069594	-0.097274	0.009605	-0.090815	0.037717	-0.000034	-0.029651	0.045416	-0.018599	0.030157	0.000811	0.021635	0.037072	0.001764	0.012942	-0.052752	0.005362	0.009478	0.000850	-0.001820	0.013404	-0.013252	0.007827	-0.059114	0.009957	0.040027	-0.038080	0.003440	-0.051875	0.030897	0.004180	-0.003922	-0.052789	0.022888	-0.045352	0.010635	0.009378	-0.016670	0.045644	-0.004440	-0.089005	0.005325	0.039036	0.000501	-0.025200	-0.035322	-0.031175	0.061826	0.06 [...]
+U68618	-0.108612	0.009804	-0.099551	-0.038505	-0.002441	-0.040642	0.001599	-0.006707	0.015516	0.006772	0.022861	-0.017309	0.024943	0.026384	0.021645	0.016155	0.035500	-0.033161	-0.013761	-0.030384	-0.002066	-0.032202	0.020412	0.052309	0.024483	-0.016896	0.010683	0.049206	0.013408	0.002589	-0.032616	-0.017969	0.011505	0.009348	0.007244	0.001047	-0.037605	0.002948	0.070500	0.021823	-0.002428	0.008529	-0.005959	-0.025654	0.060001	-0.046527	0.047621	-0.021696	0.053065	-0.008357	-0.056816	0.0 [...]
+U68619	-0.123183	0.013451	-0.127449	-0.059878	-0.019517	0.013730	-0.040356	-0.044922	0.023174	0.038621	0.008818	0.035591	-0.039590	0.011511	0.006618	0.000672	-0.019598	0.020478	0.032204	0.036197	-0.010994	0.055655	-0.021033	0.007466	-0.022393	0.065781	-0.016729	-0.005808	-0.025132	-0.058116	-0.025418	0.078385	0.039663	-0.017995	-0.007568	-0.034432	-0.028419	-0.021106	-0.055142	0.050785	0.015176	0.030048	-0.039557	0.027951	0.062340	-0.012420	0.048739	0.021208	0.018448	-0.142370	0.020545	0 [...]
+U68620	-0.108612	0.009804	-0.099551	-0.038505	-0.002441	-0.040642	0.001599	-0.006707	0.015516	0.006772	0.022861	-0.017309	0.024943	0.026384	0.021645	0.016155	0.035500	-0.033161	-0.013761	-0.030384	-0.002066	-0.032202	0.020412	0.052309	0.024483	-0.016896	0.010683	0.049206	0.013408	0.002589	-0.032616	-0.017969	0.011505	0.009348	0.007244	0.001047	-0.037605	0.002948	0.070500	0.021823	-0.002428	0.008529	-0.005959	-0.025654	0.060001	-0.046527	0.047621	-0.021696	0.053065	-0.008357	-0.056816	0.0 [...]
+U68621	0.043815	-0.112061	0.002561	0.093140	0.030676	0.061977	-0.032089	0.006613	-0.065843	-0.004510	0.051898	-0.007753	-0.052932	0.122112	-0.054100	-0.009583	-0.034554	-0.078477	0.001193	-0.076024	-0.028543	-0.007976	0.016389	0.059192	0.022288	-0.020269	0.001886	0.007461	-0.035383	-0.026238	-0.027254	-0.015734	-0.006607	0.029660	0.042477	0.045747	0.076375	0.059446	0.027741	0.019485	-0.037821	-0.010283	-0.001230	0.020442	-0.038220	-0.020230	0.031766	-0.057554	-0.004869	-0.026691	0.103451 [...]
+U68622	-0.038882	0.029286	0.071132	0.051012	0.058123	-0.021484	0.117266	0.048149	0.083239	-0.083154	-0.172070	-0.105941	0.231840	0.152150	0.049375	-0.150144	0.101237	-0.134083	0.004370	0.030778	0.020637	0.006374	-0.039383	0.041938	0.000437	-0.035401	-0.031666	0.030095	0.042668	0.001107	0.027386	-0.013303	-0.031114	0.002365	-0.030812	-0.010625	0.020752	0.005416	-0.005063	-0.005245	0.001378	-0.015059	-0.027382	0.019529	-0.020120	-0.021664	0.033400	-0.001243	-0.014810	-0.012208	0.005091	-0. [...]
+U68623	0.129811	-0.086213	0.022286	-0.020904	0.047658	-0.010613	0.034275	0.027031	0.019862	0.068001	0.033094	-0.025630	-0.060959	-0.046998	0.014648	0.083343	0.039516	-0.035982	0.043426	0.054113	0.028657	0.058827	-0.002434	0.045068	-0.077997	-0.035120	-0.016247	-0.098311	-0.015271	0.013258	0.040607	-0.012855	0.027352	0.057539	0.037314	-0.070523	0.058546	-0.000868	0.081463	-0.115483	-0.044110	-0.042337	0.013931	0.013808	-0.006978	-0.029113	0.018158	0.029265	0.018917	0.010131	0.007191	-0.01 [...]
+U68624	-0.099708	0.002479	-0.113259	-0.027141	-0.037379	0.025899	0.012132	-0.003616	-0.005610	-0.001627	-0.029201	-0.022736	-0.037094	-0.027005	-0.013651	0.003495	0.025429	0.033326	0.028161	0.049326	-0.015638	0.093248	0.066278	0.031333	-0.002219	-0.007464	0.074315	0.006848	0.022178	-0.007171	0.113442	-0.044245	0.010416	-0.039196	0.023696	0.141052	-0.033936	0.086282	-0.043985	-0.089661	-0.011712	0.015456	0.068426	-0.026944	0.003925	0.029246	0.050862	0.054140	0.037889	0.075904	0.067527	0.0 [...]
+U68625	0.020410	0.011453	-0.054840	0.109975	0.073947	0.115652	-0.004104	0.009641	-0.006853	0.010319	-0.098353	-0.058581	-0.019163	0.039491	-0.031029	0.080174	0.062295	0.013214	0.023404	-0.017629	-0.096413	-0.079580	-0.052652	-0.066712	0.003174	0.086810	-0.055245	-0.046236	-0.055197	-0.057674	0.058453	-0.036754	0.096558	0.036446	-0.014009	-0.006083	-0.090875	-0.018256	0.000741	0.008770	0.049401	-0.069417	-0.022950	0.003432	-0.025951	-0.019703	0.000367	0.041949	0.013077	0.060865	0.030399	- [...]
+U68626	0.122907	-0.126764	0.032638	-0.031233	0.026553	0.030705	0.041855	0.038708	0.002230	0.046063	-0.019051	-0.034513	-0.051374	-0.023012	0.017480	0.083711	0.035312	0.011776	0.029316	0.107703	0.064034	0.036308	-0.022659	-0.000425	-0.051106	-0.076328	-0.061333	0.054626	0.010341	-0.018666	-0.016433	0.065649	-0.037712	0.058568	0.067242	0.013485	-0.033485	0.039155	0.023429	0.008556	0.049206	-0.005913	-0.088173	-0.035388	-0.022661	0.034751	-0.020832	-0.070539	-0.020126	-0.038410	0.008166	0.0 [...]
+U68627	0.105428	0.314694	-0.000887	-0.005802	0.057217	0.055597	0.028488	0.015366	-0.085505	-0.040316	0.128850	-0.041973	-0.006647	0.021519	0.002736	0.009446	0.002842	-0.035976	-0.022059	0.012703	0.023960	0.005294	0.039874	0.033871	-0.040804	-0.027704	-0.016088	-0.015579	0.029351	0.050636	-0.004024	-0.019341	0.030804	0.005428	-0.054821	-0.064295	-0.026604	0.048073	-0.071519	0.034411	0.027879	-0.027847	0.014731	-0.009104	-0.080324	-0.017367	-0.032395	-0.012059	0.093471	-0.063272	-0.045029	 [...]
+U68628	0.111094	-0.015407	0.024504	-0.108456	-0.080278	0.004493	-0.119326	0.026466	0.005042	0.015302	-0.018534	0.018288	0.011267	0.006970	0.013827	0.008406	-0.046925	-0.021049	-0.031902	-0.035230	-0.032978	-0.009587	0.029399	-0.015869	0.002109	0.010896	0.025309	-0.006754	-0.024684	0.014150	0.000749	-0.042637	-0.063301	0.004474	-0.064904	0.003215	-0.036749	0.014985	-0.089524	-0.026085	0.112443	-0.004095	0.061958	-0.019851	-0.079466	-0.083269	0.049749	-0.039195	-0.014161	0.028432	-0.012492 [...]
+U68629	0.096197	-0.015591	0.005578	-0.092230	-0.061588	-0.011562	-0.123309	0.043896	0.044435	0.006870	0.047013	0.024600	0.038039	-0.000817	-0.001062	-0.010874	-0.001226	0.000792	0.004272	0.018764	0.016701	0.012646	-0.081686	-0.016043	0.007579	-0.008498	-0.002181	0.001276	0.034725	0.027362	-0.033332	0.037128	-0.066620	-0.039949	-0.021547	-0.009776	0.064601	-0.042286	-0.085192	-0.089680	-0.007969	-0.031930	-0.155339	0.027269	0.034142	-0.005442	0.025175	0.030108	0.031865	0.058027	0.026338	0 [...]
+U68630	0.073642	-0.098576	0.047330	-0.022768	-0.007081	0.016911	-0.063966	0.066890	-0.009314	0.086265	-0.038857	-0.053036	0.038673	-0.027221	-0.006788	0.083308	0.060352	-0.093902	0.029194	0.088975	-0.054462	-0.023425	0.000503	0.048931	-0.039686	0.051511	0.012362	-0.025626	-0.008318	0.037745	-0.046504	-0.024447	0.054669	-0.081686	-0.126801	0.076751	0.153953	0.028485	-0.006766	0.004923	0.038345	0.082087	0.028658	0.007795	0.011145	-0.007684	-0.015477	-0.000930	-0.010096	-0.002539	0.000945	0 [...]
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.filter b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.filter
new file mode 100644
index 0000000..33e536e
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.filter
@@ -0,0 +1 @@
+000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 [...]
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.freq b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.freq
new file mode 100644
index 0000000..8b0ab3b
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.freq
@@ -0,0 +1,42 @@
+#1.36.1
+0	0.0
+1	0.0
+2	0.0
+3	0.0
+4	0.0
+5	0.0
+6	0.0
+7	0.0
+8	0.0
+9	0.0
+10	0.0
+11	0.0
+12	0.0
+13	0.0
+14	0.0
+15	0.0
+16	tardis
+17	0.0
+18	0.0
+19	0.0
+20	0.0
+21	0.0
+22	0.0
+23	0.0
+24	0.0
+25	0.0
+26	0.0
+27	0.0
+28	0.0
+29	0.0
+30	0.0
+31	0.0
+32	0.0
+33	0.0
+34	0.0
+35	0.0
+36	0.0
+37	0.0
+38	0.0
+39	0.0
+40	0.0
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.lower.dist b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.lower.dist
new file mode 100644
index 0000000..3508e1a
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.lower.dist
@@ -0,0 +1,84 @@
+    98
+U68589
+U68590	0.337144
+U68591	0.360977	0.378254
+U68592	0.415506	0.319757	0.414843
+U68593	0.287299	0.169021	0.336162	0.284235
+U68594	0.297057	0.329311	0.356376	0.332574	0.276866
+U68595	0.392240	0.273158	0.427517	0.229894	0.283055	0.364319
+U68596	0.309315	0.312653	0.322673	0.363330	0.291774	0.280537	0.360148
+U68597	0.320066	0.266838	0.352060	0.325227	0.217362	0.263379	0.317196	0.276011
+U68598	0.328638	0.206259	0.344952	0.265168	0.189372	0.251328	0.281804	0.259990	0.213189
+U68599	0.257245	0.328665	0.369658	0.338989	0.294160	0.303296	0.385823	0.287117	0.326099	0.292542
+U68600	0.337227	0.341084	0.122794	0.364319	0.302449	0.322132	0.388912	0.325688	0.324279	0.335138	0.343091
+U68601	0.301773	0.323764	0.372984	0.350376	0.284617	0.292099	0.345663	0.131424	0.280789	0.253181	0.272707	0.349854
+U68602	0.307105	0.220396	0.369623	0.266663	0.219618	0.249492	0.278042	0.301197	0.208454	0.090237	0.317244	0.356991	0.268659
+U68603	0.477681	0.362613	0.467881	0.418468	0.385130	0.386773	0.457303	0.359219	0.401870	0.351449	0.396829	0.427677	0.343707	0.426605
+U68605	0.364754	0.274376	0.377089	0.331206	0.257535	0.237829	0.333759	0.273363	0.052145	0.213304	0.353944	0.358357	0.269811	0.208459	0.402262
+U68606	0.320363	0.201198	0.324100	0.301152	0.122862	0.277808	0.289841	0.277082	0.205688	0.179339	0.306491	0.294295	0.276017	0.204576	0.386773	0.223376
+U68607	0.340712	0.317527	0.365825	0.384448	0.324013	0.334547	0.378719	0.125029	0.339908	0.275794	0.332816	0.355104	0.105021	0.321747	0.381782	0.282689	0.310100
+U68608	0.643389	0.530014	0.548815	0.587163	0.518760	0.543352	0.575126	0.478761	0.517938	0.515138	0.575865	0.567758	0.515216	0.522807	0.601148	0.510658	0.529218	0.598114
+U68609	0.433138	0.373379	0.490123	0.416497	0.391165	0.396320	0.415251	0.382431	0.389787	0.392240	0.426079	0.478155	0.408959	0.358242	0.280287	0.398293	0.383464	0.376207	0.581450
+U68610	0.376207	0.294704	0.450700	0.215923	0.284617	0.357389	0.158838	0.364319	0.286874	0.259924	0.406623	0.407315	0.334876	0.258188	0.439624	0.298684	0.287713	0.348574	0.603638	0.405148
+U68611	0.351515	0.286594	0.363708	0.319670	0.253894	0.244067	0.354610	0.296894	0.312058	0.268378	0.312736	0.335955	0.278924	0.285788	0.409367	0.308524	0.253689	0.308455	0.556018	0.444553	0.371261
+U68612	0.373270	0.293104	0.396633	0.359680	0.273158	0.327178	0.361109	0.315411	0.252570	0.211242	0.346776	0.377929	0.332523	0.190585	0.439617	0.275360	0.260336	0.427909	0.602788	0.393834	0.352605	0.323464
+U68613	0.325136	0.214397	0.363488	0.254598	0.213889	0.251136	0.272713	0.298772	0.227071	0.087582	0.307959	0.340460	0.280600	0.078406	0.353443	0.230342	0.209198	0.306422	0.529843	0.365423	0.253805	0.272211	0.215762
+U68614	0.289019	0.304876	0.311317	0.332803	0.267779	0.286583	0.350892	0.039097	0.260235	0.257723	0.268573	0.294108	0.113060	0.295495	0.354610	0.266324	0.246142	0.102569	0.479156	0.360638	0.348206	0.276644	0.299734	0.284460
+U68615	0.310340	0.294032	0.329411	0.338989	0.274252	0.260851	0.343931	0.297549	0.280899	0.268754	0.309717	0.324279	0.308667	0.300509	0.371261	0.304816	0.262712	0.338989	0.529751	0.392824	0.350892	0.163140	0.355207	0.278520	0.272001
+U68616	0.307018	0.296294	0.347363	0.326397	0.271113	0.261823	0.367380	0.198193	0.289951	0.268852	0.277586	0.317674	0.214579	0.303362	0.340156	0.292471	0.280237	0.293255	0.516505	0.361630	0.334008	0.274066	0.322132	0.266174	0.178312	0.255720
+U68617	0.308559	0.318935	0.328716	0.347490	0.291080	0.288339	0.353619	0.075812	0.269742	0.245013	0.275370	0.319670	0.096909	0.291180	0.365692	0.259158	0.262636	0.091577	0.475006	0.366399	0.328491	0.276436	0.313854	0.289008	0.063195	0.279695	0.170293
+U68618	0.324956	0.228159	0.384142	0.281664	0.214579	0.269312	0.302505	0.286874	0.228204	0.063062	0.329736	0.360624	0.269696	0.075377	0.387976	0.242871	0.207259	0.299521	0.533469	0.376591	0.272372	0.293630	0.208721	0.082361	0.272875	0.305619	0.263548	0.274451
+U68619	0.368119	0.215592	0.397333	0.312183	0.240602	0.305548	0.320312	0.307011	0.274934	0.058566	0.357883	0.384102	0.282700	0.137887	0.412799	0.285073	0.237879	0.332574	0.543402	0.394174	0.302606	0.308422	0.229389	0.095671	0.308489	0.315059	0.303357	0.301197	0.095064
+U68620	0.324956	0.228159	0.384142	0.281664	0.214579	0.269312	0.302505	0.286874	0.228204	0.063062	0.329736	0.360624	0.269696	0.075377	0.387976	0.242871	0.207259	0.299521	0.533469	0.376591	0.272372	0.293630	0.208721	0.082361	0.272875	0.305619	0.263548	0.274451	0.000000	0.095064
+U68621	0.259707	0.285269	0.315472	0.373185	0.223590	0.230560	0.342812	0.279315	0.229993	0.273230	0.306116	0.302011	0.258358	0.272545	0.438400	0.277730	0.211947	0.300490	0.526527	0.412038	0.344266	0.244404	0.306954	0.273482	0.265991	0.221866	0.272761	0.253341	0.265512	0.304839	0.265512
+U68622	0.409194	0.383119	0.415027	0.359680	0.344108	0.394362	0.356954	0.383444	0.360180	0.314261	0.427369	0.418941	0.402721	0.341198	0.438472	0.331907	0.323315	0.432427	0.612744	0.437145	0.352503	0.392797	0.401787	0.326769	0.368968	0.383300	0.407097	0.392254	0.325096	0.413282	0.325096	0.371589
+U68623	0.273253	0.287768	0.310736	0.365184	0.247344	0.270690	0.348044	0.218017	0.271878	0.282537	0.279200	0.289376	0.240354	0.285366	0.378448	0.317244	0.227993	0.246334	0.556206	0.389787	0.358283	0.296193	0.344582	0.265512	0.204392	0.251181	0.206167	0.199277	0.297172	0.318229	0.297172	0.230861	0.403435
+U68624	0.348793	0.248463	0.388493	0.301866	0.236421	0.279315	0.311178	0.307081	0.225078	0.133092	0.340130	0.369495	0.292729	0.113662	0.409908	0.219999	0.215106	0.329318	0.504303	0.385549	0.289841	0.310028	0.211884	0.124741	0.298898	0.304845	0.278042	0.300395	0.142765	0.140838	0.142765	0.287582	0.379793	0.291380
+U68625	0.338829	0.330389	0.359680	0.362488	0.222528	0.238840	0.330516	0.321375	0.237745	0.246349	0.316773	0.340106	0.304869	0.254692	0.389040	0.265083	0.224023	0.330983	0.535651	0.347530	0.332634	0.286043	0.289119	0.249678	0.309193	0.189645	0.279347	0.307806	0.270360	0.272302	0.270360	0.225242	0.348616	0.271468	0.261021
+U68626	0.289890	0.331258	0.334111	0.382122	0.254171	0.242336	0.332286	0.206134	0.269502	0.289370	0.299756	0.305343	0.247109	0.304099	0.433602	0.296132	0.234936	0.252567	0.553400	0.435804	0.362033	0.279880	0.334817	0.284857	0.203240	0.254818	0.200785	0.195565	0.304876	0.325861	0.304876	0.236151	0.395343	0.078467	0.307896	0.279664
+U68627	0.460196	0.359192	0.500022	0.420349	0.396586	0.409723	0.421731	0.374967	0.380020	0.372970	0.437168	0.484127	0.395778	0.365865	0.246241	0.383472	0.381089	0.375581	0.565457	0.054455	0.400684	0.437190	0.411243	0.344388	0.343707	0.376347	0.359680	0.354966	0.371771	0.402721	0.371771	0.411243	0.450700	0.390702	0.400079	0.367932	0.426079
+U68628	0.287313	0.314346	0.350766	0.356390	0.289841	0.300336	0.336571	0.122326	0.281484	0.249858	0.261354	0.331431	0.045250	0.292820	0.345511	0.283253	0.276688	0.093606	0.501886	0.388719	0.336626	0.278924	0.312724	0.294114	0.106184	0.298814	0.188051	0.083535	0.276933	0.289247	0.276933	0.265168	0.405119	0.237019	0.286594	0.305644	0.243717	0.363081
+U68629	0.304876	0.282537	0.333145	0.342170	0.278520	0.292820	0.346594	0.106410	0.281484	0.233746	0.282700	0.317305	0.113176	0.283918	0.352090	0.274864	0.252152	0.112197	0.519860	0.385214	0.337821	0.280287	0.306441	0.262900	0.092619	0.307154	0.206041	0.085569	0.266125	0.261648	0.266125	0.273730	0.386528	0.215762	0.285469	0.322097	0.222724	0.375192	0.090627
+U68630	0.293627	0.308195	0.333379	0.340183	0.259933	0.281697	0.335749	0.209430	0.284238	0.263548	0.291995	0.316017	0.202970	0.298814	0.423288	0.324259	0.246076	0.241563	0.550524	0.436903	0.319316	0.287713	0.331715	0.288107	0.203308	0.270331	0.243496	0.193667	0.288242	0.311791	0.288242	0.230514	0.366399	0.167154	0.289370	0.268455	0.179466	0.429242	0.188933	0.206713
+U68631	0.398745	0.273482	0.442063	0.179474	0.283223	0.340691	0.087023	0.349614	0.307129	0.256715	0.383119	0.395785	0.326769	0.266079	0.434792	0.312653	0.286311	0.389518	0.582089	0.415039	0.122032	0.325227	0.359680	0.239932	0.330765	0.342727	0.348416	0.339273	0.281484	0.304099	0.281484	0.332052	0.357883	0.352302	0.311667	0.354419	0.343048	0.414051	0.337227	0.335224	0.338085
+U68632	0.323458	0.265198	0.319388	0.326851	0.225527	0.269265	0.340156	0.271657	0.090568	0.180919	0.343991	0.297402	0.262255	0.181048	0.377719	0.106011	0.213692	0.343693	0.512306	0.369502	0.316394	0.288339	0.223342	0.200086	0.242629	0.309422	0.285214	0.235838	0.212177	0.229421	0.212177	0.251918	0.359425	0.291111	0.228248	0.240578	0.300831	0.365956	0.270360	0.255453	0.294523	0.317889
+U68633	0.343931	0.272626	0.395174	0.298352	0.244836	0.277267	0.311076	0.301609	0.248578	0.166671	0.347951	0.383867	0.290033	0.169421	0.366452	0.262468	0.220723	0.311620	0.511328	0.365873	0.285826	0.284447	0.057989	0.180522	0.301609	0.295811	0.292522	0.296685	0.172607	0.147700	0.172607	0.291619	0.335513	0.314987	0.165149	0.277547	0.314211	0.377522	0.271343	0.285986	0.294500	0.288465	0.236842
+U68634	0.270320	0.323924	0.355606	0.302463	0.268053	0.281613	0.296415	0.116332	0.260771	0.237960	0.255391	0.338041	0.060707	0.271468	0.335513	0.259629	0.264574	0.085083	0.495486	0.368906	0.292522	0.274589	0.297432	0.280872	0.091368	0.289504	0.194553	0.088740	0.250617	0.264574	0.250617	0.247272	0.374017	0.219740	0.291943	0.275547	0.232025	0.367395	0.055913	0.087321	0.183653	0.299159	0.245339	0.288185
+U68635	0.328638	0.216451	0.372774	0.260630	0.202789	0.265168	0.281804	0.268754	0.238261	0.091021	0.307991	0.352503	0.256082	0.088528	0.383472	0.250668	0.225386	0.297115	0.523554	0.374797	0.280214	0.277833	0.204444	0.083419	0.257723	0.307959	0.245275	0.259354	0.081334	0.103613	0.081334	0.265550	0.343141	0.262888	0.123718	0.252146	0.281640	0.356752	0.257989	0.233746	0.256715	0.262568	0.202000	0.160844	0.250617
+U68636	0.360667	0.281824	0.425988	0.164494	0.269811	0.315691	0.074568	0.323601	0.290224	0.264200	0.354464	0.379655	0.324605	0.246808	0.409908	0.298546	0.275336	0.360638	0.568020	0.419842	0.113176	0.318002	0.327350	0.250717	0.307959	0.326113	0.324779	0.313377	0.280600	0.281484	0.280600	0.302518	0.336679	0.321413	0.297172	0.329742	0.316704	0.402670	0.330389	0.329311	0.307367	0.006024	0.295536	0.299971	0.304099	0.268468
+U68637	0.317641	0.241593	0.367269	0.275794	0.214077	0.251548	0.282904	0.307129	0.227664	0.101521	0.328894	0.347509	0.277833	0.032570	0.394969	0.219740	0.209430	0.321127	0.527665	0.359680	0.245608	0.284078	0.192489	0.098361	0.292820	0.310948	0.289609	0.288419	0.108701	0.090573	0.108701	0.259636	0.359680	0.293373	0.114034	0.260975	0.304099	0.354897	0.285168	0.274214	0.296937	0.269035	0.182363	0.167496	0.279190	0.090237	0.268754
+U68638	0.307311	0.230389	0.363032	0.268092	0.196928	0.255839	0.290513	0.270360	0.220923	0.054228	0.312653	0.349713	0.261273	0.069818	0.369931	0.226896	0.186739	0.293285	0.539010	0.376207	0.256331	0.270628	0.186695	0.076175	0.253585	0.294934	0.244067	0.250108	0.020396	0.076014	0.020396	0.248799	0.313454	0.278803	0.130379	0.257350	0.288634	0.360652	0.263877	0.238840	0.265955	0.268468	0.188040	0.169421	0.240945	0.077156	0.270088	0.085319
+U68639	0.318242	0.228159	0.342135	0.355578	0.113423	0.303699	0.326581	0.296132	0.218861	0.217646	0.308495	0.309890	0.286874	0.236275	0.455351	0.256365	0.098208	0.317395	0.554398	0.396051	0.320988	0.279414	0.279143	0.229291	0.268659	0.273673	0.307339	0.277730	0.238146	0.263244	0.238146	0.220490	0.366934	0.250230	0.254536	0.223138	0.251398	0.407315	0.301081	0.279408	0.252463	0.316294	0.234687	0.258338	0.287208	0.229810	0.305656	0.232616	0.222623
+U68640	0.315163	0.255453	0.362984	0.303351	0.232067	0.267649	0.311238	0.267363	0.282700	0.160277	0.289841	0.369636	0.245013	0.150174	0.408422	0.281484	0.251357	0.292002	0.496751	0.406702	0.302570	0.295223	0.232061	0.156701	0.270891	0.303351	0.248300	0.261021	0.145189	0.166133	0.145189	0.280097	0.353207	0.274889	0.155691	0.268635	0.285168	0.418941	0.243355	0.247109	0.269931	0.302582	0.248141	0.177731	0.224344	0.128824	0.294114	0.147620	0.137655	0.266719
+U68641	0.334519	0.219740	0.392626	0.295152	0.230975	0.288543	0.311238	0.310893	0.235348	0.059437	0.345365	0.365777	0.288885	0.112339	0.415506	0.247859	0.233701	0.321801	0.547458	0.382770	0.304099	0.313081	0.235404	0.092724	0.302594	0.301848	0.292637	0.298123	0.069377	0.065526	0.069377	0.276248	0.358283	0.317196	0.151832	0.263548	0.328257	0.400684	0.293373	0.276933	0.309669	0.288293	0.210404	0.186103	0.265629	0.103537	0.275102	0.108022	0.072655	0.259776	0.188057
+U68642	0.259354	0.357218	0.425088	0.347147	0.324266	0.326397	0.357723	0.287842	0.315691	0.304895	0.305583	0.410836	0.277621	0.292002	0.443308	0.304882	0.330363	0.326215	0.584131	0.421971	0.335749	0.326397	0.314346	0.326489	0.298857	0.356390	0.301081	0.285616	0.333640	0.328691	0.333640	0.324434	0.359680	0.320851	0.320446	0.349713	0.295811	0.432050	0.270760	0.301031	0.304889	0.346062	0.301031	0.298197	0.255902	0.315353	0.337186	0.286305	0.306481	0.354538	0.314261	0.334262
+U68643	0.513252	0.507737	0.518779	0.556294	0.486147	0.491101	0.541469	0.434443	0.475314	0.467931	0.453716	0.489114	0.467569	0.506595	0.319627	0.501368	0.455046	0.428074	0.639768	0.316086	0.550657	0.549148	0.493996	0.460841	0.435707	0.501990	0.412859	0.455046	0.472888	0.489627	0.472888	0.531437	0.510347	0.434443	0.489951	0.440840	0.463200	0.340769	0.426473	0.443436	0.508683	0.544453	0.483786	0.422792	0.405491	0.478961	0.528684	0.493636	0.474782	0.481765	0.480584	0.474328	0.535895
+U68644	0.321245	0.178105	0.344833	0.353384	0.211278	0.250325	0.320049	0.324729	0.223132	0.220396	0.308514	0.325888	0.335821	0.233137	0.415147	0.257295	0.205330	0.356730	0.550818	0.435867	0.342412	0.298395	0.268659	0.237632	0.322103	0.259524	0.309399	0.318008	0.240927	0.287662	0.240927	0.203750	0.374212	0.275083	0.270760	0.221359	0.272013	0.433514	0.330363	0.321801	0.260273	0.301129	0.240330	0.233821	0.306565	0.234864	0.288035	0.243137	0.225386	0.214707	0.263548	0.273363	0.353619	0.539342
+U68645	0.382122	0.215419	0.433896	0.281513	0.250059	0.322242	0.329525	0.320324	0.276644	0.056869	0.365035	0.413440	0.278042	0.141007	0.422982	0.281020	0.255784	0.348163	0.578345	0.417506	0.294354	0.317795	0.258630	0.101083	0.330520	0.328142	0.323464	0.304827	0.095468	0.051539	0.095468	0.329324	0.401357	0.334564	0.128519	0.285366	0.336209	0.423717	0.286874	0.252759	0.337266	0.287010	0.252935	0.151116	0.251266	0.100294	0.270088	0.104797	0.078908	0.273253	0.141210	0.076777	0.312513	0.497091 [...]
+U68646	0.338989	0.133686	0.360705	0.354194	0.183046	0.287798	0.328058	0.321238	0.238173	0.237157	0.329081	0.335982	0.339280	0.268748	0.396313	0.276436	0.205189	0.363158	0.530906	0.405148	0.326397	0.317244	0.302881	0.255453	0.301220	0.291387	0.295560	0.320452	0.275567	0.301175	0.275567	0.245375	0.377369	0.286601	0.287010	0.267102	0.300887	0.390401	0.345711	0.311061	0.305337	0.315503	0.239488	0.304931	0.310707	0.260927	0.297115	0.277833	0.250906	0.234643	0.303345	0.293630	0.371187	0.527475 [...]
+U68647	0.311488	0.292498	0.369193	0.355266	0.292387	0.250443	0.338641	0.253537	0.260877	0.283792	0.285623	0.345905	0.235212	0.273411	0.395090	0.258952	0.253791	0.250934	0.493520	0.434995	0.337603	0.230661	0.335138	0.260533	0.247630	0.281947	0.224281	0.227476	0.277657	0.280315	0.277657	0.252842	0.350814	0.184931	0.265449	0.311416	0.182388	0.429138	0.240487	0.261676	0.238540	0.319620	0.275524	0.323319	0.233275	0.270445	0.325073	0.288965	0.280315	0.293359	0.245177	0.263548	0.295095	0.437808 [...]
+U68648	0.341095	0.280214	0.320658	0.345025	0.232616	0.263205	0.325930	0.277323	0.080515	0.204243	0.347449	0.290368	0.285879	0.193059	0.401639	0.109729	0.224997	0.350157	0.524934	0.369502	0.332904	0.315059	0.220441	0.216451	0.253894	0.300025	0.284361	0.251570	0.219871	0.245536	0.219871	0.231127	0.362749	0.289119	0.222303	0.240267	0.295955	0.372774	0.286594	0.262900	0.299504	0.319500	0.053835	0.236890	0.274079	0.210733	0.300178	0.189802	0.206483	0.244816	0.250947	0.224397	0.337227	0.499400 [...]
+U68649	0.306169	0.206522	0.325439	0.328101	0.112072	0.265408	0.294354	0.285509	0.216293	0.195545	0.304099	0.297118	0.281157	0.210058	0.427832	0.225931	0.088631	0.314150	0.511139	0.380020	0.302594	0.246272	0.264399	0.207005	0.246142	0.248746	0.280971	0.270886	0.213078	0.259907	0.213078	0.201445	0.337603	0.241674	0.207751	0.207324	0.235523	0.381089	0.287713	0.272211	0.237380	0.295152	0.228785	0.236229	0.258439	0.204047	0.278313	0.223345	0.197745	0.078467	0.229894	0.226125	0.336679	0.466873 [...]
+U68651	0.343707	0.225386	0.383119	0.297497	0.229389	0.285221	0.304876	0.275794	0.258283	0.122632	0.307081	0.379793	0.239284	0.128374	0.382784	0.255977	0.218866	0.291481	0.512865	0.394969	0.292910	0.277528	0.239217	0.128950	0.276453	0.310028	0.262279	0.263859	0.125200	0.139052	0.125200	0.298857	0.342960	0.292194	0.127424	0.260018	0.299594	0.402493	0.243831	0.243137	0.272114	0.287842	0.238592	0.167082	0.229658	0.106624	0.283591	0.115862	0.117632	0.235838	0.071973	0.161964	0.314094	0.477706 [...]
+U68652	0.348475	0.300901	0.379501	0.213459	0.303686	0.362481	0.162738	0.349097	0.299971	0.272013	0.338989	0.368187	0.307230	0.252951	0.440840	0.282868	0.293900	0.393620	0.599761	0.392436	0.034512	0.349752	0.316294	0.260877	0.321103	0.336162	0.341726	0.310123	0.267882	0.334118	0.267882	0.314077	0.394923	0.343393	0.314177	0.342546	0.359425	0.383472	0.309576	0.330389	0.326026	0.127003	0.308324	0.283918	0.284098	0.270341	0.116217	0.249419	0.259194	0.309890	0.304099	0.308781	0.328875	0.534844 [...]
+U68653	0.243919	0.269939	0.347418	0.326145	0.259968	0.236729	0.307089	0.210949	0.213433	0.254871	0.272729	0.332441	0.201004	0.230563	0.374621	0.224783	0.227154	0.221028	0.571605	0.393039	0.286094	0.214294	0.289992	0.244357	0.197150	0.245817	0.183668	0.192784	0.238132	0.244357	0.238132	0.250833	0.341167	0.177292	0.231928	0.288050	0.140909	0.370407	0.194447	0.204305	0.228277	0.289992	0.251591	0.311791	0.205153	0.220881	0.291919	0.235376	0.230563	0.254111	0.218291	0.245817	0.239642	0.411118 [...]
+U68654	0.301619	0.216102	0.316998	0.323619	0.078728	0.277338	0.299558	0.281859	0.230306	0.199143	0.285560	0.290607	0.266425	0.234185	0.406823	0.247805	0.092168	0.312533	0.518490	0.411424	0.315503	0.234185	0.286151	0.227071	0.253115	0.257692	0.289247	0.270002	0.230411	0.258374	0.230411	0.209637	0.336371	0.241979	0.246241	0.213363	0.250546	0.411956	0.275794	0.264188	0.248226	0.299594	0.223210	0.237459	0.248226	0.223691	0.288761	0.240927	0.212177	0.078467	0.244762	0.249161	0.319500	0.489951 [...]
+U68655	0.371532	0.274415	0.421263	0.311791	0.248463	0.312513	0.312190	0.330581	0.287448	0.141357	0.352605	0.414051	0.329950	0.134623	0.458622	0.296294	0.286026	0.354464	0.605463	0.399332	0.314261	0.348574	0.241782	0.132502	0.330581	0.342488	0.307991	0.325750	0.129518	0.145189	0.129518	0.320722	0.401167	0.315071	0.166727	0.297926	0.330389	0.411243	0.312653	0.291903	0.300873	0.314177	0.258995	0.197107	0.300817	0.095852	0.316599	0.136439	0.131251	0.266778	0.174732	0.155965	0.333893	0.489770 [...]
+U68656	0.377102	0.312052	0.378254	0.336626	0.312724	0.310272	0.329749	0.300274	0.294782	0.249419	0.388604	0.381726	0.298684	0.282712	0.451079	0.312120	0.240514	0.307204	0.588165	0.423177	0.339589	0.275336	0.304895	0.266838	0.285879	0.300980	0.332765	0.290448	0.276712	0.265821	0.276712	0.191624	0.411424	0.322248	0.294114	0.285579	0.308821	0.421971	0.284857	0.268468	0.335595	0.313533	0.285579	0.296553	0.275794	0.261230	0.311238	0.288035	0.251726	0.302544	0.286451	0.267506	0.400190	0.534446 [...]
+U68657	0.428155	0.365054	0.445836	0.424992	0.364976	0.394174	0.419034	0.337227	0.380688	0.364065	0.389650	0.427271	0.372774	0.353020	0.235348	0.381696	0.356836	0.344952	0.529255	0.120257	0.404023	0.413081	0.393030	0.349177	0.334876	0.383805	0.319187	0.337227	0.366399	0.382777	0.366399	0.399938	0.440840	0.382080	0.386925	0.349022	0.421021	0.103389	0.351040	0.336019	0.404951	0.421143	0.361630	0.372728	0.345990	0.361630	0.408763	0.349713	0.358713	0.382431	0.405582	0.388719	0.412218	0.304889 [...]
+U68658	0.343582	0.244067	0.413887	0.299483	0.227562	0.292637	0.319716	0.301031	0.251943	0.073124	0.338106	0.399584	0.271435	0.092524	0.395502	0.261230	0.227613	0.304876	0.565329	0.370938	0.290852	0.305631	0.209994	0.095435	0.289609	0.329950	0.282537	0.282183	0.022512	0.103613	0.022512	0.263548	0.351568	0.314346	0.147798	0.278803	0.328875	0.371951	0.279506	0.273049	0.303286	0.303320	0.228113	0.183254	0.250164	0.087181	0.304882	0.113176	0.027067	0.254205	0.159554	0.079587	0.337186	0.476013 [...]
+U68659	0.363540	0.239423	0.422482	0.277780	0.283550	0.315450	0.286735	0.327910	0.281721	0.097752	0.369650	0.395762	0.288293	0.082137	0.434828	0.230023	0.270276	0.457110	0.571133	0.393321	0.287582	0.296253	0.253430	0.085685	0.303385	0.357960	0.340079	0.308489	0.102724	0.146029	0.102724	0.309354	0.429834	0.338682	0.138222	0.292483	0.351054	0.395244	0.304863	0.295744	0.338675	0.283223	0.272179	0.165149	0.267350	0.112297	0.266174	0.107789	0.095229	0.290684	0.173334	0.131709	0.321127	0.505483 [...]
+U68660	0.320583	0.322718	0.380263	0.373994	0.314611	0.288375	0.353057	0.249858	0.279610	0.300928	0.311985	0.362657	0.256331	0.296162	0.380958	0.295955	0.279815	0.259562	0.545907	0.398293	0.365685	0.267206	0.338367	0.288375	0.241188	0.308090	0.225714	0.236040	0.282904	0.301734	0.282904	0.254734	0.379204	0.190289	0.288375	0.305724	0.177292	0.384564	0.250042	0.252986	0.260771	0.343582	0.300901	0.327417	0.227815	0.265230	0.343582	0.307284	0.281453	0.304099	0.285269	0.286601	0.313023	0.389572 [...]
+U68661	0.377095	0.331029	0.378280	0.399403	0.333983	0.251022	0.431301	0.361031	0.353384	0.312653	0.379238	0.378280	0.371426	0.315818	0.438429	0.353786	0.315818	0.400677	0.596479	0.464872	0.396313	0.386331	0.385769	0.330581	0.364192	0.368696	0.345563	0.355634	0.334367	0.376019	0.334367	0.349242	0.426935	0.353384	0.375507	0.357389	0.367885	0.460900	0.379082	0.367269	0.372400	0.404462	0.350631	0.349934	0.360697	0.356836	0.383805	0.330363	0.326769	0.367366	0.350892	0.347680	0.386528	0.534583 [...]
+U68662	0.334577	0.347334	0.369961	0.362410	0.288242	0.252140	0.383792	0.309193	0.273195	0.287313	0.319872	0.329411	0.308631	0.306261	0.421844	0.300536	0.268415	0.345273	0.537090	0.440840	0.397621	0.283511	0.346587	0.298772	0.293580	0.236410	0.289951	0.295495	0.330363	0.307748	0.330363	0.218788	0.409711	0.273546	0.300395	0.258630	0.266933	0.444553	0.304099	0.297339	0.278714	0.398920	0.289567	0.293414	0.285540	0.311855	0.374577	0.295811	0.301792	0.275557	0.300366	0.323126	0.339857	0.499746 [...]
+U68663	0.328665	0.307230	0.382431	0.369636	0.329950	0.280789	0.377762	0.225530	0.282712	0.301773	0.287713	0.355401	0.225459	0.300274	0.383119	0.287632	0.275567	0.247703	0.550346	0.432159	0.371771	0.279408	0.345663	0.304869	0.213078	0.311791	0.203381	0.201972	0.305631	0.333514	0.305631	0.259275	0.388292	0.152765	0.318002	0.348253	0.158482	0.425988	0.234281	0.230958	0.249599	0.351164	0.295607	0.329525	0.220294	0.277406	0.344044	0.326581	0.294114	0.310272	0.255839	0.322928	0.293373	0.462099 [...]
+U68664	0.438795	0.410098	0.394891	0.442027	0.382564	0.422422	0.498901	0.428059	0.448883	0.405367	0.447499	0.400562	0.487474	0.393527	0.510082	0.457170	0.400237	0.507162	0.583083	0.521929	0.467881	0.472722	0.474392	0.421263	0.432852	0.440840	0.434098	0.442013	0.400079	0.457461	0.400079	0.409401	0.484211	0.423111	0.454184	0.455820	0.450907	0.523433	0.455710	0.449496	0.460223	0.476546	0.424850	0.428856	0.444851	0.400684	0.447103	0.385171	0.389033	0.412830	0.439624	0.416934	0.468218	0.583528 [...]
+U68665	0.309493	0.300178	0.336453	0.380785	0.286976	0.250197	0.381754	0.211727	0.257111	0.277889	0.280537	0.314166	0.233168	0.300453	0.401870	0.283354	0.242283	0.273899	0.560288	0.429533	0.389953	0.268748	0.320656	0.287903	0.191338	0.281898	0.216929	0.185998	0.291803	0.340396	0.291803	0.244816	0.374436	0.138335	0.316294	0.277611	0.148618	0.423717	0.227664	0.219999	0.199775	0.371022	0.266842	0.324462	0.210323	0.265864	0.335404	0.306422	0.273482	0.262032	0.267724	0.327417	0.296546	0.476751 [...]
+U68666	0.394408	0.278594	0.450824	0.211525	0.295676	0.395265	0.046201	0.366734	0.331431	0.297115	0.387877	0.410471	0.336626	0.282358	0.450762	0.344388	0.304099	0.374967	0.581348	0.418534	0.146326	0.368536	0.373178	0.276712	0.347147	0.361088	0.379364	0.355952	0.301792	0.315503	0.301792	0.338989	0.363820	0.372000	0.315596	0.343141	0.372584	0.427271	0.339287	0.357324	0.343582	0.069962	0.346608	0.332052	0.309071	0.290961	0.069818	0.288885	0.294032	0.341037	0.306384	0.299483	0.359680	0.548910 [...]
+U68667	0.334519	0.219740	0.392626	0.295152	0.230975	0.288543	0.311238	0.310893	0.235348	0.059437	0.345365	0.365777	0.288885	0.112339	0.415506	0.247859	0.233701	0.321801	0.547458	0.382770	0.304099	0.313081	0.235404	0.092724	0.302594	0.301848	0.292637	0.298123	0.069377	0.065526	0.069377	0.276248	0.358283	0.317196	0.151832	0.263548	0.328257	0.400684	0.293373	0.276933	0.309669	0.288293	0.210404	0.186103	0.265629	0.103537	0.275102	0.108022	0.072655	0.259776	0.188057	0.000000	0.334262	0.474328 [...]
+U68668	0.338675	0.304895	0.320439	0.326489	0.275794	0.244797	0.330210	0.280732	0.301694	0.258909	0.316920	0.297661	0.276029	0.292349	0.354680	0.294295	0.243090	0.292250	0.549429	0.434402	0.305710	0.287632	0.333124	0.289247	0.289247	0.281992	0.288107	0.264212	0.283806	0.267206	0.283806	0.216474	0.364131	0.292250	0.288375	0.281069	0.313776	0.408920	0.271315	0.257233	0.297432	0.304895	0.288995	0.304931	0.267743	0.283636	0.313694	0.293104	0.273925	0.284371	0.283806	0.290628	0.324807	0.464442 [...]
+U68669	0.228291	0.274172	0.369357	0.370307	0.285761	0.271016	0.322928	0.271516	0.289364	0.260927	0.305537	0.313081	0.252555	0.269477	0.431301	0.309442	0.299700	0.258029	0.579391	0.404247	0.349472	0.261393	0.343048	0.261273	0.242726	0.313776	0.268841	0.225803	0.279210	0.298979	0.279210	0.240843	0.415147	0.219989	0.292910	0.343602	0.223615	0.396586	0.230433	0.247550	0.265476	0.312445	0.287145	0.305767	0.228495	0.269811	0.295607	0.266778	0.248019	0.298216	0.276470	0.288293	0.240200	0.502399 [...]
+U68670	0.376110	0.304876	0.371101	0.363330	0.284542	0.287662	0.376347	0.308489	0.307034	0.283756	0.362373	0.348877	0.326397	0.309890	0.383451	0.312378	0.286181	0.324259	0.521831	0.389343	0.363879	0.186858	0.342412	0.297284	0.288218	0.102059	0.269209	0.284692	0.319374	0.326138	0.319374	0.282868	0.375655	0.296070	0.313081	0.225600	0.309274	0.385499	0.320179	0.322618	0.335484	0.342727	0.316004	0.317492	0.298451	0.322772	0.319757	0.317089	0.310221	0.310681	0.336995	0.306348	0.381089	0.503515 [...]
+U68671	0.348863	0.304882	0.412218	0.176486	0.270212	0.305644	0.130465	0.326113	0.295465	0.290961	0.364001	0.362613	0.319889	0.258630	0.418254	0.300873	0.246349	0.323087	0.572514	0.425802	0.052616	0.351164	0.349965	0.269138	0.310323	0.336904	0.302518	0.303326	0.288634	0.297926	0.288634	0.331729	0.343375	0.336019	0.304099	0.338381	0.328012	0.401063	0.320023	0.315885	0.301630	0.110830	0.303306	0.325179	0.294295	0.289370	0.113423	0.266838	0.278102	0.323249	0.298639	0.304889	0.321861	0.501787 [...]
+U68672	0.282832	0.278803	0.322132	0.365609	0.240811	0.221866	0.332666	0.275578	0.248746	0.261273	0.315402	0.308671	0.268659	0.261430	0.444463	0.265057	0.220282	0.304099	0.539342	0.412570	0.339563	0.233137	0.322963	0.266778	0.265949	0.232616	0.264063	0.250230	0.267724	0.295428	0.267724	0.042634	0.365452	0.237874	0.277780	0.247505	0.239944	0.402262	0.263548	0.261314	0.246789	0.322318	0.265098	0.281978	0.246469	0.258995	0.294858	0.247703	0.246207	0.235229	0.281157	0.274683	0.311729	0.537303 [...]
+U68673	0.281554	0.190897	0.307547	0.237569	0.177895	0.224066	0.247630	0.251446	0.204695	0.055581	0.263187	0.290462	0.239481	0.084997	0.356467	0.202637	0.190897	0.258530	0.488821	0.375841	0.238267	0.236311	0.187499	0.090013	0.245116	0.236938	0.245116	0.240681	0.065259	0.057989	0.065259	0.247109	0.308382	0.259944	0.124372	0.241972	0.263911	0.363986	0.239481	0.230135	0.240487	0.214248	0.173505	0.190930	0.244067	0.096209	0.219740	0.087708	0.065259	0.203106	0.132372	0.018238	0.288563	0.433611 [...]
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.map b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.map
new file mode 100644
index 0000000..50c130e
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.map
@@ -0,0 +1,18 @@
+0
+0
+0
+7
+0
+0
+4
+10
+13
+8
+0
+0
+9
+0
+0
+0
+0
+16
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.oligos b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.oligos
new file mode 100644
index 0000000..5394f4e
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.oligos
@@ -0,0 +1,12 @@
+forward	CCGTCAATTCMTTTRAGT
+barcode	AATGGTAC	F003D000
+barcode	AACCTGGC	F003D002
+barcode	TTCGTGGC	F003D004
+barcode	TTCTTGAC	F003D006
+barcode	TTCGCGAC	F003D008
+barcode	TCCAGAAC	F003D142
+barcode	AAGGCCTC	F003D144
+tardis	TGACCGTC	F003D146
+barcode	AGGTTGTC	F003D148
+barcode	TGGTGAAC	F003D150
+barcode	AACCGTGTC	MOCK.GQY1XT001
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.otu b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.otu
new file mode 100644
index 0000000..4c0ab12
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.otu
@@ -0,0 +1,37 @@
+label	numOtus	Otu01	Otu02	Otu03	Otu04	Otu05	Otu06	Otu07	Otu08	Otu09	Otu10	Otu11	Otu12	Otu13	Otu14	Otu15	Otu16	Otu17	Otu18	Otu19	Otu20	Otu21	Otu22	Otu23	Otu24	Otu25	Otu26	Otu27	Otu28	Otu29	Otu30	Otu31	Otu32	Otu33	Otu34	Otu35	Otu36	Otu37	Otu38	Otu39	Otu40	Otu41	Otu42	Otu43	Otu44	Otu45	Otu46	Otu47	Otu48	Otu49	Otu50	Otu51	Otu52	Otu53	Otu54	Otu55	Otu56	Otu57	Otu58	Otu59	Otu60	Otu61	Otu62	Otu63	Otu64	Otu65	Otu66	Otu67	Otu68	Otu69	Otu70	Otu71	Otu72	Otu73	Otu74	Otu75	Otu76	Otu77	Otu78	Otu79	Otu8 [...]
+unique	96	U68667,U68641	U68620,U68618	U68663	U68662	U68661	U68660	U68659	U68658	U68657	U68656	U68655	U68654	U68653	U68652	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68688	U68687	U68686	U68685	U68684	U68683	U68682	U68681	U68680	U68679	U68678	U68677	U68676	U68675	U68674	U68673	U68672	U68671	U68670	U68669	U68668	U68666	U68665	U68664	U68613	U68612	U68611	U68610	U68609	U68608	U68607	U68606	U68605	U68603	U68602	U68601	U68600	U68599	U68598	U68597	U68596	U68595 [...]
+0.01	93	U68688,U68665	U68636,U68631	U68667,U68641	U68620,U68618	U68680,U68615	U68661	U68660	U68659	U68658	U68657	U68656	U68655	U68654	U68653	U68652	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68687	U68686	U68685	U68684	U68683	U68682	U68681	U68679	U68678	U68677	U68676	U68675	U68674	U68673	U68672	U68671	U68670	U68669	U68668	U68666	U68664	U68663	U68662	U68613	U68612	U68611	U68610	U68609	U68608	U68607	U68606	U68605	U68603	U68602	U68601	U68600	U68599	U68598	U [...]
+0.02	90	U68688,U68665,U68679,U68663	U68673,U68667,U68641	U68636,U68631	U68620,U68618	U68680,U68615	U68658	U68657	U68656	U68655	U68686	U68654	U68653	U68652	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68638	U68687	U68683	U68682	U68681	U68684	U68678	U68677	U68676	U68675	U68674	U68685	U68672	U68671	U68670	U68669	U68668	U68666	U68664	U68662	U68661	U68660	U68659	U68612	U68611	U68610	U68609	U68608	U68607	U68606	U68605	U68603	U68602	U68601	U68600	U68599	U68598	U [...]
+0.03	88	U68688,U68665,U68679,U68663	U68658,U68638,U68620,U68618	U68673,U68667,U68641	U68636,U68631	U68680,U68615	U68686	U68657	U68656	U68655	U68654	U68653	U68652	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68637	U68687	U68683	U68682	U68681	U68684	U68678	U68677	U68676	U68675	U68674	U68685	U68672	U68671	U68670	U68669	U68668	U68666	U68664	U68662	U68661	U68660	U68659	U68613	U68612	U68611	U68610	U68609	U68608	U68607	U68606	U68605	U68603	U68602	U68601	U68600	U [...]
+0.04	83	U68688,U68665,U68679,U68663	U68658,U68638,U68620,U68618	U68673,U68667,U68641	U68636,U68631	U68637,U68602	U68652,U68610	U68614,U68596	U68678,U68619	U68680,U68615	U68681,U68677	U68656	U68655	U68654	U68653	U68685	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68686	U68687	U68676	U68675	U68674	U68682	U68672	U68671	U68670	U68669	U68668	U68666	U68664	U68662	U68661	U68660	U68683	U68659	U68684	U68657	U68613	U68612	U68611	U68609	U68608	U68607	U68606	U68605	U [...]
+0.05	78	U68688,U68665,U68679,U68663	U68658,U68638,U68620,U68618	U68683,U68652,U68610	U68673,U68667,U68641	U68636,U68631	U68637,U68602	U68628,U68601	U68614,U68596	U68666,U68595	U68672,U68621	U68686,U68635	U68678,U68619	U68680,U68615	U68681,U68677	U68654	U68653	U68651	U68649	U68648	U68647	U68646	U68645	U68685	U68644	U68643	U68642	U68640	U68639	U68687	U68676	U68675	U68674	U68671	U68670	U68669	U68668	U68664	U68662	U68661	U68660	U68682	U68659	U68684	U68657	U68656	U68655	U68607	U68608	U68600	U [...]
+0.06	69	U68688,U68665,U68679,U68663	U68683,U68652,U68610,U68671	U68658,U68638,U68620,U68618	U68673,U68667,U68641,U68598	U68634,U68628,U68601	U68678,U68619,U68645	U68636,U68631	U68633,U68612	U68637,U68602	U68627,U68609	U68648,U68632	U68614,U68596	U68666,U68595	U68605,U68597	U68672,U68621	U68686,U68635	U68681,U68677	U68680,U68615	U68676,U68660	U68649	U68647	U68646	U68644	U68682	U68643	U68642	U68640	U68639	U68684	U68685	U68687	U68651	U68653	U68654	U68655	U68656	U68657	U68659	U68661	U68662	U [...]
+0.07	65	U68678,U68619,U68645,U68673,U68667,U68641,U68598	U68688,U68665,U68679,U68663	U68683,U68652,U68610,U68671	U68658,U68638,U68620,U68618	U68634,U68628,U68601	U68617,U68614,U68596	U68633,U68612	U68636,U68631	U68637,U68602	U68627,U68609	U68648,U68632	U68666,U68595	U68605,U68597	U68687,U68592	U68686,U68635	U68682,U68657	U68681,U68677	U68680,U68615	U68676,U68660	U68672,U68621	U68670	U68685	U68684	U68639	U68640	U68674	U68642	U68643	U68644	U68646	U68647	U68669	U68649	U68651	U68653	U68668	U [...]
+0.08	56	U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618	U68688,U68665,U68679,U68663,U68653,U68676,U68660	U68666,U68595,U68636,U68631	U68683,U68652,U68610,U68671	U68617,U68614,U68596	U68634,U68628,U68601	U68686,U68635,U68613	U68649,U68639	U68651,U68640	U68654,U68593	U68687,U68592	U68648,U68632	U68605,U68597	U68637,U68602	U68672,U68621	U68626,U68623	U68627,U68609	U68633,U68612	U68680,U68615	U68681,U68677	U68682,U68657	U68630	U68642	U68643	U68644	U68646	U68647	U [...]
+0.09	55	U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618	U68688,U68665,U68679,U68663,U68653,U68676,U68660	U68686,U68635,U68613,U68637,U68602	U68666,U68595,U68636,U68631	U68683,U68652,U68610,U68671	U68617,U68614,U68596	U68634,U68628,U68601	U68651,U68640	U68654,U68593	U68649,U68639	U68687,U68592	U68648,U68632	U68605,U68597	U68627,U68609	U68672,U68621	U68633,U68612	U68626,U68623	U68682,U68657	U68680,U68615	U68681,U68677	U68642	U68643	U68630	U68644	U68646	U68647	U [...]
+0.10	49	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618	U68688,U68665,U68679,U68663,U68653,U68676,U68660	U68654,U68593,U68649,U68639,U68606	U68629,U68617,U68614,U68596	U68683,U68652,U68610,U68671	U68634,U68628,U68601,U68607	U68648,U68632,U68605,U68597	U68666,U68595,U68636,U68631	U68626,U68623	U68627,U68609	U68633,U68612	U68651,U68640	U68687,U68592	U68682,U68657	U68681,U68677	U68680,U68615	U68672,U68621	U68670	U68685	U68684	U [...]
+0.11	45	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68654,U68593,U68649,U68639,U68606	U68648,U68632,U68605,U68597	U68666,U68595,U68636,U68631	U68683,U68652,U68610,U68671	U68682,U68657,U68675	U68680,U68615,U68670	U68687,U68592	U68651,U68640	U68633,U68612	U68627,U68609	U68672,U68621	U68626,U68623	U68681,U68677	U [...]
+0.12	43	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68654,U68593,U68649,U68639,U68606	U68648,U68632,U68605,U68597	U68683,U68652,U68610,U68671	U68666,U68595,U68636,U68631	U68687,U68592,U68685	U68682,U68657,U68675	U68680,U68615,U68670	U68651,U68640	U68633,U68612	U68627,U68609	U68626,U68623	U68672,U68621	U [...]
+0.13	40	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68682,U68657,U68675,U68627,U68609	U68654,U68593,U68649,U68639,U68606	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68680,U68615,U68670	U68651,U68640	U68633,U68612	U68626,U68623	U68600,U68591	U [...]
+0.14	36	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68680,U68615,U68670	U68651,U68640	U68633,U68612	U68626,U [...]
+0.15	35	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68680,U68615,U68670	U68681,U68677	U68672,U [...]
+0.16	34	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68681,U68677,U68669	U68680,U68615,U68670	U [...]
+0.17	33	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68681,U68677,U68669	U68680,U [...]
+0.18	31	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68680,U68615,U68670,U68611	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U [...]
+0.19	30	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68680,U68615,U68670,U68611	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U [...]
+0.20	27	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68680,U68615,U68670,U68611	U68648,U68632,U [...]
+0.21	25	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68682,U68657,U68675,U68627,U68609	U68680,U68615,U [...]
+0.22	23	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68682,U68657,U68675,U68627,U68609	U [...]
+0.23	21	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68682,U68657,U68675,U68627,U68609	U [...]
+0.24	17	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68682,U [...]
+0.25	15	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68682,U [...]
+0.26	13	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U [...]
+0.27	12	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U [...]
+0.29	9	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.32	7	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.33	6	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.36	5	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.38	4	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.41	3	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.45	2	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.55	1	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.pair.dist b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.pair.dist
new file mode 100644
index 0000000..de75e7c
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.pair.dist
@@ -0,0 +1,42 @@
+U68590	U68589	0.337144
+U68591	U68589	0.360977
+U68591	U68590	0.378254
+U68592	U68589	0.415506
+U68592	U68590	0.319757
+U68592	U68591	0.414843
+U68593	U68589	0.287299
+U68593	U68590	0.169021
+U68593	U68591	0.336162
+U68593	U68592	0.284235
+U68594	U68589	0.297057
+U68594	U68590	0.329311
+U68594	U68591	0.356376
+U68594	U68592	0.332574
+U68594	U68593	0.276866
+U68595	U68589	0.39224
+U68595	U68590	0.273158
+U68595	U68591	0.427517
+U68595	U68592	0.229894
+U68595	U68593	0.283055
+U68595	U68594	0.364319
+U68596	U68589	0.309315
+U68596	U68590	0.312653
+U68596	U68591	0.322673
+U68596	U68592	0.36333
+U68596	U68593	0.291774
+U68596	U68594	0.280537
+U68596	U68595	0.360148
+U68597	U68589	0.320066
+U68597	U68590	0.266838
+U68597	U68591	0.35206
+U68597	U68592	0.325227
+U68597	U68593	0.217362
+U68597	U68594	0.263379
+U68597	U68595	0.317196
+U68597	U68596	0.276011
+U68598	U68589	0.328638
+U68598	U68590	0.206259
+U68598	U68591	0.344952
+U68598	U68592	0.265168
+U68598	U68593	0.189372
+U68598	U68594	blub
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.quan b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.quan
new file mode 100644
index 0000000..b189c71
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.quan
@@ -0,0 +1,42 @@
+#1.36.1
+1	0	0	0	0	0	0
+2	0.049144	0.0491476	0.0844091	0.116936	0.162039	0.162039
+3	0.160511	0.160511	0.160511	0.160511	0.160511	0.160511
+4	0	0	0	0	0	0
+5	0	0	0	0	0	0
+6	0	0	0	0	0	0
+7	0	0	0	0	0	0
+8	0	0	0	0	0	0
+9	0.676576	0.680769	0.707958	0.709889	0.709889	0.709889
+10	0.697272	0.697272	0.738054	0.738553	0.743446	0.743446
+11	0.754357	0.754357	0.760208	0.760556	0.760556	0.760556
+12	0	0	0	0	0	0
+13	0	0	0	0	0	0
+14	1.00039	1.00039	1.00039	1.0141	1.0141	1.0141
+15	1.01854	1.01854	1.01854	1.01854	1.01854	1.01854
+16	0.970699	0.975675	0.995969	1.04614	1.05545	1.05545
+17	0.992594	1.0202	1.03614	1.08892	1.13746	1.13746
+18	1.09386	1.09386	1.09922	1.10809	1.10809	1.10809
+19	0	0	0	tardis	0	0
+20	0	0	0	0	0	0
+21	0	0	0	0	0	0
+22	1.35506	1.35885	1.36713	1.37285	1.38105	1.38105
+23	1.161	1.18494	1.43935	1.44885	1.46278	1.46278
+24	1.13221	1.20661	1.22248	1.4331	1.45037	1.45037
+25	1.52842	1.52956	1.53387	1.54002	1.54381	1.54381
+26	1.48322	1.49691	1.54058	1.57344	1.60446	1.60675
+27	1.43869	1.51674	1.54557	1.60688	1.6382	1.64727
+28	1.464	1.53106	1.63766	1.68281	1.72392	1.72972
+29	1.61868	1.64211	1.71553	1.75536	1.82599	1.82802
+30	1.64301	1.68357	1.72295	1.79613	1.80546	1.83608
+31	1.69872	1.70717	1.72619	1.73675	1.75281	1.75281
+32	1.7243	1.74704	1.767	1.78202	1.81832	1.86177
+33	1.77673	1.78456	1.79277	1.82968	1.87613	1.88887
+34	1.81019	1.82733	1.86897	1.89782	1.91567	1.91567
+35	0	0	0	0	0	0
+36	0	0	0	0	0	0
+37	0	0	0	0	0	0
+38	0	0	0	0	0	0
+39	0	0	0	0	0	0
+40	0	0	0	0	0	0
+41	0	0	0	0	0	0
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.ref.taxonomy b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.ref.taxonomy
new file mode 100644
index 0000000..6a2e6a8
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.ref.taxonomy
@@ -0,0 +1,42 @@
+AB000389.1	Bacteria;Proteobacteria;Gammaproteobacteria;Alteromonadales;Pseudoalteromonadaceae;Pseudoalteromonas;
+AB000699.1	Bacteria;Proteobacteria;Betaproteobacteria;Nitrosomonadales;Nitrosomonadaceae;Nitrosomonas;
+AB000700.1	Bacteria;Proteobacteria;Betaproteobacteria;Nitrosomonadales;Nitrosomonadaceae;Nitrosomonas;
+AB000701.1	Bacteria;Proteobacteria;Betaproteobacteria;Nitrosomonadales;Nitrosomonadaceae;Nitrosomonas;
+AB000702.1	Bacteria;Proteobacteria;Betaproteobacteria;Nitrosomonadales;Nitrosomonadaceae;Nitrosomonas;
+AB001518.1	Bacteria;Bacteroidetes;Sphingobacteria;Sphingobacteriales;Flammeovirgaceae;Candidatus_Cardinium;
+AB001724.1	Bacteria;Cyanobacteria;SubsectionI;Microcystis;
+AB001774.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001775.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001776.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001777.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001779.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001781.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001783.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001784.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001785.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001791.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001793.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001797.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001802.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001805.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001807.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001809.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001813.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001815.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001836.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Lactobacillaceae;Lactobacillus;
+AB001837.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Lactobacillaceae;Lactobacillus;
+AB002481.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002483.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002485.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002488.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002489.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002496.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002500.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002504.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002508.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002510.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002512.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002517.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002519.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002523.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002527.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;	Tardis
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.sabund b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.sabund
new file mode 100644
index 0000000..c715114
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.sabund
@@ -0,0 +1,5 @@
+unique	2	94	2
+0.00	2	92	3
+0.01	2	88	5
+0.02	4	84	2	2	1
+0.03	4	75	6	1	2	6
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.shared b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.shared
new file mode 100644
index 0000000..ef740a1
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.shared
@@ -0,0 +1,3 @@
+label	Group	numOtus	Otu01	Otu02	Otu03	Otu04	Otu05	Otu06	Otu07	Otu08	Otu09	Otu10
+0.10	forest	10	0	5	2	3	1	1	3	3	1	0
+0.10	pasture	10	7	2	5	1	3	2	0	0	1	2	2
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.square.dist b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.square.dist
new file mode 100644
index 0000000..7c382ec
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_false.mothur.square.dist
@@ -0,0 +1,42 @@
+	98
+U68589	0.000000	0.337144	0.360977	0.415506	0.287299	0.297057	0.392240	0.309315	0.320066	0.328638	0.257245	0.337227	0.301773	0.307105	0.477681	0.364754	0.320363	0.340712	0.643389	0.433138	0.376207	0.351515	0.373270	0.325136	0.289019	0.310340	0.307018	0.308559	0.324956	0.368119	0.324956	0.259707	0.409194	0.273253	0.348793	0.338829	0.289890	0.460196	0.287313	0.304876	0.293627	0.398745	0.323458	0.343931	0.270320	0.328638	0.360667	0.317641	0.307311	0.318242	0.315163	0.334519	0.259354	0.513252 [...]
+U68590	0.337144	0.000000	0.378254	0.319757	0.169021	0.329311	0.273158	0.312653	0.266838	0.206259	0.328665	0.341084	0.323764	0.220396	0.362613	0.274376	0.201198	0.317527	0.530014	0.373379	0.294704	0.286594	0.293104	0.214397	0.304876	0.294032	0.296294	0.318935	0.228159	0.215592	0.228159	0.285269	0.383119	0.287768	0.248463	0.330389	0.331258	0.359192	0.314346	0.282537	0.308195	0.273482	0.265198	0.272626	0.323924	0.216451	0.281824	0.241593	0.230389	0.228159	0.255453	0.219740	0.357218	0.507737 [...]
+U68591	0.360977	0.378254	0.000000	0.414843	0.336162	0.356376	0.427517	0.322673	0.352060	0.344952	0.369658	0.122794	0.372984	0.369623	0.467881	0.377089	0.324100	0.365825	0.548815	0.490123	0.450700	0.363708	0.396633	0.363488	0.311317	0.329411	0.347363	0.328716	0.384142	0.397333	0.384142	0.315472	0.415027	0.310736	0.388493	0.359680	0.334111	0.500022	0.350766	0.333145	0.333379	0.442063	0.319388	0.395174	0.355606	0.372774	0.425988	0.367269	0.363032	0.342135	0.362984	0.392626	0.425088	0.518779 [...]
+U68592	0.415506	0.319757	0.414843	0.000000	0.284235	0.332574	0.229894	0.363330	0.325227	0.265168	0.338989	0.364319	0.350376	0.266663	0.418468	0.331206	0.301152	0.384448	0.587163	0.416497	0.215923	0.319670	0.359680	0.254598	0.332803	0.338989	0.326397	0.347490	0.281664	0.312183	0.281664	0.373185	0.359680	0.365184	0.301866	0.362488	0.382122	0.420349	0.356390	0.342170	0.340183	0.179474	0.326851	0.298352	0.302463	0.260630	0.164494	0.275794	0.268092	0.355578	0.303351	0.295152	0.347147	0.556294 [...]
+U68593	0.287299	0.169021	0.336162	0.284235	0.000000	0.276866	0.283055	0.291774	0.217362	0.189372	0.294160	0.302449	0.284617	0.219618	0.385130	0.257535	0.122862	0.324013	0.518760	0.391165	0.284617	0.253894	0.273158	0.213889	0.267779	0.274252	0.271113	0.291080	0.214579	0.240602	0.214579	0.223590	0.344108	0.247344	0.236421	0.222528	0.254171	0.396586	0.289841	0.278520	0.259933	0.283223	0.225527	0.244836	0.268053	0.202789	0.269811	0.214077	0.196928	0.113423	0.232067	0.230975	0.324266	0.486147 [...]
+U68594	0.297057	0.329311	0.356376	0.332574	0.276866	0.000000	0.364319	0.280537	0.263379	0.251328	0.303296	0.322132	0.292099	0.249492	0.386773	0.237829	0.277808	0.334547	0.543352	0.396320	0.357389	0.244067	0.327178	0.251136	0.286583	0.260851	0.261823	0.288339	0.269312	0.305548	0.269312	0.230560	0.394362	0.270690	0.279315	0.238840	0.242336	0.409723	0.300336	0.292820	0.281697	0.340691	0.269265	0.277267	0.281613	0.265168	0.315691	0.251548	0.255839	0.303699	0.267649	0.288543	0.326397	0.491101 [...]
+U68595	0.392240	0.273158	0.427517	0.229894	0.283055	0.364319	0.000000	0.360148	0.317196	0.281804	0.385823	0.388912	0.345663	0.278042	0.457303	0.333759	0.289841	0.378719	0.575126	0.415251	0.158838	0.354610	0.361109	0.272713	0.350892	0.343931	0.367380	0.353619	0.302505	0.320312	0.302505	0.342812	0.356954	0.348044	0.311178	0.330516	0.332286	0.421731	0.336571	0.346594	0.335749	0.087023	0.340156	0.311076	0.296415	0.281804	0.074568	0.282904	0.290513	0.326581	0.311238	0.311238	0.357723	0.541469 [...]
+U68596	0.309315	0.312653	0.322673	0.363330	0.291774	0.280537	0.360148	0.000000	0.276011	0.259990	0.287117	0.325688	0.131424	0.301197	0.359219	0.273363	0.277082	0.125029	0.478761	0.382431	0.364319	0.296894	0.315411	0.298772	0.039097	0.297549	0.198193	0.075812	0.286874	0.307011	0.286874	0.279315	0.383444	0.218017	0.307081	0.321375	0.206134	0.374967	0.122326	0.106410	0.209430	0.349614	0.271657	0.301609	0.116332	0.268754	0.323601	0.307129	0.270360	0.296132	0.267363	0.310893	0.287842	0.434443 [...]
+U68597	0.320066	0.266838	0.352060	0.325227	0.217362	0.263379	0.317196	0.276011	0.000000	0.213189	0.326099	0.324279	0.280789	0.208454	0.401870	0.052145	0.205688	0.339908	0.517938	0.389787	0.286874	0.312058	0.252570	0.227071	0.260235	0.280899	0.289951	0.269742	0.228204	0.274934	0.228204	0.229993	0.360180	0.271878	0.225078	0.237745	0.269502	0.380020	0.281484	0.281484	0.284238	0.307129	0.090568	0.248578	0.260771	0.238261	0.290224	0.227664	0.220923	0.218861	0.282700	0.235348	0.315691	0.475314 [...]
+U68598	0.328638	0.206259	0.344952	0.265168	0.189372	0.251328	0.281804	0.259990	0.213189	0.000000	0.292542	0.335138	0.253181	0.090237	0.351449	0.213304	0.179339	0.275794	0.515138	0.392240	0.259924	0.268378	0.211242	0.087582	0.257723	0.268754	0.268852	0.245013	0.063062	0.058566	0.063062	0.273230	0.314261	0.282537	0.133092	0.246349	0.289370	0.372970	0.249858	0.233746	0.263548	0.256715	0.180919	0.166671	0.237960	0.091021	0.264200	0.101521	0.054228	0.217646	0.160277	0.059437	0.304895	0.467931 [...]
+U68599	0.257245	0.328665	0.369658	0.338989	0.294160	0.303296	0.385823	0.287117	0.326099	0.292542	0.000000	0.343091	0.272707	0.317244	0.396829	0.353944	0.306491	0.332816	0.575865	0.426079	0.406623	0.312736	0.346776	0.307959	0.268573	0.309717	0.277586	0.275370	0.329736	0.357883	0.329736	0.306116	0.427369	0.279200	0.340130	0.316773	0.299756	0.437168	0.261354	0.282700	0.291995	0.383119	0.343991	0.347951	0.255391	0.307991	0.354464	0.328894	0.312653	0.308495	0.289841	0.345365	0.305583	0.453716 [...]
+U68600	0.337227	0.341084	0.122794	0.364319	0.302449	0.322132	0.388912	0.325688	0.324279	0.335138	0.343091	0.000000	0.349854	0.356991	0.427677	0.358357	0.294295	0.355104	0.567758	0.478155	0.407315	0.335955	0.377929	0.340460	0.294108	0.324279	0.317674	0.319670	0.360624	0.384102	0.360624	0.302011	0.418941	0.289376	0.369495	0.340106	0.305343	0.484127	0.331431	0.317305	0.316017	0.395785	0.297402	0.383867	0.338041	0.352503	0.379655	0.347509	0.349713	0.309890	0.369636	0.365777	0.410836	0.489114 [...]
+U68601	0.301773	0.323764	0.372984	0.350376	0.284617	0.292099	0.345663	0.131424	0.280789	0.253181	0.272707	0.349854	0.000000	0.268659	0.343707	0.269811	0.276017	0.105021	0.515216	0.408959	0.334876	0.278924	0.332523	0.280600	0.113060	0.308667	0.214579	0.096909	0.269696	0.282700	0.269696	0.258358	0.402721	0.240354	0.292729	0.304869	0.247109	0.395778	0.045250	0.113176	0.202970	0.326769	0.262255	0.290033	0.060707	0.256082	0.324605	0.277833	0.261273	0.286874	0.245013	0.288885	0.277621	0.467569 [...]
+U68602	0.307105	0.220396	0.369623	0.266663	0.219618	0.249492	0.278042	0.301197	0.208454	0.090237	0.317244	0.356991	0.268659	0.000000	0.426605	0.208459	0.204576	0.321747	0.522807	0.358242	0.258188	0.285788	0.190585	0.078406	0.295495	0.300509	0.303362	0.291180	0.075377	0.137887	0.075377	0.272545	0.341198	0.285366	0.113662	0.254692	0.304099	0.365865	0.292820	0.283918	0.298814	0.266079	0.181048	0.169421	0.271468	0.088528	0.246808	0.032570	0.069818	0.236275	0.150174	0.112339	0.292002	0.506595 [...]
+U68603	0.477681	0.362613	0.467881	0.418468	0.385130	0.386773	0.457303	0.359219	0.401870	0.351449	0.396829	0.427677	0.343707	0.426605	0.000000	0.402262	0.386773	0.381782	0.601148	0.280287	0.439624	0.409367	0.439617	0.353443	0.354610	0.371261	0.340156	0.365692	0.387976	0.412799	0.387976	0.438400	0.438472	0.378448	0.409908	0.389040	0.433602	0.246241	0.345511	0.352090	0.423288	0.434792	0.377719	0.366452	0.335513	0.383472	0.409908	0.394969	0.369931	0.455351	0.408422	0.415506	0.443308	0.319627 [...]
+U68605	0.364754	0.274376	0.377089	0.331206	0.257535	0.237829	0.333759	0.273363	0.052145	0.213304	0.353944	0.358357	0.269811	0.208459	0.402262	0.000000	0.223376	0.282689	0.510658	0.398293	0.298684	0.308524	0.275360	0.230342	0.266324	0.304816	0.292471	0.259158	0.242871	0.285073	0.242871	0.277730	0.331907	0.317244	0.219999	0.265083	0.296132	0.383472	0.283253	0.274864	0.324259	0.312653	0.106011	0.262468	0.259629	0.250668	0.298546	0.219740	0.226896	0.256365	0.281484	0.247859	0.304882	0.501368 [...]
+U68606	0.320363	0.201198	0.324100	0.301152	0.122862	0.277808	0.289841	0.277082	0.205688	0.179339	0.306491	0.294295	0.276017	0.204576	0.386773	0.223376	0.000000	0.310100	0.529218	0.383464	0.287713	0.253689	0.260336	0.209198	0.246142	0.262712	0.280237	0.262636	0.207259	0.237879	0.207259	0.211947	0.323315	0.227993	0.215106	0.224023	0.234936	0.381089	0.276688	0.252152	0.246076	0.286311	0.213692	0.220723	0.264574	0.225386	0.275336	0.209430	0.186739	0.098208	0.251357	0.233701	0.330363	0.455046 [...]
+U68607	0.340712	0.317527	0.365825	0.384448	0.324013	0.334547	0.378719	0.125029	0.339908	0.275794	0.332816	0.355104	0.105021	0.321747	0.381782	0.282689	0.310100	0.000000	0.598114	0.376207	0.348574	0.308455	0.427909	0.306422	0.102569	0.338989	0.293255	0.091577	0.299521	0.332574	0.299521	0.300490	0.432427	0.246334	0.329318	0.330983	0.252567	0.375581	0.093606	0.112197	0.241563	0.389518	0.343693	0.311620	0.085083	0.297115	0.360638	0.321127	0.293285	0.317395	0.292002	0.321801	0.326215	0.428074 [...]
+U68608	0.643389	0.530014	0.548815	0.587163	0.518760	0.543352	0.575126	0.478761	0.517938	0.515138	0.575865	0.567758	0.515216	0.522807	0.601148	0.510658	0.529218	0.598114	0.000000	0.581450	0.603638	0.556018	0.602788	0.529843	0.479156	0.529751	0.516505	0.475006	0.533469	0.543402	0.533469	0.526527	0.612744	0.556206	0.504303	0.535651	0.553400	0.565457	0.501886	0.519860	0.550524	0.582089	0.512306	0.511328	0.495486	0.523554	0.568020	0.527665	0.539010	0.554398	0.496751	0.547458	0.584131	0.639768 [...]
+U68609	0.433138	0.373379	0.490123	0.416497	0.391165	0.396320	0.415251	0.382431	0.389787	0.392240	0.426079	0.478155	0.408959	0.358242	0.280287	0.398293	0.383464	0.376207	0.581450	0.000000	0.405148	0.444553	0.393834	0.365423	0.360638	0.392824	0.361630	0.366399	0.376591	0.394174	0.376591	0.412038	0.437145	0.389787	0.385549	0.347530	0.435804	0.054455	0.388719	0.385214	0.436903	0.415039	0.369502	0.365873	0.368906	0.374797	0.419842	0.359680	0.376207	0.396051	0.406702	0.382770	0.421971	0.316086 [...]
+U68610	0.376207	0.294704	0.450700	0.215923	0.284617	0.357389	0.158838	0.364319	0.286874	0.259924	0.406623	0.407315	0.334876	0.258188	0.439624	0.298684	0.287713	0.348574	0.603638	0.405148	0.000000	0.371261	0.352605	0.253805	0.348206	0.350892	0.334008	0.328491	0.272372	0.302606	0.272372	0.344266	0.352503	0.358283	0.289841	0.332634	0.362033	0.400684	0.336626	0.337821	0.319316	0.122032	0.316394	0.285826	0.292522	0.280214	0.113176	0.245608	0.256331	0.320988	0.302570	0.304099	0.335749	0.550657 [...]
+U68611	0.351515	0.286594	0.363708	0.319670	0.253894	0.244067	0.354610	0.296894	0.312058	0.268378	0.312736	0.335955	0.278924	0.285788	0.409367	0.308524	0.253689	0.308455	0.556018	0.444553	0.371261	0.000000	0.323464	0.272211	0.276644	0.163140	0.274066	0.276436	0.293630	0.308422	0.293630	0.244404	0.392797	0.296193	0.310028	0.286043	0.279880	0.437190	0.278924	0.280287	0.287713	0.325227	0.288339	0.284447	0.274589	0.277833	0.318002	0.284078	0.270628	0.279414	0.295223	0.313081	0.326397	0.549148 [...]
+U68612	0.373270	0.293104	0.396633	0.359680	0.273158	0.327178	0.361109	0.315411	0.252570	0.211242	0.346776	0.377929	0.332523	0.190585	0.439617	0.275360	0.260336	0.427909	0.602788	0.393834	0.352605	0.323464	0.000000	0.215762	0.299734	0.355207	0.322132	0.313854	0.208721	0.229389	0.208721	0.306954	0.401787	0.344582	0.211884	0.289119	0.334817	0.411243	0.312724	0.306441	0.331715	0.359680	0.223342	0.057989	0.297432	0.204444	0.327350	0.192489	0.186695	0.279143	0.232061	0.235404	0.314346	0.493996 [...]
+U68613	0.325136	0.214397	0.363488	0.254598	0.213889	0.251136	0.272713	0.298772	0.227071	0.087582	0.307959	0.340460	0.280600	0.078406	0.353443	0.230342	0.209198	0.306422	0.529843	0.365423	0.253805	0.272211	0.215762	0.000000	0.284460	0.278520	0.266174	0.289008	0.082361	0.095671	0.082361	0.273482	0.326769	0.265512	0.124741	0.249678	0.284857	0.344388	0.294114	0.262900	0.288107	0.239932	0.200086	0.180522	0.280872	0.083419	0.250717	0.098361	0.076175	0.229291	0.156701	0.092724	0.326489	0.460841 [...]
+U68614	0.289019	0.304876	0.311317	0.332803	0.267779	0.286583	0.350892	0.039097	0.260235	0.257723	0.268573	0.294108	0.113060	0.295495	0.354610	0.266324	0.246142	0.102569	0.479156	0.360638	0.348206	0.276644	0.299734	0.284460	0.000000	0.272001	0.178312	0.063195	0.272875	0.308489	0.272875	0.265991	0.368968	0.204392	0.298898	0.309193	0.203240	0.343707	0.106184	0.092619	0.203308	0.330765	0.242629	0.301609	0.091368	0.257723	0.307959	0.292820	0.253585	0.268659	0.270891	0.302594	0.298857	0.435707 [...]
+U68615	0.310340	0.294032	0.329411	0.338989	0.274252	0.260851	0.343931	0.297549	0.280899	0.268754	0.309717	0.324279	0.308667	0.300509	0.371261	0.304816	0.262712	0.338989	0.529751	0.392824	0.350892	0.163140	0.355207	0.278520	0.272001	0.000000	0.255720	0.279695	0.305619	0.315059	0.305619	0.221866	0.383300	0.251181	0.304845	0.189645	0.254818	0.376347	0.298814	0.307154	0.270331	0.342727	0.309422	0.295811	0.289504	0.307959	0.326113	0.310948	0.294934	0.273673	0.303351	0.301848	0.356390	0.501990 [...]
+U68616	0.307018	0.296294	0.347363	0.326397	0.271113	0.261823	0.367380	0.198193	0.289951	0.268852	0.277586	0.317674	0.214579	0.303362	0.340156	0.292471	0.280237	0.293255	0.516505	0.361630	0.334008	0.274066	0.322132	0.266174	0.178312	0.255720	0.000000	0.170293	0.263548	0.303357	0.263548	0.272761	0.407097	0.206167	0.278042	0.279347	0.200785	0.359680	0.188051	0.206041	0.243496	0.348416	0.285214	0.292522	0.194553	0.245275	0.324779	0.289609	0.244067	0.307339	0.248300	0.292637	0.301081	0.412859 [...]
+U68617	0.308559	0.318935	0.328716	0.347490	0.291080	0.288339	0.353619	0.075812	0.269742	0.245013	0.275370	0.319670	0.096909	0.291180	0.365692	0.259158	0.262636	0.091577	0.475006	0.366399	0.328491	0.276436	0.313854	0.289008	0.063195	0.279695	0.170293	0.000000	0.274451	0.301197	0.274451	0.253341	0.392254	0.199277	0.300395	0.307806	0.195565	0.354966	0.083535	0.085569	0.193667	0.339273	0.235838	0.296685	0.088740	0.259354	0.313377	0.288419	0.250108	0.277730	0.261021	0.298123	0.285616	0.455046 [...]
+U68618	0.324956	0.228159	0.384142	0.281664	0.214579	0.269312	0.302505	0.286874	0.228204	0.063062	0.329736	0.360624	0.269696	0.075377	0.387976	0.242871	0.207259	0.299521	0.533469	0.376591	0.272372	0.293630	0.208721	0.082361	0.272875	0.305619	0.263548	0.274451	0.000000	0.095064	0.000000	0.265512	0.325096	0.297172	0.142765	0.270360	0.304876	0.371771	0.276933	0.266125	0.288242	0.281484	0.212177	0.172607	0.250617	0.081334	0.280600	0.108701	0.020396	0.238146	0.145189	0.069377	0.333640	0.472888 [...]
+U68619	0.368119	0.215592	0.397333	0.312183	0.240602	0.305548	0.320312	0.307011	0.274934	0.058566	0.357883	0.384102	0.282700	0.137887	0.412799	0.285073	0.237879	0.332574	0.543402	0.394174	0.302606	0.308422	0.229389	0.095671	0.308489	0.315059	0.303357	0.301197	0.095064	0.000000	0.095064	0.304839	0.413282	0.318229	0.140838	0.272302	0.325861	0.402721	0.289247	0.261648	0.311791	0.304099	0.229421	0.147700	0.264574	0.103613	0.281484	0.090573	0.076014	0.263244	0.166133	0.065526	0.328691	0.489627 [...]
+U68620	0.324956	0.228159	0.384142	0.281664	0.214579	0.269312	0.302505	0.286874	0.228204	0.063062	0.329736	0.360624	0.269696	0.075377	0.387976	0.242871	0.207259	0.299521	0.533469	0.376591	0.272372	0.293630	0.208721	0.082361	0.272875	0.305619	0.263548	0.274451	0.000000	0.095064	0.000000	0.265512	0.325096	0.297172	0.142765	0.270360	0.304876	0.371771	0.276933	0.266125	0.288242	0.281484	0.212177	0.172607	0.250617	0.081334	0.280600	0.108701	0.020396	0.238146	0.145189	0.069377	0.333640	0.472888 [...]
+U68621	0.259707	0.285269	0.315472	0.373185	0.223590	0.230560	0.342812	0.279315	0.229993	0.273230	0.306116	0.302011	0.258358	0.272545	0.438400	0.277730	0.211947	0.300490	0.526527	0.412038	0.344266	0.244404	0.306954	0.273482	0.265991	0.221866	0.272761	0.253341	0.265512	0.304839	0.265512	0.000000	0.371589	0.230861	0.287582	0.225242	0.236151	0.411243	0.265168	0.273730	0.230514	0.332052	0.251918	0.291619	0.247272	0.265550	0.302518	0.259636	0.248799	0.220490	0.280097	0.276248	0.324434	0.531437 [...]
+U68622	0.409194	0.383119	0.415027	0.359680	0.344108	0.394362	0.356954	0.383444	0.360180	0.314261	0.427369	0.418941	0.402721	0.341198	0.438472	0.331907	0.323315	0.432427	0.612744	0.437145	0.352503	0.392797	0.401787	0.326769	0.368968	0.383300	0.407097	0.392254	0.325096	0.413282	0.325096	0.371589	0.000000	0.403435	0.379793	0.348616	0.395343	0.450700	0.405119	0.386528	0.366399	0.357883	0.359425	0.335513	0.374017	0.343141	0.336679	0.359680	0.313454	0.366934	0.353207	0.358283	0.359680	0.510347 [...]
+U68623	0.273253	0.287768	0.310736	0.365184	0.247344	0.270690	0.348044	0.218017	0.271878	0.282537	0.279200	0.289376	0.240354	0.285366	0.378448	0.317244	0.227993	0.246334	0.556206	0.389787	0.358283	0.296193	0.344582	0.265512	0.204392	0.251181	0.206167	0.199277	0.297172	0.318229	0.297172	0.230861	0.403435	0.000000	0.291380	0.271468	0.078467	0.390702	0.237019	0.215762	0.167154	0.352302	0.291111	0.314987	0.219740	0.262888	0.321413	0.293373	0.278803	0.250230	0.274889	0.317196	0.320851	0.434443 [...]
+U68624	0.348793	0.248463	0.388493	0.301866	0.236421	0.279315	0.311178	0.307081	0.225078	0.133092	0.340130	0.369495	0.292729	0.113662	0.409908	0.219999	0.215106	0.329318	0.504303	0.385549	0.289841	0.310028	0.211884	0.124741	0.298898	0.304845	0.278042	0.300395	0.142765	0.140838	0.142765	0.287582	0.379793	0.291380	0.000000	0.261021	0.307896	0.400079	0.286594	0.285469	0.289370	0.311667	0.228248	0.165149	0.291943	0.123718	0.297172	0.114034	0.130379	0.254536	0.155691	0.151832	0.320446	0.489951 [...]
+U68625	0.338829	0.330389	0.359680	0.362488	0.222528	0.238840	0.330516	0.321375	0.237745	0.246349	0.316773	0.340106	0.304869	0.254692	0.389040	0.265083	0.224023	0.330983	0.535651	0.347530	0.332634	0.286043	0.289119	0.249678	0.309193	0.189645	0.279347	0.307806	0.270360	0.272302	0.270360	0.225242	0.348616	0.271468	0.261021	0.000000	0.279664	0.367932	0.305644	0.322097	0.268455	0.354419	0.240578	0.277547	0.275547	0.252146	0.329742	0.260975	0.257350	0.223138	0.268635	0.263548	0.349713	0.440840 [...]
+U68626	0.289890	0.331258	0.334111	0.382122	0.254171	0.242336	0.332286	0.206134	0.269502	0.289370	0.299756	0.305343	0.247109	0.304099	0.433602	0.296132	0.234936	0.252567	0.553400	0.435804	0.362033	0.279880	0.334817	0.284857	0.203240	0.254818	0.200785	0.195565	0.304876	0.325861	0.304876	0.236151	0.395343	0.078467	0.307896	0.279664	0.000000	0.426079	0.243717	0.222724	0.179466	0.343048	0.300831	0.314211	0.232025	0.281640	0.316704	0.304099	0.288634	0.251398	0.285168	0.328257	0.295811	0.463200 [...]
+U68627	0.460196	0.359192	0.500022	0.420349	0.396586	0.409723	0.421731	0.374967	0.380020	0.372970	0.437168	0.484127	0.395778	0.365865	0.246241	0.383472	0.381089	0.375581	0.565457	0.054455	0.400684	0.437190	0.411243	0.344388	0.343707	0.376347	0.359680	0.354966	0.371771	0.402721	0.371771	0.411243	0.450700	0.390702	0.400079	0.367932	0.426079	0.000000	0.363081	0.375192	0.429242	0.414051	0.365956	0.377522	0.367395	0.356752	0.402670	0.354897	0.360652	0.407315	0.418941	0.400684	0.432050	0.340769 [...]
+U68628	0.287313	0.314346	0.350766	0.356390	0.289841	0.300336	0.336571	0.122326	0.281484	0.249858	0.261354	0.331431	0.045250	0.292820	0.345511	0.283253	0.276688	0.093606	0.501886	0.388719	0.336626	0.278924	0.312724	0.294114	0.106184	0.298814	0.188051	0.083535	0.276933	0.289247	0.276933	0.265168	0.405119	0.237019	0.286594	0.305644	0.243717	0.363081	0.000000	0.090627	0.188933	0.337227	0.270360	0.271343	0.055913	0.257989	0.330389	0.285168	0.263877	0.301081	0.243355	0.293373	0.270760	0.426473 [...]
+U68629	0.304876	0.282537	0.333145	0.342170	0.278520	0.292820	0.346594	0.106410	0.281484	0.233746	0.282700	0.317305	0.113176	0.283918	0.352090	0.274864	0.252152	0.112197	0.519860	0.385214	0.337821	0.280287	0.306441	0.262900	0.092619	0.307154	0.206041	0.085569	0.266125	0.261648	0.266125	0.273730	0.386528	0.215762	0.285469	0.322097	0.222724	0.375192	0.090627	0.000000	0.206713	0.335224	0.255453	0.285986	0.087321	0.233746	0.329311	0.274214	0.238840	0.279408	0.247109	0.276933	0.301031	0.443436 [...]
+U68630	0.293627	0.308195	0.333379	0.340183	0.259933	0.281697	0.335749	0.209430	0.284238	0.263548	0.291995	0.316017	0.202970	0.298814	0.423288	0.324259	0.246076	0.241563	0.550524	0.436903	0.319316	0.287713	0.331715	0.288107	0.203308	0.270331	0.243496	0.193667	0.288242	0.311791	0.288242	0.230514	0.366399	0.167154	0.289370	0.268455	0.179466	0.429242	0.188933	0.206713	0.000000	0.338085	0.294523	0.294500	0.183653	0.256715	0.307367	0.296937	0.265955	0.252463	0.269931	0.309669	0.304889	0.508683 [...]
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.axes b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.axes
new file mode 100644
index 0000000..0be10f7
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.axes
@@ -0,0 +1,42 @@
+group	axis1	axis2	axis3	axis4	axis5	axis6	axis7	axis8	axis9	axis10	axis11	axis12	axis13	axis14	axis15	axis16	axis17	axis18	axis19	axis20	axis21	axis22	axis23	axis24	axis25	axis26	axis27	axis28	axis29	axis30	axis31	axis32	axis33	axis34	axis35	axis36	axis37	axis38	axis39	axis40	axis41	axis42	axis43	axis44	axis45	axis46	axis47	axis48	axis49	axis50	axis51	axis52	axis53	axis54	axis55	axis56	axis57	axis58	axis59	axis60	axis61	axis62	axis63	axis64	axis65	axis66	axis67	axis68	axis69	axis70	axis7 [...]
+U68589	0.064970	-0.093573	0.049365	-0.002984	0.124961	-0.108993	-0.094822	-0.002607	-0.153501	0.040014	-0.094989	-0.022746	-0.078404	0.001551	0.049195	-0.081796	0.070761	-0.020429	-0.020736	-0.145469	0.015975	0.059624	-0.026394	0.062380	0.023158	-0.081711	0.088897	-0.056857	0.001939	-0.034163	-0.009858	0.073623	-0.021051	-0.105845	-0.010399	-0.046578	-0.054367	0.004614	0.039577	-0.015874	0.047089	0.005236	0.000891	-0.028628	0.012761	0.023969	-0.005549	0.032011	-0.001403	0.007897	0.046736 [...]
+U68590	-0.065054	0.009551	-0.023692	0.056710	0.019710	0.002158	0.047565	0.013482	-0.020494	0.097522	0.136242	0.128859	0.048848	-0.094141	0.060139	-0.083202	-0.005604	-0.059574	0.010508	-0.025733	0.064555	0.092162	-0.004115	-0.042551	-0.024393	0.057596	0.092057	-0.042052	0.039559	0.020819	0.021999	-0.020096	-0.032630	0.023086	-0.066602	-0.007755	0.003435	-0.033439	0.000451	0.085474	-0.011204	0.010680	-0.027816	0.016128	0.010000	-0.062829	0.015542	0.023166	-0.011568	0.003526	-0.015250	-0.0 [...]
+U68591	0.100568	-0.081005	0.022725	0.178999	-0.018758	-0.140047	-0.089799	0.037543	0.173810	-0.060580	0.000954	-0.020918	-0.129375	-0.013041	0.054722	-0.098256	0.068154	-0.058943	-0.055626	0.066430	-0.085158	0.044632	0.044187	-0.040477	0.024711	0.053367	-0.014788	-0.013883	-0.003015	0.059725	-0.005835	0.038787	-0.007487	0.036995	0.098994	-0.012158	-0.004844	0.036558	-0.050647	0.030083	-0.010531	0.011635	0.014683	0.008616	0.005577	-0.004963	-0.050535	0.059924	-0.026765	0.014505	0.012198	0 [...]
+U68592	-0.151995	0.057494	0.162283	0.004289	-0.023613	-0.024830	-0.017908	-0.079047	0.082954	-0.019995	-0.006029	-0.068517	0.035500	-0.089014	-0.075431	-0.007438	-0.020781	0.052088	0.059126	0.021933	0.062302	-0.089185	-0.019640	0.042963	-0.022708	0.058816	0.069397	-0.008808	0.018097	0.046916	-0.011755	-0.010756	0.027169	0.043565	0.065153	-0.011666	0.071674	0.091492	0.021316	0.005553	-0.031093	0.026076	0.037299	-0.061657	0.011902	0.020311	-0.030727	0.026787	0.018004	-0.003713	-0.019660	-0 [...]
+U68593	-0.039480	-0.020067	-0.022333	0.105234	0.027505	-0.002196	0.004058	0.074365	-0.014630	0.116512	-0.011673	0.033273	0.049758	-0.038546	0.016626	-0.020054	-0.010343	0.048617	0.025593	-0.004945	0.027862	-0.040295	-0.058529	0.030829	0.015358	-0.040125	0.031521	-0.004194	-0.145991	0.027678	0.045035	-0.008249	-0.012399	0.020338	0.041654	-0.039516	-0.023230	0.100278	-0.027241	0.001047	-0.070158	0.025234	-0.067700	-0.092084	-0.002777	-0.068915	0.046873	-0.031381	-0.039335	0.004839	-0.01908 [...]
+U68594	0.044021	-0.055638	-0.011619	0.059544	0.043250	-0.010999	0.002471	-0.109754	-0.053780	-0.109806	-0.026834	0.033594	0.001853	-0.009407	-0.096803	0.115331	0.084247	0.016997	0.059545	0.019521	0.015426	-0.085112	0.021698	-0.065795	-0.013511	-0.132903	0.053275	0.073810	-0.060124	-0.027112	0.040044	0.061265	-0.000902	-0.055064	0.011682	0.034925	0.010117	-0.009220	-0.038044	0.067621	0.015119	0.086808	-0.039446	0.077465	0.050161	-0.008221	0.044043	0.032322	0.026014	0.013765	0.018811	-0.04 [...]
+U68595	-0.149434	0.038009	0.189550	-0.004502	-0.017025	0.064885	0.043002	0.042743	-0.004355	0.064262	-0.030819	0.029347	-0.080899	0.006686	0.039654	-0.016045	0.002212	-0.050907	0.031126	0.035999	-0.009112	-0.025360	-0.007245	-0.075190	-0.033080	-0.032696	-0.044220	0.026717	-0.005847	0.083093	0.048470	0.021545	-0.080002	0.058588	-0.025770	0.053055	-0.046365	-0.054288	0.042294	-0.030211	0.070826	0.037796	-0.025523	-0.049328	-0.004900	-0.081032	-0.037108	0.018754	-0.020466	-0.051339	0.02501 [...]
+U68596	0.133793	-0.021319	0.020175	-0.063384	-0.098256	-0.019684	-0.082036	0.042110	0.025666	-0.016072	0.012990	-0.002327	0.046691	0.003207	0.033494	0.035535	-0.000967	-0.027681	-0.044321	-0.003185	0.053516	0.004598	-0.063730	-0.053077	-0.053579	-0.004819	-0.028633	0.026852	0.027572	0.008949	0.034315	0.117495	-0.054493	-0.039230	0.024887	0.043023	-0.027191	0.039477	0.030567	0.035125	-0.055176	0.006389	0.043828	0.034964	0.047288	-0.046511	0.007923	0.032125	-0.036025	0.014156	0.028791	-0.0 [...]
+U68597	-0.014667	-0.029661	-0.033680	0.051144	0.005386	0.044414	0.042409	0.125591	-0.043771	-0.138852	-0.022479	0.009528	-0.024230	-0.029027	0.044269	0.040019	-0.006986	0.072400	0.068085	-0.067073	0.013327	-0.004733	-0.003824	0.017236	-0.026588	0.029154	0.055446	-0.008375	0.043667	0.010453	-0.112300	-0.027393	-0.025036	0.017038	-0.012668	0.006933	-0.025935	0.078785	0.010144	-0.079521	-0.024905	0.024615	0.032711	0.044572	-0.021944	-0.083347	0.050281	-0.012411	-0.073945	0.028697	-0.044205	 [...]
+U68598	-0.094469	0.002099	-0.088366	-0.009354	-0.013578	-0.014047	-0.032695	-0.014510	0.045069	-0.003314	-0.004789	0.030347	0.055915	-0.016497	0.035463	-0.006432	0.011798	-0.024252	0.015406	-0.001775	0.001608	-0.018669	0.012427	0.008361	-0.048570	-0.021833	-0.063773	-0.007716	0.053302	0.056045	-0.030314	-0.021273	0.071617	-0.010293	0.044386	0.000052	-0.041162	0.005751	-0.045227	-0.015133	0.068189	0.054064	-0.025536	-0.024755	0.024362	0.010679	0.012958	-0.061628	-0.040968	0.013426	0.03291 [...]
+U68599	0.090684	-0.033310	0.032898	-0.005209	0.042399	-0.063719	-0.053055	-0.021988	0.000910	0.071725	-0.105574	-0.017161	0.002157	-0.176170	-0.131226	-0.056418	-0.014522	0.002644	-0.005757	-0.220325	0.058290	-0.027293	0.092474	-0.037131	-0.054004	-0.012472	-0.093560	0.017697	0.052528	-0.012313	0.040171	-0.020857	0.048154	0.013215	0.015512	0.035525	0.028495	0.029550	-0.066418	-0.024968	0.026617	-0.014755	-0.018808	-0.006770	0.009482	-0.024425	0.006502	-0.022507	-0.035016	-0.030057	-0.026 [...]
+U68600	0.084271	-0.063564	0.059200	0.161046	0.021519	-0.122676	-0.077872	0.040280	0.163031	-0.044954	-0.004443	0.035418	-0.141332	-0.063820	0.040021	-0.091338	0.006898	0.009917	0.043013	0.087335	-0.044287	-0.045268	0.019336	0.037343	-0.016603	0.010605	0.071104	0.049708	-0.006785	-0.071378	0.032464	-0.048668	0.015178	-0.016127	-0.051606	0.011513	0.006575	-0.016728	0.050245	0.009067	0.012267	-0.032475	-0.045725	0.002669	-0.010948	0.023236	0.043500	-0.073121	0.017380	-0.034790	-0.019074	-0. [...]
+U68601	0.088357	-0.032853	0.025467	-0.116589	-0.082486	0.031488	-0.122925	0.034286	0.002519	-0.000401	-0.011656	0.038536	0.062432	-0.013998	-0.007548	0.007959	0.004253	0.017832	-0.013880	-0.042399	-0.061440	-0.056310	0.044259	0.045265	0.000718	-0.010832	-0.013965	-0.062846	-0.057648	-0.030878	0.009841	0.005448	-0.065887	0.012508	-0.020501	0.025877	-0.017055	0.001532	0.070578	0.042621	-0.011989	-0.113754	-0.025115	0.093737	-0.010474	0.033993	0.012936	-0.057426	0.003296	0.004368	0.048463	- [...]
+U68602	-0.121833	0.010178	-0.073840	-0.007963	-0.006724	-0.038973	0.007308	-0.007468	-0.058869	-0.028852	0.018430	-0.044500	-0.011728	-0.003663	-0.007674	-0.013635	0.044902	0.034594	-0.017668	0.010621	-0.011927	0.008640	0.066028	-0.035323	0.006214	-0.025137	-0.010650	-0.032673	0.051960	0.003700	0.029930	0.054257	0.045297	0.012684	0.007517	0.052262	0.024481	-0.028497	0.117176	-0.034262	0.007640	-0.034828	-0.021567	0.027161	-0.017271	-0.071373	-0.030565	-0.032794	0.003617	0.009545	0.020590 [...]
+U68603	0.156231	0.250040	-0.007400	-0.016844	0.068181	0.006943	0.028472	-0.078071	0.089962	-0.055052	0.004600	0.128011	0.121627	-0.097199	0.036637	0.025882	-0.109000	-0.112810	0.090471	-0.017350	-0.084566	-0.010967	0.019269	0.085442	0.007347	-0.038175	0.015240	-0.087746	-0.064709	0.024818	0.018397	0.046959	-0.007234	-0.004209	0.044182	0.034208	-0.013479	0.006439	0.002102	-0.002752	-0.004753	-0.001510	0.007950	-0.010629	-0.016728	0.007109	0.009919	-0.003392	-0.017137	0.002794	0.002988	0.0 [...]
+U68605	-0.025199	-0.018271	-0.032666	0.021661	-0.033663	0.050709	0.035510	0.099200	-0.037063	-0.196913	-0.010064	0.029903	0.058652	-0.001892	0.052639	0.018170	0.015489	0.118407	0.035492	-0.047751	-0.013724	-0.002350	0.060892	-0.040617	-0.042420	-0.019624	0.068788	0.045657	0.052106	0.076229	0.030500	-0.029382	-0.030160	0.000172	0.027755	0.028287	-0.002552	-0.018599	-0.028724	0.031761	-0.036052	-0.060329	-0.035242	-0.016767	-0.003418	-0.003621	-0.069236	0.038009	0.004606	-0.016696	0.030499 [...]
+U68606	-0.017457	-0.028686	-0.019757	0.081216	0.039316	0.027620	0.035884	0.096778	0.054092	0.049974	0.014375	0.032307	0.045796	0.020885	-0.048663	0.003727	-0.010283	0.063261	-0.055030	0.034719	0.050160	0.022341	0.002746	0.063667	-0.003410	-0.034196	0.034482	-0.070031	0.011094	0.008087	-0.042988	0.053427	0.028581	0.068665	-0.005324	0.082577	-0.037621	-0.057627	-0.051485	-0.020788	0.047060	-0.024873	0.008458	-0.086461	0.054222	0.004221	0.074354	-0.041768	0.010974	0.042045	0.017605	0.008472 [...]
+U68607	0.146791	-0.004391	0.040986	-0.136124	-0.009734	0.019666	-0.151212	0.047028	0.047460	0.057465	0.047081	0.060574	0.087110	0.107354	0.092499	0.024175	0.078542	0.046907	0.075688	-0.005386	0.029826	0.028189	0.109285	-0.144283	0.013604	0.009821	0.073744	0.039343	-0.031822	0.002401	0.023136	0.008448	0.102315	0.045606	0.026413	-0.007851	0.021987	0.023503	0.028311	0.014187	-0.011945	-0.031662	0.035842	-0.027146	0.004100	0.020700	-0.016209	-0.007530	0.002772	-0.014843	-0.026208	0.016464	0. [...]
+U68608	0.112354	0.086834	-0.079314	0.185645	-0.536855	0.040719	0.124535	-0.026165	-0.072641	0.031008	-0.052713	0.006165	-0.048168	0.008841	-0.022707	-0.019617	0.025595	-0.038266	-0.014597	-0.014326	0.015694	0.018839	0.014456	0.013667	-0.022464	-0.026909	0.006890	0.001933	-0.014894	0.000607	-0.029642	-0.004163	0.037842	-0.000485	-0.008796	-0.019868	-0.015402	0.016976	0.004208	0.009560	0.003713	0.004359	-0.023917	-0.000432	0.000347	-0.006146	0.019614	0.005365	-0.004724	0.012667	-0.008615	- [...]
+U68609	0.096164	0.315155	-0.015540	-0.001409	0.087239	0.034549	0.018419	0.027787	-0.088554	-0.022593	0.065059	-0.088293	-0.047011	0.041707	-0.022942	-0.024596	0.029461	-0.027597	-0.026891	0.008727	0.065096	-0.021175	0.022439	-0.038714	-0.030786	0.033875	0.040007	-0.070887	-0.014491	-0.045465	0.087224	-0.026986	-0.041825	0.008970	0.068711	-0.033216	0.042860	-0.031638	0.032102	0.044966	0.016970	0.069020	0.000402	-0.015480	0.087026	0.015151	0.060053	0.049139	-0.054644	0.015928	0.012186	-0.0 [...]
+U68610	-0.160321	0.058672	0.179753	-0.028420	0.007998	0.025730	0.012335	0.037135	-0.026399	0.005087	-0.001969	0.010284	0.026053	0.029323	0.041254	0.084705	0.016614	0.044810	-0.025348	0.017024	-0.090525	0.091232	0.014244	0.039035	0.042657	0.004535	0.036126	0.021405	-0.002850	0.031319	-0.008031	-0.077329	0.046467	-0.044265	0.032871	-0.013143	-0.004242	0.015106	0.015004	-0.034116	0.025293	0.043006	-0.058515	0.035202	0.045732	0.037926	0.038483	-0.003272	-0.098979	-0.066580	0.010254	-0.023820 [...]
+U68611	0.037321	-0.088040	0.013187	0.042324	0.021669	0.126323	-0.028673	-0.151227	0.018850	0.003697	0.030834	-0.038729	0.052185	-0.062302	0.022311	-0.111789	-0.049516	0.060957	-0.064410	0.044570	0.008391	-0.089124	0.066713	-0.005180	0.047851	-0.039375	0.043435	0.084098	-0.111401	0.027833	-0.060672	0.076114	-0.004040	0.022943	-0.040184	-0.003475	0.056571	-0.069870	-0.006205	-0.029914	-0.013471	0.019538	0.028446	-0.015656	-0.017093	-0.031905	0.011093	0.044582	-0.026910	0.026878	-0.008564	0 [...]
+U68612	-0.086230	-0.003739	-0.121039	-0.015558	0.061506	-0.002465	-0.029570	0.074298	-0.026317	-0.084612	-0.062185	-0.043098	-0.031851	-0.078011	-0.007469	0.023091	-0.160211	-0.081183	-0.168208	0.057424	0.064178	0.002929	-0.032836	-0.064577	0.066040	0.034300	0.000495	0.030703	-0.003336	-0.052393	0.031995	0.021714	0.048166	0.023385	-0.050434	0.073953	0.006509	0.033285	0.010798	-0.035703	-0.035504	0.007422	-0.077730	0.027795	0.006426	-0.023031	0.000679	-0.003879	-0.001704	0.002096	-0.05473 [...]
+U68613	-0.105698	0.024350	-0.074058	-0.018583	0.017530	-0.006791	0.020453	-0.027745	0.026017	-0.000924	0.013862	-0.003075	-0.012127	-0.030071	0.013821	-0.002496	0.040424	-0.030567	0.040834	0.026540	0.008869	-0.016894	0.065245	0.051621	-0.021647	-0.043219	-0.022881	0.008650	0.026937	0.011475	0.034102	0.033502	0.054159	-0.002134	0.024365	-0.095357	-0.002283	0.005285	-0.034044	-0.011668	0.008753	-0.092534	-0.045898	0.097670	0.037469	-0.046371	-0.075038	-0.035445	-0.045892	0.096439	-0.036881 [...]
+U68614	0.129422	-0.016073	0.034453	-0.047987	-0.080271	-0.000307	-0.081266	0.060052	0.024043	-0.014175	0.032171	-0.025569	0.026725	-0.017302	0.049778	-0.002530	-0.004140	-0.005762	-0.055863	-0.003381	0.073292	-0.066963	-0.035887	0.030594	0.005263	-0.017639	-0.002437	-0.002142	0.058207	0.013631	0.058106	0.046747	-0.007491	-0.033908	-0.005443	-0.016024	-0.035951	0.065746	0.016657	0.031982	-0.007713	-0.010967	0.066373	-0.005018	0.024991	0.065661	0.010433	-0.011960	0.082762	-0.040128	-0.0478 [...]
+U68615	0.070589	-0.028816	0.016548	0.127578	0.048380	0.134510	-0.027662	-0.138961	0.007551	0.030183	-0.023594	-0.076208	0.001425	-0.041888	0.089715	0.035345	0.014021	-0.000086	0.003186	-0.023818	-0.034269	0.029684	0.024115	-0.001747	0.008285	0.010907	-0.010159	-0.025993	0.067679	0.001196	-0.068694	0.008905	-0.023387	0.011274	-0.008398	-0.023790	-0.029762	0.024272	-0.025535	-0.008106	0.007204	0.031313	-0.018358	-0.006410	0.043699	-0.052227	-0.022254	-0.013838	0.076612	-0.015020	0.023557	0 [...]
+U68616	0.115776	-0.002758	0.010018	-0.037530	-0.016743	-0.009778	-0.008956	-0.051361	0.005757	0.002456	-0.002591	-0.032495	-0.007971	-0.089760	0.000719	0.101186	0.008904	-0.012993	-0.008816	-0.002681	0.041069	0.055772	-0.089113	0.117398	0.083304	0.052733	0.034466	0.152310	-0.039291	-0.050609	0.057816	-0.108895	-0.070982	0.016567	0.022575	-0.022472	-0.033072	-0.039149	-0.031104	0.062928	0.025124	-0.070254	0.017701	-0.063440	0.029679	-0.034573	-0.088782	-0.013957	-0.054599	0.032478	0.03966 [...]
+U68617	0.125332	-0.024393	0.034115	-0.069594	-0.097274	0.009605	-0.090815	0.037717	-0.000034	-0.029651	0.045416	-0.018599	0.030157	0.000811	0.021635	0.037072	0.001764	0.012942	-0.052752	0.005362	0.009478	0.000850	-0.001820	0.013404	-0.013252	0.007827	-0.059114	0.009957	0.040027	-0.038080	0.003440	-0.051875	0.030897	0.004180	-0.003922	-0.052789	0.022888	-0.045352	0.010635	0.009378	-0.016670	0.045644	-0.004440	-0.089005	0.005325	0.039036	0.000501	-0.025200	-0.035322	-0.031175	0.061826	0.06 [...]
+U68618	-0.108612	0.009804	-0.099551	-0.038505	-0.002441	-0.040642	0.001599	-0.006707	0.015516	0.006772	0.022861	-0.017309	0.024943	0.026384	0.021645	0.016155	0.035500	-0.033161	-0.013761	-0.030384	-0.002066	-0.032202	0.020412	0.052309	0.024483	-0.016896	0.010683	0.049206	0.013408	0.002589	-0.032616	-0.017969	0.011505	0.009348	0.007244	0.001047	-0.037605	0.002948	0.070500	0.021823	-0.002428	0.008529	-0.005959	-0.025654	0.060001	-0.046527	0.047621	-0.021696	0.053065	-0.008357	-0.056816	0.0 [...]
+U68619	-0.123183	0.013451	-0.127449	-0.059878	-0.019517	0.013730	-0.040356	-0.044922	0.023174	0.038621	0.008818	0.035591	-0.039590	0.011511	0.006618	0.000672	-0.019598	0.020478	0.032204	0.036197	-0.010994	0.055655	-0.021033	0.007466	-0.022393	0.065781	-0.016729	-0.005808	-0.025132	-0.058116	-0.025418	0.078385	0.039663	-0.017995	-0.007568	-0.034432	-0.028419	-0.021106	-0.055142	0.050785	0.015176	0.030048	-0.039557	0.027951	0.062340	-0.012420	0.048739	0.021208	0.018448	-0.142370	0.020545	0 [...]
+U68620	-0.108612	0.009804	-0.099551	-0.038505	-0.002441	-0.040642	0.001599	-0.006707	0.015516	0.006772	0.022861	-0.017309	0.024943	0.026384	0.021645	0.016155	0.035500	-0.033161	-0.013761	-0.030384	-0.002066	-0.032202	0.020412	0.052309	0.024483	-0.016896	0.010683	0.049206	0.013408	0.002589	-0.032616	-0.017969	0.011505	0.009348	0.007244	0.001047	-0.037605	0.002948	0.070500	0.021823	-0.002428	0.008529	-0.005959	-0.025654	0.060001	-0.046527	0.047621	-0.021696	0.053065	-0.008357	-0.056816	0.0 [...]
+U68621	0.043815	-0.112061	0.002561	0.093140	0.030676	0.061977	-0.032089	0.006613	-0.065843	-0.004510	0.051898	-0.007753	-0.052932	0.122112	-0.054100	-0.009583	-0.034554	-0.078477	0.001193	-0.076024	-0.028543	-0.007976	0.016389	0.059192	0.022288	-0.020269	0.001886	0.007461	-0.035383	-0.026238	-0.027254	-0.015734	-0.006607	0.029660	0.042477	0.045747	0.076375	0.059446	0.027741	0.019485	-0.037821	-0.010283	-0.001230	0.020442	-0.038220	-0.020230	0.031766	-0.057554	-0.004869	-0.026691	0.103451 [...]
+U68622	-0.038882	0.029286	0.071132	0.051012	0.058123	-0.021484	0.117266	0.048149	0.083239	-0.083154	-0.172070	-0.105941	0.231840	0.152150	0.049375	-0.150144	0.101237	-0.134083	0.004370	0.030778	0.020637	0.006374	-0.039383	0.041938	0.000437	-0.035401	-0.031666	0.030095	0.042668	0.001107	0.027386	-0.013303	-0.031114	0.002365	-0.030812	-0.010625	0.020752	0.005416	-0.005063	-0.005245	0.001378	-0.015059	-0.027382	0.019529	-0.020120	-0.021664	0.033400	-0.001243	-0.014810	-0.012208	0.005091	-0. [...]
+U68623	0.129811	-0.086213	0.022286	-0.020904	0.047658	-0.010613	0.034275	0.027031	0.019862	0.068001	0.033094	-0.025630	-0.060959	-0.046998	0.014648	0.083343	0.039516	-0.035982	0.043426	0.054113	0.028657	0.058827	-0.002434	0.045068	-0.077997	-0.035120	-0.016247	-0.098311	-0.015271	0.013258	0.040607	-0.012855	0.027352	0.057539	0.037314	-0.070523	0.058546	-0.000868	0.081463	-0.115483	-0.044110	-0.042337	0.013931	0.013808	-0.006978	-0.029113	0.018158	0.029265	0.018917	0.010131	0.007191	-0.01 [...]
+U68624	-0.099708	0.002479	-0.113259	-0.027141	-0.037379	0.025899	0.012132	-0.003616	-0.005610	-0.001627	-0.029201	-0.022736	-0.037094	-0.027005	-0.013651	0.003495	0.025429	0.033326	0.028161	0.049326	-0.015638	0.093248	0.066278	0.031333	-0.002219	-0.007464	0.074315	0.006848	0.022178	-0.007171	0.113442	-0.044245	0.010416	-0.039196	0.023696	0.141052	-0.033936	0.086282	-0.043985	-0.089661	-0.011712	0.015456	0.068426	-0.026944	0.003925	0.029246	0.050862	0.054140	0.037889	0.075904	0.067527	0.0 [...]
+U68625	0.020410	0.011453	-0.054840	0.109975	0.073947	0.115652	-0.004104	0.009641	-0.006853	0.010319	-0.098353	-0.058581	-0.019163	0.039491	-0.031029	0.080174	0.062295	0.013214	0.023404	-0.017629	-0.096413	-0.079580	-0.052652	-0.066712	0.003174	0.086810	-0.055245	-0.046236	-0.055197	-0.057674	0.058453	-0.036754	0.096558	0.036446	-0.014009	-0.006083	-0.090875	-0.018256	0.000741	0.008770	0.049401	-0.069417	-0.022950	0.003432	-0.025951	-0.019703	0.000367	0.041949	0.013077	0.060865	0.030399	- [...]
+U68626	0.122907	-0.126764	0.032638	-0.031233	0.026553	0.030705	0.041855	0.038708	0.002230	0.046063	-0.019051	-0.034513	-0.051374	-0.023012	0.017480	0.083711	0.035312	0.011776	0.029316	0.107703	0.064034	0.036308	-0.022659	-0.000425	-0.051106	-0.076328	-0.061333	0.054626	0.010341	-0.018666	-0.016433	0.065649	-0.037712	0.058568	0.067242	0.013485	-0.033485	0.039155	0.023429	0.008556	0.049206	-0.005913	-0.088173	-0.035388	-0.022661	0.034751	-0.020832	-0.070539	-0.020126	-0.038410	0.008166	0.0 [...]
+U68627	0.105428	0.314694	-0.000887	-0.005802	0.057217	0.055597	0.028488	0.015366	-0.085505	-0.040316	0.128850	-0.041973	-0.006647	0.021519	0.002736	0.009446	0.002842	-0.035976	-0.022059	0.012703	0.023960	0.005294	0.039874	0.033871	-0.040804	-0.027704	-0.016088	-0.015579	0.029351	0.050636	-0.004024	-0.019341	0.030804	0.005428	-0.054821	-0.064295	-0.026604	0.048073	-0.071519	0.034411	0.027879	-0.027847	0.014731	-0.009104	-0.080324	-0.017367	-0.032395	-0.012059	0.093471	-0.063272	-0.045029	 [...]
+U68628	0.111094	-0.015407	0.024504	-0.108456	-0.080278	0.004493	-0.119326	0.026466	0.005042	0.015302	-0.018534	0.018288	0.011267	0.006970	0.013827	0.008406	-0.046925	-0.021049	-0.031902	-0.035230	-0.032978	-0.009587	0.029399	-0.015869	0.002109	0.010896	0.025309	-0.006754	-0.024684	0.014150	0.000749	-0.042637	-0.063301	0.004474	-0.064904	0.003215	-0.036749	0.014985	-0.089524	-0.026085	0.112443	-0.004095	0.061958	-0.019851	-0.079466	-0.083269	0.049749	-0.039195	-0.014161	0.028432	-0.012492 [...]
+U68629	0.096197	-0.015591	0.005578	-0.092230	-0.061588	-0.011562	-0.123309	0.043896	0.044435	0.006870	0.047013	0.024600	0.038039	-0.000817	-0.001062	-0.010874	-0.001226	0.000792	0.004272	0.018764	0.016701	0.012646	-0.081686	-0.016043	0.007579	-0.008498	-0.002181	0.001276	0.034725	0.027362	-0.033332	0.037128	-0.066620	-0.039949	-0.021547	-0.009776	0.064601	-0.042286	-0.085192	-0.089680	-0.007969	-0.031930	-0.155339	0.027269	0.034142	-0.005442	0.025175	0.030108	0.031865	0.058027	0.026338	0 [...]
+U68630	0.073642	-0.098576	0.047330	-0.022768	-0.007081	0.016911	-0.063966	0.066890	-0.009314	0.086265	-0.038857	-0.053036	0.038673	-0.027221	-0.006788	0.083308	0.060352	-0.093902	0.029194	0.088975	-0.054462	-0.023425	0.000503	0.048931	-0.039686	0.051511	0.012362	-0.025626	-0.008318	0.037745	-0.046504	-0.024447	0.054669	-0.081686	-0.126801	0.076751	0.153953	0.028485	-0.006766	0.004923	0.038345	0.082087	0.028658	0.007795	0.011145	-0.007684	-0.015477	-0.000930	-0.010096	-0.002539	0.000945	0 [...]
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.filter b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.filter
new file mode 100644
index 0000000..853ab6f
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.filter
@@ -0,0 +1 @@
+000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 [...]
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.freq b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.freq
new file mode 100644
index 0000000..eba2d4b
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.freq
@@ -0,0 +1,42 @@
+#1.36.1
+0	0.0
+1	0.0
+2	0.0
+3	0.0
+4	0.0
+5	0.0
+6	0.0
+7	0.0
+8	0.0
+9	0.0
+10	0.0
+11	0.0
+12	0.0
+13	0.0
+14	0.0
+15	0.0
+16	0.0
+17	0.0
+18	0.0
+19	0.0
+20	0.0
+21	0.0
+22	0.0
+23	0.0
+24	0.0
+25	0.0
+26	0.0
+27	0.0
+28	0.0
+29	0.0
+30	0.0
+31	0.0
+32	0.0
+33	0.0
+34	0.0
+35	0.0
+36	0.0
+37	0.0
+38	0.0
+39	0.0
+40	0.0
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.lower.dist b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.lower.dist
new file mode 100644
index 0000000..3514ee9
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.lower.dist
@@ -0,0 +1,99 @@
+    98
+U68589
+U68590	0.337144
+U68591	0.360977	0.378254
+U68592	0.415506	0.319757	0.414843
+U68593	0.287299	0.169021	0.336162	0.284235
+U68594	0.297057	0.329311	0.356376	0.332574	0.276866
+U68595	0.392240	0.273158	0.427517	0.229894	0.283055	0.364319
+U68596	0.309315	0.312653	0.322673	0.363330	0.291774	0.280537	0.360148
+U68597	0.320066	0.266838	0.352060	0.325227	0.217362	0.263379	0.317196	0.276011
+U68598	0.328638	0.206259	0.344952	0.265168	0.189372	0.251328	0.281804	0.259990	0.213189
+U68599	0.257245	0.328665	0.369658	0.338989	0.294160	0.303296	0.385823	0.287117	0.326099	0.292542
+U68600	0.337227	0.341084	0.122794	0.364319	0.302449	0.322132	0.388912	0.325688	0.324279	0.335138	0.343091
+U68601	0.301773	0.323764	0.372984	0.350376	0.284617	0.292099	0.345663	0.131424	0.280789	0.253181	0.272707	0.349854
+U68602	0.307105	0.220396	0.369623	0.266663	0.219618	0.249492	0.278042	0.301197	0.208454	0.090237	0.317244	0.356991	0.268659
+U68603	0.477681	0.362613	0.467881	0.418468	0.385130	0.386773	0.457303	0.359219	0.401870	0.351449	0.396829	0.427677	0.343707	0.426605
+U68605	0.364754	0.274376	0.377089	0.331206	0.257535	0.237829	0.333759	0.273363	0.052145	0.213304	0.353944	0.358357	0.269811	0.208459	0.402262
+U68606	0.320363	0.201198	0.324100	0.301152	0.122862	0.277808	0.289841	0.277082	0.205688	0.179339	0.306491	0.294295	0.276017	0.204576	0.386773	0.223376
+U68607	0.340712	0.317527	0.365825	0.384448	0.324013	0.334547	0.378719	0.125029	0.339908	0.275794	0.332816	0.355104	0.105021	0.321747	0.381782	0.282689	0.310100
+U68608	0.643389	0.530014	0.548815	0.587163	0.518760	0.543352	0.575126	0.478761	0.517938	0.515138	0.575865	0.567758	0.515216	0.522807	0.601148	0.510658	0.529218	0.598114
+U68609	0.433138	0.373379	0.490123	0.416497	0.391165	0.396320	0.415251	0.382431	0.389787	0.392240	0.426079	0.478155	0.408959	0.358242	0.280287	0.398293	0.383464	0.376207	0.581450
+U68610	0.376207	0.294704	0.450700	0.215923	0.284617	0.357389	0.158838	0.364319	0.286874	0.259924	0.406623	0.407315	0.334876	0.258188	0.439624	0.298684	0.287713	0.348574	0.603638	0.405148
+U68611	0.351515	0.286594	0.363708	0.319670	0.253894	0.244067	0.354610	0.296894	0.312058	0.268378	0.312736	0.335955	0.278924	0.285788	0.409367	0.308524	0.253689	0.308455	0.556018	0.444553	0.371261
+U68612	0.373270	0.293104	0.396633	0.359680	0.273158	0.327178	0.361109	0.315411	0.252570	0.211242	0.346776	0.377929	0.332523	0.190585	0.439617	0.275360	0.260336	0.427909	0.602788	0.393834	0.352605	0.323464
+U68613	0.325136	0.214397	0.363488	0.254598	0.213889	0.251136	0.272713	0.298772	0.227071	0.087582	0.307959	0.340460	0.280600	0.078406	0.353443	0.230342	0.209198	0.306422	0.529843	0.365423	0.253805	0.272211	0.215762
+U68614	0.289019	0.304876	0.311317	0.332803	0.267779	0.286583	0.350892	0.039097	0.260235	0.257723	0.268573	0.294108	0.113060	0.295495	0.354610	0.266324	0.246142	0.102569	0.479156	0.360638	0.348206	0.276644	0.299734	0.284460
+U68615	0.310340	0.294032	0.329411	0.338989	0.274252	0.260851	0.343931	0.297549	0.280899	0.268754	0.309717	0.324279	0.308667	0.300509	0.371261	0.304816	0.262712	0.338989	0.529751	0.392824	0.350892	0.163140	0.355207	0.278520	0.272001
+U68616	0.307018	0.296294	0.347363	0.326397	0.271113	0.261823	0.367380	0.198193	0.289951	0.268852	0.277586	0.317674	0.214579	0.303362	0.340156	0.292471	0.280237	0.293255	0.516505	0.361630	0.334008	0.274066	0.322132	0.266174	0.178312	0.255720
+U68617	0.308559	0.318935	0.328716	0.347490	0.291080	0.288339	0.353619	0.075812	0.269742	0.245013	0.275370	0.319670	0.096909	0.291180	0.365692	0.259158	0.262636	0.091577	0.475006	0.366399	0.328491	0.276436	0.313854	0.289008	0.063195	0.279695	0.170293
+U68618	0.324956	0.228159	0.384142	0.281664	0.214579	0.269312	0.302505	0.286874	0.228204	0.063062	0.329736	0.360624	0.269696	0.075377	0.387976	0.242871	0.207259	0.299521	0.533469	0.376591	0.272372	0.293630	0.208721	0.082361	0.272875	0.305619	0.263548	0.274451
+U68619	0.368119	0.215592	0.397333	0.312183	0.240602	0.305548	0.320312	0.307011	0.274934	0.058566	0.357883	0.384102	0.282700	0.137887	0.412799	0.285073	0.237879	0.332574	0.543402	0.394174	0.302606	0.308422	0.229389	0.095671	0.308489	0.315059	0.303357	0.301197	0.095064
+U68620	0.324956	0.228159	0.384142	0.281664	0.214579	0.269312	0.302505	0.286874	0.228204	0.063062	0.329736	0.360624	0.269696	0.075377	0.387976	0.242871	0.207259	0.299521	0.533469	0.376591	0.272372	0.293630	0.208721	0.082361	0.272875	0.305619	0.263548	0.274451	0.000000	0.095064
+U68621	0.259707	0.285269	0.315472	0.373185	0.223590	0.230560	0.342812	0.279315	0.229993	0.273230	0.306116	0.302011	0.258358	0.272545	0.438400	0.277730	0.211947	0.300490	0.526527	0.412038	0.344266	0.244404	0.306954	0.273482	0.265991	0.221866	0.272761	0.253341	0.265512	0.304839	0.265512
+U68622	0.409194	0.383119	0.415027	0.359680	0.344108	0.394362	0.356954	0.383444	0.360180	0.314261	0.427369	0.418941	0.402721	0.341198	0.438472	0.331907	0.323315	0.432427	0.612744	0.437145	0.352503	0.392797	0.401787	0.326769	0.368968	0.383300	0.407097	0.392254	0.325096	0.413282	0.325096	0.371589
+U68623	0.273253	0.287768	0.310736	0.365184	0.247344	0.270690	0.348044	0.218017	0.271878	0.282537	0.279200	0.289376	0.240354	0.285366	0.378448	0.317244	0.227993	0.246334	0.556206	0.389787	0.358283	0.296193	0.344582	0.265512	0.204392	0.251181	0.206167	0.199277	0.297172	0.318229	0.297172	0.230861	0.403435
+U68624	0.348793	0.248463	0.388493	0.301866	0.236421	0.279315	0.311178	0.307081	0.225078	0.133092	0.340130	0.369495	0.292729	0.113662	0.409908	0.219999	0.215106	0.329318	0.504303	0.385549	0.289841	0.310028	0.211884	0.124741	0.298898	0.304845	0.278042	0.300395	0.142765	0.140838	0.142765	0.287582	0.379793	0.291380
+U68625	0.338829	0.330389	0.359680	0.362488	0.222528	0.238840	0.330516	0.321375	0.237745	0.246349	0.316773	0.340106	0.304869	0.254692	0.389040	0.265083	0.224023	0.330983	0.535651	0.347530	0.332634	0.286043	0.289119	0.249678	0.309193	0.189645	0.279347	0.307806	0.270360	0.272302	0.270360	0.225242	0.348616	0.271468	0.261021
+U68626	0.289890	0.331258	0.334111	0.382122	0.254171	0.242336	0.332286	0.206134	0.269502	0.289370	0.299756	0.305343	0.247109	0.304099	0.433602	0.296132	0.234936	0.252567	0.553400	0.435804	0.362033	0.279880	0.334817	0.284857	0.203240	0.254818	0.200785	0.195565	0.304876	0.325861	0.304876	0.236151	0.395343	0.078467	0.307896	0.279664
+U68627	0.460196	0.359192	0.500022	0.420349	0.396586	0.409723	0.421731	0.374967	0.380020	0.372970	0.437168	0.484127	0.395778	0.365865	0.246241	0.383472	0.381089	0.375581	0.565457	0.054455	0.400684	0.437190	0.411243	0.344388	0.343707	0.376347	0.359680	0.354966	0.371771	0.402721	0.371771	0.411243	0.450700	0.390702	0.400079	0.367932	0.426079
+U68628	0.287313	0.314346	0.350766	0.356390	0.289841	0.300336	0.336571	0.122326	0.281484	0.249858	0.261354	0.331431	0.045250	0.292820	0.345511	0.283253	0.276688	0.093606	0.501886	0.388719	0.336626	0.278924	0.312724	0.294114	0.106184	0.298814	0.188051	0.083535	0.276933	0.289247	0.276933	0.265168	0.405119	0.237019	0.286594	0.305644	0.243717	0.363081
+U68629	0.304876	0.282537	0.333145	0.342170	0.278520	0.292820	0.346594	0.106410	0.281484	0.233746	0.282700	0.317305	0.113176	0.283918	0.352090	0.274864	0.252152	0.112197	0.519860	0.385214	0.337821	0.280287	0.306441	0.262900	0.092619	0.307154	0.206041	0.085569	0.266125	0.261648	0.266125	0.273730	0.386528	0.215762	0.285469	0.322097	0.222724	0.375192	0.090627
+U68630	0.293627	0.308195	0.333379	0.340183	0.259933	0.281697	0.335749	0.209430	0.284238	0.263548	0.291995	0.316017	0.202970	0.298814	0.423288	0.324259	0.246076	0.241563	0.550524	0.436903	0.319316	0.287713	0.331715	0.288107	0.203308	0.270331	0.243496	0.193667	0.288242	0.311791	0.288242	0.230514	0.366399	0.167154	0.289370	0.268455	0.179466	0.429242	0.188933	0.206713
+U68631	0.398745	0.273482	0.442063	0.179474	0.283223	0.340691	0.087023	0.349614	0.307129	0.256715	0.383119	0.395785	0.326769	0.266079	0.434792	0.312653	0.286311	0.389518	0.582089	0.415039	0.122032	0.325227	0.359680	0.239932	0.330765	0.342727	0.348416	0.339273	0.281484	0.304099	0.281484	0.332052	0.357883	0.352302	0.311667	0.354419	0.343048	0.414051	0.337227	0.335224	0.338085
+U68632	0.323458	0.265198	0.319388	0.326851	0.225527	0.269265	0.340156	0.271657	0.090568	0.180919	0.343991	0.297402	0.262255	0.181048	0.377719	0.106011	0.213692	0.343693	0.512306	0.369502	0.316394	0.288339	0.223342	0.200086	0.242629	0.309422	0.285214	0.235838	0.212177	0.229421	0.212177	0.251918	0.359425	0.291111	0.228248	0.240578	0.300831	0.365956	0.270360	0.255453	0.294523	0.317889
+U68633	0.343931	0.272626	0.395174	0.298352	0.244836	0.277267	0.311076	0.301609	0.248578	0.166671	0.347951	0.383867	0.290033	0.169421	0.366452	0.262468	0.220723	0.311620	0.511328	0.365873	0.285826	0.284447	0.057989	0.180522	0.301609	0.295811	0.292522	0.296685	0.172607	0.147700	0.172607	0.291619	0.335513	0.314987	0.165149	0.277547	0.314211	0.377522	0.271343	0.285986	0.294500	0.288465	0.236842
+U68634	0.270320	0.323924	0.355606	0.302463	0.268053	0.281613	0.296415	0.116332	0.260771	0.237960	0.255391	0.338041	0.060707	0.271468	0.335513	0.259629	0.264574	0.085083	0.495486	0.368906	0.292522	0.274589	0.297432	0.280872	0.091368	0.289504	0.194553	0.088740	0.250617	0.264574	0.250617	0.247272	0.374017	0.219740	0.291943	0.275547	0.232025	0.367395	0.055913	0.087321	0.183653	0.299159	0.245339	0.288185
+U68635	0.328638	0.216451	0.372774	0.260630	0.202789	0.265168	0.281804	0.268754	0.238261	0.091021	0.307991	0.352503	0.256082	0.088528	0.383472	0.250668	0.225386	0.297115	0.523554	0.374797	0.280214	0.277833	0.204444	0.083419	0.257723	0.307959	0.245275	0.259354	0.081334	0.103613	0.081334	0.265550	0.343141	0.262888	0.123718	0.252146	0.281640	0.356752	0.257989	0.233746	0.256715	0.262568	0.202000	0.160844	0.250617
+U68636	0.360667	0.281824	0.425988	0.164494	0.269811	0.315691	0.074568	0.323601	0.290224	0.264200	0.354464	0.379655	0.324605	0.246808	0.409908	0.298546	0.275336	0.360638	0.568020	0.419842	0.113176	0.318002	0.327350	0.250717	0.307959	0.326113	0.324779	0.313377	0.280600	0.281484	0.280600	0.302518	0.336679	0.321413	0.297172	0.329742	0.316704	0.402670	0.330389	0.329311	0.307367	0.006024	0.295536	0.299971	0.304099	0.268468
+U68637	0.317641	0.241593	0.367269	0.275794	0.214077	0.251548	0.282904	0.307129	0.227664	0.101521	0.328894	0.347509	0.277833	0.032570	0.394969	0.219740	0.209430	0.321127	0.527665	0.359680	0.245608	0.284078	0.192489	0.098361	0.292820	0.310948	0.289609	0.288419	0.108701	0.090573	0.108701	0.259636	0.359680	0.293373	0.114034	0.260975	0.304099	0.354897	0.285168	0.274214	0.296937	0.269035	0.182363	0.167496	0.279190	0.090237	0.268754
+U68638	0.307311	0.230389	0.363032	0.268092	0.196928	0.255839	0.290513	0.270360	0.220923	0.054228	0.312653	0.349713	0.261273	0.069818	0.369931	0.226896	0.186739	0.293285	0.539010	0.376207	0.256331	0.270628	0.186695	0.076175	0.253585	0.294934	0.244067	0.250108	0.020396	0.076014	0.020396	0.248799	0.313454	0.278803	0.130379	0.257350	0.288634	0.360652	0.263877	0.238840	0.265955	0.268468	0.188040	0.169421	0.240945	0.077156	0.270088	0.085319
+U68639	0.318242	0.228159	0.342135	0.355578	0.113423	0.303699	0.326581	0.296132	0.218861	0.217646	0.308495	0.309890	0.286874	0.236275	0.455351	0.256365	0.098208	0.317395	0.554398	0.396051	0.320988	0.279414	0.279143	0.229291	0.268659	0.273673	0.307339	0.277730	0.238146	0.263244	0.238146	0.220490	0.366934	0.250230	0.254536	0.223138	0.251398	0.407315	0.301081	0.279408	0.252463	0.316294	0.234687	0.258338	0.287208	0.229810	0.305656	0.232616	0.222623
+U68640	0.315163	0.255453	0.362984	0.303351	0.232067	0.267649	0.311238	0.267363	0.282700	0.160277	0.289841	0.369636	0.245013	0.150174	0.408422	0.281484	0.251357	0.292002	0.496751	0.406702	0.302570	0.295223	0.232061	0.156701	0.270891	0.303351	0.248300	0.261021	0.145189	0.166133	0.145189	0.280097	0.353207	0.274889	0.155691	0.268635	0.285168	0.418941	0.243355	0.247109	0.269931	0.302582	0.248141	0.177731	0.224344	0.128824	0.294114	0.147620	0.137655	0.266719
+U68641	0.334519	0.219740	0.392626	0.295152	0.230975	0.288543	0.311238	0.310893	0.235348	0.059437	0.345365	0.365777	0.288885	0.112339	0.415506	0.247859	0.233701	0.321801	0.547458	0.382770	0.304099	0.313081	0.235404	0.092724	0.302594	0.301848	0.292637	0.298123	0.069377	0.065526	0.069377	0.276248	0.358283	0.317196	0.151832	0.263548	0.328257	0.400684	0.293373	0.276933	0.309669	0.288293	0.210404	0.186103	0.265629	0.103537	0.275102	0.108022	0.072655	0.259776	0.188057
+U68642	0.259354	0.357218	0.425088	0.347147	0.324266	0.326397	0.357723	0.287842	0.315691	0.304895	0.305583	0.410836	0.277621	0.292002	0.443308	0.304882	0.330363	0.326215	0.584131	0.421971	0.335749	0.326397	0.314346	0.326489	0.298857	0.356390	0.301081	0.285616	0.333640	0.328691	0.333640	0.324434	0.359680	0.320851	0.320446	0.349713	0.295811	0.432050	0.270760	0.301031	0.304889	0.346062	0.301031	0.298197	0.255902	0.315353	0.337186	0.286305	0.306481	0.354538	0.314261	0.334262
+U68643	0.513252	0.507737	0.518779	0.556294	0.486147	0.491101	0.541469	0.434443	0.475314	0.467931	0.453716	0.489114	0.467569	0.506595	0.319627	0.501368	0.455046	0.428074	0.639768	0.316086	0.550657	0.549148	0.493996	0.460841	0.435707	0.501990	0.412859	0.455046	0.472888	0.489627	0.472888	0.531437	0.510347	0.434443	0.489951	0.440840	0.463200	0.340769	0.426473	0.443436	0.508683	0.544453	0.483786	0.422792	0.405491	0.478961	0.528684	0.493636	0.474782	0.481765	0.480584	0.474328	0.535895
+U68644	0.321245	0.178105	0.344833	0.353384	0.211278	0.250325	0.320049	0.324729	0.223132	0.220396	0.308514	0.325888	0.335821	0.233137	0.415147	0.257295	0.205330	0.356730	0.550818	0.435867	0.342412	0.298395	0.268659	0.237632	0.322103	0.259524	0.309399	0.318008	0.240927	0.287662	0.240927	0.203750	0.374212	0.275083	0.270760	0.221359	0.272013	0.433514	0.330363	0.321801	0.260273	0.301129	0.240330	0.233821	0.306565	0.234864	0.288035	0.243137	0.225386	0.214707	0.263548	0.273363	0.353619	0.539342
+U68645	0.382122	0.215419	0.433896	0.281513	0.250059	0.322242	0.329525	0.320324	0.276644	0.056869	0.365035	0.413440	0.278042	0.141007	0.422982	0.281020	0.255784	0.348163	0.578345	0.417506	0.294354	0.317795	0.258630	0.101083	0.330520	0.328142	0.323464	0.304827	0.095468	0.051539	0.095468	0.329324	0.401357	0.334564	0.128519	0.285366	0.336209	0.423717	0.286874	0.252759	0.337266	0.287010	0.252935	0.151116	0.251266	0.100294	0.270088	0.104797	0.078908	0.273253	0.141210	0.076777	0.312513	0.497091 [...]
+U68646	0.338989	0.133686	0.360705	0.354194	0.183046	0.287798	0.328058	0.321238	0.238173	0.237157	0.329081	0.335982	0.339280	0.268748	0.396313	0.276436	0.205189	0.363158	0.530906	0.405148	0.326397	0.317244	0.302881	0.255453	0.301220	0.291387	0.295560	0.320452	0.275567	0.301175	0.275567	0.245375	0.377369	0.286601	0.287010	0.267102	0.300887	0.390401	0.345711	0.311061	0.305337	0.315503	0.239488	0.304931	0.310707	0.260927	0.297115	0.277833	0.250906	0.234643	0.303345	0.293630	0.371187	0.527475 [...]
+U68647	0.311488	0.292498	0.369193	0.355266	0.292387	0.250443	0.338641	0.253537	0.260877	0.283792	0.285623	0.345905	0.235212	0.273411	0.395090	0.258952	0.253791	0.250934	0.493520	0.434995	0.337603	0.230661	0.335138	0.260533	0.247630	0.281947	0.224281	0.227476	0.277657	0.280315	0.277657	0.252842	0.350814	0.184931	0.265449	0.311416	0.182388	0.429138	0.240487	0.261676	0.238540	0.319620	0.275524	0.323319	0.233275	0.270445	0.325073	0.288965	0.280315	0.293359	0.245177	0.263548	0.295095	0.437808 [...]
+U68648	0.341095	0.280214	0.320658	0.345025	0.232616	0.263205	0.325930	0.277323	0.080515	0.204243	0.347449	0.290368	0.285879	0.193059	0.401639	0.109729	0.224997	0.350157	0.524934	0.369502	0.332904	0.315059	0.220441	0.216451	0.253894	0.300025	0.284361	0.251570	0.219871	0.245536	0.219871	0.231127	0.362749	0.289119	0.222303	0.240267	0.295955	0.372774	0.286594	0.262900	0.299504	0.319500	0.053835	0.236890	0.274079	0.210733	0.300178	0.189802	0.206483	0.244816	0.250947	0.224397	0.337227	0.499400 [...]
+U68649	0.306169	0.206522	0.325439	0.328101	0.112072	0.265408	0.294354	0.285509	0.216293	0.195545	0.304099	0.297118	0.281157	0.210058	0.427832	0.225931	0.088631	0.314150	0.511139	0.380020	0.302594	0.246272	0.264399	0.207005	0.246142	0.248746	0.280971	0.270886	0.213078	0.259907	0.213078	0.201445	0.337603	0.241674	0.207751	0.207324	0.235523	0.381089	0.287713	0.272211	0.237380	0.295152	0.228785	0.236229	0.258439	0.204047	0.278313	0.223345	0.197745	0.078467	0.229894	0.226125	0.336679	0.466873 [...]
+U68651	0.343707	0.225386	0.383119	0.297497	0.229389	0.285221	0.304876	0.275794	0.258283	0.122632	0.307081	0.379793	0.239284	0.128374	0.382784	0.255977	0.218866	0.291481	0.512865	0.394969	0.292910	0.277528	0.239217	0.128950	0.276453	0.310028	0.262279	0.263859	0.125200	0.139052	0.125200	0.298857	0.342960	0.292194	0.127424	0.260018	0.299594	0.402493	0.243831	0.243137	0.272114	0.287842	0.238592	0.167082	0.229658	0.106624	0.283591	0.115862	0.117632	0.235838	0.071973	0.161964	0.314094	0.477706 [...]
+U68652	0.348475	0.300901	0.379501	0.213459	0.303686	0.362481	0.162738	0.349097	0.299971	0.272013	0.338989	0.368187	0.307230	0.252951	0.440840	0.282868	0.293900	0.393620	0.599761	0.392436	0.034512	0.349752	0.316294	0.260877	0.321103	0.336162	0.341726	0.310123	0.267882	0.334118	0.267882	0.314077	0.394923	0.343393	0.314177	0.342546	0.359425	0.383472	0.309576	0.330389	0.326026	0.127003	0.308324	0.283918	0.284098	0.270341	0.116217	0.249419	0.259194	0.309890	0.304099	0.308781	0.328875	0.534844 [...]
+U68653	0.243919	0.269939	0.347418	0.326145	0.259968	0.236729	0.307089	0.210949	0.213433	0.254871	0.272729	0.332441	0.201004	0.230563	0.374621	0.224783	0.227154	0.221028	0.571605	0.393039	0.286094	0.214294	0.289992	0.244357	0.197150	0.245817	0.183668	0.192784	0.238132	0.244357	0.238132	0.250833	0.341167	0.177292	0.231928	0.288050	0.140909	0.370407	0.194447	0.204305	0.228277	0.289992	0.251591	0.311791	0.205153	0.220881	0.291919	0.235376	0.230563	0.254111	0.218291	0.245817	0.239642	0.411118 [...]
+U68654	0.301619	0.216102	0.316998	0.323619	0.078728	0.277338	0.299558	0.281859	0.230306	0.199143	0.285560	0.290607	0.266425	0.234185	0.406823	0.247805	0.092168	0.312533	0.518490	0.411424	0.315503	0.234185	0.286151	0.227071	0.253115	0.257692	0.289247	0.270002	0.230411	0.258374	0.230411	0.209637	0.336371	0.241979	0.246241	0.213363	0.250546	0.411956	0.275794	0.264188	0.248226	0.299594	0.223210	0.237459	0.248226	0.223691	0.288761	0.240927	0.212177	0.078467	0.244762	0.249161	0.319500	0.489951 [...]
+U68655	0.371532	0.274415	0.421263	0.311791	0.248463	0.312513	0.312190	0.330581	0.287448	0.141357	0.352605	0.414051	0.329950	0.134623	0.458622	0.296294	0.286026	0.354464	0.605463	0.399332	0.314261	0.348574	0.241782	0.132502	0.330581	0.342488	0.307991	0.325750	0.129518	0.145189	0.129518	0.320722	0.401167	0.315071	0.166727	0.297926	0.330389	0.411243	0.312653	0.291903	0.300873	0.314177	0.258995	0.197107	0.300817	0.095852	0.316599	0.136439	0.131251	0.266778	0.174732	0.155965	0.333893	0.489770 [...]
+U68656	0.377102	0.312052	0.378254	0.336626	0.312724	0.310272	0.329749	0.300274	0.294782	0.249419	0.388604	0.381726	0.298684	0.282712	0.451079	0.312120	0.240514	0.307204	0.588165	0.423177	0.339589	0.275336	0.304895	0.266838	0.285879	0.300980	0.332765	0.290448	0.276712	0.265821	0.276712	0.191624	0.411424	0.322248	0.294114	0.285579	0.308821	0.421971	0.284857	0.268468	0.335595	0.313533	0.285579	0.296553	0.275794	0.261230	0.311238	0.288035	0.251726	0.302544	0.286451	0.267506	0.400190	0.534446 [...]
+U68657	0.428155	0.365054	0.445836	0.424992	0.364976	0.394174	0.419034	0.337227	0.380688	0.364065	0.389650	0.427271	0.372774	0.353020	0.235348	0.381696	0.356836	0.344952	0.529255	0.120257	0.404023	0.413081	0.393030	0.349177	0.334876	0.383805	0.319187	0.337227	0.366399	0.382777	0.366399	0.399938	0.440840	0.382080	0.386925	0.349022	0.421021	0.103389	0.351040	0.336019	0.404951	0.421143	0.361630	0.372728	0.345990	0.361630	0.408763	0.349713	0.358713	0.382431	0.405582	0.388719	0.412218	0.304889 [...]
+U68658	0.343582	0.244067	0.413887	0.299483	0.227562	0.292637	0.319716	0.301031	0.251943	0.073124	0.338106	0.399584	0.271435	0.092524	0.395502	0.261230	0.227613	0.304876	0.565329	0.370938	0.290852	0.305631	0.209994	0.095435	0.289609	0.329950	0.282537	0.282183	0.022512	0.103613	0.022512	0.263548	0.351568	0.314346	0.147798	0.278803	0.328875	0.371951	0.279506	0.273049	0.303286	0.303320	0.228113	0.183254	0.250164	0.087181	0.304882	0.113176	0.027067	0.254205	0.159554	0.079587	0.337186	0.476013 [...]
+U68659	0.363540	0.239423	0.422482	0.277780	0.283550	0.315450	0.286735	0.327910	0.281721	0.097752	0.369650	0.395762	0.288293	0.082137	0.434828	0.230023	0.270276	0.457110	0.571133	0.393321	0.287582	0.296253	0.253430	0.085685	0.303385	0.357960	0.340079	0.308489	0.102724	0.146029	0.102724	0.309354	0.429834	0.338682	0.138222	0.292483	0.351054	0.395244	0.304863	0.295744	0.338675	0.283223	0.272179	0.165149	0.267350	0.112297	0.266174	0.107789	0.095229	0.290684	0.173334	0.131709	0.321127	0.505483 [...]
+U68660	0.320583	0.322718	0.380263	0.373994	0.314611	0.288375	0.353057	0.249858	0.279610	0.300928	0.311985	0.362657	0.256331	0.296162	0.380958	0.295955	0.279815	0.259562	0.545907	0.398293	0.365685	0.267206	0.338367	0.288375	0.241188	0.308090	0.225714	0.236040	0.282904	0.301734	0.282904	0.254734	0.379204	0.190289	0.288375	0.305724	0.177292	0.384564	0.250042	0.252986	0.260771	0.343582	0.300901	0.327417	0.227815	0.265230	0.343582	0.307284	0.281453	0.304099	0.285269	0.286601	0.313023	0.389572 [...]
+U68661	0.377095	0.331029	0.378280	0.399403	0.333983	0.251022	0.431301	0.361031	0.353384	0.312653	0.379238	0.378280	0.371426	0.315818	0.438429	0.353786	0.315818	0.400677	0.596479	0.464872	0.396313	0.386331	0.385769	0.330581	0.364192	0.368696	0.345563	0.355634	0.334367	0.376019	0.334367	0.349242	0.426935	0.353384	0.375507	0.357389	0.367885	0.460900	0.379082	0.367269	0.372400	0.404462	0.350631	0.349934	0.360697	0.356836	0.383805	0.330363	0.326769	0.367366	0.350892	0.347680	0.386528	0.534583 [...]
+U68662	0.334577	0.347334	0.369961	0.362410	0.288242	0.252140	0.383792	0.309193	0.273195	0.287313	0.319872	0.329411	0.308631	0.306261	0.421844	0.300536	0.268415	0.345273	0.537090	0.440840	0.397621	0.283511	0.346587	0.298772	0.293580	0.236410	0.289951	0.295495	0.330363	0.307748	0.330363	0.218788	0.409711	0.273546	0.300395	0.258630	0.266933	0.444553	0.304099	0.297339	0.278714	0.398920	0.289567	0.293414	0.285540	0.311855	0.374577	0.295811	0.301792	0.275557	0.300366	0.323126	0.339857	0.499746 [...]
+U68663	0.328665	0.307230	0.382431	0.369636	0.329950	0.280789	0.377762	0.225530	0.282712	0.301773	0.287713	0.355401	0.225459	0.300274	0.383119	0.287632	0.275567	0.247703	0.550346	0.432159	0.371771	0.279408	0.345663	0.304869	0.213078	0.311791	0.203381	0.201972	0.305631	0.333514	0.305631	0.259275	0.388292	0.152765	0.318002	0.348253	0.158482	0.425988	0.234281	0.230958	0.249599	0.351164	0.295607	0.329525	0.220294	0.277406	0.344044	0.326581	0.294114	0.310272	0.255839	0.322928	0.293373	0.462099 [...]
+U68664	0.438795	0.410098	0.394891	0.442027	0.382564	0.422422	0.498901	0.428059	0.448883	0.405367	0.447499	0.400562	0.487474	0.393527	0.510082	0.457170	0.400237	0.507162	0.583083	0.521929	0.467881	0.472722	0.474392	0.421263	0.432852	0.440840	0.434098	0.442013	0.400079	0.457461	0.400079	0.409401	0.484211	0.423111	0.454184	0.455820	0.450907	0.523433	0.455710	0.449496	0.460223	0.476546	0.424850	0.428856	0.444851	0.400684	0.447103	0.385171	0.389033	0.412830	0.439624	0.416934	0.468218	0.583528 [...]
+U68665	0.309493	0.300178	0.336453	0.380785	0.286976	0.250197	0.381754	0.211727	0.257111	0.277889	0.280537	0.314166	0.233168	0.300453	0.401870	0.283354	0.242283	0.273899	0.560288	0.429533	0.389953	0.268748	0.320656	0.287903	0.191338	0.281898	0.216929	0.185998	0.291803	0.340396	0.291803	0.244816	0.374436	0.138335	0.316294	0.277611	0.148618	0.423717	0.227664	0.219999	0.199775	0.371022	0.266842	0.324462	0.210323	0.265864	0.335404	0.306422	0.273482	0.262032	0.267724	0.327417	0.296546	0.476751 [...]
+U68666	0.394408	0.278594	0.450824	0.211525	0.295676	0.395265	0.046201	0.366734	0.331431	0.297115	0.387877	0.410471	0.336626	0.282358	0.450762	0.344388	0.304099	0.374967	0.581348	0.418534	0.146326	0.368536	0.373178	0.276712	0.347147	0.361088	0.379364	0.355952	0.301792	0.315503	0.301792	0.338989	0.363820	0.372000	0.315596	0.343141	0.372584	0.427271	0.339287	0.357324	0.343582	0.069962	0.346608	0.332052	0.309071	0.290961	0.069818	0.288885	0.294032	0.341037	0.306384	0.299483	0.359680	0.548910 [...]
+U68667	0.334519	0.219740	0.392626	0.295152	0.230975	0.288543	0.311238	0.310893	0.235348	0.059437	0.345365	0.365777	0.288885	0.112339	0.415506	0.247859	0.233701	0.321801	0.547458	0.382770	0.304099	0.313081	0.235404	0.092724	0.302594	0.301848	0.292637	0.298123	0.069377	0.065526	0.069377	0.276248	0.358283	0.317196	0.151832	0.263548	0.328257	0.400684	0.293373	0.276933	0.309669	0.288293	0.210404	0.186103	0.265629	0.103537	0.275102	0.108022	0.072655	0.259776	0.188057	0.000000	0.334262	0.474328 [...]
+U68668	0.338675	0.304895	0.320439	0.326489	0.275794	0.244797	0.330210	0.280732	0.301694	0.258909	0.316920	0.297661	0.276029	0.292349	0.354680	0.294295	0.243090	0.292250	0.549429	0.434402	0.305710	0.287632	0.333124	0.289247	0.289247	0.281992	0.288107	0.264212	0.283806	0.267206	0.283806	0.216474	0.364131	0.292250	0.288375	0.281069	0.313776	0.408920	0.271315	0.257233	0.297432	0.304895	0.288995	0.304931	0.267743	0.283636	0.313694	0.293104	0.273925	0.284371	0.283806	0.290628	0.324807	0.464442 [...]
+U68669	0.228291	0.274172	0.369357	0.370307	0.285761	0.271016	0.322928	0.271516	0.289364	0.260927	0.305537	0.313081	0.252555	0.269477	0.431301	0.309442	0.299700	0.258029	0.579391	0.404247	0.349472	0.261393	0.343048	0.261273	0.242726	0.313776	0.268841	0.225803	0.279210	0.298979	0.279210	0.240843	0.415147	0.219989	0.292910	0.343602	0.223615	0.396586	0.230433	0.247550	0.265476	0.312445	0.287145	0.305767	0.228495	0.269811	0.295607	0.266778	0.248019	0.298216	0.276470	0.288293	0.240200	0.502399 [...]
+U68670	0.376110	0.304876	0.371101	0.363330	0.284542	0.287662	0.376347	0.308489	0.307034	0.283756	0.362373	0.348877	0.326397	0.309890	0.383451	0.312378	0.286181	0.324259	0.521831	0.389343	0.363879	0.186858	0.342412	0.297284	0.288218	0.102059	0.269209	0.284692	0.319374	0.326138	0.319374	0.282868	0.375655	0.296070	0.313081	0.225600	0.309274	0.385499	0.320179	0.322618	0.335484	0.342727	0.316004	0.317492	0.298451	0.322772	0.319757	0.317089	0.310221	0.310681	0.336995	0.306348	0.381089	0.503515 [...]
+U68671	0.348863	0.304882	0.412218	0.176486	0.270212	0.305644	0.130465	0.326113	0.295465	0.290961	0.364001	0.362613	0.319889	0.258630	0.418254	0.300873	0.246349	0.323087	0.572514	0.425802	0.052616	0.351164	0.349965	0.269138	0.310323	0.336904	0.302518	0.303326	0.288634	0.297926	0.288634	0.331729	0.343375	0.336019	0.304099	0.338381	0.328012	0.401063	0.320023	0.315885	0.301630	0.110830	0.303306	0.325179	0.294295	0.289370	0.113423	0.266838	0.278102	0.323249	0.298639	0.304889	0.321861	0.501787 [...]
+U68672	0.282832	0.278803	0.322132	0.365609	0.240811	0.221866	0.332666	0.275578	0.248746	0.261273	0.315402	0.308671	0.268659	0.261430	0.444463	0.265057	0.220282	0.304099	0.539342	0.412570	0.339563	0.233137	0.322963	0.266778	0.265949	0.232616	0.264063	0.250230	0.267724	0.295428	0.267724	0.042634	0.365452	0.237874	0.277780	0.247505	0.239944	0.402262	0.263548	0.261314	0.246789	0.322318	0.265098	0.281978	0.246469	0.258995	0.294858	0.247703	0.246207	0.235229	0.281157	0.274683	0.311729	0.537303 [...]
+U68673	0.281554	0.190897	0.307547	0.237569	0.177895	0.224066	0.247630	0.251446	0.204695	0.055581	0.263187	0.290462	0.239481	0.084997	0.356467	0.202637	0.190897	0.258530	0.488821	0.375841	0.238267	0.236311	0.187499	0.090013	0.245116	0.236938	0.245116	0.240681	0.065259	0.057989	0.065259	0.247109	0.308382	0.259944	0.124372	0.241972	0.263911	0.363986	0.239481	0.230135	0.240487	0.214248	0.173505	0.190930	0.244067	0.096209	0.219740	0.087708	0.065259	0.203106	0.132372	0.018238	0.288563	0.433611 [...]
+U68674	0.289504	0.278803	0.325439	0.309315	0.239605	0.268462	0.316394	0.223786	0.261030	0.245143	0.302126	0.318679	0.210623	0.237879	0.419587	0.284985	0.205189	0.290741	0.537954	0.355843	0.298075	0.255426	0.313061	0.239932	0.203073	0.274376	0.248833	0.190537	0.243245	0.281859	0.243245	0.194237	0.362176	0.217256	0.259158	0.243570	0.216113	0.367269	0.210139	0.219612	0.188717	0.297339	0.281613	0.266003	0.178870	0.240008	0.269138	0.229291	0.224228	0.220617	0.259430	0.273784	0.274021	0.503515 [...]
+U68675	0.460089	0.379815	0.458067	0.378434	0.342481	0.385306	0.401870	0.347735	0.375748	0.358668	0.366244	0.411299	0.341185	0.355139	0.216643	0.372090	0.341185	0.329297	0.532074	0.143736	0.379425	0.382035	0.404247	0.354146	0.325695	0.338367	0.330447	0.322332	0.381290	0.362702	0.381290	0.402807	0.408715	0.402370	0.360697	0.323736	0.404720	0.119702	0.334577	0.326790	0.401357	0.384953	0.351941	0.368881	0.332139	0.368906	0.396051	0.359680	0.371898	0.374017	0.371463	0.394313	0.411749	0.307455 [...]
+U68676	0.290337	0.310886	0.338335	0.346768	0.280607	0.255062	0.333940	0.207419	0.246441	0.276043	0.272810	0.338335	0.228956	0.269591	0.368256	0.259145	0.244975	0.236270	0.500784	0.388888	0.332139	0.235687	0.345949	0.273550	0.201542	0.280815	0.218178	0.194716	0.270320	0.285826	0.270320	0.233863	0.350796	0.170447	0.278306	0.295586	0.162867	0.383504	0.215011	0.202061	0.255419	0.313511	0.248945	0.344178	0.219010	0.248274	0.319528	0.277547	0.263194	0.275042	0.224066	0.274028	0.278844	0.393227 [...]
+U68677	0.225920	0.281221	0.353510	0.331129	0.254808	0.230812	0.293754	0.240415	0.255062	0.262152	0.300845	0.321926	0.235012	0.243556	0.340607	0.266105	0.279720	0.228450	0.497875	0.368588	0.305782	0.247753	0.293125	0.262152	0.221974	0.272352	0.215580	0.220851	0.256579	0.249050	0.256579	0.208494	0.375330	0.239181	0.265297	0.284878	0.234409	0.358636	0.206640	0.224915	0.238912	0.291729	0.254379	0.307485	0.219312	0.255486	0.297491	0.253464	0.259021	0.287687	0.243037	0.250563	0.218895	0.411831 [...]
+U68678	0.353872	0.215933	0.389953	0.288419	0.207740	0.278924	0.278382	0.286170	0.243137	0.061388	0.332052	0.372984	0.268754	0.117427	0.394174	0.254827	0.216429	0.314094	0.520376	0.400684	0.281484	0.285616	0.202309	0.101302	0.283918	0.286170	0.279210	0.285469	0.095671	0.032241	0.095671	0.277621	0.373379	0.297976	0.127277	0.239857	0.302557	0.403362	0.276023	0.271956	0.292250	0.274214	0.204810	0.154579	0.264932	0.088720	0.264854	0.107095	0.083598	0.236460	0.156726	0.073795	0.307230	0.465820 [...]
+U68679	0.301641	0.310375	0.330669	0.385769	0.270341	0.248921	0.385499	0.214959	0.252461	0.292542	0.281814	0.307005	0.235348	0.312120	0.404462	0.287662	0.235818	0.272495	0.555709	0.434603	0.381438	0.282851	0.327279	0.296422	0.196585	0.283182	0.218715	0.194277	0.303332	0.348517	0.303332	0.234974	0.374116	0.138964	0.324586	0.271823	0.145693	0.429810	0.232616	0.228248	0.199030	0.372795	0.263031	0.331375	0.211615	0.277406	0.338395	0.314890	0.285014	0.255021	0.277363	0.338989	0.289954	0.477706 [...]
+U68680	0.318305	0.297172	0.333647	0.336467	0.279352	0.262032	0.346790	0.301197	0.283270	0.278520	0.313735	0.328498	0.311667	0.309846	0.376850	0.309116	0.266562	0.343117	0.534712	0.394174	0.354610	0.167124	0.357693	0.288293	0.276215	0.003141	0.257245	0.284021	0.315503	0.321609	0.315503	0.226805	0.384021	0.257172	0.311547	0.193118	0.259008	0.385499	0.306366	0.311729	0.276281	0.346439	0.313101	0.300788	0.298402	0.308705	0.329950	0.317777	0.304863	0.276383	0.310839	0.308594	0.353619	0.503515 [...]
+U68681	0.236932	0.287175	0.365609	0.363330	0.266663	0.259573	0.341037	0.263548	0.290176	0.261919	0.314147	0.340396	0.264176	0.270377	0.385769	0.303345	0.294656	0.258374	0.529364	0.416497	0.349097	0.267229	0.320851	0.273298	0.245844	0.281688	0.251570	0.247390	0.283055	0.303368	0.283055	0.239151	0.383119	0.246405	0.309357	0.301105	0.256240	0.409723	0.250170	0.256959	0.258452	0.333874	0.290876	0.291729	0.219189	0.301006	0.307959	0.282358	0.269419	0.301900	0.278042	0.289841	0.239642	0.460196 [...]
+U68682	0.387405	0.386427	0.420026	0.401870	0.363123	0.364826	0.428507	0.334715	0.373620	0.375302	0.386034	0.403978	0.357381	0.347187	0.237996	0.393039	0.364826	0.334715	0.488100	0.136010	0.402913	0.405848	0.410134	0.370033	0.322081	0.344338	0.311637	0.323899	0.367161	0.383533	0.367161	0.408545	0.448321	0.373620	0.377735	0.344754	0.408545	0.125402	0.341487	0.311637	0.380047	0.424679	0.370722	0.420331	0.348085	0.365457	0.432003	0.351139	0.371221	0.407437	0.357381	0.377735	0.393393	0.279752 [...]
+U68683	0.329749	0.307257	0.425707	0.181103	0.272278	0.323931	0.119702	0.333640	0.282904	0.288634	0.365508	0.385600	0.304889	0.253399	0.415733	0.294379	0.275561	0.330615	0.554368	0.411059	0.037903	0.348253	0.320298	0.278803	0.317527	0.327350	0.312946	0.307230	0.292447	0.288634	0.292447	0.321154	0.335749	0.335404	0.297057	0.314611	0.338085	0.397439	0.316920	0.320023	0.291729	0.104975	0.303300	0.291729	0.295087	0.282358	0.107562	0.259275	0.279506	0.312871	0.270897	0.293862	0.308943	0.504976 [...]
+U68684	0.306586	0.199943	0.321169	0.296007	0.164269	0.260186	0.310948	0.269374	0.197861	0.163915	0.287563	0.293066	0.259430	0.184423	0.416248	0.223870	0.116311	0.327987	0.502519	0.397402	0.304851	0.243627	0.238203	0.188051	0.251410	0.266067	0.265426	0.259907	0.183758	0.209844	0.183758	0.195624	0.338835	0.258020	0.209404	0.204251	0.261363	0.408615	0.271549	0.254985	0.241563	0.307105	0.202783	0.217779	0.246951	0.187608	0.287313	0.193667	0.163541	0.132969	0.213271	0.207032	0.305619	0.469464 [...]
+U68685	0.365423	0.313533	0.405119	0.116013	0.300336	0.309315	0.227713	0.349614	0.319374	0.292542	0.344149	0.348044	0.329101	0.282017	0.478115	0.334008	0.309357	0.360611	0.586500	0.410286	0.212203	0.358766	0.375352	0.276933	0.336148	0.367583	0.334619	0.329937	0.295744	0.326668	0.295744	0.348044	0.346279	0.355521	0.335561	0.350766	0.379082	0.415352	0.334876	0.322618	0.333010	0.185184	0.338989	0.315857	0.299159	0.280409	0.170628	0.277833	0.276712	0.365261	0.307129	0.323281	0.333145	0.569768 [...]
+U68686	0.322928	0.223156	0.371826	0.276453	0.191909	0.281513	0.300211	0.286451	0.225078	0.085685	0.333874	0.349992	0.257463	0.093482	0.386847	0.236499	0.218231	0.317777	0.522903	0.364001	0.286170	0.288785	0.211388	0.073795	0.275573	0.310075	0.288419	0.279986	0.079795	0.092710	0.079795	0.275120	0.355097	0.285469	0.134401	0.241633	0.301830	0.371596	0.268092	0.253764	0.282537	0.280475	0.182924	0.159261	0.253005	0.043762	0.259354	0.070985	0.061001	0.229921	0.147447	0.106638	0.294934	0.485847 [...]
+U68687	0.412271	0.313533	0.408262	0.065911	0.357920	0.386580	0.228897	0.367366	0.367276	0.271435	0.389291	0.375794	0.360148	0.262636	0.442034	0.344247	0.334817	0.437586	0.597979	0.406702	0.213348	0.359238	0.388467	0.262900	0.349656	0.403898	0.375748	0.349886	0.281484	0.318118	0.281484	0.387518	0.381696	0.405765	0.309357	0.402710	0.416988	0.403588	0.361088	0.350376	0.406654	0.180258	0.335656	0.301609	0.319116	0.277406	0.160611	0.257463	0.270760	0.374644	0.316294	0.292099	0.371022	0.561599 [...]
+U68688	0.323601	0.316704	0.372984	0.382445	0.336085	0.273784	0.390245	0.221892	0.294195	0.286451	0.291580	0.349854	0.233168	0.302582	0.386206	0.295536	0.274451	0.257572	0.561908	0.435867	0.373578	0.278247	0.350106	0.295676	0.207032	0.304857	0.207984	0.198309	0.307927	0.330148	0.307927	0.270760	0.395785	0.157775	0.310948	0.350243	0.163469	0.426170	0.219999	0.222823	0.236621	0.359680	0.294195	0.329525	0.216845	0.274415	0.339877	0.313301	0.285014	0.308667	0.264831	0.333145	0.301811	0.475087 [...]
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.map b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.map
new file mode 100644
index 0000000..51d6458
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.map
@@ -0,0 +1,18 @@
+0
+0
+0
+7
+0
+0
+4
+10
+13
+8
+0
+0
+9
+0
+0
+0
+0
+0
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.oligos b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.oligos
new file mode 100644
index 0000000..0938450
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.oligos
@@ -0,0 +1,12 @@
+forward	CCGTCAATTCMTTTRAGT
+barcode	AATGGTAC	F003D000
+barcode	AACCTGGC	F003D002
+barcode	TTCGTGGC	F003D004
+barcode	TTCTTGAC	F003D006
+barcode	TTCGCGAC	F003D008
+barcode	TCCAGAAC	F003D142
+barcode	AAGGCCTC	F003D144
+barcode	TGACCGTC	F003D146
+barcode	AGGTTGTC	F003D148
+barcode	TGGTGAAC	F003D150
+barcode	AACCGTGTC	MOCK.GQY1XT001
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.otu b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.otu
new file mode 100644
index 0000000..3b63601
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.otu
@@ -0,0 +1,37 @@
+label	numOtus	Otu01	Otu02	Otu03	Otu04	Otu05	Otu06	Otu07	Otu08	Otu09	Otu10	Otu11	Otu12	Otu13	Otu14	Otu15	Otu16	Otu17	Otu18	Otu19	Otu20	Otu21	Otu22	Otu23	Otu24	Otu25	Otu26	Otu27	Otu28	Otu29	Otu30	Otu31	Otu32	Otu33	Otu34	Otu35	Otu36	Otu37	Otu38	Otu39	Otu40	Otu41	Otu42	Otu43	Otu44	Otu45	Otu46	Otu47	Otu48	Otu49	Otu50	Otu51	Otu52	Otu53	Otu54	Otu55	Otu56	Otu57	Otu58	Otu59	Otu60	Otu61	Otu62	Otu63	Otu64	Otu65	Otu66	Otu67	Otu68	Otu69	Otu70	Otu71	Otu72	Otu73	Otu74	Otu75	Otu76	Otu77	Otu78	Otu79	Otu8 [...]
+unique	96	U68667,U68641	U68620,U68618	U68663	U68662	U68661	U68660	U68659	U68658	U68657	U68656	U68655	U68654	U68653	U68652	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68688	U68687	U68686	U68685	U68684	U68683	U68682	U68681	U68680	U68679	U68678	U68677	U68676	U68675	U68674	U68673	U68672	U68671	U68670	U68669	U68668	U68666	U68665	U68664	U68613	U68612	U68611	U68610	U68609	U68608	U68607	U68606	U68605	U68603	U68602	U68601	U68600	U68599	U68598	U68597	U68596	U68595 [...]
+0.01	93	U68688,U68665	U68636,U68631	U68667,U68641	U68620,U68618	U68680,U68615	U68661	U68660	U68659	U68658	U68657	U68656	U68655	U68654	U68653	U68652	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68687	U68686	U68685	U68684	U68683	U68682	U68681	U68679	U68678	U68677	U68676	U68675	U68674	U68673	U68672	U68671	U68670	U68669	U68668	U68666	U68664	U68663	U68662	U68613	U68612	U68611	U68610	U68609	U68608	U68607	U68606	U68605	U68603	U68602	U68601	U68600	U68599	U68598	U [...]
+0.02	90	U68688,U68665,U68679,U68663	U68673,U68667,U68641	U68636,U68631	U68620,U68618	U68680,U68615	U68658	U68657	U68656	U68655	U68686	U68654	U68653	U68652	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68638	U68687	U68683	U68682	U68681	U68684	U68678	U68677	U68676	U68675	U68674	U68685	U68672	U68671	U68670	U68669	U68668	U68666	U68664	U68662	U68661	U68660	U68659	U68612	U68611	U68610	U68609	U68608	U68607	U68606	U68605	U68603	U68602	U68601	U68600	U68599	U68598	U [...]
+0.03	88	U68688,U68665,U68679,U68663	U68658,U68638,U68620,U68618	U68673,U68667,U68641	U68636,U68631	U68680,U68615	U68686	U68657	U68656	U68655	U68654	U68653	U68652	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68637	U68687	U68683	U68682	U68681	U68684	U68678	U68677	U68676	U68675	U68674	U68685	U68672	U68671	U68670	U68669	U68668	U68666	U68664	U68662	U68661	U68660	U68659	U68613	U68612	U68611	U68610	U68609	U68608	U68607	U68606	U68605	U68603	U68602	U68601	U68600	U [...]
+0.04	83	U68688,U68665,U68679,U68663	U68658,U68638,U68620,U68618	U68673,U68667,U68641	U68636,U68631	U68637,U68602	U68652,U68610	U68614,U68596	U68678,U68619	U68680,U68615	U68681,U68677	U68656	U68655	U68654	U68653	U68685	U68651	U68649	U68648	U68647	U68646	U68645	U68644	U68643	U68642	U68640	U68639	U68686	U68687	U68676	U68675	U68674	U68682	U68672	U68671	U68670	U68669	U68668	U68666	U68664	U68662	U68661	U68660	U68683	U68659	U68684	U68657	U68613	U68612	U68611	U68609	U68608	U68607	U68606	U68605	U [...]
+0.05	78	U68688,U68665,U68679,U68663	U68658,U68638,U68620,U68618	U68683,U68652,U68610	U68673,U68667,U68641	U68636,U68631	U68637,U68602	U68628,U68601	U68614,U68596	U68666,U68595	U68672,U68621	U68686,U68635	U68678,U68619	U68680,U68615	U68681,U68677	U68654	U68653	U68651	U68649	U68648	U68647	U68646	U68645	U68685	U68644	U68643	U68642	U68640	U68639	U68687	U68676	U68675	U68674	U68671	U68670	U68669	U68668	U68664	U68662	U68661	U68660	U68682	U68659	U68684	U68657	U68656	U68655	U68607	U68608	U68600	U [...]
+0.06	69	U68688,U68665,U68679,U68663	U68683,U68652,U68610,U68671	U68658,U68638,U68620,U68618	U68673,U68667,U68641,U68598	U68634,U68628,U68601	U68678,U68619,U68645	U68636,U68631	U68633,U68612	U68637,U68602	U68627,U68609	U68648,U68632	U68614,U68596	U68666,U68595	U68605,U68597	U68672,U68621	U68686,U68635	U68681,U68677	U68680,U68615	U68676,U68660	U68649	U68647	U68646	U68644	U68682	U68643	U68642	U68640	U68639	U68684	U68685	U68687	U68651	U68653	U68654	U68655	U68656	U68657	U68659	U68661	U68662	U [...]
+0.07	65	U68678,U68619,U68645,U68673,U68667,U68641,U68598	U68688,U68665,U68679,U68663	U68683,U68652,U68610,U68671	U68658,U68638,U68620,U68618	U68634,U68628,U68601	U68617,U68614,U68596	U68633,U68612	U68636,U68631	U68637,U68602	U68627,U68609	U68648,U68632	U68666,U68595	U68605,U68597	U68687,U68592	U68686,U68635	U68682,U68657	U68681,U68677	U68680,U68615	U68676,U68660	U68672,U68621	U68670	U68685	U68684	U68639	U68640	U68674	U68642	U68643	U68644	U68646	U68647	U68669	U68649	U68651	U68653	U68668	U [...]
+0.08	56	U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618	U68688,U68665,U68679,U68663,U68653,U68676,U68660	U68666,U68595,U68636,U68631	U68683,U68652,U68610,U68671	U68617,U68614,U68596	U68634,U68628,U68601	U68686,U68635,U68613	U68649,U68639	U68651,U68640	U68654,U68593	U68687,U68592	U68648,U68632	U68605,U68597	U68637,U68602	U68672,U68621	U68626,U68623	U68627,U68609	U68633,U68612	U68680,U68615	U68681,U68677	U68682,U68657	U68630	U68642	U68643	U68644	U68646	U68647	U [...]
+0.09	55	U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618	U68688,U68665,U68679,U68663,U68653,U68676,U68660	U68686,U68635,U68613,U68637,U68602	U68666,U68595,U68636,U68631	U68683,U68652,U68610,U68671	U68617,U68614,U68596	U68634,U68628,U68601	U68651,U68640	U68654,U68593	U68649,U68639	U68687,U68592	U68648,U68632	U68605,U68597	U68627,U68609	U68672,U68621	U68633,U68612	U68626,U68623	U68682,U68657	U68680,U68615	U68681,U68677	U68642	U68643	U68630	U68644	U68646	U68647	U [...]
+0.10	49	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618	U68688,U68665,U68679,U68663,U68653,U68676,U68660	U68654,U68593,U68649,U68639,U68606	U68629,U68617,U68614,U68596	U68683,U68652,U68610,U68671	U68634,U68628,U68601,U68607	U68648,U68632,U68605,U68597	U68666,U68595,U68636,U68631	U68626,U68623	U68627,U68609	U68633,U68612	U68651,U68640	U68687,U68592	U68682,U68657	U68681,U68677	U68680,U68615	U68672,U68621	U68670	U68685	U68684	U [...]
+0.11	45	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68654,U68593,U68649,U68639,U68606	U68648,U68632,U68605,U68597	U68666,U68595,U68636,U68631	U68683,U68652,U68610,U68671	U68682,U68657,U68675	U68680,U68615,U68670	U68687,U68592	U68651,U68640	U68633,U68612	U68627,U68609	U68672,U68621	U68626,U68623	U68681,U68677	U [...]
+0.12	43	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68654,U68593,U68649,U68639,U68606	U68648,U68632,U68605,U68597	U68683,U68652,U68610,U68671	U68666,U68595,U68636,U68631	U68687,U68592,U68685	U68682,U68657,U68675	U68680,U68615,U68670	U68651,U68640	U68633,U68612	U68627,U68609	U68626,U68623	U68672,U68621	U [...]
+0.13	40	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68682,U68657,U68675,U68627,U68609	U68654,U68593,U68649,U68639,U68606	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68680,U68615,U68670	U68651,U68640	U68633,U68612	U68626,U68623	U68600,U68591	U [...]
+0.14	36	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68680,U68615,U68670	U68651,U68640	U68633,U68612	U68626,U [...]
+0.15	35	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68680,U68615,U68670	U68681,U68677	U68672,U [...]
+0.16	34	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68681,U68677,U68669	U68680,U68615,U68670	U [...]
+0.17	33	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U68681,U68677,U68669	U68680,U [...]
+0.18	31	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68680,U68615,U68670,U68611	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U [...]
+0.19	30	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68680,U68615,U68670,U68611	U68648,U68632,U68605,U68597	U68687,U68592,U68685	U [...]
+0.20	27	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596	U68684,U68654,U68593,U68649,U68639,U68606	U68682,U68657,U68675,U68627,U68609	U68680,U68615,U68670,U68611	U68648,U68632,U [...]
+0.21	25	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68682,U68657,U68675,U68627,U68609	U68680,U68615,U [...]
+0.22	23	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623	U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68682,U68657,U68675,U68627,U68609	U [...]
+0.23	21	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68682,U68657,U68675,U68627,U68609	U [...]
+0.24	17	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68682,U [...]
+0.25	15	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U68666,U68595,U68636,U68631	U68682,U [...]
+0.26	13	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U [...]
+0.27	12	U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642	U68687,U68592,U68685,U68683,U68652,U68610,U68671,U [...]
+0.29	9	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.32	7	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.33	6	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.36	5	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.38	4	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.41	3	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.45	2	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
+0.55	1	U68688,U68665,U68679,U68663,U68653,U68676,U68660,U68647,U68626,U68623,U68674,U68630,U68634,U68628,U68601,U68607,U68629,U68617,U68614,U68596,U68616,U68681,U68677,U68669,U68589,U68642,U68599,U68686,U68635,U68613,U68637,U68602,U68678,U68619,U68645,U68673,U68667,U68641,U68598,U68658,U68638,U68620,U68618,U68659,U68624,U68655,U68651,U68640,U68633,U68612,U68648,U68632,U68605,U68597,U68684,U68654,U68593,U68649,U68639,U68606,U68646,U68590,U68644,U68680,U68615,U68670,U68611,U68625,U68672,U6 [...]
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.pair.dist b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.pair.dist
new file mode 100644
index 0000000..08b5bb5
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.pair.dist
@@ -0,0 +1,42 @@
+U68590	U68589	0.337144
+U68591	U68589	0.360977
+U68591	U68590	0.378254
+U68592	U68589	0.415506
+U68592	U68590	0.319757
+U68592	U68591	0.414843
+U68593	U68589	0.287299
+U68593	U68590	0.169021
+U68593	U68591	0.336162
+U68593	U68592	0.284235
+U68594	U68589	0.297057
+U68594	U68590	0.329311
+U68594	U68591	0.356376
+U68594	U68592	0.332574
+U68594	U68593	0.276866
+U68595	U68589	0.39224
+U68595	U68590	0.273158
+U68595	U68591	0.427517
+U68595	U68592	0.229894
+U68595	U68593	0.283055
+U68595	U68594	0.364319
+U68596	U68589	0.309315
+U68596	U68590	0.312653
+U68596	U68591	0.322673
+U68596	U68592	0.36333
+U68596	U68593	0.291774
+U68596	U68594	0.280537
+U68596	U68595	0.360148
+U68597	U68589	0.320066
+U68597	U68590	0.266838
+U68597	U68591	0.35206
+U68597	U68592	0.325227
+U68597	U68593	0.217362
+U68597	U68594	0.263379
+U68597	U68595	0.317196
+U68597	U68596	0.276011
+U68598	U68589	0.328638
+U68598	U68590	0.206259
+U68598	U68591	0.344952
+U68598	U68592	0.265168
+U68598	U68593	0.189372
+U68598	U68594	0.251328
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.quan b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.quan
new file mode 100644
index 0000000..0742631
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.quan
@@ -0,0 +1,42 @@
+#1.36.1
+1	0	0	0	0	0	0
+2	0.049144	0.0491476	0.0844091	0.116936	0.162039	0.162039
+3	0.160511	0.160511	0.160511	0.160511	0.160511	0.160511
+4	0	0	0	0	0	0
+5	0	0	0	0	0	0
+6	0	0	0	0	0	0
+7	0	0	0	0	0	0
+8	0	0	0	0	0	0
+9	0.676576	0.680769	0.707958	0.709889	0.709889	0.709889
+10	0.697272	0.697272	0.738054	0.738553	0.743446	0.743446
+11	0.754357	0.754357	0.760208	0.760556	0.760556	0.760556
+12	0	0	0	0	0	0
+13	0	0	0	0	0	0
+14	1.00039	1.00039	1.00039	1.0141	1.0141	1.0141
+15	1.01854	1.01854	1.01854	1.01854	1.01854	1.01854
+16	0.970699	0.975675	0.995969	1.04614	1.05545	1.05545
+17	0.992594	1.0202	1.03614	1.08892	1.13746	1.13746
+18	1.09386	1.09386	1.09922	1.10809	1.10809	1.10809
+19	0	0	0	0	0	0
+20	0	0	0	0	0	0
+21	0	0	0	0	0	0
+22	1.35506	1.35885	1.36713	1.37285	1.38105	1.38105
+23	1.161	1.18494	1.43935	1.44885	1.46278	1.46278
+24	1.13221	1.20661	1.22248	1.4331	1.45037	1.45037
+25	1.52842	1.52956	1.53387	1.54002	1.54381	1.54381
+26	1.48322	1.49691	1.54058	1.57344	1.60446	1.60675
+27	1.43869	1.51674	1.54557	1.60688	1.6382	1.64727
+28	1.464	1.53106	1.63766	1.68281	1.72392	1.72972
+29	1.61868	1.64211	1.71553	1.75536	1.82599	1.82802
+30	1.64301	1.68357	1.72295	1.79613	1.80546	1.83608
+31	1.69872	1.70717	1.72619	1.73675	1.75281	1.75281
+32	1.7243	1.74704	1.767	1.78202	1.81832	1.86177
+33	1.77673	1.78456	1.79277	1.82968	1.87613	1.88887
+34	1.81019	1.82733	1.86897	1.89782	1.91567	1.91567
+35	0	0	0	0	0	0
+36	0	0	0	0	0	0
+37	0	0	0	0	0	0
+38	0	0	0	0	0	0
+39	0	0	0	0	0	0
+40	0	0	0	0	0	0
+41	0	0	0	0	0	0
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.ref.taxonomy b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.ref.taxonomy
new file mode 100644
index 0000000..35dc901
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.ref.taxonomy
@@ -0,0 +1,42 @@
+AB000389.1	Bacteria;Proteobacteria;Gammaproteobacteria;Alteromonadales;Pseudoalteromonadaceae;Pseudoalteromonas;
+AB000699.1	Bacteria;Proteobacteria;Betaproteobacteria;Nitrosomonadales;Nitrosomonadaceae;Nitrosomonas;
+AB000700.1	Bacteria;Proteobacteria;Betaproteobacteria;Nitrosomonadales;Nitrosomonadaceae;Nitrosomonas;
+AB000701.1	Bacteria;Proteobacteria;Betaproteobacteria;Nitrosomonadales;Nitrosomonadaceae;Nitrosomonas;
+AB000702.1	Bacteria;Proteobacteria;Betaproteobacteria;Nitrosomonadales;Nitrosomonadaceae;Nitrosomonas;
+AB001518.1	Bacteria;Bacteroidetes;Sphingobacteria;Sphingobacteriales;Flammeovirgaceae;Candidatus_Cardinium;
+AB001724.1	Bacteria;Cyanobacteria;SubsectionI;Microcystis;
+AB001774.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001775.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001776.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001777.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001779.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001781.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001783.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001784.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001785.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001791.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001793.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001797.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001802.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001805.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001807.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001809.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001813.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001815.1	Bacteria;Chlamydiae;Chlamydiae;Chlamydiales;Chlamydiaceae;Chlamydophila;
+AB001836.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Lactobacillaceae;Lactobacillus;
+AB001837.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Lactobacillaceae;Lactobacillus;
+AB002481.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002483.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002485.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002488.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002489.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002496.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002500.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002504.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002508.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002510.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002512.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002517.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002519.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002523.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
+AB002527.1	Bacteria;Firmicutes;Bacilli;Lactobacillales;Streptococcaceae;Streptococcus;
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.sabund b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.sabund
new file mode 100644
index 0000000..47bb1db
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.sabund
@@ -0,0 +1,5 @@
+unique	2	94	2
+0.00	2	92	3
+0.01	2	88	5
+0.02	4	84	2	2	1
+0.03	4	75	6	1	2
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.shared b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.shared
new file mode 100644
index 0000000..b119fee
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.shared
@@ -0,0 +1,3 @@
+label	Group	numOtus	Otu01	Otu02	Otu03	Otu04	Otu05	Otu06	Otu07	Otu08	Otu09	Otu10
+0.10	forest	10	0	5	2	3	1	1	3	3	1	0
+0.10	pasture	10	7	2	5	1	3	2	0	0	1	2
diff --git a/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.square.dist b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.square.dist
new file mode 100644
index 0000000..298e257
--- /dev/null
+++ b/lib/galaxy/datatypes/test/mothur_datatypetest_true.mothur.square.dist
@@ -0,0 +1,99 @@
+	98
+U68589	0.000000	0.337144	0.360977	0.415506	0.287299	0.297057	0.392240	0.309315	0.320066	0.328638	0.257245	0.337227	0.301773	0.307105	0.477681	0.364754	0.320363	0.340712	0.643389	0.433138	0.376207	0.351515	0.373270	0.325136	0.289019	0.310340	0.307018	0.308559	0.324956	0.368119	0.324956	0.259707	0.409194	0.273253	0.348793	0.338829	0.289890	0.460196	0.287313	0.304876	0.293627	0.398745	0.323458	0.343931	0.270320	0.328638	0.360667	0.317641	0.307311	0.318242	0.315163	0.334519	0.259354	0.513252 [...]
+U68590	0.337144	0.000000	0.378254	0.319757	0.169021	0.329311	0.273158	0.312653	0.266838	0.206259	0.328665	0.341084	0.323764	0.220396	0.362613	0.274376	0.201198	0.317527	0.530014	0.373379	0.294704	0.286594	0.293104	0.214397	0.304876	0.294032	0.296294	0.318935	0.228159	0.215592	0.228159	0.285269	0.383119	0.287768	0.248463	0.330389	0.331258	0.359192	0.314346	0.282537	0.308195	0.273482	0.265198	0.272626	0.323924	0.216451	0.281824	0.241593	0.230389	0.228159	0.255453	0.219740	0.357218	0.507737 [...]
+U68591	0.360977	0.378254	0.000000	0.414843	0.336162	0.356376	0.427517	0.322673	0.352060	0.344952	0.369658	0.122794	0.372984	0.369623	0.467881	0.377089	0.324100	0.365825	0.548815	0.490123	0.450700	0.363708	0.396633	0.363488	0.311317	0.329411	0.347363	0.328716	0.384142	0.397333	0.384142	0.315472	0.415027	0.310736	0.388493	0.359680	0.334111	0.500022	0.350766	0.333145	0.333379	0.442063	0.319388	0.395174	0.355606	0.372774	0.425988	0.367269	0.363032	0.342135	0.362984	0.392626	0.425088	0.518779 [...]
+U68592	0.415506	0.319757	0.414843	0.000000	0.284235	0.332574	0.229894	0.363330	0.325227	0.265168	0.338989	0.364319	0.350376	0.266663	0.418468	0.331206	0.301152	0.384448	0.587163	0.416497	0.215923	0.319670	0.359680	0.254598	0.332803	0.338989	0.326397	0.347490	0.281664	0.312183	0.281664	0.373185	0.359680	0.365184	0.301866	0.362488	0.382122	0.420349	0.356390	0.342170	0.340183	0.179474	0.326851	0.298352	0.302463	0.260630	0.164494	0.275794	0.268092	0.355578	0.303351	0.295152	0.347147	0.556294 [...]
+U68593	0.287299	0.169021	0.336162	0.284235	0.000000	0.276866	0.283055	0.291774	0.217362	0.189372	0.294160	0.302449	0.284617	0.219618	0.385130	0.257535	0.122862	0.324013	0.518760	0.391165	0.284617	0.253894	0.273158	0.213889	0.267779	0.274252	0.271113	0.291080	0.214579	0.240602	0.214579	0.223590	0.344108	0.247344	0.236421	0.222528	0.254171	0.396586	0.289841	0.278520	0.259933	0.283223	0.225527	0.244836	0.268053	0.202789	0.269811	0.214077	0.196928	0.113423	0.232067	0.230975	0.324266	0.486147 [...]
+U68594	0.297057	0.329311	0.356376	0.332574	0.276866	0.000000	0.364319	0.280537	0.263379	0.251328	0.303296	0.322132	0.292099	0.249492	0.386773	0.237829	0.277808	0.334547	0.543352	0.396320	0.357389	0.244067	0.327178	0.251136	0.286583	0.260851	0.261823	0.288339	0.269312	0.305548	0.269312	0.230560	0.394362	0.270690	0.279315	0.238840	0.242336	0.409723	0.300336	0.292820	0.281697	0.340691	0.269265	0.277267	0.281613	0.265168	0.315691	0.251548	0.255839	0.303699	0.267649	0.288543	0.326397	0.491101 [...]
+U68595	0.392240	0.273158	0.427517	0.229894	0.283055	0.364319	0.000000	0.360148	0.317196	0.281804	0.385823	0.388912	0.345663	0.278042	0.457303	0.333759	0.289841	0.378719	0.575126	0.415251	0.158838	0.354610	0.361109	0.272713	0.350892	0.343931	0.367380	0.353619	0.302505	0.320312	0.302505	0.342812	0.356954	0.348044	0.311178	0.330516	0.332286	0.421731	0.336571	0.346594	0.335749	0.087023	0.340156	0.311076	0.296415	0.281804	0.074568	0.282904	0.290513	0.326581	0.311238	0.311238	0.357723	0.541469 [...]
+U68596	0.309315	0.312653	0.322673	0.363330	0.291774	0.280537	0.360148	0.000000	0.276011	0.259990	0.287117	0.325688	0.131424	0.301197	0.359219	0.273363	0.277082	0.125029	0.478761	0.382431	0.364319	0.296894	0.315411	0.298772	0.039097	0.297549	0.198193	0.075812	0.286874	0.307011	0.286874	0.279315	0.383444	0.218017	0.307081	0.321375	0.206134	0.374967	0.122326	0.106410	0.209430	0.349614	0.271657	0.301609	0.116332	0.268754	0.323601	0.307129	0.270360	0.296132	0.267363	0.310893	0.287842	0.434443 [...]
+U68597	0.320066	0.266838	0.352060	0.325227	0.217362	0.263379	0.317196	0.276011	0.000000	0.213189	0.326099	0.324279	0.280789	0.208454	0.401870	0.052145	0.205688	0.339908	0.517938	0.389787	0.286874	0.312058	0.252570	0.227071	0.260235	0.280899	0.289951	0.269742	0.228204	0.274934	0.228204	0.229993	0.360180	0.271878	0.225078	0.237745	0.269502	0.380020	0.281484	0.281484	0.284238	0.307129	0.090568	0.248578	0.260771	0.238261	0.290224	0.227664	0.220923	0.218861	0.282700	0.235348	0.315691	0.475314 [...]
+U68598	0.328638	0.206259	0.344952	0.265168	0.189372	0.251328	0.281804	0.259990	0.213189	0.000000	0.292542	0.335138	0.253181	0.090237	0.351449	0.213304	0.179339	0.275794	0.515138	0.392240	0.259924	0.268378	0.211242	0.087582	0.257723	0.268754	0.268852	0.245013	0.063062	0.058566	0.063062	0.273230	0.314261	0.282537	0.133092	0.246349	0.289370	0.372970	0.249858	0.233746	0.263548	0.256715	0.180919	0.166671	0.237960	0.091021	0.264200	0.101521	0.054228	0.217646	0.160277	0.059437	0.304895	0.467931 [...]
+U68599	0.257245	0.328665	0.369658	0.338989	0.294160	0.303296	0.385823	0.287117	0.326099	0.292542	0.000000	0.343091	0.272707	0.317244	0.396829	0.353944	0.306491	0.332816	0.575865	0.426079	0.406623	0.312736	0.346776	0.307959	0.268573	0.309717	0.277586	0.275370	0.329736	0.357883	0.329736	0.306116	0.427369	0.279200	0.340130	0.316773	0.299756	0.437168	0.261354	0.282700	0.291995	0.383119	0.343991	0.347951	0.255391	0.307991	0.354464	0.328894	0.312653	0.308495	0.289841	0.345365	0.305583	0.453716 [...]
+U68600	0.337227	0.341084	0.122794	0.364319	0.302449	0.322132	0.388912	0.325688	0.324279	0.335138	0.343091	0.000000	0.349854	0.356991	0.427677	0.358357	0.294295	0.355104	0.567758	0.478155	0.407315	0.335955	0.377929	0.340460	0.294108	0.324279	0.317674	0.319670	0.360624	0.384102	0.360624	0.302011	0.418941	0.289376	0.369495	0.340106	0.305343	0.484127	0.331431	0.317305	0.316017	0.395785	0.297402	0.383867	0.338041	0.352503	0.379655	0.347509	0.349713	0.309890	0.369636	0.365777	0.410836	0.489114 [...]
+U68601	0.301773	0.323764	0.372984	0.350376	0.284617	0.292099	0.345663	0.131424	0.280789	0.253181	0.272707	0.349854	0.000000	0.268659	0.343707	0.269811	0.276017	0.105021	0.515216	0.408959	0.334876	0.278924	0.332523	0.280600	0.113060	0.308667	0.214579	0.096909	0.269696	0.282700	0.269696	0.258358	0.402721	0.240354	0.292729	0.304869	0.247109	0.395778	0.045250	0.113176	0.202970	0.326769	0.262255	0.290033	0.060707	0.256082	0.324605	0.277833	0.261273	0.286874	0.245013	0.288885	0.277621	0.467569 [...]
+U68602	0.307105	0.220396	0.369623	0.266663	0.219618	0.249492	0.278042	0.301197	0.208454	0.090237	0.317244	0.356991	0.268659	0.000000	0.426605	0.208459	0.204576	0.321747	0.522807	0.358242	0.258188	0.285788	0.190585	0.078406	0.295495	0.300509	0.303362	0.291180	0.075377	0.137887	0.075377	0.272545	0.341198	0.285366	0.113662	0.254692	0.304099	0.365865	0.292820	0.283918	0.298814	0.266079	0.181048	0.169421	0.271468	0.088528	0.246808	0.032570	0.069818	0.236275	0.150174	0.112339	0.292002	0.506595 [...]
+U68603	0.477681	0.362613	0.467881	0.418468	0.385130	0.386773	0.457303	0.359219	0.401870	0.351449	0.396829	0.427677	0.343707	0.426605	0.000000	0.402262	0.386773	0.381782	0.601148	0.280287	0.439624	0.409367	0.439617	0.353443	0.354610	0.371261	0.340156	0.365692	0.387976	0.412799	0.387976	0.438400	0.438472	0.378448	0.409908	0.389040	0.433602	0.246241	0.345511	0.352090	0.423288	0.434792	0.377719	0.366452	0.335513	0.383472	0.409908	0.394969	0.369931	0.455351	0.408422	0.415506	0.443308	0.319627 [...]
+U68605	0.364754	0.274376	0.377089	0.331206	0.257535	0.237829	0.333759	0.273363	0.052145	0.213304	0.353944	0.358357	0.269811	0.208459	0.402262	0.000000	0.223376	0.282689	0.510658	0.398293	0.298684	0.308524	0.275360	0.230342	0.266324	0.304816	0.292471	0.259158	0.242871	0.285073	0.242871	0.277730	0.331907	0.317244	0.219999	0.265083	0.296132	0.383472	0.283253	0.274864	0.324259	0.312653	0.106011	0.262468	0.259629	0.250668	0.298546	0.219740	0.226896	0.256365	0.281484	0.247859	0.304882	0.501368 [...]
+U68606	0.320363	0.201198	0.324100	0.301152	0.122862	0.277808	0.289841	0.277082	0.205688	0.179339	0.306491	0.294295	0.276017	0.204576	0.386773	0.223376	0.000000	0.310100	0.529218	0.383464	0.287713	0.253689	0.260336	0.209198	0.246142	0.262712	0.280237	0.262636	0.207259	0.237879	0.207259	0.211947	0.323315	0.227993	0.215106	0.224023	0.234936	0.381089	0.276688	0.252152	0.246076	0.286311	0.213692	0.220723	0.264574	0.225386	0.275336	0.209430	0.186739	0.098208	0.251357	0.233701	0.330363	0.455046 [...]
+U68607	0.340712	0.317527	0.365825	0.384448	0.324013	0.334547	0.378719	0.125029	0.339908	0.275794	0.332816	0.355104	0.105021	0.321747	0.381782	0.282689	0.310100	0.000000	0.598114	0.376207	0.348574	0.308455	0.427909	0.306422	0.102569	0.338989	0.293255	0.091577	0.299521	0.332574	0.299521	0.300490	0.432427	0.246334	0.329318	0.330983	0.252567	0.375581	0.093606	0.112197	0.241563	0.389518	0.343693	0.311620	0.085083	0.297115	0.360638	0.321127	0.293285	0.317395	0.292002	0.321801	0.326215	0.428074 [...]
+U68608	0.643389	0.530014	0.548815	0.587163	0.518760	0.543352	0.575126	0.478761	0.517938	0.515138	0.575865	0.567758	0.515216	0.522807	0.601148	0.510658	0.529218	0.598114	0.000000	0.581450	0.603638	0.556018	0.602788	0.529843	0.479156	0.529751	0.516505	0.475006	0.533469	0.543402	0.533469	0.526527	0.612744	0.556206	0.504303	0.535651	0.553400	0.565457	0.501886	0.519860	0.550524	0.582089	0.512306	0.511328	0.495486	0.523554	0.568020	0.527665	0.539010	0.554398	0.496751	0.547458	0.584131	0.639768 [...]
+U68609	0.433138	0.373379	0.490123	0.416497	0.391165	0.396320	0.415251	0.382431	0.389787	0.392240	0.426079	0.478155	0.408959	0.358242	0.280287	0.398293	0.383464	0.376207	0.581450	0.000000	0.405148	0.444553	0.393834	0.365423	0.360638	0.392824	0.361630	0.366399	0.376591	0.394174	0.376591	0.412038	0.437145	0.389787	0.385549	0.347530	0.435804	0.054455	0.388719	0.385214	0.436903	0.415039	0.369502	0.365873	0.368906	0.374797	0.419842	0.359680	0.376207	0.396051	0.406702	0.382770	0.421971	0.316086 [...]
+U68610	0.376207	0.294704	0.450700	0.215923	0.284617	0.357389	0.158838	0.364319	0.286874	0.259924	0.406623	0.407315	0.334876	0.258188	0.439624	0.298684	0.287713	0.348574	0.603638	0.405148	0.000000	0.371261	0.352605	0.253805	0.348206	0.350892	0.334008	0.328491	0.272372	0.302606	0.272372	0.344266	0.352503	0.358283	0.289841	0.332634	0.362033	0.400684	0.336626	0.337821	0.319316	0.122032	0.316394	0.285826	0.292522	0.280214	0.113176	0.245608	0.256331	0.320988	0.302570	0.304099	0.335749	0.550657 [...]
+U68611	0.351515	0.286594	0.363708	0.319670	0.253894	0.244067	0.354610	0.296894	0.312058	0.268378	0.312736	0.335955	0.278924	0.285788	0.409367	0.308524	0.253689	0.308455	0.556018	0.444553	0.371261	0.000000	0.323464	0.272211	0.276644	0.163140	0.274066	0.276436	0.293630	0.308422	0.293630	0.244404	0.392797	0.296193	0.310028	0.286043	0.279880	0.437190	0.278924	0.280287	0.287713	0.325227	0.288339	0.284447	0.274589	0.277833	0.318002	0.284078	0.270628	0.279414	0.295223	0.313081	0.326397	0.549148 [...]
+U68612	0.373270	0.293104	0.396633	0.359680	0.273158	0.327178	0.361109	0.315411	0.252570	0.211242	0.346776	0.377929	0.332523	0.190585	0.439617	0.275360	0.260336	0.427909	0.602788	0.393834	0.352605	0.323464	0.000000	0.215762	0.299734	0.355207	0.322132	0.313854	0.208721	0.229389	0.208721	0.306954	0.401787	0.344582	0.211884	0.289119	0.334817	0.411243	0.312724	0.306441	0.331715	0.359680	0.223342	0.057989	0.297432	0.204444	0.327350	0.192489	0.186695	0.279143	0.232061	0.235404	0.314346	0.493996 [...]
+U68613	0.325136	0.214397	0.363488	0.254598	0.213889	0.251136	0.272713	0.298772	0.227071	0.087582	0.307959	0.340460	0.280600	0.078406	0.353443	0.230342	0.209198	0.306422	0.529843	0.365423	0.253805	0.272211	0.215762	0.000000	0.284460	0.278520	0.266174	0.289008	0.082361	0.095671	0.082361	0.273482	0.326769	0.265512	0.124741	0.249678	0.284857	0.344388	0.294114	0.262900	0.288107	0.239932	0.200086	0.180522	0.280872	0.083419	0.250717	0.098361	0.076175	0.229291	0.156701	0.092724	0.326489	0.460841 [...]
+U68614	0.289019	0.304876	0.311317	0.332803	0.267779	0.286583	0.350892	0.039097	0.260235	0.257723	0.268573	0.294108	0.113060	0.295495	0.354610	0.266324	0.246142	0.102569	0.479156	0.360638	0.348206	0.276644	0.299734	0.284460	0.000000	0.272001	0.178312	0.063195	0.272875	0.308489	0.272875	0.265991	0.368968	0.204392	0.298898	0.309193	0.203240	0.343707	0.106184	0.092619	0.203308	0.330765	0.242629	0.301609	0.091368	0.257723	0.307959	0.292820	0.253585	0.268659	0.270891	0.302594	0.298857	0.435707 [...]
+U68615	0.310340	0.294032	0.329411	0.338989	0.274252	0.260851	0.343931	0.297549	0.280899	0.268754	0.309717	0.324279	0.308667	0.300509	0.371261	0.304816	0.262712	0.338989	0.529751	0.392824	0.350892	0.163140	0.355207	0.278520	0.272001	0.000000	0.255720	0.279695	0.305619	0.315059	0.305619	0.221866	0.383300	0.251181	0.304845	0.189645	0.254818	0.376347	0.298814	0.307154	0.270331	0.342727	0.309422	0.295811	0.289504	0.307959	0.326113	0.310948	0.294934	0.273673	0.303351	0.301848	0.356390	0.501990 [...]
+U68616	0.307018	0.296294	0.347363	0.326397	0.271113	0.261823	0.367380	0.198193	0.289951	0.268852	0.277586	0.317674	0.214579	0.303362	0.340156	0.292471	0.280237	0.293255	0.516505	0.361630	0.334008	0.274066	0.322132	0.266174	0.178312	0.255720	0.000000	0.170293	0.263548	0.303357	0.263548	0.272761	0.407097	0.206167	0.278042	0.279347	0.200785	0.359680	0.188051	0.206041	0.243496	0.348416	0.285214	0.292522	0.194553	0.245275	0.324779	0.289609	0.244067	0.307339	0.248300	0.292637	0.301081	0.412859 [...]
+U68617	0.308559	0.318935	0.328716	0.347490	0.291080	0.288339	0.353619	0.075812	0.269742	0.245013	0.275370	0.319670	0.096909	0.291180	0.365692	0.259158	0.262636	0.091577	0.475006	0.366399	0.328491	0.276436	0.313854	0.289008	0.063195	0.279695	0.170293	0.000000	0.274451	0.301197	0.274451	0.253341	0.392254	0.199277	0.300395	0.307806	0.195565	0.354966	0.083535	0.085569	0.193667	0.339273	0.235838	0.296685	0.088740	0.259354	0.313377	0.288419	0.250108	0.277730	0.261021	0.298123	0.285616	0.455046 [...]
+U68618	0.324956	0.228159	0.384142	0.281664	0.214579	0.269312	0.302505	0.286874	0.228204	0.063062	0.329736	0.360624	0.269696	0.075377	0.387976	0.242871	0.207259	0.299521	0.533469	0.376591	0.272372	0.293630	0.208721	0.082361	0.272875	0.305619	0.263548	0.274451	0.000000	0.095064	0.000000	0.265512	0.325096	0.297172	0.142765	0.270360	0.304876	0.371771	0.276933	0.266125	0.288242	0.281484	0.212177	0.172607	0.250617	0.081334	0.280600	0.108701	0.020396	0.238146	0.145189	0.069377	0.333640	0.472888 [...]
+U68619	0.368119	0.215592	0.397333	0.312183	0.240602	0.305548	0.320312	0.307011	0.274934	0.058566	0.357883	0.384102	0.282700	0.137887	0.412799	0.285073	0.237879	0.332574	0.543402	0.394174	0.302606	0.308422	0.229389	0.095671	0.308489	0.315059	0.303357	0.301197	0.095064	0.000000	0.095064	0.304839	0.413282	0.318229	0.140838	0.272302	0.325861	0.402721	0.289247	0.261648	0.311791	0.304099	0.229421	0.147700	0.264574	0.103613	0.281484	0.090573	0.076014	0.263244	0.166133	0.065526	0.328691	0.489627 [...]
+U68620	0.324956	0.228159	0.384142	0.281664	0.214579	0.269312	0.302505	0.286874	0.228204	0.063062	0.329736	0.360624	0.269696	0.075377	0.387976	0.242871	0.207259	0.299521	0.533469	0.376591	0.272372	0.293630	0.208721	0.082361	0.272875	0.305619	0.263548	0.274451	0.000000	0.095064	0.000000	0.265512	0.325096	0.297172	0.142765	0.270360	0.304876	0.371771	0.276933	0.266125	0.288242	0.281484	0.212177	0.172607	0.250617	0.081334	0.280600	0.108701	0.020396	0.238146	0.145189	0.069377	0.333640	0.472888 [...]
+U68621	0.259707	0.285269	0.315472	0.373185	0.223590	0.230560	0.342812	0.279315	0.229993	0.273230	0.306116	0.302011	0.258358	0.272545	0.438400	0.277730	0.211947	0.300490	0.526527	0.412038	0.344266	0.244404	0.306954	0.273482	0.265991	0.221866	0.272761	0.253341	0.265512	0.304839	0.265512	0.000000	0.371589	0.230861	0.287582	0.225242	0.236151	0.411243	0.265168	0.273730	0.230514	0.332052	0.251918	0.291619	0.247272	0.265550	0.302518	0.259636	0.248799	0.220490	0.280097	0.276248	0.324434	0.531437 [...]
+U68622	0.409194	0.383119	0.415027	0.359680	0.344108	0.394362	0.356954	0.383444	0.360180	0.314261	0.427369	0.418941	0.402721	0.341198	0.438472	0.331907	0.323315	0.432427	0.612744	0.437145	0.352503	0.392797	0.401787	0.326769	0.368968	0.383300	0.407097	0.392254	0.325096	0.413282	0.325096	0.371589	0.000000	0.403435	0.379793	0.348616	0.395343	0.450700	0.405119	0.386528	0.366399	0.357883	0.359425	0.335513	0.374017	0.343141	0.336679	0.359680	0.313454	0.366934	0.353207	0.358283	0.359680	0.510347 [...]
+U68623	0.273253	0.287768	0.310736	0.365184	0.247344	0.270690	0.348044	0.218017	0.271878	0.282537	0.279200	0.289376	0.240354	0.285366	0.378448	0.317244	0.227993	0.246334	0.556206	0.389787	0.358283	0.296193	0.344582	0.265512	0.204392	0.251181	0.206167	0.199277	0.297172	0.318229	0.297172	0.230861	0.403435	0.000000	0.291380	0.271468	0.078467	0.390702	0.237019	0.215762	0.167154	0.352302	0.291111	0.314987	0.219740	0.262888	0.321413	0.293373	0.278803	0.250230	0.274889	0.317196	0.320851	0.434443 [...]
+U68624	0.348793	0.248463	0.388493	0.301866	0.236421	0.279315	0.311178	0.307081	0.225078	0.133092	0.340130	0.369495	0.292729	0.113662	0.409908	0.219999	0.215106	0.329318	0.504303	0.385549	0.289841	0.310028	0.211884	0.124741	0.298898	0.304845	0.278042	0.300395	0.142765	0.140838	0.142765	0.287582	0.379793	0.291380	0.000000	0.261021	0.307896	0.400079	0.286594	0.285469	0.289370	0.311667	0.228248	0.165149	0.291943	0.123718	0.297172	0.114034	0.130379	0.254536	0.155691	0.151832	0.320446	0.489951 [...]
+U68625	0.338829	0.330389	0.359680	0.362488	0.222528	0.238840	0.330516	0.321375	0.237745	0.246349	0.316773	0.340106	0.304869	0.254692	0.389040	0.265083	0.224023	0.330983	0.535651	0.347530	0.332634	0.286043	0.289119	0.249678	0.309193	0.189645	0.279347	0.307806	0.270360	0.272302	0.270360	0.225242	0.348616	0.271468	0.261021	0.000000	0.279664	0.367932	0.305644	0.322097	0.268455	0.354419	0.240578	0.277547	0.275547	0.252146	0.329742	0.260975	0.257350	0.223138	0.268635	0.263548	0.349713	0.440840 [...]
+U68626	0.289890	0.331258	0.334111	0.382122	0.254171	0.242336	0.332286	0.206134	0.269502	0.289370	0.299756	0.305343	0.247109	0.304099	0.433602	0.296132	0.234936	0.252567	0.553400	0.435804	0.362033	0.279880	0.334817	0.284857	0.203240	0.254818	0.200785	0.195565	0.304876	0.325861	0.304876	0.236151	0.395343	0.078467	0.307896	0.279664	0.000000	0.426079	0.243717	0.222724	0.179466	0.343048	0.300831	0.314211	0.232025	0.281640	0.316704	0.304099	0.288634	0.251398	0.285168	0.328257	0.295811	0.463200 [...]
+U68627	0.460196	0.359192	0.500022	0.420349	0.396586	0.409723	0.421731	0.374967	0.380020	0.372970	0.437168	0.484127	0.395778	0.365865	0.246241	0.383472	0.381089	0.375581	0.565457	0.054455	0.400684	0.437190	0.411243	0.344388	0.343707	0.376347	0.359680	0.354966	0.371771	0.402721	0.371771	0.411243	0.450700	0.390702	0.400079	0.367932	0.426079	0.000000	0.363081	0.375192	0.429242	0.414051	0.365956	0.377522	0.367395	0.356752	0.402670	0.354897	0.360652	0.407315	0.418941	0.400684	0.432050	0.340769 [...]
+U68628	0.287313	0.314346	0.350766	0.356390	0.289841	0.300336	0.336571	0.122326	0.281484	0.249858	0.261354	0.331431	0.045250	0.292820	0.345511	0.283253	0.276688	0.093606	0.501886	0.388719	0.336626	0.278924	0.312724	0.294114	0.106184	0.298814	0.188051	0.083535	0.276933	0.289247	0.276933	0.265168	0.405119	0.237019	0.286594	0.305644	0.243717	0.363081	0.000000	0.090627	0.188933	0.337227	0.270360	0.271343	0.055913	0.257989	0.330389	0.285168	0.263877	0.301081	0.243355	0.293373	0.270760	0.426473 [...]
+U68629	0.304876	0.282537	0.333145	0.342170	0.278520	0.292820	0.346594	0.106410	0.281484	0.233746	0.282700	0.317305	0.113176	0.283918	0.352090	0.274864	0.252152	0.112197	0.519860	0.385214	0.337821	0.280287	0.306441	0.262900	0.092619	0.307154	0.206041	0.085569	0.266125	0.261648	0.266125	0.273730	0.386528	0.215762	0.285469	0.322097	0.222724	0.375192	0.090627	0.000000	0.206713	0.335224	0.255453	0.285986	0.087321	0.233746	0.329311	0.274214	0.238840	0.279408	0.247109	0.276933	0.301031	0.443436 [...]
+U68630	0.293627	0.308195	0.333379	0.340183	0.259933	0.281697	0.335749	0.209430	0.284238	0.263548	0.291995	0.316017	0.202970	0.298814	0.423288	0.324259	0.246076	0.241563	0.550524	0.436903	0.319316	0.287713	0.331715	0.288107	0.203308	0.270331	0.243496	0.193667	0.288242	0.311791	0.288242	0.230514	0.366399	0.167154	0.289370	0.268455	0.179466	0.429242	0.188933	0.206713	0.000000	0.338085	0.294523	0.294500	0.183653	0.256715	0.307367	0.296937	0.265955	0.252463	0.269931	0.309669	0.304889	0.508683 [...]
+U68631	0.398745	0.273482	0.442063	0.179474	0.283223	0.340691	0.087023	0.349614	0.307129	0.256715	0.383119	0.395785	0.326769	0.266079	0.434792	0.312653	0.286311	0.389518	0.582089	0.415039	0.122032	0.325227	0.359680	0.239932	0.330765	0.342727	0.348416	0.339273	0.281484	0.304099	0.281484	0.332052	0.357883	0.352302	0.311667	0.354419	0.343048	0.414051	0.337227	0.335224	0.338085	0.000000	0.317889	0.288465	0.299159	0.262568	0.006024	0.269035	0.268468	0.316294	0.302582	0.288293	0.346062	0.544453 [...]
+U68632	0.323458	0.265198	0.319388	0.326851	0.225527	0.269265	0.340156	0.271657	0.090568	0.180919	0.343991	0.297402	0.262255	0.181048	0.377719	0.106011	0.213692	0.343693	0.512306	0.369502	0.316394	0.288339	0.223342	0.200086	0.242629	0.309422	0.285214	0.235838	0.212177	0.229421	0.212177	0.251918	0.359425	0.291111	0.228248	0.240578	0.300831	0.365956	0.270360	0.255453	0.294523	0.317889	0.000000	0.236842	0.245339	0.202000	0.295536	0.182363	0.188040	0.234687	0.248141	0.210404	0.301031	0.483786 [...]
+U68633	0.343931	0.272626	0.395174	0.298352	0.244836	0.277267	0.311076	0.301609	0.248578	0.166671	0.347951	0.383867	0.290033	0.169421	0.366452	0.262468	0.220723	0.311620	0.511328	0.365873	0.285826	0.284447	0.057989	0.180522	0.301609	0.295811	0.292522	0.296685	0.172607	0.147700	0.172607	0.291619	0.335513	0.314987	0.165149	0.277547	0.314211	0.377522	0.271343	0.285986	0.294500	0.288465	0.236842	0.000000	0.288185	0.160844	0.299971	0.167496	0.169421	0.258338	0.177731	0.186103	0.298197	0.422792 [...]
+U68634	0.270320	0.323924	0.355606	0.302463	0.268053	0.281613	0.296415	0.116332	0.260771	0.237960	0.255391	0.338041	0.060707	0.271468	0.335513	0.259629	0.264574	0.085083	0.495486	0.368906	0.292522	0.274589	0.297432	0.280872	0.091368	0.289504	0.194553	0.088740	0.250617	0.264574	0.250617	0.247272	0.374017	0.219740	0.291943	0.275547	0.232025	0.367395	0.055913	0.087321	0.183653	0.299159	0.245339	0.288185	0.000000	0.250617	0.304099	0.279190	0.240945	0.287208	0.224344	0.265629	0.255902	0.405491 [...]
+U68635	0.328638	0.216451	0.372774	0.260630	0.202789	0.265168	0.281804	0.268754	0.238261	0.091021	0.307991	0.352503	0.256082	0.088528	0.383472	0.250668	0.225386	0.297115	0.523554	0.374797	0.280214	0.277833	0.204444	0.083419	0.257723	0.307959	0.245275	0.259354	0.081334	0.103613	0.081334	0.265550	0.343141	0.262888	0.123718	0.252146	0.281640	0.356752	0.257989	0.233746	0.256715	0.262568	0.202000	0.160844	0.250617	0.000000	0.268468	0.090237	0.077156	0.229810	0.128824	0.103537	0.315353	0.478961 [...]
+U68636	0.360667	0.281824	0.425988	0.164494	0.269811	0.315691	0.074568	0.323601	0.290224	0.264200	0.354464	0.379655	0.324605	0.246808	0.409908	0.298546	0.275336	0.360638	0.568020	0.419842	0.113176	0.318002	0.327350	0.250717	0.307959	0.326113	0.324779	0.313377	0.280600	0.281484	0.280600	0.302518	0.336679	0.321413	0.297172	0.329742	0.316704	0.402670	0.330389	0.329311	0.307367	0.006024	0.295536	0.299971	0.304099	0.268468	0.000000	0.268754	0.270088	0.305656	0.294114	0.275102	0.337186	0.528684 [...]
+U68637	0.317641	0.241593	0.367269	0.275794	0.214077	0.251548	0.282904	0.307129	0.227664	0.101521	0.328894	0.347509	0.277833	0.032570	0.394969	0.219740	0.209430	0.321127	0.527665	0.359680	0.245608	0.284078	0.192489	0.098361	0.292820	0.310948	0.289609	0.288419	0.108701	0.090573	0.108701	0.259636	0.359680	0.293373	0.114034	0.260975	0.304099	0.354897	0.285168	0.274214	0.296937	0.269035	0.182363	0.167496	0.279190	0.090237	0.268754	0.000000	0.085319	0.232616	0.147620	0.108022	0.286305	0.493636 [...]
+U68638	0.307311	0.230389	0.363032	0.268092	0.196928	0.255839	0.290513	0.270360	0.220923	0.054228	0.312653	0.349713	0.261273	0.069818	0.369931	0.226896	0.186739	0.293285	0.539010	0.376207	0.256331	0.270628	0.186695	0.076175	0.253585	0.294934	0.244067	0.250108	0.020396	0.076014	0.020396	0.248799	0.313454	0.278803	0.130379	0.257350	0.288634	0.360652	0.263877	0.238840	0.265955	0.268468	0.188040	0.169421	0.240945	0.077156	0.270088	0.085319	0.000000	0.222623	0.137655	0.072655	0.306481	0.474782 [...]
+U68639	0.318242	0.228159	0.342135	0.355578	0.113423	0.303699	0.326581	0.296132	0.218861	0.217646	0.308495	0.309890	0.286874	0.236275	0.455351	0.256365	0.098208	0.317395	0.554398	0.396051	0.320988	0.279414	0.279143	0.229291	0.268659	0.273673	0.307339	0.277730	0.238146	0.263244	0.238146	0.220490	0.366934	0.250230	0.254536	0.223138	0.251398	0.407315	0.301081	0.279408	0.252463	0.316294	0.234687	0.258338	0.287208	0.229810	0.305656	0.232616	0.222623	0.000000	0.266719	0.259776	0.354538	0.481765 [...]
+U68640	0.315163	0.255453	0.362984	0.303351	0.232067	0.267649	0.311238	0.267363	0.282700	0.160277	0.289841	0.369636	0.245013	0.150174	0.408422	0.281484	0.251357	0.292002	0.496751	0.406702	0.302570	0.295223	0.232061	0.156701	0.270891	0.303351	0.248300	0.261021	0.145189	0.166133	0.145189	0.280097	0.353207	0.274889	0.155691	0.268635	0.285168	0.418941	0.243355	0.247109	0.269931	0.302582	0.248141	0.177731	0.224344	0.128824	0.294114	0.147620	0.137655	0.266719	0.000000	0.188057	0.314261	0.480584 [...]
+U68641	0.334519	0.219740	0.392626	0.295152	0.230975	0.288543	0.311238	0.310893	0.235348	0.059437	0.345365	0.365777	0.288885	0.112339	0.415506	0.247859	0.233701	0.321801	0.547458	0.382770	0.304099	0.313081	0.235404	0.092724	0.302594	0.301848	0.292637	0.298123	0.069377	0.065526	0.069377	0.276248	0.358283	0.317196	0.151832	0.263548	0.328257	0.400684	0.293373	0.276933	0.309669	0.288293	0.210404	0.186103	0.265629	0.103537	0.275102	0.108022	0.072655	0.259776	0.188057	0.000000	0.334262	0.474328 [...]
+U68642	0.259354	0.357218	0.425088	0.347147	0.324266	0.326397	0.357723	0.287842	0.315691	0.304895	0.305583	0.410836	0.277621	0.292002	0.443308	0.304882	0.330363	0.326215	0.584131	0.421971	0.335749	0.326397	0.314346	0.326489	0.298857	0.356390	0.301081	0.285616	0.333640	0.328691	0.333640	0.324434	0.359680	0.320851	0.320446	0.349713	0.295811	0.432050	0.270760	0.301031	0.304889	0.346062	0.301031	0.298197	0.255902	0.315353	0.337186	0.286305	0.306481	0.354538	0.314261	0.334262	0.000000	0.535895 [...]
+U68643	0.513252	0.507737	0.518779	0.556294	0.486147	0.491101	0.541469	0.434443	0.475314	0.467931	0.453716	0.489114	0.467569	0.506595	0.319627	0.501368	0.455046	0.428074	0.639768	0.316086	0.550657	0.549148	0.493996	0.460841	0.435707	0.501990	0.412859	0.455046	0.472888	0.489627	0.472888	0.531437	0.510347	0.434443	0.489951	0.440840	0.463200	0.340769	0.426473	0.443436	0.508683	0.544453	0.483786	0.422792	0.405491	0.478961	0.528684	0.493636	0.474782	0.481765	0.480584	0.474328	0.535895	0.000000 [...]
+U68644	0.321245	0.178105	0.344833	0.353384	0.211278	0.250325	0.320049	0.324729	0.223132	0.220396	0.308514	0.325888	0.335821	0.233137	0.415147	0.257295	0.205330	0.356730	0.550818	0.435867	0.342412	0.298395	0.268659	0.237632	0.322103	0.259524	0.309399	0.318008	0.240927	0.287662	0.240927	0.203750	0.374212	0.275083	0.270760	0.221359	0.272013	0.433514	0.330363	0.321801	0.260273	0.301129	0.240330	0.233821	0.306565	0.234864	0.288035	0.243137	0.225386	0.214707	0.263548	0.273363	0.353619	0.539342 [...]
+U68645	0.382122	0.215419	0.433896	0.281513	0.250059	0.322242	0.329525	0.320324	0.276644	0.056869	0.365035	0.413440	0.278042	0.141007	0.422982	0.281020	0.255784	0.348163	0.578345	0.417506	0.294354	0.317795	0.258630	0.101083	0.330520	0.328142	0.323464	0.304827	0.095468	0.051539	0.095468	0.329324	0.401357	0.334564	0.128519	0.285366	0.336209	0.423717	0.286874	0.252759	0.337266	0.287010	0.252935	0.151116	0.251266	0.100294	0.270088	0.104797	0.078908	0.273253	0.141210	0.076777	0.312513	0.497091 [...]
+U68646	0.338989	0.133686	0.360705	0.354194	0.183046	0.287798	0.328058	0.321238	0.238173	0.237157	0.329081	0.335982	0.339280	0.268748	0.396313	0.276436	0.205189	0.363158	0.530906	0.405148	0.326397	0.317244	0.302881	0.255453	0.301220	0.291387	0.295560	0.320452	0.275567	0.301175	0.275567	0.245375	0.377369	0.286601	0.287010	0.267102	0.300887	0.390401	0.345711	0.311061	0.305337	0.315503	0.239488	0.304931	0.310707	0.260927	0.297115	0.277833	0.250906	0.234643	0.303345	0.293630	0.371187	0.527475 [...]
+U68647	0.311488	0.292498	0.369193	0.355266	0.292387	0.250443	0.338641	0.253537	0.260877	0.283792	0.285623	0.345905	0.235212	0.273411	0.395090	0.258952	0.253791	0.250934	0.493520	0.434995	0.337603	0.230661	0.335138	0.260533	0.247630	0.281947	0.224281	0.227476	0.277657	0.280315	0.277657	0.252842	0.350814	0.184931	0.265449	0.311416	0.182388	0.429138	0.240487	0.261676	0.238540	0.319620	0.275524	0.323319	0.233275	0.270445	0.325073	0.288965	0.280315	0.293359	0.245177	0.263548	0.295095	0.437808 [...]
+U68648	0.341095	0.280214	0.320658	0.345025	0.232616	0.263205	0.325930	0.277323	0.080515	0.204243	0.347449	0.290368	0.285879	0.193059	0.401639	0.109729	0.224997	0.350157	0.524934	0.369502	0.332904	0.315059	0.220441	0.216451	0.253894	0.300025	0.284361	0.251570	0.219871	0.245536	0.219871	0.231127	0.362749	0.289119	0.222303	0.240267	0.295955	0.372774	0.286594	0.262900	0.299504	0.319500	0.053835	0.236890	0.274079	0.210733	0.300178	0.189802	0.206483	0.244816	0.250947	0.224397	0.337227	0.499400 [...]
+U68649	0.306169	0.206522	0.325439	0.328101	0.112072	0.265408	0.294354	0.285509	0.216293	0.195545	0.304099	0.297118	0.281157	0.210058	0.427832	0.225931	0.088631	0.314150	0.511139	0.380020	0.302594	0.246272	0.264399	0.207005	0.246142	0.248746	0.280971	0.270886	0.213078	0.259907	0.213078	0.201445	0.337603	0.241674	0.207751	0.207324	0.235523	0.381089	0.287713	0.272211	0.237380	0.295152	0.228785	0.236229	0.258439	0.204047	0.278313	0.223345	0.197745	0.078467	0.229894	0.226125	0.336679	0.466873 [...]
+U68651	0.343707	0.225386	0.383119	0.297497	0.229389	0.285221	0.304876	0.275794	0.258283	0.122632	0.307081	0.379793	0.239284	0.128374	0.382784	0.255977	0.218866	0.291481	0.512865	0.394969	0.292910	0.277528	0.239217	0.128950	0.276453	0.310028	0.262279	0.263859	0.125200	0.139052	0.125200	0.298857	0.342960	0.292194	0.127424	0.260018	0.299594	0.402493	0.243831	0.243137	0.272114	0.287842	0.238592	0.167082	0.229658	0.106624	0.283591	0.115862	0.117632	0.235838	0.071973	0.161964	0.314094	0.477706 [...]
+U68652	0.348475	0.300901	0.379501	0.213459	0.303686	0.362481	0.162738	0.349097	0.299971	0.272013	0.338989	0.368187	0.307230	0.252951	0.440840	0.282868	0.293900	0.393620	0.599761	0.392436	0.034512	0.349752	0.316294	0.260877	0.321103	0.336162	0.341726	0.310123	0.267882	0.334118	0.267882	0.314077	0.394923	0.343393	0.314177	0.342546	0.359425	0.383472	0.309576	0.330389	0.326026	0.127003	0.308324	0.283918	0.284098	0.270341	0.116217	0.249419	0.259194	0.309890	0.304099	0.308781	0.328875	0.534844 [...]
+U68653	0.243919	0.269939	0.347418	0.326145	0.259968	0.236729	0.307089	0.210949	0.213433	0.254871	0.272729	0.332441	0.201004	0.230563	0.374621	0.224783	0.227154	0.221028	0.571605	0.393039	0.286094	0.214294	0.289992	0.244357	0.197150	0.245817	0.183668	0.192784	0.238132	0.244357	0.238132	0.250833	0.341167	0.177292	0.231928	0.288050	0.140909	0.370407	0.194447	0.204305	0.228277	0.289992	0.251591	0.311791	0.205153	0.220881	0.291919	0.235376	0.230563	0.254111	0.218291	0.245817	0.239642	0.411118 [...]
+U68654	0.301619	0.216102	0.316998	0.323619	0.078728	0.277338	0.299558	0.281859	0.230306	0.199143	0.285560	0.290607	0.266425	0.234185	0.406823	0.247805	0.092168	0.312533	0.518490	0.411424	0.315503	0.234185	0.286151	0.227071	0.253115	0.257692	0.289247	0.270002	0.230411	0.258374	0.230411	0.209637	0.336371	0.241979	0.246241	0.213363	0.250546	0.411956	0.275794	0.264188	0.248226	0.299594	0.223210	0.237459	0.248226	0.223691	0.288761	0.240927	0.212177	0.078467	0.244762	0.249161	0.319500	0.489951 [...]
+U68655	0.371532	0.274415	0.421263	0.311791	0.248463	0.312513	0.312190	0.330581	0.287448	0.141357	0.352605	0.414051	0.329950	0.134623	0.458622	0.296294	0.286026	0.354464	0.605463	0.399332	0.314261	0.348574	0.241782	0.132502	0.330581	0.342488	0.307991	0.325750	0.129518	0.145189	0.129518	0.320722	0.401167	0.315071	0.166727	0.297926	0.330389	0.411243	0.312653	0.291903	0.300873	0.314177	0.258995	0.197107	0.300817	0.095852	0.316599	0.136439	0.131251	0.266778	0.174732	0.155965	0.333893	0.489770 [...]
+U68656	0.377102	0.312052	0.378254	0.336626	0.312724	0.310272	0.329749	0.300274	0.294782	0.249419	0.388604	0.381726	0.298684	0.282712	0.451079	0.312120	0.240514	0.307204	0.588165	0.423177	0.339589	0.275336	0.304895	0.266838	0.285879	0.300980	0.332765	0.290448	0.276712	0.265821	0.276712	0.191624	0.411424	0.322248	0.294114	0.285579	0.308821	0.421971	0.284857	0.268468	0.335595	0.313533	0.285579	0.296553	0.275794	0.261230	0.311238	0.288035	0.251726	0.302544	0.286451	0.267506	0.400190	0.534446 [...]
+U68657	0.428155	0.365054	0.445836	0.424992	0.364976	0.394174	0.419034	0.337227	0.380688	0.364065	0.389650	0.427271	0.372774	0.353020	0.235348	0.381696	0.356836	0.344952	0.529255	0.120257	0.404023	0.413081	0.393030	0.349177	0.334876	0.383805	0.319187	0.337227	0.366399	0.382777	0.366399	0.399938	0.440840	0.382080	0.386925	0.349022	0.421021	0.103389	0.351040	0.336019	0.404951	0.421143	0.361630	0.372728	0.345990	0.361630	0.408763	0.349713	0.358713	0.382431	0.405582	0.388719	0.412218	0.304889 [...]
+U68658	0.343582	0.244067	0.413887	0.299483	0.227562	0.292637	0.319716	0.301031	0.251943	0.073124	0.338106	0.399584	0.271435	0.092524	0.395502	0.261230	0.227613	0.304876	0.565329	0.370938	0.290852	0.305631	0.209994	0.095435	0.289609	0.329950	0.282537	0.282183	0.022512	0.103613	0.022512	0.263548	0.351568	0.314346	0.147798	0.278803	0.328875	0.371951	0.279506	0.273049	0.303286	0.303320	0.228113	0.183254	0.250164	0.087181	0.304882	0.113176	0.027067	0.254205	0.159554	0.079587	0.337186	0.476013 [...]
+U68659	0.363540	0.239423	0.422482	0.277780	0.283550	0.315450	0.286735	0.327910	0.281721	0.097752	0.369650	0.395762	0.288293	0.082137	0.434828	0.230023	0.270276	0.457110	0.571133	0.393321	0.287582	0.296253	0.253430	0.085685	0.303385	0.357960	0.340079	0.308489	0.102724	0.146029	0.102724	0.309354	0.429834	0.338682	0.138222	0.292483	0.351054	0.395244	0.304863	0.295744	0.338675	0.283223	0.272179	0.165149	0.267350	0.112297	0.266174	0.107789	0.095229	0.290684	0.173334	0.131709	0.321127	0.505483 [...]
+U68660	0.320583	0.322718	0.380263	0.373994	0.314611	0.288375	0.353057	0.249858	0.279610	0.300928	0.311985	0.362657	0.256331	0.296162	0.380958	0.295955	0.279815	0.259562	0.545907	0.398293	0.365685	0.267206	0.338367	0.288375	0.241188	0.308090	0.225714	0.236040	0.282904	0.301734	0.282904	0.254734	0.379204	0.190289	0.288375	0.305724	0.177292	0.384564	0.250042	0.252986	0.260771	0.343582	0.300901	0.327417	0.227815	0.265230	0.343582	0.307284	0.281453	0.304099	0.285269	0.286601	0.313023	0.389572 [...]
+U68661	0.377095	0.331029	0.378280	0.399403	0.333983	0.251022	0.431301	0.361031	0.353384	0.312653	0.379238	0.378280	0.371426	0.315818	0.438429	0.353786	0.315818	0.400677	0.596479	0.464872	0.396313	0.386331	0.385769	0.330581	0.364192	0.368696	0.345563	0.355634	0.334367	0.376019	0.334367	0.349242	0.426935	0.353384	0.375507	0.357389	0.367885	0.460900	0.379082	0.367269	0.372400	0.404462	0.350631	0.349934	0.360697	0.356836	0.383805	0.330363	0.326769	0.367366	0.350892	0.347680	0.386528	0.534583 [...]
+U68662	0.334577	0.347334	0.369961	0.362410	0.288242	0.252140	0.383792	0.309193	0.273195	0.287313	0.319872	0.329411	0.308631	0.306261	0.421844	0.300536	0.268415	0.345273	0.537090	0.440840	0.397621	0.283511	0.346587	0.298772	0.293580	0.236410	0.289951	0.295495	0.330363	0.307748	0.330363	0.218788	0.409711	0.273546	0.300395	0.258630	0.266933	0.444553	0.304099	0.297339	0.278714	0.398920	0.289567	0.293414	0.285540	0.311855	0.374577	0.295811	0.301792	0.275557	0.300366	0.323126	0.339857	0.499746 [...]
+U68663	0.328665	0.307230	0.382431	0.369636	0.329950	0.280789	0.377762	0.225530	0.282712	0.301773	0.287713	0.355401	0.225459	0.300274	0.383119	0.287632	0.275567	0.247703	0.550346	0.432159	0.371771	0.279408	0.345663	0.304869	0.213078	0.311791	0.203381	0.201972	0.305631	0.333514	0.305631	0.259275	0.388292	0.152765	0.318002	0.348253	0.158482	0.425988	0.234281	0.230958	0.249599	0.351164	0.295607	0.329525	0.220294	0.277406	0.344044	0.326581	0.294114	0.310272	0.255839	0.322928	0.293373	0.462099 [...]
+U68664	0.438795	0.410098	0.394891	0.442027	0.382564	0.422422	0.498901	0.428059	0.448883	0.405367	0.447499	0.400562	0.487474	0.393527	0.510082	0.457170	0.400237	0.507162	0.583083	0.521929	0.467881	0.472722	0.474392	0.421263	0.432852	0.440840	0.434098	0.442013	0.400079	0.457461	0.400079	0.409401	0.484211	0.423111	0.454184	0.455820	0.450907	0.523433	0.455710	0.449496	0.460223	0.476546	0.424850	0.428856	0.444851	0.400684	0.447103	0.385171	0.389033	0.412830	0.439624	0.416934	0.468218	0.583528 [...]
+U68665	0.309493	0.300178	0.336453	0.380785	0.286976	0.250197	0.381754	0.211727	0.257111	0.277889	0.280537	0.314166	0.233168	0.300453	0.401870	0.283354	0.242283	0.273899	0.560288	0.429533	0.389953	0.268748	0.320656	0.287903	0.191338	0.281898	0.216929	0.185998	0.291803	0.340396	0.291803	0.244816	0.374436	0.138335	0.316294	0.277611	0.148618	0.423717	0.227664	0.219999	0.199775	0.371022	0.266842	0.324462	0.210323	0.265864	0.335404	0.306422	0.273482	0.262032	0.267724	0.327417	0.296546	0.476751 [...]
+U68666	0.394408	0.278594	0.450824	0.211525	0.295676	0.395265	0.046201	0.366734	0.331431	0.297115	0.387877	0.410471	0.336626	0.282358	0.450762	0.344388	0.304099	0.374967	0.581348	0.418534	0.146326	0.368536	0.373178	0.276712	0.347147	0.361088	0.379364	0.355952	0.301792	0.315503	0.301792	0.338989	0.363820	0.372000	0.315596	0.343141	0.372584	0.427271	0.339287	0.357324	0.343582	0.069962	0.346608	0.332052	0.309071	0.290961	0.069818	0.288885	0.294032	0.341037	0.306384	0.299483	0.359680	0.548910 [...]
+U68667	0.334519	0.219740	0.392626	0.295152	0.230975	0.288543	0.311238	0.310893	0.235348	0.059437	0.345365	0.365777	0.288885	0.112339	0.415506	0.247859	0.233701	0.321801	0.547458	0.382770	0.304099	0.313081	0.235404	0.092724	0.302594	0.301848	0.292637	0.298123	0.069377	0.065526	0.069377	0.276248	0.358283	0.317196	0.151832	0.263548	0.328257	0.400684	0.293373	0.276933	0.309669	0.288293	0.210404	0.186103	0.265629	0.103537	0.275102	0.108022	0.072655	0.259776	0.188057	0.000000	0.334262	0.474328 [...]
+U68668	0.338675	0.304895	0.320439	0.326489	0.275794	0.244797	0.330210	0.280732	0.301694	0.258909	0.316920	0.297661	0.276029	0.292349	0.354680	0.294295	0.243090	0.292250	0.549429	0.434402	0.305710	0.287632	0.333124	0.289247	0.289247	0.281992	0.288107	0.264212	0.283806	0.267206	0.283806	0.216474	0.364131	0.292250	0.288375	0.281069	0.313776	0.408920	0.271315	0.257233	0.297432	0.304895	0.288995	0.304931	0.267743	0.283636	0.313694	0.293104	0.273925	0.284371	0.283806	0.290628	0.324807	0.464442 [...]
+U68669	0.228291	0.274172	0.369357	0.370307	0.285761	0.271016	0.322928	0.271516	0.289364	0.260927	0.305537	0.313081	0.252555	0.269477	0.431301	0.309442	0.299700	0.258029	0.579391	0.404247	0.349472	0.261393	0.343048	0.261273	0.242726	0.313776	0.268841	0.225803	0.279210	0.298979	0.279210	0.240843	0.415147	0.219989	0.292910	0.343602	0.223615	0.396586	0.230433	0.247550	0.265476	0.312445	0.287145	0.305767	0.228495	0.269811	0.295607	0.266778	0.248019	0.298216	0.276470	0.288293	0.240200	0.502399 [...]
+U68670	0.376110	0.304876	0.371101	0.363330	0.284542	0.287662	0.376347	0.308489	0.307034	0.283756	0.362373	0.348877	0.326397	0.309890	0.383451	0.312378	0.286181	0.324259	0.521831	0.389343	0.363879	0.186858	0.342412	0.297284	0.288218	0.102059	0.269209	0.284692	0.319374	0.326138	0.319374	0.282868	0.375655	0.296070	0.313081	0.225600	0.309274	0.385499	0.320179	0.322618	0.335484	0.342727	0.316004	0.317492	0.298451	0.322772	0.319757	0.317089	0.310221	0.310681	0.336995	0.306348	0.381089	0.503515 [...]
+U68671	0.348863	0.304882	0.412218	0.176486	0.270212	0.305644	0.130465	0.326113	0.295465	0.290961	0.364001	0.362613	0.319889	0.258630	0.418254	0.300873	0.246349	0.323087	0.572514	0.425802	0.052616	0.351164	0.349965	0.269138	0.310323	0.336904	0.302518	0.303326	0.288634	0.297926	0.288634	0.331729	0.343375	0.336019	0.304099	0.338381	0.328012	0.401063	0.320023	0.315885	0.301630	0.110830	0.303306	0.325179	0.294295	0.289370	0.113423	0.266838	0.278102	0.323249	0.298639	0.304889	0.321861	0.501787 [...]
+U68672	0.282832	0.278803	0.322132	0.365609	0.240811	0.221866	0.332666	0.275578	0.248746	0.261273	0.315402	0.308671	0.268659	0.261430	0.444463	0.265057	0.220282	0.304099	0.539342	0.412570	0.339563	0.233137	0.322963	0.266778	0.265949	0.232616	0.264063	0.250230	0.267724	0.295428	0.267724	0.042634	0.365452	0.237874	0.277780	0.247505	0.239944	0.402262	0.263548	0.261314	0.246789	0.322318	0.265098	0.281978	0.246469	0.258995	0.294858	0.247703	0.246207	0.235229	0.281157	0.274683	0.311729	0.537303 [...]
+U68673	0.281554	0.190897	0.307547	0.237569	0.177895	0.224066	0.247630	0.251446	0.204695	0.055581	0.263187	0.290462	0.239481	0.084997	0.356467	0.202637	0.190897	0.258530	0.488821	0.375841	0.238267	0.236311	0.187499	0.090013	0.245116	0.236938	0.245116	0.240681	0.065259	0.057989	0.065259	0.247109	0.308382	0.259944	0.124372	0.241972	0.263911	0.363986	0.239481	0.230135	0.240487	0.214248	0.173505	0.190930	0.244067	0.096209	0.219740	0.087708	0.065259	0.203106	0.132372	0.018238	0.288563	0.433611 [...]
+U68674	0.289504	0.278803	0.325439	0.309315	0.239605	0.268462	0.316394	0.223786	0.261030	0.245143	0.302126	0.318679	0.210623	0.237879	0.419587	0.284985	0.205189	0.290741	0.537954	0.355843	0.298075	0.255426	0.313061	0.239932	0.203073	0.274376	0.248833	0.190537	0.243245	0.281859	0.243245	0.194237	0.362176	0.217256	0.259158	0.243570	0.216113	0.367269	0.210139	0.219612	0.188717	0.297339	0.281613	0.266003	0.178870	0.240008	0.269138	0.229291	0.224228	0.220617	0.259430	0.273784	0.274021	0.503515 [...]
+U68675	0.460089	0.379815	0.458067	0.378434	0.342481	0.385306	0.401870	0.347735	0.375748	0.358668	0.366244	0.411299	0.341185	0.355139	0.216643	0.372090	0.341185	0.329297	0.532074	0.143736	0.379425	0.382035	0.404247	0.354146	0.325695	0.338367	0.330447	0.322332	0.381290	0.362702	0.381290	0.402807	0.408715	0.402370	0.360697	0.323736	0.404720	0.119702	0.334577	0.326790	0.401357	0.384953	0.351941	0.368881	0.332139	0.368906	0.396051	0.359680	0.371898	0.374017	0.371463	0.394313	0.411749	0.307455 [...]
+U68676	0.290337	0.310886	0.338335	0.346768	0.280607	0.255062	0.333940	0.207419	0.246441	0.276043	0.272810	0.338335	0.228956	0.269591	0.368256	0.259145	0.244975	0.236270	0.500784	0.388888	0.332139	0.235687	0.345949	0.273550	0.201542	0.280815	0.218178	0.194716	0.270320	0.285826	0.270320	0.233863	0.350796	0.170447	0.278306	0.295586	0.162867	0.383504	0.215011	0.202061	0.255419	0.313511	0.248945	0.344178	0.219010	0.248274	0.319528	0.277547	0.263194	0.275042	0.224066	0.274028	0.278844	0.393227 [...]
+U68677	0.225920	0.281221	0.353510	0.331129	0.254808	0.230812	0.293754	0.240415	0.255062	0.262152	0.300845	0.321926	0.235012	0.243556	0.340607	0.266105	0.279720	0.228450	0.497875	0.368588	0.305782	0.247753	0.293125	0.262152	0.221974	0.272352	0.215580	0.220851	0.256579	0.249050	0.256579	0.208494	0.375330	0.239181	0.265297	0.284878	0.234409	0.358636	0.206640	0.224915	0.238912	0.291729	0.254379	0.307485	0.219312	0.255486	0.297491	0.253464	0.259021	0.287687	0.243037	0.250563	0.218895	0.411831 [...]
+U68678	0.353872	0.215933	0.389953	0.288419	0.207740	0.278924	0.278382	0.286170	0.243137	0.061388	0.332052	0.372984	0.268754	0.117427	0.394174	0.254827	0.216429	0.314094	0.520376	0.400684	0.281484	0.285616	0.202309	0.101302	0.283918	0.286170	0.279210	0.285469	0.095671	0.032241	0.095671	0.277621	0.373379	0.297976	0.127277	0.239857	0.302557	0.403362	0.276023	0.271956	0.292250	0.274214	0.204810	0.154579	0.264932	0.088720	0.264854	0.107095	0.083598	0.236460	0.156726	0.073795	0.307230	0.465820 [...]
+U68679	0.301641	0.310375	0.330669	0.385769	0.270341	0.248921	0.385499	0.214959	0.252461	0.292542	0.281814	0.307005	0.235348	0.312120	0.404462	0.287662	0.235818	0.272495	0.555709	0.434603	0.381438	0.282851	0.327279	0.296422	0.196585	0.283182	0.218715	0.194277	0.303332	0.348517	0.303332	0.234974	0.374116	0.138964	0.324586	0.271823	0.145693	0.429810	0.232616	0.228248	0.199030	0.372795	0.263031	0.331375	0.211615	0.277406	0.338395	0.314890	0.285014	0.255021	0.277363	0.338989	0.289954	0.477706 [...]
+U68680	0.318305	0.297172	0.333647	0.336467	0.279352	0.262032	0.346790	0.301197	0.283270	0.278520	0.313735	0.328498	0.311667	0.309846	0.376850	0.309116	0.266562	0.343117	0.534712	0.394174	0.354610	0.167124	0.357693	0.288293	0.276215	0.003141	0.257245	0.284021	0.315503	0.321609	0.315503	0.226805	0.384021	0.257172	0.311547	0.193118	0.259008	0.385499	0.306366	0.311729	0.276281	0.346439	0.313101	0.300788	0.298402	0.308705	0.329950	0.317777	0.304863	0.276383	0.310839	0.308594	0.353619	0.503515 [...]
+U68681	0.236932	0.287175	0.365609	0.363330	0.266663	0.259573	0.341037	0.263548	0.290176	0.261919	0.314147	0.340396	0.264176	0.270377	0.385769	0.303345	0.294656	0.258374	0.529364	0.416497	0.349097	0.267229	0.320851	0.273298	0.245844	0.281688	0.251570	0.247390	0.283055	0.303368	0.283055	0.239151	0.383119	0.246405	0.309357	0.301105	0.256240	0.409723	0.250170	0.256959	0.258452	0.333874	0.290876	0.291729	0.219189	0.301006	0.307959	0.282358	0.269419	0.301900	0.278042	0.289841	0.239642	0.460196 [...]
+U68682	0.387405	0.386427	0.420026	0.401870	0.363123	0.364826	0.428507	0.334715	0.373620	0.375302	0.386034	0.403978	0.357381	0.347187	0.237996	0.393039	0.364826	0.334715	0.488100	0.136010	0.402913	0.405848	0.410134	0.370033	0.322081	0.344338	0.311637	0.323899	0.367161	0.383533	0.367161	0.408545	0.448321	0.373620	0.377735	0.344754	0.408545	0.125402	0.341487	0.311637	0.380047	0.424679	0.370722	0.420331	0.348085	0.365457	0.432003	0.351139	0.371221	0.407437	0.357381	0.377735	0.393393	0.279752 [...]
+U68683	0.329749	0.307257	0.425707	0.181103	0.272278	0.323931	0.119702	0.333640	0.282904	0.288634	0.365508	0.385600	0.304889	0.253399	0.415733	0.294379	0.275561	0.330615	0.554368	0.411059	0.037903	0.348253	0.320298	0.278803	0.317527	0.327350	0.312946	0.307230	0.292447	0.288634	0.292447	0.321154	0.335749	0.335404	0.297057	0.314611	0.338085	0.397439	0.316920	0.320023	0.291729	0.104975	0.303300	0.291729	0.295087	0.282358	0.107562	0.259275	0.279506	0.312871	0.270897	0.293862	0.308943	0.504976 [...]
+U68684	0.306586	0.199943	0.321169	0.296007	0.164269	0.260186	0.310948	0.269374	0.197861	0.163915	0.287563	0.293066	0.259430	0.184423	0.416248	0.223870	0.116311	0.327987	0.502519	0.397402	0.304851	0.243627	0.238203	0.188051	0.251410	0.266067	0.265426	0.259907	0.183758	0.209844	0.183758	0.195624	0.338835	0.258020	0.209404	0.204251	0.261363	0.408615	0.271549	0.254985	0.241563	0.307105	0.202783	0.217779	0.246951	0.187608	0.287313	0.193667	0.163541	0.132969	0.213271	0.207032	0.305619	0.469464 [...]
+U68685	0.365423	0.313533	0.405119	0.116013	0.300336	0.309315	0.227713	0.349614	0.319374	0.292542	0.344149	0.348044	0.329101	0.282017	0.478115	0.334008	0.309357	0.360611	0.586500	0.410286	0.212203	0.358766	0.375352	0.276933	0.336148	0.367583	0.334619	0.329937	0.295744	0.326668	0.295744	0.348044	0.346279	0.355521	0.335561	0.350766	0.379082	0.415352	0.334876	0.322618	0.333010	0.185184	0.338989	0.315857	0.299159	0.280409	0.170628	0.277833	0.276712	0.365261	0.307129	0.323281	0.333145	0.569768 [...]
+U68686	0.322928	0.223156	0.371826	0.276453	0.191909	0.281513	0.300211	0.286451	0.225078	0.085685	0.333874	0.349992	0.257463	0.093482	0.386847	0.236499	0.218231	0.317777	0.522903	0.364001	0.286170	0.288785	0.211388	0.073795	0.275573	0.310075	0.288419	0.279986	0.079795	0.092710	0.079795	0.275120	0.355097	0.285469	0.134401	0.241633	0.301830	0.371596	0.268092	0.253764	0.282537	0.280475	0.182924	0.159261	0.253005	0.043762	0.259354	0.070985	0.061001	0.229921	0.147447	0.106638	0.294934	0.485847 [...]
+U68687	0.412271	0.313533	0.408262	0.065911	0.357920	0.386580	0.228897	0.367366	0.367276	0.271435	0.389291	0.375794	0.360148	0.262636	0.442034	0.344247	0.334817	0.437586	0.597979	0.406702	0.213348	0.359238	0.388467	0.262900	0.349656	0.403898	0.375748	0.349886	0.281484	0.318118	0.281484	0.387518	0.381696	0.405765	0.309357	0.402710	0.416988	0.403588	0.361088	0.350376	0.406654	0.180258	0.335656	0.301609	0.319116	0.277406	0.160611	0.257463	0.270760	0.374644	0.316294	0.292099	0.371022	0.561599 [...]
+U68688	0.323601	0.316704	0.372984	0.382445	0.336085	0.273784	0.390245	0.221892	0.294195	0.286451	0.291580	0.349854	0.233168	0.302582	0.386206	0.295536	0.274451	0.257572	0.561908	0.435867	0.373578	0.278247	0.350106	0.295676	0.207032	0.304857	0.207984	0.198309	0.307927	0.330148	0.307927	0.270760	0.395785	0.157775	0.310948	0.350243	0.163469	0.426170	0.219999	0.222823	0.236621	0.359680	0.294195	0.329525	0.216845	0.274415	0.339877	0.313301	0.285014	0.308667	0.264831	0.333145	0.301811	0.475087 [...]
diff --git a/lib/galaxy/datatypes/test/oxli_countgraph.oxlicg b/lib/galaxy/datatypes/test/oxli_countgraph.oxlicg
new file mode 100644
index 0000000..0884ecf
Binary files /dev/null and b/lib/galaxy/datatypes/test/oxli_countgraph.oxlicg differ
diff --git a/lib/galaxy/datatypes/test/oxli_graphlabels.oxligl b/lib/galaxy/datatypes/test/oxli_graphlabels.oxligl
new file mode 100644
index 0000000..f917fc9
Binary files /dev/null and b/lib/galaxy/datatypes/test/oxli_graphlabels.oxligl differ
diff --git a/lib/galaxy/datatypes/test/oxli_nodegraph.oxling b/lib/galaxy/datatypes/test/oxli_nodegraph.oxling
new file mode 100644
index 0000000..3d18b98
Binary files /dev/null and b/lib/galaxy/datatypes/test/oxli_nodegraph.oxling differ
diff --git a/lib/galaxy/datatypes/test/oxli_stoptags.oxlist b/lib/galaxy/datatypes/test/oxli_stoptags.oxlist
new file mode 100644
index 0000000..5c40a16
Binary files /dev/null and b/lib/galaxy/datatypes/test/oxli_stoptags.oxlist differ
diff --git a/lib/galaxy/datatypes/test/oxli_subset.oxliss b/lib/galaxy/datatypes/test/oxli_subset.oxliss
new file mode 100644
index 0000000..afc6932
Binary files /dev/null and b/lib/galaxy/datatypes/test/oxli_subset.oxliss differ
diff --git a/lib/galaxy/datatypes/test/oxli_tagset.oxlits b/lib/galaxy/datatypes/test/oxli_tagset.oxlits
new file mode 100644
index 0000000..6872683
Binary files /dev/null and b/lib/galaxy/datatypes/test/oxli_tagset.oxlits differ
diff --git a/lib/galaxy/datatypes/test/q.fps b/lib/galaxy/datatypes/test/q.fps
new file mode 100644
index 0000000..d6499c2
--- /dev/null
+++ b/lib/galaxy/datatypes/test/q.fps
@@ -0,0 +1,7 @@
+#FPS1
+#num_bits=881
+#type=CACTVS-E_SCREEN/1.0 extended=2
+#software=CACTVS/unknown
+#source=CID_28434379.sdf
+#date=2012-02-03T13:08:39
+07ce04000000000000000000000000000080060000000c060000000000001a800f0000780008100000101487e9608c0bed3248000580644626204101b4844805901b041c2e19511e45039b8b2924101609401b13e40800000000000100200000040080000010000002000000000000	28434379
diff --git a/lib/galaxy/datatypes/test/sequence.csfasta b/lib/galaxy/datatypes/test/sequence.csfasta
new file mode 100644
index 0000000..f06e604
--- /dev/null
+++ b/lib/galaxy/datatypes/test/sequence.csfasta
@@ -0,0 +1,21 @@
+#comment
+>2_14_26_F3,-1282216.0
+T011213122200221123032111221021210131332222101
+>2_14_192_F3,-1383225.3
+T110021221100310030120022032222111321022112223
+>2_14_233_F3,-1082751.1
+T011001332311121212312022310203312201132111223
+>2_14_294_F3,-687179.1
+T213012132300000021323212232103300033102330332
+>2_14_463_F3
+T132032030200202202003211302222202230022110222
+>2_14_578_F3
+T131013032310120222321211010130110221312110222
+>2_14_956_F3,-1625621.2,-1625360.0
+T210213030022120032001012021321220011232201231
+>2_14_988_F3,1687674.3
+T221202031310031102033002302330301301010023133
+>2_14_1028_F3,754444.2
+T112230301101101120201331111302110031102111321
+>2_14_1035_F3,-1570954.1
+T003033103303232110201102100032203301023110332
diff --git a/lib/galaxy/datatypes/test/sequence.fasta b/lib/galaxy/datatypes/test/sequence.fasta
new file mode 100644
index 0000000..c9ecbb6
--- /dev/null
+++ b/lib/galaxy/datatypes/test/sequence.fasta
@@ -0,0 +1,2 @@
+>hg17
+gtttgccatcttttgctgctctagggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTAAAACTTCCC-------------------------------AAATACT-GCCACTGATGTCCTG-----ATGGAGGTA-------TGAA-------------------AACATCCACTAAAATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGGGAGATATTTGGggaaatt---ttgtatagactagctttca [...]
diff --git a/lib/galaxy/datatypes/test/sequence.maf b/lib/galaxy/datatypes/test/sequence.maf
new file mode 100644
index 0000000..0a3956b
--- /dev/null
+++ b/lib/galaxy/datatypes/test/sequence.maf
@@ -0,0 +1,8 @@
+##maf version=1
+a score=581086.0
+s human.chr7                 115444712 27 + 158628139 ATGGACCTGGAAAACAAAGTGAAGAAG 
+s colobus_monkey.1               49230 27 +   1990688 ATGGACCTGGAAAACAAAGTGAAGAAG 
+s baboon.1                       91916 27 +   1730495 ATGGACCTGGAAAACAAAGTGAAGAAG 
+s macaque.SCAFFOLD85017         143691 27 -    166989 ATGGACCTGGAAAACAAAGTGAAGAAG 
+s dusky_titi.1                   91348 27 +   2096432 ATGGACCTGGAAAACAAAGTGAAGAAG 
+s owl_monkey.1                  132506 27 +   2053814 ATGGACCTGGAAAACAAAGTGAAGAAG 
diff --git a/lib/galaxy/datatypes/test/sequence.qual b/lib/galaxy/datatypes/test/sequence.qual
new file mode 100644
index 0000000..1a79766
--- /dev/null
+++ b/lib/galaxy/datatypes/test/sequence.qual
@@ -0,0 +1,9 @@
+#comment
+>920_14_164_F3
+4 13 18 14 13 14 16 19 22 16 8 16 9 16 6 5 13 13 10 4 6 8 11 6 5 2 20 14 10 3 2 28 6 6 24 
+>920_14_977_F3
+8 10 2 2 2 2 4 3 4 7 3 2 5 2 7 4 5 2 3 3 6 3 3 6 9 2 2 10 3 4 2 2 2 5 5 
+>920_15_315_F3
+7 7 2 2 2 2 3 2 2 2 4 2 2 4 2 3 2 2 2 2 3 4 4 2 2 5 2 3 2 2 2 3 2 2 6 
+>920_16_347_F3
+6 7 3 2 4 3 2 2 2 2 2 2 2 3 3 3 3 2 3 2 3 2 5 3 4 4 2 4 4 3 3 2 4 2 2 
diff --git a/lib/galaxy/datatypes/test/sequence.qual454 b/lib/galaxy/datatypes/test/sequence.qual454
new file mode 100644
index 0000000..56dfc6d
--- /dev/null
+++ b/lib/galaxy/datatypes/test/sequence.qual454
@@ -0,0 +1,16 @@
+>EYKX4VC04IWAEA length=68 xy=3531_0528 region=4 run=R_2007_11_07_16_15_57_
+22 13 9 6 4 3 2 2 1 1 1 1 24 44 33 23 16 11 7 2 28 33 23 18 28 27 27 28 20 21 42 35 21 6 24 25 31 21 28 27 41 34 15 28 28 27 28 28 33 24 27 28 28 24 27 36 27 28 28 28
+28 28 36 30 8 34 25 18
+>EYKX4VC04JKOGH length=48 xy=3808_3903 region=4 run=R_2007_11_07_16_15_57_
+28 28 27 28 38 31 10 28 28 27 27 34 25 28 24 26 27 28 27 37 29 34 25 31 21 28 21 36 28 31 20 24 27 37 28 28 34 27 3 34 25 24 28 28 26 28 35 28
+>EYKX4VC04JIUVK length=84 xy=3788_0830 region=4 run=R_2007_11_07_16_15_57_
+29 20 14 11 8 6 3 1 25 27 25 28 28 27 28 27 28 28 36 28 27 28 36 29 7 28 28 28 27 27 27 35 26 35 26 27 36 28 28 28 38 32 11 28 36 28 27 26 35 25 28 38 31 11 27 28 37 28 27 27
+28 36 29 8 33 24 41 34 19 3 26 28 28 28 35 26 36 29 8 38 32 11 28 28
+>EYKX4VC04JWDRY length=78 xy=3942_1068 region=4 run=R_2007_11_07_16_15_57_
+36 24 14 5 27 20 28 27 28 32 22 28 27 43 36 23 11 27 28 28 28 32 23 36 27 28 28 26 38 32 11 34 25 27 43 36 23 11 38 31 11 37 28 28 28 27 28 30 20 28 32 22 28 36 27 37 30 9 27 28
+28 27 28 42 35 20 5 28 28 28 35 26 27 27 26 39 32 12
+>EYKX4VC04JWMUW length=55 xy=3945_0550 region=4 run=R_2007_11_07_16_15_57_
+36 24 14 4 28 17 34 25 35 25 31 20 28 28 36 27 28 28 24 27 28 28 37 28 27 27 35 25 31 21 27 39 32 12 28 36 28 28 26 27 28 27 26 28 42 35 20 6 28 27 28 28 28 28 28
+>EYKX4VC04JH4RG length=85 xy=3779_3850 region=4 run=R_2007_11_07_16_15_57_
+37 28 35 26 38 31 10 27 37 28 28 38 31 10 27 35 25 25 28 28 28 28 28 28 28 28 27 28 33 23 28 32 22 35 25 31 20 34 25 31 21 26 28 27 26 26 15 36 29 7 27 27 24 36 27 28 37 28 36 28
+27 28 28 28 37 28 28 40 34 14 37 28 28 26 28 36 28 26 28 37 28 28 28 28 27
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/sequence.qualsolid b/lib/galaxy/datatypes/test/sequence.qualsolid
new file mode 100644
index 0000000..f882c7f
--- /dev/null
+++ b/lib/galaxy/datatypes/test/sequence.qualsolid
@@ -0,0 +1,12 @@
+>946_21_302_F3
+2 10 2 2 4 2 21 2 2 4 4 17 5 2 5 2 11 4 2 2 2 10 7 3 15 2 2 2 3 19 3 2 6 3 2 
+>946_21_659_F3
+3 31 3 2 2 2 34 3 2 2 2 31 2 2 3 4 31 8 3 2 2 30 4 8 3 3 2 2 6 9 4 4 6 2 2 
+>946_21_1071_F3
+5 5 2 2 2 8 5 3 2 3 7 7 2 3 4 6 5 2 2 2 5 5 2 2 2 3 8 2 3 3 3 8 2 3 2 
+>946_21_1115_F3
+21 5 2 8 13 31 6 2 17 24 10 27 4 21 29 8 20 2 11 21 13 24 5 5 6 24 31 2 13 6 22 17 6 27 10 
+>946_21_1218_F3
+11 21 2 13 13 16 27 16 19 27 22 28 14 26 24 23 29 10 15 13 6 4 7 16 26 22 11 6 16 22 21 6 4 7 21 
+>946_21_1232_F3
+17 16 2 28 21 31 15 16 10 11 8 20 6 5 18 6 13 23 7 13 4 12 19 8 6 9 10 19 7 10 6 10 20 14 8
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/tblastn_four_human_vs_rhodopsin.xml b/lib/galaxy/datatypes/test/tblastn_four_human_vs_rhodopsin.xml
new file mode 100644
index 0000000..358862f
--- /dev/null
+++ b/lib/galaxy/datatypes/test/tblastn_four_human_vs_rhodopsin.xml
@@ -0,0 +1,741 @@
+<?xml version="1.0"?>
+<!DOCTYPE BlastOutput PUBLIC "-//NCBI//NCBI BlastOutput/EN" "http://www.ncbi.nlm.nih.gov/dtd/NCBI_BlastOutput.dtd">
+<BlastOutput>
+  <BlastOutput_program>tblastn</BlastOutput_program>
+  <BlastOutput_version>TBLASTN 2.2.31+</BlastOutput_version>
+  <BlastOutput_reference>Stephen F. Altschul, Thomas L. Madden, Alejandro A. Sch&auml;ffer, Jinghui Zhang, Zheng Zhang, Webb Miller, and David J. Lipman (1997), "Gapped BLAST and PSI-BLAST: a new generation of protein database search programs", Nucleic Acids Res. 25:3389-3402.</BlastOutput_reference>
+  <BlastOutput_db></BlastOutput_db>
+  <BlastOutput_query-ID>Query_1</BlastOutput_query-ID>
+  <BlastOutput_query-def>sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44 OS=Homo sapiens GN=ERP44 PE=1 SV=1</BlastOutput_query-def>
+  <BlastOutput_query-len>406</BlastOutput_query-len>
+  <BlastOutput_param>
+    <Parameters>
+      <Parameters_matrix>BLOSUM80</Parameters_matrix>
+      <Parameters_expect>1e-10</Parameters_expect>
+      <Parameters_gap-open>10</Parameters_gap-open>
+      <Parameters_gap-extend>1</Parameters_gap-extend>
+      <Parameters_filter>F</Parameters_filter>
+    </Parameters>
+  </BlastOutput_param>
+<BlastOutput_iterations>
+<Iteration>
+  <Iteration_iter-num>1</Iteration_iter-num>
+  <Iteration_query-ID>Query_1</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44 OS=Homo sapiens GN=ERP44 PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>406</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>19</Statistics_hsp-len>
+      <Statistics_eff-space>127710</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>2</Iteration_iter-num>
+  <Iteration_query-ID>Query_1</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44 OS=Homo sapiens GN=ERP44 PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>406</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>19</Statistics_hsp-len>
+      <Statistics_eff-space>127710</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>3</Iteration_iter-num>
+  <Iteration_query-ID>Query_1</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44 OS=Homo sapiens GN=ERP44 PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>406</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>19</Statistics_hsp-len>
+      <Statistics_eff-space>127710</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>4</Iteration_iter-num>
+  <Iteration_query-ID>Query_1</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44 OS=Homo sapiens GN=ERP44 PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>406</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>19</Statistics_hsp-len>
+      <Statistics_eff-space>127710</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>5</Iteration_iter-num>
+  <Iteration_query-ID>Query_1</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44 OS=Homo sapiens GN=ERP44 PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>406</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>19</Statistics_hsp-len>
+      <Statistics_eff-space>127710</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>6</Iteration_iter-num>
+  <Iteration_query-ID>Query_1</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9BS26|ERP44_HUMAN Endoplasmic reticulum resident protein 44 OS=Homo sapiens GN=ERP44 PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>406</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>19</Statistics_hsp-len>
+      <Statistics_eff-space>127710</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>7</Iteration_iter-num>
+  <Iteration_query-ID>Query_2</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens GN=BMP2K PE=1 SV=2</Iteration_query-def>
+  <Iteration_query-len>1161</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>23</Statistics_hsp-len>
+      <Statistics_eff-space>370988</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>8</Iteration_iter-num>
+  <Iteration_query-ID>Query_2</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens GN=BMP2K PE=1 SV=2</Iteration_query-def>
+  <Iteration_query-len>1161</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>23</Statistics_hsp-len>
+      <Statistics_eff-space>370988</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>9</Iteration_iter-num>
+  <Iteration_query-ID>Query_2</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens GN=BMP2K PE=1 SV=2</Iteration_query-def>
+  <Iteration_query-len>1161</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>23</Statistics_hsp-len>
+      <Statistics_eff-space>370988</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>10</Iteration_iter-num>
+  <Iteration_query-ID>Query_2</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens GN=BMP2K PE=1 SV=2</Iteration_query-def>
+  <Iteration_query-len>1161</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>23</Statistics_hsp-len>
+      <Statistics_eff-space>370988</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>11</Iteration_iter-num>
+  <Iteration_query-ID>Query_2</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens GN=BMP2K PE=1 SV=2</Iteration_query-def>
+  <Iteration_query-len>1161</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>23</Statistics_hsp-len>
+      <Statistics_eff-space>370988</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>12</Iteration_iter-num>
+  <Iteration_query-ID>Query_2</Iteration_query-ID>
+  <Iteration_query-def>sp|Q9NSY1|BMP2K_HUMAN BMP-2-inducible protein kinase OS=Homo sapiens GN=BMP2K PE=1 SV=2</Iteration_query-def>
+  <Iteration_query-len>1161</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>23</Statistics_hsp-len>
+      <Statistics_eff-space>370988</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>13</Iteration_iter-num>
+  <Iteration_query-ID>Query_3</Iteration_query-ID>
+  <Iteration_query-def>sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1 SV=4</Iteration_query-def>
+  <Iteration_query-len>1382</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>24</Statistics_hsp-len>
+      <Statistics_eff-space>441350</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>14</Iteration_iter-num>
+  <Iteration_query-ID>Query_3</Iteration_query-ID>
+  <Iteration_query-def>sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1 SV=4</Iteration_query-def>
+  <Iteration_query-len>1382</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>24</Statistics_hsp-len>
+      <Statistics_eff-space>441350</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>15</Iteration_iter-num>
+  <Iteration_query-ID>Query_3</Iteration_query-ID>
+  <Iteration_query-def>sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1 SV=4</Iteration_query-def>
+  <Iteration_query-len>1382</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>24</Statistics_hsp-len>
+      <Statistics_eff-space>441350</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>16</Iteration_iter-num>
+  <Iteration_query-ID>Query_3</Iteration_query-ID>
+  <Iteration_query-def>sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1 SV=4</Iteration_query-def>
+  <Iteration_query-len>1382</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>24</Statistics_hsp-len>
+      <Statistics_eff-space>441350</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>17</Iteration_iter-num>
+  <Iteration_query-ID>Query_3</Iteration_query-ID>
+  <Iteration_query-def>sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1 SV=4</Iteration_query-def>
+  <Iteration_query-len>1382</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>24</Statistics_hsp-len>
+      <Statistics_eff-space>441350</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>18</Iteration_iter-num>
+  <Iteration_query-ID>Query_3</Iteration_query-ID>
+  <Iteration_query-def>sp|P06213|INSR_HUMAN Insulin receptor OS=Homo sapiens GN=INSR PE=1 SV=4</Iteration_query-def>
+  <Iteration_query-len>1382</Iteration_query-len>
+<Iteration_hits>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>24</Statistics_hsp-len>
+      <Statistics_eff-space>441350</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+  <Iteration_message>No hits found</Iteration_message>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>19</Iteration_iter-num>
+  <Iteration_query-ID>Query_4</Iteration_query-ID>
+  <Iteration_query-def>sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>348</Iteration_query-len>
+<Iteration_hits>
+<Hit>
+  <Hit_num>1</Hit_num>
+  <Hit_id>Subject_1</Hit_id>
+  <Hit_def>gi|57163782|ref|NM_001009242.1| Felis catus rhodopsin (RHO), mRNA</Hit_def>
+  <Hit_accession>Subject_1</Hit_accession>
+  <Hit_len>1047</Hit_len>
+  <Hit_hsps>
+    <Hsp>
+      <Hsp_num>1</Hsp_num>
+      <Hsp_bit-score>732.393</Hsp_bit-score>
+      <Hsp_score>1689</Hsp_score>
+      <Hsp_evalue>0</Hsp_evalue>
+      <Hsp_query-from>1</Hsp_query-from>
+      <Hsp_query-to>348</Hsp_query-to>
+      <Hsp_hit-from>1</Hsp_hit-from>
+      <Hsp_hit-to>1044</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>1</Hsp_hit-frame>
+      <Hsp_identity>336</Hsp_identity>
+      <Hsp_positive>343</Hsp_positive>
+      <Hsp_gaps>0</Hsp_gaps>
+      <Hsp_align-len>348</Hsp_align-len>
+      <Hsp_qseq>MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIPEGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAIYNPVIYIMMNKQFRNCMLTTICCGKNPLGDDEASATVSKTETSQVAPA</Hsp_qseq>
+      <Hsp_hseq>MNGTEGPNFYVPFSNKTGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVFGGFTTTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLVGWSRYIPEGMQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIVIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTLPAFFAKSSSIYNPVIYIMMNKQFRNCMLTTLCCGKNPLGDDEASTTGSKTETSQVAPA</Hsp_hseq>
+      <Hsp_midline>MNGTEGPNFYVPFSN TGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMV GGFT+TLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPL GWSRYIPEG+QCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMI+IFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMT+PAFFAKS++IYNPVIYIMMNKQFRNCMLTT+CCGKNPLGDDEAS T SKTETSQVAPA</Hsp_midline>
+    </Hsp>
+  </Hit_hsps>
+</Hit>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>18</Statistics_hsp-len>
+      <Statistics_eff-space>109230</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>20</Iteration_iter-num>
+  <Iteration_query-ID>Query_4</Iteration_query-ID>
+  <Iteration_query-def>sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>348</Iteration_query-len>
+<Iteration_hits>
+<Hit>
+  <Hit_num>1</Hit_num>
+  <Hit_id>Subject_2</Hit_id>
+  <Hit_def>gi|2734705|gb|U59921.1|BBU59921 Bufo bufo rhodopsin mRNA, complete cds</Hit_def>
+  <Hit_accession>Subject_2</Hit_accession>
+  <Hit_len>1574</Hit_len>
+  <Hit_hsps>
+    <Hsp>
+      <Hsp_num>1</Hsp_num>
+      <Hsp_bit-score>646.12</Hsp_bit-score>
+      <Hsp_score>1489</Hsp_score>
+      <Hsp_evalue>0</Hsp_evalue>
+      <Hsp_query-from>1</Hsp_query-from>
+      <Hsp_query-to>341</Hsp_query-to>
+      <Hsp_hit-from>42</Hsp_hit-from>
+      <Hsp_hit-to>1067</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>3</Hsp_hit-frame>
+      <Hsp_identity>290</Hsp_identity>
+      <Hsp_positive>320</Hsp_positive>
+      <Hsp_gaps>1</Hsp_gaps>
+      <Hsp_align-len>342</Hsp_align-len>
+      <Hsp_qseq>MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIPEGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAIYNPVIYIMMNKQFRNCMLTTICCGKNPLGDDEA-SATVSKTE</Hsp_qseq>
+      <Hsp_hseq>MNGTEGPNFYIPMSNKTGVVRSPFEYPQYYLAEPWQYSILCAYMFLLILLGFPINFMTLYVTIQHKKLRTPLNYILLNLAFANHFMVLCGFTVTMYSSMNGYFILGATGCYVEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFSENHAVMGVAFTWIMALSCAVPPLLGWSRYIPEGMQCSCGVDYYTLKPEVNNESFVIYMFVVHFTIPLIIIFFCYGRLVCTVKEAAAQQQESATTQKAEKEVTRMVIIMVVFFLICWVPYASVAFFIFSNQGSEFGPIFMTVPAFFAKSSSIYNPVIYIMLNKQFRNCMITTLCCGKNPFGEDDASSAATSKTE</Hsp_hseq>
+      <Hsp_midline>MNGTEGPNFY+P SN TGVVRSPFEYPQYYLAEPWQ+S+L AYMFLLI+LGFPINF+TLYVT+QHKKLRTPLNYILLNLA A+ FMVL GFT T+Y+S+ GYF+ G TGC +EGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRF ENHA+MGVAFTW+MAL+CA PPL GWSRYIPEG+QCSCG+DYYTLKPEVNNESFVIYMFVVHFTIP+IIIFFCYG+LV TVKEAAAQQQESATTQKAEKEVTRMVIIMV+ FLICWVPYASVAF+IF+ QGS FGPIFMT+PAFFAKS++IYNPVIYIM+NKQFRNCM+TT+CCGKNP G+D+A SA  SKTE</Hsp_midline>
+    </Hsp>
+  </Hit_hsps>
+</Hit>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>18</Statistics_hsp-len>
+      <Statistics_eff-space>109230</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>21</Iteration_iter-num>
+  <Iteration_query-ID>Query_4</Iteration_query-ID>
+  <Iteration_query-def>sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>348</Iteration_query-len>
+<Iteration_hits>
+<Hit>
+  <Hit_num>1</Hit_num>
+  <Hit_id>Subject_3</Hit_id>
+  <Hit_def>gi|283855845|gb|GQ290303.1| Cynopterus brachyotis voucher 20020434 rhodopsin (RHO) gene, exons 1 through 5 and partial cds</Hit_def>
+  <Hit_accession>Subject_3</Hit_accession>
+  <Hit_len>4301</Hit_len>
+  <Hit_hsps>
+    <Hsp>
+      <Hsp_num>1</Hsp_num>
+      <Hsp_bit-score>151.343</Hsp_bit-score>
+      <Hsp_score>342</Hsp_score>
+      <Hsp_evalue>1.39567e-72</Hsp_evalue>
+      <Hsp_query-from>239</Hsp_query-from>
+      <Hsp_query-to>312</Hsp_query-to>
+      <Hsp_hit-from>3147</Hsp_hit-from>
+      <Hsp_hit-to>3368</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>3</Hsp_hit-frame>
+      <Hsp_identity>69</Hsp_identity>
+      <Hsp_positive>73</Hsp_positive>
+      <Hsp_gaps>0</Hsp_gaps>
+      <Hsp_align-len>74</Hsp_align-len>
+      <Hsp_qseq>ESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAIYNPVIYIMMNKQ</Hsp_qseq>
+      <Hsp_hseq>ESATTQKAEKEVTRMVIIMVIAFLICWLPYAGVAFYIFTHQGSNFGPIFMTLPAFFAKSSSIYNPVIYIMMNKQ</Hsp_hseq>
+      <Hsp_midline>ESATTQKAEKEVTRMVIIMVIAFLICW+PYA VAFYIFTHQGSNFGPIFMT+PAFFAKS++IYNPVIYIMMNKQ</Hsp_midline>
+    </Hsp>
+    <Hsp>
+      <Hsp_num>2</Hsp_num>
+      <Hsp_bit-score>126.324</Hsp_bit-score>
+      <Hsp_score>284</Hsp_score>
+      <Hsp_evalue>1.39567e-72</Hsp_evalue>
+      <Hsp_query-from>177</Hsp_query-from>
+      <Hsp_query-to>235</Hsp_query-to>
+      <Hsp_hit-from>2855</Hsp_hit-from>
+      <Hsp_hit-to>3031</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>2</Hsp_hit-frame>
+      <Hsp_identity>54</Hsp_identity>
+      <Hsp_positive>57</Hsp_positive>
+      <Hsp_gaps>0</Hsp_gaps>
+      <Hsp_align-len>59</Hsp_align-len>
+      <Hsp_qseq>RYIPEGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAA</Hsp_qseq>
+      <Hsp_hseq>RYIPEGMQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIVIFFCYGQLVFTVKEVRS</Hsp_hseq>
+      <Hsp_midline>RYIPEG+QCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMI+IFFCYGQLVFTVKE  +</Hsp_midline>
+    </Hsp>
+    <Hsp>
+      <Hsp_num>3</Hsp_num>
+      <Hsp_bit-score>229.42</Hsp_bit-score>
+      <Hsp_score>523</Hsp_score>
+      <Hsp_evalue>9.34154e-67</Hsp_evalue>
+      <Hsp_query-from>11</Hsp_query-from>
+      <Hsp_query-to>121</Hsp_query-to>
+      <Hsp_hit-from>1</Hsp_hit-from>
+      <Hsp_hit-to>333</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>1</Hsp_hit-frame>
+      <Hsp_identity>107</Hsp_identity>
+      <Hsp_positive>109</Hsp_positive>
+      <Hsp_gaps>0</Hsp_gaps>
+      <Hsp_align-len>111</Hsp_align-len>
+      <Hsp_qseq>VPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGG</Hsp_qseq>
+      <Hsp_hseq>VPFSNKTGVVRSPFEHPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVFGGFTTTLYTSLHGYFVFGPTGCNLEGFFATLGG</Hsp_hseq>
+      <Hsp_midline>VPFSN TGVVRSPFE+PQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMV GGFT+TLYTSLHGYFVFGPTGCNLEGFFATLGG</Hsp_midline>
+    </Hsp>
+    <Hsp>
+      <Hsp_num>4</Hsp_num>
+      <Hsp_bit-score>122.873</Hsp_bit-score>
+      <Hsp_score>276</Hsp_score>
+      <Hsp_evalue>1.03783e-32</Hsp_evalue>
+      <Hsp_query-from>119</Hsp_query-from>
+      <Hsp_query-to>177</Hsp_query-to>
+      <Hsp_hit-from>1404</Hsp_hit-from>
+      <Hsp_hit-to>1580</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>3</Hsp_hit-frame>
+      <Hsp_identity>55</Hsp_identity>
+      <Hsp_positive>56</Hsp_positive>
+      <Hsp_gaps>0</Hsp_gaps>
+      <Hsp_align-len>59</Hsp_align-len>
+      <Hsp_qseq>LGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSR</Hsp_qseq>
+      <Hsp_hseq>LAGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGLALTWVMALACAAPPLVGWSR</Hsp_hseq>
+      <Hsp_midline>L GEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMG+A TWVMALACAAPPL GWSR</Hsp_midline>
+    </Hsp>
+    <Hsp>
+      <Hsp_num>5</Hsp_num>
+      <Hsp_bit-score>57.7368</Hsp_bit-score>
+      <Hsp_score>125</Hsp_score>
+      <Hsp_evalue>1.50808e-12</Hsp_evalue>
+      <Hsp_query-from>312</Hsp_query-from>
+      <Hsp_query-to>337</Hsp_query-to>
+      <Hsp_hit-from>4222</Hsp_hit-from>
+      <Hsp_hit-to>4299</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>1</Hsp_hit-frame>
+      <Hsp_identity>23</Hsp_identity>
+      <Hsp_positive>24</Hsp_positive>
+      <Hsp_gaps>0</Hsp_gaps>
+      <Hsp_align-len>26</Hsp_align-len>
+      <Hsp_qseq>QFRNCMLTTICCGKNPLGDDEASATV</Hsp_qseq>
+      <Hsp_hseq>QFRNCMLTTLCCGKNPLGDDEASTTA</Hsp_hseq>
+      <Hsp_midline>QFRNCMLTT+CCGKNPLGDDEAS T </Hsp_midline>
+    </Hsp>
+  </Hit_hsps>
+</Hit>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>18</Statistics_hsp-len>
+      <Statistics_eff-space>109230</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>22</Iteration_iter-num>
+  <Iteration_query-ID>Query_4</Iteration_query-ID>
+  <Iteration_query-def>sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>348</Iteration_query-len>
+<Iteration_hits>
+<Hit>
+  <Hit_num>1</Hit_num>
+  <Hit_id>Subject_4</Hit_id>
+  <Hit_def>gi|283855822|gb|GQ290312.1| Myotis ricketti voucher GQX10 rhodopsin (RHO) mRNA, partial cds</Hit_def>
+  <Hit_accession>Subject_4</Hit_accession>
+  <Hit_len>983</Hit_len>
+  <Hit_hsps>
+    <Hsp>
+      <Hsp_num>1</Hsp_num>
+      <Hsp_bit-score>658.198</Hsp_bit-score>
+      <Hsp_score>1517</Hsp_score>
+      <Hsp_evalue>0</Hsp_evalue>
+      <Hsp_query-from>11</Hsp_query-from>
+      <Hsp_query-to>336</Hsp_query-to>
+      <Hsp_hit-from>1</Hsp_hit-from>
+      <Hsp_hit-to>978</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>1</Hsp_hit-frame>
+      <Hsp_identity>310</Hsp_identity>
+      <Hsp_positive>322</Hsp_positive>
+      <Hsp_gaps>0</Hsp_gaps>
+      <Hsp_align-len>326</Hsp_align-len>
+      <Hsp_qseq>VPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIPEGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAIYNPVIYIMMNKQFRNCMLTTICCGKNPLGDDEASAT</Hsp_qseq>
+      <Hsp_hseq>VPFSNKTGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVANLFMVFGGFTTTLYTSMHGYFVFGATGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGLAFTWVMALACAAPPLAGWSRYIPEGMQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIVIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVVAFLICWLPYASVAFYIFTHQGSNFGPVFMTIPAFFAKSSSIYNPVIYIMMNKQFRNCMLTTLCCGKNPLGDDEASTT</Hsp_hseq>
+      <Hsp_midline>VPFSN TGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVA+LFMV GGFT+TLYTS+HGYFVFG TGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMG+AFTWVMALACAAPPLAGWSRYIPEG+QCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMI+IFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMV+AFLICW+PYASVAFYIFTHQGSNFGP+FMTIPAFFAKS++IYNPVIYIMMNKQFRNCMLTT+CCGKNPLGDDEAS T</Hsp_midline>
+    </Hsp>
+  </Hit_hsps>
+</Hit>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>18</Statistics_hsp-len>
+      <Statistics_eff-space>109230</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>23</Iteration_iter-num>
+  <Iteration_query-ID>Query_4</Iteration_query-ID>
+  <Iteration_query-def>sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>348</Iteration_query-len>
+<Iteration_hits>
+<Hit>
+  <Hit_num>1</Hit_num>
+  <Hit_id>Subject_5</Hit_id>
+  <Hit_def>gi|18148870|dbj|AB062417.1| Synthetic construct Bos taurus gene for rhodopsin, complete cds</Hit_def>
+  <Hit_accession>Subject_5</Hit_accession>
+  <Hit_len>1047</Hit_len>
+  <Hit_hsps>
+    <Hsp>
+      <Hsp_num>1</Hsp_num>
+      <Hsp_bit-score>711.256</Hsp_bit-score>
+      <Hsp_score>1640</Hsp_score>
+      <Hsp_evalue>0</Hsp_evalue>
+      <Hsp_query-from>1</Hsp_query-from>
+      <Hsp_query-to>348</Hsp_query-to>
+      <Hsp_hit-from>1</Hsp_hit-from>
+      <Hsp_hit-to>1044</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>1</Hsp_hit-frame>
+      <Hsp_identity>325</Hsp_identity>
+      <Hsp_positive>337</Hsp_positive>
+      <Hsp_gaps>0</Hsp_gaps>
+      <Hsp_align-len>348</Hsp_align-len>
+      <Hsp_qseq>MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIPEGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAIYNPVIYIMMNKQFRNCMLTTICCGKNPLGDDEASATVSKTETSQVAPA</Hsp_qseq>
+      <Hsp_hseq>MNGTEGPNFYVPFSNKTGVVRSPFEAPQYYLAEPWQFSMLAAYMFLLIMLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVFGGFTTTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLVGWSRYIPEGMQCSCGIDYYTPHEETNNESFVIYMFVVHFIIPLIVIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWLPYAGVAFYIFTHQGSDFGPIFMTIPAFFAKTSAVYNPVIYIMMNKQFRNCMVTTLCCGKNPLGDDEASTTVSKTETSQVAPA</Hsp_hseq>
+      <Hsp_midline>MNGTEGPNFYVPFSN TGVVRSPFE PQYYLAEPWQFSMLAAYMFLLI+LGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMV GGFT+TLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPL GWSRYIPEG+QCSCGIDYYT   E NNESFVIYMFVVHF IP+I+IFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICW+PYA VAFYIFTHQGS+FGPIFMTIPAFFAK++A+YNPVIYIMMNKQFRNCM+TT+CCGKNPLGDDEAS TVSKTETSQVAPA</Hsp_midline>
+    </Hsp>
+  </Hit_hsps>
+</Hit>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>18</Statistics_hsp-len>
+      <Statistics_eff-space>109230</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+</Iteration>
+<Iteration>
+  <Iteration_iter-num>24</Iteration_iter-num>
+  <Iteration_query-ID>Query_4</Iteration_query-ID>
+  <Iteration_query-def>sp|P08100|OPSD_HUMAN Rhodopsin OS=Homo sapiens GN=RHO PE=1 SV=1</Iteration_query-def>
+  <Iteration_query-len>348</Iteration_query-len>
+<Iteration_hits>
+<Hit>
+  <Hit_num>1</Hit_num>
+  <Hit_id>Subject_6</Hit_id>
+  <Hit_def>gi|12583664|dbj|AB043817.1| Conger myriaster conf gene for fresh water form rod opsin, complete cds</Hit_def>
+  <Hit_accession>Subject_6</Hit_accession>
+  <Hit_len>1344</Hit_len>
+  <Hit_hsps>
+    <Hsp>
+      <Hsp_num>1</Hsp_num>
+      <Hsp_bit-score>626.708</Hsp_bit-score>
+      <Hsp_score>1444</Hsp_score>
+      <Hsp_evalue>0</Hsp_evalue>
+      <Hsp_query-from>1</Hsp_query-from>
+      <Hsp_query-to>341</Hsp_query-to>
+      <Hsp_hit-from>23</Hsp_hit-from>
+      <Hsp_hit-to>1048</Hsp_hit-to>
+      <Hsp_query-frame>0</Hsp_query-frame>
+      <Hsp_hit-frame>2</Hsp_hit-frame>
+      <Hsp_identity>281</Hsp_identity>
+      <Hsp_positive>311</Hsp_positive>
+      <Hsp_gaps>1</Hsp_gaps>
+      <Hsp_align-len>342</Hsp_align-len>
+      <Hsp_qseq>MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIPEGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAIYNPVIYIMMNKQFRNCMLTTICCGKNPL-GDDEASATVSKTE</Hsp_qseq>
+      <Hsp_hseq>MNGTEGPNFYIPMSNATGVVRSPFEYPQYYLAEPWAFSALSAYMFFLIIAGFPINFLTLYVTIEHKKLRTPLNYILLNLAVADLFMVFGGFTTTMYTSMHGYFVFGPTGCNIEGFFATLGGEIALWCLVVLAIERWMVVCKPVTNFRFGESHAIMGVMVTWTMALACALPPLFGWSRYIPEGLQCSCGIDYYTRAPGINNESFVIYMFTCHFSIPLAVISFCYGRLVCTVKEAAAQQQESETTQRAEREVTRMVVIMVISFLVCWVPYASVAWYIFTHQGSTFGPIFMTIPSFFAKSSALYNPMIYICMNKQFRHCMITTLCCGKNPFEEEDGASATSSKTE</Hsp_hseq>
+      <Hsp_midline>MNGTEGPNFY+P SNATGVVRSPFEYPQYYLAEPW FS L+AYMF LI+ GFPINFLTLYVT++HKKLRTPLNYILLNLAVADLFMV GGFT+T+YTS+HGYFVFGPTGCN+EGFFATLGGEIALW LVVLAIER++VVCKP++NFRFGE HAIMGV  TW MALACA PPL GWSRYIPEGLQCSCGIDYYT  P +NNESFVIYMF  HF+IP+ +I FCYG+LV TVKEAAAQQQES TTQ+AE+EVTRMV+IMVI+FL+CWVPYASVA YIFTHQGS FGPIFMTIP+FFAKS+A+YNP+IYI MNKQFR CM+TT+CCGKNP   +D ASAT SKTE</Hsp_midline>
+    </Hsp>
+  </Hit_hsps>
+</Hit>
+</Iteration_hits>
+  <Iteration_stat>
+    <Statistics>
+      <Statistics_db-num>0</Statistics_db-num>
+      <Statistics_db-len>0</Statistics_db-len>
+      <Statistics_hsp-len>18</Statistics_hsp-len>
+      <Statistics_eff-space>109230</Statistics_eff-space>
+      <Statistics_kappa>0.071</Statistics_kappa>
+      <Statistics_lambda>0.299</Statistics_lambda>
+      <Statistics_entropy>0.27</Statistics_entropy>
+    </Statistics>
+  </Iteration_stat>
+</Iteration>
+</BlastOutput_iterations>
+</BlastOutput>
+
diff --git a/lib/galaxy/datatypes/test/temp.txt b/lib/galaxy/datatypes/test/temp.txt
new file mode 100644
index 0000000..17d07eb
--- /dev/null
+++ b/lib/galaxy/datatypes/test/temp.txt
@@ -0,0 +1,2 @@
+1	2
+3	4
diff --git a/lib/galaxy/datatypes/test/test.CEL b/lib/galaxy/datatypes/test/test.CEL
new file mode 100644
index 0000000..b43e1e3
Binary files /dev/null and b/lib/galaxy/datatypes/test/test.CEL differ
diff --git a/lib/galaxy/datatypes/test/test.gff b/lib/galaxy/datatypes/test/test.gff
new file mode 100644
index 0000000..997a211
--- /dev/null
+++ b/lib/galaxy/datatypes/test/test.gff
@@ -0,0 +1,35 @@
+##gff-version 2
+##Date: Thu Dec 8 19:46:27 2005
+##gff.pl $Rev: 601 $
+##Input file: /depot/data2/galaxy/encode-data/datasets/msa.AR.20051208.bed
+
+chr7	bed2gff	AR	26731313	26731437	.	+	.	score
+chr7	bed2gff	AR	26731491	26731536	.	+	.	score
+chr7	bed2gff	AR	26731541	26731649	.	+	.	score
+chr7	bed2gff	AR	26731659	26731841	.	+	.	score
+chr7	bed2gff	AR	26732568	26732610	.	+	.	score
+chr7	bed2gff	AR	26734557	26734637	.	+	.	score
+chr7	bed2gff	AR	26734831	26734910	.	+	.	score
+chr7	bed2gff	AR	26736373	26736633	.	+	.	score
+chr7	bed2gff	AR	26736746	26736901	.	+	.	score
+chr7	bed2gff	AR	26737921	26738051	.	+	.	score
+chr7	bed2gff	AR	26738401	26739153	.	+	.	score
+chr7	bed2gff	AR	26739453	26739513	.	+	.	score
+chr7	bed2gff	AR	26740335	26740414	.	+	.	score
+chr7	bed2gff	AR	26740893	26740993	.	+	.	score
+chr7	bed2gff	AR	26742668	26742751	.	+	.	score
+chr7	bed2gff	AR	26749586	26749722	.	+	.	score
+chr7	bed2gff	AR	26751358	26751384	.	+	.	score
+chr7	bed2gff	AR	26751386	26751954	.	+	.	score
+chr7	bed2gff	AR	26751956	26752038	.	+	.	score
+chr7	bed2gff	AR	26752804	26753315	.	+	.	score
+chr7	bed2gff	AR	26753602	26754249	.	+	.	score
+chr7	bed2gff	AR	26754708	26754974	.	+	.	score
+chr7	bed2gff	AR	26754985	26755157	.	+	.	score
+chr7	bed2gff	AR	26755165	26755450	.	+	.	score
+chr7	bed2gff	AR	26755472	26756178	.	+	.	score
+chr7	bed2gff	AR	26758416	26758581	.	+	.	score
+chr7	bed2gff	AR	26759185	26759362	.	+	.	score
+chr7	bed2gff	AR	26765620	26765678	.	+	.	score
+chr7	bed2gff	AR	26765680	26766027	.	+	.	score
+chr7	bed2gff	AR	26766029	26766472	.	+	.	score
diff --git a/lib/galaxy/datatypes/test/test.gtf b/lib/galaxy/datatypes/test/test.gtf
new file mode 100644
index 0000000..6e20813
--- /dev/null
+++ b/lib/galaxy/datatypes/test/test.gtf
@@ -0,0 +1,500 @@
+chr13	Cufflinks	transcript	3405463	3405542	1000	.	.	gene_id "CUFF.50189"; transcript_id "CUFF.50189.1"; FPKM "6.3668918357"; frac "1.000000"; conf_lo "0.000000"; conf_hi "17.963819"; cov "0.406914";
+chr13	Cufflinks	exon	3405463	3405542	1000	.	.	gene_id "CUFF.50189"; transcript_id "CUFF.50189.1"; exon_number "1"; FPKM "6.3668918357"; frac "1.000000"; conf_lo "0.000000"; conf_hi "17.963819"; cov "0.406914";
+chr13	Cufflinks	transcript	3473337	3473372	1000	.	.	gene_id "CUFF.50191"; transcript_id "CUFF.50191.1"; FPKM "11.7350749444"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.205225"; cov "0.750000";
+chr13	Cufflinks	exon	3473337	3473372	1000	.	.	gene_id "CUFF.50191"; transcript_id "CUFF.50191.1"; exon_number "1"; FPKM "11.7350749444"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.205225"; cov "0.750000";
+chr13	Cufflinks	transcript	3490319	3490350	1000	.	.	gene_id "CUFF.50193"; transcript_id "CUFF.50193.1"; FPKM "39.6058779373"; frac "1.000000"; conf_lo "0.000000"; conf_hi "85.338807"; cov "2.531250";
+chr13	Cufflinks	exon	3490319	3490350	1000	.	.	gene_id "CUFF.50193"; transcript_id "CUFF.50193.1"; exon_number "1"; FPKM "39.6058779373"; frac "1.000000"; conf_lo "0.000000"; conf_hi "85.338807"; cov "2.531250";
+chr13	Cufflinks	transcript	3565855	3566203	1000	-	.	gene_id "CUFF.50195"; transcript_id "CUFF.50195.1"; FPKM "29.8710998584"; frac "1.000000"; conf_lo "7.290671"; conf_hi "52.451529"; cov "1.909091";
+chr13	Cufflinks	exon	3565855	3565913	1000	-	.	gene_id "CUFF.50195"; transcript_id "CUFF.50195.1"; exon_number "1"; FPKM "29.8710998584"; frac "1.000000"; conf_lo "7.290671"; conf_hi "52.451529"; cov "1.909091";
+chr13	Cufflinks	exon	3566164	3566203	1000	-	.	gene_id "CUFF.50195"; transcript_id "CUFF.50195.1"; exon_number "2"; FPKM "29.8710998584"; frac "1.000000"; conf_lo "7.290671"; conf_hi "52.451529"; cov "1.909091";
+chr13	Cufflinks	transcript	3566475	3566560	1000	.	.	gene_id "CUFF.50197"; transcript_id "CUFF.50197.1"; FPKM "14.7370708604"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.753975"; cov "0.941860";
+chr13	Cufflinks	exon	3566475	3566560	1000	.	.	gene_id "CUFF.50197"; transcript_id "CUFF.50197.1"; exon_number "1"; FPKM "14.7370708604"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.753975"; cov "0.941860";
+chr13	Cufflinks	transcript	3566664	3566942	1000	.	.	gene_id "CUFF.50199"; transcript_id "CUFF.50199.1"; FPKM "31.7874813134"; frac "1.000000"; conf_lo "17.911934"; conf_hi "45.663029"; cov "2.031569";
+chr13	Cufflinks	exon	3566664	3566942	1000	.	.	gene_id "CUFF.50199"; transcript_id "CUFF.50199.1"; exon_number "1"; FPKM "31.7874813134"; frac "1.000000"; conf_lo "17.911934"; conf_hi "45.663029"; cov "2.031569";
+chr13	Cufflinks	transcript	3568042	3568068	1000	.	.	gene_id "CUFF.50201"; transcript_id "CUFF.50201.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	3568042	3568068	1000	.	.	gene_id "CUFF.50201"; transcript_id "CUFF.50201.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	3569564	3569626	1000	.	.	gene_id "CUFF.50203"; transcript_id "CUFF.50203.1"; FPKM "13.4115142222"; frac "1.000000"; conf_lo "0.000000"; conf_hi "32.378260"; cov "0.857143";
+chr13	Cufflinks	exon	3569564	3569626	1000	.	.	gene_id "CUFF.50203"; transcript_id "CUFF.50203.1"; exon_number "1"; FPKM "13.4115142222"; frac "1.000000"; conf_lo "0.000000"; conf_hi "32.378260"; cov "0.857143";
+chr13	Cufflinks	transcript	3594171	3594199	1000	.	.	gene_id "CUFF.50205"; transcript_id "CUFF.50205.1"; FPKM "29.1353584826"; frac "1.000000"; conf_lo "0.000000"; conf_hi "70.338978"; cov "1.862069";
+chr13	Cufflinks	exon	3594171	3594199	1000	.	.	gene_id "CUFF.50205"; transcript_id "CUFF.50205.1"; exon_number "1"; FPKM "29.1353584826"; frac "1.000000"; conf_lo "0.000000"; conf_hi "70.338978"; cov "1.862069";
+chr13	Cufflinks	transcript	3606116	3613028	1000	-	.	gene_id "CUFF.50207"; transcript_id "CUFF.50207.1"; FPKM "19.6171377865"; frac "1.000000"; conf_lo "0.936995"; conf_hi "38.297281"; cov "1.253750";
+chr13	Cufflinks	exon	3606116	3606146	1000	-	.	gene_id "CUFF.50207"; transcript_id "CUFF.50207.1"; exon_number "1"; FPKM "19.6171377865"; frac "1.000000"; conf_lo "0.936995"; conf_hi "38.297281"; cov "1.253750";
+chr13	Cufflinks	exon	3612965	3613028	1000	-	.	gene_id "CUFF.50207"; transcript_id "CUFF.50207.1"; exon_number "2"; FPKM "19.6171377865"; frac "1.000000"; conf_lo "0.936995"; conf_hi "38.297281"; cov "1.253750";
+chr13	Cufflinks	transcript	3603507	3603533	1000	.	.	gene_id "CUFF.50209"; transcript_id "CUFF.50209.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	3603507	3603533	1000	.	.	gene_id "CUFF.50209"; transcript_id "CUFF.50209.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	3604709	3604735	1000	.	.	gene_id "CUFF.50211"; transcript_id "CUFF.50211.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	3604709	3604735	1000	.	.	gene_id "CUFF.50211"; transcript_id "CUFF.50211.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	3612524	3612550	1000	.	.	gene_id "CUFF.50213"; transcript_id "CUFF.50213.1"; FPKM "117.3321730764"; frac "1.000000"; conf_lo "31.638086"; conf_hi "203.026260"; cov "7.498813";
+chr13	Cufflinks	exon	3612524	3612550	1000	.	.	gene_id "CUFF.50213"; transcript_id "CUFF.50213.1"; exon_number "1"; FPKM "117.3321730764"; frac "1.000000"; conf_lo "31.638086"; conf_hi "203.026260"; cov "7.498813";
+chr13	Cufflinks	transcript	3639250	3639290	1000	.	.	gene_id "CUFF.50215"; transcript_id "CUFF.50215.1"; FPKM "30.9119047316"; frac "1.000000"; conf_lo "0.000000"; conf_hi "66.605898"; cov "1.975610";
+chr13	Cufflinks	exon	3639250	3639290	1000	.	.	gene_id "CUFF.50215"; transcript_id "CUFF.50215.1"; exon_number "1"; FPKM "30.9119047316"; frac "1.000000"; conf_lo "0.000000"; conf_hi "66.605898"; cov "1.975610";
+chr13	Cufflinks	transcript	3649635	3649777	1000	.	.	gene_id "CUFF.50217"; transcript_id "CUFF.50217.1"; FPKM "14.7714230069"; frac "1.000000"; conf_lo "1.559461"; conf_hi "27.983385"; cov "0.944056";
+chr13	Cufflinks	exon	3649635	3649777	1000	.	.	gene_id "CUFF.50217"; transcript_id "CUFF.50217.1"; exon_number "1"; FPKM "14.7714230069"; frac "1.000000"; conf_lo "1.559461"; conf_hi "27.983385"; cov "0.944056";
+chr13	Cufflinks	transcript	3649976	3650072	1000	.	.	gene_id "CUFF.50219"; transcript_id "CUFF.50219.1"; FPKM "26.1317132782"; frac "1.000000"; conf_lo "4.795259"; conf_hi "47.468168"; cov "1.670103";
+chr13	Cufflinks	exon	3649976	3650072	1000	.	.	gene_id "CUFF.50219"; transcript_id "CUFF.50219.1"; exon_number "1"; FPKM "26.1317132782"; frac "1.000000"; conf_lo "4.795259"; conf_hi "47.468168"; cov "1.670103";
+chr13	Cufflinks	transcript	3650165	3650345	1000	.	.	gene_id "CUFF.50221"; transcript_id "CUFF.50221.1"; FPKM "16.3383363867"; frac "1.000000"; conf_lo "3.987715"; conf_hi "28.688958"; cov "1.044199";
+chr13	Cufflinks	exon	3650165	3650345	1000	.	.	gene_id "CUFF.50221"; transcript_id "CUFF.50221.1"; exon_number "1"; FPKM "16.3383363867"; frac "1.000000"; conf_lo "3.987715"; conf_hi "28.688958"; cov "1.044199";
+chr13	Cufflinks	transcript	3650498	3651017	1000	.	.	gene_id "CUFF.50223"; transcript_id "CUFF.50223.1"; FPKM "38.9965567383"; frac "1.000000"; conf_lo "27.739220"; conf_hi "50.253893"; cov "2.492308";
+chr13	Cufflinks	exon	3650498	3651017	1000	.	.	gene_id "CUFF.50223"; transcript_id "CUFF.50223.1"; exon_number "1"; FPKM "38.9965567383"; frac "1.000000"; conf_lo "27.739220"; conf_hi "50.253893"; cov "2.492308";
+chr13	Cufflinks	transcript	3652248	3652287	1000	.	.	gene_id "CUFF.50225"; transcript_id "CUFF.50225.1"; FPKM "21.1231348999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.995759"; cov "1.350000";
+chr13	Cufflinks	exon	3652248	3652287	1000	.	.	gene_id "CUFF.50225"; transcript_id "CUFF.50225.1"; exon_number "1"; FPKM "21.1231348999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.995759"; cov "1.350000";
+chr13	Cufflinks	transcript	3652708	3652757	1000	.	.	gene_id "CUFF.50227"; transcript_id "CUFF.50227.1"; FPKM "16.8985079199"; frac "1.000000"; conf_lo "0.000000"; conf_hi "40.796607"; cov "1.080000";
+chr13	Cufflinks	exon	3652708	3652757	1000	.	.	gene_id "CUFF.50227"; transcript_id "CUFF.50227.1"; exon_number "1"; FPKM "16.8985079199"; frac "1.000000"; conf_lo "0.000000"; conf_hi "40.796607"; cov "1.080000";
+chr13	Cufflinks	transcript	3652858	3652892	1000	.	.	gene_id "CUFF.50229"; transcript_id "CUFF.50229.1"; FPKM "24.1407255999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "58.280867"; cov "1.542857";
+chr13	Cufflinks	exon	3652858	3652892	1000	.	.	gene_id "CUFF.50229"; transcript_id "CUFF.50229.1"; exon_number "1"; FPKM "24.1407255999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "58.280867"; cov "1.542857";
+chr13	Cufflinks	transcript	3803155	3803189	1000	.	.	gene_id "CUFF.50231"; transcript_id "CUFF.50231.1"; FPKM "193.0684834367"; frac "1.000000"; conf_lo "96.519912"; conf_hi "289.617054"; cov "12.339194";
+chr13	Cufflinks	exon	3803155	3803189	1000	.	.	gene_id "CUFF.50231"; transcript_id "CUFF.50231.1"; exon_number "1"; FPKM "193.0684834367"; frac "1.000000"; conf_lo "96.519912"; conf_hi "289.617054"; cov "12.339194";
+chr13	Cufflinks	transcript	3881504	3881530	1000	.	.	gene_id "CUFF.50233"; transcript_id "CUFF.50233.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	3881504	3881530	1000	.	.	gene_id "CUFF.50233"; transcript_id "CUFF.50233.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	3881847	3881940	1000	.	.	gene_id "CUFF.50235"; transcript_id "CUFF.50235.1"; FPKM "11.2303742880"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.439173"; cov "0.717744";
+chr13	Cufflinks	exon	3881847	3881940	1000	.	.	gene_id "CUFF.50235"; transcript_id "CUFF.50235.1"; exon_number "1"; FPKM "11.2303742880"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.439173"; cov "0.717744";
+chr13	Cufflinks	transcript	3882719	3882811	1000	.	.	gene_id "CUFF.50237"; transcript_id "CUFF.50237.1"; FPKM "9.0852193118"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.933660"; cov "0.580645";
+chr13	Cufflinks	exon	3882719	3882811	1000	.	.	gene_id "CUFF.50237"; transcript_id "CUFF.50237.1"; exon_number "1"; FPKM "9.0852193118"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.933660"; cov "0.580645";
+chr13	Cufflinks	transcript	3940646	3940672	1000	.	.	gene_id "CUFF.50239"; transcript_id "CUFF.50239.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	3940646	3940672	1000	.	.	gene_id "CUFF.50239"; transcript_id "CUFF.50239.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	4135893	4135996	1000	.	.	gene_id "CUFF.50241"; transcript_id "CUFF.50241.1"; FPKM "8.1242826538"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.613753"; cov "0.519231";
+chr13	Cufflinks	exon	4135893	4135996	1000	.	.	gene_id "CUFF.50241"; transcript_id "CUFF.50241.1"; exon_number "1"; FPKM "8.1242826538"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.613753"; cov "0.519231";
+chr13	Cufflinks	transcript	4246054	4246080	1000	.	.	gene_id "CUFF.50243"; transcript_id "CUFF.50243.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	4246054	4246080	1000	.	.	gene_id "CUFF.50243"; transcript_id "CUFF.50243.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	4246081	4246107	1000	.	.	gene_id "CUFF.50245"; transcript_id "CUFF.50245.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	4246081	4246107	1000	.	.	gene_id "CUFF.50245"; transcript_id "CUFF.50245.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	4247347	4247373	1000	.	.	gene_id "CUFF.50247"; transcript_id "CUFF.50247.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	4247347	4247373	1000	.	.	gene_id "CUFF.50247"; transcript_id "CUFF.50247.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	4247393	4247419	1000	.	.	gene_id "CUFF.50249"; transcript_id "CUFF.50249.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	4247393	4247419	1000	.	.	gene_id "CUFF.50249"; transcript_id "CUFF.50249.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	4253585	4253611	1000	.	.	gene_id "CUFF.50251"; transcript_id "CUFF.50251.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	4253585	4253611	1000	.	.	gene_id "CUFF.50251"; transcript_id "CUFF.50251.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	4356816	4356842	1000	.	.	gene_id "CUFF.50253"; transcript_id "CUFF.50253.1"; FPKM "31.2563804501"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.485841"; cov "1.997626";
+chr13	Cufflinks	exon	4356816	4356842	1000	.	.	gene_id "CUFF.50253"; transcript_id "CUFF.50253.1"; exon_number "1"; FPKM "31.2563804501"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.485841"; cov "1.997626";
+chr13	Cufflinks	transcript	4591975	4592074	1000	.	.	gene_id "CUFF.50255"; transcript_id "CUFF.50255.1"; FPKM "16.8985079199"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.797016"; cov "1.080000";
+chr13	Cufflinks	exon	4591975	4592074	1000	.	.	gene_id "CUFF.50255"; transcript_id "CUFF.50255.1"; exon_number "1"; FPKM "16.8985079199"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.797016"; cov "1.080000";
+chr13	Cufflinks	transcript	4592148	4592531	1000	.	.	gene_id "CUFF.50257"; transcript_id "CUFF.50257.1"; FPKM "22.0032655207"; frac "1.000000"; conf_lo "12.163106"; conf_hi "31.843425"; cov "1.406250";
+chr13	Cufflinks	exon	4592148	4592531	1000	.	.	gene_id "CUFF.50257"; transcript_id "CUFF.50257.1"; exon_number "1"; FPKM "22.0032655207"; frac "1.000000"; conf_lo "12.163106"; conf_hi "31.843425"; cov "1.406250";
+chr13	Cufflinks	transcript	4592862	4592890	1000	.	.	gene_id "CUFF.50259"; transcript_id "CUFF.50259.1"; FPKM "58.2707169652"; frac "1.000000"; conf_lo "0.000000"; conf_hi "116.541434"; cov "3.724138";
+chr13	Cufflinks	exon	4592862	4592890	1000	.	.	gene_id "CUFF.50259"; transcript_id "CUFF.50259.1"; exon_number "1"; FPKM "58.2707169652"; frac "1.000000"; conf_lo "0.000000"; conf_hi "116.541434"; cov "3.724138";
+chr13	Cufflinks	transcript	4594319	4594938	1000	-	.	gene_id "CUFF.50261"; transcript_id "CUFF.50261.1"; FPKM "29.3887094260"; frac "1.000000"; conf_lo "8.607754"; conf_hi "50.169665"; cov "1.878261";
+chr13	Cufflinks	exon	4594319	4594400	1000	-	.	gene_id "CUFF.50261"; transcript_id "CUFF.50261.1"; exon_number "1"; FPKM "29.3887094260"; frac "1.000000"; conf_lo "8.607754"; conf_hi "50.169665"; cov "1.878261";
+chr13	Cufflinks	exon	4594906	4594938	1000	-	.	gene_id "CUFF.50261"; transcript_id "CUFF.50261.1"; exon_number "2"; FPKM "29.3887094260"; frac "1.000000"; conf_lo "8.607754"; conf_hi "50.169665"; cov "1.878261";
+chr13	Cufflinks	transcript	4596799	4598059	1000	-	.	gene_id "CUFF.50263"; transcript_id "CUFF.50263.1"; FPKM "22.8358215134"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.671643"; cov "1.459459";
+chr13	Cufflinks	exon	4596799	4596828	1000	-	.	gene_id "CUFF.50263"; transcript_id "CUFF.50263.1"; exon_number "1"; FPKM "22.8358215134"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.671643"; cov "1.459459";
+chr13	Cufflinks	exon	4598016	4598059	1000	-	.	gene_id "CUFF.50263"; transcript_id "CUFF.50263.1"; exon_number "2"; FPKM "22.8358215134"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.671643"; cov "1.459459";
+chr13	Cufflinks	transcript	4601790	4601816	1000	.	.	gene_id "CUFF.50265"; transcript_id "CUFF.50265.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	4601790	4601816	1000	.	.	gene_id "CUFF.50265"; transcript_id "CUFF.50265.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	4601884	4601952	1000	.	.	gene_id "CUFF.50267"; transcript_id "CUFF.50267.1"; FPKM "12.2452955941"; frac "1.000000"; conf_lo "0.000000"; conf_hi "29.562759"; cov "0.782609";
+chr13	Cufflinks	exon	4601884	4601952	1000	.	.	gene_id "CUFF.50267"; transcript_id "CUFF.50267.1"; exon_number "1"; FPKM "12.2452955941"; frac "1.000000"; conf_lo "0.000000"; conf_hi "29.562759"; cov "0.782609";
+chr13	Cufflinks	transcript	3541632	3541797	1000	.	.	gene_id "CUFF.50269"; transcript_id "CUFF.50269.1"; FPKM "10.1798240481"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.359648"; cov "0.650602";
+chr13	Cufflinks	exon	3541632	3541797	1000	.	.	gene_id "CUFF.50269"; transcript_id "CUFF.50269.1"; exon_number "1"; FPKM "10.1798240481"; frac "1.000000"; conf_lo "0.000000"; conf_hi "20.359648"; cov "0.650602";
+chr13	Cufflinks	transcript	3541917	3542016	1000	.	.	gene_id "CUFF.50271"; transcript_id "CUFF.50271.1"; FPKM "12.6738809399"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.308418"; cov "0.810000";
+chr13	Cufflinks	exon	3541917	3542016	1000	.	.	gene_id "CUFF.50271"; transcript_id "CUFF.50271.1"; exon_number "1"; FPKM "12.6738809399"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.308418"; cov "0.810000";
+chr13	Cufflinks	transcript	3542096	3542122	1000	.	.	gene_id "CUFF.50273"; transcript_id "CUFF.50273.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	3542096	3542122	1000	.	.	gene_id "CUFF.50273"; transcript_id "CUFF.50273.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	3548183	3548209	1000	.	.	gene_id "CUFF.50275"; transcript_id "CUFF.50275.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	3548183	3548209	1000	.	.	gene_id "CUFF.50275"; transcript_id "CUFF.50275.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	3559238	3559264	1000	.	.	gene_id "CUFF.50277"; transcript_id "CUFF.50277.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	3559238	3559264	1000	.	.	gene_id "CUFF.50277"; transcript_id "CUFF.50277.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	3559265	3559291	1000	.	.	gene_id "CUFF.50279"; transcript_id "CUFF.50279.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	3559265	3559291	1000	.	.	gene_id "CUFF.50279"; transcript_id "CUFF.50279.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	3561489	3561515	1000	.	.	gene_id "CUFF.50281"; transcript_id "CUFF.50281.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	3561489	3561515	1000	.	.	gene_id "CUFF.50281"; transcript_id "CUFF.50281.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	3561516	3561616	1000	.	.	gene_id "CUFF.50283"; transcript_id "CUFF.50283.1"; FPKM "12.5483969702"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.038038"; cov "0.801980";
+chr13	Cufflinks	exon	3561516	3561616	1000	.	.	gene_id "CUFF.50283"; transcript_id "CUFF.50283.1"; exon_number "1"; FPKM "12.5483969702"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.038038"; cov "0.801980";
+chr13	Cufflinks	transcript	3563788	3563913	1000	.	.	gene_id "CUFF.50285"; transcript_id "CUFF.50285.1"; FPKM "16.7564314774"; frac "1.000000"; conf_lo "1.765464"; conf_hi "31.747399"; cov "1.070920";
+chr13	Cufflinks	exon	3563788	3563913	1000	.	.	gene_id "CUFF.50285"; transcript_id "CUFF.50285.1"; exon_number "1"; FPKM "16.7564314774"; frac "1.000000"; conf_lo "1.765464"; conf_hi "31.747399"; cov "1.070920";
+chr13	Cufflinks	transcript	3564114	3564162	1000	.	.	gene_id "CUFF.50287"; transcript_id "CUFF.50287.1"; FPKM "68.9735017140"; frac "1.000000"; conf_lo "20.201871"; conf_hi "117.745132"; cov "4.408163";
+chr13	Cufflinks	exon	3564114	3564162	1000	.	.	gene_id "CUFF.50287"; transcript_id "CUFF.50287.1"; exon_number "1"; FPKM "68.9735017140"; frac "1.000000"; conf_lo "20.201871"; conf_hi "117.745132"; cov "4.408163";
+chr13	Cufflinks	transcript	5861035	5872268	1000	-	.	gene_id "CUFF.50289"; transcript_id "CUFF.50289.1"; FPKM "7.5439767500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "18.212771"; cov "0.482143";
+chr13	Cufflinks	exon	5861035	5861117	1000	-	.	gene_id "CUFF.50289"; transcript_id "CUFF.50289.1"; exon_number "1"; FPKM "7.5439767500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "18.212771"; cov "0.482143";
+chr13	Cufflinks	exon	5872240	5872268	1000	-	.	gene_id "CUFF.50289"; transcript_id "CUFF.50289.1"; exon_number "2"; FPKM "7.5439767500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "18.212771"; cov "0.482143";
+chr13	Cufflinks	transcript	5864061	5864135	1000	.	.	gene_id "CUFF.50291"; transcript_id "CUFF.50291.1"; FPKM "16.8985079199"; frac "1.000000"; conf_lo "0.000000"; conf_hi "36.411224"; cov "1.080000";
+chr13	Cufflinks	exon	5864061	5864135	1000	.	.	gene_id "CUFF.50291"; transcript_id "CUFF.50291.1"; exon_number "1"; FPKM "16.8985079199"; frac "1.000000"; conf_lo "0.000000"; conf_hi "36.411224"; cov "1.080000";
+chr13	Cufflinks	transcript	5864192	5864585	1000	.	.	gene_id "CUFF.50293"; transcript_id "CUFF.50293.1"; FPKM "18.2280859542"; frac "1.000000"; conf_lo "9.386166"; conf_hi "27.070006"; cov "1.164975";
+chr13	Cufflinks	exon	5864192	5864585	1000	.	.	gene_id "CUFF.50293"; transcript_id "CUFF.50293.1"; exon_number "1"; FPKM "18.2280859542"; frac "1.000000"; conf_lo "9.386166"; conf_hi "27.070006"; cov "1.164975";
+chr13	Cufflinks	transcript	5865070	5865096	1000	.	.	gene_id "CUFF.50295"; transcript_id "CUFF.50295.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	5865070	5865096	1000	.	.	gene_id "CUFF.50295"; transcript_id "CUFF.50295.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	5865442	5866941	1000	+	.	gene_id "CUFF.50297"; transcript_id "CUFF.50297.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr13	Cufflinks	exon	5865442	5865510	1000	+	.	gene_id "CUFF.50297"; transcript_id "CUFF.50297.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr13	Cufflinks	exon	5866915	5866941	1000	+	.	gene_id "CUFF.50297"; transcript_id "CUFF.50297.1"; exon_number "2"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.446269"; cov "0.843750";
+chr13	Cufflinks	transcript	5866598	5866661	1000	.	.	gene_id "CUFF.50299"; transcript_id "CUFF.50299.1"; FPKM "92.4137151871"; frac "1.000000"; conf_lo "43.016507"; conf_hi "141.810924"; cov "5.906250";
+chr13	Cufflinks	exon	5866598	5866661	1000	.	.	gene_id "CUFF.50299"; transcript_id "CUFF.50299.1"; exon_number "1"; FPKM "92.4137151871"; frac "1.000000"; conf_lo "43.016507"; conf_hi "141.810924"; cov "5.906250";
+chr13	Cufflinks	transcript	5866756	5866871	1000	.	.	gene_id "CUFF.50301"; transcript_id "CUFF.50301.1"; FPKM "83.7641556375"; frac "1.000000"; conf_lo "48.832088"; conf_hi "118.696223"; cov "5.353448";
+chr13	Cufflinks	exon	5866756	5866871	1000	.	.	gene_id "CUFF.50301"; transcript_id "CUFF.50301.1"; exon_number "1"; FPKM "83.7641556375"; frac "1.000000"; conf_lo "48.832088"; conf_hi "118.696223"; cov "5.353448";
+chr13	Cufflinks	transcript	5866964	5867014	1000	.	.	gene_id "CUFF.50303"; transcript_id "CUFF.50303.1"; FPKM "124.2537347053"; frac "1.000000"; conf_lo "60.089382"; conf_hi "188.418087"; cov "7.941176";
+chr13	Cufflinks	exon	5866964	5867014	1000	.	.	gene_id "CUFF.50303"; transcript_id "CUFF.50303.1"; exon_number "1"; FPKM "124.2537347053"; frac "1.000000"; conf_lo "60.089382"; conf_hi "188.418087"; cov "7.941176";
+chr13	Cufflinks	transcript	5867386	5867412	1000	.	.	gene_id "CUFF.50305"; transcript_id "CUFF.50305.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	5867386	5867412	1000	.	.	gene_id "CUFF.50305"; transcript_id "CUFF.50305.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	5867480	5867506	1000	.	.	gene_id "CUFF.50307"; transcript_id "CUFF.50307.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	5867480	5867506	1000	.	.	gene_id "CUFF.50307"; transcript_id "CUFF.50307.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	5867688	5867737	1000	.	.	gene_id "CUFF.50309"; transcript_id "CUFF.50309.1"; FPKM "25.3477618799"; frac "1.000000"; conf_lo "0.000000"; conf_hi "54.616836"; cov "1.620000";
+chr13	Cufflinks	exon	5867688	5867737	1000	.	.	gene_id "CUFF.50309"; transcript_id "CUFF.50309.1"; exon_number "1"; FPKM "25.3477618799"; frac "1.000000"; conf_lo "0.000000"; conf_hi "54.616836"; cov "1.620000";
+chr13	Cufflinks	transcript	5867820	5868008	1000	.	.	gene_id "CUFF.50311"; transcript_id "CUFF.50311.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "3.818923"; conf_hi "27.474610"; cov "1.000000";
+chr13	Cufflinks	exon	5867820	5868008	1000	.	.	gene_id "CUFF.50311"; transcript_id "CUFF.50311.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "3.818923"; conf_hi "27.474610"; cov "1.000000";
+chr13	Cufflinks	transcript	5868254	5868314	1000	.	.	gene_id "CUFF.50313"; transcript_id "CUFF.50313.1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+chr13	Cufflinks	exon	5868254	5868314	1000	.	.	gene_id "CUFF.50313"; transcript_id "CUFF.50313.1"; exon_number "1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+chr13	Cufflinks	transcript	5869125	5869300	1000	.	.	gene_id "CUFF.50315"; transcript_id "CUFF.50315.1"; FPKM "88.8131808291"; frac "1.000000"; conf_lo "59.611587"; conf_hi "118.014775"; cov "5.676136";
+chr13	Cufflinks	exon	5869125	5869300	1000	.	.	gene_id "CUFF.50315"; transcript_id "CUFF.50315.1"; exon_number "1"; FPKM "88.8131808291"; frac "1.000000"; conf_lo "59.611587"; conf_hi "118.014775"; cov "5.676136";
+chr13	Cufflinks	transcript	5869455	5869484	1000	.	.	gene_id "CUFF.50317"; transcript_id "CUFF.50317.1"; FPKM "133.7631356353"; frac "1.000000"; conf_lo "46.960728"; conf_hi "220.565544"; cov "8.548931";
+chr13	Cufflinks	exon	5869455	5869484	1000	.	.	gene_id "CUFF.50317"; transcript_id "CUFF.50317.1"; exon_number "1"; FPKM "133.7631356353"; frac "1.000000"; conf_lo "46.960728"; conf_hi "220.565544"; cov "8.548931";
+chr13	Cufflinks	transcript	5869555	5869581	1000	.	.	gene_id "CUFF.50319"; transcript_id "CUFF.50319.1"; FPKM "125.1741327402"; frac "1.000000"; conf_lo "36.662655"; conf_hi "213.685611"; cov "8.000000";
+chr13	Cufflinks	exon	5869555	5869581	1000	.	.	gene_id "CUFF.50319"; transcript_id "CUFF.50319.1"; exon_number "1"; FPKM "125.1741327402"; frac "1.000000"; conf_lo "36.662655"; conf_hi "213.685611"; cov "8.000000";
+chr13	Cufflinks	transcript	6205097	6205155	1000	.	.	gene_id "CUFF.50321"; transcript_id "CUFF.50321.1"; FPKM "14.3207694237"; frac "1.000000"; conf_lo "0.000000"; conf_hi "34.573396"; cov "0.915254";
+chr13	Cufflinks	exon	6205097	6205155	1000	.	.	gene_id "CUFF.50321"; transcript_id "CUFF.50321.1"; exon_number "1"; FPKM "14.3207694237"; frac "1.000000"; conf_lo "0.000000"; conf_hi "34.573396"; cov "0.915254";
+chr13	Cufflinks	transcript	6227260	6227293	1000	.	.	gene_id "CUFF.50323"; transcript_id "CUFF.50323.1"; FPKM "18.6233083846"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.047086"; cov "1.190234";
+chr13	Cufflinks	exon	6227260	6227293	1000	.	.	gene_id "CUFF.50323"; transcript_id "CUFF.50323.1"; exon_number "1"; FPKM "18.6233083846"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.047086"; cov "1.190234";
+chr13	Cufflinks	transcript	6553021	6553051	1000	.	.	gene_id "CUFF.50325"; transcript_id "CUFF.50325.1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+chr13	Cufflinks	exon	6553021	6553051	1000	.	.	gene_id "CUFF.50325"; transcript_id "CUFF.50325.1"; exon_number "1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+chr13	Cufflinks	transcript	6576412	6576471	1000	.	.	gene_id "CUFF.50327"; transcript_id "CUFF.50327.1"; FPKM "14.0820899333"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.997173"; cov "0.900000";
+chr13	Cufflinks	exon	6576412	6576471	1000	.	.	gene_id "CUFF.50327"; transcript_id "CUFF.50327.1"; exon_number "1"; FPKM "14.0820899333"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.997173"; cov "0.900000";
+chr13	Cufflinks	transcript	6576625	6576734	1000	.	.	gene_id "CUFF.50329"; transcript_id "CUFF.50329.1"; FPKM "26.8839898726"; frac "1.000000"; conf_lo "6.561604"; conf_hi "47.206376"; cov "1.718182";
+chr13	Cufflinks	exon	6576625	6576734	1000	.	.	gene_id "CUFF.50329"; transcript_id "CUFF.50329.1"; exon_number "1"; FPKM "26.8839898726"; frac "1.000000"; conf_lo "6.561604"; conf_hi "47.206376"; cov "1.718182";
+chr13	Cufflinks	transcript	6577727	6577820	1000	.	.	gene_id "CUFF.50331"; transcript_id "CUFF.50331.1"; FPKM "31.4599881488"; frac "1.000000"; conf_lo "7.678472"; conf_hi "55.241504"; cov "2.010638";
+chr13	Cufflinks	exon	6577727	6577820	1000	.	.	gene_id "CUFF.50331"; transcript_id "CUFF.50331.1"; exon_number "1"; FPKM "31.4599881488"; frac "1.000000"; conf_lo "7.678472"; conf_hi "55.241504"; cov "2.010638";
+chr13	Cufflinks	transcript	6579706	6579858	1000	.	.	gene_id "CUFF.50333"; transcript_id "CUFF.50333.1"; FPKM "11.0447764182"; frac "1.000000"; conf_lo "0.000000"; conf_hi "22.089553"; cov "0.705882";
+chr13	Cufflinks	exon	6579706	6579858	1000	.	.	gene_id "CUFF.50333"; transcript_id "CUFF.50333.1"; exon_number "1"; FPKM "11.0447764182"; frac "1.000000"; conf_lo "0.000000"; conf_hi "22.089553"; cov "0.705882";
+chr13	Cufflinks	transcript	6580126	6580152	1000	.	.	gene_id "CUFF.50335"; transcript_id "CUFF.50335.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	6580126	6580152	1000	.	.	gene_id "CUFF.50335"; transcript_id "CUFF.50335.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	6580257	6580295	1000	.	.	gene_id "CUFF.50337"; transcript_id "CUFF.50337.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr13	Cufflinks	exon	6580257	6580295	1000	.	.	gene_id "CUFF.50337"; transcript_id "CUFF.50337.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr13	Cufflinks	transcript	6583845	6585843	1000	-	.	gene_id "CUFF.50339"; transcript_id "CUFF.50339.1"; FPKM "163.2242242265"; frac "1.000000"; conf_lo "127.815919"; conf_hi "198.632530"; cov "10.431818";
+chr13	Cufflinks	exon	6583845	6583946	1000	-	.	gene_id "CUFF.50339"; transcript_id "CUFF.50339.1"; exon_number "1"; FPKM "163.2242242265"; frac "1.000000"; conf_lo "127.815919"; conf_hi "198.632530"; cov "10.431818";
+chr13	Cufflinks	exon	6585726	6585843	1000	-	.	gene_id "CUFF.50339"; transcript_id "CUFF.50339.1"; exon_number "2"; FPKM "163.2242242265"; frac "1.000000"; conf_lo "127.815919"; conf_hi "198.632530"; cov "10.431818";
+chr13	Cufflinks	transcript	6586295	6587966	1000	-	.	gene_id "CUFF.50341"; transcript_id "CUFF.50341.1"; FPKM "82.5011329424"; frac "1.000000"; conf_lo "60.835274"; conf_hi "104.166992"; cov "5.272727";
+chr13	Cufflinks	exon	6586295	6586359	1000	-	.	gene_id "CUFF.50341"; transcript_id "CUFF.50341.1"; exon_number "1"; FPKM "82.5011329424"; frac "1.000000"; conf_lo "60.835274"; conf_hi "104.166992"; cov "5.272727";
+chr13	Cufflinks	exon	6587735	6587966	1000	-	.	gene_id "CUFF.50341"; transcript_id "CUFF.50341.1"; exon_number "2"; FPKM "82.5011329424"; frac "1.000000"; conf_lo "60.835274"; conf_hi "104.166992"; cov "5.272727";
+chr13	Cufflinks	transcript	6588113	6588703	1000	.	.	gene_id "CUFF.50343"; transcript_id "CUFF.50343.1"; FPKM "42.8896140100"; frac "1.000000"; conf_lo "31.815563"; conf_hi "53.963665"; cov "2.741117";
+chr13	Cufflinks	exon	6588113	6588703	1000	.	.	gene_id "CUFF.50343"; transcript_id "CUFF.50343.1"; exon_number "1"; FPKM "42.8896140100"; frac "1.000000"; conf_lo "31.815563"; conf_hi "53.963665"; cov "2.741117";
+chr13	Cufflinks	transcript	6588763	6588911	1000	.	.	gene_id "CUFF.50345"; transcript_id "CUFF.50345.1"; FPKM "31.1885213287"; frac "1.000000"; conf_lo "12.381135"; conf_hi "49.995907"; cov "1.993289";
+chr13	Cufflinks	exon	6588763	6588911	1000	.	.	gene_id "CUFF.50345"; transcript_id "CUFF.50345.1"; exon_number "1"; FPKM "31.1885213287"; frac "1.000000"; conf_lo "12.381135"; conf_hi "49.995907"; cov "1.993289";
+chr13	Cufflinks	transcript	6588964	6589091	1000	.	.	gene_id "CUFF.50347"; transcript_id "CUFF.50347.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.403919"; cov "0.843750";
+chr13	Cufflinks	exon	6588964	6589091	1000	.	.	gene_id "CUFF.50347"; transcript_id "CUFF.50347.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.403919"; cov "0.843750";
+chr13	Cufflinks	transcript	6589153	6589383	1000	.	.	gene_id "CUFF.50349"; transcript_id "CUFF.50349.1"; FPKM "12.8018999393"; frac "1.000000"; conf_lo "3.124573"; conf_hi "22.479227"; cov "0.818182";
+chr13	Cufflinks	exon	6589153	6589383	1000	.	.	gene_id "CUFF.50349"; transcript_id "CUFF.50349.1"; exon_number "1"; FPKM "12.8018999393"; frac "1.000000"; conf_lo "3.124573"; conf_hi "22.479227"; cov "0.818182";
+chr13	Cufflinks	transcript	6589994	6590086	1000	.	.	gene_id "CUFF.50351"; transcript_id "CUFF.50351.1"; FPKM "9.0852193118"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.933660"; cov "0.580645";
+chr13	Cufflinks	exon	6589994	6590086	1000	.	.	gene_id "CUFF.50351"; transcript_id "CUFF.50351.1"; exon_number "1"; FPKM "9.0852193118"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.933660"; cov "0.580645";
+chr13	Cufflinks	transcript	6590329	6590359	1000	.	.	gene_id "CUFF.50353"; transcript_id "CUFF.50353.1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+chr13	Cufflinks	exon	6590329	6590359	1000	.	.	gene_id "CUFF.50353"; transcript_id "CUFF.50353.1"; exon_number "1"; FPKM "27.2556579354"; frac "1.000000"; conf_lo "0.000000"; conf_hi "65.800979"; cov "1.741935";
+chr13	Cufflinks	transcript	6590592	6590645	1000	.	.	gene_id "CUFF.50355"; transcript_id "CUFF.50355.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.774636"; cov "1.000000";
+chr13	Cufflinks	exon	6590592	6590645	1000	.	.	gene_id "CUFF.50355"; transcript_id "CUFF.50355.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.774636"; cov "1.000000";
+chr13	Cufflinks	transcript	6590963	6591056	1000	.	.	gene_id "CUFF.50357"; transcript_id "CUFF.50357.1"; FPKM "17.9771360850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.954272"; cov "1.148936";
+chr13	Cufflinks	exon	6590963	6591056	1000	.	.	gene_id "CUFF.50357"; transcript_id "CUFF.50357.1"; exon_number "1"; FPKM "17.9771360850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.954272"; cov "1.148936";
+chr13	Cufflinks	transcript	6591182	6591208	1000	.	.	gene_id "CUFF.50359"; transcript_id "CUFF.50359.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	6591182	6591208	1000	.	.	gene_id "CUFF.50359"; transcript_id "CUFF.50359.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	6591662	6591724	1000	.	.	gene_id "CUFF.50361"; transcript_id "CUFF.50361.1"; FPKM "13.4115142222"; frac "1.000000"; conf_lo "0.000000"; conf_hi "32.378260"; cov "0.857143";
+chr13	Cufflinks	exon	6591662	6591724	1000	.	.	gene_id "CUFF.50361"; transcript_id "CUFF.50361.1"; exon_number "1"; FPKM "13.4115142222"; frac "1.000000"; conf_lo "0.000000"; conf_hi "32.378260"; cov "0.857143";
+chr13	Cufflinks	transcript	6592773	6592874	1000	.	.	gene_id "CUFF.50363"; transcript_id "CUFF.50363.1"; FPKM "12.4253734705"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.772959"; cov "0.794118";
+chr13	Cufflinks	exon	6592773	6592874	1000	.	.	gene_id "CUFF.50363"; transcript_id "CUFF.50363.1"; exon_number "1"; FPKM "12.4253734705"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.772959"; cov "0.794118";
+chr13	Cufflinks	transcript	6580385	6581757	1000	-	.	gene_id "CUFF.50365"; transcript_id "CUFF.50365.1"; FPKM "324.9135847836"; frac "1.000000"; conf_lo "293.684884"; conf_hi "356.142286"; cov "20.765542";
+chr13	Cufflinks	exon	6580385	6580838	1000	-	.	gene_id "CUFF.50365"; transcript_id "CUFF.50365.1"; exon_number "1"; FPKM "324.9135847836"; frac "1.000000"; conf_lo "293.684884"; conf_hi "356.142286"; cov "20.765542";
+chr13	Cufflinks	exon	6581649	6581757	1000	-	.	gene_id "CUFF.50365"; transcript_id "CUFF.50365.1"; exon_number "2"; FPKM "324.9135847836"; frac "1.000000"; conf_lo "293.684884"; conf_hi "356.142286"; cov "20.765542";
+chr13	Cufflinks	transcript	6594213	6594242	1000	.	.	gene_id "CUFF.50367"; transcript_id "CUFF.50367.1"; FPKM "28.1641798665"; frac "1.000000"; conf_lo "0.000000"; conf_hi "67.994345"; cov "1.800000";
+chr13	Cufflinks	exon	6594213	6594242	1000	.	.	gene_id "CUFF.50367"; transcript_id "CUFF.50367.1"; exon_number "1"; FPKM "28.1641798665"; frac "1.000000"; conf_lo "0.000000"; conf_hi "67.994345"; cov "1.800000";
+chr13	Cufflinks	transcript	6594897	6594938	1000	.	.	gene_id "CUFF.50369"; transcript_id "CUFF.50369.1"; FPKM "20.1172713332"; frac "1.000000"; conf_lo "0.000000"; conf_hi "48.567389"; cov "1.285714";
+chr13	Cufflinks	exon	6594897	6594938	1000	.	.	gene_id "CUFF.50369"; transcript_id "CUFF.50369.1"; exon_number "1"; FPKM "20.1172713332"; frac "1.000000"; conf_lo "0.000000"; conf_hi "48.567389"; cov "1.285714";
+chr13	Cufflinks	transcript	6594742	6594836	1000	.	.	gene_id "CUFF.50371"; transcript_id "CUFF.50371.1"; FPKM "13.3409273052"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.745703"; cov "0.852632";
+chr13	Cufflinks	exon	6594742	6594836	1000	.	.	gene_id "CUFF.50371"; transcript_id "CUFF.50371.1"; exon_number "1"; FPKM "13.3409273052"; frac "1.000000"; conf_lo "0.000000"; conf_hi "28.745703"; cov "0.852632";
+chr13	Cufflinks	transcript	6595072	6595132	1000	.	.	gene_id "CUFF.50373"; transcript_id "CUFF.50373.1"; FPKM "20.7768539999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "44.767898"; cov "1.327869";
+chr13	Cufflinks	exon	6595072	6595132	1000	.	.	gene_id "CUFF.50373"; transcript_id "CUFF.50373.1"; exon_number "1"; FPKM "20.7768539999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "44.767898"; cov "1.327869";
+chr13	Cufflinks	transcript	6595199	6595225	1000	.	.	gene_id "CUFF.50375"; transcript_id "CUFF.50375.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	6595199	6595225	1000	.	.	gene_id "CUFF.50375"; transcript_id "CUFF.50375.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	6595246	6595272	1000	.	.	gene_id "CUFF.50377"; transcript_id "CUFF.50377.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	6595246	6595272	1000	.	.	gene_id "CUFF.50377"; transcript_id "CUFF.50377.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	6598001	6598027	1000	.	.	gene_id "CUFF.50379"; transcript_id "CUFF.50379.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	6598001	6598027	1000	.	.	gene_id "CUFF.50379"; transcript_id "CUFF.50379.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	6601936	6601990	1000	.	.	gene_id "CUFF.50381"; transcript_id "CUFF.50381.1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr13	Cufflinks	exon	6601936	6601990	1000	.	.	gene_id "CUFF.50381"; transcript_id "CUFF.50381.1"; exon_number "1"; FPKM "15.3622799272"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.087825"; cov "0.981818";
+chr13	Cufflinks	transcript	6604226	6604297	1000	.	.	gene_id "CUFF.50383"; transcript_id "CUFF.50383.1"; FPKM "17.6026124166"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.928358"; cov "1.125000";
+chr13	Cufflinks	exon	6604226	6604297	1000	.	.	gene_id "CUFF.50383"; transcript_id "CUFF.50383.1"; exon_number "1"; FPKM "17.6026124166"; frac "1.000000"; conf_lo "0.000000"; conf_hi "37.928358"; cov "1.125000";
+chr13	Cufflinks	transcript	6616305	6616331	1000	.	.	gene_id "CUFF.50385"; transcript_id "CUFF.50385.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	exon	6616305	6616331	1000	.	.	gene_id "CUFF.50385"; transcript_id "CUFF.50385.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	transcript	6616841	6616921	1000	.	.	gene_id "CUFF.50387"; transcript_id "CUFF.50387.1"; FPKM "5.2155888642"; frac "1.000000"; conf_lo "0.000000"; conf_hi "15.646767"; cov "0.333333";
+chr13	Cufflinks	exon	6616841	6616921	1000	.	.	gene_id "CUFF.50387"; transcript_id "CUFF.50387.1"; exon_number "1"; FPKM "5.2155888642"; frac "1.000000"; conf_lo "0.000000"; conf_hi "15.646767"; cov "0.333333";
+chr13	Cufflinks	transcript	6617878	6617990	1000	.	.	gene_id "CUFF.50389"; transcript_id "CUFF.50389.1"; FPKM "11.2158238407"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.166742"; cov "0.716814";
+chr13	Cufflinks	exon	6617878	6617990	1000	.	.	gene_id "CUFF.50389"; transcript_id "CUFF.50389.1"; exon_number "1"; FPKM "11.2158238407"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.166742"; cov "0.716814";
+chr13	Cufflinks	transcript	6618127	6618156	1000	.	.	gene_id "CUFF.50391"; transcript_id "CUFF.50391.1"; FPKM "28.1641798665"; frac "1.000000"; conf_lo "0.000000"; conf_hi "67.994345"; cov "1.800000";
+chr13	Cufflinks	exon	6618127	6618156	1000	.	.	gene_id "CUFF.50391"; transcript_id "CUFF.50391.1"; exon_number "1"; FPKM "28.1641798665"; frac "1.000000"; conf_lo "0.000000"; conf_hi "67.994345"; cov "1.800000";
+chr13	Cufflinks	transcript	6618432	6618463	1000	.	.	gene_id "CUFF.50393"; transcript_id "CUFF.50393.1"; FPKM "26.4039186249"; frac "1.000000"; conf_lo "0.000000"; conf_hi "63.744698"; cov "1.687500";
+chr13	Cufflinks	exon	6618432	6618463	1000	.	.	gene_id "CUFF.50393"; transcript_id "CUFF.50393.1"; exon_number "1"; FPKM "26.4039186249"; frac "1.000000"; conf_lo "0.000000"; conf_hi "63.744698"; cov "1.687500";
+chr13	Cufflinks	transcript	6618765	6618809	1000	.	.	gene_id "CUFF.50395"; transcript_id "CUFF.50395.1"; FPKM "28.1641798665"; frac "1.000000"; conf_lo "0.000000"; conf_hi "60.685374"; cov "1.800000";
+chr13	Cufflinks	exon	6618765	6618809	1000	.	.	gene_id "CUFF.50395"; transcript_id "CUFF.50395.1"; exon_number "1"; FPKM "28.1641798665"; frac "1.000000"; conf_lo "0.000000"; conf_hi "60.685374"; cov "1.800000";
+chr13	Cufflinks	transcript	6620226	6620259	1000	.	.	gene_id "CUFF.50397"; transcript_id "CUFF.50397.1"; FPKM "24.8507469411"; frac "1.000000"; conf_lo "0.000000"; conf_hi "59.995010"; cov "1.588235";
+chr13	Cufflinks	exon	6620226	6620259	1000	.	.	gene_id "CUFF.50397"; transcript_id "CUFF.50397.1"; exon_number "1"; FPKM "24.8507469411"; frac "1.000000"; conf_lo "0.000000"; conf_hi "59.995010"; cov "1.588235";
+chr13	Cufflinks	transcript	6795860	6795886	1000	.	.	gene_id "CUFF.50399"; transcript_id "CUFF.50399.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	6795860	6795886	1000	.	.	gene_id "CUFF.50399"; transcript_id "CUFF.50399.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	7155940	7155966	1000	.	.	gene_id "CUFF.50401"; transcript_id "CUFF.50401.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	7155940	7155966	1000	.	.	gene_id "CUFF.50401"; transcript_id "CUFF.50401.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	7676033	7676123	1000	.	.	gene_id "CUFF.50403"; transcript_id "CUFF.50403.1"; FPKM "9.2848944615"; frac "1.000000"; conf_lo "0.000000"; conf_hi "22.415718"; cov "0.593407";
+chr13	Cufflinks	exon	7676033	7676123	1000	.	.	gene_id "CUFF.50403"; transcript_id "CUFF.50403.1"; exon_number "1"; FPKM "9.2848944615"; frac "1.000000"; conf_lo "0.000000"; conf_hi "22.415718"; cov "0.593407";
+chr13	Cufflinks	transcript	8202861	8202907	1000	.	.	gene_id "CUFF.50405"; transcript_id "CUFF.50405.1"; FPKM "17.9771360850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "43.400646"; cov "1.148936";
+chr13	Cufflinks	exon	8202861	8202907	1000	.	.	gene_id "CUFF.50405"; transcript_id "CUFF.50405.1"; exon_number "1"; FPKM "17.9771360850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "43.400646"; cov "1.148936";
+chr13	Cufflinks	transcript	8210506	8210549	1000	.	.	gene_id "CUFF.50407"; transcript_id "CUFF.50407.1"; FPKM "19.2028499090"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.359781"; cov "1.227273";
+chr13	Cufflinks	exon	8210506	8210549	1000	.	.	gene_id "CUFF.50407"; transcript_id "CUFF.50407.1"; exon_number "1"; FPKM "19.2028499090"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.359781"; cov "1.227273";
+chr13	Cufflinks	transcript	8240024	8240081	1000	.	.	gene_id "CUFF.50409"; transcript_id "CUFF.50409.1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
+chr13	Cufflinks	exon	8240024	8240081	1000	.	.	gene_id "CUFF.50409"; transcript_id "CUFF.50409.1"; exon_number "1"; FPKM "14.5676792413"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.169489"; cov "0.931034";
+chr13	Cufflinks	transcript	8277443	8277522	1000	.	.	gene_id "CUFF.50411"; transcript_id "CUFF.50411.1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr13	Cufflinks	exon	8277443	8277522	1000	.	.	gene_id "CUFF.50411"; transcript_id "CUFF.50411.1"; exon_number "1"; FPKM "10.5615674500"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.497879"; cov "0.675000";
+chr13	Cufflinks	transcript	8277606	8277673	1000	.	.	gene_id "CUFF.50413"; transcript_id "CUFF.50413.1"; FPKM "24.8507469411"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.701494"; cov "1.588235";
+chr13	Cufflinks	exon	8277606	8277673	1000	.	.	gene_id "CUFF.50413"; transcript_id "CUFF.50413.1"; exon_number "1"; FPKM "24.8507469411"; frac "1.000000"; conf_lo "0.000000"; conf_hi "49.701494"; cov "1.588235";
+chr13	Cufflinks	transcript	8277822	8277848	1000	.	.	gene_id "CUFF.50415"; transcript_id "CUFF.50415.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8277822	8277848	1000	.	.	gene_id "CUFF.50415"; transcript_id "CUFF.50415.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8277918	8277977	1000	.	.	gene_id "CUFF.50417"; transcript_id "CUFF.50417.1"; FPKM "21.1231348999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.514030"; cov "1.350000";
+chr13	Cufflinks	exon	8277918	8277977	1000	.	.	gene_id "CUFF.50417"; transcript_id "CUFF.50417.1"; exon_number "1"; FPKM "21.1231348999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.514030"; cov "1.350000";
+chr13	Cufflinks	transcript	8278095	8278121	1000	.	.	gene_id "CUFF.50419"; transcript_id "CUFF.50419.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8278095	8278121	1000	.	.	gene_id "CUFF.50419"; transcript_id "CUFF.50419.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8278201	8278350	1000	.	.	gene_id "CUFF.50421"; transcript_id "CUFF.50421.1"; FPKM "14.0820899333"; frac "1.000000"; conf_lo "1.486686"; conf_hi "26.677494"; cov "0.900000";
+chr13	Cufflinks	exon	8278201	8278350	1000	.	.	gene_id "CUFF.50421"; transcript_id "CUFF.50421.1"; exon_number "1"; FPKM "14.0820899333"; frac "1.000000"; conf_lo "1.486686"; conf_hi "26.677494"; cov "0.900000";
+chr13	Cufflinks	transcript	8278906	8278932	1000	.	.	gene_id "CUFF.50423"; transcript_id "CUFF.50423.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	exon	8278906	8278932	1000	.	.	gene_id "CUFF.50423"; transcript_id "CUFF.50423.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	transcript	8281673	8281699	1000	.	.	gene_id "CUFF.50425"; transcript_id "CUFF.50425.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8281673	8281699	1000	.	.	gene_id "CUFF.50425"; transcript_id "CUFF.50425.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8311626	8311652	1000	.	.	gene_id "CUFF.50427"; transcript_id "CUFF.50427.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8311626	8311652	1000	.	.	gene_id "CUFF.50427"; transcript_id "CUFF.50427.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8321948	8321974	1000	.	.	gene_id "CUFF.50429"; transcript_id "CUFF.50429.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8321948	8321974	1000	.	.	gene_id "CUFF.50429"; transcript_id "CUFF.50429.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8330761	8330829	1000	.	.	gene_id "CUFF.50431"; transcript_id "CUFF.50431.1"; FPKM "12.2452955941"; frac "1.000000"; conf_lo "0.000000"; conf_hi "29.562759"; cov "0.782609";
+chr13	Cufflinks	exon	8330761	8330829	1000	.	.	gene_id "CUFF.50431"; transcript_id "CUFF.50431.1"; exon_number "1"; FPKM "12.2452955941"; frac "1.000000"; conf_lo "0.000000"; conf_hi "29.562759"; cov "0.782609";
+chr13	Cufflinks	transcript	8334495	8335002	1000	.	.	gene_id "CUFF.50433"; transcript_id "CUFF.50433.1"; FPKM "24.1169650432"; frac "1.000000"; conf_lo "15.160149"; conf_hi "33.073781"; cov "1.541339";
+chr13	Cufflinks	exon	8334495	8335002	1000	.	.	gene_id "CUFF.50433"; transcript_id "CUFF.50433.1"; exon_number "1"; FPKM "24.1169650432"; frac "1.000000"; conf_lo "15.160149"; conf_hi "33.073781"; cov "1.541339";
+chr13	Cufflinks	transcript	8335517	8335639	1000	.	.	gene_id "CUFF.50435"; transcript_id "CUFF.50435.1"; FPKM "13.7386243251"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.477249"; cov "0.878049";
+chr13	Cufflinks	exon	8335517	8335639	1000	.	.	gene_id "CUFF.50435"; transcript_id "CUFF.50435.1"; exon_number "1"; FPKM "13.7386243251"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.477249"; cov "0.878049";
+chr13	Cufflinks	transcript	8390965	8390991	1000	.	.	gene_id "CUFF.50437"; transcript_id "CUFF.50437.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8390965	8390991	1000	.	.	gene_id "CUFF.50437"; transcript_id "CUFF.50437.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8431938	8432046	1000	.	.	gene_id "CUFF.50439"; transcript_id "CUFF.50439.1"; FPKM "15.5032182752"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.006437"; cov "0.990826";
+chr13	Cufflinks	exon	8431938	8432046	1000	.	.	gene_id "CUFF.50439"; transcript_id "CUFF.50439.1"; exon_number "1"; FPKM "15.5032182752"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.006437"; cov "0.990826";
+chr13	Cufflinks	transcript	8431688	8431754	1000	.	.	gene_id "CUFF.50441"; transcript_id "CUFF.50441.1"; FPKM "12.6108268059"; frac "1.000000"; conf_lo "0.000000"; conf_hi "30.445229"; cov "0.805970";
+chr13	Cufflinks	exon	8431688	8431754	1000	.	.	gene_id "CUFF.50441"; transcript_id "CUFF.50441.1"; exon_number "1"; FPKM "12.6108268059"; frac "1.000000"; conf_lo "0.000000"; conf_hi "30.445229"; cov "0.805970";
+chr13	Cufflinks	transcript	8432289	8432315	1000	.	.	gene_id "CUFF.50443"; transcript_id "CUFF.50443.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	8432289	8432315	1000	.	.	gene_id "CUFF.50443"; transcript_id "CUFF.50443.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	8432115	8432188	1000	.	.	gene_id "CUFF.50445"; transcript_id "CUFF.50445.1"; FPKM "11.4179107567"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.565275"; cov "0.729730";
+chr13	Cufflinks	exon	8432115	8432188	1000	.	.	gene_id "CUFF.50445"; transcript_id "CUFF.50445.1"; exon_number "1"; FPKM "11.4179107567"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.565275"; cov "0.729730";
+chr13	Cufflinks	transcript	8463173	8463199	1000	.	.	gene_id "CUFF.50447"; transcript_id "CUFF.50447.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8463173	8463199	1000	.	.	gene_id "CUFF.50447"; transcript_id "CUFF.50447.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8482167	8482193	1000	.	.	gene_id "CUFF.50449"; transcript_id "CUFF.50449.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8482167	8482193	1000	.	.	gene_id "CUFF.50449"; transcript_id "CUFF.50449.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8518188	8518214	1000	.	.	gene_id "CUFF.50451"; transcript_id "CUFF.50451.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8518188	8518214	1000	.	.	gene_id "CUFF.50451"; transcript_id "CUFF.50451.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8619978	8620005	1000	.	.	gene_id "CUFF.50453"; transcript_id "CUFF.50453.1"; FPKM "30.1759069999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "72.851084"; cov "1.928571";
+chr13	Cufflinks	exon	8619978	8620005	1000	.	.	gene_id "CUFF.50453"; transcript_id "CUFF.50453.1"; exon_number "1"; FPKM "30.1759069999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "72.851084"; cov "1.928571";
+chr13	Cufflinks	transcript	8669464	8669490	1000	.	.	gene_id "CUFF.50455"; transcript_id "CUFF.50455.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8669464	8669490	1000	.	.	gene_id "CUFF.50455"; transcript_id "CUFF.50455.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8705396	8705459	1000	.	.	gene_id "CUFF.50457"; transcript_id "CUFF.50457.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr13	Cufflinks	exon	8705396	8705459	1000	.	.	gene_id "CUFF.50457"; transcript_id "CUFF.50457.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr13	Cufflinks	transcript	8719319	8719345	1000	.	.	gene_id "CUFF.50459"; transcript_id "CUFF.50459.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8719319	8719345	1000	.	.	gene_id "CUFF.50459"; transcript_id "CUFF.50459.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8766868	8767005	1000	.	.	gene_id "CUFF.50461"; transcript_id "CUFF.50461.1"; FPKM "12.2452955941"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.490591"; cov "0.782609";
+chr13	Cufflinks	exon	8766868	8767005	1000	.	.	gene_id "CUFF.50461"; transcript_id "CUFF.50461.1"; exon_number "1"; FPKM "12.2452955941"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.490591"; cov "0.782609";
+chr13	Cufflinks	transcript	8767194	8767393	1000	.	.	gene_id "CUFF.50463"; transcript_id "CUFF.50463.1"; FPKM "12.6738809399"; frac "1.000000"; conf_lo "2.325700"; conf_hi "23.022061"; cov "0.810000";
+chr13	Cufflinks	exon	8767194	8767393	1000	.	.	gene_id "CUFF.50463"; transcript_id "CUFF.50463.1"; exon_number "1"; FPKM "12.6738809399"; frac "1.000000"; conf_lo "2.325700"; conf_hi "23.022061"; cov "0.810000";
+chr13	Cufflinks	transcript	8767461	8767531	1000	.	.	gene_id "CUFF.50465"; transcript_id "CUFF.50465.1"; FPKM "17.8505365351"; frac "1.000000"; conf_lo "0.000000"; conf_hi "38.462561"; cov "1.140845";
+chr13	Cufflinks	exon	8767461	8767531	1000	.	.	gene_id "CUFF.50465"; transcript_id "CUFF.50465.1"; exon_number "1"; FPKM "17.8505365351"; frac "1.000000"; conf_lo "0.000000"; conf_hi "38.462561"; cov "1.140845";
+chr13	Cufflinks	transcript	8767695	8767885	1000	.	.	gene_id "CUFF.50467"; transcript_id "CUFF.50467.1"; FPKM "17.6947726910"; frac "1.000000"; conf_lo "5.182679"; conf_hi "30.206866"; cov "1.130890";
+chr13	Cufflinks	exon	8767695	8767885	1000	.	.	gene_id "CUFF.50467"; transcript_id "CUFF.50467.1"; exon_number "1"; FPKM "17.6947726910"; frac "1.000000"; conf_lo "5.182679"; conf_hi "30.206866"; cov "1.130890";
+chr13	Cufflinks	transcript	8767947	8767992	1000	.	.	gene_id "CUFF.50469"; transcript_id "CUFF.50469.1"; FPKM "27.5519150868"; frac "1.000000"; conf_lo "0.000000"; conf_hi "59.366126"; cov "1.760870";
+chr13	Cufflinks	exon	8767947	8767992	1000	.	.	gene_id "CUFF.50469"; transcript_id "CUFF.50469.1"; exon_number "1"; FPKM "27.5519150868"; frac "1.000000"; conf_lo "0.000000"; conf_hi "59.366126"; cov "1.760870";
+chr13	Cufflinks	transcript	8784118	8784193	1000	.	.	gene_id "CUFF.50471"; transcript_id "CUFF.50471.1"; FPKM "16.6761591315"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.932129"; cov "1.065789";
+chr13	Cufflinks	exon	8784118	8784193	1000	.	.	gene_id "CUFF.50471"; transcript_id "CUFF.50471.1"; exon_number "1"; FPKM "16.6761591315"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.932129"; cov "1.065789";
+chr13	Cufflinks	transcript	8802391	8802417	1000	.	.	gene_id "CUFF.50473"; transcript_id "CUFF.50473.1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr13	Cufflinks	exon	8802391	8802417	1000	.	.	gene_id "CUFF.50473"; transcript_id "CUFF.50473.1"; exon_number "1"; FPKM "109.5273661476"; frac "1.000000"; conf_lo "26.732460"; conf_hi "192.322273"; cov "7.000000";
+chr13	Cufflinks	transcript	8802581	8802610	1000	.	.	gene_id "CUFF.50475"; transcript_id "CUFF.50475.1"; FPKM "154.9029892659"; frac "1.000000"; conf_lo "61.492972"; conf_hi "248.313006"; cov "9.900000";
+chr13	Cufflinks	exon	8802581	8802610	1000	.	.	gene_id "CUFF.50475"; transcript_id "CUFF.50475.1"; exon_number "1"; FPKM "154.9029892659"; frac "1.000000"; conf_lo "61.492972"; conf_hi "248.313006"; cov "9.900000";
+chr13	Cufflinks	transcript	8803098	8803283	1000	.	.	gene_id "CUFF.50477"; transcript_id "CUFF.50477.1"; FPKM "18.1704386236"; frac "1.000000"; conf_lo "5.321998"; conf_hi "31.018879"; cov "1.161290";
+chr13	Cufflinks	exon	8803098	8803283	1000	.	.	gene_id "CUFF.50477"; transcript_id "CUFF.50477.1"; exon_number "1"; FPKM "18.1704386236"; frac "1.000000"; conf_lo "5.321998"; conf_hi "31.018879"; cov "1.161290";
+chr13	Cufflinks	transcript	8803340	8803703	1000	.	.	gene_id "CUFF.50479"; transcript_id "CUFF.50479.1"; FPKM "12.7584623804"; frac "1.000000"; conf_lo "5.062328"; conf_hi "20.454597"; cov "0.815406";
+chr13	Cufflinks	exon	8803340	8803703	1000	.	.	gene_id "CUFF.50479"; transcript_id "CUFF.50479.1"; exon_number "1"; FPKM "12.7584623804"; frac "1.000000"; conf_lo "5.062328"; conf_hi "20.454597"; cov "0.815406";
+chr13	Cufflinks	transcript	8803760	8819743	1000	+	.	gene_id "CUFF.50481"; transcript_id "CUFF.50481.1"; FPKM "15.1783005269"; frac "1.000000"; conf_lo "2.785270"; conf_hi "27.571331"; cov "0.970060";
+chr13	Cufflinks	exon	8803760	8803879	1000	+	.	gene_id "CUFF.50481"; transcript_id "CUFF.50481.1"; exon_number "1"; FPKM "15.1783005269"; frac "1.000000"; conf_lo "2.785270"; conf_hi "27.571331"; cov "0.970060";
+chr13	Cufflinks	exon	8819697	8819743	1000	+	.	gene_id "CUFF.50481"; transcript_id "CUFF.50481.1"; exon_number "2"; FPKM "15.1783005269"; frac "1.000000"; conf_lo "2.785270"; conf_hi "27.571331"; cov "0.970060";
+chr13	Cufflinks	transcript	8819122	8819153	1000	.	.	gene_id "CUFF.50483"; transcript_id "CUFF.50483.1"; FPKM "26.4039186249"; frac "1.000000"; conf_lo "0.000000"; conf_hi "63.744698"; cov "1.687500";
+chr13	Cufflinks	exon	8819122	8819153	1000	.	.	gene_id "CUFF.50483"; transcript_id "CUFF.50483.1"; exon_number "1"; FPKM "26.4039186249"; frac "1.000000"; conf_lo "0.000000"; conf_hi "63.744698"; cov "1.687500";
+chr13	Cufflinks	transcript	8831114	8831142	1000	.	.	gene_id "CUFF.50485"; transcript_id "CUFF.50485.1"; FPKM "29.1353584826"; frac "1.000000"; conf_lo "0.000000"; conf_hi "70.338978"; cov "1.862069";
+chr13	Cufflinks	exon	8831114	8831142	1000	.	.	gene_id "CUFF.50485"; transcript_id "CUFF.50485.1"; exon_number "1"; FPKM "29.1353584826"; frac "1.000000"; conf_lo "0.000000"; conf_hi "70.338978"; cov "1.862069";
+chr13	Cufflinks	transcript	8831216	8831252	1000	.	.	gene_id "CUFF.50487"; transcript_id "CUFF.50487.1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr13	Cufflinks	exon	8831216	8831252	1000	.	.	gene_id "CUFF.50487"; transcript_id "CUFF.50487.1"; exon_number "1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr13	Cufflinks	transcript	8831404	8831522	1000	.	.	gene_id "CUFF.50489"; transcript_id "CUFF.50489.1"; FPKM "17.7505335293"; frac "1.000000"; conf_lo "1.873974"; conf_hi "33.627093"; cov "1.134454";
+chr13	Cufflinks	exon	8831404	8831522	1000	.	.	gene_id "CUFF.50489"; transcript_id "CUFF.50489.1"; exon_number "1"; FPKM "17.7505335293"; frac "1.000000"; conf_lo "1.873974"; conf_hi "33.627093"; cov "1.134454";
+chr13	Cufflinks	transcript	8849862	8849935	1000	.	.	gene_id "CUFF.50491"; transcript_id "CUFF.50491.1"; FPKM "17.1268661351"; frac "1.000000"; conf_lo "0.000000"; conf_hi "36.903268"; cov "1.094595";
+chr13	Cufflinks	exon	8849862	8849935	1000	.	.	gene_id "CUFF.50491"; transcript_id "CUFF.50491.1"; exon_number "1"; FPKM "17.1268661351"; frac "1.000000"; conf_lo "0.000000"; conf_hi "36.903268"; cov "1.094595";
+chr13	Cufflinks	transcript	8850038	8850347	1000	.	.	gene_id "CUFF.50493"; transcript_id "CUFF.50493.1"; FPKM "9.5394802774"; frac "1.000000"; conf_lo "2.328311"; conf_hi "16.750650"; cov "0.609677";
+chr13	Cufflinks	exon	8850038	8850347	1000	.	.	gene_id "CUFF.50493"; transcript_id "CUFF.50493.1"; exon_number "1"; FPKM "9.5394802774"; frac "1.000000"; conf_lo "2.328311"; conf_hi "16.750650"; cov "0.609677";
+chr13	Cufflinks	transcript	8864952	8864979	1000	.	.	gene_id "CUFF.50495"; transcript_id "CUFF.50495.1"; FPKM "75.4397674996"; frac "1.000000"; conf_lo "7.964388"; conf_hi "142.915147"; cov "4.821429";
+chr13	Cufflinks	exon	8864952	8864979	1000	.	.	gene_id "CUFF.50495"; transcript_id "CUFF.50495.1"; exon_number "1"; FPKM "75.4397674996"; frac "1.000000"; conf_lo "7.964388"; conf_hi "142.915147"; cov "4.821429";
+chr13	Cufflinks	transcript	8855128	8864773	1000	-	.	gene_id "CUFF.50497"; transcript_id "CUFF.50497.1"; FPKM "6.4009499697"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.202850"; cov "0.409091";
+chr13	Cufflinks	exon	8855128	8855158	1000	-	.	gene_id "CUFF.50497"; transcript_id "CUFF.50497.1"; exon_number "1"; FPKM "6.4009499697"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.202850"; cov "0.409091";
+chr13	Cufflinks	exon	8864739	8864773	1000	-	.	gene_id "CUFF.50497"; transcript_id "CUFF.50497.1"; exon_number "2"; FPKM "6.4009499697"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.202850"; cov "0.409091";
+chr13	Cufflinks	transcript	8965678	8965704	1000	.	.	gene_id "CUFF.50499"; transcript_id "CUFF.50499.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	8965678	8965704	1000	.	.	gene_id "CUFF.50499"; transcript_id "CUFF.50499.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	8972036	8972065	1000	.	.	gene_id "CUFF.50501"; transcript_id "CUFF.50501.1"; FPKM "112.6567194661"; frac "1.000000"; conf_lo "32.996389"; conf_hi "192.317050"; cov "7.200000";
+chr13	Cufflinks	exon	8972036	8972065	1000	.	.	gene_id "CUFF.50501"; transcript_id "CUFF.50501.1"; exon_number "1"; FPKM "112.6567194661"; frac "1.000000"; conf_lo "32.996389"; conf_hi "192.317050"; cov "7.200000";
+chr13	Cufflinks	transcript	9133705	9133859	1000	.	.	gene_id "CUFF.50503"; transcript_id "CUFF.50503.1"; FPKM "8.1766973806"; frac "1.000000"; conf_lo "0.000000"; conf_hi "17.618334"; cov "0.522581";
+chr13	Cufflinks	exon	9133705	9133859	1000	.	.	gene_id "CUFF.50503"; transcript_id "CUFF.50503.1"; exon_number "1"; FPKM "8.1766973806"; frac "1.000000"; conf_lo "0.000000"; conf_hi "17.618334"; cov "0.522581";
+chr13	Cufflinks	transcript	9134178	9134256	1000	.	.	gene_id "CUFF.50505"; transcript_id "CUFF.50505.1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr13	Cufflinks	exon	9134178	9134256	1000	.	.	gene_id "CUFF.50505"; transcript_id "CUFF.50505.1"; exon_number "1"; FPKM "10.6952581772"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.820637"; cov "0.683544";
+chr13	Cufflinks	transcript	9272120	9272153	1000	.	.	gene_id "CUFF.50507"; transcript_id "CUFF.50507.1"; FPKM "24.8212432986"; frac "1.000000"; conf_lo "0.000000"; conf_hi "59.944638"; cov "1.586350";
+chr13	Cufflinks	exon	9272120	9272153	1000	.	.	gene_id "CUFF.50507"; transcript_id "CUFF.50507.1"; exon_number "1"; FPKM "24.8212432986"; frac "1.000000"; conf_lo "0.000000"; conf_hi "59.944638"; cov "1.586350";
+chr13	Cufflinks	transcript	9169898	9172437	1000	+	.	gene_id "CUFF.50509"; transcript_id "CUFF.50509.1"; FPKM "41.4918721248"; frac "1.000000"; conf_lo "16.471332"; conf_hi "66.512412"; cov "2.651786";
+chr13	Cufflinks	exon	9169898	9169928	1000	+	.	gene_id "CUFF.50509"; transcript_id "CUFF.50509.1"; exon_number "1"; FPKM "41.4918721248"; frac "1.000000"; conf_lo "16.471332"; conf_hi "66.512412"; cov "2.651786";
+chr13	Cufflinks	exon	9172357	9172437	1000	+	.	gene_id "CUFF.50509"; transcript_id "CUFF.50509.1"; exon_number "2"; FPKM "41.4918721248"; frac "1.000000"; conf_lo "16.471332"; conf_hi "66.512412"; cov "2.651786";
+chr13	Cufflinks	transcript	9171841	9172220	1000	.	.	gene_id "CUFF.50511"; transcript_id "CUFF.50511.1"; FPKM "108.9509063258"; frac "1.000000"; conf_lo "86.939499"; conf_hi "130.962313"; cov "6.963158";
+chr13	Cufflinks	exon	9171841	9172220	1000	.	.	gene_id "CUFF.50511"; transcript_id "CUFF.50511.1"; exon_number "1"; FPKM "108.9509063258"; frac "1.000000"; conf_lo "86.939499"; conf_hi "130.962313"; cov "6.963158";
+chr13	Cufflinks	transcript	9172647	9173652	1000	.	.	gene_id "CUFF.50513"; transcript_id "CUFF.50513.1"; FPKM "111.9143215254"; frac "1.000000"; conf_lo "98.203357"; conf_hi "125.625287"; cov "7.152553";
+chr13	Cufflinks	exon	9172647	9173652	1000	.	.	gene_id "CUFF.50513"; transcript_id "CUFF.50513.1"; exon_number "1"; FPKM "111.9143215254"; frac "1.000000"; conf_lo "98.203357"; conf_hi "125.625287"; cov "7.152553";
+chr13	Cufflinks	transcript	9277893	9277953	1000	.	.	gene_id "CUFF.50515"; transcript_id "CUFF.50515.1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+chr13	Cufflinks	exon	9277893	9277953	1000	.	.	gene_id "CUFF.50515"; transcript_id "CUFF.50515.1"; exon_number "1"; FPKM "13.8512359999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.439842"; cov "0.885246";
+chr13	Cufflinks	transcript	9278033	9278094	1000	.	.	gene_id "CUFF.50517"; transcript_id "CUFF.50517.1"; FPKM "13.6278289677"; frac "1.000000"; conf_lo "0.000000"; conf_hi "32.900490"; cov "0.870968";
+chr13	Cufflinks	exon	9278033	9278094	1000	.	.	gene_id "CUFF.50517"; transcript_id "CUFF.50517.1"; exon_number "1"; FPKM "13.6278289677"; frac "1.000000"; conf_lo "0.000000"; conf_hi "32.900490"; cov "0.870968";
+chr13	Cufflinks	transcript	9278482	9278551	1000	.	.	gene_id "CUFF.50519"; transcript_id "CUFF.50519.1"; FPKM "18.1055441999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.012026"; cov "1.157143";
+chr13	Cufflinks	exon	9278482	9278551	1000	.	.	gene_id "CUFF.50519"; transcript_id "CUFF.50519.1"; exon_number "1"; FPKM "18.1055441999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.012026"; cov "1.157143";
+chr13	Cufflinks	transcript	9278167	9278308	1000	.	.	gene_id "CUFF.50521"; transcript_id "CUFF.50521.1"; FPKM "17.8505365351"; frac "1.000000"; conf_lo "3.275634"; conf_hi "32.425439"; cov "1.140845";
+chr13	Cufflinks	exon	9278167	9278308	1000	.	.	gene_id "CUFF.50521"; transcript_id "CUFF.50521.1"; exon_number "1"; FPKM "17.8505365351"; frac "1.000000"; conf_lo "3.275634"; conf_hi "32.425439"; cov "1.140845";
+chr13	Cufflinks	transcript	9346823	9346849	1000	.	.	gene_id "CUFF.50523"; transcript_id "CUFF.50523.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9346823	9346849	1000	.	.	gene_id "CUFF.50523"; transcript_id "CUFF.50523.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9373600	9373693	1000	.	.	gene_id "CUFF.50525"; transcript_id "CUFF.50525.1"; FPKM "8.9885680425"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.700323"; cov "0.574468";
+chr13	Cufflinks	exon	9373600	9373693	1000	.	.	gene_id "CUFF.50525"; transcript_id "CUFF.50525.1"; exon_number "1"; FPKM "8.9885680425"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.700323"; cov "0.574468";
+chr13	Cufflinks	transcript	9353602	9373527	1000	-	.	gene_id "CUFF.50527"; transcript_id "CUFF.50527.1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.010792"; cov "1.038462";
+chr13	Cufflinks	exon	9353602	9353648	1000	-	.	gene_id "CUFF.50527"; transcript_id "CUFF.50527.1"; exon_number "1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.010792"; cov "1.038462";
+chr13	Cufflinks	exon	9373497	9373527	1000	-	.	gene_id "CUFF.50527"; transcript_id "CUFF.50527.1"; exon_number "2"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.010792"; cov "1.038462";
+chr13	Cufflinks	transcript	9386521	9386547	1000	.	.	gene_id "CUFF.50529"; transcript_id "CUFF.50529.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9386521	9386547	1000	.	.	gene_id "CUFF.50529"; transcript_id "CUFF.50529.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9391996	9392202	1000	.	.	gene_id "CUFF.50531"; transcript_id "CUFF.50531.1"; FPKM "12.2404495852"; frac "1.000000"; conf_lo "2.244186"; conf_hi "22.236713"; cov "0.782299";
+chr13	Cufflinks	exon	9391996	9392202	1000	.	.	gene_id "CUFF.50531"; transcript_id "CUFF.50531.1"; exon_number "1"; FPKM "12.2404495852"; frac "1.000000"; conf_lo "2.244186"; conf_hi "22.236713"; cov "0.782299";
+chr13	Cufflinks	transcript	9392422	9392467	1000	.	.	gene_id "CUFF.50533"; transcript_id "CUFF.50533.1"; FPKM "9.1839716956"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.551915"; cov "0.586957";
+chr13	Cufflinks	exon	9392422	9392467	1000	.	.	gene_id "CUFF.50533"; transcript_id "CUFF.50533.1"; exon_number "1"; FPKM "9.1839716956"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.551915"; cov "0.586957";
+chr13	Cufflinks	transcript	9392265	9392321	1000	.	.	gene_id "CUFF.50535"; transcript_id "CUFF.50535.1"; FPKM "14.8232525613"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.786497"; cov "0.947368";
+chr13	Cufflinks	exon	9392265	9392321	1000	.	.	gene_id "CUFF.50535"; transcript_id "CUFF.50535.1"; exon_number "1"; FPKM "14.8232525613"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.786497"; cov "0.947368";
+chr13	Cufflinks	transcript	9392577	9392603	1000	.	.	gene_id "CUFF.50537"; transcript_id "CUFF.50537.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9392577	9392603	1000	.	.	gene_id "CUFF.50537"; transcript_id "CUFF.50537.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9396631	9396825	1000	.	.	gene_id "CUFF.50539"; transcript_id "CUFF.50539.1"; FPKM "8.6659014974"; frac "1.000000"; conf_lo "0.000000"; conf_hi "17.331803"; cov "0.553846";
+chr13	Cufflinks	exon	9396631	9396825	1000	.	.	gene_id "CUFF.50539"; transcript_id "CUFF.50539.1"; exon_number "1"; FPKM "8.6659014974"; frac "1.000000"; conf_lo "0.000000"; conf_hi "17.331803"; cov "0.553846";
+chr13	Cufflinks	transcript	9397263	9397434	1000	.	.	gene_id "CUFF.50541"; transcript_id "CUFF.50541.1"; FPKM "17.1932493371"; frac "1.000000"; conf_lo "4.196374"; conf_hi "30.190124"; cov "1.098837";
+chr13	Cufflinks	exon	9397263	9397434	1000	.	.	gene_id "CUFF.50541"; transcript_id "CUFF.50541.1"; exon_number "1"; FPKM "17.1932493371"; frac "1.000000"; conf_lo "4.196374"; conf_hi "30.190124"; cov "1.098837";
+chr13	Cufflinks	transcript	9398210	9398294	1000	.	.	gene_id "CUFF.50543"; transcript_id "CUFF.50543.1"; FPKM "9.9402987764"; frac "1.000000"; conf_lo "0.000000"; conf_hi "23.998004"; cov "0.635294";
+chr13	Cufflinks	exon	9398210	9398294	1000	.	.	gene_id "CUFF.50543"; transcript_id "CUFF.50543.1"; exon_number "1"; FPKM "9.9402987764"; frac "1.000000"; conf_lo "0.000000"; conf_hi "23.998004"; cov "0.635294";
+chr13	Cufflinks	transcript	9406013	9406051	1000	.	.	gene_id "CUFF.50545"; transcript_id "CUFF.50545.1"; FPKM "10.8323768717"; frac "1.000000"; conf_lo "0.000000"; conf_hi "32.497131"; cov "0.692308";
+chr13	Cufflinks	exon	9406013	9406051	1000	.	.	gene_id "CUFF.50545"; transcript_id "CUFF.50545.1"; exon_number "1"; FPKM "10.8323768717"; frac "1.000000"; conf_lo "0.000000"; conf_hi "32.497131"; cov "0.692308";
+chr13	Cufflinks	transcript	9413644	9413670	1000	.	.	gene_id "CUFF.50547"; transcript_id "CUFF.50547.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9413644	9413670	1000	.	.	gene_id "CUFF.50547"; transcript_id "CUFF.50547.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9414053	9414143	1000	.	.	gene_id "CUFF.50549"; transcript_id "CUFF.50549.1"; FPKM "9.2848944615"; frac "1.000000"; conf_lo "0.000000"; conf_hi "22.415718"; cov "0.593407";
+chr13	Cufflinks	exon	9414053	9414143	1000	.	.	gene_id "CUFF.50549"; transcript_id "CUFF.50549.1"; exon_number "1"; FPKM "9.2848944615"; frac "1.000000"; conf_lo "0.000000"; conf_hi "22.415718"; cov "0.593407";
+chr13	Cufflinks	transcript	9415960	9416015	1000	.	.	gene_id "CUFF.50551"; transcript_id "CUFF.50551.1"; FPKM "15.0879534999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "36.425542"; cov "0.964286";
+chr13	Cufflinks	exon	9415960	9416015	1000	.	.	gene_id "CUFF.50551"; transcript_id "CUFF.50551.1"; exon_number "1"; FPKM "15.0879534999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "36.425542"; cov "0.964286";
+chr13	Cufflinks	transcript	9442325	9442351	1000	.	.	gene_id "CUFF.50553"; transcript_id "CUFF.50553.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9442325	9442351	1000	.	.	gene_id "CUFF.50553"; transcript_id "CUFF.50553.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9517895	9517921	1000	.	.	gene_id "CUFF.50555"; transcript_id "CUFF.50555.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9517895	9517921	1000	.	.	gene_id "CUFF.50555"; transcript_id "CUFF.50555.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9584154	9584180	1000	.	.	gene_id "CUFF.50558"; transcript_id "CUFF.50558.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	exon	9584154	9584180	1000	.	.	gene_id "CUFF.50558"; transcript_id "CUFF.50558.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	transcript	9583821	9583888	1000	.	.	gene_id "CUFF.50557"; transcript_id "CUFF.50557.1"; FPKM "12.4253734705"; frac "1.000000"; conf_lo "0.000000"; conf_hi "29.997505"; cov "0.794118";
+chr13	Cufflinks	exon	9583821	9583888	1000	.	.	gene_id "CUFF.50557"; transcript_id "CUFF.50557.1"; exon_number "1"; FPKM "12.4253734705"; frac "1.000000"; conf_lo "0.000000"; conf_hi "29.997505"; cov "0.794118";
+chr13	Cufflinks	transcript	9585768	9585937	1000	.	.	gene_id "CUFF.50561"; transcript_id "CUFF.50561.1"; FPKM "9.9402987764"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.880598"; cov "0.635294";
+chr13	Cufflinks	exon	9585768	9585937	1000	.	.	gene_id "CUFF.50561"; transcript_id "CUFF.50561.1"; exon_number "1"; FPKM "9.9402987764"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.880598"; cov "0.635294";
+chr13	Cufflinks	transcript	9586173	9593034	1000	-	.	gene_id "CUFF.50563"; transcript_id "CUFF.50563.1"; FPKM "10.3039682439"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.875980"; cov "0.658537";
+chr13	Cufflinks	exon	9586173	9586218	1000	-	.	gene_id "CUFF.50563"; transcript_id "CUFF.50563.1"; exon_number "1"; FPKM "10.3039682439"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.875980"; cov "0.658537";
+chr13	Cufflinks	exon	9592999	9593034	1000	-	.	gene_id "CUFF.50563"; transcript_id "CUFF.50563.1"; exon_number "2"; FPKM "10.3039682439"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.875980"; cov "0.658537";
+chr13	Cufflinks	transcript	9609217	9609243	1000	.	.	gene_id "CUFF.50566"; transcript_id "CUFF.50566.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9609217	9609243	1000	.	.	gene_id "CUFF.50566"; transcript_id "CUFF.50566.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9607682	9607717	1000	.	.	gene_id "CUFF.50565"; transcript_id "CUFF.50565.1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "56.661954"; cov "1.500000";
+chr13	Cufflinks	exon	9607682	9607717	1000	.	.	gene_id "CUFF.50565"; transcript_id "CUFF.50565.1"; exon_number "1"; FPKM "23.4701498888"; frac "1.000000"; conf_lo "0.000000"; conf_hi "56.661954"; cov "1.500000";
+chr13	Cufflinks	transcript	9678669	9678695	1000	.	.	gene_id "CUFF.50569"; transcript_id "CUFF.50569.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9678669	9678695	1000	.	.	gene_id "CUFF.50569"; transcript_id "CUFF.50569.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9667710	9667736	1000	.	.	gene_id "CUFF.50571"; transcript_id "CUFF.50571.1"; FPKM "140.7837465978"; frac "1.000000"; conf_lo "46.915532"; conf_hi "234.651961"; cov "8.997626";
+chr13	Cufflinks	exon	9667710	9667736	1000	.	.	gene_id "CUFF.50571"; transcript_id "CUFF.50571.1"; exon_number "1"; FPKM "140.7837465978"; frac "1.000000"; conf_lo "46.915532"; conf_hi "234.651961"; cov "8.997626";
+chr13	Cufflinks	transcript	9667815	9668061	1000	.	.	gene_id "CUFF.50573"; transcript_id "CUFF.50573.1"; FPKM "87.1763440808"; frac "1.000000"; conf_lo "62.754693"; conf_hi "111.597996"; cov "5.571525";
+chr13	Cufflinks	exon	9667815	9668061	1000	.	.	gene_id "CUFF.50573"; transcript_id "CUFF.50573.1"; exon_number "1"; FPKM "87.1763440808"; frac "1.000000"; conf_lo "62.754693"; conf_hi "111.597996"; cov "5.571525";
+chr13	Cufflinks	transcript	9668143	9668170	1000	.	.	gene_id "CUFF.50575"; transcript_id "CUFF.50575.1"; FPKM "82.8583537693"; frac "1.000000"; conf_lo "12.143066"; conf_hi "153.573642"; cov "5.295558";
+chr13	Cufflinks	exon	9668143	9668170	1000	.	.	gene_id "CUFF.50575"; transcript_id "CUFF.50575.1"; exon_number "1"; FPKM "82.8583537693"; frac "1.000000"; conf_lo "12.143066"; conf_hi "153.573642"; cov "5.295558";
+chr13	Cufflinks	transcript	9688931	9688970	1000	.	.	gene_id "CUFF.50577"; transcript_id "CUFF.50577.1"; FPKM "21.1231348999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.995759"; cov "1.350000";
+chr13	Cufflinks	exon	9688931	9688970	1000	.	.	gene_id "CUFF.50577"; transcript_id "CUFF.50577.1"; exon_number "1"; FPKM "21.1231348999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "50.995759"; cov "1.350000";
+chr13	Cufflinks	transcript	9684078	9685570	1000	.	.	gene_id "CUFF.50579"; transcript_id "CUFF.50579.1"; FPKM "107.1082431700"; frac "1.000000"; conf_lo "96.097777"; conf_hi "118.118710"; cov "6.845392";
+chr13	Cufflinks	exon	9684078	9685570	1000	.	.	gene_id "CUFF.50579"; transcript_id "CUFF.50579.1"; exon_number "1"; FPKM "107.1082431700"; frac "1.000000"; conf_lo "96.097777"; conf_hi "118.118710"; cov "6.845392";
+chr13	Cufflinks	transcript	9690151	9690234	1000	.	.	gene_id "CUFF.50581"; transcript_id "CUFF.50581.1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr13	Cufflinks	exon	9690151	9690234	1000	.	.	gene_id "CUFF.50581"; transcript_id "CUFF.50581.1"; exon_number "1"; FPKM "10.0586356666"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.283695"; cov "0.642857";
+chr13	Cufflinks	transcript	9694461	9694537	1000	.	.	gene_id "CUFF.50583"; transcript_id "CUFF.50583.1"; FPKM "16.4595856363"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.465478"; cov "1.051948";
+chr13	Cufflinks	exon	9694461	9694537	1000	.	.	gene_id "CUFF.50583"; transcript_id "CUFF.50583.1"; exon_number "1"; FPKM "16.4595856363"; frac "1.000000"; conf_lo "0.000000"; conf_hi "35.465478"; cov "1.051948";
+chr13	Cufflinks	transcript	9696900	9696976	1000	.	.	gene_id "CUFF.50585"; transcript_id "CUFF.50585.1"; FPKM "10.9730570909"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.491303"; cov "0.701299";
+chr13	Cufflinks	exon	9696900	9696976	1000	.	.	gene_id "CUFF.50585"; transcript_id "CUFF.50585.1"; exon_number "1"; FPKM "10.9730570909"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.491303"; cov "0.701299";
+chr13	Cufflinks	transcript	9725686	9725787	1000	.	.	gene_id "CUFF.50587"; transcript_id "CUFF.50587.1"; FPKM "12.4253734705"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.772959"; cov "0.794118";
+chr13	Cufflinks	exon	9725686	9725787	1000	.	.	gene_id "CUFF.50587"; transcript_id "CUFF.50587.1"; exon_number "1"; FPKM "12.4253734705"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.772959"; cov "0.794118";
+chr13	Cufflinks	transcript	9725935	9726047	1000	.	.	gene_id "CUFF.50589"; transcript_id "CUFF.50589.1"; FPKM "11.2158238407"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.166742"; cov "0.716814";
+chr13	Cufflinks	exon	9725935	9726047	1000	.	.	gene_id "CUFF.50589"; transcript_id "CUFF.50589.1"; exon_number "1"; FPKM "11.2158238407"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.166742"; cov "0.716814";
+chr13	Cufflinks	transcript	9739796	9739868	1000	.	.	gene_id "CUFF.50591"; transcript_id "CUFF.50591.1"; FPKM "11.5743204931"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.942882"; cov "0.739726";
+chr13	Cufflinks	exon	9739796	9739868	1000	.	.	gene_id "CUFF.50591"; transcript_id "CUFF.50591.1"; exon_number "1"; FPKM "11.5743204931"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.942882"; cov "0.739726";
+chr13	Cufflinks	transcript	9740164	9740202	1000	.	.	gene_id "CUFF.50593"; transcript_id "CUFF.50593.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr13	Cufflinks	exon	9740164	9740202	1000	.	.	gene_id "CUFF.50593"; transcript_id "CUFF.50593.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr13	Cufflinks	transcript	9740296	9740330	1000	.	.	gene_id "CUFF.50595"; transcript_id "CUFF.50595.1"; FPKM "48.2814511998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "96.562902"; cov "3.085714";
+chr13	Cufflinks	exon	9740296	9740330	1000	.	.	gene_id "CUFF.50595"; transcript_id "CUFF.50595.1"; exon_number "1"; FPKM "48.2814511998"; frac "1.000000"; conf_lo "0.000000"; conf_hi "96.562902"; cov "3.085714";
+chr13	Cufflinks	transcript	9741046	9741127	1000	.	.	gene_id "CUFF.50597"; transcript_id "CUFF.50597.1"; FPKM "10.3039682439"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.875980"; cov "0.658537";
+chr13	Cufflinks	exon	9741046	9741127	1000	.	.	gene_id "CUFF.50597"; transcript_id "CUFF.50597.1"; exon_number "1"; FPKM "10.3039682439"; frac "1.000000"; conf_lo "0.000000"; conf_hi "24.875980"; cov "0.658537";
+chr13	Cufflinks	transcript	9741590	9741694	1000	.	.	gene_id "CUFF.50599"; transcript_id "CUFF.50599.1"; FPKM "12.0703627999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.008017"; cov "0.771429";
+chr13	Cufflinks	exon	9741590	9741694	1000	.	.	gene_id "CUFF.50599"; transcript_id "CUFF.50599.1"; exon_number "1"; FPKM "12.0703627999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "26.008017"; cov "0.771429";
+chr13	Cufflinks	transcript	9741399	9741517	1000	.	.	gene_id "CUFF.50601"; transcript_id "CUFF.50601.1"; FPKM "24.8507469411"; frac "1.000000"; conf_lo "6.065348"; conf_hi "43.636146"; cov "1.588235";
+chr13	Cufflinks	exon	9741399	9741517	1000	.	.	gene_id "CUFF.50601"; transcript_id "CUFF.50601.1"; exon_number "1"; FPKM "24.8507469411"; frac "1.000000"; conf_lo "6.065348"; conf_hi "43.636146"; cov "1.588235";
+chr13	Cufflinks	transcript	9868979	9869072	1000	.	.	gene_id "CUFF.50603"; transcript_id "CUFF.50603.1"; FPKM "13.4828520638"; frac "1.000000"; conf_lo "0.000000"; conf_hi "29.051509"; cov "0.861702";
+chr13	Cufflinks	exon	9868979	9869072	1000	.	.	gene_id "CUFF.50603"; transcript_id "CUFF.50603.1"; exon_number "1"; FPKM "13.4828520638"; frac "1.000000"; conf_lo "0.000000"; conf_hi "29.051509"; cov "0.861702";
+chr13	Cufflinks	transcript	9872853	9872934	1000	.	.	gene_id "CUFF.50605"; transcript_id "CUFF.50605.1"; FPKM "15.4559523658"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.302949"; cov "0.987805";
+chr13	Cufflinks	exon	9872853	9872934	1000	.	.	gene_id "CUFF.50605"; transcript_id "CUFF.50605.1"; exon_number "1"; FPKM "15.4559523658"; frac "1.000000"; conf_lo "0.000000"; conf_hi "33.302949"; cov "0.987805";
+chr13	Cufflinks	transcript	9874731	9874997	1000	.	.	gene_id "CUFF.50607"; transcript_id "CUFF.50607.1"; FPKM "12.6580583670"; frac "1.000000"; conf_lo "3.707459"; conf_hi "21.608657"; cov "0.808989";
+chr13	Cufflinks	exon	9874731	9874997	1000	.	.	gene_id "CUFF.50607"; transcript_id "CUFF.50607.1"; exon_number "1"; FPKM "12.6580583670"; frac "1.000000"; conf_lo "3.707459"; conf_hi "21.608657"; cov "0.808989";
+chr13	Cufflinks	transcript	9875128	9875201	1000	.	.	gene_id "CUFF.50609"; transcript_id "CUFF.50609.1"; FPKM "22.8358215134"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.671643"; cov "1.459459";
+chr13	Cufflinks	exon	9875128	9875201	1000	.	.	gene_id "CUFF.50609"; transcript_id "CUFF.50609.1"; exon_number "1"; FPKM "22.8358215134"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.671643"; cov "1.459459";
+chr13	Cufflinks	transcript	9875323	9875349	1000	.	.	gene_id "CUFF.50611"; transcript_id "CUFF.50611.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	9875323	9875349	1000	.	.	gene_id "CUFF.50611"; transcript_id "CUFF.50611.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	9875425	9875480	1000	.	.	gene_id "CUFF.50613"; transcript_id "CUFF.50613.1"; FPKM "15.0879534999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "36.425542"; cov "0.964286";
+chr13	Cufflinks	exon	9875425	9875480	1000	.	.	gene_id "CUFF.50613"; transcript_id "CUFF.50613.1"; exon_number "1"; FPKM "15.0879534999"; frac "1.000000"; conf_lo "0.000000"; conf_hi "36.425542"; cov "0.964286";
+chr13	Cufflinks	transcript	9876121	9876172	1000	.	.	gene_id "CUFF.50615"; transcript_id "CUFF.50615.1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr13	Cufflinks	exon	9876121	9876172	1000	.	.	gene_id "CUFF.50615"; transcript_id "CUFF.50615.1"; exon_number "1"; FPKM "16.2485653076"; frac "1.000000"; conf_lo "0.000000"; conf_hi "39.227507"; cov "1.038462";
+chr13	Cufflinks	transcript	9969155	9969237	1000	.	.	gene_id "CUFF.50617"; transcript_id "CUFF.50617.1"; FPKM "8.6756763125"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.966038"; cov "0.554471";
+chr13	Cufflinks	exon	9969155	9969237	1000	.	.	gene_id "CUFF.50617"; transcript_id "CUFF.50617.1"; exon_number "1"; FPKM "8.6756763125"; frac "1.000000"; conf_lo "0.000000"; conf_hi "21.966038"; cov "0.554471";
+chr13	Cufflinks	transcript	9986765	9986791	1000	.	.	gene_id "CUFF.50619"; transcript_id "CUFF.50619.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	9986765	9986791	1000	.	.	gene_id "CUFF.50619"; transcript_id "CUFF.50619.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	9987242	9987567	1000	.	.	gene_id "CUFF.50621"; transcript_id "CUFF.50621.1"; FPKM "10.3671827730"; frac "1.000000"; conf_lo "3.036478"; conf_hi "17.697888"; cov "0.662577";
+chr13	Cufflinks	exon	9987242	9987567	1000	.	.	gene_id "CUFF.50621"; transcript_id "CUFF.50621.1"; exon_number "1"; FPKM "10.3671827730"; frac "1.000000"; conf_lo "3.036478"; conf_hi "17.697888"; cov "0.662577";
+chr13	Cufflinks	transcript	10010160	10010265	1000	.	.	gene_id "CUFF.50623"; transcript_id "CUFF.50623.1"; FPKM "11.9564914528"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.762659"; cov "0.764151";
+chr13	Cufflinks	exon	10010160	10010265	1000	.	.	gene_id "CUFF.50623"; transcript_id "CUFF.50623.1"; exon_number "1"; FPKM "11.9564914528"; frac "1.000000"; conf_lo "0.000000"; conf_hi "25.762659"; cov "0.764151";
+chr13	Cufflinks	transcript	10010497	10010523	1000	.	.	gene_id "CUFF.50625"; transcript_id "CUFF.50625.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	10010497	10010523	1000	.	.	gene_id "CUFF.50625"; transcript_id "CUFF.50625.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	10012021	10012167	1000	.	.	gene_id "CUFF.50627"; transcript_id "CUFF.50627.1"; FPKM "11.4955836190"; frac "1.000000"; conf_lo "0.000000"; conf_hi "22.991167"; cov "0.734694";
+chr13	Cufflinks	exon	10012021	10012167	1000	.	.	gene_id "CUFF.50627"; transcript_id "CUFF.50627.1"; exon_number "1"; FPKM "11.4955836190"; frac "1.000000"; conf_lo "0.000000"; conf_hi "22.991167"; cov "0.734694";
+chr13	Cufflinks	transcript	10019657	10019683	1000	.	.	gene_id "CUFF.50629"; transcript_id "CUFF.50629.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	exon	10019657	10019683	1000	.	.	gene_id "CUFF.50629"; transcript_id "CUFF.50629.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	transcript	10024965	10025028	1000	.	.	gene_id "CUFF.50631"; transcript_id "CUFF.50631.1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr13	Cufflinks	exon	10024965	10025028	1000	.	.	gene_id "CUFF.50631"; transcript_id "CUFF.50631.1"; exon_number "1"; FPKM "13.2019593124"; frac "1.000000"; conf_lo "0.000000"; conf_hi "31.872349"; cov "0.843750";
+chr13	Cufflinks	transcript	10082104	10082206	1000	.	.	gene_id "CUFF.50633"; transcript_id "CUFF.50633.1"; FPKM "8.2031591844"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.804178"; cov "0.524272";
+chr13	Cufflinks	exon	10082104	10082206	1000	.	.	gene_id "CUFF.50633"; transcript_id "CUFF.50633.1"; exon_number "1"; FPKM "8.2031591844"; frac "1.000000"; conf_lo "0.000000"; conf_hi "19.804178"; cov "0.524272";
+chr13	Cufflinks	transcript	10086419	10086446	1000	.	.	gene_id "CUFF.50635"; transcript_id "CUFF.50635.1"; FPKM "60.3518139997"; frac "1.000000"; conf_lo "0.000000"; conf_hi "120.703628"; cov "3.857143";
+chr13	Cufflinks	exon	10086419	10086446	1000	.	.	gene_id "CUFF.50635"; transcript_id "CUFF.50635.1"; exon_number "1"; FPKM "60.3518139997"; frac "1.000000"; conf_lo "0.000000"; conf_hi "120.703628"; cov "3.857143";
+chr13	Cufflinks	transcript	10086886	10086930	1000	.	.	gene_id "CUFF.50637"; transcript_id "CUFF.50637.1"; FPKM "18.7761199110"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.329563"; cov "1.200000";
+chr13	Cufflinks	exon	10086886	10086930	1000	.	.	gene_id "CUFF.50637"; transcript_id "CUFF.50637.1"; exon_number "1"; FPKM "18.7761199110"; frac "1.000000"; conf_lo "0.000000"; conf_hi "45.329563"; cov "1.200000";
+chr13	Cufflinks	transcript	10096818	10096844	1000	.	.	gene_id "CUFF.50639"; transcript_id "CUFF.50639.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	10096818	10096844	1000	.	.	gene_id "CUFF.50639"; transcript_id "CUFF.50639.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	10111271	10111358	1000	.	.	gene_id "CUFF.50641"; transcript_id "CUFF.50641.1"; FPKM "9.6014249545"; frac "1.000000"; conf_lo "0.000000"; conf_hi "23.179890"; cov "0.613636";
+chr13	Cufflinks	exon	10111271	10111358	1000	.	.	gene_id "CUFF.50641"; transcript_id "CUFF.50641.1"; exon_number "1"; FPKM "9.6014249545"; frac "1.000000"; conf_lo "0.000000"; conf_hi "23.179890"; cov "0.613636";
+chr13	Cufflinks	transcript	10182192	10182228	1000	.	.	gene_id "CUFF.50643"; transcript_id "CUFF.50643.1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr13	Cufflinks	exon	10182192	10182228	1000	.	.	gene_id "CUFF.50643"; transcript_id "CUFF.50643.1"; exon_number "1"; FPKM "34.2537322701"; frac "1.000000"; conf_lo "0.000000"; conf_hi "73.806535"; cov "2.189189";
+chr13	Cufflinks	transcript	10189009	10189035	1000	.	.	gene_id "CUFF.50645"; transcript_id "CUFF.50645.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	10189009	10189035	1000	.	.	gene_id "CUFF.50645"; transcript_id "CUFF.50645.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	10197772	10197798	1000	.	.	gene_id "CUFF.50647"; transcript_id "CUFF.50647.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	10197772	10197798	1000	.	.	gene_id "CUFF.50647"; transcript_id "CUFF.50647.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	10200086	10200124	1000	.	.	gene_id "CUFF.50649"; transcript_id "CUFF.50649.1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr13	Cufflinks	exon	10200086	10200124	1000	.	.	gene_id "CUFF.50649"; transcript_id "CUFF.50649.1"; exon_number "1"; FPKM "21.6647537435"; frac "1.000000"; conf_lo "0.000000"; conf_hi "52.303342"; cov "1.384615";
+chr13	Cufflinks	transcript	10213412	10213536	1000	.	.	gene_id "CUFF.50651"; transcript_id "CUFF.50651.1"; FPKM "13.5188063359"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.037613"; cov "0.864000";
+chr13	Cufflinks	exon	10213412	10213536	1000	.	.	gene_id "CUFF.50651"; transcript_id "CUFF.50651.1"; exon_number "1"; FPKM "13.5188063359"; frac "1.000000"; conf_lo "0.000000"; conf_hi "27.037613"; cov "0.864000";
+chr13	Cufflinks	transcript	10223893	10223941	1000	.	.	gene_id "CUFF.50653"; transcript_id "CUFF.50653.1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr13	Cufflinks	exon	10223893	10223941	1000	.	.	gene_id "CUFF.50653"; transcript_id "CUFF.50653.1"; exon_number "1"; FPKM "17.2433754285"; frac "1.000000"; conf_lo "0.000000"; conf_hi "41.629191"; cov "1.102041";
+chr13	Cufflinks	transcript	10289392	10289437	1000	.	.	gene_id "CUFF.50655"; transcript_id "CUFF.50655.1"; FPKM "18.3679433912"; frac "1.000000"; conf_lo "0.000000"; conf_hi "44.344138"; cov "1.173913";
+chr13	Cufflinks	exon	10289392	10289437	1000	.	.	gene_id "CUFF.50655"; transcript_id "CUFF.50655.1"; exon_number "1"; FPKM "18.3679433912"; frac "1.000000"; conf_lo "0.000000"; conf_hi "44.344138"; cov "1.173913";
+chr13	Cufflinks	transcript	10326745	10326771	1000	.	.	gene_id "CUFF.50657"; transcript_id "CUFF.50657.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	10326745	10326771	1000	.	.	gene_id "CUFF.50657"; transcript_id "CUFF.50657.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	10346675	10346701	1000	.	.	gene_id "CUFF.50659"; transcript_id "CUFF.50659.1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	exon	10346675	10346701	1000	.	.	gene_id "CUFF.50659"; transcript_id "CUFF.50659.1"; exon_number "1"; FPKM "31.2935331850"; frac "1.000000"; conf_lo "0.000000"; conf_hi "75.549272"; cov "2.000000";
+chr13	Cufflinks	transcript	10337071	10337097	1000	.	.	gene_id "CUFF.50661"; transcript_id "CUFF.50661.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	10337071	10337097	1000	.	.	gene_id "CUFF.50661"; transcript_id "CUFF.50661.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	10337141	10337167	1000	.	.	gene_id "CUFF.50663"; transcript_id "CUFF.50663.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	10337141	10337167	1000	.	.	gene_id "CUFF.50663"; transcript_id "CUFF.50663.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	10344376	10344402	1000	.	.	gene_id "CUFF.50665"; transcript_id "CUFF.50665.1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	exon	10344376	10344402	1000	.	.	gene_id "CUFF.50665"; transcript_id "CUFF.50665.1"; exon_number "1"; FPKM "46.9402997776"; frac "1.000000"; conf_lo "0.000000"; conf_hi "101.142289"; cov "3.000000";
+chr13	Cufflinks	transcript	10344976	10345002	1000	.	.	gene_id "CUFF.50667"; transcript_id "CUFF.50667.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	10344976	10345002	1000	.	.	gene_id "CUFF.50667"; transcript_id "CUFF.50667.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	10345008	10345034	1000	.	.	gene_id "CUFF.50669"; transcript_id "CUFF.50669.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	10345008	10345034	1000	.	.	gene_id "CUFF.50669"; transcript_id "CUFF.50669.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	10345484	10345510	1000	.	.	gene_id "CUFF.50671"; transcript_id "CUFF.50671.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	10345484	10345510	1000	.	.	gene_id "CUFF.50671"; transcript_id "CUFF.50671.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	transcript	10345538	10345564	1000	.	.	gene_id "CUFF.50673"; transcript_id "CUFF.50673.1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
+chr13	Cufflinks	exon	10345538	10345564	1000	.	.	gene_id "CUFF.50673"; transcript_id "CUFF.50673.1"; exon_number "1"; FPKM "15.6467665925"; frac "1.000000"; conf_lo "0.000000"; conf_hi "46.940300"; cov "1.000000";
diff --git a/lib/galaxy/datatypes/test/test.idpDB b/lib/galaxy/datatypes/test/test.idpDB
new file mode 100644
index 0000000..140cb01
Binary files /dev/null and b/lib/galaxy/datatypes/test/test.idpDB differ
diff --git a/lib/galaxy/datatypes/test/test.mz5 b/lib/galaxy/datatypes/test/test.mz5
new file mode 100644
index 0000000..b43eba6
Binary files /dev/null and b/lib/galaxy/datatypes/test/test.mz5 differ
diff --git a/lib/galaxy/datatypes/test/test_ensembl.tab b/lib/galaxy/datatypes/test/test_ensembl.tab
new file mode 100644
index 0000000..3fe4f80
--- /dev/null
+++ b/lib/galaxy/datatypes/test/test_ensembl.tab
@@ -0,0 +1,8 @@
+Ensembl Gene ID	Ensembl Transcript ID	Chicken Ensembl Gene ID	Orthology Type
+ENSOANG00000013952	ENSOANT00000021996	ENSGALG00000012404	ortholog_one2one
+ENSOANG00000013954	ENSOANT00000021998	ENSGALG00000012401	ortholog_one2one
+ENSOANG00000013954	ENSOANT00000021999	ENSGALG00000012401	ortholog_one2one
+ENSOANG00000013954	ENSOANT00000022000	ENSGALG00000012401	ortholog_one2one
+ENSOANG00000013955	ENSOANT00000022001		
+ENSOANG00000013955	ENSOANT00000022002		
+ENSOANG00000013956	ENSOANT00000022005	ENSGALG00000012400	
diff --git a/lib/galaxy/datatypes/test/test_space.txt b/lib/galaxy/datatypes/test/test_space.txt
new file mode 100644
index 0000000..17b4e08
--- /dev/null
+++ b/lib/galaxy/datatypes/test/test_space.txt
@@ -0,0 +1,10 @@
+chr7    115444712    115444739    CCDS5763.1_cds_0_0_chr7_115444713_f    0    +
+chr7    115468538    115468624    CCDS5763.1_cds_1_0_chr7_115468539_f    0    +
+chr7    115483024    115483277    CCDS5763.1_cds_2_0_chr7_115483025_f    0    +
+chr7    115484165    115484501    CCDS5763.1_cds_3_0_chr7_115484166_f    0    +
+chr7    115485764    115485980    CCDS5763.1_cds_4_0_chr7_115485765_f    0    +
+chr7    115486322    115486481    CCDS5763.1_cds_5_0_chr7_115486323_f    0    +
+chr7    115491298    115491487    CCDS5763.1_cds_6_0_chr7_115491299_f    0    +
+chr7    115468538    115468624    CCDS5764.1_cds_0_0_chr7_115468539_f    0    +
+chr7    115483024    115483277    CCDS5764.1_cds_1_0_chr7_115483025_f    0    +
+chr7    115484165    115484501    CCDS5764.1_cds_2_0_chr7_115484166_f    0    +
diff --git a/lib/galaxy/datatypes/test/test_tab.bed b/lib/galaxy/datatypes/test/test_tab.bed
new file mode 100644
index 0000000..77019f8
--- /dev/null
+++ b/lib/galaxy/datatypes/test/test_tab.bed
@@ -0,0 +1,2 @@
+chr22	1000	5000	cloneA	960	+	1000	5000	0	2	567,488,	0,3512
+chr22	2000	6000	cloneB	900	-	2000	6000	0	2	433,399,	0,3601
diff --git a/lib/galaxy/datatypes/test/test_tab1.tabular b/lib/galaxy/datatypes/test/test_tab1.tabular
new file mode 100644
index 0000000..94e4e0f
--- /dev/null
+++ b/lib/galaxy/datatypes/test/test_tab1.tabular
@@ -0,0 +1,32 @@
+ARdb_0345	phenoCommon	PAIS
+ARdb_0345	protEffect	AR:p.Val866Leu
+ARdb_0111	phenoCommon	CAIS
+ARdb_0111	protEffect	AR:p.Trp718Stop
+ARdb_0040	phenoCommon	CAIS
+ARdb_0040	protEffect	AR:p.Cys576Phe
+ARdb_0037	phenoCommon	PAIS
+ARdb_0037	protEffect	AR:p.Gly568Val
+ARdb_0251	phenoCommon	PAIS
+ARdb_0251	protEffect	AR:p.Arg855His
+ARdb_0039	phenoCommon	CAIS
+ARdb_0039	protEffect	AR:p.Cys576Arg
+ARdb_0034	phenoCommon	CAIS
+ARdb_0034	protEffect	AR:p.Cys559Tyr
+ARdb_0213	phenoCommon	CAIS
+ARdb_0213	protEffect	AR:p.Arg831Stop
+ARdb_0240	phenoCommon	CAIS
+ARdb_0240	protEffect	AR:p.Arg855Cys
+ARdb_0258	phenoCommon	CAIS
+ARdb_0258	protEffect	AR:p.Asp864Gly
+ARdb_0154	phenoCommon	CAIS
+ARdb_0154	protEffect	AR:p.Ser759Phe
+ARdb_0071	phenoCommon	PAIS
+ARdb_0071	protEffect	AR:p.Leu616Arg
+ARdb_0337	phenoCommon	PAIS
+ARdb_0337	protEffect	AR:p.Arg840His
+ARdb_0275	phenoCommon	LNCaP mutation
+ARdb_0275	protEffect	AR:p.Thr877Ala
+ARdb_0096	phenoCommon	CAIS
+ARdb_0096	protEffect	AR:p.Asp695His
+ARdb_0097	phenoCommon	CAIS
+ARdb_0097	protEffect	AR:p.Asp695Asn
diff --git a/lib/galaxy/datatypes/test/ucsc.customtrack b/lib/galaxy/datatypes/test/ucsc.customtrack
new file mode 100644
index 0000000..4769cfa
--- /dev/null
+++ b/lib/galaxy/datatypes/test/ucsc.customtrack
@@ -0,0 +1,3 @@
+track name="User Track" description="User Supplied Track (from Galaxy)" color=0,0,0 visibility=1
+chr7	127475281	127491632	NM_000230	0	+	127486022	127488767	0	3	29,172,3225,	0,10713,13126,
+chr7	127486011	127488900	D49487	0	+	127486022	127488767	0	2	155,490,	0,2399,
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/test/wiggle.wig b/lib/galaxy/datatypes/test/wiggle.wig
new file mode 100644
index 0000000..6b79058
--- /dev/null
+++ b/lib/galaxy/datatypes/test/wiggle.wig
@@ -0,0 +1,3 @@
+track type=wiggle_0 name="tb_knownGene" description="table browser query on knownGene" visibility=3 url= 
+chr7	127475281	127491632	NM_000230	0	+	127486022	127488767	0	3	29,172,3225,	0,10713,13126,
+chr7	127486011	127488900	D49487	0	+	127486022	127488767	0	2	155,490,	0,2399,
\ No newline at end of file
diff --git a/lib/galaxy/datatypes/text.py b/lib/galaxy/datatypes/text.py
new file mode 100644
index 0000000..a1dc938
--- /dev/null
+++ b/lib/galaxy/datatypes/text.py
@@ -0,0 +1,544 @@
+# -*- coding: utf-8 -*-
+""" Clearing house for generic text datatypes that are not XML or tabular.
+"""
+
+import gzip
+import json
+import logging
+import os
+import re
+import subprocess
+import tempfile
+
+from galaxy.datatypes.data import get_file_peek, Text
+from galaxy.datatypes.metadata import MetadataElement, MetadataParameter
+from galaxy.datatypes.sniff import get_headers
+from galaxy.util import nice_size, string_as_bool
+
+log = logging.getLogger(__name__)
+
+
+class Html( Text ):
+    """Class describing an html file"""
+    edam_format = "format_2331"
+    file_ext = "html"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "HTML file"
+            dataset.blurb = nice_size( dataset.get_size() )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def get_mime(self):
+        """Returns the mime type of the datatype"""
+        return 'text/html'
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is in html format
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'complete.bed' )
+        >>> Html().sniff( fname )
+        False
+        >>> fname = get_test_fname( 'file.html' )
+        >>> Html().sniff( fname )
+        True
+        """
+        headers = get_headers( filename, None )
+        try:
+            for hdr in headers:
+                if hdr and hdr[0].lower().find( '<html>' ) >= 0:
+                    return True
+            return False
+        except:
+            return True
+
+
+class Json( Text ):
+    edam_format = "format_3464"
+    file_ext = "json"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = "JavaScript Object Notation (JSON)"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """
+            Try to parse the file with the json module; for larger files, check
+            whether the content starts like a JSON container instead. If either
+            test passes, it's a json file.
+        """
+        return self._looks_like_json( filename )
+
+    def _looks_like_json( self, filename ):
+        # Pattern used by SequenceSplitLocations
+        if os.path.getsize(filename) < 50000:
+            # If the file is small enough - don't guess just check.
+            try:
+                json.load( open(filename, "r") )
+                return True
+            except Exception:
+                return False
+        else:
+            with open(filename, "r") as fh:
+                while True:
+                    # Grab the next chunk of the file and see if it looks like json.
+                    chunk = fh.read(100)
+                    if not chunk:
+                        # Hit EOF without finding any non-whitespace content.
+                        return False
+                    start = chunk.strip()
+                    if start:
+                        # simple types are valid JSON as well - but would such a file
+                        # be interesting as JSON in Galaxy?
+                        return start.startswith("[") or start.startswith("{")
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except:
+            return "JSON file (%s)" % ( nice_size( dataset.get_size() ) )
+
+
+class Ipynb( Json ):
+    file_ext = "ipynb"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = "IPython Notebook"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """
+            Try to load the file with the json module; it's an IPython notebook
+            if the parsed JSON carries both 'nbformat' and 'metadata' keys.
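+
+            A minimal sketch of the top-level structure checked for
+            (field values are illustrative only):
+
+                {"nbformat": 4, "nbformat_minor": 0, "metadata": {}, "cells": []}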
+        """
+        if self._looks_like_json( filename ):
+            try:
+                ipynb = json.load( open(filename) )
+                if ipynb.get('nbformat', False) is not False and ipynb.get('metadata', False):
+                    return True
+                else:
+                    return False
+            except:
+                return False
+        return False
+
+    def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, **kwd):
+        config = trans.app.config
+        trust = getattr( config, 'trust_ipython_notebook_conversion', False )
+        if trust:
+            return self._display_data_trusted(trans, dataset, preview=preview, filename=filename, to_ext=to_ext, **kwd)
+        else:
+            return super(Ipynb, self).display_data( trans, dataset, preview=preview, filename=filename, to_ext=to_ext, **kwd )
+
+    def _display_data_trusted(self, trans, dataset, preview=False, filename=None, to_ext=None, **kwd):
+        preview = string_as_bool( preview )
+        if to_ext or not preview:
+            return self._serve_raw(trans, dataset, to_ext)
+        else:
+            ofile_handle = tempfile.NamedTemporaryFile(delete=False)
+            ofilename = ofile_handle.name
+            ofile_handle.close()
+            try:
+                cmd = 'ipython nbconvert --to html --template full %s --output %s' % (dataset.file_name, ofilename)
+                log.info("Calling command %s" % cmd)
+                subprocess.call(cmd, shell=True)
+                ofilename = '%s.html' % ofilename
+            except:
+                ofilename = dataset.file_name
+                log.exception( 'Command "%s" failed. Could not convert the IPython Notebook to HTML, defaulting to plain text.' % cmd )
+            return open( ofilename )
+
+    def set_meta( self, dataset, **kwd ):
+        """
+        No metadata is currently set for IPython notebooks.
+        """
+        pass
+
+
+class Biom1( Json ):
+    """
+        BIOM version 1.0 file format description
+        http://biom-format.org/documentation/format_versions/biom-1.0.html
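+
+        A minimal sketch of the top-level keys this datatype inspects
+        (values are illustrative only):
+
+            {
+                "id": null,
+                "format": "Biological Observation Matrix 1.0.0",
+                "format_url": "http://biom-format.org",
+                "type": "OTU table",
+                "matrix_type": "sparse",
+                "matrix_element_type": "int",
+                "shape": [2, 2],
+                "rows": [], "columns": [], "data": []
+            }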
+    """
+    file_ext = "biom1"
+
+    MetadataElement( name="table_rows", default=[], desc="table_rows", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value=[] )
+    MetadataElement( name="table_matrix_element_type", default="", desc="table_matrix_element_type", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value="" )
+    MetadataElement( name="table_format", default="", desc="table_format", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value="" )
+    MetadataElement( name="table_generated_by", default="", desc="table_generated_by", param=MetadataParameter, readonly=True, visible=True, optional=True, no_value="" )
+    MetadataElement( name="table_matrix_type", default="", desc="table_matrix_type", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value="" )
+    MetadataElement( name="table_shape", default=[], desc="table_shape", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value=[] )
+    MetadataElement( name="table_format_url", default="", desc="table_format_url", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value="" )
+    MetadataElement( name="table_date", default="", desc="table_date", param=MetadataParameter, readonly=True, visible=True, optional=True, no_value="" )
+    MetadataElement( name="table_type", default="", desc="table_type", param=MetadataParameter, readonly=True, visible=True, optional=True, no_value="" )
+    MetadataElement( name="table_id", default=None, desc="table_id", param=MetadataParameter, readonly=True, visible=True, optional=True, no_value=None )
+    MetadataElement( name="table_columns", default=[], desc="table_columns", param=MetadataParameter, readonly=True, visible=False, optional=True, no_value=[] )
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        super( Biom1, self ).set_peek( dataset, is_multi_byte )
+        if not dataset.dataset.purged:
+            dataset.blurb = "Biological Observation Matrix v1"
+
+    def sniff( self, filename ):
+        is_biom = False
+        if self._looks_like_json( filename ):
+            is_biom = self._looks_like_biom( filename )
+        return is_biom
+
+    def _looks_like_biom( self, filepath, load_size=50000 ):
+        """
+        @param filepath: [str] The path to the evaluated file.
+        @param load_size: [int] The size of the file block load in RAM (in
+                          bytes).
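+
+        Note: the scan keeps the previous chunk and searches the concatenation
+        of consecutive chunks, so the '"format"' marker is still found even
+        when it straddles a chunk boundary.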
+        """
+        is_biom = False
+        segment_size = int( load_size / 2 )
+        try:
+            with open( filepath, "r" ) as fh:
+                prev_str = ""
+                segment_str = fh.read( segment_size )
+                if segment_str.strip().startswith( '{' ):
+                    while segment_str:
+                        current_str = prev_str + segment_str
+                        if '"format"' in current_str:
+                            current_str = re.sub( r'\s', '', current_str )
+                            if '"format":"BiologicalObservationMatrix' in current_str:
+                                is_biom = True
+                                break
+                        prev_str = segment_str
+                        segment_str = fh.read( segment_size )
+        except Exception:
+            pass
+        return is_biom
+
+    def set_meta( self, dataset, **kwd ):
+        """
+            Store metadata information from the BIOM file.
+        """
+        if dataset.has_data():
+            with open( dataset.file_name ) as fh:
+                try:
+                    json_dict = json.load( fh )
+                except Exception:
+                    return
+
+                def _transform_dict_list_ids( dict_list ):
+                    if dict_list:
+                        return [ x.get( 'id', None ) for x in dict_list ]
+                    return []
+
+                b_transform = { 'rows': _transform_dict_list_ids, 'columns': _transform_dict_list_ids }
+                for ( m_name, b_name ) in [ ('table_rows', 'rows'),
+                                            ('table_matrix_element_type', 'matrix_element_type'),
+                                            ('table_format', 'format'),
+                                            ('table_generated_by', 'generated_by'),
+                                            ('table_matrix_type', 'matrix_type'),
+                                            ('table_shape', 'shape'),
+                                            ('table_format_url', 'format_url'),
+                                            ('table_date', 'date'),
+                                            ('table_type', 'type'),
+                                            ('table_id', 'id'),
+                                            ('table_columns', 'columns') ]:
+                    try:
+                        metadata_value = json_dict.get( b_name, None )
+                        if b_name in b_transform:
+                            metadata_value = b_transform[ b_name ]( metadata_value )
+                        setattr( dataset.metadata, m_name, metadata_value )
+                    except Exception:
+                        pass
+
+
+class Obo( Text ):
+    """
+        OBO file format description
+        http://www.geneontology.org/GO.format.obo-1_2.shtml
+    """
+    edam_data = "data_0582"
+    edam_format = "format_2549"
+    file_ext = "obo"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = "Open Biomedical Ontology (OBO)"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """
+            Try to guess the Obo filetype.
+            It usually starts with a "format-version:" line and contains stanzas such as "[Term]" whose first tag is "id:".
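+
+            A minimal example of the expected layout (the term shown is
+            illustrative):
+
+                format-version: 1.2
+
+                [Term]
+                id: GO:0000001
+                name: mitochondrion inheritance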
+        """
+        stanza = re.compile(r'^\[.*\]$')
+        with open( filename ) as handle:
+            first_line = handle.readline()
+            if not first_line.startswith('format-version:'):
+                return False
+
+            for line in handle:
+                if stanza.match(line.strip()):
+                    # a stanza needs to begin with an ID tag
+                    if handle.next().startswith('id:'):
+                        return True
+        return False
+
+
+class Arff( Text ):
+    """
+        An ARFF (Attribute-Relation File Format) file is an ASCII text file that describes a list of instances sharing a set of attributes.
+        http://weka.wikispaces.com/ARFF
+    """
+    edam_format = "format_3581"
+    file_ext = "arff"
+
+    """Add metadata elements"""
+    MetadataElement( name="comment_lines", default=0, desc="Number of comment lines", readonly=True, optional=True, no_value=0 )
+    MetadataElement( name="columns", default=0, desc="Number of columns", readonly=True, visible=True, no_value=0 )
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = "Attribute-Relation File Format (ARFF)"
+            dataset.blurb += ", %s comments, %s attributes" % ( dataset.metadata.comment_lines, dataset.metadata.columns )
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """
+            Try to guess the Arff filetype.
+            A valid ARFF file declares an "@RELATION", one or more "@ATTRIBUTE" lines and ends its header with an "@DATA" section.
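+
+            A minimal example of the header this sniffer accepts (relation
+            and attribute names are illustrative):
+
+                @RELATION iris
+                @ATTRIBUTE sepallength NUMERIC
+                @ATTRIBUTE class {Iris-setosa,Iris-versicolor}
+                @DATA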
+        """
+        with open( filename ) as handle:
+            relation_found = False
+            attribute_found = False
+            for line_count, line in enumerate( handle ):
+                if line_count > 1000:
+                    # only investigate the first 1000 lines
+                    return False
+                line = line.strip()
+                if not line:
+                    continue
+
+                start_string = line[:20].upper()
+                if start_string.startswith("@RELATION"):
+                    relation_found = True
+                elif start_string.startswith("@ATTRIBUTE"):
+                    attribute_found = True
+                elif start_string.startswith("@DATA"):
+                    # @DATA should be the last data block
+                    if relation_found and attribute_found:
+                        return True
+        return False
+
+    def set_meta( self, dataset, **kwd ):
+        """
+            Trying to count the comment lines and the number of columns included.
+            A typical ARFF data block looks like this:
+            @DATA
+            5.1,3.5,1.4,0.2,Iris-setosa
+            4.9,3.0,1.4,0.2,Iris-setosa
+        """
+        if dataset.has_data():
+            comment_lines = 0
+            column_count = 0
+            first_real_line = False
+            data_block = False
+            with open( dataset.file_name ) as handle:
+                for line in handle:
+                    line = line.strip()
+                    if not line:
+                        continue
+                    if line.startswith('%') and not first_real_line:
+                        comment_lines += 1
+                    else:
+                        first_real_line = True
+                    if data_block:
+                        if line.startswith('{'):
+                            # Sparse representation
+                            """
+                                @data
+                                0, X, 0, Y, "class A", {5}
+                            or
+                                @data
+                                {1 X, 3 Y, 4 "class A"}, {5}
+                            """
+                            token = line.split('}', 1)
+                            first_part = token[0]
+                            last_column = first_part.split(',')[-1].strip()
+                            numeric_value = last_column.split()[0]
+                            column_count = int(numeric_value)
+                            if len(token) > 1:
+                                # we have an additional weight
+                                column_count -= 1
+                        else:
+                            columns = line.strip().split(',')
+                            column_count = len(columns)
+                            if columns[-1].strip().startswith('{'):
+                                # we have an additional weight at the end
+                                column_count -= 1
+
+                        # We have now the column_count and we know the initial comment lines. So we can terminate here.
+                        break
+                    if line[:5].upper() == "@DATA":
+                        data_block = True
+            dataset.metadata.comment_lines = comment_lines
+            dataset.metadata.columns = column_count
+
+
+class SnpEffDb( Text ):
+    """Class describing a SnpEff genome build"""
+    edam_format = "format_3624"
+    file_ext = "snpeffdb"
+    MetadataElement( name="genome_version", default=None, desc="Genome Version", readonly=True, visible=True, no_value=None )
+    MetadataElement( name="snpeff_version", default="SnpEff4.0", desc="SnpEff Version", readonly=True, visible=True, no_value=None )
+    MetadataElement( name="regulation", default=[], desc="Regulation Names", readonly=True, visible=True, no_value=[], optional=True)
+    MetadataElement( name="annotation", default=[], desc="Annotation Names", readonly=True, visible=True, no_value=[], optional=True)
+
+    def __init__( self, **kwd ):
+        Text.__init__( self, **kwd )
+
+    # The SnpEff version line was added in SnpEff version 4.1
+    def getSnpeffVersionFromFile(self, path):
+        snpeff_version = None
+        try:
+            with gzip.open(path, 'rb') as fh:
+                buf = fh.read(100)
+                lines = buf.splitlines()
+                m = re.match(r'^(SnpEff)\s+(\d+\.\d+).*$', lines[0].strip())
+                if m:
+                    snpeff_version = m.groups()[0] + m.groups()[1]
+        except Exception:
+            pass
+        return snpeff_version
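+
+    # Illustrative sketch: in SnpEff >= 4.1 the first line of the predictor
+    # file starts with something like 'SnpEff  4.1 ...', so the regex above
+    # yields m.groups() == ('SnpEff', '4.1') and the stored version string
+    # becomes 'SnpEff4.1'.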
+
+    def set_meta( self, dataset, **kwd ):
+        Text.set_meta(self, dataset, **kwd )
+        data_dir = dataset.extra_files_path
+        # search data_dir/genome_version for files
+        regulation_pattern = 'regulation_(.+).bin'
+        #  annotation files that are included in snpEff by a flag
+        annotations_dict = {'nextProt.bin': '-nextprot', 'motif.bin': '-motif'}
+        regulations = []
+        annotations = []
+        genome_version = None
+        snpeff_version = None
+        if data_dir and os.path.isdir(data_dir):
+            for root, dirs, files in os.walk(data_dir):
+                for fname in files:
+                    if fname.startswith('snpEffectPredictor'):
+                        # if snpEffectPredictor.bin download succeeded
+                        genome_version = os.path.basename(root)
+                        dataset.metadata.genome_version = genome_version
+                        # read the first line of the gzipped snpEffectPredictor.bin file to get the SnpEff version
+                        snpeff_version = self.getSnpeffVersionFromFile(os.path.join(root, fname))
+                        if snpeff_version:
+                            dataset.metadata.snpeff_version = snpeff_version
+                    else:
+                        m = re.match(regulation_pattern, fname)
+                        if m:
+                            name = m.groups()[0]
+                            regulations.append(name)
+                        elif fname in annotations_dict:
+                            value = annotations_dict[fname]
+                            name = value.lstrip('-')
+                            annotations.append(name)
+            dataset.metadata.regulation = regulations
+            dataset.metadata.annotation = annotations
+            try:
+                with open(dataset.file_name, 'w') as fh:
+                    # Parenthesize the conditionals so the newline is always written
+                    fh.write("%s\n" % (genome_version if genome_version else 'Genome unknown'))
+                    fh.write("%s\n" % (snpeff_version if snpeff_version else 'SnpEff version unknown'))
+                    if annotations:
+                        fh.write("annotations: %s\n" % ','.join(annotations))
+                    if regulations:
+                        fh.write("regulations: %s\n" % ','.join(regulations))
+            except Exception:
+                pass
+
+
+class SnpSiftDbNSFP( Text ):
+    """Class describing a dbNSFP database prepared fpr use by SnpSift dbnsfp """
+    MetadataElement( name='reference_name', default='dbSNFP', desc='Reference Name', readonly=True, visible=True, set_in_upload=True, no_value='dbSNFP' )
+    MetadataElement( name="bgzip", default=None, desc="dbNSFP bgzip", readonly=True, visible=True, no_value=None )
+    MetadataElement( name="index", default=None, desc="Tabix Index File", readonly=True, visible=True, no_value=None)
+    MetadataElement( name="annotation", default=[], desc="Annotation Names", readonly=True, visible=True, no_value=[] )
+    file_ext = "snpsiftdbnsfp"
+    composite_type = 'auto_primary_file'
+    allow_datatype_change = False
+    """
+    ## The dbNSFP file is a tabular file with 1 header line
+    ## The first 4 columns are required to be: chrom	pos	ref	alt
+    ## These match columns 1,2,4,5 of the VCF file
+    ## SnpSift requires the file to be block-gzipped and the indexed with samtools tabix
+    ## Example:
+    ## Compress using block-gzip algorithm
+    bgzip dbNSFP2.3.txt
+    ## Create tabix index
+    tabix -s 1 -b 2 -e 2 dbNSFP2.3.txt.gz
+    """
+    def __init__( self, **kwd ):
+        Text.__init__( self, **kwd )
+        self.add_composite_file( '%s.gz', description='dbNSFP bgzip', substitute_name_with_metadata='reference_name', is_binary=True )
+        self.add_composite_file( '%s.gz.tbi', description='Tabix Index File', substitute_name_with_metadata='reference_name', is_binary=True )
+
+    def init_meta( self, dataset, copy_from=None ):
+        Text.init_meta( self, dataset, copy_from=copy_from )
+
+    def generate_primary_file( self, dataset=None ):
+        """
+        This is called only at upload to write the html file
+        cannot rename the datasets here - they come with the default unfortunately
+        """
+        self.regenerate_primary_file( dataset )
+
+    def regenerate_primary_file(self, dataset):
+        """
+        Rewrite the primary file; this cannot be done until metadata is being set.
+        """
+        annotations = "dbNSFP Annotations: %s\n" % ','.join(dataset.metadata.annotation)
+        with open(dataset.file_name, 'a') as f:
+            if dataset.metadata.bgzip:
+                bn = dataset.metadata.bgzip
+                f.write(bn)
+                f.write('\n')
+            f.write(annotations)
+
+    def set_meta( self, dataset, overwrite=True, **kwd ):
+        try:
+            efp = dataset.extra_files_path
+            if os.path.exists(efp):
+                flist = os.listdir(efp)
+                for fname in flist:
+                    if fname.endswith('.gz'):
+                        dataset.metadata.bgzip = fname
+                        try:
+                            with gzip.open(os.path.join(efp, fname), 'r') as fh:
+                                buf = fh.read(5000)
+                                lines = buf.splitlines()
+                                headers = lines[0].split('\t')
+                                dataset.metadata.annotation = headers[4:]
+                        except Exception as e:
+                            log.warning("set_meta fname: %s  %s" % (fname, str(e)))
+                    if fname.endswith('.tbi'):
+                        dataset.metadata.index = fname
+            self.regenerate_primary_file(dataset)
+        except Exception as e:
+            log.warning("set_meta fname: %s  %s" % (dataset.file_name if dataset and dataset.file_name else 'Unknown', str(e)))
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = '%s :  %s' % (dataset.metadata.reference_name, ','.join(dataset.metadata.annotation))
+            dataset.blurb = '%s' % dataset.metadata.reference_name
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
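+
+    # Illustrative sketch (hypothetical column names): given a dbNSFP header
+    # line such as
+    #
+    #   chrom  pos  ref  alt  SIFT_score  Polyphen2_HDIV_score
+    #
+    # set_meta() above stores annotation = ['SIFT_score', 'Polyphen2_HDIV_score'],
+    # i.e. every tab-separated column after the four required ones.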
diff --git a/lib/galaxy/datatypes/tracks.py b/lib/galaxy/datatypes/tracks.py
new file mode 100644
index 0000000..c14bf44
--- /dev/null
+++ b/lib/galaxy/datatypes/tracks.py
@@ -0,0 +1,85 @@
+"""
+Datatype classes for tracks/track views within galaxy.
+"""
+import logging
+
+from galaxy.datatypes.text import Html
+
+from . import binary
+
+log = logging.getLogger(__name__)
+
+
+# GeneTrack is no longer supported but leaving the datatype since
+# files of this type may still exist
+class GeneTrack( binary.Binary ):
+    edam_data = "data_3002"
+    edam_format = "format_2919"
+    file_ext = "genetrack"
+
+    def __init__(self, **kwargs):
+        super( GeneTrack, self ).__init__( **kwargs )
+        # self.add_display_app( 'genetrack', 'View in', '', 'genetrack_link' )
+    # def get_display_links( self, dataset, type, app, base_url, target_frame='galaxy_main', **kwd ): #Force target_frame to be 'galaxy_main'
+    #     return binary.Binary.get_display_links( self, dataset, type, app, base_url, target_frame=target_frame, **kwd )
+    # def genetrack_link( self, hda, type, app, base_url ):
+    #     ret_val = []
+    #     if hda.dataset.has_data():
+    #         # Get the disk file name and data id
+    #         file_name = hda.dataset.get_file_name()
+    #         data_id  = quote_plus( str( hda.id ) )
+    #         galaxy_url = quote_plus( "%s%s" % ( base_url, url_for( controller = 'tool_runner', tool_id='predict2genetrack' ) ) )
+    #         # Make it secure
+    #         hashkey = quote_plus( hmac_new( app.config.tool_secret, file_name ) )
+    #         encoded = quote_plus( binascii.hexlify( file_name ) )
+    #         for name, url in util.get_genetrack_sites():
+    #             if name.lower() in app.config.genetrack_display_sites:
+    #                 # send both parameters filename and hashkey
+    #                 link = "%s?filename=%s&hashkey=%s&input=%s&GALAXY_URL=%s" % ( url, encoded, hashkey, data_id, galaxy_url )
+    #                 ret_val.append( ( name, link ) )
+    #         return ret_val
+
+
+class UCSCTrackHub( Html ):
+    """
+    Datatype for UCSC TrackHub
+    """
+
+    file_ext = 'trackhub'
+    composite_type = 'auto_primary_file'
+
+    def __init__(self, **kwd):
+        Html.__init__(self, **kwd)
+
+    def generate_primary_file( self, dataset=None ):
+        """
+        This is called only at upload to write the html file
+        cannot rename the datasets here - they come with the default unfortunately
+        """
+        rval = [
+            '<html><head><title>Files for Composite Dataset (%s)</title></head><p/>\
+            This composite dataset is composed of the following files:<p/><ul>' % (
+                self.file_ext)]
+        for composite_name, composite_file in self.get_composite_files( dataset=dataset ).items():
+            opt_text = ''
+            if composite_file.optional:
+                opt_text = ' (optional)'
+            rval.append('<li><a href="%s">%s</a>%s' % ( composite_name, composite_name, opt_text) )
+        rval.append('</ul></html>')
+        return "\n".join(rval)
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = "Track Hub structure: Visualization in UCSC Track Hub"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def display_peek( self, dataset ):
+        try:
+            return dataset.peek
+        except Exception:
+            return "Track Hub structure: Visualization in UCSC Track Hub"
+
+    def sniff( self, filename ):
+        """Returns False; the user must manually set this format."""
+        return False
diff --git a/lib/galaxy/datatypes/triples.py b/lib/galaxy/datatypes/triples.py
new file mode 100644
index 0000000..af82f3a
--- /dev/null
+++ b/lib/galaxy/datatypes/triples.py
@@ -0,0 +1,188 @@
+"""
+Triple format classes
+"""
+import logging
+import re
+
+import binary
+import data
+import text
+import xml
+
+log = logging.getLogger(__name__)
+
+
+class Triples( data.Data ):
+    """
+    The abstract base class for file formats that can contain triples
+    """
+    edam_data = "data_0582"
+    edam_format = "format_2376"
+    file_ext = "triples"
+
+    def sniff( self, filename ):
+        """
+        Returns False; the user must manually set this format.
+        """
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'Triple data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class NTriples( data.Text, Triples ):
+    """
+    The N-Triples triple data format
+    """
+    edam_format = "format_3256"
+    file_ext = "nt"
+
+    def sniff( self, filename ):
+        with open(filename, "r") as f:
+            # <http://example.org/dir/relfile> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://example.org/type> .
+            if re.compile( r'<[^>]*>\s<[^>]*>\s<[^>]*>\s\.' ).search( f.readline( 1024 ) ):
+                return True
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'N-Triples triple data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class N3( data.Text, Triples ):
+    """
+    The N3 triple data format
+    """
+    edam_format = "format_3257"
+    file_ext = "n3"
+
+    def sniff( self, filename ):
+        """
+        Returns False; the user must manually set this format.
+        """
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'Notation-3 triple data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class Turtle( data.Text, Triples ):
+    """
+    The Turtle triple data format
+    """
+    edam_format = "format_3255"
+    file_ext = "ttl"
+
+    def sniff( self, filename ):
+        with open(filename, "r") as f:
+            # @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+            line = f.readline( 1024 )
+            if re.compile( r'@prefix\s+[^:]*:\s+<[^>]*>\s\.' ).search( line ):
+                return True
+            if re.compile( r'@base\s+<[^>]*>\s\.' ).search( line ):
+                return True
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'Turtle triple data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+# TODO: we might want to look at rdflib or a similar, larger lib/egg
+class Rdf( xml.GenericXml, Triples ):
+    """
+    Resource Description Framework format (http://www.w3.org/RDF/).
+    """
+    edam_format = "format_3261"
+    file_ext = "rdf"
+
+    def sniff( self, filename ):
+        with open(filename, "r") as f:
+            firstlines = "".join( f.readlines( 5000 ) )
+            # <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" ...
+            match = re.compile( r'xmlns:([^=]*)="http://www.w3.org/1999/02/22-rdf-syntax-ns#"' ).search( firstlines )
+            if match and (match.group(1) + ":RDF") in firstlines:
+                return True
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'RDF/XML triple data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class Jsonld( text.Json, Triples ):
+    """
+    The JSON-LD data format
+    """
+    # format not defined in edam so we use the json format number
+    edam_format = "format_3464"
+    file_ext = "jsonld"
+
+    def sniff( self, filename ):
+        if self._looks_like_json( filename ):
+            with open(filename, "r") as f:
+                firstlines = "".join( f.readlines( 5000 ) )
+                if "\"@id\"" in firstlines or "\"@context\"" in firstlines:
+                    return True
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'JSON-LD triple data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+class HDT( binary.Binary, Triples ):
+    """
+    The HDT triple data format
+    """
+    edam_format = "format_2376"
+    file_ext = "hdt"
+
+    def sniff( self, filename ):
+        with open(filename, "rb") as f:
+            if f.read(4) == "$HDT":
+                return True
+        return False
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'HDT triple data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+
+binary.Binary.register_sniffable_binary_format('HDT', 'HDT', HDT)
diff --git a/lib/galaxy/datatypes/util/__init__.py b/lib/galaxy/datatypes/util/__init__.py
new file mode 100644
index 0000000..e498fa0
--- /dev/null
+++ b/lib/galaxy/datatypes/util/__init__.py
@@ -0,0 +1,3 @@
+"""
+Utilities for Galaxy datatypes.
+"""
diff --git a/lib/galaxy/datatypes/util/generic_util.py b/lib/galaxy/datatypes/util/generic_util.py
new file mode 100644
index 0000000..86f1947
--- /dev/null
+++ b/lib/galaxy/datatypes/util/generic_util.py
@@ -0,0 +1,19 @@
+import subprocess
+
+
+def count_special_lines( word, filename, invert=False ):
+    """
+        Searches for occurrences of 'word' in a file using the grep tool,
+        which speeds up the searching and counting.
+        The number of matching lines is returned.
+    """
+    try:
+        cmd = ["grep", "-c", "-E"]
+        if invert:
+            cmd.append('-v')
+        cmd.extend([word, filename])
+        out = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+        return int(out.communicate()[0].split()[0])
+    except Exception:
+        pass
+    return 0
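+
+
+# Usage sketch (hypothetical file name): counting FASTA header lines, i.e.
+# lines matching '^>', could look like
+#
+#   count_special_lines('^>', 'sequences.fasta')
+#
+# while invert=True would count the non-matching (sequence) lines instead.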
diff --git a/lib/galaxy/datatypes/util/gff_util.py b/lib/galaxy/datatypes/util/gff_util.py
new file mode 100644
index 0000000..2a445e0
--- /dev/null
+++ b/lib/galaxy/datatypes/util/gff_util.py
@@ -0,0 +1,437 @@
+"""
+Provides utilities for working with GFF files.
+"""
+import copy
+
+from bx.intervals.io import GenomicInterval, MissingFieldError, NiceReaderWrapper, ParseError, GenomicIntervalReader
+from bx.tabular.io import Header, Comment
+
+from galaxy.util.odict import odict
+
+
+class GFFInterval( GenomicInterval ):
+    """
+    A GFF interval, including attributes. If the file is strictly GFF,
+    the only attribute is 'group'.
+    """
+    def __init__( self, reader, fields, chrom_col=0, feature_col=2, start_col=3, end_col=4,
+                  strand_col=6, score_col=5, default_strand='.', fix_strand=False ):
+        # HACK: GFF format allows '.' for strand but GenomicInterval does not. To get around this,
+        # temporarily set strand and then unset after initing GenomicInterval.
+        unknown_strand = False
+        if not fix_strand and fields[ strand_col ] == '.':
+            unknown_strand = True
+            fields[ strand_col ] = '+'
+        GenomicInterval.__init__( self, reader, fields, chrom_col, start_col, end_col, strand_col,
+                                  default_strand, fix_strand=fix_strand )
+        if unknown_strand:
+            self.strand = '.'
+            self.fields[ strand_col ] = '.'
+
+        # Handle feature, score column.
+        self.feature_col = feature_col
+        if self.feature_col >= self.nfields:
+            raise MissingFieldError( "No field for feature_col (%d)" % feature_col )
+        self.feature = self.fields[ self.feature_col ]
+        self.score_col = score_col
+        if self.score_col >= self.nfields:
+            raise MissingFieldError( "No field for score_col (%d)" % score_col )
+        self.score = self.fields[ self.score_col ]
+
+        # GFF attributes.
+        self.attributes = parse_gff_attributes( fields[8] )
+
+    def copy( self ):
+        return GFFInterval(self.reader, list( self.fields ), self.chrom_col, self.feature_col, self.start_col,
+                           self.end_col, self.strand_col, self.score_col, self.strand)
+
+
+class GFFFeature( GFFInterval ):
+    """
+    A GFF feature, which can include multiple intervals.
+    """
+    def __init__( self, reader, chrom_col=0, feature_col=2, start_col=3, end_col=4,
+                  strand_col=6, score_col=5, default_strand='.', fix_strand=False, intervals=[],
+                  raw_size=0 ):
+        # Use copy so that first interval and feature do not share fields.
+        GFFInterval.__init__( self, reader, copy.deepcopy( intervals[0].fields ), chrom_col, feature_col,
+                              start_col, end_col, strand_col, score_col, default_strand,
+                              fix_strand=fix_strand )
+        self.intervals = intervals
+        self.raw_size = raw_size
+        # Use intervals to set feature attributes.
+        for interval in self.intervals:
+            # Error checking. NOTE: intervals need not share the same strand.
+            if interval.chrom != self.chrom:
+                raise ValueError( "interval chrom does not match self chrom: %s != %s" %
+                                  ( interval.chrom, self.chrom ) )
+            # Set start, end of interval.
+            if interval.start < self.start:
+                self.start = interval.start
+            if interval.end > self.end:
+                self.end = interval.end
+
+    def name( self ):
+        """ Returns feature's name. """
+        name = None
+        # Preference for name: GTF, GFF3, GFF.
+        for attr_name in [
+                # GTF:
+                'gene_id', 'transcript_id',
+                # GFF3:
+                'ID', 'id',
+                # GFF (TODO):
+                'group' ]:
+            name = self.attributes.get( attr_name, None )
+            if name is not None:
+                break
+        return name
+
+    def copy( self ):
+        intervals_copy = []
+        for interval in self.intervals:
+            intervals_copy.append( interval.copy() )
+        return GFFFeature(self.reader, self.chrom_col, self.feature_col, self.start_col, self.end_col, self.strand_col,
+                          self.score_col, self.strand, intervals=intervals_copy )
+
+    def lines( self ):
+        lines = []
+        for interval in self.intervals:
+            lines.append( '\t'.join( interval.fields ) )
+        return lines
+
+
+class GFFIntervalToBEDReaderWrapper( NiceReaderWrapper ):
+    """
+    Reader wrapper that reads GFF intervals/lines and automatically converts
+    them to BED format.
+    """
+
+    def parse_row( self, line ):
+        # HACK: this should return a GFF interval, but bx-python operations
+        # require GenomicInterval objects and subclasses will not work.
+        interval = GenomicInterval( self, line.split( "\t" ), self.chrom_col, self.start_col,
+                                    self.end_col, self.strand_col, self.default_strand,
+                                    fix_strand=self.fix_strand )
+        interval = convert_gff_coords_to_bed( interval )
+        return interval
+
+
+class GFFReaderWrapper( NiceReaderWrapper ):
+    """
+    Reader wrapper for GFF files.
+
+    Wrapper has two major functions:
+
+    1. group entries for GFF file (via group column), GFF3 (via id attribute),
+       or GTF (via gene_id/transcript id);
+    2. convert coordinates from GFF format--starting and ending coordinates
+       are 1-based, closed--to the 'traditional'/BED interval format--0 based,
+       half-open. This is useful when using GFF files as inputs to tools that
+       expect traditional interval format.
+    """
+
+    def __init__( self, reader, chrom_col=0, feature_col=2, start_col=3,
+                  end_col=4, strand_col=6, score_col=5, fix_strand=False, convert_to_bed_coord=False, **kwargs ):
+        NiceReaderWrapper.__init__( self, reader, chrom_col=chrom_col, start_col=start_col, end_col=end_col,
+                                    strand_col=strand_col, fix_strand=fix_strand, **kwargs )
+        self.feature_col = feature_col
+        self.score_col = score_col
+        self.convert_to_bed_coord = convert_to_bed_coord
+        self.last_line = None
+        self.cur_offset = 0
+        self.seed_interval = None
+        self.seed_interval_line_len = 0
+
+    def parse_row( self, line ):
+        interval = GFFInterval( self, line.split( "\t" ), self.chrom_col, self.feature_col,
+                                self.start_col, self.end_col, self.strand_col, self.score_col,
+                                self.default_strand, fix_strand=self.fix_strand )
+        return interval
+
+    # For Python3 this needs to be changed to __next__() after bx-python library is ported too
+    def next( self ):
+        """ Returns next GFFFeature. """
+
+        #
+        # Helper function.
+        #
+
+        def handle_parse_error( parse_error ):
+            """ Actions to take when a ParseError is found. """
+            if self.outstream:
+                if self.print_delegate and hasattr(self.print_delegate, "__call__"):
+                    self.print_delegate( self.outstream, parse_error, self )
+            self.skipped += 1
+            # no reason to stuff an entire bad file into memory
+            if self.skipped < 10:
+                self.skipped_lines.append( ( self.linenum, self.current_line, str( parse_error ) ) )
+
+            # For debugging, uncomment this to propagate parsing exceptions up,
+            # i.e. the underlying reason for an unexpected StopIteration exception
+            # can be found by uncommenting this.
+            # raise parse_error
+
+        #
+        # Get next GFFFeature
+        #
+        raw_size = self.seed_interval_line_len
+
+        # If there is no seed interval, set one. Also, if there are no more
+        # intervals to read, this is where iterator dies.
+        if not self.seed_interval:
+            while not self.seed_interval:
+                try:
+                    self.seed_interval = GenomicIntervalReader.next( self )
+                except ParseError as e:
+                    handle_parse_error( e )
+                # TODO: When no longer supporting python 2.4 use finally:
+                # finally:
+                raw_size += len( self.current_line )
+
+        # If header or comment, clear seed interval and return it with its size.
+        if isinstance( self.seed_interval, ( Header, Comment ) ):
+            return_val = self.seed_interval
+            return_val.raw_size = len( self.current_line )
+            self.seed_interval = None
+            self.seed_interval_line_len = 0
+            return return_val
+
+        # Initialize feature identifier from seed.
+        feature_group = self.seed_interval.attributes.get( 'group', None )  # For GFF
+        # For GFF3
+        feature_id = self.seed_interval.attributes.get( 'ID', None )
+        # For GTF.
+        feature_transcript_id = self.seed_interval.attributes.get( 'transcript_id', None )
+
+        # Read all intervals associated with seed.
+        feature_intervals = []
+        feature_intervals.append( self.seed_interval )
+        while True:
+            try:
+                interval = GenomicIntervalReader.next( self )
+                raw_size += len( self.current_line )
+            except StopIteration as e:
+                # No more intervals to read, but last feature needs to be
+                # returned.
+                interval = None
+                raw_size += len( self.current_line )
+                break
+            except ParseError as e:
+                handle_parse_error( e )
+                raw_size += len( self.current_line )
+                continue
+            # TODO: When no longer supporting python 2.4 use finally:
+            # finally:
+            # raw_size += len( self.current_line )
+
+            # Ignore comments.
+            if isinstance( interval, Comment ):
+                continue
+
+            # Determine if interval is part of feature.
+            part_of = False
+            group = interval.attributes.get( 'group', None )
+            # GFF test:
+            if group and feature_group == group:
+                part_of = True
+            # GFF3 test:
+            parent_id = interval.attributes.get( 'Parent', None )
+            cur_id = interval.attributes.get( 'ID', None )
+            if ( cur_id and cur_id == feature_id ) or ( parent_id and parent_id == feature_id ):
+                part_of = True
+            # GTF test:
+            transcript_id = interval.attributes.get( 'transcript_id', None )
+            if transcript_id and transcript_id == feature_transcript_id:
+                part_of = True
+
+            # If interval is not part of feature, clean up and break.
+            if not part_of:
+                # Adjust raw size because current line is not part of feature.
+                raw_size -= len( self.current_line )
+                break
+
+            # Interval associated with feature.
+            feature_intervals.append( interval )
+
+        # Last interval read is the seed for the next interval.
+        self.seed_interval = interval
+        self.seed_interval_line_len = len( self.current_line )
+
+        # Return feature.
+        feature = GFFFeature( self, self.chrom_col, self.feature_col, self.start_col,
+                              self.end_col, self.strand_col, self.score_col,
+                              self.default_strand, fix_strand=self.fix_strand,
+                              intervals=feature_intervals, raw_size=raw_size )
+
+        # Convert to BED coords?
+        if self.convert_to_bed_coord:
+            convert_gff_coords_to_bed( feature )
+
+        return feature
+
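+# Usage sketch for GFFReaderWrapper (assumes a local 'example.gff' file;
+# illustrative only): iterate grouped features with BED-style coordinates.
+#
+#   reader = GFFReaderWrapper( open( 'example.gff' ), fix_strand=True,
+#                              convert_to_bed_coord=True )
+#   for feature in reader:
+#       if isinstance( feature, GFFFeature ):
+#           print feature.name(), feature.chrom, feature.start, feature.end
+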
+
+def convert_bed_coords_to_gff( interval ):
+    """
+    Converts an interval object's coordinates from BED format to GFF format.
+    Accepted object types include GenomicInterval and list (where the first
+    element in the list is the interval's start, and the second element is
+    the interval's end).
+    """
+    if isinstance( interval, GenomicInterval ):
+        interval.start += 1
+        if isinstance( interval, GFFFeature ):
+            for subinterval in interval.intervals:
+                convert_bed_coords_to_gff( subinterval )
+    elif isinstance(interval, list):
+        interval[ 0 ] += 1
+    return interval
+
+
+def convert_gff_coords_to_bed( interval ):
+    """
+    Converts an interval object's coordinates from GFF format to BED format.
+    Accepted object types include GFFFeature, GenomicInterval, and list (where
+    the first element in the list is the interval's start, and the second
+    element is the interval's end).
+    """
+    if isinstance( interval, GenomicInterval ):
+        interval.start -= 1
+        if isinstance( interval, GFFFeature ):
+            for subinterval in interval.intervals:
+                convert_gff_coords_to_bed( subinterval )
+    elif isinstance(interval, list):
+        interval[ 0 ] -= 1
+    return interval
+
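+# Worked example (illustrative): a GFF interval covering bases 1..100
+# (1-based, closed) corresponds to 0..100 (0-based, half-open) in BED
+# coordinates, and the list form shows the round trip:
+#
+#   convert_gff_coords_to_bed( [1, 100] )   # -> [0, 100]
+#   convert_bed_coords_to_gff( [0, 100] )   # -> [1, 100]
+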
+
+def parse_gff_attributes( attr_str ):
+    """
+    Parses a GFF/GTF attribute string and returns a dictionary of name-value
+    pairs. The general format for a GFF3 attributes string is
+
+        name1=value1;name2=value2
+
+    The general format for a GTF attribute string is
+
+        name1 "value1" ; name2 "value2"
+
+    The general format for a GFF attribute string is a single string that
+    denotes the interval's group; in this case, the method returns a dictionary
+    with a single key-value pair whose key is 'group'.
+    """
+    attributes_list = attr_str.split(";")
+    attributes = {}
+    for name_value_pair in attributes_list:
+        # Try splitting by '=' (GFF3) first because spaces are allowed in GFF3
+        # attribute; next, try double quotes for GTF.
+        pair = name_value_pair.strip().split("=")
+        if len( pair ) == 1:
+            pair = name_value_pair.strip().split("\"")
+        if len( pair ) == 1:
+            # Could not split for some reason -- raise exception?
+            continue
+        name = pair[0].strip()
+        if name == '':
+            continue
+        # Need to strip double quote from values
+        value = pair[1].strip(" \"")
+        attributes[ name ] = value
+
+    if len( attributes ) == 0:
+        # Could not split attributes string, so entire string must be
+        # 'group' attribute. This is the case for strictly GFF files.
+        attributes['group'] = attr_str
+    return attributes
+
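+# Illustrative examples, traced against the splitting logic above:
+#
+#   parse_gff_attributes( 'ID=mrna0001;Name=sonichedgehog' )
+#   # -> {'ID': 'mrna0001', 'Name': 'sonichedgehog'}
+#   parse_gff_attributes( 'gene_id "uc001aaa.3"; transcript_id "uc001aaa.3";' )
+#   # -> {'gene_id': 'uc001aaa.3', 'transcript_id': 'uc001aaa.3'}
+#   parse_gff_attributes( 'mygroup' )
+#   # -> {'group': 'mygroup'}   (plain-GFF group fallback)
+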
+
+def gff_attributes_to_str( attrs, gff_format ):
+    """
+    Convert GFF attributes to string. Supported formats are GFF3, GTF.
+    """
+    if gff_format == 'GTF':
+        format_string = '%s "%s"'
+        # Convert group (GFF) and ID, parent (GFF3) attributes to transcript_id, gene_id
+        id_attr = None
+        if 'group' in attrs:
+            id_attr = 'group'
+        elif 'ID' in attrs:
+            id_attr = 'ID'
+        elif 'Parent' in attrs:
+            id_attr = 'Parent'
+        if id_attr:
+            attrs['transcript_id'] = attrs['gene_id'] = attrs[id_attr]
+    elif gff_format == 'GFF3':
+        format_string = '%s=%s'
+    else:
+        raise ValueError( "Unsupported GFF attribute format: %s" % gff_format )
+    attrs_strs = []
+    for name, value in attrs.items():
+        attrs_strs.append( format_string % ( name, value ) )
+    return " ; ".join( attrs_strs )
+
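+# Illustrative example: serializing to GTF adds the transcript_id/gene_id
+# aliases derived above (dictionary ordering of the pairs is not guaranteed):
+#
+#   gff_attributes_to_str( {'ID': 'mrna0001'}, 'GTF' )
+#   # -> 'ID "mrna0001" ; transcript_id "mrna0001" ; gene_id "mrna0001"'
+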
+
+def read_unordered_gtf( iterator, strict=False ):
+    """
+    Returns GTF features found in an iterator. GTF lines need not be ordered
+    or clustered for reader to work. Reader returns GFFFeature objects sorted
+    by transcript_id, chrom, and start position.
+    """
+
+    # -- Get function that generates line/feature key. --
+    def get_transcript_id(fields):
+        return parse_gff_attributes( fields[8] )[ 'transcript_id' ]
+    if strict:
+        # Strict GTF parsing uses transcript_id only to group lines into feature.
+        key_fn = get_transcript_id
+    else:
+        # Use lenient parsing where chromosome + transcript_id is the key. This allows
+        # transcripts with same ID on different chromosomes; this occurs in some popular
+        # datasources, such as RefGenes in UCSC.
+        def key_fn(fields):
+            return fields[0] + '_' + get_transcript_id( fields )
+
+    # Aggregate intervals by transcript_id and collect comments.
+    feature_intervals = odict()
+    comments = []
+    for count, line in enumerate( iterator ):
+        if line.startswith( '#' ):
+            comments.append( Comment( line ) )
+            continue
+
+        line_key = key_fn( line.split('\t') )
+        if line_key in feature_intervals:
+            feature = feature_intervals[ line_key ]
+        else:
+            feature = []
+            feature_intervals[ line_key ] = feature
+        feature.append( GFFInterval( None, line.split( '\t' ) ) )
+
+    # Create features.
+    chroms_features = {}
+    for count, intervals in enumerate( feature_intervals.values() ):
+        # Sort intervals by start position.
+        intervals.sort( key=lambda interval: interval.start )
+        feature = GFFFeature( None, intervals=intervals )
+        if feature.chrom not in chroms_features:
+            chroms_features[ feature.chrom ] = []
+        chroms_features[ feature.chrom ].append( feature )
+
+    # Sort features by chrom, start position.
+    chroms_features_sorted = []
+    for chrom_features in chroms_features.values():
+        chroms_features_sorted.append( chrom_features )
+    chroms_features_sorted.sort( key=lambda features: features[0].chrom )
+    for features in chroms_features_sorted:
+        features.sort( key=lambda feature: feature.start )
+
+    # Yield comments first, then features.
+    # FIXME: comments can appear anywhere in file, not just the beginning.
+    # Ideally, then comments would be associated with features and output
+    # just before feature/line.
+    for comment in comments:
+        yield comment
+
+    for chrom_features in chroms_features_sorted:
+        for feature in chrom_features:
+            yield feature
diff --git a/lib/galaxy/datatypes/xml.py b/lib/galaxy/datatypes/xml.py
new file mode 100644
index 0000000..ce26b0c
--- /dev/null
+++ b/lib/galaxy/datatypes/xml.py
@@ -0,0 +1,156 @@
+"""
+XML format classes
+"""
+import logging
+import re
+
+import data
+import dataproviders
+
+log = logging.getLogger(__name__)
+
+
+@dataproviders.decorators.has_dataproviders
+class GenericXml( data.Text ):
+    """Base format class for any XML file."""
+    edam_format = "format_2332"
+    file_ext = "xml"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'XML data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """
+        Determines whether the file is XML or not
+
+        >>> from galaxy.datatypes.sniff import get_test_fname
+        >>> fname = get_test_fname( 'megablast_xml_parser_test1.blastxml' )
+        >>> GenericXml().sniff( fname )
+        True
+        >>> fname = get_test_fname( 'interval.interval' )
+        >>> GenericXml().sniff( fname )
+        False
+        """
+        with open(filename) as handle:
+            line = handle.readline()
+
+        # TODO - Is there a more robust way to do this?
+        return line.startswith('<?xml ')
+
+    @staticmethod
+    def merge(split_files, output_file):
+        """Merging multiple XML files is non-trivial and must be done in subclasses."""
+        if len(split_files) > 1:
+            raise NotImplementedError("Merging multiple XML files is non-trivial and must be implemented for each XML type")
+        # For one file only, use base class method (move/copy)
+        data.Text.merge(split_files, output_file)
+
+    @dataproviders.decorators.dataprovider_factory( 'xml', dataproviders.hierarchy.XMLDataProvider.settings )
+    def xml_dataprovider( self, dataset, **settings ):
+        dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
+        return dataproviders.hierarchy.XMLDataProvider( dataset_source, **settings )
+
+
+class MEMEXml( GenericXml ):
+    """MEME XML Output data"""
+    file_ext = "memexml"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'MEME XML data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        return False
+
+
+class CisML( GenericXml ):
+    """CisML XML data"""  # see: http://www.ncbi.nlm.nih.gov/pubmed/15001475
+    file_ext = "cisml"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'CisML data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        return False
+
+
+class Phyloxml( GenericXml ):
+    """Format for defining phyloxml data http://www.phyloxml.org/"""
+    edam_data = "data_0872"
+    edam_format = "format_3159"
+    file_ext = "phyloxml"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        """Set the peek and blurb text"""
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = 'Phyloxml data'
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """Checking for the keyword 'phyloxml' (always in lowercase) in the first few lines"""
+
+        with open( filename, "r" ) as f:
+            firstlines = "".join( f.readlines( 5 ) )
+
+        if "phyloxml" in firstlines:
+            return True
+        return False
+
+    def get_visualizations( self, dataset ):
+        """
+        Returns a list of visualizations for datatype.
+        """
+
+        return [ 'phyloviz' ]
+
+
+class Owl( GenericXml ):
+    """
+        Web Ontology Language OWL format description
+        http://www.w3.org/TR/owl-ref/
+    """
+    edam_format = "format_3262"
+    file_ext = "owl"
+
+    def set_peek( self, dataset, is_multi_byte=False ):
+        if not dataset.dataset.purged:
+            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
+            dataset.blurb = "Web Ontology Language OWL"
+        else:
+            dataset.peek = 'file does not exist'
+            dataset.blurb = 'file purged from disk'
+
+    def sniff( self, filename ):
+        """
+            Checking for the keyword '<owl:' in the first 200 lines.
+        """
+        owl_marker = re.compile(r'\<owl:')
+        with open( filename ) as handle:
+            # Check the first 200 lines for the string "<owl:"
+            for line_count, line in enumerate( handle ):
+                if line_count > 200:
+                    return False
+                if owl_marker.search( line ):
+                    return True
+        return False
diff --git a/lib/galaxy/dependencies/__init__.py b/lib/galaxy/dependencies/__init__.py
new file mode 100644
index 0000000..66b7653
--- /dev/null
+++ b/lib/galaxy/dependencies/__init__.py
@@ -0,0 +1,116 @@
+"""
+Determine what optional dependencies are needed.
+"""
+import pkg_resources
+
+from os.path import dirname, join
+from xml.etree import ElementTree
+
+from galaxy.util import asbool
+from galaxy.util.properties import load_app_properties
+
+
+class ConditionalDependencies( object ):
+    def __init__( self, config_file ):
+        self.config_file = config_file
+        self.config = None
+        self.job_runners = []
+        self.authenticators = []
+        self.object_stores = []
+        self.conditional_reqs = []
+        self.parse_configs()
+        self.get_conditional_requirements()
+
+    def parse_configs( self ):
+        self.config = load_app_properties( ini_file=self.config_file )
+        job_conf_xml = self.config.get(
+            "job_config_file",
+            join( dirname( self.config_file ), 'job_conf.xml' ) )
+        try:
+            for plugin in ElementTree.parse( job_conf_xml ).find( 'plugins' ).findall( 'plugin' ):
+                if 'load' in plugin.attrib:
+                    self.job_runners.append( plugin.attrib['load'] )
+        except (OSError, IOError):
+            pass
+        object_store_conf_xml = self.config.get(
+            "object_store_config_file",
+            join( dirname( self.config_file ), 'object_store_conf.xml' ) )
+        try:
+            for store in ElementTree.parse( object_store_conf_xml ).iter( 'object_store' ):
+                if 'type' in store.attrib:
+                    self.object_stores.append( store.attrib['type'] )
+        except (OSError, IOError):
+            pass
+
+        # Parse auth conf
+        auth_conf_xml = self.config.get(
+            "auth_config_file",
+            join( dirname( self.config_file ), 'auth_conf.xml' ) )
+        try:
+            for auth in ElementTree.parse( auth_conf_xml ).findall( 'authenticator' ):
+                auth_type = auth.find('type')
+                if auth_type is not None:
+                    self.authenticators.append( auth_type.text )
+        except (OSError, IOError):
+            pass
+
+    def get_conditional_requirements( self ):
+        crfile = join( dirname( __file__ ), 'conditional-requirements.txt' )
+        with open( crfile ) as fh:
+            for req in pkg_resources.parse_requirements( fh.readlines() ):
+                self.conditional_reqs.append( req )
+
+    def check( self, name ):
+        try:
+            name = name.replace('-', '_').replace('.', '_')
+            return getattr( self, 'check_' + name )()
+        except Exception:
+            return False
+
+    def check_psycopg2( self ):
+        return self.config["database_connection"].startswith( "postgres" )
+
+    def check_mysql_python( self ):
+        return self.config["database_connection"].startswith( "mysql" )
+
+    def check_drmaa( self ):
+        return ("galaxy.jobs.runners.drmaa:DRMAAJobRunner" in self.job_runners or
+                "galaxy.jobs.runners.slurm:SlurmJobRunner" in self.job_runners)
+
+    def check_pbs_python( self ):
+        return "galaxy.jobs.runners.pbs:PBSJobRunner" in self.job_runners
+
+    def check_python_openid( self ):
+        return asbool( self.config["enable_openid"] )
+
+    def check_fluent_logger( self ):
+        return asbool( self.config["fluent_log"] )
+
+    def check_raven( self ):
+        return self.config.get("sentry_dsn", None) is not None
+
+    def check_statsd( self ):
+        return self.config.get("statsd_host", None) is not None
+
+    def check_weberror( self ):
+        return ( asbool( self.config["debug"] ) and
+                 asbool( self.config["use_interactive"] ) )
+
+    def check_pygments( self ):
+        # pygments is a dependency of weberror and only weberror
+        return self.check_weberror()
+
+    def check_python_ldap( self ):
+        return ('ldap' in self.authenticators or
+                'activedirectory' in self.authenticators)
+
+    def check_azure_storage( self ):
+        return 'azure_blob' in self.object_stores
+
+
+def optional( config_file ):
+    rval = []
+    conditional = ConditionalDependencies( config_file )
+    for opt in conditional.conditional_reqs:
+        if conditional.check( opt.key ):
+            rval.append( str( opt ) )
+    return rval
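+
+
+# Usage sketch (assumes a standard config path; illustrative only):
+#
+#   from galaxy.dependencies import optional
+#   extra_requirements = optional( 'config/galaxy.ini' )
+#
+# This returns the entries from conditional-requirements.txt (as requirement
+# strings) whose corresponding check_*() method reports that they are needed.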
diff --git a/lib/galaxy/dependencies/conda-environment.txt b/lib/galaxy/dependencies/conda-environment.txt
new file mode 100644
index 0000000..b1bd21f
--- /dev/null
+++ b/lib/galaxy/dependencies/conda-environment.txt
@@ -0,0 +1,75 @@
+# This file can be used with conda's `--file` option to install as many of
+# Galaxy's dependencies as possible using Conda. The file
+# requirements.txt in this directory can be used to install the remaining
+# missing dependencies using pip:
+#
+# $ conda create --name galaxy --file lib/galaxy/dependencies/conda-environment.txt
+# $ source activate galaxy
+# $ pip install --index-url=https://wheels.galaxyproject.org/simple/ -r lib/galaxy/dependencies/requirements.txt
+#
+# or if you already have an environment:
+#
+# $ source activate galaxy
+# $ conda install --file lib/galaxy/dependencies/conda-environment.txt
+# $ pip install --index-url=https://wheels.galaxyproject.org/simple/ -r lib/galaxy/dependencies/requirements.txt
+#
+# More details are available in the administration section of the Galaxy
+# documentation at https://galaxy.readthedocs.org/
+
+# packages with C extensions
+# numpy should be installed before bx to enable extra features in bx
+numpy
+bx-python
+MarkupSafe
+PyYAML
+SQLAlchemy
+mercurial
+pycrypto
+
+# Install python_lzo if you want to support indexed access to lzo-compressed
+# locally cached maf files via bx-python
+#python_lzo
+
+# pure Python packages
+Paste
+PasteDeploy
+docutils
+#wchartype
+repoze.lru
+Routes
+WebOb
+#WebHelpers
+Mako
+pytz
+Babel
+Whoosh
+#Beaker
+
+# Cheetah and dependencies
+Cheetah
+
+# BioBlend and dependencies
+bioblend
+boto
+requests
+
+# kombu and dependencies
+#kombu
+
+# sqlalchemy-migrate and dependencies
+#sqlalchemy-migrate
+decorator
+#Tempita
+sqlparse
+six
+#Parsley
+nose
+svgwrite
+
+# Fabric and dependencies
+Fabric
+
+# We still pin these dependencies because of modifications to the upstream packages
+
+# Flexible BAM index naming
+#pysam==0.8.4+gx5
diff --git a/lib/galaxy/dependencies/conditional-requirements.txt b/lib/galaxy/dependencies/conditional-requirements.txt
new file mode 100644
index 0000000..ba69031
--- /dev/null
+++ b/lib/galaxy/dependencies/conditional-requirements.txt
@@ -0,0 +1,14 @@
+# These dependencies are only required when certain config options are set
+psycopg2==2.6.1
+WebError==0.10.3
+Pygments==2.0.2
+python-openid
+MySQL-python
+fluent-logger
+raven
+pbs_python
+drmaa
+statsd
+azure-storage==0.32.0
+# PyRods not in PyPI
+python-ldap==2.4.27
diff --git a/lib/galaxy/dependencies/dev-requirements.txt b/lib/galaxy/dependencies/dev-requirements.txt
new file mode 100644
index 0000000..6e3a6df
--- /dev/null
+++ b/lib/galaxy/dependencies/dev-requirements.txt
@@ -0,0 +1,4 @@
+nose
+NoseHTML
+twill==0.9.1
+mock
diff --git a/lib/galaxy/dependencies/pinned-hashed-requirements.txt b/lib/galaxy/dependencies/pinned-hashed-requirements.txt
new file mode 100644
index 0000000..aa60153
--- /dev/null
+++ b/lib/galaxy/dependencies/pinned-hashed-requirements.txt
@@ -0,0 +1,150 @@
+# packages with C extensions
+bx-python==0.7.3 --hash=sha256:0a0fd962edd0f9afdf5d7ccbdf46dc220a0cb652073a93411dd6f7f649f07d91 \
+                 --hash=sha256:46dc3042c7e475363830fa92d4a613ae10b58db7f114ff8193d2ee0bc3c4607e \
+                 --hash=sha256:b93a9378a5d0e29ea8a13e026c170f48bf1f8316a39c342b1eb54f557b4fb3c4 \
+                 --hash=sha256:d06c7bf0ae7f019521aeb5ffe4df48bb5fdd4d75d7ad00e0059b997c4f13ca3a \
+                 --hash=sha256:05d0ab8fd13319d5ef9edb246dc6805640ea7574bb885a0b8607474271ac5793 \
+                 --hash=sha256:5a026869227a6464eba23835765a36ffc2910f7af51b81f672a97c0d005e04a3 \
+                 --hash=sha256:85be43a6b94f0c841d9954f820133db35067854504c1de87dd0089bb06eca18d \
+                 --hash=sha256:d085824eb33c54d333b99dca29b49890457a8263f431b54b17ae3052be191d2b \
+                 --hash=sha256:41bcf85b9be9f61ba7c7bae48c5a5ae3251868a1b25f02b6eb93b29bcabfd00c \
+                 --hash=sha256:c9b71aee26ee85fbcbb3060109d79531bfa8991f4924bdcfed63d909a5937290
+MarkupSafe==0.23 --hash=sha256:3a79d9650b7e79943be51a045c7c39a77ebbe9a337e8af51a5575e39332dce87 \
+                 --hash=sha256:e0a4155493d1cfa72e985dbe1660a44d64a173282801563956e061b24371266a \
+                 --hash=sha256:147013ed157f1423a0d0047043c6bbccbef31a52d82e2a10dfc3c6de9392765c \
+                 --hash=sha256:d25d14cb01284aed33690b85bfde8de206ffcfdc52351588bcf17480839bef6f \
+                 --hash=sha256:29958f81958e0dd9f5dd36d52f2ac549d875dde3559b783578118c9005693852 \
+                 --hash=sha256:287a1c6b0d2a08cb76be6e584bf119649405f015598cf8b78d4a592cc80700a0 \
+                 --hash=sha256:973cadf7b9907f9c81d9b898817d6a485b37325176befec2389fa2ced4c61a51 \
+                 --hash=sha256:a365fd3468c86741aec5901f4b07990aaa0b458fb79e9f3557378a63e6d627e9 \
+                 --hash=sha256:ba7af558b18ba9c05bb0d36f22a5ea65ee11ba44a6cf8a2950f2f80c08260e11 \
+                 --hash=sha256:51bf7f077938c64dd00de6732996ccee8ceba2206c69acfb07f66430ca76372f
+PyYAML==3.11 --hash=sha256:2912c1fb56c316461d003a8b8f1f41e1ed8387b2e47a4523f8a4eec8aa85e241 \
+             --hash=sha256:f4437443dc014c04ceee8cc94d4165f01ba23643991fd2c6b919f26cf9997474 \
+             --hash=sha256:b62cde962d41377c175831fa3fd6ac41585d2e2111ca4c45a6ee80a832457e92 \
+             --hash=sha256:aad37be9d524ba2dba9a3114b620b2800b40e67e455c9741a5cb81a6a0e882f6 \
+             --hash=sha256:03e0f4ac96599c1d687b080f1610ab0469e8785bb48cfdf860f2076a08ea6406 \
+             --hash=sha256:7dc97b5b1b920f6bbe0e47cfa6ff112f6eede35924a63354f80d672354f25160 \
+             --hash=sha256:ceb5dccec2045d4a864c6e808b397b10830476c44bdc64b80c18bf29e481ba07 \
+             --hash=sha256:9b184a4a7d3d383b39cb73429d3798370facaee4263465b575f243c7ede3f9c3 \
+             --hash=sha256:f90314e2ed46ea2da980aff88b91c3cc8ce93edd4658011dd462a8f28399533b \
+             --hash=sha256:80936dca10b611cd54ca52939aac938d59653830a6a6265bef5d11896de8e249
+SQLAlchemy==1.0.15 --hash=sha256:7cbac295f87d2af82aac08346b52f72d0b0c007122904951990a53720ad1f820 \
+                   --hash=sha256:bfcef5888031c323407bd924d5718f2f2541867a8dbac87498ea8c2f632a5e1e \
+                   --hash=sha256:0ee07470bc1e64dbceaca4340e6427476d2b03ff3aa069100f3468587f8a973a \
+                   --hash=sha256:f819742f33ae543ca2c48bc43e35bd75372f7c3e379e4881f9040549d3df94b6 \
+                   --hash=sha256:fce29753f720f8a6920185bd5a4d1670fa6b6936819475197b2bce7644243766 \
+                   --hash=sha256:aa8843df6869f6c999400ae32de5480fba1b7711fe535c168a14225ffd1d3de4
+mercurial==3.7.3 --hash=sha256:49f596820f005ac6f94266b89a0dbd815c0ee0aacd3aa86545dc39fb349ea4bf \
+                 --hash=sha256:75f5b6b708bec2d6e0e68ec8912725a5dcac42df00b122fa73a8c4bd21495039 \
+                 --hash=sha256:8c07cb3404d26641d8829099bd1047582b76def49eca067ba6bea9a73d261775 \
+                 --hash=sha256:6a05bf22101b79b5f56421f7a1c6d301000f759f0114827f37cc9f847c1ab352 \
+                 --hash=sha256:499a26d489325808d591ee1d60a488152a0edb38aa6e64cf2d8bf0ef354ca112 \
+                 --hash=sha256:d816c7752327474d11ab4a39c953995a171b6f37ad3bed3008d4ad9e4166d654 \
+                 --hash=sha256:01d96fe053506cf4ebf8858beca4ea258a853c291b5af89b8ee34b842fb52c6a \
+                 --hash=sha256:ec82ad6511a3d9e4ccb80a44d87ce6c2ef59fbf68aabe025225470e04a0f0631 \
+                 --hash=sha256:2261a7a60279ec094d0a7967337e922dfd82266a5696b910621081d878e2585a \
+                 --hash=sha256:f31fb4dea04f1776e1a503d9bf8d87cbba61717923d1f9ad91d7ef35ef064b24 \
+                 --hash=sha256:1164c24a4cf036ee87f9b492175b2d8688e246c90888804deafe4565bd42c558
+numpy==1.9.2 --hash=sha256:931ce6fd1180a10d58a925b614775706c05de35bca5805e705e1da17e1326862 \
+             --hash=sha256:2340c4b72fdab79ac0fc04bb6629b26eeb93d10f4945b6b1f07d9be8663cab84 \
+             --hash=sha256:83fe34ff67653a639c57cfd04a0d48dda5913250fd95c8466af1d8416b58af59 \
+             --hash=sha256:aeb182d95ddb2dfd81683907be193cfc11c4d0ba744faf457013499fa14a131e \
+             --hash=sha256:d6f1eec7ae250892a2b142af406e43760042badae8e2f168136eace5dd9f05c3 \
+             --hash=sha256:0b0883cac08e3c0999159ba7f34c9b94d26d7d1a2be4017b792145d722646fb7 \
+             --hash=sha256:a124d11b1bec5c517d60ad9414564b5af0828421112d2a00771d22715eab1c3c \
+             --hash=sha256:fabc2f76266de7396e2752baabf287cdd0cc2ed23c1f36ca3c3e8b094e86b87c \
+             --hash=sha256:f83a667d3a38643b3bf6361904cd6c4f2a18d1420c2204c7f269cd36b730c404 \
+             --hash=sha256:dab9bd8c867403340b370ea383c8eda3dc984dd5efc91904a5695e00b1762fc9
+pycrypto==2.6.1 --hash=sha256:44c20785a1057b8d5e190d6a1847f7c62a8180b9b27a3aca7c3ec920bfb8e7b7 \
+                --hash=sha256:785ba61251cda6e574e0affc4412c346ef615455b81287fe0d6a2e1c5aed5172 \
+                --hash=sha256:5ec40d95520a665843915763c8cbbf40457330bd7ebf2a55ffea1b10a519d12b \
+                --hash=sha256:2cb43f0b293a18e6d5818287150994cf6083b1a9cdff65c19d1150a34aa813bc \
+                --hash=sha256:d48e46f4ccd5c21f3e01bbdbd8e7a86443fc7abdf3f93829c3d04ed684ec22c5 \
+                --hash=sha256:30312bd39f45c91f287f3e6e54c40988d30b1be9a9f332585e3cc00ee5f91554 \
+                --hash=sha256:d129d6d3441efecf2bc7e6b36cb35e99642248d72dbf59b992e6940b9359dabc \
+                --hash=sha256:8884b92835a1c0c763099981d19c990fdc3ae2b7011eb93b1f96e34a346b2d2f \
+                --hash=sha256:7e7e9abc0ae522ae86772d98b7144e0d2ef7791d124936f283680f84fe8cce6c \
+                --hash=sha256:61d3b42eaf89108b8a24c3736fb770f768efb89b1efaf0825efb5c316f0bffdc
+
+# Install python_lzo if you want to support indexed access to lzo-compressed
+# locally cached maf files via bx-python
+#python_lzo==1.8 --hash=sha256:638b38bc1d83561a9807365676f5b2ba6f18491753a673e6a9c5913bd542fe1a \
+#                --hash=sha256:cda3d6c27e69a9d04eb96db6b4b96fbe7facdd11dc2b9a82fe3c65b84b40fe44 \
+#                --hash=sha256:7ededfdb65b4eb282a7cc1600c6f5b4f483b39a70b3460c2f4651ee8fc431f77 \
+#                --hash=sha256:86a61008a8ab5f8ca973bd7e7b1d031c89637257dcf7215bff1913f17d3c596f \
+#                --hash=sha256:1d6af33e40383dba0c3ee26eee3e501e805fe42537dd0096788190bbc8fd7aae \
+#                --hash=sha256:6d08ad35322a5969dfd8ef5af7ff4cdf625d6a7bfa8e915b77f5935f58e1c7b1 \
+#                --hash=sha256:99b7e0c186c5b84d80de6e7b70359b6fc456c02bd2b31fc598140fbc6618d651 \
+#                --hash=sha256:be967c38c4e33f66823d10fb5efca42a0a23269459ce41239c173ccc114b603e \
+#                --hash=sha256:a34d7431e30c6d61c22b92d51224ea34d68be51aa42ec95a053b7f19be3fef0d \
+#                --hash=sha256:be76281c1c7f24d44e3415787d2a05688f6bbda2dc9bba92155ae3ca0d6716e7
+
+# pure Python packages
+Paste==2.0.2 --hash:sha256=4e7bb2d8cbdc47ab46e4af0cb61d2af91fccc620a08415d9c4da9e499f0455fd
+PasteDeploy==1.5.2 --hash:sha256=3922127d3acc6e274a800978b9293c874b3ef4ac2eb8bf485e2c85b22d1f260a
+docutils==0.12 --hash:sha256=732dfc2d706ea390c264bc69110d3d6c7c3a520628a052ccc5998466f95d5c29
+wchartype==0.1 --hash:sha256=2932471fe3a4e5cac4539c1b49bbc2bbd24ec3f00532c7f27c80b7756fdc177d
+repoze.lru==0.6 --hash:sha256=731cc0b0a184c9fd270a4f29d9635099bb0398823e9a1a39bf4f488d1ead57b3
+Routes==2.2 --hash:sha256=1d61dc9f1bbd86504de221568743133fbdfeb70f11cee4222cef72aa108e834c
+WebOb==1.4.1 --hash:sha256=dc3c45ac0b56a3c65f47f1da6c23760dfe0139e8a84caaf1cfc9276b3e10875a
+WebHelpers==1.3 --hash:sha256=8969ec3fb851872096067a805d512c3bed1952e6fc66f1ef1d1c6a7470937688
+Mako==1.0.2 --hash:sha256=d3f372cbc2e7de080b5f7056d160f3ac8279353a2ea8f1ef8fd5b0cf0a6a3b17
+pytz==2015.4 --hash:sha256=3bec70eef120ad0bf9143e59bbf36715ec1232fb0550ae2b8aa6c107aacfa6f2
+Babel==2.0 --hash:sha256=aa725635ef189f77b6b0579efe1e91602d7cf63a31f6dd719a88ee53642bb5ab
+Beaker==1.7.0 --hash:sha256=334427853ac291e4ecd1e03addb48e231681ab9b6c2bc793c0f0fb98024b70b1
+Whoosh==2.7.4 --hash:sha256=19d51999afc00df41ae301bde2a77ae5a6eb0cb4b20b3b72be40e800ee95c82f \
+              --hash:sha256=d880b32f6ca2911a84e06b7f8b06cf41a22cd6fc62c3f67cbe97c3cddad18f15
+
+# Cheetah and dependencies
+Cheetah==2.4.4  --hash:sha256=24cf67789b7a9f1e0a55cba266b6aad16e97aaf25810fd6f2f7e7b36005c1161 \
+                --hash:sha256=30c6dd32c066f483462cebc7fc37feb454d6a3d35e8b599da17624b2541761ab \
+                --hash:sha256=fcbff7d96509419cbcefa231d6fee4aca98d2f04f8339848c19da3e82aebcf27 \
+                --hash:sha256=5c758550b8170b01e016b75ec05dbd2c32b56513d6e1ef9177c5290f4e166ad2 \
+                --hash:sha256=6aaeef7f742df6a23c4132bb33000196053b661a150fdd98311f25f09d3730c1 \
+                --hash:sha256=469e7ddc2f4a9b147b2b5c258fc9f97cee9842790c1849b941bae165b390da92 \
+                --hash:sha256=75f05eda3b16c621b5b570f9c5b317f6ea94b01296b9e1ef2583560d153578bd \
+                --hash:sha256=6da983e9d189a3c389e98506871ff836a83df1792dba5e4a684d27d948402d7a \
+                --hash:sha256=55b0c2696eb321415a9eca86303e3c359dee550d7f6fc719bc960f23422d2290 \
+                --hash:sha256=2caf567b6cdd54f806ce12d98302e380d4bbbed8990663f9bce52d69358fc488
+Markdown==2.6.3 --hash:sha256=59417f5eb3dace08309d81e48fc903217d12d66677f367674b22c317b0f04284
+
+# BioBlend and dependencies
+bioblend==0.7.0 --hash:sha256=cb1068c0b05ca069a550ecbbf19014631bb6570b565387de15876c44478ee07d \
+                --hash:sha256=db7dc929199837b6d9a502fcf9d537a57fdee605ca45cd1548583a5eccc648a6
+boto==2.38.0 --hash:sha256=1760584368da0c3861dbed448dcb439334ce831044262a5ca5188167f8bba585
+requests==2.8.1 --hash:sha256=70e285122c97b1d104824482cb929246b2b8a2b9858897c4fab3acec83f1e03c
+requests-toolbelt==0.4.0 --hash:sha256=94694e32845b84ed2186fdf56219b949582ebd08dc3abc55c0549958f2029643
+
+# kombu and dependencies
+kombu==3.0.30  --hash:sha256=562010f0deb1f04e3abd154b0ef5e246d606b54cb2c5f29e8c4651b354d0b082 \
+               --hash:sha256=5baff23a2b2d78eecb7c97700ea359ab80744c719e311fb446513d7b72ddbc96
+amqp==1.4.8 --hash:sha256=c7874d845c9bdffbf754ce84f105a1b8c7a6cffb61f362ac67591536ba8c0c1a \
+            --hash:sha256=caada1c29d5e73e13ed8706442ee0fbb1440de9b6fca4df12fc8ff59dd61f6f6
+anyjson==0.3.3 --hash:sha256=2c84f23be20e3c465151a55917b8e38a04c579394057e232c16fb5ca0c6ae108
+
+# sqlalchemy-migrate and dependencies
+sqlalchemy-migrate==0.10.0 --hash:sha256=2aaa4e7070c8447d564f1c173937c2c5a45f889eb31a0fc80add5fc175c45a5e
+decorator==4.0.2 --hash:sha256=d8c3f7e18efba558084648d6dd6c6b0850e6f5c08c392975ae196c4b4eb7be2a
+Tempita==0.5.3dev --hash:sha256=df0c6513e565b213733b3f8bf7bcca75998f166042692e1712e3fe36568db234
+sqlparse==0.1.16 --hash:sha256=221d383f1236f9c05aadd2c4edf4274144279ad06821d7d9bf5a70751157f75d
+pbr==1.8.0 --hash:sha256=355a0c2ba71120dce1c522387e494c60eb4f44fa9d03067cb9816187e6e803e9
+# six is also a Pulsar client dep
+six==1.9.0 --hash:sha256=cf806ab65d5e16561df30c2cea2fb2cf3e6aa82e48f22036dff177e20f61e188
+Parsley==1.3 --hash:sha256=32fd648334146e4005dae41d559d32edfff9f3d5f503f8b5d233dbfe3a15a28b
+nose==1.3.7 --hash:sha256=f36054c5786e84ff6b131f3249a3cce82787f1a930ab053287c0edaa9f840d0f
+svgwrite==1.1.6 --hash:sha256=e404424dad5eaa41efec6e68daa5c0603e3f4f576ada5dca730669103ab83660
+
+# Fabric and dependencies
+Fabric==1.10.2 --hash:sha256=8fe1eb0b0f271e2d41445e7da4470548d3bdf882aab1c1f143505c599996793b
+paramiko==1.15.2 --hash:sha256=c92b168c8db94a9e1362f3fc27bc21e8c20794885bf2f7d303b5ad717c044ebc
+ecdsa==0.13 --hash:sha256=89f149066e4a1e419892cef830fd0db85c227d5d427f7075422ace83429e2d26
+# pycrypto, but it is a direct Galaxy dependency as well
+
+# Flexible BAM/tabix index naming
+pysam==0.8.4+gx5 --hash:sha256=76d7d0780fc7075583ae6fe358d2b1a513a1ddb0fe33abd8620d44f4854d7ebb \
+                 --hash:sha256=a051b7df7eefb8a30d30bb78043a098d54615e2d7afbbe3300bbaeb29fb8098f \
+                 --hash:sha256=640adc36798a73359f55069f84700e9d2355a1fef0840e9e11d33ffec3ee72a7 \
+                 --hash:sha256=8afcc3420ac5c5f71d80fdedfa658760af593c3213b9d3d96f76b37749e947c5 \
+                 --hash:sha256=b93b66e8b0395fc4fde9eb79a733fba361a2e785d8cb7d6a36527ffd64ea1673 \
+                 --hash:sha256=bcc1b107dbbfee1a9ee740f1e3c5dd13b48e7b8f68032923a681fb82390d6683
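Because every package above is pinned with one or more sha256 digests, a reasonably recent pip (8.0 or later) runs the install in hash-checking mode and refuses any archive whose digest does not match. A minimal sketch of installing from such a file (the file path is a placeholder, not taken from this diff):

    pip install --require-hashes -r path/to/hashed-requirements.txt

Note that hash-checking mode requires every requirement, including transitive ones, to be pinned with a hash, which is why each dependency is listed here explicitly.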
diff --git a/lib/galaxy/dependencies/pinned-requirements.txt b/lib/galaxy/dependencies/pinned-requirements.txt
new file mode 100644
index 0000000..3f79978
--- /dev/null
+++ b/lib/galaxy/dependencies/pinned-requirements.txt
@@ -0,0 +1,71 @@
+# packages with C extensions
+bx-python==0.7.3
+MarkupSafe==0.23
+PyYAML==3.11
+SQLAlchemy==1.0.15
+mercurial==3.7.3
+numpy==1.9.2
+pycrypto==2.6.1
+
+# Install python_lzo if you want to support indexed access to lzo-compressed
+# locally cached maf files via bx-python
+#python_lzo==1.8
+
+# pure Python packages
+Paste==2.0.2
+PasteDeploy==1.5.2
+docutils==0.12
+wchartype==0.1
+repoze.lru==0.6
+Routes==2.2
+WebOb==1.4.1
+WebHelpers==1.3
+Mako==1.0.2
+pytz==2015.4
+Babel==2.0
+Beaker==1.7.0
+dictobj==0.3.1
+nose==1.3.7
+Parsley==1.3
+six==1.9.0
+Whoosh==2.7.4
+testfixtures==4.10.0
+
+# Cheetah and dependencies
+Cheetah==2.4.4
+Markdown==2.6.3
+
+# BioBlend and dependencies
+bioblend==0.7.0
+boto==2.38.0
+requests==2.8.1
+requests-toolbelt==0.4.0
+
+# kombu and dependencies
+kombu==3.0.30
+amqp==1.4.8
+anyjson==0.3.3
+
+# Pulsar requirements
+psutil==4.1.0
+pulsar-galaxy-lib==0.7.0.dev5
+
+# sqlalchemy-migrate and dependencies
+sqlalchemy-migrate==0.10.0
+decorator==4.0.2
+Tempita==0.5.3dev
+sqlparse==0.1.16
+pbr==1.8.0
+
+# svgwrite and dependencies
+svgwrite==1.1.6
+pyparsing==2.1.1
+
+# Fabric and dependencies
+Fabric==1.10.2
+paramiko==1.15.2
+ecdsa==0.13
+
+# Flexible BAM index naming
+pysam==0.8.4+gx5
+
diff --git a/lib/galaxy/dependencies/requirements.txt b/lib/galaxy/dependencies/requirements.txt
new file mode 100644
index 0000000..aa6eed7
--- /dev/null
+++ b/lib/galaxy/dependencies/requirements.txt
@@ -0,0 +1,70 @@
+# packages with C extensions
+# numpy should be installed before bx to enable extra features in bx
+numpy
+bx-python
+MarkupSafe
+PyYAML
+SQLAlchemy
+mercurial
+pycrypto
+
+# Install python_lzo if you want to support indexed access to lzo-compressed
+# locally cached maf files via bx-python
+#python_lzo
+
+# pure Python packages
+Paste
+PasteDeploy
+docutils
+wchartype
+repoze.lru
+Routes
+WebOb
+WebHelpers
+Mako
+pytz
+Babel
+Beaker
+dictobj
+nose
+Parsley
+six
+Whoosh
+
+# Cheetah and dependencies
+Cheetah
+Markdown
+
+# BioBlend and dependencies
+bioblend
+boto
+requests
+requests-toolbelt
+
+# kombu and dependencies
+kombu
+
+# Pulsar requirements
+psutil
+pulsar-galaxy-lib>=0.7.0.dev4
+
+# sqlalchemy-migrate and dependencies
+sqlalchemy-migrate
+decorator
+Tempita
+sqlparse
+pbr
+
+# svgwrite and dependencies
+svgwrite
+pyparsing
+
+# Fabric and dependencies
+Fabric
+paramiko
+ecdsa
+
+# We still pin these dependencies because of modifications to the upstream packages
+
+# Flexible BAM index naming
+pysam==0.8.4+gx5
diff --git a/lib/galaxy/eggs/__init__.py b/lib/galaxy/eggs/__init__.py
new file mode 100644
index 0000000..95e6a17
--- /dev/null
+++ b/lib/galaxy/eggs/__init__.py
@@ -0,0 +1,8 @@
+"""
+For backwards compatibility
+"""
+
+import pkg_resources
+
+
+require = pkg_resources.require
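Older Galaxy code obtained its dependencies through the eggs framework; the shim above keeps such call sites working on top of setuptools. A hedged sketch of the compatibility surface (the calling code is hypothetical):

    from galaxy.eggs import require

    # identical to pkg_resources.require(): activates and returns the
    # distributions that satisfy the requirement string
    dists = require( "SQLAlchemy" )
    print( dists[0].version )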
diff --git a/lib/galaxy/exceptions/__init__.py b/lib/galaxy/exceptions/__init__.py
new file mode 100644
index 0000000..a7fb022
--- /dev/null
+++ b/lib/galaxy/exceptions/__init__.py
@@ -0,0 +1,180 @@
+"""This module defines Galaxy's custom exceptions.
+
+A Galaxy exception is an exception that extends :class:`MessageException` which
+defines an HTTP status code (represented by the `status_code` attribute) and a
+default error message.
+
+New exceptions should be defined by adding an entry to `error_codes.json` in this
+directory to define a default error message and a Galaxy "error code". A concrete
+Python class should then be added in this file, defining an HTTP status code (as
+`status_code`) and an error code object (`err_code`) loaded dynamically from
+`error_codes.json`.
+
+Reflecting Galaxy's origins as a web application, these exceptions tend to be a
+bit web-oriented. However, this module is also a dependency of modules and tools
+that have nothing to do with the web; keep this in mind when defining exception
+names and messages.
+"""
+
+from ..exceptions import error_codes
+
+
+class MessageException( Exception ):
+    """Most generic Galaxy exception - indicates merely that some exceptional condition happened."""
+    # status code to be set when used with API.
+    status_code = 400
+    # Error code information embedded into API json responses.
+    err_code = error_codes.UNKNOWN
+
+    def __init__( self, err_msg=None, type="info", **extra_error_info ):
+        self.err_msg = err_msg or self.err_code.default_error_message
+        self.type = type
+        self.extra_error_info = extra_error_info
+
+    def __str__( self ):
+        return self.err_msg
+
+
+class ItemDeletionException( MessageException ):
+    pass
+
+
+class ObjectInvalid( Exception ):
+    """ Accessed object store ID is invalid """
+    pass
+
+# Please keep the exceptions ordered by status code
+
+
+class ActionInputError( MessageException ):
+    status_code = 400
+    err_code = error_codes.USER_REQUEST_INVALID_PARAMETER
+
+    def __init__( self, err_msg, type="error" ):
+        super( ActionInputError, self ).__init__( err_msg, type )
+
+
+class DuplicatedSlugException( MessageException ):
+    status_code = 400
+    err_code = error_codes.USER_SLUG_DUPLICATE
+
+
+class DuplicatedIdentifierException( MessageException ):
+    status_code = 400
+    err_code = error_codes.USER_IDENTIFIER_DUPLICATE
+
+
+class ObjectAttributeInvalidException( MessageException ):
+    status_code = 400
+    err_code = error_codes.USER_OBJECT_ATTRIBUTE_INVALID
+
+
+class ObjectAttributeMissingException( MessageException ):
+    status_code = 400
+    err_code = error_codes.USER_OBJECT_ATTRIBUTE_MISSING
+
+
+class MalformedId( MessageException ):
+    status_code = 400
+    err_code = error_codes.MALFORMED_ID
+
+
+class MalformedContents( MessageException ):
+    status_code = 400
+    err_code = error_codes.MALFORMED_CONTENTS
+
+
+class UnknownContentsType( MessageException ):
+    status_code = 400
+    err_code = error_codes.UNKNOWN_CONTENTS_TYPE
+
+
+class RequestParameterMissingException( MessageException ):
+    status_code = 400
+    err_code = error_codes.USER_REQUEST_MISSING_PARAMETER
+
+
+class ToolMetaParameterException( MessageException ):
+    status_code = 400
+    err_code = error_codes.USER_TOOL_META_PARAMETER_PROBLEM
+
+
+class RequestParameterInvalidException( MessageException ):
+    status_code = 400
+    err_code = error_codes.USER_REQUEST_INVALID_PARAMETER
+
+
+class AuthenticationFailed( MessageException ):
+    status_code = 401
+    err_code = error_codes.USER_AUTHENTICATION_FAILED
+
+
+class AuthenticationRequired( MessageException ):
+    status_code = 403
+    # TODO: as 401 and send WWW-Authenticate: ???
+    err_code = error_codes.USER_NO_API_KEY
+
+
+class ItemAccessibilityException( MessageException ):
+    status_code = 403
+    err_code = error_codes.USER_CANNOT_ACCESS_ITEM
+
+
+class ItemOwnershipException( MessageException ):
+    status_code = 403
+    err_code = error_codes.USER_DOES_NOT_OWN_ITEM
+
+
+class ConfigDoesNotAllowException( MessageException ):
+    status_code = 403
+    err_code = error_codes.CONFIG_DOES_NOT_ALLOW
+
+
+class InsufficientPermissionsException( MessageException ):
+    status_code = 403
+    err_code = error_codes.INSUFFICIENT_PERMISSIONS
+
+
+class AdminRequiredException( MessageException ):
+    status_code = 403
+    err_code = error_codes.ADMIN_REQUIRED
+
+
+class ObjectNotFound( MessageException ):
+    """ Accessed object was not found """
+    status_code = 404
+    err_code = error_codes.USER_OBJECT_NOT_FOUND
+
+
+class DeprecatedMethod( MessageException ):
+    """
+    Method (or a particular form/arg signature) has been removed and won't be available later
+    """
+    status_code = 404
+    # TODO:?? 410 Gone?
+    err_code = error_codes.DEPRECATED_API_CALL
+
+
+class Conflict( MessageException ):
+    status_code = 409
+    err_code = error_codes.CONFLICT
+
+
+class ConfigurationError( Exception ):
+    status_code = 500
+    err_code = error_codes.CONFIG_ERROR
+
+
+class InconsistentDatabase( MessageException ):
+    status_code = 500
+    err_code = error_codes.INCONSISTENT_DATABASE
+
+
+class InternalServerError( MessageException ):
+    status_code = 500
+    err_code = error_codes.INTERNAL_SERVER_ERROR
+
+
+class NotImplemented( MessageException ):
+    status_code = 501
+    err_code = error_codes.NOT_IMPLEMENTED
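As a sketch of the pattern the module docstring describes, a new exception pairs an HTTP status code with an error code object. The class below is hypothetical and reuses an existing error code rather than adding a new error_codes.json entry:

    class UserQuotaException( MessageException ):
        # hypothetical example; a real addition would typically define its own
        # entry in error_codes.json and reference it here
        status_code = 403
        err_code = error_codes.INSUFFICIENT_PERMISSIONS

    # API code would then simply raise it:
    # raise UserQuotaException( "Storing this dataset would exceed your disk quota." )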
diff --git a/lib/galaxy/exceptions/error_codes.json b/lib/galaxy/exceptions/error_codes.json
new file mode 100644
index 0000000..50faf0f
--- /dev/null
+++ b/lib/galaxy/exceptions/error_codes.json
@@ -0,0 +1,142 @@
+[
+   {
+    "name": "UNKNOWN",
+    "code": 0,
+    "message": "Unknown error occurred while processing request."
+   },
+   {
+    "name": "USER_CANNOT_RUN_AS",
+    "code": 400001,
+    "message": "User does not have permissions to run jobs as another user."
+    },
+   {
+    "name": "USER_INVALID_RUN_AS",
+    "code": 400002,
+    "message": "Invalid run_as request - run_as user does not exist."
+    },
+   {
+    "name": "USER_INVALID_JSON",
+    "code": 400003,
+    "message": "Your request did not appear to be valid JSON, please consult the API documentation."
+    },
+   {
+    "name": "USER_OBJECT_ATTRIBUTE_INVALID",
+    "code": 400004,
+    "message": "Attempted to create or update object with invalid attribute value."
+    },
+   {
+    "name": "USER_OBJECT_ATTRIBUTE_MISSING",
+    "code": 400005,
+    "message": "Attempted to create object without required attribute."
+    },
+   {
+    "name": "USER_SLUG_DUPLICATE",
+    "code": 400006,
+    "message": "Slug must be unique per user."
+    },
+   {
+    "name": "USER_REQUEST_MISSING_PARAMETER",
+    "code": 400007,
+    "message": "Request is missing parameter required to complete desired action."
+   },
+   {
+    "name": "USER_REQUEST_INVALID_PARAMETER",
+    "code": 400008,
+    "message": "Request contained invalid parameter, action could not be completed."
+   },
+   {
+    "name": "MALFORMED_ID",
+    "code": 400009,
+    "message": "The id of the resource is malformed."
+    },
+   {
+    "name": "UNKNOWN_CONTENTS_TYPE",
+    "code": 400010,
+    "message": "The request contains unknown type of contents."
+    },
+   {
+    "name": "USER_IDENTIFIER_DUPLICATE",
+    "code": 400011,
+    "message": "Request contained a duplicated identifier that must be unique."
+    },
+   {
+    "name": "USER_TOOL_META_PARAMETER_PROBLEM",
+    "code": 400012,
+    "message": "Supplied incorrect or incompatible tool meta parameters."
+   },
+   {
+    "name": "MALFORMED_CONTENTS",
+    "code": 400013,
+    "message": "The contents of the request are malformed."
+    },
+   {
+    "name": "USER_AUTHENTICATION_FAILED",
+    "code": 401001,
+    "message": "Authentication failed, invalid credentials supplied."
+   },
+   {
+    "name": "USER_NO_API_KEY",
+    "code": 403001,
+    "message": "Authentication required for this request"
+    },
+   {
+    "name": "USER_CANNOT_ACCESS_ITEM",
+    "code": 403002,
+    "message": "User cannot access specified item."
+    },
+   {
+    "name": "USER_DOES_NOT_OWN_ITEM",
+    "code": 403003,
+    "message": "User does not own specified item."
+   },
+   {
+    "name": "CONFIG_DOES_NOT_ALLOW",
+    "code": 403004,
+    "message": "The configuration of this Galaxy instance does not allow that operation"
+   },
+   {
+    "name": "INSUFFICIENT_PERMISSIONS",
+    "code": 403005,
+    "message": "You don't have proper permissions to perform the requested operation"
+   },
+   {
+    "name": "ADMIN_REQUIRED",
+    "code": 403006,
+    "message": "Action requires admin account."
+   },
+   {
+    "name": "USER_OBJECT_NOT_FOUND",
+    "code": 404001,
+    "message": "No such object found."
+   },
+   {
+    "name": "DEPRECATED_API_CALL",
+    "code": 404002,
+    "message": "This API method or call signature has been deprecated and is no longer available"
+   },
+   {
+    "name": "CONFLICT",
+    "code": 409001,
+    "message": "Database conflict prevented fulfilling the request."
+   },
+   {
+    "name": "INTERNAL_SERVER_ERROR",
+    "code": 500001,
+    "message": "Internal server error."
+   },
+   {
+    "name": "INCONSISTENT_DATABASE",
+    "code": 500002,
+    "message": "Inconsistent database prevented fulfilling the request."
+   },
+   {
+    "name": "CONFIG_ERROR",
+    "code": 500003,
+    "message": "Error in a configuration file."
+   },
+   {
+    "name": "NOT_IMPLEMENTED",
+    "code": 501001,
+    "message": "Method is not implemented."
+   }
+]
diff --git a/lib/galaxy/exceptions/error_codes.py b/lib/galaxy/exceptions/error_codes.py
new file mode 100644
index 0000000..3b6e78a
--- /dev/null
+++ b/lib/galaxy/exceptions/error_codes.py
@@ -0,0 +1,47 @@
+"""Defines the :class:`ErrorCode` class and instantiates concrete objects from JSON.
+
+See the file error_codes.json for actual error code descriptions.
+"""
+from json import loads
+from pkg_resources import resource_string
+
+
+# Error codes are provided as a convenience to Galaxy API clients, but at this
+# time they do not represent part of the more stable interface. They can
+# change without warning between releases.
+UNKNOWN_ERROR_MESSAGE = "Unknown error occurred while processing request."
+
+
+class ErrorCode(object):
+    """Small class allowing object representation for error descriptions loaded from JSON."""
+
+    def __init__(self, code, default_error_message):
+        """Construct a :class:`ErrorCode` from supplied integer and error message."""
+        self.code = code
+        self.default_error_message = default_error_message or UNKNOWN_ERROR_MESSAGE
+
+    def __str__(self):
+        """Return the error code message."""
+        return str(self.default_error_message)
+
+    def __repr__(self):
+        """Return object representation of this error code."""
+        return "ErrorCode[code=%d,message=%s]" % (self.code, str(self.default_error_message))
+
+    def __int__(self):
+        """Return the error code integer."""
+        return int(self.code)
+
+
+def _from_dict(entry):
+    """Build a :class:`ErrorCode` object from a JSON entry."""
+    name = entry.get("name")
+    code = entry.get("code")
+    message = entry.get("message")
+    return (name, ErrorCode(code, message))
+
+
+error_codes_json = resource_string(__name__, 'error_codes.json').decode("UTF-8")
+for entry in loads(error_codes_json):
+    name, error_code_obj = _from_dict(entry)
+    globals()[name] = error_code_obj
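Because the loop above injects each ErrorCode into this module's globals, client code can address the codes as ordinary module attributes. A small sketch using values defined in error_codes.json:

    from galaxy.exceptions import error_codes

    code = error_codes.USER_OBJECT_NOT_FOUND   # built from the JSON entry above
    int( code )    # -> 404001
    str( code )    # -> 'No such object found.'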
diff --git a/lib/galaxy/external_services/__init__.py b/lib/galaxy/external_services/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/external_services/actions.py b/lib/galaxy/external_services/actions.py
new file mode 100644
index 0000000..9e4ed1a
--- /dev/null
+++ b/lib/galaxy/external_services/actions.py
@@ -0,0 +1,193 @@
+# Contains actions that are used in External Services
+import logging
+from urllib import urlopen
+from galaxy.web import url_for
+from galaxy.util.template import fill_template
+from result_handlers.basic import ExternalServiceActionResultHandler
+
+log = logging.getLogger( __name__ )
+
+
+class PopulatedExternalServiceAction( object ):
+    def __init__( self, action, param_dict ):
+        self.action = action
+        self.param_dict = param_dict
+        self.result = None
+        self.handled_results = None
+
+    def __getattr__( self, name ):
+        return getattr( self.action, name )
+
+    def get_action_access_link( self, trans ):
+        return self.action.get_action_access_link( trans, self.param_dict )
+
+    def perform_action( self ):
+        if self.result is None:
+            self.result = self.action.perform_action( self.param_dict )
+        return self.result
+
+    def handle_results( self, trans ):
+        if self.result is None:
+            self.perform_action()
+        if self.handled_results is None:
+            self.handled_results = self.action.handle_action( self.result, self.param_dict, trans )
+        return self.handled_results
+
+
+class ExternalServiceAction( object ):
+    """ Abstract Class for External Service Actions """
+
+    type = None
+
+    @classmethod
+    def from_elem( cls, elem, parent ):
+        action_type = elem.get( 'type', None )
+        assert action_type, 'ExternalServiceAction requires a type'
+        return action_type_to_class[ action_type ]( elem, parent )
+
+    def __init__( self, elem, parent ):
+        self.name = elem.get( 'name', None )
+        assert self.name, 'ExternalServiceAction requires a name'
+        self.label = elem.get( 'label', self.name )
+        self.parent = parent
+        self.result_handlers = []
+        for handler in elem.findall( 'result_handler' ):
+            self.result_handlers.append( ExternalServiceActionResultHandler.from_elem( handler, self ) )
+
+    def __action_url_id( self, param_dict ):
+        rval = self.name
+        parent = self.parent
+        while hasattr( parent.parent, 'parent' ):
+            rval = "%s|%s" % ( parent.name, rval )
+            parent = parent.parent
+        rval = "%s|%s" % ( param_dict['service_instance'].id, rval )
+        return rval
+
+    def get_action_access_link( self, trans, param_dict ):
+        return url_for( controller='/external_services',
+                        action="access_action",
+                        external_service_action=self.__action_url_id( param_dict ),
+                        item=param_dict['item'].id,
+                        item_type=param_dict['item'].__class__.__name__ )
+
+    def populate_action( self, param_dict ):
+        return PopulatedExternalServiceAction( self, param_dict )
+
+    def handle_action( self, completed_action, param_dict, trans ):
+        handled_results = []
+        for handled_result in self.result_handlers:
+            handled_results.append( handled_result.handle_result( completed_action, param_dict, trans ) )
+        return handled_results
+
+    def perform_action( self, param_dict ):
+        raise Exception( 'Abstract Method' )
+
+
+class ExternalServiceResult( object ):
+    def __init__( self, name, param_dict ):
+        self.name = name
+        self.param_dict = param_dict
+
+    @property
+    def content( self ):
+        raise Exception( 'Abstract Method' )
+
+
+class ExternalServiceWebAPIActionResult( ExternalServiceResult ):
+    def __init__( self, name, param_dict, url, method, target ):  # display_handler = None
+        ExternalServiceResult.__init__( self, name, param_dict )
+        self.url = url
+        self.method = method
+        self.target = target
+        self._content = None
+
+    @property
+    def content( self ):
+        if self._content is None:
+            self._content = urlopen( self.url ).read()
+        return self._content
+
+
+class ExternalServiceValueResult( ExternalServiceResult ):
+    def __init__( self, name, param_dict, value ):
+        self.name = name
+        self.param_dict = param_dict
+        self.value = value
+
+    @property
+    def content( self ):
+        return self.value
+
+
+class ExternalServiceWebAPIAction( ExternalServiceAction ):
+    """ Action that accesses an external Web API and provides handlers for the requested content """
+
+    type = 'web_api'
+
+    class ExternalServiceWebAPIActionRequest( object ):
+        def __init__( self, elem, parent ):
+            self.target = elem.get( 'target', '_blank' )
+            self.method = elem.get( 'method', 'post' )
+            self.parent = parent
+            self.url = Template( elem.find( 'url' ), parent )
+
+        def get_web_api_action( self, param_dict ):
+            name = self.parent.name
+            target = self.target
+            method = self.method
+            url = self.url.build_template( param_dict ).strip()
+            return ExternalServiceWebAPIActionResult( name, param_dict, url, method, target )
+
+    def __init__( self, elem, parent ):
+        ExternalServiceAction.__init__( self, elem, parent )
+        self.web_api_request = self.ExternalServiceWebAPIActionRequest( elem.find( 'request' ), parent )
+
+    def perform_action( self, param_dict ):
+        return self.web_api_request.get_web_api_action( param_dict )
+
+
+class ExternalServiceWebAction( ExternalServiceAction ):
+    """ Action that accesses an external web application  """
+
+    type = 'web'
+
+    def __init__( self, elem, parent ):
+        ExternalServiceAction.__init__( self, elem, parent )
+        self.request_elem = elem.find( 'request' )
+        self.url = Template( self.request_elem.find( 'url' ), parent )
+        self.target = self.request_elem.get( 'target', '_blank' )
+        self.method = self.request_elem.get( 'method', 'get' )
+
+    def get_action_access_link( self, trans, param_dict ):
+        url = self.url.build_template( param_dict ).strip()
+        return url
+
+
+class ExternalServiceTemplateAction( ExternalServiceAction ):
+    """ Action that redirects to an external URL """
+
+    type = 'template'
+
+    def __init__( self, elem, parent ):
+        ExternalServiceAction.__init__( self, elem, parent )
+        self.template = Template( elem.find( 'template' ), parent )
+
+    def perform_action( self, param_dict ):
+        return ExternalServiceValueResult( self.name, param_dict, self.template.build_template( param_dict ) )
+
+
+action_type_to_class = {
+    ExternalServiceWebAction.type: ExternalServiceWebAction,
+    ExternalServiceWebAPIAction.type: ExternalServiceWebAPIAction,
+    ExternalServiceTemplateAction.type: ExternalServiceTemplateAction }
+
+
+# utility classes
+class Template( object ):
+    def __init__( self, elem, parent ):
+        self.text = elem.text
+        self.parent = parent
+
+    def build_template( self, param_dict ):
+        template = fill_template( self.text, context=param_dict )
+        return template
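To show how these pieces fit together, here is a minimal sketch of loading a 'web' action from XML and rendering its access link. The XML fragment and field values are hypothetical, the enclosing actions group is stubbed out with None, and Cheetah (pinned in the requirements above) fills the $fields.db placeholder:

    from xml.etree import ElementTree
    from galaxy.external_services.actions import ExternalServiceAction
    from galaxy.util.bunch import Bunch

    elem = ElementTree.fromstring(
        '<action type="web" name="view_at_ucsc" label="View at UCSC">'
        '<request target="_blank" method="get">'
        '<url>http://genome.ucsc.edu/cgi-bin/hgTracks?db=$fields.db</url>'
        '</request>'
        '</action>' )

    # from_elem() dispatches on the type attribute via action_type_to_class
    action = ExternalServiceAction.from_elem( elem, None )
    # for web actions the access link is just the filled-in url template
    url = action.get_action_access_link( None, { 'fields': Bunch( db='hg19' ) } )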
diff --git a/lib/galaxy/external_services/parameters.py b/lib/galaxy/external_services/parameters.py
new file mode 100644
index 0000000..78122ae
--- /dev/null
+++ b/lib/galaxy/external_services/parameters.py
@@ -0,0 +1,44 @@
+# Contains parameters that are used in External Services
+from galaxy.util import string_as_bool
+from galaxy.util.template import fill_template
+
+
+class ExternalServiceParameter( object ):
+    """ Abstract Class for External Service Parameters """
+
+    type = None
+    requires_user_input = False
+
+    @classmethod
+    def from_elem( cls, elem, parent ):
+        param_type = elem.get( 'type', None )
+        assert param_type, 'ExternalServiceParameter requires a type'
+        return parameter_type_to_class[ param_type ]( elem, parent )
+
+    def __init__( self, elem, parent ):
+        self.name = elem.get( 'name', None )
+        assert self.name, 'ExternalServiceParameter requires a name'
+        self.parent = parent
+
+    def get_value( self, param_dict ):
+        raise Exception( 'Abstract Method' )
+
+
+class ExternalServiceTemplateParameter( ExternalServiceParameter ):
+    """ Parameter that returns a string containing the requested content """
+
+    type = 'template'
+
+    def __init__( self, elem, parent ):
+        ExternalServiceParameter.__init__( self, elem, parent )
+        self.strip = string_as_bool( elem.get( 'strip', 'False' ) )
+        self.text = elem.text
+
+    def get_value( self, param_dict ):
+        value = fill_template( self.text, context=param_dict )
+        if self.strip:
+            value = value.strip()
+        return value
+
+
+parameter_type_to_class = { ExternalServiceTemplateParameter.type: ExternalServiceTemplateParameter }
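A short sketch of the template parameter in use; as above, the XML and values are hypothetical and the parent is stubbed out with None:

    from xml.etree import ElementTree
    from galaxy.external_services.parameters import ExternalServiceParameter
    from galaxy.util.bunch import Bunch

    elem = ElementTree.fromstring(
        '<param type="template" name="host" strip="True">$service.host</param>' )

    param = ExternalServiceParameter.from_elem( elem, None )
    # fills the Cheetah template from the supplied dict, then strips whitespace
    param.get_value( { 'service': Bunch( host='example.org' ) } )   # -> 'example.org'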
diff --git a/lib/galaxy/external_services/result_handlers/__init__.py b/lib/galaxy/external_services/result_handlers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/external_services/result_handlers/basic.py b/lib/galaxy/external_services/result_handlers/basic.py
new file mode 100644
index 0000000..5785f2c
--- /dev/null
+++ b/lib/galaxy/external_services/result_handlers/basic.py
@@ -0,0 +1,68 @@
+import logging
+from json import loads
+
+log = logging.getLogger( __name__ )
+
+
+class ExternalServiceActionResultHandler( object ):
+    """ Basic Class for External Service Actions Result Handlers"""
+
+    type = 'display'
+
+    @classmethod
+    def from_elem( cls, elem, parent ):
+        result_type = elem.get( 'type', None )
+        assert result_type, 'ExternalServiceActionResultHandler requires a type'
+        return result_type_to_class[ result_type ]( elem, parent )
+
+    def __init__( self, elem, parent ):
+        self.parent = parent
+
+    def handle_result( self, result, param_dict, trans):
+        return result.content
+        # need to think about how to restore or set mime type:
+        # both as specified in xml and also as set by an action,
+        #    e.g. mimetype returned from web_api action should be reused here...
+
+
+class ExternalServiceActionURLRedirectResultHandler( ExternalServiceActionResultHandler ):
+    """ Basic Class for External Service Actions Result Handlers"""
+
+    type = 'web_redirect'
+
+    @classmethod
+    def from_elem( cls, elem, parent ):
+        result_type = elem.get( 'type', None )
+        assert result_type, 'ExternalServiceActionResultHandler requires a type'
+        return result_type_to_class[ result_type ]( elem, parent )
+
+    def __init__( self, elem, parent ):
+        self.parent = parent
+
+    def handle_result( self, result, param_dict, trans ):
+        return trans.response.send_redirect( result.content )
+
+
+class ExternalServiceActionJSONResultHandler( ExternalServiceActionResultHandler ):
+    """Class for External Service Actions JQuery Result Handler"""
+
+    type = 'json_display'
+
+    def handle_result( self, result, param_dict, trans ):
+        rval = loads( result.content )
+        return trans.fill_template( '/external_services/generic_json.mako', result=rval, param_dict=param_dict, action=self.parent )
+
+
+class ExternalServiceActionJQueryGridResultHandler( ExternalServiceActionResultHandler ):
+    """Class for External Service Actions JQuery Result Handler"""
+
+    type = 'jquery_grid'
+
+    def handle_result( self, result, param_dict, trans ):
+        rval = loads( result.content )
+        return trans.fill_template( '/external_services/generic_jquery_grid.mako', result=rval, param_dict=param_dict, action=self.parent )
+
+
+result_type_to_class = {}
+for handler_class in [ ExternalServiceActionResultHandler, ExternalServiceActionURLRedirectResultHandler, ExternalServiceActionJQueryGridResultHandler, ExternalServiceActionJSONResultHandler ]:
+    result_type_to_class[handler_class.type] = handler_class
diff --git a/lib/galaxy/external_services/service.py b/lib/galaxy/external_services/service.py
new file mode 100644
index 0000000..05f8bc3
--- /dev/null
+++ b/lib/galaxy/external_services/service.py
@@ -0,0 +1,246 @@
+# Contains objects for accessing external service applications
+import logging
+from parameters import ExternalServiceParameter
+from actions import ExternalServiceAction
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+
+
+class ExternalServiceActionsGroup( object ):
+    def __init__( self, parent, name, label=None ):
+        self.name = name
+        self.label = label
+        self.parent = parent
+        self.items = []
+
+    @classmethod
+    def from_elem( self, elem, parent=None ):
+        """
+        Return ExternalServiceActionsGroup created from an xml element.
+        """
+        if elem is not None:
+            name = elem.get( 'name' )
+            label = elem.get( 'label' )
+            rval = ExternalServiceActionsGroup( parent, name, label=label )
+            rval.load_sub_elems( elem )
+        else:
+            rval = ExternalServiceActionsGroup( None, None )
+        return rval
+
+    def load_sub_elems( self, elem ):
+        for sub_elem in elem:
+            if sub_elem.tag == 'param':
+                self.add_item( ExternalServiceParameter.from_elem( sub_elem, self ) )
+            elif sub_elem.tag == 'action':
+                self.add_item( ExternalServiceAction.from_elem( sub_elem, self ) )
+            elif sub_elem.tag == 'section':
+                self.add_item( ExternalServiceActionsGroup.from_elem( sub_elem, self ) )
+            elif sub_elem.tag == 'conditional':
+                self.add_item( ExternalServiceActionsConditional( sub_elem, self ) )
+            else:
+                raise ValueError( 'Unknown tag: %s' % sub_elem.tag )
+
+    def add_item( self, item ):
+        self.items.append( item )
+
+    def populate( self, service_instance, item=None, param_dict=None ):
+        return PopulatedExternalService( self, service_instance, item, param_dict )
+
+    def prepare_actions( self, param_dict, parent_dict, parent_section ):
+        group = Bunch()
+        group_section = ActionSection( self.name, self.label )
+        parent_section.append( group_section )
+        parent_dict[ self.name ] = group
+        for item in self.items:
+            if isinstance( item, ExternalServiceParameter ):
+                group[ item.name ] = item.get_value( param_dict )
+            elif isinstance( item, ExternalServiceActionsGroup ):
+                group[ item.name ] = item.prepare_actions( param_dict, group, group_section )
+            elif isinstance( item, ExternalServiceAction ):
+                group_section.append( item.populate_action( param_dict ) )
+            elif isinstance( item, ExternalServiceActionsConditional ):
+                conditional_group = Bunch()
+                conditional_group_section = ActionSection( item.name, item.label )
+                group_section.append( conditional_group_section )
+                group[ item.name ] = conditional_group
+                for case in item.get_current_cases( param_dict ):
+                    conditional_group[ case.name ] = case.prepare_actions( param_dict, conditional_group, conditional_group_section )
+            else:
+                raise TypeError( 'unknown item type found: %s' % item )
+        return group
+
+
+class ExternalServiceActionsGroupWhen( ExternalServiceActionsGroup ):
+    type = "when"
+
+    @classmethod
+    def from_elem( self, parent, elem ):
+        """Loads the proper when by attributes of elem"""
+        when_type = elem.get( 'type' )
+        assert when_type in when_type_to_class, TypeError( "When type not implemented: %s" % when_type )
+        return when_type_to_class[ when_type ].from_elem( parent, elem )
+
+    def is_case( self, param_dict ):
+        raise TypeError( "Abstract method" )
+
+    def get_ref( self, param_dict ):
+        ref = param_dict
+        for ref_name in self.parent.ref:
+            assert ref_name in ref, "Required dependency '%s' not found in incoming values" % ref_name
+            ref = ref.get( ref_name )
+        return ref
+
+
+class ValueExternalServiceActionsGroupWhen( ExternalServiceActionsGroupWhen ):
+    type = "value"
+
+    def __init__( self, parent, name, value, label=None ):
+        super( ValueExternalServiceActionsGroupWhen, self ).__init__( parent, name, label )
+        self.value = value
+
+    @classmethod
+    def from_elem( self, parent, elem ):
+        """Returns an instance of this when"""
+        rval = ValueExternalServiceActionsGroupWhen( parent, elem.get( 'name' ), elem.get( 'value' ), elem.get( 'label' ) )
+        rval.load_sub_elems( elem )
+        return rval
+
+    def is_case( self, param_dict ):
+        ref = self.get_ref( param_dict )
+        return bool( str( ref ) == self.value )
+
+
+class BooleanExternalServiceActionsGroupWhen( ExternalServiceActionsGroupWhen ):
+    type = "boolean"
+
+    def __init__( self, parent, name, value, label=None ):
+        super( BooleanExternalServiceActionsGroupWhen, self ).__init__( parent, name, label )
+        self.value = value
+
+    @classmethod
+    def from_elem( self, parent, elem ):
+        """Returns an instance of this when"""
+        rval = BooleanExternalServiceActionsGroupWhen( parent, elem.get( 'name' ), elem.get( 'value' ), elem.get( 'label' ) )
+        rval.load_sub_elems( elem )
+        return rval
+
+    def is_case( self, param_dict ):
+        ref = self.get_ref( param_dict )
+        return bool( ref )
+
+
+class ItemIsInstanceExternalServiceActionsGroupWhen( ExternalServiceActionsGroupWhen ):
+    type = "item_type"
+
+    def __init__( self, parent, name, value, label=None ):
+        super( ItemIsInstanceExternalServiceActionsGroupWhen, self ).__init__( parent, name, label )
+        self.value = value
+
+    @classmethod
+    def from_elem( self, parent, elem ):
+        """Returns an instance of this when"""
+        rval = ItemIsInstanceExternalServiceActionsGroupWhen( parent, elem.get( 'name' ), elem.get( 'value' ), elem.get( 'label' ) )
+        rval.load_sub_elems( elem )
+        return rval
+
+    def is_case( self, param_dict ):
+        ref = self.get_ref( param_dict )
+        return ref.__class__.__name__.lower() in map( lambda x: x.lower(), self.value.split( '.' ) )  # HACK!
+
+
+when_type_to_class = {}
+for class_type in [ ValueExternalServiceActionsGroupWhen, BooleanExternalServiceActionsGroupWhen, ItemIsInstanceExternalServiceActionsGroupWhen]:
+    when_type_to_class[ class_type.type ] = class_type
+
+
+class ExternalServiceActionsConditional( object ):
+    type = "conditional"
+
+    def __init__( self, elem, parent ):
+        self.parent = parent
+        self.name = elem.get( 'name', None )
+        assert self.name is not None, "Required 'name' attribute missing from ExternalServiceActionsConditional"
+        self.label = elem.get( 'label' )
+        self.ref = elem.get( 'ref', None )
+        assert self.ref is not None, "Required 'ref' attribute missing from ExternalServiceActionsConditional"
+        self.ref = self.ref.split( '.' )
+        self.cases = []
+        for when_elem in elem.findall( 'when' ):
+            self.cases.append( ExternalServiceActionsGroupWhen.from_elem( self, when_elem ) )
+
+    def get_current_cases( self, param_dict ):
+        rval = []
+        for case in self.cases:
+            if case.is_case( param_dict ):
+                rval.append( case )
+        return rval
+
+
+class ActionSection( list ):
+    def __init__( self, name, label ):
+        list.__init__( self )
+        self.name = name
+        self.label = label
+
+    def has_action( self ):
+        for item in self:
+            if not isinstance( item, ActionSection ):
+                return True
+            else:
+                if item.has_action():
+                    return True
+        return False
+
+
+class PopulatedExternalService( object ):
+    def __init__( self, service_group, service_instance, item, param_dict=None ):
+        self.service_group = service_group
+        self.service_instance = service_instance
+        self.item = item
+        self.param_dict = param_dict
+        self.populate()
+
+    def __getattr__( self, name ):
+        return getattr( self.service_instance, name )  # should this be .service or .service_instance?
+
+    def populate( self ):
+        param_dict = {}
+        param_dict['fields'] = Bunch( **self.service_instance.form_values.content )
+        param_dict['item'] = self.item
+        param_dict['service'] = self.service_group.parent
+        param_dict['service_instance'] = self.service_instance
+        action_list = ActionSection( self.service_group.name, self.service_group.label )
+        for item in self.service_group.items:
+            if isinstance( item, ExternalServiceParameter ):
+                param_dict[ item.name ] = item.get_value( param_dict )
+            elif isinstance( item, ExternalServiceAction ):
+                action_list.append( item.populate_action( param_dict ) )
+            elif isinstance( item, ExternalServiceActionsGroup ):
+                item.prepare_actions( param_dict, param_dict, action_list )
+            else:
+                raise Exception( 'unknown item type found' )
+        self.param_dict = param_dict
+        self.actions = action_list
+
+    def perform_action_by_name( self, actions_list ):
+        action = self.get_action_by_name( actions_list )
+        action.perform_action()
+        return action
+
+    def get_action_by_name( self, actions_list ):
+        action = None
+        actions = self.actions  # populated actions
+        for name in actions_list:
+            action_found = False
+            for action in actions:
+                if action.name == name:
+                    action_found = True
+                    actions = action
+                    break
+            assert action_found, 'Action not found: %s in %s' % ( name, actions_list )
+        assert action, 'Action not found: %s' % actions_list
+        return action
+
+    def __nonzero__( self ):
+        return self.actions.has_action()
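A sketch of how a conditional picks its cases: ref is a dotted path that get_ref() walks through the incoming param_dict, and get_current_cases() returns every <when> whose is_case() matches. The XML and dictionary below are hypothetical:

    from xml.etree import ElementTree
    from galaxy.external_services.service import ExternalServiceActionsConditional

    elem = ElementTree.fromstring(
        '<conditional name="by_type" ref="item.ext">'
        '<when type="value" name="if_bam" value="bam" label="BAM datasets"/>'
        '</conditional>' )

    conditional = ExternalServiceActionsConditional( elem, None )
    # get_ref() resolves param_dict['item']['ext'] and compares it to 'bam'
    cases = conditional.get_current_cases( { 'item': { 'ext': 'bam' } } )   # [ the 'if_bam' when ]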
diff --git a/lib/galaxy/forms/__init__.py b/lib/galaxy/forms/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/forms/forms.py b/lib/galaxy/forms/forms.py
new file mode 100644
index 0000000..2686864
--- /dev/null
+++ b/lib/galaxy/forms/forms.py
@@ -0,0 +1,323 @@
+"""
+FormDefinition and field factories
+"""
+# TODO: A FormDefinitionField is closely linked to a form_builder result.
+# Can this functionality be further abstracted and merged with form_builder?
+from galaxy.util import string_as_bool
+from galaxy.model import FormDefinitionCurrent, FormDefinition
+
+FORM_TYPES = dict( [ ( f_type.lower(), f_descript ) for f_type, f_descript in FormDefinition.types.items() ] )
+
+
+class FormDefinitionFactory( object ):
+    def __init__( self, form_types, field_type_factories ):
+        self.form_types = form_types
+        self.field_type_factories = field_type_factories
+
+    def new( self, form_type, name, description=None, fields=None, layout=None, form_definition_current=None ):
+        """
+        Return new FormDefinition.
+        """
+        assert form_type in self.form_types, 'Invalid FormDefinition type ( %s not in %s )' % ( form_type, self.form_types.keys() )
+        assert name, 'FormDefinition requires a name'
+        if description is None:
+            description = ''
+        if layout is None:
+            layout = []
+        if fields is None:
+            fields = []
+        # Create new FormDefinitionCurrent
+        if form_definition_current is None:
+            form_definition_current = FormDefinitionCurrent()
+        rval = FormDefinition( name=name,
+                               desc=description,
+                               form_type=self.form_types[form_type],
+                               form_definition_current=form_definition_current,
+                               layout=layout,
+                               fields=fields )
+        form_definition_current.latest_form = rval
+        return rval
+
+    def from_elem( self, elem, form_definition_current=None ):
+        """
+        Return FormDefinition created from an xml element.
+        """
+        name = elem.get( 'name', None )
+        description = elem.get( 'description', None )
+        form_type = elem.get( 'type', None )
+        # load layout
+        layout = []
+        layouts_elem = elem.find( 'layout' )
+        if layouts_elem:
+            for layout_elem in layouts_elem.findall( 'grid' ):
+                layout_name = layout_elem.get( 'name', None )
+                assert layout_name and layout_name not in layout, 'Layout grid element requires a unique name.'
+                layout.append( layout_name )
+        # load fields
+        fields = []
+        fields_elem = elem.find( 'fields' )
+        if fields_elem is not None:
+            for field_elem in fields_elem.findall( 'field' ):
+                field_type = field_elem.get( 'type' )
+                assert field_type in self.field_type_factories, 'Invalid form field type ( %s ).' % field_type
+                fields.append( self.field_type_factories[field_type].from_elem( field_elem, layout ) )
+        # create and return new form
+        return self.new( form_type, name, description=description, fields=fields, layout=layout, form_definition_current=form_definition_current )
+
+
+class FormDefinitionFieldFactory( object ):
+    type = None
+
+    def __get_stored_field_type( self, **kwds ):
+        raise Exception( 'not implemented' )
+
+    def new( self, name=None, label=None, required=False, helptext=None, default=None, visible=True, layout=None ):
+        """
+        Return new FormDefinition field.
+        """
+        rval = {}
+        assert name, 'Must provide a name'
+        rval['name'] = name
+        if not label:
+            rval['label'] = name
+        else:
+            rval['label'] = label
+        if required:
+            rval['required'] = 'required'
+        else:
+            rval['required'] = 'optional'
+        if helptext is None:
+            helptext = ''
+        rval['helptext'] = helptext
+        if default is None:
+            default = ''
+        rval['default'] = default
+        rval['visible'] = visible
+        # if layout is None: #is this needed?
+        #    layout = ''
+        rval['layout'] = layout
+        return rval
+
+    def from_elem( self, elem, layout=None ):
+        """
+        Return FormDefinition created from an xml element.
+        """
+        name = elem.get( 'name' )
+        label = elem.get( 'label' )
+        required = string_as_bool( elem.get( 'required', 'false' ) )
+        default = elem.get( 'value' )
+        helptext = elem.get( 'helptext' )
+        visible = string_as_bool( elem.get( 'visible', 'true' ) )
+        field_layout = elem.get( 'layout', None )
+        if field_layout:
+            assert layout and field_layout in layout, 'Invalid layout specified: %s not in %s' % ( field_layout, layout )
+            field_layout = str( layout.index( field_layout ) )  # existing behavior: integer indexes are stored as strings. why?
+        return self.new( name=name, label=label, required=required, helptext=helptext, default=default, visible=visible, layout=field_layout )
+
+
+class FormDefinitionTextFieldFactory( FormDefinitionFieldFactory ):
+    type = 'text'
+
+    def __get_stored_field_type( self, area ):
+        if area:
+            return 'TextArea'
+        else:
+            return 'TextField'
+
+    def new( self, name=None, label=None, required=False, helptext=None, default=None, visible=True, layout=None, area=False ):
+        """
+        Return new FormDefinition field.
+        """
+        rval = super( FormDefinitionTextFieldFactory, self ).new( name=name, label=label,
+                                                                  required=required, helptext=helptext,
+                                                                  default=default, visible=visible,
+                                                                  layout=layout )
+        rval['type'] = self.__get_stored_field_type( area )
+        return rval
+
+    def from_elem( self, elem, layout=None ):
+        """
+        Return FormDefinition field created from an xml element.
+        """
+        rval = super( FormDefinitionTextFieldFactory, self ).from_elem( elem, layout=layout )
+        rval['type'] = self.__get_stored_field_type( string_as_bool( elem.get( 'area', 'false' ) ) )
+        return rval
+
+
+class FormDefinitionPasswordFieldFactory( FormDefinitionFieldFactory ):
+    type = 'password'
+
+    def __get_stored_field_type( self ):
+        return 'PasswordField'
+
+    def new( self, name=None, label=None, required=False, helptext=None, default=None, visible=True, layout=None, area=False ):
+        """
+        Return new FormDefinition field.
+        """
+        rval = super( FormDefinitionPasswordFieldFactory, self ).new( name=name, label=label,
+                                                                      required=required, helptext=helptext,
+                                                                      default=default, visible=visible,
+                                                                      layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+    def from_elem( self, elem, layout=None ):
+        """
+        Return FormDefinition field created from an xml element.
+        """
+        rval = super( FormDefinitionPasswordFieldFactory, self ).from_elem( elem, layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+
+class FormDefinitionAddressFieldFactory( FormDefinitionFieldFactory ):
+    type = 'address'
+
+    def __get_stored_field_type( self ):
+        return 'AddressField'
+
+    def new( self, name=None, label=None, required=False, helptext=None, default=None, visible=True, layout=None ):
+        """
+        Return new FormDefinition field.
+        """
+        rval = super( FormDefinitionAddressFieldFactory, self ).new( name=name, label=label,
+                                                                     required=required, helptext=helptext,
+                                                                     default=default, visible=visible,
+                                                                     layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+    def from_elem( self, elem, layout=None ):
+        """
+        Return FormDefinition field created from an xml element.
+        """
+        rval = super( FormDefinitionAddressFieldFactory, self ).from_elem( elem, layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+
+class FormDefinitionWorkflowFieldFactory( FormDefinitionFieldFactory ):
+    type = 'workflow'
+
+    def __get_stored_field_type( self ):
+        return 'WorkflowField'
+
+    def new( self, name=None, label=None, required=False, helptext=None, default=None, visible=True, layout=None ):
+        """
+        Return new FormDefinition field.
+        """
+        rval = super( FormDefinitionWorkflowFieldFactory, self ).new( name=name, label=label,
+                                                                      required=required, helptext=helptext,
+                                                                      default=default, visible=visible,
+                                                                      layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+    def from_elem( self, elem, layout=None ):
+        """
+        Return FormDefinition field created from an xml element.
+        """
+        rval = super( FormDefinitionWorkflowFieldFactory, self ).from_elem( elem, layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+
+class FormDefinitionWorkflowMappingFieldFactory( FormDefinitionFieldFactory ):
+    type = 'workflowmapping'
+
+    def __get_stored_field_type( self ):
+        return 'WorkflowMappingField'
+
+    def new( self, name=None, label=None, required=False, helptext=None, default=None, visible=True, layout=None ):
+        """
+        Return new FormDefinition field.
+        """
+        rval = super( FormDefinitionWorkflowMappingFieldFactory, self ).new( name=name, label=label,
+                                                                             required=required, helptext=helptext,
+                                                                             default=default, visible=visible,
+                                                                             layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+    def from_elem( self, elem, layout=None ):
+        """
+        Return FormDefinition field created from an xml element.
+        """
+        rval = super( FormDefinitionWorkflowMappingFieldFactory, self ).from_elem( elem, layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+
+class FormDefinitionHistoryFieldFactory( FormDefinitionFieldFactory ):
+    type = 'history'
+
+    def __get_stored_field_type( self ):
+        return 'HistoryField'
+
+    def new( self, name=None, label=None, required=False, helptext=None, default=None, visible=True, layout=None ):
+        """
+        Return new FormDefinition field.
+        """
+        rval = super( FormDefinitionHistoryFieldFactory, self ).new( name=name, label=label,
+                                                                     required=required, helptext=helptext,
+                                                                     default=default, visible=visible,
+                                                                     layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+    def from_elem( self, elem, layout=None ):
+        """
+        Return FormDefinition field created from an xml element.
+        """
+        rval = super( FormDefinitionHistoryFieldFactory, self ).from_elem( elem, layout=layout )
+        rval['type'] = self.__get_stored_field_type()
+        return rval
+
+
+class FormDefinitionSelectFieldFactory( FormDefinitionFieldFactory ):
+    type = 'select'
+
+    def __get_stored_field_type( self, checkboxes ):
+        if checkboxes:
+            return 'CheckboxField'
+        else:
+            return 'SelectField'
+
+    def new( self, name=None, label=None, required=False, helptext=None, default=None, visible=True, layout=None, options=None, checkboxes=False ):
+        """
+        Return new FormDefinition field.
+        """
+        rval = super( FormDefinitionSelectFieldFactory, self ).new( name=name, label=label,
+                                                                    required=required, helptext=helptext,
+                                                                    default=default, visible=visible,
+                                                                    layout=layout )
+        rval['type'] = self.__get_stored_field_type( checkboxes )
+        if options is None:
+            options = []
+        rval['selectlist'] = options
+        return rval
+
+    def from_elem( self, elem, layout=None ):
+        """
+        Return FormDefinition field created from an xml element.
+        """
+        rval = super( FormDefinitionSelectFieldFactory, self ).from_elem( elem, layout=layout )
+        rval['type'] = self.__get_stored_field_type( string_as_bool( elem.get( 'checkboxes', 'false' ) ) )
+        # load select options
+        rval['selectlist'] = []
+        for select_option in elem.findall( 'option' ):
+            value = select_option.get( 'value', None )
+            assert value is not None, 'Must provide a "value" for a select option'
+            rval['selectlist'].append( value )
+        return rval
+
+
+field_type_factories = dict( [ ( field.type, field() ) for field in ( FormDefinitionTextFieldFactory,
+                                                                      FormDefinitionPasswordFieldFactory,
+                                                                      FormDefinitionAddressFieldFactory,
+                                                                      FormDefinitionSelectFieldFactory,
+                                                                      FormDefinitionWorkflowFieldFactory,
+                                                                      FormDefinitionWorkflowMappingFieldFactory,
+                                                                      FormDefinitionHistoryFieldFactory ) ] )
+
+form_factory = FormDefinitionFactory( FORM_TYPES, field_type_factories )
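+
+# Illustrative use of the factories above (the field name and option values
+# are hypothetical):
+#
+#     select_factory = field_type_factories['select']
+#     field = select_factory.new( name='genome', label='Genome build',
+#                                 options=['hg19', 'hg38'], checkboxes=False )
+#     # field['type'] == 'SelectField' and field['selectlist'] == ['hg19', 'hg38']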
diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py
new file mode 100644
index 0000000..4aa18d5
--- /dev/null
+++ b/lib/galaxy/jobs/__init__.py
@@ -0,0 +1,2122 @@
+"""
+Support for running a tool in Galaxy via an internal job management system
+"""
+import copy
+import datetime
+import logging
+import os
+import pwd
+import random
+import shutil
+import string
+import subprocess
+import sys
+import time
+import traceback
+from abc import ABCMeta, abstractmethod
+from json import loads
+from xml.etree import ElementTree
+
+import six
+
+import galaxy
+from galaxy import model, util
+from galaxy.datatypes import metadata, sniff
+from galaxy.exceptions import ObjectInvalid, ObjectNotFound
+from galaxy.jobs.actions.post import ActionBox
+from galaxy.jobs.mapper import JobRunnerMapper
+from galaxy.jobs.runners import BaseJobRunner, JobState
+from galaxy.util import safe_makedirs, unicodify
+from galaxy.util.bunch import Bunch
+from galaxy.util.expressions import ExpressionContext
+from galaxy.util.xml_macros import load
+
+from .datasets import (DatasetPath, NullDatasetPathRewriter,
+    OutputsToWorkingDirectoryPathRewriter, TaskPathRewriter)
+from .output_checker import check_output
+
+log = logging.getLogger( __name__ )
+
+# This file, if created in the job's working directory, will be used for
+# setting advanced metadata properties on the job and its associated outputs.
+# This interface is currently experimental, is only used by the upload tool,
+# and should eventually become API'd
+TOOL_PROVIDED_JOB_METADATA_FILE = 'galaxy.json'
+
+# Override with config.default_job_shell.
+DEFAULT_JOB_SHELL = '/bin/bash'
+
+DEFAULT_CLEANUP_JOB = "always"
+
+
+class JobDestination( Bunch ):
+    """
+    Provides details about where a job runs
+    """
+    def __init__(self, **kwds):
+        self['id'] = None
+        self['url'] = None
+        self['tags'] = None
+        self['runner'] = None
+        self['legacy'] = False
+        self['converted'] = False
+        self['shell'] = None
+        self['env'] = []
+        self['resubmit'] = []
+        # dict is appropriate (rather than a bunch) since keys may not be valid as attributes
+        self['params'] = dict()
+
+        # Use the values persisted in an existing job
+        if 'from_job' in kwds and kwds['from_job'].destination_id is not None:
+            self['id'] = kwds['from_job'].destination_id
+            self['params'] = kwds['from_job'].destination_params
+
+        super(JobDestination, self).__init__(**kwds)
+
+        # Store tags as a list
+        if self.tags is not None:
+            self['tags'] = [ x.strip() for x in self.tags.split(',') ]
+
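+# Illustrative construction of a JobDestination (the id and tag values are
+# hypothetical): keyword arguments become attributes, and a comma-separated
+# tags string is normalized to a list:
+#
+#     dest = JobDestination( id='local_4slots', runner='local', tags='general, small' )
+#     assert dest.tags == ['general', 'small']
+#     assert dest.params == {}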
+
+class JobToolConfiguration( Bunch ):
+    """
+    Provides details on what handler and destination a tool should use
+
+    A JobToolConfiguration will have the required attribute 'id' and optional
+    attributes 'handler', 'destination', and 'params'
+    """
+    def __init__(self, **kwds):
+        self['handler'] = None
+        self['destination'] = None
+        self['params'] = dict()
+        super(JobToolConfiguration, self).__init__(**kwds)
+
+    def get_resource_group( self ):
+        return self.get( "resources", None )
+
+
+def config_exception(e, file):
+    abs_path = os.path.abspath(file)
+    message = 'Problem parsing the XML in file %s, ' % abs_path
+    message += 'please correct the indicated portion of the file and restart Galaxy. '
+    message += str(e)
+    log.exception(message)
+    return Exception(message)
+
+
+class JobConfiguration( object ):
+    """A parser and interface to advanced job management features.
+
+    These features are configured in the job configuration, by default, ``job_conf.xml``
+    """
+    DEFAULT_NWORKERS = 4
+
+    JOB_RESOURCE_CONDITIONAL_XML = """<conditional name="__job_resource">
+        <param name="__job_resource__select" type="select" label="Job Resource Parameters">
+            <option value="no">Use default job resource parameters</option>
+            <option value="yes">Specify job resource parameters</option>
+        </param>
+        <when value="no"/>
+        <when value="yes"/>
+    </conditional>"""
+
+    def __init__(self, app):
+        """Parse the job configuration XML.
+        """
+        self.app = app
+        self.runner_plugins = []
+        self.dynamic_params = None
+        self.handlers = {}
+        self.handler_runner_plugins = {}
+        self.default_handler_id = None
+        self.destinations = {}
+        self.destination_tags = {}
+        self.default_destination_id = None
+        self.tools = {}
+        self.resource_groups = {}
+        self.default_resource_group = None
+        self.resource_parameters = {}
+        self.limits = Bunch()
+
+        self.__parse_resource_parameters()
+        # Initialize the config
+        job_config_file = self.app.config.job_config_file
+        try:
+            tree = load(job_config_file)
+            self.__parse_job_conf_xml(tree)
+        except IOError:
+            log.warning( 'Job configuration "%s" does not exist, using legacy'
+                         ' job configuration from Galaxy config file "%s" instead'
+                         % ( self.app.config.job_config_file, self.app.config.config_file ) )
+            self.__parse_job_conf_legacy()
+        except Exception as e:
+            raise config_exception(e, job_config_file)
+
+    def __parse_job_conf_xml(self, tree):
+        """Loads the new-style job configuration from options in the job config file (by default, job_conf.xml).
+
+        :param tree: Object representing the root ``<job_conf>`` object in the job config file.
+        :type tree: ``xml.etree.ElementTree.Element``
+        """
+        root = tree.getroot()
+        log.debug('Loading job configuration from %s' % self.app.config.job_config_file)
+
+        # Parse job plugins
+        plugins = root.find('plugins')
+        if plugins is not None:
+            for plugin in self.__findall_with_required(plugins, 'plugin', ('id', 'type', 'load')):
+                if plugin.get('type') == 'runner':
+                    workers = plugin.get('workers', plugins.get('workers', JobConfiguration.DEFAULT_NWORKERS))
+                    runner_kwds = self.__get_params(plugin)
+                    runner_info = dict(id=plugin.get('id'),
+                                       load=plugin.get('load'),
+                                       workers=int(workers),
+                                       kwds=runner_kwds)
+                    self.runner_plugins.append(runner_info)
+                else:
+                    log.error('Unknown plugin type: %s' % plugin.get('type'))
+            for plugin in self.__findall_with_required(plugins, 'plugin', ('id', 'type')):
+                if plugin.get('id') == 'dynamic' and plugin.get('type') == 'runner':
+                    self.dynamic_params = self.__get_params(plugin)
+
+        # Load tasks if configured
+        if self.app.config.use_tasked_jobs:
+            self.runner_plugins.append(dict(id='tasks', load='tasks', workers=self.app.config.local_task_queue_workers))
+
+        # Parse handlers
+        handlers = root.find('handlers')
+        if handlers is not None:
+            for handler in self.__findall_with_required(handlers, 'handler'):
+                id = handler.get('id')
+                if id in self.handlers:
+                    log.error("Handler '%s' overlaps handler with the same name, ignoring" % id)
+                else:
+                    log.debug("Read definition for handler '%s'" % id)
+                    self.handlers[id] = (id,)
+                    for plugin in handler.findall('plugin'):
+                        if id not in self.handler_runner_plugins:
+                            self.handler_runner_plugins[id] = []
+                        self.handler_runner_plugins[id].append( plugin.get('id') )
+                    if handler.get('tags', None) is not None:
+                        for tag in [ x.strip() for x in handler.get('tags').split(',') ]:
+                            if tag in self.handlers:
+                                self.handlers[tag].append(id)
+                            else:
+                                self.handlers[tag] = [id]
+
+        # Must define at least one handler to have a default.
+        if not self.handlers:
+            raise ValueError("Job configuration file defines no valid handler elements.")
+        # Determine the default handler(s)
+        self.default_handler_id = self.__get_default(handlers, list(self.handlers.keys()))
+
+        # Parse destinations
+        destinations = root.find('destinations')
+        job_metrics = self.app.job_metrics
+        for destination in self.__findall_with_required(destinations, 'destination', ('id', 'runner')):
+            id = destination.get('id')
+            destination_metrics = destination.get( "metrics", None )
+            if destination_metrics:
+                if not util.asbool( destination_metrics ):
+                    # disable
+                    job_metrics.set_destination_instrumenter( id, None )
+                else:
+                    metrics_conf_path = self.app.config.resolve_path( destination_metrics )
+                    job_metrics.set_destination_conf_file( id, metrics_conf_path )
+            else:
+                metrics_elements = self.__findall_with_required( destination, 'job_metrics', () )
+                if metrics_elements:
+                    job_metrics.set_destination_conf_element( id, metrics_elements[ 0 ] )
+            job_destination = JobDestination(**dict(destination.items()))
+            job_destination['params'] = self.__get_params(destination)
+            job_destination['env'] = self.__get_envs(destination)
+            job_destination['resubmit'] = self.__get_resubmits(destination)
+            self.destinations[id] = (job_destination,)
+            if job_destination.tags is not None:
+                for tag in job_destination.tags:
+                    if tag not in self.destinations:
+                        self.destinations[tag] = []
+                    self.destinations[tag].append(job_destination)
+
+        # Determine the default destination
+        self.default_destination_id = self.__get_default(destinations, list(self.destinations.keys()))
+
+        # Parse resources...
+        resources = root.find('resources')
+        if resources is not None:
+            self.default_resource_group = resources.get( "default", None )
+            for group in self.__findall_with_required(resources, 'group'):
+                id = group.get('id')
+                fields_str = group.get('fields', None) or group.text or ''
+                fields = [ f for f in fields_str.split(",") if f ]
+                self.resource_groups[ id ] = fields
+
+        # Parse tool mappings
+        tools = root.find('tools')
+        if tools is not None:
+            for tool in self.__findall_with_required(tools, 'tool'):
+                # There can be multiple definitions with identical ids, but different params
+                id = tool.get('id').lower().rstrip('/')
+                if id not in self.tools:
+                    self.tools[id] = list()
+                self.tools[id].append(JobToolConfiguration(**dict(tool.items())))
+                self.tools[id][-1]['params'] = self.__get_params(tool)
+
+        types = dict(registered_user_concurrent_jobs=int,
+                     anonymous_user_concurrent_jobs=int,
+                     walltime=str,
+                     output_size=util.size_to_bytes)
+
+        self.limits = Bunch(registered_user_concurrent_jobs=None,
+                            anonymous_user_concurrent_jobs=None,
+                            walltime=None,
+                            walltime_delta=None,
+                            output_size=None,
+                            destination_user_concurrent_jobs={},
+                            destination_total_concurrent_jobs={})
+
+        # Parse job limits
+        limits = root.find('limits')
+        if limits is not None:
+            for limit in self.__findall_with_required(limits, 'limit', ('type',)):
+                type = limit.get('type')
+                # concurrent_jobs renamed to destination_user_concurrent_jobs in job_conf.xml
+                if type in ( 'destination_user_concurrent_jobs', 'concurrent_jobs', 'destination_total_concurrent_jobs' ):
+                    id = limit.get('tag', None) or limit.get('id')
+                    if type == 'destination_total_concurrent_jobs':
+                        self.limits.destination_total_concurrent_jobs[id] = int(limit.text)
+                    else:
+                        self.limits.destination_user_concurrent_jobs[id] = int(limit.text)
+                elif limit.text:
+                    self.limits.__dict__[type] = types.get(type, str)(limit.text)
+
+        if self.limits.walltime is not None:
+            h, m, s = [ int( v ) for v in self.limits.walltime.split( ':' ) ]
+            self.limits.walltime_delta = datetime.timedelta( 0, s, 0, 0, m, h )
+
+        log.debug('Done loading job configuration')
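+
+    # A minimal sketch of the XML consumed by __parse_job_conf_xml above
+    # (element and attribute names follow the parsing code; the ids and
+    # values are illustrative):
+    #
+    #     <job_conf>
+    #         <plugins workers="4">
+    #             <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/>
+    #         </plugins>
+    #         <handlers default="main">
+    #             <handler id="main"/>
+    #         </handlers>
+    #         <destinations default="local">
+    #             <destination id="local" runner="local"/>
+    #         </destinations>
+    #         <limits>
+    #             <limit type="registered_user_concurrent_jobs">8</limit>
+    #         </limits>
+    #     </job_conf>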
+
+    def __parse_job_conf_legacy(self):
+        """Loads the old-style job configuration from options in the galaxy config file (by default, config/galaxy.ini).
+        """
+        log.debug('Loading job configuration from %s' % self.app.config.config_file)
+
+        # Always load local
+        self.runner_plugins = [dict(id='local', load='local', workers=self.app.config.local_job_queue_workers)]
+        # Load tasks if configured
+        if self.app.config.use_tasked_jobs:
+            self.runner_plugins.append(dict(id='tasks', load='tasks', workers=self.app.config.local_task_queue_workers))
+        for runner in self.app.config.start_job_runners:
+            self.runner_plugins.append(dict(id=runner, load=runner, workers=self.app.config.cluster_job_queue_workers))
+
+        # Set the handlers
+        for id in self.app.config.job_handlers:
+            self.handlers[id] = (id,)
+
+        self.handlers['default_job_handlers'] = self.app.config.default_job_handlers
+        self.default_handler_id = 'default_job_handlers'
+
+        # Set tool handler configs
+        for id, tool_handlers in self.app.config.tool_handlers.items():
+            self.tools[id] = list()
+            for handler_config in tool_handlers:
+                # rename the 'name' key to 'handler'
+                handler_config['handler'] = handler_config.pop('name')
+                self.tools[id].append(JobToolConfiguration(**handler_config))
+
+        # Set tool runner configs
+        for id, tool_runners in self.app.config.tool_runners.items():
+            # Might have been created in the handler parsing above
+            if id not in self.tools:
+                self.tools[id] = list()
+            for runner_config in tool_runners:
+                url = runner_config['url']
+                if url not in self.destinations:
+                    # Create a new "legacy" JobDestination - it will have its URL converted to a destination params once the appropriate plugin has loaded
+                    self.destinations[url] = (JobDestination(id=url, runner=url.split(':', 1)[0], url=url, legacy=True, converted=False),)
+                for tool_conf in self.tools[id]:
+                    if tool_conf.params == runner_config.get('params', {}):
+                        tool_conf['destination'] = url
+                        break
+                else:
+                    # There was not an existing config (from the handlers section) with the same params
+                    # rename the 'url' key to 'destination'
+                    runner_config['destination'] = runner_config.pop('url')
+                    self.tools[id].append(JobToolConfiguration(**runner_config))
+
+        self.destinations[self.app.config.default_cluster_job_runner] = (JobDestination(id=self.app.config.default_cluster_job_runner,
+                                                                                        runner=self.app.config.default_cluster_job_runner.split(':', 1)[0],
+                                                                                        url=self.app.config.default_cluster_job_runner,
+                                                                                        legacy=True,
+                                                                                        converted=False),)
+        self.default_destination_id = self.app.config.default_cluster_job_runner
+
+        # Set the job limits
+        self.limits = Bunch(registered_user_concurrent_jobs=self.app.config.registered_user_job_limit,
+                            anonymous_user_concurrent_jobs=self.app.config.anonymous_user_job_limit,
+                            walltime=self.app.config.job_walltime,
+                            walltime_delta=self.app.config.job_walltime_delta,
+                            output_size=self.app.config.output_size_limit,
+                            destination_user_concurrent_jobs={},
+                            destination_total_concurrent_jobs={})
+
+        log.debug('Done loading job configuration')
+
+    def get_tool_resource_xml( self, tool_id, tool_type ):
+        """ Given a tool id, return XML elements describing parameters to
+        insert into job resources.
+
+        :param tool_id: A tool ID (a string)
+        :param tool_type: A tool type (a string)
+
+        :returns: List of parameter elements.
+        """
+        if tool_id and tool_type == 'default':
+            # TODO: Only works with exact matches, should handle different kinds of ids
+            # the way destination lookup does.
+            resource_group = None
+            if tool_id in self.tools:
+                resource_group = self.tools[ tool_id ][ 0 ].get_resource_group()
+            resource_group = resource_group or self.default_resource_group
+            if resource_group and resource_group in self.resource_groups:
+                fields_names = self.resource_groups[ resource_group ]
+                fields = [ self.resource_parameters[ n ] for n in fields_names ]
+                if fields:
+                    conditional_element = ElementTree.fromstring( self.JOB_RESOURCE_CONDITIONAL_XML )
+                    when_yes_elem = conditional_element.findall( 'when' )[ 1 ]
+                    for parameter in fields:
+                        when_yes_elem.append( parameter )
+                    return conditional_element
+
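+    # A matching configuration sketch for get_tool_resource_xml above and
+    # __parse_resource_parameters below (group and field names are
+    # hypothetical).  In job_conf.xml:
+    #
+    #     <resources default="basic">
+    #         <group id="basic" fields="cores,walltime"/>
+    #     </resources>
+    #
+    # and in the file named by job_resource_params_file, one <param> per
+    # field (the root element's name is not checked):
+    #
+    #     <parameters>
+    #         <param name="cores" type="integer" label="Cores" value="1"/>
+    #         <param name="walltime" type="integer" label="Walltime (hours)" value="1"/>
+    #     </parameters>
+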
+    def __parse_resource_parameters( self ):
+        if os.path.exists( self.app.config.job_resource_params_file ):
+            resource_param_file = self.app.config.job_resource_params_file
+            try:
+                resource_definitions = util.parse_xml( resource_param_file )
+            except Exception as e:
+                raise config_exception( e, resource_param_file )
+            resource_definitions_root = resource_definitions.getroot()
+            # TODO: Also handling conditionals would be awesome!
+            for parameter_elem in resource_definitions_root.findall( "param" ):
+                name = parameter_elem.get( "name" )
+                self.resource_parameters[ name ] = parameter_elem
+
+    def __get_default(self, parent, names):
+        """
+        Returns the default attribute set in a parent tag like <handlers> or
+        <destinations>, or the ID of the sole child if there is no explicit
+        default and only one child is defined.
+
+        :param parent: Object representing a tag that may or may not have a 'default' attribute.
+        :type parent: ``xml.etree.ElementTree.Element``
+        :param names: The list of destination or handler IDs or tags that were loaded.
+        :type names: list of str
+
+        :returns: str -- id or tag representing the default.
+        """
+
+        rval = parent.get('default')
+        if 'default_from_environ' in parent.attrib:
+            environ_var = parent.attrib['default_from_environ']
+            rval = os.environ.get(environ_var, rval)
+        elif 'default_from_config' in parent.attrib:
+            config_val = parent.attrib['default_from_config']
+            rval = self.app.config.config_dict.get(config_val, rval)
+
+        if rval is not None:
+            # If the parent element has a 'default' attribute, use the id or tag in that attribute
+            if rval not in names:
+                raise Exception("<%s> default attribute '%s' does not match a defined id or tag in a child element" % (parent.tag, rval))
+            log.debug("<%s> default set to child with id or tag '%s'" % (parent.tag, rval))
+        elif len(names) == 1:
+            log.info("Setting <%s> default to child with id '%s'" % (parent.tag, names[0]))
+            rval = names[0]
+        else:
+            raise Exception("No <%s> default specified, please specify a valid id or tag with the 'default' attribute" % parent.tag)
+        return rval
+
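+    # Illustrative use of the fallback attributes handled by __get_default
+    # above (the environment variable name is hypothetical):
+    #
+    #     <handlers default="handler0" default_from_environ="GALAXY_DEFAULT_HANDLER">
+    #         ...
+    #     </handlers>
+    #
+    # If $GALAXY_DEFAULT_HANDLER is set, its value overrides the 'default'
+    # attribute; either way, the result must match a defined id or tag.
+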
+    def __findall_with_required(self, parent, match, attribs=None):
+        """Like ``xml.etree.ElementTree.Element.findall()``, except only returns children that have the specified attribs.
+
+        :param parent: Parent element in which to find.
+        :type parent: ``xml.etree.ElementTree.Element``
+        :param match: Name of child elements to find.
+        :type match: str
+        :param attribs: List of attributes that child elements are required to have.
+        :type attribs: list of str
+
+        :returns: list of ``xml.etree.ElementTree.Element``
+        """
+        rval = []
+        if attribs is None:
+            attribs = ('id',)
+        for elem in parent.findall(match):
+            for attrib in attribs:
+                if attrib not in elem.attrib:
+                    log.warning("required '%s' attribute is missing from <%s> element" % (attrib, match))
+                    break
+            else:
+                rval.append(elem)
+        return rval
+
+    def __get_params(self, parent):
+        """Parses any child <param> tags in to a dictionary suitable for persistence.
+
+        :param parent: Parent element in which to find child <param> tags.
+        :type parent: ``xml.etree.ElementTree.Element``
+
+        :returns: dict
+        """
+        rval = {}
+        for param in parent.findall('param'):
+            key = param.get('id')
+            if key in ["container", "container_override"]:
+                from galaxy.tools.deps import requirements
+                containers = map(requirements.container_from_element, list(param))
+                param_value = map(lambda c: c.to_dict(), containers)
+            else:
+                param_value = param.text
+
+            if 'from_environ' in param.attrib:
+                environ_var = param.attrib['from_environ']
+                param_value = os.environ.get(environ_var, param_value)
+            elif 'from_config' in param.attrib:
+                config_val = param.attrib['from_config']
+                param_value = self.app.config.config_dict.get(config_val, param_value)
+
+            rval[key] = param_value
+        return rval
+
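+    # For example (the param ids are hypothetical), __get_params above turns
+    #
+    #     <destination id="cluster" runner="drmaa">
+    #         <param id="nativeSpecification">-q main</param>
+    #         <param id="tmp_dir" from_environ="TMPDIR">/tmp</param>
+    #     </destination>
+    #
+    # into {'nativeSpecification': '-q main', 'tmp_dir': $TMPDIR or '/tmp'}.
+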
+    def __get_envs(self, parent):
+        """Parses any child <env> tags in to a dictionary suitable for persistence.
+
+        :param parent: Parent element in which to find child <env> tags.
+        :type parent: ``xml.etree.ElementTree.Element``
+
+        :returns: list of dict
+        """
+        rval = []
+        for param in parent.findall('env'):
+            rval.append( dict(
+                name=param.get('id'),
+                file=param.get('file'),
+                execute=param.get('exec'),
+                value=param.text,
+                raw=util.asbool(param.get('raw', 'false'))
+            ) )
+        return rval
+
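+    # For example (paths are illustrative), __get_envs above accepts:
+    #
+    #     <env id="PATH" raw="true">/opt/tool/bin:$PATH</env>
+    #     <env file="/opt/tool/env.sh"/>
+    #
+    # Each <env> element becomes one dict with the keys name, file, execute,
+    # value, and raw.
+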
+    def __get_resubmits(self, parent):
+        """Parses any child <resubmit> tags in to a dictionary suitable for persistence.
+
+        :param parent: Parent element in which to find child <resubmit> tags.
+        :type parent: ``xml.etree.ElementTree.Element``
+
+        :returns: list of dict
+        """
+        rval = []
+        for resubmit in parent.findall('resubmit'):
+            rval.append( dict(
+                condition=resubmit.get('condition'),
+                destination=resubmit.get('destination'),
+                handler=resubmit.get('handler')
+            ) )
+        return rval
+
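+    # For example (the condition and destination values are illustrative),
+    # __get_resubmits above accepts:
+    #
+    #     <resubmit condition="walltime_reached" destination="longq"/>
+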
+    @property
+    def default_job_tool_configuration(self):
+        """
+        The default JobToolConfiguration, used if a tool does not have an
+        explicit definition in the configuration.  It consists of a reference to
+        the default handler and default destination.
+
+        :returns: JobToolConfiguration -- a representation of a <tool> element that uses the default handler and destination
+        """
+        return JobToolConfiguration(id='default', handler=self.default_handler_id, destination=self.default_destination_id)
+
+    # Called upon instantiation of a Tool object
+    def get_job_tool_configurations(self, ids):
+        """
+        Get all configured JobToolConfigurations for a tool ID, or, if given
+        a list of IDs, the JobToolConfigurations for the first id in ``ids``
+        matching a tool definition.
+
+        .. note:: You should not mix tool shed tool IDs, versionless tool shed
+             IDs, and tool config tool IDs that refer to the same tool.
+
+        :param ids: Tool ID or IDs to fetch the JobToolConfiguration of.
+        :type ids: list or str.
+        :returns: list -- JobToolConfiguration Bunches representing <tool> elements matching the specified ID(s).
+
+        Example tool ID strings include:
+
+        * Full tool shed id: ``toolshed.example.org/repos/nate/filter_tool_repo/filter_tool/1.0.0``
+        * Tool shed id less version: ``toolshed.example.org/repos/nate/filter_tool_repo/filter_tool``
+        * Tool config tool id: ``filter_tool``
+        """
+        rval = []
+        # listify if ids is a single (string) id
+        ids = util.listify(ids)
+        for id in ids:
+            if id in self.tools:
+                # If a tool has definitions that include job params but not a
+                # definition for jobs without params, include the default
+                # config
+                for job_tool_configuration in self.tools[id]:
+                    if not job_tool_configuration.params:
+                        break
+                else:
+                    rval.append(self.default_job_tool_configuration)
+                rval.extend(self.tools[id])
+                break
+        else:
+            rval.append(self.default_job_tool_configuration)
+        return rval
+
+    def __get_single_item(self, collection):
+        """Given a collection of handlers or destinations, return one item from the collection at random.
+        """
+        # Avoid the random module when there is only one item, on the assumption that skipping it is faster
+        if len(collection) == 1:
+            return collection[0]
+        else:
+            return random.choice(collection)
+
+    # This is called by Tool.get_job_handler()
+    def get_handler(self, id_or_tag):
+        """Given a handler ID or tag, return the provided ID or an ID matching the provided tag
+
+        :param id_or_tag: A handler ID or tag.
+        :type id_or_tag: str
+
+        :returns: str -- A valid job handler ID.
+        """
+        if id_or_tag is None:
+            id_or_tag = self.default_handler_id
+        return self.__get_single_item(self.handlers[id_or_tag])
+
+    def get_destination(self, id_or_tag):
+        """Given a destination ID or tag, return the JobDestination matching the provided ID or tag
+
+        :param id_or_tag: A destination ID or tag.
+        :type id_or_tag: str
+
+        :returns: JobDestination -- A valid destination
+
+        Destinations are deepcopied as they are expected to be passed in to job
+        runners, which will modify them for persisting params set at runtime.
+        """
+        if id_or_tag is None:
+            id_or_tag = self.default_destination_id
+        return copy.deepcopy(self.__get_single_item(self.destinations[id_or_tag]))
+
+    def get_destinations(self, id_or_tag):
+        """Given a destination ID or tag, return all JobDestinations matching the provided ID or tag
+
+        :param id_or_tag: A destination ID or tag.
+        :type id_or_tag: str
+
+        :returns: list or tuple of JobDestinations
+
+        Destinations are not deepcopied, so they should not be passed to
+        anything which might modify them.
+        """
+        return self.destinations.get(id_or_tag, None)
+
+    def get_job_runner_plugins(self, handler_id):
+        """Load all configured job runner plugins
+
+        :returns: list of job runner plugins
+        """
+        rval = {}
+        if handler_id in self.handler_runner_plugins:
+            plugins_to_load = [ rp for rp in self.runner_plugins if rp['id'] in self.handler_runner_plugins[handler_id] ]
+            log.info( "Handler '%s' will load specified runner plugins: %s", handler_id, ', '.join( [ rp['id'] for rp in plugins_to_load ] ) )
+        else:
+            plugins_to_load = self.runner_plugins
+            log.info( "Handler '%s' will load all configured runner plugins", handler_id )
+        for runner in plugins_to_load:
+            class_names = []
+            module = None
+            id = runner['id']
+            load = runner['load']
+            if ':' in load:
+                # Name to load was specified as '<module>:<class>'
+                module_name, class_name = load.rsplit(':', 1)
+                class_names = [ class_name ]
+                module = __import__( module_name )
+            else:
+                # Name to load was specified as '<module>'
+                if '.' not in load:
+                    # For legacy reasons, try from galaxy.jobs.runners first if there's no '.' in the name
+                    module_name = 'galaxy.jobs.runners.' + load
+                    try:
+                        module = __import__( module_name )
+                    except ImportError:
+                        # No such module, we'll retry without prepending galaxy.jobs.runners.
+                        # All other exceptions (e.g. something wrong with the module code) will raise
+                        pass
+                if module is None:
+                    # If the name included a '.' or loading from the static runners path failed, try the original name
+                    module = __import__( load )
+                    module_name = load
+            if module is None:
+                # Module couldn't be loaded, error should have already been displayed
+                continue
+            for comp in module_name.split( "." )[1:]:
+                module = getattr( module, comp )
+            if not class_names:
+                # If there's not a ':', we check <module>.__all__ for class names
+                try:
+                    assert module.__all__
+                    class_names = module.__all__
+                except AssertionError:
+                    log.error( 'Runner "%s" does not contain a list of exported classes in __all__' % load )
+                    continue
+            for class_name in class_names:
+                runner_class = getattr( module, class_name )
+                try:
+                    assert issubclass(runner_class, BaseJobRunner)
+                except TypeError:
+                    log.warning("A non-class name was found in __all__, ignoring: %s" % id)
+                    continue
+                except AssertionError:
+                    log.warning("Job runner classes must be subclassed from BaseJobRunner, %s has bases: %s" % (id, runner_class.__bases__))
+                    continue
+                try:
+                    rval[id] = runner_class( self.app, runner[ 'workers' ], **runner.get( 'kwds', {} ) )
+                except TypeError:
+                    log.exception( "Job runner '%s:%s' has not been converted to a new-style runner or encountered TypeError on load"
+                                   % ( module_name, class_name ) )
+                    rval[id] = runner_class( self.app )
+                log.debug( "Loaded job runner '%s:%s' as '%s'" % ( module_name, class_name, id ) )
+        return rval
+
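+    # The two accepted 'load' syntaxes handled by get_job_runner_plugins
+    # above, for example:
+    #
+    #     <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/>
+    #     <plugin id="drmaa" type="runner" load="drmaa"/>
+    #
+    # The first names the class to load explicitly; the second imports
+    # galaxy.jobs.runners.drmaa and instantiates every class listed in its
+    # __all__.
+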
+    def is_id(self, collection):
+        """Given a collection of handlers or destinations, indicate whether the collection represents a tag or a real ID
+
+        :param collection: A representation of a destination or handler
+        :type collection: tuple or list
+
+        :returns: bool
+        """
+        return type(collection) == tuple
+
+    def is_tag(self, collection):
+        """Given a collection of handlers or destinations, indicate whether the collection represents a tag or a real ID
+
+        :param collection: A representation of a destination or handler
+        :type collection: tuple or list
+
+        :returns: bool
+        """
+        return type(collection) == list
+
+    def is_handler(self, server_name):
+        """Given a server name, indicate whether the server is a job handler
+
+        :param server_name: The name to check
+        :type server_name: str
+
+        :return: bool
+        """
+        for collection in self.handlers.values():
+            if server_name in collection:
+                return True
+        return False
+
+    def convert_legacy_destinations(self, job_runners):
+        """Converts legacy (from a URL) destinations to contain the appropriate runner params defined in the URL.
+
+        :param job_runners: All loaded job runner plugins.
+        :type job_runners: list of job runner plugins
+        """
+        for id, destination in [ ( id, destinations[0] ) for id, destinations in self.destinations.items() if self.is_id(destinations) ]:
+            # Only need to deal with real destinations, not members of tags
+            if destination.legacy and not destination.converted:
+                if destination.runner in job_runners:
+                    destination.params = job_runners[destination.runner].url_to_destination(destination.url).params
+                    destination.converted = True
+                    if destination.params:
+                        log.debug("Legacy destination with id '%s', url '%s' converted, got params:" % (id, destination.url))
+                        for k, v in destination.params.items():
+                            log.debug("    %s: %s" % (k, v))
+                    else:
+                        log.debug("Legacy destination with id '%s', url '%s' converted, got no params" % (id, destination.url))
+                else:
+                    log.warning("Legacy destination with id '%s' could not be converted: Unknown runner plugin: %s" % (id, destination.runner))
+
+
+class JobWrapper( object ):
+    """
+    Wraps a 'model.Job' with convenience methods for running processes and
+    state management.
+    """
+    def __init__( self, job, queue, use_persisted_destination=False ):
+        self.job_id = job.id
+        self.session_id = job.session_id
+        self.user_id = job.user_id
+        self.tool = queue.app.toolbox.get_tool( job.tool_id, job.tool_version, exact=True )
+        self.queue = queue
+        self.app = queue.app
+        self.sa_session = self.app.model.context
+        self.extra_filenames = []
+        self.command_line = None
+        self.dependencies = []
+        # Tool versioning variables
+        self.write_version_cmd = None
+        self.version_string = ""
+        self.__galaxy_lib_dir = None
+        # With job outputs in the working directory, we need the working
+        # directory to be set before prepare is run, or else premature deletion
+        # and job recovery fail.
+        # Create the working dir if necessary
+        self._create_working_directory()
+        self.dataset_path_rewriter = self._job_dataset_path_rewriter( self.working_directory )
+        self.output_paths = None
+        self.output_hdas_and_paths = None
+        self.tool_provided_job_metadata = None
+        # Wrapper holding the info required to restore and clean up from files used for setting metadata externally
+        self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job )
+        self.job_runner_mapper = JobRunnerMapper( self, queue.dispatcher.url_to_destination, self.app.job_config )
+        self.params = None
+        if job.params:
+            self.params = loads( job.params )
+        if use_persisted_destination:
+            self.job_runner_mapper.cached_job_destination = JobDestination( from_job=job )
+
+        self.__commands_in_new_shell = True
+        self.__user_system_pwent = None
+        self.__galaxy_system_pwent = None
+
+    def _job_dataset_path_rewriter( self, working_directory ):
+        outputs_to_working_directory = util.asbool(self.get_destination_configuration("outputs_to_working_directory", False))
+        if outputs_to_working_directory:
+            dataset_path_rewriter = OutputsToWorkingDirectoryPathRewriter( working_directory )
+        else:
+            dataset_path_rewriter = NullDatasetPathRewriter( )
+        return dataset_path_rewriter
+
+    @property
+    def cleanup_job(self):
+        """ Remove the job after it is complete, should return "always", "onsuccess", or "never".
+        """
+        return self.get_destination_configuration("cleanup_job", DEFAULT_CLEANUP_JOB)
+
+    @property
+    def requires_containerization(self):
+        return util.asbool(self.get_destination_configuration("require_container", "False"))
+
+    def can_split( self ):
+        # Should the job handler split this job up?
+        return self.app.config.use_tasked_jobs and self.tool.parallelism
+
+    def get_job_runner_url( self ):
+        log.warning('(%s) Job runner URLs are deprecated, use destinations instead.' % self.job_id)
+        return self.job_destination.url
+
+    def get_parallelism(self):
+        return self.tool.parallelism
+
+    @property
+    def shell(self):
+        return self.job_destination.shell or getattr(self.app.config, 'default_job_shell', DEFAULT_JOB_SHELL)
+
+    def disable_commands_in_new_shell(self):
+        """Provide an extension point to disable this isolation,
+        Pulsar builds its own job script so this is not needed for
+        remote jobs."""
+        self.__commands_in_new_shell = False
+
+    @property
+    def strict_shell(self):
+        return self.tool.strict_shell
+
+    @property
+    def commands_in_new_shell(self):
+        return self.__commands_in_new_shell
+
+    @property
+    def galaxy_lib_dir(self):
+        if self.__galaxy_lib_dir is None:
+            self.__galaxy_lib_dir = os.path.abspath( "lib" )  # cwd = galaxy root
+        return self.__galaxy_lib_dir
+
+    @property
+    def galaxy_virtual_env(self):
+        return os.environ.get('VIRTUAL_ENV', None)
+
+    # legacy naming
+    get_job_runner = get_job_runner_url
+
+    @property
+    def job_destination(self):
+        """Return the JobDestination that this job will use to run.  This will
+        either be a configured destination, a randomly selected destination if
+        the configured destination was a tag, or a dynamically generated
+        destination from the dynamic runner.
+
+        Calling this method for the first time causes the dynamic runner to do
+        its calculation, if any.
+
+        :returns: ``JobDestination``
+        """
+        return self.job_runner_mapper.get_job_destination(self.params)
+
+    def get_job( self ):
+        return self.sa_session.query( model.Job ).get( self.job_id )
+
+    def get_id_tag(self):
+        # For compatibility with drmaa, which uses job_id right now, and TaskWrapper
+        return self.get_job().get_id_tag()
+
+    def get_param_dict( self ):
+        """
+        Restore the dictionary of parameters from the database.
+        """
+        job = self.get_job()
+        param_dict = dict( [ ( p.name, p.value ) for p in job.parameters ] )
+        param_dict = self.tool.params_from_strings( param_dict, self.app )
+        return param_dict
+
+    def get_version_string_path( self ):
+        return os.path.abspath(os.path.join(self.app.config.new_file_path, "GALAXY_VERSION_STRING_%s" % self.job_id))
+
+    def prepare( self, compute_environment=None ):
+        """
+        Prepare the job to run by creating the working directory and the
+        config files.
+        """
+        self.sa_session.expunge_all()  # this prevents the metadata reverting that has been seen in conjunction with the PBS job runner
+
+        if not os.path.exists( self.working_directory ):
+            os.mkdir( self.working_directory )
+
+        job = self._load_job()
+
+        def get_special( ):
+            special = self.sa_session.query( model.JobExportHistoryArchive ).filter_by( job=job ).first()
+            if not special:
+                special = self.sa_session.query( model.GenomeIndexToolData ).filter_by( job=job ).first()
+            return special
+
+        tool_evaluator = self._get_tool_evaluator( job )
+        compute_environment = compute_environment or self.default_compute_environment( job )
+        tool_evaluator.set_compute_environment( compute_environment, get_special=get_special )
+
+        self.sa_session.flush()
+
+        self.command_line, self.extra_filenames, self.environment_variables = tool_evaluator.build()
+        # Ensure galaxy_lib_dir is set in case there are any later chdirs
+        self.galaxy_lib_dir
+        # Shell fragment to inject dependencies
+        self.dependency_shell_commands = self.tool.build_dependency_shell_commands(job_directory=self.working_directory)
+        # We need command_line persisted to the db in order for Galaxy to re-queue the job
+        # if the server was stopped and restarted before the job finished
+        job.command_line = unicodify(self.command_line)
+        job.dependencies = self.tool.dependencies
+        self.sa_session.add( job )
+        self.sa_session.flush()
+        # Return list of all extra files
+        self.param_dict = tool_evaluator.param_dict
+        version_string_cmd_raw = self.tool.version_string_cmd
+        if version_string_cmd_raw:
+            version_command_template = string.Template(version_string_cmd_raw)
+            version_string_cmd = version_command_template.safe_substitute({"__tool_directory__": compute_environment.tool_directory() })
+            self.write_version_cmd = "%s > %s 2>&1" % ( version_string_cmd, compute_environment.version_path() )
+        else:
+            self.write_version_cmd = None
+        return self.extra_filenames
+
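+    # Illustrative version command templating in prepare() above (the tool
+    # name is hypothetical): a version_string_cmd of
+    #
+    #     $__tool_directory__/mytool --version
+    #
+    # is rendered with the compute environment's tool directory and becomes
+    #
+    #     /path/to/tool/mytool --version > <version_path> 2>&1
+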
+    def _create_working_directory( self ):
+        job = self.get_job()
+        try:
+            self.app.object_store.create(
+                job, base_dir='job_work', dir_only=True, obj_dir=True )
+            self.working_directory = self.app.object_store.get_filename(
+                job, base_dir='job_work', dir_only=True, obj_dir=True )
+
+            # The tool execution is given a working directory beneath the
+            # "job" working directory.
+            self.tool_working_directory = os.path.join(self.working_directory, "working")
+            safe_makedirs(self.tool_working_directory)
+            log.debug( '(%s) Working directory for job is: %s',
+                       self.job_id, self.working_directory )
+        except ObjectInvalid:
+            raise Exception( '(%s) Unable to create job working directory'
+                             % job.id )
+
+    def clear_working_directory( self ):
+        job = self.get_job()
+        if not os.path.exists( self.working_directory ):
+            log.warning( '(%s): Working directory clear requested but %s does '
+                         'not exist',
+                         self.job_id,
+                         self.working_directory )
+            return
+
+        self.app.object_store.create(
+            job, base_dir='job_work', dir_only=True, obj_dir=True,
+            extra_dir='_cleared_contents', extra_dir_at_root=True )
+        base = self.app.object_store.get_filename(
+            job, base_dir='job_work', dir_only=True, obj_dir=True,
+            extra_dir='_cleared_contents', extra_dir_at_root=True )
+        date_str = datetime.datetime.now().strftime( '%Y%m%d-%H%M%S' )
+        arc_dir = os.path.join( base, date_str )
+        shutil.move( self.working_directory, arc_dir )
+        self._create_working_directory()
+        log.debug( '(%s) Previous working directory moved to %s',
+                   self.job_id, arc_dir )
+
+    def default_compute_environment( self, job=None ):
+        if not job:
+            job = self.get_job()
+        return SharedComputeEnvironment( self, job )
+
+    def _load_job( self ):
+        # Load job from database and verify it has user or session.
+        # Restore parameters from the database
+        job = self.get_job()
+        if job.user is None and job.galaxy_session is None:
+            raise Exception( 'Job %s has no user and no session.' % job.id )
+        return job
+
+    def _get_tool_evaluator( self, job ):
+        # Hacky way to avoid a circular import for now.
+        # Placing ToolEvaluator in either jobs or tools
+        # results in a circular dependency.
+        from galaxy.tools.evaluation import ToolEvaluator
+
+        tool_evaluator = ToolEvaluator(
+            app=self.app,
+            job=job,
+            tool=self.tool,
+            local_working_directory=self.working_directory,
+        )
+        return tool_evaluator
+
+    def fail( self, message, exception=False, stdout="", stderr="", exit_code=None ):
+        """
+        Indicate job failure by setting state and message on all output
+        datasets.
+        """
+        job = self.get_job()
+        self.sa_session.refresh( job )
+        # if the job was deleted, don't fail it
+        if not job.state == job.states.DELETED:
+            # Check if the failure is due to an exception
+            if exception:
+                # Save the traceback immediately in case we generate another
+                # below
+                job.traceback = traceback.format_exc()
+                # Get the exception and let the tool attempt to generate
+                # a better message
+                etype, evalue, tb = sys.exc_info()
+
+            outputs_to_working_directory = util.asbool(self.get_destination_configuration("outputs_to_working_directory", False))
+            if outputs_to_working_directory:
+                for dataset_path in self.get_output_fnames():
+                    try:
+                        shutil.move( dataset_path.false_path, dataset_path.real_path )
+                        log.debug( "fail(): Moved %s to %s" % ( dataset_path.false_path, dataset_path.real_path ) )
+                    except ( IOError, OSError ) as e:
+                        log.error( "fail(): Missing output file in working directory: %s" % e )
+            for dataset_assoc in job.output_datasets + job.output_library_datasets:
+                dataset = dataset_assoc.dataset
+                self.sa_session.refresh( dataset )
+                dataset.state = dataset.states.ERROR
+                dataset.blurb = 'tool error'
+                dataset.info = message
+                dataset.set_size()
+                dataset.dataset.set_total_size()
+                dataset.mark_unhidden()
+                if dataset.ext == 'auto':
+                    dataset.extension = 'data'
+                # Update (non-library) job output datasets through the object store
+                if dataset not in job.output_library_datasets:
+                    self.app.object_store.update_from_file(dataset.dataset, create=True)
+                # Pause any dependent jobs (and those jobs' outputs)
+                for dep_job_assoc in dataset.dependent_jobs:
+                    self.pause( dep_job_assoc.job, "Execution of this dataset's job is paused because its input datasets are in an error state." )
+                self.sa_session.add( dataset )
+                self.sa_session.flush()
+            job.set_final_state( job.states.ERROR )
+            job.command_line = unicodify(self.command_line)
+            job.info = message
+            # TODO: Put setting the stdout, stderr, and exit code in one place
+            # (not duplicated with the finish method).
+            job.set_streams( stdout, stderr )
+            # Let the exit code be Null if one is not provided:
+            if ( exit_code is not None ):
+                job.exit_code = exit_code
+
+            self.sa_session.add( job )
+            self.sa_session.flush()
+        self._report_error_to_sentry()
+        # Perform email action even on failure.
+        for pja in [pjaa.post_job_action for pjaa in job.post_job_actions if pjaa.post_job_action.action_type == "EmailAction"]:
+            ActionBox.execute(self.app, self.sa_session, pja, job)
+        # If the job was deleted, call tool specific fail actions (used for e.g. external metadata) and clean up
+        if self.tool:
+            self.tool.job_failed( self, message, exception )
+        cleanup_job = self.cleanup_job
+        delete_files = cleanup_job == 'always' or (cleanup_job == 'onsuccess' and job.state == job.states.DELETED)
+        self.cleanup( delete_files=delete_files )
+
+    def pause( self, job=None, message=None ):
+        if job is None:
+            job = self.get_job()
+        if message is None:
+            message = "Execution of this dataset's job is paused"
+        if job.state == job.states.NEW:
+            for dataset_assoc in job.output_datasets + job.output_library_datasets:
+                dataset_assoc.dataset.dataset.state = dataset_assoc.dataset.dataset.states.PAUSED
+                dataset_assoc.dataset.info = message
+                self.sa_session.add( dataset_assoc.dataset )
+            job.set_state( job.states.PAUSED )
+            self.sa_session.add( job )
+
+    def mark_as_resubmitted( self, info=None ):
+        job = self.get_job()
+        self.sa_session.refresh( job )
+        if info is not None:
+            job.info = info
+        job.set_state( model.Job.states.RESUBMITTED )
+        self.sa_session.add( job )
+        self.sa_session.flush()
+
+    def change_state( self, state, info=False, flush=True, job=None ):
+        job_supplied = job is not None
+        if not job_supplied:
+            job = self.get_job()
+            self.sa_session.refresh( job )
+        # Else:
+        # If this is a new job (e.g. initially queued) - we are in the same
+        # thread and no other threads are working on the job yet - so don't refresh.
+
+        if job.state in model.Job.terminal_states:
+            log.warning( "(%s) Ignoring state change from '%s' to '%s' for job "
+                         "that is already terminal", job.id, job.state, state )
+            return
+        for dataset_assoc in job.output_datasets + job.output_library_datasets:
+            dataset = dataset_assoc.dataset
+            if not job_supplied:
+                self.sa_session.refresh( dataset )
+            dataset.raw_set_dataset_state( state )
+            if info:
+                dataset.info = info
+            self.sa_session.add( dataset )
+        if info:
+            job.info = info
+        job.set_state( state )
+        self.sa_session.add( job )
+        if flush:
+            self.sa_session.flush()
+
+    def get_state( self ):
+        job = self.get_job()
+        self.sa_session.refresh( job )
+        return job.state
+
+    def set_runner( self, runner_url, external_id ):
+        log.warning('set_runner() is deprecated, use set_job_destination()')
+        self.set_job_destination(self.job_destination, external_id)
+
+    def set_job_destination( self, job_destination, external_id=None, flush=True, job=None ):
+        """
+        Persist job destination params in the database for recovery.
+
+        self.job_destination is not used because a runner may choose to rewrite
+        parts of the destination (e.g. the params).
+        """
+        if job is None:
+            job = self.get_job()
+        log.debug('(%s) Persisting job destination (destination id: %s)' % (job.id, job_destination.id))
+        job.destination_id = job_destination.id
+        job.destination_params = job_destination.params
+        job.job_runner_name = job_destination.runner
+        job.job_runner_external_id = external_id
+        self.sa_session.add(job)
+        if flush:
+            self.sa_session.flush()
+
+    def get_destination_configuration(self, key, default=None):
+        """ Get a destination parameter that can be defaulted back
+        in app.config if it needs to be applied globally.
+        """
+        return self.get_job().get_destination_configuration(
+            self.app.config, key, default
+        )
+
+    def finish(
+        self,
+        stdout,
+        stderr,
+        tool_exit_code=None,
+        remote_working_directory=None,
+        remote_metadata_directory=None,
+    ):
+        """
+        Called to indicate that the associated command has been run. Updates
+        the output datasets based on stderr and stdout from the command, and
+        the contents of the output files.
+        """
+        # remote_working_directory is not used by updated jobs (Pulsar 7.0+ and
+        # jobs originating from Galaxy 16.04+); keep it for a few releases to
+        # support older jobs.
+        finish_timer = util.ExecutionTimer()
+
+        # default post job setup
+        self.sa_session.expunge_all()
+        job = self.get_job()
+
+        # TODO: After failing here, consider returning from the function.
+        try:
+            self.reclaim_ownership()
+        except Exception:
+            log.exception( '(%s) Failed to change ownership of %s, failing' % ( job.id, self.working_directory ) )
+            return self.fail( job.info, stdout=stdout, stderr=stderr, exit_code=tool_exit_code )
+
+        # if the job was deleted, don't finish it
+        if job.state == job.states.DELETED or job.state == job.states.ERROR:
+            # SM: Note that, at this point, the exit code must be saved in case
+            # there was an error. Errors caught here could mean that the job
+            # was deleted by an administrator (based on old comments), but it
+            # could also mean that a job was broken up into tasks and one of
+            # the tasks failed. So include the stderr, stdout, and exit code:
+            return self.fail( job.info, stderr=stderr, stdout=stdout, exit_code=tool_exit_code )
+
+        # Check the tool's stdout, stderr, and exit code for errors, but only
+        # if the job has not already been marked as having an error.
+        # The job's stdout and stderr will be set accordingly.
+
+        # We set final_job_state to use for dataset management, but *don't* set
+        # job.state until after dataset collection to prevent history issues
+        if ( self.check_tool_output( stdout, stderr, tool_exit_code, job ) ):
+            final_job_state = job.states.OK
+        else:
+            final_job_state = job.states.ERROR
+
+        if self.tool.version_string_cmd:
+            version_filename = self.get_version_string_path()
+            if os.path.exists(version_filename):
+                self.version_string = open(version_filename).read()
+                os.unlink(version_filename)
+
+        outputs_to_working_directory = util.asbool(self.get_destination_configuration("outputs_to_working_directory", False))
+        if outputs_to_working_directory and not self.__link_file_check():
+            for dataset_path in self.get_output_fnames():
+                try:
+                    shutil.move( dataset_path.false_path, dataset_path.real_path )
+                    log.debug( "finish(): Moved %s to %s" % ( dataset_path.false_path, dataset_path.real_path ) )
+                except ( IOError, OSError ):
+                    # this can happen if Galaxy is restarted during the job's
+                    # finish method - the false_path file has already moved,
+                    # and when the job is recovered, it won't be found.
+                    if os.path.exists( dataset_path.real_path ) and os.stat( dataset_path.real_path ).st_size > 0:
+                        log.warning( "finish(): %s not found, but %s is not empty, so it will be used instead"
+                                     % ( dataset_path.false_path, dataset_path.real_path ) )
+                    else:
+                        # Prior to fail we need to set job.state
+                        job.set_state( final_job_state )
+                        return self.fail( "Job %s's output dataset(s) could not be read" % job.id )
+
+        job_context = ExpressionContext( dict( stdout=job.stdout, stderr=job.stderr ) )
+        for dataset_assoc in job.output_datasets + job.output_library_datasets:
+            context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
+            # Should this also be checking library associations? Can a library item be added from a history before the job has ended?
+            # Let's not allow this to occur.
+            # We need to update all associated output HDAs, i.e. in case the history was shared while the job was running.
+            for dataset in dataset_assoc.dataset.dataset.history_associations + dataset_assoc.dataset.dataset.library_associations:
+                trynum = 0
+                while trynum < self.app.config.retry_job_output_collection:
+                    try:
+                        # Attempt to short circuit NFS attribute caching
+                        os.stat( dataset.dataset.file_name )
+                        os.chown( dataset.dataset.file_name, os.getuid(), -1 )
+                        trynum = self.app.config.retry_job_output_collection
+                    except ( OSError, ObjectNotFound ) as e:
+                        trynum += 1
+                        log.warning( 'Error accessing %s, will retry: %s', dataset.dataset.file_name, e )
+                        time.sleep( 2 )
+                if getattr( dataset, "hidden_beneath_collection_instance", None ):
+                    dataset.visible = False
+                dataset.blurb = 'done'
+                dataset.peek = 'no peek'
+                dataset.info = (dataset.info or '')
+                if context['stdout'].strip():
+                    # Ensure white space between entries
+                    dataset.info = dataset.info.rstrip() + "\n" + context['stdout'].strip()
+                if context['stderr'].strip():
+                    # Ensure white space between entries
+                    dataset.info = dataset.info.rstrip() + "\n" + context['stderr'].strip()
+                dataset.tool_version = self.version_string
+                dataset.set_size()
+                if 'uuid' in context:
+                    dataset.dataset.uuid = context['uuid']
+                # Update (non-library) job output datasets through the object store
+                if dataset not in job.output_library_datasets:
+                    self.app.object_store.update_from_file(dataset.dataset, create=True)
+                self._collect_extra_files(dataset.dataset, self.working_directory)
+                if job.states.ERROR == final_job_state:
+                    dataset.blurb = "error"
+                    dataset.mark_unhidden()
+                elif dataset.has_data():
+                    # If the tool was expected to set the extension, attempt to retrieve it
+                    if dataset.ext == 'auto':
+                        dataset.extension = context.get( 'ext', 'data' )
+                        dataset.init_meta( copy_from=dataset )
+                    # If a dataset was copied, it won't appear in our dictionary:
+                    # either use the metadata from the originating output dataset, or call set_meta on the copies.
+                    # It would be quicker to just copy the metadata from the originating output dataset,
+                    # but that is somewhat trickier (we would need to recurse up the copied_from tree), so for now we call set_meta().
+                    retry_internally = util.asbool(self.get_destination_configuration("retry_metadata_internally", True))
+                    if retry_internally and not self.external_output_metadata.external_metadata_set_successfully(dataset, self.sa_session ):
+                        # If Galaxy was expected to sniff type and didn't - do so.
+                        if dataset.ext == "_sniff_":
+                            extension = sniff.handle_uploaded_dataset_file( dataset.dataset.file_name, self.app.datatypes_registry )
+                            dataset.extension = extension
+
+                        # call datatype.set_meta directly for the initial set_meta call during dataset creation
+                        dataset.datatype.set_meta( dataset, overwrite=False )
+                    elif ( job.states.ERROR != final_job_state and
+                            not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) ):
+                        dataset._state = model.Dataset.states.FAILED_METADATA
+                    else:
+                        # Load metadata from file.
+                        # We must no longer allow metadata to be edited while the job is still running:
+                        # if it were edited, the metadata changed on the running output would no longer match
+                        # the metadata that was stored to disk for use by the external process,
+                        # and the changes made by the user would be lost without warning or notice.
+                        output_filename = self.external_output_metadata.get_output_filenames_by_dataset( dataset, self.sa_session ).filename_out
+
+                        def path_rewriter( path ):
+                            if not path:
+                                return path
+                            normalized_remote_working_directory = remote_working_directory and os.path.normpath( remote_working_directory )
+                            normalized_remote_metadata_directory = remote_metadata_directory and os.path.normpath( remote_metadata_directory )
+                            normalized_path = os.path.normpath( path )
+                            if remote_working_directory and normalized_path.startswith( normalized_remote_working_directory ):
+                                return normalized_path.replace( normalized_remote_working_directory, self.working_directory, 1 )
+                            if remote_metadata_directory and normalized_path.startswith( normalized_remote_metadata_directory ):
+                                return normalized_path.replace( normalized_remote_metadata_directory, self.working_directory, 1 )
+                            return path
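+                        # Illustrative rewrite (paths assumed): with
+                        # remote_working_directory='/remote/staging/42',
+                        # path_rewriter('/remote/staging/42/galaxy.json')
+                        # yields '<self.working_directory>/galaxy.json'.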
+
+                        dataset.metadata.from_JSON_dict( output_filename, path_rewriter=path_rewriter )
+                    try:
+                        assert context.get( 'line_count', None ) is not None
+                        if ( not dataset.datatype.composite_type and dataset.dataset.is_multi_byte() ) or self.tool.is_multi_byte:
+                            dataset.set_peek( line_count=context['line_count'], is_multi_byte=True )
+                        else:
+                            dataset.set_peek( line_count=context['line_count'] )
+                    except Exception:
+                        if ( not dataset.datatype.composite_type and dataset.dataset.is_multi_byte() ) or self.tool.is_multi_byte:
+                            dataset.set_peek( is_multi_byte=True )
+                        else:
+                            dataset.set_peek()
+                    for context_key in ['name', 'info', 'dbkey']:
+                        if context_key in context:
+                            context_value = context[context_key]
+                            setattr(dataset, context_key, context_value)
+                else:
+                    dataset.blurb = "empty"
+                    if dataset.ext == 'auto':
+                        dataset.extension = 'txt'
+                self.sa_session.add( dataset )
+            if job.states.ERROR == final_job_state:
+                log.debug( "(%s) setting dataset %s state to ERROR", job.id, dataset_assoc.dataset.dataset.id )
+                # TODO: This is where the state is being set to error. Change it!
+                dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
+                # Pause any dependent jobs (and those jobs' outputs)
+                for dep_job_assoc in dataset_assoc.dataset.dependent_jobs:
+                    self.pause( dep_job_assoc.job, "Execution of this dataset's job is paused because its input datasets are in an error state." )
+            else:
+                dataset_assoc.dataset.dataset.state = model.Dataset.states.OK
+            # If any of the rest of the finish method below raises an
+            # exception, the fail method will run and set the datasets to
+            # ERROR.  The user will never see that the datasets are in error if
+            # they were flushed as OK here, since upon doing so, the history
+            # panel stops checking for updates.  So allow the
+            # self.sa_session.flush() at the bottom of this method to set
+            # the state instead.
+
+        for pja in job.post_job_actions:
+            ActionBox.execute(self.app, self.sa_session, pja.post_job_action, job)
+        # Flush all the dataset and job changes above.  Dataset state changes
+        # will now be seen by the user.
+        self.sa_session.flush()
+
+        # Shrink streams and ensure unicode.
+        job.set_streams( job.stdout, job.stderr )
+
+        # The exit code will be null if there is no exit code to be set.
+        # This is so that we don't assign an exit code, such as 0, that
+        # is either incorrect or has the wrong semantics.
+        if tool_exit_code is not None:
+            job.exit_code = tool_exit_code
+        # custom post process setup
+        inp_data = dict( [ ( da.name, da.dataset ) for da in job.input_datasets ] )
+        out_data = dict( [ ( da.name, da.dataset ) for da in job.output_datasets ] )
+        inp_data.update( [ ( da.name, da.dataset ) for da in job.input_library_datasets ] )
+        out_data.update( [ ( da.name, da.dataset ) for da in job.output_library_datasets ] )
+
+        # TODO: eliminate overlap with tools/evaluation.py
+        out_collections = dict( [ ( obj.name, obj.dataset_collection_instance ) for obj in job.output_dataset_collection_instances ] )
+        out_collections.update( [ ( obj.name, obj.dataset_collection ) for obj in job.output_dataset_collections ] )
+
+        input_ext = 'data'
+        input_dbkey = '?'
+        for _, data in inp_data.items():
+            # This for loop is odd, but it roughly simulates the behavior in galaxy.tools.actions
+            if not data:
+                continue
+            input_ext = data.ext
+            input_dbkey = data.dbkey or '?'
+        # why not re-use self.param_dict here?
+        param_dict = dict( [ ( p.name, p.value ) for p in job.parameters ] )
+        param_dict = self.tool.params_from_strings( param_dict, self.app )
+        # Create generated output children and primary datasets and add to param_dict
+        tool_working_directory = self.tool_working_directory
+        # LEGACY: Remove in 17.XX
+        if not os.path.exists(tool_working_directory):
+            # Maybe this is a legacy job, use the job working directory instead
+            tool_working_directory = self.working_directory
+        collected_datasets = {
+            'children': self.tool.collect_child_datasets(out_data, tool_working_directory),
+            'primary': self.tool.collect_primary_datasets(out_data, tool_working_directory, input_ext, input_dbkey)
+        }
+        self.tool.collect_dynamic_collections(
+            out_collections,
+            job_working_directory=tool_working_directory,
+            inp_data=inp_data,
+            job=job,
+            input_dbkey=input_dbkey,
+        )
+        param_dict.update({'__collected_datasets__': collected_datasets})
+        # Certain tools require tasks to be completed after job execution
+        # ( this used to be performed in the "exec_after_process" hook, but hooks are deprecated ).
+        self.tool.exec_after_process( self.queue.app, inp_data, out_data, param_dict, job=job )
+        # Call 'exec_after_process' hook
+        self.tool.call_hook( 'exec_after_process', self.queue.app, inp_data=inp_data,
+                             out_data=out_data, param_dict=param_dict,
+                             tool=self.tool, stdout=job.stdout, stderr=job.stderr )
+        job.command_line = unicodify(self.command_line)
+
+        collected_bytes = 0
+        # Once datasets are collected, set the total dataset size (includes extra files)
+        for dataset_assoc in job.output_datasets:
+            dataset_assoc.dataset.dataset.set_total_size()
+            collected_bytes += dataset_assoc.dataset.dataset.get_total_size()
+
+        if job.user:
+            job.user.adjust_total_disk_usage(collected_bytes)
+
+        # Empirically, we need to update job.user and
+        # job.workflow_invocation_step.workflow_invocation in separate
+        # transactions. Best guess as to why is that the workflow_invocation
+        # may or may not exist when the job is first loaded by the handler -
+        # and depending on whether it is or not sqlalchemy orders the updates
+        # differently and deadlocks can occur (one thread updates user and
+        # waits on invocation and the other updates invocation and waits on
+        # user).
+        self.sa_session.flush()
+
+        # fix permissions
+        for path in [ dp.real_path for dp in self.get_mutable_output_fnames() ]:
+            util.umask_fix_perms( path, self.app.config.umask, 0o666, self.app.config.gid )
+
+        # Finally set the job state.  This should only happen *after* all
+        # dataset creation, and will allow us to eliminate force_history_refresh.
+        job.set_final_state( final_job_state )
+        if not job.tasks:
+            # If job was composed of tasks, don't attempt to recollect statistics
+            self._collect_metrics( job )
+        self.sa_session.flush()
+        log.debug( 'job %d ended (finish() executed in %s)' % (self.job_id, finish_timer) )
+        if job.state == job.states.ERROR:
+            self._report_error_to_sentry()
+        cleanup_job = self.cleanup_job
+        delete_files = cleanup_job == 'always' or ( job.state == job.states.OK and cleanup_job == 'onsuccess' )
+        self.cleanup( delete_files=delete_files )
+
+    def check_tool_output( self, stdout, stderr, tool_exit_code, job ):
+        return check_output( self.tool, stdout, stderr, tool_exit_code, job )
+
+    def cleanup( self, delete_files=True ):
+        # At least one of these tool cleanup actions (job import) is needed
+        # for the tool to work properly, which is why one might want to run
+        # cleanup but not delete files.
+        try:
+            if delete_files:
+                for fname in self.extra_filenames:
+                    os.remove( fname )
+                self.external_output_metadata.cleanup_external_metadata( self.sa_session )
+            galaxy.tools.imp_exp.JobExportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
+            galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.app, self.job_id ).cleanup_after_job()
+            if delete_files:
+                self.app.object_store.delete(self.get_job(), base_dir='job_work', entire_dir=True, dir_only=True, obj_dir=True)
+        except Exception:
+            log.exception( "Unable to cleanup job %d" % self.job_id )
+
+    def _collect_extra_files(self, dataset, job_working_directory):
+        temp_file_path = os.path.join( job_working_directory, "dataset_%s_files" % ( dataset.id ) )
+        extra_dir = None
+        try:
+            # This skips creation of directories - object store
+            # automatically creates them.  However, empty directories will
+            # not be created in the object store at all, which might be a
+            # problem.
+            for root, dirs, files in os.walk( temp_file_path ):
+                extra_dir = root.replace(job_working_directory, '', 1).lstrip(os.path.sep)
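+                # e.g. (illustrative paths): root '/jobdir/dataset_5_files/sub'
+                # under job_working_directory '/jobdir' yields
+                # extra_dir 'dataset_5_files/sub'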
+                for f in files:
+                    self.app.object_store.update_from_file(
+                        dataset,
+                        extra_dir=extra_dir,
+                        alt_name=f,
+                        file_name=os.path.join(root, f),
+                        create=True,
+                        preserve_symlinks=True
+                    )
+        except Exception as e:
+            log.debug( "Error in collect_associated_files: %s" % ( e ) )
+
+    def _collect_metrics( self, has_metrics ):
+        job = has_metrics.get_job()
+        per_plugin_properties = self.app.job_metrics.collect_properties( job.destination_id, self.job_id, self.working_directory )
+        if per_plugin_properties:
+            log.info( "Collecting metrics for %s %s" % ( type(has_metrics).__name__, getattr( has_metrics, 'id', None ) ) )
+        for plugin, properties in per_plugin_properties.items():
+            for metric_name, metric_value in properties.items():
+                if metric_value is not None:
+                    has_metrics.add_metric( plugin, metric_name, metric_value )
+
+    def get_output_sizes( self ):
+        sizes = []
+        output_paths = self.get_output_fnames()
+        for outfile in [ str( o ) for o in output_paths ]:
+            if os.path.exists( outfile ):
+                sizes.append( ( outfile, os.stat( outfile ).st_size ) )
+            else:
+                sizes.append( ( outfile, 0 ) )
+        return sizes
+
+    def check_limits(self, runtime=None):
+        if self.app.job_config.limits.output_size > 0:
+            for outfile, size in self.get_output_sizes():
+                if size > self.app.job_config.limits.output_size:
+                    log.warning( '(%s) Job output size %s has exceeded the global output size limit', self.get_id_tag(), os.path.basename( outfile ) )
+                    return ( JobState.runner_states.OUTPUT_SIZE_LIMIT,
+                             'Job output file grew too large (greater than %s), please try different inputs or parameters'
+                             % util.nice_size( self.app.job_config.limits.output_size ) )
+        if self.app.job_config.limits.walltime_delta is not None and runtime is not None:
+            if runtime > self.app.job_config.limits.walltime_delta:
+                log.warning( '(%s) Job runtime %s has exceeded the global walltime, it will be terminated', self.get_id_tag(), runtime )
+                return ( JobState.runner_states.GLOBAL_WALLTIME_REACHED,
+                         'Job ran longer than the maximum allowed execution time (runtime: %s, limit: %s), please try different inputs or parameters'
+                         % ( str(runtime).split('.')[0], self.app.job_config.limits.walltime ) )
+        return None
+
+    def has_limits( self ):
+        has_output_limit = self.app.job_config.limits.output_size > 0
+        has_walltime_limit = self.app.job_config.limits.walltime_delta is not None
+        return has_output_limit or has_walltime_limit
+
+    def get_command_line( self ):
+        return self.command_line
+
+    def get_session_id( self ):
+        return self.session_id
+
+    def get_env_setup_clause( self ):
+        if self.app.config.environment_setup_file is None:
+            return ''
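+        # Illustrative (path assumed): with environment_setup_file set to
+        # '/etc/galaxy/env.sh' this returns:
+        #     [ -f "/etc/galaxy/env.sh" ] && . /etc/galaxy/env.sh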
+        return '[ -f "%s" ] && . %s' % ( self.app.config.environment_setup_file, self.app.config.environment_setup_file )
+
+    def get_input_dataset_fnames( self, ds ):
+        filenames = [ ds.file_name ]
+        # we will need to stage in metadata file names also
+        # TODO: would be better to only stage in metadata files that are actually needed (found in command line, referenced in config files, etc.)
+        for key, value in ds.metadata.items():
+            if isinstance( value, model.MetadataFile ):
+                filenames.append( value.file_name )
+        return filenames
+
+    def get_input_fnames( self ):
+        job = self.get_job()
+        filenames = []
+        for da in job.input_datasets + job.input_library_datasets:  # da is JobToInputDatasetAssociation object
+            if da.dataset:
+                filenames.extend(self.get_input_dataset_fnames(da.dataset))
+        return filenames
+
+    def get_input_paths( self, job=None ):
+        if job is None:
+            job = self.get_job()
+        paths = []
+        for da in job.input_datasets + job.input_library_datasets:  # da is JobToInputDatasetAssociation object
+            if da.dataset:
+                filenames = self.get_input_dataset_fnames(da.dataset)
+                for real_path in filenames:
+                    false_path = self.dataset_path_rewriter.rewrite_dataset_path( da.dataset, 'input' )
+                    paths.append( DatasetPath( da.id, real_path=real_path, false_path=false_path, mutable=False ) )
+        return paths
+
+    def get_output_fnames( self ):
+        if self.output_paths is None:
+            self.compute_outputs()
+        return self.output_paths
+
+    def get_mutable_output_fnames( self ):
+        if self.output_paths is None:
+            self.compute_outputs()
+        return [dsp for dsp in self.output_paths if dsp.mutable]
+
+    def get_output_hdas_and_fnames( self ):
+        if self.output_hdas_and_paths is None:
+            self.compute_outputs()
+        return self.output_hdas_and_paths
+
+    def compute_outputs( self ):
+        dataset_path_rewriter = self.dataset_path_rewriter
+
+        job = self.get_job()
+        # Job output datasets are a combination of history, library, and JEHA (job export history archive) datasets.
+        special = self.sa_session.query( model.JobExportHistoryArchive ).filter_by( job=job ).first()
+        false_path = None
+
+        results = []
+        for da in job.output_datasets + job.output_library_datasets:
+            da_false_path = dataset_path_rewriter.rewrite_dataset_path( da.dataset, 'output' )
+            mutable = da.dataset.dataset.external_filename is None
+            dataset_path = DatasetPath( da.dataset.dataset.id, da.dataset.file_name, false_path=da_false_path, mutable=mutable )
+            results.append( ( da.name, da.dataset, dataset_path ) )
+
+        self.output_paths = [t[2] for t in results]
+        self.output_hdas_and_paths = dict([(t[0], t[1:]) for t in results])
+        if special:
+            false_path = dataset_path_rewriter.rewrite_dataset_path( special.dataset, 'output' )
+            dsp = DatasetPath( special.dataset.id, special.dataset.file_name, false_path )
+            self.output_paths.append( dsp )
+        return self.output_paths
+
+    def get_output_file_id( self, file ):
+        if self.output_paths is None:
+            self.get_output_fnames()
+        for dp in self.output_paths:
+            outputs_to_working_directory = util.asbool(self.get_destination_configuration("outputs_to_working_directory", False))
+            if outputs_to_working_directory and os.path.basename( dp.false_path ) == file:
+                return dp.dataset_id
+            elif os.path.basename( dp.real_path ) == file:
+                return dp.dataset_id
+        return None
+
+    def get_tool_provided_job_metadata( self ):
+        if self.tool_provided_job_metadata is not None:
+            return self.tool_provided_job_metadata
+
+        # Look for JSONified job metadata
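+        # Each line is a JSON object; illustrative example (values assumed):
+        #     {"type": "dataset", "dataset": "output.dat", "name": "My output"}
+        # 'type' is required, and 'dataset' (the output file's basename) is used
+        # below to resolve 'dataset_id' via get_output_file_id().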
+        self.tool_provided_job_metadata = []
+        meta_file = os.path.join( self.tool_working_directory, TOOL_PROVIDED_JOB_METADATA_FILE )
+        # LEGACY: Remove in 17.XX
+        if not os.path.exists( meta_file ):
+            # Maybe this is a legacy job, use the job working directory instead
+            meta_file = os.path.join( self.working_directory, TOOL_PROVIDED_JOB_METADATA_FILE )
+
+        if os.path.exists( meta_file ):
+            for line in open( meta_file, 'r' ):
+                try:
+                    line = loads( line )
+                    assert 'type' in line
+                except Exception:
+                    log.exception( '(%s) Got JSON data from tool, but data is improperly formatted or no "type" key in data' % self.job_id )
+                    log.debug( 'Offending data was: %s' % line )
+                    continue
+                # Set the dataset id if it's a dataset entry and isn't set.
+                # This isn't insecure.  We loop the job's output datasets in
+                # the finish method, so if a tool writes out metadata for a
+                # dataset id that it doesn't own, it'll just be ignored.
+                if line['type'] == 'dataset' and 'dataset_id' not in line:
+                    try:
+                        line['dataset_id'] = self.get_output_file_id( line['dataset'] )
+                    except KeyError:
+                        log.warning( '(%s) Tool provided job dataset-specific metadata without specifying a dataset' % self.job_id )
+                        continue
+                self.tool_provided_job_metadata.append( line )
+        return self.tool_provided_job_metadata
+
+    def get_dataset_finish_context( self, job_context, dataset ):
+        for meta in self.get_tool_provided_job_metadata():
+            if meta['type'] == 'dataset' and meta['dataset_id'] == dataset.id:
+                return ExpressionContext( meta, job_context )
+        return job_context
+
+    def invalidate_external_metadata( self ):
+        job = self.get_job()
+        self.external_output_metadata.invalidate_external_metadata( [ output_dataset_assoc.dataset for
+                                                                      output_dataset_assoc in
+                                                                      job.output_datasets + job.output_library_datasets ],
+                                                                    self.sa_session )
+
+    def setup_external_metadata( self, exec_dir=None, tmp_dir=None,
+                                 dataset_files_path=None, config_root=None,
+                                 config_file=None, datatypes_config=None,
+                                 resolve_metadata_dependencies=False,
+                                 set_extension=True, **kwds ):
+        # extension could still be 'auto' if this is the upload tool.
+        job = self.get_job()
+        if set_extension:
+            for output_dataset_assoc in job.output_datasets:
+                if output_dataset_assoc.dataset.ext == 'auto':
+                    context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset )
+                    output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' )
+            self.sa_session.flush()
+        if tmp_dir is None:
+            # this dir should be relative to the exec_dir
+            tmp_dir = self.app.config.new_file_path
+        if dataset_files_path is None:
+            dataset_files_path = self.app.model.Dataset.file_path
+        if config_root is None:
+            config_root = self.app.config.root
+        if config_file is None:
+            config_file = self.app.config.config_file
+        if datatypes_config is None:
+            datatypes_config = self.app.datatypes_registry.integrated_datatypes_configs
+        command = self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for
+                                                                           output_dataset_assoc in
+                                                                           job.output_datasets + job.output_library_datasets ],
+                                                                         self.sa_session,
+                                                                         exec_dir=exec_dir,
+                                                                         tmp_dir=tmp_dir,
+                                                                         dataset_files_path=dataset_files_path,
+                                                                         config_root=config_root,
+                                                                         config_file=config_file,
+                                                                         datatypes_config=datatypes_config,
+                                                                         job_metadata=os.path.join( self.tool_working_directory, TOOL_PROVIDED_JOB_METADATA_FILE ),
+                                                                         max_metadata_value_size=self.app.config.max_metadata_value_size,
+                                                                         **kwds )
+        if resolve_metadata_dependencies:
+            metadata_tool = self.app.toolbox.get_tool("__SET_METADATA__")
+            if metadata_tool is not None:
+                # Due to tool shed hacks for migrate and installed tool tests...
+                # see (``setup_shed_tools_for_test`` in test/base/driver_util.py).
+                dependency_shell_commands = metadata_tool.build_dependency_shell_commands(job_directory=self.working_directory, metadata=True)
+                if dependency_shell_commands:
+                    dependency_shell_commands = "; ".join(dependency_shell_commands)
+                    command = "%s; %s" % (dependency_shell_commands, command)
+        return command
+
+    @property
+    def user( self ):
+        job = self.get_job()
+        if job.user is not None:
+            return job.user.email
+        elif job.galaxy_session is not None and job.galaxy_session.user is not None:
+            return job.galaxy_session.user.email
+        elif job.history is not None and job.history.user is not None:
+            return job.history.user.email
+        elif job.galaxy_session is not None:
+            return 'anonymous@' + job.galaxy_session.remote_addr.split()[-1]
+        else:
+            return 'anonymous@unknown'
+
+    def __link_file_check( self ):
+        """ outputs_to_working_directory breaks library uploads where data is
+        linked.  This method is a hack that solves that problem, but is
+        specific to the upload tool and relies on an injected job param.  This
+        method should be removed ASAP and replaced with some properly generic
+        and stateful way of determining link-only datasets. -nate
+        """
+        job = self.get_job()
+        param_dict = job.get_param_values( self.app )
+        return self.tool.id == 'upload1' and param_dict.get( 'link_data_only', None ) == 'link_to_files'
+
+    def _change_ownership( self, username, gid ):
+        job = self.get_job()
+        # FIXME: hardcoded path
+        external_chown_script = self.get_destination_configuration("external_chown_script", None)
+        cmd = [ '/usr/bin/sudo', '-E', external_chown_script, self.working_directory, username, str( gid ) ]
+        log.debug( '(%s) Changing ownership of working directory with: %s' % ( job.id, ' '.join( cmd ) ) )
+        p = subprocess.Popen( cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE )
+        # TODO: log stdout/stderr
+        stdout, stderr = p.communicate()
+        assert p.returncode == 0
+
+    def change_ownership_for_run( self ):
+        job = self.get_job()
+        external_chown_script = self.get_destination_configuration("external_chown_script", None)
+        if external_chown_script and job.user is not None:
+            try:
+                self._change_ownership( self.user_system_pwent[0], str( self.user_system_pwent[3] ) )
+            except Exception:
+                log.exception( '(%s) Failed to change ownership of %s, making world-writable instead' % ( job.id, self.working_directory ) )
+                os.chmod( self.working_directory, 0o777 )
+
+    def reclaim_ownership( self ):
+        job = self.get_job()
+        external_chown_script = self.get_destination_configuration("external_chown_script", None)
+        if external_chown_script and job.user is not None:
+            self._change_ownership( self.galaxy_system_pwent[0], str( self.galaxy_system_pwent[3] ) )
+
+    @property
+    def user_system_pwent( self ):
+        if self.__user_system_pwent is None:
+            job = self.get_job()
+            try:
+                self.__user_system_pwent = pwd.getpwnam( job.user.email.split('@')[0] )
+            except Exception:
+                pass
+        return self.__user_system_pwent
+
+    @property
+    def galaxy_system_pwent( self ):
+        if self.__galaxy_system_pwent is None:
+            self.__galaxy_system_pwent = pwd.getpwuid(os.getuid())
+        return self.__galaxy_system_pwent
+
+    def get_output_destination( self, output_path ):
+        """
+        Destination for outputs marked as from_work_dir. This is the normal case:
+        just copy these files directly to the ultimate destination.
+        """
+        return output_path
+
+    @property
+    def requires_setting_metadata( self ):
+        if self.tool:
+            return self.tool.requires_setting_metadata
+        return False
+
+    def _report_error_to_sentry( self ):
+        job = self.get_job()
+        tool = self.app.toolbox.get_tool(job.tool_id, tool_version=job.tool_version) or None
+        if self.app.sentry_client and job.state == job.states.ERROR:
+            self.app.sentry_client.capture(
+                'raven.events.Message',
+                message="Galaxy Job Error: %s  v.%s" % (job.tool_id, job.tool_version),
+                extra={
+                    'info' : job.info,
+                    'id' : job.id,
+                    'command_line' : job.command_line,
+                    'stderr' : job.stderr,
+                    'traceback': job.traceback,
+                    'exit_code': job.exit_code,
+                    'stdout': job.stdout,
+                    'handler': job.handler,
+                    'user': self.user,
+                    'tool_version': job.tool_version,
+                    'tool_xml': tool.config_file if tool else None
+                }
+            )
+
+
+class TaskWrapper(JobWrapper):
+    """
+    Extension of JobWrapper intended for running tasks.
+    Should be refactored into a generalized executable unit wrapper parent, with jobs and tasks as subclasses.
+    """
+    # Abstract this to be more useful for running tasks that *don't* necessarily compose a job.
+
+    def __init__(self, task, queue):
+        super(TaskWrapper, self).__init__(task.job, queue)
+        self.task_id = task.id
+        working_directory = task.working_directory
+        self.working_directory = working_directory
+        job_dataset_path_rewriter = self._job_dataset_path_rewriter( self.working_directory )
+        self.dataset_path_rewriter = TaskPathRewriter( working_directory, job_dataset_path_rewriter )
+        if task.prepare_input_files_cmd is not None:
+            self.prepare_input_files_cmds = [ task.prepare_input_files_cmd ]
+        else:
+            self.prepare_input_files_cmds = None
+        self.status = task.states.NEW
+
+    def can_split( self ):
+        # Should the job handler split this job up? TaskWrapper should
+        # always return False as the job has already been split.
+        return False
+
+    def get_job( self ):
+        if self.job_id:
+            return self.sa_session.query( model.Job ).get( self.job_id )
+        else:
+            return None
+
+    def get_task( self ):
+        return self.sa_session.query(model.Task).get(self.task_id)
+
+    def get_id_tag(self):
+        # For compatibility with drmaa job runner and TaskWrapper, instead of using job_id directly
+        return self.get_task().get_id_tag()
+
+    def get_param_dict( self ):
+        """
+        Restore the dictionary of parameters from the database.
+        """
+        job = self.sa_session.query( model.Job ).get( self.job_id )
+        param_dict = dict( [ ( p.name, p.value ) for p in job.parameters ] )
+        param_dict = self.tool.params_from_strings( param_dict, self.app )
+        return param_dict
+
+    def prepare( self, compute_environment=None ):
+        """
+        Prepare the job to run by creating the working directory and the
+        config files.
+        """
+        # Restore parameters from the database
+        job = self._load_job()
+        task = self.get_task()
+
+        # DBTODO New method for generating command line for a task?
+
+        tool_evaluator = self._get_tool_evaluator( job )
+        compute_environment = compute_environment or self.default_compute_environment( job )
+        tool_evaluator.set_compute_environment( compute_environment )
+
+        self.sa_session.flush()
+
+        self.command_line, self.extra_filenames, self.environment_variables = tool_evaluator.build()
+
+        # Ensure galaxy_lib_dir is set in case there are any later chdirs
+        self.galaxy_lib_dir
+        # Shell fragment to inject dependencies
+        self.dependency_shell_commands = self.tool.build_dependency_shell_commands(job_directory=self.working_directory)
+        # We need command_line persisted to the db in order for Galaxy to re-queue the job
+        # if the server was stopped and restarted before the job finished
+        task.command_line = self.command_line
+        self.sa_session.add( task )
+        self.sa_session.flush()
+
+        self.param_dict = tool_evaluator.param_dict
+        self.status = 'prepared'
+        return self.extra_filenames
+
+    def fail( self, message, exception=False ):
+        log.error("TaskWrapper Failure %s" % message)
+        self.status = 'error'
+        # How do we want to handle task failure?  Fail the job and let it clean up?
+
+    def change_state( self, state, info=False, flush=True, job=None ):
+        task = self.get_task()
+        self.sa_session.refresh( task )
+        if info:
+            task.info = info
+        task.state = state
+        self.sa_session.add( task )
+        self.sa_session.flush()
+
+    def get_state( self ):
+        task = self.get_task()
+        self.sa_session.refresh( task )
+        return task.state
+
+    def get_exit_code( self ):
+        task = self.get_task()
+        self.sa_session.refresh( task )
+        return task.exit_code
+
+    def set_runner( self, runner_url, external_id ):
+        task = self.get_task()
+        self.sa_session.refresh( task )
+        task.task_runner_name = runner_url
+        task.task_runner_external_id = external_id
+        # DBTODO Check task job_runner_stuff
+        self.sa_session.add( task )
+        self.sa_session.flush()
+
+    def finish( self, stdout, stderr, tool_exit_code=None ):
+        # DBTODO integrate previous finish logic.
+        # Simple finish for tasks.  Just set the state to OK.
+        """
+        Called to indicate that the associated command has been run. Updates
+        the output datasets based on stderr and stdout from the command, and
+        the contents of the output files.
+        """
+
+        # This may have ended too soon
+        log.debug( 'task %s for job %d ended; exit code: %d'
+                   % (self.task_id, self.job_id,
+                      tool_exit_code if tool_exit_code is not None else -256 ) )
+        # default post job setup
+        self.sa_session.expunge_all()
+        task = self.get_task()
+        # if the job was deleted, don't finish it
+        if task.state == task.states.DELETED:
+            # Job was deleted by an administrator
+            delete_files = self.cleanup_job in ( 'always', 'onsuccess' )
+            self.cleanup( delete_files=delete_files )
+            return
+        elif task.state == task.states.ERROR:
+            self.fail( task.info )
+            return
+
+        # Check what the tool returned. If the stdout or stderr matched
+        # regular expressions that indicate errors, then set an error.
+        # The same goes if the tool's exit code was in a given range.
+        if ( self.check_tool_output( stdout, stderr, tool_exit_code, task ) ):
+            task.state = task.states.OK
+        else:
+            task.state = task.states.ERROR
+
+        # Save stdout and stderr
+        task.set_streams( stdout, stderr )
+        self._collect_metrics( task )
+        task.exit_code = tool_exit_code
+        task.command_line = self.command_line
+        self.sa_session.flush()
+
+    def cleanup( self, delete_files=True ):
+        # There is no task cleanup.  The job cleans up for all tasks.
+        pass
+
+    def get_command_line( self ):
+        return self.command_line
+
+    def get_session_id( self ):
+        return self.session_id
+
+    def get_output_file_id( self, file ):
+        # There is no permanent output file for tasks.
+        return None
+
+    def get_tool_provided_job_metadata( self ):
+        # DBTODO Handle this as applicable for tasks.
+        return None
+
+    def get_dataset_finish_context( self, job_context, dataset ):
+        # Handled at the parent job level.  Do nothing here.
+        pass
+
+    def setup_external_metadata( self, exec_dir=None, tmp_dir=None, dataset_files_path=None,
+                                 config_root=None, config_file=None, datatypes_config=None,
+                                 set_extension=True, **kwds ):
+        # There is no metadata setting for tasks.  This is handled after the merge, at the job level.
+        return ""
+
+    def get_output_destination( self, output_path ):
+        """
+        Destination for outputs marked as from_work_dir. These must be copied with
+        the same basename as the path for the ultimate output destination. This is
+        required in the task case so they can be merged.
+        """
+        return os.path.join( self.working_directory, os.path.basename( output_path ) )
+
+
+@six.add_metaclass(ABCMeta)
+class ComputeEnvironment( object ):
+    """ Definition of the job as it will be run on the (potentially) remote
+    compute server.
+    """
+
+    @abstractmethod
+    def output_paths( self ):
+        """ Output DatasetPaths defined by job. """
+
+    @abstractmethod
+    def input_paths( self ):
+        """ Input DatasetPaths defined by job. """
+
+    @abstractmethod
+    def working_directory( self ):
+        """ Job working directory (potentially remote) """
+
+    @abstractmethod
+    def config_directory( self ):
+        """ Directory containing config files (potentially remote) """
+
+    @abstractmethod
+    def sep( self ):
+        """ os.path.sep for the platform this job will execute in.
+        """
+
+    @abstractmethod
+    def new_file_path( self ):
+        """ Absolute path to dump new files for this job on compute server. """
+
+    @abstractmethod
+    def tool_directory( self ):
+        """ Absolute path to tool files for this job on compute server. """
+
+    @abstractmethod
+    def version_path( self ):
+        """ Location of the version file for the underlying tool. """
+
+    @abstractmethod
+    def unstructured_path_rewriter( self ):
+        """ Return a function that takes in a value, determines if it is path
+        to be rewritten (will be passed non-path values as well - onus is on
+        this function to determine both if its input is a path and if it should
+        be rewritten.)
+        """
+
+
+class SimpleComputeEnvironment( object ):
+
+    def config_directory( self ):
+        return self.working_directory( )
+
+    def sep( self ):
+        return os.path.sep
+
+    def unstructured_path_rewriter( self ):
+        return lambda v: v
+
+
+class SharedComputeEnvironment( SimpleComputeEnvironment ):
+    """ Default ComputeEnviornment for job and task wrapper to pass
+    to ToolEvaluator - valid when Galaxy and compute share all the relevant
+    file systems.
+    """
+
+    def __init__( self, job_wrapper, job ):
+        self.app = job_wrapper.app
+        self.job_wrapper = job_wrapper
+        self.job = job
+
+    def output_paths( self ):
+        return self.job_wrapper.get_output_fnames()
+
+    def input_paths( self ):
+        return self.job_wrapper.get_input_paths( self.job )
+
+    def working_directory( self ):
+        return self.job_wrapper.working_directory
+
+    def new_file_path( self ):
+        return os.path.abspath( self.app.config.new_file_path )
+
+    def version_path( self ):
+        return self.job_wrapper.get_version_string_path()
+
+    def tool_directory( self ):
+        return os.path.abspath(self.job_wrapper.tool.tool_dir)
+
+
+class NoopQueue( object ):
+    """
+    Implements the JobQueue / JobStopQueue interface but does nothing
+    """
+    def put( self, *args, **kwargs ):
+        return
+
+    def put_stop( self, *args ):
+        return
+
+    def shutdown( self ):
+        return
+
+
+class ParallelismInfo(object):
+    """
+    Stores the information (if any) for running multiple instances of the tool in parallel
+    on the same set of inputs.
+    """
+    def __init__(self, tag):
+        self.method = tag.get('method')
+        if isinstance(tag, dict):
+            items = tag.items()
+        else:
+            items = tag.attrib.items()
+        self.attributes = dict( [ item for item in items if item[ 0 ] != 'method' ])
+        if len(self.attributes) == 0:
+            # legacy basic mode - provide compatible defaults
+            self.attributes['split_size'] = 20
+            self.attributes['split_mode'] = 'number_of_parts'
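+
+# Illustrative tool XML (attribute values assumed): a tag such as
+#     <parallelism method="multi" split_mode="number_of_parts" split_size="4"></parallelism>
+# yields method='multi' with attributes {'split_mode': 'number_of_parts', 'split_size': '4'},
+# while a bare <parallelism method="basic"/> takes the legacy-defaults branch above.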
diff --git a/lib/galaxy/jobs/actions/__init__.py b/lib/galaxy/jobs/actions/__init__.py
new file mode 100644
index 0000000..afde1ac
--- /dev/null
+++ b/lib/galaxy/jobs/actions/__init__.py
@@ -0,0 +1,4 @@
+"""
+This package contains job action classes.
+
+"""
diff --git a/lib/galaxy/jobs/actions/post.py b/lib/galaxy/jobs/actions/post.py
new file mode 100644
index 0000000..834fc9c
--- /dev/null
+++ b/lib/galaxy/jobs/actions/post.py
@@ -0,0 +1,384 @@
+"""
+Actions to be run at job completion (or output hda creation, as in the case of
+immediate_actions listed below).  Currently only used in workflows.
+"""
+
+import datetime
+import logging
+import socket
+from json import dumps
+from markupsafe import escape
+from galaxy.util import send_mail
+
+log = logging.getLogger( __name__ )
+
+
+class DefaultJobAction(object):
+    """
+    Base job action.
+    """
+    name = "DefaultJobAction"
+    verbose_name = "Default Job"
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict=None):
+        pass
+
+    @classmethod
+    def get_short_str(cls, pja):
+        if pja.action_arguments:
+            return "%s -> %s" % (pja.action_type, escape(pja.action_arguments))
+        else:
+            return "%s" % pja.action_type
+
+
+class EmailAction(DefaultJobAction):
+    """
+    This action sends an email to the galaxy user responsible for a job.
+    """
+    name = "EmailAction"
+    verbose_name = "Email Notification"
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        frm = app.config.email_from
+        if frm is None:
+            if action.action_arguments and 'host' in action.action_arguments:
+                host = action.action_arguments['host']
+            else:
+                host = socket.getfqdn()
+            frm = 'galaxy-no-reply@%s' % host
+        to = job.user.email
+        subject = "Galaxy workflow step notification '%s'" % (job.history.name)
+        outdata = ', '.join(ds.dataset.display_name() for ds in job.output_datasets)
+        body = "Your Galaxy job generating dataset '%s' is complete as of %s." % (outdata, datetime.datetime.now().strftime( "%I:%M" ))
+        try:
+            send_mail( frm, to, subject, body, app.config )
+        except Exception as e:
+            log.error("EmailAction PJA Failed, exception: %s" % e)
+
+    @classmethod
+    def get_short_str(cls, pja):
+        if pja.action_arguments and 'host' in pja.action_arguments:
+            return "Email the current user from server %s when this job is complete." % escape(pja.action_arguments['host'])
+        else:
+            return "Email the current user when this job is complete."
+
+
+class ChangeDatatypeAction(DefaultJobAction):
+    name = "ChangeDatatypeAction"
+    verbose_name = "Change Datatype"
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        for dataset_assoc in job.output_datasets:
+            if action.output_name == '' or dataset_assoc.name == action.output_name:
+                app.datatypes_registry.change_datatype( dataset_assoc.dataset, action.action_arguments['newtype'])
+
+    @classmethod
+    def get_short_str(cls, pja):
+        return "Set the datatype of output '%s' to '%s'" % (escape(pja.output_name),
+                                                            escape(pja.action_arguments['newtype']))
+
+
+class RenameDatasetAction(DefaultJobAction):
+    name = "RenameDatasetAction"
+    verbose_name = "Rename Dataset"
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        # Prevent renaming a dataset to the empty string.
+        if action.action_arguments and action.action_arguments.get('newname', ''):
+            new_name = action.action_arguments['newname']
+
+            #  TODO: Unify and simplify replacement options.
+            #      Add interface through workflow editor UI
+
+            #  The following if statement will process a request to rename
+            #  using an input file name.
+            #  TODO: Replace all matching code with regex
+            #  Proper syntax is #{input_file_variable | option 1 | option n}
+            #    where
+            #      input_file_variable = the name of a module input variable
+            #      |  = the delimiter for added options. Optional if no options.
+            #      options = basename, upper, lower
+            #      basename = keep all of the file name except the extension
+            #                 (everything before the final ".")
+            #      upper = force the file name to upper case
+            #      lower = force the file name to lower case
+            #  suggested additions:
+            #      "replace" option so you can replace a portion of the name,
+            #      support multiple #{name} in one rename action...
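+            #  Worked example (names assumed): with an input variable 'input1'
+            #  bound to a dataset named 'reads.fastq.gz', a newname of
+            #      "#{input1 | basename} trimmed"
+            #  renames the output to 'reads.fastq trimmed'.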
+
+            while new_name.find("#{") > -1:
+                #  This assumes a single instance of #{variable} will exist
+                start_pos = new_name.find("#{") + 2
+                end_pos = new_name.find("}")
+                to_be_replaced = new_name[start_pos:end_pos]
+                input_file_var = to_be_replaced
+                #  Pull out the piped controls and store them for later
+                #  parsing.
+                tokens = to_be_replaced.split("|")
+                operations = []
+                if len(tokens) > 1:
+                    input_file_var = tokens[0].strip()
+
+                    # Treat . as special symbol (breaks parameter names anyway)
+                    # to allow access to repeat elements, for instance first
+                    # repeat in cat1 would be something like queries_0.input2.
+                    input_file_var = input_file_var.replace(".", "|")
+
+                    for i in range(1, len(tokens)):
+                        operations.append(tokens[i].strip())
+                replacement = ""
+                #  Loop through the inputs to find one with the "to_be_replaced"
+                #  variable name, and get the replacement name
+                for input_assoc in job.input_datasets:
+                    if input_assoc.name == input_file_var:
+                        replacement = input_assoc.dataset.name
+
+                #  Do operations on replacement
+                #  Any control that is not defined will be ignored.
+                #  This should be moved out to a class or module function
+                for operation in operations:
+                    # Basename returns everything prior to the final '.'
+                    if operation == "basename":
+                        fields = replacement.split(".")
+                        replacement = fields[0]
+                        if len(fields) > 1:
+                            temp = ""
+                            for i in range(1, len(fields) - 1):
+                                temp += "." + fields[i]
+                            replacement += temp
+                    elif operation == "upper":
+                        replacement = replacement.upper()
+                    elif operation == "lower":
+                        replacement = replacement.lower()
+
+                new_name = new_name.replace("#{%s}" % to_be_replaced, replacement)
+
+            if replacement_dict:
+                for k, v in replacement_dict.iteritems():
+                    new_name = new_name.replace("${%s}" % k, v)
+            for dataset_assoc in job.output_datasets:
+                if action.output_name == '' or dataset_assoc.name == action.output_name:
+                    dataset_assoc.dataset.name = new_name
+
+    @classmethod
+    def get_short_str(cls, pja):
+        # Prevent renaming a dataset to the empty string.
+        if pja.action_arguments and pja.action_arguments.get('newname', ''):
+            return "Rename output '%s' to '%s'." % (escape(pja.output_name),
+                                                    escape(pja.action_arguments['newname']))
+        else:
+            return "Rename action used without a new name specified.  Output name will be unchanged."
+
+
+class HideDatasetAction(DefaultJobAction):
+    name = "HideDatasetAction"
+    verbose_name = "Hide Dataset"
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        for dataset_assoc in job.output_datasets:
+            if dataset_assoc.dataset.state != dataset_assoc.dataset.states.ERROR and ( action.output_name == '' or dataset_assoc.name == action.output_name ):
+                dataset_assoc.dataset.visible = False
+
+    @classmethod
+    def get_short_str(cls, pja):
+        return "Hide output '%s'." % escape(pja.output_name)
+
+
+class DeleteDatasetAction(DefaultJobAction):
+    # This is disabled for now.  Deleting a dataset in the middle of a workflow causes errors (obviously) for the subsequent steps using the data.
+    name = "DeleteDatasetAction"
+    verbose_name = "Delete Dataset"
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        for dataset_assoc in job.output_datasets:
+            if action.output_name == '' or dataset_assoc.name == action.output_name:
+                dataset_assoc.dataset.deleted = True
+
+    @classmethod
+    def get_short_str(cls, pja):
+        return "Delete this dataset after creation."
+
+
+class ColumnSetAction(DefaultJobAction):
+    name = "ColumnSetAction"
+    verbose_name = "Assign Columns"
+
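+    # Illustrative (metadata names assumed): action_arguments of
+    # {'chromCol': 'c1', 'startCol': '2'} would set
+    # dataset.metadata.chromCol = 1 and dataset.metadata.startCol = 2.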
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        for dataset_assoc in job.output_datasets:
+            if action.output_name == '' or dataset_assoc.name == action.output_name:
+                for k, v in action.action_arguments.items():
+                    if v != '':
+                        # Accept both pure integer and 'cX' column formats.
+                        if v[0] == 'c':
+                            v = v[1:]
+                        v = int(v)
+                        if v != 0:
+                            setattr(dataset_assoc.dataset.metadata, k, v)
+
+    @classmethod
+    def get_short_str(cls, pja):
+        return "Set the following metadata values:<br/>" + "<br/>".join(['%s : %s' % (escape(k), escape(v)) for k, v in pja.action_arguments.iteritems()])
+
+
+class SetMetadataAction(DefaultJobAction):
+    name = "SetMetadataAction"
+    # DBTODO Setting of Metadata is currently broken and disabled.  It should not be used (yet).
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        for data in job.output_datasets:
+            data.set_metadata( action.action_arguments['newtype'] )
+
+
+class DeleteIntermediatesAction(DefaultJobAction):
+    name = "DeleteIntermediatesAction"
+    verbose_name = "Delete Non-Output Completed Intermediate Steps"
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        # TODO Optimize this later.  Just making it work for now.
+        # TODO Support purging as well as deletion if user_purge is enabled.
+        # Dataset candidates for deletion must:
+        # 1) Be created by the workflow.
+        # 2) Not have any job_to_input_dataset associations with states other
+        #    than OK or DELETED.  If a step errors, we don't want to
+        #    delete/purge it automatically.
+        # 3) Not be marked as a workflow output.
+        # POTENTIAL ISSUES:  When many outputs are being finish()ed
+        # concurrently, sometimes non-terminal steps won't be cleaned up
+        # because of the lag in job state updates.
+        sa_session.flush()
+        if not job.workflow_invocation_step:
+            log.debug("This job is not part of a workflow invocation, delete intermediates aborted.")
+            return
+        wfi = job.workflow_invocation_step.workflow_invocation
+        sa_session.refresh(wfi)
+        if wfi.active:
+            log.debug("Workflow still scheduling so new jobs may appear, skipping deletion of intermediate files.")
+            # Still evaluating workflow so we don't yet have all workflow invocation
+            # steps to start looking at.
+            return
+        outputs_defined = wfi.workflow.has_outputs_defined()
+        if outputs_defined:
+            wfi_steps = [wfistep for wfistep in wfi.steps if not wfistep.workflow_step.workflow_outputs and wfistep.workflow_step.type == "tool"]
+            jobs_to_check = []
+            for wfi_step in wfi_steps:
+                sa_session.refresh(wfi_step)
+                wfi_step_job = wfi_step.job
+                if wfi_step_job:
+                    jobs_to_check.append(wfi_step_job)
+                else:
+                    log.debug("No job found yet for wfi_step %s, (step %s)" % (wfi_step, wfi_step.workflow_step))
+            for j2c in jobs_to_check:
+                creating_jobs = []
+                for input_dataset in j2c.input_datasets:
+                    if not input_dataset.dataset:
+                        log.debug("PJA Async Issue: No dataset attached to input_dataset %s during handling of workflow invocation %s" % (input_dataset.id, wfi))
+                    elif not input_dataset.dataset.creating_job:
+                        log.debug("PJA Async Issue: No creating job attached to dataset %s during handling of workflow invocation %s" % (input_dataset.dataset.id, wfi))
+                    else:
+                        creating_jobs.append((input_dataset, input_dataset.dataset.creating_job))
+                for (input_dataset, creating_job) in creating_jobs:
+                    sa_session.refresh(creating_job)
+                    sa_session.refresh(input_dataset)
+                for input_dataset in [x.dataset for (x, creating_job) in creating_jobs if creating_job.workflow_invocation_step and creating_job.workflow_invocation_step.workflow_invocation == wfi]:
+                    # note that the above input_dataset is a reference to a
+                    # job.input_dataset.dataset at this point
+                    safe_to_delete = True
+                    for job_to_check in [d_j.job for d_j in input_dataset.dependent_jobs]:
+                        if job_to_check != job and job_to_check.state not in [job.states.OK, job.states.DELETED]:
+                            log.debug("Workflow Intermediates cleanup attempted, but non-terminal state '%s' detected for job %s" % (job_to_check.state, job_to_check.id))
+                            safe_to_delete = False
+                    if safe_to_delete:
+                        # Support purging here too.
+                        input_dataset.mark_deleted()
+        else:
+            # No workflow outputs defined, so we can't know what to delete.
+            # We could make this work differently in the future.
+            pass
+
+    @classmethod
+    def get_short_str(cls, pja):
+        return "Delete parent datasets of this step created in this workflow that aren't flagged as outputs."
+
+
+class TagDatasetAction(DefaultJobAction):
+    name = "TagDatasetAction"
+    verbose_name = "Add tag to dataset"
+
+    @classmethod
+    def execute(cls, app, sa_session, action, job, replacement_dict):
+        if action.action_arguments:
+            tags = [t.strip() for t in action.action_arguments.get('tags', '').split(',') if t.strip()]
+            if tags:
+                for dataset_assoc in job.output_datasets:
+                    if action.output_name == '' or dataset_assoc.name == action.output_name:
+                        app.tag_handler.set_tags_from_list( job.user, dataset_assoc.dataset, tags)
+            sa_session.flush()
+
+    @classmethod
+    def get_short_str(cls, pja):
+        if pja.action_arguments and pja.action_arguments.get('tags', ''):
+            return "Add tag(s) '%s' to '%s'." % (escape(pja.action_arguments['tags']),
+                                                 escape(pja.output_name))
+        else:
+            return "Tag addition action used without a tag specified.  No tag will be added."
+
+
+class ActionBox(object):
+
+    actions = { "RenameDatasetAction": RenameDatasetAction,
+                "HideDatasetAction": HideDatasetAction,
+                "ChangeDatatypeAction": ChangeDatatypeAction,
+                "ColumnSetAction": ColumnSetAction,
+                "EmailAction": EmailAction,
+                "DeleteIntermediatesAction": DeleteIntermediatesAction,
+                "TagDatasetAction": TagDatasetAction,
+                }
+    public_actions = ['RenameDatasetAction', 'ChangeDatatypeAction',
+                      'ColumnSetAction', 'EmailAction',
+                      'DeleteIntermediatesAction', 'TagDatasetAction']
+    immediate_actions = ['ChangeDatatypeAction', 'RenameDatasetAction',
+                         'TagDatasetAction']
+
+    @classmethod
+    def get_short_str(cls, action):
+        if action.action_type in ActionBox.actions:
+            return ActionBox.actions[action.action_type].get_short_str(action)
+        else:
+            return "Unknown Action"
+
+    @classmethod
+    def handle_incoming(cls, incoming):
+        npd = {}
+        for key, val in incoming.iteritems():
+            if key.startswith('pja'):
+                sp = key.split('__')
+                ao_key = sp[2] + sp[1]
+                # flag / output_name / pjatype / desc
+                if ao_key not in npd:
+                    npd[ao_key] = {'action_type': sp[2],
+                                   'output_name': sp[1],
+                                   'action_arguments': {}}
+                if len(sp) > 3:
+                    if sp[3] == 'output_name':
+                        npd[ao_key]['output_name'] = val
+                    else:
+                        npd[ao_key]['action_arguments'][sp[3]] = val
+            else:
+                # Not pja stuff.
+                pass
+        return dumps(npd)
+
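+    # Example (illustrative values): an incoming form entry such as
+    #   'pja__out_file1__RenameDatasetAction__newname': 'Mapped reads'
+    # splits into ['pja', 'out_file1', 'RenameDatasetAction', 'newname'] and
+    # is collected under the key 'RenameDatasetActionout_file1' as
+    #   {'action_type': 'RenameDatasetAction',
+    #    'output_name': 'out_file1',
+    #    'action_arguments': {'newname': 'Mapped reads'}}
+    # before the whole dict is serialized with dumps().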
+    @classmethod
+    def execute(cls, app, sa_session, pja, job, replacement_dict=None):
+        if pja.action_type in ActionBox.actions:
+            ActionBox.actions[pja.action_type].execute(app, sa_session, pja, job, replacement_dict)
diff --git a/lib/galaxy/jobs/command_factory.py b/lib/galaxy/jobs/command_factory.py
new file mode 100644
index 0000000..b8cc079
--- /dev/null
+++ b/lib/galaxy/jobs/command_factory.py
@@ -0,0 +1,246 @@
+from logging import getLogger
+from os import getcwd
+from os.path import (
+    abspath,
+    join
+)
+
+from galaxy import util
+from galaxy.jobs.runners.util.job_script import (
+    check_script_integrity,
+    INTEGRITY_INJECTION,
+    write_script,
+)
+
+log = getLogger( __name__ )
+
+CAPTURE_RETURN_CODE = "return_code=$?"
+YIELD_CAPTURED_CODE = 'sh -c "exit $return_code"'
+
+
+def build_command(
+    runner,
+    job_wrapper,
+    container=None,
+    modify_command_for_container=True,
+    include_metadata=False,
+    include_work_dir_outputs=True,
+    create_tool_working_directory=True,
+    remote_command_params={},
+    metadata_directory=None,
+):
+    """
+    Compose the sequence of commands necessary to execute a job. This will
+    currently include:
+
+        - environment settings corresponding to any requirement tags
+        - preparing input files
+        - command line taken from job wrapper
+        - commands to set metadata (if include_metadata is True)
+    """
+    shell = job_wrapper.shell
+    base_command_line = job_wrapper.get_command_line()
+    # job_id = job_wrapper.job_id
+    # log.debug( 'Tool evaluation for job (%s) produced command-line: %s' % ( job_id, base_command_line ) )
+    if not base_command_line:
+        raise Exception("Attempting to run a tool with empty command definition.")
+
+    commands_builder = CommandsBuilder(base_command_line)
+
+    # All job runners currently handle this case, which should never occur.
+    if not commands_builder.commands:
+        return None
+
+    __handle_version_command(commands_builder, job_wrapper)
+    __handle_task_splitting(commands_builder, job_wrapper)
+
+    # One could imagine also allowing dependencies inside of the container but
+    # that is too sophisticated for a first crack at this - build your
+    # containers ready to go!
+    if not container or container.resolve_dependencies:
+        __handle_dependency_resolution(commands_builder, job_wrapper, remote_command_params)
+
+    if (container and modify_command_for_container) or job_wrapper.commands_in_new_shell:
+        if container and modify_command_for_container:
+            # Many Docker containers do not have /bin/bash.
+            external_command_shell = container.shell
+        else:
+            external_command_shell = shell
+        externalized_commands = __externalize_commands(job_wrapper, external_command_shell, commands_builder, remote_command_params)
+        if container and modify_command_for_container:
+            # Stop now and build the command before handling metadata and
+            # copying working directory files back. These should always happen
+            # outside of the Docker container - there are no security
+            # implications when generating metadata, Galaxy doesn't need to be
+            # available to the container, and not copying workdir outputs back
+            # means one can be more restrictive about where the container can
+            # write in some circumstances.
+            run_in_container_command = container.containerize_command(
+                externalized_commands
+            )
+            commands_builder = CommandsBuilder( run_in_container_command )
+        else:
+            commands_builder = CommandsBuilder( externalized_commands )
+
+    # Don't need to create a separate tool working directory for Pulsar
+    # jobs - that is handled by Pulsar.
+    if create_tool_working_directory:
+        # usually the working directory will already exist, but it will not
+        # for task-split jobs.
+        commands_builder.prepend_command("mkdir -p working; cd working")
+
+    if include_work_dir_outputs:
+        __handle_work_dir_outputs(commands_builder, job_wrapper, runner, remote_command_params)
+
+    commands_builder.capture_return_code()
+
+    if include_metadata and job_wrapper.requires_setting_metadata:
+        metadata_directory = metadata_directory or job_wrapper.working_directory
+        commands_builder.append_command("cd '%s'" % metadata_directory)
+        __handle_metadata(commands_builder, job_wrapper, runner, remote_command_params)
+
+    return commands_builder.build()
+
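+# Putting it together: for a simple local job with metadata, the composed
+# command looks something like this sketch (actual fragments vary by runner
+# and configuration):
+#
+#     mkdir -p working; cd working; <dependency shell commands>;
+#     <tool command or tool_script.sh>; return_code=$?;
+#     <copy work-dir outputs>; cd '<metadata directory>';
+#     <set metadata command>; sh -c "exit $return_code"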
+
+def __externalize_commands(job_wrapper, shell, commands_builder, remote_command_params, script_name="tool_script.sh"):
+    local_container_script = join( job_wrapper.working_directory, script_name )
+    tool_commands = commands_builder.build()
+    config = job_wrapper.app.config
+    integrity_injection = ""
+    # Setting shell to 'none' in job_conf.xml disables creating a tool command
+    # script; 'set -e' doesn't work for composite commands, but skipping the
+    # script is necessary for Windows jobs, for instance.
+    if shell and shell.lower() == 'none':
+        return tool_commands
+    if check_script_integrity(config):
+        integrity_injection = INTEGRITY_INJECTION
+    set_e = ""
+    if job_wrapper.strict_shell:
+        set_e = "set -e\n"
+    script_contents = u"#!%s\n%s%s%s" % (
+        shell,
+        integrity_injection,
+        set_e,
+        tool_commands
+    )
+    write_script(local_container_script, script_contents, config)
+    commands = local_container_script
+    if 'working_directory' in remote_command_params:
+        commands = "%s %s" % (shell, join(remote_command_params['working_directory'], script_name))
+    log.info("Built script [%s] for tool command [%s]" % (local_container_script, tool_commands))
+    return commands
+
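+# For example, with shell '/bin/bash', integrity injection disabled and
+# strict_shell enabled, the tool_script.sh written above would contain
+# (illustrative):
+#
+#     #!/bin/bash
+#     set -e
+#     <tool command line>
+#
+# and the returned command is the local script path, re-rooted under the
+# remote working_directory when one is supplied (e.g. for Pulsar).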
+
+def __handle_version_command(commands_builder, job_wrapper):
+    # Prepend version string
+    write_version_cmd = job_wrapper.write_version_cmd
+    if write_version_cmd:
+        commands_builder.prepend_command(write_version_cmd)
+
+
+def __handle_task_splitting(commands_builder, job_wrapper):
+    # prepend getting input files (if defined)
+    if getattr(job_wrapper, 'prepare_input_files_cmds', None):
+        commands_builder.prepend_commands(job_wrapper.prepare_input_files_cmds)
+
+
+def __handle_dependency_resolution(commands_builder, job_wrapper, remote_command_params):
+    local_dependency_resolution = remote_command_params.get("dependency_resolution", "local") == "local"
+
+    # Prepend dependency injection
+    if job_wrapper.dependency_shell_commands and local_dependency_resolution:
+        commands_builder.prepend_commands(job_wrapper.dependency_shell_commands)
+
+
+def __handle_work_dir_outputs(commands_builder, job_wrapper, runner, remote_command_params):
+    # Append commands to copy job outputs based on from_work_dir attribute.
+    work_dir_outputs_kwds = {}
+    if 'working_directory' in remote_command_params:
+        work_dir_outputs_kwds['job_working_directory'] = remote_command_params['working_directory']
+    work_dir_outputs = runner.get_work_dir_outputs( job_wrapper, **work_dir_outputs_kwds )
+    if work_dir_outputs:
+        commands_builder.capture_return_code()
+        copy_commands = map(__copy_if_exists_command, work_dir_outputs)
+        commands_builder.append_commands(copy_commands)
+
+
+def __handle_metadata(commands_builder, job_wrapper, runner, remote_command_params):
+    # Append metadata-setting commands; we don't want to overwrite metadata
+    # that was copied over in init_meta(), as per established behavior.
+    metadata_kwds = remote_command_params.get('metadata_kwds', {})
+    exec_dir = metadata_kwds.get( 'exec_dir', abspath( getcwd() ) )
+    tmp_dir = metadata_kwds.get( 'tmp_dir', job_wrapper.working_directory )
+    dataset_files_path = metadata_kwds.get( 'dataset_files_path', runner.app.model.Dataset.file_path )
+    output_fnames = metadata_kwds.get( 'output_fnames', job_wrapper.get_output_fnames() )
+    config_root = metadata_kwds.get( 'config_root', None )
+    config_file = metadata_kwds.get( 'config_file', None )
+    datatypes_config = metadata_kwds.get( 'datatypes_config', None )
+    compute_tmp_dir = metadata_kwds.get( 'compute_tmp_dir', None )
+    resolve_metadata_dependencies = job_wrapper.commands_in_new_shell
+    metadata_command = job_wrapper.setup_external_metadata(
+        exec_dir=exec_dir,
+        tmp_dir=tmp_dir,
+        dataset_files_path=dataset_files_path,
+        output_fnames=output_fnames,
+        set_extension=False,
+        config_root=config_root,
+        config_file=config_file,
+        datatypes_config=datatypes_config,
+        compute_tmp_dir=compute_tmp_dir,
+        resolve_metadata_dependencies=resolve_metadata_dependencies,
+        kwds={ 'overwrite': False }
+    ) or ''
+    metadata_command = metadata_command.strip()
+    if metadata_command:
+        commands_builder.capture_return_code()
+        commands_builder.append_command(metadata_command)
+
+
+def __copy_if_exists_command(work_dir_output):
+    source_file, destination = work_dir_output
+    return "if [ -f %s ] ; then cp %s %s ; fi" % ( source_file, source_file, destination )
+
+
+class CommandsBuilder(object):
+
+    def __init__(self, initial_command=u''):
+        # Remove the trailing semicolon so we can start hacking up this command.
+        # TODO: Refactor to compose a list and join with ';'; that would be cleaner.
+        initial_command = util.unicodify(initial_command)
+        commands = initial_command.rstrip(u"; ")
+        self.commands = commands
+
+        # Copying work dir outputs or setting metadata will mask the return
+        # code of the tool command. If these are used, capture the return code
+        # and ensure the last thing that happens is an exit with that code.
+        self.return_code_captured = False
+
+    def prepend_command(self, command):
+        if command:
+            self.commands = u"%s; %s" % (command,
+                                         self.commands)
+        return self
+
+    def prepend_commands(self, commands):
+        return self.prepend_command(u"; ".join(c for c in commands if c))
+
+    def append_command(self, command):
+        if command:
+            self.commands = u"%s; %s" % (self.commands,
+                                         command)
+        return self
+
+    def append_commands(self, commands):
+        self.append_command(u"; ".join(c for c in commands if c))
+
+    def capture_return_code(self):
+        if not self.return_code_captured:
+            self.return_code_captured = True
+            self.append_command(CAPTURE_RETURN_CODE)
+
+    def build(self):
+        if self.return_code_captured:
+            self.append_command(YIELD_CAPTURED_CODE)
+        return self.commands
+
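+# Illustrative usage of CommandsBuilder (not part of the module): capture the
+# tool's exit status, run a follow-up command, then re-raise the status.
+#
+#     builder = CommandsBuilder(u"echo hello")
+#     builder.capture_return_code()
+#     builder.append_command("cp working/out.dat outputs/out.dat")
+#     builder.build()
+#     # -> u'echo hello; return_code=$?; cp working/out.dat outputs/out.dat;
+#     #    sh -c "exit $return_code"'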
+
+__all__ = ( "build_command", )
diff --git a/lib/galaxy/jobs/datasets.py b/lib/galaxy/jobs/datasets.py
new file mode 100644
index 0000000..9fcea9c
--- /dev/null
+++ b/lib/galaxy/jobs/datasets.py
@@ -0,0 +1,109 @@
+"""
+Utility classes allowing Job interface to reason about datasets.
+"""
+import os.path
+from abc import ABCMeta
+from abc import abstractmethod
+
+
+def dataset_path_rewrites( dataset_paths ):
+    dataset_paths_with_rewrites = filter( lambda path: getattr( path, "false_path", None ), dataset_paths )
+    return dict( [ ( dp.real_path, dp ) for dp in dataset_paths_with_rewrites ] )
+
+
+class DatasetPath( object ):
+
+    def __init__(
+        self,
+        dataset_id,
+        real_path,
+        false_path=None,
+        false_extra_files_path=None,
+        mutable=True
+    ):
+        self.dataset_id = dataset_id
+        self.real_path = real_path
+        self.false_path = false_path
+        self.false_extra_files_path = false_extra_files_path
+        self.mutable = mutable
+
+    def __str__( self ):
+        if self.false_path is None:
+            return self.real_path
+        else:
+            return self.false_path
+
+    def with_path_for_job( self, false_path, false_extra_files_path=None ):
+        """
+        Clone the dataset path but with a new false_path.
+        """
+        dataset_path = self
+        if false_path is not None:
+            dataset_path = DatasetPath(
+                dataset_id=self.dataset_id,
+                real_path=self.real_path,
+                false_path=false_path,
+                false_extra_files_path=false_extra_files_path,
+                mutable=self.mutable,
+            )
+        return dataset_path
+
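+# Illustrative behavior (values hypothetical): a DatasetPath renders as its
+# false_path when one is set, so command lines built via str() automatically
+# pick up rewritten locations.
+#
+#     dp = DatasetPath( 1, "/galaxy/files/dataset_1.dat",
+#                       false_path="/scratch/job1/galaxy_dataset_1.dat" )
+#     str( dp )                        # -> "/scratch/job1/galaxy_dataset_1.dat"
+#     dataset_path_rewrites( [ dp ] )  # -> { "/galaxy/files/dataset_1.dat": dp }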
+
+class DatasetPathRewriter( object ):
+    """ Used by runner to rewrite paths. """
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def rewrite_dataset_path( self, dataset, dataset_type ):
+        """
+        Dataset type is 'input' or 'output'.
+        Return None to indicate not to rewrite this path.
+        """
+
+
+class NullDatasetPathRewriter( object ):
+    """ Used by default for jobwrapper, do not rewrite anything.
+    """
+
+    def rewrite_dataset_path( self, dataset, dataset_type ):
+        """ Keep path the same.
+        """
+        return None
+
+
+class OutputsToWorkingDirectoryPathRewriter( object ):
+    """ Rewrites all paths to place them in the specified working
+    directory for normal jobs when Galaxy is configured with
+    app.config.outputs_to_working_directory. Job runner base class
+    is responsible for copying these out after job is complete.
+    """
+
+    def __init__( self, working_directory ):
+        self.working_directory = working_directory
+
+    def rewrite_dataset_path( self, dataset, dataset_type ):
+        """ Keep path the same.
+        """
+        if dataset_type == 'output':
+            false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % dataset.id ) )
+            return false_path
+        else:
+            return None
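+    # e.g. an output dataset with id 7 is rewritten to
+    # <working_directory>/galaxy_dataset_7.dat; input paths are left alone.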
+
+
+class TaskPathRewriter( object ):
+    """ Rewrites all paths to place them in the specified working
+    directory for TaskWrapper. TaskWrapper is responsible for putting
+    them there and pulling them out.
+    """
+
+    def __init__( self, working_directory, job_dataset_path_rewriter ):
+        self.working_directory = working_directory
+        self.job_dataset_path_rewriter = job_dataset_path_rewriter
+
+    def rewrite_dataset_path( self, dataset, dataset_type ):
+        """
+        """
+        dataset_file_name = dataset.file_name
+        job_file_name = self.job_dataset_path_rewriter.rewrite_dataset_path( dataset, dataset_type ) or dataset_file_name
+        return os.path.join( self.working_directory, os.path.basename( job_file_name ) )
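+        # e.g. /galaxy/files/dataset_7.dat (or its job-level false path)
+        # ends up as <task working_directory>/dataset_7.dat (path illustrative).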
diff --git a/lib/galaxy/jobs/deferred/__init__.py b/lib/galaxy/jobs/deferred/__init__.py
new file mode 100644
index 0000000..1a0087b
--- /dev/null
+++ b/lib/galaxy/jobs/deferred/__init__.py
@@ -0,0 +1,191 @@
+"""
+Queue for running deferred code via plugins.
+"""
+import logging
+import os
+import threading
+from Queue import Queue
+
+from galaxy import model
+from galaxy.util.bunch import Bunch
+from galaxy.util.sleeper import Sleeper
+
+log = logging.getLogger( __name__ )
+
+
+class DeferredJobQueue( object ):
+    job_states = Bunch( READY='ready',
+                        WAIT='wait',
+                        INVALID='invalid' )
+
+    def __init__( self, app ):
+        self.app = app
+        self.sa_session = app.model.context.current
+        self.queue = Queue()
+        self.plugins = {}
+        self._load_plugins()
+        self.sleeper = Sleeper()
+        self.running = True
+        self.waiting_jobs = []
+        self.__check_jobs_at_startup()
+        self.monitor_thread = threading.Thread( target=self.__monitor )
+        self.monitor_thread.start()
+        log.info( 'Deferred job queue started' )
+
+    def _load_plugins( self ):
+        for fname in os.listdir( os.path.dirname( __file__ ) ):
+            if not fname.startswith( '_' ) and fname.endswith( '.py' ):
+                name = fname[:-3]
+                module_name = 'galaxy.jobs.deferred.' + name
+                try:
+                    module = __import__( module_name )
+                except:
+                    log.exception( 'Deferred job plugin appears to exist but is not loadable: %s' % module_name )
+                    continue
+                for comp in module_name.split( "." )[1:]:
+                    module = getattr( module, comp )
+                if '__all__' not in dir( module ):
+                    log.error( 'Plugin "%s" does not contain a list of exported classes in __all__' % module_name )
+                    continue
+                for obj in module.__all__:
+                    display_name = ':'.join( ( module_name, obj ) )
+                    plugin = getattr( module, obj )
+                    for name in ( 'check_job', 'run_job' ):
+                        if name not in dir( plugin ):
+                            log.error( 'Plugin "%s" does not contain required method "%s()"' % ( display_name, name ) )
+                            break
+                    else:
+                        self.plugins[obj] = plugin( self.app )
+                        self.plugins[obj].job_states = self.job_states
+                        log.debug( 'Loaded deferred job plugin: %s' % display_name )
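+    # A minimal plugin module living in this package would satisfy the loader
+    # above with something like this sketch (all names hypothetical):
+    #
+    #     __all__ = ( 'ExamplePlugin', )
+    #
+    #     class ExamplePlugin( object ):
+    #         def __init__( self, app ):
+    #             self.app = app
+    #
+    #         def check_job( self, job ):
+    #             return self.job_states.READY
+    #
+    #         def run_job( self, job ):
+    #             pass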
+
+    def __check_jobs_at_startup( self ):
+        waiting_jobs = self.sa_session.query( model.DeferredJob ) \
+                                      .filter( model.DeferredJob.state == model.DeferredJob.states.WAITING ).all()
+        for job in waiting_jobs:
+            if not self.__check_job_plugin( job ):
+                continue
+            if 'check_interval' in dir( self.plugins[job.plugin] ):
+                job.check_interval = self.plugins[job.plugin].check_interval
+            log.info( 'Recovered deferred job (id: %s) at startup' % job.id )
+            # Pass the job ID as opposed to the job, since the monitor thread
+            # needs to load it in its own threadlocal scoped session.
+            self.waiting_jobs.append( job.id )
+
+    def __monitor( self ):
+        while self.running:
+            try:
+                self.__monitor_step()
+            except:
+                log.exception( 'Exception in monitor_step' )
+            self.sleeper.sleep( 1 )
+        log.info( 'Deferred job queue stopped' )
+
+    def __monitor_step( self ):
+        # TODO: Querying the database with this frequency is bad, we need message passing
+        new_jobs = self.sa_session.query( model.DeferredJob ) \
+                                  .filter( model.DeferredJob.state == model.DeferredJob.states.NEW ).all()
+        for job in new_jobs:
+            if not self.__check_job_plugin( job ):
+                continue
+            job.state = model.DeferredJob.states.WAITING
+            self.sa_session.add( job )
+            self.sa_session.flush()
+            if 'check_interval' in dir( self.plugins[job.plugin] ):
+                job.check_interval = self.plugins[job.plugin].check_interval
+            self.waiting_jobs.append( job )
+        new_waiting = []
+        for job in self.waiting_jobs:
+            try:
+                # Recovered jobs are passed in by ID
+                assert type( job ) is int
+                job = self.sa_session.query( model.DeferredJob ).get( job )
+            except:
+                pass
+            if job.is_check_time:
+                try:
+                    job_state = self.plugins[job.plugin].check_job( job )
+                except Exception as e:
+                    self.__fail_job( job )
+                    log.exception( 'Set deferred job %s to error because of an exception in check_job(): %s' % ( job.id, str( e ) ) )
+                    continue
+                if job_state == self.job_states.READY:
+                    try:
+                        self.plugins[job.plugin].run_job( job )
+                    except Exception as e:
+                        self.__fail_job( job )
+                        log.exception( 'Set deferred job %s to error because of an exception in run_job(): %s' % ( job.id, str( e ) ) )
+                        continue
+                elif job_state == self.job_states.INVALID:
+                    self.__fail_job( job )
+                    log.error( 'Unable to run deferred job (id: %s): Plugin "%s" marked it as invalid' % ( job.id, job.plugin ) )
+                    continue
+                else:
+                    new_waiting.append( job )
+                job.last_check = 'now'
+            else:
+                new_waiting.append( job )
+        self.waiting_jobs = new_waiting
+
+    def __check_job_plugin( self, job ):
+        if job.plugin not in self.plugins:
+            log.error( 'Invalid deferred job plugin: %s' % job.plugin )
+            job.state = model.DeferredJob.states.ERROR
+            self.sa_session.add( job )
+            self.sa_session.flush()
+            return False
+        return True
+
+    def __check_if_ready_to_run( self, job ):
+        return self.plugins[job.plugin].check_job( job )
+
+    def __fail_job( self, job ):
+        job.state = model.DeferredJob.states.ERROR
+        self.sa_session.add( job )
+        self.sa_session.flush()
+
+    def shutdown( self ):
+        self.running = False
+        self.sleeper.wake()
+
+
+class FakeTrans( object ):
+    """A fake trans for calling the external set metadata tool"""
+    def __init__( self, app, history=None, user=None):
+        class Dummy( object ):
+            def __init__( self ):
+                self.id = None
+        self.app = app
+        self.sa_session = app.model.context.current
+        self.dummy = Dummy()
+        if not history:
+            self.history = Dummy()
+        else:
+            self.history = history
+        if not user:
+            self.user = Dummy()
+        else:
+            self.user = user
+        self.model = app.model
+
+    def get_galaxy_session( self ):
+        return self.dummy
+
+    def log_event( self, message, tool_id=None ):
+        pass
+
+    def get_current_user_roles( self ):
+        if self.user:
+            return self.user.all_roles()
+        else:
+            return []
+
+    def db_dataset_for( self, dbkey ):
+        if self.history is None:
+            return None
+        datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
+                                  .filter_by( deleted=False, history_id=self.history.id, extension="len" )
+        for ds in datasets:
+            if dbkey == ds.dbkey:
+                return ds
+        return None
diff --git a/lib/galaxy/jobs/deferred/data_transfer.py b/lib/galaxy/jobs/deferred/data_transfer.py
new file mode 100644
index 0000000..ee8ec8b
--- /dev/null
+++ b/lib/galaxy/jobs/deferred/data_transfer.py
@@ -0,0 +1,384 @@
+"""
+Module for managing data transfer jobs.
+"""
+import logging
+import re
+import shutil
+
+from sqlalchemy import and_
+
+from galaxy.datatypes import sniff
+from galaxy.jobs.actions.post import ActionBox
+from galaxy.jobs.deferred import FakeTrans
+from galaxy.tools.parameters import visit_input_values
+from galaxy.tools.parameters.basic import DataToolParameter
+from galaxy.util.odict import odict
+from galaxy.workflow.modules import module_factory
+
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'DataTransfer', )
+
+
+class DataTransfer( object ):
+    check_interval = 15
+    dataset_name_re = re.compile( r'(dataset\d+)_(name)' )
+    dataset_datatype_re = re.compile( r'(dataset\d+)_(datatype)' )
+
+    def __init__( self, app ):
+        self.app = app
+        self.sa_session = app.model.context.current
+
+    def create_job( self, trans, **kwd ):
+        raise Exception( "Unimplemented Method" )
+
+    def check_job( self, job ):
+        raise Exception( "Unimplemented Method" )
+
+    def run_job( self, job ):
+        if job.params[ 'type' ] == 'init_transfer':
+            # TODO: don't create new downloads on restart.
+            if job.params[ 'protocol' ] in [ 'http', 'https' ]:
+                results = []
+                for result in job.params[ 'results' ].values():
+                    result[ 'transfer_job' ] = self.app.transfer_manager.new( protocol=job.params[ 'protocol' ],
+                                                                              name=result[ 'name' ],
+                                                                              datatype=result[ 'datatype' ],
+                                                                              url=result[ 'url' ] )
+                    results.append( result )
+            elif job.params[ 'protocol' ] == 'scp':
+                results = []
+                result = {}
+                sample_datasets_dict = job.params[ 'sample_datasets_dict' ]
+                # sample_datasets_dict looks something like the following.  The outer dictionary keys are SampleDataset ids.
+                # {'7': {'status': 'Not started', 'name': '3.bed', 'file_path': '/tmp/library/3.bed', 'sample_id': 7,
+                #        'external_service_id': 2, 'error_msg': '', 'size': '8.0K'}}
+                for sample_dataset_id, sample_dataset_info_dict in sample_datasets_dict.items():
+                    result = {}
+                    result[ 'transfer_job' ] = self.app.transfer_manager.new( protocol=job.params[ 'protocol' ],
+                                                                              host=job.params[ 'host' ],
+                                                                              user_name=job.params[ 'user_name' ],
+                                                                              password=job.params[ 'password' ],
+                                                                              sample_dataset_id=sample_dataset_id,
+                                                                              status=sample_dataset_info_dict[ 'status' ],
+                                                                              name=sample_dataset_info_dict[ 'name' ],
+                                                                              file_path=sample_dataset_info_dict[ 'file_path' ],
+                                                                              sample_id=sample_dataset_info_dict[ 'sample_id' ],
+                                                                              external_service_id=sample_dataset_info_dict[ 'external_service_id' ],
+                                                                              error_msg=sample_dataset_info_dict[ 'error_msg' ],
+                                                                              size=sample_dataset_info_dict[ 'size' ] )
+                    results.append( result )
+            self.app.transfer_manager.run( [ r[ 'transfer_job' ] for r in results ] )
+            for result in results:
+                transfer_job = result.pop( 'transfer_job' )
+                self.create_job( None,
+                                 transfer_job_id=transfer_job.id,
+                                 result=transfer_job.params,
+                                 sample_id=job.params[ 'sample_id' ] )
+                # Update the state of the relevant SampleDataset
+                new_status = self.app.model.SampleDataset.transfer_status.IN_QUEUE
+                self._update_sample_dataset_status( protocol=job.params[ 'protocol' ],
+                                                    sample_id=job.params[ 'sample_id' ],
+                                                    result_dict=transfer_job.params,
+                                                    new_status=new_status,
+                                                    error_msg='' )
+            job.state = self.app.model.DeferredJob.states.OK
+            self.sa_session.add( job )
+            self.sa_session.flush()
+            # TODO: Error handling: failure executing, or errors returned from the manager
+        if job.params[ 'type' ] == 'finish_transfer':
+            protocol = job.params[ 'protocol' ]
+            # Update the state of the relevant SampleDataset
+            new_status = self.app.model.SampleDataset.transfer_status.ADD_TO_LIBRARY
+            if protocol in [ 'http', 'https' ]:
+                result_dict = job.params[ 'result' ]
+                library_dataset_name = result_dict[ 'name' ]
+                extension = result_dict[ 'datatype' ]
+            elif protocol in [ 'scp' ]:
+                # In this case, job.params will be a dictionary that contains a key named 'result'.  The value
+                # of the result key is a dictionary that looks something like:
+                # {'sample_dataset_id': '8', 'status': 'Not started', 'protocol': 'scp', 'name': '3.bed',
+                #  'file_path': '/data/library/3.bed', 'host': '127.0.0.1', 'sample_id': 8, 'external_service_id': 2,
+                #  'local_path': '/tmp/kjl2Ss4', 'password': 'galaxy', 'user_name': 'gvk', 'error_msg': '', 'size': '8.0K'}
+                try:
+                    tj = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params['transfer_job_id'] ) )
+                    result_dict = tj.params
+                    result_dict['local_path'] = tj.path
+                except Exception as e:
+                    log.error( "Updated transfer result unavailable, using old result.  Error was: %s" % str( e ) )
+                    result_dict = job.params[ 'result' ]
+                library_dataset_name = result_dict[ 'name' ]
+                # Determine the data format (see the relevant TODO item in the manual_data_transfer plugin).
+                extension = sniff.guess_ext( result_dict[ 'local_path' ], sniff_order=self.app.datatypes_registry.sniff_order )
+            self._update_sample_dataset_status( protocol=job.params[ 'protocol' ],
+                                                sample_id=int( job.params[ 'sample_id' ] ),
+                                                result_dict=result_dict,
+                                                new_status=new_status,
+                                                error_msg='' )
+            sample = self.sa_session.query( self.app.model.Sample ).get( int( job.params[ 'sample_id' ] ) )
+            ld = self.app.model.LibraryDataset( folder=sample.folder, name=library_dataset_name )
+            self.sa_session.add( ld )
+            self.sa_session.flush()
+            self.app.security_agent.copy_library_permissions( FakeTrans( self.app ), sample.folder, ld )
+            ldda = self.app.model.LibraryDatasetDatasetAssociation( name=library_dataset_name,
+                                                                    extension=extension,
+                                                                    dbkey='?',
+                                                                    library_dataset=ld,
+                                                                    create_dataset=True,
+                                                                    sa_session=self.sa_session )
+            ldda.message = 'Transferred by the Data Transfer Plugin'
+            self.sa_session.add( ldda )
+            self.sa_session.flush()
+            ldda.state = ldda.states.QUEUED  # flushed in the set property
+            ld.library_dataset_dataset_association_id = ldda.id
+            self.sa_session.add( ld )
+            self.sa_session.flush()
+            try:
+                # Move the dataset from its temporary location
+                shutil.move( job.transfer_job.path, ldda.file_name )
+                ldda.init_meta()
+                for name, spec in ldda.metadata.spec.items():
+                    if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
+                        if spec.get( 'default' ):
+                            setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+                self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( self.app.datatypes_registry.set_external_metadata_tool,
+                                                                                            FakeTrans( self.app,
+                                                                                                       history=sample.history,
+                                                                                                       user=sample.request.user ),
+                                                                                            incoming={ 'input1': ldda } )
+                ldda.state = ldda.states.OK
+                # TODO: not sure if this flush is necessary
+                self.sa_session.add( ldda )
+                self.sa_session.flush()
+            except Exception as e:
+                log.exception( 'Failure preparing library dataset for finished transfer job (id: %s) via deferred job (id: %s):' %
+                               ( str( job.transfer_job.id ), str( job.id ) ) )
+                ldda.state = ldda.states.ERROR
+            if sample.workflow:
+                log.debug( "\n\nLogging sample mappings as: %s" % sample.workflow[ 'mappings' ] )
+                log.debug( "job.params: %s" % job.params )
+                # We have a workflow.  Update all mappings to lddas, and when
+                # the final one is done, execute the workflow with either the
+                # provided history or a new one.
+                sub_done = True
+                rep_done = False
+                for k, v in sample.workflow[ 'mappings' ].items():
+                    if 'hda' not in v and v[ 'ds_tag' ].startswith( 'hi|' ):
+                        sample.workflow[ 'mappings' ][ k ][ 'hda' ] = self.app.security.decode_id( v[ 'ds_tag' ][3:] )
+                for key, value in sample.workflow[ 'mappings' ].items():
+                    if 'url' in value and value[ 'url' ] == job.params[ 'result' ][ 'url' ]:
+                        # DBTODO Make sure all ds| mappings get the URL of the dataset, for linking to later.
+                        # If this dataset maps to what we just finished, update the ldda id in the sample.
+                        sample.workflow[ 'mappings' ][ key ][ 'ldda' ] = ldda.id
+                        rep_done = True
+                    # DBTODO replace the hi| mappings with the hda here.  Just rip off the first three chars.
+                    elif 'ldda' not in value and 'hda' not in value:
+                        # We're not done if some mappings still don't have ldda or hda mappings.
+                        sub_done = False
+                if sub_done and rep_done:
+                    if not sample.history:
+                        new_history = self.app.model.History( name="New History From %s" % sample.name, user=sample.request.user )
+                        self.sa_session.add( new_history )
+                        sample.history = new_history
+                        self.sa_session.flush()
+                    self._execute_workflow( sample )
+                # Check the workflow for substitution done-ness
+                self.sa_session.add( sample )
+                self.sa_session.flush()
+            elif sample.history:
+                # We don't have a workflow, but a history was provided.
+                # No processing, go ahead and chunk everything in the history.
+                if ldda.dataset.state in [ 'new', 'upload', 'queued', 'running', 'empty', 'discarded' ]:
+                    log.error("Cannot import dataset '%s' to user history since its state is '%s'.  " % ( ldda.name, ldda.dataset.state ))
+                elif ldda.dataset.state in [ 'ok', 'error' ]:
+                    ldda.to_history_dataset_association( target_history=sample.history, add_to_history=True )
+            # Finished
+            job.state = self.app.model.DeferredJob.states.OK
+            self.sa_session.add( job )
+            self.sa_session.flush()
+            # Update the state of the relevant SampleDataset
+            new_status = self.app.model.SampleDataset.transfer_status.COMPLETE
+            self._update_sample_dataset_status( protocol=job.params[ 'protocol' ],
+                                                sample_id=int( job.params[ 'sample_id' ] ),
+                                                result_dict=job.params[ 'result' ],
+                                                new_status=new_status,
+                                                error_msg='' )
+            if sample.datasets and not sample.untransferred_dataset_files:
+                # Update the state of the sample to the sample's request type's final state.
+                new_state = sample.request.type.final_sample_state
+                self._update_sample_state( sample.id, new_state )
+                # Update the state of the request, if possible
+                self._update_request_state( sample.request.id )
+
+    def _missing_params( self, params, required_params ):
+        missing_params = [x for x in required_params if x not in params]
+        if missing_params:
+            log.error( 'Job parameters missing required keys: %s' % ', '.join( missing_params ) )
+            return True
+        return False
+
+    def _update_sample_dataset_status( self, protocol, sample_id, result_dict, new_status, error_msg=None ):
+        # result_dict looks something like:
+        # {'url': '127.0.0.1/data/filtered_subreads.fa', 'name': 'Filtered reads'}
+
+        # TODO: error checking on valid new_status value
+        if protocol in [ 'http', 'https' ]:
+            sample_dataset = self.sa_session.query( self.app.model.SampleDataset ) \
+                                            .filter( and_( self.app.model.SampleDataset.table.c.sample_id == sample_id,
+                                                           self.app.model.SampleDataset.table.c.name == result_dict[ 'name' ],
+                                                           self.app.model.SampleDataset.table.c.file_path == result_dict[ 'url' ] ) ) \
+                                            .first()
+        elif protocol in [ 'scp' ]:
+            sample_dataset = self.sa_session.query( self.app.model.SampleDataset ).get( int( result_dict[ 'sample_dataset_id' ] ) )
+        sample_dataset.status = new_status
+        sample_dataset.error_msg = error_msg
+        self.sa_session.add( sample_dataset )
+        self.sa_session.flush()
+
+    def _update_sample_state( self, sample_id, new_state, comment=None ):
+        sample = self.sa_session.query( self.app.model.Sample ).get( sample_id )
+        if comment is None:
+            comment = 'Sample state set to %s' % str( new_state )
+        event = self.app.model.SampleEvent( sample, new_state, comment )
+        self.sa_session.add( event )
+        self.sa_session.flush()
+
+    def _update_request_state( self, request_id ):
+        request = self.sa_session.query( self.app.model.Request ).get( request_id )
+        # Make sure all the samples of the current request have the same state
+        common_state = request.samples_have_common_state
+        if not common_state:
+            # If the current request state is complete and one of its samples moved from
+            # the final sample state, then move the request state to In-progress
+            if request.is_complete:
+                message = "At least 1 sample state moved from the final sample state, so now the request's state is (%s)" % request.states.SUBMITTED
+                event = self.app.model.RequestEvent( request, request.states.SUBMITTED, message )
+                self.sa_session.add( event )
+                self.sa_session.flush()
+        else:
+            request_type_state = request.type.final_sample_state
+            if common_state.id == request_type_state.id:
+                # Since all the samples are in the final state, change the request state to 'Complete'
+                comment = "All samples of this sequencing request are in the final sample state (%s). " % request_type_state.name
+                state = request.states.COMPLETE
+            else:
+                comment = "All samples of this sequencing request are in the (%s) sample state. " % common_state.name
+                state = request.states.SUBMITTED
+            event = self.app.model.RequestEvent( request, state, comment )
+            self.sa_session.add( event )
+            self.sa_session.flush()
+            # TODO: handle email notification if it is configured to be sent when the samples are in this state.
+
+    def _execute_workflow( self, sample):
+        for key, value in sample.workflow['mappings'].items():
+            if 'hda' not in value and 'ldda' in value:
+                # If HDA is already here, it's an external input, we're not copying anything.
+                ldda = self.sa_session.query( self.app.model.LibraryDatasetDatasetAssociation ).get( value['ldda'] )
+                if ldda.dataset.state in [ 'new', 'upload', 'queued', 'running', 'empty', 'discarded' ]:
+                    log.error("Cannot import dataset '%s' to user history since its state is '%s'.  " % ( ldda.name, ldda.dataset.state ))
+                elif ldda.dataset.state in [ 'ok', 'error' ]:
+                    hda = ldda.to_history_dataset_association( target_history=sample.history, add_to_history=True )
+                    sample.workflow['mappings'][key]['hda'] = hda.id
+                    self.sa_session.add( sample )
+                    self.sa_session.flush()
+        workflow_dict = sample.workflow
+        new_wf_dict = copy.deepcopy(workflow_dict)
+        for key in workflow_dict['mappings']:
+            if not isinstance(key, int):
+                new_wf_dict['mappings'][int(key)] = workflow_dict['mappings'][key]
+        workflow_dict = new_wf_dict
+        fk_trans = FakeTrans(self.app, history=sample.history, user=sample.request.user)
+        workflow = self.sa_session.query(self.app.model.Workflow).get(workflow_dict['id'])
+        if not workflow:
+            log.error("Workflow mapping failure.")
+            return
+        if len( workflow.steps ) == 0:
+            log.error( "Workflow cannot be run because it does not have any steps" )
+            return
+        if workflow.has_cycles:
+            log.error( "Workflow cannot be run because it contains cycles" )
+            return
+        if workflow.has_errors:
+            log.error( "Workflow cannot be run because of validation errors in some steps" )
+            return
+        # Build the state for each step
+        errors = {}
+        # Build a fake dictionary prior to execution.
+        # Prepare each step
+        for step in workflow.steps:
+            step.upgrade_messages = {}
+            # Construct modules
+            if step.type == 'tool' or step.type is None:
+                # Restore the tool state for the step
+                step.module = module_factory.from_workflow_step( fk_trans, step )
+                # Fix any missing parameters
+                step.upgrade_messages = step.module.check_and_update_state()
+                # Any connected input needs to have value DummyDataset (these
+                # are not persisted so we need to do it every time)
+                step.module.add_dummy_datasets( connections=step.input_connections )
+                # Store state with the step
+                step.state = step.module.state
+                # Error dict
+                if step.tool_errors:
+                    errors[step.id] = step.tool_errors
+            else:
+                # Non-tool specific stuff?
+                step.module = module_factory.from_workflow_step( fk_trans, step )
+                step.state = step.module.get_runtime_state()
+            # Connections by input name
+            step.input_connections_by_name = dict( ( conn.input_name, conn ) for conn in step.input_connections )
+        for step in workflow.steps:
+            step.upgrade_messages = {}
+            # Connections by input name
+            step.input_connections_by_name = \
+                dict( ( conn.input_name, conn ) for conn in step.input_connections )
+            # Extract just the arguments for this step by prefix
+            step_errors = None
+            if step.type == 'tool' or step.type is None:
+                module = module_factory.from_workflow_step( fk_trans, step )
+                # Fix any missing parameters
+                step.upgrade_messages = module.check_and_update_state()
+                # Any connected input needs to have value DummyDataset (these
+                # are not persisted so we need to do it every time)
+                module.add_dummy_datasets( connections=step.input_connections )
+                # Get the tool
+                tool = module.tool
+                # Get the state
+                step.state = state = module.state
+            if step_errors:
+                errors[step.id] = state.inputs["__errors__"] = step_errors
+        # Run each step, connecting outputs to inputs
+        workflow_invocation = self.app.model.WorkflowInvocation()
+        workflow_invocation.workflow = workflow
+        outputs = odict()
+        for i, step in enumerate( workflow.steps ):
+            job = None
+            if step.type == 'tool' or step.type is None:
+                tool = self.app.toolbox.get_tool( step.tool_id )
+
+                def callback( input, prefixed_name, **kwargs ):
+                    if isinstance( input, DataToolParameter ):
+                        if prefixed_name in step.input_connections_by_name:
+                            conn = step.input_connections_by_name[ prefixed_name ]
+                            return outputs[ conn.output_step.id ][ conn.output_name ]
+                visit_input_values( tool.inputs, step.state.inputs, callback )
+                job, out_data = tool.execute( fk_trans, step.state.inputs, history=sample.history)
+                outputs[ step.id ] = out_data
+                for pja in step.post_job_actions:
+                    if pja.action_type in ActionBox.immediate_actions:
+                        ActionBox.execute(self.app, self.sa_session, pja, job, replacement_dict=None)
+                    else:
+                        job.add_post_job_action(pja)
+            else:
+                job, out_data = step.module.execute( fk_trans, step.state)
+                outputs[ step.id ] = out_data
+                if step.id in workflow_dict['mappings']:
+                    data = self.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( workflow_dict['mappings'][str(step.id)]['hda'] )
+                    outputs[ step.id ]['output'] = data
+            workflow_invocation_step = self.app.model.WorkflowInvocationStep()
+            workflow_invocation_step.workflow_invocation = workflow_invocation
+            workflow_invocation_step.workflow_step = step
+            workflow_invocation_step.job = job
+        self.sa_session.add( workflow_invocation )
+        self.sa_session.flush()
diff --git a/lib/galaxy/jobs/deferred/manual_data_transfer.py b/lib/galaxy/jobs/deferred/manual_data_transfer.py
new file mode 100644
index 0000000..306f897
--- /dev/null
+++ b/lib/galaxy/jobs/deferred/manual_data_transfer.py
@@ -0,0 +1,107 @@
+"""
+Generic module for managing manual data transfer jobs using Galaxy's built-in file browser.
+This module can be used by various external services that are configured to transfer data manually.
+"""
+import logging
+
+from .data_transfer import DataTransfer
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'ManualDataTransferPlugin', )
+
+
+class ManualDataTransferPlugin( DataTransfer ):
+    def __init__( self, app ):
+        super( ManualDataTransferPlugin, self ).__init__( app )
+
+    def create_job( self, trans, **kwd ):
+        if 'sample' in kwd and 'sample_datasets' in kwd and 'external_service' in kwd and 'external_service_type' in kwd:
+            sample = kwd[ 'sample' ]
+            sample_datasets = kwd[ 'sample_datasets' ]
+            external_service = kwd[ 'external_service' ]
+            external_service_type = kwd[ 'external_service_type' ]
+            # TODO: is there a better way to store the protocol?
+            protocol = next(iter(external_service_type.data_transfer.keys()))
+            host = external_service.form_values.content[ 'host' ]
+            user_name = external_service.form_values.content[ 'user_name' ]
+            password = external_service.form_values.content[ 'password' ]
+            # TODO: In the future, we may want to implement a way for the user to associate a selected file with one of
+            # the run outputs configured in the <run_details><results> section of the external service config file.  The
+            # following was a first pass at implementing something (the datatype was included in the sample_dataset_dict),
+            # but without a way for the user to associate stuff it's useless.  However, allowing the user this ability may
+            # open a can of worms, so maybe we shouldn't do it???
+            #
+            # for run_result_file_name, run_result_file_datatype in external_service_type.run_details[ 'results' ].items():
+            #     # external_service_type.run_details[ 'results' ] looks something like: {'dataset1_name': 'dataset1_datatype'}
+            #     if run_result_file_datatype in external_service.form_values.content:
+            #         datatype = external_service.form_values.content[ run_result_file_datatype ]
+            #
+            # When the transfer is automatic (the process used in the SMRT Portal plugin), the datasets and datatypes
+            # can be matched up to those configured in the <run_details><results> settings in the external service type config
+            # (e.g., pacific_biosciences_smrt_portal.xml).  However, that's a bit trickier here since the user is manually
+            # selecting files for transfer.
+            sample_datasets_dict = {}
+            for sample_dataset in sample_datasets:
+                sample_dataset_id = sample_dataset.id
+                sample_dataset_dict = dict( sample_id=sample_dataset.sample.id,
+                                            name=sample_dataset.name,
+                                            file_path=sample_dataset.file_path,
+                                            status=sample_dataset.status,
+                                            error_msg=sample_dataset.error_msg,
+                                            size=sample_dataset.size,
+                                            external_service_id=sample_dataset.external_service.id )
+                sample_datasets_dict[ sample_dataset_id ] = sample_dataset_dict
+            params = { 'type' : 'init_transfer',
+                       'sample_id' : sample.id,
+                       'sample_datasets_dict' : sample_datasets_dict,
+                       'protocol' : protocol,
+                       'host' : host,
+                       'user_name' : user_name,
+                       'password' : password }
+        elif 'transfer_job_id' in kwd:
+            params = { 'type' : 'finish_transfer',
+                       'protocol' : kwd[ 'result' ][ 'protocol' ],
+                       'sample_id' : kwd[ 'sample_id' ],
+                       'result' : kwd[ 'result' ],
+                       'transfer_job_id' : kwd[ 'transfer_job_id' ] }
+        else:
+            log.error( 'No job was created because kwd does not include "sample", "sample_datasets", "external_service", and "external_service_type", or "transfer_job_id".' )
+            return
+        deferred_job = self.app.model.DeferredJob( state=self.app.model.DeferredJob.states.NEW,
+                                                   plugin='ManualDataTransferPlugin',
+                                                   params=params )
+        self.sa_session.add( deferred_job )
+        self.sa_session.flush()
+        log.debug( 'Created a deferred job in the ManualDataTransferPlugin of type: %s' % params[ 'type' ] )
+        # TODO: error reporting to caller (if possible?)
+
+    def check_job( self, job ):
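+        # Returns one of the deferred-job states: INVALID when required params
+        # are missing or the job type is unknown, READY when an scp transfer
+        # can start or a finishing transfer has reached a terminal state, and
+        # WAIT otherwise.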
+        if self._missing_params( job.params, [ 'type' ] ):
+            return self.job_states.INVALID
+        if job.params[ 'type' ] == 'init_transfer':
+            if job.params[ 'protocol' ] in [ 'http', 'https' ]:
+                raise Exception( "Manual data transfer is not yet supported for http(s)." )
+            elif job.params[ 'protocol' ] == 'scp':
+                if self._missing_params( job.params, [ 'protocol', 'host', 'user_name', 'password', 'sample_id', 'sample_datasets_dict' ] ):
+                    return self.job_states.INVALID
+                # TODO: what kind of checks do we need here?
+                return self.job_states.READY
+            return self.job_states.WAIT
+        if job.params[ 'type' ] == 'finish_transfer':
+            if self._missing_params( job.params, [ 'transfer_job_id' ] ):
+                return self.job_states.INVALID
+            # Get the TransferJob object and add it to the DeferredJob so we only look it up once.
+            if not hasattr( job, 'transfer_job' ):
+                job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
+            state = self.app.transfer_manager.get_state( job.transfer_job )
+            if not state:
+                log.error( 'No state for transfer job id: %s' % job.transfer_job.id )
+                return self.job_states.WAIT
+            if state[ 'state' ] in self.app.model.TransferJob.terminal_states:
+                return self.job_states.READY
+            log.debug( "Checked on finish transfer job %s, not done yet." % job.id )
+            return self.job_states.WAIT
+        else:
+            log.error( 'Unknown job type for ManualDataTransferPlugin: %s' % str( job.params[ 'type' ] ) )
+            return self.job_states.INVALID
diff --git a/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py b/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py
new file mode 100644
index 0000000..6d03ee8
--- /dev/null
+++ b/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py
@@ -0,0 +1,132 @@
+"""
+Module for managing jobs in Pacific Biosciences' SMRT Portal and automatically transferring files
+produced by SMRT Portal.
+"""
+import json
+import logging
+from string import Template
+
+from six.moves.urllib.request import urlopen
+
+from .data_transfer import DataTransfer
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'SMRTPortalPlugin', )
+
+
+class SMRTPortalPlugin( DataTransfer ):
+    api_path = '/smrtportal/api'
+
+    def __init__( self, app ):
+        super( SMRTPortalPlugin, self ).__init__( app )
+
+    def create_job( self, trans, **kwd ):
+        if 'secondary_analysis_job_id' in kwd:
+            sample = kwd[ 'sample' ]
+            smrt_job_id = kwd[ 'secondary_analysis_job_id' ]
+            external_service = sample.request.type.get_external_service( 'pacific_biosciences_smrt_portal' )
+            external_service.load_data_transfer_settings( trans )
+            http_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_protocol.HTTP ]
+            if not http_configs[ 'automatic_transfer' ]:
+                raise Exception( "Manual data transfer using http is not yet supported." )
+            smrt_host = external_service.form_values.content[ 'host' ]
+            external_service_type = external_service.get_external_service_type( trans )
+            # TODO: is there a better way to store the protocol?
+            # external_service_type.data_transfer looks something like
+            # {'http': <galaxy.sample_tracking.data_transfer.HttpDataTransferFactory object at 0x1064239d0>}
+            protocol = next(iter(external_service_type.data_transfer.keys()))
+            results = {}
+            for k, v in external_service.form_values.content.items():
+                match = self.dataset_name_re.match( k ) or self.dataset_datatype_re.match( k )
+                if match:
+                    id, field = match.groups()
+                    if id in results:
+                        results[ id ][ field ] = v
+                    else:
+                        results[ id ] = { field : v }
+            for id, attrs in results.items():
+                url_template = external_service_type.run_details[ 'results_urls' ].get( id + '_name' )
+                url = Template( url_template ).substitute( host=smrt_host, secondary_analysis_job_id=kwd[ 'secondary_analysis_job_id' ] )
+                results[ id ][ 'url' ] = url
+                if sample.workflow:
+                    # DBTODO Make sure all ds| mappings get the URL of the dataset, for linking to later.
+                    for k, v in sample.workflow[ 'mappings' ].items():
+                        if 'ds|%s' % id in v.values():
+                            sample.workflow['mappings'][k]['url'] = url
+            self.sa_session.add(sample)
+            self.sa_session.flush()
+            params = { 'type' : 'init_transfer',
+                       'protocol' : protocol,
+                       'sample_id' : sample.id,
+                       'results' : results,
+                       'smrt_host' : smrt_host,
+                       'smrt_job_id' : smrt_job_id }
+            # Create a new SampleDataset for each run result dataset
+            self._associate_untransferred_datasets_with_sample( sample, external_service, results )
+        elif 'transfer_job_id' in kwd:
+            params = { 'type' : 'finish_transfer',
+                       'protocol' : kwd[ 'result' ][ 'protocol' ],
+                       'sample_id' : kwd[ 'sample_id' ],
+                       'result' : kwd[ 'result' ],
+                       'transfer_job_id' : kwd[ 'transfer_job_id' ] }
+        else:
+            log.error( 'No job was created because kwd does not include "secondary_analysis_job_id" or "transfer_job_id".' )
+            return
+        deferred_job = self.app.model.DeferredJob( state=self.app.model.DeferredJob.states.NEW,
+                                                   plugin='SMRTPortalPlugin',
+                                                   params=params )
+        self.sa_session.add( deferred_job )
+        self.sa_session.flush()
+        log.debug( 'Created a deferred job in the SMRTPortalPlugin of type: %s' % params[ 'type' ] )
+        # TODO: error reporting to caller (if possible?)
+
+    def check_job( self, job ):
+        if self._missing_params( job.params, [ 'type' ] ):
+            return self.job_states.INVALID
+        if job.params[ 'type' ] == 'init_transfer':
+            if self._missing_params( job.params, [ 'smrt_host', 'smrt_job_id' ] ):
+                return self.job_states.INVALID
+            url = 'http://' + job.params[ 'smrt_host' ] + self.api_path + '/Jobs/' + job.params[ 'smrt_job_id' ] + '/Status'
+            r = urlopen( url )
+            status = json.loads( r.read() )
+            # TODO: error handling: unexpected json or bad response, bad url, etc.
+            if status[ 'Code' ] == 'Completed':
+                log.debug( "SMRT Portal job '%s' is Completed.  Initiating transfer." % job.params[ 'smrt_job_id' ] )
+                return self.job_states.READY
+            return self.job_states.WAIT
+        if job.params[ 'type' ] == 'finish_transfer':
+            if self._missing_params( job.params, [ 'transfer_job_id' ] ):
+                return self.job_states.INVALID
+            # Get the TransferJob object and add it to the DeferredJob so we only look it up once.
+            if not hasattr( job, 'transfer_job' ):
+                job.transfer_job = self.sa_session.query( self.app.model.TransferJob ).get( int( job.params[ 'transfer_job_id' ] ) )
+            state = self.app.transfer_manager.get_state( job.transfer_job )
+            if not state:
+                log.error( 'No state for transfer job id: %s' % job.transfer_job.id )
+                return self.job_states.WAIT
+            if state[ 'state' ] in self.app.model.TransferJob.terminal_states:
+                return self.job_states.READY
+            log.debug( "Checked on finish transfer job %s, not done yet." % job.id )
+            return self.job_states.WAIT
+        else:
+            log.error( 'Unknown job type for SMRTPortalPlugin: %s' % str( job.params[ 'type' ] ) )
+            return self.job_states.INVALID
+
+    def _associate_untransferred_datasets_with_sample( self, sample, external_service, results_dict ):
+        # results_dict looks something like:
+        # {'dataset2': {'datatype': 'fasta', 'url': '127.0.0.1:8080/data/filtered_subreads.fa', 'name': 'Filtered reads'} }
+        for key, val in results_dict.items():
+            file_path = val[ 'url' ]
+            status = self.app.model.SampleDataset.transfer_status.NOT_STARTED
+            name = val[ 'name' ]
+            size = 'unknown'
+            sample_dataset = self.app.model.SampleDataset( sample=sample,
+                                                           file_path=file_path,
+                                                           status=status,
+                                                           name=name,
+                                                           error_msg='',
+                                                           size=size,
+                                                           external_service=external_service )
+            self.sa_session.add( sample_dataset )
+            self.sa_session.flush()
diff --git a/lib/galaxy/jobs/dynamic_tool_destination.py b/lib/galaxy/jobs/dynamic_tool_destination.py
new file mode 100755
index 0000000..bec27cb
--- /dev/null
+++ b/lib/galaxy/jobs/dynamic_tool_destination.py
@@ -0,0 +1,1414 @@
+from __future__ import print_function
+
+__version__ = '1.0.0'
+
+from yaml import load
+# PyYAML raises yaml.scanner.ScannerError on malformed input; import it rather
+# than shadowing it with a local exception class that nothing would ever raise.
+from yaml.scanner import ScannerError
+
+import argparse
+import logging
+import os
+import sys
+import copy
+import collections
+import re
+
+
+# log to galaxy's logger
+log = logging.getLogger(__name__)
+
+# does a lot more logging when set to true
+verbose = True
+
+
+class MalformedYMLException(Exception):
+    pass
+
+
+class RuleValidator:
+    """
+    This class is the primary facility for validating configs. It is always called
+    in map_tool_to_destination, and it is also used to validate configs directly
+    through DynamicToolDestination.py.
+    """
+
+    @classmethod
+    def validate_rule(cls, rule_type, return_bool=False, *args, **kwargs):
+        """
+        This function is responsible for passing each rule to its relevant function.
+
+        @type rule_type: str
+        @param rule_type: the current rule's type
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @rtype: bool, dict (depending on return_bool)
+        @return: validated rule or result of validation (depending on return_bool)
+        """
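+        # A minimal illustrative call (hypothetical rule dict and tool name):
+        #   RuleValidator.validate_rule('file_size', True,
+        #       {'rule_type': 'file_size', 'nice_value': 0, 'destination': 'cluster',
+        #        'lower_bound': '0', 'upper_bound': '8 GB'}, 1, 'bwa')
+        #   -> True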
+        if rule_type == 'file_size':
+            return cls.__validate_file_size_rule(return_bool, *args, **kwargs)
+
+        elif rule_type == 'num_input_datasets':
+            return cls.__validate_num_input_datasets_rule(return_bool, *args, **kwargs)
+
+        elif rule_type == 'records':
+            return cls.__validate_records_rule(return_bool, *args, **kwargs)
+
+        elif rule_type == 'arguments':
+            return cls.__validate_arguments_rule(return_bool, *args, **kwargs)
+
+    @classmethod
+    def __validate_file_size_rule(
+            cls, return_bool, original_rule, counter, tool):
+        """
+        This function is responsible for validating 'file_size' rules.
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type original_rule: dict
+        @param original_rule: contains the original received rule
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict (depending on return_bool)
+        @return: validated rule or result of validation (depending on return_bool)
+        """
+
+        rule = copy.deepcopy(original_rule)
+        valid_rule = True
+
+        # Users Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_users(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Nice_value Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_nice_value(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Destination Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_destination(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Bounds Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_bounds(
+                valid_rule, return_bool, rule, tool, counter)
+
+        if return_bool:
+            return valid_rule
+
+        else:
+            return rule
+
+    @classmethod
+    def __validate_num_input_datasets_rule(
+            cls, return_bool, original_rule, counter, tool):
+        """
+        This function is responsible for validating 'num_input_datasets' rules.
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type original_rule: dict
+        @param original_rule: contains the original received rule
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict (depending on return_bool)
+        @return: validated rule or result of validation (depending on return_bool)
+        """
+
+        rule = copy.deepcopy(original_rule)
+        valid_rule = True
+
+        # Users Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_users(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Nice_value Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_nice_value(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Destination Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_destination(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Bounds Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_bounds(
+                valid_rule, return_bool, rule, tool, counter)
+
+        if return_bool:
+            return valid_rule
+
+        else:
+            return rule
+
+    @classmethod
+    def __validate_records_rule(cls, return_bool, original_rule, counter, tool):
+        """
+        This function is responsible for validating 'records' rules.
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type original_rule: dict
+        @param original_rule: contains the original received rule
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict (depending on return_bool)
+        @return: validated rule or result of validation (depending on return_bool)
+        """
+
+        rule = copy.deepcopy(original_rule)
+        valid_rule = True
+
+        # Users Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_users(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Nice_value Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_nice_value(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Destination Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_destination(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Bounds Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_bounds(
+                valid_rule, return_bool, rule, tool, counter)
+
+        if return_bool:
+            return valid_rule
+
+        else:
+            return rule
+
+    @classmethod
+    def __validate_arguments_rule(
+            cls, return_bool, original_rule, counter, tool):
+        """
+        This is responsible for validating 'arguments' rules.
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type original_rule: dict
+        @param original_rule: contains the original received rule
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict (depending on return_bool)
+        @return: validated rule or result of validation (depending on return_bool)
+        """
+
+        rule = copy.deepcopy(original_rule)
+        valid_rule = True
+
+        # Users Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_users(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Nice_value Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_nice_value(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Destination Verification #
+        if rule is not None:
+            valid_rule, rule = cls.__validate_destination(
+                valid_rule, return_bool, rule, tool, counter)
+
+        # Arguments Verification (for rule_type 'arguments'; see the docstring at
+        # the top of this function for clarification).
+        if rule is not None:
+            valid_rule, rule = cls.__validate_arguments(
+                valid_rule, return_bool, rule, tool, counter)
+
+        if return_bool:
+            return valid_rule
+
+        else:
+            return rule
+
+    @classmethod
+    def __validate_nice_value(cls, valid_rule, return_bool, rule, tool, counter):
+        """
+        This function is responsible for validating nice_value.
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type valid_rule: bool
+        @param valid_rule: True if the rule has been valid so far; set to False as
+                       soon as an abnormality is encountered in the config.
+
+        @type rule: dict
+        @param rule: the rule to validate
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict (tuple)
+        @return: result of validation and the validated rule
+        """
+
+        if "nice_value" in rule:
+            if rule["nice_value"] < -20 or rule["nice_value"] > 20:
+                error = "nice_value goes from -20 to 20; rule " + str(counter)
+                error += " in '" + str(tool) + "' has a nice_value of '"
+                error += str(rule["nice_value"]) + "'."
+                if not return_bool:
+                    error += " Setting nice_value to 0."
+                    rule["nice_value"] = 0
+
+                if verbose:
+                    log.debug(error)
+                valid_rule = False
+
+        else:
+            error = "No nice_value found for rule " + str(counter) + " in '" + str(tool)
+            error += "'."
+            if not return_bool:
+                error += " Setting nice_value to 0."
+                rule["nice_value"] = 0
+            if verbose:
+                log.debug(error)
+            valid_rule = False
+
+        return valid_rule, rule
+
+    @classmethod
+    def __validate_destination(cls, valid_rule, return_bool, rule, tool, counter):
+        """
+        This function is responsible for validating destination.
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type valid_rule: bool
+        @param valid_rule: True if the rule has been valid so far; set to False as
+                       soon as an abnormality is encountered in the config.
+
+        @type rule: dict
+        @param rule: the rule to validate
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict (tuple)
+        @return: result of validation and the validated rule
+        """
+
+        if "fail_message" in rule:
+            if "destination" not in rule or rule['destination'] != "fail":
+                error = "Found a fail_message for rule " + str(counter)
+                error += " in '" + str(tool) + "', but destination is not 'fail'!"
+                if not return_bool:
+                    error += " Setting destination to 'fail'."
+                if verbose:
+                    log.debug(error)
+
+                valid_rule = False
+
+            rule["destination"] = "fail"
+
+        if "destination" in rule:
+            if isinstance(rule["destination"], str):
+                if rule["destination"] == "fail" and "fail_message" not in rule:
+                    error = "Missing a fail_message for rule " + str(counter)
+                    error += " in '" + str(tool) + "'."
+                    if not return_bool:
+                        error += " Adding generic fail_message."
+                        message = "Invalid parameters for rule " + str(counter)
+                        message += " in '" + str(tool) + "'."
+                        rule["fail_message"] = message
+                    if verbose:
+                        log.debug(error)
+                    valid_rule = False
+            elif isinstance(rule["destination"], dict):
+                if ("priority" in rule["destination"] and isinstance(rule["destination"]["priority"], dict)):
+                    if "med" not in rule["destination"]["priority"]:
+                        error = "No 'med' priority destination for rule " + str(counter)
+                        error += " in '" + str(tool) + "'."
+                        if not return_bool:
+                            error += " Ignoring..."
+                        if verbose:
+                            log.debug(error)
+                        valid_rule = False
+                    else:
+                        for priority in rule["destination"]["priority"]:
+                            if priority not in ["low", "med", "high"]:
+                                error = "Invalid priority destination '" + str(priority)
+                                error += "' for rule " + str(counter)
+                                error += " in '" + str(tool) + "'."
+                                if not return_bool:
+                                    error += " Ignoring..."
+                                if verbose:
+                                    log.debug(error)
+                                valid_rule = False
+                            elif not isinstance(rule["destination"]["priority"][priority], str):
+                                error = "Non-string '" + str(priority)
+                                error += "' priority destination for rule " + str(counter)
+                                error += " in '" + str(tool) + "'."
+                                if not return_bool:
+                                    error += " Ignoring..."
+                                if verbose:
+                                    log.debug(error)
+                                valid_rule = False
+                else:
+                    error = "No destination specified for rule " + str(counter)
+                    error += " in '" + str(tool) + "'."
+                    if not return_bool:
+                        error += " Ignoring..."
+                    if verbose:
+                        log.debug(error)
+                    valid_rule = False
+            else:
+                error = "No destination specified for rule " + str(counter)
+                error += " in '" + str(tool) + "'."
+                if not return_bool:
+                    error += " Ignoring..."
+                if verbose:
+                    log.debug(error)
+                valid_rule = False
+        else:
+            error = "No destination specified for rule " + str(counter)
+            error += " in '" + str(tool) + "'."
+            if not return_bool:
+                error += " Ignoring..."
+            if verbose:
+                log.debug(error)
+            valid_rule = False
+
+        return valid_rule, rule
+
+    @classmethod
+    def __validate_bounds(cls, valid_rule, return_bool, rule, tool, counter):
+        """
+        This function is responsible for validating bounds.
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type valid_rule: bool
+        @param valid_rule: True if the rule has been valid so far; set to False as
+                       soon as an abnormality is encountered in the config.
+
+        @type rule: dict
+        @param rule: the rule to validate
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict/None (tuple)
+        @return: result of validation and the validated rule (or None if the rule
+                 must be ignored)
+        """
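+        # e.g. a file_size rule with lower_bound: "5 GB" and upper_bound: "1 GB" is
+        # flagged invalid; unless return_bool is set, the two bounds are swapped.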
+
+        if "upper_bound" in rule and "lower_bound" in rule:
+            if rule["rule_type"] == "file_size":
+                upper_bound = str_to_bytes(rule["upper_bound"])
+                lower_bound = str_to_bytes(rule["lower_bound"])
+            else:
+                upper_bound = rule["upper_bound"]
+                lower_bound = rule["lower_bound"]
+
+            if lower_bound == "Infinity":
+                error = "Error: lower_bound is set to Infinity, but must be lower than "
+                error += "upper_bound!"
+                if not return_bool:
+                    error += " Setting lower_bound to 0!"
+                    lower_bound = 0
+                    rule["lower_bound"] = 0
+                if verbose:
+                    log.debug(error)
+                valid_rule = False
+
+            if upper_bound == "Infinity":
+                upper_bound = -1
+
+            if upper_bound != -1 and lower_bound > upper_bound:
+
+                error = "lower_bound exceeds upper_bound for rule " + str(counter)
+                error += " in '" + str(tool) + "'."
+                if not return_bool:
+                    error += " Reversing bounds."
+                    temp_upper_bound = rule["upper_bound"]
+                    temp_lower_bound = rule["lower_bound"]
+                    rule["upper_bound"] = temp_lower_bound
+                    rule["lower_bound"] = temp_upper_bound
+                if verbose:
+                    log.debug(error)
+                valid_rule = False
+
+        else:
+            error = "Missing bounds for rule " + str(counter)
+            error += " in '" + str(tool) + "'."
+            if not return_bool:
+                error += " Ignoring rule."
+                rule = None
+            if verbose:
+                log.debug(error)
+            valid_rule = False
+
+        return valid_rule, rule
+
+    @classmethod
+    def __validate_arguments(cls, valid_rule, return_bool, rule, tool, counter):
+        """
+        This function is responsible for validating arguments.
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type valid_rule: bool
+        @param valid_rule: True if the rule has been valid so far; set to False as
+                       soon as an abnormality is encountered in the config.
+
+        @type rule: dict
+        @param rule: the rule to validate
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict/None (tuple)
+        @return: result of validation and the validated rule (or None if the rule
+                 must be ignored)
+        """
+
+        if "arguments" not in rule or not isinstance(rule["arguments"], dict):
+            error = "No arguments found for rule " + str(counter) + " in '"
+            error += str(tool) + "' despite being of type arguments."
+            if not return_bool:
+                error += " Ignoring rule."
+                rule = None
+            if verbose:
+                log.debug(error)
+            valid_rule = False
+
+        return valid_rule, rule
+
+    @classmethod
+    def __validate_users(cls, valid_rule, return_bool, rule, tool, counter):
+        """
+        This function is responsible for validating users (if present).
+
+        @type return_bool: bool
+        @param return_bool: True when we are only interested in the result of the
+                              validation, and not the validated rule itself.
+
+        @type valid_rule: bool
+        @param valid_rule: True if the rule has been valid so far; set to False as
+                       soon as an abnormality is encountered in the config.
+
+        @type rule: dict
+        @param rule: the rule to validate
+
+        @type counter: int
+        @param counter: this counter is used to identify what rule # is currently being
+                        validated. Necessary for log output.
+
+        @type tool: str
+        @param tool: the name of the current tool. Necessary for log output.
+
+        @rtype: bool, dict (tuple)
+        @return: result of validation and the validated rule
+        """
+
+        emailregex = r"^[A-Za-z0-9\.\+_-]+@[A-Za-z0-9\._-]+\.[a-zA-Z]*$"
+
+        if "users" in rule:
+            if isinstance(rule["users"], list):
+                for user in reversed(rule["users"]):
+                    if not isinstance(user, str):
+                        error = "Entry '" + str(user) + "' in users for rule "
+                        error += str(counter) + " in tool '" + str(tool) + "' is in an "
+                        error += "invalid format!"
+                        if not return_bool:
+                            error += " Ignoring entry."
+                        if verbose:
+                            log.debug(error)
+                        valid_rule = False
+                        rule["users"].remove(user)
+
+                    else:
+                        if re.match(emailregex, user) is None:
+                            error = "Supplied email '" + str(user) + "' for rule "
+                            error += str(counter) + " in tool '" + str(tool) + "' is in "
+                            error += "an invalid format!"
+                            if not return_bool:
+                                error += " Ignoring email."
+                            if verbose:
+                                log.debug(error)
+                            valid_rule = False
+                            rule["users"].remove(user)
+
+            else:
+                error = "Couldn't find a list under 'users:'!"
+                if not return_bool:
+                    error += " Ignoring rule."
+                    rule = None
+                if verbose:
+                    log.debug(error)
+                valid_rule = False
+
+            # post-processing checking to make sure we didn't just remove all the users
+            # if we did, we should ignore the rule
+            if rule is not None and rule["users"] is not None and len(rule["users"]) == 0:
+                error = "No valid user emails were specified for rule " + str(counter)
+                error += " in tool '" + str(tool) + "'!"
+                if not return_bool:
+                    error += " Ignoring rule."
+                    rule = None
+                if verbose:
+                    log.debug(error)
+                valid_rule = False
+
+        return valid_rule, rule
+
+
+def parse_yaml(path="/config/tool_destinations.yml", test=False, return_bool=False):
+    """
+    Get a yaml file from path and send it to validate_config for validation.
+
+    @type path: str
+    @param path: the path to the config file (when test is True, this is the raw
+                   YAML content rather than a path)
+
+    @type test: bool
+    @param test: indicates whether to run in test mode or production mode
+
+    @type return_bool: bool
+    @param return_bool: True when we are only interested in the result of the
+                          validation, and not the validated rule itself.
+
+    @rtype: bool, dict (depending on return_bool)
+    @return: validated rule or result of validation (depending on return_bool)
+
+    """
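+    # Illustrative usage (hypothetical path):
+    #   config = parse_yaml("/srv/galaxy/config/tool_destinations.yml")
+    #   is_valid = parse_yaml("/srv/galaxy/config/tool_destinations.yml",
+    #                         return_bool=True)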
+    # Import file from path
+    try:
+        if test:
+            config = load(path)
+        else:
+            if path == "/config/tool_destinations.yml":
+                # os.path.realpath gets the path of DynamicToolDestination.py
+                # and then os.path.join is used to go back four directories
+                config_directory = os.path.join(
+                    os.path.dirname(os.path.realpath(__file__)), '../../../..')
+
+                opt_file = config_directory + path
+
+            else:
+                opt_file = path
+
+            with open(opt_file, 'r') as stream:
+                config = load(stream)
+
+        # Test imported file
+        try:
+            if return_bool:
+                valid_config = validate_config(config, return_bool)
+            else:
+                config = validate_config(config)
+        except MalformedYMLException as e:
+            if verbose:
+                log.error(str(e))
+            raise
+    except ScannerError:
+        if verbose:
+            log.error("Config is too malformed to fix!")
+        raise
+
+    if return_bool:
+        return valid_config
+
+    else:
+        return config
+
+
+def validate_config(obj, return_bool=False):
+    """
+    Validate received config.
+
+    @type obj: dict
+    @param obj: the entire contents of the config
+
+    @type return_bool: bool
+    @param return_bool: True when we are only interested in the result of the
+                          validation, and not the validated rule itself.
+
+    @rtype: bool, dict (depending on return_bool)
+    @return: validated rule or result of validation (depending on return_bool)
+    """
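+    # An illustrative config that would pass validation (all names hypothetical):
+    #
+    #   verbose: True
+    #   default_destination: cluster_default
+    #   users:
+    #     user@example.org:
+    #       priority: high
+    #   tools:
+    #     bwa:
+    #       default_destination: cluster_low
+    #       rules:
+    #         - rule_type: file_size
+    #           nice_value: 0
+    #           destination: cluster_high
+    #           lower_bound: 8 GB
+    #           upper_bound: Infinity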
+
+    def infinite_defaultdict():
+        return collections.defaultdict(infinite_defaultdict)
+
+    # Allow new_config to expand automatically when adding values to new levels
+    new_config = infinite_defaultdict()
+
+    global verbose
+    verbose = False
+    valid_config = True
+    valid_rule = True
+    tool_has_default = False
+
+    if return_bool:
+        verbose = True
+
+    elif obj is not None and 'verbose' in obj and isinstance(obj['verbose'], bool):
+        verbose = obj['verbose']
+    else:
+        valid_config = False
+
+    if not return_bool and verbose:
+        log.debug("Running config validation...")
+        # (if valid_config is False at this point, it can only be because
+        # 'verbose' was missing or not a bool)
+
+    if not valid_config and return_bool:
+        log.debug("Missing mandatory field 'verbose' in config!")
+
+    # a list with the available rule_types. Can be expanded on easily in the future
+    available_rule_types = ['file_size', 'num_input_datasets', 'records', 'arguments']
+
+    if obj is not None:
+        # in obj, there should always be only 4 categories: tools, default_destination,
+        # users, and verbose
+
+        if 'default_destination' in obj:
+            if isinstance(obj['default_destination'], str):
+                new_config["default_destination"] = obj['default_destination']
+            elif isinstance(obj['default_destination'], dict):
+                if ('priority' in obj['default_destination'] and
+                        isinstance(obj['default_destination']['priority'], dict)):
+                    if 'med' not in obj['default_destination']['priority']:
+                        error = "No default 'med' priority destination!"
+                        if verbose:
+                            log.debug(error)
+                        valid_config = False
+                    else:
+                        for priority in obj['default_destination']['priority']:
+                            if priority in ['low', 'med', 'high']:
+                                if isinstance(
+                                        obj['default_destination']['priority'][priority],
+                                        str):
+                                    new_config['default_destination']['priority'][
+                                        priority] = obj[
+                                            'default_destination']['priority'][priority]
+                                else:
+                                    error = ("No default '" + str(priority) +
+                                             "' priority destination in config!")
+                                    if verbose:
+                                        log.debug(error)
+                                    valid_config = False
+                            else:
+                                error = ("Invalid default priority destination '" +
+                                         str(priority) + "' found in config!")
+                                if verbose:
+                                    log.debug(error)
+                                valid_config = False
+                else:
+                    error = "No default priority destinations specified in config!"
+                    if verbose:
+                        log.debug(error)
+                    valid_config = False
+            else:
+                error = "No global default destination specified in config!"
+                if verbose:
+                    log.debug(error)
+                valid_config = False
+        else:
+            error = "No global default destination specified in config!"
+            if verbose:
+                log.debug(error)
+            valid_config = False
+
+        if 'users' in obj:
+            if isinstance(obj['users'], dict):
+                for user in obj['users']:
+                    curr = obj['users'][user]
+
+                    if isinstance(curr, dict):
+                        if 'priority' in curr and isinstance(curr['priority'], str):
+                            if curr['priority'] in ['low', 'med', 'high']:
+                                new_config['users'][user]['priority'] = curr['priority']
+                            else:
+                                error = ("User '" + user + "', priority is not valid!" +
+                                         " Must be either low, med, or high.")
+                                if verbose:
+                                    log.debug(error)
+                                valid_config = False
+                        else:
+                            error = "User '" + user + "' is missing a priority!"
+                            if verbose:
+                                log.debug(error)
+                            valid_config = False
+                    else:
+                        error = "User '" + user + "' is missing a priority!"
+                        if verbose:
+                            log.debug(error)
+                        valid_config = False
+            else:
+                error = "Users option is not a dictionary!"
+                if verbose:
+                    log.debug(error)
+                valid_config = False
+
+        if 'tools' in obj:
+            for tool in obj['tools']:
+                curr = obj['tools'][tool]
+
+                # This check is to make sure we have a tool name, and not just
+                # rules right away.
+                if not isinstance(curr, list):
+                    curr_tool_rules = []
+
+                    if curr is not None:
+
+                        # in each tool, there should always be only 2 sub-categories:
+                        # default_destination (not mandatory) and rules (mandatory)
+                        if "default_destination" in curr:
+                            if isinstance(curr['default_destination'], str):
+                                new_config['tools'][tool]['default_destination'] = (curr['default_destination'])
+                                tool_has_default = True
+                            elif isinstance(curr['default_destination'], dict):
+                                if ('priority' in curr['default_destination'] and isinstance(curr['default_destination']['priority'], dict)):
+                                    if ('med' not in curr['default_destination']['priority']):
+                                        error = "No default 'med' priority destination "
+                                        error += "for " + str(tool) + "!"
+                                        if verbose:
+                                            log.debug(error)
+                                        valid_config = False
+                                    else:
+                                        for priority in curr['default_destination']['priority']:
+                                            destination = curr['default_destination']['priority'][priority]
+                                            if priority in ['low', 'med', 'high']:
+                                                if isinstance(destination, str):
+                                                    new_config['tools'][tool]['default_destination']['priority'][priority] = destination
+                                                    tool_has_default = True
+                                                else:
+                                                    error = ("No default '" +
+                                                             str(priority) +
+                                                             "' priority destination " +
+                                                             "for " + str(tool) +
+                                                             " in config!")
+                                                    if verbose:
+                                                        log.debug(error)
+                                                    valid_config = False
+                                            else:
+                                                error = ("Invalid default priority " +
+                                                         "destination '" + str(priority) +
+                                                         "' for " + str(tool) +
+                                                         " found in config!")
+                                                if verbose:
+                                                    log.debug(error)
+                                                valid_config = False
+                                else:
+                                    error = "No default priority destinations specified"
+                                    error += " for " + str(tool) + " in config!"
+                                    if verbose:
+                                        log.debug(error)
+                                    valid_config = False
+
+                        if "rules" in curr and isinstance(curr['rules'], list):
+                            # under rules, there should only be a list of rules
+                            curr_tool = curr
+                            counter = 0
+
+                            for rule in curr_tool['rules']:
+                                if "rule_type" in rule:
+                                    if rule['rule_type'] in available_rule_types:
+                                        validated_rule = None
+                                        counter += 1
+
+                                        # if we're only interested in the result of
+                                        # the validation, then only retrieve the
+                                        # result
+                                        if return_bool:
+                                            valid_rule = RuleValidator.validate_rule(
+                                                rule['rule_type'], return_bool,
+                                                rule, counter, tool)
+
+                                        # otherwise, retrieve the processed rule
+                                        else:
+                                            validated_rule = (
+                                                RuleValidator.validate_rule(
+                                                    rule['rule_type'],
+                                                    return_bool,
+                                                    rule, counter, tool))
+
+                                        # if the result we get is False, then
+                                        # indicate that the whole config is invalid
+                                        if not valid_rule:
+                                            valid_config = False
+
+                                        # if we got a rule back that seems to be
+                                        # valid (or was fixable) then append it to
+                                        # list of ready-to-use tools
+                                        if (not return_bool and
+                                                validated_rule is not None):
+                                            curr_tool_rules.append(
+                                                copy.deepcopy(validated_rule))
+
+                                    # if rule['rule_type'] in available_rule_types
+                                    else:
+                                        error = "Unrecognized rule_type '"
+                                        error += rule['rule_type'] + "' "
+                                        error += "found in '" + str(tool) + "'. "
+                                        if not return_bool:
+                                            error += "Ignoring..."
+                                        if verbose:
+                                            log.debug(error)
+                                        valid_config = False
+
+                                # if "rule_type" in rule
+                                else:
+                                    counter += 1
+                                    error = "No rule_type found for rule "
+                                    error += str(counter)
+                                    error += " in '" + str(tool) + "'."
+                                    if verbose:
+                                        log.debug(error)
+                                    valid_config = False
+
+                        # if "rules" in curr and isinstance(curr['rules'], list):
+                        elif not tool_has_default:
+                            valid_config = False
+                            error = "Tool '" + str(tool) + "' does not have rules nor a"
+                            error += " default_destination!"
+                            if verbose:
+                                log.debug(error)
+
+                    # if obj['tools'][tool] is not None:
+                    else:
+                        valid_config = False
+                        error = "Config section for tool '" + str(tool) + "' is blank!"
+                        if verbose:
+                            log.debug(error)
+
+                    if curr_tool_rules:
+                        new_config['tools'][str(tool)]['rules'] = curr_tool_rules
+
+                # if not isinstance(curr, list)
+                else:
+                    error = "Malformed YML; expected job name, "
+                    error += "but found a list instead!"
+                    if verbose:
+                        log.debug(error)
+                    valid_config = False
+
+        # quickly run through categories to detect unrecognized types
+        for category in obj.keys():
+            if not (category == 'verbose' or category == 'tools' or
+                    category == 'default_destination' or category == 'users'):
+                error = "Unrecognized category '" + category
+                error += "' found in config file!"
+                if verbose:
+                    log.debug(error)
+                valid_config = False
+
+    # if obj is not None
+    else:
+        if verbose:
+            log.debug("No (or empty) config file supplied!")
+        valid_config = False
+
+    if not return_bool:
+        if verbose:
+            log.debug("Finished config validation.")
+
+    if return_bool:
+        return valid_config
+
+    else:
+        return new_config
+
+
+def bytes_to_str(size, unit="YB"):
+    '''
+    Uses the binary convention (1024 B = 1 KB), since this method primarily
+    has inputs of bytes for RAM.
+
+    @type size: int
+    @param size: the size in int (bytes) to be converted to str
+
+    @rtype: str
+    @return return_str: the resulting string
+    '''
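+    # Illustrative results (a sketch, assuming the binary convention above):
+    #   bytes_to_str(1048576) -> "1.00 MB"
+    #   bytes_to_str(-1)      -> "Infinity"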
+    # converts size in bytes to most readable unit
+    units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
+    i = 0
+
+    # cast the size; an incoming -1 represents "Infinity" and is handled below
+    try:
+        size_changer = int(size)
+    except ValueError:
+        error = "bytes_to_str passed uncastable non numeric value "
+        raise ValueError(error + str(size))
+
+    try:
+        upto = units.index(unit.strip().upper())
+    except ValueError:
+        upto = len(units) - 1  # unknown unit: convert as far as possible
+
+    while size_changer >= 1024 and i < upto:
+        size_changer = size_changer / 1024.0
+        i += 1
+
+    if size_changer == -1:
+        size_changer = "Infinity"
+        i = 0
+
+    try:
+        return_str = "%.2f %s" % (size_changer, units[i])
+    except TypeError:
+        return_str = "%s" % (size_changer)
+
+    return return_str
+
+
+def str_to_bytes(size):
+    '''
+    Uses the binary convention (1024 B = 1 KB), since this method primarily
+    has inputs of bytes for RAM.
+
+    @type size: str
+    @param size: the size in str to be converted to int (bytes)
+
+    @rtype: int
+    @return curr_size: the resulting size converted from str
+    '''
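+    # Illustrative results:
+    #   str_to_bytes("1 kb")     -> 1024.0  (a float whenever a unit is given)
+    #   str_to_bytes("1024")     -> 1024
+    #   str_to_bytes("Infinity") -> -1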
+    units = ["", "b", "kb", "mb", "gb", "tb", "pb", "eb", "zb", "yb"]
+    curr_size = size
+
+    try:
+        if size.lower() != "infinity":
+            # Get the number
+            try:
+                curr_item = size.strip().split(" ")
+                curr_size = "".join(curr_item)
+
+                curr_size = int(curr_size)
+            except ValueError:
+                curr_item = size.strip().split(" ")
+                curr_unit = curr_item[-1].strip().lower()
+                curr_item = curr_item[0:-1]
+                curr_size = "".join(curr_item)
+
+                try:
+                    curr_size = float(curr_size)
+                except ValueError:
+                    error = "Unable to convert size " + str(size)
+                    raise MalformedYMLException(error)
+
+            # Get the unit and convert to bytes
+            try:
+                pos = units.index(curr_unit)
+                for x in range(pos, 1, -1):
+                    curr_size *= 1024
+            except ValueError:
+                error = "Unable to convert size " + str(size)
+                raise MalformedYMLException(error)
+            except (UnboundLocalError, NameError):
+                pass
+        else:
+            curr_size = -1
+    except AttributeError:
+        # If size is not a string (doesn't have .lower())
+        pass
+
+    return curr_size
+
+
+def importer(test):
+    """
+    Uses a mock Galaxy for testing or the real Galaxy for production
+
+    @type test: bool
+    @param test: True when being run from a test
+    """
+    global JobDestination
+    global JobMappingException
+    if test:
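+        # minimal stand-in that mirrors the interface of galaxy.jobs.JobDestination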
+        class JobDestination(object):
+            def __init__(self, **kwd):
+                self.id = kwd.get('id')
+                self.nativeSpec = kwd.get('params')['nativeSpecification']
+                self.runner = kwd.get('runner')
+        from galaxy.jobs.mapper import JobMappingException
+    else:
+        from galaxy.jobs import JobDestination
+        from galaxy.jobs.mapper import JobMappingException
+
+
+def map_tool_to_destination(
+        job, app, tool, user_email, test=False, path=None):
+    """
+    Dynamically allocate resources
+
+    @param job: galaxy job
+    @param app: current app
+    @param tool: current tool
+
+    @type test: bool
+    @param test: True when running in test mode
+
+    @type path: str
+    @param path: path to tool_destinations.yml
+    """
+    importer(test)
+
+    # set verbose to True by default, just in case
+    # (some tests fail without it)
+    global verbose
+    verbose = True
+    filesize_rule_present = False
+    num_input_datasets_rule_present = False
+    records_rule_present = False
+
+    # Get configuration from tool_destinations.yml
+    if path is None:
+        path = app.config.tool_destinations_config_file
+
+    try:
+        config = parse_yaml(path)
+    except MalformedYMLException as e:
+        raise JobMappingException(e)
+
+    # Get all inputs from tool and databases
+    inp_data = dict([(da.name, da.dataset) for da in job.input_datasets])
+    inp_data.update([(da.name, da.dataset) for da in job.input_library_datasets])
+
+    if config is not None and str(tool.old_id) in config['tools']:
+        if 'rules' in config['tools'][str(tool.old_id)]:
+            for rule in config['tools'][str(tool.old_id)]['rules']:
+                if rule["rule_type"] == "file_size":
+                    filesize_rule_present = True
+
+                if rule["rule_type"] == "num_input_datasets":
+                    num_input_datasets_rule_present = True
+
+                if rule["rule_type"] == "records":
+                    records_rule_present = True
+
+    file_size = 0
+    records = 0
+    num_input_datasets = 0
+
+    if filesize_rule_present or records_rule_present or num_input_datasets_rule_present:
+        # Loop through each input file, adding its size to the total
+        # or counting the records in FASTA inputs
+        for da in inp_data:
+            try:
+                # If the input is a file, check and add the size
+                if inp_data[da] is not None and os.path.isfile(inp_data[da].file_name):
+                    num_input_datasets += 1
+                    if verbose:
+                        message = "Loading file: " + str(da)
+                        message += str(inp_data[da].file_name)
+                        log.debug(message)
+
+                    # Add to records if the file type is fasta
+                    if inp_data[da].ext == "fasta":
+                        if records_rule_present:
+                            inp_db = open(inp_data[da].file_name)
+
+                            # Try to find automatically computed sequences
+                            metadata = inp_data[da].get_metadata()
+
+                            try:
+                                records += int(metadata.get("sequences"))
+                            except (TypeError, KeyError):
+                                for line in inp_db:
+                                    if line[0] == ">":
+                                        records += 1
+                    elif filesize_rule_present:
+                        query_file = str(inp_data[da].file_name)
+                        file_size += os.path.getsize(query_file)
+            except AttributeError:
+                # Otherwise, say that input isn't a file
+                if verbose:
+                    log.debug("Not a file: " + str(inp_data[da]))
+
+        if verbose:
+            if filesize_rule_present:
+                log.debug("Total size: " + bytes_to_str(file_size))
+            if records_rule_present:
+                log.debug("Total amount of records: " + str(records))
+            if num_input_datasets_rule_present:
+                log.debug("Total number of files: " + str(num_input_datasets))
+
+    matched_rule = None
+    user_authorized = None
+    rule_counter = 0
+
+    # For each different rule for the tool that's running
+    fail_message = None
+
+    # set default priority to med
+    default_priority = 'med'
+    priority = default_priority
+
+    if config is not None:
+        # get the users priority
+        if "users" in config:
+            if user_email in config["users"]:
+                priority = config["users"][user_email]["priority"]
+
+        if "default_destination" in config:
+            if isinstance(config['default_destination'], str):
+                destination = config['default_destination']
+            else:
+                if priority in config['default_destination']['priority']:
+                    destination = config['default_destination']['priority'][priority]
+                else:
+                    destination = config['default_destination']['priority'][default_priority]
+            config = config['tools']
+            if str(tool.old_id) in config:
+                if 'rules' in config[str(tool.old_id)]:
+                    for rule in config[str(tool.old_id)]['rules']:
+                        rule_counter += 1
+                        user_authorized = False
+                        if 'users' in rule and isinstance(rule['users'], list):
+                            if user_email in rule['users']:
+                                user_authorized = True
+                        else:
+                            user_authorized = True
+
+                        if user_authorized:
+                            matched = False
+                            if rule["rule_type"] == "file_size":
+
+                                # bounds comparisons
+                                upper_bound = str_to_bytes(rule["upper_bound"])
+                                lower_bound = str_to_bytes(rule["lower_bound"])
+
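+                                # A rule matches when
+                                # lower_bound <= file_size < upper_bound;
+                                # "Infinity" is parsed by str_to_bytes to -1,
+                                # meaning no upper limit.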
+                                if upper_bound == -1:
+                                    if lower_bound <= file_size:
+                                        matched = True
+
+                                else:
+                                    if lower_bound <= file_size < upper_bound:
+                                        matched = True
+
+                            elif rule["rule_type"] == "num_input_datasets":
+
+                                # bounds comparisons
+                                upper_bound = rule["upper_bound"]
+                                lower_bound = rule["lower_bound"]
+
+                                if upper_bound == "Infinity":
+                                    if lower_bound <= num_input_datasets:
+                                        matched = True
+                                else:
+                                    if lower_bound <= num_input_datasets < upper_bound:
+                                        matched = True
+
+                            elif rule["rule_type"] == "records":
+
+                                # bounds comparisons
+                                upper_bound = str_to_bytes(rule["upper_bound"])
+                                lower_bound = str_to_bytes(rule["lower_bound"])
+
+                                if upper_bound == -1:
+                                    if lower_bound <= records:
+                                        matched = True
+
+                                else:
+                                    if lower_bound <= records < upper_bound:
+                                        matched = True
+
+                            elif rule["rule_type"] == "arguments":
+                                options = job.get_param_values(app)
+                                matched = True
+
+                                # check if the args in the config file are available
+                                for arg in rule["arguments"]:
+                                    if arg in options:
+                                        if rule["arguments"][arg] != options[arg]:
+                                            matched = False
+                                            options = "test"
+                                    else:
+                                        matched = False
+                                        if verbose:
+                                            error = "Argument '" + str(arg)
+                                            error = + "' not recognized!"
+                                            log.debug(error)
+
+                            # if we matched a rule
+                            if matched:
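+                                # among all matching rules, keep the one with
+                                # the lowest nice_value (highest priority)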
+                                if (matched_rule is None or rule["nice_value"] < matched_rule["nice_value"]):
+                                    matched_rule = rule
+                        # if user_authorized
+                        else:
+                            if verbose:
+                                error = "User email '" + str(user_email) + "' not "
+                                error += "specified in list of authorized users for "
+                                error += "rule " + str(rule_counter) + " in tool '"
+                                error += str(tool.old_id) + "'! Ignoring rule."
+                                log.debug(error)
+
+            # if str(tool.old_id) in config
+            else:
+                error = "Tool '" + str(tool.old_id) + "' not specified in config. "
+                error += "Using default destination."
+                if verbose:
+                    log.debug(error)
+
+            if matched_rule is None:
+                if (str(tool.old_id) in config and
+                        "default_destination" in config[str(tool.old_id)]):
+                    default_tool_destination = (config[str(tool.old_id)]['default_destination'])
+                    if isinstance(default_tool_destination, str):
+                        destination = default_tool_destination
+                    else:
+                        if priority in default_tool_destination['priority']:
+                            destination = default_tool_destination['priority'][priority]
+                        else:
+                            destination = (default_tool_destination['priority'][default_priority])
+            else:
+                if isinstance(matched_rule["destination"], str):
+                    destination = matched_rule["destination"]
+                else:
+                    if priority in matched_rule["destination"]["priority"]:
+                        destination = matched_rule["destination"]["priority"][priority]
+                    else:
+                        destination = (matched_rule["destination"]["priority"][default_priority])
+
+        # if "default_destination" in config
+        else:
+            destination = "fail"
+            fail_message = "Job '" + str(tool.old_id) + "' failed; "
+            fail_message += "no global default destination specified in config!"
+
+    # if config is not None
+    else:
+        destination = "fail"
+        fail_message = "No config file supplied!"
+
+    if destination == "fail":
+        if fail_message:
+            raise JobMappingException(fail_message)
+        else:
+            raise JobMappingException(matched_rule["fail_message"])
+
+    if config is not None:
+        output = "Running '" + str(tool.old_id) + "' with '"
+        output += destination + "'."
+        log.debug(output)
+
+    return destination
+
+
+if __name__ == '__main__':
+    """
+    This function is responsible for running the app if directly run through the
+    commandline. It offers the ability to specify a config through the commandline
+    for checking whether or not it is a valid config. It's to be run from within Galaxy,
+    assuming it is installed correctly within the proper directories in Galaxy, and it
+    looks for the config file in galaxy/config/. It can also be run with a path pointing
+    to a config file if not being run directly from inside Galaxy install directory.
+    """
+    verbose = True
+
+    parser = argparse.ArgumentParser()
+    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+
+    parser.add_argument(
+        '-c', '--check-config', dest='check_config', nargs='?',
+        help='Use this option to validate tool_destinations.yml.' +
+        ' Optionally, provide the path to the tool_destinations.yml' +
+        ' that you would like to check. Default: galaxy/config/tool_destinations.yml')
+
+    parser.add_argument(
+        '-V', '--version', action='version', version="%(prog)s " + __version__)
+
+    args = parser.parse_args()
+
+    # if run with no arguments, display the help message
+    if len(sys.argv) == 1:
+        parser.print_help()
+        sys.exit(1)
+
+    if args.check_config:
+        valid_config = parse_yaml(path=args.check_config, return_bool=True)
+
+    else:
+        valid_config = parse_yaml(path="/config/tool_destinations.yml", return_bool=True)
+
+    if valid_config:
+        print("Configuration is valid!")
+    else:
+        print("Errors detected; config not valid!")
diff --git a/lib/galaxy/jobs/error_level.py b/lib/galaxy/jobs/error_level.py
new file mode 100644
index 0000000..5b4d634
--- /dev/null
+++ b/lib/galaxy/jobs/error_level.py
@@ -0,0 +1,25 @@
+
+
+# These determine stdio-based error levels from matching on regular expressions
+# and exit codes. They are meant to be used comparatively, such as showing
+# that warning < fatal. This is really meant to just be an enum.
+class StdioErrorLevel( object ):
+    NO_ERROR = 0
+    LOG = 1
+    WARNING = 2
+    FATAL = 3
+    MAX = 3
+    descs = {
+        NO_ERROR: 'No error',
+        LOG: 'Log',
+        WARNING: 'Warning',
+        FATAL: 'Fatal error',
+    }
+
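+    # e.g. StdioErrorLevel.desc( StdioErrorLevel.WARNING ) returns 'Warning'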
+    @staticmethod
+    def desc( error_level ):
+        err_msg = "Unknown error"
+        if ( error_level > 0 and
+             error_level <= StdioErrorLevel.MAX ):
+            err_msg = StdioErrorLevel.descs[ error_level ]
+        return err_msg
diff --git a/lib/galaxy/jobs/handler.py b/lib/galaxy/jobs/handler.py
new file mode 100644
index 0000000..b408dfd
--- /dev/null
+++ b/lib/galaxy/jobs/handler.py
@@ -0,0 +1,807 @@
+"""
+Galaxy job handler, prepares, runs, tracks, and finishes Galaxy jobs
+"""
+
+import os
+import time
+import logging
+import threading
+from Queue import Queue, Empty
+
+from sqlalchemy.sql.expression import and_, or_, select, func, true, null
+
+from galaxy import model
+from galaxy.util.sleeper import Sleeper
+from galaxy.jobs import JobWrapper, TaskWrapper, JobDestination
+from galaxy.jobs.mapper import JobNotReadyException
+
+log = logging.getLogger( __name__ )
+
+# States for running a job. These are NOT the same as data states
+JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED, JOB_USER_OVER_QUOTA = 'wait', 'error', 'input_error', 'input_deleted', 'ready', 'deleted', 'admin_deleted', 'user_over_quota'
+DEFAULT_JOB_PUT_FAILURE_MESSAGE = 'Unable to run job due to a misconfiguration of the Galaxy job running system.  Please contact a site administrator.'
+
+
+class JobHandler( object ):
+    """
+    Handle the preparation, running, tracking, and finishing of jobs
+    """
+    def __init__( self, app ):
+        self.app = app
+        # The dispatcher launches the underlying job runners
+        self.dispatcher = DefaultJobDispatcher( app )
+        # Queues for starting and stopping jobs
+        self.job_queue = JobHandlerQueue( app, self.dispatcher )
+        self.job_stop_queue = JobHandlerStopQueue( app, self.dispatcher )
+
+    def start( self ):
+        self.job_queue.start()
+
+    def shutdown( self ):
+        self.job_queue.shutdown()
+        self.job_stop_queue.shutdown()
+
+
+class JobHandlerQueue( object ):
+    """
+    Job Handler's internal queue; this is what actually implements waiting for
+    jobs to become runnable and dispatching them to a JobRunner.
+    """
+    STOP_SIGNAL = object()
+
+    def __init__( self, app, dispatcher ):
+        """Initializes the Job Handler Queue, creates (unstarted) monitoring thread"""
+        self.app = app
+        self.dispatcher = dispatcher
+
+        self.sa_session = app.model.context
+        self.track_jobs_in_database = self.app.config.track_jobs_in_database
+
+        # Initialize structures for handling job limits
+        self.__clear_job_count()
+
+        # Keep track of the pid that started the job manager, only it
+        # has valid threads
+        self.parent_pid = os.getpid()
+        # Contains new jobs. Note this is not used if track_jobs_in_database is True
+        self.queue = Queue()
+        # Contains jobs that are waiting (only use from monitor thread)
+        self.waiting_jobs = []
+        # Contains wrappers of jobs that are limited or ready (so they aren't created unnecessarily/multiple times)
+        self.job_wrappers = {}
+        # Helper for interruptable sleep
+        self.sleeper = Sleeper()
+        self.running = True
+        self.monitor_thread = threading.Thread( name="JobHandlerQueue.monitor_thread", target=self.__monitor )
+        self.monitor_thread.setDaemon( True )
+
+    def start( self ):
+        """
+        Starts the JobHandler's thread after checking for any unhandled jobs.
+        """
+        # Recover jobs at startup
+        self.__check_jobs_at_startup()
+        # Start the queue
+        self.monitor_thread.start()
+        log.info( "job handler queue started" )
+
+    def job_wrapper( self, job, use_persisted_destination=False ):
+        return JobWrapper( job, self, use_persisted_destination=use_persisted_destination )
+
+    def job_pair_for_id( self, id ):
+        job = self.sa_session.query( model.Job ).get( id )
+        return job, self.job_wrapper( job, use_persisted_destination=True )
+
+    def __check_jobs_at_startup( self ):
+        """
+        Checks all jobs that are in the 'new', 'queued' or 'running' state in
+        the database and requeues or cleans up as necessary.  Only run as the
+        job handler starts.
+        If user activation is enforced, the jobs of inactive users are filtered out.
+        """
+        jobs_at_startup = []
+        if self.track_jobs_in_database:
+            in_list = ( model.Job.states.QUEUED,
+                        model.Job.states.RUNNING )
+        else:
+            in_list = ( model.Job.states.NEW,
+                        model.Job.states.QUEUED,
+                        model.Job.states.RUNNING )
+        if self.app.config.user_activation_on:
+            jobs_at_startup = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+                .outerjoin( model.User ) \
+                .filter( model.Job.state.in_( in_list ) &
+                         ( model.Job.handler == self.app.config.server_name ) &
+                         or_( ( model.Job.user_id == null() ), ( model.User.active == true() ) ) ).all()
+        else:
+            jobs_at_startup = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+                .filter( model.Job.state.in_( in_list ) &
+                         ( model.Job.handler == self.app.config.server_name ) ).all()
+
+        for job in jobs_at_startup:
+            if not self.app.toolbox.has_tool( job.tool_id, job.tool_version, exact=True ):
+                log.warning( "(%s) Tool '%s' removed from tool config, unable to recover job" % ( job.id, job.tool_id ) )
+                self.job_wrapper( job ).fail( 'This tool was disabled before the job completed.  Please contact your Galaxy administrator.' )
+            elif job.job_runner_name is not None and job.job_runner_external_id is None:
+                # This could happen during certain revisions of Galaxy where a runner URL was persisted before the job was dispatched to a runner.
+                log.debug( "(%s) Job runner assigned but no external ID recorded, adding to the job handler queue" % job.id )
+                job.job_runner_name = None
+                if self.track_jobs_in_database:
+                    job.set_state( model.Job.states.NEW )
+                else:
+                    self.queue.put( ( job.id, job.tool_id ) )
+            elif job.job_runner_name is not None and job.job_runner_external_id is not None and job.destination_id is None:
+                # This is the first start after upgrading from URLs to destinations, convert the URL to a destination and persist
+                job_wrapper = self.job_wrapper( job )
+                job_destination = self.dispatcher.url_to_destination(job.job_runner_name)
+                if job_destination.id is None:
+                    job_destination.id = 'legacy_url'
+                job_wrapper.set_job_destination(job_destination, job.job_runner_external_id)
+                self.dispatcher.recover( job, job_wrapper )
+                log.info('(%s) Converted job from a URL to a destination and recovered' % (job.id))
+            elif job.job_runner_name is None:
+                # Never (fully) dispatched
+                log.debug( "(%s) No job runner assigned and job still in '%s' state, adding to the job handler queue" % ( job.id, job.state ) )
+                if self.track_jobs_in_database:
+                    job.set_state( model.Job.states.NEW )
+                else:
+                    self.queue.put( ( job.id, job.tool_id ) )
+            else:
+                # Already dispatched and running
+                job_wrapper = self.job_wrapper( job )
+                # Use the persisted destination as its params may differ from
+                # what's in the job_conf xml
+                job_destination = JobDestination(id=job.destination_id, runner=job.job_runner_name, params=job.destination_params)
+                # resubmits are not persisted (it's a good thing) so they
+                # should be added back to the in-memory destination on startup
+                try:
+                    config_job_destination = self.app.job_config.get_destination( job.destination_id )
+                    job_destination.resubmit = config_job_destination.resubmit
+                except KeyError:
+                    log.warning( '(%s) Recovered destination id (%s) does not exist in job config (but this may be normal in the case of a dynamically generated destination)', job.id, job.destination_id )
+                job_wrapper.job_runner_mapper.cached_job_destination = job_destination
+                self.dispatcher.recover( job, job_wrapper )
+        if self.sa_session.dirty:
+            self.sa_session.flush()
+
+    def __monitor( self ):
+        """
+        Continually iterate the waiting jobs, checking if each is ready to
+        run and dispatching if so.
+        """
+        while self.running:
+            try:
+                # If jobs are locked, there's nothing to monitor and we skip
+                # to the sleep.
+                if not self.app.job_manager.job_lock:
+                    self.__monitor_step()
+            except Exception:
+                log.exception( "Exception in monitor_step" )
+            # Sleep
+            self.sleeper.sleep( 1 )
+
+    def __monitor_step( self ):
+        """
+        Called repeatedly by `monitor` to process waiting jobs. Gets any new
+        jobs (either from the database or from its own queue), then iterates
+        over all new and waiting jobs to check the state of the jobs each
+        depends on. If the job has dependencies that have not finished, it
+        goes to the waiting queue. If the job has dependencies with errors,
+        it is marked as having errors and removed from the queue. If the job
+        belongs to an inactive user it is ignored.
+        Otherwise, the job is dispatched.
+        """
+        # Pull all new jobs from the queue at once
+        jobs_to_check = []
+        resubmit_jobs = []
+        if self.track_jobs_in_database:
+            # Clear the session so we get fresh states for job and all datasets
+            self.sa_session.expunge_all()
+            # Fetch all new jobs
+            hda_not_ready = self.sa_session.query(model.Job.id).enable_eagerloads(False) \
+                .join(model.JobToInputDatasetAssociation) \
+                .join(model.HistoryDatasetAssociation) \
+                .join(model.Dataset) \
+                .filter(and_( (model.Job.state == model.Job.states.NEW ),
+                              or_( ( model.HistoryDatasetAssociation._state == model.HistoryDatasetAssociation.states.FAILED_METADATA ),
+                                   ( model.HistoryDatasetAssociation.deleted == true() ),
+                                   ( model.Dataset.state != model.Dataset.states.OK ),
+                                   ( model.Dataset.deleted == true() ) ) ) ).subquery()
+            ldda_not_ready = self.sa_session.query(model.Job.id).enable_eagerloads(False) \
+                .join(model.JobToInputLibraryDatasetAssociation) \
+                .join(model.LibraryDatasetDatasetAssociation) \
+                .join(model.Dataset) \
+                .filter(and_((model.Job.state == model.Job.states.NEW),
+                        or_((model.LibraryDatasetDatasetAssociation._state != null()),
+                            (model.LibraryDatasetDatasetAssociation.deleted == true()),
+                            (model.Dataset.state != model.Dataset.states.OK),
+                            (model.Dataset.deleted == true())))).subquery()
+            if self.app.config.user_activation_on:
+                jobs_to_check = self.sa_session.query(model.Job).enable_eagerloads(False) \
+                    .outerjoin( model.User ) \
+                    .filter(and_((model.Job.state == model.Job.states.NEW),
+                                 or_((model.Job.user_id == null()), (model.User.active == true())),
+                                 (model.Job.handler == self.app.config.server_name),
+                                 ~model.Job.table.c.id.in_(hda_not_ready),
+                                 ~model.Job.table.c.id.in_(ldda_not_ready))) \
+                    .order_by(model.Job.id).all()
+            else:
+                jobs_to_check = self.sa_session.query(model.Job).enable_eagerloads(False) \
+                    .filter(and_((model.Job.state == model.Job.states.NEW),
+                                 (model.Job.handler == self.app.config.server_name),
+                                 ~model.Job.table.c.id.in_(hda_not_ready),
+                                 ~model.Job.table.c.id.in_(ldda_not_ready))) \
+                    .order_by(model.Job.id).all()
+            # Fetch all "resubmit" jobs
+            resubmit_jobs = self.sa_session.query(model.Job).enable_eagerloads(False) \
+                .filter(and_((model.Job.state == model.Job.states.RESUBMITTED),
+                             (model.Job.handler == self.app.config.server_name))) \
+                .order_by(model.Job.id).all()
+        else:
+            # Get job objects and append to watch queue for any which were
+            # previously waiting
+            for job_id in self.waiting_jobs:
+                jobs_to_check.append( self.sa_session.query( model.Job ).get( job_id ) )
+            try:
+                while 1:
+                    message = self.queue.get_nowait()
+                    if message is self.STOP_SIGNAL:
+                        return
+                    # Unpack the message
+                    job_id, tool_id = message
+                    # Get the job object and append to watch queue
+                    jobs_to_check.append( self.sa_session.query( model.Job ).get( job_id ) )
+            except Empty:
+                pass
+        # Ensure that we get new job counts on each iteration
+        self.__clear_job_count()
+        # Check resubmit jobs first so that limits of new jobs will still be enforced
+        for job in resubmit_jobs:
+            log.debug( '(%s) Job was resubmitted and is being dispatched immediately', job.id )
+            # Reassemble resubmit job destination from persisted value
+            jw = self.job_wrapper( job )
+            jw.job_runner_mapper.cached_job_destination = JobDestination( id=job.destination_id, runner=job.job_runner_name, params=job.destination_params )
+            self.increase_running_job_count(job.user_id, jw.job_destination.id)
+            self.dispatcher.put( jw )
+        # Iterate over new and waiting jobs and look for any that are
+        # ready to run
+        new_waiting_jobs = []
+        for job in jobs_to_check:
+            try:
+                # Check the job's dependencies, requeue if they're not done.
+                # Some of these states will only happen when using the in-memory job queue
+                job_state = self.__check_job_state( job )
+                if job_state == JOB_WAIT:
+                    new_waiting_jobs.append( job.id )
+                elif job_state == JOB_INPUT_ERROR:
+                    log.info( "(%d) Job unable to run: one or more inputs in error state" % job.id )
+                elif job_state == JOB_INPUT_DELETED:
+                    log.info( "(%d) Job unable to run: one or more inputs deleted" % job.id )
+                elif job_state == JOB_READY:
+                    self.dispatcher.put( self.job_wrappers.pop( job.id ) )
+                    log.info( "(%d) Job dispatched" % job.id )
+                elif job_state == JOB_DELETED:
+                    log.info( "(%d) Job deleted by user while still queued" % job.id )
+                elif job_state == JOB_ADMIN_DELETED:
+                    log.info( "(%d) Job deleted by admin while still queued" % job.id )
+                elif job_state == JOB_USER_OVER_QUOTA:
+                    log.info( "(%d) User (%s) is over quota: job paused" % ( job.id, job.user_id ) )
+                    job.set_state( model.Job.states.PAUSED )
+                    for dataset_assoc in job.output_datasets + job.output_library_datasets:
+                        dataset_assoc.dataset.dataset.state = model.Dataset.states.PAUSED
+                        dataset_assoc.dataset.info = "Execution of this dataset's job is paused because you were over your disk quota at the time it was ready to run"
+                        self.sa_session.add( dataset_assoc.dataset.dataset )
+                    self.sa_session.add( job )
+                elif job_state == JOB_ERROR:
+                    log.error( "(%d) Error checking job readiness" % job.id )
+                else:
+                    log.error( "(%d) Job in unknown state '%s'" % ( job.id, job_state ) )
+                    new_waiting_jobs.append( job.id )
+            except Exception:
+                log.exception( "failure running job %d" % job.id )
+        # Update the waiting list
+        if not self.track_jobs_in_database:
+            self.waiting_jobs = new_waiting_jobs
+        # Remove cached wrappers for any jobs that are no longer being tracked
+        for id in self.job_wrappers.keys():
+            if id not in new_waiting_jobs:
+                del self.job_wrappers[id]
+        # Flush, if we updated the state
+        self.sa_session.flush()
+        # Done with the session
+        self.sa_session.remove()
+
+    def __check_job_state( self, job ):
+        """
+        Check if a job is ready to run by verifying that each of its input
+        datasets is ready (specifically in the OK state). If any input dataset
+        has an error, fail the job and return JOB_INPUT_ERROR. If any input
+        dataset is deleted, fail the job and return JOB_INPUT_DELETED.  If all
+        input datasets are in OK state, return JOB_READY indicating that the
+        job can be dispatched. Otherwise, return JOB_WAIT indicating that input
+        datasets are still being prepared.
+        """
+        if not self.track_jobs_in_database:
+            in_memory_not_ready_state = self.__verify_in_memory_job_inputs( job )
+            if in_memory_not_ready_state:
+                return in_memory_not_ready_state
+
+        # Else, if tracking in the database, job.state is guaranteed to be NEW and
+        # the inputs are guaranteed to be OK.
+
+        # Create the job wrapper so that the destination can be set
+        job_id = job.id
+        job_wrapper = self.job_wrappers.get( job_id, None )
+        if not job_wrapper:
+            job_wrapper = self.job_wrapper( job )
+            self.job_wrappers[ job_id ] = job_wrapper
+
+        # If state == JOB_READY, assume job_destination also set - otherwise
+        # in case of various error or cancelled states do not assume
+        # destination has been set.
+        state, job_destination = self.__verify_job_ready( job, job_wrapper )
+
+        if state == JOB_READY:
+            # PASS.  increase usage by one job (if caching) so that multiple jobs aren't dispatched on this queue iteration
+            self.increase_running_job_count(job.user_id, job_destination.id )
+        return state
+
+    def __verify_job_ready( self, job, job_wrapper ):
+        """ Compute job destination and verify job is ready at that
+        destination by checking job limits and quota. If this method
+        returns a job state of JOB_READY, it MUST also return a job
+        destination.
+        """
+        job_destination = None
+        try:
+            assert job_wrapper.tool is not None, 'This tool was disabled before the job completed.  Please contact your Galaxy administrator.'
+            # Cause the job_destination to be set and cached by the mapper
+            job_destination = job_wrapper.job_destination
+        except AssertionError as e:
+            log.warning( "(%s) Tool '%s' removed from tool config, unable to run job" % ( job.id, job.tool_id ) )
+            job_wrapper.fail( e )
+            return JOB_ERROR, job_destination
+        except JobNotReadyException as e:
+            job_state = e.job_state or JOB_WAIT
+            return job_state, None
+        except Exception as e:
+            failure_message = getattr( e, 'failure_message', DEFAULT_JOB_PUT_FAILURE_MESSAGE )
+            if failure_message == DEFAULT_JOB_PUT_FAILURE_MESSAGE:
+                log.exception( 'Failed to generate job destination' )
+            else:
+                log.debug( "Intentionally failing job with message (%s)" % failure_message )
+            job_wrapper.fail( failure_message )
+            return JOB_ERROR, job_destination
+        # job is ready to run, check limits
+        # TODO: these checks should be refactored to minimize duplication and made more modular/pluggable
+        state = self.__check_destination_jobs( job, job_wrapper )
+        if state == JOB_READY:
+            state = self.__check_user_jobs( job, job_wrapper )
+        if state == JOB_READY and self.app.config.enable_quotas:
+            quota = self.app.quota_agent.get_quota( job.user )
+            if quota is not None:
+                try:
+                    usage = self.app.quota_agent.get_usage( user=job.user, history=job.history )
+                    if usage > quota:
+                        return JOB_USER_OVER_QUOTA, job_destination
+                except AssertionError:
+                    pass  # No history, should not happen with an anon user
+        return state, job_destination
+
+    def __verify_in_memory_job_inputs( self, job ):
+        """ Perform the same checks that happen via SQL for in-memory managed
+        jobs.
+        """
+        if job.state == model.Job.states.DELETED:
+            return JOB_DELETED
+        elif job.state == model.Job.states.ERROR:
+            return JOB_ADMIN_DELETED
+        for dataset_assoc in job.input_datasets + job.input_library_datasets:
+            idata = dataset_assoc.dataset
+            if not idata:
+                continue
+            # don't run jobs for which the input dataset was deleted
+            if idata.deleted:
+                self.job_wrappers.pop(job.id, self.job_wrapper( job )).fail( "input data %s (file: %s) was deleted before the job started" % ( idata.hid, idata.file_name ) )
+                return JOB_INPUT_DELETED
+            # an error in the input data causes us to bail immediately
+            elif idata.state == idata.states.ERROR:
+                self.job_wrappers.pop(job.id, self.job_wrapper( job )).fail( "input data %s is in error state" % ( idata.hid ) )
+                return JOB_INPUT_ERROR
+            elif idata.state == idata.states.FAILED_METADATA:
+                self.job_wrappers.pop(job.id, self.job_wrapper( job )).fail( "input data %s failed to properly set metadata" % ( idata.hid ) )
+                return JOB_INPUT_ERROR
+            elif idata.state != idata.states.OK and not ( idata.state == idata.states.SETTING_METADATA and job.tool_id is not None and job.tool_id == self.app.datatypes_registry.set_external_metadata_tool.id ):
+                # need to requeue
+                return JOB_WAIT
+
+        # All inputs ready to go.
+        return None
+
+    def __clear_job_count( self ):
+        self.user_job_count = None
+        self.user_job_count_per_destination = None
+        self.total_job_count_per_destination = None
+
+    def get_user_job_count(self, user_id):
+        self.__cache_user_job_count()
+        # This could have been incremented by a previous job dispatched on this iteration, even if we're not caching
+        rval = self.user_job_count.get(user_id, 0)
+        if not self.app.config.cache_user_job_count:
+            result = self.sa_session.execute(select([func.count(model.Job.table.c.id)])
+                                             .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED,
+                                                         model.Job.states.RUNNING,
+                                                         model.Job.states.RESUBMITTED)),
+                                                         (model.Job.table.c.user_id == user_id))))
+            for row in result:
+                # there should only be one row
+                rval += row[0]
+        return rval
+
+    def __cache_user_job_count( self ):
+        # Cache the job count if necessary
+        if self.user_job_count is None and self.app.config.cache_user_job_count:
+            self.user_job_count = {}
+            query = self.sa_session.execute(select([model.Job.table.c.user_id, func.count(model.Job.table.c.user_id)])
+                                            .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED,
+                                                                                     model.Job.states.RUNNING,
+                                                                                     model.Job.states.RESUBMITTED)),
+                                                        (model.Job.table.c.user_id != null())))
+                                            .group_by(model.Job.table.c.user_id))
+            for row in query:
+                self.user_job_count[row[0]] = row[1]
+        elif self.user_job_count is None:
+            self.user_job_count = {}
+
+    def get_user_job_count_per_destination(self, user_id):
+        self.__cache_user_job_count_per_destination()
+        cached = self.user_job_count_per_destination.get(user_id, {})
+        if self.app.config.cache_user_job_count:
+            rval = cached
+        else:
+            # The cached count is still used even when we're not caching; it is
+            # incremented when a job is run by this handler to ensure that
+            # multiple jobs can't get past the limits in one iteration of the
+            # queue.
+            rval = {}
+            rval.update(cached)
+            result = self.sa_session.execute(select([model.Job.table.c.destination_id, func.count(model.Job.table.c.destination_id).label('job_count')])
+                                             .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED, model.Job.states.RUNNING)), (model.Job.table.c.user_id == user_id)))
+                                             .group_by(model.Job.table.c.destination_id))
+            for row in result:
+                # Add the count from the database to the cached count
+                rval[row['destination_id']] = rval.get(row['destination_id'], 0) + row['job_count']
+        return rval
+
+    def __cache_user_job_count_per_destination(self):
+        # Cache the job count if necessary
+        if self.user_job_count_per_destination is None and self.app.config.cache_user_job_count:
+            self.user_job_count_per_destination = {}
+            result = self.sa_session.execute(select([model.Job.table.c.user_id, model.Job.table.c.destination_id, func.count(model.Job.table.c.user_id).label('job_count')])
+                                             .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED, model.Job.states.RUNNING))))
+                                             .group_by(model.Job.table.c.user_id, model.Job.table.c.destination_id))
+            for row in result:
+                if row['user_id'] not in self.user_job_count_per_destination:
+                    self.user_job_count_per_destination[row['user_id']] = {}
+                self.user_job_count_per_destination[row['user_id']][row['destination_id']] = row['job_count']
+        elif self.user_job_count_per_destination is None:
+            self.user_job_count_per_destination = {}
+
+    def increase_running_job_count(self, user_id, destination_id):
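+        # Optimistically bump the in-memory counts so that per-user and
+        # per-destination limits hold across multiple jobs dispatched in a
+        # single monitor iteration, before the database reflects them.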
+        if self.app.job_config.limits.registered_user_concurrent_jobs or \
+           self.app.job_config.limits.anonymous_user_concurrent_jobs or \
+           self.app.job_config.limits.destination_user_concurrent_jobs:
+            if self.user_job_count is None:
+                self.user_job_count = {}
+            if self.user_job_count_per_destination is None:
+                self.user_job_count_per_destination = {}
+            self.user_job_count[user_id] = self.user_job_count.get(user_id, 0) + 1
+            if user_id not in self.user_job_count_per_destination:
+                self.user_job_count_per_destination[user_id] = {}
+            self.user_job_count_per_destination[user_id][destination_id] = self.user_job_count_per_destination[user_id].get(destination_id, 0) + 1
+        if self.app.job_config.limits.destination_total_concurrent_jobs:
+            if self.total_job_count_per_destination is None:
+                self.total_job_count_per_destination = {}
+            self.total_job_count_per_destination[destination_id] = self.total_job_count_per_destination.get(destination_id, 0) + 1
+
+    def __check_user_jobs( self, job, job_wrapper ):
+        # TODO: Update output datasets' _state = LIMITED or some such new
+        # state, so the UI can reflect what jobs are waiting due to concurrency
+        # limits
+        if job.user:
+            # Check the hard limit first
+            if self.app.job_config.limits.registered_user_concurrent_jobs:
+                count = self.get_user_job_count(job.user_id)
+                # Check the user's number of dispatched jobs against the overall limit
+                if count >= self.app.job_config.limits.registered_user_concurrent_jobs:
+                    return JOB_WAIT
+            # If we pass the hard limit, also check the per-destination count
+            id = job_wrapper.job_destination.id
+            count_per_id = self.get_user_job_count_per_destination(job.user_id)
+            if id in self.app.job_config.limits.destination_user_concurrent_jobs:
+                count = count_per_id.get(id, 0)
+                # Check the user's number of dispatched jobs in the assigned destination id against the limit for that id
+                if count >= self.app.job_config.limits.destination_user_concurrent_jobs[id]:
+                    return JOB_WAIT
+            # If we pass the destination limit (if there is one), also check limits on any tags (if any)
+            if job_wrapper.job_destination.tags:
+                for tag in job_wrapper.job_destination.tags:
+                    # Check each tag for this job's destination
+                    if tag in self.app.job_config.limits.destination_user_concurrent_jobs:
+                        # Only if there's a limit defined for this tag
+                        count = 0
+                        for id in [ d.id for d in self.app.job_config.get_destinations(tag) ]:
+                            # Add up the aggregate job total for this tag
+                            count += count_per_id.get(id, 0)
+                        if count >= self.app.job_config.limits.destination_user_concurrent_jobs[tag]:
+                            return JOB_WAIT
+        elif job.galaxy_session:
+            # Anonymous users only get the hard limit
+            if self.app.job_config.limits.anonymous_user_concurrent_jobs:
+                count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+                            .filter( and_( model.Job.session_id == job.galaxy_session.id,
+                                           or_( model.Job.state == model.Job.states.RUNNING,
+                                                model.Job.state == model.Job.states.QUEUED ) ) ).count()
+                if count >= self.app.job_config.limits.anonymous_user_concurrent_jobs:
+                    return JOB_WAIT
+        else:
+            log.warning( 'Job %s is not associated with a user or session so job concurrency limit cannot be checked.' % job.id )
+        return JOB_READY
+
+    def __cache_total_job_count_per_destination( self ):
+        # Cache the job count if necessary
+        if self.total_job_count_per_destination is None:
+            self.total_job_count_per_destination = {}
+            result = self.sa_session.execute(select([model.Job.table.c.destination_id, func.count(model.Job.table.c.destination_id).label('job_count')])
+                                             .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED, model.Job.states.RUNNING))))
+                                             .group_by(model.Job.table.c.destination_id))
+            for row in result:
+                self.total_job_count_per_destination[row['destination_id']] = row['job_count']
+
+    def get_total_job_count_per_destination(self):
+        self.__cache_total_job_count_per_destination()
+        # Always use caching (at worst a job will have to wait one iteration,
+        # and this would be more fair anyway as it ensures FIFO scheduling,
+        # insofar as FIFO would be fair...)
+        return self.total_job_count_per_destination
+
+    def __check_destination_jobs( self, job, job_wrapper ):
+        if self.app.job_config.limits.destination_total_concurrent_jobs:
+            id = job_wrapper.job_destination.id
+            count_per_id = self.get_total_job_count_per_destination()
+            if id in self.app.job_config.limits.destination_total_concurrent_jobs:
+                count = count_per_id.get(id, 0)
+                # Check the number of dispatched jobs in the assigned destination id against the limit for that id
+                if count >= self.app.job_config.limits.destination_total_concurrent_jobs[id]:
+                    return JOB_WAIT
+            # If we pass the destination limit (if there is one), also check limits on any tags (if any)
+            if job_wrapper.job_destination.tags:
+                for tag in job_wrapper.job_destination.tags:
+                    # Check each tag for this job's destination
+                    if tag in self.app.job_config.limits.destination_total_concurrent_jobs:
+                        # Only if there's a limit defined for this tag
+                        count = 0
+                        for id in [ d.id for d in self.app.job_config.get_destinations(tag) ]:
+                            # Add up the aggregate job total for this tag
+                            count += count_per_id.get(id, 0)
+                        if count >= self.app.job_config.limits.destination_total_concurrent_jobs[tag]:
+                            return JOB_WAIT
+        return JOB_READY
+
+    def put( self, job_id, tool_id ):
+        """Add a job to the queue (by job identifier)"""
+        if not self.track_jobs_in_database:
+            self.queue.put( ( job_id, tool_id ) )
+            self.sleeper.wake()
+
+    def shutdown( self ):
+        """Attempts to gracefully shut down the worker thread"""
+        if self.parent_pid != os.getpid():
+            # We're not the real job queue, do nothing
+            return
+        else:
+            log.info( "sending stop signal to worker thread" )
+            self.running = False
+            if not self.app.config.track_jobs_in_database:
+                self.queue.put( self.STOP_SIGNAL )
+            self.sleeper.wake()
+            log.info( "job handler queue stopped" )
+            self.dispatcher.shutdown()
+
+
+class JobHandlerStopQueue( object ):
+    """
+    A queue for jobs which need to be terminated prematurely.
+    """
+    STOP_SIGNAL = object()
+
+    def __init__( self, app, dispatcher ):
+        self.app = app
+        self.dispatcher = dispatcher
+
+        self.sa_session = app.model.context
+
+        # Keep track of the pid that started the job manager, only it
+        # has valid threads
+        self.parent_pid = os.getpid()
+        # Contains new jobs. Note this is not used if track_jobs_in_database is True
+        self.queue = Queue()
+
+        # Contains jobs that are waiting (only use from monitor thread)
+        self.waiting = []
+
+        # Helper for interruptable sleep
+        self.sleeper = Sleeper()
+        self.running = True
+        self.monitor_thread = threading.Thread( name="JobHandlerStopQueue.monitor_thread", target=self.monitor )
+        self.monitor_thread.setDaemon( True )
+        self.monitor_thread.start()
+        log.info( "job handler stop queue started" )
+
+    def monitor( self ):
+        """
+        Continually iterate the waiting jobs, stopping any that are found.
+        """
+        # HACK: Delay until after forking, we need a way to do post fork notification!!!
+        time.sleep( 10 )
+        while self.running:
+            try:
+                self.monitor_step()
+            except Exception:
+                log.exception( "Exception in monitor_step" )
+            # Sleep
+            self.sleeper.sleep( 1 )
+
+    def monitor_step( self ):
+        """
+        Called repeatedly by `monitor` to stop jobs.
+        """
+        # Pull all new jobs from the queue at once
+        jobs_to_check = []
+        if self.app.config.track_jobs_in_database:
+            # Clear the session so we get fresh states for job and all datasets
+            self.sa_session.expunge_all()
+            # Fetch all new jobs
+            newly_deleted_jobs = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+                                     .filter( ( model.Job.state == model.Job.states.DELETED_NEW ) &
+                                              ( model.Job.handler == self.app.config.server_name ) ).all()
+            for job in newly_deleted_jobs:
+                jobs_to_check.append( ( job, job.stderr ) )
+        # Also pull from the queue (in the case of administratively stopped jobs)
+        try:
+            while 1:
+                message = self.queue.get_nowait()
+                if message is self.STOP_SIGNAL:
+                    return
+                # Unpack the message
+                job_id, error_msg = message
+                # Get the job object and append to watch queue
+                jobs_to_check.append( ( self.sa_session.query( model.Job ).get( job_id ), error_msg ) )
+        except Empty:
+            pass
+        for job, error_msg in jobs_to_check:
+            if ( job.state not in
+                    ( job.states.DELETED_NEW,
+                      job.states.DELETED ) and
+                    job.finished ):
+                # terminated before it got here
+                log.debug('Job %s already finished, not deleting or stopping', job.id)
+                continue
+            final_state = job.states.DELETED
+            if error_msg is not None:
+                final_state = job.states.ERROR
+                job.info = error_msg
+            job.set_final_state( final_state )
+            self.sa_session.add( job )
+            self.sa_session.flush()
+            if job.job_runner_name is not None:
+                # tell the dispatcher to stop the job
+                self.dispatcher.stop( job )
+
+    def put( self, job_id, error_msg=None ):
+        if not self.app.config.track_jobs_in_database:
+            self.queue.put( ( job_id, error_msg ) )
+
+    def shutdown( self ):
+        """Attempts to gracefully shut down the worker thread"""
+        if self.parent_pid != os.getpid():
+            # We're not the real job queue, do nothing
+            return
+        else:
+            log.info( "sending stop signal to worker thread" )
+            self.running = False
+            if not self.app.config.track_jobs_in_database:
+                self.queue.put( self.STOP_SIGNAL )
+            self.sleeper.wake()
+            log.info( "job handler stop queue stopped" )
+
+
+class DefaultJobDispatcher( object ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.job_runners = self.app.job_config.get_job_runner_plugins( self.app.config.server_name )
+        # Once plugins are loaded, all job destinations that were created from
+        # URLs can have their URL params converted to the destination's param
+        # dict by the plugin.
+        self.app.job_config.convert_legacy_destinations(self.job_runners)
+        log.debug( "Loaded job runners plugins: " + ':'.join( self.job_runners.keys() ) )
+
+    def __get_runner_name( self, job_wrapper ):
+        if job_wrapper.can_split():
+            runner_name = "tasks"
+        else:
+            runner_name = job_wrapper.job_destination.runner
+        return runner_name
+
+    def url_to_destination( self, url ):
+        """This is used by the runner mapper (a.k.a. dynamic runner) and
+        recovery methods to have runners convert URLs to destinations.
+
+        New-style runner plugin IDs must match the URL's scheme for this to work.
+        """
+        runner_name = url.split(':', 1)[0]
+        try:
+            return self.job_runners[runner_name].url_to_destination(url)
+        except Exception as e:
+            log.exception("Unable to convert legacy job runner URL '%s' to job destination, destination will be the '%s' runner with no params: %s" % (url, runner_name, e))
+            return JobDestination(runner=runner_name)
+
+    def put( self, job_wrapper ):
+        runner_name = self.__get_runner_name( job_wrapper )
+        try:
+            if isinstance(job_wrapper, TaskWrapper):
+                # DBTODO Refactor
+                log.debug( "(%s) Dispatching task %s to %s runner" % ( job_wrapper.job_id, job_wrapper.task_id, runner_name ) )
+            else:
+                log.debug( "(%s) Dispatching to %s runner" % ( job_wrapper.job_id, runner_name ) )
+            self.job_runners[runner_name].put( job_wrapper )
+        except KeyError:
+            log.error( 'put(): (%s) Invalid job runner: %s' % ( job_wrapper.job_id, runner_name ) )
+            job_wrapper.fail( DEFAULT_JOB_PUT_FAILURE_MESSAGE )
+
+    def stop( self, job ):
+        """
+        Stop the given job. The input variable job may be either a Job or a Task.
+        """
+        # The Job and Task classes have been modified so that their accessors
+        # will return the appropriate value.
+        # Note that Jobs and Tasks have runner_names, which are distinct from
+        # the job_runner_name and task_runner_name.
+
+        if ( isinstance( job, model.Job ) ):
+            log.debug( "Stopping job %d", job.get_id() )
+        elif( isinstance( job, model.Task ) ):
+            log.debug( "Stopping job %d, task %d"
+                       % ( job.get_job().get_id(), job.get_id() ) )
+        else:
+            log.debug( "Unknown job to stop" )
+
+        # The runner name is not set until the job has started.
+        # If we're stopping a task, then the runner_name may be
+        # None, in which case it hasn't been scheduled.
+        if ( job.get_job_runner_name() is not None ):
+            runner_name = ( job.get_job_runner_name().split( ":", 1 ) )[ 0 ]
+            if ( isinstance( job, model.Job ) ):
+                log.debug( "stopping job %d in %s runner" % ( job.get_id(), runner_name ) )
+            elif ( isinstance( job, model.Task ) ):
+                log.debug( "Stopping job %d, task %d in %s runner"
+                           % ( job.get_job().get_id(), job.get_id(), runner_name ) )
+            try:
+                self.job_runners[runner_name].stop_job( job )
+            except KeyError:
+                log.error( 'stop(): (%s) Invalid job runner: %s' % ( job.get_id(), runner_name ) )
+                # Job and output dataset states have already been updated, so nothing is done here.
+
+    def recover( self, job, job_wrapper ):
+        runner_name = ( job.job_runner_name.split(":", 1) )[0]
+        log.debug( "recovering job %d in %s runner" % ( job.get_id(), runner_name ) )
+        try:
+            self.job_runners[runner_name].recover( job, job_wrapper )
+        except KeyError:
+            log.error( 'recover(): (%s) Invalid job runner: %s' % ( job_wrapper.job_id, runner_name ) )
+            job_wrapper.fail( DEFAULT_JOB_PUT_FAILURE_MESSAGE )
+
+    def shutdown( self ):
+        for runner in self.job_runners.itervalues():
+            runner.shutdown()
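
A note on the URL-to-destination convention above: url_to_destination()
treats everything before the first ':' in a legacy runner URL as the plugin
id, which is why new-style plugin ids must match the URL's scheme. A minimal
sketch (the URLs and runner ids below are illustrative, not values from this
changeset):

    # The plugin id is the URL scheme, e.g. "local" for "local:///".
    def runner_name_for_url(url):
        return url.split(':', 1)[0]

    assert runner_name_for_url("local:///") == "local"
    assert runner_name_for_url("pbs:///some_queue") == "pbs"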
diff --git a/lib/galaxy/jobs/manager.py b/lib/galaxy/jobs/manager.py
new file mode 100644
index 0000000..4cf5604
--- /dev/null
+++ b/lib/galaxy/jobs/manager.py
@@ -0,0 +1,47 @@
+"""
+Top-level Galaxy job manager, moves jobs to handler(s)
+"""
+
+import logging
+
+from galaxy.jobs import handler, NoopQueue
+
+log = logging.getLogger( __name__ )
+
+
+class JobManager( object ):
+    """
+    Highest level interface to job management.
+
+    TODO: Currently the app accesses "job_queue" and "job_stop_queue" directly.
+          This should be decoupled.
+    """
+    def __init__( self, app ):
+        self.app = app
+        if self.app.is_job_handler():
+            log.debug("Starting job handler")
+            self.job_handler = handler.JobHandler( app )
+            self.job_queue = self.job_handler.job_queue
+            self.job_stop_queue = self.job_handler.job_stop_queue
+        else:
+            self.job_handler = NoopHandler()
+            self.job_queue = self.job_stop_queue = NoopQueue()
+        self.job_lock = False
+
+    def start( self ):
+        self.job_handler.start()
+
+    def shutdown( self ):
+        self.job_handler.shutdown()
+
+
+class NoopHandler( object ):
+    def __init__( self, *args, **kwargs ):
+        self.job_queue = NoopQueue()
+        self.job_stop_queue = NoopQueue()
+
+    def start( self ):
+        pass
+
+    def shutdown( self, *args ):
+        pass
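
NoopHandler (together with NoopQueue imported from galaxy.jobs) is a
null-object stand-in, so processes that are not job handlers can call the
same manager API without special casing. A minimal sketch of the pattern,
assuming a queue whose put() simply discards its arguments:

    # Null-object pattern: same interface, no behavior.
    class NoopQueue(object):
        def put(self, *args):
            pass

        def shutdown(self):
            pass

    queue = NoopQueue()
    queue.put(42, "ignored")  # accepted and silently discarded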
diff --git a/lib/galaxy/jobs/mapper.py b/lib/galaxy/jobs/mapper.py
new file mode 100644
index 0000000..ba2866b
--- /dev/null
+++ b/lib/galaxy/jobs/mapper.py
@@ -0,0 +1,245 @@
+import logging
+import inspect
+import os
+import sys
+
+import galaxy.jobs.rules
+from galaxy.jobs import stock_rules
+from galaxy.jobs.dynamic_tool_destination import map_tool_to_destination
+from .rule_helper import RuleHelper
+
+log = logging.getLogger( __name__ )
+
+DYNAMIC_RUNNER_NAME = "dynamic"
+DYNAMIC_DESTINATION_ID = "dynamic_legacy_from_url"
+
+ERROR_MESSAGE_NO_RULE_FUNCTION = "Galaxy misconfigured - cannot find dynamic rule function name for destination %s."
+ERROR_MESSAGE_RULE_FUNCTION_NOT_FOUND = "Galaxy misconfigured - no rule function named %s found in dynamic rule modules."
+
+
+class JobMappingException( Exception ):
+
+    def __init__( self, failure_message ):
+        self.failure_message = failure_message
+
+
+class JobNotReadyException( Exception ):
+
+    def __init__( self, job_state=None, message=None ):
+        self.job_state = job_state
+        self.message = message
+
+
+STOCK_RULES = dict(
+    choose_one=stock_rules.choose_one,
+    burst=stock_rules.burst,
+    docker_dispatch=stock_rules.docker_dispatch,
+    dtd=map_tool_to_destination,
+)
+
+
+class JobRunnerMapper( object ):
+    """
+    This class is responsible for managing the mapping of jobs
+    (in the form of job_wrappers) to job runner URL strings.
+    """
+
+    def __init__( self, job_wrapper, url_to_destination, job_config ):
+        self.job_wrapper = job_wrapper
+        self.url_to_destination = url_to_destination
+        self.job_config = job_config
+
+        self.rules_module = galaxy.jobs.rules
+
+        if job_config.dynamic_params is not None:
+            rules_module_name = job_config.dynamic_params['rules_module']
+            __import__(rules_module_name)
+            self.rules_module = sys.modules[rules_module_name]
+
+    def __get_rule_modules( self ):
+        unsorted_module_names = self.__get_rule_module_names( )
+        # Load modules in reverse order to allow hierarchical overrides
+        # i.e. 000_galaxy_rules.py, 100_site_rules.py, 200_instance_rules.py
+        module_names = sorted( unsorted_module_names, reverse=True )
+        modules = []
+        for rule_module_name in module_names:
+            try:
+                module = __import__( rule_module_name )
+                for comp in rule_module_name.split( "." )[1:]:
+                    module = getattr( module, comp )
+                modules.append( module )
+            except BaseException as exception:
+                exception_str = str( exception )
+                message = "%s rule module could not be loaded: %s" % ( rule_module_name, exception_str )
+                log.debug( message )
+                continue
+        return modules
+
+    def __get_rule_module_names( self ):
+        rules_dir = self.rules_module.__path__[0]
+        names = []
+        for fname in os.listdir( rules_dir ):
+            if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
+                base_name = self.rules_module.__name__
+                rule_module_name = "%s.%s" % (base_name, fname[:-len(".py")])
+                names.append( rule_module_name )
+        return names
+
+    def __invoke_expand_function( self, expand_function, destination_params ):
+        function_arg_names = inspect.getargspec( expand_function ).args
+        app = self.job_wrapper.app
+        possible_args = {
+            "job_id": self.job_wrapper.job_id,
+            "tool": self.job_wrapper.tool,
+            "tool_id": self.job_wrapper.tool.id,
+            "job_wrapper": self.job_wrapper,
+            "rule_helper": RuleHelper( app ),
+            "app": app
+        }
+
+        actual_args = {}
+
+        # Send through any job_conf.xml defined args to function
+        for destination_param in destination_params.keys():
+            if destination_param in function_arg_names:
+                actual_args[ destination_param ] = destination_params[ destination_param ]
+
+        # Populate needed args
+        for possible_arg_name in possible_args:
+            if possible_arg_name in function_arg_names:
+                actual_args[ possible_arg_name ] = possible_args[ possible_arg_name ]
+
+        # Don't hit the DB to load the job object if not needed
+        require_db = False
+        for param in ["job", "user", "user_email", "resource_params", "workflow_invocation_uuid"]:
+            if param in function_arg_names:
+                require_db = True
+                break
+        if require_db:
+            job = self.job_wrapper.get_job()
+            user = job.user
+            user_email = user and str(user.email)
+
+            if "job" in function_arg_names:
+                actual_args[ "job" ] = job
+
+            if "user" in function_arg_names:
+                actual_args[ "user" ] = user
+
+            if "user_email" in function_arg_names:
+                actual_args[ "user_email" ] = user_email
+
+            if "resource_params" in function_arg_names:
+                # Find the dynamically inserted resource parameters and pass
+                # them to the rule.
+                param_values = self.__job_params( job )
+                resource_params = {}
+                try:
+                    resource_params_raw = param_values[ "__job_resource" ]
+                    if resource_params_raw[ "__job_resource__select" ].lower() in [ "1", "yes", "true" ]:
+                        for key, value in resource_params_raw.iteritems():
+                            resource_params[ key ] = value
+                except KeyError:
+                    pass
+                actual_args[ "resource_params" ] = resource_params
+
+            if "workflow_invocation_uuid" in function_arg_names:
+                param_values = job.raw_param_dict( )
+                workflow_invocation_uuid = param_values.get( "__workflow_invocation_uuid__", None )
+                actual_args[ "workflow_invocation_uuid" ] = workflow_invocation_uuid
+
+        return expand_function( **actual_args )
+
+    def __job_params( self, job ):
+        app = self.job_wrapper.app
+        param_values = job.get_param_values( app, ignore_errors=True )
+        return param_values
+
+    def __convert_url_to_destination( self, url ):
+        """
+        Job runner URLs are deprecated, but dynamic mapper functions may still
+        be returning them.  Runners are expected to be able to convert these to
+        destinations.
+
+        This method calls
+        JobHandlerQueue.DefaultJobDispatcher.url_to_destination, which in turn
+        calls the url_to_destination method for the appropriate runner.
+        """
+        dest = self.url_to_destination( url )
+        dest['id'] = DYNAMIC_DESTINATION_ID
+        return dest
+
+    def __determine_expand_function_name( self, destination ):
+        # By default, look for a function whose name matches one of the tool's ids, unless one is specified.
+        expand_function_name = destination.params.get('function', None)
+        if not expand_function_name:
+            for tool_id in self.job_wrapper.tool.all_ids:
+                if self.__last_rule_module_with_function( tool_id ):
+                    expand_function_name = tool_id
+                    break
+        return expand_function_name
+
+    def __get_expand_function( self, expand_function_name ):
+        matching_rule_module = self.__last_rule_module_with_function( expand_function_name )
+        if matching_rule_module:
+            expand_function = getattr( matching_rule_module, expand_function_name )
+            return expand_function
+        else:
+            message = ERROR_MESSAGE_RULE_FUNCTION_NOT_FOUND % ( expand_function_name )
+            raise Exception( message )
+
+    def __last_rule_module_with_function( self, function_name ):
+        # The rule modules are sorted in reverse order, so return the first
+        # one with the function.
+        for rule_module in self.__get_rule_modules( ):
+            if hasattr( rule_module, function_name ):
+                return rule_module
+        return None
+
+    def __handle_dynamic_job_destination( self, destination ):
+        expand_type = destination.params.get('type', "python")
+        expand_function = None
+        if expand_type == "python":
+            expand_function_name = self.__determine_expand_function_name( destination )
+            if not expand_function_name:
+                message = ERROR_MESSAGE_NO_RULE_FUNCTION % destination
+                raise Exception( message )
+
+            expand_function = self.__get_expand_function( expand_function_name )
+        elif expand_type in STOCK_RULES:
+            expand_function = STOCK_RULES[ expand_type ]
+        else:
+            raise Exception( "Unhandled dynamic job runner type specified - %s" % expand_type )
+
+        return self.__handle_rule( expand_function, destination )
+
+    def __handle_rule( self, rule_function, destination ):
+        job_destination = self.__invoke_expand_function( rule_function, destination.params )
+        if not isinstance(job_destination, galaxy.jobs.JobDestination):
+            job_destination_rep = str(job_destination)  # Should be either id or url
+            if '://' in job_destination_rep:
+                job_destination = self.__convert_url_to_destination(job_destination_rep)
+            else:
+                job_destination = self.job_config.get_destination(job_destination_rep)
+        return job_destination
+
+    def __cache_job_destination( self, params, raw_job_destination=None ):
+        if raw_job_destination is None:
+            raw_job_destination = self.job_wrapper.tool.get_job_destination( params )
+        if raw_job_destination.runner == DYNAMIC_RUNNER_NAME:
+            job_destination = self.__handle_dynamic_job_destination( raw_job_destination )
+        else:
+            job_destination = raw_job_destination
+        self.cached_job_destination = job_destination
+
+    def get_job_destination( self, params ):
+        """
+        Cache the job_destination to avoid recalculation.
+        """
+        if not hasattr( self, 'cached_job_destination' ):
+            self.__cache_job_destination( params )
+        return self.cached_job_destination
+
+    def cache_job_destination( self, raw_job_destination ):
+        self.__cache_job_destination( None, raw_job_destination=raw_job_destination )
+        return self.cached_job_destination
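
For reference, a dynamic rule consumed by JobRunnerMapper is just a function
in a rules module; __invoke_expand_function passes only the arguments the
function declares (drawn from possible_args above, plus the DB-backed ones),
and __handle_rule accepts a destination id, a legacy URL, or a JobDestination
as the return value. A hypothetical rule (the function name and destination
ids are made up for illustration):

    # Placed in a module under lib/galaxy/jobs/rules/.
    # Declaring "user_email" here causes the mapper to load the job from
    # the database to supply it.
    def my_tool(user_email, tool_id):
        if user_email and user_email.endswith("@example.org"):
            return "cluster_high_mem"  # id of a destination in job_conf.xml
        return "cluster_default"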
diff --git a/lib/galaxy/jobs/metrics/__init__.py b/lib/galaxy/jobs/metrics/__init__.py
new file mode 100644
index 0000000..a077b25
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/__init__.py
@@ -0,0 +1,130 @@
+"""This module defines the job metrics collection framework for Galaxy jobs.
+
+The framework consists of two parts - the :class:`JobMetrics` class and
+individual :class:`JobInstrumenter` plugins.
+
+A :class:`JobMetrics` object reads any number of plugins from a configuration
+source such as an XML file, a YAML file, or a dictionary.
+
+Each :class:`JobInstrumenter` plugin object describes how to inject bits
+of shell code into job scripts (before and after tool commands run) and then
+collect the output of these from the job directory.
+"""
+import collections
+import logging
+import os
+
+from galaxy import util
+from galaxy.util import plugin_config
+
+from ..metrics import formatting
+
+log = logging.getLogger(__name__)
+
+
+DEFAULT_FORMATTER = formatting.JobMetricFormatter()
+
+
+class JobMetrics(object):
+    """Load and store a collection of :class:`JobInstrumenter` objects."""
+
+    def __init__(self, conf_file=None, **kwargs):
+        """Load :class:`JobInstrumenter` objects from specified configuration file."""
+        self.plugin_classes = self.__plugins_dict()
+        self.default_job_instrumenter = JobInstrumenter.from_file(self.plugin_classes, conf_file, **kwargs)
+        self.job_instrumenters = collections.defaultdict(lambda: self.default_job_instrumenter)
+
+    def format(self, plugin, key, value):
+        """Format a metric value with the :class:`formatting.JobMetricFormatter` registered for the given plugin."""
+        if plugin in self.plugin_classes:
+            plugin_class = self.plugin_classes[ plugin ]
+            formatter = plugin_class.formatter
+        else:
+            formatter = DEFAULT_FORMATTER
+        return formatter.format(key, value)
+
+    def set_destination_conf_file(self, destination_id, conf_file):
+        instrumenter = JobInstrumenter.from_file(self.plugin_classes, conf_file)
+        self.set_destination_instrumenter(destination_id, instrumenter)
+
+    def set_destination_conf_element(self, destination_id, element):
+        instrumenter = JobInstrumenter(self.plugin_classes, ('xml', element))
+        self.set_destination_instrumenter(destination_id, instrumenter)
+
+    def set_destination_instrumenter(self, destination_id, job_instrumenter=None):
+        if job_instrumenter is None:
+            job_instrumenter = NULL_JOB_INSTRUMENTER
+        self.job_instrumenters[ destination_id ] = job_instrumenter
+
+    def collect_properties(self, destination_id, job_id, job_directory):
+        return self.job_instrumenters[ destination_id ].collect_properties(job_id, job_directory)
+
+    def __plugins_dict(self):
+        import galaxy.jobs.metrics.instrumenters
+        return plugin_config.plugins_dict(galaxy.jobs.metrics.instrumenters, 'plugin_type')
+
+
+class NullJobInstrumenter(object):
+
+    def pre_execute_commands(self, job_directory):
+        return None
+
+    def post_execute_commands(self, job_directory):
+        return None
+
+    def collect_properties(self, job_id, job_directory):
+        return {}
+
+
+NULL_JOB_INSTRUMENTER = NullJobInstrumenter()
+
+
+class JobInstrumenter(object):
+
+    def __init__(self, plugin_classes, plugins_source, **kwargs):
+        self.extra_kwargs = kwargs
+        self.plugin_classes = plugin_classes
+        self.plugins = self.__plugins_from_source(plugins_source)
+
+    def pre_execute_commands(self, job_directory):
+        commands = []
+        for plugin in self.plugins:
+            try:
+                plugin_commands = plugin.pre_execute_instrument(job_directory)
+                if plugin_commands:
+                    commands.extend(util.listify(plugin_commands))
+            except Exception:
+                log.exception("Failed to generate pre-execute commands for plugin %s" % plugin)
+        return "\n".join([ c for c in commands if c ])
+
+    def post_execute_commands(self, job_directory):
+        commands = []
+        for plugin in self.plugins:
+            try:
+                plugin_commands = plugin.post_execute_instrument(job_directory)
+                if plugin_commands:
+                    commands.extend(util.listify(plugin_commands))
+            except Exception:
+                log.exception("Failed to generate post-execute commands for plugin %s" % plugin)
+        return "\n".join([ c for c in commands if c ])
+
+    def collect_properties(self, job_id, job_directory):
+        per_plugin_properties = {}
+        for plugin in self.plugins:
+            try:
+                properties = plugin.job_properties(job_id, job_directory)
+                if properties:
+                    per_plugin_properties[ plugin.plugin_type ] = properties
+            except Exception:
+                log.exception("Failed to collect job properties for plugin %s" % plugin)
+        return per_plugin_properties
+
+    def __plugins_from_source(self, plugins_source):
+        return plugin_config.load_plugins(self.plugin_classes, plugins_source, self.extra_kwargs)
+
+    @staticmethod
+    def from_file(plugin_classes, conf_file, **kwargs):
+        if not conf_file or not os.path.exists(conf_file):
+            return NULL_JOB_INSTRUMENTER
+        plugins_source = plugin_config.plugin_source_from_path(conf_file)
+        return JobInstrumenter(plugin_classes, plugins_source, **kwargs)
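
A sketch of how this framework is driven, assuming a metrics conf file at the
path shown (the path, destination id, and job directory are illustrative):

    from galaxy.jobs.metrics import JobMetrics

    # A missing conf file falls back to NULL_JOB_INSTRUMENTER, so
    # collect_properties() is always safe to call.
    job_metrics = JobMetrics(conf_file="config/job_metrics_conf.xml")
    properties = job_metrics.collect_properties("cluster_a", 42, "/jobs/000/42")
    # properties is keyed by plugin_type, e.g. {"core": {...}, "env": {...}}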
diff --git a/lib/galaxy/jobs/metrics/collectl/__init__.py b/lib/galaxy/jobs/metrics/collectl/__init__.py
new file mode 100644
index 0000000..c3e8815
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/collectl/__init__.py
@@ -0,0 +1,4 @@
+"""Helper functions and data structures for interacting with collectl and its data.
+
+More information on collectl can be found at: http://collectl.sourceforge.net/.
+"""
diff --git a/lib/galaxy/jobs/metrics/collectl/cli.py b/lib/galaxy/jobs/metrics/collectl/cli.py
new file mode 100644
index 0000000..90271d6
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/collectl/cli.py
@@ -0,0 +1,141 @@
+"""This module describes :class:`CollectlCli` - an abstraction for building collectl command lines."""
+import logging
+import subprocess
+from string import Template
+
+log = logging.getLogger( __name__ )
+
+COMMAND_LINE_TEMPLATE = Template(
+    "$collectl_path $destination_arg $mode_arg $subsystems_arg $interval_arg $procfilt_arg $flush_arg $sep_arg"
+)
+MODE_RECORD = "record"
+MODE_PLAYBACK = "playback"
+
+
+class CollectlCli( object ):
+    """
+    Abstraction over (some of) the command-line arguments of collectl.
+    Ideally this will be useful for building up command line arguments for
+    remote execution as well as running directly on the local host.
+
+    This is meant to be a fairly generic utility for interfacing with the
+    collectl CLI - logic more directly related to the Galaxy job metrics
+    plugin should be placed in other modules.
+
+    **Keyword Arguments:**
+
+    ``collectl_path``
+        Path to collectl executable (defaults to collectl - i.e.
+        search the PATH).
+
+    ``playback_path`` (defaults to ``None``)
+        If this is ``None``, collectl will run in
+        record mode, else it will playback specified file.
+
+    **Playback Mode Options:**
+
+    ``sep``
+        Separator used in playback mode (set to 9 to produce tsv)
+        (defaults to None).
+
+    **Record Mode Options** (some of these may work in playback mode also)
+
+    ``destination_path``
+        Location to write collectl output files to (defaults to None, in
+        which case collectl will use the cwd). Really this is just a prefix -
+        collectl will append the hostname and datetime to the file name.
+    ``interval``
+        Setup polling interval (secs) for most subsystems (defaults
+        to None and when unspecified collectl will use a default of 1 second).
+    ``interval2``
+        Setup polling interval (secs) for process information
+        (defaults to None and when unspecified collectl will use a default of
+        60 seconds).
+    ``interval3``
+        Setup polling interval (secs) for environment information
+        (defaults to None and when unspecified collectl will use a default of
+        300 seconds).
+    ``procfilt``
+        Optional argument to procfilt. (defaults to None).
+    ``flush``
+        Optional flush interval (defaults to None).
+    """
+
+    def __init__( self, **kwargs ):
+        command_args = {}
+        command_args[ "collectl_path" ] = kwargs.get( "collectl_path", "collectl" )
+        playback_path = kwargs.get( "playback_path", None )
+        self.mode = MODE_RECORD if not playback_path else MODE_PLAYBACK
+        if self.mode == MODE_RECORD:
+            mode_arg = ""
+        elif self.mode == MODE_PLAYBACK:
+            mode_arg = "-P -p '%s'" % playback_path
+        else:
+            raise Exception( "Invalid mode supplied to CollectlCli - %s" % self.mode )
+        command_args[ "mode_arg" ] = mode_arg
+        command_args[ "interval_arg" ] = self.__interval_arg( kwargs )
+        destination = kwargs.get( "destination_path", None )
+        if destination:
+            destination_arg = "-f '%s'" % destination
+        else:
+            destination_arg = ""
+        command_args[ "destination_arg" ] = destination_arg
+        procfilt = kwargs.get( "procfilt", None )
+        command_args[ "procfilt_arg" ] = "" if not procfilt else "--procfilt %s" % procfilt
+        command_args[ "subsystems_arg" ] = self.__subsystems_arg( kwargs.get( "subsystems", [] ) )
+        flush = kwargs.get( "flush", None )
+        command_args[ "flush_arg"] = "--flush %s" % flush if flush else ""
+        sep = kwargs.get( "sep", None )
+        command_args[ "sep_arg" ] = "--sep=%s" % sep if sep else ""
+
+        self.command_args = command_args
+
+    def __subsystems_arg( self, subsystems ):
+        if subsystems:
+            return "-s%s" % "".join( [ s.command_line_arg for s in subsystems ] )
+        else:
+            return ""
+
+    def __interval_arg( self, kwargs ):
+        if self.mode != MODE_RECORD:
+            return ""
+
+        interval = kwargs.get( "interval", None )
+        if not interval:
+            return ""
+
+        self.__validate_interval_arg( interval )
+        interval_arg = "-i %s" % interval
+        interval2 = kwargs.get( "interval2", None )
+        if not interval2:
+            return interval_arg
+        self.__validate_interval_arg( interval2, multiple_of=int( interval ) )
+        interval_arg = "%s:%s" % ( interval_arg, interval2 )
+
+        interval3 = kwargs.get( "interval3", None )
+        if not interval3:
+            return interval_arg
+        self.__validate_interval_arg( interval3, multiple_of=int( interval ) )
+        interval_arg = "%s:%s" % ( interval_arg, interval3 )
+        return interval_arg
+
+    def __validate_interval_arg( self, value, multiple_of=None ):
+        if value and not str(value).isdigit():
+            raise Exception( "Invalid interval argument supplied, must be an integer: %s" % value )
+        if multiple_of:
+            if int( value ) % multiple_of != 0:
+                raise Exception( "Invalid interval argument supplied, must be a multiple of %s" % multiple_of )
+
+    def build_command_line( self ):
+        return COMMAND_LINE_TEMPLATE.substitute( **self.command_args )
+
+    def run( self, stdout=subprocess.PIPE, stderr=subprocess.PIPE ):
+        command_line = self.build_command_line()
+        log.info( "Executing %s" % command_line )
+        proc = subprocess.Popen( command_line, shell=True, stdout=stdout, stderr=stderr )
+        return_code = proc.wait()
+        if return_code:
+            raise Exception( "Problem running collectl command." )
+
+
+__all__ = ( 'CollectlCli', )
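
A record-mode usage sketch (the paths and interval values below are
illustrative):

    from galaxy.jobs.metrics.collectl import cli, subsystems

    recorder = cli.CollectlCli(
        collectl_path="collectl",
        destination_path="/tmp/job1/__instrument_collectl_log",
        subsystems=[subsystems.get_subsystem("process")],  # -> -sZ
        interval="10",    # system-level polling every 10s
        interval2="10",   # process-level polling, must be a multiple of interval
        procfilt="U$USER",
        flush="0",
    )
    print(recorder.build_command_line())
    # -> collectl -f '/tmp/job1/...' -sZ -i 10:10 --procfilt U$USER --flush 0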
diff --git a/lib/galaxy/jobs/metrics/collectl/processes.py b/lib/galaxy/jobs/metrics/collectl/processes.py
new file mode 100644
index 0000000..a10c3d9
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/collectl/processes.py
@@ -0,0 +1,256 @@
+""" This module runs collectl in playback mode and collects various process
+statistics for a given pid's process and the rest of its process tree.
+"""
+import collections
+import csv
+import logging
+import sys
+import tempfile
+
+from galaxy import util
+
+from ..collectl import stats
+
+if sys.version_info > (3,):
+    long = int
+
+log = logging.getLogger( __name__ )
+
+# Collectl process information cheat sheet:
+#
+# Record process information for current user.
+# %  collectl -sZ -f./__instrument_collectl  -i 10:10 --procfilt U$USER
+#
+# TSV Replay of processing information in plottable mode...
+#
+# % collectl -sZ -P --sep=9 -p __instrument_collectl-jlaptop13-20140322-120919.raw.gz
+#
+# Has following columns:
+#   Date   Time    PID     User    PR      PPID    THRD    S       VmSize  VmLck   VmRSS   VmData  VmStk   VmExe   VmLib   CPU       SysT    UsrT    PCT     AccumT  RKB     WKB     RKBC    WKBC    RSYS    WSYS    CNCL    MajF    MinF    Command
+#
+
+# Process data dumped one row per process per interval.
+# http://collectl.sourceforge.net/Data-detail.html
+PROCESS_COLUMNS = [
+    "#Date",  # Date of interval - e.g. 20140322
+    "Time",  # Time of interval - 12:18:58
+    "PID",  # Process pid.
+    "User",  # Process user.
+    "PR",  # Priority of process.
+    "PPID",  # Parent PID of process.
+    "THRD",  # Thread???
+    "S",  # Process state - S - Sleeping, D - Uninterruptable Sleep, R - Running, Z - Zombie or T - Stopped/Traced
+    # Memory options - http://ewx.livejournal.com/579283.html
+    "VmSize",
+    "VmLck",
+    "VmRSS",
+    "VmData",
+    "VmStk",
+    "VmExe",
+    "VmLib",
+    "CPU",  # CPU number of process
+    "SysT",  # Amount of system time consumed during interval
+    "UsrT",  # Amount of user time consumed during interval
+    "PCT",  # Percentage of current interval consumed by task
+    "AccumT",  # Total accumulated System and User time since the process began execution
+    # kilobytes read/written - requires I/O level monitoring to be enabled in kernel.
+    "RKB",  # kilobytes read by process - requires I/O monitoring in kernel
+    "WKB",
+    "RKBC",
+    "WKBC",
+    "RSYS",  # Number of read system calls
+    "WSYS",  # Number of write system calls
+    "CNCL",
+    "MajF",  # Number of major page faults
+    "MinF",  # Number of minor page faults
+    "Command",  # Command executed
+]
+
+# Types of statistics this module can summarize
+STATISTIC_TYPES = [ "max", "min", "sum", "count", "avg" ]
+
+COLUMN_INDICES = dict( [ ( col, i ) for i, col in enumerate( PROCESS_COLUMNS ) ] )
+PID_INDEX = COLUMN_INDICES[ "PID" ]
+PARENT_PID_INDEX = COLUMN_INDICES[ "PPID" ]
+
+DEFAULT_STATISTICS = [
+    ("max", "VmSize"),
+    ("avg", "VmSize"),
+    ("max", "VmRSS"),
+    ("avg", "VmRSS"),
+    ("sum", "SysT"),
+    ("sum", "UsrT"),
+    ("max", "PCT"),
+    ("avg", "PCT"),
+    ("max", "AccumT"),
+    ("sum", "RSYS"),
+    ("sum", "WSYS"),
+]
+
+
+def parse_process_statistics( statistics ):
+    """ Turn string or list of strings into list of tuples in format ( stat,
+    resource ) where stat is a value from STATISTIC_TYPES and resource is a
+    value from PROCESS_COLUMNS.
+    """
+    if statistics is None:
+        statistics = DEFAULT_STATISTICS
+
+    statistics = util.listify( statistics )
+    statistics = [ _tuplize_statistic(_) for _ in statistics ]
+    # Check for validity...
+    for statistic in statistics:
+        if statistic[ 0 ] not in STATISTIC_TYPES:
+            raise Exception( "Unknown statistic type encountered %s" % statistic[ 0 ] )
+        if statistic[ 1 ] not in PROCESS_COLUMNS:
+            raise Exception( "Unknown process column encountered %s" % statistic[ 1 ] )
+    return statistics
+
+
+def generate_process_statistics( collectl_playback_cli, pid, statistics=DEFAULT_STATISTICS ):
+    """ Playback collectl file and generate summary statistics.
+    """
+    with tempfile.NamedTemporaryFile( ) as tmp_tsv:
+        collectl_playback_cli.run( stdout=tmp_tsv )
+        with open( tmp_tsv.name, "r" ) as tsv_file:
+            return _read_process_statistics( tsv_file, pid, statistics )
+
+
+def _read_process_statistics( tsv_file, pid, statistics ):
+    process_summarizer = CollectlProcessSummarizer( pid, statistics )
+    current_interval = None
+
+    for row in csv.reader( tsv_file, dialect="excel-tab" ):
+        if current_interval is None:
+            # First row - check it contains the correct header.
+            for header, expected_header in zip( row, PROCESS_COLUMNS ):
+                if header.lower() != expected_header.lower():
+                    raise Exception( "Unknown header value encountered while processing collectl playback - %s" % header )
+
+            current_interval = CollectlProcessInterval()
+            continue
+
+        if current_interval.row_is_in( row ):
+            current_interval.add_row( row )
+        else:
+            process_summarizer.handle_interval( current_interval )
+            current_interval = CollectlProcessInterval()
+
+    # Do we have unsummarized rows...
+    if current_interval and current_interval.rows:
+        process_summarizer.handle_interval( current_interval )
+
+    return process_summarizer.get_statistics()
+
+
+class CollectlProcessSummarizer( object ):
+
+    def __init__( self, pid, statistics ):
+        self.pid = pid
+        self.statistics = statistics
+        self.columns_of_interest = set( [ s[ 1 ] for s in statistics ] )
+        self.tree_statistics = collections.defaultdict( stats.StatisticsTracker )
+        self.process_accum_statistics = collections.defaultdict( stats.StatisticsTracker )
+        self.interval_count = 0
+
+    def handle_interval( self, interval ):
+        self.interval_count += 1
+        rows = self.__rows_for_process( interval.rows, self.pid )
+        for column_name in self.columns_of_interest:
+            column_index = COLUMN_INDICES[ column_name ]
+
+            if column_name == "AccumT":
+                # Do not sum this across pids each interval; track per pid and sum the maxima at the end.
+                for r in rows:
+                    pid_seconds = self.__time_to_seconds( r[ column_index ] )
+                    self.process_accum_statistics[ r[ PID_INDEX ] ].track( pid_seconds )
+            else:
+                # All other statistics are summed across the whole process
+                # tree at each interval.
+                if column_name in [ "SysT", "UsrT", "PCT" ]:
+                    to_num = float
+                else:
+                    to_num = long
+
+                interval_stat = sum( to_num( r[ column_index ] ) for r in rows )
+                self.tree_statistics[ column_name ].track( interval_stat )
+
+    def get_statistics( self ):
+        if self.interval_count == 0:
+            return []
+
+        computed_statistics = []
+        for statistic in self.statistics:
+            statistic_type, column = statistic
+            if column == "AccumT":
+                # Only the max makes sense for AccumT.
+                if statistic_type != "max":
+                    log.warning( "Only statistic max makes sense for AccumT" )
+                    continue
+
+                value = sum( v.max for v in self.process_accum_statistics.values() )
+            else:
+                statistics_tracker = self.tree_statistics[ column ]
+                value = getattr( statistics_tracker, statistic_type )
+
+            computed_statistic = ( statistic, value )
+            computed_statistics.append( computed_statistic )
+
+        return computed_statistics
+
+    def __rows_for_process( self, rows, pid ):
+        process_rows = []
+        pids = self.__all_child_pids( rows, pid )
+        for row in rows:
+            if row[ PID_INDEX ] in pids:
+                process_rows.append( row )
+        return process_rows
+
+    def __all_child_pids( self, rows, pid ):
+        pids_in_process_tree = set( [ str( self.pid ) ] )
+        added = True
+        while added:
+            added = False
+            for row in rows:
+                pid = row[ PID_INDEX ]
+                parent_pid = row[ PARENT_PID_INDEX ]
+                if parent_pid in pids_in_process_tree and pid not in pids_in_process_tree:
+                    pids_in_process_tree.add( pid )
+                    added = True
+        return pids_in_process_tree
+
+    def __time_to_seconds( self, time_str ):
+        parts = time_str.split( ":" )
+        seconds = 0.0
+        for i, val in enumerate( parts ):
+            seconds += float(val) * ( 60 ** ( len( parts ) - ( i + 1 ) ) )
+        return seconds
+
+
+class CollectlProcessInterval( object ):
+    """ Represent all rows in collectl playback file for given time slice with
+    ability to filter out just rows corresponding to the process tree
+    corresponding to a given pid.
+    """
+
+    def __init__( self ):
+        self.rows = []
+
+    def row_is_in( self, row ):
+        if not self.rows:  # No rows, this row defines interval.
+            return True
+        first_row = self.rows[ 0 ]
+        return first_row[ 0 ] == row[ 0 ] and first_row[ 1 ] == row[ 1 ]
+
+    def add_row( self, row ):
+        self.rows.append( row )
+
+
+def _tuplize_statistic( statistic ):
+    if not isinstance( statistic, tuple ):
+        statistic_split = statistic.split( "_", 1 )
+        statistic = ( statistic_split[ 0 ].lower(), statistic_split[ 1 ] )
+    return statistic
+
+
+__all__ = ( 'generate_process_statistics', )
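
The statistics argument accepts either ("max", "VmRSS")-style tuples or
"max_VmRSS"-style strings; a quick sketch of the normalization:

    from galaxy.jobs.metrics.collectl import processes

    stats = processes.parse_process_statistics(["max_VmRSS", "sum_SysT"])
    assert stats == [("max", "VmRSS"), ("sum", "SysT")]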
diff --git a/lib/galaxy/jobs/metrics/collectl/stats.py b/lib/galaxy/jobs/metrics/collectl/stats.py
new file mode 100644
index 0000000..7bf79f5
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/collectl/stats.py
@@ -0,0 +1,27 @@
+""" Primitive module for tracking running statistics without storing all
+observed values in memory.
+"""
+
+
+class StatisticsTracker( object ):
+
+    def __init__( self ):
+        self.min = None
+        self.max = None
+        self.count = 0
+        self.sum = 0
+
+    def track( self, value ):
+        if self.min is None or value < self.min:
+            self.min = value
+        if self.max is None or value > self.max:
+            self.max = value
+        self.count += 1
+        self.sum += value
+
+    @property
+    def avg( self ):
+        if self.count > 0:
+            return self.sum / self.count
+        else:
+            return None
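
Usage is straightforward:

    tracker = StatisticsTracker()
    for value in [3, 1, 2]:
        tracker.track(value)
    assert (tracker.min, tracker.max, tracker.sum, tracker.count) == (1, 3, 6, 3)
    assert tracker.avg == 2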
diff --git a/lib/galaxy/jobs/metrics/collectl/subsystems.py b/lib/galaxy/jobs/metrics/collectl/subsystems.py
new file mode 100644
index 0000000..942c5b3
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/collectl/subsystems.py
@@ -0,0 +1,81 @@
+"""Abstractions describing collectl subsystems (specified with the collectl ``-s`` parameter).
+
+Subsystems are essentially monitoring plugins available within collectl.
+"""
+from abc import (
+    ABCMeta,
+    abstractmethod
+)
+
+import six
+
+
+@six.add_metaclass(ABCMeta)
+class CollectlSubsystem( object ):
+    """ Class providing an abstraction of collectl subsystems.
+    """
+
+    @property
+    @abstractmethod
+    def command_line_arg( self ):
+        """ Return single letter command-line argument used by collectl CLI.
+        """
+
+    @property
+    @abstractmethod
+    def name( self ):
+        """ High-level name for subsystem as consumed by this module.
+        """
+
+
+class ProcessesSubsystem( CollectlSubsystem ):
+    command_line_arg = "Z"
+    name = "process"
+
+
+class CpuSubsystem( CollectlSubsystem ):
+    command_line_arg = "C"
+    name = "cpu"
+
+
+class DiskSubsystem( CollectlSubsystem ):
+    command_line_arg = "D"
+    name = "disk"
+
+
+class NetworkSubsystem( CollectlSubsystem ):
+    command_line_arg = "N"
+    name = "network"
+
+
+class EnvironmentSubsystem( CollectlSubsystem ):
+    command_line_arg = "E"
+    name = "environment"
+
+
+class MemorySubsystem( CollectlSubsystem ):
+    command_line_arg = "M"
+    name = "memory"
+
+
+SUBSYSTEMS = [
+    ProcessesSubsystem(),
+    CpuSubsystem(),
+    DiskSubsystem(),
+    NetworkSubsystem(),
+    EnvironmentSubsystem(),
+    MemorySubsystem(),
+]
+SUBSYSTEM_DICT = dict( [ (s.name, s) for s in SUBSYSTEMS ] )
+
+
+def get_subsystem( name ):
+    """
+
+    >>> get_subsystem( "process" ).command_line_arg == "Z"
+    True
+    """
+    return SUBSYSTEM_DICT[ name ]
+
+
+__all__ = ( 'get_subsystem', )
diff --git a/lib/galaxy/jobs/metrics/formatting.py b/lib/galaxy/jobs/metrics/formatting.py
new file mode 100644
index 0000000..6bda184
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/formatting.py
@@ -0,0 +1,18 @@
+"""Utilities related to formatting job metrics for human consumption."""
+
+
+class JobMetricFormatter(object):
+    """Format job metric key-value pairs for human consumption in Web UI."""
+
+    def format(self, key, value):
+        return (str(key), str(value))
+
+
+def seconds_to_str(value):
+    """Convert seconds to a simple string describing the amount of time."""
+    if value < 60:
+        return "%s seconds" % value
+    elif value < 3600:
+        return "%s minutes" % (value / 60)
+    else:
+        return "%s hours and %s minutes" % (value / 3600, (value % 3600) / 60)
diff --git a/lib/galaxy/jobs/metrics/instrumenters/__init__.py b/lib/galaxy/jobs/metrics/instrumenters/__init__.py
new file mode 100644
index 0000000..b996daf
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/instrumenters/__init__.py
@@ -0,0 +1,55 @@
+"""This module describes the abstract interface for :class:`InstrumentPlugin`.
+
+These are responsible for collecting and formatting a coherent set of metrics.
+"""
+import os.path
+
+from abc import ABCMeta
+from abc import abstractmethod
+
+from ...metrics import formatting
+
+
+INSTRUMENT_FILE_PREFIX = "__instrument"
+
+
+class InstrumentPlugin( object ):
+    """Describes how to instrument job scripts and retrieve collected metrics."""
+    __metaclass__ = ABCMeta
+    formatter = formatting.JobMetricFormatter()
+
+    @property
+    @abstractmethod
+    def plugin_type( self ):
+        """ Short string labelling this plugin. """
+
+    def pre_execute_instrument( self, job_directory ):
+        """ Optionally return one or more commands to instrument the job. These
+        commands will be executed on the compute server prior to the job
+        running.
+        """
+        return None
+
+    def post_execute_instrument( self, job_directory ):
+        """ Optionally return one or more commands to instrument the job. These
+        commands will be executed on the compute server after the tool-defined
+        command is run.
+        """
+        return None
+
+    @abstractmethod
+    def job_properties( self, job_id, job_directory ):
+        """ Collect properties for this plugin from specified job directory.
+        This method will run on the Galaxy server and can assume files created
+        in job_directory with pre_execute_instrument and
+        post_execute_instrument are available.
+        """
+
+    def _instrument_file_name( self, name ):
+        """ Provide a common pattern for naming files used by instrumentation
+        plugins - to ease their staging out of remote job directories.
+        """
+        return "%s_%s_%s" % ( INSTRUMENT_FILE_PREFIX, self.plugin_type, name )
+
+    def _instrument_file_path( self, job_directory, name ):
+        return os.path.join( job_directory, self._instrument_file_name( name ) )
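
A minimal hypothetical plugin following this interface - it records the
compute node's hostname at runtime and reads it back on the Galaxy server
(not part of this changeset):

    from galaxy.jobs.metrics.instrumenters import InstrumentPlugin

    class HostnamePlugin( InstrumentPlugin ):
        """Hypothetical example plugin."""
        plugin_type = "hostname"

        def pre_execute_instrument( self, job_directory ):
            return "hostname > '%s'" % self._instrument_file_path( job_directory, "hostname" )

        def job_properties( self, job_id, job_directory ):
            path = self._instrument_file_path( job_directory, "hostname" )
            return { "hostname": open( path ).read().strip() }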
diff --git a/lib/galaxy/jobs/metrics/instrumenters/collectl.py b/lib/galaxy/jobs/metrics/instrumenters/collectl.py
new file mode 100644
index 0000000..49d49b8
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/instrumenters/collectl.py
@@ -0,0 +1,219 @@
+"""The module describes the ``collectl`` job metrics plugin."""
+import logging
+import os
+import shutil
+
+from galaxy import util
+
+from ..collectl import (
+    cli,
+    processes,
+    subsystems
+)
+from ..instrumenters import InstrumentPlugin
+from ...metrics import formatting
+
+log = logging.getLogger( __name__ )
+
+# By default, only grab statistics for user processes (as identified by
+# username).
+DEFAULT_PROCFILT_ON = "username"
+DEFAULT_SUBSYSTEMS = "process"
+# Set to zero to flush every collection.
+DEFAULT_FLUSH_INTERVAL = "0"
+
+FORMATTED_RESOURCE_TITLES = {
+    "PCT": "Percent CPU Usage",
+    "RSYS": "Disk Reads",
+    "WSYS": "Disk Writes",
+}
+
+EMPTY_COLLECTL_FILE_MESSAGE = "Skipping process summary due to empty file... job probably did not run long enough for collectl to gather data."
+
+
+class CollectlFormatter( formatting.JobMetricFormatter ):
+
+    def format( self, key, value ):
+        if key == "pid":
+            return ( "Process ID", int( value ) )
+        elif key == "raw_log_path":
+            return ( "Relative Path of Full Collectl Log", value )
+        elif key == "process_max_AccumT":
+            return ( "Job Runtime (System+User)", formatting.seconds_to_str( float( value ) ) )
+        else:
+            _, stat_type, resource_type = key.split( "_", 2 )
+            if resource_type.startswith( "Vm"):
+                value_str = "%s KB" % int( value )
+            elif resource_type in [ "RSYS", "WSYS" ] and stat_type in [ "count", "max", "sum" ]:
+                value_str = "%d (# system calls)" % int( value )
+            else:
+                value_str = str( value )
+            resource_title = FORMATTED_RESOURCE_TITLES.get( resource_type, resource_type )
+            return ( "%s (%s)" % ( resource_title, stat_type ), value_str )
+
+
+class CollectlPlugin( InstrumentPlugin ):
+    """ Run collectl along with job to capture system and/or process data
+    according to specified collectl subsystems.
+    """
+    plugin_type = "collectl"
+    formatter = CollectlFormatter()
+
+    def __init__( self, **kwargs ):
+        self.__configure_paths( kwargs )
+        self.__configure_subsystems( kwargs )
+        saved_logs_path = kwargs.get( "saved_logs_path", "" )
+        if "app" in kwargs:
+            log.debug("Found path for saved logs: %s" % saved_logs_path)
+            saved_logs_path = kwargs[ "app" ].config.resolve_path( saved_logs_path )
+        self.saved_logs_path = saved_logs_path
+        self.__configure_collectl_recorder_args( kwargs )
+        self.summarize_process_data = util.asbool( kwargs.get( "summarize_process_data", True ) )
+        self.log_collectl_program_output = util.asbool( kwargs.get( "log_collectl_program_output", False ) )
+        if self.summarize_process_data:
+            if subsystems.get_subsystem( "process" ) not in self.subsystems:
+                raise Exception( "Collectl plugin misconfigured - cannot summarize_process_data without process subsystem being enabled." )
+
+            process_statistics = kwargs.get( "process_statistics", None )
+            # None will let processes module use default set of statistics
+            # defined there.
+            self.process_statistics = processes.parse_process_statistics( process_statistics )
+
+    def pre_execute_instrument( self, job_directory ):
+        commands = []
+        # Capture the PID of the job script so we can walk its process tree
+        # when building statistics for the whole job.
+        commands.append( '''echo "$$" > '%s' ''' % self.__pid_file( job_directory ) )
+        # Run collectl in record mode to capture process and system level
+        # statistics according to supplied subsystems.
+        commands.append( self.__collectl_record_command( job_directory ) )
+        return commands
+
+    def post_execute_instrument( self, job_directory ):
+        commands = []
+        # collectl dies when the job script completes. TODO: capture the pid of
+        # collectl above and check whether it is still alive, to verify that
+        # collectl ran successfully through the whole job.
+        return commands
+
+    def job_properties( self, job_id, job_directory ):
+        pid = open( self.__pid_file( job_directory ), "r" ).read().strip()
+        contents = os.listdir( job_directory )
+        try:
+            rel_path = filter( self._is_instrumented_collectl_log, contents )[ 0 ]
+            path = os.path.join( job_directory, rel_path )
+        except IndexError:
+            message = "Failed to find collectl log in directory %s, files were %s" % ( job_directory, contents )
+            raise Exception( message )
+
+        properties = dict(
+            pid=int( pid ),
+        )
+
+        if self.saved_logs_path:
+            destination_rel_dir = os.path.join( *util.directory_hash_id( job_id ) )
+            destination_rel_path = os.path.join( destination_rel_dir, rel_path )
+            destination_path = os.path.join( self.saved_logs_path, destination_rel_path )
+            destination_dir = os.path.dirname( destination_path )
+            if not os.path.isdir( destination_dir ):
+                os.makedirs( destination_dir )
+            shutil.copyfile( path, destination_path )
+            properties[ "raw_log_path" ] = destination_rel_path
+
+        if self.summarize_process_data:
+            # Run collectl in playback and generate statistics of interest
+            summary_statistics = self.__summarize_process_data( pid, path )
+            for statistic, value in summary_statistics:
+                properties[ "process_%s" % "_".join( statistic ) ] = value
+
+        return properties
+
+    def __configure_paths( self, kwargs ):
+        # 95% of the time I would expect collectl to just be installed with apt
+        # or yum, but if it is manually installed and not on the PATH, allow
+        # configuration of an explicit path - and allow the path to be different
+        # between the galaxy job handler (local_collectl_path) and the compute
+        # node (remote_collectl_path).
+        collectl_path = kwargs.get( "collectl_path", "collectl" )
+        self.remote_collectl_path = kwargs.get( "remote_collectl_path", collectl_path )
+        self.local_collectl_path = kwargs.get( "local_collectl_path", collectl_path )
+
+    def __configure_subsystems( self, kwargs ):
+        raw_subsystems_str = kwargs.get( "subsystems", DEFAULT_SUBSYSTEMS )
+        raw_subsystems = util.listify( raw_subsystems_str, do_strip=True )
+        self.subsystems = [ subsystems.get_subsystem(_) for _ in raw_subsystems ]
+
+    def __configure_collectl_recorder_args( self, kwargs ):
+        collectl_recorder_args = kwargs.copy()
+
+        # Allow the deployer to configure separate system and process intervals,
+        # but if they specify just one, use it for both. The thinking here is
+        # that this plugin's most useful feature is the process-level
+        # information, so that is likely what the deployer is attempting to
+        # configure.
+        if "interval" in kwargs and "interval2" not in kwargs:
+            collectl_recorder_args[ "interval2" ] = kwargs[ "interval"]
+
+        if "flush" not in kwargs:
+            collectl_recorder_args[ "flush" ] = DEFAULT_FLUSH_INTERVAL
+
+        procfilt_on = kwargs.get( "procfilt_on", DEFAULT_PROCFILT_ON ).lower()
+        # Calculate explicit arguments, rest can just be passed through from
+        # constructor arguments.
+        explicit_args = dict(
+            collectl_path=self.remote_collectl_path,
+            procfilt=procfilt_argument( procfilt_on ),
+            subsystems=self.subsystems,
+        )
+        collectl_recorder_args.update( explicit_args )
+        self.collectl_recorder_args = collectl_recorder_args
+
+    def __summarize_process_data( self, pid, collectl_log_path ):
+        playback_cli_args = dict(
+            collectl_path=self.local_collectl_path,
+            playback_path=collectl_log_path,
+            sep="9"
+        )
+        if not os.stat( collectl_log_path ).st_size:
+            log.debug( EMPTY_COLLECTL_FILE_MESSAGE )
+            return [ ]
+
+        playback_cli = cli.CollectlCli( **playback_cli_args )
+        return processes.generate_process_statistics( playback_cli, pid, self.process_statistics )
+
+    def __collectl_recorder_cli( self, job_directory ):
+        cli_args = self.collectl_recorder_args.copy()
+        cli_args[ "destination_path" ] = self._instrument_file_path( job_directory, "log" )
+        return cli.CollectlCli( **cli_args )
+
+    def __collectl_record_command( self, job_directory ):
+        collectl_cli = self.__collectl_recorder_cli( job_directory )
+        if self.log_collectl_program_output:
+            redirect_to = self._instrument_file_path( job_directory, "program_output" )
+        else:
+            redirect_to = "/dev/null"
+        return "%s > %s 2>&1 &" % (
+            collectl_cli.build_command_line(),
+            redirect_to,
+        )
+
+    def __pid_file( self, job_directory ):
+        return self._instrument_file_path( job_directory, "pid" )
+
+    def _is_instrumented_collectl_log( self, filename ):
+        prefix = self._instrument_file_name( "log" )
+        return filename.startswith( prefix ) and filename.endswith( ".raw.gz" )
+
+
+def procfilt_argument( procfilt_on ):
+    if procfilt_on == "username":
+        return "U$USER"
+    elif procfilt_on == "uid":
+        return "u$UID"
+    else:
+        # Ensure it is empty or "none"
+        if procfilt_on and procfilt_on.lower() != "none":
+            raise Exception( "Invalid procfilt_on argument encountered" )
+        return ""
+
+
+__all__ = ( 'CollectlPlugin', )
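
The keyword arguments consumed above come straight from the plugin's
configuration; constructing the plugin directly with the same keywords
(all values illustrative):

    from galaxy.jobs.metrics.instrumenters.collectl import CollectlPlugin

    plugin = CollectlPlugin(
        subsystems="process",          # must include "process" to summarize
        interval="10",                 # reused as interval2 when that is unset
        procfilt_on="username",        # -> --procfilt U$USER
        process_statistics="max_VmRSS,sum_SysT",
        local_collectl_path="/usr/bin/collectl",
    )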
diff --git a/lib/galaxy/jobs/metrics/instrumenters/core.py b/lib/galaxy/jobs/metrics/instrumenters/core.py
new file mode 100644
index 0000000..ce5534f
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/instrumenters/core.py
@@ -0,0 +1,88 @@
+"""The module describes the ``core`` job metrics plugin."""
+import logging
+import time
+
+from ..instrumenters import InstrumentPlugin
+from ...metrics import formatting
+
+log = logging.getLogger( __name__ )
+
+GALAXY_SLOTS_KEY = "galaxy_slots"
+START_EPOCH_KEY = "start_epoch"
+END_EPOCH_KEY = "end_epoch"
+RUNTIME_SECONDS_KEY = "runtime_seconds"
+
+
+class CorePluginFormatter( formatting.JobMetricFormatter ):
+
+    def format( self, key, value ):
+        value = int( value )
+        if key == GALAXY_SLOTS_KEY:
+            return ( "Cores Allocated", "%d" % value )
+        elif key == RUNTIME_SECONDS_KEY:
+            return ( "Job Runtime (Wall Clock)", formatting.seconds_to_str( value ) )
+        else:
+            # TODO: Use localized version of this from galaxy.ini
+            title = "Job Start Time" if key == START_EPOCH_KEY else "Job End Time"
+            return (title, time.strftime( '%Y-%m-%d %H:%M:%S', time.localtime( value ) ) )
+
+
+class CorePlugin( InstrumentPlugin ):
+    """ Simple plugin that collects data without external dependencies. In
+    particular it currently collects value set for Galaxy slots.
+    """
+    plugin_type = "core"
+    formatter = CorePluginFormatter()
+
+    def __init__( self, **kwargs ):
+        pass
+
+    def pre_execute_instrument( self, job_directory ):
+        commands = []
+        commands.append( self.__record_galaxy_slots_command( job_directory ) )
+        commands.append( self.__record_seconds_since_epoch_to_file( job_directory, "start" ) )
+        return commands
+
+    def post_execute_instrument( self, job_directory ):
+        commands = []
+        commands.append( self.__record_seconds_since_epoch_to_file( job_directory, "end" ) )
+        return commands
+
+    def job_properties( self, job_id, job_directory ):
+        galaxy_slots_file = self.__galaxy_slots_file( job_directory )
+
+        properties = {}
+        properties[ GALAXY_SLOTS_KEY ] = self.__read_integer( galaxy_slots_file )
+        start = self.__read_seconds_since_epoch( job_directory, "start" )
+        end = self.__read_seconds_since_epoch( job_directory, "end" )
+        if start is not None and end is not None:
+            properties[ START_EPOCH_KEY ] = start
+            properties[ END_EPOCH_KEY ] = end
+            properties[ RUNTIME_SECONDS_KEY ] = end - start
+        return properties
+
+    def __record_galaxy_slots_command( self, job_directory ):
+        galaxy_slots_file = self.__galaxy_slots_file( job_directory )
+        return '''echo "$GALAXY_SLOTS" > '%s' ''' % galaxy_slots_file
+
+    def __record_seconds_since_epoch_to_file( self, job_directory, name ):
+        path = self._instrument_file_path( job_directory, "epoch_%s" % name )
+        # The %s here is date's format directive for seconds since the epoch,
+        # not Python interpolation - hence the string concatenation.
+        return 'date +"%s" > ' + path
+
+    def __read_seconds_since_epoch( self, job_directory, name ):
+        path = self._instrument_file_path( job_directory, "epoch_%s" % name )
+        return self.__read_integer( path )
+
+    def __galaxy_slots_file( self, job_directory ):
+        return self._instrument_file_path( job_directory, "galaxy_slots" )
+
+    def __read_integer( self, path ):
+        value = None
+        try:
+            value = int( open( path, "r" ).read() )
+        except Exception:
+            pass
+        return value
+
+
+__all__ = ( 'CorePlugin', )
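
Concretely, the pre-execute commands injected into the job script look like
this (the job directory is illustrative):

    from galaxy.jobs.metrics.instrumenters.core import CorePlugin

    plugin = CorePlugin()
    for command in plugin.pre_execute_instrument( "/jobs/000/42" ):
        print( command )
    # echo "$GALAXY_SLOTS" > '/jobs/000/42/__instrument_core_galaxy_slots'
    # date +"%s" > /jobs/000/42/__instrument_core_epoch_start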
diff --git a/lib/galaxy/jobs/metrics/instrumenters/cpuinfo.py b/lib/galaxy/jobs/metrics/instrumenters/cpuinfo.py
new file mode 100644
index 0000000..1ce31cb
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/instrumenters/cpuinfo.py
@@ -0,0 +1,64 @@
+"""The module describes the ``cpuinfo`` job metrics plugin."""
+import logging
+import re
+
+from galaxy import util
+
+from ..instrumenters import InstrumentPlugin
+from ...metrics import formatting
+
+log = logging.getLogger( __name__ )
+
+PROCESSOR_LINE = re.compile(r"processor\s*\:\s*(\d+)")
+
+
+class CpuInfoFormatter( formatting.JobMetricFormatter ):
+
+    def format( self, key, value ):
+        if key == "processor_count":
+            return "Processor Count", "%s" % int( value )
+        else:
+            return key, value
+
+
+class CpuInfoPlugin( InstrumentPlugin ):
+    """ Gather information about processor configuration from /proc/cpuinfo.
+    Linux only.
+    """
+    plugin_type = "cpuinfo"
+    formatter = CpuInfoFormatter()
+
+    def __init__( self, **kwargs ):
+        self.verbose = util.asbool( kwargs.get( "verbose", False ) )
+
+    def pre_execute_instrument( self, job_directory ):
+        return "cat /proc/cpuinfo > '%s'" % self.__instrument_cpuinfo_path( job_directory )
+
+    def job_properties( self, job_id, job_directory ):
+        properties = {}
+        processor_count = 0
+        with open( self.__instrument_cpuinfo_path( job_directory ) ) as f:
+            current_processor = None
+            for line in f:
+                line = line.strip().lower()
+                if not line:  # Skip empty lines
+                    continue
+
+                processor_line_match = PROCESSOR_LINE.match( line )
+                if processor_line_match:
+                    processor_count += 1
+                    current_processor = processor_line_match.group( 1 )
+                elif current_processor and self.verbose:
+                    # If verbose, dump information about each processor
+                    # into the database...
+                    key, value = line.split( ":", 1 )
+                    key = "processor_%s_%s" % ( current_processor, key.strip() )
+                    properties[ key ] = value.strip()
+        properties[ "processor_count" ] = processor_count
+        return properties
+
+    def __instrument_cpuinfo_path( self, job_directory ):
+        return self._instrument_file_path( job_directory, "cpuinfo" )
+
+
+__all__ = ( 'CpuInfoPlugin', )
diff --git a/lib/galaxy/jobs/metrics/instrumenters/env.py b/lib/galaxy/jobs/metrics/instrumenters/env.py
new file mode 100644
index 0000000..762158c
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/instrumenters/env.py
@@ -0,0 +1,73 @@
+"""The module describes the ``env`` job metrics plugin."""
+import logging
+import re
+
+from ..instrumenters import InstrumentPlugin
+from ...metrics import formatting
+
+log = logging.getLogger( __name__ )
+
+
+class EnvFormatter( formatting.JobMetricFormatter ):
+
+    def format( self, key, value ):
+        return ( "%s (runtime environment variable)" % key, value )
+
+
+class EnvPlugin( InstrumentPlugin ):
+    """ Instrumentation plugin capable of recording all or specific environment
+    variables for a job at runtime.
+    """
+    plugin_type = "env"
+    formatter = EnvFormatter()
+
+    def __init__( self, **kwargs ):
+        variables_str = kwargs.get( "variables", None )
+        if variables_str:
+            variables = [ v.strip() for v in variables_str.split(",") ]
+        else:
+            variables = None
+        self.variables = variables
+
+    def pre_execute_instrument( self, job_directory ):
+        """ Use env to dump all environment variables to a file.
+        """
+        return "env > '%s'" % self.__env_file( job_directory )
+
+    def post_execute_instrument( self, job_directory ):
+        return None
+
+    def job_properties( self, job_id, job_directory ):
+        """ Recover environment variables dumped out on compute server and filter
+        out specific variables if needed.
+        """
+        variables = self.variables
+
+        properties = {}
+        with open( self.__env_file( job_directory ) ) as f:
+            env_string = f.read()
+        while env_string:
+            # Check if the next lines contain a shell function.
+            # We use '\n\}\n' as regex termination because shell
+            # functions can be nested.
+            # We use the non-greedy '.+?' because of re.DOTALL .
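+            # An exported bash function appears in env output roughly as:
+            #   foo=() {  echo "hi"
+            #   }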
+            m = re.match( r'([^=]+)=(\(\) \{.+?\n\})\n', env_string, re.DOTALL )
+            if m is None:
+                m = re.match( r'([^=]+)=(.*)\n', env_string )
+            if m is None:
+                # Some problem recording or reading back env output.
+                message_template = "Problem parsing env metric output for job %s - properties will be incomplete"
+                message = message_template % job_id
+                log.debug( message )
+                break
+            (var, value) = m.groups()
+            if not variables or var in variables:
+                properties[ var ] = value
+            env_string = env_string[m.end():]
+
+        return properties
+
+    def __env_file( self, job_directory ):
+        return self._instrument_file_path( job_directory, "vars" )
+
+
+__all__ = ( 'EnvPlugin', )
diff --git a/lib/galaxy/jobs/metrics/instrumenters/meminfo.py b/lib/galaxy/jobs/metrics/instrumenters/meminfo.py
new file mode 100644
index 0000000..1eac6a6
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/instrumenters/meminfo.py
@@ -0,0 +1,66 @@
+"""The module describes the ``meminfo`` job metrics plugin."""
+import re
+import sys
+
+from galaxy import util
+
+from ..instrumenters import InstrumentPlugin
+from ...metrics import formatting
+
+if sys.version_info > (3,):
+    long = int
+
+
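+# Matches /proc/meminfo entries such as "MemTotal:       16341408 kB".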
+MEMINFO_LINE = re.compile(r"(\w+)\s*\:\s*(\d+) kB")
+
+# Important (non-verbose) meminfo property titles.
+MEMINFO_TITLES = {
+    "memtotal": "Total System Memory",
+    "swaptotal": "Total System Swap"
+}
+
+
+class MemInfoFormatter( formatting.JobMetricFormatter ):
+
+    def format( self, key, value ):
+        title = MEMINFO_TITLES.get( key, key )
+        return title, util.nice_size( value * 1000 )  # kB = *1000, KB = *1024 - wikipedia
+
+
+class MemInfoPlugin( InstrumentPlugin ):
+    """ Gather information about processor configuration from /proc/cpuinfo.
+    Linux only.
+    """
+    plugin_type = "meminfo"
+    formatter = MemInfoFormatter()
+
+    def __init__( self, **kwargs ):
+        self.verbose = util.asbool( kwargs.get( "verbose", False ) )
+
+    def pre_execute_instrument( self, job_directory ):
+        return "cat /proc/meminfo > '%s'" % self.__instrument_meminfo_path( job_directory )
+
+    def job_properties( self, job_id, job_directory ):
+        properties = {}
+        with open( self.__instrument_meminfo_path( job_directory ) ) as f:
+            for line in f:
+                line = line.strip()
+                if not line:  # Skip empty lines
+                    continue
+                line_match = MEMINFO_LINE.match( line )
+                if not line_match:
+                    continue
+                key = line_match.group( 1 ).lower()
+                # By default just grab important meminfo properties with titles
+                # defined for formatter. Grab everything in verbose mode for
+                # an arbitrary snapshot of memory at beginning of run.
+                if key in MEMINFO_TITLES or self.verbose:
+                    value = long( line_match.group( 2 ) )
+                    properties[ key ] = value
+        return properties
+
+    def __instrument_meminfo_path( self, job_directory ):
+        return self._instrument_file_path( job_directory, "meminfo" )
+
+
+__all__ = ( 'MemInfoPlugin', )
diff --git a/lib/galaxy/jobs/metrics/instrumenters/uname.py b/lib/galaxy/jobs/metrics/instrumenters/uname.py
new file mode 100644
index 0000000..fc10fb8
--- /dev/null
+++ b/lib/galaxy/jobs/metrics/instrumenters/uname.py
@@ -0,0 +1,35 @@
+"""The module describes the ``uname`` job metrics plugin."""
+from ..instrumenters import InstrumentPlugin
+from ...metrics import formatting
+
+
+class UnameFormatter( formatting.JobMetricFormatter ):
+
+    def format( self, key, value ):
+        return "Operating System", value
+
+
+class UnamePlugin( InstrumentPlugin ):
+    """ Use uname to gather operating system information about remote system
+    job is running on. Linux only.
+    """
+    plugin_type = "uname"
+    formatter = UnameFormatter()
+
+    def __init__( self, **kwargs ):
+        self.uname_args = kwargs.get( "args", "-a" )
+
+    def pre_execute_instrument( self, job_directory ):
+        return "uname %s > '%s'" % ( self.uname_args, self.__instrument_uname_path( job_directory ) )
+
+    def job_properties( self, job_id, job_directory ):
+        properties = {}
+        with open( self.__instrument_uname_path( job_directory ) ) as f:
+            properties[ "uname" ] = f.read()
+        return properties
+
+    def __instrument_uname_path( self, job_directory ):
+        return self._instrument_file_path( job_directory, "uname" )
+
+
+__all__ = ( 'UnamePlugin', )
diff --git a/lib/galaxy/jobs/output_checker.py b/lib/galaxy/jobs/output_checker.py
new file mode 100644
index 0000000..987d5c6
--- /dev/null
+++ b/lib/galaxy/jobs/output_checker.py
@@ -0,0 +1,166 @@
+import re
+import traceback
+from logging import getLogger
+
+from .error_level import StdioErrorLevel
+
+log = getLogger( __name__ )
+
+
+def check_output( tool, stdout, stderr, tool_exit_code, job ):
+    """
+    Check the output of a tool - given the stdout, stderr, and the tool's
+    exit code, return True if the tool exited successfully and False
+    otherwise. No exceptions should be thrown. If this code encounters
+    an exception, it returns True so that the workflow can continue;
+    otherwise, a bug in this code could halt workflow progress.
+
+    Note that, if the tool did not define any exit code handling or
+    any stdio/stderr handling, then it reverts to the previous behavior:
+    if stderr contains anything, then False is returned.
+
+    Note that the job id is just for messages.
+    """
+    # By default, the tool succeeded. This covers the case where the code
+    # has a bug but the tool was ok, and it lets a workflow continue.
+    success = True
+
+    try:
+        # Check exit codes and match regular expressions against stdout and
+        # stderr if this tool was configured to do so.
+        # If there is a regular expression for scanning stdout/stderr,
+        # then we assume that the tool writer overwrote the default
+        # behavior of just setting an error if there is *anything* on
+        # stderr.
+        if tool.stdio_regexes or tool.stdio_exit_codes:
+            # Check the exit code ranges in the order in which
+            # they were specified. Each exit_code is a StdioExitCode
+            # that includes an applicable range. If the exit code was in
+            # that range, then apply the error level and add a message.
+            # If we've reached a fatal error rule, then stop.
+            max_error_level = StdioErrorLevel.NO_ERROR
+            if tool_exit_code is not None:
+                for stdio_exit_code in tool.stdio_exit_codes:
+                    if stdio_exit_code.range_start <= tool_exit_code <= stdio_exit_code.range_end:
+                        # Tack on a generic description of the code
+                        # plus a specific code description. For example,
+                        # this might prepend "Fatal error: Exit code 1 (Out of Memory)\n".
+                        code_desc = stdio_exit_code.desc
+                        if code_desc is None:
+                            code_desc = ""
+                        tool_msg = ( "%s: Exit code %d (%s)" % (
+                                     StdioErrorLevel.desc( stdio_exit_code.error_level ),
+                                     tool_exit_code,
+                                     code_desc ) )
+                        log.info( "Job %s: %s" % (job.get_id_tag(), tool_msg) )
+                        stderr = tool_msg + "\n" + stderr
+                        max_error_level = max( max_error_level,
+                                               stdio_exit_code.error_level )
+                        if max_error_level >= StdioErrorLevel.FATAL:
+                            break
+
+            if max_error_level < StdioErrorLevel.FATAL:
+                # We'll examine every regex. Each regex specifies whether
+                # it is to be run on stdout, stderr, or both. (It is
+                # possible for neither stdout nor stderr to be scanned,
+                # but those regexes won't be used.) We record the highest
+                # error level, which are currently "warning" and "fatal".
+                # If fatal, then we set the job's state to ERROR.
+                # If warning, then we still set the job's state to OK
+                # but include a message. We'll do this if we haven't seen
+                # a fatal error yet.
+                for regex in tool.stdio_regexes:
+                    # If ( this regex should be matched against stdout )
+                    #   - Run the regex's match pattern against stdout
+                    #   - If it matched, then determine the error level.
+                    #       o If it was fatal, then we're done - break.
+                    # Repeat the stdout stuff for stderr.
+                    # TODO: Collapse this into a single function.
+                    if ( regex.stdout_match ):
+                        regex_match = re.search( regex.match, stdout,
+                                                 re.IGNORECASE )
+                        if ( regex_match ):
+                            rexmsg = __regex_err_msg( regex_match, regex)
+                            log.info( "Job %s: %s"
+                                      % ( job.get_id_tag(), rexmsg ) )
+                            stdout = rexmsg + "\n" + stdout
+                            max_error_level = max( max_error_level,
+                                                   regex.error_level )
+                            if max_error_level >= StdioErrorLevel.FATAL:
+                                break
+
+                    if ( regex.stderr_match ):
+                        regex_match = re.search( regex.match, stderr,
+                                                 re.IGNORECASE )
+                        if ( regex_match ):
+                            rexmsg = __regex_err_msg( regex_match, regex)
+                            log.info( "Job %s: %s"
+                                      % ( job.get_id_tag(), rexmsg ) )
+                            stderr = rexmsg + "\n" + stderr
+                            max_error_level = max( max_error_level,
+                                                   regex.error_level )
+                            if max_error_level >= StdioErrorLevel.FATAL:
+                                break
+
+            # If we encountered a fatal error, then we'll need to set the
+            # job state accordingly. Otherwise the job is ok:
+            if max_error_level >= StdioErrorLevel.FATAL:
+                log.debug("Tool exit code indicates an error, failing job.")
+                success = False
+            else:
+                success = True
+
+        # When there are no regular expressions and no exit codes to check,
+        # default to the previous behavior: when there's anything on stderr
+        # the job has an error, and the job is ok otherwise.
+        else:
+            # TODO: Add in the tool and job id:
+            # log.debug( "Tool did not define exit code or stdio handling; "
+            #          + "checking stderr for success" )
+            if stderr:
+                peek = stderr[0:250]
+                log.debug("Tool produced standard error, failing job - [%s]" % peek)
+                success = False
+            else:
+                success = True
+
+    # On any exception, return True.
+    except Exception:
+        tb = traceback.format_exc()
+        log.warning( "Tool check encountered unexpected exception; " +
+                     "assuming tool was successful: " + tb )
+        success = True
+
+    # Store the modified stdout and stderr in the job:
+    if job is not None:
+        job.set_streams( stdout, stderr )
+
+    return success
+
+
+def __regex_err_msg( match, regex ):
+    """
+    Return a message about the match on tool output using the given
+    ToolStdioRegex regex object. ``match`` is a MatchObject that will
+    contain the string matched on.
+    """
+    # Get the description for the error level:
+    err_msg = StdioErrorLevel.desc( regex.error_level ) + ": "
+    # If there's a description for the regular expression, then use it.
+    # Otherwise, we'll take the first 256 characters of the match.
+    if regex.desc is not None:
+        err_msg += regex.desc
+    else:
+        mstart = match.start()
+        mend = match.end()
+        err_msg += "Matched on "
+        # TODO: Move the constant 256 somewhere else besides here.
+        if mend - mstart > 256:
+            err_msg += match.string[ mstart : mstart + 256 ] + "..."
+        else:
+            err_msg += match.string[ mstart: mend ]
+    return err_msg
diff --git a/lib/galaxy/jobs/rule_helper.py b/lib/galaxy/jobs/rule_helper.py
new file mode 100644
index 0000000..872e2f6
--- /dev/null
+++ b/lib/galaxy/jobs/rule_helper.py
@@ -0,0 +1,196 @@
+import hashlib
+import logging
+import random
+from datetime import datetime
+
+from sqlalchemy import func
+
+from galaxy import model
+from galaxy import util
+
+log = logging.getLogger( __name__ )
+
+VALID_JOB_HASH_STRATEGIES = ["job", "user", "history", "workflow_invocation"]
+
+
+class RuleHelper( object ):
+    """ Utility to allow job rules to interface cleanly with the rest of
+    Galaxy and shield them from low-level details of models, metrics, etc....
+
+    The current focus is on figuring out job statistics for a given user, but
+    it could interface with other things as well.
+    """
+
+    def __init__( self, app ):
+        self.app = app
+
+    def supports_docker( self, job_or_tool ):
+        """ Job rules can pass this function a job, job_wrapper, or tool and
+        determine if the underlying tool believes it can be containered.
+        """
+        # Not a ton of logic in this method - but the idea is to shield rule
+        # developers from the details and they shouldn't have to know how to
+        # interrogate tool or job to figure out if it can be run in a
+        # container.
+        if hasattr( job_or_tool, 'containers' ):
+            tool = job_or_tool
+        elif hasattr( job_or_tool, 'tool' ):
+            # Have a JobWrapper-like
+            tool = job_or_tool.tool
+        else:
+            # Have a Job object.
+            tool = self.app.toolbox.get_tool( job_or_tool.tool_id )
+        return any( [ c.type == "docker" for c in tool.containers ] )
+
+    def job_count(
+        self,
+        **kwds
+    ):
+        query = self.query( model.Job )
+        return self._filter_job_query( query, **kwds ).count()
+
+    def sum_job_runtime(
+        self,
+        **kwds
+    ):
+        # TODO: Consider sum_core_hours or something that scales runtime by
+        # calculated cores per job.
+        query = self.metric_query(
+            select=func.sum( model.JobMetricNumeric.table.c.metric_value ),
+            metric_name="runtime_seconds",
+            plugin="core",
+        )
+        query = query.join( model.Job )
+        return float( self._filter_job_query( query, **kwds ).first()[ 0 ] )
+
+    def metric_query( self, select, metric_name, plugin, numeric=True ):
+        metric_class = model.JobMetricNumeric if numeric else model.JobMetricText
+        query = self.query( select )
+        query = query.filter( metric_class.table.c.plugin == plugin )
+        query = query.filter( metric_class.table.c.metric_name == metric_name )
+        return query
+
+    def query( self, select_expression ):
+        return self.app.model.context.query( select_expression )
+
+    def _filter_job_query(
+        self,
+        query,
+        for_user_email=None,
+        for_destination=None,
+        for_destinations=None,
+        for_job_states=None,
+        created_in_last=None,
+        updated_in_last=None,
+    ):
+        if for_destination is not None:
+            for_destinations = [ for_destination ]
+
+        query = query.join( model.User )
+        if for_user_email is not None:
+            query = query.filter( model.User.table.c.email == for_user_email )
+
+        if for_destinations is not None:
+            if len( for_destinations ) == 1:
+                query = query.filter( model.Job.table.c.destination_id == for_destinations[ 0 ] )
+            else:
+                query = query.filter( model.Job.table.c.destination_id.in_( for_destinations ) )
+
+        if created_in_last is not None:
+            end_date = datetime.now()
+            start_date = end_date - created_in_last
+            query = query.filter( model.Job.table.c.create_time >= start_date )
+
+        if updated_in_last is not None:
+            end_date = datetime.now()
+            start_date = end_date - updated_in_last
+            log.debug( "Filtering jobs updated between %s and %s", start_date, end_date )
+            query = query.filter( model.Job.table.c.update_time >= start_date )
+
+        if for_job_states is not None:
+            # Optimize the singleton case - can be much more performant in my experience.
+            if len( for_job_states ) == 1:
+                query = query.filter( model.Job.table.c.state == for_job_states[ 0 ] )
+            else:
+                query = query.filter( model.Job.table.c.state.in_( for_job_states ) )
+
+        return query
+
+    def should_burst( self, destination_ids, num_jobs, job_states=None ):
+        """ Check if the specified destinations ``destination_ids`` have at
+        least ``num_jobs`` assigned to it - send in ``job_state`` as ``queued``
+        to limit this check to number of jobs queued.
+
+        See stock_rules for an simple example of using this function - but to
+        get the most out of it - it should probably be used with custom job
+        rules that can respond to the bursting by allocating resources,
+        launching cloud nodes, etc....
+        """
+        if job_states is None:
+            job_states = "queued,running"
+        from_destination_job_count = self.job_count(
+            for_destinations=destination_ids,
+            for_job_states=util.listify( job_states )
+        )
+        # Would this job push us over maximum job count before requiring
+        # bursting (roughly... very roughly given many handler threads may be
+        # scheduling jobs).
+        return ( from_destination_job_count + 1 ) > int( num_jobs )
+
+    def choose_one( self, lst, hash_value=None ):
+        """ Choose a random value from supplied list. If hash_value is passed
+        in then every request with that same hash_value would produce the same
+        choice from the supplied list.
+        """
+        if hash_value is None:
+            return random.choice( lst )
+
+        if not isinstance( hash_value, int ):
+            # Convert hash_value string into index
+            as_hex = hashlib.md5( hash_value ).hexdigest()
+            hash_value = int(as_hex, 16)
+        # else assumed to be 'random' int from 0-~Inf
+        random_index = hash_value % len( lst )
+        return lst[ random_index ]
+
+    def job_hash( self, job, hash_by=None ):
+        """ Produce a reproducible hash for the given job on various
+        criteria - for instance if hash_by is "workflow_invocation,history" -
+        all jobs within the same workflow invocation will receive the same
+        hash - for jobs outside of workflows all jobs within the same history
+        will receive the same hash, other jobs will be hashed on job's id
+        randomly.
+
+        Primarily intended for use with ``choose_one`` above - to consistent
+        route or schedule related jobs.
+        """
+        if hash_by is None:
+            hash_by = [ "job" ]
+        hash_bys = util.listify( hash_by )
+        for hash_by in hash_bys:
+            job_hash = self._try_hash_for_job( job, hash_by )
+            if job_hash:
+                return job_hash
+
+        # Fall back to just hashing by job id, should always return a value.
+        return self._try_hash_for_job( job, "job" )
+
+    def _try_hash_for_job( self, job, hash_by ):
+        """ May return False or None if hash type is invalid for that job -
+        e.g. attempting to hash by user for anonymous job or by workflow
+        invocation for jobs outside of workflows.
+        """
+        if hash_by not in VALID_JOB_HASH_STRATEGIES:
+            message = "Do not know how to hash jobs by %s, must be one of %s" % ( hash_by, VALID_JOB_HASH_STRATEGIES )
+            raise Exception( message )
+
+        if hash_by == "workflow_invocation":
+            return job.raw_param_dict().get( "__workflow_invocation_uuid__", None )
+        elif hash_by == "history":
+            return job.history_id
+        elif hash_by == "user":
+            user = job.user
+            return user and user.id
+        elif hash_by == "job":
+            return job.id
diff --git a/lib/galaxy/jobs/rules/__init__.py b/lib/galaxy/jobs/rules/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/jobs/runners/__init__.py b/lib/galaxy/jobs/runners/__init__.py
new file mode 100644
index 0000000..17ac05e
--- /dev/null
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -0,0 +1,631 @@
+"""
+Base classes for job runner plugins.
+"""
+
+import datetime
+import logging
+import os
+import string
+import subprocess
+import threading
+import time
+
+from Queue import Queue, Empty
+
+import galaxy.jobs
+from galaxy.jobs.command_factory import build_command
+from galaxy import model
+from galaxy.util import DATABASE_MAX_STRING_SIZE, shrink_stream_by_size
+from galaxy.util import in_directory
+from galaxy.util import ParamsWithSpecs
+from galaxy.util import ExecutionTimer
+from galaxy.util.bunch import Bunch
+from galaxy.jobs.runners.util.job_script import write_script
+from galaxy.jobs.runners.util.job_script import job_script
+from galaxy.jobs.runners.util.env import env_to_statement
+
+from .state_handler_factory import build_state_handlers
+
+log = logging.getLogger( __name__ )
+
+STOP_SIGNAL = object()
+
+
+JOB_RUNNER_PARAMETER_UNKNOWN_MESSAGE = "Invalid job runner parameter for this plugin: %s"
+JOB_RUNNER_PARAMETER_MAP_PROBLEM_MESSAGE = "Job runner parameter '%s' value '%s' could not be converted to the correct type"
+JOB_RUNNER_PARAMETER_VALIDATION_FAILED_MESSAGE = "Job runner parameter %s failed validation"
+
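+# Shell fragments prepended to job scripts: the first prepends Galaxy's lib
+# directory to PYTHONPATH; the second activates Galaxy's virtualenv when one
+# is configured and not already active.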
+GALAXY_LIB_ADJUST_TEMPLATE = """GALAXY_LIB="%s"; if [ "$GALAXY_LIB" != "None" ]; then if [ -n "$PYTHONPATH" ]; then PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"; else PYTHONPATH="$GALAXY_LIB"; fi; export PYTHONPATH; fi;"""
+GALAXY_VENV_TEMPLATE = """GALAXY_VIRTUAL_ENV="%s"; if [ "$GALAXY_VIRTUAL_ENV" != "None" -a -z "$VIRTUAL_ENV" -a -f "$GALAXY_VIRTUAL_ENV/bin/activate" ]; then . "$GALAXY_VIRTUAL_ENV/bin/activate"; fi;"""
+
+
+class RunnerParams( ParamsWithSpecs ):
+
+    def _param_unknown_error( self, name ):
+        raise Exception( JOB_RUNNER_PARAMETER_UNKNOWN_MESSAGE % name )
+
+    def _param_map_error( self, name, value ):
+        raise Exception( JOB_RUNNER_PARAMETER_MAP_PROBLEM_MESSAGE % ( name, value ) )
+
+    def _param_vaildation_error( self, name, value ):
+        raise Exception( JOB_RUNNER_PARAMETER_VALIDATION_FAILED_MESSAGE % name )
+
+
+class BaseJobRunner( object ):
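+    # Each spec supplies a coercion function ("map"), a validator ("valid"),
+    # and a "default" used when the destination does not set the parameter.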
+    DEFAULT_SPECS = dict( recheck_missing_job_retries=dict( map=int, valid=lambda x: x >= 0, default=0 ) )
+
+    def __init__( self, app, nworkers, **kwargs ):
+        """Start the job runner
+        """
+        self.app = app
+        self.sa_session = app.model.context
+        self.nworkers = nworkers
+        runner_param_specs = self.DEFAULT_SPECS.copy()
+        if 'runner_param_specs' in kwargs:
+            runner_param_specs.update( kwargs.pop( 'runner_param_specs' ) )
+        if kwargs:
+            log.debug( 'Loading %s with params: %s', self.runner_name, kwargs )
+        self.runner_params = RunnerParams( specs=runner_param_specs, params=kwargs )
+        self.runner_state_handlers = build_state_handlers()
+
+    def _init_worker_threads(self):
+        """Start ``nworkers`` worker threads.
+        """
+        self.work_queue = Queue()
+        self.work_threads = []
+        log.debug('Starting %s %s workers' % (self.nworkers, self.runner_name))
+        for i in range(self.nworkers):
+            worker = threading.Thread( name="%s.work_thread-%d" % (self.runner_name, i), target=self.run_next )
+            worker.setDaemon( True )
+            worker.start()
+            self.work_threads.append( worker )
+
+    def run_next(self):
+        """Run the next item in the work queue (a job waiting to run)
+        """
+        while True:
+            ( method, arg ) = self.work_queue.get()
+            if method is STOP_SIGNAL:
+                return
+            # id and name are collected first so that the call of method() is the last exception.
+            try:
+                if isinstance(arg, AsynchronousJobState):
+                    job_id = arg.job_wrapper.get_id_tag()
+                else:
+                    # arg should be a JobWrapper/TaskWrapper
+                    job_id = arg.get_id_tag()
+            except Exception:
+                job_id = 'unknown'
+            try:
+                name = method.__name__
+            except Exception:
+                name = 'unknown'
+            try:
+                method(arg)
+            except Exception:
+                log.exception( "(%s) Unhandled exception calling %s" % ( job_id, name ) )
+
+    # Causes a runner's `queue_job` method to be called from a worker thread
+    def put(self, job_wrapper):
+        """Add a job to the queue (by job identifier), indicate that the job is ready to run.
+        """
+        put_timer = ExecutionTimer()
+        job = job_wrapper.get_job()
+        # Change to queued state before handing to worker thread so the runner won't pick it up again
+        job_wrapper.change_state( model.Job.states.QUEUED, flush=False, job=job )
+        # Persist the destination so that the job will be included in counts if using concurrency limits
+        job_wrapper.set_job_destination( job_wrapper.job_destination, None, flush=False, job=job )
+        self.sa_session.flush()
+        self.mark_as_queued(job_wrapper)
+        log.debug("Job [%s] queued %s" % (job_wrapper.job_id, put_timer))
+
+    def mark_as_queued(self, job_wrapper):
+        self.work_queue.put( ( self.queue_job, job_wrapper ) )
+
+    def shutdown( self ):
+        """Attempts to gracefully shut down the worker threads
+        """
+        log.info( "%s: Sending stop signal to %s worker threads" % ( self.runner_name, len( self.work_threads ) ) )
+        for i in range( len( self.work_threads ) ):
+            self.work_queue.put( ( STOP_SIGNAL, None ) )
+
+    # Most runners should override the legacy URL handler methods and destination param method
+    def url_to_destination(self, url):
+        """
+        Convert a legacy URL to a JobDestination.
+
+        Job runner URLs are deprecated, JobDestinations should be used instead.
+        This base class method converts from a URL to a very basic
+        JobDestination without destination params.
+        """
+        return galaxy.jobs.JobDestination(runner=url.split(':')[0])
+
+    def parse_destination_params(self, params):
+        """Parse the JobDestination ``params`` dict and return the runner's native representation of those params.
+        """
+        raise NotImplementedError()
+
+    def prepare_job(self, job_wrapper, include_metadata=False, include_work_dir_outputs=True,
+                    modify_command_for_container=True):
+        """Some sanity checks that all runners' queue_job() methods are likely to want to do
+        """
+        job_id = job_wrapper.get_id_tag()
+        job_state = job_wrapper.get_state()
+        job_wrapper.is_ready = False
+        job_wrapper.runner_command_line = None
+
+        # Make sure the job hasn't been deleted
+        if job_state == model.Job.states.DELETED:
+            log.debug( "(%s) Job deleted by user before it entered the %s queue" % ( job_id, self.runner_name ) )
+            if self.app.config.cleanup_job in ( "always", "onsuccess" ):
+                job_wrapper.cleanup()
+            return False
+        elif job_state != model.Job.states.QUEUED:
+            log.info( "(%s) Job is in state %s, skipping execution" % ( job_id, job_state ) )
+            # cleanup may not be safe in all states
+            return False
+
+        # Prepare the job
+        try:
+            job_wrapper.prepare()
+            job_wrapper.runner_command_line = self.build_command_line(
+                job_wrapper,
+                include_metadata=include_metadata,
+                include_work_dir_outputs=include_work_dir_outputs,
+                modify_command_for_container=modify_command_for_container
+            )
+        except Exception as e:
+            log.exception("(%s) Failure preparing job" % job_id)
+            job_wrapper.fail( e.message if hasattr( e, 'message' ) else "Job preparation failed", exception=True )
+            return False
+
+        if not job_wrapper.runner_command_line:
+            job_wrapper.finish( '', '' )
+            return False
+
+        return True
+
+    # Runners must override the job handling methods
+    def queue_job(self, job_wrapper):
+        raise NotImplementedError()
+
+    def stop_job(self, job):
+        raise NotImplementedError()
+
+    def recover(self, job, job_wrapper):
+        raise NotImplementedError()
+
+    def build_command_line( self, job_wrapper, include_metadata=False, include_work_dir_outputs=True,
+                            modify_command_for_container=True ):
+        container = self._find_container( job_wrapper )
+        if not container and job_wrapper.requires_containerization:
+            raise Exception("Failed to find a container when required, contact Galaxy admin.")
+        return build_command(
+            self,
+            job_wrapper,
+            include_metadata=include_metadata,
+            include_work_dir_outputs=include_work_dir_outputs,
+            modify_command_for_container=modify_command_for_container,
+            container=container
+        )
+
+    def get_work_dir_outputs( self, job_wrapper, job_working_directory=None, tool_working_directory=None ):
+        """
+        Returns a list of pairs (source_file, destination) describing the path
+        to a work_dir output file and its ultimate destination.
+        """
+        if tool_working_directory is not None and job_working_directory is not None:
+            raise Exception("get_work_dir_outputs called with both a job and tool working directory, only one may be specified")
+
+        if tool_working_directory is None:
+            if not job_working_directory:
+                job_working_directory = os.path.abspath( job_wrapper.working_directory )
+            tool_working_directory = os.path.join(job_working_directory, "working")
+
+        # Set up dict of dataset id --> output path; output path can be real or
+        # false depending on outputs_to_working_directory
+        output_paths = {}
+        for dataset_path in job_wrapper.get_output_fnames():
+            path = dataset_path.real_path
+            if self.app.config.outputs_to_working_directory:
+                path = dataset_path.false_path
+            output_paths[ dataset_path.dataset_id ] = path
+
+        output_pairs = []
+        # Walk job's output associations to find and use from_work_dir attributes.
+        job = job_wrapper.get_job()
+        job_tool = job_wrapper.tool
+        for (joda, dataset) in self._walk_dataset_outputs( job ):
+            if joda and job_tool:
+                hda_tool_output = job_tool.find_output_def( joda.name )
+                if hda_tool_output and hda_tool_output.from_work_dir:
+                    # Copy from working dir to HDA.
+                    # TODO: move instead of copy to save time?
+                    source_file = os.path.join( tool_working_directory, hda_tool_output.from_work_dir )
+                    destination = job_wrapper.get_output_destination( output_paths[ dataset.dataset_id ] )
+                    if in_directory( source_file, tool_working_directory ):
+                        output_pairs.append( ( source_file, destination ) )
+                    else:
+                        # Security violation.
+                        log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, job_wrapper.working_directory ) )
+        return output_pairs
+
+    def _walk_dataset_outputs( self, job ):
+        for dataset_assoc in job.output_datasets + job.output_library_datasets:
+            for dataset in dataset_assoc.dataset.dataset.history_associations + dataset_assoc.dataset.dataset.library_associations:
+                if isinstance( dataset, self.app.model.HistoryDatasetAssociation ):
+                    joda = self.sa_session.query( self.app.model.JobToOutputDatasetAssociation ).filter_by( job=job, dataset=dataset ).first()
+                    yield (joda, dataset)
+        # TODO: why is this not just something easy like:
+        # for dataset_assoc in job.output_datasets + job.output_library_datasets:
+        #      yield (dataset_assoc, dataset_assoc.dataset)
+        #  I don't understand the reworking it backwards.  -John
+
+    def _handle_metadata_externally( self, job_wrapper, resolve_requirements=False ):
+        """
+        Set metadata externally. Used by the Pulsar job runner where this
+        shouldn't be attached to the command line to execute.
+        """
+        # run the metadata setting script here
+        # this is terminate-able when output dataset/job is deleted
+        # so that long running set_meta()s can be canceled without having to reboot the server
+        if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and job_wrapper.output_paths:
+            lib_adjust = GALAXY_LIB_ADJUST_TEMPLATE % job_wrapper.galaxy_lib_dir
+            venv = GALAXY_VENV_TEMPLATE % job_wrapper.galaxy_virtual_env
+            external_metadata_script = job_wrapper.setup_external_metadata( output_fnames=job_wrapper.get_output_fnames(),
+                                                                            set_extension=True,
+                                                                            tmp_dir=job_wrapper.working_directory,
+                                                                            # We don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
+                                                                            kwds={ 'overwrite' : False } )
+            external_metadata_script = "%s %s %s" % (lib_adjust, venv, external_metadata_script)
+            if resolve_requirements:
+                dependency_shell_commands = self.app.datatypes_registry.set_external_metadata_tool.build_dependency_shell_commands(job_directory=job_wrapper.working_directory)
+                if dependency_shell_commands:
+                    if isinstance( dependency_shell_commands, list ):
+                        dependency_shell_commands = "&&".join( dependency_shell_commands )
+                    external_metadata_script = "%s&&%s" % ( dependency_shell_commands, external_metadata_script )
+            log.debug( 'executing external set_meta script for job %d: %s' % ( job_wrapper.job_id, external_metadata_script ) )
+            external_metadata_proc = subprocess.Popen( args=external_metadata_script,
+                                                       shell=True,
+                                                       cwd=job_wrapper.working_directory,
+                                                       env=os.environ,
+                                                       preexec_fn=os.setpgrp )
+            job_wrapper.external_output_metadata.set_job_runner_external_pid( external_metadata_proc.pid, self.sa_session )
+            external_metadata_proc.wait()
+            log.debug( 'execution of external set_meta for job %d finished' % job_wrapper.job_id )
+
+    def get_job_file(self, job_wrapper, **kwds):
+        job_metrics = job_wrapper.app.job_metrics
+        job_instrumenter = job_metrics.job_instrumenters[ job_wrapper.job_destination.id ]
+
+        env_setup_commands = kwds.get( 'env_setup_commands', [] )
+        env_setup_commands.append( job_wrapper.get_env_setup_clause() or '' )
+        destination = job_wrapper.job_destination or {}
+        envs = destination.get( "env", [] )
+        envs.extend( job_wrapper.environment_variables )
+        for env in envs:
+            env_setup_commands.append( env_to_statement( env ) )
+        command_line = job_wrapper.runner_command_line
+        options = dict(
+            job_instrumenter=job_instrumenter,
+            galaxy_lib=job_wrapper.galaxy_lib_dir,
+            galaxy_virtual_env=job_wrapper.galaxy_virtual_env,
+            env_setup_commands=env_setup_commands,
+            working_directory=os.path.abspath( job_wrapper.working_directory ),
+            command=command_line,
+            shell=job_wrapper.shell,
+        )
+        # Additional logging to enable if debugging from_work_dir handling, metadata
+        # commands, etc... (or just peek at the job script).
+        job_id = job_wrapper.job_id
+        log.debug( '(%s) command is: %s' % ( job_id, command_line ) )
+        options.update(**kwds)
+        return job_script(**options)
+
+    def write_executable_script( self, path, contents, mode=0o755 ):
+        write_script( path, contents, self.app.config, mode=mode )
+
+    def _find_container(
+        self,
+        job_wrapper,
+        compute_working_directory=None,
+        compute_tool_directory=None,
+        compute_job_directory=None,
+    ):
+        job_directory_type = "galaxy" if compute_working_directory is None else "pulsar"
+        if not compute_working_directory:
+            compute_working_directory = job_wrapper.tool_working_directory
+
+        if not compute_job_directory:
+            compute_job_directory = job_wrapper.working_directory
+
+        if not compute_tool_directory:
+            compute_tool_directory = job_wrapper.tool.tool_dir
+
+        tool = job_wrapper.tool
+        from galaxy.tools.deps import containers
+        tool_info = containers.ToolInfo(tool.containers, tool.requirements)
+        job_info = containers.JobInfo(
+            compute_working_directory,
+            compute_tool_directory,
+            compute_job_directory,
+            job_directory_type,
+        )
+
+        destination_info = job_wrapper.job_destination.params
+        return self.app.container_finder.find_container(
+            tool_info,
+            destination_info,
+            job_info
+        )
+
+    def _handle_runner_state( self, runner_state, job_state ):
+        try:
+            for handler in self.runner_state_handlers.get(runner_state, []):
+                handler(self.app, self, job_state)
+                if job_state.runner_state_handled:
+                    break
+        except Exception:
+            log.exception('Caught exception in runner state handler:')
+
+    def mark_as_resubmitted( self, job_state, info=None ):
+        job_state.job_wrapper.mark_as_resubmitted( info=info )
+        if not self.app.config.track_jobs_in_database:
+            job_state.job_wrapper.change_state( model.Job.states.QUEUED )
+            self.app.job_manager.job_handler.dispatcher.put( job_state.job_wrapper )
+
+
+class JobState( object ):
+    """
+    Encapsulate state of jobs.
+    """
+    runner_states = Bunch(
+        WALLTIME_REACHED='walltime_reached',
+        MEMORY_LIMIT_REACHED='memory_limit_reached',
+        GLOBAL_WALLTIME_REACHED='global_walltime_reached',
+        OUTPUT_SIZE_LIMIT='output_size_limit'
+    )
+
+    def __init__( self ):
+        self.runner_state_handled = False
+
+    def set_defaults( self, files_dir ):
+        if self.job_wrapper is not None:
+            id_tag = self.job_wrapper.get_id_tag()
+            if files_dir is not None:
+                self.job_file = JobState.default_job_file( files_dir, id_tag )
+                self.output_file = os.path.join( files_dir, 'galaxy_%s.o' % id_tag )
+                self.error_file = os.path.join( files_dir, 'galaxy_%s.e' % id_tag )
+                self.exit_code_file = os.path.join( files_dir, 'galaxy_%s.ec' % id_tag )
+            job_name = 'g%s' % id_tag
+            if self.job_wrapper.tool.old_id:
+                job_name += '_%s' % self.job_wrapper.tool.old_id
+            if self.job_wrapper.user:
+                job_name += '_%s' % self.job_wrapper.user
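+            # Everything outside letters, digits, and '_' becomes '_', e.g.
+            # 'g42_cat1_user@example.org' -> 'g42_cat1_user_example_org'.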
+            self.job_name = ''.join( map( lambda x: x if x in ( string.letters + string.digits + '_' ) else '_', job_name ) )
+
+    @staticmethod
+    def default_job_file( files_dir, id_tag ):
+        return os.path.join( files_dir, 'galaxy_%s.sh' % id_tag )
+
+    @staticmethod
+    def default_exit_code_file( files_dir, id_tag ):
+        return os.path.join( files_dir, 'galaxy_%s.ec' % id_tag )
+
+
+class AsynchronousJobState( JobState ):
+    """
+    Encapsulate the state of an asynchronous job; this should be subclassed as
+    needed for various job runners to capture additional information needed
+    to communicate with the distributed resource manager.
+    """
+
+    def __init__( self, files_dir=None, job_wrapper=None, job_id=None, job_file=None, output_file=None, error_file=None, exit_code_file=None, job_name=None, job_destination=None ):
+        super( AsynchronousJobState, self ).__init__()
+        self.old_state = None
+        self._running = False
+        self.check_count = 0
+        self.start_time = None
+
+        self.job_wrapper = job_wrapper
+        # job_id is the DRM's job id, not the Galaxy job id
+        self.job_id = job_id
+        self.job_destination = job_destination
+
+        self.job_file = job_file
+        self.output_file = output_file
+        self.error_file = error_file
+        self.exit_code_file = exit_code_file
+        self.job_name = job_name
+
+        self.set_defaults( files_dir )
+
+        self.cleanup_file_attributes = [ 'job_file', 'output_file', 'error_file', 'exit_code_file' ]
+
+    @property
+    def running( self ):
+        return self._running
+
+    @running.setter
+    def running( self, is_running ):
+        self._running = is_running
+        # This will be invalid for job recovery
+        if self.start_time is None:
+            self.start_time = datetime.datetime.now()
+
+    def check_limits( self, runtime=None ):
+        limit_state = None
+        if self.job_wrapper.has_limits():
+            self.check_count += 1
+            if self.running and (self.check_count % 20 == 0):
+                if runtime is None:
+                    runtime = datetime.datetime.now() - (self.start_time or datetime.datetime.now())
+                self.check_count = 0
+                limit_state = self.job_wrapper.check_limits( runtime=runtime )
+        if limit_state is not None:
+            # Set up the job for failure, but the runner will do the actual work
+            self.runner_state, self.fail_message = limit_state
+            self.stop_job = True
+            return True
+        return False
+
+    def cleanup( self ):
+        for file_path in [ getattr( self, a ) for a in self.cleanup_file_attributes if hasattr( self, a ) ]:
+            try:
+                os.unlink( file_path )
+            except Exception as e:
+                log.debug( "(%s/%s) Unable to cleanup %s: %s" % ( self.job_wrapper.get_id_tag(), self.job_id, file_path, str( e ) ) )
+
+    def register_cleanup_file_attribute( self, attribute ):
+        if attribute not in self.cleanup_file_attributes:
+            self.cleanup_file_attributes.append( attribute )
+
+
+class AsynchronousJobRunner( BaseJobRunner ):
+    """Parent class for any job runner that runs jobs asynchronously (e.g. via
+    a distributed resource manager).  Provides general methods for having a
+    thread to monitor the state of asynchronous jobs and submitting those jobs
+    to the correct methods (queue, finish, cleanup) at appropriate times.
+    """
+
+    def __init__( self, app, nworkers, **kwargs ):
+        super( AsynchronousJobRunner, self ).__init__( app, nworkers, **kwargs )
+        # 'watched' and 'queue' are both used to keep track of jobs to watch.
+        # 'queue' is used to add new watched jobs, and can be called from
+        # any thread (usually by the 'queue_job' method). 'watched' must only
+        # be modified by the monitor thread, which will move items from 'queue'
+        # to 'watched' and then manage the watched jobs.
+        self.watched = []
+        self.monitor_queue = Queue()
+
+    def _init_monitor_thread(self):
+        self.monitor_thread = threading.Thread( name="%s.monitor_thread" % self.runner_name, target=self.monitor )
+        self.monitor_thread.setDaemon( True )
+        self.monitor_thread.start()
+
+    def handle_stop(self):
+        # DRMAA and SGE runners should override this and disconnect.
+        pass
+
+    def monitor( self ):
+        """
+        Watches jobs currently in the monitor queue and deals with state
+        changes (queued to running) and job completion.
+        """
+        while True:
+            # Take any new watched jobs and put them on the monitor list
+            try:
+                while True:
+                    async_job_state = self.monitor_queue.get_nowait()
+                    if async_job_state is STOP_SIGNAL:
+                        # TODO: This is where any cleanup would occur
+                        self.handle_stop()
+                        return
+                    self.watched.append( async_job_state )
+            except Empty:
+                pass
+            # Iterate over the list of watched jobs and check state
+            try:
+                self.check_watched_items()
+            except Exception:
+                log.exception('Unhandled exception checking active jobs')
+            # Sleep a bit before the next state check
+            time.sleep( 1 )
+
+    def monitor_job(self, job_state):
+        self.monitor_queue.put( job_state )
+
+    def shutdown( self ):
+        """Attempts to gracefully shut down the monitor thread"""
+        log.info( "%s: Sending stop signal to monitor thread" % self.runner_name )
+        self.monitor_queue.put( STOP_SIGNAL )
+        # Call the parent's shutdown method to stop workers
+        super( AsynchronousJobRunner, self ).shutdown()
+
+    def check_watched_items(self):
+        """
+        This method is responsible for iterating over self.watched and handling
+        state changes and updating self.watched with a new list of watched job
+        states. Subclasses can opt to override this directly (as older job runners will
+        initially) or just override check_watched_item and allow the list processing to
+        reuse the logic here.
+        """
+        new_watched = []
+        for async_job_state in self.watched:
+            new_async_job_state = self.check_watched_item(async_job_state)
+            if new_async_job_state:
+                new_watched.append(new_async_job_state)
+        self.watched = new_watched
+
+    # Subclasses should implement this unless they override check_watched_items all together.
+    def check_watched_item(self, job_state):
+        raise NotImplementedError()
+
+    def finish_job( self, job_state ):
+        """
+        Get the output/error for a finished job, pass to `job_wrapper.finish`
+        and cleanup all the job's temporary files.
+        """
+        galaxy_id_tag = job_state.job_wrapper.get_id_tag()
+        external_job_id = job_state.job_id
+
+        # To ensure that files below are readable, ownership must be reclaimed first
+        job_state.job_wrapper.reclaim_ownership()
+
+        # wait for the files to appear
+        which_try = 0
+        while which_try < (self.app.config.retry_job_output_collection + 1):
+            try:
+                stdout = shrink_stream_by_size( open( job_state.output_file, "r" ), DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+                stderr = shrink_stream_by_size( open( job_state.error_file, "r" ), DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+                break
+            except Exception as e:
+                if which_try == self.app.config.retry_job_output_collection:
+                    stdout = ''
+                    stderr = 'Job output not returned from cluster'
+                    log.error( '(%s/%s) %s: %s' % ( galaxy_id_tag, external_job_id, stderr, str( e ) ) )
+                else:
+                    time.sleep(1)
+                which_try += 1
+
+        try:
+            # This should be an 8-bit exit code, but read ahead anyway:
+            exit_code_str = open( job_state.exit_code_file, "r" ).read(32)
+        except Exception:
+            # By default, the exit code is 0, which typically indicates success.
+            exit_code_str = "0"
+
+        try:
+            # Decode the exit code. If it's bogus, then just use 0.
+            exit_code = int(exit_code_str)
+        except Exception:
+            log.warning( "(%s/%s) Exit code '%s' invalid. Using 0." % ( galaxy_id_tag, external_job_id, exit_code_str ) )
+            exit_code = 0
+
+        # clean up the job files
+        cleanup_job = job_state.job_wrapper.cleanup_job
+        if cleanup_job == "always" or ( not stderr and cleanup_job == "onsuccess" ):
+            job_state.cleanup()
+
+        try:
+            job_state.job_wrapper.finish( stdout, stderr, exit_code )
+        except Exception:
+            log.exception( "(%s/%s) Job wrapper finish method failed" % ( galaxy_id_tag, external_job_id ) )
+            job_state.job_wrapper.fail( "Unable to finish job", exception=True )
+
+    def fail_job( self, job_state ):
+        if getattr( job_state, 'stop_job', True ):
+            self.stop_job( self.sa_session.query( self.app.model.Job ).get( job_state.job_wrapper.job_id ) )
+        self._handle_runner_state( 'failure', job_state )
+        # Not convinced this is the best way to indicate this state, but
+        # something necessary
+        if not job_state.runner_state_handled:
+            job_state.job_wrapper.fail( getattr( job_state, 'fail_message', 'Job failed' ) )
+            if job_state.job_wrapper.cleanup_job == "always":
+                job_state.cleanup()
+
+    def mark_as_finished(self, job_state):
+        self.work_queue.put( ( self.finish_job, job_state ) )
+
+    def mark_as_failed(self, job_state):
+        self.work_queue.put( ( self.fail_job, job_state ) )
diff --git a/lib/galaxy/jobs/runners/cli.py b/lib/galaxy/jobs/runners/cli.py
new file mode 100644
index 0000000..70e8c3f
--- /dev/null
+++ b/lib/galaxy/jobs/runners/cli.py
@@ -0,0 +1,225 @@
+"""
+Job control via a command line interface (e.g. qsub/qstat), possibly over a remote connection (e.g. ssh).
+"""
+
+import logging
+
+from galaxy import model
+from galaxy.jobs import JobDestination
+from galaxy.jobs.runners import (
+    AsynchronousJobRunner,
+    AsynchronousJobState
+)
+from galaxy.util import asbool
+
+from .util.cli import CliInterface, split_params
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'ShellJobRunner', )
+
+DEFAULT_EMBED_METADATA_IN_JOB = True
+
+
+class ShellJobRunner( AsynchronousJobRunner ):
+    """
+    Job runner backed by a finite pool of worker threads. FIFO scheduling.
+    """
+    runner_name = "ShellRunner"
+
+    def __init__( self, app, nworkers ):
+        """Start the job runner """
+        super( ShellJobRunner, self ).__init__( app, nworkers )
+
+        self.cli_interface = CliInterface()
+        self._init_monitor_thread()
+        self._init_worker_threads()
+
+    def get_cli_plugins( self, shell_params, job_params ):
+        return self.cli_interface.get_plugins( shell_params, job_params )
+
+    def url_to_destination( self, url ):
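+        # Legacy URL form: cli://<shell_params>/<job_params>, e.g.
+        #   cli://plugin=SecureShell&hostname=example.org/plugin=Torque
+        # becomes params { 'shell_plugin': 'SecureShell',
+        #                  'shell_hostname': 'example.org', 'job_plugin': 'Torque' }.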
+        params = {}
+        shell_params, job_params = url.split( '/' )[ 2:4 ]
+        # split 'foo=bar&baz=quux' into { 'foo' : 'bar', 'baz' : 'quux' }
+        shell_params = dict( [ ( 'shell_' + k, v ) for k, v in [ kv.split( '=', 1 ) for kv in shell_params.split( '&' ) ] ] )
+        job_params = dict( [ ( 'job_' + k, v ) for k, v in [ kv.split( '=', 1 ) for kv in job_params.split( '&' ) ] ] )
+        params.update( shell_params )
+        params.update( job_params )
+        log.debug( "Converted URL '%s' to destination runner=cli, params=%s" % ( url, params ) )
+        # Create a dynamic JobDestination
+        return JobDestination( runner='cli', params=params )
+
+    def parse_destination_params( self, params ):
+        return split_params( params )
+
+    def queue_job( self, job_wrapper ):
+        """Create job script and submit it to the DRM"""
+        # prepare the job
+        include_metadata = asbool( job_wrapper.job_destination.params.get( "embed_metadata_in_job", DEFAULT_EMBED_METADATA_IN_JOB ) )
+        if not self.prepare_job( job_wrapper, include_metadata=include_metadata ):
+            return
+
+        # Get shell and job execution interface
+        job_destination = job_wrapper.job_destination
+        shell_params, job_params = self.parse_destination_params(job_destination.params)
+        shell, job_interface = self.get_cli_plugins(shell_params, job_params)
+
+        # wrapper.get_id_tag() instead of job_id for compatibility with TaskWrappers.
+        galaxy_id_tag = job_wrapper.get_id_tag()
+
+        # define job attributes
+        ajs = AsynchronousJobState( files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper )
+
+        job_file_kwargs = job_interface.job_script_kwargs(ajs.output_file, ajs.error_file, ajs.job_name)
+        script = self.get_job_file(
+            job_wrapper,
+            exit_code_path=ajs.exit_code_file,
+            **job_file_kwargs
+        )
+
+        try:
+            self.write_executable_script( ajs.job_file, script )
+        except Exception:
+            log.exception("(%s) failure writing job script" % galaxy_id_tag )
+            job_wrapper.fail("failure preparing job script", exception=True)
+            return
+
+        # job was deleted while we were preparing it
+        if job_wrapper.get_state() == model.Job.states.DELETED:
+            log.info("(%s) Job deleted by user before it entered the queue" % galaxy_id_tag )
+            if job_wrapper.cleanup_job in ("always", "onsuccess"):
+                job_wrapper.cleanup()
+            return
+
+        log.debug( "(%s) submitting file: %s" % ( galaxy_id_tag, ajs.job_file ) )
+
+        cmd_out = shell.execute(job_interface.submit(ajs.job_file))
+        if cmd_out.returncode != 0:
+            log.error('(%s) submission failed (stdout): %s' % (galaxy_id_tag, cmd_out.stdout))
+            log.error('(%s) submission failed (stderr): %s' % (galaxy_id_tag, cmd_out.stderr))
+            job_wrapper.fail("failure submitting job")
+            return
+        # Some job runners return something like 'Submitted batch job XXXX'
+        # Strip and split to get job ID, guarding against empty output.
+        submit_stdout = cmd_out.stdout.strip()
+        external_job_id = submit_stdout.split()[-1] if submit_stdout else None
+        if not external_job_id:
+            log.error('(%s) submission did not return a job identifier, failing job' % galaxy_id_tag)
+            job_wrapper.fail("failure submitting job")
+            return
+
+        log.info("(%s) queued with identifier: %s" % ( galaxy_id_tag, external_job_id ) )
+
+        # store runner information for tracking if Galaxy restarts
+        job_wrapper.set_job_destination( job_destination, external_job_id )
+
+        # Store state information for job
+        ajs.job_id = external_job_id
+        ajs.old_state = 'new'
+        ajs.job_destination = job_destination
+
+        # Add to our 'queue' of jobs to monitor
+        self.monitor_queue.put( ajs )
+
+    def check_watched_items( self ):
+        """
+        Called by the monitor thread to look at each watched job and deal
+        with state changes.
+        """
+        new_watched = []
+
+        job_states = self.__get_job_states()
+
+        for ajs in self.watched:
+            external_job_id = ajs.job_id
+            id_tag = ajs.job_wrapper.get_id_tag()
+            old_state = ajs.old_state
+            state = job_states.get(external_job_id, None)
+            if state is None:
+                if ajs.job_wrapper.get_state() == model.Job.states.DELETED:
+                    continue
+
+                external_metadata = not asbool( ajs.job_wrapper.job_destination.params.get( "embed_metadata_in_job", DEFAULT_EMBED_METADATA_IN_JOB ) )
+                if external_metadata:
+                    self._handle_metadata_externally( ajs.job_wrapper, resolve_requirements=True )
+
+                log.debug("(%s/%s) job not found in batch state check" % ( id_tag, external_job_id ) )
+                shell_params, job_params = self.parse_destination_params(ajs.job_destination.params)
+                shell, job_interface = self.get_cli_plugins(shell_params, job_params)
+                cmd_out = shell.execute(job_interface.get_single_status(external_job_id))
+                state = job_interface.parse_single_status(cmd_out.stdout, external_job_id)
+                if state == model.Job.states.OK:
+                    log.debug('(%s/%s) job execution finished, running job wrapper finish method' % ( id_tag, external_job_id ) )
+                    self.work_queue.put( ( self.finish_job, ajs ) )
+                    continue
+                else:
+                    log.warning('(%s/%s) job not found in batch state check, but found in individual state check' % ( id_tag, external_job_id ) )
+                    if state != old_state:
+                        ajs.job_wrapper.change_state( state )
+            else:
+                if state != old_state:
+                    log.debug("(%s/%s) state change: from %s to %s" % ( id_tag, external_job_id, old_state, state ) )
+                    ajs.job_wrapper.change_state( state )
+                if state == model.Job.states.RUNNING and not ajs.running:
+                    ajs.running = True
+                    ajs.job_wrapper.change_state( model.Job.states.RUNNING )
+            ajs.old_state = state
+            if state == model.Job.states.OK:
+                self.work_queue.put( ( self.finish_job, ajs ) )
+            else:
+                new_watched.append( ajs )
+        # Replace the watch list with the updated version
+        self.watched = new_watched
+
+    def __get_job_states(self):
+        job_destinations = {}
+        job_states = {}
+        # unique the list of destinations
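+        # (e.g. several watched jobs bound to one destination yield a single
+        # batched status query covering all of their ids, not one per job)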
+        for ajs in self.watched:
+            if ajs.job_destination.id not in job_destinations:
+                job_destinations[ajs.job_destination.id] = dict( job_destination=ajs.job_destination, job_ids=[ ajs.job_id ] )
+            else:
+                job_destinations[ajs.job_destination.id]['job_ids'].append( ajs.job_id )
+        # check each destination for the listed job ids
+        for job_destination_id, v in job_destinations.items():
+            job_destination = v['job_destination']
+            job_ids = v['job_ids']
+            shell_params, job_params = self.parse_destination_params(job_destination.params)
+            shell, job_interface = self.get_cli_plugins(shell_params, job_params)
+            cmd_out = shell.execute(job_interface.get_status(job_ids))
+            assert cmd_out.returncode == 0, cmd_out.stderr
+            job_states.update(job_interface.parse_status(cmd_out.stdout, job_ids))
+        return job_states
+
+    def stop_job( self, job ):
+        """Attempts to delete a dispatched job"""
+        try:
+            shell_params, job_params = self.parse_destination_params(job.destination_params)
+            shell, job_interface = self.get_cli_plugins(shell_params, job_params)
+            cmd_out = shell.execute(job_interface.delete( job.job_runner_external_id ))
+            assert cmd_out.returncode == 0, cmd_out.stderr
+            log.debug( "(%s/%s) Terminated at user's request" % ( job.id, job.job_runner_external_id ) )
+        except Exception as e:
+            log.debug( "(%s/%s) User killed running job, but error encountered during termination: %s" % ( job.id, job.job_runner_external_id, e ) )
+
+    def recover( self, job, job_wrapper ):
+        """Recovers jobs stuck in the queued/running state when Galaxy started"""
+        job_id = job.get_job_runner_external_id()
+        if job_id is None:
+            self.put( job_wrapper )
+            return
+        ajs = AsynchronousJobState( files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper )
+        ajs.job_id = str( job_id )
+        ajs.command_line = job.command_line
+        ajs.job_wrapper = job_wrapper
+        ajs.job_destination = job_wrapper.job_destination
+        if job.state == model.Job.states.RUNNING:
+            log.debug( "(%s/%s) is still in running state, adding to the runner monitor queue" % ( job.id, job.job_runner_external_id ) )
+            ajs.old_state = model.Job.states.RUNNING
+            ajs.running = True
+            self.monitor_queue.put( ajs )
+        elif job.state == model.Job.states.QUEUED:
+            log.debug( "(%s/%s) is still in queued state, adding to the runner monitor queue" % ( job.id, job.job_runner_external_id ) )
+            ajs.old_state = model.Job.states.QUEUED
+            ajs.running = False
+            self.monitor_queue.put( ajs )
diff --git a/lib/galaxy/jobs/runners/condor.py b/lib/galaxy/jobs/runners/condor.py
new file mode 100644
index 0000000..afa0217
--- /dev/null
+++ b/lib/galaxy/jobs/runners/condor.py
@@ -0,0 +1,237 @@
+"""
+Job control via the Condor DRM.
+"""
+import logging
+import os
+
+from galaxy import model
+from galaxy.jobs.runners import (
+    AsynchronousJobRunner,
+    AsynchronousJobState
+)
+from galaxy.jobs.runners.util.condor import (
+    build_submit_description,
+    condor_stop,
+    condor_submit,
+    submission_params,
+    summarize_condor_log
+)
+from galaxy.util import asbool
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'CondorJobRunner', )
+
+
+class CondorJobState( AsynchronousJobState ):
+    def __init__( self, **kwargs ):
+        """
+        Encapsulates state related to a job that is being run via the DRM and
+        that we need to monitor.
+        """
+        super( CondorJobState, self ).__init__( **kwargs )
+        self.failed = False
+        self.user_log = None
+        self.user_log_size = 0
+
+
+class CondorJobRunner( AsynchronousJobRunner ):
+    """
+    Job runner backed by a finite pool of worker threads. FIFO scheduling
+    """
+    runner_name = "CondorRunner"
+
+    def __init__( self, app, nworkers ):
+        """Initialize this job runner and start the monitor thread"""
+        super( CondorJobRunner, self ).__init__( app, nworkers )
+        self._init_monitor_thread()
+        self._init_worker_threads()
+
+    def queue_job( self, job_wrapper ):
+        """Create job script and submit it to the DRM"""
+
+        # prepare the job
+        include_metadata = asbool( job_wrapper.job_destination.params.get( "embed_metadata_in_job", True ) )
+        if not self.prepare_job( job_wrapper, include_metadata=include_metadata):
+            return
+
+        # get configured job destination
+        job_destination = job_wrapper.job_destination
+
+        # wrapper.get_id_tag() instead of job_id for compatibility with TaskWrappers.
+        galaxy_id_tag = job_wrapper.get_id_tag()
+
+        # get destination params
+        query_params = submission_params(prefix="", **job_destination.params)
+        container = None
+        universe = query_params.get('universe', None)
+        if universe and universe.strip().lower() == 'docker':
+            container = self.find_container( job_wrapper )
+            if container:
+                # HTCondor needs the image as 'docker_image'
+                query_params.update({'docker_image': container})
+
+        galaxy_slots = query_params.get('request_cpus', None)
+        if galaxy_slots:
+            galaxy_slots_statement = 'GALAXY_SLOTS="%s"; export GALAXY_SLOTS_CONFIGURED="1"' % galaxy_slots
+        else:
+            galaxy_slots_statement = 'GALAXY_SLOTS="1"'
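+        # For example, a destination with request_cpus="4" (an illustrative
+        # value) produces:
+        #   GALAXY_SLOTS="4"; export GALAXY_SLOTS_CONFIGURED="1"
+        # so tools in the job script know how many slots they may use.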
+
+        # define job attributes
+        cjs = CondorJobState(
+            files_dir=self.app.config.cluster_files_directory,
+            job_wrapper=job_wrapper
+        )
+
+        cluster_directory = self.app.config.cluster_files_directory
+        cjs.user_log = os.path.join( cluster_directory, 'galaxy_%s.condor.log' % galaxy_id_tag )
+        cjs.register_cleanup_file_attribute( 'user_log' )
+        submit_file = os.path.join( cluster_directory, 'galaxy_%s.condor.desc' % galaxy_id_tag )
+        executable = cjs.job_file
+
+        build_submit_params = dict(
+            executable=executable,
+            output=cjs.output_file,
+            error=cjs.error_file,
+            user_log=cjs.user_log,
+            query_params=query_params,
+        )
+
+        submit_file_contents = build_submit_description(**build_submit_params)
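+        # The submit description is a plain HTCondor submit file; with the
+        # params above it looks roughly like (illustrative only, the exact
+        # contents come from build_submit_description and query_params):
+        #   executable = <cjs.job_file>
+        #   output = <cjs.output_file>
+        #   error = <cjs.error_file>
+        #   log = <cjs.user_log>
+        #   queue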
+        script = self.get_job_file(
+            job_wrapper,
+            exit_code_path=cjs.exit_code_file,
+            slots_statement=galaxy_slots_statement,
+        )
+        try:
+            self.write_executable_script( executable, script )
+        except Exception:
+            job_wrapper.fail( "failure preparing job script", exception=True )
+            log.exception( "(%s) failure preparing job script" % galaxy_id_tag )
+            return
+
+        cleanup_job = job_wrapper.cleanup_job
+        try:
+            open(submit_file, "w").write(submit_file_contents)
+        except Exception:
+            if cleanup_job == "always":
+                cjs.cleanup()
+                # job_wrapper.fail() calls job_wrapper.cleanup()
+            job_wrapper.fail( "failure preparing submit file", exception=True )
+            log.exception( "(%s) failure preparing submit file" % galaxy_id_tag )
+            return
+
+        # job was deleted while we were preparing it
+        if job_wrapper.get_state() == model.Job.states.DELETED:
+            log.debug( "Job %s deleted by user before it entered the queue" % galaxy_id_tag )
+            if cleanup_job in ("always", "onsuccess"):
+                os.unlink( submit_file )
+                cjs.cleanup()
+                job_wrapper.cleanup()
+            return
+
+        log.debug( "(%s) submitting file %s" % ( galaxy_id_tag, executable ) )
+
+        external_job_id, message = condor_submit(submit_file)
+        if external_job_id is None:
+            log.debug( "condor_submit failed for job %s: %s" % (job_wrapper.get_id_tag(), message) )
+            if self.app.config.cleanup_job == "always":
+                os.unlink( submit_file )
+                cjs.cleanup()
+            job_wrapper.fail( "condor_submit failed", exception=True )
+            return
+
+        os.unlink( submit_file )
+
+        log.info( "(%s) queued as %s" % ( galaxy_id_tag, external_job_id ) )
+
+        # store runner information for tracking if Galaxy restarts
+        job_wrapper.set_job_destination( job_destination, external_job_id )
+
+        # Store DRM related state information for job
+        cjs.job_id = external_job_id
+        cjs.job_destination = job_destination
+
+        # Add to our 'queue' of jobs to monitor
+        self.monitor_queue.put( cjs )
+
+    def check_watched_items( self ):
+        """
+        Called by the monitor thread to look at each watched job and deal
+        with state changes.
+        """
+        new_watched = []
+        for cjs in self.watched:
+            job_id = cjs.job_id
+            galaxy_id_tag = cjs.job_wrapper.get_id_tag()
+            try:
+                if os.stat( cjs.user_log ).st_size == cjs.user_log_size:
+                    new_watched.append( cjs )
+                    continue
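+                # summarize_condor_log's s<N> flags appear to map to HTCondor
+                # user log event codes (assumed from the usage below):
+                # 001 execute (s1), 004 evicted (s4), 005 terminated (s5),
+                # 007 shadow exception (s7), 009 aborted (s9).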
+                s1, s4, s7, s5, s9, log_size = summarize_condor_log(cjs.user_log, job_id)
+                job_running = s1 and not (s4 or s7)
+                job_complete = s5
+                job_failed = s9
+                cjs.user_log_size = log_size
+            except Exception:
+                # so we don't kill the monitor thread
+                log.exception( "(%s/%s) Unable to check job status" % ( galaxy_id_tag, job_id ) )
+                log.warning( "(%s/%s) job will now be errored" % ( galaxy_id_tag, job_id ) )
+                cjs.fail_message = "Cluster could not complete job"
+                self.work_queue.put( ( self.fail_job, cjs ) )
+                continue
+            if job_running and not cjs.running:
+                log.debug( "(%s/%s) job is now running" % ( galaxy_id_tag, job_id ) )
+                cjs.job_wrapper.change_state( model.Job.states.RUNNING )
+            if not job_running and cjs.running:
+                log.debug( "(%s/%s) job has stopped running" % ( galaxy_id_tag, job_id ) )
+                # Will switching from RUNNING to QUEUED confuse Galaxy?
+                # cjs.job_wrapper.change_state( model.Job.states.QUEUED )
+            if job_complete:
+                if cjs.job_wrapper.get_state() != model.Job.states.DELETED:
+                    external_metadata = not asbool( cjs.job_wrapper.job_destination.params.get( "embed_metadata_in_job", True) )
+                    if external_metadata:
+                        self._handle_metadata_externally( cjs.job_wrapper, resolve_requirements=True )
+                    log.debug( "(%s/%s) job has completed" % ( galaxy_id_tag, job_id ) )
+                    self.work_queue.put( ( self.finish_job, cjs ) )
+                continue
+            if job_failed:
+                log.debug( "(%s/%s) job failed" % ( galaxy_id_tag, job_id ) )
+                cjs.failed = True
+                self.work_queue.put( ( self.finish_job, cjs ) )
+                continue
+            cjs.running = job_running
+            new_watched.append( cjs )
+        # Replace the watch list with the updated version
+        self.watched = new_watched
+
+    def stop_job( self, job ):
+        """Attempts to delete a job from the DRM queue"""
+        external_id = job.job_runner_external_id
+        failure_message = condor_stop(external_id)
+        if failure_message:
+            log.debug("(%s/%s). Failed to stop condor %s" % (external_id, failure_message))
+
+    def recover( self, job, job_wrapper ):
+        """Recovers jobs stuck in the queued/running state when Galaxy started"""
+        # TODO Check if we need any changes here
+        job_id = job.get_job_runner_external_id()
+        galaxy_id_tag = job_wrapper.get_id_tag()
+        if job_id is None:
+            self.put( job_wrapper )
+            return
+        cjs = CondorJobState( job_wrapper=job_wrapper, files_dir=self.app.config.cluster_files_directory )
+        cjs.job_id = str( job_id )
+        cjs.command_line = job.get_command_line()
+        cjs.job_wrapper = job_wrapper
+        cjs.job_destination = job_wrapper.job_destination
+        cjs.user_log = os.path.join( self.app.config.cluster_files_directory, 'galaxy_%s.condor.log' % galaxy_id_tag )
+        cjs.register_cleanup_file_attribute( 'user_log' )
+        if job.state == model.Job.states.RUNNING:
+            log.debug( "(%s/%s) is still in running state, adding to the DRM queue" % ( job.id, job.job_runner_external_id ) )
+            cjs.running = True
+            self.monitor_queue.put( cjs )
+        elif job.state == model.Job.states.QUEUED:
+            log.debug( "(%s/%s) is still in DRM queued state, adding to the DRM queue" % ( job.id, job.job_runner_external_id ) )
+            cjs.running = False
+            self.monitor_queue.put( cjs )
diff --git a/lib/galaxy/jobs/runners/drmaa.py b/lib/galaxy/jobs/runners/drmaa.py
new file mode 100644
index 0000000..7c08984
--- /dev/null
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -0,0 +1,400 @@
+"""
+Job control via the DRMAA API.
+"""
+from __future__ import absolute_import
+
+import json
+import logging
+import os
+import string
+import subprocess
+import time
+
+from galaxy import model
+from galaxy.jobs import JobDestination
+from galaxy.jobs.handler import DEFAULT_JOB_PUT_FAILURE_MESSAGE
+from galaxy.jobs.runners import (
+    AsynchronousJobRunner,
+    AsynchronousJobState
+)
+from galaxy.util import asbool
+
+drmaa = None
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'DRMAAJobRunner', )
+
+RETRY_EXCEPTIONS_LOWER = frozenset(['invalidjobexception', 'internalexception'])
+
+
+class DRMAAJobRunner( AsynchronousJobRunner ):
+    """
+    Job runner backed by a finite pool of worker threads. FIFO scheduling
+    """
+    runner_name = "DRMAARunner"
+    restrict_job_name_length = 15
+
+    def __init__( self, app, nworkers, **kwargs ):
+        """Start the job runner"""
+        global drmaa
+
+        runner_param_specs = {
+            'drmaa_library_path': dict( map=str, default=os.environ.get( 'DRMAA_LIBRARY_PATH', None ) ) }
+        for retry_exception in RETRY_EXCEPTIONS_LOWER:
+            runner_param_specs[retry_exception + '_state'] = dict( map=str, valid=lambda x: x in ( model.Job.states.OK, model.Job.states.ERROR ), default=model.Job.states.OK )
+            runner_param_specs[retry_exception + '_retries'] = dict( map=int, valid=lambda x: int( x ) >= 0, default=0 )
+
+        if 'runner_param_specs' not in kwargs:
+            kwargs[ 'runner_param_specs' ] = dict()
+        kwargs[ 'runner_param_specs' ].update( runner_param_specs )
+
+        super( DRMAAJobRunner, self ).__init__( app, nworkers, **kwargs )
+
+        # This allows multiple drmaa runners (although only one per handler) in the same job config file
+        if 'drmaa_library_path' in kwargs:
+            log.info( 'Overriding DRMAA_LIBRARY_PATH due to runner plugin parameter: %s', self.runner_params.drmaa_library_path )
+            os.environ['DRMAA_LIBRARY_PATH'] = self.runner_params.drmaa_library_path
+
+        # Import is delayed until runner initialization to allow for the
+        # drmaa_library_path plugin param to override $DRMAA_LIBRARY_PATH
+        try:
+            drmaa = __import__( "drmaa" )
+        except (ImportError, RuntimeError) as exc:
+            raise exc.__class__('The Python drmaa package is required to use this '
+                                'feature, please install it or correct the '
+                                'following error:\n%s: %s' %
+                                (exc.__class__.__name__, str(exc)))
+        from pulsar.managers.util.drmaa import DrmaaSessionFactory
+
+        # Subclasses may need access to state constants
+        self.drmaa_job_states = drmaa.JobState
+
+        # Descriptive state strings pulled from the drmaa lib itself
+        self.drmaa_job_state_strings = {
+            drmaa.JobState.UNDETERMINED: 'process status cannot be determined',
+            drmaa.JobState.QUEUED_ACTIVE: 'job is queued and active',
+            drmaa.JobState.SYSTEM_ON_HOLD: 'job is queued and in system hold',
+            drmaa.JobState.USER_ON_HOLD: 'job is queued and in user hold',
+            drmaa.JobState.USER_SYSTEM_ON_HOLD: 'job is queued and in user and system hold',
+            drmaa.JobState.RUNNING: 'job is running',
+            drmaa.JobState.SYSTEM_SUSPENDED: 'job is system suspended',
+            drmaa.JobState.USER_SUSPENDED: 'job is user suspended',
+            drmaa.JobState.DONE: 'job finished normally',
+            drmaa.JobState.FAILED: 'job finished, but failed',
+        }
+
+        # Ensure a DRMAA session exists and is initialized
+        self.ds = DrmaaSessionFactory().get()
+
+        self.userid = None
+
+        self._init_monitor_thread()
+        self._init_worker_threads()
+
+    def url_to_destination(self, url):
+        """Convert a legacy URL to a job destination"""
+        if not url:
+            return
+        native_spec = url.split('/')[2]
+        if native_spec:
+            params = dict( nativeSpecification=native_spec )
+            log.debug( "Converted URL '%s' to destination runner=drmaa, params=%s" % ( url, params ) )
+            return JobDestination( runner='drmaa', params=params )
+        else:
+            log.debug( "Converted URL '%s' to destination runner=drmaa" % url )
+            return JobDestination( runner='drmaa' )
+
+    def get_native_spec( self, url ):
+        """Get any native DRM arguments specified by the site configuration"""
+        try:
+            return url.split('/')[2] or None
+        except Exception:
+            return None
+
+    def queue_job( self, job_wrapper ):
+        """Create job script and submit it to the DRM"""
+        # prepare the job
+
+        # external_runjob_script can be None, in which case it's not used.
+        external_runjob_script = job_wrapper.get_destination_configuration("drmaa_external_runjob_script", None)
+
+        include_metadata = asbool( job_wrapper.job_destination.params.get( "embed_metadata_in_job", True) )
+        if not self.prepare_job( job_wrapper, include_metadata=include_metadata):
+            return
+
+        # get configured job destination
+        job_destination = job_wrapper.job_destination
+
+        # wrapper.get_id_tag() instead of job_id for compatibility with TaskWrappers.
+        galaxy_id_tag = job_wrapper.get_id_tag()
+
+        job_name = self._job_name(job_wrapper)
+        ajs = AsynchronousJobState( files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper, job_name=job_name )
+
+        # set up the drmaa job template
+        jt = dict(
+            remoteCommand=ajs.job_file,
+            jobName=ajs.job_name,
+            workingDirectory=job_wrapper.working_directory,
+            outputPath=":%s" % ajs.output_file,
+            errorPath=":%s" % ajs.error_file
+        )
+
+        # Avoid a jt.exitCodePath for now - it's only used when finishing.
+        native_spec = job_destination.params.get('nativeSpecification', None)
+        if native_spec is not None:
+            jt['nativeSpecification'] = native_spec
+
+        # fill in the DRM's job run template
+        script = self.get_job_file(job_wrapper, exit_code_path=ajs.exit_code_file)
+        try:
+            self.write_executable_script( ajs.job_file, script )
+        except Exception:
+            job_wrapper.fail( "failure preparing job script", exception=True )
+            log.exception( "(%s) failure writing job script" % galaxy_id_tag )
+            return
+
+        # job was deleted while we were preparing it
+        if job_wrapper.get_state() == model.Job.states.DELETED:
+            log.debug( "(%s) Job deleted by user before it entered the queue" % galaxy_id_tag )
+            if job_wrapper.cleanup_job in ( "always", "onsuccess" ):
+                job_wrapper.cleanup()
+            return
+
+        log.debug( "(%s) submitting file %s", galaxy_id_tag, ajs.job_file )
+        if native_spec:
+            log.debug( "(%s) native specification is: %s", galaxy_id_tag, native_spec )
+
+        # runJob will raise if there's a submit problem
+        if external_runjob_script is None:
+            # TODO: create a queue for retrying submission indefinitely
+            # TODO: configurable max tries and sleep
+            trynum = 0
+            external_job_id = None
+            fail_msg = None
+            while external_job_id is None and trynum < 5:
+                try:
+                    external_job_id = self.ds.run_job(**jt)
+                    break
+                except ( drmaa.InternalException, drmaa.DeniedByDrmException ) as e:
+                    trynum += 1
+                    log.warning( '(%s) drmaa.Session.runJob() failed, will retry: %s', galaxy_id_tag, e )
+                    fail_msg = "Unable to run this job due to a cluster error, please retry it later"
+                    time.sleep( 5 )
+                except Exception:
+                    log.exception( '(%s) drmaa.Session.runJob() failed unconditionally', galaxy_id_tag )
+                    trynum = 5
+            else:
+                log.error( "(%s) All attempts to submit job failed" % galaxy_id_tag )
+                if not fail_msg:
+                    fail_msg = DEFAULT_JOB_PUT_FAILURE_MESSAGE
+                job_wrapper.fail( fail_msg )
+                return
+        else:
+            job_wrapper.change_ownership_for_run()
+            # if user credentials are not available, use galaxy credentials (if permitted)
+            allow_guests = asbool(job_wrapper.job_destination.params.get( "allow_guests", False) )
+            pwent = job_wrapper.user_system_pwent
+            if pwent is None:
+                if not allow_guests:
+                    fail_msg = "User %s is not mapped to any real user, and not permitted to start jobs." % job_wrapper.user
+                    job_wrapper.fail( fail_msg )
+                    return
+                pwent = job_wrapper.galaxy_system_pwent
+            log.debug( '(%s) submitting with credentials: %s [uid: %s]' % ( galaxy_id_tag, pwent[0], pwent[2] ) )
+            filename = self.store_jobtemplate(job_wrapper, jt)
+            self.userid = pwent[2]
+            external_job_id = self.external_runjob(external_runjob_script, filename, pwent[2]).strip()
+        log.info( "(%s) queued as %s" % ( galaxy_id_tag, external_job_id ) )
+
+        # store runner information for tracking if Galaxy restarts
+        job_wrapper.set_job_destination( job_destination, external_job_id )
+
+        # Store DRM related state information for job
+        ajs.job_id = external_job_id
+        ajs.old_state = 'new'
+        ajs.job_destination = job_destination
+
+        # Add to our 'queue' of jobs to monitor
+        self.monitor_queue.put( ajs )
+
+    def _complete_terminal_job( self, ajs, drmaa_state, **kwargs ):
+        """
+        Handle a job upon its termination in the DRM. This method is meant to
+        be overridden by subclasses to improve post-mortem and reporting of
+        failures.
+        Returns True if job was not actually terminal, None otherwise.
+        """
+        if drmaa_state == drmaa.JobState.FAILED:
+            if ajs.job_wrapper.get_state() != model.Job.states.DELETED:
+                ajs.stop_job = False
+                ajs.fail_message = "The cluster DRM system terminated this job"
+                self.work_queue.put( ( self.fail_job, ajs ) )
+        elif drmaa_state == drmaa.JobState.DONE:
+            # External metadata processing for external runjobs
+            external_metadata = not asbool( ajs.job_wrapper.job_destination.params.get( "embed_metadata_in_job", True) )
+            if external_metadata:
+                self._handle_metadata_externally( ajs.job_wrapper, resolve_requirements=True )
+            if ajs.job_wrapper.get_state() != model.Job.states.DELETED:
+                self.work_queue.put( ( self.finish_job, ajs ) )
+
+    def check_watched_items( self ):
+        """
+        Called by the monitor thread to look at each watched job and deal
+        with state changes.
+        """
+        new_watched = []
+        for ajs in self.watched:
+            external_job_id = ajs.job_id
+            galaxy_id_tag = ajs.job_wrapper.get_id_tag()
+            old_state = ajs.old_state
+            try:
+                assert external_job_id not in ( None, 'None' ), '(%s/%s) Invalid job id' % ( galaxy_id_tag, external_job_id )
+                state = self.ds.job_status( external_job_id )
+                # Reset exception retries
+                for retry_exception in RETRY_EXCEPTIONS_LOWER:
+                    setattr( ajs, retry_exception + '_retries', 0)
+            except ( drmaa.InternalException, drmaa.InvalidJobException ) as e:
+                ecn = type(e).__name__
+                retry_param = ecn.lower() + '_retries'
+                state_param = ecn.lower() + '_state'
+                retries = getattr( ajs, retry_param, 0 )
+                log.warning("(%s/%s) unable to check job status because of %s exception for %d consecutive tries: %s", galaxy_id_tag, external_job_id, ecn, retries + 1, e)
+                if self.runner_params[ retry_param ] > 0:
+                    if retries < self.runner_params[ retry_param ]:
+                        # will retry check on next iteration
+                        setattr( ajs, retry_param, retries + 1 )
+                        new_watched.append( ajs )
+                        continue
+                if self.runner_params[ state_param ] == model.Job.states.OK:
+                    log.warning( "(%s/%s) job will now be finished OK", galaxy_id_tag, external_job_id )
+                    self.work_queue.put( ( self.finish_job, ajs ) )
+                elif self.runner_params[ state_param ] == model.Job.states.ERROR:
+                    log.warning( "(%s/%s) job will now be errored", galaxy_id_tag, external_job_id )
+                    self.work_queue.put( ( self.fail_job, ajs ) )
+                else:
+                    raise Exception( "%s is set to an invalid value (%s), this should not be possible. See galaxy.jobs.drmaa.__init__()", state_param, self.runner_params[ state_param ] )
+                continue
+            except drmaa.DrmCommunicationException as e:
+                log.warning( "(%s/%s) unable to communicate with DRM: %s", galaxy_id_tag, external_job_id, e )
+                new_watched.append( ajs )
+                continue
+            except Exception as e:
+                # so we don't kill the monitor thread
+                log.exception( "(%s/%s) unable to check job status: %s" % ( galaxy_id_tag, external_job_id, e ) )
+                log.warning( "(%s/%s) job will now be errored" % ( galaxy_id_tag, external_job_id ) )
+                ajs.fail_message = "Cluster could not complete job"
+                self.work_queue.put( ( self.fail_job, ajs ) )
+                continue
+            if state != old_state:
+                log.debug( "(%s/%s) state change: %s" % ( galaxy_id_tag, external_job_id, self.drmaa_job_state_strings[state] ) )
+            if state == drmaa.JobState.RUNNING and not ajs.running:
+                ajs.running = True
+                ajs.job_wrapper.change_state( model.Job.states.RUNNING )
+            if state in ( drmaa.JobState.FAILED, drmaa.JobState.DONE ):
+                if self._complete_terminal_job( ajs, drmaa_state=state ) is not None:
+                    # job was not actually terminal
+                    state = ajs.old_state
+                else:
+                    continue
+            if ajs.check_limits():
+                self.work_queue.put( ( self.fail_job, ajs ) )
+                continue
+            ajs.old_state = state
+            new_watched.append( ajs )
+        # Replace the watch list with the updated version
+        self.watched = new_watched
+
+    def stop_job( self, job ):
+        """Attempts to delete a job from the DRM queue"""
+        try:
+            ext_id = job.get_job_runner_external_id()
+            assert ext_id not in ( None, 'None' ), 'External job id is None'
+            kill_script = job.get_destination_configuration(self.app.config, "drmaa_external_killjob_script", None)
+            if kill_script is None:
+                self.ds.kill( ext_id )
+            else:
+                # FIXME: hardcoded path
+                subprocess.Popen( [ '/usr/bin/sudo', '-E', kill_script, str( ext_id ), str( self.userid ) ], shell=False )
+            log.debug( "(%s/%s) Removed from DRM queue at user's request" % ( job.get_id(), ext_id ) )
+        except drmaa.InvalidJobException:
+            log.debug( "(%s/%s) User killed running job, but it was already dead" % ( job.get_id(), ext_id ) )
+        except Exception as e:
+            log.debug( "(%s/%s) User killed running job, but error encountered removing from DRM queue: %s" % ( job.get_id(), ext_id, e ) )
+
+    def recover( self, job, job_wrapper ):
+        """Recovers jobs stuck in the queued/running state when Galaxy started"""
+        job_id = job.get_job_runner_external_id()
+        if job_id is None:
+            self.put( job_wrapper )
+            return
+        ajs = AsynchronousJobState( files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper )
+        ajs.job_id = str( job_id )
+        ajs.command_line = job.get_command_line()
+        ajs.job_wrapper = job_wrapper
+        ajs.job_destination = job_wrapper.job_destination
+        if job.state == model.Job.states.RUNNING:
+            log.debug( "(%s/%s) is still in running state, adding to the DRM queue" % ( job.get_id(), job.get_job_runner_external_id() ) )
+            ajs.old_state = drmaa.JobState.RUNNING
+            ajs.running = True
+            self.monitor_queue.put( ajs )
+        elif job.get_state() == model.Job.states.QUEUED:
+            log.debug( "(%s/%s) is still in DRM queued state, adding to the DRM queue" % ( job.get_id(), job.get_job_runner_external_id() ) )
+            ajs.old_state = drmaa.JobState.QUEUED_ACTIVE
+            ajs.running = False
+            self.monitor_queue.put( ajs )
+
+    def store_jobtemplate(self, job_wrapper, jt):
+        """ Stores the content of a DRMAA JobTemplate object in a file as a JSON string.
+        Path is hard-coded, but it's no worse than other paths in this module.
+        Uses Galaxy's JobID, so file is expected to be unique."""
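+        # e.g. job 42 (an assumed id) produces
+        #   <cluster_files_directory>/42.jt_json
+        # whose content is the jt dict serialized as JSON.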
+        filename = "%s/%s.jt_json" % (self.app.config.cluster_files_directory, job_wrapper.get_id_tag())
+        with open(filename, 'w+') as fp:
+            json.dump(jt, fp)
+        log.debug( '(%s) Job script for external submission is: %s' % ( job_wrapper.job_id, filename ) )
+        return filename
+
+    def external_runjob(self, external_runjob_script, jobtemplate_filename, username):
+        """ runs an external script the will QSUB a new job.
+        The external script will be run with sudo, and will setuid() to the specified user.
+        Effectively, will QSUB as a different user (then the one used by Galaxy).
+        """
+        script_parts = external_runjob_script.split()
+        script = script_parts[0]
+        command = [ '/usr/bin/sudo', '-E', script]
+        for script_argument in script_parts[1:]:
+            command.append(script_argument)
+
+        command.extend( [ str(username), jobtemplate_filename ] )
+        log.info("Running command %s" % command)
+        p = subprocess.Popen(command,
+                             shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        (stdoutdata, stderrdata) = p.communicate()
+        exitcode = p.returncode
+        # os.unlink(jobtemplate_filename)
+        if exitcode != 0:
+            # There was an error in the child process
+            raise RuntimeError("External_runjob failed (exit code %s)\nChild process reported error:\n%s" % (str(exitcode), stderrdata))
+        if not stdoutdata.strip():
+            raise RuntimeError("External_runjob did return the job id: %s" % (stdoutdata))
+
+        # The expected output is a single line containing a single numeric value:
+        # the DRMAA job-ID. Surrounding whitespace is stripped by the caller.
+        jobId = stdoutdata
+        return jobId
+
+    def _job_name(self, job_wrapper):
+        external_runjob_script = job_wrapper.get_destination_configuration("drmaa_external_runjob_script", None)
+        galaxy_id_tag = job_wrapper.get_id_tag()
+
+        # define job attributes
+        job_name = 'g%s' % galaxy_id_tag
+        if job_wrapper.tool.old_id:
+            job_name += '_%s' % job_wrapper.tool.old_id
+        if external_runjob_script is None:
+            job_name += '_%s' % job_wrapper.user
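+        # DRMs commonly restrict job names to a small character set; replace
+        # anything outside [A-Za-z0-9_] (e.g. '@' in a user email) with '_'.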
+        job_name = ''.join( x if x in ( string.letters + string.digits + '_' ) else '_' for x in job_name )
+        if self.restrict_job_name_length:
+            job_name = job_name[:self.restrict_job_name_length]
+        return job_name
diff --git a/lib/galaxy/jobs/runners/godocker.py b/lib/galaxy/jobs/runners/godocker.py
new file mode 100644
index 0000000..aad612c
--- /dev/null
+++ b/lib/galaxy/jobs/runners/godocker.py
@@ -0,0 +1,471 @@
+import json
+import logging
+import time
+from datetime import datetime
+
+import requests
+
+from galaxy import model
+from galaxy.jobs.runners import (
+    AsynchronousJobRunner,
+    AsynchronousJobState
+)
+
+
+log = logging.getLogger(__name__)
+
+__all__ = ('GodockerJobRunner', )
+
+
+class Godocker(object):
+    """
+    API parameters
+    """
+    def __init__(self, server, login, apikey, noCert):
+        self.token = None
+        self.server = server
+        self.login = login
+        self.apikey = apikey
+        self.noCert = noCert
+
+    def setToken(self, token):
+        self.token = token
+
+    def http_post_request(self, query, data, header):
+        """ post request with query """
+
+        verify_ssl = not self.noCert
+        try:
+            url = self.server + query
+            res = requests.post(url, data, headers=header, verify=verify_ssl)
+
+        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
+            log.error('A transport error occurred in the GoDocker job runner: %s', e)
+            return False
+
+        return self.test_status_code(res)
+
+    def http_get_request(self, query, header):
+        """ get request with query, server and header required """
+
+        # remove warnings if using --no-certificate
+        requests.packages.urllib3.disable_warnings()
+        verify_ssl = not self.noCert
+        try:
+            url = self.server + query
+            res = requests.get(url, headers=header, verify=verify_ssl)
+
+        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
+            log.error('A communication error occurred in the GoDocker job runner: %s', e)
+            return False
+
+        return self.test_status_code(res)
+
+    def http_delete_request(self, query, header):
+        """ delete request with query, server and header required """
+
+        # remove warnings if using --no-certificate
+        requests.packages.urllib3.disable_warnings()
+        verify_ssl = not self.noCert
+        try:
+            url = self.server + query
+            res = requests.delete(url, headers=header, verify=verify_ssl)
+
+        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
+            log.error('A communication error occurred in the GoDocker job runner: %s', e)
+            return False
+
+        return self.test_status_code(res)
+
+    def http_put_request(self, query, data, header):
+        """ put request with query """
+
+        # remove warnings if using --no-certificate
+        requests.packages.urllib3.disable_warnings()
+        verify_ssl = not self.noCert
+        try:
+            url = self.server + query
+            res = requests.put(url, data, headers=header, verify=verify_ssl)
+
+        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
+            log.error('A communication error occurred in the GoDocker job runner: %s', e)
+            return False
+
+        return self.test_status_code(res)
+
+    def test_status_code(self, httpresult):
+        """ exit if status code is 401 or 403 or 404 or 200"""
+        if httpresult.status_code == 401:
+            log.debug('Unauthorized : this server could not verify that you are authorized to access the document you requested.')
+
+        elif httpresult.status_code == 403:
+            log.debug('Forbidden : Access was denied to this resource. Not authorized to access this resource.')
+
+        elif httpresult.status_code == 404:
+            log.debug('Not Found : The resource could not be found.')
+
+        elif httpresult.status_code == 200:
+            return httpresult
+
+        return False
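+
+    # A minimal usage sketch (illustrative only; the server URL, user and
+    # API key below are assumptions, not values from this module):
+    #   g = Godocker('https://godocker.example.org', 'alice', 'APIKEY', noCert=False)
+    #   res = g.http_post_request('/api/1.0/authenticate',
+    #                             json.dumps({'user': 'alice', 'apikey': 'APIKEY'}),
+    #                             {'Content-type': 'application/json', 'Accept': 'application/json'})
+    #   if res:
+    #       g.setToken(res.json()['token'])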
+
+
+class GodockerJobRunner(AsynchronousJobRunner):
+    """
+    Job runner backed by a finite pool of worker threads. FIFO scheduling
+    """
+    runner_name = "GodockerJobRunner"
+
+    def __init__(self, app, nworkers, **kwargs):
+        """ 1: Get runner_param_specs from job_conf.xml
+            2: Initialise job runner parent object
+            3: Login to godocker and store the token
+            4: Start the worker and monitor threads
+        """
+        runner_param_specs = dict(godocker_master=dict(map=str), user=dict(map=str), key=dict(map=str), godocker_project=dict(map=str))
+        if 'runner_param_specs' not in kwargs:
+            kwargs['runner_param_specs'] = dict()
+        kwargs['runner_param_specs'].update(runner_param_specs)
+
+        # Start the job runner parent object
+        super(GodockerJobRunner, self).__init__(app, nworkers, **kwargs)
+
+        # godocker API login call
+        self.auth = self.login(self.runner_params["key"], self.runner_params["user"], self.runner_params["godocker_master"])
+
+        if not self.auth:
+            log.error("Authentication failure, GoDocker runner cannot be started")
+        else:
+            """ Following methods starts threads.
+                These methods invoke threading.Thread(name,target)
+                      which in turn invokes methods monitor() and run_next().
+            """
+            self._init_monitor_thread()
+            self._init_worker_threads()
+
+    def queue_job(self, job_wrapper):
+        """ Create job script and submit it to godocker """
+        if not self.prepare_job(job_wrapper, include_metadata=False, include_work_dir_outputs=True, modify_command_for_container=False):
+            return
+
+        job_destination = job_wrapper.job_destination
+        """ Submit job to godocker """
+        job_id = self.post_task(job_wrapper)
+        if not job_id:
+            log.error("Job creation failure.  No Response from GoDocker")
+            job_wrapper.fail("Not submitted")
+        else:
+            log.debug("Starting queue_job for job " + job_id)
+            # Create an object of AsynchronousJobState and add it to the monitor queue.
+            ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper, job_id=job_id, job_destination=job_destination)
+            self.monitor_queue.put(ajs)
+        return None
+
+    def check_watched_item(self, job_state):
+        """ Get the job current status from GoDocker
+                    using job_id and update the status in galaxy.
+            If the job execution is successful, call
+                    mark_as_finished() and return 'None' to galaxy.
+            else if the job failed, call mark_as_failed()
+                    and return 'None' to galaxy.
+            else if the job is running or in pending state, simply
+                    return the 'AsynchronousJobState object' (job_state).
+        """
+        ''' This function is called by check_watched_items() where
+                    param job_state is an object of AsynchronousJobState.
+            Expected return type of this function is None or
+                    AsynchronousJobState object with updated running status.
+        '''
+        """ Get task from GoDocker """
+        job_status_god = self.get_task(job_state.job_id)
+        log.debug("Job ID: " + str(job_state.job_id) + " Job Status: " + str(job_status_god['status']['primary']))
+
+        if job_status_god['status']['primary'] == "over":
+            job_state.running = False
+            job_state.job_wrapper.change_state(model.Job.states.OK)
+            if self.create_log_file(job_state, job_status_god):
+                self.mark_as_finished(job_state)
+            else:
+                self.mark_as_failed(job_state)
+            # mark_as_finished() executes
+            #     self.work_queue.put((self.finish_job, job_state))
+            # and a worker thread's run_next() later pops the queue and calls
+            # finish_job(job_state), which runs
+            #     job_state.job_wrapper.finish(stdout, stderr, exit_code)
+            #     job_state.job_wrapper.reclaim_ownership()
+            #     job_state.cleanup()
+            # or, on error,
+            #     job_state.job_wrapper.fail("Unable to finish job", exception=True)
+            # A similar workflow applies to mark_as_failed().
+            return None
+
+        elif job_status_god['status']['primary'] == "running":
+            job_state.running = True
+            job_state.job_wrapper.change_state(model.Job.states.RUNNING)
+            return job_state
+
+        elif job_status_god['status']['primary'] == "pending":
+            return job_state
+
+        elif job_status_god['status']['exitcode'] not in [None, 0]:
+            job_state.running = False
+            job_state.job_wrapper.change_state(model.Job.states.ERROR)
+            self.create_log_file(job_state, job_status_god)
+            self.mark_as_failed(job_state)
+            return None
+
+        else:
+            job_state.running = False
+            self.create_log_file(job_state, job_status_god)
+            self.mark_as_failed(job_state)
+            return None
+
+    def stop_job(self, job):
+        """ Attempts to delete a dispatched executing Job in GoDocker """
+        # Called by fail_job() with
+        #   job = self.sa_session.query(self.app.model.Job).get(job_state.job_wrapper.job_id)
+        # No return data is expected.
+        log.debug("STOP JOB EXECUTION OF JOB ID: " + str(job.id))
+        # Get task status from GoDocker.
+        job_status_god = self.get_task_status(job.id)
+        if job_status_god['status']['primary'] != "over":
+            # Initiate a delete call,if the job is running in GoDocker.
+            self.delete_task(job.id)
+        return None
+
+    def recover(self, job, job_wrapper):
+        """ Recovers jobs stuck in the queued/running state when Galaxy started """
+        """ This method is called by galaxy at the time of startup.
+            Jobs in Running & Queued status in galaxy are put in the monitor_queue by creating an AsynchronousJobState object
+        """
+        job_id = job_wrapper.job_id
+        ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper)
+        ajs.job_id = str(job_id)
+        ajs.job_destination = job_wrapper.job_destination
+        job_wrapper.command_line = job.command_line
+        ajs.job_wrapper = job_wrapper
+        if job.state == model.Job.states.RUNNING:
+            log.debug("(%s/%s) is still in running state, adding to the god queue" % (job.id, job.get_job_runner_external_id()))
+            ajs.old_state = 'R'
+            ajs.running = True
+            self.monitor_queue.put(ajs)
+
+        elif job.state == model.Job.states.QUEUED:
+            log.debug("(%s/%s) is still in god queued state, adding to the god queue" % (job.id, job.get_job_runner_external_id()))
+            ajs.old_state = 'Q'
+            ajs.running = False
+            self.monitor_queue.put(ajs)
+
+    # Helper functions
+
+    def create_log_file(self, job_state, job_status_god):
+        """ Create log files in galaxy, namely error_file, output_file, exit_code_file
+            Return true, if all the file creations are successful
+        """
+        path = None
+        for vol in job_status_god['container']['volumes']:
+            if vol['name'] == "go-docker":
+                path = str(vol['path'])
+        if path:
+            god_output_file = path + "/god.log"
+            god_error_file = path + "/god.err"
+            try:
+                # Copy the GoDocker output log into the galaxy output_file.
+                with open(god_output_file, "r") as f, open(job_state.output_file, "w") as log_file:
+                    log_file.write(f.read())
+                # Copy the GoDocker error log into the galaxy error_file.
+                with open(god_error_file, "r") as f, open(job_state.error_file, "w") as log_file:
+                    log_file.write(f.read())
+                # Write the GoDocker exit code into the galaxy exit_code_file.
+                with open(job_state.exit_code_file, "w") as log_file:
+                    log_file.write(str(job_status_god['status']['exitcode']))
+                log.debug("CREATE OUTPUT FILE: " + str(job_state.output_file))
+                log.debug("CREATE ERROR FILE: " + str(job_state.error_file))
+                log.debug("CREATE EXIT CODE FILE: " + str(job_state.exit_code_file))
+            except IOError as e:
+                log.error('Could not access task log file %s' % str(e))
+                log.debug("IO Error occurred when accessing the files.")
+                return False
+        return True
+
+    # GoDocker API helper functions
+
+    def login(self, apikey, login, server, noCert=False):
+        """ Login to GoDocker and return the token
+            Create Login model schema of GoDocker and call the http_post_request method.
+        """
+        log.debug("LOGIN TASK TO BE EXECUTED \n")
+        log.debug("GODOCKER LOGIN: " + str(login))
+        data = json.dumps({'user': login, 'apikey': apikey})
+        # Create object of Godocker class
+        g_auth = Godocker(server, login, apikey, noCert)
+        auth = g_auth.http_post_request("/api/1.0/authenticate", data, {'Content-type': 'application/json', 'Accept': 'application/json'})
+        if not auth:
+            log.error("GoDocker authentication Error.")
+        else:
+            log.debug("GoDocker authentication successful.")
+            token = auth.json()['token']
+            g_auth.setToken(token)
+        # Return the object of Godocker class
+        return g_auth
+
+    def post_task(self, job_wrapper):
+        """ Sumbit job to GoDocker and return jobid
+            Create Job model schema of GoDocker and call the http_post_request method.
+        """
+        # Get the params from <destination> tag in job_conf by using job_destination.params[param]
+        if self.auth.token:
+            job_destination = job_wrapper.job_destination
+            try:
+                docker_cpu = int(job_destination.params["docker_cpu"])
+            except Exception:
+                docker_cpu = 1
+            try:
+                docker_ram = int(job_destination.params["docker_memory"])
+            except Exception:
+                docker_ram = 1
+            try:
+                docker_image = self._find_container(job_wrapper).container_id
+                log.debug("GoDocker runner using container %s.", docker_image)
+            except Exception:
+                log.error("Unable to find docker_image for job %s, failing." % job_wrapper.job_id)
+                return False
+
+            volumes = []
+            labels = []
+            tags_tab = ['galaxy', job_wrapper.tool.id]
+            tasks_depends = []
+            name = job_wrapper.tool.name
+            description = "galaxy job"
+            array = None
+            project = None
+            try:
+                project = str(self.runner_params["godocker_project"])
+            except KeyError:
+                log.debug("godocker_project not defined, using default.")
+            try:
+                volume = job_destination.params["godocker_volumes"]
+                volume = volume.split(",")
+                for i in volume:
+                    temp = dict({"name": i})
+                    volumes.append(temp)
+            except Exception:
+                log.debug("godocker_volume not set, using default.")
+
+            dt = datetime.now()
+            # Enable galaxy venv in the docker containers
+            try:
+                if(job_destination.params["virtualenv"] == "true"):
+                    GALAXY_VENV_TEMPLATE = """GALAXY_VIRTUAL_ENV="%s"; if [ "$GALAXY_VIRTUAL_ENV" != "None" -a -z "$VIRTUAL_ENV" -a -f "$GALAXY_VIRTUAL_ENV/bin/activate" ]; then . "$GALAXY_VIRTUAL_ENV/bin/activate"; fi;"""
+                    venv = GALAXY_VENV_TEMPLATE % job_wrapper.galaxy_virtual_env
+                    command = "#!/bin/bash\n" + "cd " + job_wrapper.working_directory + "\n" + venv + "\n" + job_wrapper.runner_command_line
+                else:
+                    command = "#!/bin/bash\n" + "cd " + job_wrapper.working_directory + "\n" + job_wrapper.runner_command_line
+            except Exception:
+                command = "#!/bin/bash\n" + "cd " + job_wrapper.working_directory + "\n" + job_wrapper.runner_command_line
+
+            # GoDocker Job model schema
+            job = {
+                'date': time.mktime(dt.timetuple()),
+                'meta': {
+                    'name': name,
+                    'description': description,
+                    'tags': tags_tab
+                },
+                'requirements': {
+                    'cpu': docker_cpu,
+                    'ram': docker_ram,
+                    'array': {'values': array},
+                    'label': labels,
+                    'tasks': tasks_depends,
+                    'tmpstorage': None
+                },
+                'container': {
+                    'image': str(docker_image),
+                    'volumes': volumes,
+                    'network': True,
+                    'id': None,
+                    'meta': None,
+                    'stats': None,
+                    'ports': [],
+                    'root': False
+                },
+                'command': {
+                    'interactive': False,
+                    'cmd': command,
+                },
+                'status': {
+                    'primary': None,
+                    'secondary': None
+                }
+            }
+            if project is not None:
+                job['user'] = {"project": project}
+
+            result = self.auth.http_post_request(
+                "/api/1.0/task", json.dumps(job),
+                {'Authorization': 'Bearer ' + self.auth.token, 'Content-type': 'application/json', 'Accept': 'application/json'}
+            )
+            # Return job_id
+            return str(result.json()['id'])
+
+    def get_task(self, job_id):
+        """ Get job details from GoDocker and return the job.
+            Pass job_id to the http_get_request method.
+        """
+        job = False
+        if self.auth.token:
+            result = self.auth.http_get_request("/api/1.0/task/" + str(job_id), {'Authorization': 'Bearer ' + self.auth.token})
+            job = result.json()
+        # Return the job
+        return job
+
+    def task_suspend(self, job_id):
+        """ Suspend actively running job in galaxy.
+            Pass job_id to the http_get_request method.
+        """
+        job = False
+        if self.auth.token:
+            result = self.auth.http_get_request("/api/1.0/task/" + str(job_id) + "/suspend", {'Authorization': 'Bearer ' + self.auth.token})
+            job = result.json()
+        # Return the job
+        return job
+
+    def get_task_status(self, job_id):
+        """ Get job status from GoDocker and return the status of job.
+            Pass job_id to http_get_request method.
+        """
+        job = False
+        if self.auth.token:
+            result = self.auth.http_get_request("/api/1.0/task/" + str(job_id) + "/status", {'Authorization': 'Bearer ' + self.auth.token})
+            job = result.json()
+        # Return task status
+        return job
+
+    def delete_task(self, job_id):
+        """ Delete a suspended task in GoDocker.
+            Pass job_id to http_delete_request method.
+        """
+        job = False
+        if self.auth.token:
+            result = self.auth.http_delete_request("/api/1.0/task/" + str(job_id), {'Authorization': 'Bearer ' + self.auth.token})
+            job = result.json()
+        # Return the job
+        return job
diff --git a/lib/galaxy/jobs/runners/kubernetes.py b/lib/galaxy/jobs/runners/kubernetes.py
new file mode 100644
index 0000000..576086f
--- /dev/null
+++ b/lib/galaxy/jobs/runners/kubernetes.py
@@ -0,0 +1,369 @@
+"""
+Offload jobs to a Kubernetes cluster.
+"""
+
+import logging
+from os import environ as os_environ
+
+from six import text_type
+
+from galaxy import model
+from galaxy.jobs.runners import (
+    AsynchronousJobRunner,
+    AsynchronousJobState
+)
+
+# pykube imports:
+try:
+    from pykube.config import KubeConfig
+    from pykube.http import HTTPClient
+    from pykube.objects import (
+        Job,
+        Pod
+    )
+except ImportError as exc:
+    KubeConfig = None
+    K8S_IMPORT_MESSAGE = ('The Python pykube package is required to use '
+                          'this feature, please install it or correct the '
+                          'following error:\nImportError %s' % str(exc))
+
+log = logging.getLogger(__name__)
+
+__all__ = ('KubernetesJobRunner', )
+
+
+class KubernetesJobRunner(AsynchronousJobRunner):
+    """
+    Job runner backed by a finite pool of worker threads. FIFO scheduling
+    """
+    runner_name = "KubernetesRunner"
+
+    def __init__(self, app, nworkers, **kwargs):
+        # Check if pykube was importable, fail if not
+        assert KubeConfig is not None, K8S_IMPORT_MESSAGE
+        runner_param_specs = dict(
+            k8s_config_path=dict(map=str, default=os_environ.get('KUBECONFIG', None)),
+            k8s_use_service_account=dict(map=bool, default=False),
+            k8s_persistent_volume_claim_name=dict(map=str),
+            k8s_persistent_volume_claim_mount_path=dict(map=str),
+            k8s_namespace=dict(map=str, default="default"),
+            k8s_pod_retrials=dict(map=int, valid=lambda x: int(x) > 0, default=3))
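+        # Illustrative job_conf.xml destination wiring these params (values are
+        # assumptions, not defaults shipped with Galaxy):
+        #   <destination id="k8s" runner="k8s">
+        #     <param id="k8s_persistent_volume_claim_name">galaxy-pvc</param>
+        #     <param id="k8s_persistent_volume_claim_mount_path">/opt/galaxy</param>
+        #   </destination>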
+
+        if 'runner_param_specs' not in kwargs:
+            kwargs['runner_param_specs'] = dict()
+        kwargs['runner_param_specs'].update(runner_param_specs)
+
+        """Start the job runner parent object """
+        super(KubernetesJobRunner, self).__init__(app, nworkers, **kwargs)
+
+        # self.cli_interface = CliInterface()
+
+        if "k8s_use_service_account" in self.runner_params and self.runner_params["k8s_use_service_account"]:
+            self._pykube_api = HTTPClient(KubeConfig.from_service_account())
+        else:
+            self._pykube_api = HTTPClient(KubeConfig.from_file(self.runner_params["k8s_config_path"]))
+        self._galaxy_vol_name = "pvc-galaxy"  # TODO this needs to be read from params!!
+
+        self._init_monitor_thread()
+        self._init_worker_threads()
+
+    def queue_job(self, job_wrapper):
+        """Create job script and submit it to Kubernetes cluster"""
+        # prepare the job
+        # We currently don't need to include_metadata or include_work_dir_outputs, as the working directory is the
+        # same one where Galaxy will expect results.
+        log.debug("Starting queue_job for job " + job_wrapper.get_id_tag())
+        if not self.prepare_job(job_wrapper, include_metadata=False, modify_command_for_container=False):
+            return
+
+        job_destination = job_wrapper.job_destination
+
+        # Construction of the Kubernetes Job object follows: http://kubernetes.io/docs/user-guide/persistent-volumes/
+        k8s_job_name = self.__produce_unique_k8s_job_name(job_wrapper.get_id_tag())
+        k8s_job_obj = {
+            "apiVersion": "extensions/v1beta1",
+            "kind": "Job",
+            "metadata":
+            # metadata.name is the name of the pod resource created, and must be unique
+            # http://kubernetes.io/docs/user-guide/configuring-containers/
+                {
+                    "name": k8s_job_name,
+                    "namespace": "default",  # TODO this should be set
+                    "labels": {"app": k8s_job_name},
+                }
+            ,
+            "spec": self.__get_k8s_job_spec(job_wrapper)
+        }
+
+        # Checks if job exists
+        job = Job(self._pykube_api, k8s_job_obj)
+        if job.exists():
+            job.delete()
+        # Creates the Kubernetes Job
+        # TODO if a job with that ID exists, what should we do?
+        # TODO do we trust that this is the same job and use that?
+        # TODO or create a new job as we cannot make sure
+        Job(self._pykube_api, k8s_job_obj).create()
+
+        # define job attributes in the AsynchronousJobState for follow-up
+        ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper,
+                                   job_id=k8s_job_name, job_destination=job_destination)
+        self.monitor_queue.put(ajs)
+
+        # external_runJob_script can be None, in which case it's not used.
+        external_runjob_script = None
+        return external_runjob_script
+
+    def __produce_unique_k8s_job_name(self, galaxy_internal_job_id):
+        # wrapper.get_id_tag() instead of job_id for compatibility with TaskWrappers.
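+        # e.g. an id tag of "123" yields the k8s job name "galaxy-123" (illustrative)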
+        return "galaxy-" + galaxy_internal_job_id
+
+    def __get_k8s_job_spec(self, job_wrapper):
+        """Creates the k8s Job spec. For a Job spec, the only requirement is to have a .spec.template."""
+        k8s_job_spec = {"template": self.__get_k8s_job_spec_template(job_wrapper)}
+        return k8s_job_spec
+
+    def __get_k8s_job_spec_template(self, job_wrapper):
+        """The k8s spec template is nothing but a Pod spec, except that it is nested and does not have an apiversion
+        nor kind. In addition to required fields for a Pod, a pod template in a job must specify appropriate labels
+        (see pod selector) and an appropriate restart policy."""
+        k8s_spec_template = {
+            "metadata": {
+                "labels": {"app": self.__produce_unique_k8s_job_name(job_wrapper.get_id_tag())}
+            },
+            "spec": {
+                "volumes": self.__get_k8s_mountable_volumes(job_wrapper),
+                "restartPolicy": self.__get_k8s_restart_policy(job_wrapper),
+                "containers": self.__get_k8s_containers(job_wrapper)
+            }
+        }
+        # TODO include other relevant elements that people might want to use from
+        # TODO http://kubernetes.io/docs/api-reference/v1/definitions/#_v1_podspec
+
+        return k8s_spec_template
+
+    def __get_k8s_restart_policy(self, job_wrapper):
+        """The default Kubernetes restart policy for Jobs"""
+        return "Never"
+
+    def __get_k8s_mountable_volumes(self, job_wrapper):
+        """Provides the required volumes that the containers in the pod should be able to mount. This should be using
+        the new persistent volumes and persistent volumes claim objects. This requires that both a PersistentVolume and
+        a PersistentVolumeClaim are created before starting galaxy (starting a k8s job).
+        """
+        # TODO on this initial version we only support a single volume to be mounted.
+        k8s_mountable_volume = {
+            "name": self._galaxy_vol_name,
+            "persistentVolumeClaim": {
+                "claimName": self.runner_params['k8s_persistent_volume_claim_name']
+            }
+        }
+        return [k8s_mountable_volume]
+
+    def __get_k8s_containers(self, job_wrapper):
+        """Fills in all required for setting up the docker containers to be used."""
+        k8s_container = {
+            "name": self.__get_k8s_container_name(job_wrapper),
+            "image": self._find_container(job_wrapper).container_id,
+            # this form of command overrides the entrypoint and allows multi command
+            # command line execution, separated by ;, which is what Galaxy does
+            # to assemble the command.
+            # TODO possibly shell needs to be set by job_wrapper
+            "command": ["/bin/bash", "-c", job_wrapper.runner_command_line],
+            "workingDir": job_wrapper.working_directory,
+            "volumeMounts": [{
+                "mountPath": self.runner_params['k8s_persistent_volume_claim_mount_path'],
+                "name": self._galaxy_vol_name
+            }]
+        }
+
+        # if self.__requires_ports(job_wrapper):
+        #    k8s_container['ports'] = self.__get_k8s_containers_ports(job_wrapper)
+
+        return [k8s_container]
+
+    # def __get_k8s_containers_ports(self, job_wrapper):
+
+    #    for k,v self.runner_params:
+    #        if k.startswith("container_port_"):
+
+    def __assemble_k8s_container_image_name(self, job_wrapper):
+        """Assembles the container image name as repo/owner/image:tag, where repo, owner and tag are optional"""
+        job_destination = job_wrapper.job_destination
+
+        # Determine the job's Kubernetes destination (context, namespace) and options from the job destination
+        # definition
+        repo = ""
+        owner = ""
+        if 'repo' in job_destination.params:
+            repo = job_destination.params['repo'] + "/"
+        if 'owner' in job_destination.params:
+            owner = job_destination.params['owner'] + "/"
+
+        k8s_cont_image = repo + owner + job_destination.params['image']
+
+        if 'tag' in job_destination.params:
+            k8s_cont_image += ":" + job_destination.params['tag']
+
+        return k8s_cont_image
+
+    def __get_k8s_container_name(self, job_wrapper):
+        # TODO check if this is correct
+        return job_wrapper.job_destination.id
+
+    def check_watched_item(self, job_state):
+        """Checks the state of a job already submitted on k8s. Job state is a AsynchronousJobState"""
+        jobs = Job.objects(self._pykube_api).filter(selector="app=" + job_state.job_id)
+        if len(jobs.response['items']) == 1:
+            job = Job(self._pykube_api, jobs.response['items'][0])
+            job_destination = job_state.job_wrapper.job_destination
+            succeeded = 0
+            active = 0
+            failed = 0
+
+            max_pod_retrials = 1
+            if 'k8s_pod_retrials' in self.runner_params:
+                max_pod_retrials = int(self.runner_params['k8s_pod_retrials'])
+            if 'max_pod_retrials' in job_destination.params:
+                max_pod_retrials = int(job_destination.params['max_pod_retrials'])
+
+            if 'succeeded' in job.obj['status']:
+                succeeded = job.obj['status']['succeeded']
+            if 'active' in job.obj['status']:
+                active = job.obj['status']['active']
+            if 'failed' in job.obj['status']:
+                failed = job.obj['status']['failed']
+
+            # This assumes each job depends on a single pod with a single container
+            if succeeded > 0:
+                self.__produce_log_file(job_state)
+                error_file = open(job_state.error_file, 'w')
+                error_file.write("")
+                error_file.close()
+                job_state.running = False
+                self.mark_as_finished(job_state)
+                return None
+            elif active > 0 and failed <= max_pod_retrials:
+                job_state.running = True
+                return job_state
+            elif failed > max_pod_retrials:
+                self.__produce_log_file(job_state)
+                error_file = open(job_state.error_file, 'w')
+                error_file.write("Exceeded max number of Kubernetes pod retrials allowed for job\n")
+                error_file.close()
+                job_state.running = False
+                job_state.fail_message = "More pods failed than allowed. See stdout for pods details."
+                self.mark_as_failed(job_state)
+                job.scale(replicas=0)
+                return None
+
+            # We should not get here
+            log.debug(
+                "Reaching unexpected point for Kubernetes job, where it is classified as neither succeeded, active nor failed.")
+            return job_state
+
+        elif len(jobs.response['items']) == 0:
+            # there is no job responding to this job_id, it is either lost or something happened.
+            log.error("No Jobs are available under expected selector app=" + job_state.job_id)
+            error_file = open(job_state.error_file, 'w')
+            error_file.write("No Kubernetes Jobs are available under expected selector app=" + job_state.job_id + "\n")
+            error_file.close()
+            self.mark_as_failed(job_state)
+            return job_state
+        else:
+            # there is more than one job associated to the expected unique job id used as selector.
+            log.error("There is more than one Kubernetes Job associated to job id " + job_state.job_id)
+            self.__produce_log_file(job_state)
+            error_file = open(job_state.error_file, 'w')
+            error_file.write("There is more than one Kubernetes Job associated to job id " + job_state.job_id + "\n")
+            error_file.close()
+            self.mark_as_failed(job_state)
+            return job_state
+
+    def fail_job(self, job_state):
+        """
+        Kubernetes runner overrides fail_job (called by mark_as_failed) to rescue the pod's log files, which are left
+        as stdout (pod logs are the natural stdout and stderr of the processes running inside the pods) and are
+        deleted in the parent implementation as part of failing the job.
+
+        :param job_state:
+        :return:
+        """
+
+        # First we rescue the pods logs
+        with open(job_state.output_file, 'r') as outfile:
+            stdout_content = outfile.read()
+
+        if getattr(job_state, 'stop_job', True):
+            self.stop_job(self.sa_session.query(self.app.model.Job).get(job_state.job_wrapper.job_id))
+        self._handle_runner_state('failure', job_state)
+        # Not convinced this is the best way to indicate this state, but
+        # something necessary
+        if not job_state.runner_state_handled:
+            job_state.job_wrapper.fail(
+                message=getattr(job_state, 'fail_message', 'Job failed'),
+                stdout=stdout_content, stderr='See stdout for pod\'s stderr.'
+            )
+            if job_state.job_wrapper.cleanup_job == "always":
+                job_state.cleanup()
+
+    def __produce_log_file(self, job_state):
+        pod_r = Pod.objects(self._pykube_api).filter(selector="app=" + job_state.job_id)
+        logs = ""
+        for pod_obj in pod_r.response['items']:
+            try:
+                pod = Pod(self._pykube_api, pod_obj)
+                logs += "\n\n==== Pod " + pod.name + " log start ====\n\n"
+                logs += pod.logs(timestamps=True)
+                logs += "\n\n==== Pod " + pod.name + " log end   ===="
+            except Exception as detail:
+                log.info("Could not write pod's " + pod_obj['metadata']['name'] +
+                         " log file due to error " + str(detail))
+
+        logs_file_path = job_state.output_file
+        logs_file = open(logs_file_path, mode="w")
+        if isinstance(logs, text_type):
+            logs = logs.encode('utf8')
+        logs_file.write(logs)
+        logs_file.close()
+        return logs_file_path
+
+    def stop_job(self, job):
+        """Attempts to delete a dispatched job to the k8s cluster"""
+        try:
+            jobs = Job.objects(self._pykube_api).filter(selector="app=" +
+                                                                 self.__produce_unique_k8s_job_name(job.get_id_tag()))
+            if len(jobs.response['items']) > 0:
+                job_to_delete = Job(self._pykube_api, jobs.response['items'][0])
+                job_to_delete.scale(replicas=0)
+            # TODO assert whether job parallelism == 0
+            # assert not job_to_delete.exists(), "Could not delete job,"+job.job_runner_external_id+" it still exists"
+            log.debug("(%s/%s) Terminated at user's request" % (job.id, job.job_runner_external_id))
+        except Exception as e:
+            log.debug("(%s/%s) User killed running job, but error encountered during termination: %s" % (
+                job.id, job.job_runner_external_id, e))
+
+    def recover(self, job, job_wrapper):
+        """Recovers jobs stuck in the queued/running state when Galaxy started"""
+        # TODO this needs to be implemented to override unimplemented base method
+        job_id = job.get_job_runner_external_id()
+        if job_id is None:
+            self.put(job_wrapper)
+            return
+        ajs = AsynchronousJobState(files_dir=job_wrapper.working_directory, job_wrapper=job_wrapper)
+        ajs.job_id = str(job_id)
+        ajs.command_line = job.command_line
+        ajs.job_wrapper = job_wrapper
+        ajs.job_destination = job_wrapper.job_destination
+        if job.state == model.Job.states.RUNNING:
+            log.debug("(%s/%s) is still in running state, adding to the runner monitor queue" % (
+                job.id, job.job_runner_external_id))
+            ajs.old_state = model.Job.states.RUNNING
+            ajs.running = True
+            self.monitor_queue.put(ajs)
+        elif job.state == model.Job.states.QUEUED:
+            log.debug("(%s/%s) is still in queued state, adding to the runner monitor queue" % (
+                job.id, job.job_runner_external_id))
+            ajs.old_state = model.Job.states.QUEUED
+            ajs.running = False
+            self.monitor_queue.put(ajs)
diff --git a/lib/galaxy/jobs/runners/local.py b/lib/galaxy/jobs/runners/local.py
new file mode 100644
index 0000000..27fecd6
--- /dev/null
+++ b/lib/galaxy/jobs/runners/local.py
@@ -0,0 +1,207 @@
+"""
+Job runner plugin for executing jobs on the local system via the command line.
+"""
+import datetime
+import errno
+import logging
+import os
+import subprocess
+import tempfile
+from time import sleep
+
+from galaxy import model
+from galaxy.util import (
+    asbool,
+    DATABASE_MAX_STRING_SIZE,
+    shrink_stream_by_size
+)
+
+from ..runners import (
+    BaseJobRunner,
+    JobState
+)
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'LocalJobRunner', )
+
+DEFAULT_POOL_SLEEP_TIME = 1
+# TODO: Set to false and just get rid of this option. It would simplify this
+# class nicely. -John
+DEFAULT_EMBED_METADATA_IN_JOB = True
+
+
+class LocalJobRunner( BaseJobRunner ):
+    """
+    Job runner backed by a finite pool of worker threads. FIFO scheduling
+    """
+    runner_name = "LocalRunner"
+
+    def __init__( self, app, nworkers ):
+        """Start the job runner """
+
+        # create a local copy of os.environ to use as env for subprocess.Popen
+        self._environ = os.environ.copy()
+
+        # Set TEMP if a valid temp value is not already set
+        if not ( 'TMPDIR' in self._environ or 'TEMP' in self._environ or 'TMP' in self._environ ):
+            self._environ[ 'TEMP' ] = os.path.abspath(tempfile.gettempdir())
+
+        super( LocalJobRunner, self ).__init__( app, nworkers )
+        self._init_worker_threads()
+
+    def __command_line( self, job_wrapper ):
+        """
+        """
+        command_line = job_wrapper.runner_command_line
+
+        # slots would be cleaner name, but don't want deployers to see examples and think it
+        # is going to work with other job runners.
+        slots = job_wrapper.job_destination.params.get( "local_slots", None ) or os.environ.get("GALAXY_SLOTS", None)
+        if slots:
+            slots_statement = 'GALAXY_SLOTS="%d"; export GALAXY_SLOTS; GALAXY_SLOTS_CONFIGURED="1"; export GALAXY_SLOTS_CONFIGURED;' % ( int( slots ) )
+        else:
+            slots_statement = 'GALAXY_SLOTS="1"; export GALAXY_SLOTS;'
+
+        job_id = job_wrapper.get_id_tag()
+        job_file = JobState.default_job_file( job_wrapper.working_directory, job_id )
+        exit_code_path = JobState.default_exit_code_file( job_wrapper.working_directory, job_id )
+        job_script_props = {
+            'slots_statement': slots_statement,
+            'command': command_line,
+            'exit_code_path': exit_code_path,
+            'working_directory': job_wrapper.working_directory,
+        }
+        job_file_contents = self.get_job_file( job_wrapper, **job_script_props )
+        self.write_executable_script( job_file, job_file_contents )
+        return job_file, exit_code_path
+
+    def queue_job( self, job_wrapper ):
+        # prepare the job
+        include_metadata = asbool( job_wrapper.job_destination.params.get( "embed_metadata_in_job", DEFAULT_EMBED_METADATA_IN_JOB ) )
+        if not self.prepare_job( job_wrapper, include_metadata=include_metadata ):
+            return
+
+        stderr = stdout = ''
+        exit_code = 0
+
+        # command line has been added to the wrapper by prepare_job()
+        command_line, exit_code_path = self.__command_line( job_wrapper )
+        job_id = job_wrapper.get_id_tag()
+
+        try:
+            stdout_file = tempfile.NamedTemporaryFile( suffix='_stdout', dir=job_wrapper.working_directory )
+            stderr_file = tempfile.NamedTemporaryFile( suffix='_stderr', dir=job_wrapper.working_directory )
+            log.debug( '(%s) executing job script: %s' % ( job_id, command_line ) )
+            proc = subprocess.Popen( args=command_line,
+                                     shell=True,
+                                     cwd=job_wrapper.working_directory,
+                                     stdout=stdout_file,
+                                     stderr=stderr_file,
+                                     env=self._environ,
+                                     preexec_fn=os.setpgrp )
+            job_wrapper.set_job_destination(job_wrapper.job_destination, proc.pid)
+            job_wrapper.change_state( model.Job.states.RUNNING )
+
+            terminated = self.__poll_if_needed( proc, job_wrapper, job_id )
+            if terminated:
+                return
+
+            # Reap the process and get the exit code.
+            exit_code = proc.wait()
+            try:
+                exit_code = int( open( exit_code_path, 'r' ).read() )
+            except Exception:
+                log.warning( "Failed to read exit code from path %s" % exit_code_path )
+            stdout_file.seek( 0 )
+            stderr_file.seek( 0 )
+            stdout = shrink_stream_by_size( stdout_file, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+            stderr = shrink_stream_by_size( stderr_file, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+            stdout_file.close()
+            stderr_file.close()
+            log.debug('execution finished: %s' % command_line)
+        except Exception:
+            log.exception("failure running job %d" % job_wrapper.job_id)
+            job_wrapper.fail( "failure running job", exception=True )
+            return
+        external_metadata = not asbool( job_wrapper.job_destination.params.get( "embed_metadata_in_job", DEFAULT_EMBED_METADATA_IN_JOB ) )
+        if external_metadata:
+            self._handle_metadata_externally( job_wrapper, resolve_requirements=True )
+        # Finish the job!
+        try:
+            job_wrapper.finish( stdout, stderr, exit_code )
+        except Exception:
+            log.exception("Job wrapper finish method failed")
+            job_wrapper.fail("Unable to finish job", exception=True)
+
+    def stop_job( self, job ):
+        # if our local job has JobExternalOutputMetadata associated, then our primary job has to have already finished
+        job_ext_output_metadata = job.get_external_output_metadata()
+        try:
+            pid = job_ext_output_metadata[0].job_runner_external_pid  # every JobExternalOutputMetadata has a pid set, we just need to take from one of them
+            assert pid not in [ None, '' ]
+        except Exception:
+            # metadata internal or job not complete yet
+            pid = job.get_job_runner_external_id()
+        if pid in [ None, '' ]:
+            log.warning( "stop_job(): %s: no PID in database for job, unable to stop" % job.get_id() )
+            return
+        pid = int( pid )
+        if not self._check_pid( pid ):
+            log.warning( "stop_job(): %s: PID %d was already dead or can't be signaled" % ( job.get_id(), pid ) )
+            return
+        for sig in [ 15, 9 ]:
+            try:
+                os.killpg( pid, sig )
+            except OSError as e:
+                log.warning( "stop_job(): %s: Got errno %s when attempting to signal %d to PID %d: %s" % ( job.get_id(), errno.errorcode[e.errno], sig, pid, e.strerror ) )
+                return  # give up
+            sleep( 2 )
+            if not self._check_pid( pid ):
+                log.debug( "stop_job(): %s: PID %d successfully killed with signal %d" % ( job.get_id(), pid, sig ) )
+                return
+        else:
+            log.warning( "stop_job(): %s: PID %d refuses to die after signaling TERM/KILL" % ( job.get_id(), pid ) )
+
+    def recover( self, job, job_wrapper ):
+        # local jobs can't be recovered
+        job_wrapper.change_state( model.Job.states.ERROR, info="This job was killed when Galaxy was restarted.  Please retry the job." )
+
+    def _check_pid( self, pid ):
+        try:
+            os.kill( pid, 0 )
+            return True
+        except OSError as e:
+            if e.errno == errno.ESRCH:
+                log.debug( "_check_pid(): PID %d is dead" % pid )
+            else:
+                log.warning( "_check_pid(): Got errno %s when attempting to check PID %d: %s" % ( errno.errorcode[e.errno], pid, e.strerror ) )
+            return False
+
+    def _terminate( self, proc ):
+        os.killpg( proc.pid, 15 )
+        sleep( 1 )
+        if proc.poll() is None:
+            os.killpg( proc.pid, 9 )
+        return proc.wait()  # reap
+
+    def __poll_if_needed( self, proc, job_wrapper, job_id ):
+        # Only poll if needed (i.e. job limits are set)
+        if not job_wrapper.has_limits():
+            return
+
+        job_start = datetime.datetime.now()
+        i = 0
+        # Iterate until the process exits, periodically checking its limits
+        while proc.poll() is None:
+            i += 1
+            if (i % 20) == 0:
+                limit_state = job_wrapper.check_limits(runtime=datetime.datetime.now() - job_start)
+                if limit_state is not None:
+                    job_wrapper.fail(limit_state[1])
+                    log.debug('(%s) Terminating process group' % job_id)
+                    self._terminate(proc)
+                    return True
+            else:
+                sleep( DEFAULT_POOL_SLEEP_TIME )
diff --git a/lib/galaxy/jobs/runners/pbs.py b/lib/galaxy/jobs/runners/pbs.py
new file mode 100644
index 0000000..5fe1af7
--- /dev/null
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -0,0 +1,558 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import time
+import traceback
+from datetime import timedelta
+
+try:
+    import pbs
+    PBS_IMPORT_MESSAGE = None
+except ImportError as exc:
+    pbs = None
+    PBS_IMPORT_MESSAGE = ('The Python pbs-python package is required to use '
+                          'this feature, please install it or correct the '
+                          'following error:\nImportError %s' % str(exc))
+
+from galaxy import (
+    model,
+    util
+)
+from galaxy.jobs import JobDestination
+from galaxy.jobs.runners import (
+    AsynchronousJobRunner,
+    AsynchronousJobState
+)
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'PBSJobRunner', )
+
+CLUSTER_ERROR_MESSAGE = "Job cannot be completed due to a cluster error, please retry it later: %s"
+
+# Template script fragment that symlinks staged datasets into their expected
+# paths on the execution host and creates the working directory.
+pbs_symlink_template = """
+for dataset in %s; do
+    dir=`dirname $dataset`
+    file=`basename $dataset`
+    [ ! -d $dir ] && mkdir -p $dir
+    [ ! -e $dataset ] && ln -s %s/$file $dataset
+done
+mkdir -p %s
+"""
+
+PBS_ARGMAP = {
+    'destination'           : '-q',
+    'Execution_Time'        : '-a',
+    'Account_Name'          : '-A',
+    'Checkpoint'            : '-c',
+    'Error_Path'            : '-e',
+    'Group_List'            : '-g',
+    'Hold_Types'            : '-h',
+    'Join_Paths'            : '-j',
+    'Keep_Files'            : '-k',
+    'Resource_List'         : '-l',
+    'Mail_Points'           : '-m',
+    'Mail_Users'            : '-M',
+    'Job_Name'              : '-N',
+    'Output_Path'           : '-o',
+    'Priority'              : '-p',
+    'Rerunable'             : '-r',
+    'Shell_Path_List'       : '-S',
+    'job_array_request'     : '-t',
+    'User_List'             : '-u',
+    'Variable_List'         : '-v',
+}
+
+# From pbs' pbs_job.h
+JOB_EXIT_STATUS = {
+    0: "job exec successful",
+    -1: "job exec failed, before files, no retry",
+    -2: "job exec failed, after files, no retry",
+    -3: "job execution failed, do retry",
+    -4: "job aborted on MOM initialization",
+    -5: "job aborted on MOM init, chkpt, no migrate",
+    -6: "job aborted on MOM init, chkpt, ok migrate",
+    -7: "job restart failed",
+    -8: "exec() of user command failed",
+    -9: "could not create/open stdout stderr files",
+    -10: "job exceeded a memory limit",
+    -11: "job exceeded a walltime limit",
+    -12: "job exceeded a cpu time limit",
+}
+
+
+class PBSJobRunner( AsynchronousJobRunner ):
+    """
+    Job runner backed by a finite pool of worker threads. FIFO scheduling
+    """
+    runner_name = "PBSRunner"
+
+    def __init__( self, app, nworkers ):
+        """Start the job runner """
+        # Check if PBS was importable, fail if not
+        assert pbs is not None, PBS_IMPORT_MESSAGE
+        if app.config.pbs_application_server and app.config.outputs_to_working_directory:
+            raise Exception( "pbs_application_server (file staging) and outputs_to_working_directory options are mutually exclusive" )
+
+        # Set the default server during startup
+        self.__default_pbs_server = None
+        self.default_pbs_server     # this is a property, so accessing it causes the default server to be set
+
+        # Proceed with general initialization
+        super( PBSJobRunner, self ).__init__( app, nworkers )
+        self._init_monitor_thread()
+        self._init_worker_threads()
+
+    @property
+    def default_pbs_server(self):
+        if self.__default_pbs_server is None:
+            self.__default_pbs_server = pbs.pbs_default()
+            log.debug( "Set default PBS server to %s" % self.default_pbs_server )
+        return self.__default_pbs_server
+
+    def url_to_destination(self, url):
+        """Convert a legacy URL to a job destination"""
+
+        if not url:
+            return
+
+        # Determine the PBS server
+        url_split = url.split("/")
+        server = url_split[2]
+        if server == '':
+            server = self.default_pbs_server
+        if server is None:
+            raise Exception( "Could not find TORQUE server" )
+
+        # Determine the queue, set the PBS destination (not the same thing as a Galaxy job destination)
+        pbs_destination = '@%s' % server
+        pbs_queue = url_split[3] or None
+        if pbs_queue is not None:
+            pbs_destination = '%s%s' % (pbs_queue, pbs_destination)
+
+        params = dict(destination=pbs_destination)
+
+        # Determine the args (long-format args were never supported in URLs so they are not supported here)
+        try:
+            opts = url.split('/')[4].strip().lstrip('-').split(' -')
+            assert opts != ['']
+            # stripping the - comes later (in parse_destination_params)
+            for i, opt in enumerate(opts):
+                opts[i] = '-' + opt
+        except (IndexError, AssertionError):
+            opts = []
+        for opt in opts:
+            param, value = opt.split( None, 1 )
+            params[param] = value
+
+        log.debug("Converted URL '%s' to destination runner=pbs, params=%s" % (url, params))
+
+        # Create a dynamic JobDestination
+        return JobDestination(runner='pbs', params=params)
+
+    def parse_destination_params(self, params):
+        """A wrapper method around __args_to_attrs() that allow administrators to define PBS
+        params as either command-line options (as in ``qsub(1B)``) or more human-readable "long"
+        args (as in ``pbs_submit(3B)``).
+
+        :returns: list of dicts -- The dicts map directly to pbs attropl structs (see ``pbs_submit(3B)``)
+        """
+        args = {}
+        for arg, value in params.items():
+            try:
+                if not arg.startswith('-'):
+                    arg = PBS_ARGMAP[arg]
+                arg = arg.lstrip('-')
+                args[arg] = value
+            except KeyError:
+                log.warning('Unrecognized long argument in destination params: %s' % arg)
+        return self.__args_to_attrs(args)
+
+    # Internal stuff
+    def __args_to_attrs(self, args):
+        """Convert a list of PBS command-line args (as in ``qsub(1B)``) to PBS' internal attribute representations.
+
+        :returns: list of dicts -- The dicts map directly to pbs attropl structs (see ``pbs_submit(3B)``)
+        """
+        rval = []
+        for arg, value in args.items():
+            if arg == 'l':
+                resource_attrs = value.split(',')
+                for j, ( res, val ) in enumerate( [ a.split('=', 1) for a in resource_attrs ] ):
+                    rval.append( dict( name=pbs.ATTR_l, value=val, resource=res ) )
+            else:
+                try:
+                    rval.append( dict( name=getattr( pbs, 'ATTR_' + arg ), value=value ) )
+                except AttributeError as e:
+                    raise Exception("Invalid parameter '%s': %s" % (arg, e))
+        return rval
+
+    def __get_pbs_server(self, job_destination_params):
+        if job_destination_params is None:
+            return None
+        return job_destination_params['destination'].split('@')[-1]
+
+    def queue_job( self, job_wrapper ):
+        """Create PBS script for a job and submit it to the PBS queue"""
+        # prepare the job
+        if not self.prepare_job( job_wrapper, include_metadata=not( self.app.config.pbs_stage_path ) ):
+            return
+
+        job_destination = job_wrapper.job_destination
+
+        # Determine the job's PBS destination (server/queue) and options from the job destination definition
+        pbs_queue_name = None
+        pbs_server_name = self.default_pbs_server
+        pbs_options = []
+        if '-q' in job_destination.params and 'destination' not in job_destination.params:
+            job_destination.params['destination'] = job_destination.params.pop('-q')
+        if 'destination' in job_destination.params:
+            if '@' in job_destination.params['destination']:
+                # Destination includes a server
+                pbs_queue_name, pbs_server_name = job_destination.params['destination'].split('@')
+                if pbs_queue_name == '':
+                    # e.g. `qsub -q @server`
+                    pbs_queue_name = None
+            else:
+                # Destination is just a queue
+                pbs_queue_name = job_destination.params['destination']
+            job_destination.params.pop('destination')
+
+        # Parse PBS params
+        pbs_options = self.parse_destination_params(job_destination.params)
+
+        # Explicitly set the determined PBS destination in the persisted job destination for recovery
+        job_destination.params['destination'] = '%s@%s' % (pbs_queue_name or '', pbs_server_name)
+
+        c = pbs.pbs_connect( util.smart_str( pbs_server_name ) )
+        if c <= 0:
+            errno, text = pbs.error()
+            job_wrapper.fail( "Unable to queue job for execution.  Resubmitting the job may succeed." )
+            log.error( "Connection to PBS server for submit failed: %s: %s" % ( errno, text ) )
+            return
+
+        # define job attributes
+        ofile = "%s/%s.o" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
+        efile = "%s/%s.e" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
+        ecfile = "%s/%s.ec" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
+
+        output_fnames = job_wrapper.get_output_fnames()
+
+        # If an application server is set, we're staging
+        if self.app.config.pbs_application_server:
+            pbs_ofile = self.app.config.pbs_application_server + ':' + ofile
+            pbs_efile = self.app.config.pbs_application_server + ':' + efile
+            output_files = [ str( o ) for o in output_fnames ]
+            output_files.append(ecfile)
+            stagein = self.get_stage_in_out( job_wrapper.get_input_fnames() + output_files, symlink=True )
+            stageout = self.get_stage_in_out( output_files )
+            attrs = [
+                dict( name=pbs.ATTR_o, value=pbs_ofile ),
+                dict( name=pbs.ATTR_e, value=pbs_efile ),
+                dict( name=pbs.ATTR_stagein, value=stagein ),
+                dict( name=pbs.ATTR_stageout, value=stageout ),
+            ]
+        # If not, we're using NFS
+        else:
+            attrs = [
+                dict( name=pbs.ATTR_o, value=ofile ),
+                dict( name=pbs.ATTR_e, value=efile ),
+            ]
+
+        # define PBS job options
+        attrs.append( dict( name=pbs.ATTR_N, value=str( "%s_%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id, job_wrapper.user ) ) ) )
+        job_attrs = pbs.new_attropl( len( attrs ) + len( pbs_options ) )
+        for i, attr in enumerate( attrs + pbs_options ):
+            job_attrs[i].name = attr['name']
+            job_attrs[i].value = attr['value']
+            if 'resource' in attr:
+                job_attrs[i].resource = attr['resource']
+        exec_dir = os.path.abspath( job_wrapper.working_directory )
+
+        # write the job script
+        if self.app.config.pbs_stage_path != '':
+            # touch the ecfile so that it gets staged
+            with open(ecfile, 'a'):
+                os.utime(ecfile, None)
+
+            stage_commands = pbs_symlink_template % (
+                " ".join( job_wrapper.get_input_fnames() + output_files ),
+                self.app.config.pbs_stage_path,
+                exec_dir,
+            )
+        else:
+            stage_commands = ''
+
+        env_setup_commands = [ stage_commands ]
+        script = self.get_job_file(job_wrapper, exit_code_path=ecfile, env_setup_commands=env_setup_commands)
+        job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
+        self.write_executable_script( job_file, script )
+        # job was deleted while we were preparing it
+        if job_wrapper.get_state() == model.Job.states.DELETED:
+            log.debug( "Job %s deleted by user before it entered the PBS queue" % job_wrapper.job_id )
+            pbs.pbs_disconnect(c)
+            if job_wrapper.cleanup_job in ( "always", "onsuccess" ):
+                self.cleanup( ( ofile, efile, ecfile, job_file ) )
+                job_wrapper.cleanup()
+            return
+
+        # submit
+        # The job tag includes the job and the task identifier
+        # (if a TaskWrapper was passed in):
+        galaxy_job_id = job_wrapper.get_id_tag()
+        log.debug("(%s) submitting file %s" % ( galaxy_job_id, job_file ) )
+
+        tries = 0
+        while tries < 5:
+            job_id = pbs.pbs_submit(c, job_attrs, job_file, pbs_queue_name, None)
+            tries += 1
+            if job_id:
+                pbs.pbs_disconnect(c)
+                break
+            errno, text = pbs.error()
+            log.warning( "(%s) pbs_submit failed (try %d/5), PBS error %d: %s" % (galaxy_job_id, tries, errno, text) )
+            time.sleep(2)
+        else:
+            log.error( "(%s) All attempts to submit job failed" % galaxy_job_id )
+            job_wrapper.fail( "Unable to run this job due to a cluster error, please retry it later" )
+            return
+
+        if pbs_queue_name is None:
+            log.debug("(%s) queued in default queue as %s" % (galaxy_job_id, job_id) )
+        else:
+            log.debug("(%s) queued in %s queue as %s" % (galaxy_job_id, pbs_queue_name, job_id) )
+
+        # persist destination
+        job_wrapper.set_job_destination( job_destination, job_id )
+
+        # Store PBS related state information for job
+        job_state = AsynchronousJobState()
+        job_state.job_wrapper = job_wrapper
+        job_state.job_id = job_id
+        job_state.job_file = job_file
+        job_state.output_file = ofile
+        job_state.error_file = efile
+        job_state.exit_code_file = ecfile
+        job_state.old_state = 'N'
+        job_state.running = False
+        job_state.job_destination = job_destination
+
+        # Add to our 'queue' of jobs to monitor
+        self.monitor_queue.put( job_state )
+
+    def check_watched_items( self ):
+        """
+        Called by the monitor thread to look at each watched job and deal
+        with state changes.
+        """
+        new_watched = []
+        # reduce pbs load by batching status queries
+        ( failures, statuses ) = self.check_all_jobs()
+        for pbs_job_state in self.watched:
+            job_id = pbs_job_state.job_id
+            galaxy_job_id = pbs_job_state.job_wrapper.get_id_tag()
+            old_state = pbs_job_state.old_state
+            pbs_server_name = self.__get_pbs_server(pbs_job_state.job_destination.params)
+            if pbs_server_name in failures:
+                log.debug( "(%s/%s) Skipping state check because PBS server connection failed" % ( galaxy_job_id, job_id ) )
+                new_watched.append( pbs_job_state )
+                continue
+            try:
+                status = statuses[job_id]
+            except KeyError:
+                if pbs_job_state.job_wrapper.get_state() == model.Job.states.DELETED:
+                    continue
+                try:
+                    # Recheck to make sure it wasn't a communication problem
+                    self.check_single_job( pbs_server_name, job_id )
+                    log.warning( "(%s/%s) PBS job was not in state check list, but was found with individual state check" % ( galaxy_job_id, job_id ) )
+                    new_watched.append( pbs_job_state )
+                except Exception:
+                    errno, text = pbs.error()
+                    if errno == 15001:
+                        # 15001 == job not in queue
+                        log.debug("(%s/%s) PBS job has left queue" % (galaxy_job_id, job_id) )
+                        self.work_queue.put( ( self.finish_job, pbs_job_state ) )
+                    else:
+                        # Unhandled error, continue to monitor
+                        log.info("(%s/%s) PBS state check resulted in error (%d): %s" % (galaxy_job_id, job_id, errno, text) )
+                        new_watched.append( pbs_job_state )
+                continue
+            if status.job_state != old_state:
+                log.debug("(%s/%s) PBS job state changed from %s to %s" % ( galaxy_job_id, job_id, old_state, status.job_state ) )
+            if status.job_state == "R" and not pbs_job_state.running:
+                pbs_job_state.running = True
+                pbs_job_state.job_wrapper.change_state( model.Job.states.RUNNING )
+            if status.job_state == "R" and status.get( 'resources_used', False ):
+                # resources_used may not be in the status for new jobs
+                h, m, s = [ int( i ) for i in status.resources_used.walltime.split( ':' ) ]
+                runtime = timedelta( 0, s, 0, 0, m, h )
+                if pbs_job_state.check_limits( runtime=runtime ):
+                    self.work_queue.put( ( self.fail_job, pbs_job_state ) )
+                    continue
+            elif status.job_state == "C":
+                # "keep_completed" is enabled in PBS, so try to check exit status
+                try:
+                    assert int( status.exit_status ) == 0
+                    log.debug("(%s/%s) PBS job has completed successfully" % ( galaxy_job_id, job_id ) )
+                except AssertionError:
+                    exit_status = int( status.exit_status )
+                    error_message = JOB_EXIT_STATUS.get( exit_status, 'Unknown error: %s' % status.exit_status )
+                    pbs_job_state.fail_message = CLUSTER_ERROR_MESSAGE % error_message
+                    log.error( '(%s/%s) PBS job failed: %s' % ( galaxy_job_id, job_id, error_message ) )
+                    pbs_job_state.stop_job = False
+                    self.work_queue.put( ( self.fail_job, pbs_job_state ) )
+                    continue
+                except AttributeError:
+                    # No exit_status, can't verify proper completion so we just have to assume success.
+                    log.debug("(%s/%s) PBS job has completed" % ( galaxy_job_id, job_id ) )
+                self.work_queue.put( ( self.finish_job, pbs_job_state ) )
+                continue
+            pbs_job_state.old_state = status.job_state
+            new_watched.append( pbs_job_state )
+        # Replace the watch list with the updated version
+        self.watched = new_watched
+
+    def check_all_jobs( self ):
+        """
+        Returns a list of servers that failed to be contacted and a dict
+        of "job_id : status" pairs (where status is a bunchified version
+        of the API's structure).
+        """
+        servers = []
+        failures = []
+        statuses = {}
+        for pbs_job_state in self.watched:
+            pbs_server_name = self.__get_pbs_server(pbs_job_state.job_destination.params)
+            if pbs_server_name not in servers:
+                servers.append( pbs_server_name )
+            pbs_job_state.check_count += 1
+        for pbs_server_name in servers:
+            c = pbs.pbs_connect( util.smart_str( pbs_server_name ) )
+            if c <= 0:
+                log.debug("connection to PBS server %s for state check failed" % pbs_server_name )
+                failures.append( pbs_server_name )
+                continue
+            stat_attrl = pbs.new_attrl(3)
+            stat_attrl[0].name = pbs.ATTR_state
+            stat_attrl[1].name = pbs.ATTR_used
+            stat_attrl[2].name = pbs.ATTR_exitstat
+            jobs = pbs.pbs_statjob( c, None, stat_attrl, None )
+            pbs.pbs_disconnect( c )
+            statuses.update( self.convert_statjob_to_bunches( jobs ) )
+        return ( failures, statuses )
+
+    def convert_statjob_to_bunches( self, statjob_out ):
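+        # Illustrative result: a job reporting state "R" and walltime "00:05:00"
+        # becomes Bunch(job_state='R', resources_used=Bunch(walltime='00:05:00')),
+        # keyed by the PBS job name.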
+        statuses = {}
+        for job in statjob_out:
+            status = {}
+            for attrib in job.attribs:
+                if attrib.resource is None:
+                    status[ attrib.name ] = attrib.value
+                else:
+                    if attrib.name not in status:
+                        status[ attrib.name ] = Bunch()
+                    status[ attrib.name ][ attrib.resource ] = attrib.value
+            statuses[ job.name ] = Bunch( **status )
+        return statuses
+
+    def check_single_job( self, pbs_server_name, job_id ):
+        """
+        Returns the state of a single job, used to make sure a job is
+        really dead.
+        """
+        c = pbs.pbs_connect( util.smart_str( pbs_server_name ) )
+        if c <= 0:
+            log.debug("connection to PBS server %s for state check failed" % pbs_server_name )
+            return None
+        stat_attrl = pbs.new_attrl(1)
+        stat_attrl[0].name = pbs.ATTR_state
+        jobs = pbs.pbs_statjob( c, job_id, stat_attrl, None )
+        pbs.pbs_disconnect( c )
+        return jobs[0].attribs[0].value
+
+    def fail_job( self, pbs_job_state ):
+        """
+        Separated out so we can use the worker threads for it.
+        """
+        # NB: The stop_job method was modified to limit exceptions being sent up here,
+        # so the wrapper's fail method will now be called in case of error:
+        if pbs_job_state.stop_job:
+            self.stop_job( self.sa_session.query( self.app.model.Job ).get( pbs_job_state.job_wrapper.job_id ) )
+        pbs_job_state.job_wrapper.fail( pbs_job_state.fail_message )
+        if pbs_job_state.job_wrapper.cleanup_job == "always":
+            self.cleanup( ( pbs_job_state.output_file, pbs_job_state.error_file, pbs_job_state.exit_code_file, pbs_job_state.job_file ) )
+
+    def get_stage_in_out( self, fnames, symlink=False ):
+        """Convenience function to create a stagein/stageout list"""
+        stage = ''
+        for fname in fnames:
+            if os.access(fname, os.R_OK):
+                if stage:
+                    stage += ','
+                # pathnames are now absolute
+                if symlink and self.app.config.pbs_stage_path:
+                    stage_name = os.path.join(self.app.config.pbs_stage_path, os.path.split(fname)[1])
+                else:
+                    stage_name = fname
+                stage += "%s@%s:%s" % (stage_name, self.app.config.pbs_dataset_server, fname)
+        return stage
+
+    def stop_job( self, job ):
+        """Attempts to delete a job from the PBS queue"""
+        job_id = job.get_job_runner_external_id().encode('utf-8')
+        job_tag = "(%s/%s)" % ( job.get_id_tag(), job_id )
+        log.debug( "%s Stopping PBS job" % job_tag )
+
+        # Declare the connection handle c so that it can be cleaned up:
+        c = None
+
+        try:
+            pbs_server_name = self.__get_pbs_server( job.destination_params )
+            if pbs_server_name is None:
+                log.debug("(%s) Job queued but no destination stored in job params, cannot delete"
+                          % job_tag )
+                return
+            c = pbs.pbs_connect( util.smart_str( pbs_server_name ) )
+            if c <= 0:
+                log.debug("(%s) Connection to PBS server for job delete failed"
+                          % job_tag )
+                return
+            pbs.pbs_deljob( c, job_id, '' )
+            log.debug( "%s Removed from PBS queue before job completion"
+                       % job_tag )
+        except Exception:
+            e = traceback.format_exc()
+            log.debug( "%s Unable to stop job: %s" % ( job_tag, e ) )
+        finally:
+            # Cleanup: disconnect from the server.
+            if c is not None:
+                pbs.pbs_disconnect( c )
+
+    def recover( self, job, job_wrapper ):
+        """Recovers jobs stuck in the queued/running state when Galaxy started"""
+        job_id = job.get_job_runner_external_id()
+        pbs_job_state = AsynchronousJobState()
+        pbs_job_state.output_file = "%s/%s.o" % (self.app.config.cluster_files_directory, job.id)
+        pbs_job_state.error_file = "%s/%s.e" % (self.app.config.cluster_files_directory, job.id)
+        pbs_job_state.exit_code_file = "%s/%s.ec" % (self.app.config.cluster_files_directory, job.id)
+        pbs_job_state.job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job.id)
+        pbs_job_state.job_id = str( job_id )
+        pbs_job_state.runner_url = job_wrapper.get_job_runner_url()
+        pbs_job_state.job_destination = job_wrapper.job_destination
+        job_wrapper.command_line = job.command_line
+        pbs_job_state.job_wrapper = job_wrapper
+        if job.state == model.Job.states.RUNNING:
+            log.debug( "(%s/%s) is still in running state, adding to the PBS queue" % ( job.id, job.get_job_runner_external_id() ) )
+            pbs_job_state.old_state = 'R'
+            pbs_job_state.running = True
+            self.monitor_queue.put( pbs_job_state )
+        elif job.state == model.Job.states.QUEUED:
+            log.debug( "(%s/%s) is still in PBS queued state, adding to the PBS queue" % ( job.id, job.get_job_runner_external_id() ) )
+            pbs_job_state.old_state = 'Q'
+            pbs_job_state.running = False
+            self.monitor_queue.put( pbs_job_state )
diff --git a/lib/galaxy/jobs/runners/pulsar.py b/lib/galaxy/jobs/runners/pulsar.py
new file mode 100644
index 0000000..ff0ed03
--- /dev/null
+++ b/lib/galaxy/jobs/runners/pulsar.py
@@ -0,0 +1,858 @@
+"""Job runner used to execute Galaxy jobs through Pulsar.
+
+More information on Pulsar can be found at http://pulsar.readthedocs.org/.
+"""
+from __future__ import absolute_import  # Need to import pulsar_client absolutely.
+
+import errno
+import logging
+import os
+from distutils.version import LooseVersion
+from time import sleep
+
+import pulsar.core
+import yaml
+from pulsar.client import (
+    build_client_manager,
+    ClientJobDescription,
+    ClientOutputs,
+    finish_job as pulsar_finish_job,
+    PathMapper,
+    PulsarClientTransportError,
+    PulsarOutputs,
+    submit_job as pulsar_submit_job,
+    url_to_destination_params
+)
+
+from galaxy import model
+from galaxy.jobs import (
+    ComputeEnvironment,
+    JobDestination
+)
+from galaxy.jobs.command_factory import build_command
+from galaxy.jobs.runners import (
+    AsynchronousJobRunner,
+    AsynchronousJobState
+)
+from galaxy.tools.deps import dependencies
+from galaxy.util import (
+    galaxy_directory,
+    specs,
+    string_as_bool_or_none
+)
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+
+__all__ = (
+    'PulsarLegacyJobRunner',
+    'PulsarRESTJobRunner',
+    'PulsarMQJobRunner',
+    'PulsarEmbeddedJobRunner',
+)
+
+MINIMUM_PULSAR_VERSION = LooseVersion("0.7.0.dev3")
+
+NO_REMOTE_GALAXY_FOR_METADATA_MESSAGE = "Pulsar misconfiguration - Pulsar client configured to set metadata remotely, but remote Pulsar isn't properly configured with a galaxy_home directory."
+NO_REMOTE_DATATYPES_CONFIG = "Pulsar client is configured to use remote datatypes configuration when setting metadata externally, but Pulsar is not configured with this information. Defaulting to datatypes_conf.xml."
+GENERIC_REMOTE_ERROR = "Failed to communicate with remote job server."
+FAILED_REMOTE_ERROR = "Remote job server indicated a problem running or monitoring this job."
+LOST_REMOTE_ERROR = "Remote job server could not determine this job's state."
+
+UPGRADE_PULSAR_ERROR = "Galaxy is misconfigured, please contact administrator. The target Pulsar server is unsupported, this version of Galaxy requires Pulsar version %s or newer." % MINIMUM_PULSAR_VERSION
+
+# Is there a good way to infer some default for this? Can only use
+# url_for from web threads. https://gist.github.com/jmchilton/9098762
+DEFAULT_GALAXY_URL = "http://localhost:8080"
+
+PULSAR_PARAM_SPECS = dict(
+    transport=dict(
+        map=specs.to_str_or_none,
+        valid=specs.is_in("urllib", "curl", None),
+        default=None
+    ),
+    transport_timeout=dict(
+        map=lambda val: None if val == "None" else int(val),
+        default=None,
+    ),
+    cache=dict(
+        map=specs.to_bool_or_none,
+        default=None,
+    ),
+    amqp_url=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    galaxy_url=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    pulsar_config=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    manager=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    persistence_directory=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    amqp_acknowledge=dict(
+        map=specs.to_bool_or_none,
+        default=None
+    ),
+    amqp_ack_republish_time=dict(
+        map=lambda val: None if val == "None" else int(val),
+        default=None,
+    ),
+    amqp_consumer_timeout=dict(
+        map=lambda val: None if val == "None" else float(val),
+        default=None,
+    ),
+    amqp_connect_ssl_ca_certs=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    amqp_connect_ssl_keyfile=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    amqp_connect_ssl_certfile=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    amqp_connect_ssl_cert_reqs=dict(
+        map=specs.to_str_or_none,
+        default=None,
+    ),
+    # http://kombu.readthedocs.org/en/latest/reference/kombu.html#kombu.Producer.publish
+    amqp_publish_retry=dict(
+        map=specs.to_bool,
+        default=False,
+    ),
+    amqp_publish_priority=dict(
+        map=int,
+        valid=lambda x: 0 <= x <= 9,
+        default=0,
+    ),
+    # http://kombu.readthedocs.org/en/latest/reference/kombu.html#kombu.Exchange.delivery_mode
+    amqp_publish_delivery_mode=dict(
+        map=str,
+        valid=specs.is_in("transient", "persistent"),
+        default="persistent",
+    ),
+    amqp_publish_retry_max_retries=dict(
+        map=int,
+        default=None,
+    ),
+    amqp_publish_retry_interval_start=dict(
+        map=int,
+        default=None,
+    ),
+    amqp_publish_retry_interval_step=dict(
+        map=int,
+        default=None,
+    ),
+    amqp_publish_retry_interval_max=dict(
+        map=int,
+        default=None,
+    ),
+)
+
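+# A hedged illustration (not upstream code) of how the specs above are
+# applied: each raw parameter string from the plugin/destination configuration
+# is passed through the entry's ``map`` callable and then checked against
+# ``valid`` when one is defined.
+#
+#   spec = PULSAR_PARAM_SPECS["amqp_publish_priority"]
+#   value = spec["map"]("5")      # -> 5
+#   assert spec["valid"](value)   # 0 <= 5 <= 9 holds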
+
+PARAMETER_SPECIFICATION_REQUIRED = object()
+PARAMETER_SPECIFICATION_IGNORED = object()
+
+
+class PulsarJobRunner( AsynchronousJobRunner ):
+    """Base class for pulsar job runners."""
+
+    runner_name = "PulsarJobRunner"
+    default_build_pulsar_app = False
+
+    def __init__( self, app, nworkers, **kwds ):
+        """Start the job runner."""
+        super( PulsarJobRunner, self ).__init__( app, nworkers, runner_param_specs=PULSAR_PARAM_SPECS, **kwds )
+        self._init_worker_threads()
+        galaxy_url = self.runner_params.galaxy_url
+        if not galaxy_url:
+            galaxy_url = app.config.galaxy_infrastructure_url
+        if galaxy_url:
+            galaxy_url = galaxy_url.rstrip("/")
+        self.galaxy_url = galaxy_url
+        self.__init_client_manager()
+        self._monitor()
+
+    def _monitor( self ):
+        # Extension point allow MQ variant to setup callback instead
+        self._init_monitor_thread()
+
+    def __init_client_manager( self ):
+        pulsar_conf = self.runner_params.get('pulsar_config', None)
+        self.__init_pulsar_app(pulsar_conf)
+
+        client_manager_kwargs = {}
+        for kwd in 'manager', 'cache', 'transport', 'persistence_directory':
+            client_manager_kwargs[ kwd ] = self.runner_params[ kwd ]
+        if self.pulsar_app is not None:
+            client_manager_kwargs[ "pulsar_app" ] = self.pulsar_app
+            # TODO: Hack - remove the following line after a Pulsar lib update
+            # that includes https://github.com/galaxyproject/pulsar/commit/ce0636a5b64fae52d165bcad77b2caa3f0e9c232
+            client_manager_kwargs[ "file_cache" ] = None
+
+        for kwd in self.runner_params.keys():
+            if kwd.startswith( 'amqp_' ) or kwd.startswith( 'transport_' ):
+                client_manager_kwargs[ kwd ] = self.runner_params[ kwd ]
+        self.client_manager = build_client_manager(**client_manager_kwargs)
+
+    def __init_pulsar_app( self, pulsar_conf_path ):
+        if pulsar_conf_path is None and not self.default_build_pulsar_app:
+            self.pulsar_app = None
+            return
+        conf = {}
+        if pulsar_conf_path is None:
+            log.info("Creating a Pulsar app with default configuration (no pulsar_conf specified).")
+        else:
+            log.info("Loading Pulsar app configuration from %s" % pulsar_conf_path)
+            with open(pulsar_conf_path, "r") as f:
+                conf.update(yaml.load(f) or {})
+        if "job_metrics_config_file" not in conf:
+            conf["job_metrics"] = self.app.job_metrics
+        if "staging_directory" not in conf:
+            conf["staging_directory"] = "database/pulsar_staging"
+        if "persistence_directory" not in conf:
+            conf["persistence_directory"] = "database/pulsar_persisted_data"
+        if "galaxy_home" not in conf:
+            conf["galaxy_home"] = galaxy_directory()
+        self.pulsar_app = pulsar.core.PulsarApp(**conf)
+
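+    # A hedged sketch (not upstream documentation): a minimal pulsar_config
+    # YAML accepted by __init_pulsar_app above; omitted keys fall back to the
+    # defaults applied in that method:
+    #
+    #   staging_directory: database/pulsar_staging
+    #   persistence_directory: database/pulsar_persisted_data
+    #   galaxy_home: /path/to/galaxy   # hypothetical path
+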
+    def url_to_destination( self, url ):
+        """Convert a legacy URL to a job destination."""
+        return JobDestination( runner="pulsar", params=url_to_destination_params( url ) )
+
+    def check_watched_item(self, job_state):
+        try:
+            client = self.get_client_from_state(job_state)
+            status = client.get_status()
+        except PulsarClientTransportError as exc:
+            log.error("Communication error with Pulsar server on state check, will retry: %s", exc)
+            return job_state
+        except Exception:
+            # An orphaned job was put into the queue at app startup, or the
+            # remote server went down; either way, we are done with this job.
+            self.mark_as_finished(job_state)
+            return None
+        job_state = self._update_job_state_for_status(job_state, status)
+        return job_state
+
+    def _update_job_state_for_status(self, job_state, pulsar_status):
+        if pulsar_status == "complete":
+            self.mark_as_finished(job_state)
+            return None
+        if pulsar_status in ["failed", "lost"]:
+            if pulsar_status == "failed":
+                message = FAILED_REMOTE_ERROR
+            else:
+                message = LOST_REMOTE_ERROR
+            if not job_state.job_wrapper.get_job().finished:
+                self.fail_job(job_state, message)
+            return None
+        if pulsar_status == "running" and not job_state.running:
+            job_state.running = True
+            job_state.job_wrapper.change_state( model.Job.states.RUNNING )
+        return job_state
+
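+    # Hedged summary of _update_job_state_for_status above: "complete" marks
+    # the job finished; "failed"/"lost" fail it unless Galaxy already
+    # considers it finished; "running" flips the watched state to RUNNING and
+    # updates the Galaxy job state; any other status leaves the job untouched.
+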
+    def queue_job(self, job_wrapper):
+        job_destination = job_wrapper.job_destination
+        self._populate_parameter_defaults( job_destination )
+
+        command_line, client, remote_job_config, compute_environment = self.__prepare_job( job_wrapper, job_destination )
+
+        if not command_line:
+            return
+
+        try:
+            dependencies_description = PulsarJobRunner.__dependencies_description( client, job_wrapper )
+            rewrite_paths = not PulsarJobRunner.__rewrite_parameters( client )
+            unstructured_path_rewrites = {}
+            if compute_environment:
+                unstructured_path_rewrites = compute_environment.unstructured_path_rewrites
+
+            client_job_description = ClientJobDescription(
+                command_line=command_line,
+                input_files=self.get_input_files(job_wrapper),
+                client_outputs=self.__client_outputs(client, job_wrapper),
+                working_directory=job_wrapper.tool_working_directory,
+                metadata_directory=job_wrapper.working_directory,
+                tool=job_wrapper.tool,
+                config_files=job_wrapper.extra_filenames,
+                dependencies_description=dependencies_description,
+                env=client.env,
+                rewrite_paths=rewrite_paths,
+                arbitrary_files=unstructured_path_rewrites,
+            )
+            job_id = pulsar_submit_job(client, client_job_description, remote_job_config)
+            log.info("Pulsar job submitted with job_id %s" % job_id)
+            job_wrapper.set_job_destination( job_destination, job_id )
+            job_wrapper.change_state( model.Job.states.QUEUED )
+        except Exception:
+            job_wrapper.fail( "failure running job", exception=True )
+            log.exception("failure running job %d" % job_wrapper.job_id)
+            return
+
+        pulsar_job_state = AsynchronousJobState()
+        pulsar_job_state.job_wrapper = job_wrapper
+        pulsar_job_state.job_id = job_id
+        pulsar_job_state.old_state = True
+        pulsar_job_state.running = False
+        pulsar_job_state.job_destination = job_destination
+        self.monitor_job(pulsar_job_state)
+
+    def __prepare_job(self, job_wrapper, job_destination):
+        """Build command-line and Pulsar client for this job."""
+        command_line = None
+        client = None
+        remote_job_config = None
+        compute_environment = None
+        try:
+            client = self.get_client_from_wrapper(job_wrapper)
+            tool = job_wrapper.tool
+            remote_job_config = client.setup(tool.id, tool.version)
+            PulsarJobRunner.check_job_config(remote_job_config)
+            rewrite_parameters = PulsarJobRunner.__rewrite_parameters( client )
+            prepare_kwds = {}
+            if rewrite_parameters:
+                compute_environment = PulsarComputeEnvironment( client, job_wrapper, remote_job_config )
+                prepare_kwds[ 'compute_environment' ] = compute_environment
+            job_wrapper.prepare( **prepare_kwds )
+            self.__prepare_input_files_locally(job_wrapper)
+            remote_metadata = PulsarJobRunner.__remote_metadata( client )
+            dependency_resolution = PulsarJobRunner.__dependency_resolution( client )
+            metadata_kwds = self.__build_metadata_configuration(client, job_wrapper, remote_metadata, remote_job_config)
+            remote_command_params = dict(
+                working_directory=remote_job_config['metadata_directory'],
+                metadata_kwds=metadata_kwds,
+                dependency_resolution=dependency_resolution,
+            )
+            remote_working_directory = remote_job_config['working_directory']
+            # TODO: The following path definitions work for Pulsar (and always
+            # have), but they should be calculated at some other level.
+            remote_job_directory = os.path.abspath(os.path.join(remote_working_directory, os.path.pardir))
+            remote_tool_directory = os.path.abspath(os.path.join(remote_job_directory, "tool_files"))
+            container = self._find_container(
+                job_wrapper,
+                compute_working_directory=remote_working_directory,
+                compute_tool_directory=remote_tool_directory,
+                compute_job_directory=remote_job_directory,
+            )
+            job_wrapper.disable_commands_in_new_shell()
+            metadata_directory = None
+            if remote_metadata:
+                metadata_directory = remote_job_config['metadata_directory']
+
+            # Pulsar handles ``create_tool_working_directory`` and
+            # ``include_work_dir_outputs`` details.
+            command_line = build_command(
+                self,
+                job_wrapper=job_wrapper,
+                container=container,
+                include_metadata=remote_metadata,
+                metadata_directory=metadata_directory,
+                create_tool_working_directory=False,
+                include_work_dir_outputs=False,
+                remote_command_params=remote_command_params,
+            )
+        except UnsupportedPulsarException as e:
+            job_wrapper.fail( e.message, exception=False )
+            log.exception("failure running job %d" % job_wrapper.job_id)
+        except Exception:
+            job_wrapper.fail( "failure preparing job", exception=True )
+            log.exception("failure running job %d" % job_wrapper.job_id)
+
+        # If a command line could not be built, there is nothing to run;
+        # finish the job immediately.
+        if not command_line:
+            job_wrapper.finish( '', '' )
+
+        return command_line, client, remote_job_config, compute_environment
+
+    def __prepare_input_files_locally(self, job_wrapper):
+        """Run task splitting commands locally."""
+        prepare_input_files_cmds = getattr(job_wrapper, 'prepare_input_files_cmds', None)
+        if prepare_input_files_cmds is not None:
+            for cmd in prepare_input_files_cmds:  # run the commands to stage the input files
+                if 0 != os.system(cmd):
+                    raise Exception('Error running file staging command: %s' % cmd)
+            job_wrapper.prepare_input_files_cmds = None  # prevent them from being used in-line
+
+    def _populate_parameter_defaults( self, job_destination ):
+        updated = False
+        params = job_destination.params
+        for key, value in self.destination_defaults.items():
+            if key in params:
+                if value is PARAMETER_SPECIFICATION_IGNORED:
+                    log.warning( "Pulsar runner in selected configuration ignores parameter %s" % key )
+                continue
+            # if self.runner_params.get( key, None ):
+            #    # Let plugin define defaults for some parameters -
+            #    # for instance that way jobs_directory can be
+            #    # configured next to AMQP url (where it belongs).
+            #    params[ key ] = self.runner_params[ key ]
+            #    continue
+
+            if not value:
+                continue
+
+            if value is PARAMETER_SPECIFICATION_REQUIRED:
+                raise Exception( "Pulsar destination does not define required parameter %s" % key )
+            elif value is not PARAMETER_SPECIFICATION_IGNORED:
+                params[ key ] = value
+                updated = True
+        return updated
+
+    def get_output_files(self, job_wrapper):
+        output_paths = job_wrapper.get_output_fnames()
+        return [ str( o ) for o in output_paths ]   # Force job_path from DatasetPath objects.
+
+    def get_input_files(self, job_wrapper):
+        input_paths = job_wrapper.get_input_paths()
+        return [ str( i ) for i in input_paths ]  # Force job_path from DatasetPath objects.
+
+    def get_client_from_wrapper(self, job_wrapper):
+        job_id = job_wrapper.job_id
+        if hasattr(job_wrapper, 'task_id'):
+            job_id = "%s_%s" % (job_id, job_wrapper.task_id)
+        params = job_wrapper.job_destination.params.copy()
+        for key, value in params.items():
+            if value:
+                params[key] = model.User.expand_user_properties( job_wrapper.get_job().user, value )
+
+        env = getattr( job_wrapper.job_destination, "env", [] )
+        return self.get_client( params, job_id, env )
+
+    def get_client_from_state(self, job_state):
+        job_destination_params = job_state.job_destination.params
+        job_id = job_state.job_id
+        return self.get_client( job_destination_params, job_id )
+
+    def get_client( self, job_destination_params, job_id, env=[] ):
+        # Cannot use url_for outside of web thread.
+        # files_endpoint = url_for( controller="job_files", job_id=encoded_job_id )
+
+        encoded_job_id = self.app.security.encode_id(job_id)
+        job_key = self.app.security.encode_id( job_id, kind="jobs_files" )
+        endpoint_base = "%s/api/jobs/%s/files?job_key=%s"
+        if self.app.config.nginx_upload_job_files_path:
+            endpoint_base = "%s" + \
+                            self.app.config.nginx_upload_job_files_path + \
+                            "?job_id=%s&job_key=%s"
+        files_endpoint = endpoint_base % (
+            self.galaxy_url,
+            encoded_job_id,
+            job_key
+        )
+        get_client_kwds = dict(
+            job_id=str( job_id ),
+            files_endpoint=files_endpoint,
+            env=env
+        )
+        return self.client_manager.get_client( job_destination_params, **get_client_kwds )
+
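+    # Hedged illustration of the files_endpoint assembled in get_client above
+    # (all values hypothetical): with galaxy_url "http://localhost:8080", an
+    # encoded job id "abc123" and job key "f00ba4", the default endpoint_base
+    # yields:
+    #
+    #   http://localhost:8080/api/jobs/abc123/files?job_key=f00ba4
+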
+    def finish_job( self, job_state ):
+        stderr = stdout = ''
+        job_wrapper = job_state.job_wrapper
+        try:
+            client = self.get_client_from_state(job_state)
+            run_results = client.full_status()
+            remote_working_directory = run_results.get("working_directory", None)
+            remote_metadata_directory = run_results.get("metadata_directory", None)
+            stdout = run_results.get('stdout', '')
+            stderr = run_results.get('stderr', '')
+            exit_code = run_results.get('returncode', None)
+            pulsar_outputs = PulsarOutputs.from_status_response(run_results)
+            # Use Pulsar client code to transfer/copy files back
+            # and cleanup job if needed.
+            completed_normally = \
+                job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]
+            cleanup_job = job_wrapper.cleanup_job
+            client_outputs = self.__client_outputs(client, job_wrapper)
+            finish_args = dict( client=client,
+                                job_completed_normally=completed_normally,
+                                cleanup_job=cleanup_job,
+                                client_outputs=client_outputs,
+                                pulsar_outputs=pulsar_outputs )
+            failed = pulsar_finish_job( **finish_args )
+            if failed:
+                job_wrapper.fail("Failed to find or download one or more job outputs from remote server.", exception=True)
+        except Exception:
+            message = GENERIC_REMOTE_ERROR
+            job_wrapper.fail( message, exception=True )
+            log.exception("failure finishing job %d" % job_wrapper.job_id)
+            return
+        if not PulsarJobRunner.__remote_metadata( client ):
+            self._handle_metadata_externally( job_wrapper, resolve_requirements=True )
+        # Finish the job
+        try:
+            job_wrapper.finish(
+                stdout,
+                stderr,
+                exit_code,
+                remote_working_directory=remote_working_directory,
+                remote_metadata_directory=remote_metadata_directory,
+            )
+        except Exception:
+            log.exception("Job wrapper finish method failed")
+            job_wrapper.fail("Unable to finish job", exception=True)
+
+    def fail_job( self, job_state, message=GENERIC_REMOTE_ERROR ):
+        """Seperated out so we can use the worker threads for it."""
+        self.stop_job( self.sa_session.query( self.app.model.Job ).get( job_state.job_wrapper.job_id ) )
+        job_state.job_wrapper.fail( getattr( job_state, "fail_message", message ) )
+
+    def check_pid( self, pid ):
+        try:
+            os.kill( pid, 0 )
+            return True
+        except OSError as e:
+            if e.errno == errno.ESRCH:
+                log.debug( "check_pid(): PID %d is dead" % pid )
+            else:
+                log.warning( "check_pid(): Got errno %s when attempting to check PID %d: %s" % ( errno.errorcode[e.errno], pid, e.strerror ) )
+            return False
+
+    def stop_job( self, job ):
+        # if our local job has JobExternalOutputMetadata associated, then our primary job has to have already finished
+        client = self.get_client( job.destination_params, job.job_runner_external_id )
+        job_ext_output_metadata = job.get_external_output_metadata()
+        if not PulsarJobRunner.__remote_metadata( client ) and job_ext_output_metadata:
+            pid = job_ext_output_metadata[0].job_runner_external_pid  # every JobExternalOutputMetadata has a pid set; we just need to take it from one of them
+            if pid in [ None, '' ]:
+                log.warning( "stop_job(): %s: no PID in database for job, unable to stop" % job.id )
+                return
+            pid = int( pid )
+            if not self.check_pid( pid ):
+                log.warning( "stop_job(): %s: PID %d was already dead or can't be signaled" % ( job.id, pid ) )
+                return
+            for sig in [ 15, 9 ]:
+                try:
+                    os.killpg( pid, sig )
+                except OSError as e:
+                    log.warning( "stop_job(): %s: Got errno %s when attempting to signal %d to PID %d: %s" % ( job.id, errno.errorcode[e.errno], sig, pid, e.strerror ) )
+                    return  # give up
+                sleep( 2 )
+                if not self.check_pid( pid ):
+                    log.debug( "stop_job(): %s: PID %d successfully killed with signal %d" % ( job.id, pid, sig ) )
+                    return
+                else:
+                    log.warning( "stop_job(): %s: PID %d refuses to die after signaling TERM/KILL" % ( job.id, pid ) )
+        else:
+            # Remote kill
+            pulsar_url = job.job_runner_name
+            job_id = job.job_runner_external_id
+            log.debug("Attempt remote Pulsar kill of job with url %s and id %s" % (pulsar_url, job_id))
+            client = self.get_client(job.destination_params, job_id)
+            client.kill()
+
+    def recover( self, job, job_wrapper ):
+        """Recover jobs stuck in the queued/running state when Galaxy started."""
+        job_state = self._job_state( job, job_wrapper )
+        job_wrapper.command_line = job.get_command_line()
+        state = job.get_state()
+        if state in [model.Job.states.RUNNING, model.Job.states.QUEUED]:
+            log.debug( "(Pulsar/%s) is still in running state, adding to the Pulsar queue" % ( job.get_id()) )
+            job_state.old_state = True
+            job_state.running = state == model.Job.states.RUNNING
+            self.monitor_queue.put( job_state )
+
+    def shutdown( self ):
+        super( PulsarJobRunner, self ).shutdown()
+        self.client_manager.shutdown()
+
+    def _job_state( self, job, job_wrapper ):
+        job_state = AsynchronousJobState()
+        # TODO: Determine why this is set when using normal message queue updates
+        # but not CLI submitted MQ updates...
+        raw_job_id = job.get_job_runner_external_id() or job_wrapper.job_id
+        job_state.job_id = str( raw_job_id )
+        job_state.runner_url = job_wrapper.get_job_runner_url()
+        job_state.job_destination = job_wrapper.job_destination
+        job_state.job_wrapper = job_wrapper
+        return job_state
+
+    def __client_outputs( self, client, job_wrapper ):
+        work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
+        output_files = self.get_output_files( job_wrapper )
+        client_outputs = ClientOutputs(
+            working_directory=job_wrapper.tool_working_directory,
+            metadata_directory=job_wrapper.working_directory,
+            work_dir_outputs=work_dir_outputs,
+            output_files=output_files,
+            version_file=job_wrapper.get_version_string_path(),
+        )
+        return client_outputs
+
+    @staticmethod
+    def check_job_config(remote_job_config):
+        # 0.6.0 was the newest Pulsar version that did not report its version.
+        pulsar_version = LooseVersion(remote_job_config.get('pulsar_version', "0.6.0"))
+        log.info("pulsar_version is %s" % pulsar_version)
+        if pulsar_version < MINIMUM_PULSAR_VERSION:
+            raise UnsupportedPulsarException()
+
+    @staticmethod
+    def __dependencies_description( pulsar_client, job_wrapper ):
+        dependency_resolution = PulsarJobRunner.__dependency_resolution( pulsar_client )
+        remote_dependency_resolution = dependency_resolution == "remote"
+        if not remote_dependency_resolution:
+            return None
+        requirements = job_wrapper.tool.requirements or []
+        installed_tool_dependencies = job_wrapper.tool.installed_tool_dependencies or []
+        return dependencies.DependenciesDescription(
+            requirements=requirements,
+            installed_tool_dependencies=installed_tool_dependencies,
+        )
+
+    @staticmethod
+    def __dependency_resolution( pulsar_client ):
+        dependency_resolution = pulsar_client.destination_params.get( "dependency_resolution", "local" )
+        if dependency_resolution not in ["none", "local", "remote"]:
+            raise Exception("Unknown dependency_resolution value encountered %s" % dependency_resolution)
+        return dependency_resolution
+
+    @staticmethod
+    def __remote_metadata( pulsar_client ):
+        remote_metadata = string_as_bool_or_none( pulsar_client.destination_params.get( "remote_metadata", False ) )
+        return remote_metadata
+
+    @staticmethod
+    def __use_remote_datatypes_conf( pulsar_client ):
+        """Use remote metadata datatypes instead of Galaxy's.
+
+        When setting remote metadata, use integrated datatypes from this
+        Galaxy instance or use the datatypes config configured via the remote
+        Pulsar.
+
+        Both options are broken in different ways for same reason - datatypes
+        may not match. One can push the local datatypes config to the remote
+        server - but there is no guarentee these datatypes will be defined
+        there. Alternatively, one can use the remote datatype config - but
+        there is no guarentee that it will contain all the datatypes available
+        to this Galaxy.
+        """
+        use_remote_datatypes = string_as_bool_or_none( pulsar_client.destination_params.get( "use_remote_datatypes", False ) )
+        return use_remote_datatypes
+
+    @staticmethod
+    def __rewrite_parameters( pulsar_client ):
+        return string_as_bool_or_none( pulsar_client.destination_params.get( "rewrite_parameters", False ) ) or False
+
+    def __build_metadata_configuration(self, client, job_wrapper, remote_metadata, remote_job_config):
+        metadata_kwds = {}
+        if remote_metadata:
+            remote_system_properties = remote_job_config.get("system_properties", {})
+            remote_galaxy_home = remote_system_properties.get("galaxy_home", None)
+            if not remote_galaxy_home:
+                raise Exception(NO_REMOTE_GALAXY_FOR_METADATA_MESSAGE)
+            metadata_kwds['exec_dir'] = remote_galaxy_home
+            outputs_directory = remote_job_config['outputs_directory']
+            configs_directory = remote_job_config['configs_directory']
+            working_directory = remote_job_config['working_directory']
+            metadata_directory = remote_job_config['metadata_directory']
+            # For metadata calculation, we need to build a list of output
+            # file objects with the real path indicating the location on the
+            # Galaxy server and the false path indicating the location on the
+            # compute server. Since Pulsar disables from_work_dir copying as
+            # part of the job command line, we need to take the list of output
+            # locations on the Pulsar server (produced by
+            # self.get_output_files(job_wrapper)) and, for each work_dir
+            # output, substitute the effective path on the Pulsar server,
+            # relative to the remote working directory, as the false_path to
+            # send to the metadata command generation module.
+            work_dir_outputs = self.get_work_dir_outputs(job_wrapper, tool_working_directory=working_directory)
+            outputs = [Bunch(false_path=os.path.join(outputs_directory, os.path.basename(path)), real_path=path) for path in self.get_output_files(job_wrapper)]
+            for output in outputs:
+                for pulsar_workdir_path, real_path in work_dir_outputs:
+                    if real_path == output.real_path:
+                        output.false_path = pulsar_workdir_path
+            metadata_kwds['output_fnames'] = outputs
+            metadata_kwds['compute_tmp_dir'] = metadata_directory
+            metadata_kwds['config_root'] = remote_galaxy_home
+            default_config_file = os.path.join(remote_galaxy_home, 'config/galaxy.ini')
+            metadata_kwds['config_file'] = remote_system_properties.get('galaxy_config_file', default_config_file)
+            metadata_kwds['dataset_files_path'] = remote_system_properties.get('galaxy_dataset_files_path', None)
+            if PulsarJobRunner.__use_remote_datatypes_conf( client ):
+                remote_datatypes_config = remote_system_properties.get('galaxy_datatypes_config_file', None)
+                if not remote_datatypes_config:
+                    log.warning(NO_REMOTE_DATATYPES_CONFIG)
+                    remote_datatypes_config = os.path.join(remote_galaxy_home, 'datatypes_conf.xml')
+                metadata_kwds['datatypes_config'] = remote_datatypes_config
+            else:
+                integrated_datatypes_config = self.app.datatypes_registry.integrated_datatypes_configs
+                # Ensure this file gets pushed out to the remote config dir.
+                job_wrapper.extra_filenames.append(integrated_datatypes_config)
+
+                metadata_kwds['datatypes_config'] = os.path.join(configs_directory, os.path.basename(integrated_datatypes_config))
+        return metadata_kwds
+
+
+class PulsarLegacyJobRunner( PulsarJobRunner ):
+    """Flavor of Pulsar job runner mimicking behavior of old LWR runner."""
+
+    destination_defaults = dict(
+        rewrite_parameters="false",
+        dependency_resolution="local",
+    )
+
+
+class PulsarMQJobRunner( PulsarJobRunner ):
+    """Flavor of Pulsar job runner with sensible defaults for message queue communication."""
+
+    destination_defaults = dict(
+        default_file_action="remote_transfer",
+        rewrite_parameters="true",
+        dependency_resolution="remote",
+        jobs_directory=PARAMETER_SPECIFICATION_REQUIRED,
+        url=PARAMETER_SPECIFICATION_IGNORED,
+        private_token=PARAMETER_SPECIFICATION_IGNORED
+    )
+
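+    # Hedged example (job_conf.xml syntax assumed for this release, plugin id
+    # hypothetical): a destination for this runner must define jobs_directory,
+    # while url and private_token are ignored if supplied:
+    #
+    #   <destination id="pulsar_mq_dest" runner="pulsar_mq">
+    #       <param id="jobs_directory">/remote/pulsar/staging</param>
+    #   </destination>
+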
+    def _monitor( self ):
+        # This is a message-queue-driven runner; don't monitor,
+        # just set up the required callbacks.
+        self.client_manager.ensure_has_status_update_callback(self.__async_update)
+        self.client_manager.ensure_has_ack_consumers()
+
+    def __async_update( self, full_status ):
+        job_id = None
+        try:
+            job_id = full_status[ "job_id" ]
+            job, job_wrapper = self.app.job_manager.job_handler.job_queue.job_pair_for_id( job_id )
+            job_state = self._job_state( job, job_wrapper )
+            self._update_job_state_for_status(job_state, full_status[ "status" ] )
+        except Exception:
+            log.exception( "Failed to update Pulsar job status for job_id %s" % job_id )
+            raise
+            # Nothing else to do? - Attempt to fail the job?
+
+
+class PulsarRESTJobRunner( PulsarJobRunner ):
+    """Flavor of Pulsar job runner with sensible defaults for RESTful usage."""
+
+    destination_defaults = dict(
+        default_file_action="transfer",
+        rewrite_parameters="true",
+        dependency_resolution="remote",
+        url=PARAMETER_SPECIFICATION_REQUIRED,
+    )
+
+
+class PulsarEmbeddedJobRunner(PulsarJobRunner):
+    """Flavor of Puslar job runnner that runs Pulsar's server code directly within Galaxy.
+
+    This is an appropriate job runner for when the desire is to use Pulsar staging
+    but their is not need to run a remote service.
+    """
+
+    destination_defaults = dict(
+        default_file_action="copy",
+        rewrite_parameters="true",
+        dependency_resolution="remote",
+    )
+    default_build_pulsar_app = True
+
+
+class PulsarComputeEnvironment( ComputeEnvironment ):
+
+    def __init__( self, pulsar_client, job_wrapper, remote_job_config ):
+        self.pulsar_client = pulsar_client
+        self.job_wrapper = job_wrapper
+        self.local_path_config = job_wrapper.default_compute_environment()
+        self.unstructured_path_rewrites = {}
+        # job_wrapper.prepare is going to expunge the job backing the following
+        # computations, so precalculate these paths.
+        self._wrapper_input_paths = self.local_path_config.input_paths()
+        self._wrapper_output_paths = self.local_path_config.output_paths()
+        self.path_mapper = PathMapper(pulsar_client, remote_job_config, self.local_path_config.working_directory())
+        self._config_directory = remote_job_config[ "configs_directory" ]
+        self._working_directory = remote_job_config[ "working_directory" ]
+        self._sep = remote_job_config[ "system_properties" ][ "separator" ]
+        self._tool_dir = remote_job_config[ "tools_directory" ]
+        version_path = self.local_path_config.version_path()
+        new_version_path = self.path_mapper.remote_version_path_rewrite(version_path)
+        if new_version_path:
+            version_path = new_version_path
+        self._version_path = version_path
+
+    def output_paths( self ):
+        local_output_paths = self._wrapper_output_paths
+
+        results = []
+        for local_output_path in local_output_paths:
+            wrapper_path = str( local_output_path )
+            remote_path = self.path_mapper.remote_output_path_rewrite( wrapper_path )
+            results.append( self._dataset_path( local_output_path, remote_path ) )
+        return results
+
+    def input_paths( self ):
+        local_input_paths = self._wrapper_input_paths
+
+        results = []
+        for local_input_path in local_input_paths:
+            wrapper_path = str( local_input_path )
+            # This will over-copy in some cases. For instance in the case of task
+            # splitting, this input will be copied even though only the work dir
+            # input will actually be used.
+            remote_path = self.path_mapper.remote_input_path_rewrite( wrapper_path )
+            results.append( self._dataset_path( local_input_path, remote_path ) )
+        return results
+
+    def _dataset_path( self, local_dataset_path, remote_path ):
+        remote_extra_files_path = None
+        if remote_path:
+            remote_extra_files_path = "%s_files" % remote_path[ 0:-len( ".dat" ) ]
+        return local_dataset_path.with_path_for_job( remote_path, remote_extra_files_path )
+
+    def working_directory( self ):
+        return self._working_directory
+
+    def config_directory( self ):
+        return self._config_directory
+
+    def new_file_path( self ):
+        return self.working_directory()  # Problems with doing this?
+
+    def sep( self ):
+        return self._sep
+
+    def version_path( self ):
+        return self._version_path
+
+    def rewriter( self, parameter_value ):
+        unstructured_path_rewrites = self.unstructured_path_rewrites
+        if parameter_value in unstructured_path_rewrites:
+            # Path previously mapped, use previous mapping.
+            return unstructured_path_rewrites[ parameter_value ]
+        if parameter_value in unstructured_path_rewrites.values():
+            # Path is a rewritten remote path (this might never occur,
+            # consider dropping check...)
+            return parameter_value
+
+        rewrite, new_unstructured_path_rewrites = self.path_mapper.check_for_arbitrary_rewrite( parameter_value )
+        if rewrite:
+            unstructured_path_rewrites.update(new_unstructured_path_rewrites)
+            return rewrite
+        else:
+            # No rewrite was needed; use the original path or value.
+            return parameter_value
+
+    def unstructured_path_rewriter( self ):
+        return self.rewriter
+
+    def tool_directory( self ):
+        return self._tool_dir
+
+
+class UnsupportedPulsarException(Exception):
+
+    def __init__(self):
+        super(UnsupportedPulsarException, self).__init__(UPGRADE_PULSAR_ERROR)
diff --git a/lib/galaxy/jobs/runners/slurm.py b/lib/galaxy/jobs/runners/slurm.py
new file mode 100644
index 0000000..1a9a2cf
--- /dev/null
+++ b/lib/galaxy/jobs/runners/slurm.py
@@ -0,0 +1,150 @@
+"""
+SLURM job control via the DRMAA API.
+"""
+import logging
+import os
+import subprocess
+import time
+
+from galaxy import model
+from galaxy.jobs.runners.drmaa import DRMAAJobRunner
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'SlurmJobRunner', )
+
+SLURM_MEMORY_LIMIT_EXCEEDED_MSG = 'slurmstepd: error: Exceeded job memory limit'
+SLURM_MEMORY_LIMIT_EXCEEDED_PARTIAL_WARNINGS = [': Exceeded job memory limit at some point.',
+                                                ': Exceeded step memory limit at some point.']
+SLURM_MEMORY_LIMIT_SCAN_SIZE = 16 * 1024 * 1024  # 16MB
+
+
+class SlurmJobRunner( DRMAAJobRunner ):
+    runner_name = "SlurmRunner"
+    restrict_job_name_length = False
+
+    def _complete_terminal_job( self, ajs, drmaa_state, **kwargs ):
+        def _get_slurm_state_with_sacct(job_id, cluster):
+            cmd = ['sacct', '-n', '-o state']
+            if cluster:
+                cmd.extend( [ '-M', cluster ] )
+            cmd.extend(['-j', "%s.batch" % job_id])
+            p = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE )
+            stdout, stderr = p.communicate()
+            if p.returncode != 0:
+                stderr = stderr.strip()
+                if stderr == 'SLURM accounting storage is disabled':
+                    log.warning('SLURM accounting storage is not properly configured, unable to run sacct')
+                    return
+                raise Exception( '`%s` returned %s, stderr: %s' % ( ' '.join( cmd ), p.returncode, stderr ) )
+            return stdout.strip()
+
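+        # Hedged illustration (sacct output format assumed): for job id "123"
+        # on cluster "c1", the helper above runs roughly
+        #   sacct -n -o state -M c1 -j 123.batch
+        # and returns the stripped state token, e.g. "COMPLETED" or "CANCELLED".
+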
+        def _get_slurm_state():
+            cmd = [ 'scontrol', '-o' ]
+            if '.' in ajs.job_id:
+                # custom slurm-drmaa-with-cluster-support job id syntax
+                job_id, cluster = ajs.job_id.split('.', 1)
+                cmd.extend( [ '-M', cluster ] )
+            else:
+                job_id = ajs.job_id
+                cluster = None
+            cmd.extend( [ 'show', 'job', job_id ] )
+            p = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE )
+            stdout, stderr = p.communicate()
+            if p.returncode != 0:
+                # Will need to be more clever here if this message is not consistent
+                if stderr == 'slurm_load_jobs error: Invalid job id specified\n':
+                    # The job may be old, try to get its state with sacct
+                    job_state = _get_slurm_state_with_sacct(job_id, cluster)
+                    if job_state:
+                        return job_state
+                    return 'NOT_FOUND'
+                raise Exception( '`%s` returned %s, stderr: %s' % ( ' '.join( cmd ), p.returncode, stderr ) )
+            job_info_dict = dict( [ out_param.split( '=', 1 ) for out_param in stdout.split() ] )
+            return job_info_dict['JobState']
+
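+        # Hedged illustration (typical scontrol output assumed):
+        # `scontrol -o show job <id>` emits a single line of KEY=VALUE pairs,
+        # e.g. "JobId=123 JobName=g123 ... JobState=COMPLETING ...", which the
+        # dict() built above reduces to {'JobId': '123', ..., 'JobState': 'COMPLETING'}.
+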
+        try:
+            if drmaa_state == self.drmaa_job_states.FAILED:
+                slurm_state = _get_slurm_state()
+                sleep = 1
+                while slurm_state == 'COMPLETING':
+                    log.debug( '(%s/%s) Waiting %s seconds for failed job to exit COMPLETING state for post-mortem', ajs.job_wrapper.get_id_tag(), ajs.job_id, sleep )
+                    time.sleep( sleep )
+                    sleep *= 2
+                    if sleep > 64:
+                        ajs.fail_message = "This job failed and the system timed out while trying to determine the cause of the failure."
+                        break
+                    slurm_state = _get_slurm_state()
+                if slurm_state == 'NOT_FOUND':
+                    log.warning( '(%s/%s) Job not found, assuming the state check exceeded MinJobAge and completing the job as successful', ajs.job_wrapper.get_id_tag(), ajs.job_id )
+                    drmaa_state = self.drmaa_job_states.DONE
+                elif slurm_state == 'TIMEOUT':
+                    log.info( '(%s/%s) Job hit walltime', ajs.job_wrapper.get_id_tag(), ajs.job_id )
+                    ajs.fail_message = "This job was terminated because it ran longer than the maximum allowed job run time."
+                    ajs.runner_state = ajs.runner_states.WALLTIME_REACHED
+                elif slurm_state == 'NODE_FAIL':
+                    log.warning( '(%s/%s) Job failed due to node failure, attempting resubmission', ajs.job_wrapper.get_id_tag(), ajs.job_id )
+                    ajs.job_wrapper.change_state( model.Job.states.QUEUED, info='Job was resubmitted due to node failure' )
+                    try:
+                        self.queue_job( ajs.job_wrapper )
+                        return
+                    except Exception:
+                        ajs.fail_message = "This job failed due to a cluster node failure, and an attempt to resubmit the job failed."
+                elif slurm_state == 'CANCELLED':
+                    # Check to see if the job was killed for exceeding memory consumption
+                    if self.__check_memory_limit( ajs.error_file ):
+                        log.info( '(%s/%s) Job hit memory limit', ajs.job_wrapper.get_id_tag(), ajs.job_id )
+                        ajs.fail_message = "This job was terminated because it used more memory than it was allocated."
+                        ajs.runner_state = ajs.runner_states.MEMORY_LIMIT_REACHED
+                    else:
+                        log.info( '(%s/%s) Job was cancelled via slurm (e.g. with scancel(1))', ajs.job_wrapper.get_id_tag(), ajs.job_id )
+                        ajs.fail_message = "This job failed because it was cancelled by an administrator."
+                elif slurm_state in ('PENDING', 'RUNNING'):
+                    log.warning( '(%s/%s) Job was reported by drmaa as terminal but job state in SLURM is: %s, returning to monitor queue', ajs.job_wrapper.get_id_tag(), ajs.job_id, slurm_state )
+                    return True
+                else:
+                    log.warning( '(%s/%s) Job failed due to unknown reasons, job state in SLURM was: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, slurm_state )
+                    ajs.fail_message = "This job failed for reasons that could not be determined."
+                if drmaa_state == self.drmaa_job_states.FAILED:
+                    ajs.fail_message += '\nPlease click the bug icon to report this problem if you need help.'
+                    ajs.stop_job = False
+                    self.work_queue.put( ( self.fail_job, ajs ) )
+                    return
+            if drmaa_state == self.drmaa_job_states.DONE:
+                with open(ajs.error_file, 'r+') as f:
+                    if os.path.getsize(ajs.error_file) > SLURM_MEMORY_LIMIT_SCAN_SIZE:
+                        f.seek(-SLURM_MEMORY_LIMIT_SCAN_SIZE, os.SEEK_END)
+                        f.readline()
+                    pos = f.tell()
+                    lines = f.readlines()
+                    f.seek(pos)
+                    for line in lines:
+                        stripped_line = line.strip()
+                        if any(_ in stripped_line for _ in SLURM_MEMORY_LIMIT_EXCEEDED_PARTIAL_WARNINGS):
+                            log.debug( '(%s/%s) Job completed, removing SLURM exceeded memory warning: "%s"', ajs.job_wrapper.get_id_tag(), ajs.job_id, stripped_line )
+                        else:
+                            f.write(line)
+                    f.truncate()
+        except Exception:
+            log.exception( '(%s/%s) Failure in SLURM _complete_terminal_job(), job final state will be: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, drmaa_state )
+        # by default, finish the job with the state from drmaa
+        return super( SlurmJobRunner, self )._complete_terminal_job( ajs, drmaa_state=drmaa_state )
+
+    def __check_memory_limit( self, efile_path ):
+        """
+        A very poor implementation of tail, but it doesn't need to be fancy
+        since we are only searching the last 2K
+        """
+        try:
+            log.debug( 'Checking %s for exceeded memory message from slurm', efile_path )
+            with open( efile_path ) as f:
+                if os.path.getsize(efile_path) > 2048:
+                    f.seek(-2048, os.SEEK_END)
+                    f.readline()
+                for line in f.readlines():
+                    if line.strip() == SLURM_MEMORY_LIMIT_EXCEEDED_MSG:
+                        return True
+        except Exception:
+            log.exception('Error reading end of %s:', efile_path)
+
+        return False
diff --git a/lib/galaxy/jobs/runners/state_handler_factory.py b/lib/galaxy/jobs/runners/state_handler_factory.py
new file mode 100644
index 0000000..6f1d24d
--- /dev/null
+++ b/lib/galaxy/jobs/runners/state_handler_factory.py
@@ -0,0 +1,23 @@
+import logging
+
+from galaxy.util.submodules import submodules
+
+import galaxy.jobs.runners.state_handlers
+
+
+log = logging.getLogger(__name__)
+
+
+def build_state_handlers():
+    return _get_state_handlers_dict()
+
+
+def _get_state_handlers_dict():
+    state_handlers = {}
+    for module in submodules(galaxy.jobs.runners.state_handlers):
+        for func in module.__all__:
+            if func not in state_handlers:
+                state_handlers[func] = []
+            state_handlers[func].append(getattr(module, func))
+            log.debug("Loaded '%s' state handler from module %s", func, module.__name__)
+    return state_handlers
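+
+# Hedged illustration: with the bundled resubmit state handler below providing
+# __all__ = ('failure', ), the mapping returned here looks like
+#   {'failure': [<function failure>]}
+# so callers can look up every handler registered for a given event name.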
diff --git a/lib/galaxy/jobs/runners/state_handlers/__init__.py b/lib/galaxy/jobs/runners/state_handlers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/jobs/runners/state_handlers/resubmit.py b/lib/galaxy/jobs/runners/state_handlers/resubmit.py
new file mode 100644
index 0000000..007aaea
--- /dev/null
+++ b/lib/galaxy/jobs/runners/state_handlers/resubmit.py
@@ -0,0 +1,67 @@
+import logging
+
+__all__ = ('failure', )
+
+log = logging.getLogger(__name__)
+
+MESSAGES = dict(
+    walltime_reached='it reached the walltime',
+    memory_limit_reached='it exceeded the amount of allocated memory'
+)
+
+
+def failure(app, job_runner, job_state):
+    runner_state = getattr(job_state, 'runner_state', None)
+    if (not runner_state or
+        runner_state not in (job_state.runner_states.WALLTIME_REACHED,
+                             job_state.runner_states.MEMORY_LIMIT_REACHED)):
+        # not set or not a handleable runner state
+        return
+    # Intercept jobs that hit the walltime and have a walltime or
+    # nonspecific resubmit destination configured
+    for resubmit in job_state.job_destination.get('resubmit'):
+        condition = resubmit.get('condition', None)
+        if condition and condition != runner_state:
+            # There is a resubmit defined for the destination but
+            # its condition is not for the encountered state
+            continue
+        log.info("(%s/%s) Job will be resubmitted to '%s' because %s at "
+                 "the '%s' destination",
+                 job_state.job_wrapper.job_id,
+                 job_state.job_id,
+                 resubmit['destination'],
+                 MESSAGES[job_state.runner_state],
+                 job_state.job_wrapper.job_destination.id )
+        # fetch JobDestination for the id or tag
+        new_destination = app.job_config.get_destination(
+            resubmit['destination'])
+        # Resolve dynamic if necessary
+        new_destination = (job_state.job_wrapper.job_runner_mapper
+                           .cache_job_destination(new_destination))
+        # Reset job state
+        job_state.job_wrapper.clear_working_directory()
+        job_state.job_wrapper.invalidate_external_metadata()
+        job = job_state.job_wrapper.get_job()
+        if resubmit.get('handler', None):
+            log.debug('(%s/%s) Job reassigned to handler %s',
+                      job_state.job_wrapper.job_id, job_state.job_id,
+                      resubmit['handler'])
+            job.set_handler(resubmit['handler'])
+            job_runner.sa_session.add( job )
+            # Is this safe to do here?
+            job_runner.sa_session.flush()
+        # Cache the destination to prevent rerunning dynamic after
+        # resubmit
+        job_state.job_wrapper.job_runner_mapper \
+            .cached_job_destination = new_destination
+        job_state.job_wrapper.set_job_destination(new_destination)
+        # Clear external ID (state change below flushes the change)
+        job.job_runner_external_id = None
+        # Allow the UI to query for resubmitted state
+        if job.params is None:
+            job.params = {}
+        job_state.runner_state_handled = True
+        info = "This job was resubmitted to the queue because %s on its " \
+               "compute resource." % MESSAGES[job_state.runner_state]
+        job_runner.mark_as_resubmitted(job_state, info=info)
+        return
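+
+# Hedged example (job_conf.xml resubmit syntax assumed for this release): a
+# destination whose walltime failures this handler would reroute:
+#
+#   <destination id="short_slurm" runner="slurm">
+#       <resubmit condition="walltime_reached" destination="long_slurm"/>
+#   </destination>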
diff --git a/lib/galaxy/jobs/runners/tasks.py b/lib/galaxy/jobs/runners/tasks.py
new file mode 100644
index 0000000..a312481
--- /dev/null
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -0,0 +1,246 @@
+import errno
+import logging
+import os
+from time import sleep
+
+from galaxy import model
+from galaxy.jobs import TaskWrapper
+from galaxy.jobs.runners import BaseJobRunner
+
+log = logging.getLogger( __name__ )
+
+__all__ = ( 'TaskedJobRunner', )
+
+
+class TaskedJobRunner( BaseJobRunner ):
+    """
+    Job runner backed by a finite pool of worker threads. FIFO scheduling
+    """
+    runner_name = "TaskRunner"
+
+    def __init__( self, app, nworkers ):
+        """Start the job runner with 'nworkers' worker threads"""
+        super( TaskedJobRunner, self ).__init__( app, nworkers )
+        self._init_worker_threads()
+
+    def queue_job( self, job_wrapper ):
+        # prepare the job
+        if not self.prepare_job( job_wrapper ):
+            return
+
+        # command line has been added to the wrapper by prepare_job()
+        command_line = job_wrapper.runner_command_line
+
+        stderr = stdout = ''
+
+        # Persist the destination
+        job_wrapper.set_job_destination(job_wrapper.job_destination)
+
+        # This is the job's exit code, which will depend on the tasks'
+        # exit code. The overall job's exit code will be one of two values:
+        # o if the job is successful, then the last task scanned will be
+        #   used to determine the exit code. Note that this is not the same
+        #   thing as the last task to complete, which could be added later.
+        # o if a task fails, then the job will fail and the failing task's
+        #   exit code will become the job's exit code.
+        job_exit_code = None
+
+        try:
+            job_wrapper.change_state( model.Job.states.RUNNING )
+            self.sa_session.flush()
+            # Split with the defined method.
+            parallelism = job_wrapper.get_parallelism()
+            try:
+                splitter = getattr(__import__('galaxy.jobs.splitters', globals(), locals(), [parallelism.method]), parallelism.method)
+            except Exception:
+                job_wrapper.change_state( model.Job.states.ERROR )
+                job_wrapper.fail("Job Splitting Failed, no match for '%s'" % parallelism)
+                return
+            tasks = splitter.do_split(job_wrapper)
+            # Not an option for now.  Task objects don't *do* anything
+            # useful yet, but we'll want them tracked outside this thread
+            # to do anything.
+            # if track_tasks_in_database:
+            task_wrappers = []
+            for task in tasks:
+                self.sa_session.add(task)
+            self.sa_session.flush()
+            # Must flush prior to the creation and queueing of task wrappers.
+            for task in tasks:
+                tw = TaskWrapper(task, job_wrapper.queue)
+                task_wrappers.append(tw)
+                self.app.job_manager.job_handler.dispatcher.put(tw)
+            tasks_complete = False
+            count_complete = 0
+            sleep_time = 1
+            # Sleep/loop until no more progress can be made, that is, until
+            # all tasks are in one of { OK, ERROR, DELETED }.
+            completed_states = [ model.Task.states.OK,
+                                 model.Task.states.ERROR,
+                                 model.Task.states.DELETED ]
+
+            # TODO: Should we report an error (and not merge outputs) if
+            # one of the subtasks errored out?  Should we prevent any that
+            # are pending from being started in that case?
+            # If any task has an error, then we will stop all of them
+            # immediately. Tasks that are in the QUEUED state will be
+            # moved to the DELETED state. The task's runner should
+            # ignore tasks that are not in the QUEUED state.
+            # Deleted tasks are not included right now.
+            #
+            while tasks_complete is False:
+                count_complete = 0
+                tasks_complete = True
+                for tw in task_wrappers:
+                    task_state = tw.get_state()
+                    if ( model.Task.states.ERROR == task_state ):
+                        job_exit_code = tw.get_exit_code()
+                        log.debug( "Canceling job %d: Task %s returned an error"
+                                   % ( tw.job_id, tw.task_id ) )
+                        self._cancel_job( job_wrapper, task_wrappers )
+                        tasks_complete = True
+                        break
+                    elif task_state not in completed_states:
+                        tasks_complete = False
+                    else:
+                        job_exit_code = tw.get_exit_code()
+                        count_complete = count_complete + 1
+                if tasks_complete is False:
+                    sleep( sleep_time )
+                    if sleep_time < 8:
+                        sleep_time *= 2
+            job_wrapper.reclaim_ownership()      # if running as the actual user, change ownership before merging.
+            log.debug('execution finished - beginning merge: %s' % command_line)
+            stdout, stderr = splitter.do_merge(job_wrapper, task_wrappers)
+        except Exception:
+            job_wrapper.fail( "failure running job", exception=True )
+            log.exception("failure running job %d" % job_wrapper.job_id)
+            return
+
+        # run the metadata setting script here
+        # this is terminate-able when output dataset/job is deleted
+        # so that long running set_meta()s can be canceled without having to reboot the server
+        self._handle_metadata_externally(job_wrapper, resolve_requirements=True )
+        # Finish the job
+        try:
+            job_wrapper.finish( stdout, stderr, job_exit_code )
+        except Exception:
+            log.exception("Job wrapper finish method failed")
+            job_wrapper.fail("Unable to finish job", exception=True)
+
+    def stop_job( self, job ):
+        # We need to stop all subtasks. This is going to stay in the task
+        # runner because the task runner also starts all the tasks.
+        # First, get the list of tasks from job.tasks, which uses SQLAlchemy
+        # to retrieve a job's list of tasks.
+        tasks = job.get_tasks()
+        if ( len( tasks ) > 0 ):
+            for task in tasks:
+                log.debug( "Killing task's job " + str(task.get_id()) )
+                self.app.job_manager.job_handler.dispatcher.stop(task)
+
+        # There were no subtasks, so just kill the job. We'll touch
+        # this if the tasks runner is used but the tool does not use
+        # parallelism.
+        else:
+            # if our local job has JobExternalOutputMetadata associated, then our primary job has to have already finished
+            if job.external_output_metadata:
+                pid = job.external_output_metadata[0].job_runner_external_pid  # every JobExternalOutputMetadata has a pid set; we just need to take it from one of them
+            else:
+                pid = job.job_runner_external_id
+            if pid in [ None, '' ]:
+                log.warning( "stop_job(): %s: no PID in database for job, unable to stop" % job.id )
+                return
+            self._stop_pid( pid, job.id )
+
+    def recover( self, job, job_wrapper ):
+        # DBTODO Task Recovery, this should be possible.
+        job_wrapper.change_state( model.Job.states.ERROR, info="This job was killed when Galaxy was restarted.  Please retry the job." )
+
+    def _cancel_job( self, job_wrapper, task_wrappers ):
+        """
+        Cancel the given job. The job's state will be set to ERROR.
+        Any running tasks will be cancelled, and any queued/pending
+        tasks will be marked as DELETED so that runners know not
+        to run those tasks.
+        """
+        job = job_wrapper.get_job()
+        job.set_state( model.Job.states.ERROR )
+
+        # For every task (except the one that already had an error)
+        #       - If the task is queued, then mark it as deleted
+        #         so that the runner will not run it later. (It would
+        #         be great to remove stuff from a runner's queue before
+        #         the runner picks it up, but that isn't possible in
+        #         most APIs.)
+        #       - If the task is running, then tell the runner
+        #         (via the dispatcher) to cancel the task.
+        #       - Else the task is new or waiting (which should be
+        #         impossible) or in an error or deleted state already,
+        #         so skip it.
+        # This is currently done in two loops. If a running task is
+        # cancelled, then a queued task could take its place before
+        # it's marked as deleted.
+        # TODO: Eliminate the chance of a race condition wrt state.
+        for task_wrapper in task_wrappers:
+            task = task_wrapper.get_task()
+            task_state = task.get_state()
+            if ( model.Task.states.QUEUED == task_state ):
+                log.debug( "_cancel_job for job %d: Task %d is not running; setting state to DELETED"
+                           % ( job.get_id(), task.get_id() ) )
+                task_wrapper.change_state( task.states.DELETED )
+        # If a task failed, then the caller will have waited a few seconds
+        # before recognizing the failure. In that time, a queued task could
+        # have been picked up by a runner but not marked as running.
+        # So wait a few seconds so that we can eliminate such tasks once they
+        # are running.
+        sleep(5)
+        for task_wrapper in task_wrappers:
+            if ( model.Task.states.RUNNING == task_wrapper.get_state() ):
+                task = task_wrapper.get_task()
+                log.debug( "_cancel_job for job %d: Stopping running task %d"
+                           % ( job.get_id(), task.get_id() ) )
+                job_wrapper.app.job_manager.job_handler.dispatcher.stop( task )
+
+    def _check_pid( self, pid ):
+        # DBTODO Need to check all subtask pids and return some sort of cumulative result.
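+        # NOTE: the unconditional return below disables the check for now;
+        # the os.kill()-based probe that follows is unreachable until
+        # per-subtask PID tracking is implemented (see DBTODO above).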
+        return True
+        try:
+            os.kill( pid, 0 )
+            return True
+        except OSError as e:
+            if e.errno == errno.ESRCH:
+                log.debug( "_check_pid(): PID %d is dead" % pid )
+            else:
+                log.warning( "_check_pid(): Got errno %s when attempting to check PID %d: %s" % ( errno.errorcode[e.errno], pid, e.strerror ) )
+            return False
+
+    def _stop_pid( self, pid, job_id ):
+        """
+        This method stops the given process id whether it's a task or job.
+        It is meant to be a private helper method, but it is mostly reusable.
+        The first argument is the process id to stop, and the second is the
+        job's id (which is currently used only for logging messages).
+        """
+        pid = int( pid )
+        log.debug( "Stopping pid %s" % pid )
+        if not self._check_pid( pid ):
+            log.warning( "_stop_pid(): %s: PID %d was already dead or can't be signaled" % ( job_id, pid ) )
+            return
+        for sig in [ 15, 9 ]:
+            try:
+                os.killpg( pid, sig )
+            except OSError as e:
+                # This warning could be bogus; many tasks are stopped with
+                # SIGTERM (signal 15), but ymmv depending on the platform.
+                log.warning( "_stop_pid(): %s: Got errno %s when attempting to signal %d to PID %d: %s" % ( job_id, errno.errorcode[e.errno], sig, pid, e.strerror ) )
+                return
+            # TODO: If we're stopping lots of tasks, we will want to avoid
+            # this two-second overhead per signal by using some other
+            # asynchronous method.
+            sleep( 2 )
+            if not self._check_pid( pid ):
+                log.debug( "_stop_pid(): %s: PID %d successfully killed with signal %d" % ( job_id, pid, sig ) )
+                return
+        else:
+            log.warning( "_stop_pid(): %s: PID %d refuses to die after signaling TERM/KILL" % ( job_id, pid ) )
diff --git a/lib/galaxy/jobs/runners/util/__init__.py b/lib/galaxy/jobs/runners/util/__init__.py
new file mode 100644
index 0000000..c0bd4da
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/__init__.py
@@ -0,0 +1,10 @@
+"""
+This module and its submodules contain utilities for running external
+processes and interfacing with job managers. This module should contain
+functionality shared between Galaxy and Pulsar.
+"""
+from galaxy.util.bunch import Bunch
+
+from .kill import kill_pid
+
+__all__ = ('kill_pid', 'Bunch')
diff --git a/lib/galaxy/jobs/runners/util/cli/__init__.py b/lib/galaxy/jobs/runners/util/cli/__init__.py
new file mode 100644
index 0000000..cdd2143
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/__init__.py
@@ -0,0 +1,73 @@
+"""
+"""
+from glob import glob
+from os.path import basename, join
+from os import getcwd
+
+DEFAULT_SHELL_PLUGIN = 'LocalShell'
+
+ERROR_MESSAGE_NO_JOB_PLUGIN = "No job plugin parameter found, cannot create CLI job interface"
+ERROR_MESSAGE_NO_SUCH_JOB_PLUGIN = "Failed to find job_plugin of type %s, available types include %s"
+
+
+class CliInterface(object):
+    """
+    High-level interface for loading shell and job plugins and matching
+    them to specified parameters.
+    """
+
+    def __init__(self, code_dir='lib'):
+        """
+        """
+        def __load(module_path, d):
+            module_pattern = join(join(getcwd(), code_dir, *module_path.split('.')), '*.py')
+            for file in glob(module_pattern):
+                if basename(file).startswith('_'):
+                    continue
+                module_name = '%s.%s' % (module_path, basename(file).rsplit('.py', 1)[0])
+                module = __import__(module_name)
+                for comp in module_name.split(".")[1:]:
+                    module = getattr(module, comp)
+                for name in module.__all__:
+                    try:
+                        d[name] = getattr(module, name)
+                    except TypeError:
+                        raise TypeError("Invalid type for name %s" % name)
+
+        self.cli_shells = {}
+        self.cli_job_interfaces = {}
+
+        module_prefix = self.__module__
+        __load('%s.shell' % module_prefix, self.cli_shells)
+        __load('%s.job' % module_prefix, self.cli_job_interfaces)
+
+    def get_plugins(self, shell_params, job_params):
+        """
+        Return the shell and job interface plugins selected and configured
+        by the specified params.
+        """
+        shell = self.get_shell_plugin(shell_params)
+        job_interface = self.get_job_interface(job_params)
+        return shell, job_interface
+
+    def get_shell_plugin(self, shell_params):
+        shell_plugin = shell_params.get('plugin', DEFAULT_SHELL_PLUGIN)
+        shell = self.cli_shells[shell_plugin](**shell_params)
+        return shell
+
+    def get_job_interface(self, job_params):
+        job_plugin = job_params.get('plugin', None)
+        if not job_plugin:
+            raise ValueError(ERROR_MESSAGE_NO_JOB_PLUGIN)
+        job_plugin_class = self.cli_job_interfaces.get(job_plugin, None)
+        if not job_plugin_class:
+            raise ValueError(ERROR_MESSAGE_NO_SUCH_JOB_PLUGIN % (job_plugin, self.cli_job_interfaces.keys()))
+        job_interface = job_plugin_class(**job_params)
+
+        return job_interface
+
+
+def split_params(params):
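+    """Split a flat parameter dict into (shell_params, job_params) by prefix.
+
+    >>> split_params({'shell_plugin': 'SecureShell', 'job_plugin': 'Torque'})
+    ({'plugin': 'SecureShell'}, {'plugin': 'Torque'})
+    """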
+    shell_params = dict((k.replace('shell_', '', 1), v) for k, v in params.items() if k.startswith('shell_'))
+    job_params = dict((k.replace('job_', '', 1), v) for k, v in params.items() if k.startswith('job_'))
+    return shell_params, job_params
diff --git a/lib/galaxy/jobs/runners/util/cli/factory.py b/lib/galaxy/jobs/runners/util/cli/factory.py
new file mode 100644
index 0000000..8955493
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/factory.py
@@ -0,0 +1,22 @@
+try:
+    from galaxy.jobs.runners.util.cli import (
+        CliInterface,
+        split_params
+    )
+    code_dir = 'lib'
+except ImportError:
+    from pulsar.managers.util.cli import (
+        CliInterface,
+        split_params
+    )
+    code_dir = '.'
+
+
+def build_cli_interface():
+    return CliInterface(code_dir=code_dir)
+
+
+def get_shell(params):
+    cli_interface = build_cli_interface()
+    shell_params, _ = split_params(params)
+    return cli_interface.get_shell_plugin(shell_params)
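+
+
+# Example (hypothetical values): param keys prefixed with 'shell_' select
+# and configure the shell plugin, e.g.
+#   get_shell({'shell_plugin': 'SecureShell', 'shell_hostname': 'cluster.example.org'})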
diff --git a/lib/galaxy/jobs/runners/util/cli/job/__init__.py b/lib/galaxy/jobs/runners/util/cli/job/__init__.py
new file mode 100644
index 0000000..6835066
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/job/__init__.py
@@ -0,0 +1,58 @@
+"""
+Abstract base class for cli job plugins.
+"""
+from abc import ABCMeta, abstractmethod
+
+
+class BaseJobExec(object):
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def __init__(self, **params):
+        """
+        Constructor for CLI job executor.
+        """
+
+    def job_script_kwargs(self, ofile, efile, job_name):
+        """ Return extra keyword argument for consumption by job script
+        module.
+        """
+        return {}
+
+    @abstractmethod
+    def submit(self, script_file):
+        """
+        Given specified script_file path, yield command to submit it
+        to external job manager.
+        """
+
+    @abstractmethod
+    def delete(self, job_id):
+        """
+        Given job id, return command to stop execution or dequeue specified
+        job.
+        """
+
+    @abstractmethod
+    def get_status(self, job_ids=None):
+        """
+        Return command to get statuses of specified job ids.
+        """
+
+    @abstractmethod
+    def get_single_status(self, job_id):
+        """
+        Return command to get the status of a single, specified job.
+        """
+
+    @abstractmethod
+    def parse_status(self, status, job_ids):
+        """
+        Parse the statuses of output from get_status command.
+        """
+
+    @abstractmethod
+    def parse_single_status(self, status, job_id):
+        """
+        Parse the status of output from get_single_status command.
+        """
diff --git a/lib/galaxy/jobs/runners/util/cli/job/slurm.py b/lib/galaxy/jobs/runners/util/cli/job/slurm.py
new file mode 100644
index 0000000..5ae3cb5
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/job/slurm.py
@@ -0,0 +1,98 @@
+# A simple CLI runner for slurm that can be used when running Galaxy from a
+# non-submit host and using a Slurm cluster.
+
+from ..job import BaseJobExec
+
+from logging import getLogger
+
+try:
+    from galaxy.model import Job
+    job_states = Job.states
+except ImportError:
+    # Not in Galaxy, map Galaxy job states to Pulsar ones.
+    from galaxy.util import enum
+    job_states = enum(RUNNING='running', OK='complete', QUEUED='queued', ERROR="failed")
+
+log = getLogger(__name__)
+
+argmap = {
+    'time': '-t',
+    'ncpus': '-c',
+    'partition': '-p'
+}
+
+
+class Slurm(BaseJobExec):
+
+    def __init__(self, **params):
+        self.params = {}
+        for k, v in params.items():
+            self.params[k] = v
+
+    def job_script_kwargs(self, ofile, efile, job_name):
+        scriptargs = {'-o': ofile,
+                      '-e': efile,
+                      '-J': job_name}
+
+        # Map arguments using argmap.
+        for k, v in self.params.items():
+            if k == 'plugin':
+                continue
+            try:
+                if not k.startswith('-'):
+                    k = argmap[k]
+                scriptargs[k] = v
+            except KeyError:
+                log.warning('Unrecognized long argument passed to Slurm CLI plugin: %s' % k)
+
+        # Generated template.
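+        # e.g. "#SBATCH -o /work/out\n#SBATCH -e /work/err\n#SBATCH -J job_1\n"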
+        template_scriptargs = ''
+        for k, v in scriptargs.items():
+            template_scriptargs += '#SBATCH %s %s\n' % (k, v)
+        return dict(headers=template_scriptargs)
+
+    def submit(self, script_file):
+        return 'sbatch %s' % script_file
+
+    def delete(self, job_id):
+        return 'scancel %s' % job_id
+
+    def get_status(self, job_ids=None):
+        return "squeue -a -o '%A %t'"
+
+    def get_single_status(self, job_id):
+        return "squeue -a -o '%A %t' -j " + job_id
+
+    def parse_status(self, status, job_ids):
+        # Get status for each job, skipping header.
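+        # Output of `squeue -a -o '%A %t'` looks like:
+        #   JOBID ST
+        #   12345 R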
+        rval = {}
+        for line in status.splitlines()[1:]:
+            id, state = line.split()
+            if id in job_ids:
+                # map job states to Galaxy job states.
+                rval[id] = self._get_job_state(state)
+        return rval
+
+    def parse_single_status(self, status, job_id):
+        status = status.splitlines()
+        if len(status) > 1:
+            # Job still on cluster and has state.
+            id, state = status[1].split()
+            return self._get_job_state(state)
+        # else line like "slurm_load_jobs error: Invalid job id specified"
+        return job_states.OK
+
+    def _get_job_state(self, state):
+        try:
+            return {
+                'F': job_states.ERROR,
+                'R': job_states.RUNNING,
+                'CG': job_states.RUNNING,
+                'PD': job_states.QUEUED,
+                'CD': job_states.OK
+            }[state]
+        except KeyError:
+            raise KeyError("Failed to map slurm status code [%s] to job state." % state)
+
+
+__all__ = ('Slurm',)
diff --git a/lib/galaxy/jobs/runners/util/cli/job/slurm_torque.py b/lib/galaxy/jobs/runners/util/cli/job/slurm_torque.py
new file mode 100644
index 0000000..b3de609
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/job/slurm_torque.py
@@ -0,0 +1,29 @@
+import re
+from .torque import Torque
+
+__all__ = ('SlurmTorque',)
+
+
+class SlurmTorque(Torque):
+    """ A CLI job executor for Slurm's Torque compatibility mode. This differs
+    from the real Torque CLI in that the -x option is not available, so job
+    status must be parsed from the qstat table rather than from XML.
+    """
+
+    def get_status(self, job_ids=None):
+        return 'qstat'
+
+    def parse_status(self, status, job_ids):
+        rval = {}
+        for line in status.strip().splitlines():
+            if line.startswith("Job ID"):
+                continue
+            line_parts = re.compile("\s+").split(line)
+            if len(line_parts) < 5:
+                continue
+            id = line_parts[0]
+            state = line_parts[4]
+            if id in job_ids:
+                # map PBS job states to Galaxy job states.
+                rval[id] = self._get_job_state(state)
+        return rval
diff --git a/lib/galaxy/jobs/runners/util/cli/job/torque.py b/lib/galaxy/jobs/runners/util/cli/job/torque.py
new file mode 100644
index 0000000..d4e0f49
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/job/torque.py
@@ -0,0 +1,124 @@
+from logging import getLogger
+try:
+    import xml.etree.cElementTree as et
+except ImportError:
+    import xml.etree.ElementTree as et
+
+try:
+    from galaxy.model import Job
+    job_states = Job.states
+except ImportError:
+    # Not in Galaxy, map Galaxy job states to Pulsar ones.
+    from galaxy.util import enum
+    job_states = enum(RUNNING='running', OK='complete', QUEUED='queued')
+
+from ..job import BaseJobExec
+
+log = getLogger(__name__)
+
+ERROR_MESSAGE_UNRECOGNIZED_ARG = 'Unrecognized long argument passed to Torque CLI plugin: %s'
+
+
+argmap = {'destination': '-q',
+          'Execution_Time': '-a',
+          'Account_Name': '-A',
+          'Checkpoint': '-c',
+          'Error_Path': '-e',
+          'Group_List': '-g',
+          'Hold_Types': '-h',
+          'Join_Paths': '-j',
+          'Keep_Files': '-k',
+          'Resource_List': '-l',
+          'Mail_Points': '-m',
+          'Mail_Users': '-M',
+          'Job_Name': '-N',
+          'Output_Path': '-o',
+          'Priority': '-p',
+          'Rerunable': '-r',
+          'Shell_Path_List': '-S',
+          'job_array_request': '-t',
+          'User_List': '-u',
+          'Variable_List': '-v'}
+
+
+class Torque(BaseJobExec):
+
+    def __init__(self, **params):
+        self.params = {}
+        for k, v in params.items():
+            self.params[k] = v
+
+    def job_script_kwargs(self, ofile, efile, job_name):
+        pbsargs = {'-o': ofile,
+                   '-e': efile,
+                   '-N': job_name}
+        for k, v in self.params.items():
+            if k == 'plugin':
+                continue
+            try:
+                if not k.startswith('-'):
+                    k = argmap[k]
+                pbsargs[k] = v
+            except KeyError:
+                log.warning(ERROR_MESSAGE_UNRECOGNIZED_ARG % k)
+        template_pbsargs = ''
+        for k, v in pbsargs.items():
+            template_pbsargs += '#PBS %s %s\n' % (k, v)
+        return dict(headers=template_pbsargs)
+
+    def submit(self, script_file):
+        return 'qsub %s' % script_file
+
+    def delete(self, job_id):
+        return 'qdel %s' % job_id
+
+    def get_status(self, job_ids=None):
+        return 'qstat -x'
+
+    def get_single_status(self, job_id):
+        return 'qstat -f %s' % job_id
+
+    def parse_status(self, status, job_ids):
+        # In case there's noise in the output, find the big blob of XML.
+        tree = None
+        rval = {}
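+        # `qstat -x` returns XML shaped like:
+        #   <Data><Job><Job_Id>1.host</Job_Id> ... <job_state>R</job_state></Job></Data>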
+        for line in status.strip().splitlines():
+            try:
+                tree = et.fromstring(line.strip())
+                assert tree.tag == 'Data'
+                break
+            except Exception:
+                tree = None
+        if tree is None:
+            log.warning('No valid qstat XML return from `qstat -x`, got the following: %s' % status)
+            return None
+        else:
+            for job in tree.findall('Job'):
+                id = job.find('Job_Id').text
+                if id in job_ids:
+                    state = job.find('job_state').text
+                    # map PBS job states to Galaxy job states.
+                    rval[id] = self._get_job_state(state)
+        return rval
+
+    def parse_single_status(self, status, job_id):
+        for line in status.splitlines():
+            line = line.split(' = ')
+            if line[0] == 'job_state':
+                return self._get_job_state(line[1].strip())
+        # no state found, job has exited
+        return job_states.OK
+
+    def _get_job_state(self, state):
+        try:
+            return {
+                'E': job_states.RUNNING,
+                'R': job_states.RUNNING,
+                'Q': job_states.QUEUED,
+                'C': job_states.OK
+            }[state]
+        except KeyError:
+            raise KeyError("Failed to map torque status code [%s] to job state." % state)
+
+
+__all__ = ('Torque',)
diff --git a/lib/galaxy/jobs/runners/util/cli/shell/__init__.py b/lib/galaxy/jobs/runners/util/cli/shell/__init__.py
new file mode 100644
index 0000000..e14f4a8
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/shell/__init__.py
@@ -0,0 +1,19 @@
+"""
+Abstract base class for runners which execute commands via a shell.
+"""
+from abc import ABCMeta, abstractmethod
+
+
+class BaseShellExec(object):
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def __init__(self, *args, **kwargs):
+        """
+        Constructor for shell executor instance.
+        """
+
+    def execute(self, cmd, persist=False, timeout=60):
+        """
+        Execute the specified command via the defined shell.
+        """
diff --git a/lib/galaxy/jobs/runners/util/cli/shell/local.py b/lib/galaxy/jobs/runners/util/cli/shell/local.py
new file mode 100644
index 0000000..9fc5904
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/shell/local.py
@@ -0,0 +1,61 @@
+from tempfile import TemporaryFile
+from time import sleep
+from subprocess import Popen, PIPE
+
+from ..shell import BaseShellExec
+from ....util import Bunch, kill_pid
+
+from logging import getLogger
+log = getLogger(__name__)
+
+TIMEOUT_ERROR_MESSAGE = u'Execution timed out'
+TIMEOUT_RETURN_CODE = -1
+DEFAULT_TIMEOUT = 60
+DEFAULT_TIMEOUT_CHECK_INTERVAL = 3
+
+
+class LocalShell(BaseShellExec):
+    """
+
+    >>> shell = LocalShell()
+    >>> def exec_python(script, **kwds): return shell.execute('python -c "%s"' % script, **kwds)
+    >>> exec_result = exec_python("from __future__ import print_function; print('Hello World')")
+    >>> exec_result.stderr == u''
+    True
+    >>> exec_result.stdout.strip() == u'Hello World'
+    True
+    >>> exec_result = exec_python("import time; time.sleep(90)", timeout=1, timeout_check_interval=.1)
+    >>> exec_result.stdout == u''
+    True
+    >>> exec_result.stderr == 'Execution timed out'
+    True
+    >>> exec_result.returncode == TIMEOUT_RETURN_CODE
+    True
+    """
+
+    def __init__(self, **kwds):
+        pass
+
+    def execute(self, cmd, persist=False, timeout=DEFAULT_TIMEOUT, timeout_check_interval=DEFAULT_TIMEOUT_CHECK_INTERVAL, **kwds):
+        outf = TemporaryFile()
+        p = Popen(cmd, shell=True, stdin=None, stdout=outf, stderr=PIPE)
+        # poll until timeout
+
+        for i in range(int(timeout / timeout_check_interval)):
+            r = p.poll()
+            if r is not None:
+                break
+            sleep(timeout_check_interval)
+        else:
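+            # for/else: reached only when the loop exhausted without the
+            # process finishing, i.e. the timeout elapsed.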
+            kill_pid(p.pid)
+            return Bunch(stdout=u'', stderr=TIMEOUT_ERROR_MESSAGE, returncode=TIMEOUT_RETURN_CODE)
+        outf.seek(0)
+        return Bunch(stdout=_read_str(outf), stderr=_read_str(p.stderr), returncode=p.returncode)
+
+
+def _read_str(stream):
+    contents = stream.read()
+    return contents.decode('UTF-8') if isinstance(contents, bytes) else contents
+
+
+__all__ = ('LocalShell',)
diff --git a/lib/galaxy/jobs/runners/util/cli/shell/rsh.py b/lib/galaxy/jobs/runners/util/cli/shell/rsh.py
new file mode 100644
index 0000000..b6fdcc8
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/cli/shell/rsh.py
@@ -0,0 +1,40 @@
+from .local import LocalShell
+
+from logging import getLogger
+log = getLogger(__name__)
+
+__all__ = ('RemoteShell', 'SecureShell', 'GlobusSecureShell')
+
+
+class RemoteShell(LocalShell):
+
+    def __init__(self, rsh='rsh', rcp='rcp', hostname='localhost', username=None, **kwargs):
+        super(RemoteShell, self).__init__(**kwargs)
+        self.rsh = rsh
+        self.rcp = rcp
+        self.hostname = hostname
+        self.username = username
+        self.sessions = {}
+
+    def execute(self, cmd, persist=False, timeout=60):
+        # TODO: implement persistence
+        if self.username is None:
+            fullcmd = '%s %s %s' % (self.rsh, self.hostname, cmd)
+        else:
+            fullcmd = '%s -l %s %s %s' % (self.rsh, self.username, self.hostname, cmd)
+        return super(RemoteShell, self).execute(fullcmd, persist, timeout)
+
+
+class SecureShell(RemoteShell):
+    SSH_NEW_KEY_STRING = 'Are you sure you want to continue connecting'
+
+    def __init__(self, rsh='ssh', rcp='scp', **kwargs):
+        rsh += ' -oStrictHostKeyChecking=yes -oConnectTimeout=60'
+        rcp += ' -oStrictHostKeyChecking=yes -oConnectTimeout=60'
+        super(SecureShell, self).__init__(rsh=rsh, rcp=rcp, **kwargs)
+
+
+class GlobusSecureShell(SecureShell):
+
+    def __init__(self, rsh='gsissh', rcp='gsiscp', **kwargs):
+        super(GlobusSecureShell, self).__init__(rsh=rsh, rcp=rcp, **kwargs)
diff --git a/lib/galaxy/jobs/runners/util/condor/__init__.py b/lib/galaxy/jobs/runners/util/condor/__init__.py
new file mode 100644
index 0000000..d0103ca
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/condor/__init__.py
@@ -0,0 +1,115 @@
+"""
+Condor helper utilities.
+"""
+from subprocess import Popen, PIPE, STDOUT, check_call, CalledProcessError
+from ..external import parse_external_id
+
+DEFAULT_QUERY_CLASSAD = dict(
+    universe='vanilla',
+    getenv='true',
+    notification='NEVER',
+)
+
+PROBLEM_RUNNING_CONDOR_SUBMIT = \
+    "Problem encountered while running condor_submit."
+PROBLEM_PARSING_EXTERNAL_ID = \
+    "Failed to find job id from condor_submit"
+
+SUBMIT_PARAM_PREFIX = "submit_"
+
+
+def submission_params(prefix=SUBMIT_PARAM_PREFIX, **kwds):
+    submission_params = {}
+    for key in kwds:
+        value = kwds[key]
+        key = key.lower()
+        if key.startswith(prefix):
+            condor_key = key[len(prefix):]
+            submission_params[condor_key] = value
+    return submission_params
+
+
+def build_submit_description(executable, output, error, user_log, query_params):
+    """
+    Build up the contents of a condor submit description file.
+
+    >>> submit_args = dict(executable='/path/to/script', output='o', error='e', user_log='ul')
+    >>> submit_args['query_params'] = dict()
+    >>> default_description = build_submit_description(**submit_args)
+    >>> assert 'executable = /path/to/script' in default_description
+    >>> assert 'output = o' in default_description
+    >>> assert 'error = e' in default_description
+    >>> assert 'queue' in default_description
+    >>> assert 'universe = vanilla' in default_description
+    >>> assert 'universe = standard' not in default_description
+    >>> submit_args['query_params'] = dict(universe='standard')
+    >>> std_description = build_submit_description(**submit_args)
+    >>> assert 'universe = vanilla' not in std_description
+    >>> assert 'universe = standard' in std_description
+    """
+    all_query_params = DEFAULT_QUERY_CLASSAD.copy()
+    all_query_params.update(query_params)
+
+    submit_description = []
+    for key, value in all_query_params.items():
+        submit_description.append('%s = %s' % (key, value))
+    submit_description.append('executable = ' + executable)
+    submit_description.append('output = ' + output)
+    submit_description.append('error = ' + error)
+    submit_description.append('log = ' + user_log)
+    submit_description.append('queue')
+    return '\n'.join(submit_description)
+
+
+def condor_submit(submit_file):
+    """
+    Submit a condor job described by the given file. Parse an external id for
+    the submission or return None and a reason for the failure.
+    """
+    external_id = None
+    try:
+        submit = Popen(('condor_submit', submit_file), stdout=PIPE, stderr=STDOUT)
+        message, _ = submit.communicate()
+        if submit.returncode == 0:
+            external_id = parse_external_id(message, type='condor')
+            if external_id is None:
+                message = PROBLEM_PARSING_EXTERNAL_ID
+        else:
+            message = PROBLEM_RUNNING_CONDOR_SUBMIT
+    except Exception as e:
+        message = str(e)
+    return external_id, message
+
+
+def condor_stop(external_id):
+    """
+    Stop running condor job and return a failure_message if this
+    fails.
+    """
+    failure_message = None
+    try:
+        check_call(('condor_rm', external_id))
+    except CalledProcessError:
+        failure_message = "condor_rm failed"
+    except Exception as e:
+        "error encountered calling condor_rm: %s" % e
+    return failure_message
+
+
+def summarize_condor_log(log_file, external_id):
+    """
+    """
+    log_job_id = external_id.zfill(3)
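+    # HTCondor user log event lines start with a three-digit event code and
+    # a zero-padded cluster id, e.g. "001 (042.000.000) ... Job executing",
+    # which is why the id is zfill'ed before matching below.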
+    s1 = s4 = s7 = s5 = s9 = False
+    with open(log_file, 'r') as log_handle:
+        for line in log_handle:
+            if '001 (' + log_job_id + '.' in line:
+                s1 = True
+            if '004 (' + log_job_id + '.' in line:
+                s4 = True
+            if '007 (' + log_job_id + '.' in line:
+                s7 = True
+            if '005 (' + log_job_id + '.' in line:
+                s5 = True
+            if '009 (' + log_job_id + '.' in line:
+                s9 = True
+        file_size = log_handle.tell()
+    return s1, s4, s7, s5, s9, file_size
diff --git a/lib/galaxy/jobs/runners/util/drmaa/__init__.py b/lib/galaxy/jobs/runners/util/drmaa/__init__.py
new file mode 100644
index 0000000..0a8da7a
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/drmaa/__init__.py
@@ -0,0 +1,56 @@
+try:
+    from drmaa import JobControlAction, Session
+except ImportError:
+    # Will not be able to use DRMAA
+    Session = None
+
+NO_DRMAA_MESSAGE = "Attempt to use DRMAA, but DRMAA Python library cannot be loaded."
+
+
+class DrmaaSessionFactory(object):
+    """
+    Abstraction used to produce DrmaaSession wrappers.
+    """
+    def __init__(self):
+        self.session_constructor = Session
+
+    def get(self, **kwds):
+        session_constructor = self.session_constructor
+        if not session_constructor:
+            raise Exception(NO_DRMAA_MESSAGE)
+        return DrmaaSession(session_constructor(), **kwds)
+
+
+class DrmaaSession(object):
+    """
+    Abstraction around `drmaa` module `Session` objects.
+    """
+
+    def __init__(self, session, **kwds):
+        self.session = session
+        session.initialize()
+
+    def run_job(self, **kwds):
+        """
+        Create a DRMAA job template, populate with specified properties,
+        run the job, and return the external_job_id.
+        """
+        template = self.session.createJobTemplate()
+        try:
+            for key in kwds:
+                setattr(template, key, kwds[key])
+            return self.session.runJob(template)
+        finally:
+            self.session.deleteJobTemplate(template)
+
+    def kill(self, external_job_id):
+        return self.session.control(str(external_job_id), JobControlAction.TERMINATE)
+
+    def job_status(self, external_job_id):
+        return self.session.jobStatus(str(external_job_id))
+
+    def close(self):
+        return self.session.exit()
+
+
+__all__ = ('DrmaaSessionFactory', )
diff --git a/lib/galaxy/jobs/runners/util/env.py b/lib/galaxy/jobs/runners/util/env.py
new file mode 100644
index 0000000..f426722
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/env.py
@@ -0,0 +1,40 @@
+
+RAW_VALUE_BY_DEFAULT = False
+
+
+def env_to_statement(env):
+    ''' Return the abstraction description of an environment variable definition
+    into a statement for shell script.
+
+    >>> env_to_statement(dict(name='X', value='Y'))
+    'X="Y"; export X'
+    >>> env_to_statement(dict(name='X', value='Y', raw=True))
+    'X=Y; export X'
+    >>> env_to_statement(dict(name='X', value='"A","B","C"'))
+    'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X'
+    >>> env_to_statement(dict(file="Y"))
+    '. "Y"'
+    >>> env_to_statement(dict(file="'RAW $FILE'", raw=True))
+    ". 'RAW $FILE'"
+    >>> # Source file takes precedence
+    >>> env_to_statement(dict(name='X', value='"A","B","C"', file="S"))
+    '. "S"'
+    >>> env_to_statement(dict(execute="module load java/1.5.1"))
+    'module load java/1.5.1'
+    '''
+    source_file = env.get('file', None)
+    if source_file:
+        return '. %s' % __escape(source_file, env)
+    execute = env.get('execute', None)
+    if execute:
+        return execute
+    name = env['name']
+    value = __escape(env['value'], env)
+    return '%s=%s; export %s' % (name, value, name)
+
+
+def __escape(value, env):
+    raw = env.get('raw', RAW_VALUE_BY_DEFAULT)
+    if not raw:
+        value = '"' + value.replace('"', '\\"') + '"'
+    return value
diff --git a/lib/galaxy/jobs/runners/util/external.py b/lib/galaxy/jobs/runners/util/external.py
new file mode 100644
index 0000000..a02b448
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/external.py
@@ -0,0 +1,37 @@
+from re import search
+
+EXTERNAL_ID_TYPE_ANY = None
+
+EXTERNAL_ID_PATTERNS = [
+    ('condor', r'submitted to cluster (\d+)\.'),
+    ('slurm', r'Submitted batch job (\w+)'),
+    ('torque', r'(.+)'),  # Default 'pattern' assumed by Galaxy code circa August 2013.
+]
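+# When no type is given, patterns are tried in order, so the torque
+# catch-all pattern must remain last or it would shadow the others.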
+
+
+def parse_external_id(output, type=EXTERNAL_ID_TYPE_ANY):
+    """
+    Attempt to parse the output of job submission commands for an external id.
+
+    >>> parse_external_id("12345.pbsmanager")
+    '12345.pbsmanager'
+    >>> parse_external_id('Submitted batch job 185')
+    '185'
+    >>> parse_external_id('Submitted batch job 185', type='torque')
+    'Submitted batch job 185'
+    >>> parse_external_id('submitted to cluster 125.')
+    '125'
+    >>> parse_external_id('submitted to cluster 125.', type='slurm')
+    >>>
+    """
+    external_id = None
+    for pattern_type, pattern in EXTERNAL_ID_PATTERNS:
+        if type != EXTERNAL_ID_TYPE_ANY and type != pattern_type:
+            continue
+
+        match = search(pattern, output)
+        if match:
+            external_id = match.group(1)
+            break
+
+    return external_id
diff --git a/lib/galaxy/jobs/runners/util/job_script/CLUSTER_SLOTS_STATEMENT.sh b/lib/galaxy/jobs/runners/util/job_script/CLUSTER_SLOTS_STATEMENT.sh
new file mode 100644
index 0000000..624f77c
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/job_script/CLUSTER_SLOTS_STATEMENT.sh
@@ -0,0 +1,29 @@
+export GALAXY_SLOTS_CONFIGURED="1"
+if [ -n "$SLURM_CPUS_ON_NODE" ]; then
+    # This should be valid on SLURM except in the case that srun is used to
+    # submit additional job steps under an existing allocation, which we do not
+    # currently do.
+    GALAXY_SLOTS="$SLURM_CPUS_ON_NODE"
+elif [ -n "$SLURM_NTASKS" ] || [ -n "$SLURM_CPUS_PER_TASK" ]; then
+    # $SLURM_CPUS_ON_NODE should be set correctly on SLURM (even on old
+    # installations), but keep the $SLURM_NTASKS logic as a backup since this
+    # was the previous method under SLURM.
+    #
+    # Multiply these values since SLURM_NTASKS is total tasks over all nodes.
+    # GALAXY_SLOTS maps to CPUS on a single node and shouldn't be used for
+    # multi-node requests.
+    GALAXY_SLOTS=`expr "${SLURM_NTASKS:-1}" \* "${SLURM_CPUS_PER_TASK:-1}"`
+elif [ -n "$NSLOTS" ]; then
+    GALAXY_SLOTS="$NSLOTS"
+elif [ -n "$NCPUS" ]; then
+    GALAXY_SLOTS="$NCPUS"
+elif [ -n "$PBS_NCPUS" ]; then
+    GALAXY_SLOTS="$PBS_NCPUS"
+elif [ -f "$PBS_NODEFILE" ]; then
+    GALAXY_SLOTS=`wc -l < $PBS_NODEFILE`
+elif [ -n "$LSB_DJOB_NUMPROC" ]; then
+    GALAXY_SLOTS="$LSB_DJOB_NUMPROC"
+else
+    GALAXY_SLOTS="1"
+    unset GALAXY_SLOTS_CONFIGURED
+fi
diff --git a/lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh b/lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh
new file mode 100644
index 0000000..a220550
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh
@@ -0,0 +1,27 @@
+#!$shell
+
+$headers
+$integrity_injection
+$slots_statement
+export GALAXY_SLOTS
+GALAXY_LIB="$galaxy_lib"
+if [ "$GALAXY_LIB" != "None" ]; then
+    if [ -n "$PYTHONPATH" ]; then
+        PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"
+    else
+        PYTHONPATH="$GALAXY_LIB"
+    fi
+    export PYTHONPATH
+fi
+$env_setup_commands
+GALAXY_VIRTUAL_ENV="$galaxy_virtual_env"
+if [ "$GALAXY_VIRTUAL_ENV" != "None" -a -z "$VIRTUAL_ENV" \
+     -a -f "$GALAXY_VIRTUAL_ENV/bin/activate" ]; then
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+GALAXY_PYTHON=`which python`
+$instrument_pre_commands
+cd $working_directory
+$command
+echo $? > $exit_code_path
+$instrument_post_commands
diff --git a/lib/galaxy/jobs/runners/util/job_script/__init__.py b/lib/galaxy/jobs/runners/util/job_script/__init__.py
new file mode 100644
index 0000000..1bcc391
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/job_script/__init__.py
@@ -0,0 +1,149 @@
+import os
+import subprocess
+import time
+from string import Template
+
+from pkg_resources import resource_string
+from six import text_type
+
+from galaxy.util import unicodify
+
+DEFAULT_SHELL = '/bin/bash'
+
+DEFAULT_JOB_FILE_TEMPLATE = Template(
+    resource_string(__name__, 'DEFAULT_JOB_FILE_TEMPLATE.sh').decode('UTF-8')
+)
+
+SLOTS_STATEMENT_CLUSTER_DEFAULT = \
+    resource_string(__name__, 'CLUSTER_SLOTS_STATEMENT.sh').decode('UTF-8')
+
+SLOTS_STATEMENT_SINGLE = """
+GALAXY_SLOTS="1"
+"""
+
+INTEGRITY_INJECTION = """
+# The following block can be used by the job system
+# to ensure this script is runnable before actually attempting
+# to run it.
+if [ -n "$ABC_TEST_JOB_SCRIPT_INTEGRITY_XYZ" ]; then
+    exit 42
+fi
+"""
+
+INTEGRITY_SYNC_COMMAND = "/bin/sync"
+DEFAULT_INTEGRITY_CHECK = True
+DEFAULT_INTEGRITY_COUNT = 35
+DEFAULT_INTEGRITY_SLEEP = .25
+REQUIRED_TEMPLATE_PARAMS = ['working_directory', 'command', 'exit_code_path']
+OPTIONAL_TEMPLATE_PARAMS = {
+    'galaxy_lib': None,
+    'galaxy_virtual_env': None,
+    'headers': '',
+    'env_setup_commands': [],
+    'slots_statement': SLOTS_STATEMENT_CLUSTER_DEFAULT,
+    'instrument_pre_commands': '',
+    'instrument_post_commands': '',
+    'integrity_injection': INTEGRITY_INJECTION,
+    'shell': DEFAULT_SHELL,
+}
+
+
+def job_script(template=DEFAULT_JOB_FILE_TEMPLATE, **kwds):
+    """
+
+    >>> has_exception = False
+    >>> try: job_script()
+    ... except Exception as e: has_exception = True
+    >>> has_exception
+    True
+    >>> script = job_script(working_directory='wd', command='uptime', exit_code_path='ec')
+    >>> '\\nuptime\\n' in script
+    True
+    >>> 'echo $? > ec' in script
+    True
+    >>> 'GALAXY_LIB="None"' in script
+    True
+    >>> script.startswith('#!/bin/sh\\n#PBS -test\\n')
+    False
+    >>> script = job_script(working_directory='wd', command='uptime', exit_code_path='ec', headers='#PBS -test')
+    >>> script.startswith('#!/bin/bash\\n\\n#PBS -test\\n')
+    True
+    >>> script = job_script(working_directory='wd', command='uptime', exit_code_path='ec', slots_statement='GALAXY_SLOTS="$SLURM_JOB_NUM_NODES"')
+    >>> script.find('GALAXY_SLOTS="$SLURM_JOB_NUM_NODES"\\nexport GALAXY_SLOTS\\n') > 0
+    True
+    """
+    if any([param not in kwds for param in REQUIRED_TEMPLATE_PARAMS]):
+        raise Exception("Failed to create job_script, a required parameter is missing.")
+    job_instrumenter = kwds.get("job_instrumenter", None)
+    if job_instrumenter:
+        del kwds["job_instrumenter"]
+        working_directory = kwds.get("metadata_directory", kwds["working_directory"])
+        kwds["instrument_pre_commands"] = job_instrumenter.pre_execute_commands(working_directory) or ''
+        kwds["instrument_post_commands"] = job_instrumenter.post_execute_commands(working_directory) or ''
+
+    template_params = OPTIONAL_TEMPLATE_PARAMS.copy()
+    template_params.update(**kwds)
+    env_setup_commands_str = "\n".join(template_params["env_setup_commands"])
+    template_params["env_setup_commands"] = env_setup_commands_str
+    for key, value in template_params.items():
+        template_params[key] = unicodify(value)
+    if not isinstance(template, Template):
+        template = Template(template)
+    return template.safe_substitute(template_params)
+
+
+def check_script_integrity(config):
+    return getattr(config, "check_job_script_integrity", DEFAULT_INTEGRITY_CHECK)
+
+
+def write_script(path, contents, config, mode=0o755):
+    dir = os.path.dirname(path)
+    if not os.path.exists(dir):
+        os.makedirs(dir)
+
+    with open(path, 'w') as f:
+        if isinstance(contents, text_type):
+            contents = contents.encode("UTF-8")
+        f.write(contents)
+    os.chmod(path, mode)
+    _handle_script_integrity(path, config)
+
+
+def _handle_script_integrity(path, config):
+    if not check_script_integrity(config):
+        return
+
+    script_integrity_verified = False
+    count = getattr(config, "check_job_script_integrity_count", DEFAULT_INTEGRITY_COUNT)
+    sleep_amt = getattr(config, "check_job_script_integrity_sleep", DEFAULT_INTEGRITY_SLEEP)
+    for i in range(count):
+        try:
+            proc = subprocess.Popen([path], shell=True, env={"ABC_TEST_JOB_SCRIPT_INTEGRITY_XYZ": "1"})
+            proc.wait()
+            if proc.returncode == 42:
+                script_integrity_verified = True
+                break
+
+            # Else we will sync and wait to see if the script becomes
+            # executable.
+            try:
+                # sync file system to avoid "Text file busy" problems.
+                # These have occurred both in Docker containers and on EC2 clusters
+                # under high load.
+                subprocess.check_call(INTEGRITY_SYNC_COMMAND)
+            except Exception:
+                pass
+            time.sleep(sleep_amt)
+        except Exception:
+            pass
+
+    if not script_integrity_verified:
+        raise Exception("Failed to write job script, could not verify job script integrity.")
+
+
+__all__ = (
+    'check_script_integrity',
+    'job_script',
+    'write_script',
+    'INTEGRITY_INJECTION',
+)
diff --git a/lib/galaxy/jobs/runners/util/kill.py b/lib/galaxy/jobs/runners/util/kill.py
new file mode 100644
index 0000000..5202246
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/kill.py
@@ -0,0 +1,65 @@
+import os
+from platform import system
+from time import sleep
+from subprocess import Popen
+
+try:
+    from psutil import Process, NoSuchProcess
+except ImportError:
+    """ Don't make psutil a strict requirement, but use if available. """
+    Process = None
+
+
+def kill_pid(pid, use_psutil=True):
+    if use_psutil and Process:
+        _psutil_kill_pid(pid)
+    else:
+        _stock_kill_pid(pid)
+
+
+def _psutil_kill_pid(pid):
+    """
+    http://stackoverflow.com/questions/1230669/subprocess-deleting-child-processes-in-windows
+    """
+    try:
+        parent = Process(pid)
+        for child in parent.get_children(recursive=True):
+            child.kill()
+        parent.kill()
+    except NoSuchProcess:
+        return
+
+
+def _stock_kill_pid(pid):
+    is_windows = system() == 'Windows'
+
+    if is_windows:
+        __kill_windows(pid)
+    else:
+        __kill_posix(pid)
+
+
+def __kill_windows(pid):
+    try:
+        Popen("taskkill /F /T /PID %i" % pid, shell=True)
+    except Exception:
+        pass
+
+
+def __kill_posix(pid):
+    def __check_pid():
+        try:
+            os.kill(pid, 0)
+            return True
+        except OSError:
+            return False
+
+    if __check_pid():
+        for sig in [15, 9]:
+            try:
+                os.killpg(pid, sig)
+            except OSError:
+                return
+            sleep(1)
+            if not __check_pid():
+                return
diff --git a/lib/galaxy/jobs/runners/util/retry.py b/lib/galaxy/jobs/runners/util/retry.py
new file mode 100644
index 0000000..4d9da3c
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/retry.py
@@ -0,0 +1,112 @@
+from itertools import count
+from time import sleep
+
+import logging
+log = logging.getLogger(__name__)
+
+DEFAULT_MAX_RETRIES = -1  # By default don't retry.
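+# Note: -1 is truthy, so max_retries stays -1 and _retry_over_time re-raises
+# the first caught exception; a falsy value (0, '', None) yields
+# max_retries=None, i.e. retry forever.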
+DEFAULT_INTERVAL_START = 2.0
+DEFAULT_INTERVAL_MAX = 30.0
+DEFAULT_INTERVAL_STEP = 2.0
+DEFAULT_CATCH = (Exception,)
+
+DEFAULT_DESCRIPTION = "action"
+
+
+class RetryActionExecutor(object):
+
+    def __init__(self, **kwds):
+        # Use variables that match kombu to keep things consistent across
+        # Pulsar.
+        # http://ask.github.io/kombu/reference/kombu.connection.html#kombu.connection.BrokerConnection.ensure_connection
+        raw_max_retries = kwds.get("max_retries", DEFAULT_MAX_RETRIES)
+        self.max_retries = None if not raw_max_retries else int(raw_max_retries)
+        self.interval_start = float(kwds.get("interval_start", DEFAULT_INTERVAL_START))
+        self.interval_step = float(kwds.get("interval_step", DEFAULT_INTERVAL_STEP))
+        self.interval_max = float(kwds.get("interval_max", DEFAULT_INTERVAL_MAX))
+        self.errback = kwds.get("errback", self.__default_errback)
+        self.catch = kwds.get("catch", DEFAULT_CATCH)
+
+        self.default_description = kwds.get("description", DEFAULT_DESCRIPTION)
+
+    def execute(self, action, description=None):
+        def on_error(exc, intervals, retries, interval=0):
+            interval = next(intervals)
+            if self.errback:
+                errback_args = [exc, interval]
+                if description is not None:
+                    errback_args.append(description)
+                self.errback(*errback_args)
+            return interval
+
+        return _retry_over_time(
+            action,
+            catch=self.catch,
+            max_retries=self.max_retries,
+            interval_start=self.interval_start,
+            interval_step=self.interval_step,
+            interval_max=self.interval_max,
+            errback=on_error,
+        )
+
+    def __default_errback(self, exc, interval, description=None):
+        description = description or self.default_description
+        log.info(
+            "Failed to execute %s, retrying in %s seconds.",
+            description,
+            interval,
+            exc_info=True
+        )
+
+
+# Following functions are derived from Kombu versions @
+# https://github.com/celery/kombu/blob/master/kombu/utils/__init__.py
+# BSD License (https://github.com/celery/kombu/blob/master/LICENSE)
+def _retry_over_time(fun, catch, args=[], kwargs={}, errback=None,
+                     max_retries=None, interval_start=2, interval_step=2,
+                     interval_max=30):
+    """Retry the function over and over until max retries is exceeded.
+
+    For each retry we sleep a for a while before we try again, this interval
+    is increased for every retry until the max seconds is reached.
+
+    :param fun: The function to try
+    :param catch: Exceptions to catch, can be either tuple or a single
+        exception class.
+    :keyword args: Positional arguments passed on to the function.
+    :keyword kwargs: Keyword arguments passed on to the function.
+    :keyword max_retries: Maximum number of retries before we give up.
+        If this is not set, we will retry forever.
+    :keyword interval_start: How long (in seconds) we start sleeping between
+        retries.
+    :keyword interval_step: By how much the interval is increased for each
+        retry.
+    :keyword interval_max: Maximum number of seconds to sleep between retries.
+
+    """
+    retries = 0
+    interval_range = __fxrange(interval_start,
+                               interval_max + interval_start,
+                               interval_step, repeatlast=True)
+    for retries in count():
+        try:
+            return fun(*args, **kwargs)
+        except catch as exc:
+            if max_retries and retries >= max_retries:
+                raise
+            tts = float(errback(exc, interval_range, retries) if errback
+                        else next(interval_range))
+            if tts:
+                sleep(tts)
+
+
+def __fxrange(start=1.0, stop=None, step=1.0, repeatlast=False):
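+    """Float variant of range(); with repeatlast=True, once `stop` is
+    exceeded the last in-range value keeps repeating (used to cap retry
+    intervals)."""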
+    cur = start * 1.0
+    while 1:
+        if not stop or cur <= stop:
+            yield cur
+            cur += step
+        else:
+            if not repeatlast:
+                break
+            yield cur - step
diff --git a/lib/galaxy/jobs/runners/util/sudo.py b/lib/galaxy/jobs/runners/util/sudo.py
new file mode 100644
index 0000000..bb9e03d
--- /dev/null
+++ b/lib/galaxy/jobs/runners/util/sudo.py
@@ -0,0 +1,24 @@
+import logging
+
+from subprocess import Popen, PIPE
+
+SUDO_PATH = '/usr/bin/sudo'
+SUDO_PRESERVE_ENVIRONMENT_ARG = '-E'
+SUDO_USER_ARG = '-u'
+
+log = logging.getLogger(__name__)
+
+
+def sudo_popen(*args, **kwargs):
+    """
+    Helper method for building and executing a Popen command. This is
+    potentially sensitive code, so it should probably be centralized.
+    """
+    user = kwargs.get("user", None)
+    full_command = [SUDO_PATH, SUDO_PRESERVE_ENVIRONMENT_ARG]
+    if user:
+        full_command.extend([SUDO_USER_ARG, user])
+    full_command.extend(args)
+    log.info("About to execute the following sudo command - [%s]" % ' '.join(full_command))
+    p = Popen(full_command, shell=False, stdout=PIPE, stderr=PIPE)
+    return p
diff --git a/lib/galaxy/jobs/splitters/__init__.py b/lib/galaxy/jobs/splitters/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/jobs/splitters/basic.py b/lib/galaxy/jobs/splitters/basic.py
new file mode 100644
index 0000000..6ac56b6
--- /dev/null
+++ b/lib/galaxy/jobs/splitters/basic.py
@@ -0,0 +1,26 @@
+import logging
+from . import multi
+
+log = logging.getLogger( __name__ )
+
+
+def set_basic_defaults(job_wrapper):
+    parent_job = job_wrapper.get_job()
+    parallelism = job_wrapper.get_parallelism()
+    parallelism.attributes['split_inputs'] = parent_job.input_datasets[0].name
+    parallelism.attributes['merge_outputs'] = list(job_wrapper.get_output_hdas_and_fnames().keys())[0]
+
+
+def do_split(job_wrapper):
+    if len(job_wrapper.get_input_fnames()) > 1 or len(job_wrapper.get_output_fnames()) > 1:
+        log.error("The basic splitter is not capable of handling jobs with multiple inputs or outputs.")
+        raise Exception( "Job Splitting Failed, the basic splitter only handles tools with one input and one output" )
+    # add in the missing information for splitting the one input and merging the one output
+    set_basic_defaults(job_wrapper)
+    return multi.do_split(job_wrapper)
+
+
+def do_merge( job_wrapper, task_wrappers):
+    # add in the missing information for splitting the one input and merging the one output
+    set_basic_defaults(job_wrapper)
+    return multi.do_merge(job_wrapper, task_wrappers)
diff --git a/lib/galaxy/jobs/splitters/multi.py b/lib/galaxy/jobs/splitters/multi.py
new file mode 100644
index 0000000..4be3b2d
--- /dev/null
+++ b/lib/galaxy/jobs/splitters/multi.py
@@ -0,0 +1,195 @@
+import os
+import logging
+import shutil
+import inspect
+
+from galaxy import model, util
+
+
+log = logging.getLogger( __name__ )
+
+
+def do_split(job_wrapper):
+    parent_job = job_wrapper.get_job()
+    working_directory = os.path.abspath(job_wrapper.working_directory)
+
+    parallel_settings = job_wrapper.get_parallelism().attributes
+    # Syntax: split_inputs="input1,input2" shared_inputs="genome"
+    # Designates inputs to be split or shared
+    split_inputs = parallel_settings.get("split_inputs")
+    if split_inputs is None:
+        split_inputs = []
+    else:
+        split_inputs = [x.strip() for x in split_inputs.split(",")]
+
+    shared_inputs = parallel_settings.get("shared_inputs")
+    auto_shared_inputs = False
+    if shared_inputs is None:
+        shared_inputs = []
+        auto_shared_inputs = True
+    else:
+        shared_inputs = [x.strip() for x in shared_inputs.split(",")]
+    illegal_inputs = [x for x in shared_inputs if x in split_inputs]
+    if len(illegal_inputs) > 0:
+        raise Exception("Inputs have conflicting parallelism attributes: %s" % str( illegal_inputs ))
+
+    subdir_index = [0]  # use a list to get around Python 2.x lame closure support
+    task_dirs = []
+
+    def get_new_working_directory_name():
+        dir = os.path.join(working_directory, 'task_%d' % subdir_index[0])
+        subdir_index[0] = subdir_index[0] + 1
+        if not os.path.exists(dir):
+            os.makedirs(dir)
+        task_dirs.append(dir)
+        return dir
+
+    # For things like paired end alignment, we need two inputs to be split. Since all inputs to all
+    # derived subtasks need to be correlated, allow only one input type to be split
+    # If shared_inputs are not specified, we assume all non-split inputs are shared inputs.
+    # Any input may be None, since a data input may be declared optional.
+    type_to_input_map = {}
+    for input in parent_job.input_datasets:
+        if input.dataset is None:
+            if input.name in shared_inputs:
+                shared_inputs.remove(input.name)
+            else:
+                pass
+        else:
+            if input.name in split_inputs:
+                type_to_input_map.setdefault(input.dataset.datatype, []).append(input.name)
+            elif input.name in shared_inputs:
+                pass  # pass original file name
+            elif auto_shared_inputs:
+                shared_inputs.append(input.name)
+            else:
+                log_error = "The input '%s' does not define a method for implementing parallelism" % str(input.name)
+                log.exception(log_error)
+                raise Exception(log_error)
+
+    if len(type_to_input_map) > 1:
+        log_error = "The multi splitter does not support splitting inputs of more than one type"
+        log.error(log_error)
+        raise Exception(log_error)
+
+    # split the first one to build up the task directories
+    input_datasets = []
+    for input in parent_job.input_datasets:
+        if input.name in split_inputs:
+            this_input_files = job_wrapper.get_input_dataset_fnames(input.dataset)
+            if len(this_input_files) > 1:
+                log_error = "The input '%s' is composed of multiple files - splitting is not allowed" % str(input.name)
+                log.error(log_error)
+                raise Exception(log_error)
+            input_datasets.append(input.dataset)
+
+    input_type = list(type_to_input_map.keys())[0]
+    # DBTODO execute an external task to do the splitting, this should happen at refactor.
+    # If the number of tasks is sufficiently high, we can use it to calculate job completion % and give a running status.
+    try:
+        input_type.split(input_datasets, get_new_working_directory_name, parallel_settings)
+    except AttributeError:
+        log_error = "The type '%s' does not define a method for splitting files" % str(input_type)
+        log.error(log_error)
+        raise
+    log.debug('do_split created %d parts' % len(task_dirs))
+    # next, after we know how many divisions there are, add the shared inputs via soft links
+    for input in parent_job.input_datasets:
+        if input and input.name in shared_inputs:
+            names = job_wrapper.get_input_dataset_fnames(input.dataset)
+            for dir in task_dirs:
+                for file in names:
+                    os.symlink(file, os.path.join(dir, os.path.basename(file)))
+    tasks = []
+    prepare_files = os.path.join(util.galaxy_directory(), 'extract_dataset_parts.sh') + ' %s'
+    for dir in task_dirs:
+        task = model.Task(parent_job, dir, prepare_files % dir)
+        tasks.append(task)
+    return tasks
+
+
+def do_merge( job_wrapper, task_wrappers):
+    parallel_settings = job_wrapper.get_parallelism().attributes
+    # Syntax: merge_outputs="export" pickone_outputs="genomesize"
+    # Designates outputs to be merged, or selected from as a representative
+    merge_outputs = parallel_settings.get("merge_outputs")
+    if merge_outputs is None:
+        merge_outputs = []
+    else:
+        merge_outputs = [x.strip() for x in merge_outputs.split(",")]
+    pickone_outputs = parallel_settings.get("pickone_outputs")
+    if pickone_outputs is None:
+        pickone_outputs = []
+    else:
+        pickone_outputs = [x.strip() for x in pickone_outputs.split(",")]
+
+    illegal_outputs = [x for x in merge_outputs if x in pickone_outputs]
+    if len(illegal_outputs) > 0:
+        return ('Tool file error', 'Outputs have conflicting parallelism attributes: %s' % str( illegal_outputs ))
+
+    stdout = ''
+    stderr = ''
+
+    try:
+        working_directory = job_wrapper.working_directory
+        task_dirs = [os.path.join(working_directory, x) for x in os.listdir(working_directory) if x.startswith('task_')]
+        assert task_dirs, "Should be at least one sub-task!"
+        # TODO: Output datasets can be very complex. This doesn't handle metadata files
+        outputs = job_wrapper.get_output_hdas_and_fnames()
+        output_paths = job_wrapper.get_output_fnames()
+        pickone_done = []
+        task_dirs.sort(key=lambda x: int(x.split('task_')[-1]))
+        for index, output in enumerate( outputs ):
+            output_file_name = str( output_paths[ index ] )  # Use false_path if set, else real path.
+            base_output_name = os.path.basename(output_file_name)
+            if output in merge_outputs:
+                output_dataset = outputs[output][0]
+                output_type = output_dataset.datatype
+                output_files = [os.path.join(dir, base_output_name) for dir in task_dirs]
+                # Just include those files f in the output list for which the
+                # file f exists; some files may not exist if a task fails.
+                output_files = [ f for f in output_files if os.path.exists(f) ]
+                if output_files:
+                    log.debug('files %s ' % output_files)
+                    if len(output_files) < len(task_dirs):
+                        log.debug('merging only %i out of expected %i files for %s'
+                                  % (len(output_files), len(task_dirs), output_file_name))
+                    # First two args to merge always output_files and path of dataset. More
+                    # complicated merge methods may require more parameters. Set those up here.
+                    extra_merge_arg_names = inspect.getargspec( output_type.merge ).args[2:]
+                    extra_merge_args = {}
+                    if "output_dataset" in extra_merge_arg_names:
+                        extra_merge_args["output_dataset"] = output_dataset
+                    output_type.merge(output_files, output_file_name, **extra_merge_args)
+                    log.debug('merge finished: %s' % output_file_name)
+                else:
+                    msg = 'nothing to merge for %s (expected %i files)' \
+                          % (output_file_name, len(task_dirs))
+                    log.debug(msg)
+                    stderr += msg + "\n"
+            elif output in pickone_outputs:
+                # just pick one of them
+                if output not in pickone_done:
+                    task_file_name = os.path.join(task_dirs[0], base_output_name)
+                    shutil.move( task_file_name, output_file_name )
+                    pickone_done.append(output)
+            else:
+                log_error = "The output '%s' does not define a method for implementing parallelism" % output
+                log.error(log_error)
+                raise Exception(log_error)
+    except Exception as e:
+        stdout = 'Error merging files'
+        log.exception( stdout )
+        stderr = str(e)
+
+    for tw in task_wrappers:
+        # Prevent repetitive output, e.g. "Sequence File Aligned"x20
+        # Eventually do a reduce for jobs that output "N reads mapped", combining all N for tasks.
+        out = tw.get_task().stdout.strip()
+        err = tw.get_task().stderr.strip()
+        if len(out) > 0:
+            stdout += "\n" + tw.working_directory + ':\n' + out
+        if len(err) > 0:
+            stderr += "\n" + tw.working_directory + ':\n' + err
+    return (stdout, stderr)
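
For reference, the merge step above introspects each datatype's merge method with inspect.getargspec and passes extras such as output_dataset only when the signature declares them, while do_split calls input_type.split(input_datasets, get_new_working_directory_name, parallel_settings). A minimal sketch of a compatible (hypothetical) datatype, assuming plain concatenation is valid for the format:

    import shutil

    class ConcatenatingDatatype(object):
        """Hypothetical datatype whose parts can be merged by concatenation."""

        @classmethod
        def split(cls, input_datasets, subdir_generator_function, split_params):
            # hook expected by do_split(): write each part into a fresh
            # directory obtained from subdir_generator_function()
            raise NotImplementedError('sketch only')

        @staticmethod
        def merge(split_files, output_file):
            # the first two args are always the part files and the destination;
            # extras (e.g. output_dataset) are passed only if declared
            with open(output_file, 'wb') as out:
                for part in split_files:
                    with open(part, 'rb') as src:
                        shutil.copyfileobj(src, out)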
diff --git a/lib/galaxy/jobs/stock_rules.py b/lib/galaxy/jobs/stock_rules.py
new file mode 100644
index 0000000..9d1d79b
--- /dev/null
+++ b/lib/galaxy/jobs/stock_rules.py
@@ -0,0 +1,25 @@
+""" Stock job 'dynamic' rules for use in job_conf.xml - these may cover some
+simple use cases but will just proxy into functions in rule_helper so similar
+functionality - but more tailored and composable can be utilized in custom
+rules.
+"""
+
+from galaxy import util
+
+
+def choose_one( rule_helper, job, destination_ids, hash_by="job" ):
+    destination_id_list = util.listify( destination_ids )
+    job_hash = rule_helper.job_hash( job, hash_by )
+    return rule_helper.choose_one( destination_id_list, hash_value=job_hash )
+
+
+def burst( rule_helper, job, from_destination_ids, to_destination_id, num_jobs, job_states=None):
+    from_destination_ids = util.listify( from_destination_ids )
+    if rule_helper.should_burst( from_destination_ids, num_jobs=num_jobs, job_states=job_states ):
+        return to_destination_id
+    else:
+        return from_destination_ids[ 0 ]
+
+
+def docker_dispatch( rule_helper, tool, docker_destination_id, default_destination_id ):
+    return docker_destination_id if rule_helper.supports_docker( tool ) else default_destination_id
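
As the module docstring suggests, custom rules can compose the same rule_helper calls. A hedged sketch (the function and its semantics are hypothetical) that only bursts jobs for Docker-enabled tools:

    def burst_docker(rule_helper, tool, job, from_destination_ids, to_destination_id, num_jobs):
        # non-Docker tools stay on the first source destination
        if not rule_helper.supports_docker(tool):
            return util.listify(from_destination_ids)[0]  # util is this module's galaxy.util import
        return burst(rule_helper, job, from_destination_ids, to_destination_id, num_jobs)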
diff --git a/lib/galaxy/jobs/transfer_manager.py b/lib/galaxy/jobs/transfer_manager.py
new file mode 100644
index 0000000..ccd5c88
--- /dev/null
+++ b/lib/galaxy/jobs/transfer_manager.py
@@ -0,0 +1,159 @@
+"""
+Manage transfers from arbitrary URLs to temporary files.  Socket interface for
+IPC with multiple process configurations.
+"""
+import json
+import logging
+import os
+import socket
+import subprocess
+import threading
+
+from galaxy.util import listify, sleeper
+from galaxy.util.json import jsonrpc_request, validate_jsonrpc_response
+
+log = logging.getLogger( __name__ )
+
+
+class TransferManager( object ):
+    """
+    Manage simple data transfers from URLs to temporary locations.
+    """
+    def __init__( self, app ):
+        self.app = app
+        self.sa_session = app.model.context.current
+        self.command = 'python %s' % os.path.abspath( os.path.join( os.getcwd(), 'scripts', 'transfer.py' ) )
+        if app.config.get_bool( 'enable_job_recovery', True ):
+            # Only one Galaxy server process should restart (recover) transfer jobs, otherwise you'll have nasty race conditions
+            self.running = True
+            self.sleeper = sleeper.Sleeper()
+            self.restarter = threading.Thread( target=self.__restarter )
+            self.restarter.start()
+
+    def new( self, path=None, **kwd ):
+        if 'protocol' not in kwd:
+            raise Exception( 'Missing required parameter "protocol".' )
+        protocol = kwd[ 'protocol' ]
+        if protocol in [ 'http', 'https' ]:
+            if 'url' not in kwd:
+                raise Exception( 'Missing required parameter "url".' )
+        elif protocol == 'scp':
+            # TODO: add more checks here?
+            if 'sample_dataset_id' not in kwd:
+                raise Exception( 'Missing required parameter "sample_dataset_id".' )
+            if 'file_path' not in kwd:
+                raise Exception( 'Missing required parameter "file_path".' )
+        transfer_job = self.app.model.TransferJob( state=self.app.model.TransferJob.states.NEW, params=kwd )
+        self.sa_session.add( transfer_job )
+        self.sa_session.flush()
+        return transfer_job
+
+    def run( self, transfer_jobs ):
+        """
+        This method blocks, so if invoking the transfer manager ever starts
+        taking too long, we should move it to a thread.  However, the
+        transfer_manager will either daemonize or return after submitting to a
+        running daemon, so it should be fairly quick to return.
+        """
+        transfer_jobs = listify( transfer_jobs )
+        printable_tj_ids = ', '.join( [ str( tj.id ) for tj in transfer_jobs ] )
+        log.debug( 'Initiating transfer job(s): %s' % printable_tj_ids )
+        # Set all jobs running before spawning, or else updating the state may
+        # clobber a state change performed by the worker.
+        for tj in transfer_jobs:
+            tj.state = tj.states.RUNNING
+        self.sa_session.add_all( transfer_jobs )
+        self.sa_session.flush()
+        for tj in transfer_jobs:
+            # The transfer script should daemonize fairly quickly - if this is
+            # not the case, this process will need to be moved to a
+            # non-blocking method.
+            cmd = '%s %s' % ( self.command, tj.id )
+            log.debug( 'Transfer command is: %s' % cmd )
+            p = subprocess.Popen( cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
+            p.wait()
+            output = p.stdout.read( 32768 )
+            if p.returncode != 0:
+                log.error( 'Spawning transfer job failed: %s: %s' % ( tj.id, output ) )
+                tj.state = tj.states.ERROR
+                tj.info = 'Spawning transfer job failed: %s' % output.splitlines()[-1]
+                self.sa_session.add( tj )
+                self.sa_session.flush()
+
+    def get_state( self, transfer_jobs, via_socket=False ):
+        transfer_jobs = listify( transfer_jobs )
+        rval = []
+        for tj in transfer_jobs:
+            if via_socket and tj.state not in tj.terminal_states and tj.socket:
+                try:
+                    request = jsonrpc_request( method='get_state', id=True )
+                    sock = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
+                    sock.settimeout( 5 )
+                    sock.connect( ( 'localhost', tj.socket ) )
+                    sock.send( json.dumps( request ) )
+                    response = sock.recv( 8192 )
+                    valid, response = validate_jsonrpc_response( response, id=request['id'] )
+                    if not valid:
+                        # No valid response received, make some pseudo-json-rpc
+                        raise Exception( dict( code=128, message='Did not receive valid response from transfer daemon for state' ) )
+                    if 'error' in response:
+                        # Response was valid but Request resulted in an error
+                        raise Exception( response['error'])
+                    else:
+                        # Request was valid
+                        response['result']['transfer_job_id'] = tj.id
+                        rval.append( response['result'] )
+                except Exception as e:
+                    # State checking via the transfer daemon failed, just
+                    # return the state from the database instead.  Callers can
+                    # look for the 'error' member of the response to see why
+                    # the check failed.
+                    self.sa_session.refresh( tj )
+                    error = e.args[0] if e.args else None
+                    if not isinstance( error, dict ):
+                        error = dict( code=256, message='Error connecting to transfer daemon', data=str( e ) )
+                    rval.append( dict( transfer_job_id=tj.id, state=tj.state, error=error ) )
+            else:
+                self.sa_session.refresh( tj )
+                rval.append( dict( transfer_job_id=tj.id, state=tj.state ) )
+        for tj_state in rval:
+            if tj_state['state'] in self.app.model.TransferJob.terminal_states:
+                log.debug( 'Transfer job %s is in terminal state: %s' % ( tj_state['transfer_job_id'], tj_state['state'] ) )
+            elif tj_state['state'] == self.app.model.TransferJob.states.PROGRESS and 'percent' in tj_state:
+                log.debug( 'Transfer job %s is %s%% complete' % ( tj_state[ 'transfer_job_id' ], tj_state[ 'percent' ] ) )
+        if len( rval ) == 1:
+            return rval[0]
+        return rval
+
+    def __restarter( self ):
+        log.info( 'Transfer job restarter starting up...' )
+        while self.running:
+            dead = []
+            self.sa_session.expunge_all()  # our session is threadlocal so this is safe.
+            for tj in self.sa_session.query( self.app.model.TransferJob ) \
+                          .filter( self.app.model.TransferJob.state == self.app.model.TransferJob.states.RUNNING ):
+                if not tj.pid:
+                    continue
+                # This will only succeed if the process exists and is owned by the
+                # user running Galaxy (unless that user is root, in which case it
+                # can be owned by anyone - but you're not running Galaxy as root,
+                # right?).  This is not guaranteed proof that the transfer is alive
+                # since another process may have assumed the original process' PID.
+                # But that will only cause the transfer to not restart until that
+                # process dies, which hopefully won't be too long from now...  If
+                # it becomes a problem, try to talk to the socket a few times and
+                # restart the transfer if socket communication fails repeatedly.
+                try:
+                    os.kill( tj.pid, 0 )
+                except OSError:
+                    self.sa_session.refresh( tj )
+                    if tj.state == tj.states.RUNNING:
+                        log.error( 'Transfer job %s is marked as running but pid %s appears to be dead.' % ( tj.id, tj.pid ) )
+                        dead.append( tj )
+            if dead:
+                self.run( dead )
+            self.sleeper.sleep( 30 )
+        log.info( 'Transfer job restarter shutting down...' )
+
+    def shutdown( self ):
+        self.running = False
+        self.sleeper.wake()
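
A usage sketch limited to the methods defined above (assumes an initialized Galaxy app; the URL is illustrative):

    tm = TransferManager(app)
    tj = tm.new(protocol='http', url='http://example.org/data.bed')
    tm.run(tj)  # spawns scripts/transfer.py, which daemonizes
    state = tm.get_state(tj, via_socket=True)
    print(state['state'])  # e.g. 'running'; 'percent' appears while in progress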
diff --git a/lib/galaxy/main.py b/lib/galaxy/main.py
new file mode 120000
index 0000000..e9cd3b7
--- /dev/null
+++ b/lib/galaxy/main.py
@@ -0,0 +1 @@
+../../scripts/galaxy-main
\ No newline at end of file
diff --git a/lib/galaxy/managers/__init__.py b/lib/galaxy/managers/__init__.py
new file mode 100644
index 0000000..fc89738
--- /dev/null
+++ b/lib/galaxy/managers/__init__.py
@@ -0,0 +1,33 @@
+"""
+Classes that manage resources (models, tools, etc.) by using the current
+Transaction.
+
+Encapsulates the intersection of trans (or trans.sa_session), models,
+and Controllers.
+
+Responsibilities:
+
+- model operations that involve the trans/sa_session (CRUD)
+- security: ownership, accessibility
+- common aspect-oriented operations via new mixins: sharable, annotatable,
+  taggable, ratable
+
+Not responsible for:
+
+- encoding/decoding ids
+- any http gobbledygook
+- formatting of returned data (always python structures)
+- formatting of raised errors
+
+The goal is to have Controllers only handle:
+
+- query-string/payload parsing and encoding/decoding ids
+- http
+- return formatting
+- control, improve namespacing in Controllers
+- DRY for Controller ops (define here - use in both UI/API Controllers)
+
+In other words, 'Business logic' independent of web transactions/user context
+(trans) should be pushed into models - but logic that requires the context
+trans should be placed under this module.
+"""
diff --git a/lib/galaxy/managers/annotatable.py b/lib/galaxy/managers/annotatable.py
new file mode 100644
index 0000000..7a0aa65
--- /dev/null
+++ b/lib/galaxy/managers/annotatable.py
@@ -0,0 +1,108 @@
+"""
+Mixins for Annotatable model managers and serializers.
+"""
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# This is extracted here for use in both the manager *and* the serializer; ideally
+# the serializer would use self.manager.annotation, but history_contents has no self.manager
+# TODO: fix
+def _match_by_user( item, user ):
+    if not user:
+        return None
+    for annotation in item.annotations:
+        if annotation.user_id == user.id:
+            return annotation.annotation
+    return None
+
+
+class AnnotatableManagerMixin( object ):
+    #: class of AnnotationAssociation (e.g. HistoryAnnotationAssociation)
+    annotation_assoc = None
+
+    def annotation( self, item ):
+        """
+        Return the annotation string made by the `item`'s owner or `None` if there
+        is no annotation.
+        """
+        # NOTE: only works with sharable (.user)
+        return self._user_annotation( item, item.user )
+
+    # TODO: should/do we support multiple, non-owner annotation of items?
+    def annotate( self, item, annotation, user=None, flush=True ):
+        """
+        Create a new annotation on `item` or delete the existing if annotation
+        is `None`.
+        """
+        if not user:
+            return None
+        if annotation is None:
+            self._delete_annotation( item, user, flush=flush )
+            return None
+
+        annotation_obj = item.add_item_annotation( self.session(), user, item, annotation )
+        if flush:
+            self.session().flush()
+        return annotation_obj
+
+    def _user_annotation( self, item, user ):
+        return _match_by_user( item, user )
+
+    def _delete_annotation( self, item, user, flush=True ):
+        returned = item.delete_item_annotation( self.session(), user, item )
+        if flush:
+            self.session().flush()
+        return returned
+
+
+class AnnotatableSerializerMixin( object ):
+
+    def add_serializers( self ):
+        self.serializers[ 'annotation' ] = self.serialize_annotation
+
+    def serialize_annotation( self, item, key, user=None, **context ):
+        """
+        Get and serialize an `item`'s annotation.
+        """
+        annotation = _match_by_user( item, user )
+        return annotation.strip() if annotation else None
+
+
+class AnnotatableDeserializerMixin( object ):
+
+    def add_deserializers( self ):
+        self.deserializers[ 'annotation' ] = self.deserialize_annotation
+
+    def deserialize_annotation( self, item, key, val, user=None, **context ):
+        """
+        Make sure `val` is a valid annotation and assign it, deleting any existing
+        if `val` is None.
+        """
+        val = self.validate.nullable_basestring( key, val )
+        return self.manager.annotate( item, val, user=user, flush=False )
+
+
+# TODO: I'm not entirely convinced this (or tags) are a good idea for filters since they involve a/the user
+class AnnotatableFilterMixin( object ):
+
+    def _owner_annotation( self, item ):
+        """
+        Get the annotation by the item's owner.
+        """
+        return _match_by_user( item, item.user )
+
+    def filter_annotation_contains( self, item, val ):
+        """
+        Test whether `val` is in the owner's annotation.
+        """
+        owner_annotation = self._owner_annotation( item )
+        if owner_annotation is None:
+            return False
+        return val in owner_annotation
+
+    def _add_parsers( self ):
+        self.fn_filter_parsers.update({
+            'annotation'    : { 'op': { 'has': self.filter_annotation_contains, } },
+        })
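
A wiring sketch for these mixins, assuming the ModelManager base from galaxy.managers.base and Galaxy's History model (the manager subclass here is illustrative, and app/history/user are stand-ins):

    from galaxy import model
    from galaxy.managers import base, annotatable

    class HistoryManager(base.ModelManager, annotatable.AnnotatableManagerMixin):
        model_class = model.History
        annotation_assoc = model.HistoryAnnotationAssociation

    manager = HistoryManager(app)
    manager.annotate(history, 'trimmed reads', user=user)  # create/replace
    manager.annotate(history, None, user=user)             # delete any existing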
diff --git a/lib/galaxy/managers/api_keys.py b/lib/galaxy/managers/api_keys.py
new file mode 100644
index 0000000..16025bb
--- /dev/null
+++ b/lib/galaxy/managers/api_keys.py
@@ -0,0 +1,26 @@
+"""Manage user API keys."""
+
+class ApiKeyManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def create_api_key( self, user ):
+        guid = self.app.security.get_new_guid()
+        new_key = self.app.model.APIKeys()
+        new_key.user_id = user.id
+        new_key.key = guid
+        sa_session = self.app.model.context
+        sa_session.add( new_key )
+        sa_session.flush()
+        return guid
+
+    def get_or_create_api_key( self, user ):
+        # Logic Galaxy has always used - but it would appear to have a race
+        # condition. Worth fixing? Would kind of need a message queue to fix
+        # in multiple process mode.
+        if user.api_keys:
+            key = user.api_keys[0].key
+        else:
+            key = self.create_api_key( user )
+        return key
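
Usage is a one-liner from anything holding the app (user here is a stand-in):

    key = ApiKeyManager(app).get_or_create_api_key(user)  # reuses user.api_keys[0] if present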
diff --git a/lib/galaxy/managers/base.py b/lib/galaxy/managers/base.py
new file mode 100644
index 0000000..afe2ce2
--- /dev/null
+++ b/lib/galaxy/managers/base.py
@@ -0,0 +1,1149 @@
+"""
+Keeps the older BaseController security and fetching methods and also
+defines a base ModelManager, ModelSerializer, and ModelDeserializer.
+
+ModelManagers are used for operations on models that occur outside the scope of
+a single model object, such as:
+
+- object creation
+- object lookup
+- interactions between 2+ objects of different model classes
+
+(Since these were to replace model Mixins from
+web/framework/base/controller.py the rule of thumb used there also generally
+has been applied here: if it uses the trans or sa_session, put it in a manager
+and not the model.)
+
+ModelSerializers allow flexible conversion of model objects to dictionaries.
+They control what keys are sent, how values are simplified, can remap keys,
+and allow both predefined and user controlled key sets.
+
+ModelDeserializers control how a model validates and processes an incoming
+attribute change to a model object.
+"""
+# TODO: it may be there's a better way to combine the above three classes
+#   such as: a single flat class, serializers being singletons in the manager, etc.
+#   instead of the three separate classes. With no 'apparent' perfect scheme
+#   I'm opting to just keep them separate.
+import datetime
+import logging
+import re
+
+import routes
+import sqlalchemy
+from six import string_types
+
+from galaxy import exceptions
+from galaxy import model
+from galaxy.model import tool_shed_install
+
+log = logging.getLogger( __name__ )
+
+
+# ==== accessors from base/controller.py
+def security_check( trans, item, check_ownership=False, check_accessible=False ):
+    """
+    Security checks for an item: checks if (a) user owns item or (b) item
+    is accessible to user. This is a generic method for dealing with objects
+    uniformly from the older controller mixin code - however whenever possible
+    the managers for a particular model should be used to perform security
+    checks.
+    """
+
+    # all items are accessible to an admin
+    if trans.user_is_admin():
+        return item
+
+    # Verify ownership: there is a current user and that user is the same as the item's
+    if check_ownership:
+        if not trans.user:
+            raise exceptions.ItemOwnershipException( "Must be logged in to manage Galaxy items", type='error' )
+        if item.user != trans.user:
+            raise exceptions.ItemOwnershipException( "%s is not owned by the current user" % item.__class__.__name__, type='error' )
+
+    # Verify accessible:
+    #   if it's part of a lib - can they access via security
+    #   if it's something else (sharable) have they been added to the item's users_shared_with_dot_users
+    if check_accessible:
+        if type( item ) in ( trans.app.model.LibraryFolder, trans.app.model.LibraryDatasetDatasetAssociation, trans.app.model.LibraryDataset ):
+            if not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), item, trans.user ):
+                raise exceptions.ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' )
+        else:
+            if ( item.user != trans.user ) and ( not item.importable ) and ( trans.user not in item.users_shared_with_dot_users ):
+                raise exceptions.ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' )
+    return item
+
+
+def get_class( class_name ):
+    """
+    Returns the class object that a string denotes. Without this method, we'd have
+    to do eval(<class_name>).
+    """
+    if class_name == 'History':
+        item_class = model.History
+    elif class_name == 'HistoryDatasetAssociation':
+        item_class = model.HistoryDatasetAssociation
+    elif class_name == 'Page':
+        item_class = model.Page
+    elif class_name == 'StoredWorkflow':
+        item_class = model.StoredWorkflow
+    elif class_name == 'Visualization':
+        item_class = model.Visualization
+    elif class_name == 'Tool':
+        item_class = model.Tool
+    elif class_name == 'Job':
+        item_class = model.Job
+    elif class_name == 'User':
+        item_class = model.User
+    elif class_name == 'Group':
+        item_class = model.Group
+    elif class_name == 'Role':
+        item_class = model.Role
+    elif class_name == 'Quota':
+        item_class = model.Quota
+    elif class_name == 'Library':
+        item_class = model.Library
+    elif class_name == 'LibraryFolder':
+        item_class = model.LibraryFolder
+    elif class_name == 'LibraryDatasetDatasetAssociation':
+        item_class = model.LibraryDatasetDatasetAssociation
+    elif class_name == 'LibraryDataset':
+        item_class = model.LibraryDataset
+    elif class_name == 'ToolShedRepository':
+        item_class = tool_shed_install.ToolShedRepository
+    else:
+        item_class = None
+    return item_class
+
+
+def decode_id(app, id):
+    try:
+        # note: use str - occasionally a fully numeric id will be placed in post body and parsed as int via JSON
+        #   resulting in error for valid id
+        return app.security.decode_id( str( id ) )
+    except ( ValueError, TypeError ):
+        msg = "Malformed id ( %s ) specified, unable to decode" % ( str( id ) )
+        raise exceptions.MalformedId( msg, id=str( id ) )
+
+
+def get_object( trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ):
+    """
+    Convenience method to get a model object with the specified checks. This is
+    a generic method for dealing with objects uniformly from the older
+    controller mixin code - however whenever possible the managers for a
+    particular model should be used to load objects.
+    """
+    decoded_id = decode_id(trans.app, id)
+    try:
+        item_class = get_class( class_name )
+        assert item_class is not None
+        item = trans.sa_session.query( item_class ).get( decoded_id )
+        assert item is not None
+    except Exception:
+        log.exception( "Invalid %s id ( %s ) specified." % ( class_name, id ) )
+        raise exceptions.MessageException( "Invalid %s id ( %s ) specified" % ( class_name, id ), type="error" )
+
+    if check_ownership or check_accessible:
+        security_check( trans, item, check_ownership, check_accessible )
+    if deleted is True and not item.deleted:
+        raise exceptions.ItemDeletionException( '%s "%s" is not deleted'
+                                                % ( class_name, getattr( item, 'name', id ) ), type="warning" )
+    elif deleted is False and item.deleted:
+        raise exceptions.ItemDeletionException( '%s "%s" is deleted'
+                                                % ( class_name, getattr( item, 'name', id ) ), type="warning" )
+    return item
+
+
+# =============================================================================
+def munge_lists( listA, listB ):
+    """
+    Combine two lists into a single list.
+
+    (While allowing them to be None, non-lists, or lists.)
+    """
+    # TODO: there's nothing specifically filter or model-related here - move to util
+    if listA is None:
+        return listB
+    if listB is None:
+        return listA
+    if not isinstance( listA, list ):
+        listA = [ listA ]
+    if not isinstance( listB, list ):
+        listB = [ listB ]
+    return listA + listB
+
+
+# -----------------------------------------------------------------------------
+class ModelManager( object ):
+    """
+    Base class for all model/resource managers.
+
+    Provides common queries and CRUD operations as a (hopefully) light layer
+    over the ORM.
+    """
+    model_class = object
+    foreign_key_name = None
+
+    def __init__( self, app ):
+        self.app = app
+
+    def session( self ):
+        return self.app.model.context
+
+    def _session_setattr( self, item, attr, val, fn=None, flush=True ):
+        if fn:
+            fn( item, attr, val )
+        else:
+            setattr( item, attr, val )
+
+        self.session().add( item )
+        if flush:
+            self.session().flush()
+        return item
+
+    # .... query foundation wrapper
+    def query( self, eagerloads=True, **kwargs ):
+        """
+        Return a basic query from model_class, filters, order_by, and limit and offset.
+
+        Set eagerloads to False to disable them for this query.
+        """
+        query = self.session().query( self.model_class )
+        # joined table loading
+        if eagerloads is False:
+            query = query.enable_eagerloads( False )
+        return self._filter_and_order_query( query, **kwargs )
+
+    def _filter_and_order_query( self, query, filters=None, order_by=None, limit=None, offset=None, **kwargs ):
+        # TODO: not a lot of functional cohesion here
+        query = self._apply_orm_filters( query, filters )
+        query = self._apply_order_by( query, order_by )
+        query = self._apply_orm_limit_offset( query, limit, offset )
+        return query
+
+    # .... filters
+    def _apply_orm_filters( self, query, filters ):
+        """
+        Add any filters to the given query.
+        """
+        if filters is None:
+            return query
+
+        if not isinstance( filters, list ):
+            filters = [ filters ]
+        # note: implicit AND
+        for filter in filters:
+            query = query.filter( filter )
+        return query
+
+    def _munge_filters( self, filtersA, filtersB ):
+        """
+        Combine two lists into a single list.
+
+        (While allowing them to be None, non-lists, or lists.)
+        """
+        return munge_lists( filtersA, filtersB )
+
+    # .... order, limit, and offset
+    def _apply_order_by( self, query, order_by ):
+        """
+        Return the query after adding the order_by clauses.
+
+        Use the manager's default_order_by if order_by is None.
+        """
+        if order_by is None:
+            return query.order_by( *self._default_order_by() )
+
+        if isinstance( order_by, ( list, tuple ) ):
+            return query.order_by( *order_by )
+        return query.order_by( order_by )
+
+    def _default_order_by( self ):
+        """
+        Returns a tuple of columns for the default order when getting multiple models.
+        """
+        return ( self.model_class.create_time, )
+
+    def _apply_orm_limit_offset( self, query, limit, offset ):
+        """
+        Return the query after applying the given limit and offset (if not None).
+        """
+        if limit is not None:
+            query = query.limit( limit )
+        if offset is not None:
+            query = query.offset( offset )
+        return query
+
+    # .... query resolution
+    def one( self, **kwargs ):
+        """
+        Sends kwargs to build the query and returns one and only one model.
+        """
+        query = self.query( **kwargs )
+        return self._one_with_recast_errors( query )
+
+    def _one_with_recast_errors( self, query ):
+        """
+        Call sqlalchemy's one and recast errors to serializable errors if any.
+
+        :raises exceptions.ObjectNotFound: if no model is found
+        :raises exceptions.InconsistentDatabase: if more than one model is found
+        """
+        # overridden to raise serializable errors
+        try:
+            return query.one()
+        except sqlalchemy.orm.exc.NoResultFound:
+            raise exceptions.ObjectNotFound( self.model_class.__name__ + ' not found' )
+        except sqlalchemy.orm.exc.MultipleResultsFound:
+            raise exceptions.InconsistentDatabase( 'found more than one ' + self.model_class.__name__ )
+
+    def _one_or_none( self, query ):
+        """
+        Return the object if found, None if it's not.
+
+        :raises exceptions.InconsistentDatabase: if more than one model is found
+        """
+        try:
+            return self._one_with_recast_errors( query )
+        except exceptions.ObjectNotFound:
+            return None
+
+    # NOTE: at this layer, all ids are expected to be decoded and in int form
+    def by_id( self, id, **kwargs ):
+        """
+        Gets a model by primary id.
+        """
+        id_filter = self.model_class.id == id
+        return self.one( filters=id_filter, **kwargs )
+
+    # .... multirow queries
+    def list( self, filters=None, order_by=None, limit=None, offset=None, **kwargs ):
+        """
+        Returns all objects matching the given filters
+        """
+        # list becomes a way of applying both filters generated in the orm (such as .user ==)
+        # and functional filters that aren't currently possible using the orm (such as instance calculated values
+        # or annotations/tags). List splits those two filters and applies limits/offsets
+        # only after functional filters (if any) using python.
+        orm_filters, fn_filters = self._split_filters( filters )
+        if not fn_filters:
+            # if no fn_filtering required, we can use the 'all orm' version with limit offset
+            return self._orm_list( filters=orm_filters, order_by=order_by,
+                limit=limit, offset=offset, **kwargs )
+
+        # fn filters will change the number of items returnable by limit/offset - remove them here from the orm query
+        query = self.query( filters=orm_filters, order_by=order_by, limit=None, offset=None, **kwargs )
+        items = query.all()
+
+        # apply limit, offset after SQL filtering
+        items = self._apply_fn_filters_gen( items, fn_filters )
+        return list( self._apply_fn_limit_offset_gen( items, limit, offset ) )
+
+    def _split_filters( self, filters ):
+        """
+        Splits `filters` into a tuple of two lists:
+            a list of filters to be added to the SQL query
+        and a list of functional filters to be applied after the SQL query.
+        """
+        orm_filters, fn_filters = ( [], [] )
+        if filters is None:
+            return ( orm_filters, fn_filters )
+        if not isinstance( filters, list ):
+            filters = [ filters ]
+        for filter_ in filters:
+            if self._is_fn_filter( filter_ ):
+                fn_filters.append( filter_ )
+            else:
+                orm_filters.append( filter_ )
+        return ( orm_filters, fn_filters )
+
+    def _is_fn_filter( self, filter_ ):
+        """
+        Returns True if `filter_` is a functional filter to be applied after the SQL query.
+        """
+        return callable( filter_ )
+
+    def _orm_list( self, query=None, **kwargs ):
+        """
+        Sends kwargs to build the query and returns all models found.
+        """
+        query = query or self.query( **kwargs )
+        return query.all()
+
+    def _apply_fn_filters_gen( self, items, filters ):
+        """
+        If all the filter functions in `filters` return True for an item in `items`,
+        yield that item.
+        """
+        # cpu-expensive
+        for item in items:
+            filter_results = [f( item ) for f in filters]
+            if all( filter_results ):
+                yield item
+
+    def _apply_fn_limit_offset_gen( self, items, limit, offset ):
+        """
+        Iterate over `items` and begin yielding items after
+        `offset` number of items and stop when we've yielded
+        `limit` number of items.
+        """
+        # change negative limit, offset to None
+        if limit is not None and limit < 0:
+            limit = None
+        if offset is not None and offset < 0:
+            offset = None
+
+        yielded = 0
+        for i, item in enumerate( items ):
+            if offset is not None and i < offset:
+                continue
+            if limit is not None and yielded >= limit:
+                break
+            yield item
+            yielded += 1
+
+    def by_ids( self, ids, filters=None, **kwargs ):
+        """
+        Returns an in-order list of models with the matching ids in `ids`.
+        """
+        if not ids:
+            return []
+        ids_filter = self.model_class.id.in_( ids )
+        found = self.list( filters=self._munge_filters( ids_filter, filters ), **kwargs )
+        # TODO: this does not order by the original 'ids' array
+
+        # ...could use get (supposedly since found are in the session, the db won't be hit twice)
+        # return map( self.session().query( self.model_class ).get, ids )
+
+        # ...could implement own version here - slow?
+        return self._order_items_by_id( ids, found )
+
+    def _order_items_by_id( self, ids, items ):
+        """
+        Given a list of (unique) ids and a list of items having an 'id' attribute,
+        return items that have the given ids in that order.
+
+        If an id in ids is not found or if an item in items doesn't have a given
+        id, they will not be in the returned list.
+        """
+        ID_ATTR_NAME = 'id'
+        # TODO:?? aside from sqlalx.get mentioned above, I haven't seen an in-SQL way
+        #   to make this happen. This may not be the most efficient way either.
+        # NOTE: that this isn't sorting by id - this is matching the order in items to the order in ids
+        # move items list into dict by id
+        item_dict = {}
+        for item in items:
+            item_id = getattr( item, ID_ATTR_NAME, None )
+            if item_id:
+                item_dict[ item_id ] = item
+        # pull from map in order of ids
+        in_order = []
+        for id in ids:
+            if id in item_dict:
+                in_order.append( item_dict[ id ] )
+        return in_order
+
+    def create( self, flush=True, *args, **kwargs ):
+        """
+        Generically create a new model.
+        """
+        # override in subclasses
+        item = self.model_class( *args, **kwargs )
+        self.session().add( item )
+        if flush:
+            self.session().flush()
+        return item
+
+    def copy( self, item, **kwargs ):
+        """
+        Clone or copy an item.
+        """
+        raise exceptions.NotImplemented( 'Abstract method' )
+
+    def update( self, item, new_values, flush=True, **kwargs ):
+        """
+        Given a dictionary of new values, update `item` and return it.
+
+        .. note:: NO validation or deserialization occurs here.
+        """
+        self.session().add( item )
+        for key, value in new_values.items():
+            if hasattr( item, key ):
+                setattr( item, key, value )
+        if flush:
+            self.session().flush()
+        return item
+
+    def associate( self, associate_with, item, foreign_key_name=None ):
+        """
+        Generically associate `item` with `associate_with` based on `foreign_key_name`.
+        """
+        foreign_key_name = foreign_key_name or self.foreign_key_name
+        setattr( associate_with, foreign_key_name, item )
+        return item
+
+    def _foreign_key( self, associated_model_class, foreign_key_name=None ):
+        foreign_key_name = foreign_key_name or self.foreign_key_name
+        return getattr( associated_model_class, foreign_key_name )
+
+    def query_associated( self, associated_model_class, item, foreign_key_name=None ):
+        """
+        Generically query other items that have been associated with this `item`.
+        """
+        foreign_key = self._foreign_key( associated_model_class, foreign_key_name=foreign_key_name )
+        return self.session().query( associated_model_class ).filter( foreign_key == item )
+
+    # a rename of sql DELETE to differentiate from the Galaxy notion of mark_as_deleted
+    # def destroy( self, item, **kwargs ):
+    #    return item
+
+
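Putting the query pieces together, a hedged sketch (the subclass is hypothetical, and ids are already-decoded ints at this layer): SQLAlchemy expressions become ORM filters, callables become fn filters, and list() then applies limit/offset in python.

    class JobManager(ModelManager):
        model_class = model.Job

    jobs = JobManager(app)
    recent_ok = jobs.list(
        filters=[model.Job.state == 'ok',                                  # ORM filter
                 lambda job: (job.tool_id or '').startswith('toolshed')],  # fn filter
        limit=10)
    job = jobs.by_id(42)
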
+# ---- code for classes that use one *main* model manager
+# TODO: this may become unnecessary if we can access managers some other way (class var, app, etc.)
+class HasAModelManager( object ):
+    """
+    Mixin used where serializers, deserializers, filter parsers, etc.
+    need some functionality around the model they're mainly concerned with
+    and would perform that functionality with a manager.
+    """
+
+    #: the class used to create this serializer's generically accessible model_manager
+    model_manager_class = None
+    # examples where this doesn't really work are ConfigurationSerializer (no manager)
+    # and contents (2 managers)
+
+    def __init__( self, app, manager=None, **kwargs ):
+        self._manager = manager
+
+    @property
+    def manager( self ):
+        """Return an appropriate manager if it exists, instantiate if not."""
+        # PRECONDITION: assumes self.app is assigned elsewhere
+        if not self._manager:
+            # TODO: pass this serializer to it
+            self._manager = self.model_manager_class( self.app )
+            # this will error for unset model_manager_class'es
+        return self._manager
+
+
+# ==== SERIALIZERS/to_dict,from_dict
+class ModelSerializingError( exceptions.InternalServerError ):
+    """Thrown when request model values can't be serialized"""
+    pass
+
+
+class ModelDeserializingError( exceptions.ObjectAttributeInvalidException ):
+    """Thrown when an incoming value isn't usable by the model
+    (bad type, out of range, etc.)
+    """
+    pass
+
+
+class SkipAttribute( Exception ):
+    """
+    Raise this inside a serializer to prevent the returned dictionary from having
+    the associated key or value for this attribute.
+    """
+    pass
+
+
+class ModelSerializer( HasAModelManager ):
+    """
+    Turns models into JSONable dicts.
+
+    Maintains a map of requestable keys and the Callable() serializer functions
+    that should be called for those keys.
+    E.g. { 'x' : lambda item, key: item.x, ... }
+
+    Note: if a key to serialize is not listed in the Serializer.serializable_keyset
+    or serializers, it will not be returned.
+
+    To serialize call:
+        my_serializer = MySerializer( app )
+        ...
+        keys_to_serialize = [ 'id', 'name', 'attr1', 'attr2', ... ]
+        item_dict = my_serializer.serialize( my_item, keys_to_serialize )
+    """
+    #: 'service' to use for getting urls - use class var to allow overriding when testing
+    url_for = staticmethod( routes.url_for )
+
+    def __init__( self, app, **kwargs ):
+        """
+        Set up serializer map, any additional serializable keys, and views here.
+        """
+        super( ModelSerializer, self ).__init__( app, **kwargs )
+        self.app = app
+
+        # a list of valid serializable keys that can use the default (string) serializer
+        #   this allows us to: 'mention' the key without adding the default serializer
+        # TODO: we may want to eventually error if a key is requested
+        #   that is in neither serializable_keyset or serializers
+        self.serializable_keyset = set([])
+        # a map of dictionary keys to the functions (often lambdas) that create the values for those keys
+        self.serializers = {}
+        # add subclass serializers defined there
+        self.add_serializers()
+        # update the keyset by the serializers (removing the responsibility from subclasses)
+        self.serializable_keyset.update( self.serializers.keys() )
+
+        # views are collections of serializable attributes (a named array of keys)
+        #   inspired by model.dict_{view}_visible_keys
+        self.views = {}
+        self.default_view = None
+
+    def add_serializers( self ):
+        """
+        Register a map of attribute keys -> serializing functions that will serialize
+        the attribute.
+        """
+        self.serializers.update({
+            'id'            : self.serialize_id,
+            'create_time'   : self.serialize_date,
+            'update_time'   : self.serialize_date,
+        })
+
+    def add_view( self, view_name, key_list, include_keys_from=None ):
+        """
+        Add the list of serializable attributes `key_list` to the serializer's
+        view dictionary under the key `view_name`.
+
+        If `include_keys_from` is a proper view name, extend `key_list` by
+        the list in that view.
+        """
+        key_list = list( set( key_list + self.views.get( include_keys_from, [] ) ) )
+        self.views[ view_name ] = key_list
+        self.serializable_keyset.update( key_list )
+        return key_list
+
+    def serialize( self, item, keys, **context ):
+        """
+        Serialize the model `item` to a dictionary.
+
+        Given model `item` and the list `keys`, create and return a dictionary
+        built from each key in `keys` that also exists in `serializers` and
+        values of calling the keyed/named serializers on item.
+        """
+        # TODO: constrain context to current_user/whos_asking when that's all we need (trans)
+        returned = {}
+        for key in keys:
+            # check both serializers and serializable keys
+            if key in self.serializers:
+                try:
+                    returned[ key ] = self.serializers[ key ]( item, key, **context )
+                except SkipAttribute:
+                    # don't add this key if the serializer raised SkipAttribute
+                    pass
+            elif key in self.serializable_keyset:
+                returned[ key ] = self.default_serializer( item, key, **context )
+            # ignore bad/unreg keys
+        return returned
+
+    def skip( self, msg='skipped' ):
+        """
+        To be called from inside a serializer to skip it.
+
+        Handy for config checks, information hiding, etc.
+        """
+        raise SkipAttribute( msg )
+
+    def _remap_from( self, original_key ):
+        if original_key in self.serializers:
+            return self.serializers[ original_key ]
+        if original_key in self.serializable_keyset:
+            return lambda i, k, **c: self.default_serializer( i, original_key, **c )
+        raise KeyError( 'serializer not found for remap: ' + original_key )
+
+    def default_serializer( self, item, key, **context ):
+        """
+        Serialize the `item`'s attribute named `key`.
+        """
+        # TODO:?? point of change but not really necessary?
+        return getattr( item, key )
+
+    # serializers for common galaxy objects
+    def serialize_date( self, item, key, **context ):
+        """
+        Serialize a date attribute of `item`.
+        """
+        date = getattr( item, key )
+        return date.isoformat() if date is not None else None
+
+    def serialize_id( self, item, key, **context ):
+        """
+        Serialize an id attribute of `item`.
+        """
+        id = getattr( item, key )
+        # Note: it may not be best to encode the id at this layer
+        return self.app.security.encode_id( id ) if id is not None else None
+
+    def serialize_type_id( self, item, key, **context ):
+        """
+        Serialize a type-id for `item`.
+        """
+        TYPE_ID_SEP = '-'
+        type_id = getattr( item, key )
+        if type_id is None:
+            return None
+        split = type_id.split( TYPE_ID_SEP, 1 )
+        # Note: it may not be best to encode the id at this layer
+        return TYPE_ID_SEP.join([ split[0], self.app.security.encode_id( split[1] )])
+
+    # serializing to a view where a view is a predefined list of keys to serialize
+    def serialize_to_view( self, item, view=None, keys=None, default_view=None, **context ):
+        """
+        Use a predefined list of keys (the string `view`) and any additional keys
+        listed in `keys`.
+
+        The combinations can be:
+            `view` only: return those keys listed in the named view
+            `keys` only: return those keys listed
+            no `view` or `keys`: use the `default_view` if any
+            `view` and `keys`: combine both into one list of keys
+        """
+
+        # TODO: default view + view makes no sense outside the API.index context - move default view there
+        all_keys = []
+        keys = keys or []
+        # choosing explicit over concise here
+        if view:
+            if keys:
+                all_keys = self._view_to_keys( view ) + keys
+            else:
+                all_keys = self._view_to_keys( view )
+        else:
+            if keys:
+                all_keys = keys
+            elif default_view:
+                all_keys = self._view_to_keys( default_view )
+
+        return self.serialize( item, all_keys, **context )
+
+    def _view_to_keys( self, view=None ):
+        """
+        Converts a known view into a list of keys.
+
+        :raises ModelSerializingError: if the view is not listed in `self.views`.
+        """
+        if view is None:
+            view = self.default_view
+        if view not in self.views:
+            raise ModelSerializingError( 'unknown view', view=view, available_views=self.views )
+        return self.views[ view ][:]
+
+
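A minimal concrete serializer under this scheme (names hypothetical; real serializers register many more keys, and app/history are stand-ins):

    class HistorySerializer(ModelSerializer):
        def __init__(self, app, **kwargs):
            super(HistorySerializer, self).__init__(app, **kwargs)
            self.default_view = 'summary'
            self.add_view('summary', ['id', 'name', 'create_time'])

        def add_serializers(self):
            super(HistorySerializer, self).add_serializers()
            self.serializable_keyset.add('name')  # uses the default (getattr) serializer

    serializer = HistorySerializer(app)
    serializer.serialize_to_view(history, view='summary')
    # => {'id': <encoded id>, 'name': ..., 'create_time': '2016-...'}
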
+class ModelDeserializer( HasAModelManager ):
+    """
+    An object that converts an incoming serialized dict into values that can be
+    directly assigned to an item's attributes and assigns them.
+    """
+    # TODO:?? a larger question is: which should be first? Deserialize then validate - or - validate then deserialize?
+
+    def __init__( self, app, validator=None, **kwargs ):
+        """
+        Set up deserializers and validator.
+        """
+        super( ModelDeserializer, self ).__init__( app, **kwargs )
+        self.app = app
+
+        self.deserializers = {}
+        self.deserializable_keyset = set([])
+        self.add_deserializers()
+        # a sub object that can validate incoming values
+        self.validate = validator or ModelValidator( self.app )
+
+    def add_deserializers( self ):
+        """
+        Register a map of attribute keys -> functions that will deserialize data
+        into attributes to be assigned to the item.
+        """
+        # to be overridden in subclasses
+        pass
+
+    def deserialize( self, item, data, flush=True, **context ):
+        """
+        Convert an incoming serialized dict into values that can be
+        directly assigned to an item's attributes and assign them
+        """
+        # TODO: constrain context to current_user/whos_asking when that's all we need (trans)
+        sa_session = self.app.model.context
+        new_dict = {}
+        for key, val in data.items():
+            if key in self.deserializers:
+                new_dict[ key ] = self.deserializers[ key ]( item, key, val, **context )
+            # !important: don't error on unreg. keys -- many clients will add weird ass keys onto the model
+
+        # TODO:?? add and flush here or in manager?
+        if flush and len( new_dict ):
+            sa_session.add( item )
+            sa_session.flush()
+
+        return new_dict
+
+    # ... common deserializers for primitives
+    def default_deserializer( self, item, key, val, **context ):
+        """
+        If the incoming `val` is different than the `item` value change it
+        and, in either case, return the value.
+        """
+        # TODO: sets the item attribute to value (this may not work in all instances)
+
+        # only set the attribute if the incoming val differs from the current value
+        if hasattr( item, key ) and getattr( item, key ) != val:
+            setattr( item, key, val )
+        return val
+
+    def deserialize_basestring( self, item, key, val, convert_none_to_empty=False, **context ):
+        val = '' if ( convert_none_to_empty and val is None ) else self.validate.basestring( key, val )
+        return self.default_deserializer( item, key, val, **context )
+
+    def deserialize_bool( self, item, key, val, **context ):
+        val = self.validate.bool( key, val )
+        return self.default_deserializer( item, key, val, **context )
+
+    def deserialize_int( self, item, key, val, min=None, max=None, **context ):
+        val = self.validate.int_range( key, val, min, max )
+        return self.default_deserializer( item, key, val, **context )
+
+    # def deserialize_date( self, item, key, val ):
+    #    #TODO: parse isoformat date into date object
+
+    # ... common deserializers for Galaxy
+    def deserialize_genome_build( self, item, key, val, **context ):
+        """
+        Make sure `val` is a valid dbkey and assign it.
+        """
+        val = self.validate.genome_build( key, val )
+        return self.default_deserializer( item, key, val, **context )
+
+
+class ModelValidator( HasAModelManager ):
+    """
+    An object that inspects a dictionary (generally meant to be a set of
+    new/updated values for the model) and raises an error if a value is
+    not acceptable.
+    """
+
+    def __init__( self, app, *args, **kwargs ):
+        super( ModelValidator, self ).__init__( app, **kwargs )
+        self.app = app
+
+    def type( self, key, val, types ):
+        """
+        Check `val` against the type (or tuple of types) in `types`.
+
+        :raises exceptions.RequestParameterInvalidException: if not an instance.
+        """
+        if not isinstance( val, types ):
+            msg = 'must be a type: %s' % ( str( types ) )
+            raise exceptions.RequestParameterInvalidException( msg, key=key, val=val )
+        return val
+
+    # validators for primitives and compounds of primitives
+    def basestring( self, key, val ):
+        return self.type( key, val, string_types )
+
+    def bool( self, key, val ):
+        return self.type( key, val, bool )
+
+    def int( self, key, val ):
+        return self.type( key, val, int )
+
+    def nullable_basestring( self, key, val ):
+        """
+        Must be a basestring or None.
+        """
+        return self.type( key, val, ( string_types, type( None ) ) )
+
+    def int_range( self, key, val, min=None, max=None ):
+        """
+        Must be an int between min and max.
+        """
+        val = self.type( key, val, int )
+        if min is not None and val < min:
+            raise exceptions.RequestParameterInvalidException( "less than minimum", key=key, val=val, min=min )
+        if max is not None and val > max:
+            raise exceptions.RequestParameterInvalidException( "greater than maximum", key=key, val=val, max=max )
+        return val
+
+    def basestring_list( self, key, val ):
+        """
+        Must be a list of basestrings.
+        """
+        # TODO: Here's where compound types start becoming a nightmare. Anything more numerous
+        #   or more complex and we should find a different way.
+        val = self.type( key, val, list )
+        return [ self.basestring( key, elem ) for elem in val ]
+
+    # validators for Galaxy
+    def genome_build( self, key, val ):
+        """
+        Must be a valid basestring.
+
+        Note: no checking against installation's ref list is done as many
+        data sources consider this an open field.
+        """
+        # TODO: is this correct?
+        if val is None:
+            return '?'
+        # currently, data source sites like UCSC are able to set the genome build to non-local build names
+        # afterwards, attempting to validate the whole model will choke here
+        # for genome_build_shortname, longname in self.app.genome_builds.get_genome_build_names( trans=trans ):
+        #     if val == genome_build_shortname:
+        #         return val
+        # raise exceptions.RequestParameterInvalidException( "invalid reference", key=key, val=val )
+        # IOW: fallback to string validation
+        return self.basestring( key, val )
+
+    # def slug( self, item, key, val ):
+    #    """validate slug"""
+    #    pass
+
+
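And a matching deserializer sketch (keys hypothetical; app/history are stand-ins), showing that unregistered keys are skipped while the validator raises on bad values:

    class HistoryDeserializer(ModelDeserializer):
        def add_deserializers(self):
            self.deserializers['name'] = self.deserialize_basestring
            self.deserializers['genome_build'] = self.deserialize_genome_build

    deserializer = HistoryDeserializer(app)
    deserializer.deserialize(history, {'name': 'new name', 'bogus': 1})
    # only 'name' is assigned and flushed; 'bogus' is ignored, and a
    # non-string 'name' raises RequestParameterInvalidException
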
+# ==== Building query filters based on model data
+class ModelFilterParser( HasAModelManager ):
+    """
+    Converts string tuples (partially converted query string params) of
+    attr, op, val into either:
+
+    - ORM based filters (filters that can be applied by the ORM at the SQL
+      level) or
+    - functional filters (filters that use derived values or values not
+      within the SQL tables)
+
+    These filters can then be applied to queries.
+
+    This abstraction allows 'smarter' application of limit and offset at either the
+    SQL level or the generator/list level based on the presence of functional
+    filters. In other words, if no functional filters are present, limit and offset
+    may be applied at the SQL level. If functional filters are present, limit and
+    offset need to be applied at the list level.
+
+    These might safely be replaced in the future by creating SQLAlchemy
+    hybrid properties or more thoroughly mapping derived values.
+    """
+    # ??: this class kind of 'lives' in both the world of the controllers/param-parsing and of models/orm
+    # (as the model informs how the filter params are parsed)
+    # I have no great idea where this 'belongs', so it's here for now
+
+    #: model class
+    model_class = None
+
+    def __init__( self, app, **kwargs ):
+        """
+        Set up serializer map, any additional serializable keys, and views here.
+        """
+        super( ModelFilterParser, self ).__init__( app, **kwargs )
+        self.app = app
+
+        #: regex for testing/dicing iso8601 date strings, with optional time and ms, but allowing only UTC timezone
+        self.date_string_re = re.compile( r'^(\d{4}\-\d{2}\-\d{2})[T| ]{0,1}(\d{2}:\d{2}:\d{2}(?:\.\d{1,6}){0,1}){0,1}Z{0,1}$' )
+
+        # dictionary containing parsing data for ORM/SQLAlchemy-based filters
+        # ..note: although kind of a pain in the ass and verbose, opt-in/whitelisting allows more control
+        #   over potentially expensive queries
+        self.orm_filter_parsers = {}
+
+        #: dictionary containing parsing data for functional filters - applied after a query is made
+        self.fn_filter_parsers = {}
+
+        # set up both of the above
+        self._add_parsers()
+
+    def _add_parsers( self ):
+        """
+        Set up, extend, or alter `orm_filter_parsers` and `fn_filter_parsers`.
+        """
+        # note: these are the default filters for all models
+        self.orm_filter_parsers.update({
+            # (prob.) applicable to all models
+            'id'            : { 'op': ( 'in', ) },
+            'encoded_id'    : { 'column' : 'id', 'op': ( 'in', ), 'val': self.parse_id_list },
+            # dates can be directly passed through the orm into a filter (no need to parse into datetime object)
+            'create_time'   : { 'op': ( 'le', 'ge' ), 'val': self.parse_date },
+            'update_time'   : { 'op': ( 'le', 'ge' ), 'val': self.parse_date },
+        })
+
+    def parse_filters( self, filter_tuple_list ):
+        """
+        Parse string 3-tuples (attr, op, val) into orm or functional filters.
+        """
+        # TODO: allow defining the default filter op in this class (and not 'eq' in base/controller.py)
+        parsed = []
+        for ( attr, op, val ) in filter_tuple_list:
+            filter_ = self.parse_filter( attr, op, val )
+            parsed.append( filter_ )
+        return parsed
+
+    def parse_filter( self, attr, op, val ):
+        """
+        Attempt to parse filter as a custom/fn filter, then an orm filter, and
+        if neither work - raise an error.
+
+        :raises exceptions.RequestParameterInvalidException: if no functional or orm
+            filter can be parsed.
+        """
+        try:
+            # check for a custom filter
+            fn_filter = self._parse_fn_filter( attr, op, val )
+            if fn_filter is not None:
+                return fn_filter
+
+            # if no custom filter found, try to make an ORM filter
+            # note: have to use an explicit 'is None' here - SQLAlchemy filter expressions don't support bool()
+            orm_filter = self._parse_orm_filter( attr, op, val )
+            if orm_filter is not None:
+                return orm_filter
+
+        # by convention, assume most val parsers raise ValueError
+        except ValueError as val_err:
+            raise exceptions.RequestParameterInvalidException( 'unparsable value for filter',
+                column=attr, operation=op, value=val, ValueError=str( val_err ) )
+
+        # if neither of the above work, raise an error with how-to info
+        # TODO: send back all valid filter keys in exception for added user help
+        raise exceptions.RequestParameterInvalidException( 'bad filter', column=attr, operation=op )
+
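+    # Illustrative sketch (hypothetical subclass and data, not part of this
+    # module): a parser that whitelists an `email` column could turn partially
+    # parsed query string tuples into SQLAlchemy expressions like so:
+    #
+    #     class UserFilterParser( ModelFilterParser ):
+    #         model_class = model.User
+    #
+    #         def _add_parsers( self ):
+    #             super( UserFilterParser, self )._add_parsers()
+    #             self.orm_filter_parsers.update({
+    #                 'email' : { 'op': ( 'eq', 'contains' ) },
+    #             })
+    #
+    #     parser = UserFilterParser( app )
+    #     filters = parser.parse_filters([ ( 'email', 'contains', '@example.org' ) ])
+    #     # each resulting ORM filter can then be applied with query.filter( ... )
+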
+    # ---- fn filters
+    def _parse_fn_filter( self, attr, op, val ):
+        """
+        Attempt to parse a non-ORM filter function.
+        """
+        # fn_filter_parsers is a dict: fn_filter_parsers[ attr ] = { 'opname1' : opfn1, 'opname2' : opfn2, etc. }
+
+        # attr, op is a nested dictionary pointing to the filter fn
+        attr_map = self.fn_filter_parsers.get( attr, None )
+        if not attr_map:
+            return None
+        allowed_ops = attr_map.get( 'op' )
+        # allowed ops is a map here, op => fn
+        filter_fn = allowed_ops.get( op, None )
+        if not filter_fn:
+            return None
+        # parse the val from string using the 'val' parser if present (otherwise, leave as string)
+        val_parser = attr_map.get( 'val', None )
+        if val_parser:
+            val = val_parser( val )
+
+        # curry/partial and fold the val in there now
+        return lambda i: filter_fn( i, val )
+
+    # ---- ORM filters
+    def _parse_orm_filter( self, attr, op, val ):
+        """
+        Attempt to parse an ORM-based filter.
+
+        Using SQLAlchemy, this would yield a sql.elements.BinaryExpression.
+        """
+        # orm_filter_parsers is a dict: orm_filter_parsers[ attr ] = <parsing data, including allowed ops>
+        column_map = self.orm_filter_parsers.get( attr, None )
+        if not column_map:
+            # no column mapping (not whitelisted)
+            return None
+        # attr must be a whitelisted column by attr name or by key passed in column_map
+        # note: column_map[ 'column' ] takes precedence
+        if 'column' in column_map:
+            attr = column_map[ 'column' ]
+        column = self.model_class.table.columns.get( attr )
+        if column is None:
+            # could be a property (hybrid_property, etc.) - assume we can make a filter from it
+            column = getattr( self.model_class, attr )
+        if column is None:
+            # no orm column
+            return None
+
+        # op must be whitelisted: contained in orm_filter_parsers[ attr ][ 'op' ]
+        allowed_ops = column_map.get( 'op' )
+        if op not in allowed_ops:
+            return None
+        op = self._convert_op_string_to_fn( column, op )
+        if not op:
+            return None
+
+        # parse the val from string using the 'val' parser if present (otherwise, leave as string)
+        val_parser = column_map.get( 'val', None )
+        if val_parser:
+            val = val_parser( val )
+
+        orm_filter = op( val )
+        return orm_filter
+
+    #: these are the easier/shorter string equivalents to the python operator fn names that need '__' around them
+    UNDERSCORED_OPS = ( 'lt', 'le', 'eq', 'ne', 'ge', 'gt' )
+
+    def _convert_op_string_to_fn( self, column, op_string ):
+        """
+        Convert the query string filter op shorthand into actual ORM usable
+        function names, then return the ORM function.
+        """
+        # correct op_string to usable function key
+        fn_name = op_string
+        if op_string in self.UNDERSCORED_OPS:
+            fn_name = '__' + op_string + '__'
+        elif op_string == 'in':
+            fn_name = 'in_'
+
+        # get the column fn using the op_string and error if not a callable attr
+        # TODO: special case 'not in' - or disallow?
+        op_fn = getattr( column, fn_name, None )
+        if not op_fn or not callable( op_fn ):
+            return None
+        return op_fn
+
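+    # For example (illustrative only): 'le' becomes column.__le__ and 'in'
+    # becomes column.in_, so the tuple ( 'create_time', 'le', '2015-01-01' )
+    # ends up as column.__le__( '2015-01-01' ), i.e. create_time <= val.
+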
+    # ---- preset fn_filters: dictionaries of standard filter ops for standard datatypes
+    def string_standard_ops( self, key ):
+        return {
+            'op' : {
+                'eq'        : lambda i, v: v == getattr( i, key ),
+                'contains'  : lambda i, v: v in getattr( i, key ),
+            }
+        }
+
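+    # A subclass would opt an attribute into these standard string ops like so
+    # (sketch, assuming the model has a `name` attribute):
+    #
+    #     self.fn_filter_parsers.update({ 'name': self.string_standard_ops( 'name' ) })
+    #
+    # after which ( 'name', 'contains', 'tutorial' ) parses to a function that
+    # can be applied to each item in a result list.
+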
+    # --- more parsers! yay!
+    # TODO: These should go somewhere central - we've got ~6 parser modules/sections now
+    def parse_bool( self, bool_string ):
+        """
+        Parse a boolean from a string.
+        """
+        # Be strict here to remove complexity of options (but allow already parsed).
+        if bool_string in ( 'True', True ):
+            return True
+        if bool_string in ( 'False', False ):
+            return False
+        raise ValueError( 'invalid boolean: ' + str( bool_string ) )
+
+    def parse_id_list( self, id_list_string, sep=',' ):
+        """
+        Split `id_list_string` at `sep`.
+        """
+        # TODO: move id decoding out
+        id_list = [ self.app.security.decode_id( id_ ) for id_ in id_list_string.split( sep ) ]
+        return id_list
+
+    def parse_int_list( self, int_list_string, sep=',' ):
+        """
+        Split `int_list_string` at `sep` and parse as ints.
+        """
+        # TODO: move id decoding out
+        int_list = [ int( v ) for v in int_list_string.split( sep ) ]
+        return int_list
+
+    def parse_date( self, date_string ):
+        """
+        Reformats a string containing either seconds from epoch or an ISO 8601
+        formatted date string into a new date string usable within a filter query.
+
+        Seconds from epoch can be a floating point value as well (i.e. containing ms).
+        """
+        # assume it's epoch if no date separator is present
+        try:
+            epoch = float( date_string )
+            datetime_obj = datetime.datetime.fromtimestamp( epoch )
+            return datetime_obj.isoformat( sep=' ' )
+        except ValueError:
+            pass
+
+        match = self.date_string_re.match( date_string )
+        if match:
+            date_string = ' '.join([ group for group in match.groups() if group ])
+            return date_string
+        raise ValueError( 'datetime strings must be in ISO 8601 format and UTC' )
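+
+
+# A minimal sketch of the inputs `parse_date` accepts (illustrative only;
+# note that `datetime.fromtimestamp` renders epoch input in the server's
+# local timezone):
+#
+#     parser.parse_date( '2015-03-01T12:00:00Z' )  # -> '2015-03-01 12:00:00'
+#     parser.parse_date( '2015-03-01' )            # -> '2015-03-01'
+#     parser.parse_date( '1425211200' )            # epoch seconds also accepted
+#     parser.parse_date( 'last tuesday' )          # raises ValueError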
diff --git a/lib/galaxy/managers/citations.py b/lib/galaxy/managers/citations.py
new file mode 100644
index 0000000..ef83443
--- /dev/null
+++ b/lib/galaxy/managers/citations.py
@@ -0,0 +1,167 @@
+import functools
+import os
+import urllib2
+
+from beaker.cache import CacheManager
+from beaker.util import parse_cache_config_options
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class CitationsManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.doi_cache = DoiCache( app.config )
+
+    def citations_for_tool( self, tool ):
+        return tool.citations
+
+    def citations_for_tool_ids( self, tool_ids ):
+        citation_collection = CitationCollection()
+        for tool_id in tool_ids:
+            tool = self._get_tool( tool_id )
+            for citation in self.citations_for_tool( tool ):
+                citation_collection.add( citation )
+        return citation_collection.citations
+
+    def parse_citation( self, citation_elem, tool_directory ):
+        return parse_citation( citation_elem, tool_directory, self )
+
+    def _get_tool( self, tool_id ):
+        tool = self.app.toolbox.get_tool( tool_id )
+        return tool
+
+
+class DoiCache( object ):
+
+    def __init__( self, config ):
+        cache_opts = {
+            'cache.type': getattr( config, 'citation_cache_type', 'file'),
+            'cache.data_dir': getattr( config, 'citation_cache_data_dir', None),
+            'cache.lock_dir': getattr( config, 'citation_cache_lock_dir', None),
+        }
+        self._cache = CacheManager(**parse_cache_config_options(cache_opts)).get_cache('doi')
+
+    def _raw_get_bibtex( self, doi ):
+        dx_url = "http://dx.doi.org/" + doi
+        headers = {'Accept': 'text/bibliography; style=bibtex, application/x-bibtex'}
+        req = urllib2.Request(dx_url, headers=headers)
+        response = urllib2.urlopen(req)
+        bibtex = response.read()
+        return bibtex
+
+    def get_bibtex( self, doi ):
+        createfunc = functools.partial(self._raw_get_bibtex, doi)
+        return self._cache.get(key=doi, createfunc=createfunc)
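+
+# Usage sketch (illustrative - the DOI below is a hypothetical example): the
+# first call fetches over the network; subsequent calls for the same DOI are
+# served from the beaker cache configured above.
+#
+#     doi_cache = DoiCache( app.config )
+#     bibtex = doi_cache.get_bibtex( '10.1000/xyz123' )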
+
+
+def parse_citation( elem, directory, citation_manager ):
+    """ Parse an abstract citation entry from the specified XML element.
+    The directory parameter should be used to find external files for this
+    citation.
+    """
+    citation_type = elem.attrib.get( 'type', None )
+    citation_class = CITATION_CLASSES.get( citation_type, None )
+    if not citation_class:
+        log.warning("Unknown or unspecified citation type: %s" % citation_type)
+        return None
+    return citation_class( elem, directory, citation_manager )
+
+
+class CitationCollection( object ):
+
+    def __init__( self ):
+        self.citations = []
+
+    def __iter__( self ):
+        return self.citations.__iter__()
+
+    def __len__( self ):
+        return len( self.citations )
+
+    def add( self, new_citation ):
+        for citation in self.citations:
+            if citation.equals( new_citation ):
+                # TODO: We have two equivalent citations, pick the more
+                # informative/complete/correct.
+                return False
+
+        self.citations.append( new_citation )
+        return True
+
+
+class BaseCitation( object ):
+
+    def to_dict( self, citation_format ):
+        if citation_format == "bibtex":
+            return dict(
+                format="bibtex",
+                content=self.to_bibtex(),
+            )
+        else:
+            raise Exception("Unknown citation format %s" % citation_format)
+
+    def equals( self, other_citation ):
+        if self.has_doi() and other_citation.has_doi():
+            return self.doi() == other_citation.doi()
+        else:
+            # TODO: Do a better job figuring out if this is the same citation.
+            return self.to_bibtex() == other_citation.to_bibtex()
+
+    def has_doi( self ):
+        return False
+
+
+class BibtexCitation( BaseCitation ):
+
+    def __init__( self, elem, directory, citation_manager ):
+        bibtex_file = elem.attrib.get("file", None)
+        if bibtex_file:
+            with open(os.path.join(directory, bibtex_file), "r") as handle:
+                raw_bibtex = handle.read()
+        else:
+            raw_bibtex = elem.text.strip()
+        self._set_raw_bibtex( raw_bibtex )
+
+    def _set_raw_bibtex( self, raw_bibtex ):
+        self.raw_bibtex = raw_bibtex
+
+    def to_bibtex( self ):
+        return self.raw_bibtex
+
+
+class DoiCitation( BaseCitation ):
+    BIBTEX_UNSET = object()
+
+    def __init__( self, elem, directory, citation_manager ):
+        self.__doi = elem.text.strip()
+        self.doi_cache = citation_manager.doi_cache
+        self.raw_bibtex = DoiCitation.BIBTEX_UNSET
+
+    def has_doi( self ):
+        return True
+
+    def doi( self ):
+        return self.__doi
+
+    def to_bibtex( self ):
+        if self.raw_bibtex is DoiCitation.BIBTEX_UNSET:
+            try:
+                self.raw_bibtex = self.doi_cache.get_bibtex(self.__doi)
+            except Exception:
+                log.exception("Failed to fetch bibtex for DOI %s" % self.__doi)
+
+        if self.raw_bibtex is DoiCitation.BIBTEX_UNSET:
+            return """@MISC{%s,
+                DOI = {%s},
+                note = {Failed to fetch BibTeX for DOI.}
+            }""" % (self.__doi, self.__doi)
+        else:
+            return self.raw_bibtex
+
+
+CITATION_CLASSES = dict(
+    bibtex=BibtexCitation,
+    doi=DoiCitation,
+)
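+
+# For reference, the tool XML forms dispatched above (DOI value illustrative):
+#
+#     <citation type="doi">10.1000/xyz123</citation>
+#     <citation type="bibtex">@ARTICLE{example, ...}</citation>
+#
+# `parse_citation` looks up the `type` attribute in CITATION_CLASSES and
+# delegates construction to the matching class.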
diff --git a/lib/galaxy/managers/collections.py b/lib/galaxy/managers/collections.py
new file mode 100644
index 0000000..24dbdff
--- /dev/null
+++ b/lib/galaxy/managers/collections.py
@@ -0,0 +1,338 @@
+from galaxy import model
+from galaxy.dataset_collections import builder
+from galaxy.dataset_collections.matching import MatchingCollections
+from galaxy.dataset_collections.registry import DatasetCollectionTypesRegistry
+from galaxy.dataset_collections.type_description import CollectionTypeDescriptionFactory
+from galaxy.exceptions import ItemAccessibilityException
+from galaxy.exceptions import MessageException
+from galaxy.exceptions import RequestParameterInvalidException
+from galaxy.managers import hdas  # TODO: Refactor all mixin use into managers.
+from galaxy.managers import histories
+from galaxy.managers import lddas
+from galaxy.managers import tags
+from galaxy.managers.collections_util import validate_input_element_identifiers
+from galaxy.util import odict
+from galaxy.util import validation
+import logging
+log = logging.getLogger( __name__ )
+
+
+ERROR_INVALID_ELEMENTS_SPECIFICATION = "Create called with invalid parameters, must specify element identifiers."
+ERROR_NO_COLLECTION_TYPE = "Create called without specifying a collection type."
+
+
+class DatasetCollectionManager( object ):
+    """
+    Abstraction for interfacing with dataset collection instances - ideally abstracts
+    out model and plugin details.
+    """
+    ELEMENTS_UNINITIALIZED = object()
+
+    def __init__( self, app ):
+        self.type_registry = DatasetCollectionTypesRegistry( app )
+        self.collection_type_descriptions = CollectionTypeDescriptionFactory( self.type_registry )
+        self.model = app.model
+        self.security = app.security
+
+        self.hda_manager = hdas.HDAManager( app )
+        self.history_manager = histories.HistoryManager( app )
+        self.tag_manager = tags.TagManager( app )
+        self.ldda_manager = lddas.LDDAManager( app )
+
+    def create(
+        self,
+        trans,
+        parent,
+        # PRECONDITION: security checks on ability to add to parent
+        # occurred during load.
+        name,
+        collection_type,
+        element_identifiers=None,
+        elements=None,
+        implicit_collection_info=None,
+        trusted_identifiers=None,  # Trust preloaded element objects
+    ):
+        """
+        """
+        # Trust embedded, newly created objects created by tool subsystem.
+        if trusted_identifiers is None:
+            trusted_identifiers = implicit_collection_info is not None
+
+        if element_identifiers and not trusted_identifiers:
+            validate_input_element_identifiers( element_identifiers )
+
+        dataset_collection = self.create_dataset_collection(
+            trans=trans,
+            collection_type=collection_type,
+            element_identifiers=element_identifiers,
+            elements=elements,
+        )
+
+        if isinstance( parent, model.History ):
+            dataset_collection_instance = self.model.HistoryDatasetCollectionAssociation(
+                collection=dataset_collection,
+                name=name,
+            )
+            if implicit_collection_info:
+                for input_name, input_collection in implicit_collection_info[ "implicit_inputs" ]:
+                    dataset_collection_instance.add_implicit_input_collection( input_name, input_collection )
+                for output_dataset in implicit_collection_info.get( "outputs" ):
+                    if output_dataset not in trans.sa_session:
+                        output_dataset = trans.sa_session.query( type( output_dataset ) ).get( output_dataset.id )
+                    if isinstance( output_dataset, model.HistoryDatasetAssociation ):
+                        output_dataset.hidden_beneath_collection_instance = dataset_collection_instance
+                    elif isinstance( output_dataset, model.HistoryDatasetCollectionAssociation ):
+                        dataset_collection_instance.add_implicit_input_collection( input_name, input_collection )
+                    else:
+                        # dataset collection, don't need to do anything...
+                        pass
+                    trans.sa_session.add( output_dataset )
+
+                dataset_collection_instance.implicit_output_name = implicit_collection_info[ "implicit_output_name" ]
+
+            log.debug("Created collection with %d elements" % ( len( dataset_collection_instance.collection.elements ) ) )
+            # Handle setting hid
+            parent.add_dataset_collection( dataset_collection_instance )
+
+        elif isinstance( parent, model.LibraryFolder ):
+            dataset_collection_instance = self.model.LibraryDatasetCollectionAssociation(
+                collection=dataset_collection,
+                folder=parent,
+                name=name,
+            )
+
+        else:
+            message = "Internal logic error - create called with unknown parent type %s" % type( parent )
+            log.exception( message )
+            raise MessageException( message )
+
+        return self.__persist( dataset_collection_instance )
+
+    def create_dataset_collection(
+        self,
+        trans,
+        collection_type,
+        element_identifiers=None,
+        elements=None,
+    ):
+        if element_identifiers is None and elements is None:
+            raise RequestParameterInvalidException( ERROR_INVALID_ELEMENTS_SPECIFICATION )
+        if not collection_type:
+            raise RequestParameterInvalidException( ERROR_NO_COLLECTION_TYPE )
+        collection_type_description = self.collection_type_descriptions.for_collection_type( collection_type )
+        # If we have elements, this is an internal request, don't need to load
+        # objects from identifiers.
+        if elements is None:
+            if collection_type_description.has_subcollections( ):
+                # Nested collection - recursively create collections and update identifiers.
+                self.__recursively_create_collections( trans, element_identifiers )
+            elements = self.__load_elements( trans, element_identifiers )
+        # else if elements is set, it better be an ordered dict!
+
+        if elements is not self.ELEMENTS_UNINITIALIZED:
+            type_plugin = collection_type_description.rank_type_plugin()
+            dataset_collection = builder.build_collection( type_plugin, elements )
+        else:
+            dataset_collection = model.DatasetCollection( populated=False )
+        dataset_collection.collection_type = collection_type
+        return dataset_collection
+
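+    # Sketch of the `element_identifiers` structure consumed above
+    # (illustrative values; each entry needs a `name` plus a `src`/`id` pair,
+    # and nested collections instead use src='new_collection'):
+    #
+    #     [
+    #         { 'name': 'forward', 'src': 'hda', 'id': '<encoded hda id>' },
+    #         { 'name': 'reverse', 'src': 'hda', 'id': '<encoded hda id>' },
+    #     ]
+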
+    def set_collection_elements( self, dataset_collection, dataset_instances ):
+        if dataset_collection.populated:
+            raise Exception("Cannot reset elements of an already populated dataset collection.")
+
+        collection_type = dataset_collection.collection_type
+        collection_type_description = self.collection_type_descriptions.for_collection_type( collection_type )
+        type_plugin = collection_type_description.rank_type_plugin()
+        builder.set_collection_elements( dataset_collection, type_plugin, dataset_instances )
+        dataset_collection.mark_as_populated()
+
+        return dataset_collection
+
+    def collection_builder_for( self, dataset_collection ):
+        collection_type = dataset_collection.collection_type
+        collection_type_description = self.collection_type_descriptions.for_collection_type( collection_type )
+        return builder.BoundCollectionBuilder( dataset_collection, collection_type_description )
+
+    def delete( self, trans, instance_type, id ):
+        dataset_collection_instance = self.get_dataset_collection_instance( trans, instance_type, id, check_ownership=True )
+        dataset_collection_instance.deleted = True
+        trans.sa_session.add( dataset_collection_instance )
+        trans.sa_session.flush( )
+
+    def update( self, trans, instance_type, id, payload ):
+        dataset_collection_instance = self.get_dataset_collection_instance( trans, instance_type, id, check_ownership=True )
+        if trans.user is None:
+            anon_allowed_payload = {}
+            if 'deleted' in payload:
+                anon_allowed_payload[ 'deleted' ] = payload[ 'deleted' ]
+            if 'visible' in payload:
+                anon_allowed_payload[ 'visible' ] = payload[ 'visible' ]
+            payload = self._validate_and_parse_update_payload( anon_allowed_payload )
+        else:
+            payload = self._validate_and_parse_update_payload( payload )
+        changed = self._set_from_dict( trans, dataset_collection_instance, payload )
+        return changed
+
+    def copy(
+        self,
+        trans,
+        parent,
+        # PRECONDITION: security checks on ability to add to parent
+        # occurred during load.
+        source,
+        encoded_source_id,
+    ):
+        assert source == "hdca"  # for now
+        source_hdca = self.__get_history_collection_instance( trans, encoded_source_id )
+        new_hdca = source_hdca.copy()
+        parent.add_dataset_collection( new_hdca )
+        trans.sa_session.add( new_hdca )
+        trans.sa_session.flush()
+        return new_hdca
+
+    def _set_from_dict( self, trans, dataset_collection_instance, new_data ):
+        # send what we can down into the model
+        changed = dataset_collection_instance.set_from_dict( new_data )
+        # the rest (often involving the trans) - do here
+        if 'annotation' in new_data.keys() and trans.get_user():
+            dataset_collection_instance.add_item_annotation( trans.sa_session, trans.get_user(), dataset_collection_instance, new_data[ 'annotation' ] )
+            changed[ 'annotation' ] = new_data[ 'annotation' ]
+        if 'tags' in new_data.keys() and trans.get_user():
+            self.tag_manager.set_tags_from_list( trans.get_user(), dataset_collection_instance, new_data[ 'tags' ] )
+
+        if changed.keys():
+            trans.sa_session.flush()
+
+        return changed
+
+    def _validate_and_parse_update_payload( self, payload ):
+        validated_payload = {}
+        for key, val in payload.items():
+            if val is None:
+                continue
+            if key in ( 'name', ):
+                val = validation.validate_and_sanitize_basestring( key, val )
+                validated_payload[ key ] = val
+            elif key in ( 'deleted', 'visible' ):
+                validated_payload[ key ] = validation.validate_boolean( key, val )
+            elif key == 'tags':
+                validated_payload[ key ] = validation.validate_and_sanitize_basestring_list( key, val )
+        return validated_payload
+
+    def history_dataset_collections(self, history, query):
+        collections = history.active_dataset_collections
+        collections = filter( query.direct_match, collections )
+        return collections
+
+    def __persist( self, dataset_collection_instance ):
+        context = self.model.context
+        context.add( dataset_collection_instance )
+        context.flush()
+        return dataset_collection_instance
+
+    def __recursively_create_collections( self, trans, element_identifiers ):
+        for index, element_identifier in enumerate( element_identifiers ):
+            try:
+                if element_identifier[ "src" ] != "new_collection":
+                    # not a new collection, keep moving...
+                    continue
+            except KeyError:
+                # Not a dictionary, just an id of an HDA - move along.
+                continue
+
+            # element identifier is a dict with src new_collection...
+            collection_type = element_identifier.get( "collection_type", None )
+            collection = self.create_dataset_collection(
+                trans=trans,
+                collection_type=collection_type,
+                element_identifiers=element_identifier[ "element_identifiers" ],
+            )
+            element_identifier[ "__object__" ] = collection
+
+        return element_identifiers
+
+    def __load_elements( self, trans, element_identifiers ):
+        elements = odict.odict()
+        for element_identifier in element_identifiers:
+            elements[ element_identifier[ "name" ] ] = self.__load_element( trans, element_identifier )
+        return elements
+
+    def __load_element( self, trans, element_identifier ):
+        # if not isinstance( element_identifier, dict ):
+        #    # Is allowing this to just be the id of an hda too clever? Somewhat
+        #    # consistent with other API methods though.
+        #    element_identifier = dict( src='hda', id=str( element_identifier ) )
+
+        # Previously created collection already found in request, just pass
+        # through as is.
+        if "__object__" in element_identifier:
+            the_object = element_identifier[ "__object__" ]
+            if the_object is not None and the_object.id:
+                context = self.model.context
+                if the_object not in context:
+                    the_object = context.query( type(the_object) ).get(the_object.id)
+            return the_object
+
+        # dataset_identifier is a dict: {src=hda|ldda|hdca|new_collection, id=<encoded_id>}
+        try:
+            src_type = element_identifier.get( 'src', 'hda' )
+        except AttributeError:
+            raise MessageException( "Dataset collection element definition (%s) not dictionary-like." % element_identifier )
+        encoded_id = element_identifier.get( 'id', None )
+        if not src_type or not encoded_id:
+            message_template = "Problem decoding element identifier %s - must contain a 'src' and an 'id'."
+            message = message_template % element_identifier
+            raise RequestParameterInvalidException( message )
+
+        if src_type == 'hda':
+            decoded_id = int( trans.app.security.decode_id( encoded_id ) )
+            element = self.hda_manager.get_accessible( decoded_id, trans.user )
+        elif src_type == 'ldda':
+            element = self.ldda_manager.get( trans, encoded_id )
+        elif src_type == 'hdca':
+            # TODO: Option to copy? Force copy? Copy or allow if not owned?
+            element = self.__get_history_collection_instance( trans, encoded_id ).collection
+        # TODO: ldca.
+        else:
+            raise RequestParameterInvalidException( "Unknown src_type parameter supplied '%s'." % src_type )
+        return element
+
+    def match_collections( self, collections_to_match ):
+        """
+        May seem odd to place it here, but planning to grow sophistication and
+        get plugin types involved so it will likely make sense in the future.
+        """
+        return MatchingCollections.for_collections( collections_to_match, self.collection_type_descriptions )
+
+    def get_dataset_collection_instance( self, trans, instance_type, id, **kwds ):
+        """
+        """
+        if instance_type == "history":
+            return self.__get_history_collection_instance( trans, id, **kwds )
+        elif instance_type == "library":
+            return self.__get_library_collection_instance( trans, id, **kwds )
+
+    def get_dataset_collection( self, trans, encoded_id ):
+        collection_id = int( trans.app.security.decode_id( encoded_id ) )
+        collection = trans.sa_session.query( trans.app.model.DatasetCollection ).get( collection_id )
+        return collection
+
+    def __get_history_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ):
+        instance_id = int( trans.app.security.decode_id( id ) )
+        collection_instance = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( instance_id )
+        if check_ownership:
+            self.history_manager.error_unless_owner( collection_instance.history, trans.user, current_history=trans.history )
+        if check_accessible:
+            self.history_manager.error_unless_accessible( collection_instance.history, trans.user, current_history=trans.history )
+        return collection_instance
+
+    def __get_library_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ):
+        if check_ownership:
+            raise NotImplementedError( "Functionality (getting library dataset collection with ownership check) unimplemented." )
+        instance_id = int( trans.security.decode_id( id ) )
+        collection_instance = trans.sa_session.query( trans.app.model.LibraryDatasetCollectionAssociation ).get( instance_id )
+        if check_accessible:
+            if not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), collection_instance, trans.user ):
+                raise ItemAccessibilityException( "LibraryDatasetCollectionAssociation is not accessible to the current user", type='error' )
+        return collection_instance
diff --git a/lib/galaxy/managers/collections_util.py b/lib/galaxy/managers/collections_util.py
new file mode 100644
index 0000000..16bfaee
--- /dev/null
+++ b/lib/galaxy/managers/collections_util.py
@@ -0,0 +1,98 @@
+import logging
+
+from galaxy import exceptions, model, web
+
+log = logging.getLogger( __name__ )
+
+ERROR_MESSAGE_UNKNOWN_SRC = "Unknown dataset source (src) %s."
+ERROR_MESSAGE_NO_NESTED_IDENTIFIERS = "Dataset source new_collection requires nested element_identifiers for new collection."
+ERROR_MESSAGE_NO_NAME = "Cannot load invalid dataset identifier - missing name - %s"
+ERROR_MESSAGE_NO_COLLECTION_TYPE = "No collection_type defined for nested collection %s."
+ERROR_MESSAGE_INVALID_PARAMETER_FOUND = "Found invalid parameter %s in element identifier description %s."
+ERROR_MESSAGE_DUPLICATED_IDENTIFIER_FOUND = "Found duplicated element identifier name %s."
+
+
+def api_payload_to_create_params( payload ):
+    """
+    Cleanup API payload to pass into dataset_collections.
+    """
+    required_parameters = [ "collection_type", "element_identifiers" ]
+    missing_parameters = [ p for p in required_parameters if p not in payload ]
+    if missing_parameters:
+        message = "Missing required parameters %s" % missing_parameters
+        raise exceptions.ObjectAttributeMissingException( message )
+
+    params = dict(
+        collection_type=payload.get( "collection_type" ),
+        element_identifiers=payload.get( "element_identifiers" ),
+        name=payload.get( "name", None ),
+    )
+    return params
+
+
+def validate_input_element_identifiers( element_identifiers ):
+    """ Scan through the list of element identifiers supplied by the API consumer
+    and verify the structure is valid.
+    """
+    log.debug( "Validating %d element identifiers for collection creation." % len( element_identifiers ) )
+    identifier_names = set()
+    for element_identifier in element_identifiers:
+        if "__object__" in element_identifier:
+            message = ERROR_MESSAGE_INVALID_PARAMETER_FOUND % ( "__object__", element_identifier )
+            raise exceptions.RequestParameterInvalidException( message )
+        if "name" not in element_identifier:
+            message = ERROR_MESSAGE_NO_NAME % element_identifier
+            raise exceptions.RequestParameterInvalidException( message )
+        name = element_identifier[ "name" ]
+        if name in identifier_names:
+            message = ERROR_MESSAGE_DUPLICATED_IDENTIFIER_FOUND % name
+            raise exceptions.RequestParameterInvalidException( message )
+        else:
+            identifier_names.add( name )
+        src = element_identifier.get( "src", "hda" )
+        if src not in [ "hda", "hdca", "ldda", "new_collection" ]:
+            message = ERROR_MESSAGE_UNKNOWN_SRC % src
+            raise exceptions.RequestParameterInvalidException( message )
+        if src == "new_collection":
+            if "element_identifiers" not in element_identifier:
+                message = ERROR_MESSAGE_NO_NESTED_IDENTIFIERS
+                raise exceptions.RequestParameterInvalidException( message )
+            if "collection_type" not in element_identifier:
+                message = ERROR_MESSAGE_NO_COLLECTION_TYPE % element_identifier
+                raise exceptions.RequestParameterInvalidException( message )
+            validate_input_element_identifiers( element_identifier[ "element_identifiers" ] )
+
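+# A nested identifier (a 'paired' collection inside a list) that passes the
+# validation above would look like this sketch (ids illustrative):
+#
+#     [ { 'name': 'sample1', 'src': 'new_collection', 'collection_type': 'paired',
+#         'element_identifiers': [
+#             { 'name': 'forward', 'src': 'hda', 'id': '<encoded id>' },
+#             { 'name': 'reverse', 'src': 'hda', 'id': '<encoded id>' } ] } ]
+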
+
+def dictify_dataset_collection_instance( dataset_collection_instance, parent, security, view="element" ):
+    dict_value = dataset_collection_instance.to_dict( view=view )
+    encoded_id = security.encode_id( dataset_collection_instance.id )
+    if isinstance( parent, model.History ):
+        encoded_history_id = security.encode_id( parent.id )
+        dict_value[ 'url' ] = web.url_for( 'history_content_typed', history_id=encoded_history_id, id=encoded_id, type="dataset_collection" )
+    elif isinstance( parent, model.LibraryFolder ):
+        encoded_library_id = security.encode_id( parent.library.id )
+        encoded_folder_id = security.encode_id( parent.id )
+        # TODO: Work in progress - this end-point is not right yet...
+        dict_value[ 'url' ] = web.url_for( 'library_content', library_id=encoded_library_id, id=encoded_id, folder_id=encoded_folder_id )
+    if view == "element":
+        collection = dataset_collection_instance.collection
+        dict_value[ 'elements' ] = [ dictify_element(_) for _ in collection.elements ]
+        dict_value[ 'populated' ] = collection.populated
+    security.encode_all_ids( dict_value, recursive=True )  # TODO: Use Kyle's recursive formulation of this.
+    return dict_value
+
+
+def dictify_element( element ):
+    dictified = element.to_dict( view="element" )
+    object_details = element.element_object.to_dict()
+    if element.child_collection:
+        # Recursively yield elements for each nested collection...
+        child_collection = element.child_collection
+        object_details[ "elements" ] = [ dictify_element(_) for _ in child_collection.elements ]
+        object_details[ "populated" ] = child_collection.populated
+
+    dictified[ "object" ] = object_details
+    return dictified
+
+
+__all__ = ( 'api_payload_to_create_params', 'dictify_dataset_collection_instance' )
diff --git a/lib/galaxy/managers/configuration.py b/lib/galaxy/managers/configuration.py
new file mode 100644
index 0000000..b5bacc8
--- /dev/null
+++ b/lib/galaxy/managers/configuration.py
@@ -0,0 +1,100 @@
+"""
+Serializers for Galaxy config file data: ConfigSerializer for all users
+and a more expanded set of data for admin in AdminConfigSerializer.
+
+Used by both the API and bootstrapped data.
+"""
+# TODO: this is a bit of an odd duck. It uses the serializer structure from managers
+#   but doesn't have a model like them. It might be better in config.py or a
+#   totally new area, but I'm leaving it in managers for now for class consistency.
+
+from galaxy.web.framework.base import server_starttime
+from galaxy.managers import base
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class ConfigSerializer( base.ModelSerializer ):
+    """Configuration (galaxy.ini) settings viewable by all users"""
+
+    def __init__( self, app ):
+        super( ConfigSerializer, self ).__init__( app )
+
+        self.default_view = 'all'
+        self.add_view( 'all', self.serializers.keys() )
+
+    def default_serializer( self, config, key ):
+        return getattr( config, key, None )
+
+    def add_serializers( self ):
+        def _defaults_to( default ):
+            return lambda i, k, **c: getattr( i, k, default )
+
+        self.serializers = {
+            # TODO: this is available from user data, remove
+            'is_admin_user'             : lambda *a, **c: False,
+
+            'brand'                     : _defaults_to( '' ),
+            # TODO: this doesn't seem right
+            'logo_url'                  : lambda i, k, **c: self.url_for( i.get( k, '/' ) ),
+            'logo_src'                  : lambda i, k, **c: self.url_for( '/static/images/galaxyIcon_noText.png' ),
+            'terms_url'                 : _defaults_to( '' ),
+
+            # TODO: don't hardcode here - hardcode defaults once in config.py
+            'wiki_url'                  : _defaults_to( "http://galaxyproject.org/" ),
+            'search_url'                : _defaults_to( "http://galaxyproject.org/search/usegalaxy/" ),
+            'mailing_lists'             : _defaults_to( "https://wiki.galaxyproject.org/MailingLists" ),
+            'screencasts_url'           : _defaults_to( "https://vimeo.com/galaxyproject" ),
+            'citation_url'              : _defaults_to( "https://wiki.galaxyproject.org/CitingGalaxy" ),
+            'support_url'               : _defaults_to( "https://wiki.galaxyproject.org/Support" ),
+            'lims_doc_url'              : _defaults_to( "https://usegalaxy.org/u/rkchak/p/sts" ),
+            'biostar_url'               : _defaults_to( '' ),
+            'biostar_url_redirect'      : lambda *a, **c: self.url_for( controller='biostar', action='biostar_redirect',
+                                                                        qualified=True ),
+
+            'communication_server_host' : _defaults_to( None ),
+            'communication_server_port' : _defaults_to( None ),
+            'persistent_communication_rooms' : _defaults_to( None ),
+            'allow_user_creation'       : _defaults_to( False ),
+            'use_remote_user'           : _defaults_to( None ),
+            'remote_user_logout_href'   : _defaults_to( '' ),
+            'datatypes_disable_auto'    : _defaults_to( False ),
+            'allow_user_dataset_purge'  : _defaults_to( False ),
+            'ga_code'                   : _defaults_to( None ),
+            'enable_unique_workflow_defaults' : _defaults_to( False ),
+
+            # TODO: is there no 'correct' way to get an api url? controller='api', action='tools' is a hack
+            # at any rate: the following works with path_prefix but is still brittle
+            # TODO: change this to (more generic) upload_path and incorporate config.nginx_upload_path into building it
+            'nginx_upload_path'         : lambda i, k, **c: getattr( i, k, False ) or self.url_for( '/api/tools' ),
+            'ftp_upload_dir'            : _defaults_to( None ),
+            'ftp_upload_site'           : _defaults_to( None ),
+            'version_major'             : _defaults_to( None ),
+            'require_login'             : _defaults_to( None ),
+            'inactivity_box_content'    : _defaults_to( None ),
+            'message_box_content'       : _defaults_to( None ),
+            'message_box_visible'       : _defaults_to( False ),
+            'message_box_class'         : _defaults_to( 'info' ),
+            'server_starttime'          : lambda i, k, **c: server_starttime,
+        }
+
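+    # Typical use (sketch; `serialize_to_view` is assumed here from the
+    # ModelSerializer base class in galaxy.managers.base):
+    #
+    #     config_dict = ConfigSerializer( app ).serialize_to_view( app.config, view='all' )
+    #     # -> { 'brand': ..., 'wiki_url': ..., 'server_starttime': ..., ... }
+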
+
+class AdminConfigSerializer( ConfigSerializer ):
+    """Configuration attributes viewable only by admin users"""
+
+    def add_serializers( self ):
+        super( AdminConfigSerializer, self ).add_serializers()
+
+        def _defaults_to( default ):
+            return lambda i, k, **c: getattr( i, k, default )
+
+        self.serializers.update({
+            # TODO: this is available from user serialization: remove
+            'is_admin_user'             : lambda *a: True,
+
+            'library_import_dir'        : _defaults_to( None ),
+            'user_library_import_dir'   : _defaults_to( None ),
+            'allow_library_path_paste'  : _defaults_to( False ),
+            'allow_user_deletion'       : _defaults_to( False ),
+        })
diff --git a/lib/galaxy/managers/containers.py b/lib/galaxy/managers/containers.py
new file mode 100644
index 0000000..cac4964
--- /dev/null
+++ b/lib/galaxy/managers/containers.py
@@ -0,0 +1,131 @@
+"""
+Manager mixins to unify the interface into things that can contain: Datasets
+and other (nested) containers.
+
+(e.g. DatasetCollections, Histories, LibraryFolders)
+"""
+# Histories should be DatasetCollections.
+# Libraries should be DatasetCollections.
+
+import operator
+
+from galaxy import model
+import galaxy.exceptions
+import galaxy.util
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# ====
+class ContainerManagerMixin( object ):
+    """
+    A class that tracks/contains two types of items:
+        1) some non-container object (such as datasets)
+        2) other sub-containers nested within this one
+
+    Levels of nesting are not considered here; in other words,
+    each of the methods below works only on the first level of
+    nesting.
+    """
+    # TODO: terminology is getting a bit convoluted and silly at this point: rename three public below?
+    # TODO: this should be an open mapping (not just 2)
+    #: the classes that can be contained
+    contained_class = None
+    subcontainer_class = None
+    #: how any contents lists produced are ordered - an attrgetter (or similar key callable) to sort contents on
+    order_contents_on = None
+
+    # ---- interface
+    def contents( self, container ):
+        """
+        Returns both types of contents: filtered and in some order.
+        """
+        iters = []
+        iters.append( self.contained( container ) )
+        iters.append( self.subcontainers( container ) )
+        return galaxy.util.merge_sorted_iterables( self.order_contents_on, *iters )
+
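+    # note: both iterables must already be sorted on the same key;
+    # galaxy.util.merge_sorted_iterables then interleaves them lazily, e.g.
+    # (sketch): merge_sorted_iterables( operator.attrgetter( 'create_time' ),
+    # contained_query, subcontainer_query ) yields one stream ordered by create_time.
+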
+    def contained( self, container, **kwargs ):
+        """
+        Returns non-container objects.
+        """
+        return self._filter_contents( container, self.contained_class, **kwargs )
+
+    def subcontainers( self, container, **kwargs ):
+        """
+        Returns only the containers within this one.
+        """
+        return self._filter_contents( container, self.subcontainer_class, **kwargs )
+
+    # ---- private
+    def _filter_contents( self, container, content_class, **kwargs ):
+        # TODO: use list (or by_history etc.)
+        container_filter = self._get_filter_for_contained( container, content_class )
+        query = self.session().query( content_class ).filter( container_filter )
+        return query
+
+    def _get_filter_for_contained( self, container, content_class ):
+        raise galaxy.exceptions.NotImplemented( 'Abstract class' )
+
+    def _content_manager( self, content ):
+        raise galaxy.exceptions.NotImplemented( 'Abstract class' )
+
+
+class LibraryFolderAsContainerManagerMixin( ContainerManagerMixin ):
+    # can contain two types of subcontainer: LibraryFolder, LibraryDatasetCollectionAssociation
+    # has as the top level container: Library
+
+    contained_class = model.LibraryDataset
+    subcontainer_class = model.LibraryFolder
+    # subcontainer_class = model.LibraryDatasetCollectionAssociation
+    order_contents_on = operator.attrgetter( 'create_time' )
+
+    def _get_filter_for_contained( self, container, content_class ):
+        if content_class == self.subcontainer_class:
+            return self.subcontainer_class.parent == container
+        return self.contained_class.folder == container
+
+    def _content_manager( self, content ):
+        # type sniffing is inevitable
+        if isinstance( content, model.LibraryDataset ):
+            return self.lda_manager
+        elif isinstance( content, model.LibraryFolder ):
+            return self.folder_manager
+        raise TypeError( 'Unknown contents class: ' + str( content ) )
+
+
+class DatasetCollectionAsContainerManagerMixin( ContainerManagerMixin ):
+
+    # (note: unlike the other collections, dc's wrap both contained and subcontainers in this class)
+    contained_class = model.DatasetCollectionElement
+    subcontainer_class = model.DatasetCollection
+    order_contents_on = operator.attrgetter( 'element_index' )
+
+    def _get_filter_for_contained( self, container, content_class ):
+        return content_class.collection == container
+
+    def _content_manager( self, content ):
+        # type sniffing is inevitable
+        if isinstance( content, model.DatasetCollectionElement ):
+            return self.collection_manager
+        elif isinstance( content, model.DatasetCollection ):
+            return self.collection_manager
+        raise TypeError( 'Unknown contents class: ' + str( content ) )
+
+
+# ====
+class ContainableModelMixin:
+    """
+    Mixin for objects that can be placed in a container.
+    """
+
+    # ---- interface
+    def parent_container( self, containable ):
+        """
+        Return this item's parent container or None if unrecorded.
+        """
+        raise galaxy.exceptions.NotImplemented( 'Abstract class' )
+
+    def set_parent_container( self, containable, new_parent_container ):
+        raise galaxy.exceptions.NotImplemented( 'Abstract class' )
diff --git a/lib/galaxy/managers/context.py b/lib/galaxy/managers/context.py
new file mode 100644
index 0000000..aaa192c
--- /dev/null
+++ b/lib/galaxy/managers/context.py
@@ -0,0 +1,192 @@
+"""
+Mixins for transaction-like objects.
+"""
+import string
+from json import dumps
+
+from six import text_type
+
+from galaxy.util import bunch
+
+
+class ProvidesAppContext( object ):
+    """ For transaction-like objects to provide Galaxy convience layer for
+    database and event handling.
+
+    Mixed in class must provide `app` property.
+    """
+
+    def log_action( self, user=None, action=None, context=None, params=None):
+        """
+        Application-level logging of user actions.
+        """
+        if self.app.config.log_actions:
+            action = self.app.model.UserAction(action=action, context=context, params=text_type( dumps( params ) ) )
+            try:
+                if user:
+                    action.user = user
+                else:
+                    action.user = self.user
+            except Exception:
+                action.user = None
+            try:
+                action.session_id = self.galaxy_session.id
+            except Exception:
+                action.session_id = None
+            self.sa_session.add( action )
+            self.sa_session.flush()
+
+    def log_event( self, message, tool_id=None, **kwargs ):
+        """
+        Application level logging. Still needs fleshing out (log levels and such)
+        Logging events is a config setting - if False, do not log.
+        """
+        if self.app.config.log_events:
+            event = self.app.model.Event()
+            event.tool_id = tool_id
+            try:
+                event.message = message % kwargs
+            except Exception:
+                event.message = message
+            try:
+                event.history = self.get_history()
+            except Exception:
+                event.history = None
+            try:
+                event.history_id = self.history.id
+            except Exception:
+                event.history_id = None
+            try:
+                event.user = self.user
+            except Exception:
+                event.user = None
+            try:
+                event.session_id = self.galaxy_session.id
+            except Exception:
+                event.session_id = None
+            self.sa_session.add( event )
+            self.sa_session.flush()
+
+    @property
+    def sa_session( self ):
+        """
+        Returns a SQLAlchemy session -- currently just gets the current
+        session from the threadlocal session context, but this is provided
+        to allow migration toward a more SQLAlchemy 0.4 style of use.
+        """
+        return self.app.model.context.current
+
+    def expunge_all( self ):
+        app = self.app
+        context = app.model.context
+        context.expunge_all()
+        # This is a bit hacky; should refactor this. Maybe refactor to app -> expunge_all()
+        if hasattr(app, 'install_model'):
+            install_model = app.install_model
+            if install_model != app.model:
+                install_model.context.expunge_all()
+
+    def get_toolbox(self):
+        """Returns the application toolbox"""
+        return self.app.toolbox
+
+    @property
+    def model( self ):
+        return self.app.model
+
+    @property
+    def install_model( self ):
+        return self.app.install_model
+
+    def request_types(self):
+        return self.sa_session.query( self.app.model.RequestType ).filter_by( deleted=False ).count() > 0
+
+
+class ProvidesUserContext( object ):
+    """ For transaction-like objects to provide Galaxy convience layer for
+    reasoning about users.
+
+    Mixed in class must provide `user`, `api_inherit_admin`, and `app`
+    properties.
+    """
+
+    @property
+    def anonymous( self ):
+        return self.user is None and not self.api_inherit_admin
+
+    def get_current_user_roles( self ):
+        user = self.user
+        if user:
+            roles = user.all_roles()
+        else:
+            roles = []
+        return roles
+
+    def user_is_admin( self ):
+        if self.api_inherit_admin:
+            return True
+        return self.user and self.user.email in self.app.config.admin_users_list
+
+    def user_can_do_run_as( self ):
+        run_as_users = [ user for user in self.app.config.get( "api_allow_run_as", "" ).split( "," ) if user ]
+        if not run_as_users:
+            return False
+        user_in_run_as_users = self.user and self.user.email in run_as_users
+        # Can do if explicitly in list or master_api_key supplied.
+        can_do_run_as = user_in_run_as_users or self.api_inherit_admin
+        return can_do_run_as
+
+    @property
+    def user_ftp_dir( self ):
+        base_dir = self.app.config.ftp_upload_dir
+        if base_dir is None:
+            return None
+        else:
+            # e.g. 'email' or 'username'
+            identifier_attr = self.app.config.ftp_upload_dir_identifier
+            identifier_value = getattr(self.user, identifier_attr)
+            template = self.app.config.ftp_upload_dir_template
+            path = string.Template(template).safe_substitute(dict(
+                ftp_upload_dir=base_dir,
+                ftp_upload_dir_identifier=identifier_value,
+            ))
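+            # e.g. (illustrative): with template
+            # '${ftp_upload_dir}/${ftp_upload_dir_identifier}', base_dir
+            # '/data/ftp', and identifier 'user@example.org', the resulting
+            # path is '/data/ftp/user@example.org'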
+            return path
+
+
+class ProvidesHistoryContext( object ):
+    """ For transaction-like objects to provide Galaxy convience layer for
+    reasoning about histories.
+
+    Mixed in class must provide `user`, `history`, and `app`
+    properties.
+    """
+
+    def db_dataset_for( self, dbkey ):
+        """
+        Returns the db_file dataset associated/needed by `dataset`, or `None`.
+        """
+        # If no history, return None.
+        if self.history is None:
+            return None
+        # TODO: when does this happen? is it Bunch or util.bunch.Bunch?
+        if isinstance( self.history, bunch.Bunch ):
+            # The API presents a Bunch for a history.  Until the API is
+            # more fully featured for handling this, also return None.
+            return None
+        datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
+                                  .filter_by( deleted=False, history_id=self.history.id, extension="len" )
+        for ds in datasets:
+            if dbkey == ds.dbkey:
+                return ds
+        return None
+
+    @property
+    def db_builds( self ):
+        """
+        Returns the builds defined by galaxy and the builds defined by
+        the user (chromInfo in history).
+        """
+        # FIXME: This method should be removed
+        return self.app.genome_builds.get_genome_build_names( trans=self )
diff --git a/lib/galaxy/managers/datasets.py b/lib/galaxy/managers/datasets.py
new file mode 100644
index 0000000..6dd1d40
--- /dev/null
+++ b/lib/galaxy/managers/datasets.py
@@ -0,0 +1,649 @@
+"""
+Manager and Serializer for Datasets.
+"""
+import glob
+import os
+from six import string_types
+
+from galaxy import model
+from galaxy import exceptions
+import galaxy.datatypes.metadata
+
+from galaxy.managers import base
+from galaxy.managers import secured
+from galaxy.managers import deletable
+from galaxy.managers import roles
+from galaxy.managers import rbac_secured
+from galaxy.managers import users
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class DatasetManager( base.ModelManager, secured.AccessibleManagerMixin, deletable.PurgableManagerMixin ):
+    """
+    Manipulate datasets: the components contained in DatasetAssociations/DatasetInstances/HDAs/LDDAs
+    """
+    model_class = model.Dataset
+    foreign_key_name = 'dataset'
+
+    # TODO:?? get + error_if_uploading is a common pattern; should the upload check be worked into access/owned?
+
+    def __init__( self, app ):
+        super( DatasetManager, self ).__init__( app )
+        self.permissions = DatasetRBACPermissions( app )
+        # needed for admin test
+        self.user_manager = users.UserManager( app )
+
+    def create( self, manage_roles=None, access_roles=None, flush=True, **kwargs ):
+        """
+        Create and return a new Dataset object.
+        """
+        # default to NEW state on new datasets
+        kwargs.update( dict( state=( kwargs.get( 'state', model.Dataset.states.NEW ) ) ) )
+        dataset = model.Dataset( **kwargs )
+        self.session().add( dataset )
+
+        self.permissions.set( dataset, manage_roles, access_roles, flush=False )
+
+        if flush:
+            self.session().flush()
+        return dataset
+
+    def copy( self, dataset, **kwargs ):
+        raise exceptions.NotImplemented( 'Datasets cannot be copied' )
+
+    def purge( self, dataset, flush=True ):
+        """
+        Remove the object_store/file for this dataset from storage and mark
+        as purged.
+
+        :raises exceptions.ConfigDoesNotAllowException: if the instance doesn't allow
+        """
+        self.error_unless_dataset_purge_allowed()
+
+        # the following also marks dataset as purged and deleted
+        dataset.full_delete()
+        self.session().add( dataset )
+        if flush:
+            self.session().flush()
+        return dataset
+
+    # TODO: this may be more conv. somewhere else
+    # TODO: how to allow admin bypass?
+    def error_unless_dataset_purge_allowed( self, msg=None ):
+        if not self.app.config.allow_user_dataset_purge:
+            msg = msg or 'This instance does not allow user dataset purging'
+            raise exceptions.ConfigDoesNotAllowException( msg )
+
+    # .... accessibility
+    # datasets can implement the accessible interface, but accessibility is checked in an entirely different way
+    #   than those resources that have a user attribute (histories, pages, etc.)
+    def is_accessible( self, dataset, user, **kwargs ):
+        """
+        Is this dataset readable/viewable to user?
+        """
+        if self.user_manager.is_admin( user ):
+            return True
+        if self.has_access_permission( dataset, user ):
+            return True
+        return False
+
+    def has_access_permission( self, dataset, user ):
+        """
+        Return T/F if the user has role-based access to the dataset.
+        """
+        roles = user.all_roles_exploiting_cache() if user else []
+        return self.app.security_agent.can_access_dataset( roles, dataset )
+
+    # TODO: implement above for groups
+    # TODO: datatypes?
+    # .... data, object_store
+
+
+# TODO: SecurityAgentDatasetRBACPermissions( object ):
+
+class DatasetRBACPermissions( object ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.access = rbac_secured.AccessDatasetRBACPermission( app )
+        self.manage = rbac_secured.ManageDatasetRBACPermission( app )
+
+    # TODO: temporary facade over security_agent
+    def available_roles( self, trans, dataset, controller='root' ):
+        return self.app.security_agent.get_legitimate_roles( trans, dataset, controller )
+
+    def get( self, dataset, flush=True ):
+        manage = self.manage.by_dataset( dataset )
+        access = self.access.by_dataset( dataset )
+        return ( manage, access )
+
+    def set( self, dataset, manage_roles, access_roles, flush=True ):
+        manage = self.manage.set( dataset, manage_roles or [], flush=False )
+        access = self.access.set( dataset, access_roles or [], flush=flush )
+        return ( manage, access )
+
+    # ---- convenience settings
+    def set_public_with_single_manager( self, dataset, user, flush=True ):
+        # clear access first (making the dataset public), then grant manage with the caller's flush
+        self.access.clear( dataset, flush=False )
+        manage = self.manage.grant( dataset, user, flush=flush )
+        return ( [ manage ], [] )
+
+    def set_private_to_one_user( self, dataset, user, flush=True ):
+        manage = self.manage.grant( dataset, user, flush=False )
+        access = self.access.set_private( dataset, user, flush=flush )
+        return ( [ manage ], access )
+
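The facade keeps permissions as a ( manage, access ) pair of role lists; a sketch under the same assumptions as before (an initialized `app` and existing `dataset` and `user` objects):

    permissions = DatasetRBACPermissions( app )
    manage_roles, access_roles = permissions.get( dataset )
    # restrict the dataset to a single user who also manages it
    permissions.set_private_to_one_user( dataset, user, flush=True )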
+
+class DatasetSerializer( base.ModelSerializer, deletable.PurgableSerializerMixin ):
+    model_manager_class = DatasetManager
+
+    def __init__( self, app ):
+        super( DatasetSerializer, self ).__init__( app )
+        self.dataset_manager = self.manager
+        # needed for admin test
+        self.user_manager = users.UserManager( app )
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            'id',
+            'create_time',
+            'update_time',
+            'state',
+            'deleted',
+            'purged',
+            'purgable',
+            # 'object_store_id',
+            # 'external_filename',
+            # 'extra_files_path',
+            'file_size',
+            'total_size',
+            'uuid',
+        ])
+        # could do visualizations and/or display_apps
+
+    def add_serializers( self ):
+        super( DatasetSerializer, self ).add_serializers()
+        deletable.PurgableSerializerMixin.add_serializers( self )
+
+        self.serializers.update({
+            'create_time'   : self.serialize_date,
+            'update_time'   : self.serialize_date,
+
+            'uuid'          : lambda i, k, **c: str( i.uuid ) if i.uuid else None,
+            'file_name'     : self.serialize_file_name,
+            'extra_files_path' : self.serialize_extra_files_path,
+            'permissions'   : self.serialize_permissions,
+
+            'total_size'    : lambda i, k, **c: int( i.get_total_size() ),
+            'file_size'     : lambda i, k, **c: int( i.get_size() )
+        })
+
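Every serializer registered above follows the contract `fn( item, key, **context ) -> value`; a standalone illustration of that contract using a stub object:

    class FakeDataset( object ):
        def get_total_size( self ):
            return 1024

    serialize_total_size = lambda i, k, **c: int( i.get_total_size() )
    print( serialize_total_size( FakeDataset(), 'total_size' ) )  # -> 1024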
+    def serialize_file_name( self, dataset, key, user=None, **context ):
+        """
+        If the config allows or the user is admin, return the file name
+        of the file that contains this dataset's data.
+        """
+        is_admin = self.user_manager.is_admin( user )
+        # this can be expensive: only expose when the config option allows it
+        if is_admin or self.app.config.expose_dataset_path:
+            return dataset.file_name
+        self.skip()
+
+    def serialize_extra_files_path( self, dataset, key, user=None, **context ):
+        """
+        If the config allows or the user is admin, return the file path.
+        """
+        is_admin = self.user_manager.is_admin( user )
+        # this can be expensive: only expose when the config option allows it
+        if is_admin or self.app.config.expose_dataset_path:
+            return dataset.extra_files_path
+        self.skip()
+
+    def serialize_permissions( self, dataset, key, user=None, **context ):
+        """
+        """
+        if not self.dataset_manager.permissions.manage.is_permitted( dataset, user ):
+            self.skip()
+
+        management_permissions = self.dataset_manager.permissions.manage.by_dataset( dataset )
+        access_permissions = self.dataset_manager.permissions.access.by_dataset( dataset )
+        permissions = {
+            'manage' : [ self.app.security.encode_id( perm.role.id ) for perm in management_permissions ],
+            'access' : [ self.app.security.encode_id( perm.role.id ) for perm in access_permissions ],
+        }
+        return permissions
+
+
+class DatasetDeserializer( base.ModelDeserializer, deletable.PurgableDeserializerMixin ):
+    model_manager_class = DatasetManager
+
+    def __init__( self, app ):
+        super( DatasetDeserializer, self ).__init__( app )
+        # TODO: this manager may make more sense inside rbac_secured
+        self.role_manager = roles.RoleManager( app )
+
+    def add_deserializers( self ):
+        super( DatasetDeserializer, self ).add_deserializers()
+        # not much to set here besides permissions and purged/deleted
+        deletable.PurgableDeserializerMixin.add_deserializers( self )
+
+        self.deserializers.update({
+            'permissions' : self.deserialize_permissions,
+        })
+
+    def deserialize_permissions( self, dataset, key, permissions, user=None, **context ):
+        """
+        Create permissions for each list of encoded role ids in the (validated)
+        `permissions` dictionary, where `permissions` is in the form::
+
+            { 'manage': [ <role id 1>, ... ], 'access': [ <role id 2>, ... ] }
+        """
+        self.manager.permissions.manage.error_unless_permitted( dataset, user )
+        self._validate_permissions( permissions, **context )
+        manage = self._list_of_roles_from_ids( permissions[ 'manage' ] )
+        access = self._list_of_roles_from_ids( permissions[ 'access' ] )
+        self.manager.permissions.set( dataset, manage, access, flush=False )
+        return permissions
+
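For reference, a payload that passes `_validate_permissions` below looks like the following (the encoded role id is a placeholder):

    permissions = {
        'manage' : [ 'f2db41e1fa331b3e' ],  # at least one managing role is required
        'access' : [],                       # an empty list means no access restriction
    }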
+    def _validate_permissions( self, permissions, **context ):
+        self.validate.type( 'permissions', permissions, dict )
+        for permission_key in ( 'manage', 'access' ):
+            if not isinstance( permissions.get( permission_key, None ), list ):
+                msg = 'permissions requires "{0}" as a list of role ids'.format( permission_key )
+                raise exceptions.RequestParameterInvalidException( msg )
+
+        # TODO: push down into permissions?
+        manage_permissions = permissions[ 'manage' ]
+        if len( manage_permissions ) < 1:
+            raise exceptions.RequestParameterInvalidException( 'At least one managing role is required' )
+
+        return permissions
+
+    def _list_of_roles_from_ids( self, id_list ):
+        # TODO: this may make more sense inside rbac_secured
+        # note: no checking of valid roles is made
+        return self.role_manager.by_ids( [ self.app.security.decode_id( id_ ) for id_ in id_list ] )
+
+
+# ============================================================================= AKA DatasetInstanceManager
+class DatasetAssociationManager( base.ModelManager,
+                                 secured.AccessibleManagerMixin,
+                                 deletable.PurgableManagerMixin ):
+    """
+    DatasetAssociation/DatasetInstances are intended to be working
+    proxies to a Dataset, associated with either a library or a
+    user/history (HistoryDatasetAssociation).
+    """
+    # DA's were meant to be proxies - but were never fully implemented as them
+    # Instead, a dataset association HAS a dataset but contains metadata specific to a library (ldda) or user (hda)
+    model_class = model.DatasetInstance
+
+    # NOTE: model_manager_class should be set in HDA/LDA subclasses
+
+    def __init__( self, app ):
+        super( DatasetAssociationManager, self ).__init__( app )
+        self.dataset_manager = DatasetManager( app )
+
+    def is_accessible( self, dataset_assoc, user, **kwargs ):
+        """
+        Is this DA accessible to `user`?
+        """
+        # defer to the dataset
+        return self.dataset_manager.is_accessible( dataset_assoc.dataset, user )
+
+    def purge( self, dataset_assoc, flush=True ):
+        """
+        Purge this DatasetInstance and the dataset underlying it.
+        """
+        # error here if disallowed - before jobs are stopped
+        # TODO: this check may belong in the controller
+        self.dataset_manager.error_unless_dataset_purge_allowed()
+        super( DatasetAssociationManager, self ).purge( dataset_assoc, flush=flush )
+
+        # stop any jobs outputting the dataset_assoc
+        if dataset_assoc.creating_job_associations:
+            job = dataset_assoc.creating_job_associations[0].job
+            if not job.finished:
+                # signal to stop the creating job
+                job.mark_deleted( self.app.config.track_jobs_in_database )
+                self.app.job_manager.job_stop_queue.put( job.id )
+
+        # more importantly, purge underlying dataset as well
+        if dataset_assoc.dataset.user_can_purge:
+            self.dataset_manager.purge( dataset_assoc.dataset )
+        return dataset_assoc
+
+    def by_user( self, user ):
+        raise galaxy.exceptions.NotImplemented( 'Abstract Method' )
+
+    # .... associated job
+    def creating_job( self, dataset_assoc ):
+        """
+        Return the `Job` that created this dataset or None if not found.
+        """
+        # TODO: is this needed? Can't you use the dataset_assoc.creating_job attribute? When is this None?
+        # TODO: this would be even better if outputs and inputs were the underlying datasets
+        job = None
+        for job_output_assoc in dataset_assoc.creating_job_associations:
+            job = job_output_assoc.job
+            break
+        return job
+
+    def stop_creating_job( self, dataset_assoc ):
+        """
+        Stop a dataset_assoc's creating job if all of the job's other outputs are deleted.
+        """
+        # TODO: use in purge above
+        RUNNING_STATES = (
+            self.app.model.Job.states.QUEUED,
+            self.app.model.Job.states.RUNNING,
+            self.app.model.Job.states.NEW
+        )
+        if dataset_assoc.parent_id is None and len( dataset_assoc.creating_job_associations ) > 0:
+            # Mark associated job for deletion
+            job = dataset_assoc.creating_job_associations[0].job
+            if job.state in RUNNING_STATES:
+                # Are *all* of the job's other output datasets deleted?
+                if job.check_if_output_datasets_deleted():
+                    job.mark_deleted( self.app.config.track_jobs_in_database )
+                    self.app.job_manager.job_stop_queue.put( job.id )
+                    return True
+        return False
+
+    def is_composite( self, dataset_assoc ):
+        """
+        Return True if this hda/ldda is a composite type dataset.
+
+        .. note:: see also (wherever we keep information on composite datatypes)
+        """
+        return dataset_assoc.extension in self.app.datatypes_registry.get_composite_extensions()
+
+    def extra_files( self, dataset_assoc ):
+        """Return a list of file paths for composite files, an empty list otherwise."""
+        if not self.is_composite( dataset_assoc ):
+            return []
+        return glob.glob( os.path.join( dataset_assoc.dataset.extra_files_path, '*' ) )
+
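`extra_files` boils down to a non-recursive directory listing; the equivalent standalone call (the path is illustrative):

    import glob
    import os

    extra_files_path = '/galaxy/database/files/000/dataset_1_files'
    print( glob.glob( os.path.join( extra_files_path, '*' ) ) )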
+
+class _UnflattenedMetadataDatasetAssociationSerializer( base.ModelSerializer,
+                                                        deletable.PurgableSerializerMixin ):
+
+    def __init__( self, app ):
+        self.dataset_serializer = DatasetSerializer( app )
+        super( _UnflattenedMetadataDatasetAssociationSerializer, self ).__init__( app )
+
+    def add_serializers( self ):
+        super( _UnflattenedMetadataDatasetAssociationSerializer, self ).add_serializers()
+        deletable.PurgableSerializerMixin.add_serializers( self )
+
+        self.serializers.update({
+            'create_time'   : self.serialize_date,
+            'update_time'   : self.serialize_date,
+
+            # underlying dataset
+            'dataset'       : lambda i, k, **c: self.dataset_serializer.serialize_to_view( i.dataset, view='summary', **c ),
+            'dataset_id'    : self._proxy_to_dataset( key='id' ),
+            # TODO: why is this named uuid!? The da doesn't have a uuid - it's the underlying dataset's uuid!
+            'uuid'          : self._proxy_to_dataset( key='uuid' ),
+            # 'dataset_uuid'  : self._proxy_to_dataset( key='uuid' ),
+            'file_name'     : self._proxy_to_dataset( serializer=self.dataset_serializer.serialize_file_name ),
+            'extra_files_path' : self._proxy_to_dataset( serializer=self.dataset_serializer.serialize_extra_files_path ),
+            'permissions'   : self._proxy_to_dataset( serializer=self.dataset_serializer.serialize_permissions ),
+            # TODO: do the sizes proxy accurately/in the same way?
+            'size'          : lambda i, k, **c: int( i.get_size() ),
+            'file_size'     : lambda i, k, **c: self.serializers[ 'size' ]( i, k, **c ),
+            'nice_size'     : lambda i, k, **c: i.get_size( nice_size=True ),
+
+            # common to lddas and hdas - from mapping.py
+            'copied_from_history_dataset_association_id'        : self.serialize_id,
+            'copied_from_library_dataset_dataset_association_id': self.serialize_id,
+            'info'          : lambda i, k, **c: i.info.strip() if isinstance( i.info, string_types ) else i.info,
+            'blurb'         : lambda i, k, **c: i.blurb,
+            'peek'          : lambda i, k, **c: i.display_peek() if i.peek and i.peek != 'no peek' else None,
+
+            'meta_files'    : self.serialize_meta_files,
+            'metadata'      : self.serialize_metadata,
+
+            'creating_job'  : self.serialize_creating_job,
+            'rerunnable'    : self.serialize_rerunnable,
+
+            'parent_id'     : self.serialize_id,
+            'designation'   : lambda i, k, **c: i.designation,
+
+            # 'extended_metadata'     : self.serialize_extended_metadata,
+            # 'extended_metadata_id'  : self.serialize_id,
+
+            # remapped
+            'genome_build'  : lambda i, k, **c: i.dbkey,
+
+            # derived (not mapped) attributes
+            'data_type'     : lambda i, k, **c: i.datatype.__class__.__module__ + '.' + i.datatype.__class__.__name__,
+
+            'converted'     : self.serialize_converted_datasets,
+            # TODO: metadata/extra files
+        })
+        # this is an abstract superclass, so no views are created here
+        # because of that, we need to add a few keys that will use the default serializer
+        self.serializable_keyset.update([ 'name', 'state', 'tool_version', 'extension', 'visible', 'dbkey' ])
+
+    def _proxy_to_dataset( self, serializer=None, key=None ):
+        # dataset associations are (rough) proxies to datasets - access their serializers using this remapping fn
+        # remapping is done either via kwarg `key` (in other words, a dataset attribute key, e.g. uuid)
+        # or via kwarg `serializer` (a function that's passed in, e.g. for permissions)
+        if key:
+            serializer = self.dataset_serializer.serializers.get( key )
+        if serializer:
+            return lambda i, k, **c: serializer( i.dataset, key or k, **c )
+        raise TypeError( 'kwarg serializer or key needed' )
+
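The proxying above only re-points an existing serializer at `item.dataset`; a self-contained illustration of the pattern:

    class Dataset( object ):
        uuid = 'abc-123'

    class DatasetAssociation( object ):
        dataset = Dataset()

    serialize_uuid = lambda i, k, **c: i.uuid
    # proxy: when asked for the association's uuid, serialize the underlying dataset's
    proxied = lambda i, k, **c: serialize_uuid( i.dataset, k, **c )
    print( proxied( DatasetAssociation(), 'uuid' ) )  # -> 'abc-123'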
+    def serialize_meta_files( self, dataset_assoc, key, **context ):
+        """
+        Cycle through meta files and return them as a list of dictionaries.
+        """
+        meta_files = []
+        for meta_type in dataset_assoc.metadata.spec.keys():
+            if isinstance( dataset_assoc.metadata.spec[ meta_type ].param, galaxy.datatypes.metadata.FileParameter ):
+                meta_files.append(
+                    dict( file_type=meta_type,
+                          download_url=self.url_for( 'history_contents_metadata_file',
+                                                     history_id=self.app.security.encode_id(dataset_assoc.history_id),
+                                                     history_content_id=self.app.security.encode_id(dataset_assoc.id),
+                                                     metadata_file=meta_type) ) )
+        return meta_files
+
+    def serialize_metadata( self, dataset_assoc, key, excluded=None, **context ):
+        """
+        Cycle through metadata and return as dictionary.
+        """
+        # dbkey is a repeat actually (metadata_dbkey == genome_build)
+        # excluded = [ 'dbkey' ] if excluded is None else excluded
+        excluded = [] if excluded is None else excluded
+
+        metadata = {}
+        for name, spec in dataset_assoc.metadata.spec.items():
+            if name in excluded:
+                continue
+            val = dataset_assoc.metadata.get( name )
+            # NOTE: no files
+            if isinstance( val, model.MetadataFile ):
+                # only when explicitly set: fetching filepaths can be expensive
+                if not self.app.config.expose_dataset_path:
+                    continue
+                val = val.file_name
+            # TODO:? possibly split this off?
+            # If no value for metadata, look in datatype for metadata.
+            elif val is None and hasattr( dataset_assoc.datatype, name ):
+                val = getattr( dataset_assoc.datatype, name )
+            metadata[ name ] = val
+
+        return metadata
+
+    def serialize_creating_job( self, dataset, key, **context ):
+        """
+        Return the id of the Job that created this dataset (or its original)
+        or None if no `creating_job` is found.
+        """
+        if dataset.creating_job:
+            return self.serialize_id( dataset.creating_job, 'id' )
+        else:
+            return None
+
+    def serialize_rerunnable( self, dataset, key, **context ):
+        """
+        Return False if the tool that created this dataset can't be re-run
+        (e.g. upload).
+        """
+        if dataset.creating_job:
+            tool = self.app.toolbox.get_tool( dataset.creating_job.tool_id, dataset.creating_job.tool_version )
+            if tool and tool.is_workflow_compatible:
+                return True
+        return False
+
+    def serialize_converted_datasets( self, dataset_assoc, key, **context ):
+        """
+        Return a file extension -> converted dataset encoded id map with all
+        the existing converted datasets associated with this instance.
+
+        This filters out deleted associations.
+        """
+        id_map = {}
+        for converted in dataset_assoc.implicitly_converted_datasets:
+            if not converted.deleted and converted.dataset:
+                id_map[ converted.type ] = self.serialize_id( converted.dataset, 'id' )
+        return id_map
+
+
+class DatasetAssociationSerializer( _UnflattenedMetadataDatasetAssociationSerializer ):
+    # TODO: remove this class - metadata should be a sub-object instead as in the superclass
+
+    def add_serializers( self ):
+        super( DatasetAssociationSerializer, self ).add_serializers()
+        # remove the single nesting key here
+        del self.serializers[ 'metadata' ]
+
+    def serialize( self, dataset_assoc, keys, **context ):
+        """
+        Override to add metadata as flattened keys on the serialized DatasetInstance.
+        """
+        # if 'metadata' isn't removed from keys here, serialize will retrieve the un-serializable MetadataCollection
+        # TODO: remove these when metadata is sub-object
+        KEYS_HANDLED_SEPARATELY = ( 'metadata', )
+        left_to_handle = self._pluck_from_list( keys, KEYS_HANDLED_SEPARATELY )
+        serialized = super( DatasetAssociationSerializer, self ).serialize( dataset_assoc, keys, **context )
+
+        # add metadata directly to the dict instead of as a sub-object
+        if 'metadata' in left_to_handle:
+            metadata = self._prefixed_metadata( dataset_assoc )
+            serialized.update( metadata )
+        return serialized
+
+    # TODO: this is more util/gen. use
+    def _pluck_from_list( self, l, elems ):
+        """
+        Remove any of `elems` found in list `l` (in place) and return the list of those found.
+        """
+        found = []
+        for elem in elems:
+            try:
+                index = l.index( elem )
+                found.append( l.pop( index ) )
+            except ValueError:
+                pass
+        return found
+
+    def _prefixed_metadata( self, dataset_assoc ):
+        """
+        Adds (a prefixed version of) the DatasetInstance metadata to the dict,
+        prefixing each key with 'metadata_'.
+        """
+        # build the original, nested dictionary
+        metadata = self.serialize_metadata( dataset_assoc, 'metadata' )
+
+        # prefix each key within and return
+        prefixed = {}
+        for key, val in metadata.items():
+            prefixed_key = 'metadata_' + key
+            prefixed[ prefixed_key ] = val
+        return prefixed
+
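The flattening turns the nested metadata sub-object into top-level `metadata_`-prefixed keys; the same transform standalone:

    metadata = { 'dbkey': 'hg19', 'data_lines': 100 }
    prefixed = dict( ( 'metadata_' + key, val ) for key, val in metadata.items() )
    print( prefixed )  # -> {'metadata_dbkey': 'hg19', 'metadata_data_lines': 100}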
+
+class DatasetAssociationDeserializer( base.ModelDeserializer, deletable.PurgableDeserializerMixin ):
+
+    def add_deserializers( self ):
+        super( DatasetAssociationDeserializer, self ).add_deserializers()
+        deletable.PurgableDeserializerMixin.add_deserializers( self )
+
+        self.deserializers.update({
+            'name' : self.deserialize_basestring,
+            'info' : self.deserialize_basestring,
+        })
+        self.deserializable_keyset.update( self.deserializers.keys() )
+
+    # TODO: untested
+    def deserialize_metadata( self, dataset_assoc, metadata_key, metadata_dict, **context ):
+        """
+        """
+        self.validate.type( metadata_key, metadata_dict, dict )
+        returned = {}
+        for key, val in metadata_dict.items():
+            returned[ key ] = self.deserialize_metadatum( dataset_assoc, key, val, **context )
+        return returned
+
+    def deserialize_metadatum( self, dataset_assoc, key, val, **context ):
+        """
+        """
+        if key not in dataset_assoc.datatype.metadata_spec:
+            return
+        metadata_specification = dataset_assoc.datatype.metadata_spec[ key ]
+        if metadata_specification.get( 'readonly' ):
+            return
+        unwrapped_val = metadata_specification.unwrap( val )
+        setattr( dataset_assoc.metadata, key, unwrapped_val )
+        # ...?
+        return unwrapped_val
+
+
+class DatasetAssociationFilterParser( base.ModelFilterParser, deletable.PurgableFiltersMixin ):
+
+    def _add_parsers( self ):
+        super( DatasetAssociationFilterParser, self )._add_parsers()
+        deletable.PurgableFiltersMixin._add_parsers( self )
+
+        self.orm_filter_parsers.update({
+            'name'      : { 'op': ( 'eq', 'contains', 'like' ) },
+            'state'     : { 'column' : '_state', 'op': ( 'eq', 'in' ) },
+            'visible'   : { 'op': ( 'eq', ), 'val': self.parse_bool },
+        })
+        self.fn_filter_parsers.update({
+            'genome_build' : self.string_standard_ops( 'dbkey' ),
+            'data_type' : {
+                'op': {
+                    'eq' : self.eq_datatype,
+                    'isinstance' : self.isinstance_datatype
+                }
+            }
+        })
+
+    def eq_datatype( self, dataset_assoc, class_str ):
+        """
+        Is the `dataset_assoc` datatype equal to the registered datatype `class_str`?
+        """
+        comparison_class = self.app.datatypes_registry.get_datatype_class_by_name( class_str )
+        return ( comparison_class and
+            dataset_assoc.datatype.__class__ == comparison_class )
+
+    def isinstance_datatype( self, dataset_assoc, class_strs ):
+        """
+        Is the `dataset_assoc` datatype derived from any of the registered
+        datatypes in the comma separated string `class_strs`?
+        """
+        parse_datatype_fn = self.app.datatypes_registry.get_datatype_class_by_name
+        comparison_classes = []
+        for class_str in class_strs.split( ',' ):
+            datatype_class = parse_datatype_fn( class_str )
+            if datatype_class:
+                comparison_classes.append( datatype_class )
+        # isinstance requires a tuple of classes, not a list
+        return ( comparison_classes and
+            isinstance( dataset_assoc.datatype, tuple( comparison_classes ) ) )
diff --git a/lib/galaxy/managers/deletable.py b/lib/galaxy/managers/deletable.py
new file mode 100644
index 0000000..dc55eea
--- /dev/null
+++ b/lib/galaxy/managers/deletable.py
@@ -0,0 +1,116 @@
+"""
+Many models in Galaxy are not meant to be removed from the database but only
+marked as deleted. These models have the boolean attribute 'deleted'.
+
+Other models are deletable and may also be purged. Most often these are
+models that have some backing/supporting resources that can be removed as well
+(e.g. Datasets have data files on a drive). Purging these models removes
+the supporting resources as well. These models also have the boolean
+attribute 'purged'.
+"""
+
+
+class DeletableManagerMixin( object ):
+    """
+    A mixin/interface for a model that is deletable (i.e. has a 'deleted' attr).
+
+    Many resources in Galaxy can be marked as deleted - meaning (in most cases)
+    that they are no longer needed, should not be displayed, or may be actually
+    removed by an admin/script.
+    """
+    def delete( self, item, flush=True, **kwargs ):
+        """
+        Mark as deleted and return.
+        """
+        return self._session_setattr( item, 'deleted', True, flush=flush )
+
+    def undelete( self, item, flush=True, **kwargs ):
+        """
+        Mark as not deleted and return.
+        """
+        return self._session_setattr( item, 'deleted', False, flush=flush )
+
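A self-contained sketch of how the mixin composes with a manager; the `_session_setattr` stub stands in for the real `base.ModelManager` implementation:

    from galaxy.managers.deletable import DeletableManagerMixin

    class StubModelManager( object ):
        def _session_setattr( self, item, attr, val, flush=True ):
            # the real version also adds the item to the session and optionally flushes
            setattr( item, attr, val )
            return item

    class NoteManager( StubModelManager, DeletableManagerMixin ):
        pass

    class Note( object ):
        deleted = False

    note = NoteManager().delete( Note() )
    print( note.deleted )  # -> True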
+
+class DeletableSerializerMixin( object ):
+
+    def add_serializers( self ):
+        self.serializable_keyset.add( 'deleted' )
+
+
+# TODO: these are of questionable value if we don't want to enable users to delete/purge via update
+class DeletableDeserializerMixin( object ):
+
+    def add_deserializers( self ):
+        self.deserializers[ 'deleted' ] = self.deserialize_deleted
+
+    def deserialize_deleted( self, item, key, val, **context ):
+        """
+        Delete or undelete `item` based on `val` then return `item.deleted`.
+        """
+        new_deleted = self.validate.bool( key, val )
+        if new_deleted == item.deleted:
+            return item.deleted
+        # TODO:?? flush=False?
+        if new_deleted:
+            self.manager.delete( item, flush=False )
+        else:
+            self.manager.undelete( item, flush=False )
+        return item.deleted
+
+
+class DeletableFiltersMixin( object ):
+
+    def _add_parsers( self ):
+        self.orm_filter_parsers.update({
+            'deleted': { 'op': ( 'eq', ), 'val': self.parse_bool }
+        })
+
+
+class PurgableManagerMixin( DeletableManagerMixin ):
+    """
+    A manager interface/mixin for a resource that allows deleting and purging where
+    purging is often removal of some additional, non-db resource (e.g. a dataset's
+    file).
+    """
+    def purge( self, item, flush=True, **kwargs ):
+        """
+        Mark as purged and return.
+
+        Override this in subclasses to do the additional resource removal.
+        """
+        return self._session_setattr( item, 'purged', True, flush=flush )
+
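Subclasses are expected to override `purge` to remove the backing resource before marking the flag; a hedged sketch (the `file_name` attribute and the stub are illustrative, not a real Galaxy manager):

    import os

    from galaxy.managers.deletable import PurgableManagerMixin

    class FileBackedManager( PurgableManagerMixin ):
        # stubbed here so the sketch is self-contained; real managers inherit this
        def _session_setattr( self, item, attr, val, flush=True ):
            setattr( item, attr, val )
            return item

        def purge( self, item, flush=True, **kwargs ):
            # remove the supporting file first, then let the mixin mark the item purged
            if os.path.exists( item.file_name ):
                os.remove( item.file_name )
            return super( FileBackedManager, self ).purge( item, flush=flush, **kwargs )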
+
+class PurgableSerializerMixin( DeletableSerializerMixin ):
+
+    def add_serializers( self ):
+        DeletableSerializerMixin.add_serializers( self )
+        self.serializable_keyset.add( 'purged' )
+
+
+class PurgableDeserializerMixin( DeletableDeserializerMixin ):
+
+    def add_deserializers( self ):
+        DeletableDeserializerMixin.add_deserializers( self )
+        self.deserializers[ 'purged' ] = self.deserialize_purged
+
+    def deserialize_purged( self, item, key, val, **context ):
+        """
+        If `val` is True, purge `item` and return `item.purged`.
+        """
+        new_purged = self.validate.bool( key, val )
+        if new_purged == item.purged:
+            return item.purged
+        # do we want to error if something attempts to 'unpurge'?
+        if new_purged:
+            self.manager.purge( item, flush=False )
+        return item.purged
+
+
+class PurgableFiltersMixin( DeletableFiltersMixin ):
+
+    def _add_parsers( self ):
+        DeletableFiltersMixin._add_parsers( self )
+        self.orm_filter_parsers.update({
+            'purged': { 'op': ( 'eq', ), 'val': self.parse_bool }
+        })
diff --git a/lib/galaxy/managers/folders.py b/lib/galaxy/managers/folders.py
new file mode 100644
index 0000000..cbb4ad3
--- /dev/null
+++ b/lib/galaxy/managers/folders.py
@@ -0,0 +1,288 @@
+"""
+Manager and Serializer for Library Folders.
+"""
+
+from galaxy.exceptions import ItemAccessibilityException
+from galaxy.exceptions import InconsistentDatabase
+from galaxy.exceptions import RequestParameterInvalidException
+from galaxy.exceptions import InternalServerError
+from galaxy.exceptions import AuthenticationRequired
+from galaxy.exceptions import InsufficientPermissionsException
+from galaxy.exceptions import MalformedId
+from sqlalchemy.orm.exc import MultipleResultsFound
+from sqlalchemy.orm.exc import NoResultFound
+import logging
+log = logging.getLogger( __name__ )
+
+
+# =============================================================================
+class FolderManager( object ):
+    """
+    Interface/service object for interacting with folders.
+    """
+
+    def get( self, trans, decoded_folder_id, check_manageable=False, check_accessible=True ):
+        """
+        Get the folder from the DB.
+
+        :param  decoded_folder_id:       decoded folder id
+        :type   decoded_folder_id:       int
+        :param  check_manageable:        flag whether to check that the user can manage the item
+        :type   check_manageable:        bool
+        :param  check_accessible:        flag whether to check that user can access item
+        :type   check_accessible:        bool
+
+        :returns:   the requested folder
+        :rtype:     LibraryFolder
+
+        :raises: InconsistentDatabase, RequestParameterInvalidException, InternalServerError
+        """
+        try:
+            folder = trans.sa_session.query( trans.app.model.LibraryFolder ).filter( trans.app.model.LibraryFolder.table.c.id == decoded_folder_id ).one()
+        except MultipleResultsFound:
+            raise InconsistentDatabase( 'Multiple folders found with the same id.' )
+        except NoResultFound:
+            raise RequestParameterInvalidException( 'No folder found with the id provided.' )
+        except Exception as e:
+            raise InternalServerError( 'Error loading from the database: ' + str( e ) )
+        folder = self.secure( trans, folder, check_manageable, check_accessible )
+        return folder
+
+    def secure( self, trans, folder, check_manageable=True, check_accessible=True ):
+        """
+        Check if (a) user can manage folder or (b) folder is accessible to user.
+
+        :param  folder:                  folder item
+        :type   folder:                  LibraryFolder
+        :param  check_manageable:        flag whether to check that user can manage item
+        :type   check_manageable:        bool
+        :param  check_accessible:        flag whether to check that user can access item
+        :type   check_accessible:        bool
+
+        :returns:   the original folder
+        :rtype:     LibraryFolder
+        """
+        # all folders are accessible to an admin
+        if trans.user_is_admin():
+            return folder
+        if check_manageable:
+            folder = self.check_manageable( trans, folder )
+        if check_accessible:
+            folder = self.check_accessible( trans, folder )
+        return folder
+
+    def check_manageable( self, trans, folder ):
+        """
+        Check whether the user can manage the folder.
+
+        :returns:   the original folder
+        :rtype:     LibraryFolder
+
+        :raises: AuthenticationRequired, InsufficientPermissionsException
+        """
+        if not trans.user:
+            raise AuthenticationRequired( "Must be logged in to manage Galaxy items.", type='error' )
+        current_user_roles = trans.get_current_user_roles()
+        if not trans.app.security_agent.can_manage_library_item( current_user_roles, folder ):
+            raise InsufficientPermissionsException( "You don't have permissions to manage this folder.", type='error' )
+        else:
+            return folder
+
+    def check_accessible( self, trans, folder ):
+        """
+        Check whether the folder is accessible to current user.
+        By default every folder is accessible (contents have their own permissions).
+        """
+        return folder
+
+    def get_folder_dict( self, trans, folder ):
+        """
+        Return folder data in the form of a dictionary.
+
+        :param  folder:       folder item
+        :type   folder:       LibraryFolder
+
+        :returns:   dict with data about the folder
+        :rtype:     dictionary
+
+        """
+        folder_dict = folder.to_dict( view='element' )
+        folder_dict = trans.security.encode_all_ids( folder_dict, True )
+        folder_dict[ 'id' ] = 'F' + folder_dict[ 'id' ]
+        if folder_dict[ 'parent_id' ] is not None:
+            folder_dict[ 'parent_id' ] = 'F' + folder_dict[ 'parent_id' ]
+        folder_dict['update_time'] = folder.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+        return folder_dict
+
+    def create( self, trans, parent_folder_id, new_folder_name, new_folder_description='' ):
+        """
+        Create a new folder under the given folder.
+
+        :param  parent_folder_id:       decoded id
+        :type   parent_folder_id:       int
+        :param  new_folder_name:        name of the new folder
+        :type   new_folder_name:        str
+        :param  new_folder_description: description of the folder (optional, defaults to empty string)
+        :type   new_folder_description: str
+
+        :returns:   the new folder
+        :rtype:     LibraryFolder
+
+        :raises: InsufficientPermissionsException
+        """
+        parent_folder = self.get( trans, parent_folder_id )
+        current_user_roles = trans.get_current_user_roles()
+        if not ( trans.user_is_admin() or trans.app.security_agent.can_add_library_item( current_user_roles, parent_folder ) ):
+            raise InsufficientPermissionsException( 'You do not have proper permission to create folders under given folder.' )
+        new_folder = trans.app.model.LibraryFolder( name=new_folder_name, description=new_folder_description )
+        # We are associating the last used genome build with folders, so we will
+        # always initialize a new folder with the default dbkey from the genome
+        # builds list, which is currently '?' (unspecified)
+        new_folder.genome_build = trans.app.genome_builds.default_value
+        parent_folder.add_folder( new_folder )
+        trans.sa_session.add( new_folder )
+        trans.sa_session.flush()
+        # New folders default to having the same permissions as their parent folder
+        trans.app.security_agent.copy_library_permissions( trans, parent_folder, new_folder )
+        return new_folder
+
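A hedged usage sketch for `create` (assuming a live `trans` and an already-decoded parent folder id; the names and values are illustrative):

    from galaxy.managers.folders import FolderManager

    folder_manager = FolderManager()
    new_folder = folder_manager.create( trans, parent_folder_id=42,
                                        new_folder_name='raw reads',
                                        new_folder_description='sequencing run 7' )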
+    def update( self, trans, folder, name=None, description=None ):
+        """
+        Update the given folder's name or description.
+
+        :param  folder:        the model object
+        :type   folder:        LibraryFolder
+        :param  name:          new name for the library folder
+        :type   name:          str
+        :param  description:   new description for the library folder
+        :type   description:   str
+
+        :returns:   the folder
+        :rtype:     LibraryFolder
+
+        :raises: ItemAccessibilityException, InsufficientPermissionsException
+        """
+        changed = False
+        if not trans.user_is_admin():
+            if not self.check_manageable( trans, folder ):
+                raise InsufficientPermissionsException( "You do not have proper permission to update the library folder." )
+        if folder.deleted is True:
+            raise ItemAccessibilityException( "You cannot update a deleted library folder. Undelete it first." )
+        if name is not None and name != folder.name:
+            folder.name = name
+            changed = True
+        if description is not None and description != folder.description:
+            folder.description = description
+            changed = True
+        if changed:
+            trans.sa_session.add( folder )
+            trans.sa_session.flush()
+        return folder
+
+    def delete( self, trans, folder, undelete=False ):
+        """
+        Mark given folder deleted/undeleted based on the flag.
+
+        :param  folder:        the model object
+        :type   folder:        LibraryFolder
+        :param  undelete:      flag whether to delete (when False) or undelete
+        :type   undelete:      Bool
+
+        :returns:   the folder
+        :rtype:     LibraryFolder
+
+        :raises: ItemAccessibilityException
+        """
+        if not trans.user_is_admin():
+            folder = self.check_manageable( trans, folder )
+        folder.deleted = not undelete
+        trans.sa_session.add( folder )
+        trans.sa_session.flush()
+        return folder
+
+    def get_current_roles( self, trans, folder ):
+        """
+        Find all roles currently connected to relevant permissions
+        on the folder.
+
+        :param  folder:      the model object
+        :type   folder:      LibraryFolder
+
+        :returns:   dict of current roles for all available permission types
+        :rtype:     dictionary
+        """
+        # Omit duplicated roles by converting to set
+        modify_roles = set( trans.app.security_agent.get_roles_for_action( folder, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY ) )
+        manage_roles = set( trans.app.security_agent.get_roles_for_action( folder, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE ) )
+        add_roles = set( trans.app.security_agent.get_roles_for_action( folder, trans.app.security_agent.permitted_actions.LIBRARY_ADD ) )
+
+        modify_folder_role_list = [ ( modify_role.name, trans.security.encode_id( modify_role.id ) ) for modify_role in modify_roles ]
+        manage_folder_role_list = [ ( manage_role.name, trans.security.encode_id( manage_role.id ) ) for manage_role in manage_roles ]
+        add_library_item_role_list = [ ( add_role.name, trans.security.encode_id( add_role.id ) ) for add_role in add_roles ]
+        return dict( modify_folder_role_list=modify_folder_role_list,
+                     manage_folder_role_list=manage_folder_role_list,
+                     add_library_item_role_list=add_library_item_role_list )
+
+    def can_add_item( self, trans, folder ):
+        """
+        Return True if the user has permission to add items to the given folder.
+        """
+        if trans.user_is_admin():
+            return True
+        current_user_roles = trans.get_current_user_roles()
+        add_roles = set( trans.app.security_agent.get_roles_for_action( folder, trans.app.security_agent.permitted_actions.LIBRARY_ADD ) )
+        for role in current_user_roles:
+            if role in add_roles:
+                return True
+        return False
+
+    def cut_the_prefix( self, encoded_folder_id ):
+        """
+        Remove the prefix from the encoded folder id.
+
+        :param encoded_folder_id: encoded id of the Folder object with 'F' prepended
+        :type  encoded_folder_id: string
+
+        :returns:  encoded Folder id without the 'F' prefix
+        :rtype:    string
+
+        :raises: MalformedId
+        """
+        if ( ( len( encoded_folder_id ) % 16 == 1 ) and encoded_folder_id.startswith( 'F' ) ):
+            cut_id = encoded_folder_id[ 1: ]
+        else:
+            raise MalformedId( 'Malformed folder id ( %s ) specified, unable to decode.' % str( encoded_folder_id ) )
+        return cut_id
+
+    def decode_folder_id( self, trans, encoded_folder_id ):
+        """
+        Decode the folder id given that it has already lost the prefixed 'F'.
+
+        :param encoded_folder_id: encoded id of the Folder object
+        :type  encoded_folder_id: string
+
+        :returns:  decoded Folder id
+        :rtype:    int
+
+        :raises: MalformedId
+        """
+        try:
+            decoded_id = trans.security.decode_id( encoded_folder_id )
+        except ValueError:
+            raise MalformedId( "Malformed folder id ( %s ) specified, unable to decode" % ( str( encoded_folder_id ) ) )
+        return decoded_id
+
+    def cut_and_decode( self, trans, encoded_folder_id ):
+        """
+        Cuts the folder prefix (the prepended 'F') and returns the decoded id.
+
+        :param encoded_folder_id: encoded id of the Folder object
+        :type  encoded_folder_id: string
+
+        :returns:  decoded Folder id
+        :rtype:    int
+        """
+        return self.decode_folder_id( trans, self.cut_the_prefix( encoded_folder_id ) )
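The 'F' prefix convention means an encoded folder id is a 16-character security hash with 'F' prepended (17 characters in total, hence the `len( ... ) % 16 == 1` check above); standalone, with a placeholder hash:

    encoded_folder_id = 'F' + 'f2db41e1fa331b3e'
    assert len( encoded_folder_id ) % 16 == 1 and encoded_folder_id.startswith( 'F' )
    # strip the prefix; the remainder goes to trans.security.decode_id
    print( encoded_folder_id[ 1: ] )  # -> 'f2db41e1fa331b3e'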
diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py
new file mode 100644
index 0000000..1616756
--- /dev/null
+++ b/lib/galaxy/managers/hdas.py
@@ -0,0 +1,479 @@
+"""
+Manager and Serializer for HDAs.
+
+HistoryDatasetAssociations (HDAs) are datasets contained or created in a
+history.
+"""
+
+import os
+import gettext
+
+from galaxy import model
+from galaxy import exceptions
+from galaxy import datatypes
+from galaxy.managers import datasets
+from galaxy.managers import secured
+from galaxy.managers import taggable
+from galaxy.managers import annotatable
+from galaxy.managers import users
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class HDAManager( datasets.DatasetAssociationManager,
+                  secured.OwnableManagerMixin,
+                  taggable.TaggableManagerMixin,
+                  annotatable.AnnotatableManagerMixin ):
+    """
+    Interface/service object for interacting with HDAs.
+    """
+    model_class = model.HistoryDatasetAssociation
+    foreign_key_name = 'history_dataset_association'
+
+    tag_assoc = model.HistoryDatasetAssociationTagAssociation
+    annotation_assoc = model.HistoryDatasetAssociationAnnotationAssociation
+
+    # TODO: move what makes sense into DatasetManager
+    # TODO: which of these are common with LDDAs and can be pushed down into DatasetAssociationManager?
+
+    def __init__( self, app ):
+        """
+        Set up and initialize other managers needed by hdas.
+        """
+        super( HDAManager, self ).__init__( app )
+        self.user_manager = users.UserManager( app )
+
+    # .... security and permissions
+    def is_accessible( self, hda, user, **kwargs ):
+        """
+        Override to allow owners (those that own the associated history).
+        """
+        # this, apparently, is not True:
+        #   if I have a copy of a dataset and anyone who manages permissions on it revokes my access
+        #   I can not access that dataset even if it's in my history
+        # if self.is_owner( hda, user, **kwargs ):
+        #     return True
+        return super( HDAManager, self ).is_accessible( hda, user )
+
+    def is_owner( self, hda, user, current_history=None, **kwargs ):
+        """
+        Use history to see if current user owns HDA.
+        """
+        history = hda.history
+        if self.user_manager.is_admin( user ):
+            return True
+        # allow anonymous user to access current history
+        # TODO: some dup here with historyManager.is_owner but prevents circ import
+        # TODO: awkward kwarg (which is my new band name); this may not belong here - move to controller?
+        if self.user_manager.is_anonymous( user ):
+            if current_history and history == current_history:
+                return True
+            return False
+        return history.user == user
+
+    # .... create and copy
+    def create( self, history=None, dataset=None, flush=True, **kwargs ):
+        """
+        Create a new hda, optionally passing in its history and dataset.
+
+        .. note:: to explicitly set hid to `None` you must pass in `hid=None`,
+            otherwise it will be set automatically.
+        """
+        if not dataset:
+            kwargs[ 'create_dataset' ] = True
+        hda = model.HistoryDatasetAssociation( history=history, dataset=dataset,
+                                               sa_session=self.app.model.context, **kwargs )
+
+        if history:
+            history.add_dataset( hda, set_hid=( 'hid' not in kwargs ) )
+        # TODO:?? some internal sanity check here (or maybe in add_dataset) to make sure hids are not duped?
+
+        self.session().add( hda )
+        if flush:
+            self.session().flush()
+        return hda
+
+    def copy( self, hda, history=None, **kwargs ):
+        """
+        Copy and return the given HDA.
+        """
+        # TODO:?? not using the following as this fn does not set history and COPIES hid (this doesn't seem correct)
+        # return hda.copy()
+        copy = model.HistoryDatasetAssociation(
+            name=hda.name,
+            info=hda.info,
+            blurb=hda.blurb,
+            peek=hda.peek,
+            tool_version=hda.tool_version,
+            extension=hda.extension,
+            dbkey=hda.dbkey,
+            dataset=hda.dataset,
+            visible=hda.visible,
+            deleted=hda.deleted,
+            parent_id=kwargs.get( 'parent_id', None ),
+        )
+        # add_dataset will update the hid to the next available one in the history
+        if history:
+            history.add_dataset( copy )
+
+        copy.copied_from_history_dataset_association = hda
+        copy.set_size()
+
+        original_annotation = self.annotation( hda )
+        self.annotate( copy, original_annotation, user=history.user )
+
+        # TODO: update from kwargs?
+
+        # Need to set after flushed, as MetadataFiles require dataset.id
+        self.session().add( copy )
+        self.session().flush()
+        copy.metadata = hda.metadata
+
+        # In some instances peek relies on dataset_id, e.g. gmaj.zip for viewing MAFs
+        if not hda.datatype.copy_safe_peek:
+            copy.set_peek()
+
+        self.session().flush()
+
+        # these use a second session flush and need to be after the first
+        original_tags = self.get_tags( hda )
+        self.set_tags( copy, original_tags, user=history.user )
+
+        return copy
+
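A sketch of copying an HDA into another history using the manager above (assuming `app`, `hda`, and `target_history` already exist):

    from galaxy.managers.hdas import HDAManager

    hda_manager = HDAManager( app )
    # the copy receives the next hid in target_history and inherits annotation and tags
    copy = hda_manager.copy( hda, history=target_history )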
+    def copy_ldda( self, history, ldda, **kwargs ):
+        """
+        Copy this HDA as a LDDA and return.
+        """
+        return ldda.to_history_dataset_association( history, add_to_history=True )
+
+    # .... deletion and purging
+    def purge( self, hda, flush=True ):
+        """
+        Purge this HDA and the dataset underlying it.
+        """
+        user = hda.history.user or None
+        quota_amount_reduction = 0
+        if user:
+            quota_amount_reduction = hda.quota_amount( user )
+        super( HDAManager, self ).purge( hda, flush=flush )
+        # decrease the user's space used
+        if quota_amount_reduction:
+            user.adjust_total_disk_usage( -quota_amount_reduction )
+        return hda
+
+    # .... states
+    def error_if_uploading( self, hda ):
+        """
+        Raise error if HDA is still uploading.
+        """
+        # TODO: may be better added to an overridden get_accessible
+        if hda.state == model.Dataset.states.UPLOAD:
+            raise exceptions.Conflict( "Please wait until this dataset finishes uploading" )
+        return hda
+
+    def has_been_resubmitted( self, hda ):
+        """
+        Return True if the hda's job was resubmitted at any point.
+        """
+        job_states = model.Job.states
+        query = ( self._job_state_history_query( hda )
+                  .filter( model.JobStateHistory.state == job_states.RESUBMITTED ) )
+        return self.app.model.context.query( query.exists() ).scalar()
+
+    def _job_state_history_query( self, hda ):
+        """
+        Return a query of the job's state history for the job that created this hda.
+        """
+        session = self.app.model.context
+        JobToOutputDatasetAssociation = model.JobToOutputDatasetAssociation
+        JobStateHistory = model.JobStateHistory
+
+        # TODO: this does not play well with copied hdas
+        # NOTE: don't eagerload (JODA will load the hda we're using!)
+        hda_id = hda.id
+        query = ( session.query( JobToOutputDatasetAssociation, JobStateHistory )
+                  .filter( JobToOutputDatasetAssociation.dataset_id == hda_id )
+                  .filter( JobStateHistory.job_id == JobToOutputDatasetAssociation.job_id )
+                  .enable_eagerloads( False ) )
+        return query
+
+    def data_conversion_status( self, hda ):
+        """
+        Returns a message if an hda is not ready to be used in visualization.
+        """
+        # this is a weird syntax and return val
+        if not hda:
+            return self.model_class.conversion_messages.NO_DATA
+        if hda.state == model.Job.states.ERROR:
+            return self.model_class.conversion_messages.ERROR
+        if hda.state != model.Job.states.OK:
+            return self.model_class.conversion_messages.PENDING
+        return None
+
+    # .... data
+    # TODO: to data provider or Text datatype directly
+    def text_data( self, hda, preview=True ):
+        """
+        Get data from text file, truncating if necessary.
+        """
+        # 1 MB
+        MAX_PEEK_SIZE = 1000000
+
+        truncated = False
+        hda_data = None
+        # For now, cannot get data from non-text datasets.
+        if not isinstance( hda.datatype, datatypes.data.Text ):
+            return truncated, hda_data
+        if not os.path.exists( hda.file_name ):
+            return truncated, hda_data
+
+        truncated = preview and os.stat( hda.file_name ).st_size > MAX_PEEK_SIZE
+        # read at most MAX_PEEK_SIZE bytes and close the file promptly
+        with open( hda.file_name ) as hda_file:
+            hda_data = hda_file.read( MAX_PEEK_SIZE )
+        return truncated, hda_data
+
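The truncation logic reads at most `MAX_PEEK_SIZE` bytes; the same idea standalone, with a small limit for demonstration:

    import os
    import tempfile

    MAX_PEEK_SIZE = 16
    with tempfile.NamedTemporaryFile( mode='w', delete=False ) as tmp:
        tmp.write( 'x' * 100 )
    truncated = os.stat( tmp.name ).st_size > MAX_PEEK_SIZE
    with open( tmp.name ) as f:
        data = f.read( MAX_PEEK_SIZE )
    print( len( data ) )  # -> 16; truncated is True
    os.remove( tmp.name )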
+    # .... annotatable
+    def annotation( self, hda ):
+        # override to scope to history owner
+        return self._user_annotation( hda, hda.history.user )
+
+
+class HDASerializer(  # datasets._UnflattenedMetadataDatasetAssociationSerializer,
+        datasets.DatasetAssociationSerializer,
+        taggable.TaggableSerializerMixin,
+        annotatable.AnnotatableSerializerMixin ):
+    model_manager_class = HDAManager
+
+    def __init__( self, app ):
+        super( HDASerializer, self ).__init__( app )
+        self.hda_manager = self.manager
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            'id',
+            'type_id',
+            'name',
+            'history_id', 'hid',
+            'history_content_type',
+            'dataset_id',
+            'state', 'extension',
+            'deleted', 'purged', 'visible',
+            'type', 'url',
+            'create_time', 'update_time',
+        ])
+        self.add_view( 'detailed', [
+            'model_class',
+            'history_id', 'hid',
+            # why include if model_class is there?
+            'hda_ldda',
+            # TODO: accessible needs to go away
+            'accessible',
+
+            # remapped
+            'genome_build', 'misc_info', 'misc_blurb',
+            'file_ext', 'file_size',
+
+            'resubmitted',
+            'metadata', 'meta_files', 'data_type',
+            'peek',
+
+            'creating_job',
+            'rerunnable',
+
+            'uuid',
+            'permissions',
+            'file_name',
+
+            'display_apps',
+            'display_types',
+            'visualizations',
+
+            # 'url',
+            'download_url',
+
+            'annotation', 'tags',
+
+            'api_type'
+        ], include_keys_from='summary' )
+
+        self.add_view( 'extended', [
+            'tool_version', 'parent_id', 'designation',
+        ], include_keys_from='detailed' )
+
+        # keyset returned when showing a dataset that the user has no access to
+        self.add_view( 'inaccessible', [
+            'accessible',
+            'id', 'name', 'history_id', 'hid', 'history_content_type',
+            'state', 'deleted', 'visible'
+        ])
+
+    def add_serializers( self ):
+        super( HDASerializer, self ).add_serializers()
+        taggable.TaggableSerializerMixin.add_serializers( self )
+        annotatable.AnnotatableSerializerMixin.add_serializers( self )
+
+        self.serializers.update({
+            'model_class'   : lambda *a, **c: 'HistoryDatasetAssociation',
+            'history_content_type': lambda *a, **c: 'dataset',
+            'hda_ldda'      : lambda *a, **c: 'hda',
+            'type_id'       : self.serialize_type_id,
+
+            'history_id'    : self.serialize_id,
+
+            # remapped
+            'misc_info'     : self._remap_from( 'info' ),
+            'misc_blurb'    : self._remap_from( 'blurb' ),
+            'file_ext'      : self._remap_from( 'extension' ),
+            'file_path'     : self._remap_from( 'file_name' ),
+
+            'resubmitted'   : lambda i, k, **c: self.hda_manager.has_been_resubmitted( i ),
+            'display_apps'  : self.serialize_display_apps,
+            'display_types' : self.serialize_old_display_applications,
+            'visualizations': self.serialize_visualization_links,
+
+            # 'url'   : url_for( 'history_content_typed', history_id=encoded_history_id, id=encoded_id, type="dataset" ),
+            # TODO: this intermittently causes a routes.GenerationException - temp use the legacy route to prevent this
+            #   see also: https://trello.com/c/5d6j4X5y
+            #   see also: https://sentry.galaxyproject.org/galaxy/galaxy-main/group/20769/events/9352883/
+            'url'           : lambda i, k, **c: self.url_for( 'history_content',
+                                                              history_id=self.app.security.encode_id( i.history_id ),
+                                                              id=self.app.security.encode_id( i.id ) ),
+            'urls'          : self.serialize_urls,
+
+            # TODO: backwards compat: need to go away
+            'download_url'  : lambda i, k, **c: self.url_for( 'history_contents_display',
+                                                              history_id=self.app.security.encode_id( i.history.id ),
+                                                              history_content_id=self.app.security.encode_id( i.id ) ),
+            'parent_id'     : self.serialize_id,
+            # TODO: to DatasetAssociationSerializer
+            'accessible'    : lambda i, k, user=None, **c: self.manager.is_accessible( i, user ),
+            'api_type'      : lambda *a, **c: 'file',
+            'type'          : lambda *a, **c: 'file'
+        })
+
+    def serialize( self, hda, keys, user=None, **context ):
+        """
+        Override to hide information to users not able to access.
+        """
+        # TODO: to DatasetAssociationSerializer
+        if not self.manager.is_accessible( hda, user, **context ):
+            keys = self._view_to_keys( 'inaccessible' )
+        return super( HDASerializer, self ).serialize( hda, keys, user=user, **context )
+
+    def serialize_display_apps( self, hda, key, trans=None, **context ):
+        """
+        Return dictionary containing new-style display app urls.
+        """
+        display_apps = []
+        for display_app in hda.get_display_applications( trans ).itervalues():
+
+            app_links = []
+            for link_app in display_app.links.itervalues():
+                app_links.append({
+                    'target': link_app.url.get( 'target_frame', '_blank' ),
+                    'href': link_app.get_display_url( hda, trans ),
+                    'text': gettext.gettext( link_app.name )
+                })
+            if app_links:
+                display_apps.append( dict( label=display_app.name, links=app_links ) )
+
+        return display_apps
+
+    def serialize_old_display_applications( self, hda, key, trans=None, **context ):
+        """
+        Return dictionary containing old-style display app urls.
+        """
+        display_apps = []
+        if not self.app.config.enable_old_display_applications:
+            return display_apps
+
+        display_link_fn = hda.datatype.get_display_links
+        for display_app in hda.datatype.get_display_types():
+            target_frame, display_links = display_link_fn( hda, display_app, self.app, trans.request.base )
+
+            if len( display_links ) > 0:
+                display_label = hda.datatype.get_display_label( display_app )
+
+                app_links = []
+                for display_name, display_link in display_links:
+                    app_links.append({
+                        'target': target_frame,
+                        'href': display_link,
+                        'text': gettext.gettext( display_name )
+                    })
+                if app_links:
+                    display_apps.append( dict( label=display_label, links=app_links ) )
+
+        return display_apps
+
+    def serialize_visualization_links( self, hda, key, trans=None, **context ):
+        """
+        Return a list of dictionaries with links to visualization pages
+        for those visualizations that apply to this hda.
+        """
+        # use older system if registry is off in the config
+        if not self.app.visualizations_registry:
+            return hda.get_visualizations()
+        return self.app.visualizations_registry.get_visualizations( trans, hda )
+
+    def serialize_urls( self, hda, key, **context ):
+        """
+        Return web controller urls useful for this HDA.
+        """
+        url_for = self.url_for
+        encoded_id = self.app.security.encode_id( hda.id )
+        urls = {
+            'purge'         : url_for( controller='dataset', action='purge_async', dataset_id=encoded_id ),
+            'display'       : url_for( controller='dataset', action='display', dataset_id=encoded_id, preview=True ),
+            'edit'          : url_for( controller='dataset', action='edit', dataset_id=encoded_id ),
+            'download'      : url_for( controller='dataset', action='display',
+                                       dataset_id=encoded_id, to_ext=hda.extension ),
+            'report_error'  : url_for( controller='dataset', action='errors', id=encoded_id ),
+            'rerun'         : url_for( controller='tool_runner', action='rerun', id=encoded_id ),
+            'show_params'   : url_for( controller='dataset', action='show_params', dataset_id=encoded_id ),
+            'visualization' : url_for( controller='visualization', action='index',
+                                       id=encoded_id, model='HistoryDatasetAssociation' ),
+            'meta_download' : url_for( controller='dataset', action='get_metadata_file',
+                                       hda_id=encoded_id, metadata_name='' ),
+        }
+        return urls
+
+
+class HDADeserializer( datasets.DatasetAssociationDeserializer,
+                       taggable.TaggableDeserializerMixin,
+                       annotatable.AnnotatableDeserializerMixin ):
+    """
+    Interface/service object for validating and deserializing dictionaries into HDAs.
+    """
+    model_manager_class = HDAManager
+
+    def __init__( self, app ):
+        super( HDADeserializer, self ).__init__( app )
+        self.hda_manager = self.manager
+
+    def add_deserializers( self ):
+        super( HDADeserializer, self ).add_deserializers()
+        taggable.TaggableDeserializerMixin.add_deserializers( self )
+        annotatable.AnnotatableDeserializerMixin.add_deserializers( self )
+
+        self.deserializers.update({
+            'visible'       : self.deserialize_bool,
+            # remapped
+            'genome_build'  : lambda i, k, v, **c: self.deserialize_genome_build( i, 'dbkey', v ),
+            'misc_info'     : lambda i, k, v, **c: self.deserialize_basestring( i, 'info', v,
+                                                                                convert_none_to_empty=True ),
+        })
+        self.deserializable_keyset.update( self.deserializers.keys() )
+
+
+class HDAFilterParser( datasets.DatasetAssociationFilterParser,
+                       taggable.TaggableFilterMixin,
+                       annotatable.AnnotatableFilterMixin ):
+    model_manager_class = HDAManager
+    model_class = model.HistoryDatasetAssociation
+
+    def _add_parsers( self ):
+        super( HDAFilterParser, self )._add_parsers()
+        taggable.TaggableFilterMixin._add_parsers( self )
+        annotatable.AnnotatableFilterMixin._add_parsers( self )
diff --git a/lib/galaxy/managers/hdcas.py b/lib/galaxy/managers/hdcas.py
new file mode 100644
index 0000000..15111f0
--- /dev/null
+++ b/lib/galaxy/managers/hdcas.py
@@ -0,0 +1,261 @@
+"""
+Manager and Serializer for HDCAs.
+
+HistoryDatasetCollectionAssociations (HDCAs) are datasets contained or created in a
+history.
+"""
+
+from galaxy import model
+
+from galaxy.managers import base
+from galaxy.managers import secured
+from galaxy.managers import deletable
+from galaxy.managers import taggable
+from galaxy.managers import annotatable
+
+from galaxy.managers import hdas
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# TODO: to DatasetCollectionInstanceManager
+class HDCAManager(
+        base.ModelManager,
+        secured.AccessibleManagerMixin,
+        secured.OwnableManagerMixin,
+        deletable.PurgableManagerMixin,
+        taggable.TaggableManagerMixin,
+        annotatable.AnnotatableManagerMixin ):
+    """
+    Interface/service object for interacting with HDCAs.
+    """
+    model_class = model.HistoryDatasetCollectionAssociation
+    foreign_key_name = 'history_dataset_collection_association'
+
+    tag_assoc = model.HistoryDatasetCollectionTagAssociation
+    annotation_assoc = model.HistoryDatasetCollectionAnnotationAssociation
+
+    def __init__( self, app ):
+        """
+        Set up and initialize other managers needed by hdcas.
+        """
+        super( HDCAManager, self ).__init__( app )
+
+    def map_datasets( self, content, fn, *parents ):
+        """
+        Iterate over the datasets of a given collection, recursing into nested
+        collections, and call `fn` on each dataset.
+        """
+        returned = []
+        # lots of nesting going on within the nesting
+        collection = content.collection if hasattr( content, 'collection' ) else content
+        this_parents = ( content, ) + parents
+        for element in collection.elements:
+            next_parents = ( element, ) + this_parents
+            if element.is_collection:
+                processed_list = self.map_datasets( element.child_collection, fn, *next_parents )
+                returned.extend( processed_list )
+            else:
+                processed = fn( element.dataset_instance, *next_parents )
+                returned.append( processed )
+        return returned
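+
+    # usage sketch (hypothetical caller, for illustration only):
+    #   manager = HDCAManager( app )
+    #   ids = manager.map_datasets( hdca, lambda dataset, *parents: dataset.id )
+    # fn receives each dataset first, then the chain of parent elements and
+    # collections built up during the recursion (innermost first)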
+
+    # TODO: un-stub
+
+
+# serializers
+# -----------------------------------------------------------------------------
+class DCESerializer( base.ModelSerializer ):
+    """
+    Serializer for DatasetCollectionElements.
+    """
+
+    def __init__( self, app ):
+        super( DCESerializer, self ).__init__( app )
+        self.hda_serializer = hdas.HDASerializer( app )
+        self.dc_serializer = DCSerializer( app, dce_serializer=self )
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            'id', 'model_class',
+            'element_index',
+            'element_identifier',
+            'element_type',
+            'object'
+        ])
+
+    def add_serializers( self ):
+        super( DCESerializer, self ).add_serializers()
+        self.serializers.update({
+            'model_class'   : lambda *a, **c: 'DatasetCollectionElement',
+            'object'        : self.serialize_object
+        })
+
+    def serialize_object( self, item, key, **context ):
+        if item.hda:
+            return self.hda_serializer.serialize_to_view( item.hda, view='summary', **context )
+        if item.child_collection:
+            return self.dc_serializer.serialize_to_view( item.child_collection, view='detailed', **context )
+        return 'object'
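+        # note: an element wraps either an hda (flat collections) or a nested
+        # child collection (e.g. list:paired); the string above is the fallback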
+
+
+class DCSerializer( base.ModelSerializer ):
+    """
+    Serializer for DatasetCollections.
+    """
+
+    def __init__( self, app, dce_serializer=None ):
+        super( DCSerializer, self ).__init__( app )
+        self.dce_serializer = dce_serializer or DCESerializer( app )
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            'id',
+            'create_time',
+            'update_time',
+            'collection_type',
+            'populated',
+            'populated_state',
+            'populated_state_message',
+        ])
+        self.add_view( 'detailed', [
+            'elements'
+        ], include_keys_from='summary' )
+
+    def add_serializers( self ):
+        super( DCSerializer, self ).add_serializers()
+        self.serializers.update({
+            'model_class'   : lambda *a, **c: 'DatasetCollection',
+            'elements'      : self.serialize_elements,
+            'element_count' : self.serialize_element_count
+        })
+
+    def serialize_elements( self, item, key, **context ):
+        returned = []
+        for element in item.elements:
+            serialized = self.dce_serializer.serialize_to_view( element, view='summary', **context )
+            returned.append( serialized )
+        return returned
+
+    def serialize_element_count( self, item, key, **context ):
+        """Return the count of elements for this collection."""
+        # TODO: app.model.context -> session
+        # TODO: to the container interface (dataset_collection_contents)
+        return ( self.app.model.context.query( model.DatasetCollectionElement )
+            .filter( model.DatasetCollectionElement.dataset_collection_id == item.id )
+            .count() )
+
+
+class DCASerializer( base.ModelSerializer ):
+    """
+    Base (abstract) Serializer class for HDCAs and LDCAs.
+    """
+
+    def __init__( self, app, dce_serializer=None ):
+        super( DCASerializer, self ).__init__( app )
+        self.dce_serializer = dce_serializer or DCESerializer( app )
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            'id',
+            'create_time', 'update_time',
+            'collection_type',
+            'populated',
+            'populated_state',
+            'populated_state_message',
+        ])
+        self.add_view( 'detailed', [
+            'elements'
+        ], include_keys_from='summary' )
+
+    def add_serializers( self ):
+        super( DCASerializer, self ).add_serializers()
+        # most attributes are (kinda) proxied from DCs - we need a serializer to proxy to
+        self.dc_serializer = DCSerializer( self.app )
+        # then set the serializers to point to it for those attrs
+        collection_keys = [
+            'create_time',
+            'update_time',
+            'collection_type',
+            'populated',
+            'populated_state',
+            'populated_state_message',
+            'elements',
+            'element_count'
+        ]
+        for key in collection_keys:
+            self.serializers[ key ] = self._proxy_to_dataset_collection( key=key )
+
+    def _proxy_to_dataset_collection( self, serializer=None, key=None ):
+        # dataset_collection associations are (rough) proxies to dataset collections -
+        # access the collection's serializer using this remapping fn.
+        # remapping is done either by kwarg key (a collection attr key, e.g. populated_state)
+        # or by kwarg serializer (a function that's passed in, e.g. for elements)
+        if key:
+            return lambda i, k, **c: self.dc_serializer.serialize( i.collection, [ k ], **c )[ k ]
+        if serializer:
+            return lambda i, k, **c: serializer( i.collection, key or k, **c )
+        raise TypeError( 'kwarg serializer or key needed' )
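+        # e.g. the proxy returned for key='populated' behaves like:
+        #   lambda hdca, k, **c: self.dc_serializer.serialize( hdca.collection, [ 'populated' ] )[ 'populated' ]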
+
+
+class HDCASerializer(
+        DCASerializer,
+        taggable.TaggableSerializerMixin,
+        annotatable.AnnotatableSerializerMixin ):
+    """
+    Serializer for HistoryDatasetCollectionAssociations.
+    """
+
+    def __init__( self, app ):
+        super( HDCASerializer, self ).__init__( app )
+        self.hdca_manager = HDCAManager( app )
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            'id',
+            'type_id',
+            'name',
+            'history_id', 'hid',
+            'history_content_type',
+
+            'collection_type',
+            'populated',
+            'populated_state',
+            'populated_state_message',
+
+            'deleted',
+            # 'purged',
+            'visible',
+            'type', 'url',
+            'create_time', 'update_time',
+        ])
+        self.add_view( 'detailed', [
+            'elements'
+        ], include_keys_from='summary' )
+
+    def add_serializers( self ):
+        super( HDCASerializer, self ).add_serializers()
+        taggable.TaggableSerializerMixin.add_serializers( self )
+        annotatable.AnnotatableSerializerMixin.add_serializers( self )
+
+        self.serializers.update({
+            'model_class'               : lambda *a, **c: self.hdca_manager.model_class.__name__,
+            # TODO: remove
+            'type'                      : lambda *a, **c: 'collection',
+            # part of a history and container
+            'history_id'                : self.serialize_id,
+            'history_content_type'      : lambda *a, **c: self.hdca_manager.model_class.content_type,
+            'type_id'                   : self.serialize_type_id,
+
+            'url'   : lambda i, k, **c: self.url_for( 'history_content_typed',
+                                                      history_id=self.app.security.encode_id( i.history_id ),
+                                                      id=self.app.security.encode_id( i.id ),
+                                                      type=self.hdca_manager.model_class.content_type ),
+        })
diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py
new file mode 100644
index 0000000..9eafb9c
--- /dev/null
+++ b/lib/galaxy/managers/histories.py
@@ -0,0 +1,388 @@
+"""
+Manager and Serializer for histories.
+
+Histories are containers for datasets or dataset collections
+created (or copied) by users over the course of an analysis.
+"""
+
+from sqlalchemy import desc, asc
+
+from galaxy import model
+from galaxy import exceptions as glx_exceptions
+from galaxy.managers import sharable
+from galaxy.managers import deletable
+from galaxy.managers import hdas
+# from galaxy.managers import hdcas
+from galaxy.managers import history_contents
+
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class HistoryManager( sharable.SharableModelManager, deletable.PurgableManagerMixin ):
+
+    model_class = model.History
+    foreign_key_name = 'history'
+    user_share_model = model.HistoryUserShareAssociation
+
+    tag_assoc = model.HistoryTagAssociation
+    annotation_assoc = model.HistoryAnnotationAssociation
+    rating_assoc = model.HistoryRatingAssociation
+
+    # TODO: incorporate imp/exp (or alias to)
+
+    def __init__( self, app, *args, **kwargs ):
+        super( HistoryManager, self ).__init__( app, *args, **kwargs )
+        self.hda_manager = hdas.HDAManager( app )
+        self.contents_manager = history_contents.HistoryContentsManager( app )
+        self.contents_filters = history_contents.HistoryContentsFilters( app )
+
+    def copy( self, history, user, **kwargs ):
+        """
+        Copy and return the given `history`.
+        """
+        return history.copy( target_user=user, **kwargs )
+
+    # .... sharable
+    # overriding to handle anonymous users' current histories in both cases
+    def by_user( self, user, current_history=None, **kwargs ):
+        """
+        Get all the histories for a given user (for anonymous users, their
+        current history), ordered by update time.
+        """
+        # handle default and/or anonymous user (which still may not have a history yet)
+        if self.user_manager.is_anonymous( user ):
+            return [ current_history ] if current_history else []
+        return super( HistoryManager, self ).by_user( user, **kwargs )
+
+    def is_owner( self, history, user, current_history=None, **kwargs ):
+        """
+        True if the current user is the owner of the given history.
+        """
+        # anon users are only allowed to view their current history
+        if self.user_manager.is_anonymous( user ):
+            if current_history and history == current_history:
+                return True
+            return False
+        return super( HistoryManager, self ).is_owner( history, user )
+
+    # TODO: possibly to sharable or base
+    def most_recent( self, user, filters=None, current_history=None, **kwargs ):
+        """
+        Return the most recently updated history for the user.
+
+        If the user is anonymous, return the current history, or None if the
+        current history is absent or deleted.
+        """
+        if self.user_manager.is_anonymous( user ):
+            return None if ( not current_history or current_history.deleted ) else current_history
+        desc_update_time = desc( self.model_class.table.c.update_time )
+        filters = self._munge_filters( filters, self.model_class.user_id == user.id )
+        # TODO: normalize this return value
+        return self.query( filters=filters, order_by=desc_update_time, limit=1, **kwargs ).first()
+
+    # .... purgable
+    def purge( self, history, flush=True, **kwargs ):
+        """
+        Purge this history and all HDAs, Collections, and Datasets inside this history.
+        """
+        self.hda_manager.dataset_manager.error_unless_dataset_purge_allowed()
+        # First purge all the datasets
+        for hda in history.datasets:
+            if not hda.purged:
+                self.hda_manager.purge( hda, flush=True )
+
+        # Now mark the history as purged
+        super( HistoryManager, self ).purge( history, flush=flush, **kwargs )
+
+    # .... current
+    # TODO: make something to bypass the anon user + current history permissions issue
+    # def is_current_users_current_history( self, history, trans ):
+    #     pass
+
+    def get_current( self, trans ):
+        """
+        Return the current history.
+        """
+        # TODO: trans
+        return trans.get_history()
+
+    def set_current( self, trans, history ):
+        """
+        Set the current history.
+        """
+        # TODO: trans
+        trans.set_history( history )
+        return history
+
+    def set_current_by_id( self, trans, history_id ):
+        """
+        Set the current history by an id.
+        """
+        return self.set_current( trans, self.by_id( history_id ) )
+
+    # order_by parsing - similar to FilterParser but not enough yet to warrant a class?
+    def parse_order_by( self, order_by_string, default=None ):
+        """Return an ORM compatible order_by using the given string"""
+        # TODO: generalize into class
+        # TODO: general (enough) columns
+        if order_by_string in ( 'create_time', 'create_time-dsc' ):
+            return desc( self.model_class.create_time )
+        if order_by_string == 'create_time-asc':
+            return asc( self.model_class.create_time )
+        if order_by_string in ( 'update_time', 'update_time-dsc' ):
+            return desc( self.model_class.update_time )
+        if order_by_string == 'update_time-asc':
+            return asc( self.model_class.update_time )
+        if order_by_string in ( 'name', 'name-asc' ):
+            return asc( self.model_class.name )
+        if order_by_string == 'name-dsc':
+            return desc( self.model_class.name )
+        # TODO: history columns
+        if order_by_string in ( 'size', 'size-dsc' ):
+            return desc( self.model_class.disk_size )
+        if order_by_string == 'size-asc':
+            return asc( self.model_class.disk_size )
+        # TODO: add functional/non-orm orders (such as rating)
+        if default:
+            return self.parse_order_by( default )
+        raise glx_exceptions.RequestParameterInvalidException( 'Unknown order_by', order_by=order_by_string,
+            available=[ 'create_time', 'update_time', 'name', 'size' ])
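+
+    # illustrative calls (no new behavior implied):
+    #   parse_order_by( 'update_time' )  ->  desc( History.update_time )
+    #   parse_order_by( 'name-asc' )     ->  asc( History.name )
+    # an unknown string falls back to `default` when one is given, otherwise
+    # RequestParameterInvalidException is raised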
+
+    def non_ready_jobs( self, history ):
+        """Return the currently running job objects associated with this history.
+
+        Where running is defined as new, waiting, queued, running, resubmitted,
+        and upload.
+        """
+        # TODO: defer to jobModelManager (if there was one)
+        # TODO: genericize the params to allow other filters
+        jobs = ( self.session().query( model.Job )
+            .filter( model.Job.history == history )
+            .filter( model.Job.state.in_( model.Job.non_ready_states ) ) )
+        return jobs
+
+
+class HistorySerializer( sharable.SharableModelSerializer, deletable.PurgableSerializerMixin ):
+    """
+    Interface/service object for serializing histories into dictionaries.
+    """
+    model_manager_class = HistoryManager
+    SINGLE_CHAR_ABBR = 'h'
+
+    def __init__( self, app, **kwargs ):
+        super( HistorySerializer, self ).__init__( app, **kwargs )
+
+        self.history_manager = self.manager
+        self.hda_manager = hdas.HDAManager( app )
+        self.hda_serializer = hdas.HDASerializer( app )
+        self.history_contents_serializer = history_contents.HistoryContentsSerializer( app )
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            'id',
+            'model_class',
+            'name',
+            'deleted',
+            'purged',
+            # 'count'
+            'url',
+            # TODO: why these?
+            'published',
+            'annotation',
+            'tags',
+        ])
+        self.add_view( 'detailed', [
+            'contents_url',
+            'empty',
+            'size',
+            'user_id',
+            'create_time',
+            'update_time',
+            'importable',
+            'slug',
+            'username_and_slug',
+            'genome_build',
+            # TODO: remove the next three - instead getting the same info from the 'hdas' list
+            'state',
+            'state_details',
+            'state_ids',
+            # 'community_rating',
+            # 'user_rating',
+        ], include_keys_from='summary' )
+        # in the History's case, each of these views includes the keys from the previous
+
+        #: ..note: this is a custom view for newer (2016/3) UI and should be considered volatile
+        self.add_view( 'dev-detailed', [
+            'contents_url',
+            'size',
+            'user_id',
+            'create_time',
+            'update_time',
+            'importable',
+            'slug',
+            'username_and_slug',
+            'genome_build',
+            # 'contents_states',
+            'contents_active',
+            'hid_counter',
+        ], include_keys_from='summary' )
+
+    # assumes: outgoing to json.dumps and sanitized
+    def add_serializers( self ):
+        super( HistorySerializer, self ).add_serializers()
+        deletable.PurgableSerializerMixin.add_serializers( self )
+
+        self.serializers.update({
+            'model_class'   : lambda *a, **c: 'History',
+            'size'          : lambda i, k, **c: int( i.disk_size ),
+            'nice_size'     : lambda i, k, **c: i.disk_nice_size,
+            'state'         : self.serialize_history_state,
+
+            'url'           : lambda i, k, **c: self.url_for( 'history', id=self.app.security.encode_id( i.id ) ),
+            'contents_url'  : lambda i, k, **c: self.url_for( 'history_contents',
+                                                              history_id=self.app.security.encode_id( i.id ) ),
+
+            'empty'         : lambda i, k, **c: ( len( i.datasets ) + len( i.dataset_collections ) ) <= 0,
+            'count'         : lambda i, k, **c: len( i.datasets ),
+            'hdas'          : lambda i, k, **c: [ self.app.security.encode_id( hda.id ) for hda in i.datasets ],
+            'state_details' : self.serialize_state_counts,
+            'state_ids'     : self.serialize_state_ids,
+            'contents'      : self.serialize_contents,
+            'non_ready_jobs': lambda i, k, **c: [ self.app.security.encode_id( job.id ) for job
+                                                  in self.manager.non_ready_jobs( i ) ],
+
+            'contents_states': self.serialize_contents_states,
+            'contents_active': self.serialize_contents_active,
+        })
+
+    # TODO: remove this
+    def serialize_state_ids( self, history, key, **context ):
+        """
+        Return a dictionary keyed to possible dataset states and valued with lists
+        containing the ids of each HDA in that state.
+        """
+        state_ids = {}
+        for state in model.Dataset.states.values():
+            state_ids[ state ] = []
+
+        # TODO:?? collections and coll. states?
+        for hda in history.datasets:
+            # TODO: do not encode ids at this layer
+            encoded_id = self.app.security.encode_id( hda.id )
+            state_ids[ hda.state ].append( encoded_id )
+        return state_ids
+
+    # TODO: remove this
+    def serialize_state_counts( self, history, key, exclude_deleted=True, exclude_hidden=False, **context ):
+        """
+        Return a dictionary keyed to possible dataset states and valued with the number
+        of datasets in this history that have those states.
+        """
+        # TODO: the default flags above may not make a lot of sense (T,T?)
+        state_counts = {}
+        for state in model.Dataset.states.values():
+            state_counts[ state ] = 0
+
+        # TODO:?? collections and coll. states?
+        for hda in history.datasets:
+            if exclude_deleted and hda.deleted:
+                continue
+            if exclude_hidden and not hda.visible:
+                continue
+            state_counts[ hda.state ] = state_counts[ hda.state ] + 1
+        return state_counts
+
+    # TODO: remove this (is state used/useful?)
+    def serialize_history_state( self, history, key, **context ):
+        """
+        Returns the history state based on the states of the HDAs it contains.
+        """
+        states = model.Dataset.states
+        # (default to ERROR)
+        state = states.ERROR
+        # TODO: history_state and state_counts are classically calc'd at the same time
+        #   so this is rel. ineff. - if we keep this...
+        hda_state_counts = self.serialize_state_counts( history, 'counts', exclude_deleted=True, **context )
+        num_hdas = sum( hda_state_counts.values() )
+        if num_hdas == 0:
+            state = states.NEW
+
+        else:
+            if (hda_state_counts[states.RUNNING] > 0 or
+                    hda_state_counts[states.SETTING_METADATA] > 0 or
+                    hda_state_counts[states.UPLOAD] > 0):
+                state = states.RUNNING
+            # TODO: this method may be more useful if we *also* polled the histories jobs here too
+            elif (hda_state_counts[ states.QUEUED ] > 0 or
+                    hda_state_counts[states.NEW] > 0):
+                state = states.QUEUED
+            elif (hda_state_counts[states.ERROR] > 0 or
+                    hda_state_counts[states.FAILED_METADATA] > 0):
+                state = states.ERROR
+            elif hda_state_counts[ states.OK ] == num_hdas:
+                state = states.OK
+
+        return state
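+        # e.g. (illustrative): any running/uploading hda -> RUNNING; else any queued/new
+        # -> QUEUED; else any errored -> ERROR; all ok -> OK; an empty history -> NEW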
+
+    def serialize_contents( self, history, key, trans=None, user=None, **context ):
+        returned = []
+        for content in self.manager.contents_manager._union_of_contents_query( history ).all():
+            serialized = self.history_contents_serializer.serialize_to_view( content,
+                view='summary', trans=trans, user=user )
+            returned.append( serialized )
+        return returned
+
+    def serialize_contents_states( self, history, key, trans=None, **context ):
+        """
+        Return a dictionary containing the counts of all contents in each state
+        keyed by the distinct states.
+
+        Note: does not include deleted/hidden contents.
+        """
+        return self.manager.contents_manager.state_counts( history )
+
+    def serialize_contents_active( self, history, key, **context ):
+        """
+        Return a dictionary keyed with 'deleted', 'hidden', and 'active' with values
+        for each representing the count of contents in each state.
+
+        Note: counts for deleted and hidden overlap; in other words, a dataset that's
+        both deleted and hidden will be added to both totals.
+        """
+        return self.manager.contents_manager.active_counts( history )
+
+
+class HistoryDeserializer( sharable.SharableModelDeserializer, deletable.PurgableDeserializerMixin ):
+    """
+    Interface/service object for validating and deserializing dictionaries into histories.
+    """
+    model_manager_class = HistoryManager
+
+    def __init__( self, app ):
+        super( HistoryDeserializer, self ).__init__( app )
+        self.history_manager = self.manager
+
+    def add_deserializers( self ):
+        super( HistoryDeserializer, self ).add_deserializers()
+        deletable.PurgableDeserializerMixin.add_deserializers( self )
+
+        self.deserializers.update({
+            'name'          : self.deserialize_basestring,
+            'genome_build'  : self.deserialize_genome_build,
+        })
+
+
+class HistoryFilters( sharable.SharableModelFilters, deletable.PurgableFiltersMixin ):
+    model_class = model.History
+    model_manager_class = HistoryManager
+
+    def _add_parsers( self ):
+        super( HistoryFilters, self )._add_parsers()
+        deletable.PurgableFiltersMixin._add_parsers( self )
+        self.orm_filter_parsers.update({
+            # history specific
+            'name'          : { 'op': ( 'eq', 'contains', 'like' ) },
+            'genome_build'  : { 'op': ( 'eq', 'contains', 'like' ) },
+        })
diff --git a/lib/galaxy/managers/history_contents.py b/lib/galaxy/managers/history_contents.py
new file mode 100644
index 0000000..206c9ca
--- /dev/null
+++ b/lib/galaxy/managers/history_contents.py
@@ -0,0 +1,486 @@
+"""
+Heterogeneous lists/contents are difficult to query properly since unions are
+not easily made.
+"""
+
+from sqlalchemy import literal
+from sqlalchemy import sql
+from sqlalchemy import asc, desc
+from sqlalchemy import true, false
+from sqlalchemy import func
+from sqlalchemy.orm import eagerload
+from sqlalchemy.orm import undefer
+
+from galaxy import model
+from galaxy import exceptions as glx_exceptions
+
+from galaxy.managers import base
+from galaxy.managers import deletable
+from galaxy.managers import containers
+from galaxy.managers import hdas
+from galaxy.managers import hdcas
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# split into its own class so it can have its own filters, etc.
+# TODO: but can't inherit from model manager (which assumes only one model)
+class HistoryContentsManager( containers.ContainerManagerMixin ):
+
+    root_container_class = model.History
+
+    contained_class = model.HistoryDatasetAssociation
+    contained_class_manager_class = hdas.HDAManager
+    contained_class_type_name = 'dataset'
+
+    subcontainer_class = model.HistoryDatasetCollectionAssociation
+    subcontainer_class_manager_class = hdcas.HDCAManager
+    subcontainer_class_type_name = 'dataset_collection'
+
+    #: the columns which are common to both subcontainers and non-subcontainers.
+    #  (Also the attributes that may be filtered or ordered_by)
+    common_columns = (
+        "history_id",
+        "history_content_type",
+        "id",
+        "type_id",
+        "hid",
+        # joining columns
+        "dataset_id",
+        "collection_id",
+        "name",
+        "state",
+        "deleted",
+        "purged",
+        "visible",
+        "create_time",
+        "update_time",
+    )
+    default_order_by = 'hid'
+
+    def __init__( self, app ):
+        self.app = app
+        self.contained_manager = self.contained_class_manager_class( app )
+        self.subcontainer_manager = self.subcontainer_class_manager_class( app )
+
+    # ---- interface
+    def contained( self, container, filters=None, limit=None, offset=None, order_by=None, **kwargs ):
+        """
+        Returns non-subcontainer objects within `container`.
+        """
+        filter_to_inside_container = self._get_filter_for_contained( container, self.contained_class )
+        filters = base.munge_lists( filter_to_inside_container, filters )
+        return self.contained_manager.list( filters=filters, limit=limit, offset=offset, order_by=order_by, **kwargs )
+
+    def subcontainers( self, container, filters=None, limit=None, offset=None, order_by=None, **kwargs ):
+        """
+        Returns only the containers within `container`.
+        """
+        filter_to_inside_container = self._get_filter_for_contained( container, self.subcontainer_class )
+        filters = base.munge_lists( filter_to_inside_container, filters )
+        # TODO: collections.DatasetCollectionManager doesn't have the list
+        # return self.subcontainer_manager.list( filters=filters, limit=limit, offset=offset, order_by=order_by, **kwargs )
+        return self._session().query( self.subcontainer_class ).filter( filters ).all()
+
+    def contents( self, container, filters=None, limit=None, offset=None, order_by=None, **kwargs ):
+        """
+        Returns a list of both/all types of contents, filtered and in some order.
+        """
+        # TODO?: we could branch here based on 'if limit is None and offset is None' - to a simpler (non-union) query
+        # for now, I'm just using this (even for non-limited/offset queries) to reduce code paths
+        return self._union_of_contents( container,
+            filters=filters, limit=limit, offset=offset, order_by=order_by, **kwargs )
+
+    def contents_count( self, container, filters=None, limit=None, offset=None, order_by=None, **kwargs ):
+        """
+        Returns a count of both/all types of contents, based on the given filters.
+        """
+        return self.contents_query( container,
+            filters=filters, limit=limit, offset=offset, order_by=order_by, **kwargs ).count()
+
+    def contents_query( self, container, filters=None, limit=None, offset=None, order_by=None, **kwargs ):
+        """
+        Returns the contents union query for subqueries, etc.
+        """
+        return self._union_of_contents_query( container,
+            filters=filters, limit=limit, offset=offset, order_by=order_by, **kwargs )
+
+    # order_by parsing - similar to FilterParser but not enough yet to warrant a class?
+    def parse_order_by( self, order_by_string, default=None ):
+        """Return an ORM compatible order_by using the given string"""
+        if order_by_string in ( 'hid', 'hid-dsc' ):
+            return desc( 'hid' )
+        if order_by_string == 'hid-asc':
+            return asc( 'hid' )
+        if order_by_string in ( 'create_time', 'create_time-dsc' ):
+            return desc( 'create_time' )
+        if order_by_string == 'create_time-asc':
+            return asc( 'create_time' )
+        if order_by_string in ( 'update_time', 'update_time-dsc' ):
+            return desc( 'update_time' )
+        if order_by_string == 'update_time-asc':
+            return asc( 'update_time' )
+        if order_by_string in ( 'name', 'name-asc' ):
+            return asc( 'name' )
+        if order_by_string == 'name-dsc':
+            return desc( 'name' )
+        if default:
+            return self.parse_order_by( default )
+        # TODO: allow order_by None
+        raise glx_exceptions.RequestParameterInvalidException( 'Unknown order_by', order_by=order_by_string,
+            available=[ 'create_time', 'update_time', 'name', 'hid' ])
+
+    # history specific methods
+    def state_counts( self, history ):
+        """
+        Return a dictionary containing the counts of all contents in each state
+        keyed by the distinct states.
+
+        Note: does not include deleted/hidden contents.
+        """
+        filters = [
+            sql.column( 'deleted' ) == false(),
+            sql.column( 'visible' ) == true()
+        ]
+        contents_subquery = self._union_of_contents_query( history, filters=filters ).subquery()
+        statement = ( sql.select([ sql.column( 'state' ), func.count('*') ])
+            .select_from( contents_subquery )
+            .group_by( sql.column( 'state' ) ) )
+        counts = self.app.model.context.execute( statement ).fetchall()
+        return dict( counts )
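+        # the result maps each distinct state to its count,
+        # e.g. (illustrative): { 'ok': 10, 'running': 1, 'error': 2 }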
+
+    def active_counts( self, history ):
+        """
+        Return a dictionary keyed with 'deleted', 'hidden', and 'active' with values
+        for each representing the count of contents in each state.
+
+        Note: counts for deleted and hidden overlap; in other words, a dataset that's
+        both deleted and hidden will be added to both totals.
+        """
+        returned = dict( deleted=0, hidden=0, active=0 )
+        contents_subquery = self._union_of_contents_query( history ).subquery()
+        columns = [
+            sql.column( 'deleted' ),
+            sql.column( 'visible' ),
+            func.count( '*' )
+        ]
+        statement = ( sql.select( columns )
+            .select_from( contents_subquery )
+            .group_by( sql.column( 'deleted' ), sql.column( 'visible' ) ) )
+        groups = self.app.model.context.execute( statement ).fetchall()
+        for deleted, visible, count in groups:
+            if deleted:
+                returned[ 'deleted' ] += count
+            if not visible:
+                returned[ 'hidden' ] += count
+            if not deleted and visible:
+                returned[ 'active' ] += count
+        return returned
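+        # e.g. (illustrative): { 'deleted': 2, 'hidden': 3, 'active': 10 }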
+
+    def map_datasets( self, history, fn, **kwargs ):
+        """
+        Iterate over the datasets of a given history, recursing into collections, and
+        calling fn on each dataset.
+
+        Uses the same kwargs as `contents` above.
+        """
+        returned = []
+        contents = self.contents( history, **kwargs )
+        for content in contents:
+            if isinstance( content, self.subcontainer_class ):
+                processed_list = self.subcontainer_manager.map_datasets( content, fn )
+                returned.extend( processed_list )
+            else:
+                processed = fn( content )
+                returned.append( processed )
+        return returned
+
+    # ---- private
+    def _session( self ):
+        return self.app.model.context
+
+    def _filter_to_contents_query( self, container, content_class, **kwargs ):
+        # TODO: use list (or by_history etc.)
+        container_filter = self._get_filter_for_contained( container, content_class )
+        query = self._session().query( content_class ).filter( container_filter )
+        return query
+
+    def _get_filter_for_contained( self, container, content_class ):
+        return content_class.history == container
+
+    def _union_of_contents( self, container, expand_models=True, **kwargs ):
+        """
+        Returns a limited and offset list of both types of contents, filtered
+        and in some order.
+        """
+        contents_results = self._union_of_contents_query( container, **kwargs ).all()
+        if not expand_models:
+            return contents_results
+
+        # partition ids into a map of { component_class names -> list of ids } from the above union query
+        id_map = dict( (( self.contained_class_type_name, [] ), ( self.subcontainer_class_type_name, [] )) )
+        for result in contents_results:
+            result_type = self._get_union_type( result )
+            contents_id = self._get_union_id( result )
+            if result_type in id_map:
+                id_map[ result_type ].append( contents_id )
+            else:
+                raise TypeError( 'Unknown contents type:', result_type )
+
+        # query 2 & 3: use the ids to query each component_class, returning an id->full component model map
+        contained_ids = id_map[ self.contained_class_type_name ]
+        id_map[ self.contained_class_type_name ] = self._contained_id_map( contained_ids )
+        subcontainer_ids = id_map[ self.subcontainer_class_type_name ]
+        id_map[ self.subcontainer_class_type_name ] = self._subcontainer_id_map( subcontainer_ids )
+
+        # cycle back over the union query to create an ordered list of the objects returned in queries 2 & 3 above
+        contents = []
+        # TODO: or as generator?
+        for result in contents_results:
+            result_type = self._get_union_type( result )
+            contents_id = self._get_union_id( result )
+            content = id_map[ result_type ][ contents_id ]
+            contents.append( content )
+        return contents
+
+    def _union_of_contents_query( self, container, filters=None, limit=None, offset=None, order_by=None, **kwargs ):
+        """
+        Returns a query for a limited and offset list of both types of contents,
+        filtered and in some order.
+        """
+        order_by = order_by if order_by is not None else self.default_order_by
+        order_by = order_by if isinstance( order_by, ( tuple, list ) ) else ( order_by, )
+
+        # TODO: 3 queries and 3 iterations over results - this is undoubtedly better solved in the actual SQL layer
+        # via one common table for contents, Some Yonder Resplendent and Fanciful Join, or ORM functionality
+        # Here's the (bizarre) strategy:
+        #   1. create a union of common columns between contents classes - filter, order, and limit/offset this
+        #   2. extract the ids returned from 1 for each class, query each content class by that id list
+        #   3. use the results/order from 1 to recombine/merge the 2+ query result lists from 2, return that
+
+        # note: I'm trying to keep these private functions as generic as possible in order to move them toward base later
+
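+        # roughly, the generated statement looks like this (illustrative only -
+        # SQLAlchemy builds the real SQL):
+        #   SELECT <common columns> FROM history_dataset_association JOIN dataset ... WHERE history_id = :id
+        #   UNION
+        #   SELECT <common columns> FROM history_dataset_collection_association JOIN dataset_collection ...
+        #   ORDER BY hid LIMIT :limit OFFSET :offset
+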
+        # query 1: create a union of common columns for which the component_classes can be filtered/limited
+        contained_query = self._contents_common_query_for_contained( container.id )
+        subcontainer_query = self._contents_common_query_for_subcontainer( container.id )
+        contents_query = contained_query.union( subcontainer_query )
+
+        # TODO: this needs the same fn/orm split that happens in the main query
+        for orm_filter in ( filters or [] ):
+            contents_query = contents_query.filter( orm_filter )
+        contents_query = contents_query.order_by( *order_by )
+
+        if limit is not None:
+            contents_query = contents_query.limit( limit )
+        if offset is not None:
+            contents_query = contents_query.offset( offset )
+        return contents_query
+
+    def _contents_common_columns( self, component_class, **kwargs ):
+        columns = []
+        # pull column from class by name or override with kwargs if listed there, then label
+        for column_name in self.common_columns:
+            if column_name in kwargs:
+                column = kwargs.get( column_name, None )
+            elif column_name == "model_class":
+                column = literal( component_class.__name__ )
+            else:
+                column = getattr( component_class, column_name )
+            column = column.label( column_name )
+            columns.append( column )
+        return columns
+
+    def _contents_common_query_for_contained( self, history_id ):
+        component_class = self.contained_class
+        # TODO: and now a join with Dataset - this is getting sad
+        columns = self._contents_common_columns( component_class,
+            history_content_type=literal( 'dataset' ),
+            state=model.Dataset.state,
+            # do not have inner collections
+            collection_id=literal( None )
+        )
+        subquery = self._session().query( *columns )
+        # for the HDAs we need to join the Dataset since it has an actual state column
+        subquery = subquery.join( model.Dataset, model.Dataset.id == component_class.dataset_id )
+        subquery = subquery.filter( component_class.history_id == history_id )
+        return subquery
+
+    def _contents_common_query_for_subcontainer( self, history_id ):
+        component_class = self.subcontainer_class
+        columns = self._contents_common_columns( component_class,
+            history_content_type=literal( 'dataset_collection' ),
+            # do not have datasets
+            dataset_id=literal( None ),
+            state=model.DatasetCollection.populated_state,
+            # TODO: should be purgable? fix
+            purged=literal( False ),
+            # these are attached instead to the inner collection joined below
+            create_time=model.DatasetCollection.create_time,
+            update_time=model.DatasetCollection.update_time
+        )
+        subquery = self._session().query( *columns )
+        # for the HDCAs we need to join the DatasetCollection since it has update/create times
+        subquery = subquery.join( model.DatasetCollection,
+            model.DatasetCollection.id == component_class.collection_id )
+        subquery = subquery.filter( component_class.history_id == history_id )
+        return subquery
+
+    def _get_union_type( self, union ):
+        """Return the string name of the class for this row in the union results"""
+        return str( union[ 1 ] )
+
+    def _get_union_id( self, union ):
+        """Return the id for this row in the union results"""
+        return union[ 2 ]
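+    # note: the indices used above follow the order of `common_columns`:
+    #   0 = history_id, 1 = history_content_type, 2 = id, ...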
+
+    def _contained_id_map( self, id_list ):
+        """Return an id to model map of all contained-type models in the id_list."""
+        if not id_list:
+            return {}
+        component_class = self.contained_class
+        query = ( self._session().query( component_class )
+            .filter( component_class.id.in_( id_list ) )
+            .options( undefer( '_metadata' ) )
+            .options( eagerload( 'dataset.actions' ) )
+            .options( eagerload( 'tags' ) )
+            .options( eagerload( 'annotations' ) ) )
+        return dict( ( row.id, row ) for row in query.all() )
+
+    def _subcontainer_id_map( self, id_list ):
+        """Return an id to model map of all subcontainer-type models in the id_list."""
+        if not id_list:
+            return {}
+        component_class = self.subcontainer_class
+        query = ( self._session().query( component_class )
+            .filter( component_class.id.in_( id_list ) )
+            .options( eagerload( 'collection' ) )
+            .options( eagerload( 'tags' ) )
+            .options( eagerload( 'annotations' ) ) )
+        return dict( ( row.id, row ) for row in query.all() )
+
+
+class HistoryContentsSerializer( base.ModelSerializer, deletable.PurgableSerializerMixin ):
+    """
+    Interface/service object for serializing history contents into dictionaries.
+    """
+    model_manager_class = HistoryContentsManager
+
+    def __init__( self, app, **kwargs ):
+        super( HistoryContentsSerializer, self ).__init__( app, **kwargs )
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            "id",
+            "type_id",
+            "history_id",
+            "hid",
+            "history_content_type",
+            "visible",
+            "dataset_id",
+            "collection_id",
+            "name",
+            "state",
+            "deleted",
+            "purged",
+            "create_time",
+            "update_time",
+        ])
+
+    # assumes: outgoing to json.dumps and sanitized
+    def add_serializers( self ):
+        super( HistoryContentsSerializer, self ).add_serializers()
+        deletable.PurgableSerializerMixin.add_serializers( self )
+
+        self.serializers.update({
+            'type_id'       : self.serialize_type_id,
+            'history_id'    : self.serialize_id,
+            'dataset_id'    : self.serialize_id_or_skip,
+            'collection_id' : self.serialize_id_or_skip,
+        })
+
+    def serialize_id_or_skip( self, content, key, **context ):
+        """Serialize id or skip if attribute with `key` is not present."""
+        if not hasattr( content, key ):
+            raise base.SkipAttribute( 'no such attribute' )
+        return self.serialize_id( content, key, **context )
+
+
+class HistoryContentsFilters( base.ModelFilterParser, deletable.PurgableFiltersMixin ):
+    # surprisingly (but ominously), this works for both content classes in the union that's filtered
+    model_class = model.HistoryDatasetAssociation
+
+    # TODO: history_content_type filter doesn't work with psycopg2: column does not exist (even with hybrid props)
+    def _parse_orm_filter( self, attr, op, val ):
+
+        def raise_filter_err( attr, op, val, msg ):
+            raise glx_exceptions.RequestParameterInvalidException( msg, column=attr, operation=op, val=val )
+
+        # we need to use some manual/text/column fu here since some where clauses on the union don't work
+        # using the model_class defined above - they need to be wrapped in their own .column()
+        # (and some of these are *not* normal columns (especially 'state') anyway)
+        # TODO: genericize these - can probably extract a _get_column( attr, ... ) or something
+        # special cases...special cases everywhere
+        if attr == 'history_content_type' and op == 'eq':
+            if val == 'dataset':
+                return sql.column( 'history_content_type' ) == 'dataset'
+            if val == 'dataset_collection':
+                return sql.column( 'history_content_type' ) == 'dataset_collection'
+            raise_filter_err( attr, op, val, 'bad op in filter' )
+
+        if attr == 'type_id':
+            if op == 'eq':
+                return sql.column( 'type_id' ) == val
+            if op == 'in':
+                return sql.column( 'type_id' ).in_( self.parse_type_id_list( val ) )
+            raise_filter_err( attr, op, val, 'bad op in filter' )
+
+        if attr in ( 'update_time', 'create_time' ):
+            if op == 'ge':
+                return sql.column( attr ) >= self.parse_date( val )
+            if op == 'le':
+                return sql.column( attr ) <= self.parse_date( val )
+            raise_filter_err( attr, op, val, 'bad op in filter' )
+
+        if attr == 'state':
+            valid_states = model.Dataset.states.values()
+            if op == 'eq':
+                if val not in valid_states:
+                    raise_filter_err( attr, op, val, 'invalid state in filter' )
+                return sql.column( 'state' ) == val
+            if op == 'in':
+                states = [ s for s in val.split( ',' ) if s ]
+                for state in states:
+                    if state not in valid_states:
+                        raise_filter_err( attr, op, state, 'invalid state in filter' )
+                return sql.column( 'state' ).in_( states )
+            raise_filter_err( attr, op, val, 'bad op in filter' )
+
+        return super( HistoryContentsFilters, self )._parse_orm_filter( attr, op, val )
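+
+        # illustrative ( attr, op, val ) triples handled above (values made up):
+        #   ( 'history_content_type', 'eq', 'dataset' )
+        #   ( 'state', 'in', 'ok,error' )
+        #   ( 'update_time', 'ge', '2016-01-01T00:00:00' )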
+
+    def decode_type_id( self, type_id ):
+        TYPE_ID_SEP = '-'
+        split = type_id.split( TYPE_ID_SEP, 1 )
+        return TYPE_ID_SEP.join([ split[0], str( self.app.security.decode_id( split[1] ) ) ])
+
+    def parse_type_id_list( self, type_id_list_string, sep=',' ):
+        """
+        Split `type_id_list_string` at `sep`.
+        """
+        return [ self.decode_type_id( type_id ) for type_id in type_id_list_string.split( sep ) ]
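+        # e.g. (encoded ids are illustrative): 'dataset-abc123,dataset_collection-def456'
+        # -> [ 'dataset-1', 'dataset_collection-2' ] after each encoded id is decoded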
+
+    def _add_parsers( self ):
+        super( HistoryContentsFilters, self )._add_parsers()
+        deletable.PurgableFiltersMixin._add_parsers( self )
+        self.orm_filter_parsers.update({
+            'history_content_type' : { 'op': ( 'eq', ) },
+            'type_id'       : { 'op': ( 'eq', 'in' ), 'val': self.parse_type_id_list },
+            'hid'           : { 'op': ( 'eq', 'ge', 'le' ), 'val': int },
+            # TODO: needs a different val parser - but no way to add to the above
+            # 'hid-in'        : { 'op': ( 'in' ), 'val': self.parse_int_list },
+            'name'          : { 'op': ( 'eq', 'contains', 'like' ) },
+            'state'         : { 'op': ( 'eq', 'in' ) },
+            'visible'       : { 'op': ( 'eq', ), 'val': self.parse_bool },
+            'create_time'   : { 'op': ( 'le', 'ge' ), 'val': self.parse_date },
+            'update_time'   : { 'op': ( 'le', 'ge' ), 'val': self.parse_date },
+        })
diff --git a/lib/galaxy/managers/lddas.py b/lib/galaxy/managers/lddas.py
new file mode 100644
index 0000000..9d9b779
--- /dev/null
+++ b/lib/galaxy/managers/lddas.py
@@ -0,0 +1,19 @@
+from galaxy.managers import base as manager_base
+
+
+class LDDAManager( object ):
+    """
+    A fairly sparse manager for LDDAs.
+    """
+
+    def __init__( self, app ):
+        """
+        Set up and initialize other managers needed by lddas (currently none).
+        """
+        pass
+
+    def get( self, trans, id, check_accessible=True ):
+        return manager_base.get_object( trans, id,
+                                        'LibraryDatasetDatasetAssociation',
+                                        check_ownership=False,
+                                        check_accessible=check_accessible )
diff --git a/lib/galaxy/managers/libraries.py b/lib/galaxy/managers/libraries.py
new file mode 100644
index 0000000..9bcb4d8
--- /dev/null
+++ b/lib/galaxy/managers/libraries.py
@@ -0,0 +1,258 @@
+"""
+Manager and Serializer for libraries.
+"""
+import logging
+
+from sqlalchemy import and_, false, not_, or_, true
+from sqlalchemy.orm.exc import MultipleResultsFound
+from sqlalchemy.orm.exc import NoResultFound
+
+from galaxy import exceptions
+from galaxy.managers import folders
+from galaxy.util import pretty_print_time_interval
+
+log = logging.getLogger( __name__ )
+
+
+# =============================================================================
+class LibraryManager( object ):
+    """
+    Interface/service object for interacting with libraries.
+    """
+
+    def __init__( self, *args, **kwargs ):
+        super( LibraryManager, self ).__init__( *args, **kwargs )
+
+    def get( self, trans, decoded_library_id, check_accessible=True ):
+        """
+        Get the library from the DB.
+
+        :param  decoded_library_id:       decoded library id
+        :type   decoded_library_id:       int
+        :param  check_accessible:         flag whether to check that user can access item
+        :type   check_accessible:         bool
+
+        :returns:   the requested library
+        :rtype:     galaxy.model.Library
+        """
+        try:
+            library = trans.sa_session.query( trans.app.model.Library ).filter( trans.app.model.Library.table.c.id == decoded_library_id ).one()
+        except MultipleResultsFound:
+            raise exceptions.InconsistentDatabase( 'Multiple libraries found with the same id.' )
+        except NoResultFound:
+            raise exceptions.RequestParameterInvalidException( 'No library found with the id provided.' )
+        except Exception as e:
+            raise exceptions.InternalServerError( 'Error loading from the database.' + str( e ) )
+        library = self.secure( trans, library, check_accessible)
+        return library
+
+    def create( self, trans, name, description='', synopsis=''):
+        """
+        Create a new library.
+        """
+        if not trans.user_is_admin():
+            raise exceptions.ItemAccessibilityException( 'Only administrators can create libraries.' )
+        else:
+            library = trans.app.model.Library( name=name, description=description, synopsis=synopsis )
+            root_folder = trans.app.model.LibraryFolder( name=name, description='' )
+            library.root_folder = root_folder
+            trans.sa_session.add_all( ( library, root_folder ) )
+            trans.sa_session.flush()
+            return library
+
+    def update( self, trans, library, name=None, description=None, synopsis=None ):
+        """
+        Update the given library
+        """
+        changed = False
+        if not trans.user_is_admin():
+            raise exceptions.ItemAccessibilityException( 'Only administrators can update libraries.' )
+        if library.deleted:
+            raise exceptions.RequestParameterInvalidException( 'You cannot modify a deleted library. Undelete it first.' )
+        if name is not None:
+            library.name = name
+            changed = True
+            #  When library is renamed the root folder has to be renamed too.
+            folder_manager = folders.FolderManager()
+            folder_manager.update( trans, library.root_folder, name=name )
+        if description is not None:
+            library.description = description
+            changed = True
+        if synopsis is not None:
+            library.synopsis = synopsis
+            changed = True
+        if changed:
+            trans.sa_session.add( library )
+            trans.sa_session.flush()
+        return library
+
+    def delete( self, trans, library, undelete=False ):
+        """
+        Mark given library deleted/undeleted based on the flag.
+        """
+        if not trans.user_is_admin():
+            raise exceptions.ItemAccessibilityException( 'Only administrators can delete and undelete libraries.' )
+        if undelete:
+            library.deleted = False
+        else:
+            library.deleted = True
+        trans.sa_session.add( library )
+        trans.sa_session.flush()
+        return library
+
+    def list( self, trans, deleted=False ):
+        """
+        Return a list of libraries from the DB.
+
+        :param  deleted: if True, show only ``deleted`` libraries; if False,
+            show only ``non-deleted`` ones; if None, show both
+        :type   deleted: boolean or None (optional)
+
+        :returns: query that will emit all accessible libraries
+        :rtype: sqlalchemy query
+        """
+        is_admin = trans.user_is_admin()
+        query = trans.sa_session.query( trans.app.model.Library )
+
+        if is_admin:
+            if deleted is None:
+                #  Flag is not specified, do not filter on it.
+                pass
+            elif deleted:
+                query = query.filter( trans.app.model.Library.table.c.deleted == true() )
+            else:
+                query = query.filter( trans.app.model.Library.table.c.deleted == false() )
+        else:
+            #  Nonadmins can't see deleted libraries
+            current_user_role_ids = [ role.id for role in trans.get_current_user_roles() ]
+            library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
+            restricted_library_ids = [ lp.library_id for lp in (
+                trans.sa_session.query( trans.model.LibraryPermissions ).filter(
+                    trans.model.LibraryPermissions.table.c.action == library_access_action
+                ).distinct() ) ]
+            accessible_restricted_library_ids = [ lp.library_id for lp in (
+                trans.sa_session.query( trans.model.LibraryPermissions ).filter(
+                    and_(
+                        trans.model.LibraryPermissions.table.c.action == library_access_action,
+                        trans.model.LibraryPermissions.table.c.role_id.in_( current_user_role_ids )
+                    ) ) ) ]
+            query = query.filter( or_(
+                not_( trans.model.Library.table.c.id.in_( restricted_library_ids ) ),
+                trans.model.Library.table.c.id.in_( accessible_restricted_library_ids )
+            ) )
+        return query
+
+    def secure( self, trans, library, check_accessible=True ):
+        """
+        Check if library is accessible to user.
+
+        :param  library:                 library
+        :type   library:                 galaxy.model.Library
+        :param  check_accessible:        flag whether to check that user can access library
+        :type   check_accessible:        bool
+
+        :returns:   the original library, if accessible
+        :rtype:     galaxy.model.Library
+        """
+        # all libraries are accessible to an admin
+        if trans.user_is_admin():
+            return library
+        if check_accessible:
+            library = self.check_accessible( trans, library )
+        return library
+
+    def check_accessible( self, trans, library ):
+        """
+        Check whether the library is accessible to current user.
+        """
+        if not trans.app.security_agent.can_access_library( trans.get_current_user_roles(), library ):
+            raise exceptions.ObjectNotFound( 'Library with the id provided was not found.' )
+        elif library.deleted:
+            raise exceptions.ObjectNotFound( 'Library with the id provided is deleted.' )
+        else:
+            return library
+
+    def get_library_dict( self, trans, library ):
+        """
+        Return library data in the form of a dictionary.
+
+        :param  library:       library
+        :type   library:       galaxy.model.Library
+
+        :returns:   dict with data about the library
+        :rtype:     dictionary
+        """
+        library_dict = library.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id, 'root_folder_id': trans.security.encode_id } )
+        if trans.app.security_agent.library_is_public( library, contents=False ):
+            library_dict[ 'public' ] = True
+        library_dict[ 'create_time_pretty' ] = pretty_print_time_interval( library.create_time, precise=True )
+        current_user_roles = trans.get_current_user_roles()
+        if not trans.user_is_admin():
+            library_dict[ 'can_user_add' ] = trans.app.security_agent.can_add_library_item( current_user_roles, library )
+            library_dict[ 'can_user_modify' ] = trans.app.security_agent.can_modify_library_item( current_user_roles, library )
+            library_dict[ 'can_user_manage' ] = trans.app.security_agent.can_manage_library_item( current_user_roles, library )
+        else:
+            library_dict[ 'can_user_add' ] = True
+            library_dict[ 'can_user_modify' ] = True
+            library_dict[ 'can_user_manage' ] = True
+        return library_dict
+
+    def get_current_roles( self, trans, library ):
+        """
+        Load all permissions currently related to the given library.
+
+        :param  library:      the model object
+        :type   library:      galaxy.model.Library
+
+        :rtype:     dictionary
+        :returns:   dict of current roles for all available permission types
+        """
+        access_library_role_list = [ ( access_role.name, trans.security.encode_id( access_role.id ) ) for access_role in self.get_access_roles( trans, library ) ]
+        modify_library_role_list = [ ( modify_role.name, trans.security.encode_id( modify_role.id ) ) for modify_role in self.get_modify_roles( trans, library ) ]
+        manage_library_role_list = [ ( manage_role.name, trans.security.encode_id( manage_role.id ) ) for manage_role in self.get_manage_roles( trans, library ) ]
+        add_library_item_role_list = [ ( add_role.name, trans.security.encode_id( add_role.id ) ) for add_role in self.get_add_roles( trans, library ) ]
+        return dict( access_library_role_list=access_library_role_list,
+                     modify_library_role_list=modify_library_role_list,
+                     manage_library_role_list=manage_library_role_list,
+                     add_library_item_role_list=add_library_item_role_list )
+
+    def get_access_roles( self, trans, library ):
+        """
+        Load access roles for all library permissions
+        """
+        return set( library.get_access_roles( trans ) )
+
+    def get_modify_roles( self, trans, library ):
+        """
+        Load modify roles for all library permissions
+        """
+        return set( trans.app.security_agent.get_roles_for_action( library, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY ) )
+
+    def get_manage_roles( self, trans, library ):
+        """
+        Load manage roles for all library permissions
+        """
+        return set( trans.app.security_agent.get_roles_for_action( library, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE ) )
+
+    def get_add_roles( self, trans, library ):
+        """
+        Load add roles for all library permissions
+        """
+        return set( trans.app.security_agent.get_roles_for_action( library, trans.app.security_agent.permitted_actions.LIBRARY_ADD ) )
+
+    def set_permission_roles( self, trans, library, access_roles, modify_roles, manage_roles, add_roles ):
+        """
+        Set permissions on the given library.
+        """
+
+    def make_public( self, trans, library ):
+        """
+        Makes the given library public (removes all access roles)
+        """
+        trans.app.security_agent.make_library_public( library )
+        return self.is_public( trans, library )
+
+    def is_public( self, trans, library ):
+        """
+        Return true if lib is public.
+        """
+        return trans.app.security_agent.library_is_public( library )
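
An illustrative admin workflow against LibraryManager (assumes an admin
`trans`; create() also builds the library's root folder, and delete() only
flags the row):

    manager = LibraryManager()
    library = manager.create( trans, name='RNA-seq data', synopsis='raw reads' )
    library = manager.update( trans, library, description='2016 sequencing runs' )
    manager.delete( trans, library )                  # soft delete
    manager.delete( trans, library, undelete=True )   # restore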
diff --git a/lib/galaxy/managers/pages.py b/lib/galaxy/managers/pages.py
new file mode 100644
index 0000000..373007f
--- /dev/null
+++ b/lib/galaxy/managers/pages.py
@@ -0,0 +1,74 @@
+"""
+Manager and Serializers for Pages.
+
+Pages are markup created and saved by users that can contain Galaxy objects
+(such as datasets) and are often used to describe or present an analysis
+from within Galaxy.
+"""
+
+from galaxy import model
+from galaxy.managers import sharable
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class PageManager( sharable.SharableModelManager ):
+    """
+    """
+
+    model_class = model.Page
+    foreign_key_name = 'page'
+    user_share_model = model.PageUserShareAssociation
+
+    tag_assoc = model.PageTagAssociation
+    annotation_assoc = model.PageAnnotationAssociation
+    rating_assoc = model.PageRatingAssociation
+
+    def __init__( self, app, *args, **kwargs ):
+        """
+        """
+        super( PageManager, self ).__init__( app, *args, **kwargs )
+
+    def copy( self, trans, page, user, **kwargs ):
+        """
+        """
+        pass
+
+
+class PageSerializer( sharable.SharableModelSerializer ):
+    """
+    Interface/service object for serializing pages into dictionaries.
+    """
+    SINGLE_CHAR_ABBR = 'p'
+
+    def __init__( self, app ):
+        super( PageSerializer, self ).__init__( app )
+        self.page_manager = PageManager( app )
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [] )
+        self.add_view( 'detailed', [] )
+
+    def add_serializers( self ):
+        super( PageSerializer, self ).add_serializers()
+        self.serializers.update({
+        })
+
+
+class PageDeserializer( sharable.SharableModelDeserializer ):
+    """
+    Interface/service object for validating and deserializing dictionaries
+    into pages.
+    """
+    model_manager_class = PageManager
+
+    def __init__( self, app ):
+        super( PageDeserializer, self ).__init__( app )
+        self.page_manager = self.manager
+
+    def add_deserializers( self ):
+        super( PageDeserializer, self ).add_deserializers()
+        self.deserializers.update({
+        })
+        self.deserializable_keyset.update( self.deserializers.keys() )
diff --git a/lib/galaxy/managers/ratable.py b/lib/galaxy/managers/ratable.py
new file mode 100644
index 0000000..217029e
--- /dev/null
+++ b/lib/galaxy/managers/ratable.py
@@ -0,0 +1,123 @@
+"""
+Mixins for Ratable model managers and serializers.
+"""
+
+from sqlalchemy.sql.expression import func
+from . import base
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class RatableManagerMixin( object ):
+
+    #: class of RatingAssociation (e.g. HistoryRatingAssociation)
+    rating_assoc = None
+
+    def rating( self, item, user, as_int=True ):
+        """Returns the integer rating given to this item by the user.
+
+        Returns the full rating model if `as_int` is False.
+        """
+        rating = self.query_associated( self.rating_assoc, item ).filter_by( user=user ).first()
+        # most common case is assumed to be 'get the number'
+        if not as_int:
+            return rating
+        # get the int value if there's a rating
+        return rating.rating if rating is not None else None
+
+    def ratings( self, item ):
+        """Returns a list of all rating values given to this item."""
+        return [ r.rating for r in item.ratings ]
+
+    def ratings_avg( self, item ):
+        """Returns the average of all ratings given to this item."""
+        foreign_key = self._foreign_key( self.rating_assoc )
+        avg = self.session().query( func.avg( self.rating_assoc.rating ) ).filter( foreign_key == item ).scalar()
+        return avg or 0.0
+
+    def ratings_count( self, item ):
+        """Returns the number of ratings given to this item."""
+        foreign_key = self._foreign_key( self.rating_assoc )
+        return self.session().query( func.count( self.rating_assoc.rating ) ).filter( foreign_key == item ).scalar()
+
+    def rate( self, item, user, value, flush=True ):
+        """Updates or creates a rating for this item and user. Returns the rating"""
+        # TODO?: possible generic update_or_create
+        # TODO?: update and create to RatingsManager (if not overkill)
+        rating = self.rating( item, user, as_int=False )
+        if not rating:
+            rating = self.rating_assoc( user=user )
+            self.associate( rating, item )
+        rating.rating = value
+
+        self.session().add( rating )
+        if flush:
+            self.session().flush()
+        return rating
+
+    # TODO?: all ratings for a user
+
+
+class RatableSerializerMixin( object ):
+
+    def add_serializers( self ):
+        self.serializers[ 'user_rating' ] = self.serialize_user_rating
+        self.serializers[ 'community_rating' ] = self.serialize_community_rating
+
+    def serialize_user_rating( self, item, key, user=None, **context ):
+        """Returns the integer rating given to this item by the user."""
+        if not user:
+            raise base.ModelSerializingError( 'user_rating requires a user',
+                model_class=self.manager.model_class, id=self.serialize_id( item, 'id' ) )
+        return self.manager.rating( item, user )
+
+    def serialize_community_rating( self, item, key, **context ):
+        """
+        Returns a dictionary containing:
+            `average` the (float) average of all ratings of this object
+            `count` the number of ratings
+        """
+        # ??: it seems like two queries (albeit in-sql functions) would be slower
+        # than getting the rows and calculating both here with one query
+        manager = self.manager
+        return {
+            'average' : manager.ratings_avg( item ),
+            'count'   : manager.ratings_count( item ),
+        }
+
+
+class RatableDeserializerMixin( object ):
+
+    def add_deserializers( self ):
+        self.deserializers[ 'user_rating' ] = self.deserialize_rating
+
+    def deserialize_rating( self, item, key, val, user=None, **context ):
+        if not user:
+            raise base.ModelDeserializingError( 'user_rating requires a user',
+                model_class=self.manager.model_class, id=self.serialize_id( item, 'id' ) )
+        val = self.validate.int_range( key, val, 0, 5 )
+        return self.manager.rate( item, user, val, flush=False )
+
+
+class RatableFilterMixin( object ):
+
+    def _ratings_avg_accessor( self, item ):
+        return self.manager.ratings_avg( item )
+
+    def _add_parsers( self ):
+        """
+        Adds the following filters:
+            `community_rating`: filter by the average community rating (float)
+        """
+        self.fn_filter_parsers.update({
+            'community_rating': {
+                'op': {
+                    'eq' : lambda i, v: self._ratings_avg_accessor( i ) == v,
+                    # TODO: default to greater than (currently 'eq' due to base/controller.py)
+                    'ge' : lambda i, v: self._ratings_avg_accessor( i ) >= v,
+                    'le' : lambda i, v: self._ratings_avg_accessor( i ) <= v,
+                },
+                'val' : float
+            }
+        })
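
Putting the mixin together: any manager that includes RatableManagerMixin and
sets rating_assoc supports the calls below (`manager`, `item`, and `user` are
assumed); the `community_rating` filter compares the same ratings_avg() value
using eq/ge/le:

    manager.rate( item, user, 4 )    # create or update this user's rating
    manager.rating( item, user )     # -> 4
    manager.ratings_avg( item )     # float average over all users, 0.0 if none
    manager.ratings_count( item )    # number of ratings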
diff --git a/lib/galaxy/managers/rbac_secured.py b/lib/galaxy/managers/rbac_secured.py
new file mode 100644
index 0000000..0b2883b
--- /dev/null
+++ b/lib/galaxy/managers/rbac_secured.py
@@ -0,0 +1,271 @@
+from galaxy import security
+import galaxy.exceptions
+from galaxy import model
+from galaxy.managers import users
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class RBACPermissionFailedException( galaxy.exceptions.InsufficientPermissionsException ):
+    pass
+
+
+class RBACPermission( object ):
+    """
+    Base class for wrangling/controlling the permissions ORM models (\*Permissions, Roles)
+    that control which users can perform certain actions on their associated models
+    (Libraries, Datasets).
+    """
+
+    permissions_class = None
+    permission_failed_error_class = RBACPermissionFailedException
+
+    def __init__( self, app ):
+        self.app = app
+        self.user_manager = users.UserManager( app )
+
+    def session( self ):
+        return self.app.model.context
+
+    # TODO: implement group
+    # TODO: how does admin play into this?
+    def is_permitted( self, item, user ):
+        raise NotImplementedError( "abstract parent class" )
+
+    def error_unless_permitted( self, item, user ):
+        if not self.is_permitted( item, user ):
+            error_info = dict( model_class=item.__class__, id=getattr( item, 'id', None ) )
+            raise self.permission_failed_error_class( **error_info )
+
+    def grant( self, item, user, flush=True ):
+        raise NotImplementedError( "abstract parent class" )
+
+    def revoke( self, item, user, flush=True ):
+        raise NotImplementedError( "abstract parent class" )
+
+    def _role_is_permitted( self, item, role ):
+        raise NotImplementedError( "abstract parent class" )
+
+    def _error_unless_role_permitted( self, item, role ):
+        if not self._role_is_permitted( item, role ):
+            error_info = dict( model_class=item.__class__, id=getattr( item, 'id', None ) )
+            raise self.permission_failed_error_class( **error_info )
+
+    def _grant_role( self, item, role, flush=True ):
+        raise NotImplementedError( "abstract parent class" )
+
+    def _revoke_role( self, item, role, flush=True ):
+        raise NotImplementedError( "abstract parent class" )
+
+
+class DatasetRBACPermission( RBACPermission ):
+    """
+    Base class for the manage and access RBAC permissions used by dataset security.
+
+    The DatasetPermissions used by the RBAC agent are associations between a Dataset
+    and a single Role.
+
+    DatasetPermissions are typed (but not polymorphic themselves) by a string 'action'.
+    There are two types:
+
+    - manage permissions : can a role manage the permissions on a dataset
+    - access : can a role read/look at/copy a dataset
+    """
+    permissions_class = model.DatasetPermissions
+    action_name = None
+
+    # ---- double secret probation
+    def __assert_action( self ):
+        if not self.action_name:
+            raise NotImplementedError( "abstract parent class" + " needs action_name" )
+
+    # ---- interface
+    def by_dataset( self, dataset ):
+        self.__assert_action()
+        all_permissions = self._all_types_by_dataset( dataset )
+        return [ p for p in all_permissions if p.action == self.action_name ]
+
+    # TODO: list?
+    def by_roles( self, dataset, roles ):
+        permissions = self.by_dataset( dataset )
+        return [ p for p in permissions if p.role in roles ]
+
+    def by_role( self, dataset, role ):
+        permissions = self.by_dataset( dataset )
+        found = [ p for p in permissions if p.role == role ]
+        if not found:
+            return None
+        if len( found ) > 1:
+            raise galaxy.exceptions.InconsistentDatabase( dataset=dataset.id, role=role.id )
+        return found[0]
+
+    def set( self, dataset, roles, flush=True ):
+        # NOTE: this removes all previous permissions of this type
+        self.clear( dataset, flush=False )
+        permissions = []
+        for role in roles:
+            permissions.append( self._create( dataset, role, flush=False ) )
+        if flush:
+            self.session().flush()
+        return permissions
+
+    def clear( self, dataset, flush=True ):
+        permissions = self.by_dataset( dataset )
+        return self._delete( permissions, flush=flush )
+
+    # ---- private
+    def _create( self, dataset, role, flush=True ):
+        permission = self.permissions_class( self.action_name, dataset, role )
+        self.session().add( permission )
+        if flush:
+            self.session().flush()
+        return permission
+
+    def _roles( self, dataset ):
+        return [ permission.role for permission in self.by_dataset( dataset ) ]
+
+    def _all_types_by_dataset( self, dataset ):
+        return dataset.actions
+
+    # as a general rule, DatasetPermissions are considered disposable
+    #   and there is no reason to update the models
+
+    # TODO: list?
+    def _delete( self, permissions, flush=True ):
+        for permission in permissions:
+            if permission in self.session().new:
+                self.session().expunge( permission )
+            else:
+                self.session().delete( permission )
+        if flush:
+            self.session().flush()
+
+    def _revoke_role( self, dataset, role, flush=True ):
+        role_permissions = self.by_roles( dataset, [ role ] )
+        return self._delete( role_permissions, flush=flush )
+
+
+def iterable_has_all( iterable, has_these ):
+    for item in has_these:
+        if item not in iterable:
+            return False
+    return True
+
+
+class DatasetManagePermissionFailedException( RBACPermissionFailedException ):
+    pass
+
+
+class ManageDatasetRBACPermission( DatasetRBACPermission ):
+    """
+    A class that controls the dataset permissions that control
+    who can manage that dataset's permissions.
+
+    When checking permissions for a user, the user is permitted if any of
+    the user's roles have the manage permission on the dataset.
+    """
+    # TODO: We may also be able to infer/record the dataset 'owner' as well.
+    action_name = security.RBACAgent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action
+    permission_failed_error_class = DatasetManagePermissionFailedException
+
+    # ---- interface
+    def is_permitted( self, dataset, user ):
+        # anonymous users cannot manage permissions on datasets
+        if self.user_manager.is_anonymous( user ):
+            return False
+        # admin is always permitted
+        # TODO: could probably move this into RBACPermission and call that first
+        if self.user_manager.is_admin( user ):
+            return True
+        for role in user.all_roles():
+            if self._role_is_permitted( dataset, role ):
+                return True
+        return False
+
+    def grant( self, dataset, user, flush=True ):
+        private_role = self._user_private_role( user )
+        return self._grant_role( dataset, private_role, flush=flush )
+
+    def revoke( self, dataset, user, flush=True ):
+        private_role = self._user_private_role( user )
+        return self._revoke_role( dataset, private_role, flush=flush )
+
+    # ---- private
+    def _role_is_permitted( self, dataset, role ):
+        return role in self._roles( dataset )
+
+    def _user_private_role( self, user ):
+        # error with 401 if no user
+        self.user_manager.error_if_anonymous( user )
+        return self.user_manager.private_role( user )
+
+    def _grant_role( self, dataset, role, flush=True ):
+        existing = self.by_role( dataset, role )
+        if existing:
+            return existing
+        return self._create( dataset, role, flush=flush )
+
+    def _revoke_role( self, dataset, role, flush=True ):
+        permissions = self.by_roles( dataset, [ role ] )
+        return self._delete( permissions, flush=flush )
+
+
+class DatasetAccessPermissionFailedException( RBACPermissionFailedException ):
+    pass
+
+
+class AccessDatasetRBACPermission( DatasetRBACPermission ):
+    """
+    A class to manage access permissions on a dataset.
+
+    A user must have all the Roles of all the access permissions associated
+    with a dataset in order to access it.
+    """
+    action_name = security.RBACAgent.permitted_actions.DATASET_ACCESS.action
+    permission_failed_error_class = DatasetAccessPermissionFailedException
+
+    # ---- interface
+    def is_permitted( self, dataset, user ):
+        current_roles = self._roles( dataset )
+        # NOTE: because of short-circuiting this allows
+        #   anonymous access to public datasets
+        return ( self._is_public_based_on_roles( current_roles ) or
+                 # admin is always permitted
+                 self.user_manager.is_admin( user ) or
+                 self._user_has_all_roles( user, current_roles ) )
+
+    def grant( self, item, user ):
+        pass
+        # not so easy
+        # need to check for a sharing role
+        # then add the new user to it
+
+    def revoke( self, item, user ):
+        pass
+        # not so easy
+
+    # TODO: these are a lil off message
+    def is_public( self, dataset ):
+        current_roles = self._roles( dataset )
+        return self._is_public_based_on_roles( current_roles )
+
+    def set_private( self, dataset, user, flush=True ):
+        private_role = self.user_manager.private_role( user )
+        return self.set( dataset, [ private_role ], flush=flush )
+
+    # ---- private
+    def _is_public_based_on_roles( self, roles ):
+        return len( roles ) == 0
+
+    def _user_has_all_roles( self, user, roles ):
+        user_roles = []
+        if not self.user_manager.is_anonymous( user ):
+            user_roles = user.all_roles()
+        return iterable_has_all( user_roles, roles )
+
+    def _role_is_permitted( self, dataset, role ):
+        current_roles = self._roles( dataset )
+        return ( self._is_public_based_on_roles( current_roles ) or
+                 # if there's only one role and this is it, let em in
+                 ( ( len( current_roles ) == 1 ) and ( role == current_roles[0] ) ) )
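
The access rules above reduce to: a dataset with no DATASET_ACCESS permissions
is public; otherwise a user must hold every one of the dataset's access roles,
and admins always pass. The same logic restated in isolation:

    def user_may_access( dataset_access_roles, user_roles, user_is_admin=False ):
        # no access roles at all means the dataset is public
        if not dataset_access_roles:
            return True
        if user_is_admin:
            return True
        # the user must hold *all* of the dataset's access roles
        return all( role in user_roles for role in dataset_access_roles )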
diff --git a/lib/galaxy/managers/roles.py b/lib/galaxy/managers/roles.py
new file mode 100644
index 0000000..085594c
--- /dev/null
+++ b/lib/galaxy/managers/roles.py
@@ -0,0 +1,49 @@
+"""
+Manager and Serializer for Roles.
+"""
+
+from sqlalchemy.orm import exc as sqlalchemy_exceptions
+
+import galaxy.exceptions
+from galaxy import model
+from galaxy.managers import base
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class RoleManager( base.ModelManager ):
+    """
+    Business logic for roles.
+    """
+    model_class = model.Role
+    foreign_key_name = 'role'
+
+    user_assoc = model.UserRoleAssociation
+    group_assoc = model.GroupRoleAssociation
+
+    def __init__( self, app ):
+        super( RoleManager, self ).__init__( app )
+
+    def get( self, trans, decoded_role_id ):
+        """
+        Method loads the role from the DB based on the given role id.
+
+        :param  decoded_role_id:      id of the role to load from the DB
+        :type   decoded_role_id:      int
+
+        :returns:   the loaded Role object
+        :rtype:     galaxy.model.Role
+
+        :raises: InconsistentDatabase, RequestParameterInvalidException, InternalServerError
+        """
+        try:
+            role = ( self.session().query( self.model_class )
+                     .filter( self.model_class.id == decoded_role_id ).one() )
+        except sqlalchemy_exceptions.MultipleResultsFound:
+            raise galaxy.exceptions.InconsistentDatabase( 'Multiple roles found with the same id.' )
+        except sqlalchemy_exceptions.NoResultFound:
+            raise galaxy.exceptions.RequestParameterInvalidException( 'No role found with the id provided.' )
+        except Exception as e:
+            raise galaxy.exceptions.InternalServerError( 'Error loading from the database.' + str(e) )
+        return role
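
A usage sketch from an API layer (assumes `trans` and an encoded id taken from
the request):

    role_manager = RoleManager( trans.app )
    decoded_id = trans.security.decode_id( encoded_role_id )
    role = role_manager.get( trans, decoded_id )  # raises on missing/duplicate ids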
diff --git a/lib/galaxy/managers/secured.py b/lib/galaxy/managers/secured.py
new file mode 100644
index 0000000..c08acc9
--- /dev/null
+++ b/lib/galaxy/managers/secured.py
@@ -0,0 +1,121 @@
+"""
+Accessible models can be read and copied but not modified or deleted.
+
+Owned models can be modified and deleted.
+"""
+
+from galaxy import exceptions
+
+
+class AccessibleManagerMixin( object ):
+    """
+    A security interface to check if a User can read/view an item.
+
+    This can also be thought of as 'read but not modify' privileges.
+    """
+
+    # don't want to override by_id since consumers will also want to fetch w/o any security checks
+    def is_accessible( self, item, user, **kwargs ):
+        """
+        Return True if the item is accessible to the user.
+        """
+        # override in subclasses
+        raise exceptions.NotImplemented( "Abstract interface Method" )
+
+    def get_accessible( self, id, user, **kwargs ):
+        """
+        Return the item with the given id if it's accessible to user,
+        otherwise raise an error.
+
+        :raises exceptions.ItemAccessibilityException:
+        """
+        item = self.by_id( id )
+        return self.error_unless_accessible( item, user, **kwargs )
+
+    def error_unless_accessible( self, item, user, **kwargs ):
+        """
+        Raise an error if the item is NOT accessible to user, otherwise return the item.
+
+        :raises exceptions.ItemAccessibilityException:
+        """
+        if self.is_accessible( item, user, **kwargs ):
+            return item
+        raise exceptions.ItemAccessibilityException( "%s is not accessible by user" % ( self.model_class.__name__ ) )
+
+    # TODO:?? are these even useful?
+    def list_accessible( self, user, **kwargs ):
+        """
+        Return a list of items accessible to the user, raising an error if ANY
+        are inaccessible.
+
+        :raises exceptions.ItemAccessibilityException:
+        """
+        raise exceptions.NotImplemented( "Abstract interface Method" )
+        # NOTE: this will be a large, inefficient list if filters are not passed in kwargs
+        # items = ModelManager.list( self, trans, **kwargs )
+        # return [ self.error_unless_accessible( trans, item, user ) for item in items ]
+
+    def filter_accessible( self, user, **kwargs ):
+        """
+        Return a list of items accessible to the user.
+        """
+        raise exceptions.NotImplemented( "Abstract interface Method" )
+        # NOTE: this will be a large, inefficient list if filters are not passed in kwargs
+        # items = ModelManager.list( self, trans, **kwargs )
+        # return filter( lambda item: self.is_accessible( trans, item, user ), items )
+
+
+class OwnableManagerMixin( object ):
+    """
+    A security interface to check if a User is an item's owner.
+
+    Some resources are associated with the User that created or imported them
+    and these Users can be considered the models' owner.
+
+    This can also be thought of as write/edit privileges.
+    """
+
+    def is_owner( self, item, user, **kwargs ):
+        """
+        Return True if user owns the item.
+        """
+        # override in subclasses
+        raise exceptions.NotImplemented( "Abstract interface Method" )
+
+    def get_owned( self, id, user, **kwargs ):
+        """
+        Return the item with the given id if owned by the user,
+        otherwise raise an error.
+
+        :raises exceptions.ItemOwnershipException:
+        """
+        item = self.by_id( id )
+        return self.error_unless_owner( item, user, **kwargs )
+
+    def error_unless_owner( self, item, user, **kwargs ):
+        """
+        Raise an error if the item is NOT owned by user, otherwise return the item.
+
+        :raises exceptions.ItemOwnershipException:
+        """
+        if self.is_owner( item, user, **kwargs ):
+            return item
+        raise exceptions.ItemOwnershipException( "%s is not owned by user" % ( self.model_class.__name__ ) )
+
+    def list_owned( self, user, **kwargs ):
+        """
+        Return a list of items owned by the user, raising an error if ANY
+        are not.
+
+        :raises exceptions.ItemOwnershipException:
+        """
+        raise exceptions.NotImplemented( "Abstract interface Method" )
+        # just alias to by_user (easier/same thing)
+        # return self.by_user( trans, user, **kwargs )
+
+    def filter_owned( self, user, **kwargs ):
+        """
+        Return a list of items owned by the user.
+        """
+        # just alias to list_owned
+        return self.list_owned( user, **kwargs )
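
A concrete manager supplies the two predicates and inherits the rest
(get_owned, error_unless_owner, etc.). A minimal sketch, assuming a
ModelManager-style base that provides by_id and model_class:

    class SketchManager( AccessibleManagerMixin, OwnableManagerMixin ):
        def is_owner( self, item, user, **kwargs ):
            return item.user == user

        def is_accessible( self, item, user, **kwargs ):
            # readable if publicly importable or owned
            return getattr( item, 'importable', False ) or self.is_owner( item, user, **kwargs )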
diff --git a/lib/galaxy/managers/sharable.py b/lib/galaxy/managers/sharable.py
new file mode 100644
index 0000000..a091881
--- /dev/null
+++ b/lib/galaxy/managers/sharable.py
@@ -0,0 +1,441 @@
+"""
+Superclass Manager and Serializers for Sharable objects.
+
+A sharable Galaxy object:
+    has an owner/creator User
+    is sharable with other, specific Users
+    is importable (copyable) by users that have access
+    has a slug which can be used as a link to view the resource
+    can be published effectively making it available to all other Users
+    can be rated
+"""
+
+from sqlalchemy import true
+import re
+from galaxy import exceptions
+
+from galaxy.managers import base
+from galaxy.managers import secured
+from galaxy.managers import taggable
+from galaxy.managers import annotatable
+from galaxy.managers import ratable
+from galaxy.managers import users
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class SharableModelManager( base.ModelManager, secured.OwnableManagerMixin, secured.AccessibleManagerMixin,
+        taggable.TaggableManagerMixin, annotatable.AnnotatableManagerMixin, ratable.RatableManagerMixin ):
+    # e.g. histories, pages, stored workflows, visualizations
+    # base.DeleteableModelMixin? (all four are deletable)
+
+    #: the model used for UserShareAssociations with this model
+    user_share_model = None
+
+    #: the single character abbreviation used in username_and_slug: e.g. 'h' for histories: u/user/h/slug
+    SINGLE_CHAR_ABBR = None
+
+    def __init__( self, app ):
+        super( SharableModelManager, self ).__init__( app )
+        # user manager is needed to check access/ownership/admin
+        self.user_manager = users.UserManager( app )
+
+    # .... has a user
+    def by_user( self, user, filters=None, **kwargs ):
+        """
+        Return list for all items (of model_class type) associated with the given
+        `user`.
+        """
+        user_filter = self.model_class.user_id == user.id
+        filters = self._munge_filters( user_filter, filters )
+        return self.list( filters=filters, **kwargs )
+
+    # .... owned/accessible interfaces
+    def is_owner( self, item, user, **kwargs ):
+        """
+        Return true if this sharable belongs to `user` (or `user` is an admin).
+        """
+        # ... effectively a good fit to have this here, but not semantically
+        if self.user_manager.is_admin( user ):
+            return True
+        return item.user == user
+
+    def is_accessible( self, item, user, **kwargs ):
+        """
+        If the item is importable, is owned by `user`, or (the valid) `user`
+        is in 'users shared with' list for the item: return True.
+        """
+        if item.importable:
+            return True
+        # note: owners always have access - checking for accessible implicitly checks for ownership
+        if self.is_owner( item, user, **kwargs ):
+            return True
+        if self.user_manager.is_anonymous( user ):
+            return False
+        if user in item.users_shared_with_dot_users:
+            return True
+        return False
+
+    # .... importable
+    def make_importable( self, item, flush=True ):
+        """
+        Makes item accessible--viewable and importable--and sets item's slug.
+        Flushes the session unless `flush` is False. Item must have name,
+        user, importable, and slug attributes.
+        """
+        self.create_unique_slug( item, flush=False )
+        return self._session_setattr( item, 'importable', True, flush=flush )
+
+    def make_non_importable( self, item, flush=True ):
+        """
+        Makes item non-importable, unpublishing it first if necessary.
+        Flushes the session unless `flush` is False. Item must have
+        published and importable attributes.
+        """
+        # item must be unpublished if non-importable
+        if item.published:
+            self.unpublish( item, flush=False )
+        return self._session_setattr( item, 'importable', False, flush=flush )
+
+    # .... published
+    def publish( self, item, flush=True ):
+        """
+        Set both the importable and published flags on `item` to True.
+        """
+        # item must be importable to be published
+        if not item.importable:
+            self.make_importable( item, flush=False )
+        return self._session_setattr( item, 'published', True, flush=flush )
+
+    def unpublish( self, item, flush=True ):
+        """
+        Set the published flag on `item` to False.
+        """
+        return self._session_setattr( item, 'published', False, flush=flush )
+
+    def _query_published( self, filters=None, **kwargs ):
+        """
+        Return a query for all published items.
+        """
+        published_filter = self.model_class.published == true()
+        filters = self._munge_filters( published_filter, filters )
+        return self.query( filters=filters, **kwargs )
+
+    def list_published( self, filters=None, **kwargs ):
+        """
+        Return a list of all published items.
+        """
+        published_filter = self.model_class.published == true()
+        filters = self._munge_filters( published_filter, filters )
+        return self.list( filters=filters, **kwargs )
+
+    # .... user sharing
+    # sharing is often done via a 3rd table btwn a User and an item -> a <Item>UserShareAssociation
+    def get_share_assocs( self, item, user=None ):
+        """
+        Get the UserShareAssociations for the `item`.
+
+        Optionally send in `user` to test for a single match.
+        """
+        query = self.query_associated( self.user_share_model, item )
+        if user is not None:
+            query = query.filter_by( user=user )
+        return query.all()
+
+    def share_with( self, item, user, flush=True ):
+        """
+        Get or create a share for the given user (or users if `user` is a list).
+        """
+        # precondition: user has been validated
+        # allow user to be a list and call recursively
+        if isinstance( user, list ):
+            return map( lambda user: self.share_with( item, user, flush=False ), user )
+        # get or create
+        existing = self.get_share_assocs( item, user=user )
+        if existing:
+            return existing.pop( 0 )
+        return self._create_user_share_assoc( item, user, flush=flush )
+
+    def _create_user_share_assoc( self, item, user, flush=True ):
+        """
+        Create a share for the given user.
+        """
+        user_share_assoc = self.user_share_model()
+        self.session().add( user_share_assoc )
+        self.associate( user_share_assoc, item )
+        user_share_assoc.user = user
+
+        # ensure an item slug so shared users can access
+        if not item.slug:
+            self.create_unique_slug( item )
+
+        if flush:
+            self.session().flush()
+        return user_share_assoc
+
+    def unshare_with( self, item, user, flush=True ):
+        """
+        Delete a user share (or list of shares) from the database.
+        """
+        if isinstance( user, list ):
+            return map( lambda user: self.unshare_with( item, user, flush=False ), user )
+        # Look for and delete sharing relation for user.
+        user_share_assoc = self.get_share_assocs( item, user=user )[0]
+        self.session().delete( user_share_assoc )
+        if flush:
+            self.session().flush()
+        return user_share_assoc
+
+    def _query_shared_with( self, user, eagerloads=True, **kwargs ):
+        """
+        Return a query for this model already filtered to models shared
+        with a particular user.
+        """
+        query = self.session().query( self.model_class ).join( 'users_shared_with' )
+        if eagerloads is False:
+            query = query.enable_eagerloads( False )
+        # TODO: as filter in FilterParser also
+        query = query.filter( self.user_share_model.user == user )
+        return self._filter_and_order_query( query, **kwargs )
+
+    def list_shared_with( self, user, filters=None, order_by=None, limit=None, offset=None, **kwargs ):
+        """
+        Return a list of those models shared with a particular user.
+        """
+        # TODO: refactor out dupl-code btwn base.list
+        orm_filters, fn_filters = self._split_filters( filters )
+        if not fn_filters:
+            # if no fn_filtering required, we can use the 'all orm' version with limit offset
+            query = self._query_shared_with( user, filters=orm_filters,
+                order_by=order_by, limit=limit, offset=offset, **kwargs )
+            return self._orm_list( query=query, **kwargs )
+
+        # fn filters will change the number of items returnable by limit/offset - remove them here from the orm query
+        query = self._query_shared_with( user, filters=orm_filters,
+            order_by=order_by, limit=None, offset=None, **kwargs )
+        # apply limit and offset afterwards
+        items = self._apply_fn_filters_gen( query.all(), fn_filters )
+        return list( self._apply_fn_limit_offset_gen( items, limit, offset ) )
+
+    # .... slugs
+    # slugs are human readable strings often used to link to sharable resources (replacing ids)
+    # TODO: as validator, deserializer, etc. (maybe another object entirely?)
+    def set_slug( self, item, new_slug, user, flush=True ):
+        """
+        Validate and set the new slug for `item`.
+        """
+        # precondition: has been validated
+        if not self.is_valid_slug( new_slug ):
+            raise exceptions.RequestParameterInvalidException( "Invalid slug", slug=new_slug )
+
+        # error if slug is already in use
+        if self._slug_exists( user, new_slug ):
+            raise exceptions.Conflict( "Slug already exists", slug=new_slug )
+
+        item.slug = new_slug
+        if flush:
+            self.session().flush()
+        return item
+
+    def is_valid_slug( self, slug ):
+        """
+        Returns true if `slug` is valid.
+        """
+        VALID_SLUG_RE = re.compile( r"^[a-z0-9\-]+$" )
+        return VALID_SLUG_RE.match( slug )
+
+    def _existing_set_of_slugs( self, user ):
+        query = ( self.session().query( self.model_class.slug )
+                  .filter_by( user=user ) )
+        return list( set( query.all() ) )
+
+    def _slug_exists( self, user, slug ):
+        query = ( self.session().query( self.model_class.slug )
+                  .filter_by( user=user, slug=slug ) )
+        return query.count() != 0
+
+    def _slugify( self, start_with ):
+        # Replace whitespace with '-'
+        slug_base = re.sub( r"\s+", "-", start_with )
+        # Remove all non-alphanumeric characters.
+        slug_base = re.sub( r"[^a-zA-Z0-9\-]", "", slug_base )
+        # Remove trailing '-'.
+        if slug_base.endswith('-'):
+            slug_base = slug_base[:-1]
+        return slug_base
+
+    def _default_slug_base( self, item ):
+        # override in subclasses
+        if hasattr( item, 'title' ):
+            return item.title.lower()
+        return item.name.lower()
+
+    def get_unique_slug( self, item ):
+        """
+        Returns a slug that is unique among user's importable items
+        for item's class.
+        """
+        cur_slug = item.slug
+
+        # Setup slug base.
+        if cur_slug is None or cur_slug == "":
+            slug_base = self._slugify( self._default_slug_base( item ) )
+        else:
+            slug_base = cur_slug
+
+        # Using slug base, find a slug that is not taken. If slug is taken,
+        # add integer to end.
+        new_slug = slug_base
+        count = 1
+        while ( self.session().query( item.__class__ )
+                    .filter_by( user=item.user, slug=new_slug, importable=True )
+                    .count() != 0 ):
+            # Slug taken; choose a new slug based on count. This approach can
+            # handle numerous items with the same name gracefully.
+            new_slug = '%s-%i' % ( slug_base, count )
+            count += 1
+
+        return new_slug
+
+    def create_unique_slug( self, item, flush=True ):
+        """
+        Set a new, unique slug on the item.
+        """
+        item.slug = self.get_unique_slug( item )
+        self.session().add( item )
+        if flush:
+            self.session().flush()
+        return item
+
+    # TODO: def by_slug( self, user, **kwargs ):
+
+
+class SharableModelSerializer( base.ModelSerializer,
+       taggable.TaggableSerializerMixin, annotatable.AnnotatableSerializerMixin, ratable.RatableSerializerMixin ):
+    # TODO: stub
+    SINGLE_CHAR_ABBR = None
+
+    def add_serializers( self ):
+        super( SharableModelSerializer, self ).add_serializers()
+        taggable.TaggableSerializerMixin.add_serializers( self )
+        annotatable.AnnotatableSerializerMixin.add_serializers( self )
+        ratable.RatableSerializerMixin.add_serializers( self )
+
+        self.serializers.update({
+            'user_id'           : self.serialize_id,
+            'username_and_slug' : self.serialize_username_and_slug,
+            'users_shared_with' : self.serialize_users_shared_with
+        })
+        # these use the default serializer but must still be white-listed
+        self.serializable_keyset.update([
+            'importable', 'published', 'slug'
+        ])
+
+    def serialize_username_and_slug( self, item, key, **context ):
+        if not ( item.user and item.slug and self.SINGLE_CHAR_ABBR ):
+            return None
+        return ( '/' ).join(( 'u', item.user.username, self.SINGLE_CHAR_ABBR, item.slug ) )
+
+    # the only ones that need any fns:
+    #   user/user_id
+    #   username_and_slug?
+
+    def serialize_users_shared_with( self, item, key, user=None, **context ):
+        """
+        Returns a list of encoded ids for the users the item has been shared with.
+
+        Skipped if the requesting user is not the owner.
+        """
+        # TODO: still an open question as to whether key removal based on user
+        # should be handled here or at a higher level (even if we didn't have to pass user (via thread context, etc.))
+        if not self.manager.is_owner( item, user ):
+            self.skip()
+
+        share_assocs = self.manager.get_share_assocs( item )
+        return [ self.serialize_id( share, 'user_id' ) for share in share_assocs ]
+
+
+class SharableModelDeserializer( base.ModelDeserializer,
+        taggable.TaggableDeserializerMixin, annotatable.AnnotatableDeserializerMixin, ratable.RatableDeserializerMixin ):
+
+    def add_deserializers( self ):
+        super( SharableModelDeserializer, self ).add_deserializers()
+        taggable.TaggableDeserializerMixin.add_deserializers( self )
+        annotatable.AnnotatableDeserializerMixin.add_deserializers( self )
+        ratable.RatableDeserializerMixin.add_deserializers( self )
+
+        self.deserializers.update({
+            'published'         : self.deserialize_published,
+            'importable'        : self.deserialize_importable,
+            'users_shared_with' : self.deserialize_users_shared_with,
+        })
+
+    def deserialize_published( self, item, key, val, **context ):
+        """
+        """
+        val = self.validate.bool( key, val )
+        if item.published == val:
+            return val
+
+        if val:
+            self.manager.publish( item, flush=False )
+        else:
+            self.manager.unpublish( item, flush=False )
+        return item.published
+
+    def deserialize_importable( self, item, key, val, **context ):
+        """
+        """
+        val = self.validate.bool( key, val )
+        if item.importable == val:
+            return val
+
+        if val:
+            self.manager.make_importable( item, flush=False )
+        else:
+            self.manager.make_non_importable( item, flush=False )
+        return item.importable
+
+    # TODO: def deserialize_slug( self, item, val, **context ):
+
+    def deserialize_users_shared_with( self, item, key, val, **context ):
+        """
+        Accept a list of encoded user_ids, validate them as users, and then
+        add or remove user shares in order to update the users_shared_with to
+        match the given list finally returning the new list of shares.
+        """
+        unencoded_ids = [ self.app.security.decode_id( id_ ) for id_ in val ]
+        new_users_shared_with = set( self.manager.user_manager.by_ids( unencoded_ids ) )
+        current_shares = self.manager.get_share_assocs( item )
+        currently_shared_with = set([ share.user for share in current_shares ])
+
+        needs_adding = new_users_shared_with - currently_shared_with
+        for user in needs_adding:
+            current_shares.append( self.manager.share_with( item, user, flush=False ) )
+
+        needs_removing = currently_shared_with - new_users_shared_with
+        for user in needs_removing:
+            current_shares.remove( self.manager.unshare_with( item, user, flush=False ) )
+
+        self.manager.session().flush()
+        # TODO: or should this return the list of ids?
+        return current_shares
+
+
+class SharableModelFilters( base.ModelFilterParser,
+        taggable.TaggableFilterMixin, annotatable.AnnotatableFilterMixin, ratable.RatableFilterMixin ):
+
+    def _add_parsers( self ):
+        super( SharableModelFilters, self )._add_parsers()
+        taggable.TaggableFilterMixin._add_parsers( self )
+        annotatable.AnnotatableFilterMixin._add_parsers( self )
+        ratable.RatableFilterMixin._add_parsers( self )
+
+        self.orm_filter_parsers.update({
+            'importable'    : { 'op': ( 'eq', ), 'val': self.parse_bool },
+            'published'     : { 'op': ( 'eq', ), 'val': self.parse_bool },
+            'slug'          : { 'op': ( 'eq', 'contains', 'like' ) },
+            # chose by user should prob. only be available for admin? (most often we'll only need trans.user)
+            # 'user'          : { 'op': ( 'eq' ), 'val': self.parse_id_list },
+        })
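
Slug generation above is deterministic: whitespace becomes '-', remaining
non-alphanumerics are dropped, and collisions among the same user's importable
items get an integer suffix. For example (hypothetical values):

    manager._slugify( 'My RNA-seq Analysis!' )  # -> 'My-RNA-seq-Analysis'
    # if 'my-analysis' is already taken, get_unique_slug() yields
    # 'my-analysis-1', then 'my-analysis-2', and so on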
diff --git a/lib/galaxy/managers/taggable.py b/lib/galaxy/managers/taggable.py
new file mode 100644
index 0000000..4be6f2b
--- /dev/null
+++ b/lib/galaxy/managers/taggable.py
@@ -0,0 +1,122 @@
+"""
+Mixins for Taggable model managers and serializers.
+"""
+
+# from galaxy import exceptions as galaxy_exceptions
+
+import logging
+
+from galaxy.util import unicodify
+
+log = logging.getLogger( __name__ )
+
+
+# TODO: work out the relation between serializers and managers and then fold these into the parent of the two
+def _tag_str_gen( item ):
+    # TODO: which user is this? all?
+    for tag in item.tags:
+        tag_str = tag.user_tname
+        if tag.value is not None:
+            tag_str += ":" + tag.user_value
+        yield tag_str
+
+
+def _tags_to_strings( item ):
+    if not hasattr( item, 'tags' ):
+        return None
+    return list( _tag_str_gen( item ) )
+
+
+def _tags_from_strings( item, tag_handler, new_tags_list, user=None ):
+    # TODO: have to assume trans.user here...
+    if not user:
+        # raise galaxy_exceptions.RequestParameterMissingException( 'User required for tags on ' + str( item ) )
+        # TODO: this becomes a 'silent failure' - no tags are set. This is a questionable approach but
+        # I haven't found a better one for anon users copying items with tags
+        return
+    # TODO: duped from tags manager - de-dupe when moved to taggable mixin
+    tag_handler.delete_item_tags( user, item )
+    new_tags_str = ','.join( new_tags_list )
+    tag_handler.apply_item_tags( user, item, unicodify( new_tags_str, 'utf-8' ) )
+    # TODO:!! does the creation of new_tags_list mean there are now more and more unused tag rows in the db?
+
+
+class TaggableManagerMixin( object ):
+    #: class of TagAssociation (e.g. HistoryTagAssociation)
+    tag_assoc = None
+
+    # TODO: most of this can be done by delegating to the TagManager?
+    def get_tags( self, item ):
+        """
+        Return a list of tag strings.
+        """
+        return _tags_to_strings( item )
+
+    def set_tags( self, item, new_tags, user=None ):
+        """
+        Set an `item`'s tags from a list of strings.
+        """
+        return _tags_from_strings( item, self.app.tag_handler, new_tags, user=user )
+
+    # def tags_by_user( self, user, **kwargs ):
+    # TODO: here or TagManager
+    #    pass
+
+
+class TaggableSerializerMixin( object ):
+
+    def add_serializers( self ):
+        self.serializers[ 'tags' ] = self.serialize_tags
+
+    def serialize_tags( self, item, key, **context ):
+        """
+        Return tags as a list of strings.
+        """
+        return _tags_to_strings( item )
+
+
+class TaggableDeserializerMixin( object ):
+
+    def add_deserializers( self ):
+        self.deserializers[ 'tags' ] = self.deserialize_tags
+
+    def deserialize_tags( self, item, key, val, user=None, **context ):
+        """
+        Make sure `val` is a valid list of tag strings and assign them.
+
+        Note: this will erase any previous tags.
+        """
+        new_tags_list = self.validate.basestring_list( key, val )
+        _tags_from_strings( item, self.app.tag_handler, new_tags_list, user=user )
+        return item.tags
+
+
+class TaggableFilterMixin( object ):
+
+    def filter_has_partial_tag( self, item, val ):
+        """
+        Return True if any tag partially contains `val`.
+        """
+        for tag_str in _tag_str_gen( item ):
+            if val in tag_str:
+                return True
+        return False
+
+    def filter_has_tag( self, item, val ):
+        """
+        Return True if any tag exactly equals `val`.
+        """
+        for tag_str in _tag_str_gen( item ):
+            if val == tag_str:
+                return True
+        return False
+
+    def _add_parsers( self ):
+        self.fn_filter_parsers.update({
+            'tag': {
+                'op': {
+                    'eq'    : self.filter_has_tag,
+                    'has'   : self.filter_has_partial_tag,
+                }
+            }
+        })
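
Tags serialize to plain strings, either `name` or `name:value`, and the two
fn filters above compare against those strings. The semantics, restated
standalone:

    tags = [ 'rna-seq', 'lab:smith' ]       # as produced by _tag_str_gen
    any( t == 'lab:smith' for t in tags )   # 'eq' op  -> True
    any( 'smith' in t for t in tags )       # 'has' op -> True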
diff --git a/lib/galaxy/managers/tags.py b/lib/galaxy/managers/tags.py
new file mode 100644
index 0000000..6bb0a6e
--- /dev/null
+++ b/lib/galaxy/managers/tags.py
@@ -0,0 +1,319 @@
+import logging
+import re
+
+from galaxy.util import unicodify
+from six import string_types
+from sqlalchemy.sql import select
+from sqlalchemy.sql.expression import func
+
+log = logging.getLogger( __name__ )
+
+
+# Item-specific information needed to perform tagging.
+class ItemTagAssocInfo( object ):
+    def __init__( self, item_class, tag_assoc_class, item_id_col ):
+        self.item_class = item_class
+        self.tag_assoc_class = tag_assoc_class
+        self.item_id_col = item_id_col
+
+
+class TagManager( object ):
+    """
+    Manages CRUD operations related to tagging objects.
+    """
+
+    def __init__( self, app ):
+        self.app = app
+        # Minimum tag length.
+        self.min_tag_len = 2
+        # Maximum tag length.
+        self.max_tag_len = 255
+        # Tag separator.
+        self.tag_separators = ',;'
+        # Hierarchy separator.
+        self.hierarchy_separator = '.'
+        # Key-value separator.
+        self.key_value_separators = "=:"
+        # Initialize with known classes - add to this in subclasses.
+        self.item_tag_assoc_info = {}
+
+    def set_tags_from_list( self, user, item, new_tags_list ):
+        # precondition: item is already security checked against user
+        # precondition: new_tags_list is a list of sanitized/formatted tag strings
+
+        self.delete_item_tags( user, item )
+        new_tags_str = ','.join( new_tags_list )
+        self.apply_item_tags( user, item, unicodify( new_tags_str, 'utf-8' ) )
+        self.app.model.context.flush()
+        return item.tags
+
+    def get_tag_assoc_class( self, item_class ):
+        """Returns tag association class for item class."""
+        return self.item_tag_assoc_info[item_class.__name__].tag_assoc_class
+
+    def get_id_col_in_item_tag_assoc_table( self, item_class ):
+        """Returns item id column in class' item-tag association table."""
+        return self.item_tag_assoc_info[item_class.__name__].item_id_col
+
+    def get_community_tags( self, item=None, limit=None ):
+        """Returns community tags for an item."""
+        # Get item-tag association class.
+        item_class = item.__class__
+        item_tag_assoc_class = self.get_tag_assoc_class( item_class )
+        if not item_tag_assoc_class:
+            return []
+        # Build select statement.
+        cols_to_select = [ item_tag_assoc_class.table.c.tag_id, func.count( '*' ) ]
+        from_obj = item_tag_assoc_class.table.join( item_class.table ).join( self.app.model.Tag.table )
+        where_clause = ( self.get_id_col_in_item_tag_assoc_table( item_class ) == item.id )
+        order_by = [ func.count( "*" ).desc() ]
+        group_by = item_tag_assoc_class.table.c.tag_id
+        # Do query and get result set.
+        query = select( columns=cols_to_select,
+                        from_obj=from_obj,
+                        whereclause=where_clause,
+                        group_by=group_by,
+                        order_by=order_by,
+                        limit=limit )
+        result_set = self.app.model.context.execute( query )
+        # Return community tags.
+        community_tags = []
+        for row in result_set:
+            tag_id = row[0]
+            community_tags.append( self.get_tag_by_id( tag_id ) )
+        return community_tags
+
+    def get_tool_tags( self ):
+        result_set = self.app.model.context.execute( select( columns=[ self.app.model.ToolTagAssociation.table.c.tag_id ],
+                                                             from_obj=self.app.model.ToolTagAssociation.table ).distinct() )
+
+        tags = []
+        for row in result_set:
+            tag_id = row[0]
+            tags.append( self.get_tag_by_id( tag_id ) )
+        return tags
+
+    def remove_item_tag( self, user, item, tag_name ):
+        """Remove a tag from an item."""
+        # Get item tag association.
+        item_tag_assoc = self._get_item_tag_assoc( user, item, tag_name )
+        # Remove the association, if one exists.
+        if item_tag_assoc:
+            self.app.model.context.delete( item_tag_assoc )
+            item.tags.remove( item_tag_assoc )
+            return True
+        return False
+
+    def delete_item_tags( self, user, item ):
+        """Delete tags from an item."""
+        # Delete item-tag associations.
+        for tag in item.tags:
+            self.app.model.context.delete( tag )
+        # Delete tags from item.
+        del item.tags[:]
+
+    def item_has_tag( self, user, item, tag ):
+        """Returns true if item is has a given tag."""
+        # Get tag name.
+        if isinstance( tag, string_types ):
+            tag_name = tag
+        elif isinstance( tag, self.app.model.Tag ):
+            tag_name = tag.name
+        # Check for an item-tag association to see if item has a given tag.
+        item_tag_assoc = self._get_item_tag_assoc( user, item, tag_name )
+        if item_tag_assoc:
+            return True
+        return False
+
+    def apply_item_tag( self, user, item, name, value=None ):
+        # Use lowercase name for searching/creating tag.
+        lc_name = name.lower()
+        # Get or create item-tag association.
+        item_tag_assoc = self._get_item_tag_assoc( user, item, lc_name )
+        if not item_tag_assoc:
+            # Create item-tag association.
+            # Create tag; if None, skip the tag (and log error).
+            tag = self._get_or_create_tag( lc_name )
+            if not tag:
+                log.warning( "Failed to create tag with name %s" % lc_name )
+                return
+            # Create tag association based on item class.
+            item_tag_assoc_class = self.get_tag_assoc_class( item.__class__ )
+            item_tag_assoc = item_tag_assoc_class()
+            # Add tag to association.
+            item.tags.append( item_tag_assoc )
+            item_tag_assoc.tag = tag
+            item_tag_assoc.user = user
+        # Apply name/value attributes to the item-tag association (the lowercased value is stored for searching).
+        lc_value = None
+        if value:
+            lc_value = value.lower()
+        item_tag_assoc.user_tname = name
+        item_tag_assoc.user_value = value
+        item_tag_assoc.value = lc_value
+        return item_tag_assoc
+
+    def apply_item_tags( self, user, item, tags_str ):
+        """Apply tags to an item."""
+        # Parse tags.
+        parsed_tags = self.parse_tags( tags_str )
+        # Apply each tag.
+        for name, value in parsed_tags.items():
+            self.apply_item_tag( user, item, name, value )
+
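+    # Usage sketch (editorial note, not part of the upstream source; `app`,
+    # `user` and a `history` item are assumed to exist): a separator-delimited
+    # string is parsed and applied tag by tag:
+    #
+    #   tm = TagManager( app )
+    #   tm.apply_item_tags( user, history, 'rna-seq, lab=smith' )
+    #   # history.tags now holds associations for 'rna-seq' and 'lab:smith'
+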
+    def get_tags_str( self, tags ):
+        """Build a string from an item's tags."""
+        # Return empty string if there are no tags.
+        if not tags:
+            return ""
+        # Create string of tags.
+        tags_str_list = list()
+        for tag in tags:
+            tag_str = tag.user_tname
+            if tag.value is not None:
+                tag_str += ":" + tag.user_value
+            tags_str_list.append( tag_str )
+        return ", ".join( tags_str_list )
+
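+    # Editorial note: get_tags_str renders associations back into a display
+    # string - continuing the sketch above:
+    #
+    #   tm.get_tags_str( history.tags )
+    #   # -> 'rna-seq, lab:smith'
+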
+    def get_tag_by_id( self, tag_id ):
+        """Get a Tag object from a tag id."""
+        return self.app.model.context.query( self.app.model.Tag ).filter_by( id=tag_id ).first()
+
+    def get_tag_by_name( self, tag_name ):
+        """Get a Tag object from a tag name (string)."""
+        if tag_name:
+            return self.app.model.context.query( self.app.model.Tag ).filter_by( name=tag_name.lower() ).first()
+        return None
+
+    def _create_tag( self, tag_str ):
+        """Create a Tag object from a tag string."""
+        tag_hierarchy = tag_str.split( self.hierarchy_separator )
+        tag_prefix = ""
+        parent_tag = None
+        for sub_tag in tag_hierarchy:
+            # Get or create subtag.
+            tag_name = tag_prefix + self._scrub_tag_name( sub_tag )
+            tag = self.app.model.context.query( self.app.model.Tag ).filter_by( name=tag_name ).first()
+            if not tag:
+                tag = self.app.model.Tag( type=0, name=tag_name )
+            # Set tag parent.
+            tag.parent = parent_tag
+            # Update parent and tag prefix.
+            parent_tag = tag
+            tag_prefix = tag.name + self.hierarchy_separator
+        return tag
+
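+    # Editorial sketch of the hierarchy behaviour: a dotted tag string yields
+    # one Tag per level, chained via `parent`:
+    #
+    #   tag = tm._create_tag( 'group.subgroup.leaf' )
+    #   # tag.name               == 'group.subgroup.leaf'
+    #   # tag.parent.name        == 'group.subgroup'
+    #   # tag.parent.parent.name == 'group'
+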
+    def _get_or_create_tag( self, tag_str ):
+        """Get or create a Tag object from a tag string."""
+        # Scrub tag; if tag is None after being scrubbed, return None.
+        scrubbed_tag_str = self._scrub_tag_name( tag_str )
+        if not scrubbed_tag_str:
+            return None
+        # Get item tag.
+        tag = self.get_tag_by_name( scrubbed_tag_str )
+        # Create tag if necessary.
+        if tag is None:
+            tag = self._create_tag( scrubbed_tag_str )
+        return tag
+
+    def _get_item_tag_assoc( self, user, item, tag_name ):
+        """
+        Return the ItemTagAssociation object for a user, item, and tag string;
+        return None if there is no such association.
+        """
+        scrubbed_tag_name = self._scrub_tag_name( tag_name )
+        for item_tag_assoc in item.tags:
+            if ( item_tag_assoc.user == user ) and ( item_tag_assoc.user_tname == scrubbed_tag_name ):
+                return item_tag_assoc
+        return None
+
+    def parse_tags( self, tag_str ):
+        """
+        Parse a string into (tag-name, value) pairs, scrubbing names and
+        values as they are parsed, and return a dictionary keyed by tag name.
+        """
+        # Gracefully handle None.
+        if not tag_str:
+            return dict()
+        # Split tags based on separators.
+        reg_exp = re.compile( '[' + self.tag_separators + ']' )
+        raw_tags = reg_exp.split( tag_str )
+        # Extract name-value pairs.
+        name_value_pairs = dict()
+        for raw_tag in raw_tags:
+            nv_pair = self._get_name_value_pair( raw_tag )
+            scrubbed_name = self._scrub_tag_name( nv_pair[0] )
+            scrubbed_value = self._scrub_tag_value( nv_pair[1] )
+            name_value_pairs[scrubbed_name] = scrubbed_value
+        return name_value_pairs
+
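+    # Editorial sketch: tags split on ',' or ';', and each raw tag splits
+    # into a (name, value) pair on '=' or ':':
+    #
+    #   tm.parse_tags( 'rna-seq; lab:smith' )
+    #   # -> { 'rna-seq': None, 'lab': 'smith' }
+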
+    def _scrub_tag_value( self, value ):
+        """Scrub a tag value."""
+        # Gracefully handle None:
+        if not value:
+            return None
+        # Remove whitespace from value.
+        reg_exp = re.compile( r'\s' )
+        scrubbed_value = re.sub( reg_exp, "", value )
+        return scrubbed_value
+
+    def _scrub_tag_name( self, name ):
+        """Scrub a tag name."""
+        # Gracefully handle None:
+        if not name:
+            return None
+        # Remove whitespace from name.
+        reg_exp = re.compile( r'\s' )
+        scrubbed_name = re.sub( reg_exp, "", name )
+        # Ignore a leading hierarchy separator character.
+        if scrubbed_name.startswith( self.hierarchy_separator ):
+            scrubbed_name = scrubbed_name[1:]
+        # If name is too short or too long, return None.
+        if len( scrubbed_name ) < self.min_tag_len or len( scrubbed_name ) > self.max_tag_len:
+            return None
+        return scrubbed_name
+
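+    # Editorial sketch: scrubbing strips all whitespace and a leading
+    # hierarchy separator, and rejects names outside 2-255 characters:
+    #
+    #   tm._scrub_tag_name( ' my tag ' )  # -> 'mytag'
+    #   tm._scrub_tag_name( 'x' )         # -> None (too short)
+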
+    def _scrub_tag_name_list( self, tag_name_list ):
+        """Scrub a tag name list."""
+        scrubbed_tag_list = list()
+        for tag in tag_name_list:
+            scrubbed_tag_list.append( self._scrub_tag_name( tag ) )
+        return scrubbed_tag_list
+
+    def _get_name_value_pair( self, tag_str ):
+        """Get name, value pair from a tag string."""
+        # Use regular expression to parse name, value.
+        reg_exp = re.compile( "[" + self.key_value_separators + "]" )
+        name_value_pair = reg_exp.split( tag_str )
+        # Add empty slot if tag does not have value.
+        if len( name_value_pair ) < 2:
+            name_value_pair.append( None )
+        return name_value_pair
+
+
+class GalaxyTagManager( TagManager ):
+    def __init__( self, app ):
+        from galaxy import model
+        TagManager.__init__( self, app )
+        self.item_tag_assoc_info["History"] = ItemTagAssocInfo( model.History,
+                                                                model.HistoryTagAssociation,
+                                                                model.HistoryTagAssociation.table.c.history_id )
+        self.item_tag_assoc_info["HistoryDatasetAssociation"] = \
+            ItemTagAssocInfo( model.HistoryDatasetAssociation,
+                              model.HistoryDatasetAssociationTagAssociation,
+                              model.HistoryDatasetAssociationTagAssociation.table.c.history_dataset_association_id )
+        self.item_tag_assoc_info["Page"] = ItemTagAssocInfo( model.Page,
+                                                             model.PageTagAssociation,
+                                                             model.PageTagAssociation.table.c.page_id )
+        self.item_tag_assoc_info["StoredWorkflow"] = ItemTagAssocInfo( model.StoredWorkflow,
+                                                                       model.StoredWorkflowTagAssociation,
+                                                                       model.StoredWorkflowTagAssociation.table.c.stored_workflow_id )
+        self.item_tag_assoc_info["Visualization"] = ItemTagAssocInfo( model.Visualization,
+                                                                      model.VisualizationTagAssociation,
+                                                                      model.VisualizationTagAssociation.table.c.visualization_id )
+
+
+class CommunityTagManager( TagManager):
+    def __init__( self, app ):
+        TagManager.__init__( self, app )
diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py
new file mode 100644
index 0000000..b29d48e
--- /dev/null
+++ b/lib/galaxy/managers/users.py
@@ -0,0 +1,373 @@
+"""
+Manager and Serializer for Users.
+"""
+
+import sqlalchemy
+
+from galaxy import model
+from galaxy import exceptions
+from galaxy import util
+
+from galaxy.managers import base
+from galaxy.managers import deletable
+from galaxy.managers import api_keys
+from galaxy.security import validate_user_input
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class UserManager( base.ModelManager, deletable.PurgableManagerMixin ):
+    model_class = model.User
+    foreign_key_name = 'user'
+
+    # TODO: there is quite a bit of functionality around the user (authentication, permissions, quotas, groups/roles)
+    #   most of which may be unnecessary to have here
+
+    # TODO: incorp BaseAPIController.validate_in_users_and_groups
+    # TODO: incorp CreatesUsersMixin
+    # TODO: incorp CreatesApiKeysMixin
+    # TODO: incorporate UsesFormDefinitionsMixin?
+
+    def create( self, webapp_name=None, **kwargs ):
+        """
+        Create a new user.
+        """
+        # TODO: deserialize and validate here
+        email = kwargs[ 'email' ]
+        username = kwargs[ 'username' ]
+        password = kwargs[ 'password' ]
+        self._error_on_duplicate_email( email )
+
+        user = model.User( email=email, password=password )
+        user.username = username
+
+        if self.app.config.user_activation_on:
+            user.active = False
+        else:
+            # Activation is off, every new user is active by default.
+            user.active = True
+
+        self.session().add( user )
+        try:
+            self.session().flush()
+            # TODO:?? flush needed for permissions below? If not, make optional
+        except sqlalchemy.exc.IntegrityError as db_err:
+            raise exceptions.Conflict( str( db_err ) )
+
+        # the flush above can throw a sqlalchemy.exc.IntegrityError if the username is not unique
+
+        self.app.security_agent.create_private_user_role( user )
+        if webapp_name == 'galaxy':
+            # We set default user permissions, before we log in and set the default history permissions
+            permissions = self.app.config.new_user_dataset_access_role_default_private
+            self.app.security_agent.user_set_default_permissions( user, default_access_private=permissions )
+        return user
+
+    def delete(self, user):
+        user.deleted = True
+        self.session().add(user)
+        self.session().flush()
+
+    def _error_on_duplicate_email( self, email ):
+        """
+        Check for a duplicate email and raise if found.
+
+        :raises exceptions.Conflict: if any are found
+        """
+        # TODO: remove this check when unique=True is added to the email column
+        if self.by_email( email ) is not None:
+            raise exceptions.Conflict( 'Email must be unique', email=email )
+
+    # ---- filters
+    def by_email( self, email, filters=None, **kwargs ):
+        """
+        Find a user by their email.
+        """
+        filters = self._munge_filters( self.model_class.email == email, filters )
+        try:
+            # TODO: use one_or_none
+            return super( UserManager, self ).one( filters=filters, **kwargs )
+        except exceptions.ObjectNotFound:
+            return None
+
+    def by_email_like( self, email_with_wildcards, filters=None, order_by=None, **kwargs ):
+        """
+        Find a user searching with SQL wildcards.
+        """
+        filters = self._munge_filters( self.model_class.email.like( email_with_wildcards ), filters )
+        order_by = order_by or ( model.User.email, )
+        return super( UserManager, self ).list( filters=filters, order_by=order_by, **kwargs )
+
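+    # Editorial sketch (assumes an initialized `user_manager`): standard SQL
+    # LIKE wildcards pass straight through to the query:
+    #
+    #   user_manager.by_email_like( '%@example.org' )
+    #   # -> list of matching Users, ordered by email
+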
+    # ---- admin
+    def is_admin( self, user ):
+        """
+        Return True if this user is an admin.
+        """
+        admin_emails = self._admin_emails()
+        return user and admin_emails and user.email in admin_emails
+
+    def _admin_emails( self ):
+        """
+        Return a list of admin email addresses from the config file.
+        """
+        return [ email.strip() for email in self.app.config.get( "admin_users", "" ).split( "," ) ]
+
+    def admins( self, filters=None, **kwargs ):
+        """
+        Return a list of admin Users.
+        """
+        filters = self._munge_filters( self.model_class.email.in_( self._admin_emails() ), filters )
+        return super( UserManager, self ).list( filters=filters, **kwargs )
+
+    def error_unless_admin( self, user, msg="Administrators only", **kwargs ):
+        """
+        Raise an error if `user` is not an admin.
+
+        :raises exceptions.AdminRequiredException: if `user` is not an admin.
+        """
+        # useful in admin only methods
+        if not self.is_admin( user ):
+            raise exceptions.AdminRequiredException( msg, **kwargs )
+        return user
+
+    # ---- anonymous
+    def is_anonymous( self, user ):
+        """
+        Return True if `user` is anonymous.
+        """
+        # define here for single point of change and make more readable
+        return user is None
+
+    def error_if_anonymous( self, user, msg="Log-in required", **kwargs ):
+        """
+        Raise an error if `user` is anonymous.
+        """
+        if user is None:
+            # TODO: code is correct (401) but should be named AuthenticationRequired (401 and 403 are flipped)
+            raise exceptions.AuthenticationFailed( msg, **kwargs )
+        return user
+
+    # ---- current
+    def current_user( self, trans ):
+        # define here for single point of change and make more readable
+        # TODO: trans
+        return trans.user
+
+    # ---- api keys
+    def create_api_key( self, user ):
+        """
+        Create and return an API key for `user`.
+        """
+        # TODO: seems like this should return the model
+        return api_keys.ApiKeyManager( self.app ).create_api_key( user )
+
+    # TODO: possibly move to ApiKeyManager
+    def valid_api_key( self, user ):
+        """
+        Return the most recent APIKey for this user, or None if none have been created.
+        """
+        query = ( self.session().query( model.APIKeys )
+                  .filter_by( user=user )
+                  .order_by( sqlalchemy.desc( model.APIKeys.create_time ) ) )
+        all_keys = query.all()
+        if all_keys:
+            return all_keys[0]
+        return None
+
+    # TODO: possibly move to ApiKeyManager
+    def get_or_create_valid_api_key( self, user ):
+        """
+        Return the most recent APIKey for this user, or create one if none have been
+        created.
+        """
+        existing = self.valid_api_key( user )
+        if existing:
+            return existing
+        return self.create_api_key( user )
+
+    # ---- preferences
+    def preferences( self, user ):
+        return dict( (key, value) for key, value in user.preferences.items() )
+
+    # ---- roles and permissions
+    def private_role( self, user ):
+        return self.app.security_agent.get_private_user_role( user )
+
+    def sharing_roles( self, user ):
+        return self.app.security_agent.get_sharing_roles( user )
+
+    def default_permissions( self, user ):
+        return self.app.security_agent.user_get_default_permissions( user )
+
+    def quota( self, user ):
+        # TODO: use quota manager
+        return self.app.quota_agent.get_percent( user=user )
+
+    def tags_used( self, user, tag_models=None ):
+        """
+        Return a list of distinct 'user_tname:user_value' strings that the
+        given user has used.
+        """
+        # TODO: simplify and unify with tag manager
+        if self.is_anonymous( user ):
+            return []
+
+        # get all the taggable model TagAssociations
+        if not tag_models:
+            tag_models = [ v.tag_assoc_class for v in self.app.tag_handler.item_tag_assoc_info.values() ]
+        # create a union of subqueries, one per tag model, selecting only the tname and user_value for this user
+        all_tags_query = None
+        for tag_model in tag_models:
+            subq = ( self.session().query( tag_model.user_tname, tag_model.user_value )
+                     .filter( tag_model.user == user ) )
+            all_tags_query = subq if all_tags_query is None else all_tags_query.union( subq )
+
+        # if nothing init'd the query, bail
+        if all_tags_query is None:
+            return []
+
+        # boil the tag tuples down into a sorted list of DISTINCT name:val strings
+        tags = all_tags_query.distinct().all()
+        tags = [( ( name + ':' + val ) if val else name ) for name, val in tags ]
+        return sorted( tags )
+
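+    # Editorial sketch: the per-model subqueries are unioned, made distinct,
+    # and boiled down to sorted 'name' / 'name:value' strings:
+    #
+    #   user_manager.tags_used( user )
+    #   # -> [ 'lab:smith', 'rna-seq' ]
+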
+    def has_requests( self, user, trans ):
+        """
+        """
+        if self.is_anonymous( user ):
+            return False
+        request_types = self.app.security_agent.get_accessible_request_types( trans, user )
+        return bool( user.requests or request_types )
+
+
+class UserSerializer( base.ModelSerializer, deletable.PurgableSerializerMixin ):
+    model_manager_class = UserManager
+
+    def __init__( self, app ):
+        """
+        Convert a User and associated data to a dictionary representation.
+        """
+        super( UserSerializer, self ).__init__( app )
+        self.user_manager = self.manager
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [
+            'id', 'email', 'username'
+        ])
+        self.add_view( 'detailed', [
+            # 'update_time',
+            # 'create_time',
+            'is_admin',
+            'total_disk_usage',
+            'nice_total_disk_usage',
+            'quota_percent',
+            'deleted',
+            'purged',
+            # 'active',
+
+            'preferences',
+            #  all tags
+            'tags_used',
+            # all annotations
+            # 'annotations'
+        ], include_keys_from='summary' )
+
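+    # Editorial sketch (assuming the base serializer exposes the usual
+    # serialize_to_view helper): a view picks which of the keys below are
+    # serialized:
+    #
+    #   UserSerializer( app ).serialize_to_view( user, view='summary' )
+    #   # -> { 'id': <encoded id>, 'email': ..., 'username': ... }
+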
+    def add_serializers( self ):
+        super( UserSerializer, self ).add_serializers()
+        deletable.PurgableSerializerMixin.add_serializers( self )
+
+        self.serializers.update({
+            'id'            : self.serialize_id,
+            'create_time'   : self.serialize_date,
+            'update_time'   : self.serialize_date,
+            'is_admin'      : lambda i, k, **c: self.user_manager.is_admin( i ),
+
+            'preferences'   : lambda i, k, **c: self.user_manager.preferences( i ),
+
+            'total_disk_usage' : lambda i, k, **c: float( i.total_disk_usage ),
+            'quota_percent' : lambda i, k, **c: self.user_manager.quota( i ),
+
+            'tags_used'     : lambda i, k, **c: self.user_manager.tags_used( i ),
+            'has_requests'  : lambda i, k, trans=None, **c: self.user_manager.has_requests( i, trans )
+        })
+
+
+class UserDeserializer( base.ModelDeserializer ):
+    """
+    Service object for validating and deserializing dictionaries that
+    update/alter users.
+    """
+    model_manager_class = UserManager
+
+    def add_deserializers( self ):
+        super( UserDeserializer, self ).add_deserializers()
+        self.deserializers.update({
+            'username'  : self.deserialize_username,
+        })
+
+    def deserialize_username( self, item, key, username, trans=None, **context ):
+        # TODO: validate_user_input requires trans and should(?) raise exceptions
+        # move validation to UserValidator and use self.app, exceptions instead
+        validation_error = validate_user_input.validate_publicname( trans, username, user=item )
+        if validation_error:
+            raise base.ModelDeserializingError( validation_error )
+        return self.default_deserializer( item, key, username, trans=trans, **context )
+
+
+class CurrentUserSerializer( UserSerializer ):
+    model_manager_class = UserManager
+
+    def serialize( self, user, keys, **kwargs ):
+        """
+        Override to return at least some usage info if user is anonymous.
+        """
+        kwargs[ 'current_user' ] = user
+        if self.user_manager.is_anonymous( user ):
+            return self.serialize_current_anonymous_user( user, keys, **kwargs )
+        return super( UserSerializer, self ).serialize( user, keys, **kwargs )
+
+    def serialize_current_anonymous_user( self, user, keys, trans=None, **kwargs ):
+        # use the current history if any to get usage stats for trans' anonymous user
+        # TODO: might be better as sep. Serializer class
+        usage = 0
+        percent = None
+
+        history = trans.history
+        if history:
+            usage = self.app.quota_agent.get_usage( trans, history=trans.history )
+            percent = self.app.quota_agent.get_percent( trans=trans, usage=usage )
+
+        # a very small subset of keys available
+        values = {
+            'id'                    : None,
+            'total_disk_usage'      : float( usage ),
+            'nice_total_disk_usage' : util.nice_size( usage ),
+            'quota_percent'         : percent,
+        }
+        serialized = {}
+        for key in keys:
+            if key in values:
+                serialized[ key ] = values[ key ]
+        return serialized
+
+
+class AdminUserFilterParser( base.ModelFilterParser, deletable.PurgableFiltersMixin ):
+    model_manager_class = UserManager
+    model_class = model.User
+
+    def _add_parsers( self ):
+        super( AdminUserFilterParser, self )._add_parsers()
+        deletable.PurgableFiltersMixin._add_parsers( self )
+
+        # PRECONDITION: user making the query has been verified as an admin
+        self.orm_filter_parsers.update({
+            'email'         : { 'op': ( 'eq', 'contains', 'like' ) },
+            'username'      : { 'op': ( 'eq', 'contains', 'like' ) },
+            'active'        : { 'op': ( 'eq', ) },
+            'disk_usage'    : { 'op': ( 'le', 'ge' ) }
+        })
+
+        self.fn_filter_parsers.update({
+        })
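+
+    # Editorial sketch: with these parsers registered, an admin API query
+    # such as q=email-contains&qv=example.org is resolved into an ORM filter
+    # roughly equivalent to:
+    #
+    #   session.query( model.User ).filter( model.User.email.contains( 'example.org' ) )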
diff --git a/lib/galaxy/managers/visualizations.py b/lib/galaxy/managers/visualizations.py
new file mode 100644
index 0000000..34368fb
--- /dev/null
+++ b/lib/galaxy/managers/visualizations.py
@@ -0,0 +1,77 @@
+"""
+Manager and Serializers for Visualizations.
+
+Visualizations are saved configurations/variables used to
+reproduce a specific view in a Galaxy visualization.
+"""
+
+from galaxy import model
+from galaxy.managers import sharable
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class VisualizationManager( sharable.SharableModelManager ):
+    """
+    Handle operations outside and between visualizations and other models.
+    """
+
+    # TODO: revisions
+
+    model_class = model.Visualization
+    foreign_key_name = 'visualization'
+    user_share_model = model.VisualizationUserShareAssociation
+
+    tag_assoc = model.VisualizationTagAssociation
+    annotation_assoc = model.VisualizationAnnotationAssociation
+    rating_assoc = model.VisualizationRatingAssociation
+
+    def __init__( self, app, *args, **kwargs ):
+        """
+        """
+        super( VisualizationManager, self ).__init__( app, *args, **kwargs )
+
+    # def copy( self, trans, visualization, user, **kwargs ):
+    #    """
+    #    """
+    #    pass
+
+
+class VisualizationSerializer( sharable.SharableModelSerializer ):
+    """
+    Interface/service object for serializing visualizations into dictionaries.
+    """
+    model_manager_class = VisualizationManager
+    SINGLE_CHAR_ABBR = 'v'
+
+    def __init__( self, app ):
+        super( VisualizationSerializer, self ).__init__( app )
+        self.visualization_manager = self.manager
+
+        self.default_view = 'summary'
+        self.add_view( 'summary', [] )
+        self.add_view( 'detailed', [] )
+
+    def add_serializers( self ):
+        super( VisualizationSerializer, self ).add_serializers()
+        self.serializers.update({
+        })
+
+
+class VisualizationDeserializer( sharable.SharableModelDeserializer ):
+    """
+    Interface/service object for validating and deserializing
+    dictionaries into visualizations.
+    """
+    model_manager_class = VisualizationManager
+
+    def __init__( self, app ):
+        super( VisualizationDeserializer, self ).__init__( app )
+        self.visualization_manager = self.manager
+
+    def add_deserializers( self ):
+        super( VisualizationDeserializer, self ).add_deserializers()
+        self.deserializers.update({
+        })
+        self.deserializable_keyset.update( self.deserializers.keys() )
diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py
new file mode 100644
index 0000000..dcdfa9a
--- /dev/null
+++ b/lib/galaxy/managers/workflows.py
@@ -0,0 +1,956 @@
+from __future__ import absolute_import
+
+from six import string_types
+
+from collections import namedtuple
+import logging
+import json
+import uuid
+
+from sqlalchemy import and_
+
+from galaxy import model
+from galaxy import util
+from galaxy import exceptions
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.workflow import modules
+from .base import decode_id
+
+# For WorkflowContentManager
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.workflow.steps import attach_ordered_steps
+from galaxy.workflow.modules import module_factory, is_tool_module_type, ToolModule, WorkflowModuleInjector, MissingToolException
+from galaxy.tools.parameters.basic import DataToolParameter, DataCollectionToolParameter, workflow_building_modes
+from galaxy.tools.parameters import visit_input_values, params_to_incoming
+from galaxy.jobs.actions.post import ActionBox
+from galaxy.web import url_for
+
+log = logging.getLogger( __name__ )
+
+
+class WorkflowsManager( object ):
+    """ Handle CRUD type operaitons related to workflows. More interesting
+    stuff regarding workflow execution, step sorting, etc... can be found in
+    the galaxy.workflow module.
+    """
+
+    def __init__( self, app ):
+        self.app = app
+
+    def get_stored_workflow( self, trans, workflow_id ):
+        """ Use a supplied ID (UUID or encoded stored workflow ID) to find
+        a workflow.
+        """
+        if util.is_uuid(workflow_id):
+            # see if they have passed in the UUID for a workflow that is attached to a stored workflow
+            workflow_uuid = uuid.UUID(workflow_id)
+            stored_workflow = trans.sa_session.query(trans.app.model.StoredWorkflow).filter( and_(
+                trans.app.model.StoredWorkflow.latest_workflow_id == trans.app.model.Workflow.id,
+                trans.app.model.Workflow.uuid == workflow_uuid
+            )).first()
+            if stored_workflow is None:
+                raise exceptions.ObjectNotFound( "Workflow not found: %s" % workflow_id )
+        else:
+            workflow_id = decode_id( self.app, workflow_id )
+            query = trans.sa_session.query( trans.app.model.StoredWorkflow )
+            stored_workflow = query.get( workflow_id )
+        if stored_workflow is None:
+            raise exceptions.ObjectNotFound( "No such workflow found." )
+        return stored_workflow
+
+    def get_stored_accessible_workflow( self, trans, workflow_id ):
+        """ Get a stored workflow from a encoded stored workflow id and
+        make sure it accessible to the user.
+        """
+        stored_workflow = self.get_stored_workflow( trans, workflow_id )
+
+        # check to see if user has permissions to selected workflow
+        if stored_workflow.user != trans.user and not trans.user_is_admin():
+            if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
+                message = "Workflow is not owned by or shared with current user"
+                raise exceptions.ItemAccessibilityException( message )
+
+        return stored_workflow
+
+    def get_owned_workflow( self, trans, encoded_workflow_id ):
+        """ Get a workflow (non-stored) from a encoded workflow id and
+        make sure it accessible to the user.
+        """
+        workflow_id = decode_id( self.app, encoded_workflow_id )
+        workflow = trans.sa_session.query( model.Workflow ).get( workflow_id )
+        self.check_security( trans, workflow, check_ownership=True )
+        return workflow
+
+    def check_security( self, trans, has_workflow, check_ownership=True, check_accessible=True):
+        """ check accessibility or ownership of workflows, storedworkflows, and
+        workflowinvocations. Throw an exception or returns True if user has
+        needed level of access.
+        """
+        if not check_ownership and not check_accessible:
+            return True
+
+        # If given an invocation follow to workflow...
+        if isinstance( has_workflow, model.WorkflowInvocation ):
+            has_workflow = has_workflow.workflow
+
+        # The stored workflow holds the security information - follow the
+        # workflow to its stored workflow unless we were given one directly.
+        if isinstance( has_workflow, model.Workflow ):
+            stored_workflow = has_workflow.top_level_stored_workflow
+        else:
+            stored_workflow = has_workflow
+
+        if stored_workflow.user != trans.user and not trans.user_is_admin():
+            if check_ownership:
+                raise exceptions.ItemOwnershipException()
+            # else check_accessible...
+            if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ).filter_by(user=trans.user, stored_workflow=stored_workflow ).count() == 0:
+                raise exceptions.ItemAccessibilityException()
+
+        return True
+
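+    # Editorial sketch: typical call sites in this module assert ownership
+    # only, e.g.:
+    #
+    #   self.check_security( trans, workflow_invocation,
+    #                        check_ownership=True, check_accessible=False )
+    #   # raises ItemOwnershipException unless trans.user owns it (or is admin)
+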
+    def get_invocation( self, trans, decoded_invocation_id ):
+        try:
+            workflow_invocation = trans.sa_session.query(
+                self.app.model.WorkflowInvocation
+            ).get( decoded_invocation_id )
+        except Exception:
+            raise exceptions.ObjectNotFound()
+        self.check_security( trans, workflow_invocation, check_ownership=True, check_accessible=False )
+        return workflow_invocation
+
+    def cancel_invocation( self, trans, decoded_invocation_id ):
+        workflow_invocation = self.get_invocation( trans, decoded_invocation_id )
+        cancelled = workflow_invocation.cancel()
+
+        if cancelled:
+            trans.sa_session.add( workflow_invocation )
+            trans.sa_session.flush()
+        else:
+            # TODO: More specific exception?
+            raise exceptions.MessageException( "Cannot cancel an inactive workflow invocation." )
+
+        return workflow_invocation
+
+    def get_invocation_step( self, trans, decoded_workflow_invocation_step_id ):
+        try:
+            workflow_invocation_step = trans.sa_session.query(
+                model.WorkflowInvocationStep
+            ).get( decoded_workflow_invocation_step_id )
+        except Exception:
+            raise exceptions.ObjectNotFound()
+        self.check_security( trans, workflow_invocation_step.workflow_invocation, check_ownership=True, check_accessible=False )
+        return workflow_invocation_step
+
+    def update_invocation_step( self, trans, decoded_workflow_invocation_step_id, action ):
+        if action is None:
+            raise exceptions.RequestParameterMissingException( "Updating a workflow invocation step requires an action parameter." )
+
+        workflow_invocation_step = self.get_invocation_step( trans, decoded_workflow_invocation_step_id )
+        workflow_invocation = workflow_invocation_step.workflow_invocation
+        if not workflow_invocation.active:
+            raise exceptions.RequestParameterInvalidException( "Attempting to modify the state of a completed workflow invocation." )
+
+        step = workflow_invocation_step.workflow_step
+        module = modules.module_factory.from_workflow_step( trans, step )
+        performed_action = module.do_invocation_step_action( step, action )
+        workflow_invocation_step.action = performed_action
+        trans.sa_session.add( workflow_invocation_step )
+        trans.sa_session.flush()
+        return workflow_invocation_step
+
+    def build_invocations_query( self, trans, decoded_stored_workflow_id ):
+        try:
+            stored_workflow = trans.sa_session.query(
+                self.app.model.StoredWorkflow
+            ).get( decoded_stored_workflow_id )
+        except Exception:
+            raise exceptions.ObjectNotFound()
+        self.check_security( trans, stored_workflow, check_ownership=True, check_accessible=False )
+        return trans.sa_session.query(
+            model.WorkflowInvocation
+        ).filter_by(
+            workflow_id=stored_workflow.latest_workflow_id
+        )
+
+
+CreatedWorkflow = namedtuple("CreatedWorkflow", ["stored_workflow", "workflow", "missing_tools"])
+
+
+class WorkflowContentsManager(UsesAnnotations):
+
+    def __init__(self, app):
+        self.app = app
+
+    def build_workflow_from_dict(
+        self,
+        trans,
+        data,
+        source=None,
+        add_to_menu=False,
+        publish=False,
+        create_stored_workflow=True,
+    ):
+        # Put parameters in workflow mode
+        trans.workflow_building_mode = True
+        # If there's a source, put it in the workflow name.
+        if source:
+            name = "%s (imported from %s)" % ( data['name'], source )
+        else:
+            name = data['name']
+        workflow, missing_tool_tups = self._workflow_from_dict(
+            trans,
+            data,
+            name=name,
+        )
+        if 'uuid' in data:
+            workflow.uuid = data['uuid']
+
+        if create_stored_workflow:
+            # Connect up
+            stored = model.StoredWorkflow()
+            stored.name = workflow.name
+            workflow.stored_workflow = stored
+            stored.latest_workflow = workflow
+            stored.user = trans.user
+            stored.published = publish
+            if data[ 'annotation' ]:
+                annotation = sanitize_html( data[ 'annotation' ], 'utf-8', 'text/html' )
+                self.add_item_annotation( trans.sa_session, stored.user, stored, annotation )
+
+            # Persist
+            trans.sa_session.add( stored )
+
+            if add_to_menu:
+                if trans.user.stored_workflow_menu_entries is None:
+                    trans.user.stored_workflow_menu_entries = []
+                menuEntry = model.StoredWorkflowMenuEntry()
+                menuEntry.stored_workflow = stored
+                trans.user.stored_workflow_menu_entries.append( menuEntry )
+
+        else:
+            stored = None
+            # Persist
+            trans.sa_session.add( workflow )
+
+        trans.sa_session.flush()
+
+        return CreatedWorkflow(
+            stored_workflow=stored,
+            workflow=workflow,
+            missing_tools=missing_tool_tups
+        )
+
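+    # Editorial sketch: callers receive a CreatedWorkflow namedtuple, e.g.
+    # (assuming `trans` and a workflow dict `data`):
+    #
+    #   created = self.build_workflow_from_dict( trans, data, source='API' )
+    #   created.stored_workflow, created.workflow, created.missing_tools
+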
+    def update_workflow_from_dict(self, trans, stored_workflow, workflow_data):
+        # Put parameters in workflow mode
+        trans.workflow_building_mode = True
+
+        workflow, missing_tool_tups = self._workflow_from_dict(
+            trans,
+            workflow_data,
+            name=stored_workflow.name,
+        )
+
+        if missing_tool_tups:
+            errors = []
+            for missing_tool_tup in missing_tool_tups:
+                errors.append("Step %s requires tool '%s'." % (missing_tool_tup[3], missing_tool_tup[0]))
+            raise MissingToolsException(workflow, errors)
+
+        # Connect up
+        workflow.stored_workflow = stored_workflow
+        stored_workflow.latest_workflow = workflow
+        # Persist
+        trans.sa_session.flush()
+        # Return something informative
+        errors = []
+        if workflow.has_errors:
+            errors.append( "Some steps in this workflow have validation errors" )
+        if workflow.has_cycles:
+            errors.append( "This workflow contains cycles" )
+        return workflow, errors
+
+    def _workflow_from_dict(self, trans, data, name):
+        if isinstance(data, string_types):
+            data = json.loads(data)
+
+        # Create new workflow from source data
+        workflow = model.Workflow()
+
+        workflow.name = name
+
+        # Assume no errors until we find a step that has some
+        workflow.has_errors = False
+        # Create each step
+        steps = []
+        # The editor will provide ids for each step that we don't need to save,
+        # but do need to use to make connections
+        steps_by_external_id = {}
+
+        # Keep track of tools required by the workflow that are not available in
+        # the local Galaxy instance.  Each tuple in the list of missing_tool_tups
+        # will be ( tool_id, tool_name, tool_version ).
+        missing_tool_tups = []
+
+        for step_dict in self.__walk_step_dicts( data ):
+            module, step = self.__track_module_from_dict( trans, steps, steps_by_external_id, step_dict )
+            is_tool = is_tool_module_type( module.type )
+            if is_tool and module.tool is None:
+                # A required tool is not available in the local Galaxy instance.
+                tool_id = step_dict.get('content_id', step_dict.get('tool_id', None))
+                assert tool_id is not None  # Threw an exception elsewhere if not
+
+                missing_tool_tup = ( tool_id, step_dict[ 'name' ], step_dict[ 'tool_version' ], step_dict[ 'id'] )
+                if missing_tool_tup not in missing_tool_tups:
+                    missing_tool_tups.append( missing_tool_tup )
+
+                # Save the entire step_dict in the unused config field; it will
+                # be parsed later, once the tool is available.
+                step.config = json.dumps(step_dict)
+
+            if step.tool_errors:
+                workflow.has_errors = True
+
+        # Second pass to deal with connections between steps
+        self.__connect_workflow_steps( steps, steps_by_external_id )
+
+        # Order the steps if possible
+        attach_ordered_steps( workflow, steps )
+
+        return workflow, missing_tool_tups
+
+    def workflow_to_dict( self, trans, stored, style="export" ):
+        """ Export the workflow contents to a dictionary ready for JSON-ification and to be
+        sent out via API for instance. There are three styles of export allowed 'export', 'instance', and
+        'editor'. The Galaxy team will do it best to preserve the backward compatibility of the
+        'export' stye - this is the export method meant to be portable across Galaxy instances and over
+        time. The 'editor' style is subject to rapid and unannounced changes. The 'instance' export
+        option describes the workflow in a context more tied to the current Galaxy instance and includes
+        fields like 'url' and 'url' and actual unencoded step ids instead of 'order_index'.
+        """
+        if style == "editor":
+            return self._workflow_to_dict_editor( trans, stored )
+        elif style == "legacy":
+            return self._workflow_to_dict_instance( stored, legacy=True )
+        elif style == "instance":
+            return self._workflow_to_dict_instance( stored, legacy=False )
+        elif style == "run":
+            return self._workflow_to_dict_run( trans, stored )
+        else:
+            return self._workflow_to_dict_export( trans, stored )
+
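+    # Editorial sketch: 'export' is the portable style, e.g.:
+    #
+    #   wf_dict = self.workflow_to_dict( trans, stored )  # style defaults to 'export'
+    #   json.dumps( wf_dict )  # suitable for import into another Galaxy
+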
+    def _workflow_to_dict_run( self, trans, stored ):
+        """
+        Build the workflow dictionary used by the run workflow form.
+        """
+        workflow = stored.latest_workflow
+        if len( workflow.steps ) == 0:
+            raise exceptions.MessageException( 'Workflow cannot be run because it does not have any steps.' )
+        if workflow.has_cycles:
+            raise exceptions.MessageException( 'Workflow cannot be run because it contains cycles.' )
+        trans.workflow_building_mode = workflow_building_modes.USE_HISTORY
+        module_injector = WorkflowModuleInjector( trans )
+        has_upgrade_messages = False
+        step_version_changes = []
+        missing_tools = []
+        errors = {}
+        for step in workflow.steps:
+            try:
+                module_injector.inject( step, steps=workflow.steps )
+            except MissingToolException:
+                if step.tool_id not in missing_tools:
+                    missing_tools.append( step.tool_id )
+                continue
+            if step.upgrade_messages:
+                has_upgrade_messages = True
+            if step.type == 'tool' or step.type is None:
+                if step.module.version_changes:
+                    step_version_changes.extend( step.module.version_changes )
+                if step.tool_errors:
+                    errors[ step.id ] = step.tool_errors
+        if missing_tools:
+            workflow.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, workflow )
+            raise exceptions.MessageException( 'The following tools are missing: %s' % missing_tools )
+        workflow.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, workflow )
+        step_order_indices = {}
+        for step in workflow.steps:
+            step_order_indices[ step.id ] = step.order_index
+        step_models = []
+        for i, step in enumerate( workflow.steps ):
+            step_model = None
+            if step.type == 'tool':
+                incoming = {}
+                tool = trans.app.toolbox.get_tool( step.tool_id )
+                params_to_incoming( incoming, tool.inputs, step.state.inputs, trans.app )
+                step_model = tool.to_json( trans, incoming, workflow_building_mode=workflow_building_modes.USE_HISTORY )
+                step_model[ 'post_job_actions' ] = [{
+                    'short_str'         : ActionBox.get_short_str( pja ),
+                    'action_type'       : pja.action_type,
+                    'output_name'       : pja.output_name,
+                    'action_arguments'  : pja.action_arguments
+                } for pja in step.post_job_actions ]
+            else:
+                inputs = step.module.get_runtime_inputs( connections=step.output_connections )
+                step_model = {
+                    'name'   : step.module.name,
+                    'inputs' : [ input.to_dict( trans ) for input in inputs.itervalues() ]
+                }
+            step_model[ 'step_type' ] = step.type
+            step_model[ 'step_index' ] = step.order_index
+            step_model[ 'output_connections' ] = [ {
+                'input_step_index'  : step_order_indices.get( oc.input_step_id ),
+                'output_step_index' : step_order_indices.get( oc.output_step_id ),
+                'input_name'        : oc.input_name,
+                'output_name'       : oc.output_name
+            } for oc in step.output_connections ]
+            if step.annotations:
+                step_model[ 'annotation' ] = step.annotations[ 0 ].annotation
+            if step.upgrade_messages:
+                step_model[ 'messages' ] = step.upgrade_messages
+            step_models.append( step_model )
+        return {
+            'id'                    : trans.app.security.encode_id( stored.id ),
+            'history_id'            : trans.app.security.encode_id( trans.history.id ) if trans.history else None,
+            'name'                  : stored.name,
+            'steps'                 : step_models,
+            'step_version_changes'  : step_version_changes,
+            'has_upgrade_messages'  : has_upgrade_messages
+        }
+
+    def _workflow_to_dict_editor(self, trans, stored):
+        """
+        """
+        workflow = stored.latest_workflow
+        # Pack workflow data into a dictionary and return
+        data = {}
+        data['name'] = workflow.name
+        data['steps'] = {}
+        data['upgrade_messages'] = {}
+        # For each step, rebuild the form and encode the state
+        for step in workflow.steps:
+            # Load from database representation
+            module = module_factory.from_workflow_step( trans, step )
+            if not module:
+                step_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, step )
+                annotation_str = ""
+                if step_annotation:
+                    annotation_str = step_annotation.annotation
+                invalid_tool_form_html = """<div class="toolForm tool-node-error">
+                                            <div class="toolFormTitle form-row-error">Unrecognized Tool: %s</div>
+                                            <div class="toolFormBody"><div class="form-row">
+                                            The tool id '%s' for this tool is unrecognized.<br/><br/>
+                                            To save this workflow, you will need to delete this step or enable the tool.
+                                            </div></div></div>""" % (step.tool_id, step.tool_id)
+                step_dict = {
+                    'id': step.order_index,
+                    'type': 'invalid',
+                    'content_id': step.content_id,
+                    'name': 'Unrecognized Tool: %s' % step.tool_id,
+                    'tool_state': None,
+                    'tooltip': None,
+                    'tool_errors': ["Unrecognized Tool Id: %s" % step.tool_id],
+                    'data_inputs': [],
+                    'data_outputs': [],
+                    'form_html': invalid_tool_form_html,
+                    'annotation': annotation_str,
+                    'input_connections': {},
+                    'post_job_actions': {},
+                    'uuid': str(step.uuid),
+                    'label': step.label or None,
+                    'workflow_outputs': []
+                }
+                # Position
+                step_dict['position'] = step.position
+                # Add to return value
+                data['steps'][step.order_index] = step_dict
+                continue
+            # Fix any missing parameters
+            upgrade_message = module.check_and_update_state()
+            if upgrade_message:
+                data['upgrade_messages'][step.order_index] = upgrade_message
+            if hasattr(module, "version_changes") and module.version_changes:
+                if step.order_index in data['upgrade_messages']:
+                    data['upgrade_messages'][step.order_index][module.tool.name] = "\n".join(module.version_changes)
+                else:
+                    data['upgrade_messages'][step.order_index] = {module.tool.name: "\n".join(module.version_changes)}
+            # Get user annotation.
+            step_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, step )
+            annotation_str = ""
+            if step_annotation:
+                annotation_str = step_annotation.annotation
+            form_html = None
+            if trans.history:
+                # If in a web session, attach form html. No reason to do
+                # so for API requests.
+                form_html = module.get_config_form()
+            # Pack attributes into plain dictionary
+            step_dict = {
+                'id': step.order_index,
+                'type': module.type,
+                'content_id': module.get_content_id(),
+                'name': module.get_name(),
+                'tool_state': module.get_state(),
+                'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ),
+                'tool_errors': module.get_errors(),
+                'data_inputs': module.get_data_inputs(),
+                'data_outputs': module.get_data_outputs(),
+                'form_html': form_html,
+                'annotation': annotation_str,
+                'post_job_actions': {},
+                'uuid': str(step.uuid) if step.uuid else None,
+                'label': step.label or None,
+                'workflow_outputs': []
+            }
+            # Connections
+            input_connections = step.input_connections
+            input_connections_type = {}
+            multiple_input = {}  # Boolean value indicating if this input can be multiple
+            if step.type is None or step.type == 'tool':
+                # Determine full (prefixed) names of valid input datasets
+                data_input_names = {}
+
+                def callback( input, prefixed_name, **kwargs ):
+                    if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
+                        data_input_names[ prefixed_name ] = True
+                        multiple_input[ prefixed_name ] = input.multiple
+                        if isinstance( input, DataToolParameter ):
+                            input_connections_type[ input.name ] = "dataset"
+                        if isinstance( input, DataCollectionToolParameter ):
+                            input_connections_type[ input.name ] = "dataset_collection"
+                visit_input_values( module.tool.inputs, module.state.inputs, callback )
+                # Filter
+                # FIXME: this removes connections without displaying a message currently!
+                input_connections = [ conn for conn in input_connections if conn.input_name in data_input_names ]
+                # post_job_actions
+                pja_dict = {}
+                for pja in step.post_job_actions:
+                    pja_dict[pja.action_type + pja.output_name] = dict(
+                        action_type=pja.action_type,
+                        output_name=pja.output_name,
+                        action_arguments=pja.action_arguments
+                    )
+                step_dict['post_job_actions'] = pja_dict
+
+            # workflow outputs
+            outputs = []
+            for output in step.unique_workflow_outputs:
+                output_label = output.label
+                output_name = output.output_name
+                output_uuid = str(output.uuid) if output.uuid else None
+                outputs.append({"output_name": output_name,
+                                "uuid": output_uuid,
+                                "label": output_label})
+            step_dict['workflow_outputs'] = outputs
+
+            # Encode input connections as dictionary
+            input_conn_dict = {}
+            for conn in input_connections:
+                input_type = "dataset"
+                if conn.input_name in input_connections_type:
+                    input_type = input_connections_type[ conn.input_name ]
+                conn_dict = dict( id=conn.output_step.order_index, output_name=conn.output_name, input_type=input_type )
+                if conn.input_name in multiple_input:
+                    if conn.input_name in input_conn_dict:
+                        input_conn_dict[ conn.input_name ].append( conn_dict )
+                    else:
+                        input_conn_dict[ conn.input_name ] = [ conn_dict ]
+                else:
+                    input_conn_dict[ conn.input_name ] = conn_dict
+            step_dict['input_connections'] = input_conn_dict
+            # Position
+            step_dict['position'] = step.position
+            # Add to return value
+            data['steps'][step.order_index] = step_dict
+        return data
+
+    def _workflow_to_dict_export( self, trans, stored=None, workflow=None ):
+        """ Export the workflow contents to a dictionary ready for JSON-ification and export.
+        """
+        if workflow is None:
+            assert stored is not None
+            workflow = stored.latest_workflow
+
+        annotation_str = ""
+        if stored is not None:
+            workflow_annotation = self.get_item_annotation_obj( trans.sa_session, trans.user, stored )
+            if workflow_annotation:
+                annotation_str = workflow_annotation.annotation
+        # Pack workflow data into a dictionary and return
+        data = {}
+        data['a_galaxy_workflow'] = 'true'  # Placeholder for identifying galaxy workflow
+        data['format-version'] = "0.1"
+        data['name'] = workflow.name
+        data['annotation'] = annotation_str
+        if workflow.uuid is not None:
+            data['uuid'] = str(workflow.uuid)
+        data['steps'] = {}
+        # For each step, rebuild the form and encode the state
+        for step in workflow.steps:
+            # Load from database representation
+            module = module_factory.from_workflow_step( trans, step )
+            if not module:
+                return None
+            # Get user annotation.
+            step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step )
+            annotation_str = ""
+            if step_annotation:
+                annotation_str = step_annotation.annotation
+            content_id = module.get_content_id()
+            # Step info
+            step_dict = {
+                'id': step.order_index,
+                'type': module.type,
+                'content_id': content_id,
+                'tool_id': content_id,  # For workflows exported to older Galaxies,
+                                        # eliminate after a few years...
+                'tool_version': step.tool_version,
+                'name': module.get_name(),
+                'tool_state': module.get_state(),
+                'tool_errors': module.get_errors(),
+                'uuid': str(step.uuid),
+                'label': step.label or None,
+                # 'data_inputs': module.get_data_inputs(),
+                # 'data_outputs': module.get_data_outputs(),
+                'annotation': annotation_str
+            }
+            # Add tool shed repository information and post-job actions to step dict.
+            if module.type == 'tool':
+                if module.tool.tool_shed_repository:
+                    tsr = module.tool.tool_shed_repository
+                    step_dict["tool_shed_repository"] = {
+                        'name': tsr.name,
+                        'owner': tsr.owner,
+                        'changeset_revision': tsr.changeset_revision,
+                        'tool_shed': tsr.tool_shed
+                    }
+                pja_dict = {}
+                for pja in step.post_job_actions:
+                    pja_dict[pja.action_type + pja.output_name] = dict(
+                        action_type=pja.action_type,
+                        output_name=pja.output_name,
+                        action_arguments=pja.action_arguments )
+                step_dict[ 'post_job_actions' ] = pja_dict
+
+            if module.type == 'subworkflow':
+                del step_dict['content_id']
+                del step_dict['tool_version']
+                del step_dict['tool_state']
+                del step_dict['tool_errors']
+                subworkflow = step.subworkflow
+                subworkflow_as_dict = self._workflow_to_dict_export(
+                    trans,
+                    stored=None,
+                    workflow=subworkflow
+                )
+                step_dict['subworkflow'] = subworkflow_as_dict
+
+            # Data inputs
+            step_dict['inputs'] = module.get_runtime_input_dicts( annotation_str )
+            # User outputs
+
+            workflow_outputs_dicts = []
+            for workflow_output in step.unique_workflow_outputs:
+                workflow_output_dict = dict(
+                    output_name=workflow_output.output_name,
+                    label=workflow_output.label,
+                    uuid=str(workflow_output.uuid) if workflow_output.uuid is not None else None,
+                )
+                workflow_outputs_dicts.append(workflow_output_dict)
+            step_dict['workflow_outputs'] = workflow_outputs_dicts
+
+            # All step outputs
+            step_dict['outputs'] = []
+            if type( module ) is ToolModule:
+                for output in module.get_data_outputs():
+                    step_dict['outputs'].append( { 'name': output['name'], 'type': output['extensions'][0] } )
+
+            # Connections
+            input_connections = step.input_connections
+            if step.type is None or step.type == 'tool':
+                # Determine full (prefixed) names of valid input datasets
+                data_input_names = {}
+
+                def callback( input, prefixed_name, **kwargs ):
+                    if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
+                        data_input_names[ prefixed_name ] = True
+                # FIXME: this updates modules silently right now; messages from updates should be provided.
+                module.check_and_update_state()
+                visit_input_values( module.tool.inputs, module.state.inputs, callback )
+                # Filter
+                # FIXME: this currently removes connections without displaying a message!
+                input_connections = [ conn for conn in input_connections if (conn.input_name in data_input_names or conn.non_data_connection) ]
+
+            # Encode input connections as dictionary
+            input_conn_dict = {}
+            unique_input_names = set( [conn.input_name for conn in input_connections] )
+            for input_name in unique_input_names:
+                input_conn_dicts = []
+                for conn in input_connections:
+                    if conn.input_name != input_name:
+                        continue
+                    input_conn = dict(
+                        id=conn.output_step.order_index,
+                        output_name=conn.output_name
+                    )
+                    if conn.input_subworkflow_step is not None:
+                        subworkflow_step_id = conn.input_subworkflow_step.order_index
+                        input_conn["input_subworkflow_step_id"] = subworkflow_step_id
+
+                    input_conn_dicts.append(input_conn)
+                input_conn_dict[ input_name ] = input_conn_dicts
+
+            # Preserve backward compatibility. Previously Galaxy
+            # assumed input connections would be dictionaries, not
+            # lists of dictionaries, so replace any singleton list
+            # with just the dictionary so that workflows exported from
+            # newer Galaxy instances can be used with older Galaxy
+            # instances if they do not include multiple-input
+            # tools. This should be removed at some point. The mirrored
+            # hack in _workflow_from_dict should never be removed so
+            # existing workflow exports continue to function.
+            for input_name, input_conn in dict(input_conn_dict).iteritems():
+                if len(input_conn) == 1:
+                    input_conn_dict[input_name] = input_conn[0]
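+            # e.g. (illustrative values) {'input1': [{'id': 0, 'output_name':
+            # 'out_file1'}]} flattens to {'input1': {'id': 0, 'output_name':
+            # 'out_file1'}}; true multi-input connections keep the list form.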
+            step_dict['input_connections'] = input_conn_dict
+            # Position
+            step_dict['position'] = step.position
+            # Add to return value
+            data['steps'][step.order_index] = step_dict
+        return data
+
+    def _workflow_to_dict_instance(self, stored, legacy=True):
+        encode = self.app.security.encode_id
+        sa_session = self.app.model.context
+        item = stored.to_dict( view='element', value_mapper={ 'id': encode } )
+        workflow = stored.latest_workflow
+        item['url'] = url_for('workflow', id=item['id'])
+        item['owner'] = stored.user.username
+        inputs = {}
+        for step in workflow.input_steps:
+            step_type = step.type
+            if step.tool_inputs and "name" in step.tool_inputs:
+                label = step.tool_inputs['name']
+            elif step_type == "data_input":
+                label = "Input Dataset"
+            elif step_type == "data_collection_input":
+                label = "Input Dataset Collection"
+            else:
+                raise ValueError("Invalid step_type %s" % step_type)
+            if legacy:
+                index = step.id
+            else:
+                index = step.order_index
+            step_uuid = str(step.uuid) if step.uuid else None
+            inputs[index] = {'label': label, 'value': "", "uuid": step_uuid}
+        item['inputs'] = inputs
+        item['annotation'] = self.get_item_annotation_str( sa_session, stored.user, stored )
+        steps = {}
+        steps_to_order_index = {}
+        for step in workflow.steps:
+            steps_to_order_index[step.id] = step.order_index
+        for step in workflow.steps:
+            step_uuid = str(step.uuid) if step.uuid else None
+            step_id = step.id if legacy else step.order_index
+            step_type = step.type
+            step_dict = {'id': step_id,
+                         'type': step_type,
+                         'tool_id': step.tool_id,
+                         'tool_version': step.tool_version,
+                         'annotation': self.get_item_annotation_str( sa_session, stored.user, step ),
+                         'tool_inputs': step.tool_inputs,
+                         'input_steps': {}}
+
+            if step_type == 'subworkflow':
+                del step_dict['tool_id']
+                del step_dict['tool_version']
+                del step_dict['tool_inputs']
+                subworkflow = step.subworkflow
+                step_dict['stored_workflow_id'] = encode(subworkflow.stored_workflow.id)
+                step_dict['workflow_id'] = encode(subworkflow.id)
+
+            for conn in step.input_connections:
+                step_id = step.id if legacy else step.order_index
+                source_id = conn.output_step_id
+                source_step = source_id if legacy else steps_to_order_index[source_id]
+                step_dict['input_steps'][conn.input_name] = {'source_step': source_step,
+                                                             'step_output': conn.output_name}
+
+            steps[step_id] = step_dict
+
+        item['steps'] = steps
+        return item
+
+    def __walk_step_dicts( self, data ):
+        """ Walk over the supplid step dictionaries and return them in a way designed
+        to preserve step order when possible.
+        """
+        supplied_steps = data[ 'steps' ]
+        # Try to iterate through imported workflow in such a way as to
+        # preserve step order.
+        step_indices = supplied_steps.keys()
+        try:
+            step_indices = sorted( step_indices, key=int )
+        except ValueError:
+            # Too defensive; were these ever, or will they ever, not be integers?
+            pass
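+        # e.g. keys ['10', '2', '1'] sort numerically to ['1', '2', '10']
+        # rather than the lexicographic ['1', '10', '2'] (illustrative).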
+
+        discovered_labels = set()
+        discovered_uuids = set()
+
+        discovered_output_labels = set()
+        discovered_output_uuids = set()
+
+        # First pass to build step objects and populate basic values
+        for step_index in step_indices:
+            step_dict = supplied_steps[ step_index ]
+            uuid = step_dict.get("uuid", None)
+            if uuid and uuid != "None":
+                if uuid in discovered_uuids:
+                    raise exceptions.DuplicatedIdentifierException("Duplicate step UUID in request.")
+                discovered_uuids.add(uuid)
+            label = step_dict.get("label", None)
+            if label:
+                if label in discovered_labels:
+                    raise exceptions.DuplicatedIdentifierException("Duplicated step label in request.")
+                discovered_labels.add(label)
+
+            if 'workflow_outputs' in step_dict:
+                outputs = step_dict['workflow_outputs']
+                # outputs may be list of name (deprecated legacy behavior)
+                # or dictionary of names to {uuid: <uuid>, label: <label>}
+                if isinstance(outputs, dict):
+                    for output_name in outputs:
+                        output_dict = outputs[output_name]
+                        output_label = output_dict.get("label", None)
+                        if output_label:
+                            if output_label in discovered_output_labels:
+                                raise exceptions.DuplicatedIdentifierException("Duplicated workflow output label in request.")
+                            discovered_output_labels.add(output_label)
+
+                        output_uuid = output_dict.get("uuid", None)
+                        if output_uuid:
+                            if output_uuid in discovered_output_uuids:
+                                raise exceptions.DuplicatedIdentifierException("Duplicate workflow output UUID in request.")
+                            discovered_output_uuids.add(output_uuid)
+
+            yield step_dict
+
+    def __track_module_from_dict( self, trans, steps, steps_by_external_id, step_dict ):
+        module, step = self.__module_from_dict( trans, step_dict )
+        # Create the model class for the step
+        steps.append( step )
+        steps_by_external_id[ step_dict[ 'id' ] ] = step
+        if 'workflow_outputs' in step_dict:
+            workflow_outputs = step_dict['workflow_outputs']
+            found_output_names = set([])
+            for workflow_output in workflow_outputs:
+                # Allow workflow outputs as a list of output_names for backward compatibility.
+                if not isinstance(workflow_output, dict):
+                    workflow_output = {"output_name": workflow_output}
+                output_name = workflow_output["output_name"]
+                if output_name in found_output_names:
+                    raise exceptions.ObjectAttributeInvalidException("Duplicate workflow outputs with name [%s] found." % output_name)
+                if not output_name:
+                    raise exceptions.ObjectAttributeInvalidException("Workflow output with empty name encountered.")
+                found_output_names.add(output_name)
+                uuid = workflow_output.get("uuid", None)
+                label = workflow_output.get("label", None)
+                m = step.create_or_update_workflow_output(
+                    output_name=output_name,
+                    uuid=uuid,
+                    label=label,
+                )
+                trans.sa_session.add(m)
+        return module, step
+
+    def __module_from_dict( self, trans, step_dict ):
+        """ Create a WorkflowStep model object and corresponding module
+        representing type-specific functionality from the incoming dictionary.
+        """
+        step = model.WorkflowStep()
+        # TODO: Consider handling position inside module.
+        step.position = step_dict['position']
+        if "uuid" in step_dict and step_dict['uuid'] != "None":
+            step.uuid = step_dict["uuid"]
+        if "label" in step_dict:
+            step.label = step_dict["label"]
+
+        step_type = step_dict.get("type", None)
+        if step_type == "subworkflow":
+            subworkflow = self.__load_subworkflow_from_step_dict(
+                trans, step_dict
+            )
+            step_dict["subworkflow"] = subworkflow
+
+        module = module_factory.from_dict( trans, step_dict )
+        module.save_to_step( step )
+
+        annotation = step_dict[ 'annotation' ]
+        if annotation:
+            annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
+            self.add_item_annotation( trans.sa_session, trans.get_user(), step, annotation )
+
+        # Stick this in the step temporarily
+        step.temp_input_connections = step_dict['input_connections']
+
+        return module, step
+
+    def __load_subworkflow_from_step_dict(self, trans, step_dict):
+        embedded_subworkflow = step_dict.get("subworkflow", None)
+        subworkflow_id = step_dict.get("content_id", None)
+        if embedded_subworkflow and subworkflow_id:
+            raise Exception("Subworkflow step defines both subworkflow and content_id, only one may be specified.")
+
+        if not embedded_subworkflow and not subworkflow_id:
+            raise Exception("Subworkflow step must define either subworkflow or content_id.")
+
+        if embedded_subworkflow:
+            subworkflow = self.build_workflow_from_dict(
+                trans,
+                embedded_subworkflow,
+                create_stored_workflow=False,
+            ).workflow
+        else:
+            workflow_manager = WorkflowsManager(self.app)
+            subworkflow = workflow_manager.get_owned_workflow(
+                trans, subworkflow_id
+            )
+
+        return subworkflow
+
+    def __connect_workflow_steps( self, steps, steps_by_external_id ):
+        """ Second pass to deal with connections between steps.
+
+        Create workflow connection objects using the externally specified ids
+        supplied during creation or update.
+        """
+        for step in steps:
+            # Input connections
+            for input_name, conn_list in step.temp_input_connections.iteritems():
+                if not conn_list:
+                    continue
+                if not isinstance(conn_list, list):  # Older style singleton connection
+                    conn_list = [conn_list]
+                for conn_dict in conn_list:
+                    if 'output_name' not in conn_dict or 'id' not in conn_dict:
+                        template = "Invalid connection [%s] - must be dict with output_name and id fields."
+                        message = template % conn_dict
+                        raise exceptions.MessageException(message)
+                    conn = model.WorkflowStepConnection()
+                    conn.input_step = step
+                    conn.input_name = input_name
+                    conn.output_name = conn_dict['output_name']
+                    conn.output_step = steps_by_external_id[ conn_dict['id'] ]
+
+                    input_subworkflow_step_index = conn_dict.get('input_subworkflow_step_id', None)
+                    if input_subworkflow_step_index is not None:
+                        conn.input_subworkflow_step = step.subworkflow.step_by_index(input_subworkflow_step_index)
+
+            del step.temp_input_connections
+
+
+class MissingToolsException(exceptions.MessageException):
+
+    def __init__(self, workflow, errors):
+        self.workflow = workflow
+        self.errors = errors
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
new file mode 100644
index 0000000..4794ce0
--- /dev/null
+++ b/lib/galaxy/model/__init__.py
@@ -0,0 +1,5212 @@
+"""
+Galaxy data model classes
+
+Naming: try to use class names that have a distinct plural form so that
+the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
+"""
+import codecs
+import errno
+import json
+import logging
+import numbers
+import operator
+import os
+import socket
+import time
+from datetime import datetime, timedelta
+from string import Template
+from uuid import UUID, uuid4
+
+from six import string_types
+from sqlalchemy import (and_, func, join, not_, or_, select, true, type_coerce,
+                        types)
+from sqlalchemy.ext import hybrid
+from sqlalchemy.orm import aliased, joinedload, object_session
+
+import galaxy.model.metadata
+import galaxy.model.orm.now
+import galaxy.security.passwords
+import galaxy.util
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.security import get_permitted_actions
+from galaxy.util import (directory_hash_id, Params, ready_name_for_url,
+                         restore_text, send_mail, unicodify, unique_id)
+from galaxy.util.bunch import Bunch
+from galaxy.util.dictifiable import Dictifiable
+from galaxy.util.hash_util import new_secure_hash
+from galaxy.util.multi_byte import is_multi_byte
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.web.form_builder import (AddressField, CheckboxField, HistoryField,
+                                     PasswordField, SelectField, TextArea, TextField, WorkflowField,
+                                     WorkflowMappingField)
+from galaxy.web.framework.helpers import to_unicode
+
+log = logging.getLogger( __name__ )
+
+_datatypes_registry = None
+
+# When constructing filters with in for a fixed set of ids, maximum
+# number of items to place in the IN statement. Different databases
+# are going to have different limits so it is likely best to not let
+# this be unlimited - filter in Python if over this limit.
+MAX_IN_FILTER_LENGTH = 100
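+# Illustrative sketch (assumed names): with more ids than
+# MAX_IN_FILTER_LENGTH, prefer filtering in Python, e.g.
+#     wanted = set( ids )
+#     rows = [ row for row in session.query( Model ) if row.id in wanted ]
+# rather than session.query( Model ).filter( Model.id.in_( ids ) ).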
+
+
+class NoConverterException(Exception):
+    def __init__(self, value):
+        self.value = value
+
+    def __str__(self):
+        return repr(self.value)
+
+
+class ConverterDependencyException(Exception):
+    def __init__(self, value):
+        self.value = value
+
+    def __str__(self):
+        return repr(self.value)
+
+
+def _get_datatypes_registry():
+    if _datatypes_registry is None:
+        raise Exception("galaxy.model.set_datatypes_registry must be called before performing certain DatasetInstance operations.")
+    return _datatypes_registry
+
+
+def set_datatypes_registry( d_registry ):
+    """
+    Set up datatypes_registry
+    """
+    global _datatypes_registry
+    _datatypes_registry = d_registry
+
+
+class HasName:
+
+    def get_display_name( self ):
+        """
+        These objects have a name attribute that can be either a string or a
+        unicode object. If it is a string, convert it to unicode assuming 'utf-8' encoding.
+        """
+        name = self.name
+        name = unicodify( name, 'utf-8' )
+        return name
+
+
+class JobLike:
+
+    def _init_metrics( self ):
+        self.text_metrics = []
+        self.numeric_metrics = []
+
+    def add_metric( self, plugin, metric_name, metric_value ):
+        plugin = unicodify( plugin, 'utf-8' )
+        metric_name = unicodify( metric_name, 'utf-8' )
+        if isinstance( metric_value, numbers.Number ):
+            metric = self._numeric_metric( plugin, metric_name, metric_value )
+            self.numeric_metrics.append( metric )
+        else:
+            metric_value = unicodify( metric_value, 'utf-8' )
+            if len( metric_value ) > 1022:
+                # Truncate these values - not needed with sqlite
+                # but other backends may need it.
+                metric_value = metric_value[ :1022 ]
+            metric = self._text_metric( plugin, metric_name, metric_value )
+            self.text_metrics.append( metric )
+
+    @property
+    def metrics( self ):
+        # TODO: Make iterable, concatenate with chain
+        return self.text_metrics + self.numeric_metrics
+
+    def set_streams( self, stdout, stderr ):
+        stdout = galaxy.util.unicodify( stdout )
+        stderr = galaxy.util.unicodify( stderr )
+        if ( len( stdout ) > galaxy.util.DATABASE_MAX_STRING_SIZE ):
+            stdout = galaxy.util.shrink_string_by_size( stdout, galaxy.util.DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+            log.info( "stdout for %s %d is greater than %s, only a portion will be logged to database", type(self), self.id, galaxy.util.DATABASE_MAX_STRING_SIZE_PRETTY )
+        self.stdout = stdout
+        if ( len( stderr ) > galaxy.util.DATABASE_MAX_STRING_SIZE ):
+            stderr = galaxy.util.shrink_string_by_size( stderr, galaxy.util.DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+            log.info( "stderr for %s %d is greater than %s, only a portion will be logged to database", type(self), self.id, galaxy.util.DATABASE_MAX_STRING_SIZE_PRETTY )
+        self.stderr = stderr
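+        # Illustrative: a stderr longer than DATABASE_MAX_STRING_SIZE is kept
+        # as a leading chunk and a trailing chunk joined by "\n..\n", the
+        # leading chunk being larger (per left_larger=True above).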
+
+    def log_str(self):
+        extra = ""
+        safe_id = getattr(self, "id", None)
+        if safe_id is not None:
+            extra += "id=%s" % safe_id
+        else:
+            extra += "unflushed"
+
+        return "%s[%s,tool_id=%s]" % (self.__class__.__name__, extra, self.tool_id)
+
+
+class User( object, Dictifiable ):
+    """
+    Data for a Galaxy user or admin and relations to their
+    histories, credentials, and roles.
+    """
+    use_pbkdf2 = True
+    # attributes that will be accessed and returned when calling to_dict( view='collection' )
+    dict_collection_visible_keys = ( 'id', 'email', 'username' )
+    # attributes that will be accessed and returned when calling to_dict( view='element' )
+    dict_element_visible_keys = ( 'id', 'email', 'username', 'total_disk_usage', 'nice_total_disk_usage' )
+
+    def __init__( self, email=None, password=None ):
+        self.email = email
+        self.password = password
+        self.external = False
+        self.deleted = False
+        self.purged = False
+        self.active = False
+        self.activation_token = None
+        self.username = None
+        self.last_password_change = None
+        # Relationships
+        self.histories = []
+        self.credentials = []
+        # ? self.roles = []
+
+    def set_password_cleartext( self, cleartext ):
+        """
+        Set user password to the digest of `cleartext`.
+        """
+        if User.use_pbkdf2:
+            self.password = galaxy.security.passwords.hash_password( cleartext )
+        else:
+            self.password = new_secure_hash( text_type=cleartext )
+        self.last_password_change = datetime.now()
+
+    def check_password( self, cleartext ):
+        """
+        Check if `cleartext` matches user password when hashed.
+        """
+        return galaxy.security.passwords.check_password( cleartext, self.password )
+
+    def all_roles( self ):
+        """
+        Return a unique list of Roles associated with this user or any of their groups.
+        """
+        try:
+            db_session = object_session( self )
+            user = db_session.query(
+                User
+            ).filter_by(  # don't use get, it will use session variant.
+                id=self.id
+            ).options(
+                joinedload("roles"),
+                joinedload("roles.role"),
+                joinedload("groups"),
+                joinedload("groups.group"),
+                joinedload("groups.group.roles"),
+                joinedload("groups.group.roles.role")
+            ).one()
+        except Exception:
+            # If the user is not persistent, just use the models normally and
+            # skip the optimizations...
+            user = self
+
+        roles = [ ura.role for ura in user.roles ]
+        for group in [ uga.group for uga in user.groups ]:
+            for role in [ gra.role for gra in group.roles ]:
+                if role not in roles:
+                    roles.append( role )
+        return roles
+
+    def all_roles_exploiting_cache( self ):
+        """
+        """
+        roles = [ ura.role for ura in self.roles ]
+        for group in [ uga.group for uga in self.groups ]:
+            for role in [ gra.role for gra in group.roles ]:
+                if role not in roles:
+                    roles.append( role )
+        return roles
+
+    def get_disk_usage( self, nice_size=False ):
+        """
+        Return byte count of disk space used by user or a human-readable
+        string if `nice_size` is `True`.
+        """
+        rval = 0
+        if self.disk_usage is not None:
+            rval = self.disk_usage
+        if nice_size:
+            rval = galaxy.util.nice_size( rval )
+        return rval
+
+    def set_disk_usage( self, bytes ):
+        """
+        Manually set the disk space used by a user to `bytes`.
+        """
+        self.disk_usage = bytes
+
+    total_disk_usage = property( get_disk_usage, set_disk_usage )
+
+    def adjust_total_disk_usage( self, amount ):
+        if amount != 0:
+            self.disk_usage = func.coalesce(self.table.c.disk_usage, 0) + amount
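+            # SQL-side this becomes disk_usage = COALESCE(disk_usage, 0) + amount,
+            # so a NULL starting value does not swallow the adjustment.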
+
+    @property
+    def nice_total_disk_usage( self ):
+        """
+        Return byte count of disk space used in a human-readable string.
+        """
+        return self.get_disk_usage( nice_size=True )
+
+    def calculate_disk_usage( self ):
+        """
+        Return byte count total of disk space used by all non-purged, non-library
+        HDAs in non-purged histories.
+        """
+        # maintain a list so that we don't double count
+        dataset_ids = []
+        total = 0
+        # this can be a huge number and can run out of memory, so we avoid the mappers
+        db_session = object_session( self )
+        for history in db_session.query( History ).enable_eagerloads( False ).filter_by( user_id=self.id, purged=False ).yield_per( 1000 ):
+            for hda in db_session.query( HistoryDatasetAssociation ).enable_eagerloads( False ).filter_by( history_id=history.id, purged=False ).yield_per( 1000 ):
+                # TODO: def hda.counts_toward_disk_usage():
+                #   return ( not self.dataset.purged and not self.dataset.library_associations )
+                if hda.dataset.id not in dataset_ids and not hda.dataset.purged and not hda.dataset.library_associations:
+                    dataset_ids.append( hda.dataset.id )
+                    total += hda.dataset.get_total_size()
+        return total
+
+    @staticmethod
+    def user_template_environment( user ):
+        """
+        Return a dictionary of template variables describing `user`.
+
+        >>> env = User.user_template_environment(None)
+        >>> env['__user_email__']
+        'Anonymous'
+        >>> env['__user_id__']
+        'Anonymous'
+        >>> user = User('foo@example.com')
+        >>> user.id = 6
+        >>> user.username = 'foo2'
+        >>> env = User.user_template_environment(user)
+        >>> env['__user_id__']
+        '6'
+        >>> env['__user_name__']
+        'foo2'
+        """
+        if user:
+            user_id = '%d' % user.id
+            user_email = str( user.email )
+            user_name = str( user.username )
+        else:
+            user = None
+            user_id = 'Anonymous'
+            user_email = 'Anonymous'
+            user_name = 'Anonymous'
+        environment = {}
+        environment[ '__user__' ] = user
+        environment[ '__user_id__' ] = environment[ 'userId' ] = user_id
+        environment[ '__user_email__' ] = environment[ 'userEmail' ] = user_email
+        environment[ '__user_name__' ] = user_name
+        return environment
+
+    @staticmethod
+    def expand_user_properties( user, in_string ):
+        """
+        """
+        environment = User.user_template_environment( user )
+        return Template( in_string ).safe_substitute( environment )
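+    # Illustrative usage (values assumed): for the doctest user above,
+    #     User.expand_user_properties( user, '/data/$__user_name__/out' )
+    # returns '/data/foo2/out'.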
+
+
+class PasswordResetToken( object ):
+    def __init__( self, user, token=None):
+        if token:
+            self.token = token
+        else:
+            self.token = unique_id()
+        self.user = user
+        self.expiration_time = galaxy.model.orm.now.now() + timedelta(hours=24)
+
+
+class BaseJobMetric( object ):
+
+    def __init__( self, plugin, metric_name, metric_value ):
+        self.plugin = plugin
+        self.metric_name = metric_name
+        self.metric_value = metric_value
+
+
+class JobMetricText( BaseJobMetric ):
+    pass
+
+
+class JobMetricNumeric( BaseJobMetric ):
+    pass
+
+
+class TaskMetricText( BaseJobMetric ):
+    pass
+
+
+class TaskMetricNumeric( BaseJobMetric ):
+    pass
+
+
+class Job( object, JobLike, Dictifiable ):
+    """
+    A job represents a request to run a tool given input datasets, tool
+    parameters, and output datasets.
+    """
+    dict_collection_visible_keys = [ 'id', 'state', 'exit_code', 'update_time', 'create_time' ]
+    dict_element_visible_keys = [ 'id', 'state', 'exit_code', 'update_time', 'create_time' ]
+
+    _numeric_metric = JobMetricNumeric
+    _text_metric = JobMetricText
+
+    states = Bunch( NEW='new',
+                    RESUBMITTED='resubmitted',
+                    UPLOAD='upload',
+                    WAITING='waiting',
+                    QUEUED='queued',
+                    RUNNING='running',
+                    OK='ok',
+                    ERROR='error',
+                    PAUSED='paused',
+                    DELETED='deleted',
+                    DELETED_NEW='deleted_new' )
+    terminal_states = [ states.OK,
+                        states.ERROR,
+                        states.DELETED ]
+    #: job states where the job hasn't finished and the model may still change
+    non_ready_states = [
+        states.NEW,
+        states.RESUBMITTED,
+        states.UPLOAD,
+        states.WAITING,
+        states.QUEUED,
+        states.RUNNING,
+    ]
+
+    # Please include an accessor (get/set pair) for any new columns/members.
+    def __init__( self ):
+        self.session_id = None
+        self.user_id = None
+        self.tool_id = None
+        self.tool_version = None
+        self.command_line = None
+        self.dependencies = []
+        self.param_filename = None
+        self.parameters = []
+        self.input_datasets = []
+        self.output_datasets = []
+        self.input_dataset_collections = []
+        self.output_dataset_collection_instances = []
+        self.output_dataset_collections = []
+        self.input_library_datasets = []
+        self.output_library_datasets = []
+        self.state = Job.states.NEW
+        self.info = None
+        self.job_runner_name = None
+        self.job_runner_external_id = None
+        self.destination_id = None
+        self.destination_params = None
+        self.post_job_actions = []
+        self.state_history = []
+        self.imported = False
+        self.handler = None
+        self.exit_code = None
+        self._init_metrics()
+        self.state_history.append( JobStateHistory( self ) )
+
+    @property
+    def finished( self ):
+        states = self.states
+        return self.state in [
+            states.OK,
+            states.ERROR,
+            states.DELETED,
+            states.DELETED_NEW,
+        ]
+
+    # TODO: Add accessors for members defined in SQL Alchemy for the Job table and
+    # for the mapper defined to the Job table.
+    def get_external_output_metadata( self ):
+        """
+        The external_output_metadata is currently a reference from Job to
+        JobExternalOutputMetadata. It exists for a job but not a task.
+        """
+        return self.external_output_metadata
+
+    def get_session_id( self ):
+        return self.session_id
+
+    def get_user_id( self ):
+        return self.user_id
+
+    def get_tool_id( self ):
+        return self.tool_id
+
+    def get_tool_version( self ):
+        return self.tool_version
+
+    def get_command_line( self ):
+        return self.command_line
+
+    def get_dependencies(self):
+        return self.dependencies
+
+    def get_param_filename( self ):
+        return self.param_filename
+
+    def get_parameters( self ):
+        return self.parameters
+
+    def get_input_datasets( self ):
+        return self.input_datasets
+
+    def get_output_datasets( self ):
+        return self.output_datasets
+
+    def get_input_library_datasets( self ):
+        return self.input_library_datasets
+
+    def get_output_library_datasets( self ):
+        return self.output_library_datasets
+
+    def get_state( self ):
+        return self.state
+
+    def get_info( self ):
+        return self.info
+
+    def get_job_runner_name( self ):
+        # This differs from the Task class in that job_runner_name is
+        # accessed instead of task_runner_name. Note that the field
+        # runner_name is not the same thing.
+        return self.job_runner_name
+
+    def get_job_runner_external_id( self ):
+        # This is different from the Task just in the member accessed:
+        return self.job_runner_external_id
+
+    def get_post_job_actions( self ):
+        return self.post_job_actions
+
+    def get_imported( self ):
+        return self.imported
+
+    def get_handler( self ):
+        return self.handler
+
+    def get_params( self ):
+        return self.params
+
+    def get_user( self ):
+        # This is defined in the SQL Alchemy mapper as a relation to the User.
+        return self.user
+
+    def get_id( self ):
+        # This is defined in SQL Alchemy's Job table (and not in the model).
+        return self.id
+
+    def get_tasks( self ):
+        # The tasks member is part of a reference in the SQL Alchemy schema:
+        return self.tasks
+
+    def get_id_tag( self ):
+        """
+        Return a tag that can be useful in identifying a Job.
+        This returns the Job's id.
+        """
+        return "%s" % self.id
+
+    def set_session_id( self, session_id ):
+        self.session_id = session_id
+
+    def set_user_id( self, user_id ):
+        self.user_id = user_id
+
+    def set_tool_id( self, tool_id ):
+        self.tool_id = tool_id
+
+    def set_tool_version( self, tool_version ):
+        self.tool_version = tool_version
+
+    def set_command_line( self, command_line ):
+        self.command_line = command_line
+
+    def set_dependencies( self, dependencies ):
+        self.dependencies = dependencies
+
+    def set_param_filename( self, param_filename ):
+        self.param_filename = param_filename
+
+    def set_parameters( self, parameters ):
+        self.parameters = parameters
+
+    def set_input_datasets( self, input_datasets ):
+        self.input_datasets = input_datasets
+
+    def set_output_datasets( self, output_datasets ):
+        self.output_datasets = output_datasets
+
+    def set_input_library_datasets( self, input_library_datasets ):
+        self.input_library_datasets = input_library_datasets
+
+    def set_output_library_datasets( self, output_library_datasets ):
+        self.output_library_datasets = output_library_datasets
+
+    def set_info( self, info ):
+        self.info = info
+
+    def set_runner_name( self, job_runner_name ):
+        self.job_runner_name = job_runner_name
+
+    def get_job( self ):
+        # Added so job and task have same interface (.get_job() ) to get at
+        # underlying job object.
+        return self
+
+    def set_runner_external_id( self, job_runner_external_id ):
+        self.job_runner_external_id = job_runner_external_id
+
+    def set_post_job_actions( self, post_job_actions ):
+        self.post_job_actions = post_job_actions
+
+    def set_imported( self, imported ):
+        self.imported = imported
+
+    def set_handler( self, handler ):
+        self.handler = handler
+
+    def set_params( self, params ):
+        self.params = params
+
+    def add_parameter( self, name, value ):
+        self.parameters.append( JobParameter( name, value ) )
+
+    def add_input_dataset( self, name, dataset=None, dataset_id=None ):
+        assoc = JobToInputDatasetAssociation( name, dataset )
+        if dataset is None and dataset_id is not None:
+            assoc.dataset_id = dataset_id
+        self.input_datasets.append( assoc )
+
+    def add_output_dataset( self, name, dataset ):
+        self.output_datasets.append( JobToOutputDatasetAssociation( name, dataset ) )
+
+    def add_input_dataset_collection( self, name, dataset ):
+        self.input_dataset_collections.append( JobToInputDatasetCollectionAssociation( name, dataset ) )
+
+    def add_output_dataset_collection( self, name, dataset_collection_instance ):
+        self.output_dataset_collection_instances.append( JobToOutputDatasetCollectionAssociation( name, dataset_collection_instance ) )
+
+    def add_implicit_output_dataset_collection( self, name, dataset_collection ):
+        self.output_dataset_collections.append( JobToImplicitOutputDatasetCollectionAssociation( name, dataset_collection ) )
+
+    def add_input_library_dataset( self, name, dataset ):
+        self.input_library_datasets.append( JobToInputLibraryDatasetAssociation( name, dataset ) )
+
+    def add_output_library_dataset( self, name, dataset ):
+        self.output_library_datasets.append( JobToOutputLibraryDatasetAssociation( name, dataset ) )
+
+    def add_post_job_action(self, pja):
+        self.post_job_actions.append( PostJobActionAssociation( pja, self ) )
+
+    def set_state( self, state ):
+        """
+        Save state history
+        """
+        self.state = state
+        self.state_history.append( JobStateHistory( self ) )
+
+    def get_param_values( self, app, ignore_errors=False ):
+        """
+        Read encoded parameter values from the database and turn back into a
+        dict of tool parameter values.
+        """
+        param_dict = self.raw_param_dict()
+        tool = app.toolbox.get_tool( self.tool_id )
+        param_dict = tool.params_from_strings( param_dict, app, ignore_errors=ignore_errors )
+        return param_dict
+
+    def raw_param_dict( self ):
+        param_dict = dict( [ ( p.name, p.value ) for p in self.parameters ] )
+        return param_dict
+
+    def check_if_output_datasets_deleted( self ):
+        """
+        Return true if all of the output datasets associated with this job are
+        in the deleted state
+        """
+        for dataset_assoc in self.output_datasets:
+            dataset = dataset_assoc.dataset
+            # only the originator of the job can delete a dataset to cause
+            # cancellation of the job, no need to loop through history_associations
+            if not dataset.deleted:
+                return False
+        return True
+
+    def mark_deleted( self, track_jobs_in_database=False ):
+        """
+        Mark this job as deleted, and mark any output datasets as discarded.
+        """
+        if self.finished:
+            # Do not modify the state/outputs of jobs that are already terminal
+            return
+        if track_jobs_in_database:
+            self.state = Job.states.DELETED_NEW
+        else:
+            self.state = Job.states.DELETED
+        self.info = "Job output deleted by user before job completed."
+        for dataset_assoc in self.output_datasets:
+            dataset = dataset_assoc.dataset
+            dataset.deleted = True
+            dataset.state = dataset.states.DISCARDED
+            for dataset in dataset.dataset.history_associations:
+                # propagate info across shared datasets
+                dataset.deleted = True
+                dataset.blurb = 'deleted'
+                dataset.peek = 'Job deleted'
+                dataset.info = 'Job output deleted by user before job completed'
+
+    def to_dict( self, view='collection', system_details=False ):
+        rval = super( Job, self ).to_dict( view=view )
+        rval['tool_id'] = self.tool_id
+        if system_details:
+            # System level details that only admins should have.
+            rval['external_id'] = self.job_runner_external_id
+            rval['command_line'] = self.command_line
+
+        if view == 'element':
+            param_dict = dict( [ ( p.name, p.value ) for p in self.parameters ] )
+            rval['params'] = param_dict
+
+            input_dict = {}
+            for i in self.input_datasets:
+                if i.dataset is not None:
+                    input_dict[i.name] = {
+                        "id" : i.dataset.id, "src" : "hda",
+                        "uuid" : str(i.dataset.dataset.uuid) if i.dataset.dataset.uuid is not None else None
+                    }
+            for i in self.input_library_datasets:
+                if i.dataset is not None:
+                    input_dict[i.name] = {
+                        "id" : i.dataset.id, "src" : "ldda",
+                        "uuid": str(i.dataset.dataset.uuid) if i.dataset.dataset.uuid is not None else None
+                    }
+            for k in input_dict:
+                if k in param_dict:
+                    del param_dict[k]
+            rval['inputs'] = input_dict
+
+            output_dict = {}
+            for i in self.output_datasets:
+                if i.dataset is not None:
+                    output_dict[i.name] = {
+                        "id" : i.dataset.id, "src" : "hda",
+                        "uuid" : str(i.dataset.dataset.uuid) if i.dataset.dataset.uuid is not None else None
+                    }
+            for i in self.output_library_datasets:
+                if i.dataset is not None:
+                    output_dict[i.name] = {
+                        "id" : i.dataset.id, "src" : "ldda",
+                        "uuid" : str(i.dataset.dataset.uuid) if i.dataset.dataset.uuid is not None else None
+                    }
+            rval['outputs'] = output_dict
+
+        return rval
+
+    def set_final_state( self, final_state ):
+        self.set_state( final_state )
+        if self.workflow_invocation_step:
+            self.workflow_invocation_step.update()
+
+    def get_destination_configuration(self, config, key, default=None):
+        """ Get a destination parameter that can be defaulted back
+        in specified config if it needs to be applied globally.
+        """
+        param_unspecified = object()
+        config_value = (self.destination_params or {}).get(key, param_unspecified)
+        if config_value is param_unspecified:
+            config_value = getattr(config, key, param_unspecified)
+        if config_value is param_unspecified:
+            config_value = default
+        return config_value
+
+
+class Task( object, JobLike ):
+    """
+    A task represents a single component of a job.
+    """
+    _numeric_metric = TaskMetricNumeric
+    _text_metric = TaskMetricText
+
+    states = Bunch( NEW='new',
+                    WAITING='waiting',
+                    QUEUED='queued',
+                    RUNNING='running',
+                    OK='ok',
+                    ERROR='error',
+                    DELETED='deleted' )
+
+    # Please include an accessor (get/set pair) for any new columns/members.
+    def __init__( self, job, working_directory, prepare_files_cmd ):
+        self.command_line = None
+        self.parameters = []
+        self.state = Task.states.NEW
+        self.info = None
+        self.working_directory = working_directory
+        self.task_runner_name = None
+        self.task_runner_external_id = None
+        self.job = job
+        self.stdout = ""
+        self.stderr = ""
+        self.exit_code = None
+        self.prepare_input_files_cmd = prepare_files_cmd
+        self._init_metrics()
+
+    def get_param_values( self, app ):
+        """
+        Read encoded parameter values from the database and turn back into a
+        dict of tool parameter values.
+        """
+        param_dict = dict( [ ( p.name, p.value ) for p in self.job.parameters ] )
+        tool = app.toolbox.get_tool( self.job.tool_id )
+        param_dict = tool.params_from_strings( param_dict, app )
+        return param_dict
+
+    def get_id( self ):
+        # This is defined in the SQL Alchemy schema:
+        return self.id
+
+    def get_id_tag( self ):
+        """
+        Return an id tag suitable for identifying the task.
+        This combines the task's job id and the task's own id.
+        """
+        return "%s_%s" % ( self.job.get_id(), self.get_id() )
+
+    def get_command_line( self ):
+        return self.command_line
+
+    def get_parameters( self ):
+        return self.parameters
+
+    def get_state( self ):
+        return self.state
+
+    def get_info( self ):
+        return self.info
+
+    def get_working_directory( self ):
+        return self.working_directory
+
+    def get_task_runner_name( self ):
+        return self.task_runner_name
+
+    def get_task_runner_external_id( self ):
+        return self.task_runner_external_id
+
+    def get_job( self ):
+        return self.job
+
+    def get_stdout( self ):
+        return self.stdout
+
+    def get_stderr( self ):
+        return self.stderr
+
+    def get_prepare_input_files_cmd( self ):
+        return self.prepare_input_files_cmd
+
+    # The following accessors are for members that are in the Job class but
+    # not in the Task class. So they can either refer to the parent Job
+    # or return None, depending on whether Tasks need to point to the parent
+    # (e.g., for a session) or never use the member (e.g., external output
+    # metadata). These can be filled in as needed.
+    def get_external_output_metadata( self ):
+        """
+        The external_output_metadata is currently a backref to
+        JobExternalOutputMetadata. It exists for a job but not a task,
+        and when a task is cancelled its corresponding parent Job will
+        be cancelled. So None is returned now, but that could be changed
+        to self.get_job().get_external_output_metadata().
+        """
+        return None
+
+    def get_job_runner_name( self ):
+        """
+        Since runners currently access Tasks the same way they access Jobs,
+        this method just refers to *this* instance's runner.
+        """
+        return self.task_runner_name
+
+    def get_job_runner_external_id( self ):
+        """
+        Runners will use the same methods to get information about the Task
+        class as they will about the Job class, so this method just returns
+        the task's external id.
+        """
+        # TODO: Merge into get_runner_external_id.
+        return self.task_runner_external_id
+
+    def get_session_id( self ):
+        # A task's galaxy session is that of its parent job, so
+        # delegate to the job.
+        return self.get_job().get_session_id()
+
+    def set_id( self, id ):
+        # This is defined in SQL Alchemy's mapper, not here,
+        # and should never be called.
+        self.id = id
+
+    def set_command_line( self, command_line ):
+        self.command_line = command_line
+
+    def set_parameters( self, parameters ):
+        self.parameters = parameters
+
+    def set_state( self, state ):
+        self.state = state
+
+    def set_info( self, info ):
+        self.info = info
+
+    def set_working_directory( self, working_directory ):
+        self.working_directory = working_directory
+
+    def set_task_runner_name( self, task_runner_name ):
+        self.task_runner_name = task_runner_name
+
+    def set_job_runner_external_id( self, task_runner_external_id ):
+        # This method is available for runners that do not want/need to
+        # differentiate between the kinds of Runnable things (Jobs and Tasks)
+        # that they're using.
+        log.debug( "Task %d: Set external id to %s"
+                   % ( self.id, task_runner_external_id ) )
+        self.task_runner_external_id = task_runner_external_id
+
+    def set_task_runner_external_id( self, task_runner_external_id ):
+        self.task_runner_external_id = task_runner_external_id
+
+    def set_job( self, job ):
+        self.job = job
+
+    def set_stdout( self, stdout ):
+        self.stdout = stdout
+
+    def set_stderr( self, stderr ):
+        self.stderr = stderr
+
+    def set_prepare_input_files_cmd( self, prepare_input_files_cmd ):
+        self.prepare_input_files_cmd = prepare_input_files_cmd
+
+
+class JobParameter( object ):
+    def __init__( self, name, value ):
+        self.name = name
+        self.value = value
+
+
+class JobToInputDatasetAssociation( object ):
+    def __init__( self, name, dataset ):
+        self.name = name
+        self.dataset = dataset
+
+
+class JobToOutputDatasetAssociation( object ):
+    def __init__( self, name, dataset ):
+        self.name = name
+        self.dataset = dataset
+
+
+class JobToInputDatasetCollectionAssociation( object ):
+    def __init__( self, name, dataset ):
+        self.name = name
+        self.dataset = dataset
+
+
+# Many jobs may map to one HistoryDatasetCollection using these for a given
+# tool output (if mapping over an input collection).
+class JobToOutputDatasetCollectionAssociation( object ):
+    def __init__( self, name, dataset_collection_instance ):
+        self.name = name
+        self.dataset_collection_instance = dataset_collection_instance
+
+
+# A DatasetCollection will be mapped to at most one job per tool output
+# using these. (You can think of many of these models as going into the
+# creation of a JobToOutputDatasetCollectionAssociation.)
+class JobToImplicitOutputDatasetCollectionAssociation( object ):
+    def __init__( self, name, dataset_collection ):
+        self.name = name
+        self.dataset_collection = dataset_collection
+
+
+class JobToInputLibraryDatasetAssociation( object ):
+    def __init__( self, name, dataset ):
+        self.name = name
+        self.dataset = dataset
+
+
+class JobToOutputLibraryDatasetAssociation( object ):
+    def __init__( self, name, dataset ):
+        self.name = name
+        self.dataset = dataset
+
+
+class JobStateHistory( object ):
+    def __init__( self, job ):
+        self.job = job
+        self.state = job.state
+        self.info = job.info
+
+
+class ImplicitlyCreatedDatasetCollectionInput( object ):
+    def __init__( self, name, input_dataset_collection ):
+        self.name = name
+        self.input_dataset_collection = input_dataset_collection
+
+
+class PostJobAction( object ):
+    def __init__( self, action_type, workflow_step, output_name=None, action_arguments=None):
+        self.action_type = action_type
+        self.output_name = output_name
+        self.action_arguments = action_arguments
+        self.workflow_step = workflow_step
+
+
+class PostJobActionAssociation( object ):
+    def __init__(self, pja, job=None, job_id=None ):
+        if job is not None:
+            self.job = job
+        elif job_id is not None:
+            self.job_id = job_id
+        else:
+            raise Exception("PostJobActionAssociation must be created with a job or a job_id.")
+        self.post_job_action = pja
+
+
+class JobExternalOutputMetadata( object ):
+    def __init__( self, job=None, dataset=None ):
+        self.job = job
+        if isinstance( dataset, galaxy.model.HistoryDatasetAssociation ):
+            self.history_dataset_association = dataset
+        elif isinstance( dataset, galaxy.model.LibraryDatasetDatasetAssociation ):
+            self.library_dataset_dataset_association = dataset
+
+    @property
+    def dataset( self ):
+        if self.history_dataset_association:
+            return self.history_dataset_association
+        elif self.library_dataset_dataset_association:
+            return self.library_dataset_dataset_association
+        return None
+
+
+class JobExportHistoryArchive( object ):
+    def __init__( self, job=None, history=None, dataset=None, compressed=False,
+                  history_attrs_filename=None, datasets_attrs_filename=None,
+                  jobs_attrs_filename=None ):
+        self.job = job
+        self.history = history
+        self.dataset = dataset
+        self.compressed = compressed
+        self.history_attrs_filename = history_attrs_filename
+        self.datasets_attrs_filename = datasets_attrs_filename
+        self.jobs_attrs_filename = jobs_attrs_filename
+
+    @property
+    def up_to_date( self ):
+        """ Return False, if a new export should be generated for corresponding
+        history.
+        """
+        job = self.job
+        return job.state not in [ Job.states.ERROR, Job.states.DELETED ] \
+            and job.update_time > self.history.update_time
+
+    @property
+    def ready( self ):
+        return self.job.state == Job.states.OK
+
+    @property
+    def preparing( self ):
+        return self.job.state in [ Job.states.RUNNING, Job.states.QUEUED, Job.states.WAITING ]
+
+    @property
+    def export_name( self ):
+        # Stream archive.
+        hname = ready_name_for_url( self.history.name )
+        hname = "Galaxy-History-%s.tar" % ( hname )
+        if self.compressed:
+            hname += ".gz"
+        return hname
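+    # e.g. a compressed export of a history named 'RNA-seq' streams as
+    # 'Galaxy-History-RNA-seq.tar.gz' (illustrative; ready_name_for_url
+    # yields the URL-safe form of the name).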
+
+
+class JobImportHistoryArchive( object ):
+    def __init__( self, job=None, history=None, archive_dir=None ):
+        self.job = job
+        self.history = history
+        self.archive_dir = archive_dir
+
+
+class GenomeIndexToolData( object ):
+    def __init__( self, job=None, params=None, dataset=None, deferred_job=None,
+                  transfer_job=None, fasta_path=None, created_time=None, modified_time=None,
+                  dbkey=None, user=None, indexer=None ):
+        self.job = job
+        self.dataset = dataset
+        self.fasta_path = fasta_path
+        self.user = user
+        self.indexer = indexer
+        self.created_time = created_time
+        self.modified_time = modified_time
+        self.deferred = deferred_job
+        self.transfer = transfer_job
+
+
+class DeferredJob( object ):
+    states = Bunch( NEW='new',
+                    WAITING='waiting',
+                    QUEUED='queued',
+                    RUNNING='running',
+                    OK='ok',
+                    ERROR='error' )
+
+    def __init__( self, state=None, plugin=None, params=None ):
+        self.state = state
+        self.plugin = plugin
+        self.params = params
+
+    def get_check_interval( self ):
+        if not hasattr( self, '_check_interval' ):
+            self._check_interval = None
+        return self._check_interval
+
+    def set_check_interval( self, seconds ):
+        self._check_interval = seconds
+    check_interval = property( get_check_interval, set_check_interval )
+
+    def get_last_check( self ):
+        if not hasattr( self, '_last_check' ):
+            self._last_check = 0
+        return self._last_check
+
+    def set_last_check( self, seconds ):
+        try:
+            self._last_check = int( seconds )
+        except ( ValueError, TypeError ):
+            self._last_check = time.time()
+    last_check = property( get_last_check, set_last_check )
+
+    @property
+    def is_check_time( self ):
+        if self.check_interval is None:
+            return True
+        elif ( int( time.time() ) - self.last_check ) > self.check_interval:
+            return True
+        else:
+            return False
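+
+    # Illustrative behaviour sketch, with `dj` a hypothetical DeferredJob:
+    #   dj.is_check_time            # True -- no check_interval configured
+    #   dj.check_interval = 60
+    #   dj.last_check = time.time() # coerced to an int by set_last_check
+    #   dj.is_check_time            # False until ~60 seconds have elapsed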
+
+
+class Group( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'name' )
+    dict_element_visible_keys = ( 'id', 'name' )
+
+    def __init__( self, name=None ):
+        self.name = name
+        self.deleted = False
+
+
+class UserGroupAssociation( object ):
+    def __init__( self, user, group ):
+        self.user = user
+        self.group = group
+
+
+def is_hda(d):
+    return isinstance( d, HistoryDatasetAssociation )
+
+
+class History( object, Dictifiable, UsesAnnotations, HasName ):
+
+    dict_collection_visible_keys = ( 'id', 'name', 'published', 'deleted' )
+    dict_element_visible_keys = ( 'id', 'name', 'genome_build', 'deleted', 'purged', 'update_time',
+                                  'published', 'importable', 'slug', 'empty' )
+    default_name = 'Unnamed history'
+
+    def __init__( self, id=None, name=None, user=None ):
+        self.id = id
+        self.name = name or History.default_name
+        self.deleted = False
+        self.purged = False
+        self.importing = False
+        self.genome_build = None
+        self.published = False
+        # Relationships
+        self.user = user
+        self.datasets = []
+        self.galaxy_sessions = []
+        self.tags = []
+
+    @property
+    def empty( self ):
+        return self.hid_counter == 1
+
+    def _next_hid( self, n=1 ):
+        # this is overridden by the db_next_hid() method in mapping.py
+        if len( self.datasets ) == 0:
+            return n
+        else:
+            last_hid = 0
+            for dataset in self.datasets:
+                if dataset.hid > last_hid:
+                    last_hid = dataset.hid
+            return last_hid + n
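+
+        # Illustrative example: with existing datasets at hids [1, 2, 5],
+        # _next_hid() returns 6 and _next_hid( n=3 ) returns 8; the optimized
+        # bulk-add path below uses that value as the base hid for new datasets.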
+
+    def add_galaxy_session( self, galaxy_session, association=None ):
+        if association is None:
+            self.galaxy_sessions.append( GalaxySessionToHistoryAssociation( galaxy_session, self ) )
+        else:
+            self.galaxy_sessions.append( association )
+
+    def add_dataset( self, dataset, parent_id=None, genome_build=None, set_hid=True, quota=True ):
+        if isinstance( dataset, Dataset ):
+            dataset = HistoryDatasetAssociation(dataset=dataset)
+            object_session( self ).add( dataset )
+            object_session( self ).flush()
+        elif not isinstance( dataset, HistoryDatasetAssociation ):
+            raise TypeError( "You can only add Dataset and HistoryDatasetAssociation instances to a history" +
+                             " ( you tried to add %s )." % str( dataset ) )
+        if parent_id:
+            for data in self.datasets:
+                if data.id == parent_id:
+                    dataset.hid = data.hid
+                    break
+            else:
+                if set_hid:
+                    dataset.hid = self._next_hid()
+        else:
+            if set_hid:
+                dataset.hid = self._next_hid()
+        if quota and self.user:
+            self.user.adjust_total_disk_usage(dataset.quota_amount(self.user))
+        dataset.history = self
+        if genome_build not in [None, '?']:
+            self.genome_build = genome_build
+        dataset.history_id = self.id
+        return dataset
+
+    def add_datasets( self, sa_session, datasets, parent_id=None, genome_build=None, set_hid=True, quota=True, flush=False ):
+        """ Optimized version of add_dataset above that minimizes database
+        interactions when adding many datasets to history at once.
+        """
+        all_hdas = all( is_hda(_) for _ in datasets )
+        optimize = len( datasets ) > 1 and parent_id is None and all_hdas and set_hid
+        if optimize:
+            self.__add_datasets_optimized( datasets, genome_build=genome_build )
+            if quota and self.user:
+                disk_usage = sum([d.get_total_size() for d in datasets])
+                self.user.adjust_total_disk_usage(disk_usage)
+
+            sa_session.add_all( datasets )
+            if flush:
+                sa_session.flush()
+        else:
+            for dataset in datasets:
+                self.add_dataset( dataset, parent_id=parent_id, genome_build=genome_build, set_hid=set_hid, quota=quota )
+                sa_session.add( dataset )
+                if flush:
+                    sa_session.flush()
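+
+        # Illustrative usage sketch -- `sa_session` is a SQLAlchemy session and
+        # `hdas` a list of HistoryDatasetAssociations (both hypothetical here):
+        #   history.add_datasets( sa_session, hdas, flush=True )
+        # With more than one HDA, no parent_id, and set_hid=True, the optimized
+        # bulk path above assigns hids without per-dataset flushes.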
+
+    def __add_datasets_optimized( self, datasets, genome_build=None ):
+        """ Optimized version of add_dataset above that minimizes database
+        interactions when adding many datasets to history at once under
+        certain circumstances.
+        """
+        n = len( datasets )
+
+        base_hid = self._next_hid( n=n )
+        set_genome = genome_build not in [None, '?']
+        for i, dataset in enumerate( datasets ):
+            dataset.hid = base_hid + i
+            dataset.history = self
+            if set_genome:
+                self.genome_build = genome_build
+        for dataset in datasets:
+            dataset.history_id = self.id
+        return datasets
+
+    def add_dataset_collection( self, history_dataset_collection, set_hid=True ):
+        if set_hid:
+            history_dataset_collection.hid = self._next_hid()
+        history_dataset_collection.history = self
+        # TODO: quota?
+        self.dataset_collections.append( history_dataset_collection )
+        return history_dataset_collection
+
+    def copy( self, name=None, target_user=None, activatable=False, all_datasets=False ):
+        """
+        Return a copy of this history using the given `name` and `target_user`.
+        If `activatable`, copy only datasets whose underlying Dataset is not
+        deleted (i.e. datasets that can still be activated); if `all_datasets`,
+        copy non-deleted, deleted, and purged datasets; otherwise copy only
+        the active datasets.
+        """
+        name = name or self.name
+        applies_to_quota = target_user != self.user
+
+        # Create new history.
+        new_history = History( name=name, user=target_user )
+        db_session = object_session( self )
+        db_session.add( new_history )
+        db_session.flush()
+
+        # copy history tags and annotations (if copying user is not anonymous)
+        if target_user:
+            self.copy_item_annotation( db_session, self.user, self, target_user, new_history )
+            new_history.copy_tags_from(target_user=target_user, source_history=self)
+
+        # Copy HDAs.
+        if activatable:
+            hdas = self.activatable_datasets
+        elif all_datasets:
+            hdas = self.datasets
+        else:
+            hdas = self.active_datasets
+        for hda in hdas:
+            # Copy HDA.
+            new_hda = hda.copy( copy_children=True )
+            new_history.add_dataset( new_hda, set_hid=False, quota=applies_to_quota )
+            db_session.add( new_hda )
+            db_session.flush()
+
+            if target_user:
+                new_hda.copy_item_annotation( db_session, self.user, hda, target_user, new_hda )
+                new_hda.copy_tags_from( target_user, hda )
+
+        # Copy history dataset collections
+        if all_datasets:
+            hdcas = self.dataset_collections
+        else:
+            hdcas = self.active_dataset_collections
+        for hdca in hdcas:
+            new_hdca = hdca.copy()
+            new_history.add_dataset_collection( new_hdca, set_hid=False )
+            db_session.add( new_hdca )
+            db_session.flush()
+
+            if target_user:
+                new_hdca.copy_item_annotation( db_session, self.user, hdca, target_user, new_hdca )
+
+        new_history.hid_counter = self.hid_counter
+        db_session.add( new_history )
+        db_session.flush()
+
+        return new_history
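+
+        # Illustrative usage sketch, with `history` and `user` hypothetical
+        # History and User objects:
+        #   copied = history.copy( name="Copy of %s" % history.name,
+        #                          target_user=user )
+        # Copying to a different user counts the copied datasets against that
+        # user's quota (see `applies_to_quota` above).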
+
+    @property
+    def activatable_datasets( self ):
+        # This needs to be a list
+        return [ hda for hda in self.datasets if not hda.dataset.deleted ]
+
+    def to_dict( self, view='collection', value_mapper=None ):
+
+        # Get basic value.
+        rval = super( History, self ).to_dict( view=view, value_mapper=value_mapper )
+
+        # Add tags.
+        tags_str_list = []
+        for tag in self.tags:
+            tag_str = tag.user_tname
+            if tag.value is not None:
+                tag_str += ":" + tag.user_value
+            tags_str_list.append( tag_str )
+        rval[ 'tags' ] = tags_str_list
+
+        if view == 'element':
+            rval[ 'size' ] = int( self.disk_size )
+
+        return rval
+
+    @property
+    def latest_export( self ):
+        exports = self.exports
+        return exports and exports[ 0 ]
+
+    def unhide_datasets( self ):
+        for dataset in self.datasets:
+            dataset.mark_unhidden()
+
+    def resume_paused_jobs( self ):
+        for dataset in self.datasets:
+            job = dataset.creating_job
+            if job is not None and job.state == Job.states.PAUSED:
+                job.set_state(Job.states.NEW)
+
+    @hybrid.hybrid_property
+    def disk_size( self ):
+        """
+        Return the size in bytes of this history by summing the 'total_size's of
+        all non-purged, unique datasets within it.
+        """
+        # non-.expression part of hybrid.hybrid_property: called when an instance is the namespace (not the class)
+        db_session = object_session( self )
+        rval = db_session.query(
+            func.sum( db_session.query( HistoryDatasetAssociation.dataset_id, Dataset.total_size ).join( Dataset )
+                    .filter( HistoryDatasetAssociation.table.c.history_id == self.id )
+                    .filter( HistoryDatasetAssociation.purged != true() )
+                    .filter( Dataset.purged != true() )
+                    # unique datasets only
+                    .distinct().subquery().c.total_size ) ).first()[0]
+        if rval is None:
+            rval = 0
+        return rval
+
+    @disk_size.expression
+    def disk_size( cls ):
+        """
+        Return a query scalar that will get any history's size in bytes by summing
+        the 'total_size's of all non-purged, unique datasets within it.
+        """
+        # .expression acts as a column_property and should return a scalar
+        # first, get the distinct datasets within a history that are not purged
+        hda_to_dataset_join = join( HistoryDatasetAssociation, Dataset,
+            HistoryDatasetAssociation.table.c.dataset_id == Dataset.table.c.id )
+        distinct_datasets = (
+            select([
+                # use labels here for easier access from the size query below
+                HistoryDatasetAssociation.table.c.history_id.label( 'history_id' ),
+                Dataset.total_size.label( 'dataset_size' ),
+                Dataset.id.label( 'dataset_id' )
+            ])
+            .where( HistoryDatasetAssociation.table.c.purged != true() )
+            .where( Dataset.table.c.purged != true() )
+            .select_from( hda_to_dataset_join )
+            # TODO: potentially slow (in general, and most probably here); index total_size for easier sorting/distinct?
+            .distinct()
+        )
+        # postgres needs an alias on FROM
+        distinct_datasets_alias = aliased( distinct_datasets, name="datasets" )
+        # then, bind as property of history using the cls.id
+        size_query = (
+            select([
+                func.coalesce( func.sum( distinct_datasets_alias.c.dataset_size ), 0 )
+            ])
+            .select_from( distinct_datasets_alias )
+            .where( distinct_datasets_alias.c.history_id == cls.id )
+        )
+        # label creates a scalar
+        return size_query.label( 'disk_size' )
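+
+        # Illustrative usage sketch: on an instance, `history.disk_size` runs
+        # the summing query above and yields an integer; on the class,
+        # `History.disk_size` is a labelled scalar subquery that can appear in
+        # a query, e.g. (with a hypothetical session)
+        #   sa_session.query( History.id, History.disk_size )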
+
+    @property
+    def disk_nice_size( self ):
+        """Returns human readable size of history on disk."""
+        return galaxy.util.nice_size( self.disk_size )
+
+    @property
+    def active_datasets_children_and_roles( self ):
+        if not hasattr(self, '_active_datasets_children_and_roles'):
+            db_session = object_session( self )
+            query = ( db_session.query( HistoryDatasetAssociation )
+                      .filter( HistoryDatasetAssociation.table.c.history_id == self.id )
+                      .filter( not_( HistoryDatasetAssociation.deleted ) )
+                      .order_by( HistoryDatasetAssociation.table.c.hid.asc() )
+                      .options( joinedload("children"),
+                                joinedload("dataset"),
+                                joinedload("dataset.actions"),
+                                joinedload("dataset.actions.role"),
+                                ))
+            self._active_datasets_children_and_roles = query.all()
+        return self._active_datasets_children_and_roles
+
+    @property
+    def active_contents( self ):
+        """ Return all active contents ordered by hid.
+        """
+        return self.contents_iter( types=[ "dataset", "dataset_collection" ], deleted=False, visible=True )
+
+    def contents_iter( self, **kwds ):
+        """
+        Fetch a filtered list of this history's contents.
+        """
+        default_contents_types = [
+            'dataset',
+        ]
+        types = kwds.get('types', default_contents_types)
+        iters = []
+        if 'dataset' in types:
+            iters.append( self.__dataset_contents_iter( **kwds ) )
+        if 'dataset_collection' in types:
+            iters.append( self.__collection_contents_iter( **kwds ) )
+        return galaxy.util.merge_sorted_iterables( operator.attrgetter( "hid" ), *iters )
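+
+        # Illustrative usage sketch (filter values are parsed with
+        # string_as_bool_or_none, so strings like 'True' also work):
+        #   history.contents_iter( types=[ "dataset" ], deleted=False,
+        #                          visible=True, ids=[ 1, 2, 3 ] )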
+
+    def __dataset_contents_iter(self, **kwds):
+        return self.__filter_contents( HistoryDatasetAssociation, **kwds )
+
+    def __filter_contents( self, content_class, **kwds ):
+        db_session = object_session( self )
+        assert db_session is not None
+        query = db_session.query( content_class ).filter( content_class.table.c.history_id == self.id )
+        query = query.order_by( content_class.table.c.hid.asc() )
+        deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) )
+        if deleted is not None:
+            query = query.filter( content_class.deleted == deleted )
+        visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) )
+        if visible is not None:
+            query = query.filter( content_class.visible == visible )
+        if 'ids' in kwds:
+            ids = kwds['ids']
+            max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH)
+            if len(ids) < max_in_filter_length:
+                query = query.filter( content_class.id.in_(ids) )
+            else:
+                query = (content for content in query if content.id in ids)
+        return query
+
+    def __collection_contents_iter( self, **kwds ):
+        return self.__filter_contents( HistoryDatasetCollectionAssociation, **kwds )
+
+    def copy_tags_from(self, target_user, source_history):
+        for src_shta in source_history.tags:
+            new_shta = src_shta.copy()
+            new_shta.user = target_user
+            self.tags.append(new_shta)
+
+
+class HistoryUserShareAssociation( object ):
+    def __init__( self ):
+        self.history = None
+        self.user = None
+
+
+class UserRoleAssociation( object ):
+    def __init__( self, user, role ):
+        self.user = user
+        self.role = role
+
+
+class GroupRoleAssociation( object ):
+    def __init__( self, group, role ):
+        self.group = group
+        self.role = role
+
+
+class Role( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'name' )
+    dict_element_visible_keys = ( 'id', 'name', 'description', 'type' )
+    private_id = None
+    types = Bunch(
+        PRIVATE='private',
+        SYSTEM='system',
+        USER='user',
+        ADMIN='admin',
+        SHARING='sharing'
+    )
+
+    def __init__( self, name="", description="", type="system", deleted=False ):
+        self.name = name
+        self.description = description
+        self.type = type
+        self.deleted = deleted
+
+
+class UserQuotaAssociation( object, Dictifiable ):
+    dict_element_visible_keys = ( 'user', )
+
+    def __init__( self, user, quota ):
+        self.user = user
+        self.quota = quota
+
+
+class GroupQuotaAssociation( object, Dictifiable ):
+    dict_element_visible_keys = ( 'group', )
+
+    def __init__( self, group, quota ):
+        self.group = group
+        self.quota = quota
+
+
+class Quota( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'name' )
+    dict_element_visible_keys = ( 'id', 'name', 'description', 'bytes', 'operation', 'display_amount', 'default', 'users', 'groups' )
+    valid_operations = ( '+', '-', '=' )
+
+    def __init__( self, name="", description="", amount=0, operation="=" ):
+        self.name = name
+        self.description = description
+        if amount is None:
+            self.bytes = -1
+        else:
+            self.bytes = amount
+        self.operation = operation
+
+    def get_amount( self ):
+        if self.bytes == -1:
+            return None
+        return self.bytes
+
+    def set_amount( self, amount ):
+        if amount is None:
+            self.bytes = -1
+        else:
+            self.bytes = amount
+    amount = property( get_amount, set_amount )
+
+    @property
+    def display_amount( self ):
+        if self.bytes == -1:
+            return "unlimited"
+        else:
+            return galaxy.util.nice_size( self.bytes )
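+
+    # Illustrative behaviour sketch: amount=None is stored as bytes == -1,
+    # which round-trips as an unlimited quota --
+    #   q = Quota( name="default", amount=None )
+    #   q.amount          # None
+    #   q.display_amount  # "unlimited"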
+
+
+class DefaultQuotaAssociation( Quota, Dictifiable ):
+    dict_element_visible_keys = ( 'type', )
+    types = Bunch(
+        UNREGISTERED='unregistered',
+        REGISTERED='registered'
+    )
+
+    def __init__( self, type, quota ):
+        assert type in self.types.__dict__.values(), 'Invalid type'
+        self.type = type
+        self.quota = quota
+
+
+class DatasetPermissions( object ):
+    def __init__( self, action, dataset, role=None, role_id=None ):
+        self.action = action
+        self.dataset = dataset
+        if role is not None:
+            self.role = role
+        else:
+            self.role_id = role_id
+
+
+class LibraryPermissions( object ):
+    def __init__( self, action, library_item, role ):
+        self.action = action
+        if isinstance( library_item, Library ):
+            self.library = library_item
+        else:
+            raise Exception( "Invalid Library specified: %s" % library_item.__class__.__name__ )
+        self.role = role
+
+
+class LibraryFolderPermissions( object ):
+    def __init__( self, action, library_item, role ):
+        self.action = action
+        if isinstance( library_item, LibraryFolder ):
+            self.folder = library_item
+        else:
+            raise Exception( "Invalid LibraryFolder specified: %s" % library_item.__class__.__name__ )
+        self.role = role
+
+
+class LibraryDatasetPermissions( object ):
+    def __init__( self, action, library_item, role ):
+        self.action = action
+        if isinstance( library_item, LibraryDataset ):
+            self.library_dataset = library_item
+        else:
+            raise Exception( "Invalid LibraryDataset specified: %s" % library_item.__class__.__name__ )
+        self.role = role
+
+
+class LibraryDatasetDatasetAssociationPermissions( object ):
+    def __init__( self, action, library_item, role ):
+        self.action = action
+        if isinstance( library_item, LibraryDatasetDatasetAssociation ):
+            self.library_dataset_dataset_association = library_item
+        else:
+            raise Exception( "Invalid LibraryDatasetDatasetAssociation specified: %s" % library_item.__class__.__name__ )
+        self.role = role
+
+
+class DefaultUserPermissions( object ):
+    def __init__( self, user, action, role ):
+        self.user = user
+        self.action = action
+        self.role = role
+
+
+class DefaultHistoryPermissions( object ):
+    def __init__( self, history, action, role ):
+        self.history = history
+        self.action = action
+        self.role = role
+
+
+class StorableObject( object ):
+
+    def __init__( self, id, **kwargs ):
+        self.id = id
+
+
+class Dataset( StorableObject ):
+    states = Bunch( NEW='new',
+                    UPLOAD='upload',
+                    QUEUED='queued',
+                    RUNNING='running',
+                    OK='ok',
+                    EMPTY='empty',
+                    ERROR='error',
+                    DISCARDED='discarded',
+                    PAUSED='paused',
+                    SETTING_METADATA='setting_metadata',
+                    FAILED_METADATA='failed_metadata')
+    # failed_metadata is only valid as a DatasetInstance state currently
+
+    non_ready_states = (
+        states.NEW,
+        states.UPLOAD,
+        states.QUEUED,
+        states.RUNNING,
+        states.SETTING_METADATA
+    )
+    ready_states = tuple( set( states.__dict__.values() ) - set( non_ready_states ) )
+    valid_input_states = tuple(
+        set( states.__dict__.values() ) - set( [states.ERROR, states.DISCARDED] )
+    )
+    terminal_states = (
+        states.OK,
+        states.EMPTY,
+        states.ERROR,
+        states.DISCARDED,
+        states.FAILED_METADATA,
+    )
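+
+    # Illustrative examples of the derived state groups above:
+    #   states.QUEUED in non_ready_states   # True
+    #   states.OK in ready_states           # True (everything not non-ready)
+    #   states.ERROR in valid_input_states  # False (ERROR, DISCARDED excluded)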
+
+    conversion_messages = Bunch( PENDING="pending",
+                                 NO_DATA="no data",
+                                 NO_CHROMOSOME="no chromosome",
+                                 NO_CONVERTER="no converter",
+                                 NO_TOOL="no tool",
+                                 DATA="data",
+                                 ERROR="error",
+                                 OK="ok" )
+
+    permitted_actions = get_permitted_actions( filter='DATASET' )
+    file_path = "/tmp/"
+    object_store = None  # This gets initialized in mapping.py (method init) by app.py
+    engine = None
+
+    def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True, uuid=None ):
+        super(Dataset, self).__init__(id=id)
+        self.state = state
+        self.deleted = False
+        self.purged = False
+        self.purgable = purgable
+        self.external_filename = external_filename
+        self.external_extra_files_path = None
+        self._extra_files_path = extra_files_path
+        self.file_size = file_size
+        if uuid is None:
+            self.uuid = uuid4()
+        else:
+            self.uuid = UUID(str(uuid))
+
+    def in_ready_state( self ):
+        return self.state in self.ready_states
+
+    def get_file_name( self ):
+        if not self.external_filename:
+            assert self.id is not None, "ID must be set before filename used (commit the object)"
+            assert self.object_store is not None, "Object Store has not been initialized for dataset %s" % self.id
+            filename = self.object_store.get_filename( self )
+            return filename
+        else:
+            filename = self.external_filename
+        # Make filename absolute
+        return os.path.abspath( filename )
+
+    def set_file_name( self, filename ):
+        if not filename:
+            self.external_filename = None
+        else:
+            self.external_filename = filename
+    file_name = property( get_file_name, set_file_name )
+
+    def get_extra_files_path( self ):
+        # Unlike get_file_name - external_extra_files_path is not backed by an
+        # actual database column so if SA instantiates this object - the
+        # attribute won't exist yet.
+        if not getattr( self, "external_extra_files_path", None ):
+            return self.object_store.get_filename( self, dir_only=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id )
+        else:
+            return os.path.abspath( self.external_extra_files_path )
+
+    def set_extra_files_path( self, extra_files_path ):
+        if not extra_files_path:
+            self.external_extra_files_path = None
+        else:
+            self.external_extra_files_path = extra_files_path
+    extra_files_path = property( get_extra_files_path, set_extra_files_path )
+
+    def _calculate_size( self ):
+        if self.external_filename:
+            try:
+                return os.path.getsize(self.external_filename)
+            except OSError:
+                return 0
+        else:
+            return self.object_store.size(self)
+
+    def get_size( self, nice_size=False ):
+        """Returns the size of the data on disk"""
+        if self.file_size:
+            if nice_size:
+                return galaxy.util.nice_size( self.file_size )
+            else:
+                return self.file_size
+        else:
+            if nice_size:
+                return galaxy.util.nice_size( self._calculate_size() )
+            else:
+                return self._calculate_size()
+
+    def set_size( self ):
+        """Returns the size of the data on disk"""
+        if not self.file_size:
+            self.file_size = self._calculate_size()
+
+    def get_total_size( self ):
+        if self.total_size is not None:
+            return self.total_size
+        # for backwards compatibility, set if unset
+        self.set_total_size()
+        db_session = object_session( self )
+        db_session.flush()
+        return self.total_size
+
+    def set_total_size( self ):
+        if self.file_size is None:
+            self.set_size()
+        self.total_size = self.file_size or 0
+        if self.object_store.exists(self, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True):
+            for root, dirs, files in os.walk( self.extra_files_path ):
+                self.total_size += sum( [ os.path.getsize( os.path.join( root, file ) ) for file in files if os.path.exists( os.path.join( root, file ) ) ] )
+
+    def has_data( self ):
+        """Detects whether there is any data"""
+        return self.get_size() > 0
+
+    def mark_deleted( self, include_children=True ):
+        self.deleted = True
+
+    def is_multi_byte( self ):
+        if not self.has_data():
+            return False
+        try:
+            return is_multi_byte( codecs.open( self.file_name, 'r', 'utf-8' ).read( 100 ) )
+        except UnicodeDecodeError:
+            return False
+    # FIXME: sqlalchemy will replace this
+
+    def _delete(self):
+        """Remove the file that corresponds to this data"""
+        self.object_store.delete(self)
+
+    @property
+    def user_can_purge( self ):
+        return self.purged is False \
+            and not bool( self.library_associations ) \
+            and len( self.history_associations ) == len( self.purged_history_associations )
+
+    def full_delete( self ):
+        """Remove the file and extra files, marks deleted and purged"""
+        # os.unlink( self.file_name )
+        self.object_store.delete(self)
+        if self.object_store.exists(self, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True):
+            self.object_store.delete(self, entire_dir=True, extra_dir=self._extra_files_path or "dataset_%d_files" % self.id, dir_only=True)
+        # if os.path.exists( self.extra_files_path ):
+        #     shutil.rmtree( self.extra_files_path )
+        # TODO: purge metadata files
+        self.deleted = True
+        self.purged = True
+
+    def get_access_roles( self, trans ):
+        roles = []
+        for dp in self.actions:
+            if dp.action == trans.app.security_agent.permitted_actions.DATASET_ACCESS.action:
+                roles.append( dp.role )
+        return roles
+
+    def get_manage_permissions_roles( self, trans ):
+        roles = []
+        for dp in self.actions:
+            if dp.action == trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+                roles.append( dp.role )
+        return roles
+
+    def has_manage_permissions_roles( self, trans ):
+        for dp in self.actions:
+            if dp.action == trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
+                return True
+        return False
+
+
+class DatasetInstance( object ):
+    """A base class for all 'dataset instances', HDAs, LDAs, etc"""
+    states = Dataset.states
+    conversion_messages = Dataset.conversion_messages
+    permitted_actions = Dataset.permitted_actions
+
+    def __init__( self, id=None, hid=None, name=None, info=None, blurb=None, peek=None, tool_version=None, extension=None,
+                  dbkey=None, metadata=None, history=None, dataset=None, deleted=False, designation=None,
+                  parent_id=None, validation_errors=None, visible=True, create_dataset=False, sa_session=None,
+                  extended_metadata=None, flush=True ):
+        self.name = name or "Unnamed dataset"
+        self.id = id
+        self.info = info
+        self.blurb = blurb
+        self.peek = peek
+        self.tool_version = tool_version
+        self.extension = extension
+        self.designation = designation
+        # set private variable to None here, since the attribute may be needed by MetadataCollection.__init__
+        self._metadata = None
+        self.metadata = metadata or dict()
+        self.extended_metadata = extended_metadata
+        if dbkey:  # dbkey is stored in metadata, only set if non-zero, or else we could clobber one supplied by input 'metadata'
+            self.dbkey = dbkey
+        self.deleted = deleted
+        self.visible = visible
+        # Relationships
+        if not dataset and create_dataset:
+            # Had to pass the sqlalchemy session in order to create a new dataset
+            dataset = Dataset( state=Dataset.states.NEW )
+            if flush:
+                sa_session.add( dataset )
+                sa_session.flush()
+        self.dataset = dataset
+        self.parent_id = parent_id
+        self.validation_errors = validation_errors
+
+    @property
+    def ext( self ):
+        return self.extension
+
+    def get_dataset_state( self ):
+        # self._state is currently only used when setting metadata externally
+        # leave setting the state as-is, we'll currently handle this specially in the external metadata code
+        if self._state:
+            return self._state
+        return self.dataset.state
+
+    def raw_set_dataset_state( self, state ):
+        if state != self.dataset.state:
+            self.dataset.state = state
+            return True
+        else:
+            return False
+
+    def set_dataset_state( self, state ):
+        if self.raw_set_dataset_state( state ):
+            object_session( self ).add( self.dataset )
+            object_session( self ).flush()  # flush here, because hda.flush() won't flush the Dataset object
+    state = property( get_dataset_state, set_dataset_state )
+
+    def get_file_name( self ):
+        return self.dataset.get_file_name()
+
+    def set_file_name(self, filename):
+        return self.dataset.set_file_name( filename )
+    file_name = property( get_file_name, set_file_name )
+
+    @property
+    def extra_files_path( self ):
+        return self.dataset.extra_files_path
+
+    @property
+    def datatype( self ):
+        return _get_datatypes_registry().get_datatype_by_extension( self.extension )
+
+    def get_metadata( self ):
+        # using weakref to store parent (to prevent circ ref),
+        #   does a Session.clear() cause parent to be invalidated, while still copying over this non-database attribute?
+        if not hasattr( self, '_metadata_collection' ) or self._metadata_collection.parent != self:
+            self._metadata_collection = galaxy.model.metadata.MetadataCollection( self )
+        return self._metadata_collection
+
+    def set_metadata( self, bunch ):
+        # Needs to accept a MetadataCollection, a bunch, or a dict
+        self._metadata = self.metadata.make_dict_copy( bunch )
+    metadata = property( get_metadata, set_metadata )
+    # These provide backwards compatibility with the old dbkey
+    # field in the database.  That field now maps to "old_dbkey" (see mapping.py).
+
+    def get_dbkey( self ):
+        dbkey = self.metadata.dbkey
+        if not isinstance(dbkey, list):
+            dbkey = [dbkey]
+        if dbkey in [[None], []]:
+            return "?"
+        return dbkey[0]
+
+    def set_dbkey( self, value ):
+        if "dbkey" in self.datatype.metadata_spec:
+            if not isinstance(value, list):
+                self.metadata.dbkey = [value]
+            else:
+                self.metadata.dbkey = value
+    dbkey = property( get_dbkey, set_dbkey )
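+
+    # Illustrative behaviour sketch, with `hda` a hypothetical DatasetInstance
+    # whose datatype declares 'dbkey' in its metadata_spec:
+    #   hda.dbkey = 'hg19'   # stored as metadata.dbkey == [ 'hg19' ]
+    #   hda.dbkey            # 'hg19'
+    # With no dbkey set, hda.dbkey returns '?'.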
+
+    def change_datatype( self, new_ext ):
+        self.clear_associated_files()
+        _get_datatypes_registry().change_datatype( self, new_ext )
+
+    def get_size( self, nice_size=False ):
+        """Returns the size of the data on disk"""
+        if nice_size:
+            return galaxy.util.nice_size( self.dataset.get_size() )
+        return self.dataset.get_size()
+
+    def set_size( self ):
+        """Returns the size of the data on disk"""
+        return self.dataset.set_size()
+
+    def get_total_size( self ):
+        return self.dataset.get_total_size()
+
+    def set_total_size( self ):
+        return self.dataset.set_total_size()
+
+    def has_data( self ):
+        """Detects whether there is any data"""
+        return self.dataset.has_data()
+
+    def get_raw_data( self ):
+        """Returns the full data. To stream it open the file_name and read/write as needed"""
+        return self.datatype.get_raw_data( self )
+
+    def write_from_stream( self, stream ):
+        """Writes data from a stream"""
+        self.datatype.write_from_stream(self, stream)
+
+    def set_raw_data( self, data ):
+        """Saves the data on the disc"""
+        self.datatype.set_raw_data(self, data)
+
+    def get_mime( self ):
+        """Returns the mime type of the data"""
+        try:
+            return _get_datatypes_registry().get_mimetype_by_extension( self.extension.lower() )
+        except AttributeError:
+            # extension is None
+            return 'data'
+
+    def is_multi_byte( self ):
+        """Data consists of multi-byte characters"""
+        return self.dataset.is_multi_byte()
+
+    def set_peek( self, is_multi_byte=False ):
+        return self.datatype.set_peek( self, is_multi_byte=is_multi_byte )
+
+    def init_meta( self, copy_from=None ):
+        return self.datatype.init_meta( self, copy_from=copy_from )
+
+    def set_meta( self, **kwd ):
+        self.clear_associated_files( metadata_safe=True )
+        return self.datatype.set_meta( self, **kwd )
+
+    def missing_meta( self, **kwd ):
+        return self.datatype.missing_meta( self, **kwd )
+
+    def as_display_type( self, type, **kwd ):
+        return self.datatype.as_display_type( self, type, **kwd )
+
+    def display_peek( self ):
+        return self.datatype.display_peek( self )
+
+    def display_name( self ):
+        return self.datatype.display_name( self )
+
+    def display_info( self ):
+        return self.datatype.display_info( self )
+
+    def get_converted_files_by_type( self, file_type ):
+        for assoc in self.implicitly_converted_datasets:
+            if not assoc.deleted and assoc.type == file_type:
+                if assoc.dataset:
+                    return assoc.dataset
+                return assoc.dataset_ldda
+        return None
+
+    def get_converted_dataset_deps(self, trans, target_ext):
+        """
+        Returns a dict mapping dependency names to HDAs.
+        """
+        # List of string of dependencies
+        try:
+            depends_list = trans.app.datatypes_registry.converter_deps[self.extension][target_ext]
+        except KeyError:
+            depends_list = []
+        return dict([ (dep, self.get_converted_dataset(trans, dep)) for dep in depends_list ])
+
+    def get_converted_dataset(self, trans, target_ext, target_context=None):
+        """
+        Return the converted dataset if it already exists; otherwise start the
+        conversion (resolving any converter dependencies first) and return the
+        new, still-pending dataset, or None while a dependency is still
+        converting. If unconvertible, raise NoConverterException.
+        """
+        # See if we can convert the dataset
+        if target_ext not in self.get_converter_types():
+            raise NoConverterException("Conversion from '%s' to '%s' not possible" % (self.extension, target_ext) )
+        deps = {}
+        # List of string of dependencies
+        try:
+            depends_list = trans.app.datatypes_registry.converter_deps[self.extension][target_ext]
+        except KeyError:
+            depends_list = []
+        # See if converted dataset already exists, either in metadata or in conversions.
+        converted_dataset = self.get_metadata_dataset( target_ext )
+        if converted_dataset:
+            return converted_dataset
+        converted_dataset = self.get_converted_files_by_type( target_ext )
+        if converted_dataset:
+            return converted_dataset
+        # Conversion is possible but hasn't been done yet, run converter.
+        # Check if we have dependencies
+        try:
+            for dependency in depends_list:
+                dep_dataset = self.get_converted_dataset(trans, dependency)
+                if dep_dataset is None:
+                    # None means converter is running first time
+                    return None
+                elif dep_dataset.state == Job.states.ERROR:
+                    raise ConverterDependencyException("A dependency (%s) was in an error state." % dependency)
+                elif dep_dataset.state != Job.states.OK:
+                    # Pending
+                    return None
+                deps[dependency] = dep_dataset
+        except NoConverterException:
+            raise NoConverterException("A dependency (%s) is missing a converter." % dependency)
+        except KeyError:
+            pass  # No deps
+        new_dataset = next(iter(self.datatype.convert_dataset( trans, self, target_ext, return_output=True, visible=False, deps=deps, target_context=target_context ).values()))
+        new_dataset.name = self.name
+        self.copy_attributes( new_dataset )
+        assoc = ImplicitlyConvertedDatasetAssociation( parent=self, file_type=target_ext, dataset=new_dataset, metadata_safe=False )
+        session = trans.sa_session
+        session.add( new_dataset )
+        session.add( assoc )
+        session.flush()
+        return new_dataset
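+
+        # Illustrative polling sketch, with `trans` a hypothetical transaction
+        # and `hda` a hypothetical DatasetInstance:
+        #   converted = hda.get_converted_dataset( trans, 'bigwig' )
+        # The first call launches the converter job and returns the new
+        # (typically still queued/running) dataset; NoConverterException is
+        # raised if no conversion path exists.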
+
+    def copy_attributes( self, new_dataset ):
+        """
+        Copies attributes to a new dataset; used for implicit conversions
+        """
+        pass
+
+    def get_metadata_dataset( self, dataset_ext ):
+        """
+        Returns an HDA that points to a metadata file which contains
+        converted data with the requested extension.
+        """
+        for name, value in self.metadata.items():
+            # HACK: MetadataFile objects do not have a type/ext, so need to use metadata name
+            # to determine type.
+            if dataset_ext == 'bai' and name == 'bam_index' and isinstance( value, MetadataFile ):
+                # HACK: MetadataFile objects cannot be used by tools, so return
+                # a fake HDA that points to metadata file.
+                fake_dataset = Dataset( state=Dataset.states.OK, external_filename=value.file_name )
+                fake_hda = HistoryDatasetAssociation( dataset=fake_dataset )
+                return fake_hda
+
+    def clear_associated_files( self, metadata_safe=False, purge=False ):
+        raise Exception( "Unimplemented" )
+
+    def get_child_by_designation(self, designation):
+        for child in self.children:
+            if child.designation == designation:
+                return child
+        return None
+
+    def get_converter_types(self):
+        return self.datatype.get_converter_types( self, _get_datatypes_registry() )
+
+    def can_convert_to(self, format):
+        return format in self.get_converter_types()
+
+    def find_conversion_destination( self, accepted_formats, **kwd ):
+        """Returns ( target_ext, existing converted dataset )"""
+        return self.datatype.find_conversion_destination( self, accepted_formats, _get_datatypes_registry(), **kwd )
+
+    def add_validation_error( self, validation_error ):
+        self.validation_errors.append( validation_error )
+
+    def extend_validation_errors( self, validation_errors ):
+        self.validation_errors.extend(validation_errors)
+
+    def mark_deleted( self, include_children=True ):
+        self.deleted = True
+        if include_children:
+            for child in self.children:
+                child.mark_deleted()
+
+    def mark_undeleted( self, include_children=True ):
+        self.deleted = False
+        if include_children:
+            for child in self.children:
+                child.mark_undeleted()
+
+    def mark_unhidden( self, include_children=True ):
+        self.visible = True
+        if include_children:
+            for child in self.children:
+                child.mark_unhidden()
+
+    def undeletable( self ):
+        if self.purged:
+            return False
+        return True
+
+    @property
+    def is_ok(self):
+        return self.state == self.states.OK
+
+    @property
+    def is_pending( self ):
+        """
+        Return True if the dataset is neither ready nor in error
+        """
+        return self.state in ( self.states.NEW, self.states.UPLOAD,
+                               self.states.QUEUED, self.states.RUNNING,
+                               self.states.SETTING_METADATA )
+
+    @property
+    def source_library_dataset( self ):
+        def get_source( dataset ):
+            if isinstance( dataset, LibraryDatasetDatasetAssociation ):
+                if dataset.library_dataset:
+                    return ( dataset, dataset.library_dataset )
+            if dataset.copied_from_library_dataset_dataset_association:
+                source = get_source( dataset.copied_from_library_dataset_dataset_association )
+                if source:
+                    return source
+            if dataset.copied_from_history_dataset_association:
+                source = get_source( dataset.copied_from_history_dataset_association )
+                if source:
+                    return source
+            return ( None, None )
+        return get_source( self )
+
+    @property
+    def source_dataset_chain( self ):
+        def _source_dataset_chain( dataset, lst ):
+            try:
+                cp_from_ldda = dataset.copied_from_library_dataset_dataset_association
+                if cp_from_ldda:
+                    lst.append( (cp_from_ldda, "(Data Library)") )
+                    return _source_dataset_chain( cp_from_ldda, lst )
+            except Exception as e:
+                log.warning( e )
+            try:
+                cp_from_hda = dataset.copied_from_history_dataset_association
+                if cp_from_hda:
+                    lst.append( (cp_from_hda, cp_from_hda.history.name) )
+                    return _source_dataset_chain( cp_from_hda, lst )
+            except Exception as e:
+                log.warning( e )
+            return lst
+        return _source_dataset_chain( self, [] )
+
+    @property
+    def creating_job( self ):
+        creating_job_associations = None
+        if self.creating_job_associations:
+            creating_job_associations = self.creating_job_associations
+        else:
+            inherit_chain = self.source_dataset_chain
+            if inherit_chain:
+                creating_job_associations = inherit_chain[-1][0].creating_job_associations
+        if creating_job_associations:
+            return creating_job_associations[0].job
+        return None
+
+    def get_display_applications( self, trans ):
+        return self.datatype.get_display_applications_by_dataset( self, trans )
+
+    def get_visualizations( self ):
+        return self.datatype.get_visualizations( self )
+
+    def get_datasources( self, trans ):
+        """
+        Returns datasources for this dataset; if datasources are not yet
+        available because indexing is in progress, indexing is started.
+        The return value is a dictionary mapping each datasource type to
+        { "name": <datasource_name>, "message": <indexing_message> }.
+        """
+        data_sources_dict = {}
+        msg = None
+        for source_type, source_list in self.datatype.data_sources.items():
+            data_source = None
+            if source_type == "data_standalone":
+                # Nothing to do.
+                msg = None
+                data_source = source_list
+            else:
+                # Convert.
+                if isinstance( source_list, string_types ):
+                    source_list = [ source_list ]
+
+                # Loop through sources until viable one is found.
+                for source in source_list:
+                    msg = self.convert_dataset( trans, source )
+                    # No message or PENDING means that source is viable. No
+                    # message indicates conversion was done and is successful.
+                    if not msg or msg == self.conversion_messages.PENDING:
+                        data_source = source
+                        break
+
+            # Store msg.
+            data_sources_dict[ source_type ] = { "name": data_source, "message": msg }
+
+        return data_sources_dict
+
+    def convert_dataset( self, trans, target_type ):
+        """
+        Converts a dataset to the target_type and returns a message indicating
+        status of the conversion. None is returned to indicate that dataset
+        was converted successfully.
+        """
+
+        # Get converted dataset; this will start the conversion if necessary.
+        try:
+            converted_dataset = self.get_converted_dataset( trans, target_type )
+        except NoConverterException:
+            return self.conversion_messages.NO_CONVERTER
+        except ConverterDependencyException as dep_error:
+            return { 'kind': self.conversion_messages.ERROR, 'message': dep_error.value }
+
+        # Check dataset state and return any messages.
+        msg = None
+        if converted_dataset and converted_dataset.state == Dataset.states.ERROR:
+            job_id = trans.sa_session.query( JobToOutputDatasetAssociation ) \
+                .filter_by( dataset_id=converted_dataset.id ).first().job_id
+            job = trans.sa_session.query( Job ).get( job_id )
+            msg = { 'kind': self.conversion_messages.ERROR, 'message': job.stderr }
+        elif not converted_dataset or converted_dataset.state != Dataset.states.OK:
+            msg = self.conversion_messages.PENDING
+
+        return msg
+
+
+class HistoryDatasetAssociation( DatasetInstance, Dictifiable, UsesAnnotations, HasName ):
+    """
+    Resource class that creates a relation between a dataset and a user history.
+    """
+
+    def __init__( self,
+                  hid=None,
+                  history=None,
+                  copied_from_history_dataset_association=None,
+                  copied_from_library_dataset_dataset_association=None,
+                  sa_session=None,
+                  **kwd ):
+        """
+        Create a new HDA and associate it with the given history.
+        """
+        # FIXME: sa_session must be passed to DatasetInstance if the create_dataset
+        # parameter is True so that the new object can be flushed.  Is there a better way?
+        DatasetInstance.__init__( self, sa_session=sa_session, **kwd )
+        self.hid = hid
+        # Relationships
+        self.history = history
+        self.copied_from_history_dataset_association = copied_from_history_dataset_association
+        self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
+
+    def copy( self, copy_children=False, parent_id=None ):
+        """
+        Create a copy of this HDA.
+        """
+        hda = HistoryDatasetAssociation( hid=self.hid,
+                                         name=self.name,
+                                         info=self.info,
+                                         blurb=self.blurb,
+                                         peek=self.peek,
+                                         tool_version=self.tool_version,
+                                         extension=self.extension,
+                                         dbkey=self.dbkey,
+                                         dataset=self.dataset,
+                                         visible=self.visible,
+                                         deleted=self.deleted,
+                                         parent_id=parent_id,
+                                         copied_from_history_dataset_association=self )
+        # update init non-keywords as well
+        hda.purged = self.purged
+
+        object_session( self ).add( hda )
+        object_session( self ).flush()
+        hda.set_size()
+        # Need to set after flushed, as MetadataFiles require dataset.id
+        hda.metadata = self.metadata
+        if copy_children:
+            for child in self.children:
+                child.copy( copy_children=copy_children, parent_id=hda.id )
+        if not self.datatype.copy_safe_peek:
+            # In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
+            hda.set_peek()
+        object_session( self ).flush()
+        return hda
+
+    def copy_attributes( self, new_dataset ):
+        new_dataset.hid = self.hid
+
+    def to_library_dataset_dataset_association( self, trans, target_folder,
+                                                replace_dataset=None, parent_id=None, user=None, roles=None, ldda_message='' ):
+        """
+        Copy this HDA to a library, optionally replacing an existing LDDA.
+        """
+        if replace_dataset:
+            # The replace_dataset param ( when not None ) refers to a LibraryDataset that
+            #   is being replaced with a new version.
+            library_dataset = replace_dataset
+        else:
+            # If replace_dataset is None, the Library level permissions will be taken from the folder and
+            #   applied to the new LibraryDataset, and the current user's DefaultUserPermissions will be applied
+            #   to the associated Dataset.
+            library_dataset = LibraryDataset( folder=target_folder, name=self.name, info=self.info )
+            object_session( self ).add( library_dataset )
+            object_session( self ).flush()
+        if not user:
+            # This should never happen since users must be authenticated to upload to a data library
+            user = self.history.user
+        ldda = LibraryDatasetDatasetAssociation( name=self.name,
+                                                 info=self.info,
+                                                 blurb=self.blurb,
+                                                 peek=self.peek,
+                                                 tool_version=self.tool_version,
+                                                 extension=self.extension,
+                                                 dbkey=self.dbkey,
+                                                 dataset=self.dataset,
+                                                 library_dataset=library_dataset,
+                                                 visible=self.visible,
+                                                 deleted=self.deleted,
+                                                 parent_id=parent_id,
+                                                 copied_from_history_dataset_association=self,
+                                                 user=user )
+        object_session( self ).add( ldda )
+        object_session( self ).flush()
+        # If roles were selected on the upload form, restrict access to the Dataset to those roles
+        roles = roles or []
+        for role in roles:
+            dp = trans.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action,
+                                                 ldda.dataset, role )
+            trans.sa_session.add( dp )
+            trans.sa_session.flush()
+        # Must set metadata after ldda flushed, as MetadataFiles require ldda.id
+        ldda.metadata = self.metadata
+        if ldda_message:
+            ldda.message = ldda_message
+        if not replace_dataset:
+            target_folder.add_library_dataset( library_dataset, genome_build=ldda.dbkey )
+            object_session( self ).add( target_folder )
+            object_session( self ).flush()
+        library_dataset.library_dataset_dataset_association_id = ldda.id
+        object_session( self ).add( library_dataset )
+        object_session( self ).flush()
+        for child in self.children:
+            child.to_library_dataset_dataset_association( trans,
+                                                          target_folder=target_folder,
+                                                          replace_dataset=replace_dataset,
+                                                          parent_id=ldda.id,
+                                                          user=ldda.user )
+        if not self.datatype.copy_safe_peek:
+            # In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
+            ldda.set_peek()
+        object_session( self ).flush()
+        return ldda
+
+    def clear_associated_files( self, metadata_safe=False, purge=False ):
+        """
+        """
+        # metadata_safe = True means to only clear when assoc.metadata_safe == False
+        for assoc in self.implicitly_converted_datasets:
+            if not assoc.deleted and ( not metadata_safe or not assoc.metadata_safe ):
+                assoc.clear( purge=purge )
+        for assoc in self.implicitly_converted_parent_datasets:
+            assoc.clear( purge=purge, delete_dataset=False )
+
+    def get_access_roles( self, trans ):
+        """
+        Return the access roles associated with this HDA's dataset.
+        """
+        return self.dataset.get_access_roles( trans )
+
+    def quota_amount( self, user ):
+        """
+        Return the disk space used for this HDA relevant to user quotas.
+
+        If the user has multiple instances of this dataset, it will not affect their
+        disk usage statistic.
+        """
+        rval = 0
+        # Anon users are handled just by their single history size.
+        if not user:
+            return rval
+        # Gets an HDA and its children's disk usage, if the user does not already
+        #   have an association of the same dataset
+        if not self.dataset.library_associations and not self.purged and not self.dataset.purged:
+            for hda in self.dataset.history_associations:
+                if hda.id == self.id:
+                    continue
+                if not hda.purged and hda.history and hda.history.user and hda.history.user == user:
+                    break
+            else:
+                rval += self.get_total_size()
+        for child in self.children:
+            rval += child.get_disk_usage( user )
+        return rval
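+
+        # Illustrative example: if this user already has another non-purged
+        # copy of the same underlying Dataset in one of their histories, the
+        # loop above breaks and this HDA contributes 0 bytes to their quota.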
+
+    def to_dict( self, view='collection', expose_dataset_path=False ):
+        """
+        Return attributes of this HDA that are exposed using the API.
+        """
+        # Since this class is a proxy to rather complex attributes we want to
+        # display in other objects, we can't use the simpler method used by
+        # other model classes.
+        hda = self
+        rval = dict( id=hda.id,
+                     hda_ldda='hda',
+                     uuid=( lambda uuid: str( uuid ) if uuid else None )( hda.dataset.uuid ),
+                     hid=hda.hid,
+                     file_ext=hda.ext,
+                     peek=( lambda hda: hda.display_peek() if hda.peek and hda.peek != 'no peek' else None )( hda ),
+                     model_class=self.__class__.__name__,
+                     name=hda.name,
+                     deleted=hda.deleted,
+                     purged=hda.purged,
+                     visible=hda.visible,
+                     state=hda.state,
+                     history_content_type=hda.history_content_type,
+                     file_size=int( hda.get_size() ),
+                     create_time=hda.create_time.isoformat(),
+                     update_time=hda.update_time.isoformat(),
+                     data_type=hda.datatype.__class__.__module__ + '.' + hda.datatype.__class__.__name__,
+                     genome_build=hda.dbkey,
+                     misc_info=hda.info.strip() if isinstance( hda.info, string_types ) else hda.info,
+                     misc_blurb=hda.blurb )
+
+        # add tags string list
+        tags_str_list = []
+        for tag in self.tags:
+            tag_str = tag.user_tname
+            if tag.value is not None:
+                tag_str += ":" + tag.user_value
+            tags_str_list.append( tag_str )
+        rval[ 'tags' ] = tags_str_list
+
+        if hda.copied_from_library_dataset_dataset_association is not None:
+            rval['copied_from_ldda_id'] = hda.copied_from_library_dataset_dataset_association.id
+
+        if hda.history is not None:
+            rval['history_id'] = hda.history.id
+
+        if hda.extended_metadata is not None:
+            rval['extended_metadata'] = hda.extended_metadata.data
+
+        rval[ 'peek' ] = to_unicode( hda.display_peek() )
+
+        for name, spec in hda.metadata.spec.items():
+            val = hda.metadata.get( name )
+            if isinstance( val, MetadataFile ):
+                # only when explicitly set: fetching filepaths can be expensive
+                if not expose_dataset_path:
+                    continue
+                val = val.file_name
+            # If no value for metadata, look in datatype for metadata.
+            elif val is None and hasattr( hda.datatype, name ):
+                val = getattr( hda.datatype, name )
+            rval['metadata_' + name] = val
+        return rval
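+    # Sketch of the returned dict's shape ( values are illustrative only ):
+    #   { 'id': 42, 'hda_ldda': 'hda', 'name': 'my dataset', 'state': 'ok',
+    #     'file_ext': 'bed', ..., 'tags': [ 'group:mine' ],
+    #     'metadata_chromCol': 1, 'metadata_startCol': 2, ... }
+    # Each entry of the metadata spec is flattened into a top-level
+    # 'metadata_<name>' key.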
+
+    @property
+    def history_content_type( self ):
+        return "dataset"
+
+    # TODO: down into DatasetInstance
+    content_type = u'dataset'
+
+    @hybrid.hybrid_property
+    def type_id( self ):
+        return u'-'.join([ self.content_type, str( self.id ) ])
+
+    @type_id.expression
+    def type_id( cls ):
+        return (( type_coerce( cls.content_type, types.Unicode ) + u'-' +
+                  type_coerce( cls.id, types.Unicode ) ).label( 'type_id' ))
+
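+    # Usage sketch for the type_id hybrid property above: on an instance it
+    # evaluates in Python, yielding e.g. u'dataset-42'; referenced on the class
+    # it produces a SQL expression instead, so ( hypothetically ):
+    #   sa_session.query( HistoryDatasetAssociation ).filter(
+    #       HistoryDatasetAssociation.type_id == u'dataset-42' )
+    # compiles the concatenation and the comparison into the query itself.
+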
+    def copy_tags_from( self, target_user, source_hda ):
+        """
+        Copy tags from `source_hda` to this HDA and assign them the user `target_user`.
+        """
+        for source_tag_assoc in source_hda.tags:
+            new_tag_assoc = source_tag_assoc.copy()
+            new_tag_assoc.user = target_user
+            self.tags.append( new_tag_assoc )
+
+
+class HistoryDatasetAssociationDisplayAtAuthorization( object ):
+    def __init__( self, hda=None, user=None, site=None ):
+        self.history_dataset_association = hda
+        self.user = user
+        self.site = site
+
+
+class HistoryDatasetAssociationSubset( object ):
+    def __init__(self, hda, subset, location):
+        self.hda = hda
+        self.subset = subset
+        self.location = location
+
+
+class Library( object, Dictifiable, HasName ):
+    permitted_actions = get_permitted_actions( filter='LIBRARY' )
+    dict_collection_visible_keys = ( 'id', 'name' )
+    dict_element_visible_keys = ( 'id', 'deleted', 'name', 'description', 'synopsis', 'root_folder_id', 'create_time' )
+
+    def __init__( self, name=None, description=None, synopsis=None, root_folder=None ):
+        self.name = name or "Unnamed library"
+        self.description = description
+        self.synopsis = synopsis
+        self.root_folder = root_folder
+
+    def to_dict( self, view='collection', value_mapper=None ):
+        """
+        We prepend an F to folders.
+        """
+        rval = super( Library, self ).to_dict( view=view, value_mapper=value_mapper )
+        if 'root_folder_id' in rval:
+            rval[ 'root_folder_id' ] = 'F' + str(rval[ 'root_folder_id' ])
+        return rval
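+    # Illustrative: a root folder with database id 1 serializes as
+    # rval[ 'root_folder_id' ] == 'F1'.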
+
+    def get_active_folders( self, folder, folders=None ):
+        # TODO: should we make sure the library is not deleted?
+        def sort_by_attr( seq, attr ):
+            """
+            Sort a sequence of objects by one of their attributes.
+            Arguments:
+            seq  - the list or other sequence ( possibly immutable ) of objects to sort
+            attr - the name of the attribute to sort by
+            """
+            # Use the "Schwartzian transform"
+            # Create the auxiliary list of tuples where every i-th tuple has form
+            # (seq[i].attr, i, seq[i]) and sort it. The second item of tuple is needed not
+            # only to provide stable sorting, but mainly to eliminate comparison of objects
+            # (which can be expensive or prohibited) in case of equal attribute values.
+            intermed = sorted( zip( ( getattr( _, attr ) for _ in seq ), range( len( seq ) ), seq ) )
+            return [ _[-1] for _ in intermed ]
+        if folders is None:
+            active_folders = [ folder ]
+        else:
+            active_folders = folders
+        for active_folder in folder.active_folders:
+            active_folders.extend( self.get_active_folders( active_folder, folders ) )
+        return sort_by_attr( active_folders, 'id' )
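+    # sort_by_attr sketch ( hypothetical folders f1, f2, f3 with ids 1, 2, 3 ):
+    #   sort_by_attr( [ f3, f1, f2 ], 'id' )
+    # decorates to [ (3, 0, f3), (1, 1, f1), (2, 2, f2) ], sorts the tuples and
+    # strips the decoration, returning [ f1, f2, f3 ] without ever comparing
+    # the folder objects themselves.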
+
+    def get_info_association( self, restrict=False, inherited=False ):
+        if self.info_association:
+            if not inherited or self.info_association[0].inheritable:
+                return self.info_association[0], inherited
+            else:
+                return None, inherited
+        return None, inherited
+
+    def get_template_widgets( self, trans, get_contents=True ):
+        # See if we have any associated templates - the returned value for
+        # inherited is not applicable at the library level.  The get_contents
+        # param is passed by callers that are inheriting a template - these
+        # are usually new library datasets for which we want to include template
+        # fields on the upload form, but not necessarily the contents of the
+        # inherited template saved for the parent.
+        info_association, inherited = self.get_info_association()
+        if info_association:
+            template = info_association.template
+            if get_contents:
+                # See if we have any field contents
+                info = info_association.info
+                if info:
+                    return template.get_widgets( trans.user, contents=info.content )
+            return template.get_widgets( trans.user )
+        return []
+
+    def get_access_roles( self, trans ):
+        roles = []
+        for lp in self.actions:
+            if lp.action == trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action:
+                roles.append( lp.role )
+        return roles
+
+
+class LibraryFolder( object, Dictifiable, HasName ):
+    dict_element_visible_keys = ( 'id', 'parent_id', 'name', 'description', 'item_count', 'genome_build', 'update_time', 'deleted' )
+
+    def __init__( self, name=None, description=None, item_count=0, order_id=None ):
+        self.name = name or "Unnamed folder"
+        self.description = description
+        self.item_count = item_count
+        self.order_id = order_id
+        self.genome_build = None
+
+    def add_library_dataset( self, library_dataset, genome_build=None ):
+        library_dataset.folder_id = self.id
+        library_dataset.order_id = self.item_count
+        self.item_count += 1
+        if genome_build not in [None, '?']:
+            self.genome_build = genome_build
+
+    def add_folder( self, folder ):
+        folder.parent_id = self.id
+        folder.order_id = self.item_count
+        self.item_count += 1
+
+    def get_info_association( self, restrict=False, inherited=False ):
+        # If restrict is True, we will return this folder's info_association, not inheriting.
+        # If restrict is False, we'll return the next available info_association in the
+        # inheritable hierarchy if it is "inheritable".  True is also returned if the
+        # info_association was inherited and False if not.  This enables us to eliminate
+        # displaying any contents of the inherited template.
+        if self.info_association:
+            if not inherited or self.info_association[0].inheritable:
+                return self.info_association[0], inherited
+            else:
+                return None, inherited
+        if restrict:
+            return None, inherited
+        if self.parent:
+            return self.parent.get_info_association( inherited=True )
+        if self.library_root:
+            return self.library_root[0].get_info_association( inherited=True )
+        return None, inherited
+
+    def get_template_widgets( self, trans, get_contents=True ):
+        # See if we have any associated templates.  The get_contents
+        # param is passed by callers that are inheriting a template - these
+        # are usually new library datasets for which we want to include template
+        # fields on the upload form.
+        info_association, inherited = self.get_info_association()
+        if info_association:
+            if inherited:
+                template = info_association.template.current.latest_form
+            else:
+                template = info_association.template
+            # See if we have any field contents, but only if the info_association was
+            # not inherited ( we do not want to display the inherited contents ).
+            # (gvk: 8/30/10) Based on conversations with Dan, we agreed to ALWAYS inherit
+            # contents.  We'll use this behavior until we hear from the community that
+            # contents should not be inherited.  If we don't hear anything for a while,
+            # eliminate the old commented out behavior.
+            # if not inherited and get_contents:
+            if get_contents:
+                info = info_association.info
+                if info:
+                    return template.get_widgets( trans.user, info.content )
+            else:
+                return template.get_widgets( trans.user )
+        return []
+
+    @property
+    def activatable_library_datasets( self ):
+        # This needs to be a list
+        return [ ld for ld in self.datasets if ld.library_dataset_dataset_association and not ld.library_dataset_dataset_association.dataset.deleted ]
+
+    def to_dict( self, view='collection', value_mapper=None ):
+        rval = super( LibraryFolder, self ).to_dict( view=view, value_mapper=value_mapper )
+        info_association, inherited = self.get_info_association()
+        if info_association:
+            if inherited:
+                template = info_association.template.current.latest_form
+            else:
+                template = info_association.template
+            rval['data_template'] = template.name
+        rval['library_path'] = self.library_path
+        rval['parent_library_id'] = self.parent_library.id
+        return rval
+
+    @property
+    def library_path(self):
+        l_path = []
+        f = self
+        while f.parent:
+            l_path.insert(0, f.name)
+            f = f.parent
+        return l_path
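+    # Illustrative: for a folder nested as <root> / 'A' / 'B', library_path on
+    # 'B' returns [ 'A', 'B' ] - the root folder itself is excluded because the
+    # walk stops at the folder without a parent.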
+
+    @property
+    def parent_library( self ):
+        f = self
+        while f.parent:
+            f = f.parent
+        return f.library_root[0]
+
+
+class LibraryDataset( object ):
+    # This class acts as a proxy to the currently selected LDDA
+    upload_options = [ ( 'upload_file', 'Upload files' ),
+                       ( 'upload_directory', 'Upload directory of files' ),
+                       ( 'upload_paths', 'Upload files from filesystem paths' ),
+                       ( 'import_from_history', 'Import datasets from your current history' ) ]
+
+    def __init__( self, folder=None, order_id=None, name=None, info=None, library_dataset_dataset_association=None, **kwd ):
+        self.folder = folder
+        self.order_id = order_id
+        self.name = name
+        self.info = info
+        self.library_dataset_dataset_association = library_dataset_dataset_association
+
+    def set_library_dataset_dataset_association( self, ldda ):
+        self.library_dataset_dataset_association = ldda
+        ldda.library_dataset = self
+        object_session( self ).add_all( ( ldda, self ) )
+        object_session( self ).flush()
+
+    def get_info( self ):
+        if self.library_dataset_dataset_association:
+            return self.library_dataset_dataset_association.info
+        elif self._info:
+            return self._info
+        else:
+            return 'no info'
+
+    def set_info( self, info ):
+        self._info = info
+    info = property( get_info, set_info )
+
+    def get_name( self ):
+        if self.library_dataset_dataset_association:
+            return self.library_dataset_dataset_association.name
+        elif self._name:
+            return self._name
+        else:
+            return 'Unnamed dataset'
+
+    def set_name( self, name ):
+        self._name = name
+    name = property( get_name, set_name )
+
+    def display_name( self ):
+        return self.library_dataset_dataset_association.display_name()
+
+    def to_dict( self, view='collection' ):
+        # Since this class is a proxy to rather complex attributes we want to
+        # display in other objects, we can't use the simpler method used by
+        # other model classes.
+        ldda = self.library_dataset_dataset_association
+        template_data = {}
+        for temp_info in ldda.info_association:
+            template = temp_info.template
+            content = temp_info.info.content
+            tmp_dict = {}
+            for field in template.fields:
+                tmp_dict[field['label']] = content[field['name']]
+            template_data[template.name] = tmp_dict
+
+        rval = dict( id=self.id,
+                     ldda_id=ldda.id,
+                     parent_library_id=self.folder.parent_library.id,
+                     folder_id=self.folder_id,
+                     model_class=self.__class__.__name__,
+                     state=ldda.state,
+                     name=ldda.name,
+                     file_name=ldda.file_name,
+                     uploaded_by=ldda.user.email,
+                     message=ldda.message,
+                     date_uploaded=ldda.create_time.isoformat(),
+                     file_size=int( ldda.get_size() ),
+                     file_ext=ldda.ext,
+                     data_type=ldda.datatype.__class__.__module__ + '.' + ldda.datatype.__class__.__name__,
+                     genome_build=ldda.dbkey,
+                     misc_info=ldda.info,
+                     misc_blurb=ldda.blurb,
+                     peek=( lambda ldda: ldda.display_peek() if ldda.peek and ldda.peek != 'no peek' else None )( ldda ),
+                     template_data=template_data )
+        if ldda.dataset.uuid is None:
+            rval['uuid'] = None
+        else:
+            rval['uuid'] = str(ldda.dataset.uuid)
+        for name, spec in ldda.metadata.spec.items():
+            val = ldda.metadata.get( name )
+            if isinstance( val, MetadataFile ):
+                val = val.file_name
+            elif isinstance( val, list ):
+                val = ', '.join( [str(v) for v in val] )
+            rval['metadata_' + name] = val
+        return rval
+
+
+class LibraryDatasetDatasetAssociation( DatasetInstance, HasName ):
+    def __init__( self,
+                  copied_from_history_dataset_association=None,
+                  copied_from_library_dataset_dataset_association=None,
+                  library_dataset=None,
+                  user=None,
+                  sa_session=None,
+                  **kwd ):
+        # FIXME: sa_session must be passed to DatasetInstance if the create_dataset
+        # parameter in kwd is True so that the new object can be flushed.  Is there a better way?
+        DatasetInstance.__init__( self, sa_session=sa_session, **kwd )
+        if copied_from_history_dataset_association:
+            self.copied_from_history_dataset_association_id = copied_from_history_dataset_association.id
+        if copied_from_library_dataset_dataset_association:
+            self.copied_from_library_dataset_dataset_association_id = copied_from_library_dataset_dataset_association.id
+        self.library_dataset = library_dataset
+        self.user = user
+
+    def to_history_dataset_association( self, target_history, parent_id=None, add_to_history=False ):
+        hda = HistoryDatasetAssociation( name=self.name,
+                                         info=self.info,
+                                         blurb=self.blurb,
+                                         peek=self.peek,
+                                         tool_version=self.tool_version,
+                                         extension=self.extension,
+                                         dbkey=self.dbkey,
+                                         dataset=self.dataset,
+                                         visible=self.visible,
+                                         deleted=self.deleted,
+                                         parent_id=parent_id,
+                                         copied_from_library_dataset_dataset_association=self,
+                                         history=target_history )
+        object_session( self ).add( hda )
+        object_session( self ).flush()
+        hda.metadata = self.metadata  # need to set after flushed, as MetadataFiles require dataset.id
+        if add_to_history and target_history:
+            target_history.add_dataset( hda )
+        for child in self.children:
+            child.to_history_dataset_association( target_history=target_history, parent_id=hda.id, add_to_history=False )
+        if not self.datatype.copy_safe_peek:
+            hda.set_peek()  # in some instances peek relies on dataset_id, e.g. gmaj.zip for viewing MAFs
+        object_session( self ).flush()
+        return hda
+
+    def copy( self, copy_children=False, parent_id=None, target_folder=None ):
+        ldda = LibraryDatasetDatasetAssociation( name=self.name,
+                                                 info=self.info,
+                                                 blurb=self.blurb,
+                                                 peek=self.peek,
+                                                 tool_version=self.tool_version,
+                                                 extension=self.extension,
+                                                 dbkey=self.dbkey,
+                                                 dataset=self.dataset,
+                                                 visible=self.visible,
+                                                 deleted=self.deleted,
+                                                 parent_id=parent_id,
+                                                 copied_from_library_dataset_dataset_association=self,
+                                                 folder=target_folder )
+        object_session( self ).add( ldda )
+        object_session( self ).flush()
+        # Need to set after flushed, as MetadataFiles require dataset.id
+        ldda.metadata = self.metadata
+        if copy_children:
+            for child in self.children:
+                child.copy( copy_children=copy_children, parent_id=ldda.id )
+        if not self.datatype.copy_safe_peek:
+            # In some instances peek relies on dataset_id, e.g. gmaj.zip for viewing MAFs
+            ldda.set_peek()
+        object_session( self ).flush()
+        return ldda
+
+    def clear_associated_files( self, metadata_safe=False, purge=False ):
+        return
+
+    def get_access_roles( self, trans ):
+        return self.dataset.get_access_roles( trans )
+
+    def get_manage_permissions_roles( self, trans ):
+        return self.dataset.get_manage_permissions_roles( trans )
+
+    def has_manage_permissions_roles( self, trans ):
+        return self.dataset.has_manage_permissions_roles( trans )
+
+    def get_info_association( self, restrict=False, inherited=False ):
+        # If restrict is True, we will return this ldda's info_association whether or not
+        # one exists ( None is returned when it does not ).  If restrict is False,
+        # we'll return the next available info_association in the inheritable hierarchy.
+        # True is also returned if the info_association was inherited, and False if not.
+        # This enables us to eliminate displaying any contents of the inherited template.
+        # SM: Accessing self.info_association can cause a query to be emitted
+        if self.info_association:
+            return self.info_association[0], inherited
+        if restrict:
+            return None, inherited
+        return self.library_dataset.folder.get_info_association( inherited=True )
+
+    def to_dict( self, view='collection' ):
+        # Since this class is a proxy to rather complex attributes we want to
+        # display in other objects, we can't use the simpler method used by
+        # other model classes.
+        ldda = self
+        try:
+            file_size = int( ldda.get_size() )
+        except OSError:
+            file_size = 0
+
+        rval = dict( id=ldda.id,
+                     hda_ldda='ldda',
+                     model_class=self.__class__.__name__,
+                     name=ldda.name,
+                     deleted=ldda.deleted,
+                     visible=ldda.visible,
+                     state=ldda.state,
+                     library_dataset_id=ldda.library_dataset_id,
+                     file_size=file_size,
+                     file_name=ldda.file_name,
+                     update_time=ldda.update_time.isoformat(),
+                     file_ext=ldda.ext,
+                     data_type=ldda.datatype.__class__.__module__ + '.' + ldda.datatype.__class__.__name__,
+                     genome_build=ldda.dbkey,
+                     misc_info=ldda.info,
+                     misc_blurb=ldda.blurb )
+        if ldda.dataset.uuid is None:
+            rval['uuid'] = None
+        else:
+            rval['uuid'] = str(ldda.dataset.uuid)
+        rval['parent_library_id'] = ldda.library_dataset.folder.parent_library.id
+        if ldda.extended_metadata is not None:
+            rval['extended_metadata'] = ldda.extended_metadata.data
+        for name, spec in ldda.metadata.spec.items():
+            val = ldda.metadata.get( name )
+            if isinstance( val, MetadataFile ):
+                val = val.file_name
+            # If no value for metadata, look in datatype for metadata.
+            elif val is None and hasattr( ldda.datatype, name ):
+                val = getattr( ldda.datatype, name )
+            rval['metadata_' + name] = val
+        return rval
+
+    def get_template_widgets( self, trans, get_contents=True ):
+        # See if we have any associated templates.  The get_contents
+        # param is passed by callers that are inheriting a template - these
+        # are usually new library datasets for which we want to include template
+        # fields on the upload form, but not necessarily the contents of the
+        # inherited template saved for the parent.
+        info_association, inherited = self.get_info_association()
+        if info_association:
+            if inherited:
+                template = info_association.template.current.latest_form
+            else:
+                template = info_association.template
+            # See if we have any field contents, but only if the info_association was
+            # not inherited ( we do not want to display the inherited contents ).
+            # (gvk: 8/30/10) Based on conversations with Dan, we agreed to ALWAYS inherit
+            # contents.  We'll use this behavior until we hear from the community that
+            # contents should not be inherited.  If we don't hear anything for a while,
+            # eliminate the old commented out behavior.
+            # if not inherited and get_contents:
+            if get_contents:
+                info = info_association.info
+                if info:
+                    return template.get_widgets( trans.user, info.content )
+            else:
+                return template.get_widgets( trans.user )
+        return []
+
+    def templates_dict( self, use_name=False ):
+        """
+        Returns a dict of template info
+        """
+        # TODO: Should have a method that allows names and labels to be returned together in a structured way
+        template_data = {}
+        for temp_info in self.info_association:
+            template = temp_info.template
+            content = temp_info.info.content
+            tmp_dict = {}
+            for field in template.fields:
+                if use_name:
+                    name = field[ 'name' ]
+                else:
+                    name = field[ 'label' ]
+                tmp_dict[ name ] = content.get( field[ 'name' ] )
+            template_data[template.name] = tmp_dict
+        return template_data
+
+    def templates_json( self, use_name=False ):
+        return json.dumps( self.templates_dict( use_name=use_name ) )
+
+
+class ExtendedMetadata( object ):
+    def __init__(self, data):
+        self.data = data
+
+
+class ExtendedMetadataIndex( object ):
+    def __init__( self, extended_metadata, path, value):
+        self.extended_metadata = extended_metadata
+        self.path = path
+        self.value = value
+
+
+class LibraryInfoAssociation( object ):
+    def __init__( self, library, form_definition, info, inheritable=False ):
+        self.library = library
+        self.template = form_definition
+        self.info = info
+        self.inheritable = inheritable
+
+
+class LibraryFolderInfoAssociation( object ):
+    def __init__( self, folder, form_definition, info, inheritable=False ):
+        self.folder = folder
+        self.template = form_definition
+        self.info = info
+        self.inheritable = inheritable
+
+
+class LibraryDatasetDatasetInfoAssociation( object ):
+    def __init__( self, library_dataset_dataset_association, form_definition, info ):
+        # TODO: need to figure out if this should be inheritable to the associated LibraryDataset
+        self.library_dataset_dataset_association = library_dataset_dataset_association
+        self.template = form_definition
+        self.info = info
+
+    @property
+    def inheritable( self ):
+        return True  # always allow inheriting, used for replacement
+
+
+class ValidationError( object ):
+    def __init__( self, message=None, err_type=None, attributes=None ):
+        self.message = message
+        self.err_type = err_type
+        self.attributes = attributes
+
+
+class DatasetToValidationErrorAssociation( object ):
+    def __init__( self, dataset, validation_error ):
+        self.dataset = dataset
+        self.validation_error = validation_error
+
+
+class ImplicitlyConvertedDatasetAssociation( object ):
+
+    def __init__( self, id=None, parent=None, dataset=None, file_type=None, deleted=False, purged=False, metadata_safe=True ):
+        self.id = id
+        if isinstance(dataset, HistoryDatasetAssociation):
+            self.dataset = dataset
+        elif isinstance(dataset, LibraryDatasetDatasetAssociation):
+            self.dataset_ldda = dataset
+        else:
+            raise AttributeError( 'Unknown dataset type provided for dataset: %s' % type( dataset ) )
+        if isinstance(parent, HistoryDatasetAssociation):
+            self.parent_hda = parent
+        elif isinstance(parent, LibraryDatasetDatasetAssociation):
+            self.parent_ldda = parent
+        else:
+            raise AttributeError( 'Unknown dataset type provided for parent: %s' % type( parent ) )
+        self.type = file_type
+        self.deleted = deleted
+        self.purged = purged
+        self.metadata_safe = metadata_safe
+
+    def clear( self, purge=False, delete_dataset=True ):
+        self.deleted = True
+        if self.dataset:
+            if delete_dataset:
+                self.dataset.deleted = True
+            if purge:
+                self.dataset.purged = True
+        if purge and self.dataset.deleted:  # purge this association's file from disk as well
+            self.purged = True
+            try:
+                os.unlink( self.file_name )
+            except Exception as e:
+                log.error( "Failed to purge associated file (%s) from disk: %s" % ( self.file_name, e ) )
+
+
+DEFAULT_COLLECTION_NAME = "Unnamed Collection"
+
+
+class DatasetCollection( object, Dictifiable, UsesAnnotations ):
+    """
+    A collection of datasets ( or nested collections ), with structure
+    described by its collection_type.
+    """
+    dict_collection_visible_keys = ( 'id', 'collection_type' )
+    dict_element_visible_keys = ( 'id', 'collection_type' )
+    populated_states = Bunch(
+        NEW='new',  # New dataset collection, unpopulated elements
+        OK='ok',  # Collection elements populated (HDAs may or may not have errors)
+        FAILED='failed',  # some problem populating state, won't be populated
+    )
+
+    def __init__(
+        self,
+        id=None,
+        collection_type=None,
+        populated=True,
+    ):
+        self.id = id
+        self.collection_type = collection_type
+        if not populated:
+            self.populated_state = DatasetCollection.populated_states.NEW
+
+    @property
+    def populated( self ):
+        top_level_populated = self.populated_state == DatasetCollection.populated_states.OK
+        if top_level_populated and self.has_subcollections:
+            return all(e.child_collection.populated for e in self.elements)
+        return top_level_populated
+
+    @property
+    def waiting_for_elements( self ):
+        top_level_waiting = self.populated_state == DatasetCollection.populated_states.NEW
+        if not top_level_waiting and self.has_subcollections:
+            return any(e.child_collection.waiting_for_elements for e in self.elements)
+        return top_level_waiting
+
+    def mark_as_populated( self ):
+        self.populated_state = DatasetCollection.populated_states.OK
+
+    def handle_population_failed( self, message ):
+        self.populated_state = DatasetCollection.populated_states.FAILED
+        self.populated_state_message = message
+
+    @property
+    def dataset_instances( self ):
+        instances = []
+        for element in self.elements:
+            if element.is_collection:
+                instances.extend( element.child_collection.dataset_instances )
+            else:
+                instance = element.dataset_instance
+                instances.append( instance )
+        return instances
+
+    @property
+    def dataset_elements( self ):
+        elements = []
+        for element in self.elements:
+            if element.is_collection:
+                elements.extend( element.child_collection.dataset_elements )
+            else:
+                elements.append( element )
+        return elements
+
+    @property
+    def state( self ):
+        # TODO: DatasetCollection state handling...
+        return 'ok'
+
+    def validate( self ):
+        if self.collection_type is None:
+            raise Exception("Each dataset collection must define a collection type.")
+
+    def __getitem__( self, key ):
+        get_by_attribute = "element_index" if isinstance( key, int ) else "element_identifier"
+        for element in self.elements:
+            if getattr( element, get_by_attribute ) == key:
+                return element
+        error_message = "Dataset collection has no %s with key %s." % ( get_by_attribute, key )
+        raise KeyError( error_message )
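+    # Usage sketch: elements are addressable by index or by identifier, e.g.
+    # for a hypothetical paired collection:
+    #   collection[ 0 ]           # lookup via element_index
+    #   collection[ 'forward' ]   # lookup via element_identifier
+    # A missing key raises KeyError either way.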
+
+    def copy( self, destination=None, element_destination=None ):
+        new_collection = DatasetCollection(
+            collection_type=self.collection_type,
+        )
+        for element in self.elements:
+            element.copy_to_collection(
+                new_collection,
+                destination=destination,
+                element_destination=element_destination,
+            )
+        object_session( self ).add( new_collection )
+        object_session( self ).flush()
+        return new_collection
+
+    def set_from_dict( self, new_data ):
+        # Nothing currently editable in this class.
+        return {}
+
+    @property
+    def has_subcollections(self):
+        return ":" in self.collection_type
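+    # e.g. has_subcollections is True for a nested collection_type such as
+    # 'list:paired' and False for flat types such as 'list' or 'paired'.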
+
+
+class DatasetCollectionInstance( object, HasName ):
+    """
+    Base class for associations between a DatasetCollection and a
+    container such as a History or a LibraryFolder.
+    """
+    def __init__(
+        self,
+        collection=None,
+        deleted=False,
+    ):
+        # Relationships
+        self.collection = collection
+        # Since the deleted property is shared between history and dataset collections,
+        # it could be on either table - in some places in the code, however, it is
+        # convenient to have it on the instance instead of the collection.
+        self.deleted = deleted
+
+    @property
+    def state( self ):
+        return self.collection.state
+
+    @property
+    def populated( self ):
+        return self.collection.populated
+
+    @property
+    def dataset_instances( self ):
+        return self.collection.dataset_instances
+
+    def display_name( self ):
+        return self.get_display_name()
+
+    def _base_to_dict( self, view ):
+        return dict(
+            id=self.id,
+            name=self.name,
+            collection_type=self.collection.collection_type,
+            populated=self.populated,
+            populated_state=self.collection.populated_state,
+            populated_state_message=self.collection.populated_state_message,
+            type="collection",  # contents type ( distinguished from file or, for libraries, folder )
+        )
+
+    def set_from_dict( self, new_data ):
+        """
+        Set object attributes to the values in dictionary new_data limiting
+        to only those keys in dict_element_visible_keys.
+
+        Returns a dictionary of the keys, values that have been changed.
+        """
+        # precondition: keys are proper, values are parsed and validated
+        changed = self.collection.set_from_dict( new_data )
+
+        # unknown keys are ignored here
+        for key in ( k for k in new_data.keys() if k in self.editable_keys ):
+            new_val = new_data[ key ]
+            old_val = self.__getattribute__( key )
+            if new_val == old_val:
+                continue
+
+            self.__setattr__( key, new_val )
+            changed[ key ] = new_val
+
+        return changed
+
+
+class HistoryDatasetCollectionAssociation( DatasetCollectionInstance, UsesAnnotations, Dictifiable ):
+    """ Associates a DatasetCollection with a History. """
+    editable_keys = ( 'name', 'deleted', 'visible' )
+
+    def __init__(
+        self,
+        id=None,
+        hid=None,
+        collection=None,
+        history=None,
+        name=None,
+        deleted=False,
+        visible=True,
+        copied_from_history_dataset_collection_association=None,
+        implicit_output_name=None,
+        implicit_input_collections=None,
+    ):
+        super( HistoryDatasetCollectionAssociation, self ).__init__(
+            collection=collection,
+            deleted=deleted,
+        )
+        self.id = id
+        self.hid = hid
+        self.history = history
+        self.name = name
+        self.visible = visible
+        self.copied_from_history_dataset_collection_association = copied_from_history_dataset_collection_association
+        self.implicit_output_name = implicit_output_name
+        self.implicit_input_collections = implicit_input_collections or []
+
+    @property
+    def history_content_type( self ):
+        return "dataset_collection"
+
+    # TODO: down into DatasetCollectionInstance
+    content_type = u'dataset_collection'
+
+    @hybrid.hybrid_property
+    def type_id( self ):
+        return u'-'.join([ self.content_type, str( self.id ) ])
+
+    @type_id.expression
+    def type_id( cls ):
+        return (( type_coerce( cls.content_type, types.Unicode ) + u'-' +
+                  type_coerce( cls.id, types.Unicode ) ).label( 'type_id' ))
+
+    def to_hda_representative( self, multiple=False ):
+        rval = []
+        for dataset in self.collection.dataset_elements:
+            rval.append( dataset.dataset_instance )
+            if multiple is False:
+                break
+        if len( rval ) > 0:
+            return rval if multiple else rval[ 0 ]
+
+    def to_dict( self, view='collection' ):
+        dict_value = dict(
+            hid=self.hid,
+            history_id=self.history.id,
+            history_content_type=self.history_content_type,
+            visible=self.visible,
+            deleted=self.deleted,
+            **self._base_to_dict(view=view)
+        )
+        return dict_value
+
+    def add_implicit_input_collection( self, name, history_dataset_collection ):
+        self.implicit_input_collections.append( ImplicitlyCreatedDatasetCollectionInput( name, history_dataset_collection ) )
+
+    def find_implicit_input_collection( self, name ):
+        matching_collection = None
+        for implicit_input_collection in self.implicit_input_collections:
+            if implicit_input_collection.name == name:
+                matching_collection = implicit_input_collection.input_dataset_collection
+                break
+        return matching_collection
+
+    def copy( self, element_destination=None ):
+        """
+        Create a copy of this history dataset collection association. Copy
+        underlying collection.
+        """
+        hdca = HistoryDatasetCollectionAssociation(
+            hid=self.hid,
+            collection=None,
+            visible=self.visible,
+            deleted=self.deleted,
+            name=self.name,
+            copied_from_history_dataset_collection_association=self,
+        )
+        collection_copy = self.collection.copy(
+            destination=hdca,
+            element_destination=element_destination,
+        )
+        hdca.collection = collection_copy
+        object_session( self ).add( hdca )
+        object_session( self ).flush()
+        return hdca
+
+
+class LibraryDatasetCollectionAssociation( DatasetCollectionInstance, Dictifiable ):
+    """ Associates a DatasetCollection with a library folder. """
+    editable_keys = ( 'name', 'deleted' )
+
+    def __init__(
+        self,
+        id=None,
+        collection=None,
+        name=None,
+        deleted=False,
+        folder=None,
+    ):
+        super(LibraryDatasetCollectionAssociation, self).__init__(
+            collection=collection,
+            deleted=deleted,
+        )
+        self.id = id
+        self.folder = folder
+        self.name = name
+
+    def to_dict( self, view='collection' ):
+        dict_value = dict(
+            folder_id=self.folder.id,
+            **self._base_to_dict(view=view)
+        )
+        return dict_value
+
+
+class DatasetCollectionElement( object, Dictifiable ):
+    """ Associates a DatasetInstance (hda or ldda) with a DatasetCollection. """
+    # actionable dataset id needs to be available via API...
+    dict_collection_visible_keys = ( 'id', 'element_type', 'element_index', 'element_identifier' )
+    dict_element_visible_keys = ( 'id', 'element_type', 'element_index', 'element_identifier' )
+
+    def __init__(
+        self,
+        id=None,
+        collection=None,
+        element=None,
+        element_index=None,
+        element_identifier=None,
+    ):
+        if isinstance(element, HistoryDatasetAssociation):
+            self.hda = element
+        elif isinstance(element, LibraryDatasetDatasetAssociation):
+            self.ldda = element
+        elif isinstance( element, DatasetCollection ):
+            self.child_collection = element
+        else:
+            raise AttributeError( 'Unknown element type provided: %s' % type( element ) )
+
+        self.id = id
+        self.collection = collection
+        self.element_index = element_index
+        self.element_identifier = element_identifier or str(element_index)
+
+    @property
+    def element_type( self ):
+        if self.hda:
+            return "hda"
+        elif self.ldda:
+            return "ldda"
+        elif self.child_collection:
+            # TODO: Rename this element type.
+            return "dataset_collection"
+        else:
+            raise Exception( "Unknown element instance type" )
+
+    @property
+    def is_collection( self ):
+        return self.element_type == "dataset_collection"
+
+    @property
+    def element_object( self ):
+        if self.hda:
+            return self.hda
+        elif self.ldda:
+            return self.ldda
+        elif self.child_collection:
+            return self.child_collection
+        else:
+            raise Exception( "Unknown element instance type" )
+
+    @property
+    def dataset_instance( self ):
+        element_object = self.element_object
+        if isinstance( element_object, DatasetCollection ):
+            raise AttributeError( "Nested collection has no associated dataset_instance." )
+        return element_object
+
+    @property
+    def dataset( self ):
+        return self.dataset_instance.dataset
+
+    def first_dataset_instance( self ):
+        element_object = self.element_object
+        if isinstance( element_object, DatasetCollection ):
+            return element_object.dataset_instances[ 0 ]
+        else:
+            return element_object
+
+    def copy_to_collection( self, collection, destination=None, element_destination=None ):
+        element_object = self.element_object
+        if element_destination:
+            if self.is_collection:
+                element_object = element_object.copy(
+                    destination=destination,
+                    element_destination=element_destination
+                )
+            else:
+                new_element_object = element_object.copy( copy_children=True )
+                if destination is not None and element_object.hidden_beneath_collection_instance:
+                    new_element_object.hidden_beneath_collection_instance = destination
+                # Ideally we would not need to give the following
+                # element an HID and it would exist in the history only
+                # as an element of the containing collection.
+                element_destination.add_dataset( new_element_object )
+                element_object = new_element_object
+
+        new_element = DatasetCollectionElement(
+            element=element_object,
+            collection=collection,
+            element_index=self.element_index,
+            element_identifier=self.element_identifier,
+        )
+        return new_element
+
+
+class Event( object ):
+    def __init__( self, message=None, history=None, user=None, galaxy_session=None ):
+        self.history = history
+        self.galaxy_session = galaxy_session
+        self.user = user
+        self.tool_id = None
+        self.message = message
+
+
+class GalaxySession( object ):
+    def __init__( self,
+                  id=None,
+                  user=None,
+                  remote_host=None,
+                  remote_addr=None,
+                  referer=None,
+                  current_history=None,
+                  session_key=None,
+                  is_valid=False,
+                  prev_session_id=None,
+                  last_action=None ):
+        self.id = id
+        self.user = user
+        self.remote_host = remote_host
+        self.remote_addr = remote_addr
+        self.referer = referer
+        self.current_history = current_history
+        self.session_key = session_key
+        self.is_valid = is_valid
+        self.prev_session_id = prev_session_id
+        self.histories = []
+        self.last_action = last_action or datetime.now()
+
+    def add_history( self, history, association=None ):
+        if association is None:
+            self.histories.append( GalaxySessionToHistoryAssociation( self, history ) )
+        else:
+            self.histories.append( association )
+
+    def get_disk_usage( self ):
+        if self.disk_usage is None:
+            return 0
+        return self.disk_usage
+
+    def set_disk_usage( self, bytes ):
+        self.disk_usage = bytes
+    total_disk_usage = property( get_disk_usage, set_disk_usage )
+
+
+class GalaxySessionToHistoryAssociation( object ):
+    def __init__( self, galaxy_session, history ):
+        self.galaxy_session = galaxy_session
+        self.history = history
+
+
+class UCI( object ):
+    def __init__( self ):
+        self.id = None
+        self.user = None
+
+
+class StoredWorkflow( object, Dictifiable ):
+
+    dict_collection_visible_keys = ( 'id', 'name', 'published', 'deleted' )
+    dict_element_visible_keys = ( 'id', 'name', 'published', 'deleted' )
+
+    def __init__( self ):
+        self.id = None
+        self.user = None
+        self.name = None
+        self.slug = None
+        self.published = False
+        self.latest_workflow_id = None
+        self.workflows = []
+
+    def copy_tags_from(self, target_user, source_workflow):
+        for src_swta in source_workflow.owner_tags:
+            new_swta = src_swta.copy()
+            new_swta.user = target_user
+            self.tags.append(new_swta)
+
+    def to_dict( self, view='collection', value_mapper=None ):
+        rval = super( StoredWorkflow, self ).to_dict( view=view, value_mapper=value_mapper )
+        tags_str_list = []
+        for tag in self.tags:
+            tag_str = tag.user_tname
+            if tag.value is not None:
+                tag_str += ":" + tag.user_value
+            tags_str_list.append( tag_str )
+        rval['tags'] = tags_str_list
+        rval['latest_workflow_uuid'] = ( lambda uuid: str( uuid ) if uuid else None )( self.latest_workflow.uuid )
+        return rval
+
+
+class Workflow( object, Dictifiable ):
+
+    dict_collection_visible_keys = ( 'name', 'has_cycles', 'has_errors' )
+    dict_element_visible_keys = ( 'name', 'has_cycles', 'has_errors' )
+    input_step_types = ['data_input', 'data_collection_input', 'parameter_input']
+
+    def __init__( self, uuid=None ):
+        self.user = None
+        self.name = None
+        self.has_cycles = None
+        self.has_errors = None
+        self.steps = []
+        if uuid is None:
+            self.uuid = uuid4()
+        else:
+            self.uuid = UUID(str(uuid))
+
+    def has_outputs_defined(self):
+        """
+        Return whether or not this workflow has any outputs defined.
+        """
+        for step in self.steps:
+            if step.workflow_outputs:
+                return True
+        return False
+
+    def to_dict( self, view='collection', value_mapper=None):
+        rval = super( Workflow, self ).to_dict( view=view, value_mapper=value_mapper )
+        rval['uuid'] = ( lambda uuid: str( uuid ) if uuid else None )( self.uuid )
+        return rval
+
+    @property
+    def steps_by_id( self ):
+        steps = {}
+        for step in self.steps:
+            step_id = step.id
+            steps[ step_id ] = step
+        return steps
+
+    def step_by_index(self, order_index):
+        for step in self.steps:
+            if order_index == step.order_index:
+                return step
+        raise KeyError("Workflow has no step with order_index '%s'" % order_index)
+
+    @property
+    def input_steps(self):
+        for step in self.steps:
+            if step.type in Workflow.input_step_types:
+                yield step
+
+    @property
+    def workflow_outputs(self):
+        for step in self.steps:
+            for workflow_output in step.workflow_outputs:
+                yield workflow_output
+
+    @property
+    def top_level_workflow( self ):
+        """ If this workflow is not attached to a stored workflow directly,
+        recursively walk up its parents until reaching the top level workflow,
+        which must have a stored workflow associated with it.
+        """
+        top_level_workflow = self
+        if self.stored_workflow is None:
+            # TODO: enforce this at creation...
+            assert len(self.parent_workflow_steps) == 1
+            return self.parent_workflow_steps[0].workflow.top_level_workflow
+        return top_level_workflow
+
+    @property
+    def top_level_stored_workflow( self ):
+        """ If this workflow is not attached to a stored workflow directly,
+        recursively walk up its parents until reaching the top level workflow,
+        which must have a stored workflow associated with it, and then
+        return that stored workflow.
+        """
+        return self.top_level_workflow.stored_workflow
+
+    def copy(self):
+        """ Copy a workflow (without user information) for a new
+        StoredWorkflow object.
+        """
+        copied_workflow = Workflow()
+        copied_workflow.name = self.name
+        copied_workflow.has_cycles = self.has_cycles
+        copied_workflow.has_errors = self.has_errors
+
+        # Map old step ids to new steps
+        step_mapping = {}
+        copied_steps = []
+        for step in self.steps:
+            copied_step = WorkflowStep()
+            copied_steps.append(copied_step)
+            step_mapping[step.id] = copied_step
+
+        for old_step, new_step in zip(self.steps, copied_steps):
+            old_step.copy_to(new_step, step_mapping)
+        copied_workflow.steps = copied_steps
+        return copied_workflow
+
+    def log_str(self):
+        extra = ""
+        if self.stored_workflow:
+            extra = ",name=%s" % self.stored_workflow.name
+        return "Workflow[id=%d%s]" % (self.id, extra)
+
+
+class WorkflowStep( object ):
+
+    def __init__( self ):
+        self.id = None
+        self.type = None
+        self.tool_id = None
+        self.tool_inputs = None
+        self.tool_errors = None
+        self.position = None
+        self.input_connections = []
+        self.config = None
+        self.label = None
+        self.uuid = uuid4()
+        self.workflow_outputs = []
+        self._input_connections_by_name = None
+
+    @property
+    def unique_workflow_outputs(self):
+        # Older Galaxy workflows may have multiple WorkflowOutputs
+        # per "output_name"; when serving these back to the editor,
+        # feed only a "best" output per "output_name".
+        outputs = {}
+        for workflow_output in self.workflow_outputs:
+            output_name = workflow_output.output_name
+
+            if output_name in outputs:
+                found_output = outputs[output_name]
+                if found_output.label is None and workflow_output.label is not None:
+                    outputs[output_name] = workflow_output
+            else:
+                outputs[output_name] = workflow_output
+        return list(outputs.values())
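+    # Illustrative: given two WorkflowOutputs both named 'out_file1', one
+    # unlabeled and one labeled 'final result', only the labeled one is kept -
+    # a labeled output replaces an unlabeled one for the same output_name.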
+
+    @property
+    def content_id( self ):
+        content_id = None
+        if self.type == "tool":
+            content_id = self.tool_id
+        elif self.type == "subworkflow":
+            content_id = self.subworkflow.id
+        else:
+            content_id = None
+        return content_id
+
+    @property
+    def input_connections_by_name(self):
+        if self._input_connections_by_name is None:
+            self.setup_input_connections_by_name()
+        return self._input_connections_by_name
+
+    def setup_input_connections_by_name(self):
+        # Ensure input_connections has already been set.
+
+        # Make connection information available on each step by input name.
+        input_connections_by_name = {}
+        for conn in self.input_connections:
+            input_name = conn.input_name
+            if input_name not in input_connections_by_name:
+                input_connections_by_name[input_name] = []
+            input_connections_by_name[input_name].append(conn)
+        self._input_connections_by_name = input_connections_by_name
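+    # Resulting shape ( hypothetical connections conn_a, conn_b, conn_c ): a
+    # step fed twice on 'input1' and once on 'queries_0|input2' ends up with
+    #   { 'input1': [ conn_a, conn_b ], 'queries_0|input2': [ conn_c ] }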
+
+    def create_or_update_workflow_output(self, output_name, label, uuid):
+        output = self.workflow_output_for(output_name)
+        if output is None:
+            output = WorkflowOutput(workflow_step=self, output_name=output_name)
+        if uuid is not None:
+            output.uuid = uuid
+        if label is not None:
+            output.label = label
+        return output
+
+    def workflow_output_for(self, output_name):
+        target_output = None
+        for workflow_output in self.workflow_outputs:
+            if workflow_output.output_name == output_name:
+                target_output = workflow_output
+                break
+        return target_output
+
+    def copy_to(self, copied_step, step_mapping):
+        copied_step.order_index = self.order_index
+        copied_step.type = self.type
+        copied_step.tool_id = self.tool_id
+        copied_step.tool_inputs = self.tool_inputs
+        copied_step.tool_errors = self.tool_errors
+        copied_step.position = self.position
+        copied_step.config = self.config
+        copied_step.label = self.label
+        copied_step.input_connections = copy_list(self.input_connections)
+
+        subworkflow_step_mapping = {}
+        subworkflow = self.subworkflow
+        if subworkflow:
+            copied_subworkflow = subworkflow.copy()
+            copied_step.subworkflow = copied_subworkflow
+            for subworkflow_step, copied_subworkflow_step in zip(subworkflow.steps, copied_subworkflow.steps):
+                subworkflow_step_mapping[subworkflow_step.id] = copied_subworkflow_step
+
+        for old_conn, new_conn in zip(self.input_connections, copied_step.input_connections):
+            new_conn.input_step = step_mapping[old_conn.input_step_id]
+            new_conn.output_step = step_mapping[old_conn.output_step_id]
+            if old_conn.input_subworkflow_step_id:
+                new_conn.input_subworkflow_step = subworkflow_step_mapping[old_conn.input_subworkflow_step_id]
+        for orig_pja in self.post_job_actions:
+            PostJobAction( orig_pja.action_type,
+                           copied_step,
+                           output_name=orig_pja.output_name,
+                           action_arguments=orig_pja.action_arguments )
+        copied_step.workflow_outputs = copy_list(self.workflow_outputs, copied_step)
+
+    def log_str(self):
+        return "WorkflowStep[index=%d,type=%s]" % (self.order_index, self.type)
+
+
+class WorkflowStepConnection( object ):
+    # Constant used in lieu of output_name and input_name to indicate an
+    # implicit connection between two steps that is not dependent on a dataset
+    # or a dataset collection. Allowing for instance data manager steps to setup
+    # index data before a normal tool runs or for workflows that manage data
+    # outside of Galaxy.
+    NON_DATA_CONNECTION = "__NO_INPUT_OUTPUT_NAME__"
+
+    def __init__( self ):
+        self.output_step_id = None
+        self.output_name = None
+        self.input_step_id = None
+        self.input_name = None
+
+    def set_non_data_connection(self):
+        self.output_name = WorkflowStepConnection.NON_DATA_CONNECTION
+        self.input_name = WorkflowStepConnection.NON_DATA_CONNECTION
+
+    @property
+    def non_data_connection(self):
+        return (self.output_name == WorkflowStepConnection.NON_DATA_CONNECTION and
+                self.input_name == WorkflowStepConnection.NON_DATA_CONNECTION)
+
+    def copy(self):
+        # TODO: handle subworkflow ids...
+        copied_connection = WorkflowStepConnection()
+        copied_connection.output_name = self.output_name
+        copied_connection.input_name = self.input_name
+        return copied_connection
+
+
+class WorkflowOutput(object):
+
+    def __init__( self, workflow_step, output_name=None, label=None, uuid=None):
+        self.workflow_step = workflow_step
+        self.output_name = output_name
+        self.label = label
+        if uuid is None:
+            self.uuid = uuid4()
+        else:
+            self.uuid = UUID(str(uuid))
+
+    def copy(self, copied_step):
+        copied_output = WorkflowOutput(copied_step)
+        copied_output.output_name = self.output_name
+        copied_output.label = self.label
+        return copied_output
+
+
+class StoredWorkflowUserShareAssociation( object ):
+
+    def __init__( self ):
+        self.stored_workflow = None
+        self.user = None
+
+
+class StoredWorkflowMenuEntry( object ):
+
+    def __init__( self ):
+        self.stored_workflow = None
+        self.user = None
+        self.order_index = None
+
+
+class WorkflowInvocation( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'update_time', 'workflow_id', 'history_id', 'uuid', 'state' )
+    dict_element_visible_keys = ( 'id', 'update_time', 'workflow_id', 'history_id', 'uuid', 'state' )
+    states = Bunch(
+        NEW='new',  # Brand new workflow invocation... maybe this should be the same as READY
+        READY='ready',  # Workflow ready for another iteration of scheduling.
+        SCHEDULED='scheduled',  # Workflow has been scheduled.
+        CANCELLED='cancelled',
+        FAILED='failed',
+    )
+
+    def __init__(self):
+        self.subworkflow_invocations = []
+        self.step_states = []
+        self.steps = []
+
+    def create_subworkflow_invocation_for_step( self, step ):
+        assert step.type == "subworkflow"
+        subworkflow_invocation = WorkflowInvocation()
+        return self.attach_subworkflow_invocation_for_step( step, subworkflow_invocation )
+
+    def attach_subworkflow_invocation_for_step( self, step, subworkflow_invocation ):
+        assert step.type == "subworkflow"
+        assoc = WorkflowInvocationToSubworkflowInvocationAssociation()
+        assoc.workflow_invocation = self
+        assoc.workflow_step = step
+        subworkflow_invocation.history = self.history
+        subworkflow_invocation.workflow = step.subworkflow
+        assoc.subworkflow_invocation = subworkflow_invocation
+        self.subworkflow_invocations.append(assoc)
+        return assoc
+
+    def get_subworkflow_invocation_for_step( self, step ):
+        assoc = self.get_subworkflow_invocation_association_for_step(step)
+        return assoc.subworkflow_invocation
+
+    def get_subworkflow_invocation_association_for_step( self, step ):
+        assert step.type == "subworkflow"
+        assoc = None
+        for subworkflow_invocation in self.subworkflow_invocations:
+            if subworkflow_invocation.workflow_step == step:
+                assoc = subworkflow_invocation
+                break
+        return assoc
+
+    @property
+    def active( self ):
+        """ Indicates the workflow invocation is somehow active - and in
+        particular valid actions may be performed on its
+        WorkflowInvocationSteps.
+        """
+        states = WorkflowInvocation.states
+        return self.state in [ states.NEW, states.READY ]
+
+    def cancel( self ):
+        if not self.active:
+            return False
+        else:
+            self.state = WorkflowInvocation.states.CANCELLED
+            return True
+
+    def fail( self ):
+        self.state = WorkflowInvocation.states.FAILED
+
+    def step_states_by_step_id( self ):
+        step_states = {}
+        for step_state in self.step_states:
+            step_id = step_state.workflow_step_id
+            step_states[ step_id ] = step_state
+        return step_states
+
+    def step_invocations_by_step_id( self ):
+        step_invocations = {}
+        for invocation_step in self.steps:
+            step_id = invocation_step.workflow_step_id
+            if step_id not in step_invocations:
+                step_invocations[ step_id ] = []
+            step_invocations[ step_id ].append( invocation_step )
+        return step_invocations
+
+    def step_invocations_for_step_id( self, step_id ):
+        step_invocations = []
+        for invocation_step in self.steps:
+            if step_id == invocation_step.workflow_step_id:
+                step_invocations.append( invocation_step )
+        return step_invocations
+
+    @staticmethod
+    def poll_active_workflow_ids(
+        sa_session,
+        scheduler=None,
+        handler=None
+    ):
+        and_conditions = [
+            or_(
+                WorkflowInvocation.state == WorkflowInvocation.states.NEW,
+                WorkflowInvocation.state == WorkflowInvocation.states.READY
+            ),
+        ]
+        if scheduler is not None:
+            and_conditions.append( WorkflowInvocation.scheduler == scheduler )
+        if handler is not None:
+            and_conditions.append( WorkflowInvocation.handler == handler )
+
+        query = sa_session.query(
+            WorkflowInvocation
+        ).filter( and_( *and_conditions ) )
+        # Load all ids into memory immediately so the time slicing logic
+        # is relatively intuitive.
+        return [wi.id for wi in query.all()]
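+
+    # Example (hypothetical call site): a scheduler loop might fetch the ids
+    # of all NEW/READY invocations assigned to a given handler, e.g.:
+    #
+    #     ids = WorkflowInvocation.poll_active_workflow_ids( sa_session,
+    #                                                        handler="handler0" )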
+
+    def to_dict( self, view='collection', value_mapper=None, step_details=False ):
+        rval = super( WorkflowInvocation, self ).to_dict( view=view, value_mapper=value_mapper )
+        if view == 'element':
+            steps = []
+            for step in self.steps:
+                if step_details:
+                    v = step.to_dict(view='element')
+                else:
+                    v = step.to_dict(view='collection')
+                steps.append( v )
+            rval['steps'] = steps
+
+            inputs = {}
+            for step in self.steps:
+                if step.workflow_step.type == 'tool':
+                    for step_input in step.workflow_step.input_connections:
+                        output_step_type = step_input.output_step.type
+                        if output_step_type in [ 'data_input', 'data_collection_input' ]:
+                            src = "hda" if output_step_type == 'data_input' else 'hdca'
+                            for job_input in step.job.input_datasets:
+                                if job_input.name == step_input.input_name:
+                                    inputs[str(step_input.output_step.order_index)] = {
+                                        "id": job_input.dataset_id, "src": src,
+                                        "uuid" : str(job_input.dataset.dataset.uuid) if job_input.dataset.dataset.uuid is not None else None
+                                    }
+            rval['inputs'] = inputs
+        return rval
+
+    def update( self ):
+        self.update_time = galaxy.model.orm.now.now()
+
+    def add_input( self, content, step_id ):
+        history_content_type = getattr(content, "history_content_type", None)
+        if history_content_type == "dataset":
+            request_to_content = WorkflowRequestToInputDatasetAssociation()
+            request_to_content.dataset = content
+            request_to_content.workflow_step_id = step_id
+            self.input_datasets.append( request_to_content )
+        elif history_content_type == "dataset_collection":
+            request_to_content = WorkflowRequestToInputDatasetCollectionAssociation()
+            request_to_content.dataset_collection = content
+            request_to_content.workflow_step_id = step_id
+            self.input_dataset_collections.append( request_to_content )
+        else:
+            request_to_content = WorkflowRequestInputStepParmeter()
+            request_to_content.parameter_value = content
+            request_to_content.workflow_step_id = step_id
+            self.input_step_parameters.append( request_to_content )
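+
+    # Example (hypothetical usage): add_input dispatches on history_content_type,
+    # so each kind of content gets the matching association, e.g.:
+    #
+    #     invocation.add_input( hda, step.id )    # -> input_datasets
+    #     invocation.add_input( hdca, step.id )   # -> input_dataset_collections
+    #     invocation.add_input( "42", step.id )   # -> input_step_parameters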
+
+    def has_input_for_step( self, step_id ):
+        for content in self.input_datasets:
+            if content.workflow_step_id == step_id:
+                return True
+        for content in self.input_dataset_collections:
+            if content.workflow_step_id == step_id:
+                return True
+        return False
+
+
+class WorkflowInvocationToSubworkflowInvocationAssociation( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'workflow_step_id', 'workflow_invocation_id', 'subworkflow_invocation_id' )
+    dict_element_visible_keys = ( 'id', 'workflow_step_id', 'workflow_invocation_id', 'subworkflow_invocation_id' )
+
+
+class WorkflowInvocationStep( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'update_time', 'job_id', 'workflow_step_id', 'action' )
+    dict_element_visible_keys = ( 'id', 'update_time', 'job_id', 'workflow_step_id', 'action' )
+
+    def update( self ):
+        self.workflow_invocation.update()
+
+    def to_dict( self, view='collection', value_mapper=None ):
+        rval = super( WorkflowInvocationStep, self ).to_dict( view=view, value_mapper=value_mapper )
+        rval['order_index'] = self.workflow_step.order_index
+        rval['workflow_step_label'] = self.workflow_step.label
+        rval['workflow_step_uuid'] = str(self.workflow_step.uuid)
+        rval['state'] = self.job.state if self.job is not None else None
+        if self.job is not None and view == 'element':
+            output_dict = {}
+            for i in self.job.output_datasets:
+                if i.dataset is not None:
+                    output_dict[i.name] = {
+                        "id" : i.dataset.id, "src" : "hda",
+                        "uuid" : str(i.dataset.dataset.uuid) if i.dataset.dataset.uuid is not None else None
+                    }
+            for i in self.job.output_library_datasets:
+                if i.dataset is not None:
+                    output_dict[i.name] = {
+                        "id" : i.dataset.id, "src" : "ldda",
+                        "uuid" : str(i.dataset.dataset.uuid) if i.dataset.dataset.uuid is not None else None
+                    }
+            rval['outputs'] = output_dict
+        return rval
+
+
+class WorkflowRequest( object, Dictifiable ):
+    dict_collection_visible_keys = [ 'id', 'name', 'type', 'state', 'history_id', 'workflow_id' ]
+    dict_element_visible_keys = [ 'id', 'name', 'type', 'state', 'history_id', 'workflow_id' ]
+
+    def to_dict( self, view='collection', value_mapper=None ):
+        rval = super( WorkflowRequest, self ).to_dict( view=view, value_mapper=value_mapper )
+        return rval
+
+
+class WorkflowRequestInputParameter(object, Dictifiable):
+    """ Workflow-related parameters not tied to steps or inputs.
+    """
+    dict_collection_visible_keys = ['id', 'name', 'value', 'type']
+    types = Bunch(
+        REPLACEMENT_PARAMETERS='replacements',
+        META_PARAMETERS='meta',
+    )
+
+    def __init__( self, name=None, value=None, type=None ):
+        self.name = name
+        self.value = value
+        self.type = type
+
+
+class WorkflowRequestStepState(object, Dictifiable):
+    """ Workflow step value parameters.
+    """
+    dict_collection_visible_keys = ['id', 'name', 'value', 'workflow_step_id']
+
+    def __init__( self, workflow_step=None, name=None, value=None ):
+        self.workflow_step = workflow_step
+        self.name = name
+        self.value = value
+
+
+class WorkflowRequestToInputDatasetAssociation(object, Dictifiable):
+    """ Workflow step input dataset parameters.
+    """
+    dict_collection_visible_keys = ['id', 'workflow_invocation_id', 'workflow_step_id', 'dataset_id', 'name' ]
+
+
+class WorkflowRequestToInputDatasetCollectionAssociation(object, Dictifiable):
+    """ Workflow step input dataset collection parameters.
+    """
+    dict_collection_visible_keys = ['id', 'workflow_invocation_id', 'workflow_step_id', 'dataset_collection_id', 'name' ]
+
+
+class WorkflowRequestInputStepParmeter(object, Dictifiable):
+    """ Workflow step parameter inputs.
+    """
+    dict_collection_visible_keys = ['id', 'workflow_invocation_id', 'workflow_step_id', 'parameter_value' ]
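+    # NOTE: "Parmeter" is a long-standing misspelling of "Parameter"; the
+    # class name is kept as-is because other code refers to it by this name.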
+
+
+class MetadataFile( StorableObject ):
+
+    def __init__( self, dataset=None, name=None ):
+        super(MetadataFile, self).__init__(id=None)
+        if isinstance( dataset, HistoryDatasetAssociation ):
+            self.history_dataset = dataset
+        elif isinstance( dataset, LibraryDatasetDatasetAssociation ):
+            self.library_dataset = dataset
+        self.name = name
+
+    @property
+    def file_name( self ):
+        assert self.id is not None, "ID must be set before filename used (commit the object)"
+        # Ensure the directory structure and the metadata file object exist
+        try:
+            da = self.history_dataset or self.library_dataset
+            if self.object_store_id is None and da is not None:
+                self.object_store_id = da.dataset.object_store_id
+            if not da.dataset.object_store.exists( self, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id ):
+                da.dataset.object_store.create( self, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id )
+            path = da.dataset.object_store.get_filename( self, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % self.id )
+            return path
+        except AttributeError:
+            # In case we're not working with the history_dataset
+            # print "Caught AttributeError"
+            path = os.path.join( Dataset.file_path, '_metadata_files', *directory_hash_id( self.id ) )
+            # Create directory if it does not exist
+            try:
+                os.makedirs( path )
+            except OSError as e:
+                # File Exists is okay, otherwise reraise
+                if e.errno != errno.EEXIST:
+                    raise
+            # Return filename inside hashed directory
+            return os.path.abspath( os.path.join( path, "metadata_%d.dat" % self.id ) )
+
+
+class FormDefinition( object, Dictifiable ):
+    # The following form_builder classes are supported by the FormDefinition class.
+    supported_field_types = [ AddressField, CheckboxField, PasswordField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField, HistoryField ]
+    types = Bunch( REQUEST='Sequencing Request Form',
+                   SAMPLE='Sequencing Sample Form',
+                   EXTERNAL_SERVICE='External Service Information Form',
+                   RUN_DETAILS_TEMPLATE='Sample run details template',
+                   LIBRARY_INFO_TEMPLATE='Library information template',
+                   USER_INFO='User Information' )
+    dict_collection_visible_keys = ( 'id', 'name' )
+    dict_element_visible_keys = ( 'id', 'name', 'desc', 'form_definition_current_id', 'fields', 'layout' )
+
+    def __init__( self, name=None, desc=None, fields=[], form_definition_current=None, form_type=None, layout=None ):
+        self.name = name
+        self.desc = desc
+        self.fields = fields
+        self.form_definition_current = form_definition_current
+        self.type = form_type
+        self.layout = layout
+
+    def grid_fields( self, grid_index ):
+        # Returns a dictionary whose keys are integers corresponding to field
+        # positions on the grid and whose values are the corresponding fields.
+        gridfields = {}
+        for i, f in enumerate( self.fields ):
+            if str( f[ 'layout' ] ) == str( grid_index ):
+                gridfields[i] = f
+        return gridfields
+
+    def get_widgets( self, user, contents={}, **kwd ):
+        '''
+        Return the list of widgets that comprise a form definition,
+        including field contents if any.
+        '''
+        params = Params( kwd )
+        widgets = []
+        for index, field in enumerate( self.fields ):
+            field_type = field[ 'type' ]
+            if 'name' in field:
+                field_name = field[ 'name' ]
+            else:
+                # Default to names like field_0, field_1, etc for backward compatibility
+                # (not sure this is necessary)...
+                field_name = 'field_%i' % index
+            # Determine the value of the field
+            if field_name in kwd:
+                # The form was submitted via refresh_on_change
+                if field_type == 'CheckboxField':
+                    value = CheckboxField.is_checked( params.get( field_name, False ) )
+                else:
+                    value = restore_text( params.get( field_name, '' ) )
+            elif contents:
+                try:
+                    # This field has a saved value.
+                    value = str( contents[ field[ 'name' ] ] )
+                except Exception:
+                    # If there was an error getting the saved value, we'll still
+                    # display the widget, but it will be empty.
+                    if field_type == 'AddressField':
+                        value = 'none'
+                    elif field_type == 'CheckboxField':
+                        # Since we do not have contents, set checkbox value to False
+                        value = False
+                    else:
+                        # Set other field types to empty string
+                        value = ''
+            else:
+                # If none of the above, then leave the field empty
+                if field_type == 'AddressField':
+                    value = 'none'
+                elif field_type == 'CheckboxField':
+                    # Since we do not have contents, set checkbox value to False
+                    value = False
+                else:
+                    # Set other field types to the default value of the field
+                    value = field.get( 'default', '' )
+            # Create the field widget
+            field_widget = eval( field_type )( field_name )
+            if field_type in [ 'TextField', 'PasswordField' ]:
+                field_widget.set_size( 40 )
+                field_widget.value = value
+            elif field_type == 'TextArea':
+                field_widget.set_size( 3, 40 )
+                field_widget.value = value
+            elif field_type in ['AddressField', 'WorkflowField', 'WorkflowMappingField', 'HistoryField']:
+                field_widget.user = user
+                field_widget.value = value
+                field_widget.params = params
+            elif field_type == 'SelectField':
+                for option in field[ 'selectlist' ]:
+                    if option == value:
+                        field_widget.add_option( option, option, selected=True )
+                    else:
+                        field_widget.add_option( option, option )
+            elif field_type == 'CheckboxField':
+                field_widget.set_checked( value )
+            if field[ 'required' ] == 'required':
+                req = 'Required'
+            else:
+                req = 'Optional'
+            if field[ 'helptext' ]:
+                helptext = '%s (%s)' % ( field[ 'helptext' ], req )
+            else:
+                helptext = '(%s)' % req
+            widgets.append( dict( label=field[ 'label' ],
+                                  widget=field_widget,
+                                  helptext=helptext ) )
+        return widgets
+
+    def field_as_html( self, field ):
+        """Generates disabled html for a field"""
+        type = field[ 'type' ]
+        form_field = None
+        for field_type in self.supported_field_types:
+            if type == field_type.__name__:
+                # Name it AddressField, CheckboxField, etc.
+                form_field = field_type( type )
+                break
+        if form_field:
+            return form_field.get_html( disabled=True )
+        # Return None if unsupported field type
+        return None
+
+
+class FormDefinitionCurrent( object ):
+    def __init__(self, form_definition=None):
+        self.latest_form = form_definition
+
+
+class FormValues( object ):
+    def __init__(self, form_def=None, content=None):
+        self.form_definition = form_def
+        self.content = content
+
+
+class Request( object, Dictifiable ):
+    states = Bunch( NEW='New',
+                    SUBMITTED='In Progress',
+                    REJECTED='Rejected',
+                    COMPLETE='Complete' )
+    dict_collection_visible_keys = ( 'id', 'name', 'state' )
+
+    def __init__( self, name=None, desc=None, request_type=None, user=None, form_values=None, notification=None ):
+        self.name = name
+        self.desc = desc
+        self.type = request_type
+        self.values = form_values
+        self.user = user
+        self.notification = notification
+        self.samples_list = []
+
+    @property
+    def state( self ):
+        latest_event = self.latest_event
+        if latest_event:
+            return latest_event.state
+        return None
+
+    @property
+    def latest_event( self ):
+        if self.events:
+            return self.events[0]
+        return None
+
+    @property
+    def samples_have_common_state( self ):
+        """
+        Returns the state of this request's samples when they are all
+        in one common state. Otherwise returns False.
+        """
+        state_for_comparison = self.samples[0].state
+        if state_for_comparison is None:
+            for s in self.samples:
+                if s.state is not None:
+                    return False
+            # All samples share the unset state; return it rather than
+            # dereferencing None in the comparison below.
+            return state_for_comparison
+        for s in self.samples:
+            if s.state.id != state_for_comparison.id:
+                return False
+        return state_for_comparison
+
+    @property
+    def last_comment( self ):
+        latest_event = self.latest_event
+        if latest_event:
+            if latest_event.comment:
+                return latest_event.comment
+            return ''
+        return 'No comment'
+
+    def get_sample( self, sample_name ):
+        for sample in self.samples:
+            if sample.name == sample_name:
+                return sample
+        return None
+
+    @property
+    def is_unsubmitted( self ):
+        return self.state in [ self.states.REJECTED, self.states.NEW ]
+
+    @property
+    def is_rejected( self ):
+        return self.state == self.states.REJECTED
+
+    @property
+    def is_submitted( self ):
+        return self.state == self.states.SUBMITTED
+
+    @property
+    def is_new( self ):
+        return self.state == self.states.NEW
+
+    @property
+    def is_complete( self ):
+        return self.state == self.states.COMPLETE
+
+    @property
+    def samples_without_library_destinations( self ):
+        # Return all samples that are not associated with a library
+        samples = []
+        for sample in self.samples:
+            if not sample.library:
+                samples.append( sample )
+        return samples
+
+    @property
+    def samples_with_bar_code( self ):
+        # Return all samples that have associated bar code
+        samples = []
+        for sample in self.samples:
+            if sample.bar_code:
+                samples.append( sample )
+        return samples
+
+    def send_email_notification( self, trans, common_state, final_state=False ):
+        # Check if an email notification is configured to be sent when the samples
+        # are in this state
+        if self.notification and common_state.id not in self.notification[ 'sample_states' ]:
+            return
+        comments = ''
+        # Send email
+        if trans.app.config.smtp_server is not None and self.notification and self.notification[ 'email' ]:
+            body = """
+Galaxy Sample Tracking Notification
+===================================
+
+User:                     %(user)s
+
+Sequencing request:       %(request_name)s
+Sequencer configuration:  %(request_type)s
+Sequencing request state: %(request_state)s
+
+Number of samples:        %(num_samples)s
+All samples in state:     %(sample_state)s
+
+"""
+            values = dict( user=self.user.email,
+                           request_name=self.name,
+                           request_type=self.type.name,
+                           request_state=self.state,
+                           num_samples=str( len( self.samples ) ),
+                           sample_state=common_state.name,
+                           create_time=self.create_time,
+                           submit_time=self.create_time )
+            body = body % values
+            # check if this is the final state of the samples
+            if final_state:
+                txt = "Sample Name -> Data Library/Folder\r\n"
+                for s in self.samples:
+                    if s.library:
+                        library_name = s.library.name
+                        folder_name = s.folder.name
+                    else:
+                        library_name = 'No target data library'
+                        folder_name = 'No target data library folder'
+                    txt = txt + "%s -> %s/%s\r\n" % ( s.name, library_name, folder_name )
+                body = body + txt
+            to = self.notification['email']
+            frm = trans.app.config.email_from
+            if frm is None:
+                host = trans.request.host.split( ':' )[0]
+                if host in [ 'localhost', '127.0.0.1', '0.0.0.0' ]:
+                    host = socket.getfqdn()
+                frm = 'galaxy-no-reply@' + host
+            subject = "Galaxy Sample Tracking notification: '%s' sequencing request" % self.name
+            try:
+                send_mail( frm, to, subject, body, trans.app.config )
+                comments = "Email notification sent to %s." % ", ".join( to ).strip().strip( ',' )
+            except Exception as e:
+                comments = "Email notification failed. (%s)" % str(e)
+            # update the request history with the email notification event
+        elif not trans.app.config.smtp_server:
+            comments = "Email notification failed as SMTP server not set in config file"
+        if comments:
+            event = RequestEvent( self, self.state, comments )
+            trans.sa_session.add( event )
+            trans.sa_session.flush()
+        return comments
+
+
+class RequestEvent( object ):
+    def __init__(self, request=None, request_state=None, comment=''):
+        self.request = request
+        self.state = request_state
+        self.comment = comment
+
+
+class ExternalService( object ):
+    data_transfer_protocol = Bunch( HTTP='http',
+                                    HTTPS='https',
+                                    SCP='scp' )
+
+    def __init__( self, name=None, description=None, external_service_type_id=None, version=None, form_definition_id=None, form_values_id=None, deleted=None ):
+        self.name = name
+        self.description = description
+        self.external_service_type_id = external_service_type_id
+        self.version = version
+        self.form_definition_id = form_definition_id
+        self.form_values_id = form_values_id
+        self.deleted = deleted
+        self.label = None  # Used in the request_type controller's __build_external_service_select_field() method
+
+    def get_external_service_type( self, trans ):
+        return trans.app.external_service_types.all_external_service_types[ self.external_service_type_id ]
+
+    def load_data_transfer_settings( self, trans ):
+        trans.app.external_service_types.reload( self.external_service_type_id )
+        self.data_transfer = {}
+        external_service_type = self.get_external_service_type( trans )
+        for data_transfer_protocol, data_transfer_obj in external_service_type.data_transfer.items():
+            if data_transfer_protocol == self.data_transfer_protocol.SCP:
+                scp_configs = {}
+                automatic_transfer = data_transfer_obj.config.get( 'automatic_transfer', 'false' )
+                scp_configs[ 'automatic_transfer' ] = galaxy.util.string_as_bool( automatic_transfer )
+                scp_configs[ 'host' ] = self.form_values.content.get( data_transfer_obj.config.get( 'host', '' ), '' )
+                scp_configs[ 'user_name' ] = self.form_values.content.get( data_transfer_obj.config.get( 'user_name', '' ), '' )
+                scp_configs[ 'password' ] = self.form_values.content.get( data_transfer_obj.config.get( 'password', '' ), '' )
+                scp_configs[ 'data_location' ] = self.form_values.content.get( data_transfer_obj.config.get( 'data_location', '' ), '' )
+                scp_configs[ 'rename_dataset' ] = self.form_values.content.get( data_transfer_obj.config.get( 'rename_dataset', '' ), '' )
+                self.data_transfer[ self.data_transfer_protocol.SCP ] = scp_configs
+            if data_transfer_protocol == self.data_transfer_protocol.HTTP:
+                http_configs = {}
+                automatic_transfer = data_transfer_obj.config.get( 'automatic_transfer', 'false' )
+                http_configs[ 'automatic_transfer' ] = galaxy.util.string_as_bool( automatic_transfer )
+                self.data_transfer[ self.data_transfer_protocol.HTTP ] = http_configs
+
+    def populate_actions( self, trans, item, param_dict=None ):
+        return self.get_external_service_type( trans ).actions.populate( self, item, param_dict=param_dict )
+
+
+class RequestType( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'name', 'desc' )
+    dict_element_visible_keys = ( 'id', 'name', 'desc', 'request_form_id', 'sample_form_id' )
+    rename_dataset_options = Bunch( NO='Do not rename',
+                                    SAMPLE_NAME='Prepend sample name',
+                                    EXPERIMENT_NAME='Prepend experiment name',
+                                    EXPERIMENT_AND_SAMPLE_NAME='Prepend experiment and sample name')
+    permitted_actions = get_permitted_actions( filter='REQUEST_TYPE' )
+
+    def __init__( self, name=None, desc=None, request_form=None, sample_form=None ):
+        self.name = name
+        self.desc = desc
+        self.request_form = request_form
+        self.sample_form = sample_form
+
+    @property
+    def external_services( self ):
+        external_services = []
+        for rtesa in self.external_service_associations:
+            external_services.append( rtesa.external_service )
+        return external_services
+
+    def get_external_service( self, external_service_type_id ):
+        for rtesa in self.external_service_associations:
+            if rtesa.external_service.external_service_type_id == external_service_type_id:
+                return rtesa.external_service
+        return None
+
+    def get_external_services_for_manual_data_transfer( self, trans ):
+        '''Returns all external services that use manual data transfer'''
+        external_services = []
+        for rtesa in self.external_service_associations:
+            external_service = rtesa.external_service
+            # load data transfer settings
+            external_service.load_data_transfer_settings( trans )
+            if external_service.data_transfer:
+                for transfer_type, transfer_type_settings in external_service.data_transfer.items():
+                    if not transfer_type_settings[ 'automatic_transfer' ]:
+                        external_services.append( external_service )
+        return external_services
+
+    def delete_external_service_associations( self, trans ):
+        '''Deletes all external service associations.'''
+        flush_needed = False
+        for rtesa in self.external_service_associations:
+            trans.sa_session.delete( rtesa )
+            flush_needed = True
+        if flush_needed:
+            trans.sa_session.flush()
+
+    def add_external_service_association( self, trans, external_service ):
+        rtesa = trans.model.RequestTypeExternalServiceAssociation( self, external_service )
+        trans.sa_session.add( rtesa )
+        trans.sa_session.flush()
+
+    @property
+    def final_sample_state( self ):
+        # The states mapper for this object orders ascending
+        return self.states[-1]
+
+    @property
+    def run_details( self ):
+        if self.run:
+            # self.run[0] is [RequestTypeRunAssociation]
+            return self.run[0]
+        return None
+
+    def get_template_widgets( self, trans, get_contents=True ):
+        # See if we have any associated templates.  The get_contents param
+        # is passed by callers that are inheriting a template - these are
+        # usually new samples for which we want to include template fields,
+        # but not necessarily the contents of the inherited template.
+        rtra = self.run_details
+        if rtra:
+            run = rtra.run
+            template = run.template
+            if get_contents:
+                # See if we have any field contents
+                info = run.info
+                if info:
+                    return template.get_widgets( trans.user, contents=info.content )
+            return template.get_widgets( trans.user )
+        return []
+
+
+class RequestTypeExternalServiceAssociation( object ):
+    def __init__( self, request_type, external_service ):
+        self.request_type = request_type
+        self.external_service = external_service
+
+
+class RequestTypePermissions( object ):
+    def __init__( self, action, request_type, role ):
+        self.action = action
+        self.request_type = request_type
+        self.role = role
+
+
+class Sample( object, Dictifiable ):
+    # The following form_builder classes are supported by the Sample class.
+    supported_field_types = [ CheckboxField, SelectField, TextField, WorkflowField, WorkflowMappingField, HistoryField ]
+    bulk_operations = Bunch( CHANGE_STATE='Change state',
+                             SELECT_LIBRARY='Select data library and folder' )
+    dict_collection_visible_keys = ( 'id', 'name' )
+
+    def __init__(self, name=None, desc=None, request=None, form_values=None, bar_code=None, library=None, folder=None, workflow=None, history=None):
+        self.name = name
+        self.desc = desc
+        self.request = request
+        self.values = form_values
+        self.bar_code = bar_code
+        self.library = library
+        self.folder = folder
+        self.history = history
+        self.workflow = workflow
+
+    @property
+    def state( self ):
+        latest_event = self.latest_event
+        if latest_event:
+            return latest_event.state
+        return None
+
+    @property
+    def latest_event( self ):
+        if self.events:
+            return self.events[0]
+        return None
+
+    @property
+    def adding_to_library_dataset_files( self ):
+        adding_to_library_datasets = []
+        for dataset in self.datasets:
+            if dataset.status == SampleDataset.transfer_status.ADD_TO_LIBRARY:
+                adding_to_library_datasets.append( dataset )
+        return adding_to_library_datasets
+
+    @property
+    def inprogress_dataset_files( self ):
+        inprogress_datasets = []
+        for dataset in self.datasets:
+            if dataset.status not in [ SampleDataset.transfer_status.NOT_STARTED, SampleDataset.transfer_status.COMPLETE ]:
+                inprogress_datasets.append( dataset )
+        return inprogress_datasets
+
+    @property
+    def queued_dataset_files( self ):
+        queued_datasets = []
+        for dataset in self.datasets:
+            if dataset.status == SampleDataset.transfer_status.IN_QUEUE:
+                queued_datasets.append( dataset )
+        return queued_datasets
+
+    @property
+    def transfer_error_dataset_files( self ):
+        transfer_error_datasets = []
+        for dataset in self.datasets:
+            if dataset.status == SampleDataset.transfer_status.ERROR:
+                transfer_error_datasets.append( dataset )
+        return transfer_error_datasets
+
+    @property
+    def transferred_dataset_files( self ):
+        transferred_datasets = []
+        for dataset in self.datasets:
+            if dataset.status == SampleDataset.transfer_status.COMPLETE:
+                transferred_datasets.append( dataset )
+        return transferred_datasets
+
+    @property
+    def transferring_dataset_files( self ):
+        transferring_datasets = []
+        for dataset in self.datasets:
+            if dataset.status == SampleDataset.transfer_status.TRANSFERRING:
+                transferring_datasets.append( dataset )
+        return transferring_datasets
+
+    @property
+    def untransferred_dataset_files( self ):
+        untransferred_datasets = []
+        for dataset in self.datasets:
+            if dataset.status != SampleDataset.transfer_status.COMPLETE:
+                untransferred_datasets.append( dataset )
+        return untransferred_datasets
+
+    @property
+    def run_details( self ):
+        # self.runs is a list of SampleRunAssociations ordered descending on update_time.
+        if self.runs:
+            # Always use the latest run details template, self.runs[0] is a SampleRunAssociation
+            return self.runs[0]
+        # Inherit this sample's RequestType run details, if one exists.
+        return self.request.type.run_details
+
+    def get_template_widgets( self, trans, get_contents=True ):
+        # Samples have a one-to-many relationship with run details, so we return the
+        # widgets for last associated template.  The get_contents param will populate
+        # the widget fields with values from the template inherited from the sample's
+        # RequestType.
+        template = None
+        if self.runs:
+            # The self.runs mapper orders descending on update_time.
+            run = self.runs[0].run
+            template = run.template
+        if template is None:
+            # There are no run details associated with this sample, so inherit the
+            # run details template from the sample's RequestType.
+            rtra = self.request.type.run_details
+            if rtra:
+                run = rtra.run
+                template = run.template
+        if template:
+            if get_contents:
+                # See if we have any field contents
+                info = run.info
+                if info:
+                    return template.get_widgets( trans.user, contents=info.content )
+            return template.get_widgets( trans.user )
+        return []
+
+    def populate_external_services( self, param_dict=None, trans=None ):
+        if self.request and self.request.type:
+            return [ service.populate_actions( item=self, param_dict=param_dict, trans=trans ) for service in self.request.type.external_services ]
+
+
+class SampleState( object ):
+    def __init__(self, name=None, desc=None, request_type=None):
+        self.name = name
+        self.desc = desc
+        self.request_type = request_type
+
+
+class SampleEvent( object ):
+    def __init__(self, sample=None, sample_state=None, comment=''):
+        self.sample = sample
+        self.state = sample_state
+        self.comment = comment
+
+
+class SampleDataset( object ):
+    transfer_status = Bunch( NOT_STARTED='Not started',
+                             IN_QUEUE='In queue',
+                             TRANSFERRING='Transferring dataset',
+                             ADD_TO_LIBRARY='Adding to data library',
+                             COMPLETE='Complete',
+                             ERROR='Error' )
+
+    def __init__( self, sample=None, name=None, file_path=None, status=None, error_msg=None, size=None, external_service=None ):
+        self.sample = sample
+        self.name = name
+        self.file_path = file_path
+        self.status = status
+        self.error_msg = error_msg
+        self.size = size
+        self.external_service = external_service
+
+
+class Run( object ):
+    def __init__( self, form_definition, form_values, subindex=None ):
+        self.template = form_definition
+        self.info = form_values
+        self.subindex = subindex
+
+
+class RequestTypeRunAssociation( object ):
+    def __init__( self, request_type, run ):
+        self.request_type = request_type
+        self.run = run
+
+
+class SampleRunAssociation( object ):
+    def __init__( self, sample, run ):
+        self.sample = sample
+        self.run = run
+
+
+class UserAddress( object ):
+    def __init__( self, user=None, desc=None, name=None, institution=None,
+                  address=None, city=None, state=None, postal_code=None,
+                  country=None, phone=None ):
+        self.user = user
+        self.desc = desc
+        self.name = name
+        self.institution = institution
+        self.address = address
+        self.city = city
+        self.state = state
+        self.postal_code = postal_code
+        self.country = country
+        self.phone = phone
+
+    def get_html(self):
+        # This should probably be deprecated eventually.  It currently
+        # sanitizes its output.
+        # TODO: find out what else uses this and replace it with templates.
+        html = ''
+        if self.name:
+            html = html + sanitize_html(self.name)
+        if self.institution:
+            html = html + '<br/>' + sanitize_html(self.institution)
+        if self.address:
+            html = html + '<br/>' + sanitize_html(self.address)
+        if self.city:
+            html = html + '<br/>' + sanitize_html(self.city)
+        if self.state:
+            html = html + ' ' + sanitize_html(self.state)
+        if self.postal_code:
+            html = html + ' ' + sanitize_html(self.postal_code)
+        if self.country:
+            html = html + '<br/>' + sanitize_html(self.country)
+        if self.phone:
+            html = html + '<br/>' + 'phone: ' + sanitize_html(self.phone)
+        return html
+
+
+class UserOpenID( object ):
+    def __init__( self, user=None, session=None, openid=None ):
+        self.user = user
+        self.session = session
+        self.openid = openid
+
+
+class Page( object, Dictifiable ):
+    dict_element_visible_keys = [ 'id', 'title', 'latest_revision_id', 'slug', 'published', 'importable', 'deleted' ]
+
+    def __init__( self ):
+        self.id = None
+        self.user = None
+        self.title = None
+        self.slug = None
+        self.latest_revision_id = None
+        self.revisions = []
+        self.importable = None
+        self.published = None
+
+    def to_dict( self, view='element' ):
+        rval = super( Page, self ).to_dict( view=view )
+        rev = []
+        for a in self.revisions:
+            rev.append(a.id)
+        rval['revision_ids'] = rev
+        return rval
+
+
+class PageRevision( object, Dictifiable ):
+    dict_element_visible_keys = [ 'id', 'page_id', 'title', 'content' ]
+
+    def __init__( self ):
+        self.user = None
+        self.title = None
+        self.content = None
+
+    def to_dict( self, view='element' ):
+        rval = super( PageRevision, self ).to_dict( view=view )
+        rval['create_time'] = str(self.create_time)
+        rval['update_time'] = str(self.update_time)
+        return rval
+
+
+class PageUserShareAssociation( object ):
+    def __init__( self ):
+        self.page = None
+        self.user = None
+
+
+class Visualization( object ):
+    def __init__( self, id=None, user=None, type=None, title=None, dbkey=None, slug=None, latest_revision=None ):
+        self.id = id
+        self.user = user
+        self.type = type
+        self.title = title
+        self.dbkey = dbkey
+        self.slug = slug
+        self.latest_revision = latest_revision
+        self.revisions = []
+        if self.latest_revision:
+            self.revisions.append( latest_revision )
+
+    def copy( self, user=None, title=None ):
+        """
+        Provide copy of visualization with only its latest revision.
+        """
+        # NOTE: a shallow copy is done: the config is copied as is but datasets
+        # are not copied nor are the dataset ids changed. This means that the
+        # user does not have a copy of the data in his/her history and the
+        # user who owns the datasets may delete them, making them inaccessible
+        # for the current user.
+        # TODO: a deep copy option is needed.
+
+        if not user:
+            user = self.user
+        if not title:
+            title = self.title
+
+        copy_viz = Visualization( user=user, type=self.type, title=title, dbkey=self.dbkey )
+        copy_revision = self.latest_revision.copy( visualization=copy_viz )
+        copy_viz.latest_revision = copy_revision
+        return copy_viz
+
+
+class VisualizationRevision( object ):
+    def __init__( self, visualization=None, title=None, dbkey=None, config=None ):
+        self.id = None
+        self.visualization = visualization
+        self.title = title
+        self.dbkey = dbkey
+        self.config = config
+
+    def copy( self, visualization=None ):
+        """
+        Returns a copy of this object.
+        """
+        if not visualization:
+            visualization = self.visualization
+
+        return VisualizationRevision(
+            visualization=visualization,
+            title=self.title,
+            dbkey=self.dbkey,
+            config=self.config
+        )
+
+
+class VisualizationUserShareAssociation( object ):
+    def __init__( self ):
+        self.visualization = None
+        self.user = None
+
+
+class TransferJob( object ):
+    # These states are used both by the transfer manager's IPC and the object
+    # state in the database.  Not all states are used by both.
+    states = Bunch( NEW='new',
+                    UNKNOWN='unknown',
+                    PROGRESS='progress',
+                    RUNNING='running',
+                    ERROR='error',
+                    DONE='done' )
+    terminal_states = [ states.ERROR,
+                        states.DONE ]
+
+    def __init__( self, state=None, path=None, info=None, pid=None, socket=None, params=None ):
+        self.state = state
+        self.path = path
+        self.info = info
+        self.pid = pid
+        self.socket = socket
+        self.params = params
+
+
+class Tag ( object ):
+    def __init__( self, id=None, type=None, parent_id=None, name=None ):
+        self.id = id
+        self.type = type
+        self.parent_id = parent_id
+        self.name = name
+
+    def __str__( self ):
+        return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
+
+
+class ItemTagAssociation ( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'user_tname', 'user_value' )
+    dict_element_visible_keys = dict_collection_visible_keys
+
+    def __init__( self, id=None, user=None, item_id=None, tag_id=None, user_tname=None, value=None ):
+        self.id = id
+        self.user = user
+        self.item_id = item_id
+        self.tag_id = tag_id
+        self.user_tname = user_tname
+        self.value = None
+        self.user_value = None
+
+    def copy(self):
+        new_ta = type(self)()
+        new_ta.tag_id = self.tag_id
+        new_ta.user_tname = self.user_tname
+        new_ta.value = self.value
+        new_ta.user_value = self.user_value
+        return new_ta
+
+
+class HistoryTagAssociation ( ItemTagAssociation ):
+    pass
+
+
+class DatasetTagAssociation ( ItemTagAssociation ):
+    pass
+
+
+class HistoryDatasetAssociationTagAssociation ( ItemTagAssociation ):
+    pass
+
+
+class PageTagAssociation ( ItemTagAssociation ):
+    pass
+
+
+class WorkflowStepTagAssociation ( ItemTagAssociation ):
+    pass
+
+
+class StoredWorkflowTagAssociation ( ItemTagAssociation ):
+    pass
+
+
+class VisualizationTagAssociation ( ItemTagAssociation ):
+    pass
+
+
+class HistoryDatasetCollectionTagAssociation( ItemTagAssociation ):
+    pass
+
+
+class LibraryDatasetCollectionTagAssociation( ItemTagAssociation ):
+    pass
+
+
+class ToolTagAssociation( ItemTagAssociation ):
+    def __init__( self, id=None, user=None, tool_id=None, tag_id=None, user_tname=None, value=None ):
+        self.id = id
+        self.user = user
+        self.tool_id = tool_id
+        self.tag_id = tag_id
+        self.user_tname = user_tname
+        self.value = None
+        self.user_value = None
+
+
+class WorkRequestTagAssociation( ItemTagAssociation ):
+    def __init__( self, id=None, user=None, workflow_request_id=None, tag_id=None, user_tname=None, value=None ):
+        self.id = id
+        self.user = user
+        self.workflow_request_id = workflow_request_id
+        self.tag_id = tag_id
+        self.user_tname = user_tname
+        self.value = None
+        self.user_value = None
+
+
+# Item annotation classes.
+class HistoryAnnotationAssociation( object ):
+    pass
+
+
+class HistoryDatasetAssociationAnnotationAssociation( object ):
+    pass
+
+
+class StoredWorkflowAnnotationAssociation( object ):
+    pass
+
+
+class WorkflowStepAnnotationAssociation( object ):
+    pass
+
+
+class PageAnnotationAssociation( object ):
+    pass
+
+
+class VisualizationAnnotationAssociation( object ):
+    pass
+
+
+class HistoryDatasetCollectionAnnotationAssociation( object ):
+    pass
+
+
+class LibraryDatasetCollectionAnnotationAssociation( object ):
+    pass
+
+
+# Item rating classes.
+
+class ItemRatingAssociation( object ):
+    def __init__( self, id=None, user=None, item=None, rating=0 ):
+        self.id = id
+        self.user = user
+        self.item = item
+        self.rating = rating
+
+    def set_item( self, item ):
+        """ Set association's item. """
+        pass
+
+
+class HistoryRatingAssociation( ItemRatingAssociation ):
+    def set_item( self, history ):
+        self.history = history
+
+
+class HistoryDatasetAssociationRatingAssociation( ItemRatingAssociation ):
+    def set_item( self, history_dataset_association ):
+        self.history_dataset_association = history_dataset_association
+
+
+class StoredWorkflowRatingAssociation( ItemRatingAssociation ):
+    def set_item( self, stored_workflow ):
+        self.stored_workflow = stored_workflow
+
+
+class PageRatingAssociation( ItemRatingAssociation ):
+    def set_item( self, page ):
+        self.page = page
+
+
+class VisualizationRatingAssociation( ItemRatingAssociation ):
+    def set_item( self, visualization ):
+        self.visualization = visualization
+
+
+class HistoryDatasetCollectionRatingAssociation( ItemRatingAssociation ):
+    def set_item( self, dataset_collection ):
+        self.dataset_collection = dataset_collection
+
+
+class LibraryDatasetCollectionRatingAssociation( ItemRatingAssociation ):
+    def set_item( self, dataset_collection ):
+        self.dataset_collection = dataset_collection
+
+
+# Data Manager Classes
+class DataManagerHistoryAssociation( object ):
+    def __init__( self, id=None, history=None, user=None ):
+        self.id = id
+        self.history = history
+        self.user = user
+
+
+class DataManagerJobAssociation( object ):
+    def __init__( self, id=None, job=None, data_manager_id=None ):
+        self.id = id
+        self.job = job
+        self.data_manager_id = data_manager_id
+# end of Data Manager Classes
+
+
+class UserPreference ( object ):
+    def __init__( self, name=None, value=None):
+        self.name = name
+        self.value = value
+
+
+class UserAction( object ):
+    def __init__( self, id=None, create_time=None, user_id=None, session_id=None, action=None, params=None, context=None):
+        self.id = id
+        self.create_time = create_time
+        self.user_id = user_id
+        self.session_id = session_id
+        self.action = action
+        self.params = params
+        self.context = context
+
+
+class APIKeys( object ):
+    def __init__( self, id=None, user_id=None, key=None):
+        self.id = id
+        self.user_id = user_id
+        self.key = key
+
+
+def copy_list(lst, *args, **kwds):
+    if lst is None:
+        return lst
+    else:
+        return [el.copy(*args, **kwds) for el in lst]
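+
+
+# Example (mirrors the call in WorkflowStep.copy_to above): extra arguments
+# are forwarded to each element's copy() method, e.g.:
+#
+#     copied_outputs = copy_list( step.workflow_outputs, copied_step )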
diff --git a/lib/galaxy/model/base.py b/lib/galaxy/model/base.py
new file mode 100644
index 0000000..60574a9
--- /dev/null
+++ b/lib/galaxy/model/base.py
@@ -0,0 +1,39 @@
+"""
+Shared model and mapping code between Galaxy and Tool Shed, trying to
+generalize to arbitrary database connections.
+"""
+
+from sqlalchemy.orm import scoped_session, sessionmaker
+from galaxy.util.bunch import Bunch
+from inspect import getmembers, isclass
+
+
+# TODO: Refactor this to be a proper class, not a bunch.
+class ModelMapping(Bunch):
+
+    def __init__(self, model_modules, engine):
+        self.engine = engine
+        context = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
+        # For backward compatibility with "context.current"
+        # deprecated?
+        context.current = context
+        self.context = context
+        self.session = context
+
+        model_classes = {}
+        for module in model_modules:
+            m_obs = getmembers(module, isclass)
+            m_obs = dict([m for m in m_obs if m[1].__module__ == module.__name__])
+            model_classes.update(m_obs)
+
+        super(ModelMapping, self).__init__(**model_classes)
+
+        context.remove()
+        context.configure( bind=engine )
+
+    @property
+    def Session(self):
+        """
+        For backward compat., deprecated.
+        """
+        return self.context
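+
+
+# Example (hypothetical usage): collect the mapped classes of one or more
+# model modules and bind the scoped session to an engine, e.g.:
+#
+#     import galaxy.model
+#     # engine is assumed to be a pre-built SQLAlchemy engine
+#     mapping = ModelMapping( [galaxy.model], engine=engine )
+#     session = mapping.session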
diff --git a/lib/galaxy/model/custom_types.py b/lib/galaxy/model/custom_types.py
new file mode 100644
index 0000000..21bf881
--- /dev/null
+++ b/lib/galaxy/model/custom_types.py
@@ -0,0 +1,333 @@
+import binascii
+import copy
+import json
+import logging
+import uuid
+
+from sys import getsizeof
+from itertools import chain
+from collections import deque
+
+import sqlalchemy
+
+from galaxy import app
+from galaxy.util.aliaspickler import AliasPickleModule
+from sqlalchemy.types import CHAR, LargeBinary, String, TypeDecorator
+from sqlalchemy.ext.mutable import Mutable
+
+log = logging.getLogger( __name__ )
+
+# Default JSON encoder and decoder
+json_encoder = json.JSONEncoder( sort_keys=True )
+json_decoder = json.JSONDecoder( )
+
+
+def _sniffnfix_pg9_hex(value):
+    """
+    Sniff for and fix postgres 9 hex decoding issue
+    """
+    try:
+        if value[0] == 'x':
+            return binascii.unhexlify(value[1:])
+        elif value.startswith( '\\x' ):
+            return binascii.unhexlify( value[2:] )
+        else:
+            return value
+    except Exception:
+        return value
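+
+
+# Example (illustrative): a PostgreSQL 9 style value such as '\\x7b7d' (hex
+# for '{}') is unhexlified, while ordinary values pass through unchanged:
+#
+#     _sniffnfix_pg9_hex( '\\x7b7d' )  # -> '{}'
+#     _sniffnfix_pg9_hex( '{}' )       # -> '{}'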
+
+
+class JSONType(sqlalchemy.types.TypeDecorator):
+    """
+    Represents an immutable structure as a json-encoded string.
+
+    If default is, for example, a dict, then a NULL value in the
+    database will be exposed as an empty dict.
+    """
+
+    # TODO: Figure out why this is a large binary, and provide a migration to
+    # something like sqlalchemy.String, or even better, when applicable, native
+    # sqlalchemy.dialects.postgresql.JSON
+    impl = LargeBinary
+
+    def process_bind_param(self, value, dialect):
+        if value is not None:
+            value = json_encoder.encode(value)
+        return value
+
+    def process_result_value(self, value, dialect):
+        if value is not None:
+            value = json_decoder.decode( str( _sniffnfix_pg9_hex( value ) ) )
+        return value
+
+    def load_dialect_impl(self, dialect):
+        if dialect.name == "mysql":
+            return dialect.type_descriptor(sqlalchemy.dialects.mysql.MEDIUMBLOB)
+        else:
+            return self.impl
+
+    def copy_value( self, value ):
+        return copy.deepcopy( value )
+
+    def compare_values( self, x, y ):
+        return ( x == y )
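+
+
+# Example (hypothetical column definition): a JSONType column stores any
+# JSON-encodable structure, e.g.:
+#
+#     Column( "metadata", JSONType, nullable=True )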
+
+
+class MutationObj(Mutable):
+    """
+    Mutable JSONType for SQLAlchemy from original gist:
+    https://gist.github.com/dbarnett/1730610
+
+    Using minor changes from this fork of the gist:
+    https://gist.github.com/miracle2k/52a031cced285ba9b8cd
+
+    And other minor changes to make it work for us.
+    """
+    @classmethod
+    def coerce(cls, key, value):
+        if isinstance(value, dict) and not isinstance(value, MutationDict):
+            return MutationDict.coerce(key, value)
+        if isinstance(value, list) and not isinstance(value, MutationList):
+            return MutationList.coerce(key, value)
+        return value
+
+    @classmethod
+    def _listen_on_attribute(cls, attribute, coerce, parent_cls):
+        key = attribute.key
+        if parent_cls is not attribute.class_:
+            return
+
+        # rely on "propagate" here
+        parent_cls = attribute.class_
+
+        def load(state, *args):
+            val = state.dict.get(key, None)
+            if coerce and key not in state.unloaded:
+                val = cls.coerce(key, val)
+                state.dict[key] = val
+            if isinstance(val, cls):
+                val._parents[state.obj()] = key
+
+        def set(target, value, oldvalue, initiator):
+            if not isinstance(value, cls):
+                value = cls.coerce(key, value)
+            if isinstance(value, cls):
+                value._parents[target.obj()] = key
+            if isinstance(oldvalue, cls):
+                oldvalue._parents.pop(target.obj(), None)
+            return value
+
+        def pickle(state, state_dict):
+            val = state.dict.get(key, None)
+            if isinstance(val, cls):
+                if 'ext.mutable.values' not in state_dict:
+                    state_dict['ext.mutable.values'] = []
+                state_dict['ext.mutable.values'].append(val)
+
+        def unpickle(state, state_dict):
+            if 'ext.mutable.values' in state_dict:
+                for val in state_dict['ext.mutable.values']:
+                    val._parents[state.obj()] = key
+
+        sqlalchemy.event.listen(parent_cls, 'load', load, raw=True, propagate=True)
+        sqlalchemy.event.listen(parent_cls, 'refresh', load, raw=True, propagate=True)
+        sqlalchemy.event.listen(attribute, 'set', set, raw=True, retval=True, propagate=True)
+        sqlalchemy.event.listen(parent_cls, 'pickle', pickle, raw=True, propagate=True)
+        sqlalchemy.event.listen(parent_cls, 'unpickle', unpickle, raw=True, propagate=True)
+
+
+class MutationDict(MutationObj, dict):
+    @classmethod
+    def coerce(cls, key, value):
+        """Convert plain dictionary to MutationDict"""
+        self = MutationDict((k, MutationObj.coerce(key, v)) for (k, v) in value.items())
+        self._key = key
+        return self
+
+    def __setitem__(self, key, value):
+        if hasattr(self, '_key'):
+            value = MutationObj.coerce(self._key, value)
+        dict.__setitem__(self, key, value)
+        self.changed()
+
+    def __delitem__(self, key):
+        dict.__delitem__(self, key)
+        self.changed()
+
+    def __getstate__(self):
+        return dict(self)
+
+    def __setstate__(self, state):
+        self.update(state)
+
+
+class MutationList(MutationObj, list):
+    @classmethod
+    def coerce(cls, key, value):
+        """Convert plain list to MutationList"""
+        self = MutationList((MutationObj.coerce(key, v) for v in value))
+        self._key = key
+        return self
+
+    def __setitem__(self, idx, value):
+        list.__setitem__(self, idx, MutationObj.coerce(self._key, value))
+        self.changed()
+
+    def __setslice__(self, start, stop, values):
+        list.__setslice__(self, start, stop, (MutationObj.coerce(self._key, v) for v in values))
+        self.changed()
+
+    def __delitem__(self, idx):
+        list.__delitem__(self, idx)
+        self.changed()
+
+    def __delslice__(self, start, stop):
+        list.__delslice__(self, start, stop)
+        self.changed()
+
+    def __copy__(self):
+        return MutationList(MutationObj.coerce(self._key, self[:]))
+
+    def __deepcopy__(self, memo):
+        return MutationList(MutationObj.coerce(self._key, copy.deepcopy(self[:])))
+
+    def append(self, value):
+        list.append(self, MutationObj.coerce(self._key, value))
+        self.changed()
+
+    def insert(self, idx, value):
+        list.insert(self, idx, MutationObj.coerce(self._key, value))
+        self.changed()
+
+    def extend(self, values):
+        list.extend(self, (MutationObj.coerce(self._key, v) for v in values))
+        self.changed()
+
+    def pop(self, *args, **kw):
+        value = list.pop(self, *args, **kw)
+        self.changed()
+        return value
+
+    def remove(self, value):
+        list.remove(self, value)
+        self.changed()
+
+
+MutationObj.associate_with(JSONType)
+
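+# With the association above, in-place edits to JSON-typed attributes mark the
+# owning row dirty automatically. Hedged sketch (``job`` is a hypothetical
+# mapped object with a JSONType column ``params``):
+#
+#   job.params['threads'] = 4   # MutationDict.__setitem__ calls changed(),
+#                               # so the column is written on the next flush
+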
+metadata_pickler = AliasPickleModule( {
+    ( "cookbook.patterns", "Bunch" ): ( "galaxy.util.bunch", "Bunch" )
+} )
+
+
+def total_size(o, handlers={}, verbose=False):
+    """ Returns the approximate memory footprint an object and all of its contents.
+
+    Automatically finds the contents of the following builtin containers and
+    their subclasses:  tuple, list, deque, dict, set and frozenset.
+    To search other containers, add handlers to iterate over their contents:
+
+        handlers = {SomeContainerClass: iter,
+                    OtherContainerClass: OtherContainerClass.get_elements}
+
+    Recipe from:  https://code.activestate.com/recipes/577504-compute-memory-footprint-of-an-object-and-its-cont/
+    """
+    def dict_handler(d):
+        return chain.from_iterable(d.items())
+    all_handlers = { tuple: iter,
+                     list: iter,
+                     deque: iter,
+                     dict: dict_handler,
+                     set: iter,
+                     frozenset: iter }
+    all_handlers.update(handlers)     # user handlers take precedence
+    seen = set()                      # track which object ids have already been seen
+    default_size = getsizeof(0)       # estimate sizeof object without __sizeof__
+
+    def sizeof(o):
+        if id(o) in seen:       # do not double count the same object
+            return 0
+        seen.add(id(o))
+        s = getsizeof(o, default_size)
+
+        for typ, handler in all_handlers.items():
+            if isinstance(o, typ):
+                s += sum(map(sizeof, handler(o)))
+                break
+        return s
+
+    return sizeof(o)
+
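+# Illustrative calls (hypothetical data), following the handlers example in
+# the docstring above:
+#
+#   total_size( {'a': [1, 2, 3]} )                           # dict + list + ints
+#   total_size( my_container, handlers={MyContainer: iter} )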
+
+class MetadataType( JSONType ):
+    """
+    Backward compatible metadata type. Can read pickles or JSON, but always
+    writes in JSON.
+    """
+
+    def process_bind_param(self, value, dialect):
+        if value is not None:
+            if app.app and app.app.config.max_metadata_value_size:
+                for k, v in value.items():
+                    sz = total_size(v)
+                    if sz > app.app.config.max_metadata_value_size:
+                        del value[k]
+                        log.warning('Refusing to bind metadata key %s due to size (%s)' % (k, sz))
+            value = json_encoder.encode(value)
+        return value
+
+    def process_result_value( self, value, dialect ):
+        if value is None:
+            return None
+        ret = None
+        try:
+            ret = metadata_pickler.loads( str( value ) )
+            if ret:
+                ret = dict( ret.__dict__ )
+        except Exception:
+            try:
+                ret = json_decoder.decode( str( _sniffnfix_pg9_hex(value) ) )
+            except Exception:
+                ret = None
+        return ret
+
+
+class UUIDType(TypeDecorator):
+    """
+    Platform-independent UUID type.
+
+    Based on http://docs.sqlalchemy.org/en/rel_0_8/core/types.html#backend-agnostic-guid-type
+    Changed to remove SQLAlchemy 0.8-specific code.
+
+    CHAR(32), storing as stringified hex values.
+    """
+    impl = CHAR
+
+    def load_dialect_impl(self, dialect):
+        return dialect.type_descriptor(CHAR(32))
+
+    def process_bind_param(self, value, dialect):
+        if value is None:
+            return value
+        else:
+            if not isinstance(value, uuid.UUID):
+                return "%.32x" % uuid.UUID(value)
+            else:
+                # already a UUID; render as a 32-character hexstring
+                return "%.32x" % value
+
+    def process_result_value(self, value, dialect):
+        if value is None:
+            return value
+        else:
+            return uuid.UUID(value)
+
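+# Round-trip sketch (illustrative): a UUID is bound as a 32-character hex
+# string and rehydrated as a uuid.UUID on the way back out.
+#
+#   u = uuid.uuid4()
+#   UUIDType().process_bind_param( u, dialect=None )         # -> u's 32-char hex
+#   UUIDType().process_result_value( u.hex, dialect=None )   # -> uuid.UUID, == u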
+
+class TrimmedString( TypeDecorator ):
+    impl = String
+
+    def process_bind_param( self, value, dialect ):
+        """Automatically truncate string values"""
+        if self.impl.length and value is not None:
+            value = value[0:self.impl.length]
+        return value
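+
+# Truncation sketch (illustrative): with an impl length of 255, an overlong
+# value is silently clipped before it reaches the database.
+#
+#   TrimmedString( 255 ).process_bind_param( 'x' * 300, dialect=None )  # 255 chars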
diff --git a/lib/galaxy/model/item_attrs.py b/lib/galaxy/model/item_attrs.py
new file mode 100644
index 0000000..b4dd1f3
--- /dev/null
+++ b/lib/galaxy/model/item_attrs.py
@@ -0,0 +1,171 @@
+import logging
+
+from sqlalchemy.sql.expression import func
+
+# Cannot import galaxy.model here because it creates a circular import graph.
+import galaxy
+
+log = logging.getLogger( __name__ )
+
+
+class RuntimeException( Exception ):
+    pass
+
+
+class UsesItemRatings:
+    """
+        Mixin for getting and setting item ratings.
+
+        Class makes two assumptions:
+        (1) item-rating association table is named <item_class>RatingAssocation
+        (2) item-rating association table has a column with a foreign key referencing
+        item table that contains the item's id.
+    """
+    def get_ave_item_rating_data( self, db_session, item, webapp_model=None ):
+        """ Returns the average rating for an item."""
+        if webapp_model is None:
+            webapp_model = galaxy.model
+        item_rating_assoc_class = self._get_item_rating_assoc_class( item, webapp_model=webapp_model )
+        if not item_rating_assoc_class:
+            raise RuntimeException( "Item does not have ratings: %s" % item.__class__.__name__ )
+        item_id_filter = self._get_item_id_filter_str( item, item_rating_assoc_class )
+        ave_rating = db_session.query( func.avg( item_rating_assoc_class.rating ) ).filter( item_id_filter ).scalar()
+        # Convert ave_rating to float; note: if there are no item ratings, ave rating is None.
+        if ave_rating:
+            ave_rating = float( ave_rating )
+        else:
+            ave_rating = 0
+        num_ratings = int( db_session.query( func.count( item_rating_assoc_class.rating ) ).filter( item_id_filter ).scalar() )
+        return ( ave_rating, num_ratings )
+
+    def rate_item( self, db_session, user, item, rating, webapp_model=None ):
+        """ Rate an item. Return type is <item_class>RatingAssociation. """
+        if webapp_model is None:
+            webapp_model = galaxy.model
+        item_rating = self.get_user_item_rating( db_session, user, item, webapp_model=webapp_model )
+        if not item_rating:
+            # User has not yet rated item; create rating.
+            item_rating_assoc_class = self._get_item_rating_assoc_class( item, webapp_model=webapp_model )
+            item_rating = item_rating_assoc_class()
+            item_rating.user = user
+            item_rating.set_item( item )
+            item_rating.rating = rating
+            db_session.add( item_rating )
+            db_session.flush()
+        elif item_rating.rating != rating:
+            # User has rated item; update rating.
+            item_rating.rating = rating
+            db_session.flush()
+        return item_rating
+
+    def get_user_item_rating( self, db_session, user, item, webapp_model=None ):
+        """ Returns user's rating for an item. Return type is <item_class>RatingAssociation. """
+        if webapp_model is None:
+            webapp_model = galaxy.model
+        item_rating_assoc_class = self._get_item_rating_assoc_class( item, webapp_model=webapp_model )
+        if not item_rating_assoc_class:
+            raise RuntimeException( "Item does not have ratings: %s" % item.__class__.__name__ )
+
+        # Query rating table by user and item id.
+        item_id_filter = self._get_item_id_filter_str( item, item_rating_assoc_class )
+        return db_session.query( item_rating_assoc_class ).filter_by( user=user ).filter( item_id_filter ).first()
+
+    def _get_item_rating_assoc_class( self, item, webapp_model=None ):
+        """ Returns an item's item-rating association class. """
+        if webapp_model is None:
+            webapp_model = galaxy.model
+        item_rating_assoc_class = '%sRatingAssociation' % item.__class__.__name__
+        return getattr( webapp_model, item_rating_assoc_class, None )
+
+    def _get_item_id_filter_str( self, item, item_rating_assoc_class, webapp_model=None ):
+        # Get foreign key in item-rating association table that references item table.
+        if webapp_model is None:
+            webapp_model = galaxy.model
+        item_fk = None
+        for fk in item_rating_assoc_class.table.foreign_keys:
+            if fk.references( item.table ):
+                item_fk = fk
+                break
+
+        if not item_fk:
+            raise RuntimeException( "Cannot find item id column in item-rating association table: %s, %s" % item_rating_assoc_class.__name__, item_rating_assoc_class.table.name )
+
+        # TODO: can we provide a better filter than a raw string?
+        return "%s=%i" % ( item_fk.parent.name, item.id )
+
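+# Usage sketch (illustrative; ``session``, ``user`` and ``history`` are
+# hypothetical objects from a mapped Galaxy model):
+#
+#   mixin = UsesItemRatings()
+#   rating_assoc = mixin.rate_item( session, user, history, 5 )
+#   ave, count = mixin.get_ave_item_rating_data( session, history )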
+
+class UsesAnnotations:
+    """ Mixin for getting and setting item annotations. """
+    def get_item_annotation_str( self, db_session, user, item ):
+        """ Returns a user's annotation string for an item. """
+        annotation_obj = self.get_item_annotation_obj( db_session, user, item )
+        if annotation_obj:
+            return galaxy.util.unicodify( annotation_obj.annotation )
+        return None
+
+    def get_item_annotation_obj( self, db_session, user, item ):
+        """ Returns a user's annotation object for an item. """
+        # Get annotation association class.
+        annotation_assoc_class = self._get_annotation_assoc_class( item )
+        if not annotation_assoc_class:
+            return None
+
+        # Get annotation association object.
+        annotation_assoc = db_session.query( annotation_assoc_class ).filter_by( user=user )
+
+        # TODO: use filtering like that in _get_item_id_filter_str()
+        if item.__class__ == galaxy.model.History:
+            annotation_assoc = annotation_assoc.filter_by( history=item )
+        elif item.__class__ == galaxy.model.HistoryDatasetAssociation:
+            annotation_assoc = annotation_assoc.filter_by( hda=item )
+        elif item.__class__ == galaxy.model.StoredWorkflow:
+            annotation_assoc = annotation_assoc.filter_by( stored_workflow=item )
+        elif item.__class__ == galaxy.model.WorkflowStep:
+            annotation_assoc = annotation_assoc.filter_by( workflow_step=item )
+        elif item.__class__ == galaxy.model.Page:
+            annotation_assoc = annotation_assoc.filter_by( page=item )
+        elif item.__class__ == galaxy.model.Visualization:
+            annotation_assoc = annotation_assoc.filter_by( visualization=item )
+        return annotation_assoc.first()
+
+    def add_item_annotation( self, db_session, user, item, annotation ):
+        """ Add or update an item's annotation; a user can only have a single annotation for an item. """
+        # Get/create annotation association object.
+        annotation_assoc = self.get_item_annotation_obj( db_session, user, item )
+        if not annotation_assoc:
+            annotation_assoc_class = self._get_annotation_assoc_class( item )
+            if not annotation_assoc_class:
+                return None
+            annotation_assoc = annotation_assoc_class()
+            item.annotations.append( annotation_assoc )
+            annotation_assoc.user = user
+        # Set annotation.
+        annotation_assoc.annotation = annotation
+        return annotation_assoc
+
+    def delete_item_annotation( self, db_session, user, item):
+        annotation_assoc = self.get_item_annotation_obj( db_session, user, item )
+        if annotation_assoc:
+            db_session.delete(annotation_assoc)
+            db_session.flush()
+
+    def copy_item_annotation( self, db_session, source_user, source_item, target_user, target_item ):
+        """ Copy an annotation from a user/item source to a user/item target. """
+        if source_user and target_user:
+            annotation_str = self.get_item_annotation_str( db_session, source_user, source_item )
+            if annotation_str:
+                annotation = self.add_item_annotation( db_session, target_user, target_item, annotation_str )
+                return annotation
+        return None
+
+    def _get_annotation_assoc_class( self, item ):
+        """ Returns an item's item-annotation association class. """
+        class_name = '%sAnnotationAssociation' % item.__class__.__name__
+        return getattr( galaxy.model, class_name, None )
+
+
+__all__ = (
+    'UsesAnnotations',
+    'UsesItemRatings',
+    'RuntimeException',
+)
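+
+# Usage sketch for UsesAnnotations (illustrative; ``session``, ``user`` and
+# ``history`` are hypothetical objects from a mapped Galaxy model):
+#
+#   mixin = UsesAnnotations()
+#   mixin.add_item_annotation( session, user, history, 'my notes' )
+#   mixin.get_item_annotation_str( session, user, history )  # -> u'my notes'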
diff --git a/lib/galaxy/model/mapping.py b/lib/galaxy/model/mapping.py
new file mode 100644
index 0000000..28dba68
--- /dev/null
+++ b/lib/galaxy/model/mapping.py
@@ -0,0 +1,2598 @@
+"""
+Details of how the data model objects are mapped onto the relational database
+are encapsulated here.
+"""
+
+import logging
+
+from sqlalchemy import (
+    and_,
+    asc,
+    Boolean,
+    Column,
+    DateTime,
+    desc,
+    false,
+    ForeignKey,
+    Integer,
+    MetaData,
+    not_,
+    Numeric,
+    select,
+    String,
+    Table,
+    TEXT,
+    Text,
+    true,
+    Unicode,
+    UniqueConstraint
+)
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.ext.orderinglist import ordering_list
+from sqlalchemy.orm import backref, class_mapper, deferred, mapper, object_session, relation
+from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy.types import BigInteger
+
+from galaxy import model
+from galaxy.model.base import ModelMapping
+from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
+from galaxy.model.orm.engine_factory import build_engine
+from galaxy.model.orm.now import now
+from galaxy.security import GalaxyRBACAgent
+
+log = logging.getLogger( __name__ )
+
+metadata = MetaData()
+
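+# The pattern used throughout this module (schematic, hypothetical model): a
+# Table is attached to each model class here, and a mapper() call further down
+# binds the two together.
+#
+#   model.Thing.table = Table( "thing", metadata,
+#       Column( "id", Integer, primary_key=True ) )
+#   mapper( model.Thing, model.Thing.table )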
+
+model.User.table = Table(
+    "galaxy_user", metadata,
+    Column( "id", Integer, primary_key=True),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "email", TrimmedString( 255 ), index=True, nullable=False ),
+    Column( "username", TrimmedString( 255 ), index=True, unique=True ),
+    Column( "password", TrimmedString( 255 ), nullable=False ),
+    Column( "last_password_change", DateTime, default=now ),
+    Column( "external", Boolean, default=False ),
+    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "disk_usage", Numeric( 15, 0 ), index=True ),
+    Column( "active", Boolean, index=True, default=True, nullable=False ),
+    Column( "activation_token", TrimmedString( 64 ), nullable=True, index=True ) )
+
+model.UserAddress.table = Table(
+    "user_address", metadata,
+    Column( "id", Integer, primary_key=True),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "desc", TrimmedString( 255 )),
+    Column( "name", TrimmedString( 255 ), nullable=False),
+    Column( "institution", TrimmedString( 255 )),
+    Column( "address", TrimmedString( 255 ), nullable=False),
+    Column( "city", TrimmedString( 255 ), nullable=False),
+    Column( "state", TrimmedString( 255 ), nullable=False),
+    Column( "postal_code", TrimmedString( 255 ), nullable=False),
+    Column( "country", TrimmedString( 255 ), nullable=False),
+    Column( "phone", TrimmedString( 255 )),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ) )
+
+model.UserOpenID.table = Table(
+    "galaxy_user_openid", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "openid", TEXT, index=True, unique=True ),
+    Column( "provider", TrimmedString( 255 ) ) )
+
+model.PasswordResetToken.table = Table(
+    "password_reset_token", metadata,
+    Column( "token", String( 32 ), primary_key=True, unique=True, index=True ),
+    Column( "expiration_time", DateTime ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+model.History.table = Table(
+    "history", metadata,
+    Column( "id", Integer, primary_key=True),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "hid_counter", Integer, default=1 ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "importing", Boolean, index=True, default=False ),
+    Column( "genome_build", TrimmedString( 40 ) ),
+    Column( "importable", Boolean, default=False ),
+    Column( "slug", TEXT, index=True ),
+    Column( "published", Boolean, index=True, default=False ) )
+
+model.HistoryUserShareAssociation.table = Table(
+    "history_user_share_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+model.HistoryDatasetAssociation.table = Table(
+    "history_dataset_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "state", TrimmedString( 64 ), index=True, key="_state" ),
+    Column( "copied_from_history_dataset_association_id", Integer,
+            ForeignKey( "history_dataset_association.id" ), nullable=True ),
+    Column( "copied_from_library_dataset_dataset_association_id", Integer,
+            ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "blurb", TrimmedString( 255 ) ),
+    Column( "peek", TEXT ),
+    Column( "tool_version", TEXT ),
+    Column( "extension", TrimmedString( 64 ) ),
+    Column( "metadata", MetadataType(), key="_metadata" ),
+    Column( "parent_id", Integer, ForeignKey( "history_dataset_association.id" ), nullable=True ),
+    Column( "designation", TrimmedString( 255 ) ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "visible", Boolean ),
+    Column( "extended_metadata_id", Integer, ForeignKey( "extended_metadata.id" ), index=True ),
+    Column( "hid", Integer ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "hidden_beneath_collection_instance_id",
+            ForeignKey( "history_dataset_collection_association.id" ), nullable=True ) )
+
+model.Dataset.table = Table(
+    "dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "state", TrimmedString( 64 ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "purgable", Boolean, default=True ),
+    Column( "object_store_id", TrimmedString( 255 ), index=True ),
+    Column( "external_filename", TEXT ),
+    Column( "_extra_files_path", TEXT ),
+    Column( 'file_size', Numeric( 15, 0 ) ),
+    Column( 'total_size', Numeric( 15, 0 ) ),
+    Column( 'uuid', UUIDType() ) )
+
+# hda read access permission given by a user to a specific site (generally for external display applications)
+model.HistoryDatasetAssociationDisplayAtAuthorization.table = Table(
+    "history_dataset_association_display_at_authorization", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "site", TrimmedString( 255 ) ) )
+
+model.HistoryDatasetAssociationSubset.table = Table(
+    "history_dataset_association_subset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "history_dataset_association_subset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "location", Unicode(255), index=True) )
+
+model.ImplicitlyConvertedDatasetAssociation.table = Table(
+    "implicitly_converted_dataset_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
+    Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
+    Column( "hda_parent_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "ldda_parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "metadata_safe", Boolean, index=True, default=True ),
+    Column( "type", TrimmedString( 255 ) ) )
+
+model.ValidationError.table = Table(
+    "validation_error", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "message", TrimmedString( 255 ) ),
+    Column( "err_type", TrimmedString( 64 ) ),
+    Column( "attributes", TEXT ) )
+
+model.Group.table = Table(
+    "galaxy_group", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", String( 255 ), index=True, unique=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.UserGroupAssociation.table = Table(
+    "user_group_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+model.UserRoleAssociation.table = Table(
+    "user_role_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+model.GroupRoleAssociation.table = Table(
+    "group_role_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+model.Role.table = Table(
+    "role", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", String( 255 ), index=True, unique=True ),
+    Column( "description", TEXT ),
+    Column( "type", String( 40 ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.UserQuotaAssociation.table = Table(
+    "user_quota_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+model.GroupQuotaAssociation.table = Table(
+    "group_quota_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+    Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+model.Quota.table = Table(
+    "quota", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", String( 255 ), index=True, unique=True ),
+    Column( "description", TEXT ),
+    Column( "bytes", BigInteger ),
+    Column( "operation", String( 8 ) ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.DefaultQuotaAssociation.table = Table(
+    "default_quota_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "type", String( 32 ), index=True, unique=True ),
+    Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ) )
+
+model.DatasetPermissions.table = Table(
+    "dataset_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+model.LibraryPermissions.table = Table(
+    "library_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_id", Integer, ForeignKey( "library.id" ), nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+model.LibraryFolderPermissions.table = Table(
+    "library_folder_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+model.LibraryDatasetPermissions.table = Table(
+    "library_dataset_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+model.LibraryDatasetDatasetAssociationPermissions.table = Table(
+    "library_dataset_dataset_association_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_dataset_dataset_association_id", Integer,
+        ForeignKey("library_dataset_dataset_association.id" ),
+        nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+model.DefaultUserPermissions.table = Table(
+    "default_user_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "action", TEXT ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+model.DefaultHistoryPermissions.table = Table(
+    "default_history_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "action", TEXT ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+model.LibraryDataset.table = Table(
+    "library_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    # current version of dataset; if null, there is no current version selected
+    Column( "library_dataset_dataset_association_id", Integer,
+        ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name="library_dataset_dataset_association_id_fk" ),
+        nullable=True, index=True ),
+    Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
+    # not currently being used, but for possible future use
+    Column( "order_id", Integer ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    # when not None/null this will supersede display in library (but not when imported into user's history?)
+    Column( "name", TrimmedString( 255 ), key="_name", index=True ),
+    # when not None/null this will supersede display in library (but not when imported into user's history?)
+    Column( "info", TrimmedString( 255 ), key="_info" ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ) )
+
+model.LibraryDatasetDatasetAssociation.table = Table(
+    "library_dataset_dataset_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "state", TrimmedString( 64 ), index=True, key="_state" ),
+    Column( "copied_from_history_dataset_association_id", Integer,
+        ForeignKey( "history_dataset_association.id", use_alter=True, name='history_dataset_association_dataset_id_fkey' ),
+        nullable=True ),
+    Column( "copied_from_library_dataset_dataset_association_id", Integer,
+        ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name='library_dataset_dataset_association_id_fkey' ),
+        nullable=True ),
+    Column( "name", TrimmedString( 255 ), index=True ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "blurb", TrimmedString( 255 ) ),
+    Column( "peek", TEXT ),
+    Column( "tool_version", TEXT ),
+    Column( "extension", TrimmedString( 64 ) ),
+    Column( "metadata", MetadataType(), key="_metadata" ),
+    Column( "parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "designation", TrimmedString( 255 ) ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "visible", Boolean ),
+    Column( "extended_metadata_id", Integer, ForeignKey( "extended_metadata.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "message", TrimmedString( 255 ) ) )
+
+model.ExtendedMetadata.table = Table(
+    "extended_metadata", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "data", JSONType ) )
+
+model.ExtendedMetadataIndex.table = Table(
+    "extended_metadata_index", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "extended_metadata_id", Integer,
+        ForeignKey("extended_metadata.id", onupdate="CASCADE", ondelete="CASCADE" ), index=True ),
+    Column( "path", String( 255 )),
+    Column( "value", TEXT))
+
+model.Library.table = Table(
+    "library", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "root_folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", String( 255 ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "description", TEXT ),
+    Column( "synopsis", TEXT ) )
+
+model.LibraryFolder.table = Table(
+    "library_folder", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "parent_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TEXT, index=True ),
+    Column( "description", TEXT ),
+    Column( "order_id", Integer ),  # not currently being used, but for possible future use
+    Column( "item_count", Integer ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "genome_build", TrimmedString( 40 ) ) )
+
+model.LibraryInfoAssociation.table = Table(
+    "library_info_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
+    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+    Column( "inheritable", Boolean, index=True, default=False ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.LibraryFolderInfoAssociation.table = Table(
+    "library_folder_info_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+    Column( "inheritable", Boolean, index=True, default=False ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.LibraryDatasetDatasetInfoAssociation.table = Table(
+    "library_dataset_dataset_info_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_dataset_association_id", Integer,
+        ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
+    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.Job.table = Table(
+    "job", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
+    Column( "tool_id", String( 255 ) ),
+    Column( "tool_version", TEXT, default="1.0.0" ),
+    Column( "state", String( 64 ), index=True ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "command_line", TEXT ),
+    Column( "dependencies", JSONType, nullable=True),
+    Column( "param_filename", String( 1024 ) ),
+    Column( "runner_name", String( 255 ) ),
+    Column( "stdout", TEXT ),
+    Column( "stderr", TEXT ),
+    Column( "exit_code", Integer, nullable=True ),
+    Column( "traceback", TEXT ),
+    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
+    Column( "job_runner_name", String( 255 ) ),
+    Column( "job_runner_external_id", String( 255 ) ),
+    Column( "destination_id", String( 255 ), nullable=True ),
+    Column( "destination_params", JSONType, nullable=True ),
+    Column( "object_store_id", TrimmedString( 255 ), index=True ),
+    Column( "imported", Boolean, default=False, index=True ),
+    Column( "params", TrimmedString(255), index=True ),
+    Column( "handler", TrimmedString( 255 ), index=True ) )
+
+model.JobStateHistory.table = Table(
+    "job_state_history", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "state", String( 64 ), index=True ),
+    Column( "info", TrimmedString( 255 ) ) )
+
+model.JobParameter.table = Table(
+    "job_parameter", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "name", String(255) ),
+    Column( "value", TEXT ) )
+
+model.JobToInputDatasetAssociation.table = Table(
+    "job_to_input_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "name", String(255) ) )
+
+model.JobToOutputDatasetAssociation.table = Table(
+    "job_to_output_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "name", String(255) ) )
+
+model.JobToInputDatasetCollectionAssociation.table = Table(
+    "job_to_input_dataset_collection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+    Column( "name", Unicode(255) ) )
+
+model.JobToImplicitOutputDatasetCollectionAssociation.table = Table(
+    "job_to_implicit_output_dataset_collection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "dataset_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
+    Column( "name", Unicode(255) ) )
+
+model.JobToOutputDatasetCollectionAssociation.table = Table(
+    "job_to_output_dataset_collection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+    Column( "name", Unicode(255) ) )
+
+model.JobToInputLibraryDatasetAssociation.table = Table(
+    "job_to_input_library_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
+    Column( "name", String(255) ) )
+
+model.JobToOutputLibraryDatasetAssociation.table = Table(
+    "job_to_output_library_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
+    Column( "name", String(255) ) )
+
+model.ImplicitlyCreatedDatasetCollectionInput.table = Table(
+    "implicitly_created_dataset_collection_inputs", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "dataset_collection_id", Integer,
+        ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+    Column( "input_dataset_collection_id", Integer,
+        ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+    Column( "name", Unicode(255) ) )
+
+model.JobExternalOutputMetadata.table = Table(
+    "job_external_output_metadata", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "history_dataset_association_id", Integer,
+        ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
+    Column( "library_dataset_dataset_association_id", Integer,
+        ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
+    Column( "is_valid", Boolean, default=True ),
+    Column( "filename_in", String( 255 ) ),
+    Column( "filename_out", String( 255 ) ),
+    Column( "filename_results_code", String( 255 ) ),
+    Column( "filename_kwds", String( 255 ) ),
+    Column( "filename_override_metadata", String( 255 ) ),
+    Column( "job_runner_external_pid", String( 255 ) ) )
+
+model.JobExportHistoryArchive.table = Table(
+    "job_export_history_archive", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "compressed", Boolean, index=True, default=False ),
+    Column( "history_attrs_filename", TEXT ),
+    Column( "datasets_attrs_filename", TEXT ),
+    Column( "jobs_attrs_filename", TEXT ) )
+
+model.JobImportHistoryArchive.table = Table(
+    "job_import_history_archive", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "archive_dir", TEXT ) )
+
+
+JOB_METRIC_MAX_LENGTH = 1023
+
+model.JobMetricText.table = Table(
+    "job_metric_text", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "plugin", Unicode(255) ),
+    Column( "metric_name", Unicode(255) ),
+    Column( "metric_value", Unicode(JOB_METRIC_MAX_LENGTH) ) )
+
+model.TaskMetricText.table = Table(
+    "task_metric_text", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "task_id", Integer, ForeignKey( "task.id" ), index=True ),
+    Column( "plugin", Unicode(255) ),
+    Column( "metric_name", Unicode(255) ),
+    Column( "metric_value", Unicode(JOB_METRIC_MAX_LENGTH) ) )
+
+model.JobMetricNumeric.table = Table(
+    "job_metric_numeric", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "plugin", Unicode(255) ),
+    Column( "metric_name", Unicode(255) ),
+    Column( "metric_value", Numeric( 22, 7 ) ) )
+
+model.TaskMetricNumeric.table = Table(
+    "task_metric_numeric", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "task_id", Integer, ForeignKey( "task.id" ), index=True ),
+    Column( "plugin", Unicode(255) ),
+    Column( "metric_name", Unicode(255) ),
+    Column( "metric_value", Numeric( 22, 7 ) ) )
+
+
+model.GenomeIndexToolData.table = Table(
+    "genome_index_tool_data", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "deferred_job_id", Integer, ForeignKey( "deferred_job.id" ), index=True ),
+    Column( "transfer_job_id", Integer, ForeignKey( "transfer_job.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "fasta_path", String( 255 ) ),
+    Column( "created_time", DateTime, default=now ),
+    Column( "modified_time", DateTime, default=now, onupdate=now ),
+    Column( "indexer", String( 64 ) ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+model.Task.table = Table(
+    "task", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "execution_time", DateTime ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "state", String( 64 ), index=True ),
+    Column( "command_line", TEXT ),
+    Column( "param_filename", String( 1024 ) ),
+    Column( "runner_name", String( 255 ) ),
+    Column( "stdout", TEXT ),
+    Column( "stderr", TEXT ),
+    Column( "exit_code", Integer, nullable=True ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "traceback", TEXT ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=False ),
+    Column( "working_directory", String(1024)),
+    Column( "task_runner_name", String( 255 ) ),
+    Column( "task_runner_external_id", String( 255 ) ),
+    Column( "prepare_input_files_cmd", TEXT ) )
+
+model.PostJobAction.table = Table(
+    "post_job_action", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False ),
+    Column( "action_type", String(255), nullable=False ),
+    Column( "output_name", String(255), nullable=True ),
+    Column( "action_arguments", JSONType, nullable=True ) )
+
+model.PostJobActionAssociation.table = Table(
+    "post_job_action_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=False ),
+    Column( "post_job_action_id", Integer, ForeignKey( "post_job_action.id" ), index=True, nullable=False ) )
+
+model.DeferredJob.table = Table(
+    "deferred_job", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "state", String( 64 ), index=True ),
+    Column( "plugin", String( 128 ), index=True ),
+    Column( "params", JSONType ) )
+
+model.TransferJob.table = Table(
+    "transfer_job", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "state", String( 64 ), index=True ),
+    Column( "path", String( 1024 ) ),
+    Column( "info", TEXT ),
+    Column( "pid", Integer ),
+    Column( "socket", Integer ),
+    Column( "params", JSONType ) )
+
+model.DatasetCollection.table = Table(
+    "dataset_collection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "collection_type", Unicode(255), nullable=False ),
+    Column( "populated_state", TrimmedString( 64 ), default='ok', nullable=False ),
+    Column( "populated_state_message", TEXT ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+model.HistoryDatasetCollectionAssociation.table = Table(
+    "history_dataset_collection_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "hid", Integer ),
+    Column( "visible", Boolean ),
+    Column( "deleted", Boolean, default=False ),
+    Column( "copied_from_history_dataset_collection_association_id", Integer,
+        ForeignKey( "history_dataset_collection_association.id" ), nullable=True ),
+    Column( "implicit_output_name", Unicode(255), nullable=True ) )
+
+model.LibraryDatasetCollectionAssociation.table = Table(
+    "library_dataset_collection_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
+    Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "deleted", Boolean, default=False ) )
+
+model.DatasetCollectionElement.table = Table(
+    "dataset_collection_element", metadata,
+    Column( "id", Integer, primary_key=True ),
+    # Parent collection id describing what collection this element belongs to.
+    Column( "dataset_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True, nullable=False ),
+    # Child defined by this association - an HDA, an LDDA, or a nested child collection...
+    Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
+    Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
+    Column( "child_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True, nullable=True ),
+    # Element index and identifier to define this parent-child relationship.
+    Column( "element_index", Integer ),
+    Column( "element_identifier", Unicode(255), ) )
+
+model.Event.table = Table(
+    "event", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True, nullable=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
+    Column( "message", TrimmedString( 1024 ) ),
+    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
+    Column( "tool_id", String( 255 ) ) )
+
+model.GalaxySession.table = Table(
+    "galaxy_session", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
+    Column( "remote_host", String( 255 ) ),
+    Column( "remote_addr", String( 255 ) ),
+    Column( "referer", TEXT ),
+    Column( "current_history_id", Integer, ForeignKey( "history.id" ), nullable=True ),
+    # unique 128 bit random number coerced to a string
+    Column( "session_key", TrimmedString( 255 ), index=True, unique=True ),
+    Column( "is_valid", Boolean, default=False ),
+    # saves a reference to the previous session so we have a way to chain them together
+    Column( "prev_session_id", Integer ),
+    Column( "disk_usage", Numeric( 15, 0 ), index=True ),
+    Column( "last_action", DateTime) )
+
+model.GalaxySessionToHistoryAssociation.table = Table(
+    "galaxy_session_to_history", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ) )
+
+model.StoredWorkflow.table = Table(
+    "stored_workflow", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+    Column( "latest_workflow_id", Integer,
+        ForeignKey( "workflow.id", use_alter=True, name='stored_workflow_latest_workflow_id_fk' ), index=True ),
+    Column( "name", TEXT ),
+    Column( "deleted", Boolean, default=False ),
+    Column( "importable", Boolean, default=False ),
+    Column( "slug", TEXT, index=True ),
+    Column( "published", Boolean, index=True, default=False ) )
+
+model.Workflow.table = Table(
+    "workflow", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    # workflows will belong to either a stored workflow or a parent/nesting workflow.
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True, nullable=True ),
+    Column( "parent_workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=True ),
+    Column( "name", TEXT ),
+    Column( "has_cycles", Boolean ),
+    Column( "has_errors", Boolean ),
+    Column( "uuid", UUIDType, nullable=True ) )
+
+model.WorkflowStep.table = Table(
+    "workflow_step", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ),
+    Column( "subworkflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=True ),
+    Column( "type", String(64) ),
+    Column( "tool_id", TEXT ),
+    # Reserved for future use.
+    Column( "tool_version", TEXT ),
+    Column( "tool_inputs", JSONType ),
+    Column( "tool_errors", JSONType ),
+    Column( "position", JSONType ),
+    Column( "config", JSONType ),
+    Column( "order_index", Integer ),
+    Column( "uuid", UUIDType ),
+    # Column( "input_connections", JSONType ),
+    Column( "label", Unicode(255) ) )
+
+model.WorkflowRequestStepState.table = Table(
+    "workflow_request_step_states", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_invocation_id", Integer,
+        ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE" )),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id" )),
+    Column( "value", JSONType ) )
+
+model.WorkflowRequestInputParameter.table = Table(
+    "workflow_request_input_parameters", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_invocation_id", Integer,
+        ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE" )),
+    Column( "name", Unicode(255) ),
+    Column( "value", TEXT ),
+    Column( "type", Unicode(255) ) )
+
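+# NB: "Parmeter" below is not a transcription error in this listing; it mirrors
+# the (misspelled) class name as defined in galaxy.model, and renaming only
+# this reference would break the mapping.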
+model.WorkflowRequestInputStepParmeter.table = Table(
+    "workflow_request_input_step_parameter", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
+    Column( "parameter_value", JSONType ),
+)
+
+model.WorkflowRequestToInputDatasetAssociation.table = Table(
+    "workflow_request_to_input_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "name", String(255) ),
+    Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
+    Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ) )
+
+model.WorkflowRequestToInputDatasetCollectionAssociation.table = Table(
+    "workflow_request_to_input_collection_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "name", String(255) ),
+    Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
+    Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ) )
+
+model.WorkflowStepConnection.table = Table(
+    "workflow_step_connection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "output_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+    Column( "input_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+    Column( "output_name", TEXT ),
+    Column( "input_name", TEXT ),
+    Column( "input_subworkflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+)
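+
+# Editor's note: each row above encodes one edge of the workflow graph,
+# ( output_step_id, output_name ) -> ( input_step_id, input_name );
+# input_subworkflow_step_id additionally targets a step inside the input
+# step's subworkflow when one is attached.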
+
+model.WorkflowOutput.table = Table(
+    "workflow_output", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True, nullable=False ),
+    Column( "output_name", String(255), nullable=True ),
+    Column( "label", Unicode(255) ),
+    Column( "uuid", UUIDType ),
+)
+
+model.WorkflowInvocation.table = Table(
+    "workflow_invocation", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ),
+    Column( "state", TrimmedString( 64 ), index=True ),
+    Column( "scheduler", TrimmedString( 255 ), index=True ),
+    Column( "handler", TrimmedString( 255 ), index=True ),
+    Column( "uuid", UUIDType() ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ) )
+
+model.WorkflowInvocationStep.table = Table(
+    "workflow_invocation_step", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True, nullable=False ),
+    Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=True ),
+    Column( "action", JSONType, nullable=True ) )
+
+model.WorkflowInvocationToSubworkflowInvocationAssociation.table = Table(
+    "workflow_invocation_to_subworkflow_invocation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
+    Column( "subworkflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
+)
+
+model.StoredWorkflowUserShareAssociation.table = Table(
+    "stored_workflow_user_share_connection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+model.StoredWorkflowMenuEntry.table = Table(
+    "stored_workflow_menu_entry", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "order_index", Integer ) )
+
+model.MetadataFile.table = Table(
+    "metadata_file", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "name", TEXT ),
+    Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
+    Column( "lda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "object_store_id", TrimmedString( 255 ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ) )
+
+model.FormDefinitionCurrent.table = Table(
+    "form_definition_current", metadata,
+    Column( "id", Integer, primary_key=True),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "latest_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.FormDefinition.table = Table(
+    "form_definition", metadata,
+    Column( "id", Integer, primary_key=True),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TrimmedString( 255 ), nullable=False ),
+    Column( "desc", TEXT ),
+    Column( "form_definition_current_id", Integer,
+        ForeignKey( "form_definition_current.id", name='for_def_form_def_current_id_fk', use_alter=True ), index=True ),
+    Column( "fields", JSONType() ),
+    Column( "type", TrimmedString( 255 ), index=True ),
+    Column( "layout", JSONType() ) )
+
+model.ExternalService.table = Table(
+    "external_service", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TrimmedString( 255 ), nullable=False ),
+    Column( "description", TEXT ),
+    Column( "external_service_type_id", TrimmedString( 255 ), nullable=False ),
+    Column( "version", TrimmedString( 255 ) ),
+    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.RequestType.table = Table(
+    "request_type", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TrimmedString( 255 ), nullable=False ),
+    Column( "desc", TEXT ),
+    Column( "request_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "sample_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.RequestTypeExternalServiceAssociation.table = Table(
+    "request_type_external_service_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
+    Column( "external_service_id", Integer, ForeignKey( "external_service.id" ), index=True ) )
+
+model.RequestTypePermissions.table = Table(
+    "request_type_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+model.FormValues.table = Table(
+    "form_values", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "content", JSONType()) )
+
+model.Request.table = Table(
+    "request", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TrimmedString( 255 ), nullable=False ),
+    Column( "desc", TEXT ),
+    Column( "notification", JSONType() ),
+    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+    Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+model.RequestEvent.table = Table(
+    "request_event", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "request_id", Integer, ForeignKey( "request.id" ), index=True ),
+    Column( "state", TrimmedString( 255 ), index=True ),
+    Column( "comment", TEXT ) )
+
+model.Sample.table = Table(
+    "sample", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TrimmedString( 255 ), nullable=False ),
+    Column( "desc", TEXT ),
+    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+    Column( "request_id", Integer, ForeignKey( "request.id" ), index=True ),
+    Column( "bar_code", TrimmedString( 255 ), index=True ),
+    Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
+    Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "workflow", JSONType, nullable=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), nullable=True ) )
+
+model.SampleState.table = Table(
+    "sample_state", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TrimmedString( 255 ), nullable=False ),
+    Column( "desc", TEXT ),
+    Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ) )
+
+model.SampleEvent.table = Table(
+    "sample_event", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
+    Column( "sample_state_id", Integer, ForeignKey( "sample_state.id" ), index=True ),
+    Column( "comment", TEXT ) )
+
+model.SampleDataset.table = Table(
+    "sample_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
+    Column( "name", TrimmedString( 255 ), nullable=False ),
+    Column( "file_path", TEXT ),
+    Column( "status", TrimmedString( 255 ), nullable=False ),
+    Column( "error_msg", TEXT ),
+    Column( "size", TrimmedString( 255 ) ),
+    Column( "external_service_id", Integer, ForeignKey( "external_service.id" ), index=True ) )
+
+model.Run.table = Table(
+    "run", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "subindex", TrimmedString( 255 ), index=True ) )
+
+model.RequestTypeRunAssociation.table = Table(
+    "request_type_run_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True, nullable=False ),
+    Column( "run_id", Integer, ForeignKey( "run.id" ), index=True, nullable=False ) )
+
+model.SampleRunAssociation.table = Table(
+    "sample_run_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True, nullable=False ),
+    Column( "run_id", Integer, ForeignKey( "run.id" ), index=True, nullable=False ) )
+
+model.Page.table = Table(
+    "page", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+    Column( "latest_revision_id", Integer,
+        ForeignKey( "page_revision.id", use_alter=True, name='page_latest_revision_id_fk' ), index=True ),
+    Column( "title", TEXT ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "importable", Boolean, index=True, default=False ),
+    Column( "slug", TEXT, unique=True, index=True ),
+    Column( "published", Boolean, index=True, default=False ) )
+
+model.PageRevision.table = Table(
+    "page_revision", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "page_id", Integer, ForeignKey( "page.id" ), index=True, nullable=False ),
+    Column( "title", TEXT ),
+    Column( "content", TEXT ) )
+
+model.PageUserShareAssociation.table = Table(
+    "page_user_share_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+model.Visualization.table = Table(
+    "visualization", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+    Column( "latest_revision_id", Integer,
+        ForeignKey( "visualization_revision.id", use_alter=True, name='visualization_latest_revision_id_fk' ), index=True ),
+    Column( "title", TEXT ),
+    Column( "type", TEXT ),
+    Column( "dbkey", TEXT, index=True ),
+    Column( "deleted", Boolean, default=False, index=True ),
+    Column( "importable", Boolean, default=False, index=True ),
+    Column( "slug", TEXT, index=True ),
+    Column( "published", Boolean, default=False, index=True ) )
+
+model.VisualizationRevision.table = Table(
+    "visualization_revision", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True, nullable=False ),
+    Column( "title", TEXT ),
+    Column( "dbkey", TEXT, index=True ),
+    Column( "config", JSONType ) )
+
+model.VisualizationUserShareAssociation.table = Table(
+    "visualization_user_share_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+# Data Manager tables
+model.DataManagerHistoryAssociation.table = Table(
+    "data_manager_history_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+model.DataManagerJobAssociation.table = Table(
+    "data_manager_job_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "data_manager_id", TEXT, index=True ) )
+
+# Tagging tables.
+model.Tag.table = Table(
+    "tag", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "type", Integer ),
+    Column( "parent_id", Integer, ForeignKey( "tag.id" ) ),
+    Column( "name", TrimmedString(255) ),
+    UniqueConstraint( "name" ) )
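+
+# Editor's note (hedged): tag.parent_id lets tag names nest, and the
+# *TagAssociation tables below appear to denormalize the user-visible
+# "name:value" form into user_tname / user_value alongside the canonical
+# tag_id reference.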
+
+model.HistoryTagAssociation.table = Table(
+    "history_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True ),
+    Column( "value", TrimmedString(255), index=True ),
+    Column( "user_value", TrimmedString(255), index=True ) )
+
+model.DatasetTagAssociation.table = Table(
+    "dataset_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True ),
+    Column( "value", TrimmedString(255), index=True ),
+    Column( "user_value", TrimmedString(255), index=True ) )
+
+model.HistoryDatasetAssociationTagAssociation.table = Table(
+    "history_dataset_association_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True ),
+    Column( "value", TrimmedString(255), index=True ),
+    Column( "user_value", TrimmedString(255), index=True ) )
+
+model.StoredWorkflowTagAssociation.table = Table(
+    "stored_workflow_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", Unicode(255), index=True ),
+    Column( "value", Unicode(255), index=True ),
+    Column( "user_value", Unicode(255), index=True ) )
+
+model.PageTagAssociation.table = Table(
+    "page_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True ),
+    Column( "value", TrimmedString(255), index=True ),
+    Column( "user_value", TrimmedString(255), index=True ) )
+
+model.WorkflowStepTagAssociation.table = Table(
+    "workflow_step_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", Unicode(255), index=True ),
+    Column( "value", Unicode(255), index=True ),
+    Column( "user_value", Unicode(255), index=True ) )
+
+model.VisualizationTagAssociation.table = Table(
+    "visualization_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True ),
+    Column( "value", TrimmedString(255), index=True ),
+    Column( "user_value", TrimmedString(255), index=True ) )
+
+model.HistoryDatasetCollectionTagAssociation.table = Table(
+    "history_dataset_collection_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_collection_id", Integer,
+        ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True ),
+    Column( "value", TrimmedString(255), index=True ),
+    Column( "user_value", TrimmedString(255), index=True ) )
+
+model.LibraryDatasetCollectionTagAssociation.table = Table(
+    "library_dataset_collection_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_collection_id", Integer,
+        ForeignKey( "library_dataset_collection_association.id" ), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True ),
+    Column( "value", TrimmedString(255), index=True ),
+    Column( "user_value", TrimmedString(255), index=True ) )
+
+model.ToolTagAssociation.table = Table(
+    "tool_tag_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "tool_id", TrimmedString(255), index=True ),
+    Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "user_tname", TrimmedString(255), index=True ),
+    Column( "value", TrimmedString(255), index=True ),
+    Column( "user_value", TrimmedString(255), index=True ) )
+
+# Annotation tables.
+
+model.HistoryAnnotationAssociation.table = Table(
+    "history_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True ) )
+
+model.HistoryDatasetAssociationAnnotationAssociation.table = Table(
+    "history_dataset_association_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_association_id", Integer,
+        ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True ) )
+
+model.StoredWorkflowAnnotationAssociation.table = Table(
+    "stored_workflow_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True ) )
+
+model.WorkflowStepAnnotationAssociation.table = Table(
+    "workflow_step_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True ) )
+
+model.PageAnnotationAssociation.table = Table(
+    "page_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True ) )
+
+model.VisualizationAnnotationAssociation.table = Table(
+    "visualization_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True ) )
+
+model.HistoryDatasetCollectionAnnotationAssociation.table = Table(
+    "history_dataset_collection_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_collection_id", Integer,
+        ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True ) )
+
+model.LibraryDatasetCollectionAnnotationAssociation.table = Table(
+    "library_dataset_collection_annotation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_collection_id", Integer,
+        ForeignKey( "library_dataset_collection_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "annotation", TEXT, index=True ) )
+
+# Ratings tables.
+model.HistoryRatingAssociation.table = Table(
+    "history_rating_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "rating", Integer, index=True ) )
+
+model.HistoryDatasetAssociationRatingAssociation.table = Table(
+    "history_dataset_association_rating_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_association_id", Integer,
+        ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "rating", Integer, index=True ) )
+
+model.StoredWorkflowRatingAssociation.table = Table(
+    "stored_workflow_rating_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "rating", Integer, index=True ) )
+
+model.PageRatingAssociation.table = Table(
+    "page_rating_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "rating", Integer, index=True ) )
+
+model.VisualizationRatingAssociation.table = Table(
+    "visualization_rating_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "rating", Integer, index=True ) )
+
+model.HistoryDatasetCollectionRatingAssociation.table = Table(
+    "history_dataset_collection_rating_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_dataset_collection_id", Integer,
+        ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "rating", Integer, index=True ) )
+
+model.LibraryDatasetCollectionRatingAssociation.table = Table(
+    "library_dataset_collection_rating_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_collection_id", Integer,
+        ForeignKey( "library_dataset_collection_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "rating", Integer, index=True ) )
+
+# User tables.
+model.UserPreference.table = Table(
+    "user_preference", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "name", Unicode( 255 ), index=True),
+    Column( "value", Text ) )
+
+model.UserAction.table = Table(
+    "user_action", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
+    Column( "action", Unicode( 255 ) ),
+    Column( "context", Unicode( 512 ) ),
+    Column( "params", Unicode( 1024 ) ) )
+
+model.APIKeys.table = Table(
+    "api_keys", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "key", TrimmedString( 32 ), index=True, unique=True ) )
+
+
+# With the tables defined we can define the mappers and setup the
+# relationships between the model objects.
+def simple_mapping( model, **kwds ):
+    mapper( model, model.table, properties=kwds )
+
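+# Editor's note: the "model" parameter of simple_mapping shadows the
+# module-level "model" import; only the passed class is used. For example,
+# simple_mapping( model.Dataset, ... ) below expands to
+# mapper( model.Dataset, model.Dataset.table, properties=dict( ... ) ).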
+
+mapper( model.Sample, model.Sample.table, properties=dict(
+    events=relation( model.SampleEvent,
+        backref="sample",
+        order_by=desc( model.SampleEvent.table.c.update_time ) ),
+    datasets=relation( model.SampleDataset,
+        backref="sample",
+        order_by=desc( model.SampleDataset.table.c.update_time ) ),
+    values=relation( model.FormValues,
+        primaryjoin=( model.Sample.table.c.form_values_id == model.FormValues.table.c.id ) ),
+    request=relation( model.Request,
+        primaryjoin=( model.Sample.table.c.request_id == model.Request.table.c.id ) ),
+    folder=relation( model.LibraryFolder,
+        primaryjoin=( model.Sample.table.c.folder_id == model.LibraryFolder.table.c.id ) ),
+    library=relation( model.Library,
+        primaryjoin=( model.Sample.table.c.library_id == model.Library.table.c.id ) ),
+    history=relation( model.History,
+        primaryjoin=( model.Sample.table.c.history_id == model.History.table.c.id ) ),
+) )
+
+mapper( model.FormValues, model.FormValues.table, properties=dict(
+    form_definition=relation( model.FormDefinition,
+        primaryjoin=( model.FormValues.table.c.form_definition_id == model.FormDefinition.table.c.id ) )
+) )
+
+mapper( model.Request, model.Request.table, properties=dict(
+    values=relation( model.FormValues,
+        primaryjoin=( model.Request.table.c.form_values_id == model.FormValues.table.c.id )),
+    type=relation( model.RequestType,
+        primaryjoin=( model.Request.table.c.request_type_id == model.RequestType.table.c.id ) ),
+    user=relation( model.User,
+        primaryjoin=( model.Request.table.c.user_id == model.User.table.c.id ),
+        backref="requests" ),
+    samples=relation( model.Sample,
+        primaryjoin=( model.Request.table.c.id == model.Sample.table.c.request_id ),
+        order_by=asc( model.Sample.table.c.id ) ),
+    events=relation( model.RequestEvent,
+        backref="request",
+        order_by=desc( model.RequestEvent.table.c.update_time ) )
+) )
+
+mapper( model.RequestEvent, model.RequestEvent.table, properties=None )
+
+mapper( model.ExternalService, model.ExternalService.table, properties=dict(
+    form_definition=relation( model.FormDefinition,
+        primaryjoin=( model.ExternalService.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
+    form_values=relation( model.FormValues,
+        primaryjoin=( model.ExternalService.table.c.form_values_id == model.FormValues.table.c.id ) )
+) )
+
+mapper( model.RequestType, model.RequestType.table, properties=dict(
+    states=relation( model.SampleState,
+        backref="request_type",
+        primaryjoin=( model.RequestType.table.c.id == model.SampleState.table.c.request_type_id ),
+        order_by=asc( model.SampleState.table.c.update_time ) ),
+    request_form=relation( model.FormDefinition,
+        primaryjoin=( model.RequestType.table.c.request_form_id == model.FormDefinition.table.c.id ) ),
+    sample_form=relation( model.FormDefinition,
+        primaryjoin=( model.RequestType.table.c.sample_form_id == model.FormDefinition.table.c.id ) ),
+) )
+
+mapper( model.RequestTypeExternalServiceAssociation, model.RequestTypeExternalServiceAssociation.table, properties=dict(
+    request_type=relation( model.RequestType,
+        primaryjoin=( ( model.RequestTypeExternalServiceAssociation.table.c.request_type_id == model.RequestType.table.c.id ) ),
+        backref="external_service_associations" ),
+    external_service=relation( model.ExternalService,
+        primaryjoin=( model.RequestTypeExternalServiceAssociation.table.c.external_service_id == model.ExternalService.table.c.id ) )
+) )
+
+
+mapper( model.RequestTypePermissions, model.RequestTypePermissions.table, properties=dict(
+    request_type=relation( model.RequestType, backref="actions" ),
+    role=relation( model.Role, backref="request_type_actions" )
+) )
+
+mapper( model.FormDefinition, model.FormDefinition.table, properties=dict(
+    current=relation( model.FormDefinitionCurrent,
+        primaryjoin=( model.FormDefinition.table.c.form_definition_current_id == model.FormDefinitionCurrent.table.c.id ) )
+) )
+
+mapper( model.FormDefinitionCurrent, model.FormDefinitionCurrent.table, properties=dict(
+    forms=relation( model.FormDefinition,
+        backref='form_definition_current',
+        cascade="all, delete-orphan",
+        primaryjoin=( model.FormDefinitionCurrent.table.c.id == model.FormDefinition.table.c.form_definition_current_id ) ),
+    latest_form=relation( model.FormDefinition,
+        post_update=True,
+        primaryjoin=( model.FormDefinitionCurrent.table.c.latest_form_id == model.FormDefinition.table.c.id ) )
+) )
+
+mapper( model.SampleEvent, model.SampleEvent.table, properties=dict(
+    state=relation( model.SampleState,
+        primaryjoin=( model.SampleEvent.table.c.sample_state_id == model.SampleState.table.c.id ) ),
+) )
+
+mapper( model.SampleState, model.SampleState.table, properties=None )
+
+mapper( model.SampleDataset, model.SampleDataset.table, properties=dict(
+    external_service=relation( model.ExternalService,
+        primaryjoin=( model.SampleDataset.table.c.external_service_id == model.ExternalService.table.c.id ) )
+) )
+
+
+mapper( model.SampleRunAssociation, model.SampleRunAssociation.table, properties=dict(
+    sample=relation( model.Sample, backref="runs", order_by=desc( model.Run.table.c.update_time ) ),
+    run=relation( model.Run, backref="samples", order_by=asc( model.Sample.table.c.id ) )
+) )
+
+mapper( model.RequestTypeRunAssociation, model.RequestTypeRunAssociation.table, properties=dict(
+    request_type=relation( model.RequestType, backref="run" ),
+    run=relation( model.Run, backref="request_type" )
+) )
+
+mapper( model.Run, model.Run.table, properties=dict(
+    template=relation( model.FormDefinition,
+        primaryjoin=( model.Run.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
+    info=relation( model.FormValues,
+        primaryjoin=( model.Run.table.c.form_values_id == model.FormValues.table.c.id ) )
+) )
+
+mapper( model.UserAddress, model.UserAddress.table, properties=dict(
+    user=relation( model.User,
+        primaryjoin=( model.UserAddress.table.c.user_id == model.User.table.c.id ),
+        backref='addresses',
+        order_by=desc(model.UserAddress.table.c.update_time ) ),
+) )
+
+mapper( model.UserOpenID, model.UserOpenID.table, properties=dict(
+    session=relation( model.GalaxySession,
+        primaryjoin=( model.UserOpenID.table.c.session_id == model.GalaxySession.table.c.id ),
+        backref='openids',
+        order_by=desc( model.UserOpenID.table.c.update_time ) ),
+    user=relation( model.User,
+        primaryjoin=( model.UserOpenID.table.c.user_id == model.User.table.c.id ),
+        backref='openids',
+        order_by=desc( model.UserOpenID.table.c.update_time ) )
+) )
+
+mapper( model.ValidationError, model.ValidationError.table )
+
+simple_mapping( model.HistoryDatasetAssociation,
+    dataset=relation( model.Dataset,
+        primaryjoin=( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ), lazy=False ),
+    # .history defined in History mapper
+    copied_from_history_dataset_association=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id ==
+                      model.HistoryDatasetAssociation.table.c.id ),
+        remote_side=[model.HistoryDatasetAssociation.table.c.id],
+        uselist=False ),
+    copied_to_history_dataset_associations=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id ==
+                      model.HistoryDatasetAssociation.table.c.id ) ),
+    copied_from_library_dataset_dataset_association=relation(
+        model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ),
+        uselist=False ),
+    copied_to_library_dataset_dataset_associations=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ) ),
+    implicitly_converted_datasets=relation( model.ImplicitlyConvertedDatasetAssociation,
+        primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id ==
+                      model.HistoryDatasetAssociation.table.c.id ) ),
+    implicitly_converted_parent_datasets=relation( model.ImplicitlyConvertedDatasetAssociation,
+        primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_id ==
+                      model.HistoryDatasetAssociation.table.c.id ) ),
+    children=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.HistoryDatasetAssociation.table.c.parent_id ==
+                      model.HistoryDatasetAssociation.table.c.id ),
+        backref=backref( "parent",
+            primaryjoin=( model.HistoryDatasetAssociation.table.c.parent_id ==
+                          model.HistoryDatasetAssociation.table.c.id ),
+            remote_side=[model.HistoryDatasetAssociation.table.c.id], uselist=False ) ),
+    visible_children=relation( model.HistoryDatasetAssociation,
+        primaryjoin=(
+            ( model.HistoryDatasetAssociation.table.c.parent_id == model.HistoryDatasetAssociation.table.c.id ) &
+            ( model.HistoryDatasetAssociation.table.c.visible == true() ) ),
+        remote_side=[model.HistoryDatasetAssociation.table.c.id] ),
+    tags=relation( model.HistoryDatasetAssociationTagAssociation,
+        order_by=model.HistoryDatasetAssociationTagAssociation.table.c.id,
+        backref='history_tag_associations' ),
+    annotations=relation( model.HistoryDatasetAssociationAnnotationAssociation,
+        order_by=model.HistoryDatasetAssociationAnnotationAssociation.table.c.id,
+        backref="hdas" ),
+    ratings=relation( model.HistoryDatasetAssociationRatingAssociation,
+        order_by=model.HistoryDatasetAssociationRatingAssociation.table.c.id,
+        backref="hdas" ),
+    extended_metadata=relation( model.ExtendedMetadata,
+        primaryjoin=( ( model.HistoryDatasetAssociation.table.c.extended_metadata_id ==
+                        model.ExtendedMetadata.table.c.id ) ) ),
+    hidden_beneath_collection_instance=relation( model.HistoryDatasetCollectionAssociation,
+        primaryjoin=( ( model.HistoryDatasetAssociation.table.c.hidden_beneath_collection_instance_id ==
+                        model.HistoryDatasetCollectionAssociation.table.c.id ) ),
+        uselist=False,
+        backref="hidden_dataset_instances"),
+    _metadata=deferred(model.HistoryDatasetAssociation.table.c._metadata)
+)
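+
+# Editor's note: in the self-referential pairs above (copied_from_* vs
+# copied_to_*, parent vs children), remote_side=[...c.id] tells SQLAlchemy
+# which side of the otherwise identical join is the "one" side, so
+# copied_from_* and parent are many-to-one (uselist=False) while their
+# mirrors are one-to-many.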
+
+simple_mapping( model.Dataset,
+    history_associations=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) ),
+    active_history_associations=relation( model.HistoryDatasetAssociation,
+        primaryjoin=(
+            ( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) &
+            ( model.HistoryDatasetAssociation.table.c.deleted == false() ) &
+            ( model.HistoryDatasetAssociation.table.c.purged == false() ) ) ),
+    purged_history_associations=relation( model.HistoryDatasetAssociation,
+        primaryjoin=(
+            ( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) &
+            ( model.HistoryDatasetAssociation.table.c.purged == true() ) ) ),
+    library_associations=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.Dataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.dataset_id ) ),
+    active_library_associations=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=(
+            ( model.Dataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.dataset_id ) &
+            ( model.LibraryDatasetDatasetAssociation.table.c.deleted == false() ) ) ),
+    tags=relation(model.DatasetTagAssociation,
+        order_by=model.DatasetTagAssociation.table.c.id,
+        backref='datasets')
+)
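+
+# Editor's note: active_* and purged_* above reuse the dataset_id join with
+# extra boolean criteria; true() and false() are SQLAlchemy constructs
+# (presumably imported near the top of this file) that render as
+# dialect-appropriate boolean literals.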
+
+mapper( model.HistoryDatasetAssociationDisplayAtAuthorization, model.HistoryDatasetAssociationDisplayAtAuthorization.table, properties=dict(
+    history_dataset_association=relation( model.HistoryDatasetAssociation ),
+    user=relation( model.User )
+) )
+
+mapper( model.HistoryDatasetAssociationSubset, model.HistoryDatasetAssociationSubset.table, properties=dict(
+    hda=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.HistoryDatasetAssociationSubset.table.c.history_dataset_association_id ==
+                      model.HistoryDatasetAssociation.table.c.id ) ),
+    subset=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.HistoryDatasetAssociationSubset.table.c.history_dataset_association_subset_id ==
+                      model.HistoryDatasetAssociation.table.c.id ) )
+) )
+
+mapper( model.ImplicitlyConvertedDatasetAssociation, model.ImplicitlyConvertedDatasetAssociation.table, properties=dict(
+    parent_hda=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id ==
+                      model.HistoryDatasetAssociation.table.c.id ) ),
+    parent_ldda=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ) ),
+    dataset_ldda=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ) ),
+    dataset=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_id ==
+                      model.HistoryDatasetAssociation.table.c.id ) )
+) )
+
+mapper( model.History, model.History.table, properties=dict(
+    galaxy_sessions=relation( model.GalaxySessionToHistoryAssociation ),
+    datasets=relation( model.HistoryDatasetAssociation,
+        backref="history",
+        order_by=asc(model.HistoryDatasetAssociation.table.c.hid) ),
+    exports=relation( model.JobExportHistoryArchive,
+        primaryjoin=( model.JobExportHistoryArchive.table.c.history_id == model.History.table.c.id ),
+        order_by=desc( model.JobExportHistoryArchive.table.c.id ) ),
+    active_datasets=relation( model.HistoryDatasetAssociation,
+        primaryjoin=(
+            ( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) &
+            not_( model.HistoryDatasetAssociation.table.c.deleted )
+        ),
+        order_by=asc( model.HistoryDatasetAssociation.table.c.hid ),
+        viewonly=True ),
+    active_dataset_collections=relation( model.HistoryDatasetCollectionAssociation,
+        primaryjoin=(
+            ( model.HistoryDatasetCollectionAssociation.table.c.history_id == model.History.table.c.id ) &
+            not_( model.HistoryDatasetCollectionAssociation.table.c.deleted )
+        ),
+        order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.hid ),
+        viewonly=True ),
+    visible_datasets=relation( model.HistoryDatasetAssociation,
+        primaryjoin=(
+            ( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) &
+            not_( model.HistoryDatasetAssociation.table.c.deleted ) &
+            model.HistoryDatasetAssociation.table.c.visible
+        ),
+        order_by=asc( model.HistoryDatasetAssociation.table.c.hid ),
+        viewonly=True ),
+    visible_dataset_collections=relation( model.HistoryDatasetCollectionAssociation,
+        primaryjoin=(
+            ( model.HistoryDatasetCollectionAssociation.table.c.history_id == model.History.table.c.id ) &
+            not_( model.HistoryDatasetCollectionAssociation.table.c.deleted ) &
+            model.HistoryDatasetCollectionAssociation.table.c.visible
+        ),
+        order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.hid ),
+        viewonly=True ),
+    tags=relation( model.HistoryTagAssociation,
+        order_by=model.HistoryTagAssociation.table.c.id,
+        backref="histories" ),
+    annotations=relation( model.HistoryAnnotationAssociation,
+        order_by=model.HistoryAnnotationAssociation.table.c.id,
+        backref="histories" ),
+    ratings=relation( model.HistoryRatingAssociation,
+        order_by=model.HistoryRatingAssociation.table.c.id,
+        backref="histories" )
+) )
+
+# Set up proxy so that
+#   History.users_shared_with_dot_users
+# returns the list of users that the history is shared with.
+model.History.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
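+
+# Usage sketch (hypothetical variable names, assuming a loaded History):
+#
+#     emails = [ u.email for u in history.users_shared_with_dot_users ]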
+
+mapper( model.HistoryUserShareAssociation, model.HistoryUserShareAssociation.table, properties=dict(
+    user=relation( model.User, backref='histories_shared_by_others' ),
+    history=relation( model.History, backref='users_shared_with' )
+) )
+
+mapper( model.User, model.User.table, properties=dict(
+    histories=relation( model.History,
+        backref="user",
+        order_by=desc(model.History.table.c.update_time ) ),
+    active_histories=relation( model.History,
+        primaryjoin=(
+            ( model.History.table.c.user_id == model.User.table.c.id ) &
+            ( not_( model.History.table.c.deleted ) )
+        ),
+        order_by=desc( model.History.table.c.update_time ) ),
+
+    galaxy_sessions=relation( model.GalaxySession,
+        order_by=desc( model.GalaxySession.table.c.update_time ) ),
+    stored_workflow_menu_entries=relation( model.StoredWorkflowMenuEntry,
+        backref="user",
+        cascade="all, delete-orphan",
+        collection_class=ordering_list( 'order_index' ) ),
+    _preferences=relation( model.UserPreference,
+        backref="user",
+        collection_class=attribute_mapped_collection('name')),
+    # addresses=relation( UserAddress,
+    #     primaryjoin=( User.table.c.id == UserAddress.table.c.user_id ) ),
+    values=relation( model.FormValues,
+        primaryjoin=( model.User.table.c.form_values_id == model.FormValues.table.c.id ) ),
+    api_keys=relation( model.APIKeys,
+        backref="user",
+        order_by=desc( model.APIKeys.table.c.create_time ) ),
+) )
+
+mapper( model.PasswordResetToken, model.PasswordResetToken.table,
+        properties=dict( user=relation( model.User, backref="reset_tokens") ) )
+
+
+# Set up proxy so that this syntax is possible:
+# <user_obj>.preferences[pref_name] = pref_value
+model.User.preferences = association_proxy('_preferences', 'value', creator=model.UserPreference)
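+
+# Usage sketch (hypothetical names, assuming a persistent user in a bound
+# session); assignment routes through the UserPreference creator:
+#
+#     user.preferences[ 'theme' ] = 'dark'  # creates or updates a UserPreference row
+#     sa_session.flush()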
+
+mapper( model.Group, model.Group.table, properties=dict(
+    users=relation( model.UserGroupAssociation )
+) )
+
+mapper( model.UserGroupAssociation, model.UserGroupAssociation.table, properties=dict(
+    user=relation( model.User, backref="groups" ),
+    group=relation( model.Group, backref="members" )
+) )
+
+mapper( model.DefaultUserPermissions, model.DefaultUserPermissions.table, properties=dict(
+    user=relation( model.User, backref="default_permissions" ),
+    role=relation( model.Role )
+) )
+
+mapper( model.DefaultHistoryPermissions, model.DefaultHistoryPermissions.table, properties=dict(
+    history=relation( model.History, backref="default_permissions" ),
+    role=relation( model.Role )
+) )
+
+mapper( model.Role, model.Role.table, properties=dict(
+    users=relation( model.UserRoleAssociation ),
+    groups=relation( model.GroupRoleAssociation )
+) )
+
+mapper( model.UserRoleAssociation, model.UserRoleAssociation.table, properties=dict(
+    user=relation( model.User, backref="roles" ),
+    non_private_roles=relation(
+        model.User,
+        backref="non_private_roles",
+        primaryjoin=(
+            ( model.User.table.c.id == model.UserRoleAssociation.table.c.user_id ) &
+            ( model.UserRoleAssociation.table.c.role_id == model.Role.table.c.id ) &
+            not_( model.Role.table.c.name == model.User.table.c.email ) )
+    ),
+    role=relation( model.Role )
+) )
+
+mapper( model.GroupRoleAssociation, model.GroupRoleAssociation.table, properties=dict(
+    group=relation( model.Group, backref="roles" ),
+    role=relation( model.Role )
+) )
+
+mapper( model.Quota, model.Quota.table, properties=dict(
+    users=relation( model.UserQuotaAssociation ),
+    groups=relation( model.GroupQuotaAssociation )
+) )
+
+mapper( model.UserQuotaAssociation, model.UserQuotaAssociation.table, properties=dict(
+    user=relation( model.User, backref="quotas" ),
+    quota=relation( model.Quota )
+) )
+
+mapper( model.GroupQuotaAssociation, model.GroupQuotaAssociation.table, properties=dict(
+    group=relation( model.Group, backref="quotas" ),
+    quota=relation( model.Quota )
+) )
+
+mapper( model.DefaultQuotaAssociation, model.DefaultQuotaAssociation.table, properties=dict(
+    quota=relation( model.Quota, backref="default" )
+) )
+
+mapper( model.DatasetPermissions, model.DatasetPermissions.table, properties=dict(
+    dataset=relation( model.Dataset, backref="actions" ),
+    role=relation( model.Role, backref="dataset_actions" )
+) )
+
+mapper( model.LibraryPermissions, model.LibraryPermissions.table, properties=dict(
+    library=relation( model.Library, backref="actions" ),
+    role=relation( model.Role, backref="library_actions" )
+) )
+
+mapper( model.LibraryFolderPermissions, model.LibraryFolderPermissions.table, properties=dict(
+    folder=relation( model.LibraryFolder, backref="actions" ),
+    role=relation( model.Role, backref="library_folder_actions" )
+) )
+
+mapper( model.LibraryDatasetPermissions, model.LibraryDatasetPermissions.table, properties=dict(
+    library_dataset=relation( model.LibraryDataset, backref="actions" ),
+    role=relation( model.Role, backref="library_dataset_actions" )
+) )
+
+mapper( model.LibraryDatasetDatasetAssociationPermissions, model.LibraryDatasetDatasetAssociationPermissions.table, properties=dict(
+    library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation, backref="actions" ),
+    role=relation( model.Role, backref="library_dataset_dataset_actions" )
+) )
+
+mapper( model.Library, model.Library.table, properties=dict(
+    root_folder=relation( model.LibraryFolder, backref=backref( "library_root" ) )
+) )
+
+mapper( model.ExtendedMetadata, model.ExtendedMetadata.table, properties=dict(
+    children=relation( model.ExtendedMetadataIndex,
+        primaryjoin=( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ),
+        backref=backref( "parent",
+            primaryjoin=( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) ) )
+) )
+
+mapper( model.ExtendedMetadataIndex, model.ExtendedMetadataIndex.table, properties=dict(
+    extended_metadata=relation( model.ExtendedMetadata,
+        primaryjoin=( ( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) ) )
+) )
+
+
+mapper( model.LibraryInfoAssociation, model.LibraryInfoAssociation.table, properties=dict(
+    library=relation( model.Library,
+        primaryjoin=(
+            ( model.LibraryInfoAssociation.table.c.library_id == model.Library.table.c.id ) &
+            ( not_( model.LibraryInfoAssociation.table.c.deleted ) )
+        ),
+        backref="info_association" ),
+    template=relation( model.FormDefinition,
+        primaryjoin=( model.LibraryInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
+    info=relation( model.FormValues,
+        primaryjoin=( model.LibraryInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
+) )
+
+mapper( model.LibraryFolder, model.LibraryFolder.table, properties=dict(
+    folders=relation( model.LibraryFolder,
+        primaryjoin=( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ),
+        order_by=asc( model.LibraryFolder.table.c.name ),
+        backref=backref( "parent",
+            primaryjoin=( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ),
+            remote_side=[model.LibraryFolder.table.c.id] ) ),
+    active_folders=relation( model.LibraryFolder,
+        primaryjoin=(
+            ( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ) &
+            ( not_( model.LibraryFolder.table.c.deleted ) )
+        ),
+        order_by=asc( model.LibraryFolder.table.c.name ),
+        # """sqlalchemy.exc.ArgumentError: Error creating eager relationship 'active_folders'
+        # on parent class '<class 'galaxy.model.LibraryFolder'>' to child class '<class 'galaxy.model.LibraryFolder'>':
+        # Cant use eager loading on a self referential relationship."""
+        lazy=True,
+        viewonly=True ),
+    datasets=relation( model.LibraryDataset,
+        primaryjoin=( ( model.LibraryDataset.table.c.folder_id == model.LibraryFolder.table.c.id ) ),
+        order_by=asc( model.LibraryDataset.table.c._name ),
+        lazy=True,
+        viewonly=True ),
+    active_datasets=relation( model.LibraryDataset,
+        primaryjoin=(
+            ( model.LibraryDataset.table.c.folder_id == model.LibraryFolder.table.c.id ) &
+            ( not_( model.LibraryDataset.table.c.deleted ) )
+        ),
+        order_by=asc( model.LibraryDataset.table.c._name ),
+        lazy=True,
+        viewonly=True )
+) )
+
+mapper( model.LibraryFolderInfoAssociation, model.LibraryFolderInfoAssociation.table, properties=dict(
+    folder=relation( model.LibraryFolder,
+        primaryjoin=(
+            ( model.LibraryFolderInfoAssociation.table.c.library_folder_id == model.LibraryFolder.table.c.id ) &
+            ( not_( model.LibraryFolderInfoAssociation.table.c.deleted ) )
+        ),
+        backref="info_association" ),
+    template=relation( model.FormDefinition,
+        primaryjoin=( model.LibraryFolderInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
+    info=relation( model.FormValues,
+        primaryjoin=( model.LibraryFolderInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
+) )
+
+mapper( model.LibraryDataset, model.LibraryDataset.table, properties=dict(
+    folder=relation( model.LibraryFolder ),
+    library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.LibraryDataset.table.c.library_dataset_dataset_association_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ) ),
+    expired_datasets=relation( model.LibraryDatasetDatasetAssociation,
+        foreign_keys=[model.LibraryDataset.table.c.id, model.LibraryDataset.table.c.library_dataset_dataset_association_id ],
+        primaryjoin=(
+            ( model.LibraryDataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.library_dataset_id ) &
+            ( not_( model.LibraryDataset.table.c.library_dataset_dataset_association_id ==
+                    model.LibraryDatasetDatasetAssociation.table.c.id ) )
+        ),
+        viewonly=True,
+        uselist=True )
+) )
+
+mapper( model.LibraryDatasetDatasetAssociation, model.LibraryDatasetDatasetAssociation.table, properties=dict(
+    dataset=relation( model.Dataset ),
+    library_dataset=relation( model.LibraryDataset,
+        primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.library_dataset_id == model.LibraryDataset.table.c.id ) ),
+    # user=relation( model.User.mapper ),
+    user=relation( model.User ),
+    copied_from_library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ),
+        remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id],
+        uselist=False ),
+    copied_to_library_dataset_dataset_associations=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ) ),
+    copied_from_history_dataset_association=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_history_dataset_association_id ==
+                      model.HistoryDatasetAssociation.table.c.id  ),
+        uselist=False ),
+    copied_to_history_dataset_associations=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ) ),
+    implicitly_converted_datasets=relation( model.ImplicitlyConvertedDatasetAssociation,
+        primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ) ),
+    children=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.parent_id ==
+                      model.LibraryDatasetDatasetAssociation.table.c.id ),
+        backref=backref( "parent",
+            primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.parent_id ==
+                          model.LibraryDatasetDatasetAssociation.table.c.id ),
+            remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id] ) ),
+    visible_children=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=(
+            ( model.LibraryDatasetDatasetAssociation.table.c.parent_id == model.LibraryDatasetDatasetAssociation.table.c.id ) &
+            ( model.LibraryDatasetDatasetAssociation.table.c.visible == true() )
+        ),
+        remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id] ),
+    extended_metadata=relation( model.ExtendedMetadata,
+        primaryjoin=( ( model.LibraryDatasetDatasetAssociation.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) )
+    ),
+    _metadata=deferred(model.LibraryDatasetDatasetAssociation.table.c._metadata)
+) )
+
+mapper( model.LibraryDatasetDatasetInfoAssociation, model.LibraryDatasetDatasetInfoAssociation.table, properties=dict(
+    library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=(
+            ( model.LibraryDatasetDatasetInfoAssociation.table.c.library_dataset_dataset_association_id ==
+              model.LibraryDatasetDatasetAssociation.table.c.id ) &
+            ( not_( model.LibraryDatasetDatasetInfoAssociation.table.c.deleted ) )
+        ),
+        backref="info_association" ),
+    template=relation( model.FormDefinition,
+        primaryjoin=( model.LibraryDatasetDatasetInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
+    info=relation( model.FormValues,
+        primaryjoin=( model.LibraryDatasetDatasetInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
+) )
+
+mapper( model.JobToInputDatasetAssociation, model.JobToInputDatasetAssociation.table, properties=dict(
+    job=relation( model.Job ),
+    dataset=relation( model.HistoryDatasetAssociation,
+        lazy=False,
+        backref="dependent_jobs" )
+) )
+
+mapper( model.JobToOutputDatasetAssociation, model.JobToOutputDatasetAssociation.table, properties=dict(
+    job=relation( model.Job ),
+    dataset=relation( model.HistoryDatasetAssociation,
+        lazy=False )
+) )
+
+mapper( model.JobToInputDatasetCollectionAssociation, model.JobToInputDatasetCollectionAssociation.table, properties=dict(
+    job=relation( model.Job ),
+    dataset_collection=relation( model.HistoryDatasetCollectionAssociation,
+        lazy=False,
+        backref="dependent_jobs" )
+) )
+
+mapper( model.JobToOutputDatasetCollectionAssociation, model.JobToOutputDatasetCollectionAssociation.table, properties=dict(
+    job=relation( model.Job ),
+    dataset_collection_instance=relation( model.HistoryDatasetCollectionAssociation,
+        lazy=False,
+        backref="output_dataset_collection_instances" )
+) )
+
+mapper( model.JobToImplicitOutputDatasetCollectionAssociation, model.JobToImplicitOutputDatasetCollectionAssociation.table, properties=dict(
+    job=relation( model.Job ),
+    dataset_collection=relation( model.DatasetCollection,
+        backref="output_dataset_collections" )
+) )
+
+mapper( model.JobToInputLibraryDatasetAssociation, model.JobToInputLibraryDatasetAssociation.table, properties=dict(
+    job=relation( model.Job ),
+    dataset=relation( model.LibraryDatasetDatasetAssociation,
+        lazy=False,
+        backref="dependent_jobs" )
+) )
+
+mapper( model.JobToOutputLibraryDatasetAssociation, model.JobToOutputLibraryDatasetAssociation.table, properties=dict(
+    job=relation( model.Job ),
+    dataset=relation( model.LibraryDatasetDatasetAssociation,
+        lazy=False )
+) )
+
+simple_mapping( model.JobStateHistory,
+    job=relation( model.Job, backref="state_history" ) )
+
+simple_mapping( model.JobMetricText,
+    job=relation( model.Job, backref="text_metrics" ) )
+
+simple_mapping( model.TaskMetricText,
+    task=relation( model.Task, backref="text_metrics" ) )
+
+simple_mapping( model.JobMetricNumeric,
+    job=relation( model.Job, backref="numeric_metrics" ) )
+
+simple_mapping( model.TaskMetricNumeric,
+    task=relation( model.Task, backref="numeric_metrics" ) )
+
+simple_mapping( model.ImplicitlyCreatedDatasetCollectionInput,
+    input_dataset_collection=relation( model.HistoryDatasetCollectionAssociation,
+        primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.id ==
+                        model.ImplicitlyCreatedDatasetCollectionInput.table.c.input_dataset_collection_id ) ),
+        # backref="implicitly_created_dataset_collections",
+    ),
+)
+
+mapper( model.JobParameter, model.JobParameter.table )
+
+mapper( model.JobExternalOutputMetadata, model.JobExternalOutputMetadata.table, properties=dict(
+    job=relation( model.Job ),
+    history_dataset_association=relation( model.HistoryDatasetAssociation, lazy=False ),
+    library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation, lazy=False )
+) )
+
+mapper( model.JobExportHistoryArchive, model.JobExportHistoryArchive.table, properties=dict(
+    job=relation( model.Job ),
+    history=relation( model.History ),
+    dataset=relation( model.Dataset )
+) )
+
+mapper( model.JobImportHistoryArchive, model.JobImportHistoryArchive.table, properties=dict(
+    job=relation( model.Job ),
+    history=relation( model.History )
+) )
+
+mapper( model.GenomeIndexToolData, model.GenomeIndexToolData.table, properties=dict(
+    job=relation( model.Job, backref='job' ),
+    dataset=relation( model.Dataset ),
+    user=relation( model.User ),
+    deferred=relation( model.DeferredJob, backref='deferred_job' ),
+    transfer=relation( model.TransferJob, backref='transfer_job' )
+) )
+
+mapper(model.PostJobAction, model.PostJobAction.table, properties=dict(
+    workflow_step=relation( model.WorkflowStep,
+        backref='post_job_actions',
+        primaryjoin=(model.WorkflowStep.table.c.id == model.PostJobAction.table.c.workflow_step_id ) )
+) )
+
+mapper( model.PostJobActionAssociation, model.PostJobActionAssociation.table, properties=dict(
+    job=relation( model.Job ),
+    post_job_action=relation( model.PostJobAction)
+) )
+
+mapper( model.Job, model.Job.table, properties=dict(
+    # user=relation( model.User.mapper ),
+    user=relation( model.User ),
+    galaxy_session=relation( model.GalaxySession ),
+    history=relation( model.History ),
+    library_folder=relation( model.LibraryFolder, lazy=True ),
+    parameters=relation( model.JobParameter, lazy=True ),
+    input_datasets=relation( model.JobToInputDatasetAssociation ),
+    output_datasets=relation( model.JobToOutputDatasetAssociation, lazy=True ),
+    output_dataset_collection_instances=relation( model.JobToOutputDatasetCollectionAssociation, lazy=True ),
+    output_dataset_collections=relation( model.JobToImplicitOutputDatasetCollectionAssociation, lazy=True ),
+    post_job_actions=relation( model.PostJobActionAssociation, lazy=False ),
+    input_library_datasets=relation( model.JobToInputLibraryDatasetAssociation ),
+    output_library_datasets=relation( model.JobToOutputLibraryDatasetAssociation, lazy=True ),
+    external_output_metadata=relation( model.JobExternalOutputMetadata, lazy=True ),
+    tasks=relation( model.Task )
+) )
+
+mapper( model.Task, model.Task.table, properties=dict(
+    job=relation( model.Job )
+) )
+
+mapper( model.DeferredJob, model.DeferredJob.table, properties={} )
+
+mapper( model.TransferJob, model.TransferJob.table, properties={} )
+
+
+simple_mapping( model.DatasetCollection,
+    elements=relation( model.DatasetCollectionElement,
+        primaryjoin=( model.DatasetCollection.table.c.id == model.DatasetCollectionElement.table.c.dataset_collection_id ),
+        remote_side=[ model.DatasetCollectionElement.table.c.dataset_collection_id ],
+        backref="collection",
+        order_by=model.DatasetCollectionElement.table.c.element_index )
+)
+
+simple_mapping( model.HistoryDatasetCollectionAssociation,
+    collection=relation( model.DatasetCollection ),
+    history=relation( model.History,
+        backref='dataset_collections' ),
+    copied_from_history_dataset_collection_association=relation( model.HistoryDatasetCollectionAssociation,
+        primaryjoin=( model.HistoryDatasetCollectionAssociation.table.c.copied_from_history_dataset_collection_association_id ==
+                      model.HistoryDatasetCollectionAssociation.table.c.id ),
+        remote_side=[model.HistoryDatasetCollectionAssociation.table.c.id],
+        uselist=False ),
+    copied_to_history_dataset_collection_associations=relation( model.HistoryDatasetCollectionAssociation,
+        primaryjoin=( model.HistoryDatasetCollectionAssociation.table.c.copied_from_history_dataset_collection_association_id ==
+                      model.HistoryDatasetCollectionAssociation.table.c.id ) ),
+    implicit_input_collections=relation( model.ImplicitlyCreatedDatasetCollectionInput,
+        primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.id ==
+                        model.ImplicitlyCreatedDatasetCollectionInput.table.c.dataset_collection_id ) ),
+        backref="dataset_collection",
+    ),
+    tags=relation( model.HistoryDatasetCollectionTagAssociation,
+        order_by=model.HistoryDatasetCollectionTagAssociation.table.c.id,
+        backref='dataset_collections' ),
+    annotations=relation( model.HistoryDatasetCollectionAnnotationAssociation,
+        order_by=model.HistoryDatasetCollectionAnnotationAssociation.table.c.id,
+        backref="dataset_collections" ),
+    ratings=relation( model.HistoryDatasetCollectionRatingAssociation,
+        order_by=model.HistoryDatasetCollectionRatingAssociation.table.c.id,
+        backref="dataset_collections" )
+)
+
+simple_mapping( model.LibraryDatasetCollectionAssociation,
+    collection=relation( model.DatasetCollection ),
+    folder=relation( model.LibraryFolder,
+        backref='dataset_collections' ),
+    tags=relation( model.LibraryDatasetCollectionTagAssociation,
+        order_by=model.LibraryDatasetCollectionTagAssociation.table.c.id,
+        backref='dataset_collections' ),
+    annotations=relation( model.LibraryDatasetCollectionAnnotationAssociation,
+        order_by=model.LibraryDatasetCollectionAnnotationAssociation.table.c.id,
+        backref="dataset_collections" ),
+    ratings=relation( model.LibraryDatasetCollectionRatingAssociation,
+        order_by=model.LibraryDatasetCollectionRatingAssociation.table.c.id,
+        backref="dataset_collections" ) )
+
+simple_mapping( model.DatasetCollectionElement,
+    hda=relation( model.HistoryDatasetAssociation,
+        primaryjoin=( model.DatasetCollectionElement.table.c.hda_id == model.HistoryDatasetAssociation.table.c.id ) ),
+    ldda=relation( model.LibraryDatasetDatasetAssociation,
+        primaryjoin=( model.DatasetCollectionElement.table.c.ldda_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
+    child_collection=relation( model.DatasetCollection,
+        primaryjoin=( model.DatasetCollectionElement.table.c.child_collection_id == model.DatasetCollection.table.c.id ) ) )
+
+mapper( model.Event, model.Event.table, properties=dict(
+    history=relation( model.History ),
+    galaxy_session=relation( model.GalaxySession ),
+    # user=relation( model.User.mapper ) ) )
+    user=relation( model.User )
+) )
+
+mapper( model.GalaxySession, model.GalaxySession.table, properties=dict(
+    histories=relation( model.GalaxySessionToHistoryAssociation ),
+    current_history=relation( model.History ),
+    # user=relation( model.User.mapper ) ) )
+    user=relation( model.User )
+) )
+
+mapper( model.GalaxySessionToHistoryAssociation, model.GalaxySessionToHistoryAssociation.table, properties=dict(
+    galaxy_session=relation( model.GalaxySession ),
+    history=relation( model.History )
+) )
+
+mapper( model.Workflow, model.Workflow.table, properties=dict(
+    steps=relation( model.WorkflowStep,
+        backref='workflow',
+        primaryjoin=( ( model.Workflow.table.c.id == model.WorkflowStep.table.c.workflow_id ) ),
+        order_by=asc( model.WorkflowStep.table.c.order_index ),
+        cascade="all, delete-orphan",
+        lazy=False )
+) )
+
+mapper( model.WorkflowStep, model.WorkflowStep.table, properties=dict(
+    subworkflow=relation( model.Workflow,
+        primaryjoin=( ( model.Workflow.table.c.id == model.WorkflowStep.table.c.subworkflow_id ) ),
+        backref="parent_workflow_steps"),
+    tags=relation( model.WorkflowStepTagAssociation,
+        order_by=model.WorkflowStepTagAssociation.table.c.id,
+        backref="workflow_steps" ),
+    annotations=relation( model.WorkflowStepAnnotationAssociation,
+        order_by=model.WorkflowStepAnnotationAssociation.table.c.id,
+        backref="workflow_steps" )
+) )
+
+mapper( model.WorkflowOutput, model.WorkflowOutput.table, properties=dict(
+    workflow_step=relation( model.WorkflowStep,
+        backref='workflow_outputs',
+        primaryjoin=( model.WorkflowStep.table.c.id == model.WorkflowOutput.table.c.workflow_step_id ) )
+) )
+
+mapper( model.WorkflowStepConnection, model.WorkflowStepConnection.table, properties=dict(
+    input_step=relation( model.WorkflowStep,
+        backref="input_connections",
+        cascade="all",
+        primaryjoin=( model.WorkflowStepConnection.table.c.input_step_id == model.WorkflowStep.table.c.id ) ),
+    input_subworkflow_step=relation( model.WorkflowStep,
+        backref=backref("parent_workflow_input_connections", uselist=True),
+        primaryjoin=( model.WorkflowStepConnection.table.c.input_subworkflow_step_id == model.WorkflowStep.table.c.id ),
+    ),
+    output_step=relation( model.WorkflowStep,
+        backref="output_connections",
+        cascade="all",
+        primaryjoin=( model.WorkflowStepConnection.table.c.output_step_id == model.WorkflowStep.table.c.id ) ),
+) )
+
+
+mapper( model.StoredWorkflow, model.StoredWorkflow.table, properties=dict(
+    user=relation( model.User,
+        primaryjoin=( model.User.table.c.id == model.StoredWorkflow.table.c.user_id ),
+        backref='stored_workflows' ),
+    workflows=relation( model.Workflow,
+        backref='stored_workflow',
+        cascade="all, delete-orphan",
+        primaryjoin=( model.StoredWorkflow.table.c.id == model.Workflow.table.c.stored_workflow_id ) ),
+    latest_workflow=relation( model.Workflow,
+        post_update=True,
+        primaryjoin=( model.StoredWorkflow.table.c.latest_workflow_id == model.Workflow.table.c.id ),
+        lazy=False ),
+    tags=relation( model.StoredWorkflowTagAssociation,
+        order_by=model.StoredWorkflowTagAssociation.table.c.id,
+        backref="stored_workflows" ),
+    owner_tags=relation( model.StoredWorkflowTagAssociation,
+        primaryjoin=(
+            and_( model.StoredWorkflow.table.c.id == model.StoredWorkflowTagAssociation.table.c.stored_workflow_id,
+                  model.StoredWorkflow.table.c.user_id == model.StoredWorkflowTagAssociation.table.c.user_id )
+        ),
+        order_by=model.StoredWorkflowTagAssociation.table.c.id ),
+    annotations=relation( model.StoredWorkflowAnnotationAssociation,
+        order_by=model.StoredWorkflowAnnotationAssociation.table.c.id,
+        backref="stored_workflows" ),
+    ratings=relation( model.StoredWorkflowRatingAssociation,
+        order_by=model.StoredWorkflowRatingAssociation.table.c.id,
+        backref="stored_workflows" )
+) )
+
+# Set up proxy so that
+#   StoredWorkflow.users_shared_with
+# returns a list of users that workflow is shared with.
+model.StoredWorkflow.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
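+# Illustrative sketch (not part of the mapping): with this proxy in place,
+#   workflow.users_shared_with_dot_users
+# is equivalent to
+#   [ assoc.user for assoc in workflow.users_shared_with ]
+# where `workflow` is a hypothetical StoredWorkflow instance.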
+
+mapper( model.StoredWorkflowUserShareAssociation, model.StoredWorkflowUserShareAssociation.table, properties=dict(
+    user=relation( model.User,
+        backref='workflows_shared_by_others' ),
+    stored_workflow=relation( model.StoredWorkflow,
+        backref='users_shared_with' )
+) )
+
+mapper( model.StoredWorkflowMenuEntry, model.StoredWorkflowMenuEntry.table, properties=dict(
+    stored_workflow=relation( model.StoredWorkflow )
+) )
+
+mapper( model.WorkflowInvocation, model.WorkflowInvocation.table, properties=dict(
+    history=relation( model.History ),
+    input_parameters=relation( model.WorkflowRequestInputParameter ),
+    step_states=relation( model.WorkflowRequestStepState ),
+    input_step_parameters=relation( model.WorkflowRequestInputStepParmeter ),
+    input_datasets=relation( model.WorkflowRequestToInputDatasetAssociation ),
+    input_dataset_collections=relation( model.WorkflowRequestToInputDatasetCollectionAssociation ),
+    subworkflow_invocations=relation( model.WorkflowInvocationToSubworkflowInvocationAssociation,
+        primaryjoin=( ( model.WorkflowInvocationToSubworkflowInvocationAssociation.table.c.workflow_invocation_id == model.WorkflowInvocation.table.c.id ) ),
+        backref=backref("parent_workflow_invocation", uselist=False),
+        uselist=True,
+    ),
+    steps=relation( model.WorkflowInvocationStep,
+        backref='workflow_invocation' ),
+    workflow=relation( model.Workflow )
+) )
+
+mapper( model.WorkflowInvocationToSubworkflowInvocationAssociation, model.WorkflowInvocationToSubworkflowInvocationAssociation.table, properties=dict(
+    subworkflow_invocation=relation( model.WorkflowInvocation,
+        primaryjoin=( ( model.WorkflowInvocationToSubworkflowInvocationAssociation.table.c.subworkflow_invocation_id == model.WorkflowInvocation.table.c.id ) ),
+        backref="parent_workflow_invocation_association",
+        uselist=False,
+    ),
+    workflow_step=relation( model.WorkflowStep ),
+) )
+
+mapper( model.WorkflowInvocationStep, model.WorkflowInvocationStep.table, properties=dict(
+    workflow_step=relation( model.WorkflowStep ),
+    job=relation( model.Job,
+        backref=backref( 'workflow_invocation_step',
+            uselist=False ) )
+) )
+
+simple_mapping( model.WorkflowRequestInputParameter,
+    workflow_invocation=relation( model.WorkflowInvocation ) )
+
+simple_mapping( model.WorkflowRequestStepState,
+    workflow_invocation=relation( model.WorkflowInvocation ),
+    workflow_step=relation( model.WorkflowStep ) )
+
+simple_mapping( model.WorkflowRequestInputStepParmeter,
+    workflow_invocation=relation( model.WorkflowInvocation ),
+    workflow_step=relation( model.WorkflowStep ) )
+
+simple_mapping( model.WorkflowRequestToInputDatasetAssociation,
+    workflow_invocation=relation( model.WorkflowInvocation ),
+    workflow_step=relation( model.WorkflowStep ),
+    dataset=relation( model.HistoryDatasetAssociation ) )
+
+
+simple_mapping( model.WorkflowRequestToInputDatasetCollectionAssociation,
+    workflow_invocation=relation( model.WorkflowInvocation ),
+    workflow_step=relation( model.WorkflowStep ),
+    dataset_collection=relation( model.HistoryDatasetCollectionAssociation ) )
+
+
+mapper( model.MetadataFile, model.MetadataFile.table, properties=dict(
+    history_dataset=relation( model.HistoryDatasetAssociation ),
+    library_dataset=relation( model.LibraryDatasetDatasetAssociation )
+) )
+
+mapper( model.PageRevision, model.PageRevision.table )
+
+mapper( model.Page, model.Page.table, properties=dict(
+    user=relation( model.User ),
+    revisions=relation( model.PageRevision,
+        backref='page',
+        cascade="all, delete-orphan",
+        primaryjoin=( model.Page.table.c.id == model.PageRevision.table.c.page_id ) ),
+    latest_revision=relation( model.PageRevision,
+        post_update=True,
+        primaryjoin=( model.Page.table.c.latest_revision_id == model.PageRevision.table.c.id ),
+        lazy=False ),
+    tags=relation(model.PageTagAssociation,
+        order_by=model.PageTagAssociation.table.c.id,
+        backref="pages" ),
+    annotations=relation( model.PageAnnotationAssociation,
+        order_by=model.PageAnnotationAssociation.table.c.id,
+        backref="pages" ),
+    ratings=relation( model.PageRatingAssociation,
+        order_by=model.PageRatingAssociation.table.c.id,
+        backref="pages" )
+) )
+
+# Set up proxy so that
+#   Page.users_shared_with
+# returns a list of users that page is shared with.
+model.Page.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
+
+mapper( model.PageUserShareAssociation, model.PageUserShareAssociation.table,
+        properties=dict( user=relation( model.User, backref='pages_shared_by_others' ),
+                         page=relation( model.Page, backref='users_shared_with' ) ) )
+
+mapper( model.VisualizationRevision, model.VisualizationRevision.table )
+
+mapper( model.Visualization, model.Visualization.table, properties=dict(
+    user=relation( model.User ),
+    revisions=relation( model.VisualizationRevision,
+        backref='visualization',
+        cascade="all, delete-orphan",
+        primaryjoin=( model.Visualization.table.c.id == model.VisualizationRevision.table.c.visualization_id ) ),
+    latest_revision=relation( model.VisualizationRevision,
+        post_update=True,
+        primaryjoin=( model.Visualization.table.c.latest_revision_id == model.VisualizationRevision.table.c.id ),
+        lazy=False ),
+    tags=relation( model.VisualizationTagAssociation,
+        order_by=model.VisualizationTagAssociation.table.c.id,
+        backref="visualizations" ),
+    annotations=relation( model.VisualizationAnnotationAssociation,
+        order_by=model.VisualizationAnnotationAssociation.table.c.id,
+        backref="visualizations" ),
+    ratings=relation( model.VisualizationRatingAssociation,
+        order_by=model.VisualizationRatingAssociation.table.c.id,
+        backref="visualizations" )
+) )
+
+# Set up proxy so that
+#   Visualization.users_shared_with
+# returns a list of users that visualization is shared with.
+model.Visualization.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
+
+mapper( model.VisualizationUserShareAssociation, model.VisualizationUserShareAssociation.table, properties=dict(
+    user=relation( model.User,
+        backref='visualizations_shared_by_others' ),
+    visualization=relation( model.Visualization,
+        backref='users_shared_with' )
+) )
+
+# Tag tables.
+simple_mapping( model.Tag,
+    children=relation( model.Tag, backref=backref( 'parent', remote_side=[model.Tag.table.c.id] ) ) )
+
+
+def tag_mapping( tag_association_class, backref_name ):
+    simple_mapping( tag_association_class, tag=relation( model.Tag, backref=backref_name), user=relation( model.User ) )
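+# For illustration only, the first call below is equivalent to writing:
+#   simple_mapping( model.HistoryTagAssociation,
+#       tag=relation( model.Tag, backref="tagged_histories" ),
+#       user=relation( model.User ) )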
+
+
+tag_mapping( model.HistoryTagAssociation, "tagged_histories" )
+tag_mapping( model.DatasetTagAssociation, "tagged_datasets" )
+tag_mapping( model.HistoryDatasetAssociationTagAssociation, "tagged_history_dataset_associations" )
+tag_mapping( model.PageTagAssociation, "tagged_pages" )
+tag_mapping( model.StoredWorkflowTagAssociation, "tagged_workflows" )
+tag_mapping( model.WorkflowStepTagAssociation, "tagged_workflow_steps" )
+tag_mapping( model.VisualizationTagAssociation, "tagged_visualizations" )
+tag_mapping( model.HistoryDatasetCollectionTagAssociation, "tagged_history_dataset_collections" )
+tag_mapping( model.LibraryDatasetCollectionTagAssociation, "tagged_library_dataset_collections" )
+tag_mapping( model.ToolTagAssociation, "tagged_tools" )
+
+
+# Annotation tables.
+def annotation_mapping( annotation_class, **kwds ):
+    kwds = dict( (key, relation( value ) ) for key, value in kwds.items() )
+    simple_mapping( annotation_class, **dict(user=relation( model.User ), **kwds ) )
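+# For illustration only, the first call below expands to:
+#   simple_mapping( model.HistoryAnnotationAssociation,
+#       user=relation( model.User ),
+#       history=relation( model.History ) )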
+
+
+annotation_mapping( model.HistoryAnnotationAssociation, history=model.History )
+annotation_mapping( model.HistoryDatasetAssociationAnnotationAssociation, hda=model.HistoryDatasetAssociation )
+annotation_mapping( model.StoredWorkflowAnnotationAssociation, stored_workflow=model.StoredWorkflow )
+annotation_mapping( model.WorkflowStepAnnotationAssociation, workflow_step=model.WorkflowStep )
+annotation_mapping( model.PageAnnotationAssociation, page=model.Page )
+annotation_mapping( model.VisualizationAnnotationAssociation, visualization=model.Visualization )
+annotation_mapping( model.HistoryDatasetCollectionAnnotationAssociation,
+    history_dataset_collection=model.HistoryDatasetCollectionAssociation )
+annotation_mapping( model.LibraryDatasetCollectionAnnotationAssociation,
+    library_dataset_collection=model.LibraryDatasetCollectionAssociation )
+
+
+# Rating tables.
+def rating_mapping( rating_class, **kwds ):
+    kwds = dict( (key, relation( value ) ) for key, value in kwds.items() )
+    simple_mapping( rating_class, **dict(user=relation( model.User ), **kwds ) )
+
+
+rating_mapping( model.HistoryRatingAssociation, history=model.History )
+rating_mapping( model.HistoryDatasetAssociationRatingAssociation, hda=model.HistoryDatasetAssociation )
+rating_mapping( model.StoredWorkflowRatingAssociation, stored_workflow=model.StoredWorkflow )
+rating_mapping( model.PageRatingAssociation, page=model.Page )
+rating_mapping( model.VisualizationRatingAssociation, visualization=model.Visualization )
+rating_mapping( model.HistoryDatasetCollectionRatingAssociation,
+    history_dataset_collection=model.HistoryDatasetCollectionAssociation )
+rating_mapping( model.LibraryDatasetCollectionRatingAssociation,
+    library_dataset_collection=model.LibraryDatasetCollectionAssociation )
+
+# Data Manager tables
+mapper( model.DataManagerHistoryAssociation, model.DataManagerHistoryAssociation.table, properties=dict(
+    history=relation( model.History ),
+    user=relation( model.User,
+        backref='data_manager_histories' )
+) )
+
+mapper( model.DataManagerJobAssociation, model.DataManagerJobAssociation.table, properties=dict(
+    job=relation( model.Job,
+        backref=backref( 'data_manager_association', uselist=False ),
+        uselist=False )
+) )
+
+# User tables.
+mapper( model.UserPreference, model.UserPreference.table, properties={} )
+mapper( model.UserAction, model.UserAction.table, properties=dict(
+    # user=relation( model.User.mapper )
+    user=relation( model.User )
+) )
+mapper( model.APIKeys, model.APIKeys.table, properties={} )
+
+# model.HistoryDatasetAssociation.mapper.add_property( "creating_job_associations",
+#     relation( model.JobToOutputDatasetAssociation ) )
+# model.LibraryDatasetDatasetAssociation.mapper.add_property( "creating_job_associations",
+#     relation( model.JobToOutputLibraryDatasetAssociation ) )
+class_mapper( model.HistoryDatasetAssociation ).add_property(
+    "creating_job_associations", relation( model.JobToOutputDatasetAssociation ) )
+class_mapper( model.LibraryDatasetDatasetAssociation ).add_property(
+    "creating_job_associations", relation( model.JobToOutputLibraryDatasetAssociation ) )
+class_mapper( model.HistoryDatasetCollectionAssociation ).add_property(
+    "creating_job_associations", relation( model.JobToOutputDatasetCollectionAssociation ) )
+
+
+# Helper methods.
+def db_next_hid( self, n=1 ):
+    """
+    db_next_hid( self )
+
+    Override __next_hid to generate from the database in a concurrency-safe way.
+    Loads the next history ID from the DB and returns it.
+    It also saves the future next_id into the DB.
+
+    :rtype:     int
+    :returns:   the next history id
+    """
+    conn = object_session( self ).connection()
+    table = self.table
+    trans = conn.begin()
+    try:
+        next_hid = select( [table.c.hid_counter], table.c.id == self.id, for_update=True ).scalar()
+        table.update( table.c.id == self.id ).execute( hid_counter=( next_hid + n ) )
+        trans.commit()
+        return next_hid
+    except:
+        trans.rollback()
+        raise
+
+
+model.History._next_hid = db_next_hid
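+# Illustrative sketch (hypothetical `history` instance): each call reserves a
+# distinct hid even under concurrent access, because the SELECT ... FOR UPDATE
+# above locks the row while the counter is advanced:
+#   hid = history._next_hid()  # e.g. returns 42 and bumps hid_counter to 43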
+
+
+def _workflow_invocation_update( self ):
+    conn = object_session( self ).connection()
+    table = self.table
+    now_val = now()
+    stmt = table.update().values(update_time=now_val).where(and_(table.c.id == self.id, table.c.update_time < now_val))
+    conn.execute(stmt)
+
+
+model.WorkflowInvocation.update = _workflow_invocation_update
+
+
+def init( file_path, url, engine_options={}, create_tables=False, map_install_models=False,
+        database_query_profiling_proxy=False, object_store=None, trace_logger=None, use_pbkdf2=True ):
+    """Connect mappings to the database"""
+    # Connect dataset to the file path
+    model.Dataset.file_path = file_path
+    # Connect dataset to object store
+    model.Dataset.object_store = object_store
+    # Use PBKDF2 password hashing?
+    model.User.use_pbkdf2 = use_pbkdf2
+    # Load the appropriate db module
+    engine = build_engine( url, engine_options, database_query_profiling_proxy, trace_logger )
+
+    # Connect the metadata to the database.
+    metadata.bind = engine
+
+    model_modules = [model]
+    if map_install_models:
+        import galaxy.model.tool_shed_install.mapping  # noqa: F401
+        from galaxy.model import tool_shed_install
+        model_modules.append(tool_shed_install)
+
+    result = ModelMapping(model_modules, engine=engine)
+
+    # Create tables if needed
+    if create_tables:
+        metadata.create_all()
+        # metadata.engine.commit()
+
+    result.create_tables = create_tables
+    # load local galaxy security policy
+    result.security_agent = GalaxyRBACAgent( result )
+    return result
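+# Minimal usage sketch -- the path and URL below are placeholders, and the
+# `context` attribute is assumed to be the scoped session ModelMapping exposes:
+#   sa_model = init( "/tmp/galaxy-files", "sqlite:///galaxy.sqlite", create_tables=True )
+#   session = sa_model.context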
diff --git a/lib/galaxy/model/metadata.py b/lib/galaxy/model/metadata.py
new file mode 100644
index 0000000..932020b
--- /dev/null
+++ b/lib/galaxy/model/metadata.py
@@ -0,0 +1,895 @@
+"""
+Galaxy Metadata
+
+"""
+
+import copy
+import json
+import logging
+import os
+import shutil
+import sys
+import tempfile
+import weakref
+from os.path import abspath
+
+from six import string_types
+from six.moves import cPickle
+from sqlalchemy.orm import object_session
+
+import galaxy.model
+from galaxy.util import (in_directory, listify, string_as_bool,
+                         stringify_dictionary_keys)
+from galaxy.util.json import safe_dumps
+from galaxy.util.object_wrapper import sanitize_lists_to_string
+from galaxy.util.odict import odict
+from galaxy.web import form_builder
+
+log = logging.getLogger(__name__)
+
+STATEMENTS = "__galaxy_statements__"  # this is the name of the property in a Datatype class where new metadata spec element Statements are stored
+
+
+class Statement( object ):
+    """
+    This class inserts its target into a list in the surrounding
+    class.  The data.Data class has a metaclass which executes these
+    statements.  This is how we shove the metadata element spec into
+    the class.
+    """
+    def __init__( self, target ):
+        self.target = target
+
+    def __call__( self, *args, **kwargs ):
+        # get the locals dictionary of the frame object one down in the call stack (i.e. the Datatype class calling MetadataElement)
+        class_locals = sys._getframe( 1 ).f_locals
+        # get and set '__galaxy_statements__' to an empty list if not in locals dict
+        statements = class_locals.setdefault( STATEMENTS, [] )
+        # add Statement containing info to populate a MetadataElementSpec
+        statements.append( ( self, args, kwargs ) )
+
+    @classmethod
+    def process( cls, element ):
+        for statement, args, kwargs in getattr( element, STATEMENTS, [] ):
+            statement.target( element, *args, **kwargs )  # statement.target is MetadataElementSpec, element is a Datatype class
+
+
+class MetadataCollection( object ):
+    """
+    MetadataCollection is not a collection at all, but rather a proxy
+    to the real metadata, which is stored as a dictionary. This class
+    handles processing the metadata elements when they are set and
+    retrieved, returning default values in cases when metadata is not set.
+    """
+    def __init__(self, parent ):
+        self.parent = parent
+        # initialize dict if needed
+        if self.parent._metadata is None:
+            self.parent._metadata = {}
+
+    def get_parent( self ):
+        if "_parent" in self.__dict__:
+            return self.__dict__["_parent"]()
+        return None
+
+    def set_parent( self, parent ):
+        # use weakref to prevent a circular reference interfering with garbage
+        # collection: hda/lda (parent) <--> MetadataCollection (self) ; needs to be
+        # hashable, so cannot use proxy.
+        self.__dict__["_parent"] = weakref.ref( parent )
+    parent = property( get_parent, set_parent )
+
+    @property
+    def spec( self ):
+        return self.parent.datatype.metadata_spec
+
+    def __iter__( self ):
+        return self.parent._metadata.__iter__()
+
+    def get( self, key, default=None ):
+        try:
+            return self.__getattr__( key ) or default
+        except Exception:
+            return default
+
+    def items(self):
+        return iter( [ ( k, self.get( k ) ) for k in self.spec.keys() ] )
+
+    def __str__(self):
+        return dict( self.items() ).__str__()
+
+    def __bool__( self ):
+        return bool( self.parent._metadata )
+    __nonzero__ = __bool__
+
+    def __getattr__( self, name ):
+        if name in self.spec:
+            if name in self.parent._metadata:
+                return self.spec[name].wrap( self.parent._metadata[name], object_session( self.parent ) )
+            return self.spec[name].wrap( self.spec[name].default, object_session( self.parent ) )
+        if name in self.parent._metadata:
+            return self.parent._metadata[name]
+
+    def __setattr__( self, name, value ):
+        if name == "parent":
+            return self.set_parent( value )
+        else:
+            if name in self.spec:
+                self.parent._metadata[name] = self.spec[name].unwrap( value )
+            else:
+                self.parent._metadata[name] = value
+
+    def remove_key( self, name ):
+        if name in self.parent._metadata:
+            del self.parent._metadata[name]
+        else:
+            log.info( "Attempted to delete invalid key '%s' from MetadataCollection" % name )
+
+    def element_is_set( self, name ):
+        return bool( self.parent._metadata.get( name, False ) )
+
+    def get_html_by_name( self, name, **kwd ):
+        if name in self.spec:
+            rval = self.spec[name].param.get_html( value=getattr( self, name ), context=self, **kwd )
+            if rval is None:
+                return self.spec[name].no_value
+            return rval
+
+    def make_dict_copy( self, to_copy ):
+        """Makes a deep copy of input iterable to_copy according to self.spec"""
+        rval = {}
+        for key, value in to_copy.items():
+            if key in self.spec:
+                rval[key] = self.spec[key].param.make_copy( value, target_context=self, source_context=to_copy )
+        return rval
+
+    def from_JSON_dict( self, filename=None, path_rewriter=None, json_dict=None ):
+        dataset = self.parent
+        if filename is not None:
+            log.debug( 'loading metadata from file for: %s %s' % ( dataset.__class__.__name__, dataset.id ) )
+            JSONified_dict = json.load( open( filename ) )
+        elif json_dict is not None:
+            log.debug( 'loading metadata from dict for: %s %s' % ( dataset.__class__.__name__, dataset.id ) )
+            if isinstance( json_dict, string_types ):
+                JSONified_dict = json.loads( json_dict )
+            elif isinstance( json_dict, dict ):
+                JSONified_dict = json_dict
+            else:
+                raise ValueError( "json_dict must be either a dictionary or a string, got %s." % ( type( json_dict ) ) )
+        else:
+            raise ValueError( "You must provide either a filename or a json_dict" )
+        for name, spec in self.spec.items():
+            if name in JSONified_dict:
+                from_ext_kwds = {}
+                external_value = JSONified_dict[ name ]
+                param = spec.param
+                if isinstance( param, FileParameter ):
+                    from_ext_kwds[ 'path_rewriter' ] = path_rewriter
+                dataset._metadata[ name ] = param.from_external_value( external_value, dataset, **from_ext_kwds )
+            elif name in dataset._metadata:
+                # if the metadata value is not found in our externally set metadata but it has a value in the 'old'
+                # metadata associated with our dataset, we'll delete it from our dataset's metadata dict
+                del dataset._metadata[ name ]
+        if '__extension__' in JSONified_dict:
+            dataset.extension = JSONified_dict['__extension__']
+
+    def to_JSON_dict( self, filename=None ):
+        # galaxy.model.customtypes.json_encoder.encode()
+        meta_dict = {}
+        dataset_meta_dict = self.parent._metadata
+        for name, spec in self.spec.items():
+            if name in dataset_meta_dict:
+                meta_dict[ name ] = spec.param.to_external_value( dataset_meta_dict[ name ] )
+        if '__extension__' in dataset_meta_dict:
+            meta_dict[ '__extension__' ] = dataset_meta_dict['__extension__']
+        if filename is None:
+            return json.dumps( meta_dict )
+        json.dump( meta_dict, open( filename, 'wb+' ) )
+
+    def __getstate__( self ):
+        # cannot pickle a weakref item (self._parent), when
+        # data._metadata_collection is None, it will be recreated on demand
+        return None
+
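+# Attribute-style access sketch (hypothetical `dataset` with this collection):
+#   dataset.metadata.columns        # wrapped stored value, or the spec default
+#   dataset.metadata.columns = 4    # unwrapped via the spec into parent._metadata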
+
+class MetadataSpecCollection( odict ):
+    """
+    A simple extension of dict which allows cleaner access to items
+    and allows the values to be iterated over directly as if it were a
+    list.  append() is also implemented for simplicity and does not
+    "append".
+    """
+    def __init__( self, dict=None ):
+        odict.__init__( self, dict=dict )
+
+    def append( self, item ):
+        self[item.name] = item
+
+    def iter( self ):
+        return iter(self.values())
+
+    def __getattr__( self, name ):
+        return self.get( name )
+
+    def __repr__( self ):
+        # force elements to draw with __str__ for sphinx-apidoc
+        return ', '.join([ item.__str__() for item in self.iter() ])
+
+
+class MetadataParameter( object ):
+    def __init__( self, spec ):
+        self.spec = spec
+
+    def get_html_field( self, value=None, context=None, other_values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+        return form_builder.TextField( self.spec.name, value=value )
+
+    def get_html( self, value, context=None, other_values=None, **kwd ):
+        """
+        The "context" is simply the metadata collection/bunch holding
+        this piece of metadata. This is passed in to allow for
+        metadata to validate against each other (note: this could turn
+        into a huge, recursive mess if not done with care). For
+        example, a column assignment should validate against the
+        number of columns in the dataset.
+        """
+        context = context or {}
+        other_values = other_values or {}
+
+        if self.spec.get("readonly"):
+            return value
+        if self.spec.get("optional"):
+            checked = False
+            if value:
+                checked = "true"
+            checkbox = form_builder.CheckboxField( "is_" + self.spec.name, checked=checked )
+            return checkbox.get_html() + self.get_html_field( value=value, context=context, other_values=other_values, **kwd ).get_html()
+        else:
+            return self.get_html_field( value=value, context=context, other_values=other_values, **kwd ).get_html()
+
+    def to_string( self, value ):
+        return str( value )
+
+    def to_safe_string( self, value ):
+        return sanitize_lists_to_string( self.to_string( value ) )
+
+    def make_copy( self, value, target_context=None, source_context=None ):
+        return copy.deepcopy( value )
+
+    @classmethod
+    def marshal( cls, value ):
+        """
+        This method should/can be overridden to convert the incoming
+        value to whatever type it is supposed to be.
+        """
+        return value
+
+    def validate( self, value ):
+        """
+        Throw an exception if the value is invalid.
+        """
+        pass
+
+    def unwrap( self, form_value ):
+        """
+        Turns a value into its storable form.
+        """
+        value = self.marshal( form_value )
+        self.validate( value )
+        return value
+
+    def wrap( self, value, session ):
+        """
+        Turns a value into its usable form.
+        """
+        return value
+
+    def from_external_value( self, value, parent ):
+        """
+        Turns a value read from an external dict into its value to be pushed directly into the metadata dict.
+        """
+        return value
+
+    def to_external_value( self, value ):
+        """
+        Turns a value read from a metadata into its value to be pushed directly into the external dict.
+        """
+        return value
+
+
+class MetadataElementSpec( object ):
+    """
+    Defines a metadata element and adds it to the metadata_spec (which
+    is a MetadataSpecCollection) of datatype.
+    """
+    def __init__( self, datatype, name=None, desc=None,
+                  param=MetadataParameter, default=None, no_value=None,
+                  visible=True, set_in_upload=False, **kwargs ):
+        self.name = name
+        self.desc = desc or name
+        self.default = default
+        self.no_value = no_value
+        self.visible = visible
+        self.set_in_upload = set_in_upload
+        # Catch-all, allows for extra attributes to be set
+        self.__dict__.update(kwargs)
+        # set up param last, as it uses values set above
+        self.param = param( self )
+        # add spec element to the spec
+        datatype.metadata_spec.append( self )
+
+    def get( self, name, default=None ):
+        return self.__dict__.get(name, default)
+
+    def wrap( self, value, session ):
+        """
+        Turns a stored value into its usable form.
+        """
+        return self.param.wrap( value, session )
+
+    def unwrap( self, value ):
+        """
+        Turns an incoming value into its storable form.
+        """
+        return self.param.unwrap( value )
+
+    def __str__( self ):
+        # TODO??: assuming param is the class of this MetadataElementSpec - add the plain class name for that
+        spec_dict = dict( param_class=self.param.__class__.__name__ )
+        spec_dict.update( self.__dict__ )
+        return ( "{name} ({param_class}): {desc}, defaults to '{default}'".format( **spec_dict ) )
+
+
+# create a statement class that, when called,
+#   will add a new MetadataElementSpec to a class's metadata_spec
+MetadataElement = Statement( MetadataElementSpec )
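+# Illustrative sketch of how a datatype class would use this statement (the
+# class and element names here are examples, not defined in this module):
+#   class ExampleTabular( data.Data ):
+#       MetadataElement( name="columns", default=0, desc="Number of columns",
+#                        visible=False )
+# At class-creation time the datatype's metaclass calls Statement.process(),
+# which turns each recorded call into a MetadataElementSpec on metadata_spec.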
+
+
+"""
+MetadataParameter sub-classes.
+"""
+
+
+class SelectParameter( MetadataParameter ):
+    def __init__( self, spec ):
+        MetadataParameter.__init__( self, spec )
+        self.values = self.spec.get( "values" )
+        self.multiple = string_as_bool( self.spec.get( "multiple" ) )
+
+    def to_string( self, value ):
+        if value in [ None, [] ]:
+            return str( self.spec.no_value )
+        if not isinstance( value, list ):
+            value = [value]
+        return ",".join( map( str, value ) )
+
+    def get_html_field( self, value=None, context=None, other_values=None, values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+
+        field = form_builder.SelectField( self.spec.name, multiple=self.multiple, display=self.spec.get("display") )
+        if self.values:
+            value_list = self.values
+        elif values:
+            value_list = values
+        elif value:
+            value_list = [ ( v, v ) for v in listify( value )]
+        else:
+            value_list = []
+        for val, label in value_list:
+            try:
+                if ( self.multiple and val in value ) or ( not self.multiple and val == value ):
+                    field.add_option( label, val, selected=True )
+                else:
+                    field.add_option( label, val, selected=False )
+            except TypeError:
+                field.add_option( val, label, selected=False )
+        return field
+
+    def get_html( self, value, context=None, other_values=None, values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+
+        if self.spec.get("readonly"):
+            if value in [ None, [] ]:
+                return str( self.spec.no_value )
+            return ", ".join( map( str, value ) )
+        return MetadataParameter.get_html( self, value, context=context, other_values=other_values, values=values, **kwd )
+
+    def wrap( self, value, session ):
+        # do we really need this (wasteful)? - yes because we are not sure that
+        # all existing selects have been stored previously as lists. Also this
+        # will handle the case where defaults/no_values are specified and are
+        # single non-list values.
+        value = self.marshal( value )
+        if self.multiple:
+            return value
+        elif value:
+            return value[0]  # single select, only return the first value
+        return None
+
+    @classmethod
+    def marshal( cls, value ):
+        # Store select as list, even if single item
+        if value is None:
+            return []
+        if not isinstance( value, list ):
+            return [value]
+        return value
+
+
+class DBKeyParameter( SelectParameter ):
+
+    def get_html_field( self, value=None, context=None, other_values=None, values=None, **kwd):
+        context = context or {}
+        other_values = other_values or {}
+        try:
+            values = kwd['trans'].app.genome_builds.get_genome_build_names( kwd['trans'] )
+        except KeyError:
+            pass
+        return super(DBKeyParameter, self).get_html_field( value, context, other_values, values, **kwd)
+
+    def get_html( self, value=None, context=None, other_values=None, values=None, **kwd):
+        context = context or {}
+        other_values = other_values or {}
+        try:
+            values = kwd['trans'].app.genome_builds.get_genome_build_names( kwd['trans'] )
+        except KeyError:
+            pass
+        return super(DBKeyParameter, self).get_html( value, context, other_values, values, **kwd)
+
+
+class RangeParameter( SelectParameter ):
+
+    def __init__( self, spec ):
+        SelectParameter.__init__( self, spec )
+        # The spec must be set with min and max values
+        self.min = spec.get( "min" ) or 1
+        self.max = spec.get( "max" ) or 1
+        self.step = self.spec.get( "step" ) or 1
+
+    def get_html_field( self, value=None, context=None, other_values=None, values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+
+        if values is None:
+            values = list(zip( range( self.min, self.max, self.step ), range( self.min, self.max, self.step ) ))
+        return SelectParameter.get_html_field( self, value=value, context=context, other_values=other_values, values=values, **kwd )
+
+    def get_html( self, value, context=None, other_values=None, values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+
+        if values is None:
+            values = list(zip( range( self.min, self.max, self.step ), range( self.min, self.max, self.step ) ))
+        return SelectParameter.get_html( self, value, context=context, other_values=other_values, values=values, **kwd )
+
+    @classmethod
+    def marshal( cls, value ):
+        value = SelectParameter.marshal( value )
+        values = [ int(x) for x in value ]
+        return values
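+# For illustration: a spec with min=1, max=10, step=3 yields the option list
+# [(1, 1), (4, 4), (7, 7)], and marshal() coerces any selection to ints.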
+
+
+class ColumnParameter( RangeParameter ):
+
+    def get_html_field( self, value=None, context=None, other_values=None, values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+
+        if values is None and context:
+            column_range = range( 1, ( context.columns or 0 ) + 1, 1 )
+            values = list(zip( column_range, column_range ))
+        return RangeParameter.get_html_field( self, value=value, context=context, other_values=other_values, values=values, **kwd )
+
+    def get_html( self, value, context=None, other_values=None, values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+
+        if values is None and context:
+            column_range = range( 1, ( context.columns or 0 ) + 1, 1 )
+            values = list(zip( column_range, column_range ))
+        return RangeParameter.get_html( self, value, context=context, other_values=other_values, values=values, **kwd )
+
+
+class ColumnTypesParameter( MetadataParameter ):
+
+    def to_string( self, value ):
+        return ",".join( map( str, value ) )
+
+
+class ListParameter( MetadataParameter ):
+
+    def to_string( self, value ):
+        return ",".join( [str(x) for x in value] )
+
+
+class DictParameter( MetadataParameter ):
+
+    def to_string( self, value ):
+        return json.dumps( value )
+
+    def to_safe_string( self, value ):
+        # We do not sanitize json dicts
+        return safe_dumps( value )
+
+
+class PythonObjectParameter( MetadataParameter ):
+
+    def to_string( self, value ):
+        if not value:
+            return self.spec._to_string( self.spec.no_value )
+        return self.spec._to_string( value )
+
+    def get_html_field( self, value=None, context=None, other_values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+        return form_builder.TextField( self.spec.name, value=self.to_string( value ) )
+
+    def get_html( self, value=None, context=None, other_values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+        return str( self )
+
+    @classmethod
+    def marshal( cls, value ):
+        return value
+
+
+class FileParameter( MetadataParameter ):
+
+    def to_string( self, value ):
+        if not value:
+            return str( self.spec.no_value )
+        return value.file_name
+
+    def to_safe_string( self, value ):
+        # We do not sanitize file names
+        return self.to_string( value )
+
+    def get_html_field( self, value=None, context=None, other_values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+        return form_builder.TextField( self.spec.name, value=str( value.id ) )
+
+    def get_html( self, value=None, context=None, other_values=None, **kwd ):
+        context = context or {}
+        other_values = other_values or {}
+        return "<div>No display available for Metadata Files</div>"
+
+    def wrap( self, value, session ):
+        if value is None:
+            return None
+        if isinstance( value, ( galaxy.model.MetadataFile, MetadataTempFile ) ):
+            return value
+        mf = session.query( galaxy.model.MetadataFile ).get( value )
+        return mf
+
+    def make_copy( self, value, target_context, source_context ):
+        value = self.wrap( value, object_session( target_context.parent ) )
+        if value:
+            new_value = galaxy.model.MetadataFile( dataset=target_context.parent, name=self.spec.name )
+            object_session( target_context.parent ).add( new_value )
+            object_session( target_context.parent ).flush()
+            shutil.copy( value.file_name, new_value.file_name )
+            return self.unwrap( new_value )
+        return None
+
+    @classmethod
+    def marshal( cls, value ):
+        if isinstance( value, galaxy.model.MetadataFile ):
+            value = value.id
+        return value
+
+    def from_external_value( self, value, parent, path_rewriter=None ):
+        """
+        Turns a value read from a external dict into its value to be pushed directly into the metadata dict.
+        """
+        if MetadataTempFile.is_JSONified_value( value ):
+            value = MetadataTempFile.from_JSON( value )
+        if isinstance( value, MetadataTempFile ):
+            mf = parent.metadata.get( self.spec.name, None)
+            if mf is None:
+                mf = self.new_file( dataset=parent, **value.kwds )
+            # Ensure the metadata file gets updated with content
+            file_name = value.file_name
+            if path_rewriter:
+                # the job may have run with a different (non-local) tmp/working
+                # directory; rewrite the path for the local host
+                file_name = path_rewriter( file_name )
+            parent.dataset.object_store.update_from_file( mf,
+                                                          file_name=file_name,
+                                                          extra_dir='_metadata_files',
+                                                          extra_dir_at_root=True,
+                                                          alt_name=os.path.basename(mf.file_name) )
+            os.unlink( file_name )
+            value = mf.id
+        return value
+
+    def to_external_value( self, value ):
+        """
+        Turns a value read from a metadata into its value to be pushed directly into the external dict.
+        """
+        if isinstance( value, galaxy.model.MetadataFile ):
+            value = value.id
+        elif isinstance( value, MetadataTempFile ):
+            value = MetadataTempFile.to_JSON( value )
+        return value
+
+    def new_file( self, dataset=None, **kwds ):
+        if object_session( dataset ):
+            mf = galaxy.model.MetadataFile( name=self.spec.name, dataset=dataset, **kwds )
+            object_session( dataset ).add( mf )
+            object_session( dataset ).flush()  # flush to assign id
+            return mf
+        else:
+            # we need to make a tmp file that is accessible to the head node;
+            # we will be copying its contents into the MetadataFile object's filename after restoring from JSON.
+            # we do not include 'dataset' in the kwds passed, as from_JSON_value() will handle this for us
+            return MetadataTempFile( **kwds )
+
+
+# This class is used when a database file connection is not available
+class MetadataTempFile( object ):
+    tmp_dir = 'database/tmp'  # this should be overwritten as necessary in calling scripts
+
+    def __init__( self, **kwds ):
+        self.kwds = kwds
+        self._filename = None
+
+    @property
+    def file_name( self ):
+        if self._filename is None:
+            # we need to create a tmp file accessible across all nodes/heads, save the name, and return it
+            self._filename = abspath( tempfile.NamedTemporaryFile( dir=self.tmp_dir, prefix="metadata_temp_file_" ).name )
+            open( self._filename, 'wb+' )  # create an empty file on disk, so the name cannot be reused by tempfile
+        return self._filename
+
+    def to_JSON( self ):
+        return { '__class__': self.__class__.__name__,
+                 'filename': self.file_name,
+                 'kwds': self.kwds }
+
+    @classmethod
+    def from_JSON( cls, json_dict ):
+        # need to ensure our keywords are not unicode
+        rval = cls( **stringify_dictionary_keys( json_dict['kwds'] ) )
+        rval._filename = json_dict['filename']
+        return rval
+
+    @classmethod
+    def is_JSONified_value( cls, value ):
+        return ( isinstance( value, dict ) and value.get( '__class__', None ) == cls.__name__ )
+
+    @classmethod
+    def cleanup_from_JSON_dict_filename( cls, filename ):
+        try:
+            for key, value in json.load( open( filename ) ).items():
+                if cls.is_JSONified_value( value ):
+                    value = cls.from_JSON( value )
+                if isinstance( value, cls ) and os.path.exists( value.file_name ):
+                    log.debug( 'Cleaning up abandoned MetadataTempFile file: %s' % value.file_name )
+                    os.unlink( value.file_name )
+        except Exception as e:
+            log.debug( 'Failed to cleanup MetadataTempFile temp files from %s: %s' % ( filename, e ) )
+
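+# A minimal round-trip sketch of MetadataTempFile's JSON exchange (illustrative
+# only; assumes MetadataTempFile.tmp_dir exists and is writable):
+#
+#     tmp = MetadataTempFile()
+#     as_json = tmp.to_JSON()    # {'__class__': 'MetadataTempFile', 'filename': ..., 'kwds': {}}
+#     assert MetadataTempFile.is_JSONified_value( as_json )
+#     restored = MetadataTempFile.from_JSON( as_json )
+#     assert restored.file_name == tmp.file_name
+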
+
+class JobExternalOutputMetadataWrapper( object ):
+    """
+    Class with methods allowing set_meta() to be called externally to the
+    Galaxy head.
+    This class allows access to external metadata filenames for all outputs
+    associated with a job.
+    We will use JSON as the medium of exchange, except for the
+    DatasetInstance object, which is pickled (in the future this could be
+    JSONified as well).
+    """
+
+    def __init__( self, job ):
+        self.job_id = job.id
+
+    def get_output_filenames_by_dataset( self, dataset, sa_session ):
+        if isinstance( dataset, galaxy.model.HistoryDatasetAssociation ):
+            return sa_session.query( galaxy.model.JobExternalOutputMetadata ) \
+                             .filter_by( job_id=self.job_id,
+                                         history_dataset_association_id=dataset.id,
+                                         is_valid=True ) \
+                             .first()  # there should only be one or None
+        elif isinstance( dataset, galaxy.model.LibraryDatasetDatasetAssociation ):
+            return sa_session.query( galaxy.model.JobExternalOutputMetadata ) \
+                             .filter_by( job_id=self.job_id,
+                                         library_dataset_dataset_association_id=dataset.id,
+                                         is_valid=True ) \
+                             .first()  # there should only be one or None
+        return None
+
+    def get_dataset_metadata_key( self, dataset ):
+        # set_meta() can be called on both library items and history items;
+        # we need different keys for them, since their ids can overlap
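+        # e.g. "HistoryDatasetAssociation_12" vs. "LibraryDatasetDatasetAssociation_12" (hypothetical ids)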
+        return "%s_%d" % ( dataset.__class__.__name__, dataset.id )
+
+    def invalidate_external_metadata( self, datasets, sa_session ):
+        for dataset in datasets:
+            jeom = self.get_output_filenames_by_dataset( dataset, sa_session )
+            # shouldn't be more than one valid, but you never know
+            while jeom:
+                jeom.is_valid = False
+                sa_session.add( jeom )
+                sa_session.flush()
+                jeom = self.get_output_filenames_by_dataset( dataset, sa_session )
+
+    def setup_external_metadata( self, datasets, sa_session, exec_dir=None,
+                                 tmp_dir=None, dataset_files_path=None,
+                                 output_fnames=None, config_root=None,
+                                 config_file=None, datatypes_config=None,
+                                 job_metadata=None, compute_tmp_dir=None,
+                                 include_command=True, max_metadata_value_size=0,
+                                 kwds=None):
+        kwds = kwds or {}
+        if tmp_dir is None:
+            tmp_dir = MetadataTempFile.tmp_dir
+        else:
+            MetadataTempFile.tmp_dir = tmp_dir
+
+        if not os.path.exists(tmp_dir):
+            os.makedirs(tmp_dir)
+
+        # path is calculated for Galaxy, may be different on compute - rewrite
+        # for the compute server.
+        def metadata_path_on_compute(path):
+            compute_path = path
+            if compute_tmp_dir and tmp_dir and in_directory(path, tmp_dir):
+                path_relative = os.path.relpath(path, tmp_dir)
+                compute_path = os.path.join(compute_tmp_dir, path_relative)
+            return compute_path
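+        # e.g. (hypothetical paths): with tmp_dir='/galaxy/database/tmp' and
+        # compute_tmp_dir='/scratch/job/tmp', '/galaxy/database/tmp/metadata_in_X'
+        # is rewritten to '/scratch/job/tmp/metadata_in_X'; paths outside
+        # tmp_dir are returned unchanged.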
+
+        # fill in metadata_files_dict and return the command with args required to set metadata
+        def __metadata_files_list_to_cmd_line( metadata_files ):
+            def __get_filename_override():
+                if output_fnames:
+                    for dataset_path in output_fnames:
+                        if dataset_path.real_path == metadata_files.dataset.file_name:
+                            return dataset_path.false_path or dataset_path.real_path
+                return ""
+            line = '"%s,%s,%s,%s,%s,%s"' % (
+                metadata_path_on_compute(metadata_files.filename_in),
+                metadata_path_on_compute(metadata_files.filename_kwds),
+                metadata_path_on_compute(metadata_files.filename_out),
+                metadata_path_on_compute(metadata_files.filename_results_code),
+                __get_filename_override(),
+                metadata_path_on_compute(metadata_files.filename_override_metadata),
+            )
+            return line
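+        # each metadata_files entry thus becomes one quoted argument such as
+        # "in.tmp,kwds.tmp,out.tmp,results.tmp,,override.tmp" (hypothetical
+        # filenames; the fifth field is empty when no filename override applies)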
+        if not isinstance( datasets, list ):
+            datasets = [ datasets ]
+        if exec_dir is None:
+            exec_dir = os.path.abspath( os.getcwd() )
+        if dataset_files_path is None:
+            dataset_files_path = galaxy.model.Dataset.file_path
+        if config_root is None:
+            config_root = os.path.abspath( os.getcwd() )
+        if datatypes_config is None:
+            raise Exception( 'In setup_external_metadata, the received datatypes_config is None.' )
+        metadata_files_list = []
+        for dataset in datasets:
+            key = self.get_dataset_metadata_key( dataset )
+            # future note:
+            # wonkiness in job execution causes the command line to be built more than once
+            # when setting metadata externally (e.g. via the 'auto-detect' button in edit attributes);
+            # we don't want to overwrite (and lose the ability to clean up) our existing dataset keys and files,
+            # so we will only populate the dictionary once
+            metadata_files = self.get_output_filenames_by_dataset( dataset, sa_session )
+            if not metadata_files:
+                job = sa_session.query( galaxy.model.Job ).get( self.job_id )
+                metadata_files = galaxy.model.JobExternalOutputMetadata( job=job, dataset=dataset )
+                # we are using tempfile to create unique filenames, tempfile always returns an absolute path
+                # we will use pathnames relative to the galaxy root, to accommodate instances where the galaxy root
+                # is located differently, i.e. on a cluster node with a different filesystem structure
+
+                # file to store existing dataset
+                metadata_files.filename_in = abspath( tempfile.NamedTemporaryFile( dir=tmp_dir, prefix="metadata_in_%s_" % key ).name )
+
+                # FIXME: HACK
+                # sqlalchemy introduced the 'expire_on_commit' flag for sessionmaker in version 0.5.x.
+                # This may be causing the dataset attribute of the dataset_association object to no longer be loaded into memory when needed for pickling.
+                # For now, we'll simply 'touch' dataset_association.dataset to force it back into memory.
+                dataset.dataset  # force dataset_association.dataset to be loaded before pickling
+                # A better fix could be setting 'expire_on_commit=False' on the session, or modifying where commits occur.
+
+                # Touch also deferred column
+                dataset._metadata
+
+                cPickle.dump( dataset, open( metadata_files.filename_in, 'wb+' ) )
+                # file to store metadata results of set_meta()
+                metadata_files.filename_out = abspath( tempfile.NamedTemporaryFile( dir=tmp_dir, prefix="metadata_out_%s_" % key ).name )
+                open( metadata_files.filename_out, 'wb+' )  # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
+                # file to store a 'return code' indicating the results of the set_meta() call
+                # the results code is a tuple: ( True/False indicating whether setting metadata succeeded, an exception or string giving the reason for success/failure )
+                metadata_files.filename_results_code = abspath( tempfile.NamedTemporaryFile( dir=tmp_dir, prefix="metadata_results_%s_" % key ).name )
+                # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
+                json.dump( ( False, 'External set_meta() not called' ), open( metadata_files.filename_results_code, 'wb+' ) )
+                # file to store kwds passed to set_meta()
+                metadata_files.filename_kwds = abspath( tempfile.NamedTemporaryFile( dir=tmp_dir, prefix="metadata_kwds_%s_" % key ).name )
+                json.dump( kwds, open( metadata_files.filename_kwds, 'wb+' ), ensure_ascii=True )
+                # existing metadata file parameters need to be overridden with cluster-writable file locations
+                metadata_files.filename_override_metadata = abspath( tempfile.NamedTemporaryFile( dir=tmp_dir, prefix="metadata_override_%s_" % key ).name )
+                open( metadata_files.filename_override_metadata, 'wb+' )  # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible)
+                override_metadata = []
+                for meta_key, spec_value in dataset.metadata.spec.items():
+                    if isinstance( spec_value.param, FileParameter ) and dataset.metadata.get( meta_key, None ) is not None:
+                        metadata_temp = MetadataTempFile()
+                        shutil.copy( dataset.metadata.get( meta_key, None ).file_name, metadata_temp.file_name )
+                        override_metadata.append( ( meta_key, metadata_temp.to_JSON() ) )
+                json.dump( override_metadata, open( metadata_files.filename_override_metadata, 'wb+' ) )
+                # add to session and flush
+                sa_session.add( metadata_files )
+                sa_session.flush()
+            metadata_files_list.append( metadata_files )
+        args = '"%s" "%s" %s %s' % ( datatypes_config,
+                                     job_metadata,
+                                     " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ),
+                                     max_metadata_value_size)
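+        # args thus looks like e.g. '"datatypes_conf.xml" "None" "in.tmp,...,override.tmp" 0' (illustrative)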
+        if include_command:
+            # return command required to build
+            fd, fp = tempfile.mkstemp( suffix='.py', dir=tmp_dir, prefix="set_metadata_" )
+            metadata_script_file = abspath( fp )
+            os.fdopen( fd, 'w' ).write( 'from galaxy_ext.metadata.set_metadata import set_metadata; set_metadata()' )
+            return 'python "%s" %s' % ( metadata_path_on_compute(metadata_script_file), args )
+        else:
+            # return args to galaxy_ext.metadata.set_metadata required to build
+            return args
+
+    def external_metadata_set_successfully( self, dataset, sa_session ):
+        metadata_files = self.get_output_filenames_by_dataset( dataset, sa_session )
+        if not metadata_files:
+            return False  # this file doesn't exist
+        rval, rstring = json.load( open( metadata_files.filename_results_code ) )
+        if not rval:
+            log.debug( 'setting metadata externally failed for %s %s: %s' % ( dataset.__class__.__name__, dataset.id, rstring ) )
+        return rval
+
+    def cleanup_external_metadata( self, sa_session ):
+        log.debug( 'Cleaning up external metadata files' )
+        for metadata_files in sa_session.query( galaxy.model.Job ).get( self.job_id ).external_output_metadata:
+            # we need to confirm that any MetadataTempFile files were removed; if not, we remove them here.
+            # this can occur if the job was stopped before completion while a MetadataTempFile was in use by set_meta()
+            MetadataTempFile.cleanup_from_JSON_dict_filename( metadata_files.filename_out )
+            dataset_key = self.get_dataset_metadata_key( metadata_files.dataset )
+            for key, fname in [ ( 'filename_in', metadata_files.filename_in ),
+                                ( 'filename_out', metadata_files.filename_out ),
+                                ( 'filename_results_code', metadata_files.filename_results_code ),
+                                ( 'filename_kwds', metadata_files.filename_kwds ),
+                                ( 'filename_override_metadata', metadata_files.filename_override_metadata ) ]:
+                try:
+                    os.remove( fname )
+                except Exception as e:
+                    log.debug( 'Failed to cleanup external metadata file (%s) for %s: %s' % ( key, dataset_key, e ) )
+
+    def set_job_runner_external_pid( self, pid, sa_session ):
+        for metadata_files in sa_session.query( galaxy.model.Job ).get( self.job_id ).external_output_metadata:
+            metadata_files.job_runner_external_pid = pid
+            sa_session.add( metadata_files )
+            sa_session.flush()
+
+
+__all__ = (
+    "Statement",
+    "MetadataElement",
+    "MetadataCollection",
+    "MetadataSpecCollection",
+    "MetadataParameter",
+    "MetadataElementSpec",
+    "SelectParameter",
+    "DBKeyParameter",
+    "RangeParameter",
+    "ColumnParameter",
+    "ColumnTypesParameter",
+    "ListParameter",
+    "DictParameter",
+    "PythonObjectParameter",
+    "FileParameter",
+    "MetadataTempFile",
+    "JobExternalOutputMetadataWrapper",
+)
diff --git a/lib/galaxy/model/migrate/__init__.py b/lib/galaxy/model/migrate/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/model/migrate/check.py b/lib/galaxy/model/migrate/check.py
new file mode 100644
index 0000000..2d71a44
--- /dev/null
+++ b/lib/galaxy/model/migrate/check.py
@@ -0,0 +1,127 @@
+import sys
+import os.path
+import logging
+
+from migrate.versioning import repository, schema
+
+from sqlalchemy import create_engine
+from sqlalchemy import MetaData
+from sqlalchemy.exc import NoSuchTableError
+from sqlalchemy import Table
+
+log = logging.getLogger( __name__ )
+
+# path relative to galaxy
+migrate_repository_directory = os.path.abspath(os.path.dirname( __file__ )).replace( os.getcwd() + os.path.sep, '', 1 )
+migrate_repository = repository.Repository( migrate_repository_directory )
+
+
+def create_or_verify_database( url, galaxy_config_file, engine_options={}, app=None ):
+    """
+    Check that the database is usable, possibly creating it if empty (this is
+    the only time we automatically create tables, otherwise we force the
+    user to do it using the management script so they can create backups).
+
+    1) Empty database --> initialize with latest version and return
+    2) Database older than migration support --> fail and require manual update
+    3) Database at state where migrate support introduced --> add version control information but make no changes (might still require manual update)
+    4) Database versioned but out of date --> fail with informative message, user must run "sh manage_db.sh upgrade"
+    """
+    # Create engine and metadata
+    engine = create_engine( url, **engine_options )
+
+    def migrate():
+        try:
+            # Declare the database to be under a repository's version control
+            db_schema = schema.ControlledSchema.create( engine, migrate_repository )
+        except:
+            # The database is already under version control
+            db_schema = schema.ControlledSchema( engine, migrate_repository )
+        # Apply all scripts to get to current version
+        migrate_to_current_version( engine, db_schema )
+
+    meta = MetaData( bind=engine )
+    if app and getattr( app.config, 'database_auto_migrate', False ):
+        migrate()
+        return
+
+    # Try to load dataset table
+    try:
+        Table( "dataset", meta, autoload=True )
+    except NoSuchTableError:
+        # No 'dataset' table means a completely uninitialized database.  If we have an app, we'll
+        # set its new_installation setting to True so the tool migration process will be skipped.
+        if app:
+            app.new_installation = True
+        log.info( "No database, initializing" )
+        migrate()
+        return
+    try:
+        hda_table = Table( "history_dataset_association", meta, autoload=True )
+    except NoSuchTableError:
+        raise Exception( "Your database is older than hg revision 1464:c7acaa1bb88f and will need to be updated manually" )
+    # There is a 'history_dataset_association' table, so we (hopefully) have
+    # version 1 of the database, but without the migrate_version table. This
+    # happens if the user has a build from right before migration was added.
+    # Verify that this is true, if it is any older they'll have to update
+    # manually
+    if 'copied_from_history_dataset_association_id' not in hda_table.c:
+        # The 'copied_from_history_dataset_association_id' column was added in
+        # rev 1464:c7acaa1bb88f.  This is the oldest revision we currently do
+        # automated versioning for, so stop here
+        raise Exception( "Your database is older than hg revision 1464:c7acaa1bb88f and will need to be updated manually" )
+    # At revision 1464:c7acaa1bb88f or greater (database version 1), make sure
+    # that the db has version information. This is the trickiest case -- we
+    # have a database but no version control, and are assuming it is a certain
+    # version. If the user has applied post-version-1 changes, this could
+    # cause problems.
+    try:
+        Table( "migrate_version", meta, autoload=True )
+    except NoSuchTableError:
+        # The database exists but is not yet under migrate version control, so init with version 1
+        log.info( "Adding version control to existing database" )
+        try:
+            Table( "metadata_file", meta, autoload=True )
+            schema.ControlledSchema.create( engine, migrate_repository, version=2 )
+        except NoSuchTableError:
+            schema.ControlledSchema.create( engine, migrate_repository, version=1 )
+    # Verify that the code and the DB are in sync
+    db_schema = schema.ControlledSchema( engine, migrate_repository )
+    if migrate_repository.versions.latest != db_schema.version:
+        config_arg = ''
+        if galaxy_config_file and os.path.abspath( os.path.join( os.getcwd(), 'config', 'galaxy.ini' ) ) != galaxy_config_file:
+            config_arg = ' -c %s' % galaxy_config_file.replace( os.path.abspath( os.getcwd() ), '.' )
+        raise Exception( "Your database has version '%d' but this code expects version '%d'.  Please backup your database and then migrate the schema by running 'sh manage_db.sh%s upgrade'."
+                         % ( db_schema.version, migrate_repository.versions.latest, config_arg ) )
+    else:
+        log.info( "At database version %d" % db_schema.version )
+
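+# A minimal usage sketch (hypothetical database URL; Galaxy itself calls this
+# during application startup with values taken from its configuration):
+#
+#     create_or_verify_database(
+#         'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE',
+#         'config/galaxy.ini' )
+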
+
+def migrate_to_current_version( engine, schema ):
+    # Changes to get to current version
+    try:
+        changeset = schema.changeset( None )
+    except Exception as e:
+        log.error("Problem determining migration changeset for engine [%s]" % engine)
+        raise e
+    for ver, change in changeset:
+        nextver = ver + changeset.step
+        log.info( 'Migrating %s -> %s... ' % ( ver, nextver ) )
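+        # sqlalchemy-migrate change scripts print progress to stdout; swap in a
+        # buffering stand-in so their output can be redirected into the log below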
+        old_stdout = sys.stdout
+
+        class FakeStdout( object ):
+            def __init__( self ):
+                self.buffer = []
+
+            def write( self, s ):
+                self.buffer.append( s )
+
+            def flush( self ):
+                pass
+        sys.stdout = FakeStdout()
+        try:
+            schema.runchange( ver, change, changeset.step )
+        finally:
+            for message in "".join( sys.stdout.buffer ).split( "\n" ):
+                log.info( message )
+            sys.stdout = old_stdout
diff --git a/lib/galaxy/model/migrate/migrate.cfg b/lib/galaxy/model/migrate/migrate.cfg
new file mode 100644
index 0000000..3fd7400
--- /dev/null
+++ b/lib/galaxy/model/migrate/migrate.cfg
@@ -0,0 +1,20 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=Galaxy
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to 
+# change the table name in each database too. 
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the 
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the 
+# commit continues, perhaps ending successfully. 
+# Databases in this list MUST compile successfully during a commit, or the 
+# entire commit will fail. List the databases your application will actually 
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
diff --git a/lib/galaxy/model/migrate/versions/0001_initial_tables.py b/lib/galaxy/model/migrate/versions/0001_initial_tables.py
new file mode 100644
index 0000000..82d5627
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0001_initial_tables.py
@@ -0,0 +1,201 @@
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Numeric, String, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Tables as of changeset 1464:c7acaa1bb88f
+User_table = Table( "galaxy_user", metadata,
+    Column( "id", Integer, primary_key=True),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "email", TrimmedString( 255 ), nullable=False ),
+    Column( "password", TrimmedString( 40 ), nullable=False ),
+    Column( "external", Boolean, default=False ) )
+
+History_table = Table( "history", metadata,
+    Column( "id", Integer, primary_key=True),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "hid_counter", Integer, default=1 ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "genome_build", TrimmedString( 40 ) ) )
+
+HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "copied_from_history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), nullable=True ),
+    Column( "hid", Integer ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "blurb", TrimmedString( 255 ) ),
+    Column( "peek", TEXT ),
+    Column( "extension", TrimmedString( 64 ) ),
+    Column( "metadata", MetadataType(), key="_metadata" ),
+    Column( "parent_id", Integer, ForeignKey( "history_dataset_association.id" ), nullable=True ),
+    Column( "designation", TrimmedString( 255 ) ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "visible", Boolean ) )
+
+Dataset_table = Table( "dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "state", TrimmedString( 64 ) ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "purgable", Boolean, default=True ),
+    Column( "external_filename", TEXT ),
+    Column( "_extra_files_path", TEXT ),
+    Column( 'file_size', Numeric( 15, 0 ) ) )
+
+ImplicitlyConvertedDatasetAssociation_table = Table( "implicitly_converted_dataset_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
+    Column( "hda_parent_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "metadata_safe", Boolean, index=True, default=True ),
+    Column( "type", TrimmedString( 255 ) ) )
+
+ValidationError_table = Table( "validation_error", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "message", TrimmedString( 255 ) ),
+    Column( "err_type", TrimmedString( 64 ) ),
+    Column( "attributes", TEXT ) )
+
+Job_table = Table( "job", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "tool_id", String( 255 ) ),
+    Column( "tool_version", TEXT, default="1.0.0" ),
+    Column( "state", String( 64 ) ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "command_line", TEXT ),
+    Column( "param_filename", String( 1024 ) ),
+    Column( "runner_name", String( 255 ) ),
+    Column( "stdout", TEXT ),
+    Column( "stderr", TEXT ),
+    Column( "traceback", TEXT ),
+    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
+    Column( "job_runner_name", String( 255 ) ),
+    Column( "job_runner_external_id", String( 255 ) ) )
+
+JobParameter_table = Table( "job_parameter", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "name", String(255) ),
+    Column( "value", TEXT ) )
+
+JobToInputDatasetAssociation_table = Table( "job_to_input_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "name", String(255) ) )
+
+JobToOutputDatasetAssociation_table = Table( "job_to_output_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "name", String(255) ) )
+
+Event_table = Table( "event", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True, nullable=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
+    Column( "message", TrimmedString( 1024 ) ),
+    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
+    Column( "tool_id", String( 255 ) ) )
+
+GalaxySession_table = Table( "galaxy_session", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
+    Column( "remote_host", String( 255 ) ),
+    Column( "remote_addr", String( 255 ) ),
+    Column( "referer", TEXT ),
+    Column( "current_history_id", Integer, ForeignKey( "history.id" ), nullable=True ),
+    Column( "session_key", TrimmedString( 255 ), index=True, unique=True ),
+    Column( "is_valid", Boolean, default=False ),
+    Column( "prev_session_id", Integer ) )
+
+GalaxySessionToHistoryAssociation_table = Table( "galaxy_session_to_history", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ) )
+
+StoredWorkflow_table = Table( "stored_workflow", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+    Column( "latest_workflow_id", Integer,
+            ForeignKey( "workflow.id", use_alter=True, name='stored_workflow_latest_workflow_id_fk' ), index=True ),
+    Column( "name", TEXT ),
+    Column( "deleted", Boolean, default=False ) )
+
+Workflow_table = Table( "workflow", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True, nullable=False ),
+    Column( "name", TEXT ),
+    Column( "has_cycles", Boolean ),
+    Column( "has_errors", Boolean ) )
+
+WorkflowStep_table = Table( "workflow_step", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ),
+    Column( "type", String(64) ),
+    Column( "tool_id", TEXT ),
+    Column( "tool_version", TEXT ),
+    Column( "tool_inputs", JSONType ),
+    Column( "tool_errors", JSONType ),
+    Column( "position", JSONType ),
+    Column( "config", JSONType ),
+    Column( "order_index", Integer ) )
+
+WorkflowStepConnection_table = Table( "workflow_step_connection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "output_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+    Column( "input_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+    Column( "output_name", TEXT ),
+    Column( "input_name", TEXT) )
+
+StoredWorkflowUserShareAssociation_table = Table( "stored_workflow_user_share_connection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+StoredWorkflowMenuEntry_table = Table( "stored_workflow_menu_entry", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "order_index", Integer ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.create_all()
diff --git a/lib/galaxy/model/migrate/versions/0002_metadata_file_table.py b/lib/galaxy/model/migrate/versions/0002_metadata_file_table.py
new file mode 100644
index 0000000..d1b20f0
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0002_metadata_file_table.py
@@ -0,0 +1,30 @@
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# New table in changeset 1568:0b022adfdc34
+MetadataFile_table = Table( "metadata_file", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "name", TEXT ),
+    Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    MetadataFile_table.create()
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    MetadataFile_table.drop()
diff --git a/lib/galaxy/model/migrate/versions/0003_security_and_libraries.py b/lib/galaxy/model/migrate/versions/0003_security_and_libraries.py
new file mode 100644
index 0000000..4bdef1f
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0003_security_and_libraries.py
@@ -0,0 +1,770 @@
+import datetime
+import logging
+import sys
+
+from migrate import ForeignKeyConstraint
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Index, Integer, MetaData, String, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+# New tables as of changeset 2341:5498ac35eedd
+Group_table = Table( "galaxy_group", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", String( 255 ), index=True, unique=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+UserGroupAssociation_table = Table( "user_group_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+UserRoleAssociation_table = Table( "user_role_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+GroupRoleAssociation_table = Table( "group_role_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+Role_table = Table( "role", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", String( 255 ), index=True, unique=True ),
+    Column( "description", TEXT ),
+    Column( "type", String( 40 ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+DatasetPermissions_table = Table( "dataset_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+LibraryPermissions_table = Table( "library_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_id", Integer, ForeignKey( "library.id" ), nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+LibraryFolderPermissions_table = Table( "library_folder_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+LibraryDatasetPermissions_table = Table( "library_dataset_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+LibraryDatasetDatasetAssociationPermissions_table = Table( "library_dataset_dataset_association_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+Index( "ix_lddap_library_dataset_dataset_association_id", LibraryDatasetDatasetAssociationPermissions_table.c.library_dataset_dataset_association_id )
+
+LibraryItemInfoPermissions_table = Table( "library_item_info_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), nullable=True, index=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+LibraryItemInfoTemplatePermissions_table = Table( "library_item_info_template_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "action", TEXT ),
+    Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), nullable=True ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+Index( "ix_liitp_library_item_info_template_id", LibraryItemInfoTemplatePermissions_table.c.library_item_info_template_id )
+
+DefaultUserPermissions_table = Table( "default_user_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "action", TEXT ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+DefaultHistoryPermissions_table = Table( "default_history_permissions", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+    Column( "action", TEXT ),
+    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+LibraryDataset_table = Table( "library_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name="library_dataset_dataset_association_id_fk" ), nullable=True, index=True ),  # current version of dataset, if null, there is not a current version selected
+    Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
+    Column( "order_id", Integer ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TrimmedString( 255 ), key="_name" ),  # when not None/null this will supercede display in library (but not when imported into user's history?)
+    Column( "info", TrimmedString( 255 ), key="_info" ),  # when not None/null this will supercede display in library (but not when imported into user's history?)
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "copied_from_history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id", use_alter=True, name='history_dataset_association_dataset_id_fkey' ), nullable=True ),
+    Column( "copied_from_library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name='library_dataset_dataset_association_id_fkey' ), nullable=True ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "blurb", TrimmedString( 255 ) ),
+    Column( "peek", TEXT ),
+    Column( "extension", TrimmedString( 64 ) ),
+    Column( "metadata", MetadataType(), key="_metadata" ),
+    Column( "parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "designation", TrimmedString( 255 ) ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "visible", Boolean ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "message", TrimmedString( 255 ) ) )
+
+Library_table = Table( "library", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "root_folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", String( 255 ), index=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "description", TEXT ) )
+
+LibraryFolder_table = Table( "library_folder", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "parent_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TEXT ),
+    Column( "description", TEXT ),
+    Column( "order_id", Integer ),
+    Column( "item_count", Integer ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "genome_build", TrimmedString( 40 ) ) )
+
+LibraryItemInfoTemplateElement_table = Table( "library_item_info_template_element", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "optional", Boolean, index=True, default=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "name", TEXT ),
+    Column( "description", TEXT ),
+    Column( "type", TEXT, default='string' ),
+    Column( "order_id", Integer ),
+    Column( "options", JSONType() ),
+    Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ) ) )
+Index( "ix_liite_library_item_info_template_id", LibraryItemInfoTemplateElement_table.c.library_item_info_template_id )
+
+LibraryItemInfoTemplate_table = Table( "library_item_info_template", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "optional", Boolean, index=True, default=True ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "name", TEXT ),
+    Column( "description", TEXT ),
+    Column( "item_count", Integer, default=0 ) )
+
+LibraryInfoTemplateAssociation_table = Table( "library_info_template_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "library_id", Integer, ForeignKey( "library.id" ), nullable=True, index=True ),
+    Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ) ) )
+Index( "ix_lita_library_item_info_template_id", LibraryInfoTemplateAssociation_table.c.library_item_info_template_id )
+
+LibraryFolderInfoTemplateAssociation_table = Table( "library_folder_info_template_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+    Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ) ) )
+Index( "ix_lfita_library_item_info_template_id", LibraryFolderInfoTemplateAssociation_table.c.library_item_info_template_id )
+
+LibraryDatasetInfoTemplateAssociation_table = Table( "library_dataset_info_template_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), nullable=True, index=True ),
+    Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ) ) )
+Index( "ix_ldita_library_item_info_template_id", LibraryDatasetInfoTemplateAssociation_table.c.library_item_info_template_id )
+
+LibraryDatasetDatasetInfoTemplateAssociation_table = Table( "library_dataset_dataset_info_template_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ) ) )
+Index( "ix_lddita_library_dataset_dataset_association_id", LibraryDatasetDatasetInfoTemplateAssociation_table.c.library_dataset_dataset_association_id )
+Index( "ix_lddita_library_item_info_template_id", LibraryDatasetDatasetInfoTemplateAssociation_table.c.library_item_info_template_id )
+
+LibraryItemInfoElement_table = Table( "library_item_info_element", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "contents", JSONType() ),
+    Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
+    Column( "library_item_info_template_element_id", Integer, ForeignKey( "library_item_info_template_element.id" ) ) )
+Index( "ix_liie_library_item_info_template_element_id", LibraryItemInfoElement_table.c.library_item_info_template_element_id )
+
+LibraryItemInfo_table = Table( "library_item_info", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ),
+    Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), nullable=True, index=True ) )
+
+LibraryInfoAssociation_table = Table( "library_info_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "library_id", Integer, ForeignKey( "library.id" ), nullable=True, index=True ),
+    Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ) )
+
+LibraryFolderInfoAssociation_table = Table( "library_folder_info_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+    Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ) )
+
+LibraryDatasetInfoAssociation_table = Table( "library_dataset_info_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), nullable=True, index=True ),
+    Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ) )
+
+LibraryDatasetDatasetInfoAssociation_table = Table( "library_dataset_dataset_info_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ) ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ) )
+Index( "ix_lddia_library_dataset_dataset_association_id", LibraryDatasetDatasetInfoAssociation_table.c.library_dataset_dataset_association_id )
+Index( "ix_lddia_library_item_info_id", LibraryDatasetDatasetInfoAssociation_table.c.library_item_info_id )
+
+JobExternalOutputMetadata_table = Table( "job_external_output_metadata", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
+    Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "filename_in", String( 255 ) ),
+    Column( "filename_out", String( 255 ) ),
+    Column( "filename_results_code", String( 255 ) ),
+    Column( "filename_kwds", String( 255 ) ),
+    Column( "job_runner_external_pid", String( 255 ) ) )
+Index( "ix_jeom_library_dataset_dataset_association_id", JobExternalOutputMetadata_table.c.library_dataset_dataset_association_id )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+
+    def nextval( table, col='id' ):
+        if migrate_engine.name in ['postgres', 'postgresql']:
+            return "nextval('%s_%s_seq')" % ( table, col )
+        elif migrate_engine.name in ['mysql', 'sqlite']:
+            return "null"
+        else:
+            raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
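+    # e.g. nextval( 'galaxy_group' ) evaluates to "nextval('galaxy_group_id_seq')" on PostgreSQL (illustrative)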
+
+    def localtimestamp():
+        if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+            return "LOCALTIMESTAMP"
+        elif migrate_engine.name == 'sqlite':
+            return "current_date || ' ' || current_time"
+        else:
+            raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+    def boolean_false():
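+        # SQLite has no native boolean type, so false is rendered as the integer 0.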
+        if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+            return False
+        elif migrate_engine.name == 'sqlite':
+            return 0
+        else:
+            raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
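+    # Every change below is wrapped in try/except: failures are only logged, so
+    # the migration keeps going (and can be re-run) when part of the schema
+    # already exists.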
+    # Add 2 new columns to the galaxy_user table
+    try:
+        User_table = Table( "galaxy_user", metadata, autoload=True )
+    except NoSuchTableError:
+        User_table = None
+        log.debug( "Failed loading table galaxy_user" )
+    if User_table is not None:
+        try:
+            col = Column( 'deleted', Boolean, index=True, default=False )
+            col.create( User_table, index_name='ix_user_deleted' )
+            assert col is User_table.c.deleted
+        except Exception as e:
+            log.debug( "Adding column 'deleted' to galaxy_user table failed: %s" % ( str( e ) ) )
+        try:
+            col = Column( 'purged', Boolean, index=True, default=False )
+            col.create( User_table, index_name='ix_user_purged' )
+            assert col is User_table.c.purged
+        except Exception as e:
+            log.debug( "Adding column 'purged' to galaxy_user table failed: %s" % ( str( e ) ) )
+    # Add 1 new column to the history_dataset_association table
+    try:
+        HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
+    except NoSuchTableError:
+        HistoryDatasetAssociation_table = None
+        log.debug( "Failed loading table history_dataset_association" )
+    if HistoryDatasetAssociation_table is not None:
+        try:
+            col = Column( 'copied_from_library_dataset_dataset_association_id', Integer, nullable=True )
+            col.create( HistoryDatasetAssociation_table )
+            assert col is HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id
+        except Exception as e:
+            log.debug( "Adding column 'copied_from_library_dataset_dataset_association_id' to history_dataset_association table failed: %s" % ( str( e ) ) )
+    # Add 1 new column to the metadata_file table
+    try:
+        MetadataFile_table = Table( "metadata_file", metadata, autoload=True )
+    except NoSuchTableError:
+        MetadataFile_table = None
+        log.debug( "Failed loading table metadata_file" )
+    if MetadataFile_table is not None:
+        try:
+            col = Column( 'lda_id', Integer, index=True, nullable=True )
+            col.create( MetadataFile_table, index_name='ix_metadata_file_lda_id' )
+            assert col is MetadataFile_table.c.lda_id
+        except Exception as e:
+            log.debug( "Adding column 'lda_id' to metadata_file table failed: %s" % ( str( e ) ) )
+    # Add 1 new column to the stored_workflow table - changeset 2328
+    try:
+        StoredWorkflow_table = Table( "stored_workflow", metadata,
+            Column( "latest_workflow_id", Integer,
+                ForeignKey( "workflow.id", use_alter=True, name='stored_workflow_latest_workflow_id_fk' ), index=True ),
+            autoload=True, extend_existing=True )
+    except NoSuchTableError:
+        StoredWorkflow_table = None
+        log.debug( "Failed loading table stored_workflow" )
+    if StoredWorkflow_table is not None:
+        try:
+            col = Column( 'importable', Boolean, default=False )
+            col.create( StoredWorkflow_table )
+            assert col is StoredWorkflow_table.c.importable
+        except Exception as e:
+            log.debug( "Adding column 'importable' to stored_workflow table failed: %s" % ( str( e ) ) )
+    # Create an index on the Job.state column - changeset 2192
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+    except NoSuchTableError:
+        Job_table = None
+        log.debug( "Failed loading table job" )
+    if Job_table is not None:
+        try:
+            i = Index( 'ix_job_state', Job_table.c.state )
+            i.create()
+        except Exception as e:
+            log.debug( "Adding index to job.state column failed: %s" % ( str( e ) ) )
+    # Add all of the new tables above
+    metadata.create_all()
+    # Add 1 foreign key constraint to the history_dataset_association table
+    try:
+        HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
+    except NoSuchTableError:
+        HistoryDatasetAssociation_table = None
+        log.debug( "Failed loading table history_dataset_association" )
+    try:
+        LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryDatasetDatasetAssociation_table = None
+        log.debug( "Failed loading table library_dataset_dataset_association" )
+    if HistoryDatasetAssociation_table is not None and LibraryDatasetDatasetAssociation_table is not None:
+        try:
+            cons = ForeignKeyConstraint( [HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id],
+                                         [LibraryDatasetDatasetAssociation_table.c.id],
+                                         name='history_dataset_association_copied_from_library_dataset_da_fkey' )
+            # Create the constraint
+            cons.create()
+        except Exception as e:
+            log.debug( "Adding foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' to table 'history_dataset_association' failed: %s" % ( str( e ) ) )
+    # Add 1 foreign key constraint to the metadata_file table
+    try:
+        MetadataFile_table = Table( "metadata_file", metadata, autoload=True )
+    except NoSuchTableError:
+        MetadataFile_table = None
+        log.debug( "Failed loading table metadata_file" )
+    try:
+        LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryDatasetDatasetAssociation_table = None
+        log.debug( "Failed loading table library_dataset_dataset_association" )
+    if migrate_engine.name != 'sqlite':
+        # SQLite cannot add a foreign key to an existing table via ALTER TABLE, so skip the constraint there.
+        if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
+            try:
+                cons = ForeignKeyConstraint( [MetadataFile_table.c.lda_id],
+                                             [LibraryDatasetDatasetAssociation_table.c.id],
+                                             name='metadata_file_lda_id_fkey' )
+                # Create the constraint
+                cons.create()
+            except Exception as e:
+                log.debug( "Adding foreign key constraint 'metadata_file_lda_id_fkey' to table 'metadata_file' failed: %s" % ( str( e ) ) )
+    # Only backfill roles and default permissions if users already exist
+    cmd = "SELECT * FROM galaxy_user;"
+    users = migrate_engine.execute( cmd ).fetchall()
+    if users:
+        cmd = "SELECT * FROM role;"
+        roles = migrate_engine.execute( cmd ).fetchall()
+        if not roles:
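+            # Bootstrap the security model in three passes: insert one private
+            # role per user, rewrite each role's description, then link users to
+            # their roles through user_role_association.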
+            # Create private roles for each user - pass 1
+            cmd = \
+                "INSERT INTO role " + \
+                "SELECT %s AS id," + \
+                "%s AS create_time," + \
+                "%s AS update_time," + \
+                "email AS name," + \
+                "email AS description," + \
+                "'private' As type," + \
+                "%s AS deleted " + \
+                "FROM galaxy_user " + \
+                "ORDER BY id;"
+            cmd = cmd % ( nextval('role'), localtimestamp(), localtimestamp(), boolean_false() )
+            migrate_engine.execute( cmd )
+            # Create private roles for each user - pass 2
+            if migrate_engine.name in ['postgres', 'postgresql', 'sqlite']:
+                cmd = "UPDATE role SET description = 'Private role for ' || description;"
+            elif migrate_engine.name == 'mysql':
+                cmd = "UPDATE role SET description = CONCAT( 'Private role for ', description );"
+            migrate_engine.execute( cmd )
+            # Create private roles for each user - pass 3
+            cmd = \
+                "INSERT INTO user_role_association " + \
+                "SELECT %s AS id," + \
+                "galaxy_user.id AS user_id," + \
+                "role.id AS role_id," + \
+                "%s AS create_time," + \
+                "%s AS update_time " + \
+                "FROM galaxy_user, role " + \
+                "WHERE galaxy_user.email = role.name " + \
+                "ORDER BY galaxy_user.id;"
+            cmd = cmd % ( nextval('user_role_association'), localtimestamp(), localtimestamp() )
+            migrate_engine.execute( cmd )
+            # Create default permissions for each user
+            cmd = \
+                "INSERT INTO default_user_permissions " + \
+                "SELECT %s AS id," + \
+                "galaxy_user.id AS user_id," + \
+                "'manage permissions' AS action," + \
+                "user_role_association.role_id AS role_id " + \
+                "FROM galaxy_user " + \
+                "JOIN user_role_association ON user_role_association.user_id = galaxy_user.id " + \
+                "ORDER BY galaxy_user.id;"
+            cmd = cmd % nextval('default_user_permissions')
+            migrate_engine.execute( cmd )
+            # Create default history permissions for each active history associated with a user
+            cmd = \
+                "INSERT INTO default_history_permissions " + \
+                "SELECT %s AS id," + \
+                "history.id AS history_id," + \
+                "'manage permissions' AS action," + \
+                "user_role_association.role_id AS role_id " + \
+                "FROM history " + \
+                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
+                "WHERE history.purged = %s AND history.user_id IS NOT NULL;"
+            cmd = cmd % ( nextval('default_history_permissions'), boolean_false() )
+            migrate_engine.execute( cmd )
+            # Create "manage permissions" dataset_permissions for all activate-able datasets
+            cmd = \
+                "INSERT INTO dataset_permissions " + \
+                "SELECT %s AS id," + \
+                "%s AS create_time," + \
+                "%s AS update_time," + \
+                "'manage permissions' AS action," + \
+                "history_dataset_association.dataset_id AS dataset_id," + \
+                "user_role_association.role_id AS role_id " + \
+                "FROM history " + \
+                "JOIN history_dataset_association ON history_dataset_association.history_id = history.id " + \
+                "JOIN dataset ON history_dataset_association.dataset_id = dataset.id " + \
+                "JOIN user_role_association ON user_role_association.user_id = history.user_id " + \
+                "WHERE dataset.purged = %s AND history.user_id IS NOT NULL;"
+            cmd = cmd % ( nextval('dataset_permissions'), localtimestamp(), localtimestamp(), boolean_false() )
+            migrate_engine.execute( cmd )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    # NOTE: all new data added in the upgrade method is eliminated here via table drops
+    # Drop 1 foreign key constraint from the metadata_file table
+    try:
+        MetadataFile_table = Table( "metadata_file", metadata, autoload=True )
+    except NoSuchTableError:
+        MetadataFile_table = None
+        log.debug( "Failed loading table metadata_file" )
+    try:
+        LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryDatasetDatasetAssociation_table = None
+        log.debug( "Failed loading table library_dataset_dataset_association" )
+    if MetadataFile_table is not None and LibraryDatasetDatasetAssociation_table is not None:
+        try:
+            cons = ForeignKeyConstraint( [MetadataFile_table.c.lda_id],
+                                         [LibraryDatasetDatasetAssociation_table.c.id],
+                                         name='metadata_file_lda_id_fkey' )
+            # Drop the constraint
+            cons.drop()
+        except Exception as e:
+            log.debug( "Dropping foreign key constraint 'metadata_file_lda_id_fkey' from table 'metadata_file' failed: %s" % ( str( e ) ) )
+    # Drop 1 foreign key constraint from the history_dataset_association table
+    try:
+        HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
+    except NoSuchTableError:
+        HistoryDatasetAssociation_table = None
+        log.debug( "Failed loading table history_dataset_association" )
+    try:
+        LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryDatasetDatasetAssociation_table = None
+        log.debug( "Failed loading table library_dataset_dataset_association" )
+    if HistoryDatasetAssociation_table is not None and LibraryDatasetDatasetAssociation_table is not None:
+        try:
+            cons = ForeignKeyConstraint( [HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id],
+                                         [LibraryDatasetDatasetAssociation_table.c.id],
+                                         name='history_dataset_association_copied_from_library_dataset_da_fkey' )
+            # Drop the constraint
+            cons.drop()
+        except Exception as e:
+            log.debug( "Dropping foreign key constraint 'history_dataset_association_copied_from_library_dataset_da_fkey' from table 'history_dataset_association' failed: %s" % ( str( e ) ) )
+    # Drop all of the new tables above
+    try:
+        UserGroupAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping user_group_association table failed: %s" % str( e ) )
+    try:
+        UserRoleAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping user_role_association table failed: %s" % str( e ) )
+    try:
+        GroupRoleAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping group_role_association table failed: %s" % str( e ) )
+    try:
+        Group_table.drop()
+    except Exception as e:
+        log.debug( "Dropping galaxy_group table failed: %s" % str( e ) )
+    try:
+        DatasetPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping dataset_permissions table failed: %s" % str( e ) )
+    try:
+        LibraryPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_permissions table failed: %s" % str( e ) )
+    try:
+        LibraryFolderPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_folder_permissions table failed: %s" % str( e ) )
+    try:
+        LibraryDatasetPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_permissions table failed: %s" % str( e ) )
+    try:
+        LibraryDatasetDatasetAssociationPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_dataset_association_permissions table failed: %s" % str( e ) )
+    try:
+        LibraryItemInfoPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_permissions table failed: %s" % str( e ) )
+    try:
+        LibraryItemInfoTemplatePermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_template_permissions table failed: %s" % str( e ) )
+    try:
+        DefaultUserPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping default_user_permissions table failed: %s" % str( e ) )
+    try:
+        DefaultHistoryPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping default_history_permissions table failed: %s" % str( e ) )
+    try:
+        Role_table.drop()
+    except Exception as e:
+        log.debug( "Dropping role table failed: %s" % str( e ) )
+    try:
+        LibraryDatasetDatasetInfoAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_dataset_info_association table failed: %s" % str( e ) )
+    try:
+        LibraryDataset_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset table failed: %s" % str( e ) )
+    try:
+        LibraryDatasetDatasetAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_dataset_association table failed: %s" % str( e ) )
+    try:
+        LibraryDatasetDatasetInfoTemplateAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_dataset_info_template_association table failed: %s" % str( e ) )
+    try:
+        JobExternalOutputMetadata_table.drop()
+    except Exception as e:
+        log.debug( "Dropping job_external_output_metadata table failed: %s" % str( e ) )
+    try:
+        Library_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library table failed: %s" % str( e ) )
+    try:
+        LibraryFolder_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_folder table failed: %s" % str( e ) )
+    try:
+        LibraryItemInfoTemplateElement_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_template_element table failed: %s" % str( e ) )
+    try:
+        LibraryInfoTemplateAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_info_template_association table failed: %s" % str( e ) )
+    try:
+        LibraryFolderInfoTemplateAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_folder_info_template_association table failed: %s" % str( e ) )
+    try:
+        LibraryDatasetInfoTemplateAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_info_template_association table failed: %s" % str( e ) )
+    try:
+        LibraryInfoAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_info_association table failed: %s" % str( e ) )
+    try:
+        LibraryFolderInfoAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_folder_info_association table failed: %s" % str( e ) )
+    try:
+        LibraryDatasetInfoAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_info_association table failed: %s" % str( e ) )
+    try:
+        LibraryItemInfoElement_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_element table failed: %s" % str( e ) )
+    try:
+        LibraryItemInfo_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info table failed: %s" % str( e ) )
+    try:
+        LibraryItemInfoTemplate_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_template table failed: %s" % str( e ) )
+    # Drop the index on the Job.state column - changeset 2192
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+    except NoSuchTableError:
+        Job_table = None
+        log.debug( "Failed loading table job" )
+    if Job_table is not None:
+        try:
+            i = Index( 'ix_job_state', Job_table.c.state )
+            i.drop()
+        except Exception as e:
+            log.debug( "Dropping index from job.state column failed: %s" % ( str( e ) ) )
+    # Drop 1 column from the stored_workflow table - changeset 2328
+    try:
+        StoredWorkflow_table = Table( "stored_workflow", metadata, autoload=True )
+    except NoSuchTableError:
+        StoredWorkflow_table = None
+        log.debug( "Failed loading table stored_workflow" )
+    if StoredWorkflow_table is not None:
+        try:
+            col = StoredWorkflow_table.c.importable
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'importable' from stored_workflow table failed: %s" % ( str( e ) ) )
+    # Drop 1 column from the metadata_file table
+    try:
+        MetadataFile_table = Table( "metadata_file", metadata, autoload=True )
+    except NoSuchTableError:
+        MetadataFile_table = None
+        log.debug( "Failed loading table metadata_file" )
+    if MetadataFile_table is not None:
+        try:
+            col = MetadataFile_table.c.lda_id
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'lda_id' from metadata_file table failed: %s" % ( str( e ) ) )
+    # Drop 1 column from the history_dataset_association table
+    try:
+        HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
+    except NoSuchTableError:
+        HistoryDatasetAssociation_table = None
+        log.debug( "Failed loading table history_dataset_association" )
+    if HistoryDatasetAssociation_table is not None:
+        try:
+            col = HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'copied_from_library_dataset_dataset_association_id' from history_dataset_association table failed: %s" % ( str( e ) ) )
+    # Drop 2 columns from the galaxy_user table
+    try:
+        User_table = Table( "galaxy_user", metadata, autoload=True )
+    except NoSuchTableError:
+        User_table = None
+        log.debug( "Failed loading table galaxy_user" )
+    if User_table is not None:
+        try:
+            col = User_table.c.deleted
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'deleted' from galaxy_user table failed: %s" % ( str( e ) ) )
+        try:
+            col = User_table.c.purged
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'purged' from galaxy_user table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0004_indexes_and_defaults.py b/lib/galaxy/model/migrate/versions/0004_indexes_and_defaults.py
new file mode 100644
index 0000000..8331b9b
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0004_indexes_and_defaults.py
@@ -0,0 +1,65 @@
+import logging
+import sys
+
+from sqlalchemy import Index, MetaData, Table
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    User_table = Table( "galaxy_user", metadata, autoload=True )
+    HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
+
+    def boolean_false():
+        if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+            return False
+        elif migrate_engine.name == 'sqlite':
+            return 0
+        else:
+            raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+    # Load existing tables
+    metadata.reflect()
+    # Add 2 indexes to the galaxy_user table
+    i = Index( 'ix_galaxy_user_deleted', User_table.c.deleted )
+    try:
+        i.create()
+    except Exception as e:
+        log.debug( "Adding index 'ix_galaxy_user_deleted' to galaxy_user table failed: %s" % ( str( e ) ) )
+    i = Index( 'ix_galaxy_user_purged', User_table.c.purged )
+    try:
+        i.create()
+    except Exception as e:
+        log.debug( "Adding index 'ix_galaxy_user_purged' to galaxy_user table failed: %s" % ( str( e ) ) )
+    # Set the default data in the galaxy_user table, but only for null values
+    cmd = "UPDATE galaxy_user SET deleted = %s WHERE deleted is null"
+    cmd = cmd % boolean_false()
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Setting default data for galaxy_user.deleted column failed: %s" % ( str( e ) ) )
+    cmd = "UPDATE galaxy_user SET purged = %s WHERE purged is null"
+    cmd = cmd % boolean_false()
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Setting default data for galaxy_user.purged column failed: %s" % ( str( e ) ) )
+    # Add 1 index to the history_dataset_association table
+    i = Index( 'ix_hda_copied_from_library_dataset_dataset_association_id', HistoryDatasetAssociation_table.c.copied_from_library_dataset_dataset_association_id )
+    try:
+        i.create()
+    except Exception as e:
+        log.debug( "Adding index 'ix_hda_copied_from_library_dataset_dataset_association_id' to history_dataset_association table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # No-op: the indexes and default values added in upgrade() are left in place.
diff --git a/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py b/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py
new file mode 100644
index 0000000..62754cd
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py
@@ -0,0 +1,746 @@
+import datetime
+import errno
+import logging
+import os
+import time
+
+from sqlalchemy import and_, Boolean, Column, DateTime, false, ForeignKey, Integer, MetaData, not_, Numeric, Table, TEXT, true
+from sqlalchemy.orm import backref, mapper, relation, scoped_session, sessionmaker
+
+from galaxy.model.metadata import MetadataCollection
+from galaxy.model.custom_types import MetadataType, TrimmedString
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+now = datetime.datetime.utcnow
+metadata = MetaData()
+context = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
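+# Standalone session used by the model classes defined below; objects are
+# persisted with context.add() / context.flush(), independent of the running
+# Galaxy application's session.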
+
+
+# classes
+def get_permitted_actions( **kwds ):
+    return Bunch()
+
+
+def directory_hash_id( id ):
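+    # Split a numeric id into 3-digit path segments so no directory holds more
+    # than 1000 dataset files: directory_hash_id(1234567) -> ['001', '234'],
+    # placing the file at <file_path>/001/234/dataset_1234567.dat.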
+    s = str( id )
+    length = len( s )
+    # Shortcut -- ids 0-999 go under ../000/
+    if length < 4:
+        return [ "000" ]
+    # Pad with zeros until a multiple of three
+    padded = ( ( 3 - len( s ) % 3 ) * "0" ) + s
+    # Drop the last three digits -- 1000 files per directory
+    padded = padded[:-3]
+    # Break into chunks of three
+    return [ padded[i * 3:(i + 1) * 3] for i in range( len( padded ) // 3 ) ]
+
+
+class Dataset( object ):
+    states = Bunch( NEW='new',
+                    UPLOAD='upload',
+                    QUEUED='queued',
+                    RUNNING='running',
+                    OK='ok',
+                    EMPTY='empty',
+                    ERROR='error',
+                    DISCARDED='discarded' )
+    permitted_actions = get_permitted_actions( filter='DATASET' )
+    file_path = "/tmp/"
+    engine = None
+
+    def __init__( self, id=None, state=None, external_filename=None, extra_files_path=None, file_size=None, purgable=True ):
+        self.id = id
+        self.state = state
+        self.deleted = False
+        self.purged = False
+        self.purgable = purgable
+        self.external_filename = external_filename
+        self._extra_files_path = extra_files_path
+        self.file_size = file_size
+
+    def get_file_name( self ):
+        if not self.external_filename:
+            assert self.id is not None, "ID must be set before filename used (commit the object)"
+            # First try filename directly under file_path
+            filename = os.path.join( self.file_path, "dataset_%d.dat" % self.id )
+            # Only use that filename if it already exists (backward compatibility),
+            # otherwise construct hashed path
+            if not os.path.exists( filename ):
+                dir = os.path.join( self.file_path, *directory_hash_id( self.id ) )
+                # Create directory if it does not exist
+                try:
+                    os.makedirs( dir )
+                except OSError as e:
+                    # File Exists is okay, otherwise reraise
+                    if e.errno != errno.EEXIST:
+                        raise
+                # Return filename inside hashed directory
+                return os.path.abspath( os.path.join( dir, "dataset_%d.dat" % self.id ) )
+        else:
+            filename = self.external_filename
+        # Make filename absolute
+        return os.path.abspath( filename )
+
+    def set_file_name( self, filename ):
+        if not filename:
+            self.external_filename = None
+        else:
+            self.external_filename = filename
+    file_name = property( get_file_name, set_file_name )
+
+    @property
+    def extra_files_path( self ):
+        if self._extra_files_path:
+            path = self._extra_files_path
+        else:
+            path = os.path.join( self.file_path, "dataset_%d_files" % self.id )
+            # only use path directly under self.file_path if it exists
+            if not os.path.exists( path ):
+                path = os.path.join( os.path.join( self.file_path, *directory_hash_id( self.id ) ), "dataset_%d_files" % self.id )
+        # Make path absolute
+        return os.path.abspath( path )
+
+    def get_size( self ):
+        """Returns the size of the data on disk"""
+        if self.file_size:
+            return self.file_size
+        else:
+            try:
+                return os.path.getsize( self.file_name )
+            except OSError:
+                return 0
+
+    def set_size( self ):
+        """Returns the size of the data on disk"""
+        try:
+            if not self.file_size:
+                self.file_size = os.path.getsize( self.file_name )
+        except OSError:
+            self.file_size = 0
+
+    def has_data( self ):
+        """Detects whether there is any data"""
+        return self.get_size() > 0
+
+    def mark_deleted( self, include_children=True ):
+        self.deleted = True
+
+    # FIXME: sqlalchemy will replace this
+    def _delete(self):
+        """Remove the file that corresponds to this data"""
+        try:
+            os.remove(self.data.file_name)
+        except OSError as e:
+            log.critical('%s delete error %s' % (self.__class__.__name__, e))
+
+
+class DatasetInstance( object ):
+    """A base class for all 'dataset instances', HDAs, LDAs, etc"""
+    states = Dataset.states
+    permitted_actions = Dataset.permitted_actions
+
+    def __init__( self, id=None, hid=None, name=None, info=None, blurb=None, peek=None, extension=None,
+                  dbkey=None, metadata=None, history=None, dataset=None, deleted=False, designation=None,
+                  parent_id=None, validation_errors=None, visible=True, create_dataset=False ):
+        self.name = name or "Unnamed dataset"
+        self.id = id
+        self.info = info
+        self.blurb = blurb
+        self.peek = peek
+        self.extension = extension
+        self.designation = designation
+        self.metadata = metadata or dict()
+        if dbkey:  # dbkey is stored in metadata; only set it when truthy, or we could clobber a value supplied by the input 'metadata'
+            self.dbkey = dbkey
+        self.deleted = deleted
+        self.visible = visible
+        # Relationships
+        if not dataset and create_dataset:
+            dataset = Dataset( state=Dataset.states.NEW )
+            context.add( dataset )
+            context.flush()
+        self.dataset = dataset
+        self.parent_id = parent_id
+        self.validation_errors = validation_errors
+
+    @property
+    def ext( self ):
+        return self.extension
+
+    def get_dataset_state( self ):
+        return self.dataset.state
+
+    def set_dataset_state( self, state ):
+        self.dataset.state = state
+        context.add( self.dataset )
+        context.flush()  # flush here, because hda.flush() won't flush the Dataset object
+    state = property( get_dataset_state, set_dataset_state )
+
+    def get_file_name( self ):
+        return self.dataset.get_file_name()
+
+    def set_file_name(self, filename):
+        return self.dataset.set_file_name( filename )
+    file_name = property( get_file_name, set_file_name )
+
+    @property
+    def extra_files_path( self ):
+        return self.dataset.extra_files_path
+
+    def get_metadata( self ):
+        # The collection stores its parent via weakref (to prevent a circular
+        # reference); does a context.clear() cause the parent to be invalidated,
+        # while still copying over this non-database attribute?
+        if not hasattr( self, '_metadata_collection' ) or self._metadata_collection.parent != self:
+            self._metadata_collection = MetadataCollection( self )
+        return self._metadata_collection
+
+    def set_metadata( self, bunch ):
+        # Needs to accept a MetadataCollection, a bunch, or a dict
+        self._metadata = self.metadata.make_dict_copy( bunch )
+    metadata = property( get_metadata, set_metadata )
+    # This provides backward compatibility with the old dbkey
+    # field in the database.  That field now maps to "old_dbkey" (see mapping.py).
+
+    def get_dbkey( self ):
+        dbkey = self.metadata.dbkey
+        if not isinstance(dbkey, list):
+            dbkey = [dbkey]
+        if dbkey in [[None], []]:
+            return "?"
+        return dbkey[0]
+
+    def set_dbkey( self, value ):
+        if "dbkey" in self.datatype.metadata_spec:
+            if not isinstance(value, list):
+                self.metadata.dbkey = [value]
+            else:
+                self.metadata.dbkey = value
+    dbkey = property( get_dbkey, set_dbkey )
+
+    def get_size( self ):
+        """Returns the size of the data on disk"""
+        return self.dataset.get_size()
+
+    def set_size( self ):
+        """Returns the size of the data on disk"""
+        return self.dataset.set_size()
+
+    def has_data( self ):
+        """Detects whether there is any data"""
+        return self.dataset.has_data()
+
+    def get_raw_data( self ):
+        """Returns the full data. To stream it open the file_name and read/write as needed"""
+        return self.datatype.get_raw_data( self )
+
+    def write_from_stream( self, stream ):
+        """Writes data from a stream"""
+        self.datatype.write_from_stream(self, stream)
+
+    def set_raw_data( self, data ):
+        """Saves the data on the disc"""
+        self.datatype.set_raw_data(self, data)
+
+    def set_peek( self, is_multi_byte=False ):
+        return self.datatype.set_peek( self, is_multi_byte=is_multi_byte )
+
+    def init_meta( self, copy_from=None ):
+        return self.datatype.init_meta( self, copy_from=copy_from )
+
+    def set_meta( self, **kwd ):
+        self.clear_associated_files( metadata_safe=True )
+        return self.datatype.set_meta( self, **kwd )
+
+    def missing_meta( self, **kwd ):
+        return self.datatype.missing_meta( self, **kwd )
+
+    def as_display_type( self, type, **kwd ):
+        return self.datatype.as_display_type( self, type, **kwd )
+
+    def display_peek( self ):
+        return self.datatype.display_peek( self )
+
+    def display_name( self ):
+        return self.datatype.display_name( self )
+
+    def display_info( self ):
+        return self.datatype.display_info( self )
+
+    def get_converted_files_by_type( self, file_type ):
+        valid = []
+        for assoc in self.implicitly_converted_datasets:
+            if not assoc.deleted and assoc.type == file_type:
+                valid.append( assoc.dataset )
+        return valid
+
+    def clear_associated_files( self, metadata_safe=False, purge=False ):
+        raise Exception( 'Unimplemented' )
+
+    def get_child_by_designation(self, designation):
+        for child in self.children:
+            if child.designation == designation:
+                return child
+        return None
+
+    def add_validation_error( self, validation_error ):
+        self.validation_errors.append( validation_error )
+
+    def extend_validation_errors( self, validation_errors ):
+        self.validation_errors.extend(validation_errors)
+
+    def mark_deleted( self, include_children=True ):
+        self.deleted = True
+        if include_children:
+            for child in self.children:
+                child.mark_deleted()
+
+    def mark_undeleted( self, include_children=True ):
+        self.deleted = False
+        if include_children:
+            for child in self.children:
+                child.mark_undeleted()
+
+    def undeletable( self ):
+        if self.purged:
+            return False
+        return True
+
+    @property
+    def source_library_dataset( self ):
+        def get_source( dataset ):
+            if isinstance( dataset, LibraryDatasetDatasetAssociation ):
+                if dataset.library_dataset:
+                    return ( dataset, dataset.library_dataset )
+            if dataset.copied_from_library_dataset_dataset_association:
+                source = get_source( dataset.copied_from_library_dataset_dataset_association )
+                if source:
+                    return source
+            if dataset.copied_from_history_dataset_association:
+                source = get_source( dataset.copied_from_history_dataset_association )
+                if source:
+                    return source
+            return ( None, None )
+        return get_source( self )
+
+
+class HistoryDatasetAssociation( DatasetInstance ):
+    def __init__( self,
+                  hid=None,
+                  history=None,
+                  copied_from_history_dataset_association=None,
+                  copied_from_library_dataset_dataset_association=None,
+                  **kwd ):
+        DatasetInstance.__init__( self, **kwd )
+        self.hid = hid
+        # Relationships
+        self.history = history
+        self.copied_from_history_dataset_association = copied_from_history_dataset_association
+        self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
+
+    def copy( self, copy_children=False, parent_id=None, target_history=None ):
+        hda = HistoryDatasetAssociation( hid=self.hid,
+                                         name=self.name,
+                                         info=self.info,
+                                         blurb=self.blurb,
+                                         peek=self.peek,
+                                         extension=self.extension,
+                                         dbkey=self.dbkey,
+                                         dataset=self.dataset,
+                                         visible=self.visible,
+                                         deleted=self.deleted,
+                                         parent_id=parent_id,
+                                         copied_from_history_dataset_association=self,
+                                         history=target_history )
+        context.add( hda )
+        context.flush()
+        hda.set_size()
+        # Need to set after flushed, as MetadataFiles require dataset.id
+        hda.metadata = self.metadata
+        if copy_children:
+            for child in self.children:
+                child.copy( copy_children=copy_children, parent_id=hda.id )
+        if not self.datatype.copy_safe_peek:
+            # In some instances peek relies on dataset_id, e.g. gmaj.zip for viewing MAFs
+            hda.set_peek()
+        context.flush()
+        return hda
+
+    def to_library_dataset_dataset_association( self, target_folder, replace_dataset=None, parent_id=None ):
+        if replace_dataset:
+            # The replace_dataset param ( when not None ) refers to a LibraryDataset that is being replaced with a new version.
+            library_dataset = replace_dataset
+        else:
+            # If replace_dataset is None, the Library level permissions will be taken from the folder and applied to the new
+            # LibraryDataset, and the current user's DefaultUserPermissions will be applied to the associated Dataset.
+            library_dataset = LibraryDataset( folder=target_folder, name=self.name, info=self.info )
+            context.add( library_dataset )
+            context.flush()
+        ldda = LibraryDatasetDatasetAssociation( name=self.name,
+                                                 info=self.info,
+                                                 blurb=self.blurb,
+                                                 peek=self.peek,
+                                                 extension=self.extension,
+                                                 dbkey=self.dbkey,
+                                                 dataset=self.dataset,
+                                                 library_dataset=library_dataset,
+                                                 visible=self.visible,
+                                                 deleted=self.deleted,
+                                                 parent_id=parent_id,
+                                                 copied_from_history_dataset_association=self,
+                                                 user=self.history.user )
+        context.add( ldda )
+        context.flush()
+        # Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
+        # Must set metadata after ldda flushed, as MetadataFiles require ldda.id
+        ldda.metadata = self.metadata
+        if not replace_dataset:
+            target_folder.add_library_dataset( library_dataset, genome_build=ldda.dbkey )
+            context.add( target_folder )
+            context.flush()
+        library_dataset.library_dataset_dataset_association_id = ldda.id
+        context.add( library_dataset )
+        context.flush()
+        for child in self.children:
+            child.to_library_dataset_dataset_association( target_folder=target_folder, replace_dataset=replace_dataset, parent_id=ldda.id )
+        if not self.datatype.copy_safe_peek:
+            # In some instances peek relies on dataset_id, e.g. gmaj.zip for viewing MAFs
+            ldda.set_peek()
+        context.flush()
+        return ldda
+
+    def clear_associated_files( self, metadata_safe=False, purge=False ):
+        # metadata_safe = True means to only clear when assoc.metadata_safe == False
+        for assoc in self.implicitly_converted_datasets:
+            if not metadata_safe or not assoc.metadata_safe:
+                assoc.clear( purge=purge )
+
+
+class LibraryDatasetDatasetAssociation( DatasetInstance ):
+    def __init__( self,
+                  copied_from_history_dataset_association=None,
+                  copied_from_library_dataset_dataset_association=None,
+                  library_dataset=None,
+                  user=None,
+                  **kwd ):
+        DatasetInstance.__init__( self, **kwd )
+        self.copied_from_history_dataset_association = copied_from_history_dataset_association
+        self.copied_from_library_dataset_dataset_association = copied_from_library_dataset_dataset_association
+        self.library_dataset = library_dataset
+        self.user = user
+
+    def to_history_dataset_association( self, target_history, parent_id=None ):
+        hid = target_history._next_hid()
+        hda = HistoryDatasetAssociation( name=self.name,
+                                         info=self.info,
+                                         blurb=self.blurb,
+                                         peek=self.peek,
+                                         extension=self.extension,
+                                         dbkey=self.dbkey,
+                                         dataset=self.dataset,
+                                         visible=self.visible,
+                                         deleted=self.deleted,
+                                         parent_id=parent_id,
+                                         copied_from_library_dataset_dataset_association=self,
+                                         history=target_history,
+                                         hid=hid )
+        context.add( hda )
+        context.flush()
+        hda.metadata = self.metadata  # need to set after flushed, as MetadataFiles require dataset.id
+        for child in self.children:
+            child.to_history_dataset_association( target_history=target_history, parent_id=hda.id )
+        if not self.datatype.copy_safe_peek:
+            hda.set_peek()  # in some instances peek relies on dataset_id, e.g. gmaj.zip for viewing MAFs
+        context.flush()
+        return hda
+
+    def copy( self, copy_children=False, parent_id=None, target_folder=None ):
+        ldda = LibraryDatasetDatasetAssociation( name=self.name,
+                                                 info=self.info,
+                                                 blurb=self.blurb,
+                                                 peek=self.peek,
+                                                 extension=self.extension,
+                                                 dbkey=self.dbkey,
+                                                 dataset=self.dataset,
+                                                 visible=self.visible,
+                                                 deleted=self.deleted,
+                                                 parent_id=parent_id,
+                                                 copied_from_library_dataset_dataset_association=self,
+                                                 folder=target_folder )
+        context.add( ldda )
+        context.flush()
+        # Need to set after flushed, as MetadataFiles require dataset.id
+        ldda.metadata = self.metadata
+        if copy_children:
+            for child in self.children:
+                child.copy( copy_children=copy_children, parent_id=ldda.id )
+        if not self.datatype.copy_safe_peek:
+            # In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
+            ldda.set_peek()
+        context.flush()
+        return ldda
+
+    def clear_associated_files( self, metadata_safe=False, purge=False ):
+        return
+
+    def get_library_item_info_templates( self, template_list=None, restrict=False ):
+        # Avoid a mutable default argument: a shared default list would
+        # accumulate templates across calls.
+        if template_list is None:
+            template_list = []
+        # If restrict is True, we'll return only those templates directly associated with this LibraryDatasetDatasetAssociation
+        if self.library_dataset_dataset_info_template_associations:
+            template_list.extend( [ lddita.library_item_info_template for lddita in self.library_dataset_dataset_info_template_associations if lddita.library_item_info_template not in template_list ] )
+        self.library_dataset.get_library_item_info_templates( template_list, restrict )
+        return template_list
+
+
+class LibraryDataset( object ):
+    # This class acts as a proxy to the currently selected LDDA
+    def __init__( self, folder=None, order_id=None, name=None, info=None, library_dataset_dataset_association=None, **kwd ):
+        self.folder = folder
+        self.order_id = order_id
+        self.name = name
+        self.info = info
+        self.library_dataset_dataset_association = library_dataset_dataset_association
+
+    def set_library_dataset_dataset_association( self, ldda ):
+        self.library_dataset_dataset_association = ldda
+        ldda.library_dataset = self
+        context.add_all( ( self, ldda ) )
+        context.flush()
+
+    def get_info( self ):
+        if self.library_dataset_dataset_association:
+            return self.library_dataset_dataset_association.info
+        elif self._info:
+            return self._info
+        else:
+            return 'no info'
+
+    def set_info( self, info ):
+        self._info = info
+    info = property( get_info, set_info )
+
+    def get_name( self ):
+        if self.library_dataset_dataset_association:
+            return self.library_dataset_dataset_association.name
+        elif self._name:
+            return self._name
+        else:
+            return 'Unnamed dataset'
+
+    def set_name( self, name ):
+        self._name = name
+    name = property( get_name, set_name )
+
+    def display_name( self ):
+        return self.library_dataset_dataset_association.display_name()
+
+    def get_purged( self ):
+        return self.library_dataset_dataset_association.dataset.purged
+
+    def set_purged( self, purged ):
+        if purged:
+            raise Exception( "Not implemented" )
+        if not purged and self.purged:
+            raise Exception( "Cannot unpurge once purged" )
+    purged = property( get_purged, set_purged )
+
+    def get_library_item_info_templates( self, template_list=None, restrict=False ):
+        # Avoid a mutable default argument (see the LDDA method above).
+        if template_list is None:
+            template_list = []
+        # If restrict is True, we'll return only those templates directly associated with this LibraryDataset
+        if self.library_dataset_info_template_associations:
+            template_list.extend( [ ldita.library_item_info_template for ldita in self.library_dataset_info_template_associations if ldita.library_item_info_template not in template_list ] )
+        if restrict not in [ 'True', True ]:
+            self.folder.get_library_item_info_templates( template_list, restrict )
+        return template_list
+
+# tables
+
+
+Dataset.table = Table( "dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "state", TrimmedString( 64 ) ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "purged", Boolean, index=True, default=False ),
+    Column( "purgable", Boolean, default=True ),
+    Column( "external_filename", TEXT ),
+    Column( "_extra_files_path", TEXT ),
+    Column( 'file_size', Numeric( 15, 0 ) ) )
+
+
+HistoryDatasetAssociation.table = Table( "history_dataset_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "copied_from_history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), nullable=True ),
+    Column( "copied_from_library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "hid", Integer ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "blurb", TrimmedString( 255 ) ),
+    Column( "peek", TEXT ),
+    Column( "extension", TrimmedString( 64 ) ),
+    Column( "metadata", MetadataType(), key="_metadata" ),
+    Column( "parent_id", Integer, ForeignKey( "history_dataset_association.id" ), nullable=True ),
+    Column( "designation", TrimmedString( 255 ) ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "visible", Boolean ) )
+
+
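+# history_dataset_association and library_dataset_dataset_association reference
+# each other, so the foreign keys that close the cycle use use_alter=True:
+# SQLAlchemy creates the tables first and adds these constraints via ALTER TABLE.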
+LibraryDatasetDatasetAssociation.table = Table( "library_dataset_dataset_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), index=True ),
+    Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "copied_from_history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id", use_alter=True, name='history_dataset_association_dataset_id_fkey' ), nullable=True ),
+    Column( "copied_from_library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name='library_dataset_dataset_association_id_fkey' ), nullable=True ),
+    Column( "name", TrimmedString( 255 ) ),
+    Column( "info", TrimmedString( 255 ) ),
+    Column( "blurb", TrimmedString( 255 ) ),
+    Column( "peek", TEXT ),
+    Column( "extension", TrimmedString( 64 ) ),
+    Column( "metadata", MetadataType(), key="_metadata" ),
+    Column( "parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
+    Column( "designation", TrimmedString( 255 ) ),
+    Column( "deleted", Boolean, index=True, default=False ),
+    Column( "visible", Boolean ),
+    Column( "message", TrimmedString( 255 ) ) )
+
+LibraryDataset.table = Table( "library_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name="library_dataset_dataset_association_id_fk" ), nullable=True, index=True ),  # current version of dataset, if null, there is not a current version selected
+    Column( "order_id", Integer ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "name", TrimmedString( 255 ), key="_name" ),  # when not None/null this will supercede display in library (but not when imported into user's history?)
+    Column( "info", TrimmedString( 255 ), key="_info" ),  # when not None/null this will supercede display in library (but not when imported into user's history?)
+    Column( "deleted", Boolean, index=True, default=False ) )
+
+
+# mappers
+
+
+mapper( Dataset, Dataset.table,
+    properties=dict(
+        history_associations=relation(
+            HistoryDatasetAssociation,
+            primaryjoin=( Dataset.table.c.id == HistoryDatasetAssociation.table.c.dataset_id ) ),
+        active_history_associations=relation(
+            HistoryDatasetAssociation,
+            primaryjoin=( ( Dataset.table.c.id == HistoryDatasetAssociation.table.c.dataset_id ) & ( HistoryDatasetAssociation.table.c.deleted == false() ) ) ),
+        library_associations=relation(
+            LibraryDatasetDatasetAssociation,
+            primaryjoin=( Dataset.table.c.id == LibraryDatasetDatasetAssociation.table.c.dataset_id ) ),
+        active_library_associations=relation(
+            LibraryDatasetDatasetAssociation,
+            primaryjoin=( ( Dataset.table.c.id == LibraryDatasetDatasetAssociation.table.c.dataset_id ) & ( LibraryDatasetDatasetAssociation.table.c.deleted == false() ) ) ) ) )
+
+
+mapper( HistoryDatasetAssociation, HistoryDatasetAssociation.table,
+    properties=dict(
+        dataset=relation(
+            Dataset,
+            primaryjoin=( Dataset.table.c.id == HistoryDatasetAssociation.table.c.dataset_id ), lazy=False ),
+        # .history defined in History mapper
+        copied_to_history_dataset_associations=relation(
+            HistoryDatasetAssociation,
+            primaryjoin=( HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id == HistoryDatasetAssociation.table.c.id ),
+            backref=backref( "copied_from_history_dataset_association", primaryjoin=( HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id == HistoryDatasetAssociation.table.c.id ), remote_side=[HistoryDatasetAssociation.table.c.id], uselist=False ) ),
+        copied_to_library_dataset_dataset_associations=relation(
+            LibraryDatasetDatasetAssociation,
+            primaryjoin=( HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ),
+            backref=backref( "copied_from_history_dataset_association", primaryjoin=( HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ), remote_side=[LibraryDatasetDatasetAssociation.table.c.id], uselist=False ) ),
+        children=relation(
+            HistoryDatasetAssociation,
+            primaryjoin=( HistoryDatasetAssociation.table.c.parent_id == HistoryDatasetAssociation.table.c.id ),
+            backref=backref( "parent", primaryjoin=( HistoryDatasetAssociation.table.c.parent_id == HistoryDatasetAssociation.table.c.id ), remote_side=[HistoryDatasetAssociation.table.c.id], uselist=False ) ),
+        visible_children=relation(
+            HistoryDatasetAssociation,
+            primaryjoin=( ( HistoryDatasetAssociation.table.c.parent_id == HistoryDatasetAssociation.table.c.id ) & ( HistoryDatasetAssociation.table.c.visible == true() ) ) ) ) )
+
+mapper( LibraryDatasetDatasetAssociation, LibraryDatasetDatasetAssociation.table,
+    properties=dict(
+        dataset=relation( Dataset ),
+        library_dataset=relation( LibraryDataset,
+        primaryjoin=( LibraryDatasetDatasetAssociation.table.c.library_dataset_id == LibraryDataset.table.c.id ) ),
+        copied_to_library_dataset_dataset_associations=relation(
+            LibraryDatasetDatasetAssociation,
+            primaryjoin=( LibraryDatasetDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ),
+            backref=backref( "copied_from_library_dataset_dataset_association", primaryjoin=( LibraryDatasetDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ), remote_side=[LibraryDatasetDatasetAssociation.table.c.id] ) ),
+        copied_to_history_dataset_associations=relation(
+            HistoryDatasetAssociation,
+            primaryjoin=( HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ),
+            backref=backref( "copied_from_library_dataset_dataset_association", primaryjoin=( HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ), remote_side=[LibraryDatasetDatasetAssociation.table.c.id], uselist=False ) ),
+        children=relation(
+            LibraryDatasetDatasetAssociation,
+            primaryjoin=( LibraryDatasetDatasetAssociation.table.c.parent_id == LibraryDatasetDatasetAssociation.table.c.id ),
+            backref=backref( "parent", primaryjoin=( LibraryDatasetDatasetAssociation.table.c.parent_id == LibraryDatasetDatasetAssociation.table.c.id ), remote_side=[LibraryDatasetDatasetAssociation.table.c.id] ) ),
+        visible_children=relation(
+            LibraryDatasetDatasetAssociation,
+            primaryjoin=( ( LibraryDatasetDatasetAssociation.table.c.parent_id == LibraryDatasetDatasetAssociation.table.c.id ) & ( LibraryDatasetDatasetAssociation.table.c.visible == true() ) ) ) ) )
+
+mapper( LibraryDataset, LibraryDataset.table,
+    properties=dict(
+        library_dataset_dataset_association=relation( LibraryDatasetDatasetAssociation, primaryjoin=( LibraryDataset.table.c.library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ) ),
+        expired_datasets=relation( LibraryDatasetDatasetAssociation, foreign_keys=[LibraryDataset.table.c.id, LibraryDataset.table.c.library_dataset_dataset_association_id ], primaryjoin=( ( LibraryDataset.table.c.id == LibraryDatasetDatasetAssociation.table.c.library_dataset_id ) & ( not_( LibraryDataset.table.c.library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ) ) ), viewonly=True, uselist=True ) ) )
+
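+# A hypothetical usage sketch of the relations mapped above (not executed by
+# the migration itself; 'context' is the session set up earlier in this script):
+#
+#     dataset = context.query( Dataset ).first()
+#     dataset.active_history_associations  # HDAs whose deleted flag is False
+#     dataset.library_associations         # all LDDAs pointing at this dataset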
+
+def __guess_dataset_by_filename( filename ):
+    """Return a guessed dataset by filename"""
+    try:
+        fields = os.path.split( filename )
+        if fields:
+            if fields[-1].startswith( 'dataset_' ) and fields[-1].endswith( '.dat' ):  # dataset_%d.dat
+                return Dataset.get( int( fields[-1][ len( 'dataset_' ): -len( '.dat' ) ] ) )
+    except Exception:
+        pass  # some parsing error; we can't guess the Dataset
+    return None
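+# For example (ids are hypothetical): a file stored as
+# '/galaxy/database/files/000/dataset_42.dat' yields Dataset.get( 42 ), while
+# a filename that does not match the 'dataset_<id>.dat' pattern returns None.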
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    log.debug( "Fixing a discrepancy concerning deleted shared history items." )
+    affected_items = 0
+    start_time = time.time()
+    for dataset in context.query( Dataset ).filter( and_( Dataset.deleted == true(), Dataset.purged == false() ) ):
+        for dataset_instance in dataset.history_associations + dataset.library_associations:
+            if not dataset_instance.deleted:
+                dataset.deleted = False
+                if dataset.file_size in [ None, 0 ]:
+                    dataset.set_size()  # Restore filesize
+                affected_items += 1
+                break
+    context.flush()
+    log.debug( "%i items affected, and restored." % ( affected_items ) )
+    log.debug( "Time elapsed: %s" % ( time.time() - start_time ) )
+
+    # fix share before hda
+    log.debug( "Fixing a discrepancy concerning cleaning up deleted history items shared before HDAs." )
+    dataset_by_filename = {}
+    changed_associations = 0
+    start_time = time.time()
+    for dataset in context.query( Dataset ).filter( Dataset.external_filename.like( '%dataset_%.dat' ) ):
+        if dataset.file_name in dataset_by_filename:
+            guessed_dataset = dataset_by_filename[ dataset.file_name ]
+        else:
+            guessed_dataset = __guess_dataset_by_filename( dataset.file_name )
+            if guessed_dataset and dataset.file_name != guessed_dataset.file_name:  # not os.path.samefile( dataset.file_name, guessed_dataset.file_name ):
+                guessed_dataset = None
+            dataset_by_filename[ dataset.file_name ] = guessed_dataset
+
+        if guessed_dataset is not None and guessed_dataset.id != dataset.id:  # could we have a self referential dataset?
+            for dataset_instance in dataset.history_associations + dataset.library_associations:
+                dataset_instance.dataset = guessed_dataset
+                changed_associations += 1
+            # mark original Dataset as deleted and purged, it is no longer in use, but do not delete file_name contents
+            dataset.deleted = True
+            dataset.external_filename = "Dataset was result of share before HDA, and has been replaced: %s mapped to Dataset %s" % ( dataset.external_filename, guessed_dataset.id )
+            dataset.purged = True  # we don't really purge the file here, but we mark it as purged, since this dataset is now defunct
+    context.flush()
+    log.debug( "%i items affected, and restored." % ( changed_associations ) )
+    log.debug( "Time elapsed: %s" % ( time.time() - start_time ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    log.debug( "Downgrade is not possible." )
diff --git a/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py b/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
new file mode 100644
index 0000000..f81b725
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
@@ -0,0 +1,65 @@
+"""
+This migration script changes certain values in the history_dataset_association.extension
+column: 'qual' is changed to 'qual454' or 'qualsolexa', depending on the dataset's peek.
+"""
+from __future__ import print_function
+
+import logging
+import sys
+
+from sqlalchemy import Index, MetaData, Table
+from sqlalchemy.orm import scoped_session, sessionmaker
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This migration script changes certain values in the history_dataset_association.extension")
+    print("column, specifically 'qual' is chaged to be 'qual454'.")
+    print("========================================")
+
+
+def upgrade(migrate_engine):
+    display_migration_details()
+    metadata.bind = migrate_engine
+    db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+    HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
+    # Load existing tables
+    metadata.reflect()
+    # Add a temporary index on the extension column of the history_dataset_association table
+    i = Index( 'ix_hda_extension', HistoryDatasetAssociation_table.c.extension )
+    try:
+        i.create()
+    except Exception as e:
+        log.debug( "Adding index 'ix_hda_extension' to history_dataset_association table failed: %s" % ( str( e ) ) )
+
+    # Rewrite 'qual' extensions in the history_dataset_association table, using the peek to tell 454 from Solexa quality files
+    cmd = "UPDATE history_dataset_association SET extension = 'qual454' WHERE extension = 'qual' and peek like \'>%%\'"
+    try:
+        db_session.execute( cmd )
+    except Exception as e:
+        log.debug( "Resetting extension qual to qual454 in history_dataset_association failed: %s" % ( str( e ) ) )
+    cmd = "UPDATE history_dataset_association SET extension = 'qualsolexa' WHERE extension = 'qual' and peek not like \'>%%\'"
+    try:
+        db_session.execute( cmd )
+    except Exception as e:
+        log.debug( "Resetting extension qual to qualsolexa in history_dataset_association failed: %s" % ( str( e ) ) )
+    # Drop the temporary index now that the updates are done
+    try:
+        i.drop()
+    except Exception as e:
+        log.debug( "Dropping index 'ix_hda_extension' to history_dataset_association table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0007_sharing_histories.py b/lib/galaxy/model/migrate/versions/0007_sharing_histories.py
new file mode 100644
index 0000000..c40f076
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0007_sharing_histories.py
@@ -0,0 +1,84 @@
+"""
+This migration script creates the new history_user_share_association table, and adds
+a new boolean type column to the history table.  This provides support for sharing
+histories in the same way that workflows are shared.
+"""
+from __future__ import print_function
+
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, ForeignKey, Integer, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This migration script creates the new history_user_share_association table, and adds")
+    print("a new boolean type column to the history table.  This provides support for sharing")
+    print("histories in the same way that workflows are shared.")
+    print("========================================")
+
+
+HistoryUserShareAssociation_table = Table( "history_user_share_association", metadata,
+                                           Column( "id", Integer, primary_key=True ),
+                                           Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                           Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
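+# A hypothetical row ( history_id=3, user_id=7 ) in the table above records
+# that history 3 has been shared with user 7, mirroring the existing workflow
+# sharing associations.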
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    # Create the history_user_share_association table
+    try:
+        HistoryUserShareAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating history_user_share_association table failed: %s" % str( e ) )
+    # Add 1 column to the history table
+    try:
+        History_table = Table( "history", metadata, autoload=True )
+    except NoSuchTableError:
+        History_table = None
+        log.debug( "Failed loading table history" )
+    if History_table is not None:
+        try:
+            col = Column( 'importable', Boolean, index=True, default=False )
+            col.create( History_table, index_name='ix_history_importable')
+            assert col is History_table.c.importable
+        except Exception as e:
+            log.debug( "Adding column 'importable' to history table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    # Drop 1 column from the history table
+    try:
+        History_table = Table( "history", metadata, autoload=True )
+    except NoSuchTableError:
+        History_table = None
+        log.debug( "Failed loading table history" )
+    if History_table is not None:
+        try:
+            col = History_table.c.importable
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'importable' from history table failed: %s" % ( str( e ) ) )
+    # Drop the history_user_share_association table
+    try:
+        HistoryUserShareAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping history_user_share_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py b/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
new file mode 100644
index 0000000..5499381
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
@@ -0,0 +1,206 @@
+"""
+This migration script adds the following new tables for supporting Galaxy forms:
+1) form_definition_current
+2) form_definition
+3) form_values
+4) request_type
+5) request
+6) sample
+7) sample_state
+8) sample_event
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from migrate import ForeignKeyConstraint
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType, TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This migration script adds the following new tables for supporting Galaxy forms:")
+    print("1) form_definition_current")
+    print("2) form_definition")
+    print("3) form_values")
+    print("4) request_type")
+    print("5) request")
+    print("6) sample")
+    print("7) sample_state")
+    print("8) sample_event")
+    print("========================================")
+
+
+FormDefinitionCurrent_table = Table('form_definition_current', metadata,
+                                    Column( "id", Integer, primary_key=True),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                                    Column( "latest_form_id", Integer, index=True ),
+                                    Column( "deleted", Boolean, index=True, default=False ))
+
+FormDefinition_table = Table('form_definition', metadata,
+                             Column( "id", Integer, primary_key=True),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "name", TrimmedString( 255 ), nullable=False ),
+                             Column( "desc", TEXT ),
+                             Column( "form_definition_current_id", Integer, ForeignKey( "form_definition_current.id" ), index=True, nullable=False ),
+                             Column( "fields", JSONType()) )
+
+FormValues_table = Table('form_values', metadata,
+                         Column( "id", Integer, primary_key=True),
+                         Column( "create_time", DateTime, default=now ),
+                         Column( "update_time", DateTime, default=now, onupdate=now ),
+                         Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                         Column( "content", JSONType()) )
+
+RequestType_table = Table('request_type', metadata,
+                          Column( "id", Integer, primary_key=True),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "name", TrimmedString( 255 ), nullable=False ),
+                          Column( "desc", TEXT ),
+                          Column( "request_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                          Column( "sample_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ) )
+
+Request_table = Table('request', metadata,
+                      Column( "id", Integer, primary_key=True),
+                      Column( "create_time", DateTime, default=now ),
+                      Column( "update_time", DateTime, default=now, onupdate=now ),
+                      Column( "name", TrimmedString( 255 ), nullable=False ),
+                      Column( "desc", TEXT ),
+                      Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+                      Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
+                      Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                      Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
+                      Column( "deleted", Boolean, index=True, default=False ) )
+
+Sample_table = Table('sample', metadata,
+                     Column( "id", Integer, primary_key=True),
+                     Column( "create_time", DateTime, default=now ),
+                     Column( "update_time", DateTime, default=now, onupdate=now ),
+                     Column( "name", TrimmedString( 255 ), nullable=False ),
+                     Column( "desc", TEXT ),
+                     Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+                     Column( "request_id", Integer, ForeignKey( "request.id" ), index=True ),
+                     Column( "deleted", Boolean, index=True, default=False )  )
+
+SampleState_table = Table('sample_state', metadata,
+                          Column( "id", Integer, primary_key=True),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "name", TrimmedString( 255 ), nullable=False ),
+                          Column( "desc", TEXT ),
+                          Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ) )
+
+SampleEvent_table = Table('sample_event', metadata,
+                          Column( "id", Integer, primary_key=True),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
+                          Column( "sample_state_id", Integer, ForeignKey( "sample_state.id" ), index=True ),
+                          Column( "comment", TEXT ) )
+
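+# Sketch of the intended data flow (field names are hypothetical): a
+# form_definition row stores its fields as JSON, e.g.
+#     fields = [ { "name": "lab", "type": "text" } ]
+# and each submission lands in form_values as matching JSON content, e.g.
+#     content = { "lab": "Sequencing Core" }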
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    # Add all of the new tables above
+#    metadata.create_all()
+    try:
+        FormDefinitionCurrent_table.create()
+    except Exception as e:
+        log.debug( "Creating form_definition_current table failed: %s" % str( e ) )
+    try:
+        FormDefinition_table.create()
+    except Exception as e:
+        log.debug( "Creating form_definition table failed: %s" % str( e ) )
+    # Add 1 foreign key constraint to the form_definition_current table
+    if FormDefinitionCurrent_table is not None and FormDefinition_table is not None:
+        try:
+            cons = ForeignKeyConstraint( [FormDefinitionCurrent_table.c.latest_form_id],
+                                         [FormDefinition_table.c.id],
+                                         name='form_definition_current_latest_form_id_fk' )
+            # Create the constraint
+            cons.create()
+        except Exception as e:
+            log.debug( "Adding foreign key constraint 'form_definition_current_latest_form_id_fk' to table 'form_definition_current' failed: %s" % ( str( e ) ) )
+    try:
+        FormValues_table.create()
+    except Exception as e:
+        log.debug( "Creating form_values table failed: %s" % str( e ) )
+    try:
+        RequestType_table.create()
+    except Exception as e:
+        log.debug( "Creating request_type table failed: %s" % str( e ) )
+    try:
+        Request_table.create()
+    except Exception as e:
+        log.debug( "Creating request table failed: %s" % str( e ) )
+    try:
+        Sample_table.create()
+    except Exception as e:
+        log.debug( "Creating sample table failed: %s" % str( e ) )
+    try:
+        SampleState_table.create()
+    except Exception as e:
+        log.debug( "Creating sample_state table failed: %s" % str( e ) )
+    try:
+        SampleEvent_table.create()
+    except Exception as e:
+        log.debug( "Creating sample_event table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    try:
+        FormDefinition_table.drop()
+    except Exception as e:
+        log.debug( "Dropping form_definition table failed: %s" % str( e ) )
+    try:
+        FormDefinitionCurrent_table.drop()
+    except Exception as e:
+        log.debug( "Dropping form_definition_current table failed: %s" % str( e ) )
+    try:
+        FormValues_table.drop()
+    except Exception as e:
+        log.debug( "Dropping form_values table failed: %s" % str( e ) )
+    try:
+        Request_table.drop()
+    except Exception as e:
+        log.debug( "Dropping request table failed: %s" % str( e ) )
+    try:
+        RequestType_table.drop()
+    except Exception as e:
+        log.debug( "Dropping request_type table failed: %s" % str( e ) )
+    try:
+        Sample_table.drop()
+    except Exception as e:
+        log.debug( "Dropping sample table failed: %s" % str( e ) )
+    try:
+        SampleState_table.drop()
+    except Exception as e:
+        log.debug( "Dropping sample_state table failed: %s" % str( e ) )
+    try:
+        SampleEvent_table.drop()
+    except Exception as e:
+        log.debug( "Dropping sample_event table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0009_request_table.py b/lib/galaxy/model/migrate/versions/0009_request_table.py
new file mode 100644
index 0000000..536ac69
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0009_request_table.py
@@ -0,0 +1,62 @@
+"""
+This migration script adds a new column to 2 tables:
+1) a new boolean type column named 'submitted' to the 'request' table
+2) a new string type column named 'bar_code' to the 'sample' table
+"""
+from __future__ import print_function
+
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This migration script adds a new column to 2 tables:")
+    print("1) a new boolean type column named 'submitted' to the 'request' table")
+    print("2) a new string type column named 'bar_code' to the 'sample' table")
+    print("========================================")
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    Request_table = Table( "request", metadata, autoload=True )
+    Sample_table = Table( "sample", metadata, autoload=True )
+    metadata.reflect()
+    # Add 1 column to the request table
+    if Request_table is not None:
+        try:
+            col = Column( 'submitted', Boolean, default=False )
+            col.create( Request_table)
+            assert col is Request_table.c.submitted
+        except Exception as e:
+            log.debug( "Adding column 'submitted' to request table failed: %s" % ( str( e ) ) )
+
+    # Add 1 column to the sample table
+    if Sample_table is not None:
+        try:
+            col = Column( "bar_code", TrimmedString( 255 ), index=True )
+            col.create( Sample_table, index_name='ix_sample_bar_code')
+            assert col is Sample_table.c.bar_code
+        except Exception as e:
+            log.debug( "Adding column 'bar_code' to sample table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py b/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
new file mode 100644
index 0000000..ccd7855
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
@@ -0,0 +1,72 @@
+"""
+This migration script adds the history_dataset_association_display_at_authorization table,
+which allows 'private' datasets to be displayed at external sites without making them public.
+If using mysql, this script will display the following error, which is corrected in the next
+migration script:
+
+history_dataset_association_display_at_authorization table failed: (OperationalError)
+(1059, "Identifier name 'ix_history_dataset_association_display_at_authorization_update_time'
+is too long")
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This migration script adds the history_dataset_association_display_at_authorization table, which")
+    print("allows 'private' datasets to be displayed at external sites without making them public.")
+    print("")
+    print("If using mysql, this script will display the following error, which is corrected in the next migration")
+    print("script: history_dataset_association_display_at_authorization table failed:  (OperationalError)")
+    print("(1059, 'Identifier name  'ix_history_dataset_association_display_at_authorization_update_time'")
+    print("is too long.")
+    print("========================================")
+
+
+HistoryDatasetAssociationDisplayAtAuthorization_table = Table( "history_dataset_association_display_at_authorization", metadata,
+                                                               Column( "id", Integer, primary_key=True ),
+                                                               Column( "create_time", DateTime, default=now ),
+                                                               Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+                                                               Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+                                                               Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                               Column( "site", TrimmedString( 255 ) ) )
+
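+# A hypothetical row ( history_dataset_association_id=12, user_id=7,
+# site='ucsc' ) in the table above authorizes displaying user 7's otherwise
+# private dataset 12 at the external 'ucsc' display site.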
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    try:
+        HistoryDatasetAssociationDisplayAtAuthorization_table.create()
+    except Exception as e:
+        log.debug( "Creating history_dataset_association_display_at_authorization table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    try:
+        HistoryDatasetAssociationDisplayAtAuthorization_table.drop()
+    except Exception as e:
+        log.debug( "Dropping history_dataset_association_display_at_authorization table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
new file mode 100644
index 0000000..680ac57
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
@@ -0,0 +1,67 @@
+"""
+This script fixes a problem introduced in 0010_hda_display_at_authz_table.py.  MySQL has a
+name length limit and thus the index "ix_hdadaa_history_dataset_association_id" has to be
+manually created.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This script fixes a problem introduced in the previous migration script ( 9->10 ).  MySQL")
+    print("has a name length limit and thus the index 'ix_hdadaa_history_dataset_association_id' has")
+    print("to be manually created.")
+    print("========================================")
+
+
+HistoryDatasetAssociationDisplayAtAuthorization_table = Table( "history_dataset_association_display_at_authorization", metadata,
+                                                               Column( "id", Integer, primary_key=True ),
+                                                               Column( "create_time", DateTime, default=now ),
+                                                               Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+                                                               Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+                                                               Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                               Column( "site", TrimmedString( 255 ) ) )
+
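+# The abbreviated name 'ix_hdadaa_history_dataset_association_id' is used
+# because MySQL limits identifier names to 64 characters; the auto-generated
+# name from migration 0010 exceeded that limit, while this one fits.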
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    if migrate_engine.name == 'mysql':
+        # Load existing tables
+        metadata.reflect()
+        i = Index( "ix_hdadaa_history_dataset_association_id", HistoryDatasetAssociationDisplayAtAuthorization_table.c.history_dataset_association_id )
+        try:
+            i.create()
+        except Exception as e:
+            log.debug( "Adding index 'ix_hdadaa_history_dataset_association_id' to table 'history_dataset_association_display_at_authorization' table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    if migrate_engine.name == 'mysql':
+        # Load existing tables
+        metadata.reflect()
+        i = Index( "ix_hdadaa_history_dataset_association_id", HistoryDatasetAssociationDisplayAtAuthorization_table.c.history_dataset_association_id )
+        try:
+            i.drop()
+        except Exception as e:
+            log.debug( "Removing index 'ix_hdadaa_history_dataset_association_id' from table 'history_dataset_association_display_at_authorization' table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0012_user_address.py b/lib/galaxy/model/migrate/versions/0012_user_address.py
new file mode 100644
index 0000000..5091e28
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0012_user_address.py
@@ -0,0 +1,99 @@
+"""
+This script adds a new user_address table that is currently only used with sample requests, where
+a user can select from a list of his addresses to associate with the request.  This script also
+drops the request.submitted column which was boolean and replaces it with a request.state column
+which is a string, allowing for more flexibility with request states.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This script adds a new user_address table that is currently only used with sample requests, where")
+    print("a user can select from a list of his addresses to associate with the request.  This script also")
+    print("drops the request.submitted column which was boolean and replaces it with a request.state column")
+    print("which is a string, allowing for more flexibility with request states.")
+    print("========================================")
+
+
+UserAddress_table = Table( "user_address", metadata,
+                           Column( "id", Integer, primary_key=True),
+                           Column( "create_time", DateTime, default=now ),
+                           Column( "update_time", DateTime, default=now, onupdate=now ),
+                           Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                           Column( "desc", TEXT),
+                           Column( "name", TrimmedString( 255 ), nullable=False),
+                           Column( "institution", TrimmedString( 255 )),
+                           Column( "address", TrimmedString( 255 ), nullable=False),
+                           Column( "city", TrimmedString( 255 ), nullable=False),
+                           Column( "state", TrimmedString( 255 ), nullable=False),
+                           Column( "postal_code", TrimmedString( 255 ), nullable=False),
+                           Column( "country", TrimmedString( 255 ), nullable=False),
+                           Column( "phone", TrimmedString( 255 )),
+                           Column( "deleted", Boolean, index=True, default=False ),
+                           Column( "purged", Boolean, index=True, default=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    # Add all of the new tables above
+    try:
+        UserAddress_table.create()
+    except Exception as e:
+        log.debug( "Creating user_address table failed: %s" % str( e ) )
+    # Add 1 column to the request_type table
+    try:
+        RequestType_table = Table( "request_type", metadata, autoload=True )
+    except NoSuchTableError:
+        RequestType_table = None
+        log.debug( "Failed loading table request_type" )
+    if RequestType_table is not None:
+        try:
+            col = Column( "deleted", Boolean, index=True, default=False )
+            col.create( RequestType_table, index_name='ix_request_type_deleted')
+            assert col is RequestType_table.c.deleted
+        except Exception as e:
+            log.debug( "Adding column 'deleted' to request_type table failed: %s" % ( str( e ) ) )
+
+    # Delete the submitted column
+    # Dropping a column fails under sqlite, so skip the drop there; the leftover column causes no conflicts later
+    try:
+        Request_table = Table( "request", metadata, autoload=True )
+    except NoSuchTableError:
+        Request_table = None
+        log.debug( "Failed loading table request" )
+    if Request_table is not None:
+        if migrate_engine.name != 'sqlite':
+            # DBTODO drop from table doesn't work in sqlite w/ sqlalchemy-migrate .6+
+            Request_table.c.submitted.drop()
+        col = Column( "state", TrimmedString( 255 ), index=True  )
+        col.create( Request_table, index_name='ix_request_state')
+        assert col is Request_table.c.state
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py b/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py
new file mode 100644
index 0000000..dabafd7
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py
@@ -0,0 +1,247 @@
+"""
+This migration script eliminates all of the tables that were used for the 1st version of the
+library templates where template fields and contents were each stored as a separate table row
+in various library item tables.  All of these tables are dropped in this script, eliminating all
+existing template data.  A total of 14 existing tables are dropped.
+
+We're now basing library templates on forms, so field contents are
+stored as a jsonified list in the form_values table.  This script introduces the following 3
+new association tables:
+1) library_info_association
+2) library_folder_info_association
+3) library_dataset_dataset_info_association
+
+If using mysql, this script will throw an (OperationalError) exception due to a long index name on
+the library_dataset_dataset_info_association table, which is OK because the script creates an index
+with a shortened name.
+"""
+from __future__ import print_function
+
+import logging
+import sys
+
+from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This migration script eliminates all of the tables that were used for the 1st version of the")
+    print("library templates where template fields and contents were each stored as a separate table row")
+    print("in various library item tables.  All of these tables are dropped in this script, eliminating all")
+    print("existing template data.  A total of 14 existing tables are dropped.")
+    print("")
+    print("We're now basing library templates on Galaxy forms, so field contents are stored as a jsonified")
+    print("list in the form_values table.  This script introduces the following 3 new association tables:")
+    print("1) library_info_association")
+    print("2) library_folder_info_association")
+    print("3) library_dataset_dataset_info_association")
+    print("")
+    print("If using mysql, this script will throw an (OperationalError) exception due to a long index name")
+    print("on the library_dataset_dataset_info_association table, which is OK because the script creates")
+    print("an index with a shortened name.")
+    print("========================================")
+
+
+LibraryInfoAssociation_table = Table( 'library_info_association', metadata,
+                                      Column( "id", Integer, primary_key=True),
+                                      Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
+                                      Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                                      Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
+
+LibraryFolderInfoAssociation_table = Table( 'library_folder_info_association', metadata,
+                                            Column( "id", Integer, primary_key=True),
+                                            Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+                                            Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                                            Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
+
+LibraryDatasetDatasetInfoAssociation_table = Table( 'library_dataset_dataset_info_association', metadata,
+                                                    Column( "id", Integer, primary_key=True),
+                                                    Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
+                                                    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                                                    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
+
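+# Sketch of how the new association tables tie templates to items (ids are
+# hypothetical): a library_info_association row ( library_id=1,
+# form_definition_id=5, form_values_id=9 ) attaches form 5 as library 1's
+# template, with the filled-in field contents stored as the jsonified list in
+# form_values row 9.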
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    # Drop all of the original library_item_info tables
+    # NOTE: all existing library item info template data is eliminated here via the table drops
+    try:
+        LibraryItemInfoPermissions_table = Table( "library_item_info_permissions", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryItemInfoPermissions_table = None
+        log.debug( "Failed loading table library_item_info_permissions" )
+    try:
+        LibraryItemInfoPermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_permissions table failed: %s" % str( e ) )
+
+    try:
+        LibraryItemInfoTemplatePermissions_table = Table( "library_item_info_template_permissions", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryItemInfoTemplatePermissions_table = None
+        log.debug( "Failed loading table library_item_info_template_permissions" )
+    try:
+        LibraryItemInfoTemplatePermissions_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_template_permissions table failed: %s" % str( e ) )
+
+    try:
+        LibraryItemInfoElement_table = Table( "library_item_info_element", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryItemInfoElement_table = None
+        log.debug( "Failed loading table library_item_info_element" )
+    try:
+        LibraryItemInfoElement_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_element table failed: %s" % str( e ) )
+
+    try:
+        LibraryItemInfoTemplateElement_table = Table( "library_item_info_template_element", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryItemInfoTemplateElement_table = None
+        log.debug( "Failed loading table library_item_info_template_element" )
+    try:
+        LibraryItemInfoTemplateElement_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_template_element table failed: %s" % str( e ) )
+
+    try:
+        LibraryInfoTemplateAssociation_table = Table( "library_info_template_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryInfoTemplateAssociation_table = None
+        log.debug( "Failed loading table library_info_template_association" )
+    try:
+        LibraryInfoTemplateAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_info_template_association table failed: %s" % str( e ) )
+
+    try:
+        LibraryFolderInfoTemplateAssociation_table = Table( "library_folder_info_template_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryFolderInfoTemplateAssociation_table = None
+        log.debug( "Failed loading table library_folder_info_template_association" )
+    try:
+        LibraryFolderInfoTemplateAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_folder_info_template_association table failed: %s" % str( e ) )
+
+    try:
+        LibraryDatasetInfoTemplateAssociation_table = Table( "library_dataset_info_template_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryDatasetInfoTemplateAssociation_table = None
+        log.debug( "Failed loading table library_dataset_info_template_association" )
+    try:
+        LibraryDatasetInfoTemplateAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_info_template_association table failed: %s" % str( e ) )
+
+    try:
+        LibraryDatasetDatasetInfoTemplateAssociation_table = Table( "library_dataset_dataset_info_template_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryDatasetDatasetInfoTemplateAssociation_table = None
+        log.debug( "Failed loading table library_dataset_dataset_info_template_association" )
+    try:
+        LibraryDatasetDatasetInfoTemplateAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_dataset_info_template_association table failed: %s" % str( e ) )
+
+    try:
+        LibraryInfoAssociation_table = Table( "library_info_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryInfoAssociation_table = None
+        log.debug( "Failed loading table library_info_association" )
+    try:
+        LibraryInfoAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_info_association table failed: %s" % str( e ) )
+
+    try:
+        LibraryFolderInfoAssociation_table = Table( "library_folder_info_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryFolderInfoAssociation_table = None
+        log.debug( "Failed loading table library_folder_info_association" )
+    try:
+        LibraryFolderInfoAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_folder_info_association table failed: %s" % str( e ) )
+
+    try:
+        LibraryDatasetInfoAssociation_table = Table( "library_dataset_info_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryDatasetInfoAssociation_table = None
+        log.debug( "Failed loading table library_dataset_info_association" )
+    try:
+        LibraryDatasetInfoAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_info_association table failed: %s" % str( e ) )
+
+    try:
+        LibraryDatasetDatasetInfoAssociation_table = Table( "library_dataset_dataset_info_association", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryDatasetDatasetInfoAssociation_table = None
+        log.debug( "Failed loading table library_dataset_dataset_info_association" )
+    try:
+        LibraryDatasetDatasetInfoAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_dataset_dataset_info_association table failed: %s" % str( e ) )
+
+    try:
+        LibraryItemInfo_table = Table( "library_item_info", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryItemInfo_table = None
+        log.debug( "Failed loading table library_item_info" )
+    try:
+        LibraryItemInfo_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info table failed: %s" % str( e ) )
+
+    try:
+        LibraryItemInfoTemplate_table = Table( "library_item_info_template", metadata, autoload=True )
+    except NoSuchTableError:
+        LibraryItemInfoTemplate_table = None
+        log.debug( "Failed loading table library_item_info_template" )
+    try:
+        LibraryItemInfoTemplate_table.drop()
+    except Exception as e:
+        log.debug( "Dropping library_item_info_template table failed: %s" % str( e ) )
+
+    # Create all new tables above
+    try:
+        LibraryInfoAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating library_info_association table failed: %s" % str( e ) )
+    try:
+        LibraryFolderInfoAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating library_folder_info_association table failed: %s" % str( e ) )
+    try:
+        LibraryDatasetDatasetInfoAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating library_dataset_dataset_info_association table failed: %s" % str( e ) )
+    # Fix index on LibraryDatasetDatasetInfoAssociation_table for mysql
+    if migrate_engine.name == 'mysql':
+        # Load existing tables
+        metadata.reflect()
+        i = Index( "ix_lddaia_ldda_id", LibraryDatasetDatasetInfoAssociation_table.c.library_dataset_dataset_association_id )
+        try:
+            i.create()
+        except Exception as e:
+            log.debug( "Adding index 'ix_lddaia_ldda_id' to table 'library_dataset_dataset_info_association' table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    log.debug( "Downgrade is not possible." )
diff --git a/lib/galaxy/model/migrate/versions/0014_pages.py b/lib/galaxy/model/migrate/versions/0014_pages.py
new file mode 100644
index 0000000..4978864
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0014_pages.py
@@ -0,0 +1,74 @@
+"""
+Migration script to add support for "Pages".
+  1) Creates Page and PageRevision tables
+  2) Adds username column to User table
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, MetaData, String, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+Page_table = Table( "page", metadata,
+                    Column( "id", Integer, primary_key=True ),
+                    Column( "create_time", DateTime, default=now ),
+                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                    Column( "latest_revision_id", Integer,
+                            ForeignKey( "page_revision.id", use_alter=True, name='page_latest_revision_id_fk' ), index=True ),
+                    Column( "title", TEXT ),
+                    Column( "slug", TEXT, unique=True, index=True ) )
+
+PageRevision_table = Table( "page_revision", metadata,
+                            Column( "id", Integer, primary_key=True ),
+                            Column( "create_time", DateTime, default=now ),
+                            Column( "update_time", DateTime, default=now, onupdate=now ),
+                            Column( "page_id", Integer, ForeignKey( "page.id" ), index=True, nullable=False ),
+                            Column( "title", TEXT ),
+                            Column( "content", TEXT ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        if migrate_engine.name == 'mysql':
+            # Strip slug index prior to creation so we can do it manually.
+            slug_index = None
+            for ix in Page_table.indexes:
+                if ix.name == 'ix_page_slug':
+                    slug_index = ix
+            Page_table.indexes.remove(slug_index)
+        Page_table.create()
+        if migrate_engine.name == 'mysql':
+            # Create slug index manually afterward.
+            i = Index( "ix_page_slug", Page_table.c.slug, mysql_length=200)
+            i.create()
+    except Exception as ex:
+        log.debug(ex)
+        log.debug( "Could not create page table" )
+    try:
+        PageRevision_table.create()
+    except Exception:
+        log.debug( "Could not create page_revision table" )
+
+    # Add 1 column to the user table
+    User_table = Table( "galaxy_user", metadata, autoload=True )
+    col = Column( 'username', String(255), index=True, unique=True, default=False )
+    col.create( User_table, index_name='ix_user_username', unique_name='username' )
+    assert col is User_table.c.username
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    Page_table.drop()
+    PageRevision_table.drop()
+    User_table = Table( "galaxy_user", metadata, autoload=True )
+    User_table.c.username.drop()
diff --git a/lib/galaxy/model/migrate/versions/0015_tagging.py b/lib/galaxy/model/migrate/versions/0015_tagging.py
new file mode 100644
index 0000000..eacb1dd
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0015_tagging.py
@@ -0,0 +1,119 @@
+"""
+This migration script adds the tables necessary to support tagging of histories,
+datasets, and history-dataset associations (user views of datasets).
+
+If using mysql, this script will display the following error, which is corrected in the next
+migration script:
+
+history_dataset_association_tag_association table failed: (OperationalError)
+(1059, "Identifier name 'ix_history_dataset_association_tag_association_history_dataset_association_id'
+is too long")
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from migrate import UniqueConstraint
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("")
+    print("This migration script adds the tables necessary to support tagging of histories,")
+    print("datasets, and history-dataset associations (user views of datasets).")
+    print("")
+    print("If using mysql, this script will display the following error, which is ")
+    print("corrected in the next migration script:")
+    print("history_dataset_association_tag_association table failed:  ")
+    print("(OperationalError) (1059, 'Identifier name ")
+    print("'ix_history_dataset_association_tag_association_history_dataset_association_id'")
+    print("is too long)")
+
+
+# New tables to support tagging of histories, datasets, and history-dataset associations.
+Tag_table = Table( "tag", metadata,
+                   Column( "id", Integer, primary_key=True ),
+                   Column( "type", Integer ),
+                   Column( "parent_id", Integer, ForeignKey( "tag.id" ) ),
+                   Column( "name", TrimmedString(255) ),
+                   UniqueConstraint( "name" ) )
+
+HistoryTagAssociation_table = Table( "history_tag_association", metadata,
+                                     Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                     Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                     Column( "user_tname", TrimmedString(255), index=True),
+                                     Column( "value", TrimmedString(255), index=True),
+                                     Column( "user_value", TrimmedString(255), index=True) )
+
+DatasetTagAssociation_table = Table( "dataset_tag_association", metadata,
+                                     Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+                                     Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                     Column( "user_tname", TrimmedString(255), index=True),
+                                     Column( "value", TrimmedString(255), index=True),
+                                     Column( "user_value", TrimmedString(255), index=True) )
+
+HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata,
+                                                       Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+                                                       Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                                       Column( "user_tname", TrimmedString(255), index=True),
+                                                       Column( "value", TrimmedString(255), index=True),
+                                                       Column( "user_value", TrimmedString(255), index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    metadata.reflect()
+    try:
+        Tag_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating tag table failed: %s" % str( e ) )
+    try:
+        HistoryTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating history_tag_association table failed: %s" % str( e ) )
+    try:
+        DatasetTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating dataset_tag_association table failed: %s" % str( e ) )
+    try:
+        HistoryDatasetAssociationTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating history_dataset_association_tag_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Tag_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping tag table failed: %s" % str( e ) )
+    try:
+        HistoryTagAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping history_tag_association table failed: %s" % str( e ) )
+    try:
+        DatasetTagAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping dataset_tag_association table failed: %s" % str( e ) )
+    try:
+        HistoryDatasetAssociationTagAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping history_dataset_association_tag_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py b/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py
new file mode 100644
index 0000000..24ecd09
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0016_v0015_mysql_index_fix.py
@@ -0,0 +1,56 @@
+"""
+This script fixes a problem introduced in 0015_tagging.py. MySQL has a name length
+limit and thus the index "ix_hda_ta_history_dataset_association_id" has to be
+manually created.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("")
+    print("This script fixes a problem introduced in 0015_tagging.py.  MySQL has a")
+    print("name length limit and thus the index 'ix_hda_ta_history_dataset_association_id'")
+    print("has to be manually created.")
+
+
+HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata,
+                                                       Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+                                                       Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                                       Column( "user_tname", TrimmedString(255), index=True),
+                                                       Column( "value", TrimmedString(255), index=True),
+                                                       Column( "user_value", TrimmedString(255), index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    metadata.reflect()
+    i = Index( "ix_hda_ta_history_dataset_association_id", HistoryDatasetAssociationTagAssociation_table.c.history_dataset_association_id )
+    try:
+        i.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Adding index 'ix_hdata_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    i = Index( "ix_hda_ta_history_dataset_association_id", HistoryDatasetAssociationTagAssociation_table.c.history_dataset_association_id )
+    try:
+        i.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Removing index 'ix_hdata_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py b/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
new file mode 100644
index 0000000..579827c
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0017_library_item_indexes.py
@@ -0,0 +1,59 @@
+"""
+This script adds 3 indexes to table columns: library_folder.name,
+library_dataset.name, library_dataset_dataset_association.name.
+"""
+from __future__ import print_function
+
+import logging
+import sys
+
+from sqlalchemy import Index, MetaData, Table
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This script adds 3 indexes to table columns: library_folder.name,")
+    print("library_dataset.name, library_dataset_dataset_association.name.")
+    print("========================================")
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    LibraryFolder_table = Table( "library_folder", metadata, autoload=True )
+    LibraryDatasetDatasetAssociation_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+    LibraryDataset_table = Table( "library_dataset", metadata, autoload=True )
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    # Add 1 index to the library_folder table
+    i = Index( 'ix_library_folder_name', LibraryFolder_table.c.name, mysql_length=200 )
+    try:
+        i.create()
+    except Exception as e:
+        log.debug( "Adding index 'ix_library_folder_name' to library_folder table failed: %s" % ( str( e ) ) )
+    # Add 1 index to the library_dataset_dataset_association table
+    i = Index( 'ix_library_dataset_dataset_association_name', LibraryDatasetDatasetAssociation_table.c.name )
+    try:
+        i.create()
+    except Exception as e:
+        log.debug( "Adding index 'ix_library_dataset_dataset_association_name' to library_dataset_dataset_association table failed: %s" % ( str( e ) ) )
+    # Add 1 index to the library_dataset table
+    i = Index( 'ix_library_dataset_name', LibraryDataset_table.c.name )
+    try:
+        i.create()
+    except Exception as e:
+        log.debug( "Adding index 'ix_library_dataset_name' to library_dataset table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    log.debug( "Downgrade is not possible." )
diff --git a/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
new file mode 100644
index 0000000..3fd2853
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0018_ordered_tags_and_page_tags.py
@@ -0,0 +1,119 @@
+"""
+This migration script provides support for (a) ordering tags by recency and
+(b) tagging pages. This script deletes all existing tags.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table
+from sqlalchemy.exc import OperationalError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("")
+    print("This migration script provides support for (a) ordering tags by recency and")
+    print("(b) tagging pages. This script deletes all existing tags.")
+
+
+HistoryTagAssociation_table = Table( "history_tag_association", metadata,
+                                     Column( "id", Integer, primary_key=True ),
+                                     Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                     Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                     Column( "user_tname", TrimmedString(255), index=True),
+                                     Column( "value", TrimmedString(255), index=True),
+                                     Column( "user_value", TrimmedString(255), index=True) )
+
+DatasetTagAssociation_table = Table( "dataset_tag_association", metadata,
+                                     Column( "id", Integer, primary_key=True ),
+                                     Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+                                     Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                     Column( "user_tname", TrimmedString(255), index=True),
+                                     Column( "value", TrimmedString(255), index=True),
+                                     Column( "user_value", TrimmedString(255), index=True) )
+
+HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata,
+                                                       Column( "id", Integer, primary_key=True ),
+                                                       Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+                                                       Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                                       Column( "user_tname", TrimmedString(255), index=True),
+                                                       Column( "value", TrimmedString(255), index=True),
+                                                       Column( "user_value", TrimmedString(255), index=True) )
+
+PageTagAssociation_table = Table( "page_tag_association", metadata,
+                                  Column( "id", Integer, primary_key=True ),
+                                  Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+                                  Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                  Column( "user_tname", TrimmedString(255), index=True),
+                                  Column( "value", TrimmedString(255), index=True),
+                                  Column( "user_value", TrimmedString(255), index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    metadata.reflect()
+
+    #
+    # Recreate tables.
+    #
+    try:
+        HistoryTagAssociation_table.drop()
+        HistoryTagAssociation_table.create()
+    except Exception as e:
+        print("Recreating history_tag_association table failed: %s" % str( e ))
+        log.debug( "Recreating history_tag_association table failed: %s" % str( e ) )
+
+    try:
+        DatasetTagAssociation_table.drop()
+        DatasetTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Recreating dataset_tag_association table failed: %s" % str( e ) )
+
+    try:
+        HistoryDatasetAssociationTagAssociation_table.drop()
+        HistoryDatasetAssociationTagAssociation_table.create()
+    except OperationalError as e:
+        # Handle the error that results from an index name that is too long; this
+        # occurs in MySQL.
+        if str(e).find("CREATE INDEX") != -1:
+            # Manually create index.
+            i = Index( "ix_hda_ta_history_dataset_association_id", HistoryDatasetAssociationTagAssociation_table.c.history_dataset_association_id )
+            try:
+                i.create()
+            except Exception as e:
+                print(str(e))
+                log.debug( "Adding index 'ix_hda_ta_history_dataset_association_id' to table 'history_dataset_association_tag_association' table failed: %s" % str( e ) )
+    except Exception as e:
+        print(str(e))
+        log.debug( "Recreating history_dataset_association_tag_association table failed: %s" % str( e ) )
+
+    # Create page_tag_association table.
+    try:
+        PageTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating page_tag_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # No need to downgrade other tagging tables. They work fine with version 16 code.
+
+    # Drop page_tag_association table.
+    try:
+        PageTagAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping page_tag_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0019_request_library_folder.py b/lib/galaxy/model/migrate/versions/0019_request_library_folder.py
new file mode 100644
index 0000000..2e08da6
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0019_request_library_folder.py
@@ -0,0 +1,89 @@
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from migrate import ForeignKeyConstraint
+from sqlalchemy import Column, Integer, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType, TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("""This script creates a request.folder_id column which is a foreign
+key to the library_folder table. This also adds a 'type' and 'layout' column
+to the form_definition table.""")
+    print("========================================")
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    # Create the folder_id column
+    try:
+        Request_table = Table( "request", metadata, autoload=True )
+    except NoSuchTableError:
+        Request_table = None
+        log.debug( "Failed loading table request" )
+    if Request_table is not None:
+        try:
+            col = Column( "folder_id", Integer, index=True )
+            col.create( Request_table, index_name='ix_request_folder_id')
+            assert col is Request_table.c.folder_id
+        except Exception as e:
+            log.debug( "Adding column 'folder_id' to request table failed: %s" % ( str( e ) ) )
+        try:
+            LibraryFolder_table = Table( "library_folder", metadata, autoload=True )
+        except NoSuchTableError:
+            LibraryFolder_table = None
+            log.debug( "Failed loading table library_folder" )
+        # Add 1 foreign key constraint to the library_folder table
+        if migrate_engine.name != 'sqlite' and Request_table is not None and LibraryFolder_table is not None:
+            try:
+                cons = ForeignKeyConstraint( [Request_table.c.folder_id],
+                                             [LibraryFolder_table.c.id],
+                                             name='request_folder_id_fk' )
+                # Create the constraint
+                cons.create()
+            except Exception as e:
+                log.debug( "Adding foreign key constraint 'request_folder_id_fk' to table 'library_folder' failed: %s" % ( str( e ) ) )
+    # Create the type column in form_definition
+    try:
+        FormDefinition_table = Table( "form_definition", metadata, autoload=True )
+    except NoSuchTableError:
+        FormDefinition_table = None
+        log.debug( "Failed loading table form_definition" )
+    if FormDefinition_table is not None:
+        try:
+            col = Column( "type", TrimmedString( 255 ), index=True )
+            col.create( FormDefinition_table, index_name='ix_form_definition_type')
+            assert col is FormDefinition_table.c.type
+        except Exception as e:
+            log.debug( "Adding column 'type' to form_definition table failed: %s" % ( str( e ) ) )
+        try:
+            col = Column( "layout", JSONType())
+            col.create( FormDefinition_table )
+            assert col is FormDefinition_table.c.layout
+        except Exception as e:
+            log.debug( "Adding column 'layout' to form_definition table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
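
The sqlite guard above is the recurring pattern in these migrations: SQLite
cannot ALTER TABLE ... ADD FOREIGN KEY, so the plain column is created on every
engine and the constraint only where supported. A condensed sketch of the
pattern, assuming the same sqlalchemy-migrate API these scripts use:

    from migrate import ForeignKeyConstraint
    from sqlalchemy import Column, Integer, MetaData, Table, create_engine

    engine = create_engine("sqlite://")
    metadata = MetaData(bind=engine)
    parent = Table("library_folder", metadata, Column("id", Integer, primary_key=True))
    child = Table("request", metadata, Column("id", Integer, primary_key=True))
    metadata.create_all()
    col = Column("folder_id", Integer)
    col.create(child)  # sqlalchemy-migrate adds the column in place
    if engine.name != "sqlite":  # SQLite cannot add a foreign key via ALTER
        ForeignKeyConstraint([child.c.folder_id], [parent.c.id],
                             name="request_folder_id_fk").create()
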
diff --git a/lib/galaxy/model/migrate/versions/0020_library_upload_job.py b/lib/galaxy/model/migrate/versions/0020_library_upload_job.py
new file mode 100644
index 0000000..3608553
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0020_library_upload_job.py
@@ -0,0 +1,128 @@
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from migrate import ForeignKeyConstraint
+from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, String, Table
+from sqlalchemy.exc import NoSuchTableError
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("")
+    print("========================================")
+    print("""This script creates a job_to_output_library_dataset table for allowing library
+uploads to run as regular jobs.  To support this, a library_folder_id column is
+added to the job table, and library_folder/output_library_datasets relations
+are added to the Job object.  An index is also added to the dataset.state
+column.""")
+    print("========================================")
+
+
+JobToOutputLibraryDatasetAssociation_table = Table( "job_to_output_library_dataset", metadata,
+                                                    Column( "id", Integer, primary_key=True ),
+                                                    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                                                    Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
+                                                    Column( "name", String(255) ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    # Create the job_to_output_library_dataset table
+    try:
+        JobToOutputLibraryDatasetAssociation_table.create()
+    except Exception as e:
+        print("Creating job_to_output_library_dataset table failed: %s" % str( e ))
+        log.debug( "Creating job_to_output_library_dataset table failed: %s" % str( e ) )
+    # Create the library_folder_id column
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+    except NoSuchTableError:
+        Job_table = None
+        log.debug( "Failed loading table job" )
+    if Job_table is not None:
+        try:
+            col = Column( "library_folder_id", Integer, index=True )
+            col.create( Job_table, index_name='ix_job_library_folder_id')
+            assert col is Job_table.c.library_folder_id
+        except Exception as e:
+            log.debug( "Adding column 'library_folder_id' to job table failed: %s" % ( str( e ) ) )
+        try:
+            LibraryFolder_table = Table( "library_folder", metadata, autoload=True )
+        except NoSuchTableError:
+            LibraryFolder_table = None
+            log.debug( "Failed loading table library_folder" )
+        # Add 1 foreign key constraint to the job table
+        if migrate_engine.name != 'sqlite':
+            # Sqlite can't alter-table-add-foreign-key
+            if Job_table is not None and LibraryFolder_table is not None:
+                try:
+                    cons = ForeignKeyConstraint( [Job_table.c.library_folder_id],
+                                                 [LibraryFolder_table.c.id],
+                                                 name='job_library_folder_id_fk' )
+                    # Create the constraint
+                    cons.create()
+                except Exception as e:
+                    log.debug( "Adding foreign key constraint 'job_library_folder_id_fk' to table 'library_folder' failed: %s" % ( str( e ) ) )
+    # Create the ix_dataset_state index
+    try:
+        Dataset_table = Table( "dataset", metadata, autoload=True )
+    except NoSuchTableError:
+        Dataset_table = None
+        log.debug( "Failed loading table dataset" )
+    i = Index( "ix_dataset_state", Dataset_table.c.state )
+    try:
+        i.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Adding index 'ix_dataset_state' to dataset table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop the library_folder_id column
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+    except NoSuchTableError:
+        Job_table = None
+        log.debug( "Failed loading table job" )
+    if Job_table is not None:
+        try:
+            col = Job_table.c.library_folder_id
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'library_folder_id' from job table failed: %s" % ( str( e ) ) )
+    # Drop the job_to_output_library_dataset table
+    try:
+        JobToOutputLibraryDatasetAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping job_to_output_library_dataset table failed: %s" % str( e ) )
+    # Drop the ix_dataset_state index
+    try:
+        Dataset_table = Table( "dataset", metadata, autoload=True )
+    except NoSuchTableError:
+        Dataset_table = None
+        log.debug( "Failed loading table dataset" )
+    i = Index( "ix_dataset_state", Dataset_table.c.state )
+    try:
+        i.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping index 'ix_dataset_state' from dataset table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0021_user_prefs.py b/lib/galaxy/model/migrate/versions/0021_user_prefs.py
new file mode 100644
index 0000000..a608492
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0021_user_prefs.py
@@ -0,0 +1,49 @@
+"""
+This migration script adds a user preferences table to Galaxy.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, Unicode
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("")
+    print("This migration script adds a user preferences table to Galaxy.")
+    print("")
+
+
+# New table to support user preferences.
+
+UserPreference_table = Table( "user_preference", metadata,
+                              Column( "id", Integer, primary_key=True ),
+                              Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                              Column( "name", Unicode( 255 ), index=True),
+                              Column( "value", Unicode( 1024 ) ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    metadata.reflect()
+    try:
+        UserPreference_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating user_preference table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        UserPreference_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping user_preference table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0022_visualization_tables.py b/lib/galaxy/model/migrate/versions/0022_visualization_tables.py
new file mode 100644
index 0000000..38da7fc
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0022_visualization_tables.py
@@ -0,0 +1,53 @@
+"""
+Migration script to add support for storing visualizations.
+  1) Creates Visualization and VisualizationRevision tables
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+Visualization_table = Table( "visualization", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                             Column( "latest_revision_id", Integer,
+                                     ForeignKey( "visualization_revision.id", use_alter=True, name='visualization_latest_revision_id_fk' ), index=True ),
+                             Column( "title", TEXT ),
+                             Column( "type", TEXT ) )
+
+VisualizationRevision_table = Table( "visualization_revision", metadata,
+                                     Column( "id", Integer, primary_key=True ),
+                                     Column( "create_time", DateTime, default=now ),
+                                     Column( "update_time", DateTime, default=now, onupdate=now ),
+                                     Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True, nullable=False ),
+                                     Column( "title", TEXT ),
+                                     Column( "config", TEXT ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Visualization_table.create()
+    except Exception:
+        log.debug( "Could not create visualization table" )
+    try:
+        VisualizationRevision_table.create()
+    except Exception:
+        log.debug( "Could not create visualization_revision table" )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    Visualization_table.drop()
+    VisualizationRevision_table.drop()
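
Worth noting in this pair of tables: visualization and visualization_revision
reference each other, so one of the two foreign keys must be installed after
both tables exist. use_alter=True (together with an explicit constraint name)
tells SQLAlchemy to emit that key as a separate ALTER TABLE statement rather
than inline in CREATE TABLE. A minimal sketch of the circular-reference
pattern:

    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()
    visualization = Table("visualization", metadata,
        Column("id", Integer, primary_key=True),
        Column("latest_revision_id", Integer,
               ForeignKey("visualization_revision.id", use_alter=True,
                          name="visualization_latest_revision_id_fk")))
    revision = Table("visualization_revision", metadata,
        Column("id", Integer, primary_key=True),
        Column("visualization_id", Integer, ForeignKey("visualization.id")))
    # On an engine that supports ALTER TABLE ADD CONSTRAINT,
    # metadata.create_all(engine) creates both tables first, then installs the
    # use_alter key via ALTER TABLE.
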
diff --git a/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py b/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py
new file mode 100644
index 0000000..78fe1dc
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0023_page_published_and_deleted_columns.py
@@ -0,0 +1,38 @@
+"""
+Migration script to add columns for tracking whether pages are deleted and
+publicly accessible.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+
+    print(__doc__)
+    metadata.reflect()
+
+    Page_table = Table( "page", metadata, autoload=True )
+
+    c = Column( "published", Boolean, index=True, default=False )
+    c.create( Page_table, index_name='ix_page_published' )
+    assert c is Page_table.c.published
+
+    c = Column( "deleted", Boolean, index=True, default=False )
+    c.create( Page_table, index_name='ix_page_deleted')
+    assert c is Page_table.c.deleted
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    Page_table = Table( "page", metadata, autoload=True )
+    Page_table.c.published.drop()
+    Page_table.c.deleted.drop()
diff --git a/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py b/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py
new file mode 100644
index 0000000..08aade5
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0024_page_slug_unique_constraint.py
@@ -0,0 +1,43 @@
+"""
+Remove unique constraint from page slugs to allow creating a page with
+the same slug as a deleted page.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Index, MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    Page_table = Table( "page", metadata, autoload=True )
+
+    try:
+
+        # Sqlite doesn't support .alter, so we need to drop and recreate
+
+        i = Index( "ix_page_slug", Page_table.c.slug )
+        i.drop()
+
+        i = Index( "ix_page_slug", Page_table.c.slug, unique=False )
+        i.create()
+
+    except Exception:
+
+        # MySQL doesn't have the named index, but .alter should work
+
+        Page_table.c.slug.alter( unique=False )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
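
Since neither backend can change an index's uniqueness in place in a portable
way, the upgrade swaps the whole index. A self-contained sketch of the
drop-and-recreate approach (in-memory SQLite, illustration only):

    from sqlalchemy import Column, Index, Integer, MetaData, Table, TEXT, create_engine

    engine = create_engine("sqlite://")
    metadata = MetaData()
    page = Table("page", metadata,
                 Column("id", Integer, primary_key=True),
                 Column("slug", TEXT))
    metadata.create_all(engine)
    Index("ix_page_slug", page.c.slug, unique=True).create(engine)
    # Can't alter uniqueness in place on SQLite: drop the index, recreate it.
    Index("ix_page_slug", page.c.slug).drop(engine)
    Index("ix_page_slug", page.c.slug, unique=False).create(engine)
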
diff --git a/lib/galaxy/model/migrate/versions/0025_user_info.py b/lib/galaxy/model/migrate/versions/0025_user_info.py
new file mode 100644
index 0000000..11e0078
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0025_user_info.py
@@ -0,0 +1,69 @@
+"""
+This script adds a foreign key from the galaxy_user table to the form_values table
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from migrate import ForeignKeyConstraint
+from sqlalchemy import Column, Integer, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This script adds a foreign key to the form_values table in the galaxy_user table")
+    print("========================================")
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    try:
+        User_table = Table( "galaxy_user", metadata, autoload=True )
+    except NoSuchTableError:
+        User_table = None
+        log.debug( "Failed loading table galaxy_user" )
+    if User_table is not None:
+        try:
+            col = Column( "form_values_id", Integer, index=True )
+            col.create( User_table, index_name='ix_user_form_values_id')
+            assert col is User_table.c.form_values_id
+        except Exception as e:
+            log.debug( "Adding column 'form_values_id' to galaxy_user table failed: %s" % ( str( e ) ) )
+        try:
+            FormValues_table = Table( "form_values", metadata, autoload=True )
+        except NoSuchTableError:
+            FormValues_table = None
+            log.debug( "Failed loading table form_values" )
+        if migrate_engine.name != 'sqlite':
+            # Add 1 foreign key constraint to the form_values table
+            if User_table is not None and FormValues_table is not None:
+                try:
+                    cons = ForeignKeyConstraint( [User_table.c.form_values_id],
+                                                 [FormValues_table.c.id],
+                                                 name='user_form_values_id_fk' )
+                    # Create the constraint
+                    cons.create()
+                except Exception as e:
+                    log.debug( "Adding foreign key constraint 'user_form_values_id_fk' to table 'galaxy_user' failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0026_cloud_tables.py b/lib/galaxy/model/migrate/versions/0026_cloud_tables.py
new file mode 100644
index 0000000..3f29890
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0026_cloud_tables.py
@@ -0,0 +1,159 @@
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("")
+    print("========================================")
+    print("This script adds tables needed for Galaxy cloud functionality.")
+    print("========================================")
+
+
+CloudImage_table = Table( "cloud_image", metadata,
+                          Column( "id", Integer, primary_key=True ),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "provider_type", TEXT ),
+                          Column( "image_id", TEXT, nullable=False ),
+                          Column( "manifest", TEXT ),
+                          Column( "state", TEXT ),
+                          Column( "architecture", TEXT ),
+                          Column( "deleted", Boolean, default=False ) )
+
+""" UserConfiguredInstance (UCI) table """
+UCI_table = Table( "cloud_uci", metadata,
+                   Column( "id", Integer, primary_key=True ),
+                   Column( "create_time", DateTime, default=now ),
+                   Column( "update_time", DateTime, default=now, onupdate=now ),
+                   Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                   Column( "credentials_id", Integer, ForeignKey( "cloud_user_credentials.id" ), index=True ),
+                   Column( "key_pair_name", TEXT ),
+                   Column( "key_pair_material", TEXT ),
+                   Column( "name", TEXT ),
+                   Column( "state", TEXT ),
+                   Column( "error", TEXT ),
+                   Column( "total_size", Integer ),
+                   Column( "launch_time", DateTime ),
+                   Column( "deleted", Boolean, default=False ) )
+
+CloudInstance_table = Table( "cloud_instance", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "launch_time", DateTime ),
+                             Column( "stop_time", DateTime ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                             Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True ),
+                             Column( "type", TEXT ),
+                             Column( "reservation_id", TEXT ),
+                             Column( "instance_id", TEXT ),
+                             Column( "mi_id", Integer, ForeignKey( "cloud_image.id" ), index=True ),
+                             Column( "state", TEXT ),
+                             Column( "error", TEXT ),
+                             Column( "public_dns", TEXT ),
+                             Column( "private_dns", TEXT ),
+                             Column( "security_group", TEXT ),
+                             Column( "availability_zone", TEXT ) )
+
+CloudStore_table = Table( "cloud_store", metadata,
+                          Column( "id", Integer, primary_key=True ),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "attach_time", DateTime ),
+                          Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                          Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True, nullable=False ),
+                          Column( "volume_id", TEXT ),
+                          Column( "size", Integer, nullable=False ),
+                          Column( "availability_zone", TEXT ),
+                          Column( "inst_id", Integer, ForeignKey( "cloud_instance.id" ) ),
+                          Column( "status", TEXT ),
+                          Column( "device", TEXT ),
+                          Column( "space_consumed", Integer ),
+                          Column( "error", TEXT ),
+                          Column( "deleted", Boolean, default=False ) )
+
+CloudSnapshot_table = Table( "cloud_snapshot", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                             Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True ),
+                             Column( "store_id", Integer, ForeignKey( "cloud_store.id" ), index=True, nullable=False ),
+                             Column( "snapshot_id", TEXT ),
+                             Column( "status", TEXT ),
+                             Column( "description", TEXT ),
+                             Column( "error", TEXT ),
+                             Column( "deleted", Boolean, default=False ) )
+
+CloudUserCredentials_table = Table( "cloud_user_credentials", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                                    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                                    Column( "provider_id", Integer, ForeignKey( "cloud_provider.id" ), index=True, nullable=False ),
+                                    Column( "name", TEXT ),
+                                    Column( "access_key", TEXT ),
+                                    Column( "secret_key", TEXT ),
+                                    Column( "deleted", Boolean, default=False ) )
+
+CloudProvider_table = Table( "cloud_provider", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                             Column( "type", TEXT, nullable=False ),
+                             Column( "name", TEXT ),
+                             Column( "region_connection", TEXT ),
+                             Column( "region_name", TEXT ),
+                             Column( "region_endpoint", TEXT ),
+                             Column( "is_secure", Boolean ),
+                             Column( "host", TEXT ),
+                             Column( "port", Integer ),
+                             Column( "proxy", TEXT ),
+                             Column( "proxy_port", TEXT ),
+                             Column( "proxy_user", TEXT ),
+                             Column( "proxy_pass", TEXT ),
+                             Column( "debug", Integer ),
+                             Column( "https_connection_factory", TEXT ),
+                             Column( "path", TEXT ),
+                             Column( "deleted", Boolean, default=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+
+    CloudProvider_table.create()
+    CloudUserCredentials_table.create()
+
+    CloudImage_table.create()
+    UCI_table.create()
+
+    CloudInstance_table.create()
+    CloudStore_table.create()
+    CloudSnapshot_table.create()
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    CloudInstance_table.drop()
+    CloudSnapshot_table.drop()
+    CloudStore_table.drop()
+
+    UCI_table.drop()
+    CloudImage_table.drop()
+
+    CloudUserCredentials_table.drop()
+    CloudProvider_table.drop()
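
The explicit ordering here matters: tables referenced by foreign keys
(cloud_provider, cloud_user_credentials, cloud_uci) are created before the
tables that point at them, and dropped in the reverse order. SQLAlchemy can
derive the same ordering from the foreign keys itself; a short sketch,
assuming the module-level metadata above:

    # Parents first for CREATE; reverse the list for DROP.
    for table in metadata.sorted_tables:
        print(table.name)
    # metadata.create_all(engine) / metadata.drop_all(engine) apply the same
    # ordering automatically.
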
diff --git a/lib/galaxy/model/migrate/versions/0027_request_events.py b/lib/galaxy/model/migrate/versions/0027_request_events.py
new file mode 100644
index 0000000..081d0e6
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0027_request_events.py
@@ -0,0 +1,100 @@
+"""
+This migration script adds the request_event table and
+removes the state field from the request table
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This migration script adds the request_event table and")
+    print("removes the state field in the request table")
+    print("========================================")
+
+
+RequestEvent_table = Table('request_event', metadata,
+    Column( "id", Integer, primary_key=True),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "request_id", Integer, ForeignKey( "request.id" ), index=True ),
+    Column( "state", TrimmedString( 255 ), index=True ),
+    Column( "comment", TEXT ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+
+    def localtimestamp():
+        if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+            return "LOCALTIMESTAMP"
+        elif migrate_engine.name == 'sqlite':
+            return "current_date || ' ' || current_time"
+        else:
+            raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+    def nextval( table, col='id' ):
+        if migrate_engine.name in ['postgres', 'postgresql']:
+            return "nextval('%s_%s_seq')" % ( table, col )
+        elif migrate_engine.name in ['mysql', 'sqlite']:
+            return "null"
+        else:
+            raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+    # Load existing tables
+    metadata.reflect()
+    # Add new request_event table
+    try:
+        RequestEvent_table.create()
+    except Exception as e:
+        log.debug( "Creating request_event table failed: %s" % str( e ) )
+    # move the current state of all existing requests to the request_event table
+
+    cmd = \
+        "INSERT INTO request_event " + \
+        "SELECT %s AS id," + \
+        "%s AS create_time," + \
+        "%s AS update_time," + \
+        "request.id AS request_id," + \
+        "request.state AS state," + \
+        "'%s' AS comment " + \
+        "FROM request;"
+    cmd = cmd % ( nextval('request_event'), localtimestamp(), localtimestamp(), 'Imported from request table')
+    migrate_engine.execute( cmd )
+
+    if migrate_engine.name != 'sqlite':
+        # Delete the state column
+        try:
+            Request_table = Table( "request", metadata, autoload=True )
+        except NoSuchTableError:
+            Request_table = None
+            log.debug( "Failed loading table request" )
+        if Request_table is not None:
+            try:
+                Request_table.c.state.drop()
+            except Exception as e:
+                log.debug( "Deleting column 'state' to request table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
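
The notable part of this script is the data migration: existing request.state
values are copied into the new table with one INSERT ... SELECT, and the two
helpers hide dialect differences (PostgreSQL needs an explicit nextval() for
the id column and supports LOCALTIMESTAMP; MySQL and SQLite accept NULL for an
autoincrement id). A standalone sketch of the dispatch idea, keyed only on the
engine name:

    def localtimestamp(engine_name):
        # Dialect-specific SQL literal for "now".
        if engine_name in ("mysql", "postgres", "postgresql"):
            return "LOCALTIMESTAMP"
        elif engine_name == "sqlite":
            return "current_date || ' ' || current_time"
        raise Exception("Unknown database type: %s" % engine_name)

    print(localtimestamp("sqlite"))  # -> current_date || ' ' || current_time
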
diff --git a/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py b/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py
new file mode 100644
index 0000000..73be3f5
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0028_external_metadata_file_override.py
@@ -0,0 +1,50 @@
+"""
+This script adds the filename_override_metadata column to the JobExternalOutputMetadata table,
+allowing existing metadata files to be written when using external metadata and a cluster
+set up with read-only access to database/files
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, MetaData, String, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("========================================")
+    print("This script adds the filename_override_metadata column to the JobExternalOutputMetadata table,")
+    print(" allowing existing metadata files to be written when using external metadata and a cluster")
+    print("set up with read-only access to database/files")
+    print("========================================")
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    try:
+        job_external_output_metadata = Table( "job_external_output_metadata", metadata, autoload=True )
+        col = Column( "filename_override_metadata", String( 255 ) )
+        col.create( job_external_output_metadata )
+        assert col is job_external_output_metadata.c.filename_override_metadata
+    except Exception as e:
+        log.debug( "Adding column 'filename_override_metadata' to job_external_output_metadata table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0029_user_actions.py b/lib/galaxy/model/migrate/versions/0029_user_actions.py
new file mode 100644
index 0000000..cd4460c
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0029_user_actions.py
@@ -0,0 +1,51 @@
+"""
+This migration script adds a user actions table to Galaxy.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table, Unicode
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print("")
+    print("This migration script adds a user actions table to Galaxy.")
+    print("")
+
+
+# New table to store user actions.
+UserAction_table = Table( "user_action", metadata,
+                          Column( "id", Integer, primary_key=True ),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                          Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
+                          Column( "action", Unicode( 255 ) ),
+                          Column( "context", Unicode( 512 ) ),
+                          Column( "params", Unicode( 1024 ) ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    metadata.reflect()
+    try:
+        UserAction_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating user_action table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        UserAction_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping user_action table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0030_history_slug_column.py b/lib/galaxy/model/migrate/versions/0030_history_slug_column.py
new file mode 100644
index 0000000..11f05f4
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0030_history_slug_column.py
@@ -0,0 +1,40 @@
+"""
+Migration script to add column for a history slug.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, Index, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+
+    print(__doc__)
+    metadata.reflect()
+
+    History_table = Table( "history", metadata, autoload=True )
+
+    # MySQL needs manual index creation because of its index key length limit on TEXT columns.
+    if migrate_engine.name != 'mysql':
+        # Create slug column.
+        c = Column( "slug", TEXT, index=True )
+        c.create( History_table, index_name='ix_history_slug')
+    else:
+        c = Column( "slug", TEXT )
+        c.create( History_table, index_name='')
+        i = Index( "ix_history_slug", History_table.c.slug, mysql_length=200)
+        i.create()
+    assert c is History_table.c.slug
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    History_table = Table( "history", metadata, autoload=True )
+    History_table.c.slug.drop()
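
The MySQL branch exists because MySQL cannot index an unbounded TEXT column
without a key prefix length, so the column is created bare and the index added
separately with mysql_length=200 (which other backends simply ignore). A
minimal sketch of the prefix-index declaration:

    from sqlalchemy import Column, Index, Integer, MetaData, Table, TEXT

    metadata = MetaData()
    history = Table("history", metadata,
                    Column("id", Integer, primary_key=True),
                    Column("slug", TEXT))
    # On MySQL this renders: CREATE INDEX ix_history_slug ON history (slug(200))
    Index("ix_history_slug", history.c.slug, mysql_length=200)
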
diff --git a/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py b/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py
new file mode 100644
index 0000000..2ebe57c
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0031_community_and_workflow_tags.py
@@ -0,0 +1,178 @@
+"""
+Migration script to (a) add and populate necessary columns for doing community tagging of histories, datasets, and pages and \
+(b) add a table for doing individual and community tagging of workflows.
+
+SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so this script will generate error messages when run against \
+SQLite; however, the script does execute successfully against SQLite.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, Unicode
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+StoredWorkflowTagAssociation_table = Table( "stored_workflow_tag_association", metadata,
+                                            Column( "id", Integer, primary_key=True ),
+                                            Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+                                            Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                            Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                            Column( "user_tname", Unicode(255), index=True),
+                                            Column( "value", Unicode(255), index=True),
+                                            Column( "user_value", Unicode(255), index=True) )
+
+WorkflowTagAssociation_table = Table( "workflow_tag_association", metadata,
+                                      Column( "id", Integer, primary_key=True ),
+                                      Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True ),
+                                      Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                      Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                      Column( "user_tname", Unicode(255), index=True),
+                                      Column( "value", Unicode(255), index=True),
+                                      Column( "user_value", Unicode(255), index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create user_id column in history_tag_association table.
+    HistoryTagAssociation_table = Table( "history_tag_association", metadata, autoload=True )
+    if migrate_engine.name != 'sqlite':
+        c = Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
+        try:
+            c.create( HistoryTagAssociation_table, index_name='ix_history_tag_association_user_id')
+            assert c is HistoryTagAssociation_table.c.user_id
+        except Exception as e:
+            # SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so catch exception if it arises.
+            print(str(e))
+            log.debug( "Adding user_id column to history_tag_association table failed: %s" % str( e ) )
+    else:
+        c = Column( "user_id", Integer)
+        try:
+            c.create( HistoryTagAssociation_table)
+            assert c is HistoryTagAssociation_table.c.user_id
+        except Exception as e:
+            # SQLite does not support 'ALTER TABLE ADD FOREIGN KEY', so catch exception if it arises.
+            print(str(e))
+            log.debug( "Adding user_id column to history_tag_association table failed: %s" % str( e ) )
+
+    # Populate column so that user_id is the id of the user who owns the history (and, up to now, was the only person able to tag the history).
+    if c is HistoryTagAssociation_table.c.user_id:
+        migrate_engine.execute(
+            "UPDATE history_tag_association SET user_id=( SELECT user_id FROM history WHERE history_tag_association.history_id = history.id )" )
+
+    if migrate_engine.name != 'sqlite':
+        # Create user_id column in history_dataset_association_tag_association table.
+        HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata, autoload=True )
+        c = Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
+        try:
+            c.create( HistoryDatasetAssociationTagAssociation_table, index_name='ix_history_dataset_association_tag_association_user_id')
+            assert c is HistoryDatasetAssociationTagAssociation_table.c.user_id
+        except Exception as e:
+            # Log the failure (e.g. the column already exists) and continue.
+            print(str(e))
+            log.debug( "Adding user_id column to history_dataset_association_tag_association table failed: %s" % str( e ) )
+    else:
+        # On SQLite we cannot add the foreign key ('ALTER TABLE ADD FOREIGN KEY' is unsupported),
+        # so create the user_id column in history_dataset_association_tag_association without one.
+        HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata, autoload=True )
+        c = Column( "user_id", Integer)
+        try:
+            c.create( HistoryDatasetAssociationTagAssociation_table)
+            assert c is HistoryDatasetAssociationTagAssociation_table.c.user_id
+        except Exception as e:
+            # No foreign key is added on SQLite, so any failure here is unrelated; log it and continue.
+            print(str(e))
+            log.debug( "Adding user_id column to history_dataset_association_tag_association table failed: %s" % str( e ) )
+
+    # Populate column so that user_id is the id of the user who owns the history_dataset_association (and, up to now, was the only person able to tag the dataset).
+    if c is HistoryDatasetAssociationTagAssociation_table.c.user_id:
+        migrate_engine.execute(
+            "UPDATE history_dataset_association_tag_association SET user_id=( SELECT history.user_id FROM history, history_dataset_association WHERE history_dataset_association.history_id = history.id AND history_dataset_association.id = history_dataset_association_tag_association.history_dataset_association_id)" )
+    if migrate_engine.name != 'sqlite':
+        # Create user_id column in page_tag_association table.
+        PageTagAssociation_table = Table( "page_tag_association", metadata, autoload=True )
+        c = Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
+        try:
+            c.create( PageTagAssociation_table, index_name='ix_page_tag_association_user_id')
+            assert c is PageTagAssociation_table.c.user_id
+        except Exception as e:
+            # Log the failure (e.g. the column already exists) and continue.
+            print(str(e))
+            log.debug( "Adding user_id column to page_tag_association table failed: %s" % str( e ) )
+    else:
+        # Create user_id column in page_tag_association table.
+        PageTagAssociation_table = Table( "page_tag_association", metadata, autoload=True )
+        c = Column( "user_id", Integer )
+        try:
+            c.create( PageTagAssociation_table )
+            assert c is PageTagAssociation_table.c.user_id
+        except Exception as e:
+            # No foreign key is added on SQLite, so any failure here is unrelated; log it and continue.
+            print(str(e))
+            log.debug( "Adding user_id column to page_tag_association table failed: %s" % str( e ) )
+
+    # Populate column so that user_id is the id of the user who owns the page (and, up to now, was the only person able to tag the page).
+    if c is PageTagAssociation_table.c.user_id:
+        migrate_engine.execute(
+            "UPDATE page_tag_association SET user_id=( SELECT user_id FROM page WHERE page_tag_association.page_id = page.id )" )
+
+    # Create stored_workflow_tag_association table.
+    try:
+        StoredWorkflowTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating stored_workflow_tag_association table failed: %s" % str( e ) )
+
+    # Create workflow_tag_association table.
+    try:
+        WorkflowTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating workflow_tag_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop user_id column from history_tag_association table.
+    HistoryTagAssociation_table = Table( "history_tag_association", metadata, autoload=True )
+    try:
+        HistoryTagAssociation_table.c.user_id.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping column user_id from history_tag_association table failed: %s" % str( e ) )
+
+    # Drop user_id column from history_dataset_association_tag_association table.
+    HistoryDatasetAssociationTagAssociation_table = Table( "history_dataset_association_tag_association", metadata, autoload=True )
+    try:
+        HistoryDatasetAssociationTagAssociation_table.c.user_id.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping column user_id from history_dataset_association_tag_association table failed: %s" % str( e ) )
+
+    # Drop user_id column from page_tag_association table.
+    PageTagAssociation_table = Table( "page_tag_association", metadata, autoload=True )
+    try:
+        PageTagAssociation_table.c.user_id.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping column user_id from page_tag_association table failed: %s" % str( e ) )
+
+    # Drop stored_workflow_tag_association table.
+    try:
+        StoredWorkflowTagAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping stored_workflow_tag_association table failed: %s" % str( e ) )
+
+    # Drop workflow_tag_association table.
+    try:
+        WorkflowTagAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping workflow_tag_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py b/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py
new file mode 100644
index 0000000..7f30dde
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0032_stored_workflow_slug_column.py
@@ -0,0 +1,43 @@
+"""
+Migration script to add slug column for stored workflow.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, Index, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+
+    print(__doc__)
+    metadata.reflect()
+
+    StoredWorkflow_table = Table( "stored_workflow", metadata, autoload=True )
+
+    # Create slug column.
+    c = Column( "slug", TEXT )
+    c.create( StoredWorkflow_table )
+
+    assert c is StoredWorkflow_table.c.slug
+
+    # Create slug index.
+    if migrate_engine.name != 'sqlite':
+        try:
+            i = Index( "ix_stored_workflow_slug", StoredWorkflow_table.c.slug, mysql_length=200 )
+            i.create()
+        except Exception:
+            # If creating the named index fails (e.g. on MySQL), fall back to altering the column.
+            StoredWorkflow_table.c.slug.alter( unique=False )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    StoredWorkflow_table = Table( "stored_workflow", metadata, autoload=True )
+    StoredWorkflow_table.c.slug.drop()
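The mysql_length=200 passed to Index above matters because MySQL cannot index an unbounded TEXT column without a key-prefix length; other dialects ignore the argument (migration 0043 below issues the same prefix form by hand). A sketch of the DDL the index creation is expected to correspond to on each backend, shown for illustration only:

    # Illustrative per-backend DDL for the Index above (not emitted by this script):
    MYSQL_DDL = "CREATE INDEX ix_stored_workflow_slug ON stored_workflow (slug(200))"
    POSTGRES_DDL = "CREATE INDEX ix_stored_workflow_slug ON stored_workflow (slug)"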
diff --git a/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py b/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py
new file mode 100644
index 0000000..616eaec
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0033_published_cols_for_histories_and_workflows.py
@@ -0,0 +1,104 @@
+"""
+Migration script to add necessary columns for distinguishing between viewing/importing and publishing histories, \
+workflows, and pages. Script adds published column to histories and workflows and importable column to pages.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, Index, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create published column in history table.
+    History_table = Table( "history", metadata, autoload=True )
+    c = Column( "published", Boolean, index=True )
+    try:
+        c.create( History_table, index_name='ix_history_published')
+        assert c is History_table.c.published
+    except Exception as e:
+        print("Adding published column to history table failed: %s" % str( e ))
+        log.debug( "Adding published column to history table failed: %s" % str( e ) )
+
+    if migrate_engine.name != 'sqlite':
+        # Create index for published column in history table.
+        try:
+            i = Index( "ix_history_published", History_table.c.published )
+            i.create()
+        except Exception:
+            # If creating the named index fails (e.g. on MySQL), fall back to altering the column.
+            History_table.c.published.alter( unique=False )
+
+    # Create published column in stored workflows table.
+    StoredWorkflow_table = Table( "stored_workflow", metadata, autoload=True )
+    c = Column( "published", Boolean, index=True )
+    try:
+        c.create( StoredWorkflow_table, index_name='ix_stored_workflow_published')
+        assert c is StoredWorkflow_table.c.published
+    except Exception as e:
+        print("Adding published column to stored_workflow table failed: %s" % str( e ))
+        log.debug( "Adding published column to stored_workflow table failed: %s" % str( e ) )
+
+    if migrate_engine.name != 'sqlite':
+        # Create index for published column in stored workflows table.
+        try:
+            i = Index( "ix_stored_workflow_published", StoredWorkflow_table.c.published )
+            i.create()
+        except Exception:
+            # If creating the named index fails (e.g. on MySQL), fall back to altering the column.
+            StoredWorkflow_table.c.published.alter( unique=False )
+
+    # Create importable column in page table.
+    Page_table = Table( "page", metadata, autoload=True )
+    c = Column( "importable", Boolean, index=True )
+    try:
+        c.create( Page_table, index_name='ix_page_importable')
+        assert c is Page_table.c.importable
+    except Exception as e:
+        print("Adding importable column to page table failed: %s" % str( e ))
+        log.debug( "Adding importable column to page table failed: %s" % str( e ) )
+
+    if migrate_engine.name != 'sqlite':
+        # Create index for importable column in page table.
+        try:
+            i = Index( "ix_page_importable", Page_table.c.importable )
+            i.create()
+        except Exception:
+            # If creating the named index fails (e.g. on MySQL), fall back to altering the column.
+            Page_table.c.importable.alter( unique=False )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop published column from history table.
+    History_table = Table( "history", metadata, autoload=True )
+    try:
+        History_table.c.published.drop()
+    except Exception as e:
+        print("Dropping column published from history table failed: %s" % str( e ))
+        log.debug( "Dropping column published from history table failed: %s" % str( e ) )
+
+    # Drop published column from stored_workflow table.
+    StoredWorkflow_table = Table( "stored_workflow", metadata, autoload=True )
+    try:
+        StoredWorkflow_table.c.published.drop()
+    except Exception as e:
+        print("Dropping column published from stored_workflow table failed: %s" % str( e ))
+        log.debug( "Dropping column published from stored_workflow table failed: %s" % str( e ) )
+
+    # Drop importable column from page table.
+    Page_table = Table( "page", metadata, autoload=True )
+    try:
+        Page_table.c.importable.drop()
+    except Exception as e:
+        print("Dropping column importable from page table failed: %s" % str( e ))
+        log.debug( "Dropping column importable from page table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py b/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py
new file mode 100644
index 0000000..b40e04e
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0034_page_user_share_association.py
@@ -0,0 +1,41 @@
+"""
+Migration script to create a table for page-user share association.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+PageUserShareAssociation_table = Table( "page_user_share_association", metadata,
+                                        Column( "id", Integer, primary_key=True ),
+                                        Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+                                        Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create page_user_share_association table.
+    try:
+        PageUserShareAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating page_user_share_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop page_user_share_association table.
+    try:
+        PageUserShareAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping page_user_share_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py b/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py
new file mode 100644
index 0000000..55600d8
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0035_item_annotations_and_workflow_step_tags.py
@@ -0,0 +1,143 @@
+"""
+Migration script to (a) create tables for annotating objects and (b) create tags for workflow steps.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table, TEXT, Unicode
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Annotation tables.
+
+HistoryAnnotationAssociation_table = Table( "history_annotation_association", metadata,
+                                            Column( "id", Integer, primary_key=True ),
+                                            Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                            Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                            Column( "annotation", TEXT ) )
+
+HistoryDatasetAssociationAnnotationAssociation_table = Table( "history_dataset_association_annotation_association", metadata,
+                                                              Column( "id", Integer, primary_key=True ),
+                                                              Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+                                                              Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                              Column( "annotation", TEXT ) )
+
+StoredWorkflowAnnotationAssociation_table = Table( "stored_workflow_annotation_association", metadata,
+                                                   Column( "id", Integer, primary_key=True ),
+                                                   Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+                                                   Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                   Column( "annotation", TEXT ) )
+
+WorkflowStepAnnotationAssociation_table = Table( "workflow_step_annotation_association", metadata,
+                                                 Column( "id", Integer, primary_key=True ),
+                                                 Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+                                                 Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                 Column( "annotation", TEXT ) )
+
+# Tagging tables.
+
+WorkflowStepTagAssociation_table = Table( "workflow_step_tag_association", metadata,
+                                          Column( "id", Integer, primary_key=True ),
+                                          Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
+                                          Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                          Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                          Column( "user_tname", Unicode(255), index=True),
+                                          Column( "value", Unicode(255), index=True),
+                                          Column( "user_value", Unicode(255), index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create history_annotation_association table.
+    try:
+        HistoryAnnotationAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating history_annotation_association table failed: %s" % str( e ) )
+
+    # Create history_dataset_association_annotation_association table.
+    try:
+        HistoryDatasetAssociationAnnotationAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating history_dataset_association_annotation_association table failed: %s" % str( e ) )
+
+    # Create stored_workflow_annotation_association table.
+    try:
+        StoredWorkflowAnnotationAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating stored_workflow_annotation_association table failed: %s" % str( e ) )
+
+    # Create workflow_step_annotation_association table.
+    try:
+        WorkflowStepAnnotationAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating workflow_step_annotation_association table failed: %s" % str( e ) )
+
+    # Create workflow_step_tag_association table.
+    try:
+        WorkflowStepTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating workflow_step_tag_association table failed: %s" % str( e ) )
+
+    haaa = Index( "ix_history_anno_assoc_annotation", HistoryAnnotationAssociation_table.c.annotation, mysql_length=200)
+    hdaaa = Index( "ix_history_dataset_anno_assoc_annotation", HistoryDatasetAssociationAnnotationAssociation_table.c.annotation, mysql_length=200)
+    swaaa = Index( "ix_stored_workflow_ann_assoc_annotation", StoredWorkflowAnnotationAssociation_table.c.annotation, mysql_length=200)
+    wsaaa = Index( "ix_workflow_step_ann_assoc_annotation", WorkflowStepAnnotationAssociation_table.c.annotation, mysql_length=200)
+
+    try:
+        haaa.create()
+        hdaaa.create()
+        swaaa.create()
+        wsaaa.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating annotation indices failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop history_annotation_association table.
+    try:
+        HistoryAnnotationAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping history_annotation_association table failed: %s" % str( e ) )
+
+    # Drop history_dataset_association_annotation_association table.
+    try:
+        HistoryDatasetAssociationAnnotationAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping history_dataset_association_annotation_association table failed: %s" % str( e ) )
+
+    # Drop stored_workflow_annotation_association table.
+    try:
+        StoredWorkflowAnnotationAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping stored_workflow_annotation_association table failed: %s" % str( e ) )
+
+    # Drop workflow_step_annotation_association table.
+    try:
+        WorkflowStepAnnotationAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping workflow_step_annotation_association table failed: %s" % str( e ) )
+
+    # Drop workflow_step_tag_association table.
+    try:
+        WorkflowStepTagAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping workflow_step_tag_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py b/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py
new file mode 100644
index 0000000..bf09189
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0036_add_deleted_column_to_library_template_assoc_tables.py
@@ -0,0 +1,60 @@
+"""
+Migration script to add a deleted column to the following tables:
+library_info_association, library_folder_info_association, library_dataset_dataset_info_association.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def get_false_value(migrate_engine):
+    if migrate_engine.name == 'sqlite':
+        return '0'
+    else:
+        return 'false'
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    LibraryInfoAssociation_table = Table( "library_info_association", metadata, autoload=True )
+    c = Column( "deleted", Boolean, index=True, default=False )
+    c.create( LibraryInfoAssociation_table, index_name='ix_library_info_association_deleted')
+    assert c is LibraryInfoAssociation_table.c.deleted
+    cmd = "UPDATE library_info_association SET deleted = %s" % get_false_value(migrate_engine)
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "deleted to false in library_info_association failed: %s" % ( str( e ) ) )
+
+    LibraryFolderInfoAssociation_table = Table( "library_folder_info_association", metadata, autoload=True )
+    c = Column( "deleted", Boolean, index=True, default=False )
+    c.create( LibraryFolderInfoAssociation_table, index_name='ix_library_folder_info_association_deleted')
+    assert c is LibraryFolderInfoAssociation_table.c.deleted
+    cmd = "UPDATE library_folder_info_association SET deleted = %s" % get_false_value(migrate_engine)
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "deleted to false in library_folder_info_association failed: %s" % ( str( e ) ) )
+
+    LibraryDatasetDatasetInfoAssociation_table = Table( "library_dataset_dataset_info_association", metadata, autoload=True )
+    c = Column( "deleted", Boolean, index=True, default=False )
+    c.create( LibraryDatasetDatasetInfoAssociation_table, index_name='ix_library_dataset_dataset_info_association_deleted')
+    assert c is LibraryDatasetDatasetInfoAssociation_table.c.deleted
+    cmd = "UPDATE library_dataset_dataset_info_association SET deleted = %s" % get_false_value(migrate_engine)
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "deleted to false in library_dataset_dataset_info_association failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
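get_false_value() exists because the supported dialects spell boolean literals differently: SQLite stores booleans as integers and, in the versions contemporary with this code, does not recognize the keywords true/false, while PostgreSQL and MySQL accept them. A sketch generalizing the helper to both truth values (boolean_literal is a hypothetical name, not part of the migration):

    def boolean_literal(value, dialect_name):
        # Hypothetical generalization of get_false_value(): SQLite wants 0/1,
        # the other supported dialects accept the keywords true/false.
        if dialect_name == "sqlite":
            return "1" if value else "0"
        return "true" if value else "false"

    # e.g. building the UPDATE used above:
    cmd = "UPDATE library_info_association SET deleted = %s" % boolean_literal(False, "sqlite")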
diff --git a/lib/galaxy/model/migrate/versions/0037_samples_library.py b/lib/galaxy/model/migrate/versions/0037_samples_library.py
new file mode 100644
index 0000000..3f7e541
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0037_samples_library.py
@@ -0,0 +1,151 @@
+"""
+This migration script removes the library_id & folder_id fields in the 'request' table and
+adds the same to the 'sample' table. This also adds a 'datatx_info' column to the request_type table
+to store the sequencer login information. Finally, this adds a 'dataset_files' column to
+the sample table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+from galaxy.model.custom_types import JSONType, TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    # Load existing tables
+    metadata.reflect()
+    # request_type table
+    try:
+        RequestType_table = Table( "request_type", metadata, autoload=True )
+    except NoSuchTableError:
+        RequestType_table = None
+        log.debug( "Failed loading table request_type" )
+    if RequestType_table is not None:
+        # Add the datatx_info column in 'request_type' table
+        try:
+            col = Column( "datatx_info", JSONType() )
+            col.create( RequestType_table )
+            assert col is RequestType_table.c.datatx_info
+        except Exception as e:
+            log.debug( "Adding column 'datatx_info' to request_type table failed: %s" % ( str( e ) ) )
+    # request table
+    try:
+        Request_table = Table( "request", metadata, autoload=True )
+    except NoSuchTableError:
+        Request_table = None
+        log.debug( "Failed loading table request" )
+    if Request_table is not None:
+        # Delete library_id & folder_id columns in the table 'request'.
+        # If Galaxy is running on SQLite, delete and recreate the table;
+        # otherwise drop the specific columns.
+        if migrate_engine.name == 'sqlite':
+            # create a temporary table
+            RequestTemp_table = Table( 'request_temp', metadata,
+                                       Column( "id", Integer, primary_key=True),
+                                       Column( "create_time", DateTime, default=now ),
+                                       Column( "update_time", DateTime, default=now, onupdate=now ),
+                                       Column( "name", TrimmedString( 255 ), nullable=False ),
+                                       Column( "desc", TEXT ),
+                                       Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+                                       Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
+                                       Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                       Column( "deleted", Boolean, index=True, default=False ) )
+            try:
+                RequestTemp_table.create()
+            except Exception as e:
+                log.debug( "Creating request_temp table failed: %s" % str( e ) )
+            # insert all the rows from the request table into the request_temp table
+            cmd = "INSERT INTO request_temp SELECT id, create_time, " + \
+                "update_time, name, desc, form_values_id, request_type_id, " + \
+                "user_id, deleted FROM request;"
+            migrate_engine.execute( cmd )
+            # delete the 'request' table
+            try:
+                Request_table.drop()
+            except Exception as e:
+                log.debug( "Dropping request table failed: %s" % str( e ) )
+            # rename table request_temp to request
+            cmd = "ALTER TABLE request_temp RENAME TO request"
+            migrate_engine.execute( cmd )
+        else:
+            # Delete the library_id column in 'request' table
+            try:
+                Request_table.c.library_id.drop()
+            except Exception as e:
+                log.debug( "Deleting column 'library_id' to request table failed: %s" % ( str( e ) ) )
+            # Delete the folder_id column in 'request' table
+            try:
+                Request_table.c.folder_id.drop()
+            except Exception as e:
+                log.debug( "Deleting column 'folder_id' to request table failed: %s" % ( str( e ) ) )
+    # sample table
+    try:
+        Sample_table = Table( "sample", metadata, autoload=True )
+    except NoSuchTableError:
+        Sample_table = None
+        log.debug( "Failed loading table sample" )
+    if Sample_table is not None:
+        # Add the dataset_files column in 'sample' table
+        try:
+            col = Column( "dataset_files", JSONType() )
+            col.create( Sample_table )
+            assert col is Sample_table.c.dataset_files
+        except Exception as e:
+            log.debug( "Adding column 'dataset_files' to sample table failed: %s" % ( str( e ) ) )
+        # library table
+        try:
+            Library_table = Table( "library", metadata, autoload=True )
+        except NoSuchTableError:
+            Library_table = None
+            log.debug( "Failed loading table library" )
+        if Library_table is not None:
+            # Add the library_id column in 'sample' table
+            try:
+                if migrate_engine.name != 'sqlite':
+                    col = Column( "library_id", Integer, ForeignKey( "library.id" ), index=True )
+                else:
+                    col = Column( "library_id", Integer, index=True )
+                col.create( Sample_table, index_name='ix_sample_library_id')
+                assert col is Sample_table.c.library_id
+            except Exception as e:
+                log.debug( "Adding column 'library_id' to sample table failed: %s" % ( str( e ) ) )
+        # library_folder table
+        try:
+            LibraryFolder_table = Table( "library_folder", metadata, autoload=True )
+        except NoSuchTableError:
+            LibraryFolder_table = None
+            log.debug( "Failed loading table library_folder" )
+        if LibraryFolder_table is not None:
+            # Add the folder_id column in 'sample' table
+            try:
+                if migrate_engine.name != 'sqlite':
+                    col = Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True )
+                else:
+                    col = Column( "folder_id", Integer, index=True )
+                col.create( Sample_table, index_name='ix_sample_library_folder_id')
+                assert col is Sample_table.c.folder_id
+            except Exception as e:
+                log.debug( "Adding column 'folder_id' to sample table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
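The SQLite branch above uses the classic table-rebuild workaround: SQLite (before 3.35) has no ALTER TABLE ... DROP COLUMN, so the script copies the surviving columns into a temporary table, drops the original, and renames. A self-contained sketch of the pattern with a simplified schema, for illustration only:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE request (id INTEGER PRIMARY KEY, name TEXT, library_id INTEGER);
        INSERT INTO request VALUES (1, 'demo', 7);
    """)
    # Rebuild without library_id: copy, drop, rename.
    conn.executescript("""
        CREATE TABLE request_temp (id INTEGER PRIMARY KEY, name TEXT);
        INSERT INTO request_temp SELECT id, name FROM request;
        DROP TABLE request;
        ALTER TABLE request_temp RENAME TO request;
    """)
    print([row[1] for row in conn.execute("PRAGMA table_info(request)")])  # ['id', 'name']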
diff --git a/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py b/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py
new file mode 100644
index 0000000..e87a1b7
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0038_add_inheritable_column_to_library_template_assoc_tables.py
@@ -0,0 +1,81 @@
+"""
+Migration script to add an inheritable column to the following tables:
+library_info_association, library_folder_info_association.
+Also, on SQLite, check whether the previous migration script deleted the
+request table and, if so, restore it.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def get_false_value(migrate_engine):
+    if migrate_engine.name == 'sqlite':
+        return '0'
+    else:
+        return 'false'
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+
+    #
+    # On SQLite, check whether the previous migration script deleted the
+    # request table and, if so, restore it.
+    #
+    if migrate_engine.name == 'sqlite':
+        if not migrate_engine.has_table('request'):
+            # load the tables referenced in foreign keys
+            metadata.reflect(only=['form_values', 'request_type', 'galaxy_user'])
+            # recreate the request table
+            Request_table = Table( 'request', metadata,
+                                   Column( "id", Integer, primary_key=True),
+                                   Column( "create_time", DateTime, default=now ),
+                                   Column( "update_time", DateTime, default=now, onupdate=now ),
+                                   Column( "name", TrimmedString( 255 ), nullable=False ),
+                                   Column( "desc", TEXT ),
+                                   Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+                                   Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
+                                   Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                   Column( "deleted", Boolean, index=True, default=False ) )
+            try:
+                Request_table.create()
+            except Exception as e:
+                log.debug( "Creating request table failed: %s" % str( e ) )
+
+    metadata.reflect()
+
+    LibraryInfoAssociation_table = Table( "library_info_association", metadata, autoload=True )
+    c = Column( "inheritable", Boolean, index=True, default=False )
+    c.create( LibraryInfoAssociation_table, index_name='ix_library_info_association_inheritable')
+    assert c is LibraryInfoAssociation_table.c.inheritable
+    cmd = "UPDATE library_info_association SET inheritable = %s" % get_false_value(migrate_engine)
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Setting value of column inheritable to false in library_info_association failed: %s" % ( str( e ) ) )
+
+    LibraryFolderInfoAssociation_table = Table( "library_folder_info_association", metadata, autoload=True )
+    c = Column( "inheritable", Boolean, index=True, default=False )
+    c.create( LibraryFolderInfoAssociation_table, index_name='ix_library_folder_info_association_inheritable')
+    assert c is LibraryFolderInfoAssociation_table.c.inheritable
+    cmd = "UPDATE library_folder_info_association SET inheritable = %s" % get_false_value(migrate_engine)
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Setting value of column inheritable to false in library_folder_info_association failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py b/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py
new file mode 100644
index 0000000..72a7303
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0039_add_synopsis_column_to_library_table.py
@@ -0,0 +1,27 @@
+"""
+Migration script to add a synopsis column to the library table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    Library_table = Table( "library", metadata, autoload=True )
+    c = Column( "synopsis", TEXT )
+    c.create( Library_table )
+    assert c is Library_table.c.synopsis
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0040_page_annotations.py b/lib/galaxy/model/migrate/versions/0040_page_annotations.py
new file mode 100644
index 0000000..9fe0e05
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0040_page_annotations.py
@@ -0,0 +1,42 @@
+"""
+Migration script to create a table for annotating pages.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+PageAnnotationAssociation_table = Table( "page_annotation_association", metadata,
+                                         Column( "id", Integer, primary_key=True ),
+                                         Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+                                         Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                         Column( "annotation", TEXT, index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create page_annotation_association table.
+    try:
+        PageAnnotationAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating page_annotation_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop page_annotation_association table.
+    try:
+        PageAnnotationAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping page_annotation_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py b/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py
new file mode 100644
index 0000000..a1a4179
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0041_workflow_invocation.py
@@ -0,0 +1,50 @@
+"""
+Migration script to create tables for tracking workflow invocations.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+logging.basicConfig( level=logging.DEBUG )
+log = logging.getLogger( __name__ )
+now = datetime.datetime.utcnow
+metadata = MetaData()
+
+WorkflowInvocation_table = Table( "workflow_invocation", metadata,
+                                  Column( "id", Integer, primary_key=True ),
+                                  Column( "create_time", DateTime, default=now ),
+                                  Column( "update_time", DateTime, default=now, onupdate=now ),
+                                  Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ) )
+
+WorkflowInvocationStep_table = Table( "workflow_invocation_step", metadata,
+                                      Column( "id", Integer, primary_key=True ),
+                                      Column( "create_time", DateTime, default=now ),
+                                      Column( "update_time", DateTime, default=now, onupdate=now ),
+                                      Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True, nullable=False ),
+                                      Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False ),
+                                      Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=False ) )
+
+tables = [ WorkflowInvocation_table, WorkflowInvocationStep_table ]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    for table in tables:
+        try:
+            table.create()
+        except Exception:
+            log.warning( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    for table in tables:
+        table.drop()
diff --git a/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py b/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py
new file mode 100644
index 0000000..a5f0cf6
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0042_workflow_invocation_fix.py
@@ -0,0 +1,56 @@
+"""
+Drop and re-add the workflow invocation tables, allowing null job references.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+logging.basicConfig( level=logging.DEBUG )
+log = logging.getLogger( __name__ )
+now = datetime.datetime.utcnow
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # 1) Drop
+    for table_name in [ "workflow_invocation_step", "workflow_invocation" ]:
+        try:
+            t = Table( table_name, metadata, autoload=True )
+            t.drop()
+            metadata.remove(t)
+        except Exception:
+            log.exception( "Failed to drop table '%s', ignoring (might result in wrong schema)" % table_name )
+
+    # 2) Readd
+    WorkflowInvocation_table = Table( "workflow_invocation", metadata,
+                                      Column( "id", Integer, primary_key=True ),
+                                      Column( "create_time", DateTime, default=now ),
+                                      Column( "update_time", DateTime, default=now, onupdate=now ),
+                                      Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ) )
+
+    WorkflowInvocationStep_table = Table( "workflow_invocation_step", metadata,
+                                          Column( "id", Integer, primary_key=True ),
+                                          Column( "create_time", DateTime, default=now ),
+                                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                                          Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True, nullable=False ),
+                                          Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False ),
+                                          Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=True ) )
+
+    for table in [ WorkflowInvocation_table, WorkflowInvocationStep_table ]:
+        try:
+            table.create()
+        except Exception:
+            log.exception( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # No downgrade
+    pass
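Rebuilding both tables is the portable way to relax the NOT NULL constraint on job_id, since SQLite cannot change an existing column's constraints in place. For comparison, on engines that do support it, sqlalchemy-migrate's changeset extension offers an in-place form; a sketch under that assumption, not what this migration does:

    from sqlalchemy import MetaData, Table

    metadata = MetaData(bind=migrate_engine)  # assumes a bound engine, as in the scripts above
    step_table = Table("workflow_invocation_step", metadata, autoload=True)
    step_table.c.job_id.alter(nullable=True)  # sqlalchemy-migrate's column alter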
diff --git a/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py
new file mode 100644
index 0000000..642d2e9
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0043_visualization_sharing_tagging_annotating.py
@@ -0,0 +1,197 @@
+"""
+Migration script to create tables and columns for sharing visualizations.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, ForeignKey, Index, Integer, MetaData, Table, TEXT, Unicode
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Sharing visualizations.
+
+VisualizationUserShareAssociation_table = Table( "visualization_user_share_association", metadata,
+                                                 Column( "id", Integer, primary_key=True ),
+                                                 Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+                                                 Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+# Tagging visualizations.
+
+VisualizationTagAssociation_table = Table( "visualization_tag_association", metadata,
+                                           Column( "id", Integer, primary_key=True ),
+                                           Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+                                           Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                           Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                           Column( "user_tname", Unicode(255), index=True),
+                                           Column( "value", Unicode(255), index=True),
+                                           Column( "user_value", Unicode(255), index=True) )
+
+# Annotating visualizations.
+
+VisualizationAnnotationAssociation_table = Table( "visualization_annotation_association", metadata,
+                                                  Column( "id", Integer, primary_key=True ),
+                                                  Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+                                                  Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                  Column( "annotation", TEXT, index=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    Visualiation_table = Table( "visualization", metadata, autoload=True )
+    # Create visualization_user_share_association table.
+    try:
+        VisualizationUserShareAssociation_table.create()
+    except Exception as e:
+        print("Creating visualization_user_share_association table failed: %s" % str( e ))
+        log.debug( "Creating visualization_user_share_association table failed: %s" % str( e ) )
+
+    # Get default boolean value 'false' so that columns can be initialized.
+    if migrate_engine.name in ['mysql', 'sqlite']:
+        default_false = "0"
+    elif migrate_engine.name in ['postgres', 'postgresql']:
+        default_false = "false"
+
+    # Add columns & create indices for supporting sharing to visualization table.
+    deleted_column = Column( "deleted", Boolean, default=False, index=True )
+    importable_column = Column( "importable", Boolean, default=False, index=True )
+    slug_column = Column( "slug", TEXT )
+    published_column = Column( "published", Boolean, index=True )
+
+    try:
+        # Add column.
+        deleted_column.create( Visualiation_table, index_name="ix_visualization_deleted")
+        assert deleted_column is Visualiation_table.c.deleted
+
+        # Fill column with default value.
+        cmd = "UPDATE visualization SET deleted = %s" % default_false
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        print("Adding deleted column to visualization table failed: %s" % str( e ))
+        log.debug( "Adding deleted column to visualization table failed: %s" % str( e ) )
+
+    try:
+        # Add column.
+        importable_column.create( Visualiation_table, index_name='ix_visualization_importable')
+        assert importable_column is Visualiation_table.c.importable
+
+        # Fill column with default value.
+        cmd = "UPDATE visualization SET importable = %s" % default_false
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        print("Adding importable column to visualization table failed: %s" % str( e ))
+        log.debug( "Adding importable column to visualization table failed: %s" % str( e ) )
+
+    try:
+        slug_column.create( Visualiation_table )
+        assert slug_column is Visualiation_table.c.slug
+    except Exception as e:
+        print("Adding slug column to visualization table failed: %s" % str( e ))
+        log.debug( "Adding slug column to visualization table failed: %s" % str( e ) )
+
+    try:
+        if migrate_engine.name == 'mysql':
+            # Have to create index manually.
+            cmd = "CREATE INDEX ix_visualization_slug ON visualization ( slug ( 100 ) )"
+            migrate_engine.execute( cmd )
+        else:
+            i = Index( "ix_visualization_slug", Visualiation_table.c.slug )
+            i.create()
+    except Exception as e:
+        print("Adding index 'ix_visualization_slug' failed: %s" % str( e ))
+        log.debug( "Adding index 'ix_visualization_slug' failed: %s" % str( e ) )
+
+    try:
+        # Add column.
+        published_column.create( Visualiation_table, index_name='ix_visualization_published')
+        assert published_column is Visualiation_table.c.published
+
+        # Fill column with default value.
+        cmd = "UPDATE visualization SET published = %s" % default_false
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        print("Adding published column to visualization table failed: %s" % str( e ))
+        log.debug( "Adding published column to visualization table failed: %s" % str( e ) )
+
+    # Create visualization_tag_association table.
+    try:
+        VisualizationTagAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating visualization_tag_association table failed: %s" % str( e ) )
+
+    # Create visualization_annotation_association table.
+    try:
+        VisualizationAnnotationAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating visualization_annotation_association table failed: %s" % str( e ) )
+
+    # Need to create index for visualization annotation manually to deal with errors.
+    try:
+        if migrate_engine.name == 'mysql':
+            # Have to create index manually.
+            cmd = "CREATE INDEX ix_visualization_annotation_association_annotation ON visualization_annotation_association ( annotation ( 100 ) )"
+            migrate_engine.execute( cmd )
+        else:
+            i = Index( "ix_visualization_annotation_association_annotation", VisualizationAnnotationAssociation_table.c.annotation )
+            i.create()
+    except Exception as e:
+        print("Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e ))
+        log.debug( "Adding index 'ix_visualization_annotation_association_annotation' failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    Visualiation_table = Table( "visualization", metadata, autoload=True )
+    # Drop visualization_user_share_association table.
+    try:
+        VisualizationUserShareAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping visualization_user_share_association table failed: %s" % str( e ) )
+
+    # Drop columns for supporting sharing from visualization table.
+    try:
+        Visualiation_table.c.deleted.drop()
+    except Exception as e:
+        print("Dropping deleted column from visualization table failed: %s" % str( e ))
+        log.debug( "Dropping deleted column from visualization table failed: %s" % str( e ) )
+
+    try:
+        Visualiation_table.c.importable.drop()
+    except Exception as e:
+        print("Dropping importable column from visualization table failed: %s" % str( e ))
+        log.debug( "Dropping importable column from visualization table failed: %s" % str( e ) )
+
+    try:
+        Visualiation_table.c.slug.drop()
+    except Exception as e:
+        print("Dropping slug column from visualization table failed: %s" % str( e ))
+        log.debug( "Dropping slug column from visualization table failed: %s" % str( e ) )
+
+    try:
+        Visualiation_table.c.published.drop()
+    except Exception as e:
+        print("Dropping published column from visualization table failed: %s" % str( e ))
+        log.debug( "Dropping published column from visualization table failed: %s" % str( e ) )
+
+    # Drop visualization_tag_association table.
+    try:
+        VisualizationTagAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping visualization_tag_association table failed: %s" % str( e ) )
+
+    # Drop visualization_annotation_association table.
+    try:
+        VisualizationAnnotationAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping visualization_annotation_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py b/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py
new file mode 100644
index 0000000..825a824
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0044_add_notify_column_to_request_table.py
@@ -0,0 +1,27 @@
+"""
+Migration script to add a notify column to the request table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    Request_table = Table( "request", metadata, autoload=True )
+    c = Column( "notify", Boolean, default=False  )
+    c.create( Request_table )
+    assert c is Request_table.c.notify
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py b/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py
new file mode 100644
index 0000000..72b0cd7
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0045_request_type_permissions_table.py
@@ -0,0 +1,36 @@
+"""
+Migration script to add the request_type_permissions table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+RequestTypePermissions_table = Table( "request_type_permissions", metadata,
+                                      Column( "id", Integer, primary_key=True ),
+                                      Column( "create_time", DateTime, default=now ),
+                                      Column( "update_time", DateTime, default=now, onupdate=now ),
+                                      Column( "action", TEXT ),
+                                      Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), nullable=True, index=True ),
+                                      Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        RequestTypePermissions_table.create()
+    except Exception as e:
+        log.debug( "Creating request_type_permissions table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0046_post_job_actions.py b/lib/galaxy/model/migrate/versions/0046_post_job_actions.py
new file mode 100644
index 0000000..e282800
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0046_post_job_actions.py
@@ -0,0 +1,49 @@
+"""
+Migration script to create tables for handling post-job actions.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType
+
+logging.basicConfig( level=logging.DEBUG )
+log = logging.getLogger( __name__ )
+now = datetime.datetime.utcnow
+metadata = MetaData()
+
+PostJobAction_table = Table("post_job_action", metadata,
+                            Column("id", Integer, primary_key=True),
+                            Column("workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False),
+                            Column("action_type", String(255), nullable=False),
+                            Column("output_name", String(255), nullable=True),
+                            Column("action_arguments", JSONType, nullable=True))
+
+# PostJobActionAssociation_table = Table("post_job_action_association", metadata,
+#     Column("id", Integer, primary_key=True),
+#     Column("post_job_action_id", Integer, ForeignKey("post_job_action.id"), index=True, nullable=False),
+#     Column("job_id", Integer, ForeignKey("job.id"), index=True, nullable=False))
+
+tables = [PostJobAction_table]  # , PostJobActionAssociation_table]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    for table in tables:
+        try:
+            table.create()
+        except Exception:
+            log.warning( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    for table in tables:
+        table.drop()
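The action_arguments column uses Galaxy's JSONType, which serializes Python structures to the database transparently. As a sketch only, inserting a row through SQLAlchemy core might look like this; the action type and argument values are hypothetical, not taken from this migration:

    conn = migrate_engine.connect()
    conn.execute( PostJobAction_table.insert().values(
        workflow_step_id=1,                           # hypothetical example value
        action_type="RenameDatasetAction",            # hypothetical example value
        output_name="out_file1",                      # hypothetical example value
        action_arguments={"newname": "renamed.dat"},  # stored via JSONType
    ) )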
diff --git a/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py b/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py
new file mode 100644
index 0000000..846ae8c
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0047_job_table_user_id_column.py
@@ -0,0 +1,84 @@
+"""
+Add a user_id column to the job table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+    except NoSuchTableError:
+        Job_table = None
+        log.debug( "Failed loading table job" )
+    if Job_table is not None:
+
+        if migrate_engine.name != 'sqlite':
+            try:
+                col = Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True )
+                col.create( Job_table, index_name='ix_job_user_id' )
+                assert col is Job_table.c.user_id
+            except Exception as e:
+                log.debug( "Adding column 'user_id' to job table failed: %s" % ( str( e ) ) )
+        else:
+            try:
+                col = Column( "user_id", Integer, nullable=True)
+                col.create( Job_table )
+                assert col is Job_table.c.user_id
+            except Exception as e:
+                log.debug( "Adding column 'user_id' to job table failed: %s" % ( str( e ) ) )
+        try:
+            cmd = "SELECT job.id AS galaxy_job_id, " \
+                + "galaxy_session.user_id AS galaxy_user_id " \
+                + "FROM job " \
+                + "JOIN galaxy_session ON job.session_id = galaxy_session.id;"
+            job_users = migrate_engine.execute( cmd ).fetchall()
+            print("Updating user_id column in job table for ", len( job_users ), " rows...")
+            print("")
+            update_count = 0
+            for row in job_users:
+                if row.galaxy_user_id:
+                    cmd = "UPDATE job SET user_id = %d WHERE id = %d" % ( int( row.galaxy_user_id ), int( row.galaxy_job_id ) )
+                    update_count += 1
+                    migrate_engine.execute( cmd )
+            print("Updated the user_id column for ", update_count, " rows in the job table.  ")
+            print(len( job_users ) - update_count, " rows have no user_id since the value was NULL in the galaxy_session table.")
+            print("")
+        except Exception as e:
+            log.debug( "Updating job.user_id column failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+    except NoSuchTableError:
+        Job_table = None
+        log.debug( "Failed loading table job" )
+    if Job_table:
+        try:
+            col = Job_table.c.user_id
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'user_id' from job table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py b/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py
new file mode 100644
index 0000000..e4003f9
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0048_dataset_instance_state_column.py
@@ -0,0 +1,65 @@
+"""
+Add a state column to the history_dataset_association and library_dataset_dataset_association tables.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+DATASET_INSTANCE_TABLE_NAMES = [ 'history_dataset_association', 'library_dataset_dataset_association' ]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    dataset_instance_tables = []
+    for table_name in DATASET_INSTANCE_TABLE_NAMES:
+        try:
+            dataset_instance_tables.append( ( table_name, Table( table_name, metadata, autoload=True ) ) )
+        except NoSuchTableError:
+            log.debug( "Failed loading table %s" % table_name )
+    if dataset_instance_tables:
+        for table_name, dataset_instance_table in dataset_instance_tables:
+            index_name = "ix_%s_state" % table_name
+            try:
+                col = Column( "state", TrimmedString( 64 ), index=True, nullable=True )
+                col.create( dataset_instance_table, index_name=index_name)
+                assert col is dataset_instance_table.c.state
+            except Exception as e:
+                log.debug( "Adding column 'state' to %s table failed: %s" % ( table_name, str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    dataset_instance_tables = []
+    for table_name in DATASET_INSTANCE_TABLE_NAMES:
+        try:
+            dataset_instance_tables.append( ( table_name, Table( table_name, metadata, autoload=True ) ) )
+        except NoSuchTableError:
+            log.debug( "Failed loading table %s" % table_name )
+    if dataset_instance_tables:
+        for table_name, dataset_instance_table in dataset_instance_tables:
+            try:
+                col = dataset_instance_table.c.state
+                col.drop()
+            except Exception as e:
+                log.debug( "Dropping column 'state' from %s table failed: %s" % ( table_name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0049_api_keys_table.py b/lib/galaxy/model/migrate/versions/0049_api_keys_table.py
new file mode 100644
index 0000000..6bfa9fa
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0049_api_keys_table.py
@@ -0,0 +1,41 @@
+"""
+Migration script to add the api_keys table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+APIKeys_table = Table( "api_keys", metadata,
+                       Column( "id", Integer, primary_key=True ),
+                       Column( "create_time", DateTime, default=now ),
+                       Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                       Column( "key", TrimmedString( 32 ), index=True, unique=True ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        APIKeys_table.create()
+    except Exception as e:
+        log.debug( "Creating api_keys table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    try:
+        APIKeys_table.drop()
+    except Exception as e:
+        log.debug( "Dropping api_keys table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py b/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py
new file mode 100644
index 0000000..4de62c8
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0050_drop_cloud_tables.py
@@ -0,0 +1,149 @@
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def display_migration_details():
+    print()
+    print("========================================")
+    print("This script drops tables that were associated with the old Galaxy Cloud functionality.")
+    print("========================================")
+
+
+CloudImage_table = Table( "cloud_image", metadata,
+                          Column( "id", Integer, primary_key=True ),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "provider_type", TEXT ),
+                          Column( "image_id", TEXT, nullable=False ),
+                          Column( "manifest", TEXT ),
+                          Column( "state", TEXT ),
+                          Column( "architecture", TEXT ),
+                          Column( "deleted", Boolean, default=False ) )
+
+""" UserConfiguredInstance (UCI) table """
+UCI_table = Table( "cloud_uci", metadata,
+                   Column( "id", Integer, primary_key=True ),
+                   Column( "create_time", DateTime, default=now ),
+                   Column( "update_time", DateTime, default=now, onupdate=now ),
+                   Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                   Column( "credentials_id", Integer, ForeignKey( "cloud_user_credentials.id" ), index=True ),
+                   Column( "key_pair_name", TEXT ),
+                   Column( "key_pair_material", TEXT ),
+                   Column( "name", TEXT ),
+                   Column( "state", TEXT ),
+                   Column( "error", TEXT ),
+                   Column( "total_size", Integer ),
+                   Column( "launch_time", DateTime ),
+                   Column( "deleted", Boolean, default=False ) )
+
+CloudInstance_table = Table( "cloud_instance", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "launch_time", DateTime ),
+                             Column( "stop_time", DateTime ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                             Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True ),
+                             Column( "type", TEXT ),
+                             Column( "reservation_id", TEXT ),
+                             Column( "instance_id", TEXT ),
+                             Column( "mi_id", Integer, ForeignKey( "cloud_image.id" ), index=True ),
+                             Column( "state", TEXT ),
+                             Column( "error", TEXT ),
+                             Column( "public_dns", TEXT ),
+                             Column( "private_dns", TEXT ),
+                             Column( "security_group", TEXT ),
+                             Column( "availability_zone", TEXT ) )
+
+CloudStore_table = Table( "cloud_store", metadata,
+                          Column( "id", Integer, primary_key=True ),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "attach_time", DateTime ),
+                          Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                          Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True, nullable=False ),
+                          Column( "volume_id", TEXT ),
+                          Column( "size", Integer, nullable=False ),
+                          Column( "availability_zone", TEXT ),
+                          Column( "inst_id", Integer, ForeignKey( "cloud_instance.id" ) ),
+                          Column( "status", TEXT ),
+                          Column( "device", TEXT ),
+                          Column( "space_consumed", Integer ),
+                          Column( "error", TEXT ),
+                          Column( "deleted", Boolean, default=False ) )
+
+CloudSnapshot_table = Table( "cloud_snapshot", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                             Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True ),
+                             Column( "store_id", Integer, ForeignKey( "cloud_store.id" ), index=True, nullable=False ),
+                             Column( "snapshot_id", TEXT ),
+                             Column( "status", TEXT ),
+                             Column( "description", TEXT ),
+                             Column( "error", TEXT ),
+                             Column( "deleted", Boolean, default=False ) )
+
+CloudUserCredentials_table = Table( "cloud_user_credentials", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                                    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                                    Column( "provider_id", Integer, ForeignKey( "cloud_provider.id" ), index=True, nullable=False ),
+                                    Column( "name", TEXT ),
+                                    Column( "access_key", TEXT ),
+                                    Column( "secret_key", TEXT ),
+                                    Column( "deleted", Boolean, default=False ) )
+
+CloudProvider_table = Table( "cloud_provider", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                             Column( "type", TEXT, nullable=False ),
+                             Column( "name", TEXT ),
+                             Column( "region_connection", TEXT ),
+                             Column( "region_name", TEXT ),
+                             Column( "region_endpoint", TEXT ),
+                             Column( "is_secure", Boolean ),
+                             Column( "host", TEXT ),
+                             Column( "port", Integer ),
+                             Column( "proxy", TEXT ),
+                             Column( "proxy_port", TEXT ),
+                             Column( "proxy_user", TEXT ),
+                             Column( "proxy_pass", TEXT ),
+                             Column( "debug", Integer ),
+                             Column( "https_connection_factory", TEXT ),
+                             Column( "path", TEXT ),
+                             Column( "deleted", Boolean, default=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    display_migration_details()
+    # Load existing tables
+    metadata.reflect()
+    try:
+        CloudSnapshot_table.drop()
+        CloudStore_table.drop()
+        CloudInstance_table.drop()
+        CloudImage_table.drop()
+        UCI_table.drop()
+        CloudUserCredentials_table.drop()
+        CloudProvider_table.drop()
+    except Exception as e:
+        log.debug( "Dropping cloud tables failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
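Note the drop order: child tables (snapshots, stores, instances) are dropped before the tables they reference, so foreign keys never block a drop. A sketch of the same teardown with checkfirst=True, which skips tables that are already absent instead of relying on the broad try/except above:

    for table in ( CloudSnapshot_table, CloudStore_table, CloudInstance_table,
                   CloudImage_table, UCI_table, CloudUserCredentials_table,
                   CloudProvider_table ):
        table.drop( migrate_engine, checkfirst=True )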
diff --git a/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py b/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py
new file mode 100644
index 0000000..2c88a70
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0051_imported_col_for_jobs_table.py
@@ -0,0 +1,49 @@
+"""
+Migration script to add imported column for jobs table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create and initialize imported column in job table.
+    Jobs_table = Table( "job", metadata, autoload=True )
+    c = Column( "imported", Boolean, default=False, index=True )
+    try:
+        # Create
+        c.create( Jobs_table, index_name="ix_job_imported")
+        assert c is Jobs_table.c.imported
+
+        # Initialize.
+        if migrate_engine.name in ['mysql', 'sqlite']:
+            default_false = "0"
+        elif migrate_engine.name in ['postgres', 'postgresql']:
+            default_false = "false"
+        migrate_engine.execute( "UPDATE job SET imported=%s" % default_false )
+
+    except Exception as e:
+        print("Adding imported column to job table failed: %s" % str( e ))
+        log.debug( "Adding imported column to job table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop imported column from job table.
+    Jobs_table = Table( "job", metadata, autoload=True )
+    try:
+        Jobs_table.c.imported.drop()
+    except Exception as e:
+        print("Dropping column imported from job table failed: %s" % str( e ))
+        log.debug( "Dropping column imported from job table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py b/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py
new file mode 100644
index 0000000..d34c6f0
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py
@@ -0,0 +1,95 @@
+"""
+Migration script to add the sample_dataset table and remove the 'dataset_files' column
+from the 'sample' table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+from json import loads
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def nextval( migrate_engine, table, col='id' ):
+    if migrate_engine.name in ['postgres', 'postgresql']:
+        return "nextval('%s_%s_seq')" % ( table, col )
+    elif migrate_engine.name in ['mysql', 'sqlite']:
+        return "null"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def localtimestamp(migrate_engine):
+    if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+        return "LOCALTIMESTAMP"
+    elif migrate_engine.name == 'sqlite':
+        return "current_date || ' ' || current_time"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+SampleDataset_table = Table('sample_dataset', metadata,
+                            Column( "id", Integer, primary_key=True ),
+                            Column( "create_time", DateTime, default=now ),
+                            Column( "update_time", DateTime, default=now, onupdate=now ),
+                            Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
+                            Column( "name", TrimmedString( 255 ), nullable=False ),
+                            Column( "file_path", TrimmedString( 255 ), nullable=False ),
+                            Column( "status", TrimmedString( 255 ), nullable=False ),
+                            Column( "error_msg", TEXT ),
+                            Column( "size", TrimmedString( 255 ) ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        SampleDataset_table.create()
+    except Exception as e:
+        log.debug( "Creating sample_dataset table failed: %s" % str( e ) )
+
+    cmd = "SELECT id, dataset_files FROM sample"
+    result = migrate_engine.execute( cmd )
+    for r in result:
+        sample_id = r[0]
+        if r[1]:
+            dataset_files = loads(r[1])
+            for df in dataset_files:
+                if isinstance(df, dict):
+                    cmd = "INSERT INTO sample_dataset VALUES (%s, %s, %s, %s, '%s', '%s', '%s', '%s', '%s')"
+                    cmd = cmd % ( nextval(migrate_engine, 'sample_dataset'),
+                                  localtimestamp(migrate_engine),
+                                  localtimestamp(migrate_engine),
+                                  str(sample_id),
+                                  df.get('name', ''),
+                                  df.get('filepath', ''),
+                                  df.get('status', '').replace('"', '').replace("'", ""),
+                                  "",
+                                  df.get('size', '').replace('"', '').replace("'", "").replace(df.get('filepath', ''), '').strip() )
+                    migrate_engine.execute( cmd )
+
+    # Delete the dataset_files column in the Sample table
+    try:
+        Sample_table = Table( "sample", metadata, autoload=True )
+    except NoSuchTableError:
+        Sample_table = None
+        log.debug( "Failed loading table sample" )
+    if Sample_table is not None:
+        try:
+            Sample_table.c.dataset_files.drop()
+        except Exception as e:
+            log.debug( "Deleting column 'dataset_files' from the 'sample' table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
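For reference, what the two helpers expand to per engine; engine_pg and engine_sqlite stand in for engines whose .name is 'postgresql' and 'sqlite', illustrative only:

    nextval( engine_pg, 'sample_dataset' )      # -> "nextval('sample_dataset_id_seq')"
    nextval( engine_sqlite, 'sample_dataset' )  # -> "null"
    localtimestamp( engine_pg )                 # -> "LOCALTIMESTAMP"
    localtimestamp( engine_sqlite )             # -> "current_date || ' ' || current_time"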
diff --git a/lib/galaxy/model/migrate/versions/0053_item_ratings.py b/lib/galaxy/model/migrate/versions/0053_item_ratings.py
new file mode 100644
index 0000000..83db5b7
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0053_item_ratings.py
@@ -0,0 +1,134 @@
+"""
+Migration script to create tables for rating histories, datasets, workflows, pages, and visualizations.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Rating tables.
+
+HistoryRatingAssociation_table = Table( "history_rating_association", metadata,
+                                        Column( "id", Integer, primary_key=True ),
+                                        Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                        Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                        Column( "rating", Integer, index=True) )
+
+HistoryDatasetAssociationRatingAssociation_table = Table( "history_dataset_association_rating_association", metadata,
+                                                          Column( "id", Integer, primary_key=True ),
+                                                          Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+                                                          Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                          Column( "rating", Integer, index=True) )
+
+StoredWorkflowRatingAssociation_table = Table( "stored_workflow_rating_association", metadata,
+                                               Column( "id", Integer, primary_key=True ),
+                                               Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
+                                               Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                               Column( "rating", Integer, index=True) )
+
+PageRatingAssociation_table = Table( "page_rating_association", metadata,
+                                     Column( "id", Integer, primary_key=True ),
+                                     Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
+                                     Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                     Column( "rating", Integer, index=True) )
+
+VisualizationRatingAssociation_table = Table( "visualization_rating_association", metadata,
+                                              Column( "id", Integer, primary_key=True ),
+                                              Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
+                                              Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                              Column( "rating", Integer, index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create history_rating_association table.
+    try:
+        HistoryRatingAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating history_rating_association table failed: %s" % str( e ) )
+
+    # Create history_dataset_association_rating_association table.
+    try:
+        HistoryDatasetAssociationRatingAssociation_table.create()
+    except Exception as e:
+        # MySQL cannot handle long index names; when we see this error, create the index manually with a shorter name.
+        if migrate_engine.name == 'mysql' and \
+                "identifier name 'ix_history_dataset_association_rating_association_history_dataset_association_id' is too long" in str(e).lower():
+            i = Index( "ix_hda_rating_association_hda_id", HistoryDatasetAssociationRatingAssociation_table.c.history_dataset_association_id )
+            try:
+                i.create()
+            except Exception as e:
+                print(str(e))
+                log.debug( "Adding index 'ix_hda_rating_association_hda_id' to table 'history_dataset_association_rating_association' table failed: %s" % str( e ) )
+        else:
+            print(str(e))
+            log.debug( "Creating history_dataset_association_rating_association table failed: %s" % str( e ) )
+
+    # Create stored_workflow_rating_association table.
+    try:
+        StoredWorkflowRatingAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating stored_workflow_rating_association table failed: %s" % str( e ) )
+
+    # Create page_rating_association table.
+    try:
+        PageRatingAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating page_rating_association table failed: %s" % str( e ) )
+
+    # Create visualization_rating_association table.
+    try:
+        VisualizationRatingAssociation_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating visualization_rating_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop history_rating_association table.
+    try:
+        HistoryRatingAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping history_rating_association table failed: %s" % str( e ) )
+
+    # Drop history_dataset_association_rating_association table.
+    try:
+        HistoryDatasetAssociationRatingAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping history_dataset_association_rating_association table failed: %s" % str( e ) )
+
+    # Drop stored_workflow_rating_association table.
+    try:
+        StoredWorkflowRatingAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping stored_workflow_rating_association table failed: %s" % str( e ) )
+
+    # Drop page_rating_association table.
+    try:
+        PageRatingAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping page_rating_association table failed: %s" % str( e ) )
+
+    # Drop visualization_rating_association table.
+    try:
+        VisualizationRatingAssociation_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping visualization_rating_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py b/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py
new file mode 100644
index 0000000..3cfe92d
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py
@@ -0,0 +1,56 @@
+"""
+Migration script to add dbkey column for visualization.
+"""
+from __future__ import print_function
+
+import logging
+from json import loads
+
+from sqlalchemy import Column, Index, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+
+    print(__doc__)
+    metadata.reflect()
+
+    Visualization_table = Table( "visualization", metadata, autoload=True )
+    Visualization_revision_table = Table( "visualization_revision", metadata, autoload=True )
+
+    # Create dbkey columns.
+    x = Column( "dbkey", TEXT )
+    y = Column( "dbkey", TEXT )
+    x.create( Visualization_table )
+    y.create( Visualization_revision_table )
+    # Manually create indexes for compatibility with mysql_length.
+    xi = Index( "ix_visualization_dbkey", Visualization_table.c.dbkey, mysql_length=200)
+    xi.create()
+    yi = Index( "ix_visualization_revision_dbkey", Visualization_revision_table.c.dbkey, mysql_length=200)
+    yi.create()
+    assert x is Visualization_table.c.dbkey
+    assert y is Visualization_revision_table.c.dbkey
+
+    all_viz = migrate_engine.execute( "SELECT visualization.id as viz_id, visualization_revision.id as viz_rev_id, visualization_revision.config FROM visualization_revision \
+                    LEFT JOIN visualization ON visualization.id=visualization_revision.visualization_id" )
+    for viz in all_viz:
+        viz_id = viz['viz_id']
+        viz_rev_id = viz['viz_rev_id']
+        if viz[Visualization_revision_table.c.config]:
+            dbkey = loads(viz[Visualization_revision_table.c.config]).get('dbkey', "").replace("'", "\\'")
+            migrate_engine.execute("UPDATE visualization_revision SET dbkey='%s' WHERE id=%s" % (dbkey, viz_rev_id))
+            migrate_engine.execute("UPDATE visualization SET dbkey='%s' WHERE id=%s" % (dbkey, viz_id))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    Visualization_table = Table( "visualization", metadata, autoload=True )
+    Visualization_revision_table = Table( "visualization_revision", metadata, autoload=True )
+
+    Visualization_table.c.dbkey.drop()
+    Visualization_revision_table.c.dbkey.drop()
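The backfill escapes single quotes by hand before interpolating dbkey into SQL. A sketch of the same update with bound parameters, shown for the revision table only and reusing the column handle from above:

    from sqlalchemy import text

    upd = text( "UPDATE visualization_revision SET dbkey=:k WHERE id=:i" )
    for viz in all_viz:
        config = viz[ Visualization_revision_table.c.config ]
        if config:
            dbkey = loads( config ).get( 'dbkey', "" )
            migrate_engine.execute( upd, k=dbkey, i=viz[ 'viz_rev_id' ] )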
diff --git a/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py b/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py
new file mode 100644
index 0000000..1a7eca5
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0055_add_pja_assoc_for_jobs.py
@@ -0,0 +1,38 @@
+"""
+Migration script to add the post_job_action_association table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+PostJobActionAssociation_table = Table("post_job_action_association", metadata,
+                                       Column("id", Integer, primary_key=True),
+                                       Column("post_job_action_id", Integer, ForeignKey("post_job_action.id"), index=True, nullable=False),
+                                       Column("job_id", Integer, ForeignKey("job.id"), index=True, nullable=False))
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        PostJobActionAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating PostJobActionAssociation table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    try:
+        PostJobActionAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping PostJobActionAssociation table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py b/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py
new file mode 100644
index 0000000..d0a0f7e
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0056_workflow_outputs.py
@@ -0,0 +1,38 @@
+"""
+Migration script to create tables for adding explicit workflow outputs.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table
+
+logging.basicConfig( level=logging.DEBUG )
+log = logging.getLogger( __name__ )
+
+metadata = MetaData()
+
+WorkflowOutput_table = Table( "workflow_output", metadata,
+                              Column( "id", Integer, primary_key=True ),
+                              Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True, nullable=False),
+                              Column( "output_name", String(255), nullable=True))
+
+tables = [WorkflowOutput_table]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    for table in tables:
+        try:
+            table.create()
+        except Exception:
+            log.warning( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    for table in tables:
+        table.drop()
diff --git a/lib/galaxy/model/migrate/versions/0057_request_notify.py b/lib/galaxy/model/migrate/versions/0057_request_notify.py
new file mode 100644
index 0000000..2c2c100
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0057_request_notify.py
@@ -0,0 +1,58 @@
+"""
+Migration script to modify the 'notify' field in the 'request' table from a boolean
+to a JSONType.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+from json import dumps
+
+from sqlalchemy import Column, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+from galaxy.model.custom_types import JSONType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Request_table = Table( "request", metadata, autoload=True )
+    except NoSuchTableError:
+        Request_table = None
+        log.debug( "Failed loading table 'request'" )
+
+    if Request_table is not None:
+        # create the column again as JSONType
+        try:
+            col = Column( "notification", JSONType() )
+            col.create( Request_table )
+            assert col is Request_table.c.notification
+        except Exception as e:
+            log.debug( "Creating column 'notification' in the 'request' table failed: %s" % ( str( e ) ) )
+
+        cmd = "SELECT id, user_id, notify FROM request"
+        result = migrate_engine.execute( cmd )
+        for r in result:
+            id = int(r[0])
+            notify_new = dict(email=[], sample_states=[], body='', subject='')
+            cmd = "UPDATE request SET notification='%s' WHERE id=%i" % (dumps(notify_new), id)
+            migrate_engine.execute( cmd )
+
+        # remove the 'notify' column for non-sqlite databases.
+        if migrate_engine.name != 'sqlite':
+            try:
+                Request_table.c.notify.drop()
+            except Exception as e:
+                log.debug( "Deleting column 'notify' from the 'request' table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
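Every existing request row receives the same empty notification structure; what lands in the new JSONType column is simply the following (field meanings are inferred, not documented in this script):

    notification = {
        "email": [],          # addresses to notify (inferred)
        "sample_states": [],  # sample states that trigger a notification (inferred)
        "body": "",
        "subject": "",
    }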
diff --git a/lib/galaxy/model/migrate/versions/0058_history_import_export.py b/lib/galaxy/model/migrate/versions/0058_history_import_export.py
new file mode 100644
index 0000000..7b14d66
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0058_history_import_export.py
@@ -0,0 +1,48 @@
+"""
+Migration script to create table for exporting histories to archives.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, ForeignKey, Integer, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Table to add.
+
+JobExportHistoryArchive_table = Table( "job_export_history_archive", metadata,
+                                       Column( "id", Integer, primary_key=True ),
+                                       Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                                       Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                       Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+                                       Column( "compressed", Boolean, index=True, default=False ),
+                                       Column( "history_attrs_filename", TEXT ),
+                                       Column( "datasets_attrs_filename", TEXT ),
+                                       Column( "jobs_attrs_filename", TEXT ) )
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Create job_export_history_archive table.
+    try:
+        JobExportHistoryArchive_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating job_export_history_archive table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop job_export_history_archive table.
+    try:
+        JobExportHistoryArchive_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping job_export_history_archive table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py b/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py
new file mode 100644
index 0000000..3768a41
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py
@@ -0,0 +1,55 @@
+"""
+Migration script to modify the 'file_path' field type in 'sample_dataset' table
+to 'TEXT' so that it can support large file paths exceeding 255 characters
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        SampleDataset_table = Table( "sample_dataset", metadata, autoload=True )
+    except NoSuchTableError:
+        SampleDataset_table = None
+        log.debug( "Failed loading table 'sample_dataset'" )
+
+    if SampleDataset_table is not None:
+        cmd = "SELECT id, file_path FROM sample_dataset"
+        result = migrate_engine.execute( cmd )
+        filepath_dict = {}
+        for r in result:
+            id = int(r[0])
+            filepath_dict[id] = r[1]
+        # remove the 'file_path' column
+        try:
+            SampleDataset_table.c.file_path.drop()
+        except Exception as e:
+            log.debug( "Deleting column 'file_path' from the 'sample_dataset' table failed: %s" % ( str( e ) ) )
+        # create the column again
+        try:
+            col = Column( "file_path", TEXT )
+            col.create( SampleDataset_table )
+            assert col is SampleDataset_table.c.file_path
+        except Exception as e:
+            log.debug( "Creating column 'file_path' in the 'sample_dataset' table failed: %s" % ( str( e ) ) )
+
+        for id, file_path in filepath_dict.items():
+            cmd = "update sample_dataset set file_path='%s' where id=%i" % (file_path, id)
+            migrate_engine.execute( cmd )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
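The save/drop/recreate/restore dance keeps the type change portable to SQLite, which cannot alter a column's type in place. On PostgreSQL alone the same change could be a single statement (sketch):

    if migrate_engine.name in [ 'postgres', 'postgresql' ]:
        migrate_engine.execute( "ALTER TABLE sample_dataset ALTER COLUMN file_path TYPE TEXT" )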
diff --git a/lib/galaxy/model/migrate/versions/0060_history_archive_import.py b/lib/galaxy/model/migrate/versions/0060_history_archive_import.py
new file mode 100644
index 0000000..ccdaa73
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0060_history_archive_import.py
@@ -0,0 +1,72 @@
+"""
+Migration script to create column and table for importing histories from
+file archives.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, ForeignKey, Integer, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Columns to add.
+
+importing_col = Column( "importing", Boolean, index=True, default=False )
+ldda_parent_col = Column( "ldda_parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True )
+
+# Table to add.
+
+JobImportHistoryArchive_table = Table( "job_import_history_archive", metadata,
+                                       Column( "id", Integer, primary_key=True ),
+                                       Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                                       Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                       Column( "archive_dir", TEXT ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Add column to history table and initialize.
+    try:
+        History_table = Table( "history", metadata, autoload=True )
+        importing_col.create( History_table, index_name="ix_history_importing")
+        assert importing_col is History_table.c.importing
+
+        # Initialize column to false.
+        if migrate_engine.name in ['mysql', 'sqlite']:
+            default_false = "0"
+        elif migrate_engine.name in ['postgres', 'postgresql']:
+            default_false = "false"
+        migrate_engine.execute( "UPDATE history SET importing=%s" % default_false )
+    except Exception as e:
+        print(str(e))
+        log.debug( "Adding column 'importing' to history table failed: %s" % str( e ) )
+
+    # Create job_import_history_archive table.
+    try:
+        JobImportHistoryArchive_table.create()
+    except Exception as e:
+        log.debug( "Creating job_import_history_archive table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop 'importing' column from history table.
+    try:
+        History_table = Table( "history", metadata, autoload=True )
+        importing_col = History_table.c.importing
+        importing_col.drop()
+    except Exception as e:
+        log.debug( "Dropping column 'importing' from history table failed: %s" % ( str( e ) ) )
+
+    # Drop job_import_history_archive table.
+    try:
+        JobImportHistoryArchive_table.drop()
+    except Exception as e:
+        log.debug( "Dropping job_import_history_archive table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0061_tasks.py b/lib/galaxy/model/migrate/versions/0061_tasks.py
new file mode 100644
index 0000000..b83d64b
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0061_tasks.py
@@ -0,0 +1,50 @@
+"""
+Migration script to create tables for task management.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, String, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+now = datetime.datetime.utcnow
+
+Task_table = Table( "task", metadata,
+                    Column( "id", Integer, primary_key=True ),
+                    Column( "create_time", DateTime, default=now ),
+                    Column( "execution_time", DateTime ),
+                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                    Column( "state", String( 64 ), index=True ),
+                    Column( "command_line", TEXT ),
+                    Column( "param_filename", String( 1024 ) ),
+                    Column( "runner_name", String( 255 ) ),
+                    Column( "stdout", TEXT ),
+                    Column( "stderr", TEXT ),
+                    Column( "traceback", TEXT ),
+                    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=False ),
+                    Column( "part_file", String(1024)),
+                    Column( "task_runner_name", String( 255 ) ),
+                    Column( "task_runner_external_id", String( 255 ) ) )
+
+tables = [Task_table]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    for table in tables:
+        try:
+            table.create()
+        except Exception:
+            log.warning( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    for table in tables:
+        table.drop()
diff --git a/lib/galaxy/model/migrate/versions/0062_user_openid_table.py b/lib/galaxy/model/migrate/versions/0062_user_openid_table.py
new file mode 100644
index 0000000..519ce81
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0062_user_openid_table.py
@@ -0,0 +1,58 @@
+"""
+Migration script to create table for associating sessions and users with
+OpenIDs.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, MetaData, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Table to add
+
+UserOpenID_table = Table( "galaxy_user_openid", metadata,
+                          Column( "id", Integer, primary_key=True ),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+                          Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
+                          Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                          Column( "openid", TEXT ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create galaxy_user_openid table
+    try:
+        UserOpenID_table.create()
+    except Exception as e:
+        log.debug( "Creating galaxy_user_openid table failed: %s" % str( e ) )
+
+    ix_name = 'ix_galaxy_user_openid_openid'
+    if migrate_engine.name == 'mysql':
+        i = "ALTER TABLE galaxy_user_openid ADD UNIQUE INDEX ( openid( 255 ) )"
+        migrate_engine.execute( i )
+    else:
+        i = Index( ix_name, UserOpenID_table.c.openid, unique=True )
+        try:
+            i.create()
+        except Exception as e:
+            log.debug( "Adding index '%s' failed: %s" % ( ix_name, str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop galaxy_user_openid table
+    try:
+        UserOpenID_table.drop()
+    except Exception as e:
+        log.debug( "Dropping galaxy_user_openid table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0063_sequencer_table.py b/lib/galaxy/model/migrate/versions/0063_sequencer_table.py
new file mode 100644
index 0000000..c90f262
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0063_sequencer_table.py
@@ -0,0 +1,56 @@
+"""
+Migration script to create a new 'sequencer' table
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Table to add
+Sequencer_table = Table( 'sequencer', metadata,
+                         Column( "id", Integer, primary_key=True ),
+                         Column( "create_time", DateTime, default=now ),
+                         Column( "update_time", DateTime, default=now, onupdate=now ),
+                         Column( "name", TrimmedString( 255 ), nullable=False ),
+                         Column( "description", TEXT ),
+                         Column( "sequencer_type_id", TrimmedString( 255 ), nullable=False ),
+                         Column( "version", TrimmedString( 255 ) ),
+                         Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                         Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+                         Column( "deleted", Boolean, index=True, default=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    # create the sequencer table
+    try:
+        Sequencer_table.create()
+    except Exception as e:
+        log.debug( "Creating 'sequencer' table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # delete sequencer table
+    try:
+        Sequencer_table = Table( "sequencer", metadata, autoload=True )
+    except NoSuchTableError:
+        Sequencer_table = None
+        log.debug( "Failed loading table sequencer" )
+    if Sequencer_table:
+        try:
+            Sequencer_table.drop()
+        except Exception as e:
+            log.debug( "Deleting 'sequencer' table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py b/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py
new file mode 100644
index 0000000..b9f3849
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0064_add_run_and_sample_run_association_tables.py
@@ -0,0 +1,67 @@
+"""
+Migration script to add the run and sample_run_association tables.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+Run_table = Table( "run", metadata,
+                   Column( "id", Integer, primary_key=True ),
+                   Column( "create_time", DateTime, default=now ),
+                   Column( "update_time", DateTime, default=now, onupdate=now ),
+                   Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                   Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
+                   Column( "deleted", Boolean, index=True, default=False ) )
+
+RequestTypeRunAssociation_table = Table( "request_type_run_association", metadata,
+                                         Column( "id", Integer, primary_key=True ),
+                                         Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True, nullable=False ),
+                                         Column( "run_id", Integer, ForeignKey( "run.id" ), index=True, nullable=False ) )
+
+SampleRunAssociation_table = Table( "sample_run_association", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True, nullable=False ),
+                                    Column( "run_id", Integer, ForeignKey( "run.id" ), index=True, nullable=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Run_table.create()
+    except Exception as e:
+        log.debug( "Creating Run_table table failed: %s" % str( e ) )
+    try:
+        RequestTypeRunAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating RequestTypeRunAssociation table failed: %s" % str( e ) )
+    try:
+        SampleRunAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating SampleRunAssociation table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    try:
+        SampleRunAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping SampleRunAssociation table failed: %s" % str( e ) )
+    try:
+        RequestTypeRunAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping RequestTypeRunAssociation table failed: %s" % str( e ) )
+    try:
+        Run_table.drop()
+    except Exception as e:
+        log.debug( "Dropping Run_table table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py b/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py
new file mode 100644
index 0000000..d64c5bf
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py
@@ -0,0 +1,142 @@
+"""
+Migration script to add 'name' attribute to the JSON dict which describes
+a form definition field and the form values in the database. In the 'form_values'
+table, the 'content' column is now a JSON dict instead of a list.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+from json import dumps, loads
+
+from sqlalchemy import MetaData, Table
+
+from galaxy.model.custom_types import _sniffnfix_pg9_hex
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Table( "form_definition", metadata, autoload=True )
+    except Exception as e:
+        log.debug( "Loading 'form_definition' table failed: %s" % str( e ) )
+    try:
+        Table( "form_values", metadata, autoload=True )
+    except Exception as e:
+        log.debug( "Loading 'form_values' table failed: %s" % str( e ) )
+
+    def get_value(lst, index):
+        try:
+            return str(lst[index]).replace("'", "''")
+        except IndexError:
+            return ''
+    # Go through the entire table and add a 'name' attribute for each field
+    # in the list of fields for each form definition
+    cmd = "SELECT f.id, f.fields FROM form_definition AS f"
+    result = migrate_engine.execute( cmd )
+    for row in result:
+        form_definition_id = row[0]
+        fields = str( row[1] )
+        if not fields.strip():
+            continue
+        fields_list = loads( _sniffnfix_pg9_hex( fields ) )
+        if len( fields_list ):
+            for index, field in enumerate( fields_list ):
+                field[ 'name' ] = 'field_%i' % index
+                field[ 'helptext' ] = field[ 'helptext' ].replace("'", "''").replace('"', "")
+                field[ 'label' ] = field[ 'label' ].replace("'", "''")
+            fields_json = dumps( fields_list )
+            if migrate_engine.name == 'mysql':
+                cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" % ( fields_json, form_definition_id )
+            else:
+                cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" % ( fields_json, form_definition_id )
+            migrate_engine.execute( cmd )
+    # replace the values list in the content field of the form_values table with a name:value dict
+    cmd = "SELECT form_values.id, form_values.content, form_definition.fields" \
+          " FROM form_values, form_definition" \
+          " WHERE form_values.form_definition_id=form_definition.id" \
+          " ORDER BY form_values.id ASC"
+    result = migrate_engine.execute( cmd )
+    for row in result:
+        form_values_id = int( row[0] )
+        if not str( row[1] ).strip():
+            continue
+        row1 = str(row[1]).replace('\n', '').replace('\r', '')
+        values_list = loads( str( row1 ).strip() )
+        if not str( row[2] ).strip():
+            continue
+        fields_list = loads( str( row[2] ).strip() )
+        if fields_list and isinstance(values_list, list):
+            values_dict = {}
+            for field_index, field in enumerate( fields_list ):
+                field_name = field[ 'name' ]
+                values_dict[ field_name ] = get_value(values_list, field_index )
+            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % ( dumps( values_dict ), form_values_id )
+            migrate_engine.execute( cmd )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Table( "form_definition", metadata, autoload=True )
+    except Exception as e:
+        log.debug( "Loading 'form_definition' table failed: %s" % str( e ) )
+    try:
+        Table( "form_values", metadata, autoload=True )
+    except Exception as e:
+        log.debug( "Loading 'form_values' table failed: %s" % str( e ) )
+    # remove the name attribute in the content column JSON dict in the form_values table
+    # and restore it to a list of values
+    cmd = "SELECT form_values.id, form_values.content, form_definition.fields" \
+          " FROM form_values, form_definition" \
+          " WHERE form_values.form_definition_id=form_definition.id" \
+          " ORDER BY form_values.id ASC"
+    result = migrate_engine.execute( cmd )
+    for row in result:
+        form_values_id = int( row[0] )
+        if not str( row[1] ).strip():
+            continue
+        values_dict = loads( str( row[1] ) )
+        if not str( row[2] ).strip():
+            continue
+        fields_list = loads( str( row[2] ) )
+        if fields_list:
+            values_list = []
+            for field_index, field in enumerate( fields_list ):
+                field_name = field[ 'name' ]
+                field_value = values_dict[ field_name ]
+                values_list.append( field_value )
+            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % ( dumps( values_list ), form_values_id )
+            migrate_engine.execute( cmd )
+    # remove name attribute from the field column of the form_definition table
+    cmd = "SELECT f.id, f.fields FROM form_definition AS f"
+    result = migrate_engine.execute( cmd )
+    for row in result:
+        form_definition_id = row[0]
+        fields = str( row[1] )
+        if not fields.strip():
+            continue
+        fields_list = loads( _sniffnfix_pg9_hex( fields ) )
+        if len( fields_list ):
+            for index, field in enumerate( fields_list ):
+                if 'name' in field:
+                    del field[ 'name' ]
+            if migrate_engine.name == 'mysql':
+                cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" % ( dumps( fields_list ), form_definition_id )
+            else:
+                cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" % ( dumps( fields_list ), form_definition_id )
+            migrate_engine.execute( cmd )
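+
+
+# --- Editor's sketch, not part of the original migration: a standalone
+# illustration of the transformation upgrade() performs, with hypothetical
+# field labels and values (running this file still requires the Galaxy tree
+# on the path because of the imports above).
+if __name__ == '__main__':
+    sample_fields = [ { 'label': 'Host' }, { 'label': 'User name' } ]
+    # upgrade() gives every field a generated 'name' attribute...
+    for index, field in enumerate( sample_fields ):
+        field[ 'name' ] = 'field_%i' % index
+    # ...and rewrites form_values.content from a plain list into a name:value dict
+    values_list = loads( dumps( [ 'example.org', 'alice' ] ) )
+    new_content = dumps( dict( ( f[ 'name' ], values_list[ i ] )
+                               for i, f in enumerate( sample_fields ) ) )
+    print( new_content )  # e.g. {"field_0": "example.org", "field_1": "alice"}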
diff --git a/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py b/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py
new file mode 100644
index 0000000..c7d2f5d
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0066_deferred_job_and_transfer_job_tables.py
@@ -0,0 +1,69 @@
+"""
+Migration script to create tables for storing deferred job and managed transfer
+information.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, Integer, MetaData, String, Table
+
+from galaxy.model.custom_types import JSONType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Table to add
+
+DeferredJob_table = Table( "deferred_job", metadata,
+                           Column( "id", Integer, primary_key=True ),
+                           Column( "create_time", DateTime, default=now ),
+                           Column( "update_time", DateTime, default=now, onupdate=now ),
+                           Column( "state", String( 64 ), index=True ),
+                           Column( "plugin", String( 128 ), index=True ),
+                           Column( "params", JSONType ) )
+
+TransferJob_table = Table( "transfer_job", metadata,
+                           Column( "id", Integer, primary_key=True ),
+                           Column( "create_time", DateTime, default=now ),
+                           Column( "update_time", DateTime, default=now, onupdate=now ),
+                           Column( "state", String( 64 ), index=True ),
+                           Column( "path", String( 1024 ) ),
+                           Column( "params", JSONType ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create deferred_job table
+    try:
+        DeferredJob_table.create()
+    except Exception as e:
+        log.error( "Creating deferred_job table failed: %s" % str( e ) )
+
+    # Create transfer_job table
+    try:
+        TransferJob_table.create()
+    except Exception as e:
+        log.error( "Creating transfer_job table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop deferred_job table
+    try:
+        DeferredJob_table.drop()
+    except Exception as e:
+        log.error( "Dropping deferred_job table failed: %s" % str( e ) )
+
+    # Drop transfer_job table
+    try:
+        TransferJob_table.drop()
+    except Exception as e:
+        log.error( "Dropping transfer_job table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py b/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py
new file mode 100644
index 0000000..d1fc39d
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py
@@ -0,0 +1,297 @@
+"""
+Migration script to populate the 'sequencer' table using the unique
+entries in the 'datatx_info' column of the 'request_type' table. It also deletes the 'datatx_info'
+column in the 'request_type' table and adds a foreign key to the 'sequencer' table. The
+actual contents of the datatx_info column are stored as form_values.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+from json import dumps, loads
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+from galaxy.model.custom_types import JSONType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def nextval( migrate_engine, table, col='id' ):
+    if migrate_engine.name in ['postgres', 'postgresql']:
+        return "nextval('%s_%s_seq')" % ( table, col )
+    elif migrate_engine.name in ['mysql', 'sqlite']:
+        return "null"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def localtimestamp( migrate_engine ):
+    if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+        return "LOCALTIMESTAMP"
+    elif migrate_engine.name == 'sqlite':
+        return "current_date || ' ' || current_time"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def get_latest_id( migrate_engine, table ):
+    result = migrate_engine.execute( "select id from %s order by id desc" % table )
+    row = result.fetchone()
+    if row:
+        return row[0]
+    else:
+        raise Exception( 'Unable to get the latest id in the %s table.' % table )
+
+
+def boolean( migrate_engine, value ):
+    if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+        return value
+    elif migrate_engine.name == 'sqlite':
+        if value in [ 'True', 'true' ]:
+            return 1
+        return 0
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def create_sequencer_form_definition( migrate_engine ):
+    '''
+    Create a new form_definition containing 5 fields (host, username, password,
+    data_dir & rename_datasets) which described the existing datatx_info json
+    dict in the request_type table
+    '''
+    # create new form_definition_current in the db
+    cmd = "INSERT INTO form_definition_current VALUES ( %s, %s, %s, %s, %s )"
+    cmd = cmd % ( nextval( migrate_engine, 'form_definition_current' ),
+                  localtimestamp( migrate_engine ),
+                  localtimestamp( migrate_engine ),
+                  'NULL',
+                  boolean( migrate_engine, 'false' ) )
+    migrate_engine.execute( cmd )
+    # get this form_definition_current id
+    form_definition_current_id = get_latest_id( migrate_engine, 'form_definition_current' )
+    # create new form_definition in the db
+    form_definition_name = 'Generic sequencer form'
+    form_definition_desc = ''
+    form_definition_fields = []
+    fields = [ ( 'Host', 'TextField' ),
+               ( 'User name', 'TextField' ),
+               ( 'Password', 'PasswordField' ),
+               ( 'Data directory', 'TextField' ) ]
+    for index, ( label, field_type ) in enumerate( fields ):
+        form_definition_fields.append( { 'name': 'field_%i' % index,
+                                         'label': label,
+                                         'helptext': '',
+                                         'visible': True,
+                                         'required': False,
+                                         'type': field_type,
+                                         'selectlist': [],
+                                         'layout': 'none',
+                                         'default': '' } )
+    form_definition_fields.append( { 'name': 'field_%i' % len( fields ),
+                                     'label': 'Prepend the experiment name and sample name to the dataset name?',
+                                     'helptext': 'Galaxy datasets are renamed by prepending the experiment name and sample name to the dataset name, ensuring dataset names remain unique in Galaxy even when multiple datasets have the same name on the sequencer.',
+                                     'visible': True,
+                                     'required': False,
+                                     'type': 'SelectField',
+                                     'selectlist': [ 'Do not rename',
+                                                     'Prepend sample name',
+                                                     'Prepend experiment name',
+                                                     'Prepend experiment and sample name' ],
+                                     'layout': 'none',
+                                     'default': '' } )
+    form_definition_type = 'Sequencer Information Form'
+    form_definition_layout = dumps('[]')
+    cmd = "INSERT INTO form_definition VALUES ( %s, %s, %s, '%s', '%s', %s, '%s', '%s', '%s' )"
+    cmd = cmd % ( nextval( migrate_engine, 'form_definition' ),
+                  localtimestamp( migrate_engine ),
+                  localtimestamp( migrate_engine ),
+                  form_definition_name,
+                  form_definition_desc,
+                  form_definition_current_id,
+                  dumps( form_definition_fields ),
+                  form_definition_type,
+                  form_definition_layout )
+    migrate_engine.execute( cmd )
+    # get this form_definition id
+    form_definition_id = get_latest_id( migrate_engine, 'form_definition' )
+    # update the form_definition_id column in form_definition_current
+    cmd = "UPDATE form_definition_current SET latest_form_id=%i WHERE id=%i" % ( form_definition_id, form_definition_current_id )
+    migrate_engine.execute( cmd )
+    return form_definition_id
+
+
+def get_sequencer_id( migrate_engine, sequencer_info ):
+    '''Get the sequencer id corresponding to the sequencer information'''
+    # Check if there is an existing sequencer that has the same sequencer
+    # information fields & values
+    cmd = "SELECT sequencer.id, form_values.content FROM sequencer, form_values WHERE sequencer.form_values_id=form_values.id"
+    result = migrate_engine.execute( cmd )
+    for row in result:
+        sequencer_id = row[0]
+        values = str( row[1] )
+        if not values.strip():
+            continue
+        values = loads( values )
+        # proceed only if the stored values form a valid dict
+        if values and isinstance(values, dict):
+            if sequencer_info.get( 'host', '' ) == values.get( 'field_0', '' ) \
+               and sequencer_info.get( 'username', '' ) == values.get( 'field_1', '' ) \
+               and sequencer_info.get( 'password', '' ) == values.get( 'field_2', '' ) \
+               and sequencer_info.get( 'data_dir', '' ) == values.get( 'field_3', '' ) \
+               and sequencer_info.get( 'rename_dataset', '' ) == values.get( 'field_4', '' ):
+                return sequencer_id
+    return None
+
+
+def add_sequencer( migrate_engine, sequencer_index, sequencer_form_definition_id, sequencer_info ):
+    '''Adds a new sequencer to the sequencer table along with its form values.'''
+    # Create a new form values record with the supplied sequencer information
+    values = dumps( { 'field_0': sequencer_info.get( 'host', '' ),
+                      'field_1': sequencer_info.get( 'username', '' ),
+                      'field_2': sequencer_info.get( 'password', '' ),
+                      'field_3': sequencer_info.get( 'data_dir', '' ),
+                      'field_4': sequencer_info.get( 'rename_dataset', '' ) } )
+    cmd = "INSERT INTO form_values VALUES ( %s, %s, %s, %s, '%s' )" % ( nextval( migrate_engine, 'form_values' ),
+                                                                        localtimestamp( migrate_engine ),
+                                                                        localtimestamp( migrate_engine ),
+                                                                        sequencer_form_definition_id,
+                                                                        values )
+    migrate_engine.execute(cmd)
+    sequencer_form_values_id = get_latest_id( migrate_engine, 'form_values' )
+    # Create a new sequencer record with reference to the form value created above.
+    name = 'Sequencer_%i' % sequencer_index
+    desc = ''
+    version = ''
+    sequencer_type_id = 'simple_unknown_sequencer'
+    cmd = "INSERT INTO sequencer VALUES ( %s, %s, %s, '%s', '%s', '%s', '%s', %s, %s, %s )"
+    cmd = cmd % ( nextval( migrate_engine, 'sequencer'),
+                  localtimestamp( migrate_engine ),
+                  localtimestamp( migrate_engine ),
+                  name,
+                  desc,
+                  sequencer_type_id,
+                  version,
+                  sequencer_form_definition_id,
+                  sequencer_form_values_id,
+                  boolean( migrate_engine, 'false' ) )
+    migrate_engine.execute(cmd)
+    return get_latest_id( migrate_engine, 'sequencer' )
+
+
+def update_sequencer_id_in_request_type( migrate_engine, request_type_id, sequencer_id ):
+    '''Update the foreign key to the sequencer table in the request_type table'''
+    cmd = "UPDATE request_type SET sequencer_id=%i WHERE id=%i" % ( sequencer_id, request_type_id )
+    migrate_engine.execute( cmd )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        RequestType_table = Table( "request_type", metadata, autoload=True )
+    except NoSuchTableError:
+        RequestType_table = None
+        log.debug( "Failed loading table 'request_type'" )
+    if RequestType_table is None:
+        return
+    # load the sequencer table
+    try:
+        Sequencer_table = Table( "sequencer", metadata, autoload=True )
+    except NoSuchTableError:
+        Sequencer_table = None
+        log.debug( "Failed loading table 'sequencer'" )
+    if Sequencer_table is None:
+        return
+    # create foreign key field to the sequencer table in the request_type table
+    try:
+        col = Column( "sequencer_id", Integer, ForeignKey( "sequencer.id" ), nullable=True )
+        col.create( RequestType_table )
+        assert col is RequestType_table.c.sequencer_id
+    except Exception as e:
+        log.debug( "Creating column 'sequencer_id' in the 'request_type' table failed: %s" % ( str( e ) ) )
+    # copy the sequencer information contained in the 'datatx_info' column
+    # of the request_type table to the form values referenced in the sequencer table
+    cmd = "SELECT id, name, datatx_info FROM request_type ORDER BY id ASC"
+    result = migrate_engine.execute( cmd )
+    results_list = result.fetchall()
+    # Proceed only if request_types exists
+    if len( results_list ):
+        # In this migration script, all the contents of the datatx_info column are stored as form_values
+        # with a pointer to the sequencer table. This way the sequencer information can be customized
+        # by the admin and is no longer restricted to host, username, password and data directory.
+        # For the existing request_types in the database, we add a new form_definition
+        # with these fields. Then we populate the sequencer table with the unique datatx_info
+        # entries from the existing request_types.
+        sequencer_form_definition_id = create_sequencer_form_definition( migrate_engine )
+        sequencer_index = 1
+        for row in results_list:
+            request_type_id = row[0]
+            sequencer_info = str( row[2] )  # datatx_info column
+            # skip if sequencer_info is empty
+            if not sequencer_info.strip() or sequencer_info in ['None', 'null']:
+                continue
+            sequencer_info = loads( sequencer_info.strip() )
+            # proceed only if sequencer_info is a valid dict
+            if sequencer_info and isinstance(sequencer_info, dict):
+                # check if this sequencer has already been added to the sequencer table
+                sequencer_id = get_sequencer_id( migrate_engine, sequencer_info )
+                if not sequencer_id:
+                    # add to the sequencer table
+                    sequencer_id = add_sequencer( migrate_engine, sequencer_index, sequencer_form_definition_id, sequencer_info )
+                # now update the sequencer_id column in request_type table
+                update_sequencer_id_in_request_type( migrate_engine, request_type_id, sequencer_id )
+                sequencer_index = sequencer_index + 1
+
+    # Finally delete the 'datatx_info' column from the request_type table
+    try:
+        RequestType_table.c.datatx_info.drop()
+    except Exception as e:
+        log.debug( "Deleting column 'datatx_info' in the 'request_type' table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        RequestType_table = Table( "request_type", metadata, autoload=True )
+    except NoSuchTableError:
+        RequestType_table = None
+        log.debug( "Failed loading table 'request_type'" )
+    if RequestType_table is not None:
+        # create the 'datatx_info' column
+        try:
+            col = Column( "datatx_info", JSONType() )
+            col.create( RequestType_table )
+            assert col is RequestType_table.c.datatx_info
+        except Exception as e:
+            log.debug( "Creating column 'datatx_info' in the 'request_type' table failed: %s" % ( str( e ) ) )
+        # restore the datatx_info column data in the request_type table with data from
+        # the sequencer and the form_values table
+        cmd = "SELECT request_type.id, form_values.content "\
+              + " FROM request_type, sequencer, form_values "\
+              + " WHERE request_type.sequencer_id=sequencer.id AND sequencer.form_values_id=form_values.id "\
+              + " ORDER  BY request_type.id ASC"
+        result = migrate_engine.execute( cmd )
+        for row in result:
+            request_type_id = row[0]
+            seq_values = loads( str( row[1] ) )
+            # create the datatx_info json dict
+            datatx_info = dumps( dict( host=seq_values.get( 'field_0', '' ),
+                                       username=seq_values.get( 'field_1', '' ),
+                                       password=seq_values.get( 'field_2', '' ),
+                                       data_dir=seq_values.get( 'field_3', '' ),
+                                       rename_dataset=seq_values.get( 'field_4', '' ) ) )
+            # update the column
+            cmd = "UPDATE request_type SET datatx_info='%s' WHERE id=%i" % ( datatx_info, request_type_id )
+            migrate_engine.execute( cmd )
+        # delete foreign key field to the sequencer table in the request_type table
+        try:
+            RequestType_table.c.sequencer_id.drop()
+        except Exception as e:
+            log.debug( "Deleting column 'sequencer_id' in the 'request_type' table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py b/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py
new file mode 100644
index 0000000..4f2e8be
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py
@@ -0,0 +1,275 @@
+"""
+This migration script renames the 'sequencer' table to 'external_service' and
+creates an association table, 'request_type_external_service_association', and
+populates it. The 'sequencer_id' foreign key is removed from the 'request_type' table.
+The 'sequencer_type_id' column is renamed to 'external_service_type_id' in the renamed
+'external_service' table. Finally, it adds a foreign key to the external_service table in the
+sample_dataset table and populates it.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from migrate import ForeignKeyConstraint
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def nextval( migrate_engine, table, col='id' ):
+    if migrate_engine.name in ['postgres', 'postgresql']:
+        return "nextval('%s_%s_seq')" % ( table, col )
+    elif migrate_engine.name in ['mysql', 'sqlite']:
+        return "null"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    # Load existing tables
+    metadata.reflect()
+    # add a foreign key to the external_service table in the sample_dataset table
+    try:
+        SampleDataset_table = Table( "sample_dataset", metadata, autoload=True )
+    except NoSuchTableError:
+        SampleDataset_table = None
+        log.debug( "Failed loading table 'sample_dataset'" )
+    if SampleDataset_table is None:
+        return
+    try:
+        Sequencer_table = Table( "sequencer", metadata, autoload=True )
+    except NoSuchTableError:
+        Sequencer_table = None
+        log.debug( "Failed loading table 'sequencer'" )
+    if Sequencer_table is None:
+        return
+    # create the column. Call it external_service_id as the table 'sequencer' is
+    # going to be renamed to 'external_service'
+    try:
+        col = Column( "external_service_id", Integer, index=True )
+        col.create( SampleDataset_table, index_name="ix_sample_dataset_external_service_id" )
+        assert col is SampleDataset_table.c.external_service_id
+    except Exception as e:
+        log.debug( "Creating column 'external_service_id' in the 'sample_dataset' table failed: %s" % ( str( e ) ) )
+    if migrate_engine.name != 'sqlite':
+        # Add the foreign key constraint
+        try:
+            cons = ForeignKeyConstraint( [SampleDataset_table.c.external_service_id],
+                                         [Sequencer_table.c.id],
+                                         name='sample_dataset_external_services_id_fk' )
+            # Create the constraint
+            cons.create()
+        except Exception as e:
+            log.debug( "Adding foreign key constraint 'sample_dataset_external_services_id_fk' to table 'sample_dataset' failed: %s" % ( str( e ) ) )
+    # populate the column
+    cmd = "SELECT sample_dataset.id, request_type.sequencer_id " \
+          + " FROM sample_dataset, sample, request, request_type " \
+          + " WHERE sample.id=sample_dataset.sample_id and request.id=sample.request_id and request.request_type_id=request_type.id " \
+          + " ORDER BY sample_dataset.id"
+    result = migrate_engine.execute( cmd )
+    for r in result:
+        sample_dataset_id = int(r[0])
+        sequencer_id = int(r[1])
+        cmd = "UPDATE sample_dataset SET external_service_id='%i' where id=%i" % ( sequencer_id, sample_dataset_id )
+        migrate_engine.execute( cmd )
+    # load request_type table
+    try:
+        RequestType_table = Table( "request_type", metadata, autoload=True )
+    except NoSuchTableError:
+        RequestType_table = None
+        log.debug( "Failed loading table request_type" )
+    if RequestType_table is None:
+        return
+    # rename 'sequencer' table to 'external_service'
+    cmd = "ALTER TABLE sequencer RENAME TO external_service"
+    migrate_engine.execute( cmd )
+    try:
+        ExternalServices_table = Table( "external_service", metadata, autoload=True )
+    except NoSuchTableError:
+        ExternalServices_table = None
+        log.debug( "Failed loading table 'external_service'" )
+    if ExternalServices_table is None:
+        return
+    # if running postgres then rename the primary key sequence too
+    if migrate_engine.name in ['postgres', 'postgresql']:
+        cmd = "ALTER TABLE sequencer_id_seq RENAME TO external_service_id_seq"
+        migrate_engine.execute( cmd )
+    # rename 'sequencer_type_id' column to 'external_service_type_id' in the table 'external_service'
+    # create the column as 'external_service_type_id'
+    try:
+        col = Column( "external_service_type_id", TrimmedString( 255 ) )
+        col.create( ExternalServices_table )
+        assert col is ExternalServices_table.c.external_service_type_id
+    except Exception as e:
+        log.debug( "Creating column 'external_service_type_id' in the 'external_service' table failed: %s" % ( str( e ) ) )
+    # populate this new column
+    cmd = "UPDATE external_service SET external_service_type_id=sequencer_type_id"
+    migrate_engine.execute( cmd )
+    # remove the 'sequencer_type_id' column
+    try:
+        ExternalServices_table.c.sequencer_type_id.drop()
+    except Exception as e:
+        log.debug( "Deleting column 'sequencer_type_id' from the 'external_service' table failed: %s" % ( str( e ) ) )
+    # create 'request_type_external_service_association' table
+    RequestTypeExternalServiceAssociation_table = Table( "request_type_external_service_association", metadata,
+                                                         Column( "id", Integer, primary_key=True ),
+                                                         Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
+                                                         Column( "external_service_id", Integer, ForeignKey( "external_service.id" ), index=True ) )
+    try:
+        RequestTypeExternalServiceAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating request_type_external_service_association table failed: %s" % str( e ) )
+    try:
+        RequestTypeExternalServiceAssociation_table = Table( "request_type_external_service_association", metadata, autoload=True )
+    except NoSuchTableError:
+        RequestTypeExternalServiceAssociation_table = None
+        log.debug( "Failed loading table request_type_external_service_association" )
+    if RequestTypeExternalServiceAssociation_table is None:
+        return
+    # populate 'request_type_external_service_association' table
+    cmd = "SELECT id, sequencer_id FROM request_type ORDER BY id ASC"
+    result = migrate_engine.execute( cmd )
+    results_list = result.fetchall()
+    # Proceed only if request_types exists
+    if len( results_list ):
+        for row in results_list:
+            request_type_id = row[0]
+            sequencer_id = row[1]
+            if not sequencer_id:
+                sequencer_id = 'null'
+            cmd = "INSERT INTO request_type_external_service_association VALUES ( %s, %s, %s )"
+            cmd = cmd % ( nextval( migrate_engine, 'request_type_external_service_association' ),
+                          request_type_id,
+                          sequencer_id )
+            migrate_engine.execute( cmd )
+    # drop the 'sequencer_id' column in the 'request_type' table
+    # sqlite does not support dropping columns
+    if migrate_engine.name == 'sqlite':
+        # In sqlite, create a temp table without the column that needs to be removed,
+        # then copy all the rows from the original table and finally rename the temp
+        # table into place (a condensed sketch of this pattern appears at the end of this file)
+        RequestTypeTemp_table = Table( 'request_type_temp', metadata,
+                                       Column( "id", Integer, primary_key=True),
+                                       Column( "create_time", DateTime, default=now ),
+                                       Column( "update_time", DateTime, default=now, onupdate=now ),
+                                       Column( "name", TrimmedString( 255 ), nullable=False ),
+                                       Column( "desc", TEXT ),
+                                       Column( "request_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                                       Column( "sample_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+                                       Column( "deleted", Boolean, index=True, default=False ) )
+        try:
+            RequestTypeTemp_table.create()
+        except Exception as e:
+            log.debug( "Creating request_type_temp table failed: %s" % str( e ) )
+        # insert all the rows from the request_type table into the request_type_temp table
+        cmd = "INSERT INTO request_type_temp SELECT id, create_time," + \
+            "update_time, name, desc, request_form_id, sample_form_id," + \
+            "deleted FROM request_type;"
+        migrate_engine.execute( cmd )
+        # delete the 'request_type' table
+        try:
+            RequestType_table.drop()
+        except Exception as e:
+            log.debug( "Dropping request_type table failed: %s" % str( e ) )
+        # rename table request_type_temp to request_type
+        cmd = "ALTER TABLE request_type_temp RENAME TO request_type"
+        migrate_engine.execute( cmd )
+    else:
+        try:
+            RequestType_table.c.sequencer_id.drop()
+        except Exception as e:
+            log.debug( "Deleting column 'sequencer_id' from the 'request_type' table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    # load sequencer & request_type table
+    try:
+        RequestType_table = Table( "request_type", metadata, autoload=True )
+    except NoSuchTableError:
+        RequestType_table = None
+        log.debug( "Failed loading table request_type" )
+    if RequestType_table is None:
+        return
+    try:
+        ExternalServices_table = Table( "external_service", metadata, autoload=True )
+    except NoSuchTableError:
+        ExternalServices_table = None
+        log.debug( "Failed loading table 'external_service'" )
+    if ExternalServices_table is None:
+        return
+    try:
+        RequestTypeExternalServiceAssociation_table = Table( "request_type_external_service_association", metadata, autoload=True )
+    except NoSuchTableError:
+        RequestTypeExternalServiceAssociation_table = None
+        log.debug( "Failed loading table request_type_external_service_association" )
+    # create the 'sequencer_id' column in the 'request_type' table
+    try:
+        col = Column( "sequencer_id", Integer, ForeignKey( "external_service.id" ), nullable=True, index=True )
+        col.create( RequestType_table )
+        assert col is RequestType_table.c.sequencer_id
+    except Exception as e:
+        log.debug( "Creating column 'sequencer_id' in the 'request_type' table failed: %s" % ( str( e ) ) )
+    # populate 'sequencer_id' column in the 'request_type' table from the
+    # 'request_type_external_service_association' table
+    cmd = "SELECT request_type_id, external_service_id FROM request_type_external_service_association ORDER BY id ASC"
+    result = migrate_engine.execute( cmd )
+    results_list = result.fetchall()
+    # Proceed only if request_types exists
+    if len( results_list ):
+        for row in results_list:
+            request_type_id = row[0]
+            external_service_id = row[1]
+            cmd = "UPDATE request_type SET sequencer_id=%i WHERE id=%i" % ( external_service_id, request_type_id )
+            migrate_engine.execute( cmd )
+    # remove the 'request_type_external_service_association' table
+    if RequestTypeExternalServiceAssociation_table is not None:
+        try:
+            RequestTypeExternalServiceAssociation_table.drop()
+        except Exception as e:
+            log.debug( "Deleting 'request_type_external_service_association' table failed: %s" % str( e ) )
+    # rename 'external_service_type_id' column to 'sequencer_type_id' in the table 'external_service'
+    # create the column 'sequencer_type_id'
+    try:
+        col = Column( "sequencer_type_id", TrimmedString( 255 ) )
+        col.create( ExternalServices_table )
+        assert col is ExternalServices_table.c.sequencer_type_id
+    except Exception as e:
+        log.debug( "Creating column 'sequencer_type_id' in the 'external_service' table failed: %s" % ( str( e ) ) )
+    # populate this new column
+    cmd = "UPDATE external_service SET sequencer_type_id=external_service_type_id"
+    migrate_engine.execute( cmd )
+    # remove the 'external_service_type_id' column
+    try:
+        ExternalServices_table.c.external_service_type_id.drop()
+    except Exception as e:
+        log.debug( "Deleting column 'external_service_type_id' from the 'external_service' table failed: %s" % ( str( e ) ) )
+    # rename the 'external_service' table to 'sequencer'
+    cmd = "ALTER TABLE external_service RENAME TO sequencer"
+    migrate_engine.execute( cmd )
+    # if running postgres then rename the primary key sequence too
+    if migrate_engine.name in ['postgres', 'postgresql']:
+        cmd = "ALTER SEQUENCE external_service_id_seq RENAME TO sequencer_id_seq"
+        migrate_engine.execute( cmd )
+    # drop the 'external_service_id' column in the 'sample_dataset' table
+    try:
+        SampleDataset_table = Table( "sample_dataset", metadata, autoload=True )
+    except NoSuchTableError:
+        SampleDataset_table = None
+        log.debug( "Failed loading table 'sample_dataset'" )
+    if SampleDataset_table is None:
+        return
+    try:
+        SampleDataset_table.c.external_service_id.drop()
+    except Exception as e:
+        log.debug( "Deleting column 'external_service_id' from the 'sample_dataset' table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py b/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py
new file mode 100644
index 0000000..8167456
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0069_rename_sequencer_form_type.py
@@ -0,0 +1,32 @@
+"""
+Migration script to rename the sequencer information form type to external service information form.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import MetaData
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    current_form_type = 'Sequencer Information Form'
+    new_form_type = "External Service Information Form"
+    cmd = "update form_definition set type='%s' where type='%s'" % ( new_form_type, current_form_type )
+    migrate_engine.execute( cmd )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    new_form_type = 'Sequencer Information Form'
+    current_form_type = "External Service Information Form"
+    cmd = "update form_definition set type='%s' where type='%s'" % ( new_form_type, current_form_type )
+    migrate_engine.execute( cmd )
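+
+
+# --- Editor's sketch, not part of the original migration: the same UPDATE
+# written with bound parameters, which sidesteps the quoting pitfalls of
+# string interpolation (assumes the SQLAlchemy 1.x API in use elsewhere in
+# this tree; the demo runs against a throwaway in-memory sqlite database).
+if __name__ == '__main__':
+    from sqlalchemy import create_engine, text
+    engine = create_engine( 'sqlite://' )
+    engine.execute( "CREATE TABLE form_definition (id INTEGER PRIMARY KEY, type TEXT)" )
+    engine.execute( "INSERT INTO form_definition VALUES (1, 'Sequencer Information Form')" )
+    engine.execute( text( "UPDATE form_definition SET type=:new_type WHERE type=:old_type" ),
+                    new_type='External Service Information Form',
+                    old_type='Sequencer Information Form' )
+    print( engine.execute( "SELECT type FROM form_definition" ).fetchall() )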
diff --git a/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py b/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py
new file mode 100644
index 0000000..f51401d
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0070_add_info_column_to_deferred_job_table.py
@@ -0,0 +1,36 @@
+"""
+Migration script to add 'info' column to the transfer_job table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        TransferJob_table = Table( "transfer_job", metadata, autoload=True )
+        c = Column( "info", TEXT )
+        c.create( TransferJob_table )
+        assert c is TransferJob_table.c.info
+    except Exception as e:
+        print("Adding info column to transfer_job table failed: %s" % str( e ))
+        log.debug( "Adding info column to transfer_job table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        TransferJob_table = Table( "transfer_job", metadata, autoload=True )
+        TransferJob_table.c.info.drop()
+    except Exception as e:
+        print("Dropping info column from transfer_job table failed: %s" % str( e ))
+        log.debug( "Dropping info column from transfer_job table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py b/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py
new file mode 100644
index 0000000..2d58091
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0071_add_history_and_workflow_to_sample.py
@@ -0,0 +1,47 @@
+"""
+Migration script to add 'workflow' and 'history_id' columns to the 'sample' table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+
+from galaxy.model.custom_types import JSONType
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Sample_table = Table( "sample", metadata, autoload=True )
+        c1 = Column( "workflow", JSONType, nullable=True )
+        c2 = Column( "history_id", Integer, ForeignKey( "history.id" ), nullable=True)
+        c1.create( Sample_table )
+        c2.create( Sample_table )
+        assert c1 is Sample_table.c.workflow
+        assert c2 is Sample_table.c.history_id
+    except Exception as e:
+        print("Adding history and workflow columns to sample table failed: %s" % str( e ))
+        log.debug( "Adding history and workflow columns to sample table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Sample_table = Table( "sample", metadata, autoload=True )
+        Sample_table.c.workflow.drop()
+    except Exception as e:
+        print("Dropping workflow column from sample table failed: %s" % str( e ))
+        log.debug( "Dropping workflow column from sample table failed: %s" % str( e ) )
+    try:
+        Sample_table = Table( "sample", metadata, autoload=True )
+        Sample_table.c.history_id.drop()
+    except Exception as e:
+        print("Dropping history column from sample table failed: %s" % str( e ))
+        log.debug( "Dropping history column from sample table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py b/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py
new file mode 100644
index 0000000..a183b52
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0072_add_pid_and_socket_columns_to_transfer_job_table.py
@@ -0,0 +1,40 @@
+"""
+Migration script to add 'pid' and 'socket' columns to the transfer_job table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, Integer, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        TransferJob_table = Table( "transfer_job", metadata, autoload=True )
+        c = Column( "pid", Integer )
+        c.create( TransferJob_table )
+        assert c is TransferJob_table.c.pid
+        c = Column( "socket", Integer )
+        c.create( TransferJob_table )
+        assert c is TransferJob_table.c.socket
+    except Exception as e:
+        print("Adding columns to transfer_job table failed: %s" % str( e ))
+        log.debug( "Adding columns to transfer_job table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        TransferJob_table = Table( "transfer_job", metadata, autoload=True )
+        TransferJob_table.c.pid.drop()
+        TransferJob_table.c.socket.drop()
+    except Exception as e:
+        print("Dropping columns from transfer_job table failed: %s" % str( e ))
+        log.debug( "Dropping columns from transfer_job table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py b/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py
new file mode 100644
index 0000000..8002f8e
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0073_add_ldda_to_implicit_conversion_table.py
@@ -0,0 +1,40 @@
+"""
+Migration script to add 'ldda_parent_id' column to the implicitly_converted_dataset_association table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Implicitly_converted_table = Table( "implicitly_converted_dataset_association", metadata, autoload=True )
+        if migrate_engine.name != 'sqlite':
+            c = Column( "ldda_parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True )
+        else:
+            # Can't use the ForeignKey in sqlite.
+            c = Column( "ldda_parent_id", Integer, index=True, nullable=True )
+        c.create( Implicitly_converted_table, index_name="ix_implicitly_converted_dataset_assoc_ldda_parent_id")
+        assert c is Implicitly_converted_table.c.ldda_parent_id
+    except Exception as e:
+        print("Adding ldda_parent_id column to implicitly_converted_dataset_association table failed: %s" % str( e ))
+        log.debug( "Adding ldda_parent_id column to implicitly_converted_dataset_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Implicitly_converted_table = Table( "implicitly_converted_dataset_association", metadata, autoload=True )
+        Implicitly_converted_table.c.ldda_parent_id.drop()
+    except Exception as e:
+        print("Dropping ldda_parent_id column from implicitly_converted_dataset_association table failed: %s" % str( e ))
+        log.debug( "Dropping ldda_parent_id column from implicitly_converted_dataset_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py b/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py
new file mode 100644
index 0000000..688b00d
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0074_add_purged_column_to_library_dataset_table.py
@@ -0,0 +1,70 @@
+"""
+Migration script to add 'purged' column to the library_dataset table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def boolean_false(migrate_engine):
+    if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+        return False
+    elif migrate_engine.name == 'sqlite':
+        return 0
+    else:
+        raise Exception( 'Unable to set False data value for unknown database type: %s' % str( migrate_engine.name ) )
+
+
+def boolean_true(migrate_engine):
+    if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+        return True
+    elif migrate_engine.name == 'sqlite':
+        return 1
+    else:
+        raise Exception( 'Unable to set True data value for unknown database type: %s' % str( migrate_engine.name ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        LibraryDataset_table = Table( "library_dataset", metadata, autoload=True )
+        c = Column( "purged", Boolean, index=True, default=False )
+        c.create( LibraryDataset_table, index_name='ix_library_dataset_purged')
+        assert c is LibraryDataset_table.c.purged
+    except Exception as e:
+        print("Adding purged column to library_dataset table failed: ", str( e ))
+    # Update the purged flag to the default False
+    cmd = "UPDATE library_dataset SET purged = %s;" % boolean_false(migrate_engine)
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Setting default data for library_dataset.purged column failed: %s" % ( str( e ) ) )
+
+    # Update the purged flag for those LibraryDatasets whose purged flag should be True.  This happens
+    # when the LibraryDataset has no active LibraryDatasetDatasetAssociations.
+    cmd = "SELECT * FROM library_dataset WHERE deleted = %s;" % boolean_true(migrate_engine)
+    deleted_lds = migrate_engine.execute( cmd ).fetchall()
+    for row in deleted_lds:
+        cmd = "SELECT * FROM library_dataset_dataset_association WHERE library_dataset_id = %d AND library_dataset_dataset_association.deleted = %s;" % ( int( row.id ), boolean_false(migrate_engine) )
+        active_lddas = migrate_engine.execute( cmd ).fetchall()
+        if not active_lddas:
+            print("Updating purged column to True for LibraryDataset id : ", int( row.id ))
+            cmd = "UPDATE library_dataset SET purged = %s WHERE id = %d;" % ( boolean_true(migrate_engine), int( row.id ) )
+            migrate_engine.execute( cmd )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        LibraryDataset_table = Table( "library_dataset", metadata, autoload=True )
+        LibraryDataset_table.c.purged.drop()
+    except Exception as e:
+        print("Dropping purged column from library_dataset table failed: ", str( e ))
diff --git a/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py b/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py
new file mode 100644
index 0000000..431a170
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0075_add_subindex_column_to_run_table.py
@@ -0,0 +1,33 @@
+"""
+Migration script to add a 'subindex' column to the run table.
+"""
+from __future__ import print_function
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Run_table = Table( "run", metadata, autoload=True )
+        c = Column( "subindex", TrimmedString( 255 ), index=True )
+        c.create( Run_table, index_name="ix_run_subindex")
+        assert c is Run_table.c.subindex
+    except Exception as e:
+        print("Adding the subindex column to the run table failed: ", str( e ))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Run_table = Table( "run", metadata, autoload=True )
+        Run_table.c.subindex.drop()
+    except Exception as e:
+        print("Dropping the subindex column from run table failed: ", str( e ))
diff --git a/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py b/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py
new file mode 100644
index 0000000..d17f381
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py
@@ -0,0 +1,90 @@
+'''
+This migration script fixes the data corruption caused in the form_values
+table (content json field) by migrate script 65.
+'''
+from __future__ import print_function
+
+import logging
+from json import dumps, loads
+
+from sqlalchemy import MetaData
+
+from galaxy.model.custom_types import _sniffnfix_pg9_hex
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    cmd = "SELECT form_values.id as id, form_values.content as field_values, form_definition.fields as fdfields " \
+          + " FROM form_definition, form_values " \
+          + " WHERE form_values.form_definition_id=form_definition.id " \
+          + " ORDER BY form_values.id"
+    result = migrate_engine.execute( cmd )
+    corrupted_rows = 0
+    for row in result:
+        # first check if loading the dict from the json succeeds
+        # if that fails, it means that the content field is corrupted.
+        try:
+            field_values_dict = loads( _sniffnfix_pg9_hex( str( row['field_values'] ) ) )
+        except Exception:
+            corrupted_rows = corrupted_rows + 1
+            # content field is corrupted
+            fields_list = loads( _sniffnfix_pg9_hex( str( row['fdfields'] ) ) )
+            field_values_str = _sniffnfix_pg9_hex( str( row['field_values'] ) )
+            try:
+                # Encoding errors?  Just to be safe.
+                print("Attempting to fix row %s" % row['id'])
+                print("Prior to replacement: %s" % field_values_str)
+            except Exception:
+                pass
+            field_values_dict = {}
+            # look for each field name in the values and extract its value (string)
+            for index in range( len(fields_list) ):
+                field = fields_list[index]
+                field_name_key = '"%s": "' % field['name']
+                field_index = field_values_str.find( field_name_key )
+                if field_index == -1:
+                    # if the field name is not present the field values dict then
+                    # inform the admin that this form values cannot be fixed
+                    print("The 'content' field of row 'id' %i does not have the field '%s' in the 'form_values' table and could not be fixed by this migration script." % ( int( field['id'] ), field['name'] ))
+                else:
+                    # check if this is the last field
+                    if index == len( fields_list ) - 1:
+                        # since this is the last field, the value string lies between the
+                        # field name and the '"}' string at the end, hence len(field_values_str) - 2
+                        value = field_values_str[ field_index + len( field_name_key ):len( field_values_str ) - 2 ]
+                    else:
+                        # if this is not the last field then the value string lies between
+                        # this field name and the next field name
+                        next_field = fields_list[index + 1]
+                        next_field_index = field_values_str.find( '", "%s": "' % next_field['name'] )
+                        value = field_values_str[ field_index + len( field_name_key ):next_field_index ]
+                    # clean up the value string, escaping the required quotes and newline/tab characters
+                    value = value.replace( "'", "\''" )\
+                                 .replace( '"', '\\\\"' )\
+                                 .replace( '\r', "\\\\r" )\
+                                 .replace( '\n', "\\\\n" )\
+                                 .replace( '\t', "\\\\t" )
+                    # add to the new values dict
+                    field_values_dict[ field['name'] ] = value
+            # update the db
+            json_values = dumps(field_values_dict)
+            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % ( json_values, int( row['id'] ) )
+            migrate_engine.execute( cmd )
+            try:
+                print("Post replacement: %s" % json_values)
+            except Exception:
+                pass
+    if corrupted_rows:
+        print('Fixed %i corrupted rows.' % corrupted_rows)
+    else:
+        print('No corrupted rows found.')
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
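+
+
+# --- Editor's sketch, not part of the original migration: a standalone
+# illustration of the recovery strategy in upgrade(), which cuts each value
+# out of the broken JSON text by locating the '"<name>": "' markers. The
+# corrupted sample below is hypothetical.
+if __name__ == '__main__':
+    sample_fields = [ { 'name': 'field_0' }, { 'name': 'field_1' } ]
+    broken = '{"field_0": "it"s broken", "field_1": "ok"}'  # unescaped inner quote
+    recovered = {}
+    for index in range( len( sample_fields ) ):
+        key = '"%s": "' % sample_fields[ index ][ 'name' ]
+        start = broken.find( key ) + len( key )
+        if index == len( sample_fields ) - 1:
+            # last field: the value runs up to the closing '"}'
+            value = broken[ start:len( broken ) - 2 ]
+        else:
+            next_key = '", "%s": "' % sample_fields[ index + 1 ][ 'name' ]
+            value = broken[ start:broken.find( next_key ) ]
+        recovered[ sample_fields[ index ][ 'name' ] ] = value
+    print( recovered )  # e.g. {'field_0': 'it"s broken', 'field_1': 'ok'}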
diff --git a/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py b/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py
new file mode 100644
index 0000000..cedde80
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0077_create_tool_tag_association_table.py
@@ -0,0 +1,49 @@
+"""
+Migration script to create table for storing tool tag associations.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Table to add
+
+ToolTagAssociation_table = Table( "tool_tag_association", metadata,
+                                  Column( "id", Integer, primary_key=True ),
+                                  Column( "tool_id", TrimmedString(255), index=True ),
+                                  Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                  Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                  Column( "user_tname", TrimmedString(255), index=True),
+                                  Column( "value", TrimmedString(255), index=True),
+                                  Column( "user_value", TrimmedString(255), index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create tool_tag_association table
+    try:
+        ToolTagAssociation_table.create()
+    except Exception as e:
+        log.error( "Creating tool_tag_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop tool_tag_association table
+    try:
+        ToolTagAssociation_table.drop()
+    except Exception as e:
+        log.error( "Dropping tool_tag_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py b/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py
new file mode 100644
index 0000000..653cd52
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0078_add_columns_for_disk_usage_accounting.py
@@ -0,0 +1,88 @@
+"""
+Migration script to add 'total_size' column to the dataset table, 'purged'
+column to the HDA table, and 'disk_usage' column to the User and GalaxySession
+tables.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, MetaData, Numeric, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    try:
+        Dataset_table = Table( "dataset", metadata, autoload=True )
+        c = Column( 'total_size', Numeric( 15, 0 ) )
+        c.create( Dataset_table )
+        assert c is Dataset_table.c.total_size
+    except Exception as e:
+        print("Adding total_size column to dataset table failed: %s" % str( e ))
+        log.debug( "Adding total_size column to dataset table failed: %s" % str( e ) )
+
+    try:
+        HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
+        c = Column( "purged", Boolean, index=True, default=False )
+        c.create( HistoryDatasetAssociation_table, index_name="ix_history_dataset_association_purged")
+        assert c is HistoryDatasetAssociation_table.c.purged
+        migrate_engine.execute(HistoryDatasetAssociation_table.update().values(purged=False))
+    except Exception as e:
+        print("Adding purged column to history_dataset_association table failed: %s" % str( e ))
+        log.debug( "Adding purged column to history_dataset_association table failed: %s" % str( e ) )
+
+    try:
+        User_table = Table( "galaxy_user", metadata, autoload=True )
+        c = Column( 'disk_usage', Numeric( 15, 0 ), index=True )
+        c.create( User_table, index_name="ix_galaxy_user_disk_usage")
+        assert c is User_table.c.disk_usage
+    except Exception as e:
+        print("Adding disk_usage column to galaxy_user table failed: %s" % str( e ))
+        log.debug( "Adding disk_usage column to galaxy_user table failed: %s" % str( e ) )
+
+    try:
+        GalaxySession_table = Table( "galaxy_session", metadata, autoload=True )
+        c = Column( 'disk_usage', Numeric( 15, 0 ), index=True )
+        c.create( GalaxySession_table, index_name="ix_galaxy_session_disk_usage")
+        assert c is GalaxySession_table.c.disk_usage
+    except Exception as e:
+        print("Adding disk_usage column to galaxy_session table failed: %s" % str( e ))
+        log.debug( "Adding disk_usage column to galaxy_session table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Dataset_table = Table( "dataset", metadata, autoload=True )
+        Dataset_table.c.total_size.drop()
+    except Exception as e:
+        print("Dropping total_size column from dataset table failed: %s" % str( e ))
+        log.debug( "Dropping total_size column from dataset table failed: %s" % str( e ) )
+
+    try:
+        HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
+        HistoryDatasetAssociation_table.c.purged.drop()
+    except Exception as e:
+        print("Dropping purged column from history_dataset_association table failed: %s" % str( e ))
+        log.debug( "Dropping purged column from history_dataset_association table failed: %s" % str( e ) )
+
+    try:
+        User_table = Table( "galaxy_user", metadata, autoload=True )
+        User_table.c.disk_usage.drop()
+    except Exception as e:
+        print("Dropping disk_usage column from galaxy_user table failed: %s" % str( e ))
+        log.debug( "Dropping disk_usage column from galaxy_user table failed: %s" % str( e ) )
+
+    try:
+        GalaxySession_table = Table( "galaxy_session", metadata, autoload=True )
+        GalaxySession_table.c.disk_usage.drop()
+    except Exception as e:
+        print("Dropping disk_usage column from galaxy_session table failed: %s" % str( e ))
+        log.debug( "Dropping disk_usage column from galaxy_session table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py b/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py
new file mode 100644
index 0000000..3360b82
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0079_input_library_to_job_table.py
@@ -0,0 +1,43 @@
+"""
+Migration script to add the job_to_input_library_dataset table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table
+
+log = logging.getLogger( __name__ )
+
+metadata = MetaData()
+
+JobToInputLibraryDatasetAssociation_table = Table( "job_to_input_library_dataset", metadata,
+                                                   Column( "id", Integer, primary_key=True ),
+                                                   Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                                                   Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
+                                                   Column( "name", String(255) ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create the job_to_input_library_dataset table
+    try:
+        JobToInputLibraryDatasetAssociation_table.create()
+    except Exception as e:
+        print("Creating job_to_input_library_dataset table failed: %s" % str( e ))
+        log.debug( "Creating job_to_input_library_dataset table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop the job_to_input_library_dataset table
+    try:
+        JobToInputLibraryDatasetAssociation_table.drop()
+    except Exception as e:
+        print("Dropping job_to_input_library_dataset table failed: %s" % str( e ))
+        log.debug( "Dropping job_to_input_library_dataset table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0080_quota_tables.py b/lib/galaxy/model/migrate/versions/0080_quota_tables.py
new file mode 100644
index 0000000..411d827
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0080_quota_tables.py
@@ -0,0 +1,105 @@
+"""
+Migration script to create tables for disk quotas.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import BigInteger, Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Tables to add
+
+Quota_table = Table( "quota", metadata,
+                     Column( "id", Integer, primary_key=True ),
+                     Column( "create_time", DateTime, default=now ),
+                     Column( "update_time", DateTime, default=now, onupdate=now ),
+                     Column( "name", String( 255 ), index=True, unique=True ),
+                     Column( "description", TEXT ),
+                     Column( "bytes", BigInteger ),
+                     Column( "operation", String( 8 ) ),
+                     Column( "deleted", Boolean, index=True, default=False ) )
+
+UserQuotaAssociation_table = Table( "user_quota_association", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                    Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+GroupQuotaAssociation_table = Table( "group_quota_association", metadata,
+                                     Column( "id", Integer, primary_key=True ),
+                                     Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+                                     Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
+                                     Column( "create_time", DateTime, default=now ),
+                                     Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+DefaultQuotaAssociation_table = Table( "default_quota_association", metadata,
+                                       Column( "id", Integer, primary_key=True ),
+                                       Column( "create_time", DateTime, default=now ),
+                                       Column( "update_time", DateTime, default=now, onupdate=now ),
+                                       Column( "type", String( 32 ), index=True, unique=True ),
+                                       Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create quota table
+    try:
+        Quota_table.create()
+    except Exception as e:
+        log.debug( "Creating quota table failed: %s" % str( e ) )
+
+    # Create user_quota_association table
+    try:
+        UserQuotaAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating user_quota_association table failed: %s" % str( e ) )
+
+    # Create group_quota_association table
+    try:
+        GroupQuotaAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating group_quota_association table failed: %s" % str( e ) )
+
+    # Create default_quota_association table
+    try:
+        DefaultQuotaAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating default_quota_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop default_quota_association table
+    try:
+        DefaultQuotaAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping default_quota_association table failed: %s" % str( e ) )
+
+    # Drop group_quota_association table
+    try:
+        GroupQuotaAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping group_quota_association table failed: %s" % str( e ) )
+
+    # Drop user_quota_association table
+    try:
+        UserQuotaAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping user_quota_association table failed: %s" % str( e ) )
+
+    # Drop quota table
+    try:
+        Quota_table.drop()
+    except Exception as e:
+        log.debug( "Dropping quota table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py b/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py
new file mode 100644
index 0000000..281c134
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0081_add_tool_version_to_hda_ldda.py
@@ -0,0 +1,40 @@
+"""
+Migration script to add a 'tool_version' column to the hda/ldda tables.
+"""
+from __future__ import print_function
+
+from sqlalchemy import Column, MetaData, Table, TEXT
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        hda_table = Table( "history_dataset_association", metadata, autoload=True )
+        c = Column( "tool_version", TEXT )
+        c.create( hda_table )
+        assert c is hda_table.c.tool_version
+
+        ldda_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+        c = Column( "tool_version", TEXT )
+        c.create( ldda_table )
+        assert c is ldda_table.c.tool_version
+
+    except Exception as e:
+        print("Adding the tool_version column to the hda/ldda tables failed: ", str( e ))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        hda_table = Table( "history_dataset_association", metadata, autoload=True )
+        hda_table.c.tool_version.drop()
+
+        ldda_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+        ldda_table.c.tool_version.drop()
+    except Exception as e:
+        print("Dropping the tool_version column from hda/ldda table failed: ", str( e ))
diff --git a/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py b/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py
new file mode 100644
index 0000000..f8ea186
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0082_add_tool_shed_repository_table.py
@@ -0,0 +1,55 @@
+"""
+Migration script to add the tool_shed_repository table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, Integer, MetaData, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+# New table to store information about cloned tool shed repositories.
+ToolShedRepository_table = Table( "tool_shed_repository", metadata,
+                                  Column( "id", Integer, primary_key=True ),
+                                  Column( "create_time", DateTime, default=now ),
+                                  Column( "update_time", DateTime, default=now, onupdate=now ),
+                                  Column( "tool_shed", TrimmedString( 255 ), index=True ),
+                                  Column( "name", TrimmedString( 255 ), index=True ),
+                                  Column( "description", TEXT ),
+                                  Column( "owner", TrimmedString( 255 ), index=True ),
+                                  Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+                                  Column( "deleted", Boolean, index=True, default=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        ToolShedRepository_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_shed_repository table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        ToolShedRepository_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_shed_repository table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py b/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py
new file mode 100644
index 0000000..220046e
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0083_add_prepare_files_to_task.py
@@ -0,0 +1,64 @@
+"""
+Migration script to add 'prepare_input_files_cmd' and 'working_directory'
+columns to the task table and to drop the now-unused 'part_file' column.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, String, Table, TEXT
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        c = Column( "prepare_input_files_cmd", TEXT, nullable=True )
+        c.create( task_table )
+        assert c is task_table.c.prepare_input_files_cmd
+    except Exception as e:
+        print("Adding prepare_input_files_cmd column to task table failed: %s" % str( e ))
+        log.debug( "Adding prepare_input_files_cmd column to task table failed: %s" % str( e ) )
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        c = Column( "working_directory", String( 1024 ), nullable=True )
+        c.create( task_table )
+        assert c is task_table.c.working_directory
+    except Exception as e:
+        print("Adding working_directory column to task table failed: %s" % str( e ))
+        log.debug( "Adding working_directory column to task table failed: %s" % str( e ) )
+
+    # remove the 'part_file' column - nobody used tasks before this, so no data needs to be migrated
+    try:
+        task_table.c.part_file.drop()
+    except Exception as e:
+        log.debug( "Deleting column 'part_file' from the 'task' table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        task_table.c.prepare_input_files_cmd.drop()
+    except Exception as e:
+        print("Dropping prepare_input_files_cmd column from task table failed: %s" % str( e ))
+        log.debug( "Dropping prepare_input_files_cmd column from task table failed: %s" % str( e ) )
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        task_table.c.working_directory.drop()
+    except Exception as e:
+        print("Dropping working_directory column from task table failed: %s" % str( e ))
+        log.debug( "Dropping working_directory column from task table failed: %s" % str( e ) )
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        c = Column( "part_file", String( 1024 ), nullable=True )
+        c.create( task_table )
+        assert c is task_table.c.part_file
+    except Exception as e:
+        print("Adding part_file column to task table failed: %s" % str( e ))
+        log.debug( "Adding part_file column to task table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py b/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py
new file mode 100644
index 0000000..549c7f5
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0084_add_ldda_id_to_implicit_conversion_table.py
@@ -0,0 +1,39 @@
+"""
+Migration script to add 'ldda_id' column to the implicitly_converted_dataset_association table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        Implicitly_converted_table = Table( "implicitly_converted_dataset_association", metadata, autoload=True )
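+        # On SQLite the column is added without the foreign-key reference,
+        # presumably to work around ALTER TABLE limitations on that dialect.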
+        if migrate_engine.name != 'sqlite':
+            c = Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True )
+        else:
+            c = Column( "ldda_id", Integer, index=True, nullable=True )
+        c.create( Implicitly_converted_table, index_name="ix_implicitly_converted_ds_assoc_ldda_id")
+        assert c is Implicitly_converted_table.c.ldda_id
+    except Exception as e:
+        print("Adding ldda_id column to implicitly_converted_dataset_association table failed: %s" % str( e ))
+        log.debug( "Adding ldda_id column to implicitly_converted_dataset_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Implicitly_converted_table = Table( "implicitly_converted_dataset_association", metadata, autoload=True )
+        Implicitly_converted_table.c.ldda_id.drop()
+    except Exception as e:
+        print("Dropping ldda_id column from implicitly_converted_dataset_association table failed: %s" % str( e ))
+        log.debug( "Dropping ldda_id column from implicitly_converted_dataset_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0085_add_task_info.py b/lib/galaxy/model/migrate/versions/0085_add_task_info.py
new file mode 100644
index 0000000..35dacee
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0085_add_task_info.py
@@ -0,0 +1,38 @@
+"""
+Migration script to add 'info' column to the task table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        c = Column( "info", TrimmedString(255), nullable=True )
+        c.create( task_table )
+        assert c is task_table.c.info
+    except Exception as e:
+        print("Adding info column to table table failed: %s" % str( e ))
+        log.debug( "Adding info column to task table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        task_table.c.info.drop()
+    except Exception as e:
+        print("Dropping info column from task table failed: %s" % str( e ))
+        log.debug( "Dropping info column from task table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py b/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py
new file mode 100644
index 0000000..cc39f7b
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0086_add_tool_shed_repository_table_columns.py
@@ -0,0 +1,82 @@
+"""
+Migration script to add the metadata, update_available and includes_datatypes columns to the tool_shed_repository table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def get_default_false(migrate_engine):
+    if migrate_engine.name in ['mysql', 'sqlite']:
+        return "0"
+    elif migrate_engine.name in ['postgres', 'postgresql']:
+        return "false"
+
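+# For example, get_default_false(engine) returns the boolean literal used in
+# the raw UPDATE statements below: "0" for MySQL/SQLite, "false" for PostgreSQL.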
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    c = Column( "metadata", JSONType(), nullable=True )
+    try:
+        c.create( ToolShedRepository_table )
+        assert c is ToolShedRepository_table.c.metadata
+    except Exception as e:
+        print("Adding metadata column to the tool_shed_repository table failed: %s" % str( e ))
+        log.debug( "Adding metadata column to the tool_shed_repository table failed: %s" % str( e ) )
+    c = Column( "includes_datatypes", Boolean, index=True, default=False )
+    try:
+        c.create( ToolShedRepository_table, index_name="ix_tool_shed_repository_includes_datatypes")
+        assert c is ToolShedRepository_table.c.includes_datatypes
+        migrate_engine.execute( "UPDATE tool_shed_repository SET includes_datatypes=%s" % get_default_false(migrate_engine))
+    except Exception as e:
+        print("Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e ))
+        log.debug( "Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e ) )
+    c = Column( "update_available", Boolean, default=False )
+    try:
+        c.create( ToolShedRepository_table )
+        assert c is ToolShedRepository_table.c.update_available
+        migrate_engine.execute( "UPDATE tool_shed_repository SET update_available=%s" % get_default_false(migrate_engine))
+    except Exception as e:
+        print("Adding update_available column to the tool_shed_repository table failed: %s" % str( e ))
+        log.debug( "Adding update_available column to the tool_shed_repository table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    try:
+        ToolShedRepository_table.c.metadata.drop()
+    except Exception as e:
+        print("Dropping column metadata from the tool_shed_repository table failed: %s" % str( e ))
+        log.debug( "Dropping column metadata from the tool_shed_repository table failed: %s" % str( e ) )
+    try:
+        ToolShedRepository_table.c.includes_datatypes.drop()
+    except Exception as e:
+        print("Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e ))
+        log.debug( "Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e ) )
+    try:
+        ToolShedRepository_table.c.update_available.drop()
+    except Exception as e:
+        print("Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ))
+        log.debug( "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py b/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py
new file mode 100644
index 0000000..6892030
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0087_tool_id_guid_map_table.py
@@ -0,0 +1,54 @@
+"""
+Migration script to create the tool_id_guid_map table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, Integer, MetaData, String, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "tool_id", String( 255 ) ),
+                             Column( "tool_version", TEXT ),
+                             Column( "tool_shed", TrimmedString( 255 ) ),
+                             Column( "repository_owner", TrimmedString( 255 ) ),
+                             Column( "repository_name", TrimmedString( 255 ) ),
+                             Column( "guid", TEXT, index=True, unique=True ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        ToolIdGuidMap_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_id_guid_map table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        ToolIdGuidMap_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py b/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py
new file mode 100644
index 0000000..7c68112
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py
@@ -0,0 +1,67 @@
+"""
+Migration script to add the installed_changeset_revision column to the tool_shed_repository table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    col = Column( "installed_changeset_revision", TrimmedString( 255 ) )
+    try:
+        col.create( ToolShedRepository_table )
+        assert col is ToolShedRepository_table.c.installed_changeset_revision
+    except Exception as e:
+        print("Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ))
+        log.debug( "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ) )
+    # Update each row by setting the value of installed_changeset_revision to be the value of changeset_revision.
+    # This will be problematic if the value of changeset_revision was updated to something other than the value
+    # that it was when the repository was installed (because the install path determined in real time will attempt to
+    # find the repository using the updated changeset_revision instead of the required installed_changeset_revision),
+    # but at the time this script was written, this scenario is extremely unlikely.
+    cmd = "SELECT id AS id, " \
+        + "installed_changeset_revision AS installed_changeset_revision, " \
+        + "changeset_revision AS changeset_revision " \
+        + "FROM tool_shed_repository;"
+    tool_shed_repositories = migrate_engine.execute( cmd ).fetchall()
+    update_count = 0
+    for row in tool_shed_repositories:
+        cmd = "UPDATE tool_shed_repository " \
+            + "SET installed_changeset_revision = '%s' " % row.changeset_revision \
+            + "WHERE changeset_revision = '%s';" % row.changeset_revision
+        migrate_engine.execute( cmd )
+        update_count += 1
+    print("Updated the installed_changeset_revision column for ", update_count, " rows in the tool_shed_repository table.  ")
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    try:
+        ToolShedRepository_table.c.installed_changeset_revision.drop()
+    except Exception as e:
+        print("Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ))
+        log.debug( "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py b/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py
new file mode 100644
index 0000000..51917b5
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0089_add_object_store_id_columns.py
@@ -0,0 +1,40 @@
+"""
+Migration script to add 'object_store_id' column to various tables
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    for t_name in ( 'dataset', 'job', 'metadata_file' ):
+        t = Table( t_name, metadata, autoload=True )
+        c = Column( "object_store_id", TrimmedString( 255 ), index=True )
+        try:
+            c.create( t, index_name="ix_%s_object_store_id" % t_name)
+            assert c is t.c.object_store_id
+        except Exception as e:
+            print("Adding object_store_id column to %s table failed: %s" % ( t_name, str( e ) ))
+            log.debug( "Adding object_store_id column to %s table failed: %s" % ( t_name, str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    for t_name in ( 'dataset', 'job', 'metadata_file' ):
+        t = Table( t_name, metadata, autoload=True )
+        try:
+            t.c.object_store_id.drop()
+        except Exception as e:
+            print("Dropping object_store_id column from %s table failed: %s" % ( t_name, str( e ) ))
+            log.debug( "Dropping object_store_id column from %s table failed: %s" % ( t_name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py b/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py
new file mode 100644
index 0000000..56a8c21
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0090_add_tool_shed_repository_table_columns.py
@@ -0,0 +1,63 @@
+"""
+Migration script to add the uninstalled and dist_to_shed columns to the tool_shed_repository table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def default_false(migrate_engine):
+    if migrate_engine.name in ['mysql', 'sqlite']:
+        return "0"
+    elif migrate_engine.name in ['postgres', 'postgresql']:
+        return "false"
+
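+# As in migration 0086, default_false(engine) yields the dialect-appropriate
+# boolean literal ("0" or "false") for the raw UPDATE statements below.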
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    c = Column( "uninstalled", Boolean, default=False )
+    try:
+        c.create( ToolShedRepository_table )
+        assert c is ToolShedRepository_table.c.uninstalled
+        migrate_engine.execute( "UPDATE tool_shed_repository SET uninstalled=%s" % default_false(migrate_engine) )
+    except Exception as e:
+        print("Adding uninstalled column to the tool_shed_repository table failed: %s" % str( e ))
+    c = Column( "dist_to_shed", Boolean, default=False )
+    try:
+        c.create( ToolShedRepository_table )
+        assert c is ToolShedRepository_table.c.dist_to_shed
+        migrate_engine.execute( "UPDATE tool_shed_repository SET dist_to_shed=%s" % default_false(migrate_engine) )
+    except Exception as e:
+        print("Adding dist_to_shed column to the tool_shed_repository table failed: %s" % str( e ))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    try:
+        ToolShedRepository_table.c.uninstalled.drop()
+    except Exception as e:
+        print("Dropping column uninstalled from the tool_shed_repository table failed: %s" % str( e ))
+    try:
+        ToolShedRepository_table.c.dist_to_shed.drop()
+    except Exception as e:
+        print("Dropping column dist_to_shed from the tool_shed_repository table failed: %s" % str( e ))
diff --git a/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py b/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py
new file mode 100644
index 0000000..7b33cdd
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py
@@ -0,0 +1,125 @@
+"""
+Migration script to create the tool_version and tool_version_association tables and drop the tool_id_guid_map table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+from json import loads
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, String, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import _sniffnfix_pg9_hex, TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def nextval( migrate_engine, table, col='id' ):
+    if migrate_engine.name in ['postgres', 'postgresql']:
+        return "nextval('%s_%s_seq')" % ( table, col )
+    elif migrate_engine.name in ['mysql', 'sqlite']:
+        return "null"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
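+# For example, nextval( engine, 'tool_version' ) renders as
+# "nextval('tool_version_id_seq')" on PostgreSQL so the raw INSERT below can
+# fill the id column, while MySQL/SQLite auto-increment when given "null".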
+
+def localtimestamp(migrate_engine):
+    if migrate_engine.name in ['mysql', 'postgres', 'postgresql']:
+        return "LOCALTIMESTAMP"
+    elif migrate_engine.name == 'sqlite':
+        return "current_date || ' ' || current_time"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
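+# localtimestamp(engine) likewise yields a dialect-specific expression for the
+# current local timestamp, keeping the INSERT template portable.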
+
+ToolVersion_table = Table( "tool_version", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, default=now, onupdate=now ),
+    Column( "tool_id", String( 255 ) ),
+    Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=True ) )
+
+ToolVersionAssociation_table = Table( "tool_version_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ),
+    Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+
+    ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, autoload=True )
+
+    metadata.reflect()
+    # Create the tables.
+    try:
+        ToolVersion_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_version table failed: %s" % str( e ) )
+    try:
+        ToolVersionAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_version_association table failed: %s" % str( e ) )
+    # Populate the tool_version table with the tools included in installed tool shed repositories.
+    cmd = "SELECT id, metadata FROM tool_shed_repository"
+    result = migrate_engine.execute( cmd )
+    count = 0
+    for row in result:
+        if row[1]:
+            tool_shed_repository_id = row[0]
+            repository_metadata = loads( _sniffnfix_pg9_hex( str( row[1] ) ) )
+            # Create a new row in the tool_version table for each tool included in the repository.  We will NOT
+            # handle tool_version_associations because we do not have the information we need to do so.
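+            # The repository metadata is assumed to look roughly like
+            #   {"tools": [{"guid": "<shed>/repos/<owner>/<name>/<id>/<version>", ...}, ...]}
+            # and only each tool's guid is needed to seed the tool_version rows.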
+            tools = repository_metadata.get( 'tools', [] )
+            for tool_dict in tools:
+                cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
+                    ( nextval( migrate_engine, 'tool_version' ), localtimestamp( migrate_engine ), localtimestamp( migrate_engine ), tool_dict[ 'guid' ], tool_shed_repository_id )
+                migrate_engine.execute( cmd )
+                count += 1
+    print("Added %d rows to the new tool_version table." % count)
+    # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
+    try:
+        ToolIdGuidMap_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+
+    ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata,
+        Column( "id", Integer, primary_key=True ),
+        Column( "create_time", DateTime, default=now ),
+        Column( "update_time", DateTime, default=now, onupdate=now ),
+        Column( "tool_id", String( 255 ) ),
+        Column( "tool_version", TEXT ),
+        Column( "tool_shed", TrimmedString( 255 ) ),
+        Column( "repository_owner", TrimmedString( 255 ) ),
+        Column( "repository_name", TrimmedString( 255 ) ),
+        Column( "guid", TEXT, index=True, unique=True ) )
+
+    metadata.reflect()
+    try:
+        ToolVersionAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_version_association table failed: %s" % str( e ) )
+    try:
+        ToolVersion_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_version table failed: %s" % str( e ) )
+    try:
+        ToolIdGuidMap_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_id_guid_map table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py b/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py
new file mode 100644
index 0000000..2f8df45
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py
@@ -0,0 +1,52 @@
+"""
+Migration script to create the migrate_tools table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, Integer, MetaData, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+MigrateTools_table = Table( "migrate_tools", metadata,
+                            Column( "repository_id", TrimmedString( 255 ) ),
+                            Column( "repository_path", TEXT ),
+                            Column( "version", Integer ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+
+    metadata.reflect()
+    # Create the table.
+    try:
+        MigrateTools_table.create()
+        cmd = "INSERT INTO migrate_tools VALUES ('GalaxyTools', 'lib/galaxy/tool_shed/migrate', %d)" % 1
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Creating migrate_tools table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        MigrateTools_table.drop()
+    except Exception as e:
+        log.debug( "Dropping migrate_tools table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py b/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py
new file mode 100644
index 0000000..94aae09
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py
@@ -0,0 +1,51 @@
+"""
+Migration script to create "params" column in job table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Column to add.
+params_col = Column( "params", TrimmedString(255), index=True )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Add column to Job table.
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+        params_col.create( Job_table, index_name="ix_job_params")
+        assert params_col is Job_table.c.params
+
+    except Exception as e:
+        print(str(e))
+        log.debug( "Adding column 'params' to job table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop column from Job table.
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+        params_col = Job_table.c.params
+        params_col.drop()
+    except Exception as e:
+        log.debug( "Dropping column 'params' from job table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py b/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py
new file mode 100644
index 0000000..a534875
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0094_add_job_handler_col.py
@@ -0,0 +1,51 @@
+"""
+Migration script to create "handler" column in job table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Column to add.
+handler_col = Column( "handler", TrimmedString(255), index=True )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Add column to Job table.
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+        handler_col.create( Job_table, index_name="ix_job_handler" )
+        assert handler_col is Job_table.c.handler
+
+    except Exception as e:
+        print(str(e))
+        log.debug( "Adding column 'handler' to job table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop column from Job table.
+    try:
+        Job_table = Table( "job", metadata, autoload=True )
+        handler_col = Job_table.c.handler
+        handler_col.drop()
+    except Exception as e:
+        log.debug( "Dropping column 'handler' from job table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0095_hda_subsets.py b/lib/galaxy/model/migrate/versions/0095_hda_subsets.py
new file mode 100644
index 0000000..fc26291
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0095_hda_subsets.py
@@ -0,0 +1,54 @@
+"""
+Migration script to create table for tracking history_dataset_association subsets.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table, Unicode
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# Table to add.
+
+HistoryDatasetAssociationSubset_table = Table( "history_dataset_association_subset", metadata,
+                                               Column( "id", Integer, primary_key=True ),
+                                               Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ) ),
+                                               Column( "history_dataset_association_subset_id", Integer, ForeignKey( "history_dataset_association.id" ) ),
+                                               Column( "location", Unicode(255), index=True) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Create history_dataset_association_subset.
+    try:
+        HistoryDatasetAssociationSubset_table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Creating history_dataset_association_subset table failed: %s" % str( e ) )
+
+    # Manually create the indexes with short names because the auto-generated
+    # index names would exceed MySQL's 64-character identifier limit.
+    i1 = Index( "ix_hda_id", HistoryDatasetAssociationSubset_table.c.history_dataset_association_id )
+    i2 = Index( "ix_hda_subset_id", HistoryDatasetAssociationSubset_table.c.history_dataset_association_subset_id )
+    try:
+        i1.create()
+        i2.create()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Adding indices to table 'history_dataset_association_subset' table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop history_dataset_association_subset table.
+    try:
+        HistoryDatasetAssociationSubset_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug( "Dropping history_dataset_association_subset table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0096_openid_provider.py b/lib/galaxy/model/migrate/versions/0096_openid_provider.py
new file mode 100644
index 0000000..3610515
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0096_openid_provider.py
@@ -0,0 +1,47 @@
+"""
+Migration script to add column to openid table for provider.
+Remove any OpenID entries with a non-unique GenomeSpace Identifier.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+BAD_IDENTIFIER = 'https://identity.genomespace.org/identityServer/xrd.jsp'
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    try:
+        OpenID_table = Table( "galaxy_user_openid", metadata, autoload=True )
+        c = Column( "provider", TrimmedString( 255 ) )
+        c.create( OpenID_table )
+        assert c is OpenID_table.c.provider
+    except Exception as e:
+        print("Adding provider column to galaxy_user_openid table failed: %s" % str( e ))
+        log.debug( "Adding provider column to galaxy_user_openid table failed: %s" % str( e ) )
+
+    try:
+        cmd = "DELETE FROM galaxy_user_openid WHERE openid='%s'" % ( BAD_IDENTIFIER )
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Deleting bad Identifiers from galaxy_user_openid failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        OpenID_table = Table( "galaxy_user_openid", metadata, autoload=True )
+        OpenID_table.c.provider.drop()
+    except Exception as e:
+        print("Dropping provider column from galaxy_user_openid table failed: %s" % str( e ))
+        log.debug( "Dropping provider column from galaxy_user_openid table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py b/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py
new file mode 100644
index 0000000..1e20160
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py
@@ -0,0 +1,47 @@
+"""
+Migration script to add the ctx_rev column to the tool_shed_repository table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    col = Column( "ctx_rev", TrimmedString( 10 ) )
+    try:
+        col.create( ToolShedRepository_table )
+        assert col is ToolShedRepository_table.c.ctx_rev
+    except Exception as e:
+        print("Adding ctx_rev column to the tool_shed_repository table failed: %s" % str( e ))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    try:
+        ToolShedRepository_table.c.ctx_rev.drop()
+    except Exception as e:
+        print("Dropping column ctx_rev from the tool_shed_repository table failed: %s" % str( e ))
diff --git a/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py b/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py
new file mode 100644
index 0000000..6b8283b
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0098_genome_index_tool_data_table.py
@@ -0,0 +1,54 @@
+"""
+Migration script to create the genome_index_tool_data table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, String, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+# New table in changeset TODO:TODO
+GenomeIndexToolData_table = Table( "genome_index_tool_data", metadata,
+                                   Column( "id", Integer, primary_key=True ),
+                                   Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                                   Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
+                                   Column( "deferred_job_id", Integer, ForeignKey( "deferred_job.id" ), index=True ),
+                                   Column( "transfer_job_id", Integer, ForeignKey( "transfer_job.id" ), index=True ),
+                                   Column( "fasta_path", String( 255 ) ),
+                                   Column( "created_time", DateTime, default=now ),
+                                   Column( "modified_time", DateTime, default=now, onupdate=now ),
+                                   Column( "indexer", String( 64 ) ),
+                                   Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+
+    metadata.reflect()
+    try:
+        GenomeIndexToolData_table.create()
+    except Exception as e:
+        log.debug( "Creating genome_index_tool_data table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        GenomeIndexToolData_table.drop()
+    except Exception as e:
+        log.debug( "Dropping genome_index_tool_data table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py b/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py
new file mode 100644
index 0000000..b3e24b8
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0099_add_tool_dependency_table.py
@@ -0,0 +1,54 @@
+"""
+Migration script to add the tool_dependency table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+# New table to track the tool dependencies of installed tool shed repositories.
+ToolDependency_table = Table( "tool_dependency", metadata,
+                              Column( "id", Integer, primary_key=True ),
+                              Column( "create_time", DateTime, default=now ),
+                              Column( "update_time", DateTime, default=now, onupdate=now ),
+                              Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ),
+                              Column( "installed_changeset_revision", TrimmedString( 255 ) ),
+                              Column( "name", TrimmedString( 255 ) ),
+                              Column( "version", TrimmedString( 40 ) ),
+                              Column( "type", TrimmedString( 40 ) ),
+                              Column( "uninstalled", Boolean, default=False ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        ToolDependency_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_dependency table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        ToolDependency_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_dependency table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py b/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py
new file mode 100644
index 0000000..ed8deec
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0100_alter_tool_dependency_table_version_column.py
@@ -0,0 +1,53 @@
+"""
+Migration script to alter the type of the tool_dependency.version column from TrimmedString(40) to Text.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    Table( "tool_dependency", metadata, autoload=True )
+    # Change the tool_dependency table's version column from TrimmedString to Text.
+    if migrate_engine.name in ['postgres', 'postgresql']:
+        cmd = "ALTER TABLE tool_dependency ALTER COLUMN version TYPE Text;"
+    elif migrate_engine.name == 'mysql':
+        cmd = "ALTER TABLE tool_dependency MODIFY COLUMN version Text;"
+    else:
+        # We don't have to do anything for sqlite tables.  From the sqlite documentation at http://sqlite.org/datatype3.html:
+        # 1.0 Storage Classes and Datatypes
+        # Each value stored in an SQLite database (or manipulated by the database engine) has one of the following storage classes:
+        # NULL. The value is a NULL value.
+        # INTEGER. The value is a signed integer, stored in 1, 2, 3, 4, 6, or 8 bytes depending on the magnitude of the value.
+        # REAL. The value is a floating point value, stored as an 8-byte IEEE floating point number.
+        # TEXT. The value is a text string, stored using the database encoding (UTF-8, UTF-16BE or UTF-16LE).
+        # BLOB. The value is a blob of data, stored exactly as it was input.
+        cmd = None
+    if cmd:
+        try:
+            migrate_engine.execute( cmd )
+        except Exception as e:
+            log.debug( "Altering tool_dependency.version column from TrimmedString(40) to Text failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    # Not necessary to change column type Text to TrimmedString(40).
+    pass
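
(Column type changes are dialect-specific DDL, hence the dispatch on migrate_engine.name above. sqlalchemy-migrate's changeset extension also offers a portable spelling of the same change, which the 0115 script below uses for galaxy_user.password; a hedged sketch of this script's change written that way:

    # Illustrative only: the same type change via the changeset API, which
    # renders dialect-appropriate ALTER statements.
    import migrate.changeset  # noqa: F401 -- monkeypatches .alter() onto columns
    from sqlalchemy import MetaData, Table, Text

    def upgrade(migrate_engine):
        metadata = MetaData(bind=migrate_engine)
        tool_dependency = Table("tool_dependency", metadata, autoload=True)
        tool_dependency.c.version.alter(type=Text)
)
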
diff --git a/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py b/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py
new file mode 100644
index 0000000..fcdf802
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py
@@ -0,0 +1,44 @@
+"""
+Migration script to drop the installed_changeset_revision column from the tool_dependency table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+    except NoSuchTableError:
+        ToolDependency_table = None
+        log.debug( "Failed loading table tool_dependency" )
+    if ToolDependency_table is not None:
+        try:
+            col = ToolDependency_table.c.installed_changeset_revision
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'installed_changeset_revision' from tool_dependency table failed: %s" % ( str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    pass
diff --git a/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py b/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py
new file mode 100644
index 0000000..773b4bd
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0102_add_tool_dependency_status_columns.py
@@ -0,0 +1,74 @@
+"""
+Migration script to add status and error_message columns to the tool_dependency table and drop its uninstalled column.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+    if migrate_engine.name == 'sqlite':
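+        # SQLite's ALTER TABLE ADD COLUMN cannot add a NOT NULL column without a default, so relax the constraint here.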
+        col = Column( "status", TrimmedString( 255 ))
+    else:
+        col = Column( "status", TrimmedString( 255 ), nullable=False)
+    try:
+        col.create( ToolDependency_table )
+        assert col is ToolDependency_table.c.status
+    except Exception as e:
+        print("Adding status column to the tool_dependency table failed: %s" % str( e ))
+    col = Column( "error_message", TEXT )
+    try:
+        col.create( ToolDependency_table )
+        assert col is ToolDependency_table.c.error_message
+    except Exception as e:
+        print("Adding error_message column to the tool_dependency table failed: %s" % str( e ))
+
+    if migrate_engine.name != 'sqlite':
+        # This breaks in sqlite due to failure to drop check constraint.
+        # TODO move to alembic.
+        try:
+            ToolDependency_table.c.uninstalled.drop()
+        except Exception as e:
+            print("Dropping uninstalled column from the tool_dependency table failed: %s" % str( e ))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+    try:
+        ToolDependency_table.c.status.drop()
+    except Exception as e:
+        print("Dropping column status from the tool_dependency table failed: %s" % str( e ))
+    try:
+        ToolDependency_table.c.error_message.drop()
+    except Exception as e:
+        print("Dropping column error_message from the tool_dependency table failed: %s" % str( e ))
+    col = Column( "uninstalled", Boolean, default=False )
+    try:
+        col.create( ToolDependency_table )
+        assert col is ToolDependency_table.c.uninstalled
+    except Exception as e:
+        print("Adding uninstalled column to the tool_dependency table failed: %s" % str( e ))
diff --git a/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py b/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py
new file mode 100644
index 0000000..e19d0c7
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0103_add_tool_shed_repository_status_columns.py
@@ -0,0 +1,71 @@
+"""Migration script to add status and error_message columns to the tool_shed_repository table."""
+from __future__ import print_function
+
+import datetime
+
+from sqlalchemy import Column, MetaData, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    # Add the status column to the tool_shed_repository table.
+    col = Column( "status", TrimmedString( 255 ) )
+    try:
+        col.create( ToolShedRepository_table )
+        assert col is ToolShedRepository_table.c.status
+    except Exception as e:
+        print("Adding status column to the tool_shed_repository table failed: %s" % str( e ))
+    # Add the error_message column to the tool_shed_repository table.
+    col = Column( "error_message", TEXT )
+    try:
+        col.create( ToolShedRepository_table )
+        assert col is ToolShedRepository_table.c.error_message
+    except Exception as e:
+        print("Adding error_message column to the tool_shed_repository table failed: %s" % str( e ))
+    # Update the status column value for tool_shed_repositories to the default value 'Installed'.
+    cmd = "UPDATE tool_shed_repository SET status = 'Installed';"
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        print("Exception executing sql command: ")
+        print(cmd)
+        print(str( e ))
+    # Update the status column for tool_shed_repositories that have been uninstalled.
+    cmd = "UPDATE tool_shed_repository SET status = 'Uninstalled' WHERE uninstalled;"
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        print("Exception executing sql command: ")
+        print(cmd)
+        print(str( e ))
+    # Update the status column for tool_shed_repositories that have been deactivated.
+    cmd = "UPDATE tool_shed_repository SET status = 'Deactivated' where deleted and not uninstalled;"
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        print("Exception executing sql command: ")
+        print(cmd)
+        print(str( e ))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    try:
+        ToolShedRepository_table.c.status.drop()
+    except Exception as e:
+        print("Dropping column status from the tool_shed_repository table failed: %s" % str( e ))
+    try:
+        ToolShedRepository_table.c.error_message.drop()
+    except Exception as e:
+        print("Dropping column error_message from the tool_shed_repository table failed: %s" % str( e ))
diff --git a/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py b/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
new file mode 100644
index 0000000..11e2ae9
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
@@ -0,0 +1,86 @@
+"""
+Migration script to update the deferred job parameters for liftover transfer jobs.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, Integer, MetaData, String, Table
+from sqlalchemy.orm import mapper, scoped_session, sessionmaker
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType
+from galaxy.util.bunch import Bunch
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+context = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
+
+
+class DeferredJob( object ):
+    states = Bunch( NEW='new',
+                    WAITING='waiting',
+                    QUEUED='queued',
+                    RUNNING='running',
+                    OK='ok',
+                    ERROR='error' )
+
+    def __init__( self, state=None, plugin=None, params=None ):
+        self.state = state
+        self.plugin = plugin
+        self.params = params
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+
+    DeferredJob.table = Table( "deferred_job", metadata,
+                               Column( "id", Integer, primary_key=True ),
+                               Column( "create_time", DateTime, default=now ),
+                               Column( "update_time", DateTime, default=now, onupdate=now ),
+                               Column( "state", String( 64 ), index=True ),
+                               Column( "plugin", String( 128 ), index=True ),
+                               Column( "params", JSONType ) )
+
+    mapper( DeferredJob, DeferredJob.table, properties={} )
+
+    liftoverjobs = dict()
+
+    jobs = context.query( DeferredJob ).filter_by( plugin='LiftOverTransferPlugin' ).all()
+
+    for job in jobs:
+        if job.params[ 'parentjob' ] not in liftoverjobs:
+            liftoverjobs[ job.params[ 'parentjob' ] ] = []
+        liftoverjobs[ job.params[ 'parentjob'] ].append( job.id )
+
+    for parent in liftoverjobs:
+        lifts = liftoverjobs[ parent ]
+        deferred = context.query( DeferredJob ).filter_by( id=parent ).first()
+        deferred.params[ 'liftover' ] = lifts
+
+    context.flush()
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+
+    jobs = context.query( DeferredJob ).filter_by( plugin='GenomeTransferPlugin' ).all()
+
+    for job in jobs:
+        if len( job.params[ 'liftover' ] ) == 0:
+            continue
+        transfers = []
+        for lift in job.params[ 'liftover' ]:
+            liftoverjob = context.query( DeferredJob ).filter_by( id=lift ).first()
+            transfers.append( liftoverjob.params[ 'transfer_job_id' ] )
+        job.params[ 'liftover' ] = transfers
+
+    context.flush()
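
(This is the one data-only migration in this batch: it maps a throwaway DeferredJob class onto the existing table so the JSONType params column can be edited through the ORM and flushed. The same pattern in miniature, with illustrative names:

    from sqlalchemy import MetaData, Table
    from sqlalchemy.orm import mapper, scoped_session, sessionmaker

    metadata = MetaData()
    session = scoped_session(sessionmaker(autoflush=False, autocommit=True))

    class Row(object):
        pass

    def rename_plugin(migrate_engine, old_name, new_name):
        metadata.bind = migrate_engine
        mapper(Row, Table("deferred_job", metadata, autoload=True))
        for row in session.query(Row).filter_by(plugin=old_name).all():
            row.plugin = new_name
        session.flush()
)
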
diff --git a/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py b/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py
new file mode 100644
index 0000000..58a538f
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0105_add_cleanup_event_table.py
@@ -0,0 +1,120 @@
+"""
+Migration script to add the cleanup_event* tables.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+# New table to log cleanup events
+CleanupEvent_table = Table( "cleanup_event", metadata,
+                            Column( "id", Integer, primary_key=True ),
+                            Column( "create_time", DateTime, default=now ),
+                            Column( "message", TrimmedString( 1024 ) ) )
+
+CleanupEventDatasetAssociation_table = Table( "cleanup_event_dataset_association", metadata,
+                                              Column( "id", Integer, primary_key=True ),
+                                              Column( "create_time", DateTime, default=now ),
+                                              Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                              Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ) )
+
+CleanupEventMetadataFileAssociation_table = Table( "cleanup_event_metadata_file_association", metadata,
+                                                   Column( "id", Integer, primary_key=True ),
+                                                   Column( "create_time", DateTime, default=now ),
+                                                   Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                                   Column( "metadata_file_id", Integer, ForeignKey( "metadata_file.id" ), index=True ) )
+
+CleanupEventHistoryAssociation_table = Table( "cleanup_event_history_association", metadata,
+                                              Column( "id", Integer, primary_key=True ),
+                                              Column( "create_time", DateTime, default=now ),
+                                              Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                              Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ) )
+
+CleanupEventHistoryDatasetAssociationAssociation_table = Table( "cleanup_event_hda_association", metadata,
+                                                                Column( "id", Integer, primary_key=True ),
+                                                                Column( "create_time", DateTime, default=now ),
+                                                                Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                                                Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ) )
+
+CleanupEventLibraryAssociation_table = Table( "cleanup_event_library_association", metadata,
+                                              Column( "id", Integer, primary_key=True ),
+                                              Column( "create_time", DateTime, default=now ),
+                                              Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                              Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ) )
+
+CleanupEventLibraryFolderAssociation_table = Table( "cleanup_event_library_folder_association", metadata,
+                                                    Column( "id", Integer, primary_key=True ),
+                                                    Column( "create_time", DateTime, default=now ),
+                                                    Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                                    Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ) )
+
+CleanupEventLibraryDatasetAssociation_table = Table( "cleanup_event_library_dataset_association", metadata,
+                                                     Column( "id", Integer, primary_key=True ),
+                                                     Column( "create_time", DateTime, default=now ),
+                                                     Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                                     Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), index=True ) )
+
+CleanupEventLibraryDatasetDatasetAssociationAssociation_table = Table( "cleanup_event_ldda_association", metadata,
+                                                                       Column( "id", Integer, primary_key=True ),
+                                                                       Column( "create_time", DateTime, default=now ),
+                                                                       Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                                                       Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ) )
+
+CleanupEventImplicitlyConvertedDatasetAssociationAssociation_table = Table( "cleanup_event_icda_association", metadata,
+                                                                            Column( "id", Integer, primary_key=True ),
+                                                                            Column( "create_time", DateTime, default=now ),
+                                                                            Column( "cleanup_event_id", Integer, ForeignKey( "cleanup_event.id" ), index=True, nullable=True ),
+                                                                            Column( "icda_id", Integer, ForeignKey( "implicitly_converted_dataset_association.id" ), index=True ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        CleanupEvent_table.create()
+        CleanupEventDatasetAssociation_table.create()
+        CleanupEventMetadataFileAssociation_table.create()
+        CleanupEventHistoryAssociation_table.create()
+        CleanupEventHistoryDatasetAssociationAssociation_table.create()
+        CleanupEventLibraryAssociation_table.create()
+        CleanupEventLibraryFolderAssociation_table.create()
+        CleanupEventLibraryDatasetAssociation_table.create()
+        CleanupEventLibraryDatasetDatasetAssociationAssociation_table.create()
+        CleanupEventImplicitlyConvertedDatasetAssociationAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        CleanupEventImplicitlyConvertedDatasetAssociationAssociation_table.drop()
+        CleanupEventLibraryDatasetDatasetAssociationAssociation_table.drop()
+        CleanupEventLibraryDatasetAssociation_table.drop()
+        CleanupEventLibraryFolderAssociation_table.drop()
+        CleanupEventLibraryAssociation_table.drop()
+        CleanupEventHistoryDatasetAssociationAssociation_table.drop()
+        CleanupEventHistoryAssociation_table.drop()
+        CleanupEventMetadataFileAssociation_table.drop()
+        CleanupEventDatasetAssociation_table.drop()
+        CleanupEvent_table.drop()
+    except Exception as e:
+        log.debug( "Dropping table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py b/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py
new file mode 100644
index 0000000..843aea4
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0106_add_missing_indexes.py
@@ -0,0 +1,81 @@
+"""
+Migration script to create missing indexes.  Adding new columns to existing tables via SQLAlchemy does not create the index, even if the column definition includes index=True.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Index, MetaData, Table
+from sqlalchemy.engine import reflection
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+metadata = MetaData()
+
+indexes = (
+    ( "ix_metadata_file_lda_id", 'metadata_file', 'lda_id' ),                                   # 0003
+    ( "ix_history_importable", 'history', 'importable' ),                                       # 0007
+    ( "ix_sample_bar_code", 'sample', 'bar_code' ),                                             # 0009
+    ( "ix_request_type_deleted", 'request_type', 'deleted' ),                                   # 0012
+    ( "ix_galaxy_user_username", 'galaxy_user', 'username' ),                                   # 0014
+    ( "ix_form_definition_type", 'form_definition', 'type' ),                                   # 0019
+    ( "ix_form_definition_layout", 'form_definition', 'layout' ),                               # 0019
+    ( "ix_job_library_folder_id", 'job', 'library_folder_id' ),                                 # 0020
+    ( "ix_page_published", 'page', 'published' ),                                               # 0023
+    ( "ix_page_deleted", 'page', 'deleted' ),                                                   # 0023
+    ( "ix_galaxy_user_form_values_id", 'galaxy_user', 'form_values_id' ),                       # 0025
+    ( "ix_lia_deleted", 'library_info_association', 'deleted' ),                                # 0036
+    ( "ix_lfia_deleted", 'library_folder_info_association', 'deleted' ),                        # 0036
+    ( "ix_lddia_deleted", 'library_dataset_dataset_info_association', 'deleted' ),              # 0036
+    ( "ix_sample_library_id", 'sample', 'library_id' ),                                         # 0037
+    ( "ix_sample_folder_id", 'sample', 'folder_id' ),                                           # 0037
+    ( "ix_lia_inheritable", 'library_info_association', 'inheritable' ),                        # 0038
+    ( "ix_lfia_inheritable", 'library_folder_info_association', 'inheritable' ),                # 0038
+    ( "ix_job_imported", 'job', 'imported' ),                                                   # 0051
+    ( "ix_request_notification", 'request', 'notification' ),                                   # 0057
+    ( "ix_sd_external_service_id", 'sample_dataset', 'external_service_id' ),                   # 0068
+    ( "ix_icda_ldda_parent_id", 'implicitly_converted_dataset_association', 'ldda_parent_id' ),  # 0073
+    ( "ix_library_dataset_purged", 'library_dataset', 'purged' ),                               # 0074
+    ( "ix_run_subindex", 'run', 'subindex' ),                                                   # 0075
+    ( "ix_history_dataset_association_purged", 'history_dataset_association', 'purged' ),       # 0078
+    ( "ix_galaxy_user_disk_usage", 'galaxy_user', 'disk_usage' ),                               # 0078
+    ( "ix_galaxy_session_disk_usage", 'galaxy_session', 'disk_usage' ),                         # 0078
+    ( "ix_icda_ldda_id", 'implicitly_converted_dataset_association', 'ldda_id' ),               # 0084
+    ( "ix_tsr_includes_datatypes", 'tool_shed_repository', 'includes_datatypes' ),              # 0086
+    ( "ix_dataset_object_store_id", 'dataset', 'object_store_id' ),                             # 0089
+    ( "ix_job_object_store_id", 'job', 'object_store_id' ),                                     # 0089
+    ( "ix_metadata_file_object_store_id", 'metadata_file', 'object_store_id' ),                 # 0089
+    ( "ix_job_handler", 'job', 'handler' ),                                                     # 0094
+    ( "ix_galaxy_user_email", 'galaxy_user', 'email' )                                          # 0106
+)
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    insp = reflection.Inspector.from_engine(migrate_engine)
+    # Create missing indexes
+    for ix, table, col in indexes:
+        try:
+            t = Table( table, metadata, autoload=True )
+            if ix not in [ins_ix.get('name', None) for ins_ix in insp.get_indexes(table)]:
+                log.debug("Creating index '%s' on column '%s' in table '%s'" % (ix, col, table))
+                Index( ix, t.c[col] ).create()
+            else:
+                log.debug("Index '%s' already exists in table '%s', not recreating" % (ix, table))
+        except Exception as e:
+            log.error("Unable to create index '%s': %s" % (ix, str(e)))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop indexes
+    for ix, table, col in indexes:
+        try:
+            t = Table( table, metadata, autoload=True )
+            Index( ix, t.c[col] ).drop()
+        except Exception as e:
+            log.error("Unable to drop index '%s': %s" % (ix, str(e)))
diff --git a/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py b/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py
new file mode 100644
index 0000000..ea1cfcc
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0107_add_exit_code_to_job_and_task.py
@@ -0,0 +1,68 @@
+"""
+Add the exit_code column to the Job and Task tables.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, Integer, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+# There was a bug when only one column was used for both tables,
+# so create separate columns.
+exit_code_job_col = Column( "exit_code", Integer, nullable=True )
+exit_code_task_col = Column( "exit_code", Integer, nullable=True )
+
+
+def display_migration_details():
+    print("")
+    print("This migration script adds a 'handler' column to the Job table.")
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Add the exit_code column to the Job table.
+    try:
+        job_table = Table( "job", metadata, autoload=True )
+        exit_code_job_col.create( job_table )
+        assert exit_code_job_col is job_table.c.exit_code
+    except Exception as e:
+        print(str(e))
+        log.error( "Adding column 'exit_code' to job table failed: %s" % str( e ) )
+        return
+
+    # Add the exit_code column to the Task table.
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        exit_code_task_col.create( task_table )
+        assert exit_code_task_col is task_table.c.exit_code
+    except Exception as e:
+        print(str(e))
+        log.error( "Adding column 'exit_code' to task table failed: %s" % str( e ) )
+        return
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop the Job table's exit_code column.
+    try:
+        job_table = Table( "job", metadata, autoload=True )
+        exit_code_col = job_table.c.exit_code
+        exit_code_col.drop()
+    except Exception as e:
+        log.debug( "Dropping 'exit_code' column from job table failed: %s" % ( str( e ) ) )
+
+    # Drop the Task table's exit_code column.
+    try:
+        task_table = Table( "task", metadata, autoload=True )
+        exit_code_col = task_table.c.exit_code
+        exit_code_col.drop()
+    except Exception as e:
+        log.debug( "Dropping 'exit_code' column from task table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
new file mode 100644
index 0000000..35fe96f
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0108_add_extended_metadata.py
@@ -0,0 +1,76 @@
+"""
+Add the ExtendedMetadata and ExtendedMetadataIndex tables
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, TEXT
+
+from galaxy.model.custom_types import JSONType
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+ExtendedMetadata_table = Table("extended_metadata", metadata,
+                               Column( "id", Integer, primary_key=True ),
+                               Column( "data", JSONType ) )
+
+ExtendedMetadataIndex_table = Table("extended_metadata_index", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "extended_metadata_id", Integer, ForeignKey("extended_metadata.id",
+                                                                                        onupdate="CASCADE",
+                                                                                        ondelete="CASCADE" ),
+                                            index=True ),
+                                    Column( "path", String( 255 )),
+                                    Column( "value", TEXT))
+
+extended_metadata_ldda_col = Column( "extended_metadata_id", Integer, ForeignKey("extended_metadata.id"), nullable=True )
+
+
+def display_migration_details():
+    print("This migration script adds a ExtendedMetadata tables")
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        ExtendedMetadata_table.create()
+    except Exception as e:
+        log.debug("Could not create ExtendedMetadata table: %s" % str(e))
+    try:
+        ExtendedMetadataIndex_table.create()
+    except Exception as e:
+        log.debug("Could not create ExtendedMetadataIndex table: %s" % str(e))
+    # Add the extended_metadata_id to the ldda table
+    try:
+        ldda_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+        extended_metadata_ldda_col.create( ldda_table )
+        assert extended_metadata_ldda_col is ldda_table.c.extended_metadata_id
+    except Exception as e:
+        print(str(e))
+        log.error( "Adding column 'extended_metadata_id' to library_dataset_dataset_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        ExtendedMetadataIndex_table.drop()
+    except Exception as e:
+        log.debug( "Dropping 'extended_metadata_index' table failed: %s" % ( str( e ) ) )
+
+    try:
+        ExtendedMetadata_table.drop()
+    except Exception as e:
+        log.debug( "Dropping 'extended_metadata' table failed: %s" % ( str( e ) ) )
+
+    # Drop the LDDA table's extended metadata ID column.
+    try:
+        ldda_table = Table( "library_dataset_dataset_association", metadata, autoload=True )
+        extended_metadata_id = ldda_table.c.extended_metadata_id
+        extended_metadata_id.drop()
+    except Exception as e:
+        log.debug( "Dropping 'extended_metadata_id' column from library_dataset_dataset_association table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py
new file mode 100644
index 0000000..ac94a18
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py
@@ -0,0 +1,61 @@
+"""
+Migration script to add the repository_dependency and repository_repository_dependency_association tables.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+RepositoryDependency_table = Table( "repository_dependency", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                                    Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
+
+RepositoryRepositoryDependencyAssociation_table = Table( "repository_repository_dependency_association", metadata,
+                                                         Column( "id", Integer, primary_key=True ),
+                                                         Column( "create_time", DateTime, default=now ),
+                                                         Column( "update_time", DateTime, default=now, onupdate=now ),
+                                                         Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
+                                                         Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        RepositoryDependency_table.create()
+    except Exception as e:
+        log.debug( "Creating repository_dependency table failed: %s" % str( e ) )
+    try:
+        RepositoryRepositoryDependencyAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating repository_repository_dependency_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        RepositoryRepositoryDependencyAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping repository_repository_dependency_association table failed: %s" % str( e ) )
+    try:
+        RepositoryDependency_table.drop()
+    except Exception as e:
+        log.debug( "Dropping repository_dependency table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py b/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py
new file mode 100644
index 0000000..7f6437c
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0110_add_dataset_uuid.py
@@ -0,0 +1,49 @@
+"""
+Add UUID column to dataset table
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import UUIDType
+
+log = logging.getLogger( __name__ )
+dataset_uuid_column = Column( "uuid", UUIDType, nullable=True )
+
+
+def display_migration_details():
+    print("")
+    print("This migration adds uuid column to dataset table")
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata = MetaData()
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Add the uuid column to the dataset table
+    try:
+        dataset_table = Table( "dataset", metadata, autoload=True )
+        dataset_uuid_column.create( dataset_table )
+        assert dataset_uuid_column is dataset_table.c.uuid
+    except Exception as e:
+        print(str(e))
+        log.error( "Adding column 'uuid' to dataset table failed: %s" % str( e ) )
+        return
+
+
+def downgrade(migrate_engine):
+    metadata = MetaData()
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop the dataset table's uuid column.
+    try:
+        dataset_table = Table( "dataset", metadata, autoload=True )
+        dataset_uuid = dataset_table.c.uuid
+        dataset_uuid.drop()
+    except Exception as e:
+        log.debug( "Dropping 'uuid' column from dataset table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py b/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py
new file mode 100644
index 0000000..d0efe30
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0111_add_job_destinations.py
@@ -0,0 +1,56 @@
+"""
+Add support for job destinations to the job table
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, String, Table
+
+from galaxy.model.custom_types import JSONType
+
+log = logging.getLogger( __name__ )
+
+
+def display_migration_details():
+    print("")
+    print("This migration script adds 'destination_id' and 'destination_params' columns to the Job table.")
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata = MetaData()
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    Job_table = Table( "job", metadata, autoload=True )
+
+    c = Column( "destination_id", String( 255 ), nullable=True )
+    try:
+        c.create( Job_table )
+        assert c is Job_table.c.destination_id
+    except Exception as e:
+        log.error( "Adding column 'destination_id' to job table failed: %s" % str( e ) )
+
+    c = Column( "destination_params", JSONType, nullable=True )
+    try:
+        c.create( Job_table )
+        assert c is Job_table.c.destination_params
+    except Exception as e:
+        log.error( "Adding column 'destination_params' to job table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata = MetaData()
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    Job_table = Table( "job", metadata, autoload=True )
+
+    try:
+        Job_table.c.destination_params.drop()
+    except Exception as e:
+        log.error( "Dropping column 'destination_params' from job table failed: %s" % str( e ) )
+
+    try:
+        Job_table.c.destination_id.drop()
+    except Exception as e:
+        log.error( "Dropping column 'destination_id' from job table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py b/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py
new file mode 100644
index 0000000..a95bb55
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0112_add_data_manager_history_association_and_data_manager_job_association_tables.py
@@ -0,0 +1,59 @@
+"""
+Migration script to add the data_manager_history_association table and data_manager_job_association.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+metadata = MetaData()
+
+DataManagerHistoryAssociation_table = Table( "data_manager_history_association", metadata,
+                                             Column( "id", Integer, primary_key=True),
+                                             Column( "create_time", DateTime, default=now ),
+                                             Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+                                             Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+DataManagerJobAssociation_table = Table( "data_manager_job_association", metadata,
+                                         Column( "id", Integer, primary_key=True),
+                                         Column( "create_time", DateTime, default=now ),
+                                         Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+                                         Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                                         Column( "data_manager_id", TEXT, index=True ) )
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        DataManagerHistoryAssociation_table.create()
+        log.debug( "Created data_manager_history_association table" )
+    except Exception as e:
+        log.debug( "Creating data_manager_history_association table failed: %s" % str( e ) )
+    try:
+        DataManagerJobAssociation_table.create()
+        log.debug( "Created data_manager_job_association table" )
+    except Exception as e:
+        log.debug( "Creating data_manager_job_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        DataManagerHistoryAssociation_table.drop()
+        log.debug( "Dropped data_manager_history_association table" )
+    except Exception as e:
+        log.debug( "Dropping data_manager_history_association table failed: %s" % str( e ) )
+    try:
+        DataManagerJobAssociation_table.drop()
+        log.debug( "Dropped data_manager_job_association table" )
+    except Exception as e:
+        log.debug( "Dropping data_manager_job_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py b/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py
new file mode 100644
index 0000000..2b42b03
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0113_update_migrate_tools_table.py
@@ -0,0 +1,35 @@
+"""
+Migration script to update the migrate_tools.repository_path column to point to the new location lib/galaxy/tool_shed/migrate.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    # Create the table.
+    try:
+        cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';"
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Updating migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    try:
+        cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';"
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Updating migrate_tools.repository_path column to point to the old location lib/galaxy/tool_shed/migrate failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py b/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py
new file mode 100644
index 0000000..79dd48a
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0114_update_migrate_tools_table_again.py
@@ -0,0 +1,35 @@
+"""
+Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    # Update the repository path.
+    try:
+        cmd = "UPDATE migrate_tools set repository_path='lib/tool_shed/galaxy_install/migrate';"
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Updating migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    try:
+        cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';"
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "Updating migrate_tools.repository_path column to point to the old location lib/galaxy/tool_shed/migrate failed: %s" % str( e ) )
diff --git a/lib/galaxy/model/migrate/versions/0115_longer_user_password_field.py b/lib/galaxy/model/migrate/versions/0115_longer_user_password_field.py
new file mode 100644
index 0000000..c981e19
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0115_longer_user_password_field.py
@@ -0,0 +1,26 @@
+"""
+Expand the length of the password field in the galaxy_user table to allow for other hashing schemes
+"""
+import logging
+
+from sqlalchemy import MetaData, String, Table
+
+log = logging.getLogger( __name__ )
+
+
+def upgrade( migrate_engine ):
+    meta = MetaData( bind=migrate_engine )
+    user = Table( 'galaxy_user', meta, autoload=True )
+    try:
+        user.c.password.alter(type=String(255))
+    except Exception:
+        log.exception( "Altering password column failed" )
+
+
+def downgrade(migrate_engine):
+    meta = MetaData( bind=migrate_engine )
+    user = Table( 'galaxy_user', meta, autoload=True )
+    try:
+        user.c.password.alter(type=String(40))
+    except Exception:
+        log.exception( "Altering password column failed" )
diff --git a/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py b/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py
new file mode 100644
index 0000000..699e81a
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py
@@ -0,0 +1,81 @@
+"""
+Migration script to drop the update_available Boolean column and replace it with the tool_shed_status JSONType column in the tool_shed_repository table.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+from galaxy.model.custom_types import JSONType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def default_false( migrate_engine ):
+    if migrate_engine.name in ['mysql', 'sqlite']:
+        return "0"
+    elif migrate_engine.name in [ 'postgres', 'postgresql' ]:
+        return "false"
+
+
+def upgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    except NoSuchTableError:
+        ToolShedRepository_table = None
+        log.debug( "Failed loading table tool_shed_repository" )
+    if ToolShedRepository_table is not None:
+        # For some unknown reason it is no longer possible to drop a column in a migration script if using the sqlite database.
+        if migrate_engine.name != 'sqlite':
+            try:
+                col = ToolShedRepository_table.c.update_available
+                col.drop()
+            except Exception as e:
+                print("Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ))
+        c = Column( "tool_shed_status", JSONType, nullable=True )
+        try:
+            c.create( ToolShedRepository_table )
+            assert c is ToolShedRepository_table.c.tool_shed_status
+        except Exception as e:
+            print("Adding tool_shed_status column to the tool_shed_repository table failed: %s" % str( e ))
+
+
+def downgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+    except NoSuchTableError:
+        ToolShedRepository_table = None
+        log.debug( "Failed loading table tool_shed_repository" )
+    if ToolShedRepository_table is not None:
+        # For some unknown reason it is no longer possible to drop a column in a migration script if using the sqlite database.
+        if migrate_engine.name != 'sqlite':
+            try:
+                col = ToolShedRepository_table.c.tool_shed_status
+                col.drop()
+            except Exception as e:
+                print("Dropping column tool_shed_status from the tool_shed_repository table failed: %s" % str( e ))
+            c = Column( "update_available", Boolean, default=False )
+            try:
+                c.create( ToolShedRepository_table )
+                assert c is ToolShedRepository_table.c.update_available
+                migrate_engine.execute( "UPDATE tool_shed_repository SET update_available=%s" % default_false( migrate_engine ) )
+            except Exception as e:
+                print("Adding column update_available to the tool_shed_repository table failed: %s" % str( e ))
diff --git a/lib/galaxy/model/migrate/versions/0117_add_user_activation.py b/lib/galaxy/model/migrate/versions/0117_add_user_activation.py
new file mode 100644
index 0000000..75f17fb
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0117_add_user_activation.py
@@ -0,0 +1,58 @@
+'''
+Created on Sep 10, 2013
+
+@author: marten
+
+Adds 'active' and 'activation_token' columns to the galaxy_user table.
+'''
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+user_active_column = Column( "active", Boolean, default=True, nullable=True )
+user_activation_token_column = Column( "activation_token", TrimmedString( 64 ), nullable=True )
+
+
+def display_migration_details():
+    print("")
+    print("This migration script adds active and activation_token columns to the user table")
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata = MetaData()
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Add the active and activation_token columns to the user table in one try because they depend on each other.
+    try:
+        user_table = Table( "galaxy_user", metadata, autoload=True )
+        user_active_column.create( table=user_table, populate_default=True)
+        user_activation_token_column.create( table=user_table )
+        assert user_active_column is user_table.c.active
+        assert user_activation_token_column is user_table.c.activation_token
+    except Exception as e:
+        print(str(e))
+        log.error( "Adding columns 'active' and 'activation_token' to galaxy_user table failed: %s" % str( e ) )
+        return
+
+
+def downgrade(migrate_engine):
+    metadata = MetaData()
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop the user table's active and activation_token columns in one try because they depend on each other.
+    try:
+        user_table = Table( "galaxy_user", metadata, autoload=True )
+        user_active = user_table.c.active
+        user_activation_token = user_table.c.activation_token
+        user_active.drop()
+        user_activation_token.drop()
+    except Exception as e:
+        log.debug( "Dropping 'active' and 'activation_token' columns from galaxy_user table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py b/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py
new file mode 100644
index 0000000..df10d0d
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0118_add_hda_extended_metadata.py
@@ -0,0 +1,43 @@
+"""
+Add link from history_dataset_association to the extended_metadata table
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+extended_metadata_hda_col = Column( "extended_metadata_id", Integer, ForeignKey("extended_metadata.id"), nullable=True )
+
+
+def display_migration_details():
+    print("This migration script adds a ExtendedMetadata links to HistoryDatasetAssociation tables")
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    try:
+        hda_table = Table( "history_dataset_association", metadata, autoload=True )
+        extended_metadata_hda_col.create( hda_table )
+        assert extended_metadata_hda_col is hda_table.c.extended_metadata_id
+    except Exception as e:
+        print(str(e))
+        log.error( "Adding column 'extended_metadata_id' to history_dataset_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop the HDA table's extended metadata ID column.
+    try:
+        hda_table = Table( "history_dataset_association", metadata, autoload=True )
+        extended_metadata_id = hda_table.c.extended_metadata_id
+        extended_metadata_id.drop()
+    except Exception as e:
+        log.debug( "Dropping 'extended_metadata_id' column from history_dataset_association table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0119_job_metrics.py b/lib/galaxy/model/migrate/versions/0119_job_metrics.py
new file mode 100644
index 0000000..800ead1
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0119_job_metrics.py
@@ -0,0 +1,99 @@
+"""
+Migration script for job metric plugins.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Numeric, Table, Unicode
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+TEXT_METRIC_MAX_LENGTH = 1023
+
+JobMetricText_table = Table(
+    "job_metric_text",
+    metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "plugin", Unicode(255), ),
+    Column( "metric_name", Unicode(255), ),
+    Column( "metric_value", Unicode(TEXT_METRIC_MAX_LENGTH), ),
+)
+
+
+TaskMetricText_table = Table(
+    "task_metric_text",
+    metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "task_id", Integer, ForeignKey( "task.id" ), index=True ),
+    Column( "plugin", Unicode(255), ),
+    Column( "metric_name", Unicode(255), ),
+    Column( "metric_value", Unicode(TEXT_METRIC_MAX_LENGTH), ),
+)
+
+
+JobMetricNumeric_table = Table(
+    "job_metric_numeric",
+    metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "plugin", Unicode(255), ),
+    Column( "metric_name", Unicode(255), ),
+    Column( "metric_value", Numeric( 22, 7 ), ),
+)
+
+
+TaskMetricNumeric_table = Table(
+    "task_metric_numeric",
+    metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "task_id", Integer, ForeignKey( "task.id" ), index=True ),
+    Column( "plugin", Unicode(255), ),
+    Column( "metric_name", Unicode(255), ),
+    Column( "metric_value", Numeric( 22, 7 ), ),
+)
+
+
+TABLES = [
+    JobMetricText_table,
+    TaskMetricText_table,
+    JobMetricNumeric_table,
+    TaskMetricNumeric_table,
+]
+
+
+def upgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    for table in TABLES:
+        __create(table)
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    for table in TABLES:
+        __drop(table)
+
+
+def __create(table):
+    try:
+        table.create()
+    except Exception as e:
+        print(str(e))
+        log.debug("Creating %s table failed: %s" % (table.name, str( e ) ) )
+
+
+def __drop(table):
+    try:
+        table.drop()
+    except Exception as e:
+        print(str(e))
+        log.debug("Dropping %s table failed: %s" % (table.name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0120_dataset_collections.py b/lib/galaxy/model/migrate/versions/0120_dataset_collections.py
new file mode 100644
index 0000000..5b3fcdb
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0120_dataset_collections.py
@@ -0,0 +1,176 @@
+"""
+Migration script for tables related to dataset collections.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT, Unicode
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+DatasetCollection_table = Table( "dataset_collection", metadata,
+                                 Column( "id", Integer, primary_key=True ),
+                                 Column( "collection_type", Unicode(255), nullable=False, ),
+                                 Column( "create_time", DateTime, default=now ),
+                                 Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+HistoryDatasetCollectionAssociation_table = Table( "history_dataset_collection_association", metadata,
+                                                   Column( "id", Integer, primary_key=True ),
+                                                   Column( "collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
+                                                   Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+                                                   Column( "hid", Integer ),
+                                                   Column( "name", TrimmedString( 255 ) ),
+                                                   Column( "deleted", Boolean, default=False ),
+                                                   Column( "visible", Boolean, default=True ),
+                                                   Column( "copied_from_history_dataset_collection_association_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), nullable=True ),
+                                                   Column( "implicit_output_name", Unicode(255), nullable=True ) )
+
+LibraryDatasetCollectionAssociation_table = Table( "library_dataset_collection_association", metadata,
+                                                   Column( "id", Integer, primary_key=True ),
+                                                   Column( "collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
+                                                   Column( "name", TrimmedString( 255 ) ),
+                                                   Column( "deleted", Boolean, default=False ),
+                                                   Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ) )
+
+DatasetCollectionElement_table = Table( "dataset_collection_element", metadata,
+                                        Column( "id", Integer, primary_key=True ),
+                                        Column( "dataset_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True, nullable=False ),
+                                        Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
+                                        Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
+                                        Column( "child_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True, nullable=True ),
+                                        Column( "element_index", Integer, nullable=False ),
+                                        Column( "element_identifier", Unicode(255), nullable=False ) )
+
+HistoryDatasetCollectionAnnotationAssociation_table = Table( "history_dataset_collection_annotation_association", metadata,
+                                                             Column( "id", Integer, primary_key=True ),
+                                                             Column( "history_dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+                                                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                             Column( "annotation", TEXT ) )
+
+LibraryDatasetCollectionAnnotationAssociation_table = Table( "library_dataset_collection_annotation_association", metadata,
+                                                             Column( "id", Integer, primary_key=True ),
+                                                             Column( "library_dataset_collection_id", Integer, ForeignKey( "library_dataset_collection_association.id" ), index=True ),
+                                                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                             Column( "annotation", TEXT ) )
+
+HistoryDatasetCollectionRatingAssociation_table = Table( "history_dataset_collection_rating_association", metadata,
+                                                         Column( "id", Integer, primary_key=True ),
+                                                         Column( "history_dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+                                                         Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                         Column( "rating", Integer, index=True) )
+
+LibraryDatasetCollectionRatingAssociation_table = Table( "library_dataset_collection_rating_association", metadata,
+                                                         Column( "id", Integer, primary_key=True ),
+                                                         Column( "library_dataset_collection_id", Integer, ForeignKey( "library_dataset_collection_association.id" ), index=True ),
+                                                         Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                         Column( "rating", Integer, index=True) )
+
+HistoryDatasetCollectionTagAssociation_table = Table( "history_dataset_collection_tag_association", metadata,
+                                                      Column( "id", Integer, primary_key=True ),
+                                                      Column( "history_dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+                                                      Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                                      Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                      Column( "user_tname", Unicode(255), index=True),
+                                                      Column( "value", Unicode(255), index=True),
+                                                      Column( "user_value", Unicode(255), index=True) )
+
+LibraryDatasetCollectionTagAssociation_table = Table( "library_dataset_collection_tag_association", metadata,
+                                                      Column( "id", Integer, primary_key=True ),
+                                                      Column( "library_dataset_collection_id", Integer, ForeignKey( "library_dataset_collection_association.id" ), index=True ),
+                                                      Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                                      Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                                      Column( "user_tname", Unicode(255), index=True),
+                                                      Column( "value", Unicode(255), index=True),
+                                                      Column( "user_value", Unicode(255), index=True) )
+
+JobToInputDatasetCollectionAssociation_table = Table( "job_to_input_dataset_collection", metadata,
+                                                      Column( "id", Integer, primary_key=True ),
+                                                      Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                                                      Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+                                                      Column( "name", Unicode(255) ) )
+
+JobToOutputDatasetCollectionAssociation_table = Table( "job_to_output_dataset_collection", metadata,
+                                                       Column( "id", Integer, primary_key=True ),
+                                                       Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                                                       Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+                                                       Column( "name", Unicode(255) ) )
+
+ImplicitlyCreatedDatasetCollectionInput_table = Table( "implicitly_created_dataset_collection_inputs", metadata,
+                                                       Column( "id", Integer, primary_key=True ),
+                                                       Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+                                                       Column( "input_dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+                                                       Column( "name", Unicode(255) ) )
+
+# TODO: Find a better name for this column...
+HiddenBeneathCollection_column = Column( "hidden_beneath_collection_instance_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), nullable=True )
+
+
+TABLES = [
+    DatasetCollection_table,
+    HistoryDatasetCollectionAssociation_table,
+    LibraryDatasetCollectionAssociation_table,
+    DatasetCollectionElement_table,
+    JobToInputDatasetCollectionAssociation_table,
+    JobToOutputDatasetCollectionAssociation_table,
+    ImplicitlyCreatedDatasetCollectionInput_table,
+    HistoryDatasetCollectionAnnotationAssociation_table,
+    HistoryDatasetCollectionRatingAssociation_table,
+    HistoryDatasetCollectionTagAssociation_table,
+    LibraryDatasetCollectionAnnotationAssociation_table,
+    LibraryDatasetCollectionRatingAssociation_table,
+    LibraryDatasetCollectionTagAssociation_table,
+]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    for table in TABLES:
+        __create(table)
+
+    try:
+        hda_table = Table( "history_dataset_association", metadata, autoload=True )
+        HiddenBeneathCollection_column.create( hda_table )
+    except Exception as e:
+        print(str(e))
+        log.exception( "Creating HDA column failed." )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    for table in TABLES:
+        __drop(table)
+
+    try:
+        hda_table = Table( "history_dataset_association", metadata, autoload=True )
+        hidden_beneath_collection_instance_id_col = hda_table.c.hidden_beneath_collection_instance_id
+        hidden_beneath_collection_instance_id_col.drop()
+    except Exception as e:
+        print(str(e))
+        log.exception( "Dropping HDA column failed." )
+
+
+def __create(table):
+    try:
+        table.create()
+    except Exception as e:
+        print(str(e))
+        log.exception("Creating %s table failed: %s" % (table.name, str( e ) ) )
+
+
+def __drop(table):
+    try:
+        table.drop()
+    except Exception as e:
+        print(str(e))
+        log.exception("Dropping %s table failed: %s" % (table.name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py b/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py
new file mode 100644
index 0000000..2614128
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0121_workflow_uuids.py
@@ -0,0 +1,54 @@
+"""
+Add UUIDs to workflows
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import UUIDType
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+"""
+Because both workflow and job requests can be determined
+based on a fixed data structure, their IDs are based on
+hashing the data structure.
+"""
+workflow_uuid_column = Column( "uuid", UUIDType, nullable=True )
+
+
+def display_migration_details():
+    print("This migration script adds a UUID column to workflows")
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Add the uuid column to the workflow table
+    try:
+        workflow_table = Table( "workflow", metadata, autoload=True )
+        workflow_uuid_column.create( workflow_table )
+        assert workflow_uuid_column is workflow_table.c.uuid
+    except Exception as e:
+        print(str(e))
+        log.error( "Adding column 'uuid' to workflow table failed: %s" % str( e ) )
+        return
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop the workflow table's uuid column.
+    try:
+        workflow_table = Table( "workflow", metadata, autoload=True )
+        workflow_uuid = workflow_table.c.uuid
+        workflow_uuid.drop()
+    except Exception as e:
+        log.debug( "Dropping 'uuid' column from workflow table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py b/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py
new file mode 100644
index 0000000..e2b5dcf
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0122_grow_mysql_blobs.py
@@ -0,0 +1,59 @@
+"""
+Migration script to grow MySQL blobs.
+"""
+from __future__ import print_function
+
+from sqlalchemy import MetaData
+
+import datetime
+now = datetime.datetime.utcnow
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData()
+
+BLOB_COLUMNS = [
+    ("deferred_job", "params"),
+    ("extended_metadata", "data"),
+    ("form_definition", "fields"),
+    ("form_definition", "layout"),
+    ("form_values", "content"),
+    ("history_dataset_association", "metadata"),
+    ("job", "destination_params"),
+    ("library_dataset_dataset_association", "metadata"),
+    ("post_job_action", "action_arguments"),
+    ("request", "notification"),
+    ("sample", "workflow"),
+    ("transfer_job", "params"),
+    ("workflow_step", "tool_inputs"),
+    ("workflow_step", "tool_errors"),
+    ("workflow_step", "position"),
+    ("workflow_step", "config"),
+    ("tool_shed_repository", "metadata"),
+    ("tool_shed_repository", "tool_shed_status"),
+]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    if migrate_engine.name != "mysql":
+        return
+
+    for (table, column) in BLOB_COLUMNS:
+        cmd = "ALTER TABLE %s MODIFY COLUMN %s MEDIUMBLOB;" % (table, column)
+        try:
+            migrate_engine.execute( cmd )
+        except Exception as e:
+            print("Failed to grow column %s.%s" % (table, column))
+            print(str( e ))
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Ignoring..., changed the datatype so no guarantee these columns weren't
+    # MEDIUMBLOBs before.
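For the first (table, column) pair, for instance, the loop above emits:

    ALTER TABLE deferred_job MODIFY COLUMN params MEDIUMBLOB;

MySQL's plain BLOB tops out at 64 KB, while MEDIUMBLOB allows up to 16 MB, which is the point of the change; other engines store these columns as effectively unbounded binary types already, hence the early return.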
diff --git a/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py b/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py
new file mode 100644
index 0000000..11a8d38
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0123_add_workflow_request_tables.py
@@ -0,0 +1,143 @@
+"""
+Migration script for workflow request tables.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table, TEXT, Unicode
+
+from galaxy.model.custom_types import JSONType, TrimmedString, UUIDType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+WorkflowRequestInputParameter_table = Table(
+    "workflow_request_input_parameters", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_invocation_id", Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE" )),
+    Column( "name", Unicode(255) ),
+    Column( "type", Unicode(255) ),
+    Column( "value", TEXT ),
+)
+
+
+WorkflowRequestStepState_table = Table(
+    "workflow_request_step_states", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_invocation_id", Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE" )),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id" )),
+    Column( "value", JSONType ),
+)
+
+
+WorkflowRequestToInputDatasetAssociation_table = Table(
+    "workflow_request_to_input_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "name", String(255) ),
+    Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
+    Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+)
+
+
+WorkflowRequestToInputDatasetCollectionAssociation_table = Table(
+    "workflow_request_to_input_collection_dataset", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "name", String(255) ),
+    Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
+    Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
+    Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
+)
+
+
+TABLES = [
+    WorkflowRequestInputParameter_table,
+    WorkflowRequestStepState_table,
+    WorkflowRequestToInputDatasetAssociation_table,
+    WorkflowRequestToInputDatasetCollectionAssociation_table,
+]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    for table in TABLES:
+        __create(table)
+
+    History_column = Column( "history_id", Integer, ForeignKey( "history.id" ), nullable=True )
+    State_column = Column( "state", TrimmedString( 64 ) )
+
+    # TODO: Handle indexes correctly
+    SchedulerId_column = Column( "scheduler", TrimmedString(255) )
+    HandlerId_column = Column( "handler", TrimmedString(255) )
+    WorkflowUUID_column = Column( "uuid", UUIDType, nullable=True )
+    __add_column( History_column, "workflow_invocation", metadata )
+    __add_column( State_column, "workflow_invocation", metadata )
+    __add_column( SchedulerId_column, "workflow_invocation", metadata, index_name="id_workflow_invocation_scheduler" )
+    __add_column( HandlerId_column, "workflow_invocation", metadata, index_name="id_workflow_invocation_handler" )
+    __add_column( WorkflowUUID_column, "workflow_invocation", metadata )
+
+    # All previous invocations have been scheduled...
+    cmd = "UPDATE workflow_invocation SET state = 'scheduled'"
+    try:
+        migrate_engine.execute( cmd )
+    except Exception as e:
+        log.debug( "failed to update past workflow invocation states: %s" % ( str( e ) ) )
+
+    WorkflowInvocationStepAction_column = Column( "action", JSONType, nullable=True )
+    __add_column( WorkflowInvocationStepAction_column, "workflow_invocation_step", metadata )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    for table in TABLES:
+        __drop(table)
+
+    __drop_column( "state", "workflow_invocation", metadata )
+    __drop_column( "scheduler_id", "workflow_invocation", metadata )
+    __drop_column( "uuid", "workflow_invocation", metadata )
+    __drop_column( "history_id", "workflow_invocation", metadata )
+    __drop_column( "handler_id", "workflow_invocation", metadata )
+    __drop_column( "action", "workflow_invocation_step", metadata )
+
+
+def __add_column(column, table_name, metadata, **kwds):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        column.create( table, **kwds )
+    except Exception as e:
+        print(str(e))
+        log.exception( "Adding column %s column failed." % column)
+
+
+def __drop_column( column_name, table_name, metadata ):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        getattr( table.c, column_name ).drop()
+    except Exception as e:
+        print(str(e))
+        log.exception( "Dropping column %s failed." % column_name )
+
+
+def __create(table):
+    try:
+        table.create()
+    except Exception as e:
+        print(str(e))
+        log.exception("Creating %s table failed: %s" % (table.name, str( e ) ) )
+
+
+def __drop(table):
+    try:
+        table.drop()
+    except Exception as e:
+        print(str(e))
+        log.exception("Dropping %s table failed: %s" % (table.name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0124_job_state_history.py b/lib/galaxy/model/migrate/versions/0124_job_state_history.py
new file mode 100644
index 0000000..2c93ca3
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0124_job_state_history.py
@@ -0,0 +1,46 @@
+"""
+Migration script for the job state history table
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, String, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+JobStateHistory_table = Table( "job_state_history", metadata,
+                               Column( "id", Integer, primary_key=True ),
+                               Column( "create_time", DateTime, default=now ),
+                               Column( "update_time", DateTime, default=now, onupdate=now ),
+                               Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+                               Column( "state", String( 64 ), index=True ),
+                               Column( "info", TrimmedString( 255 ) ) )
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    try:
+        JobStateHistory_table.create()
+    except Exception as e:
+        print(str(e))
+        log.exception("Creating %s table failed: %s" % (JobStateHistory_table.name, str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    try:
+        JobStateHistory_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.exception("Dropping %s table failed: %s" % (JobStateHistory_table.name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py b/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py
new file mode 100644
index 0000000..d53ec8e
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0125_workflow_step_tracking.py
@@ -0,0 +1,52 @@
+"""
+Migration script to enhance workflow step usability by adding labels and UUIDs.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString, UUIDType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    StepLabel_column = Column( "label", TrimmedString(255) )
+    StepUUID_column = Column( "uuid", UUIDType, nullable=True )
+    __add_column( StepLabel_column, "workflow_step", metadata )
+    __add_column( StepUUID_column, "workflow_step", metadata )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    __drop_column( "label", "workflow_step", metadata )
+    __drop_column( "uuid", "workflow_step", metadata )
+
+
+def __add_column(column, table_name, metadata, **kwds):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        column.create( table, **kwds )
+    except Exception as e:
+        print(str(e))
+        log.exception( "Adding column %s failed." % column)
+
+
+def __drop_column( column_name, table_name, metadata ):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        getattr( table.c, column_name ).drop()
+    except Exception as e:
+        print(str(e))
+        log.exception( "Dropping column %s failed." % column_name )
diff --git a/lib/galaxy/model/migrate/versions/0126_password_reset.py b/lib/galaxy/model/migrate/versions/0126_password_reset.py
new file mode 100644
index 0000000..d7387e6
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0126_password_reset.py
@@ -0,0 +1,39 @@
+"""
+Migration script for the password reset table
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, String, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+PasswordResetToken_table = Table("password_reset_token", metadata,
+                                 Column( "token", String( 32 ), primary_key=True, unique=True, index=True ),
+                                 Column( "expiration_time", DateTime ),
+                                 Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ))
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    try:
+        PasswordResetToken_table.create()
+    except Exception as e:
+        print(str(e))
+        log.exception("Creating %s table failed: %s" % (PasswordResetToken_table.name, str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        PasswordResetToken_table.drop()
+    except Exception as e:
+        print(str(e))
+        log.exception("Dropping %s table failed: %s" % (PasswordResetToken_table.name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py b/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py
new file mode 100644
index 0000000..ca4d4c4
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0127_output_collection_adjustments.py
@@ -0,0 +1,83 @@
+"""
+Migration script updating collections tables for output collections.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, TEXT, Unicode
+
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+JobToImplicitOutputDatasetCollectionAssociation_table = Table(
+    "job_to_implicit_output_dataset_collection", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+    Column( "dataset_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
+    Column( "name", Unicode(255) )
+)
+
+
+TABLES = [
+    JobToImplicitOutputDatasetCollectionAssociation_table,
+]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    for table in TABLES:
+        __create(table)
+
+    try:
+        dataset_collection_table = Table( "dataset_collection", metadata, autoload=True )
+        # need server_default because column in non-null
+        populated_state_column = Column( 'populated_state', TrimmedString( 64 ), default='ok', server_default="ok", nullable=False )
+        populated_state_column.create( dataset_collection_table )
+
+        populated_message_column = Column( 'populated_state_message', TEXT, nullable=True )
+        populated_message_column.create( dataset_collection_table )
+    except Exception as e:
+        print(str(e))
+        log.exception( "Creating dataset collection populated column failed." )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    for table in TABLES:
+        __drop(table)
+
+    try:
+        dataset_collection_table = Table( "dataset_collection", metadata, autoload=True )
+        populated_state_column = dataset_collection_table.c.populated_state
+        populated_state_column.drop()
+        populated_message_column = dataset_collection_table.c.populated_state_message
+        populated_message_column.drop()
+    except Exception as e:
+        print(str(e))
+        log.exception( "Dropping dataset collection populated_state/ column failed." )
+
+
+def __create(table):
+    try:
+        table.create()
+    except Exception as e:
+        print(str(e))
+        log.exception("Creating %s table failed: %s" % (table.name, str( e ) ) )
+
+
+def __drop(table):
+    try:
+        table.drop()
+    except Exception as e:
+        print(str(e))
+        log.exception("Dropping %s table failed: %s" % (table.name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0128_session_timeout.py b/lib/galaxy/model/migrate/versions/0128_session_timeout.py
new file mode 100644
index 0000000..948ec9c
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0128_session_timeout.py
@@ -0,0 +1,47 @@
+"""
+Migration script to add session update time (used for timeouts)
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    lastaction_column = Column( "last_action", DateTime )
+    __add_column( lastaction_column, "galaxy_session", metadata )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    __drop_column( "last_action", "galaxy_session", metadata )
+
+
+def __add_column(column, table_name, metadata, **kwds):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        column.create( table, **kwds )
+    except Exception as e:
+        print(str(e))
+        log.exception( "Adding column %s failed." % column)
+
+
+def __drop_column( column_name, table_name, metadata ):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        getattr( table.c, column_name ).drop()
+    except Exception as e:
+        print(str(e))
+        log.exception( "Dropping column %s failed." % column_name )
diff --git a/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py b/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py
new file mode 100644
index 0000000..c7ba31a
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0129_job_external_output_metadata_validity.py
@@ -0,0 +1,47 @@
+"""
+Migration script to allow invalidation of job external output metadata temp files
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    isvalid_column = Column( "is_valid", Boolean, default=True )
+    __add_column( isvalid_column, "job_external_output_metadata", metadata )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    __drop_column( "is_valid", "job_external_output_metadata", metadata )
+
+
+def __add_column(column, table_name, metadata, **kwds):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        column.create( table, **kwds )
+    except Exception as e:
+        print(str(e))
+        log.exception( "Adding column %s failed." % column)
+
+
+def __drop_column( column_name, table_name, metadata ):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        getattr( table.c, column_name ).drop()
+    except Exception as e:
+        print(str(e))
+        log.exception( "Dropping column %s failed." % column_name )
diff --git a/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py b/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py
new file mode 100644
index 0000000..55994f6
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0130_change_pref_datatype.py
@@ -0,0 +1,27 @@
+"""
+Migration script to change the value column of user_preference from varchar to text.
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import MetaData, Table, Text
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    t = Table("user_preference", metadata, autoload=True)
+    t.c.value.alter(type=Text)
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Pass, since we don't want to potentially truncate data.
diff --git a/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py b/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py
new file mode 100644
index 0000000..bf791ec
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0131_subworkflow_and_input_parameter_modules.py
@@ -0,0 +1,135 @@
+"""
+Migration script to support subworkflows and workflow request input parameters
+"""
+from __future__ import print_function
+
+import datetime
+import logging
+
+from sqlalchemy import Column, Integer, ForeignKey, MetaData, Table, Index, ForeignKeyConstraint
+
+from galaxy.model.custom_types import TrimmedString, UUIDType, JSONType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+WorkflowInvocationToSubworkflowInvocationAssociation_table = Table(
+    "workflow_invocation_to_subworkflow_invocation_association", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_invocation_id", Integer ),
+    Column( "subworkflow_invocation_id", Integer ),
+    Column( "workflow_step_id", Integer ),
+    ForeignKeyConstraint(['workflow_invocation_id'], ['workflow_invocation.id'], name='fk_wfi_swi_wfi'),
+    ForeignKeyConstraint(['subworkflow_invocation_id'], ['workflow_invocation.id'], name='fk_wfi_swi_swi'),
+    ForeignKeyConstraint(['workflow_step_id'], ['workflow_step.id'], name='fk_wfi_swi_ws')
+)
+
+WorkflowRequestInputStepParameter_table = Table(
+    "workflow_request_input_step_parameter", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "workflow_invocation_id", Integer ),
+    Column( "workflow_step_id", Integer ),
+    Column( "parameter_value", JSONType ),
+    ForeignKeyConstraint(['workflow_invocation_id'], ['workflow_invocation.id'], name='fk_wfreq_isp_wfi'),
+    ForeignKeyConstraint(['workflow_step_id'], ['workflow_step.id'], name='fk_wfreq_isp_ws')
+)
+
+TABLES = [
+    WorkflowInvocationToSubworkflowInvocationAssociation_table,
+    WorkflowRequestInputStepParameter_table,
+]
+
+INDEXES = [
+    Index( "ix_wfinv_swfinv_wfi", WorkflowInvocationToSubworkflowInvocationAssociation_table.c.workflow_invocation_id),
+    Index( "ix_wfinv_swfinv_swfi", WorkflowInvocationToSubworkflowInvocationAssociation_table.c.subworkflow_invocation_id),
+    Index( "ix_wfreq_inputstep_wfi", WorkflowRequestInputStepParameter_table.c.workflow_invocation_id)
+]
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+    if migrate_engine.name in ['postgres', 'postgresql']:
+        subworkflow_id_column = Column( "subworkflow_id", Integer, ForeignKey("workflow.id"), nullable=True )
+        input_subworkflow_step_id_column = Column( "input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), nullable=True )
+        parent_workflow_id_column = Column( "parent_workflow_id", Integer, ForeignKey("workflow.id"), nullable=True )
+    else:
+        subworkflow_id_column = Column( "subworkflow_id", Integer, nullable=True )
+        input_subworkflow_step_id_column = Column( "input_subworkflow_step_id", Integer, nullable=True )
+        parent_workflow_id_column = Column( "parent_workflow_id", Integer, nullable=True )
+    __add_column( subworkflow_id_column, "workflow_step", metadata )
+    __add_column( input_subworkflow_step_id_column, "workflow_step_connection", metadata )
+    __add_column( parent_workflow_id_column, "workflow", metadata )
+    workflow_output_label_column = Column( "label", TrimmedString(255) )
+    workflow_output_uuid_column = Column( "uuid", UUIDType, nullable=True )
+    __add_column( workflow_output_label_column, "workflow_output", metadata )
+    __add_column( workflow_output_uuid_column, "workflow_output", metadata )
+
+    # Make stored_workflow_id nullable, since now workflows can belong to either
+    # a stored workflow or a parent workflow.
+    __alter_column("workflow", "stored_workflow_id", metadata, nullable=True)
+
+    for table in TABLES:
+        # Indexes are automatically created when the tables are.
+        __create(table)
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    __drop_column( "subworkflow_id", "workflow_step", metadata )
+    __drop_column( "parent_workflow_id", "workflow", metadata )
+
+    __drop_column( "input_subworkflow_step_id", "workflow_step_connection", metadata )
+
+    __drop_column( "label", "workflow_output", metadata )
+    __drop_column( "uuid", "workflow_output", metadata )
+
+    for table in TABLES:
+        __drop(table)
+
+
+def __alter_column(table_name, column_name, metadata, **kwds):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        getattr( table.c, column_name ).alter(**kwds)
+    except Exception as e:
+        print(str(e))
+        log.exception( "Adding column %s failed." % column_name)
+
+
+def __add_column(column, table_name, metadata, **kwds):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        column.create( table, **kwds )
+    except Exception as e:
+        print(str(e))
+        log.exception( "Adding column %s failed." % column)
+
+
+def __drop_column( column_name, table_name, metadata ):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        getattr( table.c, column_name ).drop()
+    except Exception as e:
+        print(str(e))
+        log.exception( "Dropping column %s failed." % column_name )
+
+
+def __create(table):
+    try:
+        table.create()
+    except Exception as e:
+        print(str(e))
+        log.exception("Creating %s table failed: %s" % (table.name, str( e ) ) )
+
+
+def __drop(table):
+    try:
+        table.drop()
+    except Exception as e:
+        print(str(e))
+        log.exception("Dropping %s table failed: %s" % (table.name, str( e ) ) )
diff --git a/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py b/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py
new file mode 100755
index 0000000..bb90cc7
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0132_add_lastpasswordchange_to_user.py
@@ -0,0 +1,18 @@
+"""
+Migration script to add a last_password_change field to the user table
+"""
+
+from sqlalchemy import Table, MetaData, DateTime, Column
+
+
+def upgrade(migrate_engine):
+    meta = MetaData(bind=migrate_engine)
+    account = Table('galaxy_user', meta, autoload=True)
+    lpc = Column('last_password_change', DateTime())
+    lpc.create(account)
+
+
+def downgrade(migrate_engine):
+    meta = MetaData(bind=migrate_engine)
+    account = Table('galaxy_user', meta, autoload=True)
+    account.c.last_password_change.drop()
diff --git a/lib/galaxy/model/migrate/versions/0133_add_dependency_column_to_job.py b/lib/galaxy/model/migrate/versions/0133_add_dependency_column_to_job.py
new file mode 100644
index 0000000..a0e9877
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0133_add_dependency_column_to_job.py
@@ -0,0 +1,49 @@
+"""
+Add dependencies column to jobs table
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import JSONType
+
+log = logging.getLogger( __name__ )
+jobs_dependencies_column = Column( "dependencies", JSONType, nullable=True )
+
+
+def display_migration_details():
+    print("")
+    print("This migration adds dependencies column to job table")
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+    metadata = MetaData()
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Add the dependencies column to the job table
+    try:
+        jobs_table = Table( "job", metadata, autoload=True )
+        jobs_dependencies_column.create( jobs_table )
+        assert jobs_dependencies_column is jobs_table.c.dependencies
+    except Exception as e:
+        print(str(e))
+        log.error( "Adding column 'dependencies' to job table failed: %s" % str( e ) )
+        return
+
+
+def downgrade(migrate_engine):
+    metadata = MetaData()
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop the job table's dependencies column.
+    try:
+        jobs_table = Table( "job", metadata, autoload=True )
+        jobs_dependencies = jobs_table.c.dependencies
+        jobs_dependencies.drop()
+    except Exception as e:
+        log.debug( "Dropping 'dependencies' column from job table failed: %s" % ( str( e ) ) )
diff --git a/lib/galaxy/model/orm/__init__.py b/lib/galaxy/model/orm/__init__.py
new file mode 100644
index 0000000..f30f013
--- /dev/null
+++ b/lib/galaxy/model/orm/__init__.py
@@ -0,0 +1,3 @@
+"""
+galaxy.model.orm - ORM-related functionality
+"""
diff --git a/lib/galaxy/model/orm/engine_factory.py b/lib/galaxy/model/orm/engine_factory.py
new file mode 100644
index 0000000..4183e98
--- /dev/null
+++ b/lib/galaxy/model/orm/engine_factory.py
@@ -0,0 +1,21 @@
+import logging
+log = logging.getLogger( __name__ )
+
+from sqlalchemy import create_engine
+
+
+def build_engine(url, engine_options, database_query_profiling_proxy=False, trace_logger=None):
+    # Should we use the logging proxy?
+    if database_query_profiling_proxy:
+        import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+        proxy = logging_connection_proxy.LoggingProxy()
+    # If metlog is enabled, do micrologging
+    elif trace_logger:
+        import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+        proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
+    else:
+        proxy = None
+
+    # Create the database engine
+    engine = create_engine( url, proxy=proxy, **engine_options )
+    return engine
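A hypothetical call, with made-up URL and pool options; enabling the profiling flag swaps in the LoggingProxy defined in the next file:

    from galaxy.model.orm.engine_factory import build_engine

    engine = build_engine(
        "postgresql://galaxy:secret@localhost/galaxy",  # assumed URL
        {"pool_size": 5, "pool_recycle": 3600},         # assumed options
        database_query_profiling_proxy=True)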
diff --git a/lib/galaxy/model/orm/logging_connection_proxy.py b/lib/galaxy/model/orm/logging_connection_proxy.py
new file mode 100644
index 0000000..985c0a2
--- /dev/null
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -0,0 +1,76 @@
+import inspect
+import logging
+import os
+import threading
+import time
+
+from sqlalchemy.interfaces import ConnectionProxy
+
+log = logging.getLogger( __name__ )
+
+wd = os.getcwd()
+
+
+def stripwd( s ):
+    if s.startswith( wd ):
+        return s[len(wd):]
+    return s
+
+
+def pretty_stack():
+    rval = []
+    for frame, fname, line, funcname, _, _ in inspect.stack()[2:]:
+        rval.append( "%s:%s@%d" % ( stripwd( fname ), funcname, line ) )
+    return rval
+
+
+class LoggingProxy(ConnectionProxy):
+    """
+    Logs SQL statements using standard logging module
+    """
+
+    def begin(self, conn, begin):
+        thread_ident = threading.current_thread().ident
+        begin(conn)
+        log.debug("begin transaction: thread: %r" % thread_ident)
+
+    def commit(self, conn, commit):
+        thread_ident = threading.current_thread().ident
+        commit(conn)
+        log.debug("commit transaction: thread: %r" % thread_ident)
+
+    def rollback(self, conn, rollback):
+        thread_ident = threading.current_thread().ident
+        rollback(conn)
+        log.debug("rollback transaction: thread: %r" % thread_ident)
+
+    def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+        thread_ident = threading.current_thread().ident
+        start = time.clock()
+        rval = execute(cursor, statement, parameters, context)
+        duration = time.clock() - start
+        log.debug( "statement: %r parameters: %r executemany: %r duration: %r stack: %r thread: %r",
+                   statement, parameters, executemany, duration, " > ".join( pretty_stack() ), thread_ident )
+        return rval
+
+
+class TraceLoggerProxy(ConnectionProxy):
+    """
+    Logs SQL statements using a metlog client
+    """
+    def __init__( self, trace_logger ):
+        self.trace_logger = trace_logger
+
+    def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+        start = time.clock()
+        rval = execute(cursor, statement, parameters, context)
+        duration = time.clock() - start
+        self.trace_logger.log(
+            "sqlalchemy_query",
+            message="Query executed",
+            statement=statement,
+            parameters=parameters,
+            executemany=executemany,
+            duration=duration
+        )
+        return rval
diff --git a/lib/galaxy/model/orm/now.py b/lib/galaxy/model/orm/now.py
new file mode 100644
index 0000000..1b90b5b
--- /dev/null
+++ b/lib/galaxy/model/orm/now.py
@@ -0,0 +1,13 @@
+from datetime import datetime
+
+# NOTE REGARDING TIMESTAMPS:
+#   It is currently difficult to have the timestamps calculated by the
+#   database in a portable way, so we're doing it in the client. This
+#   also saves us from needing to postfetch on postgres. HOWEVER: it
+#   relies on the client's clock being set correctly, so if clustering
+#   web servers, use a time server to ensure synchronization
+
+# Return the current time in UTC without any timezone information
+now = datetime.utcnow
+
+__all__ = ('now', )
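A minimal sketch of using the shared callable as the client-side default the note above describes; the table is illustrative:

    from sqlalchemy import Column, DateTime, Integer, MetaData, Table

    from galaxy.model.orm.now import now

    metadata = MetaData()
    example = Table("example_item", metadata,  # illustrative table
                    Column("id", Integer, primary_key=True),
                    Column("create_time", DateTime, default=now),
                    Column("update_time", DateTime, default=now, onupdate=now))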
diff --git a/lib/galaxy/model/orm/scripts.py b/lib/galaxy/model/orm/scripts.py
new file mode 100644
index 0000000..d7fce87
--- /dev/null
+++ b/lib/galaxy/model/orm/scripts.py
@@ -0,0 +1,113 @@
+"""
+Code to support database helper scripts (create_db.py, manage_db.py, etc...).
+"""
+import logging
+import os.path
+
+from galaxy.util.properties import load_app_properties
+
+
+log = logging.getLogger( __name__ )
+
+DEFAULT_CONFIG_FILE = 'config/galaxy.ini'
+DEFAULT_CONFIG_PREFIX = ''
+DEFAULT_DATABASE = 'galaxy'
+
+DATABASE = {
+    "galaxy":
+        {
+            'repo': 'lib/galaxy/model/migrate',
+            'old_config_file': 'universe_wsgi.ini',
+            'default_sqlite_file': './database/universe.sqlite',
+            'config_override': 'GALAXY_CONFIG_',
+        },
+    "tool_shed":
+        {
+            'repo': 'lib/galaxy/webapps/tool_shed/model/migrate',
+            'config_file': 'config/tool_shed.ini',
+            'old_config_file': 'tool_shed_wsgi.ini',
+            'default_sqlite_file': './database/community.sqlite',
+            'config_override': 'TOOL_SHED_CONFIG_',
+        },
+    "install":
+        {
+            'repo': 'lib/galaxy/model/tool_shed_install/migrate',
+            'old_config_file': 'universe_wsgi.ini',
+            'config_prefix': 'install_',
+            'default_sqlite_file': './database/install.sqlite',
+            'config_override': 'GALAXY_INSTALL_CONFIG_',
+        },
+}
+
+
+def read_config_file_arg( argv, default, old_default ):
+    if '-c' in argv:
+        pos = argv.index( '-c' )
+        argv.pop(pos)
+        config_file = argv.pop( pos )
+    else:
+        if not os.path.exists( default ) and os.path.exists( old_default ):
+            config_file = old_default
+        elif os.path.exists( default ):
+            config_file = default
+        else:
+            config_file = default + ".sample"
+    return config_file
+
+
+def get_config( argv, cwd=None ):
+    """
+    Read sys.argv and parse out repository of migrations and database url.
+
+    >>> from ConfigParser import SafeConfigParser
+    >>> from tempfile import mkdtemp
+    >>> config_dir = mkdtemp()
+    >>> os.makedirs(os.path.join(config_dir, 'config'))
+    >>> def write_ini(path, property, value):
+    ...     p = SafeConfigParser()
+    ...     p.add_section('app:main')
+    ...     p.set('app:main', property, value)
+    ...     with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f)
+    >>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1')
+    >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir)
+    >>> config['repo']
+    'lib/galaxy/webapps/tool_shed/model/migrate'
+    >>> config['db_url']
+    'sqlite:///pg/testdb1'
+    >>> write_ini('galaxy.ini', 'database_file', 'moo.sqlite')
+    >>> config = get_config(['manage_db.py'], cwd=config_dir)
+    >>> config['db_url']
+    'sqlite:///moo.sqlite?isolation_level=IMMEDIATE'
+    >>> config['repo']
+    'lib/galaxy/model/migrate'
+    """
+    if argv and (argv[-1] in DATABASE):
+        database = argv.pop()  # database name tool_shed, galaxy, or install.
+    else:
+        database = 'galaxy'
+    database_defaults = DATABASE[ database ]
+
+    default = database_defaults.get( 'config_file', DEFAULT_CONFIG_FILE )
+    old_default = database_defaults.get( 'old_config_file' )
+    if cwd is not None:
+        default = os.path.join( cwd, default )
+        old_default = os.path.join( cwd, old_default )
+    config_file = read_config_file_arg( argv, default, old_default )
+    repo = database_defaults[ 'repo' ]
+    config_prefix = database_defaults.get( 'config_prefix', DEFAULT_CONFIG_PREFIX )
+    config_override = database_defaults.get( 'config_override', 'GALAXY_CONFIG_' )
+    default_sqlite_file = database_defaults[ 'default_sqlite_file' ]
+    if cwd:
+        config_file = os.path.join( cwd, config_file )
+
+    properties = load_app_properties( ini_file=config_file, config_prefix=config_override )
+
+    if ("%sdatabase_connection" % config_prefix) in properties:
+        db_url = properties[ "%sdatabase_connection" % config_prefix ]
+    elif ("%sdatabase_file" % config_prefix) in properties:
+        database_file = properties[ "%sdatabase_file" % config_prefix ]
+        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % database_file
+    else:
+        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file
+
+    return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
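+
+
+# Illustrative usage sketch (not part of upstream; assumes load_app_properties
+# applies the config_override prefix to environment variables, as it does
+# elsewhere in Galaxy). An override variable takes precedence over the ini
+# file, e.g.:
+#
+#   TOOL_SHED_CONFIG_database_connection=postgresql://user@host/toolshed \
+#       python manage_db.py upgrade tool_shed
+#
+# get_config( ['manage_db.py', 'tool_shed'] ) would then return that connection
+# string as db_url regardless of what config/tool_shed.ini contains.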
diff --git a/lib/galaxy/model/search.py b/lib/galaxy/model/search.py
new file mode 100644
index 0000000..6f4372a
--- /dev/null
+++ b/lib/galaxy/model/search.py
@@ -0,0 +1,670 @@
+"""
+The GQL (Galaxy Query Language) search engine parses a simple 'SQL-like' query
+syntax to obtain items from a Galaxy installation.
+Rather than allowing (or forcing) the user to query the Galaxy schema directly, it uses
+a small set of 'Views', which are simple table representations of complex Galaxy concepts.
+So while a history and its tags may exist in separate tables in the real schema, in
+GQL they exist in the same view.
+
+Example Queries:
+
+select name, id, file_size from hda
+
+select name from hda
+
+select name, model_class from ldda
+
+select * from history
+
+select * from workflow
+
+select id, name from history where name='Unnamed history'
+
+select * from history where name='Unnamed history'
+
+"""
+
+import logging
+import re
+from json import dumps
+
+import parsley
+from sqlalchemy import and_
+from sqlalchemy.orm import aliased
+
+from galaxy.model import (HistoryDatasetAssociation, LibraryDatasetDatasetAssociation,
+                          History, Library, LibraryFolder, LibraryDataset, StoredWorkflowTagAssociation,
+                          StoredWorkflow, HistoryTagAssociation, HistoryDatasetAssociationTagAssociation,
+                          ExtendedMetadata, ExtendedMetadataIndex, HistoryAnnotationAssociation, Job, JobParameter,
+                          JobToInputLibraryDatasetAssociation, JobToInputDatasetAssociation, JobToOutputDatasetAssociation,
+                          Page, PageRevision)
+from galaxy.model.tool_shed_install import ToolVersion
+
+log = logging.getLogger( __name__ )
+
+
+class ViewField(object):
+    """
+    A ViewField defines a field in a view to which filter operations can be applied.
+    These filter operations are either handled with standard sqlalchemy filter calls,
+    or passed to specialized handlers (for example when a table join would be needed
+    to do the filtering).
+
+    Parameters:
+
+    sqlalchemy_field - Simple filtering using existing table columns; the argument is an sqlalchemy column
+        that the right-hand value will be compared against.
+
+    handler - Requires more specialized code to do the filtering, usually a table join in order to
+        process the conditional.
+
+    post_filter - Simple sqlalchemy-based table filtering is not possible, so the filter is applied to each
+        loaded object; thus methods available on the object can be used for filtering. Example: a library
+        folder must climb its chain of parents to find out which library it belongs to.
+
+    """
+    def __init__(self, name, sqlalchemy_field=None, handler=None, post_filter=None, id_decode=False):
+        self.name = name
+        self.sqlalchemy_field = sqlalchemy_field
+        self.handler = handler
+        self.post_filter = post_filter
+        self.id_decode = id_decode
+
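+# For example (sketch), a plain column-backed field and an encoded-id field
+# whose right-hand value is decoded before filtering:
+#   ViewField('name', sqlalchemy_field=(History, "name"))
+#   ViewField('id', sqlalchemy_field=(History, "id"), id_decode=True)
+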
+
+class ViewQueryBaseClass(object):
+    FIELDS = {}
+    VIEW_NAME = "undefined"
+
+    def __init__(self):
+        self.query = None
+        self.do_query = False
+        self.state = {}
+        self.post_filter = []
+
+    def decode_query_ids(self, trans, conditional):
+        if conditional.operator == 'and':
+            self.decode_query_ids(trans, conditional.left)
+            self.decode_query_ids(trans, conditional.right)
+        else:
+            left_base = conditional.left.split('.')[0]
+            if left_base in self.FIELDS:
+                field = self.FIELDS[left_base]
+                if field.id_decode:
+                    conditional.right = trans.security.decode_id( conditional.right )
+
+    def filter(self, left, operator, right):
+        if operator == 'and':
+            self.filter(left.left, left.operator, left.right)
+            self.filter(right.left, right.operator, right.right)
+        else:
+            left_base = left.split('.')[0]
+            if left_base in self.FIELDS:
+                self.do_query = True
+                field = self.FIELDS[left_base]
+                if field.sqlalchemy_field is not None:
+                    clazz, attribute = field.sqlalchemy_field
+                    sqlalchemy_field_value = getattr(clazz, attribute)
+                    if operator == "=":
+                        self.query = self.query.filter( sqlalchemy_field_value == right )
+                    elif operator == "!=":
+                        self.query = self.query.filter( sqlalchemy_field_value != right )
+                    elif operator == "like":
+                        self.query = self.query.filter( sqlalchemy_field_value.like(right) )
+                    else:
+                        raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+                elif field.handler is not None:
+                    field.handler(self, left, operator, right)
+                elif field.post_filter is not None:
+                    self.post_filter.append( [field.post_filter, left, operator, right] )
+                else:
+                    raise GalaxyParseError("Unable to filter on field: %s" % (left))
+
+            else:
+                raise GalaxyParseError("Unknown field: %s" % (left))
+
+    def search(self, trans):
+        raise GalaxyParseError("Unable to search view: %s" % (self.VIEW_NAME))
+
+    def get_results(self, force_query=False):
+        if self.query is not None and (force_query or self.do_query):
+            for row in self.query.distinct().all():
+                selected = True
+                for f in self.post_filter:
+                    if not f[0](row, f[1], f[2], f[3]):
+                        selected = False
+                if selected:
+                    yield row
+
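+# get_results() streams distinct rows and drops any row for which a registered
+# post_filter callback returns False; post filters cover cases (such as
+# library_path_filter below) that cannot be expressed as column filters.
+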
+
+##################
+# Library Dataset Dataset Searching
+##################
+
+
+def library_extended_metadata_filter(view, left, operator, right):
+    view.do_query = True
+    if 'extended_metadata_joined' not in view.state:
+        view.query = view.query.join( ExtendedMetadata )
+        view.state['extended_metadata_joined'] = True
+    alias = aliased( ExtendedMetadataIndex )
+    field = "/%s" % ("/".join(left.split(".")[1:]))
+    # print "FIELD", field
+    view.query = view.query.filter(
+        and_(
+            ExtendedMetadata.id == alias.extended_metadata_id,
+            alias.path == field,
+            alias.value == str(right)
+        )
+    )
+
+
+def ldda_parent_library_filter(item, left, operator, right):
+    if operator == '=':
+        return right == item.library_dataset.folder.parent_library.id
+    elif operator == '!=':
+        return right != item.library_dataset.folder.parent_library.id
+    raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
+
+class LibraryDatasetDatasetView(ViewQueryBaseClass):
+    VIEW_NAME = "library_dataset_dataset"
+    FIELDS = {
+        'extended_metadata': ViewField('extended_metadata', handler=library_extended_metadata_filter),
+        'name': ViewField('name', sqlalchemy_field=(LibraryDatasetDatasetAssociation, "name")),
+        'id': ViewField('id', sqlalchemy_field=(LibraryDatasetDatasetAssociation, 'id'), id_decode=True),
+        'deleted': ViewField('deleted', sqlalchemy_field=(LibraryDatasetDatasetAssociation, "deleted")),
+        'parent_library_id': ViewField('parent_library_id', id_decode=True, post_filter=ldda_parent_library_filter),
+        'data_type': ViewField('data_type', sqlalchemy_field=(LibraryDatasetDatasetAssociation, "extension")),
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( LibraryDatasetDatasetAssociation )
+
+
+##################
+# Library Searching
+##################
+
+class LibraryView(ViewQueryBaseClass):
+    VIEW_NAME = "library"
+    FIELDS = {
+        'name': ViewField('name', sqlalchemy_field=(Library, "name")),
+        'id': ViewField('id', sqlalchemy_field=(Library, 'id'), id_decode=True),
+        'deleted': ViewField('deleted', sqlalchemy_field=(Library, "deleted")),
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( Library )
+
+
+##################
+# Library Folder Searching
+##################
+def library_folder_parent_library_id_filter(item, left, operator, right):
+    if operator == '=':
+        return item.parent_library.id == right
+    if operator == '!=':
+        return item.parent_library.id != right
+    raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
+
+def library_path_filter(item, left, operator, right):
+    lpath = "/" + "/".join(item.library_path)
+    if operator == '=':
+        return lpath == right
+    if operator == '!=':
+        return lpath != right
+    raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
+
+class LibraryFolderView(ViewQueryBaseClass):
+    VIEW_NAME = "library_folder"
+    FIELDS = {
+        'name': ViewField('name', sqlalchemy_field=(LibraryFolder, "name")),
+        'id': ViewField('id', sqlalchemy_field=(LibraryFolder, "id"), id_decode=True),
+        'parent_id': ViewField('parent_id', sqlalchemy_field=(LibraryFolder, "parent_id"), id_decode=True),
+        'parent_library_id': ViewField('parent_library_id', post_filter=library_folder_parent_library_id_filter, id_decode=True),
+        'library_path': ViewField('library_path', post_filter=library_path_filter)
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( LibraryFolder )
+
+
+##################
+# Library Dataset Searching
+##################
+def library_dataset_name_filter(item, left, operator, right):
+    if operator == '=':
+        return item.name == right
+    if operator == '!=':
+        return item.name != right
+    raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
+
+class LibraryDatasetView(ViewQueryBaseClass):
+    VIEW_NAME = "library_dataset"
+    FIELDS = {
+        'name': ViewField('name', post_filter=library_dataset_name_filter),
+        'id': ViewField('id', sqlalchemy_field=(LibraryDataset, "id"), id_decode=True),
+        'folder_id': ViewField('folder_id', sqlalchemy_field=(LibraryDataset, "folder_id"), id_decode=True)
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( LibraryDataset )
+
+
+##################
+# Tool Searching
+##################
+class ToolView(ViewQueryBaseClass):
+    VIEW_NAME = "tool"
+    FIELDS = {
+        'tool_id': ViewField('tool_id', sqlalchemy_field=(ToolVersion, "tool_id")),
+        'id': ViewField('id', sqlalchemy_field=(ToolVersion, "id")),
+    }
+
+    def search(self, trans):
+        self.query = trans.install_model.context.query( ToolVersion )
+
+
+##################
+# History Dataset Searching
+##################
+def history_dataset_handle_tag(view, left, operator, right):
+    if operator == "=":
+        view.do_query = True
+        # aliasing the tag association table, so multiple links to different tags can be formed during a single query
+        tag_table = aliased(HistoryDatasetAssociationTagAssociation)
+
+        view.query = view.query.filter(
+            HistoryDatasetAssociation.id == tag_table.history_dataset_association_id
+        )
+        tmp = right.split(":")
+        view.query = view.query.filter( tag_table.user_tname == tmp[0] )
+        if len(tmp) > 1:
+            view.query = view.query.filter( tag_table.user_value == tmp[1] )
+    else:
+        raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
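+# e.g. a GQL conditional tag='group:rna' filters on user_tname == 'group' and
+# user_value == 'rna'; a bare tag='group' matches on the tag name alone.
+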
+
+def history_dataset_extended_metadata_filter(view, left, operator, right):
+    view.do_query = True
+    if 'extended_metadata_joined' not in view.state:
+        view.query = view.query.join( ExtendedMetadata )
+        view.state['extended_metadata_joined'] = True
+    alias = aliased( ExtendedMetadataIndex )
+    field = "/%s" % ("/".join(left.split(".")[1:]))
+    # print "FIELD", field
+    view.query = view.query.filter(
+        and_(
+            ExtendedMetadata.id == alias.extended_metadata_id,
+            alias.path == field,
+            alias.value == str(right)
+        )
+    )
+
+
+class HistoryDatasetView(ViewQueryBaseClass):
+    DOMAIN = "history_dataset"
+    FIELDS = {
+        'name': ViewField('name', sqlalchemy_field=(HistoryDatasetAssociation, "name")),
+        'id': ViewField('id', sqlalchemy_field=(HistoryDatasetAssociation, "id"), id_decode=True),
+        'history_id': ViewField('history_id', sqlalchemy_field=(HistoryDatasetAssociation, "history_id"), id_decode=True),
+        'tag': ViewField("tag", handler=history_dataset_handle_tag),
+        'copied_from_ldda_id': ViewField("copied_from_ldda_id",
+                                         sqlalchemy_field=(HistoryDatasetAssociation, "copied_from_library_dataset_dataset_association_id"),
+                                         id_decode=True),
+        'copied_from_hda_id': ViewField("copied_from_hda_id",
+                                        sqlalchemy_field=(HistoryDatasetAssociation, "copied_from_history_dataset_association_id"),
+                                        id_decode=True),
+        'deleted': ViewField('deleted', sqlalchemy_field=(HistoryDatasetAssociation, "deleted")),
+        'extended_metadata': ViewField('extended_metadata', handler=history_dataset_extended_metadata_filter)
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( HistoryDatasetAssociation )
+
+
+##################
+# History Searching
+##################
+
+
+def history_handle_tag(view, left, operator, right):
+    if operator == "=":
+        view.do_query = True
+        tag_table = aliased(HistoryTagAssociation)
+        view.query = view.query.filter(
+            History.id == tag_table.history_id
+        )
+        tmp = right.split(":")
+        view.query = view.query.filter( tag_table.user_tname == tmp[0] )
+        if len(tmp) > 1:
+            view.query = view.query.filter( tag_table.user_value == tmp[1] )
+    else:
+        raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
+
+def history_handle_annotation(view, left, operator, right):
+    if operator == "=":
+        view.do_query = True
+        view.query = view.query.filter( and_(
+            HistoryAnnotationAssociation.history_id == History.id,
+            HistoryAnnotationAssociation.annotation == right
+        ) )
+    elif operator == "like":
+        view.do_query = True
+        view.query = view.query.filter( and_(
+            HistoryAnnotationAssociation.history_id == History.id,
+            HistoryAnnotationAssociation.annotation.like( right )
+        ) )
+    else:
+        raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
+
+class HistoryView(ViewQueryBaseClass):
+    DOMAIN = "history"
+    FIELDS = {
+        'name': ViewField('name', sqlalchemy_field=(History, "name")),
+        'id': ViewField('id', sqlalchemy_field=(History, "id"), id_decode=True),
+        'tag': ViewField("tag", handler=history_handle_tag),
+        'annotation': ViewField("annotation", handler=history_handle_annotation),
+        'deleted': ViewField('deleted', sqlalchemy_field=(History, "deleted"))
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( History )
+
+
+##################
+# Workflow Searching
+##################
+
+
+def workflow_tag_handler(view, left, operator, right):
+    if operator == "=":
+        view.do_query = True
+        view.query = view.query.filter(
+            StoredWorkflow.id == StoredWorkflowTagAssociation.stored_workflow_id
+        )
+        tmp = right.split(":")
+        view.query = view.query.filter( StoredWorkflowTagAssociation.user_tname == tmp[0] )
+        if len(tmp) > 1:
+            view.query = view.query.filter( StoredWorkflowTagAssociation.user_value == tmp[1] )
+    else:
+        raise GalaxyParseError("Invalid comparison operator: %s" % (operator))
+
+
+class WorkflowView(ViewQueryBaseClass):
+    DOMAIN = "workflow"
+    FIELDS = {
+        'name': ViewField('name', sqlalchemy_field=(StoredWorkflow, "name")),
+        'id': ViewField('id', sqlalchemy_field=(StoredWorkflow, "id"), id_decode=True),
+        'tag': ViewField('tag', handler=workflow_tag_handler),
+        'deleted': ViewField('deleted', sqlalchemy_field=(StoredWorkflow, "deleted")),
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( StoredWorkflow )
+
+
+##################
+# Job Searching
+##################
+
+
+def job_param_filter(view, left, operator, right):
+    view.do_query = True
+    alias = aliased( JobParameter )
+    param_name = re.sub(r'^param.', '', left)
+    view.query = view.query.filter(
+        and_(
+            Job.id == alias.job_id,
+            alias.name == param_name,
+            alias.value == dumps(right)
+        )
+    )
+
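+# Job parameter values are stored JSON-encoded, so the right-hand side is
+# json.dumps()'d before comparison; e.g. (sketch) the conditional
+# param.'format'='fasta' matches rows whose stored value is '"fasta"'.
+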
+
+def job_input_hda_filter(view, left, operator, right):
+    view.do_query = True
+    alias = aliased( JobToInputDatasetAssociation )
+    param_name = re.sub(r'^input_hda.', '', left)
+    view.query = view.query.filter(
+        and_(
+            Job.id == alias.job_id,
+            alias.name == param_name,
+            alias.dataset_id == right
+        )
+    )
+
+
+def job_input_ldda_filter(view, left, operator, right):
+    view.do_query = True
+    alias = aliased( JobToInputLibraryDatasetAssociation )
+    param_name = re.sub(r'^input_ldda.', '', left)
+    view.query = view.query.filter(
+        and_(
+            Job.id == alias.job_id,
+            alias.name == param_name,
+            alias.ldda_id == right
+        )
+    )
+
+
+def job_output_hda_filter(view, left, operator, right):
+    view.do_query = True
+    alias = aliased( JobToOutputDatasetAssociation )
+    param_name = re.sub(r'^output_hda.', '', left)
+    view.query = view.query.filter(
+        and_(
+            Job.id == alias.job_id,
+            alias.name == param_name,
+            alias.dataset_id == right
+        )
+    )
+
+
+class JobView(ViewQueryBaseClass):
+    DOMAIN = "job"
+    FIELDS = {
+        'tool_name': ViewField('tool_name', sqlalchemy_field=(Job, "tool_id")),
+        'state': ViewField('state', sqlalchemy_field=(Job, "state")),
+        'param': ViewField('param', handler=job_param_filter),
+        'input_ldda': ViewField('input_ldda', handler=job_input_ldda_filter, id_decode=True),
+        'input_hda': ViewField('input_hda', handler=job_input_hda_filter, id_decode=True),
+        'output_hda': ViewField('output_hda', handler=job_output_hda_filter, id_decode=True)
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( Job )
+
+
+##################
+# Page Searching
+##################
+
+
+class PageView(ViewQueryBaseClass):
+    DOMAIN = "page"
+    FIELDS = {
+        'id': ViewField('id', sqlalchemy_field=(Page, "id"), id_decode=True),
+        'slug': ViewField('slug', sqlalchemy_field=(Page, "slug")),
+        'title': ViewField('title', sqlalchemy_field=(Page, "title")),
+        'deleted': ViewField('deleted', sqlalchemy_field=(Page, "deleted"))
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( Page )
+
+
+##################
+# Page Revision Searching
+##################
+
+
+class PageRevisionView(ViewQueryBaseClass):
+    DOMAIN = "page_revision"
+    FIELDS = {
+        'id': ViewField('id', sqlalchemy_field=(PageRevision, "id"), id_decode=True),
+        'title': ViewField('title', sqlalchemy_field=(PageRevision, "title")),
+        'page_id': ViewField('page_id', sqlalchemy_field=(PageRevision, "page_id"), id_decode=True),
+    }
+
+    def search(self, trans):
+        self.query = trans.sa_session.query( PageRevision )
+
+
+# The view mapping takes a user's name for a table and maps it to a View class
+# that will handle queries.
+
+view_mapping = {
+    'library': LibraryView,
+    'library_folder': LibraryFolderView,
+    'library_dataset_dataset': LibraryDatasetDatasetView,
+    'library_dataset': LibraryDatasetView,
+    'lda': LibraryDatasetView,
+    'ldda': LibraryDatasetDatasetView,
+    'history_dataset': HistoryDatasetView,
+    'hda': HistoryDatasetView,
+    'history': HistoryView,
+    'workflow': WorkflowView,
+    'tool': ToolView,
+    'job': JobView,
+    'page': PageView,
+    'page_revision': PageRevisionView,
+}
+
+# The GQL grammar is defined in Parsley syntax ( http://parsley.readthedocs.org/en/latest/ )
+
+gqlGrammar = """
+expr = 'select' bs field_desc:f bs 'from' bs word:t (
+    bs 'where' bs conditional:c ws -> GalaxyQuery(f,t,c)
+    | ws -> GalaxyQuery(f, t, None) )
+bs = ' '+
+ws = ' '*
+field_desc = ( '*' -> ['*']
+    | field_list )
+field_list = field_name:x (
+    ws ',' ws field_list:y -> [x] + y
+    | -> [x]
+    )
+conditional = logic_statement:x (
+    bs 'and' bs conditional:y -> GalaxyQueryAnd(x,y)
+    | -> x
+    )
+word = alphanum+:x -> "".join(x)
+field_name = word:x (
+    '.' quote_word:y  -> x + "." + y
+    |-> x
+    )
+alphanum = anything:x ?(re.search(r'\w', x) is not None) -> x
+logic_statement = field_name:left ws comparison:comp ws value_word:right -> GalaxyQueryComparison(left, comp, right)
+value_word = (
+    'false' -> False
+    | 'False' -> False
+    | 'true' -> True
+    | 'True' -> True
+    | 'None' -> None
+    | quote_word )
+comparison = ( '=' -> '='
+    | '>' -> '>'
+    | '<' -> '<'
+    | '!=' -> '!='
+    | '>=' -> '>='
+    | '<=' -> '<='
+    | 'like' -> 'like'
+    )
+quote_word = "'" not_quote*:x "'" -> "".join(x)
+not_quote = anything:x ?(x != "'") -> x
+not_dquote = anything:x ?(x != '"') -> x
+"""
+
+
+class GalaxyQuery(object):
+    """
+    This class represents a data structure of a compiled GQL query
+    """
+    def __init__(self, field_list, table_name, conditional):
+        self.field_list = field_list
+        self.table_name = table_name
+        self.conditional = conditional
+
+
+class GalaxyQueryComparison(object):
+    """
+    This class represents the data structure of a comparison in a
+    compiled GQL query (i.e. where name='Untitled History')
+    """
+    def __init__(self, left, operator, right):
+        self.left = left
+        self.operator = operator
+        self.right = right
+
+
+class GalaxyQueryAnd(object):
+    """
+    This class represents the data structure of an 'and' conjunction of two
+    conditionals in a compiled GQL query (i.e. where deleted=False and
+    name='Untitled History')
+    """
+    def __init__(self, left, right):
+        self.left = left
+        self.operator = 'and'
+        self.right = right
+
+
+class GalaxyParseError(Exception):
+    pass
+
+
+class SearchQuery(object):
+    def __init__(self, view, query):
+        self.view = view
+        self.query = query
+
+    def decode_query_ids(self, trans):
+        if self.query.conditional is not None:
+            self.view.decode_query_ids(trans, self.query.conditional)
+
+    def process(self, trans):
+        self.view.search(trans)
+        if self.query.conditional is not None:
+            self.view.filter(
+                self.query.conditional.left,
+                self.query.conditional.operator,
+                self.query.conditional.right
+            )
+        return self.view.get_results(True)
+
+    def item_to_api_value(self, item):
+        r = item.to_dict( view='element' )
+        if self.query.field_list.count("*"):
+            return r
+        o = {}
+        for a in r:
+            if a in self.query.field_list:
+                o[a] = r[a]
+        return o
+
+
+class GalaxySearchEngine(object):
+    """
+    Primary class for searching. Parses GQL (Galaxy Query Language) queries and returns a 'SearchQuery' instance.
+    """
+    def __init__(self):
+        self.parser = parsley.makeGrammar(gqlGrammar, {
+            're': re,
+            'GalaxyQuery': GalaxyQuery,
+            'GalaxyQueryComparison': GalaxyQueryComparison,
+            'GalaxyQueryAnd': GalaxyQueryAnd
+        })
+
+    def query(self, query_text):
+        q = self.parser(query_text).expr()
+
+        if q.table_name in view_mapping:
+            view = view_mapping[q.table_name]()
+            return SearchQuery(view, q)
+        raise GalaxyParseError("No such table %s" % (q.table_name))
diff --git a/lib/galaxy/model/tool_shed_install/__init__.py b/lib/galaxy/model/tool_shed_install/__init__.py
new file mode 100644
index 0000000..ba0cc4b
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/__init__.py
@@ -0,0 +1,662 @@
+import logging
+import os
+from galaxy.util.dictifiable import Dictifiable
+from galaxy.util.bunch import Bunch
+from galaxy.util import asbool
+from tool_shed.util import common_util
+from urlparse import urljoin
+
+log = logging.getLogger( __name__ )
+
+
+class ToolShedRepository( object ):
+    dict_collection_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
+                                     'tool_shed_status', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
+    dict_element_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
+                                  'tool_shed_status', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
+    installation_status = Bunch( NEW='New',
+                                 CLONING='Cloning',
+                                 SETTING_TOOL_VERSIONS='Setting tool versions',
+                                 INSTALLING_REPOSITORY_DEPENDENCIES='Installing repository dependencies',
+                                 INSTALLING_TOOL_DEPENDENCIES='Installing tool dependencies',
+                                 LOADING_PROPRIETARY_DATATYPES='Loading proprietary datatypes',
+                                 INSTALLED='Installed',
+                                 DEACTIVATED='Deactivated',
+                                 ERROR='Error',
+                                 UNINSTALLED='Uninstalled' )
+    states = Bunch( INSTALLING='running',
+                    OK='ok',
+                    WARNING='queued',
+                    ERROR='error',
+                    UNINSTALLED='deleted_new' )
+
+    def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
+                  changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, tool_shed_status=None, deleted=False,
+                  uninstalled=False, dist_to_shed=False, status=None, error_message=None ):
+        self.id = id
+        self.create_time = create_time
+        self.tool_shed = tool_shed
+        self.name = name
+        self.description = description
+        self.owner = owner
+        self.installed_changeset_revision = installed_changeset_revision
+        self.changeset_revision = changeset_revision
+        self.ctx_rev = ctx_rev
+        self.metadata = metadata
+        self.includes_datatypes = includes_datatypes
+        self.tool_shed_status = tool_shed_status
+        self.deleted = deleted
+        self.uninstalled = uninstalled
+        self.dist_to_shed = dist_to_shed
+        self.status = status
+        self.error_message = error_message
+
+    def as_dict( self, value_mapper=None ):
+        return self.to_dict( view='element', value_mapper=value_mapper )
+
+    @property
+    def can_install( self ):
+        return self.status == self.installation_status.NEW
+
+    @property
+    def can_reset_metadata( self ):
+        return self.status == self.installation_status.INSTALLED
+
+    @property
+    def can_uninstall( self ):
+        return self.status != self.installation_status.UNINSTALLED
+
+    @property
+    def can_deactivate( self ):
+        return self.status not in [ self.installation_status.DEACTIVATED,
+                                    self.installation_status.ERROR,
+                                    self.installation_status.UNINSTALLED ]
+
+    @property
+    def can_reinstall_or_activate( self ):
+        return self.deleted
+
+    def get_sharable_url( self, app ):
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, self.tool_shed )
+        if tool_shed_url:
+            # Append a slash to the tool shed URL, because urlparse.urljoin will drop
+            # the last path segment of the base URL if it does not end with a forward slash.
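+            # e.g. (sketch) urljoin( 'http://host/shed', 'view/o/n' ) yields
+            # 'http://host/view/o/n', while urljoin( 'http://host/shed/', 'view/o/n' )
+            # yields 'http://host/shed/view/o/n'.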
+            tool_shed_url = '%s/' % tool_shed_url
+            return urljoin( tool_shed_url, 'view/%s/%s' % ( self.owner, self.name ) )
+        return tool_shed_url
+
+    def get_shed_config_filename( self ):
+        shed_config_filename = None
+        if self.metadata:
+            shed_config_filename = self.metadata.get( 'shed_config_filename', shed_config_filename )
+        return shed_config_filename
+
+    def get_shed_config_dict( self, app, default=None ):
+        """
+        Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
+        in the shed_tool_conf_dict.
+        """
+
+        def _is_valid_shed_config_filename( filename ):
+            for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
+                if filename == shed_tool_conf_dict[ 'config_filename' ]:
+                    return True
+            return False
+
+        if not self.shed_config_filename or not _is_valid_shed_config_filename( self.shed_config_filename ):
+            self.guess_shed_config( app, default=default )
+        if self.shed_config_filename:
+            for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
+                if self.shed_config_filename == shed_tool_conf_dict[ 'config_filename' ]:
+                    return shed_tool_conf_dict
+        return default
+
+    def get_tool_relative_path( self, app ):
+        shed_conf_dict = self.get_shed_config_dict( app )
+        tool_path = None
+        relative_path = None
+        if shed_conf_dict:
+            tool_path = shed_conf_dict[ 'tool_path' ]
+            relative_path = os.path.join( self.tool_shed_path_name, 'repos', self.owner, self.name, self.installed_changeset_revision )
+        return tool_path, relative_path
+
+    def guess_shed_config( self, app, default=None ):
+        tool_ids = []
+        metadata = self.metadata or {}
+        for tool in metadata.get( 'tools', [] ):
+            tool_ids.append( tool.get( 'guid' ) )
+        for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
+            name = shed_tool_conf_dict[ 'config_filename' ]
+            for elem in shed_tool_conf_dict[ 'config_elems' ]:
+                if elem.tag == 'tool':
+                    for sub_elem in elem.findall( 'id' ):
+                        tool_id = sub_elem.text.strip()
+                        if tool_id in tool_ids:
+                            self.shed_config_filename = name
+                            return shed_tool_conf_dict
+                elif elem.tag == "section":
+                    for tool_elem in elem.findall( 'tool' ):
+                        for sub_elem in tool_elem.findall( 'id' ):
+                            tool_id = sub_elem.text.strip()
+                            if tool_id in tool_ids:
+                                self.shed_config_filename = name
+                                return shed_tool_conf_dict
+        if self.includes_datatypes:
+            # We need to search by file paths here, which is less desirable.
+            tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed )
+            for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
+                tool_path = shed_tool_conf_dict[ 'tool_path' ]
+                relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
+                if os.path.exists( relative_path ):
+                    self.shed_config_filename = shed_tool_conf_dict[ 'config_filename' ]
+                    return shed_tool_conf_dict
+        return default
+
+    @property
+    def has_readme_files( self ):
+        if self.metadata:
+            return 'readme_files' in self.metadata
+        return False
+
+    @property
+    def has_repository_dependencies( self ):
+        if self.metadata:
+            repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
+            repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+            # [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
+            for rd_tup in repository_dependencies:
+                tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                    common_util.parse_repository_dependency_tuple( rd_tup )
+                if not asbool( only_if_compiling_contained_td ):
+                    return True
+        return False
+
+    @property
+    def has_repository_dependencies_only_if_compiling_contained_td( self ):
+        if self.metadata:
+            repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
+            repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+            # [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
+            for rd_tup in repository_dependencies:
+                tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                    common_util.parse_repository_dependency_tuple( rd_tup )
+                if not asbool( only_if_compiling_contained_td ):
+                    return False
+            return True
+        return False
+
+    @property
+    def in_error_state( self ):
+        return self.status == self.installation_status.ERROR
+
+    @property
+    def includes_data_managers( self ):
+        if self.metadata:
+            return bool( len( self.metadata.get( 'data_manager', {} ).get( 'data_managers', {} ) ) )
+        return False
+
+    @property
+    def includes_tools( self ):
+        if self.metadata:
+            return 'tools' in self.metadata
+        return False
+
+    @property
+    def includes_tools_for_display_in_tool_panel( self ):
+        if self.includes_tools:
+            tool_dicts = self.metadata[ 'tools' ]
+            for tool_dict in tool_dicts:
+                if tool_dict.get( 'add_to_tool_panel', True ):
+                    return True
+        return False
+
+    @property
+    def includes_tool_dependencies( self ):
+        if self.metadata:
+            return 'tool_dependencies' in self.metadata
+        return False
+
+    @property
+    def includes_workflows( self ):
+        if self.metadata:
+            return 'workflows' in self.metadata
+        return False
+
+    @property
+    def installed_repository_dependencies( self ):
+        """Return the repository's repository dependencies that are currently installed."""
+        installed_required_repositories = []
+        for required_repository in self.repository_dependencies:
+            if required_repository.status == self.installation_status.INSTALLED:
+                installed_required_repositories.append( required_repository )
+        return installed_required_repositories
+
+    @property
+    def installed_tool_dependencies( self ):
+        """Return the repository's tool dependencies that are currently installed, but possibly in an error state."""
+        installed_dependencies = []
+        for tool_dependency in self.tool_dependencies:
+            if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED ]:
+                installed_dependencies.append( tool_dependency )
+        return installed_dependencies
+
+    @property
+    def is_deprecated_in_tool_shed( self ):
+        if self.tool_shed_status:
+            return asbool( self.tool_shed_status.get( 'repository_deprecated', False ) )
+        return False
+
+    @property
+    def is_deactivated_or_installed( self ):
+        return self.status in [ self.installation_status.DEACTIVATED,
+                                self.installation_status.INSTALLED ]
+
+    @property
+    def is_installed( self ):
+        return self.status == self.installation_status.INSTALLED
+
+    @property
+    def is_latest_installable_revision( self ):
+        if self.tool_shed_status:
+            return asbool( self.tool_shed_status.get( 'latest_installable_revision', False ) )
+        return False
+
+    @property
+    def is_new( self ):
+        return self.status == self.installation_status.NEW
+
+    @property
+    def missing_repository_dependencies( self ):
+        """Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
+        missing_required_repositories = []
+        for required_repository in self.repository_dependencies:
+            if required_repository.status not in [ self.installation_status.INSTALLED ]:
+                missing_required_repositories.append( required_repository )
+        return missing_required_repositories
+
+    @property
+    def missing_tool_dependencies( self ):
+        """Return the repository's tool dependencies that are not currently installed, and may not ever have been installed."""
+        missing_dependencies = []
+        for tool_dependency in self.tool_dependencies:
+            if tool_dependency.status not in [ ToolDependency.installation_status.INSTALLED ]:
+                missing_dependencies.append( tool_dependency )
+        return missing_dependencies
+
+    def repo_files_directory( self, app ):
+        repo_path = self.repo_path( app )
+        if repo_path:
+            return os.path.join( repo_path, self.name )
+        return None
+
+    def repo_path( self, app ):
+        tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed )
+        for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
+            tool_path = shed_tool_conf_dict[ 'tool_path' ]
+            relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
+            if os.path.exists( relative_path ):
+                return relative_path
+        return None
+
+    @property
+    def repository_dependencies( self ):
+        """
+        Return all of this repository's repository dependencies, ignoring their attributes like prior_installation_required and
+        only_if_compiling_contained_td.
+        """
+        required_repositories = []
+        for rrda in self.required_repositories:
+            repository_dependency = rrda.repository_dependency
+            required_repository = repository_dependency.repository
+            if required_repository:
+                required_repositories.append( required_repository )
+        return required_repositories
+
+    @property
+    def repository_dependencies_being_installed( self ):
+        """Return the repository's repository dependencies that are currently being installed."""
+        required_repositories_being_installed = []
+        for required_repository in self.repository_dependencies:
+            if required_repository.status in [ self.installation_status.CLONING,
+                                               self.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+                                               self.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+                                               self.installation_status.LOADING_PROPRIETARY_DATATYPES,
+                                               self.installation_status.SETTING_TOOL_VERSIONS ]:
+                required_repositories_being_installed.append( required_repository )
+        return required_repositories_being_installed
+
+    @property
+    def repository_dependencies_missing_or_being_installed( self ):
+        """Return the repository's repository dependencies that are either missing or currently being installed."""
+        required_repositories_missing_or_being_installed = []
+        for required_repository in self.repository_dependencies:
+            if required_repository.status in [ self.installation_status.ERROR,
+                                               self.installation_status.INSTALLING,
+                                               self.installation_status.NEVER_INSTALLED,
+                                               self.installation_status.UNINSTALLED ]:
+                required_repositories_missing_or_being_installed.append( required_repository )
+        return required_repositories_missing_or_being_installed
+
+    @property
+    def repository_dependencies_with_installation_errors( self ):
+        """Return the repository's repository dependencies that have installation errors."""
+        required_repositories_with_installation_errors = []
+        for required_repository in self.repository_dependencies:
+            if required_repository.status == self.installation_status.ERROR:
+                required_repositories_with_installation_errors.append( required_repository )
+        return required_repositories_with_installation_errors
+
+    @property
+    def requires_prior_installation_of( self ):
+        """
+        Return a list of repository dependency tuples like (tool_shed, name, owner, changeset_revision, prior_installation_required) for this
+        repository's repository dependencies where prior_installation_required is True.  By definition, repository dependencies are required to
+        be installed in order for this repository to function correctly.  However, those repository dependencies that are defined for this
+        repository with prior_installation_required set to True place them in a special category in that the required repositories must be
+        installed before this repository is installed.  Among other things, this enables these "special" repository dependencies to include
+        information that enables the successful installation of this repository.  This method is not used during the initial installation of
+        this repository, but only after it has been installed (metadata must be set for this repository in order for this method to be useful).
+        """
+        required_rd_tups_that_must_be_installed = []
+        if self.has_repository_dependencies:
+            rd_tups = self.metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+            for rd_tup in rd_tups:
+                if len( rd_tup ) == 5:
+                    tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                        common_util.parse_repository_dependency_tuple( rd_tup, contains_error=False )
+                    if asbool( prior_installation_required ):
+                        required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) )
+                elif len( rd_tup ) == 6:
+                    tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                        common_util.parse_repository_dependency_tuple( rd_tup, contains_error=False )
+                    # The repository dependency will only be required to be previously installed if it does not fall into the category of
+                    # a repository that must be installed only so that its contained tool dependency can be used for compiling the tool
+                    # dependency of the dependent repository.
+                    if not asbool( only_if_compiling_contained_td ):
+                        if asbool( prior_installation_required ):
+                            required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) )
+        return required_rd_tups_that_must_be_installed
+
+    @property
+    def revision_update_available( self ):
+        # This method should be named update_available, but since it is no longer possible to drop a table column using migration scripts
+        # with the sqlite database (see ~/galaxy/model/migrate/versions/0016_drop_update_available_col_add_tool_shed_status_col.py), we
+        # have to name it in such a way that it will not conflict with the eliminated tool_shed_repository.update_available column (which
+        # cannot be eliminated if using the sqlite database).
+        if self.tool_shed_status:
+            return asbool( self.tool_shed_status.get( 'revision_update', False ) )
+        return False
+
+    def set_shed_config_filename( self, value ):
+        self.metadata[ 'shed_config_filename' ] = value
+
+    shed_config_filename = property( get_shed_config_filename, set_shed_config_filename )
+
+    def to_dict( self, view='collection', value_mapper=None ):
+        if value_mapper is None:
+            value_mapper = {}
+        rval = {}
+        try:
+            visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
+        except AttributeError:
+            raise Exception( 'Unknown API view: %s' % view )
+        for key in visible_keys:
+            try:
+                rval[ key ] = self.__getattribute__( key )
+                if key in value_mapper:
+                    rval[ key ] = value_mapper.get( key, rval[ key ] )
+            except AttributeError:
+                rval[ key ] = None
+        return rval
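+
+    # Note: value_mapper supplies replacement values keyed by field name; as
+    # written above, the mapped value is substituted directly rather than being
+    # called as a function on the original value.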
+
+    @property
+    def tool_dependencies_being_installed( self ):
+        dependencies_being_installed = []
+        for tool_dependency in self.tool_dependencies:
+            if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
+                dependencies_being_installed.append( tool_dependency )
+        return dependencies_being_installed
+
+    @property
+    def tool_dependencies_installed_or_in_error( self ):
+        """Return the repository's tool dependencies that are currently installed, but possibly in an error state."""
+        installed_dependencies = []
+        for tool_dependency in self.tool_dependencies:
+            if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED,
+                                           ToolDependency.installation_status.ERROR ]:
+                installed_dependencies.append( tool_dependency )
+        return installed_dependencies
+
+    @property
+    def tool_dependencies_missing_or_being_installed( self ):
+        dependencies_missing_or_being_installed = []
+        for tool_dependency in self.tool_dependencies:
+            if tool_dependency.status in [ ToolDependency.installation_status.ERROR,
+                                           ToolDependency.installation_status.INSTALLING,
+                                           ToolDependency.installation_status.NEVER_INSTALLED,
+                                           ToolDependency.installation_status.UNINSTALLED ]:
+                dependencies_missing_or_being_installed.append( tool_dependency )
+        return dependencies_missing_or_being_installed
+
+    @property
+    def tool_dependencies_with_installation_errors( self ):
+        dependencies_with_installation_errors = []
+        for tool_dependency in self.tool_dependencies:
+            if tool_dependency.status == ToolDependency.installation_status.ERROR:
+                dependencies_with_installation_errors.append( tool_dependency )
+        return dependencies_with_installation_errors
+
+    @property
+    def tool_shed_path_name( self ):
+        tool_shed_url = self.tool_shed
+        if tool_shed_url.find( ':' ) > 0:
+            # Eliminate the port, if any, since it will result in an invalid directory name.
+            tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+        return tool_shed_url.rstrip( '/' )
+
+    @property
+    def tuples_of_repository_dependencies_needed_for_compiling_td( self ):
+        """
+        Return tuples defining this repository's repository dependencies that are necessary only for compiling this repository's tool
+        dependencies.
+        """
+        rd_tups_of_repositories_needed_for_compiling_td = []
+        if self.metadata:
+            repository_dependencies = self.metadata.get( 'repository_dependencies', {} )
+            rd_tups = repository_dependencies.get( 'repository_dependencies', [] )
+            for rd_tup in rd_tups:
+                if len( rd_tup ) == 6:
+                    tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
+                    if asbool( only_if_compiling_contained_td ):
+                        rd_tups_of_repositories_needed_for_compiling_td.append( ( tool_shed, name, owner, changeset_revision, 'False', 'True' ) )
+        return rd_tups_of_repositories_needed_for_compiling_td
+
+    @property
+    def uninstalled_repository_dependencies( self ):
+        """Return the repository's repository dependencies that have been uninstalled."""
+        uninstalled_required_repositories = []
+        for required_repository in self.repository_dependencies:
+            if required_repository.status == self.installation_status.UNINSTALLED:
+                uninstalled_required_repositories.append( required_repository )
+        return uninstalled_required_repositories
+
+    @property
+    def uninstalled_tool_dependencies( self ):
+        """Return the repository's tool dependencies that have been uninstalled."""
+        uninstalled_tool_dependencies = []
+        for tool_dependency in self.tool_dependencies:
+            if tool_dependency.status == ToolDependency.installation_status.UNINSTALLED:
+                uninstalled_tool_dependencies.append( tool_dependency )
+        return uninstalled_tool_dependencies
+
+    @property
+    def upgrade_available( self ):
+        if self.tool_shed_status:
+            if self.is_deprecated_in_tool_shed:
+                # Only allow revision upgrades if the repository is not deprecated in the tool shed.
+                return False
+            return asbool( self.tool_shed_status.get( 'revision_upgrade', False ) )
+        return False
+
+
+class RepositoryRepositoryDependencyAssociation( object ):
+
+    def __init__( self, tool_shed_repository_id=None, repository_dependency_id=None ):
+        self.tool_shed_repository_id = tool_shed_repository_id
+        self.repository_dependency_id = repository_dependency_id
+
+
+class RepositoryDependency( object ):
+
+    def __init__( self, tool_shed_repository_id=None ):
+        self.tool_shed_repository_id = tool_shed_repository_id
+
+
+class ToolDependency( object ):
+    installation_status = Bunch( NEVER_INSTALLED='Never installed',
+                                 INSTALLING='Installing',
+                                 INSTALLED='Installed',
+                                 ERROR='Error',
+                                 UNINSTALLED='Uninstalled' )
+
+    states = Bunch( INSTALLING='running',
+                    OK='ok',
+                    WARNING='queued',
+                    ERROR='error',
+                    UNINSTALLED='deleted_new' )
+
+    def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, status=None, error_message=None ):
+        self.tool_shed_repository_id = tool_shed_repository_id
+        self.name = name
+        self.version = version
+        self.type = type
+        self.status = status
+        self.error_message = error_message
+
+    @property
+    def can_install( self ):
+        return self.status in [ self.installation_status.NEVER_INSTALLED, self.installation_status.UNINSTALLED ]
+
+    @property
+    def can_uninstall( self ):
+        return self.status in [ self.installation_status.ERROR, self.installation_status.INSTALLED ]
+
+    @property
+    def can_update( self ):
+        return self.status in [ self.installation_status.NEVER_INSTALLED,
+                                self.installation_status.INSTALLED,
+                                self.installation_status.ERROR,
+                                self.installation_status.UNINSTALLED ]
+
+    def get_env_shell_file_path( self, app ):
+        installation_directory = self.installation_directory( app )
+        file_path = os.path.join( installation_directory, 'env.sh' )
+        if os.path.exists( file_path ):
+            return file_path
+        return None
+
+    @property
+    def in_error_state( self ):
+        return self.status == self.installation_status.ERROR
+
+    def installation_directory( self, app ):
+        if self.type == 'package':
+            return os.path.join( app.config.tool_dependency_dir,
+                                 self.name,
+                                 self.version,
+                                 self.tool_shed_repository.owner,
+                                 self.tool_shed_repository.name,
+                                 self.tool_shed_repository.installed_changeset_revision )
+        if self.type == 'set_environment':
+            return os.path.join( app.config.tool_dependency_dir,
+                                 'environment_settings',
+                                 self.name,
+                                 self.tool_shed_repository.owner,
+                                 self.tool_shed_repository.name,
+                                 self.tool_shed_repository.installed_changeset_revision )
+
+    @property
+    def is_installed( self ):
+        return self.status == self.installation_status.INSTALLED
+
+
+class ToolVersion( object, Dictifiable ):
+    dict_element_visible_keys = ( 'id', 'tool_shed_repository' )
+
+    def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
+        self.id = id
+        self.create_time = create_time
+        self.tool_id = tool_id
+        self.tool_shed_repository = tool_shed_repository
+
+    def get_previous_version( self, app ):
+        parent_id = app.tool_version_cache.tool_id_to_parent_id.get(self.id, None)
+        if parent_id:
+            return app.tool_version_cache.tool_version_by_id[parent_id]
+        else:
+            return None
+
+    def get_next_version( self, app ):
+        child_id = app.tool_version_cache.parent_id_to_tool_id.get(self.id, None)
+        if child_id:
+            return app.tool_version_cache.tool_version_by_id[child_id]
+        else:
+            return None
+
+    def get_versions( self, app ):
+        tool_versions = []
+
+        # Prepend ancestors.
+        def __ancestors( app, tool_version ):
+            # Should we handle multiple parents at each level?
+            previous_version = tool_version.get_previous_version( app )
+            if previous_version:
+                if previous_version not in tool_versions:
+                    tool_versions.insert( 0, previous_version )
+                    __ancestors( app, previous_version )
+
+        # Append descendants.
+        def __descendants( app, tool_version ):
+            # Should we handle multiple child siblings at each level?
+            next_version = tool_version.get_next_version( app )
+            if next_version:
+                if next_version not in tool_versions:
+                    tool_versions.append( next_version )
+                    __descendants( app, next_version )
+
+        __ancestors( app, self )
+        if self not in tool_versions:
+            tool_versions.append( self )
+        __descendants( app, self )
+        return tool_versions
+
+    def get_version_ids( self, app, reverse=False ):
+        version_ids = [ tool_version.tool_id for tool_version in self.get_versions( app ) ]
+        if reverse:
+            version_ids.reverse()
+        return version_ids
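+
+    # e.g. for a lineage v1 -> v2 -> v3, v2.get_versions( app ) returns
+    # [ v1, v2, v3 ] (ancestors prepended, descendants appended), and
+    # v2.get_version_ids( app, reverse=True ) lists tool ids newest first.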
+
+    def to_dict( self, view='element' ):
+        rval = super( ToolVersion, self ).to_dict( view=view )
+        rval[ 'tool_name' ] = self.tool_id
+        for a in self.parent_tool_association:
+            rval[ 'parent_tool_id' ] = a.parent_id
+        for a in self.child_tool_association:
+            rval[ 'child_tool_id' ] = a.tool_id
+        return rval
+
+
+class ToolVersionAssociation( object ):
+
+    def __init__( self, id=None, tool_id=None, parent_id=None ):
+        self.id = id
+        self.tool_id = tool_id
+        self.parent_id = parent_id
+
+
+class MigrateTools( object ):
+
+    def __init__( self, repository_id=None, repository_path=None, version=None ):
+        self.repository_id = repository_id
+        self.repository_path = repository_path
+        self.version = version
diff --git a/lib/galaxy/model/tool_shed_install/mapping.py b/lib/galaxy/model/tool_shed_install/mapping.py
new file mode 100644
index 0000000..bbff370
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/mapping.py
@@ -0,0 +1,119 @@
+from galaxy.model import tool_shed_install as install_model
+from sqlalchemy import MetaData
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Table, TEXT
+from sqlalchemy.orm import relation, mapper
+from galaxy.model.custom_types import JSONType, TrimmedString
+from galaxy.model.orm.now import now
+from galaxy.model.base import ModelMapping
+from galaxy.model.orm.engine_factory import build_engine
+
+metadata = MetaData()
+
+install_model.ToolShedRepository.table = Table( "tool_shed_repository", metadata,
+                                                Column( "id", Integer, primary_key=True ),
+                                                Column( "create_time", DateTime, default=now ),
+                                                Column( "update_time", DateTime, default=now, onupdate=now ),
+                                                Column( "tool_shed", TrimmedString( 255 ), index=True ),
+                                                Column( "name", TrimmedString( 255 ), index=True ),
+                                                Column( "description", TEXT ),
+                                                Column( "owner", TrimmedString( 255 ), index=True ),
+                                                Column( "installed_changeset_revision", TrimmedString( 255 ) ),
+                                                Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+                                                Column( "ctx_rev", TrimmedString( 10 ) ),
+                                                Column( "metadata", JSONType, nullable=True ),
+                                                Column( "includes_datatypes", Boolean, index=True, default=False ),
+                                                Column( "tool_shed_status", JSONType, nullable=True ),
+                                                Column( "deleted", Boolean, index=True, default=False ),
+                                                Column( "uninstalled", Boolean, default=False ),
+                                                Column( "dist_to_shed", Boolean, default=False ),
+                                                Column( "status", TrimmedString( 255 ) ),
+                                                Column( "error_message", TEXT ) )
+
+install_model.RepositoryRepositoryDependencyAssociation.table = Table( 'repository_repository_dependency_association', metadata,
+                                                                       Column( "id", Integer, primary_key=True ),
+                                                                       Column( "create_time", DateTime, default=now ),
+                                                                       Column( "update_time", DateTime, default=now, onupdate=now ),
+                                                                       Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
+                                                                       Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
+
+install_model.RepositoryDependency.table = Table( "repository_dependency", metadata,
+                                                  Column( "id", Integer, primary_key=True ),
+                                                  Column( "create_time", DateTime, default=now ),
+                                                  Column( "update_time", DateTime, default=now, onupdate=now ),
+                                                  Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
+
+install_model.ToolDependency.table = Table( "tool_dependency", metadata,
+                                            Column( "id", Integer, primary_key=True ),
+                                            Column( "create_time", DateTime, default=now ),
+                                            Column( "update_time", DateTime, default=now, onupdate=now ),
+                                            Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ),
+                                            Column( "name", TrimmedString( 255 ) ),
+                                            Column( "version", TEXT ),
+                                            Column( "type", TrimmedString( 40 ) ),
+                                            Column( "status", TrimmedString( 255 ), nullable=False ),
+                                            Column( "error_message", TEXT ) )
+
+install_model.ToolVersion.table = Table( "tool_version", metadata,
+                                         Column( "id", Integer, primary_key=True ),
+                                         Column( "create_time", DateTime, default=now ),
+                                         Column( "update_time", DateTime, default=now, onupdate=now ),
+                                         Column( "tool_id", String( 255 ) ),
+                                         Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=True ) )
+
+install_model.ToolVersionAssociation.table = Table( "tool_version_association", metadata,
+                                                    Column( "id", Integer, primary_key=True ),
+                                                    Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ),
+                                                    Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) )
+
+install_model.MigrateTools.table = Table( "migrate_tools", metadata,
+                                          Column( "repository_id", TrimmedString( 255 ) ),
+                                          Column( "repository_path", TEXT ),
+                                          Column( "version", Integer ) )
+
+mapper( install_model.ToolShedRepository, install_model.ToolShedRepository.table,
+        properties=dict( tool_versions=relation( install_model.ToolVersion,
+                                                 primaryjoin=( install_model.ToolShedRepository.table.c.id == install_model.ToolVersion.table.c.tool_shed_repository_id ),
+                                                 backref='tool_shed_repository' ),
+                         tool_dependencies=relation( install_model.ToolDependency,
+                                                     primaryjoin=( install_model.ToolShedRepository.table.c.id == install_model.ToolDependency.table.c.tool_shed_repository_id ),
+                                                     order_by=install_model.ToolDependency.table.c.name,
+                                                     backref='tool_shed_repository' ),
+                         required_repositories=relation( install_model.RepositoryRepositoryDependencyAssociation,
+                                                         primaryjoin=( install_model.ToolShedRepository.table.c.id == install_model.RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
+
+mapper( install_model.RepositoryRepositoryDependencyAssociation, install_model.RepositoryRepositoryDependencyAssociation.table,
+        properties=dict( repository=relation( install_model.ToolShedRepository,
+                                              primaryjoin=( install_model.RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id == install_model.ToolShedRepository.table.c.id ) ),
+                         repository_dependency=relation( install_model.RepositoryDependency,
+                                                         primaryjoin=( install_model.RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == install_model.RepositoryDependency.table.c.id ) ) ) )
+
+mapper( install_model.RepositoryDependency, install_model.RepositoryDependency.table,
+        properties=dict( repository=relation( install_model.ToolShedRepository,
+                                              primaryjoin=( install_model.RepositoryDependency.table.c.tool_shed_repository_id == install_model.ToolShedRepository.table.c.id ) ) ) )
+
+mapper( install_model.ToolDependency, install_model.ToolDependency.table )
+
+mapper( install_model.ToolVersion, install_model.ToolVersion.table,
+        properties=dict(
+            parent_tool_association=relation( install_model.ToolVersionAssociation,
+                                              primaryjoin=( install_model.ToolVersion.table.c.id == install_model.ToolVersionAssociation.table.c.tool_id ) ),
+            child_tool_association=relation( install_model.ToolVersionAssociation,
+                                             primaryjoin=( install_model.ToolVersion.table.c.id == install_model.ToolVersionAssociation.table.c.parent_id ) ) ) )
+
+mapper( install_model.ToolVersionAssociation, install_model.ToolVersionAssociation.table )
+
+
+def init( url, engine_options={}, create_tables=False ):
+    """Connect mappings to the database"""
+    engine = build_engine( url, engine_options )
+    # Connect the metadata to the database.
+    metadata.bind = engine
+    result = ModelMapping( [ install_model ], engine=engine )
+    # Create tables if needed
+    if create_tables:
+        metadata.create_all()
+    result.create_tables = create_tables
+    return result
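+
+# Illustrative usage (not part of the upstream file), with a hypothetical
+# SQLite URL; the returned ModelMapping bundles the mapped classes and the
+# session machinery from galaxy.model.base:
+#
+#     from galaxy.model.tool_shed_install import mapping
+#     install_db = mapping.init( 'sqlite:///install.sqlite', create_tables=True )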
diff --git a/lib/galaxy/model/tool_shed_install/migrate/__init__.py b/lib/galaxy/model/tool_shed_install/migrate/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/model/tool_shed_install/migrate/check.py b/lib/galaxy/model/tool_shed_install/migrate/check.py
new file mode 100644
index 0000000..c7d1639
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/check.py
@@ -0,0 +1,97 @@
+import sys
+import os.path
+import logging
+
+from sqlalchemy import create_engine
+from sqlalchemy import MetaData
+from sqlalchemy import Table
+
+from sqlalchemy.exc import NoSuchTableError
+from migrate.versioning import repository, schema
+
+log = logging.getLogger( __name__ )
+
+# path relative to galaxy
+migrate_repository_directory = os.path.abspath(os.path.dirname( __file__ )).replace( os.getcwd() + os.path.sep, '', 1 )
+migrate_repository = repository.Repository( migrate_repository_directory )
+
+
+def create_or_verify_database( url, engine_options={}, app=None ):
+    """
+    """
+    # Create engine and metadata
+    engine = create_engine( url, **engine_options )
+
+    def migrate():
+        try:
+            # Declare the database to be under a repository's version control
+            db_schema = schema.ControlledSchema.create( engine, migrate_repository )
+        except Exception:
+            # The database is already under version control
+            db_schema = schema.ControlledSchema( engine, migrate_repository )
+        # Apply all scripts to get to current version
+        migrate_to_current_version( engine, db_schema )
+
+    meta = MetaData( bind=engine )
+    if app and getattr( app.config, 'database_auto_migrate', False ):
+        migrate()
+        return
+
+    # Try to load tool_shed_repository table
+    try:
+        Table( "tool_shed_repository", meta, autoload=True )
+    except NoSuchTableError:
+        # No table means a completely uninitialized database, so create
+        # everything from scratch and migrate to the current version.
+        migrate()
+        return
+
+    try:
+        Table( "migrate_version", meta, autoload=True )
+    except NoSuchTableError:
+        # The database exists but is not yet under migrate version control, so init with version 1
+        log.info( "Adding version control to existing database" )
+        try:
+            Table( "metadata_file", meta, autoload=True )
+            schema.ControlledSchema.create( engine, migrate_repository, version=2 )
+        except NoSuchTableError:
+            schema.ControlledSchema.create( engine, migrate_repository, version=1 )
+
+    # Verify that the code and the DB are in sync
+    db_schema = schema.ControlledSchema( engine, migrate_repository )
+    if migrate_repository.versions.latest != db_schema.version:
+        exception_msg = "Your database has version '%d' but this code expects version '%d'.  " % ( db_schema.version, migrate_repository.versions.latest )
+        exception_msg += "Back up your database and then migrate the schema by running the following from your Galaxy installation directory:"
+        exception_msg += "\n\nsh manage_db.sh upgrade install\n"
+
+    else:
+        log.info( "At database version %d" % db_schema.version )
+
+
+def migrate_to_current_version( engine, schema ):
+    # Changes to get to current version
+    changeset = schema.changeset( None )
+    for ver, change in changeset:
+        nextver = ver + changeset.step
+        log.info( 'Migrating %s -> %s... ' % ( ver, nextver ) )
+        old_stdout = sys.stdout
+
+        class FakeStdout( object ):
+            def __init__( self ):
+                self.buffer = []
+
+            def write( self, s ):
+                self.buffer.append( s )
+
+            def flush( self ):
+                pass
+
+        sys.stdout = FakeStdout()
+        try:
+            schema.runchange( ver, change, changeset.step )
+        finally:
+            for message in "".join( sys.stdout.buffer ).split( "\n" ):
+                log.info( message )
+            sys.stdout = old_stdout
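+
+# Illustrative invocation (not part of the upstream file), using a
+# hypothetical SQLite URL, as a Galaxy app would do at startup:
+#
+#     create_or_verify_database( 'sqlite:///install.sqlite' )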
diff --git a/lib/galaxy/model/tool_shed_install/migrate/migrate.cfg b/lib/galaxy/model/tool_shed_install/migrate/migrate.cfg
new file mode 100644
index 0000000..acfc987
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/migrate.cfg
@@ -0,0 +1,20 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=ToolShedInstall
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to 
+# change the table name in each database too. 
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the 
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the 
+# commit continues, perhaps ending successfully. 
+# Databases in this list MUST compile successfully during a commit, or the 
+# entire commit will fail. List the databases your application will actually 
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py
new file mode 120000
index 0000000..e0b1831
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py
@@ -0,0 +1 @@
+../../../migrate/versions/0082_add_tool_shed_repository_table.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py
new file mode 120000
index 0000000..7cacf92
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py
@@ -0,0 +1 @@
+../../../migrate/versions/0086_add_tool_shed_repository_table_columns.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py
new file mode 120000
index 0000000..3e9dea9
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py
@@ -0,0 +1 @@
+../../../migrate/versions/0087_tool_id_guid_map_table.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py
new file mode 120000
index 0000000..4e57eb1
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py
@@ -0,0 +1 @@
+../../../migrate/versions/0088_add_installed_changeset_revison_column.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py
new file mode 120000
index 0000000..6359c3b
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py
@@ -0,0 +1 @@
+../../../migrate/versions/0090_add_tool_shed_repository_table_columns.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py
new file mode 120000
index 0000000..9a4526d
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py
@@ -0,0 +1 @@
+../../../migrate/versions/0091_add_tool_version_tables.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py
new file mode 120000
index 0000000..9fbb816
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py
@@ -0,0 +1 @@
+../../../migrate/versions/0092_add_migrate_tools_table.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py
new file mode 120000
index 0000000..a1268f1
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py
@@ -0,0 +1 @@
+../../../migrate/versions/0097_add_ctx_rev_column.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py
new file mode 120000
index 0000000..a119ddd
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py
@@ -0,0 +1 @@
+../../../migrate/versions/0099_add_tool_dependency_table.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py
new file mode 120000
index 0000000..1a39c58
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py
@@ -0,0 +1 @@
+../../../migrate/versions/0100_alter_tool_dependency_table_version_column.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py
new file mode 120000
index 0000000..eed3a38
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py
@@ -0,0 +1 @@
+../../../migrate/versions/0101_drop_installed_changeset_revision_column.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py
new file mode 120000
index 0000000..c5759e0
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py
@@ -0,0 +1 @@
+../../../migrate/versions/0102_add_tool_dependency_status_columns.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py
new file mode 120000
index 0000000..c17fdea
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py
@@ -0,0 +1 @@
+../../../migrate/versions/0103_add_tool_shed_repository_status_columns.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py
new file mode 120000
index 0000000..5f07fdf
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py
@@ -0,0 +1 @@
+../../../migrate/versions/0109_add_repository_dependency_tables.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py
new file mode 120000
index 0000000..55c1ced
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py
@@ -0,0 +1 @@
+../../../migrate/versions/0113_update_migrate_tools_table.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py
new file mode 120000
index 0000000..7af8a2a
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py
@@ -0,0 +1 @@
+../../../migrate/versions/0114_update_migrate_tools_table_again.py
\ No newline at end of file
diff --git a/lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py b/lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py
new file mode 120000
index 0000000..d874f48
--- /dev/null
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py
@@ -0,0 +1 @@
+../../../migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py
\ No newline at end of file
diff --git a/lib/galaxy/model/util.py b/lib/galaxy/model/util.py
new file mode 100644
index 0000000..7295589
--- /dev/null
+++ b/lib/galaxy/model/util.py
@@ -0,0 +1,33 @@
+"""
+Utility helpers related to the model
+"""
+
+
+def pgcalc( sa_session, id, dryrun=False ):
+    """
+    Utility method for quickly recalculating user disk usage in postgres.
+
+    TODO: Check against the recently updated versions of sqlalchemy if this
+    'special' postgresql version is even necessary.
+    """
+    sql_calc = """SELECT COALESCE(SUM(total_size), 0)
+                  FROM (  SELECT DISTINCT ON (d.id) d.total_size, d.id
+                          FROM history_dataset_association hda
+                               JOIN history h ON h.id = hda.history_id
+                               JOIN dataset d ON hda.dataset_id = d.id
+                          WHERE h.user_id = :id
+                                AND h.purged = false
+                                AND hda.purged = false
+                                AND d.purged = false
+                                AND d.id NOT IN (SELECT dataset_id
+                                                 FROM library_dataset_dataset_association)
+                  ) sizes"""
+    sql_update = """UPDATE galaxy_user
+                    SET disk_usage = (%s)
+                    WHERE id = :id
+                    RETURNING disk_usage;""" % sql_calc
+    if dryrun:
+        r = sa_session.execute(sql_calc, {'id': id})
+    else:
+        r = sa_session.execute(sql_update, {'id': id})
+    return r.fetchone()[0]
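+
+# Illustrative usage (not part of the upstream file), for a hypothetical
+# user id 42: report the recalculated usage without writing it, then apply it.
+#
+#     usage = pgcalc( sa_session, 42, dryrun=True )
+#     usage = pgcalc( sa_session, 42 )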
diff --git a/lib/galaxy/objectstore/__init__.py b/lib/galaxy/objectstore/__init__.py
new file mode 100644
index 0000000..1bbad70
--- /dev/null
+++ b/lib/galaxy/objectstore/__init__.py
@@ -0,0 +1,791 @@
+"""
+objectstore package, abstraction for storing blobs of data for use in Galaxy.
+
+All providers ensure that data can be accessed on the filesystem for running
+tools.
+"""
+
+import logging
+import os
+import random
+import shutil
+import threading
+
+from xml.etree import ElementTree
+
+try:
+    from sqlalchemy.orm import object_session
+except ImportError:
+    object_session = None
+
+from galaxy.exceptions import ObjectInvalid, ObjectNotFound
+from galaxy.util import (
+    directory_hash_id,
+    force_symlink,
+    safe_makedirs,
+    safe_relpath,
+    umask_fix_perms,
+)
+from galaxy.util.odict import odict
+from galaxy.util.sleeper import Sleeper
+
+NO_SESSION_ERROR_MESSAGE = "Attempted to 'create' object store entity in configuration with no database session present."
+
+log = logging.getLogger( __name__ )
+
+
+class ObjectStore(object):
+
+    """ObjectStore abstract interface.
+
+    FIELD DESCRIPTIONS (these apply to all the methods in this class):
+
+    :type obj: StorableObject
+    :param obj: A Galaxy object with an assigned database ID accessible via
+        the .id attribute.
+
+    :type base_dir: string
+    :param base_dir: A key in `self.extra_dirs` corresponding to the base
+        directory in which this object should be created, or `None` to specify
+        the default directory.
+
+    :type dir_only: boolean
+    :param dir_only: If `True`, check only the path where the file identified
+        by `obj` should be located, not the dataset itself. This option applies
+        to `extra_dir` argument as well.
+
+    :type extra_dir: string
+    :param extra_dir: Append `extra_dir` to the directory structure where the
+        dataset identified by `obj` should be located. (e.g.,
+        000/extra_dir/obj.id). Valid values include 'job_work' (defaulting to
+        config.jobs_directory =
+        '$GALAXY_ROOT/database/jobs_directory');
+        'temp' (defaulting to config.new_file_path =
+        '$GALAXY_ROOT/database/tmp').
+
+    :type extra_dir_at_root: boolean
+    :param extra_dir_at_root: Applicable only if `extra_dir` is set. If True,
+        the `extra_dir` argument is placed at root of the created directory
+        structure rather than at the end (e.g., extra_dir/000/obj.id vs.
+        000/extra_dir/obj.id)
+
+    :type alt_name: string
+    :param alt_name: Use this name as the alternative name for the created
+        dataset rather than the default.
+
+    :type obj_dir: boolean
+    :param obj_dir: Append a subdirectory named with the object's ID (e.g.
+        000/obj.id)
+    """
+
+    def __init__(self, config, **kwargs):
+        """
+        :type config: object
+        :param config: An object, most likely populated from
+            `galaxy/config.ini`, having the following attributes:
+
+            * object_store_check_old_style (only used by the
+              :class:`DiskObjectStore` subclass)
+            * jobs_directory -- Each job is given a unique empty directory
+              as its current working directory. This option defines in what
+              parent directory those directories will be created.
+            * new_file_path -- Used to set the 'temp' extra_dir.
+        """
+        self.running = True
+        self.extra_dirs = {}
+        self.config = config
+        self.check_old_style = config.object_store_check_old_style
+        self.extra_dirs['job_work'] = config.jobs_directory
+        self.extra_dirs['temp'] = config.new_file_path
+
+    def shutdown(self):
+        """Close any connections for this ObjectStore."""
+        self.running = False
+
+    def exists(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+        """Return True if the object identified by `obj` exists, False otherwise."""
+        raise NotImplementedError()
+
+    def file_ready(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Check if a file corresponding to a dataset is ready to be used.
+
+        Return True if so, False otherwise
+        """
+        return True
+
+    def create(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Mark the object (`obj`) as existing in the store, but with no content.
+
+        This method will create a proper directory structure for
+        the file if the directory does not already exist.
+        """
+        raise NotImplementedError()
+
+    def empty(self, obj, base_dir=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Test if the object identified by `obj` has content.
+
+        If the object does not exist raises `ObjectNotFound`.
+        """
+        raise NotImplementedError()
+
+    def size(self, obj, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Return size of the object identified by `obj`.
+
+        If the object does not exist, return 0.
+        """
+        raise NotImplementedError()
+
+    def delete(self, obj, entire_dir=False, base_dir=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Delete the object identified by `obj`.
+
+        :type entire_dir: boolean
+        :param entire_dir: If True, delete the entire directory pointed to by
+                           extra_dir. For safety reasons, this option applies
+                           only for and in conjunction with the extra_dir or
+                           obj_dir options.
+        """
+        raise NotImplementedError()
+
+    def get_data(self, obj, start=0, count=-1, base_dir=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Fetch `count` bytes of data offset by `start` bytes using `obj.id`.
+
+        If the object does not exist raises `ObjectNotFound`.
+
+        :type start: int
+        :param start: Set the position to start reading the dataset file
+
+        :type count: int
+        :param count: Read at most `count` bytes from the dataset
+        """
+        raise NotImplementedError()
+
+    def get_filename(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Get the expected filename with absolute path for object with id `obj.id`.
+
+        This can be used to access the contents of the object.
+        """
+        raise NotImplementedError()
+
+    def update_from_file(self, obj, base_dir=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False, file_name=None, create=False):
+        """
+        Inform the store that the file associated with `obj.id` has been updated.
+
+        If `file_name` is provided, update from that file instead of the
+        default.
+        If the object does not exist raises `ObjectNotFound`.
+
+        :type file_name: string
+        :param file_name: Use file pointed to by `file_name` as the source for
+                          updating the dataset identified by `obj`
+
+        :type create: boolean
+        :param create: If True and the default dataset does not exist, create
+            it first.
+        """
+        raise NotImplementedError()
+
+    def get_object_url(self, obj, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Return the URL for direct access if supported, otherwise return None.
+
+        Note: take care not to bypass dataset security with this.
+        """
+        raise NotImplementedError()
+
+    def get_store_usage_percent(self):
+        """Return the percentage indicating how full the store is."""
+        raise NotImplementedError()
+
+
+class DiskObjectStore(ObjectStore):
+
+    """
+    Standard Galaxy object store.
+
+    Stores objects in files under a specific directory on disk.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> import tempfile
+    >>> file_path=tempfile.mkdtemp()
+    >>> obj = Bunch(id=1)
+    >>> s = DiskObjectStore(Bunch(umask=0o077, jobs_directory=file_path, new_file_path=file_path, object_store_check_old_style=False), file_path=file_path)
+    >>> s.create(obj)
+    >>> s.exists(obj)
+    True
+    >>> assert s.get_filename(obj) == file_path + '/000/dataset_1.dat'
+    """
+
+    def __init__(self, config, config_xml=None, file_path=None, extra_dirs=None):
+        """
+        :type config: object
+        :param config: An object, most likely populated from
+            `galaxy/config.ini`, having the same attributes needed by
+            :class:`ObjectStore` plus:
+
+            * file_path -- Default directory to store objects to disk in.
+            * umask -- the permission bits for newly created files.
+
+        :type config_xml: ElementTree
+
+        :type file_path: str
+        :param file_path: Override for the `config.file_path` value.
+
+        :type extra_dirs: dict
+        :param extra_dirs: Keys are string, values are directory paths.
+        """
+        super(DiskObjectStore, self).__init__(config)
+        self.file_path = file_path or config.file_path
+        # The new config_xml overrides universe settings.
+        if config_xml is not None:
+            for e in config_xml:
+                if e.tag == 'files_dir':
+                    self.file_path = e.get('path')
+                else:
+                    self.extra_dirs[e.get('type')] = e.get('path')
+        if extra_dirs is not None:
+            self.extra_dirs.update( extra_dirs )
+
+    def _get_filename(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False):
+        """
+        Return the absolute path for the file corresponding to the `obj.id`.
+
+        This is regardless of whether or not the file exists.
+        """
+        path = self._construct_path(obj, base_dir=base_dir, dir_only=dir_only, extra_dir=extra_dir,
+                                    extra_dir_at_root=extra_dir_at_root, alt_name=alt_name,
+                                    obj_dir=False, old_style=True)
+        # For backward compatibility: check the old style root path first;
+        # otherwise construct the hashed path.
+        if not os.path.exists(path):
+            return self._construct_path(obj, base_dir=base_dir, dir_only=dir_only, extra_dir=extra_dir,
+                                        extra_dir_at_root=extra_dir_at_root, alt_name=alt_name)
+        return path
+
+    # TODO: rename to _disk_path or something like that to avoid conflicts with
+    # children that'll use the local_extra_dirs decorator, e.g. S3
+    def _construct_path(self, obj, old_style=False, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False, **kwargs):
+        """
+        Construct the absolute path for accessing the object identified by `obj.id`.
+
+        :type base_dir: string
+        :param base_dir: A key in self.extra_dirs corresponding to the base
+                         directory in which this object should be created, or
+                         None to specify the default directory.
+
+        :type dir_only: boolean
+        :param dir_only: If True, check only the path where the file
+                         identified by `obj` should be located, not the
+                         dataset itself. This option applies to `extra_dir`
+                         argument as well.
+
+        :type extra_dir: string
+        :param extra_dir: Append the value of this parameter to the expected
+            path used to access the object identified by `obj` (e.g.,
+            /files/000/<extra_dir>/dataset_10.dat).
+
+        :type alt_name: string
+        :param alt_name: Use this name as the alternative name for the returned
+                         dataset rather than the default.
+
+        :type old_style: boolean
+        :param old_style: This option is used for backward compatibility. If
+            `True` then the composed directory structure does not include a
+            hash id (e.g., /files/dataset_10.dat (old) vs.
+            /files/000/dataset_10.dat (new))
+        """
+        base = os.path.abspath(self.extra_dirs.get(base_dir, self.file_path))
+        # extra_dir should never be constructed from provided data but just
+        # make sure there are no shenanigans afoot
+        if extra_dir and extra_dir != os.path.normpath(extra_dir):
+            log.warning('extra_dir is not normalized: %s', extra_dir)
+            raise ObjectInvalid("The requested object is invalid")
+        # ensure that any parent directory references in alt_name would not
+        # result in a path not contained in the directory path constructed here
+        if alt_name and not safe_relpath(alt_name):
+            log.warning('alt_name would locate path outside dir: %s', alt_name)
+            raise ObjectInvalid("The requested object is invalid")
+        if old_style:
+            if extra_dir is not None:
+                path = os.path.join(base, extra_dir)
+            else:
+                path = base
+        else:
+            # Construct hashed path
+            rel_path = os.path.join(*directory_hash_id(obj.id))
+            # Create a subdirectory for the object ID
+            if obj_dir:
+                rel_path = os.path.join(rel_path, str(obj.id))
+            # Optionally append extra_dir
+            if extra_dir is not None:
+                if extra_dir_at_root:
+                    rel_path = os.path.join(extra_dir, rel_path)
+                else:
+                    rel_path = os.path.join(rel_path, extra_dir)
+            path = os.path.join(base, rel_path)
+        if not dir_only:
+            path = os.path.join(path, alt_name if alt_name else "dataset_%s.dat" % obj.id)
+        return os.path.abspath(path)
+
+    def exists(self, obj, **kwargs):
+        """Override `ObjectStore`'s stub and check on disk."""
+        if self.check_old_style:
+            path = self._construct_path(obj, old_style=True, **kwargs)
+            # For backward compatibility: check root path first; otherwise
+            # construct and check hashed path.
+            if os.path.exists(path):
+                return True
+        return os.path.exists(self._construct_path(obj, **kwargs))
+
+    def create(self, obj, **kwargs):
+        """Override `ObjectStore`'s stub by creating any files and folders on disk."""
+        if not self.exists(obj, **kwargs):
+            path = self._construct_path(obj, **kwargs)
+            dir_only = kwargs.get('dir_only', False)
+            # Create directory if it does not exist
+            dir = path if dir_only else os.path.dirname(path)
+            safe_makedirs(dir)
+            # Create the file if it does not exist
+            if not dir_only:
+                open(path, 'w').close()  # Should be rb?
+                umask_fix_perms(path, self.config.umask, 0o666)
+
+    def empty(self, obj, **kwargs):
+        """Override `ObjectStore`'s stub by checking file size on disk."""
+        return os.path.getsize(self.get_filename(obj, **kwargs)) == 0
+
+    def size(self, obj, **kwargs):
+        """Override `ObjectStore`'s stub by return file size on disk.
+
+        Returns 0 if the object doesn't exist yet or other error.
+        """
+        if self.exists(obj, **kwargs):
+            try:
+                return os.path.getsize(self.get_filename(obj, **kwargs))
+            except OSError:
+                return 0
+        else:
+            return 0
+
+    def delete(self, obj, entire_dir=False, **kwargs):
+        """Override `ObjectStore`'s stub; delete the file or folder on disk."""
+        path = self.get_filename(obj, **kwargs)
+        extra_dir = kwargs.get('extra_dir', None)
+        obj_dir = kwargs.get('obj_dir', False)
+        try:
+            if entire_dir and (extra_dir or obj_dir):
+                shutil.rmtree(path)
+                return True
+            if self.exists(obj, **kwargs):
+                os.remove(path)
+                return True
+        except OSError as ex:
+            log.critical('%s delete error %s' % (self._get_filename(obj, **kwargs), ex))
+        return False
+
+    def get_data(self, obj, start=0, count=-1, **kwargs):
+        """Override `ObjectStore`'s stub; retrieve data directly from disk."""
+        data_file = open(self.get_filename(obj, **kwargs), 'r')  # Should be rb?
+        data_file.seek(start)
+        content = data_file.read(count)
+        data_file.close()
+        return content
+
+    def get_filename(self, obj, **kwargs):
+        """
+        Override `ObjectStore`'s stub.
+
+        If `object_store_check_old_style` is set to `True` in config then the
+        root path is checked first.
+        """
+        if self.check_old_style:
+            path = self._construct_path(obj, old_style=True, **kwargs)
+            # For backward compatibility, check root path first; otherwise,
+            # construct and return hashed path
+            if os.path.exists(path):
+                return path
+        return self._construct_path(obj, **kwargs)
+
+    def update_from_file(self, obj, file_name=None, create=False, **kwargs):
+        """`create` parameter is not used in this implementation."""
+        preserve_symlinks = kwargs.pop( 'preserve_symlinks', False )
+        # FIXME: symlinks and the object store model may not play well together
+        # these should be handled better, e.g. registering the symlink'd file
+        # as an object
+        if create:
+            self.create(obj, **kwargs)
+        if file_name and self.exists(obj, **kwargs):
+            try:
+                if preserve_symlinks and os.path.islink( file_name ):
+                    force_symlink( os.readlink( file_name ), self.get_filename( obj, **kwargs ) )
+                else:
+                    shutil.copy( file_name, self.get_filename( obj, **kwargs ) )
+            except IOError as ex:
+                log.critical('Error copying %s to %s: %s' % (file_name, self._get_filename(obj, **kwargs), ex))
+                raise ex
+
+    def get_object_url(self, obj, **kwargs):
+        """
+        Override `ObjectStore`'s stub.
+
+        Returns `None`, we have no URLs.
+        """
+        return None
+
+    def get_store_usage_percent(self):
+        """Override `ObjectStore`'s stub by return percent storage used."""
+        st = os.statvfs(self.file_path)
+        return ( float( st.f_blocks - st.f_bavail ) / st.f_blocks ) * 100
+
+
+class NestedObjectStore(ObjectStore):
+
+    """
+    Base for ObjectStores that use other ObjectStores.
+
+    Example: DistributedObjectStore, HierarchicalObjectStore
+    """
+
+    def __init__(self, config, config_xml=None):
+        """Extend `ObjectStore`'s constructor."""
+        super(NestedObjectStore, self).__init__(config)
+        self.backends = {}
+
+    def shutdown(self):
+        """For each backend, shuts them down."""
+        for store in self.backends.values():
+            store.shutdown()
+        super(NestedObjectStore, self).shutdown()
+
+    def exists(self, obj, **kwargs):
+        """Determine if the `obj` exists in any of the backends."""
+        return self._call_method('exists', obj, False, False, **kwargs)
+
+    def file_ready(self, obj, **kwargs):
+        """Determine if the file for `obj` is ready to be used by any of the backends."""
+        return self._call_method('file_ready', obj, False, False, **kwargs)
+
+    def create(self, obj, **kwargs):
+        """Create a backing file in a random backend."""
+        random.choice(list(self.backends.values())).create(obj, **kwargs)
+
+    def empty(self, obj, **kwargs):
+        """For the first backend that has this `obj`, determine if it is empty."""
+        return self._call_method('empty', obj, True, False, **kwargs)
+
+    def size(self, obj, **kwargs):
+        """For the first backend that has this `obj`, return its size."""
+        return self._call_method('size', obj, 0, False, **kwargs)
+
+    def delete(self, obj, **kwargs):
+        """For the first backend that has this `obj`, delete it."""
+        return self._call_method('delete', obj, False, False, **kwargs)
+
+    def get_data(self, obj, **kwargs):
+        """For the first backend that has this `obj`, get data from it."""
+        return self._call_method('get_data', obj, ObjectNotFound, True, **kwargs)
+
+    def get_filename(self, obj, **kwargs):
+        """For the first backend that has this `obj`, get its filename."""
+        return self._call_method('get_filename', obj, ObjectNotFound, True, **kwargs)
+
+    def update_from_file(self, obj, **kwargs):
+        """For the first backend that has this `obj`, update it from the given file."""
+        if kwargs.get('create', False):
+            self.create(obj, **kwargs)
+            kwargs['create'] = False
+        return self._call_method('update_from_file', obj, ObjectNotFound, True, **kwargs)
+
+    def get_object_url(self, obj, **kwargs):
+        """For the first backend that has this `obj`, get its URL."""
+        return self._call_method('get_object_url', obj, None, False, **kwargs)
+
+    def _call_method(self, method, obj, default, default_is_exception,
+            **kwargs):
+        """Check all children object stores for the first one with the dataset."""
+        for key, store in self.backends.items():
+            if store.exists(obj, **kwargs):
+                return store.__getattribute__(method)(obj, **kwargs)
+        if default_is_exception:
+            raise default('objectstore, _call_method failed: %s on %s, kwargs: %s'
+                          % ( method, str( obj ), str( kwargs ) ) )
+        else:
+            return default
+
+
+class DistributedObjectStore(NestedObjectStore):
+
+    """
+    ObjectStore that defers to a list of backends.
+
+    When getting objects the first store where the object exists is used.
+    When creating objects they are created in a store selected randomly, but
+    with weighting.
+    """
+
+    def __init__(self, config, config_xml=None, fsmon=False):
+        """
+        :type config: object
+        :param config: An object, most likely populated from
+            `galaxy/config.ini`, having the same attributes needed by
+            :class:`NestedObjectStore` plus:
+
+            * distributed_object_store_config_file
+
+        :type config_xml: ElementTree
+
+        :type fsmon: bool
+        :param fsmon: If True, monitor the file system for free space,
+            removing backends when they get too full.
+        """
+        super(DistributedObjectStore, self).__init__(config,
+                config_xml=config_xml)
+        if config_xml is None:
+            self.distributed_config = config.distributed_object_store_config_file
+            assert self.distributed_config is not None, \
+                "distributed object store ('object_store = distributed') " \
+                "requires a config file, please set one in " \
+                "'distributed_object_store_config_file')"
+        self.backends = {}
+        self.weighted_backend_ids = []
+        self.original_weighted_backend_ids = []
+        self.max_percent_full = {}
+        self.global_max_percent_full = 0.0
+        random.seed()
+        self.__parse_distributed_config(config, config_xml)
+        self.sleeper = None
+        if fsmon and ( self.global_max_percent_full or [_ for _ in self.max_percent_full.values() if _ != 0.0] ):
+            self.sleeper = Sleeper()
+            self.filesystem_monitor_thread = threading.Thread(target=self.__filesystem_monitor)
+            self.filesystem_monitor_thread.setDaemon( True )
+            self.filesystem_monitor_thread.start()
+            log.info("Filesystem space monitor started")
+
+    def __parse_distributed_config(self, config, config_xml=None):
+        if config_xml is None:
+            root = ElementTree.parse(self.distributed_config).getroot()
+            log.debug('Loading backends for distributed object store from %s', self.distributed_config)
+        else:
+            root = config_xml.find('backends')
+            log.debug('Loading backends for distributed object store from %s', config_xml.get('id'))
+        self.global_max_percent_full = float(root.get('maxpctfull', 0))
+        for elem in [ e for e in root if e.tag == 'backend' ]:
+            id = elem.get('id')
+            weight = int(elem.get('weight', 1))
+            maxpctfull = float(elem.get('maxpctfull', 0))
+            if elem.get('type', 'disk'):
+                path = None
+                extra_dirs = {}
+                for sub in elem:
+                    if sub.tag == 'files_dir':
+                        path = sub.get('path')
+                    elif sub.tag == 'extra_dir':
+                        type = sub.get('type')
+                        extra_dirs[type] = sub.get('path')
+                self.backends[id] = DiskObjectStore(config, file_path=path, extra_dirs=extra_dirs)
+                self.max_percent_full[id] = maxpctfull
+                log.debug("Loaded disk backend '%s' with weight %s and file_path: %s" % (id, weight, path))
+                if extra_dirs:
+                    log.debug("    Extra directories:")
+                    for type, dir in extra_dirs.items():
+                        log.debug("        %s: %s" % (type, dir))
+            for i in range(0, weight):
+                # The simplest way to do weighting: add backend ids to a
+                # sequence the number of times equalling weight, then randomly
+                # choose a backend from that sequence at creation
+                self.weighted_backend_ids.append(id)
+        self.original_weighted_backend_ids = self.weighted_backend_ids
+
+    def shutdown(self):
+        """Shut down. Kill the free space monitor if there is one."""
+        super(DistributedObjectStore, self).shutdown()
+        if self.sleeper is not None:
+            self.sleeper.wake()
+
+    def __filesystem_monitor(self):
+        while self.running:
+            new_weighted_backend_ids = self.original_weighted_backend_ids
+            for id, backend in self.backends.items():
+                maxpct = self.max_percent_full[id] or self.global_max_percent_full
+                pct = backend.get_store_usage_percent()
+                if pct > maxpct:
+                    new_weighted_backend_ids = [_ for _ in new_weighted_backend_ids if _ != id]
+            self.weighted_backend_ids = new_weighted_backend_ids
+            self.sleeper.sleep(120)  # Test free space every 2 minutes
+
+    def create(self, obj, **kwargs):
+        """The only method in which obj.object_store_id may be None."""
+        if obj.object_store_id is None or not self.exists(obj, **kwargs):
+            if obj.object_store_id is None or obj.object_store_id not in self.weighted_backend_ids:
+                try:
+                    obj.object_store_id = random.choice(self.weighted_backend_ids)
+                except IndexError:
+                    raise ObjectInvalid('objectstore.create, could not generate '
+                                        'obj.object_store_id: %s, kwargs: %s'
+                                        % ( str( obj ), str( kwargs ) ) )
+                _create_object_in_session( obj )
+                log.debug("Selected backend '%s' for creation of %s %s"
+                          % (obj.object_store_id, obj.__class__.__name__, obj.id))
+            else:
+                log.debug("Using preferred backend '%s' for creation of %s %s"
+                          % (obj.object_store_id, obj.__class__.__name__, obj.id))
+            self.backends[obj.object_store_id].create(obj, **kwargs)
+
+    def _call_method(self, method, obj, default, default_is_exception, **kwargs):
+        object_store_id = self.__get_store_id_for(obj, **kwargs)
+        if object_store_id is not None:
+            return self.backends[object_store_id].__getattribute__(method)(obj, **kwargs)
+        if default_is_exception:
+            raise default('objectstore, _call_method failed: %s on %s, kwargs: %s'
+                          % ( method, str( obj ), str( kwargs ) ) )
+        else:
+            return default
+
+    def __get_store_id_for(self, obj, **kwargs):
+        if obj.object_store_id is not None and obj.object_store_id in self.backends:
+            return obj.object_store_id
+        else:
+            # if this instance has been switched from a non-distributed to a
+            # distributed object store, or if the object's store id is invalid,
+            # try to locate the object
+            log.warning('The backend object store ID (%s) for %s object with ID %s is invalid'
+                        % (obj.object_store_id, obj.__class__.__name__, obj.id))
+            for id, store in self.backends.items():
+                if store.exists(obj, **kwargs):
+                    log.warning('%s object with ID %s found in backend object store with ID %s'
+                                % (obj.__class__.__name__, obj.id, id))
+                    obj.object_store_id = id
+                    _create_object_in_session( obj )
+                    return id
+        return None
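+
+# Illustrative distributed store configuration (not part of the upstream
+# file); element and attribute names follow __parse_distributed_config
+# above, while the ids and paths are hypothetical:
+#
+#   <backends maxpctfull="90">
+#       <backend id="files1" type="disk" weight="2" maxpctfull="95">
+#           <files_dir path="/data/files1"/>
+#           <extra_dir type="job_work" path="/data/files1/job_work"/>
+#       </backend>
+#       <backend id="files2" type="disk" weight="1">
+#           <files_dir path="/data/files2"/>
+#       </backend>
+#   </backends>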
+
+
+class HierarchicalObjectStore(NestedObjectStore):
+
+    """
+    ObjectStore that defers to a list of backends.
+
+    When getting objects the first store where the object exists is used.
+    When creating objects only the first store is used.
+    """
+
+    def __init__(self, config, config_xml=None, fsmon=False):
+        """The default contructor. Extends `NestedObjectStore`."""
+        super(HierarchicalObjectStore, self).__init__(config, config_xml=config_xml)
+        self.backends = odict()
+        for b in sorted(config_xml.find('backends'), key=lambda b: int(b.get('order'))):
+            self.backends[int(b.get('order'))] = build_object_store_from_config(config, fsmon=fsmon, config_xml=b)
+
+    def exists(self, obj, **kwargs):
+        """Check all child object stores."""
+        for store in self.backends.values():
+            if store.exists(obj, **kwargs):
+                return True
+        return False
+
+    def create(self, obj, **kwargs):
+        """Call the primary object store."""
+        self.backends[0].create(obj, **kwargs)
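+
+# Illustrative hierarchical configuration (not part of the upstream file);
+# the constructor above sorts <backend> elements by their integer 'order'
+# attribute and create() always uses order 0. Paths are hypothetical:
+#
+#   <backends>
+#       <backend type="disk" order="0">
+#           <files_dir path="/fast/files"/>
+#       </backend>
+#       <backend type="disk" order="1">
+#           <files_dir path="/archive/files"/>
+#       </backend>
+#   </backends>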
+
+
+def build_object_store_from_config(config, fsmon=False, config_xml=None):
+    """
+    Invoke the appropriate object store.
+
+    Will use the `object_store_config_file` attribute of the `config` object to
+    configure a new object store from the specified XML file.
+
+    Alternatively, you can specify the object store type in the `object_store`
+    attribute of the `config` object. Currently 'disk', 's3', 'swift',
+    'distributed', 'hierarchical', 'irods', and 'azure_blob' are supported
+    values.
+    """
+    if config_xml is None and os.path.exists( config.object_store_config_file ):
+        # This is a top level invocation of build_object_store_from_config, and
+        # we have an object_store_conf.xml -- read the .xml and build
+        # accordingly
+        root = ElementTree.parse(config.object_store_config_file).getroot()
+        store = root.get('type')
+        config_xml = root
+    elif config_xml is not None:
+        store = config_xml.get('type')
+    else:
+        store = config.object_store
+
+    if store == 'disk':
+        return DiskObjectStore(config=config, config_xml=config_xml)
+    elif store == 's3':
+        from .s3 import S3ObjectStore
+        return S3ObjectStore(config=config, config_xml=config_xml)
+    elif store == 'swift':
+        from .s3 import SwiftObjectStore
+        return SwiftObjectStore(config=config, config_xml=config_xml)
+    elif store == 'distributed':
+        return DistributedObjectStore(
+            config=config, fsmon=fsmon, config_xml=config_xml)
+    elif store == 'hierarchical':
+        return HierarchicalObjectStore(config=config, config_xml=config_xml)
+    elif store == 'irods':
+        from .rods import IRODSObjectStore
+        return IRODSObjectStore(config=config, config_xml=config_xml)
+    elif store == 'azure_blob':
+        from .azure_blob import AzureBlobObjectStore
+        return AzureBlobObjectStore(config=config, config_xml=config_xml)
+    # Disable the Pulsar object store for now until it receives some attention
+    # elif store == 'pulsar':
+    #    from .pulsar import PulsarObjectStore
+    #    return PulsarObjectStore(config=config, config_xml=config_xml)
+    else:
+        log.error("Unrecognized object store definition: {0}".format(store))
+
+
+def local_extra_dirs( func ):
+    """Non-local plugin decorator using local directories for the extra_dirs (job_work and temp)."""
+    def wraps( self, *args, **kwargs ):
+        if kwargs.get( 'base_dir', None ) is None:
+            return func( self, *args, **kwargs )
+        else:
+            for c in self.__class__.__mro__:
+                if c.__name__ == 'DiskObjectStore':
+                    return getattr( c, func.__name__ )( self, *args, **kwargs )
+            raise Exception("Could not call DiskObjectStore's %s method, does your "
+                            "Object Store plugin inherit from DiskObjectStore?"
+                            % func.__name__ )
+    return wraps
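+# Usage sketch (hypothetical plugin, assuming it subclasses DiskObjectStore):
+#
+#   class MyRemoteStore(DiskObjectStore):
+#       @local_extra_dirs
+#       def exists(self, obj, **kwargs):
+#           ...  # remote lookup; base_dir requests fall back to DiskObjectStore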
+
+
+def convert_bytes(bytes):
+    """A helper function used for pretty printing disk usage."""
+    if bytes is None:
+        bytes = 0
+    bytes = float(bytes)
+
+    if bytes >= 1099511627776:
+        terabytes = bytes / 1099511627776
+        size = '%.2fTB' % terabytes
+    elif bytes >= 1073741824:
+        gigabytes = bytes / 1073741824
+        size = '%.2fGB' % gigabytes
+    elif bytes >= 1048576:
+        megabytes = bytes / 1048576
+        size = '%.2fMB' % megabytes
+    elif bytes >= 1024:
+        kilobytes = bytes / 1024
+        size = '%.2fKB' % kilobytes
+    else:
+        size = '%.2fb' % bytes
+    return size
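+# Illustrative values: convert_bytes(1024) == '1.00KB';
+# convert_bytes(1073741824) == '1.00GB'; convert_bytes(None) == '0.00b'.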
+
+
+def _create_object_in_session( obj ):
+    session = object_session( obj ) if object_session is not None else None
+    if session is not None:
+        object_session( obj ).add( obj )
+        object_session( obj ).flush()
+    else:
+        raise Exception( NO_SESSION_ERROR_MESSAGE )
diff --git a/lib/galaxy/objectstore/azure_blob.py b/lib/galaxy/objectstore/azure_blob.py
new file mode 100644
index 0000000..64efa4a
--- /dev/null
+++ b/lib/galaxy/objectstore/azure_blob.py
@@ -0,0 +1,541 @@
+"""
+Object Store plugin for the Microsoft Azure Block Blob Storage system
+"""
+
+import logging
+import os
+import shutil
+import threading
+import time
+
+from datetime import datetime
+
+from galaxy.exceptions import ObjectInvalid, ObjectNotFound
+from galaxy.util import directory_hash_id, safe_relpath, umask_fix_perms
+from galaxy.util.sleeper import Sleeper
+from ..objectstore import convert_bytes, ObjectStore
+
+try:
+    from azure.common import AzureHttpError
+    from azure.storage import CloudStorageAccount
+    from azure.storage.blob import BlockBlobService
+    from azure.storage.blob.models import Blob
+except ImportError:
+    BlockBlobService = None
+
+NO_BLOBSERVICE_ERROR_MESSAGE = ("ObjectStore configured, but no azure.storage.blob dependency available. "
+                                "Please install and properly configure azure.storage.blob or modify Object Store configuration.")
+
+log = logging.getLogger( __name__ )
+
+
+class AzureBlobObjectStore(ObjectStore):
+    """
+    Object store that stores objects as blobs in an Azure Blob Container. A local
+    cache exists that is used as an intermediate location for files between
+    Galaxy and Azure.
+    """
+    def __init__(self, config, config_xml):
+        if BlockBlobService is None:
+            raise Exception(NO_BLOBSERVICE_ERROR_MESSAGE)
+        super(AzureBlobObjectStore, self).__init__(config)
+
+        self.staging_path = self.config.file_path
+        self.transfer_progress = 0
+        self._parse_config_xml(config_xml)
+        self._configure_connection()
+        self.container_lease = self._get_container_lease()
+
+        # Clean cache only if value is set in galaxy.ini
+        if self.cache_size != -1:
+            # Convert GBs to bytes for comparison
+            self.cache_size = self.cache_size * 1073741824
+            # Helper for interruptable sleep
+            self.sleeper = Sleeper()
+            self.cache_monitor_thread = threading.Thread(target=self.__cache_monitor)
+            self.cache_monitor_thread.start()
+            log.info("Cache cleaner manager started")
+
+    ###################
+    # Private Methods #
+    ###################
+
+    # config_xml is an ElementTree object.
+    def _parse_config_xml(self, config_xml):
+        try:
+            auth_xml = config_xml.find('auth')
+            self.account_name = auth_xml.get('account_name')
+            self.account_key = auth_xml.get('account_key')
+            container_xml = config_xml.find('container')
+            self.container_name = container_xml.get('name')
+            self.max_chunk_size = int(container_xml.get('max_chunk_size', 250))  # currently unused
+            cache_xml = config_xml.find('cache')
+            self.cache_size = float(cache_xml.get('size', -1))
+            self.staging_path = cache_xml.get('path', self.config.object_store_cache_path)
+
+            for d_xml in config_xml.findall('extra_dir'):
+                self.extra_dirs[d_xml.get('type')] = d_xml.get('path')
+
+            log.debug("Object cache dir:    %s", self.staging_path)
+            log.debug("       job work dir: %s", self.extra_dirs['job_work'])
+
+        except Exception:
+            # Toss it back up after logging, we can't continue loading at this point.
+            log.exception("Malformed ObjectStore Configuration XML -- unable to continue")
+            raise
+
+    def _configure_connection(self):
+        log.debug("Configuring Connection")
+        self.account = CloudStorageAccount(self.account_name, self.account_key)
+        self.service = self.account.create_block_blob_service()
+
+    def _get_container_lease(self):
+        """ Sometimes a handle to a container is not established right away so try
+        it a few times. Raise error is connection is not established. """
+        for i in range(5):
+            try:
+                self.service.break_container_lease(self.container_name)
+                container_lease = self.service.acquire_container_lease(self.container_name)
+                log.debug("Using azure blob store with container '%s'", self.container_name)
+                return container_lease
+            except AzureHttpError:
+                try:
+                    log.debug("container not found, creating azure blob store container with name '%s'", self.container_name)
+                    self.service.create_container(self.container_name)
+                    container_lease = self.service.acquire_container_lease(self.container_name)
+                    return container_lease
+                except AzureHttpError:
+                    log.exception("Could not get container '%s', attempt %s/5", self.container_name, i + 1)
+                    time.sleep(2)
+        # All the attempts have been exhausted and a connection was not
+        # established; raise an error. AzureHttpError takes a message and a
+        # status code; 0 is used here as a placeholder status.
+        raise AzureHttpError("Unable to get or create container '%s'" % self.container_name, 0)
+
+    def _construct_path(self, obj, base_dir=None, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False, **kwargs):
+        # extra_dir should never be constructed from provided data but just
+        # make sure there are no shenanigans afoot
+        if extra_dir and extra_dir != os.path.normpath(extra_dir):
+            log.warning('extra_dir is not normalized: %s', extra_dir)
+            raise ObjectInvalid("The requested object is invalid")
+        # ensure that any parent directory references in alt_name would not
+        # result in a path not contained in the directory path constructed here
+        if alt_name:
+            if not safe_relpath(alt_name):
+                log.warning('alt_name would locate path outside dir: %s', alt_name)
+                raise ObjectInvalid("The requested object is invalid")
+            # alt_name can contain parent directory references, but Azure will
+            # not follow them, so if they are valid we normalize them out
+            alt_name = os.path.normpath(alt_name)
+
+        rel_path = os.path.join(*directory_hash_id(obj.id))
+
+        if extra_dir is not None:
+            if extra_dir_at_root:
+                rel_path = os.path.join(extra_dir, rel_path)
+            else:
+                rel_path = os.path.join(rel_path, extra_dir)
+
+        # for JOB_WORK directory
+        if obj_dir:
+            rel_path = os.path.join(rel_path, str(obj.id))
+        if base_dir:
+            base = self.extra_dirs.get(base_dir)
+            return os.path.join(base, rel_path)
+
+        # S3 folders are marked by having trailing '/' so add it now
+        # rel_path = '%s/' % rel_path # assume for now we don't need this in Azure blob storage.
+
+        if not dir_only:
+            rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % obj.id)
+
+        return rel_path
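+    # Path sketch (using Galaxy's directory_hash_id): for obj.id == 12345 the
+    # method returns '012/dataset_12345.dat', or just '012' with dir_only=True.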
+
+    def _fix_permissions(self, rel_path):
+        """ Set permissions on rel_path"""
+        for basedir, _, files in os.walk(rel_path):
+            umask_fix_perms(basedir, self.config.umask, 0o777, self.config.gid)
+            for filename in files:
+                path = os.path.join(basedir, filename)
+                # Ignore symlinks
+                if os.path.islink(path):
+                    continue
+                umask_fix_perms(path, self.config.umask, 0o666, self.config.gid)
+
+    def _get_cache_path(self, rel_path):
+        return os.path.abspath(os.path.join(self.staging_path, rel_path))
+
+    def _get_transfer_progress(self):
+        return self.transfer_progress
+
+    def _get_size_in_azure(self, rel_path):
+        try:
+            properties = self.service.get_blob_properties(self.container_name, rel_path)
+            # Currently this returns a blob and not a BlobProperties object
+            # Similar issue for the ruby https://github.com/Azure/azure-storage-ruby/issues/13
+            # The typecheck is an attempt at future-proofing this when/if the bug is fixed.
+            if type(properties) is Blob:
+                properties = properties.properties
+            if properties:
+                size_in_bytes = properties.content_length
+                return size_in_bytes
+        except AzureHttpError:
+            log.exception("Could not get size of blob '%s' from Azure", rel_path)
+            return -1
+
+    def _in_azure(self, rel_path):
+        try:
+            exists = self.service.exists(self.container_name, rel_path)
+        except AzureHttpError:
+            log.exception("Trouble checking existence of Azure blob '%s'", rel_path)
+            return False
+        return exists
+
+    def _in_cache(self, rel_path):
+        """ Check if the given dataset is in the local cache. """
+        cache_path = self._get_cache_path(rel_path)
+        return os.path.exists(cache_path)
+
+    def _pull_into_cache(self, rel_path):
+        # Ensure the cache directory structure exists (e.g., dataset_#_files/)
+        rel_path_dir = os.path.dirname(rel_path)
+        if not os.path.exists(self._get_cache_path(rel_path_dir)):
+            os.makedirs(self._get_cache_path(rel_path_dir))
+        # Now pull in the file
+        file_ok = self._download(rel_path)
+        self._fix_permissions(self._get_cache_path(rel_path_dir))
+        return file_ok
+
+    def _transfer_cb(self, complete, total):
+        self.transfer_progress = float(complete) / float(total) * 100  # in percent
+
+    def _download(self, rel_path):
+        local_destination = self._get_cache_path(rel_path)
+        try:
+            log.debug("Pulling '%s' into cache to %s", rel_path, local_destination)
+            if self.cache_size > 0 and self._get_size_in_azure(rel_path) > self.cache_size:
+                log.critical("File %s is larger (%s) than the cache size (%s). Cannot download.",
+                             rel_path, self._get_size_in_azure(rel_path), self.cache_size)
+                return False
+            else:
+                self.transfer_progress = 0  # Reset transfer progress counter
+                self.service.get_blob_to_path(self.container_name, rel_path, local_destination, progress_callback=self._transfer_cb)
+                return True
+        except AzureHttpError:
+            log.exception("Problem downloading '%s' from Azure", rel_path)
+        return False
+
+    def _push_to_os(self, rel_path, source_file=None, from_string=None):
+        """
+        Push the file pointed to by ``rel_path`` to the object store naming the blob
+        ``rel_path``. If ``source_file`` is provided, push that file instead while
+        still using ``rel_path`` as the blob name.
+        If ``from_string`` is provided, set contents of the file to the value of
+        the string.
+        """
+        try:
+            source_file = source_file or self._get_cache_path(rel_path)
+
+            if not os.path.exists(source_file):
+                log.error("Tried updating blob '%s' from source file '%s', but source file does not exist.", rel_path, source_file)
+                return False
+
+            if os.path.getsize(source_file) == 0:
+                log.debug("Wanted to push file '%s' to azure blob '%s' but its size is 0; skipping.", source_file, rel_path)
+                return True
+
+            if from_string:
+                self.service.create_blob_from_text(self.container_name, rel_path, from_string, progress_callback=self._transfer_cb)
+                log.debug("Pushed data from string '%s' to blob '%s'", from_string, rel_path)
+            else:
+                start_time = datetime.now()
+                log.debug("Pushing cache file '%s' of size %s bytes to '%s'", source_file, os.path.getsize(source_file), rel_path)
+                self.transfer_progress = 0  # Reset transfer progress counter
+                self.service.create_blob_from_path(self.container_name, rel_path, source_file, progress_callback=self._transfer_cb)
+                end_time = datetime.now()
+                log.debug("Pushed cache file '%s' to blob '%s' (%s bytes transfered in %s sec)",
+                          source_file, rel_path, os.path.getsize(source_file), end_time - start_time)
+            return True
+
+        except AzureHttpError:
+            log.exception("Trouble pushing to Azure Blob '%s' from file '%s'", rel_path, source_file)
+        return False
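+    # Usage sketch: self._push_to_os('012/dataset_1.dat', from_string='hello')
+    # uploads the literal string as the blob contents; note the guards above
+    # still require a non-empty cached copy of '012/dataset_1.dat' to exist.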
+
+    ##################
+    # Public Methods #
+    ##################
+
+    def exists(self, obj, **kwargs):
+        in_cache = in_azure = False
+        rel_path = self._construct_path(obj, **kwargs)
+
+        in_cache = self._in_cache(rel_path)
+        in_azure = self._in_azure(rel_path)
+        # log.debug("~~~~~~ File '%s' exists in cache: %s; in azure: %s" % (rel_path, in_cache, in_azure))
+        # dir_only does not get synced so shortcut the decision
+        dir_only = kwargs.get('dir_only', False)
+        base_dir = kwargs.get('base_dir', None)
+        if dir_only:
+            if in_cache or in_azure:
+                return True
+            # for JOB_WORK directory
+            elif base_dir:
+                if not os.path.exists(rel_path):
+                    os.makedirs(rel_path)
+                return True
+            else:
+                return False
+
+        # TODO: Sync should probably not be done here. Add this to an async upload stack?
+        if in_cache and not in_azure:
+            self._push_to_os(rel_path, source_file=self._get_cache_path(rel_path))
+            return True
+        elif in_azure:
+            return True
+        else:
+            return False
+
+    def file_ready(self, obj, **kwargs):
+        """
+        A helper method that checks if a file corresponding to a dataset is
+        ready and available to be used. Return ``True`` if so, ``False`` otherwise.
+        """
+        rel_path = self._construct_path(obj, **kwargs)
+        # Make sure the size in cache is available in its entirety
+        if self._in_cache(rel_path):
+            local_size = os.path.getsize(self._get_cache_path(rel_path))
+            remote_size = self._get_size_in_azure(rel_path)
+            if local_size == remote_size:
+                return True
+            else:
+                log.debug("Waiting for dataset %s to transfer from OS: %s/%s", rel_path, local_size, remote_size)
+
+        return False
+
+    def create(self, obj, **kwargs):
+
+        if not self.exists(obj, **kwargs):
+
+            # Pull out locally used fields
+            extra_dir = kwargs.get('extra_dir', None)
+            extra_dir_at_root = kwargs.get('extra_dir_at_root', False)
+            dir_only = kwargs.get('dir_only', False)
+            alt_name = kwargs.get('alt_name', None)
+
+            # Construct hashed path
+            rel_path = os.path.join(*directory_hash_id(obj.id))
+
+            # Optionally append extra_dir
+            if extra_dir is not None:
+                if extra_dir_at_root:
+                    rel_path = os.path.join(extra_dir, rel_path)
+                else:
+                    rel_path = os.path.join(rel_path, extra_dir)
+
+            # Create given directory in cache
+            cache_dir = os.path.join(self.staging_path, rel_path)
+            if not os.path.exists(cache_dir):
+                os.makedirs(cache_dir)
+
+            # Although not really necessary to create "folders" (Azure Blob
+            # Storage has a flat namespace), do so for consistency with the
+            # regular file system. S3-style folders are marked by a trailing
+            # '/', which is assumed unnecessary for Azure blob storage.
+            # azure_dir = '%s/' % rel_path
+            # self._push_to_os(azure_dir, from_string='')
+            # If instructed, create the dataset in cache & in Azure
+            if not dir_only:
+                rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % obj.id)
+                open(os.path.join(self.staging_path, rel_path), 'w').close()
+                self._push_to_os(rel_path, from_string='')
+
+    def empty(self, obj, **kwargs):
+        if self.exists(obj, **kwargs):
+            # an object is empty when its size is zero (mirrors DiskObjectStore)
+            return bool(self.size(obj, **kwargs) == 0)
+        else:
+            raise ObjectNotFound( 'objectstore.empty, object does not exist: %s, kwargs: %s' % ( str( obj ), str( kwargs ) ) )
+
+    def size(self, obj, **kwargs):
+        rel_path = self._construct_path(obj, **kwargs)
+        if self._in_cache(rel_path):
+            try:
+                return os.path.getsize(self._get_cache_path(rel_path))
+            except OSError as ex:
+                log.info("Could not get size of file '%s' in local cache, will try Azure. Error: %s", rel_path, ex)
+        elif self.exists(obj, **kwargs):
+            return self._get_size_in_azure(rel_path)
+        log.warning("Did not find dataset '%s', returning 0 for size", rel_path)
+        return 0
+
+    def delete(self, obj, entire_dir=False, **kwargs):
+        rel_path = self._construct_path(obj, **kwargs)
+        extra_dir = kwargs.get('extra_dir', None)
+        base_dir = kwargs.get('base_dir', None)
+        dir_only = kwargs.get('dir_only', False)
+        obj_dir = kwargs.get('obj_dir', False)
+        try:
+            if base_dir and dir_only and obj_dir:
+                # Remove temporary data in JOB_WORK directory
+                shutil.rmtree(os.path.abspath(rel_path))
+                return True
+
+            # For the case of extra_files, because we don't have a reference to
+            # individual files/blobs we need to remove the entire directory structure
+            # with all the files in it. This is easy for the local file system,
+            # but requires iterating through each individual blob in Azure and deleting it.
+            if entire_dir and extra_dir:
+                shutil.rmtree(self._get_cache_path(rel_path))
+                blobs = self.service.list_blobs(self.container_name, prefix=rel_path)
+                for blob in blobs:
+                    log.debug("Deleting from Azure: %s", blob)
+                    self.service.delete_blob(self.container_name, blob.name)
+                return True
+            else:
+                # Delete from cache first
+                os.unlink(self._get_cache_path(rel_path))
+                # Delete from Azure as well
+                if self._in_azure(rel_path):
+                    log.debug("Deleting from Azure: %s", rel_path)
+                    self.service.delete_blob(self.container_name, rel_path)
+                    return True
+        except AzureHttpError:
+            log.exception("Could not delete blob '%s' from Azure", rel_path)
+        except OSError:
+            log.exception('%s delete error', self.get_filename(obj, **kwargs))
+        return False
+
+    def get_data(self, obj, start=0, count=-1, **kwargs):
+        rel_path = self._construct_path(obj, **kwargs)
+        # Check cache first and get file if not there
+        if not self._in_cache(rel_path):
+            self._pull_into_cache(rel_path)
+        # Read the file content from cache
+        data_file = open(self._get_cache_path(rel_path), 'r')
+        data_file.seek(start)
+        content = data_file.read(count)
+        data_file.close()
+        return content
+
+    def get_filename(self, obj, **kwargs):
+        rel_path = self._construct_path(obj, **kwargs)
+        base_dir = kwargs.get('base_dir', None)
+        dir_only = kwargs.get('dir_only', False)
+        obj_dir = kwargs.get('obj_dir', False)
+
+        # for JOB_WORK directory
+        if base_dir and dir_only and obj_dir:
+            return os.path.abspath(rel_path)
+
+        cache_path = self._get_cache_path(rel_path)
+        # Azure Blob Storage does not recognize directories as files so cannot check if those exist.
+        # So, if checking dir only, ensure given dir exists in cache and return
+        # the expected cache path.
+        # dir_only = kwargs.get('dir_only', False)
+        # if dir_only:
+        #     if not os.path.exists(cache_path):
+        #         os.makedirs(cache_path)
+        #     return cache_path
+        # Check if the file exists in the cache first
+        if self._in_cache(rel_path):
+            return cache_path
+        # Check if the file exists in persistent storage and, if it does, pull it into cache
+        elif self.exists(obj, **kwargs):
+            if dir_only:  # Directories do not get pulled into cache
+                return cache_path
+            else:
+                if self._pull_into_cache(rel_path):
+                    return cache_path
+        # For the case of retrieving a directory only, return the expected path
+        # even if it does not exist.
+        # if dir_only:
+        #     return cache_path
+        raise ObjectNotFound( 'objectstore.get_filename, no cache_path: %s, kwargs: %s' % ( str( obj ), str( kwargs ) ) )
+
+    def update_from_file(self, obj, file_name=None, create=False, **kwargs):
+        if create is True:
+            self.create(obj, **kwargs)
+        elif self.exists(obj, **kwargs):
+            rel_path = self._construct_path(obj, **kwargs)
+            # Chose whether to use the dataset file itself or an alternate file
+            if file_name:
+                source_file = os.path.abspath(file_name)
+                # Copy into cache
+                cache_file = self._get_cache_path(rel_path)
+                try:
+                    if source_file != cache_file:
+                        # FIXME? Should this be a `move`?
+                        shutil.copy2(source_file, cache_file)
+                    self._fix_permissions(cache_file)
+                except OSError:
+                    log.exception("Trouble copying source file '%s' to cache '%s'", source_file, cache_file)
+            else:
+                source_file = self._get_cache_path(rel_path)
+
+            self._push_to_os(rel_path, source_file)
+
+        else:
+            raise ObjectNotFound( 'objectstore.update_from_file, object does not exist: %s, kwargs: %s' % ( str( obj ), str( kwargs ) ) )
+
+    def get_object_url(self, obj, **kwargs):
+        if self.exists(obj, **kwargs):
+            rel_path = self._construct_path(obj, **kwargs)
+            try:
+                url = self.service.make_blob_url(container_name=self.container_name, blob_name=rel_path)
+                return url
+            except AzureHttpError:
+                log.exception("Trouble generating URL for dataset '%s'", rel_path)
+        return None
+
+    def get_store_usage_percent(self):
+        return 0.0
+
+    ##################
+    # Secret Methods #
+    ##################
+
+    def __cache_monitor(self):
+        time.sleep(2)  # Wait for things to load before starting the monitor
+        while self.running:
+            total_size = 0
+            # Is this going to be too expensive of an operation to be done frequently?
+            file_list = []
+            for dirpath, _, filenames in os.walk(self.staging_path):
+                for filename in filenames:
+                    filepath = os.path.join(dirpath, filename)
+                    file_size = os.path.getsize(filepath)
+                    total_size += file_size
+                    # Get the time given file was last accessed
+                    last_access_time = time.localtime(os.stat(filepath)[7])
+                    # Compose a tuple of the access time and the file path
+                    file_tuple = last_access_time, filepath, file_size
+                    file_list.append(file_tuple)
+            # Sort the file list (based on access time)
+            file_list.sort()
+            # Initiate cleaning once within 10% of the defined cache size?
+            cache_limit = self.cache_size * 0.9
+            if total_size > cache_limit:
+                log.info("Initiating cache cleaning: current cache size: %s; clean until smaller than: %s",
+                         convert_bytes(total_size), convert_bytes(cache_limit))
+                # How much to delete? If simply deleting up to the cache-10% limit,
+                # is likely to be deleting frequently and may run the risk of hitting
+                # the limit - maybe delete additional #%?
+                # For now, delete enough to leave at least 10% of the total cache free
+                delete_this_much = total_size - cache_limit
+                # Keep deleting datasets from file_list until deleted_amount does not
+                # exceed delete_this_much; start deleting from the front of the file list,
+                # which assumes the oldest files come first on the list.
+                deleted_amount = 0
+                for file_tuple in file_list:
+                    if deleted_amount < delete_this_much:
+                        deleted_amount += file_tuple[2]
+                        os.remove(file_tuple[1])
+                        # Debugging code for printing deleted files' stats
+                        # folder, file_name = os.path.split(file_tuple[1])
+                        # file_date = time.strftime("%m/%d/%y %H:%M:%S", file_tuple[0])
+                        # log.debug("%-25s %s, size %s (deleted %s/%s)" \
+                        #     % (file_name, convert_bytes(file_tuple[2]), file_date, \
+                        #     convert_bytes(deleted_amount), convert_bytes(delete_this_much)))
+                    else:
+                        log.debug("Cache cleaning done. Total space freed: %s", convert_bytes(deleted_amount))
+                        break
+
+            self.sleeper.sleep(30)  # Test cache size every 30 seconds?
diff --git a/lib/galaxy/objectstore/pulsar.py b/lib/galaxy/objectstore/pulsar.py
new file mode 100644
index 0000000..f4be174
--- /dev/null
+++ b/lib/galaxy/objectstore/pulsar.py
@@ -0,0 +1,79 @@
+from __future__ import absolute_import  # Need to import pulsar_client absolutely.
+from ..objectstore import ObjectStore
+
+try:
+    from pulsar.client.manager import ObjectStoreClientManager
+except ImportError:
+    ObjectStoreClientManager = None
+
+
+class PulsarObjectStore(ObjectStore):
+    """
+    Object store implementation that delegates to a remote Pulsar server.
+
+    This may be more aspirational than practical for now. It would be good to
+    get Galaxy to a point where a handler thread could be set up that doesn't
+    attempt to access the disk files returned by (this) object store - just
+    passing them along to Pulsar unmodified. That modification - along with
+    this implementation and Pulsar job destinations - would then allow Galaxy
+    to fully manage jobs on remote servers with completely different mount
+    points.
+
+    This implementation should be considered beta and may be dropped from
+    Galaxy at some future point or significantly modified.
+    """
+
+    def __init__(self, config, config_xml):
+        self.pulsar_client = self.__build_pulsar_client(config_xml)
+
+    def exists(self, obj, **kwds):
+        return self.pulsar_client.exists(**self.__build_kwds(obj, **kwds))
+
+    def file_ready(self, obj, **kwds):
+        return self.pulsar_client.file_ready(**self.__build_kwds(obj, **kwds))
+
+    def create(self, obj, **kwds):
+        return self.pulsar_client.create(**self.__build_kwds(obj, **kwds))
+
+    def empty(self, obj, **kwds):
+        return self.pulsar_client.empty(**self.__build_kwds(obj, **kwds))
+
+    def size(self, obj, **kwds):
+        return self.pulsar_client.size(**self.__build_kwds(obj, **kwds))
+
+    def delete(self, obj, **kwds):
+        return self.pulsar_client.delete(**self.__build_kwds(obj, **kwds))
+
+    # TODO: Optimize get_data.
+    def get_data(self, obj, **kwds):
+        return self.pulsar_client.get_data(**self.__build_kwds(obj, **kwds))
+
+    def get_filename(self, obj, **kwds):
+        return self.pulsar_client.get_filename(**self.__build_kwds(obj, **kwds))
+
+    def update_from_file(self, obj, **kwds):
+        return self.pulsar_client.update_from_file(**self.__build_kwds(obj, **kwds))
+
+    def get_store_usage_percent(self):
+        return self.pulsar_client.get_store_usage_percent()
+
+    def get_object_url(self, obj, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+        return None
+
+    def __build_kwds(self, obj, **kwds):
+        kwds['object_id'] = obj.id
+        return kwds
+
+    def __build_pulsar_client(self, config_xml):
+        if ObjectStoreClientManager is None:
+            raise Exception("Pulsar client code not available, cannot use this module.")
+        url = config_xml.get("url")
+        private_token = config_xml.get("private_token", None)
+        transport = config_xml.get("transport", None)
+        manager_options = dict(transport=transport)
+        client_options = dict(url=url, private_token=private_token)
+        pulsar_client = ObjectStoreClientManager(**manager_options).get_client(client_options)
+        return pulsar_client
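+    # Hypothetical XML consumed above ('url', 'private_token' and 'transport'
+    # are the attributes the code reads; the values are assumptions):
+    #   <object_store type="pulsar" url="https://pulsar.example.org/"
+    #                 private_token="changeme" transport="curl"/>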
+
+    def shutdown(self):
+        pass
diff --git a/lib/galaxy/objectstore/rods.py b/lib/galaxy/objectstore/rods.py
new file mode 100644
index 0000000..a6e70e7
--- /dev/null
+++ b/lib/galaxy/objectstore/rods.py
@@ -0,0 +1,349 @@
+"""
+Object Store plugin for the Integrated Rule-Oriented Data Store (iRODS)
+
+The module is named rods to avoid conflicting with the PyRods module, irods
+"""
+
+import logging
+import os
+import time
+
+from posixpath import basename as path_basename
+from posixpath import dirname as path_dirname
+from posixpath import join as path_join
+
+from galaxy.exceptions import ObjectInvalid, ObjectNotFound
+from galaxy.util import safe_relpath
+
+from ..objectstore import DiskObjectStore, local_extra_dirs
+
+try:
+    import irods
+except ImportError:
+    irods = None
+
+
+IRODS_IMPORT_MESSAGE = ('The Python irods package is required to use this '
+                        'feature, please install it')
+
+log = logging.getLogger( __name__ )
+
+
+class IRODSObjectStore( DiskObjectStore ):
+    """
+    Galaxy object store based on iRODS
+    """
+    def __init__( self, config, file_path=None, extra_dirs=None ):
+        super( IRODSObjectStore, self ).__init__( config, file_path=file_path, extra_dirs=extra_dirs )
+        assert irods is not None, IRODS_IMPORT_MESSAGE
+        self.cache_path = config.object_store_cache_path
+        self.default_resource = config.irods_default_resource or None
+
+        # Connect to iRODS (AssertionErrors will be raised if anything goes wrong)
+        self.rods_env, self.rods_conn = rods_connect()
+
+        # if the root collection path in the config is unset or relative, try to use a sensible default
+        if config.irods_root_collection_path is None or ( config.irods_root_collection_path is not None and not config.irods_root_collection_path.startswith( '/' ) ):
+            rods_home = self.rods_env.rodsHome
+            assert rods_home != '', "Unable to initialize iRODS Object Store: rodsHome cannot be determined and irods_root_collection_path in Galaxy config is unset or not absolute."
+            if config.irods_root_collection_path is None:
+                self.root_collection_path = path_join( rods_home, 'galaxy_data' )
+            else:
+                self.root_collection_path = path_join( rods_home, config.irods_root_collection_path )
+        else:
+            self.root_collection_path = config.irods_root_collection_path
+
+        # will return a collection object regardless of whether it exists
+        self.root_collection = irods.irodsCollection( self.rods_conn, self.root_collection_path )
+
+        if self.root_collection.getId() == -1:
+            log.warning( "iRODS root collection does not exist, will attempt to create: %s", self.root_collection_path )
+            self.root_collection.upCollection()
+            assert self.root_collection.createCollection( os.path.basename( self.root_collection_path ) ) == 0, "iRODS root collection creation failed: %s" % self.root_collection_path
+            self.root_collection = irods.irodsCollection( self.rods_conn, self.root_collection_path )
+            assert self.root_collection.getId() != -1, "iRODS root collection creation claimed success but still does not exist"
+
+        if self.default_resource is None:
+            self.default_resource = self.rods_env.rodsDefResource
+
+        log.info( "iRODS data for this instance will be stored in collection: %s, resource: %s", self.root_collection_path, self.default_resource )
+
+    def __get_rods_path( self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, strip_dat=True, **kwargs ):
+        # extra_dir should never be constructed from provided data but just
+        # make sure there are no shenanigans afoot
+        if extra_dir and extra_dir != os.path.normpath(extra_dir):
+            log.warning('extra_dir is not normalized: %s', extra_dir)
+            raise ObjectInvalid("The requested object is invalid")
+        # ensure that any parent directory references in alt_name would not
+        # result in a path not contained in the directory path constructed here
+        if alt_name:
+            if not safe_relpath(alt_name):
+                log.warning('alt_name would locate path outside dir: %s', alt_name)
+                raise ObjectInvalid("The requested object is invalid")
+            # alt_name can contain parent directory references, but iRODS will
+            # not follow them, so if they are valid we normalize them out
+            alt_name = os.path.normpath(alt_name)
+        path = ""
+        if extra_dir is not None:
+            path = extra_dir
+
+        # extra_dir_at_root is ignored - since the iRODS plugin does not use
+        # the directory hash, there is only one level of subdirectory.
+
+        if not dir_only:
+            # the .dat extension is stripped when stored in iRODS
+            # TODO: is the strip_dat kwarg the best way to implement this?
+            if strip_dat and alt_name and alt_name.endswith( '.dat' ):
+                alt_name = os.path.splitext( alt_name )[0]
+            default_name = 'dataset_%s' % obj.id
+            if not strip_dat:
+                default_name += '.dat'
+            path = path_join( path, alt_name if alt_name else default_name )
+
+        path = path_join( self.root_collection_path, path )
+        return path
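+    # Naming sketch: obj.id == 42 maps to '<root_collection_path>/dataset_42'
+    # (the '.dat' suffix is stripped); with strip_dat=False it would be
+    # '<root_collection_path>/dataset_42.dat'.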
+
+    def __get_cache_path( self, obj, **kwargs ):
+        # FIXME: does not handle collections
+        # FIXME: collisions could occur here
+        return os.path.join( self.cache_path, path_basename( self.__get_rods_path( obj, strip_dat=False, **kwargs ) ) )
+
+    def __clean_cache_entry( self, obj, **kwargs ):
+        # FIXME: does not handle collections
+        try:
+            os.unlink( self.__get_cache_path( obj, **kwargs ) )
+        except OSError:
+            # it is expected that we'll call this method a lot regardless of
+            # whether we think the cached file exists
+            pass
+
+    def __get_rods_handle( self, obj, mode='r', **kwargs ):
+        if kwargs.get( 'dir_only', False ):
+            return irods.irodsCollection( self.rods_conn, self.__get_rods_path( obj, **kwargs ) )
+        else:
+            return irods.irodsOpen( self.rods_conn, self.__get_rods_path( obj, **kwargs ), mode )
+
+    def __mkcolls( self, rods_path ):
+        """
+        An os.makedirs() for iRODS collections.  `rods_path` is the desired collection to create.
+        """
+        assert rods_path.startswith( self.root_collection_path + '/' ), '__mkcolls(): Creating collections outside the root collection is not allowed (requested path was: %s)' % rods_path
+        mkcolls = []
+        c = irods.irodsCollection( self.rods_conn, rods_path )
+        while c.getId() == -1:
+            assert c.getCollName().startswith( self.root_collection_path + '/' ), '__mkcolls(): Attempted to move above the root collection: %s' % c.getCollName()
+            mkcolls.append( c.getCollName() )
+            c.upCollection()
+        for collname in reversed( mkcolls ):
+            log.debug( 'Creating collection %s' % collname )
+            ci = irods.collInp_t()
+            ci.collName = collname
+            status = irods.rcCollCreate( self.rods_conn, ci )
+            assert status == 0, '__mkcolls(): Failed to create collection: %s' % collname
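+    # e.g. __mkcolls('<root>/a/b') records the missing collections while
+    # walking up from 'b', then creates them top-down with rcCollCreate().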
+
+    @local_extra_dirs
+    def exists( self, obj, **kwargs ):
+        doi = irods.dataObjInp_t()
+        doi.objPath = self.__get_rods_path( obj, **kwargs )
+        log.debug( 'exists(): checking: %s', doi.objPath )
+        return irods.rcObjStat( self.rods_conn, doi ) is not None
+
+    @local_extra_dirs
+    def create(self, obj, **kwargs):
+        if not self.exists( obj, **kwargs ):
+            rods_path = self.__get_rods_path( obj, **kwargs )
+            log.debug( 'create(): %s', rods_path )
+            dir_only = kwargs.get( 'dir_only', False )
+            # short circuit collection creation since most of the time it will
+            # be the root collection which already exists
+            collection_path = rods_path if dir_only else path_dirname( rods_path )
+            if collection_path != self.root_collection_path:
+                self.__mkcolls( collection_path )
+            if not dir_only:
+                # rcDataObjCreate is used instead of the irodsOpen wrapper so
+                # that we can prevent overwriting
+                doi = irods.dataObjInp_t()
+                doi.objPath = rods_path
+                doi.createMode = 0o640
+                doi.dataSize = 0  # 0 actually means "unknown", although literally 0 would be preferable
+                irods.addKeyVal( doi.condInput, irods.DEST_RESC_NAME_KW, self.default_resource )
+                status = irods.rcDataObjCreate( self.rods_conn, doi )
+                assert status >= 0, 'create(): rcDataObjCreate() failed: %s: %s: %s' % ( rods_path, status, irods.strerror( status ) )
+
+    @local_extra_dirs
+    def empty( self, obj, **kwargs ):
+        assert 'dir_only' not in kwargs, 'empty(): `dir_only` parameter is invalid here'
+        h = self.__get_rods_handle( obj, **kwargs )
+        try:
+            return h.getSize() == 0
+        except AttributeError:
+            # h is None
+            raise ObjectNotFound()
+
+    def size( self, obj, **kwargs ):
+        assert 'dir_only' not in kwargs, 'size(): `dir_only` parameter is invalid here'
+        h = self.__get_rods_handle( obj, **kwargs )
+        try:
+            return h.getSize()
+        except AttributeError:
+            # h is None
+            return 0
+
+    @local_extra_dirs
+    def delete( self, obj, entire_dir=False, **kwargs ):
+        assert 'dir_only' not in kwargs, 'delete(): `dir_only` parameter is invalid here'
+        rods_path = self.__get_rods_path( obj, **kwargs )
+        # __get_rods_path prepends self.root_collection_path but we are going
+        # to ensure that it's valid anyway for safety's sake
+        assert rods_path.startswith( self.root_collection_path + '/' ), 'ERROR: attempt to delete object outside root collection (path was: %s)' % rods_path
+        if entire_dir:
+            # TODO
+            raise NotImplementedError()
+        h = self.__get_rods_handle( obj, **kwargs )
+        try:
+            # note: PyRods' irodsFile.delete() does not set force
+            status = h.delete()
+            assert status == 0, '%d: %s' % ( status, irods.strerror( status ) )
+            return True
+        except AttributeError:
+            log.warning( 'delete(): operation failed: object does not exist: %s', rods_path )
+        except AssertionError as e:
+            # delete() does not raise on deletion failure
+            log.error( 'delete(): operation failed: %s', e )
+        finally:
+            # remove the cached entry (finally is executed even when the try
+            # contains a return)
+            self.__clean_cache_entry( obj, **kwargs )
+        return False
+
+    @local_extra_dirs
+    def get_data( self, obj, start=0, count=-1, **kwargs ):
+        log.debug( 'get_data(): %s %s', obj.__class__.__name__, obj.id )
+        h = self.__get_rods_handle( obj, **kwargs )
+        try:
+            h.seek( start )
+        except AttributeError:
+            raise ObjectNotFound()
+        if count == -1:
+            return h.read()
+        else:
+            return h.read( count )
+        # TODO: make sure implicit close is okay, DiskObjectStore actually
+        # reads data into a var, closes, and returns the var
+
+    @local_extra_dirs
+    def get_filename( self, obj, **kwargs ):
+        log.debug( "get_filename(): called on %s %s. For better performance, avoid this method and use get_data() instead.", obj.__class__.__name__, obj.id )
+        cached_path = self.__get_cache_path( obj, **kwargs )
+
+        if not self.exists( obj, **kwargs ):
+            raise ObjectNotFound()
+
+        # TODO: implement or define whether dir_only is valid
+        if 'dir_only' in kwargs:
+            raise NotImplementedError()
+
+        # cache hit
+        if os.path.exists( cached_path ):
+            return os.path.abspath( cached_path )
+
+        # cache miss
+        # TODO: thread this
+        incoming_path = os.path.join( os.path.dirname( cached_path ), "__incoming_%s" % os.path.basename( cached_path ) )
+        doi = irods.dataObjInp_t()
+        doi.objPath = self.__get_rods_path( obj, **kwargs )
+        doi.dataSize = 0  # TODO: does this affect performance? should we get size?
+        doi.numThreads = 0
+        # TODO: might want to VERIFY_CHKSUM_KW
+        log.debug( 'get_filename(): caching %s to %s', doi.objPath, incoming_path )
+
+        # do the iget
+        status = irods.rcDataObjGet( self.rods_conn, doi, incoming_path )
+
+        # if incoming already exists, we'll wait for another process or thread
+        # to finish caching
+        if status != irods.OVERWRITE_WITHOUT_FORCE_FLAG:
+            assert status == 0, 'get_filename(): iget %s failed (%s): %s' % ( doi.objPath, status, irods.strerror( status ) )
+            # POSIX rename is atomic
+            # TODO: rename without clobbering
+            os.rename( incoming_path, cached_path )
+            log.debug( 'get_filename(): cached %s to %s', doi.objPath, cached_path )
+
+        # another process or thread is caching, wait for it
+        while not os.path.exists( cached_path ):
+            # TODO: force restart after mod time > some configurable, or
+            # otherwise deal with this potential deadlock and interrupted
+            # transfers
+            time.sleep( 5 )
+            log.debug( "get_filename(): waiting on incoming '%s' for %s %s", incoming_path, obj.__class__.__name__, obj.id )
+
+        return os.path.abspath( cached_path )
+
+    @local_extra_dirs
+    def update_from_file(self, obj, file_name=None, create=False, **kwargs):
+        assert 'dir_only' not in kwargs, 'update_from_file(): `dir_only` parameter is invalid here'
+
+        # do not create if not requested; the iput below creates when necessary
+        if not create and not self.exists( obj, **kwargs ):
+            raise ObjectNotFound()
+
+        if file_name is None:
+            file_name = self.__get_cache_path( obj, **kwargs )
+
+        # put will create if necessary
+        doi = irods.dataObjInp_t()
+        doi.objPath = self.__get_rods_path( obj, **kwargs )
+        doi.createMode = 0o640
+        doi.dataSize = os.stat( file_name ).st_size
+        doi.numThreads = 0
+        irods.addKeyVal( doi.condInput, irods.DEST_RESC_NAME_KW, self.default_resource )
+        irods.addKeyVal( doi.condInput, irods.FORCE_FLAG_KW, '' )
+        # TODO: might want to VERIFY_CHKSUM_KW
+        log.debug( 'update_from_file(): updating %s to %s', file_name, doi.objPath )
+
+        # do the iput
+        status = irods.rcDataObjPut( self.rods_conn, doi, file_name )
+        assert status == 0, 'update_from_file(): iput %s failed (%s): %s' % ( doi.objPath, status, irods.strerror( status ) )
+
+    def get_object_url(self, obj, **kwargs):
+        return None
+
+    def get_store_usage_percent(self):
+        return 0.0
+
+
+# monkeypatch a strerror method into the irods module
+def _rods_strerror( errno ):
+    """
+    The missing `strerror` for iRODS error codes
+    """
+    if not hasattr( irods, '__rods_strerror_map' ):
+        irods.__rods_strerror_map = {}
+        for name in dir( irods ):
+            v = getattr( irods, name )
+            if type( v ) == int and v < 0:
+                irods.__rods_strerror_map[ v ] = name
+    return irods.__rods_strerror_map.get( errno, 'GALAXY_NO_ERRNO_MAPPING_FOUND' )
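+# Illustrative: if the irods module defines USER_FILE_DOES_NOT_EXIST = -510002,
+# then irods.strerror(-510002) returns 'USER_FILE_DOES_NOT_EXIST'.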
+
+
+if irods is not None:
+    irods.strerror = _rods_strerror
+
+
+def rods_connect():
+    """
+    A basic iRODS connection mechanism that connects using the current iRODS
+    environment
+    """
+    status, env = irods.getRodsEnv()
+    assert status == 0, 'connect(): getRodsEnv() failed (%s): %s' % ( status, irods.strerror( status ) )
+    conn, err = irods.rcConnect( env.rodsHost,
+                                 env.rodsPort,
+                                 env.rodsUserName,
+                                 env.rodsZone )
+    assert err.status == 0, 'connect(): rcConnect() failed (%s): %s' % ( err.status, err.msg )
+    status, pw = irods.obfGetPw()
+    assert status == 0, 'connect(): getting password with obfGetPw() failed (%s): %s' % ( status, irods.strerror( status ) )
+    status = irods.clientLoginWithObfPassword( conn, pw )
+    assert status == 0, 'connect(): logging in with clientLoginWithObfPassword() failed (%s): %s' % ( status, irods.strerror( status ) )
+    return env, conn
diff --git a/lib/galaxy/objectstore/s3.py b/lib/galaxy/objectstore/s3.py
new file mode 100644
index 0000000..919d9d1
--- /dev/null
+++ b/lib/galaxy/objectstore/s3.py
@@ -0,0 +1,638 @@
+"""
+Object Store plugin for the Amazon Simple Storage Service (S3)
+"""
+
+import logging
+import multiprocessing
+import os
+import shutil
+import subprocess
+import threading
+import time
+
+from datetime import datetime
+
+from galaxy.exceptions import ObjectInvalid, ObjectNotFound
+from galaxy.util import (
+    directory_hash_id,
+    safe_relpath,
+    string_as_bool,
+    umask_fix_perms,
+)
+from galaxy.util.sleeper import Sleeper
+
+from .s3_multipart_upload import multipart_upload
+from ..objectstore import convert_bytes, ObjectStore
+
+try:
+    # Imports are done this way to allow objectstore code to be used outside of Galaxy.
+    import boto
+
+    from boto.exception import S3ResponseError
+    from boto.s3.key import Key
+    from boto.s3.connection import S3Connection
+except ImportError:
+    boto = None
+
+NO_BOTO_ERROR_MESSAGE = ("S3/Swift object store configured, but no boto dependency available. "
+                         "Please install and properly configure boto or modify object store configuration.")
+
+log = logging.getLogger( __name__ )
+logging.getLogger('boto').setLevel(logging.INFO)  # Otherwise boto is quite noisy
+
+
+class S3ObjectStore(ObjectStore):
+    """
+    Object store that stores objects as items in an AWS S3 bucket. A local
+    cache exists that is used as an intermediate location for files between
+    Galaxy and S3.
+    """
+    def __init__(self, config, config_xml):
+        if boto is None:
+            raise Exception(NO_BOTO_ERROR_MESSAGE)
+        super(S3ObjectStore, self).__init__(config)
+        self.staging_path = self.config.file_path
+        self.transfer_progress = 0
+        self._parse_config_xml(config_xml)
+        self._configure_connection()
+        self.bucket = self._get_bucket(self.bucket)
+        # Clean cache only if value is set in galaxy.ini
+        if self.cache_size != -1:
+            # Convert GBs to bytes for comparison
+            self.cache_size = self.cache_size * 1073741824
+            # Helper for interruptable sleep
+            self.sleeper = Sleeper()
+            self.cache_monitor_thread = threading.Thread(target=self.__cache_monitor)
+            self.cache_monitor_thread.start()
+            log.info("Cache cleaner manager started")
+        # Test if 'axel' is available for parallel download and pull the key into cache
+        try:
+            subprocess.call('axel')
+            self.use_axel = True
+        except OSError:
+            self.use_axel = False
+
+    def _configure_connection(self):
+        log.debug("Configuring S3 Connection")
+        self.conn = S3Connection(self.access_key, self.secret_key)
+
+    def _parse_config_xml(self, config_xml):
+        try:
+            a_xml = config_xml.findall('auth')[0]
+            self.access_key = a_xml.get('access_key')
+            self.secret_key = a_xml.get('secret_key')
+            b_xml = config_xml.findall('bucket')[0]
+            self.bucket = b_xml.get('name')
+            self.use_rr = string_as_bool(b_xml.get('use_reduced_redundancy', "False"))
+            self.max_chunk_size = int(b_xml.get('max_chunk_size', 250))
+            cn_xml = config_xml.findall('connection')
+            if not cn_xml:
+                cn_xml = {}
+            else:
+                cn_xml = cn_xml[0]
+            self.host = cn_xml.get('host', None)
+            self.port = int(cn_xml.get('port', 6000))
+            self.multipart = string_as_bool(cn_xml.get('multipart', 'True'))
+            self.is_secure = string_as_bool(cn_xml.get('is_secure', 'True'))
+            self.conn_path = cn_xml.get('conn_path', '/')
+            c_xml = config_xml.findall('cache')[0]
+            self.cache_size = float(c_xml.get('size', -1))
+            self.staging_path = c_xml.get('path', self.config.object_store_cache_path)
+
+            for d_xml in config_xml.findall('extra_dir'):
+                self.extra_dirs[d_xml.get('type')] = d_xml.get('path')
+
+            log.debug("Object cache dir:    %s", self.staging_path)
+            log.debug("       job work dir: %s", self.extra_dirs['job_work'])
+
+            # for multipart upload
+            self.s3server = {'access_key': self.access_key,
+                             'secret_key': self.secret_key,
+                             'is_secure': self.is_secure,
+                             'max_chunk_size': self.max_chunk_size,
+                             'host': self.host,
+                             'port': self.port,
+                             'use_rr': self.use_rr,
+                             'conn_path': self.conn_path}
+        except Exception:
+            # Toss it back up after logging, we can't continue loading at this point.
+            log.exception("Malformed ObjectStore Configuration XML -- unable to continue")
+            raise
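+    # Hypothetical XML sketch matching the parser above (element and attribute
+    # names are what the code reads; the values are assumptions):
+    #   <object_store type="s3">
+    #       <auth access_key="..." secret_key="..." />
+    #       <bucket name="galaxy-bucket" use_reduced_redundancy="False" max_chunk_size="250"/>
+    #       <connection host="s3.amazonaws.com" port="6000" is_secure="True" conn_path="/"/>
+    #       <cache path="database/object_store_cache" size="100"/>
+    #       <extra_dir type="job_work" path="database/job_working_directory_s3"/>
+    #   </object_store>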
+
+    def __cache_monitor(self):
+        time.sleep(2)  # Wait for things to load before starting the monitor
+        while self.running:
+            total_size = 0
+            # Is this going to be too expensive of an operation to be done frequently?
+            file_list = []
+            for dirpath, _, filenames in os.walk(self.staging_path):
+                for filename in filenames:
+                    filepath = os.path.join(dirpath, filename)
+                    file_size = os.path.getsize(filepath)
+                    total_size += file_size
+                    # Get the time given file was last accessed
+                    last_access_time = time.localtime(os.stat(filepath)[7])
+                    # Compose a tuple of the access time and the file path
+                    file_tuple = last_access_time, filepath, file_size
+                    file_list.append(file_tuple)
+            # Sort the file list (based on access time)
+            file_list.sort()
+            # Initiate cleaning once within 10% of the defined cache size?
+            cache_limit = self.cache_size * 0.9
+            if total_size > cache_limit:
+                log.info("Initiating cache cleaning: current cache size: %s; clean until smaller than: %s",
+                         convert_bytes(total_size), convert_bytes(cache_limit))
+                # How much to delete? If simply deleting up to the cache-10% limit,
+                # is likely to be deleting frequently and may run the risk of hitting
+                # the limit - maybe delete additional #%?
+                # For now, delete enough to leave at least 10% of the total cache free
+                delete_this_much = total_size - cache_limit
+                self.__clean_cache(file_list, delete_this_much)
+            self.sleeper.sleep(30)  # Test cache size every 30 seconds?
+
+    def __clean_cache(self, file_list, delete_this_much):
+        """ Keep deleting files from the file_list until the size of the deleted
+        files is greater than the value in delete_this_much parameter.
+
+        :type file_list: list
+        :param file_list: List of candidate files that can be deleted. This method
+            will start deleting files from the beginning of the list so the list
+            should be sorted accordingly. The list must contain 3-element tuples,
+            positioned as follows: position 0 holds the file's last-accessed
+            timestamp (as time.struct_time), position 1 holds the file path, and
+            position 2 holds the file size (e.g., (<access time>,
+            '/mnt/data/dataset_1.dat', 472394))
+
+        :type delete_this_much: int
+        :param delete_this_much: Total size of files, in bytes, that should be deleted.
+        """
+        # Keep deleting datasets from file_list until deleted_amount does not
+        # exceed delete_this_much; start deleting from the front of the file list,
+        # which assumes the oldest files come first on the list.
+        deleted_amount = 0
+        for file_tuple in file_list:
+            if deleted_amount < delete_this_much:
+                deleted_amount += file_tuple[2]
+                os.remove(file_tuple[1])
+                # Debugging code for printing deleted files' stats
+                # folder, file_name = os.path.split(file_tuple[1])
+                # file_date = time.strftime("%m/%d/%y %H:%M:%S", file_tuple[0])
+                # log.debug("%-25s %s, size %s (deleted %s/%s)" \
+                #     % (file_name, convert_bytes(file_tuple[2]), file_date, \
+                #     convert_bytes(deleted_amount), convert_bytes(delete_this_much)))
+            else:
+                log.debug("Cache cleaning done. Total space freed: %s", convert_bytes(deleted_amount))
+                return
+
+    def _get_bucket(self, bucket_name):
+        """ Sometimes a handle to a bucket is not established right away so try
+        it a few times. Raise error is connection is not established. """
+        for i in range(5):
+            try:
+                bucket = self.conn.get_bucket(bucket_name)
+                log.debug("Using cloud object store with bucket '%s'", bucket.name)
+                return bucket
+            except S3ResponseError:
+                try:
+                    log.debug("Bucket not found, creating s3 bucket with handle '%s'", bucket_name)
+                    self.conn.create_bucket(bucket_name)
+                except S3ResponseError:
+                    log.exception("Could not get bucket '%s', attempt %s/5", bucket_name, i + 1)
+                    time.sleep(2)
+        # All attempts have been exhausted and a connection was not established;
+        # raise an error (boto's S3ResponseError requires a status and reason)
+        raise S3ResponseError(403, "Could not get or create bucket '%s'" % bucket_name)
+
+    def _fix_permissions(self, rel_path):
+        """ Set permissions on rel_path"""
+        for basedir, _, files in os.walk(rel_path):
+            umask_fix_perms(basedir, self.config.umask, 0o777, self.config.gid)
+            for filename in files:
+                path = os.path.join(basedir, filename)
+                # Ignore symlinks
+                if os.path.islink(path):
+                    continue
+                umask_fix_perms(path, self.config.umask, 0o666, self.config.gid)
+
+    def _construct_path(self, obj, base_dir=None, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, obj_dir=False, **kwargs):
+        # extra_dir should never be constructed from provided data but just
+        # make sure there are no shenanigans afoot
+        if extra_dir and extra_dir != os.path.normpath(extra_dir):
+            log.warning('extra_dir is not normalized: %s', extra_dir)
+            raise ObjectInvalid("The requested object is invalid")
+        # ensure that any parent directory references in alt_name would not
+        # result in a path not contained in the directory path constructed here
+        if alt_name:
+            if not safe_relpath(alt_name):
+                log.warning('alt_name would locate path outside dir: %s', alt_name)
+                raise ObjectInvalid("The requested object is invalid")
+            # alt_name can contain parent directory references, but S3 will not
+            # follow them, so if they are valid we normalize them out
+            alt_name = os.path.normpath(alt_name)
+        rel_path = os.path.join(*directory_hash_id(obj.id))
+        if extra_dir is not None:
+            if extra_dir_at_root:
+                rel_path = os.path.join(extra_dir, rel_path)
+            else:
+                rel_path = os.path.join(rel_path, extra_dir)
+
+        # for JOB_WORK directory
+        if obj_dir:
+            rel_path = os.path.join(rel_path, str(obj.id))
+        if base_dir:
+            base = self.extra_dirs.get(base_dir)
+            return os.path.join(base, rel_path)
+
+        # S3 folders are marked by having trailing '/' so add it now
+        rel_path = '%s/' % rel_path
+
+        if not dir_only:
+            rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % obj.id)
+        return rel_path
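+    # Illustrative results (assuming galaxy.util.directory_hash_id semantics;
+    # the ids are examples only):
+    #   obj.id == 1     -> '000/dataset_1.dat'
+    #   obj.id == 12345 -> '012/dataset_12345.dat'
+    #   dir_only=True   -> '000/' (a trailing slash marks an S3 "folder")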
+
+    def _get_cache_path(self, rel_path):
+        return os.path.abspath(os.path.join(self.staging_path, rel_path))
+
+    def _get_transfer_progress(self):
+        return self.transfer_progress
+
+    def _get_size_in_s3(self, rel_path):
+        try:
+            key = self.bucket.get_key(rel_path)
+            if key:
+                return key.size
+        except S3ResponseError:
+            log.exception("Could not get size of key '%s' from S3", rel_path)
+        # -1 signals that the size could not be determined
+        return -1
+
+    def _key_exists(self, rel_path):
+        exists = False
+        try:
+            # A hackish way of testing if the rel_path is a folder vs a file
+            is_dir = rel_path[-1] == '/'
+            if is_dir:
+                keyresult = self.bucket.get_all_keys(prefix=rel_path)
+                exists = len(keyresult) > 0
+            else:
+                key = Key(self.bucket, rel_path)
+                exists = key.exists()
+        except S3ResponseError:
+            log.exception("Trouble checking existence of S3 key '%s'", rel_path)
+            return False
+        if rel_path[0] == '/':
+            raise ValueError("rel_path must be relative, got absolute path: '%s'" % rel_path)
+        return exists
+
+    def _in_cache(self, rel_path):
+        """ Check if the given dataset is in the local cache and return True if so. """
+        # log.debug("------ Checking cache for rel_path %s" % rel_path)
+        cache_path = self._get_cache_path(rel_path)
+        return os.path.exists(cache_path)
+        # TODO: Part of checking if a file is in cache should be to ensure the
+        # size of the cached file matches that on S3. Once the upload tool
+        # explicitly creates the dataset, this check should be implemented; in
+        # the meantime, it does not look likely to be implementable reliably.
+        # if os.path.exists(cache_path):
+        #     # print "***1 %s exists" % cache_path
+        #     if self._key_exists(rel_path):
+        #         # print "***2 %s exists in S3" % rel_path
+        #         # Make sure the size in cache is available in its entirety
+        #         # print "File '%s' cache size: %s, S3 size: %s" % (cache_path, os.path.getsize(cache_path), self._get_size_in_s3(rel_path))
+        #         if os.path.getsize(cache_path) == self._get_size_in_s3(rel_path):
+        #             # print "***2.1 %s exists in S3 and the size is the same as in cache (in_cache=True)" % rel_path
+        #             exists = True
+        #         else:
+        #             # print "***2.2 %s exists but differs in size from cache (in_cache=False)" % cache_path
+        #             exists = False
+        #     else:
+        #         # Although not perfect decision making, this most likely means
+        #         # that the file is currently being uploaded
+        #         # print "***3 %s found in cache but not in S3 (in_cache=True)" % cache_path
+        #         exists = True
+        # else:
+        #     return False
+
+    def _pull_into_cache(self, rel_path):
+        # Ensure the cache directory structure exists (e.g., dataset_#_files/)
+        rel_path_dir = os.path.dirname(rel_path)
+        if not os.path.exists(self._get_cache_path(rel_path_dir)):
+            os.makedirs(self._get_cache_path(rel_path_dir))
+        # Now pull in the file
+        file_ok = self._download(rel_path)
+        self._fix_permissions(self._get_cache_path(rel_path_dir))
+        return file_ok
+
+    def _transfer_cb(self, complete, total):
+        self.transfer_progress += 10
+
+    def _download(self, rel_path):
+        try:
+            log.debug("Pulling key '%s' into cache to %s", rel_path, self._get_cache_path(rel_path))
+            key = self.bucket.get_key(rel_path)
+            # Test if cache is large enough to hold the new file
+            if self.cache_size > 0 and key.size > self.cache_size:
+                log.critical("File %s is larger (%s) than the cache size (%s). Cannot download.",
+                             rel_path, key.size, self.cache_size)
+                return False
+            if self.use_axel:
+                log.debug("Parallel pulling key '%s' into cache to %s", rel_path, self._get_cache_path(rel_path))
+                ncores = multiprocessing.cpu_count()
+                url = key.generate_url(7200)
+                ret_code = subprocess.call(['axel', '-a', '-n', str(ncores), url])
+                if ret_code == 0:
+                    return True
+            else:
+                log.debug("Pulled key '%s' into cache to %s", rel_path, self._get_cache_path(rel_path))
+                self.transfer_progress = 0  # Reset transfer progress counter
+                key.get_contents_to_filename(self._get_cache_path(rel_path), cb=self._transfer_cb, num_cb=10)
+                return True
+        except S3ResponseError:
+            log.exception("Problem downloading key '%s' from S3 bucket '%s'", rel_path, self.bucket.name)
+        return False
+
+    def _push_to_os(self, rel_path, source_file=None, from_string=None):
+        """
+        Push the file pointed to by ``rel_path`` to the object store naming the key
+        ``rel_path``. If ``source_file`` is provided, push that file instead while
+        still using ``rel_path`` as the key name.
+        If ``from_string`` is provided, set contents of the file to the value of
+        the string.
+        """
+        try:
+            source_file = source_file if source_file else self._get_cache_path(rel_path)
+            if os.path.exists(source_file):
+                key = Key(self.bucket, rel_path)
+                if os.path.getsize(source_file) == 0 and key.exists():
+                    log.debug("Wanted to push file '%s' to S3 key '%s' but its size is 0; skipping.", source_file, rel_path)
+                    return True
+                if from_string:
+                    key.set_contents_from_string(from_string, reduced_redundancy=self.use_rr)
+                    log.debug("Pushed data from string '%s' to key '%s'", from_string, rel_path)
+                else:
+                    start_time = datetime.now()
+                    log.debug("Pushing cache file '%s' of size %s bytes to key '%s'", source_file, os.path.getsize(source_file), rel_path)
+                    mb_size = os.path.getsize(source_file) / 1e6
+                    if mb_size < 10 or (not self.multipart):
+                        self.transfer_progress = 0  # Reset transfer progress counter
+                        key.set_contents_from_filename(source_file,
+                                                       reduced_redundancy=self.use_rr,
+                                                       cb=self._transfer_cb,
+                                                       num_cb=10)
+                    else:
+                        multipart_upload(self.s3server, self.bucket, key.name, source_file, mb_size)
+                    end_time = datetime.now()
+                    log.debug("Pushed cache file '%s' to key '%s' (%s bytes transfered in %s sec)",
+                              source_file, rel_path, os.path.getsize(source_file), end_time - start_time)
+                return True
+            else:
+                log.error("Tried updating key '%s' from source file '%s', but source file does not exist.",
+                          rel_path, source_file)
+        except S3ResponseError:
+            log.exception("Trouble pushing S3 key '%s' from file '%s'", rel_path, source_file)
+        return False
+
+    def file_ready(self, obj, **kwargs):
+        """
+        A helper method that checks if a file corresponding to a dataset is
+        ready and available to be used. Return ``True`` if so, ``False`` otherwise.
+        """
+        rel_path = self._construct_path(obj, **kwargs)
+        # Make sure the size in cache is available in its entirety
+        if self._in_cache(rel_path):
+            if os.path.getsize(self._get_cache_path(rel_path)) == self._get_size_in_s3(rel_path):
+                return True
+            log.debug("Waiting for dataset %s to transfer from OS: %s/%s", rel_path,
+                      os.path.getsize(self._get_cache_path(rel_path)), self._get_size_in_s3(rel_path))
+        return False
+
+    def exists(self, obj, **kwargs):
+        in_cache = in_s3 = False
+        rel_path = self._construct_path(obj, **kwargs)
+
+        # Check cache
+        if self._in_cache(rel_path):
+            in_cache = True
+        # Check S3
+        in_s3 = self._key_exists(rel_path)
+        # log.debug("~~~~~~ File '%s' exists in cache: %s; in s3: %s" % (rel_path, in_cache, in_s3))
+        # dir_only does not get synced so shortcut the decision
+        dir_only = kwargs.get('dir_only', False)
+        base_dir = kwargs.get('base_dir', None)
+        if dir_only:
+            if in_cache or in_s3:
+                return True
+            # for JOB_WORK directory
+            elif base_dir:
+                if not os.path.exists(rel_path):
+                    os.makedirs(rel_path)
+                return True
+            else:
+                return False
+
+        # TODO: Sync should probably not be done here. Add this to an async upload stack?
+        if in_cache and not in_s3:
+            self._push_to_os(rel_path, source_file=self._get_cache_path(rel_path))
+            return True
+        elif in_s3:
+            return True
+        else:
+            return False
+
+    def create(self, obj, **kwargs):
+        if not self.exists(obj, **kwargs):
+
+            # Pull out locally used fields
+            extra_dir = kwargs.get('extra_dir', None)
+            extra_dir_at_root = kwargs.get('extra_dir_at_root', False)
+            dir_only = kwargs.get('dir_only', False)
+            alt_name = kwargs.get('alt_name', None)
+
+            # Construct hashed path
+            rel_path = os.path.join(*directory_hash_id(obj.id))
+
+            # Optionally append extra_dir
+            if extra_dir is not None:
+                if extra_dir_at_root:
+                    rel_path = os.path.join(extra_dir, rel_path)
+                else:
+                    rel_path = os.path.join(rel_path, extra_dir)
+
+            # Create given directory in cache
+            cache_dir = os.path.join(self.staging_path, rel_path)
+            if not os.path.exists(cache_dir):
+                os.makedirs(cache_dir)
+
+            # Although not really necessary to create S3 folders (because S3 has
+            # flat namespace), do so for consistency with the regular file system
+            # S3 folders are marked by having trailing '/' so add it now
+            # s3_dir = '%s/' % rel_path
+            # self._push_to_os(s3_dir, from_string='')
+            # If instructed, create the dataset in cache & in S3
+            if not dir_only:
+                rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % obj.id)
+                open(os.path.join(self.staging_path, rel_path), 'w').close()
+                self._push_to_os(rel_path, from_string='')
+
+    def empty(self, obj, **kwargs):
+        if self.exists(obj, **kwargs):
+            # An object is empty if its size is 0
+            return self.size(obj, **kwargs) == 0
+        else:
+            raise ObjectNotFound('objectstore.empty, object does not exist: %s, kwargs: %s'
+                                 % (str(obj), str(kwargs)))
+
+    def size(self, obj, **kwargs):
+        rel_path = self._construct_path(obj, **kwargs)
+        if self._in_cache(rel_path):
+            try:
+                return os.path.getsize(self._get_cache_path(rel_path))
+            except OSError as ex:
+                log.info("Could not get size of file '%s' in local cache, will try S3. Error: %s", rel_path, ex)
+        elif self.exists(obj, **kwargs):
+            return self._get_size_in_s3(rel_path)
+        log.warning("Did not find dataset '%s', returning 0 for size", rel_path)
+        return 0
+
+    def delete(self, obj, entire_dir=False, **kwargs):
+        rel_path = self._construct_path(obj, **kwargs)
+        extra_dir = kwargs.get('extra_dir', None)
+        base_dir = kwargs.get('base_dir', None)
+        dir_only = kwargs.get('dir_only', False)
+        obj_dir = kwargs.get('obj_dir', False)
+        try:
+            # Remove temporary data in the JOB_WORK directory
+            if base_dir and dir_only and obj_dir:
+                shutil.rmtree(os.path.abspath(rel_path))
+                return True
+
+            # For the case of extra_files, because we don't have a reference to
+            # individual files/keys we need to remove the entire directory structure
+            # with all the files in it. This is easy for the local file system,
+            # but requires iterating through each individual key in S3 and deleting it.
+            if entire_dir and extra_dir:
+                shutil.rmtree(self._get_cache_path(rel_path))
+                results = self.bucket.get_all_keys(prefix=rel_path)
+                for key in results:
+                    log.debug("Deleting key %s", key.name)
+                    key.delete()
+                return True
+            else:
+                # Delete from cache first
+                os.unlink(self._get_cache_path(rel_path))
+                # Delete from S3 as well
+                if self._key_exists(rel_path):
+                    key = Key(self.bucket, rel_path)
+                    log.debug("Deleting key %s", key.name)
+                    key.delete()
+                    return True
+        except S3ResponseError:
+            log.exception("Could not delete key '%s' from S3", rel_path)
+        except OSError:
+            log.exception('%s delete error', self.get_filename(obj, **kwargs))
+        return False
+
+    def get_data(self, obj, start=0, count=-1, **kwargs):
+        rel_path = self._construct_path(obj, **kwargs)
+        # Check cache first and get file if not there
+        if not self._in_cache(rel_path):
+            self._pull_into_cache(rel_path)
+        # Read the file content from cache
+        data_file = open(self._get_cache_path(rel_path), 'r')
+        data_file.seek(start)
+        content = data_file.read(count)
+        data_file.close()
+        return content
+
+    def get_filename(self, obj, **kwargs):
+        base_dir = kwargs.get('base_dir', None)
+        dir_only = kwargs.get('dir_only', False)
+        obj_dir = kwargs.get('obj_dir', False)
+        rel_path = self._construct_path(obj, **kwargs)
+
+        # for JOB_WORK directory
+        if base_dir and dir_only and obj_dir:
+            return os.path.abspath(rel_path)
+
+        cache_path = self._get_cache_path(rel_path)
+        # S3 does not recognize directories as files so cannot check if those exist.
+        # So, if checking dir only, ensure given dir exists in cache and return
+        # the expected cache path.
+        # dir_only = kwargs.get('dir_only', False)
+        # if dir_only:
+        #     if not os.path.exists(cache_path):
+        #         os.makedirs(cache_path)
+        #     return cache_path
+        # Check if the file exists in the cache first
+        if self._in_cache(rel_path):
+            return cache_path
+        # Check if the file exists in persistent storage and, if it does, pull it into cache
+        elif self.exists(obj, **kwargs):
+            if dir_only:  # Directories do not get pulled into cache
+                return cache_path
+            else:
+                if self._pull_into_cache(rel_path):
+                    return cache_path
+        # For the case of retrieving a directory only, return the expected path
+        # even if it does not exist.
+        # if dir_only:
+        #     return cache_path
+        raise ObjectNotFound('objectstore.get_filename, no cache_path: %s, kwargs: %s'
+                             % (str(obj), str(kwargs)))
+        # return cache_path  # Until the upload tool explicitly creates the dataset, return the expected path
+
+    def update_from_file(self, obj, file_name=None, create=False, **kwargs):
+        if create:
+            self.create(obj, **kwargs)
+        if self.exists(obj, **kwargs):
+            rel_path = self._construct_path(obj, **kwargs)
+            # Choose whether to use the dataset file itself or an alternate file
+            if file_name:
+                source_file = os.path.abspath(file_name)
+                # Copy into cache
+                cache_file = self._get_cache_path(rel_path)
+                try:
+                    if source_file != cache_file:
+                        # FIXME? Should this be a `move`?
+                        shutil.copy2(source_file, cache_file)
+                    self._fix_permissions(cache_file)
+                except OSError:
+                    log.exception("Trouble copying source file '%s' to cache '%s'", source_file, cache_file)
+            else:
+                source_file = self._get_cache_path(rel_path)
+            # Update the file on S3
+            self._push_to_os(rel_path, source_file)
+        else:
+            raise ObjectNotFound('objectstore.update_from_file, object does not exist: %s, kwargs: %s'
+                                 % (str(obj), str(kwargs)))
+
+    def get_object_url(self, obj, **kwargs):
+        if self.exists(obj, **kwargs):
+            rel_path = self._construct_path(obj, **kwargs)
+            try:
+                key = Key(self.bucket, rel_path)
+                return key.generate_url(expires_in=86400)  # 24hrs
+            except S3ResponseError:
+                log.exception("Trouble generating URL for dataset '%s'", rel_path)
+        return None
+
+    def get_store_usage_percent(self):
+        return 0.0
+
+
+class SwiftObjectStore(S3ObjectStore):
+    """
+    Object store that stores objects as items in a Swift bucket. A local
+    cache exists that is used as an intermediate location for files between
+    Galaxy and Swift.
+    """
+
+    def _configure_connection(self):
+        log.debug("Configuring Swift Connection")
+        self.conn = boto.connect_s3(aws_access_key_id=self.access_key,
+                                    aws_secret_access_key=self.secret_key,
+                                    is_secure=self.is_secure,
+                                    host=self.host,
+                                    port=self.port,
+                                    calling_format=boto.s3.connection.OrdinaryCallingFormat(),
+                                    path=self.conn_path)
diff --git a/lib/galaxy/objectstore/s3_multipart_upload.py b/lib/galaxy/objectstore/s3_multipart_upload.py
new file mode 100644
index 0000000..9b40b70
--- /dev/null
+++ b/lib/galaxy/objectstore/s3_multipart_upload.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+"""
+Split large file into multiple pieces for upload to S3.
+This parallelizes the task over available cores using multiprocessing.
+Code mostly taken from CloudBioLinux.
+"""
+
+import contextlib
+import functools
+import glob
+import multiprocessing
+import os
+import subprocess
+
+from multiprocessing.pool import IMapIterator
+
+try:
+    import boto
+    from boto.s3.connection import S3Connection
+except ImportError:
+    boto = None
+
+
+def map_wrap(f):
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+        return f(*args, **kwargs)
+    return wrapper
+
+
+def mp_from_ids(s3server, mp_id, mp_keyname, mp_bucketname):
+    """Get the multipart upload from the bucket and multipart IDs.
+
+    This allows us to reconstitute a connection to the upload
+    from within multiprocessing functions.
+    """
+    if s3server['host']:
+        conn = boto.connect_s3(aws_access_key_id=s3server['access_key'],
+                               aws_secret_access_key=s3server['secret_key'],
+                               is_secure=s3server['is_secure'],
+                               host=s3server['host'],
+                               port=s3server['port'],
+                               calling_format=boto.s3.connection.OrdinaryCallingFormat(),
+                               path=s3server['conn_path'])
+    else:
+        conn = S3Connection(s3server['access_key'], s3server['secret_key'])
+
+    bucket = conn.lookup(mp_bucketname)
+    mp = boto.s3.multipart.MultiPartUpload(bucket)
+    mp.key_name = mp_keyname
+    mp.id = mp_id
+    return mp
+
+
+@map_wrap
+def transfer_part(s3server, mp_id, mp_keyname, mp_bucketname, i, part):
+    """Transfer a part of a multipart upload. Designed to be run in parallel.
+    """
+    mp = mp_from_ids(s3server, mp_id, mp_keyname, mp_bucketname)
+    with open(part) as t_handle:
+        mp.upload_part_from_file(t_handle, i + 1)
+    os.remove(part)
+
+
+def multipart_upload(s3server, bucket, s3_key_name, tarball, mb_size):
+    """Upload large files using Amazon's multipart upload functionality.
+    """
+    cores = multiprocessing.cpu_count()
+
+    def split_file(in_file, mb_size, split_num=5):
+        prefix = os.path.join(os.path.dirname(in_file),
+                              "%sS3PART" % (os.path.basename(s3_key_name)))
+        max_chunk = s3server['max_chunk_size']
+        # Split chunks so they are 5MB < chunk < max_chunk_size (250MB)
+        split_size = int(max(min(mb_size / (split_num * 2.0), max_chunk), 5))
+        if not os.path.exists("%saa" % prefix):
+            cl = ["split", "-b%sm" % split_size, in_file, prefix]
+            subprocess.check_call(cl)
+        return sorted(glob.glob("%s*" % prefix))
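+    # Hypothetical sizing example: mb_size=1000, max_chunk_size=250, split_num=8
+    #   -> split_size = int(max(min(1000 / 16.0, 250), 5)) = 62,
+    #      i.e. roughly: split -b62m <in_file> <prefix>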
+
+    mp = bucket.initiate_multipart_upload(s3_key_name,
+                                          reduced_redundancy=s3server['use_rr'])
+
+    with multimap(cores) as pmap:
+        for _ in pmap(transfer_part, ((s3server, mp.id, mp.key_name, mp.bucket_name, i, part)
+                                      for (i, part) in
+                                      enumerate(split_file(tarball, mb_size, cores)))):
+            pass
+    mp.complete_upload()
+
+
+@contextlib.contextmanager
+def multimap(cores=None):
+    """Provide multiprocessing imap like function.
+
+    The context manager handles setting up the pool, working around interrupt
+    issues, and terminating the pool on completion.
+    """
+    if cores is None:
+        cores = max(multiprocessing.cpu_count() - 1, 1)
+
+    def wrapper(func):
+        def wrap(self, timeout=None):
+            return func(self, timeout=timeout if timeout is not None else 1e100)
+        return wrap
+    IMapIterator.next = wrapper(IMapIterator.next)
+    pool = multiprocessing.Pool(cores)
+    yield pool.imap
+    pool.terminate()
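+
+# Usage sketch (illustrative; 'jobs' stands for any iterable of argument
+# tuples for a @map_wrap-decorated function such as transfer_part):
+#   with multimap(4) as pmap:
+#       for _ in pmap(transfer_part, jobs):
+#           pass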
diff --git a/lib/galaxy/openid/__init__.py b/lib/galaxy/openid/__init__.py
new file mode 100644
index 0000000..d196c84
--- /dev/null
+++ b/lib/galaxy/openid/__init__.py
@@ -0,0 +1,3 @@
+"""
+OpenID functionality
+"""
diff --git a/lib/galaxy/openid/providers.py b/lib/galaxy/openid/providers.py
new file mode 100644
index 0000000..ac12300
--- /dev/null
+++ b/lib/galaxy/openid/providers.py
@@ -0,0 +1,145 @@
+"""
+Contains OpenID provider functionality
+"""
+
+import os
+import logging
+from galaxy.util import parse_xml, string_as_bool
+from galaxy.util.odict import odict
+
+
+log = logging.getLogger( __name__ )
+
+NO_PROVIDER_ID = 'None'
+RESERVED_PROVIDER_IDS = [ NO_PROVIDER_ID ]
+
+
+class OpenIDProvider( object ):
+    '''An OpenID Provider object.'''
+    @classmethod
+    def from_file( cls, filename ):
+        return cls.from_elem( parse_xml( filename ).getroot() )
+
+    @classmethod
+    def from_elem( cls, xml_root ):
+        provider_elem = xml_root
+        provider_id = provider_elem.get( 'id', None )
+        provider_name = provider_elem.get( 'name', provider_id )
+        op_endpoint_url = provider_elem.find( 'op_endpoint_url' )
+        if op_endpoint_url is not None:
+            op_endpoint_url = op_endpoint_url.text
+        never_associate_with_user = string_as_bool( provider_elem.get( 'never_associate_with_user', 'False' ) )
+        assert (provider_id and provider_name and op_endpoint_url), Exception( "OpenID Provider improperly configured" )
+        assert provider_id not in RESERVED_PROVIDER_IDS, Exception( 'Specified OpenID Provider uses a reserved id: %s' % ( provider_id ) )
+        sreg_required = []
+        sreg_optional = []
+        use_for = {}
+        store_user_preference = {}
+        use_default_sreg = True
+        for elem in provider_elem.findall( 'sreg' ):
+            use_default_sreg = False
+            for field_elem in elem.findall( 'field' ):
+                sreg_name = field_elem.get( 'name' )
+                assert sreg_name, Exception( 'A name is required for a sreg element' )
+                if string_as_bool( field_elem.get( 'required' ) ):
+                    sreg_required.append( sreg_name )
+                else:
+                    sreg_optional.append( sreg_name )
+                for use_elem in field_elem.findall( 'use_for' ):
+                    use_for[ use_elem.get( 'name' ) ] = sreg_name
+                for store_user_preference_elem in field_elem.findall( 'store_user_preference' ):
+                    store_user_preference[ store_user_preference_elem.get( 'name' ) ] = sreg_name
+        if use_default_sreg:
+            sreg_required = None
+            sreg_optional = None
+            use_for = None
+        return cls( provider_id, provider_name, op_endpoint_url, sreg_required=sreg_required, sreg_optional=sreg_optional, use_for=use_for, store_user_preference=store_user_preference, never_associate_with_user=never_associate_with_user )
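+    # A provider XML file of roughly this shape is expected (illustrative;
+    # the values are examples only):
+    #   <provider id="example" name="Example">
+    #       <op_endpoint_url>https://openid.example.org</op_endpoint_url>
+    #       <sreg>
+    #           <field name="email" required="True">
+    #               <use_for name="email"/>
+    #           </field>
+    #       </sreg>
+    #   </provider>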
+
+    def __init__( self, id, name, op_endpoint_url, sreg_required=None, sreg_optional=None, use_for=None, store_user_preference=None, never_associate_with_user=None ):
+        '''When sreg options are not specified, defaults are used.'''
+        self.id = id
+        self.name = name
+        self.op_endpoint_url = op_endpoint_url
+        if sreg_optional is None:
+            self.sreg_optional = [ 'nickname', 'email' ]
+        else:
+            self.sreg_optional = sreg_optional
+        if sreg_required:
+            self.sreg_required = sreg_required
+        else:
+            self.sreg_required = []
+        if use_for is not None:
+            self.use_for = use_for
+        else:
+            self.use_for = {}
+            if 'nickname' in ( self.sreg_optional + self.sreg_required ):
+                self.use_for[ 'username' ] = 'nickname'
+            if 'email' in ( self.sreg_optional + self.sreg_required ):
+                self.use_for[ 'email' ] = 'email'
+        if store_user_preference:
+            self.store_user_preference = store_user_preference
+        else:
+            self.store_user_preference = {}
+        if never_associate_with_user:
+            self.never_associate_with_user = True
+        else:
+            self.never_associate_with_user = False
+
+    def post_authentication( self, trans, openid_manager, info ):
+        sreg_attributes = openid_manager.get_sreg( info )
+        for store_pref_name, store_pref_value_name in self.store_user_preference.iteritems():
+            if store_pref_value_name in ( self.sreg_optional + self.sreg_required ):
+                trans.user.preferences[ store_pref_name ] = sreg_attributes.get( store_pref_value_name )
+            else:
+                raise Exception( 'Only sreg is currently supported.' )
+        trans.sa_session.add( trans.user )
+        trans.sa_session.flush()
+
+    def has_post_authentication_actions( self ):
+        return bool( self.store_user_preference )
+
+
+class OpenIDProviders( object ):
+    '''Collection of OpenID Providers'''
+    NO_PROVIDER_ID = NO_PROVIDER_ID
+
+    @classmethod
+    def from_file( cls, filename ):
+        try:
+            return cls.from_elem( parse_xml( filename ).getroot() )
+        except Exception as e:
+            log.error( 'Failed to load OpenID Providers: %s' % ( e ) )
+            return cls()
+
+    @classmethod
+    def from_elem( cls, xml_root ):
+        oid_elem = xml_root
+        providers = odict()
+        for elem in oid_elem.findall( 'provider' ):
+            try:
+                provider = OpenIDProvider.from_file( os.path.join( 'openid', elem.get( 'file' ) ) )
+                providers[ provider.id ] = provider
+                log.debug( 'Loaded OpenID provider: %s (%s)' % ( provider.name, provider.id ) )
+            except Exception as e:
+                log.error( 'Failed to add OpenID provider: %s' % ( e ) )
+        return cls( providers )
+
+    def __init__( self, providers=None ):
+        if providers:
+            self.providers = providers
+        else:
+            self.providers = odict()
+        self._banned_identifiers = [ provider.op_endpoint_url for provider in self.providers.itervalues() if provider.never_associate_with_user ]
+
+    def __iter__( self ):
+        for provider in self.providers.itervalues():
+            yield provider
+
+    def get( self, name, default=None ):
+        if name in self.providers:
+            return self.providers[ name ]
+        else:
+            return default
+
+    def new_provider_from_identifier( self, identifier ):
+        return OpenIDProvider( None, identifier, identifier, never_associate_with_user=identifier in self._banned_identifiers )
diff --git a/lib/galaxy/queue_worker.py b/lib/galaxy/queue_worker.py
new file mode 100644
index 0000000..be16dce
--- /dev/null
+++ b/lib/galaxy/queue_worker.py
@@ -0,0 +1,197 @@
+"""
+Galaxy control queue and worker.  This is used to handle 'app' control like
+reloading the toolbox, etc., across multiple processes.
+"""
+
+import logging
+import threading
+import time
+
+import galaxy.queues
+from galaxy import util
+
+from kombu import Connection
+from kombu.mixins import ConsumerMixin
+from kombu.pools import producers
+
+logging.getLogger('kombu').setLevel(logging.WARNING)
+log = logging.getLogger(__name__)
+
+
+def send_control_task(app, task, noop_self=False, kwargs=None):
+    log.info("Sending %s control task." % task)
+    payload = {'task': task,
+               'kwargs': kwargs or {}}
+    if noop_self:
+        payload['noop'] = app.config.server_name
+    try:
+        c = Connection(app.config.amqp_internal_connection)
+        with producers[c].acquire(block=True) as producer:
+            producer.publish(payload, exchange=galaxy.queues.galaxy_exchange,
+                             declare=[galaxy.queues.galaxy_exchange] + galaxy.queues.all_control_queues_for_declare(app.config),
+                             routing_key='control')
+    except Exception:
+        # This is likely connection refused.
+        # TODO Use the specific Exception above.
+        log.exception("Error sending control task: %s." % payload)
+
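+# Example of the published payload (illustrative values):
+#   send_control_task(app, 'reload_tool', noop_self=True,
+#                     kwargs={'tool_id': 'mytool'})
+#   -> {'task': 'reload_tool', 'kwargs': {'tool_id': 'mytool'},
+#       'noop': app.config.server_name}
+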
+
+# Tasks -- to be reorganized into a separate module as appropriate.  This is
+# just an example method.  Ideally this gets pushed into atomic tasks, whether
+# where they're currently invoked, or elsewhere.  (potentially using a dispatch
+# decorator).
+
+def create_panel_section(app, **kwargs):
+    """
+    Updates in memory toolbox dictionary.
+    """
+    log.debug("Updating in-memory tool panel")
+    app.toolbox.create_section(kwargs)
+
+
+def reload_tool(app, **kwargs):
+    params = util.Params(kwargs)
+    tool_id = params.get('tool_id', None)
+    log.debug("Executing reload tool task for %s" % tool_id)
+    if tool_id:
+        app.toolbox.reload_tool_by_id( tool_id )
+    else:
+        log.error("Reload tool invoked without tool id.")
+
+
+def reload_toolbox(app, **kwargs):
+    log.debug("Executing toolbox reload on '%s'", app.config.server_name)
+    reload_count = app.toolbox._reload_count
+    app.toolbox = _get_new_toolbox(app)
+    app.toolbox._reload_count = reload_count + 1
+
+
+def _get_new_toolbox(app):
+    """
+    Generate a new toolbox, by constructing a toolbox from the config files,
+    and then adding pre-existing data managers from the old toolbox to the new toolbox.
+    """
+    from galaxy import tools
+    from galaxy.tools.special_tools import load_lib_tools
+    from galaxy.tools.toolbox.lineages.tool_shed import ToolVersionCache
+    app.tool_version_cache = ToolVersionCache(app)  # Load new tools into version cache
+    tool_configs = app.config.tool_configs
+    if app.config.migrated_tools_config not in tool_configs:
+        tool_configs.append(app.config.migrated_tools_config)
+    start = time.time()
+    new_toolbox = tools.ToolBox(tool_configs, app.config.tool_path, app, app.toolbox._tool_conf_watcher)
+    new_toolbox.data_manager_tools = app.toolbox.data_manager_tools
+    load_lib_tools(new_toolbox)
+    new_toolbox.load_hidden_lib_tool( "galaxy/datatypes/set_metadata_tool.xml" )
+    for tool in new_toolbox.data_manager_tools.values():
+        new_toolbox.register_tool(tool)
+    end = time.time() - start
+    log.debug("Toolbox reload took %d seconds", end)
+    app.reindex_tool_search(new_toolbox)
+    return new_toolbox
+
+
+def reload_data_managers(app, **kwargs):
+    from galaxy.tools.data_manager.manager import DataManagers
+    log.debug("Executing data managers reload on '%s'", app.config.server_name)
+    app._configure_tool_data_tables(from_shed_config=False)
+    reload_tool_data_tables(app)
+    reload_count = app.data_managers._reload_count
+    app.data_managers = DataManagers(app, conf_watchers=app.data_managers.conf_watchers)
+    app.data_managers._reload_count = reload_count + 1
+
+
+def reload_display_application(app, **kwargs):
+    display_application_ids = kwargs.get('display_application_ids', None)
+    log.debug("Executing display application reload task for %s" % display_application_ids)
+    app.datatypes_registry.reload_display_applications(display_application_ids)
+
+
+def reload_sanitize_whitelist(app):
+    log.debug("Executing reload sanitize whitelist control task.")
+    app.config.reload_sanitize_whitelist()
+
+
+def reload_tool_data_tables(app, **kwargs):
+    params = util.Params(kwargs)
+    log.debug("Executing tool data table reload for %s" % params.get('table_names', 'all tables'))
+    table_names = app.tool_data_tables.reload_tables(table_names=params.get('table_name', None))
+    log.debug("Finished data table reload for %s" % table_names)
+
+
+def admin_job_lock(app, **kwargs):
+    job_lock = kwargs.get('job_lock', False)
+    # job_queue is exposed in the root app, but this will be 'fixed' at some
+    # point, so we're using the reference from the handler.
+    app.job_manager.job_lock = job_lock
+    log.info("Administrative Job Lock is now set to %s. Jobs will %s dispatch."
+             % (job_lock, "not" if job_lock else "now"))
+
+
+control_message_to_task = { 'create_panel_section': create_panel_section,
+                            'reload_tool': reload_tool,
+                            'reload_toolbox': reload_toolbox,
+                            'reload_data_managers': reload_data_managers,
+                            'reload_display_application': reload_display_application,
+                            'reload_tool_data_tables': reload_tool_data_tables,
+                            'admin_job_lock': admin_job_lock,
+                            'reload_sanitize_whitelist': reload_sanitize_whitelist}
+
+
+class GalaxyQueueWorker(ConsumerMixin, threading.Thread):
+    """
+    This is a flexible worker for galaxy's queues.  Each process, web or
+    handler, will have one of these used for dispatching so called 'control'
+    tasks.
+    """
+    def __init__(self, app, queue=None, task_mapping=control_message_to_task, connection=None):
+        super(GalaxyQueueWorker, self).__init__()
+        log.info("Initializing %s Galaxy Queue Worker on %s", app.config.server_name, util.mask_password_from_url(app.config.amqp_internal_connection))
+        self.daemon = True
+        if connection:
+            self.connection = connection
+        else:
+            self.connection = app.amqp_internal_connection_obj
+        # explicitly force connection instead of lazy-connecting the first
+        # time it is required.
+        self.connection.connect()
+        self.app = app
+        # Eventually we may want different workers w/ their own queues and task
+        # mappings.  Right now, there's only the one.
+        if queue:
+            # Allows assignment of a particular queue for this worker.
+            self.control_queue = queue
+        else:
+            # Default to figuring out which control queue to use based on the app config.
+            self.control_queue = galaxy.queues.control_queue_from_config(app.config)
+        self.task_mapping = task_mapping
+        self.declare_queues = galaxy.queues.all_control_queues_for_declare(app.config)
+        # TODO we may want to purge the queue at the start to avoid executing
+        # stale 'reload_tool', etc messages.  This can happen if, say, a web
+        # process goes down and messages get sent before it comes back up.
+        # Those messages will no longer be useful (in any current case)
+
+    def bind_and_start(self):
+        log.info("Binding and starting galaxy control worker for %s", self.app.config.server_name)
+        self.control_queue = galaxy.queues.control_queue_from_config(self.app.config)
+        self.start()
+
+    def get_consumers(self, Consumer, channel):
+        return [Consumer(queues=self.control_queue,
+                         callbacks=[self.process_task])]
+
+    def process_task(self, body, message):
+        if body['task'] in self.task_mapping:
+            if body.get('noop', None) != self.app.config.server_name:
+                try:
+                    f = self.task_mapping[body['task']]
+                    log.info("Instance '%s' recieved '%s' task, executing now.", self.app.config.server_name, body['task'])
+                    f(self.app, **body['kwargs'])
+                except Exception:
+                    # this shouldn't ever throw an exception, but...
+                    log.exception("Error running control task type: %s" % body['task'])
+        else:
+            log.warning("Recieved a malformed task message:\n%s" % body)
+        message.ack()
+
+    def shutdown(self):
+        self.should_stop = True
diff --git a/lib/galaxy/queues.py b/lib/galaxy/queues.py
new file mode 100644
index 0000000..150c416
--- /dev/null
+++ b/lib/galaxy/queues.py
@@ -0,0 +1,41 @@
+"""
+
+All message queues used by Galaxy
+
+"""
+
+from galaxy.util.postfork import process_is_uwsgi
+
+from kombu import Exchange, Queue, Connection
+
+ALL_CONTROL = "control.*"
+galaxy_exchange = Exchange('galaxy_core_exchange', type='topic')
+
+
+def all_control_queues_for_declare(config):
+    """
+    For in-memory routing (used by sqlalchemy-based transports), we need to be able to
+    build the entire routing table in producers.
+
+    Refactor later to actually persist this somewhere instead of building it repeatedly.
+    """
+    possible_uwsgi_queues = []
+    if process_is_uwsgi:
+        import uwsgi
+        possible_uwsgi_queues = [Queue("control.%s.%s" % (config.server_name.split('.')[0], wkr['id']), galaxy_exchange, routing_key='control') for wkr in uwsgi.workers()]
+    return possible_uwsgi_queues + [Queue('control.%s' % q, galaxy_exchange, routing_key='control') for q in config.server_names]
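+# Example (hypothetical config): under uWSGI with server_name 'main.web' and
+# two workers this yields Queue('control.main.1') and Queue('control.main.2'),
+# plus one 'control.<name>' queue per entry in config.server_names.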
+
+
+def control_queue_from_config(config):
+    """
+    Returns a Queue instance with the correct name and routing key for this
+    galaxy process's config
+    """
+    return Queue("control.%s" % config.server_name, galaxy_exchange, routing_key='control')
+
+
+def connection_from_config(config):
+    if config.amqp_internal_connection:
+        return Connection(config.amqp_internal_connection)
+    else:
+        return None
diff --git a/lib/galaxy/quota/__init__.py b/lib/galaxy/quota/__init__.py
new file mode 100644
index 0000000..b445184
--- /dev/null
+++ b/lib/galaxy/quota/__init__.py
@@ -0,0 +1,191 @@
+"""
+Galaxy Quotas
+
+"""
+import logging
+import galaxy.util
+
+log = logging.getLogger(__name__)
+
+
+class NoQuotaAgent( object ):
+    """Base quota agent, always returns no quota"""
+    def __init__( self, model ):
+        self.model = model
+        self.sa_session = model.context
+
+    def get_quota( self, user, nice_size=False ):
+        return None
+
+    @property
+    def default_quota( self ):
+        return None
+
+    def get_usage( self, trans=None, user=False, history=False ):
+        if trans:
+            user = trans.user
+            history = trans.history
+        assert user is not False, "Could not determine user."
+        if not user:
+            assert history, "Could not determine anonymous user's history."
+            usage = history.disk_size
+        else:
+            usage = user.total_disk_usage
+        return usage
+
+    def get_percent( self, trans=None, user=False, history=False, usage=False, quota=False ):
+        return None
+
+    def get_user_quotas( self, user ):
+        return []
+
+
+class QuotaAgent( NoQuotaAgent ):
+    """Class that handles galaxy quotas"""
+    def get_quota( self, user, nice_size=False ):
+        """
+        Calculated like so:
+
+            1. Anonymous users get the default quota.
+            2. Logged in users start with the highest of their associated '='
+               quotas or the default quota, if there are no associated '='
+               quotas.  If an '=' unlimited (-1 in the database) quota is found
+               during this process, the user has no quota (aka unlimited).
+            3. Quota is increased or decreased by any corresponding '+' or '-'
+               quotas.
+        """
+        if not user:
+            return self.default_unregistered_quota
+        quotas = []
+        for group in [ uga.group for uga in user.groups ]:
+            for quota in [ gqa.quota for gqa in group.quotas ]:
+                if quota not in quotas:
+                    quotas.append( quota )
+        for quota in [ uqa.quota for uqa in user.quotas ]:
+            if quota not in quotas:
+                quotas.append( quota )
+        use_default = True
+        max = 0
+        adjustment = 0
+        rval = 0
+        for quota in quotas:
+            if quota.deleted:
+                continue
+            if quota.operation == '=' and quota.bytes == -1:
+                rval = None
+                break
+            elif quota.operation == '=':
+                use_default = False
+                if quota.bytes > max:
+                    max = quota.bytes
+            elif quota.operation == '+':
+                adjustment += quota.bytes
+            elif quota.operation == '-':
+                adjustment -= quota.bytes
+        if use_default:
+            max = self.default_registered_quota
+            if max is None:
+                rval = None
+        if rval is not None:
+            rval = max + adjustment
+            if rval <= 0:
+                rval = 0
+        if nice_size:
+            if rval is not None:
+                rval = galaxy.util.nice_size( rval )
+            else:
+                rval = 'unlimited'
+        return rval
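+    # Illustrative walk-through (hypothetical values): with a default
+    # registered quota of 10 GB, one '=' quota of 50 GB, one '+' quota of
+    # 5 GB and one '-' quota of 2 GB, use_default becomes False, max is
+    # 50 GB and adjustment is +3 GB, so get_quota() returns 53 GB in bytes
+    # (or a human-readable string when nice_size=True).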
+
+    @property
+    def default_unregistered_quota( self ):
+        return self._default_quota( self.model.DefaultQuotaAssociation.types.UNREGISTERED )
+
+    @property
+    def default_registered_quota( self ):
+        return self._default_quota( self.model.DefaultQuotaAssociation.types.REGISTERED )
+
+    def _default_quota( self, default_type ):
+        dqa = self.sa_session.query( self.model.DefaultQuotaAssociation ).filter( self.model.DefaultQuotaAssociation.table.c.type == default_type ).first()
+        if not dqa:
+            return None
+        if dqa.quota.bytes < 0:
+            return None
+        return dqa.quota.bytes
+
+    def set_default_quota( self, default_type, quota ):
+        # Unset the current default(s) associated with this quota, if there are any
+        for dqa in quota.default:
+            self.sa_session.delete( dqa )
+        # Unset the current users/groups associated with this quota
+        for uqa in quota.users:
+            self.sa_session.delete( uqa )
+        for gqa in quota.groups:
+            self.sa_session.delete( gqa )
+        # Find the old default, assign the new quota if it exists
+        dqa = self.sa_session.query( self.model.DefaultQuotaAssociation ).filter( self.model.DefaultQuotaAssociation.table.c.type == default_type ).first()
+        if dqa:
+            dqa.quota = quota
+        # Or create if necessary
+        else:
+            dqa = self.model.DefaultQuotaAssociation( default_type, quota )
+        self.sa_session.add( dqa )
+        self.sa_session.flush()
+
+    def get_percent( self, trans=None, user=False, history=False, usage=False, quota=False ):
+        """
+        Return the percentage of any storage quota applicable to the user/transaction.
+        """
+        # if trans passed, use it to get the user, history (instead of/override vals passed)
+        if trans:
+            user = trans.user
+            history = trans.history
+        # if quota wasn't passed, attempt to get the quota
+        if quota is False:
+            quota = self.get_quota( user )
+        # return none if no applicable quotas or quotas disabled
+        if quota is None:
+            return None
+        # get the usage, if it wasn't passed
+        if usage is False:
+            usage = self.get_usage( trans, user, history )
+        try:
+            return min( ( int( float( usage ) / quota * 100 ), 100 ) )
+        except ZeroDivisionError:
+            return 100
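+    # e.g. (hypothetical): usage=2500000000 bytes, quota=10000000000 bytes
+    #   -> min((int(2.5e9 / 1e10 * 100), 100)) == 25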
+
+    def set_entity_quota_associations( self, quotas=[], users=[], groups=[], delete_existing_assocs=True ):
+        for quota in quotas:
+            if delete_existing_assocs:
+                flush_needed = False
+                for a in quota.users + quota.groups:
+                    self.sa_session.delete( a )
+                    flush_needed = True
+                if flush_needed:
+                    self.sa_session.flush()
+            for user in users:
+                uqa = self.model.UserQuotaAssociation( user, quota )
+                self.sa_session.add( uqa )
+            for group in groups:
+                gqa = self.model.GroupQuotaAssociation( group, quota )
+                self.sa_session.add( gqa )
+            self.sa_session.flush()
+
+    def get_user_quotas( self, user ):
+        rval = []
+        if not user:
+            dqa = self.sa_session.query( self.model.DefaultQuotaAssociation ) \
+                .filter( self.model.DefaultQuotaAssociation.table.c.type == self.model.DefaultQuotaAssociation.types.UNREGISTERED ).first()
+            if dqa:
+                rval.append( dqa.quota )
+        else:
+            dqa = self.sa_session.query( self.model.DefaultQuotaAssociation ) \
+                .filter( self.model.DefaultQuotaAssociation.table.c.type == self.model.DefaultQuotaAssociation.types.REGISTERED ).first()
+            if dqa:
+                rval.append( dqa.quota )
+            for uqa in user.quotas:
+                rval.append( uqa.quota )
+            for group in [ uga.group for uga in user.groups ]:
+                for gqa in group.quotas:
+                    rval.append( gqa.quota )
+        return rval
diff --git a/lib/galaxy/sample_tracking/__init__.py b/lib/galaxy/sample_tracking/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/sample_tracking/data_transfer.py b/lib/galaxy/sample_tracking/data_transfer.py
new file mode 100644
index 0000000..13b3a66
--- /dev/null
+++ b/lib/galaxy/sample_tracking/data_transfer.py
@@ -0,0 +1,58 @@
+class DataTransferFactory( object ):
+    type = None
+
+    def parse( self ):
+        pass
+
+
+class ScpDataTransferFactory( DataTransferFactory ):
+    type = 'scp'
+
+    def __init__( self ):
+        pass
+
+    def parse( self, config_file, elem ):
+        self.config = {}
+        # TODO: The 'automatic_transfer' setting is for future use.  If set to True, we will need to
+        # ensure the sample has an associated destination data library before it moves to a certain state
+        # ( e.g., Run started ).
+        self.config[ 'automatic_transfer' ] = elem.get( 'automatic_transfer' )
+        self.config[ 'host' ] = elem.get( 'host' )
+        self.config[ 'user_name' ] = elem.get( 'user_name' )
+        self.config[ 'password' ] = elem.get( 'password' )
+        self.config[ 'data_location' ] = elem.get( 'data_location' )
+        # 'rename_dataset' is optional and it may not be defined in all external types
+        # It is only used in the AB SOLiD external service type for now
+        rename_dataset = elem.get( 'rename_dataset', None )
+        if rename_dataset:
+            self.config['rename_dataset'] = rename_dataset
+        # Validate
+        for name, value in self.config.items():
+            assert value, "'%s' attribute missing in 'data_transfer' element of type 'scp' in external_service_type xml config file: '%s'." % ( name, config_file )
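+        # Expected element shape (illustrative values; the attribute names are
+        # the ones read above):
+        #   <data_transfer protocol="scp" automatic_transfer="False"
+        #                  host="sequencer.example.org" user_name="galaxy"
+        #                  password="secret" data_location="/runs/current"
+        #                  rename_dataset="sample_name"/>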
+
+
+class HttpDataTransferFactory( DataTransferFactory ):
+    type = 'http'
+
+    def __init__( self ):
+        pass
+
+    def parse( self, config_file, elem ):
+        self.config = {}
+        self.config[ 'automatic_transfer' ] = elem.get( 'automatic_transfer' )
+        # Validate
+        for name, value in self.config.items():
+            assert value, "'%s' attribute missing in 'data_transfer' element of type 'http' in external_service_type xml config file: '%s'." % ( name, config_file )
+
+
+class FtpDataTransferFactory( DataTransferFactory ):
+    type = 'ftp'
+
+    def __init__( self ):
+        pass
+
+    def parse( self, elem ):
+        pass
+
+
+data_transfer_factories = dict( [ ( data_transfer.type, data_transfer() ) for data_transfer in [ ScpDataTransferFactory, HttpDataTransferFactory, FtpDataTransferFactory ] ] )
diff --git a/lib/galaxy/sample_tracking/external_service_types.py b/lib/galaxy/sample_tracking/external_service_types.py
new file mode 100644
index 0000000..1e52efc
--- /dev/null
+++ b/lib/galaxy/sample_tracking/external_service_types.py
@@ -0,0 +1,122 @@
+import os
+import logging
+from galaxy.util.odict import odict
+from galaxy import util, model
+from galaxy.forms.forms import form_factory
+from galaxy.external_services.service import ExternalServiceActionsGroup
+from galaxy.sample_tracking.data_transfer import data_transfer_factories
+log = logging.getLogger( __name__ )
+
+
+class ExternalServiceTypeNotFoundException( Exception ):
+    pass
+
+
+class ExternalServiceTypesCollection( object ):
+
+    def __init__( self, config_filename, root_dir, app ):
+        self.all_external_service_types = odict()
+        self.root_dir = root_dir
+        self.app = app
+        try:
+            self.load_all( config_filename )
+        except Exception:
+            log.exception( "ExternalServiceTypesCollection error reading %s", config_filename )
+
+    def load_all( self, config_filename ):
+        self.visible_external_service_types = []
+        tree = util.parse_xml( config_filename )
+        root = tree.getroot()
+        for elem in root:
+            try:
+                if elem.tag == 'external_service_type':
+                    file_path = elem.get( "file" )
+                    visible = util.string_as_bool( elem.get( "visible" ) )
+                    external_service_type = self.load_external_service_type( os.path.join( self.root_dir, file_path ), visible )
+                    self.all_external_service_types[ external_service_type.id ] = external_service_type
+                    log.debug( "Loaded external_service_type: %s %s" % ( external_service_type.name, external_service_type.config_version ) )
+                    if visible:
+                        self.visible_external_service_types.append( external_service_type.id )
+            except Exception:
+                log.exception( "error reading external_service_type from path: %s" % file_path )
+
+    def load_external_service_type( self, config_file, visible=True ):
+        # Parse XML configuration file and get the root element
+        tree = util.parse_xml( config_file )
+        root = tree.getroot()
+        return ExternalServiceType( config_file, root, visible )
+
+    def reload( self, external_service_type_id ):
+        """
+        Attempt to reload the external_service_type identified by 'external_service_type_id', if successful
+        replace the old external_service_type.
+        """
+        if external_service_type_id not in self.all_external_service_types.keys():
+            raise ExternalServiceTypeNotFoundException( "No external_service_type with id %s" % external_service_type_id )
+        old_external_service_type = self.all_external_service_types[ external_service_type_id ]
+        new_external_service_type = self.load_external_service_type( old_external_service_type.config_file )
+        self.all_external_service_types[ external_service_type_id ] = new_external_service_type
+        log.debug( "Reloaded external_service_type %s" % ( external_service_type_id ) )
+        return new_external_service_type
+
+
+class ExternalServiceType( object ):
+    def __init__( self, external_service_type_xml_config, root, visible=True ):
+        self.config_file = external_service_type_xml_config
+        self.parse( root )
+        self.visible = visible
+        root.clear()
+
+    def parse( self, root ):
+        # Get the name
+        self.name = root.get( "name" )
+        if not self.name:
+            raise Exception( "Missing external_service_type 'name'" )
+        # Get the unique id for the external service type
+        self.id = root.get( "id" )
+        if not self.id:
+            raise Exception( "Missing external_service_type 'id'" )
+        self.config_version = root.get( "version" )
+        if not self.config_version:
+            self.config_version = '1.0.0'
+        self.description = util.xml_text(root, "description")
+        self.version = util.xml_text( root.find( "version" ) )
+        # parse the form
+        self.form_definition = form_factory.from_elem( root.find( 'form' ) )
+        self.parse_data_transfer_settings( root )
+        self.parse_run_details( root )
+        # external services actions
+        self.actions = ExternalServiceActionsGroup.from_elem( root.find( 'actions' ), parent=self )
+
+    def parse_data_transfer_settings( self, root ):
+        self.data_transfer = {}
+        data_transfer_settings_elem = root.find( 'data_transfer_settings' )
+        # Currently only data transfer using scp or http is supported.
+        for data_transfer_elem in data_transfer_settings_elem.findall( "data_transfer" ):
+            if data_transfer_elem.get( 'protocol' ) == model.ExternalService.data_transfer_protocol.SCP:
+                scp_data_transfer = data_transfer_factories[ model.ExternalService.data_transfer_protocol.SCP ]
+                scp_data_transfer.parse( self.config_file, data_transfer_elem )
+                self.data_transfer[ model.ExternalService.data_transfer_protocol.SCP ] = scp_data_transfer
+            elif data_transfer_elem.get( 'protocol' ) == model.ExternalService.data_transfer_protocol.HTTP:
+                http_data_transfer = data_transfer_factories[ model.ExternalService.data_transfer_protocol.HTTP ]
+                http_data_transfer.parse( self.config_file, data_transfer_elem )
+                self.data_transfer[ model.ExternalService.data_transfer_protocol.HTTP ] = http_data_transfer
+
+    def parse_run_details( self, root ):
+        self.run_details = {}
+        run_details_elem = root.find( 'run_details' )
+        if run_details_elem is not None:
+            results_elem = run_details_elem.find( 'results' )
+            if results_elem is not None:
+                # Get the list of resulting datatypes
+                # TODO: the 'results_urls' attribute is only useful if the transfer protocol is http(s), so check if that is the case.
+                self.run_details[ 'results' ], self.run_details[ 'results_urls' ] = self.parse_run_details_results( results_elem )
+
+    def parse_run_details_results( self, root ):
+        datatypes_dict = {}
+        urls_dict = {}
+        for datatype_elem in root.findall( "dataset" ):
+            name = datatype_elem.get( 'name' )
+            datatypes_dict[ name ] = datatype_elem.get( 'datatype' )
+            urls_dict[ name ] = datatype_elem.get( 'url', None )
+        return datatypes_dict, urls_dict
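+
+# Markup sketch for the <results> element handled above (attribute values are
+# hypothetical):
+#
+#     <results>
+#         <dataset name="raw_reads" datatype="fastq" url="http://sequencer.example/run1.fastq"/>
+#     </results>
+#
+# parse_run_details_results then returns
+#     ( { 'raw_reads': 'fastq' }, { 'raw_reads': 'http://sequencer.example/run1.fastq' } )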
diff --git a/lib/galaxy/sample_tracking/request_types.py b/lib/galaxy/sample_tracking/request_types.py
new file mode 100644
index 0000000..632b190
--- /dev/null
+++ b/lib/galaxy/sample_tracking/request_types.py
@@ -0,0 +1,33 @@
+"""
+RequestType
+"""
+from galaxy.model import RequestType
+from sample import sample_state_factory
+
+RENAME_DATASET_OPTIONS = dict( [ ( f_type.lower(), f_descript ) for f_type, f_descript in RequestType.rename_dataset_options.items() ] )
+
+
+class RequestTypeFactory( object ):
+    def __init__( self, sample_state_factory, rename_dataset_options ):
+        self.sample_state_factory = sample_state_factory
+        self.rename_dataset_options = rename_dataset_options
+
+    def new( self, name, request_form, sample_form, external_service, description=None, sample_states=None ):
+        """Return new RequestType."""
+        assert name, 'RequestType requires a name'
+        return RequestType( name=name, desc=description, request_form=request_form, sample_form=sample_form, external_service=external_service )
+
+    def from_elem( self, elem, request_form, sample_form, external_service ):
+        """Return RequestType created from an xml string."""
+        name = elem.get( 'name', '' )
+        description = elem.get( 'description', '' )
+        rval = self.new( name, request_form, sample_form, external_service=external_service, description=description )
+        # load states
+        sample_states_elem = elem.find( 'sample_states' )
+        if sample_states_elem is not None:
+            for sample_state_elem in sample_states_elem.findall( 'state' ):
+                self.sample_state_factory.from_elem( rval, sample_state_elem )  # should this be the return value??
+        return rval
+
+
+request_type_factory = RequestTypeFactory( sample_state_factory, RENAME_DATASET_OPTIONS )
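+
+# Usage sketch (the XML string is hypothetical; request_form, sample_form and
+# external_service are assumed to be already-loaded model objects):
+#
+#     from xml.etree import ElementTree
+#     elem = ElementTree.fromstring(
+#         '<request_type name="Illumina run" description="Paired-end run">'
+#         '<sample_states><state name="New" description="Sample entered"/></sample_states>'
+#         '</request_type>' )
+#     request_type = request_type_factory.from_elem( elem, request_form, sample_form, external_service )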
diff --git a/lib/galaxy/sample_tracking/sample.py b/lib/galaxy/sample_tracking/sample.py
new file mode 100644
index 0000000..64b4062
--- /dev/null
+++ b/lib/galaxy/sample_tracking/sample.py
@@ -0,0 +1,26 @@
+"""
+Sample
+"""
+from galaxy.model import SampleState
+
+
+class SampleStateFactory( object ):
+    def new( self, request_type, name, description=None ):
+        """
+        Return new SampleState.
+        """
+        assert name, 'SampleState requires a name'
+        if description is None:
+            description = ''
+        return SampleState( name=name, desc=description, request_type=request_type )
+
+    def from_elem( self, request_type, elem ):
+        """
+        Return SampleState created from an xml string.
+        """
+        name = elem.get( 'name', None )
+        description = elem.get( 'description', None )
+        return self.new( request_type, name, description=description )
+
+
+sample_state_factory = SampleStateFactory()
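+
+# Usage sketch (request_type is an assumed RequestType instance):
+#
+#     state = sample_state_factory.new( request_type, 'Sequenced', description='Run complete' )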
diff --git a/lib/galaxy/security/__init__.py b/lib/galaxy/security/__init__.py
new file mode 100644
index 0000000..f1122e9
--- /dev/null
+++ b/lib/galaxy/security/__init__.py
@@ -0,0 +1,1588 @@
+"""
+Galaxy Security
+"""
+import logging
+import socket
+from datetime import datetime, timedelta
+
+from sqlalchemy import and_, false, or_, not_
+from sqlalchemy.orm import eagerload_all
+
+from galaxy.util import listify
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger(__name__)
+
+
+class Action( object ):
+    def __init__( self, action, description, model ):
+        self.action = action
+        self.description = description
+        self.model = model
+
+
+class RBACAgent:
+    """Class that handles galaxy security"""
+    permitted_actions = Bunch(
+        DATASET_MANAGE_PERMISSIONS=Action( "manage permissions", "Users having associated role can manage the roles associated with permissions on this dataset", "grant" ),
+        DATASET_ACCESS=Action( "access", "Users having associated role can import this dataset into their history for analysis", "restrict" ),
+        LIBRARY_ACCESS=Action( "access library", "Restrict access to this library to only users having associated role", "restrict" ),
+        LIBRARY_ADD=Action( "add library item", "Users having associated role can add library items to this library item", "grant" ),
+        LIBRARY_MODIFY=Action( "modify library item", "Users having associated role can modify this library item", "grant" ),
+        LIBRARY_MANAGE=Action( "manage library permissions", "Users having associated role can manage roles associated with permissions on this library item", "grant" ),
+        # Request type permissions
+        REQUEST_TYPE_ACCESS=Action( "access request_type", "Restrict access to this request type to only users having associated role", "restrict" )
+    )
+
+    def get_action( self, name, default=None ):
+        """Get a permitted action by its dict key or action name"""
+        for k, v in self.permitted_actions.items():
+            if k == name or v.action == name:
+                return v
+        return default
+
+    def get_actions( self ):
+        """Get all permitted actions as a list of Action objects"""
+        return list(self.permitted_actions.__dict__.values())
+
+    def get_item_actions( self, action, item ):
+        raise Exception( 'No valid method of retrieving action (%s) for item %s.' % ( action, item ) )
+
+    def guess_derived_permissions_for_datasets( self, datasets=[] ):
+        raise Exception( "Unimplemented Method" )
+
+    def can_access_dataset( self, roles, dataset ):
+        raise Exception( "Unimplemented Method" )
+
+    def can_manage_dataset( self, roles, dataset ):
+        raise Exception( "Unimplemented Method" )
+
+    def can_access_library( self, roles, library ):
+        raise Exception( "Unimplemented Method" )
+
+    def can_add_library_item( self, roles, item ):
+        raise Exception( "Unimplemented Method" )
+
+    def can_modify_library_item( self, roles, item ):
+        raise Exception( "Unimplemented Method" )
+
+    def can_manage_library_item( self, roles, item ):
+        raise Exception( "Unimplemented Method" )
+
+    def associate_components( self, **kwd ):
+        raise Exception( 'No valid method of associating provided components: %s' % kwd )
+
+    def create_private_user_role( self, user ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_private_user_role( self, user ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_accessible_request_types( self, trans, user ):
+        raise Exception( "Unimplemented Method" )
+
+    def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False ):
+        raise Exception( "Unimplemented Method" )
+
+    def history_set_default_permissions( self, history, permissions=None, dataset=False, bypass_manage_permission=False ):
+        raise Exception( "Unimplemented Method" )
+
+    def set_all_dataset_permissions( self, dataset, permissions, new=False ):
+        raise Exception( "Unimplemented Method" )
+
+    def set_dataset_permission( self, dataset, permission ):
+        raise Exception( "Unimplemented Method" )
+
+    def set_all_library_permissions( self, trans, dataset, permissions ):
+        raise Exception( "Unimplemented Method" )
+
+    def set_library_item_permission( self, library_item, permission ):
+        raise Exception( "Unimplemented Method" )
+
+    def library_is_public( self, library ):
+        raise Exception( "Unimplemented Method" )
+
+    def make_library_public( self, library ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_accessible_libraries( self, trans, user ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_permitted_libraries( self, trans, user, actions ):
+        raise Exception( "Unimplemented Method" )
+
+    def folder_is_public( self, library ):
+        raise Exception( "Unimplemented Method" )
+
+    def make_folder_public( self, folder, count=0 ):
+        raise Exception( "Unimplemented Method" )
+
+    def dataset_is_public( self, dataset ):
+        raise Exception( "Unimplemented Method" )
+
+    def make_dataset_public( self, dataset ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_permissions( self, library_dataset ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_all_roles( self, trans, cntrller ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_legitimate_roles( self, trans, item, cntrller ):
+        raise Exception( "Unimplemented Method" )
+
+    def derive_roles_from_access( self, trans, item_id, cntrller, library=False, **kwd ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_component_associations( self, **kwd ):
+        raise Exception( "Unimplemented Method" )
+
+    def components_are_associated( self, **kwd ):
+        return bool( self.get_component_associations( **kwd ) )
+
+    def convert_permitted_action_strings( self, permitted_action_strings ):
+        """
+        When getting permitted actions from an untrusted source like a
+        form, ensure that they match our actual permitted actions.
+        """
+        return [_ for _ in [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] if _ is not None]
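+
+    # Filtering sketch for convert_permitted_action_strings (the inputs are
+    # hypothetical form values): unknown names are dropped, known names come
+    # back as Action objects.
+    #
+    #     agent.convert_permitted_action_strings( [ 'DATASET_ACCESS', 'bogus' ] )
+    #     # => [ agent.permitted_actions.DATASET_ACCESS ]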
+
+
+class GalaxyRBACAgent( RBACAgent ):
+    def __init__( self, model, permitted_actions=None ):
+        self.model = model
+        if permitted_actions:
+            self.permitted_actions = permitted_actions
+        # List of "library_item" objects and their associated permissions and info template objects
+        self.library_item_assocs = (
+            ( self.model.Library, self.model.LibraryPermissions ),
+            ( self.model.LibraryFolder, self.model.LibraryFolderPermissions ),
+            ( self.model.LibraryDataset, self.model.LibraryDatasetPermissions ),
+            ( self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions ) )
+
+    @property
+    def sa_session( self ):
+        """Returns a SQLAlchemy session"""
+        return self.model.context
+
+    def sort_by_attr( self, seq, attr ):
+        """
+        Sort a sequence of objects by the named attribute.
+
+        Arguments:
+        seq  - the list or any sequence (including an immutable one) of objects to sort.
+        attr - the name of the attribute to sort by
+        """
+        # sorted() with a key function is stable and never compares the objects
+        # themselves (which can be expensive or prohibited) when attribute
+        # values are equal, and it works on both Python 2 and Python 3.
+        return sorted( seq, key=lambda item: getattr( item, attr ) )
+
+    def _get_npns_roles( self, trans ):
+        """
+        non-private, non-sharing roles
+        """
+        return trans.sa_session.query( trans.app.model.Role ) \
+                    .filter( and_( self.model.Role.table.c.deleted == false(),
+                        self.model.Role.table.c.type != self.model.Role.types.PRIVATE,
+                        self.model.Role.table.c.type != self.model.Role.types.SHARING ) ) \
+                    .order_by( self.model.Role.table.c.name )
+
+    def get_all_roles( self, trans, cntrller ):
+        admin_controller = cntrller in [ 'library_admin' ]
+        roles = set()
+        if not trans.user:
+            return self._get_npns_roles( trans )
+        if admin_controller:
+            # The library is public and the user is an admin, so all roles are legitimate
+            for role in trans.sa_session.query( trans.app.model.Role ) \
+                                        .filter( self.model.Role.table.c.deleted == false() ) \
+                                        .order_by( self.model.Role.table.c.name ):
+                roles.add( role )
+        else:
+            # Add the current user's private role
+            roles.add( self.get_private_user_role( trans.user ) )
+            # Add the current user's sharing roles
+            for role in self.get_sharing_roles( trans.user ):
+                roles.add( role )
+            # Add all remaining non-private, non-sharing roles
+            for role in self._get_npns_roles( trans ):
+                roles.add( role )
+        return self.sort_by_attr( [ role for role in roles ], 'name' )
+
+    def get_roles_for_action( self, item, action ):
+        """
+        Return a list containing the roles associated with given action on given item
+        where item is one of Library, LibraryFolder, LibraryDatasetDatasetAssociation,
+        LibraryDataset, Dataset.
+        """
+        roles = []
+        for item_permission in item.actions:
+            permission_action = self.get_action( item_permission.action )
+            if permission_action == action:
+                roles.append( item_permission.role )
+        return roles
+
+    def get_valid_roles( self, trans, item, query=None, page=None, page_limit=None, is_library_access=False ):
+        """
+        This method retrieves the list of possible roles that user can select
+        in the item permissions form. Admins can select any role so the
+        results are paginated in order to save the bandwidth and to speed
+        things up.
+        Standard users can select their own private role, any of their
+        sharing roles and any public role (not private and not sharing).
+        """
+        roles = []
+        if query is not None:
+            # Escape the escape character first, otherwise the '/' characters
+            # inserted for '/_' and '/%' would themselves be doubled.
+            query = query.replace( '/', '//' ).replace( '_', '/_' ).replace( '%', '/%' )
+            search_query = query + '%'
+            log.debug( 'search_query: %s', search_query )
+
+        # Limit the query only to get the page needed
+        if page is not None and page_limit is not None:
+            paginated = True
+            limit = page * page_limit
+        else:
+            paginated = False
+
+        total_count = None
+
+        if isinstance( item, self.model.Library ) and self.library_is_public( item ):
+            is_public_item = True
+        elif isinstance( item, self.model.Dataset ) and self.dataset_is_public( item ):
+            is_public_item = True
+        elif isinstance( item, self.model.LibraryFolder ):
+            is_public_item = True
+        else:
+            is_public_item = False
+
+        # For public items and for library access admins can choose from all roles
+        if trans.user_is_admin() and ( is_public_item or is_library_access ):
+            # Add all non-deleted roles that fit the query
+            db_query = trans.sa_session.query( trans.app.model.Role ).filter( self.model.Role.table.c.deleted == false() )
+            if query is not None:
+                db_query = db_query.filter( self.model.Role.table.c.name.like( search_query, escape='/' ) )
+            total_count = db_query.count()
+            if paginated:
+                # Fetch the smallest prefix of the results that still contains the requested page
+                roles = db_query.order_by( self.model.Role.table.c.name ).limit( limit ).all()
+                page_start = ( page * page_limit ) - page_limit
+                page_end = page_start + page_limit
+                if total_count < page_start:
+                    # Return an empty list if there are fewer results than the requested page start
+                    roles = []
+                else:
+                    roles = roles[ page_start:page_end ]
+            else:
+                roles = db_query.order_by( self.model.Role.table.c.name )
+
+        # Non-admin and public item
+        elif is_public_item:
+            # Add the current user's private role
+            roles.append( self.get_private_user_role( trans.user ) )
+            # Add the current user's sharing roles
+            for role in self.get_sharing_roles( trans.user ):
+                roles.append( role )
+            # Add all remaining non-private, non-sharing roles
+            for role in self._get_npns_roles( trans ):
+                roles.append( role )
+        # User is not admin and item is not public: the user sees all roles
+        # derived from the access roles on the item.
+        else:
+            # If item has roles associated with the access permission, we need to start with them.
+            access_roles = item.get_access_roles( trans )
+            for role in access_roles:
+                if trans.user_is_admin() or self.ok_to_display( trans.user, role ):
+                    roles.append( role )
+                    # Each role potentially has users.  We need to find all roles that each of those users have.
+                    for ura in role.users:
+                        user = ura.user
+                        for ura2 in user.roles:
+                            if trans.user_is_admin() or self.ok_to_display( trans.user, ura2.role ):
+                                roles.append( ura2.role )
+                    # Each role also potentially has groups which, in turn, have members ( users ).  We need to
+                    # find all roles that each group's members have.
+                    for gra in role.groups:
+                        group = gra.group
+                        for uga in group.users:
+                            user = uga.user
+                            for ura in user.roles:
+                                if trans.user_is_admin() or self.ok_to_display( trans.user, ura.role ):
+                                    roles.append( ura.role )
+
+        # Omit duplicated roles by converting to set
+        return_roles = set( roles )
+        if total_count is None:
+            total_count = len( return_roles )
+        return self.sort_by_attr( [ role for role in return_roles ], 'name' ), total_count
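+
+    # Worked pagination example for get_valid_roles (numbers are hypothetical):
+    # with page=3 and page_limit=10 the query fetches limit = 3 * 10 = 30 rows,
+    # then page_start = 30 - 10 = 20 and page_end = 30, so the slice
+    # roles[ 20:30 ] holds exactly the third page.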
+
+    def get_legitimate_roles( self, trans, item, cntrller ):
+        """
+        Return a sorted list of legitimate roles that can be associated with a permission on
+        item where item is a Library or a Dataset.  The cntrller param is the controller from
+        which the request is sent.  We cannot use trans.user_is_admin() because the controller is
+        what is important since admin users do not necessarily have permission to do things
+        on items outside of the admin view.
+
+        If cntrller is from the admin side ( e.g., library_admin ):
+
+            - if item is public, all roles, including private roles, are legitimate.
+            - if item is restricted, legitimate roles are derived from the users and groups associated
+              with each role that is associated with the access permission ( i.e., DATASET_MANAGE_PERMISSIONS or
+              LIBRARY_MANAGE ) on item.  Legitimate roles will include private roles.
+
+        If cntrller is not from the admin side ( e.g., root, library ):
+
+            - if item is public, all non-private roles, except for the current user's private role,
+              are legitimate.
+            - if item is restricted, legitimate roles are derived from the users and groups associated
+              with each role that is associated with the access permission on item.  Private roles, except
+              for the current user's private role, will be excluded.
+        """
+        admin_controller = cntrller in [ 'library_admin' ]
+        roles = set()
+        if ( isinstance( item, self.model.Library ) and self.library_is_public( item ) ) or \
+                ( isinstance( item, self.model.Dataset ) and self.dataset_is_public( item ) ):
+            return self.get_all_roles( trans, cntrller )
+        # If item has roles associated with the access permission, we need to start with them.
+        access_roles = item.get_access_roles( trans )
+        for role in access_roles:
+            if admin_controller or self.ok_to_display( trans.user, role ):
+                roles.add( role )
+                # Each role potentially has users.  We need to find all roles that each of those users have.
+                for ura in role.users:
+                    user = ura.user
+                    for ura2 in user.roles:
+                        if admin_controller or self.ok_to_display( trans.user, ura2.role ):
+                            roles.add( ura2.role )
+                # Each role also potentially has groups which, in turn, have members ( users ).  We need to
+                # find all roles that each group's members have.
+                for gra in role.groups:
+                    group = gra.group
+                    for uga in group.users:
+                        user = uga.user
+                        for ura in user.roles:
+                            if admin_controller or self.ok_to_display( trans.user, ura.role ):
+                                roles.add( ura.role )
+        return self.sort_by_attr( [ role for role in roles ], 'name' )
+
+    def ok_to_display( self, user, role ):
+        """
+        Method for checking if:
+        - a role is private and is the current user's private role
+        - a role is a sharing role and belongs to the current user
+        """
+        if user:
+            if role.type == self.model.Role.types.PRIVATE:
+                return role == self.get_private_user_role( user )
+            if role.type == self.model.Role.types.SHARING:
+                return role in self.get_sharing_roles( user )
+            # If role.type is neither private nor sharing, it's ok to display
+            return True
+        return role.type != self.model.Role.types.PRIVATE and role.type != self.model.Role.types.SHARING
+
+    def allow_action( self, roles, action, item ):
+        """
+        Method for checking a permission for the current user ( based on roles ) to perform a
+        specific action on an item, which must be one of:
+        Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
+        """
+        # SM: Note that calling get_item_actions will emit a query.
+        item_actions = self.get_item_actions( action, item )
+
+        if not item_actions:
+            return action.model == 'restrict'
+        ret_val = False
+        # For DATASET_ACCESS only, user must have ALL associated roles
+        if action == self.permitted_actions.DATASET_ACCESS:
+            for item_action in item_actions:
+                if item_action.role not in roles:
+                    break
+            else:
+                ret_val = True
+        # For remaining actions, user must have any associated role
+        else:
+            for item_action in item_actions:
+                if item_action.role in roles:
+                    ret_val = True
+                    break
+        return ret_val
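+
+    # Example of the ALL-vs-ANY rule above (roles A and B are hypothetical):
+    # a dataset whose DATASET_ACCESS permission lists roles A and B is only
+    # accessible to users holding both, while an item whose LIBRARY_MODIFY
+    # permission lists A and B can be modified by users holding either one.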
+
+    def get_actions_for_items( self, trans, action, permission_items ):
+        # TODO: Rename this; it's a replacement for get_item_actions, but it
+        # doesn't represent what it's really doing, which is confusing.
+        # TODO: Make this work for other classes besides lib_datasets.
+        # That should be as easy as checking the type and writing a query for each;
+        # we're avoiding using the SQLAlchemy backrefs because they can cause lots
+        # of queries to be generated.
+        #
+        # Originally, get_item_actions did:
+        # return [ permission for permission in item.actions if permission.action == action.action ]
+        # The "item" can be just about anything with permissions, and referencing
+        # item.actions causes the item's permissions to be retrieved.
+        # This method will retrieve all permissions for all "items" and only
+        # return the permissions associated with that given action.
+        # We initialize the permissions list to be empty; we will return an
+        # empty list by default.
+        #
+        # Every item passed in gets an entry (an empty list if it has no
+        # permission matching the action); items of classes not handled below
+        # produce no entry at all, so callers must check for membership first.
+        ret_permissions = {}
+        if ( len( permission_items ) > 0 ):
+            # SM: NB: LibraryDatasets became Datasets for some odd reason.
+            if ( isinstance( permission_items[0], trans.model.LibraryDataset ) ):
+                ids = [ item.library_dataset_id for item in permission_items ]
+                permissions = trans.sa_session.query( trans.model.LibraryDatasetPermissions ) \
+                                   .filter( and_( trans.model.LibraryDatasetPermissions.library_dataset_id.in_( ids ),
+                                                  trans.model.LibraryDatasetPermissions.action == action.action ) ) \
+                                   .all()
+
+                # Massage the return data. We will return a list of permissions
+                # for each library dataset. So we initialize the return list to
+                # have an empty list for each dataset. Then each permission is
+                # appended to the right lib dataset.
+                # TODO: Consider eliminating the initialization and just return
+                # empty values for each library dataset id.
+                for item in permission_items:
+                    ret_permissions[ item.library_dataset_id ] = []
+                for permission in permissions:
+                    ret_permissions[ permission.library_dataset_id ].append( permission )
+            elif ( isinstance( permission_items[0], trans.model.Dataset ) ):
+                ids = [ item.id for item in permission_items ]
+                permissions = trans.sa_session.query( trans.model.DatasetPermissions ) \
+                                   .filter( and_( trans.model.DatasetPermissions.dataset_id.in_( ids ),
+                                                  trans.model.DatasetPermissions.action == action.action ) ) \
+                                   .all()
+
+                # Massage the return data. We will return a list of permissions
+                # for each library dataset. So we initialize the return list to
+                # have an empty list for each dataset. Then each permission is
+                # appended to the right lib dataset.
+                # TODO: Consider eliminating the initialization and just return
+                # empty values for each library dataset id.
+                for item in permission_items:
+                    ret_permissions[ item.id ] = []
+                for permission in permissions:
+                    ret_permissions[ permission.dataset_id ].append( permission )
+
+        # Test that we get the same response from get_item_actions for each item:
+        test_code = False
+        if test_code:
+            try:
+                log.debug( "get_actions_for_items: Test start" )
+                for item in permission_items:
+                    base_result = self.get_item_actions( action, item )
+                    new_result = ret_permissions[ item.library_dataset_id ]
+                    # For now, just test against LibraryDatasetIds; other classes
+                    # are not tested yet.
+                    if len( base_result ) == len( new_result ):
+                        common_result = set(base_result).intersection( new_result )
+                        if len( common_result ) == len( base_result ):
+                            log.debug( "Match on permissions for id %d" %
+                                       item.library_dataset_id )
+                        # TODO: Fix this failure message:
+                        else:
+                            log.debug( "Error: dataset %d; originally: %s; now: %s"
+                                       % ( item.library_dataset_id,
+                                           base_result, new_result ) )
+                    else:
+                        log.debug( "Error: dataset %d: had %d entries, now %d entries"
+                                   % ( item.library_dataset_id, len( base_result ),
+                                       len( new_result ) ) )
+                log.debug( "get_actions_for_items: Test end" )
+            except Exception as e:
+                log.debug( "Exception in test code: %s" % e )
+
+        return ret_permissions
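+
+    # Return-value sketch for get_actions_for_items (ids are hypothetical):
+    # for Datasets with ids 1, 2 and 3 where only 2 carries the requested
+    # action, the mapping is { 1: [], 2: [ <DatasetPermissions>, ... ], 3: [] };
+    # LibraryDatasets are keyed by library_dataset_id instead.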
+
+    def allow_action_on_libitems( self, trans, user_roles, action, items ):
+        """
+        This should be the equivalent of allow_action defined on multiple items.
+        It is meant to specifically replace allow_action for multiple
+        LibraryDatasets, but it could be reproduced or modified for
+        allow_action's permitted classes - Dataset, Library, LibraryFolder, and
+        LDDAs.
+        """
+        all_items_actions = self.get_actions_for_items( trans, action, items )
+        ret_allow_action = {}
+
+        # Change item to lib_dataset or vice-versa.
+        for item in items:
+            if item.id in all_items_actions:
+                item_actions = all_items_actions[ item.id ]
+
+                if self.permitted_actions.DATASET_ACCESS == action:
+                    ret_allow_action[ item.id ] = True
+                    for item_action in item_actions:
+                        if item_action.role not in user_roles:
+                            ret_allow_action[ item.id ] = False
+                            break
+
+                # Else look for just one dataset role to be in the list of
+                # acceptable user roles:
+                else:
+                    ret_allow_action[ item.id ] = False
+                    for item_action in item_actions:
+                        if item_action.role in user_roles:
+                            ret_allow_action[ item.id ] = True
+                            break
+
+            else:
+                if 'restrict' == action.model:
+                    ret_allow_action[ item.id ] = True
+                else:
+                    ret_allow_action[ item.id ] = False
+
+        # Test it: the result for each dataset should match the result for
+        # allow_action:
+        test_code = False
+        if test_code:
+            log.debug( "allow_action_for_items: test start" )
+            for item in items:
+                orig_value = self.allow_action( user_roles, action, item )
+                if orig_value == ret_allow_action[ item.id ]:
+                    log.debug( "Item %d: success" % item.id )
+                else:
+                    log.debug( "Item %d: fail: original: %s; new: %s"
+                               % ( item.id, orig_value, ret_allow_action[ item.id ] ) )
+            log.debug( "allow_action_for_items: test end" )
+        return ret_allow_action
+
+    # SM: flagged DELETEME, but DO NOT TOUCH - this actually works.
+    def dataset_access_mapping( self, trans, user_roles, datasets ):
+        '''
+        For the given list of datasets, return a mapping of the datasets' ids
+        to whether they can be accessed by the user or not. The datasets input
+        is expected to be a simple list of Dataset objects.
+        '''
+        datasets_public_map = self.datasets_are_public( trans, datasets )
+        datasets_allow_action_map = self.allow_action_on_libitems( trans, user_roles, self.permitted_actions.DATASET_ACCESS, datasets )
+        can_access = {}
+        for dataset in datasets:
+            can_access[ dataset.id ] = datasets_public_map[ dataset.id ] or datasets_allow_action_map[ dataset.id ]
+        return can_access
+
+    def dataset_permission_map_for_access( self, trans, user_roles, libitems ):
+        '''
+        For a given list of library items (e.g., Datasets), return a map of the
+        datasets' ids to whether they can have permission to use that action
+        (e.g., "access" or "modify") on the dataset. The libitems input is
+        expected to be a simple list of library items, such as Datasets or
+        LibraryDatasets.
+        NB: This is currently only usable for Datasets; it was intended to
+        be used for any library item.
+        '''
+        # Map the library items to whether they are publicly accessible or not.
+        # Then determine what actions are allowed on the item (in case it's not
+        # public). Finally, the item is accessible if it's publicly available
+        # or the right permissions are enabled.
+        # TODO: This only works for Datasets; other code is using X_is_public,
+        # so this will have to be rewritten to support other items.
+        libitems_public_map = self.datasets_are_public( trans, libitems )
+        libitems_allow_action_map = self.allow_action_on_libitems(
+            trans, user_roles, self.permitted_actions.DATASET_ACCESS, libitems
+        )
+        can_access = {}
+        for libitem in libitems:
+            can_access[ libitem.id ] = libitems_public_map[ libitem.id ] or libitems_allow_action_map[ libitem.id ]
+        return can_access
+
+    def item_permission_map_for_modify( self, trans, user_roles, libitems ):
+        return self.allow_action_on_libitems(
+            trans, user_roles, self.permitted_actions.LIBRARY_MODIFY, libitems
+        )
+
+    def item_permission_map_for_manage( self, trans, user_roles, libitems ):
+        return self.allow_action_on_libitems(
+            trans, user_roles, self.permitted_actions.LIBRARY_MANAGE, libitems
+        )
+
+    def item_permission_map_for_add( self, trans, user_roles, libitems ):
+        return self.allow_action_on_libitems(
+            trans, user_roles, self.permitted_actions.LIBRARY_ADD, libitems
+        )
+
+    def can_access_dataset( self, user_roles, dataset ):
+        # SM: dataset_is_public will access dataset.actions, which is a
+        # backref that causes a query to be made to DatasetPermissions
+        retval = self.dataset_is_public( dataset ) or self.allow_action( user_roles, self.permitted_actions.DATASET_ACCESS, dataset )
+        return retval
+
+    def can_manage_dataset( self, roles, dataset ):
+        return self.allow_action( roles, self.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset )
+
+    def can_access_library( self, roles, library ):
+        return self.library_is_public( library ) or self.allow_action( roles, self.permitted_actions.LIBRARY_ACCESS, library )
+
+    def get_accessible_libraries( self, trans, user ):
+        """Return all data libraries that the received user can access"""
+        accessible_libraries = []
+        current_user_role_ids = [ role.id for role in user.all_roles() ]
+        library_access_action = self.permitted_actions.LIBRARY_ACCESS.action
+        restricted_library_ids = [ lp.library_id for lp in trans.sa_session.query( trans.model.LibraryPermissions )
+                                   .filter( trans.model.LibraryPermissions.table.c.action == library_access_action ).distinct() ]
+        accessible_restricted_library_ids = [ lp.library_id for lp in trans.sa_session.query(
+                                              trans.model.LibraryPermissions ).filter(
+                                              and_( trans.model.LibraryPermissions.table.c.action == library_access_action,
+                                              trans.model.LibraryPermissions.table.c.role_id.in_( current_user_role_ids ) ) ) ]
+        # Filter to get libraries accessible by the current user.  Get both
+        # public libraries and restricted libraries accessible by the current user.
+        for library in trans.sa_session.query( trans.model.Library ) \
+                                       .filter( and_( trans.model.Library.table.c.deleted == false(),
+                                                      ( or_( not_( trans.model.Library.table.c.id.in_( restricted_library_ids ) ),
+                                                             trans.model.Library.table.c.id.in_( accessible_restricted_library_ids ) ) ) ) ) \
+                                       .order_by( trans.app.model.Library.name ):
+            accessible_libraries.append( library )
+        return accessible_libraries
+
+    def has_accessible_folders( self, trans, folder, user, roles, search_downward=True ):
+        if self.has_accessible_library_datasets( trans, folder, user, roles, search_downward=search_downward ) or \
+                self.can_add_library_item( roles, folder ) or \
+                self.can_modify_library_item( roles, folder ) or \
+                self.can_manage_library_item( roles, folder ):
+            return True
+        if search_downward:
+            # A single accessible subfolder anywhere below is enough, so check
+            # them all instead of stopping at the first one.
+            for sub_folder in folder.active_folders:
+                if self.has_accessible_folders( trans, sub_folder, user, roles, search_downward=search_downward ):
+                    return True
+        return False
+
+    def has_accessible_library_datasets( self, trans, folder, user, roles, search_downward=True ):
+        for library_dataset in trans.sa_session.query( trans.model.LibraryDataset ) \
+                .filter( and_( trans.model.LibraryDataset.table.c.deleted == false(),
+                               trans.app.model.LibraryDataset.table.c.folder_id == folder.id ) ):
+            if self.can_access_library_item( roles, library_dataset, user ):
+                return True
+        if search_downward:
+            return self.__active_folders_have_accessible_library_datasets( trans, folder, user, roles )
+        return False
+
+    def __active_folders_have_accessible_library_datasets( self, trans, folder, user, roles ):
+        for active_folder in folder.active_folders:
+            if self.has_accessible_library_datasets( trans, active_folder, user, roles ):
+                return True
+        return False
+
+    def can_access_library_item( self, roles, item, user ):
+        if type( item ) == self.model.Library:
+            return self.can_access_library( roles, item )
+        elif type( item ) == self.model.LibraryFolder:
+            return self.can_access_library( roles, item.parent_library ) and self.check_folder_contents( user, roles, item )[0]
+        elif type( item ) == self.model.LibraryDataset:
+            return self.can_access_library( roles, item.folder.parent_library ) and self.can_access_dataset( roles, item.library_dataset_dataset_association.dataset )
+        elif type( item ) == self.model.LibraryDatasetDatasetAssociation:
+            return self.can_access_library( roles, item.library_dataset.folder.parent_library ) and self.can_access_dataset( roles, item.dataset )
+        elif type( item ) == self.model.LibraryDatasetCollectionAssociation:
+            return self.can_access_library( roles, item.folder.parent_library )
+        else:
+            log.warning( 'Unknown library item type: %s' % type( item ) )
+            return False
+
+    def can_add_library_item( self, roles, item ):
+        return self.allow_action( roles, self.permitted_actions.LIBRARY_ADD, item )
+
+    def can_modify_library_item( self, roles, item ):
+        return self.allow_action( roles, self.permitted_actions.LIBRARY_MODIFY, item )
+
+    def can_manage_library_item( self, roles, item ):
+        return self.allow_action( roles, self.permitted_actions.LIBRARY_MANAGE, item )
+
+    def get_item_actions( self, action, item ):
+        # item must be one of: Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
+        # SM: Accessing item.actions emits a query to Library_Dataset_Permissions
+        # if the item is a LibraryDataset:
+        # TODO: Pass in the item's actions - the item isn't needed
+        return [ permission for permission in item.actions if permission.action == action.action ]
+
+    def guess_derived_permissions_for_datasets( self, datasets=[] ):
+        """Returns a dict of { action : [ role, role, ... ] } for the output dataset based upon provided datasets"""
+        perms = {}
+        for dataset in datasets:
+            if not isinstance( dataset, self.model.Dataset ):
+                dataset = dataset.dataset
+            these_perms = {}
+            # initialize blank perms
+            for action in self.get_actions():
+                these_perms[ action ] = []
+            # collect this dataset's perms
+            these_perms = self.get_permissions( dataset )
+            # join or intersect this dataset's permissions with others
+            for action, roles in these_perms.items():
+                if action not in perms.keys():
+                    perms[ action ] = roles
+                else:
+                    if action.model == 'grant':
+                        # intersect existing roles with new roles
+                        perms[ action ] = [_ for _ in roles if _ in perms[ action ]]
+                    elif action.model == 'restrict':
+                        # join existing roles with new roles
+                        perms[ action ].extend( [_ for _ in roles if _ not in perms[ action ]] )
+        return perms
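+
+    # Worked example of the join/intersect rule above (roles are hypothetical):
+    # deriving from dataset 1 with DATASET_ACCESS=[ A ] and
+    # DATASET_MANAGE_PERMISSIONS=[ M1 ], and dataset 2 with
+    # DATASET_ACCESS=[ A, B ] and DATASET_MANAGE_PERMISSIONS=[ M2 ], yields
+    # DATASET_ACCESS=[ A, B ] ('restrict' actions join roles) and
+    # DATASET_MANAGE_PERMISSIONS=[] ('grant' actions intersect them).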
+
+    def associate_components( self, **kwd ):
+        if 'user' in kwd:
+            if 'group' in kwd:
+                return self.associate_user_group( kwd['user'], kwd['group'] )
+            elif 'role' in kwd:
+                return self.associate_user_role( kwd['user'], kwd['role'] )
+        elif 'role' in kwd:
+            if 'group' in kwd:
+                return self.associate_group_role( kwd['group'], kwd['role'] )
+        if 'action' in kwd:
+            if 'dataset' in kwd and 'role' in kwd:
+                return self.associate_action_dataset_role( kwd['action'], kwd['dataset'], kwd['role'] )
+        raise Exception( 'No valid method of associating provided components: %s' % kwd )
+
+    def associate_user_group( self, user, group ):
+        assoc = self.model.UserGroupAssociation( user, group )
+        self.sa_session.add( assoc )
+        self.sa_session.flush()
+        return assoc
+
+    def associate_user_role( self, user, role ):
+        assoc = self.model.UserRoleAssociation( user, role )
+        self.sa_session.add( assoc )
+        self.sa_session.flush()
+        return assoc
+
+    def associate_group_role( self, group, role ):
+        assoc = self.model.GroupRoleAssociation( group, role )
+        self.sa_session.add( assoc )
+        self.sa_session.flush()
+        return assoc
+
+    def associate_action_dataset_role( self, action, dataset, role ):
+        assoc = self.model.DatasetPermissions( action, dataset, role )
+        self.sa_session.add( assoc )
+        self.sa_session.flush()
+        return assoc
+
+    def create_private_user_role( self, user ):
+        # Create private role
+        role = self.model.Role( name=user.email, description='Private Role for ' + user.email, type=self.model.Role.types.PRIVATE )
+        self.sa_session.add( role )
+        self.sa_session.flush()
+        # Add user to role
+        self.associate_components( role=role, user=user )
+        return role
+
+    def get_private_user_role( self, user, auto_create=False ):
+        role = self.sa_session.query( self.model.Role ) \
+                              .filter( and_( self.model.Role.table.c.name == user.email,
+                                             self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ) \
+                              .first()
+        if not role:
+            if auto_create:
+                return self.create_private_user_role( user )
+            else:
+                return None
+        return role
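+
+    # Usage sketch (security_agent and user are assumed to exist):
+    # fetch-or-create the user's private role in one call.
+    #
+    #     private_role = security_agent.get_private_user_role( user, auto_create=True )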
+
+    def get_sharing_roles( self, user ):
+        return self.sa_session.query( self.model.Role ) \
+                              .filter( and_( ( self.model.Role.table.c.name ).like( "Sharing role for: %" + user.email + "%" ),
+                                             self.model.Role.table.c.type == self.model.Role.types.SHARING ) )
+
+    def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False, bypass_manage_permission=False, default_access_private=False ):
+        # bypass_manage_permission is used to change permissions of datasets in a userless history when logging in
+        flush_needed = False
+        if user is None:
+            return None
+        if not permissions:
+            # default permissions
+            permissions = { self.permitted_actions.DATASET_MANAGE_PERMISSIONS: [ self.get_private_user_role( user, auto_create=True ) ] }
+            # new_user_dataset_access_role_default_private is set as True in config file
+            if default_access_private:
+                permissions[ self.permitted_actions.DATASET_ACCESS ] = next(iter(permissions.values()))
+        # Delete all of the current default permissions for the user
+        for dup in user.default_permissions:
+            self.sa_session.delete( dup )
+            flush_needed = True
+        # Add the new default permissions for the user
+        for action, roles in permissions.items():
+            if isinstance( action, Action ):
+                action = action.action
+            for dup in [ self.model.DefaultUserPermissions( user, action, role ) for role in roles ]:
+                self.sa_session.add( dup )
+                flush_needed = True
+        if flush_needed:
+            self.sa_session.flush()
+        if history:
+            for history in user.active_histories:
+                self.history_set_default_permissions( history, permissions=permissions, dataset=dataset, bypass_manage_permission=bypass_manage_permission )
+
+    def user_get_default_permissions( self, user ):
+        permissions = {}
+        for dup in user.default_permissions:
+            action = self.get_action( dup.action )
+            if action in permissions:
+                permissions[ action ].append( dup.role )
+            else:
+                permissions[ action ] = [ dup.role ]
+        return permissions
+
+    def history_set_default_permissions( self, history, permissions={}, dataset=False, bypass_manage_permission=False ):
+        # bypass_manage_permission is used to change permissions of datasets in a user-less history when logging in
+        flush_needed = False
+        user = history.user
+        if not user:
+            # default permissions on a user-less history are None
+            return None
+        if not permissions:
+            permissions = self.user_get_default_permissions( user )
+        # Delete all of the current default permissions for the history
+        for dhp in history.default_permissions:
+            self.sa_session.delete( dhp )
+            flush_needed = True
+        # Add the new default permissions for the history
+        for action, roles in permissions.items():
+            if isinstance( action, Action ):
+                action = action.action
+            for dhp in [ self.model.DefaultHistoryPermissions( history, action, role ) for role in roles ]:
+                self.sa_session.add( dhp )
+                flush_needed = True
+        if flush_needed:
+            self.sa_session.flush()
+        if dataset:
+            # Only deal with datasets that are not purged
+            for hda in history.activatable_datasets:
+                dataset = hda.dataset
+                if dataset.library_associations:
+                    # Don't change permissions on a dataset associated with a library
+                    continue
+                if [ assoc for assoc in dataset.history_associations if assoc.history not in user.histories ]:
+                    # Don't change permissions on a dataset associated with a history not owned by the user
+                    continue
+                if bypass_manage_permission or self.can_manage_dataset( user.all_roles(), dataset ):
+                    self.set_all_dataset_permissions( dataset, permissions )
+
+    def history_get_default_permissions( self, history ):
+        permissions = {}
+        for dhp in history.default_permissions:
+            action = self.get_action( dhp.action )
+            if action in permissions:
+                permissions[ action ].append( dhp.role )
+            else:
+                permissions[ action ] = [ dhp.role ]
+        return permissions
+
+    def set_all_dataset_permissions( self, dataset, permissions={}, new=False, flush=True ):
+        """
+        Set new full permissions on a dataset, eliminating all current permissions.
+        Permission looks like: { Action : [ Role, Role ] }
+        """
+        # Make sure that DATASET_MANAGE_PERMISSIONS is associated with at least 1 role
+        has_dataset_manage_permissions = False
+        for action, roles in permissions.items():
+            if isinstance( action, Action ):
+                if action == self.permitted_actions.DATASET_MANAGE_PERMISSIONS and roles:
+                    has_dataset_manage_permissions = True
+                    break
+            elif action == self.permitted_actions.DATASET_MANAGE_PERMISSIONS.action and roles:
+                has_dataset_manage_permissions = True
+                break
+        if not has_dataset_manage_permissions:
+            return "At least 1 role must be associated with the <b>manage permissions</b> permission on this dataset."
+        flush_needed = False
+        # Delete all of the current permissions on the dataset
+        if not new:
+            for dp in dataset.actions:
+                self.sa_session.delete( dp )
+                flush_needed = True
+        # Add the new permissions on the dataset
+        for action, roles in permissions.items():
+            if isinstance( action, Action ):
+                action = action.action
+            for role in roles:
+                dp = self.model.DatasetPermissions( action, dataset, role_id=role.id )
+                self.sa_session.add( dp )
+                flush_needed = True
+        if flush_needed and flush:
+            self.sa_session.flush()
+        return ""
+
+    def set_dataset_permission( self, dataset, permission={} ):
+        """
+        Set a specific permission on a dataset, leaving all other current permissions on the dataset alone.
+        Permission looks like: { Action.action : [ Role, Role ] }
+        """
+        flush_needed = False
+        for action, roles in permission.items():
+            if isinstance( action, Action ):
+                action = action.action
+            # Delete the current specific permission on the dataset if one exists
+            for dp in dataset.actions:
+                if dp.action == action:
+                    self.sa_session.delete( dp )
+                    flush_needed = True
+            # Add the new specific permission on the dataset
+            for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
+                self.sa_session.add( dp )
+                flush_needed = True
+        if flush_needed:
+            self.sa_session.flush()
+
+    def get_permissions( self, item ):
+        """
+        Return a dictionary containing the actions and associated roles on item
+        where item is one of Library, LibraryFolder, LibraryDatasetDatasetAssociation,
+        LibraryDataset, Dataset.  The dictionary looks like: { Action : [ Role, Role ] }.
+        """
+        permissions = {}
+        for item_permission in item.actions:
+            action = self.get_action( item_permission.action )
+            if action in permissions:
+                permissions[ action ].append( item_permission.role )
+            else:
+                permissions[ action ] = [ item_permission.role ]
+        return permissions
+
+    def get_accessible_request_types( self, trans, user ):
+        """Return all RequestTypes that the received user has permission to access."""
+        accessible_request_types = []
+        current_user_role_ids = [ role.id for role in user.all_roles() ]
+        request_type_access_action = self.permitted_actions.REQUEST_TYPE_ACCESS.action
+        restricted_request_type_ids = [ rtp.request_type_id for rtp in trans.sa_session.query(
+                                        trans.model.RequestTypePermissions ).filter(
+                                        trans.model.RequestTypePermissions.table.c.action == request_type_access_action ).distinct()
+                                        ]
+        accessible_restricted_request_type_ids = [ rtp.request_type_id for rtp in trans.sa_session.query(
+                                                   trans.model.RequestTypePermissions ).filter(
+                                                   and_( trans.model.RequestTypePermissions.table.c.action == request_type_access_action,
+                                                         trans.model.RequestTypePermissions.table.c.role_id.in_( current_user_role_ids ) ) ) ]
+        # Filter to get request types accessible by the current user: both
+        # public request types and restricted ones the user's roles can access.
+        for request_type in trans.sa_session.query( trans.model.RequestType ) \
+                                            .filter( and_( trans.model.RequestType.table.c.deleted == false(),
+                                                           ( or_( not_( trans.model.RequestType.table.c.id.in_( restricted_request_type_ids ) ),
+                                                                  trans.model.RequestType.table.c.id.in_( accessible_restricted_request_type_ids ) ) ) ) ) \
+                                            .order_by( trans.app.model.RequestType.name ):
+            accessible_request_types.append( request_type )
+        return accessible_request_types
+
+    def copy_dataset_permissions( self, src, dst ):
+        if not isinstance( src, self.model.Dataset ):
+            src = src.dataset
+        if not isinstance( dst, self.model.Dataset ):
+            dst = dst.dataset
+        self.set_all_dataset_permissions( dst, self.get_permissions( src ) )
+
+    def privately_share_dataset( self, dataset, users=[] ):
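+        # Find the sharing roles common to every user in `users`; reuse an
+        # existing role only if its membership is exactly `users`, otherwise a
+        # new sharing role is created below.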
+        intersect = None
+        for user in users:
+            roles = [ ura.role for ura in user.roles if ura.role.type == self.model.Role.types.SHARING ]
+            if intersect is None:
+                intersect = roles
+            else:
+                new_intersect = []
+                for role in roles:
+                    if role in intersect:
+                        new_intersect.append( role )
+                intersect = new_intersect
+        sharing_role = None
+        if intersect:
+            for role in intersect:
+                if not [_ for _ in [ ura.user for ura in role.users ] if _ not in users]:
+                    # only use a role if it contains ONLY the users we're sharing with
+                    sharing_role = role
+                    break
+        if sharing_role is None:
+            sharing_role = self.model.Role( name="Sharing role for: " + ", ".join( [ u.email for u in users ] ),
+                                            type=self.model.Role.types.SHARING )
+            self.sa_session.add( sharing_role )
+            self.sa_session.flush()
+            for user in users:
+                self.associate_components( user=user, role=sharing_role )
+        self.set_dataset_permission( dataset, { self.permitted_actions.DATASET_ACCESS: [ sharing_role ] } )
+
+    def set_all_library_permissions( self, trans, library_item, permissions={} ):
+        # Set new permissions on library_item, eliminating all current permissions
+        flush_needed = False
+        for role_assoc in library_item.actions:
+            self.sa_session.delete( role_assoc )
+            flush_needed = True
+        # Add the new permissions on library_item
+        for item_class, permission_class in self.library_item_assocs:
+            if isinstance( library_item, item_class ):
+                for action, roles in permissions.items():
+                    if isinstance( action, Action ):
+                        action = action.action
+                    for role_assoc in [ permission_class( action, library_item, role ) for role in roles ]:
+                        self.sa_session.add( role_assoc )
+                        flush_needed = True
+                    if isinstance( library_item, self.model.LibraryDatasetDatasetAssociation ):
+                        # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
+                        # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
+                        # permission.  In this case, we'll reset this permission to the library_item user's private role.
+                        if not library_item.dataset.has_manage_permissions_roles( trans ):
+                            # This looks like a bug and should be revisited: the `permissions` dict built
+                            # above is a single dict that keeps getting re-edited here because
+                            # `permission` (singular) is what should be defined instead of `permissions`. -John
+                            permissions[ self.permitted_actions.DATASET_MANAGE_PERMISSIONS ] = [ trans.app.security_agent.get_private_user_role( library_item.user ) ]
+                            self.set_dataset_permission( library_item.dataset, permissions )
+                        if action == self.permitted_actions.LIBRARY_MANAGE.action and roles:
+                            # Handle the special case when we are setting the LIBRARY_MANAGE_PERMISSION on a
+                            # library_dataset_dataset_association since the roles need to be applied to the
+                            # DATASET_MANAGE_PERMISSIONS permission on the associated dataset.
+                            permissions = {}
+                            permissions[ self.permitted_actions.DATASET_MANAGE_PERMISSIONS ] = roles
+                            self.set_dataset_permission( library_item.dataset, permissions )
+        if flush_needed:
+            self.sa_session.flush()
+
+    def set_library_item_permission( self, library_item, permission={} ):
+        """
+        Set a specific permission on a library item, leaving all other current permissions on the item alone.
+        Permission looks like: { Action.action : [ Role, Role ] }
+        """
+        flush_needed = False
+        for action, roles in permission.items():
+            if isinstance( action, Action ):
+                action = action.action
+            # Delete the current specific permission on the library item if one exists
+            for item_permission in library_item.actions:
+                if item_permission.action == action:
+                    self.sa_session.delete( item_permission )
+                    flush_needed = True
+            # Add the new specific permission on the library item
+            if isinstance( library_item, self.model.LibraryDataset ):
+                for item_permission in [ self.model.LibraryDatasetPermissions( action, library_item, role ) for role in roles ]:
+                    self.sa_session.add( item_permission )
+                    flush_needed = True
+            elif isinstance( library_item, self.model.Library ):
+                for item_permission in [ self.model.LibraryPermissions( action, library_item, role ) for role in roles ]:
+                    self.sa_session.add( item_permission )
+                    flush_needed = True
+        if flush_needed:
+            self.sa_session.flush()
+
+    def library_is_public( self, library, contents=False ):
+        if contents:
+            # Check all contained folders and datasets to find any that are not public
+            if not self.folder_is_public( library.root_folder ):
+                return False
+        # A library is considered public if there are no "access" actions associated with it.
+        return self.permitted_actions.LIBRARY_ACCESS.action not in [ a.action for a in library.actions ]
+
+    def library_is_unrestricted( self, library ):
+        # A library is considered unrestricted if there are no "access" actions associated with it.
+        return self.permitted_actions.LIBRARY_ACCESS.action not in [ a.action for a in library.actions ]
+
+    def make_library_public( self, library, contents=False ):
+        flush_needed = False
+        if contents:
+            # Make all contained folders (include deleted folders, but not purged folders), public
+            self.make_folder_public( library.root_folder )
+        # A library is considered public if there are no LIBRARY_ACCESS actions associated with it.
+        for lp in library.actions:
+            if lp.action == self.permitted_actions.LIBRARY_ACCESS.action:
+                self.sa_session.delete( lp )
+                flush_needed = True
+        if flush_needed:
+            self.sa_session.flush()
+
+    def folder_is_public( self, folder ):
+        for sub_folder in folder.folders:
+            if not self.folder_is_public( sub_folder ):
+                return False
+        for library_dataset in folder.datasets:
+            ldda = library_dataset.library_dataset_dataset_association
+            if ldda and ldda.dataset and not self.dataset_is_public( ldda.dataset ):
+                return False
+        return True
+
+    def folder_is_unrestricted( self, folder ):
+        # TODO implement folder restrictions
+        # for now all folders are _visible_ but the restricted datasets within are not visible
+        return True
+
+    def make_folder_public( self, folder ):
+        # Make all of the contents (include deleted contents, but not purged contents) of folder public
+        for sub_folder in folder.folders:
+            if not sub_folder.purged:
+                self.make_folder_public( sub_folder )
+        for library_dataset in folder.datasets:
+            dataset = library_dataset.library_dataset_dataset_association.dataset
+            if not dataset.purged and not self.dataset_is_public( dataset ):
+                self.make_dataset_public( dataset )
+
+    def dataset_is_public( self, dataset ):
+        """
+        A dataset is considered public if there are no "access" actions
+        associated with it.  Any other actions ( 'manage permissions',
+        'edit metadata' ) are irrelevant. Accessing dataset.actions
+        will cause a query to be emitted.
+        """
+        return self.permitted_actions.DATASET_ACCESS.action not in [ a.action for a in dataset.actions ]
+
+    def dataset_is_unrestricted( self, trans, dataset):
+        """
+        Different implementation of dataset_is_public above: a dataset is
+        unrestricted if its library dataset dataset association has no access roles.
+        """
+        return len( dataset.library_dataset_dataset_association.get_access_roles( trans ) ) == 0
+
+    def dataset_is_private_to_user( self, trans, dataset ):
+        """
+        If the LibraryDataset object has exactly one access role and that is
+        the current user's private role then we consider the dataset private.
+        """
+        private_role = self.get_private_user_role( trans.user )
+        access_roles = dataset.library_dataset_dataset_association.get_access_roles( trans )
+
+        if len( access_roles ) != 1:
+            return False
+        return access_roles[0].id == private_role.id
+
+    def datasets_are_public( self, trans, datasets ):
+        '''
+        Given a transaction object and a list of Datasets, return
+        a mapping from Dataset ids to whether the Dataset is public
+        or not. All Dataset ids should be returned in the mapping's keys.
+        '''
+        # We go the other way around from dataset_is_public: we start with
+        # all datasets being marked as public. If there is an access action
+        # associated with the dataset, then we mark it as nonpublic:
+        datasets_public = {}
+        dataset_ids = [ dataset.id for dataset in datasets ]
+        for dataset_id in dataset_ids:
+            datasets_public[ dataset_id ] = True
+
+        # Now get all datasets which have DATASET_ACCESS actions:
+        access_data_perms = trans.sa_session.query( trans.app.model.DatasetPermissions ) \
+                                 .filter( and_( trans.app.model.DatasetPermissions.dataset_id.in_( dataset_ids ),
+                                                trans.app.model.DatasetPermissions.action == self.permitted_actions.DATASET_ACCESS.action ) ) \
+                                 .all()
+        # Every dataset returned has "access" privileges associated with it,
+        # so it's not public.
+        for permission in access_data_perms:
+            datasets_public[ permission.dataset_id ] = False
+        return datasets_public
+
+    def make_dataset_public( self, dataset ):
+        # A dataset is considered public if there are no "access" actions associated with it.  Any
+        # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
+        flush_needed = False
+        for dp in dataset.actions:
+            if dp.action == self.permitted_actions.DATASET_ACCESS.action:
+                self.sa_session.delete( dp )
+                flush_needed = True
+        if flush_needed:
+            self.sa_session.flush()
+
+    def derive_roles_from_access( self, trans, item_id, cntrller, library=False, **kwd ):
+        # Check the access permission on a dataset.  If library is True, item_id refers to a library.  If library
+        # is False, item_id refers to a dataset ( item_id must currently be decoded before being sent ).  The
+        # cntrller param is the calling controller, which needs to be passed to get_legitimate_roles().
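+        # kwd is expected to carry the selected role ids keyed per permitted
+        # action, e.g. a 'DATASET_ACCESS_in' list (one '<action>_in' key per
+        # DATASET_* action; see the loop below).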
+        msg = ''
+        permissions = {}
+        # accessible will be True only if at least 1 user has every role in DATASET_ACCESS_in
+        accessible = False
+        # legitimate will be True only if all roles in DATASET_ACCESS_in are in the set of roles returned from
+        # get_legitimate_roles()
+        # legitimate = False # TODO: not used
+        # private_role_found will be true only if more than 1 role is being associated with the DATASET_ACCESS
+        # permission on item, and at least 1 of the roles is private.
+        private_role_found = False
+        error = False
+        for k, v in get_permitted_actions( filter='DATASET' ).items():
+            in_roles = [ self.sa_session.query( self.model.Role ).get( x ) for x in listify( kwd.get( k + '_in', [] ) ) ]
+            if v == self.permitted_actions.DATASET_ACCESS and in_roles:
+                if library:
+                    item = self.sa_session.query( self.model.Library ).get( item_id )
+                else:
+                    item = self.sa_session.query( self.model.Dataset ).get( item_id )
+                if ( library and not self.library_is_public( item ) ) or ( not library and not self.dataset_is_public( item ) ):
+                    # Ensure that roles being associated with DATASET_ACCESS are a subset of the legitimate roles
+                    # derived from the roles associated with the access permission on item if it's not public.  This
+                    # will keep illegitimate roles from being associated with the DATASET_ACCESS permission on the
+                    # dataset (i.e., in the case where item is a library, if Role1 is associated with LIBRARY_ACCESS,
+                    # then only those users that have Role1 should be associated with DATASET_ACCESS).
+                    legitimate_roles = self.get_legitimate_roles( trans, item, cntrller )
+                    illegitimate_roles = []
+                    for role in in_roles:
+                        if role not in legitimate_roles:
+                            illegitimate_roles.append( role )
+                    if illegitimate_roles:
+                        # This condition should never occur since illegitimate roles are filtered out of the set of
+                        # roles displayed on the forms, but just in case there is a bug somewhere that incorrectly
+                        # filters, we'll display this message.
+                        error = True
+                        msg += "The following roles are not associated with users that have the 'access' permission on this "
+                        msg += "item, so they were incorrectly displayed: "
+                        for role in illegitimate_roles:
+                            msg += "%s, " % role.name
+                        msg = msg.rstrip( ", " )
+                        new_in_roles = []
+                        for role in in_roles:
+                            if role in legitimate_roles:
+                                new_in_roles.append( role )
+                        in_roles = new_in_roles
+                if len( in_roles ) > 1:
+                    # At least 1 user must have every role associated with the access
+                    # permission on this dataset, or the dataset is not accessible.
+                    # Since we have more than 1 role, none of them can be private.
+                    for role in in_roles:
+                        if role.type == self.model.Role.types.PRIVATE:
+                            private_role_found = True
+                            break
+                if len( in_roles ) == 1:
+                    accessible = True
+                else:
+                    # At least 1 user must have every role associated with the access
+                    # permission on this dataset, or the dataset is not accessible.
+                    in_roles_set = set()
+                    for role in in_roles:
+                        in_roles_set.add( role )
+                    users_set = set()
+                    for role in in_roles:
+                        for ura in role.users:
+                            users_set.add( ura.user )
+                        for gra in role.groups:
+                            group = gra.group
+                            for uga in group.users:
+                                users_set.add( uga.user )
+                    # Make sure that at least 1 user has every role being associated with the dataset.
+                    for user in users_set:
+                        user_roles_set = set()
+                        for ura in user.roles:
+                            user_roles_set.add( ura.role )
+                        if in_roles_set.issubset( user_roles_set ):
+                            accessible = True
+                            break
+                if private_role_found or not accessible:
+                    error = True
+                    # Don't set the permissions for DATASET_ACCESS if inaccessible or multiple roles with
+                    # at least 1 private, but set all other permissions.
+                    permissions[ self.get_action( v.action ) ] = []
+                    msg = "At least 1 user must have every role associated with accessing datasets.  "
+                    if private_role_found:
+                        msg += "Since you are associating more than 1 role, no private roles are allowed."
+                    if not accessible:
+                        msg += "The roles you attempted to associate for access would make the datasets in-accessible by everyone."
+                else:
+                    permissions[ self.get_action( v.action ) ] = in_roles
+            else:
+                permissions[ self.get_action( v.action ) ] = in_roles
+        return permissions, in_roles, error, msg
+
+    def copy_library_permissions( self, trans, source_library_item, target_library_item, user=None ):
+        # Copy all relevant permissions from source.
+        permissions = {}
+        for role_assoc in source_library_item.actions:
+            if role_assoc.action != self.permitted_actions.LIBRARY_ACCESS.action:
+                # LIBRARY_ACCESS is a special permission that is set only at the library level.
+                if role_assoc.action in permissions:
+                    permissions[role_assoc.action].append( role_assoc.role )
+                else:
+                    permissions[role_assoc.action] = [ role_assoc.role ]
+        self.set_all_library_permissions( trans, target_library_item, permissions )
+        if user:
+            item_class = None
+            for item_class, permission_class in self.library_item_assocs:
+                if isinstance( target_library_item, item_class ):
+                    break
+            if item_class:
+                # Make sure user's private role is included
+                private_role = self.model.security_agent.get_private_user_role( user )
+                for name, action in self.permitted_actions.items():
+                    if not permission_class.filter_by( role_id=private_role.id, action=action.action ).first():
+                        lp = permission_class( action.action, target_library_item, private_role )
+                        self.sa_session.add( lp )
+                        self.sa_session.flush()
+            else:
+                raise Exception( 'Invalid class (%s) specified for target_library_item (%s)' %
+                                 ( target_library_item.__class__, target_library_item.__class__.__name__ ) )
+
+    def get_permitted_libraries( self, trans, user, actions ):
+        """
+        This method is historical (it is not currently used), but may be useful again at some
+        point.  It returns a dictionary whose keys are library objects and whose values are a
+        comma-separated string of folder ids.  This method works with the show_library_item()
+        method below, and it returns libraries for which the received user has permission to
+        perform the received actions.  Here is an example call to this method to return all
+        libraries for which the received user has LIBRARY_ADD permission::
+
+            libraries = trans.app.security_agent.get_permitted_libraries( trans, user,
+                [ trans.app.security_agent.permitted_actions.LIBRARY_ADD ] )
+        """
+        all_libraries = trans.sa_session.query( trans.app.model.Library ) \
+                                        .filter( trans.app.model.Library.table.c.deleted == false() ) \
+                                        .order_by( trans.app.model.Library.name )
+        roles = user.all_roles()
+        actions_to_check = actions
+        # The libraries dictionary looks like: { library1 : '1,2', library2 : '3' }.
+        # Its keys are the libraries that should be displayed for the current user, and its values are
+        # strings of comma-separated folder ids of the associated folders that should NOT be displayed.
+        # The folders that should not be displayed may not be a complete list, but it is ultimately passed
+        # to the calling method to keep from re-checking the same folders when the library / folder
+        # select lists are rendered.
+        libraries = {}
+        for library in all_libraries:
+            can_show, hidden_folder_ids = self.show_library_item( user, roles, library, actions_to_check )
+            if can_show:
+                libraries[ library ] = hidden_folder_ids
+        return libraries
+
+    def show_library_item( self, user, roles, library_item, actions_to_check, hidden_folder_ids='' ):
+        """
+        This method must be sent an instance of Library() or LibraryFolder().  Recursive execution produces a
+        comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along with
+        the string, True is returned if the current user has permission to perform any 1 of actions_to_check
+        on library_item. Otherwise, cycle through all sub-folders in library_item until one is found that meets
+        this criterion, if it exists.  This method does not necessarily scan the entire library as it returns
+        when it finds the first library_item that allows user to perform any one action in actions_to_check.
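+        The hidden_folder_ids accumulator is a comma-separated id string, e.g. '12,34'.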
+        """
+        for action in actions_to_check:
+            if self.allow_action( roles, action, library_item ):
+                return True, hidden_folder_ids
+        if isinstance( library_item, self.model.Library ):
+            return self.show_library_item( user, roles, library_item.root_folder, actions_to_check, hidden_folder_ids='' )
+        if isinstance( library_item, self.model.LibraryFolder ):
+            for folder in library_item.active_folders:
+                can_show, hidden_folder_ids = self.show_library_item( user, roles, folder, actions_to_check, hidden_folder_ids=hidden_folder_ids )
+                if can_show:
+                    return True, hidden_folder_ids
+                if hidden_folder_ids:
+                    hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, folder.id )
+                else:
+                    hidden_folder_ids = '%d' % folder.id
+        return False, hidden_folder_ids
+
+    def get_showable_folders( self, user, roles, library_item, actions_to_check, hidden_folder_ids=None, showable_folders=None ):
+        """
+        This method must be sent an instance of Library(), all the folders of which are scanned to determine if
+        user is allowed to perform any action in actions_to_check. The param hidden_folder_ids, if passed, should
+        contain a list of folder IDs which was generated when the library was previously scanned
+        using the same actions_to_check. A list of showable folders is generated. This method scans the entire library.
+        """
+        if isinstance( library_item, self.model.Library ):
+            return self.get_showable_folders( user, roles, library_item.root_folder, actions_to_check, showable_folders=[] )
+        if isinstance( library_item, self.model.LibraryFolder ):
+            if library_item.id not in hidden_folder_ids:
+                for action in actions_to_check:
+                    if self.allow_action( roles, action, library_item ):
+                        showable_folders.append( library_item )
+                        break
+            for folder in library_item.active_folders:
+                self.get_showable_folders( user, roles, folder, actions_to_check, showable_folders=showable_folders )
+        return showable_folders
+
+    def set_entity_user_associations( self, users=[], roles=[], groups=[], delete_existing_assocs=True ):
+        for user in users:
+            if delete_existing_assocs:
+                flush_needed = False
+                for a in user.non_private_roles + user.groups:
+                    self.sa_session.delete( a )
+                    flush_needed = True
+                if flush_needed:
+                    self.sa_session.flush()
+            self.sa_session.refresh( user )
+            for role in roles:
+                # Make sure we are not creating an additional association with a PRIVATE role
+                if role not in user.roles:
+                    self.associate_components( user=user, role=role )
+            for group in groups:
+                self.associate_components( user=user, group=group )
+
+    def set_entity_group_associations( self, groups=[], users=[], roles=[], delete_existing_assocs=True ):
+        for group in groups:
+            if delete_existing_assocs:
+                flush_needed = False
+                for a in group.roles + group.users:
+                    self.sa_session.delete( a )
+                    flush_needed = True
+                if flush_needed:
+                    self.sa_session.flush()
+            for role in roles:
+                self.associate_components( group=group, role=role )
+            for user in users:
+                self.associate_components( group=group, user=user )
+
+    def set_entity_role_associations( self, roles=[], users=[], groups=[], delete_existing_assocs=True ):
+        for role in roles:
+            if delete_existing_assocs:
+                flush_needed = False
+                for a in role.users + role.groups:
+                    self.sa_session.delete( a )
+                    flush_needed = True
+                if flush_needed:
+                    self.sa_session.flush()
+            for user in users:
+                self.associate_components( user=user, role=role )
+            for group in groups:
+                self.associate_components( group=group, role=role )
+
+    def get_component_associations( self, **kwd ):
+        assert len( kwd ) == 2, 'You must specify exactly 2 Galaxy security components to check for associations.'
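+        # Usage sketch (some_user and some_role are hypothetical):
+        #     get_component_associations( user=some_user, role=some_role )
+        # returns the matching UserRoleAssociation, or None if no association exists.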
+        if 'dataset' in kwd:
+            if 'action' in kwd:
+                return self.sa_session.query( self.model.DatasetPermissions ).filter_by( action=kwd['action'].action, dataset_id=kwd['dataset'].id ).first()
+        elif 'user' in kwd:
+            if 'group' in kwd:
+                return self.sa_session.query( self.model.UserGroupAssociation ).filter_by( group_id=kwd['group'].id, user_id=kwd['user'].id ).first()
+            elif 'role' in kwd:
+                return self.sa_session.query( self.model.UserRoleAssociation ).filter_by( role_id=kwd['role'].id, user_id=kwd['user'].id ).first()
+        elif 'group' in kwd:
+            if 'role' in kwd:
+                return self.sa_session.query( self.model.GroupRoleAssociation ).filter_by( role_id=kwd['role'].id, group_id=kwd['group'].id ).first()
+        raise Exception( 'No valid method of associating provided components: %s' % kwd )
+
+    def check_folder_contents( self, user, roles, folder, hidden_folder_ids='' ):
+        """
+        This method must always be sent an instance of LibraryFolder().  Recursive execution produces a
+        comma-separated string of folder ids whose folders do NOT meet the criteria for showing.  Along
+        with the string, True is returned if the current user has permission to access folder. Otherwise,
+        cycle through all sub-folders in folder until one is found that meets this criterion, if it exists.
+        This method does not necessarily scan the entire library as it returns when it finds the first
+        folder that is accessible to user.
+        """
+        # If a folder is writable, it's accessible and we need not go further
+        if self.can_add_library_item( roles, folder ):
+            return True, ''
+        action = self.permitted_actions.DATASET_ACCESS
+
+        lddas = self.sa_session.query( self.model.LibraryDatasetDatasetAssociation ) \
+                               .join( "library_dataset" ) \
+                               .filter( self.model.LibraryDataset.folder == folder ) \
+                               .join( "dataset" ) \
+                               .options( eagerload_all( "dataset.actions" ) ) \
+                               .all()
+
+        for ldda in lddas:
+            ldda_access_permissions = self.get_item_actions( action, ldda.dataset )
+            if not ldda_access_permissions:
+                # Dataset is public
+                return True, hidden_folder_ids
+            for ldda_access_permission in ldda_access_permissions:
+                if ldda_access_permission.role in roles:
+                    # The current user has access permission on the dataset
+                    return True, hidden_folder_ids
+        for sub_folder in folder.active_folders:
+            can_access, hidden_folder_ids = self.check_folder_contents( user, roles, sub_folder, hidden_folder_ids=hidden_folder_ids )
+            if can_access:
+                return True, hidden_folder_ids
+            if hidden_folder_ids:
+                hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, sub_folder.id )
+            else:
+                hidden_folder_ids = '%d' % sub_folder.id
+        return False, hidden_folder_ids
+
+    def can_access_request_type( self, roles, request_type ):
+        action = self.permitted_actions.REQUEST_TYPE_ACCESS
+        request_type_actions = []
+        for permission in request_type.actions:
+            if permission.action == action.action:
+                request_type_actions.append( permission )
+        if not request_type_actions:
+            return True
+        ret_val = False
+        for request_type_action in request_type_actions:
+            if request_type_action.role in roles:
+                ret_val = True
+                break
+        return ret_val
+
+    def set_request_type_permissions( self, request_type, permissions={} ):
+        # Set new permissions on request_type, eliminating all current permissions
+        for role_assoc in request_type.actions:
+            self.sa_session.delete( role_assoc )
+        # Add the new permissions on request_type
+        permission_class = self.model.RequestTypePermissions
+        flush_needed = False
+        for action, roles in permissions.items():
+            if isinstance( action, Action ):
+                action = action.action
+            for role_assoc in [ permission_class( action, request_type, role ) for role in roles ]:
+                self.sa_session.add( role_assoc )
+                flush_needed = True
+        if flush_needed:
+            self.sa_session.flush()
+
+
+class HostAgent( RBACAgent ):
+    """
+    A simple security agent which allows access to datasets based on host.
+    This exists so that external sites such as UCSC can gain access to
+    datasets whose permissions would normally prevent such access.
+    """
+    # TODO: Make sites user configurable
+    sites = Bunch(
+        ucsc_main=( 'hgw1.cse.ucsc.edu', 'hgw2.cse.ucsc.edu', 'hgw3.cse.ucsc.edu', 'hgw4.cse.ucsc.edu',
+                    'hgw5.cse.ucsc.edu', 'hgw6.cse.ucsc.edu', 'hgw7.cse.ucsc.edu', 'hgw8.cse.ucsc.edu' ),
+        ucsc_test=( 'hgwdev.cse.ucsc.edu', ),
+        ucsc_archaea=( 'lowepub.cse.ucsc.edu', )
+    )
+
+    def __init__( self, model, permitted_actions=None ):
+        self.model = model
+        if permitted_actions:
+            self.permitted_actions = permitted_actions
+
+    @property
+    def sa_session( self ):
+        """Returns a SQLAlchemy session"""
+        return self.model.context
+
+    def allow_action( self, addr, action, **kwd ):
+        if 'dataset' in kwd and action == self.permitted_actions.DATASET_ACCESS:
+            hda = kwd['dataset']
+            if action.action not in [ dp.action for dp in hda.dataset.actions ]:
+                log.debug( 'Allowing access to public dataset with hda: %i.' % hda.id )
+                return True  # dataset has no roles associated with the access permission, thus is already public
+            hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
+                                    .filter_by( history_dataset_association_id=hda.id ).first()
+            if not hdadaa:
+                log.debug( 'Denying access to private dataset with hda: %i.  No hdadaa record for this dataset.' % hda.id )
+                return False  # no auth
+            # We could just look up the reverse of addr, but then we'd also
+            # have to verify it with the forward address and special case any
+            # IPs (instead of hosts) in the server list.
+            #
+            # This would be improved by caching, but that's what the OS's name
+            # service cache daemon is for (you ARE running nscd, right?).
+            for server in HostAgent.sites.get( hdadaa.site, [] ):
+                # We're going to search in order, but if the remote site is load
+                # balancing their connections (as UCSC does), this is okay.
+                try:
+                    if socket.gethostbyname( server ) == addr:
+                        break  # remote host is in the server list
+                except ( socket.error, socket.gaierror ):
+                    pass  # can't resolve, try next
+            else:
+                log.debug( 'Denying access to private dataset with hda: %i.  Remote addr is not a valid server for site: %s.' % ( hda.id, hdadaa.site ) )
+                return False  # remote addr is not in the server list
+            if ( datetime.utcnow() - hdadaa.update_time ) > timedelta( seconds=60 ):
+                log.debug( 'Denying access to private dataset with hda: %i.  Authorization was granted, but has expired.' % hda.id )
+                return False  # not authz'd in the last 60 seconds
+            log.debug( 'Allowing access to private dataset with hda: %i.  Remote server is: %s.' % ( hda.id, server ) )
+            return True
+        else:
+            raise Exception( 'The dataset access permission is the only valid permission in the host security agent.' )
+
+    def set_dataset_permissions( self, hda, user, site ):
+        hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
+                                .filter_by( history_dataset_association_id=hda.id ).first()
+        if hdadaa:
+            hdadaa.update_time = datetime.utcnow()
+        else:
+            hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization( hda=hda, user=user, site=site )
+        self.sa_session.add( hdadaa )
+        self.sa_session.flush()
+
+
+def get_permitted_actions( filter=None ):
+    '''Utility function to return a subset of RBACAgent's permitted actions'''
+    if filter is None:
+        return RBACAgent.permitted_actions
+    tmp_bunch = Bunch()
+    for k, v in RBACAgent.permitted_actions.items():
+        if k.startswith( filter ):
+            tmp_bunch.__dict__[ k ] = v
+    return tmp_bunch
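+
+# For example, get_permitted_actions( filter='DATASET' ) returns a Bunch holding
+# only the DATASET_* actions, such as DATASET_ACCESS and DATASET_MANAGE_PERMISSIONS.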
diff --git a/lib/galaxy/security/passwords.py b/lib/galaxy/security/passwords.py
new file mode 100644
index 0000000..79dc573
--- /dev/null
+++ b/lib/galaxy/security/passwords.py
@@ -0,0 +1,82 @@
+import hmac
+import hashlib
+from struct import Struct
+from operator import xor
+from itertools import starmap
+from os import urandom
+from base64 import b64encode
+from galaxy.util import safe_str_cmp
+
+SALT_LENGTH = 12
+KEY_LENGTH = 24
+HASH_FUNCTION = 'sha256'
+COST_FACTOR = 10000
+
+
+def hash_password( password ):
+    """
+    Hash a password, currently will use the PBKDF2 scheme.
+    """
+    return hash_password_PBKDF2( password )
+
+
+def check_password( guess, hashed ):
+    """
+    Check a hashed password. Supports either PBKDF2 if the hash is
+    prefixed with that string, or sha1 otherwise.
+    """
+    if hashed.startswith( "PBKDF2" ):
+        if check_password_PBKDF2( guess, hashed ):
+            return True
+    else:
+        # Passwords were originally encoded with sha1 and hexed
+        if hashlib.sha1( guess ).hexdigest() == hashed:
+            return True
+    # Password does not match
+    return False
+
+
+def hash_password_PBKDF2( password ):
+    # Generate a random salt
+    salt = b64encode( urandom( SALT_LENGTH ) )
+    # Apply the pbkdf2 encoding
+    hashed = pbkdf2_bin( bytes( password ), salt, COST_FACTOR, KEY_LENGTH, getattr( hashlib, HASH_FUNCTION ) )
+    # Format
+    return 'PBKDF2${0}${1}${2}${3}'.format( HASH_FUNCTION, COST_FACTOR, salt, b64encode( hashed ) )
+
+
+def check_password_PBKDF2( guess, hashed ):
+    # Split the database representation to extract cost_factor and salt
+    name, hash_function, cost_factor, salt, encoded_original = hashed.split( '$', 5 )
+    # Hash the guess using the same parameters
+    hashed_guess = pbkdf2_bin( bytes( guess ), salt, int( cost_factor ), KEY_LENGTH, getattr( hashlib, hash_function ) )
+    encoded_guess = b64encode( hashed_guess )
+    return safe_str_cmp( encoded_original, encoded_guess )
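+
+# Round-trip sketch of the two functions above ('secret' is an arbitrary
+# example string):
+#     stored = hash_password( 'secret' )   # -> 'PBKDF2$sha256$10000$<salt>$<key>'
+#     assert check_password( 'secret', stored )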
+
+
+# Taken from https://github.com/mitsuhiko/python-pbkdf2/blob/master/pbkdf2.py
+# (c) Copyright 2011 by Armin Ronacher, BSD LICENSE
+_pack_int = Struct('>I').pack
+
+
+def pbkdf2_bin( data, salt, iterations=1000, keylen=24, hashfunc=None ):
+    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
+    with the given `salt`.  It iterates `iterations` times and produces a
+    key of `keylen` bytes.  By default SHA-1 is used as hash function,
+    a different hashlib `hashfunc` can be provided.
+    """
+    hashfunc = hashfunc or hashlib.sha1
+    mac = hmac.new(data, None, hashfunc)
+
+    def _pseudorandom(x, mac=mac):
+        h = mac.copy()
+        h.update(x)
+        return [ord(_) for _ in h.digest()]
+    buf = []
+    for block in range(1, -(-keylen // mac.digest_size) + 1):
+        rv = u = _pseudorandom(salt + _pack_int(block))
+        for i in range(iterations - 1):
+            u = _pseudorandom(''.join(map(chr, u)))
+            rv = starmap( xor, zip( rv, u ) )  # Python < 2.6.8: starmap requires function inputs to be tuples, so we need to use zip instead of izip
+        buf.extend(rv)
+    return ''.join(map(chr, buf))[:keylen]
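+
+# For example, pbkdf2_bin( 'secret', 'salt', 10000, 24, hashlib.sha256 ) returns
+# a 24-byte binary string (argument values here are illustrative only).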
diff --git a/lib/galaxy/security/validate_user_input.py b/lib/galaxy/security/validate_user_input.py
new file mode 100644
index 0000000..c3b927a
--- /dev/null
+++ b/lib/galaxy/security/validate_user_input.py
@@ -0,0 +1,86 @@
+"""
+Utilities for validating inputs related to user objects.
+
+The validate_* methods in this file return simple messages that do not contain
+user inputs - so these methods do not need to be escaped.
+"""
+import logging
+import re
+
+log = logging.getLogger( __name__ )
+
+# Email validity parameters
+VALID_EMAIL_RE = re.compile( r"[^@]+@[^@]+\.[^@]+" )
+EMAIL_MAX_LEN = 255
+
+# Public name validity parameters
+PUBLICNAME_MIN_LEN = 3
+PUBLICNAME_MAX_LEN = 255
+VALID_PUBLICNAME_RE = re.compile( r"^[a-z0-9._\-]+$" )
+VALID_PUBLICNAME_SUB = re.compile( r"[^a-z0-9._\-]" )
+FILL_CHAR = '-'
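+# For example, 'jane.doe-1' satisfies VALID_PUBLICNAME_RE, while 'Jane Doe' does
+# not (upper-case letters and spaces are replaced with FILL_CHAR on transform).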
+
+# Password validity parameters
+PASSWORD_MIN_LEN = 6
+
+
+def validate_email( trans, email, user=None, check_dup=True ):
+    """
+    Validates the email format and also checks whether the domain is blacklisted in the disposable domains configuration.
+    """
+    message = ''
+    if user and user.email == email:
+        return message
+    if not( VALID_EMAIL_RE.match( email ) ):
+        message = "The format of the email address is not correct."
+    elif len( email ) > EMAIL_MAX_LEN:
+        message = "Email address cannot be more than %d characters in length." % EMAIL_MAX_LEN
+    elif check_dup and trans.sa_session.query( trans.app.model.User ).filter_by( email=email ).first():
+        message = "User with that email already exists."
+    # If the blacklist is not empty, filter out the disposable domains.
+    elif trans.app.config.blacklist_content is not None:
+        domain = email.split('@')[1]
+        if len( domain.split('.') ) > 2:
+            domain = ('.').join( domain.split('.')[-2:] )
+        if domain in trans.app.config.blacklist_content:
+            message = "Please enter your permanent email address."
+    return message
+
+
+def validate_publicname( trans, publicname, user=None ):
+    # Public names must be at least three characters in length and contain only lower-case
+    # letters, numbers, and the '.', '_' and '-' characters.
+    if user and user.username == publicname:
+        return ''
+    if len( publicname ) < PUBLICNAME_MIN_LEN:
+        return "Public name must be at least %d characters in length." % ( PUBLICNAME_MIN_LEN )
+    if len( publicname ) > PUBLICNAME_MAX_LEN:
+        return "Public name cannot be more than %d characters in length." % ( PUBLICNAME_MAX_LEN )
+    if not( VALID_PUBLICNAME_RE.match( publicname ) ):
+        return "Public name must contain only lower-case letters, numbers, '.', '_' and '-'."
+    if trans.sa_session.query( trans.app.model.User ).filter_by( username=publicname ).first():
+        return "Public name is taken; please choose another."
+    return ''
+
+
+def transform_publicname( trans, publicname, user=None ):
+    # Transform the public name to meet the validation rules above: lower-case
+    # letters, numbers, and the '.', '_' and '-' characters only, padded with
+    # FILL_CHAR if too short.
+    # TODO: Enhance to allow generation of semi-random public names, e.g. when a name is valid but taken
+    if user and user.username == publicname:
+        return publicname
+    elif publicname not in [ 'None', None, '' ]:
+        publicname = publicname.lower()
+        publicname = re.sub( VALID_PUBLICNAME_SUB, FILL_CHAR, publicname )
+        publicname = publicname.ljust( PUBLICNAME_MIN_LEN + 1, FILL_CHAR )[:PUBLICNAME_MAX_LEN]
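+        # e.g. (a sketch): 'A b!' becomes 'a-b-' after lowercasing, substitution
+        # and padding with FILL_CHAR to PUBLICNAME_MIN_LEN + 1 characters.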
+        if not trans.sa_session.query( trans.app.model.User ).filter_by( username=publicname ).first():
+            return publicname
+    return ''
+
+
+def validate_password( trans, password, confirm ):
+    if len( password ) < PASSWORD_MIN_LEN:
+        return "Use a password of at least %d characters." % PASSWORD_MIN_LEN
+    elif password != confirm:
+        return "Passwords don't match."
+    return ''
diff --git a/lib/galaxy/tags/__init__.py b/lib/galaxy/tags/__init__.py
new file mode 100644
index 0000000..4054609
--- /dev/null
+++ b/lib/galaxy/tags/__init__.py
@@ -0,0 +1,3 @@
+"""
+Galaxy tagging classes and methods.
+"""
diff --git a/lib/galaxy/tags/tag_handler.py b/lib/galaxy/tags/tag_handler.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py
new file mode 100755
index 0000000..ba8ee6c
--- /dev/null
+++ b/lib/galaxy/tools/__init__.py
@@ -0,0 +1,2516 @@
+"""
+Classes encapsulating galaxy tools and tool configuration.
+"""
+
+import glob
+import json
+import logging
+import os
+import re
+import tarfile
+import tempfile
+import threading
+import urllib
+from datetime import datetime
+
+from cgi import FieldStorage
+from xml.etree import ElementTree
+from mako.template import Template
+from paste import httpexceptions
+from six import string_types
+
+from galaxy.version import VERSION_MAJOR
+from galaxy import model
+from galaxy.managers import histories
+from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
+from galaxy import exceptions
+from galaxy.queue_worker import (
+    reload_toolbox,
+    send_control_task
+)
+from galaxy.tools.actions import DefaultToolAction
+from galaxy.tools.actions.upload import UploadToolAction
+from galaxy.tools.actions.data_source import DataSourceToolAction
+from galaxy.tools.actions.data_manager import DataManagerToolAction
+from galaxy.tools.actions.model_operations import ModelOperationToolAction
+from galaxy.tools.deps import views
+from galaxy.tools.deps import CachedDependencyManager
+from galaxy.tools.parameters import params_to_incoming, check_param, params_from_strings, params_to_strings, visit_input_values
+from galaxy.tools.parameters import output_collect
+from galaxy.tools.parameters.basic import (BaseURLToolParameter,
+                                           DataToolParameter, DataCollectionToolParameter, HiddenToolParameter,
+                                           SelectToolParameter, ToolParameter)
+from galaxy.tools.parameters.grouping import Conditional, ConditionalWhen, Repeat, Section, UploadDataset
+from galaxy.tools.parameters.input_translation import ToolInputTranslator
+from galaxy.tools.test import parse_tests
+from galaxy.tools.parser import get_tool_source
+from galaxy.tools.parser.xml import XmlPageSource
+from galaxy.tools.parser import ToolOutputCollectionPart
+from galaxy.tools.toolbox import BaseGalaxyToolBox
+from galaxy.util import rst_to_html, string_as_bool
+from galaxy.util import ExecutionTimer
+from galaxy.util import listify
+from galaxy.util import unicodify
+from galaxy.tools.parameters.meta import expand_meta_parameters
+from galaxy.util.bunch import Bunch
+from galaxy.util.expressions import ExpressionContext
+from galaxy.util.json import json_fix
+from galaxy.util.odict import odict
+from galaxy.util.template import fill_template
+from galaxy.web import url_for
+from galaxy.web.form_builder import SelectField
+from galaxy.util.dictifiable import Dictifiable
+from galaxy.work.context import WorkRequestContext
+from tool_shed.util import common_util
+import tool_shed.util.repository_util as repository_util
+from tool_shed.util import shed_util_common as suc
+
+from .loader import template_macro_params, raw_tool_xml_tree, imported_macro_paths
+from .execute import execute as execute_job
+import galaxy.jobs
+
+log = logging.getLogger( __name__ )
+
+HELP_UNINITIALIZED = threading.Lock()
+MODEL_TOOLS_PATH = os.path.abspath(os.path.dirname(__file__))
+
+
+class ToolErrorLog(object):
+    def __init__(self):
+        self.error_stack = []
+        self.max_errors = 100
+
+    def add_error(self, file, phase, exception):
+        self.error_stack.insert(0, {
+            "file": file,
+            "time": str(datetime.now()),
+            "phase": phase,
+            "error": str(exception)
+        } )
+        if len(self.error_stack) > self.max_errors:
+            self.error_stack.pop()
+
+
+global_tool_errors = ToolErrorLog()
+
+
+class ToolInputsNotReadyException( Exception ):
+    pass
+
+
+class ToolNotFoundException( Exception ):
+    pass
+
+
+class ToolBox( BaseGalaxyToolBox ):
+    """ A derivative of AbstractToolBox with knowledge about Tool internals -
+    how to construct them, action types, dependency management, etc.
+    """
+
+    def __init__( self, config_filenames, tool_root_dir, app, tool_conf_watcher=None ):
+        self._reload_count = 0
+        super( ToolBox, self ).__init__(
+            config_filenames=config_filenames,
+            tool_root_dir=tool_root_dir,
+            app=app,
+            tool_conf_watcher=tool_conf_watcher
+        )
+
+    def handle_reload_toolbox(self):
+        reload_toolbox(self.app)
+
+    def handle_panel_update(self, section_dict):
+        """
+        Sends a panel update to all threads/processes.
+        """
+        send_control_task(self.app, 'create_panel_section', kwargs=section_dict)
+        # The following local call to self.create_section should be unnecessary
+        # but occasionally the local ToolPanelElements instance appears to not
+        # get updated.
+        self.create_section(section_dict)
+
+    def has_reloaded(self, other_toolbox):
+        return self._reload_count != other_toolbox._reload_count
+
+    @property
+    def all_requirements(self):
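+        # Deduplicate requirement dicts by round-tripping through canonical
+        # (sort_keys) JSON, since the dicts themselves are not hashable.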
+        reqs = [json.dumps(req, sort_keys=True) for _, tool in self.tools() for req in tool.tool_requirements]
+        return [json.loads(req) for req in set(reqs)]
+
+    @property
+    def tools_by_id( self ):
+        # Deprecated method, TODO - eliminate calls to this in test/.
+        return self._tools_by_id
+
+    def create_tool( self, config_file, repository_id=None, guid=None, **kwds ):
+        try:
+            tool_source = get_tool_source( config_file, enable_beta_formats=getattr( self.app.config, "enable_beta_tool_formats", False ) )
+        except Exception as e:
+            # capture and log parsing errors
+            global_tool_errors.add_error(config_file, "Tool XML parsing", e)
+            raise e
+        # Allow specifying a different tool subclass to instantiate
+        tool_module = tool_source.parse_tool_module()
+        if tool_module is not None:
+            module, cls = tool_module
+            mod = __import__( module, globals(), locals(), [cls] )
+            ToolClass = getattr( mod, cls )
+        elif tool_source.parse_tool_type():
+            tool_type = tool_source.parse_tool_type()
+            ToolClass = tool_types.get( tool_type )
+        else:
+            # Normal tool
+            root = getattr( tool_source, 'root', None )
+            ToolClass = Tool
+        tool = ToolClass( config_file, tool_source, self.app, guid=guid, repository_id=repository_id, **kwds )
+        return tool
+
+    def handle_datatypes_changed( self ):
+        """ Refresh upload tools when new datatypes are added. """
+        for tool_id in self._tools_by_id:
+            tool = self._tools_by_id[ tool_id ]
+            if isinstance( tool.tool_action, UploadToolAction ):
+                self.reload_tool_by_id( tool_id )
+
+    def get_tool_components( self, tool_id, tool_version=None, get_loaded_tools_by_lineage=False, set_selected=False ):
+        """
+        Retrieve all loaded versions of a tool from the toolbox and return a select list enabling
+        selection of a different version, the list of the tool's loaded versions, and the specified tool.
+        """
+        toolbox = self
+        tool_version_select_field = None
+        tools = []
+        tool = None
+        # Backwards compatibility for datasource tools that have default tool_id configured, but which
+        # are now using only GALAXY_URL.
+        tool_ids = listify( tool_id )
+        for tool_id in tool_ids:
+            if get_loaded_tools_by_lineage:
+                tools = toolbox.get_loaded_tools_by_lineage( tool_id )
+            else:
+                tools = toolbox.get_tool( tool_id, tool_version=tool_version, get_all_versions=True )
+            if tools:
+                tool = toolbox.get_tool( tool_id, tool_version=tool_version, get_all_versions=False )
+                if len( tools ) > 1:
+                    tool_version_select_field = self.__build_tool_version_select_field( tools, tool.id, set_selected )
+                break
+        return tool_version_select_field, tools, tool
+
+    def _path_template_kwds( self ):
+        return {
+            "model_tools_path": MODEL_TOOLS_PATH,
+        }
+
+    def _get_tool_shed_repository( self, tool_shed, name, owner, installed_changeset_revision ):
+        # The abstract toolbox doesn't have a dependency on the database, so
+        # override _get_tool_shed_repository here to provide this information.
+
+        return repository_util.get_installed_repository(
+            self.app,
+            tool_shed=tool_shed,
+            name=name,
+            owner=owner,
+            installed_changeset_revision=installed_changeset_revision
+        )
+
+    def __build_tool_version_select_field( self, tools, tool_id, set_selected ):
+        """Build a SelectField whose options are the ids for the received list of tools."""
+        options = []
+        refresh_on_change_values = []
+        for tool in tools:
+            options.insert( 0, ( tool.version, tool.id ) )
+            refresh_on_change_values.append( tool.id )
+        select_field = SelectField( name='tool_id', refresh_on_change=True, refresh_on_change_values=refresh_on_change_values )
+        for option_tup in options:
+            selected = set_selected and option_tup[ 1 ] == tool_id
+            if selected:
+                select_field.add_option( 'version %s' % option_tup[ 0 ], option_tup[ 1 ], selected=True )
+            else:
+                select_field.add_option( 'version %s' % option_tup[ 0 ], option_tup[ 1 ] )
+        return select_field
+
+
+class DefaultToolState( object ):
+    """
+    Keeps track of the state of a user's interaction with a tool between
+    requests.
+    """
+    def __init__( self ):
+        self.page = 0
+        self.rerun_remap_job_id = None
+        self.inputs = None
+
+    def encode( self, tool, app ):
+        """
+        Convert the data to a string
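+        (e.g. a sketch: '{"__page__": 0, "__rerun_remap_job_id__": null, ...}',
+        where the remaining keys are the serialized tool parameters)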
+        """
+        # Convert parameters to a dictionary of strings, and save the current
+        # page in that dict
+        value = params_to_strings( tool.inputs, self.inputs, app )
+        value["__page__"] = self.page
+        value["__rerun_remap_job_id__"] = self.rerun_remap_job_id
+        return json.dumps( value )
+
+    def decode( self, value, tool, app ):
+        """
+        Restore the state from a string
+        """
+        values = json_fix( json.loads( value ) )
+        self.page = values.pop( "__page__" )
+        if '__rerun_remap_job_id__' in values:
+            self.rerun_remap_job_id = values.pop( "__rerun_remap_job_id__" )
+        else:
+            self.rerun_remap_job_id = None
+        self.inputs = params_from_strings( tool.inputs, values, app, ignore_errors=True )
+
+    def copy( self ):
+        """
+        Shallow copy of the state
+        """
+        new_state = DefaultToolState()
+        new_state.page = self.page
+        new_state.rerun_remap_job_id = self.rerun_remap_job_id
+        new_state.inputs = self.inputs
+        return new_state
+
+
+class Tool( object, Dictifiable ):
+    """
+    Represents a computational tool that can be executed through Galaxy.
+    """
+
+    tool_type = 'default'
+    requires_setting_metadata = True
+    default_tool_action = DefaultToolAction
+    dict_collection_visible_keys = ( 'id', 'name', 'version', 'description', 'labels' )
+
+    def __init__( self, config_file, tool_source, app, guid=None, repository_id=None, allow_code_files=True ):
+        """Load a tool from the config named by `config_file`"""
+        # Determine the full path of the directory where the tool config is
+        self.config_file = config_file
+        self.tool_dir = os.path.dirname( config_file )
+        self.app = app
+        self.repository_id = repository_id
+        self._allow_code_files = allow_code_files
+        # setup initial attribute values
+        self.inputs = odict()
+        self.stdio_exit_codes = list()
+        self.stdio_regexes = list()
+        self.inputs_by_page = list()
+        self.display_by_page = list()
+        self.action = '/tool_runner/index'
+        self.target = 'galaxy_main'
+        self.method = 'post'
+        self.labels = []
+        self.check_values = True
+        self.nginx_upload = False
+        self.input_required = False
+        self.display_interface = True
+        self.require_login = False
+        self.rerun = False
+        # Define a place to keep track of all input parameters.  These
+        # differ from the inputs dictionary in that inputs can be page
+        # elements like conditionals, but input_params are basic form
+        # parameters like SelectField objects.  This enables us to more
+        # easily ensure that parameter dependencies like index files or
+        # tool_data_table_conf.xml entries exist.
+        self.input_params = []
+        # Attributes of tools installed from Galaxy tool sheds.
+        self.tool_shed = None
+        self.repository_name = None
+        self.repository_owner = None
+        self.changeset_revision = None
+        self.installed_changeset_revision = None
+        # The tool.id value will be the value of guid, but we'll keep the
+        # guid attribute since it is useful to have.
+        self.guid = guid
+        self.old_id = None
+        self.version = None
+        self.dependencies = []
+        # Enable easy access to this tool's version lineage.
+        self.lineage_ids = []
+        # populate toolshed repository info, if available
+        self.populate_tool_shed_info()
+        # add tool resource parameters
+        self.populate_resource_parameters( tool_source )
+        # Parse XML element containing configuration
+        try:
+            self.parse( tool_source, guid=guid )
+        except Exception as e:
+            global_tool_errors.add_error(config_file, "Tool Loading", e)
+            raise e
+        self.history_manager = histories.HistoryManager( app )
+        self._view = views.DependencyResolversView(app)
+
+    @property
+    def sa_session( self ):
+        """Returns a SQLAlchemy session"""
+        return self.app.model.context
+
+    @property
+    def tool_version( self ):
+        """Return a ToolVersion if one exists for our id"""
+        return self.app.install_model.context.query( self.app.install_model.ToolVersion ) \
+                                             .filter( self.app.install_model.ToolVersion.table.c.tool_id == self.id ) \
+                                             .first()
+
+    @property
+    def tool_versions( self ):
+        # If we have versions, return them.
+        tool_version = self.tool_version
+        if tool_version:
+            return tool_version.get_versions( self.app )
+        return []
+
+    @property
+    def tool_shed_repository( self ):
+        # If this tool is included in an installed tool shed repository, return it.
+        if self.tool_shed:
+            return repository_util.get_installed_repository( self.app,
+                                                             tool_shed=self.tool_shed,
+                                                             name=self.repository_name,
+                                                             owner=self.repository_owner,
+                                                             installed_changeset_revision=self.installed_changeset_revision )
+        return None
+
+    @property
+    def produces_collections_of_unknown_type( self ):
+
+        def output_is_dynamic_collection(output):
+            if not output.collection:
+                return False
+            if output.structure.collection_type:
+                return False
+            return True
+
+        return any( map( output_is_dynamic_collection, self.outputs.values() ) )
+
+    @property
+    def produces_collections_with_unknown_structure( self ):
+
+        def output_is_dynamic(output):
+            if not output.collection:
+                return False
+            return output.dynamic_structure
+
+        return any( map( output_is_dynamic, self.outputs.values() ) )
+
+    @property
+    def valid_input_states( self ):
+        return model.Dataset.valid_input_states
+
+    def __get_job_tool_configuration(self, job_params=None):
+        """Generalized method for getting this tool's job configuration.
+
+        :type job_params: dict or None
+        :returns: `galaxy.jobs.JobToolConfiguration` -- JobToolConfiguration that matches this `Tool` and the given `job_params`
+        """
+        rval = None
+        if len(self.job_tool_configurations) == 1:
+            # If there's only one config, use it rather than wasting time on comparisons
+            rval = self.job_tool_configurations[0]
+        elif job_params is None:
+            for job_tool_config in self.job_tool_configurations:
+                if not job_tool_config.params:
+                    rval = job_tool_config
+                    break
+        else:
+            for job_tool_config in self.job_tool_configurations:
+                if job_tool_config.params:
+                    # There are job params and this config has params defined
+                    for param, value in job_params.items():
+                        if param not in job_tool_config.params or job_tool_config.params[param] != job_params[param]:
+                            break
+                    else:
+                        # All params match, use this config
+                        rval = job_tool_config
+                        break
+                else:
+                    rval = job_tool_config
+        assert rval is not None, 'Could not get a job tool configuration for Tool %s with job_params %s, this is a bug' % (self.id, job_params)
+        return rval
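+
+    # Illustrative note (hypothetical params): with job_params={ 'source': 'trackster' },
+    # a JobToolConfiguration whose params contain that same key/value is selected;
+    # configs whose params do not all match are skipped, and a params-less config
+    # serves as the fallback.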
+
+    def get_job_handler(self, job_params=None):
+        """Get a suitable job handler for this `Tool` given the provided `job_params`.
+
+        If multiple handlers are valid for the combination of `Tool` and `job_params`
+        (e.g. the defined handler is a handler tag), one will be selected at random.
+
+        :param job_params: Any params specific to this job (e.g. the job source)
+        :type job_params: dict or None
+
+        :returns: str -- The id of a job handler for a job run of this `Tool`
+        """
+        # convert tag to ID if necessary
+        return self.app.job_config.get_handler(self.__get_job_tool_configuration(job_params=job_params).handler)
+
+    def get_job_destination(self, job_params=None):
+        """
+        :returns: galaxy.jobs.JobDestination -- The destination definition and runner parameters.
+        """
+        return self.app.job_config.get_destination(self.__get_job_tool_configuration(job_params=job_params).destination)
+
+    def get_panel_section( self ):
+        return self.app.toolbox.get_integrated_section_for_tool( self )
+
+    def allow_user_access( self, user, attempting_access=True ):
+        """
+        :returns: bool -- Whether the user is allowed to access the tool.
+        """
+        if self.require_login and user is None:
+            return False
+        return True
+
+    def parse( self, tool_source, guid=None ):
+        """
+        Read tool configuration from the `tool_source` and fill in `self`.
+        """
+        self.profile = float( tool_source.parse_profile() )
+        # Get the UNIQUE id for the tool
+        self.old_id = tool_source.parse_id()
+        if guid is None:
+            self.id = self.old_id
+        else:
+            self.id = guid
+        if not self.id:
+            raise Exception( "Missing tool 'id' for tool at '%s'" % tool_source )
+
+        if self.profile >= 16.04 and VERSION_MAJOR < self.profile:
+            template = "The tool %s targets version %s of Galaxy; you should upgrade Galaxy to ensure proper functioning of this tool."
+            message = template % (self.id, self.profile)
+            log.warning(message)
+
+        # Get the (user visible) name of the tool
+        self.name = tool_source.parse_name()
+        if not self.name:
+            raise Exception( "Missing tool 'name' for tool with id '%s' at '%s'" % (self.id, tool_source) )
+
+        self.version = tool_source.parse_version()
+        if not self.version:
+            if self.profile < 16.04:
+                # For backward compatibility, some tools may not have versions yet.
+                self.version = "1.0.0"
+            else:
+                raise Exception( "Missing tool 'version' for tool with id '%s' at '%s'" % (self.id, tool_source) )
+
+        self.edam_operations = tool_source.parse_edam_operations()
+        self.edam_topics = tool_source.parse_edam_topics()
+
+        # Support multi-byte tools
+        self.is_multi_byte = tool_source.parse_is_multi_byte()
+        # Legacy feature, ignored by UI.
+        self.force_history_refresh = False
+
+        self.display_interface = tool_source.parse_display_interface( default=self.display_interface )
+
+        self.require_login = tool_source.parse_require_login( self.require_login )
+
+        request_param_translation_elem = tool_source.parse_request_param_translation_elem()
+        if request_param_translation_elem is not None:
+            # Load input translator, used by datasource tools to change names/values of incoming parameters
+            self.input_translator = ToolInputTranslator.from_element( request_param_translation_elem )
+        else:
+            self.input_translator = None
+
+        self.parse_command( tool_source )
+        self.environment_variables = self.parse_environment_variables( tool_source )
+
+        # Parameters used to build URL for redirection to external app
+        redirect_url_params = tool_source.parse_redirect_url_params_elem()
+        if redirect_url_params is not None and redirect_url_params.text is not None:
+            # get rid of leading / trailing white space
+            redirect_url_params = redirect_url_params.text.strip()
+            # Replace remaining white space with something we can safely split on later
+            # when we are building the params
+            self.redirect_url_params = redirect_url_params.replace( ' ', '**^**' )
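+            # e.g. (illustrative) 'param1=value1 param2=value2' is stored as
+            # 'param1=value1**^**param2=value2' and split apart again in
+            # parse_redirect_url() below.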
+        else:
+            self.redirect_url_params = ''
+
+        # Short description of the tool
+        self.description = tool_source.parse_description()
+
+        # Versioning for tools
+        self.version_string_cmd = None
+        version_command = tool_source.parse_version_command()
+        if version_command is not None:
+            self.version_string_cmd = version_command.strip()
+
+            version_cmd_interpreter = tool_source.parse_version_command_interpreter()
+            if version_cmd_interpreter:
+                executable = self.version_string_cmd.split()[0]
+                abs_executable = os.path.abspath(os.path.join(self.tool_dir, executable))
+                command_line = self.version_string_cmd.replace(executable, abs_executable, 1)
+                self.version_string_cmd = version_cmd_interpreter + " " + command_line
+
+        # Parallelism for tasks, read from tool config.
+        self.parallelism = tool_source.parse_parallelism()
+
+        # Get JobToolConfiguration(s) valid for this particular Tool.  At least
+        # a 'default' will be provided that uses the 'default' handler and
+        # 'default' destination.  I thought about moving this to the
+        # job_config, but it makes more sense to store here. -nate
+        self_ids = [ self.id.lower() ]
+        if self.old_id != self.id:
+            # Handle toolshed guids
+            self_ids = [ self.id.lower(), self.id.lower().rsplit('/', 1)[0], self.old_id.lower() ]
+        self.all_ids = self_ids
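+        # Illustrative example (hypothetical guid): for a guid like
+        # 'toolshed.example.org/repos/owner/repo/mytool/1.0.0', self_ids also
+        # contains the guid minus its trailing '/1.0.0' and the plain old_id.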
+
+        # In the toolshed context, there is no job config.
+        if hasattr( self.app, 'job_config' ):
+            self.job_tool_configurations = self.app.job_config.get_job_tool_configurations(self_ids)
+
+        # Is this a 'hidden' tool (hidden in tool menu)
+        self.hidden = tool_source.parse_hidden()
+
+        self.__parse_legacy_features(tool_source)
+
+        # Load any tool specific options (optional)
+        self.options = dict(
+            sanitize=tool_source.parse_sanitize(),
+            refresh=tool_source.parse_refresh(),
+        )
+        self.options = Bunch( **self.options )
+
+        # Parse tool inputs (if there are any required)
+        self.parse_inputs( tool_source )
+
+        # Parse tool help
+        self.parse_help( tool_source )
+
+        # Description of outputs produced by an invocation of the tool
+        self.parse_outputs( tool_source )
+
+        # Parse result handling for tool exit codes and stdout/stderr messages:
+        self.parse_stdio( tool_source )
+
+        self.strict_shell = tool_source.parse_strict_shell()
+
+        # Any extra generated config files for the tool
+        self.__parse_config_files(tool_source)
+        # Action
+        action = tool_source.parse_action_module()
+        if action is None:
+            self.tool_action = self.default_tool_action()
+        else:
+            module, cls = action
+            mod = __import__( module, globals(), locals(), [cls])
+            self.tool_action = getattr( mod, cls )()
+        # Tests
+        self.__parse_tests(tool_source)
+
+        # Requirements (dependencies)
+        requirements, containers = tool_source.parse_requirements_and_containers()
+        self.requirements = requirements
+        self.containers = containers
+
+        self.citations = self._parse_citations( tool_source )
+
+        # Determine if this tool can be used in workflows
+        self.is_workflow_compatible = self.check_workflow_compatible(tool_source)
+        self.__parse_trackster_conf( tool_source )
+
+    def __parse_legacy_features(self, tool_source):
+        self.code_namespace = dict()
+        self.hook_map = {}
+        self.uihints = {}
+
+        if not hasattr(tool_source, 'root'):
+            return
+
+        # TODO: Move following logic into XmlToolSource.
+        root = tool_source.root
+        # Load any tool specific code (optional). Edit: INS 5/29/2007,
+        # allow code files to have access to the individual tool's
+        # "module" if it has one.  Allows us to reuse code files, etc.
+        if self._allow_code_files:
+            for code_elem in root.findall("code"):
+                for hook_elem in code_elem.findall("hook"):
+                    for key, value in hook_elem.items():
+                        # map hook to function
+                        self.hook_map[key] = value
+                file_name = code_elem.get("file")
+                code_path = os.path.join( self.tool_dir, file_name )
+                execfile( code_path, self.code_namespace )
+
+        # User interface hints
+        uihints_elem = root.find( "uihints" )
+        if uihints_elem is not None:
+            for key, value in uihints_elem.attrib.iteritems():
+                self.uihints[ key ] = value
+
+    def __parse_tests(self, tool_source):
+        self.__tests_source = tool_source
+        self.__tests_populated = False
+
+    def __parse_config_files(self, tool_source):
+        self.config_files = []
+        if not hasattr(tool_source, 'root'):
+            return
+
+        root = tool_source.root
+        conf_parent_elem = root.find("configfiles")
+        if conf_parent_elem is not None:
+            inputs_elem = conf_parent_elem.find( "inputs" )
+            if inputs_elem is not None:
+                name = inputs_elem.get( "name" )
+                filename = inputs_elem.get( "filename", None )
+                format = inputs_elem.get("format", "json")
+                content = dict(format=format)
+                self.config_files.append( ( name, filename, content ) )
+            for conf_elem in conf_parent_elem.findall( "configfile" ):
+                name = conf_elem.get( "name" )
+                filename = conf_elem.get( "filename", None )
+                content = conf_elem.text
+                self.config_files.append( ( name, filename, content ) )
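+        # Illustrative result (hypothetical names): each entry is a
+        # ( name, filename, content ) tuple, e.g. ( 'job_inputs', None, { 'format': 'json' } )
+        # for an <inputs> element, or ( 'my_script', None, '...template text...' )
+        # for a <configfile> element.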
+
+    def __parse_trackster_conf(self, tool_source):
+        self.trackster_conf = None
+        if not hasattr(tool_source, 'root'):
+            return
+
+        # Trackster configuration.
+        trackster_conf = tool_source.root.find( "trackster_conf" )
+        if trackster_conf is not None:
+            self.trackster_conf = TracksterConfig.parse( trackster_conf )
+
+    @property
+    def tests( self ):
+        if not self.__tests_populated:
+            tests_source = self.__tests_source
+            if tests_source:
+                try:
+                    self.__tests = parse_tests( self, tests_source )
+                except Exception:
+                    self.__tests = None
+                    log.exception( "Failed to parse tool tests" )
+            else:
+                self.__tests = None
+            self.__tests_populated = True
+        return self.__tests
+
+    def parse_command( self, tool_source ):
+        """
+        Parse the command line template and the optional interpreter from the
+        tool source.
+        """
+        # Command line (template). Optional for tools that do not invoke a local program
+        command = tool_source.parse_command()
+        if command is not None:
+            self.command = command.lstrip()  # get rid of leading whitespace
+            # Must prepend this AFTER processing the Cheetah command template
+            self.interpreter = tool_source.parse_interpreter()
+        else:
+            self.command = ''
+            self.interpreter = None
+
+    def parse_environment_variables( self, tool_source ):
+        return tool_source.parse_environment_variables()
+
+    def parse_inputs( self, tool_source ):
+        """
+        Parse the "<inputs>" element and create appropriate `ToolParameter`s.
+        This implementation supports multiple pages and grouping constructs.
+        """
+        # Load parameters (optional)
+        pages = tool_source.parse_input_pages()
+        enctypes = set()
+        if pages.inputs_defined:
+            if hasattr(pages, "input_elem"):
+                input_elem = pages.input_elem
+                # Handle properties of the input form
+                self.check_values = string_as_bool( input_elem.get("check_values", self.check_values ) )
+                self.nginx_upload = string_as_bool( input_elem.get( "nginx_upload", self.nginx_upload ) )
+                self.action = input_elem.get( 'action', self.action )
+                # If we have an nginx upload, save the action as a tuple instead of
+                # a string. The actual action needs to get url_for run to add any
+                # prefixes, and we want to avoid adding the prefix to the
+                # nginx_upload_path. This logic is handled in the tool_form.mako
+                # template.
+                if self.nginx_upload and self.app.config.nginx_upload_path:
+                    if '?' in urllib.unquote_plus( self.action ):
+                        raise Exception( 'URL parameters in a non-default tool action cannot be used '
+                                         'in conjunction with nginx upload.  Please convert them to '
+                                         'hidden POST parameters' )
+                    self.action = (self.app.config.nginx_upload_path + '?nginx_redir=',
+                                   urllib.unquote_plus(self.action))
+                self.target = input_elem.get( "target", self.target )
+                self.method = input_elem.get( "method", self.method )
+                # Parse the actual parameters
+                # Handle multiple page case
+            for page_source in pages.page_sources:
+                inputs = self.parse_input_elem( page_source, enctypes )
+                display = page_source.parse_display()
+                self.inputs_by_page.append( inputs )
+                self.inputs.update( inputs )
+                self.display_by_page.append( display )
+        else:
+            self.inputs_by_page.append( self.inputs )
+            self.display_by_page.append( None )
+        self.display = self.display_by_page[0]
+        self.npages = len( self.inputs_by_page )
+        self.last_page = len( self.inputs_by_page ) - 1
+        self.has_multiple_pages = bool( self.last_page )
+        # Determine the needed enctype for the form
+        if len( enctypes ) == 0:
+            self.enctype = "application/x-www-form-urlencoded"
+        elif len( enctypes ) == 1:
+            self.enctype = enctypes.pop()
+        else:
+            raise Exception( "Conflicting required enctypes: %s" % str( enctypes ) )
+        # Check if the tool either has no parameters or only hidden (and
+        # thus hardcoded) parameters.  FIXME: hidden parameters aren't
+        # parameters at all really, and should be passed in a different
+        # way, making this check easier.
+        template_macros = {}
+        if hasattr(tool_source, 'root'):
+            template_macros = template_macro_params(tool_source.root)
+        self.template_macro_params = template_macros
+        for param in self.inputs.values():
+            if not isinstance( param, ( HiddenToolParameter, BaseURLToolParameter ) ):
+                self.input_required = True
+                break
+
+    def parse_help( self, tool_source ):
+        """
+        Parse the help text for the tool. Formatted in reStructuredText, but
+        stored as Mako to allow for dynamic image paths.
+        This implementation supports multiple pages.
+        """
+        # TODO: Allow raw HTML or an external link.
+        self.__help = HELP_UNINITIALIZED
+        self.__help_by_page = HELP_UNINITIALIZED
+        self.__help_source = tool_source
+
+    def parse_outputs( self, tool_source ):
+        """
+        Parse <outputs> elements and fill in self.outputs (keyed by name)
+        """
+        self.outputs, self.output_collections = tool_source.parse_outputs(self)
+
+    # TODO: Include the tool's name in any parsing warnings.
+    def parse_stdio( self, tool_source ):
+        """
+        Parse <stdio> element(s) and fill in self.stdio_exit_codes and
+        self.stdio_regexes. Exit codes have a range and an error level
+        (fatal or warning).  Stdout and stderr regexes have a regular
+        expression and an error level (fatal or warning).
+        """
+        exit_codes, regexes = tool_source.parse_stdio()
+        self.stdio_exit_codes = exit_codes
+        self.stdio_regexes = regexes
+
+    def _parse_citations( self, tool_source ):
+        # TODO: Move following logic into ToolSource abstraction.
+        if not hasattr(tool_source, 'root'):
+            return []
+
+        root = tool_source.root
+        citations = []
+        citations_elem = root.find("citations")
+        if citations_elem is None:
+            return citations
+
+        for citation_elem in citations_elem:
+            if citation_elem.tag != "citation":
+                # Skip any unexpected child elements.
+                continue
+            citation = self.app.citations_manager.parse_citation( citation_elem, self.tool_dir )
+            if citation:
+                citations.append( citation )
+        return citations
+
+    def parse_input_elem( self, page_source, enctypes, context=None ):
+        """
+        Parse a parent element whose children are inputs -- these could be
+        groups (repeat, conditional) or param elements. Groups will be parsed
+        recursively.
+        """
+        rval = odict()
+        context = ExpressionContext( rval, context )
+        for input_source in page_source.parse_input_sources():
+            # Repeat group
+            input_type = input_source.parse_input_type()
+            if input_type == "repeat":
+                group = Repeat()
+                group.name = input_source.get( "name" )
+                group.title = input_source.get( "title" )
+                group.help = input_source.get( "help", None )
+                page_source = input_source.parse_nested_inputs_source()
+                group.inputs = self.parse_input_elem( page_source, enctypes, context )
+                group.default = int( input_source.get( "default", 0 ) )
+                group.min = int( input_source.get( "min", 0 ) )
+                # Use float instead of int so that 'inf' can be used for no max
+                group.max = float( input_source.get( "max", "inf" ) )
+                assert group.min <= group.max, \
+                    ValueError( "Min repeat count must be less-than-or-equal to the max." )
+                # Force default to be within min-max range
+                group.default = min( max( group.default, group.min ), group.max )
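+                # e.g. (illustrative) default=5 with min=0 and max=3 is clamped to 3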
+                rval[group.name] = group
+            elif input_type == "conditional":
+                group = Conditional()
+                group.name = input_source.get( "name" )
+                group.value_ref = input_source.get( 'value_ref', None )
+                group.value_ref_in_group = input_source.get_bool( 'value_ref_in_group', True )
+                value_from = input_source.get("value_from", None)
+                if value_from:
+                    value_from = value_from.split( ':' )
+                    group.value_from = locals().get( value_from[0] )
+                    group.test_param = rval[ group.value_ref ]
+                    group.test_param.refresh_on_change = True
+                    for attr in value_from[1].split( '.' ):
+                        group.value_from = getattr( group.value_from, attr )
+                    for case_value, case_inputs in group.value_from( context, group, self ).iteritems():
+                        case = ConditionalWhen()
+                        case.value = case_value
+                        if case_inputs:
+                            page_source = XmlPageSource( ElementTree.XML( "<when>%s</when>" % case_inputs ) )
+                            case.inputs = self.parse_input_elem( page_source, enctypes, context )
+                        else:
+                            case.inputs = odict()
+                        group.cases.append( case )
+                else:
+                    # Should have one child "input" which determines the case
+                    test_param_input_source = input_source.parse_test_input_source()
+                    group.test_param = self.parse_param_elem( test_param_input_source, enctypes, context )
+                    if group.test_param.optional:
+                        log.warning("Tool with id %s declares a conditional test parameter as optional; this is invalid and will be ignored." % self.id)
+                        group.test_param.optional = False
+                    possible_cases = list( group.test_param.legal_values )  # store possible cases, undefined whens will have no inputs
+                    # Must refresh when test_param changes
+                    group.test_param.refresh_on_change = True
+                    # And a set of possible cases
+                    for (value, case_inputs_source) in input_source.parse_when_input_sources():
+                        case = ConditionalWhen()
+                        case.value = value
+                        case.inputs = self.parse_input_elem( case_inputs_source, enctypes, context )
+                        group.cases.append( case )
+                        try:
+                            possible_cases.remove( case.value )
+                        except Exception:
+                            log.warning( "Tool %s: a when tag has been defined for '%s (%s) --> %s', but does not appear to be selectable." %
+                                         ( self.id, group.name, group.test_param.name, case.value ) )
+                    for unspecified_case in possible_cases:
+                        log.warning( "Tool %s: a when tag has not been defined for '%s (%s) --> %s', assuming empty inputs." %
+                                     ( self.id, group.name, group.test_param.name, unspecified_case ) )
+                        case = ConditionalWhen()
+                        case.value = unspecified_case
+                        case.inputs = odict()
+                        group.cases.append( case )
+                rval[group.name] = group
+            elif input_type == "section":
+                group = Section()
+                group.name = input_source.get( "name" )
+                group.title = input_source.get( "title" )
+                group.help = input_source.get( "help", None )
+                group.expanded = input_source.get_bool( "expanded", False )
+                page_source = input_source.parse_nested_inputs_source()
+                group.inputs = self.parse_input_elem( page_source, enctypes, context )
+                rval[group.name] = group
+            elif input_type == "upload_dataset":
+                elem = input_source.elem()
+                group = UploadDataset()
+                group.name = elem.get( "name" )
+                group.title = elem.get( "title" )
+                group.file_type_name = elem.get( 'file_type_name', group.file_type_name )
+                group.default_file_type = elem.get( 'default_file_type', group.default_file_type )
+                group.metadata_ref = elem.get( 'metadata_ref', group.metadata_ref )
+                rval[ group.file_type_name ].refresh_on_change = True
+                rval[ group.file_type_name ].refresh_on_change_values = \
+                    self.app.datatypes_registry.get_composite_extensions()
+                group_page_source = XmlPageSource(elem)
+                group.inputs = self.parse_input_elem( group_page_source, enctypes, context )
+                rval[ group.name ] = group
+            elif input_type == "param":
+                param = self.parse_param_elem( input_source, enctypes, context )
+                rval[param.name] = param
+                if hasattr( param, 'data_ref' ):
+                    param.ref_input = context[ param.data_ref ]
+                self.input_params.append( param )
+        return rval
+
+    def parse_param_elem( self, input_source, enctypes, context ):
+        """
+        Parse a single "<param>" element and return a ToolParameter instance.
+        Also, if the parameter has a 'required_enctype' add it to the set
+        enctypes.
+        """
+        param = ToolParameter.build( self, input_source )
+        param_enctype = param.get_required_enctype()
+        if param_enctype:
+            enctypes.add( param_enctype )
+        # If parameter depends on any other paramters, we must refresh the
+        # form when it changes
+        for name in param.get_dependencies():
+            # Let it throw an exception, but give a hint about what the problem might be
+            if name not in context:
+                log.error("Could not find dependency '%s' of parameter '%s' in tool %s" % (name, param.name, self.name) )
+            context[ name ].refresh_on_change = True
+        return param
+
+    def populate_resource_parameters( self, tool_source ):
+        root = getattr( tool_source, 'root', None )
+        if root is not None and hasattr( self.app, 'job_config' ) and hasattr( self.app.job_config, 'get_tool_resource_xml' ):
+            resource_xml = self.app.job_config.get_tool_resource_xml( root.get( 'id' ), self.tool_type )
+            if resource_xml is not None:
+                inputs = root.find( 'inputs' )
+                if inputs is None:
+                    inputs = ElementTree.fromstring( '<inputs/>' )
+                    root.append( inputs )
+                inputs.append( resource_xml )
+
+    def populate_tool_shed_info( self ):
+        if self.repository_id is not None and self.app.name == 'galaxy':
+            repository_id = self.app.security.decode_id( self.repository_id )
+            tool_shed_repository = self.app.install_model.context.query( self.app.install_model.ToolShedRepository ).get( repository_id )
+            if tool_shed_repository:
+                self.tool_shed = tool_shed_repository.tool_shed
+                self.repository_name = tool_shed_repository.name
+                self.repository_owner = tool_shed_repository.owner
+                self.changeset_revision = tool_shed_repository.changeset_revision
+                self.installed_changeset_revision = tool_shed_repository.installed_changeset_revision
+
+    @property
+    def help(self):
+        if self.__help is HELP_UNINITIALIZED:
+            self.__ensure_help()
+        return self.__help
+
+    @property
+    def help_by_page(self):
+        if self.__help_by_page is HELP_UNINITIALIZED:
+            self.__ensure_help()
+        return self.__help_by_page
+
+    def __ensure_help(self):
+        with HELP_UNINITIALIZED:
+            if self.__help is HELP_UNINITIALIZED:
+                self.__initialize_help()
+
+    def __initialize_help(self):
+        tool_source = self.__help_source
+        self.__help = None
+        self.__help_by_page = []
+        help_header = ""
+        help_footer = ""
+        help_text = tool_source.parse_help()
+        if help_text is not None:
+            if self.repository_id and help_text.find( '.. image:: ' ) >= 0:
+                # Handle tool help image display for tools that are contained in repositories in the tool shed or installed into Galaxy.
+                try:
+                    help_text = suc.set_image_paths( self.app, self.repository_id, help_text )
+                except Exception as e:
+                    log.exception( "Exception in parse_help, so images may not be properly displayed:\n%s" % str( e ) )
+            try:
+                self.__help = Template( rst_to_html(help_text), input_encoding='utf-8',
+                                        output_encoding='utf-8', default_filters=[ 'decode.utf8' ],
+                                        encoding_errors='replace' )
+            except Exception:
+                log.exception( "error in help for tool %s" % self.name )
+
+            # Handle deprecated multi-page help text in XML case.
+            if hasattr(tool_source, "root"):
+                help_elem = tool_source.root.find("help")
+                help_header = help_text
+                help_pages = help_elem.findall( "page" )
+                # Multiple help page case
+                if help_pages:
+                    for help_page in help_pages:
+                        self.__help_by_page.append( help_page.text )
+                        help_footer = help_footer + help_page.tail
+                # Each page has to be rendered all together because of backreferences allowed by rst
+                try:
+                    self.__help_by_page = [ Template( rst_to_html( help_header + x + help_footer ),
+                                                      input_encoding='utf-8', output_encoding='utf-8',
+                                                      default_filters=[ 'decode.utf8' ],
+                                                      encoding_errors='replace' )
+                                            for x in self.__help_by_page ]
+                except Exception:
+                    log.exception( "error in multi-page help for tool %s" % self.name )
+        # Pad out help pages to match npages ... could this be done better?
+        while len( self.__help_by_page ) < self.npages:
+            self.__help_by_page.append( self.__help )
+
+    def find_output_def( self, name ):
+        # name is JobToOutputDatasetAssociation name.
+        # TODO: too defensive; just throw IndexError and catch it somewhere
+        # up that stack.
+        if ToolOutputCollectionPart.is_named_collection_part_name( name ):
+            collection_name, part = ToolOutputCollectionPart.split_output_name( name )
+            collection_def = self.output_collections.get( collection_name, None )
+            if not collection_def:
+                return None
+            return collection_def.outputs.get( part, None )
+        else:
+            return self.outputs.get( name, None )
+
+    def check_workflow_compatible( self, tool_source ):
+        """
+        Determine if a tool can be used in workflows. External tools and the
+        upload tool are currently not supported by workflows.
+        """
+        # Multiple page tools are not supported -- we're eliminating most
+        # of these anyway
+        if self.has_multiple_pages:
+            return False
+        # This is probably the best bet for detecting external web tools
+        # right now
+        if self.tool_type.startswith( 'data_source' ):
+            return False
+
+        if self.produces_collections_of_unknown_type:
+            # Getting there...
+            return False
+
+        if hasattr( tool_source, "root"):
+            root = tool_source.root
+            if not string_as_bool( root.get( "workflow_compatible", "True" ) ):
+                return False
+
+        # TODO: Any way to capture tools that dynamically change their own
+        #       outputs?
+        return True
+
+    def new_state( self, trans ):
+        """
+        Create a new `DefaultToolState` for this tool. It will be initialized
+        with default values for inputs.
+        """
+        state = DefaultToolState()
+        state.inputs = {}
+        self.fill_in_new_state( trans, self.inputs, state.inputs )
+        return state
+
+    def fill_in_new_state( self, trans, inputs, state, context=None ):
+        """
+        Fill in a tool state dictionary with default values for all parameters
+        in the dictionary `inputs`. Grouping elements are filled in recursively.
+        """
+        context = ExpressionContext( state, context )
+        for input in inputs.itervalues():
+            state[ input.name ] = input.get_initial_value( trans, context )
+
+    def get_param( self, key ):
+        """
+        Returns the parameter named `key` or None if there is no such
+        parameter.
+        """
+        return self.inputs.get( key, None )
+
+    def get_hook(self, name):
+        """
+        Returns an object from the code file referenced by `code_namespace`
+        (this will normally be a callable object)
+        """
+        if self.code_namespace:
+            # Try to look up hook in self.hook_map, otherwise resort to default
+            if name in self.hook_map and self.hook_map[name] in self.code_namespace:
+                return self.code_namespace[self.hook_map[name]]
+            elif name in self.code_namespace:
+                return self.code_namespace[name]
+        return None
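+
+    # Illustrative example (hypothetical file/function names): a tool XML block
+    #   <code file="extra_code.py"><hook validate_input="my_validator" /></code>
+    # makes get_hook( 'validate_input' ) return code_namespace[ 'my_validator' ]
+    # loaded from extra_code.py (see __parse_legacy_features above).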
+
+    def visit_inputs( self, values, callback ):
+        """
+        Call the function `callback` on each parameter of this tool. Visits
+        grouping parameters recursively and constructs unique prefixes for
+        each nested set of parameters. The callback method is then called as:
+
+        `callback( level_prefix, parameter, parameter_value )`
+        """
+        # HACK: Yet another hack around check_values -- WHY HERE?
+        if self.check_values:
+            visit_input_values( self.inputs, values, callback )
+
+    def handle_input( self, trans, incoming, history=None ):
+        """
+        Process incoming parameters for this tool from the dict `incoming`,
+        update the tool state (or create if none existed), and either return
+        to the form or execute the tool (only if 'execute' was clicked and
+        there were no errors).
+        """
+        request_context = WorkRequestContext( app=trans.app, user=trans.user, history=history or trans.history )
+        rerun_remap_job_id = None
+        if 'rerun_remap_job_id' in incoming:
+            try:
+                rerun_remap_job_id = trans.app.security.decode_id( incoming[ 'rerun_remap_job_id' ] )
+            except Exception as exception:
+                log.error( str( exception ) )
+                raise exceptions.MessageException( 'Failure executing tool (attempting to rerun invalid job).' )
+
+        # Fixed set of input parameters may correspond to any number of jobs.
+        # Expand these out to individual parameters for given jobs (tool executions).
+        expanded_incomings, collection_info = expand_meta_parameters( trans, self, incoming )
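+        # e.g. (illustrative) mapping a tool over a three-element collection
+        # expands one web request into three per-job parameter dicts.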
+        if not expanded_incomings:
+            raise exceptions.MessageException( 'Tool execution failed, trying to run a tool over an empty collection.' )
+
+        # Remapping a single job to many jobs doesn't make sense, so disable
+        # remap if multi-runs of tools are being used.
+        if rerun_remap_job_id and len( expanded_incomings ) > 1:
+            raise exceptions.MessageException( 'Failure executing tool (cannot create multiple jobs when remapping existing job).' )
+
+        # Process incoming data
+        validation_timer = ExecutionTimer()
+        all_errors = []
+        all_params = []
+        for expanded_incoming in expanded_incomings:
+            params = {}
+            errors = {}
+            if self.input_translator:
+                self.input_translator.translate( expanded_incoming )
+            if not self.check_values:
+                # If `self.check_values` is false we don't do any checking or
+                # processing on input parameters.  This is used to pass raw values
+                # through to/from external sites.
+                params = expanded_incoming
+            else:
+                # Update state for all inputs on the current page taking new
+                # values from `incoming`.
+                self.populate_state( request_context, self.inputs, expanded_incoming, params, errors )
+
+                # If the tool provides a `validate_input` hook, call it.
+                validate_input = self.get_hook( 'validate_input' )
+                if validate_input:
+                    validate_input( request_context, errors, params, self.inputs )
+            all_errors.append( errors )
+            all_params.append( params )
+        log.debug( 'Validated and populated state for tool request %s' % validation_timer )
+        # If there were errors, we stay on the same page and display them
+        if any( all_errors ):
+            err_data = { key: value for d in all_errors for ( key, value ) in d.iteritems() }
+            raise exceptions.MessageException( ', '.join( [ msg for msg in err_data.itervalues() ] ), err_data=err_data )
+        else:
+            execution_tracker = execute_job( trans, self, all_params, history=request_context.history, rerun_remap_job_id=rerun_remap_job_id, collection_info=collection_info )
+            if execution_tracker.successful_jobs:
+                return dict( out_data=execution_tracker.output_datasets,
+                             num_jobs=len( execution_tracker.successful_jobs ),
+                             job_errors=execution_tracker.execution_errors,
+                             jobs=execution_tracker.successful_jobs,
+                             output_collections=execution_tracker.output_collections,
+                             implicit_collections=execution_tracker.implicit_collections )
+            else:
+                raise exceptions.MessageException( execution_tracker.execution_errors[ 0 ] )
+
+    def handle_single_execution( self, trans, rerun_remap_job_id, params, history, mapping_over_collection, execution_cache=None ):
+        """
+        Return a pair with whether execution is successful as well as either
+        resulting output data or an error message indicating the problem.
+        """
+        try:
+            job, out_data = self.execute( trans, incoming=params, history=history, rerun_remap_job_id=rerun_remap_job_id, mapping_over_collection=mapping_over_collection, execution_cache=execution_cache )
+        except httpexceptions.HTTPFound as e:
+            # if it's a paste redirect exception, pass it up the stack
+            raise e
+        except ToolInputsNotReadyException as e:
+            return False, e
+        except Exception as e:
+            log.exception('Exception caught while attempting tool execution:')
+            message = 'Error executing tool: %s' % str(e)
+            return False, message
+        if isinstance( out_data, odict ):
+            return job, out_data.items()
+        else:
+            if isinstance( out_data, string_types ):
+                message = out_data
+            else:
+                message = 'Failure executing tool (invalid data returned from tool execution)'
+            return False, message
+
+    def find_fieldstorage( self, x ):
+        if isinstance( x, FieldStorage ):
+            raise InterruptedUpload( None )
+        elif isinstance(x, dict):
+            for y in x.values():
+                self.find_fieldstorage( y )
+        elif isinstance(x, list):
+            for y in x:
+                self.find_fieldstorage( y )
+
+    @property
+    def params_with_missing_data_table_entry( self ):
+        """
+        Return all parameters that are dynamically generated select lists whose
+        options require an entry not currently in the tool_data_table_conf.xml file.
+        """
+        params = []
+        for input_param in self.input_params:
+            if isinstance( input_param, SelectToolParameter ) and input_param.is_dynamic:
+                options = input_param.options
+                if options and options.missing_tool_data_table_name and input_param not in params:
+                    params.append( input_param )
+        return params
+
+    @property
+    def params_with_missing_index_file( self ):
+        """
+        Return all parameters that are dynamically generated
+        select lists whose options refer to a missing .loc file.
+        """
+        params = []
+        for input_param in self.input_params:
+            if isinstance( input_param, SelectToolParameter ) and input_param.is_dynamic:
+                options = input_param.options
+                if options and options.missing_index_file and input_param not in params:
+                    params.append( input_param )
+        return params
+
+    def get_static_param_values( self, trans ):
+        """
+        Returns a map of parameter names and values if the tool does not
+        require any user input. Will raise an exception if any parameter
+        does require input.
+        """
+        args = dict()
+        for key, param in self.inputs.iteritems():
+            # BaseURLToolParameter is now a subclass of HiddenToolParameter, so
+            # we must check if param is a BaseURLToolParameter first
+            if isinstance( param, BaseURLToolParameter ):
+                args[key] = param.get_initial_value( trans, None )
+            elif isinstance( param, HiddenToolParameter ):
+                args[key] = model.User.expand_user_properties( trans.user, param.value )
+            else:
+                raise Exception( "Unexpected parameter type" )
+        return args
+
+    def execute( self, trans, incoming={}, set_output_hid=True, history=None, **kwargs ):
+        """
+        Execute the tool using parameter values in `incoming`. This just
+        dispatches to the `ToolAction` instance specified by
+        `self.tool_action`. In general this will create a `Job` that
+        when run will build the tool's outputs, e.g. `DefaultToolAction`.
+        """
+        return self.tool_action.execute( self, trans, incoming=incoming, set_output_hid=set_output_hid, history=history, **kwargs )
+
+    def params_to_strings( self, params, app ):
+        return params_to_strings( self.inputs, params, app )
+
+    def params_from_strings( self, params, app, ignore_errors=False ):
+        return params_from_strings( self.inputs, params, app, ignore_errors )
+
+    def check_and_update_param_values( self, values, trans, update_values=True, workflow_building_mode=False ):
+        """
+        Check that all parameters have values, and fill in with default
+        values where necessary. This could be called after loading values
+        from a database in case new parameters have been added.
+        """
+        messages = {}
+        request_context = WorkRequestContext( app=trans.app, user=trans.user, history=trans.history, workflow_building_mode=workflow_building_mode )
+
+        def validate_inputs( input, value, error, parent, context, prefixed_name, prefixed_label, **kwargs ):
+            if not error:
+                value, error = check_param( request_context, input, value, context )
+            if error:
+                if update_values:
+                    try:
+                        previous_value = value
+                        value = input.get_initial_value( request_context, context )
+                        if not prefixed_name.startswith( '__' ):
+                            messages[ prefixed_name ] = error if previous_value == value else '%s Using default: \'%s\'.' % ( error, value )
+                        parent[ input.name ] = value
+                    except Exception:
+                        messages[ prefixed_name ] = 'Attempt to replace invalid value for \'%s\' failed.' % ( prefixed_label )
+                else:
+                    messages[ prefixed_name ] = error
+
+        visit_input_values( self.inputs, values, validate_inputs )
+        return messages
+
+    def build_dependency_cache(self, **kwds):
+        if isinstance(self.app.toolbox.dependency_manager, CachedDependencyManager):
+            self.app.toolbox.dependency_manager.build_cache(
+                requirements=self.requirements,
+                installed_tool_dependencies=self.installed_tool_dependencies,
+                tool_dir=self.tool_dir,
+                job_directory=None,
+                metadata=False,
+                tool_instance=self,
+                **kwds
+            )
+
+    def build_dependency_shell_commands( self, job_directory=None, metadata=False ):
+        """
+        Return a list of commands to be run to populate the current environment to include this tool's requirements.
+        """
+        return self.app.toolbox.dependency_manager.dependency_shell_commands(
+            requirements=self.requirements,
+            installed_tool_dependencies=self.installed_tool_dependencies,
+            tool_dir=self.tool_dir,
+            job_directory=job_directory,
+            metadata=metadata,
+            tool_instance=self
+        )
+
+    @property
+    def installed_tool_dependencies(self):
+        if self.tool_shed_repository:
+            installed_tool_dependencies = self.tool_shed_repository.tool_dependencies_installed_or_in_error
+        else:
+            installed_tool_dependencies = None
+        return installed_tool_dependencies
+
+    @property
+    def tool_requirements(self):
+        """
+        Return all requirements of type package
+        """
+        reqs = [req.to_dict() for req in self.requirements if req.type == 'package']
+        return reqs
+
+    @property
+    def tool_requirements_status(self):
+        """
+        Return a list of dictionaries for all tool dependencies with their associated status
+        """
+        return self._view.get_requirements_status(self.tool_requirements, self.installed_tool_dependencies)
+
+    def build_redirect_url_params( self, param_dict ):
+        """
+        Substitute parameter values into self.redirect_url_params
+        """
+        if not self.redirect_url_params:
+            return
+        # Substitute parameter values into the url params
+        redirect_url_params = fill_template( self.redirect_url_params, context=param_dict )
+        # Remove newlines
+        redirect_url_params = redirect_url_params.replace( "\n", " " ).replace( "\r", " " )
+        return redirect_url_params
+
+    def parse_redirect_url( self, data, param_dict ):
+        """
+        Parse the REDIRECT_URL tool param. Tools that send data to an external
+        application via a redirect must include the following 3 tool params:
+
+        1) REDIRECT_URL - the url to which the data is being sent
+
+        2) DATA_URL - the url to which the receiving application will send an
+           http post to retrieve the Galaxy data
+
+        3) GALAXY_URL - the url to which the external application may post
+           data as a response
+        """
+        redirect_url = param_dict.get( 'REDIRECT_URL' )
+        redirect_url_params = self.build_redirect_url_params( param_dict )
+        # Add the parameters to the redirect url.  We're splitting the param
+        # string on '**^**' because the self.parse() method replaced white
+        # space with that separator.
+        params = redirect_url_params.split( '**^**' )
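+        # e.g. (illustrative) 'a=1**^**b=2' -> [ 'a=1', 'b=2' ] -> { 'a': '1', 'b': '2' }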
+        rup_dict = {}
+        for param in params:
+            p_list = param.split( '=', 1 )
+            p_name = p_list[0]
+            p_val = p_list[1]
+            rup_dict[ p_name ] = p_val
+        DATA_URL = param_dict.get( 'DATA_URL', None )
+        assert DATA_URL is not None, "DATA_URL parameter missing in tool config."
+        DATA_URL += "/%s/display" % str( data.id )
+        redirect_url += "?DATA_URL=%s" % DATA_URL
+        # Add the redirect_url_params to redirect_url
+        for p_name in rup_dict:
+            redirect_url += "&%s=%s" % ( p_name, rup_dict[ p_name ] )
+        # Add the current user email to redirect_url
+        if data.history.user:
+            USERNAME = str( data.history.user.email )
+        else:
+            USERNAME = 'Anonymous'
+        redirect_url += "&USERNAME=%s" % USERNAME
+        return redirect_url
+
+    def call_hook( self, hook_name, *args, **kwargs ):
+        """
+        Call the custom code hook function identified by 'hook_name' if any,
+        and return the results
+        """
+        try:
+            code = self.get_hook( hook_name )
+            if code:
+                return code( *args, **kwargs )
+        except Exception as e:
+            original_message = ''
+            if len( e.args ):
+                original_message = e.args[0]
+            e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, original_message ), )
+            raise
+
+    def exec_before_job( self, app, inp_data, out_data, param_dict={} ):
+        pass
+
+    def exec_after_process( self, app, inp_data, out_data, param_dict, job=None ):
+        pass
+
+    def job_failed( self, job_wrapper, message, exception=False ):
+        """
+        Called when a job has failed
+        """
+        pass
+
+    def collect_child_datasets( self, output, job_working_directory ):
+        """
+        Look for child dataset files, create HDA and attach to parent.
+        """
+        children = {}
+        # Loop through output file names, looking for generated children in
+        # the form 'child_parentId_designation_visibility_extension'
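+        # e.g. (illustrative) 'child_42_mate2_visible_fastq' for parent dataset id 42
+        # yields designation 'mate2', visible=True and extension 'fastq'.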
+        for name, outdata in output.items():
+            filenames = []
+            if 'new_file_path' in self.app.config.collect_outputs_from:
+                filenames.extend( glob.glob(os.path.join(self.app.config.new_file_path, "child_%i_*" % outdata.id) ) )
+            if 'job_working_directory' in self.app.config.collect_outputs_from:
+                filenames.extend( glob.glob(os.path.join(job_working_directory, "child_%i_*" % outdata.id) ) )
+            for filename in filenames:
+                if name not in children:
+                    children[name] = {}
+                fields = os.path.basename(filename).split("_")
+                designation = fields[2]
+                visible = fields[3].lower() == "visible"
+                ext = fields[4].lower()
+                child_dataset = self.app.model.HistoryDatasetAssociation( extension=ext,
+                                                                          parent_id=outdata.id,
+                                                                          designation=designation,
+                                                                          visible=visible,
+                                                                          dbkey=outdata.dbkey,
+                                                                          create_dataset=True,
+                                                                          sa_session=self.sa_session )
+                self.app.security_agent.copy_dataset_permissions( outdata.dataset, child_dataset.dataset )
+                # Move data from temp location to dataset location
+                self.app.object_store.update_from_file(child_dataset.dataset, file_name=filename, create=True)
+                self.sa_session.add( child_dataset )
+                self.sa_session.flush()
+                child_dataset.set_size()
+                child_dataset.name = "Secondary Dataset (%s)" % ( designation )
+                child_dataset.init_meta()
+                child_dataset.set_meta()
+                child_dataset.set_peek()
+                # Associate new dataset with job
+                job = None
+                for assoc in outdata.creating_job_associations:
+                    job = assoc.job
+                    break
+                if job:
+                    assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s|%s__' % ( name, designation ), child_dataset )
+                    assoc.job = job
+                    self.sa_session.add( assoc )
+                    self.sa_session.flush()
+                child_dataset.state = outdata.state
+                self.sa_session.add( child_dataset )
+                self.sa_session.flush()
+                # Add child to return dict
+                children[name][designation] = child_dataset
+                # Need to update all associated output HDAs, i.e. the history
+                # may have been shared while the job was running
+                for dataset in outdata.dataset.history_associations:
+                    if outdata == dataset:
+                        continue
+                    # Create new child dataset
+                    child_data = child_dataset.copy( parent_id=dataset.id )
+                    self.sa_session.add( child_data )
+                    self.sa_session.flush()
+        return children
+
+    def collect_primary_datasets( self, output, job_working_directory, input_ext, input_dbkey="?" ):
+        """
+        Find any additional datasets generated by a tool and attach (for
+        cases where number of outputs is not known in advance).
+        """
+        return output_collect.collect_primary_datasets( self, output, job_working_directory, input_ext, input_dbkey=input_dbkey )
+
+    def collect_dynamic_collections( self, output, **kwds ):
+        """ Find files corresponding to dynamically structured collections.
+        """
+        return output_collect.collect_dynamic_collections( self, output, **kwds )
+
+    def to_archive(self):
+        tool = self
+        tarball_files = []
+        temp_files = []
+        with open( os.path.abspath( tool.config_file ), 'r' ) as config_fh:
+            tool_xml = config_fh.read()
+        # Retrieve tool help images and rewrite the tool's xml into a temporary file with the path
+        # modified to be relative to the repository root.
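+        # For example (hypothetical paths): a help line containing
+        #   <img alt="plot" src="${static_path}/images/plot.png" />
+        # adds <galaxy_root>/static/images/plot.png to the tarball as
+        # images/plot.png and rewrites the src to that relative path.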
+        image_found = False
+        if tool.help is not None:
+            tool_help = tool.help._source
+            # Check each line of the rendered tool help for an image tag that points to a location under static/
+            image_regex = re.compile( r'img alt="[^"]+" src="\${static_path}/([^"]+)"' )
+            for help_line in tool_help.split( '\n' ):
+                matches = image_regex.search( help_line )
+                if matches is not None:
+                    tool_help_image = matches.group(1)
+                    tarball_path = tool_help_image
+                    filesystem_path = os.path.abspath( os.path.join( self.app.config.root, 'static', tool_help_image ) )
+                    if os.path.exists( filesystem_path ):
+                        tarball_files.append( ( filesystem_path, tarball_path ) )
+                        image_found = True
+                        tool_xml = tool_xml.replace( '${static_path}/%s' % tarball_path, tarball_path )
+        # If one or more tool help images were found, add the modified tool XML to the tarball instead of the original.
+        if image_found:
+            fd, new_tool_config = tempfile.mkstemp( suffix='.xml' )
+            os.close( fd )
+            with open( new_tool_config, 'w' ) as new_config_fh:
+                new_config_fh.write( tool_xml )
+            tool_tup = ( os.path.abspath( new_tool_config ), os.path.split( tool.config_file )[-1]  )
+            temp_files.append( os.path.abspath( new_tool_config ) )
+        else:
+            tool_tup = ( os.path.abspath( tool.config_file ), os.path.split( tool.config_file )[-1]  )
+        tarball_files.append( tool_tup )
+        # TODO: This feels hacky.
+        tool_command = tool.command.strip().split()[0]
+        tool_path = os.path.dirname( os.path.abspath( tool.config_file ) )
+        # If the command references a script file in the tool directory, add it to the tarball.
+        if os.path.exists( os.path.join( tool_path, tool_command ) ):
+            tarball_files.append( ( os.path.join( tool_path, tool_command ), tool_command ) )
+        # Find and add macros and code files.
+        for external_file in tool.get_externally_referenced_paths( os.path.abspath( tool.config_file ) ):
+            external_file_abspath = os.path.abspath( os.path.join( tool_path, external_file ) )
+            tarball_files.append( ( external_file_abspath, external_file ) )
+        if os.path.exists( os.path.join( tool_path, "Dockerfile" ) ):
+            tarball_files.append( ( os.path.join( tool_path, "Dockerfile" ), "Dockerfile" ) )
+        # Find tests, and check them for test data.
+        tests = tool.tests
+        if tests is not None:
+            for test in tests:
+                # Add input file tuples to the list.
+                for input in test.inputs:
+                    for input_value in test.inputs[ input ]:
+                        input_filename = str( input_value )
+                        input_path = os.path.abspath( os.path.join( 'test-data', input_filename ) )
+                        if os.path.exists( input_path ):
+                            td_tup = ( input_path, os.path.join( 'test-data', input_filename ) )
+                            tarball_files.append( td_tup )
+                # And add output file tuples to the list.
+                for label, filename, _ in test.outputs:
+                    output_filepath = os.path.abspath( os.path.join( 'test-data', filename ) )
+                    if os.path.exists( output_filepath ):
+                        td_tup = ( output_filepath, os.path.join( 'test-data', filename ) )
+                        tarball_files.append( td_tup )
+        for param in tool.input_params:
+            # Check for tool data table definitions.
+            if hasattr( param, 'options' ):
+                if hasattr( param.options, 'tool_data_table' ):
+                    data_table = param.options.tool_data_table
+                    if hasattr( data_table, 'filenames' ):
+                        data_table_definitions = []
+                        for data_table_filename in data_table.filenames:
+                            # FIXME: from_shed_config seems to always be False.
+                            if not data_table.filenames[ data_table_filename ][ 'from_shed_config' ]:
+                                tar_file = data_table.filenames[ data_table_filename ][ 'filename' ] + '.sample'
+                                sample_file = os.path.join( data_table.filenames[ data_table_filename ][ 'tool_data_path' ],
+                                                            tar_file )
+                                # Use the .sample file, if one exists. If not, skip this data table.
+                                if os.path.exists( sample_file ):
+                                    tarfile_path, tarfile_name = os.path.split( tar_file )
+                                    tarfile_path = os.path.join( 'tool-data', tarfile_name )
+                                    tarball_files.append( ( sample_file, tarfile_path ) )
+                                data_table_definitions.append( data_table.xml_string )
+                        if len( data_table_definitions ) > 0:
+                            # Put the data table definition XML in a temporary file.
+                            table_definition = '<?xml version="1.0" encoding="utf-8"?>\n<tables>\n    %s</tables>'
+                            table_definition = table_definition % '\n'.join( data_table_definitions )
+                            fd, table_conf = tempfile.mkstemp()
+                            os.close( fd )
+                            with open( table_conf, 'w' ) as table_conf_fh:
+                                table_conf_fh.write( table_definition )
+                            tarball_files.append( ( table_conf, os.path.join( 'tool-data', 'tool_data_table_conf.xml.sample' ) ) )
+                            temp_files.append( table_conf )
+        # Create the tarball.
+        fd, tarball_archive = tempfile.mkstemp( suffix='.tgz' )
+        os.close( fd )
+        tarball = tarfile.open( name=tarball_archive, mode='w:gz' )
+        # Add the files from the previously generated list.
+        for fspath, tarpath in tarball_files:
+            tarball.add( fspath, arcname=tarpath )
+        tarball.close()
+        # Delete any temporary files that were generated.
+        for temp_file in temp_files:
+            os.remove( temp_file )
+        return tarball_archive
+
+    def to_dict( self, trans, link_details=False, io_details=False ):
+        """ Returns dict of tool. """
+
+        # Basic information
+        tool_dict = super( Tool, self ).to_dict()
+
+        tool_dict["edam_operations"] = self.edam_operations
+        tool_dict["edam_topics"] = self.edam_topics
+
+        # Fill in ToolShedRepository info
+        if hasattr(self, 'tool_shed') and self.tool_shed:
+            tool_dict['tool_shed_repository'] = {
+                'name': self.repository_name,
+                'owner': self.repository_owner,
+                'changeset_revision': self.changeset_revision,
+                'tool_shed': self.tool_shed
+            }
+
+        # If an admin user, expose the path to the actual tool config XML file.
+        if trans.user_is_admin():
+            tool_dict[ 'config_file' ] = os.path.abspath( self.config_file )
+
+        # Add link details.
+        if link_details:
+            # Add details for creating a hyperlink to the tool.
+            if not isinstance( self, DataSourceTool ):
+                link = url_for( controller='tool_runner', tool_id=self.id )
+            else:
+                link = url_for( controller='tool_runner', action='data_source_redirect', tool_id=self.id )
+
+            # Basic information
+            tool_dict.update( { 'link': link,
+                                'min_width': self.uihints.get( 'minwidth', -1 ),
+                                'target': self.target } )
+
+        # Add input and output details.
+        if io_details:
+            tool_dict[ 'inputs' ] = [ input.to_dict( trans ) for input in self.inputs.values() ]
+            tool_dict[ 'outputs' ] = [ output.to_dict( app=self.app ) for output in self.outputs.values() ]
+
+        tool_dict[ 'panel_section_id' ], tool_dict[ 'panel_section_name' ] = self.get_panel_section()
+
+        tool_class = self.__class__
+        regular_form = tool_class == Tool or isinstance(self, DatabaseOperationTool)
+        tool_dict["form_style"] = "regular" if regular_form else "special"
+
+        return tool_dict
+
+    def to_json( self, trans, kwd=None, job=None, workflow_building_mode=False ):
+        """
+        Recursively creates a tool dictionary containing repeats, dynamic options and updated states.
+        """
+        kwd = kwd if kwd is not None else {}
+        history_id = kwd.get( 'history_id', None )
+        history = None
+        try:
+            if history_id is not None:
+                history = self.history_manager.get_owned( trans.security.decode_id( history_id ), trans.user, current_history=trans.history )
+            else:
+                history = trans.get_history()
+            if history is None and job is not None:
+                history = self.history_manager.get_owned( job.history.id, trans.user, current_history=trans.history )
+            if history is None:
+                raise exceptions.MessageException( 'History unavailable. Please specify a valid history id' )
+        except Exception as e:
+            raise exceptions.MessageException( '[history_id=%s] Failed to retrieve history. %s.' % ( history_id, str( e ) ) )
+
+        # build request context
+        request_context = WorkRequestContext( app=trans.app, user=trans.user, history=history, workflow_building_mode=workflow_building_mode )
+
+        # load job parameters into incoming
+        tool_message = ''
+        tool_warnings = ''
+        if job:
+            try:
+                job_params = job.get_param_values( self.app, ignore_errors=True )
+                tool_warnings = self.check_and_update_param_values( job_params, request_context, update_values=False )
+                self._map_source_to_history( request_context, self.inputs, job_params )
+                tool_message = self._compare_tool_version( job )
+                params_to_incoming( kwd, self.inputs, job_params, self.app )
+            except Exception as e:
+                raise exceptions.MessageException( str( e ) )
+
+        # create parameter object
+        params = galaxy.util.Params( kwd, sanitize=False )
+
+        # populates model from state
+        def populate_model( inputs, state_inputs, group_inputs, other_values=None ):
+            other_values = ExpressionContext( state_inputs, other_values )
+            for input_index, input in enumerate( inputs.itervalues() ):
+                tool_dict = None
+                group_state = state_inputs.get( input.name, {} )
+                if input.type == 'repeat':
+                    tool_dict = input.to_dict( request_context )
+                    group_cache = tool_dict[ 'cache' ] = {}
+                    for i in range( len( group_state ) ):
+                        group_cache[ i ] = {}
+                        populate_model( input.inputs, group_state[ i ], group_cache[ i ], other_values )
+                elif input.type == 'conditional':
+                    tool_dict = input.to_dict( request_context )
+                    if 'test_param' in tool_dict:
+                        test_param = tool_dict[ 'test_param' ]
+                        test_param[ 'value' ] = input.test_param.value_to_basic( group_state.get( test_param[ 'name' ], input.test_param.get_initial_value( request_context, other_values ) ), self.app )
+                        test_param[ 'text_value' ] = input.test_param.value_to_display_text( test_param[ 'value' ], self.app )
+                        for i in range( len( tool_dict['cases'] ) ):
+                            current_state = {}
+                            if i == group_state.get( '__current_case__' ):
+                                current_state = group_state
+                            populate_model( input.cases[ i ].inputs, current_state, tool_dict[ 'cases' ][ i ][ 'inputs' ], other_values )
+                elif input.type == 'section':
+                    tool_dict = input.to_dict( request_context )
+                    populate_model( input.inputs, group_state, tool_dict[ 'inputs' ], other_values )
+                else:
+                    try:
+                        tool_dict = input.to_dict( request_context, other_values=other_values )
+                        tool_dict[ 'value' ] = input.value_to_basic( state_inputs.get( input.name, input.get_initial_value( request_context, other_values ) ), self.app, use_security=True )
+                        tool_dict[ 'text_value' ] = input.value_to_display_text( tool_dict[ 'value' ], self.app )
+                    except Exception as e:
+                        tool_dict = input.to_dict( request_context )
+                        log.exception( 'tools::to_json() - Skipping parameter expansion \'%s\': %s.', input.name, e )
+                group_inputs[ input_index ] = tool_dict
+
+        # expand incoming parameters (parameters might trigger multiple tool executions,
+        # here we select the first execution only in order to resolve dynamic parameters)
+        expanded_incomings, _ = expand_meta_parameters( trans, self, params.__dict__ )
+        if expanded_incomings:
+            params.__dict__ = expanded_incomings[ 0 ]
+
+        # do param translation here, used by datasource tools
+        if self.input_translator:
+            self.input_translator.translate( params )
+
+        # create tool state
+        state_inputs = {}
+        state_errors = {}
+        self.populate_state( request_context, self.inputs, params.__dict__, state_inputs, state_errors )
+
+        # create tool model
+        tool_model = self.to_dict( request_context )
+        tool_model[ 'inputs' ] = {}
+        populate_model( self.inputs, state_inputs, tool_model[ 'inputs' ] )
+
+        # create tool help
+        tool_help = ''
+        if self.help:
+            tool_help = self.help.render( static_path=url_for( '/static' ), host_url=url_for( '/', qualified=True ) )
+            tool_help = unicodify( tool_help, 'utf-8' )
+
+        # create tool versions
+        tool_versions = []
+        tools = self.app.toolbox.get_loaded_tools_by_lineage( self.id )
+        for t in tools:
+            if t.version not in tool_versions:
+                tool_versions.append( t.version )
+
+        # update tool model
+        tool_model.update({
+            'id'            : self.id,
+            'help'          : tool_help,
+            'citations'     : bool( self.citations ),
+            'biostar_url'   : self.app.config.biostar_url,
+            'sharable_url'  : self.tool_shed_repository.get_sharable_url( self.app ) if self.tool_shed_repository else None,
+            'message'       : tool_message,
+            'warnings'      : tool_warnings,
+            'versions'      : tool_versions,
+            'requirements'  : [ { 'name' : r.name, 'version' : r.version } for r in self.requirements ],
+            'errors'        : state_errors,
+            'state_inputs'  : params_to_strings( self.inputs, state_inputs, self.app ),
+            'job_id'        : trans.security.encode_id( job.id ) if job else None,
+            'job_remap'     : self._get_job_remap( job ),
+            'history_id'    : trans.security.encode_id( history.id ),
+            'display'       : self.display_interface,
+            'action'        : url_for( self.action ),
+            'method'        : self.method,
+            'enctype'       : self.enctype
+        })
+        return tool_model
+
+    # populates state from incoming parameters
+    def populate_state( self, request_context, inputs, incoming, state, errors=None, prefix='', context=None ):
+        errors = errors if errors is not None else {}
+        context = ExpressionContext( state, context )
+        for input in inputs.itervalues():
+            state[ input.name ] = input.get_initial_value( request_context, context )
+            key = prefix + input.name
+            group_state = state[ input.name ]
+            group_prefix = '%s|' % ( key )
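+            # Incoming keys are flattened with prefixes, e.g. a repeat "queries"
+            # holding a param "input" arrives as "queries_0|input", "queries_1|input",
+            # while conditionals and sections use "name|param" (illustrative names).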
+            if input.type == 'repeat':
+                rep_index = 0
+                del group_state[:]
+                while True:
+                    rep_prefix = '%s_%d' % ( key, rep_index )
+                    if not any( incoming_key.startswith( rep_prefix ) for incoming_key in incoming.keys() ) and rep_index >= input.min:
+                        break
+                    if rep_index < input.max:
+                        new_state = { '__index__' : rep_index }
+                        group_state.append( new_state )
+                        self.populate_state( request_context, input.inputs, incoming, new_state, errors, prefix=rep_prefix + '|', context=context )
+                    rep_index += 1
+            elif input.type == 'conditional':
+                if input.value_ref and not input.value_ref_in_group:
+                    test_param_key = prefix + input.test_param.name
+                else:
+                    test_param_key = group_prefix + input.test_param.name
+                test_param_value = incoming.get( test_param_key, group_state.get( input.test_param.name ) )
+                value, error = check_param( request_context, input.test_param, test_param_value, context )
+                if error:
+                    errors[ test_param_key ] = error
+                else:
+                    try:
+                        current_case = input.get_current_case( value )
+                        group_state = state[ input.name ] = {}
+                        self.populate_state( request_context, input.cases[ current_case ].inputs, incoming, group_state, errors, prefix=group_prefix, context=context )
+                        group_state[ '__current_case__' ] = current_case
+                    except Exception:
+                        errors[ test_param_key ] = 'The selected case is unavailable/invalid.'
+                group_state[ input.test_param.name ] = value
+            elif input.type == 'section':
+                self.populate_state( request_context, input.inputs, incoming, group_state, errors, prefix=group_prefix, context=context )
+            elif input.type == 'upload_dataset':
+                d_type = input.get_datatype( request_context, context=context )
+                writable_files = d_type.writable_files
+                while len( group_state ) > len( writable_files ):
+                    del group_state[ -1 ]
+                while len( writable_files ) > len( group_state ):
+                    new_state = { '__index__' : len( group_state ) }
+                    for upload_item in input.inputs.itervalues():
+                        new_state[ upload_item.name ] = upload_item.get_initial_value( request_context, context )
+                    group_state.append( new_state )
+                for i, rep_state in enumerate( group_state ):
+                    rep_index = rep_state[ '__index__' ]
+                    rep_prefix = '%s_%d|' % ( key, rep_index )
+                    self.populate_state( request_context, input.inputs, incoming, rep_state, errors, prefix=rep_prefix, context=context )
+            else:
+                param_value = self._get_incoming_value( incoming, key, state.get( input.name ) )
+                value, error = check_param( request_context, input, param_value, context )
+                if error:
+                    errors[ key ] = error
+                state[ input.name ] = value
+
+    def _get_incoming_value( self, incoming, key, default ):
+        """
+        Fetch value from the incoming dict directly, or assemble it from the
+        composite variants of this key created by the nginx upload module.
+        """
+        if '__' + key + '__is_composite' in incoming:
+            composite_keys = incoming[ '__' + key + '__keys' ].split()
+            value = dict()
+            for composite_key in composite_keys:
+                value[ composite_key ] = incoming[ key + '_' + composite_key ]
+            return value
+        else:
+            return incoming.get( key, default )
+
+    def _get_job_remap( self, job ):
+        if job and job.state == job.states.ERROR:
+            try:
+                if any( jtod.dataset.dependent_jobs for jtod in job.output_datasets ):
+                    return True
+            except Exception as exception:
+                log.error( str( exception ) )
+        return False
+
+    def _map_source_to_history( self, trans, tool_inputs, params ):
+        # Need to remap dataset parameters. Job parameters point to the
+        # original dataset used; the parameter should be the analogous dataset
+        # in the current history.
+        history = trans.history
+
+        # Create index for hdas.
+        hda_source_dict = {}
+        for hda in history.datasets:
+            key = '%s_%s' % ( hda.hid, hda.dataset.id )
+            hda_source_dict[ hda.dataset.id ] = hda_source_dict[ key ] = hda
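+        # Each HDA is indexed both by its raw dataset id and by "hid_datasetid",
+        # e.g. hid 3 with dataset id 15 is reachable under 15 and under "3_15".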
+
+        # Ditto for dataset collections.
+        hdca_source_dict = {}
+        for hdca in history.dataset_collections:
+            key = '%s_%s' % ( hdca.hid, hdca.collection.id )
+            hdca_source_dict[ hdca.collection.id ] = hdca_source_dict[ key ] = hdca
+
+        # Map dataset or collection to current history
+        def map_to_history( value ):
+            id = None
+            source = None
+            if isinstance( value, self.app.model.HistoryDatasetAssociation ):
+                id = value.dataset.id
+                source = hda_source_dict
+            elif isinstance( value, self.app.model.HistoryDatasetCollectionAssociation ):
+                id = value.collection.id
+                source = hdca_source_dict
+            else:
+                return None
+            key = '%s_%s' % ( value.hid, id )
+            if key in source:
+                return source[ key ]
+            elif id in source:
+                return source[ id ]
+            else:
+                return None
+
+        def mapping_callback( input, value, **kwargs ):
+            if isinstance( input, DataToolParameter ):
+                if isinstance(value, list):
+                    values = []
+                    for val in value:
+                        new_val = map_to_history( val )
+                        if new_val:
+                            values.append( new_val )
+                        else:
+                            values.append( val )
+                    return values
+                else:
+                    return map_to_history( value )
+            elif isinstance( input, DataCollectionToolParameter ):
+                return map_to_history( value )
+        visit_input_values( tool_inputs, params, mapping_callback )
+
+    def _compare_tool_version( self, job ):
+        """
+        Compares a tool version with the tool version from a job (from ToolRunner).
+        """
+        tool_id = job.tool_id
+        tool_version = job.tool_version
+        message = ''
+        try:
+            select_field, tools, tool = self.app.toolbox.get_tool_components( tool_id, tool_version=tool_version, get_loaded_tools_by_lineage=False, set_selected=True )
+            if tool is None:
+                raise exceptions.MessageException( 'This dataset was created by an obsolete tool (%s). Can\'t re-run.' % tool_id )
+            if ( self.id != tool_id and self.old_id != tool_id ) or self.version != tool_version:
+                if self.id == tool_id:
+                    if tool_version is None:
+                        # for some reason jobs don't always keep track of the tool version.
+                        message = ''
+                    else:
+                        message = 'This job was run with tool version "%s", which is not available.  ' % tool_version
+                        if len( tools ) > 1:
+                            message += 'You can re-run the job with the selected tool or choose another version of the tool.'
+                        else:
+                            message += 'You can re-run the job with this tool version, which is a different version of the original tool.'
+                else:
+                    new_tool_shed_url = '%s/%s/' % ( tool.tool_shed_repository.get_sharable_url( tool.app ), tool.tool_shed_repository.changeset_revision )
+                    old_tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_id.split( '/repos/' )[ 0 ] )
+                    old_tool_shed_url = '%s/view/%s/%s/' % ( old_tool_shed_url, tool.repository_owner, tool.repository_name )
+                    message = 'This job was run with <a href=\"%s\" target=\"_blank\">tool id \"%s\"</a>, version "%s", which is not available.  ' % ( old_tool_shed_url, tool_id, tool_version )
+                    if len( tools ) > 1:
+                        message += 'You can re-run the job with the selected <a href=\"%s\" target=\"_blank\">tool id \"%s\"</a> or choose another derivation of the tool.' % ( new_tool_shed_url, self.id )
+                    else:
+                        message += 'You can re-run the job with <a href=\"%s\" target=\"_blank\">tool id \"%s\"</a>, which is a derivation of the original tool.' % ( new_tool_shed_url, self.id )
+        except Exception as e:
+            raise exceptions.MessageException( str( e ) )
+        return message
+
+    def get_default_history_by_trans( self, trans, create=False ):
+        return trans.get_history( create=create )
+
+    @classmethod
+    def get_externally_referenced_paths( cls, path ):
+        """ Return relative paths of files externally referenced by the tool
+        described by the file at `path`. External components should not assume
+        things about the structure of tool xml files (this is the tool's
+        responsibility).
+        """
+        tree = raw_tool_xml_tree(path)
+        root = tree.getroot()
+        external_paths = []
+        for code_elem in root.findall( 'code' ):
+            external_path = code_elem.get( 'file' )
+            if external_path:
+                external_paths.append( external_path )
+        external_paths.extend( imported_macro_paths( root ) )
+        # May also need to load external citation files as well at some point.
+        return external_paths
+
+
+class OutputParameterJSONTool( Tool ):
+    """
+    Alternate implementation of Tool that provides parameters and other values
+    JSONified within the contents of an output dataset
+    """
+    tool_type = 'output_parameter_json'
+
+    def _prepare_json_list( self, param_list ):
+        rval = []
+        for value in param_list:
+            if isinstance( value, dict ):
+                rval.append( self._prepare_json_param_dict( value ) )
+            elif isinstance( value, list ):
+                rval.append( self._prepare_json_list( value ) )
+            else:
+                rval.append( str( value ) )
+        return rval
+
+    def _prepare_json_param_dict( self, param_dict ):
+        rval = {}
+        for key, value in param_dict.iteritems():
+            if isinstance( value, dict ):
+                rval[ key ] = self._prepare_json_param_dict( value )
+            elif isinstance( value, list ):
+                rval[ key ] = self._prepare_json_list( value )
+            else:
+                rval[ key ] = str( value )
+        return rval
+
+    def exec_before_job( self, app, inp_data, out_data, param_dict=None ):
+        if param_dict is None:
+            param_dict = {}
+        json_params = {}
+        json_params[ 'param_dict' ] = self._prepare_json_param_dict( param_dict )  # it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
+        json_params[ 'output_data' ] = []
+        json_params[ 'job_config' ] = dict( GALAXY_DATATYPES_CONF_FILE=param_dict.get( 'GALAXY_DATATYPES_CONF_FILE' ), GALAXY_ROOT_DIR=param_dict.get( 'GALAXY_ROOT_DIR' ), TOOL_PROVIDED_JOB_METADATA_FILE=galaxy.jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
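+        # Illustrative shape of the JSON written below (hypothetical values):
+        #   { "param_dict": {...}, "job_config": {...},
+        #     "output_data": [ { "out_data_name": "output", "ext": "tabular",
+        #                        "dataset_id": 7, "hda_id": 12,
+        #                        "file_name": "...", "extra_files_path": "..." } ] }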
+        json_filename = None
+        for i, ( out_name, data ) in enumerate( out_data.iteritems() ):
+            # use wrapped dataset to access certain values
+            wrapped_data = param_dict.get( out_name )
+            # allow multiple files to be created
+            file_name = str( wrapped_data )
+            extra_files_path = str( wrapped_data.files_path )
+            data_dict = dict( out_data_name=out_name,
+                              ext=data.ext,
+                              dataset_id=data.dataset.id,
+                              hda_id=data.id,
+                              file_name=file_name,
+                              extra_files_path=extra_files_path )
+            json_params[ 'output_data' ].append( data_dict )
+            if json_filename is None:
+                json_filename = file_name
+        with open( json_filename, 'w' ) as out:
+            out.write( json.dumps( json_params ) )
+
+
+class DataSourceTool( OutputParameterJSONTool ):
+    """
+    Alternate implementation of Tool for data_source tools -- those that
+    allow the user to query and extract data from another web site.
+    """
+    tool_type = 'data_source'
+    default_tool_action = DataSourceToolAction
+
+    def _build_GALAXY_URL_parameter( self ):
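+        # The baseurl param resolves against this Galaxy instance, e.g. a tool
+        # with id "my_source" (hypothetical) hands the remote data source a link
+        # back to "/tool_runner?tool_id=my_source".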
+        return ToolParameter.build( self, ElementTree.XML( '<param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=%s" />' % self.id ) )
+
+    def parse_inputs( self, tool_source ):
+        super( DataSourceTool, self ).parse_inputs( tool_source )
+        # Open all data_source tools in _top.
+        self.target = '_top'
+        if 'GALAXY_URL' not in self.inputs:
+            self.inputs[ 'GALAXY_URL' ] = self._build_GALAXY_URL_parameter()
+            self.inputs_by_page[0][ 'GALAXY_URL' ] = self.inputs[ 'GALAXY_URL' ]
+
+    def exec_before_job( self, app, inp_data, out_data, param_dict=None ):
+        if param_dict is None:
+            param_dict = {}
+        dbkey = param_dict.get( 'dbkey' )
+        info = param_dict.get( 'info' )
+        data_type = param_dict.get( 'data_type' )
+        name = param_dict.get( 'name' )
+
+        json_params = {}
+        json_params[ 'param_dict' ] = self._prepare_json_param_dict( param_dict )  # it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
+        json_params[ 'output_data' ] = []
+        json_params[ 'job_config' ] = dict( GALAXY_DATATYPES_CONF_FILE=param_dict.get( 'GALAXY_DATATYPES_CONF_FILE' ), GALAXY_ROOT_DIR=param_dict.get( 'GALAXY_ROOT_DIR' ), TOOL_PROVIDED_JOB_METADATA_FILE=galaxy.jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
+        json_filename = None
+        for i, ( out_name, data ) in enumerate( out_data.iteritems() ):
+            # use wrapped dataset to access certain values
+            wrapped_data = param_dict.get( out_name )
+            # allow multiple files to be created
+            cur_base_param_name = 'GALAXY|%s|' % out_name
+            cur_name = param_dict.get( cur_base_param_name + 'name', name )
+            cur_dbkey = param_dict.get( cur_base_param_name + 'dkey', dbkey )
+            cur_info = param_dict.get( cur_base_param_name + 'info', info )
+            cur_data_type = param_dict.get( cur_base_param_name + 'data_type', data_type )
+            if cur_name:
+                data.name = cur_name
+            if not data.info and cur_info:
+                data.info = cur_info
+            if cur_dbkey:
+                data.dbkey = cur_dbkey
+            if cur_data_type:
+                data.extension = cur_data_type
+            file_name = str( wrapped_data )
+            extra_files_path = str( wrapped_data.files_path )
+            data_dict = dict( out_data_name=out_name,
+                              ext=data.ext,
+                              dataset_id=data.dataset.id,
+                              hda_id=data.id,
+                              file_name=file_name,
+                              extra_files_path=extra_files_path )
+            json_params[ 'output_data' ].append( data_dict )
+            if json_filename is None:
+                json_filename = file_name
+        with open( json_filename, 'w' ) as out:
+            out.write( json.dumps( json_params ) )
+
+
+class AsyncDataSourceTool( DataSourceTool ):
+    tool_type = 'data_source_async'
+
+    def _build_GALAXY_URL_parameter( self ):
+        return ToolParameter.build( self, ElementTree.XML( '<param name="GALAXY_URL" type="baseurl" value="/async/%s" />' % self.id ) )
+
+
+class DataDestinationTool( Tool ):
+    tool_type = 'data_destination'
+
+
+class SetMetadataTool( Tool ):
+    """
+    Tool implementation for special tool that sets metadata on an existing
+    dataset.
+    """
+    tool_type = 'set_metadata'
+    requires_setting_metadata = False
+
+    def exec_after_process( self, app, inp_data, out_data, param_dict, job=None ):
+        for name, dataset in inp_data.iteritems():
+            external_metadata = JobExternalOutputMetadataWrapper( job )
+            if external_metadata.external_metadata_set_successfully( dataset, app.model.context ):
+                dataset.metadata.from_JSON_dict( external_metadata.get_output_filenames_by_dataset( dataset, app.model.context ).filename_out )
+            else:
+                dataset._state = model.Dataset.states.FAILED_METADATA
+                self.sa_session.add( dataset )
+                self.sa_session.flush()
+                return
+            # If setting external metadata has failed, how can we inform the
+            # user? For now, we'll leave the default metadata and set the state
+            # back to its original.
+            dataset.datatype.after_setting_metadata( dataset )
+            if job and job.tool_id == '1.0.0':
+                dataset.state = param_dict.get( '__ORIGINAL_DATASET_STATE__' )
+            else:
+                # Revert dataset.state to fall back to dataset.dataset.state
+                dataset._state = None
+            # Need to reset the peek, which may rely on metadata
+            dataset.set_peek()
+            self.sa_session.add( dataset )
+            self.sa_session.flush()
+
+    def job_failed( self, job_wrapper, message, exception=False ):
+        job = job_wrapper.sa_session.query( model.Job ).get( job_wrapper.job_id )
+        if job:
+            inp_data = {}
+            for dataset_assoc in job.input_datasets:
+                inp_data[dataset_assoc.name] = dataset_assoc.dataset
+            return self.exec_after_process( job_wrapper.app, inp_data, {}, job_wrapper.get_param_dict(), job=job )
+
+
+class ExportHistoryTool( Tool ):
+    tool_type = 'export_history'
+
+
+class ImportHistoryTool( Tool ):
+    tool_type = 'import_history'
+
+
+class DataManagerTool( OutputParameterJSONTool ):
+    tool_type = 'manage_data'
+    default_tool_action = DataManagerToolAction
+
+    def __init__( self, config_file, root, app, guid=None, data_manager_id=None, **kwds ):
+        self.data_manager_id = data_manager_id
+        super( DataManagerTool, self ).__init__( config_file, root, app, guid=guid, **kwds )
+        if self.data_manager_id is None:
+            self.data_manager_id = self.id
+
+    def exec_after_process( self, app, inp_data, out_data, param_dict, job=None, **kwds ):
+        assert self.allow_user_access( job.user ), "You must be an admin to access this tool."
+        # run original exec_after_process
+        super( DataManagerTool, self ).exec_after_process( app, inp_data, out_data, param_dict, job=job, **kwds )
+        # process results of tool
+        if job and job.state == job.states.ERROR:
+            return
+        # The job state may now be 'running' instead of the previous 'error', but datasets may still be set to e.g. error
+        for dataset in out_data.itervalues():
+            if dataset.state != dataset.states.OK:
+                return
+        data_manager_id = job.data_manager_association.data_manager_id
+        data_manager = self.app.data_managers.get_manager( data_manager_id, None )
+        assert data_manager is not None, "Invalid data manager (%s) requested. It may have been removed before the job completed." % ( data_manager_id )
+        data_manager.process_result( out_data )
+
+    def get_default_history_by_trans( self, trans, create=False ):
+        def _create_data_manager_history( user ):
+            history = trans.app.model.History( name='Data Manager History (automatically created)', user=user )
+            data_manager_association = trans.app.model.DataManagerHistoryAssociation( user=user, history=history )
+            trans.sa_session.add_all( ( history, data_manager_association ) )
+            trans.sa_session.flush()
+            return history
+        user = trans.user
+        assert user, 'You must be logged in to use this tool.'
+        assert self.allow_user_access( user ), "You must be an admin to access this tool."
+        # Use the most recent non-deleted data manager history, if any.
+        history = None
+        for association in reversed( user.data_manager_histories ):
+            history = association.history
+            if not history.deleted:
+                break
+        if history is None or history.deleted:
+            history = _create_data_manager_history( user ) if create else None
+        return history
+
+    def allow_user_access( self, user, attempting_access=True ):
+        """
+        :param user: model object representing user.
+        :type user: galaxy.model.User
+        :param attempting_access: is the user attempting to do something with the
+                               the tool (set false for incidental checks like toolbox
+                               listing)
+        :type attempting_access:  bool
+
+        :returns: bool -- Whether the user is allowed to access the tool.
+        Data Manager tools are only accessible to admins.
+        """
+        if super( DataManagerTool, self ).allow_user_access( user ) and self.app.config.is_admin_user( user ):
+            return True
+        # If this is just an incidental check - do not log the scary message
+        # about users attempting to do something problematic.
+        if attempting_access:
+            if user:
+                user = user.id
+            log.debug( "User (%s) attempted to access a data manager tool (%s), but is not an admin.", user, self.id )
+        return False
+
+
+class DatabaseOperationTool( Tool ):
+    default_tool_action = ModelOperationToolAction
+    require_dataset_ok = True
+
+    @property
+    def valid_input_states( self ):
+        if self.require_dataset_ok:
+            return (model.Dataset.states.OK,)
+        else:
+            return model.Dataset.terminal_states
+
+    @property
+    def allow_errored_inputs( self ):
+        return not self.require_dataset_ok
+
+    def check_inputs_ready( self, input_datasets, input_dataset_collections ):
+        def check_dataset_instance( input_dataset ):
+            if input_dataset.is_pending:
+                raise ToolInputsNotReadyException()
+
+            if self.require_dataset_ok:
+                if input_dataset.state != input_dataset.dataset.states.OK:
+                    raise ValueError("Tool requires inputs to be in valid state.")
+
+        for input_dataset in input_datasets.values():
+            check_dataset_instance( input_dataset )
+
+        for input_dataset_collection in input_dataset_collections.values():
+            if not input_dataset_collection.collection.populated:
+                raise ToolInputsNotReadyException()
+
+            for dataset_instance in input_dataset_collection.dataset_instances:
+                check_dataset_instance( dataset_instance )
+
+    def produce_outputs( self, trans, out_data, output_collections, incoming, history ):
+        return self._outputs_dict()
+
+    def _outputs_dict( self ):
+        return odict()
+
+
+class UnzipCollectionTool( DatabaseOperationTool ):
+    tool_type = 'unzip_collection'
+
+    def produce_outputs( self, trans, out_data, output_collections, incoming, history ):
+        has_collection = incoming[ "input" ]
+        if hasattr(has_collection, "element_type"):
+            # It is a DCE
+            collection = has_collection.element_object
+        else:
+            # It is an HDCA
+            collection = has_collection.collection
+
+        assert collection.collection_type == "paired"
+        forward_o, reverse_o = collection.dataset_instances
+        forward, reverse = forward_o.copy(), reverse_o.copy()
+        # TODO: rename...
+        history.add_dataset( forward, set_hid=True )
+        history.add_dataset( reverse, set_hid=True )
+        out_data["forward"] = forward
+        out_data["reverse"] = reverse
+
+
+class ZipCollectionTool( DatabaseOperationTool ):
+    tool_type = 'zip_collection'
+
+    def produce_outputs( self, trans, out_data, output_collections, incoming, history ):
+        forward_o = incoming[ "input_forward" ]
+        reverse_o = incoming[ "input_reverse" ]
+
+        forward, reverse = forward_o.copy(), reverse_o.copy()
+        new_elements = odict()
+        new_elements["forward"] = forward
+        new_elements["reverse"] = reverse
+
+        output_collections.create_collection(
+            self.outputs.values()[0], "output", elements=new_elements
+        )
+
+
+class MergeCollectionTool( DatabaseOperationTool ):
+    tool_type = 'merge_collection'
+
+    def produce_outputs( self, trans, out_data, output_collections, incoming, history ):
+        input_lists = []
+
+        for incoming_repeat in incoming[ "inputs" ]:
+            input_lists.append(incoming_repeat["input"])
+
+        advanced = incoming.get("advanced", None)
+        dupl_actions = "keep_first"
+        suffix_pattern = None
+        if advanced is not None:
+            dupl_actions = advanced["conflict"]['duplicate_options']
+
+            if dupl_actions in ['suffix_conflict', 'suffix_every', 'suffix_conflict_rest']:
+                suffix_pattern = advanced['conflict']['suffix_pattern']
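+                # e.g. with suffix_pattern "_#" (hypothetical), a duplicated
+                # identifier "sample" coming from the second input list is
+                # emitted as "sample_2" below.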
+
+        new_element_structure = odict()
+
+        # Track which input lists each element identifier appears in.
+        identifiers_map = {}
+        for input_num, input_list in enumerate(input_lists):
+            for dce in input_list.collection.elements:
+                element_identifier = dce.element_identifier
+                if element_identifier not in identifiers_map:
+                    identifiers_map[element_identifier] = []
+                elif dupl_actions == "fail":
+                    raise Exception("Duplicate collection element identifiers found for [%s]" % element_identifier)
+                identifiers_map[element_identifier].append(input_num)
+
+        for copy, input_list in enumerate(input_lists):
+            for dce in input_list.collection.elements:
+                element = dce.element_object
+                valid = False
+
+                # dealing with a single element
+                if hasattr(element, "is_ok"):
+                    if element.is_ok:
+                        valid = True
+                elif hasattr(element, "dataset_instances"):
+                    # this is probably a paired collection element; both datasets need to be in a non-error state
+                    forward_o, reverse_o = element.dataset_instances
+                    if forward_o.is_ok and reverse_o.is_ok:
+                        valid = True
+
+                if valid:
+                    element_identifier = dce.element_identifier
+                    identifier_seen = element_identifier in new_element_structure
+                    appearances = identifiers_map[element_identifier]
+                    add_suffix = False
+                    if dupl_actions == "suffix_every":
+                        add_suffix = True
+                    elif dupl_actions == "suffix_conflict" and len(appearances) > 1:
+                        add_suffix = True
+                    elif dupl_actions == "suffix_conflict_rest" and len(appearances) > 1 and appearances[0] != copy:
+                        add_suffix = True
+
+                    if dupl_actions == "keep_first" and identifier_seen:
+                        continue
+
+                    if add_suffix:
+                        suffix = suffix_pattern.replace("#", str(copy + 1))
+                        effective_identifier = "%s%s" % (element_identifier, suffix)
+                    else:
+                        effective_identifier = element_identifier
+
+                    new_element_structure[effective_identifier] = element
+
+        # Don't copy until we know everything is fine and we have the structure of the list ready to go.
+        new_elements = odict()
+        for key, value in new_element_structure.items():
+            new_elements[key] = value.copy()
+
+        output_collections.create_collection(
+            self.outputs.values()[0], "output", elements=new_elements
+        )
+
+
+class FilterFailedDatasetsTool( DatabaseOperationTool ):
+    tool_type = 'filter_failed_datasets_collection'
+    require_dataset_ok = False
+
+    def produce_outputs( self, trans, out_data, output_collections, incoming, history ):
+        hdca = incoming[ "input" ]
+
+        assert hdca.collection.collection_type in ( "list", "list:paired" )
+
+        new_elements = odict()
+
+        for dce in hdca.collection.elements:
+            element = dce.element_object
+
+            valid = False
+
+            # dealing with a single element
+            if hasattr(element, "is_ok"):
+                if element.is_ok:
+                    valid = True
+            elif hasattr(element, "dataset_instances"):
+                # this is probably a paired collection element; both datasets need to be in a non-error state
+                forward_o, reverse_o = element.dataset_instances
+                if forward_o.is_ok and reverse_o.is_ok:
+                    valid = True
+
+            if valid:
+                element_identifier = dce.element_identifier
+                new_elements[element_identifier] = element.copy()
+
+        output_collections.create_collection(
+            self.outputs.values()[0], "output", elements=new_elements
+        )
+
+
+class FlattenTool( DatabaseOperationTool ):
+    tool_type = 'flatten_collection'
+
+    def produce_outputs( self, trans, out_data, output_collections, incoming, history ):
+        hdca = incoming[ "input" ]
+        join_identifier = incoming["join_identifier"]
+        new_elements = odict()
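+        # Sketch: flattening a nested {"outer": {"inner": <dataset>}} collection
+        # with join_identifier "_" yields a single element named "outer_inner"
+        # (illustrative identifiers).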
+
+        def add_elements(collection, prefix=""):
+            for dce in collection.elements:
+                dce_object = dce.element_object
+                dce_identifier = dce.element_identifier
+                identifier = "%s%s%s" % (prefix, join_identifier, dce_identifier) if prefix else dce_identifier
+                if dce.is_collection:
+                    add_elements(dce_object, prefix=identifier)
+                else:
+                    new_elements[identifier] = dce_object.copy()
+
+        add_elements(hdca.collection)
+        output_collections.create_collection(
+            self.outputs.values()[0], "output", elements=new_elements
+        )
+
+
+# Populate tool_type to ToolClass mappings
+tool_types = {}
+for tool_class in [ Tool, SetMetadataTool, OutputParameterJSONTool,
+                    DataManagerTool, DataSourceTool, AsyncDataSourceTool,
+                    UnzipCollectionTool, ZipCollectionTool, MergeCollectionTool,
+                    FilterFailedDatasetsTool, FlattenTool,
+                    ExportHistoryTool, ImportHistoryTool,
+                    DataDestinationTool ]:
+    tool_types[ tool_class.tool_type ] = tool_class
+
+
+# ---- Utility classes to be factored out -----------------------------------
+class TracksterConfig:
+    """ Trackster configuration encapsulation. """
+
+    def __init__( self, actions ):
+        self.actions = actions
+
+    @staticmethod
+    def parse( root ):
+        actions = []
+        for action_elt in root.findall( "action" ):
+            actions.append( SetParamAction.parse( action_elt ) )
+        return TracksterConfig( actions )
+
+
+class SetParamAction:
+    """ Set parameter action. """
+
+    def __init__( self, name, output_name ):
+        self.name = name
+        self.output_name = output_name
+
+    @staticmethod
+    def parse( elt ):
+        """ Parse action from element. """
+        return SetParamAction( elt.get( "name" ), elt.get( "output_name" ) )
+
+
+class BadValue( object ):
+    def __init__( self, value ):
+        self.value = value
+
+
+class InterruptedUpload( Exception ):
+    pass
diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py
new file mode 100644
index 0000000..e4ff04a
--- /dev/null
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -0,0 +1,821 @@
+import json
+import logging
+import re
+from json import dumps
+
+from six import string_types
+
+from galaxy import model
+from galaxy.exceptions import ObjectInvalid
+from galaxy.model import LibraryDatasetDatasetAssociation
+from galaxy.tools.parameters import update_param
+from galaxy.tools.parameters.basic import DataCollectionToolParameter, DataToolParameter, RuntimeValue
+from galaxy.tools.parameters.wrapped import WrappedParameters
+from galaxy.util import ExecutionTimer
+from galaxy.util.none_like import NoneDataset
+from galaxy.util.odict import odict
+from galaxy.util.template import fill_template
+from galaxy.web import url_for
+
+log = logging.getLogger( __name__ )
+
+
+class ToolExecutionCache( object ):
+    """ An object mean to cache calculation caused by repeatedly evaluting
+    the same tool by the same user with slightly different parameters.
+    """
+    def __init__(self, trans):
+        self.trans = trans
+        self.current_user_roles = trans.get_current_user_roles()
+
+
+class ToolAction( object ):
+    """
+    The actions to be taken when a tool is run (after parameters have
+    been converted and validated).
+    """
+    def execute( self, tool, trans, incoming={}, set_output_hid=True ):
+        raise TypeError("Abstract method")
+
+
+class DefaultToolAction( object ):
+    """Default tool action is to run an external command"""
+
+    def collect_input_datasets( self, tool, param_values, trans, current_user_roles=None ):
+        """
+        Collect any dataset inputs from incoming. Returns a mapping from
+        parameter name to Dataset instance for each tool parameter that is
+        of the DataToolParameter type.
+        """
+        if current_user_roles is None:
+            current_user_roles = trans.get_current_user_roles()
+        input_datasets = odict()
+
+        def visitor( input, value, prefix, parent=None, **kwargs ):
+
+            def process_dataset( data, formats=None ):
+                if not data or isinstance( data, RuntimeValue ):
+                    return None
+                if formats is None:
+                    formats = input.formats
+                if not data.datatype.matches_any( formats ):
+                    # Need to refresh in case this conversion just took place, i.e. an input above in the tool performed the same conversion
+                    trans.sa_session.refresh( data )
+                    target_ext, converted_dataset = data.find_conversion_destination( formats )
+                    if target_ext:
+                        if converted_dataset:
+                            data = converted_dataset
+                        else:
+                            data = data.get_converted_dataset( trans, target_ext, target_context=parent )
+
+                if not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+                    raise Exception( "User does not have permission to use a dataset (%s) provided for input." % data.id )
+                return data
+            if isinstance( input, DataToolParameter ):
+                if isinstance( value, list ):
+                    # If there are multiple inputs with the same name, they
+                    # are stored as name1, name2, ...
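+                    # e.g. two datasets supplied for a param "input" (hypothetical)
+                    # are registered as input_datasets["input1"] and
+                    # input_datasets["input2"], with "input" itself pointing at the
+                    # first so metadata can be copied from it.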
+                    for i, v in enumerate( value ):
+                        processed_dataset = process_dataset( v )
+                        if i == 0:
+                            # Allow copying metadata to output, first item will be source.
+                            input_datasets[ prefix + input.name ] = processed_dataset
+                        input_datasets[ prefix + input.name + str( i + 1 ) ] = processed_dataset
+                        conversions = []
+                        for conversion_name, conversion_extensions, conversion_datatypes in input.conversions:
+                            new_data = process_dataset( input_datasets[ prefix + input.name + str( i + 1 ) ], conversion_datatypes )
+                            if not new_data or new_data.datatype.matches_any( conversion_datatypes ):
+                                input_datasets[ prefix + conversion_name + str( i + 1 ) ] = new_data
+                                conversions.append( ( conversion_name, new_data ) )
+                            else:
+                                raise Exception('A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name + str( i + 1 ) ].extension, conversion_extensions ) )
+                        if parent:
+                            parent[ input.name ][ i ] = input_datasets[ prefix + input.name + str( i + 1 ) ]
+                            for conversion_name, conversion_data in conversions:
+                                # allow explicit conversion to be stored in job_parameter table
+                                parent[ conversion_name ][ i ] = conversion_data.id  # a more robust way to determine JSONable value is desired
+                        else:
+                            param_values[ input.name ][ i ] = input_datasets[ prefix + input.name + str( i + 1 ) ]
+                            for conversion_name, conversion_data in conversions:
+                                # allow explicit conversion to be stored in job_parameter table
+                                param_values[ conversion_name ][i] = conversion_data.id  # a more robust way to determine JSONable value is desired
+                else:
+                    input_datasets[ prefix + input.name ] = process_dataset( value )
+                    conversions = []
+                    for conversion_name, conversion_extensions, conversion_datatypes in input.conversions:
+                        new_data = process_dataset( input_datasets[ prefix + input.name ], conversion_datatypes )
+                        if not new_data or new_data.datatype.matches_any( conversion_datatypes ):
+                            input_datasets[ prefix + conversion_name ] = new_data
+                            conversions.append( ( conversion_name, new_data ) )
+                        else:
+                            raise Exception( 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name ].extension, conversion_extensions ) )
+                    target_dict = parent
+                    if not target_dict:
+                        target_dict = param_values
+                    target_dict[ input.name ] = input_datasets[ prefix + input.name ]
+                    for conversion_name, conversion_data in conversions:
+                        # allow explicit conversion to be stored in job_parameter table
+                        target_dict[ conversion_name ] = conversion_data.id  # a more robust way to determine JSONable value is desired
+            elif isinstance( input, DataCollectionToolParameter ):
+                if not value:
+                    return
+
+                dataset_instances = []
+                if hasattr( value, 'child_collection' ):
+                    # if we are mapping a collection over a tool, we only require the child_collection
+                    dataset_instances = value.child_collection.dataset_instances
+                else:
+                    # else the tool takes a collection as input so we need everything
+                    dataset_instances = value.collection.dataset_instances
+
+                for i, v in enumerate( dataset_instances ):
+                    data = v
+                    if not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+                        raise Exception( "User does not have permission to use a dataset (%s) provided for input." % data.id )
+                    # Skipping implicit conversion stuff for now, revisit at
+                    # some point and figure out if implicitly converting a
+                    # dataset collection makes sense.
+                    input_datasets[ prefix + input.name + str( i + 1 ) ] = data
+
+        tool.visit_inputs( param_values, visitor )
+        return input_datasets
+
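For readers unfamiliar with the visitor idiom used by tool.visit_inputs above, here is a
minimal, self-contained sketch of the idea. The stub visit_inputs and parameter tree below
are hypothetical stand-ins, not Galaxy's implementation: each leaf parameter is handed to
the callback together with a prefix naming its position in the nested structure, which is
how the prefix + input.name keys above are formed.

    # Hypothetical stand-in for tool.visit_inputs: walk a nested parameter
    # tree and call the visitor once per leaf, passing a path-like prefix.
    def visit_inputs(params, visitor, prefix=""):
        for name, value in params.items():
            if isinstance(value, dict):
                visit_inputs(value, visitor, prefix + name + "|")
            else:
                visitor(name, value, prefix)

    collected = {}

    def visitor(name, value, prefix):
        # Mirrors how collect_input_datasets keys datasets by prefix + name.
        collected[prefix + name] = value

    visit_inputs({"input1": "hda_1", "cond": {"input2": "hda_2"}}, visitor)
    print(collected)  # {'input1': 'hda_1', 'cond|input2': 'hda_2'}
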
+    def collect_input_dataset_collections( self, tool, param_values ):
+        def append_to_key( the_dict, key, value ):
+            if key not in the_dict:
+                the_dict[ key ] = []
+            the_dict[ key ].append( value )
+
+        input_dataset_collections = dict()
+
+        def visitor( input, value, prefix, parent=None, **kwargs ):
+            if isinstance( input, DataToolParameter ):
+                values = value
+                if not isinstance( values, list ):
+                    values = [ value ]
+                for i, value in enumerate(values):
+                    if isinstance( value, model.HistoryDatasetCollectionAssociation ):
+                        append_to_key( input_dataset_collections, prefix + input.name, ( value, True ) )
+                        target_dict = parent
+                        if not target_dict:
+                            target_dict = param_values
+                        # This is just a DataToolParameter, so replace this
+                        # collection with individual datasets. The database will
+                        # still record the collection, which should be enough for
+                        # workflow extraction and tool rerun.
+                        dataset_instances = value.collection.dataset_instances
+                        if i == 0:
+                            target_dict[ input.name ] = []
+                        target_dict[ input.name ].extend( dataset_instances )
+            elif isinstance( input, DataCollectionToolParameter ):
+                append_to_key( input_dataset_collections, prefix + input.name, ( value, False ) )
+
+        tool.visit_inputs( param_values, visitor )
+        return input_dataset_collections
+
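The append_to_key helper above is dict.setdefault in disguise; a quick illustration of the
(collection, reduced) grouping it builds (the values here are invented placeholders):

    input_dataset_collections = {}

    def append_to_key(the_dict, key, value):
        the_dict.setdefault(key, []).append(value)

    # (collection, reduced) pairs; True marks a collection that was reduced
    # to a plain list of datasets for a DataToolParameter.
    append_to_key(input_dataset_collections, "input1", ("hdca_7", True))
    append_to_key(input_dataset_collections, "input1", ("hdca_9", True))
    print(input_dataset_collections)
    # {'input1': [('hdca_7', True), ('hdca_9', True)]}
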
+    def _check_access( self, tool, trans ):
+        assert tool.allow_user_access( trans.user ), "User (%s) is not allowed to access this tool." % ( trans.user )
+
+    def _collect_inputs( self, tool, trans, incoming, history, current_user_roles ):
+        """ Collect history as well as input datasets and collections. """
+        app = trans.app
+        # Set history.
+        if not history:
+            history = tool.get_default_history_by_trans( trans, create=True )
+        if history not in trans.sa_session:
+            history = trans.sa_session.query( app.model.History ).get( history.id )
+
+        # Track input dataset collections - but replace with simply lists so collect
+        # input datasets can process these normally.
+        inp_dataset_collections = self.collect_input_dataset_collections( tool, incoming )
+        # Collect any input datasets from the incoming parameters
+        inp_data = self.collect_input_datasets( tool, incoming, trans, current_user_roles=current_user_roles )
+
+        return history, inp_data, inp_dataset_collections
+
+    def execute(self, tool, trans, incoming={}, return_job=False, set_output_hid=True, history=None, job_params=None, rerun_remap_job_id=None, mapping_over_collection=False, execution_cache=None ):
+        """
+        Executes a tool, creating job and tool outputs, associating them, and
+        submitting the job to the job queue. If history is not specified, use
+        trans.history as destination for tool's output datasets.
+        """
+        self._check_access( tool, trans )
+        app = trans.app
+        if execution_cache is None:
+            execution_cache = ToolExecutionCache(trans)
+        current_user_roles = execution_cache.current_user_roles
+        history, inp_data, inp_dataset_collections = self._collect_inputs(tool, trans, incoming, history, current_user_roles)
+
+        # Build name for output datasets based on tool name and input names
+        on_text = self._get_on_text( inp_data )
+
+        # format='input" previously would give you a random extension from
+        # the input extensions, now it should just give "input" as the output
+        # format.
+        input_ext = 'data' if tool.profile < 16.04 else "input"
+        input_dbkey = incoming.get( "dbkey", "?" )
+        for name, data in reversed(inp_data.items()):
+            if not data:
+                data = NoneDataset( datatypes_registry=app.datatypes_registry )
+                continue
+
+            # Convert LDDA to an HDA.
+            if isinstance(data, LibraryDatasetDatasetAssociation):
+                data = data.to_history_dataset_association( None )
+                inp_data[name] = data
+
+            if tool.profile < 16.04:
+                input_ext = data.ext
+
+            if data.dbkey not in [None, '?']:
+                input_dbkey = data.dbkey
+
+            identifier = getattr( data, "element_identifier", None )
+            if identifier is not None:
+                incoming[ "%s|__identifier__" % name ] = identifier
+
+        # Collect chromInfo dataset and add as parameters to incoming
+        ( chrom_info, db_dataset ) = app.genome_builds.get_chrom_info( input_dbkey, trans=trans, custom_build_hack_get_len_from_fasta_conversion=tool.id != 'CONVERTER_fasta_to_len' )
+        if db_dataset:
+            inp_data.update( { "chromInfo": db_dataset } )
+        incoming[ "chromInfo" ] = chrom_info
+
+        # Determine output dataset permission/roles list
+        existing_datasets = [ inp for inp in inp_data.values() if inp ]
+        if existing_datasets:
+            output_permissions = app.security_agent.guess_derived_permissions_for_datasets( existing_datasets )
+        else:
+            # No valid inputs, we will use history defaults
+            output_permissions = app.security_agent.history_get_default_permissions( history )
+
+        # Add the dbkey to the incoming parameters
+        incoming[ "dbkey" ] = input_dbkey
+        # wrapped params are used by change_format action and by output.label; only perform this wrapping once, as needed
+        wrapped_params = self._wrapped_params( trans, tool, incoming )
+
+        out_data = odict()
+        input_collections = dict( (k, v[0][0]) for k, v in inp_dataset_collections.items() )
+        output_collections = OutputCollections(
+            trans,
+            history,
+            tool=tool,
+            tool_action=self,
+            input_collections=input_collections,
+            mapping_over_collection=mapping_over_collection,
+            on_text=on_text,
+            incoming=incoming,
+            params=wrapped_params.params,
+            job_params=job_params,
+        )
+
+        # Keep track of parent / child relationships, we'll create all the
+        # datasets first, then create the associations
+        parent_to_child_pairs = []
+        child_dataset_names = set()
+        object_store_populator = ObjectStorePopulator( app )
+
+        def handle_output( name, output, hidden=None ):
+            if output.parent:
+                parent_to_child_pairs.append( ( output.parent, name ) )
+                child_dataset_names.add( name )
+            # What is the following hack for? Need to document under what
+            # conditions the following can occur. (james at bx.psu.edu)
+            # HACK: the output data has already been created;
+            #      this happens e.g. as a result of the async controller
+            if name in incoming:
+                dataid = incoming[name]
+                data = trans.sa_session.query( app.model.HistoryDatasetAssociation ).get( dataid )
+                assert data is not None
+                out_data[name] = data
+            else:
+                ext = determine_output_format(
+                    output,
+                    wrapped_params.params,
+                    inp_data,
+                    inp_dataset_collections,
+                    input_ext
+                )
+                data = app.model.HistoryDatasetAssociation( extension=ext, create_dataset=True, flush=False )
+                if hidden is None:
+                    hidden = output.hidden
+                if hidden:
+                    data.visible = False
+                trans.sa_session.add( data )
+                trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions, new=True )
+
+            # Must flush before setting object store id currently.
+            # TODO: optimize this.
+            trans.sa_session.flush()
+            object_store_populator.set_object_store_id( data )
+
+            # This may not be necessary with the new parent/child associations
+            data.designation = name
+            # Copy metadata from one of the inputs if requested.
+
+            # metadata source can be either a string referencing an input
+            # or an actual object to copy.
+            metadata_source = output.metadata_source
+            if metadata_source:
+                if isinstance( metadata_source, string_types ):
+                    metadata_source = inp_data.get( metadata_source )
+
+            if metadata_source is not None:
+                data.init_meta( copy_from=metadata_source )
+            else:
+                data.init_meta()
+            # Take dbkey from LAST input
+            data.dbkey = str(input_dbkey)
+            # Set state
+            data.blurb = "queued"
+            # Set output label
+            data.name = self.get_output_name( output, data, tool, on_text, trans, incoming, history, wrapped_params.params, job_params )
+            # Store output
+            out_data[ name ] = data
+            if output.actions:
+                # Apply pre-job tool-output-dataset actions; e.g. setting metadata, changing format
+                output_action_params = dict( out_data )
+                output_action_params.update( incoming )
+                output.actions.apply_action( data, output_action_params )
+            # Also set the default values of actions of type metadata
+            self.set_metadata_defaults( output, data, tool, on_text, trans, incoming, history, wrapped_params.params, job_params )
+            # Flush all datasets at once.
+            return data
+
+        for name, output in tool.outputs.items():
+            if not filter_output(output, incoming):
+                if output.collection:
+                    collections_manager = app.dataset_collections_service
+                    element_identifiers = []
+                    known_outputs = output.known_outputs( input_collections, collections_manager.type_registry )
+                    # Just to echo TODO elsewhere - this should be restructured to allow
+                    # nested collections.
+                    for output_part_def in known_outputs:
+                        # Add elements to top-level collection, unless nested...
+                        current_element_identifiers = element_identifiers
+                        current_collection_type = output.structure.collection_type
+
+                        for parent_id in (output_part_def.parent_ids or []):
+                            # TODO: replace following line with formal abstractions for doing this.
+                            current_collection_type = ":".join(current_collection_type.split(":")[1:])
+                            name_to_index = dict((value["name"], index) for (index, value) in enumerate(current_element_identifiers))
+                            if parent_id not in name_to_index:
+                                index = len(current_element_identifiers)
+                                current_element_identifiers.append(dict(
+                                    name=parent_id,
+                                    collection_type=current_collection_type,
+                                    src="new_collection",
+                                    element_identifiers=[],
+                                ))
+                            else:
+                                index = name_to_index[parent_id]
+                            current_element_identifiers = current_element_identifiers[ index ][ "element_identifiers" ]
+
+                        effective_output_name = output_part_def.effective_output_name
+                        element = handle_output( effective_output_name, output_part_def.output_def, hidden=True )
+                        # TODO: this shouldn't exist in the top-level of the history at all
+                        # but for now we are still working around that by hiding the contents
+                        # there.
+                        # The following hack causes the dataset not to be added to the history...
+                        child_dataset_names.add( effective_output_name )
+
+                        history.add_dataset( element, set_hid=set_output_hid, quota=False )
+                        trans.sa_session.add( element )
+                        trans.sa_session.flush()
+
+                        current_element_identifiers.append({
+                            "__object__": element,
+                            "name": output_part_def.element_identifier,
+                        })
+                        log.info(element_identifiers)
+
+                    if output.dynamic_structure:
+                        assert not element_identifiers  # known_outputs must have been empty
+                        element_kwds = dict(elements=collections_manager.ELEMENTS_UNINITIALIZED)
+                    else:
+                        element_kwds = dict(element_identifiers=element_identifiers)
+
+                    output_collections.create_collection(
+                        output=output,
+                        name=name,
+                        **element_kwds
+                    )
+                else:
+                    handle_output_timer = ExecutionTimer()
+                    handle_output( name, output )
+                    log.info("Handled output named %s for tool %s %s" % (name, tool.id, handle_output_timer))
+
+        add_datasets_timer = ExecutionTimer()
+        # Add all the top-level (non-child) datasets to the history unless otherwise specified
+        datasets_to_persist = []
+        for name in out_data.keys():
+            if name not in child_dataset_names and name not in incoming:  # don't add children or already existing datasets, e.g. those created asynchronously
+                data = out_data[ name ]
+                datasets_to_persist.append( data )
+        # Set HID and add to history.
+        # This is brand new and certainly empty so don't worry about quota.
+        # TOOL OPTIMIZATION NOTE - from the above loop to the job creation below, 99%+
+        # of execution time happens within history.add_datasets.
+        history.add_datasets( trans.sa_session, datasets_to_persist, set_hid=set_output_hid, quota=False, flush=False )
+
+        # Add all the children to their parents
+        for parent_name, child_name in parent_to_child_pairs:
+            parent_dataset = out_data[ parent_name ]
+            child_dataset = out_data[ child_name ]
+            parent_dataset.children.append( child_dataset )
+
+        log.info("Added output datasets to history %s" % add_datasets_timer)
+        job_setup_timer = ExecutionTimer()
+        # Create the job object
+        job, galaxy_session = self._new_job_for_session( trans, tool, history )
+        self._record_inputs( trans, tool, job, incoming, inp_data, inp_dataset_collections, current_user_roles )
+        self._record_outputs( job, out_data, output_collections )
+        job.object_store_id = object_store_populator.object_store_id
+        if job_params:
+            job.params = dumps( job_params )
+        job.set_handler(tool.get_job_handler(job_params))
+        trans.sa_session.add( job )
+        # Now that we have a job id, we can remap any outputs if this is a rerun and the user chose to continue dependent jobs
+        # This functionality requires tracking jobs in the database.
+        if app.config.track_jobs_in_database and rerun_remap_job_id is not None:
+            try:
+                old_job = trans.sa_session.query( app.model.Job ).get(rerun_remap_job_id)
+                assert old_job is not None, '(%s/%s): Old job id is invalid' % (rerun_remap_job_id, job.id)
+                assert old_job.tool_id == job.tool_id, '(%s/%s): Old tool id (%s) does not match rerun tool id (%s)' % (old_job.id, job.id, old_job.tool_id, job.tool_id)
+                if trans.user is not None:
+                    assert old_job.user_id == trans.user.id, '(%s/%s): Old user id (%s) does not match rerun user id (%s)' % (old_job.id, job.id, old_job.user_id, trans.user.id)
+                elif trans.user is None and type( galaxy_session ) == trans.model.GalaxySession:
+                    assert old_job.session_id == galaxy_session.id, '(%s/%s): Old session id (%s) does not match rerun session id (%s)' % (old_job.id, job.id, old_job.session_id, galaxy_session.id)
+                else:
+                    raise Exception('(%s/%s): Remapping via the API is not (yet) supported' % (old_job.id, job.id))
+                # Duplicate PJAs before remap.
+                for pjaa in old_job.post_job_actions:
+                    job.add_post_job_action(pjaa.post_job_action)
+                for jtod in old_job.output_datasets:
+                    for (job_to_remap, jtid) in [(jtid.job, jtid) for jtid in jtod.dataset.dependent_jobs]:
+                        if (trans.user is not None and job_to_remap.user_id == trans.user.id) or (trans.user is None and job_to_remap.session_id == galaxy_session.id):
+                            if job_to_remap.state == job_to_remap.states.PAUSED:
+                                job_to_remap.state = job_to_remap.states.NEW
+                            for hda in [ dep_jtod.dataset for dep_jtod in job_to_remap.output_datasets ]:
+                                if hda.state == hda.states.PAUSED:
+                                    hda.state = hda.states.NEW
+                                    hda.info = None
+                            input_values = dict( [ ( p.name, json.loads( p.value ) ) for p in job_to_remap.parameters ] )
+                            update_param( jtid.name, input_values, str( out_data[ jtod.name ].id ) )
+                            for p in job_to_remap.parameters:
+                                p.value = json.dumps( input_values[ p.name ] )
+                            jtid.dataset = out_data[jtod.name]
+                            jtid.dataset.hid = jtod.dataset.hid
+                            log.info('Job %s input HDA %s remapped to new HDA %s' % (job_to_remap.id, jtod.dataset.id, jtid.dataset.id))
+                            trans.sa_session.add(job_to_remap)
+                            trans.sa_session.add(jtid)
+                    jtod.dataset.visible = False
+                    trans.sa_session.add(jtod)
+            except Exception:
+                log.exception('Cannot remap rerun dependencies.')
+
+        log.info("Setup for job %s complete, ready to flush %s" % (job.log_str(), job_setup_timer))
+
+        job_flush_timer = ExecutionTimer()
+        trans.sa_session.flush()
+        log.info("Flushed transaction for job %s %s" % (job.log_str(), job_flush_timer))
+        # Some tools are not really executable, but jobs are still created for them ( for record keeping ).
+        # Examples include tools that redirect to other applications ( epigraph ).  These special tools must
+        # include something that can be retrieved from the params ( e.g., REDIRECT_URL ) to keep the job
+        # from being queued.
+        if 'REDIRECT_URL' in incoming:
+            # Get the dataset - there should only be 1
+            for name in inp_data.keys():
+                dataset = inp_data[ name ]
+            redirect_url = tool.parse_redirect_url( dataset, incoming )
+            # GALAXY_URL should be included in the tool params to enable the external application
+            # to send back to the current Galaxy instance
+            GALAXY_URL = incoming.get( 'GALAXY_URL', None )
+            assert GALAXY_URL is not None, "GALAXY_URL parameter missing in tool config."
+            redirect_url += "&GALAXY_URL=%s" % GALAXY_URL
+            # Job should not be queued, so set state to ok
+            job.set_state( app.model.Job.states.OK )
+            job.info = "Redirected to: %s" % redirect_url
+            trans.sa_session.add( job )
+            trans.sa_session.flush()
+            trans.response.send_redirect( url_for( controller='tool_runner', action='redirect', redirect_url=redirect_url ) )
+        else:
+            # Put the job in the queue if tracking in memory
+            app.job_queue.put( job.id, job.tool_id )
+            trans.log_event( "Added job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+            return job, out_data
+
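The rerun-remapping branch above relies on update_param to swap the new output's id into a
decoded, possibly nested, parameter dict. Galaxy's actual helper lives elsewhere in the
codebase; the recursive sketch below only illustrates the idea and is not the real
implementation:

    # Illustrative only: replace the value stored under `name` anywhere in a
    # nested parameter structure (dicts and lists of dicts).
    def update_param(name, input_values, new_value):
        for key, value in list(input_values.items()):
            if key == name:
                input_values[key] = new_value
            elif isinstance(value, dict):
                update_param(name, value, new_value)
            elif isinstance(value, list):
                for item in value:
                    if isinstance(item, dict):
                        update_param(name, item, new_value)

    params = {"queries": [{"input2": "old_id"}], "input1": "old_id"}
    update_param("input2", params, "new_id")
    print(params)  # {'queries': [{'input2': 'new_id'}], 'input1': 'old_id'}
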
+    def _wrapped_params( self, trans, tool, incoming ):
+        wrapped_params = WrappedParameters( trans, tool, incoming )
+        return wrapped_params
+
+    def _get_on_text( self, inp_data ):
+        input_names = []
+        for name, data in reversed( inp_data.items() ):
+            if getattr( data, "hid", None ):
+                input_names.append( 'data %s' % data.hid )
+
+        return on_text_for_names( input_names )
+
+    def _new_job_for_session( self, trans, tool, history ):
+        job = trans.app.model.Job()
+        galaxy_session = None
+
+        if hasattr( trans, "get_galaxy_session" ):
+            galaxy_session = trans.get_galaxy_session()
+            # If we're submitting from the API, there won't be a session.
+            if type( galaxy_session ) == trans.model.GalaxySession:
+                job.session_id = galaxy_session.id
+        if trans.user is not None:
+            job.user_id = trans.user.id
+        job.history_id = history.id
+        job.tool_id = tool.id
+        try:
+            # For backward compatibility, some tools may not have versions yet.
+            job.tool_version = tool.version
+        except:
+            job.tool_version = "1.0.0"
+        return job, galaxy_session
+
+    def _record_inputs( self, trans, tool, job, incoming, inp_data, inp_dataset_collections, current_user_roles ):
+        # FIXME: Don't need all of incoming here, just the defined parameters
+        #        from the tool. We need to deal with tools that pass all post
+        #        parameters to the command as a special case.
+        for name, dataset_collection_info_pairs in inp_dataset_collections.items():
+            first_reduction = True
+            for ( dataset_collection, reduced ) in dataset_collection_info_pairs:
+                # TODO: update incoming for list...
+                if reduced and first_reduction:
+                    first_reduction = False
+                    incoming[ name ] = []
+                if reduced:
+                    incoming[ name ].append( { 'id': dataset_collection.id, 'src': 'hdca' } )
+                # Should verify security? We check security of individual
+                # datasets below?
+                # TODO: verify we can have multiple with the same name; don't want to lose traceability
+                job.add_input_dataset_collection( name, dataset_collection )
+        for name, value in tool.params_to_strings( incoming, trans.app ).items():
+            job.add_parameter( name, value )
+        self._check_input_data_access( trans, job, inp_data, current_user_roles )
+
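When a collection is reduced, _record_inputs rewrites the incoming value as a list of
source references so the recorded job parameters stay JSON-serializable. A toy
illustration of the shape it produces (the ids are invented):

    incoming = {}
    name = "input1"
    reduced = [7, 9]  # ids of reduced HDCAs, invented for this example

    incoming[name] = []
    for collection_id in reduced:
        incoming[name].append({"id": collection_id, "src": "hdca"})

    print(incoming)
    # {'input1': [{'id': 7, 'src': 'hdca'}, {'id': 9, 'src': 'hdca'}]}
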
+    def _record_outputs( self, job, out_data, output_collections ):
+        out_collections = output_collections.out_collections
+        out_collection_instances = output_collections.out_collection_instances
+        for name, dataset in out_data.items():
+            job.add_output_dataset( name, dataset )
+        for name, dataset_collection in out_collections.items():
+            job.add_implicit_output_dataset_collection( name, dataset_collection )
+        for name, dataset_collection_instance in out_collection_instances.items():
+            job.add_output_dataset_collection( name, dataset_collection_instance )
+
+    def _check_input_data_access( self, trans, job, inp_data, current_user_roles ):
+        access_timer = ExecutionTimer()
+        for name, dataset in inp_data.items():
+            if dataset:
+                if not trans.app.security_agent.can_access_dataset( current_user_roles, dataset.dataset ):
+                    raise Exception("User does not have permission to use a dataset (%s) provided for input." % dataset.id)
+                if dataset in trans.sa_session:
+                    job.add_input_dataset( name, dataset=dataset )
+                else:
+                    job.add_input_dataset( name, dataset_id=dataset.id )
+            else:
+                job.add_input_dataset( name, None )
+        job_str = job.log_str()
+        log.info("Verified access to datasets for %s %s" % (job_str, access_timer))
+
+    def get_output_name( self, output, dataset, tool, on_text, trans, incoming, history, params, job_params ):
+        if output.label:
+            params['tool'] = tool
+            params['on_string'] = on_text
+            return fill_template( output.label, context=params )
+        else:
+            return self._get_default_data_name( dataset, tool, on_text=on_text, trans=trans, incoming=incoming, history=history, params=params, job_params=job_params )
+
+    def set_metadata_defaults( self, output, dataset, tool, on_text, trans, incoming, history, params, job_params ):
+        """
+        This allows mapping names of input files to metadata default values. Example:
+
+        <data format="tabular" name="output" label="Tabular output, aggregates data from individual_inputs" >
+            <actions>
+                <action name="column_names" type="metadata" default="${','.join([input.name for input in $individual_inputs ])}" />
+            </actions>
+        </data>
+        """
+        if output.actions:
+            for action in output.actions.actions:
+                if action.tag == "metadata" and action.default:
+                    metadata_new_value = fill_template( action.default, context=params ).split(",")
+                    dataset.metadata.__setattr__(str(action.name), metadata_new_value)
+
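To make the docstring example above concrete: fill_template renders the Cheetah expression
against the wrapped parameters, and the comma-separated result is split into a list before
being assigned to the metadata element. The rendered string below is invented for
illustration:

    # Invented example of what the rendered ${','.join(...)} default might yield.
    filled = "tumor.tsv,normal.tsv,control.tsv"
    metadata_new_value = filled.split(",")
    print(metadata_new_value)  # ['tumor.tsv', 'normal.tsv', 'control.tsv']
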
+    def _get_default_data_name( self, dataset, tool, on_text=None, trans=None, incoming=None, history=None, params=None, job_params=None, **kwd ):
+        name = tool.name
+        if on_text:
+            name += ( " on " + on_text )
+        return name
+
+
+class ObjectStorePopulator( object ):
+    """ Small helper for interacting with the object store and making sure all
+    datasets from a job end up with the same object_store_id.
+    """
+
+    def __init__( self, app ):
+        self.object_store = app.object_store
+        self.object_store_id = None
+
+    def set_object_store_id( self, data ):
+        # Create an empty file immediately.  The first dataset will be
+        # created in the "default" store, all others will be created in
+        # the same store as the first.
+        data.dataset.object_store_id = self.object_store_id
+        try:
+            self.object_store.create( data.dataset )
+        except ObjectInvalid:
+            raise Exception('Unable to create output dataset: object store is full')
+        self.object_store_id = data.dataset.object_store_id  # these will be the same thing after the first output
+
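A minimal sketch of how ObjectStorePopulator pins every output of a job to the store
chosen for the first one. The stub store and dataset below are hypothetical; the real
object store is app.object_store and the id lives on data.dataset:

    class StubObjectStore(object):
        # Hypothetical stand-in: assigns a backend id on first create().
        def create(self, dataset):
            if dataset.object_store_id is None:
                dataset.object_store_id = "disk1"

    class StubDataset(object):
        object_store_id = None

    store = StubObjectStore()
    first, second = StubDataset(), StubDataset()

    object_store_id = None
    for data in (first, second):
        data.object_store_id = object_store_id   # None on the first pass
        store.create(data)                       # store picks or keeps an id
        object_store_id = data.object_store_id   # reused for later outputs

    assert first.object_store_id == second.object_store_id == "disk1"
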
+
+class OutputCollections(object):
+    """ Keeps track of collections (DC or HDCA) created by actions.
+
+    Actions do fairly different things depending on whether we are creating
+    just part of a collection or a whole output collection (mapping_over_collection
+    parameter).
+    """
+
+    def __init__(self, trans, history, tool, tool_action, input_collections, mapping_over_collection, on_text, incoming, params, job_params):
+        self.trans = trans
+        self.history = history
+        self.tool = tool
+        self.tool_action = tool_action
+        self.input_collections = input_collections
+        self.mapping_over_collection = mapping_over_collection
+        self.on_text = on_text
+        self.incoming = incoming
+        self.params = params
+        self.job_params = job_params
+        self.out_collections = {}
+        self.out_collection_instances = {}
+
+    def create_collection(self, output, name, **element_kwds):
+        input_collections = self.input_collections
+        collections_manager = self.trans.app.dataset_collections_service
+        collection_type = output.structure.collection_type
+        if collection_type is None:
+            collection_type_source = output.structure.collection_type_source
+            if collection_type_source is None:
+                # TODO: Not a new problem, but this should be determined
+                # sooner.
+                raise Exception("Could not determine collection type to create.")
+            if collection_type_source not in input_collections:
+                raise Exception("Could not find collection type source with name [%s]." % collection_type_source)
+
+            collection_type = input_collections[collection_type_source].collection.collection_type
+
+        if self.mapping_over_collection:
+            dc = collections_manager.create_dataset_collection(
+                self.trans,
+                collection_type=collection_type,
+                **element_kwds
+            )
+            self.out_collections[ name ] = dc
+        else:
+            hdca_name = self.tool_action.get_output_name(
+                output,
+                None,
+                self.tool,
+                self.on_text,
+                self.trans,
+                self.incoming,
+                self.history,
+                self.params,
+                self.job_params,
+            )
+            hdca = collections_manager.create(
+                self.trans,
+                self.history,
+                name=hdca_name,
+                collection_type=collection_type,
+                trusted_identifiers=True,
+                **element_kwds
+            )
+            # name here is the name of the output element - not the name
+            # of the HDCA.
+            self.out_collection_instances[ name ] = hdca
+
+
+def on_text_for_names( input_names ):
+    # input_names may contain duplicates... this is because the first value in
+    # multiple input dataset parameters will appear twice, once as param_name
+    # and once as param_name1.
+    unique_names = []
+    for name in input_names:
+        if name not in unique_names:
+            unique_names.append( name )
+    input_names = unique_names
+
+    # Build name for output datasets based on tool name and input names
+    if len( input_names ) == 1:
+        on_text = input_names[0]
+    elif len( input_names ) == 2:
+        on_text = '%s and %s' % tuple(input_names[0:2])
+    elif len( input_names ) == 3:
+        on_text = '%s, %s, and %s' % tuple(input_names[0:3])
+    elif len( input_names ) > 3:
+        on_text = '%s, %s, and others' % tuple(input_names[0:2])
+    else:
+        on_text = ""
+    return on_text
+
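Assuming the function above is in scope, the branching yields, for example:

    print(on_text_for_names(["data 1"]))                                # data 1
    print(on_text_for_names(["data 1", "data 2"]))                      # data 1 and data 2
    print(on_text_for_names(["data 1", "data 2", "data 3"]))            # data 1, data 2, and data 3
    print(on_text_for_names(["data 1", "data 2", "data 3", "data 4"]))  # data 1, data 2, and others
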
+
+def filter_output(output, incoming):
+    for filter in output.filters:
+        try:
+            if not eval( filter.text.strip(), globals(), incoming ):
+                return True  # do not create this dataset
+        except Exception as e:
+            log.debug( 'Dataset output filter failed: %s' % e )
+    return False
+
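filter_output evaluates each <filter> expression with the incoming parameters as local
variables, and a dataset is skipped when a filter evaluates false. A self-contained
sketch with stub objects (these are not Galaxy's classes):

    class StubFilter(object):
        def __init__(self, text):
            self.text = text

    class StubOutput(object):
        def __init__(self, filters):
            self.filters = filters

    output = StubOutput([StubFilter("mode == 'advanced'")])
    print(filter_output(output, {"mode": "simple"}))    # True  -> skip this dataset
    print(filter_output(output, {"mode": "advanced"}))  # False -> create this dataset
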
+
+def determine_output_format(output, parameter_context, input_datasets, input_dataset_collections, random_input_ext):
+    """ Determines the output format for a dataset based on an abstract
+    description of the output (galaxy.tools.parser.ToolOutput), the parameter
+    wrappers, a map of the input datasets (name => HDA), and the last input
+    extension in the tool form.
+
+    TODO: Don't deal with XML here - move this logic into ToolOutput.
+    TODO: Make the input extension used deterministic instead of random.
+    """
+    # the type should match the input
+    ext = output.format
+    if ext == "input":
+        ext = random_input_ext
+    format_source = output.format_source
+    if format_source is not None and format_source in input_datasets:
+        try:
+            input_dataset = input_datasets[output.format_source]
+            input_extension = input_dataset.ext
+            ext = input_extension
+        except Exception:
+            pass
+    elif format_source is not None:
+        if re.match(r"^[^\[\]]*\[[^\[\]]*\]$", format_source):
+            collection_name, element_index = format_source[0:-1].split("[")
+            # Treat as json to interpret "forward" vs 0 with type
+            # Make it feel more like Python, single quote better in XML also.
+            element_index = element_index.replace("'", '"')
+            element_index = json.loads(element_index)
+
+            if collection_name in input_dataset_collections:
+                try:
+                    input_collection = input_dataset_collections[collection_name][0][0]
+                    input_dataset = input_collection.collection[element_index].element_object
+                    input_extension = input_dataset.ext
+                    ext = input_extension
+                except Exception as e:
+                    log.debug("Exception while trying to determine format_source: %s", e)
+
+    # process change_format tags
+    if output.change_format is not None:
+        new_format_set = False
+        for change_elem in output.change_format:
+            for when_elem in change_elem.findall( 'when' ):
+                check = when_elem.get( 'input', None )
+                if check is not None:
+                    try:
+                        if '$' not in check:
+                            # allow a simple name or more complex specifications
+                            check = '${%s}' % check
+                        if str( fill_template( check, context=parameter_context ) ) == when_elem.get( 'value', None ):
+                            ext = when_elem.get( 'format', ext )
+                    except:  # bad tag input value; possibly referencing a param within a different conditional when block or other nonexistent grouping construct
+                        continue
+                else:
+                    check = when_elem.get( 'input_dataset', None )
+                    if check is not None:
+                        check = input_datasets.get( check, None )
+                        # At this point check is a HistoryDatasetAssociation object.
+                        check_format = when_elem.get( 'format', ext )
+                        check_value = when_elem.get( 'value', None )
+                        check_attribute = when_elem.get( 'attribute', None )
+                        if check is not None and check_value is not None and check_attribute is not None:
+                            # See if the attribute to be checked belongs to the HistoryDatasetAssociation object.
+                            if hasattr( check, check_attribute ):
+                                if str( getattr( check, check_attribute ) ) == str( check_value ):
+                                    ext = check_format
+                                    new_format_set = True
+                                    break
+                            # See if the attribute to be checked belongs to the metadata associated with the
+                            # HistoryDatasetAssociation object.
+                            if check.metadata is not None:
+                                metadata_value = check.metadata.get( check_attribute, None )
+                                if metadata_value is not None:
+                                    if str( metadata_value ) == str( check_value ):
+                                        ext = check_format
+                                        new_format_set = True
+                                        break
+            if new_format_set:
+                break
+    return ext
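
The format_source collection syntax handled above ("name[index]") is parsed with a regex,
and the bracketed index is decoded as JSON so that both string keys like param['forward']
and integer indices like param[0] work; single quotes are normalized to double quotes
first. The same steps in isolation:

    import json
    import re

    format_source = "paired_input['forward']"
    if re.match(r"^[^\[\]]*\[[^\[\]]*\]$", format_source):
        collection_name, element_index = format_source[0:-1].split("[")
        element_index = json.loads(element_index.replace("'", '"'))
        print(collection_name, repr(element_index))  # paired_input 'forward'

    print(json.loads("0"))  # integer indices come through as ints
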
diff --git a/lib/galaxy/tools/actions/data_manager.py b/lib/galaxy/tools/actions/data_manager.py
new file mode 100644
index 0000000..cdf2ff8
--- /dev/null
+++ b/lib/galaxy/tools/actions/data_manager.py
@@ -0,0 +1,18 @@
+from __init__ import DefaultToolAction
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class DataManagerToolAction( DefaultToolAction ):
+    """Tool action used for Data Manager Tools"""
+
+    def execute( self, tool, trans, **kwds ):
+        rval = super( DataManagerToolAction, self ).execute( tool, trans, **kwds )
+        if isinstance( rval, tuple ) and len( rval ) == 2 and isinstance( rval[0], trans.app.model.Job ):
+            assoc = trans.app.model.DataManagerJobAssociation( job=rval[0], data_manager_id=tool.data_manager_id )
+            trans.sa_session.add( assoc )
+            trans.sa_session.flush()
+        else:
+            log.error( "Got bad return value from DefaultToolAction.execute(): %s" % ( rval ) )
+        return rval
diff --git a/lib/galaxy/tools/actions/data_source.py b/lib/galaxy/tools/actions/data_source.py
new file mode 100644
index 0000000..08086e9
--- /dev/null
+++ b/lib/galaxy/tools/actions/data_source.py
@@ -0,0 +1,13 @@
+from __init__ import DefaultToolAction
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class DataSourceToolAction( DefaultToolAction ):
+    """Tool action used for Data Source Tools"""
+
+    def _get_default_data_name( self, dataset, tool, on_text=None, trans=None, incoming=None, history=None, params=None, job_params=None, **kwd ):
+        if incoming and 'name' in incoming:
+            return incoming[ 'name' ]
+        return super( DataSourceToolAction, self )._get_default_data_name( dataset, tool, on_text=on_text, trans=trans, incoming=incoming, history=history, params=params, job_params=job_params )
diff --git a/lib/galaxy/tools/actions/history_imp_exp.py b/lib/galaxy/tools/actions/history_imp_exp.py
new file mode 100644
index 0000000..e6d3ff0
--- /dev/null
+++ b/lib/galaxy/tools/actions/history_imp_exp.py
@@ -0,0 +1,144 @@
+import os
+import tempfile
+
+from galaxy.tools.actions import ToolAction
+from galaxy.tools.imp_exp import JobExportHistoryArchiveWrapper
+from galaxy.util.odict import odict
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class ImportHistoryToolAction( ToolAction ):
+    """Tool action used for importing a history to an archive. """
+
+    def execute( self, tool, trans, incoming={}, set_output_hid=False, overwrite=True, history=None, **kwargs ):
+        #
+        # Create job.
+        #
+        job = trans.app.model.Job()
+        session = trans.get_galaxy_session()
+        job.session_id = session and session.id
+        if history:
+            history_id = history.id
+        elif trans.history:
+            history_id = trans.history.id
+        else:
+            history_id = None
+        job.history_id = history_id
+        job.tool_id = tool.id
+        job.user_id = trans.user.id
+        start_job_state = job.state  # should be job.states.NEW
+        job.state = job.states.WAITING  # we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters
+        trans.sa_session.add( job )
+        trans.sa_session.flush()  # ensure job.id is available
+
+        #
+        # Setup job and job wrapper.
+        #
+
+        # Add association for keeping track of job, history relationship.
+
+        # Use abspath because mkdtemp() does not, contrary to the documentation,
+        # always return an absolute path.
+        archive_dir = os.path.abspath( tempfile.mkdtemp() )
+        jiha = trans.app.model.JobImportHistoryArchive( job=job, archive_dir=archive_dir )
+        trans.sa_session.add( jiha )
+
+        #
+        # Add parameters to job_parameter table.
+        #
+
+        # Set additional parameters.
+        incoming[ '__DEST_DIR__' ] = jiha.archive_dir
+        for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+            job.add_parameter( name, value )
+
+        job.state = start_job_state  # job inputs have been configured, restore initial job state
+        job.set_handler(tool.get_job_handler(None))
+        trans.sa_session.flush()
+
+        # Queue the job for execution
+        trans.app.job_queue.put( job.id, tool.id )
+        trans.log_event( "Added import history job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+
+        return job, odict()
+
+
+class ExportHistoryToolAction( ToolAction ):
+    """Tool action used for exporting a history to an archive. """
+
+    def execute( self, tool, trans, incoming={}, set_output_hid=False, overwrite=True, history=None, **kwargs ):
+        #
+        # Get history to export.
+        #
+        history = None
+        for name, value in incoming.iteritems():
+            if isinstance( value, trans.app.model.History ):
+                history_param_name = name
+                history = value
+                del incoming[ history_param_name ]
+                break
+
+        if not history:
+            raise Exception( 'There is no history to export.' )
+
+        #
+        # Create the job and output dataset objects
+        #
+        job = trans.app.model.Job()
+        session = trans.get_galaxy_session()
+        job.session_id = session and session.id
+        if history:
+            history_id = history.id
+        else:
+            history_id = trans.history.id
+        job.history_id = history_id
+        job.tool_id = tool.id
+        if trans.user:
+            # If this is an actual user, run the job as that individual.  Otherwise we're running as guest.
+            job.user_id = trans.user.id
+        start_job_state = job.state  # should be job.states.NEW
+        job.state = job.states.WAITING  # we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters
+        trans.sa_session.add( job )
+
+        # Create dataset that will serve as archive.
+        archive_dataset = trans.app.model.Dataset()
+        trans.sa_session.add( archive_dataset )
+
+        trans.sa_session.flush()  # ensure job.id and archive_dataset.id are available
+        trans.app.object_store.create( archive_dataset )  # set the object store id, create dataset (if applicable)
+
+        #
+        # Setup job and job wrapper.
+        #
+
+        # Add association for keeping track of job, history, archive relationship.
+        jeha = trans.app.model.JobExportHistoryArchive( job=job, history=history,
+                                                        dataset=archive_dataset,
+                                                        compressed=incoming[ 'compress' ] )
+        trans.sa_session.add( jeha )
+
+        job_wrapper = JobExportHistoryArchiveWrapper( job )
+        cmd_line = job_wrapper.setup_job( trans, jeha, include_hidden=incoming[ 'include_hidden' ],
+                                          include_deleted=incoming[ 'include_deleted' ] )
+
+        #
+        # Add parameters to job_parameter table.
+        #
+
+        # Set additional parameters.
+        incoming[ '__HISTORY_TO_EXPORT__' ] = history.id
+        incoming[ '__EXPORT_HISTORY_COMMAND_INPUTS_OPTIONS__' ] = cmd_line
+        for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+            job.add_parameter( name, value )
+
+        job.state = start_job_state  # job inputs have been configured, restore initial job state
+        job.set_handler(tool.get_job_handler(None))
+        trans.sa_session.flush()
+
+        # Queue the job for execution
+        trans.app.job_queue.put( job.id, tool.id )
+        trans.log_event( "Added export history job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+
+        return job, odict()
diff --git a/lib/galaxy/tools/actions/metadata.py b/lib/galaxy/tools/actions/metadata.py
new file mode 100644
index 0000000..1b12f69
--- /dev/null
+++ b/lib/galaxy/tools/actions/metadata.py
@@ -0,0 +1,110 @@
+import logging
+from json import dumps
+
+from __init__ import ToolAction
+from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
+from galaxy.jobs.datasets import DatasetPath
+from galaxy.util.odict import odict
+
+log = logging.getLogger( __name__ )
+
+
+class SetMetadataToolAction( ToolAction ):
+    """Tool action used for setting external metadata on an existing dataset"""
+
+    def execute( self, tool, trans, incoming={}, set_output_hid=False, overwrite=True, history=None, job_params=None, **kwargs ):
+        """
+        Execute using a web transaction.
+        """
+        job, out_data = self.execute_via_app( tool, trans.app, trans.get_galaxy_session().id,
+                                              trans.history.id, trans.user, incoming, set_output_hid,
+                                              overwrite, history, job_params )
+        # FIXME: can remove this when logging is done in the execute_via_app method.
+        trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+        return job, out_data
+
+    def execute_via_app( self, tool, app, session_id, history_id, user=None,
+                         incoming={}, set_output_hid=False, overwrite=True,
+                         history=None, job_params=None ):
+        """
+        Execute using application.
+        """
+        for name, value in incoming.iteritems():
+            if isinstance( value, app.model.HistoryDatasetAssociation ):
+                dataset = value
+                dataset_name = name
+                type = 'hda'
+                break
+            elif isinstance( value, app.model.LibraryDatasetDatasetAssociation ):
+                dataset = value
+                dataset_name = name
+                type = 'ldda'
+                break
+            else:
+                raise Exception( 'The dataset to set metadata on could not be determined.' )
+
+        sa_session = app.model.context
+
+        # Create the job object
+        job = app.model.Job()
+        job.session_id = session_id
+        job.history_id = history_id
+        job.tool_id = tool.id
+        if user:
+            job.user_id = user.id
+        if job_params:
+            job.params = dumps( job_params )
+        start_job_state = job.state  # should be job.states.NEW
+        try:
+            # For backward compatibility, some tools may not have versions yet.
+            job.tool_version = tool.version
+        except:
+            job.tool_version = "1.0.1"
+        job.state = job.states.WAITING  # we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters
+        job.set_handler(tool.get_job_handler( job_params ))
+        sa_session.add( job )
+        sa_session.flush()  # ensure job.id is available
+
+        # add parameters to job_parameter table
+        # Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)?
+        incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state
+        input_paths = [DatasetPath( dataset.id, real_path=dataset.file_name, mutable=False )]
+        app.object_store.create(job, base_dir='job_work', dir_only=True, extra_dir=str(job.id))
+        job_working_dir = app.object_store.get_filename(job, base_dir='job_work', dir_only=True, extra_dir=str(job.id))
+        external_metadata_wrapper = JobExternalOutputMetadataWrapper( job )
+        cmd_line = external_metadata_wrapper.setup_external_metadata( dataset,
+                                                                      sa_session,
+                                                                      exec_dir=None,
+                                                                      tmp_dir=job_working_dir,
+                                                                      dataset_files_path=app.model.Dataset.file_path,
+                                                                      output_fnames=input_paths,
+                                                                      config_root=app.config.root,
+                                                                      config_file=app.config.config_file,
+                                                                      datatypes_config=app.datatypes_registry.integrated_datatypes_configs,
+                                                                      job_metadata=None,
+                                                                      include_command=False,
+                                                                      max_metadata_value_size=app.config.max_metadata_value_size,
+                                                                      kwds={ 'overwrite' : overwrite } )
+        incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line
+        for name, value in tool.params_to_strings( incoming, app ).iteritems():
+            job.add_parameter( name, value )
+        # add the dataset to job_to_input_dataset table
+        if type == 'hda':
+            job.add_input_dataset( dataset_name, dataset )
+        elif type == 'ldda':
+            job.add_input_library_dataset( dataset_name, dataset )
+        # Need a special state here to show that metadata is being set and also allow the job to run
+        # i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
+        dataset._state = dataset.states.SETTING_METADATA
+        job.state = start_job_state  # job inputs have been configured, restore initial job state
+        sa_session.flush()
+
+        # Queue the job for execution
+        app.job_queue.put( job.id, tool.id )
+        # FIXME: need to add event logging to app and log events there rather than trans.
+        # trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+
+        # clear e.g. converted files
+        dataset.datatype.before_setting_metadata( dataset )
+
+        return job, odict()
diff --git a/lib/galaxy/tools/actions/model_operations.py b/lib/galaxy/tools/actions/model_operations.py
new file mode 100644
index 0000000..5fd8dea
--- /dev/null
+++ b/lib/galaxy/tools/actions/model_operations.py
@@ -0,0 +1,67 @@
+from galaxy.tools.actions import (
+    DefaultToolAction,
+    OutputCollections,
+    ToolExecutionCache,
+)
+from galaxy.util.odict import odict
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class ModelOperationToolAction( DefaultToolAction ):
+
+    def check_inputs_ready( self, tool, trans, incoming, history ):
+        history, inp_data, inp_dataset_collections = self._collect_inputs(tool, trans, incoming, history)
+
+        tool.check_inputs_ready( inp_data, inp_dataset_collections )
+
+    def execute( self, tool, trans, incoming={}, set_output_hid=False, overwrite=True, history=None, job_params=None, mapping_over_collection=False, execution_cache=None, **kwargs ):
+        if execution_cache is None:
+            execution_cache = ToolExecutionCache(trans)
+
+        current_user_roles = execution_cache.current_user_roles
+        history, inp_data, inp_dataset_collections = self._collect_inputs(tool, trans, incoming, history, current_user_roles)
+
+        # Build name for output datasets based on tool name and input names
+        on_text = self._get_on_text( inp_data )
+
+        # wrapped params are used by change_format action and by output.label; only perform this wrapping once, as needed
+        wrapped_params = self._wrapped_params( trans, tool, incoming )
+
+        out_data = odict()
+        input_collections = dict( [ (k, v[0][0]) for k, v in inp_dataset_collections.iteritems() ] )
+        output_collections = OutputCollections(
+            trans,
+            history,
+            tool=tool,
+            tool_action=self,
+            input_collections=input_collections,
+            mapping_over_collection=mapping_over_collection,
+            on_text=on_text,
+            incoming=incoming,
+            params=wrapped_params.params,
+            job_params=job_params,
+        )
+
+        #
+        # Create job.
+        #
+        job, galaxy_session = self._new_job_for_session( trans, tool, history )
+        self._produce_outputs( trans, tool, out_data, output_collections, incoming=incoming, history=history )
+        self._record_inputs( trans, tool, job, incoming, inp_data, inp_dataset_collections, current_user_roles )
+        self._record_outputs( job, out_data, output_collections )
+        job.state = job.states.OK
+        trans.sa_session.add( job )
+        trans.sa_session.flush()  # ensure job.id is available
+
+        # Queue the job for execution
+        # trans.app.job_queue.put( job.id, tool.id )
+        # trans.log_event( "Added database job action to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+        log.info("Calling produce_outputs, tool is %s" % tool)
+        return job, out_data
+
+    def _produce_outputs( self, trans, tool, out_data, output_collections, incoming, history, **kwargs ):
+        tool.produce_outputs( trans, out_data, output_collections, incoming, history=history )
+        trans.sa_session.add_all( out_data.values() )
+        trans.sa_session.flush()
diff --git a/lib/galaxy/tools/actions/upload.py b/lib/galaxy/tools/actions/upload.py
new file mode 100644
index 0000000..30aeb1c
--- /dev/null
+++ b/lib/galaxy/tools/actions/upload.py
@@ -0,0 +1,38 @@
+from __init__ import ToolAction
+from galaxy.tools.actions import upload_common
+
+from galaxy.util import ExecutionTimer
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class UploadToolAction( ToolAction ):
+
+    def execute( self, tool, trans, incoming={}, set_output_hid=True, history=None, **kwargs ):
+        dataset_upload_inputs = []
+        for input_name, input in tool.inputs.iteritems():
+            if input.type == "upload_dataset":
+                dataset_upload_inputs.append( input )
+        assert dataset_upload_inputs, Exception( "No dataset upload groups were found." )
+
+        persisting_uploads_timer = ExecutionTimer()
+        precreated_datasets = upload_common.get_precreated_datasets( trans, incoming, trans.app.model.HistoryDatasetAssociation )
+        incoming = upload_common.persist_uploads( incoming )
+        log.debug("Persisted uploads %s" % persisting_uploads_timer)
+        # We can pass an empty string as the cntrller here since it is used to check whether we
+        # are in an admin view, and this tool is currently not used there.
+        check_and_cleanup_timer = ExecutionTimer()
+        uploaded_datasets = upload_common.get_uploaded_datasets( trans, '', incoming, precreated_datasets, dataset_upload_inputs, history=history )
+        upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
+
+        if not uploaded_datasets:
+            return None, 'No data was entered in the upload form, please go back and choose data to upload.'
+
+        log.debug("Checked and cleaned uploads %s" % check_and_cleanup_timer)
+        create_job_timer = ExecutionTimer()
+        json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
+        data_list = [ ud.data for ud in uploaded_datasets ]
+        rval = upload_common.create_job( trans, incoming, tool, json_file_path, data_list, history=history )
+        log.debug("Created upload job %s" % create_job_timer)
+        return rval
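+# Illustrative only (assumed calling convention): execute() either returns the
+# (job, output) pair from upload_common.create_job(), or the (None, message)
+# pair when the form was empty, which the caller is expected to handle:
+#
+#     rval = UploadToolAction().execute(tool, trans, incoming=incoming)
+#     if rval[0] is None:
+#         log.warning(rval[1])  # "No data was entered in the upload form, ..."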
diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py
new file mode 100644
index 0000000..4946023
--- /dev/null
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -0,0 +1,427 @@
+import logging
+import os
+import pwd
+import StringIO
+import subprocess
+import tempfile
+from cgi import FieldStorage
+from json import dumps
+
+from sqlalchemy.orm import eagerload_all
+
+from galaxy import datatypes, util
+from galaxy.exceptions import ObjectInvalid
+from galaxy.util.odict import odict
+
+log = logging.getLogger( __name__ )
+
+
+def persist_uploads( params ):
+    """
+    Turn any uploads in the submitted form into persisted files.
+    """
+    if 'files' in params:
+        new_files = []
+        for upload_dataset in params['files']:
+            f = upload_dataset['file_data']
+            if isinstance( f, FieldStorage ):
+                assert not isinstance( f.file, StringIO.StringIO )
+                assert f.file.name != '<fdopen>'
+                local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
+                f.file.close()
+                upload_dataset['file_data'] = dict( filename=f.filename,
+                                                    local_filename=local_filename )
+            elif type( f ) == dict and 'local_filename' not in f:
+                raise Exception( 'Uploaded file was encoded in a way not understood by Galaxy.' )
+            if upload_dataset['url_paste'] and upload_dataset['url_paste'].strip() != '':
+                upload_dataset['url_paste'], is_multi_byte = datatypes.sniff.stream_to_file( StringIO.StringIO( upload_dataset['url_paste'] ), prefix="strio_url_paste_" )
+            else:
+                upload_dataset['url_paste'] = None
+            new_files.append( upload_dataset )
+        params['files'] = new_files
+    return params
+
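+# For illustration, a hypothetical entry of params['files'] after
+# persist_uploads() has run -- the FieldStorage has been replaced by a dict
+# pointing at the persisted temp file:
+#
+#     {'file_data': {'filename': 'reads.fastq',
+#                    'local_filename': '/tmp/upload_file_data_XXXXXX'},
+#      'url_paste': None}
+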
+
+def handle_library_params( trans, params, folder_id, replace_dataset=None ):
+    # FIXME: the received params has already been parsed by util.Params() by the time it reaches here,
+    # so no complex objects remain.  This is not good because it does not allow for those objects to be
+    # manipulated here.  The received params should be the original kwd from the initial request.
+    library_bunch = util.bunch.Bunch()
+    library_bunch.replace_dataset = replace_dataset
+    library_bunch.message = params.get( 'ldda_message', '' )
+    # See if we have any template field contents
+    library_bunch.template_field_contents = {}
+    template_id = params.get( 'template_id', None )
+    library_bunch.folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( folder_id ) )
+    # We are inheriting the folder's info_association, so we may have received inherited contents or we may have redirected
+    # here after the user entered template contents ( due to errors ).
+    if template_id not in [ None, 'None' ]:
+        library_bunch.template = trans.sa_session.query( trans.app.model.FormDefinition ).get( template_id )
+        for field in library_bunch.template.fields:
+            field_name = field[ 'name' ]
+            if params.get( field_name, False ):
+                field_value = util.restore_text( params.get( field_name, ''  ) )
+                library_bunch.template_field_contents[ field_name ] = field_value
+    else:
+        library_bunch.template = None
+    library_bunch.roles = []
+    for role_id in util.listify( params.get( 'roles', [] ) ):
+        role = trans.sa_session.query( trans.app.model.Role ).get( role_id )
+        library_bunch.roles.append( role )
+    return library_bunch
+
+
+def get_precreated_datasets( trans, params, data_obj, controller='root' ):
+    """
+    Get any precreated datasets (when using asynchronous uploads).
+    """
+    rval = []
+    async_datasets = []
+    if params.get( 'async_datasets', None ) not in ["None", "", None]:
+        async_datasets = params['async_datasets'].split(',')
+    current_user_roles = trans.get_current_user_roles()
+    for id in async_datasets:
+        try:
+            data = trans.sa_session.query( data_obj ).get( int( id ) )
+        except:
+            log.exception( 'Unable to load precreated dataset (%s) sent in upload form' % id )
+            continue
+        if data_obj is trans.app.model.HistoryDatasetAssociation:
+            if trans.user is None and trans.galaxy_session.current_history != data.history:
+                log.error( 'Got a precreated dataset (%s) but it does not belong to anonymous user\'s current session (%s)' % ( data.id, trans.galaxy_session.id ) )
+            elif data.history.user != trans.user:
+                log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)' % ( data.id, trans.user.id ) )
+            else:
+                rval.append( data )
+        elif data_obj is trans.app.model.LibraryDatasetDatasetAssociation:
+            if controller == 'library' and not trans.app.security_agent.can_add_library_item( current_user_roles, data.library_dataset.folder ):
+                log.error( 'Got a precreated dataset (%s) but this user (%s) is not allowed to write to it' % ( data.id, trans.user.id ) )
+            else:
+                rval.append( data )
+    return rval
+
+
+def get_precreated_dataset( precreated_datasets, name ):
+    """
+    Return a dataset matching a name from the list of precreated (via async
+    upload) datasets. If there's more than one upload with the exact same
+    name, we need to pop one (the first) so it isn't chosen next time.
+    """
+    names = [ d.name for d in precreated_datasets ]
+    if names.count( name ) > 0:
+        return precreated_datasets.pop( names.index( name ) )
+    else:
+        return None
+
+
+def cleanup_unused_precreated_datasets( precreated_datasets ):
+    for data in precreated_datasets:
+        log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
+        data.state = data.states.ERROR
+        data.info = 'No file contents were available.'
+
+
+def __new_history_upload( trans, uploaded_dataset, history=None, state=None ):
+    if not history:
+        history = trans.history
+    hda = trans.app.model.HistoryDatasetAssociation( name=uploaded_dataset.name,
+                                                     extension=uploaded_dataset.file_type,
+                                                     dbkey=uploaded_dataset.dbkey,
+                                                     history=history,
+                                                     create_dataset=True,
+                                                     sa_session=trans.sa_session )
+    if state:
+        hda.state = state
+    else:
+        hda.state = hda.states.QUEUED
+    trans.sa_session.add( hda )
+    trans.sa_session.flush()
+    history.add_dataset( hda, genome_build=uploaded_dataset.dbkey )
+    permissions = trans.app.security_agent.history_get_default_permissions( history )
+    trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
+    trans.sa_session.flush()
+    return hda
+
+
+def __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ):
+    current_user_roles = trans.get_current_user_roles()
+    if not ( ( trans.user_is_admin() and cntrller in [ 'library_admin', 'api' ] ) or trans.app.security_agent.can_add_library_item( current_user_roles, library_bunch.folder ) ):
+        # This doesn't have to be pretty - the only time this should happen is if someone's being malicious.
+        raise Exception( "User is not authorized to add datasets to this library." )
+    folder = library_bunch.folder
+    if uploaded_dataset.get( 'in_folder', False ):
+        # Create subfolders if desired
+        for name in uploaded_dataset.in_folder.split( os.path.sep ):
+            trans.sa_session.refresh( folder )
+            matches = filter( lambda x: x.name == name, active_folders( trans, folder ) )
+            if matches:
+                folder = matches[0]
+            else:
+                new_folder = trans.app.model.LibraryFolder( name=name, description='Automatically created by upload tool' )
+                new_folder.genome_build = trans.app.genome_builds.default_value
+                folder.add_folder( new_folder )
+                trans.sa_session.add( new_folder )
+                trans.sa_session.flush()
+                trans.app.security_agent.copy_library_permissions( trans, folder, new_folder )
+                folder = new_folder
+    if library_bunch.replace_dataset:
+        ld = library_bunch.replace_dataset
+    else:
+        ld = trans.app.model.LibraryDataset( folder=folder, name=uploaded_dataset.name )
+        trans.sa_session.add( ld )
+        trans.sa_session.flush()
+        trans.app.security_agent.copy_library_permissions( trans, folder, ld )
+    ldda = trans.app.model.LibraryDatasetDatasetAssociation( name=uploaded_dataset.name,
+                                                             extension=uploaded_dataset.file_type,
+                                                             dbkey=uploaded_dataset.dbkey,
+                                                             library_dataset=ld,
+                                                             user=trans.user,
+                                                             create_dataset=True,
+                                                             sa_session=trans.sa_session )
+    trans.sa_session.add( ldda )
+    if state:
+        ldda.state = state
+    else:
+        ldda.state = ldda.states.QUEUED
+    ldda.message = library_bunch.message
+    trans.sa_session.flush()
+    # Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
+    trans.app.security_agent.copy_library_permissions( trans, ld, ldda )
+    if library_bunch.replace_dataset:
+        # Copy the Dataset level permissions from replace_dataset to the new LibraryDatasetDatasetAssociation.dataset
+        trans.app.security_agent.copy_dataset_permissions( library_bunch.replace_dataset.library_dataset_dataset_association.dataset, ldda.dataset )
+    else:
+        # Copy the current user's DefaultUserPermissions to the new LibraryDatasetDatasetAssociation.dataset
+        trans.app.security_agent.set_all_dataset_permissions( ldda.dataset, trans.app.security_agent.user_get_default_permissions( trans.user ) )
+        folder.add_library_dataset( ld, genome_build=uploaded_dataset.dbkey )
+        trans.sa_session.add( folder )
+        trans.sa_session.flush()
+    ld.library_dataset_dataset_association_id = ldda.id
+    trans.sa_session.add( ld )
+    trans.sa_session.flush()
+    # Handle template included in the upload form, if any.  If the upload is not asynchronous ( e.g., URL paste ),
+    # then the template and contents will be included in the library_bunch at this point.  If the upload is
+    # asynchronous ( e.g., uploading a file ), then the template and contents will be included in the library_bunch
+    # in the get_uploaded_datasets() method below.
+    if library_bunch.template and library_bunch.template_field_contents:
+        # Since information templates are inherited, the template fields can be displayed on the upload form.
+        # If the user has added field contents, we'll need to create a new form_values and info_association
+        # for the new library_dataset_dataset_association object.
+        # Create a new FormValues object, using the template we previously retrieved
+        form_values = trans.app.model.FormValues( library_bunch.template, library_bunch.template_field_contents )
+        trans.sa_session.add( form_values )
+        trans.sa_session.flush()
+        # Create a new info_association between the current ldda and form_values
+        # TODO: Currently info_associations at the ldda level are not inheritable to the associated LibraryDataset,
+        # we need to figure out if this is optimal
+        info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( ldda, library_bunch.template, form_values )
+        trans.sa_session.add( info_association )
+        trans.sa_session.flush()
+    # If roles were selected upon upload, restrict access to the Dataset to those roles
+    if library_bunch.roles:
+        for role in library_bunch.roles:
+            dp = trans.app.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action, ldda.dataset, role )
+            trans.sa_session.add( dp )
+            trans.sa_session.flush()
+    return ldda
+
+
+def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None ):
+    if library_bunch:
+        return __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state )
+    else:
+        return __new_history_upload( trans, uploaded_dataset, history=history, state=state )
+
+
+def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None, history=None ):
+    uploaded_datasets = []
+    for dataset_upload_input in dataset_upload_inputs:
+        uploaded_datasets.extend( dataset_upload_input.get_uploaded_datasets( trans, params ) )
+    for uploaded_dataset in uploaded_datasets:
+        data = get_precreated_dataset( precreated_datasets, uploaded_dataset.name )
+        if not data:
+            data = new_upload( trans, cntrller, uploaded_dataset, library_bunch=library_bunch, history=history )
+        else:
+            data.extension = uploaded_dataset.file_type
+            data.dbkey = uploaded_dataset.dbkey
+            data.uuid = uploaded_dataset.uuid
+            trans.sa_session.add( data )
+            trans.sa_session.flush()
+            if library_bunch:
+                library_bunch.folder.genome_build = uploaded_dataset.dbkey
+                trans.sa_session.add( library_bunch.folder )
+                # Handle template included in the upload form, if any.  If the upload is asynchronous ( e.g., file upload ),
+                # then the template and contents will be included in the library_bunch at this point.  If the upload is
+                # not asynchronous ( e.g., URL paste ), then the template and contents will be included in the library_bunch
+                # in the new_library_upload() method above.
+                if library_bunch.template and library_bunch.template_field_contents:
+                    # Since information templates are inherited, the template fields can be displayed on the upload form.
+                    # If the user has added field contents, we'll need to create a new form_values and info_association
+                    # for the new library_dataset_dataset_association object.
+                    # Create a new FormValues object, using the template we previously retrieved
+                    form_values = trans.app.model.FormValues( library_bunch.template, library_bunch.template_field_contents )
+                    trans.sa_session.add( form_values )
+                    trans.sa_session.flush()
+                    # Create a new info_association between the current ldda and form_values
+                    # TODO: Currently info_associations at the ldda level are not inheritable to the associated LibraryDataset,
+                    # we need to figure out if this is optimal
+                    info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( data, library_bunch.template, form_values )
+                    trans.sa_session.add( info_association )
+                trans.sa_session.flush()
+            else:
+                if not history:
+                    history = trans.history
+                history.genome_build = uploaded_dataset.dbkey
+        uploaded_dataset.data = data
+    return uploaded_datasets
+
+
+def create_paramfile( trans, uploaded_datasets ):
+    """
+    Create the upload tool's JSON "param" file.
+    """
+    def _chown( path ):
+        try:
+            pwent = pwd.getpwnam( trans.user.email.split('@')[0] )
+            cmd = [ '/usr/bin/sudo', '-E', trans.app.config.external_chown_script, path, pwent[0], str( pwent[3] ) ]
+            log.debug( 'Changing ownership of %s with: %s' % ( path, ' '.join( cmd ) ) )
+            p = subprocess.Popen( cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE )
+            stdout, stderr = p.communicate()
+            assert p.returncode == 0, stderr
+        except Exception as e:
+            log.warning( 'Changing ownership of uploaded file %s failed: %s' % ( path, str( e ) ) )
+
+    # TODO: json_file should go in the working directory
+    json_file = tempfile.mkstemp()
+    json_file_path = json_file[1]
+    json_file = os.fdopen( json_file[0], 'w' )
+    for uploaded_dataset in uploaded_datasets:
+        data = uploaded_dataset.data
+        if uploaded_dataset.type == 'composite':
+            # we need to init metadata before the job is dispatched
+            data.init_meta()
+            for meta_name, meta_value in uploaded_dataset.metadata.iteritems():
+                setattr( data.metadata, meta_name, meta_value )
+            trans.sa_session.add( data )
+            trans.sa_session.flush()
+            json = dict( file_type=uploaded_dataset.file_type,
+                         dataset_id=data.dataset.id,
+                         dbkey=uploaded_dataset.dbkey,
+                         type=uploaded_dataset.type,
+                         metadata=uploaded_dataset.metadata,
+                         primary_file=uploaded_dataset.primary_file,
+                         composite_file_paths=uploaded_dataset.composite_files,
+                         composite_files=dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
+        else:
+            try:
+                is_binary = uploaded_dataset.datatype.is_binary
+            except:
+                is_binary = None
+            try:
+                link_data_only = uploaded_dataset.link_data_only
+            except:
+                link_data_only = 'copy_files'
+            try:
+                uuid_str = uploaded_dataset.uuid
+            except:
+                uuid_str = None
+            try:
+                purge_source = uploaded_dataset.purge_source
+            except:
+                purge_source = True
+            json = dict( file_type=uploaded_dataset.file_type,
+                         ext=uploaded_dataset.ext,
+                         name=uploaded_dataset.name,
+                         dataset_id=data.dataset.id,
+                         dbkey=uploaded_dataset.dbkey,
+                         type=uploaded_dataset.type,
+                         is_binary=is_binary,
+                         link_data_only=link_data_only,
+                         uuid=uuid_str,
+                         to_posix_lines=getattr(uploaded_dataset, "to_posix_lines", True),
+                         purge_source=purge_source,
+                         space_to_tab=uploaded_dataset.space_to_tab,
+                         in_place=trans.app.config.external_chown_script is None,
+                         path=uploaded_dataset.path )
+            # TODO: This will have to change when we start bundling inputs.
+            # Also, in_place above causes the file to be left behind since the
+            # user cannot remove it unless the parent directory is writable.
+            if link_data_only == 'copy_files' and trans.app.config.external_chown_script:
+                _chown( uploaded_dataset.path )
+        json_file.write( dumps( json ) + '\n' )
+    json_file.close()
+    if trans.app.config.external_chown_script:
+        _chown( json_file_path )
+    return json_file_path
+
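+# A hypothetical (non-composite) line of the param file written above; each
+# uploaded dataset contributes one JSON object per line:
+#
+#     {"file_type": "auto", "ext": "auto", "name": "reads.fastq",
+#      "dataset_id": 42, "dbkey": "?", "type": "file", "is_binary": null,
+#      "link_data_only": "copy_files", "uuid": null, "to_posix_lines": true,
+#      "purge_source": true, "space_to_tab": false, "in_place": true,
+#      "path": "/tmp/upload_file_data_XXXXXX"}
+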
+
+def create_job( trans, params, tool, json_file_path, data_list, folder=None, history=None, job_params=None ):
+    """
+    Create the upload job.
+    """
+    job = trans.app.model.Job()
+    galaxy_session = trans.get_galaxy_session()
+    if type( galaxy_session ) == trans.model.GalaxySession:
+        job.session_id = galaxy_session.id
+    if trans.user is not None:
+        job.user_id = trans.user.id
+    if folder:
+        job.library_folder_id = folder.id
+    else:
+        if not history:
+            history = trans.history
+        job.history_id = history.id
+    job.tool_id = tool.id
+    job.tool_version = tool.version
+    job.set_state( job.states.UPLOAD )
+    trans.sa_session.add( job )
+    trans.sa_session.flush()
+    log.info( 'tool %s created job id %d' % ( tool.id, job.id ) )
+    trans.log_event( 'created job id %d' % job.id, tool_id=tool.id )
+
+    for name, value in tool.params_to_strings( params, trans.app ).iteritems():
+        job.add_parameter( name, value )
+    job.add_parameter( 'paramfile', dumps( json_file_path ) )
+    object_store_id = None
+    for i, dataset in enumerate( data_list ):
+        if folder:
+            job.add_output_library_dataset( 'output%i' % i, dataset )
+        else:
+            job.add_output_dataset( 'output%i' % i, dataset )
+        # Create an empty file immediately
+        if not dataset.dataset.external_filename:
+            dataset.dataset.object_store_id = object_store_id
+            try:
+                trans.app.object_store.create( dataset.dataset )
+            except ObjectInvalid:
+                raise Exception('Unable to create output dataset: object store is full')
+            object_store_id = dataset.dataset.object_store_id
+            trans.sa_session.add( dataset )
+            # open( dataset.file_name, "w" ).close()
+    job.object_store_id = object_store_id
+    job.set_state( job.states.NEW )
+    job.set_handler( tool.get_job_handler( None ) )
+    if job_params:
+        for name, value in job_params.iteritems():
+            job.add_parameter( name, value )
+    trans.sa_session.add( job )
+    trans.sa_session.flush()
+
+    # Queue the job for execution
+    trans.app.job_queue.put( job.id, job.tool_id )
+    trans.log_event( "Added job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+    output = odict()
+    for i, v in enumerate( data_list ):
+        output[ 'output%i' % i ] = v
+    return job, output
+
+
+def active_folders( trans, folder ):
+    # Stolen from galaxy.web.controllers.library_common (importing from which causes circular import issues).
+    # This is a much faster way of retrieving all active sub-folders within a given folder than going through
+    # the mapper.  The query also eagerloads the permissions on each folder.
+    return trans.sa_session.query( trans.app.model.LibraryFolder ) \
+                           .filter_by( parent=folder, deleted=False ) \
+                           .options( eagerload_all( "actions" ) ) \
+                           .order_by( trans.app.model.LibraryFolder.table.c.name ) \
+                           .all()
diff --git a/lib/galaxy/tools/cwl/__init__.py b/lib/galaxy/tools/cwl/__init__.py
new file mode 100644
index 0000000..fe28381
--- /dev/null
+++ b/lib/galaxy/tools/cwl/__init__.py
@@ -0,0 +1,18 @@
+from .cwltool_deps import (
+    needs_shell_quoting,
+    shellescape,
+)
+from .parser import tool_proxy, workflow_proxy
+from .representation import to_cwl_job, to_galaxy_parameters
+from .runtime_actions import handle_outputs
+
+
+__all__ = (
+    'tool_proxy',
+    'workflow_proxy',
+    'handle_outputs',
+    'to_cwl_job',
+    'to_galaxy_parameters',
+    'needs_shell_quoting',
+    'shellescape',
+)
diff --git a/lib/galaxy/tools/cwl/cwltool_deps.py b/lib/galaxy/tools/cwl/cwltool_deps.py
new file mode 100644
index 0000000..a2a946c
--- /dev/null
+++ b/lib/galaxy/tools/cwl/cwltool_deps.py
@@ -0,0 +1,76 @@
+"""Logic for dealing with cwltool as an optional dependency.
+
+Use this as the import interface for cwltool and just call
+:func:`ensure_cwltool_available` before using any of the imported
+functionality at runtime.
+"""
+import re
+
+try:
+    import requests
+except ImportError:
+    requests = None
+
+try:
+    from cwltool import (
+        main,
+        workflow,
+        job,
+        process,
+    )
+except (ImportError, SyntaxError):
+    # Drop SyntaxError once cwltool supports Python 3
+    main = None
+    workflow = None
+    job = None
+    process = None
+
+try:
+    from cwltool import load_tool
+except (ImportError, SyntaxError):
+    load_tool = None
+
+try:
+    import shellescape
+except ImportError:
+    shellescape = None
+
+try:
+    import schema_salad
+except (ImportError, SyntaxError):
+    # Drop SyntaxError once schema_salad supports Python 3
+    schema_salad = None
+
+needs_shell_quoting = re.compile(r"""(^$|[\s|&;()<>\'"$@])""").search
+
+
+def ensure_cwltool_available():
+    """Assert optional dependencies proxied via this module are available at runtime.
+
+    Raise an ImportError with a description of the problem if they do not exist.
+    """
+    if main is None or workflow is None or shellescape is None:
+        message = "This feature requires cwltool and dependencies to be available, they are not."
+        if main is None:
+            message += " cwltool is not unavailable."
+        elif load_tool is None:
+            message += " cwltool.load_tool is unavailable - cwltool version is too old."
+        if requests is None:
+            message += " Library 'requests' unavailable."
+        if shellescape is None:
+            message += " Library 'shellescape' unavailable."
+        if schema_salad is None:
+            message += " Library 'schema_salad' unavailable."
+        raise ImportError(message)
+
+
+__all__ = (
+    'main',
+    'load_tool',
+    'workflow',
+    'process',
+    'ensure_cwltool_available',
+    'schema_salad',
+    'shellescape',
+    'needs_shell_quoting',
+)
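+# Quick illustration of the quoting predicate defined above:
+#
+#     needs_shell_quoting("token")   # -> None, no quoting needed
+#     needs_shell_quoting("a b")     # -> match object (truthy), quote it
+#     needs_shell_quoting("")        # -> match object, empty strings are quoted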
diff --git a/lib/galaxy/tools/cwl/parser.py b/lib/galaxy/tools/cwl/parser.py
new file mode 100644
index 0000000..3e10644
--- /dev/null
+++ b/lib/galaxy/tools/cwl/parser.py
@@ -0,0 +1,624 @@
+""" This module provides proxy objects around objects from the common
+workflow language reference implementation library cwltool. These proxies
+adapt cwltool to Galaxy features and abstract the library away from the rest
+of the framework.
+"""
+from __future__ import absolute_import
+
+import json
+import logging
+import os
+from abc import ABCMeta, abstractmethod
+
+import six
+
+from galaxy.util import safe_makedirs
+from galaxy.util.bunch import Bunch
+from galaxy.util.odict import odict
+
+from .cwltool_deps import (
+    ensure_cwltool_available,
+    main,
+    workflow,
+)
+
+from .schema import schema_loader
+
+log = logging.getLogger(__name__)
+
+JOB_JSON_FILE = ".cwl_job.json"
+SECONDARY_FILES_EXTRA_PREFIX = "__secondary_files__"
+
+
+SUPPORTED_TOOL_REQUIREMENTS = [
+    "CreateFileRequirement",
+    "DockerRequirement",
+    "EnvVarRequirement",
+    "InlineJavascriptRequirement",
+]
+
+
+SUPPORTED_WORKFLOW_REQUIREMENTS = SUPPORTED_TOOL_REQUIREMENTS + [
+]
+
+
+def tool_proxy(tool_path):
+    """ Provide a proxy object to cwltool data structures to just
+    grab relevant data.
+    """
+    ensure_cwltool_available()
+    tool = to_cwl_tool_object(tool_path)
+    return tool
+
+
+def workflow_proxy(workflow_path):
+    ensure_cwltool_available()
+    workflow = to_cwl_workflow_object(workflow_path)
+    return workflow
+
+
+def load_job_proxy(job_directory):
+    ensure_cwltool_available()
+    job_objects_path = os.path.join(job_directory, JOB_JSON_FILE)
+    job_objects = json.load(open(job_objects_path, "r"))
+    tool_path = job_objects["tool_path"]
+    job_inputs = job_objects["job_inputs"]
+    output_dict = job_objects["output_dict"]
+    cwl_tool = tool_proxy(tool_path)
+    cwl_job = cwl_tool.job_proxy(job_inputs, output_dict, job_directory=job_directory)
+    return cwl_job
+
+
+def to_cwl_tool_object(tool_path):
+    proxy_class = None
+    cwl_tool = schema_loader.tool(path=tool_path)
+    if isinstance(cwl_tool, int):
+        raise Exception("Failed to load tool.")
+
+    raw_tool = cwl_tool.tool
+    check_requirements(raw_tool)
+    if "class" not in raw_tool:
+        raise Exception("File does not declare a class, not a valid Draft 3+ CWL tool.")
+    process_class = raw_tool["class"]
+    if process_class == "CommandLineTool":
+        proxy_class = CommandLineToolProxy
+    elif process_class == "ExpressionTool":
+        proxy_class = ExpressionToolProxy
+    else:
+        raise Exception("File not a CWL CommandLineTool.")
+    if "cwlVersion" not in raw_tool:
+        raise Exception("File does not declare a CWL version, pre-draft 3 CWL tools are not supported.")
+
+    proxy = proxy_class(cwl_tool, tool_path)
+    return proxy
+
+
+def to_cwl_workflow_object(workflow_path):
+    proxy_class = WorkflowProxy
+    make_tool = workflow.defaultMakeTool
+    cwl_workflow = main.load_tool(workflow_path, False, False, make_tool, False)
+    raw_workflow = cwl_workflow.tool
+    check_requirements(raw_workflow, tool=False)
+
+    proxy = proxy_class(cwl_workflow, workflow_path)
+    return proxy
+
+
+def check_requirements(rec, tool=True):
+    if isinstance(rec, dict):
+        if "requirements" in rec:
+            for r in rec["requirements"]:
+                if tool:
+                    possible = SUPPORTED_TOOL_REQUIREMENTS
+                else:
+                    possible = SUPPORTED_WORKFLOW_REQUIREMENTS
+                if r["class"] not in possible:
+                    raise Exception("Unsupported requirement %s" % r["class"])
+        for d in rec:
+            check_requirements(rec[d], tool=tool)
+    if isinstance(rec, list):
+        for d in rec:
+            check_requirements(d, tool=tool)
+
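+# For example, a tool declaring a requirement outside the supported lists
+# above fails fast during loading:
+#
+#     check_requirements({"requirements": [{"class": "ShellCommandRequirement"}]})
+#     # -> Exception: Unsupported requirement ShellCommandRequirement
+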
+
+@six.add_metaclass(ABCMeta)
+class ToolProxy( object ):
+
+    def __init__(self, tool, tool_path):
+        self._tool = tool
+        self._tool_path = tool_path
+
+    def job_proxy(self, input_dict, output_dict, job_directory="."):
+        """ Build a cwltool.job.Job describing computation using a input_json
+        Galaxy will generate mapping the Galaxy description of the inputs into
+        a cwltool compatible variant.
+        """
+        return JobProxy(self, input_dict, output_dict, job_directory=job_directory)
+
+    @abstractmethod
+    def input_instances(self):
+        """ Return InputInstance objects describing mapping to Galaxy inputs. """
+
+    @abstractmethod
+    def output_instances(self):
+        """ Return OutputInstance objects describing mapping to Galaxy inputs. """
+
+    @abstractmethod
+    def docker_identifier(self):
+        """ Return docker identifier for embedding in tool description. """
+
+    @abstractmethod
+    def description(self):
+        """ Return description to tool. """
+
+    @abstractmethod
+    def label(self):
+        """ Return label for tool. """
+
+
+class CommandLineToolProxy(ToolProxy):
+
+    def description(self):
+        return self._tool.tool.get('doc')
+
+    def label(self):
+        return self._tool.tool.get('label')
+
+    def input_instances(self):
+        return self._find_inputs(self._tool.inputs_record_schema)
+
+    def _find_inputs(self, schema):
+        schema_type = schema["type"]
+        if isinstance(schema_type, list):
+            raise Exception("Union types not yet implemented.")
+        elif isinstance(schema_type, dict):
+            return self._find_inputs(schema_type)
+        else:
+            if schema_type in self._tool.schemaDefs:
+                schema = self._tool.schemaDefs[schema_type]
+
+            if schema["type"] == "record":
+                return [_simple_field_to_input(_) for _ in schema["fields"]]
+
+    def output_instances(self):
+        outputs_schema = self._tool.outputs_record_schema
+        return self._find_outputs(outputs_schema)
+
+    def _find_outputs(self, schema):
+        rval = []
+        if not rval and schema["type"] == "record":
+            for output in schema["fields"]:
+                # output_type = output.get("type", None)
+                # if output_type != "File":
+                #     template = "Unhandled output type [%s] encountered."
+                #     raise Exception(template % output_type)
+                rval.append(_simple_field_to_output(output))
+
+        return rval
+
+    def docker_identifier(self):
+        tool = self._tool.tool
+        reqs_and_hints = tool.get("requirements", []) + tool.get("hints", [])
+        for hint in reqs_and_hints:
+            if hint["class"] == "DockerRequirement":
+                if "dockerImageId" in hint:
+                    return hint["dockerImageId"]
+                else:
+                    return hint["dockerPull"]
+        return None
+
+
+class ExpressionToolProxy(CommandLineToolProxy):
+    pass
+
+
+class JobProxy(object):
+
+    def __init__(self, tool_proxy, input_dict, output_dict, job_directory):
+        self._tool_proxy = tool_proxy
+        self._input_dict = input_dict
+        self._output_dict = output_dict
+        self._job_directory = job_directory
+
+        self._final_output = []
+        self._ok = True
+        self._cwl_job = None
+        self._is_command_line_job = None
+
+    def cwl_job(self):
+        self._ensure_cwl_job_initialized()
+        return self._cwl_job
+
+    @property
+    def is_command_line_job(self):
+        self._ensure_cwl_job_initialized()
+        assert self._is_command_line_job is not None
+        return self._is_command_line_job
+
+    def _ensure_cwl_job_initialized(self):
+        if self._cwl_job is None:
+            self._cwl_job = next(self._tool_proxy._tool.job(
+                self._input_dict,
+                self._output_callback,
+                basedir=self._job_directory,
+                use_container=False
+            ))
+            self._is_command_line_job = hasattr(self._cwl_job, "command_line")
+
+    @property
+    def command_line(self):
+        if self.is_command_line_job:
+            return self.cwl_job().command_line
+        else:
+            return ["true"]
+
+    @property
+    def stdin(self):
+        if self.is_command_line_job:
+            return self.cwl_job().stdin
+        else:
+            return None
+
+    @property
+    def stdout(self):
+        if self.is_command_line_job:
+            return self.cwl_job().stdout
+        else:
+            return None
+
+    @property
+    def environment(self):
+        if self.is_command_line_job:
+            return self.cwl_job().environment
+        else:
+            return {}
+
+    @property
+    def generate_files(self):
+        if self.is_command_line_job:
+            return self.cwl_job().generatefiles
+        else:
+            return {}
+
+    def _output_callback(self, out, process_status):
+        if process_status == "success":
+            self._final_output = out
+        else:
+            self._ok = False
+
+        log.info("Output are %s, status is %s" % (out, process_status))
+
+    def collect_outputs(self, tool_working_directory):
+        if not self.is_command_line_job:
+            self.cwl_job().run()
+            return self._final_output
+        else:
+            return self.cwl_job().collect_outputs(tool_working_directory)
+
+    def save_job(self):
+        job_file = JobProxy._job_file(self._job_directory)
+        job_objects = {
+            "tool_path": os.path.abspath(self._tool_proxy._tool_path),
+            "job_inputs": self._input_dict,
+            "output_dict": self._output_dict,
+        }
+        json.dump(job_objects, open(job_file, "w"))
+
+    def _output_extra_files_dir(self, output_name):
+        output_id = self.output_id(output_name)
+        return os.path.join(self._job_directory, "dataset_%s_files" % output_id)
+
+    def output_id(self, output_name):
+        output_id = self._output_dict[output_name]["id"]
+        return output_id
+
+    def output_path(self, output_name):
+        output_id = self._output_dict[output_name]["path"]
+        return output_id
+
+    def output_secondary_files_dir(self, output_name, create=False):
+        extra_files_dir = self._output_extra_files_dir(output_name)
+        secondary_files_dir = os.path.join(extra_files_dir, SECONDARY_FILES_EXTRA_PREFIX)
+        if create and not os.path.exists(secondary_files_dir):
+            safe_makedirs(secondary_files_dir)
+        return secondary_files_dir
+
+    @staticmethod
+    def _job_file(job_directory):
+        return os.path.join(job_directory, JOB_JSON_FILE)
+
+
+class WorkflowProxy(object):
+
+    def __init__(self, workflow, workflow_path):
+        self._workflow = workflow
+        self._workflow_path = workflow_path
+
+    def step_proxies(self):
+        proxies = []
+        for step in self._workflow.steps:
+            proxies.append(StepProxy(self, step))
+        return proxies
+
+    @property
+    def runnables(self):
+        runnables = []
+        for step in self._workflow.steps:
+            if "run" in step.tool:
+                runnables.append(step.tool["run"])
+        return runnables
+
+    def to_dict(self):
+        name = os.path.basename(self._workflow_path)
+        steps = {}
+
+        index = 0
+        for i, input_dict in enumerate(self._workflow.tool['inputs']):
+            index += 1
+            steps[index] = input_dict
+
+        for i, step_proxy in enumerate(self.step_proxies()):
+            index += 1
+            steps[index] = step_proxy.to_dict()
+
+        return {
+            'name': name,
+            'steps': steps,
+        }
+
+
+class StepProxy(object):
+
+    def __init__(self, workflow_proxy, step):
+        self._workflow_proxy = workflow_proxy
+        self._step = step
+
+    def to_dict(self):
+        return {}
+
+
+def _simple_field_union(field):
+    field_type = _field_to_field_type(field)  # Must be a list if in here?
+
+    def any_of_in_field_type(types):
+        return any([t in field_type for t in types])
+
+    name, label, description = _field_metadata(field)
+
+    case_name = "_cwl__type_"
+    case_label = "Specify Parameter %s As" % label
+
+    def value_input(**kwds):
+        value_name = "_cwl__value_"
+        value_label = label
+        value_description = description
+        return InputInstance(
+            value_name,
+            value_label,
+            value_description,
+            **kwds
+        )
+
+    select_options = []
+    case_options = []
+    if "null" in field_type:
+        select_options.append({"value": "null", "label": "None", "selected": True})
+        case_options.append(("null", []))
+    if any_of_in_field_type(["Any", "string"]):
+        select_options.append({"value": "string", "label": "Simple String"})
+        case_options.append(("string", [value_input(input_type=INPUT_TYPE.TEXT)]))
+    if any_of_in_field_type(["Any", "boolean"]):
+        select_options.append({"value": "boolean", "label": "Boolean"})
+        case_options.append(("boolean", [value_input(input_type=INPUT_TYPE.BOOLEAN)]))
+    if any_of_in_field_type(["Any", "int"]):
+        select_options.append({"value": "int", "label": "Integer"})
+        case_options.append(("int", [value_input(input_type=INPUT_TYPE.INTEGER)]))
+    if any_of_in_field_type(["Any", "float"]):
+        select_options.append({"value": "float", "label": "Floating Point Number"})
+        case_options.append(("float", [value_input(input_type=INPUT_TYPE.FLOAT)]))
+    if any_of_in_field_type(["Any", "File"]):
+        select_options.append({"value": "data", "label": "Dataset"})
+        case_options.append(("data", [value_input(input_type=INPUT_TYPE.DATA)]))
+    if "Any" in field_type:
+        select_options.append({"value": "json", "label": "JSON Data Structure"})
+        case_options.append(("json", [value_input(input_type=INPUT_TYPE.TEXT, area=True)]))
+
+    case_input = SelectInputInstance(
+        name=case_name,
+        label=case_label,
+        description=False,
+        options=select_options,
+    )
+
+    return ConditionalInstance(name, case_input, case_options)
+
+
+def _simple_field_to_input(field):
+    field_type = _field_to_field_type(field)
+    if isinstance(field_type, list):
+        # Length must be greater than 1...
+        return _simple_field_union(field)
+
+    name, label, description = _field_metadata(field)
+
+    type_kwds = _simple_field_to_input_type_kwds(field)
+    return InputInstance(name, label, description, **type_kwds)
+
+
+def _simple_field_to_input_type_kwds(field, field_type=None):
+    simple_map_type_map = {
+        "File": INPUT_TYPE.DATA,
+        "int": INPUT_TYPE.INTEGER,
+        "long": INPUT_TYPE.INTEGER,
+        "float": INPUT_TYPE.INTEGER,
+        "double": INPUT_TYPE.INTEGER,
+        "string": INPUT_TYPE.TEXT,
+        "boolean": INPUT_TYPE.BOOLEAN,
+    }
+
+    if field_type is None:
+        field_type = _field_to_field_type(field)
+
+    if field_type in simple_map_type_map.keys():
+        input_type = simple_map_type_map[field_type]
+        return {"input_type": input_type, "array": False}
+    elif field_type == "array":
+        if isinstance(field["type"], dict):
+            array_type = field["type"]["items"]
+        else:
+            array_type = field["items"]
+        if array_type in simple_map_type_map.keys():
+            input_type = simple_map_type_map[array_type]
+        else:
+            raise Exception("Unhandled array item type encountered - [%s]." % array_type)
+        return {"input_type": input_type, "array": True}
+    else:
+        raise Exception("Unhandled simple field type encountered - [%s]." % field_type)
+
+
+def _field_to_field_type(field):
+    field_type = field["type"]
+    if isinstance(field_type, dict):
+        field_type = field_type["type"]
+    if isinstance(field_type, list):
+        field_type_length = len(field_type)
+        if field_type_length == 0:
+            raise Exception("Zero-length type list encountered, invalid CWL?")
+        elif len(field_type) == 1:
+            field_type = field_type[0]
+
+    if field_type == "Any":
+        field_type = ["Any"]
+
+    return field_type
+
+
+def _field_metadata(field):
+    name = field["name"]
+    label = field.get("label", None)
+    description = field.get("doc", None)
+    return name, label, description
+
+
+def _simple_field_to_output(field):
+    name = field["name"]
+    output_data_class = field["type"]
+    output_instance = OutputInstance(
+        name,
+        output_data_type=output_data_class,
+        output_type=OUTPUT_TYPE.GLOB
+    )
+    return output_instance
+
+
+INPUT_TYPE = Bunch(
+    DATA="data",
+    INTEGER="integer",
+    FLOAT="float",
+    TEXT="text",
+    BOOLEAN="boolean",
+    SELECT="select",
+    CONDITIONAL="conditional",
+)
+
+
+class ConditionalInstance(object):
+
+    def __init__(self, name, case, whens):
+        self.input_type = INPUT_TYPE.CONDITIONAL
+        self.name = name
+        self.case = case
+        self.whens = whens
+
+    def to_dict(self):
+
+        as_dict = dict(
+            name=self.name,
+            type=INPUT_TYPE.CONDITIONAL,
+            test=self.case.to_dict(),
+            when=odict(),
+        )
+        for value, block in self.whens:
+            as_dict["when"][value] = [i.to_dict() for i in block]
+
+        return as_dict
+
+
+class SelectInputInstance(object):
+
+    def __init__(self, name, label, description, options):
+        self.input_type = INPUT_TYPE.SELECT
+        self.name = name
+        self.label = label
+        self.description = description
+        self.options = options
+
+    def to_dict(self):
+        # TODO: serialize options...
+        as_dict = dict(
+            name=self.name,
+            label=self.label or self.name,
+            help=self.description,
+            type=self.input_type,
+            options=self.options,
+        )
+        return as_dict
+
+
+class InputInstance(object):
+
+    def __init__(self, name, label, description, input_type, array=False, area=False):
+        self.input_type = input_type
+        self.name = name
+        self.label = label
+        self.description = description
+        self.required = True
+        self.array = array
+        self.area = area
+
+    def to_dict(self, itemwise=True):
+        if itemwise and self.array:
+            as_dict = dict(
+                type="repeat",
+                name="%s_repeat" % self.name,
+                title="%s" % self.name,
+                blocks=[
+                    self.to_dict(itemwise=False)
+                ]
+            )
+        else:
+            as_dict = dict(
+                name=self.name,
+                label=self.label or self.name,
+                help=self.description,
+                type=self.input_type,
+                optional=not self.required,
+            )
+            if self.area:
+                as_dict["area"] = True
+
+            if self.input_type == INPUT_TYPE.INTEGER:
+                as_dict["value"] = "0"
+            if self.input_type == INPUT_TYPE.FLOAT:
+                as_dict["value"] = "0.0"
+        return as_dict
+
+
+OUTPUT_TYPE = Bunch(
+    GLOB="glob",
+    STDOUT="stdout",
+)
+
+
+class OutputInstance(object):
+
+    def __init__(self, name, output_data_type, output_type, path=None):
+        self.name = name
+        self.output_data_type = output_data_type
+        self.output_type = output_type
+        self.path = path
+
+
+__all__ = (
+    'tool_proxy',
+    'load_job_proxy',
+)
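+# Illustrative use of the proxy layer (assumes cwltool is installed and
+# "cat-tool.cwl" is a hypothetical, valid CommandLineTool description):
+#
+#     proxy = tool_proxy("cat-tool.cwl")
+#     inputs = proxy.input_instances()            # InputInstance objects
+#     params = [i.to_dict() for i in inputs]      # Galaxy-style input dicts
+#     docker_id = proxy.docker_identifier()       # image id/pull or None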
diff --git a/lib/galaxy/tools/cwl/representation.py b/lib/galaxy/tools/cwl/representation.py
new file mode 100644
index 0000000..140708b
--- /dev/null
+++ b/lib/galaxy/tools/cwl/representation.py
@@ -0,0 +1,177 @@
+""" This module is responsible for converting between Galaxy's tool
+input description and the CWL description for a job json. """
+
+import json
+import logging
+import os
+
+from six import string_types
+
+from galaxy.exceptions import RequestParameterInvalidException
+from galaxy.util import safe_makedirs, string_as_bool
+
+log = logging.getLogger(__name__)
+
+NOT_PRESENT = object()
+
+GALAXY_TO_CWL_TYPES = {
+    'integer': 'integer',
+    'float': 'float',
+    'data': 'File',
+    'boolean': 'boolean',
+}
+
+
+def to_cwl_job(tool, param_dict, local_working_directory):
+    """ tool is Galaxy's representation of the tool and param_dict is the
+    parameter dictionary with wrapped values.
+    """
+    inputs = tool.inputs
+    input_json = {}
+
+    inputs_dir = os.path.join(local_working_directory, "_inputs")
+
+    def simple_value(input, param_dict_value, cwl_type=None):
+        # Hmm... cwl_type isn't really the cwl type in every case,
+        # like in the case of json for instance.
+        if cwl_type is None:
+            input_type = input.type
+            cwl_type = GALAXY_TO_CWL_TYPES[input_type]
+
+        if cwl_type == "null":
+            assert param_dict_value is None
+            return None
+        if cwl_type == "File":
+            dataset_wrapper = param_dict_value
+            extra_files_path = dataset_wrapper.extra_files_path
+            secondary_files_path = os.path.join(extra_files_path, "__secondary_files__")
+            path = str(dataset_wrapper)
+            if os.path.exists(secondary_files_path):
+                safe_makedirs(inputs_dir)
+                name = os.path.basename(path)
+                new_input_path = os.path.join(inputs_dir, name)
+                os.symlink(path, new_input_path)
+                for secondary_file_name in os.listdir(secondary_files_path):
+                    secondary_file_path = os.path.join(secondary_files_path, secondary_file_name)
+                    os.symlink(secondary_file_path, new_input_path + secondary_file_name)
+                path = new_input_path
+
+            return {"path": path, "class": "File"}
+        elif cwl_type == "integer":
+            return int(str(param_dict_value))
+        elif cwl_type == "long":
+            return int(str(param_dict_value))
+        elif cwl_type == "float":
+            return float(str(param_dict_value))
+        elif cwl_type == "double":
+            return float(str(param_dict_value))
+        elif cwl_type == "boolean":
+            return string_as_bool(param_dict_value)
+        elif cwl_type == "string":
+            return str(param_dict_value)
+        elif cwl_type == "json":
+            raw_value = param_dict_value.value
+            log.info("raw_value is %s (%s)" % (raw_value, type(raw_value)))
+            return json.loads(raw_value)
+        else:
+            return str(param_dict_value)
+
+    for input_name, input in inputs.iteritems():
+        if input.type == "repeat":
+            only_input = input.inputs.values()[0]
+            array_value = []
+            for instance in param_dict[input_name]:
+                array_value.append(simple_value(only_input, instance[input_name[:-len("_repeat")]]))
+            input_json[input_name[:-len("_repeat")]] = array_value
+        elif input.type == "conditional":
+            assert input_name in param_dict, "No value for %s in %s" % (input_name, param_dict)
+            current_case = param_dict[input_name]["_cwl__type_"]
+            if str(current_case) != "null":  # str because it is a wrapped...
+                case_index = input.get_current_case( current_case )
+                case_input = input.cases[ case_index ].inputs["_cwl__value_"]
+                case_value = param_dict[input_name]["_cwl__value_"]
+                input_json[input_name] = simple_value(case_input, case_value, cwl_type=current_case)
+        else:
+            input_json[input_name] = simple_value(input, param_dict[input_name])
+
+    input_json["allocatedResources"] = {
+        "cpu": "$GALAXY_SLOTS",
+    }
+    return input_json
+
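+# Illustrative shape of the job json produced above for a hypothetical tool
+# with one data input ("reads") and one integer input ("min_length"):
+#
+#     {"reads": {"path": "/galaxy/datasets/dataset_1.dat", "class": "File"},
+#      "min_length": 20,
+#      "allocatedResources": {"cpu": "$GALAXY_SLOTS"}}
+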
+
+def to_galaxy_parameters(tool, as_dict):
+    """ Tool is Galaxy's representation of the tool and as_dict is a Galaxified
+    representation of the input json (no paths, HDA references for instance).
+    """
+    inputs = tool.inputs
+    galaxy_request = {}
+
+    def from_simple_value(input, param_dict_value, cwl_type=None):
+        if cwl_type == "json":
+            return json.dumps(param_dict_value)
+        else:
+            return param_dict_value
+
+    for input_name, input in inputs.iteritems():
+        as_dict_value = as_dict.get(input_name, NOT_PRESENT)
+        galaxy_input_type = input.type
+
+        if galaxy_input_type == "repeat":
+            if input_name not in as_dict:
+                continue
+
+            only_input = input.inputs.values()[0]
+            for index, value in enumerate(as_dict_value):
+                key = "%s_repeat_0|%s" % (input_name, only_input.name)
+                galaxy_value = from_simple_value(only_input, value)
+                galaxy_request[key] = galaxy_value
+        elif galaxy_input_type == "conditional":
+            case_strings = input.case_strings
+            # TODO: less crazy handling of defaults...
+            if (as_dict_value is NOT_PRESENT or as_dict_value is None) and "null" in case_strings:
+                cwl_type = "null"
+            elif (as_dict_value is NOT_PRESENT or as_dict_value is None):
+                raise RequestParameterInvalidException(
+                    "Cannot translate CWL datatype - value [%s] of type [%s] with case_strings [%s]. Non-null property must be set." % (
+                        as_dict_value, type(as_dict_value), case_strings
+                    )
+                )
+            elif isinstance(as_dict_value, bool) and "boolean" in case_strings:
+                cwl_type = "boolean"
+            elif isinstance(as_dict_value, int) and "integer" in case_strings:
+                cwl_type = "integer"
+            elif isinstance(as_dict_value, int) and "long" in case_strings:
+                cwl_type = "long"
+            elif isinstance(as_dict_value, (int, float)) and "float" in case_strings:
+                cwl_type = "float"
+            elif isinstance(as_dict_value, (int, float)) and "double" in case_strings:
+                cwl_type = "double"
+            elif isinstance(as_dict_value, string_types) and "string" in case_strings:
+                cwl_type = "string"
+            elif isinstance(as_dict_value, dict) and "src" in as_dict_value and "id" in as_dict_value:
+                # Bit problematic...
+                cwl_type = "File"
+            elif "json" in case_strings and as_dict_value is not None:
+                cwl_type = "json"
+            else:
+                raise RequestParameterInvalidException(
+                    "Cannot translate CWL datatype - value [%s] of type [%s] with case_strings [%s]." % (
+                        as_dict_value, type(as_dict_value), case_strings
+                    )
+                )
+            galaxy_request["%s|_cwl__type_" % input_name] = cwl_type
+            if cwl_type != "null":
+                current_case_index = input.get_current_case(cwl_type)
+                current_case_inputs = input.cases[ current_case_index ].inputs
+                current_case_input = current_case_inputs[ "_cwl__value_" ]
+                galaxy_value = from_simple_value(current_case_input, as_dict_value, cwl_type)
+                galaxy_request["%s|_cwl__value_" % input_name] = galaxy_value
+        elif as_dict_value is NOT_PRESENT:
+            continue
+        else:
+            galaxy_value = from_simple_value(input, as_dict_value)
+            galaxy_request[input_name] = galaxy_value
+
+    log.info("Converted galaxy_request is %s" % galaxy_request)
+    return galaxy_request
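+# Hedged example of the conditional encoding above, for a hypothetical tool
+# with a single union-typed input named "x":
+#
+#     to_galaxy_parameters(tool, {"x": 3})
+#     # -> {"x|_cwl__type_": "integer", "x|_cwl__value_": 3}
+#     to_galaxy_parameters(tool, {})   # with "null" among the case strings
+#     # -> {"x|_cwl__type_": "null"}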
diff --git a/lib/galaxy/tools/cwl/runtime_actions.py b/lib/galaxy/tools/cwl/runtime_actions.py
new file mode 100644
index 0000000..07a4b2c
--- /dev/null
+++ b/lib/galaxy/tools/cwl/runtime_actions.py
@@ -0,0 +1,54 @@
+import json
+import os
+import shutil
+
+from .parser import (
+    JOB_JSON_FILE,
+    load_job_proxy,
+)
+
+
+def handle_outputs(job_directory=None):
+    # Relocate dynamically collected files to pre-determined locations
+    # registered with ToolOutput objects via from_work_dir handling.
+    if job_directory is None:
+        job_directory = os.path.join(os.getcwd(), os.path.pardir)
+    cwl_job_file = os.path.join(job_directory, JOB_JSON_FILE)
+    if not os.path.exists(cwl_job_file):
+        # Not a CWL job, just continue
+        return
+    job_proxy = load_job_proxy(job_directory)
+    tool_working_directory = os.path.join(job_directory, "working")
+    outputs = job_proxy.collect_outputs(tool_working_directory)
+    for output_name, output in outputs.items():
+        target_path = job_proxy.output_path( output_name )
+        if isinstance(output, dict) and "path" in output:
+            output_path = output["path"]
+            if output["class"] != "File":
+                open("galaxy.json", "w").write(json.dump({
+                    "dataset_id": job_proxy.output_id(output_name),
+                    "type": "dataset",
+                    "ext": "expression.json",
+                }))
+            shutil.move(output_path, target_path)
+            for secondary_file in output.get("secondaryFiles", []):
+                # TODO: handle nested files...
+                secondary_file_path = secondary_file["path"]
+                assert secondary_file_path.startswith(output_path)
+                secondary_file_name = secondary_file_path[len(output_path):]
+                secondary_files_dir = job_proxy.output_secondary_files_dir(
+                    output_name, create=True
+                )
+                extra_target = os.path.join(secondary_files_dir, secondary_file_name)
+                shutil.move(
+                    secondary_file_path,
+                    extra_target,
+                )
+        else:
+            with open(target_path, "w") as f:
+                f.write(json.dumps(output))
+
+
+__all__ = (
+    'handle_outputs',
+)
diff --git a/lib/galaxy/tools/cwl/schema.py b/lib/galaxy/tools/cwl/schema.py
new file mode 100644
index 0000000..fb24b78
--- /dev/null
+++ b/lib/galaxy/tools/cwl/schema.py
@@ -0,0 +1,73 @@
+"""Abstraction around cwltool and related libraries for loading a CWL artifact."""
+import os
+
+from collections import namedtuple
+
+from six.moves.urllib.parse import urldefrag
+
+from .cwltool_deps import (
+    ensure_cwltool_available,
+    load_tool,
+    schema_salad,
+    workflow,
+)
+
+RawProcessReference = namedtuple("RawProcessReference", ["process_object", "uri"])
+ProcessDefinition = namedtuple("ProcessDefinition", ["process_object", "metadata", "document_loader", "avsc_names", "raw_process_reference"])
+
+
+class SchemaLoader(object):
+
+    def __init__(self, strict=True):
+        self._strict = strict
+        self._raw_document_loader = None
+
+    @property
+    def raw_document_loader(self):
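+        # Build the schema-salad Loader lazily so cwltool is only imported
+        # once a CWL artifact is actually loaded.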
+        if self._raw_document_loader is None:
+            ensure_cwltool_available()
+            self._raw_document_loader = schema_salad.ref_resolver.Loader({"cwl": "https://w3id.org/cwl/cwl#", "id": "@id"})
+
+        return self._raw_document_loader
+
+    def raw_process_reference(self, path):
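+        # Resolve the path to a file:// URI and fetch the raw, not-yet-validated
+        # process object through the schema-salad document loader.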
+        uri = "file://" + os.path.abspath(path)
+        fileuri, _ = urldefrag(uri)
+        return RawProcessReference(self.raw_document_loader.fetch(fileuri), uri)
+
+    def process_definition(self, raw_reference):
+        document_loader, avsc_names, process_object, metadata, uri = load_tool.validate_document(
+            self.raw_document_loader,
+            raw_reference.process_object,
+            raw_reference.uri,
+        )
+        process_def = ProcessDefinition(
+            process_object,
+            metadata,
+            document_loader,
+            avsc_names,
+            raw_reference,
+        )
+        return process_def
+
+    def tool(self, **kwds):
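+        # kwds may supply a process_definition, a raw_process_reference, or a
+        # path (plus an optional make_tool); missing pieces are derived in that order.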
+        process_definition = kwds.get("process_definition", None)
+        if process_definition is None:
+            raw_process_reference = kwds.get("raw_process_reference", None)
+            if raw_process_reference is None:
+                raw_process_reference = self.raw_process_reference(kwds["path"])
+            process_definition = self.process_definition(raw_process_reference)
+
+        make_tool = kwds.get("make_tool", workflow.defaultMakeTool)
+        tool = load_tool.make_tool(
+            process_definition.document_loader,
+            process_definition.avsc_names,
+            process_definition.metadata,
+            process_definition.raw_process_reference.uri,
+            make_tool,
+            {"strict": self._strict},
+        )
+        return tool
+
+
+schema_loader = SchemaLoader()
diff --git a/lib/galaxy/tools/data/__init__.py b/lib/galaxy/tools/data/__init__.py
new file mode 100644
index 0000000..f13d4c9
--- /dev/null
+++ b/lib/galaxy/tools/data/__init__.py
@@ -0,0 +1,727 @@
+"""
+Manage tool data tables, which store (at the application level) data that is
+used by tools, for example in the generation of dynamic options. Tables are
+loaded and stored by names which tools use to refer to them. This allows
+users to configure data tables for a local Galaxy instance without needing
+to modify the tool configurations.
+"""
+
+import logging
+import os
+import os.path
+import re
+import string
+import hashlib
+
+from glob import glob
+from tempfile import NamedTemporaryFile
+from urllib2 import urlopen
+
+from galaxy import util
+from galaxy.util.odict import odict
+
+from galaxy.util.dictifiable import Dictifiable
+
+log = logging.getLogger( __name__ )
+
+DEFAULT_TABLE_TYPE = 'tabular'
+
+
+class ToolDataTableManager( object ):
+    """Manages a collection of tool data tables"""
+
+    def __init__( self, tool_data_path, config_filename=None ):
+        self.tool_data_path = tool_data_path
+        # This stores all defined data table entries from both the tool_data_table_conf.xml file and the shed_tool_data_table_conf.xml file
+        # at server startup. If tool shed repositories are installed that contain a valid file named tool_data_table_conf.xml.sample, entries
+        # from that file are inserted into this dict at the time of installation.
+        self.data_tables = {}
+        for single_config_filename in util.listify( config_filename ):
+            if not single_config_filename:
+                continue
+            self.load_from_config_file( single_config_filename, self.tool_data_path, from_shed_config=False )
+
+    def __getitem__( self, key ):
+        return self.data_tables.__getitem__( key )
+
+    def __setitem__( self, key, value ):
+        return self.data_tables.__setitem__( key, value )
+
+    def __contains__( self, key ):
+        return self.data_tables.__contains__( key )
+
+    def get( self, name, default=None ):
+        try:
+            return self[ name ]
+        except KeyError:
+            return default
+
+    def set( self, name, value ):
+        self[ name ] = value
+
+    def get_tables( self ):
+        return self.data_tables
+
+    def load_from_config_file( self, config_filename, tool_data_path, from_shed_config=False ):
+        """
+        This method is called under 3 conditions:
+
+        1. When the ToolDataTableManager is initialized (see __init__ above).
+        2. Just after the ToolDataTableManager is initialized and the additional entries defined by shed_tool_data_table_conf.xml
+           are being loaded into the ToolDataTableManager.data_tables.
+        3. When a tool shed repository that includes a tool_data_table_conf.xml.sample file is being installed into a local
+           Galaxy instance.  In this case, there are 2 entry types to handle: files whose root tag is <tables> and files
+           whose root tag is <table> (see add_new_entries_from_config_file below for examples of each).
+        """
+        table_elems = []
+        if not isinstance( config_filename, list ):
+            config_filename = [ config_filename ]
+        for filename in config_filename:
+            tree = util.parse_xml( filename )
+            root = tree.getroot()
+            for table_elem in root.findall( 'table' ):
+                table = ToolDataTable.from_elem( table_elem, tool_data_path, from_shed_config, filename=filename )
+                table_elems.append( table_elem )
+                if table.name not in self.data_tables:
+                    self.data_tables[ table.name ] = table
+                    log.debug( "Loaded tool data table '%s' from file '%s'", table.name, filename )
+                else:
+                    log.debug( "Loading another instance of data table '%s' from file '%s', attempting to merge content.", table.name, filename )
+                    self.data_tables[ table.name ].merge_tool_data_table( table, allow_duplicates=False )  # only merge content, do not persist to disk, do not allow duplicate rows when merging
+                    # FIXME: This does not account for an entry with the same unique build ID, but a different path.
+        return table_elems
+
+    def add_new_entries_from_config_file( self, config_filename, tool_data_path, shed_tool_data_table_config, persist=False ):
+        """
+        This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
+        installed into a local Galaxy instance.  We have 2 cases to handle: files whose root tag is <tables>, for example::
+
+            <tables>
+                <!-- Location of Tmap files -->
+                <table name="tmap_indexes" comment_char="#">
+                    <columns>value, dbkey, name, path</columns>
+                    <file path="tool-data/tmap_index.loc" />
+                </table>
+            </tables>
+
+        and files whose root tag is <table>, for example::
+
+            <!-- Location of Tmap files -->
+            <table name="tmap_indexes" comment_char="#">
+                <columns>value, dbkey, name, path</columns>
+                <file path="tool-data/tmap_index.loc" />
+            </table>
+
+        """
+        error_message = ''
+        try:
+            table_elems = self.load_from_config_file( config_filename=config_filename,
+                                                      tool_data_path=tool_data_path,
+                                                      from_shed_config=True )
+        except Exception as e:
+            error_message = 'Error attempting to parse file %s: %s' % ( str( os.path.split( config_filename )[ 1 ] ), str( e ) )
+            log.debug( error_message )
+            table_elems = []
+        if persist:
+            # Persist Galaxy's version of the changed tool_data_table_conf.xml file.
+            self.to_xml_file( shed_tool_data_table_config, table_elems )
+        return table_elems, error_message
+
+    def to_xml_file( self, shed_tool_data_table_config, new_elems=None, remove_elems=None ):
+        """
+        Write the current in-memory version of the shed_tool_data_table_conf.xml file to disk.
+        remove_elems are removed before new_elems are added.
+        """
+        if not ( new_elems or remove_elems ):
+            log.debug( 'ToolDataTableManager.to_xml_file called without any elements to add or remove.' )
+            return  # no changes provided, no need to persist any changes
+        if not new_elems:
+            new_elems = []
+        if not remove_elems:
+            remove_elems = []
+        full_path = os.path.abspath( shed_tool_data_table_config )
+        # FIXME: we should lock changing this file by other threads / head nodes
+        try:
+            tree = util.parse_xml( full_path )
+            root = tree.getroot()
+            out_elems = [ elem for elem in root ]
+        except Exception as e:
+            out_elems = []
+            log.debug( 'Could not parse existing tool data table config, assume no existing elements: %s', e )
+        for elem in remove_elems:
+            # handle multiple occurrences of remove elem in existing elems
+            while elem in out_elems:
+                out_elems.remove( elem )
+        # add new elems
+        out_elems.extend( new_elems )
+        with open( full_path, 'wb' ) as out:
+            out.write( '<?xml version="1.0"?>\n<tables>\n' )
+            for elem in out_elems:
+                out.write( util.xml_to_string( elem, pretty=True ) )
+            out.write( '</tables>\n' )
+        os.chmod( full_path, 0o644 )
+
+    def reload_tables( self, table_names=None ):
+        """
+        Reload tool data tables.
+        """
+        tables = self.get_tables()
+        if not table_names:
+            table_names = tables.keys()
+        elif not isinstance( table_names, list ):
+            table_names = [ table_names ]
+        for table_name in table_names:
+            tables[ table_name ].reload_from_files()
+            log.debug( "Reloaded tool data table '%s' from files.", table_name )
+        return table_names
+
+
+class ToolDataTable( object ):
+
+    @classmethod
+    def from_elem( cls, table_elem, tool_data_path, from_shed_config, filename ):
+        table_type = table_elem.get( 'type', 'tabular' )
+        assert table_type in tool_data_table_types, "Unknown data table type '%s'" % table_type
+        return tool_data_table_types[ table_type ]( table_elem, tool_data_path, from_shed_config=from_shed_config, filename=filename )
+
+    def __init__( self, config_element, tool_data_path, from_shed_config=False, filename=None ):
+        self.name = config_element.get( 'name' )
+        self.comment_char = config_element.get( 'comment_char' )
+        self.empty_field_value = config_element.get( 'empty_field_value', '' )
+        self.empty_field_values = {}
+        self.allow_duplicate_entries = util.asbool( config_element.get( 'allow_duplicate_entries', True ) )
+        self.here = filename and os.path.dirname(filename)
+        self.filenames = odict()
+        self.tool_data_path = tool_data_path
+        self.missing_index_file = None
+        # increment this variable any time a new entry is added, or when the table is totally reloaded
+        # This value has no external meaning, and does not represent an abstract version of the underlying data
+        self._loaded_content_version = 1
+        self._load_info = ( [ config_element, tool_data_path ], { 'from_shed_config': from_shed_config } )
+        self._merged_load_info = []
+
+    def _update_version( self, version=None ):
+        if version is not None:
+            self._loaded_content_version = version
+        else:
+            self._loaded_content_version += 1
+        return self._loaded_content_version
+
+    def get_empty_field_by_name( self, name ):
+        return self.empty_field_values.get( name, self.empty_field_value )
+
+    def _add_entry( self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+        raise NotImplementedError( "Abstract method" )
+
+    def add_entry( self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+        self._add_entry( entry, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd )
+        return self._update_version()
+
+    def add_entries( self, entries, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+        if entries:
+            for entry in entries:
+                self.add_entry( entry, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd )
+        return self._loaded_content_version
+
+    def _remove_entry(self, values):
+        raise NotImplementedError( "Abstract method" )
+
+    def remove_entry(self, values):
+        self._remove_entry( values )
+        return self._update_version()
+
+    def is_current_version( self, other_version ):
+        return self._loaded_content_version == other_version
+
+    def merge_tool_data_table( self, other_table, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+        raise NotImplementedError( "Abstract method" )
+
+    def reload_from_files( self ):
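+        # Re-run __init__ with the original load arguments, then replay any
+        # previously merged tables so the reloaded content matches the prior state.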
+        new_version = self._update_version()
+        merged_info = self._merged_load_info
+        self.__init__( *self._load_info[0], **self._load_info[1] )
+        self._update_version( version=new_version )
+        for ( tool_data_table_class, load_info ) in merged_info:
+            self.merge_tool_data_table( tool_data_table_class( *load_info[0], **load_info[1] ), allow_duplicates=False )
+        return self._update_version()
+
+
+class TabularToolDataTable( ToolDataTable, Dictifiable ):
+    """
+    Data stored in a tabular / separated value format on disk, allows multiple
+    files to be merged but all must have the same column definitions::
+
+        <table type="tabular" name="test">
+            <column name='...' index = '...' />
+            <file path="..." />
+            <file path="..." />
+        </table>
+
+    """
+    dict_collection_visible_keys = [ 'name' ]
+
+    type_key = 'tabular'
+
+    def __init__( self, config_element, tool_data_path, from_shed_config=False, filename=None ):
+        super( TabularToolDataTable, self ).__init__( config_element, tool_data_path, from_shed_config, filename )
+        self.config_element = config_element
+        self.data = []
+        self.configure_and_load( config_element, tool_data_path, from_shed_config )
+
+    def configure_and_load( self, config_element, tool_data_path, from_shed_config=False, url_timeout=10 ):
+        """
+        Configure and load table from an XML element.
+        """
+        self.separator = config_element.get( 'separator', '\t' )
+        self.comment_char = config_element.get( 'comment_char', '#' )
+        # Configure columns
+        self.parse_column_spec( config_element )
+
+        # store repo info if available:
+        repo_elem = config_element.find( 'tool_shed_repository' )
+        if repo_elem is not None:
+            repo_info = dict( tool_shed=repo_elem.find( 'tool_shed' ).text, name=repo_elem.find( 'repository_name' ).text,
+                              owner=repo_elem.find( 'repository_owner' ).text, installed_changeset_revision=repo_elem.find( 'installed_changeset_revision' ).text )
+        else:
+            repo_info = None
+        # Read every file
+        for file_element in config_element.findall( 'file' ):
+            tmp_file = None
+            filename = file_element.get( 'path', None )
+            if filename is None:
+                # Handle URLs as files
+                filename = file_element.get( 'url', None )
+                if filename:
+                    tmp_file = NamedTemporaryFile( prefix='TTDT_URL_%s-' % self.name )
+                    try:
+                        tmp_file.write( urlopen( filename, timeout=url_timeout ).read() )
+                    except Exception as e:
+                        log.error( 'Error loading Data Table URL "%s": %s', filename, e )
+                        continue
+                    log.debug( 'Loading Data Table URL "%s" as filename "%s".', filename, tmp_file.name )
+                    filename = tmp_file.name
+                    tmp_file.flush()
+            filename = file_path = expand_here_template( filename, here=self.here )
+            found = False
+            if file_path is None:
+                log.debug( "Encountered a file element (%s) that does not contain a path value when loading tool data table '%s'.", util.xml_to_string( file_element ), self.name )
+                continue
+
+            # FIXME: splitting on and merging paths from a configuration file when loading is wonky
+            # Data should exist on disk in the state needed, i.e. the xml configuration should
+            # point directly to the desired file to load. Munging of the tool_data_tables_conf.xml.sample
+            # can be done during installing / testing / metadata resetting with the creation of a proper
+            # tool_data_tables_conf.xml file, containing correct <file path=> attributes. Allowing a
+            # path.join with a different root should be allowed, but splitting should not be necessary.
+            if tool_data_path and from_shed_config:
+                # Must identify with from_shed_config as well, because the
+                # regular galaxy app has and uses tool_data_path.
+                # We're loading a tool in the tool shed, so we cannot use the Galaxy tool-data
+                # directory which is hard-coded into the tool_data_table_conf.xml entries.
+                filename = os.path.split( file_path )[ 1 ]
+                filename = os.path.join( tool_data_path, filename )
+            if os.path.exists( filename ):
+                found = True
+            elif os.path.exists( "%s.sample" % filename ) and not from_shed_config:
+                log.info("Could not find tool data %s, reading sample" % filename)
+                filename = "%s.sample" % filename
+                found = True
+            else:
+                # Since the path attribute can include a hard-coded path to a specific directory
+                # (e.g., <file path="tool-data/cg_crr_files.loc" />) which may not be the same value
+                # as self.tool_data_path, we'll parse the path to get the filename and see if it is
+                # in self.tool_data_path.
+                file_path, file_name = os.path.split( filename )
+                if file_path and file_path != self.tool_data_path:
+                    corrected_filename = os.path.join( self.tool_data_path, file_name )
+                    if os.path.exists( corrected_filename ):
+                        filename = corrected_filename
+                        found = True
+
+            errors = []
+            if found:
+                self.extend_data_with( filename, errors=errors )
+                self._update_version()
+            else:
+                self.missing_index_file = filename
+                log.warning( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )
+
+            if filename not in self.filenames or not self.filenames[ filename ][ 'found' ]:
+                self.filenames[ filename ] = dict( found=found, filename=filename, from_shed_config=from_shed_config, tool_data_path=tool_data_path,
+                                                   config_element=config_element, tool_shed_repository=repo_info, errors=errors )
+            else:
+                log.debug( "Filename '%s' already exists in filenames (%s), not adding", filename, self.filenames.keys() )
+            # Remove URL tmp file
+            if tmp_file is not None:
+                tmp_file.close()
+
+    def merge_tool_data_table( self, other_table, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+        assert self.columns == other_table.columns, "Merging tabular data tables with non matching columns is not allowed: %s:%s != %s:%s" % ( self.name, self.columns, other_table.name, other_table.columns )
+        # merge filename info
+        for filename, info in other_table.filenames.iteritems():
+            if filename not in self.filenames:
+                self.filenames[ filename ] = info
+        # save info about table
+        self._merged_load_info.append( ( other_table.__class__, other_table._load_info ) )
+        # If we are merging in a data table that does not allow duplicates, enforce that upon the data table
+        if self.allow_duplicate_entries and not other_table.allow_duplicate_entries:
+            log.debug( 'While attempting to merge tool data table "%s", the other instance of the table specified that duplicate entries are not allowed, now deduplicating all previous entries.', self.name )
+            self.allow_duplicate_entries = False
+            self._deduplicate_data()
+        # add data entries and return current data table version
+        return self.add_entries( other_table.data, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd )
+
+    def handle_found_index_file( self, filename ):
+        self.missing_index_file = None
+        self.extend_data_with( filename )
+
+    def get_fields( self ):
+        return self.data
+
+    def get_field(self, value):
+        rval = None
+        for i in self.get_named_fields_list():
+            if i['value'] == value:
+                rval = TabularToolDataField(i)
+        return rval
+
+    def get_named_fields_list( self ):
+        rval = []
+        named_columns = self.get_column_name_list()
+        for fields in self.get_fields():
+            field_dict = {}
+            for i, field in enumerate( fields ):
+                if i == len( named_columns ):
+                    break
+                field_name = named_columns[i]
+                if field_name is None:
+                    field_name = i  # fall back to the 0-based column index when no name is defined
+                field_dict[ field_name ] = field
+            rval.append( field_dict )
+        return rval
+
+    def get_version_fields( self ):
+        return ( self._loaded_content_version, self.get_fields() )
+
+    def parse_column_spec( self, config_element ):
+        """
+        Parse column definitions, which can either be a set of 'column' elements
+        with a name and index (as in dynamic options config), or a shorthand
+        comma separated list of names in order as the text of a 'columns'
+        element.
+
+        A column named 'value' is required.
+        """
+        self.columns = {}
+        if config_element.find( 'columns' ) is not None:
+            column_names = util.xml_text( config_element.find( 'columns' ) )
+            column_names = [ n.strip() for n in column_names.split( ',' ) ]
+            for index, name in enumerate( column_names ):
+                self.columns[ name ] = index
+                self.largest_index = index
+        else:
+            self.largest_index = 0
+            for column_elem in config_element.findall( 'column' ):
+                name = column_elem.get( 'name', None )
+                assert name is not None, "Required 'name' attribute missing from column def"
+                index = column_elem.get( 'index', None )
+                assert index is not None, "Required 'index' attribute missing from column def"
+                index = int( index )
+                self.columns[name] = index
+                if index > self.largest_index:
+                    self.largest_index = index
+                empty_field_value = column_elem.get( 'empty_field_value', None )
+                if empty_field_value is not None:
+                    self.empty_field_values[ name ] = empty_field_value
+        assert 'value' in self.columns, "Required 'value' column missing from column def"
+        if 'name' not in self.columns:
+            self.columns['name'] = self.columns['value']
+
+    def extend_data_with( self, filename, errors=None ):
+        here = os.path.dirname(os.path.abspath(filename))
+        with open( filename ) as reader:
+            self.data.extend( self.parse_file_fields( reader, errors=errors, here=here ) )
+        if not self.allow_duplicate_entries:
+            self._deduplicate_data()
+
+    def parse_file_fields( self, reader, errors=None, here="__HERE__" ):
+        """
+        Parse separated lines from file and return a list of tuples.
+
+        TODO: Allow named access to fields using the column names.
+        """
+        separator_char = "<TAB>" if self.separator == "\t" else self.separator
+
+        rval = []
+        for i, line in enumerate( reader ):
+            if line.lstrip().startswith( self.comment_char ):
+                continue
+            line = line.rstrip( "\n\r" )
+            if line:
+                line = expand_here_template( line, here=here )
+                fields = line.split( self.separator )
+                if self.largest_index < len( fields ):
+                    rval.append( fields )
+                else:
+                    line_error = "Line %i in tool data table '%s' is invalid (HINT: '%s' characters must be used to separate fields):\n%s" % ( ( i + 1 ), self.name, separator_char, line )
+                    if errors is not None:
+                        errors.append( line_error )
+                    log.warning( line_error )
+        if hasattr(reader, "name"):
+            log.debug("Loaded %i lines from '%s' for '%s'", len(rval), reader.name, self.name)
+        return rval
+
+    def get_column_name_list( self ):
+        rval = []
+        for i in range( self.largest_index + 1 ):
+            found_column = False
+            for name, index in self.columns.iteritems():
+                if index == i:
+                    if not found_column:
+                        rval.append( name )
+                    elif name == 'value':
+                        # the column named 'value' always has priority over other named columns
+                        rval[ -1 ] = name
+                    found_column = True
+            if not found_column:
+                rval.append( None )
+        return rval
+
+    def get_entry( self, query_attr, query_val, return_attr, default=None ):
+        """
+        Returns table entry associated with a col/val pair.
+        """
+        rval = self.get_entries( query_attr, query_val, return_attr, default=default, limit=1 )
+        if rval:
+            return rval[0]
+        return default
+
+    def get_entries( self, query_attr, query_val, return_attr, default=None, limit=None ):
+        """
+        Returns all table entries associated with a col/val pair, up to an optional limit.
+        """
+        query_col = self.columns.get( query_attr, None )
+        if query_col is None:
+            return default
+        if return_attr is not None:
+            return_col = self.columns.get( return_attr, None )
+            if return_col is None:
+                return default
+        rval = []
+        # Look for table entry.
+        for fields in self.get_fields():
+            if fields[ query_col ] == query_val:
+                if return_attr is None:
+                    field_dict = {}
+                    for i, col_name in enumerate( self.get_column_name_list() ):
+                        field_dict[ col_name or i ] = fields[i]
+                    rval.append( field_dict )
+                else:
+                    rval.append( fields[ return_col ] )
+                if limit is not None and len( rval ) == limit:
+                    break
+        return rval or default
+
+    def get_filename_for_source( self, source, default=None ):
+        if source:
+            # if dict, assume is compatible info dict, otherwise call method
+            if isinstance( source, dict ):
+                source_repo_info = source
+            else:
+                source_repo_info = source.get_tool_shed_repository_info_dict()
+        else:
+            source_repo_info = None
+        filename = default
+        for name, value in self.filenames.iteritems():
+            repo_info = value.get( 'tool_shed_repository', None )
+            if ( not source_repo_info and not repo_info ) or ( source_repo_info and repo_info and source_repo_info == repo_info ):
+                filename = name
+                break
+        return filename
+
+    def _add_entry( self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
+        # accepts dict or list of columns
+        if isinstance( entry, dict ):
+            fields = []
+            for column_name in self.get_column_name_list():
+                if column_name not in entry:
+                    log.debug( "Using default column value for column '%s' when adding data table entry (%s) to table '%s'.", column_name, entry, self.name )
+                    field_value = self.get_empty_field_by_name( column_name )
+                else:
+                    field_value = entry[ column_name ]
+                fields.append( field_value )
+        else:
+            fields = entry
+        is_error = False
+        if self.largest_index < len( fields ):
+            fields = self._replace_field_separators( fields )
+            if fields not in self.get_fields() or ( allow_duplicates and self.allow_duplicate_entries ):
+                self.data.append( fields )
+            else:
+                log.debug( "Attempted to add fields (%s) to data table '%s', but this entry already exists and allow_duplicates is False.", fields, self.name )
+                is_error = True
+        else:
+            log.error( "Attempted to add fields (%s) to data table '%s', but there were not enough fields specified ( %i < %i ).", fields, self.name, len( fields ), self.largest_index + 1 )
+            is_error = True
+        filename = None
+
+        if persist and ( not is_error or persist_on_error ):
+            filename = self.get_filename_for_source( entry_source )
+            if filename is None:
+                # should we default to using any filename here instead?
+                log.error( "Unable to determine filename for persisting data table '%s' values: '%s'.", self.name, fields )
+                is_error = True
+            else:
+                # FIXME: Need to lock these files for editing
+                log.debug( "Persisting changes to file: %s", filename )
+                try:
+                    data_table_fh = open( filename, 'r+b' )
+                except IOError as e:
+                    log.warning( 'Error opening data table file (%s) with r+b, assuming file does not exist and will open as wb: %s', filename, e )
+                    data_table_fh = open( filename, 'wb' )
+                if os.stat( filename ).st_size != 0:
+                    # ensure the last existing line ends with a newline
+                    data_table_fh.seek( -1, 2 )  # last char in file
+                    last_char = data_table_fh.read( 1 )
+                    if last_char not in [ '\n', '\r' ]:
+                        data_table_fh.write( '\n' )
+                data_table_fh.write( "%s\n" % ( self.separator.join( fields ) ) )
+                data_table_fh.close()
+        return not is_error
+
+    def _remove_entry( self, values ):
+        # update every file
+        for filename in self.filenames:
+            if os.path.exists( filename ):
+                values = self._replace_field_separators( values )
+                self.filter_file_fields( filename, values )
+            else:
+                log.warning( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )
+        self.reload_from_files()
+
+    def filter_file_fields( self, loc_file, values ):
+        """
+        Reads separated lines from a loc file and writes back only the lines whose fields do not equal the given values.
+        """
+        with open(loc_file) as reader:
+            rval = ""
+            for line in reader:
+                if line.lstrip().startswith( self.comment_char ):
+                    rval += line
+                else:
+                    line_s = line.rstrip( "\n\r" )
+                    if line_s:
+                        fields = line_s.split( self.separator )
+                        if fields != values:
+                            rval += line
+
+        with open(loc_file, 'wb') as writer:
+            writer.write(rval)
+
+        return rval
+
+    def _replace_field_separators( self, fields, separator=None, replace=None, comment_char=None ):
+        # make sure none of the fields contain separator
+        # make sure separator replace is different from comment_char,
+        # due to possible leading replace
+        if separator is None:
+            separator = self.separator
+        if replace is None:
+            if separator == " ":
+                if comment_char == "\t":
+                    replace = "_"
+                else:
+                    replace = "\t"
+            else:
+                if comment_char == " ":
+                    replace = "_"
+                else:
+                    replace = " "
+        return [ x.replace( separator, replace ) for x in fields ]
+
+    def _deduplicate_data( self ):
+        # Remove duplicate entries, without recreating self.data object
+        dup_lines = []
+        hash_list = []
+        for i, fields in enumerate( self.data ):
+            fields_hash = hash( self.separator.join( fields ) )
+            if fields_hash in hash_list:
+                dup_lines.append( i )
+                log.debug( 'Found duplicate entry in tool data table "%s", but duplicates are not allowed, removing additional entry for: "%s"', self.name, fields )
+            else:
+                hash_list.append( fields_hash )
+        for i in reversed( dup_lines ):
+            self.data.pop( i )
+
+    @property
+    def xml_string( self ):
+        return util.xml_to_string( self.config_element )
+
+    def to_dict(self, view='collection'):
+        rval = super(TabularToolDataTable, self).to_dict()
+        if view == 'element':
+            rval['columns'] = sorted(self.columns.keys(), key=lambda x: self.columns[x])
+            rval['fields'] = self.get_fields()
+        return rval
+
+
+class TabularToolDataField(Dictifiable, object):
+
+    dict_collection_visible_keys = []
+
+    def __init__(self, data):
+        self.data = data
+
+    def __getitem__(self, key):
+        return self.data[key]
+
+    def get_base_path(self):
+        return os.path.normpath(os.path.abspath( self.data['path'] ))
+
+    def get_base_dir(self):
+        path = self.get_base_path()
+        if not os.path.isdir(path):
+            path = os.path.dirname(path)
+        return path
+
+    def clean_base_dir(self, path):
+        return re.sub( "^" + self.get_base_dir() + r"/*", "", path )
+
+    def get_files(self):
+        return glob( self.get_base_path() + "*" )
+
+    def get_filesize_map(self, rm_base_dir=False):
+        out = {}
+        for path in self.get_files():
+            if rm_base_dir:
+                out[self.clean_base_dir(path)] = os.path.getsize(path)
+            else:
+                out[path] = os.path.getsize(path)
+        return out
+
+    def get_fingerprint(self):
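+        # SHA-1 over the sorted (relative path, size) pairs, so the fingerprint
+        # changes whenever managed files are added, removed, or change size.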
+        sha1 = hashlib.sha1()
+        fmap = self.get_filesize_map(True)
+        for k in sorted(fmap.keys()):
+            sha1.update(k)
+            sha1.update(str(fmap[k]))
+        return sha1.hexdigest()
+
+    def to_dict(self):
+        rval = super(TabularToolDataField, self).to_dict()
+        rval['name'] = self.data['value']
+        rval['fields'] = self.data
+        rval['base_dir'] = self.get_base_dir()
+        rval['files'] = self.get_filesize_map(True)
+        rval['fingerprint'] = self.get_fingerprint()
+        return rval
+
+
+def expand_here_template(content, here=None):
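+    # Substitute the ${__HERE__} template token with the directory containing
+    # the defining loc/config file, so paths can be written relative to it.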
+    if here and content:
+        content = string.Template(content).safe_substitute( { "__HERE__": here })
+    return content
+
+
+# Registry of tool data types by type_key
+tool_data_table_types = dict( [ ( cls.type_key, cls ) for cls in [ TabularToolDataTable ] ] )
diff --git a/lib/galaxy/tools/data_manager/__init__.py b/lib/galaxy/tools/data_manager/__init__.py
new file mode 100644
index 0000000..52fea2a
--- /dev/null
+++ b/lib/galaxy/tools/data_manager/__init__.py
@@ -0,0 +1,3 @@
+"""
+Data Manager
+"""
diff --git a/lib/galaxy/tools/data_manager/manager.py b/lib/galaxy/tools/data_manager/manager.py
new file mode 100644
index 0000000..44691e7
--- /dev/null
+++ b/lib/galaxy/tools/data_manager/manager.py
@@ -0,0 +1,399 @@
+import errno
+import json
+import os
+from six import string_types
+
+from galaxy import util
+from galaxy.util.odict import odict
+from galaxy.util.template import fill_template
+from galaxy.tools.data import TabularToolDataTable
+from galaxy.tools.toolbox.watcher import get_tool_conf_watcher
+from tool_shed.util import common_util
+from tool_shed.util import repository_util
+from galaxy.queue_worker import reload_data_managers
+from galaxy.queue_worker import send_control_task
+
+# set up logger
+import logging
+log = logging.getLogger( __name__ )
+
+SUPPORTED_DATA_TABLE_TYPES = ( TabularToolDataTable, )
+VALUE_TRANSLATION_FUNCTIONS = dict( abspath=os.path.abspath )
+DEFAULT_VALUE_TRANSLATION_TYPE = 'template'
+
+
+class DataManagers( object ):
+    def __init__( self, app, xml_filename=None, conf_watchers=None ):
+        self.app = app
+        self.data_managers = odict()
+        self.managed_data_tables = odict()
+        self.tool_path = None
+        self._reload_count = 0
+        self.filename = xml_filename or self.app.config.data_manager_config_file
+        for filename in util.listify( self.filename ):
+            if not filename:
+                continue
+            self.load_from_xml( filename )
+        if self.app.config.shed_data_manager_config_file:
+            self.load_from_xml( self.app.config.shed_data_manager_config_file, store_tool_path=False, replace_existing=True )
+        if conf_watchers:
+            self.conf_watchers = conf_watchers
+        else:
+            self.conf_watchers = self.get_conf_watchers()
+
+    def get_conf_watchers(self):
+        conf_watchers = []
+        conf_watchers.extend([(get_tool_conf_watcher(lambda: reload_data_managers(self.app)), filename) for filename in util.listify(self.filename) if filename])
+        if self.app.config.shed_data_manager_config_file:
+            conf_watchers.append((get_tool_conf_watcher(lambda: reload_data_managers(self.app)), self.app.config.shed_data_manager_config_file))
+        for watcher, filename in conf_watchers:
+            watcher.watch_file(filename)
+        return [watcher for watcher, filename in conf_watchers]
+
+    def shutdown(self):
+        for watcher in self.conf_watchers:
+            watcher.shutdown()
+
+    def load_from_xml( self, xml_filename, store_tool_path=True, replace_existing=False ):
+        try:
+            tree = util.parse_xml( xml_filename )
+        except Exception as e:
+            log.error( 'There was an error parsing your Data Manager config file "%s": %s' % ( xml_filename, e ) )
+            return  # we are not able to load any data managers
+        root = tree.getroot()
+        if root.tag != 'data_managers':
+            log.error( 'A data managers configuration must have a "data_managers" tag as the root. "%s" is present' % ( root.tag ) )
+            return
+        if store_tool_path:
+            tool_path = root.get( 'tool_path', None )
+            if tool_path is None:
+                tool_path = self.app.config.tool_path
+            if not tool_path:
+                tool_path = '.'
+            self.tool_path = tool_path
+        for data_manager_elem in root.findall( 'data_manager' ):
+            self.load_manager_from_elem( data_manager_elem, replace_existing=replace_existing )
+
+    def load_manager_from_elem( self, data_manager_elem, tool_path=None, add_manager=True, replace_existing=False ):
+        try:
+            data_manager = DataManager( self, data_manager_elem, tool_path=tool_path )
+        except Exception as e:
+            log.error( "Error loading data_manager '%s':\n%s" % ( e, util.xml_to_string( data_manager_elem ) ) )
+            return None
+        if add_manager:
+            self.add_manager( data_manager, replace_existing=replace_existing )
+        log.debug( 'Loaded Data Manager: %s' % ( data_manager.id ) )
+        return data_manager
+
+    def add_manager( self, data_manager, replace_existing=False ):
+        if not replace_existing:
+            assert data_manager.id not in self.data_managers, "A data manager has been defined twice: %s" % ( data_manager.id )
+        elif data_manager.id in self.data_managers:
+            # Data Manager already exists, remove first one and replace with new one
+            log.warning( "A data manager has been defined twice and will be replaced with the last loaded version: %s" % ( data_manager.id ) )
+            self.remove_manager( data_manager.id )
+        self.data_managers[ data_manager.id ] = data_manager
+        for data_table_name in data_manager.data_tables.keys():
+            if data_table_name not in self.managed_data_tables:
+                self.managed_data_tables[ data_table_name ] = []
+            self.managed_data_tables[ data_table_name ].append( data_manager )
+
+    def get_manager( self, *args, **kwds ):
+        return self.data_managers.get( *args, **kwds )
+
+    def remove_manager( self, manager_ids ):
+        if not isinstance( manager_ids, list ):
+            manager_ids = [ manager_ids ]
+        for manager_id in manager_ids:
+            data_manager = self.get_manager( manager_id, None )
+            if data_manager is not None:
+                del self.data_managers[ manager_id ]
+                # remove tool from toolbox
+                if data_manager.tool:
+                    self.app.toolbox.remove_tool_by_id( data_manager.tool.id )
+                # determine if any data_tables are no longer tracked
+                for data_table_name in data_manager.data_tables.keys():
+                    remove_data_table_tracking = True
+                    for other_data_manager in self.data_managers.itervalues():
+                        if data_table_name in other_data_manager.data_tables:
+                            remove_data_table_tracking = False
+                            break
+                    if remove_data_table_tracking and data_table_name in self.managed_data_tables:
+                        del self.managed_data_tables[ data_table_name ]
+
+
+class DataManager( object ):
+    GUID_TYPE = 'data_manager'
+    DEFAULT_VERSION = "0.0.1"
+
+    def __init__( self, data_managers, elem=None, tool_path=None ):
+        self.data_managers = data_managers
+        self.declared_id = None
+        self.name = None
+        self.description = None
+        self.version = self.DEFAULT_VERSION
+        self.guid = None
+        self.tool = None
+        self.data_tables = odict()
+        self.output_ref_by_data_table = {}
+        self.move_by_data_table_column = {}
+        self.value_translation_by_data_table_column = {}
+        self.tool_shed_repository_info_dict = None
+        self.undeclared_tables = False
+        if elem is not None:
+            self.load_from_element( elem, tool_path or self.data_managers.tool_path )
+
+    def load_from_element( self, elem, tool_path ):
+        assert elem.tag == 'data_manager', 'A data manager configuration must have a "data_manager" tag as the root. "%s" is present' % ( elem.tag )
+        self.declared_id = elem.get( 'id', None )
+        self.guid = elem.get( 'guid', None )
+        path = elem.get( 'tool_file', None )
+        self.version = elem.get( 'version', self.version )
+        tool_shed_repository_id = None
+        tool_guid = None
+        if path is None:
+            tool_elem = elem.find( 'tool' )
+            assert tool_elem is not None, "Error loading tool for data manager. Make sure that a tool_file attribute or a tool tag set has been defined:\n%s" % ( util.xml_to_string( elem ) )
+            path = tool_elem.get( "file", None )
+            tool_guid = tool_elem.get( "guid", None )
+            # need to determine repository info so that dependencies will work correctly
+            tool_shed_url = tool_elem.find( 'tool_shed' ).text
+            # Handle protocol changes.
+            tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.data_managers.app, tool_shed_url )
+            # The protocol is not stored in the database.
+            tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url )
+            repository_name = tool_elem.find( 'repository_name' ).text
+            repository_owner = tool_elem.find( 'repository_owner' ).text
+            installed_changeset_revision = tool_elem.find( 'installed_changeset_revision' ).text
+            self.tool_shed_repository_info_dict = dict( tool_shed=tool_shed,
+                                                        name=repository_name,
+                                                        owner=repository_owner,
+                                                        installed_changeset_revision=installed_changeset_revision )
+            tool_shed_repository = \
+                repository_util.get_installed_repository( self.data_managers.app,
+                                                          tool_shed=tool_shed,
+                                                          name=repository_name,
+                                                          owner=repository_owner,
+                                                          installed_changeset_revision=installed_changeset_revision )
+            if tool_shed_repository is None:
+                log.warning( 'Could not determine tool shed repository from database. This should only ever happen when running tests.' )
+                # we'll set tool_path manually here from shed_conf_file
+                tool_shed_repository_id = None
+                try:
+                    tool_path = util.parse_xml( elem.get( 'shed_conf_file' ) ).getroot().get( 'tool_path', tool_path )
+                except Exception as e:
+                    log.error( 'Error determining tool_path for Data Manager during testing: %s', e )
+            else:
+                tool_shed_repository_id = self.data_managers.app.security.encode_id( tool_shed_repository.id )
+            # use shed_conf_file to determine tool_path
+            shed_conf_file = elem.get( "shed_conf_file", None )
+            if shed_conf_file:
+                shed_conf = self.data_managers.app.toolbox.get_shed_config_dict_by_filename( shed_conf_file, None )
+                if shed_conf:
+                    tool_path = shed_conf.get( "tool_path", tool_path )
+        assert path is not None, "A tool file path could not be determined:\n%s" % ( util.xml_to_string( elem ) )
+        self.load_tool( os.path.join( tool_path, path ),
+                        guid=tool_guid,
+                        data_manager_id=self.id,
+                        tool_shed_repository_id=tool_shed_repository_id )
+        self.name = elem.get( 'name', self.tool.name )
+        self.description = elem.get( 'description', self.tool.description )
+        self.undeclared_tables = util.asbool( elem.get( 'undeclared_tables', self.undeclared_tables ) )
+
+        for data_table_elem in elem.findall( 'data_table' ):
+            data_table_name = data_table_elem.get( "name" )
+            assert data_table_name is not None, "A name is required for a data table entry"
+            if data_table_name not in self.data_tables:
+                self.data_tables[ data_table_name ] = odict()
+            output_elem = data_table_elem.find( 'output' )
+            if output_elem is not None:
+                for column_elem in output_elem.findall( 'column' ):
+                    column_name = column_elem.get( 'name', None )
+                    assert column_name is not None, "Name is required for column entry"
+                    data_table_column_name = column_elem.get( 'data_table_name', column_name )
+                    self.data_tables[ data_table_name ][ data_table_column_name ] = column_name
+                    output_ref = column_elem.get( 'output_ref', None )
+                    if output_ref is not None:
+                        if data_table_name not in self.output_ref_by_data_table:
+                            self.output_ref_by_data_table[ data_table_name ] = {}
+                        self.output_ref_by_data_table[ data_table_name ][ data_table_column_name ] = output_ref
+                    value_translation_elems = column_elem.findall( 'value_translation' )
+                    if value_translation_elems is not None:
+                        for value_translation_elem in value_translation_elems:
+                            value_translation = value_translation_elem.text
+                            if value_translation is not None:
+                                value_translation_type = value_translation_elem.get( 'type', DEFAULT_VALUE_TRANSLATION_TYPE )
+                                if data_table_name not in self.value_translation_by_data_table_column:
+                                    self.value_translation_by_data_table_column[ data_table_name ] = {}
+                                if data_table_column_name not in self.value_translation_by_data_table_column[ data_table_name ]:
+                                    self.value_translation_by_data_table_column[ data_table_name ][ data_table_column_name ] = []
+                                if value_translation_type == 'function':
+                                    if value_translation in VALUE_TRANSLATION_FUNCTIONS:
+                                        value_translation = VALUE_TRANSLATION_FUNCTIONS[ value_translation ]
+                                    else:
+                                        raise ValueError( "Unsupported value translation function: '%s'" % ( value_translation ) )
+                                else:
+                                    assert value_translation_type == DEFAULT_VALUE_TRANSLATION_TYPE, ValueError( "Unsupported value translation type: '%s'" % ( value_translation_type ) )
+                                self.value_translation_by_data_table_column[ data_table_name ][ data_table_column_name ].append( value_translation )
+
+                    for move_elem in column_elem.findall( 'move' ):
+                        move_type = move_elem.get( 'type', 'directory' )
+                        relativize_symlinks = move_elem.get( 'relativize_symlinks', False )  # TODO: should we instead always relativize links?
+                        source_elem = move_elem.find( 'source' )
+                        if source_elem is None:
+                            source_base = None
+                            source_value = ''
+                        else:
+                            source_base = source_elem.get( 'base', None )
+                            source_value = source_elem.text
+                        target_elem = move_elem.find( 'target' )
+                        if target_elem is None:
+                            target_base = None
+                            target_value = ''
+                        else:
+                            target_base = target_elem.get( 'base', None )
+                            target_value = target_elem.text
+                        if data_table_name not in self.move_by_data_table_column:
+                            self.move_by_data_table_column[ data_table_name ] = {}
+                        self.move_by_data_table_column[ data_table_name ][ data_table_column_name ] = \
+                            dict( type=move_type,
+                                  source_base=source_base,
+                                  source_value=source_value,
+                                  target_base=target_base,
+                                  target_value=target_value,
+                                  relativize_symlinks=relativize_symlinks )
+
+    @property
+    def id( self ):
+        return self.guid or self.declared_id  # if we have a guid, we will use that as the data_manager id
+
+    def load_tool( self, tool_filename, guid=None, data_manager_id=None, tool_shed_repository_id=None ):
+        toolbox = self.data_managers.app.toolbox
+        tool = toolbox.load_hidden_tool( tool_filename,
+                                         guid=guid,
+                                         data_manager_id=data_manager_id,
+                                         repository_id=tool_shed_repository_id )
+        self.data_managers.app.toolbox.data_manager_tools[ tool.id ] = tool
+        self.tool = tool
+        return tool
+
+    def process_result( self, out_data ):
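+        # Each output dataset is expected to contain a JSON document produced by
+        # the data manager tool; merge those documents, then add the rows listed
+        # under 'data_tables' to the corresponding managed tables.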
+        data_manager_dicts = {}
+        data_manager_dict = {}
+        # TODO: fix this merging below
+        for output_name, output_dataset in out_data.iteritems():
+            try:
+                output_dict = json.loads( open( output_dataset.file_name ).read() )
+            except Exception as e:
+                log.warning( 'Error reading DataManagerTool json for "%s": %s' % ( output_name, e ) )
+                continue
+            data_manager_dicts[ output_name ] = output_dict
+            for key, value in output_dict.iteritems():
+                if key not in data_manager_dict:
+                    data_manager_dict[ key ] = {}
+                data_manager_dict[ key ].update( value )
+            data_manager_dict.update( output_dict )
+
+        data_tables_dict = data_manager_dict.get( 'data_tables', {} )
+        for data_table_name in self.data_tables.iterkeys():
+            data_table_values = data_tables_dict.pop( data_table_name, None )
+            if not data_table_values:
+                log.warning( 'No values for data table "%s" were returned by the data manager "%s".' % ( data_table_name, self.id ) )
+                continue  # next data table
+            data_table = self.data_managers.app.tool_data_tables.get( data_table_name, None )
+            if data_table is None:
+                log.error( 'The data manager "%s" returned an unknown data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % ( self.id, data_table_name, data_table_values, data_table_name, 'tool_data_table_conf.xml' ) )
+                continue  # next table name
+            if not isinstance( data_table, SUPPORTED_DATA_TABLE_TYPES ):
+                log.error( 'The data manager "%s" returned an unsupported data table "%s" with type "%s" with new entries "%s". These entries will not be created. Please confirm that the data table is of a supported type (%s).' % ( self.id, data_table_name, type( data_table ), data_table_values, SUPPORTED_DATA_TABLE_TYPES ) )
+                continue  # next table name
+            output_ref_values = {}
+            if data_table_name in self.output_ref_by_data_table:
+                for data_table_column, output_ref in self.output_ref_by_data_table[ data_table_name ].iteritems():
+                    output_ref_dataset = out_data.get( output_ref, None )
+                    assert output_ref_dataset is not None, "Referenced output was not found."
+                    output_ref_values[ data_table_column ] = output_ref_dataset
+
+            if not isinstance( data_table_values, list ):
+                data_table_values = [ data_table_values ]
+            for data_table_row in data_table_values:
+                data_table_value = dict( **data_table_row )  # keep original values here
+                for name, value in data_table_row.iteritems():  # FIXME: need to loop through here based upon order listed in data_manager config
+                    if name in output_ref_values:
+                        self.process_move( data_table_name, name, output_ref_values[ name ].extra_files_path, **data_table_value )
+                        data_table_value[ name ] = self.process_value_translation( data_table_name, name, **data_table_value )
+                data_table.add_entry( data_table_value, persist=True, entry_source=self )
+                send_control_task(self.data_managers.app,
+                                  'reload_tool_data_tables',
+                                  noop_self=True,
+                                  kwargs={'table_name': data_table_name} )
+        if self.undeclared_tables and data_tables_dict:
+            # Handle the data move by moving all the data out of the extra files path;
+            # if the target directory already exists, the contents are merged instead.
+            log.debug( 'Attempting to add entries for undeclared tables: %s.', ', '.join( data_tables_dict.keys() ) )
+            for ref_file in out_data.values():
+                util.move_merge( ref_file.extra_files_path, self.data_managers.app.config.galaxy_data_manager_data_path )
+            path_column_names = [ 'path' ]
+            for data_table_name, data_table_values in data_tables_dict.iteritems():
+                data_table = self.data_managers.app.tool_data_tables.get( data_table_name, None )
+                if not isinstance( data_table_values, list ):
+                    data_table_values = [ data_table_values ]
+                for data_table_row in data_table_values:
+                    data_table_value = dict( **data_table_row )  # keep original values here
+                    for name, value in data_table_row.iteritems():
+                        if name in path_column_names:
+                            data_table_value[ name ] = os.path.abspath( os.path.join( self.data_managers.app.config.galaxy_data_manager_data_path, value ) )
+                    data_table.add_entry( data_table_value, persist=True, entry_source=self )
+                    send_control_task(self.data_managers.app, 'reload_tool_data_tables',
+                                      noop_self=True,
+                                      kwargs={'table_name': data_table_name} )
+        else:
+            for data_table_name, data_table_values in data_tables_dict.iteritems():
+                # tool returned extra data table entries, but data table was not declared in data manager
+                # do not add these values, but do provide messages
+                log.warning( 'The data manager "%s" returned an undeclared data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % ( self.id, data_table_name, data_table_values, data_table_name, self.data_managers.filename ) )
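+    # A sketch of the JSON a data manager tool is expected to write to each
+    # output dataset (consumed by process_result above); the table and column
+    # names here are illustrative only:
+    #
+    #     {"data_tables": {"all_fasta": [{"value": "hg38", "dbkey": "hg38",
+    #                                     "name": "hg38", "path": "hg38.fa"}]}}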
+
+    def process_move( self, data_table_name, column_name, source_base_path, relative_symlinks=False, **kwd ):
+        if data_table_name in self.move_by_data_table_column and column_name in self.move_by_data_table_column[ data_table_name ]:
+            move_dict = self.move_by_data_table_column[ data_table_name ][ column_name ]
+            source = move_dict[ 'source_base' ]
+            if source is None:
+                source = source_base_path
+            else:
+                source = fill_template( source, GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd )
+            if move_dict[ 'source_value' ]:
+                source = os.path.join( source, fill_template( move_dict[ 'source_value' ], GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd )  )
+            target = move_dict[ 'target_base' ]
+            if target is None:
+                target = self.data_managers.app.config.galaxy_data_manager_data_path
+            else:
+                target = fill_template( target, GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd )
+            if move_dict[ 'target_value' ]:
+                target = os.path.join( target, fill_template( move_dict[ 'target_value' ], GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd  ) )
+
+            if move_dict[ 'type' ] == 'file':
+                dirs = os.path.split( target )[0]
+                try:
+                    os.makedirs( dirs )
+                except OSError as e:
+                    if e.errno != errno.EEXIST:
+                        raise
+            # moving a directory and the target already exists, we move the contents instead
+            util.move_merge( source, target )
+
+            if move_dict.get( 'relativize_symlinks', False ):
+                util.relativize_symlinks( target )
+
+            return True
+        return False
+
+    def process_value_translation( self, data_table_name, column_name, **kwd ):
+        value = kwd.get( column_name )
+        if data_table_name in self.value_translation_by_data_table_column and column_name in self.value_translation_by_data_table_column[ data_table_name ]:
+            for value_translation in self.value_translation_by_data_table_column[ data_table_name ][ column_name ]:
+                if isinstance( value_translation, string_types ):
+                    value = fill_template( value_translation, GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd  )
+                else:
+                    value = value_translation( value )
+        return value
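+    # Note: translations are applied in declaration order; a string entry is
+    # filled as a template against the row's column values (plus
+    # GALAXY_DATA_MANAGER_DATA_PATH), while a callable entry is applied
+    # directly to the current value.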
+
+    def get_tool_shed_repository_info_dict( self ):
+        return self.tool_shed_repository_info_dict
diff --git a/lib/galaxy/tools/deps/__init__.py b/lib/galaxy/tools/deps/__init__.py
new file mode 100644
index 0000000..4ad4fee
--- /dev/null
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -0,0 +1,223 @@
+"""
+Dependency management for tools.
+"""
+
+import json
+import logging
+import os.path
+import shutil
+
+from collections import OrderedDict
+
+from galaxy.util import (
+    hash_util,
+    plugin_config
+)
+
+from .resolvers import NullDependency
+from .resolvers.conda import CondaDependencyResolver, DEFAULT_ENSURE_CHANNELS
+from .resolvers.galaxy_packages import GalaxyPackageDependencyResolver
+from .resolvers.tool_shed_packages import ToolShedPackageDependencyResolver
+
+log = logging.getLogger( __name__ )
+
+# TODO: Load these from the plugins. Would require a two step initialization of
+# DependencyManager - where the plugins are loaded first and then the config
+# is parsed and sent through.
+EXTRA_CONFIG_KWDS = {
+    'conda_prefix': None,
+    'conda_exec': None,
+    'conda_debug': None,
+    'conda_ensure_channels': DEFAULT_ENSURE_CHANNELS,
+    'conda_auto_install': False,
+    'conda_auto_init': False,
+    'conda_copy_dependencies': False,
+}
+
+CONFIG_VAL_NOT_FOUND = object()
+
+
+def build_dependency_manager( config ):
+    if getattr( config, "use_tool_dependencies", False ):
+        dependency_manager_kwds = {
+            'default_base_path': config.tool_dependency_dir,
+            'conf_file': config.dependency_resolvers_config_file,
+        }
+        for key, default_value in EXTRA_CONFIG_KWDS.items():
+            value = getattr(config, key, CONFIG_VAL_NOT_FOUND)
+            if value is CONFIG_VAL_NOT_FOUND and hasattr(config, "config_dict"):
+                value = config.config_dict.get(key, CONFIG_VAL_NOT_FOUND)
+            if value is CONFIG_VAL_NOT_FOUND:
+                value = default_value
+            dependency_manager_kwds[key] = value
+        if config.use_cached_dependency_manager:
+            dependency_manager_kwds['tool_dependency_cache_dir'] = config.tool_dependency_cache_dir
+            dependency_manager = CachedDependencyManager(**dependency_manager_kwds)
+        else:
+            dependency_manager = DependencyManager( **dependency_manager_kwds )
+    else:
+        dependency_manager = NullDependencyManager()
+
+    return dependency_manager
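+# A minimal sketch of the selection above, assuming a hypothetical config
+# object (not part of this module):
+#
+#     >>> class FakeConfig(object):
+#     ...     use_tool_dependencies = False
+#     >>> isinstance(build_dependency_manager(FakeConfig()), NullDependencyManager)
+#     True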
+
+
+class NullDependencyManager( object ):
+    dependency_resolvers = []
+
+    def uses_tool_shed_dependencies(self):
+        return False
+
+    def dependency_shell_commands( self, requirements, **kwds ):
+        return []
+
+    def find_dep( self, name, version=None, type='package', **kwds ):
+        return NullDependency(version=version, name=name)
+
+
+class DependencyManager( object ):
+    """
+    A DependencyManager attempts to resolve named and versioned dependencies by
+    searching for them under a list of directories. Directories should be
+    of the form:
+
+        $BASE/name/version/...
+
+    and should each contain a file 'env.sh' which can be sourced to make the
+    dependency available in the current shell environment.
+    """
+    def __init__( self, default_base_path, conf_file=None, **extra_config ):
+        """
+        Create a new dependency manager looking for packages under the given
+        `default_base_path`.  The default base path is app.config.tool_dependency_dir.
+        """
+        if not os.path.exists( default_base_path ):
+            log.warning( "Path '%s' does not exist, ignoring", default_base_path )
+        if not os.path.isdir( default_base_path ):
+            log.warning( "Path '%s' is not a directory, ignoring", default_base_path )
+        self.extra_config = extra_config
+        self.default_base_path = os.path.abspath( default_base_path )
+        self.resolver_classes = self.__resolvers_dict()
+        self.dependency_resolvers = self.__build_dependency_resolvers( conf_file )
+
+    def dependency_shell_commands( self, requirements, **kwds ):
+        requirement_to_dependency = self.requirements_to_dependencies(requirements, **kwds)
+        return [dependency.shell_commands(requirement) for requirement, dependency in requirement_to_dependency.items()]
+
+    def requirements_to_dependencies(self, requirements, **kwds):
+        """
+        Takes a list of requirements and returns a dictionary
+        with requirements as keys and dependencies as values.
+        """
+        requirement_to_dependency = OrderedDict()
+        for requirement in requirements:
+            if requirement.type in [ 'package', 'set_environment' ]:
+                dependency = self.find_dep( name=requirement.name,
+                                            version=requirement.version,
+                                            type=requirement.type,
+                                            **kwds )
+                log.debug(dependency.resolver_msg)
+                if dependency.dependency_type:
+                    requirement_to_dependency[requirement] = dependency
+        if 'tool_instance' in kwds:
+            kwds['tool_instance'].dependencies = [dep.to_dict() for dep in requirement_to_dependency.values()]
+        return requirement_to_dependency
+
+    def uses_tool_shed_dependencies(self):
+        return any( map( lambda r: isinstance( r, ToolShedPackageDependencyResolver ), self.dependency_resolvers ) )
+
+    def find_dep( self, name, version=None, type='package', **kwds ):
+        log.debug('Find dependency %s version %s' % (name, version))
+        index = kwds.get('index', None)
+        require_exact = kwds.get('exact', False)
+        for i, resolver in enumerate(self.dependency_resolvers):
+            if index is not None and i != index:
+                continue
+            dependency = resolver.resolve( name, version, type, **kwds )
+            if require_exact and not dependency.exact:
+                continue
+            if not isinstance(dependency, NullDependency):
+                return dependency
+        return NullDependency(version=version, name=name)
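+    # A sketch of typical usage, assuming a hypothetical base path with
+    # nothing installed (actual results depend on the configured resolvers):
+    #
+    #     >>> dm = DependencyManager('/tmp/tool_deps')
+    #     >>> dep = dm.find_dep('samtools', version='1.3')
+    #     >>> isinstance(dep, NullDependency)
+    #     True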
+
+    def __build_dependency_resolvers( self, conf_file ):
+        if not conf_file:
+            return self.__default_dependency_resolvers()
+        if not os.path.exists( conf_file ):
+            log.debug( "Unable to find config file '%s'", conf_file)
+            return self.__default_dependency_resolvers()
+        plugin_source = plugin_config.plugin_source_from_path( conf_file )
+        return self.__parse_resolver_conf_xml( plugin_source )
+
+    def __default_dependency_resolvers( self ):
+        return [
+            ToolShedPackageDependencyResolver(self),
+            GalaxyPackageDependencyResolver(self),
+            GalaxyPackageDependencyResolver(self, versionless=True),
+            CondaDependencyResolver(self),
+            CondaDependencyResolver(self, versionless=True),
+        ]
+
+    def __parse_resolver_conf_xml(self, plugin_source):
+        """
+        Parse resolver plugins from the given XML plugin source and return
+        the list of constructed dependency resolvers.
+        """
+        extra_kwds = dict( dependency_manager=self )
+        return plugin_config.load_plugins( self.resolver_classes, plugin_source, extra_kwds )
+
+    def __resolvers_dict( self ):
+        import galaxy.tools.deps.resolvers
+        return plugin_config.plugins_dict( galaxy.tools.deps.resolvers, 'resolver_type' )
+
+
+class CachedDependencyManager(DependencyManager):
+    def __init__(self, default_base_path, conf_file=None, **extra_config):
+        super(CachedDependencyManager, self).__init__(default_base_path=default_base_path, conf_file=conf_file, **extra_config)
+
+    def build_cache(self, requirements, **kwds):
+        resolved_dependencies = self.requirements_to_dependencies(requirements, **kwds)
+        cacheable_dependencies = [dep for dep in resolved_dependencies.values() if dep.cacheable]
+        hashed_dependencies_dir = self.get_hashed_dependencies_path(cacheable_dependencies)
+        if os.path.exists(hashed_dependencies_dir):
+            if kwds.get('force_rebuild', False):
+                try:
+                    shutil.rmtree(hashed_dependencies_dir)
+                except Exception:
+                    log.warning("Could not delete cached dependencies directory '%s'" % hashed_dependencies_dir)
+                    raise
+            else:
+                log.debug("Cached dependencies directory '%s' already exists, skipping build", hashed_dependencies_dir)
+                return
+        [dep.build_cache(hashed_dependencies_dir) for dep in cacheable_dependencies]
+
+    def dependency_shell_commands( self, requirements, **kwds ):
+        """
+        Runs a set of requirements through the dependency resolvers and returns
+        a list of commands required to activate the dependencies. If dependencies
+        are cacheable and the cache exists, will generate commands to activate
+        cached environments.
+        """
+        resolved_dependencies = self.requirements_to_dependencies(requirements, **kwds)
+        cacheable_dependencies = [dep for dep in resolved_dependencies.values() if dep.cacheable]
+        hashed_dependencies_dir = self.get_hashed_dependencies_path(cacheable_dependencies)
+        if os.path.exists(hashed_dependencies_dir):
+            [dep.set_cache_path(hashed_dependencies_dir) for dep in cacheable_dependencies]
+        commands = [dep.shell_commands(req) for req, dep in resolved_dependencies.items()]
+        return commands
+
+    def hash_dependencies(self, resolved_dependencies):
+        """Return hash for dependencies"""
+        resolved_dependencies = [(dep.name, dep.version, dep.exact, dep.dependency_type) for dep in resolved_dependencies]
+        hash_str = json.dumps(sorted(resolved_dependencies))
+        return hash_util.new_secure_hash(hash_str)[:8]  # short hash
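+    # A sketch showing that the hash is order-independent (namedtuples stand
+    # in for resolved dependency objects; `manager` is assumed to be an
+    # existing CachedDependencyManager):
+    #
+    #     >>> from collections import namedtuple
+    #     >>> Dep = namedtuple('Dep', 'name version exact dependency_type')
+    #     >>> a = Dep('samtools', '1.3', True, 'conda')
+    #     >>> b = Dep('bwa', '0.7.15', True, 'conda')
+    #     >>> manager.hash_dependencies([a, b]) == manager.hash_dependencies([b, a])
+    #     True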
+
+    def get_hashed_dependencies_path(self, resolved_dependencies):
+        """
+        Returns the path to the hashed dependencies directory (but does not evaluate whether the path exists).
+
+        :param resolved_dependencies: list of resolved dependencies
+        :type resolved_dependencies: list
+
+        :return: path
+        :rtype: str
+        """
+        req_hashes = self.hash_dependencies(resolved_dependencies)
+        return os.path.abspath(os.path.join(self.extra_config['tool_dependency_cache_dir'], req_hashes))
diff --git a/lib/galaxy/tools/deps/brew_exts.py b/lib/galaxy/tools/deps/brew_exts.py
new file mode 100755
index 0000000..37ce647
--- /dev/null
+++ b/lib/galaxy/tools/deps/brew_exts.py
@@ -0,0 +1,556 @@
+#!/usr/bin/env python
+
+# % brew vinstall samtools 1.0
+# % brew vinstall samtools 0.1.19
+# % brew vinstall samtools 1.1
+# % brew env samtools 1.1
+# PATH=/home/john/.linuxbrew/Cellar/htslib/1.1/bin:/home/john/.linuxbrew/Cellar/samtools/1.1/bin:$PATH
+# export PATH
+# LD_LIBRARY_PATH=/home/john/.linuxbrew/Cellar/htslib/1.1/lib:/home/john/.linuxbrew/Cellar/samtools/1.1/lib:$LD_LIBRARY_PATH
+# export LD_LIBRARY_PATH
+# % . <(brew env samtools 1.1)
+# % which samtools
+# /home/john/.linuxbrew/Cellar/samtools/1.1/bin/samtools
+# % . <(brew env samtools 0.1.19)
+# % which samtools
+# /home/john/.linuxbrew/Cellar/samtools/0.1.19/bin/samtools
+# % brew vuninstall samtools 1.0
+# % brew vdeps samtools 1.1
+# htslib@1.1
+# % brew vdeps samtools 0.1.19
+
+from __future__ import print_function
+
+import argparse
+import contextlib
+import glob
+import json
+import os
+import re
+import string
+import subprocess
+import sys
+
+WHITESPACE_PATTERN = re.compile(r"[\s]+")
+
+DESCRIPTION = "Script built on top of linuxbrew to operate on isolated, versioned brew installed environments."
+
+if sys.platform == "darwin":
+    DEFAULT_HOMEBREW_ROOT = "/usr/local"
+else:
+    DEFAULT_HOMEBREW_ROOT = os.path.join(os.path.expanduser("~"), ".linuxbrew")
+
+NO_BREW_ERROR_MESSAGE = "Could not find brew on PATH, please place on path or pass to script with --brew argument."
+CANNOT_DETERMINE_TAP_ERROR_MESSAGE = "Cannot determine tap of specified recipe - please use fully qualified recipe (e.g. homebrew/science/samtools)."
+VERBOSE = False
+RELAXED = False
+BREW_ARGS = []
+
+
+class BrewContext(object):
+
+    def __init__(self, args=None):
+        ensure_brew_on_path(args)
+        raw_config = brew_execute(["config"])
+        config_lines = [l.strip().split(":", 1) for l in raw_config.split("\n") if l]
+        config = dict([(p[0].strip(), p[1].strip()) for p in config_lines])
+        # unset if "/usr/local" -> https://github.com/Homebrew/homebrew/blob/master/Library/Homebrew/cmd/config.rb
+        homebrew_prefix = config.get("HOMEBREW_PREFIX", "/usr/local")
+        homebrew_cellar = config.get("HOMEBREW_CELLAR", os.path.join(homebrew_prefix, "Cellar"))
+        self.homebrew_prefix = homebrew_prefix
+        self.homebrew_cellar = homebrew_cellar
+
+
+class RecipeContext(object):
+
+    @staticmethod
+    def from_args(args, brew_context=None):
+        return RecipeContext(args.recipe, args.version, brew_context)
+
+    def __init__(self, recipe, version, brew_context=None):
+        self.recipe = recipe
+        self.version = version
+        self.brew_context = brew_context or BrewContext()
+
+    @property
+    def cellar_path(self):
+        return recipe_cellar_path(self.brew_context.homebrew_cellar, self.recipe, self.version)
+
+    @property
+    def tap_path(self):
+        return os.path.join(self.brew_context.homebrew_prefix, "Library", "Taps", self.__tap_path(self.recipe))
+
+    def __tap_path(self, recipe):
+        parts = recipe.split("/")
+        if len(parts) == 1:
+            info = brew_info(self.recipe)
+            from_url = info["from_url"]
+            if not from_url:
+                raise Exception(CANNOT_DETERMINE_TAP_ERROR_MESSAGE)
+            from_url_parts = from_url.split("/")
+            blob_index = from_url_parts.index("blob")  # comes right after username and repository
+            if blob_index < 2:
+                raise Exception(CANNOT_DETERMINE_TAP_ERROR_MESSAGE)
+            username = from_url_parts[blob_index - 2]
+            repository = from_url_parts[blob_index - 1]
+        else:
+            assert len(parts) == 3
+            parts = recipe.split("/")
+            username = parts[0]
+            repository = "homebrew-%s" % parts[1]
+
+        path = os.path.join(username, repository)
+        return path
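+    # For a fully qualified recipe the tap path is derived directly, e.g.
+    # "homebrew/science/samtools" -> "homebrew/homebrew-science"; for a bare
+    # recipe name it is recovered from the recipe's from_url, e.g.
+    # ".../Homebrew/homebrew-science/blob/master/samtools.rb" yields username
+    # "Homebrew" and repository "homebrew-science".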
+
+
+def main():
+    global VERBOSE
+    global RELAXED
+    global BREW_ARGS
+    parser = argparse.ArgumentParser(description=DESCRIPTION)
+    parser.add_argument("--brew", help="Path to linuxbrew 'brew' executable to target")
+    actions = ["vinstall", "vuninstall", "vdeps", "vinfo", "env"]
+    action = __action(sys)
+    if not action:
+        parser.add_argument('action', metavar='action', help="Versioned action to perform.", choices=actions)
+    parser.add_argument('recipe', metavar='recipe', help="Recipe for action - should be absolute (e.g. homebrew/science/samtools).")
+    parser.add_argument('version', metavar='version', help="Version for action (e.g. 0.1.19).")
+    parser.add_argument('--relaxed', action='store_true', help="Relaxed processing - for instance allow use of env on non-vinstall-ed recipes.")
+    parser.add_argument('--verbose', action='store_true', help="Verbose output")
+    parser.add_argument('restargs', nargs=argparse.REMAINDER)
+    args = parser.parse_args()
+    if args.verbose:
+        VERBOSE = True
+    if args.relaxed:
+        RELAXED = True
+    BREW_ARGS = args.restargs
+    if not action:
+        action = args.action
+    brew_context = BrewContext(args)
+    recipe_context = RecipeContext.from_args(args, brew_context)
+    if action == "vinstall":
+        versioned_install(recipe_context, args.recipe, args.version)
+    elif action == "vuninstall":
+        brew_execute(["switch", args.recipe, args.version])
+        brew_execute(["uninstall", args.recipe])
+    elif action == "vdeps":
+        print_versioned_deps(recipe_context, args.recipe, args.version)
+    elif action == "env":
+        env_statements = build_env_statements_from_recipe_context(recipe_context)
+        print(env_statements)
+    elif action == "vinfo":
+        with brew_head_at_version(recipe_context, args.recipe, args.version):
+            print(brew_info(args.recipe))
+    else:
+        raise NotImplementedError()
+
+
+class CommandLineException(Exception):
+
+    def __init__(self, command, stdout, stderr):
+        self.command = command
+        self.stdout = stdout
+        self.stderr = stderr
+        self.message = ("Failed to execute command-line %s, stderr was:\n"
+                        "-------->>begin stderr<<--------\n"
+                        "%s\n"
+                        "-------->>end stderr<<--------\n"
+                        "-------->>begin stdout<<--------\n"
+                        "%s\n"
+                        "-------->>end stdout<<--------\n"
+                        ) % (command, stderr, stdout)
+
+    def __str__(self):
+        return self.message
+
+
+def versioned_install(recipe_context, package=None, version=None, installed_deps=[]):
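+    # Note: the mutable default for installed_deps acts as a shared
+    # accumulator; the recursive calls below do not pass it explicitly and so
+    # reuse the same list, avoiding re-installing dependencies already
+    # handled during this invocation.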
+    if package is None:
+        package = recipe_context.recipe
+        version = recipe_context.version
+
+    attempt_unlink(package)
+    with brew_head_at_version(recipe_context, package, version):
+        deps = brew_deps(package)
+        deps_metadata = []
+        dep_to_version = {}
+        for dep in deps:
+            version_info = brew_versions_info(dep, recipe_context.tap_path)[0]
+            dep_version = version_info[0]
+            dep_to_version[dep] = dep_version
+            versioned = version_info[2]
+            if versioned:
+                if dep in installed_deps:
+                    continue
+                versioned_install(recipe_context, dep, dep_version)
+                installed_deps.append(dep)
+            else:
+                # Install latest.
+                dep_to_version[dep] = None
+                if dep in installed_deps:
+                    continue
+                unversioned_install(dep)
+        try:
+            for dep in deps:
+                dep_version = dep_to_version[dep]
+                if dep_version:
+                    brew_execute(["switch", dep, dep_version])
+                else:
+                    brew_execute(["link", dep])
+                # dep_version obtained from brew versions doesn't
+                # include revision. This linked_keg attribute does.
+                keg_version = brew_info(dep)["linked_keg"]
+                dep_metadata = {
+                    'name': dep,
+                    'version': keg_version,
+                    'versioned': bool(dep_version)
+                }
+                deps_metadata.append(dep_metadata)
+
+            cellar_root = recipe_context.brew_context.homebrew_cellar
+            cellar_path = recipe_context.cellar_path
+            env_actions = build_env_actions(deps_metadata, cellar_root, cellar_path, custom_only=True)
+            env = EnvAction.build_env(env_actions)
+            args = ["install"]
+            if VERBOSE:
+                args.append("--verbose")
+            args.extend(BREW_ARGS)
+            args.append(package)
+            brew_execute(args, env=env)
+            deps = brew_execute(["deps", package])
+            deps = [d.strip() for d in deps.split("\n") if d]
+            metadata = {
+                'deps': deps_metadata
+            }
+            cellar_root = recipe_context.brew_context.homebrew_cellar
+            cellar_path = recipe_cellar_path( cellar_root, package, version )
+            v_metadata_path = os.path.join(cellar_path, "INSTALL_RECEIPT_VERSIONED.json")
+            with open(v_metadata_path, "w") as f:
+                json.dump(metadata, f)
+
+        finally:
+            attempt_unlink_all(package, deps)
+
+
+def commit_for_version(recipe_context, package, version):
+    tap_path = recipe_context.tap_path
+    commit = None
+    with brew_head_at_commit("master", tap_path):
+        version_to_commit = brew_versions_info(package, tap_path)
+        if version is None:
+            version = version_to_commit[0][0]
+            commit = version_to_commit[0][1]
+        else:
+            for mapping in version_to_commit:
+                if mapping[0] == version:
+                    commit = mapping[1]
+    if commit is None:
+        raise Exception("Failed to find commit for version %s" % version)
+    return commit
+
+
+def print_versioned_deps(recipe_context, recipe, version):
+    deps = load_versioned_deps(recipe_context.cellar_path)
+    for dep in deps:
+        val = dep['name']
+        if dep['versioned']:
+            val += "@%s" % dep['version']
+        print(val)
+
+
+def load_versioned_deps(cellar_path, relaxed=None):
+    if relaxed is None:
+        relaxed = RELAXED
+    v_metadata_path = os.path.join(cellar_path, "INSTALL_RECEIPT_VERSIONED.json")
+    if not os.path.isfile(v_metadata_path):
+        if RELAXED:
+            return []
+        else:
+            raise IOError("Could not locate versioned receipt file: {}".format(v_metadata_path))
+    with open(v_metadata_path, "r") as f:
+        metadata = json.load(f)
+    return metadata['deps']
+
+
+def unversioned_install(package):
+    try:
+        deps = brew_deps(package)
+        for dep in deps:
+            brew_execute(["link", dep])
+        brew_execute(["install", package])
+    finally:
+        attempt_unlink_all(package, deps)
+
+
+def attempt_unlink_all(package, deps):
+    for dep in deps:
+        attempt_unlink(dep)
+    attempt_unlink(package)
+
+
+def attempt_unlink(package):
+    try:
+        brew_execute(["unlink", package])
+    except Exception:
+        # TODO: warn
+        pass
+
+
+def brew_execute(args, env=None):
+    os.environ["HOMEBREW_NO_EMOJI"] = "1"  # simplify brew parsing.
+    cmds = ["brew"] + args
+    return execute(cmds, env=env)
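+# For example, brew_execute(["info", "--json=v1", "samtools"]) runs
+# `brew info --json=v1 samtools` and returns its standard output, raising
+# CommandLineException on a non-zero exit status.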
+
+
+def build_env_statements_from_recipe_context(recipe_context, **kwds):
+    cellar_root = recipe_context.brew_context.homebrew_cellar
+    env_statements = build_env_statements(cellar_root, recipe_context.cellar_path, **kwds)
+    return env_statements
+
+
+def build_env_statements(cellar_root, cellar_path, relaxed=None, custom_only=False):
+    deps = load_versioned_deps(cellar_path, relaxed=relaxed)
+    actions = build_env_actions(deps, cellar_root, cellar_path, relaxed, custom_only)
+    env_statements = []
+    for action in actions:
+        env_statements.extend(action.to_statements())
+    return "\n".join(env_statements)
+
+
+def build_env_actions(deps, cellar_root, cellar_path, relaxed=None, custom_only=False):
+
+    path_appends = []
+    ld_path_appends = []
+    actions = []
+
+    def handle_keg(cellar_path):
+        bin_path = os.path.join(cellar_path, "bin")
+        if os.path.isdir(bin_path):
+            path_appends.append(bin_path)
+        lib_path = os.path.join(cellar_path, "lib")
+        if os.path.isdir(lib_path):
+            ld_path_appends.append(lib_path)
+        env_path = os.path.join(cellar_path, "platform_environment.json")
+        if os.path.exists(env_path):
+            with open(env_path, "r") as f:
+                env_metadata = json.load(f)
+                if "actions" in env_metadata:
+                    def to_action(desc):
+                        return EnvAction(cellar_path, desc)
+                    actions.extend(map(to_action, env_metadata["actions"]))
+
+    for dep in deps:
+        package = dep['name']
+        version = dep['version']
+        dep_cellar_path = recipe_cellar_path( cellar_root, package, version )
+        handle_keg( dep_cellar_path )
+
+    handle_keg( cellar_path )
+    if not custom_only:
+        if path_appends:
+            actions.append(EnvAction(cellar_path, {"action": "prepend", "variable": "PATH", "value": ":".join(path_appends)}))
+        if ld_path_appends:
+            actions.append(EnvAction(cellar_path, {"action": "prepend", "variable": "LD_LIBRARY_PATH", "value": ":".join(ld_path_appends)}))
+    return actions
+
+
+class EnvAction(object):
+
+    def __init__(self, keg_root, action_description):
+        self.variable = action_description["variable"]
+        self.action = action_description["action"]
+        self.value = string.Template(action_description["value"]).safe_substitute({
+            'KEG_ROOT': keg_root,
+        })
+
+    @staticmethod
+    def build_env(env_actions):
+        new_env = os.environ.copy()
+        map(lambda env_action: env_action.modify_environ(new_env), env_actions)
+        return new_env
+
+    def modify_environ(self, environ):
+        if self.action == "set" or not environ.get(self.variable, ""):
+            environ[self.variable] = self.__eval("${value}")
+        elif self.action == "prepend":
+            environ[self.variable] = self.__eval("${value}:%s" % environ[self.variable])
+        else:
+            environ[self.variable] = self.__eval("%s:${value}" % environ[self.variable])
+
+    def __eval(self, template):
+        return string.Template(template).safe_substitute(
+            variable=self.variable,
+            value=self.value,
+        )
+
+    def to_statements(self):
+        if self.action == "set":
+            template = '''${variable}="${value}"'''
+        elif self.action == "prepend":
+            template = '''${variable}="${value}:$$${variable}"'''
+        else:
+            template = '''${variable}="$$${variable}:${value}"'''
+        return [
+            self.__eval(template),
+            "export %s" % self.variable
+        ]
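+    # A sketch of the statements emitted for a prepend action (KEG_ROOT
+    # already substituted into the value):
+    #
+    #     >>> action = EnvAction('/cellar/samtools/1.1',
+    #     ...                    {'action': 'prepend', 'variable': 'PATH',
+    #     ...                     'value': '/cellar/samtools/1.1/bin'})
+    #     >>> action.to_statements()
+    #     ['PATH="/cellar/samtools/1.1/bin:$PATH"', 'export PATH']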
+
+
+@contextlib.contextmanager
+def brew_head_at_version(recipe_context, package, version):
+    commit = commit_for_version(recipe_context, package, version)
+    tap_path = recipe_context.tap_path
+    with brew_head_at_commit(commit, tap_path):
+        yield
+
+
+@contextlib.contextmanager
+def brew_head_at_commit(commit, tap_path):
+    try:
+        os.chdir(tap_path)
+        current_commit = git_execute(["rev-parse", "HEAD"]).strip()
+        try:
+            git_execute(["checkout", commit])
+            yield
+        finally:
+            git_execute(["checkout", current_commit])
+    finally:
+        # TODO: restore chdir - or better yet just don't chdir
+        # shouldn't be needed.
+        pass
+
+
+def git_execute(args):
+    cmds = ["git"] + args
+    return execute(cmds)
+
+
+def execute(cmds, env=None):
+    subprocess_kwds = dict(
+        shell=False,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+    if env:
+        subprocess_kwds["env"] = env
+    p = subprocess.Popen(cmds, **subprocess_kwds)
+    global VERBOSE
+    stdout, stderr = p.communicate()
+    if p.returncode != 0:
+        raise CommandLineException(" ".join(cmds), stdout, stderr)
+    if VERBOSE:
+        print(stdout)
+    return stdout
+
+
+def brew_deps(package):
+    args = ["deps"]
+    args.extend(BREW_ARGS)
+    args.append(package)
+    stdout = brew_execute(args)
+    return [p.strip() for p in stdout.split("\n") if p]
+
+
+def brew_info(recipe):
+    info_json = brew_execute(["info", "--json=v1", recipe])
+    info = json.loads(info_json)[0]
+    info.update(extended_brew_info(recipe))
+    return info
+
+
+def extended_brew_info(recipe):
+    # Extract more info from non-json variant. JSON variant should
+    # include this in a backward compatible way (TODO: Open PR).
+    raw_info = brew_execute(["info", recipe])
+    extra_info = dict(
+        from_url=None,
+        build_dependencies=[],
+        required_dependencies=[],
+        recommended_dependencies=[],
+        optional_dependencies=[],
+    )
+
+    for line in raw_info.split("\n"):
+        if line.startswith("From: "):
+            extra_info["from_url"] = line[len("From: "):].strip()
+        for dep_type in ["Build", "Required", "Recommended", "Optional"]:
+            if line.startswith("%s: " % dep_type):
+                key = "%s_dependencies" % dep_type.lower()
+                raw_val = line[len("%s: " % dep_type):]
+                extra_info[key].extend(raw_val.split(", "))
+    return extra_info
+
+
+def brew_versions_info(package, tap_path):
+
+    def versioned(recipe_path):
+        if not os.path.isabs(recipe_path):
+            recipe_path = os.path.join(os.getcwd(), recipe_path)
+        # Dependencies in the same repository should be versioned,
+        # core dependencies (presumably in base homebrew) are not
+        # versioned.
+        return tap_path in recipe_path
+
+    # TODO: Also use tags.
+    stdout = brew_execute(["versions", package])
+    version_parts = [l for l in stdout.split("\n") if l and "git checkout" in l]
+    version_parts = map(lambda l: WHITESPACE_PATTERN.split(l), version_parts)
+    info = [(p[0], p[3], versioned(p[4])) for p in version_parts]
+    return info
+
+
+def __action(sys):
+    script_name = os.path.basename(sys.argv[0])
+    if script_name.startswith("brew-"):
+        return script_name[len("brew-"):]
+    else:
+        return None
+
+
+def recipe_cellar_path(cellar_path, recipe, version):
+    recipe_base = recipe.split("/")[-1]
+    recipe_base_path = os.path.join(cellar_path, recipe_base, version)
+    revision_paths = glob.glob(recipe_base_path + "_*")
+    if revision_paths:
+        revisions = map(lambda x: int(x.rsplit("_", 1)[-1]), revision_paths)
+        max_revision = max(revisions)
+        recipe_path = "%s_%d" % (recipe_base_path, max_revision)
+    else:
+        recipe_path = recipe_base_path
+    return recipe_path
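+# A sketch of revision selection, assuming a cellar containing both
+# "samtools/1.1" and "samtools/1.1_2":
+#
+#     recipe_cellar_path("/cellar", "homebrew/science/samtools", "1.1")
+#     # -> "/cellar/samtools/1.1_2" (the highest revision wins)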
+
+
+def ensure_brew_on_path(args):
+    brew_on_path = which("brew")
+    if brew_on_path:
+        brew_on_path = os.path.abspath(brew_on_path)
+
+    def ensure_on_path(brew):
+        if brew != brew_on_path:
+            os.environ["PATH"] = "%s:%s" % (os.path.dirname(brew), os.environ["PATH"])
+
+    default_brew_path = os.path.join(DEFAULT_HOMEBREW_ROOT, "bin", "brew")
+    if args and args.brew:
+        user_brew_path = os.path.abspath(args.brew)
+        ensure_on_path(user_brew_path)
+    elif brew_on_path:
+        return brew_on_path
+    elif os.path.exists(default_brew_path):
+        ensure_on_path(default_brew_path)
+    else:
+        raise Exception(NO_BREW_ERROR_MESSAGE)
+
+
+def which(file):
+    # http://stackoverflow.com/questions/5226958/which-equivalent-function-in-python
+    for path in os.environ["PATH"].split(":"):
+        if os.path.exists(path + "/" + file):
+            return path + "/" + file
+
+    return None
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/tools/deps/brew_util.py b/lib/galaxy/tools/deps/brew_util.py
new file mode 100644
index 0000000..b970af7
--- /dev/null
+++ b/lib/galaxy/tools/deps/brew_util.py
@@ -0,0 +1,39 @@
+""" brew_exts defines generic extensions to Homebrew; this module builds on
+those abstractions and provides Galaxy-specific functionality not useful to
+the brew external commands.
+"""
+from ..deps import brew_exts
+
+DEFAULT_TAP = "homebrew/science"
+
+
+class HomebrewRecipe(object):
+
+    def __init__(self, recipe, version, tap):
+        self.recipe = recipe
+        self.version = version
+        self.tap = tap
+
+
+def requirements_to_recipes(requirements):
+    return filter(None, map(requirement_to_recipe, requirements))
+
+
+def requirement_to_recipe(requirement):
+    if requirement.type != "package":
+        return None
+    # TODO: Allow requirements to annotate optional brew-specific
+    # adaptations.
+    recipe_name = requirement.name
+    recipe_version = requirement.version
+    return HomebrewRecipe(recipe_name, recipe_version, tap=DEFAULT_TAP)
+
+
+def requirements_to_recipe_contexts(requirements, brew_context):
+    def to_recipe_context(homebrew_recipe):
+        return brew_exts.RecipeContext(
+            homebrew_recipe.recipe,
+            homebrew_recipe.version,
+            brew_context
+        )
+    return map(to_recipe_context, requirements_to_recipes(requirements))
diff --git a/lib/galaxy/tools/deps/commands.py b/lib/galaxy/tools/deps/commands.py
new file mode 100644
index 0000000..02a3bf8
--- /dev/null
+++ b/lib/galaxy/tools/deps/commands.py
@@ -0,0 +1,155 @@
+"""Generic I/O and shell processing code used by Galaxy tool dependencies."""
+import os
+import subprocess
+import sys as _sys
+
+from six.moves import shlex_quote
+
+from galaxy.util import which
+
+STDOUT_INDICATOR = "-"
+
+
+def redirecting_io(sys=_sys):
+    """Predicate to determine if we are redirecting I/O in process."""
+    assert sys is not None
+    return not hasattr(sys.stdout, "fileno")
+
+
+def redirect_aware_communicate(p, sys=_sys):
+    """Variant of process.communicate that works with in process I/O redirection."""
+    assert sys is not None
+    out, err = p.communicate()
+    if redirecting_io(sys=sys):
+        if out:
+            sys.stdout.write(out)
+            out = None
+        if err:
+            sys.stderr.write(err)
+            err = None
+    return out, err
+
+
+def shell(cmds, env=None, **kwds):
+    """Run shell commands with `shell_process` and wait."""
+    sys = kwds.get("sys", _sys)
+    assert sys is not None
+    p = shell_process(cmds, env, **kwds)
+    if redirecting_io(sys=sys):
+        redirect_aware_communicate(p, sys=sys)
+        exit = p.returncode
+        return exit
+    else:
+        return p.wait()
+
+
+def shell_process(cmds, env=None, **kwds):
+    """A high-level method wrapping subprocess.Popen.
+
+    Handles details such as environment extension and in process I/O
+    redirection.
+    """
+    sys = kwds.get("sys", _sys)
+    popen_kwds = dict(
+        shell=True,
+    )
+    if kwds.get("stdout", None) is None and redirecting_io(sys=sys):
+        popen_kwds["stdout"] = subprocess.PIPE
+    if kwds.get("stderr", None) is None and redirecting_io(sys=sys):
+        popen_kwds["stderr"] = subprocess.PIPE
+
+    popen_kwds.update(**kwds)
+    if env:
+        new_env = os.environ.copy()
+        new_env.update(env)
+        popen_kwds["env"] = new_env
+    p = subprocess.Popen(cmds, **popen_kwds)
+    return p
+
+
+def execute(cmds):
+    """Execute commands and throw an exception on a non-zero exit.
+
+    Return the standard output if the commands are successful
+    """
+    return _wait(cmds, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+
+
+def argv_to_str(command_argv, quote=True):
+    """Convert an argv command list to a string for shell subprocess.
+
+    If None appears in the command list it is simply excluded.
+
+    Arguments are quoted with shlex_quote. That said, this method is not
+    meant to be used in security-critical paths of code and should not be
+    relied on to sanitize untrusted input.
+    """
+    map_func = shlex_quote if quote else lambda x: x
+    return " ".join([map_func(c) for c in command_argv if c is not None])
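+# A quick sketch of the quoting behavior (None entries are dropped):
+#
+#     >>> argv_to_str(["echo", "a b", None])
+#     "echo 'a b'"
+#     >>> argv_to_str(["echo", "a b"], quote=False)
+#     'echo a b'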
+
+
+def _wait(cmds, **popen_kwds):
+    p = subprocess.Popen(cmds, **popen_kwds)
+    stdout, stderr = p.communicate()
+    if p.returncode != 0:
+        raise CommandLineException(argv_to_str(cmds), stdout, stderr, p.returncode)
+    return stdout
+
+
+def download_command(url, to=STDOUT_INDICATOR, quote_url=False):
+    """Build a command line to download a URL.
+
+    By default the URL will be downloaded to standard output but a specific
+    file can be specified with the `to` argument.
+    """
+    if quote_url:
+        url = "'%s'" % url
+        if to != STDOUT_INDICATOR:
+            to = "'%s'" % to
+    if which("wget"):
+        download_cmd = ["wget", "-q"]
+        if to == STDOUT_INDICATOR:
+            download_cmd += ["-O", STDOUT_INDICATOR, url]
+        else:
+            download_cmd += ["--recursive", "-O", to, url]
+    else:
+        download_cmd = ["curl", "-L", url]
+        if to != STDOUT_INDICATOR:
+            download_cmd += ["-o", to]
+    return download_cmd
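+# A sketch of the two code paths (which tool is chosen depends on what is
+# found on PATH):
+#
+#     download_command("http://example.org/f.tar.gz", to="/tmp/f.tar.gz")
+#     # with wget -> ['wget', '-q', '--recursive', '-O', '/tmp/f.tar.gz', 'http://example.org/f.tar.gz']
+#     # otherwise -> ['curl', '-L', 'http://example.org/f.tar.gz', '-o', '/tmp/f.tar.gz']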
+
+
+class CommandLineException(Exception):
+    """An exception indicating a non-zero command-line exit."""
+
+    def __init__(self, command, stdout, stderr, returncode):
+        """Construct a CommandLineException from command and standard I/O."""
+        self.command = command
+        self.stdout = stdout
+        self.stderr = stderr
+        self.returncode = returncode
+        self.message = ("Failed to execute command-line %s, stderr was:\n"
+                        "-------->>begin stderr<<--------\n"
+                        "%s\n"
+                        "-------->>end stderr<<--------\n"
+                        "-------->>begin stdout<<--------\n"
+                        "%s\n"
+                        "-------->>end stdout<<--------\n"
+                        ) % (command, stderr, stdout)
+
+    def __str__(self):
+        """Return a verbose error message indicating the command problem."""
+        return self.message
+
+
+__all__ = (
+    'argv_to_str',
+    'CommandLineException',
+    'download_command',
+    'execute',
+    'redirect_aware_communicate',
+    'redirecting_io',
+    'shell',
+    'shell_process',
+    'which',
+)
diff --git a/lib/galaxy/tools/deps/conda_compat.py b/lib/galaxy/tools/deps/conda_compat.py
new file mode 100644
index 0000000..c45573e
--- /dev/null
+++ b/lib/galaxy/tools/deps/conda_compat.py
@@ -0,0 +1,112 @@
+"""Compatibility layer for conda_build/anaconda_verify when Galaxy/galaxy-lib is not installed through Conda.
+
+In general there are high-quality utilities for Conda building and parsing that should
+be used when available, but they are distributed only through conda channels and not on
+PyPI. This module serves as a PyPI-capable interface to these utilities.
+"""
+import collections
+import os
+
+import yaml
+
+try:
+    from conda_build.metadata import MetaData
+except ImportError:
+    MetaData = None
+
+try:
+    from anaconda_verify.recipe import parse, render_jinja2
+except ImportError:
+    render_jinja2 = None
+    parse = None
+
+
+class _Memoized(object):
+
+    def __init__(self, func):
+        self.func = func
+        self.cache = {}
+
+    def __call__(self, *args):
+        if not isinstance(args, collections.Hashable):
+            # uncacheable. a list, for instance.
+            # better to not cache than blow up.
+            return self.func(*args)
+        if args in self.cache:
+            return self.cache[args]
+        else:
+            value = self.func(*args)
+            self.cache[args] = value
+            return value
+
+
+def _parse(data, cfg):
+    """Parse metadata YAML."""
+    assert cfg is None, "Conda utilities for evaluating cfg are not available."
+    return dict(yamlize(data))
+
+
+def _render_jinja2(recipe_dir):
+    """Evaluate Conda recipe as a jinja template."""
+    try:
+        import jinja2
+    except ImportError:
+        raise Exception("Failed to import jinja2 for evaluating Conda recipe templates.")
+
+    loaders = [jinja2.FileSystemLoader(recipe_dir)]
+    env = jinja2.Environment(loader=jinja2.ChoiceLoader(loaders))
+    template = env.get_or_select_template('meta.yaml')
+    return template.render(environment=env)
+
+
+@_Memoized
+def yamlize(data):
+    res = yaml.load(data)
+    # ensure the result is a dict
+    if res is None:
+        res = {}
+    return res
+
+
+if render_jinja2 is None:
+    render_jinja2 = _render_jinja2
+
+if parse is None:
+    parse = _parse
+
+
+def raw_metadata(recipe_dir):
+    """Evaluate Conda template if needed and return raw metadata for supplied recipe directory."""
+    meta_path = os.path.join(recipe_dir, 'meta.yaml')
+    with open(meta_path, 'rb') as fi:
+        data = fi.read()
+        if b'{{' in data:
+            data = render_jinja2(recipe_dir)
+    meta = parse(data, None)
+    return meta
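+# For a recipe directory whose meta.yaml contains, e.g.:
+#
+#     package:
+#       name: samtools
+#       version: "1.3"
+#
+# raw_metadata(recipe_dir) returns
+# {'package': {'name': 'samtools', 'version': '1.3'}}.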
+
+
+class _MetaData(object):
+
+    def __init__(self, input_dir):
+        self.meta = raw_metadata(input_dir)
+
+    def get_value(self, field, default=None):
+        """Get nested field value, or the supplied default if not present."""
+        section, key = field.split('/')
+        submeta = self.meta.get(section)
+        if submeta is None:
+            submeta = {}
+        res = submeta.get(key)
+        if res is None:
+            res = default
+        return res
+
+
+if MetaData is None:
+    MetaData = _MetaData
+
+__all__ = (
+    "MetaData",
+    "raw_metadata",
+)
diff --git a/lib/galaxy/tools/deps/conda_util.py b/lib/galaxy/tools/deps/conda_util.py
new file mode 100644
index 0000000..47f1b09
--- /dev/null
+++ b/lib/galaxy/tools/deps/conda_util.py
@@ -0,0 +1,533 @@
+import functools
+import hashlib
+import json
+import logging
+import os
+import re
+import shutil
+import tempfile
+
+from distutils.version import LooseVersion
+from sys import platform as _platform
+
+import six
+import yaml
+
+from ..deps import commands
+from ..deps import installable
+
+log = logging.getLogger(__name__)
+
+# Not sure there are security concerns; let's just fail fast if we are going
+# to break shell commands we are building.
+SHELL_UNSAFE_PATTERN = re.compile(r"[\s\"']")
+
+IS_OS_X = _platform == "darwin"
+
+# BSD 3-clause
+CONDA_LICENSE = "http://docs.continuum.io/anaconda/eula"
+VERSIONED_ENV_DIR_NAME = re.compile(r"__(.*)@(.*)")
+UNVERSIONED_ENV_DIR_NAME = re.compile(r"__(.*)@_uv_")
+USE_PATH_EXEC_DEFAULT = False
+CONDA_VERSION = "3.19.3"
+
+
+def conda_link():
+    if IS_OS_X:
+        url = "https://repo.continuum.io/miniconda/Miniconda2-4.0.5-MacOSX-x86_64.sh"
+    else:
+        url = "https://repo.continuum.io/miniconda/Miniconda2-4.0.5-Linux-x86_64.sh"
+    return url
+
+
+def find_conda_prefix(conda_prefix=None):
+    """ If supplied conda_prefix is not set, default to the default location
+    for Miniconda installs.
+    """
+    if conda_prefix is None:
+        return os.path.join(os.path.expanduser("~"), "miniconda2")
+    return conda_prefix
+
+
+class CondaContext(installable.InstallableContext):
+    installable_description = "Conda"
+
+    def __init__(self, conda_prefix=None, conda_exec=None,
+                 shell_exec=None, debug=False, ensure_channels='',
+                 condarc_override=None, use_path_exec=USE_PATH_EXEC_DEFAULT, copy_dependencies=False):
+        self.condarc_override = condarc_override
+        if not conda_exec and use_path_exec:
+            conda_exec = commands.which("conda")
+        if conda_exec:
+            conda_exec = os.path.normpath(conda_exec)
+        self.conda_exec = conda_exec
+        self.debug = debug
+        self.shell_exec = shell_exec or commands.shell
+        self.copy_dependencies = copy_dependencies
+
+        if conda_prefix is None:
+            info = self.conda_info()
+            if info and "default_prefix" in info:
+                conda_prefix = info["default_prefix"]
+        if conda_prefix is None:
+            conda_prefix = find_conda_prefix(conda_prefix)
+
+        self.conda_prefix = conda_prefix
+        if conda_exec is None:
+            self.conda_exec = self._bin("conda")
+        if ensure_channels:
+            if not isinstance(ensure_channels, list):
+                ensure_channels = [c for c in ensure_channels.split(",") if c]
+        else:
+            ensure_channels = None
+        self.ensure_channels = ensure_channels
+        self.ensured_channels = False
+
+    def ensure_channels_configured(self):
+        if not self.ensured_channels:
+            self.ensured_channels = True
+
+            changed = False
+            conda_conf = self.load_condarc()
+            if "channels" not in conda_conf:
+                conda_conf["channels"] = []
+            channels = conda_conf["channels"]
+            for channel in self.ensure_channels:
+                if channel not in channels:
+                    changed = True
+                    channels.append(channel)
+
+            if changed:
+                self.save_condarc(conda_conf)
+
+    def conda_info(self):
+        if self.conda_exec is not None:
+            info_out = commands.execute([self.conda_exec, "info", "--json"])
+            info = json.loads(info_out)
+            return info
+        else:
+            return None
+
+    def is_conda_installed(self):
+        """
+        Check if conda_exec exists
+        """
+        return os.path.exists(self.conda_exec)
+
+    def can_install_conda(self):
+        """
+        If conda_exec is set to a path outside of conda_prefix,
+        there is no use installing conda into conda_prefix, since it can't be used by Galaxy.
+        If conda_exec equals conda_prefix/bin/conda, we can install conda if either conda_prefix
+        does not exist or is empty.
+        """
+        conda_exec = os.path.abspath(self.conda_exec)
+        conda_prefix_plus_exec = os.path.abspath(os.path.join(self.conda_prefix, 'bin/conda'))
+        if conda_exec == conda_prefix_plus_exec:
+            if not os.path.exists(self.conda_prefix):
+                return True
+            elif os.listdir(self.conda_prefix) == []:
+                os.rmdir(self.conda_prefix)  # Conda's install script fails if path exists (even if empty).
+                return True
+            else:
+                log.warning("Cannot install Conda because conda_prefix '%s' exists and is not empty.",
+                            self.conda_prefix)
+                return False
+        else:
+            log.warning("Skipping installation of Conda into conda_prefix '%s', "
+                        "since conda_exec '%s' is set to a path outside of conda_prefix.",
+                        self.conda_prefix, self.conda_exec)
+            return False
+
+    def load_condarc(self):
+        condarc = self.condarc
+        if os.path.exists(condarc):
+            with open(condarc, "r") as f:
+                return yaml.safe_load(f)
+        else:
+            return {"channels": ["defaults"]}
+
+    def save_condarc(self, conf):
+        condarc = self.condarc
+        try:
+            with open(condarc, "w") as f:
+                return yaml.safe_dump(conf, f)
+        except IOError:
+            template = ("Failed to write to path [%s] while attempting to update conda configuration, "
+                        "please update the configuration to override the condarc location or "
+                        "grant this application write access to the parent directory.")
+            message = template % condarc
+            raise Exception(message)
+
+    @property
+    def condarc(self):
+        if self.condarc_override:
+            return self.condarc_override
+        else:
+            home = os.path.expanduser("~")
+            return os.path.join(home, ".condarc")
+
+    def command(self, operation, args):
+        if isinstance(args, list):
+            args = " ".join(args)
+        conda_prefix = self.conda_exec
+        if self.debug:
+            conda_prefix += " --debug"
+        return "%s %s %s" % (conda_prefix, operation, args)
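+    # For example, command("install", ["-y", "numpy"]) yields the string
+    # "<conda_exec> install -y numpy", with " --debug" appended to the
+    # executable when debug is enabled.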
+
+    def exec_command(self, operation, args):
+        command = self.command(operation, args)
+        env = {'HOME': self.conda_prefix}  # We don't want to pollute ~/.conda, which may not even be writable
+        condarc_override = self.condarc_override
+        if condarc_override:
+            env["CONDARC"] = condarc_override
+        log.debug("Executing command: %s", command)
+        try:
+            return self.shell_exec(command, env=env)
+        except commands.CommandLineException as e:
+            log.warning(e)
+            return e.returncode
+
+    def exec_create(self, args):
+        create_base_args = [
+            "-y"
+        ]
+        create_base_args.extend(args)
+        return self.exec_command("create", create_base_args)
+
+    def exec_remove(self, args):
+        remove_base_args = [
+            "remove",
+            "-y",
+            "--name"
+        ]
+        remove_base_args.extend(args)
+        return self.exec_command("env", remove_base_args)
+
+    def exec_install(self, args):
+        install_base_args = [
+            "-y"
+        ]
+        install_base_args.extend(args)
+        return self.exec_command("install", install_base_args)
+
+    def exec_clean(self, args=[]):
+        """
+        Clean up after conda installation.
+        """
+        clean_base_args = [
+            "--tarballs",
+            "-y"
+        ]
+        clean_base_args.extend(args)
+        return self.exec_command("clean", clean_base_args)
+
+    def export_list(self, name, path):
+        return self.exec_command("list", [
+            "--name", name,
+            "--export", ">", path
+        ])
+
+    def env_path(self, env_name):
+        return os.path.join(self.envs_path, env_name)
+
+    @property
+    def envs_path(self):
+        return os.path.join(self.conda_prefix, "envs")
+
+    def has_env(self, env_name):
+        env_path = self.env_path(env_name)
+        return os.path.isdir(env_path)
+
+    @property
+    def deactivate(self):
+        return self._bin("deactivate")
+
+    @property
+    def activate(self):
+        return self._bin("activate")
+
+    def is_installed(self):
+        return self.is_conda_installed()
+
+    def can_install(self):
+        return self.can_install_conda()
+
+    @property
+    def parent_path(self):
+        return os.path.dirname(os.path.abspath(self.conda_prefix))
+
+    def _bin(self, name):
+        return os.path.join(self.conda_prefix, "bin", name)
+
+
+def installed_conda_targets(conda_context):
+    envs_path = conda_context.envs_path
+    dir_contents = os.listdir(envs_path) if os.path.exists(envs_path) else []
+    for name in dir_contents:
+        versioned_match = VERSIONED_ENV_DIR_NAME.match(name)
+        if versioned_match:
+            yield CondaTarget(versioned_match.group(1), versioned_match.group(2))
+
+        unversioned_match = UNVERSIONED_ENV_DIR_NAME.match(name)
+        if unversioned_match:
+            yield CondaTarget(unversioned_match.group(1))
+
+
+@six.python_2_unicode_compatible
+class CondaTarget(object):
+
+    def __init__(self, package, version=None, channel=None):
+        if SHELL_UNSAFE_PATTERN.search(package) is not None:
+            raise ValueError("Invalid package [%s] encountered." % package)
+        self.package = package
+        if version and SHELL_UNSAFE_PATTERN.search(version) is not None:
+            raise ValueError("Invalid version [%s] encountered." % version)
+        self.version = version
+        if channel and SHELL_UNSAFE_PATTERN.search(channel) is not None:
+            raise ValueError("Invalid channel [%s] encountered." % channel)
+        self.channel = channel
+
+    def __str__(self):
+        if self.version is not None:
+            attributes = "%s,version=%s" % (self.package, self.version)
+        else:
+            attributes = "%s,unversioned" % self.package
+
+        if self.channel:
+            attributes = "%s,channel=%s" % (attributes, self.channel)
+
+        return "CondaTarget[%s]" % attributes
+
+    __repr__ = __str__
+
+    @property
+    def package_specifier(self):
+        """ Return a package specifier as consumed by conda install/create.
+        """
+        if self.version:
+            return "%s=%s" % (self.package, self.version)
+        else:
+            return self.package
+
+    @property
+    def install_environment(self):
+        """ The dependency resolution and installation frameworks will
+        expect each target to be installed in its own environment with
+        a fixed and predictable name given package and version.
+        """
+        if self.version:
+            return "__%s@%s" % (self.package, self.version)
+        else:
+            return "__%s at _uv_" % (self.package)
+
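+    # Naming sketch (values illustrative):
+    #   CondaTarget("samtools", version="1.3.1").package_specifier
+    #     -> "samtools=1.3.1"
+    #   CondaTarget("samtools", version="1.3.1").install_environment
+    #     -> "__samtools@1.3.1"
+    #   CondaTarget("samtools").install_environment
+    #     -> "__samtools@_uv_"
+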
+    def __hash__(self):
+        return hash((self.package, self.version, self.channel))
+
+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            return (self.package, self.version, self.channel) == (other.package, other.version, other.channel)
+        return False
+
+    def __ne__(self, other):
+        return not (self == other)
+
+
+def hash_conda_packages(conda_packages, conda_target=None):
+    """ Produce a unique hash on supplied packages.
+    TODO: Ideally we would do this in such a way that preserved environments.
+    """
+    h = hashlib.new('sha256')
+    for conda_package in conda_packages:
+        h.update(conda_package.install_environment)
+    return h.hexdigest()
+
+
+# Shelling out makes sense for planemo; within Galaxy these commands
+# should just be executed as Python.
+def install_conda(conda_context=None):
+    conda_context = _ensure_conda_context(conda_context)
+    f, script_path = tempfile.mkstemp(suffix=".sh", prefix="conda_install")
+    os.close(f)
+    download_cmd = " ".join(commands.download_command(conda_link(), to=script_path, quote_url=True))
+    install_cmd = "bash '%s' -b -p '%s'" % (script_path, conda_context.conda_prefix)
+    fix_version_cmd = "%s install -y -q conda=%s " % (os.path.join(conda_context.conda_prefix, 'bin/conda'), CONDA_VERSION)
+    full_command = "%s && %s && %s" % (download_cmd, install_cmd, fix_version_cmd)
+    try:
+        return conda_context.shell_exec(full_command)
+    finally:
+        if os.path.exists(script_path):
+            os.remove(script_path)
+
+
+def install_conda_target(conda_target, conda_context=None):
+    """ Install specified target into a its own environment.
+    """
+    conda_context = _ensure_conda_context(conda_context)
+    conda_context.ensure_channels_configured()
+    create_args = [
+        "--name", conda_target.install_environment,  # enviornment for package
+        conda_target.package_specifier,
+    ]
+    return conda_context.exec_create(create_args)
+
+
+def cleanup_failed_install(conda_target, conda_context=None):
+    conda_context = _ensure_conda_context(conda_context)
+    if conda_context.has_env(conda_target.install_environment):
+        conda_context.exec_remove([conda_target.install_environment])
+
+
+def best_search_result(conda_target, conda_context=None, channels_override=None):
+    """Find best "conda search" result for specified target.
+
+    Return ``(None, None)`` if no results match.
+    """
+    conda_context = _ensure_conda_context(conda_context)
+    if not channels_override:
+        conda_context.ensure_channels_configured()
+
+    search_cmd = [conda_context.conda_exec, "search", "--full-name", "--json"]
+    if channels_override:
+        search_cmd.append("--override-channels")
+        for channel in channels_override:
+            search_cmd.extend(["--channel", channel])
+    search_cmd.append(conda_target.package)
+    res = commands.execute(search_cmd)
+    hits = json.loads(res).get(conda_target.package, [])
+    hits = sorted(hits, key=lambda hit: LooseVersion(hit['version']), reverse=True)
+
+    if len(hits) == 0:
+        return (None, None)
+
+    best_result = (hits[0], False)
+
+    for hit in hits:
+        if is_search_hit_exact(conda_target, hit):
+            best_result = (hit, True)
+            break
+
+    return best_result
+
+
+def is_search_hit_exact(conda_target, search_hit):
+    target_version = conda_target.version
+    # It'd be nice to make a requested version of 1.0 match an available
+    # version of 1.0.3 or something like that.
+    return not target_version or search_hit['version'] == target_version
+
+
+def is_target_available(conda_target, conda_context=None, channels_override=None):
+    """Check if a specified target is available for installation.
+
+    If the package name exists return ``True`` (the ``bool``). If in addition
+    the version matches exactly return "exact" (a string). Otherwise return
+    ``False``.
+    """
+    (best_hit, exact) = best_search_result(conda_target, conda_context, channels_override)
+    if best_hit:
+        return 'exact' if exact else True
+    else:
+        return False
+
+
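+# Return-value sketch (hypothetical search results): if the requested
+# version of "samtools" is found this returns 'exact'; if only other
+# versions exist it returns True; if the package is unknown, False.
+
+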
+def is_conda_target_installed(conda_target, conda_context=None):
+    conda_context = _ensure_conda_context(conda_context)
+    return conda_context.has_env(conda_target.install_environment)
+
+
+def filter_installed_targets(conda_targets, conda_context=None):
+    conda_context = _ensure_conda_context(conda_context)
+    installed = functools.partial(is_conda_target_installed,
+                                  conda_context=conda_context)
+    return list(filter(installed, conda_targets))
+
+
+def build_isolated_environment(
+    conda_packages,
+    path=None,
+    copy=False,
+    conda_context=None,
+):
+    """ Build a new environment (or reuse an existing one from hashes)
+    for specified conda packages.
+    """
+    if not isinstance(conda_packages, list):
+        conda_packages = [conda_packages]
+
+    # Lots we could do in here, hashing, checking revisions, etc...
+    conda_context = _ensure_conda_context(conda_context)
+    # Create the temporary directory before entering the try block so the
+    # finally clause can always clean it up; avoid shadowing builtin hash().
+    packages_hash = hash_conda_packages(conda_packages)
+    tempdir = tempfile.mkdtemp(prefix="jobdeps", suffix=packages_hash)
+    try:
+        tempdir_name = os.path.basename(tempdir)
+
+        export_paths = []
+        for conda_package in conda_packages:
+            name = conda_package.install_environment
+            export_path = os.path.join(tempdir, name)
+            conda_context.export_list(
+                name,
+                export_path
+            )
+            export_paths.append(export_path)
+        create_args = ["--unknown", "--offline"]
+        if path is None:
+            create_args.extend(["--name", tempdir_name])
+        else:
+            create_args.extend(["--prefix", path])
+
+        if copy:
+            create_args.append("--copy")
+        for export_path in export_paths:
+            create_args.extend([
+                "--file", export_path, ">", "/dev/null"
+            ])
+
+        if path is not None and os.path.exists(path):
+            exit_code = conda_context.exec_install(create_args)
+        else:
+            exit_code = conda_context.exec_create(create_args)
+
+        return (path or tempdir_name, exit_code)
+    finally:
+        conda_context.exec_clean()
+        shutil.rmtree(tempdir)
+
+
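+# Usage sketch (assumes the per-package conda environments already exist;
+# the path below is hypothetical):
+#
+#     env, exit_code = build_isolated_environment(
+#         CondaTarget("samtools", version="1.3.1"),
+#         path="/tmp/job_env",
+#         copy=True,
+#     )
+#
+# Each package environment is exported via `conda list --export` and the
+# exports are replayed into a single new environment with `conda create`.
+
+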
+def requirement_to_conda_targets(requirement, conda_context=None):
+    conda_target = None
+    if requirement.type == "package":
+        conda_target = CondaTarget(requirement.name,
+                                   version=requirement.version)
+    return conda_target
+
+
+def requirements_to_conda_targets(requirements, conda_context=None):
+    r_to_ct = functools.partial(requirement_to_conda_targets,
+                                conda_context=conda_context)
+    conda_targets = (r_to_ct(_) for _ in requirements)
+    return [c for c in conda_targets if c is not None]
+
+
+def _ensure_conda_context(conda_context):
+    if conda_context is None:
+        conda_context = CondaContext()
+    return conda_context
+
+
+__all__ = (
+    'CondaContext',
+    'CondaTarget',
+    'install_conda',
+    'install_conda_target',
+    'requirements_to_conda_targets',
+)
diff --git a/lib/galaxy/tools/deps/container_resolvers/__init__.py b/lib/galaxy/tools/deps/container_resolvers/__init__.py
new file mode 100644
index 0000000..c478bcc
--- /dev/null
+++ b/lib/galaxy/tools/deps/container_resolvers/__init__.py
@@ -0,0 +1,50 @@
+"""The module defines the abstract interface for resolving container images for tool execution."""
+from abc import (
+    ABCMeta,
+    abstractmethod,
+    abstractproperty,
+)
+
+from galaxy.util.dictifiable import Dictifiable
+
+
+class ContainerResolver(Dictifiable, object):
+    """Description of a technique for resolving container images for tool execution."""
+
+    # Keys for dictification.
+    dict_collection_visible_keys = ['resolver_type']
+
+    __metaclass__ = ABCMeta
+
+    def __init__(self, app_info=None, **kwds):
+        """Default initializer for ``ContainerResolver`` subclasses."""
+        self.app_info = app_info
+        self.resolver_kwds = kwds
+
+    def _get_config_option(self, key, default=None, config_prefix=None, **kwds):
+        """Look in resolver-specific settings for option and then fallback to
+        global settings.
+        """
+        global_key = "%s_%s" % (config_prefix, key)
+        if key in kwds:
+            return kwds.get(key)
+        elif self.app_info and hasattr(self.app_info, global_key):
+            return getattr(self.app_info, global_key)
+        else:
+            return default
+
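+    # Resolution-order sketch (names illustrative): for key "channels" with
+    # config_prefix "mulled", an explicitly passed kwd "channels" wins, then
+    # the global app setting "mulled_channels", then the supplied default.
+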
+    @abstractmethod
+    def resolve(self, tool_info):
+        """Find a container matching all supplied requirements for tool.
+
+        The supplied argument is a :class:`galaxy.tools.deps.containers.ToolInfo` description
+        of the tool and its requirements.
+        """
+
+    @abstractproperty
+    def resolver_type(self):
+        """Short label for the type of container resolution."""
+
+    def _container_type_enabled(self, container_description, enabled_container_types):
+        """Return a boolean indicating if the specified container type is enabled."""
+        return container_description.type in enabled_container_types
diff --git a/lib/galaxy/tools/deps/container_resolvers/explicit.py b/lib/galaxy/tools/deps/container_resolvers/explicit.py
new file mode 100644
index 0000000..851f0c1
--- /dev/null
+++ b/lib/galaxy/tools/deps/container_resolvers/explicit.py
@@ -0,0 +1,26 @@
+"""This module describes the :class:`ExplicitContainerResolver` ContainerResolver plugin."""
+import logging
+
+from ..container_resolvers import (
+    ContainerResolver,
+)
+
+log = logging.getLogger(__name__)
+
+
+class ExplicitContainerResolver(ContainerResolver):
+    """Find explicit containers referenced in the tool description (e.g. tool XML file) if present."""
+
+    resolver_type = "explicit"
+
+    def resolve(self, enabled_container_types, tool_info):
+        """Find a container explicitly mentioned in the tool description.
+
+        This ignores the tool requirements and assumes the tool author crafted
+        a correct container.
+        """
+        for container_description in tool_info.container_descriptions:
+            if self._container_type_enabled(container_description, enabled_container_types):
+                return container_description
+
+        return None
diff --git a/lib/galaxy/tools/deps/container_resolvers/mulled.py b/lib/galaxy/tools/deps/container_resolvers/mulled.py
new file mode 100644
index 0000000..dcf15a0
--- /dev/null
+++ b/lib/galaxy/tools/deps/container_resolvers/mulled.py
@@ -0,0 +1,207 @@
+"""This module describes the :class:`MulledContainerResolver` ContainerResolver plugin."""
+
+import collections
+import logging
+
+from ..container_resolvers import (
+    ContainerResolver,
+)
+from ..docker_util import build_docker_images_command
+from ..mulled.mulled_build import (
+    check_output,
+    DEFAULT_CHANNELS,
+    ensure_installed,
+    InvolucroContext,
+    mull_targets,
+)
+from ..mulled.mulled_build_tool import requirements_to_mulled_targets
+from ..mulled.util import (
+    image_name,
+    mulled_tags_for,
+    split_tag,
+)
+from ..requirements import ContainerDescription
+
+log = logging.getLogger(__name__)
+
+
+CachedMulledImageSingleTarget = collections.namedtuple("CachedMulledImageSingleTarget", ["package_name", "version", "build", "image_identifier"])
+CachedMulledImageMultiTarget = collections.namedtuple("CachedMulledImageMultiTarget", ["hash", "image_identifier"])
+
+CachedMulledImageSingleTarget.multi_target = False
+CachedMulledImageMultiTarget.multi_target = True
+
+
+def list_cached_mulled_images(namespace=None):
+    command = build_docker_images_command(truncate=True, sudo_docker=False)
+    command = "%s | tail -n +2 | tr -s ' ' | cut -d' ' -f1,2" % command
+    images_and_versions = check_output(command)
+    name_filter = get_filter(namespace)
+
+    def output_line_to_image(line):
+        image_name, version = line.split(" ", 1)
+        identifier = "%s:%s" % (image_name, version)
+        url, namespace, package_description = image_name.split("/")
+
+        if package_description.startswith("mulled-v1-"):
+            hash = package_description
+            image = CachedMulledImageMultiTarget(hash, identifier)
+        else:
+            build = None
+            if not version or version == "latest":
+                version = None
+
+            if version and "--" in version:
+                version, build = split_tag(version)
+
+            image = CachedMulledImageSingleTarget(image_name, version, build, identifier)
+
+        return image
+
+    return [output_line_to_image(_) for _ in filter(name_filter, images_and_versions.splitlines())]
+
+
+def get_filter(namespace):
+    prefix = "quay.io/" if namespace is None else "quay.io/%s" % namespace
+    return lambda name: name.startswith(prefix) and name.count("/") == 2
+
+
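+# For example (illustrative values), get_filter("biocontainers") keeps image
+# names such as "quay.io/biocontainers/samtools" and drops entries from other
+# registries or namespaces.
+
+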
+def cached_container_description(targets, namespace):
+    if len(targets) == 0:
+        return None
+
+    cached_images = list_cached_mulled_images(namespace)
+    image = None
+    if len(targets) == 1:
+        target = targets[0]
+        for cached_image in cached_images:
+            if cached_image.multi_target:
+                continue
+            if cached_image.package_name != target.package_name:
+                continue
+            if not target.version or target.version == cached_image.version:
+                image = cached_image
+                break
+    else:
+        name = image_name(targets)
+        for cached_image in cached_images:
+            if not cached_image.multi_target:
+                continue
+
+            if name == cached_image.hash:
+                image = cached_image
+                break
+
+    container = None
+    if image:
+        container = ContainerDescription(
+            image.image_identifier,
+            type="docker",
+        )
+
+    return container
+
+
+class CachedMulledContainerResolver(ContainerResolver):
+
+    resolver_type = "cached_mulled"
+
+    def __init__(self, app_info=None, namespace=None):
+        super(CachedMulledContainerResolver, self).__init__(app_info)
+        self.namespace = namespace
+
+    def resolve(self, enabled_container_types, tool_info):
+        targets = mulled_targets(tool_info)
+        return cached_container_description(targets, self.namespace)
+
+
+class MulledContainerResolver(ContainerResolver):
+    """Look for mulled images matching tool dependencies."""
+
+    resolver_type = "mulled"
+
+    def __init__(self, app_info=None, namespace="biocontainers"):
+        super(MulledContainerResolver, self).__init__(app_info)
+        self.namespace = namespace
+
+    def resolve(self, enabled_container_types, tool_info):
+        targets = mulled_targets(tool_info)
+        if len(targets) == 0:
+            return None
+
+        name = None
+
+        if len(targets) == 1:
+            target = targets[0]
+            target_version = target.version
+            tags = mulled_tags_for(self.namespace, target.package_name)
+
+            if not tags:
+                return None
+
+            if target_version:
+                for tag in tags:
+                    version, build = split_tag(tag)
+                    if version == target_version:
+                        name = "%s:%s--%s" % (target.package_name, version, build)
+                        break
+            else:
+                version, build = split_tag(tags[0])
+                name = "%s:%s--%s" % (target.package_name, version, build)
+        else:
+            base_image_name = image_name(targets)
+            tags = mulled_tags_for(self.namespace, base_image_name)
+            if tags:
+                name = "%s:%s" % (base_image_name, tags[0])
+
+        if name:
+            return ContainerDescription(
+                "quay.io/%s/%s" % (self.namespace, name),
+                type="docker",
+            )
+
+
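+# Tag-format sketch: single-target mulled tags are assumed to look like
+# "<version>--<build>", so split_tag("1.3.1--4") yields ("1.3.1", "4") and a
+# resolved image name looks like "quay.io/biocontainers/samtools:1.3.1--4".
+
+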
+class BuildMulledContainerResolver(ContainerResolver):
+    """Look for mulled images matching tool dependencies."""
+
+    resolver_type = "build_mulled"
+
+    def __init__(self, app_info=None, namespace="local", **kwds):
+        super(BuildMulledContainerResolver, self).__init__(app_info)
+        self._involucro_context_kwds = {
+            'involucro_bin': self._get_config_option("path", None, config_prefix="involucro")
+        }
+        self.namespace = namespace
+        self._mulled_kwds = {
+            'namespace': namespace,
+            'channels': self._get_config_option("channels", DEFAULT_CHANNELS, config_prefix="mulled"),
+        }
+        self.auto_init = self._get_config_option("auto_init", True, config_prefix="involucro")
+
+    def resolve(self, enabled_container_types, tool_info):
+        targets = mulled_targets(tool_info)
+        if len(targets) == 0:
+            return None
+
+        mull_targets(
+            targets,
+            involucro_context=self._get_involucro_context(),
+            **self._mulled_kwds
+        )
+        return cached_container_description(targets, self.namespace)
+
+    def _get_involucro_context(self):
+        involucro_context = InvolucroContext(**self._involucro_context_kwds)
+        self.enabled = ensure_installed(involucro_context, self.auto_init)
+        return involucro_context
+
+
+def mulled_targets(tool_info):
+    return requirements_to_mulled_targets(tool_info.requirements)
+
+
+__all__ = (
+    "CachedMulledContainerResolver",
+    "MulledContainerResolver",
+    "BuildMulledContainerResolver",
+)
diff --git a/lib/galaxy/tools/deps/containers.py b/lib/galaxy/tools/deps/containers.py
new file mode 100644
index 0000000..d303496
--- /dev/null
+++ b/lib/galaxy/tools/deps/containers.py
@@ -0,0 +1,452 @@
+import logging
+import os
+import string
+
+from abc import (
+    ABCMeta,
+    abstractmethod
+)
+
+import six
+
+from galaxy.util import asbool
+from galaxy.util import plugin_config
+
+from .container_resolvers.explicit import ExplicitContainerResolver
+from .container_resolvers.mulled import (
+    BuildMulledContainerResolver,
+    CachedMulledContainerResolver,
+    MulledContainerResolver,
+)
+from .requirements import ContainerDescription
+from .requirements import DEFAULT_CONTAINER_RESOLVE_DEPENDENCIES, DEFAULT_CONTAINER_SHELL
+from ..deps import docker_util
+
+log = logging.getLogger(__name__)
+
+DOCKER_CONTAINER_TYPE = "docker"
+DEFAULT_CONTAINER_TYPE = DOCKER_CONTAINER_TYPE
+ALL_CONTAINER_TYPES = [DOCKER_CONTAINER_TYPE]
+
+LOAD_CACHED_IMAGE_COMMAND_TEMPLATE = '''
+python << EOF
+import re, tarfile, json, subprocess
+t = tarfile.TarFile("${cached_image_file}")
+meta_str = t.extractfile('repositories').read()
+meta = json.loads(meta_str)
+tag, tag_value = meta.items()[0]
+rev, rev_value = tag_value.items()[0]
+cmd = "${images_cmd}"
+proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
+stdo, stde = proc.communicate()
+found = False
+for line in stdo.split("\\n"):
+    tmp = re.split(r'\s+', line)
+    if tmp[0] == tag and tmp[1] == rev and tmp[2] == rev_value:
+        found = True
+if not found:
+    print "Loading image"
+    cmd = "cat ${cached_image_file} | ${load_cmd}"
+    subprocess.check_call(cmd, shell=True)
+EOF
+'''
+
+
+class ContainerFinder(object):
+
+    def __init__(self, app_info):
+        self.app_info = app_info
+        self.container_registry = ContainerRegistry(app_info)
+
+    def __enabled_container_types(self, destination_info):
+        return [t for t in ALL_CONTAINER_TYPES if self.__container_type_enabled(t, destination_info)]
+
+    def find_container(self, tool_info, destination_info, job_info):
+        enabled_container_types = self.__enabled_container_types(destination_info)
+
+        # Short-cut everything else and just skip checks if no container type is enabled.
+        if not enabled_container_types:
+            return NULL_CONTAINER
+
+        def __destination_container(container_description=None, container_id=None, container_type=None):
+            if container_description:
+                container_id = container_description.identifier
+                container_type = container_description.type
+            container = self.__destination_container(
+                container_id,
+                container_type,
+                tool_info,
+                destination_info,
+                job_info,
+                container_description,
+            )
+            return container
+
+        if "container_override" in destination_info:
+            container_description = ContainerDescription.from_dict(destination_info["container_override"][0])
+            if container_description:
+                container = __destination_container(container_description)
+                if container:
+                    return container
+
+        # If the destination is forcing Galaxy to use a particular container,
+        # do it. This is likely kind of a corner case, e.g. when deployers
+        # do not trust the containers annotated in tools.
+        for container_type in CONTAINER_CLASSES.keys():
+            container_id = self.__overridden_container_id(container_type, destination_info)
+            if container_id:
+                container = __destination_container(container_type=container_type, container_id=container_id)
+                if container:
+                    return container
+
+        # Otherwise, let's see if we can find a container for the tool.
+        container_description = self.container_registry.find_best_container_description(enabled_container_types, tool_info)
+        container = __destination_container(container_description)
+        if container:
+            return container
+
+        # If we still don't have a container, check to see if the destination
+        # defines a default container description or default container ids.
+        if "container" in destination_info:
+            container_description = ContainerDescription.from_dict(destination_info["container"][0])
+            if container_description:
+                container = __destination_container(container_description)
+                if container:
+                    return container
+
+        for container_type in CONTAINER_CLASSES.keys():
+            container_id = self.__default_container_id(container_type, destination_info)
+            if container_id:
+                container = __destination_container(container_type=container_type, container_id=container_id)
+                if container:
+                    return container
+
+        return NULL_CONTAINER
+
+    def __overridden_container_id(self, container_type, destination_info):
+        if not self.__container_type_enabled(container_type, destination_info):
+            return None
+        if "%s_container_id_override" % container_type in destination_info:
+            return destination_info.get("%s_container_id_override" % container_type)
+        if "%s_image_override" % container_type in destination_info:
+            return self.__build_container_id_from_parts(container_type, destination_info, mode="override")
+
+    def __build_container_id_from_parts(self, container_type, destination_info, mode):
+        repo = ""
+        owner = ""
+        repo_key = "%s_repo_%s" % (container_type, mode)
+        owner_key = "%s_owner_%s" % (container_type, mode)
+        if repo_key in destination_info:
+            repo = destination_info[repo_key] + "/"
+        if owner_key in destination_info:
+            owner = destination_info[owner_key] + "/"
+        cont_id = repo + owner + destination_info["%s_image_%s" % (container_type, mode)]
+        tag_key = "%s_tag_%s" % (container_type, mode)
+        if tag_key in destination_info:
+            cont_id += ":" + destination_info[tag_key]
+        return cont_id
+
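+    # For instance (hypothetical destination settings): docker_repo_override
+    # "quay.io", docker_owner_override "biocontainers", docker_image_override
+    # "samtools", and docker_tag_override "1.3" combine into the container id
+    # "quay.io/biocontainers/samtools:1.3".
+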
+    def __default_container_id(self, container_type, destination_info):
+        if not self.__container_type_enabled(container_type, destination_info):
+            return None
+        key = "%s_default_container_id" % container_type
+        # Also allow docker_image...
+        if key not in destination_info:
+            key = "%s_image" % container_type
+        if key in destination_info:
+            return destination_info.get(key)
+        elif "%s_image_default" in destination_info:
+            return self.__build_container_id_from_parts(container_type, destination_info, mode="default")
+        return None
+
+    def __destination_container(self, container_id, container_type, tool_info, destination_info, job_info, container_description=None):
+        # TODO: ensure destination_info is dict-like
+        if not self.__container_type_enabled(container_type, destination_info):
+            return NULL_CONTAINER
+
+        # TODO: Right now this assumes all containers are available when a
+        # container type is - there should be more thought put into this:
+        # checking which are available, setting policies for what can be
+        # auto-fetched, etc....
+        return CONTAINER_CLASSES[container_type](container_id, self.app_info, tool_info, destination_info, job_info, container_description)
+
+    def __container_type_enabled(self, container_type, destination_info):
+        return asbool(destination_info.get("%s_enabled" % container_type, False))
+
+
+class NullContainerFinder(object):
+
+    def find_container(self, tool_info, destination_info, job_info):
+        return []
+
+
+class ContainerRegistry(object):
+    """Loop through enabled ContainerResolver plugins and find first match."""
+
+    def __init__(self, app_info):
+        self.resolver_classes = self.__resolvers_dict()
+        self.enable_beta_mulled_containers = app_info.enable_beta_mulled_containers
+        self.app_info = app_info
+        self.container_resolvers = self.__build_container_resolvers(app_info)
+
+    def __build_container_resolvers(self, app_info):
+        conf_file = getattr(app_info, 'containers_resolvers_config_file', None)
+        if not conf_file:
+            return self.__default_containers_resolvers()
+        if not os.path.exists(conf_file):
+            log.debug("Unable to find config file '%s'", conf_file)
+            return self.__default_containers_resolvers()
+        plugin_source = plugin_config.plugin_source_from_path(conf_file)
+        return self.__parse_resolver_conf_xml(plugin_source)
+
+    def __parse_resolver_conf_xml(self, plugin_source):
+        extra_kwds = {}
+        return plugin_config.load_plugins(self.resolver_classes, plugin_source, extra_kwds)
+
+    def __default_containers_resolvers(self):
+        default_resolvers = [
+            ExplicitContainerResolver(self.app_info),
+        ]
+        if self.enable_beta_mulled_containers:
+            default_resolvers.extend([
+                CachedMulledContainerResolver(self.app_info),
+                MulledContainerResolver(self.app_info, namespace="biocontainers"),
+                BuildMulledContainerResolver(self.app_info),
+            ])
+        return default_resolvers
+
+    def __resolvers_dict(self):
+        import galaxy.tools.deps.container_resolvers
+        return plugin_config.plugins_dict(galaxy.tools.deps.container_resolvers, 'resolver_type')
+
+    def find_best_container_description(self, enabled_container_types, tool_info):
+        """Yield best container description of supplied types matching tool info."""
+        for container_resolver in self.container_resolvers:
+            container_description = container_resolver.resolve(enabled_container_types, tool_info)
+            log.info("Container resolver [%s] returned description [%s]", container_resolver, container_description)
+            if container_description:
+                assert container_description.type in enabled_container_types
+                return container_description
+
+        return None
+
+
+class AppInfo(object):
+
+    def __init__(
+        self,
+        galaxy_root_dir=None,
+        default_file_path=None,
+        outputs_to_working_directory=False,
+        container_image_cache_path=None,
+        library_import_dir=None,
+        enable_beta_mulled_containers=False,
+        containers_resolvers_config_file=None,
+        involucro_path=None,
+        involucro_auto_init=True,
+    ):
+        self.galaxy_root_dir = galaxy_root_dir
+        self.default_file_path = default_file_path
+        # TODO: Vary default value for docker_volumes based on this...
+        self.outputs_to_working_directory = outputs_to_working_directory
+        self.container_image_cache_path = container_image_cache_path
+        self.library_import_dir = library_import_dir
+        self.enable_beta_mulled_containers = enable_beta_mulled_containers
+        self.containers_resolvers_config_file = containers_resolvers_config_file
+        self.involucro_path = involucro_path
+        self.involucro_auto_init = involucro_auto_init
+
+
+class ToolInfo(object):
+    # TODO: Introduce tool XML syntax to annotate the optional environment
+    # variables they can consume (e.g. JVM options, license keys, etc..)
+    # and add these to env_path_through
+
+    def __init__(self, container_descriptions=[], requirements=[]):
+        self.container_descriptions = container_descriptions
+        self.requirements = requirements
+        self.env_pass_through = ["GALAXY_SLOTS"]
+
+
+class JobInfo(object):
+
+    def __init__(self, working_directory, tool_directory, job_directory, job_directory_type):
+        self.working_directory = working_directory
+        self.job_directory = job_directory
+        # Tool files may be staged remotely - so this is, unintuitively, a
+        # property of the job, not of the tool.
+        self.tool_directory = tool_directory
+        self.job_directory_type = job_directory_type  # "galaxy" or "pulsar"
+
+
+class Container(object):
+    __metaclass__ = ABCMeta
+
+    def __init__(self, container_id, app_info, tool_info, destination_info, job_info, container_description):
+        self.container_id = container_id
+        self.app_info = app_info
+        self.tool_info = tool_info
+        self.destination_info = destination_info
+        self.job_info = job_info
+        self.container_description = container_description
+
+    @property
+    def resolve_dependencies(self):
+        return DEFAULT_CONTAINER_RESOLVE_DEPENDENCIES if not self.container_description else self.container_description.resolve_dependencies
+
+    @property
+    def shell(self):
+        return DEFAULT_CONTAINER_SHELL if not self.container_description else self.container_description.shell
+
+    @abstractmethod
+    def containerize_command(self, command):
+        """
+        Use destination supplied container configuration parameters,
+        container_id, and command to build a new command that runs
+        input command in container.
+        """
+
+
+class DockerContainer(Container):
+
+    def containerize_command(self, command):
+        def prop(name, default):
+            destination_name = "docker_%s" % name
+            return self.destination_info.get(destination_name, default)
+
+        env_directives = []
+        for pass_through_var in self.tool_info.env_pass_through:
+            env_directives.append('"%s=$%s"' % (pass_through_var, pass_through_var))
+
+        # Allow destinations to explicitly set environment variables just for
+        # the docker container. A better approach would be to set them for the
+        # destination and then pass through only what the tool needs. (See
+        # the TODO in ToolInfo.)
+        for key, value in six.iteritems(self.destination_info):
+            if key.startswith("docker_env_"):
+                env = key[len("docker_env_"):]
+                env_directives.append('"%s=%s"' % (env, value))
+
+        working_directory = self.job_info.working_directory
+        if not working_directory:
+            raise Exception("Cannot containerize command [%s] without defined working directory." % working_directory)
+
+        volumes_raw = self.__expand_str(self.destination_info.get("docker_volumes", "$defaults"))
+        # TODO: Remove redundant volumes...
+        volumes = docker_util.DockerVolume.volumes_from_str(volumes_raw)
+        volumes_from = self.destination_info.get("docker_volumes_from", docker_util.DEFAULT_VOLUMES_FROM)
+
+        docker_host_props = dict(
+            docker_cmd=prop("cmd", docker_util.DEFAULT_DOCKER_COMMAND),
+            sudo=asbool(prop("sudo", docker_util.DEFAULT_SUDO)),
+            sudo_cmd=prop("sudo_cmd", docker_util.DEFAULT_SUDO_COMMAND),
+            host=prop("host", docker_util.DEFAULT_HOST),
+        )
+
+        cached_image_file = self.__get_cached_image_file()
+        if not cached_image_file:
+            # TODO: Add option to cache it once here and create cached_image_file.
+            cache_command = docker_util.build_docker_cache_command(self.container_id, **docker_host_props)
+        else:
+            cache_command = self.__cache_from_file_command(cached_image_file, docker_host_props)
+        run_command = docker_util.build_docker_run_command(
+            command,
+            self.container_id,
+            volumes=volumes,
+            volumes_from=volumes_from,
+            env_directives=env_directives,
+            working_directory=working_directory,
+            net=prop("net", "none"),  # By default, docker instance has networking disabled
+            auto_rm=asbool(prop("auto_rm", docker_util.DEFAULT_AUTO_REMOVE)),
+            set_user=prop("set_user", docker_util.DEFAULT_SET_USER),
+            run_extra_arguments=prop("run_extra_arguments", docker_util.DEFAULT_RUN_EXTRA_ARGUMENTS),
+            **docker_host_props
+        )
+        return "%s\n%s" % (cache_command, run_command)
+
+    def __cache_from_file_command(self, cached_image_file, docker_host_props):
+        images_cmd = docker_util.build_docker_images_command(truncate=False, **docker_host_props)
+        load_cmd = docker_util.build_docker_load_command(**docker_host_props)
+
+        return string.Template(LOAD_CACHED_IMAGE_COMMAND_TEMPLATE).safe_substitute(
+            cached_image_file=cached_image_file,
+            images_cmd=images_cmd,
+            load_cmd=load_cmd
+        )
+
+    def __get_cached_image_file(self):
+        container_id = self.container_id
+        cache_directory = os.path.abspath(self.__get_destination_overridable_property("container_image_cache_path"))
+        cache_path = docker_cache_path(cache_directory, container_id)
+        return cache_path if os.path.exists(cache_path) else None
+
+    def __get_destination_overridable_property(self, name):
+        prop_name = "docker_%s" % name
+        if prop_name in self.destination_info:
+            return self.destination_info[prop_name]
+        else:
+            return getattr(self.app_info, name)
+
+    def __expand_str(self, value):
+        if not value:
+            return value
+
+        template = string.Template(value)
+        variables = dict()
+
+        def add_var(name, value):
+            if value:
+                variables[name] = os.path.abspath(value)
+
+        add_var("working_directory", self.job_info.working_directory)
+        add_var("job_directory", self.job_info.job_directory)
+        add_var("tool_directory", self.job_info.tool_directory)
+        add_var("galaxy_root", self.app_info.galaxy_root_dir)
+        add_var("default_file_path", self.app_info.default_file_path)
+        add_var("library_import_dir", self.app_info.library_import_dir)
+
+        if self.job_info.job_directory and self.job_info.job_directory_type == "pulsar":
+            # We have a Pulsar job directory, so everything needed (excluding index
+            # files) should be available in job_directory...
+            defaults = "$job_directory:ro,$tool_directory:ro,$job_directory/outputs:rw,$working_directory:rw"
+        else:
+            defaults = "$galaxy_root:ro,$tool_directory:ro"
+            if self.job_info.job_directory:
+                defaults += ",$job_directory:ro"
+            if self.app_info.outputs_to_working_directory:
+                # Should only need read access to default_file_path (which is
+                # a coarse estimate given object stores anyway).
+                defaults += ",$working_directory:rw,$default_file_path:ro"
+            else:
+                defaults += ",$working_directory:rw,$default_file_path:rw"
+
+        if self.app_info.library_import_dir:
+            defaults += ",$library_import_dir:ro"
+
+        # Define $defaults so it can easily be extended with external library
+        # and index data without the deployer worrying about the details above.
+        variables["defaults"] = string.Template(defaults).safe_substitute(variables)
+
+        return template.safe_substitute(variables)
+
+
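+# Expansion sketch for DockerContainer.__expand_str (hypothetical destination
+# setting): a docker_volumes value such as "$defaults,/reference:ro" first has
+# $defaults replaced by the computed default mounts, then each remaining
+# $variable substituted with its absolute path.
+
+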
+def docker_cache_path(cache_directory, container_id):
+    file_container_id = container_id.replace("/", "_slash_")
+    cache_file_name = "docker_%s.tar" % file_container_id
+    return os.path.join(cache_directory, cache_file_name)
+
+
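+# For example, docker_cache_path("/cache", "quay.io/biocontainers/samtools")
+# returns "/cache/docker_quay.io_slash_biocontainers_slash_samtools.tar"
+# (illustrative paths).
+
+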
+CONTAINER_CLASSES = dict(
+    docker=DockerContainer,
+)
+
+
+class NullContainer(object):
+
+    def __init__(self):
+        pass
+
+    def __nonzero__(self):
+        return False
+
+
+NULL_CONTAINER = NullContainer()
diff --git a/lib/galaxy/tools/deps/dependencies.py b/lib/galaxy/tools/deps/dependencies.py
new file mode 100644
index 0000000..6dd372a
--- /dev/null
+++ b/lib/galaxy/tools/deps/dependencies.py
@@ -0,0 +1,68 @@
+from galaxy.tools.deps.requirements import ToolRequirement
+from galaxy.util import bunch
+
+
+class DependenciesDescription(object):
+    """ Capture (in a readily serializable way) context related a tool
+    dependencies - both the tool's listed requirements and the tool shed
+    related context required to resolve dependencies via the
+    ToolShedPackageDependencyResolver.
+
+    This is meant to enable remote resolution of dependencies, by Pulsar or
+    other potential remote execution mechanisms.
+    """
+
+    def __init__(self, requirements=[], installed_tool_dependencies=[]):
+        self.requirements = requirements
+        # tool shed installed tool dependencies...
+        self.installed_tool_dependencies = installed_tool_dependencies
+
+    def to_dict(self):
+        return dict(
+            requirements=[r.to_dict() for r in self.requirements],
+            installed_tool_dependencies=[DependenciesDescription._toolshed_install_dependency_to_dict(d) for d in self.installed_tool_dependencies]
+        )
+
+    @staticmethod
+    def from_dict(as_dict):
+        if as_dict is None:
+            return None
+
+        requirements_dicts = as_dict.get('requirements', [])
+        requirements = [ToolRequirement.from_dict(r) for r in requirements_dicts]
+        installed_tool_dependencies_dicts = as_dict.get('installed_tool_dependencies', [])
+        installed_tool_dependencies = map(DependenciesDescription._toolshed_install_dependency_from_dict, installed_tool_dependencies_dicts)
+        return DependenciesDescription(
+            requirements=requirements,
+            installed_tool_dependencies=installed_tool_dependencies
+        )
+
+    @staticmethod
+    def _toolshed_install_dependency_from_dict(as_dict):
+        # Rather than requiring full models in Pulsar, just use simple objects
+        # containing only properties and associations used to resolve
+        # dependencies for tool execution.
+        repository_object = bunch.Bunch(
+            name=as_dict['repository_name'],
+            owner=as_dict['repository_owner'],
+            installed_changeset_revision=as_dict['repository_installed_changeset'],
+        )
+        dependency_object = bunch.Bunch(
+            name=as_dict['dependency_name'],
+            version=as_dict['dependency_version'],
+            type=as_dict['dependency_type'],
+            tool_shed_repository=repository_object,
+        )
+        return dependency_object
+
+    @staticmethod
+    def _toolshed_install_dependency_to_dict(tool_dependency):
+        tool_shed_repository = tool_dependency.tool_shed_repository
+        return dict(
+            dependency_name=tool_dependency.name,
+            dependency_version=tool_dependency.version,
+            dependency_type=tool_dependency.type,
+            repository_name=tool_shed_repository.name,
+            repository_owner=tool_shed_repository.owner,
+            repository_installed_changeset=tool_shed_repository.installed_changeset_revision,
+        )
diff --git a/lib/galaxy/tools/deps/docker_util.py b/lib/galaxy/tools/deps/docker_util.py
new file mode 100644
index 0000000..5d27d2b
--- /dev/null
+++ b/lib/galaxy/tools/deps/docker_util.py
@@ -0,0 +1,215 @@
+"""Utilities for building up Docker commands...
+
+...using common defaults and configuration mechanisms.
+"""
+import os
+
+from six.moves import shlex_quote
+
+from .commands import argv_to_str
+
+DEFAULT_DOCKER_COMMAND = "docker"
+DEFAULT_SUDO = True
+DEFAULT_SUDO_COMMAND = "sudo"
+DEFAULT_HOST = None
+DEFAULT_VOLUME_MOUNT_TYPE = "rw"
+DEFAULT_WORKING_DIRECTORY = None
+DEFAULT_NET = None
+DEFAULT_MEMORY = None
+DEFAULT_VOLUMES_FROM = None
+DEFAULT_AUTO_REMOVE = True
+DEFAULT_SET_USER = "$UID"
+DEFAULT_RUN_EXTRA_ARGUMENTS = None
+
+
+class DockerVolume(object):
+
+    def __init__(self, path, to_path=None, how=DEFAULT_VOLUME_MOUNT_TYPE):
+        self.from_path = path
+        self.to_path = to_path or path
+        if not DockerVolume.__valid_how(how):
+            raise ValueError("Invalid way to specify docker volume %s" % how)
+        self.how = how
+
+    @staticmethod
+    def volumes_from_str(volumes_as_str):
+        if not volumes_as_str:
+            return []
+        volume_strs = [v.strip() for v in volumes_as_str.split(",")]
+        return [DockerVolume.volume_from_str(_) for _ in volume_strs]
+
+    @staticmethod
+    def volume_from_str(as_str):
+        if not as_str:
+            raise ValueError("Failed to parse docker volume from %s" % as_str)
+        parts = as_str.split(":", 2)
+        kwds = dict(path=parts[0])
+        if len(parts) == 2:
+            if DockerVolume.__valid_how(parts[1]):
+                kwds["how"] = parts[1]
+            else:
+                kwds["to_path"] = parts[1]
+        elif len(parts) == 3:
+            kwds["to_path"] = parts[1]
+            kwds["how"] = parts[2]
+        return DockerVolume(**kwds)
+
+    @staticmethod
+    def __valid_how(how):
+        return how in ["ro", "rw"]
+
+    def __str__(self):
+        return ":".join([self.from_path, self.to_path, self.how])
+
+
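+# Accepted volume-string forms (values illustrative): "/data" mounts
+# read-write at the same path in the container, "/data:ro" sets the mount
+# type, and "/data:/mnt/data:ro" also remaps the container path, so
+# str(DockerVolume.volume_from_str("/data")) == "/data:/data:rw".
+
+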
+def kill_command(
+    container,
+    signal=None,
+    **kwds
+):
+    args = (["-s", signal] if signal else []) + [container]
+    return command_list("kill", args, **kwds)
+
+
+def logs_command(
+    container,
+    **kwds
+):
+    return command_list("logs", **kwds)
+
+
+def build_command(
+    image,
+    docker_build_path,
+    **kwds
+):
+    if os.path.isfile(docker_build_path):
+        docker_build_path = os.path.dirname(os.path.abspath(docker_build_path))
+    return command_list("build", ["-t", image, docker_build_path], **kwds)
+
+
+def build_save_image_command(
+    image,
+    destination,
+    **kwds
+):
+    return command_list("save", ["-o", destination, image], **kwds)
+
+
+def build_pull_command(
+    tag,
+    **kwds
+):
+    return command_list("pull", [tag], **kwds)
+
+
+def build_docker_cache_command(
+    image,
+    **kwds
+):
+    inspect_image_command = command_shell("inspect", [image], **kwds)
+    pull_image_command = command_shell("pull", [image], **kwds)
+    cache_command = "%s > /dev/null 2>&1\n[ $? -ne 0 ] && %s > /dev/null 2>&1\n" % (inspect_image_command, pull_image_command)
+    return cache_command
+
+
+def build_docker_images_command(truncate=True, **kwds):
+    args = ["--no-trunc"] if not truncate else[]
+    return command_shell("images", args, **kwds)
+
+
+def build_docker_load_command(**kwds):
+    return command_shell("load", [], **kwds)
+
+
+def build_docker_run_command(
+    container_command,
+    image,
+    interactive=False,
+    terminal=False,
+    tag=None,
+    volumes=[],
+    volumes_from=DEFAULT_VOLUMES_FROM,
+    memory=DEFAULT_MEMORY,
+    env_directives=[],
+    working_directory=DEFAULT_WORKING_DIRECTORY,
+    name=None,
+    net=DEFAULT_NET,
+    run_extra_arguments=DEFAULT_RUN_EXTRA_ARGUMENTS,
+    docker_cmd=DEFAULT_DOCKER_COMMAND,
+    sudo=DEFAULT_SUDO,
+    sudo_cmd=DEFAULT_SUDO_COMMAND,
+    auto_rm=DEFAULT_AUTO_REMOVE,
+    set_user=DEFAULT_SET_USER,
+    host=DEFAULT_HOST,
+):
+    command_parts = _docker_prefix(
+        docker_cmd=docker_cmd,
+        sudo=sudo,
+        sudo_cmd=sudo_cmd,
+        host=host
+    )
+    command_parts.append("run")
+    if interactive:
+        command_parts.append("-i")
+    if terminal:
+        command_parts.append("-t")
+    for env_directive in env_directives:
+        command_parts.extend(["-e", shlex_quote(env_directive)])
+    for volume in volumes:
+        command_parts.extend(["-v", shlex_quote(str(volume))])
+    if volumes_from:
+        command_parts.extend(["--volumes-from", shlex_quote(str(volumes_from))])
+    if memory:
+        command_parts.extend(["-m", shlex_quote(memory)])
+    if name:
+        command_parts.extend(["--name", shlex_quote(name)])
+    if working_directory:
+        command_parts.extend(["-w", shlex_quote(working_directory)])
+    if net:
+        command_parts.extend(["--net", shlex_quote(net)])
+    if auto_rm:
+        command_parts.append("--rm")
+    if run_extra_arguments:
+        command_parts.append(run_extra_arguments)
+    if set_user:
+        user = set_user
+        if set_user == DEFAULT_SET_USER:
+            user = str(os.geteuid())
+        command_parts.extend(["-u", user])
+    full_image = image
+    if tag:
+        full_image = "%s:%s" % (full_image, tag)
+    command_parts.append(shlex_quote(full_image))
+    command_parts.append(container_command)
+    return " ".join(command_parts)
+
+
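+# Output sketch: with typical defaults this builds a shell string roughly
+# like (illustrative values):
+#   sudo docker run -e "GALAXY_SLOTS=$GALAXY_SLOTS" -v /job:/job:rw \
+#       -w /job --net none --rm -u 1000 quay.io/biocontainers/samtools:1.3 <cmd>
+
+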
+def command_list(command, command_args=[], **kwds):
+    """Return Docker command as an argv list."""
+    command_parts = _docker_prefix(**kwds)
+    command_parts.append(command)
+    command_parts.extend(command_args)
+    return command_parts
+
+
+def command_shell(command, command_args=[], **kwds):
+    """Return Docker command as a string for a shell."""
+    return argv_to_str(command_list(command, command_args, **kwds))
+
+
+def _docker_prefix(
+    docker_cmd=DEFAULT_DOCKER_COMMAND,
+    sudo=DEFAULT_SUDO,
+    sudo_cmd=DEFAULT_SUDO_COMMAND,
+    host=DEFAULT_HOST,
+    **kwds
+):
+    """Prefix to issue a docker command."""
+    command_parts = []
+    if sudo:
+        command_parts.append(sudo_cmd)
+    command_parts.append(docker_cmd)
+    if host:
+        command_parts.extend(["-H", host])
+    return command_parts
diff --git a/lib/galaxy/tools/deps/dockerfiles.py b/lib/galaxy/tools/deps/dockerfiles.py
new file mode 100644
index 0000000..bb0a292
--- /dev/null
+++ b/lib/galaxy/tools/deps/dockerfiles.py
@@ -0,0 +1,75 @@
+import logging
+import os
+
+from ..deps import commands
+from ..deps import docker_util
+from ..deps.containers import docker_cache_path
+from ..deps.requirements import parse_requirements_from_xml
+from ...tools import loader_directory
+
+log = logging.getLogger(__name__)
+
+
+def docker_host_args(**kwds):
+    return dict(
+        docker_cmd=kwds["docker_cmd"],
+        sudo=kwds["docker_sudo"],
+        sudo_cmd=kwds["docker_sudo_cmd"],
+        host=kwds["docker_host"]
+    )
+
+
+def dockerfile_build(path, dockerfile=None, error=log.error, **kwds):
+    expected_container_names = set()
+    tool_directories = set()
+    for (tool_path, tool_xml) in loader_directory.load_tool_elements_from_path(path):
+        requirements, containers = parse_requirements_from_xml(tool_xml)
+        for container in containers:
+            if container.type == "docker":
+                expected_container_names.add(container.identifier)
+                tool_directories.add(os.path.dirname(tool_path))
+                break
+
+    if len(expected_container_names) == 0:
+        error("Could not find any docker identifiers to generate.")
+
+    if len(expected_container_names) > 1:
+        error("Multiple different docker identifiers found for selected tools [%s]", expected_container_names)
+
+    image_identifier = expected_container_names.pop()
+
+    dockerfile = __find_dockerfile(dockerfile, tool_directories)
+    if dockerfile is not None:
+        docker_command_parts = docker_util.build_command(
+            image_identifier,
+            dockerfile,
+            **docker_host_args(**kwds)
+        )
+    else:
+        docker_command_parts = docker_util.build_pull_command(image_identifier, **docker_host_args(**kwds))
+
+    commands.execute(docker_command_parts)
+    docker_image_cache = kwds['docker_image_cache']
+    if docker_image_cache:
+        destination = docker_cache_path(docker_image_cache, image_identifier)
+        save_image_command_parts = docker_util.build_save_image_command(
+            image_identifier,
+            destination,
+            **docker_host_args(**kwds)
+        )
+        commands.execute(save_image_command_parts)
+
+
+def __find_dockerfile(dockerfile, tool_directories):
+    if dockerfile is not None:
+        return dockerfile
+    search_directories = ["."]
+    if len(tool_directories) == 1:
+        tool_directory = tool_directories.pop()
+        search_directories.insert(0, tool_directory)
+    for directory in search_directories:
+        potential_dockerfile = os.path.join(directory, "Dockerfile")
+        if os.path.exists(potential_dockerfile):
+            return potential_dockerfile
+    return None
diff --git a/lib/galaxy/tools/deps/installable.py b/lib/galaxy/tools/deps/installable.py
new file mode 100644
index 0000000..cacc069
--- /dev/null
+++ b/lib/galaxy/tools/deps/installable.py
@@ -0,0 +1,77 @@
+"""Abstractions for installing local software managed and required by Galaxy/galaxy-lib."""
+
+import logging
+import os
+
+from abc import (
+    ABCMeta,
+    abstractmethod,
+    abstractproperty,
+)
+
+from galaxy.util.filelock import (
+    FileLock,
+    FileLockException
+)
+
+log = logging.getLogger(__name__)
+
+
+class InstallableContext(object):
+    """Represent a directory/configuration of something that can be installed."""
+
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def is_installed(self):
+        """Return bool indicating if the configured software is installed."""
+
+    @abstractmethod
+    def can_install(self):
+        """Check preconditions for installation."""
+
+    @abstractproperty
+    def installable_description(self):
+        """Short description of thing being installed for log statements."""
+
+    @abstractproperty
+    def parent_path(self):
+        """Return parent path of the location the installable will be created within."""
+
+
+def ensure_installed(installable_context, install_func, auto_init):
+    """Make sure target is installed - handle multiple processes potentially attempting installation."""
+    parent_path = installable_context.parent_path
+    desc = installable_context.installable_description
+
+    def _check():
+        if not installable_context.is_installed():
+            if auto_init:
+                if installable_context.can_install():
+                    if install_func(installable_context):
+                        installed = False
+                        log.warning("%s installation requested and failed." % desc)
+                    else:
+                        installed = installable_context.is_installed()
+                        if not installed:
+                            log.warning("%s installation requested, seemed to succeed, but not found." % desc)
+                else:
+                    installed = False
+            else:
+                installed = False
+                log.warning("%s not installed and auto-installation disabled.", desc)
+        else:
+            installed = True
+        return installed
+
+    if not os.path.exists(parent_path):
+        os.mkdir(parent_path)
+
+    try:
+        if auto_init and os.access(parent_path, os.W_OK):
+            with FileLock(os.path.join(parent_path, desc.lower())):
+                return _check()
+        else:
+            return _check()
+    except FileLockException:
+        return ensure_installed(installable_context, install_func, auto_init)
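+
+
+# Concurrency sketch: the first process to acquire the file lock performs the
+# installation; a process that hits FileLockException retries and finds the
+# software installed once the lock holder finishes.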
diff --git a/lib/galaxy/tools/deps/mulled/__init__.py b/lib/galaxy/tools/deps/mulled/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/tools/deps/mulled/_cli.py b/lib/galaxy/tools/deps/mulled/_cli.py
new file mode 100644
index 0000000..067063d
--- /dev/null
+++ b/lib/galaxy/tools/deps/mulled/_cli.py
@@ -0,0 +1,20 @@
+"""CLI helpers for mulled command-line tools."""
+
+import argparse
+
+
+def arg_parser(argv, globals):
+    """Build an argparser for this CLI tool."""
+    doc = globals["__doc__"]
+    description, epilog = doc.split("\n", 1)
+    parser = argparse.ArgumentParser(
+        description=description,
+        epilog=epilog,
+        formatter_class=argparse.RawTextHelpFormatter,
+    )
+    return parser
+
+
+__all__ = (
+    "arg_parser",
+)
diff --git a/lib/galaxy/tools/deps/mulled/invfile.lua b/lib/galaxy/tools/deps/mulled/invfile.lua
new file mode 100644
index 0000000..629c15f
--- /dev/null
+++ b/lib/galaxy/tools/deps/mulled/invfile.lua
@@ -0,0 +1,67 @@
+-- http://stackoverflow.com/questions/19262761/lua-need-to-split-at-comma/19263313#19263313
+function string:split( inSplitPattern, outResults )
+  if not outResults then
+    outResults = { }
+  end
+  local theStart = 1
+  local theSplitStart, theSplitEnd = string.find( self, inSplitPattern, theStart )
+  while theSplitStart do
+    table.insert( outResults, string.sub( self, theStart, theSplitStart-1 ) )
+    theStart = theSplitEnd + 1
+    theSplitStart, theSplitEnd = string.find( self, inSplitPattern, theStart )
+  end
+  table.insert( outResults, string.sub( self, theStart ) )
+  return outResults
+end
+
+local repo = VAR.REPO
+
+local channel_args = ''
+local channels = VAR.CHANNELS:split(",")
+for i = 1, #channels do
+    channel_args = channel_args .. " -c " .. channels[i]
+end
+
+local target_args = ''
+local targets = VAR.TARGETS:split(",")
+for i = 1, #targets do
+    target_args = target_args .. " " .. targets[i]
+end
+
+local bind_args = {}
+local binds_table = VAR.BINDS:split(",")
+for i = 1, #binds_table do
+    table.insert(bind_args, binds_table[i])
+end
+
+inv.task('build')
+    .using('continuumio/miniconda:latest')
+        .withHostConfig({binds = {"build:/data"}})
+        .run('rm', '-rf', '/data/dist')
+    .using('continuumio/miniconda:latest')
+        .withHostConfig({binds = bind_args})
+        .run('/bin/sh', '-c', 'conda install '
+            .. channel_args .. ' '
+            .. target_args
+            .. ' -p /usr/local --copy --yes')
+    .wrap('build/dist')
+        .at('/usr/local')
+        .inImage('bgruening/busybox-bash:0.1')
+        .as(repo)
+
+inv.task('test')
+    .using(repo)
+    .withConfig({entrypoint = {'/bin/sh', '-c'}})
+    .run(VAR.TEST)
+
+inv.task('push')
+    .push(repo)
+
+inv.task('build-and-test')
+    .runTask('build')
+    .runTask('test')
+
+inv.task('all')
+    .runTask('build')
+    .runTask('test')
+    .runTask('push')
diff --git a/lib/galaxy/tools/deps/mulled/mulled_build.py b/lib/galaxy/tools/deps/mulled/mulled_build.py
new file mode 100644
index 0000000..3a4e169
--- /dev/null
+++ b/lib/galaxy/tools/deps/mulled/mulled_build.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+"""Build a mulled image for specified conda targets.
+
+Examples:
+
+Build a mulled image with:
+
+    mulled-build build 'samtools=1.3.1--4,bedtools=2.22'
+
+"""
+from __future__ import print_function
+
+import json
+import os
+import string
+import subprocess
+import sys
+from sys import platform as _platform
+
+try:
+    import yaml
+except ImportError:
+    yaml = None
+
+from galaxy.tools.deps import commands, installable
+
+from ._cli import arg_parser
+from .util import build_target, conda_build_target_str, image_name
+from ..conda_compat import MetaData
+
+DIRNAME = os.path.dirname(__file__)
+DEFAULT_CHANNEL = "bioconda"
+DEFAULT_EXTRA_CHANNELS = ["conda-forge", "r"]
+DEFAULT_CHANNELS = [DEFAULT_CHANNEL] + DEFAULT_EXTRA_CHANNELS
+DEFAULT_REPOSITORY_TEMPLATE = "quay.io/${namespace}/${image}"
+DEFAULT_BINDS = ["build/dist:/usr/local/"]
+IS_OS_X = _platform == "darwin"
+INVOLUCRO_VERSION = "1.1.2"
+
+
+def involucro_link():
+    if IS_OS_X:
+        url = "https://github.com/involucro/involucro/releases/download/v%s/involucro.darwin" % INVOLUCRO_VERSION
+    else:
+        url = "https://github.com/involucro/involucro/releases/download/v%s/involucro" % INVOLUCRO_VERSION
+    return url
+
+
+def get_tests(args, pkg_path):
+    """Extract test cases given a recipe's meta.yaml file."""
+    recipes_dir = args.recipes_dir
+
+    tests = []
+    input_dir = os.path.dirname(os.path.join(recipes_dir, pkg_path))
+    recipe_meta = MetaData(input_dir)
+
+    tests_commands = recipe_meta.get_value('test/commands')
+    tests_imports = recipe_meta.get_value('test/imports')
+    requirements = recipe_meta.get_value('requirements/run')
+
+    if tests_imports or tests_commands:
+        if tests_commands:
+            tests.append(' && '.join(tests_commands))
+        if tests_imports and 'python' in requirements:
+            tests.append(' && '.join('python -c "import %s"' % imp for imp in tests_imports))
+        elif tests_imports and ('perl' in requirements or 'perl-threaded' in requirements):
+            tests.append(' && '.join('''perl -e "use %s;"''' % imp for imp in tests_imports))
+
+    tests = ' && '.join(tests)
+    tests = tests.replace('$R ', 'Rscript ')
+    return tests
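+
+# For a hypothetical recipe with test/commands ['samtools --help'] and
+# test/imports ['pysam'] (with python listed in requirements/run), get_tests
+# returns: 'samtools --help && python -c "import pysam"'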
+
+
+def get_pkg_name(args, pkg_path):
+    """Extract the package name from a given meta.yaml file."""
+    recipes_dir = args.recipes_dir
+
+    input_dir = os.path.dirname(os.path.join(recipes_dir, pkg_path))
+    recipe_meta = MetaData(input_dir)
+    return recipe_meta.get_value('package/name')
+
+
+def get_affected_packages(args):
+    """Return a list of all meta.yaml file that where modified/created recently.
+
+    Length of time to check for indicated by the ``hours`` parameter.
+    """
+    recipes_dir = args.recipes_dir
+    hours = args.diff_hours
+    cmd = """cd '%s' && git log --diff-filter=ACMRTUXB --name-only --pretty="" --since="%s hours ago" | grep -E '^recipes/.*/meta.yaml' | sort | uniq""" % (recipes_dir, hours)
+    pkg_list = check_output(cmd, shell=True)
+    ret = list()
+    for pkg in pkg_list.strip().split('\n'):
+        if pkg and os.path.exists(os.path.join(recipes_dir, pkg)):
+            ret.append((get_pkg_name(args, pkg), get_tests(args, pkg)))
+    return ret
+
+
+def check_output(cmd, shell=True):
+    return subprocess.check_output(cmd, shell=shell)
+
+
+def conda_versions(pkg_name, file_name):
+    """Return all conda version strings for a specified package name."""
+    with open(file_name) as fh:
+        j = json.load(fh)
+    ret = list()
+    for pkg in j['packages'].values():
+        if pkg['name'] == pkg_name:
+            ret.append('%s--%s' % (pkg['version'], pkg['build']))
+    return ret
+
+
+def mull_targets(
+    targets, involucro_context=None,
+    command="build", channels=DEFAULT_CHANNELS, namespace="mulled",
+    test='true', image_build=None, name_override=None,
+    repository_template=DEFAULT_REPOSITORY_TEMPLATE, dry_run=False,
+    binds=DEFAULT_BINDS
+):
+    targets = list(targets)
+    if involucro_context is None:
+        involucro_context = InvolucroContext()
+
+    repo_template_kwds = {
+        "namespace": namespace,
+        "image": image_name(targets, image_build=image_build, name_override=name_override)
+    }
+    repo = string.Template(repository_template).safe_substitute(repo_template_kwds)
+
+    binds = list(binds)  # copy so the shared default list is not mutated
+    for channel in channels:
+        if channel.startswith('file://'):
+            # str.lstrip() strips a set of characters, not a prefix, so strip
+            # the file:// scheme (and any leading slashes) explicitly.
+            bind_path = channel[len('file://'):].lstrip('/')
+            binds.append('/%s:/%s' % (bind_path, bind_path))
+
+    channels = ",".join(channels)
+    target_str = ",".join(map(conda_build_target_str, targets))
+    bind_str = ",".join(binds)
+    involucro_args = [
+        '-f', '%s/invfile.lua' % DIRNAME,
+        '-set', "CHANNELS='%s'" % channels,
+        '-set', "TEST='%s'" % test,
+        '-set', "TARGETS='%s'" % target_str,
+        '-set', "REPO='%s'" % repo,
+        '-set', "BINDS='%s'" % bind_str,
+        command,
+    ]
+    print(" ".join(involucro_context.build_command(involucro_args)))
+    if not dry_run:
+        ensure_installed(involucro_context, True)
+        return involucro_context.exec_command(involucro_args)
+    return 0
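+
+# Dry-run sketch (illustrative values): a call such as
+#     mull_targets([build_target("samtools", "1.3.1", "4")], dry_run=True)
+# prints a command of the form
+#     involucro -v=3 -f .../invfile.lua -set CHANNELS='bioconda,conda-forge,r'
+#       -set TEST='true' -set TARGETS='samtools=1.3.1=4'
+#       -set REPO='quay.io/mulled/samtools:1.3.1--4'
+#       -set BINDS='build/dist:/usr/local/' build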
+
+
+def context_from_args(args):
+    return InvolucroContext(involucro_bin=args.involucro_path)
+
+
+class InvolucroContext(installable.InstallableContext):
+
+    installable_description = "Involucro"
+
+    def __init__(self, involucro_bin=None, shell_exec=None, verbose="3"):
+        if involucro_bin is None:
+            if os.path.exists("./involucro"):
+                self.involucro_bin = "./involucro"
+            else:
+                self.involucro_bin = "involucro"
+        else:
+            self.involucro_bin = involucro_bin
+        self.shell_exec = shell_exec or commands.shell
+        self.verbose = verbose
+
+    def build_command(self, involucro_args):
+        return [self.involucro_bin, "-v=%s" % self.verbose] + involucro_args
+
+    def exec_command(self, involucro_args):
+        cmd = self.build_command(involucro_args)
+        return self.shell_exec(" ".join(cmd))
+
+    def is_installed(self):
+        return os.path.exists(self.involucro_bin)
+
+    def can_install(self):
+        return True
+
+    @property
+    def parent_path(self):
+        return os.path.dirname(os.path.abspath(self.involucro_bin))
+
+
+def ensure_installed(involucro_context, auto_init):
+    return installable.ensure_installed(involucro_context, install_involucro, auto_init)
+
+
+def install_involucro(involucro_context=None, to_path=None):
+    to_path = involucro_context.involucro_bin
+    download_cmd = " ".join(commands.download_command(involucro_link(), to=to_path, quote_url=True))
+    full_cmd = "%s && chmod +x %s" % (download_cmd, to_path)
+    return involucro_context.shell_exec(full_cmd)
+
+
+def add_build_arguments(parser):
+    """Base arguments describing how to 'mull'."""
+    parser.add_argument('--involucro-path', dest="involucro_path", default=None,
+                        help="Path to involucro (if not set will look in working directory and on PATH).")
+    parser.add_argument('--force-rebuild', dest="force_rebuild", action="store_true",
+                        help="Rebuild package even if already published.")
+    parser.add_argument('--dry-run', dest='dry_run', action="store_true",
+                        help='Just print commands instead of executing them.')
+    parser.add_argument('-n', '--namespace', dest='namespace', default="mulled",
+                        help='quay.io namespace.')
+    parser.add_argument('-r', '--repository_template', dest='repository_template', default=DEFAULT_REPOSITORY_TEMPLATE,
+                        help='Docker repository target for publication (only quay.io or compat. API is currently supported).')
+    parser.add_argument('-c', '--channel', dest='channel', default=DEFAULT_CHANNEL,
+                        help='Target conda channel')
+    parser.add_argument('--extra-channels', dest='extra_channels', default=",".join(DEFAULT_EXTRA_CHANNELS),
+                        help='Dependent conda channels.')
+
+
+def add_single_image_arguments(parser):
+    parser.add_argument("--name-override", dest="name_override", default=None,
+                        help="Override mulled image name - this is not recommended since metadata will not be detectable from the name of resulting images")
+    parser.add_argument("--image-build", dest="image_build", default=None,
+                        help="Build a versioned variant of this image.")
+
+
+def target_str_to_targets(targets_raw):
+    def parse_target(target_str):
+        if "=" in target_str:
+            package_name, version = target_str.split("=", 1)
+            build = None
+            if "--" in version:
+                version, build = version.split('--')
+            target = build_target(package_name, version, build)
+        else:
+            target = build_target(target_str)
+        return target
+
+    targets = [parse_target(_) for _ in targets_raw.split(",")]
+    return targets
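+
+# Example (illustrative): target_str_to_targets("samtools=1.3.1--4,bedtools=2.22")
+# returns [Target("samtools", "1.3.1", "4"), Target("bedtools", "2.22", None)].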
+
+
+def args_to_mull_targets_kwds(args):
+    kwds = {}
+    if hasattr(args, "image_build"):
+        kwds["image_build"] = args.image_build
+    if hasattr(args, "name_override"):
+        kwds["name_override"] = args.name_override
+    if hasattr(args, "namespace"):
+        kwds["namespace"] = args.namespace
+    if hasattr(args, "dry_run"):
+        kwds["dry_run"] = args.dry_run
+    if hasattr(args, "test"):
+        kwds["test"] = args.test
+    if hasattr(args, "channel"):
+        channels = [args.channel]
+        if hasattr(args, "extra_channels"):
+            channels += args.extra_channels.split(",")
+        kwds["channels"] = channels
+    if hasattr(args, "command"):
+        kwds["command"] = args.command
+    if hasattr(args, "repository_template"):
+        kwds["repository_template"] = args.repository_template
+
+    kwds["involucro_context"] = context_from_args(args)
+
+    return kwds
+
+
+def main(argv=None):
+    """Main entry-point for the CLI tool."""
+    parser = arg_parser(argv, globals())
+    add_build_arguments(parser)
+    add_single_image_arguments(parser)
+    parser.add_argument('command', metavar='COMMAND', help='Command (build-and-test, build, all)')
+    parser.add_argument('targets', metavar="TARGETS", default=None, help="Build a single container with specific package(s).")
+    parser.add_argument('--repository-name', dest="repository_name", default=None, help="Name of mulled container (leave blank to auto-generate based on packages - recommended).")
+    parser.add_argument('--test', help='Provide a test command for the container.')
+    args = parser.parse_args()
+    targets = target_str_to_targets(args.targets)
+    sys.exit(mull_targets(targets, **args_to_mull_targets_kwds(args)))
+
+
+__all__ = ("main", )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/lib/galaxy/tools/deps/mulled/mulled_build_channel.py b/lib/galaxy/tools/deps/mulled/mulled_build_channel.py
new file mode 100644
index 0000000..6463d3e
--- /dev/null
+++ b/lib/galaxy/tools/deps/mulled/mulled_build_channel.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+"""Build a mulled images for all recent conda recipe updates that don't have existing images.
+
+Examples:
+
+Build mulled images for recent bioconda changes with:
+
+    mulled-build-channel build
+
+Build, test, and publish images with the following command:
+
+    mulled-build-channel all
+
+See recent changes that would be built with:
+
+    mulled-build-channel list
+
+"""
+from __future__ import print_function
+
+import os
+import sys
+import time
+
+from ._cli import arg_parser
+from .mulled_build import (
+    add_build_arguments,
+    args_to_mull_targets_kwds,
+    build_target,
+    check_output,
+    conda_versions,
+    get_affected_packages,
+    mull_targets,
+)
+from .util import quay_versions, version_sorted
+
+
+def _fetch_repo_data(args):
+    repo_data = args.repo_data
+    channel = args.channel
+    if repo_data is None:
+        repo_data = "%s-repodata.json" % channel
+    if not os.path.exists(repo_data):
+        platform_tag = 'osx-64' if sys.platform == 'darwin' else 'linux-64'
+        check_output("wget --quiet https://conda.anaconda.org/%s/%s/repodata.json.bz2 -O '%s.bz2' && bzip2 -d '%s.bz2'" % (channel, platform_tag, repo_data, repo_data))
+    return repo_data
+
+
+def _new_versions(quay, conda):
+    """Calculate the versions that are in conda but not on quay.io."""
+    sconda = set(conda)
+    squay = set(quay) if quay else set()
+    return sconda - squay
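+
+# Example (illustrative): quay=['1.0--0'], conda=['1.0--0', '1.1--0'] returns
+# {'1.1--0'}, i.e. only the versions not yet published to quay.io.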
+
+
+def run_channel(args, build_last_n_versions=1):
+    """Build list of involucro commands (as shell snippet) to run."""
+    pkgs = get_affected_packages(args)
+    for pkg_name, pkg_tests in pkgs:
+        repo_data = _fetch_repo_data(args)
+        c = conda_versions(pkg_name, repo_data)
+        # only package the most recent N versions
+        c = version_sorted(c)[:build_last_n_versions]
+
+        if not args.force_rebuild:
+            time.sleep(1)
+            q = quay_versions(args.namespace, pkg_name)
+            versions = _new_versions(q, c)
+        else:
+            versions = c
+
+        for tag in versions:
+            target = build_target(pkg_name, tag=tag)
+            targets = [target]
+            mull_targets(targets, test=pkg_tests, **args_to_mull_targets_kwds(args))
+
+
+def get_pkg_names(args):
+    """Print package names that would be affected."""
+    print('\n'.join(pkg_name for pkg_name, pkg_tests in get_affected_packages(args)))
+
+
+def add_channel_arguments(parser):
+    """Add arguments only used if running mulled over a whole conda channel."""
+    parser.add_argument('--repo-data', dest='repo_data', default=None,
+                        help='Published repository data (will be fetched from --channel if not available and written). Defaults to [channel_name]-repodata.json.')
+    parser.add_argument('--diff-hours', dest='diff_hours', default="25",
+                        help='If finding all recently changed recipes, use this number of hours.')
+    parser.add_argument('--recipes-dir', dest="recipes_dir", default="./bioconda-recipes")
+
+
+def main(argv=None):
+    """Main entry-point for the CLI tool."""
+    parser = arg_parser(argv, globals())
+    add_channel_arguments(parser)
+    add_build_arguments(parser)
+    parser.add_argument('command', metavar='COMMAND', help='Command (list, build-and-test, build, all)')
+    parser.add_argument('--targets', dest="targets", default=None, help="Build a single container with specific package(s).")
+    parser.add_argument('--repository-name', dest="repository_name", default=None, help="Name of a single container (leave blank to auto-generate based on packages).")
+    args = parser.parse_args()
+    if args.command == "list":
+        get_pkg_names(args)
+    else:
+        run_channel(args)
+
+
+__all__ = ("main", )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/lib/galaxy/tools/deps/mulled/mulled_build_files.py b/lib/galaxy/tools/deps/mulled/mulled_build_files.py
new file mode 100644
index 0000000..931da76
--- /dev/null
+++ b/lib/galaxy/tools/deps/mulled/mulled_build_files.py
@@ -0,0 +1,79 @@
+"""Build all composite mulled recipes discovered in TSV files.
+
+Use mulled-build-channel to build images for single recipes for a whole conda
+channel. This script instead builds images for combinations of recipes. This
+script can be given a single TSV file or a directory of TSV files to process.
+
+Examples:
+
+Build all recipes discovered in TSV files in a single directory.
+
+    mulled-build-files build
+
+"""
+
+import collections
+import glob
+import os
+
+from ._cli import arg_parser
+from .mulled_build import (
+    add_build_arguments,
+    args_to_mull_targets_kwds,
+    mull_targets,
+    target_str_to_targets,
+)
+
+
+def main(argv=None):
+    """Main entry-point for the CLI tool."""
+    parser = arg_parser(argv, globals())
+    add_build_arguments(parser)
+    parser.add_argument('command', metavar='COMMAND', help='Command (build-and-test, build, all)')
+    parser.add_argument('files', metavar="FILES", default=".",
+                        help="Path to directory (or single file) of TSV files describing composite recipes.")
+    args = parser.parse_args()
+    for targets in generate_targets(args.files):
+        mull_targets(targets, **args_to_mull_targets_kwds(args))
+
+
+def generate_targets(target_source):
+    """Generate all targets from TSV files in specified file or directory."""
+    target_source = os.path.abspath(target_source)
+    if os.path.isdir(target_source):
+        target_source_files = glob.glob(target_source + "/*.tsv")
+    else:
+        target_source_files = [target_source]
+
+    for target_source_file in target_source_files:
+        with open(target_source_file, "r") as f:
+            for line in f:
+                line = line.strip()
+                if not line or line.startswith("#"):
+                    continue
+
+                yield line_to_targets(line)
+
+
+def line_to_targets(line_str):
+    line = _parse_line(line_str)
+    return target_str_to_targets(line)
+
+
+_Line = collections.namedtuple("_Line", ["targets", "image_build", "name_override"])
+
+
+def _parse_line(line_str):
+    line_parts = line_str.split(" ")
+    assert len(line_parts) < 3, "Too many fields in line [%s], expect at most 3 - targets, image build number, and name override." % line_str
+    line_parts += [None] * (3 - len(line_parts))
+    return _Line(*line_parts)
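+
+# Example (illustrative): _parse_line("samtools=1.3.1--4,bedtools=2.22 0")
+# returns _Line(targets="samtools=1.3.1--4,bedtools=2.22", image_build="0",
+# name_override=None).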
+
+
+__all__ = ("main", )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/lib/galaxy/tools/deps/mulled/mulled_build_tool.py b/lib/galaxy/tools/deps/mulled/mulled_build_tool.py
new file mode 100644
index 0000000..fbe353d
--- /dev/null
+++ b/lib/galaxy/tools/deps/mulled/mulled_build_tool.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+"""Build a mulled images for a tool source (Galaxy or CWL tool).
+
+Examples:
+
+Build mulled images for requirements defined in a tool:
+
+    mulled-build-tool build path/to/tool_file.xml
+
+"""
+
+from galaxy.tools.parser import get_tool_source
+
+from ._cli import arg_parser
+from .mulled_build import (
+    add_build_arguments,
+    add_single_image_arguments,
+    args_to_mull_targets_kwds,
+    build_target,
+    mull_targets,
+)
+
+
+def main(argv=None):
+    """Main entry-point for the CLI tool."""
+    parser = arg_parser(argv, globals())
+    add_build_arguments(parser)
+    add_single_image_arguments(parser)
+    parser.add_argument('command', metavar='COMMAND', help='Command (build-and-test, build, all)')
+    parser.add_argument('tool', metavar="TOOL", default=None, help="Path to tool to build mulled image for.")
+    args = parser.parse_args()
+    tool_source = get_tool_source(args.tool)
+    requirements, _ = tool_source.parse_requirements_and_containers()
+    targets = requirements_to_mulled_targets(requirements)
+    mull_targets(targets, **args_to_mull_targets_kwds(args))
+
+
+def requirements_to_mulled_targets(requirements):
+    """Convert Galaxy's representation of requirements into mulled Target objects.
+
+    Only package requirements are retained.
+    """
+    package_requirements = [r for r in requirements if r.type == "package"]
+    targets = [build_target(r.name, r.version) for r in package_requirements]
+    return targets
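+
+# Sketch (hypothetical tool): a requirement declared as
+#     <requirement type="package" version="1.3.1">samtools</requirement>
+# maps to Target("samtools", "1.3.1", None); requirements of other types
+# (e.g. type="set_environment") are dropped.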
+
+
+__all__ = ("main", "requirements_to_mulled_targets")
+
+
+if __name__ == '__main__':
+    main()
diff --git a/lib/galaxy/tools/deps/mulled/mulled_search.py b/lib/galaxy/tools/deps/mulled/mulled_search.py
new file mode 100644
index 0000000..724956a
--- /dev/null
+++ b/lib/galaxy/tools/deps/mulled/mulled_search.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+import sys
+import tempfile
+
+try:
+    import requests
+except ImportError:
+    requests = None
+
+try:
+    from whoosh.fields import Schema
+    from whoosh.fields import TEXT
+    from whoosh.fields import STORED
+    from whoosh.index import create_in
+    from whoosh.qparser import QueryParser
+except ImportError:
+    Schema = TEXT = STORED = create_in = QueryParser = None
+
+QUAY_API_URL = 'https://quay.io/api/v1/repository'
+
+
+class QuaySearch():
+    """
+    Tool to search within a quay organization for a given software name.
+    """
+    def __init__(self, organization):
+        self.index = None
+        self.organization = organization
+
+    def build_index(self):
+        """
+        Create an index to quickly examine the repositories of a given quay.io organization.
+        """
+        # download all information about the repositories from the
+        # given organization in self.organization
+
+        parameters = {'public': 'true', 'namespace': self.organization}
+        r = requests.get(QUAY_API_URL, headers={'Accept-encoding': 'gzip'}, params=parameters,
+                         timeout=12)
+
+        tmp_dir = tempfile.mkdtemp()
+        schema = Schema(title=TEXT(stored=True), content=STORED)
+        self.index = create_in(tmp_dir, schema)
+
+        json_decoder = json.JSONDecoder()
+        decoded_request = json_decoder.decode(r.text)
+        writer = self.index.writer()
+        for repository in decoded_request['repositories']:
+            writer.add_document(title=repository['name'], content=repository['description'])
+        writer.commit()
+
+    def search_repository(self, search_string, non_strict):
+        """
+        Search Docker containers on quay.io.
+        Results are displayed with all available versions,
+        including the complete image name.
+        """
+        # with statement closes searcher after usage.
+        with self.index.searcher() as searcher:
+            search_string = "*%s*" % search_string
+            query = QueryParser("title", self.index.schema).parse(search_string)
+            results = searcher.search(query)
+            if non_strict:
+                # look for spelling errors and use suggestions as a search term too
+                corrector = searcher.corrector("title")
+                suggestions = corrector.suggest(search_string, limit=2)
+
+                # get all repositories with suggested keywords
+                for suggestion in suggestions:
+                    search_string = "*%s*" % suggestion
+                    query = QueryParser("title", self.index.schema).parse(search_string)
+                    results_tmp = searcher.search(query)
+                    results.extend(results_tmp)
+
+            sys.stdout.write("The query \033[1m %s \033[0m resulted in %s result(s).\n" % (search_string, len(results)))
+
+            if non_strict:
+                sys.stdout.write('The search was relaxed and the following search terms were searched: ')
+                sys.stdout.write('\033[1m %s \033[0m\n' % ', '.join(suggestions))
+
+            out = list()
+            for result in results:
+                title = result['title']
+                for version in self.get_additional_repository_information(title):
+                    row = [title]
+                    row.append(version)
+                    out.append(row)
+            if out:
+                col_width = max(len(word) for row in out for word in row) + 2  # padding
+                for row in out:
+                    name = row[0]
+                    version = row[1]
+                    sys.stdout.write("".join(word.ljust(col_width) for word in row) + "docker pull quay.io/%s/%s:%s\n" % (self.organization, name, version))
+            else:
+                sys.stdout.write("No results found for %s in quay.io/%s.\n" % (search_string, self.organization))
+
+    def get_additional_repository_information(self, repository_string):
+        """
+        Download additional information from quay.io to get the tag field,
+        which includes the version number.
+        """
+        url = "%s/%s/%s" % (QUAY_API_URL, self.organization, repository_string)
+        r = requests.get(url, headers={'Accept-encoding': 'gzip'}, timeout=12)
+
+        json_decoder = json.JSONDecoder()
+        decoded_request = json_decoder.decode(r.text)
+        return decoded_request['tags']
+
+
+def main(argv=None):
+    parser = argparse.ArgumentParser(description='Searches in a given quay organization for a repository')
+    parser.add_argument('-o', '--organization', dest='organization_string', default="mulled",
+                        help='Change organization. Default is mulled.')
+    parser.add_argument('--non-strict', dest='non_strict', action="store_true",
+                        help='Activate autocorrection of typos. Lists more results but can be confusing; \
+                        if too many queries are issued, quay.io blocks requests and results can be incomplete.')
+    parser.add_argument('-s', '--search', required=True,
+                        help='The name of the tool you want to search for.')
+    args = parser.parse_args()
+
+    quay = QuaySearch(args.organization_string)
+    quay.build_index()
+
+    quay.search_repository(args.search, args.non_strict)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/lib/galaxy/tools/deps/mulled/util.py b/lib/galaxy/tools/deps/mulled/util.py
new file mode 100644
index 0000000..6ea62f2
--- /dev/null
+++ b/lib/galaxy/tools/deps/mulled/util.py
@@ -0,0 +1,117 @@
+"""Utilities for working with mulled abstractions outside the mulled package."""
+from __future__ import print_function
+
+import collections
+import hashlib
+
+from distutils.version import LooseVersion
+
+try:
+    import requests
+except ImportError:
+    requests = None
+
+
+def quay_versions(namespace, pkg_name):
+    """Get all version tags for a Docker image stored on quay.io for supplied package name."""
+    if requests is None:
+        raise Exception("requets library is unavailable, functionality not available.")
+
+    assert namespace is not None
+    assert pkg_name is not None
+    url = 'https://quay.io/api/v1/repository/%s/%s' % (namespace, pkg_name)
+    response = requests.get(url, timeout=None)
+    data = response.json()
+    if 'error_type' in data and data['error_type'] == "invalid_token":
+        return []
+
+    if 'tags' not in data:
+        raise Exception("Unexpected response from quay.io - not tags description found [%s]" % data)
+
+    return [tag for tag in data['tags'] if tag != 'latest']
+
+
+def mulled_tags_for(namespace, image):
+    """Fetch remote tags available for supplied image name.
+
+    The result will be sorted so newest tags are first.
+    """
+    tags = quay_versions(namespace, image)
+    tags = version_sorted(tags)
+    return tags
+
+
+def split_tag(tag):
+    """Split mulled image name into conda version and conda build."""
+    version, build = tag.split('--', 1)
+    return version, build
+
+
+def version_sorted(elements):
+    """Sort iterable based on loose description of "version" from newest to oldest."""
+    return sorted(elements, key=LooseVersion, reverse=True)
+
+
+Target = collections.namedtuple("Target", ["package_name", "version", "build"])
+
+
+def build_target(package_name, version=None, build=None, tag=None):
+    """Use supplied arguments to build a :class:`Target` object."""
+    if tag is not None:
+        assert version is None
+        assert build is None
+        version, build = split_tag(tag)
+
+    return Target(package_name, version, build)
+
+
+def conda_build_target_str(target):
+    rval = target.package_name
+    if target.version:
+        rval += "=%s" % target.version
+
+        if target.build:
+            rval += "=%s" % target.build
+
+    return rval
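+
+# Round-trip example (illustrative):
+#     build_target("samtools", tag="1.3.1--4")
+#         -> Target(package_name="samtools", version="1.3.1", build="4")
+#     conda_build_target_str(Target("samtools", "1.3.1", "4"))
+#         -> "samtools=1.3.1=4"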
+
+
+def image_name(targets, image_build=None, name_override=None):
+    if name_override is not None:
+        print("WARNING: Overriding mulled image name, auto-detection of 'mulled' package attributes will fail to detect result.")
+        return name_override
+
+    targets = list(targets)
+    if len(targets) == 1:
+        target = targets[0]
+        suffix = ""
+        if target.version is not None:
+            if image_build is not None:
+                print("WARNING: Hard-coding image build instead of using Conda build - this is not recommended.")
+                suffix = image_build
+            else:
+                suffix += ":%s" % target.version
+                build = target.build
+                if build is not None:
+                    suffix += "--%s" % build
+        return "%s%s" % (target.package_name, suffix)
+    else:
+        targets_order = sorted(targets, key=lambda t: t.package_name)
+        requirements_buffer = "\n".join(map(conda_build_target_str, targets_order))
+        m = hashlib.sha1()
+        m.update(requirements_buffer.encode())
+        suffix = "" if not image_build else ":%s" % image_build
+        return "mulled-v1-%s%s" % (m.hexdigest(), suffix)
+
+
+__all__ = (
+    "build_target",
+    "conda_build_target_str",
+    "image_name",
+    "mulled_tags_for",
+    "quay_versions",
+    "split_tag",
+    "Target",
+    "version_sorted",
+)
diff --git a/lib/galaxy/tools/deps/requirements.py b/lib/galaxy/tools/deps/requirements.py
new file mode 100644
index 0000000..0414ea2
--- /dev/null
+++ b/lib/galaxy/tools/deps/requirements.py
@@ -0,0 +1,136 @@
+from galaxy.util import asbool, xml_text
+
+DEFAULT_REQUIREMENT_TYPE = "package"
+DEFAULT_REQUIREMENT_VERSION = None
+
+
+class ToolRequirement( object ):
+    """
+    Represents an external requirement that must be available for the tool to
+    run (for example, a program, package, or library).  Requirements can
+    optionally assert a specific version.
+    """
+    def __init__( self, name=None, type=None, version=None ):
+        self.name = name
+        self.type = type
+        self.version = version
+
+    def to_dict( self ):
+        return dict(name=self.name, type=self.type, version=self.version)
+
+    @staticmethod
+    def from_dict( as_dict ):
+        version = as_dict.get( "version", None )
+        name = as_dict.get( "name", None )
+        type = as_dict.get( "type", None )
+        return ToolRequirement( name=name, type=type, version=version )
+
+    def __eq__(self, other):
+        return self.name == other.name and self.type == other.type and self.version == other.version
+
+
+DEFAULT_CONTAINER_TYPE = "docker"
+DEFAULT_CONTAINER_RESOLVE_DEPENDENCIES = False
+DEFAULT_CONTAINER_SHELL = "/bin/sh"  # Galaxy assumes bash, but containers are usually thinner.
+
+
+class ContainerDescription( object ):
+
+    def __init__(
+        self,
+        identifier=None,
+        type=DEFAULT_CONTAINER_TYPE,
+        resolve_dependencies=DEFAULT_CONTAINER_RESOLVE_DEPENDENCIES,
+        shell=DEFAULT_CONTAINER_SHELL,
+    ):
+        self.identifier = identifier
+        self.type = type
+        self.resolve_dependencies = resolve_dependencies
+        self.shell = shell
+
+    def to_dict( self ):
+        return dict(
+            identifier=self.identifier,
+            type=self.type,
+            resolve_dependencies=self.resolve_dependencies,
+            shell=self.shell,
+        )
+
+    @staticmethod
+    def from_dict( as_dict ):
+        identifier = as_dict["identifier"]
+        type = as_dict.get("type", DEFAULT_CONTAINER_TYPE)
+        resolve_dependencies = as_dict.get("resolve_dependencies", DEFAULT_CONTAINER_RESOLVE_DEPENDENCIES)
+        shell = as_dict.get("shell", DEFAULT_CONTAINER_SHELL)
+        return ContainerDescription(
+            identifier=identifier,
+            type=type,
+            resolve_dependencies=resolve_dependencies,
+            shell=shell,
+        )
+
+
+def parse_requirements_from_dict( root_dict ):
+    requirements = root_dict.get("requirements", [])
+    containers = root_dict.get("containers", [])
+    return map(ToolRequirement.from_dict, requirements), map(ContainerDescription.from_dict, containers)
+
+
+def parse_requirements_from_xml( xml_root ):
+    """
+
+    >>> from xml.etree import ElementTree
+    >>> def load_requirements( contents ):
+    ...     contents_document = '''<tool><requirements>%s</requirements></tool>'''
+    ...     root = ElementTree.fromstring( contents_document % contents )
+    ...     return parse_requirements_from_xml( root )
+    >>> reqs, containers = load_requirements('''<requirement>bwa</requirement>''')
+    >>> reqs[0].name
+    'bwa'
+    >>> reqs[0].version is None
+    True
+    >>> reqs[0].type
+    'package'
+    >>> reqs, containers = load_requirements('''<requirement type="binary" version="1.3.3">cufflinks</requirement>''')
+    >>> reqs[0].name
+    'cufflinks'
+    >>> reqs[0].version
+    '1.3.3'
+    >>> reqs[0].type
+    'binary'
+    """
+    requirements_elem = xml_root.find( "requirements" )
+
+    requirement_elems = []
+    if requirements_elem is not None:
+        requirement_elems = requirements_elem.findall( 'requirement' )
+
+    requirements = []
+    for requirement_elem in requirement_elems:
+        name = xml_text( requirement_elem )
+        type = requirement_elem.get( "type", DEFAULT_REQUIREMENT_TYPE )
+        version = requirement_elem.get( "version", DEFAULT_REQUIREMENT_VERSION )
+        requirement = ToolRequirement( name=name, type=type, version=version )
+        requirements.append( requirement )
+
+    container_elems = []
+    if requirements_elem is not None:
+        container_elems = requirements_elem.findall( 'container' )
+
+    containers = map(container_from_element, container_elems)
+
+    return requirements, containers
+
+
+def container_from_element(container_elem):
+    identifier = xml_text(container_elem)
+    type = container_elem.get("type", DEFAULT_CONTAINER_TYPE)
+    resolve_dependencies = asbool(container_elem.get("resolve_dependencies", DEFAULT_CONTAINER_RESOLVE_DEPENDENCIES))
+    shell = container_elem.get("shell", DEFAULT_CONTAINER_SHELL)
+    container = ContainerDescription(
+        identifier=identifier,
+        type=type,
+        resolve_dependencies=resolve_dependencies,
+        shell=shell,
+    )
+    return container
diff --git a/lib/galaxy/tools/deps/resolvers/__init__.py b/lib/galaxy/tools/deps/resolvers/__init__.py
new file mode 100644
index 0000000..c24c107
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/__init__.py
@@ -0,0 +1,128 @@
+"""The module defines the abstract interface for dealing tool dependency resolution plugins."""
+from abc import (
+    ABCMeta,
+    abstractmethod,
+    abstractproperty,
+)
+
+from galaxy.util.dictifiable import Dictifiable
+
+from ..requirements import ToolRequirement
+
+
+class DependencyResolver(Dictifiable, object):
+    """Abstract description of a technique for resolving container images for tool execution."""
+
+    # Keys for dictification.
+    dict_collection_visible_keys = ['resolver_type', 'resolves_simple_dependencies']
+    # A "simple" dependency is one that does not depend on the the tool
+    # resolving the dependency. Classic tool shed dependencies are non-simple
+    # because the repository install context is used in dependency resolution
+    # so the same requirement tags in different tools will have very different
+    # resolution.
+    disabled = False
+    resolves_simple_dependencies = True
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def resolve( self, name, version, type, **kwds ):
+        """Given inputs describing dependency in the abstract yield a Dependency object.
+
+        The Dependency object describes various attributes (script, bin,
+        version) used to build scripts with the dependency availble. Here
+        script is the env.sh file to source before running a job, if that is
+        not found the bin directory will be appended to the path (if it is
+        not ``None``). Finally, version is the resolved tool dependency
+        version (which may differ from requested version for instance if the
+        request version is 'default'.)
+        """
+
+    def _get_config_option(self, key, dependency_resolver, default=None, config_prefix=None, **kwds):
+        """ Look in resolver-specific settings for option and then fallback to
+        global settings.
+        """
+        global_key = "%s_%s" % (config_prefix, key)
+        if key in kwds:
+            return kwds.get(key)
+        elif global_key in dependency_resolver.extra_config:
+            return dependency_resolver.extra_config.get(global_key)
+        else:
+            return default
+
+
+class ListableDependencyResolver:
+    """ Mix this into a ``DependencyResolver`` and implement to indicate
+    the dependency resolver can iterate over its dependencies and generate
+    requirements.
+    """
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def list_dependencies(self):
+        """ List the "simple" requirements that may be resolved "exact"-ly
+        by this dependency resolver.
+        """
+
+    def _to_requirement(self, name, version=None):
+        return ToolRequirement(name=name, type="package", version=version)
+
+
+class InstallableDependencyResolver:
+    """ Mix this into a ``DependencyResolver`` and implement to indicate
+    the dependency resolver can attempt to install new dependencies.
+    """
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def install_dependency(self, name, version, type, **kwds):
+        """ Attempt to install this dependency if a recipe to do so
+        has been registered in some way.
+        """
+
+
+class Dependency(Dictifiable, object):
+    dict_collection_visible_keys = ['dependency_type', 'exact', 'name', 'version', 'cacheable']
+    __metaclass__ = ABCMeta
+    cacheable = False
+
+    @abstractmethod
+    def shell_commands( self, requirement ):
+        """
+        Return shell commands to enable this dependency.
+        """
+
+    @abstractproperty
+    def exact( self ):
+        """ Return true if version information wasn't discarded to resolve
+        the dependency.
+        """
+
+    @property
+    def resolver_msg(self):
+        """
+        Return a message describing this dependency
+        """
+        return "Using dependency %s version %s of type %s" % (self.name, self.version, self.dependency_type)
+
+
+class NullDependency( Dependency ):
+    dependency_type = None
+    exact = True
+
+    def __init__(self, version=None, name=None):
+        self.version = version
+        self.name = name
+
+    @property
+    def resolver_msg(self):
+        """
+        Return a message describing this dependency
+        """
+        return "Dependency %s not found." % self.name
+
+    def shell_commands( self, requirement ):
+        return None
+
+
+class DependencyException(Exception):
+    pass
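+
+
+# Minimal plugin sketch (hypothetical, not part of upstream): a concrete
+# resolver subclasses DependencyResolver, sets resolver_type, accepts the
+# dependency manager plus keyword configuration, and returns a Dependency
+# (here always a NullDependency, meaning "not resolved"):
+#
+#     class AlwaysNullDependencyResolver(DependencyResolver):
+#         resolver_type = "always_null"
+#
+#         def __init__(self, dependency_manager, **kwds):
+#             pass
+#
+#         def resolve(self, name, version, type, **kwds):
+#             return NullDependency(version=version, name=name)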
diff --git a/lib/galaxy/tools/deps/resolvers/brewed_tool_shed_packages.py b/lib/galaxy/tools/deps/resolvers/brewed_tool_shed_packages.py
new file mode 100644
index 0000000..5585de5
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/brewed_tool_shed_packages.py
@@ -0,0 +1,149 @@
+"""
+This dependency resolver resolves tool shed dependencies (those defined in
+tool_dependencies.xml) installed using Platform Homebrew and converted
+via shed2tap (e.g. https://github.com/jmchilton/homebrew-toolshed).
+"""
+import logging
+import os
+from xml.etree import ElementTree as ET
+
+from .resolver_mixins import (
+    UsesHomebrewMixin,
+    UsesInstalledRepositoriesMixin,
+    UsesToolDependencyDirMixin,
+)
+from ..resolvers import DependencyResolver, NullDependency
+
+log = logging.getLogger(__name__)
+
+
+class HomebrewToolShedDependencyResolver(
+    DependencyResolver,
+    UsesHomebrewMixin,
+    UsesToolDependencyDirMixin,
+    UsesInstalledRepositoriesMixin,
+):
+    resolver_type = "tool_shed_tap"
+
+    def __init__(self, dependency_manager, **kwds):
+        self._init_homebrew(**kwds)
+        self._init_base_path(dependency_manager, **kwds)
+
+    def resolve(self, name, version, type, **kwds):
+        if type != "package":
+            return NullDependency(version=version, name=name)
+        if version is None:
+            return NullDependency(version=version, name=name)
+
+        return self._find_tool_dependencies(name, version, type, **kwds)
+
+    def _find_tool_dependencies(self, name, version, type, **kwds):
+        installed_tool_dependency = self._get_installed_dependency(name, type, version=version, **kwds)
+        if installed_tool_dependency:
+            return self._resolve_from_installed_tool_dependency(name, version, installed_tool_dependency)
+
+        if "tool_dir" in kwds:
+            tool_directory = os.path.abspath(kwds["tool_dir"])
+            tool_dependencies_path = os.path.join(tool_directory, "tool_dependencies.xml")
+            if os.path.exists(tool_dependencies_path):
+                return self._resolve_from_tool_dependencies_path(name, version, tool_dependencies_path)
+
+        return NullDependency(version=version, name=name)
+
+    def _resolve_from_installed_tool_dependency(self, name, version, installed_tool_dependency):
+        tool_shed_repository = installed_tool_dependency.tool_shed_repository
+        recipe_name = build_recipe_name(
+            package_name=name,
+            package_version=version,
+            repository_owner=tool_shed_repository.owner,
+            repository_name=tool_shed_repository.name,
+        )
+        return self._find_dep_default(recipe_name, None)
+
+    def _resolve_from_tool_dependencies_path(self, name, version, tool_dependencies_path):
+        try:
+            raw_dependencies = RawDependencies(tool_dependencies_path)
+        except Exception:
+            log.debug("Failed to parse dependencies in file %s" % tool_dependencies_path)
+            return NullDependency(version=version, name=name)
+
+        raw_dependency = raw_dependencies.find(name, version)
+        if not raw_dependency:
+            return NullDependency(version=version, name=name)
+
+        recipe_name = build_recipe_name(
+            package_name=name,
+            package_version=version,
+            repository_owner=raw_dependency.repository_owner,
+            repository_name=raw_dependency.repository_name
+        )
+        dep = self._find_dep_default(recipe_name, None)
+        return dep
+
+
+class RawDependencies(object):
+
+    def __init__(self, dependencies_file):
+        self.root = ET.parse(dependencies_file).getroot()
+        dependencies = []
+        package_els = self.root.findall("package") or []
+        for package_el in package_els:
+            repository_el = package_el.find("repository")
+            if repository_el is None:
+                continue
+            dependency = RawDependency(self, package_el, repository_el)
+            dependencies.append(dependency)
+        self.dependencies = dependencies
+
+    def find(self, package_name, package_version):
+        target_dependency = None
+
+        for dependency in self.dependencies:
+            if dependency.package_name == package_name and dependency.package_version == package_version:
+                target_dependency = dependency
+                break
+        return target_dependency
+
+
+class RawDependency(object):
+
+    def __init__(self, dependencies, package_el, repository_el):
+        self.dependencies = dependencies
+        self.package_el = package_el
+        self.repository_el = repository_el
+
+    def __repr__(self):
+        temp = "Dependency[package_name=%s,version=%s,dependent_package=%s]"
+        return temp % (
+            self.package_el.attrib["name"],
+            self.package_el.attrib["version"],
+            self.repository_el.attrib["name"]
+        )
+
+    @property
+    def repository_owner(self):
+        return self.repository_el.attrib["owner"]
+
+    @property
+    def repository_name(self):
+        return self.repository_el.attrib["name"]
+
+    @property
+    def package_name(self):
+        return self.package_el.attrib["name"]
+
+    @property
+    def package_version(self):
+        return self.package_el.attrib["version"]
+
+
+def build_recipe_name(package_name, package_version, repository_owner, repository_name):
+    # TODO: Consider baking package_name and package_version into name? (would be more "correct")
+    owner = repository_owner.replace("-", "")
+    name = repository_name
+    name = name.replace("_", "").replace("-", "")
+    base = "%s_%s" % (owner, name)
+    return base
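+
+# Example (illustrative): repository_owner="test-owner" and
+# repository_name="package_bwa_0_7_12" yield "testowner_packagebwa0712".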
+
+
+__all__ = ('HomebrewToolShedDependencyResolver', )
diff --git a/lib/galaxy/tools/deps/resolvers/conda.py b/lib/galaxy/tools/deps/resolvers/conda.py
new file mode 100644
index 0000000..0e4fb35
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/conda.py
@@ -0,0 +1,257 @@
+"""
+This is still an experimental module and there will almost certainly be backward
+incompatible changes coming.
+"""
+
+import logging
+import os
+
+import galaxy.tools.deps.installable
+
+from ..conda_util import (
+    build_isolated_environment,
+    cleanup_failed_install,
+    CondaContext,
+    CondaTarget,
+    install_conda,
+    install_conda_target,
+    installed_conda_targets,
+    is_conda_target_installed,
+    USE_PATH_EXEC_DEFAULT,
+)
+from ..resolvers import (
+    Dependency,
+    DependencyException,
+    DependencyResolver,
+    InstallableDependencyResolver,
+    ListableDependencyResolver,
+    NullDependency,
+)
+
+
+DEFAULT_BASE_PATH_DIRECTORY = "_conda"
+DEFAULT_CONDARC_OVERRIDE = "_condarc"
+DEFAULT_ENSURE_CHANNELS = "conda-forge,r,bioconda,iuc"
+
+log = logging.getLogger(__name__)
+
+
+class CondaDependencyResolver(DependencyResolver, ListableDependencyResolver, InstallableDependencyResolver):
+    dict_collection_visible_keys = DependencyResolver.dict_collection_visible_keys + ['conda_prefix', 'versionless', 'ensure_channels', 'auto_install']
+    resolver_type = "conda"
+
+    def __init__(self, dependency_manager, **kwds):
+        self.versionless = _string_as_bool(kwds.get('versionless', 'false'))
+        self.dependency_manager = dependency_manager
+
+        def get_option(name):
+            return self._get_config_option(name, dependency_manager, config_prefix="conda", **kwds)
+
+        # Conda context options (these define the environment)
+        conda_prefix = get_option("prefix")
+        if conda_prefix is None:
+            conda_prefix = os.path.join(
+                dependency_manager.default_base_path, DEFAULT_BASE_PATH_DIRECTORY
+            )
+        conda_prefix = os.path.abspath(conda_prefix)
+
+        self.conda_prefix_parent = os.path.dirname(conda_prefix)
+
+        # warning is related to conda problem discussed in https://github.com/galaxyproject/galaxy/issues/2537, remove when that is resolved
+        conda_prefix_warning_length = 50
+        if len(conda_prefix) >= conda_prefix_warning_length:
+            log.warning("Conda install prefix '%s' is %d characters long, this can cause problems with package installation, consider setting a shorter prefix (conda_prefix in galaxy.ini)" % (conda_prefix, len(conda_prefix)))
+
+        condarc_override = get_option("condarc_override")
+        if condarc_override is None:
+            condarc_override = os.path.join(
+                dependency_manager.default_base_path, DEFAULT_CONDARC_OVERRIDE
+            )
+
+        copy_dependencies = _string_as_bool(get_option("copy_dependencies"))
+        conda_exec = get_option("exec")
+        debug = _string_as_bool(get_option("debug"))
+        ensure_channels = get_option("ensure_channels")
+        use_path_exec = get_option("use_path_exec")
+        if use_path_exec is None:
+            use_path_exec = USE_PATH_EXEC_DEFAULT
+        else:
+            use_path_exec = _string_as_bool(use_path_exec)
+        if ensure_channels is None:
+            ensure_channels = DEFAULT_ENSURE_CHANNELS
+
+        conda_context = CondaContext(
+            conda_prefix=conda_prefix,
+            conda_exec=conda_exec,
+            debug=debug,
+            ensure_channels=ensure_channels,
+            condarc_override=condarc_override,
+            use_path_exec=use_path_exec,
+            copy_dependencies=copy_dependencies
+        )
+        self.ensure_channels = ensure_channels
+
+        # Conda operations options (these define how resolution will occur)
+        auto_install = _string_as_bool(get_option("auto_install"))
+        self.auto_init = _string_as_bool(get_option("auto_init"))
+        self.conda_context = conda_context
+        self.disabled = not galaxy.tools.deps.installable.ensure_installed(conda_context, install_conda, self.auto_init)
+        self.auto_install = auto_install
+        self.copy_dependencies = copy_dependencies
+
+    def clean(self, **kwds):
+        return self.conda_context.exec_clean()
+
+    def resolve(self, name, version, type, **kwds):
+        # Check for conda just not being there; this way we can enable
+        # conda by default and just do nothing if not configured.
+        if not os.path.isdir(self.conda_context.conda_prefix):
+            return NullDependency(version=version, name=name)
+
+        if type != "package":
+            return NullDependency(version=version, name=name)
+
+        exact = not self.versionless or version is None
+        if self.versionless:
+            version = None
+
+        conda_target = CondaTarget(name, version=version)
+        is_installed = is_conda_target_installed(
+            conda_target, conda_context=self.conda_context
+        )
+
+        job_directory = kwds.get("job_directory", None)
+        if not is_installed and self.auto_install and job_directory:
+            is_installed = self.install_dependency(name=name, version=version, type=type)
+
+        if not is_installed:
+            return NullDependency(version=version, name=name)
+
+        # We have an installed conda_target and a job_directory to send it to.
+        # If dependency is for metadata generation, store environment in conda-metadata-env
+        if kwds.get("metadata", False):
+            conda_env = "conda-metadata-env"
+        else:
+            conda_env = "conda-env"
+
+        if job_directory:
+            conda_environment = os.path.join(job_directory, conda_env)
+        else:
+            conda_environment = None
+
+        return CondaDependency(
+            self.conda_context,
+            conda_environment,
+            exact,
+            name,
+            version
+        )
+
+    def list_dependencies(self):
+        for install_target in installed_conda_targets(self.conda_context):
+            name = install_target.package
+            version = install_target.version
+            yield self._to_requirement(name, version)
+
+    def install_dependency(self, name, version, type, **kwds):
+        "Returns True on (seemingly) successfull installation"
+        if type != "package":
+            log.warning("Cannot install dependencies of type '%s'" % type)
+            return False
+
+        if self.versionless:
+            version = None
+
+        conda_target = CondaTarget(name, version=version)
+
+        is_installed = is_conda_target_installed(
+            conda_target, conda_context=self.conda_context
+        )
+
+        if is_installed:
+            return is_installed
+
+        return_code = install_conda_target(conda_target, conda_context=self.conda_context)
+        if return_code != 0:
+            is_installed = False
+        else:
+            # Recheck if installed
+            is_installed = is_conda_target_installed(
+                conda_target, conda_context=self.conda_context
+            )
+        if not is_installed:
+            log.debug("Removing failed conda install of {}, version '{}'".format(name, version))
+            cleanup_failed_install(conda_target, conda_context=self.conda_context)
+
+        return is_installed
+
+    @property
+    def prefix(self):
+        return self.conda_context.conda_prefix
+
+
+class CondaDependency(Dependency):
+    dict_collection_visible_keys = Dependency.dict_collection_visible_keys + ['environment_path', 'name', 'version']
+    dependency_type = 'conda'
+    cacheable = True
+
+    def __init__(self, conda_context, environment_path, exact, name=None, version=None):
+        self.activate = conda_context.activate
+        self.conda_context = conda_context
+        self.environment_path = environment_path
+        self._exact = exact
+        self._name = name
+        self._version = version
+        self.cache_path = None
+
+    @property
+    def exact(self):
+        return self._exact
+
+    @property
+    def name(self):
+        return self._name
+
+    @property
+    def version(self):
+        return self._version
+
+    def build_cache(self, cache_path):
+        self.set_cache_path(cache_path)
+        self.build_environment()
+
+    def set_cache_path(self, cache_path):
+        self.cache_path = cache_path
+        self.environment_path = cache_path
+
+    def build_environment(self):
+        env_path, exit_code = build_isolated_environment(
+            CondaTarget(self.name, self.version),
+            path=self.environment_path,
+            copy=self.conda_context.copy_dependencies,
+            conda_context=self.conda_context,
+        )
+        if exit_code:
+            if len(os.path.abspath(self.environment_path)) > 79:
+                # TODO: remove this once conda_build version 2 is released and packages have been rebuilt.
+                raise DependencyException("Conda dependency failed to build job environment. "
+                                          "This is most likely a limitation in conda. "
+                                          "You can try to shorten the path to the job_working_directory.")
+            raise DependencyException("Conda dependency seemingly installed but failed to build job environment.")
+
+    def shell_commands(self, requirement):
+        if not self.cache_path:
+            # Build an isolated environment if not using a cached dependency manager
+            self.build_environment()
+        return """[ "$CONDA_DEFAULT_ENV" = "%s" ] || . %s '%s' > conda_activate.log 2>&1 """ % (
+            self.environment_path,
+            self.activate,
+            self.environment_path
+        )
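+
+# The emitted snippet looks like (paths hypothetical):
+#     [ "$CONDA_DEFAULT_ENV" = "/jobs/42/conda-env" ] ||
+#         . /galaxy/_conda/bin/activate '/jobs/42/conda-env' > conda_activate.log 2>&1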
+
+
+def _string_as_bool( value ):
+    return str( value ).lower() == "true"
+
+
+__all__ = ('CondaDependencyResolver', 'DEFAULT_ENSURE_CHANNELS')
diff --git a/lib/galaxy/tools/deps/resolvers/galaxy_packages.py b/lib/galaxy/tools/deps/resolvers/galaxy_packages.py
new file mode 100644
index 0000000..858ff0f
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/galaxy_packages.py
@@ -0,0 +1,129 @@
+import logging
+
+from os import listdir
+from os.path import (
+    basename,
+    exists,
+    isdir,
+    islink,
+    join,
+    realpath,
+)
+
+from .resolver_mixins import UsesToolDependencyDirMixin
+
+from ..resolvers import (
+    Dependency,
+    DependencyResolver,
+    ListableDependencyResolver,
+    NullDependency,
+)
+
+log = logging.getLogger( __name__ )
+
+
+class GalaxyPackageDependency(Dependency):
+    dict_collection_visible_keys = Dependency.dict_collection_visible_keys + ['script', 'path', 'version', 'name']
+    dependency_type = 'galaxy_package'
+
+    def __init__( self, script, path, version, name, exact=True ):
+        self.script = script
+        self.path = path
+        self.version = version
+        self.name = name
+        self._exact = exact
+
+    @property
+    def exact(self):
+        return self._exact
+
+    def shell_commands( self, requirement ):
+        base_path = self.path
+        if self.script is None and base_path is None:
+            log.warning( "Failed to resolve dependency on '%s', ignoring", requirement.name )
+            commands = None
+        elif requirement.type == 'package' and self.script is None:
+            commands = 'PACKAGE_BASE=%s; export PACKAGE_BASE; PATH="%s/bin:$PATH"; export PATH' % ( base_path, base_path )
+        else:
+            commands = 'PACKAGE_BASE=%s; export PACKAGE_BASE; . %s' % ( base_path, self.script )
+        return commands
+
+
+class ToolShedDependency(GalaxyPackageDependency):
+    dependency_type = 'tool_shed_package'
+
+
+class BaseGalaxyPackageDependencyResolver(DependencyResolver, UsesToolDependencyDirMixin):
+    dict_collection_visible_keys = DependencyResolver.dict_collection_visible_keys + ['base_path', 'versionless']
+    dependency_type = GalaxyPackageDependency
+
+    def __init__(self, dependency_manager, **kwds):
+        # Galaxy tool shed requires explicit versions on XML elements;
+        # this is inconvenient for testing or for Galaxy instances not
+        # utilizing the tool shed, so allow a fallback version of the Galaxy
+        # package resolver that will just grab the 'default' version if the
+        # exact version is unavailable.
+        self.versionless = str(kwds.get('versionless', "false")).lower() == "true"
+        self._init_base_path( dependency_manager, **kwds )
+
+    def resolve( self, name, version, type, **kwds ):
+        """
+        Attempt to find a dependency named `name` at version `version`. If version is None, return the "default" version as determined using a
+        symbolic link (if found). Returns a triple of: env_script, base_path, real_version
+        """
+        if version is None or self.versionless:
+            exact = not self.versionless or version is None
+            return self._find_dep_default( name, type=type, exact=exact, **kwds )
+        else:
+            return self._find_dep_versioned( name, version, type=type, **kwds )
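+    # A minimal sketch of the on-disk layout this resolver expects (the
+    # package name and version are hypothetical): versioned subdirectories
+    # under the tool dependency directory, with an optional 'default'
+    # symlink, e.g.
+    #   <base_path>/seqtk/1.2/env.sh      found by _find_dep_versioned
+    #   <base_path>/seqtk/default -> 1.2  found by _find_dep_default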
+
+    def _find_dep_versioned( self, name, version, type='package', **kwds ):
+        base_path = self.base_path
+        path = join( base_path, name, version )
+        return self._galaxy_package_dep(path, version, name, True)
+
+    def _find_dep_default( self, name, type='package', exact=True, **kwds ):
+        base_path = self.base_path
+        path = join( base_path, name, 'default' )
+        if islink( path ):
+            real_path = realpath( path )
+            real_version = basename( real_path )
+            return self._galaxy_package_dep(real_path, real_version, name, exact)
+        else:
+            return NullDependency(version=None, name=name)
+
+    def _galaxy_package_dep( self, path, version, name, exact ):
+        script = join( path, 'env.sh' )
+        if exists( script ):
+            return self.dependency_type(script, path, version, name, exact)
+        elif exists( join( path, 'bin' ) ):
+            return self.dependency_type(None, path, version, name, exact)
+        return NullDependency(version=version, name=name)
+
+
+class GalaxyPackageDependencyResolver(BaseGalaxyPackageDependencyResolver, ListableDependencyResolver):
+    resolver_type = "galaxy_packages"
+
+    def list_dependencies(self):
+        base_path = self.base_path
+        for package_name in listdir(base_path):
+            package_dir = join(base_path, package_name)
+            if isdir(package_dir):
+                for version in listdir(package_dir):
+                    version_dir = join(package_dir, version)
+                    if version == "default":
+                        version = None
+                    valid_dependency = _is_dependency_directory(version_dir)
+                    if valid_dependency:
+                        yield self._to_requirement(package_name, version)
+
+
+def _is_dependency_directory(directory):
+    return exists(join(directory, 'env.sh')) or exists(join(directory, 'bin'))
+
+
+__all__ = (
+    'GalaxyPackageDependency',
+    'GalaxyPackageDependencyResolver',
+    'ToolShedDependency'
+)
diff --git a/lib/galaxy/tools/deps/resolvers/homebrew.py b/lib/galaxy/tools/deps/resolvers/homebrew.py
new file mode 100644
index 0000000..395eb1e
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/homebrew.py
@@ -0,0 +1,56 @@
+"""
+This file implements a brew resolver for Galaxy requirements. In order for Galaxy
+to pick up on recursively defined and versioned brew dependencies recipes should
+be installed using the experimental `brew-vinstall` external command.
+
+More information here:
+
+https://github.com/jmchilton/brew-tests
+https://github.com/Homebrew/homebrew-science/issues/1191
+
+This is still an experimental module and there will almost certainly be backward
+incompatible changes coming.
+"""
+
+
+from .resolver_mixins import UsesHomebrewMixin
+from ..resolvers import DependencyResolver, NullDependency
+
+# TODO: Implement prefer version linked...
+PREFER_VERSION_LINKED = 'linked'
+PREFER_VERSION_LATEST = 'latest'
+UNKNOWN_PREFER_VERSION_MESSAGE_TEMPLATE = "HomebrewDependencyResolver prefer_version must be %s"
+UNKNOWN_PREFER_VERSION_MESSAGE = UNKNOWN_PREFER_VERSION_MESSAGE_TEMPLATE % (PREFER_VERSION_LATEST)
+DEFAULT_PREFER_VERSION = PREFER_VERSION_LATEST
+
+
+class HomebrewDependencyResolver(DependencyResolver, UsesHomebrewMixin):
+    resolver_type = "homebrew"
+
+    def __init__(self, dependency_manager, **kwds):
+        self.versionless = _string_as_bool(kwds.get('versionless', 'false'))
+        self.prefer_version = kwds.get('prefer_version', None)
+
+        if self.prefer_version is None:
+            self.prefer_version = DEFAULT_PREFER_VERSION
+
+        if self.versionless and self.prefer_version not in [PREFER_VERSION_LATEST]:
+            raise Exception(UNKNOWN_PREFER_VERSION_MESSAGE)
+
+        self._init_homebrew(**kwds)
+
+    def resolve(self, name, version, type, **kwds):
+        if type != "package":
+            return NullDependency(version=version, name=name)
+
+        if version is None or self.versionless:
+            return self._find_dep_default(name, version)
+        else:
+            return self._find_dep_versioned(name, version)
+
+
+def _string_as_bool( value ):
+    return str( value ).lower() == "true"
+
+
+__all__ = ('HomebrewDependencyResolver', )
diff --git a/lib/galaxy/tools/deps/resolvers/modules.py b/lib/galaxy/tools/deps/resolvers/modules.py
new file mode 100644
index 0000000..227abb7
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/modules.py
@@ -0,0 +1,183 @@
+"""
+This file contains the outline of an implementation to load environment modules
+(http://modules.sourceforge.net/).
+
+This is a community contributed feature and the core Galaxy team does not
+utilize it, hence support for it will be minimal. The Galaxy team eagerly
+welcomes community contribution and maintenance however.
+"""
+import logging
+from os import environ, pathsep
+from os.path import exists, isdir, join
+from subprocess import PIPE, Popen
+
+from six import StringIO
+
+from ..resolvers import Dependency, DependencyResolver, NullDependency
+
+log = logging.getLogger( __name__ )
+
+DEFAULT_MODULECMD_PATH = "modulecmd"  # Just check path
+DEFAULT_MODULE_PATH = '/usr/share/modules/modulefiles'
+DEFAULT_INDICATOR = '(default)'
+DEFAULT_MODULE_PREFETCH = "true"
+UNKNOWN_FIND_BY_MESSAGE = "ModuleDependencyResolver does not know how to find modules by [%s], find_by should be one of %s"
+
+
+class ModuleDependencyResolver(DependencyResolver):
+    dict_collection_visible_keys = DependencyResolver.dict_collection_visible_keys + ['base_path', 'modulepath']
+    resolver_type = "modules"
+
+    def __init__(self, dependency_manager, **kwds):
+        self.versionless = _string_as_bool(kwds.get('versionless', 'false'))
+        find_by = kwds.get('find_by', 'avail')
+        prefetch = _string_as_bool(kwds.get('prefetch', DEFAULT_MODULE_PREFETCH))
+        self.modulecmd = kwds.get('modulecmd', DEFAULT_MODULECMD_PATH)
+        self.modulepath = kwds.get('modulepath', self.__default_modulepath())
+        self.default_indicator = kwds.get('default_indicator', DEFAULT_INDICATOR)
+        if find_by == 'directory':
+            self.module_checker = DirectoryModuleChecker(self, self.modulepath, prefetch)
+        elif find_by == 'avail':
+            self.module_checker = AvailModuleChecker(self, self.modulepath, prefetch, self.default_indicator)
+        else:
+            raise Exception(UNKNOWN_FIND_BY_MESSAGE % (find_by, ["avail", "directory"]))
+
+    def __default_modulepath(self):
+        if 'MODULEPATH' in environ:
+            module_path = environ['MODULEPATH']
+        elif 'MODULESHOME' in environ:
+            module_path = join(environ['MODULESHOME'], 'modulefiles')
+        else:
+            module_path = DEFAULT_MODULE_PATH
+        return module_path
+
+    def resolve( self, name, version, type, **kwds ):
+        if type != "package":
+            return NullDependency(version=version, name=name)
+
+        if self.__has_module(name, version):
+            return ModuleDependency(self, name, version, exact=True)
+        elif self.versionless and self.__has_module(name, None):
+            return ModuleDependency(self, name, None, exact=False)
+
+        return NullDependency(version=version, name=name)
+
+    def __has_module(self, name, version):
+        return self.module_checker.has_module(name, version)
+
+
+class DirectoryModuleChecker(object):
+    """Finds module by path.
+
+    Searches the paths listed in modulepath to for a file or directory matching the module name.
+    If the version=True, searches for files named module/version."""
+    def __init__(self, module_dependency_resolver, modulepath, prefetch):
+        self.module_dependency_resolver = module_dependency_resolver
+        self.directories = modulepath.split(pathsep)
+        if prefetch:
+            log.warning("Created module dependency resolver with prefetch enabled, but directory module checker does not support this.")
+
+    def has_module(self, module, version):
+        has_module = False
+        for directory in self.directories:
+            module_directory = join(directory, module)
+            has_module_directory = isdir( module_directory )
+            if not version:
+                has_module = has_module_directory or exists(module_directory)  # could be a bare modulefile
+            else:
+                modulefile = join(  module_directory, version )
+                has_modulefile = exists( modulefile )
+                has_module = has_module_directory and has_modulefile
+            if has_module:
+                break
+        return has_module
+
+
+class AvailModuleChecker(object):
+    """Finds modules by searching output of 'module avail'.
+
+    Parses the Environment Modules 'module avail' output, splitting
+    module names into module and version on '/' and discarding a postfix matching default_indicator
+    (by default '(default)'. Matching is done using the module and
+    (if version=True) the module version."""
+    def __init__(self, module_dependency_resolver, modulepath, prefetch, default_indicator=DEFAULT_INDICATOR):
+        self.module_dependency_resolver = module_dependency_resolver
+        self.modulepath = modulepath
+        self.default_indicator = default_indicator
+        if prefetch:
+            prefetched_modules = list(self.__modules())
+        else:
+            prefetched_modules = None
+        self.prefetched_modules = prefetched_modules
+
+    def has_module(self, module, version):
+        module_generator = self.prefetched_modules
+        if module_generator is None:
+            module_generator = self.__modules()
+
+        for module_name, module_version in module_generator:
+            names_match = module == module_name
+            module_match = names_match and (version is None or module_version == version)
+            if module_match:
+                return True
+        return False
+
+    def __modules(self):
+        raw_output = self.__module_avail_output().decode("utf-8")
+        for line in StringIO(raw_output):
+            line = line and line.strip()
+            if not line or line.startswith("-"):
+                continue
+
+            line_modules = line.split()
+            for module in line_modules:
+                if module.endswith(self.default_indicator):
+                    module = module[0:-len(self.default_indicator)].strip()
+                module_parts = module.split('/')
+                module_version = None
+                if len(module_parts) == 2:
+                    module_version = module_parts[1]
+                module_name = module_parts[0]
+                yield module_name, module_version
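+    # For illustration (the module names are hypothetical): an avail line
+    # such as
+    #   seqtk/1.2(default)  seqtk/1.3  bwa
+    # yields ('seqtk', '1.2'), ('seqtk', '1.3') and ('bwa', None).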
+
+    def __module_avail_output(self):
+        avail_command = [self.module_dependency_resolver.modulecmd, 'sh', 'avail']
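+        # Environment Modules prints the 'avail' listing on stderr, hence
+        # communicate()[1] rather than stdout.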
+        return Popen(avail_command, stderr=PIPE, env={'MODULEPATH': self.modulepath}).communicate()[1]
+
+
+class ModuleDependency(Dependency):
+    """Converts module dependencies into shell expressions using modulecmd.
+
+    Uses Environment Modules' 'modulecmd' (specifically 'modulecmd sh load') to
+    convert module specifications into shell expressions for inclusion in
+    the script used to run a tool in Galaxy."""
+    dict_collection_visible_keys = Dependency.dict_collection_visible_keys + ['module_name', 'module_version']
+    dependency_type = 'module'
+
+    def __init__(self, module_dependency_resolver, module_name, module_version=None, exact=True):
+        self.module_dependency_resolver = module_dependency_resolver
+        self.module_name = module_name
+        self.module_version = module_version
+        self._exact = exact
+
+    @property
+    def exact(self):
+        return self._exact
+
+    def shell_commands(self, requirement):
+        module_to_load = self.module_name
+        if self.module_version:
+            module_to_load = '%s/%s' % (self.module_name, self.module_version)
+        command = 'MODULEPATH=%s; export MODULEPATH; eval `%s sh load %s`' % (self.module_dependency_resolver.modulepath,
+                                                                              self.module_dependency_resolver.modulecmd,
+                                                                              module_to_load)
+        return command
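+    # For illustration (the module is hypothetical): for module_name 'seqtk'
+    # and module_version '1.2' with the default modulecmd and modulepath, the
+    # returned command would be:
+    #   MODULEPATH=/usr/share/modules/modulefiles; export MODULEPATH; eval `modulecmd sh load seqtk/1.2`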
+
+
+def _string_as_bool( value ):
+    return str( value ).lower() == "true"
+
+
+__all__ = ('ModuleDependencyResolver', )
diff --git a/lib/galaxy/tools/deps/resolvers/resolver_mixins.py b/lib/galaxy/tools/deps/resolvers/resolver_mixins.py
new file mode 100644
index 0000000..1a330af
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/resolver_mixins.py
@@ -0,0 +1,83 @@
+import os
+
+from ..brew_exts import (
+    build_env_statements,
+    DEFAULT_HOMEBREW_ROOT,
+    recipe_cellar_path,
+)
+from ..resolvers import Dependency, NullDependency
+
+
+class UsesHomebrewMixin:
+
+    def _init_homebrew(self, **kwds):
+        cellar_root = kwds.get('cellar', None)
+        if cellar_root is None:
+            cellar_root = os.path.join(DEFAULT_HOMEBREW_ROOT, "Cellar")
+
+        self.cellar_root = cellar_root
+
+    def _find_dep_versioned(self, name, version, exact=True):
+        recipe_path = recipe_cellar_path(self.cellar_root, name, version)
+        if not os.path.exists(recipe_path) or not os.path.isdir(recipe_path):
+            return NullDependency(version=version, name=name)
+
+        commands = build_env_statements(self.cellar_root, recipe_path, relaxed=True)
+        return HomebrewDependency(commands, exact=exact)
+
+    def _find_dep_default(self, name, version):
+        installed_versions = self._installed_versions(name)
+        if not installed_versions:
+            return NullDependency(version=version, name=name)
+
+        # Just grab newest installed version - may make sense some day to find
+        # the linked version instead.
+        default_version = sorted(installed_versions, reverse=True)[0]
+        return self._find_dep_versioned(name, default_version, exact=version is None)
+
+    def _installed_versions(self, recipe):
+        recipe_base_path = os.path.join(self.cellar_root, recipe)
+        if not os.path.exists(recipe_base_path):
+            return []
+
+        names = os.listdir(recipe_base_path)
+        return [n for n in names if os.path.isdir(os.path.join(recipe_base_path, n))]
+
+
+class UsesToolDependencyDirMixin:
+
+    def _init_base_path(self, dependency_manager, **kwds):
+        self.base_path = os.path.abspath( kwds.get('base_path', dependency_manager.default_base_path) )
+
+
+class UsesInstalledRepositoriesMixin:
+
+    def _get_installed_dependency( self, name, type, version=None, **kwds ):
+        installed_tool_dependencies = kwds.get("installed_tool_dependencies", [])
+        for installed_tool_dependency in (installed_tool_dependencies or []):
+            name_and_type_equal = installed_tool_dependency.name == name and installed_tool_dependency.type == type
+            if version:
+                if name_and_type_equal and installed_tool_dependency.version == version:
+                    return installed_tool_dependency
+            else:
+                if name_and_type_equal:
+                    return installed_tool_dependency
+        return None
+
+
+class HomebrewDependency(Dependency):
+
+    def __init__(self, commands, exact=True):
+        self.commands = commands
+        self._exact = exact
+
+    @property
+    def exact(self):
+        return self._exact
+
+    def shell_commands(self, requirement):
+        raw_commands = self.commands.replace("\n", ";")
+        return raw_commands
+
+    def __repr__(self):
+        return "PlatformBrewDependency[commands=%s]" % self.commands
diff --git a/lib/galaxy/tools/deps/resolvers/tool_shed_packages.py b/lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
new file mode 100644
index 0000000..8b6ddac
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/tool_shed_packages.py
@@ -0,0 +1,66 @@
+from os.path import abspath, exists, join
+
+from .galaxy_packages import BaseGalaxyPackageDependencyResolver, ToolShedDependency
+from .resolver_mixins import UsesInstalledRepositoriesMixin
+from ..resolvers import NullDependency
+
+
+class ToolShedPackageDependencyResolver(BaseGalaxyPackageDependencyResolver, UsesInstalledRepositoriesMixin):
+    resolver_type = "tool_shed_packages"
+    # Resolution of these dependencies depends on more than just the requirement
+    # tag, it depends on the tool installation context - therefore these are
+    # non-simple.
+    dependency_type = ToolShedDependency
+    resolves_simple_dependencies = False
+
+    def __init__(self, dependency_manager, **kwds):
+        super(ToolShedPackageDependencyResolver, self).__init__(dependency_manager, **kwds)
+
+    def _find_dep_versioned( self, name, version, type='package', **kwds ):
+        installed_tool_dependency = self._get_installed_dependency( name, type, version=version, **kwds )
+        if installed_tool_dependency:
+            path = self._get_package_installed_dependency_path( installed_tool_dependency, name, version )
+            return self._galaxy_package_dep(path, version, name, True)
+        else:
+            return NullDependency(version=version, name=name)
+
+    def _find_dep_default( self, name, type='package', **kwds ):
+        if type == 'set_environment' and kwds.get('installed_tool_dependencies', None):
+            installed_tool_dependency = self._get_installed_dependency( name, type, version=None, **kwds )
+            if installed_tool_dependency:
+                dependency = self._get_set_environment_installed_dependency_script_path( installed_tool_dependency, name )
+                is_galaxy_dep = isinstance(dependency, ToolShedDependency)
+                has_script_dep = is_galaxy_dep and dependency.script and dependency.path
+                if has_script_dep:
+                    # Environment settings do not use versions.
+                    return ToolShedDependency(dependency.script, dependency.path, None, name, True)
+        return NullDependency(version=None, name=name)
+
+    def _get_package_installed_dependency_path( self, installed_tool_dependency, name, version ):
+        tool_shed_repository = installed_tool_dependency.tool_shed_repository
+        base_path = self.base_path
+        return join(
+            base_path,
+            name,
+            version,
+            tool_shed_repository.owner,
+            tool_shed_repository.name,
+            tool_shed_repository.installed_changeset_revision
+        )
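+    # For illustration (owner, repository name and revision are
+    # hypothetical): a package 'samtools' at version '0.1.19' installed from
+    # a Tool Shed repository resolves to a path such as
+    #   <base_path>/samtools/0.1.19/devteam/package_samtools_0_1_19/abcdef123456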
+
+    def _get_set_environment_installed_dependency_script_path( self, installed_tool_dependency, name ):
+        tool_shed_repository = installed_tool_dependency.tool_shed_repository
+        base_path = self.base_path
+        path = abspath( join( base_path,
+                              'environment_settings',
+                              name,
+                              tool_shed_repository.owner,
+                              tool_shed_repository.name,
+                              tool_shed_repository.installed_changeset_revision ) )
+        if exists( path ):
+            script = join( path, 'env.sh' )
+            return ToolShedDependency(script, path, None, name, True)
+        return NullDependency(version=None, name=name)
+
+
+__all__ = ('ToolShedPackageDependencyResolver', )
diff --git a/lib/galaxy/tools/deps/resolvers/unlinked_tool_shed_packages.py b/lib/galaxy/tools/deps/resolvers/unlinked_tool_shed_packages.py
new file mode 100644
index 0000000..c02bcd4
--- /dev/null
+++ b/lib/galaxy/tools/deps/resolvers/unlinked_tool_shed_packages.py
@@ -0,0 +1,197 @@
+"""
+Backup resolvers for when dependencies can not be loaded from the database.
+Mainly suited for testing stage.
+
+Ideally all dependencies will be stored in the database
+    when a tool is added from a Tool Shed.
+That should remain the preferred way of locating dependencies.
+
+In cases where that is not possible
+    for example during testing this resolver can act as a backup.
+This resolver looks not just for manually added dependencies
+    but also ones added from a Tool Shed.
+
+This tool is still under development so the default behaviour could change.
+It has been tested when placed in the same directory as galaxy_packages.py
+
+At the time of writing July 3 2015 this resolver has to be plugged in.
+See bottom for instructions on how to add this resolver.
+
+"""
+import logging
+from os import listdir
+from os.path import exists, getmtime, join
+
+from .galaxy_packages import BaseGalaxyPackageDependencyResolver
+from ..resolvers import Dependency, NullDependency
+
+log = logging.getLogger( __name__ )
+
+MANUAL = "manual"
+PREFERRED_OWNERS = MANUAL + ",iuc,devteam"
+
+
+class UnlinkedToolShedPackageDependencyResolver(BaseGalaxyPackageDependencyResolver):
+    dict_collection_visible_keys = BaseGalaxyPackageDependencyResolver.dict_collection_visible_keys + ['preferred_owners', 'select_by_owner']
+    resolver_type = "unlinked_tool_shed_packages"
+
+    def __init__(self, dependency_manager, **kwds):
+        super(UnlinkedToolShedPackageDependencyResolver, self).__init__(dependency_manager, **kwds)
+        # Provide a list of preferred owners whose dependency to use
+        self.preferred_owners = kwds.get('preferred_owners', PREFERRED_OWNERS).split(",")
+        # Option to ignore owner and just use last modified time
+        self.select_by_owner = str(kwds.get('select_by_owner', "true")).lower() != "false"
+
+    def _find_dep_versioned( self, name, version, type='package', **kwds ):
+        try:
+            possibles = self._find_possible_dependencies(name, version, type)
+            if len(possibles) == 0:
+                log.debug("Unable to find dependency,'%s' '%s' '%s'", name, version, type)
+                return NullDependency(version=version, name=name)
+            elif len(possibles) == 1:
+                # Only one candidate found so ignore any preference rules
+                return possibles[0].dependency
+            else:
+                # Pick the preferred one
+                return self._select_preferred_dependency(possibles).dependency
+        except Exception:
+            log.exception("Unexpected error hunting for dependency '%s' '%s' '%s'", name, version, type)
+            return NullDependency(version=version, name=name)
+
+    # Finds all possible dependencies to use.
+    # Should be extended as required.
+    # Returns CandidateDependency objects with data for preference picking.
+    def _find_possible_dependencies(self, name, version, type):
+        possibles = []
+        if exists(self.base_path):
+            path = join( self.base_path, name, version )
+            if exists(path):
+                # First try the way without owner/name/revision
+                package = self._galaxy_package_dep(path, version, name, True)
+                if not isinstance(package, NullDependency):
+                    log.debug("Found dependency '%s' '%s' '%s' at '%s'", name, version, type, path)
+                    possibles.append(CandidateDependency(package, path))
+                # now try with an owner/name/revision
+                for owner in listdir(path):
+                    owner_path = join(path, owner)
+                    for package_name in listdir(owner_path):
+                        if package_name.lower().startswith("package_" + name.lower()):
+                            package_path = join(owner_path, package_name)
+                            for revision in listdir(package_path):
+                                revision_path = join(package_path, revision)
+                                package = self._galaxy_package_dep(revision_path, version, name, True)
+                                if not isinstance(package, NullDependency):
+                                    log.debug("Found dependency '%s' '%s' '%s' at '%s'", name, version, type, revision_path)
+                                    possibles.append(CandidateDependency(package, package_path, owner))
+        return possibles
+
+    def _select_preferred_dependency(self, possibles, by_owner=None):
+        if by_owner is None:
+            by_owner = self.select_by_owner
+        preferred = []
+        if by_owner:
+            for owner in self.preferred_owners:
+                for candidate in possibles:
+                    if candidate.owner == owner:
+                        preferred.append(candidate)
+                if len(preferred) == 1:
+                    log.debug("Picked dependency based on owner '%s'", owner)
+                    return preferred[0]
+                elif len(preferred) > 1:
+                    log.debug("Multiple dependency found with owner '%s'", owner)
+                    break
+        if len(preferred) == 0:
+            preferred = possibles
+        latest_modified = 0
+        for candidate in preferred:
+            modified = getmtime(candidate.path)
+            if latest_modified < modified:
+                latest_candidate = candidate
+                latest_modified = modified
+        log.debug("Picking dependency at '%s' as it was the last modified", latest_candidate.path)
+        return latest_candidate
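+    # For illustration (the owners are hypothetical): with the default
+    # preferred_owners ['manual', 'iuc', 'devteam'], a single candidate owned
+    # by 'iuc' wins over any owned by 'devteam'; several candidates sharing
+    # the first matching owner fall through to the most recently modified path.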
+
+    """
+    #Currently no need has been found for expand the verionsless method
+    #This is an example of how it could be done
+    def _find_dep_default( self, name, type='package', **kwds ):
+        try:
+            possibles = TODO
+            if len(possibles) == 0:
+                log.debug("Unable to find dependency,'%s' default '%s'", name, type)
+                return NullDependency(version=None, name=name)
+            elif len(possibles) == 1:
+                #Only one candidate found so ignore any preference rules
+                return possibles[0].dependency
+            else:
+                #Pick the preferred one
+                return self._select_preferred_dependency(possibles, by_owner=False).dependency
+        except:
+            log.exception("Unexpected error hunting for dependency '%s' default '%s'", name, type)
+            return NullDependency(version=None, name=name)
+    """
+
+
+class CandidateDependency(Dependency):
+    dict_collection_visible_keys = Dependency.dict_collection_visible_keys + ['dependency', 'path', 'owner']
+    dependency_type = 'unlinked_tool_shed_package'
+
+    @property
+    def exact(self):
+        return self.dependency.exact
+
+    def __init__(self, dependency, path, owner=MANUAL):
+        self.dependency = dependency
+        self.path = path
+        self.owner = owner
+
+    def shell_commands( self, requirement ):
+        """
+        Return shell commands to enable this dependency.
+        """
+        return self.dependency.shell_commands( requirement )
+
+
+__all__ = ('UnlinkedToolShedPackageDependencyResolver', )
+
+"""
+At the time of writing (July 3 2015) this resolver has to be plugged in.
+
+Adding resolver instructions:
+
+1. Create a dependency_resolvers_config.xml file:
+<dependency_resolvers>
+  <tool_shed_packages />
+  <galaxy_packages />
+  <galaxy_packages versionless="true" />
+  <unlinked_tool_shed_packages />
+</dependency_resolvers>
+
+
+1a. ALWAYS add <tool_shed_packages /> first!
+
+1b. <galaxy_packages /> is optional, as this resolver will also find
+    dependencies found by that resolver.
+1bi. The current default is to prefer a dependency found that way first!
+1bii. So an alternative version of dependency_resolvers_config.xml:
+<dependency_resolvers>
+  <tool_shed_packages />
+  <unlinked_tool_shed_packages />
+  <unlinked_tool_shed_packages versionless="true" />
+</dependency_resolvers>
+
+1c. See __init__ for optional config values.
+1ci. versionless is currently handled by the superclass
+     GalaxyPackageDependencyResolver.
+
+2. Add a parameter to config.ini:
+dependency_resolvers_config_file = ./config/dependency_resolvers_config.xml
+
+2a. The file name/path can be different.
+2b. The config key must be dependency_resolvers_config_file.
+
+3. For planemo it may be required to specify:
+--dependency_resolvers_config_file (xml file described in 1 above)
+--tool_dependency_dir (root of dependencies typically galaxy/dependency_dir)
+See planemo test --help for more information
+"""
diff --git a/lib/galaxy/tools/deps/views.py b/lib/galaxy/tools/deps/views.py
new file mode 100644
index 0000000..8f23335
--- /dev/null
+++ b/lib/galaxy/tools/deps/views.py
@@ -0,0 +1,141 @@
+from galaxy.exceptions import (
+    NotImplemented,
+    RequestParameterMissingException
+)
+
+
+class DependencyResolversView(object):
+    """ Provide a RESTfulish/JSONy interface to a galaxy.tools.deps.DependencyResolver
+    object. This can be adapted by the Galaxy web framework or other web apps.
+    """
+
+    def __init__(self, app):
+        self._app = app
+
+    def index(self):
+        return [r.to_dict() for r in self._dependency_resolvers]
+
+    def show(self, index):
+        return self._dependency_resolver(index).to_dict()
+
+    def reload(self):
+        self._app.toolbox.reload_dependency_manager()
+
+    def manager_requirements(self):
+        requirements = []
+        for index, resolver in enumerate(self._dependency_resolvers):
+            if not hasattr(resolver, "list_dependencies"):
+                continue
+            for requirement in resolver.list_dependencies():
+                requirements.append({"index": index, "requirement": requirement.to_dict()})
+        return requirements
+
+    def resolver_requirements(self, index):
+        requirements = []
+        resolver = self._dependency_resolver(index)
+        if not hasattr(resolver, "list_dependencies"):
+            raise NotImplemented()
+        for requirement in resolver.list_dependencies():
+            requirements.append(requirement.to_dict())
+        return requirements
+
+    def manager_dependency(self, **kwds):
+        return self._dependency(**kwds)
+
+    def resolver_dependency(self, index, **kwds):
+        return self._dependency(**kwds)
+
+    def install_dependency(self, index=None, **payload):
+        """
+        Installs dependency using highest priority resolver that supports dependency installation
+        (Currently only the conda resolver supports this). If index is given, attempt
+        installation directly using the corresponding resolver.
+        Returns True on success, False on failure.
+        payload is dictionary that must container name, version and type,
+        e.g. {'name': 'numpy', version='1.9.1', type='package'}
+        """
+        if index is not None:
+            return self._install_dependency(index, **payload)
+        else:
+            for index in self.installable_resolvers:
+                success = self._install_dependency(index, **payload)
+                if success:
+                    return success
+            return False
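+    # A hedged usage sketch (the names and values are illustrative): given
+    # the Galaxy application object 'app', a dependency could be installed
+    # through this view with:
+    #   view = DependencyResolversView(app)
+    #   ok = view.install_dependency(name='numpy', version='1.9.1', type='package')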
+
+    def _install_dependency(self, index, **payload):
+        """
+        Resolver install dependency should return True when installation succeeds,
+        False if not successful
+        """
+        resolver = self._dependency_resolver(index)
+        if not hasattr(resolver, "install_dependency"):
+            raise NotImplemented()
+
+        name, version, type, extra_kwds = self._parse_dependency_info(payload)
+        return resolver.install_dependency(
+            name=name,
+            version=version,
+            type=type,
+            **extra_kwds
+        )
+
+    def _dependency(self, index=None, **kwds):
+        if index is not None:
+            index = int(index)
+
+        name, version, type, extra_kwds = self._parse_dependency_info(kwds)
+        resolve_kwds = dict(
+            job_directory=None,
+            index=index,
+            **extra_kwds
+        )
+        dependency = self._dependency_manager.find_dep(
+            name, version=version, type=type, **resolve_kwds
+        )
+        return dependency.to_dict()
+
+    def _parse_dependency_info(self, kwds):
+        extra_kwds = kwds.copy()
+        name = extra_kwds.pop("name", None)
+        if name is None:
+            raise RequestParameterMissingException("Missing 'name' parameter required for resolution.")
+        version = extra_kwds.pop("version", None)
+        type = extra_kwds.pop("type", "package")
+        return name, version, type, extra_kwds
+
+    def _dependency_resolver(self, index):
+        index = int(index)
+        return self._dependency_resolvers[index]
+
+    @property
+    def _dependency_manager(self):
+        return self._app.toolbox.dependency_manager
+
+    @property
+    def _dependency_resolvers(self):
+        dependency_manager = self._dependency_manager
+        dependency_resolvers = dependency_manager.dependency_resolvers
+        return dependency_resolvers
+
+    @property
+    def installable_resolvers(self):
+        """
+        List index for all active resolvers that have the 'install_dependency' attribute
+        """
+        return [index for index, resolver in enumerate(self._dependency_resolvers) if hasattr(resolver, "install_dependency") and not resolver.disabled ]
+
+    def get_requirements_status(self, requested_requirements, installed_tool_dependencies=None):
+        return [self.manager_dependency(installed_tool_dependencies=installed_tool_dependencies, **req) for req in requested_requirements]
+
+    def clean(self, index=None, **kwds):
+        if index is not None:
+            resolver = self._dependency_resolver(index)
+            if not hasattr(resolver, "clean"):
+                raise NotImplemented()
+            resolver.clean()
+            return "OK"
+        else:
+            for resolver in self._dependency_resolvers:
+                if hasattr(resolver, 'clean'):
+                    resolver.clean(**kwds)
+            return "OK"
diff --git a/lib/galaxy/tools/errors.py b/lib/galaxy/tools/errors.py
new file mode 100644
index 0000000..18ca677
--- /dev/null
+++ b/lib/galaxy/tools/errors.py
@@ -0,0 +1,249 @@
+"""
+Functionality for dealing with tool errors.
+"""
+import string
+from galaxy import model, util, web
+import cgi
+from galaxy.util import unicodify
+
+error_report_template = """
+GALAXY TOOL ERROR REPORT
+------------------------
+
+This error report was sent from the Galaxy instance hosted on the server
+"${host}"
+-----------------------------------------------------------------------------
+This is in reference to dataset id ${dataset_id} (${dataset_id_encoded}) from history id ${history_id} (${history_id_encoded})
+-----------------------------------------------------------------------------
+You should be able to view the history containing the related history item (${hda_id_encoded})
+
+${hid}: ${history_item_name}
+
+by logging in as a Galaxy admin user to the Galaxy instance referenced above
+and pointing your browser to the following link.
+
+${history_view_link}
+-----------------------------------------------------------------------------
+The user ${email_str} provided the following information:
+
+${message}
+-----------------------------------------------------------------------------
+info url: ${hda_show_params_link}
+job id: ${job_id} (${job_id_encoded})
+tool id: ${job_tool_id}
+tool version: ${tool_version}
+job pid or drm id: ${job_runner_external_id}
+job tool version: ${job_tool_version}
+-----------------------------------------------------------------------------
+job command line:
+${job_command_line}
+-----------------------------------------------------------------------------
+job stderr:
+${job_stderr}
+-----------------------------------------------------------------------------
+job stdout:
+${job_stdout}
+-----------------------------------------------------------------------------
+job info:
+${job_info}
+-----------------------------------------------------------------------------
+job traceback:
+${job_traceback}
+-----------------------------------------------------------------------------
+(This is an automated message).
+"""
+
+error_report_template_html = """
+<html><head></head>
+    <body>
+<style type="text/css">
+tr:nth-child(even) {background-color: #f2f2f2}
+table{margin: 1em;}
+.mono{font-family: monospace;}
+pre {
+white-space: pre-wrap;
+white-space: -moz-pre-wrap;
+white-space: -pre-wrap;
+white-space: -o-pre-wrap;
+word-wrap: break-word;
+background: #eee;
+border:1px solid black;
+padding:10px;
+}
+</style>
+
+<h1>Galaxy Tool Error Report</h1>
+<span class="sub"><i>from</i> <span class="mono"><a href="${host}">${host}</a></span>
+
+<h3>Error Localization</h3>
+<table>
+    <tbody>
+        <tr><td>Dataset</td><td>${dataset_id} (${dataset_id_encoded})</td></tr>
+        <tr><td>History</td><td><a href="${history_view_link}">${history_id} (${history_id_encoded})</a></td></tr>
+        <tr><td>Failed Job</td><td>${hid}: ${history_item_name} (${hda_id_encoded})</td></tr>
+    </tbody>
+</table>
+
+<h3>User Provided Information</h3>
+
+The user <a href="mailto:${email_str}"><span class="mono">${email_str}</span></a> provided the following information:
+
+<pre>
+${message}
+</pre>
+
+
+<h3>Detailed Job Information</h3>
+
+Job environment and execution information is available at the job <a href="${hda_show_params_link}">Info Page</a>.
+
+<table>
+    <tbody>
+        <tr><td>Job ID</td><td>${job_id} (${job_id_encoded})</td></tr>
+        <tr><td>Tool ID</td><td>${job_tool_id}</td></tr>
+        <tr><td>Tool Version</td><td>${tool_version}</td></tr>
+        <tr><td>Job PID or DRM id</td><td>${job_runner_external_id}</td></tr>
+        <tr><td>Job Tool Version</td><td>${job_tool_version}</td></tr>
+    </tbody>
+</table>
+
+<h3>Job Execution and Failure Information</h3>
+
+<h4>Command Line</h4>
+<pre>
+${job_command_line}
+</pre>
+
+<h4>stderr</h4>
+<pre>
+${job_stderr}
+</pre>
+
+<h4>stdout</h4>
+<pre>
+${job_stdout}
+</pre>
+
+<h4>Job Information</h4>
+<pre>
+${job_info}
+</pre>
+
+<h4>Job Traceback</h4>
+<pre>
+${job_traceback}
+</pre>
+
+This is an automated message. Do not reply to this address.
+</body></html>
+"""
+
+
+class ErrorReporter( object ):
+    def __init__( self, hda, app ):
+        # Get the dataset
+        sa_session = app.model.context
+        if not isinstance( hda, model.HistoryDatasetAssociation ):
+            hda_id = hda
+            try:
+                hda = sa_session.query( model.HistoryDatasetAssociation ).get( hda_id )
+                assert hda is not None, ValueError( "No HDA yet" )
+            except Exception:
+                hda = sa_session.query( model.HistoryDatasetAssociation ).get( app.security.decode_id( hda_id ) )
+        assert isinstance( hda, model.HistoryDatasetAssociation ), ValueError( "Bad value provided for HDA (%s)." % ( hda ) )
+        self.hda = hda
+        # Get the associated job
+        self.job = hda.creating_job
+        self.app = app
+        self.tool_id = self.job.tool_id
+        self.report = None
+
+    def _can_access_dataset( self, user ):
+        if user:
+            roles = user.all_roles()
+        else:
+            roles = []
+        return self.app.security_agent.can_access_dataset( roles, self.hda.dataset )
+
+    def create_report( self, user, email='', message='', **kwd ):
+        hda = self.hda
+        job = self.job
+        host = web.url_for( '/', qualified=True )
+        history_id_encoded = self.app.security.encode_id( hda.history_id )
+        history_view_link = web.url_for( controller="history", action="view", id=history_id_encoded, qualified=True )
+        hda_id_encoded = self.app.security.encode_id( hda.id )
+        hda_show_params_link = web.url_for( controller="dataset", action="show_params", dataset_id=hda_id_encoded, qualified=True )
+        # Build the email message
+        if user and user.email != email:
+            email_str = "'%s' (providing preferred contact email '%s')" % (user.email, email)
+        else:
+            email_str = "'%s'" % (email or 'anonymously')
+
+        report_variables = dict(
+            host=host,
+            dataset_id_encoded=self.app.security.encode_id( hda.dataset_id ),
+            dataset_id=hda.dataset_id,
+            history_id_encoded=history_id_encoded,
+            history_id=hda.history_id,
+            hda_id_encoded=hda_id_encoded,
+            hid=hda.hid,
+            history_item_name=hda.get_display_name(),
+            history_view_link=history_view_link,
+            hda_show_params_link=hda_show_params_link,
+            job_id_encoded=self.app.security.encode_id( job.id ),
+            job_id=job.id,
+            tool_version=job.tool_version,
+            job_tool_id=job.tool_id,
+            job_tool_version=hda.tool_version,
+            job_runner_external_id=job.job_runner_external_id,
+            job_command_line=job.command_line,
+            job_stderr=util.unicodify( job.stderr ),
+            job_stdout=util.unicodify( job.stdout ),
+            job_info=util.unicodify( job.info ),
+            job_traceback=util.unicodify( job.traceback ),
+            email_str=email_str,
+            message=util.unicodify( message )
+        )
+
+        self.report = string.Template( error_report_template ).safe_substitute( report_variables )
+
+        # Escape all of the content for use in the HTML report
+        for parameter in report_variables.keys():
+            if report_variables[parameter] is not None:
+                report_variables[parameter] = cgi.escape(unicodify(report_variables[parameter]))
+
+        self.html_report = string.Template( error_report_template_html ).safe_substitute( report_variables )
+
+    def _send_report( self, user, email=None, message=None, **kwd ):
+        return self.report
+
+    def send_report( self, user, email=None, message=None, **kwd ):
+        if self.report is None:
+            self.create_report( user, email=email, message=message, **kwd )
+        return self._send_report( user, email=email, message=message, **kwd )
+
+
+class EmailErrorReporter( ErrorReporter ):
+    def _send_report( self, user, email=None, message=None, **kwd ):
+        smtp_server = self.app.config.smtp_server
+        assert smtp_server, ValueError( "Mail is not configured for this galaxy instance" )
+        to_address = self.app.config.error_email_to
+        assert to_address, ValueError( "Error reporting has been disabled for this galaxy instance" )
+
+        frm = to_address
+        # Check email a bit
+        email = email or ''
+        email = email.strip()
+        parts = email.split()
+        if len( parts ) == 1 and len( email ) > 0 and self._can_access_dataset( user ):
+            to = to_address + ", " + email
+        else:
+            to = to_address
+        subject = "Galaxy tool error report from %s" % email
+        try:
+            subject = "%s (%s)" % ( subject, self.app.toolbox.get_tool( self.job.tool_id, self.job.tool_version ).old_id )
+        except Exception:
+            pass
+
+        # Send it
+        return util.send_mail( frm, to, subject, self.report, self.app.config, html=self.html_report )
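+# A hedged usage sketch (the argument values are illustrative): given an HDA
+# (or its encoded id) and the application object, a report could be emailed
+# with:
+#   reporter = EmailErrorReporter(hda, app)
+#   reporter.send_report(user, email='user@example.org', message='Tool failed unexpectedly')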
diff --git a/lib/galaxy/tools/evaluation.py b/lib/galaxy/tools/evaluation.py
new file mode 100644
index 0000000..e0a2e32
--- /dev/null
+++ b/lib/galaxy/tools/evaluation.py
@@ -0,0 +1,585 @@
+import json
+import os
+import tempfile
+from six import string_types
+
+from galaxy import model
+from galaxy.util.object_wrapper import wrap_with_safe_string
+from galaxy.util.bunch import Bunch
+from galaxy.util.none_like import NoneDataset
+from galaxy.util.template import fill_template
+from galaxy.tools.wrappers import (
+    ToolParameterValueWrapper,
+    DatasetFilenameWrapper,
+    DatasetListWrapper,
+    DatasetCollectionWrapper,
+    SelectToolParameterWrapper,
+    InputValueWrapper,
+    RawObjectWrapper
+)
+from galaxy.tools.parameters.basic import (
+    DataToolParameter,
+    DataCollectionToolParameter,
+    SelectToolParameter,
+)
+from galaxy.tools.parameters import wrapped_json, visit_input_values
+from galaxy.tools.parameters.grouping import Conditional, Repeat, Section
+from galaxy.tools import global_tool_errors
+from galaxy.jobs.datasets import dataset_path_rewrites
+from galaxy.work.context import WorkRequestContext
+import logging
+log = logging.getLogger( __name__ )
+
+
+class ToolEvaluator( object ):
+    """ An abstraction linking together a tool and a job runtime to evaluate
+    tool inputs in an isolated, testable manner.
+    """
+
+    def __init__( self, app, tool, job, local_working_directory ):
+        self.app = app
+        self.job = job
+        self.tool = tool
+        self.local_working_directory = local_working_directory
+
+    def set_compute_environment( self, compute_environment, get_special=None ):
+        """
+        Set up the compute environment and establish the outline of the param_dict
+        for evaluating command and config Cheetah templates.
+        """
+        self.compute_environment = compute_environment
+        self.unstructured_path_rewriter = compute_environment.unstructured_path_rewriter()
+
+        job = self.job
+        incoming = dict( [ ( p.name, p.value ) for p in job.parameters ] )
+        incoming = self.tool.params_from_strings( incoming, self.app )
+
+        # Full parameter validation
+        request_context = WorkRequestContext( app=self.app, user=job.history and job.history.user, history=job.history )
+
+        def validate_inputs( input, value, context, **kwargs ):
+            value = input.from_json( value, request_context, context )
+            input.validate( value, request_context )
+        visit_input_values( self.tool.inputs, incoming, validate_inputs )
+
+        # Restore input / output data lists
+        inp_data = dict( [ ( da.name, da.dataset ) for da in job.input_datasets ] )
+        out_data = dict( [ ( da.name, da.dataset ) for da in job.output_datasets ] )
+        inp_data.update( [ ( da.name, da.dataset ) for da in job.input_library_datasets ] )
+        out_data.update( [ ( da.name, da.dataset ) for da in job.output_library_datasets ] )
+
+        out_collections = dict( [ ( obj.name, obj.dataset_collection_instance ) for obj in job.output_dataset_collection_instances ] )
+        out_collections.update( [ ( obj.name, obj.dataset_collection ) for obj in job.output_dataset_collections ] )
+
+        if get_special:
+
+            # Set up output dataset association for export history jobs. Because job
+            # uses a Dataset rather than an HDA or LDA, it's necessary to set up a
+            # fake dataset association that provides the needed attributes for
+            # preparing a job.
+            class FakeDatasetAssociation( object ):
+                def __init__( self, dataset=None ):
+                    self.dataset = dataset
+                    self.file_name = dataset.file_name
+                    self.metadata = dict()
+                    self.children = []
+
+            special = get_special()
+            if special:
+                out_data[ "output_file" ] = FakeDatasetAssociation( dataset=special.dataset )
+
+        # These can be passed on the command line if wanted as $__user_*__
+        incoming.update( model.User.user_template_environment( job.history and job.history.user ) )
+
+        # Build params, done before hook so hook can use
+        param_dict = self.build_param_dict(
+            incoming,
+            inp_data,
+            out_data,
+            output_collections=out_collections,
+            output_paths=compute_environment.output_paths(),
+            job_working_directory=compute_environment.working_directory(),
+            input_paths=compute_environment.input_paths()
+        )
+
+        # Certain tools require tasks to be completed prior to job execution
+        # ( this used to be performed in the "exec_before_job" hook, but hooks are deprecated ).
+        self.tool.exec_before_job( self.app, inp_data, out_data, param_dict )
+        # Run the before queue ("exec_before_job") hook
+        self.tool.call_hook( 'exec_before_job', self.app, inp_data=inp_data,
+                             out_data=out_data, tool=self.tool, param_dict=incoming)
+
+        self.param_dict = param_dict
+
+    def build_param_dict( self, incoming, input_datasets, output_datasets, output_collections, output_paths, job_working_directory, input_paths=[] ):
+        """
+        Build the dictionary of parameters for substituting into the command
+        line. Each value is wrapped in a `InputValueWrapper`, which allows
+        all the attributes of the value to be used in the template, *but*
+        when the __str__ method is called it actually calls the
+        `to_param_dict_string` method of the associated input.
+        """
+        param_dict = dict()
+
+        def input():
+            raise SyntaxError("Unbound variable input.")  # Don't let $input hang Python evaluation process.
+
+        param_dict["input"] = input
+
+        param_dict.update(self.tool.template_macro_params)
+        # All parameters go into the param_dict
+        param_dict.update( incoming )
+
+        input_dataset_paths = dataset_path_rewrites( input_paths )
+        self.__populate_wrappers(param_dict, input_datasets, input_dataset_paths, job_working_directory)
+        self.__populate_input_dataset_wrappers(param_dict, input_datasets, input_dataset_paths)
+        self.__populate_output_dataset_wrappers(param_dict, output_datasets, output_paths, job_working_directory)
+        self.__populate_output_collection_wrappers(param_dict, output_collections, output_paths, job_working_directory)
+        self.__populate_unstructured_path_rewrites(param_dict)
+        # Call param dict sanitizer, before non-job params are added, as we don't want to sanitize filenames.
+        self.__sanitize_param_dict( param_dict )
+        # Parameters added after this line are not sanitized
+        self.__populate_non_job_params(param_dict)
+
+        # Return the dictionary of parameters
+        return param_dict
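+    # For illustration (the tool and parameter names are hypothetical): after
+    # this step a Cheetah command template such as
+    #   seqtk seq $input1 > $output1
+    # can be filled in, because $input1 and $output1 resolve to dataset
+    # wrappers whose string value is the (possibly rewritten) dataset path.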
+
+    def __walk_inputs(self, inputs, input_values, func):
+
+        def do_walk( inputs, input_values ):
+            """
+            Wraps parameters as necessary.
+            """
+            for input in inputs.itervalues():
+                if isinstance( input, Repeat ):
+                    for d in input_values[ input.name ]:
+                        do_walk( input.inputs, d )
+                elif isinstance( input, Conditional ):
+                    values = input_values[ input.name ]
+                    current = values["__current_case__"]
+                    func( values, input.test_param )
+                    do_walk( input.cases[current].inputs, values )
+                elif isinstance( input, Section ):
+                    values = input_values[ input.name ]
+                    do_walk( input.inputs, values )
+                else:
+                    func( input_values, input )
+
+        do_walk( inputs, input_values )
+
+    def __populate_wrappers(self, param_dict, input_datasets, input_dataset_paths, job_working_directory):
+
+        def wrap_input( input_values, input ):
+            value = input_values[ input.name ]
+            if isinstance( input, DataToolParameter ) and input.multiple:
+                dataset_instances = DatasetListWrapper.to_dataset_instances( value )
+                input_values[ input.name ] = \
+                    DatasetListWrapper( job_working_directory,
+                                        dataset_instances,
+                                        dataset_paths=input_dataset_paths,
+                                        datatypes_registry=self.app.datatypes_registry,
+                                        tool=self.tool,
+                                        name=input.name )
+
+            elif isinstance( input, DataToolParameter ):
+                # FIXME: We're populating param_dict with conversions when
+                #        wrapping values, this should happen as a separate
+                #        step before wrapping (or call this wrapping step
+                #        something more generic) (but iterating this same
+                #        list twice would be wasteful)
+                # Add explicit conversions by name to current parent
+                for conversion_name, conversion_extensions, conversion_datatypes in input.conversions:
+                    # If we are at building cmdline step, then converters
+                    # have already executed
+                    conv_ext, converted_dataset = input_values[ input.name ].find_conversion_destination( conversion_datatypes )
+                    # When dealing with optional inputs, we'll provide a
+                    # valid extension to be used for None converted dataset
+                    if not conv_ext:
+                        conv_ext = conversion_extensions[0]
+                    # input_values[ input.name ] is None when optional
+                    # dataset, 'conversion' of optional dataset should
+                    # create wrapper around NoneDataset for converter output
+                    if input_values[ input.name ] and not converted_dataset:
+                        # Input that converter is based from has a value,
+                        # but converted dataset does not exist
+                        raise Exception( 'A path for explicit datatype conversion has not been found: %s --/--> %s'
+                                         % ( input_values[ input.name ].extension, conversion_extensions ) )
+                    else:
+                        # Trick wrapper into using target conv ext (when
+                        # None) without actually being a tool parameter
+                        input_values[ conversion_name ] = \
+                            DatasetFilenameWrapper( converted_dataset,
+                                                    datatypes_registry=self.app.datatypes_registry,
+                                                    tool=Bunch( conversion_name=Bunch( extensions=conv_ext ) ),
+                                                    name=conversion_name )
+                # Wrap actual input dataset
+                dataset = input_values[ input.name ]
+                wrapper_kwds = dict(
+                    datatypes_registry=self.app.datatypes_registry,
+                    tool=self,
+                    name=input.name
+                )
+                if dataset:
+                    # A None dataset does not have a filename
+                    real_path = dataset.file_name
+                    if real_path in input_dataset_paths:
+                        wrapper_kwds[ "dataset_path" ] = input_dataset_paths[ real_path ]
+                identifier_key = identifier_key_dict.get(dataset, None)
+                if identifier_key:
+                    element_identifier = param_dict.get(identifier_key, None)
+                    if element_identifier:
+                        wrapper_kwds[ "identifier" ] = element_identifier
+                input_values[ input.name ] = \
+                    DatasetFilenameWrapper( dataset, **wrapper_kwds )
+            elif isinstance( input, DataCollectionToolParameter ):
+                dataset_collection = value
+                wrapper_kwds = dict(
+                    datatypes_registry=self.app.datatypes_registry,
+                    dataset_paths=input_dataset_paths,
+                    tool=self,
+                    name=input.name
+                )
+                wrapper = DatasetCollectionWrapper(
+                    job_working_directory,
+                    dataset_collection,
+                    **wrapper_kwds
+                )
+                input_values[ input.name ] = wrapper
+            elif isinstance( input, SelectToolParameter ):
+                input_values[ input.name ] = SelectToolParameterWrapper(
+                    input, value, self.app, other_values=param_dict, path_rewriter=self.unstructured_path_rewriter )
+            else:
+                input_values[ input.name ] = InputValueWrapper(
+                    input, value, param_dict )
+
+        # HACK: only wrap if check_values is not false; this deals with external
+        #       tools where the inputs don't even get passed through. These
+        #       tools (e.g. UCSC) should really be handled in a special way.
+        if self.tool.check_values:
+            identifier_key_dict = dict((v, "%s|__identifier__" % k) for k, v in input_datasets.iteritems())  # allows lookup of identifier through HDA.
+            self.__walk_inputs( self.tool.inputs, param_dict, wrap_input )
+
+    def __populate_input_dataset_wrappers(self, param_dict, input_datasets, input_dataset_paths):
+        # TODO: Update this method for dataset collections? Need to test. -John.
+
+        # FIXME: when self.check_values==True, input datasets are being wrapped
+        #        twice (above and below, creating 2 separate
+        #        DatasetFilenameWrapper objects - the first is overwritten by
+        #        the second). Is this necessary? If we get rid of this way to
+        #        access children, can we stop this redundancy, or is there
+        #        another reason for it?
+        # - Only necessary when self.check_values is False (== external dataset
+        #   tool? can this be abstracted out as part of being a datasource tool?)
+        # - But we still ALWAYS want to wrap input datasets (this should be
+        #   checked to prevent the overhead of creating a new object?)
+        # Additionally, datasets go in the param dict. We wrap them such that
+        # if the bare variable name is used it returns the filename (for
+        # backwards compatibility). We also add any child datasets to the
+        # param dict encoded as:
+        #   "_CHILD___{dataset_name}___{child_designation}",
+        # but this should be considered DEPRECATED; instead use:
+        #   $dataset.get_child( 'name' ).filename
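+        # For example, a child with designation 'bai' on an input named
+        # 'input1' would appear as param_dict[ '_CHILD___input1___bai' ]
+        # (names here are illustrative).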
+        for name, data in input_datasets.items():
+            param_dict_value = param_dict.get(name, None)
+            if not isinstance(param_dict_value, (DatasetFilenameWrapper, DatasetListWrapper)):
+                wrapper_kwds = dict(
+                    datatypes_registry=self.app.datatypes_registry,
+                    tool=self,
+                    name=name,
+                )
+                if data:
+                    real_path = data.file_name
+                    if real_path in input_dataset_paths:
+                        dataset_path = input_dataset_paths[ real_path ]
+                        wrapper_kwds[ 'dataset_path' ] = dataset_path
+                param_dict[name] = DatasetFilenameWrapper( data, **wrapper_kwds )
+            if data:
+                for child in data.children:
+                    param_dict[ "_CHILD___%s___%s" % ( name, child.designation ) ] = DatasetFilenameWrapper( child )
+
+    def __populate_output_collection_wrappers(self, param_dict, output_collections, output_paths, job_working_directory):
+        output_dataset_paths = dataset_path_rewrites( output_paths )
+        tool = self.tool
+        for name, out_collection in output_collections.items():
+            if name not in tool.output_collections:
+                continue
+                # message_template = "Name [%s] not found in tool.output_collections %s"
+                # message = message_template % ( name, tool.output_collections )
+                # raise AssertionError( message )
+
+            wrapper_kwds = dict(
+                datatypes_registry=self.app.datatypes_registry,
+                dataset_paths=output_dataset_paths,
+                tool=tool,
+                name=name
+            )
+            wrapper = DatasetCollectionWrapper(
+                job_working_directory,
+                out_collection,
+                **wrapper_kwds
+            )
+            param_dict[ name ] = wrapper
+            # TODO: Handle nested collections...
+            output_collection_def = tool.output_collections[ name ]
+            for element_identifier, output_def in output_collection_def.outputs.items():
+                if not output_def.implicit:
+                    dataset_wrapper = wrapper[ element_identifier ]
+                    param_dict[ output_def.name ] = dataset_wrapper
+                    log.info("Updating param_dict for %s with %s" % (output_def.name, dataset_wrapper) )
+
+    def __populate_output_dataset_wrappers(self, param_dict, output_datasets, output_paths, job_working_directory):
+        output_dataset_paths = dataset_path_rewrites( output_paths )
+        for name, hda in output_datasets.items():
+            # Write outputs to the working directory (for security purposes)
+            # if desired.
+            real_path = hda.file_name
+            if real_path in output_dataset_paths:
+                dataset_path = output_dataset_paths[ real_path ]
+                param_dict[name] = DatasetFilenameWrapper( hda, dataset_path=dataset_path )
+                try:
+                    open( dataset_path.false_path, 'w' ).close()
+                except EnvironmentError:
+                    pass  # May well not exist - e.g. Pulsar.
+            else:
+                param_dict[name] = DatasetFilenameWrapper( hda )
+            # Provide access to a path to store additional files
+            # TODO: path munging for cluster/dataset server relocatability
+            param_dict[name].files_path = os.path.abspath(os.path.join( job_working_directory, "dataset_%s_files" % (hda.dataset.id) ))
+            for child in hda.children:
+                param_dict[ "_CHILD___%s___%s" % ( name, child.designation ) ] = DatasetFilenameWrapper( child )
+        for out_name, output in self.tool.outputs.iteritems():
+            if out_name not in param_dict and output.filters:
+                # Assume the reason we lack this output is because a filter
+                # failed to pass; for tool writing convenience, provide a
+                # NoneDataset
+                ext = getattr( output, "format", None )  # populate only for output datasets (not collections)
+                param_dict[ out_name ] = NoneDataset( datatypes_registry=self.app.datatypes_registry, ext=ext )
+
+    def __populate_non_job_params(self, param_dict):
+        # -- Add useful attributes/functions for use in creating command line.
+
+        # Function for querying a data table.
+        def get_data_table_entry(table_name, query_attr, query_val, return_attr):
+            """
+            Queries and returns an entry in a data table.
+            """
+
+            if table_name in self.app.tool_data_tables:
+                return self.app.tool_data_tables[ table_name ].get_entry( query_attr, query_val, return_attr )
+
+        param_dict['__tool_directory__'] = self.compute_environment.tool_directory()
+        param_dict['__get_data_table_entry__'] = get_data_table_entry
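+        # For example, a tool's Cheetah template could resolve a reference
+        # path from a data table (the table and column names below are
+        # illustrative, not defined by this module):
+        #
+        #     #set $ref = $__get_data_table_entry__( 'all_fasta', 'dbkey', $input.dbkey, 'path' )
+        #     my_tool --reference '$ref'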
+
+        # We add access to app here, this allows access to app.config, etc
+        param_dict['__app__'] = RawObjectWrapper( self.app )
+        # More convenient access to app.config.new_file_path; we don't need to
+        # wrap a string, but this method of generating additional datasets
+        # should be considered DEPRECATED
+        param_dict['__new_file_path__'] = self.compute_environment.new_file_path()
+        # The following points to location (xxx.loc) files which are pointers
+        # to locally cached data
+        param_dict['__tool_data_path__'] = param_dict['GALAXY_DATA_INDEX_DIR'] = self.app.config.tool_data_path
+        # For the upload tool, we need to know the root directory and the
+        # datatypes conf path, so we can load the datatypes registry
+        param_dict['__root_dir__'] = param_dict['GALAXY_ROOT_DIR'] = os.path.abspath( self.app.config.root )
+        param_dict['__datatypes_config__'] = param_dict['GALAXY_DATATYPES_CONF_FILE'] = self.app.datatypes_registry.integrated_datatypes_configs
+        param_dict['__admin_users__'] = self.app.config.admin_users
+        param_dict['__user__'] = RawObjectWrapper( param_dict.get( '__user__', None ) )
+
+    def __populate_unstructured_path_rewrites(self, param_dict):
+
+        def rewrite_unstructured_paths( input_values, input ):
+            if isinstance( input, SelectToolParameter ):
+                input_values[ input.name ] = SelectToolParameterWrapper(
+                    input, input_values[ input.name ], self.app, other_values=param_dict, path_rewriter=self.unstructured_path_rewriter )
+
+        if not self.tool.check_values and self.unstructured_path_rewriter:
+            # The tools weren't "wrapped" yet, but need to be in order to get
+            # the paths rewritten.
+            self.__walk_inputs( self.tool.inputs, param_dict, rewrite_unstructured_paths )
+
+    def __sanitize_param_dict( self, param_dict ):
+        """
+        Sanitize all values that will be substituted on the command line, with the exception of ToolParameterValueWrappers,
+        which already have their own specific sanitization rules; special-cased named values are also excluded.
+        We will only examine the first level for values to skip; the wrapping function will recurse as necessary.
+
+        Note: this method follows the style of the similar populate calls, in that param_dict is modified in-place.
+        """
+        # chromInfo is a filename, do not sanitize it.
+        skip = [ 'chromInfo' ] + self.tool.template_macro_params.keys()
+        if not self.tool or not self.tool.options or self.tool.options.sanitize:
+            for key, value in param_dict.items():
+                if key not in skip:
+                    # Remove key so that new wrapped object will occupy key slot
+                    del param_dict[key]
+                    # And replace with new wrapped key
+                    param_dict[ wrap_with_safe_string( key, no_wrap_classes=ToolParameterValueWrapper ) ] = wrap_with_safe_string( value, no_wrap_classes=ToolParameterValueWrapper )
+
+    def build( self ):
+        """
+        Build runtime description of job to execute, evaluate command and
+        config templates corresponding to this tool with these inputs on this
+        compute environment.
+        """
+        self.extra_filenames = []
+        self.command_line = None
+
+        try:
+            self.__build_config_files( )
+        except Exception as e:
+            # capture and log parsing errors
+            global_tool_errors.add_error(self.tool.config_file, "Building Config Files", e)
+            raise e
+        try:
+            self.__build_param_file( )
+        except Exception as e:
+            # capture and log parsing errors
+            global_tool_errors.add_error(self.tool.config_file, "Building Param File", e)
+            raise e
+        try:
+            self.__build_command_line( )
+        except Exception as e:
+            # capture and log parsing errors
+            global_tool_errors.add_error(self.tool.config_file, "Building Command Line", e)
+            raise e
+        try:
+            self.__build_environment_variables()
+        except Exception as e:
+            global_tool_errors.add_error(self.tool.config_file, "Building Environment Variables", e)
+            raise e
+
+        return self.command_line, self.extra_filenames, self.environment_variables
+
+    def __build_command_line( self ):
+        """
+        Build command line to invoke this tool given a populated param_dict
+        """
+        command = self.tool.command
+        param_dict = self.param_dict
+        interpreter = self.tool.interpreter
+        command_line = None
+        if not command:
+            return
+        try:
+            # Substituting parameters into the command
+            command_line = fill_template( command, context=param_dict )
+            cleaned_command_line = []
+            # Remove leading and trailing whitespace from each line for readability.
+            for line in command_line.split( '\n' ):
+                cleaned_command_line.append( line.strip() )
+            command_line = '\n'.join( cleaned_command_line )
+            # Remove newlines from command line, and any leading/trailing white space
+            command_line = command_line.replace( "\n", " " ).replace( "\r", " " ).strip()
+        except Exception:
+            # Modify exception message to be more clear
+            # e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ), )
+            raise
+        if interpreter:
+            # TODO: path munging for cluster/dataset server relocatability
+            executable = command_line.split()[0]
+            tool_dir = os.path.abspath( self.tool.tool_dir )
+            abs_executable = os.path.join( tool_dir, executable )
+            command_line = command_line.replace(executable, abs_executable, 1)
+            command_line = interpreter + " " + command_line
+        self.command_line = command_line
+
+    def __build_config_files( self ):
+        """
+        Build temporary config files for file-based parameter transfer if needed
+        """
+        param_dict = self.param_dict
+        config_filenames = []
+        for name, filename, content in self.tool.config_files:
+            config_text, is_template = self.__build_config_file_text(content)
+            # If a particular filename was forced by the config use it
+            directory = self.local_working_directory
+            if filename is not None:
+                config_filename = os.path.join( directory, filename )
+            else:
+                fd, config_filename = tempfile.mkstemp( dir=directory )
+                os.close( fd )
+            self.__write_workdir_file( config_filename, config_text, param_dict, is_template=is_template )
+            self.__register_extra_file( name, config_filename )
+            config_filenames.append( config_filename )
+        return config_filenames
+
+    def __build_environment_variables( self ):
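+        # Sketch: a definition such as {"name": "MY_VAR", "template": "${input.ext}"}
+        # is written to a temp file in the working directory and surfaces as
+        # MY_VAR=`cat <basename>` when the job script runs (names illustrative).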
+        param_dict = self.param_dict
+        environment_variables = []
+        for environment_variable_def in self.tool.environment_variables:
+            directory = self.local_working_directory
+            environment_variable = environment_variable_def.copy()
+            environment_variable_template = environment_variable_def["template"]
+            fd, config_filename = tempfile.mkstemp( dir=directory )
+            os.close( fd )
+            self.__write_workdir_file( config_filename, environment_variable_template, param_dict )
+            config_file_basename = os.path.basename( config_filename )
+            environment_variable["value"] = "`cat %s`" % config_file_basename
+            environment_variable["raw"] = True
+            environment_variables.append(environment_variable)
+
+        self.environment_variables = environment_variables
+        return environment_variables
+
+    def __build_param_file( self ):
+        """
+        Build a temporary file for file-based parameter transfer if needed
+        """
+        param_dict = self.param_dict
+        directory = self.local_working_directory
+        command = self.tool.command
+        if self.tool.profile < 16.04 and command and "$param_file" in command:
+            fd, param_filename = tempfile.mkstemp( dir=directory )
+            os.close( fd )
+            f = open( param_filename, "w" )
+            for key, value in param_dict.items():
+                # parameters can be strings or lists of strings, coerce to list
+                if not isinstance(value, list):
+                    value = [ value ]
+                for elem in value:
+                    f.write( '%s=%s\n' % (key, elem) )
+            f.close()
+            self.__register_extra_file( 'param_file', param_filename )
+            return param_filename
+        else:
+            return None
+
+    def __build_config_file_text( self, content ):
+        if isinstance( content, string_types ):
+            return content, True
+
+        content_format = content["format"]
+        if content_format != "json":
+            template = "Galaxy can only currently convert inputs to json, format [%s] is unhandled"
+            message = template % content_format
+            raise Exception(message)
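+        # Sketch: a config file declared with format="json" receives the
+        # JSON-wrapped inputs, roughly (shape illustrative):
+        #
+        #     {"threshold": 0.05, "input": "/data/1.dat"}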
+
+        return json.dumps(wrapped_json.json_wrap(self.tool.inputs, self.param_dict)), False
+
+    def __write_workdir_file( self, config_filename, content, context, is_template=True ):
+        if is_template:
+            value = fill_template( content, context=context )
+        else:
+            value = content
+        with open( config_filename, "w" ) as f:
+            f.write( value )
+        # For running jobs as the actual user, ensure the config file is globally readable
+        os.chmod( config_filename, 0644 )
+
+    def __register_extra_file( self, name, local_config_path ):
+        """
+        Takes in the local path to a config file and registers the (potentially
+        remote) ultimate path of the config file with the parameter dict.
+        """
+        self.extra_filenames.append( local_config_path )
+        config_basename = os.path.basename( local_config_path )
+        compute_config_path = self.__join_for_compute(self.compute_environment.config_directory(), config_basename)
+        self.param_dict[ name ] = compute_config_path
+
+    def __join_for_compute( self, *args ):
+        """
+        os.path.join but with compute_environment.sep for cross-platform
+        compat.
+        """
+        return self.compute_environment.sep().join( args )
diff --git a/lib/galaxy/tools/exception_handling.py b/lib/galaxy/tools/exception_handling.py
new file mode 100644
index 0000000..0c16162
--- /dev/null
+++ b/lib/galaxy/tools/exception_handling.py
@@ -0,0 +1,6 @@
+# We put a tool that references this package into the tool shed,
+# so it seems we have to provide this legacy import location
+# indefinitely.
+from galaxy.util.ucsc import UCSCLimitException, UCSCOutWrapper
+
+__all__ = ('UCSCOutWrapper', 'UCSCLimitException')
diff --git a/lib/galaxy/tools/execute.py b/lib/galaxy/tools/execute.py
new file mode 100644
index 0000000..45fd192
--- /dev/null
+++ b/lib/galaxy/tools/execute.py
@@ -0,0 +1,207 @@
+"""
+Once state information has been calculated, handle actually executing tools
+from various states, tracking results, and building implicit dataset
+collections from matched collections.
+"""
+import collections
+import logging
+from threading import Thread
+
+from six.moves.queue import Queue
+
+from galaxy.tools.actions import on_text_for_names, ToolExecutionCache
+from galaxy.tools.parser import ToolOutputCollectionPart
+from galaxy.util import ExecutionTimer
+
+log = logging.getLogger( __name__ )
+
+EXECUTION_SUCCESS_MESSAGE = "Tool [%s] created job [%s] %s"
+
+
+def execute( trans, tool, param_combinations, history, rerun_remap_job_id=None, collection_info=None, workflow_invocation_uuid=None ):
+    """
+    Execute a tool and return an object containing a summary (output data,
+    number of failures, etc.).
+    """
+    all_jobs_timer = ExecutionTimer()
+    execution_tracker = ToolExecutionTracker( tool, param_combinations, collection_info )
+    app = trans.app
+    execution_cache = ToolExecutionCache(trans)
+
+    def execute_single_job(params):
+        job_timer = ExecutionTimer()
+        if workflow_invocation_uuid:
+            params[ '__workflow_invocation_uuid__' ] = workflow_invocation_uuid
+        elif '__workflow_invocation_uuid__' in params:
+            # Only workflow invocation code gets to set this; ignore user-supplied
+            # values or rerun parameters.
+            del params[ '__workflow_invocation_uuid__' ]
+        job, result = tool.handle_single_execution( trans, rerun_remap_job_id, params, history, collection_info, execution_cache )
+        if job:
+            message = EXECUTION_SUCCESS_MESSAGE % (tool.id, job.id, job_timer)
+            log.debug(message)
+            execution_tracker.record_success( job, result )
+        else:
+            execution_tracker.record_error( result )
+
+    config = app.config
+    burst_at = getattr( config, 'tool_submission_burst_at', 10 )
+    burst_threads = getattr( config, 'tool_submission_burst_threads', 1 )
+
+    tool_action = tool.action
+    if hasattr( tool_action, "check_inputs_ready" ):
+        for params in execution_tracker.param_combinations:
+            # This will throw an exception if the tool is not ready.
+            tool_action.check_inputs_ready(
+                tool,
+                trans,
+                params,
+                history
+            )
+
+    job_count = len(execution_tracker.param_combinations)
+    if job_count < burst_at or burst_threads < 2:
+        for params in execution_tracker.param_combinations:
+            execute_single_job(params)
+    else:
+        q = Queue()
+
+        def worker():
+            while True:
+                params = q.get()
+                execute_single_job(params)
+                q.task_done()
+
+        for i in range(burst_threads):
+            t = Thread(target=worker)
+            t.daemon = True
+            t.start()
+
+        for params in execution_tracker.param_combinations:
+            q.put(params)
+
+        q.join()
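+        # Standard Queue/worker fan-out: the daemon threads drain the queue and
+        # q.join() blocks until task_done() has been called once per put().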
+
+    log.debug("Executed %d job(s) for tool %s request: %s" % (job_count, tool.id, all_jobs_timer))
+    if collection_info:
+        history = history or tool.get_default_history_by_trans( trans )
+        if len(param_combinations) == 0:
+            template = "Attempting to map over an empty collection, this is not yet implemented. colleciton_info is [%s]"
+            message = template % collection_info
+            log.warn(message)
+            raise Exception(message)
+        params = param_combinations[0]
+        execution_tracker.create_output_collections( trans, history, params )
+
+    return execution_tracker
+
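+# Sketch of a typical call (names assumed; callers pass a transaction, a
+# Tool instance and a list of parameter dicts):
+#
+#     tracker = execute( trans, tool, [ params ], history )
+#     if tracker.failed_jobs:
+#         report( tracker.execution_errors )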
+
+class ToolExecutionTracker( object ):
+
+    def __init__( self, tool, param_combinations, collection_info ):
+        self.tool = tool
+        self.param_combinations = param_combinations
+        self.collection_info = collection_info
+        self.successful_jobs = []
+        self.failed_jobs = 0
+        self.execution_errors = []
+        self.output_datasets = []
+        self.output_collections = []
+        self.outputs_by_output_name = collections.defaultdict(list)
+        self.implicit_collections = {}
+
+    def record_success( self, job, outputs ):
+        self.successful_jobs.append( job )
+        self.output_datasets.extend( outputs )
+        for output_name, output_dataset in outputs:
+            if ToolOutputCollectionPart.is_named_collection_part_name( output_name ):
+                # Skip known collection outputs, these will be covered by
+                # output collections.
+                continue
+            self.outputs_by_output_name[ output_name ].append( output_dataset )
+        for job_output in job.output_dataset_collections:
+            self.outputs_by_output_name[ job_output.name ].append( job_output.dataset_collection )
+        for job_output in job.output_dataset_collection_instances:
+            self.output_collections.append( ( job_output.name, job_output.dataset_collection_instance ) )
+
+    def record_error( self, error ):
+        self.failed_jobs += 1
+        message = "There was a failure executing a job for tool [%s] - %s"
+        log.warning(message, self.tool.id, error)
+        self.execution_errors.append( error )
+
+    def create_output_collections( self, trans, history, params ):
+        # TODO: Move this function - it doesn't belong here but it does need
+        # the information in this class and potential extensions.
+        if self.failed_jobs > 0:
+            return []
+
+        structure = self.collection_info.structure
+
+        # params is just one sample tool param execution with the parallelized
+        # collection replaced with a specific dataset. We need to replace this
+        # with the collection and wrap everything up so we can evaluate the
+        # output label.
+        params.update( self.collection_info.collections )  # Replace datasets with source collections for labelling outputs.
+
+        collection_names = ["collection %d" % c.hid for c in self.collection_info.collections.values()]
+        on_text = on_text_for_names( collection_names )
+
+        collections = {}
+
+        implicit_inputs = list(self.collection_info.collections.items())
+        for output_name, outputs in self.outputs_by_output_name.items():
+            if len( structure ) != len( outputs ):
+                # Output does not have the same structure; if all jobs were
+                # successfully submitted, this shouldn't have happened.
+                log.warning( "Problem matching up datasets while attempting to create implicit dataset collections" )
+                continue
+            output = self.tool.outputs[ output_name ]
+            element_identifiers = structure.element_identifiers_for_outputs( trans, outputs )
+
+            implicit_collection_info = dict(
+                implicit_inputs=implicit_inputs,
+                implicit_output_name=output_name,
+                outputs=outputs
+            )
+            try:
+                output_collection_name = self.tool.tool_action.get_output_name(
+                    output,
+                    dataset=None,
+                    tool=self.tool,
+                    on_text=on_text,
+                    trans=trans,
+                    history=history,
+                    params=params,
+                    incoming=None,
+                    job_params=None,
+                )
+            except Exception:
+                output_collection_name = "%s across %s" % ( self.tool.name, on_text )
+
+            child_element_identifiers = element_identifiers[ "element_identifiers" ]
+            collection_type = element_identifiers[ "collection_type" ]
+            collection = trans.app.dataset_collections_service.create(
+                trans=trans,
+                parent=history,
+                name=output_collection_name,
+                element_identifiers=child_element_identifiers,
+                collection_type=collection_type,
+                implicit_collection_info=implicit_collection_info,
+            )
+            for job in self.successful_jobs:
+                # TODO: Think through this, may only want this for output
+                # collections - or we may be already recording data in some
+                # other way.
+                if job not in trans.sa_session:
+                    job = trans.sa_session.query( trans.app.model.Job ).get( job.id )
+                job.add_output_dataset_collection( output_name, collection )
+            collections[ output_name ] = collection
+
+        # Needed to flush the association created just above with
+        # job.add_output_dataset_collection.
+        trans.sa_session.flush()
+        self.implicit_collections = collections
+
+
+__all__ = ( 'execute', )
diff --git a/lib/galaxy/tools/filter_failed_collection.xml b/lib/galaxy/tools/filter_failed_collection.xml
new file mode 100644
index 0000000..fb0969f
--- /dev/null
+++ b/lib/galaxy/tools/filter_failed_collection.xml
@@ -0,0 +1,45 @@
+<tool id="__FILTER_FAILED_DATASETS__"
+      name="Filter failed"
+      version="1.0.0"
+      tool_type="filter_failed_datasets_collection">
+    <description>datasets from a list</description>
+    <type class="FilterFailedDatasetsTool" module="galaxy.tools" />
+    <action module="galaxy.tools.actions.model_operations"
+            class="ModelOperationToolAction"/>
+    <inputs>
+        <param type="data_collection" name="input" label="Input Collection" />
+    </inputs>
+    <outputs>
+        <collection name="output" format_source="input" type_source="input" label="${on_string} (filtered failed datasets)" >
+        </collection>
+    </outputs>
+    <tests>
+        <!-- Test framework has no way of creating a collection with
+             failed elements, so best we can do is verify identity on
+             an okay collection. API tests verify this tool works
+             though.
+        -->
+        <test>
+            <param name="input">
+                <collection type="list">
+                    <element name="e1" value="simple_line.txt" />
+                </collection>
+            </param>
+            <output_collection name="output" type="list">
+                <element name="e1">
+                  <assert_contents>
+                      <has_text_matching expression="^This is a line of text.\n$" />
+                  </assert_contents>
+                </element>
+            </output_collection>
+        </test>
+    </tests>
+    <help>
+        This tool takes a list dataset collection and filters out the failed
+        datasets from it. This is useful for continuing a multi-sample analysis
+        when one or more of the samples fail at some point.
+
+        This tool will create new history datasets from your collection
+        but your quota usage will not increase.
+    </help>
+</tool>
diff --git a/lib/galaxy/tools/filters/__init__.py b/lib/galaxy/tools/filters/__init__.py
new file mode 100644
index 0000000..c984816
--- /dev/null
+++ b/lib/galaxy/tools/filters/__init__.py
@@ -0,0 +1,5 @@
+"""Deprecated module for toolbox filters.
+
+Filters placed in this module will still work - but filters should be
+moved to lib/galaxy/tools/toolbox/filters.
+"""
diff --git a/lib/galaxy/tools/flatten_collection.xml b/lib/galaxy/tools/flatten_collection.xml
new file mode 100644
index 0000000..279fd4a
--- /dev/null
+++ b/lib/galaxy/tools/flatten_collection.xml
@@ -0,0 +1,56 @@
+<tool id="__FLATTEN__"
+      name="Flatten Collection"
+      version="1.0.0"
+      tool_type="filter_collection">
+    <description>into a flat list of datasets</description>
+    <type class="FlattenTool" module="galaxy.tools" />
+    <action module="galaxy.tools.actions.model_operations"
+            class="ModelOperationToolAction"/>
+    <inputs>
+        <param type="data_collection" name="input" label="Input Collection" />
+        <param type="select" name="join_identifier" label="Join collection identifiers using" help="">
+            <option value="_">_</option>
+            <option value=":">:</option>
+            <option value="-">-</option>
+        </param>
+    </inputs>
+    <outputs>
+        <collection name="output" format_source="input" type="list" label="${on_string} (flattened)" >
+        </collection>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input">
+                <collection type="list:paired">
+                    <element name="i1">
+                        <collection type="paired">
+                            <element name="forward" value="simple_line.txt" />
+                            <element name="reverse" value="simple_line_alternative.txt" />
+                        </collection>
+                    </element>
+                </collection>
+            </param>
+            <output_collection name="output" type="list">
+              <element name="i1_forward">
+                <assert_contents>
+                  <has_text_matching expression="^This is a line of text.\n$" />
+                </assert_contents>
+              </element>
+              <element name="i1_reverse">
+                <assert_contents>
+                  <has_text_matching expression="^This is a different line of text.\n$" />
+                </assert_contents>
+              </element>
+            </output_collection>
+        </test>
+    </tests>
+    <help>
+        This tool takes nested collections, such as a list of lists or a list
+        of dataset pairs, and produces a flat list from the inputs. The collection
+        identifiers are merged together to create new collection identifiers in
+        the flattened result.
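+
+        For example, with the default join character "_", a pair named "i1"
+        produces datasets "i1_forward" and "i1_reverse" in the flattened
+        list.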
+
+        This tool will create new history datasets from your collection
+        but your quota usage will not increase.
+    </help>
+</tool>
diff --git a/lib/galaxy/tools/imp_exp/__init__.py b/lib/galaxy/tools/imp_exp/__init__.py
new file mode 100644
index 0000000..248da34
--- /dev/null
+++ b/lib/galaxy/tools/imp_exp/__init__.py
@@ -0,0 +1,534 @@
+import datetime
+import json
+import logging
+import os
+import shutil
+import tempfile
+from json import dumps, loads
+
+from sqlalchemy.orm import eagerload, eagerload_all
+from sqlalchemy.sql import expression
+
+from galaxy import model
+from galaxy.exceptions import MalformedContents
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.web.framework.helpers import to_unicode
+
+log = logging.getLogger(__name__)
+
+
+class JobImportHistoryArchiveWrapper( object, UsesAnnotations ):
+    """
+        Class provides support for performing jobs that import a history from
+        an archive.
+    """
+    def __init__( self, app, job_id ):
+        self.app = app
+        self.job_id = job_id
+        self.sa_session = self.app.model.context
+
+    def cleanup_after_job( self ):
+        """ Set history, datasets, and jobs' attributes and clean up archive directory. """
+
+        #
+        # Helper methods.
+        #
+
+        def file_in_dir( file_path, a_dir ):
+            """ Returns true if file is in directory. """
+            abs_file_path = os.path.abspath( file_path )
+            return os.path.split( abs_file_path )[0] == a_dir
+
+        def read_file_contents( file_path ):
+            """ Read contents of a file in chunks. """
+            fp = open( file_path, 'rb' )
+            buffsize = 1048576
+            file_contents = ''
+            try:
+                while True:
+                    chunk = fp.read( buffsize )
+                    if not chunk:
+                        # An empty read signals EOF.
+                        break
+                    file_contents += chunk
+            except OverflowError:
+                pass
+            fp.close()
+            return file_contents
+
+        def get_tag_str( tag, value ):
+            """ Builds a tag string for a tag, value pair. """
+            if not value:
+                return tag
+            else:
+                return tag + ":" + value
+
+        #
+        # Import history.
+        #
+
+        jiha = self.sa_session.query( model.JobImportHistoryArchive ).filter_by( job_id=self.job_id ).first()
+        if jiha:
+            try:
+                archive_dir = jiha.archive_dir
+                user = jiha.job.user
+
+                #
+                # Create history.
+                #
+                history_attr_file_name = os.path.join( archive_dir, 'history_attrs.txt')
+                history_attr_str = read_file_contents( history_attr_file_name )
+                history_attrs = loads( history_attr_str )
+
+                # Create history.
+                new_history = model.History( name='imported from archive: %s' % history_attrs['name'].encode( 'utf-8' ),
+                                             user=user )
+                new_history.importing = True
+                new_history.hid_counter = history_attrs['hid_counter']
+                new_history.genome_build = history_attrs['genome_build']
+                self.sa_session.add( new_history )
+                jiha.history = new_history
+                self.sa_session.flush()
+
+                # Add annotation, tags.
+                if user:
+                    self.add_item_annotation( self.sa_session, user, new_history, history_attrs[ 'annotation' ] )
+                    """
+                    TODO: figure out how to add tags to an item.
+                    for tag, value in history_attrs[ 'tags' ].items():
+                        trans.app.tag_handler.apply_item_tags( trans, trans.user, new_history, get_tag_str( tag, value ) )
+                    """
+
+                #
+                # Create datasets.
+                #
+                datasets_attrs_file_name = os.path.join( archive_dir, 'datasets_attrs.txt')
+                datasets_attr_str = read_file_contents( datasets_attrs_file_name )
+                datasets_attrs = loads( datasets_attr_str )
+
+                if os.path.exists( datasets_attrs_file_name + ".provenance" ):
+                    provenance_attr_str = read_file_contents( datasets_attrs_file_name + ".provenance" )
+                    provenance_attrs = loads( provenance_attr_str )
+                    datasets_attrs += provenance_attrs
+
+                # Get counts of how often each dataset file is used; a file can
+                # be linked to multiple dataset objects (HDAs).
+                datasets_usage_counts = {}
+                for dataset_attrs in datasets_attrs:
+                    temp_dataset_file_name = \
+                        os.path.abspath( os.path.join( archive_dir, dataset_attrs['file_name'] ) )
+                    if ( temp_dataset_file_name not in datasets_usage_counts ):
+                        datasets_usage_counts[ temp_dataset_file_name ] = 0
+                    datasets_usage_counts[ temp_dataset_file_name ] += 1
+
+                # Create datasets.
+                for dataset_attrs in datasets_attrs:
+                    metadata = dataset_attrs['metadata']
+
+                    # Create dataset and HDA.
+                    hda = model.HistoryDatasetAssociation( name=dataset_attrs['name'].encode( 'utf-8' ),
+                                                           extension=dataset_attrs['extension'],
+                                                           info=dataset_attrs['info'].encode( 'utf-8' ),
+                                                           blurb=dataset_attrs['blurb'],
+                                                           peek=dataset_attrs['peek'],
+                                                           designation=dataset_attrs['designation'],
+                                                           visible=dataset_attrs['visible'],
+                                                           dbkey=metadata['dbkey'],
+                                                           metadata=metadata,
+                                                           history=new_history,
+                                                           create_dataset=True,
+                                                           sa_session=self.sa_session )
+                    if 'uuid' in dataset_attrs:
+                        hda.dataset.uuid = dataset_attrs["uuid"]
+                    if dataset_attrs.get('exported', True) is False:
+                        hda.state = hda.states.DISCARDED
+                        hda.deleted = True
+                        hda.purged = True
+                    else:
+                        hda.state = hda.states.OK
+                    self.sa_session.add( hda )
+                    self.sa_session.flush()
+                    new_history.add_dataset( hda, genome_build=None )
+                    hda.hid = dataset_attrs['hid']  # Overwrite default hid set when HDA added to history.
+                    # TODO: Is there a way to recover permissions? Is this needed?
+                    # permissions = trans.app.security_agent.history_get_default_permissions( new_history )
+                    # trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
+                    self.sa_session.flush()
+                    if dataset_attrs.get('exported', True) is True:
+                        # Do security check and move/copy dataset data.
+                        temp_dataset_file_name = \
+                            os.path.realpath( os.path.abspath( os.path.join( archive_dir, dataset_attrs['file_name'] ) ) )
+                        if not file_in_dir( temp_dataset_file_name, os.path.join( archive_dir, "datasets" ) ):
+                            raise MalformedContents( "Invalid dataset path: %s" % temp_dataset_file_name )
+                        if datasets_usage_counts[ temp_dataset_file_name ] == 1:
+                            self.app.object_store.update_from_file( hda.dataset, file_name=temp_dataset_file_name, create=True )
+
+                            # Import additional files if present. Histories exported previously might not have this attribute set.
+                            dataset_extra_files_path = dataset_attrs.get( 'extra_files_path', None )
+                            if dataset_extra_files_path:
+                                try:
+                                    file_list = os.listdir( os.path.join( archive_dir, dataset_extra_files_path ) )
+                                except OSError:
+                                    file_list = []
+
+                                if file_list:
+                                    for extra_file in file_list:
+                                        self.app.object_store.update_from_file(
+                                            hda.dataset, extra_dir='dataset_%s_files' % hda.dataset.id,
+                                            alt_name=extra_file, file_name=os.path.join( archive_dir, dataset_extra_files_path, extra_file ),
+                                            create=True )
+                        else:
+                            datasets_usage_counts[ temp_dataset_file_name ] -= 1
+                            shutil.copyfile( temp_dataset_file_name, hda.file_name )
+                        hda.dataset.set_total_size()  # update the filesize record in the database
+
+                    # Set tags, annotations.
+                    if user:
+                        self.add_item_annotation( self.sa_session, user, hda, dataset_attrs[ 'annotation' ] )
+                        # TODO: Set tags.
+                        """
+                        for tag, value in dataset_attrs[ 'tags' ].items():
+                            trans.app.tag_handler.apply_item_tags( trans, trans.user, hda, get_tag_str( tag, value ) )
+                            self.sa_session.flush()
+                        """
+
+                    # Although metadata is set above, need to set metadata to recover BAI for BAMs.
+                    if hda.extension == 'bam':
+                        self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute_via_app(
+                            self.app.datatypes_registry.set_external_metadata_tool, self.app, jiha.job.session_id,
+                            new_history.id, jiha.job.user, incoming={ 'input1': hda }, overwrite=False
+                        )
+
+                #
+                # Create jobs.
+                #
+
+                # Read jobs attributes.
+                jobs_attr_file_name = os.path.join( archive_dir, 'jobs_attrs.txt')
+                jobs_attr_str = read_file_contents( jobs_attr_file_name )
+
+                # Decode jobs attributes.
+                def as_hda( obj_dct ):
+                    """ Hook to 'decode' an HDA; method uses history and HID to get the HDA represented by
+                        the encoded object. This only works because HDAs are created above. """
+                    if obj_dct.get( '__HistoryDatasetAssociation__', False ):
+                        return self.sa_session.query( model.HistoryDatasetAssociation
+                                                      ).filter_by( history=new_history, hid=obj_dct['hid'] ).first()
+                    return obj_dct
+                jobs_attrs = loads( jobs_attr_str, object_hook=as_hda )
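+                # Sketch: loads() feeds every decoded dict through as_hda, so an
+                # encoded value like {"__HistoryDatasetAssociation__": true, "hid": 3}
+                # resolves back to the HDA with hid 3 in the new history.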
+
+                # Create each job.
+                for job_attrs in jobs_attrs:
+                    imported_job = model.Job()
+                    imported_job.user = user
+                    # TODO: set session?
+                    # imported_job.session = trans.get_galaxy_session().id
+                    imported_job.history = new_history
+                    imported_job.imported = True
+                    imported_job.tool_id = job_attrs[ 'tool_id' ]
+                    imported_job.tool_version = job_attrs[ 'tool_version' ]
+                    imported_job.set_state( job_attrs[ 'state' ] )
+                    imported_job.info = job_attrs.get('info', None)
+                    imported_job.exit_code = job_attrs.get('exit_code', None)
+                    imported_job.traceback = job_attrs.get('traceback', None)
+                    imported_job.stdout = job_attrs.get('stdout', None)
+                    imported_job.stderr = job_attrs.get('stderr', None)
+                    imported_job.command_line = job_attrs.get('command_line', None)
+                    try:
+                        imported_job.create_time = datetime.datetime.strptime(job_attrs["create_time"], "%Y-%m-%dT%H:%M:%S.%f")
+                        imported_job.update_time = datetime.datetime.strptime(job_attrs["update_time"], "%Y-%m-%dT%H:%M:%S.%f")
+                    except (KeyError, ValueError):
+                        # Timestamps may be absent or in an unexpected format.
+                        pass
+                    self.sa_session.add( imported_job )
+                    self.sa_session.flush()
+
+                    class HistoryDatasetAssociationIDEncoder( json.JSONEncoder ):
+                        """ Custom JSONEncoder for a HistoryDatasetAssociation that encodes an HDA as its ID. """
+                        def default( self, obj ):
+                            """ Encode an HDA, default encoding for everything else. """
+                            if isinstance( obj, model.HistoryDatasetAssociation ):
+                                return obj.id
+                            return json.JSONEncoder.default( self, obj )
+
+                    # Set parameters. May be useful to look at metadata.py for creating parameters.
+                    # TODO: there may be a better way to set parameters, e.g.:
+                    #   for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+                    #       job.add_parameter( name, value )
+                    # to make this work, we'd need to flesh out the HDA objects. The code below is
+                    # relatively similar.
+                    for name, value in job_attrs[ 'params' ].items():
+                        # Transform parameter values when necessary.
+                        if isinstance( value, model.HistoryDatasetAssociation ):
+                            # HDA input: use hid to find input.
+                            input_hda = self.sa_session.query( model.HistoryDatasetAssociation ) \
+                                            .filter_by( history=new_history, hid=value.hid ).first()
+                            value = input_hda.id
+                        # print "added parameter %s-->%s to job %i" % ( name, value, imported_job.id )
+                        imported_job.add_parameter( name, dumps( value, cls=HistoryDatasetAssociationIDEncoder ) )
+
+                    # TODO: Connect jobs to input datasets.
+
+                    # Connect jobs to output datasets.
+                    for output_hid in job_attrs[ 'output_datasets' ]:
+                        # print "%s job has output dataset %i" % (imported_job.id, output_hid)
+                        output_hda = self.sa_session.query( model.HistoryDatasetAssociation
+                                                            ).filter_by(history=new_history, hid=output_hid ).first()
+                        if output_hda:
+                            imported_job.add_output_dataset( output_hda.name, output_hda )
+
+                    # Connect jobs to input datasets.
+                    if 'input_mapping' in job_attrs:
+                        for input_name, input_hid in job_attrs[ 'input_mapping' ].items():
+                            input_hda = self.sa_session.query( model.HistoryDatasetAssociation ) \
+                                            .filter_by( history=new_history, hid=input_hid ).first()
+                            if input_hda:
+                                imported_job.add_input_dataset( input_name, input_hda )
+
+                    self.sa_session.flush()
+
+                # Done importing.
+                new_history.importing = False
+                self.sa_session.flush()
+
+                # Cleanup.
+                if os.path.exists( archive_dir ):
+                    shutil.rmtree( archive_dir )
+            except Exception as e:
+                jiha.job.stderr += "Error cleaning up history import job: %s" % e
+                self.sa_session.flush()
+                raise
+
+
+class JobExportHistoryArchiveWrapper( object, UsesAnnotations ):
+    """
+    Class provides support for performing jobs that export a history to an
+    archive.
+    """
+    def __init__( self, job_id ):
+        self.job_id = job_id
+
+    def get_history_datasets( self, trans, history ):
+        """
+        Returns history's datasets.
+        """
+        query = ( trans.sa_session.query( trans.model.HistoryDatasetAssociation )
+                  .filter( trans.model.HistoryDatasetAssociation.history == history )
+                  .options( eagerload( "children" ) )
+                  .join( "dataset" )
+                  .options( eagerload_all( "dataset.actions" ) )
+                  .order_by( trans.model.HistoryDatasetAssociation.hid )
+                  .filter( trans.model.HistoryDatasetAssociation.deleted == expression.false() )
+                  .filter( trans.model.Dataset.purged == expression.false() ) )
+        return query.all()
+
+    # TODO: should use db_session rather than trans in this method.
+    def setup_job( self, trans, jeha, include_hidden=False, include_deleted=False ):
+        """ Perform setup for job to export a history into an archive. Method generates
+            attribute files for export, sets the corresponding attributes in the jeha
+            object, and returns a command line for running the job. The command line
+            includes the command, inputs, and options; it does not include the output
+            file because it must be set at runtime. """
+
+        #
+        # Helper methods/classes.
+        #
+
+        def get_item_tag_dict( item ):
+            """ Create dictionary of an item's tags. """
+            tags = {}
+            for tag in item.tags:
+                tag_user_tname = to_unicode( tag.user_tname )
+                tag_user_value = to_unicode( tag.user_value )
+                tags[ tag_user_tname ] = tag_user_value
+            return tags
+
+        def prepare_metadata( metadata ):
+            """ Prepare metatdata for exporting. """
+            for name, value in metadata.items():
+                # Metadata files are not needed for export because they can be
+                # regenerated.
+                if isinstance( value, trans.app.model.MetadataFile ):
+                    del metadata[ name ]
+            return metadata
+
+        class HistoryDatasetAssociationEncoder( json.JSONEncoder ):
+            """ Custom JSONEncoder for a HistoryDatasetAssociation. """
+            def default( self, obj ):
+                """ Encode an HDA, default encoding for everything else. """
+                if isinstance( obj, trans.app.model.HistoryDatasetAssociation ):
+                    rval = {
+                        "__HistoryDatasetAssociation__": True,
+                        "create_time": obj.create_time.__str__(),
+                        "update_time": obj.update_time.__str__(),
+                        "hid": obj.hid,
+                        "name": to_unicode( obj.name ),
+                        "info": to_unicode( obj.info ),
+                        "blurb": obj.blurb,
+                        "peek": obj.peek,
+                        "extension": obj.extension,
+                        "metadata": prepare_metadata( dict( obj.metadata.items() ) ),
+                        "parent_id": obj.parent_id,
+                        "designation": obj.designation,
+                        "deleted": obj.deleted,
+                        "visible": obj.visible,
+                        "file_name": obj.file_name,
+                        "uuid": ( lambda uuid: str( uuid ) if uuid else None )( obj.dataset.uuid ),
+                        "annotation": to_unicode( getattr( obj, 'annotation', '' ) ),
+                        "tags": get_item_tag_dict( obj ),
+                        "extra_files_path": obj.extra_files_path
+                    }
+                    if not obj.visible and not include_hidden:
+                        rval['exported'] = False
+                    elif obj.deleted and not include_deleted:
+                        rval['exported'] = False
+                    else:
+                        rval['exported'] = True
+                    return rval
+                return json.JSONEncoder.default( self, obj )
+
+        #
+        # Create attributes/metadata files for export.
+        #
+        temp_output_dir = tempfile.mkdtemp()
+
+        # Write history attributes to file.
+        history = jeha.history
+        history_attrs = {
+            "create_time": history.create_time.__str__(),
+            "update_time": history.update_time.__str__(),
+            "name": to_unicode( history.name ),
+            "hid_counter": history.hid_counter,
+            "genome_build": history.genome_build,
+            "annotation": to_unicode( self.get_item_annotation_str( trans.sa_session, history.user, history ) ),
+            "tags": get_item_tag_dict( history ),
+            "includes_hidden_datasets": include_hidden,
+            "includes_deleted_datasets": include_deleted
+        }
+        history_attrs_filename = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
+        history_attrs_out = open( history_attrs_filename, 'w' )
+        history_attrs_out.write( dumps( history_attrs ) )
+        history_attrs_out.close()
+        jeha.history_attrs_filename = history_attrs_filename
+
+        # Write datasets' attributes to file.
+        datasets = self.get_history_datasets( trans, history )
+        included_datasets = []
+        datasets_attrs = []
+        provenance_attrs = []
+        for dataset in datasets:
+            dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
+            if (not dataset.visible and not include_hidden) or (dataset.deleted and not include_deleted):
+                provenance_attrs.append( dataset )
+            else:
+                datasets_attrs.append( dataset )
+                included_datasets.append( dataset )
+        datasets_attrs_filename = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
+        datasets_attrs_out = open( datasets_attrs_filename, 'w' )
+        datasets_attrs_out.write( dumps( datasets_attrs, cls=HistoryDatasetAssociationEncoder ) )
+        datasets_attrs_out.close()
+        jeha.datasets_attrs_filename = datasets_attrs_filename
+
+        provenance_attrs_out = open( datasets_attrs_filename + ".provenance", 'w' )
+        provenance_attrs_out.write( dumps( provenance_attrs, cls=HistoryDatasetAssociationEncoder ) )
+        provenance_attrs_out.close()
+
+        #
+        # Write jobs attributes file.
+        #
+
+        # Get all jobs associated with included HDAs.
+        jobs_dict = {}
+        for hda in included_datasets:
+            # Get the associated job, if any. If this hda was copied from another,
+            # we need to find the job that created the original hda.
+            job_hda = hda
+            while job_hda.copied_from_history_dataset_association:  # should this check library datasets as well?
+                job_hda = job_hda.copied_from_history_dataset_association
+            if not job_hda.creating_job_associations:
+                # No viable HDA found.
+                continue
+
+            # Get the job object.
+            job = None
+            for assoc in job_hda.creating_job_associations:
+                job = assoc.job
+                break
+            if not job:
+                # No viable job.
+                continue
+
+            jobs_dict[ job.id ] = job
+
+        # Get jobs' attributes.
+        jobs_attrs = []
+        for id, job in jobs_dict.items():
+            job_attrs = {}
+            job_attrs[ 'tool_id' ] = job.tool_id
+            job_attrs[ 'tool_version' ] = job.tool_version
+            job_attrs[ 'state' ] = job.state
+            job_attrs[ 'info' ] = job.info
+            job_attrs[ 'traceback' ] = job.traceback
+            job_attrs[ 'command_line' ] = job.command_line
+            job_attrs[ 'stderr' ] = job.stderr
+            job_attrs[ 'stdout' ] = job.stdout
+            job_attrs[ 'exit_code' ] = job.exit_code
+            job_attrs[ 'create_time' ] = job.create_time.isoformat()
+            job_attrs[ 'update_time' ] = job.update_time.isoformat()
+
+            # Get the job's parameters
+            try:
+                params_objects = job.get_param_values( trans.app )
+            except Exception:
+                # Could not get job params; skip this job.
+                continue
+
+            params_dict = {}
+            for name, value in params_objects.items():
+                params_dict[ name ] = value
+            job_attrs[ 'params' ] = params_dict
+
+            # -- Get input, output datasets. --
+
+            input_datasets = []
+            input_mapping = {}
+            for assoc in job.input_datasets:
+                # Optional data inputs will not have a dataset.
+                if assoc.dataset:
+                    input_datasets.append( assoc.dataset.hid )
+                    input_mapping[assoc.name] = assoc.dataset.hid
+            job_attrs[ 'input_datasets' ] = input_datasets
+            job_attrs[ 'input_mapping' ] = input_mapping
+            output_datasets = [ assoc.dataset.hid for assoc in job.output_datasets ]
+            job_attrs[ 'output_datasets' ] = output_datasets
+
+            jobs_attrs.append( job_attrs )
+
+        jobs_attrs_filename = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name
+        jobs_attrs_out = open( jobs_attrs_filename, 'w' )
+        jobs_attrs_out.write( dumps( jobs_attrs, cls=HistoryDatasetAssociationEncoder ) )
+        jobs_attrs_out.close()
+        jeha.jobs_attrs_filename = jobs_attrs_filename
+
+        #
+        # Create and return command line for running tool.
+        #
+        options = ""
+        if jeha.compressed:
+            options = "-G"
+        return "%s %s %s %s" % ( options, history_attrs_filename,
+                                 datasets_attrs_filename,
+                                 jobs_attrs_filename )
+
+    def cleanup_after_job( self, db_session ):
+        """ Remove temporary directory and attribute files generated during setup for this job. """
+        # Get jeha for job.
+        jeha = db_session.query( model.JobExportHistoryArchive ).filter_by( job_id=self.job_id ).first()
+        if jeha:
+            for filename in [ jeha.history_attrs_filename, jeha.datasets_attrs_filename, jeha.jobs_attrs_filename ]:
+                try:
+                    os.remove( filename )
+                except Exception as e:
+                    log.debug( 'Failed to cleanup attributes file (%s): %s' % ( filename, e ) )
+            temp_dir = os.path.split( jeha.history_attrs_filename )[0]
+            try:
+                shutil.rmtree( temp_dir )
+            except Exception as e:
+                log.debug( 'Error deleting directory containing attribute files (%s): %s' % ( temp_dir, e ) )
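
For orientation (paths hypothetical, not part of the commit): with compression enabled, the setup above returns a command-line fragment of the form

    -G /tmp/tmpXXXX/history_attrs /tmp/tmpXXXX/datasets_attrs /tmp/tmpXXXX/jobs_attrs

which the export tool below receives as $__EXPORT_HISTORY_COMMAND_INPUTS_OPTIONS__ and passes to export_history.py along with the output file.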
diff --git a/lib/galaxy/tools/imp_exp/exp_history_to_archive.xml b/lib/galaxy/tools/imp_exp/exp_history_to_archive.xml
new file mode 100644
index 0000000..02320b1
--- /dev/null
+++ b/lib/galaxy/tools/imp_exp/exp_history_to_archive.xml
@@ -0,0 +1,16 @@
+<tool id="__EXPORT_HISTORY__" name="Export History" version="0.1" tool_type="export_history">
+  <type class="ExportHistoryTool" module="galaxy.tools"/>
+  <action module="galaxy.tools.actions.history_imp_exp" class="ExportHistoryToolAction"/>
+  <command>python '$export_history' $__EXPORT_HISTORY_COMMAND_INPUTS_OPTIONS__ '$output_file'</command>
+  <inputs>
+    <param name="__HISTORY_TO_EXPORT__" type="hidden"/>
+    <param name="compress" type="boolean"/>
+    <param name="__EXPORT_HISTORY_COMMAND_INPUTS_OPTIONS__" type="hidden"/>
+  </inputs>
+  <configfiles>
+    <configfile name="export_history">from galaxy.tools.imp_exp.export_history import main; main()</configfile>
+  </configfiles>
+  <outputs>
+    <data format="gzip" name="output_file"/>
+  </outputs>
+</tool>
diff --git a/lib/galaxy/tools/imp_exp/export_history.py b/lib/galaxy/tools/imp_exp/export_history.py
new file mode 100644
index 0000000..618c63e
--- /dev/null
+++ b/lib/galaxy/tools/imp_exp/export_history.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+"""
+Export a history to an archive file using attribute files.
+
+usage: %prog history_attrs dataset_attrs job_attrs out_file
+    -G, --gzip: gzip archive file
+"""
+
+import optparse
+import os
+import sys
+import tarfile
+from json import dumps, loads
+
+from galaxy.util import FILENAME_VALID_CHARS
+
+
+def get_dataset_filename( name, ext, hid ):
+    """
+    Builds a filename for a dataset using its name an extension.
+    """
+    base = ''.join( c in FILENAME_VALID_CHARS and c or '_' for c in name )
+    return base + "_%s.%s" % (hid, ext)
+
+
+def create_archive( history_attrs_file, datasets_attrs_file, jobs_attrs_file, out_file, gzip=False ):
+    """ Create archive from the given attribute/metadata files and save it to out_file. """
+    tarfile_mode = "w"
+    if gzip:
+        tarfile_mode += ":gz"
+    try:
+
+        history_archive = tarfile.open( out_file, tarfile_mode )
+
+        # Read dataset attributes from file.
+        datasets_attr_in = open( datasets_attrs_file, 'rb' )
+        datasets_attr_str = ''
+        buffsize = 1048576
+        while True:
+            chunk = datasets_attr_in.read( buffsize )
+            if not chunk:
+                break
+            datasets_attr_str += chunk
+        datasets_attr_in.close()
+        datasets_attrs = loads( datasets_attr_str )
+
+        # Add datasets to archive and update dataset attributes.
+        # TODO: security check to ensure that files added are in Galaxy dataset directory?
+        for dataset_attrs in datasets_attrs:
+            if dataset_attrs['exported']:
+                dataset_file_name = dataset_attrs[ 'file_name' ]  # Full file name.
+                dataset_hid = dataset_attrs[ 'hid' ]
+                dataset_archive_name = os.path.join( 'datasets',
+                                                     get_dataset_filename( dataset_attrs[ 'name' ], dataset_attrs[ 'extension' ], dataset_hid ) )
+                history_archive.add( dataset_file_name, arcname=dataset_archive_name )
+
+                # Include additional files, e.g. files/images included in HTML output.
+                extra_files_path = dataset_attrs[ 'extra_files_path' ]
+                if extra_files_path:
+                    try:
+                        file_list = os.listdir( extra_files_path )
+                    except OSError:
+                        file_list = []
+
+                    if len( file_list ):
+                        dataset_extra_files_path = 'datasets/extra_files_path_%s' % dataset_hid
+                        for fname in file_list:
+                            history_archive.add( os.path.join( extra_files_path, fname ),
+                                                 arcname=( os.path.join( dataset_extra_files_path, fname ) ) )
+                        dataset_attrs[ 'extra_files_path' ] = dataset_extra_files_path
+                    else:
+                        dataset_attrs[ 'extra_files_path' ] = ''
+
+                # Update dataset filename to be archive name.
+                dataset_attrs[ 'file_name' ] = dataset_archive_name
+
+        # Rewrite dataset attributes file.
+        datasets_attrs_out = open( datasets_attrs_file, 'w' )
+        datasets_attrs_out.write( dumps( datasets_attrs ) )
+        datasets_attrs_out.close()
+
+        # Finish archive.
+        history_archive.add( history_attrs_file, arcname="history_attrs.txt" )
+        history_archive.add( datasets_attrs_file, arcname="datasets_attrs.txt" )
+        if os.path.exists( datasets_attrs_file + ".provenance" ):
+            history_archive.add( datasets_attrs_file + ".provenance", arcname="datasets_attrs.txt.provenance" )
+        history_archive.add( jobs_attrs_file, arcname="jobs_attrs.txt" )
+        history_archive.close()
+
+        # Status.
+        return 'Created history archive.'
+    except Exception as e:
+        return 'Error creating history archive: %s' % str( e )
+
+
+def main():
+    # Parse command line.
+    parser = optparse.OptionParser()
+    parser.add_option( '-G', '--gzip', dest='gzip', action="store_true", help='Compress archive using gzip.' )
+    (options, args) = parser.parse_args()
+    gzip = bool( options.gzip )
+    history_attrs, dataset_attrs, job_attrs, out_file = args
+
+    # Create archive.
+    status = create_archive( history_attrs, dataset_attrs, job_attrs, out_file, gzip )
+    print status
+
+
+if __name__ == "__main__":
+    main()
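
A minimal invocation sketch (filenames hypothetical), matching the usage string in the module docstring:

    python export_history.py -G history_attrs.txt datasets_attrs.txt jobs_attrs.txt archive.tar.gz

The three attribute files are the JSON files written by the job wrapper above; -G produces a gzipped tarball instead of a plain tar.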
diff --git a/lib/galaxy/tools/imp_exp/imp_history_from_archive.xml b/lib/galaxy/tools/imp_exp/imp_history_from_archive.xml
new file mode 100644
index 0000000..b73c428
--- /dev/null
+++ b/lib/galaxy/tools/imp_exp/imp_history_from_archive.xml
@@ -0,0 +1,14 @@
+<tool id="__IMPORT_HISTORY__" name="Import History" version="0.1" tool_type="import_history">
+  <type class="ImportHistoryTool" module="galaxy.tools"/>
+  <action module="galaxy.tools.actions.history_imp_exp" class="ImportHistoryToolAction"/>
+  <command interpreter="python">unpack_tar_gz_archive.py "${ str( $__ARCHIVE_SOURCE__ ).encode( 'base64' ) }" "${ str( $__DEST_DIR__ ).encode( 'base64' ) }" --$__ARCHIVE_TYPE__ --encoded</command>
+  <inputs>
+    <param name="__ARCHIVE_SOURCE__" type="text">
+      <sanitizer sanitize="False"/>
+    </param>
+    <param name="__ARCHIVE_TYPE__" type="text"/>
+    <param name="__DEST_DIR__" type="text">
+      <sanitizer sanitize="False"/>
+    </param>
+  </inputs>
+</tool>
\ No newline at end of file
diff --git a/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py b/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py
new file mode 100644
index 0000000..932779c
--- /dev/null
+++ b/lib/galaxy/tools/imp_exp/unpack_tar_gz_archive.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+"""
+Unpack a tar or tar.gz archive into a directory.
+
+usage: %prog archive_source dest_dir
+    --[url|file] source type, either a URL or a file.
+"""
+
+import os
+import sys
+import optparse
+import tarfile
+import tempfile
+import urllib2
+import math
+from base64 import b64decode
+
+# Set max size of archive/file that will be handled to be 100 GB. This is
+# arbitrary and should be adjusted as needed.
+MAX_SIZE = 100 * math.pow( 2, 30 )
+
+
+def url_to_file( url, dest_file ):
+    """
+    Transfer a file from a remote URL to a temporary file.
+    """
+    try:
+        url_reader = urllib2.urlopen( url )
+        CHUNK = 10 * 1024  # 10k
+        total = 0
+        fp = open( dest_file, 'wb')
+        while True:
+            chunk = url_reader.read( CHUNK )
+            if not chunk:
+                break
+            fp.write( chunk )
+            total += CHUNK
+            if total > MAX_SIZE:
+                break
+        fp.close()
+        return dest_file
+    except Exception as e:
+        print "Exception getting file from URL: %s" % e, sys.stderr
+        return None
+
+
+def check_archive( archive_file, dest_dir ):
+    """
+    Ensure that a tar archive has no absolute paths or relative paths outside
+    the archive.
+    """
+    with tarfile.open( archive_file, mode='r:gz' ) as archive_fp:
+        for arc_path in archive_fp.getnames():
+            assert os.path.normpath(
+                os.path.join(
+                    dest_dir,
+                    arc_path
+                ) ).startswith( dest_dir.rstrip(os.sep) + os.sep ), \
+                "Archive member would extract outside target directory: %s" % arc_path
+    return True
+
+
+def unpack_archive( archive_file, dest_dir ):
+    """
+    Unpack a tar and/or gzipped archive into a destination directory.
+    """
+    archive_fp = tarfile.open( archive_file, mode='r:gz' )
+    archive_fp.extractall( path=dest_dir )
+    archive_fp.close()
+
+
+def main(options, args):
+    is_url = bool( options.is_url )
+    is_file = bool( options.is_file )
+    archive_source, dest_dir = args
+
+    if options.is_b64encoded:
+        archive_source = b64decode( archive_source )
+        dest_dir = b64decode( dest_dir )
+
+    # Get archive from URL.
+    if is_url:
+        archive_file = url_to_file( archive_source, tempfile.NamedTemporaryFile( dir=dest_dir ).name )
+    elif is_file:
+        archive_file = archive_source
+
+    # Unpack archive.
+    check_archive( archive_file, dest_dir )
+    unpack_archive( archive_file, dest_dir )
+
+
+if __name__ == "__main__":
+    # Parse command line.
+    parser = optparse.OptionParser()
+    parser.add_option( '-U', '--url', dest='is_url', action="store_true", help='Source is a URL.' )
+    parser.add_option( '-F', '--file', dest='is_file', action="store_true", help='Source is a file.' )
+    parser.add_option( '-e', '--encoded', dest='is_b64encoded', action="store_true", default=False, help='Source and destination dir values are base64 encoded.' )
+    (options, args) = parser.parse_args()
+    try:
+        main(options, args)
+    except Exception as e:
+        print "Error unpacking tar/gz archive: %s" % e, sys.stderr
diff --git a/lib/galaxy/tools/lint.py b/lib/galaxy/tools/lint.py
new file mode 100644
index 0000000..7f99d70
--- /dev/null
+++ b/lib/galaxy/tools/lint.py
@@ -0,0 +1,138 @@
+"""This modules contains the functions that drive the tool linting framework."""
+from __future__ import print_function
+import inspect
+
+from galaxy.util import submodules
+from .parser import get_tool_source
+
+
+LEVEL_ALL = "all"
+LEVEL_WARN = "warn"
+LEVEL_ERROR = "error"
+
+
+def lint_tool_source(tool_source, level=LEVEL_ALL, fail_level=LEVEL_WARN, extra_modules=[], skip_types=[]):
+    lint_context = LintContext(level=level, skip_types=skip_types)
+    lint_tool_source_with(lint_context, tool_source, extra_modules)
+
+    return not lint_context.failed(fail_level)
+
+
+def lint_xml(tool_xml, level=LEVEL_ALL, fail_level=LEVEL_WARN, extra_modules=[], skip_types=[]):
+    lint_context = LintContext(level=level, skip_types=skip_types)
+    lint_xml_with(lint_context, tool_xml, extra_modules)
+
+    return not lint_context.failed(fail_level)
+
+
+def lint_tool_source_with(lint_context, tool_source, extra_modules=[]):
+    import galaxy.tools.linters
+    tool_xml = getattr(tool_source, "xml_tree", None)
+    linter_modules = submodules.submodules(galaxy.tools.linters)
+    linter_modules.extend(extra_modules)
+    for module in linter_modules:
+        tool_type = tool_source.parse_tool_type() or "default"
+        lint_tool_types = getattr(module, "lint_tool_types", ["default"])
+        if not ("*" in lint_tool_types or tool_type in lint_tool_types):
+            continue
+
+        for (name, value) in inspect.getmembers(module):
+            if callable(value) and name.startswith("lint_"):
+                # Look at the first argument to the linter to decide
+                # if we should lint the XML description or the abstract
+                # tool parser object.
+                first_arg = inspect.getargspec(value).args[0]
+                if first_arg == "tool_xml":
+                    if tool_xml is None:
+                        # XML linter and non-XML tool, skip for now
+                        continue
+                    else:
+                        lint_context.lint(name, value, tool_xml)
+                else:
+                    lint_context.lint(name, value, tool_source)
+
+
+def lint_xml_with(lint_context, tool_xml, extra_modules=[]):
+    tool_source = get_tool_source(xml_tree=tool_xml)
+    return lint_tool_source_with(lint_context, tool_source, extra_modules=extra_modules)
+
+
+# TODO: Nothing inherently tool-y about LintContext and in fact
+# it is reused for repositories in planemo. Therefore, it should probably
+# be moved to galaxy.util.lint.
+class LintContext(object):
+
+    def __init__(self, level, skip_types=[]):
+        self.skip_types = skip_types
+        self.level = level
+        self.found_errors = False
+        self.found_warns = False
+
+    def lint(self, name, lint_func, lint_target):
+        name = name.replace("tsts", "tests")[len("lint_"):]
+        if name in self.skip_types:
+            return
+        self.printed_linter_info = False
+        self.valid_messages = []
+        self.info_messages = []
+        self.warn_messages = []
+        self.error_messages = []
+        lint_func(lint_target, self)
+        # TODO: colorful emoji if in click CLI.
+        if self.error_messages:
+            status = "FAIL"
+        elif self.warn_messages:
+            status = "WARNING"
+        else:
+            status = "CHECK"
+
+        def print_linter_info():
+            if self.printed_linter_info:
+                return
+            self.printed_linter_info = True
+            print("Applying linter %s... %s" % (name, status))
+
+        for message in self.error_messages:
+            self.found_errors = True
+            print_linter_info()
+            print(".. ERROR: %s" % message)
+
+        if self.level != LEVEL_ERROR:
+            for message in self.warn_messages:
+                self.found_warns = True
+                print_linter_info()
+                print(".. WARNING: %s" % message)
+
+        if self.level == LEVEL_ALL:
+            for message in self.info_messages:
+                print_linter_info()
+                print(".. INFO: %s" % message)
+            for message in self.valid_messages:
+                print_linter_info()
+                print(".. CHECK: %s" % message)
+
+    def __handle_message(self, message_list, message, *args):
+        if args:
+            message = message % args
+        message_list.append(message)
+
+    def valid(self, message, *args):
+        self.__handle_message(self.valid_messages, message, *args)
+
+    def info(self, message, *args):
+        self.__handle_message(self.info_messages, message, *args)
+
+    def error(self, message, *args):
+        self.__handle_message(self.error_messages, message, *args)
+
+    def warn(self, message, *args):
+        self.__handle_message(self.warn_messages, message, *args)
+
+    def failed(self, fail_level):
+        found_warns = self.found_warns
+        found_errors = self.found_errors
+        if fail_level == LEVEL_WARN:
+            lint_fail = (found_warns or found_errors)
+        else:
+            lint_fail = found_errors
+        return lint_fail
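
A minimal driver sketch (tool path hypothetical; assumes get_tool_source from lib/galaxy/tools/parser accepts a file path):

    from galaxy.tools.lint import LEVEL_ALL, LEVEL_WARN, lint_tool_source
    from galaxy.tools.parser import get_tool_source

    tool_source = get_tool_source("my_tool.xml")  # hypothetical tool file
    ok = lint_tool_source(tool_source, level=LEVEL_ALL, fail_level=LEVEL_WARN)
    print("lint passed" if ok else "lint failed")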
diff --git a/lib/galaxy/tools/lint_util.py b/lib/galaxy/tools/lint_util.py
new file mode 100644
index 0000000..3e0c584
--- /dev/null
+++ b/lib/galaxy/tools/lint_util.py
@@ -0,0 +1,5 @@
+"""Utility functions shared by tool linters."""
+
+def is_datasource(tool_xml):
+    """Returns true if the tool is a datasource tool"""
+    return tool_xml.getroot().attrib.get('tool_type', '') == 'data_source'
diff --git a/lib/galaxy/tools/linters/__init__.py b/lib/galaxy/tools/linters/__init__.py
new file mode 100644
index 0000000..48d2f15
--- /dev/null
+++ b/lib/galaxy/tools/linters/__init__.py
@@ -0,0 +1 @@
+"""This package contains linting functions for Galaxy tools."""
diff --git a/lib/galaxy/tools/linters/citations.py b/lib/galaxy/tools/linters/citations.py
new file mode 100644
index 0000000..95c03d0
--- /dev/null
+++ b/lib/galaxy/tools/linters/citations.py
@@ -0,0 +1,32 @@
+"""This module contains a citation lint function.
+
+Citations describe references that should be used when consumers
+of the tool publish results.
+"""
+
+
+def lint_citations(tool_xml, lint_ctx):
+    """Ensure tool contains at least one valid citation."""
+    root = tool_xml.getroot()
+    citations = root.findall("citations")
+    if len(citations) > 1:
+        lint_ctx.error("More than one citation section found, behavior undefined.")
+        return
+
+    if len(citations) == 0:
+        lint_ctx.warn("No citations found, consider adding citations to your tool.")
+        return
+
+    valid_citations = 0
+    for citation in citations[0]:
+        if citation.tag != "citation":
+            lint_ctx.warn("Unknown tag discovered in citations block [%s], will be ignored." % citation.tag)
+        if "type" in citation.attrib:
+            citation_type = citation.attrib.get("type")
+            if citation_type not in ["doi", "bibtex"]:
+                lint_ctx.warn("Unknown citation type discovered [%s], will be ignored.", citation_type)
+            else:
+                valid_citations += 1
+
+    if valid_citations > 0:
+        lint_ctx.valid("Found %d likely valid citations.", valid_citations)
diff --git a/lib/galaxy/tools/linters/command.py b/lib/galaxy/tools/linters/command.py
new file mode 100644
index 0000000..16c9134
--- /dev/null
+++ b/lib/galaxy/tools/linters/command.py
@@ -0,0 +1,51 @@
+"""This module contains a linting function for a tool's command description.
+
+A command description describes how to build the command-line to execute
+from supplied inputs.
+"""
+
+
+def lint_command(tool_xml, lint_ctx):
+    """Ensure tool contains exactly one command and check attributes."""
+    root = tool_xml.getroot()
+    commands = root.findall("command")
+    if len(commands) > 1:
+        lint_ctx.error("More than one command tag found, behavior undefined.")
+        return
+
+    if len(commands) == 0:
+        lint_ctx.error("No command tag found, must specify a command template to execute.")
+        return
+
+    command = get_command(tool_xml)
+    if "TODO" in command:
+        lint_ctx.warn("Command template contains TODO text.")
+
+    command_attrib = command.attrib
+    interpreter_type = None
+    for key, value in command_attrib.items():
+        if key == "interpreter":
+            interpreter_type = value
+        elif key == "detect_errors":
+            detect_errors = value
+            if detect_errors not in ["default", "exit_code", "aggressive"]:
+                lint_ctx.warn("Unknown detect_errors attribute [%s]" % detect_errors)
+        else:
+            lint_ctx.warn("Unknown attribute [%s] encountered on command tag." % key)
+
+    interpreter_info = ""
+    if interpreter_type:
+        interpreter_info = " with interpreter of type [%s]" % interpreter_type
+    if interpreter_type:
+        lint_ctx.info("Command uses deprecated 'interpreter' attribute.")
+    lint_ctx.info("Tool contains a command%s." % interpreter_info)
+
+
+def get_command(tool_xml):
+    """Get command XML element from supplied XML root."""
+    root = tool_xml.getroot()
+    commands = root.findall("command")
+    command = None
+    if len(commands) == 1:
+        command = commands[0]
+    return command
diff --git a/lib/galaxy/tools/linters/general.py b/lib/galaxy/tools/linters/general.py
new file mode 100644
index 0000000..16dc43a
--- /dev/null
+++ b/lib/galaxy/tools/linters/general.py
@@ -0,0 +1,37 @@
+"""This module contains a linting functions for general aspects of the tool."""
+import re
+
+ERROR_VERSION_MSG = "Tool version is missing or empty."
+VALID_VERSION_MSG = "Tool defines a version [%s]."
+
+ERROR_NAME_MSG = "Tool name is missing or empty."
+VALID_NAME_MSG = "Tool defines a name [%s]."
+
+ERROR_ID_MSG = "Tool does not define an id attribute."
+VALID_ID_MSG = "Tool defines an id [%s]."
+
+lint_tool_types = ["*"]
+
+
+def lint_general(tool_source, lint_ctx):
+    """Check tool version, name, and id."""
+    version = tool_source.parse_version()
+    if not version:
+        lint_ctx.error(ERROR_VERSION_MSG)
+    else:
+        lint_ctx.valid(VALID_VERSION_MSG % version)
+
+    name = tool_source.parse_name()
+    if not name:
+        lint_ctx.error(ERROR_NAME_MSG)
+    else:
+        lint_ctx.valid(VALID_NAME_MSG % name)
+
+    tool_id = tool_source.parse_id()
+    if not tool_id:
+        lint_ctx.error(ERROR_ID_MSG)
+    else:
+        lint_ctx.valid(VALID_ID_MSG % tool_id)
+
+    if re.search(r"\s", tool_id):
+        lint_ctx.warn("Tool id contains a space - this is discouraged.")
diff --git a/lib/galaxy/tools/linters/help.py b/lib/galaxy/tools/linters/help.py
new file mode 100644
index 0000000..ecb303e
--- /dev/null
+++ b/lib/galaxy/tools/linters/help.py
@@ -0,0 +1,45 @@
+"""This module contains a linting function for a tool's help."""
+from galaxy.util import rst_to_html
+
+
+def lint_help(tool_xml, lint_ctx):
+    """Ensure tool contains exactly one valid RST help block."""
+    root = tool_xml.getroot()
+    helps = root.findall("help")
+    if len(helps) > 1:
+        lint_ctx.error("More than one help section found, behavior undefined.")
+        return
+
+    if len(helps) == 0:
+        lint_ctx.warn("No help section found, consider adding a help section to your tool.")
+        return
+
+    help = helps[0].text or ''
+    if not help.strip():
+        lint_ctx.warn("Help section appears to be empty.")
+        return
+
+    lint_ctx.valid("Tool contains help section.")
+    invalid_rst = rst_invalid(help)
+
+    if "TODO" in help:
+        lint_ctx.warn("Help contains TODO text.")
+
+    if invalid_rst:
+        lint_ctx.warn("Invalid reStructuredText found in help - [%s]." % invalid_rst)
+    else:
+        lint_ctx.valid("Help contains valid reStructuredText.")
+
+
+def rst_invalid(text):
+    """Predicate to determine if text is invalid reStructuredText.
+
+    Return False if the supplied text is valid reStructuredText,
+    otherwise return a string indicating the problem.
+    """
+    invalid_rst = False
+    try:
+        rst_to_html(text)
+    except Exception as e:
+        invalid_rst = str(e)
+    return invalid_rst
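
A quick sketch of the predicate on its own (rst_to_html is backed by docutils, so that dependency must be installed):

    from galaxy.tools.linters.help import rst_invalid

    print(rst_invalid("A plain paragraph of valid reStructuredText."))  # -> False
    # Malformed markup returns the docutils error message as a string instead.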
diff --git a/lib/galaxy/tools/linters/inputs.py b/lib/galaxy/tools/linters/inputs.py
new file mode 100644
index 0000000..2673649
--- /dev/null
+++ b/lib/galaxy/tools/linters/inputs.py
@@ -0,0 +1,122 @@
+"""This module contains a linting functions for tool inputs."""
+from ..lint_util import is_datasource
+
+
+def lint_inputs(tool_xml, lint_ctx):
+    """Lint parameters in a tool's inputs block."""
+    datasource = is_datasource(tool_xml)
+    inputs = tool_xml.findall("./inputs//param")
+    num_inputs = 0
+    for param in inputs:
+        num_inputs += 1
+        param_attrib = param.attrib
+        has_errors = False
+        if "type" not in param_attrib:
+            lint_ctx.error("Found param input with no type specified.")
+            has_errors = True
+        if "name" not in param_attrib and "argument" not in param_attrib:
+            lint_ctx.error("Found param input with no name specified.")
+            has_errors = True
+
+        if has_errors:
+            continue
+
+        param_type = param_attrib["type"]
+        param_name = param_attrib.get("name", param_attrib.get("argument"))
+        if param_type == "data":
+            if "format" not in param_attrib:
+                lint_ctx.warn("Param input [%s] with no format specified - 'data' format will be assumed.", param_name)
+
+        if param_type == "select":
+            dynamic_options = param.get("dynamic_options", None)
+            if dynamic_options is None:
+                dynamic_options = param.find("options")
+
+            select_options = _find_with_attribute(param, 'option', 'value')
+            if any(['value' not in option.attrib for option in select_options]):
+                lint_ctx.error("Option without value")
+
+            select_option_ids = [option.attrib.get('value', None) for option in select_options]
+
+            if dynamic_options is None and len(select_options) == 0:
+                message = "No options defined for select [%s]" % param_name
+                lint_ctx.warn(message)
+
+        # TODO: Validate type, much more...
+
+    conditional_selects = tool_xml.findall("./inputs//conditional")
+    for conditional in conditional_selects:
+        booleans = _find_with_attribute(conditional, "param", "type", "boolean")
+        selects = _find_with_attribute(conditional, "param", "type", "select")
+        # Should conditionals ever not have a select?
+        if not len(selects) and not len(booleans):
+            lint_ctx.warn("Conditional without <param type=\"select\" /> or <param type=\"boolean\" />")
+            continue
+
+        test_param_optional = False
+        for select in selects:
+            test_param_optional = test_param_optional or (select.attrib.get('optional', None) is not None)
+            select_options = _find_with_attribute(select, 'option', 'value')
+            select_option_ids = [option.attrib.get('value', None) for option in select_options]
+
+        for boolean in booleans:
+            test_param_optional = test_param_optional or (boolean.attrib.get('optional', None) is not None)
+            select_option_ids = [
+                boolean.attrib.get('truevalue', 'true'),
+                boolean.attrib.get('falsevalue', 'false')
+            ]
+
+        if test_param_optional:
+            lint_ctx.warn("Conditional test parameter declares an invalid optional attribute.")
+
+        whens = conditional.findall('./when')
+        if any(['value' not in when.attrib for when in whens]):
+            lint_ctx.error("When without value")
+
+        when_ids = [w.attrib.get('value', None) for w in whens]
+        when_ids = [i.lower() if i in ["True", "False"] else i for i in when_ids]
+
+        for select_id in select_option_ids:
+            if select_id not in when_ids:
+                lint_ctx.warn("No <when /> block found for select option '%s'" % select_id)
+
+        for when_id in when_ids:
+            if when_id not in select_option_ids:
+                lint_ctx.warn("No <option /> block found for when block '%s'" % when_id)
+
+    if datasource:
+        for datasource_tag in ('display', 'uihints'):
+            if tool_xml.find(datasource_tag) is None:
+                lint_ctx.info("%s tag usually present in data sources" % datasource_tag)
+
+    if num_inputs:
+        lint_ctx.info("Found %d input parameters.", num_inputs)
+    else:
+        if datasource:
+            lint_ctx.info("No input parameters, OK for data sources")
+        else:
+            lint_ctx.warn("Found no input parameters.")
+
+
+def lint_repeats(tool_xml, lint_ctx):
+    """Lint repeat blocks in tool inputs."""
+    repeats = tool_xml.findall("./inputs//repeat")
+    for repeat in repeats:
+        if "name" not in repeat.attrib:
+            lint_ctx.error("Repeat does not specify name attribute.")
+        if "title" not in repeat.attrib:
+            lint_ctx.error("Repeat does not specify title attribute.")
+
+
+def _find_with_attribute(element, tag, attribute, test_value=None):
+    rval = []
+    for el in (element.findall('./%s' % tag) or []):
+        if attribute not in el.attrib:
+            continue
+        value = el.attrib[attribute]
+        if test_value is not None:
+            if value == test_value:
+                rval.append(el)
+        else:
+            rval.append(el)
+    return rval
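
A minimal sketch (stub context as in the citations example) showing the select/when cross-check; option 'b' has no matching when block, so a warning is expected:

    from __future__ import print_function
    from xml.etree.ElementTree import ElementTree, fromstring
    from galaxy.tools.linters.inputs import lint_inputs

    class Ctx(object):
        def _p(self, msg, *args): print(msg % args if args else msg)
        info = warn = error = valid = _p

    tool = ElementTree(fromstring(
        '<tool><inputs><conditional name="c">'
        '<param name="mode" type="select">'
        '<option value="a">A</option><option value="b">B</option></param>'
        '<when value="a"><param name="x" type="text" /></when>'
        '</conditional></inputs></tool>'))
    lint_inputs(tool, Ctx())  # -> No <when /> block found for select option 'b'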
diff --git a/lib/galaxy/tools/linters/outputs.py b/lib/galaxy/tools/linters/outputs.py
new file mode 100644
index 0000000..03c5cfd
--- /dev/null
+++ b/lib/galaxy/tools/linters/outputs.py
@@ -0,0 +1,37 @@
+"""This module contains a linting functions for tool outputs."""
+
+
+def lint_output(tool_xml, lint_ctx):
+    """Check output elements, ensure there is at least one and check attributes."""
+    outputs = tool_xml.findall("./outputs")
+    if len(outputs) == 0:
+        lint_ctx.warn("Tool contains no outputs section, most tools should produce outputs.")
+        return
+    if len(outputs) > 1:
+        lint_ctx.warn("Tool contains multiple output sections, behavior undefined.")
+
+    num_outputs = 0
+
+    for output in list(outputs[0]):
+        if output.tag not in ["data", "collection"]:
+            lint_ctx.warn("Unknown element found in outputs [%s]" % output.tag)
+            continue
+        num_outputs += 1
+        output_attrib = output.attrib
+        if output.tag == "data":
+            format_set = False
+            if "format" in output_attrib:
+                format_set = True
+                format = output_attrib["format"]
+                if format == "input":
+                    lint_ctx.warn("Using format='input' on output data, format_source attribute is less ambiguous and should be used instead.")
+            elif "format_source" in output_attrib:
+                format_set = True
+            if not format_set:
+                lint_ctx.warn("Tool data output doesn't define an output format.")
+        elif output.tag == "collection":
+            if "type" not in output_attrib:
+                lint_ctx.warn("Collection output with undefined 'type' found.")
+    lint_ctx.info("%d outputs found.", num_outputs)
diff --git a/lib/galaxy/tools/linters/stdio.py b/lib/galaxy/tools/linters/stdio.py
new file mode 100644
index 0000000..d9e8524
--- /dev/null
+++ b/lib/galaxy/tools/linters/stdio.py
@@ -0,0 +1,60 @@
+"""This module contains a linting functions for tool error detection."""
+from .command import get_command
+
+
+def lint_stdio(tool_xml, lint_ctx):
+    stdios = tool_xml.findall("./stdio")
+    if not stdios:
+        command = get_command(tool_xml)
+        if command is None or not command.get("detect_errors"):
+            lint_ctx.info("No stdio definition found, tool will determine an error from stderr.")
+        return
+
+    if len(stdios) > 1:
+        lint_ctx.error("More than one stdio tag found, behavior undefined.")
+        return
+
+    stdio = stdios[0]
+    for child in list(stdio):
+        if child.tag == "regex":
+            _lint_regex(child, lint_ctx)
+        elif child.tag == "exit_code":
+            _lint_exit_code(child, lint_ctx)
+        else:
+            message = "Unknown stdio child tag discovered [%s]. "
+            message += "Valid options are exit_code and regex."
+            lint_ctx.warn(message % child.tag)
+
+
+def _lint_exit_code(child, lint_ctx):
+    for key, value in child.attrib.items():
+        if key == "range":
+            # TODO: validate
+            pass
+        elif key == "level":
+            _lint_level(value, lint_ctx)
+        elif key == "description":
+            pass
+        else:
+            lint_ctx.warn("Unknown attribute [%s] encountered on exit_code tag." % key)
+
+
+def _lint_regex(child, lint_ctx):
+    for key, value in child.attrib.items():
+        if key == "source":
+            if value not in ["stderr", "stdout", "both"]:
+                lint_ctx.error("Unknown error code level encountered [%s]" % value)
+        elif key == "level":
+            _lint_level(value, lint_ctx)
+        elif key == "match":
+            # TODO: validate
+            pass
+        elif key == "description":
+            pass
+        else:
+            lint_ctx.warn("Unknown attribute [%s] encountered on regex tag." % key)
+
+
+def _lint_level(level_value, lint_ctx):
+    if level_value not in ["warning", "fatal", "log"]:
+        lint_ctx.error("Unknown error code level encountered [%s]" % level_value)
diff --git a/lib/galaxy/tools/linters/tests.py b/lib/galaxy/tools/linters/tests.py
new file mode 100644
index 0000000..be1920b
--- /dev/null
+++ b/lib/galaxy/tools/linters/tests.py
@@ -0,0 +1,36 @@
+"""This module contains a linting functions for tool tests."""
+from ..lint_util import is_datasource
+
+
+# Misspelled so as not to be picked up by nosetests.
+def lint_tsts(tool_xml, lint_ctx):
+    tests = tool_xml.findall("./tests/test")
+    datasource = is_datasource(tool_xml)
+
+    if not tests:
+        if not datasource:
+            lint_ctx.warn("No tests found, most tools should define test cases.")
+        else:
+            lint_ctx.info("No tests found, that should be OK for data_sources.")
+
+    num_valid_tests = 0
+    for test in tests:
+        has_test = False
+        if "expect_failure" in test.attrib or "expect_exit_code" in test.attrib:
+            has_test = True
+        if len(test.findall("assert_stdout")) > 0:
+            has_test = True
+        if len(test.findall("assert_stdout")) > 0:
+            has_test = True
+
+        outputs = test.findall("output") + test.findall("output_collection")
+        if len(outputs) > 0:
+            has_test = True
+        if not has_test:
+            lint_ctx.warn("No outputs or expectations defined for tests, this test is likely invalid.")
+        else:
+            num_valid_tests += 1
+
+    if num_valid_tests or datasource:
+        lint_ctx.valid("%d test(s) found.", num_valid_tests)
+    else:
+        lint_ctx.warn("No valid test(s) found.")
diff --git a/lib/galaxy/tools/linters/xml_order.py b/lib/galaxy/tools/linters/xml_order.py
new file mode 100644
index 0000000..27220b0
--- /dev/null
+++ b/lib/galaxy/tools/linters/xml_order.py
@@ -0,0 +1,65 @@
+"""This module contains a linting functions for tool XML block order.
+
+For more information on the IUC standard for XML block order see -
+https://github.com/galaxy-iuc/standards.
+"""
+# https://github.com/galaxy-iuc/standards
+# https://github.com/galaxy-iuc/standards/pull/7/files
+TAG_ORDER = [
+    'description',
+    'macros',
+    'parallelism',
+    'requirements',
+    'code',
+    'stdio',
+    'version_command',
+    'command',
+    'environment_variables',
+    'configfiles',
+    'inputs',
+    'outputs',
+    'tests',
+    'help',
+    'citations',
+]
+
+DATASOURCE_TAG_ORDER = [
+    'description',
+    'macros',
+    'command',
+    'configfiles',
+    'inputs',
+    'request_param_translation',
+    'uihints',
+    'outputs',
+    'options',
+    'help',
+    'citations',
+]
+
+
+# Ensure the XML blocks appear in the correct order prescribed
+# by the tool author best practices.
+def lint_xml_order(tool_xml, lint_ctx):
+    tool_root = tool_xml.getroot()
+
+    if tool_root.attrib.get('tool_type', '') == 'data_source':
+        _validate_for_tags(tool_root, lint_ctx, DATASOURCE_TAG_ORDER)
+    else:
+        _validate_for_tags(tool_root, lint_ctx, TAG_ORDER)
+
+
+def _validate_for_tags(root, lint_ctx, tag_ordering):
+    last_tag = None
+    last_key = None
+    for elem in root:
+        tag = elem.tag
+        if tag in tag_ordering:
+            key = tag_ordering.index(tag)
+            if last_key is not None and last_key > key:
+                lint_ctx.warn("Best practice violation: [%s] elements should come before [%s]." % (tag, last_tag))
+            last_tag = tag
+            last_key = key
+        else:
+            lint_ctx.info("Unknown tag [%s] encountered, this may result in a warning in the future." % tag)
diff --git a/lib/galaxy/tools/loader.py b/lib/galaxy/tools/loader.py
new file mode 100644
index 0000000..0a86293
--- /dev/null
+++ b/lib/galaxy/tools/loader.py
@@ -0,0 +1,10 @@
+from galaxy.util.xml_macros import (
+    imported_macro_paths,
+    load,
+    raw_tool_xml_tree,
+    template_macro_params,
+)
+
+load_tool = load
+
+__all__ = ("load_tool", "raw_tool_xml_tree", "imported_macro_paths", "template_macro_params")
diff --git a/lib/galaxy/tools/loader_directory.py b/lib/galaxy/tools/loader_directory.py
new file mode 100644
index 0000000..bf78802
--- /dev/null
+++ b/lib/galaxy/tools/loader_directory.py
@@ -0,0 +1,279 @@
+"""Utilities for loading and reasoning about unparsed tools in directories."""
+import fnmatch
+import glob
+import logging
+import os
+import re
+import sys
+
+import yaml
+
+from galaxy.util import checkers
+from .parser import get_tool_source
+from ..tools import loader
+
+log = logging.getLogger(__name__)
+
+PATH_DOES_NOT_EXIST_ERROR = "Could not load tools from path [%s] - this path does not exist."
+PATH_AND_RECURSIVE_ERROR = "Cannot specify a single file and recursive."
+LOAD_FAILURE_ERROR = "Failed to load tool with path %s."
+TOOL_LOAD_ERROR = object()
+TOOL_REGEX = re.compile(r"<tool\s")
+
+YAML_EXTENSIONS = [".yaml", ".yml", ".json"]
+CWL_EXTENSIONS = YAML_EXTENSIONS + [".cwl"]
+
+
+def load_exception_handler(path, exc_info):
+    """Default exception handler for use by load_tool_elements_from_path."""
+    log.warning(LOAD_FAILURE_ERROR % path, exc_info=exc_info)
+
+
+def find_possible_tools_from_path(
+    path,
+    recursive=False,
+    enable_beta_formats=False,
+):
+    """Walk a directory and find potential tool files."""
+    possible_tool_files = []
+    for possible_tool_file in _find_tool_files(
+        path, recursive=recursive,
+        enable_beta_formats=enable_beta_formats
+    ):
+        try:
+            does_look_like_a_tool = looks_like_a_tool(
+                possible_tool_file,
+                enable_beta_formats=enable_beta_formats
+            )
+        except IOError:
+            # Some problem reading the tool file, skip.
+            continue
+
+        if does_look_like_a_tool:
+            possible_tool_files.append(possible_tool_file)
+
+    return possible_tool_files
+
+
+def load_tool_sources_from_path(
+    path,
+    load_exception_handler=load_exception_handler,
+    recursive=False,
+    register_load_errors=False,
+):
+    """Walk a directory and ToolSource objects."""
+    return _load_tools_from_path(
+        path,
+        load_exception_handler=load_exception_handler,
+        recursive=recursive,
+        register_load_errors=register_load_errors,
+        loader_func=get_tool_source,
+        enable_beta_formats=True,
+    )
+
+
+def load_tool_elements_from_path(
+    path,
+    load_exception_handler=load_exception_handler,
+    recursive=False,
+    register_load_errors=False,
+):
+    """Walk a directory and load tool XML elements."""
+    return _load_tools_from_path(
+        path,
+        load_exception_handler=load_exception_handler,
+        recursive=recursive,
+        register_load_errors=register_load_errors,
+        loader_func=loader.load_tool,
+        enable_beta_formats=False,
+    )
+
+
+def _load_tools_from_path(
+    path,
+    load_exception_handler,
+    recursive,
+    register_load_errors,
+    loader_func,
+    enable_beta_formats,
+):
+    loaded_objects = []
+    for possible_tool_file in find_possible_tools_from_path(
+        path,
+        recursive=recursive,
+        enable_beta_formats=enable_beta_formats,
+    ):
+        try:
+            tool_element = loader_func(possible_tool_file)
+            loaded_objects.append((possible_tool_file, tool_element))
+        except Exception:
+            exc_info = sys.exc_info()
+            load_exception_handler(possible_tool_file, exc_info)
+            if register_load_errors:
+                loaded_objects.append((possible_tool_file, TOOL_LOAD_ERROR))
+    return loaded_objects
+
+
+def is_tool_load_error(obj):
+    """Predicate to determine if object loaded for tool is a tool error."""
+    return obj is TOOL_LOAD_ERROR
+
+
+def looks_like_a_tool(path, invalid_names=[], enable_beta_formats=False):
+    """Quick check to see if a file looks like it may be a tool file.
+
+    Whether true in a strict sense or not, let's say the intention and
+    purpose of this procedure is to serve as a filter - all valid tools must
+    "look like a tool", but not everything that looks like a tool is actually
+    a valid tool.
+
+    invalid_names may be supplied in the context of the tool shed to quickly
+    rule out common tool shed XML files.
+    """
+    looks = False
+
+    if os.path.basename(path) in invalid_names:
+        return False
+
+    if looks_like_a_tool_xml(path):
+        looks = True
+
+    if not looks and enable_beta_formats:
+        for tool_checker in BETA_TOOL_CHECKERS.values():
+            if tool_checker(path):
+                looks = True
+                break
+
+    return looks
+
+
+def looks_like_a_tool_xml(path):
+    """Quick check to see if a file looks like it may be a Galaxy XML tool file."""
+    full_path = os.path.abspath(path)
+
+    if not full_path.endswith(".xml"):
+        return False
+
+    if not os.path.getsize(full_path):
+        return False
+
+    if(checkers.check_binary(full_path) or
+       checkers.check_image(full_path) or
+       checkers.check_gzip(full_path)[0] or
+       checkers.check_bz2(full_path)[0] or
+       checkers.check_zip(full_path)):
+        return False
+
+    with open(path, "r") as f:
+        start_contents = f.read(5 * 1024)
+        if TOOL_REGEX.search(start_contents):
+            return True
+
+    return False
+
+
+def is_a_yaml_with_class(path, classes):
+    """Determine if a file is a valid YAML with a supplied ``class`` entry."""
+    if not _has_extension(path, YAML_EXTENSIONS):
+        return False
+
+    with open(path, "r") as f:
+        try:
+            as_dict = yaml.safe_load(f)
+        except Exception:
+            return False
+
+    if not isinstance(as_dict, dict):
+        return False
+
+    file_class = as_dict.get("class", None)
+    return file_class in classes
+
+
+def looks_like_a_tool_yaml(path):
+    """Quick check to see if a file looks like it may be a Galaxy YAML tool file."""
+    return is_a_yaml_with_class(path, ["GalaxyTool"])
+
+
+def looks_like_a_cwl_artifact(path, classes=None):
+    """Quick check to see if a file looks like it may be a CWL artifact."""
+    if not _has_extension(path, CWL_EXTENSIONS):
+        return False
+
+    with open(path, "r") as f:
+        try:
+            as_dict = yaml.safe_load(f)
+        except Exception:
+            return False
+
+    if not isinstance(as_dict, dict):
+        return False
+
+    file_class = as_dict.get("class", None)
+    if classes is not None and file_class not in classes:
+        return False
+
+    file_cwl_version = as_dict.get("cwlVersion", None)
+    return file_cwl_version is not None
+
+
+def looks_like_a_tool_cwl(path):
+    """Quick check to see if a file looks like it may be a CWL tool."""
+    return looks_like_a_cwl_artifact(path, classes=["CommandLineTool", "ExpressionTool"])
+
+
+def _find_tool_files(path, recursive, enable_beta_formats):
+    is_file = not os.path.isdir(path)
+    if not os.path.exists(path):
+        raise Exception(PATH_DOES_NOT_EXIST_ERROR % path)
+    elif is_file and recursive:
+        raise Exception(PATH_AND_RECURSIVE_ERROR)
+    elif is_file:
+        return [os.path.abspath(path)]
+    else:
+        if enable_beta_formats:
+            if not recursive:
+                files = glob.glob(path + "/*")
+            else:
+                files = _find_files(path, "*")
+        else:
+            if not recursive:
+                files = glob.glob(path + "/*.xml")
+            else:
+                files = _find_files(path, "*.xml")
+        return [os.path.abspath(_) for _ in files]
+
+
+def _has_extension(path, extensions):
+    return any(path.endswith(e) for e in extensions)
+
+
+def _find_files(directory, pattern='*'):
+    if not os.path.exists(directory):
+        raise ValueError("Directory not found {}".format(directory))
+
+    matches = []
+    for root, dirnames, filenames in os.walk(directory):
+        for filename in filenames:
+            full_path = os.path.join(root, filename)
+            if fnmatch.filter([full_path], pattern):
+                matches.append(os.path.join(root, filename))
+    return matches
+
+
+BETA_TOOL_CHECKERS = {
+    'yaml': looks_like_a_tool_yaml,
+    'cwl': looks_like_a_tool_cwl,
+}
+
+__all__ = (
+    "find_possible_tools_from_path",
+    "is_a_yaml_with_class",
+    "is_tool_load_error",
+    "load_tool_elements_from_path",
+    "load_tool_sources_from_path",
+    "looks_like_a_cwl_artifact",
+    "looks_like_a_tool_cwl",
+    "looks_like_a_tool_xml",
+    "looks_like_a_tool_yaml",
+)
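
A minimal usage sketch (directory path hypothetical) for scanning a directory of tools:

    from galaxy.tools.loader_directory import load_tool_sources_from_path

    for path, tool_source in load_tool_sources_from_path("tools/", recursive=True):
        print path, type(tool_source)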
diff --git a/lib/galaxy/tools/merge_collection.xml b/lib/galaxy/tools/merge_collection.xml
new file mode 100644
index 0000000..c6f44b4
--- /dev/null
+++ b/lib/galaxy/tools/merge_collection.xml
@@ -0,0 +1,279 @@
+<tool id="__MERGE_COLLECTION__"
+      name="Merge Collections"
+      version="1.0.0"
+      tool_type="merge_collection">
+  <description>into single list of datasets</description>
+  <type class="MergeCollectionTool" module="galaxy.tools" />
+  <action module="galaxy.tools.actions.model_operations"
+          class="ModelOperationToolAction"/>
+  <macros>
+      <xml name="suffix_param">
+          <param name="suffix_pattern" label="Use the follow suffix pattern:" help="Describe the suffix pattern to use when joing element name and data copy number. For instance, the default is '_#', which will produce _1 as the first suffix used." value="_#" type="text">
+              <sanitizer invalid_char="">
+                  <valid initial="string.letters,string.digits">
+                      <add value="_" />
+                      <add value="-" />
+                      <add value="#" />
+                  </valid>
+              </sanitizer>
+          </param>
+      </xml>
+      <xml name="test_collecton_simple_then_alternative">
+          <collection type="list">
+            <element name="sample1" value ="simple_line.txt"/>
+            <element name="sample2" value ="simple_line_alternative.txt"/>
+          </collection>
+      </xml>
+      <xml name="test_collection_alternative_then_simple">
+          <collection type="list">
+            <element name="sample1" value ="simple_line_alternative.txt"/>
+            <element name="sample2" value ="simple_line.txt"/>
+          </collection>
+      </xml>
+      <xml name="advanced_section" tokens="duplicate_handling,suffix_pattern">
+          <section name="advanced">
+              <conditional name="conflict">
+                  <param name="duplicate_options" value="@DUPLICATE_HANDLING@" />
+                  <param name="suffix_pattern" value="@SUFFIX_PATTERN@" />
+              </conditional>
+          </section>
+      </xml>
+      <xml name="assert_is_simple_line">
+          <assert_contents>
+              <has_text_matching expression="^This is a line of text.\n$"/>
+          </assert_contents>
+      </xml>
+      <xml name="assert_is_simple_line_alternative">
+          <assert_contents>
+              <has_text_matching expression="^This is a different line of text.\n$"/>
+          </assert_contents>
+      </xml>
+  </macros>
+  <inputs>
+    <repeat name="inputs" title="Input Collections" min="2">
+        <param name="input" type="data_collection" label="Input Collection" />
+    </repeat>
+    <section name="advanced" title="Advanced Options">
+        <conditional name="conflict">
+          <param name="duplicate_options" type="select" label="How should conflicts (or potential conflicts) be handled?"
+                 help="Collection elements must have unique element identifiers, when appending how should unique identifiers be assured.">
+            <option value="suffix_conflict">Append suffix to conflicted element identifers</option>
+            <option value="suffix_conflict_rest">Append suffix to conflicted element identifers after first one encountered</option>
+            <option value="suffix_every">Append suffix to every element identifer</option>
+            <option value="keep_first" selected="true">Keep first instance</option>
+            <option value="keep_last">Keep last instance</option>
+            <option value="fail">Fail collection creation</option>
+          </param>      
+          <when value="suffix_conflict">
+            <expand macro="suffix_param" />
+          </when>
+          <when value="suffix_conflict_rest">
+            <expand macro="suffix_param" />
+          </when>
+          <when value="suffix_every">
+            <expand macro="suffix_param" />
+          </when>
+          <when value="keep_first" />
+          <when value="keep_last" />
+          <when value="fail" />
+        </conditional>
+    </section>
+  </inputs>
+  <outputs>
+    <collection name="output" format_source="input" type_source="inputs_0|input" label="${on_string} (merged)" >
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <repeat name="inputs">
+        <param name="input">
+          <collection type="list:paired">
+            <element name="i1">
+              <collection type="paired">
+                <element name="forward" value="simple_line.txt" />
+                <element name="reverse" value="simple_line_alternative.txt" />
+              </collection>
+            </element>          
+          </collection>
+        </param>
+      </repeat>
+      <repeat name="inputs">
+        <param name="input">
+          <collection type="list:paired">
+            <element name="i2">
+              <collection type="paired">
+                <element name="forward" value="simple_line.txt" />
+                <element name="reverse" value="simple_line_alternative.txt" />
+              </collection>
+            </element>           
+          </collection>
+        </param>
+      </repeat>
+      <output_collection name="output" type="list:paired">
+        <element name="i1">
+          <element name="forward">
+            <expand macro="assert_is_simple_line" />
+          </element>
+          <element name="reverse">
+            <expand macro="assert_is_simple_line_alternative" />
+          </element>
+        </element>
+        <element name="i2">
+          <element name="forward">
+            <expand macro="assert_is_simple_line" />
+          </element>
+          <element name="reverse">
+            <expand macro="assert_is_simple_line_alternative" />
+          </element>
+        </element>        
+      </output_collection>
+    </test>
+    <test>
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collecton_simple_then_alternative" />
+        </param>
+      </repeat>
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collecton_simple_then_alternative" />
+        </param>      
+      </repeat>
+      <expand macro="advanced_section" duplicate_handling="suffix_every" suffix_pattern="__#" />
+      <output_collection name="output" type="list">
+        <element name="sample1__1">
+          <assert_contents>
+            <has_text_matching expression="^This is a line of text.\n$"/>
+          </assert_contents>
+        </element>
+        <element name="sample2__2">
+          <assert_contents>
+            <has_text_matching expression="^This is a different line of text.\n$"/>
+          </assert_contents>
+        </element>
+        <element name="sample1__1">
+          <assert_contents>
+            <has_text_matching expression="^This is a line of text.\n$"/>
+          </assert_contents>
+        </element>
+        <element name="sample2__1">
+          <assert_contents>
+            <has_text_matching expression="^This is a different line of text.\n$"/>
+          </assert_contents>
+        </element>                
+      </output_collection>
+    </test>
+    <test>
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collecton_simple_then_alternative" />
+        </param>
+      </repeat>
+      <repeat name="inputs">
+        <param name="input">
+          <collection type="list">
+            <element name="sample3" value ="simple_line.txt"/>
+            <element name="sample4" value ="simple_line_alternative.txt"/>
+          </collection>
+        </param>      
+      </repeat>
+      <expand macro="advanced_section" duplicate_handling="suffix_every" suffix_pattern="__#" />
+      <output_collection name="output" type="list">
+        <element name="sample1__1">
+            <expand macro="assert_is_simple_line" />
+        </element>
+        <element name="sample2__1">
+            <expand macro="assert_is_simple_line_alternative" />
+        </element>
+        <element name="sample3__2">
+            <expand macro="assert_is_simple_line" />
+        </element>
+        <element name="sample4__2">
+            <expand macro="assert_is_simple_line_alternative" />
+        </element>                
+      </output_collection>
+    </test>
+    <test>
+      <expand macro="advanced_section" duplicate_handling="suffix_conflict_rest" suffix_pattern="-#" />
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collecton_simple_then_alternative" />
+        </param>
+      </repeat>
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collecton_simple_then_alternative" />
+        </param>      
+      </repeat>
+      <output_collection name="output" type="list">
+        <element name="sample1">
+            <expand macro="assert_is_simple_line" />
+        </element>
+        <element name="sample2">
+            <expand macro="assert_is_simple_line_alternative" />
+        </element>
+        <element name="sample1-2">
+            <expand macro="assert_is_simple_line" />
+        </element>
+        <element name="sample2-2">
+            <expand macro="assert_is_simple_line_alternative" />
+        </element>                
+      </output_collection>
+    </test>
+    <test>
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collecton_simple_then_alternative" />
+        </param>
+      </repeat>
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collection_alternative_then_simple" />
+        </param>
+      </repeat>
+      <output_collection name="output" type="list">
+        <element name="sample1">
+          <assert_contents>
+            <has_text_matching expression="^This is a line of text.\n$"/>
+          </assert_contents>
+        </element>
+        <element name="sample2">
+          <assert_contents>
+            <has_text_matching expression="^This is a different line of text.\n$"/>
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+    <test>
+      <expand macro="advanced_section" duplicate_handling="keep_last" suffix_pattern="__#" />
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collecton_simple_then_alternative" />
+        </param>
+      </repeat>
+      <repeat name="inputs">
+        <param name="input">
+          <expand macro="test_collection_alternative_then_simple" />
+        </param>
+      </repeat>
+      <output_collection name="output" type="list">
+        <element name="sample1">
+          <assert_contents>
+            <has_text_matching expression="^This is a different line of text.\n$"/>
+          </assert_contents>
+        </element>
+        <element name="sample2">
+          <assert_contents>
+            <has_text_matching expression="^This is a line of text.\n$"/>
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+  <help>
+    This tool takes two lists and creates a single unified list.
+
+    This tool will create new history datasets for your collection
+    but your quota usage will not increase.
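+
+    For example, merging two lists that both contain elements named
+    ``sample1`` and ``sample2`` with ``duplicate_handling`` set to
+    ``suffix_every`` and ``suffix_pattern`` set to ``__#`` produces a
+    single list with elements ``sample1__1``, ``sample2__1``,
+    ``sample1__2`` and ``sample2__2``.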
+  </help>
+</tool>
diff --git a/lib/galaxy/tools/parameters/__init__.py b/lib/galaxy/tools/parameters/__init__.py
new file mode 100644
index 0000000..754381b
--- /dev/null
+++ b/lib/galaxy/tools/parameters/__init__.py
@@ -0,0 +1,214 @@
+"""
+Classes encapsulating Galaxy tool parameters.
+"""
+import re
+from json import dumps, loads
+
+from galaxy.util.expressions import ExpressionContext
+from galaxy.util.json import json_fix
+
+from .basic import DataCollectionToolParameter, DataToolParameter, RuntimeValue, SelectToolParameter
+from .grouping import Conditional, Repeat, Section, UploadDataset
+
+REPLACE_ON_TRUTHY = object()
+
+# Some tools use the code tag and access the code base, expecting certain tool parameters to be available here.
+__all__ = ( 'DataCollectionToolParameter', 'DataToolParameter', 'SelectToolParameter' )
+
+
+def visit_input_values( inputs, input_values, callback, name_prefix='', label_prefix='', parent_prefix='', context=None, no_replacement_value=REPLACE_ON_TRUTHY ):
+    """
+    Given a tool's parameter definition (`inputs`) and a specific set of
+    parameter `values`, call `callback` for each non-grouping parameter,
+    passing the parameter object, value, a constructed unique name,
+    and a display label.
+
+    If the callback returns a value, it will replace the old value.
+
+    >>> from xml.etree.ElementTree import XML
+    >>> from galaxy.util.bunch import Bunch
+    >>> from galaxy.util.odict import odict
+    >>> from galaxy.tools.parameters.basic import TextToolParameter, BooleanToolParameter
+    >>> from galaxy.tools.parameters.grouping import Conditional, Repeat
+    >>> a = TextToolParameter( None, XML( '<param name="a"/>' ) )
+    >>> b = Repeat()
+    >>> c = TextToolParameter( None, XML( '<param name="c"/>' ) )
+    >>> d = Repeat()
+    >>> e = TextToolParameter( None, XML( '<param name="e"/>' ) )
+    >>> f = Conditional()
+    >>> g = BooleanToolParameter( None, XML( '<param name="g"/>' ) )
+    >>> h = TextToolParameter( None, XML( '<param name="h"/>' ) )
+    >>> i = TextToolParameter( None, XML( '<param name="i"/>' ) )
+    >>> b.name = 'b'
+    >>> b.inputs = odict([ ('c', c), ('d', d) ])
+    >>> d.name = 'd'
+    >>> d.inputs = odict([ ('e', e), ('f', f) ])
+    >>> f.test_param = g
+    >>> f.name = 'f'
+    >>> f.cases = [ Bunch( value='true', inputs= { 'h': h } ), Bunch( value='false', inputs= { 'i': i } ) ]
+    >>>
+    >>> def visitor( input, value, prefix, prefixed_name, **kwargs ):
+    ...     print 'name=%s, prefix=%s, prefixed_name=%s, value=%s' % ( input.name, prefix, prefixed_name, value )
+    >>> inputs = odict([('a',a),('b',b)])
+    >>> nested = odict([ ('a', 1), ('b', [ odict([('c', 3), ( 'd', [odict([ ('e', 5), ('f', odict([ ('g', True), ('h', 7) ])) ]) ])]) ]) ])
+    >>> visit_input_values( inputs, nested, visitor )
+    name=a, prefix=, prefixed_name=a, value=1
+    name=c, prefix=b_0|, prefixed_name=b_0|c, value=3
+    name=e, prefix=b_0|d_0|, prefixed_name=b_0|d_0|e, value=5
+    name=g, prefix=b_0|d_0|, prefixed_name=b_0|d_0|f|g, value=True
+    name=h, prefix=b_0|d_0|, prefixed_name=b_0|d_0|f|h, value=7
+    >>> params_from_strings( inputs, params_to_strings( inputs, nested, None ), None )[ 'b' ][ 0 ][ 'd' ][ 0 ][ 'f' ][ 'g' ] is True
+    True
+    """
+    def callback_helper( input, input_values, name_prefix, label_prefix, parent_prefix, context=None, error=None ):
+        args = {
+            'input'             : input,
+            'parent'            : input_values,
+            'value'             : input_values.get( input.name ),
+            'prefixed_name'     : '%s%s' % ( name_prefix, input.name ),
+            'prefixed_label'    : '%s%s' % ( label_prefix, input.label or input.name ),
+            'prefix'            : parent_prefix,
+            'context'           : context,
+            'error'             : error
+        }
+        if input.name not in input_values:
+            args[ 'error' ] = 'No value found for \'%s\'.' % args.get( 'prefixed_label' )
+        new_value = callback( **args )
+        if no_replacement_value is REPLACE_ON_TRUTHY:
+            replace = bool( new_value )
+        else:
+            replace = new_value != no_replacement_value
+        if replace:
+            input_values[ input.name ] = new_value
+
+    context = ExpressionContext( input_values, context )
+    payload = { 'context': context, 'no_replacement_value': no_replacement_value }
+    for input in inputs.values():
+        if isinstance( input, Repeat ) or isinstance( input, UploadDataset ):
+            values = input_values[ input.name ] = input_values.get( input.name, [] )
+            for i, d in enumerate( values ):
+                d[ '__index__' ] = i
+                new_name_prefix = name_prefix + '%s_%d|' % ( input.name, i )
+                new_label_prefix = label_prefix + '%s %d > ' % ( input.title, i + 1 )
+                visit_input_values( input.inputs, d, callback, new_name_prefix, new_label_prefix, parent_prefix=new_name_prefix, **payload )
+        elif isinstance( input, Conditional ):
+            values = input_values[ input.name ] = input_values.get( input.name, {} )
+            new_name_prefix = name_prefix + input.name + '|'
+            case_error = None
+            try:
+                input.get_current_case( values[ input.test_param.name ] )
+            except:
+                case_error = 'The selected case is unavailable/invalid.'
+            callback_helper( input.test_param, values, new_name_prefix, label_prefix, parent_prefix=name_prefix, context=context, error=case_error )
+            values[ '__current_case__' ] = input.get_current_case( values[ input.test_param.name ] )
+            visit_input_values( input.cases[ values[ '__current_case__' ] ].inputs, values, callback, new_name_prefix, label_prefix, parent_prefix=name_prefix, **payload )
+        elif isinstance( input, Section ):
+            values = input_values[ input.name ] = input_values.get( input.name, {} )
+            new_name_prefix = name_prefix + input.name + '|'
+            visit_input_values( input.inputs, values, callback, new_name_prefix, label_prefix, parent_prefix=name_prefix, **payload )
+        else:
+            callback_helper( input, input_values, name_prefix, label_prefix, parent_prefix=parent_prefix, context=context )
+
+
+def check_param( trans, param, incoming_value, param_values ):
+    """
+    Check the value of a single parameter `param`. The value in
+    `incoming_value` is converted from its HTML encoding and validated.
+    The `param_values` argument contains the processed values of
+    previous parameters (this may actually be an ExpressionContext
+    when dealing with grouping scenarios).
+    """
+    value = incoming_value
+    error = None
+    try:
+        if trans.workflow_building_mode:
+            if isinstance( value, RuntimeValue ):
+                return [ { '__class__' : 'RuntimeValue' }, None ]
+            if isinstance( value, dict ):
+                if value.get( '__class__' ) == 'RuntimeValue':
+                    return [ value, None ]
+        value = param.from_json( value, trans, param_values )
+        param.validate( value, trans )
+    except ValueError as e:
+        error = str( e )
+    return value, error
+
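+# Illustrative usage sketch (assumes a populated `trans`, a parameter object
+# `param` and caller-side dicts `incoming` and `errors`; not part of the
+# public API):
+#
+#     value, error = check_param( trans, param, incoming.get( param.name ), params )
+#     if error:
+#         errors[ param.name ] = error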
+
+def params_to_strings( params, param_values, app ):
+    """
+    Convert a dictionary of parameter values to a dictionary of strings
+    suitable for persisting. The `value_to_basic` method of each parameter
+    is called to convert its value to basic types, the result of which
+    is then JSON encoded (thus allowing complex nested parameters and
+    such).
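+
+    A minimal doctest sketch (values without a matching parameter are
+    JSON-encoded directly):
+
+    >>> params_to_strings( {}, { 'a': 1 }, None )
+    {'a': '1'}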
+    """
+    rval = dict()
+    for key, value in param_values.items():
+        if key in params:
+            value = params[ key ].value_to_basic( value, app )
+        rval[ key ] = str( dumps( value ) )
+    return rval
+
+
+def params_from_strings( params, param_values, app, ignore_errors=False ):
+    """
+    Convert a dictionary of strings as produced by `params_to_strings`
+    back into parameter values (decode the json representation and then
+    allow each parameter to convert the basic types into the parameter's
+    preferred form).
+    """
+    rval = dict()
+    for key, value in param_values.items():
+        value = json_fix( loads( value ) )
+        if key in params:
+            value = params[ key ].value_from_basic( value, app, ignore_errors )
+        rval[ key ] = value
+    return rval
+
+
+def params_to_incoming( incoming, inputs, input_values, app, name_prefix="" ):
+    """
+    Given a tool's parameter definition (`inputs`) and a specific set of
+    parameter `input_values` objects, populate `incoming` with the flat HTML form values.
+
+    Useful for e.g. the rerun function.
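+
+    For instance (illustrative), a repeat named 'b' containing a text
+    parameter 'c' contributes a flat key 'b_0|c' to `incoming`.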
+    """
+    for input in inputs.values():
+        if isinstance( input, Repeat ) or isinstance( input, UploadDataset ):
+            for d in input_values[ input.name ]:
+                index = d[ '__index__' ]
+                new_name_prefix = name_prefix + '%s_%d|' % ( input.name, index )
+                params_to_incoming( incoming, input.inputs, d, app, new_name_prefix )
+        elif isinstance( input, Conditional ):
+            values = input_values[ input.name ]
+            current = values[ '__current_case__' ]
+            new_name_prefix = name_prefix + input.name + '|'
+            incoming[ new_name_prefix + input.test_param.name ] = values[ input.test_param.name ]
+            params_to_incoming( incoming, input.cases[current].inputs, values, app, new_name_prefix )
+        elif isinstance( input, Section ):
+            values = input_values[ input.name ]
+            new_name_prefix = name_prefix + input.name + '|'
+            params_to_incoming( incoming, input.inputs, values, app, new_name_prefix )
+        else:
+            value = input_values.get( input.name )
+            incoming[ name_prefix + input.name ] = value
+
+
+def update_param( prefixed_name, input_values, new_value ):
+    """
+    Given a prefixed parameter name, e.g. 'parameter_0|parameter_1', update
+    the corresponding input value in a nested input values dictionary.
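+
+    For example (illustrative):
+
+    >>> values = { 'repeat': [ { 'param': 1 } ] }
+    >>> update_param( 'repeat_0|param', values, 2 )
+    >>> values[ 'repeat' ][ 0 ][ 'param' ]
+    2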
+    """
+    for key in input_values:
+        match = re.match( r'^' + key + r'_(\d+)\|(.+)', prefixed_name )
+        if match and not key.endswith( "|__identifier__" ):
+            index = int( match.group( 1 ) )
+            if isinstance( input_values[ key ], list ) and len( input_values[ key ] ) > index:
+                update_param( match.group( 2 ), input_values[ key ][ index ], new_value )
+        else:
+            match = re.match( r'^' + key + r'\|(.+)', prefixed_name )
+            if isinstance( input_values[ key ], dict ) and match:
+                update_param( match.group( 1 ), input_values[ key ], new_value )
+            elif prefixed_name == key:
+                input_values[ key ] = new_value
diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py
new file mode 100644
index 0000000..e96d55c
--- /dev/null
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -0,0 +1,2090 @@
+"""
+Basic tool parameters.
+"""
+
+import logging
+import re
+import os
+import os.path
+from six import string_types
+from xml.etree.ElementTree import XML
+
+from galaxy import util
+from galaxy.util import string_as_bool, sanitize_param, unicodify
+from galaxy.util.expressions import ExpressionContext
+from sanitize import ToolParameterSanitizer
+import validation
+import galaxy.tools.parser
+from ..parser import get_input_source as ensure_input_source
+from ..parameters import history_query
+from ..parameters import dynamic_options
+from .dataset_matcher import DatasetMatcher
+from .dataset_matcher import DatasetCollectionMatcher
+from galaxy.web import url_for
+from galaxy.util.dictifiable import Dictifiable
+import galaxy.model
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+
+workflow_building_modes = Bunch( DISABLED=False, ENABLED=True, USE_HISTORY=1 )
+
+WORKFLOW_PARAMETER_REGULAR_EXPRESSION = re.compile( '''\$\{.+?\}''' )
+
+
+def contains_workflow_parameter( value, search=False ):
+    if not isinstance( value, string_types ):
+        return False
+    if search and WORKFLOW_PARAMETER_REGULAR_EXPRESSION.search( value ):
+        return True
+    if not search and WORKFLOW_PARAMETER_REGULAR_EXPRESSION.match( value ):
+        return True
+    return False
+
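+# For example: contains_workflow_parameter( '${name}' ) is True, while
+# contains_workflow_parameter( 'text ${name}' ) is True only when
+# search=True.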
+
+def parse_dynamic_options( param, input_source ):
+    options_elem = input_source.parse_dynamic_options_elem()
+    if options_elem is not None:
+        return dynamic_options.DynamicOptions( options_elem, param )
+    return None
+
+
+class ToolParameter( object, Dictifiable ):
+    """
+    Describes a parameter accepted by a tool. This is just a simple stub at the
+    moment but in the future should encapsulate more complex parameters (lists
+    of valid choices, validation logic, ...)
+    """
+    dict_collection_visible_keys = ( 'name', 'argument', 'type', 'label', 'help', 'refresh_on_change' )
+
+    def __init__( self, tool, input_source, context=None ):
+        input_source = ensure_input_source(input_source)
+        self.tool = tool
+        self.refresh_on_change_values = []
+        self.argument = input_source.get( "argument" )
+        self.name = ToolParameter.parse_name( input_source )
+        self.type = input_source.get( "type" )
+        self.hidden = input_source.get( "hidden", False )
+        self.refresh_on_change = input_source.get_bool( "refresh_on_change", False )
+        self.optional = input_source.parse_optional()
+        self.is_dynamic = False
+        self.label = input_source.parse_label()
+        self.help = input_source.parse_help()
+        sanitizer_elem = input_source.parse_sanitizer_elem()
+        if sanitizer_elem is not None:
+            self.sanitizer = ToolParameterSanitizer.from_element( sanitizer_elem )
+        else:
+            self.sanitizer = None
+        self.validators = []
+        for elem in input_source.parse_validator_elems():
+            self.validators.append( validation.Validator.from_element( self, elem ) )
+
+    @property
+    def visible( self ):
+        """Return true if the parameter should be rendered on the form"""
+        return True
+
+    def get_label( self ):
+        """Return user friendly name for the parameter"""
+        return self.label if self.label else self.name
+
+    def from_json( self, value, trans=None, other_values={} ):
+        """
+        Convert a value from an HTML POST into the parameter's preferred value
+        format.
+        """
+        return value
+
+    def get_initial_value( self, trans, other_values ):
+        """
+        Return the starting value of the parameter
+        """
+        return None
+
+    def get_required_enctype( self ):
+        """
+        If this parameter needs the form to have a specific encoding,
+        return it; otherwise return None (indicating compatibility with
+        any encoding)
+        """
+        return None
+
+    def get_dependencies( self ):
+        """
+        Return the names of any other parameters this parameter depends on
+        """
+        return []
+
+    def to_json( self, value, app, use_security ):
+        """Convert a value to a string representation suitable for persisting"""
+        return unicodify( value )
+
+    def to_python( self, value, app ):
+        """Convert a value created with to_json back to an object representation"""
+        return value
+
+    def value_to_basic( self, value, app, use_security=False ):
+        if isinstance( value, RuntimeValue ):
+            return { '__class__': 'RuntimeValue' }
+        elif isinstance( value, dict ):
+            if value.get( '__class__' ) == 'RuntimeValue':
+                return value
+        return self.to_json( value, app, use_security )
+
+    def value_from_basic( self, value, app, ignore_errors=False ):
+        # Handle Runtime and Unvalidated values
+        if isinstance( value, dict ) and value.get( '__class__' ) == 'RuntimeValue':
+            return RuntimeValue()
+        elif isinstance( value, dict ) and value.get( '__class__' ) == 'UnvalidatedValue':
+            return value[ 'value' ]
+        # Delegate to the 'to_python' method
+        if ignore_errors:
+            try:
+                return self.to_python( value, app )
+            except:
+                return value
+        else:
+            return self.to_python( value, app )
+
+    def value_to_display_text( self, value, app=None ):
+        """
+        Convert a value to a text representation suitable for displaying to
+        the user
+        >>> p = ToolParameter( None, XML( '<param name="_name" />' ) )
+        >>> print p.value_to_display_text( None )
+        Not available.
+        >>> print p.value_to_display_text( '' )
+        Empty.
+        >>> print p.value_to_display_text( 'text' )
+        text
+        >>> print p.value_to_display_text( True )
+        True
+        >>> print p.value_to_display_text( False )
+        False
+        >>> print p.value_to_display_text( 0 )
+        0
+        """
+        if value is not None:
+            str_value = unicodify( value )
+            if not str_value:
+                return "Empty."
+            return str_value
+        return "Not available."
+
+    def to_param_dict_string( self, value, other_values={} ):
+        """Called via __str__ when used in the Cheetah template"""
+        if value is None:
+            value = ""
+        elif not isinstance( value, string_types ):
+            value = str( value )
+        if self.tool is None or self.tool.options.sanitize:
+            if self.sanitizer:
+                value = self.sanitizer.sanitize_param( value )
+            else:
+                value = sanitize_param( value )
+        return value
+
+    def validate( self, value, trans=None ):
+        if value in [ "", None ] and self.optional:
+            return
+        for validator in self.validators:
+            validator.validate( value, trans )
+
+    def to_dict( self, trans, other_values={} ):
+        """ to_dict tool parameter. This can be overridden by subclasses. """
+        tool_dict = super( ToolParameter, self ).to_dict()
+        tool_dict[ 'model_class' ] = self.__class__.__name__
+        tool_dict[ 'optional' ] = self.optional
+        tool_dict[ 'hidden' ] = self.hidden
+        tool_dict[ 'is_dynamic' ] = self.is_dynamic
+        if hasattr( self, 'value' ):
+            tool_dict[ 'value' ] = self.value
+        return tool_dict
+
+    @classmethod
+    def build( cls, tool, param ):
+        """Factory method to create parameter of correct type"""
+        param_name = cls.parse_name( param )
+        param_type = param.get( 'type' )
+        if not param_type:
+            raise ValueError( "Tool parameter '%s' requires a 'type'" % ( param_name ) )
+        elif param_type not in parameter_types:
+            raise ValueError( "Tool parameter '%s' uses an unknown type '%s'" % ( param_name, param_type ) )
+        else:
+            return parameter_types[ param_type ]( tool, param )
+
+    @classmethod
+    def parse_name( cls, input_source ):
+        name = input_source.get( 'name' )
+        if name is None:
+            argument = input_source.get( 'argument' )
+            if argument:
+                name = argument.lstrip( '-' )
+            else:
+                raise ValueError( "Tool parameter must specify a name." )
+        return name
+
+
+class TextToolParameter( ToolParameter ):
+    """
+    Parameter that can take on any text value.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch()
+    >>> p = TextToolParameter( None, XML( '<param name="_name" type="text" value="default" />' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('area', False), ('argument', None), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'TextToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('type', 'text'), ('value', 'default')]
+    """
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source(input_source)
+        ToolParameter.__init__( self, tool, input_source )
+        self.value = input_source.get( 'value' )
+        self.area = input_source.get_bool( 'area', False )
+
+    def to_json( self, value, app, use_security ):
+        """Convert a value to a string representation suitable for persisting"""
+        if value is None:
+            rval = ''
+        else:
+            rval = util.smart_str( value )
+        return rval
+
+    def validate( self, value, trans=None ):
+        search = self.type == "text"
+        if not ( trans and trans.workflow_building_mode is workflow_building_modes.ENABLED and contains_workflow_parameter(value, search=search) ):
+            return super( TextToolParameter, self ).validate( value, trans )
+
+    def get_initial_value( self, trans, other_values ):
+        return self.value
+
+    def to_dict( self, trans, other_values={} ):
+        d = super(TextToolParameter, self).to_dict(trans)
+        d['area'] = self.area
+        return d
+
+
+class IntegerToolParameter( TextToolParameter ):
+    """
+    Parameter that takes an integer value.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch(), workflow_building_mode=True )
+    >>> p = IntegerToolParameter( None, XML( '<param name="_name" type="integer" value="10" />' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('area', False), ('argument', None), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('max', None), ('min', None), ('model_class', 'IntegerToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('type', 'integer'), ('value', '10')]
+    >>> type( p.from_json( "10", trans ) )
+    <type 'int'>
+    >>> type( p.from_json( "_string", trans ) )
+    Traceback (most recent call last):
+        ...
+    ValueError: An integer or workflow parameter e.g. ${name} is required
+    """
+
+    dict_collection_visible_keys = ToolParameter.dict_collection_visible_keys + ( 'min', 'max' )
+
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source(input_source)
+        TextToolParameter.__init__( self, tool, input_source )
+        if self.value:
+            try:
+                int( self.value )
+            except:
+                raise ValueError( "An integer is required" )
+        elif self.value is None and not self.optional:
+            raise ValueError( "The settings for the field named '%s' require a 'value' setting and optionally a default value which must be an integer" % self.name )
+        self.min = input_source.get( 'min' )
+        self.max = input_source.get( 'max' )
+        if self.min:
+            try:
+                self.min = int( self.min )
+            except:
+                raise ValueError( "An integer is required" )
+        if self.max:
+            try:
+                self.max = int( self.max )
+            except:
+                raise ValueError( "An integer is required" )
+        if self.min is not None or self.max is not None:
+            self.validators.append( validation.InRangeValidator( None, self.min, self.max ) )
+
+    def from_json( self, value, trans, other_values={} ):
+        try:
+            return int( value )
+        except:
+            if contains_workflow_parameter( value ) and trans.workflow_building_mode is workflow_building_modes.ENABLED:
+                return value
+            if not value and self.optional:
+                return ""
+            if trans.workflow_building_mode is workflow_building_modes.ENABLED:
+                raise ValueError( "An integer or workflow parameter e.g. ${name} is required" )
+            else:
+                raise ValueError( "An integer is required" )
+
+    def to_python( self, value, app ):
+        try:
+            return int( value )
+        except Exception as err:
+            if contains_workflow_parameter(value):
+                return value
+            if not value and self.optional:
+                return None
+            raise err
+
+    def get_initial_value( self, trans, other_values ):
+        if self.value:
+            return int( self.value )
+        else:
+            return None
+
+
+class FloatToolParameter( TextToolParameter ):
+    """
+    Parameter that takes a real number value.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch(), workflow_building_mode=True )
+    >>> p = FloatToolParameter( None, XML( '<param name="_name" type="float" value="3.141592" />' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('area', False), ('argument', None), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('max', None), ('min', None), ('model_class', 'FloatToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('type', 'float'), ('value', '3.141592')]
+    >>> type( p.from_json( "36.1", trans ) )
+    <type 'float'>
+    >>> type( p.from_json( "_string", trans ) )
+    Traceback (most recent call last):
+        ...
+    ValueError: A real number or workflow parameter e.g. ${name} is required
+    """
+
+    dict_collection_visible_keys = ToolParameter.dict_collection_visible_keys + ( 'min', 'max' )
+
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source(input_source)
+        TextToolParameter.__init__( self, tool, input_source )
+        self.min = input_source.get( 'min' )
+        self.max = input_source.get( 'max' )
+        if self.value:
+            try:
+                float( self.value )
+            except:
+                raise ValueError( "A real number is required" )
+        elif self.value is None and not self.optional:
+            raise ValueError( "The settings for this field require a 'value' setting and optionally a default value which must be a real number" )
+        if self.min:
+            try:
+                self.min = float( self.min )
+            except:
+                raise ValueError( "A real number is required" )
+        if self.max:
+            try:
+                self.max = float( self.max )
+            except:
+                raise ValueError( "A real number is required" )
+        if self.min is not None or self.max is not None:
+            self.validators.append( validation.InRangeValidator( None, self.min, self.max ) )
+
+    def from_json( self, value, trans, other_values={} ):
+        try:
+            return float( value )
+        except:
+            if contains_workflow_parameter( value ) and trans.workflow_building_mode is workflow_building_modes.ENABLED:
+                return value
+            if not value and self.optional:
+                return ""
+            if trans and trans.workflow_building_mode is workflow_building_modes.ENABLED:
+                raise ValueError( "A real number or workflow parameter e.g. ${name} is required" )
+            else:
+                raise ValueError( "A real number is required" )
+
+    def to_python( self, value, app ):
+        try:
+            return float( value )
+        except Exception as err:
+            if contains_workflow_parameter( value ):
+                return value
+            if not value and self.optional:
+                return None
+            raise err
+
+    def get_initial_value( self, trans, other_values ):
+        try:
+            return float( self.value )
+        except:
+            return None
+
+
+class BooleanToolParameter( ToolParameter ):
+    """
+    Parameter that takes one of two values.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch() )
+    >>> p = BooleanToolParameter( None, XML( '<param name="_name" type="boolean" checked="yes" truevalue="_truevalue" falsevalue="_falsevalue" />' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('argument', None), ('falsevalue', '_falsevalue'), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'BooleanToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('truevalue', '_truevalue'), ('type', 'boolean'), ('value', True)]
+    >>> print p.from_json( 'true' )
+    True
+    >>> print p.to_param_dict_string( True )
+    _truevalue
+    >>> print p.from_json( 'false' )
+    False
+    >>> print p.to_param_dict_string( False )
+    _falsevalue
+    """
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source(input_source)
+        ToolParameter.__init__( self, tool, input_source )
+        self.truevalue = input_source.get( 'truevalue', 'true' )
+        self.falsevalue = input_source.get( 'falsevalue', 'false' )
+        self.checked = input_source.get_bool( 'checked', False )
+
+    def from_json( self, value, trans=None, other_values={} ):
+        return self.to_python( value )
+
+    def to_python( self, value, app=None ):
+        return ( value in [ True, 'True', 'true' ] )
+
+    def to_json( self, value, app, use_security ):
+        if self.to_python( value, app ):
+            return 'true'
+        else:
+            return 'false'
+
+    def get_initial_value( self, trans, other_values ):
+        return self.checked
+
+    def to_param_dict_string( self, value, other_values={} ):
+        if self.to_python( value ):
+            return self.truevalue
+        else:
+            return self.falsevalue
+
+    def to_dict( self, trans, other_values={} ):
+        d = super( BooleanToolParameter, self ).to_dict( trans )
+        d['value'] = self.checked
+        d['truevalue'] = self.truevalue
+        d['falsevalue'] = self.falsevalue
+        return d
+
+    @property
+    def legal_values( self ):
+        return [ self.truevalue, self.falsevalue ]
+
+
+class FileToolParameter( ToolParameter ):
+    """
+    Parameter that takes an uploaded file as a value.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch() )
+    >>> p = FileToolParameter( None, XML( '<param name="_name" type="file"/>' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('argument', None), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'FileToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('type', 'file')]
+    """
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source(input_source)
+        ToolParameter.__init__( self, tool, input_source )
+
+    def from_json( self, value, trans=None, other_values={} ):
+        # Middleware or proxies may encode files in special ways (TODO: this
+        # should be pluggable)
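+        # e.g. (illustrative values) nginx_upload_module delivers a dict such
+        # as { 'name': 'reads.fq', 'path': '/data/upload_store/000001' },
+        # which is remapped below to 'filename'/'local_filename' keys.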
+        if isinstance( value, dict ):
+            upload_store = trans.app.config.nginx_upload_store
+            assert upload_store, "Request appears to have been processed by nginx_upload_module but Galaxy is not configured to recognize it."
+            # Check that the file is in the right location
+            local_filename = os.path.abspath( value[ 'path' ] )
+            assert local_filename.startswith( upload_store ), "Filename provided by nginx (%s) is not in correct directory (%s)." % (local_filename, upload_store)
+            value = dict( filename=value[ "name" ], local_filename=local_filename )
+        return value
+
+    def get_required_enctype( self ):
+        """
+        File upload elements require the multipart/form-data encoding
+        """
+        return "multipart/form-data"
+
+    def to_json( self, value, app, use_security ):
+        if value in [ None, '' ]:
+            return None
+        elif isinstance( value, string_types ):
+            return value
+        elif isinstance( value, dict ):
+            # or should we jsonify?
+            try:
+                return value['local_filename']
+            except:
+                return None
+        raise Exception( "FileToolParameter cannot be persisted" )
+
+    def to_python( self, value, app ):
+        if value is None:
+            return None
+        elif isinstance( value, string_types ):
+            return value
+        else:
+            raise Exception( "FileToolParameter cannot be persisted" )
+
+    def get_initial_value( self, trans, other_values ):
+        return None
+
+
+class FTPFileToolParameter( ToolParameter ):
+    """
+    Parameter that takes a file uploaded via FTP as a value.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch() )
+    >>> p = FTPFileToolParameter( None, XML( '<param name="_name" type="ftpfile"/>' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('argument', None), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'FTPFileToolParameter'), ('multiple', True), ('name', '_name'), ('optional', True), ('refresh_on_change', False), ('type', 'ftpfile')]
+    """
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source(input_source)
+        ToolParameter.__init__( self, tool, input_source )
+        self.multiple = input_source.get_bool( 'multiple', True )
+        self.optional = input_source.parse_optional( True )
+        self.user_ftp_dir = ''
+
+    def get_initial_value( self, trans, other_values ):
+        if trans is not None:
+            if trans.user is not None:
+                self.user_ftp_dir = "%s/" % trans.user_ftp_dir
+        return None
+
+    @property
+    def visible( self ):
+        if self.tool.app.config.ftp_upload_dir is None or self.tool.app.config.ftp_upload_site is None:
+            return False
+        return True
+
+    def to_param_dict_string( self, value, other_values={} ):
+        if value == '':
+            return 'None'
+        lst = [ '%s%s' % (self.user_ftp_dir, dataset) for dataset in value ]
+        if self.multiple:
+            return lst
+        else:
+            return lst[ 0 ]
+
+    def from_json( self, value, trans=None, other_values={} ):
+        return self.to_python( value, trans.app, validate=True )
+
+    def to_json( self, value, app, use_security ):
+        return self.to_python( value, app )
+
+    def to_python( self, value, app, validate=False ):
+        if not isinstance( value, list ):
+            value = [ value ]
+        lst = []
+        for val in value:
+            if val in [ None, '' ]:
+                lst = []
+                break
+            if isinstance( val, dict ):
+                lst.append( val[ 'name' ] )
+            else:
+                lst.append( val )
+        if len( lst ) == 0:
+            if not self.optional and validate:
+                raise ValueError( "Please select a valid FTP file." )
+            return None
+        if validate and self.tool.app.config.ftp_upload_dir is None:
+            raise ValueError( "The FTP directory is not configured." )
+        return lst
+
+    def to_dict( self, trans, other_values=None ):
+        d = super( FTPFileToolParameter, self ).to_dict( trans )
+        d[ 'multiple' ] = self.multiple
+        return d
+
+
+class HiddenToolParameter( ToolParameter ):
+    """
+    Parameter that is hidden from the user and carries a fixed value.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch() )
+    >>> p = HiddenToolParameter( None, XML( '<param name="_name" type="hidden" value="_value"/>' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('argument', None), ('help', ''), ('hidden', True), ('is_dynamic', False), ('label', ''), ('model_class', 'HiddenToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('type', 'hidden'), ('value', '_value')]
+    """
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source( input_source )
+        ToolParameter.__init__( self, tool, input_source )
+        self.value = input_source.get( 'value' )
+        self.hidden = True
+
+    def get_initial_value( self, trans, other_values ):
+        return self.value
+
+    def get_label( self ):
+        return None
+
+
+class ColorToolParameter( ToolParameter ):
+    """
+    Parameter that stores a color.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch() )
+    >>> p = ColorToolParameter( None, XML( '<param name="_name" type="color" value="#ffffff"/>' ) )
+    >>> print p.name
+    _name
+    >>> print p.to_param_dict_string( "#fdeada" )
+    #fdeada
+    >>> sorted( p.to_dict( trans ).items() )
+    [('argument', None), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'ColorToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('type', 'color'), ('value', '#ffffff')]
+    >>> p = ColorToolParameter( None, XML( '<param name="_name" type="color" value="#ffffff" rgb="True"/>' ) )
+    >>> print p.to_param_dict_string( "#fdeada" )
+    (253, 234, 218)
+    >>> print p.to_param_dict_string( None )
+    Traceback (most recent call last):
+        ...
+    ValueError: Failed to convert 'None' to RGB.
+    """
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source( input_source )
+        ToolParameter.__init__( self, tool, input_source )
+        self.value = input_source.get( 'value', '#fdeada' )
+        self.rgb = input_source.get( 'rgb', False )
+
+    def get_initial_value( self, trans, other_values ):
+        return self.value.lower()
+
+    def to_param_dict_string( self, value, other_values={} ):
+        if self.rgb:
+            try:
+                return str( tuple( int( value.lstrip( '#' )[ i : i + 2 ], 16 ) for i in ( 0, 2, 4 ) ) )
+            except Exception:
+                raise ValueError( "Failed to convert \'%s\' to RGB." % value )
+        return str( value )
+
+
+class BaseURLToolParameter( HiddenToolParameter ):
+    """
+    Returns a parameter whose value is prepended with the current
+    server base URL. Used in all redirects.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch() )
+    >>> p = BaseURLToolParameter( None, XML( '<param name="_name" type="base_url" value="_value"/>' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('argument', None), ('help', ''), ('hidden', True), ('is_dynamic', False), ('label', ''), ('model_class', 'BaseURLToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('type', 'base_url'), ('value', '_value')]
+    """
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source( input_source )
+        super( BaseURLToolParameter, self ).__init__( tool, input_source )
+        self.value = input_source.get( 'value', '' )
+
+    def get_initial_value( self, trans, other_values ):
+        return self._get_value()
+
+    def from_json( self, value=None, trans=None, other_values={} ):
+        return self._get_value()
+
+    def _get_value( self ):
+        try:
+            return url_for( self.value, qualified=True )
+        except Exception as e:
+            log.debug( 'Url creation failed for "%s": %s', self.name, e )
+            return self.value
+
+    def to_dict( self, trans, other_values={} ):
+        d = super( BaseURLToolParameter, self ).to_dict( trans )
+        d[ 'value' ] = self._get_value()
+        return d
+
+
+class SelectToolParameter( ToolParameter ):
+    """
+    Parameter that takes on one (or many) of a specific set of values.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch() )
+    >>> p = SelectToolParameter( None, XML(
+    ... '''
+    ... <param name="_name" type="select">
+    ...     <option value="x">x_label</option>
+    ...     <option value="y" selected="true">y_label</option>
+    ...     <option value="z">z_label</option>
+    ... </param>
+    ... ''' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('argument', None), ('display', None), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'SelectToolParameter'), ('multiple', False), ('name', '_name'), ('optional', False), ('options', [('x_label', 'x', False), ('y_label', 'y', True), ('z_label', 'z', False)]), ('refresh_on_change', False), ('type', 'select'), ('value', 'y')]
+    >>> p = SelectToolParameter( None, XML(
+    ... '''
+    ... <param name="_name" type="select" multiple="true">
+    ...     <option value="x">x_label</option>
+    ...     <option value="y" selected="true">y_label</option>
+    ...     <option value="z" selected="true">z_label</option>
+    ... </param>
+    ... ''' ) )
+    >>> print p.name
+    _name
+    >>> sorted( p.to_dict( trans ).items() )
+    [('argument', None), ('display', None), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'SelectToolParameter'), ('multiple', True), ('name', '_name'), ('optional', True), ('options', [('x_label', 'x', False), ('y_label', 'y', True), ('z_label', 'z', True)]), ('refresh_on_change', False), ('type', 'select'), ('value', 'z')]
+    >>> print p.to_param_dict_string( ["y", "z"] )
+    y,z
+    """
+    def __init__( self, tool, input_source, context=None ):
+        input_source = ensure_input_source( input_source )
+        ToolParameter.__init__( self, tool, input_source )
+        self.multiple = input_source.get_bool( 'multiple', False )
+        # Multiple selects are optional by default, single selection is the inverse.
+        self.optional = input_source.parse_optional( self.multiple )
+        self.display = input_source.get( 'display', None )
+        self.separator = input_source.get( 'separator', ',' )
+        self.legal_values = set()
+        self.dynamic_options = input_source.get( 'dynamic_options', None )
+        self.options = parse_dynamic_options( self, input_source )
+        if self.options is not None:
+            for validator in self.options.validators:
+                self.validators.append( validator )
+        if self.dynamic_options is None and self.options is None:
+            self.static_options = input_source.parse_static_options()
+            for (title, value, selected) in self.static_options:
+                self.legal_values.add( value )
+        self.is_dynamic = ( ( self.dynamic_options is not None ) or ( self.options is not None ) )
+
+    def _get_dynamic_options_call_other_values( self, trans, other_values ):
+        call_other_values = ExpressionContext( { '__trans__': trans } )
+        if other_values:
+            call_other_values.parent = other_values.parent
+            call_other_values.update( other_values.dict )
+        return call_other_values
+
+    def get_options( self, trans, other_values ):
+        if self.options:
+            return self.options.get_options( trans, other_values )
+        elif self.dynamic_options:
+            call_other_values = self._get_dynamic_options_call_other_values( trans, other_values )
+            try:
+                return eval( self.dynamic_options, self.tool.code_namespace, call_other_values )
+            except Exception as e:
+                log.debug( "Error determining dynamic options for parameter '%s' in tool '%s':", self.name, self.tool.id, exc_info=e )
+                return []
+        else:
+            return self.static_options
+
+    def get_legal_values( self, trans, other_values ):
+        if self.options:
+            return set( v for _, v, _ in self.options.get_options( trans, other_values ) )
+        elif self.dynamic_options:
+            try:
+                call_other_values = self._get_dynamic_options_call_other_values( trans, other_values )
+                return set( v for _, v, _ in eval( self.dynamic_options, self.tool.code_namespace, call_other_values ) )
+            except Exception as e:
+                log.debug( "Determining legal values failed for '%s': %s", self.name, e )
+                return set()
+        else:
+            return self.legal_values
+
+    def from_json( self, value, trans, other_values={} ):
+        legal_values = self.get_legal_values( trans, other_values )
+        workflow_building_mode = trans.workflow_building_mode
+        for context_value in other_values.itervalues():
+            if isinstance( context_value, RuntimeValue ):
+                workflow_building_mode = True
+                break
+        if len( list( legal_values ) ) == 0 and workflow_building_mode:
+            if self.multiple:
+                # While a select value of '' is generally allowed, we do not
+                # allow it in a dynamically generated multiple select list
+                # being set in workflow building mode; instead we treat '' as
+                # 'no option selected' (None).
+                if value == '':
+                    value = None
+                else:
+                    if isinstance( value, string_types ):
+                        # Split on all whitespace. This not only provides flexibility
+                        # in interpreting values but also is needed because many browsers
+                        # use \r\n to separate lines.
+                        value = value.split()
+            return value
+        if ( not legal_values or value is None ) and self.optional:
+            return None
+        if not legal_values:
+            raise ValueError( "Parameter %s requires a value, but has no legal values defined." % self.name )
+        if isinstance( value, list ):
+            if not self.multiple:
+                raise ValueError( "Multiple values provided but parameter %s is not expecting multiple values." % self.name )
+            rval = []
+            for v in value:
+                if v not in legal_values:
+                    raise ValueError( "An invalid option was selected for %s, %r, please verify." % ( self.name, v ) )
+                rval.append( v )
+            return rval
+        else:
+            value_is_none = ( value == "None" and "None" not in legal_values )
+            if value_is_none or not value:
+                if self.multiple:
+                    if self.optional:
+                        return []
+                    else:
+                        raise ValueError( "No option was selected for %s but input is not optional." % self.name )
+            if value not in legal_values:
+                raise ValueError( "An invalid option was selected for %s, %r, please verify." % ( self.name, value ) )
+            return value
+
+    def to_param_dict_string( self, value, other_values={} ):
+        if value is None:
+            return "None"
+        if isinstance( value, list ):
+            if not self.multiple:
+                raise ValueError( "Multiple values provided but parameter %s is not expecting multiple values." % self.name )
+            value = map( str, value )
+        else:
+            value = str( value )
+        if self.tool is None or self.tool.options.sanitize:
+            if self.sanitizer:
+                value = self.sanitizer.sanitize_param( value )
+            else:
+                value = sanitize_param( value )
+        if isinstance( value, list ):
+            value = self.separator.join( value )
+        return value
+
+    def to_json( self, value, app, use_security ):
+        return value
+
+    def get_initial_value( self, trans, other_values ):
+        options = list( self.get_options( trans, other_values ) )
+        if len(options) == 0 and trans.workflow_building_mode:
+            return None
+        value = [ optval for _, optval, selected in options if selected ]
+        if len( value ) == 0:
+            if not self.optional and not self.multiple and options:
+                # Nothing selected, but not optional and not a multiple select, with some values,
+                # so we have to default to something (the HTML form will anyway)
+                value = options[ 0 ][ 1 ]
+            else:
+                value = None
+        elif len( value ) == 1:
+            value = value[0]
+        return value
+
+    def value_to_display_text( self, value, app ):
+        if not isinstance( value, list ):
+            value = [ value ]
+        # FIXME: Currently only translating values back to labels if they
+        #        are not dynamic
+        if self.is_dynamic:
+            rval = map( str, value )
+        else:
+            options = list( self.static_options )
+            rval = []
+            for t, v, s in options:
+                if v in value:
+                    rval.append( t )
+        if rval:
+            return "\n".join( rval )
+        return "Nothing selected."
+
+    def get_dependencies( self ):
+        """
+        Get the *names* of the other params this param depends on.
+        """
+        if self.options:
+            return self.options.get_dependency_names()
+        else:
+            return []
+
+    def to_dict( self, trans, other_values={} ):
+        d = super( SelectToolParameter, self ).to_dict( trans )
+
+        # Get options, value.
+        options = self.get_options( trans, other_values )
+        d[ 'options' ] = options
+        if options:
+            value = options[ 0 ][ 1 ]
+            for option in options:
+                if option[ 2 ]:
+                    # Found selected option.
+                    value = option[ 1 ]
+            d[ 'value' ] = value
+
+        d[ 'display' ] = self.display
+        d[ 'multiple' ] = self.multiple
+        return d
+
+
+class GenomeBuildParameter( SelectToolParameter ):
+    """
+    Select list that sets the last used genome build for the current history as "selected".
+
+    >>> # Create a mock transaction with 'hg17' as the current build
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch( genome_build='hg17' ), db_builds=util.read_dbnames( None ) )
+    >>> p = GenomeBuildParameter( None, XML( '<param name="_name" type="genomebuild" value="hg17" />' ) )
+    >>> print p.name
+    _name
+    >>> d = p.to_dict( trans )
+    >>> o = d[ 'options' ]
+    >>> [ i for i in o if i[ 2 ] == True ]
+    [('Human May 2004 (NCBI35/hg17) (hg17)', 'hg17', True)]
+    >>> [ i for i in o if i[ 1 ] == 'hg18' ]
+    [('Human Mar. 2006 (NCBI36/hg18) (hg18)', 'hg18', False)]
+    """
+    def __init__( self, *args, **kwds ):
+        super( GenomeBuildParameter, self ).__init__( *args, **kwds )
+        if self.tool:
+            self.static_options = [ ( value, key, False ) for key, value in self._get_dbkey_names()]
+
+    def get_options( self, trans, other_values ):
+        last_used_build = object()
+        if trans.history:
+            last_used_build = trans.history.genome_build
+        for dbkey, build_name in self._get_dbkey_names( trans=trans ):
+            yield build_name, dbkey, ( dbkey == last_used_build )
+
+    def get_legal_values( self, trans, other_values ):
+        return set( dbkey for dbkey, _ in self._get_dbkey_names( trans=trans ) )
+
+    def to_dict( self, trans, other_values={} ):
+        # skip SelectToolParameter (the immediate parent) because we need to get options in a different way here
+        d = ToolParameter.to_dict( self, trans )
+
+        # Get options, value - options is a generator here, so compile to list
+        options = list( self.get_options( trans, {} ) )
+        value = options[0][1]
+        for option in options:
+            if option[2]:
+                # Found selected option.
+                value = option[1]
+
+        d.update({
+            'options'   : options,
+            'value'     : value,
+            'display'   : self.display,
+            'multiple'  : self.multiple,
+        })
+
+        return d
+
+    def _get_dbkey_names( self, trans=None ):
+        if not self.tool:
+            # Hack for unit tests, since we have no tool
+            return util.read_dbnames( None )
+        return self.tool.app.genome_builds.get_genome_build_names( trans=trans )
+
+
+class ColumnListParameter( SelectToolParameter ):
+    """
+    Select list that consists of either all columns of the dataset referenced
+    by the associated DataToolParameter, or only those columns that contain
+    numerical values.
+
+    # TODO: we need better testing here, but not sure how to associate a DataToolParameter with a ColumnListParameter
+    # from a twill perspective...
+
+    >>> # Mock up a history (not connected to database)
+    >>> from galaxy.model import History, HistoryDatasetAssociation
+    >>> from galaxy.util.bunch import Bunch
+    >>> from galaxy.model.mapping import init
+    >>> sa_session = init( "/tmp", "sqlite:///:memory:", create_tables=True ).session
+    >>> hist = History()
+    >>> sa_session.add( hist )
+    >>> sa_session.flush()
+    >>> hda = hist.add_dataset( HistoryDatasetAssociation( id=1, extension='interval', create_dataset=True, sa_session=sa_session ) )
+    >>> dtp =  DataToolParameter( None, XML( '<param name="blah" type="data" format="interval"/>' ) )
+    >>> print dtp.name
+    blah
+    >>> clp = ColumnListParameter( None, XML( '<param name="numerical_column" type="data_column" data_ref="blah" numerical="true"/>' ) )
+    >>> print clp.name
+    numerical_column
+    """
+    def __init__( self, tool, input_source ):
+        input_source = ensure_input_source( input_source )
+        SelectToolParameter.__init__( self, tool, input_source )
+        self.tool = tool
+        self.numerical = input_source.get_bool( "numerical", False )
+        self.optional = input_source.parse_optional( False )
+        self.accept_default = input_source.get_bool( "accept_default", False )
+        if self.accept_default:
+            self.optional = True
+        self.data_ref = input_source.get( "data_ref", None )
+        self.ref_input = None
+        # Legacy style default value specification...
+        self.default_value = input_source.get( "default_value", None )
+        if self.default_value is None:
+            # Newer style... more in line with other parameters.
+            self.default_value = input_source.get( "value", None )
+        if self.default_value is not None:
+            self.default_value = ColumnListParameter._strip_c( self.default_value )
+        self.is_dynamic = True
+        self.usecolnames = input_source.get_bool( "use_header_names", False )
+
+    def from_json( self, value, trans, other_values={} ):
+        """
+        The label convention prepends the column number with a 'c', but the
+        tool uses the bare integer. This strips the 'c' when a value is
+        entered in a workflow.
+        """
+        if self.multiple:
+            # split the value on newlines and commas
+            if isinstance( value, ( list, string_types ) ):
+                column_list = []
+                if not isinstance( value, list ):
+                    value = value.split( '\n' )
+                for column in value:
+                    for column2 in str( column ).split( ',' ):
+                        column2 = column2.strip()
+                        if column2:
+                            column_list.append( column2 )
+                value = map( ColumnListParameter._strip_c, column_list )
+            else:
+                value = []
+        else:
+            if value:
+                value = ColumnListParameter._strip_c( value )
+            else:
+                value = None
+        if not value and self.accept_default:
+            value = self.default_value or '1'
+            return [ value ] if self.multiple else value
+        return super( ColumnListParameter, self ).from_json( value, trans, other_values )
+
+    @staticmethod
+    def _strip_c( column ):
+        if isinstance( column, string_types ):
+            # Normalize case and whitespace before checking for the 'c' prefix.
+            column = column.strip().lower()
+            if column.startswith( 'c' ):
+                column = column[ 1: ]
+        return column
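+    # For example: _strip_c( 'c4' ) -> '4' and _strip_c( '7' ) -> '7', while
+    # non-string values (e.g. integers) are returned unchanged.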
+
+    def get_column_list( self, trans, other_values ):
+        """
+        Generate a select list containing the columns of the associated
+        dataset (if found).
+        """
+        # Get the value of the associated data reference (a dataset)
+        dataset = other_values.get( self.data_ref, None )
+        # Check if a dataset is selected
+        if not dataset:
+            return []
+        column_list = None
+        for dataset in util.listify( dataset ):
+            # Use representative dataset if a dataset collection is parsed
+            if isinstance( dataset, trans.app.model.HistoryDatasetCollectionAssociation ):
+                dataset = dataset.to_hda_representative()
+            # Columns can only be identified if metadata is available
+            if not hasattr( dataset, 'metadata' ) or not hasattr( dataset.metadata, 'columns' ) or not dataset.metadata.columns:
+                return []
+            # Build up possible columns for this dataset
+            this_column_list = []
+            if self.numerical:
+                # If numerical was requested, filter columns based on metadata
+                for i, col in enumerate( dataset.metadata.column_types ):
+                    if col == 'int' or col == 'float':
+                        this_column_list.append( str( i + 1 ) )
+            else:
+                for i in range( 0, dataset.metadata.columns ):
+                    this_column_list.append( str( i + 1 ) )
+            # Take the intersection of these columns with the other columns.
+            if column_list is None:
+                column_list = this_column_list
+            else:
+                column_list = [ c for c in column_list if c in this_column_list ]
+        return column_list
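+    # Illustrative example: if one selected dataset offers columns [ '1', '3' ]
+    # and another offers [ '3', '4' ], the intersection above yields [ '3' ] -
+    # only columns valid for every selected dataset are kept.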
+
+    def get_options( self, trans, other_values ):
+        """
+        Show column labels rather than c1..cn if use_header_names=True
+        """
+        options = []
+        if self.usecolnames:  # read the first row, assuming it is a header whose labels make for more informative choices
+            dataset = other_values.get( self.data_ref, None )
+            try:
+                with open( dataset.get_file_name(), 'r' ) as f:
+                    head = f.readline()
+                cnames = head.rstrip().split( '\t' )
+                column_list = [ ( '%d' % ( i + 1 ), 'c%d: %s' % ( i + 1, x ) ) for i, x in enumerate( cnames ) ]
+                if self.numerical:  # If numerical was requested, filter columns based on metadata
+                    if hasattr( dataset, 'metadata' ) and hasattr( dataset.metadata, 'column_types' ):
+                        if len( dataset.metadata.column_types ) >= len( cnames ):
+                            numerics = [ i for i, x in enumerate( dataset.metadata.column_types ) if x in [ 'int', 'float' ] ]
+                            column_list = [ column_list[ i ] for i in numerics ]
+            except Exception:
+                column_list = self.get_column_list( trans, other_values )
+        else:
+            column_list = self.get_column_list( trans, other_values )
+        for col in column_list:
+            if isinstance( col, tuple ) and len( col ) == 2:
+                options.append( ( col[ 1 ], col[ 0 ], False ) )
+            else:
+                options.append( ( 'Column: ' + col, col, False ) )
+        return options
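+    # Each returned option is a ( label, value, selected ) tuple, e.g.
+    #   ( 'c1: chrom', '1', False )   with use_header_names enabled, or
+    #   ( 'Column: 1', '1', False )   otherwise.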
+
+    def get_initial_value( self, trans, other_values ):
+        if self.default_value is not None:
+            return self.default_value
+        return SelectToolParameter.get_initial_value( self, trans, other_values )
+
+    def get_legal_values( self, trans, other_values ):
+        if self.data_ref not in other_values:
+            raise ValueError( "Value for associated data reference not found (data_ref)." )
+        return set( self.get_column_list( trans, other_values ) )
+
+    def get_dependencies( self ):
+        return [ self.data_ref ]
+
+    def to_dict( self, trans, other_values={} ):
+        d = super( ColumnListParameter, self ).to_dict( trans, other_values=other_values )
+        d[ 'data_ref' ] = self.data_ref
+        d[ 'numerical' ] = self.numerical
+        return d
+
+
+class DrillDownSelectToolParameter( SelectToolParameter ):
+    """
+    Parameter that takes on one (or many) of a specific set of values.
+    Creating a hierarchical select menu, which allows users to 'drill down' a tree-like set of options.
+
+    >>> from galaxy.util.bunch import Bunch
+    >>> trans = Bunch( history=Bunch( genome_build='hg17' ), db_builds=util.read_dbnames( None ) )
+    >>> p = DrillDownSelectToolParameter( None, XML(
+    ... '''
+    ... <param name="_name" type="drill_down" display="checkbox" hierarchy="recurse" multiple="true">
+    ...   <options>
+    ...    <option name="Heading 1" value="heading1">
+    ...        <option name="Option 1" value="option1"/>
+    ...        <option name="Option 2" value="option2"/>
+    ...        <option name="Heading 2" value="heading2">
+    ...          <option name="Option 3" value="option3"/>
+    ...          <option name="Option 4" value="option4"/>
+    ...        </option>
+    ...    </option>
+    ...    <option name="Option 5" value="option5"/>
+    ...   </options>
+    ... </param>
+    ... ''' ) )
+    >>> print p.name
+    _name
+    >>> d = p.to_dict( trans )
+    >>> assert d[ 'multiple' ] == True
+    >>> assert d[ 'display' ] == 'checkbox'
+    >>> assert d[ 'options' ][ 0 ][ 'name' ] == 'Heading 1'
+    >>> assert d[ 'options' ][ 0 ][ 'value' ] == 'heading1'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 0 ][ 'name' ] == 'Option 1'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 0 ][ 'value' ] == 'option1'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 1 ][ 'name' ] == 'Option 2'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 1 ][ 'value' ] == 'option2'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 2 ][ 'name' ] == 'Heading 2'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 2 ][ 'value' ] == 'heading2'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 2 ][ 'options' ][ 0 ][ 'name' ] == 'Option 3'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 2 ][ 'options' ][ 0 ][ 'value' ] == 'option3'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 2 ][ 'options' ][ 1 ][ 'name' ] == 'Option 4'
+    >>> assert d[ 'options' ][ 0 ][ 'options' ][ 2 ][ 'options' ][ 1 ][ 'value' ] == 'option4'
+    >>> assert d[ 'options' ][ 1 ][ 'name' ] == 'Option 5'
+    >>> assert d[ 'options' ][ 1 ][ 'value' ] == 'option5'
+    """
+    def __init__( self, tool, input_source, context=None ):
+        input_source = ensure_input_source( input_source )
+
+        def recurse_option_elems( cur_options, option_elems ):
+            for option_elem in option_elems:
+                selected = string_as_bool( option_elem.get( 'selected', False ) )
+                cur_options.append( { 'name': option_elem.get( 'name' ), 'value': option_elem.get( 'value' ), 'options': [], 'selected': selected  } )
+                recurse_option_elems( cur_options[-1]['options'], option_elem.findall( 'option' ) )
+        ToolParameter.__init__( self, tool, input_source )
+        # TODO: abstract XML out of here - so non-XML InputSources can
+        # specify DrillDown parameters.
+        elem = input_source.elem()
+        self.multiple = string_as_bool( elem.get( 'multiple', False ) )
+        self.display = elem.get( 'display', None )
+        self.hierarchy = elem.get( 'hierarchy', 'exact' )  # exact or recurse
+        self.separator = elem.get( 'separator', ',' )
+        from_file = elem.get( 'from_file', None )
+        if from_file:
+            if not os.path.isabs( from_file ):
+                from_file = os.path.join( tool.app.config.tool_data_path, from_file )
+            elem = XML( "<root>%s</root>" % open( from_file ).read() )
+        self.dynamic_options = elem.get( 'dynamic_options', None )
+        if self.dynamic_options:
+            self.is_dynamic = True
+        self.options = []
+        self.filtered = {}
+        if elem.find( 'filter' ) is not None:
+            self.is_dynamic = True
+            for filter in elem.findall( 'filter' ):
+                # currently only filtering by metadata key matching input file is allowed
+                if filter.get( 'type' ) == 'data_meta':
+                    if filter.get( 'data_ref' ) not in self.filtered:
+                        self.filtered[filter.get( 'data_ref' )] = {}
+                    if filter.get( 'meta_key' ) not in self.filtered[filter.get( 'data_ref' )]:
+                        self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )] = {}
+                    if filter.get( 'value' ) not in self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )]:
+                        self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )][filter.get( 'value' )] = []
+                    recurse_option_elems( self.filtered[filter.get( 'data_ref' )][filter.get( 'meta_key' )][filter.get( 'value' )], filter.find( 'options' ).findall( 'option' ) )
+        elif not self.dynamic_options:
+            recurse_option_elems( self.options, elem.find( 'options' ).findall( 'option' ) )
+
+    def _get_options_from_code( self, trans=None, value=None, other_values=None ):
+        assert self.dynamic_options, "dynamic_options was not specified"
+        call_other_values = ExpressionContext({ '__trans__': trans, '__value__': value })
+        if other_values:
+            call_other_values.parent = other_values.parent
+            call_other_values.update( other_values.dict )
+        try:
+            return eval( self.dynamic_options, self.tool.code_namespace, call_other_values )
+        except Exception:
+            return []
+
+    def get_options( self, trans=None, value=None, other_values={} ):
+        if self.is_dynamic:
+            if self.dynamic_options:
+                options = self._get_options_from_code( trans=trans, value=value, other_values=other_values )
+            else:
+                options = []
+            for filter_key, filter_value in self.filtered.iteritems():
+                dataset = other_values.get(filter_key)
+                if dataset.__class__.__name__.endswith( "DatasetFilenameWrapper" ):  # fragile type check; importing the class here is problematic (likely due to circular imports)
+                    dataset = dataset.dataset
+                if dataset:
+                    for meta_key, meta_dict in filter_value.iteritems():
+                        if hasattr( dataset, 'metadata' ) and hasattr( dataset.metadata, 'spec' ):
+                            check_meta_val = dataset.metadata.spec[ meta_key ].param.to_string( dataset.metadata.get( meta_key ) )
+                            if check_meta_val in meta_dict:
+                                options.extend( meta_dict[ check_meta_val ] )
+            return options
+        return self.options
+
+    def get_legal_values( self, trans, other_values ):
+        def recurse_options( legal_values, options ):
+            for option in options:
+                legal_values.append( option['value'] )
+                recurse_options( legal_values, option['options'] )
+        legal_values = []
+        recurse_options( legal_values, self.get_options( trans=trans, other_values=other_values ) )
+        return legal_values
+
+    def from_json( self, value, trans, other_values={} ):
+        legal_values = self.get_legal_values( trans, other_values )
+        if not legal_values and trans.workflow_building_mode:
+            if self.multiple:
+                if value == '':  # No option selected
+                    value = None
+                else:
+                    value = value.split( "\n" )
+            return value
+        if not value and not self.optional:
+            raise ValueError( "An invalid option was selected for %s, please verify." % (self.name) )
+        if not value:
+            return None
+        if not isinstance( value, list ):
+            value = [ value ]
+        if len( value ) > 1 and not self.multiple:
+            raise ValueError( "Multiple values provided but parameter %s is not expecting multiple values." % self.name )
+        rval = []
+        if not legal_values:
+            raise ValueError( "Parameter %s requires a value, but has no legal values defined." % self.name )
+        for val in value:
+            if val not in legal_values:
+                raise ValueError( "An invalid option was selected for %s, %r, please verify" % ( self.name, val ) )
+            rval.append( val )
+        return rval
+
+    def to_param_dict_string( self, value, other_values={} ):
+        def get_options_list( value ):
+            def get_base_option( value, options ):
+                for option in options:
+                    if value == option['value']:
+                        return option
+                    rval = get_base_option( value, option['options'] )
+                    if rval:
+                        return rval
+                return None  # not found
+
+            def recurse_option( option_list, option ):
+                if not option['options']:
+                    option_list.append( option['value'] )
+                else:
+                    for opt in option['options']:
+                        recurse_option( option_list, opt )
+            rval = []
+            recurse_option( rval, get_base_option( value, self.get_options( other_values=other_values ) ) )
+            return rval or [value]
+
+        if value is None:
+            return "None"
+        rval = []
+        if self.hierarchy == "exact":
+            rval = value
+        else:
+            for val in value:
+                options = get_options_list( val )
+                rval.extend( options )
+        if len( rval ) > 1 and not self.multiple:
+            raise ValueError( "Multiple values provided but parameter %s is not expecting multiple values." % self.name )
+        rval = self.separator.join( rval )
+        if self.tool is None or self.tool.options.sanitize:
+            if self.sanitizer:
+                rval = self.sanitizer.sanitize_param( rval )
+            else:
+                rval = sanitize_param( rval )
+        return rval
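+    # Illustrative example: with hierarchy="recurse" and the option tree from
+    # the class doctest, a value of [ 'heading1' ] expands to the leaf values
+    # 'option1,option2,option3,option4' (joined with self.separator), whereas
+    # hierarchy="exact" emits 'heading1' itself.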
+
+    def get_initial_value( self, trans, other_values ):
+        def recurse_options( initial_values, options ):
+            for option in options:
+                if option['selected']:
+                    initial_values.append( option['value'] )
+                recurse_options( initial_values, option['options'] )
+        # More workarounds for dynamic options in workflow building mode
+        options = self.get_options( trans=trans, other_values=other_values )
+        if not options and trans.workflow_building_mode:
+            return None
+        initial_values = []
+        recurse_options( initial_values, options )
+        if len( initial_values ) == 0:
+            initial_values = None
+        return initial_values
+
+    def value_to_display_text( self, value, app ):
+        def get_option_display( value, options ):
+            for option in options:
+                if value == option['value']:
+                    return option['name']
+                rval = get_option_display( value, option['options'] )
+                if rval:
+                    return rval
+            return None  # not found
+        if not value:
+            value = []
+        elif not isinstance( value, list ):
+            value = [ value ]
+        # FIXME: Currently only translating values back to labels if they
+        #        are not dynamic
+        if self.is_dynamic:
+            if value:
+                if isinstance( value, list ):
+                    rval = value
+                else:
+                    rval = [ value ]
+            else:
+                rval = []
+        else:
+            rval = []
+            for val in value:
+                rval.append( get_option_display( val, self.options ) or val )
+        if rval:
+            return "\n".join( map( str, rval ) )
+        return "Nothing selected."
+
+    def get_dependencies( self ):
+        """
+        Get the *names* of the other params this param depends on.
+        """
+        return self.filtered.keys()
+
+    def to_dict( self, trans, other_values={} ):
+        # skip SelectToolParameter (the immediate parent) because we need to get options in a different way here
+        d = ToolParameter.to_dict( self, trans )
+        d[ 'options' ] = self.get_options( trans=trans, other_values=other_values )
+        d[ 'display' ] = self.display
+        d[ 'multiple' ] = self.multiple
+        return d
+
+
+class BaseDataToolParameter( ToolParameter ):
+
+    def __init__( self, tool, input_source, trans ):
+        super(BaseDataToolParameter, self).__init__( tool, input_source )
+        self.refresh_on_change = True
+
+    def _datatypes_registry( self, trans, tool ):
+        # Find datatypes_registry
+        if tool is None:
+            if trans:
+                # Must account for "Input Dataset" types, which while not a tool still need access to the real registry.
+                # A handle to the transaction (and thus app) will be given by the module.
+                datatypes_registry = trans.app.datatypes_registry
+            else:
+                # This occurs for things such as unit tests
+                import galaxy.datatypes.registry
+                datatypes_registry = galaxy.datatypes.registry.Registry()
+                datatypes_registry.load_datatypes()
+        else:
+            datatypes_registry = tool.app.datatypes_registry
+        return datatypes_registry
+
+    def _parse_formats( self, trans, tool, input_source ):
+        datatypes_registry = self._datatypes_registry( trans, tool )
+
+        # Build tuple of classes for supported data formats
+        formats = []
+        self.extensions = input_source.get( 'format', 'data' ).split( "," )
+        normalized_extensions = [extension.strip().lower() for extension in self.extensions]
+        for extension in normalized_extensions:
+            formats.append( datatypes_registry.get_datatype_by_extension( extension ) )
+        self.formats = formats
+
+    def _parse_options( self, input_source ):
+        # TODO: Enhance dynamic options for DataToolParameters. Currently,
+        #       only the special case key='build' of type='data_meta' is
+        #       a valid filter
+        self.options_filter_attribute = None
+        self.options = parse_dynamic_options( self, input_source )
+        if self.options:
+            # TODO: Abstract away XML handling here.
+            options_elem = input_source.elem().find('options')
+            self.options_filter_attribute = options_elem.get(  'options_filter_attribute', None )
+        self.is_dynamic = self.options is not None
+
+    def get_initial_value( self, trans, other_values ):
+        if trans.workflow_building_mode is workflow_building_modes.ENABLED or trans.app.name == 'tool_shed':
+            return RuntimeValue()
+        if self.optional:
+            return None
+        history = trans.history
+        if history is not None:
+            dataset_matcher = DatasetMatcher( trans, self, None, other_values )
+            if isinstance( self, DataToolParameter ):
+                for hda in reversed( history.active_datasets_children_and_roles ):
+                    match = dataset_matcher.hda_match( hda, check_security=False )
+                    if match:
+                        return match.hda
+            else:
+                dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher )
+                for hdca in reversed( history.active_dataset_collections ):
+                    if dataset_collection_matcher.hdca_match( hdca, reduction=self.multiple ):
+                        return hdca
+
+    def to_json( self, value, app, use_security ):
+        def single_to_json( value ):
+            src = None
+            if isinstance( value, dict ) and 'src' in value and 'id' in value:
+                return value
+            elif isinstance( value, galaxy.model.DatasetCollectionElement ):
+                src = 'dce'
+            elif isinstance( value, app.model.HistoryDatasetCollectionAssociation ):
+                src = 'hdca'
+            elif hasattr( value, 'id' ):
+                src = 'hda'
+            if src is not None:
+                return { 'id' : app.security.encode_id( value.id ) if use_security else value.id, 'src' : src }
+        if value not in [ None, '', 'None' ]:
+            if isinstance( value, list ) and len( value ) > 0:
+                values = [ single_to_json( v ) for v in value ]
+            else:
+                values = [ single_to_json( value ) ]
+            return { 'values': values }
+        return None
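+    # Illustrative shape of the JSON-ready value for a single HDA:
+    #   { 'values': [ { 'id': <id, encoded when use_security is set>, 'src': 'hda' } ] }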
+
+    def to_python( self, value, app ):
+        def single_to_python( value ):
+            if isinstance( value, dict ) and 'src' in value:
+                id = value[ 'id' ] if isinstance( value[ 'id' ], int ) else app.security.decode_id( value[ 'id' ] )
+                if value[ 'src' ] == 'dce':
+                    return app.model.context.query( app.model.DatasetCollectionElement ).get( id )
+                elif value[ 'src' ] == 'hdca':
+                    return app.model.context.query( app.model.HistoryDatasetCollectionAssociation ).get( id )
+                else:
+                    return app.model.context.query( app.model.HistoryDatasetAssociation ).get( id )
+
+        if isinstance( value, dict ) and 'values' in value:
+            if hasattr( self, 'multiple' ) and self.multiple is True:
+                return [ single_to_python( v ) for v in value[ 'values' ] ]
+            elif len( value[ 'values' ] ) > 0:
+                return single_to_python( value[ 'values' ][ 0 ] )
+
+        # Handle legacy string values potentially stored in databases
+        none_values = [ None, '', 'None' ]
+        if value in none_values:
+            return None
+        if isinstance( value, string_types ) and value.find( ',' ) > -1:
+            return [ app.model.context.query( app.model.HistoryDatasetAssociation ).get( int( v ) ) for v in value.split( ',' ) if v not in none_values ]
+        elif str( value ).startswith( "__collection_reduce__|" ):
+            decoded_id = str( value )[ len( "__collection_reduce__|" ): ]
+            if not decoded_id.isdigit():
+                decoded_id = app.security.decode_id( decoded_id )
+            return app.model.context.query( app.model.HistoryDatasetCollectionAssociation ).get( int( decoded_id ) )
+        elif str( value ).startswith( "dce:" ):
+            return app.model.context.query( app.model.DatasetCollectionElement ).get( int( value[ len( "dce:" ): ] ) )
+        elif str( value ).startswith( "hdca:" ):
+            return app.model.context.query( app.model.HistoryDatasetCollectionAssociation ).get( int( value[ len( "hdca:" ): ] ) )
+        else:
+            return app.model.context.query( app.model.HistoryDatasetAssociation ).get( int( value ) )
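+    # Legacy string values handled above include, for example:
+    #   '1,2'                     -> [ HDA 1, HDA 2 ]
+    #   '__collection_reduce__|3' -> HDCA 3 (the id may be security-encoded)
+    #   'dce:4' / 'hdca:5'        -> DatasetCollectionElement 4 / HDCA 5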
+
+
+class DataToolParameter( BaseDataToolParameter ):
+    # TODO, Nate: Make sure the following unit tests appropriately test the dataset security
+    # components.  Add as many additional tests as necessary.
+    """
+    Parameter that takes on one (or many) of a specific set of values.
+
+    TODO: There should be an alternate display that allows single selects to be
+          displayed as radio buttons and multiple selects as a set of checkboxes
+
+    TODO: The following must be fixed to test correctly for the new security_check tag in
+    the DataToolParameter ( the last test below is broken ) Nate's next pass at the dataset
+    security stuff will dramatically alter this anyway.
+    """
+
+    def __init__( self, tool, input_source, trans=None):
+        input_source = ensure_input_source( input_source )
+        super(DataToolParameter, self).__init__( tool, input_source, trans )
+        # Add metadata validator
+        if not input_source.get_bool( 'no_validation', False ):
+            self.validators.append( validation.MetadataValidator() )
+        self._parse_formats( trans, tool, input_source )
+        self.multiple = input_source.get_bool('multiple', False)
+        self.min = input_source.get( 'min' )
+        self.max = input_source.get( 'max' )
+        if self.min:
+            try:
+                self.min = int( self.min )
+            except ValueError:
+                raise ValueError( "An integer is required for the min property." )
+        if self.max:
+            try:
+                self.max = int( self.max )
+            except ValueError:
+                raise ValueError( "An integer is required for the max property." )
+        if not self.multiple and (self.min is not None):
+            raise ValueError( "Cannot specify min property on single data parameter '%s'. Set multiple=\"true\" to enable this option." % self.name )
+        if not self.multiple and (self.max is not None):
+            raise ValueError( "Cannot specify max property on single data parameter '%s'. Set multiple=\"true\" to enable this option." % self.name )
+        self.is_dynamic = True
+        self._parse_options( input_source )
+        # Load conversions required for the dataset input
+        self.conversions = []
+        for name, conv_extensions in input_source.parse_conversion_tuples():
+            assert None not in [ name, conv_extensions ], 'A name (%s) and type (%s) are required for explicit conversion' % ( name, conv_extensions )
+            conv_types = [ tool.app.datatypes_registry.get_datatype_by_extension( conv_extensions.lower() ) ]
+            self.conversions.append( ( name, conv_extensions, conv_types ) )
+
+    def match_collections( self, history, dataset_matcher, reduction=True ):
+        dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher )
+
+        for history_dataset_collection in history.active_dataset_collections:
+            if dataset_collection_matcher.hdca_match( history_dataset_collection, reduction=reduction ):
+                yield history_dataset_collection
+
+    def match_datasets( self, history, dataset_matcher ):
+
+        def dataset_collector( hdas, parent_hid ):
+            for i, hda in enumerate( hdas ):
+                if parent_hid is not None:
+                    hid = "%s.%d" % ( parent_hid, i + 1 )
+                else:
+                    hid = str( hda.hid )
+                hda_match = dataset_matcher.hda_match( hda )
+                if not hda_match:
+                    continue
+                yield (hda_match, hid)
+                # Also collect children via association object
+                for item in dataset_collector( hda.children, hid ):
+                    yield item
+
+        for item in dataset_collector( history.active_datasets_children_and_roles, None ):
+            yield item
+
+    def from_json( self, value, trans, other_values={} ):
+        if trans.workflow_building_mode is workflow_building_modes.ENABLED:
+            return None
+        if not value and not self.optional:
+            raise ValueError( "Specify a dataset of the required format / build." )
+        if value in [ None, "None", '' ]:
+            return None
+        if isinstance( value, dict ) and 'values' in value:
+            value = self.to_python( value, trans.app )
+        if isinstance( value, string_types ) and value.find( "," ) > 0:
+            value = [ int( value_part ) for value_part in value.split( "," ) ]
+        if isinstance( value, list ):
+            rval = []
+            found_hdca = False
+            for single_value in value:
+                if isinstance( single_value, dict ) and 'src' in single_value and 'id' in single_value:
+                    if single_value['src'] == 'hda':
+                        rval.append( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id(single_value['id']) ))
+                    elif single_value['src'] == 'hdca':
+                        found_hdca = True
+                        decoded_id = trans.security.decode_id( single_value[ 'id' ] )
+                        rval.append( trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( decoded_id ) )
+                    else:
+                        raise ValueError("Unknown input source %s passed to job submission API." % single_value['src'])
+                elif isinstance( single_value, trans.app.model.HistoryDatasetCollectionAssociation ):
+                    rval.append( single_value )
+                elif isinstance( single_value, trans.app.model.HistoryDatasetAssociation ):
+                    rval.append( single_value )
+                else:
+                    rval.append( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( single_value ) )
+            if found_hdca:
+                for val in rval:
+                    if not isinstance( val, trans.app.model.HistoryDatasetCollectionAssociation ):
+                        raise ValueError( "If collections are supplied to multiple data input parameter, only collections may be used." )
+        elif isinstance( value, trans.app.model.HistoryDatasetAssociation ):
+            rval = value
+        elif isinstance( value, dict ) and 'src' in value and 'id' in value:
+            if value['src'] == 'hda':
+                rval = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id(value['id']) )
+            elif value['src'] == 'hdca':
+                decoded_id = trans.security.decode_id( value[ 'id' ] )
+                rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( decoded_id )
+            else:
+                raise ValueError("Unknown input source %s passed to job submission API." % value['src'])
+        elif str( value ).startswith( "__collection_reduce__|" ):
+            encoded_ids = [ v[ len( "__collection_reduce__|" ): ] for v in str( value ).split(",") ]
+            decoded_ids = map( trans.security.decode_id, encoded_ids )
+            rval = []
+            for decoded_id in decoded_ids:
+                hdca = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( decoded_id )
+                rval.append( hdca )
+        elif isinstance( value, trans.app.model.HistoryDatasetCollectionAssociation ):
+            rval = value
+        else:
+            rval = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( value )
+        if isinstance( rval, list ):
+            values = rval
+        else:
+            values = [ rval ]
+        for v in values:
+            if v:
+                if v.deleted:
+                    raise ValueError( "The previously selected dataset has been deleted." )
+                if hasattr( v, "dataset" ) and v.dataset.state in [ galaxy.model.Dataset.states.ERROR, galaxy.model.Dataset.states.DISCARDED ]:
+                    raise ValueError( "The previously selected dataset has entered an unusable state" )
+        if not self.multiple:
+            if len( values ) > 1:
+                raise ValueError( "More than one dataset supplied to single input dataset parameter." )
+            if len( values ) > 0:
+                rval = values[ 0 ]
+            else:
+                raise ValueError( "Invalid dataset supplied to single input dataset parameter." )
+        return rval
+
+    def to_param_dict_string( self, value, other_values={} ):
+        if value is None:
+            return "None"
+        return value.file_name
+
+    def value_to_display_text( self, value, app ):
+        if value and not isinstance( value, list ):
+            value = [ value ]
+        if value:
+            try:
+                return ", ".join( [ "%s: %s" % ( item.hid, item.name ) for item in value ] )
+            except Exception:
+                pass
+        return "No dataset."
+
+    def validate( self, value, trans=None ):
+        dataset_count = 0
+        for validator in self.validators:
+            def do_validate( v ):
+                if validator.requires_dataset_metadata and v and hasattr( v, 'dataset' ) and v.dataset.state != galaxy.model.Dataset.states.OK:
+                    return
+                else:
+                    validator.validate( v, trans )
+
+            if value and self.multiple:
+                if not isinstance( value, list ):
+                    value = [ value ]
+                for v in value:
+                    if isinstance(v, galaxy.model.HistoryDatasetCollectionAssociation):
+                        for dataset_instance in v.collection.dataset_instances:
+                            dataset_count += 1
+                            do_validate( dataset_instance )
+                    else:
+                        dataset_count += 1
+                        do_validate( v )
+            else:
+                if value:
+                    dataset_count += 1
+                do_validate( value )
+
+        if self.min is not None:
+            if self.min > dataset_count:
+                raise ValueError( "At least %d datasets are required." % self.min )
+        if self.max is not None:
+            if self.max < dataset_count:
+                raise ValueError( "At most %d datasets are required." % self.max )
+
+    def get_dependencies( self ):
+        """
+        Get the *names* of the other params this param depends on.
+        """
+        if self.options:
+            return self.options.get_dependency_names()
+        else:
+            return []
+
+    def converter_safe( self, other_values, trans ):
+        if self.tool is None or self.tool.has_multiple_pages or not hasattr( trans, 'workflow_building_mode' ) or trans.workflow_building_mode:
+            return False
+        if other_values is None:
+            return True  # we don't know other values, so we can't check, assume ok
+        converter_safe = [True]
+
+        def visitor( prefix, input, value, parent=None ):
+            if isinstance( input, SelectToolParameter ) and self.name in input.get_dependencies():
+                if input.is_dynamic and ( input.dynamic_options or ( not input.dynamic_options and not input.options ) or not input.options.converter_safe ):
+                    converter_safe[0] = False  # This option does not allow for conversion, i.e. uses contents of dataset file to generate options
+        self.tool.visit_inputs( other_values, visitor )
+        return False not in converter_safe
+
+    def get_options_filter_attribute( self, value ):
+        # HACK to get around a current hardcoded limitation: when a set of dynamic options is defined for a DataToolParameter,
+        # it always causes available datasets to be filtered by dbkey.
+        # This behavior needs to be entirely reworked (in a backwards-compatible manner).
+        options_filter_attribute = self.options_filter_attribute
+        if options_filter_attribute is None:
+            return value.get_dbkey()
+        if options_filter_attribute.endswith( "()" ):
+            call_attribute = True
+            options_filter_attribute = options_filter_attribute[:-2]
+        else:
+            call_attribute = False
+        ref = value
+        for attribute in options_filter_attribute.split( '.' ):
+            ref = getattr( ref, attribute )
+        if call_attribute:
+            ref = ref()
+        return ref
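+    # Illustrative example: options_filter_attribute="metadata.dbkey" resolves
+    # value.metadata.dbkey, while a trailing "()" (e.g. "get_dbkey()") causes
+    # the resolved attribute to be called instead.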
+
+    def to_dict( self, trans, other_values={} ):
+        # create dictionary and fill default parameters
+        d = super( DataToolParameter, self ).to_dict( trans )
+        extensions = self.extensions
+        datatypes_registry = self._datatypes_registry( trans, self.tool )
+        all_edam_formats = datatypes_registry.edam_formats if hasattr( datatypes_registry, 'edam_formats' ) else {}
+        all_edam_data = datatypes_registry.edam_data if hasattr( datatypes_registry, 'edam_data' ) else {}
+        edam_formats = map(lambda ext: all_edam_formats.get(ext, None), extensions)
+        edam_data = map(lambda ext: all_edam_data.get(ext, None), extensions)
+
+        d['extensions'] = extensions
+        d['edam'] = {'edam_formats': edam_formats, 'edam_data': edam_data}
+        d['multiple'] = self.multiple
+        if self.multiple:
+            # For consistency, should these just always be in the dict?
+            d['min'] = self.min
+            d['max'] = self.max
+        d['options'] = {'hda': [], 'hdca': []}
+
+        # return dictionary without options if context is unavailable
+        history = trans.history
+        if history is None or trans.workflow_building_mode is workflow_building_modes.ENABLED:
+            return d
+
+        # prepare dataset/collection matching
+        dataset_matcher = DatasetMatcher( trans, self, None, other_values )
+        multiple = self.multiple
+
+        # build and append a new select option
+        def append( options, item_id, hid, name, src, keep=False ):
+            return options.append( { 'id' : trans.security.encode_id( item_id ), 'hid' : hid, 'name' : name, 'src' : src, 'keep': keep } )
+
+        # add datasets
+        visible_hda = other_values.get( self.name )
+        has_matched = False
+        for hda in history.active_datasets_children_and_roles:
+            match = dataset_matcher.hda_match( hda, check_security=False )
+            if match:
+                m = match.hda
+                has_matched = has_matched or visible_hda == m or visible_hda == hda
+                m_name = '%s (as %s)' % ( match.original_hda.name, match.target_ext ) if match.implicit_conversion else m.name
+                append( d[ 'options' ][ 'hda' ], m.id, m.hid, m_name if m.visible else '(hidden) %s' % m_name, 'hda' )
+        if not has_matched and hasattr( visible_hda, 'id' ) and hasattr( visible_hda, 'hid' ) and hasattr( visible_hda, 'name' ):
+            append( d[ 'options' ][ 'hda' ], visible_hda.id, visible_hda.hid, '(unavailable) %s' % visible_hda.name, 'hda', True )
+
+        # add dataset collections
+        dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher )
+        for hdca in history.active_dataset_collections:
+            if dataset_collection_matcher.hdca_match( hdca, reduction=multiple ):
+                append( d[ 'options' ][ 'hdca' ], hdca.id, hdca.hid, hdca.name, 'hdca' )
+
+        # sort both lists
+        d['options']['hda'] = sorted(d['options']['hda'], key=lambda k: k['hid'], reverse=True)
+        d['options']['hdca'] = sorted(d['options']['hdca'], key=lambda k: k['hid'], reverse=True)
+
+        # return final dictionary
+        return d
+
+
+class DataCollectionToolParameter( BaseDataToolParameter ):
+    """
+    """
+
+    def __init__( self, tool, input_source, trans=None ):
+        input_source = ensure_input_source( input_source )
+        super(DataCollectionToolParameter, self).__init__( tool, input_source, trans )
+        self._parse_formats( trans, tool, input_source )
+        collection_types = input_source.get("collection_type", None)
+        if collection_types:
+            collection_types = [t.strip() for t in collection_types.split(",")]
+        self._collection_types = collection_types
+        self.multiple = False  # accessed in many DataToolParameter code paths; multiple collections may be supported in the future
+        self.is_dynamic = True
+        self._parse_options( input_source )  # TODO: Review and test.
+
+    @property
+    def collection_types( self ):
+        return self._collection_types
+
+    def _history_query( self, trans ):
+        dataset_collection_type_descriptions = trans.app.dataset_collections_service.collection_type_descriptions
+        return history_query.HistoryQuery.from_parameter( self, dataset_collection_type_descriptions )
+
+    def match_collections( self, trans, history, dataset_matcher ):
+        dataset_collections = trans.app.dataset_collections_service.history_dataset_collections( history, self._history_query( trans ) )
+        dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher )
+
+        for dataset_collection_instance in dataset_collections:
+            if not dataset_collection_matcher.hdca_match( dataset_collection_instance ):
+                continue
+            yield dataset_collection_instance
+
+    def match_multirun_collections( self, trans, history, dataset_matcher ):
+        dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher )
+
+        for history_dataset_collection in history.active_dataset_collections:
+            if not self._history_query( trans ).can_map_over( history_dataset_collection ):
+                continue
+
+            datasets_match = dataset_collection_matcher.hdca_match( history_dataset_collection )
+            if datasets_match:
+                yield history_dataset_collection
+
+    def from_json( self, value, trans, other_values={} ):
+        rval = None
+        if trans.workflow_building_mode is workflow_building_modes.ENABLED:
+            return None
+        if not value and not self.optional:
+            raise ValueError( "Specify a dataset collection of the correct type." )
+        if value in [None, "None"]:
+            return None
+        if isinstance( value, dict ) and 'values' in value:
+            value = self.to_python( value, trans.app )
+        if isinstance( value, string_types ) and value.find( "," ) > 0:
+            value = [ int( value_part ) for value_part in value.split( "," ) ]
+        elif isinstance( value, trans.app.model.HistoryDatasetCollectionAssociation ):
+            rval = value
+        elif isinstance( value, trans.app.model.DatasetCollectionElement ):
+            # When mapping over a nested collection, this parameter will receive
+            # a DatasetCollectionElement instead of a
+            # HistoryDatasetCollectionAssociation.
+            rval = value
+        elif isinstance( value, dict ) and 'src' in value and 'id' in value:
+            if value['src'] == 'hdca':
+                rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( trans.security.decode_id(value['id']) )
+        elif isinstance( value, list ):
+            if len( value ) > 0:
+                value = value[0]
+                if isinstance( value, dict ) and 'src' in value and 'id' in value:
+                    if value['src'] == 'hdca':
+                        rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( trans.security.decode_id(value['id']) )
+        elif isinstance( value, string_types ):
+            if value.startswith( "dce:" ):
+                rval = trans.sa_session.query( trans.app.model.DatasetCollectionElement ).get( value[ len( "dce:"): ] )
+            elif value.startswith( "hdca:" ):
+                rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( value[ len( "hdca:"): ] )
+            else:
+                rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( value )
+        if rval and isinstance( rval, trans.app.model.HistoryDatasetCollectionAssociation ):
+            if rval.deleted:
+                raise ValueError( "The previously selected dataset collection has been deleted" )
+            # TODO: Handle error states, implement error states ...
+        return rval
+
+    def value_to_display_text( self, value, app ):
+        try:
+            if isinstance( value, galaxy.model.HistoryDatasetCollectionAssociation ):
+                display_text = "%s: %s" % ( value.hid, value.name )
+            else:
+                display_text = "Element %d:%s" % ( value.identifier_index, value.identifier_name )
+        except AttributeError:
+            display_text = "No dataset collection."
+        return display_text
+
+    def validate( self, value, trans=None ):
+        return True  # TODO
+
+    def to_dict( self, trans, other_values=None ):
+        # create dictionary and fill default parameters
+        other_values = other_values or {}
+        d = super( DataCollectionToolParameter, self ).to_dict( trans )
+        d['extensions'] = self.extensions
+        d['multiple'] = self.multiple
+        d['options'] = {'hda': [], 'hdca': []}
+
+        # return dictionary without options if context is unavailable
+        history = trans.history
+        if history is None or trans.workflow_building_mode is workflow_building_modes.ENABLED:
+            return d
+
+        # prepare dataset/collection matching
+        dataset_matcher = DatasetMatcher( trans, self, None, other_values )
+
+        # append directly matched collections
+        for hdca in self.match_collections( trans, history, dataset_matcher ):
+            d['options']['hdca'].append({
+                'id': trans.security.encode_id( hdca.id ),
+                'hid': hdca.hid,
+                'name': hdca.name,
+                'src': 'hdca'
+            })
+
+        # append matching subcollections
+        for hdca in self.match_multirun_collections( trans, history, dataset_matcher ):
+            subcollection_type = self._history_query( trans ).can_map_over( hdca ).collection_type
+            d['options']['hdca'].append({
+                'id': trans.security.encode_id( hdca.id ),
+                'hid': hdca.hid,
+                'name': hdca.name,
+                'src': 'hdca',
+                'map_over_type': subcollection_type
+            })
+
+        # sort both lists
+        d['options']['hdca'] = sorted(d['options']['hdca'], key=lambda k: k['hid'], reverse=True)
+
+        # return final dictionary
+        return d
+
+
+class HiddenDataToolParameter( HiddenToolParameter, DataToolParameter ):
+    """
+    Hidden parameter that behaves as a DataToolParameter. As with all hidden
+    parameters, this is a HACK.
+    """
+    def __init__( self, tool, elem ):
+        DataToolParameter.__init__( self, tool, elem )
+        self.value = "None"
+        self.type = "hidden_data"
+        self.hidden = True
+
+    def get_initial_value( self, trans, other_values ):
+        return None
+
+
+class LibraryDatasetToolParameter( ToolParameter ):
+    """
+    Parameter that lets users select a LDDA from a modal window, then use it within the wrapper.
+    """
+
+    def __init__( self, tool, input_source, context=None ):
+        input_source = ensure_input_source( input_source )
+        ToolParameter.__init__( self, tool, input_source )
+        self.multiple = input_source.get_bool( 'multiple', True )
+
+    def get_initial_value( self, trans, other_values ):
+        return None
+
+    def from_json( self, value, trans, other_values={} ):
+        return self.to_python( value, trans.app, other_values=other_values, validate=True )
+
+    def to_param_dict_string( self, value, other_values={} ):
+        if value is None:
+            return 'None'
+        elif self.multiple:
+            return [ dataset.get_file_name() for dataset in value ]
+        else:
+            return value[ 0 ].get_file_name()
+
+    # converts values to json representation:
+    #   { id: LibraryDatasetDatasetAssociation.id, name: LibraryDatasetDatasetAssociation.name, src: 'ldda' }
+    def to_json( self, value, app, use_security ):
+        if not isinstance( value, list ):
+            value = [value]
+        lst = []
+        for item in value:
+            lda_id = lda_name = None
+            if isinstance(item, app.model.LibraryDatasetDatasetAssociation):
+                lda_id = app.security.encode_id( item.id ) if use_security else item.id
+                lda_name = item.name
+            elif isinstance(item, dict):
+                lda_id = item.get('id')
+                lda_name = item.get('name')
+            else:
+                lst = []
+                break
+            if lda_id is not None:
+                lst.append( {
+                    'id'   : lda_id,
+                    'name' : lda_name,
+                    'src'  : 'ldda'
+                } )
+        if len( lst ) == 0:
+            return None
+        else:
+            return lst
+
+    # converts values into python representation:
+    #   LibraryDatasetDatasetAssociation
+    # valid input values (incl. arrays of mixed sets) are:
+    #   1. LibraryDatasetDatasetAssociation
+    #   2. LibraryDatasetDatasetAssociation.id
+    #   3. { id: LibraryDatasetDatasetAssociation.id, ... }
+    def to_python( self, value, app, other_values={}, validate=False ):
+        if not isinstance( value, list ):
+            value = [value]
+        lst = []
+        for item in value:
+            if isinstance(item, app.model.LibraryDatasetDatasetAssociation):
+                lst.append(item)
+            else:
+                lda_id = None
+                if isinstance(item, dict):
+                    lda_id = item.get('id')
+                elif isinstance(item, string_types):
+                    lda_id = item
+                else:
+                    lst = []
+                    break
+                lda = app.model.context.query( app.model.LibraryDatasetDatasetAssociation ).get( lda_id if isinstance( lda_id, int ) else app.security.decode_id( lda_id ) )
+                if lda is not None:
+                    lst.append( lda )
+                elif validate:
+                    raise ValueError( "One of the selected library datasets is invalid or not available anymore." )
+        if len( lst ) == 0:
+            if not self.optional and validate:
+                raise ValueError( "Please select a valid library dataset." )
+            return None
+        else:
+            return lst
+
+    def to_dict( self, trans, other_values=None ):
+        d = super( LibraryDatasetToolParameter, self ).to_dict( trans )
+        d['multiple'] = self.multiple
+        return d
+
+
+parameter_types = dict(
+    text=TextToolParameter,
+    integer=IntegerToolParameter,
+    float=FloatToolParameter,
+    boolean=BooleanToolParameter,
+    genomebuild=GenomeBuildParameter,
+    select=SelectToolParameter,
+    color=ColorToolParameter,
+    data_column=ColumnListParameter,
+    hidden=HiddenToolParameter,
+    hidden_data=HiddenDataToolParameter,
+    baseurl=BaseURLToolParameter,
+    file=FileToolParameter,
+    ftpfile=FTPFileToolParameter,
+    data=DataToolParameter,
+    data_collection=DataCollectionToolParameter,
+    library_data=LibraryDatasetToolParameter,
+    drill_down=DrillDownSelectToolParameter
+)
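+# For example, a tool XML element like <param type="data_column" .../> is
+# instantiated through parameter_types[ 'data_column' ], i.e. ColumnListParameter.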
+
+
+class RuntimeValue( object ):
+    """
+    Wrapper to note a value that is not yet set, but will be required at runtime.
+    """
+    pass
diff --git a/lib/galaxy/tools/parameters/dataset_matcher.py b/lib/galaxy/tools/parameters/dataset_matcher.py
new file mode 100644
index 0000000..9d3a017
--- /dev/null
+++ b/lib/galaxy/tools/parameters/dataset_matcher.py
@@ -0,0 +1,184 @@
+from logging import getLogger
+
+import galaxy.model
+
+log = getLogger( __name__ )
+
+ROLES_UNSET = object()
+INVALID_STATES = [ galaxy.model.Dataset.states.ERROR, galaxy.model.Dataset.states.DISCARDED ]
+
+
+class DatasetMatcher( object ):
+    """ Utility class to aid DataToolParameter and similar classes in reasoning
+    about what HDAs could match or are selected for a parameter and value.
+
+    Goal here is to both encapsulate and reuse logic related to filtering,
+    datatype matching, hiding errored dataset, finding implicit conversions,
+    and permission handling.
+    """
+
+    def __init__( self, trans, param, value, other_values ):
+        self.trans = trans
+        self.param = param
+        self.tool = param.tool
+        self.value = value
+        self.current_user_roles = ROLES_UNSET
+        filter_value = None
+        if param.options and other_values:
+            try:
+                filter_value = param.options.get_options( trans, other_values )[0][0]
+            except IndexError:
+                pass  # no valid options
+        self.filter_value = filter_value
+
+    def hda_accessible( self, hda, check_security=True ):
+        """ Does HDA correspond to dataset that is an a valid state and is
+        accessible to user.
+        """
+        dataset = hda.dataset
+        has_tool = self.tool
+        if has_tool:
+            valid_input_states = self.tool.valid_input_states
+        else:
+            valid_input_states = galaxy.model.Dataset.valid_input_states
+        state_valid = dataset.state in valid_input_states
+        return state_valid and ( not check_security or self.__can_access_dataset( dataset ) )
+
+    def valid_hda_match( self, hda, check_implicit_conversions=True, check_security=False ):
+        """ Return False of this parameter can not be matched to the supplied
+        HDA, otherwise return a description of the match (either a
+        HdaDirectMatch describing a direct match or a HdaImplicitMatch
+        describing an implicit conversion.)
+        """
+        rval = False
+        formats = self.param.formats
+        if hda.datatype.matches_any( formats ):
+            rval = HdaDirectMatch( hda )
+        else:
+            if not check_implicit_conversions:
+                return False
+            target_ext, converted_dataset = hda.find_conversion_destination( formats )
+            if target_ext:
+                original_hda = hda
+                if converted_dataset:
+                    hda = converted_dataset
+                if check_security and not self.__can_access_dataset( hda.dataset ):
+                    return False
+                rval = HdaImplicitMatch( hda, target_ext, original_hda )
+            else:
+                return False
+        if self.filter( hda ):
+            return False
+        return rval
+
+    def hda_match( self, hda, check_implicit_conversions=True, check_security=True, ensure_visible=True ):
+        """ If HDA is accessible, return information about whether it could
+        match this parameter and if so how. See valid_hda_match for more
+        information.
+        """
+        accessible = self.hda_accessible( hda, check_security=check_security )
+        if accessible and ( not ensure_visible or hda.visible or ( self.selected( hda ) and not hda.implicitly_converted_parent_datasets ) ):
+            # If we are sending data to an external application, then we need to make sure there are no roles
+            # associated with the dataset that restrict its access from "public".
+            require_public = self.tool and self.tool.tool_type == 'data_destination'
+            if require_public and not self.trans.app.security_agent.dataset_is_public( hda.dataset ):
+                return False
+            if self.filter( hda ):
+                return False
+            return self.valid_hda_match( hda, check_implicit_conversions=check_implicit_conversions )
+
+    def selected( self, hda ):
+        """ Given value for DataToolParameter, is this HDA "selected".
+        """
+        value = self.value
+        if value and str( value[ 0 ] ).isdigit():
+            return hda.id in map(int, value)
+        else:
+            return value and hda in value
+
+    def filter( self, hda ):
+        """ Filter out this value based on other values for job (if
+        applicable).
+        """
+        param = self.param
+        return param.options and param.get_options_filter_attribute( hda ) != self.filter_value
+
+    def __can_access_dataset( self, dataset ):
+        # Lazily cache current_user_roles.
+        if self.current_user_roles is ROLES_UNSET:
+            self.current_user_roles = self.trans.get_current_user_roles()
+        return self.trans.app.security_agent.can_access_dataset( self.current_user_roles, dataset )
+
+
+class HdaDirectMatch( object ):
+    """ Supplied HDA was a valid option directly (did not need to find implicit
+    conversion).
+    """
+
+    def __init__( self, hda ):
+        self.hda = hda
+
+    @property
+    def implicit_conversion( self ):
+        return False
+
+
+class HdaImplicitMatch( object ):
+    """ Supplied HDA was a valid option directly (did not need to find implicit
+    conversion).
+    """
+
+    def __init__( self, hda, target_ext, original_hda ):
+        self.original_hda = original_hda
+        self.hda = hda
+        self.target_ext = target_ext
+
+    @property
+    def implicit_conversion( self ):
+        return True
+
+
+class DatasetCollectionMatcher( object ):
+
+    def __init__( self, dataset_matcher ):
+        self.dataset_matcher = dataset_matcher
+
+    def __valid_element( self, element ):
+        # Simplify things for now and assume these are HDAs and not implicit
+        # conversions. One could imagine handling both of those cases down
+        # the road.
+        if element.ldda:
+            return False
+
+        child_collection = element.child_collection
+        if child_collection:
+            return self.dataset_collection_match( child_collection )
+
+        hda = element.hda
+        if not hda:
+            return False
+        hda_match = self.dataset_matcher.hda_match( hda, ensure_visible=False )
+        return hda_match and not hda_match.implicit_conversion
+
+    def hdca_match( self, history_dataset_collection_association, reduction=False ):
+        dataset_collection = history_dataset_collection_association.collection
+        if reduction and dataset_collection.collection_type.find( ":" ) > 0:
+            return False
+        else:
+            return self.dataset_collection_match( dataset_collection )
+
+    def dataset_collection_match( self, dataset_collection ):
+        # If dataset collection not yet populated, cannot determine if it
+        # would be a valid match for this parameter.
+        if not dataset_collection.populated:
+            return False
+
+        valid = True
+        for element in dataset_collection.elements:
+            if not self.__valid_element( element ):
+                valid = False
+                break
+        return valid
+
+
+__all__ = ( 'DatasetMatcher', 'DatasetCollectionMatcher' )
diff --git a/lib/galaxy/tools/parameters/dynamic_options.py b/lib/galaxy/tools/parameters/dynamic_options.py
new file mode 100644
index 0000000..2483017
--- /dev/null
+++ b/lib/galaxy/tools/parameters/dynamic_options.py
@@ -0,0 +1,654 @@
+"""
+Support for generating the options for a SelectToolParameter dynamically (based
+on the values of other parameters or other aspects of the current state)
+"""
+
+import logging
+import os
+import validation
+from galaxy.util import string_as_bool
+from galaxy.model import User, HistoryDatasetAssociation, HistoryDatasetCollectionAssociation
+import galaxy.tools
+
+log = logging.getLogger(__name__)
+
+
+class Filter( object ):
+    """
+    A filter takes the current options list and modifies it.
+    """
+    @classmethod
+    def from_element( cls, d_option, elem ):
+        """Loads the proper filter by the type attribute of elem"""
+        type = elem.get( 'type', None )
+        assert type is not None, "Required 'type' attribute missing from filter"
+        return filter_types[type.strip()]( d_option, elem )
+
+    def __init__( self, d_option, elem ):
+        self.dynamic_option = d_option
+        self.elem = elem
+
+    def get_dependency_name( self ):
+        """Returns the name of any depedencies, otherwise None"""
+        return None
+
+    def filter_options( self, options, trans, other_values ):
+        """Returns a list of options after the filter is applied"""
+        raise TypeError( "Abstract Method" )
+
+
+class StaticValueFilter( Filter ):
+    """
+    Filters a list of options on a column by a static value.
+
+    Type: static_value
+
+    Required Attributes:
+        value: static value to compare to
+        column: column in options to compare with
+    Optional Attributes:
+        keep: Keep options matching value (True)
+              Discard options matching value (False)
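+
+    Example (illustrative; the column index and value are arbitrary)::
+
+        <filter type="static_value" column="2" value="hg19" keep="true"/>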
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        self.value = elem.get( "value", None )
+        assert self.value is not None, "Required 'value' attribute missing from filter"
+        column = elem.get( "column", None )
+        assert column is not None, "Required 'column' attribute missing from filter, when loading from file"
+        self.column = d_option.column_spec_to_index( column )
+        self.keep = string_as_bool( elem.get( "keep", 'True' ) )
+
+    def filter_options( self, options, trans, other_values ):
+        rval = []
+        filter_value = self.value
+        try:
+            filter_value = User.expand_user_properties( trans.user, filter_value)
+        except Exception:
+            pass
+        for fields in options:
+            if ( self.keep and fields[self.column] == filter_value ) or ( not self.keep and fields[self.column] != filter_value ):
+                rval.append( fields )
+        return rval
+
+
+class DataMetaFilter( Filter ):
+    """
+    Filters a list of options on a column by a dataset metadata value.
+
+    Type: data_meta
+
+    When no 'from' source has been specified in the <options> tag, this will populate the options list with (meta_value, meta_value, False).
+    Otherwise, options which do not match the metadata value in the column are discarded.
+
+    Required Attributes:
+
+        - ref: Name of input dataset
+        - key: Metadata key to use for comparison
+        - column: column in options to compare with (not required when not associated with input options)
+
+    Optional Attributes:
+
+        - multiple: Option values are multiple, split column by separator (True)
+        - separator: When multiple split by this (,)
+
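+    Example (illustrative; the ref name is arbitrary)::
+
+        <filter type="data_meta" ref="input_bam" key="dbkey" column="1"/>
+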
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        self.ref_name = elem.get( "ref", None )
+        assert self.ref_name is not None, "Required 'ref' attribute missing from filter"
+        d_option.has_dataset_dependencies = True
+        self.key = elem.get( "key", None )
+        assert self.key is not None, "Required 'key' attribute missing from filter"
+        self.column = elem.get( "column", None )
+        if self.column is None:
+            assert self.dynamic_option.file_fields is None and self.dynamic_option.dataset_ref_name is None, "Required 'column' attribute missing from filter, when loading from file"
+        else:
+            self.column = d_option.column_spec_to_index( self.column )
+        self.multiple = string_as_bool( elem.get( "multiple", "False" ) )
+        self.separator = elem.get( "separator", "," )
+
+    def get_dependency_name( self ):
+        return self.ref_name
+
+    def filter_options( self, options, trans, other_values ):
+        def compare_meta_value( file_value, dataset_value ):
+            if isinstance( dataset_value, list ):
+                if self.multiple:
+                    file_value = file_value.split( self.separator )
+                    for value in dataset_value:
+                        if value not in file_value:
+                            return False
+                    return True
+                return file_value in dataset_value
+            if self.multiple:
+                return dataset_value in file_value.split( self.separator )
+            return file_value == dataset_value
+        ref = other_values.get( self.ref_name, None )
+        if isinstance( ref, HistoryDatasetCollectionAssociation ):
+            ref = ref.to_hda_representative( self.multiple )
+        is_data = isinstance( ref, galaxy.tools.wrappers.DatasetFilenameWrapper )
+        is_data_list = isinstance( ref, galaxy.tools.wrappers.DatasetListWrapper ) or isinstance( ref, list )
+        is_data_or_data_list = is_data or is_data_list
+        if not isinstance( ref, HistoryDatasetAssociation ) and not is_data_or_data_list:
+            return []  # not a valid dataset
+
+        if is_data_list:
+            meta_value = None
+            for single_ref in ref:
+                this_meta_value = single_ref.metadata.get( self.key, None )
+                if this_meta_value == meta_value:
+                    continue
+                elif meta_value is None:
+                    meta_value = this_meta_value
+                else:
+                    # Different values with mismatching metadata, return []
+                    return []
+        else:
+            meta_value = ref.metadata.get( self.key, None )
+
+        if meta_value is None:
+            return [ ( disp_name, optval, selected ) for disp_name, optval, selected in options ]
+
+        if self.column is not None:
+            rval = []
+            for fields in options:
+                if compare_meta_value( fields[self.column], meta_value ):
+                    rval.append( fields )
+            return rval
+        else:
+            if not self.dynamic_option.columns:
+                self.dynamic_option.columns = {
+                    "name" : 0,
+                    "value" : 1,
+                    "selected" : 2
+                }
+                self.dynamic_option.largest_index = 2
+            if not isinstance( meta_value, list ):
+                meta_value = [meta_value]
+            for value in meta_value:
+                options.append( ( value, value, False ) )
+            return options
+
+
+class ParamValueFilter( Filter ):
+    """
+    Filters a list of options on a column by the value of another input.
+
+    Type: param_value
+
+    Required Attributes:
+
+        - ref: Name of input value
+        - column: column in options to compare with
+
+    Optional Attributes:
+
+        - keep: Keep options matching value (True)
+                Discard options matching value (False)
+        - ref_attribute: Period (.) separated attribute chain of input (ref) to use as value for filter
+
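+    Example (illustrative; the ref name is arbitrary)::
+
+        <filter type="param_value" ref="genome_build" column="0" keep="true"/>
+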
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        self.ref_name = elem.get( "ref", None )
+        assert self.ref_name is not None, "Required 'ref' attribute missing from filter"
+        column = elem.get( "column", None )
+        assert column is not None, "Required 'column' attribute missing from filter"
+        self.column = d_option.column_spec_to_index( column )
+        self.keep = string_as_bool( elem.get( "keep", 'True' ) )
+        self.ref_attribute = elem.get( "ref_attribute", None )
+        if self.ref_attribute:
+            self.ref_attribute = self.ref_attribute.split( '.' )
+        else:
+            self.ref_attribute = []
+
+    def get_dependency_name( self ):
+        return self.ref_name
+
+    def filter_options( self, options, trans, other_values ):
+        if trans is not None and trans.workflow_building_mode:
+            return []
+        ref = other_values.get( self.ref_name, None )
+        for ref_attribute in self.ref_attribute:
+            if not hasattr( ref, ref_attribute ):
+                return []  # ref does not have attribute, so we cannot filter, return empty list
+            ref = getattr( ref, ref_attribute )
+        ref = str( ref )
+        rval = []
+        for fields in options:
+            if ( self.keep and fields[self.column] == ref ) or ( not self.keep and fields[self.column] != ref ):
+                rval.append( fields )
+        return rval
+
+
+class UniqueValueFilter( Filter ):
+    """
+    Filters a list of options to be unique by a column value.
+
+    Type: unique_value
+
+    Required Attributes:
+        column: column in options to compare with
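+
+    Example (illustrative)::
+
+        <filter type="unique_value" column="1"/>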
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        column = elem.get( "column", None )
+        assert column is not None, "Required 'column' attribute missing from filter"
+        self.column = d_option.column_spec_to_index( column )
+
+    def get_dependency_name( self ):
+        return self.dynamic_option.dataset_ref_name
+
+    def filter_options( self, options, trans, other_values ):
+        rval = []
+        skip_list = []
+        for fields in options:
+            if fields[self.column] not in skip_list:
+                rval.append( fields )
+                skip_list.append( fields[self.column] )
+        return rval
+
+
+class MultipleSplitterFilter( Filter ):
+    """
+    Turns a single line of options into multiple lines, by splitting a column and creating a line for each item.
+
+    Type: multiple_splitter
+
+    Required Attributes:
+        column: column(s) in options to split (comma-separated list allowed)
+    Optional Attributes:
+        separator: Split column by this (,)
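+
+    Example (illustrative; a row whose column 1 holds "a,b" becomes two rows
+    holding "a" and "b")::
+
+        <filter type="multiple_splitter" column="1" separator=","/>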
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        self.separator = elem.get( "separator", "," )
+        columns = elem.get( "column", None )
+        assert columns is not None, "Required 'column' attribute missing from filter"
+        self.columns = [ d_option.column_spec_to_index( column ) for column in columns.split( "," ) ]
+
+    def filter_options( self, options, trans, other_values ):
+        rval = []
+        for fields in options:
+            for column in self.columns:
+                for field in fields[column].split( self.separator ):
+                    rval.append( fields[0:column] + [field] + fields[column + 1:] )
+        return rval
+
+
+class AttributeValueSplitterFilter( Filter ):
+    """
+    Filters a list of attribute-value pairs to be unique attribute names.
+
+    Type: attribute_value_splitter
+
+    Required Attributes:
+        column: column(s) in options containing attribute-value pairs
+    Optional Attributes:
+        pair_separator: Split column by this (,)
+        name_val_separator: Split name-value pair by this ( whitespace )
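+
+    Example (illustrative)::
+
+        <filter type="attribute_value_splitter" column="1" pair_separator=";"/>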
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        self.pair_separator = elem.get( "pair_separator", "," )
+        self.name_val_separator = elem.get( "name_val_separator", None )
+        self.columns = elem.get( "column", None )
+        assert self.columns is not None, "Required 'column' attribute missing from filter"
+        self.columns = [ int( column ) for column in self.columns.split( "," ) ]
+
+    def filter_options( self, options, trans, other_values ):
+        attr_names = []
+        rval = []
+        for fields in options:
+            for column in self.columns:
+                for pair in fields[column].split( self.pair_separator ):
+                    ary = pair.split( self.name_val_separator )
+                    if len( ary ) == 2:
+                        name = ary[0]
+                        if name not in attr_names:
+                            rval.append( fields[0:column] + [name] + fields[column:] )
+                            attr_names.append( name )
+        return rval
+
+
+class AdditionalValueFilter( Filter ):
+    """
+    Adds a single static value to an options list.
+
+    Type: add_value
+
+    Required Attributes:
+        value: value to appear in select list
+    Optional Attributes:
+        name: Display name to appear in select list (value)
+        index: Index of option list to add value (APPEND)
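+
+    Example (illustrative; inserts a "None" choice at the top of the list)::
+
+        <filter type="add_value" name="None" value="None" index="0"/>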
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        self.value = elem.get( "value", None )
+        assert self.value is not None, "Required 'value' attribute missing from filter"
+        self.name = elem.get( "name", None )
+        if self.name is None:
+            self.name = self.value
+        self.index = elem.get( "index", None )
+        if self.index is not None:
+            self.index = int( self.index )
+
+    def filter_options( self, options, trans, other_values ):
+        rval = list( options )
+        add_value = []
+        for _ in range( self.dynamic_option.largest_index + 1 ):
+            add_value.append( "" )
+        value_col = self.dynamic_option.columns.get( 'value', 0 )
+        name_col = self.dynamic_option.columns.get( 'name', value_col )
+        # Set name first, then value, in case they are the same column
+        add_value[ name_col ] = self.name
+        add_value[ value_col ] = self.value
+        if self.index is not None:
+            rval.insert( self.index, add_value )
+        else:
+            rval.append( add_value )
+        return rval
+
+
+class RemoveValueFilter( Filter ):
+    """
+    Removes a value from an options list.
+
+    Type: remove_value
+
+    Required Attributes::
+
+        value: value to remove from select list
+            or
+        ref: param to refer to
+            or
+        meta_ref: dataset to refer to
+        key: metadata key to compare to
+
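+    Example (illustrative)::
+
+        <filter type="remove_value" value="None"/>
+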
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        self.value = elem.get( "value", None )
+        self.ref_name = elem.get( "ref", None )
+        self.meta_ref = elem.get( "meta_ref", None )
+        self.metadata_key = elem.get( "key", None )
+        assert self.value is not None or ( ( self.ref_name is not None or self.meta_ref is not None ) and self.metadata_key is not None ), "Required 'value' or 'ref'/'meta_ref' and 'key' attributes missing from filter"
+        self.multiple = string_as_bool( elem.get( "multiple", "False" ) )
+        self.separator = elem.get( "separator", "," )
+
+    def filter_options( self, options, trans, other_values ):
+        if trans is not None and trans.workflow_building_mode:
+            return options
+
+        def compare_value( option_value, filter_value ):
+            if isinstance( filter_value, list ):
+                if self.multiple:
+                    option_value = option_value.split( self.separator )
+                    for value in filter_value:
+                        if value not in option_value:
+                            return False
+                    return True
+                return option_value in filter_value
+            if self.multiple:
+                return filter_value in option_value.split( self.separator )
+            return option_value == filter_value
+        value = self.value
+        if value is None:
+            if self.ref_name is not None:
+                value = other_values.get( self.ref_name )
+            else:
+                data_ref = other_values.get( self.meta_ref )
+                if isinstance( data_ref, HistoryDatasetCollectionAssociation ):
+                    data_ref = data_ref.to_hda_representative()
+                if not isinstance( data_ref, HistoryDatasetAssociation ) and not isinstance( data_ref, galaxy.tools.wrappers.DatasetFilenameWrapper ):
+                    return options  # cannot modify options
+                value = data_ref.metadata.get( self.metadata_key, None )
+        return [ ( disp_name, optval, selected ) for disp_name, optval, selected in options if not compare_value( optval, value ) ]
+
+
+class SortByColumnFilter( Filter ):
+    """
+    Sorts an options list by a column.
+
+    Type: sort_by
+
+    Required Attributes:
+        column: column to sort by
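+
+    Example (illustrative)::
+
+        <filter type="sort_by" column="0"/>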
+    """
+    def __init__( self, d_option, elem ):
+        Filter.__init__( self, d_option, elem )
+        column = elem.get( "column", None )
+        assert column is not None, "Required 'column' attribute missing from filter"
+        self.column = d_option.column_spec_to_index( column )
+
+    def filter_options( self, options, trans, other_values ):
+        # Stable ascending sort on the configured column.
+        return sorted( options, key=lambda fields: fields[ self.column ] )
+
+
+filter_types = dict( data_meta=DataMetaFilter,
+                     param_value=ParamValueFilter,
+                     static_value=StaticValueFilter,
+                     unique_value=UniqueValueFilter,
+                     multiple_splitter=MultipleSplitterFilter,
+                     attribute_value_splitter=AttributeValueSplitterFilter,
+                     add_value=AdditionalValueFilter,
+                     remove_value=RemoveValueFilter,
+                     sort_by=SortByColumnFilter )
+
+
+class DynamicOptions( object ):
+    """Handles dynamically generated SelectToolParameter options"""
+    def __init__( self, elem, tool_param ):
+        def load_from_parameter( from_parameter, transform_lines=None ):
+            obj = self.tool_param
+            for field in from_parameter.split( '.' ):
+                obj = getattr( obj, field )
+            if transform_lines:
+                obj = eval( transform_lines )
+            return self.parse_file_fields( obj )
+        self.tool_param = tool_param
+        self.columns = {}
+        self.filters = []
+        self.file_fields = None
+        self.largest_index = 0
+        self.dataset_ref_name = None
+        # True if the options generation depends on one or more other parameters
+        # that are dataset inputs
+        self.has_dataset_dependencies = False
+        self.validators = []
+        self.converter_safe = True
+
+        # Parse the <options> tag
+        self.separator = elem.get( 'separator', '\t' )
+        self.line_startswith = elem.get( 'startswith', None )
+        data_file = elem.get( 'from_file', None )
+        self.index_file = None
+        self.missing_index_file = None
+        dataset_file = elem.get( 'from_dataset', None )
+        from_parameter = elem.get( 'from_parameter', None )
+        tool_data_table_name = elem.get( 'from_data_table', None )
+        # Options are defined from a data table loaded by the app
+        self.tool_data_table = None
+        self.missing_tool_data_table_name = None
+        if tool_data_table_name:
+            app = tool_param.tool.app
+            if tool_data_table_name in app.tool_data_tables:
+                self.tool_data_table = app.tool_data_tables[ tool_data_table_name ]
+                # Column definitions are optional, but if provided override those from the table
+                if elem.find( "column" ) is not None:
+                    self.parse_column_definitions( elem )
+                else:
+                    self.columns = self.tool_data_table.columns
+                # Set self.missing_index_file if the index file to
+                # which the tool_data_table refers does not exist.
+                if self.tool_data_table.missing_index_file:
+                    self.missing_index_file = self.tool_data_table.missing_index_file
+            else:
+                self.missing_tool_data_table_name = tool_data_table_name
+                log.warning( "Data table named '%s' is required by tool but not configured" % tool_data_table_name )
+        # Options are defined by parsing tabular text data from a data file
+        # on disk, a dataset, or the value of another parameter
+        elif data_file is not None or dataset_file is not None or from_parameter is not None:
+            self.parse_column_definitions( elem )
+            if data_file is not None:
+                data_file = data_file.strip()
+                if not os.path.isabs( data_file ):
+                    full_path = os.path.join( self.tool_param.tool.app.config.tool_data_path, data_file )
+                    if os.path.exists( full_path ):
+                        self.index_file = data_file
+                        self.file_fields = self.parse_file_fields( open( full_path ) )
+                    else:
+                        self.missing_index_file = data_file
+            elif dataset_file is not None:
+                self.dataset_ref_name = dataset_file
+                self.has_dataset_dependencies = True
+                self.converter_safe = False
+            elif from_parameter is not None:
+                transform_lines = elem.get( 'transform_lines', None )
+                self.file_fields = list( load_from_parameter( from_parameter, transform_lines ) )
+
+        # Load filters
+        for filter_elem in elem.findall( 'filter' ):
+            self.filters.append( Filter.from_element( self, filter_elem ) )
+
+        # Load Validators
+        for validator in elem.findall( 'validator' ):
+            self.validators.append( validation.Validator.from_element( self.tool_param, validator ) )
+
+        if self.dataset_ref_name:
+            tool_param.data_ref = self.dataset_ref_name
+
+    def parse_column_definitions( self, elem ):
+        for column_elem in elem.findall( 'column' ):
+            name = column_elem.get( 'name', None )
+            assert name is not None, "Required 'name' attribute missing from column def"
+            index = column_elem.get( 'index', None )
+            assert index is not None, "Required 'index' attribute missing from column def"
+            index = int( index )
+            self.columns[name] = index
+            if index > self.largest_index:
+                self.largest_index = index
+        assert 'value' in self.columns, "Required 'value' column missing from column def"
+        if 'name' not in self.columns:
+            self.columns['name'] = self.columns['value']
+
+    def parse_file_fields( self, reader ):
+        rval = []
+        field_count = None
+        for line in reader:
+            if line.startswith( '#' ) or ( self.line_startswith and not line.startswith( self.line_startswith ) ):
+                continue
+            line = line.rstrip( "\n\r" )
+            if line:
+                fields = line.split( self.separator )
+                if self.largest_index < len( fields ):
+                    if not field_count:
+                        field_count = len( fields )
+                    elif field_count != len( fields ):
+                        try:
+                            name = reader.name
+                        except AttributeError:
+                            name = "a configuration file"
+                        # Perhaps this should be an error, but even a warning is useful.
+                        log.warning( "Inconsistent number of fields (%i vs %i) in %s using separator %r, check line: %r" %
+                                  ( field_count, len( fields ), name, self.separator, line ) )
+                    rval.append( fields )
+        return rval
+
+    def get_dependency_names( self ):
+        """
+        Return the names of parameters these options depend on -- both data
+        and other param types.
+        """
+        rval = []
+        if self.dataset_ref_name:
+            rval.append( self.dataset_ref_name )
+        for filter in self.filters:
+            depend = filter.get_dependency_name()
+            if depend:
+                rval.append( depend )
+        return rval
+
+    def get_fields( self, trans, other_values ):
+        if self.dataset_ref_name:
+            dataset = other_values.get( self.dataset_ref_name, None )
+            if not dataset or not hasattr( dataset, 'file_name' ):
+                return []  # no valid dataset in history
+            # Ensure parsing dynamic options does not consume more than a megabyte's worth of memory.
+            path = dataset.file_name
+            if os.path.getsize( path ) < 1048576:
+                options = self.parse_file_fields( open( path ) )
+            else:
+                # Pass just the first megabyte to parse_file_fields.
+                import StringIO
+                log.warning( "Attempting to load options from large file, reading just first megabyte" )
+                contents = open( path, 'r' ).read( 1048576 )
+                options = self.parse_file_fields( StringIO.StringIO( contents ) )
+        elif self.tool_data_table:
+            options = self.tool_data_table.get_fields()
+        else:
+            options = list( self.file_fields )
+        for filter in self.filters:
+            options = filter.filter_options( options, trans, other_values )
+        return options
+
+    def get_fields_by_value( self, value, trans, other_values ):
+        """
+        Return a list of fields with column 'value' matching provided value.
+        """
+        rval = []
+        val_index = self.columns[ 'value' ]
+        for fields in self.get_fields( trans, other_values ):
+            if fields[ val_index ] == value:
+                rval.append( fields )
+        return rval
+
+    def get_field_by_name_for_value( self, field_name, value, trans, other_values ):
+        """
+        Get contents of field by name for specified value.
+        """
+        rval = []
+        if isinstance( field_name, int ):
+            field_index = field_name
+        else:
+            assert field_name in self.columns, "Requested '%s' column missing from column def" % field_name
+            field_index = self.columns[ field_name ]
+        if not isinstance( value, list ):
+            value = [value]
+        for val in value:
+            for fields in self.get_fields_by_value( val, trans, other_values ):
+                rval.append( fields[ field_index ] )
+        return rval
+
+    def get_options( self, trans, other_values ):
+        rval = []
+        if self.file_fields is not None or self.tool_data_table is not None or self.dataset_ref_name is not None:
+            options = self.get_fields( trans, other_values )
+            for fields in options:
+                rval.append( ( fields[self.columns['name']], fields[self.columns['value']], False ) )
+        else:
+            for filter in self.filters:
+                rval = filter.filter_options( rval, trans, other_values )
+        return rval
+
+    def column_spec_to_index( self, column_spec ):
+        """
+        Convert a column specification (as read from the config file) to an
+        index. A column specification can just be a number, a column name, or
+        a column alias.
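+
+        Example (illustrative): with columns {'name': 0, 'value': 1}, the
+        spec 'value' maps to index 1 and the spec '3' maps to index 3.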
+        """
+        # Name?
+        if column_spec in self.columns:
+            return self.columns[column_spec]
+        # Int?
+        return int( column_spec )
diff --git a/lib/galaxy/tools/parameters/grouping.py b/lib/galaxy/tools/parameters/grouping.py
new file mode 100644
index 0000000..4a5fc1b
--- /dev/null
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -0,0 +1,613 @@
+"""
+Constructs for grouping tool parameters
+"""
+
+import logging
+log = logging.getLogger( __name__ )
+
+import os
+import StringIO
+import unicodedata
+from six import text_type
+from galaxy.datatypes import sniff
+from galaxy.util import inflector
+from galaxy.util import relpath
+from galaxy.util import sanitize_for_filename
+from galaxy.util.bunch import Bunch
+from galaxy.util.expressions import ExpressionContext
+from galaxy.util.dictifiable import Dictifiable
+
+
+class Group( object, Dictifiable ):
+
+    dict_collection_visible_keys = ( 'name', 'type' )
+
+    def __init__( self ):
+        self.name = None
+
+    @property
+    def visible( self ):
+        return True
+
+    def value_to_basic( self, value, app ):
+        """
+        Convert value to a (possibly nested) representation using only basic
+        types (dict, list, tuple, string_types, int, long, float, bool, None)
+        """
+        return value
+
+    def value_from_basic( self, value, app, ignore_errors=False ):
+        """
+        Convert a basic representation as produced by `value_to_basic` back
+        into the preferred value form.
+        """
+        return value
+
+    def get_initial_value( self, trans, context ):
+        """
+        Return the initial state/value for this group
+        """
+        raise TypeError( "Not implemented" )
+
+    def to_dict( self, trans ):
+        group_dict = super( Group, self ).to_dict()
+        return group_dict
+
+
+class Repeat( Group ):
+
+    dict_collection_visible_keys = ( 'name', 'type', 'title', 'help', 'default', 'min', 'max' )
+    type = "repeat"
+
+    def __init__( self ):
+        Group.__init__( self )
+        self.title = None
+        self.inputs = None
+        self.help = None
+        self.default = 0
+        self.min = None
+        self.max = None
+
+    @property
+    def title_plural( self ):
+        return inflector.pluralize( self.title )
+
+    def label( self ):
+        return "Repeat (%s)" % self.title
+
+    def value_to_basic( self, value, app ):
+        rval = []
+        for d in value:
+            rval_dict = {}
+            # Propagate __index__
+            if '__index__' in d:
+                rval_dict['__index__'] = d['__index__']
+            for input in self.inputs.itervalues():
+                rval_dict[ input.name ] = input.value_to_basic( d[input.name], app )
+            rval.append( rval_dict )
+        return rval
+
+    def value_from_basic( self, value, app, ignore_errors=False ):
+        rval = []
+        try:
+            for i, d in enumerate( value ):
+                rval_dict = {}
+                # If the special __index__ key is not set, create it (for backward
+                # compatibility)
+                rval_dict['__index__'] = d.get( '__index__', i )
+                # Restore child inputs
+                for input in self.inputs.itervalues():
+                    if ignore_errors and input.name not in d:
+                        # If we do not have a value, and are ignoring errors, we simply
+                        # do nothing. There will be no value for the parameter in the
+                        # conditional's values dictionary.
+                        pass
+                    else:
+                        rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
+                rval.append( rval_dict )
+        except Exception as e:
+            if not ignore_errors:
+                raise e
+        return rval
+
+    def get_initial_value( self, trans, context ):
+        rval = []
+        for i in range( self.default ):
+            rval_dict = { '__index__': i}
+            for input in self.inputs.itervalues():
+                rval_dict[ input.name ] = input.get_initial_value( trans, context )
+            rval.append( rval_dict )
+        return rval
+
+    def to_dict( self, trans ):
+        repeat_dict = super( Repeat, self ).to_dict( trans )
+
+        def input_to_dict( input ):
+            return input.to_dict( trans )
+
+        repeat_dict[ "inputs" ] = map( input_to_dict, self.inputs.values() )
+        return repeat_dict
+
+
+class Section( Group ):
+
+    dict_collection_visible_keys = ( 'name', 'type', 'title', 'help', 'expanded')
+    type = "section"
+
+    def __init__( self ):
+        Group.__init__( self )
+        self.title = None
+        self.inputs = None
+        self.help = None
+        self.expanded = False
+
+    @property
+    def title_plural( self ):
+        return inflector.pluralize( self.title )
+
+    def label( self ):
+        return "Section (%s)" % self.title
+
+    def value_to_basic( self, value, app ):
+        rval = {}
+        for input in self.inputs.itervalues():
+            rval[ input.name ] = input.value_to_basic( value[input.name], app )
+        return rval
+
+    def value_from_basic( self, value, app, ignore_errors=False ):
+        rval = {}
+        try:
+            for input in self.inputs.itervalues():
+                if not ignore_errors or input.name in value:
+                    rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
+        except Exception as e:
+            if not ignore_errors:
+                raise e
+        return rval
+
+    def get_initial_value( self, trans, context ):
+        rval = {}
+        child_context = ExpressionContext( rval, context )
+        for child_input in self.inputs.itervalues():
+            rval[ child_input.name ] = child_input.get_initial_value( trans, child_context )
+        return rval
+
+    def to_dict( self, trans ):
+        section_dict = super( Section, self ).to_dict( trans )
+
+        def input_to_dict( input ):
+            return input.to_dict( trans )
+
+        section_dict[ "inputs" ] = map( input_to_dict, self.inputs.values() )
+        return section_dict
+
+
+class UploadDataset( Group ):
+    type = "upload_dataset"
+
+    def __init__( self ):
+        Group.__init__( self )
+        self.title = None
+        self.inputs = None
+        self.file_type_name = 'file_type'
+        self.default_file_type = 'txt'
+        self.file_type_to_ext = { 'auto': self.default_file_type }
+        self.metadata_ref = 'files_metadata'
+
+    def get_composite_dataset_name( self, context ):
+        # FIXME: HACK
+        # The special case of using 'base_name' metadata as the Dataset name needs to be handled in a general fashion, as defined within a particular Datatype.
+
+        # We get two different types of contexts here, one straight from submitted parameters, the other after being parsed into tool inputs
+        dataset_name = context.get('files_metadata|base_name', None )
+        if dataset_name is None:
+            dataset_name = context.get('files_metadata', {} ).get( 'base_name', None )
+        if dataset_name is None:
+            dataset_name = 'Uploaded Composite Dataset (%s)' % self.get_file_type( context )
+        return dataset_name
+
+    def get_file_base_name( self, context ):
+        fd = context.get('files_metadata|base_name', 'Galaxy_Composite_file')
+        return fd
+
+    def get_file_type( self, context ):
+        return context.get( self.file_type_name, self.default_file_type )
+
+    def get_datatype_ext( self, trans, context ):
+        ext = self.get_file_type( context )
+        if ext in self.file_type_to_ext:
+            ext = self.file_type_to_ext[ext]  # when using autodetect, we will use composite info from 'text', i.e. only the main file
+        return ext
+
+    def get_datatype( self, trans, context ):
+        ext = self.get_datatype_ext( trans, context )
+        return trans.app.datatypes_registry.get_datatype_by_extension( ext )
+
+    @property
+    def title_plural( self ):
+        return inflector.pluralize(self.title)
+
+    def group_title( self, context ):
+        return "%s (%s)" % ( self.title, context.get( self.file_type_name, self.default_file_type ) )
+
+    def title_by_index( self, trans, index, context ):
+        d_type = self.get_datatype( trans, context )
+        for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ):
+            if i == index:
+                rval = composite_name
+                if composite_file.description:
+                    rval = "%s (%s)" % ( rval, composite_file.description )
+                if composite_file.optional:
+                    rval = "%s [optional]" % rval
+                return rval
+        return None
+
+    def value_to_basic( self, value, app ):
+        rval = []
+        for d in value:
+            rval_dict = {}
+            # Propagate __index__
+            if '__index__' in d:
+                rval_dict['__index__'] = d['__index__']
+            for input in self.inputs.itervalues():
+                rval_dict[ input.name ] = input.value_to_basic( d[input.name], app )
+            rval.append( rval_dict )
+        return rval
+
+    def value_from_basic( self, value, app, ignore_errors=False ):
+        rval = []
+        for i, d in enumerate( value ):
+            rval_dict = {}
+            # If the special __index__ key is not set, create it (for backward
+            # compatibility)
+            rval_dict['__index__'] = d.get( '__index__', i )
+            # Restore child inputs
+            for input in self.inputs.itervalues():
+                if ignore_errors and input.name not in d:  # this wasn't tested
+                    rval_dict[ input.name ] = input.get_initial_value( None, d )
+                else:
+                    rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
+            rval.append( rval_dict )
+        return rval
+
+    def get_initial_value( self, trans, context ):
+        d_type = self.get_datatype( trans, context )
+        rval = []
+        for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ):
+            rval_dict = {}
+            rval_dict['__index__'] = i  # create __index__
+            for input in self.inputs.itervalues():
+                rval_dict[ input.name ] = input.get_initial_value( trans, context )
+            rval.append( rval_dict )
+        return rval
+
+    def get_uploaded_datasets( self, trans, context, override_name=None, override_info=None ):
+        def get_data_file_filename( data_file, override_name=None, override_info=None, purge=True ):
+            dataset_name = override_name
+            dataset_info = override_info
+
+            def get_file_name( file_name ):
+                file_name = file_name.split( '\\' )[-1]
+                file_name = file_name.split( '/' )[-1]
+                return file_name
+            try:
+                # Use the existing file
+                if not dataset_name and 'filename' in data_file:
+                    dataset_name = get_file_name( data_file['filename'] )
+                if not dataset_info:
+                    dataset_info = 'uploaded file'
+                return Bunch( type='file', path=data_file['local_filename'], name=dataset_name, purge_source=purge )
+            except Exception:
+                # The uploaded file should've been persisted by the upload tool action
+                return Bunch( type=None, path=None, name=None )
+
+        def get_url_paste_urls_or_filename( group_incoming, override_name=None, override_info=None ):
+            url_paste_file = group_incoming.get( 'url_paste', None )
+            if url_paste_file is not None:
+                url_paste = open( url_paste_file, 'r' ).read( 1024 )
+                if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ) or url_paste.lstrip().lower().startswith( 'https://' ):
+                    url_paste = url_paste.replace( '\r', '' ).split( '\n' )
+                    for line in url_paste:
+                        line = line.strip()
+                        if line:
+                            if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ) and not line.lower().startswith( 'https://' ):
+                                continue  # non-url line, ignore
+                            dataset_name = override_name
+                            if not dataset_name:
+                                dataset_name = line
+                            dataset_info = override_info
+                            if not dataset_info:
+                                dataset_info = 'uploaded url'
+                            yield Bunch( type='url', path=line, name=dataset_name )
+                else:
+                    dataset_name = dataset_info = precreated_name = 'Pasted Entry'  # we need to differentiate between various url pastes here
+                    if override_name:
+                        dataset_name = override_name
+                    if override_info:
+                        dataset_info = override_info
+                    yield Bunch( type='file', path=url_paste_file, name=precreated_name )
+
+        def get_one_filename( context ):
+            data_file = context['file_data']
+            url_paste = context['url_paste']
+            ftp_files = context['ftp_files']
+            name = context.get( 'NAME', None )
+            info = context.get( 'INFO', None )
+            uuid = context.get( 'uuid', None ) or None  # Turn '' to None
+            warnings = []
+            to_posix_lines = False
+            if context.get( 'to_posix_lines', None ) not in [ "None", None, False ]:
+                to_posix_lines = True
+            space_to_tab = False
+            if context.get( 'space_to_tab', None ) not in [ "None", None, False ]:
+                space_to_tab = True
+            file_bunch = get_data_file_filename( data_file, override_name=name, override_info=info )
+            if file_bunch.path:
+                if url_paste is not None and url_paste.strip():
+                    warnings.append( "All file contents specified in the paste box were ignored." )
+                if ftp_files:
+                    warnings.append( "All FTP uploaded file selections were ignored." )
+            elif url_paste is not None and url_paste.strip():  # we need to use url_paste
+                for file_bunch in get_url_paste_urls_or_filename( context, override_name=name, override_info=info ):
+                    if file_bunch.path:
+                        break
+                if file_bunch.path and ftp_files is not None:
+                    warnings.append( "All FTP uploaded file selections were ignored." )
+            elif ftp_files is not None and trans.user is not None:  # look for files uploaded via FTP
+                user_ftp_dir = trans.user_ftp_dir
+                for ( dirpath, dirnames, filenames ) in os.walk( user_ftp_dir ):
+                    for filename in filenames:
+                        for ftp_filename in ftp_files:
+                            if ftp_filename == filename:
+                                path = relpath( os.path.join( dirpath, filename ), user_ftp_dir )
+                                if not os.path.islink( os.path.join( dirpath, filename ) ):
+                                    ftp_data_file = { 'local_filename' : os.path.abspath( os.path.join( user_ftp_dir, path ) ),
+                                                      'filename' : os.path.basename( path ) }
+                                    purge = getattr(trans.app.config, 'ftp_upload_purge', True)
+                                    file_bunch = get_data_file_filename(
+                                        ftp_data_file,
+                                        override_name=name,
+                                        override_info=info,
+                                        purge=purge,
+                                    )
+                                    if file_bunch.path:
+                                        break
+                        if file_bunch.path:
+                            break
+                    if file_bunch.path:
+                        break
+            file_bunch.to_posix_lines = to_posix_lines
+            file_bunch.space_to_tab = space_to_tab
+            file_bunch.uuid = uuid
+            return file_bunch, warnings
+
+        def get_filenames( context ):
+            rval = []
+            data_file = context['file_data']
+            ftp_files = context['ftp_files']
+            uuid = context.get( 'uuid', None ) or None  # Turn '' to None
+            name = context.get( 'NAME', None )
+            info = context.get( 'INFO', None )
+            to_posix_lines = False
+            if context.get( 'to_posix_lines', None ) not in [ "None", None, False ]:
+                to_posix_lines = True
+            space_to_tab = False
+            if context.get( 'space_to_tab', None ) not in [ "None", None, False ]:
+                space_to_tab = True
+            file_bunch = get_data_file_filename( data_file, override_name=name, override_info=info )
+            file_bunch.uuid = uuid
+            if file_bunch.path:
+                file_bunch.to_posix_lines = to_posix_lines
+                file_bunch.space_to_tab = space_to_tab
+                rval.append( file_bunch )
+            for file_bunch in get_url_paste_urls_or_filename( context, override_name=name, override_info=info ):
+                if file_bunch.path:
+                    file_bunch.uuid = uuid
+                    file_bunch.to_posix_lines = to_posix_lines
+                    file_bunch.space_to_tab = space_to_tab
+                    rval.append( file_bunch )
+            # look for files uploaded via FTP
+            valid_files = []
+            if ftp_files is not None:
+                # Normalize input paths to ensure utf-8 encoding is normal form c.
+                # This allows for comparison when the filesystem uses a different encoding than the browser.
+                ftp_files = [unicodedata.normalize('NFC', f) for f in ftp_files if isinstance(f, text_type)]
+                if trans.user is None:
+                    log.warning( 'Anonymous user passed values in ftp_files: %s' % ftp_files )
+                    ftp_files = []
+                    # TODO: warning to the user (could happen if session has become invalid)
+                else:
+                    user_ftp_dir = trans.user_ftp_dir
+                    for ( dirpath, dirnames, filenames ) in os.walk( user_ftp_dir ):
+                        for filename in filenames:
+                            path = relpath( os.path.join( dirpath, filename ), user_ftp_dir )
+                            if not os.path.islink( os.path.join( dirpath, filename ) ):
+                                # Normalize filesystem paths
+                                if isinstance(path, text_type):
+                                    valid_files.append(unicodedata.normalize('NFC', path ))
+                                else:
+                                    valid_files.append(path)
+
+            else:
+                ftp_files = []
+            for ftp_file in ftp_files:
+                if ftp_file not in valid_files:
+                    log.warning( 'User passed an invalid file path in ftp_files: %s' % ftp_file )
+                    continue
+                    # TODO: warning to the user (could happen if file is already imported)
+                ftp_data_file = { 'local_filename' : os.path.abspath( os.path.join( user_ftp_dir, ftp_file ) ),
+                                  'filename' : os.path.basename( ftp_file ) }
+                purge = getattr(trans.app.config, 'ftp_upload_purge', True)
+                file_bunch = get_data_file_filename( ftp_data_file, override_name=name, override_info=info, purge=purge )
+                if file_bunch.path:
+                    file_bunch.to_posix_lines = to_posix_lines
+                    file_bunch.space_to_tab = space_to_tab
+                    rval.append( file_bunch )
+            return rval
+        file_type = self.get_file_type( context )
+        d_type = self.get_datatype( trans, context )
+        dbkey = context.get( 'dbkey', None )
+        writable_files = d_type.writable_files
+        writable_files_offset = 0
+        groups_incoming = [ None for _ in writable_files ]
+        for group_incoming in context.get( self.name, [] ):
+            i = int( group_incoming['__index__'] )
+            groups_incoming[ i ] = group_incoming
+        if d_type.composite_type is not None:
+            # handle uploading of composite datatypes
+            # Only one Dataset can be created
+            dataset = Bunch()
+            dataset.type = 'composite'
+            dataset.file_type = file_type
+            dataset.dbkey = dbkey
+            dataset.datatype = d_type
+            dataset.warnings = []
+            dataset.metadata = {}
+            dataset.composite_files = {}
+            dataset.uuid = None
+            # load metadata
+            files_metadata = context.get( self.metadata_ref, {} )
+            metadata_name_substitution_default_dict = dict( [ ( composite_file.substitute_name_with_metadata, d_type.metadata_spec[ composite_file.substitute_name_with_metadata ].default ) for composite_file in d_type.composite_files.values() if composite_file.substitute_name_with_metadata ] )
+            for meta_name, meta_spec in d_type.metadata_spec.iteritems():
+                if meta_spec.set_in_upload:
+                    if meta_name in files_metadata:
+                        meta_value = files_metadata[ meta_name ]
+                        if meta_name in metadata_name_substitution_default_dict:
+                            meta_value = sanitize_for_filename( meta_value, default=metadata_name_substitution_default_dict[ meta_name ] )
+                        dataset.metadata[ meta_name ] = meta_value
+            dataset.precreated_name = dataset.name = self.get_composite_dataset_name( context )
+            if dataset.datatype.composite_type == 'auto_primary_file':
+                # replace sniff here with just creating an empty file
+                temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file( dataset ) ), prefix='upload_auto_primary_file' )
+                dataset.primary_file = temp_name
+                dataset.to_posix_lines = True
+                dataset.space_to_tab = False
+            else:
+                file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
+                writable_files_offset = 1
+                dataset.primary_file = file_bunch.path
+                dataset.to_posix_lines = file_bunch.to_posix_lines
+                dataset.space_to_tab = file_bunch.space_to_tab
+                dataset.warnings.extend( warnings )
+            if dataset.primary_file is None:  # remove this before finish, this should create an empty dataset
+                raise Exception( 'No primary dataset file was available for composite upload' )
+            keys = [ value.name for value in writable_files.values() ]
+            for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
+                key = keys[ i + writable_files_offset ]
+                if group_incoming is None and not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
+                    dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
+                    dataset.composite_files[ key ] = None
+                else:
+                    file_bunch, warnings = get_one_filename( group_incoming )
+                    dataset.warnings.extend( warnings )
+                    if file_bunch.path:
+                        dataset.composite_files[ key ] = file_bunch.__dict__
+                    else:
+                        dataset.composite_files[ key ] = None
+                        if not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
+                            dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
+            return [ dataset ]
+        else:
+            datasets = get_filenames( context[ self.name ][0] )
+            rval = []
+            for dataset in datasets:
+                dataset.file_type = file_type
+                dataset.datatype = d_type
+                dataset.ext = self.get_datatype_ext( trans, context )
+                dataset.dbkey = dbkey
+                rval.append( dataset )
+            return rval
+
+
+class Conditional( Group ):
+    type = "conditional"
+
+    def __init__( self ):
+        Group.__init__( self )
+        self.test_param = None
+        self.cases = []
+        self.value_ref = None
+        self.value_ref_in_group = True  # When our test_param is not part of the conditional Group, this is False
+
+    @property
+    def label( self ):
+        return "Conditional (%s)" % self.name
+
+    def get_current_case( self, value ):
+        # Convert value to user representation
+        str_value = self.test_param.to_param_dict_string( value )
+        # Find the matching case
+        for index, case in enumerate( self.cases ):
+            if str_value == case.value:
+                return index
+        raise ValueError( "No case matched value:", self.name, str_value )
+
+    def value_to_basic( self, value, app ):
+        rval = dict()
+        rval[ self.test_param.name ] = self.test_param.value_to_basic( value[ self.test_param.name ], app )
+        current_case = rval[ '__current_case__' ] = self.get_current_case( value[ self.test_param.name ] )
+        for input in self.cases[ current_case ].inputs.itervalues():
+            if input.name in value:  # parameter might be absent in unverified workflow
+                rval[ input.name ] = input.value_to_basic( value[ input.name ], app )
+        return rval
+
+    def value_from_basic( self, value, app, ignore_errors=False ):
+        rval = dict()
+        try:
+            rval[ self.test_param.name ] = self.test_param.value_from_basic( value.get( self.test_param.name ), app, ignore_errors )
+            current_case = rval[ '__current_case__' ] = self.get_current_case( rval[ self.test_param.name ] )
+            # Inputs associated with current case
+            for input in self.cases[ current_case ].inputs.itervalues():
+                # If we do not have a value, and are ignoring errors, we simply
+                # do nothing. There will be no value for the parameter in the
+                # conditional's values dictionary.
+                if not ignore_errors or input.name in value:
+                    rval[ input.name ] = input.value_from_basic( value[ input.name ], app, ignore_errors )
+        except Exception as e:
+            if not ignore_errors:
+                raise e
+        return rval
+
+    def get_initial_value( self, trans, context ):
+        # State for a conditional is a plain dictionary.
+        rval = {}
+        # Get the default value for the 'test element' and use it
+        # to determine the current case
+        test_value = self.test_param.get_initial_value( trans, context )
+        current_case = self.get_current_case( test_value )
+        # Store the current case in a special value
+        rval['__current_case__'] = current_case
+        # Store the value of the test element
+        rval[ self.test_param.name ] = test_value
+        # Fill in state for selected case
+        child_context = ExpressionContext( rval, context )
+        for child_input in self.cases[current_case].inputs.itervalues():
+            rval[ child_input.name ] = child_input.get_initial_value( trans, child_context )
+        return rval
+
+    def to_dict( self, trans ):
+        cond_dict = super( Conditional, self ).to_dict( trans )
+
+        def nested_to_dict( input ):
+            return input.to_dict( trans )
+
+        cond_dict[ "cases" ] = map( nested_to_dict, self.cases )
+        cond_dict[ "test_param" ] = nested_to_dict( self.test_param )
+        return cond_dict
+
+
+class ConditionalWhen( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'value', )
+
+    def __init__( self ):
+        self.value = None
+        self.inputs = None
+
+    def to_dict( self, trans ):
+        when_dict = super( ConditionalWhen, self ).to_dict()
+
+        def input_to_dict( input ):
+            return input.to_dict( trans )
+
+        when_dict[ "inputs" ] = map( input_to_dict, self.inputs.values() )
+        return when_dict
diff --git a/lib/galaxy/tools/parameters/history_query.py b/lib/galaxy/tools/parameters/history_query.py
new file mode 100644
index 0000000..b0057e3
--- /dev/null
+++ b/lib/galaxy/tools/parameters/history_query.py
@@ -0,0 +1,51 @@
+import logging
+log = logging.getLogger( __name__ )
+
+
+class HistoryQuery( object ):
+    """ An object for describing the collections to pull out of a history,
+    used by DataCollectionToolParameter.
+    """
+
+    def __init__( self, **kwargs ):
+        self.collection_type_descriptions = kwargs.get( "collection_type_descriptions", None )
+
+    @staticmethod
+    def from_parameter( param, collection_type_descriptions ):
+        """ Take in a tool parameter element.
+        """
+        collection_types = param.collection_types
+        if collection_types:
+            collection_type_descriptions = [collection_type_descriptions.for_collection_type(t) for t in collection_types]
+            # Place higher dimension descriptions first so subcollection mapping
+            # (until we expose it to the user) will default to providing tool as much
+            # data as possible. So a list:list:paired mapped to a tool that takes
+            # list,paired,list:paired - will map over list:paired and create a flat list.
+            collection_type_descriptions = sorted( collection_type_descriptions, key=lambda t: t.dimension, reverse=True )
+        else:
+            collection_type_descriptions = None
+
+        kwargs = dict( collection_type_descriptions=collection_type_descriptions )
+        return HistoryQuery( **kwargs )
+
+    def direct_match( self, hdca ):
+        collection_type_descriptions = self.collection_type_descriptions
+        if collection_type_descriptions is not None:
+            for collection_type_description in collection_type_descriptions:
+                if collection_type_description.can_match_type( hdca.collection.collection_type ):
+                    return True
+            return False
+
+        return True
+
+    def can_map_over( self, hdca ):
+        collection_type_descriptions = self.collection_type_descriptions
+        if collection_type_descriptions is None:
+            return False
+
+        hdca_collection_type = hdca.collection.collection_type
+        for collection_type_description in collection_type_descriptions:
+            # See note about the way this is sorted above.
+            if collection_type_description.is_subcollection_of_type( hdca_collection_type ):
+                return collection_type_description
+        return False
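+
+
+# Illustrative usage (hypothetical objects, sketch only):
+#
+#     query = HistoryQuery.from_parameter( param, type_descriptions )
+#     if query.direct_match( hdca ):
+#         pass  # offer the HDCA directly
+#     elif query.can_map_over( hdca ):
+#         pass  # offer the HDCA for subcollection mapping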
diff --git a/lib/galaxy/tools/parameters/input_translation.py b/lib/galaxy/tools/parameters/input_translation.py
new file mode 100644
index 0000000..5e31cfa
--- /dev/null
+++ b/lib/galaxy/tools/parameters/input_translation.py
@@ -0,0 +1,109 @@
+"""
+Tool Input Translation.
+"""
+
+import logging
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+
+
+class ToolInputTranslator( object ):
+    """
+    Handles Tool input translation.
+    This is used for data source tools.
+
+    >>> from galaxy.util import Params
+    >>> from xml.etree.ElementTree import XML
+    >>> translator = ToolInputTranslator.from_element( XML(
+    ... '''
+    ... <request_param_translation>
+    ...  <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+    ...  <request_param galaxy_name="URL" remote_name="URL" missing="" >
+    ...     <append_param separator="&" first_separator="?" join="=">
+    ...         <value name="_export" missing="1" />
+    ...         <value name="GALAXY_URL" missing="0" />
+    ...     </append_param>
+    ...  </request_param>
+    ...  <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+    ...  <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
+    ...  <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
+    ...  <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
+    ...  <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="tabular" >
+    ...   <value_translation>
+    ...    <value galaxy_value="tabular" remote_value="primaryTable" />
+    ...    <value galaxy_value="tabular" remote_value="selectedFields" />
+    ...    <value galaxy_value="wig" remote_value="wigData" />
+    ...    <value galaxy_value="interval" remote_value="tab" />
+    ...    <value galaxy_value="html" remote_value="hyperlinks" />
+    ...    <value galaxy_value="fasta" remote_value="sequence" />
+    ...   </value_translation>
+    ...  </request_param>
+    ... </request_param_translation>
+    ... ''' ) )
+    >>> params = Params( { 'db':'hg17', 'URL':'URL_value', 'org':'Human', 'hgta_outputType':'primaryTable'  } )
+    >>> translator.translate( params )
+    >>> print sorted(list(params.__dict__.keys()))
+    ['URL', 'URL_method', 'data_type', 'db', 'dbkey', 'description', 'hgta_outputType', 'org', 'organism', 'table']
+    >>> params.get('URL', None) in ['URL_value?GALAXY_URL=0&_export=1', 'URL_value?_export=1&GALAXY_URL=0']
+    True
+    """
+    @classmethod
+    def from_element( cls, elem ):
+        """Loads the proper filter by the type attribute of elem"""
+        rval = ToolInputTranslator()
+        for req_param in elem.findall( "request_param" ):
+            # req_param tags must look like <request_param galaxy_name="dbkey" remote_name="GENOME" missing="" />
+            # trans_list = []
+            remote_name = req_param.get( "remote_name" )
+            galaxy_name = req_param.get( "galaxy_name" )
+            missing = req_param.get( "missing" )
+            value_trans = {}
+            append_param = None
+
+            value_trans_elem = req_param.find( 'value_translation' )
+            if value_trans_elem is not None:
+                for value_elem in value_trans_elem.findall( 'value' ):
+                    remote_value = value_elem.get( "remote_value" )
+                    galaxy_value = value_elem.get( "galaxy_value" )
+                    if None not in [ remote_value, galaxy_value ]:
+                        value_trans[ remote_value ] = galaxy_value
+
+            append_param_elem = req_param.find( "append_param" )
+            if append_param_elem is not None:
+                separator = append_param_elem.get( 'separator', ',' )
+                first_separator = append_param_elem.get( 'first_separator', None )
+                join_str = append_param_elem.get( 'join', '=' )
+                append_dict = {}
+                for value_elem in append_param_elem.findall( 'value' ):
+                    value_name = value_elem.get( 'name' )
+                    value_missing = value_elem.get( 'missing' )
+                    if None not in [ value_name, value_missing ]:
+                        append_dict[ value_name ] = value_missing
+                append_param = Bunch( separator=separator, first_separator=first_separator, join_str=join_str, append_dict=append_dict )
+
+            rval.param_trans_dict[ remote_name ] = Bunch( galaxy_name=galaxy_name, missing=missing, value_trans=value_trans, append_param=append_param )
+
+        return rval
+
+    def __init__( self ):
+        self.param_trans_dict = {}
+
+    def translate( self, params ):
+        """
+        update params in-place
+        """
+        for remote_name, translator in self.param_trans_dict.iteritems():
+            galaxy_name = translator.galaxy_name  # NB: if a param by name galaxy_name is provided, it is always thrown away unless galaxy_name == remote_name
+            value = params.get( remote_name, translator.missing )  # get value from input params, or use default value specified in tool config
+            if translator.value_trans and value in translator.value_trans:
+                value = translator.value_trans[ value ]
+            if translator.append_param:
+                for param_name, missing_value in translator.append_param.append_dict.iteritems():
+                    param_value = params.get( param_name, missing_value )
+                    if translator.append_param.first_separator and translator.append_param.first_separator not in value:
+                        sep = translator.append_param.first_separator
+                    else:
+                        sep = translator.append_param.separator
+                    value += '%s%s%s%s' % ( sep, param_name, translator.append_param.join_str, param_value )
+            params.update( { galaxy_name: value } )
diff --git a/lib/galaxy/tools/parameters/meta.py b/lib/galaxy/tools/parameters/meta.py
new file mode 100644
index 0000000..907889b
--- /dev/null
+++ b/lib/galaxy/tools/parameters/meta.py
@@ -0,0 +1,159 @@
+from galaxy.util import permutations
+from galaxy import model
+from galaxy import util
+from galaxy import exceptions
+import itertools
+import copy
+import logging
+log = logging.getLogger( __name__ )
+
+
+def expand_workflow_inputs( inputs ):
+    """
+    Expands incoming encoded multiple payloads into the set of all individual payload combinations.
+    >>> params, param_keys = expand_workflow_inputs( {'1': {'input': {'batch': True, 'product': True, 'values': [{'hid': '1'}, {'hid': '2'}] }}} )
+    >>> print [ "%s" % ( p[ '1' ][ 'input' ][ 'hid' ] ) for p in params ]
+    ['1', '2']
+    >>> params, param_keys = expand_workflow_inputs( {'1': {'input': {'batch': True, 'values': [{'hid': '1'}, {'hid': '2'}] }}} )
+    >>> print [ "%s" % ( p[ '1' ][ 'input' ][ 'hid' ] ) for p in params ]
+    ['1', '2']
+    >>> params, param_keys = expand_workflow_inputs( {'1': {'input': {'batch': True, 'values': [{'hid': '1'}, {'hid': '2'}] }}, '2': {'input': {'batch': True, 'values': [{'hid': '3'}, {'hid': '4'}] }}} )
+    >>> print [ "%s%s" % ( p[ '1' ][ 'input' ][ 'hid' ], p[ '2' ][ 'input' ][ 'hid' ] ) for p in params ]
+    ['13', '24']
+    >>> params, param_keys = expand_workflow_inputs( {'1': {'input': {'batch': True, 'product': True, 'values': [{'hid': '1'}, {'hid': '2'}] }}, '2': {'input': {'batch': True, 'values': [{'hid': '3'}, {'hid': '4'}, {'hid': '5'}] }}} )
+    >>> print [ "%s%s" % ( p[ '1' ][ 'input' ][ 'hid' ], p[ '2' ][ 'input' ][ 'hid' ] ) for p in params ]
+    ['13', '23', '14', '24', '15', '25']
+    >>> params, param_keys = expand_workflow_inputs( {'1': {'input': {'batch': True, 'product': True, 'values': [{'hid': '1'}, {'hid': '2'}] }}, '2': {'input': {'batch': True, 'product': True, 'values': [{'hid': '3'}, {'hid': '4'}, {'hid': '5'}] }}, '3': {'input': {'batch': True, 'product': True, 'values': [{'hid': '6'}, {'hid': '7'}, {'hid': '8'}] }}} )
+    >>> print [ "%s%s%s" % ( p[ '1' ][ 'input' ][ 'hid' ], p[ '2' ][ 'input' ][ 'hid' ], p[ '3' ][ 'input' ][ 'hid' ] ) for p in params ]
+    ['136', '137', '138', '146', '147', '148', '156', '157', '158', '236', '237', '238', '246', '247', '248', '256', '257', '258']
+    """
+    linked_n = None
+    linked = []
+    product = []
+    linked_keys = []
+    product_keys = []
+    for step_id, step in sorted( inputs.items() ):
+        for key, value in sorted( step.items() ):
+            if isinstance( value, dict ) and 'batch' in value and value[ 'batch' ] is True and 'values' in value and isinstance( value[ 'values' ], list ):
+                nval = len( value[ 'values' ] )
+                if 'product' in value and value[ 'product' ] is True:
+                    product.append( value[ 'values' ] )
+                    product_keys.append( ( step_id, key ) )
+                else:
+                    if linked_n is None:
+                        linked_n = nval
+                    elif linked_n != nval or nval == 0:
+                        raise exceptions.RequestParameterInvalidException( 'Failed to match linked batch selections. Please select equal number of data files.' )
+                    linked.append( value[ 'values' ] )
+                    linked_keys.append( ( step_id, key ) )
+    params = []
+    params_keys = []
+    linked = linked or [ [ None ] ]
+    product = product or [ [ None ] ]
+    linked_keys = linked_keys or [ ( None, None ) ]
+    product_keys = product_keys or [ ( None, None ) ]
+    for linked_values, product_values in itertools.product( *[ zip( *linked ), itertools.product( *product ) ] ):
+        new_params = copy.deepcopy( inputs )
+        new_keys = []
+        for ( step_id, key ), value in zip( linked_keys, linked_values ) + zip( product_keys, product_values ):
+            if step_id is not None:
+                new_params[ step_id ][ key ] = value
+                new_keys.append( value[ 'hid' ] )
+        params_keys.append( new_keys )
+        params.append( new_params )
+    return params, params_keys
+
+
+def expand_meta_parameters( trans, tool, incoming ):
+    """
+    Take in a dictionary of raw incoming parameters and expand to a list
+    of expanded incoming parameters (one set of parameters per tool
+    execution).
+    """
+
+    to_remove = []
+    for key in incoming.keys():
+        if key.endswith("|__identifier__"):
+            to_remove.append(key)
+    for key in to_remove:
+        incoming.pop(key)
+
+    def classifier( input_key ):
+        value = incoming[ input_key ]
+        if isinstance( value, dict ) and 'values' in value:
+            # Explicit meta wrapper for inputs...
+            is_batch = value.get( 'batch', False )
+            is_linked = value.get( 'linked', True )
+            if is_batch and is_linked:
+                classification = permutations.input_classification.MATCHED
+            elif is_batch:
+                classification = permutations.input_classification.MULTIPLIED
+            else:
+                classification = permutations.input_classification.SINGLE
+            if __collection_multirun_parameter( value ):
+                collection_value = value[ 'values' ][ 0 ]
+                values = __expand_collection_parameter( trans, input_key, collection_value, collections_to_match, linked=is_linked )
+            else:
+                values = value[ 'values' ]
+        else:
+            classification = permutations.input_classification.SINGLE
+            values = value
+        return classification, values
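+    # e.g. a value { 'batch': True, 'linked': True, 'values': [...] } is
+    # classified MATCHED (inputs expanded in parallel), with 'linked': False
+    # it is MULTIPLIED (cross-product), and plain values are SINGLE.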
+
+    from galaxy.dataset_collections import matching
+    collections_to_match = matching.CollectionsToMatch()
+
+    # Keep an unexpanded copy of the incoming parameters so that multirun
+    # keys can be replaced by expand_multi_inputs.
+    incoming_template = incoming.copy()
+
+    expanded_incomings = permutations.expand_multi_inputs( incoming_template, classifier )
+    if collections_to_match.has_collections():
+        collection_info = trans.app.dataset_collections_service.match_collections( collections_to_match )
+    else:
+        collection_info = None
+    return expanded_incomings, collection_info
+
+
+def __expand_collection_parameter( trans, input_key, incoming_val, collections_to_match, linked=False ):
+    # For a subcollection multirun of a data_collection param the value will
+    # be "hdca_id|subcollection_type"; otherwise it will just be hdca_id
+    if "|" in incoming_val:
+        encoded_hdc_id, subcollection_type = incoming_val.split( "|", 1 )
+    else:
+        try:
+            src = incoming_val[ "src" ]
+            if src != "hdca":
+                raise exceptions.ToolMetaParameterException( "Invalid dataset collection source type %s" % src )
+            encoded_hdc_id = incoming_val[ "id" ]
+            subcollection_type = incoming_val.get( 'map_over_type', None )
+        except TypeError:
+            encoded_hdc_id = incoming_val
+            subcollection_type = None
+    hdc_id = trans.app.security.decode_id( encoded_hdc_id )
+    hdc = trans.sa_session.query( model.HistoryDatasetCollectionAssociation ).get( hdc_id )
+    collections_to_match.add( input_key, hdc, subcollection_type=subcollection_type, linked=linked )
+    if subcollection_type is not None:
+        from galaxy.dataset_collections import subcollections
+        subcollection_elements = subcollections.split_dataset_collection_instance( hdc, subcollection_type )
+        return subcollection_elements
+    else:
+        hdas = []
+        for element in hdc.collection.dataset_elements:
+            hda = element.dataset_instance
+            hda.element_identifier = element.element_identifier
+            hdas.append( hda )
+        return hdas
+
+
+def __collection_multirun_parameter( value ):
+    is_batch = value.get( 'batch', False )
+    if not is_batch:
+        return False
+
+    batch_values = util.listify( value[ 'values' ] )
+    if len( batch_values ) == 1:
+        batch_over = batch_values[ 0 ]
+        if isinstance( batch_over, dict ) and ('src' in batch_over) and (batch_over[ 'src' ] == 'hdca'):
+            return True
+    return False
diff --git a/lib/galaxy/tools/parameters/output_collect.py b/lib/galaxy/tools/parameters/output_collect.py
new file mode 100644
index 0000000..a34fa5c
--- /dev/null
+++ b/lib/galaxy/tools/parameters/output_collect.py
@@ -0,0 +1,526 @@
+""" Code allowing tools to define extra files associated with an output datset.
+"""
+import os
+import re
+import operator
+import glob
+import json
+
+from galaxy import jobs
+from galaxy import util
+from galaxy.util import odict
+from galaxy.util import ExecutionTimer
+from galaxy.tools.parser.output_collection_def import (
+    DEFAULT_DATASET_COLLECTOR_DESCRIPTION,
+    INPUT_DBKEY_TOKEN,
+)
+
+DATASET_ID_TOKEN = "DATASET_ID"
+
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+def collect_dynamic_collections(
+    tool,
+    output_collections,
+    job_working_directory,
+    inp_data={},
+    job=None,
+    input_dbkey="?",
+):
+    collections_service = tool.app.dataset_collections_service
+    job_context = JobContext(
+        tool,
+        job,
+        job_working_directory,
+        inp_data,
+        input_dbkey,
+    )
+
+    for name, has_collection in output_collections.items():
+        if name not in tool.output_collections:
+            continue
+        output_collection_def = tool.output_collections[ name ]
+        if not output_collection_def.dynamic_structure:
+            continue
+
+        # Could be HDCA for normal jobs or a DC for mapping
+        # jobs.
+        if hasattr(has_collection, "collection"):
+            collection = has_collection.collection
+        else:
+            collection = has_collection
+
+        try:
+            collection_builder = collections_service.collection_builder_for(
+                collection
+            )
+            job_context.populate_collection_elements(
+                collection,
+                collection_builder,
+                output_collection_def,
+            )
+            collection_builder.populate()
+        except Exception:
+            log.exception("Problem gathering output collection.")
+            collection.handle_population_failed("Problem building datasets for collection.")
+
+
+class JobContext( object ):
+
+    def __init__( self, tool, job, job_working_directory, inp_data, input_dbkey ):
+        self.inp_data = inp_data
+        self.input_dbkey = input_dbkey
+        self.app = tool.app
+        self.sa_session = tool.sa_session
+        self.job = job
+        self.job_working_directory = job_working_directory
+
+    @property
+    def permissions( self ):
+        inp_data = self.inp_data
+        existing_datasets = [ inp for inp in inp_data.values() if inp ]
+        if existing_datasets:
+            permissions = self.app.security_agent.guess_derived_permissions_for_datasets( existing_datasets )
+        else:
+            # No valid inputs, we will use history defaults
+            permissions = self.app.security_agent.history_get_default_permissions( self.job.history )
+        return permissions
+
+    def find_files( self, collection, dataset_collectors ):
+        filenames = odict.odict()
+        for path, extra_file_collector in walk_over_extra_files( dataset_collectors, self.job_working_directory, collection ):
+            filenames[ path ] = extra_file_collector
+        return filenames
+
+    def populate_collection_elements( self, collection, root_collection_builder, output_collection_def ):
+        # TODO: allow configurable sorting.
+        #    <sort by="lexical" /> <!-- default -->
+        #    <sort by="reverse_lexical" />
+        #    <sort regex="example.(\d+).fastq" by="1:numerical" />
+        #    <sort regex="part_(\d+)_sample_([^_]+).fastq" by="2:lexical,1:numerical" />
+        dataset_collectors = map(dataset_collector, output_collection_def.dataset_collector_descriptions)
+        filenames = self.find_files( collection, dataset_collectors )
+
+        element_datasets = []
+        for filename, extra_file_collector in filenames.iteritems():
+            create_dataset_timer = ExecutionTimer()
+            fields_match = extra_file_collector.match( collection, os.path.basename( filename ) )
+            if not fields_match:
+                raise Exception( "Problem parsing metadata fields for file %s" % filename )
+            element_identifiers = fields_match.element_identifiers
+            designation = fields_match.designation
+            visible = fields_match.visible
+            ext = fields_match.ext
+            dbkey = fields_match.dbkey
+            if dbkey == INPUT_DBKEY_TOKEN:
+                dbkey = self.input_dbkey
+
+            # Create new primary dataset
+            name = fields_match.name or designation
+
+            dataset = self.create_dataset(
+                ext=ext,
+                designation=designation,
+                visible=visible,
+                dbkey=dbkey,
+                name=name,
+                filename=filename,
+                metadata_source_name=output_collection_def.metadata_source,
+            )
+            log.debug(
+                "(%s) Created dynamic collection dataset for path [%s] with element identifier [%s] for output [%s] %s",
+                self.job.id,
+                filename,
+                designation,
+                output_collection_def.name,
+                create_dataset_timer,
+            )
+            element_datasets.append((element_identifiers, dataset))
+
+        app = self.app
+        sa_session = self.sa_session
+        job = self.job
+
+        if job:
+            add_datasets_timer = ExecutionTimer()
+            job.history.add_datasets(sa_session, [d for (ei, d) in element_datasets])
+            log.debug(
+                "(%s) Add dynamic collection datsets to history for output [%s] %s",
+                self.job.id,
+                output_collection_def.name,
+                add_datasets_timer,
+            )
+
+        for (element_identifiers, dataset) in element_datasets:
+            current_builder = root_collection_builder
+            for element_identifier in element_identifiers[:-1]:
+                current_builder = current_builder.get_level(element_identifier)
+            current_builder.add_dataset( element_identifiers[-1], dataset )
+
+            # Associate new dataset with job
+            if job:
+                element_identifier_str = ":".join(element_identifiers)
+                # Below was changed from '__new_primary_file_%s|%s__' % ( name, designation )
+                assoc = app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s|%s__' % ( name, element_identifier_str ), dataset )
+                assoc.job = self.job
+                sa_session.add( assoc )
+
+            dataset.raw_set_dataset_state('ok')
+
+        sa_session.flush()
+
+    def create_dataset(
+        self,
+        ext,
+        designation,
+        visible,
+        dbkey,
+        name,
+        filename,
+        metadata_source_name,
+    ):
+        app = self.app
+        sa_session = self.sa_session
+
+        primary_data = _new_hda(app, sa_session, ext, designation, visible, dbkey, self.permissions)
+
+        # Copy metadata from one of the inputs if requested.
+        metadata_source = None
+        if metadata_source_name:
+            metadata_source = self.inp_data[ metadata_source_name ]
+
+        sa_session.flush()
+        # Move data from temp location to dataset location
+        app.object_store.update_from_file(primary_data.dataset, file_name=filename, create=True)
+        primary_data.set_size()
+        # If the match specified a name, use it; otherwise generate one
+        # from the designation.
+        primary_data.name = name
+
+        if metadata_source:
+            primary_data.init_meta( copy_from=metadata_source )
+        else:
+            primary_data.init_meta()
+
+        return primary_data
+
+
+def collect_primary_datasets( tool, output, job_working_directory, input_ext, input_dbkey="?" ):
+    app = tool.app
+    sa_session = tool.sa_session
+    new_primary_datasets = {}
+    try:
+        galaxy_json_path = os.path.join( job_working_directory, "working", jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
+        # LEGACY: Remove in 17.XX
+        if not os.path.exists( galaxy_json_path ):
+            # Maybe this is a legacy job, use the job working directory instead
+            galaxy_json_path = os.path.join( job_working_directory, jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
+        json_file = open( galaxy_json_path, 'r' )
+        for line in json_file:
+            line = json.loads( line )
+            if line.get( 'type' ) == 'new_primary_dataset':
+                new_primary_datasets[ os.path.split( line.get( 'filename' ) )[-1] ] = line
+    except Exception:
+        # This should not be considered an error or warning condition, this file is optional
+        pass
+    # Loop through output file names, looking for generated primary
+    # datasets in the form:
+    #     'primary_associatedWithDatasetID_designation_visibility_extension(_DBKEY)'
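+    # e.g. 'primary_57_output2_visible_bed_hg18' (hypothetical file name)
+    # designates a visible 'bed' dataset with dbkey 'hg18' for dataset 57.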
+    primary_output_assigned = False
+    new_outdata_name = None
+    primary_datasets = {}
+    for output_index, ( name, outdata ) in enumerate( output.items() ):
+        dataset_collectors = map(dataset_collector, tool.outputs[ name ].dataset_collector_descriptions) if name in tool.outputs else [ DEFAULT_DATASET_COLLECTOR ]
+        filenames = odict.odict()
+        if 'new_file_path' in app.config.collect_outputs_from:
+            if DEFAULT_DATASET_COLLECTOR in dataset_collectors:
+                # 'new_file_path' collection should be considered deprecated,
+                # only use old-style matching (glob instead of regex and only
+                # using default collector - if enabled).
+                for filename in glob.glob(os.path.join(app.config.new_file_path, "primary_%i_*" % outdata.id) ):
+                    filenames[ filename ] = DEFAULT_DATASET_COLLECTOR
+        if 'job_working_directory' in app.config.collect_outputs_from:
+            for path, extra_file_collector in walk_over_extra_files( dataset_collectors, job_working_directory, outdata ):
+                filenames[ path ] = extra_file_collector
+        for filename_index, ( filename, extra_file_collector ) in enumerate( filenames.iteritems() ):
+            fields_match = extra_file_collector.match( outdata, os.path.basename( filename ) )
+            if not fields_match:
+                # Previously this would just have raised an IndexError via pop()
+                raise Exception( "Problem parsing metadata fields for file %s" % filename )
+            designation = fields_match.designation
+            if filename_index == 0 and extra_file_collector.assign_primary_output and output_index == 0:
+                new_outdata_name = fields_match.name or "%s (%s)" % ( outdata.name, designation )
+                # Move data from temp location to dataset location
+                app.object_store.update_from_file( outdata.dataset, file_name=filename, create=True )
+                primary_output_assigned = True
+                continue
+            if name not in primary_datasets:
+                primary_datasets[ name ] = odict.odict()
+            visible = fields_match.visible
+            ext = fields_match.ext
+            if ext == "input":
+                ext = input_ext
+            dbkey = fields_match.dbkey
+            if dbkey == INPUT_DBKEY_TOKEN:
+                dbkey = input_dbkey
+            # Create new primary dataset
+            primary_data = _new_hda(app, sa_session, ext, designation, visible, dbkey)
+            app.security_agent.copy_dataset_permissions( outdata.dataset, primary_data.dataset )
+            sa_session.flush()
+            # Move data from temp location to dataset location
+            app.object_store.update_from_file(primary_data.dataset, file_name=filename, create=True)
+            primary_data.set_size()
+            # If the match specified a name, use it; otherwise generate one
+            # from the designation.
+            primary_data.name = fields_match.name or "%s (%s)" % ( outdata.name, designation )
+            primary_data.info = outdata.info
+            primary_data.init_meta( copy_from=outdata )
+            primary_data.dbkey = dbkey
+            # Associate new dataset with job
+            job = None
+            for assoc in outdata.creating_job_associations:
+                job = assoc.job
+                break
+            if job:
+                assoc = app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s|%s__' % ( name, designation ), primary_data )
+                assoc.job = job
+                sa_session.add( assoc )
+                sa_session.flush()
+            primary_data.state = outdata.state
+            # add tool/metadata provided information
+            new_primary_datasets_attributes = new_primary_datasets.get( os.path.split( filename )[-1], {} )
+            if new_primary_datasets_attributes:
+                dataset_att_by_name = dict( ext='extension' )
+                for att_set in [ 'name', 'info', 'ext', 'dbkey' ]:
+                    dataset_att_name = dataset_att_by_name.get( att_set, att_set )
+                    setattr( primary_data, dataset_att_name, new_primary_datasets_attributes.get( att_set, getattr( primary_data, dataset_att_name ) ) )
+                extra_files_path = new_primary_datasets_attributes.get( 'extra_files', None )
+                if extra_files_path:
+                    extra_files_path_joined = os.path.join( job_working_directory, extra_files_path )
+                    for root, dirs, files in os.walk( extra_files_path_joined ):
+                        extra_dir = os.path.join( primary_data.extra_files_path, root.replace( extra_files_path_joined, '', 1 ).lstrip( os.path.sep ) )
+                        for f in files:
+                            app.object_store.update_from_file(
+                                primary_data.dataset,
+                                extra_dir=extra_dir,
+                                alt_name=f,
+                                file_name=os.path.join( root, f ),
+                                create=True,
+                                dir_only=True,
+                                preserve_symlinks=True
+                            )
+            metadata_dict = new_primary_datasets_attributes.get( 'metadata', None )
+            if metadata_dict:
+                if "dbkey" in new_primary_datasets_attributes:
+                    metadata_dict["dbkey"] = new_primary_datasets_attributes["dbkey"]
+                primary_data.metadata.from_JSON_dict( json_dict=metadata_dict )
+            else:
+                primary_data.set_meta()
+            primary_data.set_peek()
+            sa_session.add( primary_data )
+            sa_session.flush()
+            outdata.history.add_dataset( primary_data )
+            # Add dataset to return dict
+            primary_datasets[name][designation] = primary_data
+            # Need to update all associated output hdas, i.e. history was
+            # shared with job running
+            for dataset in outdata.dataset.history_associations:
+                if outdata == dataset:
+                    continue
+                new_data = primary_data.copy()
+                dataset.history.add_dataset( new_data )
+                sa_session.add( new_data )
+                sa_session.flush()
+        if primary_output_assigned:
+            outdata.name = new_outdata_name
+            outdata.init_meta()
+            outdata.set_meta()
+            outdata.set_peek()
+            sa_session.add( outdata )
+            sa_session.flush()
+    return primary_datasets
+
+
+def walk_over_extra_files( extra_file_collectors, job_working_directory, matchable ):
+    for extra_file_collector in extra_file_collectors:
+        matches = []
+        directory = job_working_directory
+        if extra_file_collector.directory:
+            directory = os.path.join( directory, extra_file_collector.directory )
+            if not util.in_directory( directory, job_working_directory ):
+                raise Exception( "Problem with tool configuration, attempting to pull in datasets from outside working directory." )
+        if not os.path.isdir( directory ):
+            continue
+        for filename in os.listdir( directory ):
+            path = os.path.join( directory, filename )
+            if not os.path.isfile( path ):
+                continue
+            match = extra_file_collector.match( matchable, filename, path=path )
+            if match:
+                matches.append(match)
+
+        for match in extra_file_collector.sort(matches):
+            yield match.path, extra_file_collector
+
+
+def dataset_collector( dataset_collection_description ):
+    if dataset_collection_description is DEFAULT_DATASET_COLLECTOR_DESCRIPTION:
+        # This value is compared with 'is' and 'in' operators, so ensure
+        # it is treated like a singleton.
+        return DEFAULT_DATASET_COLLECTOR
+    else:
+        return DatasetCollector( dataset_collection_description )
+
+
+class DatasetCollector( object ):
+
+    def __init__( self, dataset_collection_description ):
+        # dataset_collection_description is an abstract description
+        # built from the tool parsing module - see galaxy.tools.parser.output_collection_def
+        self.sort_key = dataset_collection_description.sort_key
+        self.sort_reverse = dataset_collection_description.sort_reverse
+        self.sort_comp = dataset_collection_description.sort_comp
+        self.pattern = dataset_collection_description.pattern
+        self.default_dbkey = dataset_collection_description.default_dbkey
+        self.default_ext = dataset_collection_description.default_ext
+        self.default_visible = dataset_collection_description.default_visible
+        self.directory = dataset_collection_description.directory
+        self.assign_primary_output = dataset_collection_description.assign_primary_output
+
+    def pattern_for_dataset( self, dataset_instance=None ):
+        token_replacement = r'\d+'
+        if dataset_instance:
+            token_replacement = str( dataset_instance.id )
+        return self.pattern.replace( DATASET_ID_TOKEN, token_replacement )
+
+    def match( self, dataset_instance, filename, path=None ):
+        pattern = self.pattern_for_dataset( dataset_instance )
+        re_match = re.match( pattern, filename )
+        match_object = None
+        if re_match:
+            match_object = CollectedDatasetMatch( re_match, self, filename, path=path )
+        return match_object
+
+    def sort( self, matches ):
+        reverse = self.sort_reverse
+        sort_key = self.sort_key
+        sort_comp = self.sort_comp
+        assert sort_key in ["filename", "dbkey", "name", "designation"]
+        assert sort_comp in ["lexical", "numeric"]
+        key = operator.attrgetter(sort_key)
+        if sort_comp == "numeric":
+            key = _compose(int, key)
+
+        return sorted(matches, key=key, reverse=reverse)
+
+
+def _compose(f, g):
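+    # Function composition: _compose(f, g)(x) == f(g(x)); sort() above uses
+    # it, e.g. _compose(int, operator.attrgetter('designation')) to compare
+    # designations numerically.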
+    return lambda x: f(g(x))
+
+
+class CollectedDatasetMatch( object ):
+
+    def __init__( self, re_match, collector, filename, path=None ):
+        self.re_match = re_match
+        self.collector = collector
+        self.filename = filename
+        self.path = path
+
+    @property
+    def designation( self ):
+        re_match = self.re_match
+        # If collecting a nested collection, grab identifier_0,
+        # identifier_1, etc... and join on : to build the designation.
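+        # e.g. matched groups identifier_0='foo', identifier_1='bar'
+        # (hypothetical) yield the designation 'foo:bar'.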
+        element_identifiers = self.raw_element_identifiers
+        if element_identifiers:
+            return ":".join(element_identifiers)
+        elif "designation" in re_match.groupdict():
+            return re_match.group( "designation" )
+        elif "name" in re_match.groupdict():
+            return re_match.group( "name" )
+        else:
+            return None
+
+    @property
+    def element_identifiers( self ):
+        return self.raw_element_identifiers or [self.designation]
+
+    @property
+    def raw_element_identifiers( self ):
+        re_match = self.re_match
+        identifiers = []
+        i = 0
+        while True:
+            key = "identifier_%d" % i
+            if key in re_match.groupdict():
+                identifiers.append(re_match.group(key))
+            else:
+                break
+            i += 1
+
+        return identifiers
+
+    @property
+    def name( self ):
+        """ Return name or None if not defined by the discovery pattern.
+        """
+        re_match = self.re_match
+        name = None
+        if "name" in re_match.groupdict():
+            name = re_match.group( "name" )
+        return name
+
+    @property
+    def dbkey( self ):
+        try:
+            return self.re_match.group( "dbkey" )
+        except IndexError:
+            return self.collector.default_dbkey
+
+    @property
+    def ext( self ):
+        try:
+            return self.re_match.group( "ext" )
+        except IndexError:
+            return self.collector.default_ext
+
+    @property
+    def visible( self ):
+        try:
+            return self.re_match.group( "visible" ).lower() == "visible"
+        except IndexError:
+            return self.collector.default_visible
+
+
+UNSET = object()
+
+
+def _new_hda(
+    app,
+    sa_session,
+    ext,
+    designation,
+    visible,
+    dbkey,
+    permissions=UNSET,
+):
+    """Return a new unflushed HDA with dataset and permissions setup.
+    """
+    # Create new primary dataset
+    primary_data = app.model.HistoryDatasetAssociation( extension=ext,
+                                                        designation=designation,
+                                                        visible=visible,
+                                                        dbkey=dbkey,
+                                                        create_dataset=True,
+                                                        flush=False,
+                                                        sa_session=sa_session )
+    if permissions is not UNSET:
+        app.security_agent.set_all_dataset_permissions( primary_data.dataset, permissions, new=True, flush=False )
+    sa_session.add( primary_data )
+    return primary_data
+
+
+DEFAULT_DATASET_COLLECTOR = DatasetCollector(DEFAULT_DATASET_COLLECTOR_DESCRIPTION)
diff --git a/lib/galaxy/tools/parameters/sanitize.py b/lib/galaxy/tools/parameters/sanitize.py
new file mode 100644
index 0000000..a976d32
--- /dev/null
+++ b/lib/galaxy/tools/parameters/sanitize.py
@@ -0,0 +1,170 @@
+"""
+Tool Parameter specific sanitizing.
+"""
+
+import logging
+import string
+from six import string_types
+
+import galaxy.util
+
+log = logging.getLogger( __name__ )
+
+
+class ToolParameterSanitizer( object ):
+    """
+    Handles tool parameter specific sanitizing.
+
+    >>> from xml.etree.ElementTree import XML
+    >>> sanitizer = ToolParameterSanitizer.from_element( XML(
+    ... '''
+    ... <sanitizer invalid_char="">
+    ...   <valid initial="string.letters"/>
+    ... </sanitizer>
+    ... ''' ) )
+    >>> sanitizer.sanitize_param( ''.join( sorted( [ c for c in string.printable ] ) ) ) == ''.join( sorted( [ c for c in string.letters ] ) )
+    True
+    >>> slash = chr( 92 )
+    >>> sanitizer = ToolParameterSanitizer.from_element( XML(
+    ... '''
+    ... <sanitizer>
+    ...   <valid initial="none">
+    ...    <add preset="string.printable"/>
+    ...    <remove value="""/>
+    ...    <remove value="%s"/>
+    ...   </valid>
+    ...   <mapping initial="none">
+    ...     <add source=""" target="%s""/>
+    ...     <add source="%s" target="%s%s"/>
+    ...   </mapping>
+    ... </sanitizer>
+    ... ''' % ( slash, slash, slash, slash, slash ) ) )
+    >>> text = '%s"$rm&#!' % slash
+    >>> [ c for c in sanitizer.sanitize_param( text ) ] == [ slash, slash, slash, '"', '$', 'r', 'm', '&', '#', '!' ]
+    True
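+
+    Valid-character presets can also be resolved by name:
+
+    >>> ToolParameterSanitizer.get_valid_by_name( 'string.digits' ) == [ c for c in string.digits ]
+    True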
+    """
+
+    VALID_PRESET = { 'default': ( string.letters + string.digits + " -=_.()/+*^,:?!" ), 'none': '' }
+    MAPPING_PRESET = { 'default': galaxy.util.mapped_chars, 'none': {} }
+    DEFAULT_INVALID_CHAR = 'X'
+
+    # class methods
+    @classmethod
+    def from_element( cls, elem ):
+        """Loads the proper filter by the type attribute of elem"""
+        # TODO: Add ability to generically specify a method to use for sanitizing input via specification in tool XML
+        rval = ToolParameterSanitizer()
+        rval._invalid_char = elem.get( 'invalid_char', cls.DEFAULT_INVALID_CHAR )
+        rval.sanitize = galaxy.util.string_as_bool( elem.get( 'sanitize', 'True' ) )
+        for valid_elem in elem.findall( 'valid' ):
+            rval._valid_chars = rval.get_valid_by_name( valid_elem.get( 'initial', 'default' ) )
+            for action_elem in valid_elem:
+                preset = rval.get_valid_by_name( action_elem.get( 'preset', 'none' ) )
+                valid_value = [ val for val in action_elem.get( 'value', [] ) ]
+                if action_elem.tag.lower() == 'add':
+                    for val in preset + valid_value:
+                        if val not in rval._valid_chars:
+                            rval._valid_chars.append( val )
+                elif action_elem.tag.lower() == 'remove':
+                    for val in preset + valid_value:
+                        while val in rval._valid_chars:
+                            rval._valid_chars.remove( val )
+                else:
+                    log.debug( 'Invalid action tag in valid: %s' % action_elem.tag )
+        for mapping_elem in elem.findall( 'mapping' ):
+            rval._mapped_chars = rval.get_mapping_by_name( mapping_elem.get( 'initial', 'default' ) )
+            for action_elem in mapping_elem:
+                map_source = action_elem.get( 'source', None )
+                map_target = action_elem.get( 'target', None )
+                preset = rval.get_mapping_by_name( action_elem.get( 'preset', 'none' ) )
+                if action_elem.tag.lower() == 'add':
+                    rval._mapped_chars.update( preset )
+                    if None not in [ map_source, map_target ]:
+                        rval._mapped_chars[ map_source ] = map_target
+                elif action_elem.tag.lower() == 'remove':
+                    for map_key in preset.keys():
+                        if map_key in rval._mapped_chars:
+                            del rval._mapped_chars[ map_key ]
+                    if map_source is not None and map_source in rval._mapped_chars:
+                        del rval._mapped_chars[ map_source ]
+                else:
+                    log.debug( 'Invalid action tag in mapping: %s' % action_elem.tag )
+        return rval
+
+    @classmethod
+    def get_valid_by_name( cls, name ):
+        rval = []
+        for split_name in name.split( ',' ):
+            split_name = split_name.strip()
+            value = []
+            if split_name.startswith( 'string.' ):
+                try:
+                    value = eval( split_name )
+                except NameError as e:
+                    log.debug( 'Invalid string preset specified: %s' % e )
+            elif split_name in cls.VALID_PRESET:
+                value = cls.VALID_PRESET[ split_name ]
+            else:
+                log.debug( 'Invalid preset name specified: %s' % split_name )
+            rval.extend( [ val for val in value if val not in rval ] )
+        return rval
+
+    @classmethod
+    def get_mapping_by_name( cls, name ):
+        rval = {}
+        for split_name in name.split( ',' ):
+            split_name = split_name.strip()
+            if split_name in cls.MAPPING_PRESET:
+                rval.update( cls.MAPPING_PRESET[ split_name ] )
+            else:
+                log.debug( 'Invalid preset name specified: %s' % split_name )
+        return rval
+    # end class methods
+
+    def __init__( self ):
+        self._valid_chars = []  # List of valid characters
+        self._mapped_chars = {}  # Map a character to a replacement of any number of characters
+        self._invalid_char = self.DEFAULT_INVALID_CHAR  # Replace invalid characters with this character
+        self.sanitize = True  # When False, values are simply passed back unchanged
+
+    def restore_text( self, text ):
+        """Restores sanitized text"""
+        if self.sanitize:
+            for key, value in self._mapped_chars.iteritems():
+                text = text.replace( value, key )
+        return text
+
+    def restore_param( self, value ):
+        if self.sanitize:
+            if isinstance( value, string_types ):
+                return self.restore_text( value )
+            elif isinstance( value, list ):
+                return map( self.restore_text, value )
+            else:
+                raise Exception('Unknown parameter type (%s:%s)' % ( type( value ), value ))
+        return value
+
+    def sanitize_text( self, text ):
+        """Restricts the characters that are allowed in a text"""
+        if not self.sanitize:
+            return text
+        rval = []
+        for c in text:
+            if c in self._valid_chars:
+                rval.append( c )
+            elif c in self._mapped_chars:
+                rval.append( self._mapped_chars[ c ] )
+            else:
+                rval.append( self._invalid_char )
+        return ''.join( rval )
+
+    def sanitize_param( self, value ):
+        """Clean incoming parameters (strings or lists)"""
+        if not self.sanitize:
+            return value
+        if isinstance( value, string_types ):
+            return self.sanitize_text( value )
+        elif isinstance( value, list ):
+            return map( self.sanitize_text, value )
+        else:
+            raise Exception('Unknown parameter type (%s:%s)' % ( type( value ), value ))
diff --git a/lib/galaxy/tools/parameters/validation.py b/lib/galaxy/tools/parameters/validation.py
new file mode 100644
index 0000000..eb8961b
--- /dev/null
+++ b/lib/galaxy/tools/parameters/validation.py
@@ -0,0 +1,448 @@
+"""
+Classes related to parameter validation.
+"""
+
+import logging
+import re
+from six import string_types
+
+from galaxy import model
+from galaxy import util
+
+log = logging.getLogger( __name__ )
+
+
+class Validator( object ):
+    """
+    A validator checks that a value meets some condition, raising a ValueError otherwise
+    """
+    requires_dataset_metadata = False
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        type = elem.get( 'type', None )
+        assert type is not None, "Required 'type' attribute missing from validator"
+        return validator_types[type].from_element( param, elem )
+
+    def validate( self, value, trans=None ):
+        raise TypeError( "Abstract Method" )
+
+
+class RegexValidator( Validator ):
+    """
+    Validator that evaluates a regular expression
+
+    >>> from xml.etree.ElementTree import XML
+    >>> from galaxy.tools.parameters.basic import ToolParameter
+    >>> p = ToolParameter.build( None, XML( '''
+    ... <param name="blah" type="text" size="10" value="10">
+    ...     <validator type="regex" message="Not gonna happen">[Ff]oo</validator>
+    ... </param>
+    ... ''' ) )
+    >>> t = p.validate( "Foo" )
+    >>> t = p.validate( "foo" )
+    >>> t = p.validate( "Fop" )
+    Traceback (most recent call last):
+        ...
+    ValueError: Not gonna happen
+    """
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message' ), elem.text )
+
+    def __init__( self, message, expression ):
+        self.message = message
+        # Compile later. RE objects used not to be thread safe; not sure
+        # about the sre module.
+        self.expression = expression
+
+    def validate( self, value, trans=None ):
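+        # Note that re.match only anchors at the start of the value; a
+        # pattern that must match the entire value needs an explicit $.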
+        if re.match( self.expression, value or '' ) is None:
+            raise ValueError( self.message )
+
+
+class ExpressionValidator( Validator ):
+    """
+    Validator that evaluates a python expression using the value
+
+    >>> from xml.etree.ElementTree import XML
+    >>> from galaxy.tools.parameters.basic import ToolParameter
+    >>> p = ToolParameter.build( None, XML( '''
+    ... <param name="blah" type="text" size="10" value="10">
+    ...     <validator type="expression" message="Not gonna happen">value.lower() == "foo"</validator>
+    ... </param>
+    ... ''' ) )
+    >>> t = p.validate( "Foo" )
+    >>> t = p.validate( "foo" )
+    >>> t = p.validate( "Fop" )
+    Traceback (most recent call last):
+        ...
+    ValueError: Not gonna happen
+    """
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message' ), elem.text, elem.get( 'substitute_value_in_message' ) )
+
+    def __init__( self, message, expression, substitute_value_in_message ):
+        self.message = message
+        self.substitute_value_in_message = substitute_value_in_message
+        # Save the compiled expression; code objects are thread safe (right?)
+        self.expression = compile( expression, '<string>', 'eval' )
+
+    def validate( self, value, trans=None ):
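+        # The expression is evaluated with only 'value' (plus builtins) in
+        # scope; a falsy result raises ValueError with the message above.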
+        if not( eval( self.expression, dict( value=value ) ) ):
+            message = self.message
+            if self.substitute_value_in_message:
+                message = message % value
+            raise ValueError( message )
+
+
+class InRangeValidator( Validator ):
+    """
+    Validator that ensures a number is in a specific range
+
+    >>> from xml.etree.ElementTree import XML
+    >>> from galaxy.tools.parameters.basic import ToolParameter
+    >>> p = ToolParameter.build( None, XML( '''
+    ... <param name="blah" type="integer" size="10" value="10">
+    ...     <validator type="in_range" message="Not gonna happen" min="10" exclude_min="true" max="20"/>
+    ... </param>
+    ... ''' ) )
+    >>> t = p.validate( 10 )
+    Traceback (most recent call last):
+        ...
+    ValueError: Not gonna happen
+    >>> t = p.validate( 15 )
+    >>> t = p.validate( 20 )
+    >>> t = p.validate( 21 )
+    Traceback (most recent call last):
+        ...
+    ValueError: Not gonna happen
+    """
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message', None ), elem.get( 'min' ),
+                    elem.get( 'max' ), elem.get( 'exclude_min', 'false' ),
+                    elem.get( 'exclude_max', 'false' ) )
+
+    def __init__( self, message, range_min, range_max, exclude_min=False, exclude_max=False ):
+        """
+        When the optional exclude_min and exclude_max attributes are set
+        to true, the range excludes the end points (i.e., min < value < max);
+        if set to false (the default), the range includes the end points
+        (i.e., min <= value <= max).  Any combination of exclude_min and
+        exclude_max values is allowed.
+        """
+        self.min = float( range_min if range_min is not None else '-inf' )
+        self.exclude_min = util.asbool( exclude_min )
+        self.max = float( range_max if range_max is not None else 'inf' )
+        self.exclude_max = util.asbool( exclude_max )
+        assert self.min <= self.max, 'min must be less than or equal to max'
+        # Remove unneeded 0s and decimal from floats to make message pretty.
+        self_min_str = str( self.min ).rstrip( '0' ).rstrip( '.' )
+        self_max_str = str( self.max ).rstrip( '0' ).rstrip( '.' )
+        op1 = '>='
+        op2 = '<='
+        if self.exclude_min:
+            op1 = '>'
+        if self.exclude_max:
+            op2 = '<'
+        self.message = message or "Value must be %s %s and %s %s" % ( op1, self_min_str, op2, self_max_str )
+
+    def validate( self, value, trans=None ):
+        if self.exclude_min:
+            if not self.min < float( value ):
+                raise ValueError( self.message )
+        else:
+            if not self.min <= float( value ):
+                raise ValueError( self.message )
+        if self.exclude_max:
+            if not float( value ) < self.max:
+                raise ValueError( self.message )
+        else:
+            if not float( value ) <= self.max:
+                raise ValueError( self.message )
+
+
+class LengthValidator( Validator ):
+    """
+    Validator that ensures the length of the provided string (value) is in a specific range
+
+    >>> from xml.etree.ElementTree import XML
+    >>> from galaxy.tools.parameters.basic import ToolParameter
+    >>> p = ToolParameter.build( None, XML( '''
+    ... <param name="blah" type="text" size="10" value="foobar">
+    ...     <validator type="length" min="2" max="8"/>
+    ... </param>
+    ... ''' ) )
+    >>> t = p.validate( "foo" )
+    >>> t = p.validate( "bar" )
+    >>> t = p.validate( "f" )
+    Traceback (most recent call last):
+        ...
+    ValueError: Must have length of at least 2
+    >>> t = p.validate( "foobarbaz" )
+    Traceback (most recent call last):
+        ...
+    ValueError: Must have length no more than 8
+    """
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message', None ), elem.get( 'min', None ), elem.get( 'max', None ) )
+
+    def __init__( self, message, length_min, length_max ):
+        self.message = message
+        if length_min is not None:
+            length_min = int( length_min )
+        if length_max is not None:
+            length_max = int( length_max )
+        self.min = length_min
+        self.max = length_max
+
+    def validate( self, value, trans=None ):
+        if self.min is not None and len( value ) < self.min:
+            raise ValueError( self.message or ( "Must have length of at least %d" % self.min ) )
+        if self.max is not None and len( value ) > self.max:
+            raise ValueError( self.message or ( "Must have length no more than %d" % self.max ) )
+
+
+class DatasetOkValidator( Validator ):
+    """
+    Validator that checks if a dataset is in an 'ok' state
+    """
+
+    def __init__( self, message=None ):
+        self.message = message
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message', None ) )
+
+    def validate( self, value, trans=None ):
+        if value and value.state != model.Dataset.states.OK:
+            if self.message is None:
+                self.message = "The selected dataset is still being generated, select another dataset or wait until it is completed"
+            raise ValueError( self.message )
+
+
+class DatasetEmptyValidator( Validator ):
+    """Validator that checks if a dataset has a positive file size."""
+    def __init__( self, message=None ):
+        self.message = message
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message', None ) )
+
+    def validate( self, value, trans=None ):
+        if value:
+            if value.get_size() == 0:
+                if self.message is None:
+                    self.message = "The selected dataset is empty, this tool expects non-empty files."
+                raise ValueError( self.message )
+
+
+class MetadataValidator( Validator ):
+    """
+    Validator that checks for missing metadata
+    """
+    requires_dataset_metadata = True
+
+    def __init__( self, message=None, check="", skip="" ):
+        self.message = message
+        self.check = check.split( "," )
+        self.skip = skip.split( "," )
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( message=elem.get( 'message', None ), check=elem.get( 'check', "" ), skip=elem.get( 'skip', "" ) )
+
+    def validate( self, value, trans=None ):
+        if value:
+            if not isinstance( value, model.DatasetInstance ):
+                raise ValueError( 'A non-dataset value was provided.' )
+            if value.missing_meta( check=self.check, skip=self.skip ):
+                if self.message is None:
+                    self.message = "Metadata missing, click the pencil icon in the history item to edit / save the metadata attributes"
+                raise ValueError( self.message )
+
+
+class UnspecifiedBuildValidator( Validator ):
+    """
+    Validator that checks for dbkey not equal to '?'
+    """
+    requires_dataset_metadata = True
+
+    def __init__( self, message=None ):
+        if message is None:
+            self.message = "Unspecified genome build, click the pencil icon in the history item to set the genome build"
+        else:
+            self.message = message
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message', None ) )
+
+    def validate( self, value, trans=None ):
+        # if value is None, we cannot validate
+        if value:
+            dbkey = value.metadata.dbkey
+            if isinstance( dbkey, list ):
+                dbkey = dbkey[0]
+            if dbkey == '?':
+                raise ValueError( self.message )
+
+
+class NoOptionsValidator( Validator ):
+    """Validator that checks for empty select list"""
+
+    def __init__( self, message=None ):
+        self.message = message
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message', None ) )
+
+    def validate( self, value, trans=None ):
+        if value is None:
+            if self.message is None:
+                self.message = "No options available for selection"
+            raise ValueError( self.message )
+
+
+class EmptyTextfieldValidator( Validator ):
+    """Validator that checks for empty text field"""
+
+    def __init__( self, message=None ):
+        self.message = message
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        return cls( elem.get( 'message', None ) )
+
+    def validate( self, value, trans=None ):
+        if value == '':
+            if self.message is None:
+                self.message = "Field requires a value"
+            raise ValueError( self.message )
+
+
+class MetadataInFileColumnValidator( Validator ):
+    """
+    Validator that checks if the value for a dataset's metadata item exists in a file.
+    """
+    requires_dataset_metadata = True
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        filename = elem.get( "filename", None )
+        if filename:
+            filename = "%s/%s" % ( param.tool.app.config.tool_data_path, filename.strip() )
+        metadata_name = elem.get( "metadata_name", None )
+        if metadata_name:
+            metadata_name = metadata_name.strip()
+        metadata_column = int( elem.get( "metadata_column", 0 ) )
+        split = elem.get( "split", "\t" )
+        message = elem.get( "message", "Value for metadata %s was not found in %s." % ( metadata_name, filename ) )
+        line_startswith = elem.get( "line_startswith", None )
+        if line_startswith:
+            line_startswith = line_startswith.strip()
+        return cls( filename, metadata_name, metadata_column, message, line_startswith, split )
+
+    def __init__( self, filename, metadata_name, metadata_column, message="Value for metadata not found.", line_startswith=None, split="\t" ):
+        self.metadata_name = metadata_name
+        self.message = message
+        self.valid_values = []
+        for line in open( filename ):
+            if line_startswith is None or line.startswith( line_startswith ):
+                fields = line.split( split )
+                if metadata_column < len( fields ):
+                    self.valid_values.append( fields[metadata_column].strip() )
+
+    def validate( self, value, trans=None ):
+        if not value:
+            return
+        if hasattr( value, "metadata" ):
+            if value.metadata.spec[self.metadata_name].param.to_string( value.metadata.get( self.metadata_name ) ) in self.valid_values:
+                return
+        raise ValueError( self.message )
+
+
+class MetadataInDataTableColumnValidator( Validator ):
+    """
+    Validator that checks if the value for a dataset's metadata item exists in a tool data table.
+    """
+    requires_dataset_metadata = True
+
+    @classmethod
+    def from_element( cls, param, elem ):
+        table_name = elem.get( "table_name", None )
+        assert table_name, 'You must specify a table_name.'
+        tool_data_table = param.tool.app.tool_data_tables[ table_name ]
+        metadata_name = elem.get( "metadata_name", None )
+        if metadata_name:
+            metadata_name = metadata_name.strip()
+        metadata_column = elem.get( "metadata_column", 0 )
+        try:
+            metadata_column = int( metadata_column )
+        except ValueError:
+            pass
+        message = elem.get( "message", "Value for metadata %s was not found in %s." % ( metadata_name, table_name ) )
+        line_startswith = elem.get( "line_startswith", None )
+        if line_startswith:
+            line_startswith = line_startswith.strip()
+        return cls( tool_data_table, metadata_name, metadata_column, message, line_startswith )
+
+    def __init__( self, tool_data_table, metadata_name, metadata_column, message="Value for metadata not found.", line_startswith=None ):
+        self.metadata_name = metadata_name
+        self.message = message
+        self.valid_values = []
+        self._data_table_content_version = None
+        self._tool_data_table = tool_data_table
+        if isinstance( metadata_column, string_types ):
+            metadata_column = tool_data_table.columns[ metadata_column ]
+        self._metadata_column = metadata_column
+        self._load_values()
+
+    def _load_values( self ):
+        self._data_table_content_version, data_fields = self._tool_data_table.get_version_fields()
+        self.valid_values = []
+        for fields in data_fields:
+            if self._metadata_column < len( fields ):
+                self.valid_values.append( fields[ self._metadata_column ] )
+
+    def validate( self, value, trans=None ):
+        if not value:
+            return
+        if hasattr( value, "metadata" ):
+            if not self._tool_data_table.is_current_version( self._data_table_content_version ):
+                log.debug( 'MetadataInDataTableColumnValidator values are out of sync with data table (%s), updating validator.', self._tool_data_table.name )
+                self._load_values()
+            if value.metadata.spec[self.metadata_name].param.to_string( value.metadata.get( self.metadata_name ) ) in self.valid_values:
+                return
+        raise ValueError( self.message )
+
+
+validator_types = dict( expression=ExpressionValidator,
+                        regex=RegexValidator,
+                        in_range=InRangeValidator,
+                        length=LengthValidator,
+                        metadata=MetadataValidator,
+                        unspecified_build=UnspecifiedBuildValidator,
+                        no_options=NoOptionsValidator,
+                        empty_field=EmptyTextfieldValidator,
+                        empty_dataset=DatasetEmptyValidator,
+                        dataset_metadata_in_file=MetadataInFileColumnValidator,
+                        dataset_metadata_in_data_table=MetadataInDataTableColumnValidator,
+                        dataset_ok_validator=DatasetOkValidator, )
+
+
+def get_suite():
+    """Get unittest suite for this module"""
+    import doctest
+    import sys
+    return doctest.DocTestSuite( sys.modules[__name__] )
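A minimal sketch of how the validator_types mapping above resolves a <validator> element from a tool's XML into one of these classes (LengthValidator ignores the param argument, so None suffices here):

    from xml.etree.ElementTree import XML

    elem = XML('<validator type="length" min="2" max="8"/>')
    validator = validator_types[elem.get('type')].from_element(None, elem)
    validator.validate('foo')  # length 3 is within [2, 8], so no error is raised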
diff --git a/lib/galaxy/tools/parameters/wrapped.py b/lib/galaxy/tools/parameters/wrapped.py
new file mode 100644
index 0000000..b6dbebd
--- /dev/null
+++ b/lib/galaxy/tools/parameters/wrapped.py
@@ -0,0 +1,116 @@
+from galaxy.tools.parameters.basic import (
+    DataCollectionToolParameter,
+    DataToolParameter,
+    SelectToolParameter
+)
+from galaxy.tools.parameters.grouping import (
+    Conditional,
+    Repeat,
+    Section
+)
+from galaxy.tools.wrappers import (
+    DatasetCollectionWrapper,
+    DatasetFilenameWrapper,
+    DatasetListWrapper,
+    InputValueWrapper,
+    SelectToolParameterWrapper
+)
+
+PARAMS_UNWRAPPED = object()
+
+
+class WrappedParameters( object ):
+
+    def __init__( self, trans, tool, incoming ):
+        self.trans = trans
+        self.tool = tool
+        self.incoming = incoming
+        self._params = PARAMS_UNWRAPPED
+
+    @property
+    def params( self ):
+        if self._params is PARAMS_UNWRAPPED:
+            params = make_dict_copy( self.incoming )
+            self.wrap_values( self.tool.inputs, params, skip_missing_values=not self.tool.check_values )
+            self._params = params
+        return self._params
+
+    def wrap_values( self, inputs, input_values, skip_missing_values=False ):
+        trans = self.trans
+        tool = self.tool
+        incoming = self.incoming
+
+        # Wrap tool inputs as necessary
+        for input in inputs.values():
+            if input.name not in input_values and skip_missing_values:
+                continue
+            value = input_values[ input.name ]
+            if isinstance( input, Repeat ):
+                for d in input_values[ input.name ]:
+                    self.wrap_values( input.inputs, d, skip_missing_values=skip_missing_values )
+            elif isinstance( input, Conditional ):
+                values = input_values[ input.name ]
+                current = values[ "__current_case__" ]
+                self.wrap_values( input.cases[current].inputs, values, skip_missing_values=skip_missing_values )
+            elif isinstance( input, Section ):
+                values = value
+                self.wrap_values( input.inputs, values, skip_missing_values=skip_missing_values )
+            elif isinstance( input, DataToolParameter ) and input.multiple:
+                dataset_instances = DatasetListWrapper.to_dataset_instances( value )
+                input_values[ input.name ] = \
+                    DatasetListWrapper( None,
+                                        dataset_instances,
+                                        datatypes_registry=trans.app.datatypes_registry,
+                                        tool=tool,
+                                        name=input.name )
+            elif isinstance( input, DataToolParameter ):
+                input_values[ input.name ] = \
+                    DatasetFilenameWrapper( value,
+                                            datatypes_registry=trans.app.datatypes_registry,
+                                            tool=tool,
+                                            name=input.name )
+            elif isinstance( input, SelectToolParameter ):
+                input_values[ input.name ] = SelectToolParameterWrapper( input, input_values[ input.name ], tool.app, other_values=incoming )
+            elif isinstance( input, DataCollectionToolParameter ):
+                input_values[ input.name ] = DatasetCollectionWrapper(
+                    None,
+                    value,
+                    datatypes_registry=trans.app.datatypes_registry,
+                    tool=tool,
+                    name=input.name,
+                )
+            else:
+                input_values[ input.name ] = InputValueWrapper( input, value, incoming )
+
+
+def make_dict_copy( from_dict ):
+    """
+    Makes a copy of input dictionary from_dict such that all values that
+    are dictionaries result in the creation of a new dictionary (a sort
+    of deepcopy).  Lists are handled the same way, via make_list_copy.
+    """
+    copy_from_dict = {}
+    for key, value in from_dict.items():
+        if type( value ).__name__ == 'dict':
+            copy_from_dict[ key ] = make_dict_copy( value )
+        elif isinstance( value, list ):
+            copy_from_dict[ key ] = make_list_copy( value )
+        else:
+            copy_from_dict[ key ] = value
+    return copy_from_dict
+
+
+def make_list_copy( from_list ):
+    new_list = []
+    for value in from_list:
+        if isinstance( value, dict ):
+            new_list.append( make_dict_copy( value ) )
+        elif isinstance( value, list ):
+            new_list.append( make_list_copy( value ) )
+        else:
+            new_list.append( value )
+    return new_list
+
+
+__all__ = ( 'WrappedParameters', 'make_dict_copy' )
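A quick illustration of the copy semantics described in the docstring above: nested dicts and lists are rebuilt rather than shared, while leaf values are copied by reference:

    original = {'a': {'b': 1}, 'c': [{'d': 2}]}
    duplicate = make_dict_copy(original)
    duplicate['a']['b'] = 99
    duplicate['c'][0]['d'] = 99
    assert original['a']['b'] == 1 and original['c'][0]['d'] == 2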
diff --git a/lib/galaxy/tools/parameters/wrapped_json.py b/lib/galaxy/tools/parameters/wrapped_json.py
new file mode 100644
index 0000000..b5b4989
--- /dev/null
+++ b/lib/galaxy/tools/parameters/wrapped_json.py
@@ -0,0 +1,83 @@
+import logging
+
+log = logging.getLogger(__name__)
+
+SKIP_INPUT = object()
+
+
+def json_wrap(inputs, input_values, as_dict=None, handle_files="SKIP"):
+    if as_dict is None:
+        as_dict = {}
+
+    for input in inputs.values():
+        input_name = input.name
+        value = input_values[input_name]
+        json_value = _json_wrap_input(input, value, handle_files=handle_files)
+        if json_value is SKIP_INPUT:
+            continue
+        as_dict[input_name] = json_value
+    return as_dict
+
+
+def _json_wrap_input(input, value, handle_files="SKIP"):
+    input_type = input.type
+    if input_type == "repeat":
+        repeat_job_value = []
+        for d in value:
+            repeat_instance_job_value = {}
+            json_wrap(input.inputs, d, repeat_instance_job_value, handle_files=handle_files)
+            repeat_job_value.append(repeat_instance_job_value)
+        json_value = repeat_job_value
+    elif input_type == "conditional":
+        values = value
+        current = values["__current_case__"]
+        conditional_job_value = {}
+        json_wrap(input.cases[current].inputs, values, conditional_job_value, handle_files=handle_files)
+        test_param = input.test_param
+        test_param_name = test_param.name
+        test_value = _json_wrap_input(test_param, values[test_param_name], handle_files=handle_files)
+        conditional_job_value[test_param_name] = test_value
+        json_value = conditional_job_value
+    elif input_type == "section":
+        values = value
+        section_job_value = {}
+        json_wrap(input.inputs, values, section_job_value, handle_files=handle_files)
+        json_value = section_job_value
+    elif input_type == "data" and input.multiple:
+        if handle_files == "SKIP":
+            return SKIP_INPUT
+        raise NotImplementedError()
+    elif input_type == "data":
+        if handle_files == "SKIP":
+            return SKIP_INPUT
+        raise NotImplementedError()
+    elif input_type == "data_collection":
+        if handle_files == "SKIP":
+            return SKIP_INPUT
+        raise NotImplementedError()
+    elif input_type in ["select", "text", "color", "hidden"]:
+        json_value = _cast_if_not_none(value, str)
+    elif input_type == "float":
+        json_value = _cast_if_not_none(value, float, empty_to_none=True)
+    elif input_type == "integer":
+        json_value = _cast_if_not_none(value, int, empty_to_none=True)
+    elif input_type == "boolean":
+        json_value = _cast_if_not_none(value, bool)
+    elif input_type == "data_column":
+        # value is a SelectToolParameterWrapper()
+        json_value = [int(_) for _ in _cast_if_not_none(value.value, list)]
+    else:
+        raise NotImplementedError("input_type [%s] not implemented" % input_type)
+
+    return json_value
+
+
+def _cast_if_not_none(value, cast_to, empty_to_none=False):
+    # log.debug("value [%s], type[%s]" % (value, type(value)))
+    if value is None or (empty_to_none and str(value) == ''):
+        return None
+    else:
+        return cast_to(value)
+
+
+__all__ = ('json_wrap', )
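A minimal sketch of json_wrap with a stub standing in for a real tool parameter; the stub is hypothetical, and name/type are the only attributes the integer branch consults:

    class StubParam(object):  # hypothetical stand-in for a tool parameter
        name = 'n_iter'
        type = 'integer'

    json_wrap({'n_iter': StubParam()}, {'n_iter': '5'})   # -> {'n_iter': 5}
    _cast_if_not_none('', float, empty_to_none=True)      # -> None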
diff --git a/lib/galaxy/tools/parser/__init__.py b/lib/galaxy/tools/parser/__init__.py
new file mode 100644
index 0000000..7f7fdf1
--- /dev/null
+++ b/lib/galaxy/tools/parser/__init__.py
@@ -0,0 +1,14 @@
+""" Package responsible for parsing tools from files/abstract tool sources.
+"""
+from .factory import get_input_source, get_tool_source
+from .interface import ToolSource
+from .output_objects import (
+    ToolOutputCollectionPart,
+)
+
+__all__ = (
+    "get_input_source",
+    "get_tool_source",
+    "ToolOutputCollectionPart",
+    "ToolSource",
+)
diff --git a/lib/galaxy/tools/parser/cwl.py b/lib/galaxy/tools/parser/cwl.py
new file mode 100644
index 0000000..65c33d8
--- /dev/null
+++ b/lib/galaxy/tools/parser/cwl.py
@@ -0,0 +1,162 @@
+import logging
+import os
+
+from galaxy.tools.cwl import tool_proxy
+from galaxy.tools.deps import requirements
+from galaxy.util.odict import odict
+
+from .interface import PageSource
+from .interface import PagesSource
+from .interface import ToolSource
+from .interface import ToolStdioExitCode
+from .output_actions import ToolOutputActionGroup
+from .output_objects import ToolOutput
+from .yaml import YamlInputSource
+
+log = logging.getLogger(__name__)
+
+
+class CwlToolSource(ToolSource):
+
+    def __init__(self, tool_file):
+        self._cwl_tool_file = tool_file
+        self._id, _ = os.path.splitext(os.path.basename(tool_file))
+        self._tool_proxy = None
+        self._source_path = tool_file
+
+    @property
+    def tool_proxy(self):
+        if self._tool_proxy is None:
+            self._tool_proxy = tool_proxy(self._source_path)
+        return self._tool_proxy
+
+    def parse_tool_type(self):
+        return 'cwl'
+
+    def parse_id(self):
+        return self._id
+
+    def parse_name(self):
+        return self.tool_proxy.label() or self.parse_id()
+
+    def parse_command(self):
+        return "$__cwl_command"
+
+    def parse_environment_variables(self):
+        environment_variables = []
+        # TODO: Is this even possible from here? This should probably be
+        # moved into the job instead.
+
+        # for environment_variable_el in environment_variables_el.findall("environment_variable"):
+        #    definition = {
+        #        "name": environment_variable_el.get("name"),
+        #        "template": environment_variable_el.text,
+        #    }
+        #    environment_variables.append(
+        #        definition
+        #    )
+
+        return environment_variables
+
+    def parse_edam_operations(self):
+        return []
+
+    def parse_edam_topics(self):
+        return []
+
+    def parse_help(self):
+        return self.tool_proxy.description() or ""
+
+    def parse_sanitize(self):
+        return False
+
+    def parse_strict_shell(self):
+        return True
+
+    def parse_stdio(self):
+        # TODO: remove duplication with YAML
+        from galaxy.jobs.error_level import StdioErrorLevel
+
+        # New format - starting out just using exit code.
+        exit_code_lower = ToolStdioExitCode()
+        exit_code_lower.range_start = float("-inf")
+        exit_code_lower.range_end = -1
+        exit_code_lower.error_level = StdioErrorLevel.FATAL
+        exit_code_high = ToolStdioExitCode()
+        exit_code_high.range_start = 1
+        exit_code_high.range_end = float("inf")
+        exit_code_high.error_level = StdioErrorLevel.FATAL
+        return [exit_code_lower, exit_code_high], []
+
+    def parse_interpreter(self):
+        return None
+
+    def parse_version(self):
+        return "0.0.1"
+
+    def parse_description(self):
+        return ""
+
+    def parse_input_pages(self):
+        page_source = CwlPageSource(self.tool_proxy)
+        return PagesSource([page_source])
+
+    def parse_outputs(self, tool):
+        output_instances = self.tool_proxy.output_instances()
+        outputs = odict()
+        output_defs = []
+        for output_instance in output_instances:
+            output_defs.append(self._parse_output(tool, output_instance))
+        # TODO: parse outputs collections
+        for output_def in output_defs:
+            outputs[output_def.name] = output_def
+        return outputs, odict()
+
+    def _parse_output(self, tool, output_instance):
+        name = output_instance.name
+        # TODO: handle filters, actions, change_format
+        output = ToolOutput( name )
+        if "File" in output_instance.output_data_type:
+            output.format = "_sniff_"
+        else:
+            output.format = "expression.json"
+        output.change_format = []
+        output.format_source = None
+        output.metadata_source = ""
+        output.parent = None
+        output.label = None
+        output.count = None
+        output.filters = []
+        output.tool = tool
+        output.hidden = ""
+        output.dataset_collector_descriptions = []
+        output.actions = ToolOutputActionGroup( output, None )
+        return output
+
+    def parse_requirements_and_containers(self):
+        containers = []
+        docker_identifier = self.tool_proxy.docker_identifier()
+        if docker_identifier:
+            containers.append({"type": "docker",
+                               "identifier": docker_identifier})
+        return requirements.parse_requirements_from_dict(dict(
+            requirements=[],  # TODO: enable via extensions
+            containers=containers,
+        ))
+
+    def parse_profile(self):
+        return "16.04"
+
+
+class CwlPageSource(PageSource):
+
+    def __init__(self, tool_proxy):
+        cwl_instances = tool_proxy.input_instances()
+        self._input_list = list(map(self._to_input_source, cwl_instances))
+
+    def _to_input_source(self, input_instance):
+        as_dict = input_instance.to_dict()
+        return YamlInputSource(as_dict)
+
+    def parse_input_sources(self):
+        return self._input_list
diff --git a/lib/galaxy/tools/parser/factory.py b/lib/galaxy/tools/parser/factory.py
new file mode 100644
index 0000000..d38949f
--- /dev/null
+++ b/lib/galaxy/tools/parser/factory.py
@@ -0,0 +1,74 @@
+"""Constructors for concrete tool and input source objects."""
+from __future__ import absolute_import
+
+import logging
+
+import yaml
+
+from galaxy.tools.loader import load_tool as load_tool_xml
+from galaxy.util.odict import odict
+
+from .cwl import CwlToolSource
+from .interface import InputSource
+from .xml import XmlInputSource, XmlToolSource
+from .yaml import YamlToolSource
+
+log = logging.getLogger(__name__)
+
+
+def get_tool_source(config_file=None, xml_tree=None, enable_beta_formats=True):
+    """Return a ToolSource object corresponding to supplied source.
+
+    The supplied source may be specified as a file path (using the config_file
+    parameter) or as an XML object loaded with load_tool_xml.
+    """
+    if xml_tree is not None:
+        return XmlToolSource(xml_tree, source_path=config_file)
+    elif config_file is None:
+        raise ValueError("get_tool_source called with invalid config_file None.")
+
+    if not enable_beta_formats:
+        tree = load_tool_xml(config_file)
+        return XmlToolSource(tree, source_path=config_file)
+
+    if config_file.endswith(".yml"):
+        log.info("Loading tool from YAML - this is experimental - tool will not function in future.")
+        with open(config_file, "r") as f:
+            as_dict = ordered_load(f)
+            return YamlToolSource(as_dict, source_path=config_file)
+    elif config_file.endswith(".json") or config_file.endswith(".cwl"):
+        log.info("Loading CWL tool - this is experimental - tool likely will not function in future at least in same way.")
+        return CwlToolSource(config_file)
+    else:
+        tree = load_tool_xml(config_file)
+        return XmlToolSource(tree, source_path=config_file)
+
+
+def ordered_load(stream):
+    class OrderedLoader(yaml.Loader):
+        pass
+
+    def construct_mapping(loader, node):
+        loader.flatten_mapping(node)
+        return odict(loader.construct_pairs(node))
+
+    OrderedLoader.add_constructor(
+        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
+        construct_mapping)
+
+    return yaml.load(stream, OrderedLoader)
+
+
+def get_input_source(content):
+    """Wrap an XML element in a XmlInputSource if needed.
+
+    If the supplied content is already an InputSource object,
+    it is simply returned. This allows Galaxy to consume tool
+    inputs uniformly through the input source interface.
+    """
+    if not isinstance(content, InputSource):
+        content = XmlInputSource(content)
+    return content
+
+
+__all__ = ("get_tool_source", "get_input_source")
diff --git a/lib/galaxy/tools/parser/interface.py b/lib/galaxy/tools/parser/interface.py
new file mode 100644
index 0000000..969012a
--- /dev/null
+++ b/lib/galaxy/tools/parser/interface.py
@@ -0,0 +1,357 @@
+from abc import ABCMeta
+from abc import abstractmethod
+
+import six
+
+NOT_IMPLEMENTED_MESSAGE = "Galaxy tool format does not yet support this tool feature."
+
+
+@six.python_2_unicode_compatible
+class ToolSource(object):
+    """ This interface represents an abstract source to parse tool
+    information from.
+    """
+    __metaclass__ = ABCMeta
+    default_is_multi_byte = False
+
+    @abstractmethod
+    def parse_id(self):
+        """ Parse an ID describing the abstract tool. This is not the
+        GUID tracked by the tool shed but the simple id (there may be
+        multiple tools loaded in Galaxy with this same simple id).
+        """
+
+    @abstractmethod
+    def parse_version(self):
+        """ Parse a version describing the abstract tool.
+        """
+
+    def parse_tool_module(self):
+        """ Load Tool class from a custom module. (Optional).
+
+        If not None, return pair containing module and class (as strings).
+        """
+        return None
+
+    def parse_action_module(self):
+        """ Load Tool class from a custom module. (Optional).
+
+        If not None, return pair containing module and class (as strings).
+        """
+        return None
+
+    def parse_tool_type(self):
+        """ Load simple tool type string (e.g. 'data_source', 'default').
+        """
+        return None
+
+    @abstractmethod
+    def parse_name(self):
+        """ Parse a short name for tool (required). """
+
+    @abstractmethod
+    def parse_description(self):
+        """ Parse a description for tool. Longer than name, shorted than help. """
+
+    def parse_is_multi_byte(self):
+        """ Parse is_multi_byte from tool - TODO: figure out what this is and
+        document.
+        """
+        return self.default_is_multi_byte
+
+    def parse_display_interface(self, default):
+        """ Parse display_interface - fallback to default for the tool type
+        (supplied as default parameter) if not specified.
+        """
+        return default
+
+    def parse_require_login(self, default):
+        """ Parse whether the tool requires login (as a bool).
+        """
+        return default
+
+    def parse_request_param_translation_elem(self):
+        """ Return an XML element describing require parameter translation.
+
+        If we wish to support this feature for non-XML based tools this should
+        be converted to return some sort of object interface instead of a RAW
+        XML element.
+        """
+        return None
+
+    @abstractmethod
+    def parse_command(self):
+        """ Return string contianing command to run.
+        """
+
+    @abstractmethod
+    def parse_environment_variables(self):
+        """ Return environment variable templates to expose.
+        """
+
+    @abstractmethod
+    def parse_interpreter(self):
+        """ Return string containing the interpreter to prepend to the command
+        (for instance this might be 'python' to run a Python wrapper located
+        adjacent to the tool).
+        """
+
+    def parse_redirect_url_params_elem(self):
+        """ Return an XML element describing redirect_url_params.
+
+        If we wish to support this feature for non-XML based tools this should
+        be converted to return some sort of object interface instead of a RAW
+        XML element.
+        """
+        return None
+
+    def parse_version_command(self):
+        """ Parse command used to determine version of primary application
+        driving the tool. Return None to not generate or record such a command.
+        """
+        return None
+
+    def parse_version_command_interpreter(self):
+        """ Parse command used to determine version of primary application
+        driving the tool. Return None to not generate or record such a command.
+        """
+        return None
+
+    def parse_parallelism(self):
+        """ Return a galaxy.jobs.ParallismInfo object describing task splitting
+        or None.
+        """
+        return None
+
+    def parse_hidden(self):
+        """ Return boolean indicating whether tool should be hidden in the tool menu.
+        """
+        return False
+
+    def parse_sanitize(self):
+        """ Return boolean indicating whether tool should be sanitized or not.
+        """
+        return True
+
+    def parse_refresh(self):
+        """ Return boolean indicating ... I have no clue...
+        """
+        return False
+
+    @abstractmethod
+    def parse_requirements_and_containers(self):
+        """ Return pair of ToolRequirement and ContainerDescription lists. """
+
+    @abstractmethod
+    def parse_input_pages(self):
+        """ Return a PagesSource representing inputs by page for tool. """
+
+    @abstractmethod
+    def parse_outputs(self, tool):
+        """ Return a pair of output and output collections ordered
+        dictionaries for use by Tool.
+        """
+
+    @abstractmethod
+    def parse_strict_shell(self):
+        """ Return True if tool commands should be executed with
+        set -e.
+        """
+
+    @abstractmethod
+    def parse_stdio(self):
+        """ Builds lists of ToolStdioExitCode and ToolStdioRegex objects
+        to describe tool execution error conditions.
+        """
+        return [], []
+
+    @abstractmethod
+    def parse_help(self):
+        """ Return RST definition of help text for tool or None if the tool
+        doesn't define help text.
+        """
+
+    @abstractmethod
+    def parse_profile(self):
+        """ Return tool profile version as Galaxy major e.g. 16.01 or 16.04.
+        """
+
+    def parse_tests_to_dict(self):
+        return {'tests': []}
+
+    def __str__(self):
+        source_path = getattr(self, "_source_path", None)
+        if source_path:
+            as_str = u'%s[%s]' % (self.__class__.__name__, source_path)
+        else:
+            as_str = u'%s[In-memory]' % (self.__class__.__name__)
+        return as_str
+
+
+class PagesSource(object):
+    """ Contains a list of Pages - each a list of InputSources -
+    each item in the outer list representing a page of inputs.
+    Pages are deprecated so ideally this outer list will always
+    be exactly a singleton.
+    """
+    def __init__(self, page_sources):
+        self.page_sources = page_sources
+
+    @property
+    def inputs_defined(self):
+        return True
+
+
+class PageSource(object):
+    __metaclass__ = ABCMeta
+
+    def parse_display(self):
+        return None
+
+    @abstractmethod
+    def parse_input_sources(self):
+        """ Return a list of InputSource objects. """
+
+
+class InputSource(object):
+    __metaclass__ = ABCMeta
+    default_optional = False
+
+    def elem(self):
+        # For things in transition that still depend on XML - provide a way
+        # to grab it, and raise an error if the feature is attempted with
+        # non-XML tool sources.
+        raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE)
+
+    @abstractmethod
+    def get(self, key, value=None):
+        """ Return simple named properties as string for this input source.
+        The keys supported depend on the parameter type.
+        """
+
+    @abstractmethod
+    def get_bool(self, key, default):
+        """ Return simple named properties as boolean for this input source.
+        The keys supported depend on the parameter type.
+        """
+
+    def parse_label(self):
+        return self.get("label")
+
+    def parse_help(self):
+        return self.get("help")
+
+    def parse_sanitizer_elem(self):
+        """ Return an XML description of sanitizers. This is a stop gap
+        until we can rework galaxy.tools.parameters.sanitize to not
+        explicitly depend on XML.
+        """
+        return None
+
+    def parse_validator_elems(self):
+        """ Return an XML description of sanitizers. This is a stop gap
+        until we can rework galaxy.tools.parameters.validation to not
+        explicitly depend on XML.
+        """
+        return []
+
+    def parse_optional(self, default=None):
+        """ Return boolean indicating wheter parameter is optional. """
+        if default is None:
+            default = self.default_optional
+        return self.get_bool( "optional", default )
+
+    def parse_dynamic_options_elem(self):
+        """ Return an XML elemnt describing dynamic options.
+        """
+        return None
+
+    def parse_static_options(self):
+        """ Return list of static options if this is a select type without
+        defining a dynamic options.
+        """
+        return []
+
+    def parse_conversion_tuples(self):
+        """ Return list of (name, extension) to describe explicit conversions.
+        """
+        return []
+
+    def parse_nested_inputs_source(self):
+        # For repeats
+        raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE)
+
+    def parse_test_input_source(self):
+        # For conditionals
+        raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE)
+
+    def parse_when_input_sources(self):
+        raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE)
+
+
+class ToolStdioRegex( object ):
+    """
+    This is a container for the <stdio> element's regex subelement.
+    The regex subelement has a "match" attribute, a "sources"
+    attribute that contains "output" and/or "error", and a "level"
+    attribute that contains "warning" or "fatal".
+    """
+    def __init__( self ):
+        self.match = ""
+        self.stdout_match = False
+        self.stderr_match = False
+        # TODO: Define a common class or constant for error level:
+        self.error_level = "fatal"
+        self.desc = ""
+
+
+class ToolStdioExitCode( object ):
+    """
+    This is a container for the <stdio> element's <exit_code> subelement.
+    The exit_code element has a range of exit codes and the error level.
+    """
+    def __init__( self ):
+        self.range_start = float( "-inf" )
+        self.range_end = float( "inf" )
+        # TODO: Define a common class or constant for error level:
+        self.error_level = "fatal"
+        self.desc = ""
+
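The two container classes above are plain value holders. A sketch of assumed usage, mirroring how CwlToolSource.parse_stdio (earlier in this patch) marks any non-zero exit status as fatal:

    low = ToolStdioExitCode()
    low.range_start, low.range_end = float('-inf'), -1
    high = ToolStdioExitCode()
    high.range_start, high.range_end = 1, float('inf')
    # both keep the default error_level ("fatal"); exit status 0 falls in
    # neither range and is therefore treated as success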
+
+class TestCollectionDef( object ):
+    # TODO: do not require XML directly here.
+
+    def __init__( self, elem, parse_param_elem ):
+        self.elements = []
+        attrib = dict( elem.attrib )
+        self.collection_type = attrib[ "type" ]
+        self.name = attrib.get( "name", "Unnamed Collection" )
+        for element in elem.findall( "element" ):
+            element_attrib = dict( element.attrib )
+            element_identifier = element_attrib[ "name" ]
+            nested_collection_elem = element.find( "collection" )
+            if nested_collection_elem is not None:
+                self.elements.append( ( element_identifier, TestCollectionDef( nested_collection_elem, parse_param_elem ) ) )
+            else:
+                self.elements.append( ( element_identifier, parse_param_elem( element ) ) )
+
+    def collect_inputs( self ):
+        inputs = []
+        for element in self.elements:
+            value = element[ 1 ]
+            if isinstance( value, TestCollectionDef ):
+                inputs.extend( value.collect_inputs() )
+            else:
+                inputs.append( value )
+        return inputs
+
+
+class TestCollectionOutputDef( object ):
+
+    def __init__( self, name, attrib, element_tests ):
+        self.name = name
+        self.collection_type = attrib.get( "type", None )
+        count = attrib.get("count", None)
+        self.count = int(count) if count is not None else None
+        self.attrib = attrib
+        self.element_tests = element_tests
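A small sketch of TestCollectionDef parsing a nested <collection> test definition; the lambda stands in for the parse_param_elem callback a real caller would supply:

    from xml.etree.ElementTree import XML

    elem = XML('<collection type="paired">'
               '<element name="forward"/>'
               '<element name="reverse"/>'
               '</collection>')
    tc = TestCollectionDef(elem, parse_param_elem=lambda e: e.get('name'))
    tc.collect_inputs()  # -> ['forward', 'reverse']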
diff --git a/lib/galaxy/tools/parser/output_actions.py b/lib/galaxy/tools/parser/output_actions.py
new file mode 100644
index 0000000..2234cd4
--- /dev/null
+++ b/lib/galaxy/tools/parser/output_actions.py
@@ -0,0 +1,663 @@
+"""
+Support for dynamically modifying output attributes.
+"""
+
+import logging
+import os.path
+import re
+import string
+
+from galaxy import util
+
+log = logging.getLogger( __name__ )
+
+# Attributes tool developer may want to query on dataset collections.
+COLLECTION_ATTRIBUTES = [ "collection_type" ]
+
+
+class ToolOutputActionGroup( object ):
+    """
+    Manages a set of tool output dataset action directives
+    """
+    tag = "group"
+
+    def __init__( self, parent, config_elem ):
+        self.parent = parent
+        self.actions = []
+        if config_elem is not None:
+            for elem in config_elem:
+                if elem.tag == "conditional":
+                    self.actions.append( ToolOutputActionConditional( self, elem ) )
+                elif elem.tag == "action":
+                    self.actions.append( ToolOutputAction.from_elem( self, elem ) )
+                else:
+                    log.debug( "Unknown ToolOutputAction tag specified: %s" % elem.tag )
+
+    def apply_action( self, output_dataset, other_values ):
+        for action in self.actions:
+            action.apply_action( output_dataset, other_values )
+
+    @property
+    def tool( self ):
+        return self.parent.tool
+
+    def __len__( self ):
+        return len( self.actions )
+
+
+class ToolOutputActionConditionalWhen( ToolOutputActionGroup ):
+    tag = "when"
+
+    @classmethod
+    def from_elem( cls, parent, when_elem ):
+        """Loads the proper when by attributes of elem"""
+        when_value = when_elem.get( "value", None )
+        if when_value is not None:
+            return ValueToolOutputActionConditionalWhen( parent, when_elem, when_value )
+        else:
+            when_value = when_elem.get( "datatype_isinstance", None )
+            if when_value is not None:
+                return DatatypeIsInstanceToolOutputActionConditionalWhen( parent, when_elem, when_value )
+        raise TypeError( "When type not implemented" )
+
+    def __init__( self, parent, config_elem, value ):
+        super( ToolOutputActionConditionalWhen, self ).__init__( parent, config_elem )
+        self.value = value
+
+    def is_case( self, output_dataset, other_values ):
+        raise TypeError( "Not implemented" )
+
+    def get_ref( self, output_dataset, other_values ):
+        ref = other_values
+        for ref_name in self.parent.name:
+            assert ref_name in ref, "Required dependency '%s' not found in incoming values" % ref_name
+            ref = ref.get( ref_name )
+        return ref
+
+    def apply_action( self, output_dataset, other_values ):
+        if self.is_case( output_dataset, other_values ):
+            return super( ToolOutputActionConditionalWhen, self ).apply_action( output_dataset, other_values )
+
+
+class ValueToolOutputActionConditionalWhen( ToolOutputActionConditionalWhen ):
+    tag = "when value"
+
+    def is_case( self, output_dataset, other_values ):
+        ref = self.get_ref( output_dataset, other_values )
+        return bool( str( ref ) == self.value )
+
+
+class DatatypeIsInstanceToolOutputActionConditionalWhen( ToolOutputActionConditionalWhen ):
+    tag = "when datatype_isinstance"
+
+    def __init__( self, parent, config_elem, value ):
+        super( DatatypeIsInstanceToolOutputActionConditionalWhen, self ).__init__( parent, config_elem, value )
+        self.value = type( self.tool.app.datatypes_registry.get_datatype_by_extension( value ) )
+
+    def is_case( self, output_dataset, other_values ):
+        ref = self.get_ref( output_dataset, other_values )
+        return isinstance( ref.datatype, self.value )
+
+
+class ToolOutputActionConditional( object ):
+    tag = "conditional"
+
+    def __init__( self, parent, config_elem ):
+        self.parent = parent
+        self.name = config_elem.get( 'name', None )
+        assert self.name is not None, "Required 'name' attribute missing from ToolOutputActionConditional"
+        self.name = self.name.split( '.' )
+        self.cases = []
+        for when_elem in config_elem.findall( 'when' ):
+            self.cases.append( ToolOutputActionConditionalWhen.from_elem( self, when_elem ) )
+
+    def apply_action( self, output_dataset, other_values ):
+        for case in self.cases:
+            case.apply_action( output_dataset, other_values )
+
+    @property
+    def tool( self ):
+        return self.parent.tool
+
+
+class ToolOutputAction( object ):
+    tag = "action"
+
+    @classmethod
+    def from_elem( cls, parent, elem ):
+        """Loads the proper action by the type attribute of elem"""
+        action_type = elem.get( 'type', None )
+        assert action_type is not None, "Required 'type' attribute missing from ToolOutputAction"
+        return action_types[ action_type ]( parent, elem )
+
+    def __init__( self, parent, elem ):
+        self.parent = parent
+        self.default = elem.get( 'default', None )
+        option_elem = elem.find( 'option' )
+        self.option = ToolOutputActionOption.from_elem( self, option_elem )
+
+    def apply_action( self, output_dataset, other_values ):
+        raise TypeError( "Not implemented" )
+
+    @property
+    def tool( self ):
+        return self.parent.tool
+
+
+class ToolOutputActionOption( object ):
+    tag = "object"
+
+    @classmethod
+    def from_elem( cls, parent, elem ):
+        """Loads the proper action by the type attribute of elem"""
+        if elem is None:
+            option_type = NullToolOutputActionOption.tag  # no ToolOutputActionOption has been defined, use the implicit NullToolOutputActionOption
+        else:
+            option_type = elem.get( 'type', None )
+        assert option_type is not None, "Required 'type' attribute missing from ToolOutputActionOption"
+        return option_types[ option_type ]( parent, elem )
+
+    def __init__( self, parent, elem ):
+        self.parent = parent
+        self.filters = []
+        if elem is not None:
+            for filter_elem in elem.findall( 'filter' ):
+                self.filters.append( ToolOutputActionOptionFilter.from_elem( self, filter_elem ) )
+
+    def get_value( self, other_values ):
+        raise TypeError( "Not implemented" )
+
+    @property
+    def tool( self ):
+        return self.parent.tool
+
+
+class NullToolOutputActionOption( ToolOutputActionOption ):
+    tag = "null_option"
+
+    def get_value( self, other_values ):
+        return None
+
+
+class FromFileToolOutputActionOption( ToolOutputActionOption ):
+    tag = "from_file"
+
+    def __init__( self, parent, elem ):
+        super( FromFileToolOutputActionOption, self ).__init__( parent, elem )
+        self.name = elem.get( 'name', None )
+        assert self.name is not None, "Required 'name' attribute missing from FromFileToolOutputActionOption"
+        self.column = elem.get( 'column', None )
+        assert self.column is not None, "Required 'column' attribute missing from FromFileToolOutputActionOption"
+        self.column = int( self.column )
+        self.offset = elem.get( 'offset', -1 )
+        self.offset = int( self.offset )
+        self.separator = elem.get( 'separator', '\t' )
+        self.options = []
+        data_file = self.name
+        if not os.path.isabs( data_file ):
+            data_file = os.path.join( self.tool.app.config.tool_data_path, data_file )
+        for line in open( data_file ):
+            self.options.append( line.rstrip( '\n\r' ).split( self.separator ) )
+
+    def get_value( self, other_values ):
+        options = self.options
+        for filter in self.filters:
+            options = filter.filter_options( options, other_values )
+        try:
+            if options:
+                return str( options[ self.offset ][ self.column ] )
+        except Exception as e:
+            log.debug( "Error in FromFileToolOutputActionOption get_value: %s" % e )
+        return None
+
+
+class FromParamToolOutputActionOption( ToolOutputActionOption ):
+    tag = "from_param"
+
+    def __init__( self, parent, elem ):
+        super( FromParamToolOutputActionOption, self ).__init__( parent, elem )
+        self.name = elem.get( 'name', None )
+        assert self.name is not None, "Required 'name' attribute missing from FromFileToolOutputActionOption"
+        self.name = self.name.split( '.' )
+        self.column = elem.get( 'column', 0 )
+        self.column = int( self.column )
+        self.offset = elem.get( 'offset', -1 )
+        self.offset = int( self.offset )
+        self.param_attribute = elem.get( 'param_attribute', [] )
+        if self.param_attribute:
+            self.param_attribute = self.param_attribute.split( '.' )
+
+    def get_value( self, other_values ):
+        value = other_values
+        for ref_name in self.name:
+            assert ref_name in value, "Required dependency '%s' not found in incoming values" % ref_name
+            value = value.get( ref_name )
+        for attr_name in self.param_attribute:
+            # if the value is a list from a repeat tag you can access the first element of the repeat with the
+            # artificial 'first' attribute name. For example: .. param_attribute="first.input_mate1.ext"
+            if isinstance(value, list) and attr_name == 'first':
+                value = value[0]
+            elif isinstance(value, dict):
+                value = value[ attr_name ]
+            elif hasattr( value, "collection" ) and value not in COLLECTION_ATTRIBUTES:
+                # if this is an HDCA for instance let reverse.ext grab
+                # the reverse element and then continue for loop to grab
+                # dataset extension
+                value = value.collection[ attr_name ].element_object
+            elif hasattr( value, "collection" ) and value in COLLECTION_ATTRIBUTES:
+                value = getattr( value.collection, attr_name )
+            else:
+                value = getattr( value, attr_name )
+        options = [ [ str( value ) ] ]
+        for filter in self.filters:
+            options = filter.filter_options( options, other_values )
+        try:
+            if options:
+                return str( options[ self.offset ][ self.column ] )
+        except Exception as e:
+            log.debug( "Error in FromParamToolOutputActionOption get_value: %s" % e )
+        return None
+
+
+class FromDataTableOutputActionOption( ToolOutputActionOption ):
+    tag = "from_data_table"
+
+    # TODO: allow accessing by column 'name' not just index
+    def __init__( self, parent, elem ):
+        super( FromDataTableOutputActionOption, self ).__init__( parent, elem )
+        self.name = elem.get( 'name', None )
+        assert self.name is not None, "Required 'name' attribute missing from FromDataTableOutputActionOption"
+        self.missing_tool_data_table_name = None
+        if self.name in self.tool.app.tool_data_tables:
+            self.options = self.tool.app.tool_data_tables[ self.name ].get_fields()
+            self.column = elem.get( 'column', None )
+            assert self.column is not None, "Required 'column' attribute missing from FromDataTableOutputActionOption"
+            self.column = int( self.column )
+            self.offset = elem.get( 'offset', -1 )
+            self.offset = int( self.offset )
+        else:
+            self.options = []
+            self.missing_tool_data_table_name = self.name
+
+    def get_value( self, other_values ):
+        if self.options:
+            options = self.options
+        else:
+            options = []
+        for filter in self.filters:
+            options = filter.filter_options( options, other_values )
+        try:
+            if options:
+                return str( options[ self.offset ][ self.column ] )
+        except Exception as e:
+            log.debug( "Error in FromDataTableOutputActionOption get_value: %s" % e )
+        return None
+
+
+class MetadataToolOutputAction( ToolOutputAction ):
+    tag = "metadata"
+
+    def __init__( self, parent, elem ):
+        super( MetadataToolOutputAction, self ).__init__( parent, elem )
+        self.name = elem.get( 'name', None )
+        assert self.name is not None, "Required 'name' attribute missing from MetadataToolOutputAction"
+
+    def apply_action( self, output_dataset, other_values ):
+        value = self.option.get_value( other_values )
+        if value is None and self.default is not None:
+            value = self.default
+        if value is not None:
+            setattr( output_dataset.metadata, self.name, value )
+
+
+class FormatToolOutputAction( ToolOutputAction ):
+    tag = "format"
+
+    def __init__( self, parent, elem ):
+        super( FormatToolOutputAction, self ).__init__( parent, elem )
+        self.default = elem.get( 'default', None )
+
+    def apply_action( self, output_dataset, other_values ):
+        value = self.option.get_value( other_values )
+        if value is None and self.default is not None:
+            value = self.default
+        if value is not None:
+            output_dataset.extension = value
+
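A sketch of the fallback behavior above when an <action> has no <option> child: the implicit NullToolOutputActionOption yields None, so the default applies. The _Output stand-in is hypothetical, and the option_types registry defined later in this module is assumed:

    from xml.etree.ElementTree import XML

    class _Output(object):  # hypothetical stand-in for an output dataset
        extension = None

    action = FormatToolOutputAction(None, XML('<action type="format" default="tabular"/>'))
    out = _Output()
    action.apply_action(out, {})  # no option value -> the default wins
    assert out.extension == 'tabular'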
+
+class ToolOutputActionOptionFilter( object ):
+    tag = "filter"
+
+    @classmethod
+    def from_elem( cls, parent, elem ):
+        """Loads the proper action by the type attribute of elem"""
+        filter_type = elem.get( 'type', None )
+        assert filter_type is not None, "Required 'type' attribute missing from ToolOutputActionOptionFilter"
+        return filter_types[ filter_type ]( parent, elem )
+
+    def __init__( self, parent, elem ):
+        self.parent = parent
+
+    def filter_options( self, options, other_values ):
+        raise TypeError( "Not implemented" )
+
+    @property
+    def tool( self ):
+        return self.parent.tool
+
+
+class ParamValueToolOutputActionOptionFilter( ToolOutputActionOptionFilter ):
+    tag = "param_value"
+
+    def __init__( self, parent, elem ):
+        super( ParamValueToolOutputActionOptionFilter, self ).__init__( parent, elem )
+        self.ref = elem.get( 'ref', None )
+        if self.ref:
+            self.ref = self.ref.split( '.' )
+        self.value = elem.get( 'value', None )
+        assert self.ref != self.value, "Required 'ref' or 'value' attribute missing from ParamValueToolOutputActionOptionFilter"
+        self.column = elem.get( 'column', None )
+        assert self.column is not None, "Required 'column' attribute missing from ParamValueToolOutputActionOptionFilter"
+        self.column = int( self.column )
+        self.keep = util.string_as_bool( elem.get( "keep", 'True' ) )
+        self.compare = parse_compare_type( elem.get( 'compare', None ) )
+        self.cast = parse_cast_attribute( elem.get( "cast", None ) )
+        self.param_attribute = elem.get( 'param_attribute', [] )
+        if self.param_attribute:
+            self.param_attribute = self.param_attribute.split( '.' )
+
+    def filter_options( self, options, other_values ):
+        if self.ref:
+            # find ref value
+            value = other_values
+            for ref_name in self.ref:
+                assert ref_name in value, "Required dependency '%s' not found in incoming values" % ref_name
+                value = value.get( ref_name )
+            for attr_name in self.param_attribute:
+                value = getattr( value, attr_name )
+            value = str( value )
+        else:
+            value = self.value
+        value = self.cast( value )
+        rval = []
+        for fields in options:
+            try:
+                if self.keep == ( self.compare( self.cast( fields[self.column] ), value ) ):
+                    rval.append( fields )
+            except Exception as e:
+                log.debug(e)
+                continue  # likely a bad cast or column out of range
+        return rval
+
+
+class InsertColumnToolOutputActionOptionFilter( ToolOutputActionOptionFilter ):
+    tag = "insert_column"
+
+    def __init__( self, parent, elem ):
+        super( InsertColumnToolOutputActionOptionFilter, self ).__init__( parent, elem )
+        self.ref = elem.get( 'ref', None )
+        if self.ref:
+            self.ref = self.ref.split( '.' )
+        self.value = elem.get( 'value', None )
+        assert self.ref != self.value, "Required 'ref' or 'value' attribute missing from InsertColumnToolOutputActionOptionFilter"
+        self.column = elem.get( 'column', None )  # None is append
+        if self.column:
+            self.column = int( self.column )
+        self.iterate = util.string_as_bool( elem.get( "iterate", 'False' ) )
+
+    def filter_options( self, options, other_values ):
+        if self.ref:
+            # find ref value
+            value = other_values
+            for ref_name in self.ref:
+                assert ref_name in value, "Required dependency '%s' not found in incoming values" % ref_name
+                value = value.get( ref_name )
+            value = str( value )
+        else:
+            value = self.value
+        if self.iterate:
+            value = int( value )
+        rval = []
+        for fields in options:
+            if self.column is None:
+                rval.append( fields + [ str( value ) ] )
+            else:
+                fields = list( fields )
+                fields.insert( self.column, str( value ) )
+                rval.append( fields )
+            if self.iterate:
+                value += 1
+        return rval
+
+
+class MultipleSplitterFilter( ToolOutputActionOptionFilter ):
+    tag = "multiple_splitter"
+
+    def __init__( self, parent, elem ):
+        super( MultipleSplitterFilter, self ).__init__( parent, elem )
+        self.column = elem.get( 'column', None )
+        assert self.column is not None, "Required 'column' attribute missing from MultipleSplitterFilter"
+        self.column = int( self.column )
+        self.separator = elem.get( "separator", "," )
+
+    def filter_options( self, options, other_values ):
+        rval = []
+        for fields in options:
+            for field in fields[self.column].split( self.separator ):
+                rval.append( fields[0:self.column] + [field] + fields[self.column + 1:] )
+        return rval
+
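+# Example (illustrative): MultipleSplitterFilter with column=1 and the
+# default "," separator turns [ [ 'a', 'x,y' ] ] into
+# [ [ 'a', 'x' ], [ 'a', 'y' ] ].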
+
+class ColumnStripFilter( ToolOutputActionOptionFilter ):
+    tag = "column_strip"
+
+    def __init__( self, parent, elem ):
+        super( ColumnStripFilter, self ).__init__( parent, elem )
+        self.column = elem.get( 'column', None )
+        assert self.column is not None, "Required 'column' attribute missing from ColumnStripFilter"
+        self.column = int( self.column )
+        self.strip = elem.get( "strip", None )
+
+    def filter_options( self, options, other_values ):
+        rval = []
+        for fields in options:
+            rval.append( fields[0:self.column] + [ fields[self.column].strip( self.strip ) ] + fields[self.column + 1:] )
+        return rval
+
+
+class ColumnReplaceFilter( ToolOutputActionOptionFilter ):
+    tag = "column_replace"
+
+    def __init__( self, parent, elem ):
+        super( ColumnReplaceFilter, self ).__init__( parent, elem )
+        self.old_column = elem.get( 'old_column', None )
+        self.old_value = elem.get( "old_value", None )
+        self.new_value = elem.get( "new_value", None )
+        self.new_column = elem.get( 'new_column', None )
+        assert ( bool( self.old_column ) ^ bool( self.old_value ) and bool( self.new_column ) ^ bool( self.new_value ) ), "ColumnReplaceFilter requires exactly one of 'old_column' or 'old_value' and exactly one of 'new_column' or 'new_value'"
+        self.column = elem.get( 'column', None )
+        assert self.column is not None, "Required 'column' attribute missing from ColumnReplaceFilter"
+        self.column = int( self.column )
+        if self.old_column is not None:
+            self.old_column = int( self.old_column )
+        if self.new_column is not None:
+            self.new_column = int( self.new_column )
+
+    def filter_options( self, options, other_values ):
+        rval = []
+        for fields in options:
+            if self.old_column:
+                old_value = fields[self.old_column]
+            else:
+                old_value = self.old_value
+            if self.new_column:
+                new_value = fields[self.new_column]
+            else:
+                new_value = self.new_value
+            rval.append( fields[0:self.column] + [ fields[self.column].replace( old_value, new_value ) ] + fields[self.column + 1:] )
+        return rval
+
+
+class MetadataValueFilter( ToolOutputActionOptionFilter ):
+    tag = "metadata_value"
+
+    def __init__( self, parent, elem ):
+        super( MetadataValueFilter, self ).__init__( parent, elem )
+        self.ref = elem.get( 'ref', None )
+        assert self.ref is not None, "Required 'ref' attribute missing from MetadataValueFilter"
+        self.ref = self.ref.split( '.' )
+        self.name = elem.get( 'name', None )
+        assert self.name is not None, "Required 'name' attribute missing from MetadataValueFilter"
+        self.column = elem.get( 'column', None )
+        assert self.column is not None, "Required 'column' attribute missing from MetadataValueFilter"
+        self.column = int( self.column )
+        self.keep = util.string_as_bool( elem.get( "keep", 'True' ) )
+        self.compare = parse_compare_type( elem.get( 'compare', None ) )
+
+    def filter_options( self, options, other_values ):
+        ref = other_values
+        for ref_name in self.ref:
+            assert ref_name in ref, "Required dependency '%s' not found in incoming values" % ref_name
+            ref = ref.get( ref_name )
+        value = str( getattr( ref.metadata, self.name ) )
+        rval = []
+        for fields in options:
+            if self.keep == ( self.compare( fields[self.column], value ) ):
+                rval.append( fields )
+        return rval
+
+
+class BooleanFilter( ToolOutputActionOptionFilter ):
+    tag = "boolean"
+
+    def __init__( self, parent, elem ):
+        super( BooleanFilter, self ).__init__( parent, elem )
+        self.column = elem.get( 'column', None )
+        assert self.column is not None, "Required 'column' attribute missing from BooleanFilter"
+        self.column = int( self.column )
+        self.keep = util.string_as_bool( elem.get( "keep", 'True' ) )
+        self.cast = parse_cast_attribute( elem.get( "cast", None ) )
+
+    def filter_options( self, options, other_values ):
+        rval = []
+        for fields in options:
+            try:
+                value = fields[self.column]
+                value = self.cast( value )
+            except Exception:
+                value = False  # unable to cast or access value; treat as false
+            if self.keep == bool( value ):
+                rval.append( fields )
+        return rval
+
+
+class StringFunctionFilter( ToolOutputActionOptionFilter ):
+    tag = "string_function"
+
+    def __init__( self, parent, elem ):
+        super( StringFunctionFilter, self ).__init__( parent, elem )
+        self.column = elem.get( 'column', None )
+        assert self.column is not None, "Required 'column' attribute missing from StringFunctionFilter"
+        self.column = int( self.column )
+        self.function = elem.get( "name", None )
+        assert self.function in [ 'lower', 'upper' ], "Required 'name' attribute missing or invalid on StringFunctionFilter"  # add function names as needed
+        self.function = getattr( string, self.function )
+
+    def filter_options( self, options, other_values ):
+        rval = []
+        for fields in options:
+            rval.append( fields[0:self.column] + [ self.function( fields[self.column] ) ] + fields[self.column + 1:] )
+        return rval
+
+
+# tag to class lookups
+action_types = {}
+for action_type in [ MetadataToolOutputAction, FormatToolOutputAction ]:
+    action_types[ action_type.tag ] = action_type
+
+option_types = {}
+for option_type in [ NullToolOutputActionOption, FromFileToolOutputActionOption, FromParamToolOutputActionOption, FromDataTableOutputActionOption ]:
+    option_types[ option_type.tag ] = option_type
+
+filter_types = {}
+for filter_type in [ ParamValueToolOutputActionOptionFilter, InsertColumnToolOutputActionOptionFilter, MultipleSplitterFilter, ColumnStripFilter, MetadataValueFilter, BooleanFilter, StringFunctionFilter, ColumnReplaceFilter ]:
+    filter_types[ filter_type.tag ] = filter_type
+
+
+# helper classes
+# determine cast function
+def parse_cast_attribute( cast ):
+    if cast == 'string_as_bool':
+        cast = util.string_as_bool
+    elif cast == 'int':
+        cast = int
+    elif cast == 'str':
+        cast = str
+    else:
+        # return value as-is
+        def cast(x):
+            return x
+    return cast
+
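+# e.g. parse_cast_attribute( 'int' )( '5' ) == 5; an unrecognized or absent
+# cast yields an identity function that returns the value unchanged.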
+
+# comparison
+def parse_compare_type( compare ):
+    if compare is None:
+        compare = 'eq'
+    assert compare in compare_types, "Invalid compare type specified: %s" % compare
+    return compare_types[ compare ]
+
+
+def compare_eq( value1, value2  ):
+    return value1 == value2
+
+
+def compare_neq( value1, value2  ):
+    return value1 != value2
+
+
+def compare_gt( value1, value2  ):
+    return value1 > value2
+
+
+def compare_gte( value1, value2  ):
+    return value1 >= value2
+
+
+def compare_lt( value1, value2  ):
+    return value1 < value2
+
+
+def compare_lte( value1, value2  ):
+    return value1 <= value2
+
+
+def compare_in( value1, value2 ):
+    return value1 in value2
+
+
+def compare_startswith( value1, value2 ):
+    return value1.startswith( value2 )
+
+
+def compare_endswith( value1, value2 ):
+    return value1.endswith( value2 )
+
+
+def compare_re_search( value1, value2 ):
+    # checks pattern=value2 in value1
+    return bool( re.search( value2, value1 ) )
+
+
+compare_types = {
+    'eq': compare_eq,
+    'neq': compare_neq,
+    'gt': compare_gt,
+    'gte': compare_gte,
+    'lt': compare_lt,
+    'lte': compare_lte,
+    'in': compare_in,
+    'startswith': compare_startswith,
+    'endswith': compare_endswith,
+    "re_search": compare_re_search
+}
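+
+# Usage sketch (illustrative): parse_compare_type( 'startswith' ) returns
+# compare_startswith, so a filter configured with compare="startswith" keeps
+# a row when compare_startswith( fields[column], value ) is True, e.g.
+# compare_startswith( 'hg19', 'hg' ) is True.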
diff --git a/lib/galaxy/tools/parser/output_collection_def.py b/lib/galaxy/tools/parser/output_collection_def.py
new file mode 100644
index 0000000..cd175a2
--- /dev/null
+++ b/lib/galaxy/tools/parser/output_collection_def.py
@@ -0,0 +1,75 @@
+""" This module define an abstract class for reasoning about Galaxy's
+dataset collection after jobs are finished.
+"""
+
+from galaxy.util import asbool
+
+DEFAULT_EXTRA_FILENAME_PATTERN = r"primary_DATASET_ID_(?P<designation>[^_]+)_(?P<visible>[^_]+)_(?P<ext>[^_]+)(_(?P<dbkey>[^_]+))?"
+DEFAULT_SORT_BY = "filename"
+DEFAULT_SORT_COMP = "lexical"
+
+
+# XML can describe custom patterns, but these literals describe named
+# patterns that will be replaced.
+NAMED_PATTERNS = {
+    "__default__": DEFAULT_EXTRA_FILENAME_PATTERN,
+    "__name__": r"(?P<name>.*)",
+    "__designation__": r"(?P<designation>.*)",
+    "__name_and_ext__": r"(?P<name>.*)\.(?P<ext>[^\.]+)?",
+    "__designation_and_ext__": r"(?P<designation>.*)\.(?P<ext>[^\._]+)?",
+}
+
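+# Example (illustrative): NAMED_PATTERNS[ "__name_and_ext__" ] applied to a
+# discovered file "sample1.fasta" captures name="sample1" and ext="fasta",
+# e.g. re.match( NAMED_PATTERNS[ "__name_and_ext__" ],
+# "sample1.fasta" ).groupdict() == {'name': 'sample1', 'ext': 'fasta'}.
+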
+INPUT_DBKEY_TOKEN = "__input__"
+LEGACY_DEFAULT_DBKEY = None  # don't use __input__ for legacy default collection
+
+
+def dataset_collector_descriptions_from_elem( elem, legacy=True ):
+    primary_dataset_elems = elem.findall( "discover_datasets" )
+    if len(primary_dataset_elems) == 0 and legacy:
+        return [ DEFAULT_DATASET_COLLECTOR_DESCRIPTION ]
+    else:
+        return [ DatasetCollectionDescription( **primary_elem.attrib ) for primary_elem in primary_dataset_elems ]
+
+
+def dataset_collector_descriptions_from_list( discover_datasets_dicts ):
+    return [ DatasetCollectionDescription( **kwds ) for kwds in discover_datasets_dicts ]
+
+
+class DatasetCollectionDescription(object):
+
+    def __init__( self, **kwargs ):
+        pattern = kwargs.get( "pattern", "__default__" )
+        if pattern in NAMED_PATTERNS:
+            pattern = NAMED_PATTERNS.get( pattern )
+        self.pattern = pattern
+        self.default_dbkey = kwargs.get( "dbkey", INPUT_DBKEY_TOKEN )
+        self.default_ext = kwargs.get( "ext", None )
+        if self.default_ext is None and "format" in kwargs:
+            self.default_ext = kwargs.get( "format" )
+        self.default_visible = asbool( kwargs.get( "visible", None ) )
+        self.directory = kwargs.get( "directory", None )
+        self.assign_primary_output = asbool( kwargs.get( 'assign_primary_output', False ) )
+        sort_by = kwargs.get( "sort_by", DEFAULT_SORT_BY )
+        if sort_by.startswith("reverse_"):
+            self.sort_reverse = True
+            sort_by = sort_by[len("reverse_"):]
+        else:
+            self.sort_reverse = False
+        if "_" in sort_by:
+            sort_comp, sort_by = sort_by.split("_", 1)
+            assert sort_comp in ["lexical", "numeric"]
+        else:
+            sort_comp = DEFAULT_SORT_COMP
+        assert sort_by in [
+            "filename",
+            "name",
+            "designation",
+            "dbkey"
+        ]
+        self.sort_key = sort_by
+        self.sort_comp = sort_comp
+
+
+DEFAULT_DATASET_COLLECTOR_DESCRIPTION = DatasetCollectionDescription(
+    dbkey=LEGACY_DEFAULT_DBKEY,  # the constructor reads the 'dbkey' keyword
+)
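+
+# sort_by parsing sketch (illustrative):
+# DatasetCollectionDescription( sort_by="reverse_numeric_name" ) yields
+# sort_reverse=True, sort_comp="numeric", sort_key="name"; the default is an
+# ascending lexical sort on "filename".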
diff --git a/lib/galaxy/tools/parser/output_objects.py b/lib/galaxy/tools/parser/output_objects.py
new file mode 100644
index 0000000..32421f2
--- /dev/null
+++ b/lib/galaxy/tools/parser/output_objects.py
@@ -0,0 +1,220 @@
+from galaxy.util.dictifiable import Dictifiable
+from galaxy.util.odict import odict
+
+
+class ToolOutputBase( Dictifiable, object ):
+
+    def __init__( self, name, label=None, filters=None, hidden=False ):
+        super( ToolOutputBase, self ).__init__()
+        self.name = name
+        self.label = label
+        self.filters = filters or []
+        self.hidden = hidden
+        self.collection = False
+
+
+class ToolOutput( ToolOutputBase ):
+    """
+    Represents an output dataset produced by a tool. For backward
+    compatibility this behaves as if it were the tuple::
+
+      (format, metadata_source, parent)
+    """
+
+    dict_collection_visible_keys = ( 'name', 'format', 'label', 'hidden' )
+
+    def __init__( self, name, format=None, format_source=None, metadata_source=None,
+                  parent=None, label=None, filters=None, actions=None, hidden=False,
+                  implicit=False ):
+        super( ToolOutput, self ).__init__( name, label=label, filters=filters, hidden=hidden )
+        self.format = format
+        self.format_source = format_source
+        self.metadata_source = metadata_source
+        self.parent = parent
+        self.actions = actions
+
+        # Initialize default values
+        self.change_format = []
+        self.implicit = implicit
+        self.from_work_dir = None
+
+    # Tuple emulation
+
+    def __len__( self ):
+        return 3
+
+    def __getitem__( self, index ):
+        if index == 0:
+            return self.format
+        elif index == 1:
+            return self.metadata_source
+        elif index == 2:
+            return self.parent
+        else:
+            raise IndexError( index )
+
+    def __iter__( self ):
+        return iter( ( self.format, self.metadata_source, self.parent ) )
+
+    def to_dict( self, view='collection', value_mapper=None, app=None ):
+        as_dict = super( ToolOutput, self ).to_dict( view=view, value_mapper=value_mapper )
+        format = self.format
+        if format and format != "input" and app:
+            edam_format = app.datatypes_registry.edam_formats.get(self.format)
+            as_dict["edam_format"] = edam_format
+            edam_data = app.datatypes_registry.edam_data.get(self.format)
+            as_dict["edam_data"] = edam_data
+        return as_dict
+
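+# Tuple-emulation sketch (illustrative): legacy callers can still unpack an
+# output, e.g. given out = ToolOutput( "out1", format="tabular",
+# metadata_source="in1" ), "fmt, meta, parent = out" gives
+# ('tabular', 'in1', None).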
+
+class ToolOutputCollection( ToolOutputBase ):
+    """
+    Represents a HistoryDatasetCollectionAssociation of output datasets produced
+    by a tool.
+
+    <outputs>
+      <collection type="list" label="${tool.name} on ${on_string} fasta">
+        <discover_datasets pattern="__name__" ext="fasta" visible="True" directory="outputFiles" />
+      </collection>
+      <collection type="paired" label="${tool.name} on ${on_string} paired reads">
+        <data name="forward" format="fastqsanger" />
+        <data name="reverse" format="fastqsanger"/>
+      </collection>
+    </outputs>
+    """
+
+    def __init__(
+        self,
+        name,
+        structure,
+        label=None,
+        filters=None,
+        hidden=False,
+        default_format="data",
+        default_format_source=None,
+        default_metadata_source=None,
+        inherit_format=False,
+        inherit_metadata=False
+    ):
+        super( ToolOutputCollection, self ).__init__( name, label=label, filters=filters, hidden=hidden )
+        self.collection = True
+        self.default_format = default_format
+        self.structure = structure
+        self.outputs = odict()
+
+        self.inherit_format = inherit_format
+        self.inherit_metadata = inherit_metadata
+
+        self.metadata_source = default_metadata_source
+        self.format_source = default_format_source
+        self.change_format = []  # TODO
+
+    def known_outputs( self, inputs, type_registry ):
+        if self.dynamic_structure:
+            return []
+
+        # This check is probably not right - we should verify structured_like
+        # is set, or that outputs exist and every output has a name.
+        if len( self.outputs ) > 1:
+            output_parts = [ToolOutputCollectionPart(self, k, v) for k, v in self.outputs.items()]
+        else:
+            # either must have specified structured_like or something worse
+            if self.structure.structured_like:
+                collection_prototype = inputs[ self.structure.structured_like ].collection
+            else:
+                collection_prototype = type_registry.prototype( self.structure.collection_type )
+
+            def prototype_dataset_element_to_output( element, parent_ids=[] ):
+                name = element.element_identifier
+                format = self.default_format
+                if self.inherit_format:
+                    format = element.dataset_instance.ext
+                output = ToolOutput(
+                    name,
+                    format=format,
+                    format_source=self.format_source,
+                    metadata_source=self.metadata_source,
+                    implicit=True,
+                )
+                if self.inherit_metadata:
+                    output.metadata_source = element.dataset_instance
+                return ToolOutputCollectionPart(
+                    self,
+                    element.element_identifier,
+                    output,
+                    parent_ids=parent_ids,
+                )
+
+            def prototype_collection_to_output( collection_prototype, parent_ids=[] ):
+                output_parts = []
+                for element in collection_prototype.elements:
+                    element_parts = []
+                    if not element.is_collection:
+                        element_parts.append(prototype_dataset_element_to_output( element, parent_ids ))
+                    else:
+                        new_parent_ids = parent_ids[:] + [element.element_identifier]
+                        element_parts.extend(prototype_collection_to_output(element.element_object, new_parent_ids))
+                    output_parts.extend(element_parts)
+
+                return output_parts
+
+            output_parts = prototype_collection_to_output( collection_prototype )
+
+        return output_parts
+
+    @property
+    def dynamic_structure(self):
+        return self.structure.dynamic
+
+    @property
+    def dataset_collector_descriptions(self):
+        if not self.dynamic_structure:
+            raise Exception("dataset_collector_descriptions called for output collection with static structure")
+        return self.structure.dataset_collector_descriptions
+
+
+class ToolOutputCollectionStructure( object ):
+
+    def __init__(
+        self,
+        collection_type,
+        collection_type_source,
+        structured_like,
+        dataset_collector_descriptions,
+    ):
+        self.collection_type = collection_type
+        self.collection_type_source = collection_type_source
+        self.structured_like = structured_like
+        self.dataset_collector_descriptions = dataset_collector_descriptions
+        if collection_type and collection_type_source:
+            raise ValueError("Cannot set both type and type_source on collection output.")
+        if collection_type is None and structured_like is None and dataset_collector_descriptions is None and collection_type_source is None:
+            raise ValueError( "Output collections must specify a 'type', 'type_source', 'structured_like', or 'discover_datasets'." )
+        if dataset_collector_descriptions and structured_like:
+            raise ValueError( "Cannot specify dynamic structure ('discover_datasets') and 'structured_like' attribute." )
+        self.dynamic = dataset_collector_descriptions is not None
+
+
+class ToolOutputCollectionPart( object ):
+
+    def __init__( self, output_collection_def, element_identifier, output_def, parent_ids=[] ):
+        self.output_collection_def = output_collection_def
+        self.element_identifier = element_identifier
+        self.output_def = output_def
+        self.parent_ids = parent_ids
+
+    @property
+    def effective_output_name( self ):
+        name = self.output_collection_def.name
+        part_name = self.element_identifier
+        effective_output_name = "%s|__part__|%s" % ( name, part_name )
+        return effective_output_name
+
+    @staticmethod
+    def is_named_collection_part_name( name ):
+        return "|__part__|" in name
+
+    @staticmethod
+    def split_output_name( name ):
+        assert ToolOutputCollectionPart.is_named_collection_part_name( name )
+        return name.split("|__part__|")
diff --git a/lib/galaxy/tools/parser/util.py b/lib/galaxy/tools/parser/util.py
new file mode 100644
index 0000000..03a967e
--- /dev/null
+++ b/lib/galaxy/tools/parser/util.py
@@ -0,0 +1,38 @@
+from .interface import ToolStdioExitCode
+from .interface import ToolStdioRegex
+
+
+def error_on_exit_code():
+    exit_code_lower = ToolStdioExitCode()
+    exit_code_lower.range_start = float("-inf")
+    exit_code_lower.range_end = -1
+    _set_fatal(exit_code_lower)
+    exit_code_high = ToolStdioExitCode()
+    exit_code_high.range_start = 1
+    exit_code_high.range_end = float("inf")
+    _set_fatal(exit_code_high)
+    return [exit_code_lower, exit_code_high], []
+
+
+def aggressive_error_checks():
+    exit_codes, _ = error_on_exit_code()
+    # these regexes are processed as case insensitive by default
+    regexes = [
+        _error_regex("exception:"),
+        _error_regex("error:")
+    ]
+    return exit_codes, regexes
+
+
+def _error_regex(match):
+    regex = ToolStdioRegex()
+    _set_fatal(regex)
+    regex.match = match
+    regex.stdout_match = True
+    regex.stderr_match = True
+    return regex
+
+
+def _set_fatal(obj):
+    from galaxy.jobs.error_level import StdioErrorLevel
+    obj.error_level = StdioErrorLevel.FATAL
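+
+# Usage sketch (illustrative): error_on_exit_code() returns a pair of exit
+# code ranges covering every non-zero exit code (marked fatal) and no
+# regexes; aggressive_error_checks() additionally flags case-insensitive
+# "exception:" / "error:" matches on both stdout and stderr.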
diff --git a/lib/galaxy/tools/parser/xml.py b/lib/galaxy/tools/parser/xml.py
new file mode 100644
index 0000000..0165267
--- /dev/null
+++ b/lib/galaxy/tools/parser/xml.py
@@ -0,0 +1,950 @@
+import logging
+import re
+import sys
+import traceback
+import uuid
+
+from math import isinf
+
+from galaxy.tools.deps import requirements
+from galaxy.util import string_as_bool, xml_text, xml_to_string
+from galaxy.util.odict import odict
+
+from .interface import (
+    InputSource,
+    PageSource,
+    PagesSource,
+    TestCollectionDef,
+    TestCollectionOutputDef,
+    ToolSource,
+    ToolStdioExitCode,
+    ToolStdioRegex,
+)
+from .output_actions import ToolOutputActionGroup
+from .output_collection_def import dataset_collector_descriptions_from_elem
+from .output_objects import (
+    ToolOutput,
+    ToolOutputCollection,
+    ToolOutputCollectionStructure
+)
+from .util import (
+    aggressive_error_checks,
+    error_on_exit_code,
+)
+
+
+log = logging.getLogger( __name__ )
+
+
+class XmlToolSource(ToolSource):
+    """ Responsible for parsing a tool from classic Galaxy representation.
+    """
+
+    def __init__(self, xml_tree, source_path=None):
+        self.xml_tree = xml_tree
+        self.root = xml_tree.getroot()
+        self._source_path = source_path
+        self.legacy_defaults = self.parse_profile() == "16.01"
+
+    def parse_version(self):
+        return self.root.get("version", None)
+
+    def parse_id(self):
+        return self.root.get("id")
+
+    def parse_tool_module(self):
+        root = self.root
+        if root.find( "type" ) is not None:
+            type_elem = root.find( "type" )
+            module = type_elem.get( 'module', 'galaxy.tools' )
+            cls = type_elem.get( 'class' )
+            return module, cls
+
+        return None
+
+    def parse_action_module(self):
+        root = self.root
+        action_elem = root.find( "action" )
+        if action_elem is not None:
+            module = action_elem.get( 'module' )
+            cls = action_elem.get( 'class' )
+            return module, cls
+        else:
+            return None
+
+    def parse_tool_type(self):
+        root = self.root
+        return root.get( 'tool_type' )
+
+    def parse_name(self):
+        return self.root.get( "name" )
+
+    def parse_edam_operations(self):
+        edam_ops = self.root.find("edam_operations")
+        if edam_ops is None:
+            return []
+        return [ edam_op.text for edam_op in edam_ops.findall("edam_operation") ]
+
+    def parse_edam_topics(self):
+        edam_topics = self.root.find("edam_topics")
+        if edam_topics is None:
+            return []
+        return [ edam_topic.text for edam_topic in edam_topics.findall("edam_topic") ]
+
+    def parse_description(self):
+        return xml_text(self.root, "description")
+
+    def parse_is_multi_byte(self):
+        return self._get_attribute_as_bool( "is_multi_byte", self.default_is_multi_byte )
+
+    def parse_display_interface(self, default):
+        return self._get_attribute_as_bool( "display_interface", default )
+
+    def parse_require_login(self, default):
+        return self._get_attribute_as_bool( "require_login", default )
+
+    def parse_request_param_translation_elem(self):
+        return self.root.find( "request_param_translation" )
+
+    def parse_command(self):
+        command_el = self._command_el
+        return ( ( command_el is not None ) and command_el.text ) or None
+
+    def parse_environment_variables(self):
+        environment_variables_el = self.root.find("environment_variables")
+        if environment_variables_el is None:
+            return []
+
+        environment_variables = []
+        for environment_variable_el in environment_variables_el.findall("environment_variable"):
+            definition = {
+                "name": environment_variable_el.get("name"),
+                "template": environment_variable_el.text,
+            }
+            environment_variables.append(
+                definition
+            )
+        return environment_variables
+
+    def parse_interpreter(self):
+        command_el = self._command_el
+        interpreter = (command_el is not None) and command_el.get("interpreter", None)
+        if interpreter and not self.legacy_defaults:
+            log.warning("Deprecated interpreter attribute on command element is now ignored.")
+            interpreter = None
+
+        return interpreter
+
+    def parse_version_command(self):
+        version_cmd = self.root.find("version_command")
+        if version_cmd is not None:
+            return version_cmd.text
+        else:
+            return None
+
+    def parse_version_command_interpreter(self):
+        if self.parse_version_command() is not None:
+            version_cmd = self.root.find("version_command")
+            version_cmd_interpreter = version_cmd.get( "interpreter", None )
+            if version_cmd_interpreter:
+                return version_cmd_interpreter
+        return None
+
+    def parse_parallelism(self):
+        parallelism = self.root.find("parallelism")
+        parallelism_info = None
+        if parallelism is not None and parallelism.get("method"):
+            from galaxy.jobs import ParallelismInfo
+            return ParallelismInfo(parallelism)
+        return parallelism_info
+
+    def parse_hidden(self):
+        hidden = xml_text(self.root, "hidden")
+        if hidden:
+            hidden = string_as_bool(hidden)
+        return hidden
+
+    def parse_redirect_url_params_elem(self):
+        return self.root.find("redirect_url_params")
+
+    def parse_sanitize(self):
+        return self._get_option_value("sanitize", True)
+
+    def parse_refresh(self):
+        return self._get_option_value("refresh", False)
+
+    def _get_option_value(self, key, default):
+        root = self.root
+        for option_elem in root.findall("options"):
+            if key in option_elem.attrib:
+                return string_as_bool(option_elem.get(key))
+
+        return default
+
+    @property
+    def _command_el(self):
+        return self.root.find("command")
+
+    def _get_attribute_as_bool( self, attribute, default, elem=None ):
+        if elem is None:
+            elem = self.root
+        return string_as_bool( elem.get( attribute, default ) )
+
+    def parse_requirements_and_containers(self):
+        return requirements.parse_requirements_from_xml(self.root)
+
+    def parse_input_pages(self):
+        return XmlPagesSource(self.root)
+
+    def parse_outputs(self, tool):
+        out_elem = self.root.find("outputs")
+        outputs = odict()
+        output_collections = odict()
+        if out_elem is None:
+            return outputs, output_collections
+
+        data_dict = odict()
+
+        def _parse(data_elem, **kwds):
+            output_def = self._parse_output(data_elem, tool, **kwds)
+            data_dict[output_def.name] = output_def
+            return output_def
+
+        map(_parse, out_elem.findall("data"))
+
+        for collection_elem in out_elem.findall("collection"):
+            name = collection_elem.get( "name" )
+            label = xml_text( collection_elem, "label" )
+            default_format = collection_elem.get( "format", "data" )
+            collection_type = collection_elem.get( "type", None )
+            collection_type_source = collection_elem.get( "type_source", None )
+            structured_like = collection_elem.get( "structured_like", None )
+            inherit_format = False
+            inherit_metadata = False
+            if structured_like:
+                inherit_format = string_as_bool( collection_elem.get( "inherit_format", None ) )
+                inherit_metadata = string_as_bool( collection_elem.get( "inherit_metadata", None ) )
+            default_format_source = collection_elem.get( "format_source", None )
+            default_metadata_source = collection_elem.get( "metadata_source", "" )
+            filters = collection_elem.findall( 'filter' )
+
+            dataset_collector_descriptions = None
+            if collection_elem.find( "discover_datasets" ) is not None:
+                dataset_collector_descriptions = dataset_collector_descriptions_from_elem( collection_elem, legacy=False )
+            structure = ToolOutputCollectionStructure(
+                collection_type=collection_type,
+                collection_type_source=collection_type_source,
+                structured_like=structured_like,
+                dataset_collector_descriptions=dataset_collector_descriptions,
+            )
+            output_collection = ToolOutputCollection(
+                name,
+                structure,
+                label=label,
+                filters=filters,
+                default_format=default_format,
+                inherit_format=inherit_format,
+                inherit_metadata=inherit_metadata,
+                default_format_source=default_format_source,
+                default_metadata_source=default_metadata_source,
+            )
+            outputs[output_collection.name] = output_collection
+
+            for data_elem in collection_elem.findall("data"):
+                _parse(
+                    data_elem,
+                    default_format=default_format,
+                    default_format_source=default_format_source,
+                    default_metadata_source=default_metadata_source,
+                )
+
+            for data_elem in collection_elem.findall("data"):
+                output_name = data_elem.get("name")
+                data = data_dict[output_name]
+                assert data
+                del data_dict[output_name]
+                output_collection.outputs[output_name] = data
+            output_collections[ name ] = output_collection
+
+        for output_def in data_dict.values():
+            outputs[output_def.name] = output_def
+        return outputs, output_collections
+
+    def _parse_output(
+        self,
+        data_elem,
+        tool,
+        default_format="data",
+        default_format_source=None,
+        default_metadata_source="",
+    ):
+        output = ToolOutput( data_elem.get("name") )
+        output_format = data_elem.get("format", default_format)
+        auto_format = string_as_bool( data_elem.get( "auto_format", "false" ) )
+        if auto_format and output_format != "data":
+            raise ValueError("Setting format and auto_format is not supported at this time.")
+        elif auto_format:
+            output_format = "_sniff_"
+        output.format = output_format
+        output.change_format = data_elem.findall("change_format")
+        output.format_source = data_elem.get("format_source", default_format_source)
+        output.metadata_source = data_elem.get("metadata_source", default_metadata_source)
+        output.parent = data_elem.get("parent", None)
+        output.label = xml_text( data_elem, "label" )
+        output.count = int( data_elem.get("count", 1) )
+        output.filters = data_elem.findall( 'filter' )
+        output.tool = tool
+        output.from_work_dir = data_elem.get("from_work_dir", None)
+        output.hidden = string_as_bool( data_elem.get("hidden", "") )
+        output.actions = ToolOutputActionGroup( output, data_elem.find( 'actions' ) )
+        output.dataset_collector_descriptions = dataset_collector_descriptions_from_elem( data_elem, legacy=self.legacy_defaults )
+        return output
+
+    def parse_stdio(self):
+        command_el = self._command_el
+        detect_errors = None
+        if command_el is not None:
+            detect_errors = command_el.get("detect_errors")
+        if detect_errors and detect_errors != "default":
+            if detect_errors == "exit_code":
+                return error_on_exit_code()
+            elif detect_errors == "aggressive":
+                return aggressive_error_checks()
+            else:
+                raise ValueError("Unknown detect_errors value encountered [%s]" % detect_errors)
+        elif len(self.root.findall('stdio')) == 0 and not self.legacy_defaults:
+            return error_on_exit_code()
+        else:
+            parser = StdioParser(self.root)
+            return parser.stdio_exit_codes, parser.stdio_regexes
+
+    def parse_strict_shell(self):
+        command_el = self._command_el
+        if command_el is not None:
+            return string_as_bool(command_el.get("strict", "False"))
+        elif self.legacy_defaults:
+            return False
+        else:
+            return True
+
+    def parse_help(self):
+        help_elem = self.root.find( 'help' )
+        return help_elem.text if help_elem is not None else None
+
+    def parse_tests_to_dict(self):
+        tests_elem = self.root.find("tests")
+        tests = []
+        rval = dict(
+            tests=tests
+        )
+
+        if tests_elem is not None:
+            for i, test_elem in enumerate(tests_elem.findall("test")):
+                tests.append(_test_elem_to_dict(test_elem, i))
+
+            _copy_to_dict_if_present(tests_elem, rval, ["interactor"])
+
+        return rval
+
+    def parse_profile(self):
+        # Pre-16.04 or default XML defaults
+        # - Use standard error for error detection.
+        # - Don't run shells with -e
+        # - Auto-check for implicit multiple outputs.
+        # - Auto-check for $param_file.
+        # - Enable buggy interpreter attribute.
+        return self.root.get("profile", "16.01")
+
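+# Profile example (illustrative): a tool whose root element declares
+# profile="16.04" opts into the newer defaults; parse_profile() returns
+# "16.04" and XmlToolSource.legacy_defaults is False.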
+
+def _test_elem_to_dict(test_elem, i):
+    rval = dict(
+        outputs=__parse_output_elems(test_elem),
+        output_collections=__parse_output_collection_elems(test_elem),
+        inputs=__parse_input_elems(test_elem, i),
+        expect_num_outputs=test_elem.get("expect_num_outputs"),
+        command=__parse_assert_list_from_elem( test_elem.find("assert_command") ),
+        stdout=__parse_assert_list_from_elem( test_elem.find("assert_stdout") ),
+        stderr=__parse_assert_list_from_elem( test_elem.find("assert_stderr") ),
+        expect_exit_code=test_elem.get("expect_exit_code"),
+        expect_failure=string_as_bool(test_elem.get("expect_failure", False)),
+        maxseconds=test_elem.get("maxseconds", None),
+    )
+    _copy_to_dict_if_present(test_elem, rval, ["interactor", "num_outputs"])
+    return rval
+
+
+def __parse_input_elems(test_elem, i):
+    __expand_input_elems( test_elem )
+    return __parse_inputs_elems( test_elem, i )
+
+
+def __parse_output_elems( test_elem ):
+    outputs = []
+    for output_elem in test_elem.findall( "output" ):
+        name, file, attributes = __parse_output_elem( output_elem )
+        outputs.append( ( name, file, attributes ) )
+    return outputs
+
+
+def __parse_output_elem( output_elem ):
+    attrib = dict( output_elem.attrib )
+    name = attrib.pop( 'name', None )
+    if name is None:
+        raise Exception( "Test output does not have a 'name'" )
+
+    file, attributes = __parse_test_attributes( output_elem, attrib, parse_discovered_datasets=True )
+    return name, file, attributes
+
+
+def __parse_command_elem( test_elem ):
+    assert_elem = test_elem.find("command")
+    return __parse_assert_list_from_elem( assert_elem )
+
+
+def __parse_output_collection_elems( test_elem ):
+    output_collections = []
+    for output_collection_elem in test_elem.findall( "output_collection" ):
+        output_collection_def = __parse_output_collection_elem( output_collection_elem )
+        output_collections.append( output_collection_def )
+    return output_collections
+
+
+def __parse_output_collection_elem( output_collection_elem ):
+    attrib = dict( output_collection_elem.attrib )
+    name = attrib.pop( 'name', None )
+    if name is None:
+        raise Exception( "Test output collection does not have a 'name'" )
+    element_tests = __parse_element_tests( output_collection_elem )
+    return TestCollectionOutputDef( name, attrib, element_tests )
+
+
+def __parse_element_tests( parent_element ):
+    element_tests = {}
+    for element in parent_element.findall("element"):
+        element_attrib = dict( element.attrib )
+        identifier = element_attrib.pop( 'name', None )
+        if identifier is None:
+            raise Exception( "Test primary dataset does not have a 'identifier'" )
+        element_tests[ identifier ] = __parse_test_attributes( element, element_attrib, parse_elements=True )
+    return element_tests
+
+
+def __parse_test_attributes( output_elem, attrib, parse_elements=False, parse_discovered_datasets=False ):
+    assert_list = __parse_assert_list( output_elem )
+
+    # Allow either 'file' or 'value' to specify a target file to compare the
+    # result with; 'file' was traditionally used by outputs and 'value' by
+    # extra files.
+    file = attrib.pop( 'file', attrib.pop( 'value', None ) )
+
+    # A file is no longer required if a list of assertions is present.
+    attributes = {}
+    # Method of comparison
+    attributes['compare'] = attrib.pop( 'compare', 'diff' ).lower()
+    # Number of lines to allow to vary in logs (for dates, etc)
+    attributes['lines_diff'] = int( attrib.pop( 'lines_diff', '0' ) )
+    # Allow a file size to vary if sim_size compare
+    attributes['delta'] = int( attrib.pop( 'delta', '10000' ) )
+    attributes['sort'] = string_as_bool( attrib.pop( 'sort', False ) )
+    extra_files = []
+    if 'ftype' in attrib:
+        attributes['ftype'] = attrib['ftype']
+    for extra in output_elem.findall( 'extra_files' ):
+        extra_files.append( __parse_extra_files_elem( extra ) )
+    metadata = {}
+    for metadata_elem in output_elem.findall( 'metadata' ):
+        metadata[ metadata_elem.get('name') ] = metadata_elem.get( 'value' )
+    md5sum = attrib.get("md5", None)
+    checksum = attrib.get("checksum", None)
+    element_tests = {}
+    if parse_elements:
+        element_tests = __parse_element_tests( output_elem )
+
+    primary_datasets = {}
+    if parse_discovered_datasets:
+        for primary_elem in ( output_elem.findall( "discovered_dataset" ) or [] ):
+            primary_attrib = dict( primary_elem.attrib )
+            designation = primary_attrib.pop( 'designation', None )
+            if designation is None:
+                raise Exception( "Test primary dataset does not have a 'designation'" )
+            primary_datasets[ designation ] = __parse_test_attributes( primary_elem, primary_attrib )
+
+    has_checksum = md5sum or checksum
+    has_nested_tests = extra_files or element_tests or primary_datasets
+    if not (assert_list or file or metadata or has_checksum or has_nested_tests):
+        raise Exception( "Test output defines nothing to check (e.g. must have a 'file' check against, assertions to check, metadata or checksum tests, etc...)")
+    attributes['assert_list'] = assert_list
+    attributes['extra_files'] = extra_files
+    attributes['metadata'] = metadata
+    attributes['md5'] = md5sum
+    attributes['checksum'] = checksum
+    attributes['elements'] = element_tests
+    attributes['primary_datasets'] = primary_datasets
+    return file, attributes
+
+
+def __parse_assert_list( output_elem ):
+    assert_elem = output_elem.find("assert_contents")
+    return __parse_assert_list_from_elem( assert_elem )
+
+
+def __parse_assert_list_from_elem( assert_elem ):
+    assert_list = None
+
+    def convert_elem(elem):
+        """ Converts and XML element to a dictionary format, used by assertion checking code. """
+        tag = elem.tag
+        attributes = dict( elem.attrib )
+        child_elems = list( elem.getchildren() )
+        converted_children = []
+        for child_elem in child_elems:
+            converted_children.append( convert_elem(child_elem) )
+        return {"tag": tag, "attributes": attributes, "children": converted_children}
+    if assert_elem is not None:
+        assert_list = []
+        for assert_child in list(assert_elem):
+            assert_list.append(convert_elem(assert_child))
+
+    return assert_list
+
+
+def __parse_extra_files_elem(extra):
+    # File or directory; when a directory, compare basename by basename.
+    attrib = dict(extra.attrib)
+    extra_type = attrib.pop('type', 'file')
+    extra_name = attrib.pop('name', None)
+    assert extra_type == 'directory' or extra_name is not None, \
+        'extra_files type (%s) requires a name attribute' % extra_type
+    extra_value, extra_attributes = __parse_test_attributes(extra, attrib)
+    return extra_type, extra_value, extra_name, extra_attributes
+
+
+def __expand_input_elems( root_elem, prefix="" ):
+    __append_prefix_to_params( root_elem, prefix )
+
+    repeat_elems = root_elem.findall( 'repeat' )
+    indices = {}
+    for repeat_elem in repeat_elems:
+        name = repeat_elem.get( "name" )
+        if name not in indices:
+            indices[ name ] = 0
+            index = 0
+        else:
+            index = indices[ name ] + 1
+            indices[ name ] = index
+
+        new_prefix = __prefix_join( prefix, name, index=index )
+        __expand_input_elems( repeat_elem, new_prefix )
+        __pull_up_params( root_elem, repeat_elem )
+        root_elem.remove( repeat_elem )
+
+    cond_elems = root_elem.findall( 'conditional' )
+    for cond_elem in cond_elems:
+        new_prefix = __prefix_join( prefix, cond_elem.get( "name" ) )
+        __expand_input_elems( cond_elem, new_prefix )
+        __pull_up_params( root_elem, cond_elem )
+        root_elem.remove( cond_elem )
+
+    section_elems = root_elem.findall( 'section' )
+    for section_elem in section_elems:
+        new_prefix = __prefix_join( prefix, section_elem.get( "name" ) )
+        __expand_input_elems( section_elem, new_prefix )
+        __pull_up_params( root_elem, section_elem )
+        root_elem.remove( section_elem )
+
+
+def __append_prefix_to_params( elem, prefix ):
+    for param_elem in elem.findall( 'param' ):
+        param_elem.set( "name", __prefix_join( prefix, param_elem.get( "name" ) ) )
+
+
+def __pull_up_params( parent_elem, child_elem ):
+    for param_elem in child_elem.findall( 'param' ):
+        parent_elem.append( param_elem )
+        child_elem.remove( param_elem )
+
+
+def __prefix_join( prefix, name, index=None ):
+    name = name if index is None else "%s_%d" % ( name, index )
+    return name if not prefix else "%s|%s" % ( prefix, name )
+
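+# Flattening example (illustrative): a test <param name="input"> inside the
+# first <repeat name="queries"> is renamed to "queries_0|input" by the
+# helpers above; inside <conditional name="cond"> it becomes "cond|input".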
+
+def _copy_to_dict_if_present( elem, rval, attributes ):
+    for attribute in attributes:
+        if attribute in elem.attrib:
+            rval[attribute] = elem.get(attribute)
+    return rval
+
+
+def __parse_inputs_elems( test_elem, i ):
+    raw_inputs = []
+    for param_elem in test_elem.findall( "param" ):
+        name, value, attrib = __parse_param_elem( param_elem, i )
+        raw_inputs.append( ( name, value, attrib ) )
+    return raw_inputs
+
+
+def __parse_param_elem( param_elem, i=0 ):
+    attrib = dict( param_elem.attrib )
+    if 'values' in attrib:
+        value = attrib[ 'values' ].split( ',' )
+    elif 'value' in attrib:
+        value = attrib['value']
+    else:
+        value = None
+    attrib['children'] = list( param_elem.getchildren() )
+    if attrib['children']:
+        # At this time we can assume that having children only occurs on
+        # DataToolParameter test items, but this could change and would
+        # require the parsing below to branch on the type of child items.
+        attrib['metadata'] = []
+        attrib['composite_data'] = []
+        attrib['edit_attributes'] = []
+        # Composite datasets need to be renamed uniquely
+        composite_data_name = None
+        for child in attrib['children']:
+            if child.tag == 'composite_data':
+                attrib['composite_data'].append( child )
+                if composite_data_name is None:
+                    # Generate a unique name; each test uses a
+                    # fresh history.
+                    composite_data_name = '_COMPOSITE_RENAMED_t%d_%s' \
+                        % ( i, uuid.uuid1().hex )
+            elif child.tag == 'metadata':
+                attrib['metadata'].append( child )
+            elif child.tag == 'edit_attributes':
+                attrib['edit_attributes'].append( child )
+            elif child.tag == 'collection':
+                attrib[ 'collection' ] = TestCollectionDef( child, __parse_param_elem )
+        if composite_data_name:
+            # Composite datasets need implicit renaming;
+            # inserted at front of list so explicit declarations
+            # take precedence
+            attrib['edit_attributes'].insert( 0, { 'type': 'name', 'value': composite_data_name } )
+    name = attrib.pop( 'name' )
+    return ( name, value, attrib )
+
+
+class StdioParser(object):
+
+    def __init__(self, root):
+        try:
+            self.stdio_exit_codes = list()
+            self.stdio_regexes = list()
+
+            # We should have a single <stdio> element, but handle the case for
+            # multiples.
+            # For every stdio element, add all of the exit_code and regex
+            # subelements that we find:
+            for stdio_elem in ( root.findall( 'stdio' ) ):
+                self.parse_stdio_exit_codes( stdio_elem )
+                self.parse_stdio_regexes( stdio_elem )
+        except Exception:
+            log.error( "Exception in parse_stdio! " + str(sys.exc_info()) )
+
+    def parse_stdio_exit_codes( self, stdio_elem ):
+        """
+        Parse the tool's <stdio> element's <exit_code> subelements.
+        This will add all of those elements, if any, to self.stdio_exit_codes.
+        """
+        try:
+            # Look for all <exit_code> elements. Each exit_code element must
+            # have a range/value.
+            # Exit-code ranges have precedence over a single exit code.
+            # So if there are value and range attributes, we use the range
+            # attribute. If there is neither a range nor a value, then print
+            # a warning and skip to the next.
+            for exit_code_elem in ( stdio_elem.findall( "exit_code" ) ):
+                exit_code = ToolStdioExitCode()
+                # Each exit code has an optional description that can be
+                # part of the "desc" or "description" attributes:
+                exit_code.desc = exit_code_elem.get( "desc" )
+                if exit_code.desc is None:
+                    exit_code.desc = exit_code_elem.get( "description" )
+                # Parse the error level:
+                exit_code.error_level = (
+                    self.parse_error_level( exit_code_elem.get( "level" )))
+                code_range = exit_code_elem.get( "range", "" )
+                if code_range is None:
+                    code_range = exit_code_elem.get( "value", "" )
+                if code_range is None:
+                    log.warning( "Tool stdio exit codes must have " +
+                                 "a range or value" )
+                    continue
+                # Parse the range. We look for:
+                #   :Y
+                #  X:
+                #  X:Y   - Split on the colon. We do not allow a colon
+                #          without a beginning or end, though we could.
+                # Also note that whitespace is eliminated.
+                # TODO: Turn this into a single match - it should be
+                # more efficient.
+                code_range = re.sub( "\s", "", code_range )
+                code_ranges = re.split( ":", code_range )
+                if ( len( code_ranges ) == 2 ):
+                    if ( code_ranges[0] is None or '' == code_ranges[0] ):
+                        exit_code.range_start = float( "-inf" )
+                    else:
+                        exit_code.range_start = int( code_ranges[0] )
+                    if ( code_ranges[1] is None or '' == code_ranges[1] ):
+                        exit_code.range_end = float( "inf" )
+                    else:
+                        exit_code.range_end = int( code_ranges[1] )
+                # If we got more than one colon, then ignore the exit code.
+                elif ( len( code_ranges ) > 2 ):
+                    log.warning( "Invalid tool exit_code range %s - ignored"
+                                 % code_range )
+                    continue
+                # Else we have a singular value. If it's not an integer, then
+                # we'll just write a log message and skip this exit_code.
+                else:
+                    try:
+                        exit_code.range_start = int( code_range )
+                    except ValueError:
+                        log.error( code_range )
+                        log.warning( "Invalid range start for tool's exit_code %s: exit_code ignored" % code_range )
+                        continue
+                    exit_code.range_end = exit_code.range_start
+                # TODO: Check if we got ">", ">=", "<", or "<=":
+                # Check that the range, regardless of how we got it,
+                # isn't bogus. If we have two infinite values, then
+                # the start must be -inf and the end must be +inf.
+                # So at least warn about this situation:
+                if ( isinf( exit_code.range_start ) and
+                     isinf( exit_code.range_end ) ):
+                    log.warning( "Tool exit_code range %s will match on " +
+                                 "all exit codes" % code_range )
+                self.stdio_exit_codes.append( exit_code )
+        except Exception:
+            log.error( "Exception in parse_stdio_exit_codes! " +
+                       str(sys.exc_info()) )
+            trace = sys.exc_info()[2]
+            if trace is not None:
+                trace_msg = repr( traceback.format_tb( trace ) )
+                log.error( "Traceback: %s" % trace_msg )
+
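+    # Example (illustrative): <exit_code range="1:" level="fatal" /> parses
+    # to range_start=1 and range_end=+inf; range=":-1" gives (-inf, -1); a
+    # bare value "42" gives (42, 42).
+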
+    def parse_stdio_regexes( self, stdio_elem ):
+        """
+        Look in the tool's <stdio> elem for all <regex> subelements
+        that define how to look for warnings and fatal errors in
+        stdout and stderr. This will add all such regex elements
+        to the tool's stdio_regexes list.
+        """
+        try:
+            # Look for every <regex> subelement. The regular expression
+            # will have "match" and "source" (or "src") attributes.
+            for regex_elem in ( stdio_elem.findall( "regex" ) ):
+                # TODO: Fill in ToolStdioRegex
+                regex = ToolStdioRegex()
+                # Each regex has an optional description that can be
+                # part of the "desc" or "description" attributes:
+                regex.desc = regex_elem.get( "desc" )
+                if regex.desc is None:
+                    regex.desc = regex_elem.get( "description" )
+                # Parse the error level
+                regex.error_level = (
+                    self.parse_error_level( regex_elem.get( "level" ) ) )
+                regex.match = regex_elem.get( "match", "" )
+                if regex.match is None:
+                    # TODO: Convert the offending XML element to a string
+                    log.warning( "Ignoring tool's stdio regex element %s - "
+                                 "the 'match' attribute must exist" )
+                    continue
+                # Parse the output sources. We look for the "src", "source",
+                # and "sources" attributes, in that order. If there is no
+                # such source, then the source defaults to stderr & stdout.
+                # Look for a comma and then look for "err", "error", "out",
+                # and "output":
+                output_srcs = regex_elem.get( "src" )
+                if output_srcs is None:
+                    output_srcs = regex_elem.get( "source" )
+                if output_srcs is None:
+                    output_srcs = regex_elem.get( "sources" )
+                if output_srcs is None:
+                    output_srcs = "output,error"
+                output_srcs = re.sub( "\s", "", output_srcs )
+                src_list = re.split( ",", output_srcs )
+                # Just put together anything to do with "out", including
+                # "stdout", "output", etc. Repeat for "stderr", "error",
+                # and anything to do with "err". If neither stdout nor
+                # stderr were specified, then raise a warning and scan both.
+                for src in src_list:
+                    if re.search( "both", src, re.IGNORECASE ):
+                        regex.stdout_match = True
+                        regex.stderr_match = True
+                    if re.search( "out", src, re.IGNORECASE ):
+                        regex.stdout_match = True
+                    if re.search( "err", src, re.IGNORECASE ):
+                        regex.stderr_match = True
+                    if (not regex.stdout_match and not regex.stderr_match):
+                        log.warning( "Tool id %s: unable to determine if tool "
+                                     "stream source scanning is output, error, "
+                                     "or both. Defaulting to use both." % self.id )
+                        regex.stdout_match = True
+                        regex.stderr_match = True
+                self.stdio_regexes.append( regex )
+        except Exception:
+            log.error( "Exception in parse_stdio_exit_codes! " +
+                       str(sys.exc_info()) )
+            trace = sys.exc_info()[2]
+            if trace is not None:
+                trace_msg = repr( traceback.format_tb( trace ) )
+                log.error( "Traceback: %s" % trace_msg )
+
+    # TODO: This method doesn't have to be part of this class.
+    def parse_error_level( self, err_level ):
+        """
+        Parses error level and returns error level enumeration. If
+        unparsable, returns 'fatal'
+        """
+        from galaxy.jobs.error_level import StdioErrorLevel
+        return_level = StdioErrorLevel.FATAL
+        try:
+            if err_level:
+                if ( re.search( "log", err_level, re.IGNORECASE ) ):
+                    return_level = StdioErrorLevel.LOG
+                elif ( re.search( "warning", err_level, re.IGNORECASE ) ):
+                    return_level = StdioErrorLevel.WARNING
+                elif ( re.search( "fatal", err_level, re.IGNORECASE ) ):
+                    return_level = StdioErrorLevel.FATAL
+                else:
+                    log.debug( "Tool %s: error level %s did not match log/warning/fatal" %
+                               ( self.id, err_level ) )
+        except Exception:
+            log.error( "Exception in parse_error_level " +
+                       str(sys.exc_info() ) )
+            trace = sys.exc_info()[2]
+            if trace is not None:
+                trace_msg = repr( traceback.format_tb( trace ) )
+                log.error( "Traceback: %s" % trace_msg )
+        return return_level
+
+
+class XmlPagesSource(PagesSource):
+
+    def __init__(self, root):
+        self.input_elem = root.find("inputs")
+        page_sources = []
+        if self.input_elem is not None:
+            pages_elem = self.input_elem.findall( "page" )
+            for page in ( pages_elem or [ self.input_elem ] ):
+                page_sources.append(XmlPageSource(page))
+        super(XmlPagesSource, self).__init__(page_sources)
+
+    @property
+    def inputs_defined(self):
+        return self.input_elem is not None
+
+
+class XmlPageSource(PageSource):
+
+    def __init__(self, parent_elem):
+        self.parent_elem = parent_elem
+
+    def parse_display(self):
+        display_elem = self.parent_elem.find("display")
+        if display_elem is not None:
+            display = xml_to_string(display_elem)
+        else:
+            display = None
+        return display
+
+    def parse_input_sources(self):
+        return map(XmlInputSource, self.parent_elem)
+
+
+class XmlInputSource(InputSource):
+
+    def __init__(self, input_elem):
+        self.input_elem = input_elem
+        self.input_type = self.input_elem.tag
+
+    def parse_input_type(self):
+        return self.input_type
+
+    def elem(self):
+        return self.input_elem
+
+    def get(self, key, value=None):
+        return self.input_elem.get(key, value)
+
+    def get_bool(self, key, default):
+        return string_as_bool( self.get(key, default ) )
+
+    def parse_label(self):
+        return xml_text(self.input_elem, "label")
+
+    def parse_help(self):
+        return xml_text(self.input_elem, "help")
+
+    def parse_sanitizer_elem(self):
+        return self.input_elem.find("sanitizer")
+
+    def parse_validator_elems(self):
+        return self.input_elem.findall("validator")
+
+    def parse_dynamic_options_elem(self):
+        """ Return the <options> element, from which a
+        galaxy.tools.parameters.dynamic_options.DynamicOptions can be
+        built if appropriate.
+        """
+        return self.input_elem.find( 'options' )
+
+    def parse_static_options(self):
+        static_options = []
+        elem = self.input_elem
+        for option in elem.findall( "option" ):
+            value = option.get( "value" )
+            selected = string_as_bool( option.get( "selected", False ) )
+            static_options.append( ( option.text or value, value, selected ) )
+        return static_options
+
+    def parse_optional(self, default=None):
+        """ Return boolean indicating wheter parameter is optional. """
+        elem = self.input_elem
+        if self.get('type') == "data_column":
+            # Allow specifying force_select for backward compat., but probably
+            # should use optional going forward for consistency with other
+            # parameters.
+            if "force_select" in elem.attrib:
+                force_select = string_as_bool( elem.get( "force_select" ) )
+            else:
+                force_select = not string_as_bool( elem.get( "optional", False ) )
+            return not force_select
+
+        if default is None:
+            default = self.default_optional
+        return self.get_bool( "optional", default )
+
+    def parse_conversion_tuples(self):
+        elem = self.input_elem
+        conversions = []
+        for conv_elem in elem.findall( "conversion" ):
+            name = conv_elem.get( "name" )  # name for commandline substitution
+            conv_extensions = conv_elem.get( "type" )  # target datatype extension
+            conversions.append((name, conv_extensions))
+        return conversions
+
+    def parse_nested_inputs_source(self):
+        elem = self.input_elem
+        return XmlPageSource(elem)
+
+    def parse_test_input_source(self):
+        elem = self.input_elem
+        input_elem = elem.find( "param" )
+        assert input_elem is not None, "<conditional> must have a child <param>"
+        return XmlInputSource(input_elem)
+
+    def parse_when_input_sources(self):
+        elem = self.input_elem
+
+        sources = []
+        for case_elem in elem.findall( "when" ):
+            value = case_elem.get( "value" )
+            case_page_source = XmlPageSource(case_elem)
+            sources.append((value, case_page_source))
+        return sources
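+
+
+# A sketch (element and attribute names are illustrative, not from a real
+# tool) of the XML form parse_when_input_sources above consumes:
+#
+#     <conditional name="reference_source">
+#         <param name="source" type="select">...</param>
+#         <when value="cached">...</when>
+#         <when value="history">...</when>
+#     </conditional>
+#
+# Each <when> yields a (value, XmlPageSource) pair wrapping its children.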
diff --git a/lib/galaxy/tools/parser/yaml.py b/lib/galaxy/tools/parser/yaml.py
new file mode 100644
index 0000000..4450fb8
--- /dev/null
+++ b/lib/galaxy/tools/parser/yaml.py
@@ -0,0 +1,345 @@
+from galaxy.tools.deps import requirements
+from galaxy.util.odict import odict
+
+from .interface import InputSource
+from .interface import PageSource
+from .interface import PagesSource
+from .interface import ToolSource
+from .output_actions import ToolOutputActionGroup
+from .output_collection_def import dataset_collector_descriptions_from_list
+from .output_objects import (
+    ToolOutput,
+    ToolOutputCollection,
+    ToolOutputCollectionStructure,
+)
+from .util import error_on_exit_code
+
+
+class YamlToolSource(ToolSource):
+
+    def __init__(self, root_dict, source_path=None):
+        self.root_dict = root_dict
+        self._source_path = source_path
+
+    def parse_id(self):
+        return self.root_dict.get("id")
+
+    def parse_version(self):
+        return self.root_dict.get("version")
+
+    def parse_name(self):
+        return self.root_dict.get("name")
+
+    def parse_description(self):
+        return self.root_dict.get("description", "")
+
+    def parse_edam_operations(self):
+        return self.root_dict.get("edam_operations", [])
+
+    def parse_edam_topics(self):
+        return self.root_dict.get("edam_topics", [])
+
+    def parse_is_multi_byte(self):
+        return self.root_dict.get("is_multi_byte", self.default_is_multi_byte)
+
+    def parse_sanitize(self):
+        return self.root_dict.get("sanitize", True)
+
+    def parse_display_interface(self, default):
+        return self.root_dict.get('display_interface', default)
+
+    def parse_require_login(self, default):
+        return self.root_dict.get('require_login', default)
+
+    def parse_command(self):
+        return self.root_dict.get("command")
+
+    def parse_environment_variables(self):
+        return []
+
+    def parse_interpreter(self):
+        return self.root_dict.get("interpreter")
+
+    def parse_version_command(self):
+        return self.root_dict.get("runtime_version", {}).get("command", None)
+
+    def parse_version_command_interpreter(self):
+        return self.root_dict.get("runtime_version", {}).get("interpreter", None)
+
+    def parse_requirements_and_containers(self):
+        return requirements.parse_requirements_from_dict(self.root_dict)
+
+    def parse_input_pages(self):
+        # All YAML tools have only one page (feature is deprecated)
+        page_source = YamlPageSource(self.root_dict.get("inputs", {}))
+        return PagesSource([page_source])
+
+    def parse_strict_shell(self):
+        # TODO: Add ability to disable this.
+        return True
+
+    def parse_stdio(self):
+        return error_on_exit_code()
+
+    def parse_help(self):
+        return self.root_dict.get("help", None)
+
+    def parse_outputs(self, tool):
+        outputs = self.root_dict.get("outputs", {})
+        output_defs = []
+        output_collection_defs = []
+        for name, output_dict in outputs.items():
+            output_type = output_dict.get("type", "data")
+            if output_type == "data":
+                output_defs.append(self._parse_output(tool, name, output_dict))
+            elif output_type == "collection":
+                output_collection_defs.append(self._parse_output_collection(tool, name, output_dict))
+            else:
+                message = "Unknown output_type [%s] encountered." % output_type
+                raise Exception(message)
+        outputs = odict()
+        for output in output_defs:
+            outputs[output.name] = output
+        output_collections = odict()
+        for output in output_collection_defs:
+            output_collections[output.name] = output
+
+        return outputs, output_collections
+
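+    # A sketch of the YAML mapping parse_outputs above expects (keys are
+    # assumptions read off the .get() calls in the helpers below, not
+    # upstream docs):
+    #
+    #     outputs:
+    #       out_file:
+    #         format: tabular
+    #       out_parts:
+    #         type: collection
+    #         structured_like: input1
+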
+    def _parse_output(self, tool, name, output_dict):
+        # TODO: handle filters, actions, change_format
+        output = ToolOutput( name )
+        output.format = output_dict.get("format", "data")
+        output.change_format = []
+        output.format_source = output_dict.get("format_source", None)
+        output.metadata_source = output_dict.get("metadata_source", "")
+        output.parent = output_dict.get("parent", None)
+        output.label = output_dict.get( "label", None )
+        output.count = output_dict.get("count", 1)
+        output.filters = []
+        output.tool = tool
+        output.from_work_dir = output_dict.get("from_work_dir", None)
+        output.hidden = output_dict.get("hidden", "")
+        # TODO: implement tool output action group fixes
+        output.actions = ToolOutputActionGroup( output, None )
+        output.dataset_collector_descriptions = self._dataset_collector_descriptions( output_dict )
+        return output
+
+    def _parse_output_collection(self, tool, name, output_dict):
+        name = output_dict.get("name")
+        label = output_dict.get("label")
+        default_format = output_dict.get( "format", "data" )
+        collection_type = output_dict.get( "type", None )
+        collection_type_source = output_dict.get( "type_source", None )
+        structured_like = output_dict.get( "structured_like", None )
+        inherit_format = False
+        inherit_metadata = False
+        if structured_like:
+            inherit_format = output_dict.get( "inherit_format", None )
+            inherit_metadata = output_dict.get( "inherit_metadata", None )
+        default_format_source = output_dict.get( "format_source", None )
+        default_metadata_source = output_dict.get( "metadata_source", "" )
+        filters = []
+        dataset_collector_descriptions = self._dataset_collector_descriptions( output_dict )
+
+        structure = ToolOutputCollectionStructure(
+            collection_type=collection_type,
+            collection_type_source=collection_type_source,
+            structured_like=structured_like,
+            dataset_collector_descriptions=dataset_collector_descriptions,
+        )
+        output_collection = ToolOutputCollection(
+            name,
+            structure,
+            label=label,
+            filters=filters,
+            default_format=default_format,
+            inherit_format=inherit_format,
+            inherit_metadata=inherit_metadata,
+            default_format_source=default_format_source,
+            default_metadata_source=default_metadata_source,
+        )
+        return output_collection
+
+    def _dataset_collector_descriptions(self, discover_datasets_dicts):
+        if _is_dict(discover_datasets_dicts):
+            discover_datasets_dicts = [ discover_datasets_dicts ]
+        dataset_collector_descriptions = dataset_collector_descriptions_from_list( discover_datasets_dicts )
+        return dataset_collector_descriptions
+
+    def parse_tests_to_dict(self):
+        tests = []
+        rval = dict(
+            tests=tests
+        )
+
+        for i, test_dict in enumerate(self.root_dict.get("tests", [])):
+            tests.append(_parse_test(i, test_dict))
+
+        return rval
+
+    def parse_profile(self):
+        return self.root_dict.get("profile", "16.04")
+
+
+def _parse_test(i, test_dict):
+    inputs = test_dict["inputs"]
+    if _is_dict(inputs):
+        new_inputs = []
+        for key, value in inputs.items():
+            new_inputs.append((key, value, {}))
+        test_dict["inputs"] = new_inputs
+
+    outputs = test_dict["outputs"]
+
+    new_outputs = []
+    if _is_dict(outputs):
+        for key, value in outputs.items():
+            if _is_dict(value):
+                attributes = value
+                file = attributes.get("file")
+            else:
+                file = value
+                attributes = {}
+            new_outputs.append((key, file, attributes))
+    else:
+        for output in outputs:
+            name = output["name"]
+            value = output.get("file", None)
+            attributes = output
+            new_outputs.append((name, value, attributes))
+
+    for output in new_outputs:
+        attributes = output[2]
+        defaults = {
+            'compare': 'diff',
+            'lines_diff': 0,
+            'delta': 1000,
+            'sort': False,
+        }
+        # TODO
+        attributes["extra_files"] = []
+        # TODO
+        attributes["metadata"] = {}
+        assert_list = __to_test_assert_list( attributes.get( "asserts", [] ) )
+        attributes["assert_list"] = assert_list
+        _ensure_has(attributes, defaults)
+
+    test_dict["outputs"] = new_outputs
+    # TODO: implement output collections for YAML tools.
+    test_dict["output_collections"] = []
+    test_dict["command"] = __to_test_assert_list( test_dict.get( "command", [] ) )
+    test_dict["stdout"] = __to_test_assert_list( test_dict.get( "stdout", [] ) )
+    test_dict["stderr"] = __to_test_assert_list( test_dict.get( "stderr", [] ) )
+    test_dict["expect_exit_code"] = test_dict.get( "expect_exit_code", None )
+    test_dict["expect_failure"] = test_dict.get( "expect_exit_code", False )
+    return test_dict
+
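+# A sketch of the normalization above: a dict-form test such as
+#
+#     {"inputs": {"input1": "1.bed"}, "outputs": {"out_file1": "out.bed"}}
+#
+# becomes inputs [("input1", "1.bed", {})] and outputs
+# [("out_file1", "out.bed", {"compare": "diff", ...})] with the comparison
+# defaults filled in.
+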
+
+def _is_dict(item):
+    return isinstance(item, (dict, odict))
+
+
+def __to_test_assert_list(assertions):
+    def expand_dict_form(item):
+        key, value = item
+        new_value = value.copy()
+        new_value["that"] = key
+        return new_value
+
+    if _is_dict(assertions):
+        assertions = map(expand_dict_form, assertions.items() )
+
+    assert_list = []
+    for assertion in assertions:
+        # TODO: not handling nested assertions correctly,
+        # not sure these are used though.
+        children = []
+        if "children" in assertion:
+            children = assertion["children"]
+            del assertion["children"]
+        assert_dict = dict(
+            tag=assertion["that"],
+            attributes=assertion,
+            children=children,
+        )
+        assert_list.append(assert_dict)
+
+    return assert_list or None  # XML variant is None if no assertions made
+
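+# For instance, a dict-form assertion such as {"has_text": {"text": "chr1"}}
+# expands above to:
+#
+#     [{"tag": "has_text",
+#       "attributes": {"that": "has_text", "text": "chr1"},
+#       "children": []}]
+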
+
+class YamlPageSource(PageSource):
+
+    def __init__(self, inputs_list):
+        self.inputs_list = inputs_list
+
+    def parse_input_sources(self):
+        return map(YamlInputSource, self.inputs_list)
+
+
+class YamlInputSource(InputSource):
+
+    def __init__(self, input_dict):
+        self.input_dict = input_dict
+
+    def get(self, key, default=None):
+        return self.input_dict.get(key, default)
+
+    def get_bool(self, key, default):
+        return self.input_dict.get(key, default)
+
+    def parse_input_type(self):
+        input_type = self.input_dict["type"]
+        if input_type == "repeat":
+            return "repeat"
+        elif input_type == "conditional":
+            return "conditional"
+        else:
+            return "param"
+
+    def parse_nested_inputs_source(self):
+        assert self.parse_input_type() == "repeat"
+        return YamlPageSource(self.input_dict["blocks"])
+
+    def parse_test_input_source(self):
+        test_dict = self.input_dict.get( "test", None )
+        assert test_dict is not None, "conditional must contain a `test` definition"
+        return YamlInputSource(test_dict)
+
+    def parse_when_input_sources(self):
+        input_dict = self.input_dict
+
+        sources = []
+        for value, block in input_dict.get("when", {}).items():
+            if value is True:
+                value = "true"
+            elif value is False:
+                value = "false"
+            else:
+                value = str(value)
+
+            # str here to lose type information, as XML does - needed?
+            if not isinstance(block, list):
+                block = [block]
+            case_page_source = YamlPageSource(block)
+            sources.append((value, case_page_source))
+        return sources
+
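+    # e.g. a `when` mapping of {True: {...}, "hg19": {...}} above yields
+    # [("true", YamlPageSource([...])), ("hg19", YamlPageSource([...]))];
+    # boolean keys are coerced to the strings "true"/"false" (values
+    # illustrative).
+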
+    def parse_static_options(self):
+        static_options = []
+        input_dict = self.input_dict
+        for option in input_dict.get("options", {}):
+            value = option.get( "value" )
+            label = option.get( "label", value )
+            selected = option.get( "selected", False )
+            static_options.append( ( label, value, selected ) )
+        return static_options
+
+
+def _ensure_has(dict, defaults):
+    for key, value in defaults.items():
+        if key not in dict:
+            dict[key] = value
diff --git a/lib/galaxy/tools/search/__init__.py b/lib/galaxy/tools/search/__init__.py
new file mode 100644
index 0000000..6261781
--- /dev/null
+++ b/lib/galaxy/tools/search/__init__.py
@@ -0,0 +1,116 @@
+"""
+Module for building and searching the index of tools
+installed within this Galaxy.
+"""
+import logging
+import re
+import tempfile
+
+from galaxy.web.framework.helpers import to_unicode
+from datetime import datetime
+
+from whoosh.filedb.filestore import RamStorage, FileStorage
+from whoosh.fields import KEYWORD, Schema, STORED, TEXT
+from whoosh.scoring import BM25F
+from whoosh.qparser import MultifieldParser
+from whoosh import analysis
+
+
+log = logging.getLogger( __name__ )
+
+
+class ToolBoxSearch( object ):
+    """
+    Support searching tools in a toolbox. This implementation uses
+    the Whoosh search library.
+    """
+
+    def __init__( self, toolbox, index_help=True ):
+        """
+        Create a searcher for `toolbox`.
+        """
+        self.schema = Schema( id=STORED,
+                              stub=KEYWORD,
+                              name=TEXT( analyzer=analysis.SimpleAnalyzer() ),
+                              description=TEXT,
+                              section=TEXT,
+                              help=TEXT,
+                              labels=KEYWORD )
+        self.rex = analysis.RegexTokenizer()
+        self.toolbox = toolbox
+        self.build_index( index_help )
+
+    def build_index( self, index_help=True ):
+        # Works around https://bitbucket.org/mchaput/whoosh/issues/391/race-conditions-with-temp-storage
+        RamStorage.temp_storage = _temp_storage
+        self.storage = RamStorage()
+        self.index = self.storage.create_index( self.schema )
+        writer = self.index.writer()
+        start_time = datetime.now()
+        log.debug( 'Starting to build toolbox index.' )
+        for id, tool in self.toolbox.tools():
+            #  Do not add data managers to the public index
+            if tool.tool_type == 'manage_data':
+                continue
+            add_doc_kwds = {
+                "id": id,
+                "description": to_unicode( tool.description ),
+                "section": to_unicode( tool.get_panel_section()[1] if len( tool.get_panel_section() ) == 2 else '' ),
+                "help": to_unicode( "" )
+            }
+            # Hyphens are treated as wildcards in Whoosh and cause bad matches
+            if tool.name.find( '-' ) != -1:
+                add_doc_kwds['name'] = (' ').join( [ token.text for token in self.rex( to_unicode( tool.name ) ) ] )
+            else:
+                add_doc_kwds['name'] = to_unicode( tool.name )
+            # We do not want to search Tool Shed or version parts
+            # of the long ids
+            if id.find( '/' ) != -1:
+                slash_indexes = [ m.start() for m in re.finditer( '/', id ) ]
+                id_stub = id[ ( slash_indexes[1] + 1 ): slash_indexes[4] ]
+                add_doc_kwds['stub'] = (' ').join( [ token.text for token in self.rex( to_unicode( id_stub ) ) ] )
+            else:
+                add_doc_kwds['stub'] = to_unicode( id )
+            if tool.labels:
+                add_doc_kwds['labels'] = to_unicode( " ".join( tool.labels ) )
+            if index_help and tool.help:
+                try:
+                    add_doc_kwds['help'] = to_unicode( tool.help.render( host_url="", static_path="" ) )
+                except Exception:
+                    # Don't fail to build index just because a help message
+                    # won't render.
+                    pass
+            writer.add_document( **add_doc_kwds )
+        writer.commit()
+        stop_time = datetime.now()
+        log.debug( 'Toolbox index finished. It took: ' + str(stop_time - start_time) )
+
+    def search( self, q, tool_name_boost, tool_section_boost, tool_description_boost, tool_label_boost, tool_stub_boost, tool_help_boost, tool_search_limit ):
+        """
+        Perform search on the in-memory index. Weight in the given boosts.
+        """
+        # Change field boosts for searcher
+        searcher = self.index.searcher(
+            weighting=BM25F(
+                field_B={ 'name_B': float( tool_name_boost ),
+                          'section_B': float( tool_section_boost ),
+                          'description_B': float( tool_description_boost ),
+                          'labels_B': float( tool_label_boost ),
+                          'stub_B': float( tool_stub_boost ),
+                          'help_B': float( tool_help_boost ) }
+            )
+        )
+        # Set query to search name, description, section, help, and labels.
+        parser = MultifieldParser( [ 'name', 'description', 'section', 'help', 'labels', 'stub' ], schema=self.schema )
+        # Hyphens are treated as wildcards in Whoosh and cause bad matches
+        if q.find( '-' ) != -1:
+            q = (' ').join( [ token.text for token in self.rex( to_unicode( q ) ) ] )
+        # Perform the search
+        hits = searcher.search( parser.parse( '*' + q + '*' ), limit=float( tool_search_limit ) )
+        return [ hit[ 'id' ] for hit in hits ]
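+
+    # Example call (the instance name, boost and limit values are
+    # illustrative):
+    #
+    #     hits = toolbox_search.search( q="bwa", tool_name_boost=9,
+    #                                   tool_section_boost=3, tool_description_boost=2,
+    #                                   tool_label_boost=1, tool_stub_boost=5,
+    #                                   tool_help_boost=0.5, tool_search_limit=20 )
+    #
+    # returning a list of tool ids ordered by BM25F score.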
+
+
+def _temp_storage(self, name=None):
+    path = tempfile.mkdtemp()
+    tempstore = FileStorage(path)
+    return tempstore.create()
diff --git a/lib/galaxy/tools/special_tools.py b/lib/galaxy/tools/special_tools.py
new file mode 100644
index 0000000..4aa82a0
--- /dev/null
+++ b/lib/galaxy/tools/special_tools.py
@@ -0,0 +1,13 @@
+import logging
+log = logging.getLogger( __name__ )
+
+SPECIAL_TOOLS = {
+    "history export": "galaxy/tools/imp_exp/exp_history_to_archive.xml",
+    "history import": "galaxy/tools/imp_exp/imp_history_from_archive.xml",
+}
+
+
+def load_lib_tools( toolbox ):
+    for name, path in SPECIAL_TOOLS.items():
+        tool = toolbox.load_hidden_lib_tool( path )
+        log.debug( "Loaded %s tool: %s", name, tool.id )
diff --git a/lib/galaxy/tools/test.py b/lib/galaxy/tools/test.py
new file mode 100644
index 0000000..fb31a02
--- /dev/null
+++ b/lib/galaxy/tools/test.py
@@ -0,0 +1,384 @@
+import logging
+import os
+import os.path
+from six import string_types
+
+import galaxy.tools.parameters.basic
+import galaxy.tools.parameters.grouping
+from galaxy.util import string_as_bool
+
+try:
+    from nose.tools import nottest
+except ImportError:
+    def nottest(x):
+        return x
+
+log = logging.getLogger( __name__ )
+
+DEFAULT_FTYPE = 'auto'
+DEFAULT_DBKEY = 'hg17'
+DEFAULT_INTERACTOR = "api"  # Default mechanism test code uses for interacting with Galaxy instance.
+DEFAULT_MAX_SECS = None
+
+
+ at nottest
+def parse_tests(tool, tests_source):
+    """
+    Build a ToolTestBuilder object for each "<test>" element, using the
+    default interactor (if any).
+    """
+    default_interactor = os.environ.get( 'GALAXY_TEST_DEFAULT_INTERACTOR', DEFAULT_INTERACTOR )
+    tests_dict = tests_source.parse_tests_to_dict()
+    tests_default_interactor = tests_dict.get( 'interactor', default_interactor )
+    tests = []
+    for i, test_dict in enumerate(tests_dict.get('tests', [])):
+        test = ToolTestBuilder( tool, test_dict, i, default_interactor=tests_default_interactor )
+        tests.append( test )
+    return tests
+
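+# Typical use is a sketch like the following (`tool` comes from the toolbox;
+# `tool_source` and the runner are illustrative names):
+#
+#     for test in parse_tests( tool, tool_source ):
+#         run( test )
+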
+
+class ToolTestBuilder( object ):
+    """
+    Encapsulates information about a tool test, and allows creation of a
+    dynamic TestCase class (the unittest framework is very class-oriented;
+    building dynamic tests this way allows better integration).
+    """
+
+    def __init__( self, tool, test_dict, i, default_interactor ):
+        name = test_dict.get( 'name', 'Test-%d' % (i + 1) )
+        maxseconds = test_dict.get( 'maxseconds', DEFAULT_MAX_SECS )
+        if maxseconds is not None:
+            maxseconds = int( maxseconds )
+
+        self.tool = tool
+        self.name = name
+        self.maxseconds = maxseconds
+        self.required_files = []
+        self.inputs = {}
+        self.outputs = []
+        # By default do not make assertions about the number of outputs - but
+        # to test filtering, allow the number of outputs to be stated explicitly.
+        self.num_outputs = None
+        self.error = False
+        self.exception = None
+
+        self.__handle_test_dict( test_dict, i, default_interactor )
+
+    def test_data( self ):
+        """
+        Iterator over metadata representing the required files for upload.
+        """
+        return test_data_iter( self.required_files )
+
+    def __matching_case_for_value( self, cond, declared_value ):
+        test_param = cond.test_param
+        if isinstance(test_param, galaxy.tools.parameters.basic.BooleanToolParameter):
+            if declared_value is None:
+                # No explicit value for param in test case, determine from default
+                query_value = test_param.checked
+            else:
+                query_value = _process_bool_param_value( test_param, declared_value )
+
+            def matches_declared_value(case_value):
+                return _process_bool_param_value( test_param, case_value ) == query_value
+        elif isinstance(test_param, galaxy.tools.parameters.basic.SelectToolParameter):
+            if declared_value is not None:
+                # Test case supplied explicit value to check against.
+
+                def matches_declared_value(case_value):
+                    return case_value == declared_value
+            elif test_param.static_options:
+                # No explicit value in the test case; nothing to do if options
+                # are dynamic, but if static options are available use the one
+                # marked selected, falling back on the top-most option (as the
+                # GUI does).
+                for (name, value, selected) in test_param.static_options:
+                    if selected:
+                        default_option = value
+                        break
+                else:
+                    first_option = test_param.static_options[0]
+                    first_option_value = first_option[1]
+                    default_option = first_option_value
+
+                def matches_declared_value(case_value):
+                    return case_value == default_option
+            else:
+                # No explicit value for this param and cannot determine a
+                # default - give up. Previously this would just result in a key
+                # error exception.
+                msg = "Failed to find test parameter value specification required for conditional %s" % cond.name
+                raise Exception( msg )
+
+        # Check the tool's defined cases against predicate to determine
+        # selected or default.
+        for case in cond.cases:
+            if matches_declared_value( case.value ):
+                return case
+        else:
+            msg_template = "%s - Failed to find case matching value (%s) for test parameter specification for conditional %s. Remainder of test behavior is unspecified."
+            msg = msg_template % ( self.tool.id, declared_value, cond.name )
+            log.info( msg )
+
+    def __split_if_str( self, value ):
+        split = isinstance(value, string_types)
+        if split:
+            value = value.split(",")
+        return value
+
+    def __handle_test_dict( self, test_dict, i, default_interactor ):
+        try:
+            # Mechanism test code uses for interacting with the Galaxy instance.
+            # Until 'api' is the default, switch this to 'api' to use its new
+            # features; once 'api' is the default, set this to 'twill' to use
+            # legacy features or workarounds.
+            self.interactor = test_dict.get( 'interactor', default_interactor )
+
+            self.inputs = self.__process_raw_inputs( self.tool.inputs, test_dict["inputs"] )
+            self.outputs = test_dict["outputs"]
+            self.output_collections = test_dict["output_collections"]
+            num_outputs = test_dict.get( 'expect_num_outputs', None )
+            if num_outputs:
+                num_outputs = int( num_outputs )
+            self.num_outputs = num_outputs
+            self.command_line = test_dict.get("command", None)
+            self.stdout = test_dict.get("stdout", None)
+            self.stderr = test_dict.get("stderr", None)
+            self.expect_exit_code = test_dict.get("expect_exit_code", None)
+            self.expect_failure = test_dict.get("expect_failure", False)
+            self.md5 = test_dict.get("md5", None)
+        except Exception as e:
+            self.inputs = {}
+            self.error = True
+            self.exception = e
+
+    def __process_raw_inputs( self, tool_inputs, raw_inputs, parent_context=None ):
+        """
+        Recursively expand the flat list of inputs into a "tree"-form flat list
+        structure (using | to nest to new levels), expanding dataset
+        information along the way to populate self.required_files.
+        """
+        parent_context = parent_context or RootParamContext()
+        expanded_inputs = {}
+        for key, value in tool_inputs.items():
+            if isinstance( value, galaxy.tools.parameters.grouping.Conditional ):
+                cond_context = ParamContext( name=value.name, parent_context=parent_context )
+                case_context = ParamContext( name=value.test_param.name, parent_context=cond_context )
+                raw_input = case_context.extract_value( raw_inputs )
+                case_value = raw_input[ 1 ] if raw_input else None
+                case = self.__matching_case_for_value( value, case_value )
+                if case:
+                    for input_name, input_value in case.inputs.items():
+                        case_inputs = self.__process_raw_inputs( { input_name: input_value }, raw_inputs, parent_context=cond_context )
+                        expanded_inputs.update( case_inputs )
+                    if not value.type == "text":
+                        expanded_case_value = self.__split_if_str( case.value )
+                    if case_value is not None:
+                        # A bit tricky here - we are growing inputs with a value
+                        # that may be implicit (i.e. not defined by the user, just
+                        # a default defined in the tool). We do not want to grow
+                        # expanded_inputs for implicit values and risk the repeat
+                        # block viewing this as a new instance with a value defined
+                        # (and hence entering an infinite loop) - hence the
+                        # "case_value is not None" check.
+                        processed_value = _process_simple_value( value.test_param, expanded_case_value )
+                        expanded_inputs[ case_context.for_state() ] = processed_value
+            elif isinstance( value, galaxy.tools.parameters.grouping.Section ):
+                context = ParamContext( name=value.name, parent_context=parent_context )
+                for r_name, r_value in value.inputs.iteritems():
+                    expanded_input = self.__process_raw_inputs( { context.for_state(): r_value }, raw_inputs, parent_context=context )
+                    if expanded_input:
+                        expanded_inputs.update( expanded_input )
+            elif isinstance( value, galaxy.tools.parameters.grouping.Repeat ):
+                repeat_index = 0
+                while True:
+                    context = ParamContext( name=value.name, index=repeat_index, parent_context=parent_context )
+                    updated = False
+                    for r_name, r_value in value.inputs.iteritems():
+                        expanded_input = self.__process_raw_inputs( { context.for_state(): r_value }, raw_inputs, parent_context=context )
+                        if expanded_input:
+                            expanded_inputs.update( expanded_input )
+                            updated = True
+                    if not updated:
+                        break
+                    repeat_index += 1
+            else:
+                context = ParamContext( name=value.name, parent_context=parent_context )
+                raw_input = context.extract_value( raw_inputs )
+                if raw_input:
+                    (name, param_value, param_extra) = raw_input
+                    if not value.type == "text":
+                        param_value = self.__split_if_str( param_value )
+                    if isinstance( value, galaxy.tools.parameters.basic.DataToolParameter ):
+                        if not isinstance(param_value, list):
+                            param_value = [ param_value ]
+                        for v in param_value:
+                            self.__add_uploaded_dataset( context.for_state(), v, param_extra, value )
+                        processed_value = param_value
+                    elif isinstance( value, galaxy.tools.parameters.basic.DataCollectionToolParameter ):
+                        assert 'collection' in param_extra
+                        collection_def = param_extra[ 'collection' ]
+                        for ( name, value, extra ) in collection_def.collect_inputs():
+                            require_file( name, value, extra, self.required_files )
+                        processed_value = collection_def
+                    else:
+                        processed_value = _process_simple_value( value, param_value )
+                    expanded_inputs[ context.for_state() ] = processed_value
+        return expanded_inputs
+
+    def __add_uploaded_dataset( self, name, value, extra, input_parameter ):
+        if value is None:
+            assert input_parameter.optional, '%s is not optional. You must provide a valid filename.' % name
+            return value
+        return require_file( name, value, extra, self.required_files )
+
+
+def _process_simple_value( param, param_value ):
+    if isinstance( param, galaxy.tools.parameters.basic.SelectToolParameter ) and hasattr( param, 'static_options' ):
+        # Tests may specify values as either the raw value or the text as it
+        # appears in the option list - the API doesn't and shouldn't accept
+        # the text value - so we need to convert the text into the form value.
+        def process_param_value( param_value ):
+            found_value = False
+            value_for_text = None
+            if param.static_options:
+                for (text, opt_value, selected) in param.static_options:
+                    if param_value == opt_value:
+                        found_value = True
+                    if value_for_text is None and param_value == text:
+                        value_for_text = opt_value
+            if not found_value and value_for_text is not None:
+                processed_value = value_for_text
+            else:
+                processed_value = param_value
+            return processed_value
+        # Do replacement described above for lists or singleton
+        # values.
+        if isinstance( param_value, list ):
+            processed_value = map( process_param_value, param_value )
+        else:
+            processed_value = process_param_value( param_value )
+    elif isinstance( param, galaxy.tools.parameters.basic.BooleanToolParameter ):
+        # Like above, tests may use the tool-defined values or simply
+        # true/false.
+        processed_value = _process_bool_param_value( param, param_value )
+    else:
+        processed_value = param_value
+    return processed_value
+
+
+def _process_bool_param_value( param, param_value ):
+    assert isinstance( param, galaxy.tools.parameters.basic.BooleanToolParameter )
+    was_list = False
+    if isinstance( param_value, list ):
+        was_list = True
+        param_value = param_value[0]
+    if param.truevalue == param_value:
+        processed_value = True
+    elif param.falsevalue == param_value:
+        processed_value = False
+    else:
+        processed_value = string_as_bool( param_value )
+    return [ processed_value ] if was_list else processed_value
+
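+# e.g. for a boolean parameter defined with truevalue="--flag" and
+# falsevalue="" (an illustrative configuration):
+#     _process_bool_param_value( param, "--flag" ) -> True
+#     _process_bool_param_value( param, "" )       -> False
+#     _process_bool_param_value( param, "no" )     -> string_as_bool( "no" ), i.e. False
+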
+
+ at nottest
+def test_data_iter( required_files ):
+    for fname, extra in required_files:
+        data_dict = dict(
+            fname=fname,
+            metadata=extra.get( 'metadata', [] ),
+            composite_data=extra.get( 'composite_data', [] ),
+            ftype=extra.get( 'ftype', DEFAULT_FTYPE ),
+            dbkey=extra.get( 'dbkey', DEFAULT_DBKEY ),
+        )
+        edit_attributes = extra.get( 'edit_attributes', [] )
+
+        # currently only renaming is supported
+        for edit_att in edit_attributes:
+            if edit_att.get( 'type', None ) == 'name':
+                new_name = edit_att.get( 'value', None )
+                assert new_name, 'You must supply the new dataset name as the value tag of the edit_attributes tag'
+                data_dict['name'] = new_name
+            else:
+                raise Exception( 'edit_attributes type (%s) is unimplemented' % edit_att.get( 'type', None ) )
+
+        yield data_dict
+
+
+def require_file( name, value, extra, required_files ):
+    if ( value, extra ) not in required_files:
+        required_files.append( ( value, extra ) )  # these files will be uploaded
+    name_change = [ att for att in extra.get( 'edit_attributes', [] ) if att.get( 'type' ) == 'name' ]
+    if name_change:
+        name_change = name_change[-1].get( 'value' )  # only the last name change really matters
+        value = name_change  # change value for select to renamed uploaded file for e.g. composite dataset
+    else:
+        for end in [ '.zip', '.gz' ]:
+            if value.endswith( end ):
+                value = value[ :-len( end ) ]
+                break
+        value = os.path.basename( value )  # if uploading a file in a path other than root of test-data
+    return value
+
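+# e.g. require_file( "input1", "sub/1.bed.gz", {}, required_files ) records
+# ( "sub/1.bed.gz", {} ) for upload and returns "1.bed" - the stripped
+# basename later used to reference the uploaded dataset (paths illustrative).
+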
+
+class ParamContext(object):
+
+    def __init__( self, name, index=None, parent_context=None ):
+        self.parent_context = parent_context
+        self.name = name
+        self.index = None if index is None else int( index )
+
+    def for_state( self ):
+        name = self.name if self.index is None else "%s_%d" % ( self.name, self.index )
+        parent_for_state = self.parent_context.for_state()
+        if parent_for_state:
+            return "%s|%s" % ( parent_for_state, name )
+        else:
+            return name
+
+    def __str__( self ):
+        return "Context[for_state=%s]" % self.for_state()
+
+    def param_names( self ):
+        for parent_context_param in self.parent_context.param_names():
+            if self.index is not None:
+                yield "%s|%s_%d" % ( parent_context_param, self.name, self.index )
+            else:
+                yield "%s|%s" % ( parent_context_param, self.name )
+        if self.index is not None:
+            yield "%s_%d" % ( self.name, self.index )
+        else:
+            yield self.name
+
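+    # Naming sketch: a context chain root -> "cond" -> "param" gives
+    # for_state() == "cond|param"; a repeat "r" with index 1 under "cond"
+    # gives for_state() == "cond|r_1". param_names() yields candidate names
+    # from most to least qualified.
+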
+    def extract_value( self, raw_inputs ):
+        for param_name in self.param_names():
+            value = self.__raw_param_found( param_name, raw_inputs)
+            if value:
+                return value
+        return None
+
+    def __raw_param_found( self, param_name, raw_inputs ):
+        index = None
+        for i, raw_input in enumerate( raw_inputs ):
+            if raw_input[ 0 ] == param_name:
+                index = i
+        if index is not None:
+            raw_input = raw_inputs[ index ]
+            del raw_inputs[ index ]
+            return raw_input
+        else:
+            return None
+
+
+class RootParamContext(object):
+
+    def __init__( self ):
+        pass
+
+    def for_state( self ):
+        return ""
+
+    def param_names( self ):
+        return []
+
+    def get_index( self ):
+        return 0
diff --git a/lib/galaxy/tools/toolbox/__init__.py b/lib/galaxy/tools/toolbox/__init__.py
new file mode 100644
index 0000000..604c4e4
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/__init__.py
@@ -0,0 +1,13 @@
+"""API for this module containing functionality related to the toolbox."""
+
+from .base import AbstractToolBox, BaseGalaxyToolBox
+
+from .panel import panel_item_types, ToolSection, ToolSectionLabel
+
+__all__ = (
+    "AbstractToolBox",
+    "BaseGalaxyToolBox",
+    "panel_item_types",
+    "ToolSection",
+    "ToolSectionLabel",
+)
diff --git a/lib/galaxy/tools/toolbox/base.py b/lib/galaxy/tools/toolbox/base.py
new file mode 100644
index 0000000..3b16c51
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/base.py
@@ -0,0 +1,1081 @@
+import logging
+import os
+import string
+import time
+from xml.etree.ElementTree import ParseError
+
+from markupsafe import escape
+from six import iteritems
+from six.moves.urllib.parse import urlparse
+
+from galaxy.exceptions import MessageException, ObjectNotFound
+# Next two are extra tool dependency not used by AbstractToolBox but by
+# BaseGalaxyToolBox.
+from galaxy.tools.deps import build_dependency_manager
+from galaxy.tools.loader_directory import looks_like_a_tool
+from galaxy.util import (
+    listify,
+    parse_xml,
+    string_as_bool
+)
+from galaxy.util.bunch import Bunch
+from galaxy.util.dictifiable import Dictifiable
+from galaxy.util.odict import odict
+
+from .filters import FilterFactory
+from .integrated_panel import ManagesIntegratedToolPanelMixin
+from .lineages import LineageMap
+from .panel import (
+    panel_item_types,
+    ToolPanelElements,
+    ToolSection,
+    ToolSectionLabel
+)
+from .parser import ensure_tool_conf_item, get_toolbox_parser
+from .tags import tool_tag_manager
+from .watcher import (
+    get_tool_conf_watcher,
+    get_tool_watcher
+)
+
+log = logging.getLogger( __name__ )
+
+
+class AbstractToolBox( Dictifiable, ManagesIntegratedToolPanelMixin, object ):
+    """
+    Abstract container for managing a ToolPanel - containing tools and
+    workflows optionally in labelled sections.
+    """
+
+    def __init__( self, config_filenames, tool_root_dir, app, tool_conf_watcher=None ):
+        """
+        Create a toolbox from the config files named by `config_filenames`, using
+        `tool_root_dir` as the base directory for finding individual tool config files.
+        When reloading the toolbox, tool_conf_watcher will be provided.
+        """
+        # The _dynamic_tool_confs list contains dictionaries storing
+        # information about the tools defined in each shed-related
+        # shed_tool_conf.xml file.
+        self._dynamic_tool_confs = []
+        self._tools_by_id = {}
+        self._integrated_section_by_tool = {}
+        # Tool lineages can contain chains of related tools with different ids
+        # so each will be present once in the above dictionary. The following
+        # dictionary can instead hold multiple tools with different versions.
+        self._tool_versions_by_id = {}
+        self._workflows_by_id = {}
+        # In-memory dictionary that defines the layout of the tool panel.
+        self._tool_panel = ToolPanelElements()
+        self._index = 0
+        self.data_manager_tools = odict()
+        self._lineage_map = LineageMap( app )
+        # Sets self._integrated_tool_panel and self._integrated_tool_panel_config_has_contents
+        self._init_integrated_tool_panel( app.config )
+        # The following refers to the tool_path config setting for backward compatibility.  The shed-related
+        # (e.g., shed_tool_conf.xml) files include the tool_path attribute within the <toolbox> tag.
+        self._tool_root_dir = tool_root_dir
+        self.app = app
+        self._tool_watcher = get_tool_watcher( self, app.config )
+        if tool_conf_watcher:
+            self._tool_conf_watcher = tool_conf_watcher  # Avoids (re-)starting threads in uwsgi
+        else:
+            self._tool_conf_watcher = get_tool_conf_watcher(lambda: self.handle_reload_toolbox())
+        self._filter_factory = FilterFactory( self )
+        self._tool_tag_manager = tool_tag_manager( app )
+        self._init_tools_from_configs( config_filenames )
+        if self.app.name == 'galaxy' and self._integrated_tool_panel_config_has_contents:
+            # Load self._tool_panel based on the order in self._integrated_tool_panel.
+            self._load_tool_panel()
+        self._save_integrated_tool_panel()
+
+    def handle_reload_toolbox(self):
+        """Extension-point for Galaxy-app specific reload logic.
+
+        This abstract representation of the toolbox shouldn't have details about
+        interacting with the rest of the Galaxy app or message queues, etc....
+        """
+
+    def handle_panel_update(self, section_dict):
+        """Extension-point for Galaxy-app specific reload logic.
+
+        This abstract representation of the toolbox shouldn't have details about
+        interacting with the rest of the Galaxy app or message queues, etc....
+        """
+
+    def create_tool( self, config_file, repository_id=None, guid=None, **kwds ):
+        raise NotImplementedError()
+
+    def _init_tools_from_configs( self, config_filenames ):
+        """ Read through all tool config files and initialize tools in each
+        with init_tools_from_config below.
+        """
+        start = time.time()
+        self._tool_tag_manager.reset_tags()
+        config_filenames = listify( config_filenames )
+        for config_filename in config_filenames:
+            if os.path.isdir( config_filename ):
+                directory_contents = sorted( os.listdir( config_filename ) )
+                directory_config_files = [ config_file for config_file in directory_contents if config_file.endswith( ".xml" ) ]
+                config_filenames.remove( config_filename )
+                config_filenames.extend( directory_config_files )
+        for config_filename in config_filenames:
+            try:
+                self._init_tools_from_config( config_filename )
+            except ParseError:
+                # Occasionally we experience "Missing required parameter 'shed_tool_conf'."
+                # This happens if parsing the shed_tool_conf fails, so we just sleep a second and try again.
+                # TODO: figure out why this fails occasionally (try installing hundreds of tools in batch ...).
+                time.sleep(1)
+                self._init_tools_from_config(config_filename)
+            except Exception:
+                log.exception( "Error loading tools defined in config %s", config_filename )
+        log.debug("Reading tools from config files took %d seconds", time.time() - start)
+
+    def _init_tools_from_config( self, config_filename ):
+        """
+        Read the configuration file and load each tool.  The following tags are currently supported:
+
+        .. raw:: xml
+
+            <toolbox>
+                <tool file="data_source/upload.xml"/>                 # tools outside sections
+                <label text="Basic Tools" id="basic_tools" />         # labels outside sections
+                <workflow id="529fd61ab1c6cc36" />                    # workflows outside sections
+                <section name="Get Data" id="getext">                 # sections
+                    <tool file="data_source/biomart.xml" />           # tools inside sections
+                    <label text="In Section" id="in_section" />       # labels inside sections
+                    <workflow id="adb5f5c93f827949" />                # workflows inside sections
+                    <tool file="data_source/foo.xml" labels="beta" /> # label for a single tool
+                </section>
+            </toolbox>
+
+        """
+        log.info( "Parsing the tool configuration %s" % config_filename )
+        tool_conf_source = get_toolbox_parser(config_filename)
+        tool_path = tool_conf_source.parse_tool_path()
+        parsing_shed_tool_conf = tool_conf_source.is_shed_tool_conf()
+        if parsing_shed_tool_conf:
+            # Keep an in-memory list of xml elements to enable persistence of the changing tool config.
+            config_elems = []
+        tool_path = self.__resolve_tool_path(tool_path, config_filename)
+        # Only load the panel_dict under certain conditions.
+        load_panel_dict = not self._integrated_tool_panel_config_has_contents
+        for item in tool_conf_source.parse_items():
+            index = self._index
+            self._index += 1
+            if parsing_shed_tool_conf:
+                config_elems.append( item.elem )
+            self.load_item(
+                item,
+                tool_path=tool_path,
+                load_panel_dict=load_panel_dict,
+                guid=item.get( 'guid' ),
+                index=index,
+                internal=True
+            )
+
+        if parsing_shed_tool_conf:
+            shed_tool_conf_dict = dict( config_filename=config_filename,
+                                        tool_path=tool_path,
+                                        config_elems=config_elems )
+            self._dynamic_tool_confs.append( shed_tool_conf_dict )
+            # This explicitly monitors shed_tool_confs; otherwise one would need to add <toolbox monitor="true">
+            self._tool_conf_watcher.watch_file(config_filename)
+        if tool_conf_source.parse_monitor():
+            self._tool_conf_watcher.watch_file(config_filename)
+
+    def load_item( self, item, tool_path, panel_dict=None, integrated_panel_dict=None, load_panel_dict=True, guid=None, index=None, internal=False ):
+        with self.app._toolbox_lock:
+            item = ensure_tool_conf_item(item)
+            item_type = item.type
+            if item_type not in ['tool', 'section'] and not internal:
+                # External calls from tool shed code cannot load labels or tool
+                # directories.
+                return
+
+            if panel_dict is None:
+                panel_dict = self._tool_panel
+            if integrated_panel_dict is None:
+                integrated_panel_dict = self._integrated_tool_panel
+            if item_type == 'tool':
+                self._load_tool_tag_set( item, panel_dict=panel_dict, integrated_panel_dict=integrated_panel_dict, tool_path=tool_path, load_panel_dict=load_panel_dict, guid=guid, index=index, internal=internal )
+            elif item_type == 'workflow':
+                self._load_workflow_tag_set( item, panel_dict=panel_dict, integrated_panel_dict=integrated_panel_dict, load_panel_dict=load_panel_dict, index=index )
+            elif item_type == 'section':
+                self._load_section_tag_set( item, tool_path=tool_path, load_panel_dict=load_panel_dict, index=index, internal=internal )
+            elif item_type == 'label':
+                self._load_label_tag_set( item, panel_dict=panel_dict, integrated_panel_dict=integrated_panel_dict, load_panel_dict=load_panel_dict, index=index )
+            elif item_type == 'tool_dir':
+                self._load_tooldir_tag_set( item, panel_dict, tool_path, integrated_panel_dict, load_panel_dict=load_panel_dict )
+
+    def get_shed_config_dict_by_filename( self, filename, default=None ):
+        for shed_config_dict in self._dynamic_tool_confs:
+            if shed_config_dict[ 'config_filename' ] == filename:
+                return shed_config_dict
+        return default
+
+    def update_shed_config(self, shed_conf):
+        """  Update the in-memory descriptions of tools and write out the changes
+             to integrated tool panel unless we are just deactivating a tool (since
+             that doesn't affect that file).
+        """
+        for index, my_shed_tool_conf in enumerate(self._dynamic_tool_confs):
+            if shed_conf['config_filename'] == my_shed_tool_conf['config_filename']:
+                self._dynamic_tool_confs[index] = shed_conf
+        self._save_integrated_tool_panel()
+
+    def get_section( self, section_id, new_label=None, create_if_needed=False ):
+        tool_panel_section_key = str( section_id )
+        if tool_panel_section_key in self._tool_panel:
+            # Appending a tool to an existing section in toolbox._tool_panel
+            tool_section = self._tool_panel[ tool_panel_section_key ]
+            log.debug( "Appending to tool panel section: %s" % str( tool_section.name ) )
+        elif create_if_needed:
+            # Appending a new section to toolbox._tool_panel
+            if new_label is None:
+                # This might add an ugly section label to the tool panel, but, oh well...
+                new_label = section_id
+            section_dict = {
+                'name': new_label,
+                'id': section_id,
+                'version': '',
+            }
+            self.handle_panel_update(section_dict)
+            tool_section = self._tool_panel[ tool_panel_section_key ]
+            self._save_integrated_tool_panel()
+        else:
+            tool_section = None
+        return tool_panel_section_key, tool_section
+
+    def create_section(self, section_dict):
+        tool_section = ToolSection(section_dict)
+        self._tool_panel.append_section(tool_section.id, tool_section)
+        log.debug("Loading new tool panel section: %s" % str(tool_section.name))
+        return tool_section
+
+    def get_integrated_section_for_tool( self, tool ):
+        tool_id = tool.id
+
+        if tool_id in self._integrated_section_by_tool:
+            return self._integrated_section_by_tool[tool_id]
+
+        return None, None
+
+    def __resolve_tool_path(self, tool_path, config_filename):
+        if not tool_path:
+            # Default to backward compatible config setting.
+            tool_path = self._tool_root_dir
+        else:
+            # Allow use of __tool_conf_dir__ in toolbox config files.
+            tool_conf_dir = os.path.dirname(config_filename)
+            tool_path_vars = {"tool_conf_dir": tool_conf_dir}
+            tool_path = string.Template(tool_path).safe_substitute(tool_path_vars)
+        return tool_path
+
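+    # e.g. a tool_path of "${tool_conf_dir}/tools" in a config file located
+    # at /srv/galaxy/config/tool_conf.xml resolves to /srv/galaxy/config/tools
+    # (paths illustrative).
+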
+    def __add_tool_to_tool_panel(self, tool, panel_component, section=False):
+        # See if a version of this tool is already loaded into the tool panel.
+        # The value of panel_component will be a ToolSection (if the value of
+        # section=True) or self._tool_panel (if section=False).
+        tool_id = str(tool.id)
+        tool = self._tools_by_id[tool_id]
+        if section:
+            panel_dict = panel_component.elems
+        else:
+            panel_dict = panel_component
+
+        related_tool = self._lineage_in_panel(panel_dict, tool=tool)
+        if related_tool:
+            if self._newer_tool(tool, related_tool):
+                panel_dict.replace_tool(
+                    previous_tool_id=related_tool.id,
+                    new_tool_id=tool_id,
+                    tool=tool,
+                )
+                log.debug("Loaded tool id: %s, version: %s into tool panel." % (tool.id, tool.version))
+        else:
+            inserted = False
+            index = self._integrated_tool_panel.index_of_tool_id(tool_id)
+            if index:
+                panel_dict.insert_tool(index, tool)
+                inserted = True
+            if not inserted:
+                # Check the tool's installed versions.
+                versions = []
+                if hasattr(tool, 'lineage'):
+                    versions = tool.lineage.get_versions()
+                for tool_lineage_version in versions:
+                    lineage_id = tool_lineage_version.id
+                    index = self._integrated_tool_panel.index_of_tool_id(lineage_id)
+                    if index:
+                        panel_dict.insert_tool(index, tool)
+                        inserted = True
+                if not inserted:
+                    if (
+                        tool.guid is None or
+                        tool.tool_shed is None or
+                        tool.repository_name is None or
+                        tool.repository_owner is None or
+                        tool.installed_changeset_revision is None
+                    ):
+                        # We have a tool that was not installed from the Tool
+                        # Shed, but is also not yet defined in
+                        # integrated_tool_panel.xml, so append it to the tool
+                        # panel.
+                        panel_dict.append_tool(tool)
+                        log.debug("Loaded tool id: %s, version: %s into tool panel.." % (tool.id, tool.version))
+                    else:
+                        # We are in the process of installing the tool.
+                        tool_lineage = self._lineage_map.get(tool_id)
+                        already_loaded = self._lineage_in_panel(panel_dict, tool_lineage=tool_lineage) is not None
+                        if not already_loaded:
+                            # If the tool is not defined in integrated_tool_panel.xml, append it to the tool panel.
+                            panel_dict.append_tool(tool)
+                            log.debug("Loaded tool id: %s, version: %s into tool panel." % (tool.id, tool.version))
+
+    def _load_tool_panel( self ):
+        start = time.time()
+        for key, item_type, val in self._integrated_tool_panel.panel_items_iter():
+            if item_type == panel_item_types.TOOL:
+                tool_id = key.replace( 'tool_', '', 1 )
+                if tool_id in self._tools_by_id:
+                    self.__add_tool_to_tool_panel( val, self._tool_panel, section=False )
+                    self._integrated_section_by_tool[tool_id] = '', ''
+            elif item_type == panel_item_types.WORKFLOW:
+                workflow_id = key.replace( 'workflow_', '', 1 )
+                if workflow_id in self._workflows_by_id:
+                    workflow = self._workflows_by_id[ workflow_id ]
+                    self._tool_panel[ key ] = workflow
+                    log.debug( "Loaded workflow: %s %s" % ( workflow_id, workflow.name ) )
+            elif item_type == panel_item_types.LABEL:
+                self._tool_panel[ key ] = val
+            elif item_type == panel_item_types.SECTION:
+                section_dict = {
+                    'id': val.id or '',
+                    'name': val.name or '',
+                    'version': val.version or '',
+                }
+                section = ToolSection( section_dict )
+                log.debug( "Loading section: %s" % section_dict.get( 'name' ) )
+                for section_key, section_item_type, section_val in val.panel_items_iter():
+                    if section_item_type == panel_item_types.TOOL:
+                        tool_id = section_key.replace( 'tool_', '', 1 )
+                        if tool_id in self._tools_by_id:
+                            self.__add_tool_to_tool_panel( section_val, section, section=True )
+                            self._integrated_section_by_tool[tool_id] = key, val.name
+                    elif section_item_type == panel_item_types.WORKFLOW:
+                        workflow_id = section_key.replace( 'workflow_', '', 1 )
+                        if workflow_id in self._workflows_by_id:
+                            workflow = self._workflows_by_id[ workflow_id ]
+                            section.elems[ section_key ] = workflow
+                            log.debug( "Loaded workflow: %s %s" % ( workflow_id, workflow.name ) )
+                    elif section_item_type == panel_item_types.LABEL:
+                        if section_val:
+                            section.elems[ section_key ] = section_val
+                            log.debug( "Loaded label: %s" % ( section_val.text ) )
+                self._tool_panel[ key ] = section
+        log.debug("loading tool panel took %d seconds", time.time() - start)
+
+    def _load_integrated_tool_panel_keys( self ):
+        """
+        Load the integrated tool panel keys, setting values for tools and
+        workflows to None.  The values will be reset when the various tool
+        panel config files are parsed, at which time the tools and workflows
+        are loaded.
+        """
+        tree = parse_xml( self._integrated_tool_panel_config )
+        root = tree.getroot()
+        for elem in root:
+            key = elem.get( 'id' )
+            if elem.tag == 'tool':
+                self._integrated_tool_panel.stub_tool( key )
+            elif elem.tag == 'workflow':
+                self._integrated_tool_panel.stub_workflow( key )
+            elif elem.tag == 'section':
+                section = ToolSection( elem )
+                for section_elem in elem:
+                    section_id = section_elem.get( 'id' )
+                    if section_elem.tag == 'tool':
+                        section.elems.stub_tool( section_id )
+                    elif section_elem.tag == 'workflow':
+                        section.elems.stub_workflow( section_id )
+                    elif section_elem.tag == 'label':
+                        section.elems.stub_label( section_id )
+                self._integrated_tool_panel.append_section( key, section )
+            elif elem.tag == 'label':
+                self._integrated_tool_panel.stub_label( key )
+
+    def get_tool( self, tool_id, tool_version=None, get_all_versions=False, exact=False ):
+        """Attempt to locate a tool in the tool box."""
+        if tool_version:
+            tool_version = str( tool_version )
+
+        if get_all_versions and exact:
+            raise AssertionError("Cannot call get_tool with both get_all_versions and exact set to True")
+
+        if "/repos/" in tool_id:  # test if tool came from a toolshed
+            tool_id_without_tool_shed = tool_id.split("/repos/")[1]
+            available_tool_sheds = [ urlparse(_) for _ in self.app.tool_shed_registry.tool_sheds.values() ]
+            available_tool_sheds = [ url.geturl().replace(url.scheme + "://", '', 1) for url in available_tool_sheds]
+            tool_ids = [ tool_shed + "repos/" + tool_id_without_tool_shed for tool_shed in available_tool_sheds]
+            if tool_id in tool_ids:  # move original tool_id to the top of tool_ids
+                tool_ids.remove(tool_id)
+            tool_ids.insert(0, tool_id)
+        else:
+            tool_ids = [tool_id]
+        for tool_id in tool_ids:
+            if tool_id in self._tools_by_id and not get_all_versions:
+                if tool_version and tool_version in self._tool_versions_by_id[ tool_id ]:
+                    return self._tool_versions_by_id[ tool_id ][ tool_version ]
+                # tool_id exactly matches an available tool by id (which is 'old' tool_id or guid)
+                return self._tools_by_id[ tool_id ]
+            elif exact:
+                # We're looking for an exact match, so we skip lineage and
+                # versionless mapping, though we may want to check duplicate
+                # toolsheds.
+                continue
+            # exact tool id match not found, or all versions requested, search for other options, e.g. migrated tools or different versions
+            rval = []
+            tool_lineage = self._lineage_map.get( tool_id )
+            if not tool_lineage:
+                tool_lineage = self._lineage_map.get_versionless( tool_id )
+            if tool_lineage:
+                lineage_tool_versions = tool_lineage.get_versions( )
+                for lineage_tool_version in lineage_tool_versions:
+                    lineage_tool = self._tool_from_lineage_version( lineage_tool_version )
+                    if lineage_tool:
+                        rval.append( lineage_tool )
+            if not rval:
+                # still no tool, do a deeper search and try to match by old ids
+                for tool in self._tools_by_id.values():
+                    if tool.old_id == tool_id:
+                        rval.append( tool )
+            if rval:
+                if get_all_versions:
+                    return rval
+                else:
+                    if tool_version:
+                        # return first tool with matching version
+                        for tool in rval:
+                            if tool.version == tool_version:
+                                return tool
+                    # No tool matches by version, simply return the first available tool found
+                    return rval[0]
+            # We now likely have a Tool Shed guid passed in, but no supporting
+            # database entries. If the tool exists by exact id and is loaded,
+            # provide the exact match within a list.
+            if tool_id in self._tools_by_id:
+                return [ self._tools_by_id[ tool_id ] ]
+        return None
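+
+    # Usage sketch for get_tool (hypothetical ids): given a toolbox instance tb,
+    #
+    #     tb.get_tool('cat1')                         # newest loaded version
+    #     tb.get_tool('cat1', tool_version='1.0.0')   # one specific version
+    #     tb.get_tool('cat1', get_all_versions=True)  # list of all versions
+    #
+    # Tool Shed guids of the form <shed>/repos/<owner>/<name>/<id>/<version>
+    # are also accepted and are matched across duplicate tool shed entries.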
+
+    def has_tool( self, tool_id, tool_version=None, exact=False ):
+        return self.get_tool( tool_id, tool_version=tool_version, exact=exact ) is not None
+
+    def get_tool_id( self, tool_id ):
+        """ Take a tool id (potentially from a different Galaxy instance or that
+        is no longer loaded  - and find the closest match to the currently loaded
+        tools (using get_tool for inexact matches which currently returns the oldest
+        tool shed installed tool with the same short id).
+        """
+        if tool_id not in self._tools_by_id:
+            tool = self.get_tool( tool_id )
+            if tool:
+                tool_id = tool.id
+            else:
+                tool_id = None
+        # else exact match - leave unmodified.
+        return tool_id
+
+    def get_loaded_tools_by_lineage( self, tool_id ):
+        """Get all loaded tools associated by lineage to the tool whose id is tool_id."""
+        tool_lineage = self._lineage_map.get( tool_id )
+        if tool_lineage:
+            lineage_tool_versions = tool_lineage.get_versions( )
+            available_tool_versions = []
+            for lineage_tool_version in lineage_tool_versions:
+                tool = self._tool_from_lineage_version( lineage_tool_version )
+                if tool:
+                    available_tool_versions.append( tool )
+            return available_tool_versions
+        else:
+            if tool_id in self._tools_by_id:
+                tool = self._tools_by_id[ tool_id ]
+                return [ tool ]
+        return []
+
+    def tools( self ):
+        return iteritems(self._tools_by_id)
+
+    def dynamic_confs( self, include_migrated_tool_conf=False ):
+        confs = []
+        for dynamic_tool_conf_dict in self._dynamic_tool_confs:
+            dynamic_tool_conf_filename = dynamic_tool_conf_dict[ 'config_filename' ]
+            if include_migrated_tool_conf or (dynamic_tool_conf_filename != self.app.config.migrated_tools_config):
+                confs.append( dynamic_tool_conf_dict )
+        return confs
+
+    def dynamic_conf_filenames( self, include_migrated_tool_conf=False ):
+        """ Return list of dynamic tool configuration filenames (shed_tools).
+        These must be used with various dynamic tool configuration update
+        operations (e.g. with update_shed_config).
+        """
+        for dynamic_tool_conf_dict in self.dynamic_confs( include_migrated_tool_conf=include_migrated_tool_conf ):
+            yield dynamic_tool_conf_dict[ 'config_filename' ]
+
+    def _path_template_kwds( self ):
+        return {}
+
+    def _load_tool_tag_set( self, item, panel_dict, integrated_panel_dict, tool_path, load_panel_dict, guid=None, index=None, internal=False ):
+        try:
+            path_template = item.get( "file" )
+            template_kwds = self._path_template_kwds()
+            path = string.Template(path_template).safe_substitute(**template_kwds)
+            tool_shed_repository = None
+            can_load_into_panel_dict = True
+
+            tool = self.load_tool_from_cache(os.path.join(tool_path, path))
+            from_cache = tool
+            if from_cache:
+                if guid and tool.id != guid:
+                    # In rare cases a tool shed tool is loaded into the cache without guid.
+                    # In that case recreating the tool will correct the cached version.
+                    from_cache = False
+                else:
+                    log.debug("Loading tool %s from cache", str(tool.id))
+            if guid and not from_cache:  # tool was not in cache and is a tool shed tool
+                tool_shed_repository = self.get_tool_repository_from_xml_item(item, path)
+                if tool_shed_repository:
+                    # Only load tools if the repository is not deactivated or uninstalled.
+                    can_load_into_panel_dict = not tool_shed_repository.deleted
+                    repository_id = self.app.security.encode_id(tool_shed_repository.id)
+                    tool = self.load_tool(os.path.join( tool_path, path ), guid=guid, repository_id=repository_id, use_cached=False)
+            if not tool:  # tool was not in cache and is not a tool shed tool.
+                tool = self.load_tool(os.path.join(tool_path, path), use_cached=False)
+            if string_as_bool(item.get( 'hidden', False )):
+                tool.hidden = True
+            key = 'tool_%s' % str(tool.id)
+            if can_load_into_panel_dict:
+                if guid and not from_cache:
+                    tool.tool_shed = tool_shed_repository.tool_shed
+                    tool.repository_name = tool_shed_repository.name
+                    tool.repository_owner = tool_shed_repository.owner
+                    tool.installed_changeset_revision = tool_shed_repository.installed_changeset_revision
+                    tool.guid = guid
+                    tool.version = item.elem.find( "version" ).text
+                # Make sure tools have a tool_version object.
+                tool_lineage = self._lineage_map.register( tool, from_toolshed=guid )
+                tool.lineage = tool_lineage
+                if item.has_elem:
+                    self._tool_tag_manager.handle_tags( tool.id, item.elem )
+                self.__add_tool( tool, load_panel_dict, panel_dict )
+            # Always load the tool into the integrated_panel_dict, or it will not be included in the integrated_tool_panel.xml file.
+            integrated_panel_dict.update_or_append( index, key, tool )
+            # If labels were specified in the toolbox config, attach them to
+            # the tool.
+            labels = item.labels
+            if labels is not None:
+                tool.labels = labels
+        except IOError:
+            log.error( "Error reading tool configuration file from path: %s" % path )
+        except Exception:
+            log.exception( "Error reading tool from path: %s" % path )
+
+    def get_tool_repository_from_xml_item(self, item, path):
+        tool_shed = item.elem.find("tool_shed").text
+        repository_name = item.elem.find("repository_name").text
+        repository_owner = item.elem.find("repository_owner").text
+        installed_changeset_revision_elem = item.elem.find("installed_changeset_revision")
+        if installed_changeset_revision_elem is None:
+            # Backward compatibility issue - the tag used to be named 'changeset_revision'.
+            installed_changeset_revision_elem = item.elem.find("changeset_revision")
+        installed_changeset_revision = installed_changeset_revision_elem.text
+        if "/repos/" in path:  # The only time "/repos/" should not be in path is during testing!
+            try:
+                tool_shed_path, reduced_path = path.split('/repos/', 1)
+                splitted_path = reduced_path.split('/')
+                assert tool_shed_path == tool_shed
+                assert splitted_path[0] == repository_owner
+                assert splitted_path[1] == repository_name
+                if splitted_path[2] != installed_changeset_revision:
+                    # This can happen if the Tool Shed repository has been
+                    # updated to a new revision and the installed_changeset_revision
+                    # element in shed_tool_conf.xml file has been updated too
+                    log.debug("The installed_changeset_revision for tool %s is %s, using %s instead", path,
+                              installed_changeset_revision, splitted_path[2])
+                    installed_changeset_revision = splitted_path[2]
+            except AssertionError:
+                log.debug("Error while loading tool %s", path)
+        return self._get_tool_shed_repository(tool_shed,
+                                              repository_name,
+                                              repository_owner,
+                                              installed_changeset_revision)
+
+    def _get_tool_shed_repository( self, tool_shed, name, owner, installed_changeset_revision ):
+        # Abstract class doesn't have a dependency on the database, for full Tool Shed
+        # support the actual Galaxy ToolBox implements this method and returns a Tool Shed repository.
+        return None
+
+    def __add_tool( self, tool, load_panel_dict, panel_dict ):
+        # Allow for the same tool to be loaded into multiple places in the
+        # tool panel.  We have to handle the case where the tool is contained
+        # in a repository installed from the tool shed, and the Galaxy
+        # administrator has retrieved updates to the installed repository.  In
+        # this case, the tool may have been updated, but the version was not
+        # changed, so the tool should always be reloaded here.  We used to
+        # only load the tool if it was not found in self._tools_by_id, but
+        # performing that check did not enable this scenario.
+        self.register_tool( tool )
+        if load_panel_dict:
+            self.__add_tool_to_tool_panel( tool, panel_dict, section=isinstance( panel_dict, ToolSection ) )
+
+    def _load_workflow_tag_set( self, item, panel_dict, integrated_panel_dict, load_panel_dict, index=None ):
+        try:
+            # TODO: should id be encoded?
+            workflow_id = item.get( 'id' )
+            workflow = self._load_workflow( workflow_id )
+            self._workflows_by_id[ workflow_id ] = workflow
+            key = 'workflow_' + workflow_id
+            if load_panel_dict:
+                panel_dict[ key ] = workflow
+            # Always load workflows into the integrated_panel_dict.
+            integrated_panel_dict.update_or_append( index, key, workflow )
+        except Exception:
+            log.exception( "Error loading workflow: %s" % workflow_id )
+
+    def _load_label_tag_set( self, item, panel_dict, integrated_panel_dict, load_panel_dict, index=None ):
+        label = ToolSectionLabel( item )
+        key = 'label_' + label.id
+        if load_panel_dict:
+            panel_dict[ key ] = label
+        integrated_panel_dict.update_or_append( index, key, label )
+
+    def _load_section_tag_set( self, item, tool_path, load_panel_dict, index=None, internal=False ):
+        key = item.get( "id" )
+        if key in self._tool_panel:
+            section = self._tool_panel[ key ]
+            elems = section.elems
+        else:
+            section = ToolSection( item )
+            elems = section.elems
+        if key in self._integrated_tool_panel:
+            integrated_section = self._integrated_tool_panel[ key ]
+            integrated_elems = integrated_section.elems
+        else:
+            integrated_section = ToolSection( item )
+            integrated_elems = integrated_section.elems
+        for sub_index, sub_item in enumerate( item.items ):
+            self.load_item(
+                sub_item,
+                tool_path=tool_path,
+                panel_dict=elems,
+                integrated_panel_dict=integrated_elems,
+                load_panel_dict=load_panel_dict,
+                guid=sub_item.get( 'guid' ),
+                index=sub_index,
+                internal=internal,
+            )
+
+        # Ensure each tool's section is stored
+        for section_key, section_item_type, section_item in integrated_elems.panel_items_iter():
+            if section_item_type == panel_item_types.TOOL:
+                if section_item:
+                    tool_id = section_key.replace( 'tool_', '', 1 )
+                    self._integrated_section_by_tool[tool_id] = integrated_section.id, integrated_section.name
+
+        if load_panel_dict:
+            self._tool_panel[ key ] = section
+        # Always load sections into the integrated_tool_panel.
+        self._integrated_tool_panel.update_or_append( index, key, integrated_section )
+
+    def _load_tooldir_tag_set(self, item, elems, tool_path, integrated_elems, load_panel_dict):
+        directory = os.path.join( tool_path, item.get("dir") )
+        recursive = string_as_bool( item.get("recursive", True) )
+        self.__watch_directory( directory, elems, integrated_elems, load_panel_dict, recursive, force_watch=True )
+
+    def __watch_directory( self, directory, elems, integrated_elems, load_panel_dict, recursive, force_watch=False ):
+
+        def quick_load( tool_file, async_load=True ):
+            try:
+                tool = self.load_tool( tool_file )
+                self.__add_tool( tool, load_panel_dict, elems )
+                # Always load the tool into the integrated_panel_dict, or it will not be included in the integrated_tool_panel.xml file.
+                key = 'tool_%s' % str( tool.id )
+                integrated_elems[ key ] = tool
+
+                if async_load:
+                    self._load_tool_panel()
+                    self._save_integrated_tool_panel()
+                return tool.id
+            except Exception:
+                log.exception("Failed to load potential tool %s." % tool_file)
+                return None
+
+        tool_loaded = False
+        for name in os.listdir( directory ):
+            child_path = os.path.join(directory, name)
+            if os.path.isdir(child_path) and recursive:
+                self.__watch_directory(child_path, elems, integrated_elems, load_panel_dict, recursive)
+            elif self._looks_like_a_tool(child_path):
+                quick_load( child_path, async_load=False )
+                tool_loaded = True
+        if tool_loaded or force_watch:
+            self._tool_watcher.watch_directory( directory, quick_load )
+
+    def load_tool( self, config_file, guid=None, repository_id=None, use_cached=False, **kwds ):
+        """Load a single tool from the file named by `config_file` and return an instance of `Tool`."""
+        # Parse XML configuration file and get the root element
+        tool = None
+        if use_cached:
+            tool = self.load_tool_from_cache(config_file)
+        if not tool:
+            tool = self.create_tool( config_file=config_file, repository_id=repository_id, guid=guid, **kwds )
+            if tool.tool_shed_repository or not guid:
+                self.add_tool_to_cache(tool, config_file)
+        if not tool.id.startswith("__"):
+            # do not monitor special tools written to tmp directory - no reason
+            # to monitor such a large directory.
+            self._tool_watcher.watch_file( config_file, tool.id )
+        return tool
+
+    def add_tool_to_cache(self, tool, config_file):
+        tool_cache = getattr(self.app, 'tool_cache', None)
+        if tool_cache:
+            self.app.tool_cache.cache_tool(config_file, tool)
+
+    def load_tool_from_cache(self, config_file):
+        tool_cache = getattr( self.app, 'tool_cache', None )
+        tool = tool_cache and tool_cache.get_tool( config_file )
+        return tool
+
+    def load_hidden_lib_tool( self, path ):
+        tool_xml = os.path.join( os.getcwd(), "lib", path )
+        return self.load_hidden_tool( tool_xml )
+
+    def load_hidden_tool( self, config_file, **kwds ):
+        """ Load a hidden tool (in this context meaning one that does not
+        appear in the tool panel) and register it in _tools_by_id.
+        """
+        tool = self.load_tool( config_file, **kwds )
+        self.register_tool( tool )
+        return tool
+
+    def register_tool( self, tool ):
+        tool_id = tool.id
+        version = tool.version or None
+        if tool_id not in self._tool_versions_by_id:
+            self._tool_versions_by_id[ tool_id ] = { version: tool }
+        else:
+            self._tool_versions_by_id[ tool_id ][ version ] = tool
+        if tool_id in self._tools_by_id:
+            related_tool = self._tools_by_id[ tool_id ]
+            # This one becomes the default un-versioned tool
+            # if newer.
+            if self._newer_tool( tool, related_tool ):
+                self._tools_by_id[ tool_id ] = tool
+        else:
+            self._tools_by_id[ tool_id ] = tool
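+
+    # Bookkeeping sketch for register_tool (hypothetical versions): registering
+    # cat1 1.0.0 and then cat1 1.0.1 leaves _tool_versions_by_id['cat1'] holding
+    # both versions, with _tools_by_id['cat1'] pointing at the newer 1.0.1 tool.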
+
+    def package_tool( self, trans, tool_id ):
+        """
+        Create a tarball with the tool's xml, help images, and test data.
+        :param trans: the web transaction
+        :param tool_id: the tool ID from app.toolbox
+        :returns: tuple of tarball filename, success True/False, message/None
+        """
+        # Make sure the tool is actually loaded.
+        if tool_id not in self._tools_by_id:
+            raise ObjectNotFound("No tool found with id '%s'." % escape( tool_id ))
+        else:
+            tool = self._tools_by_id[ tool_id ]
+            return tool.to_archive()
+
+    def reload_tool_by_id( self, tool_id ):
+        """
+        Attempt to reload the tool identified by 'tool_id' and, if successful,
+        replace the old tool.
+        """
+        if tool_id not in self._tools_by_id:
+            message = "No tool with id %s" % escape( tool_id )
+            status = 'error'
+        else:
+            old_tool = self._tools_by_id[ tool_id ]
+            new_tool = self.load_tool( old_tool.config_file, use_cached=False )
+            # The tool may have been installed from a tool shed, so set the tool shed attributes.
+            # Since the tool version may have changed, we don't override it here.
+            new_tool.id = old_tool.id
+            new_tool.guid = old_tool.guid
+            new_tool.tool_shed = old_tool.tool_shed
+            new_tool.repository_name = old_tool.repository_name
+            new_tool.repository_owner = old_tool.repository_owner
+            new_tool.installed_changeset_revision = old_tool.installed_changeset_revision
+            new_tool.old_id = old_tool.old_id
+            # Replace old_tool with new_tool in self._tool_panel
+            tool_key = 'tool_' + tool_id
+            for key, val in self._tool_panel.items():
+                if key == tool_key:
+                    self._tool_panel[ key ] = new_tool
+                    break
+                elif key.startswith( 'section' ):
+                    if tool_key in val.elems:
+                        self._tool_panel[ key ].elems[ tool_key ] = new_tool
+                        break
+            # (Re-)Register the reloaded tool, this will handle
+            #  _tools_by_id and _tool_versions_by_id
+            self.register_tool( new_tool )
+            message = "Reloaded the tool:<br/>"
+            message += "<b>name:</b> %s<br/>" % escape( old_tool.name )
+            message += "<b>id:</b> %s<br/>" % escape( old_tool.id )
+            message += "<b>version:</b> %s" % escape( old_tool.version )
+            status = 'done'
+        return message, status
+
+    def remove_tool_by_id( self, tool_id, remove_from_panel=True ):
+        """
+        Attempt to remove the tool identified by 'tool_id'. Ignores
+        tool lineage - so to remove a tool with potentially multiple
+        versions send remove_from_panel=False and handle the logic of
+        promoting the next newest version of the tool into the panel
+        if needed.
+        """
+        if tool_id not in self._tools_by_id:
+            message = "No tool with id %s" % escape( tool_id )
+            status = 'error'
+        else:
+            tool = self._tools_by_id[ tool_id ]
+            del self._tools_by_id[ tool_id ]
+            tool_cache = getattr( self.app, 'tool_cache', None )
+            if tool_cache:
+                tool_cache.expire_tool( tool_id )
+            if remove_from_panel:
+                tool_key = 'tool_' + tool_id
+                for key, val in self._tool_panel.items():
+                    if key == tool_key:
+                        del self._tool_panel[ key ]
+                        break
+                    elif key.startswith( 'section' ):
+                        if tool_key in val.elems:
+                            del self._tool_panel[ key ].elems[ tool_key ]
+                            break
+                if tool_id in self.data_manager_tools:
+                    del self.data_manager_tools[ tool_id ]
+            # TODO: do we need to manually remove from the integrated panel here?
+            message = "Removed the tool:<br/>"
+            message += "<b>name:</b> %s<br/>" % escape( tool.name )
+            message += "<b>id:</b> %s<br/>" % escape( tool.id )
+            message += "<b>version:</b> %s" % escape( tool.version )
+            status = 'done'
+        return message, status
+
+    def get_sections( self ):
+        for k, v in self._tool_panel.items():
+            if isinstance( v, ToolSection ):
+                yield (v.id, v.name)
+
+    def find_section_id( self, tool_panel_section_id ):
+        """
+        Find the section ID referenced by the key or return '' indicating
+        no such section id.
+        """
+        if not tool_panel_section_id:
+            tool_panel_section_id = ''
+        else:
+            if tool_panel_section_id not in self._tool_panel:
+                # Hack introduced without comment in a29d54619813d5da992b897557162a360b8d610c;
+                # not sure why it is needed.
+                fixed_tool_panel_section_id = 'section_%s' % tool_panel_section_id
+                if fixed_tool_panel_section_id in self._tool_panel:
+                    tool_panel_section_id = fixed_tool_panel_section_id
+                else:
+                    tool_panel_section_id = ''
+        return tool_panel_section_id
+
+    def _load_workflow( self, workflow_id ):
+        """
+        Return an instance of 'Workflow' identified by `id`,
+        which is encoded in the tool panel.
+        """
+        id = self.app.security.decode_id( workflow_id )
+        stored = self.app.model.context.query( self.app.model.StoredWorkflow ).get( id )
+        return stored.latest_workflow
+
+    def tool_panel_contents( self, trans, **kwds ):
+        """ Filter tool_panel contents for displaying for user.
+        """
+        filter_method = self._build_filter_method( trans )
+        for _, item_type, elt in self._tool_panel.panel_items_iter():
+            elt = filter_method( elt, item_type )
+            if elt:
+                yield elt
+
+    def to_dict( self, trans, in_panel=True, **kwds ):
+        """
+        Create a dictionary representation of the toolbox: either the tool
+        panel (sections, labels, workflows and tools) when in_panel is True,
+        or a flat list of tools otherwise.
+        """
+        if in_panel:
+            panel_elts = list( self.tool_panel_contents( trans, **kwds ) )
+            # Produce panel.
+            rval = []
+            kwargs = dict(
+                trans=trans,
+                link_details=True
+            )
+            for elt in panel_elts:
+                rval.append( elt.to_dict( **kwargs ) )
+        else:
+            filter_method = self._build_filter_method( trans )
+            tools = []
+            for id, tool in self._tools_by_id.items():
+                tool = filter_method( tool, panel_item_types.TOOL )
+                if not tool:
+                    continue
+                tools.append( tool.to_dict( trans, link_details=True ) )
+            rval = tools
+
+        return rval
+
+    def shutdown(self):
+        exception = None
+        try:
+            self._tool_watcher.shutdown()
+        except Exception as e:
+            exception = e
+
+        try:
+            self._tool_conf_watcher.shutdown()
+        except Exception as e:
+            exception = exception or e
+
+        if exception:
+            raise exception
+
+    def _lineage_in_panel( self, panel_dict, tool=None, tool_lineage=None ):
+        """ If tool with same lineage already in panel (or section) - find
+        and return it. Otherwise return None.
+        """
+        if tool_lineage is None:
+            assert tool is not None
+            if not hasattr( tool, "lineage" ):
+                return None
+            tool_lineage = tool.lineage
+        lineage_tool_versions = tool_lineage.get_versions( reverse=True )
+        for lineage_tool_version in lineage_tool_versions:
+            lineage_tool = self._tool_from_lineage_version( lineage_tool_version )
+            if lineage_tool:
+                lineage_id = lineage_tool.id
+                if panel_dict.has_tool_with_id( lineage_id ):
+                    return panel_dict.get_tool_with_id( lineage_id )
+        return None
+
+    def _newer_tool( self, tool1, tool2 ):
+        """ Return True if tool1 is considered "newer" given its own lineage
+        description.
+        """
+        if not hasattr( tool1, "lineage" ):
+            return True
+        lineage_tool_versions = tool1.lineage.get_versions()
+        for lineage_tool_version in lineage_tool_versions:
+            lineage_tool = self._tool_from_lineage_version( lineage_tool_version )
+            if lineage_tool is tool1:
+                return False
+            if lineage_tool is tool2:
+                return True
+        return True
+
+    def _tool_from_lineage_version( self, lineage_tool_version ):
+        if lineage_tool_version.id_based:
+            return self._tools_by_id.get( lineage_tool_version.id, None )
+        else:
+            return self._tool_versions_by_id.get( lineage_tool_version.id, {} ).get( lineage_tool_version.version, None )
+
+    def _build_filter_method( self, trans ):
+        context = Bunch( toolbox=self, trans=trans )
+        filters = self._filter_factory.build_filters( trans )
+        return lambda element, item_type: _filter_for_panel(element, item_type, filters, context)
+
+
+def _filter_for_panel( item, item_type, filters, context ):
+    """
+    Filters tool panel elements so that only those that are compatible
+    with provided filters are kept.
+    """
+    def _apply_filter( filter_item, filter_list ):
+        for filter_method in filter_list:
+            try:
+                if not filter_method( context, filter_item ):
+                    return False
+            except Exception as e:
+                raise MessageException( "Toolbox filter exception from '%s': %s." % ( filter_method.__name__, e ) )
+        return True
+    if item_type == panel_item_types.TOOL:
+        if _apply_filter( item, filters[ 'tool' ] ):
+            return item
+    elif item_type == panel_item_types.LABEL:
+        if _apply_filter( item, filters[ 'label' ] ):
+            return item
+    elif item_type == panel_item_types.SECTION:
+        # Filter section item-by-item. Only show a label if there are
+        # non-filtered tools below it.
+
+        if _apply_filter( item, filters[ 'section' ] ):
+            cur_label_key = None
+            tools_under_label = False
+            filtered_elems = item.elems.copy()
+            for key, section_item_type, section_item in item.panel_items_iter():
+                if section_item_type == panel_item_types.TOOL:
+                    # Filter tool.
+                    if _apply_filter( section_item, filters[ 'tool' ] ):
+                        tools_under_label = True
+                    else:
+                        del filtered_elems[ key ]
+                elif section_item_type == panel_item_types.LABEL:
+                    # If there is a label and it does not have tools,
+                    # remove it.
+                    if cur_label_key and ( not tools_under_label or not _apply_filter( section_item, filters[ 'label' ] ) ):
+                        del filtered_elems[ cur_label_key ]
+
+                    # Reset attributes for new label.
+                    cur_label_key = key
+                    tools_under_label = False
+
+            # Handle last label.
+            if cur_label_key and not tools_under_label:
+                del filtered_elems[ cur_label_key ]
+
+            # Only return section if there are elements.
+            if len( filtered_elems ) != 0:
+                copy = item.copy()
+                copy.elems = filtered_elems
+                return copy
+
+    return None
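+
+# A sketch of the behaviour above (hypothetical panel): a section whose tools
+# are all filtered away is dropped entirely, and a label inside a section is
+# kept only when at least one unfiltered tool appears under it before the next
+# label.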
+
+
+class BaseGalaxyToolBox(AbstractToolBox):
+    """
+    Extend the AbstractToolBox with more Galaxy tooling-specific
+    functionality. Adds dependencies on dependency resolution and
+    tool loading modules, that an abstract description of panels
+    shouldn't really depend on.
+    """
+
+    def __init__(self, config_filenames, tool_root_dir, app, tool_conf_watcher=None):
+        super(BaseGalaxyToolBox, self).__init__(config_filenames, tool_root_dir, app, tool_conf_watcher=tool_conf_watcher)
+        self._init_dependency_manager()
+
+    @property
+    def sa_session( self ):
+        """
+        Returns a SQLAlchemy session
+        """
+        return self.app.model.context
+
+    def _looks_like_a_tool(self, path):
+        return looks_like_a_tool(path, enable_beta_formats=getattr(self.app.config, "enable_beta_tool_formats", False))
+
+    def _init_dependency_manager( self ):
+        self.dependency_manager = build_dependency_manager( self.app.config )
+
+    def reload_dependency_manager(self):
+        self._init_dependency_manager()
diff --git a/lib/galaxy/tools/toolbox/cache.py b/lib/galaxy/tools/toolbox/cache.py
new file mode 100644
index 0000000..2c2968a
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/cache.py
@@ -0,0 +1,27 @@
+
+
+class ToolCache(object):
+    """
+    Cache tool definitions to allow quickly reloading the whole
+    toolbox.
+    """
+
+    def __init__(self):
+        self._tools_by_path = {}
+        self._tool_paths_by_id = {}
+
+    def get_tool(self, config_filename):
+        """ Get the tool from the cache if the tool is up to date.
+        """
+        return self._tools_by_path.get(config_filename, None)
+
+    def expire_tool(self, tool_id):
+        if tool_id in self._tool_paths_by_id:
+            config_filename = self._tool_paths_by_id[tool_id]
+            del self._tool_paths_by_id[tool_id]
+            del self._tools_by_path[config_filename]
+
+    def cache_tool(self, config_filename, tool):
+        tool_id = str( tool.id )
+        self._tool_paths_by_id[tool_id] = config_filename
+        self._tools_by_path[config_filename] = tool
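+
+    # Minimal usage sketch (hypothetical path and tool object):
+    #
+    #     cache = ToolCache()
+    #     cache.cache_tool('/srv/galaxy/tools/cat/cat.xml', tool)
+    #     assert cache.get_tool('/srv/galaxy/tools/cat/cat.xml') is tool
+    #     cache.expire_tool(str(tool.id))
+    #     assert cache.get_tool('/srv/galaxy/tools/cat/cat.xml') is None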
diff --git a/lib/galaxy/tools/toolbox/filters/__init__.py b/lib/galaxy/tools/toolbox/filters/__init__.py
new file mode 100644
index 0000000..6b4e7b6
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/filters/__init__.py
@@ -0,0 +1,109 @@
+import logging
+import sys
+
+from copy import deepcopy
+
+from galaxy.util import listify
+
+log = logging.getLogger( __name__ )
+
+
+class FilterFactory( object ):
+    """
+    An instance of this class is responsible for filtering the list
+    of tools presented to a given user in a given context.
+    """
+
+    def __init__( self, toolbox ):
+        self.toolbox = toolbox
+
+        # Prepopulate the dict with filters that are always checked; other
+        # filters that depend on context (e.g. requests coming from Trackster,
+        # or no user found) are added in build_filters().
+        self.default_filters = dict( tool=[ _not_hidden, _handle_authorization ], section=[], label=[] )
+        # Add dynamic filters to these default filters.
+        config = toolbox.app.config
+        self.__base_modules = listify( getattr( config, "toolbox_filter_base_modules", "galaxy.tools.filters" ) )
+        self.__init_filters( "tool", getattr( config, "tool_filters", "" ), self.default_filters )
+        self.__init_filters( "section", getattr( config, "tool_section_filters", "" ), self.default_filters )
+        self.__init_filters( "label", getattr( config, "tool_label_filters", "" ), self.default_filters )
+
+    def build_filters( self, trans, **kwds ):
+        """
+        Build list of filters to check tools against given current context.
+        """
+        filters = deepcopy( self.default_filters )
+        if trans.user:
+            for name, value in trans.user.preferences.items():
+                if value.strip():
+                    user_filters = listify( value, do_strip=True )
+                    category = ''
+                    if name == 'toolbox_tool_filters':
+                        category = "tool"
+                    elif name == 'toolbox_section_filters':
+                        category = "section"
+                    elif name == 'toolbox_label_filters':
+                        category = "label"
+                    if category:
+                        validate = getattr( trans.app.config, 'user_%s_filters' % category, [] )
+                        self.__init_filters( category, user_filters, filters, validate=validate )
+        else:
+            if kwds.get( "trackster", False ):
+                filters[ "tool" ].append( _has_trackster_conf )
+
+        return filters
+
+    def __init_filters( self, key, filters, toolbox_filters, validate=None ):
+        for filter_name in filters:
+            if validate is None or filter_name in validate or filter_name in self.default_filters:
+                filter_function = self._build_filter_function( filter_name )
+                toolbox_filters[ key ].append( filter_function )
+            else:
+                log.warning( "Refusing to load %s filter '%s' which is not defined in config", key, filter_name )
+        return toolbox_filters
+
+    def _build_filter_function( self, filter_name ):
+        """Obtain python function (importing a submodule if needed)
+        corresponding to filter_name.
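+
+        For example (hypothetical config value), "examples:restrict_development_tools"
+        imports restrict_development_tools from a submodule of one of the
+        configured base modules (galaxy.tools.filters by default), while a bare
+        name such as "_not_hidden" resolves to a function defined in this module.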
+        """
+        if ":" in filter_name:
+            # Should be a submodule of filters (e.g. examples:restrict_development_tools)
+            (module_name, function_name) = filter_name.rsplit(":", 1)
+            function = self._import_filter( module_name, function_name )
+        else:
+            # No module found, just load a function from this file or
+            # one that has been explicitly imported.
+            function = globals()[ filter_name.strip() ]
+        return function
+
+    def _import_filter( self, module_name, function_name ):
+        function_name = function_name.strip()
+        for base_module in self.__base_modules:
+            full_module_name = "%s.%s" % ( base_module, module_name.strip() )
+            try:
+                __import__( full_module_name )
+            except ImportError:
+                # log.debug("Failed to load module.", exc_info=True)
+                continue
+            module = sys.modules[ full_module_name ]
+            if hasattr( module, function_name ):
+                return getattr( module, function_name )
+        raise Exception("Failed to find filter %s.%s" % (module_name, function_name))
+
+
+# Stock Filter Functions
+def _not_hidden( context, tool ):
+    return not tool.hidden
+
+
+def _handle_authorization( context, tool ):
+    user = context.trans.user
+    if tool.require_login and not user:
+        return False
+    if not tool.allow_user_access( user, attempting_access=False ):
+        return False
+    return True
+
+
+def _has_trackster_conf( context, tool ):
+    return tool.trackster_conf
diff --git a/lib/galaxy/tools/toolbox/filters/examples.py.sample b/lib/galaxy/tools/toolbox/filters/examples.py.sample
new file mode 100644
index 0000000..9d7e091
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/filters/examples.py.sample
@@ -0,0 +1,87 @@
+import logging
+log = logging.getLogger( __name__ )
+
+
+def restrict_upload_to_admins( context, tool ):
+    """
+    This tool filter will hide the upload tool from all users except admin
+    users. This can be enabled by renaming this file to examples.py and adding
+    the following to the ``app:main`` section of ``galaxy.ini``:
+
+        tool_filters = examples:restrict_upload_to_admins
+    """
+    if tool.name == "Upload File":
+        return context.trans.user_is_admin()
+    return True
+
+
+def disable_gatk( context, tool ):
+    """
+    This tool filter will disable all gatk tools when enabled. This can be
+    enabled by renaming this file to examples.py and adding the following to the
+    ``app:main`` section of ``galaxy.ini``:
+
+        tool_filters = examples:disable_gatk
+    """
+    return not any( requirement.name == "gatk" for requirement in tool.requirements )
+
+
+def explicit_user_mapping( context, section ):
+    """
+    This tool section filter uses an explicit mapping to describe what users can view
+    which tool sections. Anonymous users will only be able to view the "Get Data"
+    tool section (with id getext). This can be enabled by renaming this file to
+    examples.py and adding the following to the ``app:main`` section of
+    ``galaxy.ini``:
+
+        tool_section_filters = examples:explicit_user_mapping
+    """
+    users_sections = {
+        None: [ "getext" ],
+        "bob at example.com": [ "getext", "textutil", "filter" ],
+        "mary at example.com": [ "getext", "textutil", "filter", "ngs" ],
+    }
+    user = context.trans.user
+    email = user and user.email
+    valid_sections = users_sections.get( email, [] )
+    return section.id in valid_sections
+
+
+DEVELOPERS = [ "mary at example.com" ]
+
+
+def restrict_development_tools( context, tool ):
+    """
+    This tool filter will disable all tools with the string alpha appearing in
+    the version for all users except those explicitly appearing in the DEVELOPERS list
+    defined above. This can be enabled by renaming this file to examples.py and
+    adding the following to the ``app:main`` section of ``galaxy.ini``:
+
+        tool_filters = examples:restrict_development_tools
+    """
+    version = tool.version
+    user = context.trans.user
+    email = user and user.email
+    return "alpha" not in version or email in DEVELOPERS
+
+
+def per_host_tool_sections( context, section ):
+    """
+    This tool section filter results in different sections being displayed based on
+    the URL the user is making the request to. This could allow a single Galaxy instance
+    to seem like several different instances hosting different tools based on the URL used
+    to access the Galaxy. This can be enabled by renaming this file to examples.py and adding
+    the following to the ``app:main`` section of ``galaxy.ini``:
+
+        tool_section_filters = examples:per_host_tool_sections
+    """
+    host = context.trans.request.host
+    # Core tools used by all virtual hosts.
+    valid_sections = [ "getext", "textutil", "filter" ]
+    if "ngs.galaxy.example.com" in host:
+        valid_sections += [ "ngs" ]
+    elif "microarray.galaxy.example.com" in host:
+        valid_sections += [ "microarray" ]
+    elif "proteomics.galaxy.example.com" in host:
+        valid_sections += [ "proteomics" ]
+    return section.id in valid_sections
diff --git a/lib/galaxy/tools/toolbox/integrated_panel.py b/lib/galaxy/tools/toolbox/integrated_panel.py
new file mode 100644
index 0000000..e998570
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/integrated_panel.py
@@ -0,0 +1,106 @@
+import os
+import shutil
+import tempfile
+import time
+import traceback
+from xml.sax.saxutils import escape
+
+from .panel import (
+    panel_item_types,
+    ToolPanelElements
+)
+
+INTEGRATED_TOOL_PANEL_DESCRIPTION = """
+This is Galaxy's integrated tool panel and should be modified directly only for
+reordering tools inside a section. Each time Galaxy starts up, this file is
+synchronized with the various tool config files: tools, sections and labels
+added to one of these files will also be added here in the appropriate place,
+while elements removed from the tool config files will be correspondingly
+deleted from this file.
+To modify locally managed tools (e.g. from tool_conf.xml) modify that file
+directly and restart Galaxy. Whenever possible Tool Shed managed tools (e.g.
+from shed_tool_conf.xml) should be managed from within the Galaxy interface or
+via its API - but if changes are necessary (such as to hide a tool or re-assign
+its section) modify that file and restart Galaxy.
+"""
+
+
+class ManagesIntegratedToolPanelMixin(object):
+
+    def _init_integrated_tool_panel(self, config):
+        self.update_integrated_tool_panel = config.update_integrated_tool_panel
+        self._integrated_tool_panel_config = config.integrated_tool_panel_config
+        self._integrated_tool_panel_tracking_directory = getattr( config, "integrated_tool_panel_tracking_directory", None )
+        # In-memory dictionary that defines the layout of the tool_panel.xml file on disk.
+        self._integrated_tool_panel = ToolPanelElements()
+        self._integrated_tool_panel_config_has_contents = os.path.exists( self._integrated_tool_panel_config ) and os.stat( self._integrated_tool_panel_config ).st_size > 0
+        if self._integrated_tool_panel_config_has_contents:
+            self._load_integrated_tool_panel_keys()
+
+    def _save_integrated_tool_panel(self):
+        if self.update_integrated_tool_panel:
+            # Write the current in-memory integrated_tool_panel to the integrated_tool_panel.xml file.
+            # This will cover cases where the Galaxy administrator manually edited one or more of the tool panel
+            # config files, adding or removing locally developed tools or workflows.  The value of integrated_tool_panel
+            # will be False when things like functional tests are the caller.
+            self._write_integrated_tool_panel_config_file()
+
+    def _write_integrated_tool_panel_config_file( self ):
+        """
+        Write the current in-memory version of the integrated_tool_panel.xml file to disk.  Since Galaxy administrators
+        use this file to manage the tool panel, we'll not use xml_to_string() since it doesn't write XML quite right.
+        """
+        tracking_directory = self._integrated_tool_panel_tracking_directory
+        if not tracking_directory:
+            fd, filename = tempfile.mkstemp()
+        else:
+            if not os.path.exists(tracking_directory):
+                os.makedirs(tracking_directory)
+            name = "integrated_tool_panel_%.10f.xml" % time.time()
+            filename = os.path.join(tracking_directory, name)
+            open_file = open(filename, "w")
+            fd = open_file.fileno()
+        os.write( fd, '<?xml version="1.0"?>\n' )
+        os.write( fd, '<toolbox>\n' )
+        os.write( fd, '    <!--\n    ')
+        os.write( fd, '\n    '.join( [ l for l in INTEGRATED_TOOL_PANEL_DESCRIPTION.split("\n") if l ] ) )
+        os.write( fd, '\n    -->\n')
+        for key, item_type, item in self._integrated_tool_panel.panel_items_iter():
+            if item:
+                if item_type == panel_item_types.TOOL:
+                    os.write( fd, '    <tool id="%s" />\n' % item.id )
+                elif item_type == panel_item_types.WORKFLOW:
+                    os.write( fd, '    <workflow id="%s" />\n' % item.id )
+                elif item_type == panel_item_types.LABEL:
+                    label_id = item.id or ''
+                    label_text = item.text or ''
+                    label_version = item.version or ''
+                    os.write( fd, '    <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
+                elif item_type == panel_item_types.SECTION:
+                    section_id = item.id or ''
+                    section_name = item.name or ''
+                    section_version = item.version or ''
+                    os.write( fd, '    <section id="%s" name="%s" version="%s">\n' % ( escape(section_id), escape(section_name), section_version ) )
+                    for section_key, section_item_type, section_item in item.panel_items_iter():
+                        if section_item_type == panel_item_types.TOOL:
+                            if section_item:
+                                os.write( fd, '        <tool id="%s" />\n' % section_item.id )
+                        elif section_item_type == panel_item_types.WORKFLOW:
+                            if section_item:
+                                os.write( fd, '        <workflow id="%s" />\n' % section_item.id )
+                        elif section_item_type == panel_item_types.LABEL:
+                            if section_item:
+                                label_id = section_item.id or ''
+                                label_text = section_item.text or ''
+                                label_version = section_item.version or ''
+                                os.write( fd, '        <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
+                    os.write( fd, '    </section>\n' )
+        os.write( fd, '</toolbox>\n' )
+        os.close( fd )
+        destination = os.path.abspath( self._integrated_tool_panel_config )
+        if tracking_directory:
+            open(filename + ".stack", "w").write(''.join(traceback.format_stack()))
+            shutil.copy( filename, filename + ".copy" )
+            filename = filename + ".copy"
+        shutil.move( filename, destination )
+        os.chmod( self._integrated_tool_panel_config, 0o644 )
diff --git a/lib/galaxy/tools/toolbox/lineages/__init__.py b/lib/galaxy/tools/toolbox/lineages/__init__.py
new file mode 100644
index 0000000..f79c808
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/lineages/__init__.py
@@ -0,0 +1,6 @@
+from .factory import LineageMap
+from .interface import ToolLineage
+from .tool_shed import ToolVersionCache
+
+
+__all__ = ("LineageMap", "ToolLineage", "ToolVersionCache")
diff --git a/lib/galaxy/tools/toolbox/lineages/factory.py b/lib/galaxy/tools/toolbox/lineages/factory.py
new file mode 100644
index 0000000..2da5334
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/lineages/factory.py
@@ -0,0 +1,49 @@
+from .stock import StockLineage
+from .tool_shed import ToolShedLineage
+
+
+def remove_version_from_guid( guid ):
+    """
+    Remove the version from a Tool Shed-derived tool id (i.e. a GUID).
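+
+    A sketch with a hypothetical GUID of the usual layout
+    <tool_shed>/repos/<owner>/<name>/<id>/<version>:
+
+    >>> remove_version_from_guid( 'shed.example.org/repos/devteam/cat/cat1/1.0.0' )
+    'shed.example.org/repos/devteam/cat/cat1'
+    >>> remove_version_from_guid( 'cat1' ) is None
+    True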
+    """
+    if "/repos/" not in guid:
+        return None
+    last_slash = guid.rfind('/')
+    return guid[:last_slash]
+
+
+class LineageMap(object):
+    """ Map each unique tool id to a lineage object.
+    """
+
+    def __init__(self, app):
+        self.lineage_map = {}
+        self.app = app
+
+    def register(self, tool, from_toolshed=False):
+        tool_id = tool.id
+        versionless_tool_id = remove_version_from_guid( tool_id )
+        if from_toolshed:
+            lineage = ToolShedLineage.from_tool(self.app, tool)
+        else:
+            lineage = StockLineage.from_tool( tool )
+        if versionless_tool_id and versionless_tool_id not in self.lineage_map:
+            self.lineage_map[versionless_tool_id] = lineage
+        if tool_id not in self.lineage_map:
+            self.lineage_map[tool_id] = lineage
+        return self.lineage_map[tool_id]
+
+    def get(self, tool_id):
+        if tool_id not in self.lineage_map:
+            lineage = ToolShedLineage.from_tool_id( self.app, tool_id )
+            if lineage:
+                self.lineage_map[tool_id] = lineage
+
+        return self.lineage_map.get(tool_id, None)
+
+    def get_versionless(self, tool_id):
+        versionless_tool_id = remove_version_from_guid(tool_id)
+        return self.lineage_map.get(versionless_tool_id, None)
+
+
+__all__ = ("LineageMap", )
diff --git a/lib/galaxy/tools/toolbox/lineages/interface.py b/lib/galaxy/tools/toolbox/lineages/interface.py
new file mode 100644
index 0000000..6e7fe29
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/lineages/interface.py
@@ -0,0 +1,48 @@
+from abc import ABCMeta
+from abc import abstractmethod
+
+
+class ToolLineage(object):
+    """
+    """
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def get_versions( self, reverse=False ):
+        """ Return an ordered list of lineages (ToolLineageVersion) in this
+        chain, from oldest to newest.
+        """
+
+
+class ToolLineageVersion(object):
+    """ Represents a single tool in a lineage. If lineage is based
+    around GUIDs that somehow encode the version (either using GUID
+    or a simple tool id and a version). """
+
+    def __init__(self, id, version):
+        self.id = id
+        self.version = version
+
+    @staticmethod
+    def from_id_and_version( id, version ):
+        assert version is not None
+        return ToolLineageVersion( id, version )
+
+    @staticmethod
+    def from_guid( guid ):
+        return ToolLineageVersion( guid, None )
+
+    @property
+    def id_based( self ):
+        """ Return True if the lineage is defined by GUIDs (in this
+        case the indexer of the tools (i.e. the ToolBox) should ignore
+        the tool_version (because it is encoded in the GUID and managed
+        externally).
+        """
+        return self.version is None
+
+    def to_dict(self):
+        return dict(
+            id=self.id,
+            version=self.version,
+        )
diff --git a/lib/galaxy/tools/toolbox/lineages/stock.py b/lib/galaxy/tools/toolbox/lineages/stock.py
new file mode 100644
index 0000000..b6ed173
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/lineages/stock.py
@@ -0,0 +1,51 @@
+import threading
+
+from distutils.version import LooseVersion
+
+from .interface import ToolLineage
+from .interface import ToolLineageVersion
+
+
+class StockLineage(ToolLineage):
+    """ Simple tool's loaded directly from file system with lineage
+    determined solely by distutil's LooseVersion naming scheme.
+    """
+    lineages_by_id = {}
+    lock = threading.Lock()
+
+    def __init__(self, tool_id, **kwds):
+        self.tool_id = tool_id
+        self.tool_versions = set()
+
+    @staticmethod
+    def from_tool( tool ):
+        tool_id = tool.id
+        lineages_by_id = StockLineage.lineages_by_id
+        with StockLineage.lock:
+            if tool_id not in lineages_by_id:
+                lineages_by_id[ tool_id ] = StockLineage( tool_id )
+        lineage = lineages_by_id[ tool_id ]
+        lineage.register_version( tool.version )
+        return lineage
+
+    def register_version( self, tool_version ):
+        assert tool_version is not None
+        self.tool_versions.add( tool_version )
+
+    def get_versions( self, reverse=False ):
+        versions = [ ToolLineageVersion( self.tool_id, v ) for v in self.tool_versions ]
+        # Sort using LooseVersion which defines an appropriate __cmp__
+        # method for comparing tool versions.
+        return sorted( versions, key=_to_loose_version, reverse=reverse )
+
+    def to_dict(self):
+        return dict(
+            tool_id=self.tool_id,
+            tool_versions=list(self.tool_versions),
+            lineage_type='stock',
+        )
+
+
+def _to_loose_version( tool_lineage_version ):
+    version = str( tool_lineage_version.version )
+    return LooseVersion( version )
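
The point of sorting with LooseVersion instead of comparing strings is numeric ordering of dotted version components; a minimal, self-contained illustration (version strings invented):

    from distutils.version import LooseVersion

    versions = ["1.0.10", "1.0.2", "1.0.9"]
    # Plain string sorting puts "1.0.10" first, which is wrong for versions.
    assert sorted(versions) == ["1.0.10", "1.0.2", "1.0.9"]
    # LooseVersion compares the dotted components numerically.
    assert sorted(versions, key=LooseVersion) == ["1.0.2", "1.0.9", "1.0.10"]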
diff --git a/lib/galaxy/tools/toolbox/lineages/tool_shed.py b/lib/galaxy/tools/toolbox/lineages/tool_shed.py
new file mode 100644
index 0000000..baeac6f
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/lineages/tool_shed.py
@@ -0,0 +1,97 @@
+from .interface import ToolLineage, ToolLineageVersion
+
+try:
+    from galaxy.model.tool_shed_install import ToolVersion
+except ImportError:
+    ToolVersion = None
+
+
+class ToolVersionCache(object):
+    """
+    Instances of this class allow looking up tool_version objects from memory
+    (instead of querying the database) using the tool_version id or the tool_id.
+    This is used to look up a parent tool_version id from a child tool_id (or
+    the inverse) and to get all previous/next tool versions without numerous
+    database requests.
+    """
+    def __init__(self, app):
+        self.app = app
+        self.tool_version_by_id, self.tool_version_by_tool_id = self.get_tool_versions()
+        self.tool_id_to_parent_id, self.parent_id_to_tool_id = self.get_tva_map()
+
+    def get_tva_map(self):
+        """
+        Retrieve all ToolVersionAssociation objects from the database and build
+        dictionaries that can be used either to get a tool's parent tool_version id
+        (which can be used to get the parent's tool_version object) or to get the
+        child's tool id using the parent's tool_version id.
+        """
+        tvas = self.app.install_model.context.query(self.app.install_model.ToolVersionAssociation).all()
+        tool_id_to_parent_id = {tva.tool_id: tva.parent_id for tva in tvas}
+        parent_id_to_tool_id = {tva.parent_id: tva.tool_id for tva in tvas}
+        return tool_id_to_parent_id, parent_id_to_tool_id
+
+    def get_tool_versions(self):
+        """
+        Get all tool_version objects from the database and build two dictionaries,
+        with tool_version id or tool_id as key and the tool_version object as value.
+        """
+        tool_versions = self.app.install_model.context.query(self.app.install_model.ToolVersion).all()
+        tool_version_by_id = {tv.id: tv for tv in tool_versions}
+        tool_version_by_tool_id = {tv.tool_id: tv for tv in tool_versions}
+        return tool_version_by_id, tool_version_by_tool_id
+
+
+class ToolShedLineage(ToolLineage):
+    """ Representation of tool lineage derived from tool shed repository
+    installations. """
+
+    def __init__(self, app, tool_version, tool_shed_repository=None):
+        if ToolVersion is None:
+            raise Exception("Tool shed models not present, can't create tool shed lineages.")
+        self.app = app
+        self.tool_version_id = tool_version.id
+        # Only used for logging
+        self._tool_shed_repository = tool_shed_repository
+
+    @staticmethod
+    def from_tool( app, tool ):
+        # Make sure the tool has a tool_version.
+        if not get_installed_tool_version( app, tool.id ):
+            tool_version = ToolVersion( tool_id=tool.id, tool_shed_repository=tool.tool_shed_repository )
+            app.install_model.context.add( tool_version )
+            app.install_model.context.flush()
+        return ToolShedLineage( app, tool.tool_version )
+
+    @staticmethod
+    def from_tool_id( app, tool_id ):
+        tool_version = get_installed_tool_version( app, tool_id )
+        if tool_version:
+            return ToolShedLineage( app, tool_version )
+        else:
+            return None
+
+    def get_version_ids( self, reverse=False ):
+        tool_version = self.app.install_model.context.query( ToolVersion ).get( self.tool_version_id )
+        result = tool_version.get_version_ids( self.app, reverse=reverse )
+        return result
+
+    def get_versions( self, reverse=False ):
+        return [ ToolLineageVersion.from_guid(_) for _ in self.get_version_ids( reverse=reverse ) ]
+
+    def to_dict(self):
+        tool_shed_repository = self._tool_shed_repository
+        rval = dict(
+            tool_version_id=self.tool_version_id,
+            tool_versions=[v.to_dict() for v in self.get_versions()],
+            tool_shed_repository=tool_shed_repository if tool_shed_repository is not None else None,
+            lineage_type='tool_shed',
+        )
+        return rval
+
+
+def get_installed_tool_version( app, tool_id ):
+    return app.tool_version_cache.tool_version_by_tool_id.get(tool_id, None)
+
+
+__all__ = ( "ToolShedLineage", )
diff --git a/lib/galaxy/tools/toolbox/panel.py b/lib/galaxy/tools/toolbox/panel.py
new file mode 100644
index 0000000..12936de
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/panel.py
@@ -0,0 +1,170 @@
+from abc import abstractmethod
+
+from six import iteritems
+
+from galaxy.util import bunch
+from galaxy.util.dictifiable import Dictifiable
+from galaxy.util.odict import odict
+
+from .parser import ensure_tool_conf_item
+
+
+panel_item_types = bunch.Bunch(
+    TOOL="TOOL",
+    WORKFLOW="WORKFLOW",
+    SECTION="SECTION",
+    LABEL="LABEL",
+)
+
+
+class HasPanelItems:
+    """
+    """
+
+    @abstractmethod
+    def panel_items( self ):
+        """ Return an ordered dictionary-like object describing tool panel
+        items (such as workflows, tools, labels, and sections).
+        """
+
+    def panel_items_iter( self ):
+        """ Iterate through panel items each represented as a tuple of
+        (panel_key, panel_type, panel_content).
+        """
+        for panel_key, panel_value in iteritems(self.panel_items()):
+            if panel_value is None:
+                continue
+            panel_type = panel_item_types.SECTION
+            if panel_key.startswith("tool_"):
+                panel_type = panel_item_types.TOOL
+            elif panel_key.startswith("label_"):
+                panel_type = panel_item_types.LABEL
+            elif panel_key.startswith("workflow_"):
+                panel_type = panel_item_types.WORKFLOW
+            yield (panel_key, panel_type, panel_value)
+
+
+class ToolSection( Dictifiable, HasPanelItems, object ):
+    """
+    A group of tools with similar type/purpose that will be displayed as a
+    group in the user interface.
+    """
+
+    dict_collection_visible_keys = ( 'id', 'name', 'version' )
+
+    def __init__( self, item=None ):
+        """ Build a ToolSection from an ElementTree element or a dictionary.
+        """
+        if item is None:
+            item = dict()
+        self.name = item.get('name') or ''
+        self.id = item.get('id') or ''
+        self.version = item.get('version') or ''
+        self.elems = ToolPanelElements()
+
+    def copy( self ):
+        copy = ToolSection()
+        copy.name = self.name
+        copy.id = self.id
+        copy.version = self.version
+        copy.elems = self.elems.copy()
+        return copy
+
+    def to_dict( self, trans, link_details=False ):
+        """ Return a dict that includes section's attributes. """
+
+        section_dict = super( ToolSection, self ).to_dict()
+        section_elts = []
+        kwargs = dict(
+            trans=trans,
+            link_details=link_details
+        )
+        for elt in self.elems.values():
+            section_elts.append( elt.to_dict( **kwargs ) )
+        section_dict[ 'elems' ] = section_elts
+
+        return section_dict
+
+    def panel_items( self ):
+        return self.elems
+
+
+class ToolSectionLabel( Dictifiable, object ):
+    """
+    A label for a set of tools that can be displayed above groups of tools
+    and sections in the user interface.
+    """
+
+    dict_collection_visible_keys = ( 'id', 'text', 'version' )
+
+    def __init__( self, item ):
+        """ Build a ToolSectionLabel from an ElementTree element or a
+        dictionary.
+        """
+        item = ensure_tool_conf_item(item)
+        self.text = item.get( "text" )
+        self.id = item.get( "id" )
+        self.version = item.get( "version" ) or ''
+
+    def to_dict( self, **kwds ):
+        return super( ToolSectionLabel, self ).to_dict()
+
+
+class ToolPanelElements( HasPanelItems, odict ):
+    """ Represents an ordered dictionary of tool entries - abstraction
+    used both by tool panel itself (normal and integrated) and its sections.
+    """
+
+    def update_or_append( self, index, key, value ):
+        if key in self or index is None:
+            self[ key ] = value
+        else:
+            self.insert( index, key, value )
+
+    def has_tool_with_id( self, tool_id ):
+        key = 'tool_%s' % tool_id
+        return key in self
+
+    def replace_tool( self, previous_tool_id, new_tool_id, tool ):
+        previous_key = 'tool_%s' % previous_tool_id
+        new_key = 'tool_%s' % new_tool_id
+        index = self.keys().index( previous_key )
+        del self[ previous_key ]
+        self.insert( index, new_key, tool )
+
+    def index_of_tool_id( self, tool_id ):
+        query_key = 'tool_%s' % tool_id
+        for index, target_key in enumerate( self.keys() ):
+            if query_key == target_key:
+                return index
+        return None
+
+    def insert_tool( self, index, tool ):
+        key = "tool_%s" % tool.id
+        self.insert( index, key, tool )
+
+    def get_tool_with_id( self, tool_id ):
+        key = "tool_%s" % tool_id
+        return self[ key ]
+
+    def append_tool( self, tool ):
+        key = "tool_%s" % tool.id
+        self[ key ] = tool
+
+    def stub_tool( self, key ):
+        key = "tool_%s" % key
+        self[ key ] = None
+
+    def stub_workflow( self, key ):
+        key = 'workflow_%s' % key
+        self[ key ] = None
+
+    def stub_label( self, key ):
+        key = 'label_%s' % key
+        self[ key ] = None
+
+    def append_section( self, key, section_elems ):
+        self[ key ] = section_elems
+
+    def panel_items( self ):
+        return self
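
The "tool_" / "label_" / "workflow_" key prefixes are what panel_items_iter keys off; a small sketch (the Bunch stand-in for a tool object is hypothetical):

    from galaxy.util.bunch import Bunch
    from galaxy.tools.toolbox.panel import ToolPanelElements, ToolSection

    panel = ToolPanelElements()
    panel.append_tool(Bunch(id="cat1"))          # stored under key "tool_cat1"
    panel.append_section("filters", ToolSection())

    for key, item_type, value in panel.panel_items_iter():
        print key, item_type                     # tool_cat1 TOOL, then filters SECTION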
diff --git a/lib/galaxy/tools/toolbox/parser.py b/lib/galaxy/tools/toolbox/parser.py
new file mode 100644
index 0000000..b81e642
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/parser.py
@@ -0,0 +1,158 @@
+"""This module is used to parse tool_conf files.
+
+These files define tool lists, sections, labels, etc. - the elements that
+make up the Galaxy tool panel.
+"""
+from abc import ABCMeta, abstractmethod
+
+import six
+import yaml
+
+from galaxy.util import parse_xml, string_as_bool
+
+DEFAULT_MONITOR = False
+
+
+ at six.add_metaclass(ABCMeta)
+class ToolConfSource(object):
+    """Interface represents a container of tool references."""
+
+    @abstractmethod
+    def parse_items(self):
+        """Return a list of ToolConfItem describing source."""
+
+    @abstractmethod
+    def parse_tool_path(self):
+        """Return tool_path for tools in this toolbox or None."""
+
+    @abstractmethod
+    def is_shed_tool_conf(self):
+        """Decide if this tool conf is a shed tool conf."""
+
+    def parse_monitor(self):
+        """Monitor the toolbox configuration source for changes and reload."""
+        return DEFAULT_MONITOR
+
+
+class XmlToolConfSource(ToolConfSource):
+
+    def __init__(self, config_filename):
+        tree = parse_xml(config_filename)
+        self.root = tree.getroot()
+
+    def parse_tool_path(self):
+        return self.root.get('tool_path')
+
+    def parse_items(self):
+        return [ensure_tool_conf_item(_) for _ in self.root.getchildren()]
+
+    def is_shed_tool_conf(self):
+        has_tool_path = self.parse_tool_path() is not None
+        is_shed_conf = string_as_bool(self.root.get("is_shed_conf", "True"))
+        return has_tool_path and is_shed_conf
+
+    def parse_monitor(self):
+        return string_as_bool(self.root.get('monitor', DEFAULT_MONITOR))
+
+
+class YamlToolConfSource(ToolConfSource):
+
+    def __init__(self, config_filename):
+        with open(config_filename, "r") as f:
+            as_dict = yaml.load(f)
+        self.as_dict = as_dict
+
+    def parse_tool_path(self):
+        return self.as_dict.get('tool_path')
+
+    def parse_items(self):
+        return [ToolConfItem.from_dict(_) for _ in self.as_dict.get('items')]
+
+    def parse_monitor(self):
+        return self.as_dict.get('monitor', DEFAULT_MONITOR)
+
+    def is_shed_tool_conf(self):
+        return False
+
+
+class ToolConfItem(object):
+    """Abstract description of a tool conf item.
+
+    These may include tools, labels, sections, and workflows.
+    """
+
+    def __init__(self, type, attributes, elem=None):
+        self.type = type
+        self.attributes = attributes
+        self._elem = elem
+
+    @classmethod
+    def from_dict(cls, _as_dict):
+        as_dict = _as_dict.copy()
+        type = as_dict.get('type')
+        del as_dict['type']
+        attributes = as_dict
+        if type == 'section':
+            items = [cls.from_dict(_) for _ in as_dict['items']]
+            del as_dict['items']
+            item = ToolConfSection(attributes, items)
+        else:
+            item = ToolConfItem(type, attributes)
+        return item
+
+    def get(self, key, default=None):
+        return self.attributes.get(key, default)
+
+    @property
+    def has_elem(self):
+        return self._elem is not None
+
+    @property
+    def elem(self):
+        if self._elem is None:
+            raise Exception("item.elem called on toolbox element from non-XML source")
+        return self._elem
+
+    @property
+    def labels(self):
+        labels = None
+        if "labels" in self.attributes:
+            labels = [ label.strip() for label in self.attributes["labels"].split( "," ) ]
+        return labels
+
+
+class ToolConfSection(ToolConfItem):
+
+    def __init__(self, attributes, items, elem=None):
+        super(ToolConfSection, self).__init__('section', attributes, elem)
+        self.items = items
+
+
+def ensure_tool_conf_item(xml_or_item):
+    if xml_or_item is None:
+        return None
+    elif isinstance(xml_or_item, ToolConfItem):
+        return xml_or_item
+    else:
+        elem = xml_or_item
+        type = elem.tag
+        attributes = elem.attrib
+        if type != "section":
+            return ToolConfItem(type, attributes, elem)
+        else:
+            items = [ensure_tool_conf_item(_) for _ in elem.getchildren()]
+            return ToolConfSection(attributes, items, elem=elem)
+
+
+def get_toolbox_parser(config_filename):
+    is_yaml = any(config_filename.endswith(e) for e in [".yml", ".yaml", ".json"])
+    if is_yaml:
+        return YamlToolConfSource(config_filename)
+    else:
+        return XmlToolConfSource(config_filename)
+
+
+__all__ = (
+    "get_toolbox_parser",
+    "ensure_tool_conf_item",
+)
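
For reference, a YAML tool conf that YamlToolConfSource would accept could look like the following; the file name and contents are invented for illustration:

    # tool_conf.yml
    #
    # tool_path: tools
    # items:
    #   - type: label
    #     id: start
    #     text: Getting Started
    #   - type: section
    #     id: basics
    #     name: Basic Tools
    #     items:
    #       - type: tool
    #         file: cat.xml

    from galaxy.tools.toolbox.parser import get_toolbox_parser

    source = get_toolbox_parser("tool_conf.yml")  # extension selects YamlToolConfSource
    items = source.parse_items()                  # ToolConfItem / ToolConfSection objects
    assert not source.is_shed_tool_conf()         # YAML confs are never shed confs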
diff --git a/lib/galaxy/tools/toolbox/tags.py b/lib/galaxy/tools/toolbox/tags.py
new file mode 100644
index 0000000..fab34a7
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/tags.py
@@ -0,0 +1,77 @@
+""" Handle details of tool tagging - perhaps a deprecated feature.
+"""
+import logging
+
+from abc import ABCMeta
+from abc import abstractmethod
+
+log = logging.getLogger( __name__ )
+
+
+def tool_tag_manager( app ):
+    """ Build a tool tag manager according to app's configuration
+    and return it.
+    """
+    if hasattr( app.config, "get_bool" ) and app.config.get_bool( 'enable_tool_tags', False ):
+        return PersistentToolTagManager( app )
+    else:
+        return NullToolTagManager()
+
+
+class AbstractToolTagManager( object ):
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def reset_tags( self ):
+        """ Starting to load tool panels, reset all tags.
+        """
+
+    @abstractmethod
+    def handle_tags( self, tool_id, tool_definition_source ):
+        """ Parse out tags and persist them.
+        """
+
+
+class NullToolTagManager( AbstractToolTagManager ):
+
+    def reset_tags( self ):
+        return None
+
+    def handle_tags( self, tool_id, tool_definition_source ):
+        return None
+
+
+class PersistentToolTagManager( AbstractToolTagManager ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.sa_session = app.model.context
+
+    def reset_tags( self ):
+        log.info( "removing all tool tag associations (%d)", self.sa_session.query( self.app.model.ToolTagAssociation ).count() )
+        self.sa_session.query( self.app.model.ToolTagAssociation ).delete()
+        self.sa_session.flush()
+
+    def handle_tags( self, tool_id, tool_definition_source ):
+        elem = tool_definition_source
+        if self.app.config.get_bool( 'enable_tool_tags', False ):
+            tag_names = elem.get( "tags", "" ).split( "," )
+            for tag_name in tag_names:
+                if tag_name == '':
+                    continue
+                tag = self.sa_session.query( self.app.model.Tag ).filter_by( name=tag_name ).first()
+                if not tag:
+                    tag = self.app.model.Tag( name=tag_name )
+                    self.sa_session.add( tag )
+                    self.sa_session.flush()
+                    tta = self.app.model.ToolTagAssociation( tool_id=tool_id, tag_id=tag.id )
+                    self.sa_session.add( tta )
+                    self.sa_session.flush()
+                else:
+                    for tagged_tool in tag.tagged_tools:
+                        if tagged_tool.tool_id == tool_id:
+                            break
+                    else:
+                        tta = self.app.model.ToolTagAssociation( tool_id=tool_id, tag_id=tag.id )
+                        self.sa_session.add( tta )
+                        self.sa_session.flush()
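
A sketch of the intended call pattern; "app" and "tool_elem" are hypothetical stand-ins for a configured Galaxy application and a parsed tool element whose comma-separated "tags" attribute carries the tag names:

    manager = tool_tag_manager(app)          # Persistent or Null, per enable_tool_tags
    manager.reset_tags()                     # typically at toolbox (re)load time
    manager.handle_tags("cat1", tool_elem)   # tool_elem.get("tags") -> e.g. "text,filters"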
diff --git a/lib/galaxy/tools/toolbox/watcher.py b/lib/galaxy/tools/toolbox/watcher.py
new file mode 100644
index 0000000..a7df04c
--- /dev/null
+++ b/lib/galaxy/tools/toolbox/watcher.py
@@ -0,0 +1,232 @@
+import logging
+import os.path
+import threading
+import time
+from galaxy.util.hash_util import md5_hash_file
+
+try:
+    from watchdog.events import FileSystemEventHandler
+    from watchdog.observers import Observer
+    from watchdog.observers.polling import PollingObserver
+    can_watch = True
+except ImportError:
+    Observer = None
+    FileSystemEventHandler = object
+    PollingObserver = None
+    can_watch = False
+
+from galaxy.util.postfork import register_postfork_function
+
+log = logging.getLogger( __name__ )
+
+
+def get_observer_class(config_value, default, monitor_what_str):
+    """
+    """
+    config_value = config_value or default
+    config_value = str(config_value).lower()
+    if config_value in ("true", "yes", "on", "auto"):
+        expect_observer = True
+        observer_class = Observer
+    elif config_value == "polling":
+        expect_observer = True
+        observer_class = PollingObserver
+    elif config_value in ('false', 'no', 'off'):
+        expect_observer = False
+        observer_class = None
+    else:
+        message = "Unrecognized value for watch_tools config option: %s" % config_value
+        raise Exception(message)
+
+    if expect_observer and observer_class is None:
+        message = "Watchdog library unavailable, cannot monitor %s." % monitor_what_str
+        if config_value == "auto":
+            log.info(message)
+        else:
+            raise Exception(message)
+
+    return observer_class
+
+
+def get_tool_conf_watcher(reload_callback):
+    return ToolConfWatcher(reload_callback)
+
+
+def get_tool_watcher(toolbox, config):
+    config_value = getattr(config, "watch_tools", None)
+    observer_class = get_observer_class(config_value, default="False", monitor_what_str="tools")
+
+    if observer_class is not None:
+        return ToolWatcher(toolbox, observer_class=observer_class)
+    else:
+        return NullWatcher()
+
+
+class ToolConfWatcher(object):
+
+    def __init__(self, reload_callback):
+        self.paths = {}
+        self._active = False
+        self._lock = threading.Lock()
+        self.thread = threading.Thread(target=self.check, name="ToolConfWatcher.thread")
+        self.thread.daemon = True
+        self.event_handler = ToolConfFileEventHandler(reload_callback)
+
+    def start(self):
+        if not self._active:
+            self._active = True
+            register_postfork_function(self.thread.start)
+
+    def shutdown(self):
+        if self._active:
+            self._active = False
+            self.thread.join()
+
+    def check(self):
+        hashes = { key: None for key in self.paths.keys() }
+        while self._active:
+            do_reload = False
+            with self._lock:
+                paths = list(self.paths.keys())
+            for path in paths:
+                if not os.path.exists(path):
+                    continue
+                mod_time = self.paths[path]
+                if not hashes.get(path, None):
+                    hashes[path] = md5_hash_file(path)
+                new_mod_time = None
+                if os.path.exists(path):
+                    new_mod_time = time.ctime(os.path.getmtime(path))
+                if new_mod_time != mod_time:
+                    if hashes[path] != md5_hash_file(path):
+                        self.paths[path] = new_mod_time
+                        log.debug("The file '%s' has changed.", path)
+                        do_reload = True
+
+            if do_reload:
+                with self._lock:
+                    t = threading.Thread(target=self.event_handler.on_any_event)
+                    t.daemon = True
+                    t.start()
+            time.sleep(1)
+
+    def monitor(self, path):
+        mod_time = None
+        if os.path.exists(path):
+            mod_time = time.ctime(os.path.getmtime(path))
+        with self._lock:
+            self.paths[path] = mod_time
+        self.start()
+
+    def watch_file(self, tool_conf_file):
+        self.monitor(tool_conf_file)
+        self.start()
+
+
+class NullToolConfWatcher(object):
+
+    def start(self):
+        pass
+
+    def shutdown(self):
+        pass
+
+    def monitor(self, conf_path):
+        pass
+
+    def watch_file(self, tool_conf_file):
+        pass
+
+
+class ToolConfFileEventHandler(FileSystemEventHandler):
+
+    def __init__(self, reload_callback):
+        self.reload_callback = reload_callback
+
+    def on_any_event(self, event=None):
+        self._handle(event)
+
+    def _handle(self, event):
+        self.reload_callback()
+
+
+class ToolWatcher(object):
+
+    def __init__(self, toolbox, observer_class):
+        self.toolbox = toolbox
+        self.tool_file_ids = {}
+        self.tool_dir_callbacks = {}
+        self.monitored_dirs = {}
+        self.observer = observer_class()
+        self.event_handler = ToolFileEventHandler(self)
+        self.start()
+
+    def start(self):
+        register_postfork_function(self.observer.start)
+
+    def shutdown(self):
+        self.observer.stop()
+        self.observer.join()
+
+    def monitor(self, dir):
+        self.observer.schedule(self.event_handler, dir, recursive=False)
+
+    def watch_file(self, tool_file, tool_id):
+        tool_file = os.path.abspath( tool_file )
+        self.tool_file_ids[tool_file] = tool_id
+        tool_dir = os.path.dirname( tool_file )
+        if tool_dir not in self.monitored_dirs:
+            self.monitored_dirs[ tool_dir ] = tool_dir
+            self.monitor( tool_dir )
+
+    def watch_directory(self, tool_dir, callback):
+        tool_dir = os.path.abspath( tool_dir )
+        self.tool_dir_callbacks[tool_dir] = callback
+        if tool_dir not in self.monitored_dirs:
+            self.monitored_dirs[ tool_dir ] = tool_dir
+            self.monitor( tool_dir )
+
+
+class ToolFileEventHandler(FileSystemEventHandler):
+
+    def __init__(self, tool_watcher):
+        self.tool_watcher = tool_watcher
+
+    def on_any_event(self, event):
+        self._handle(event)
+
+    def _handle(self, event):
+        # modified events will only have src path, move events will
+        # have dest_path and src_path but we only care about dest. So
+        # look at dest if it exists else use src.
+        path = getattr( event, 'dest_path', None ) or event.src_path
+        path = os.path.abspath( path )
+        tool_id = self.tool_watcher.tool_file_ids.get( path, None )
+        if tool_id:
+            try:
+                self.tool_watcher.toolbox.reload_tool_by_id(tool_id)
+            except Exception:
+                pass
+        elif path.endswith(".xml"):
+            directory = os.path.dirname( path )
+            dir_callback = self.tool_watcher.tool_dir_callbacks.get( directory, None )
+            if dir_callback:
+                tool_file = event.src_path
+                tool_id = dir_callback( tool_file )
+                if tool_id:
+                    self.tool_watcher.tool_file_ids[ tool_file ] = tool_id
+
+
+class NullWatcher(object):
+
+    def start(self):
+        pass
+
+    def shutdown(self):
+        pass
+
+    def watch_file(self, tool_file, tool_id):
+        pass
+
+    def watch_directory(self, tool_dir, callback):
+        pass
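
Summarizing get_observer_class; the call below is a sketch and assumes the watchdog package is importable:

    # "true" / "yes" / "on" / "auto" -> watchdog's native Observer
    # "polling"                      -> PollingObserver (stat-based; works on NFS)
    # "false" / "no" / "off"         -> None (callers fall back to a null watcher)
    # anything else                  -> raises
    # With watchdog missing, "auto" logs and disables; explicit values raise.
    observer_class = get_observer_class("polling", default="False", monitor_what_str="tools")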
diff --git a/lib/galaxy/tools/unzip_collection.xml b/lib/galaxy/tools/unzip_collection.xml
new file mode 100644
index 0000000..159589a
--- /dev/null
+++ b/lib/galaxy/tools/unzip_collection.xml
@@ -0,0 +1,23 @@
+<tool id="__UNZIP_COLLECTION__"
+      name="Unzip Collection"
+      version="1.0.0"
+      tool_type="unzip_collection">
+  <type class="UnzipCollectionTool" module="galaxy.tools" />
+  <action module="galaxy.tools.actions.model_operations"
+          class="ModelOperationToolAction"/>
+  <inputs>
+    <param type="data_collection" collection_type="paired" name="input" label="Input Paired Dataset" />
+  </inputs>
+  <outputs>
+    <data name="forward" label="${on_string} (forward)" />
+    <data name="reverse" label="${on_string} (reverse)" />
+  </outputs>
+  <help>
+    This tool takes a paired dataset collection and builds two datasets
+    from it. If mapped over a list of paired datasets, this tool will
+    produce two lists of datasets.
+
+    This tool will create new history datasets from your collection
+    but your quota usage will not increase.
+  </help>
+</tool>
diff --git a/lib/galaxy/tools/util/__init__.py b/lib/galaxy/tools/util/__init__.py
new file mode 100644
index 0000000..77298be
--- /dev/null
+++ b/lib/galaxy/tools/util/__init__.py
@@ -0,0 +1,6 @@
+"""
+Utilities used by various Galaxy tools
+
+FIXME: These are used by tool scripts, not the framework, and should not live
+       in this package.
+"""
diff --git a/lib/galaxy/tools/util/galaxyops/__init__.py b/lib/galaxy/tools/util/galaxyops/__init__.py
new file mode 100644
index 0000000..b7a4a5f
--- /dev/null
+++ b/lib/galaxy/tools/util/galaxyops/__init__.py
@@ -0,0 +1,41 @@
+"""Utility functions for galaxyops"""
+import sys
+
+
+def warn( msg ):
+    # TODO: since everything printed to stderr results in job.state = error, we
+    # don't need both a warn and a fail...
+    print >> sys.stderr, msg
+    sys.exit( 1 )
+
+
+def fail( msg ):
+    print >> sys.stderr, msg
+    sys.exit( 1 )
+
+
+# Default chrom, start, end, strand cols for a bed file
+BED_DEFAULT_COLS = 0, 1, 2, 5
+
+
+def parse_cols_arg( cols ):
+    """Parse a columns command line argument into a four-tuple"""
+    if cols:
+        # Handle case where no strand column included - in this case, cols
+        # looks something like 1,2,3,
+        if cols.endswith( ',' ):
+            cols += '0'
+        col_list = map( lambda x: int( x ) - 1, cols.split(",") )
+        return col_list
+    else:
+        return BED_DEFAULT_COLS
+
+
+def default_printer( stream, exc, obj ):
+    print >> stream, "%d: %s" % ( obj.linenum, obj.current_line )
+    print >> stream, "\tError: %s" % ( str(exc) )
+
+
+def skipped( reader, filedesc="" ):
+    first_line, line_contents, problem = reader.skipped_lines[0]
+    return 'Skipped %d invalid lines%s, 1st line #%d: "%s", problem: %s' % ( reader.skipped, filedesc, first_line, line_contents, problem )
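
Worked examples of parse_cols_arg's behavior (the column strings are invented):

    # 1-based command-line columns become 0-based indexes.
    assert parse_cols_arg("1,4,5,6") == [0, 3, 4, 5]
    # A trailing comma means "no strand column"; it is padded with "0",
    # which becomes the sentinel index -1.
    assert parse_cols_arg("1,2,3,") == [0, 1, 2, -1]
    # An empty argument falls back to the BED defaults.
    assert parse_cols_arg("") == (0, 1, 2, 5)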
diff --git a/lib/galaxy/tools/util/maf_utilities.py b/lib/galaxy/tools/util/maf_utilities.py
new file mode 100644
index 0000000..d2e92b0
--- /dev/null
+++ b/lib/galaxy/tools/util/maf_utilities.py
@@ -0,0 +1,753 @@
+#!/usr/bin/env python
+"""
+Provides wrappers and utilities for working with MAF files and alignments.
+"""
+# Dan Blankenberg
+import logging
+import os
+import string
+import sys
+import tempfile
+
+import bx.align.maf
+import bx.intervals
+import bx.interval_index_file
+from errno import EMFILE
+import resource
+from copy import deepcopy
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+log = logging.getLogger(__name__)
+
+
+GAP_CHARS = [ '-' ]
+SRC_SPLIT_CHAR = '.'
+
+
+def src_split( src ):
+    fields = src.split( SRC_SPLIT_CHAR, 1 )
+    spec = fields.pop( 0 )
+    if fields:
+        chrom = fields.pop( 0 )
+    else:
+        chrom = spec
+    return spec, chrom
+
+
+def src_merge( spec, chrom, contig=None ):
+    if None in [ spec, chrom ]:
+        spec = chrom = spec or chrom
+    return bx.align.maf.src_merge( spec, chrom, contig )
+
+
+def get_species_in_block( block ):
+    species = []
+    for c in block.components:
+        spec, chrom = src_split( c.src )
+        if spec not in species:
+            species.append( spec )
+    return species
+
+
+def tool_fail( msg="Unknown Error" ):
+    print >> sys.stderr, "Fatal Error: %s" % msg
+    sys.exit( 1 )
+
+
+class TempFileHandler( object ):
+    '''
+    Handles creating, opening, closing, and deleting of Temp files, with a
+    maximum number of files open at one time.
+    '''
+
+    DEFAULT_MAX_OPEN_FILES = max( resource.getrlimit( resource.RLIMIT_NOFILE )[0] / 2, 1 )
+
+    def __init__( self, max_open_files=None, **kwds ):
+        if max_open_files is None:
+            max_open_files = self.DEFAULT_MAX_OPEN_FILES
+        self.max_open_files = max_open_files
+        self.files = []
+        self.open_file_indexes = []
+        self.kwds = kwds
+
+    def get_open_tempfile( self, index=None, **kwds ):
+        if index is not None and index in self.open_file_indexes:
+            self.open_file_indexes.remove( index )
+        else:
+            if self.max_open_files:
+                while len( self.open_file_indexes ) >= self.max_open_files:
+                    self.close( self.open_file_indexes[0] )
+            if index is None:
+                index = len( self.files )
+                temp_kwds = dict( self.kwds )
+                temp_kwds.update( kwds )
+                # Being able to use delete=True here would simplify this a bit,
+                # but we support Python 2.4 in these tools
+                while True:
+                    try:
+                        tmp_file = tempfile.NamedTemporaryFile( **temp_kwds )
+                        filename = tmp_file.name
+                        break
+                    except OSError as e:
+                        if self.open_file_indexes and e.errno == EMFILE:
+                            self.max_open_files = len( self.open_file_indexes )
+                            self.close( self.open_file_indexes[0] )
+                        else:
+                            raise e
+                tmp_file.close()
+                self.files.append( open( filename, 'w+b' ) )
+            else:
+                while True:
+                    try:
+                        self.files[ index ] = open( self.files[ index ].name, 'r+b' )
+                        break
+                    except OSError as e:
+                        if self.open_file_indexes and e.errno == EMFILE:
+                            self.max_open_files = len( self.open_file_indexes )
+                            self.close( self.open_file_indexes[0] )
+                        else:
+                            raise e
+                self.files[ index ].seek( 0, 2 )
+        self.open_file_indexes.append( index )
+        return index, self.files[ index ]
+
+    def close( self, index, delete=False ):
+        if index in self.open_file_indexes:
+            self.open_file_indexes.remove( index )
+        rval = self.files[ index ].close()
+        if delete:
+            try:
+                os.unlink( self.files[ index ].name )
+            except OSError:
+                pass
+        return rval
+
+    def flush( self, index ):
+        if index in self.open_file_indexes:
+            self.files[ index ].flush()
+
+    def __del__( self ):
+        for i in xrange( len( self.files ) ):
+            self.close( i, delete=True )
+
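
A minimal usage sketch for TempFileHandler; the cap of 4 open files is arbitrary:

    tfh = TempFileHandler(max_open_files=4)
    index, fh = tfh.get_open_tempfile()   # create a new tempfile, kept open
    fh.write("ACGT")
    tfh.flush(index)
    # Re-requesting by index transparently reopens the file (appending at
    # the end) if it was evicted to respect max_open_files.
    index, fh = tfh.get_open_tempfile(index)
    tfh.close(index, delete=True)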
+
+# an object corresponding to a reference layered alignment
+class RegionAlignment( object ):
+
+    DNA_COMPLEMENT = string.maketrans( "ACGTacgt", "TGCAtgca" )
+    MAX_SEQUENCE_SIZE = sys.maxint  # Maximum length of sequence allowed
+
+    def __init__( self, size, species=[], temp_file_handler=None ):
+        assert size <= self.MAX_SEQUENCE_SIZE, "Maximum length allowed for an individual sequence has been exceeded (%i > %i)." % ( size, self.MAX_SEQUENCE_SIZE )
+        self.size = size
+        if not temp_file_handler:
+            temp_file_handler = TempFileHandler()
+        self.temp_file_handler = temp_file_handler
+        self.sequences = {}
+        if not isinstance( species, list ):
+            species = [species]
+        for spec in species:
+            self.add_species( spec )
+
+    # add a species to the alignment
+    def add_species( self, species ):
+        # make temporary sequence files
+        file_index, fh = self.temp_file_handler.get_open_tempfile()
+        self.sequences[species] = file_index
+        fh.write( "-" * self.size )
+
+    # returns the names for species found in alignment, skipping names as requested
+    def get_species_names( self, skip=[] ):
+        if not isinstance( skip, list ):
+            skip = [skip]
+        names = self.sequences.keys()
+        for name in skip:
+            try:
+                names.remove( name )
+            except:
+                pass
+        return names
+
+    # returns the sequence for a species
+    def get_sequence( self, species ):
+        file_index, fh = self.temp_file_handler.get_open_tempfile( self.sequences[species] )
+        fh.seek( 0 )
+        return fh.read()
+
+    # returns the reverse complement of the sequence for a species
+    def get_sequence_reverse_complement( self, species ):
+        complement = [base for base in self.get_sequence( species ).translate( self.DNA_COMPLEMENT )]
+        complement.reverse()
+        return "".join( complement )
+
+    # sets a position for a species
+    def set_position( self, index, species, base ):
+        if len( base ) != 1:
+            raise Exception( "A genomic position can only have a length of 1." )
+        return self.set_range( index, species, base )
+
+    # sets a range for a species
+    def set_range( self, index, species, bases ):
+        if index >= self.size or index < 0:
+            raise Exception( "Your index (%i) is out of range (0 - %i)." % ( index, self.size - 1 ) )
+        if len( bases ) == 0:
+            raise Exception( "A set of genomic positions can only have a positive length." )
+        if species not in self.sequences.keys():
+            self.add_species( species )
+        file_index, fh = self.temp_file_handler.get_open_tempfile( self.sequences[species] )
+        fh.seek( index )
+        fh.write( bases )
+
+    # Flush temp file of specified species, or all species
+    def flush( self, species=None ):
+        if species is None:
+            species = self.sequences.keys()
+        elif not isinstance( species, list ):
+            species = [species]
+        for spec in species:
+            self.temp_file_handler.flush( self.sequences[spec] )
+
+
+class GenomicRegionAlignment( RegionAlignment ):
+
+    def __init__( self, start, end, species=[], temp_file_handler=None ):
+        RegionAlignment.__init__( self, end - start, species, temp_file_handler=temp_file_handler )
+        self.start = start
+        self.end = end
+
+
+class SplicedAlignment( object ):
+
+    DNA_COMPLEMENT = string.maketrans( "ACGTacgt", "TGCAtgca" )
+
+    def __init__( self, exon_starts, exon_ends, species=[], temp_file_handler=None ):
+        if not isinstance( exon_starts, list ):
+            exon_starts = [exon_starts]
+        if not isinstance( exon_ends, list ):
+            exon_ends = [exon_ends]
+        assert len( exon_starts ) == len( exon_ends ), "The number of exon starts does not match the number of exon ends."
+        self.exons = []
+        if not temp_file_handler:
+            temp_file_handler = TempFileHandler()
+        self.temp_file_handler = temp_file_handler
+        for i in range( len( exon_starts ) ):
+            self.exons.append( GenomicRegionAlignment( exon_starts[i], exon_ends[i], species, temp_file_handler=temp_file_handler ) )
+
+    # returns the names for species found in alignment, skipping names as requested
+    def get_species_names( self, skip=[] ):
+        if not isinstance( skip, list ):
+            skip = [skip]
+        names = []
+        for exon in self.exons:
+            for name in exon.get_species_names( skip=skip ):
+                if name not in names:
+                    names.append( name )
+        return names
+
+    # returns the sequence for a species
+    def get_sequence( self, species ):
+        index, fh = self.temp_file_handler.get_open_tempfile()
+        for exon in self.exons:
+            if species in exon.get_species_names():
+                seq = exon.get_sequence( species )
+                # we need to refetch fh here, since exon.get_sequence( species ) uses a tempfile
+                # and if max==1, it would close fh
+                index, fh = self.temp_file_handler.get_open_tempfile( index )
+                fh.write( seq )
+            else:
+                fh.write( "-" * exon.size )
+        fh.seek( 0 )
+        rval = fh.read()
+        self.temp_file_handler.close( index, delete=True )
+        return rval
+
+    # returns the reverse complement of the sequence for a species
+    def get_sequence_reverse_complement( self, species ):
+        complement = [base for base in self.get_sequence( species ).translate( self.DNA_COMPLEMENT )]
+        complement.reverse()
+        return "".join( complement )
+
+    # Start and end of coding region
+    @property
+    def start( self ):
+        return self.exons[0].start
+
+    @property
+    def end( self ):
+        return self.exons[-1].end
+
+
+# Open a MAF index using a UID
+def maf_index_by_uid( maf_uid, index_location_file ):
+    for line in open( index_location_file ):
+        try:
+            # read each line, if not enough fields, go to next line
+            if line[0:1] == "#":
+                continue
+            fields = line.split('\t')
+            if maf_uid == fields[1]:
+                try:
+                    maf_files = fields[4].replace( "\n", "" ).replace( "\r", "" ).split( "," )
+                    return bx.align.maf.MultiIndexed( maf_files, keep_open=True, parse_e_rows=False )
+                except Exception as e:
+                    raise Exception( 'MAF UID (%s) found, but configuration appears to be malformed: %s' % ( maf_uid, e ) )
+        except:
+            pass
+    return None
+
+
+# Return ( index, temp_index_filename ) for a user MAF, opening an existing index if available or building one; temp_index_filename is None when no tempfile was created
+def open_or_build_maf_index( maf_file, index_filename, species=None ):
+    try:
+        return ( bx.align.maf.Indexed( maf_file, index_filename=index_filename, keep_open=True, parse_e_rows=False ), None )
+    except:
+        return build_maf_index( maf_file, species=species )
+
+
+def build_maf_index_species_chromosomes( filename, index_species=None ):
+    species = []
+    species_chromosomes = {}
+    indexes = bx.interval_index_file.Indexes()
+    blocks = 0
+    try:
+        maf_reader = bx.align.maf.Reader( open( filename ) )
+        while True:
+            pos = maf_reader.file.tell()
+            block = maf_reader.next()
+            if block is None:
+                break
+            blocks += 1
+            for c in block.components:
+                spec = c.src
+                chrom = None
+                if "." in spec:
+                    spec, chrom = spec.split( ".", 1 )
+                if spec not in species:
+                    species.append( spec )
+                    species_chromosomes[spec] = []
+                if chrom and chrom not in species_chromosomes[spec]:
+                    species_chromosomes[spec].append( chrom )
+                if index_species is None or spec in index_species:
+                    forward_strand_start = c.forward_strand_start
+                    forward_strand_end = c.forward_strand_end
+                    try:
+                        forward_strand_start = int( forward_strand_start )
+                        forward_strand_end = int( forward_strand_end )
+                    except ValueError:
+                        continue  # start and end are not integers, can't add component to index, goto next component
+                        # this likely only occurs when parse_e_rows is True?
+                        # could a species exist as only e rows?
+                    if forward_strand_end > forward_strand_start:
+                        # require positive length; i.e. certain lines have start = end = 0 and cannot be indexed
+                        indexes.add( c.src, forward_strand_start, forward_strand_end, pos, max=c.src_size )
+    except Exception as e:
+        # most likely a bad MAF
+        log.debug( 'Building MAF index on %s failed: %s' % ( filename, e ) )
+        return ( None, [], {}, 0 )
+    return ( indexes, species, species_chromosomes, blocks )
+
+
+# builds and returns ( index, index_filename ) for specified maf_file
+def build_maf_index( maf_file, species=None ):
+    indexes, found_species, species_chromosomes, blocks = build_maf_index_species_chromosomes( maf_file, species )
+    if indexes is not None:
+        fd, index_filename = tempfile.mkstemp()
+        out = os.fdopen( fd, 'w' )
+        indexes.write( out )
+        out.close()
+        return ( bx.align.maf.Indexed( maf_file, index_filename=index_filename, keep_open=True, parse_e_rows=False ), index_filename )
+    return ( None, None )
+
+
+def component_overlaps_region( c, region ):
+    if c is None:
+        return False
+    start, end = c.get_forward_strand_start(), c.get_forward_strand_end()
+    if region.start >= end or region.end <= start:
+        return False
+    return True
+
+
+def chop_block_by_region( block, src, region, species=None, mincols=0 ):
+    # This chopping method was designed to maintain consistency with how start/end padding gaps have been working in Galaxy thus far:
+    #   behavior as seen when forcing blocks to be '+' relative to src sequence (ref) and using block.slice_by_component( ref, slice_start, slice_end )
+    #   whether or not this is the 'correct' behavior is questionable, but this will at least maintain consistency
+    # comments welcome
+    slice_start = block.text_size  # max for the min()
+    slice_end = 0  # min for the max()
+    old_score = block.score  # save old score for later use
+    # We no longer assume only one occurrence of src per block, so we need to check them all
+    for c in iter_components_by_src( block, src ):
+        if component_overlaps_region( c, region ):
+            if c.text is not None:
+                rev_strand = False
+                if c.strand == "-":
+                    # We want our coord_to_col coordinates to be returned from positive stranded component
+                    rev_strand = True
+                    c = c.reverse_complement()
+                start = max( region.start, c.start )
+                end = min( region.end, c.end )
+                start = c.coord_to_col( start )
+                end = c.coord_to_col( end )
+                if rev_strand:
+                    # need to orient slice coordinates to the original block direction
+                    slice_len = end - start
+                    end = len( c.text ) - start
+                    start = end - slice_len
+                slice_start = min( start, slice_start )
+                slice_end = max( end, slice_end )
+
+    if slice_start < slice_end:
+        block = block.slice( slice_start, slice_end )
+        if block.text_size > mincols:
+            # restore old score, may not be accurate, but it is better than 0 for everything?
+            block.score = old_score
+            if species is not None:
+                block = block.limit_to_species( species )
+                block.remove_all_gap_columns()
+            return block
+    return None
+
+
+def orient_block_by_region( block, src, region, force_strand=None ):
+    # loop through components matching src,
+    # make sure each of these components overlaps the region
+    # cache strand for each of the overlapping components
+    # if force_strand / region.strand not in strand cache, reverse complement
+    # we could have 2 sequences with the same src overlapping the region on different strands; in that case no reverse complementing is done
+    strands = [ c.strand for c in iter_components_by_src( block, src ) if component_overlaps_region( c, region ) ]
+    if strands and ( force_strand is None and region.strand not in strands ) or ( force_strand is not None and force_strand not in strands ):
+        block = block.reverse_complement()
+    return block
+
+
+def get_oriented_chopped_blocks_for_region( index, src, region, species=None, mincols=0, force_strand=None ):
+    for block, idx, offset in get_oriented_chopped_blocks_with_index_offset_for_region( index, src, region, species, mincols, force_strand ):
+        yield block
+
+
+def get_oriented_chopped_blocks_with_index_offset_for_region( index, src, region, species=None, mincols=0, force_strand=None ):
+    for block, idx, offset in get_chopped_blocks_with_index_offset_for_region( index, src, region, species, mincols ):
+        yield orient_block_by_region( block, src, region, force_strand ), idx, offset
+
+
+# split a block with multiple occurrences of src into one block per src
+def iter_blocks_split_by_src( block, src ):
+    for src_c in iter_components_by_src( block, src ):
+        new_block = bx.align.Alignment( score=block.score, attributes=deepcopy( block.attributes ) )
+        new_block.text_size = block.text_size
+        for c in block.components:
+            if c == src_c or c.src != src:
+                new_block.add_component( deepcopy( c ) )  # components hold a reference to their alignment; don't want to lose the reference to the original alignment block in the original components
+        yield new_block
+
+
+# split a block into multiple blocks with all combinations of a species appearing only once per block
+def iter_blocks_split_by_species( block, species=None ):
+    def __split_components_by_species( components_by_species, new_block ):
+        if components_by_species:
+            # more species with components to add to this block
+            components_by_species = deepcopy( components_by_species )
+            spec_comps = components_by_species.pop( 0 )
+            for c in spec_comps:
+                newer_block = deepcopy( new_block )
+                newer_block.add_component( deepcopy( c ) )
+                for value in __split_components_by_species( components_by_species, newer_block ):
+                    yield value
+        else:
+            # no more components to add, yield this block
+            yield new_block
+
+    # divide components by species
+    spec_dict = {}
+    if not species:
+        species = []
+        for c in block.components:
+            spec, chrom = src_split( c.src )
+            if spec not in spec_dict:
+                spec_dict[ spec ] = []
+                species.append( spec )
+            spec_dict[ spec ].append( c )
+    else:
+        for spec in species:
+            spec_dict[ spec ] = []
+            for c in iter_components_by_src_start( block, spec ):
+                spec_dict[ spec ].append( c )
+
+    empty_block = bx.align.Alignment( score=block.score, attributes=deepcopy( block.attributes ) )  # should we copy attributes?
+    empty_block.text_size = block.text_size
+    # call recursive function to split into each combo of spec/blocks
+    for value in __split_components_by_species( spec_dict.values(), empty_block ):
+        sort_block_components_by_block( value, block )  # restore original component order
+        yield value
+
+
+# generator yielding only chopped and valid blocks for a specified region
+def get_chopped_blocks_for_region( index, src, region, species=None, mincols=0 ):
+    for block, idx, offset in get_chopped_blocks_with_index_offset_for_region( index, src, region, species, mincols ):
+        yield block
+
+
+def get_chopped_blocks_with_index_offset_for_region( index, src, region, species=None, mincols=0 ):
+    for block, idx, offset in index.get_as_iterator_with_index_and_offset( src, region.start, region.end ):
+        block = chop_block_by_region( block, src, region, species, mincols )
+        if block is not None:
+            yield block, idx, offset
+
+
+# returns a filled region alignment for specified regions
+def get_region_alignment( index, primary_species, chrom, start, end, strand='+', species=None, mincols=0, overwrite_with_gaps=True, temp_file_handler=None ):
+    if species is not None:
+        alignment = RegionAlignment( end - start, species, temp_file_handler=temp_file_handler )
+    else:
+        alignment = RegionAlignment( end - start, primary_species, temp_file_handler=temp_file_handler )
+    return fill_region_alignment( alignment, index, primary_species, chrom, start, end, strand, species, mincols, overwrite_with_gaps )
+
+
+# reduces a block to only positions existing in the src provided
+def reduce_block_by_primary_genome( block, species, chromosome, region_start ):
+    # returns ( start_offset, {species: text} ),
+    # where each text is reduced to only the positions existing in the primary genome
+    src = "%s.%s" % ( species, chromosome )
+    ref = block.get_component_by_src( src )
+    start_offset = ref.start - region_start
+    species_texts = {}
+    for c in block.components:
+        species_texts[ c.src.split( '.' )[0] ] = list( c.text )
+    # remove locations which are gaps in the primary species, starting from the downstream end
+    for i in range( len( species_texts[ species ] ) - 1, -1, -1 ):
+        if species_texts[ species ][i] == '-':
+            for text in species_texts.values():
+                text.pop( i )
+    for spec, text in species_texts.items():
+        species_texts[spec] = ''.join( text )
+    return ( start_offset, species_texts )
+
+
+# fills a region alignment
+def fill_region_alignment( alignment, index, primary_species, chrom, start, end, strand='+', species=None, mincols=0, overwrite_with_gaps=True ):
+    region = bx.intervals.Interval( start, end )
+    region.chrom = chrom
+    region.strand = strand
+    primary_src = "%s.%s" % ( primary_species, chrom )
+
+    # Order blocks overlapping this position by score, lowest first
+    blocks = []
+    for block, idx, offset in index.get_as_iterator_with_index_and_offset( primary_src, start, end ):
+        score = float( block.score )
+        for i in range( 0, len( blocks ) ):
+            if score < blocks[i][0]:
+                blocks.insert( i, ( score, idx, offset ) )
+                break
+        else:
+            blocks.append( ( score, idx, offset ) )
+
+    # gap_chars_tuple = tuple( GAP_CHARS )
+    gap_chars_str = ''.join( GAP_CHARS )
+    # Loop through ordered blocks and layer by increasing score
+    for block_dict in blocks:
+        for block in iter_blocks_split_by_species( block_dict[1].get_at_offset( block_dict[2] ) ):  # need to handle each occurrence of sequence in block separately
+            if component_overlaps_region( block.get_component_by_src( primary_src ), region ):
+                block = chop_block_by_region( block, primary_src, region, species, mincols )  # chop block
+                block = orient_block_by_region( block, primary_src, region )  # orient block
+                start_offset, species_texts = reduce_block_by_primary_genome( block, primary_species, chrom, start )
+                for spec, text in species_texts.items():
+                    # we should trim gaps from both sides, since these are not positions in this species genome (sequence)
+                    text = text.rstrip( gap_chars_str )
+                    gap_offset = 0
+                    # while text.startswith( gap_chars_tuple ):
+                    while True in [ text.startswith( gap_char ) for gap_char in GAP_CHARS ]:  # python2.4 doesn't accept a tuple for .startswith()
+                        gap_offset += 1
+                        text = text[1:]
+                        if not text:
+                            break
+                    if text:
+                        if overwrite_with_gaps:
+                            alignment.set_range( start_offset + gap_offset, spec, text )
+                        else:
+                            for i, char in enumerate( text ):
+                                if char not in GAP_CHARS:
+                                    alignment.set_position( start_offset + gap_offset + i, spec, char )
+    return alignment
+
+
+# returns a filled spliced region alignment for specified region with start and end lists
+def get_spliced_region_alignment( index, primary_species, chrom, starts, ends, strand='+', species=None, mincols=0, overwrite_with_gaps=True, temp_file_handler=None ):
+    # create spliced alignment object
+    if species is not None:
+        alignment = SplicedAlignment( starts, ends, species, temp_file_handler=temp_file_handler )
+    else:
+        alignment = SplicedAlignment( starts, ends, [primary_species], temp_file_handler=temp_file_handler )
+    for exon in alignment.exons:
+        fill_region_alignment( exon, index, primary_species, chrom, exon.start, exon.end, strand, species, mincols, overwrite_with_gaps )
+    return alignment
+
+
+# loop through string array, only return non-commented lines
+def line_enumerator( lines, comment_start='#' ):
+    i = 0
+    for line in lines:
+        if not line.startswith( comment_start ):
+            i += 1
+            yield ( i, line )
+
+
+# parse a 12-column gene BED line, returning coding-exon starts and ends plus the raw fields
+def get_starts_ends_fields_from_gene_bed( line ):
+    # Starts and ends for exons
+    starts = []
+    ends = []
+
+    fields = line.split()
+    # Requires at least 12 BED columns
+    if len(fields) < 12:
+        raise Exception( "Not a proper 12 column BED line (%s)." % line )
+    tx_start = int( fields[1] )
+    strand = fields[5]
+    if strand != '-':
+        strand = '+'  # Default strand is +
+    cds_start = int( fields[6] )
+    cds_end = int( fields[7] )
+
+    # Calculate and store starts and ends of coding exons
+    region_start, region_end = cds_start, cds_end
+    exon_starts = map( int, fields[11].rstrip( ',\n' ).split( ',' ) )
+    exon_starts = map( ( lambda x: x + tx_start ), exon_starts )
+    exon_ends = map( int, fields[10].rstrip( ',' ).split( ',' ) )
+    exon_ends = map( ( lambda x, y: x + y ), exon_starts, exon_ends )
+    for start, end in zip( exon_starts, exon_ends ):
+        start = max( start, region_start )
+        end = min( end, region_end )
+        if start < end:
+            starts.append( start )
+            ends.append( end )
+    return ( starts, ends, fields )
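+
+# Worked example (illustrative BED12 line) for the parser above:
+#   chr1  100  900  gene1  0  +  150  850  0  2  200,200  0,600
+# The exons span 100-300 and 700-900; clipped to the CDS [150, 850)
+# this yields starts=[150, 700] and ends=[300, 850].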
+
+
+def iter_components_by_src( block, src ):
+    for c in block.components:
+        if c.src == src:
+            yield c
+
+
+def get_components_by_src( block, src ):
+    return [ value for value in iter_components_by_src( block, src ) ]
+
+
+def iter_components_by_src_start( block, src ):
+    for c in block.components:
+        if c.src.startswith( src ):
+            yield c
+
+
+def get_components_by_src_start( block, src ):
+    return [ value for value in iter_components_by_src_start( block, src ) ]
+
+
+def sort_block_components_by_block( block1, block2 ):
+    # orders the components in block1 by the index of the component in block2
+    # block1 must be a subset of block2
+    # occurs in-place
+    return block1.components.sort( cmp=lambda x, y: block2.components.index( x ) - block2.components.index( y ) )
+
+
+def get_species_in_maf( maf_filename ):
+    species = []
+    for block in bx.align.maf.Reader( open( maf_filename ) ):
+        for spec in get_species_in_block( block ):
+            if spec not in species:
+                species.append( spec )
+    return species
+
+
+def parse_species_option( species ):
+    if species:
+        species = species.split( ',' )
+        if 'None' not in species:
+            return species
+    return None  # provided species was '', None, or had 'None' in it
+
+
+def remove_temp_index_file( index_filename ):
+    try:
+        os.unlink( index_filename )
+    except:
+        pass
+
+# Below are methods to deal with FASTA files
+
+
+def get_fasta_header( component, attributes={}, suffix=None ):
+    header = ">%s(%s):%i-%i|" % ( component.src, component.strand, component.get_forward_strand_start(), component.get_forward_strand_end() )
+    for key, value in attributes.iteritems():
+        header = "%s%s=%s|" % ( header, key, value )
+    if suffix:
+        header = "%s%s" % ( header, suffix )
+    else:
+        header = "%s%s" % ( header, src_split( component.src )[ 0 ] )
+    return header
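+
+# For example (illustrative values), a component for hg18.chr1 on the '+'
+# strand spanning 100-200, with attributes={'score': '42'} and no suffix,
+# produces the header: >hg18.chr1(+):100-200|score=42|hg18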
+
+
+def get_attributes_from_fasta_header( header ):
+    if not header:
+        return {}
+    attributes = {}
+    header = header.lstrip( '>' )
+    header = header.strip()
+    fields = header.split( '|' )
+    try:
+        region = fields[0]
+        region = region.split( '(', 1 )
+        temp = region[0].split( '.', 1 )
+        attributes['species'] = temp[0]
+        if len( temp ) == 2:
+            attributes['chrom'] = temp[1]
+        else:
+            attributes['chrom'] = temp[0]
+        region = region[1].split( ')', 1 )
+        attributes['strand'] = region[0]
+        region = region[1].lstrip( ':' ).split( '-' )
+        attributes['start'] = int( region[0] )
+        attributes['end'] = int( region[1] )
+    except:
+        # fields[0] is not a region coordinate
+        pass
+    if len( fields ) > 2:
+        for i in xrange( 1, len( fields ) - 1 ):
+            prop = fields[i].split( '=', 1 )
+            if len( prop ) == 2:
+                attributes[ prop[0] ] = prop[1]
+    if len( fields ) > 1:
+        attributes['__suffix__'] = fields[-1]
+    return attributes
+
+
+def iter_fasta_alignment( filename ):
+    class fastaComponent:
+        def __init__( self, species, text="" ):
+            self.species = species
+            self.text = text
+
+        def extend( self, text ):
+            self.text = self.text + text.replace( '\n', '' ).replace( '\r', '' ).strip()
+    # yields a list of fastaComponents for a FASTA file
+    f = open( filename, 'rb' )
+    components = []
+    # cur_component = None
+    while True:
+        line = f.readline()
+        if not line:
+            if components:
+                yield components
+            return
+        line = line.strip()
+        if not line:
+            if components:
+                yield components
+            components = []
+        elif line.startswith( '>' ):
+            attributes = get_attributes_from_fasta_header( line )
+            components.append( fastaComponent( attributes['species'] ) )
+        elif components:
+            components[-1].extend( line )
diff --git a/lib/galaxy/tools/verify/__init__.py b/lib/galaxy/tools/verify/__init__.py
new file mode 100644
index 0000000..d1ba4bf
--- /dev/null
+++ b/lib/galaxy/tools/verify/__init__.py
@@ -0,0 +1,289 @@
+"""Module of utilities for verifying test results."""
+
+import difflib
+import filecmp
+import hashlib
+import logging
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+
+from .asserts import verify_assertions
+from .test_data import TestDataResolver
+
+log = logging.getLogger(__name__)
+
+DEFAULT_TEST_DATA_RESOLVER = TestDataResolver()
+
+
+def verify(
+    item_label,
+    output_content,
+    attributes,
+    filename=None,
+    get_filename=None,
+    keep_outputs_dir=None,
+    verify_extra_files=None,
+):
+    """Verify the content of a test output using test definitions described by attributes.
+
+    Throw an informative assertion error if any of these tests fail.
+    """
+    if get_filename is None:
+        get_filename = DEFAULT_TEST_DATA_RESOLVER.get_filename
+
+    # Check assertions...
+    assertions = attributes.get("assert_list", None) if attributes is not None else None
+    if assertions is not None:
+        try:
+            verify_assertions(output_content, attributes["assert_list"])
+        except AssertionError as err:
+            errmsg = '%s different than expected\n' % (item_label)
+            errmsg += str( err )
+            raise AssertionError( errmsg )
+
+    # Verify checksum attributes...
+    # works with older Galaxy style md5=<expected_sum> or cwltest
+    # style checksum=<hash_type>$<hash>.
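+    # e.g. (illustrative values - these happen to be the hashes of empty
+    # content): attributes={"md5": "d41d8cd98f00b204e9800998ecf8427e"} or
+    # attributes={"checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"}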
+    expected_checksum_type = None
+    expected_checksum = None
+    if attributes is not None and attributes.get("md5", None) is not None:
+        expected_checksum_type = "md5"
+        expected_checksum = attributes.get("md5")
+    elif attributes is not None and attributes.get("checksum", None) is not None:
+        checksum_value = attributes.get("checksum", None)
+        expected_checksum_type, expected_checksum = checksum_value.split("$", 1)
+
+    if expected_checksum_type:
+        try:
+            _verify_checksum(output_content, expected_checksum_type, expected_checksum)
+        except AssertionError as err:
+            errmsg = '%s different than expected\n' % (item_label)
+            errmsg += str( err )
+            raise AssertionError( errmsg )
+
+    if filename is not None:
+        local_name = get_filename(filename)
+        temp_name = make_temp_fname(fname=filename)
+        with open(temp_name, 'wb') as f:
+            f.write(output_content)
+
+        # if the server's env has GALAXY_TEST_SAVE, save the output file to that dir
+        if keep_outputs_dir:
+            ofn = os.path.join(keep_outputs_dir, os.path.basename(local_name))
+            log.debug('keep_outputs_dir: %s, ofn: %s', keep_outputs_dir, ofn)
+            try:
+                shutil.copy( temp_name, ofn )
+            except Exception as exc:
+                error_log_msg = 'Could not save output file %s to %s: ' % (temp_name, ofn)
+                error_log_msg += str(exc)
+                log.error(error_log_msg, exc_info=True)
+            else:
+                log.debug('## GALAXY_TEST_SAVE=%s. saved %s' % (keep_outputs_dir, ofn))
+        try:
+            if attributes is None:
+                attributes = {}
+            compare = attributes.get('compare', 'diff')
+            if attributes.get('ftype', None) == 'bam':
+                local_fh, temp_name = _bam_to_sam(local_name, temp_name)
+                local_name = local_fh.name
+            if compare == 'diff':
+                files_diff(local_name, temp_name, attributes=attributes)
+            elif compare == 're_match':
+                files_re_match( local_name, temp_name, attributes=attributes )
+            elif compare == 're_match_multiline':
+                files_re_match_multiline( local_name, temp_name, attributes=attributes )
+            elif compare == 'sim_size':
+                delta = attributes.get('delta', '100')
+                s1 = len(output_content)
+                s2 = os.path.getsize(local_name)
+                if abs(s1 - s2) > int(delta):
+                    raise AssertionError( 'Files %s=%db but %s=%db - compare by size (delta=%s) failed' % (temp_name, s1, local_name, s2, delta) )
+            elif compare == "contains":
+                files_contains( local_name, temp_name, attributes=attributes )
+            else:
+                raise Exception( 'Unimplemented Compare type: %s' % compare )
+            if verify_extra_files:
+                extra_files = attributes.get('extra_files', None)
+                if extra_files:
+                    verify_extra_files(extra_files)
+        except AssertionError as err:
+            errmsg = '%s different than expected, difference (using %s):\n' % ( item_label, compare )
+            errmsg += "( %s v. %s )\n" % ( local_name, temp_name )
+            errmsg += str( err )
+            raise AssertionError( errmsg )
+        finally:
+            if 'GALAXY_TEST_NO_CLEANUP' not in os.environ:
+                os.remove( temp_name )
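+
+# A minimal usage sketch for verify() (file name and attribute values are
+# hypothetical):
+#
+#     verify(
+#         "Output 1",
+#         output_content,
+#         attributes={"compare": "diff", "lines_diff": 1},
+#         filename="expected_output.tabular",
+#     )
+#
+# With compare="diff" the output content is written to a temp file and
+# compared line by line against the resolved test-data file, tolerating at
+# most lines_diff differing lines.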
+
+
+def make_temp_fname(fname=None):
+    """Safe temp name - preserve the file extension for tools that interpret it."""
+    suffix = os.path.split(fname)[-1]  # ignore full path
+    fd, temp_prefix = tempfile.mkstemp(prefix='tmp', suffix=suffix)
+    os.close(fd)  # only the name is needed; close the open descriptor
+    return temp_prefix
+
+
+def _bam_to_sam(local_name, temp_name):
+    temp_local = tempfile.NamedTemporaryFile( suffix='.sam', prefix='local_bam_converted_to_sam_' )
+    fd, temp_temp = tempfile.mkstemp( suffix='.sam', prefix='history_bam_converted_to_sam_' )
+    os.close( fd )
+    command = 'samtools view -h -o "%s" "%s"' % ( temp_local.name, local_name  )
+    check_command( command, 'Converting local (test-data) bam to sam' )
+    command = 'samtools view -h -o "%s" "%s"' % ( temp_temp, temp_name  )
+    check_command( command, 'Converting history bam to sam ' )
+    os.remove( temp_name )
+    return temp_local, temp_temp
+
+
+def _verify_checksum(data, checksum_type, expected_checksum_value):
+    if checksum_type not in ["md5", "sha1", "sha256", "sha512"]:
+        raise Exception("Unimplemented hash algorithm [%s] encountered." % checksum_type)
+
+    h = hashlib.new(checksum_type)
+    h.update( data )
+    actual_checksum_value = h.hexdigest()
+    if expected_checksum_value != actual_checksum_value:
+        template = "Output checksum [%s] does not match expected [%s] (using hash algorithm %s)."
+        message = template % (actual_checksum_value, expected_checksum_value, checksum_type)
+        raise AssertionError(message)
+
+
+def check_command(command, description):
+    """Verify a command runs with an exit code of 0."""
+    # TODO: also collect ``which samtools`` and ``samtools --version``
+    p = subprocess.Popen( args=command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True )
+    (stdout, stderr) = p.communicate()
+    if p.returncode:
+        template = description
+        template += " failed: (cmd=[%s], stdout=[%s], stderr=[%s])"
+        message = template % (command, stdout, stderr)
+        raise AssertionError(message)
+
+
+def files_diff(file1, file2, attributes=None):
+    """Check the contents of 2 files for differences."""
+    def get_lines_diff( diff ):
+        count = 0
+        for line in diff:
+            if ( line.startswith( '+' ) and not line.startswith( '+++' ) ) or ( line.startswith( '-' ) and not line.startswith( '---' ) ):
+                count += 1
+        return count
+    if not filecmp.cmp( file1, file2 ):
+        files_differ = False
+        local_file = open( file1, 'U' ).readlines()
+        history_data = open( file2, 'U' ).readlines()
+        if attributes is None:
+            attributes = {}
+        if attributes.get( 'sort', False ):
+            history_data.sort()
+        # Why even bother with the check loop below, why not just use the diff output? This seems wasteful.
+        if len( local_file ) == len( history_data ):
+            for i in range( len( history_data ) ):
+                if local_file[i].rstrip( '\r\n' ) != history_data[i].rstrip( '\r\n' ):
+                    files_differ = True
+                    break
+        else:
+            files_differ = True
+        if files_differ:
+            allowed_diff_count = int(attributes.get( 'lines_diff', 0 ))
+            diff = list( difflib.unified_diff( local_file, history_data, "local_file", "history_data" ) )
+            diff_lines = get_lines_diff( diff )
+            if diff_lines > allowed_diff_count:
+                if 'GALAXY_TEST_RAW_DIFF' in os.environ:
+                    diff_slice = diff
+                else:
+                    if len(diff) < 60:
+                        diff_slice = diff[0:40]
+                    else:
+                        diff_slice = diff[:25] + ["********\n", "*SNIP *\n", "********\n"] + diff[-25:]
+                # FIXME: This pdf stuff is rather special cased and has not been updated to consider lines_diff
+                # due to unknown desired behavior when used in conjunction with a non-zero lines_diff
+                # PDF forgiveness can probably be handled better by not special casing by __extension__ here
+                # and instead using lines_diff or a regular expression matching
+                # or by creating and using a specialized pdf comparison function
+                if file1.endswith( '.pdf' ) or file2.endswith( '.pdf' ):
+                    # PDF files contain creation dates, modification dates, ids and descriptions that change with each
+                    # new file, so we need to handle these differences.  As long as the rest of the PDF file does
+                    # not differ we're ok.
+                    valid_diff_strs = [ 'description', 'createdate', 'creationdate', 'moddate', 'id', 'producer', 'creator' ]
+                    valid_diff = False
+                    invalid_diff_lines = 0
+                    for line in diff_slice:
+                        # Make sure to lower case strings before checking.
+                        line = line.lower()
+                        # Diff lines will always start with a + or - character, but handle special cases: '--- local_file \n', '+++ history_data \n'
+                        if ( line.startswith( '+' ) or line.startswith( '-' ) ) and line.find( 'local_file' ) < 0 and line.find( 'history_data' ) < 0:
+                            for vdf in valid_diff_strs:
+                                if line.find( vdf ) < 0:
+                                    valid_diff = False
+                                else:
+                                    valid_diff = True
+                                    # Stop checking as soon as we know we have a valid difference
+                                    break
+                            if not valid_diff:
+                                invalid_diff_lines += 1
+                    log.info('## files diff on %s and %s lines_diff=%d, found diff = %d, found pdf invalid diff = %d' % (file1, file2, allowed_diff_count, diff_lines, invalid_diff_lines))
+                    if invalid_diff_lines > allowed_diff_count:
+                        # Print out diff_slice so we can see what failed
+                        log.info("###### diff_slice ######")
+                        raise AssertionError( "".join( diff_slice ) )
+                else:
+                    log.info('## files diff on %s and %s lines_diff=%d, found diff = %d' % (file1, file2, allowed_diff_count, diff_lines))
+                    for line in diff_slice:
+                        for char in line:
+                            if ord( char ) > 128:
+                                raise AssertionError( "Binary data detected, not displaying diff" )
+                    raise AssertionError( "".join( diff_slice )  )
+
+
+def files_re_match(file1, file2, attributes=None):
+    """Check the contents of 2 files for differences using re.match."""
+    local_file = open( file1, 'U' ).readlines()  # regex file
+    history_data = open( file2, 'U' ).readlines()
+    assert len( local_file ) == len( history_data ), 'Data File and Regular Expression File contain a different number of lines (%s != %s)\nHistory Data (first 40 lines):\n%s' % ( len( local_file ), len( history_data ), ''.join( history_data[:40] ) )
+    if attributes is None:
+        attributes = {}
+    if attributes.get( 'sort', False ):
+        history_data.sort()
+    lines_diff = int(attributes.get( 'lines_diff', 0 ))
+    line_diff_count = 0
+    diffs = []
+    for i in range( len( history_data ) ):
+        if not re.match( local_file[i].rstrip( '\r\n' ), history_data[i].rstrip( '\r\n' ) ):
+            line_diff_count += 1
+            diffs.append( 'Regular Expression: %s\nData file         : %s' % ( local_file[i].rstrip( '\r\n' ), history_data[i].rstrip( '\r\n' ) ) )
+        if line_diff_count > lines_diff:
+            raise AssertionError( "Regular expression did not match data file (allowed variants=%i):\n%s" % ( lines_diff, "".join( diffs ) ) )
+
+
+def files_re_match_multiline(file1, file2, attributes=None):
+    """Check the contents of 2 files for differences using re.match in multiline mode."""
+    local_file = open( file1, 'U' ).read()  # regex file
+    if attributes is None:
+        attributes = {}
+    if attributes.get( 'sort', False ):
+        history_data = open( file2, 'U' ).readlines()
+        history_data.sort()
+        history_data = ''.join( history_data )
+    else:
+        history_data = open( file2, 'U' ).read()
+    # lines_diff not applicable to multiline matching
+    assert re.match( local_file, history_data, re.MULTILINE ), "Multiline Regular expression did not match data file"
+
+
+def files_contains(file1, file2, attributes=None):
+    """Check the contents of file2 for substrings found in file1, on a per-line basis."""
+    local_file = open( file1, 'U' ).readlines()  # lines that must appear in file2
+    # TODO: allow forcing ordering of contains
+    history_data = open( file2, 'U' ).read()
+    lines_diff = int( attributes.get( 'lines_diff', 0 ) )
+    line_diff_count = 0
+    while local_file:
+        contains = local_file.pop( 0 ).rstrip( '\n\r' )
+        if contains not in history_data:
+            line_diff_count += 1
+        if line_diff_count > lines_diff:
+            raise AssertionError( "Failed to find '%s' in history data. (lines_diff=%i):\n" % ( contains, lines_diff ) )
diff --git a/lib/galaxy/tools/verify/asserts/__init__.py b/lib/galaxy/tools/verify/asserts/__init__.py
new file mode 100644
index 0000000..d0c8f19
--- /dev/null
+++ b/lib/galaxy/tools/verify/asserts/__init__.py
@@ -0,0 +1,78 @@
+import inspect
+import logging
+import sys
+
+log = logging.getLogger( __name__ )
+
+assertion_module_names = ['text', 'tabular', 'xml']
+
+# Code for loading modules containing assertion checking functions. To
+# create a new module of assertion functions, create the needed Python
+# source file "lib/galaxy/tools/verify/asserts/<MODULE_NAME>.py" and add
+# <MODULE_NAME> to the list of assertion module names defined above.
+assertion_modules = []
+for assertion_module_name in assertion_module_names:
+    full_assertion_module_name = 'galaxy.tools.verify.asserts.' + assertion_module_name
+    log.debug(full_assertion_module_name)
+    try:
+        # Dynamically import module
+        __import__(full_assertion_module_name)
+        assertion_module = sys.modules[full_assertion_module_name]
+        assertion_modules.append(assertion_module)
+    except Exception as e:
+        log.exception('Failed to load assertion module: %s %s' % (assertion_module_name, str(e)))
+
+
+def verify_assertions(data, assertion_description_list):
+    """ This function takes a list of assertions and a string to check
+    these assertions against. """
+    for assertion_description in assertion_description_list:
+        verify_assertion(data, assertion_description)
+
+
+def verify_assertion(data, assertion_description):
+    tag = assertion_description["tag"]
+    assert_function_name = "assert_" + tag
+    assert_function = None
+    for assertion_module in assertion_modules:
+        if hasattr(assertion_module, assert_function_name):
+            assert_function = getattr(assertion_module, assert_function_name)
+
+    if assert_function is None:
+        errmsg = "Unable to find test function associated with XML tag '%s'. Check your tool file syntax." % tag
+        raise AssertionError(errmsg)
+
+    assert_function_args = inspect.getargspec(assert_function).args
+    args = {}
+    for attribute, value in assertion_description["attributes"].iteritems():
+        if attribute in assert_function_args:
+            args[attribute] = value
+
+    # Three special arguments are automatically populated independently of
+    # tool XML attributes. output is passed in as the contents of the
+    # output file. verify_assertions_function is passed in as the
+    # verify_assertions function defined above; this allows
+    # recursively checking assertions on subsections of
+    # output. children is the parsed version of the child elements of
+    # the XML element describing this assertion. See
+    # assert_element_text in lib/galaxy/tools/verify/asserts/xml.py as an
+    # example of how to use verify_assertions_function and children in
+    # conjunction to apply assertion checking to a subset of the input. The
+    # parsed version of an element's child elements does not need to just
+    # define assertions; developers of assertion functions can also use the
+    # child elements in novel ways to define inputs to the assertion
+    # checking function (for instance, consider the following fictional
+    # assertion function for checking column titles of tabular output:
+    # <has_column_titles><with_name name="sequence"><with_name
+    # name="probability"></has_column_titles>).
+    if "output" in assert_function_args:
+        args["output"] = data
+
+    if "verify_assertions_function" in assert_function_args:
+        args["verify_assertions_function"] = verify_assertions
+
+    if "children" in assert_function_args:
+        args["children"] = assertion_description["children"]
+
+    # TODO: Verify all needed function arguments are specified.
+    assert_function(**args)
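+
+
+# A hypothetical sketch of a custom assertion function compatible with the
+# dispatch above - placed in one of the assertion modules, it would be
+# matched to a tag named "has_text_lowercase" and receive the output
+# contents plus the tag's "text" attribute automatically:
+#
+#     def assert_has_text_lowercase(output, text):
+#         assert text.lower() in output.lower()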
diff --git a/lib/galaxy/tools/verify/asserts/tabular.py b/lib/galaxy/tools/verify/asserts/tabular.py
new file mode 100644
index 0000000..bac40dc
--- /dev/null
+++ b/lib/galaxy/tools/verify/asserts/tabular.py
@@ -0,0 +1,19 @@
+import re
+
+
+def get_first_line(output):
+    match = re.search("^(.*)$", output, flags=re.MULTILINE)
+    if match is None:
+        return None
+    else:
+        return match.group(1)
+
+
+def assert_has_n_columns(output, n, sep='\t'):
+    """ Asserts the tabular output contains n columns. The optional
+    sep argument specifies the column separator used to determine the
+    number of columns."""
+    n = int(n)
+    first_line = get_first_line(output)
+    assert first_line is not None, "Was expecting output with %d columns, but output was empty." % n
+    assert len(first_line.split(sep)) == n, "Output does not have %d columns." % n
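+
+
+# For instance, assert_has_n_columns("a\tb\tc\nignored", 3) passes (values
+# illustrative), since only the first line of the output is inspected.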
diff --git a/lib/galaxy/tools/verify/asserts/text.py b/lib/galaxy/tools/verify/asserts/text.py
new file mode 100644
index 0000000..835a6ac
--- /dev/null
+++ b/lib/galaxy/tools/verify/asserts/text.py
@@ -0,0 +1,34 @@
+import re
+
+
+def assert_has_text(output, text):
+    """ Asserts specified output contains the substring specified by
+    the argument text."""
+    assert output.find(text) >= 0, "Output file did not contain expected text '%s' (output '%s')" % (text, output)
+
+
+def assert_not_has_text(output, text):
+    """ Asserts specified output does not contain the substring
+    specified by the argument text."""
+    assert output.find(text) < 0, "Output file contains unexpected text '%s'" % text
+
+
+def assert_has_line(output, line):
+    """ Asserts the specified output contains the line specified the
+    argument line."""
+    match = re.search("^%s$" % re.escape(line), output, flags=re.MULTILINE)
+    assert match is not None, "No line of output file was '%s' (output was '%s') " % (line, output)
+
+
+def assert_has_text_matching(output, expression):
+    """ Asserts the specified output contains text matching the
+    regular expression specified by the argument expression."""
+    match = re.search(expression, output)
+    assert match is not None, "No text matching expression '%s' was found in output file." % expression
+
+
+def assert_has_line_matching(output, expression):
+    """ Asserts the specified output contains a line matching the
+    regular expression specified by the argument expression."""
+    match = re.search("^%s$" % expression, output, flags=re.MULTILINE)
+    assert match is not None, "No line matching expression '%s' was found in output file." % expression
diff --git a/lib/galaxy/tools/verify/asserts/xml.py b/lib/galaxy/tools/verify/asserts/xml.py
new file mode 100644
index 0000000..44c4b04
--- /dev/null
+++ b/lib/galaxy/tools/verify/asserts/xml.py
@@ -0,0 +1,89 @@
+from __future__ import absolute_import
+
+import re
+from xml.etree import ElementTree
+
+
+# Helper functions used to work with XML output.
+def to_xml(output):
+    return ElementTree.fromstring(output)
+
+
+def xml_find_text(output, path):
+    xml = to_xml(output)
+    text = xml.findtext(path)
+    return text
+
+
+def xml_find(output, path):
+    xml = to_xml(output)
+    return xml.find(path)
+
+
+def assert_is_valid_xml(output):
+    """ Simple assertion that just verifies the specified output
+    is valid XML."""
+    try:
+        to_xml(output)
+    except Exception as e:
+        # TODO: Narrow caught exception to just parsing failure
+        raise AssertionError("Expected valid XML, but could not parse output. %s" % str(e))
+
+
+def assert_has_element_with_path(output, path):
+    """ Asserts the specified output has at least one XML element with a
+    path matching the specified path argument. Valid paths are the
+    simplified subsets of XPath implemented by xml.etree; see
+    http://effbot.org/zone/element-xpath.htm for more information."""
+    if xml_find(output, path) is None:
+        errmsg = "Expected to find XML element matching expression %s, not such match was found." % path
+        raise AssertionError(errmsg)
+
+
+def assert_has_n_elements_with_path(output, path, n):
+    """ Asserts the specified output has exactly n elements matching the
+    path specified."""
+    xml = to_xml(output)
+    n = int(n)
+    num_elements = len(xml.findall(path))
+    if num_elements != n:
+        errmsg = "Expected to find %d elements with path %s, but %d were found." % (n, path, num_elements)
+        raise AssertionError(errmsg)
+
+
+def assert_element_text_matches(output, path, expression):
+    """ Asserts the text of the first element matching the specified
+    path matches the specified regular expression."""
+    text = xml_find_text(output, path)
+    if re.match(expression, text) is None:
+        errmsg = "Expected element with path '%s' to contain text matching '%s', instead text '%s' was found." % (path, expression, text)
+        raise AssertionError(errmsg)
+
+
+def assert_element_text_is(output, path, text):
+    """ Asserts the text of the first element matching the specified
+    path matches exactly the specified text. """
+    assert_element_text_matches(output, path, re.escape(text))
+
+
+def assert_attribute_matches(output, path, attribute, expression):
+    """ Asserts the specified attribute of the first element matching
+    the specified path matches the specified regular expression."""
+    xml = xml_find(output, path)
+    attribute_value = xml.attrib[attribute]
+    if re.match(expression, attribute_value) is None:
+        errmsg = "Expected attribute '%s' on element with path '%s' to match '%s', instead attribute value was '%s'." % (attribute, path, expression, attribute_value)
+        raise AssertionError(errmsg)
+
+
+def assert_attribute_is(output, path, attribute, text):
+    """ Asserts the specified attribute of the first element matching
+    the specified path matches exactly the specified text."""
+    assert_attribute_matches(output, path, attribute, re.escape(text))
+
+
+def assert_element_text(output, path, verify_assertions_function, children):
+    """ Recursively checks the specified assertions against the text of
+    the first element matching the specified path."""
+    text = xml_find_text(output, path)
+    verify_assertions_function(text, children)
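+
+
+# For instance (illustrative XML), given output
+# "<root><name>seq1</name></root>", assert_element_text_matches(output,
+# "name", "seq.*") passes: xml_find_text resolves path "name" to "seq1",
+# which re.match then checks against the expression.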
diff --git a/lib/galaxy/tools/verify/test_data.py b/lib/galaxy/tools/verify/test_data.py
new file mode 100644
index 0000000..01ef28b
--- /dev/null
+++ b/lib/galaxy/tools/verify/test_data.py
@@ -0,0 +1,121 @@
+from __future__ import print_function
+
+import hashlib
+import os
+import re
+import subprocess
+
+from string import Template
+
+from galaxy.util import asbool
+
+UPDATE_TEMPLATE = Template(
+    "git --work-tree $dir --git-dir $dir/.git fetch && "
+    "git --work-tree $dir --git-dir $dir/.git merge origin/master"
+)
+
+UPDATE_FAILED_TEMPLATE = Template(
+    "Warning failed to update test repository $dir - "
+    "update stdout was [$stdout] and stderr was [$stderr]."
+)
+
+
+LIST_SEP = re.compile(r"\s*,\s*")
+
+
+class TestDataResolver(object):
+
+    def __init__(self, env_var='GALAXY_TEST_FILE_DIR', environ=os.environ):
+        file_dirs = environ.get(env_var, None)
+        if file_dirs:
+            self.resolvers = map(lambda u: build_resolver(u, environ), LIST_SEP.split(file_dirs))
+        else:
+            self.resolvers = []
+
+    def get_filename(self, name):
+        if not self.resolvers:
+            filename = None
+        else:
+            resolver = self.resolvers[0]
+            filename = resolver.path(name)
+            if not resolver.exists(filename):
+                for resolver in self.resolvers[1:]:
+                    if resolver.exists(name):
+                        filename = resolver.path(name)
+            else:
+                # For backward compat. returning first path if none
+                # exist - though I don't know if this function is ever
+                # actually used in a context where one should return
+                # a file even if it doesn't exist (e.g. a prefix or
+                # something) - I am pretty sure it is not used in
+                # such a fashion in the context of tool tests.
+                filename = resolver.path(name)
+        return os.path.abspath(filename)
+
+
+def build_resolver(uri, environ):
+    if uri.startswith("http") and uri.endswith(".git"):
+        return GitDataResolver(uri, environ)
+    else:
+        return FileDataResolver(uri)
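+
+# For example (illustrative values), GALAXY_TEST_FILE_DIR may mix plain
+# directories and git repository URLs:
+#
+#     GALAXY_TEST_FILE_DIR='test-data,https://github.com/example/tool-data.git'
+#
+# The first entry becomes a FileDataResolver, the second a GitDataResolver
+# cloned and cached under GALAXY_TEST_DATA_REPO_CACHE.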
+
+
+class FileDataResolver(object):
+
+    def __init__(self, file_dir):
+        self.file_dir = file_dir
+
+    def exists(self, filename):
+        return os.path.exists(self.path(filename))
+
+    def path(self, filename):
+        return os.path.join(self.file_dir, filename)
+
+
+class GitDataResolver(FileDataResolver):
+
+    def __init__(self, repository, environ):
+        self.repository = repository
+        self.updated = False
+        repo_cache = environ.get("GALAXY_TEST_DATA_REPO_CACHE", "test-data-cache")
+        m = hashlib.md5()
+        m.update(repository)
+        repo_path = os.path.join(repo_cache, m.hexdigest())
+        super(GitDataResolver, self).__init__(repo_path)
+        # My preference would be for this to be false, but for backward compat
+        # will leave it as true for now.
+        self.fetch_data = asbool(environ.get("GALAXY_TEST_FETCH_DATA", "true"))
+
+    def exists(self, filename):
+        exists_now = super(GitDataResolver, self).exists(filename)
+        if exists_now or not self.fetch_data or self.updated:
+            return exists_now
+        self.update_repository()
+        return super(GitDataResolver, self).exists(filename)
+
+    def update_repository(self):
+        self.updated = True
+        if not os.path.exists(self.file_dir):
+            parent_dir = os.path.dirname(self.file_dir)
+            if not os.path.exists(parent_dir):
+                os.makedirs(parent_dir)
+            self.execute("git clone '%s' '%s'" % (self.repository, self.file_dir))
+        update_command = UPDATE_TEMPLATE.safe_substitute(dir=self.file_dir)
+        self.execute(update_command)
+
+    def execute(self, cmd):
+        subprocess_kwds = dict(
+            shell=True,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
+        print("Executing %s" % cmd)
+        p = subprocess.Popen(cmd, **subprocess_kwds)
+        stdout, stderr = p.communicate()
+        if p.returncode != 0:
+            kwds = {
+                'dir': self.file_dir,
+                'stdout': stdout,
+                'stderr': stderr,
+            }
+            print(UPDATE_FAILED_TEMPLATE.substitute(**kwds))
diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py
new file mode 100644
index 0000000..d9801e7
--- /dev/null
+++ b/lib/galaxy/tools/wrappers.py
@@ -0,0 +1,413 @@
+import os
+import pipes
+import tempfile
+from galaxy import exceptions
+from galaxy.util.none_like import NoneDataset
+from galaxy.util import odict
+from galaxy.util.object_wrapper import wrap_with_safe_string
+
+from logging import getLogger
+log = getLogger( __name__ )
+
+# Fields in .loc files corresponding to paths must have one of the following
+# field names, and all such fields are assumed to be paths. This is to allow
+# remote ComputeEnvironments (such as the one used by Pulsar) to determine what
+# values to rewrite or transfer...
+PATH_ATTRIBUTES = [ "path" ]
+
+
+# ... by default though - don't rewrite anything (if no ComputeEnvironment
+# defined or ComputeEnvironment doesn't supply a rewriter).
+def DEFAULT_PATH_REWRITER(x):
+    return x
+
+
+class ToolParameterValueWrapper( object ):
+    """
+    Base class for objects that wrap a Tool Parameter and Value.
+    """
+
+    def __nonzero__( self ):
+        return bool( self.value )
+
+    def get_display_text( self, quote=True ):
+        """
+        Returns a string containing the value that would be displayed to the user in the tool interface.
+        When quote is True (default), the string is escaped for e.g. command-line usage.
+        """
+        rval = self.input.value_to_display_text( self.value, self.input.tool.app ) or ''
+        if quote:
+            return pipes.quote( rval ) or "''"  # pipes.quote in Python < 2.7 returns an empty string instead of the expected quoted empty string
+        return rval
+
+
+class RawObjectWrapper( ToolParameterValueWrapper ):
+    """
+    Wraps an object so that __str__ returns module_name:class_name.
+    """
+    def __init__( self, obj ):
+        self.obj = obj
+
+    def __nonzero__( self ):
+        return bool( self.obj )  # FIXME: would it be safe/backwards compatible to rename .obj to .value, so that we can just inherit this method?
+
+    def __str__( self ):
+        try:
+            return "%s:%s" % (self.obj.__module__, self.obj.__class__.__name__)
+        except:
+            # Most likely None, which lacks __module__.
+            return str( self.obj )
+
+    def __getattr__( self, key ):
+        return getattr( self.obj, key )
+
+
+class InputValueWrapper( ToolParameterValueWrapper ):
+    """
+    Wraps an input so that __str__ gives the "param_dict" representation.
+    """
+    def __init__( self, input, value, other_values={} ):
+        self.input = input
+        self.value = value
+        self._other_values = other_values
+
+    def __eq__( self, other ):
+        if isinstance( other, basestring ):
+            return str( self ) == other
+        elif isinstance( other, int ):
+            return int( self ) == other
+        elif isinstance( other, float ):
+            return float( self ) == other
+        else:
+            return super( InputValueWrapper, self ) == other
+
+    def __ne__( self, other ):
+        return not self == other
+
+    def __str__( self ):
+        to_param_dict_string = self.input.to_param_dict_string( self.value, self._other_values )
+        if isinstance( to_param_dict_string, list ):
+            return ','.join( to_param_dict_string )
+        else:
+            return to_param_dict_string
+
+    def __iter__( self ):
+        to_param_dict_string = self.input.to_param_dict_string( self.value, self._other_values )
+        if not isinstance( to_param_dict_string, list ):
+            return iter( [ to_param_dict_string ] )
+        else:
+            return iter( to_param_dict_string )
+
+    def __getattr__( self, key ):
+        return getattr( self.value, key )
+
+    def __int__(self):
+        return int(str(self))
+
+    def __float__(self):
+        return float(str(self))
+
+
+class SelectToolParameterWrapper( ToolParameterValueWrapper ):
+    """
+    Wraps a SelectToolParameter so that __str__ returns the selected value, but all other
+    attributes are accessible.
+    """
+
+    class SelectToolParameterFieldWrapper:
+        """
+        Provide access to any field by name or index for this particular value.
+        Only applicable for dynamic_options selects, which have more than simple 'options' defined (name, value, selected).
+        """
+        def __init__( self, input, value, other_values, path_rewriter ):
+            self._input = input
+            self._value = value
+            self._other_values = other_values
+            self._fields = {}
+            self._path_rewriter = path_rewriter
+
+        def __getattr__( self, name ):
+            if name not in self._fields:
+                self._fields[ name ] = self._input.options.get_field_by_name_for_value( name, self._value, None, self._other_values )
+            values = map( str, self._fields[ name ] )
+            if name in PATH_ATTRIBUTES:
+                # If we infer this is a path, rewrite it if needed.
+                values = map( self._path_rewriter, values )
+            return self._input.separator.join( values )
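+
+        # e.g. in a Cheetah template, ${param.fields.path} (names
+        # illustrative) joins the 'path' field of each selected value with
+        # the parameter's separator, rewriting each via path_rewriter since
+        # 'path' is listed in PATH_ATTRIBUTES.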
+
+    def __init__( self, input, value, app, other_values={}, path_rewriter=None ):
+        self.input = input
+        self.value = value
+        self.input.value_label = input.value_to_display_text( value, app )
+        self._other_values = other_values
+        self._path_rewriter = path_rewriter or DEFAULT_PATH_REWRITER
+        self.fields = self.SelectToolParameterFieldWrapper( input, value, other_values, self._path_rewriter )
+
+    def __eq__( self, other ):
+        if isinstance( other, basestring ):
+            return str( self ) == other
+        else:
+            return super( SelectToolParameterWrapper, self ) == other
+
+    def __ne__( self, other ):
+        return not self == other
+
+    def __str__( self ):
+        # Assuming value is never a path - otherwise we would need to pass
+        # along the following argument: value_map=self._path_rewriter.
+        return self.input.to_param_dict_string( self.value, other_values=self._other_values )
+
+    def __add__( self, x ):
+        return '%s%s' % ( self, x )
+
+    def __getattr__( self, key ):
+        return getattr( self.input, key )
+
+
+class DatasetFilenameWrapper( ToolParameterValueWrapper ):
+    """
+    Wraps a dataset so that __str__ returns the filename, but all other
+    attributes are accessible.
+    """
+
+    class MetadataWrapper:
+        """
+        Wraps a Metadata Collection to return MetadataParameters wrapped
+        according to the metadata spec. Methods implemented to match behavior
+        of a Metadata Collection.
+        """
+        def __init__( self, metadata ):
+            self.metadata = metadata
+
+        def __getattr__( self, name ):
+            rval = self.metadata.get( name, None )
+            if name in self.metadata.spec:
+                if rval is None:
+                    rval = self.metadata.spec[name].no_value
+                rval = self.metadata.spec[ name ].param.to_safe_string( rval )
+                # Store this value, so we don't need to recalculate if needed
+                # again
+                setattr( self, name, rval )
+            else:
+                # escape string value of non-defined metadata value
+                rval = wrap_with_safe_string( rval )
+            return rval
+
+        def __nonzero__( self ):
+            return self.metadata.__nonzero__()
+
+        def __iter__( self ):
+            return self.metadata.__iter__()
+
+        def get( self, key, default=None ):
+            try:
+                return getattr( self, key )
+            except:
+                return default
+
+        def items( self ):
+            return iter( [ ( k, self.get( k ) ) for k, v in self.metadata.items() ] )
+
+    def __init__( self, dataset, datatypes_registry=None, tool=None, name=None, dataset_path=None, identifier=None ):
+        if not dataset:
+            try:
+                # TODO: allow this to work when working with grouping
+                ext = tool.inputs[name].extensions[0]
+            except:
+                ext = 'data'
+            self.dataset = wrap_with_safe_string( NoneDataset( datatypes_registry=datatypes_registry, ext=ext ), no_wrap_classes=ToolParameterValueWrapper )
+        else:
+            # Tool wrappers should not normally be accessing .dataset directly,
+            # so we will wrap it and keep the original around for file paths
+            # Should we name this .value to maintain consistency with most other ToolParameterValueWrapper?
+            self.unsanitized = dataset
+            self.dataset = wrap_with_safe_string( dataset, no_wrap_classes=ToolParameterValueWrapper )
+            self.metadata = self.MetadataWrapper( dataset.metadata )
+        self.datatypes_registry = datatypes_registry
+        self.false_path = getattr( dataset_path, "false_path", None )
+        self.false_extra_files_path = getattr( dataset_path, "false_extra_files_path", None )
+        self._element_identifier = identifier
+
+    @property
+    def element_identifier( self ):
+        identifier = self._element_identifier
+        if identifier is None:
+            identifier = self.name
+        return identifier
+
+    @property
+    def is_collection( self ):
+        return False
+
+    def is_of_type( self, *exts ):
+        datatypes = [ self.datatypes_registry.get_datatype_by_extension( e ) for e in exts ]
+        return self.dataset.datatype.matches_any( datatypes )
+
+    def __str__( self ):
+        if self.false_path is not None:
+            return self.false_path
+        else:
+            return self.unsanitized.file_name
+
+    def __getattr__( self, key ):
+        if self.false_path is not None and key == 'file_name':
+            # Path to dataset was rewritten for this job.
+            return self.false_path
+        elif self.false_extra_files_path is not None and key == 'extra_files_path':
+            # Path to extra files was rewritten for this job.
+            return self.false_extra_files_path
+        elif key == 'extra_files_path':
+            try:
+                # Assume it is an output and that this wrapper
+                # will be set with correct "files_path" for this
+                # job.
+                return self.files_path
+            except AttributeError:
+                # Otherwise, we have an input - delegate to model and
+                # object store to find the static location of this
+                # directory.
+                try:
+                    return self.unsanitized.extra_files_path
+                except exceptions.ObjectNotFound:
+                    # NestedObjectstore raises an error here
+                    # instead of just returning a non-existent
+                    # path like DiskObjectStore.
+                    raise
+        else:
+            return getattr( self.dataset, key )
+
+    def __nonzero__( self ):
+        return bool( self.dataset )
+
+
+class HasDatasets:
+
+    def _dataset_wrapper( self, dataset, dataset_paths, **kwargs ):
+        wrapper_kwds = kwargs.copy()
+        if dataset:
+            real_path = dataset.file_name
+            if real_path in dataset_paths:
+                wrapper_kwds[ "dataset_path" ] = dataset_paths[ real_path ]
+        return DatasetFilenameWrapper( dataset, **wrapper_kwds )
+
+    def paths_as_file(self, sep="\n"):
+        handle, filepath = tempfile.mkstemp(prefix="gx_file_list", dir=self.job_working_directory)
+        contents = sep.join(map(str, self))
+        os.write(handle, contents)
+        os.close(handle)
+        return filepath
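+
+    # For instance (hypothetical template usage), a command block might call
+    # $input_collection.paths_as_file() to write one wrapped dataset path
+    # per line to a file under the job working directory and substitute
+    # that file's path on the command line.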
+
+
+class DatasetListWrapper( list, ToolParameterValueWrapper, HasDatasets ):
+    """
+    """
+    def __init__( self, job_working_directory, datasets, dataset_paths=[], **kwargs ):
+        if not isinstance(datasets, list):
+            datasets = [datasets]
+
+        def to_wrapper( dataset ):
+            if hasattr(dataset, "dataset_instance"):
+                element = dataset
+                dataset = element.dataset_instance
+                kwargs["identifier"] = element.element_identifier
+            return self._dataset_wrapper( dataset, dataset_paths, **kwargs )
+
+        list.__init__( self, map( to_wrapper, datasets ) )
+        self.job_working_directory = job_working_directory
+
+    @staticmethod
+    def to_dataset_instances( dataset_instance_sources ):
+        dataset_instances = []
+        if not isinstance( dataset_instance_sources, list ):
+            dataset_instance_sources = [ dataset_instance_sources ]
+        for dataset_instance_source in dataset_instance_sources:
+            if dataset_instance_source is None:
+                dataset_instances.append( dataset_instance_source )
+            elif dataset_instance_source.history_content_type == "dataset":
+                dataset_instances.append( dataset_instance_source )
+            else:
+                dataset_instances.extend( dataset_instance_source.collection.dataset_elements )
+        return dataset_instances
+
+    def __str__( self ):
+        return ','.join( map( str, self ) )
+
+
+class DatasetCollectionWrapper( ToolParameterValueWrapper, HasDatasets ):
+
+    def __init__( self, job_working_directory, has_collection, dataset_paths=[], **kwargs ):
+        super(DatasetCollectionWrapper, self).__init__()
+        self.job_working_directory = job_working_directory
+
+        if has_collection is None:
+            self.__input_supplied = False
+            return
+        else:
+            self.__input_supplied = True
+
+        if hasattr( has_collection, "name" ):
+            # It is a HistoryDatasetCollectionAssociation
+            collection = has_collection.collection
+            self.name = has_collection.name
+        elif hasattr( has_collection, "child_collection" ):
+            # It is a DatasetCollectionElement instance referencing another collection
+            collection = has_collection.child_collection
+            self.name = has_collection.element_identifier
+        else:
+            collection = has_collection
+            self.name = None
+
+        elements = collection.elements
+        element_instances = odict.odict()
+
+        element_instance_list = []
+        for dataset_collection_element in elements:
+            element_object = dataset_collection_element.element_object
+            element_identifier = dataset_collection_element.element_identifier
+
+            if dataset_collection_element.is_collection:
+                element_wrapper = DatasetCollectionWrapper(job_working_directory, dataset_collection_element, dataset_paths, **kwargs )
+            else:
+                element_wrapper = self._dataset_wrapper( element_object, dataset_paths, identifier=element_identifier, **kwargs)
+
+            element_instances[element_identifier] = element_wrapper
+            element_instance_list.append( element_wrapper )
+
+        self.__element_instances = element_instances
+        self.__element_instance_list = element_instance_list
+
+    def keys( self ):
+        if not self.__input_supplied:
+            return []
+        return self.__element_instances.keys()
+
+    @property
+    def is_collection( self ):
+        return True
+
+    @property
+    def is_input_supplied( self ):
+        return self.__input_supplied
+
+    def __getitem__( self, key ):
+        if not self.__input_supplied:
+            return None
+        if isinstance( key, int ):
+            return self.__element_instance_list[ key ]
+        else:
+            return self.__element_instances[ key ]
+
+    def __getattr__( self, key ):
+        if not self.__input_supplied:
+            return None
+        return self.__element_instances[ key ]
+
+    def __iter__( self ):
+        if not self.__input_supplied:
+            return [].__iter__()
+        return self.__element_instance_list.__iter__()
+
+    def __nonzero__( self ):
+        # Fail `#if $param` checks in Cheetah if the optional input is
+        # not specified or if the resulting collection is empty.
+        return self.__input_supplied and bool( self.__element_instance_list )
diff --git a/lib/galaxy/tools/xsd/LICENSE b/lib/galaxy/tools/xsd/LICENSE
new file mode 100644
index 0000000..09eb8b9
--- /dev/null
+++ b/lib/galaxy/tools/xsd/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2011-2016 John Chilton, Jean-Frédéric Berthelot, Pierre Lindenbaum
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/lib/galaxy/tools/xsd/README.md b/lib/galaxy/tools/xsd/README.md
new file mode 100644
index 0000000..60274d3
--- /dev/null
+++ b/lib/galaxy/tools/xsd/README.md
@@ -0,0 +1,45 @@
+Galaxy-XSD
+==========
+[![Build Status](https://travis-ci.org/JeanFred/Galaxy-XSD.svg)](http://travis-ci.org/JeanFred/Galaxy-XSD)
+[![License](http://img.shields.io/badge/license-MIT-orange.svg?style=flat)](http://opensource.org/licenses/MIT)
+
+A Galaxy XML tool wrapper __XML schema definition__ (__XSD__) 
+
+
+
+# History
+
+* Feb-2015: Pierre Lindenbaum added doc, tests, and a Java-XML binding file (jxb) for the Java XML compiler (xjc) (https://docs.oracle.com/cd/E19575-01/819-3669/bnbal/index.html)
+* 2013 : extraction to standalone and improvements by Jean-Fred
+* 2011 : Initial work by John Chilton
+
+# Validating a `tool.xml`
+
+```bash
+$ xmllint --noout --schema galaxy.xsd tool.xml 
+```
+
+# Creating java code
+
+```bash
+$  ${JAVA_HOME}/bin/xjc -b galaxy.jxb galaxy.xsd 
+```
+
+
+# Authors
+
+* Jean-Frédéric @JeanFred
+* Pierre Lindenbaum @yokofakun
+* John Chilton @jmchilton
+
+
+# Licence
+
+This code is free software released under the terms of the MIT license.
+
+
+# See also:
+
+* Galaxy https://usegalaxy.org/
+* Galaxy Tool XML File https://wiki.galaxyproject.org/Admin/Tools/ToolConfigSyntax
+
diff --git a/lib/galaxy/tools/xsd/galaxy.jxb b/lib/galaxy/tools/xsd/galaxy.jxb
new file mode 100644
index 0000000..29eac0b
--- /dev/null
+++ b/lib/galaxy/tools/xsd/galaxy.jxb
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<jxb:bindings 
+	xmlns:xs="http://www.w3.org/2001/XMLSchema"
+	xmlns:xjc="http://java.sun.com/xml/ns/jaxb/xjc"
+	xmlns:jxb="http://java.sun.com/xml/ns/jaxb"
+	jxb:version="2.1"
+	>
+
+<jxb:bindings schemaLocation="galaxy.xsd">
+ 
+
+    <jxb:bindings node="/xs:schema/xs:complexType[@name='Param']/xs:complexContent/xs:extension/xs:attribute[@name='label']">
+            <jxb:property name="LabelAttribute"/>
+    </jxb:bindings>
+
+    <jxb:bindings node="/xs:schema/xs:complexType[@name='ParamOption']/xs:simpleContent/xs:extension/xs:attribute[@name='value']">
+            <jxb:property name="ValueAttribute"/>
+    </jxb:bindings>
+
+
+</jxb:bindings>
+</jxb:bindings>
diff --git a/lib/galaxy/tools/xsd/galaxy.xsd b/lib/galaxy/tools/xsd/galaxy.xsd
new file mode 100644
index 0000000..d2d432d
--- /dev/null
+++ b/lib/galaxy/tools/xsd/galaxy.xsd
@@ -0,0 +1,5125 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema
+        xmlns:xs="http://www.w3.org/2001/XMLSchema"
+        xmlns:gxdocs="http://galaxyproject.org/xml/1.0"
+    elementFormDefault="qualified"
+    attributeFormDefault="unqualified"
+    >
+  <xs:annotation>
+    <xs:appinfo>Galaxy Schema</xs:appinfo>
+    <xs:documentation xml:lang="en">A Galaxy XML tool wrapper</xs:documentation>
+  </xs:annotation>
+
+  <xs:element name="tool">
+    <xs:annotation gxdocs:best_practices="tools">
+      <xs:documentation xml:lang="en"><![CDATA[
+The outer-most tag set of tool XML files. Attributes on this tag apply to the
+tool as a whole.
+
+### Examples
+
+A normal tool:
+
+```xml
+<tool id="seqtk_seq"
+      name="Convert FASTQ to FASTA"
+      version="1.0.0"
+      profile="16.04"
+>
+```
+
+A ``data_source`` tool contains a few more relevant attributes.
+
+```xml
+<tool id="ucsc_table_direct1"
+      name="UCSC Main"
+      version="1.0.0"
+      hidden="false"
+      profile="16.01"
+      tool_type="data_source"
+      URL_method="post">
+```
+      ]]></xs:documentation>
+    </xs:annotation>
+    <xs:complexType>
+      <xs:all>
+        <!-- TODO: Move the anyType further into macros def... -->
+        <xs:element name="macros" type="xs:anyType" minOccurs="0"/>
+        <xs:element name="requirements" type="Requirements" minOccurs="0"/>
+        <xs:element name="description" type="xs:string" minOccurs="0">
+          <xs:annotation gxdocs:best_practices="tool-descriptions">
+            <xs:documentation xml:lang="en"><![CDATA[The value is displayed in
+the tool menu immediately following the hyperlink for the tool (based on the
+``name`` attribute of the ``<tool>`` tag set described above).
+
+### Example
+
+```xml
+<description>table browser</description>
+```
+]]></xs:documentation>
+          </xs:annotation>
+        </xs:element>
+        <xs:element name="parallelism" type="Parallelism" minOccurs="0"/>
+        <xs:element name="version_command" type="VersionCommand" minOccurs="0">
+        </xs:element>
+        <xs:element name="action" type="ToolAction" minOccurs="0" maxOccurs="1" />
+        <xs:element name="environment_variables" type="EnvironmentVariables" minOccurs="0" maxOccurs="1"/>
+        <xs:element name="command" type="Command"/>
+        <xs:element name="request_parameter_translation" type="RequestParameterTranslation" minOccurs="0"/>
+        <xs:element name="configfiles" type="ConfigFiles" minOccurs="0"/>
+        <xs:element name="outputs" type="Outputs" minOccurs="0"/>
+        <xs:element name="inputs" type="Inputs" minOccurs="0"/>
+        <xs:element name="tests" type="Tests" minOccurs="0"/>
+        <xs:element name="stdio" type="Stdio" minOccurs="0"/>
+        <xs:element name="help" type="xs:string" minOccurs="0">
+          <xs:annotation gxdocs:best_practices="help-tag">
+            <xs:documentation xml:lang="en"><![CDATA[This tag set includes all of the necessary details of how to use the tool. This tag set should be included as the next to the last tag set, before citations, in the tool config. Tool help is written in reStructuredText. Included here is only an overview of a subset of features. For more information see http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html .
+
+tag | details
+--- | -------
+``.. class:: warningmark`` | a yellow warning symbol
+``.. class:: infomark`` | a blue information symbol
+``.. image:: path-of-the-file.png :height: 500 :width: 600`` | insert a png file of height 500 and width 600 at this position
+``**bold**`` | bold
+``*italic*`` | italic
+``*`` | list
+``-`` | list
+``::`` | preformatted (literal) block
+``-----`` | a horizontal line
+
+### Examples
+
+Show a warning sign to remind users that this tool accepts fasta format files only, followed by an example of the query sequence and a figure.
+
+```xml
+<help>
+
+.. class:: warningmark
+
+**TIP**: This tool requires *fasta* format.
+
+----
+
+**Example**
+
+Query sequence::
+    >seq1
+    ATCG...
+
+.. image:: my_figure.png
+    :height: 500
+    :width: 600
+
+</help>
+```
+
+]]></xs:documentation>
+          </xs:annotation>
+        </xs:element>
+        <xs:element name="code" type="Code" minOccurs="0"/>
+        <xs:element name="uihints" type="UIhints" minOccurs="0"/>
+        <xs:element name="options" type="Options" minOccurs="0"/>
+        <xs:element name="trackster_conf" type="TracksterConf" minOccurs="0"/>
+        <xs:element name="citations" type="Citations" minOccurs="0"/>
+      </xs:all>
+      <xs:attribute name="id" type="xs:string" use="required">
+        <xs:annotation gxdocs:best_practices="tool-ids">
+          <xs:documentation xml:lang="en">Must be unique across all tools;
+should be lowercase and contain only letters, numbers, and underscores.
+It allows for tool versioning and metrics of the number of times a tool is used,
+among other things.</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="name" type="xs:string" use="required">
+        <xs:annotation gxdocs:best_practices="tool-names">
+          <xs:documentation xml:lang="en">This string is what is displayed as a
+hyperlink in the tool menu.</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="version" type="xs:string" default="1.0.0">
+        <xs:annotation gxdocs:best_practices="tool-versions">
+          <xs:documentation xml:lang="en">This string defaults to ``1.0.0`` if it is not
+included in the tag. It allows for tool versioning and should be increased with each new version
+of the tool.</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="hidden" type="PermissiveBoolean" default="false">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Allows for tools to be loaded upon
+server startup, but not displayed in the tool menu. This attribute should be
+applied in the toolbox configuration instead and so should be considered
+deprecated.
+</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="display_interface" type="PermissiveBoolean">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Disable the display the tool's
+graphical tool form by setting this to ``false``.</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="tool_type" type="ToolTypeType">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Allows for certain framework
+functionality to be performed on certain types of tools. Normal tools that execute
+typical command-line jobs do not need to specify this; special kinds of tools such
+as [Data Source](https://wiki.galaxyproject.org/Admin/Internals/DataSources) and
+[Data Manager](https://wiki.galaxyproject.org/Admin/Tools/DataManagers) tools should
+set this to values such as ``data_source`` or ``manage_data``.</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="profile" type="xs:string">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">This string specified the minimum Galaxy
+version that should be required to run this tool. Certain legacy behaviors such
+as using standard error content to detect errors instead of exit code are disabled
+automatically if profile is set to any version newer than ``16.01``, such as
+``16.04``.</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="workflow_compatible" type="xs:boolean" default="true">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">This attribute indicates if
+this tool is usable within a workflow (defaults to ``true`` for normal tools and
+``false`` for data sources).</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="URL_method" type="URLmethodType">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Only used if ``tool_type`` attribute value
+is ``data_source`` - this attribute defines the HTTP request method to use when
+communicating with an external data source application (the default is ``get``).</xs:documentation>
+        </xs:annotation>
+      </xs:attribute>
+    </xs:complexType>
+  </xs:element>
+
+  <xs:complexType name="ToolAction">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Describe the backend Python action to execute for this Galaxy tool.</xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+    </xs:sequence>
+    <xs:attribute name="module" type="xs:string" use="required">
+    </xs:attribute>
+    <xs:attribute name="class" type="xs:string" use="required">
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="Requirements">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This is a container tag set for the ``requirement`` and ``container`` tags
+described in greater detail below. ``requirement``s describe software packages
+and other individual computing requirements required to execute a tool, while
+``container``s describe Docker containers that should be able to serve as
+complete descriptions of the runtime of a tool.
+
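+### Example
+
+A tool will typically declare one or more package ``requirement``s and may
+additionally suggest a ``container``. A minimal sketch combining both (the
+Docker image name here is purely illustrative):
+
+```xml
+<requirements>
+    <requirement type="package" version="0.1.18">samtools</requirement>
+    <container type="docker">quay.io/biocontainers/samtools:0.1.18</container>
+</requirements>
+```
+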
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="requirement" type="Requirement" minOccurs="0" maxOccurs="unbounded"/>
+      <xs:element name="container" type="Container" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:complexType name="Requirement">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set is contained within the ``requirements`` tag set. Third party
+programs or modules that the tool depends upon are included in this tag set.
+
+When a tool runs, Galaxy attempts to *resolve* these requirements (also called
+dependencies). ``requirement``s are meant to be abstract and resolvable by
+multiple different systems (e.g. [conda](http://conda.pydata.org/docs/), the
+[Galaxy Tool Shed dependency management system](https://wiki.galaxyproject.org/ToolShedToolFeatures#Automatic_third-party_tool_dependency_installation_and_compilation_with_installed_repositories),
+or [environment modules](http://modules.sourceforge.net/)).
+
+Read more about dependency resolvers in Galaxy on
+[docs.galaxyproject.org](https://docs.galaxyproject.org/en/master/admin/dependency_resolvers.html).
+The current best practice for tool dependencies is to target Conda; this is
+discussed in greater detail
+[here](https://docs.galaxyproject.org/en/master/admin/conda_faq.html).
+
+### Examples
+
+This example shows a tool that requires version 0.1.18 of the samtools package.
+
+This package is available via the Tool Shed (see
+[Tool Shed dependency management](https://wiki.galaxyproject.org/ToolShedToolFeatures#Automatic_third-party_tool_dependency_installation_and_compilation_with_installed_repositories)
+) as well as [Conda](https://docs.galaxyproject.org/en/master/admin/conda_faq.html)
+and can be configured locally to adapt to any other package management system.
+
+```xml
+<requirements>
+    <requirement type="package" version="0.1.18">samtools</requirement>
+</requirements>
+```
+
+This older example shows a tool that requires R version 2.15.1. The
+``tool_dependencies.xml`` should contain matching declarations for Galaxy to
+actually install the R runtime. The ``set_environment`` type is only respected
+by the tool shed and is ignored by the newer and preferred conda dependency
+resolver.
+
+```xml
+<requirements>
+    <requirement type="set_environment">R_SCRIPT_PATH</requirement>
+    <requirement type="package" version="2.15.1">R</requirement>
+</requirements>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="type" type="RequirementType" use="required">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"> This value defines the which type of the 3rd party module required by this tool. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="version" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"> For package type requirements this value defines a specific version of the tool dependency. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+  <xs:complexType name="Container">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set is contained within the 'requirements' tag set. Galaxy can be
+configured to run tools within Docker (https://www.docker.com/) containers -
+this tag allows the tool to suggest possible valid Docker containers for this
+tool.
+
+Read more about configuring Galaxy to run Docker jobs
+[here](https://wiki.galaxyproject.org/Admin/Tools/Docker).
+
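+### Example
+
+A sketch suggesting a Docker image that provides the tool's runtime (the
+image name is illustrative only):
+
+```xml
+<requirements>
+    <container type="docker">busybox:ubuntu-14.04</container>
+</requirements>
+```
+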
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="type" type="ContainerType" use="required">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"> This value describes the type of container that the tool may be executed in and currently must be 'docker'. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="Parallelism">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for Parallelism</xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="method" type="MethodType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for method</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="merge_outputs" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for merge_outputs</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="split_inputs" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for split_inputs</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="split_size" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for split_size</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="split_mode" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for split_mode</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="Code">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+**Deprecated** do not use this unless absolutely necessary.
+
+This tag set provides detailed control of the way the tool is executed. This
+(optional) code can be deployed in a separate file in the same directory as the
+tool's config file. These hooks are being replaced by new tool config features
+and methods in the ~/lib/galaxy/tools/__init__.py code file.
+
+### Examples
+
+#### Dynamic Options
+
+Use associated dynamic select lists where selecting an option in the first
+select list dynamically re-renders the options in the second select list. In
+this example, we are populating both dynamic select lists from metadata elements
+associated with a tool's single input dataset. The 2 metadata elements we're
+using look like this.
+
+```python
+MetadataElement( name="field_names", default=[], desc="Field names", readonly=True, optional=True, visible=True, no_value=[] )
+# The keys in the field_components map to the list of field_names in the above element
+# which ensures order for select list options that are built from it.
+MetadataElement( name="field_components", default={}, desc="Field names and components", readonly=True, optional=True, visible=True, no_value={} )
+```
+
+Our tool config includes a code file tag like this.
+
+```xml
+<code file="tool_form_utils.py" />
+```
+
+Here are the relevant input parameters in our tool config. The first parameter
+is the input dataset that includes the above metadata elements.
+
+```xml
+<param name="input" type="data" format="vtkascii,vtkbinary" label="Shape with uncolored surface field">
+    <validator type="expression" message="Shape must have an uncolored surface field.">value is not None and len(value.metadata.field_names) > 0</validator>
+</param>
+```
+
+The following parameter dynamically renders a select list consisting of the
+elements in the ``field_names`` metadata element associated with the selected
+input dataset.
+
+```xml
+<param name="field_name" type="select" label="Field name" refresh_on_change="True">
+    <options>
+        <filter type="data_meta" ref="input" key="field_names"/>
+        <validator type="no_options" message="The selected shape has no uncolored surface fields." />
+    </options>
+</param>
+```
+
+The following parameter calls the ``get_field_components_options()`` function in
+the ``tool_form_utils.py`` code file discussed above. This function returns the
+value of the input dataset's ``field_components`` metadata element dictionary
+whose key is the currently selected ``field_name`` from the select list parameter
+above.
+
+```xml
+<param name="field_component_index" type="select" label="Field component index" dynamic_options="get_field_components_options(input, field_name=field_name)" help="Color will be applied to the selected field's component associated with this index." />
+```
+
+Changing the selected option in the ``field_name`` select list will dynamically
+re-render the options available in the associated ``field_component_index`` select
+list, which is the behavior we want.
+
+The ``get_field_components_options()`` method looks like this.
+
+```python
+def get_field_components_options( dataset, field_name ):
+    options = []
+    if dataset.metadata is None:
+        return options
+    if not hasattr( dataset.metadata, 'field_names' ):
+        return options
+    if dataset.metadata.field_names is None:
+        return options
+    if field_name is None:
+        # The expression validator that helps populate the select list of input
+        # datasets in the icqsol_color_surface_field tool does not filter out
+        # datasets with no field_names, so we need this check.
+        if len( dataset.metadata.field_names ) == 0:
+            return options
+        field_name = dataset.metadata.field_names[0]
+    field_components = dataset.metadata.field_components.get( field_name, [] )
+    for i, field_component in enumerate( field_components ):
+        options.append( ( field_component, field_component, i == 0 ) )
+    return options
+```
+
+]]></xs:documentation>
+
+    </xs:annotation>
+    <xs:attribute name="file" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value is the name of the executable code file, and is called in the exec_before_process(), exec_before_job(), exec_after_process() and exec_after_job()( methods.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="UIhints">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Used only for data source tools, this directive contains UI options (currently only ``minwidth`` is valid).</xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="minwidth" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for minwidth</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="Options">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">This directive is used to specify some rarely modified options.</xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="refresh" type="PermissiveBoolean">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Deprecated, likely unused attribute.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="sanitize" type="PermissiveBoolean" default="true">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This attribute can be used to turn off all input sanitization for a tool.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="TracksterConf">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">This directive is used to specify some rarely modified trackster options.</xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="action" type="TracksterAction" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:complexType name="TracksterAction">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="name" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="output_name" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="Tests">
+    <xs:annotation gxdocs:best_practices="tests">
+      <xs:documentation xml:lang="en"><![CDATA[
+
+Container tag set to specify tests via the <test> tag sets. Any number of tests can be included,
+and each test is wrapped within separate <test> tag sets. Functional tests are
+executed via [Planemo](https://planemo.readthedocs.io/) or the
+[run_tests.sh](https://github.com/galaxyproject/galaxy/blob/dev/run_tests.sh)
+shell script distributed with Galaxy.
+
+The documentation contained here is mostly reference documentation; for
+tutorials on writing tool tests please check out Planemo's
+[Test-Driven Development](https://planemo.readthedocs.io/en/latest/writing_advanced.html#test-driven-development)
+documentation or the much older wiki content for
+[WritingTests](https://wiki.galaxyproject.org/Admin/Tools/WritingTests).
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="test" type="Test" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:complexType name="Test">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set contains the necessary parameter values for executing the tool via
+the functional test framework.
+
+### Example
+
+The following two tests will execute the
+[/tools/filters/sorter.xml](https://github.com/galaxyproject/galaxy/blob/dev/tools/filters/sorter.xml)
+tool. Notice the way that the tool's inputs and outputs are defined.
+
+```xml
+  <tests>
+    <test>
+      <param name="input" value="1.bed" ftype="bed" />
+      <param name="column" value="1"/>
+      <param name="order" value="ASC"/>
+      <param name="style" value="num"/>
+      <output name="out_file1" file="sort1_num.bed" ftype="bed" />
+    </test>
+    <test>
+      <param name="input" value="7.bed" ftype="bed" />
+      <param name="column" value="1"/>
+      <param name="order" value="ASC"/>
+      <param name="style" value="alpha"/>
+      <output name="out_file1" file="sort1_alpha.bed" ftype="bed" />
+    </test>
+  </tests>
+```
+
+The following example tests the execution of the MAF-to-FASTA converter
+([/tools/maf/maf_to_fasta.xml](https://github.com/galaxyproject/galaxy/blob/master/tools/maf/maf_to_fasta.xml)).
+
+```xml
+<tests>
+    <test>
+        <param name="input1" value="3.maf" ftype="maf"/>
+        <param name="species" value="canFam1"/>
+        <param name="fasta_type" value="concatenated"/>
+        <output name="out_file1" file="cf_maf2fasta_concat.dat" ftype="fasta"/>
+    </test>
+</tests>
+```
+
+This test demonstrates verifying specific properties about a test output instead
+of directly comparing it to another file. Here the file attribute is not
+specified and instead a series of assertions is made about the output.
+
+```xml
+<test>
+    <param name="input" value="maf_stats_interval_in.dat" />
+    <param name="lineNum" value="99999"/>
+    <output name="out_file1">
+        <assert_contents>
+            <has_text text="chr7" />
+            <not_has_text text="chr8" />
+            <has_text_matching expression="1274\d+53" />
+            <has_line_matching expression=".*\s+127489808\s+127494553" />
+            <!-- &#009; is XML escape code for tab -->
+            <has_line line="chr7&#009;127471195&#009;127489808" />
+            <has_n_columns n="3" />
+        </assert_contents>
+    </output>
+</test>
+```
+
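+Tests can also assert that a job should fail outright, optionally with a
+specific exit code, using the attributes documented below. A sketch (the
+parameter name and input file are placeholders):
+
+```xml
+<test expect_failure="true" expect_exit_code="1">
+    <param name="input" value="simple_line.txt" />
+</test>
+```
+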
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="TestParamElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="expect_exit_code" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Describe the job's expected exit code.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="expect_num_outputs" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Assert the number of outputs this test
+should produce; this is useful to ensure ``filter`` directives are implemented correctly.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="expect_failure" type="PermissiveBoolean" default="false">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Setting this to ``true`` indicates
+the expectation is for the job to fail. If set to ``true``, no job output checks may
+be present in the ``test`` definition.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="maxseconds" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Maximum amount of time to let test run.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:group name="TestParamElement">
+    <xs:choice>
+      <xs:element name="param" type="TestParam" />
+      <xs:element name="repeat" type="TestRepeat" />
+      <xs:element name="conditional" type="TestConditional" />
+      <xs:element name="section" type="TestSection" />
+      <xs:element name="output" type="TestOutput" minOccurs="0" maxOccurs="unbounded"/>
+      <xs:element name="output_collection" type="TestOutputCollection"/>
+      <xs:element name="assert_command" type="TestAssertions">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Describe assertions about the job's
+generated command-line.
+
+$assertions
+</xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="assert_stdout" type="TestAssertions">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Describe assertions about the job's
+standard output.
+
+$assertions
+</xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="assert_stderr" type="TestAssertions">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Describe assertions about the job's
+standard error.
+
+$assertions
+</xs:documentation>
+        </xs:annotation>
+      </xs:element>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="TestSection">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+Specify test parameters below a named ``section`` block matching
+one in ``inputs`` with this element.
+
+``param`` elements in a ``test`` block can be arranged into nested ``repeat``,
+``conditional``, and ``select`` structures to match the inputs. While this might
+be overkill for simple tests, it helps prevent ambiguous definitions and keeps
+things organized in large test cases. A future ``profile`` version of Galaxy
+tools may require ``section`` blocks be explicitly defined with this
+directive.
+
+### Examples
+
+The test tool demonstrating sections
+([section.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/section.xml))
+contains a test case demonstrating this block. This test case appears below:
+
+```xml
+<test>
+    <section name="int">
+        <param name="inttest" value="12456" />
+    </section>
+    <section name="float">
+        <param name="floattest" value="6.789" />
+    </section>
+    <output name="out_file1">
+        <assert_contents>
+            <has_line line="12456" />
+            <has_line line="6.789" />
+        </assert_contents>
+    </output>
+</test>
+```
+
+]]>
+      </xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="TestParamElement" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value must match the name of the
+associated input ``section``.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="TestConditional">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+Specify test parameters below a named ``conditional`` block matching
+one in ``inputs`` with this element.
+
+``param`` elements in a ``test`` block can be arranged into nested ``repeat``,
+``conditional``, and ``select`` structures to match the inputs. While this might
+be overkill for simple tests, it helps prevent ambiguous definitions and keeps
+things organized in large test cases. A future ``profile`` version of Galaxy
+tools may require ``conditional`` blocks be explicitly defined with this
+directive.
+
+### Examples
+
+The following example demonstrates disambiguation of a parameter (named ``use``)
+which appears in multiple ``param`` names in ``conditional``s in the ``inputs``
+definition of the [disambiguate_cond.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/disambiguate_cond.xml)
+tool.
+
+```xml
+<!-- Can use nested conditional blocks as shown below to disambiguate
+     various nested parameters. -->
+<test>
+    <conditional name="p1">
+        <param name="use" value="False"/>
+    </conditional>
+    <conditional name="p2">
+        <param name="use" value="True"/>
+    </conditional>
+    <conditional name="p3">
+        <param name="use" value="False"/>
+    </conditional>
+    <conditional name="files">
+        <param name="attach_files" value="True" />
+        <conditional name="p4">
+            <param name="use" value="True"/>
+            <param name="file" value="simple_line_alternative.txt" />
+        </conditional>
+    </conditional>
+    <output name="out_file1">
+        <assert_contents>
+            <has_line line="7 4 7" />
+            <has_line line="This is a different line of text." />
+        </assert_contents>
+    </output>
+</test>
+```
+
+The [tophat2](https://github.com/galaxyproject/tools-devteam/blob/master/tools/tophat2/tophat2_wrapper.xml)
+tool demonstrates a real tool that benefits from more structured test cases
+using the ``conditional`` test directive. One such test case from that tool is
+shown below.
+
+```xml
+<!-- Test base-space paired-end reads with user-supplied reference fasta and full parameters -->
+<test>
+    <!-- TopHat commands:
+    tophat2 -o tmp_dir -r 20 -p 1 -a 8 -m 0 -i 70 -I 500000 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search +report_discordant_pairs tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
+    Replace the + with double-dash
+    Rename the files in tmp_dir appropriately
+    -->
+    <conditional name="singlePaired">
+      <param name="sPaired" value="paired"/>
+      <param name="input1" ftype="fastqsanger" value="tophat_in2.fastqsanger"/>
+      <param name="input2" ftype="fastqsanger" value="tophat_in3.fastqsanger"/>
+      <param name="mate_inner_distance" value="20"/>
+      <param name="report_discordant_pairs" value="Yes" />
+    </conditional>
+    <param name="genomeSource" value="indexed"/>
+    <param name="index" value="tophat_test"/>
+    <conditional name="params">
+      <param name="settingsType" value="full"/>
+      <param name="library_type" value="FR Unstranded"/>
+      <param name="read_mismatches" value="5"/>
+      <!-- Error: the read mismatches (5) and the read gap length (2) should be less than or equal to the read edit dist (2) -->
+      <param name="read_edit_dist" value="5" />
+      <param name="bowtie_n" value="Yes"/>
+      <param name="mate_std_dev" value="20"/>
+      <param name="anchor_length" value="8"/>
+      <param name="splice_mismatches" value="0"/>
+      <param name="min_intron_length" value="70"/>
+      <param name="max_intron_length" value="500000"/>
+      <param name="max_multihits" value="40"/>
+      <param name="min_segment_intron" value="50" />
+      <param name="max_segment_intron" value="500000" />
+      <param name="seg_mismatches" value="2"/>
+      <param name="seg_length" value="25"/>
+      <conditional name="indel_search">
+        <param name="allow_indel_search" value="No"/>
+      </conditional>
+      <conditional name="own_junctions">
+        <param name="use_junctions" value="Yes" />
+        <conditional name="gene_model_ann">
+          <param name="use_annotations" value="No" />
+        </conditional>
+        <conditional name="raw_juncs">
+          <param name="use_juncs" value="No" />
+        </conditional>
+        <conditional name="no_novel_juncs">
+          <param name="no_novel_juncs" value="No" />
+        </conditional>
+      </conditional>
+      <conditional name="coverage_search">
+        <param name="use_search" value="No" />
+      </conditional>
+      <param name="microexon_search" value="Yes" />
+      <conditional name="bowtie2_settings">
+        <param name="b2_settings" value="No" />
+      </conditional>
+      <!-- Fusion search params -->
+      <conditional name="fusion_search">
+        <param name="do_search" value="Yes" />
+        <param name="anchor_len" value="21" />
+        <param name="min_dist" value="10000021" />
+        <param name="read_mismatches" value="3" />
+        <param name="multireads" value="4" />
+        <param name="multipairs" value="5" />
+        <param name="ignore_chromosomes" value="chrM"/>
+      </conditional>
+    </conditional>
+    <conditional name="readGroup">
+      <param name="specReadGroup" value="no" />
+    </conditional>
+    <output name="junctions" file="tophat2_out4j.bed" />
+    <output name="accepted_hits" file="tophat_out4h.bam" compare="sim_size" />
+</test>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="TestParamElement" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value must match the name of the
+associated input ``conditional``.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="TestRepeat">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+Specify test parameters below an iteration of a ``repeat`` block with this
+element.
+
+``param`` elements in a ``test`` block can be arranged into nested ``repeat``,
+``conditional``, and ``select`` structures to match the inputs. While this might
+be overkill for simple tests, it helps prevent ambiguous definitions and keeps
+things organized in large test cases. A future ``profile`` version of Galaxy
+tools may require ``repeat`` blocks be explicitly defined with this directive.
+
+### Examples
+
+The test tool [disambiguate_repeats.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/disambiguate_repeats.xml)
+demonstrates the use of this directive.
+
+This first test case demonstrates that this block allows different values for
+the ``param`` named ``input`` to be tested even though this parameter name
+appears in two different ``<repeat>`` elements in the ``<inputs>`` definition.
+
+```xml
+<!-- Can disambiguate repeats and specify multiple blocks using
+     nested structure. -->
+<test>
+    <repeat name="queries">
+        <param name="input" value="simple_line.txt"/>
+    </repeat>
+    <repeat name="more_queries">
+        <param name="input" value="simple_line_alternative.txt"/>
+    </repeat>
+    <output name="out_file1">
+        <assert_contents>
+            <has_line line="This is a line of text." />
+            <has_line line="This is a different line of text." />
+        </assert_contents>
+    </output>
+</test>
+```
+
+The second definition in that file demonstrates repeated ``<repeat>`` blocks
+allowing multiple instances of a single repeat to be specified.
+
+```xml
+<!-- Multiple such blocks can be specified but only with newer API
+     driven tests. -->
+<test>
+    <repeat name="queries">
+        <param name="input" value="simple_line.txt"/>
+    </repeat>
+    <repeat name="queries">
+        <param name="input" value="simple_line_alternative.txt"/>
+    </repeat>
+    <repeat name="more_queries">
+        <param name="input" value="simple_line.txt"/>
+    </repeat>
+    <repeat name="more_queries">
+        <param name="input" value="simple_line_alternative.txt"/>
+    </repeat>
+    <output name="out_file1" file="simple_lines_interleaved.txt"/>
+</test>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="TestParamElement" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value must match the name of the
+associated input ``repeat``.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="TestParam">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set defines the tool's input parameters for executing the tool via the
+functional test framework. See [test](#tool-tests-test) documentation for
+some simple examples of parameters.
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="collection" type="TestCollection" minOccurs="0" maxOccurs="1" />
+      <xs:element name="composite_data" type="TestCompositeData" minOccurs="0" maxOccurs="unbounded" />
+      <xs:element name="metadata" type="TestParamMetadata" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value must match the name of the
+associated input parameter (``param``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value must be one of the legal
+values that can be assigned to an input parameter.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="ftype" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This attribute name should be included
+only with parameters of ``type`` ``data`` for the tool. If this
+attribute name is not included, the functional test framework will attempt to
+determine the data type for the input dataset using the data type sniffers.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="dbkey" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Specifies a ``dbkey`` value for the
+referenced input dataset. This is only valid if the corresponding parameter is
+of ``type`` ``data``.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="TestCompositeData">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Define extra composite input files for test input.</xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="value" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Path relative to test-data of composite file.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="ftype" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Optional datatype of composite file for test input.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="TestCollection">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Definition of a collection for test input.</xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="element" type="TestParam" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="type" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Type of collection to create.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="TestOutput">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set defines the variable that names the output dataset for the
+functional test framework. The functional test framework will execute the tool
+using the parameters defined in the ``<param>`` tag sets and generate a
+temporary file, which will either be compared with the file named in the
+``file`` attribute value or checked against assertions made by a child
+``assert_contents`` tag to verify that the tool is functionally correct.
+
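+The attributes documented below refine how this comparison is performed. For
+example, a sketch of a line-based ``diff`` comparison that sorts the output
+first and tolerates up to two differing lines (file names are placeholders):
+
+```xml
+<output name="out_file1" file="expected.tabular"
+        compare="diff" sort="true" lines_diff="2" />
+```
+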
+        ]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="TestOutputElement" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+    <!-- TODO: This would be more precise if this was required at the top-level. -->
+    <xs:attribute name="name" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+This value is the same as the value of the ``name`` attribute of the ``<data>``
+tag set contained within the tool's ``<outputs>`` tag set.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="file" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+If specified, this value is the name of the output file stored in the target
+``test-data`` directory which will be used to compare the results of executing
+the tool via the functional test framework.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="ftype" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+If specified, this value will be checked against the corresponding output's
+data type. If these do not match, the test will fail.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="sort" type="PermissiveBoolean">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This flag causes the lines of the output
+to be sorted before they are compared to the expected output. This could be
+useful for non-deterministic output.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">An alias for ``file``.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="md5" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+If specified, the target output's MD5 hash should match the value specified
+here. For large static files it may be inconvenient to upload the entire file,
+and this can be used instead.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="checksum" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+If specified, the target output's checksum should match the value specified
+here. This value should have the form ``hash_type:hash_value``
+(e.g. ``sha1:8156d7ca0f46ed7abac98f82e36cfaddb2aca041``). For large static files
+it may be inconvenient to upload the entire file, and this can be used instead.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="compare" type="TestOutputCompareType">
+    </xs:attribute>
+    <xs:attribute name="lines_diff" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">If ``compare`` is set to ``diff``, the number of lines of difference to allow (each line with a modification is a line added and a line removed so this counts as two lines).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="delta" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">If ``compare`` is set to ``sim_size``, this is the number of bytes different allowed.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:group name="TestOutputElement">
+    <xs:choice>
+      <xs:element name="element" type="TestOutput"/>
+      <!-- TODO: This would be more precise if this was only allowed at the top-level. -->
+      <xs:element name="discovered_dataset" type="TestDiscoveredDataset"/>
+      <!-- TODO: To be more precise, only one assert_contents is allowed - this should not be in here. -->
+      <xs:element name="assert_contents" type="TestAssertions">
+        <xs:annotation>
+          <xs:documentation><![CDATA[
+$assertions
+
+### Examples
+
+The following demonstrates a wide variety of text-based and tabular
+assertion statements.
+
+```xml
+<output name="out_file1">
+    <assert_contents>
+        <has_text text="chr7" />
+        <not_has_text text="chr8" />
+        <has_text_matching expression="1274\d+53" />
+        <has_line_matching expression=".*\s+127489808\s+127494553" />
+        <!-- &#009; is XML escape code for tab -->
+        <has_line line="chr7&#009;127471195&#009;127489808" />
+        <has_n_columns n="3" />
+    </assert_contents>
+</output>
+```
+
+The following demonstrates a wide variety of XML assertion statements.
+
+```xml
+<output name="out_file1">
+    <assert_contents>
+        <is_valid_xml />
+        <has_element_with_path path="BlastOutput_param/Parameters/Parameters_matrix" />
+        <has_n_elements_with_path n="9" path="BlastOutput_iterations/Iteration/Iteration_hits/Hit/Hit_num" />
+        <element_text_matches path="BlastOutput_version" expression="BLASTP\s+2\.2.*" />
+        <element_text_is path="BlastOutput_program" text="blastp" />
+        <element_text path="BlastOutput_iterations/Iteration/Iteration_hits/Hit/Hit_def">
+            <not_has_text text="EDK72998.1" />
+            <has_text_matching expression="ABK[\d\.]+" />
+        </element_text>
+    </assert_contents>
+</output>
+```
+
+The following demonstrates verifying XML content with XPath-like expressions.
+
+```xml
+<output name="out_file1">
+    <assert_contents>
+        <attribute_is path="outerElement/innerElement1" attribute="foo" text="bar" />
+        <attribute_matches path="outerElement/innerElement2" attribute="foo2" expression="bar\d+" />
+    </assert_contents>
+</output>
+```
+
+]]></xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <!-- TODO: This would be more percise if this was only allowed at the top-level. -->
+      <xs:element name="extra_files" type="TestExtraFile" />
+      <xs:element name="metadata" type="TestOutputMetadata"/>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="TestParamMetadata">
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for name</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for value</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="TestOutputMetadata">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This directive specifies a test for an output's metadata as an expected key-value pair.
+
+### Example
+
+The functional test tool
+[tool_provided_metadata_1.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/tool_provided_metadata_1.xml)
+provides a demonstration of using this tag.
+
+```xml
+<test>
+  <param name="input1" value="simple_line.txt" />
+  <output name="out1" file="simple_line.txt" ftype="txt">
+    <metadata name="name" value="my dynamic name" />
+    <metadata name="info" value="my dynamic info" />
+    <metadata name="dbkey" value="cust1" />
+  </output>
+</test>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Name of the metdata element to check.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Expected value (as a string) of metadata value.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="TestDiscoveredDataset">
+    <xs:annotation>
+      <xs:documentation><![CDATA[
+
+This directive specifies a test for an output's discovered dataset. It acts as an
+``output`` test tag in many ways and can define any tests of that tag (e.g.
+``assert_contents``, ``value``, ``compare``, ``md5``, ``checksum``, ``metadata``, etc...).
+
+### Example
+
+The functional test tool
+[multi_output_assign_primary.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/multi_output_assign_primary.xml)
+provides a demonstration of using this tag.
+
+```xml
+<test>
+  <param name="num_param" value="7" />
+  <param name="input" ftype="txt" value="simple_line.txt"/>
+  <output name="sample">
+    <assert_contents>
+      <has_line line="1" />
+    </assert_contents>
+    <!-- no sample1 it was consumed by named output "sample" -->
+    <discovered_dataset designation="sample2" ftype="tabular">
+      <assert_contents><has_line line="2" /></assert_contents>
+    </discovered_dataset>
+    <discovered_dataset designation="sample3" ftype="tabular">
+      <assert_contents><has_line line="3" /></assert_contents>
+    </discovered_dataset>
+  </output>
+</test>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:complexContent>
+      <xs:extension base="TestOutput">
+        <xs:attribute type="xs:string" name="designation">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">The designation of the discovered dataset.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:complexContent>
+  </xs:complexType>
+  <xs:complexType name="TestExtraFile">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Define test for extra files on corresponding output.</xs:documentation>
+    </xs:annotation>
+    <xs:complexContent>
+      <xs:extension base="TestOutput">
+        <xs:attribute type="xs:string" name="type">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Extra file type (either ``file`` or ``directory``).</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:complexContent>
+  </xs:complexType>
+  <xs:complexType name="TestOutputCollection">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+Define tests for the contents of an output collection.
+
+``output_collection`` directives should specify a ``name`` and ``type``
+attribute to describe the expected output collection as a whole.
+
+Expectations about collection contents are described using child ``element``
+directives. For nested collections, these child ``element`` directives may
+themselves contain children.
+
+### Examples
+
+The [genetrack](https://github.com/galaxyproject/tools-iuc/blob/master/tools/genetrack/genetrack.xml)
+tool demonstrates basic usage of an ``output_collection`` test expectation.
+
+```xml
+<test>
+    <param name="input" value="genetrack_input2.gff" ftype="gff" />
+    <param name="input_format" value="gff" />
+    <param name="sigma" value="5" />
+    <param name="exclusion" value="20" />
+    <param name="up_width" value="10" />
+    <param name="down_width" value="10" />
+    <param name="filter" value="3" />
+    <output_collection name="genetrack_output" type="list">
+        <element name="s5e20u10d10F3_on_data_1" file="genetrack_output2.gff" ftype="gff" />
+    </output_collection>
+</test>
+```
+
+The [CWPair2](https://github.com/galaxyproject/tools-iuc/blob/master/tools/cwpair2/cwpair2.xml)
+tool demonstrates that ``element``s can specify a ``compare`` attribute just
+like [output](#tool-tests-test-output).
+
+```xml
+<test>
+    <param name="input" value="cwpair2_input1.gff" />
+    <param name="up_distance" value="25" />
+    <param name="down_distance" value="100" />
+    <param name="method" value="all" />
+    <param name="binsize" value="1" />
+    <param name="threshold_format" value="relative_threshold" />
+    <param name="relative_threshold" value="0.0" />
+    <param name="output_files" value="matched_pair" />
+    <output name="statistics_output" file="statistics1.tabular" ftype="tabular" />
+    <output_collection name="MP" type="list">
+        <element name="data_MP_closest_f0u25d100_on_data_1.gff" file="closest_mp_output1.gff" ftype="gff" compare="contains"/>
+        <element name="data_MP_largest_f0u25d100_on_data_1.gff" file="largest_mp_output1.gff" ftype="gff" compare="contains"/>
+        <element name="data_MP_mode_f0u25d100_on_data_1.gff" file="mode_mp_output1.gff" ftype="gff" compare="contains"/>
+    </output_collection>
+</test>
+```
+
+The
+[collection_creates_dynamic_nested](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/collection_creates_dynamic_nested.xml)
+test tool demonstrates the use of nested ``element`` directives as described
+above. Notice also that it tests the output with ``assert_contents`` instead of
+supplying a ``file`` attribute. As hinted at with the ``compare`` attribute
+above, the ``element`` tag can specify any of the test attributes that apply to
+the [output](#tool-tests-test-output) (e.g. ``md5``, ``compare``, ``diff``,
+etc...).
+
+```xml
+<test>
+  <param name="foo" value="bar" />
+  <output_collection name="list_output" type="list:list">
+    <element name="oe1">
+      <element name="ie1">
+        <assert_contents>
+          <has_text_matching expression="^A\n$" />
+        </assert_contents>
+      </element>
+      <element name="ie2">
+        <assert_contents>
+          <has_text_matching expression="^B\n$" />
+        </assert_contents>
+      </element>
+    </element>
+    <element name="oe2">
+      <element name="ie1">
+        <assert_contents>
+          <has_text_matching expression="^C\n$" />
+        </assert_contents>
+      </element>
+      <element name="ie2">
+        <assert_contents>
+          <has_text_matching expression="^D\n$" />
+        </assert_contents>
+      </element>
+    </element>
+    <element name="oe3">
+      <element name="ie1">
+        <assert_contents>
+          <has_text_matching expression="^E\n$" />
+        </assert_contents>
+      </element>
+      <element name="ie2">
+        <assert_contents>
+          <has_text_matching expression="^F\n$" />
+        </assert_contents>
+      </element>
+    </element>
+  </output_collection>
+</test>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="element" type="TestOutput" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+This value is the same as the value of the ``name`` attribute of the
+``<collection>`` tag set contained within the tool's ``<outputs>`` tag set.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="type" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Expected collection type (e.g. ``list``, ``paired``,
+or ``list:paired``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="count" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Number of elements in output collection.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="TestAssertions">
+    <xs:annotation>
+      <xs:documentation><![CDATA[
+This tag set defines a sequence of checks or assertions to run against the
+target output. This tag requires no attributes, but child tags should be used to
+define the assertions to make about the output. The functional test framework
+makes it easy to extend Galaxy with such tags; the default assertion tags
+that come with Galaxy are summarized below, along with examples of each.
+
+The implementations of these tags are simply Python functions defined in the
+[galaxy.tools.verify.asserts](https://github.com/galaxyproject/galaxy/tree/dev/lib/galaxy/tools/verify/asserts)
+module.
+]]>
+      </xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="TestAssertion" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+  <xs:group name="TestAssertion">
+    <xs:choice>
+      <xs:element name="has_text" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the specified ``text`` appears in the output (e.g. ``<has_text text="chr7" />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="not_has_text" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the specified ``text`` does not appear in the output (e.g. ``<not_has_text text="chr8" />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="has_text_matching" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts text matching the specified regular expression (``expression``) appears in the output (e.g. ``<has_text_matching expression="1274\d+53" />`` ).]]>
+        </xs:documentation>
+      </xs:annotation>
+      </xs:element>
+      <xs:element name="has_line" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts a line matching the specified string (``line``) appears in the output (e.g. ``<has_line line="A full example line." />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="has_line_matching" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts a line matching the specified regular expression (``expression``) appears in the output (e.g. ``<has_line_matching expression=".*\s+127489808\s+127494553" />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="has_n_columns" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts tabular output contains the specified number (``n``) of columns (e.g. ``<has_n_columns n="3" />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="is_valid_xml" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the output is a valid XML file (e.g. ``<is_valid_xml />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="has_element_with_path" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the XML output contains at least one element (or tag) with the specified XPath-like ``path`` (e.g. ``<has_element_with_path path="BlastOutput_param/Parameters/Parameters_matrix" />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="has_n_elements_with_path" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the XML output contains the specified number (``n``) of elements (or tags) with the specified XPath-like ``path`` (e.g. ``<has_n_elements_with_path n="9" path="BlastOutput_iterations/Iteration/Iteration_hits/Hit/Hit_num" />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="element_text_is" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the text of the XML element with the specified XPath-like ``path`` is the specified ``text`` (e.g. ``<element_text_is path="BlastOutput_program" text="blastp" />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="element_text_matches">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the text of the XML element with the specified XPath-like ``path`` matches the regular expression defined by ``expression`` (e.g. ``<element_text_matches path="BlastOutput_version" expression="BLASTP\s+2\.2.*" />``).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="attribute_is" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the XML ``attribute`` for the element (or tag) with the specified XPath-like ``path`` is the specified ``text`` (e.g. ``<attribute_is path="outerElement/innerElement1" attribute="foo" text="bar" />`` ).]]>
+          </xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="attribute_matches" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[Asserts the XML ``attribute`` for the element (or tag) with the specified XPath-like ``path`` matches the regular expression specified by ``expression`` (e.g. ``<attribute_matches path="outerElement/innerElement2" attribute="foo2" expression="bar\d+" />``).]]></xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="element_text" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation><![CDATA[This tag allows the developer to recursively specify additional assertions as child elements about just the text contained in the element specified by the XPath-like ``path`` (e.g. ``<element_text path="BlastOutput_iterations/Iteration/Iteration_hits/Hit/Hit_def"><not_has_text text="EDK72998.1" /></element_text>``).]]></xs:documentation>
+        </xs:annotation>
+      </xs:element>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="Inputs">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Consists of all tag sets that define the
+tool's input parameters. Each ``<param>`` tag within the ``<inputs>`` tag set
+maps to a command line parameter within the [command](#tool-command) tag. Most
+tools will not need to specify any attributes on this tag itself.
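+
+A minimal sketch (parameter names are hypothetical):
+
+```xml
+<inputs>
+    <param name="input" type="data" format="tabular" label="Input dataset" />
+    <param name="threshold" type="float" value="0.05" label="Threshold" />
+</inputs>
+```
+]]>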
+</xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="InputElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="action" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">URL used by data source tools.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="check_values" type="PermissiveBoolean" default="true">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Set to ``false`` to disable parameter checking in data source tools.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="method" type="URLmethodType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Data source HTTP action (e.g. ``get`` or ``put``) to use.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="target" type="TargetType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">UI link target to use for data source tools (e.g. ``_top``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="nginx_upload" type="PermissiveBoolean" default="false">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This boolean indicates if this is an upload tool or not.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+
+  <xs:group name="InputElement">
+    <xs:choice>
+      <xs:element name="param" type="Param"/>
+      <xs:element name="repeat" type="Repeat"/>
+      <xs:element name="conditional" type="Conditional"/>
+      <xs:element name="section" type="Section"/>
+      <xs:element name="upload_dataset" type="xs:anyType">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Internal, intentionally undocumented feature.</xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="display" type="xs:string">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Documentation for display</xs:documentation>
+        </xs:annotation>
+      </xs:element>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="InputType" abstract="true">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for InputType</xs:documentation>
+    </xs:annotation>
+    <xs:sequence/>
+  </xs:complexType>
+
+
+  <xs:complexType name="Conditional">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This is a container for conditional parameters in the tool (it must contain
+``when`` tag sets); the command line (or portions thereof) is then wrapped in
+an if-else statement. A good example tool that demonstrates many conditional parameters is
+[biom_convert.xml](https://github.com/galaxyproject/tools-iuc/blob/master/tools/biom_format/biom_convert.xml).
+
+```xml
+<conditional name="input_type">
+    <param name="input_type_selector" type="select" label="Choose the source BIOM format">
+        <option value="tsv" selected="True">Tabular File</option>
+        <option value="biom">BIOM File</option>
+    </param>
+    <when value="tsv">
+        <param name="input_table" type="data" format="tabular" label="Tabular File" argument="--input-fp"/>
+        <param name="process_obs_metadata" type="select" label="Process metadata associated with observations when converting" argument="--process-obs-metadata">
+            <option value="" selected="True">Do Not process metadata</option>
+            <option value="taxonomy">taxonomy</option>
+            <option value="naive">naive</option>
+            <option value="sc_separated">sc_separated</option>
+        </param>
+    </when>
+    <when value="biom">
+        <param name="input_table" type="data" format="biom1" label="Tabular File" argument="--input-fp"/>
+    </when>
+</conditional>
+```
+
+The first directive following the conditional is a [param](#tool-inputs-param);
+this param must be of type ``select`` or ``boolean``. Depending on the value a
+user selects for this "test" parameter, different UI elements will be shown.
+These different paths are described by the ``when`` blocks shown above.
+
+The following Cheetah block demonstrates the use of the ``conditional``
+shown above:
+
+```
+biom convert -i "${input_type.input_table}" -o "${output_table}"
+#if str( $input_type.input_type_selector ) == "tsv":
+    #if $input_type.process_obs_metadata:
+        --process-obs-metadata "${input_type.process_obs_metadata}"
+    #end if
+#end if
+```
+
+Notice that the parameter ``input_table`` appears in both ``when`` clauses,
+so ``${input_type.input_table}`` can be referenced unconditionally, but
+``${input_type.process_obs_metadata}`` must be referenced conditionally with a
+Cheetah ``if`` statement.
+
+A common use of the conditional wrapper is to select between reference data
+managed by the Galaxy admins (for instance via
+[data managers](https://wiki.galaxyproject.org/Admin/Tools/DataManagers)
+) and
+history files. A good example tool that demonstrates this is
+the [Bowtie 2](https://github.com/galaxyproject/tools-devteam/blob/master/tools/bowtie2/bowtie2_wrapper.xml) wrapper.
+
+```xml
+<conditional name="reference_genome">
+  <param name="source" type="select" label="Will you select a reference genome from your history or use a built-in index?" help="Built-ins were indexed using default options. See `Indexes` section of help below">
+    <option value="indexed">Use a built-in genome index</option>
+    <option value="history">Use a genome from the history and build index</option>
+  </param>
+  <when value="indexed">
+    <param name="index" type="select" label="Select reference genome" help="If your genome of interest is not listed, contact the Galaxy team">
+      <options from_data_table="bowtie2_indexes">
+        <filter type="sort_by" column="2"/>
+        <validator type="no_options" message="No indexes are available for the selected input dataset"/>
+      </options>
+    </param>
+  </when>
+  <when value="history">
+    <param name="own_file" type="data" format="fasta" label="Select reference genome" />
+  </when>
+</conditional>
+```
+
+The Bowtie 2 wrapper also demonstrates other conditional paths, such as choosing
+between paired and single-end inputs.
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:complexContent>
+      <xs:extension base="InputType">
+        <xs:sequence>
+          <xs:group ref="InputElement" minOccurs="0" maxOccurs="1" />
+          <xs:element name="when" type="ConditionalWhen" minOccurs="0" maxOccurs="unbounded"/>
+        </xs:sequence>
+        <xs:attribute name="name" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Name for this element</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="value_from" type="xs:string">
+          <xs:annotation>
+            <xs:documentation><![CDATA[Infrequently used option to dynamically access Galaxy internals; this should be avoided.
+
+Galaxy method to execute.]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="value_ref" type="xs:string">
+          <xs:annotation>
+            <xs:documentation><![CDATA[Infrequently used option to dynamically access Galaxy internals; this should be avoided.
+
+Referenced parameter to pass method.]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="value_ref_in_group" type="PermissiveBoolean">
+          <xs:annotation>
+            <xs:documentation><![CDATA[Infrequently used option to dynamically access Galaxy internals; this should be avoided.
+
+Whether the referenced parameter is in the same group.]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+
+        <xs:attribute name="label" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Human readable description for the conditional, unused in the Galaxy UI currently.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+
+      </xs:extension>
+    </xs:complexContent>
+  </xs:complexType>
+
+
+
+  <xs:complexType name="ConditionalWhen">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">This directive describes one potential
+set of inputs for the tool at this depth. See the documentation for the
+[conditional](#tool-inputs-conditional) block for more details and examples (XML
+and corresponding Cheetah conditionals).</xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="InputElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="value" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Value for the tool form test parameter
+corresponding to this ``when`` block.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+
+
+  <xs:complexType name="Repeat">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+See
+[xy_plot.xml](https://github.com/galaxyproject/tools-devteam/blob/master/tools/xy_plot/xy_plot.xml)
+for an example of how to use this tag set. This is a container for any tag sets
+that can be contained within the ``<inputs>`` tag set. When this is used, the
+tool will allow the user to add any number of additional sets of the contained
+parameters (an option to add new iterations will be displayed on the tool form).
+All input parameters contained within the ``<repeat>`` tag can be retrieved by
+enumerating over ``$<name_of_repeat_tag_set>`` in the relevant Cheetah code.
+This returns the rank and the parameter objects of the repeat container. See the
+Cheetah code below.
+
+### Example
+
+This part is contained in the ``<inputs>`` tag set.
+
+```xml
+<repeat name="series" title="Series">
+    <param name="input" type="data" format="tabular" label="Dataset"/>
+    <param name="xcol" type="data_column" data_ref="input" label="Column for x axis"/>
+    <param name="ycol" type="data_column" data_ref="input" label="Column for y axis"/>
+</repeat>
+```
+
+This Cheetah code can be used in the ``<command>`` tag set or the
+``<configfile>`` tag set.
+
+```
+#for $i, $s in enumerate( $series )
+    rank_of_series=$i
+    input_path='${s.input}'
+    x_column=${s.xcol}
+    y_column=${s.ycol}
+#end for
+```
+
+### Testing
+
+This is an example test case with multiple repeat elements for the example above.
+
+```xml
+<test>
+    <repeat name="series">
+        <param name="input" value="tabular1.tsv" ftype="tabular"/>
+        <param name="xcol" value="1"/>
+        <param name="ycol" value="2"/>
+    </repeat>
+    <repeat name="series">
+        <param name="input" value="tabular2.tsv" ftype="tabular"/>
+        <param name="xcol" value="4"/>
+        <param name="ycol" value="2"/>
+    </repeat>
+    <output name="out_file1" file="cool.pdf" ftype="pdf" />
+</test>
+```
+
+See the documentation on the [repeat test directive](#tool-tests-test-repeat).
+
+An older way to specify repeats in a test is to refer to parameters by name using the special format ``<repeat name>_<repeat index>|<param name>``:
+
+```xml
+<test>
+    <param name="series_0|input" value="tabular1.tsv" ftype="tabular"/>
+    <param name="series_0|xcol" value="1"/>
+    <param name="series_0|ycol" value="2"/>
+    <param name="series_1|input" value="tabular2.tsv" ftype="tabular"/>
+    <param name="series_1|xcol" value="4"/>
+    <param name="series_1|ycol" value="2"/>
+    <output name="out_file1" file="cool.pdf" ftype="pdf" />
+</test>
+```
+
+The test tool [disambiguate_repeats.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/disambiguate_repeats.xml)
+demonstrates both testing strategies.
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:complexContent>
+      <xs:extension base="InputType">
+        <xs:sequence>
+          <xs:group ref="InputElement" minOccurs="0" maxOccurs="unbounded"/>
+        </xs:sequence>
+        <xs:attribute name="name" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Name for this element</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="title" type="xs:string" use="required">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"> The title of the repeat section, which will be displayed on the tool form. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="min" type="xs:integer">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"> The minimum number of repeat units. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="max" type="xs:integer">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"> The maximum number of repeat units. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="default" type="xs:integer" default="1">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"> The default number of repeat units. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="help" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Short help description for repeat element.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:complexContent>
+  </xs:complexType>
+
+  <xs:complexType name="Section">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+This tag is used to group parameters into sections of the interface. Sections
+replace the commonly used tactic of hiding advanced options behind a
+conditional; with sections you can easily group a related set of options
+visually.
+
+### Example
+
+The XML configuration is relatively trivial for sections:
+
+```xml
+<inputs>
+    <section name="adv" title="Advanced Options" expanded="False">
+        <param name="plot_color" type="color" label="Track color" />
+    </section>
+</inputs>
+```
+
+In your command template, you'll need to include the section name to access the
+variable:
+
+```
+--color $adv.plot_color
+```
+
+Further examples can be found in the [test case](https://github.com/galaxyproject/galaxy/blob/master/test/functional/tools/section.xml) from [PR #35](https://github.com/galaxyproject/galaxy/pull/35).
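+
+Parameters inside a section can also be addressed in tool tests. A sketch
+following the example above (newer Galaxy releases support the ``section``
+test tag):
+
+```xml
+<test>
+    <section name="adv">
+        <param name="plot_color" value="#ff0000" />
+    </section>
+</test>
+```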
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="InputElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">The internal key used for the section.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="title" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Human readable label for the section.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="expanded" type="PermissiveBoolean" default="false">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Whether the section should be expanded by default or not. If not, the default set values are used.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="help" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Short help description for section, rendered just below the section.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="Param">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+Contained within the ``<inputs>`` tag set - each of these specifies a field that
+will be displayed on the tool form. Ultimately, the values of these form fields
+will be passed as the command line parameters to the tool's executable.
+
+### Common Attributes
+
+The attributes valid for this tag vary widely based on the ``type`` of the
+parameter being described. All the attributes for the ``param`` element are
+documented below for completeness, but the common ones for each type are as
+follows:
+
+$attribute_list:name,type,optional,label,help,argument:4
+
+### Parameter Types
+
+#### ``text``
+
+When ``type="text"``, the parameter is free form text and appears as a text box
+in the tool form.
+
+##### Examples
+
+Sometimes you need labels for data or graph axes, chart titles, etc. This can be
+done using a text field. The following will create a text box 30 characters wide
+with the default value of "V1".
+
+```xml
+<param name="xlab" size="30" type="text" value="V1" label="Label for x axis"/>
+```
+
+The ``size`` attribute can be two-dimensional; if it is, the text box will be
+rendered on the tool form as a text area instead of a single-line text box.
+
+```xml
+<param name="foo" type="text" area="True" size="5x25" />
+```
+
+$attribute_list:value,size,area:5
+
+#### ``integer`` and ``float``
+
+These parameters represent whole numbers and real numbers, respectively.
+
+##### Example
+
+```xml
+<param name="region_size" size="4" type="integer" value="1" label="flanking regions of size" />
+```
+
+$attribute_list:value,min,max:5
+
+#### ``boolean``
+
+This represents a binary true or false value.
+
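+A ``boolean`` is often mapped directly to a command line flag via
+``truevalue``/``falsevalue``; a minimal sketch (the flag shown is
+hypothetical):
+
+```xml
+<param name="skip_header" type="boolean" truevalue="--skip-header" falsevalue="" checked="false" label="Skip the header line?" />
+```
+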
+$attribute_list:checked,truevalue,falsevalue:5
+
+#### ``data``
+
+A dataset from the current history. Multiple formats may be accepted and, via
+the ``multiple`` attribute, multiple datasets may be selected.
+
+##### Examples
+
+The following will find all "coordinate interval files" contained within the
+current history and dynamically populate a select list with them. If they are
+selected, their destination and internal file name will be passed to the
+appropriate command line variable.
+
+```xml
+<param name="interval_file" type="data" format="interval" label="near intervals in"/>
+```
+
+The following demonstrates a ``param`` which may accept multiple files and
+multiple formats.
+
+```xml
+<param format="sam,bam" multiple="true" name="bamOrSamFile" type="data"
+       label="Alignments in BAM or SAM format"
+       help="The set of aligned reads." />
+```
+
+Perhaps counter-intuitively, a ``multiple="true"`` data parameter requires at least one
+data input. If ``optional="true"`` is specified, this condition is relaxed and the user
+is allowed to select 0 datasets. Unfortunately, if 0 datasets are selected the resulting
+value for the parameter during Cheetah templating (such as in a ``command`` block) will
+effectively be a list with one ``None``-like entity in it.
+
+The following idiom can be used to iterate over such a list and build a
+hypothetical ``-B`` parameter for each file; the ``if`` block handles the case
+where a ``None``-like entity appears in the list because no files were selected:
+
+```
+#for $input in $input1
+    #if $input
+        -B "$input"
+    #end if
+#end for
+```
+
+Some example tools using ``multiple="true"`` data parameters include:
+
+- [multi_data_param.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/multi_data_param.xml)
+- [multi_data_optional.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/multi_data_optional.xml)
+
+Additionally, a detailed discussion of handling multiple homogeneous files can be found in
+the [Planemo Documentation](https://planemo.readthedocs.io/en/latest/writing_advanced.html#consuming-collections)
+on this topic.
+
+$attribute_list:format,multiple:5
+
+#### ``select``
+
+The following will create a select list containing the options "Downstream" and
+"Upstream". Depending on the selection, a ``d`` or ``u`` value will be passed to
+the ``$upstream_or_down`` variable on the command line.
+
+```xml
+<param name="upstream_or_down" type="select" label="Get">
+  <option value="u">Upstream</option>
+  <option value="d">Downstream</option>
+</param>
+```
+
+The following will create a checkbox list allowing the user to select
+"Downstream", "Upstream", both, or neither. Depending on the selection, the
+value of ``$upstream_or_down`` will be ``d``, ``u``, ``u,d``, or "".
+
+```xml
+<param name="upstream_or_down" type="select" label="Get" multiple="true" display="checkboxes">
+  <option value="u">Upstream</option>
+  <option value="d">Downstream</option>
+</param>
+```
+
+$attribute_list:data_ref,dynamic_options,display,multiple:5
+
+#### ``data_column``
+
+This parameter type is used to select a column from a referenced tabular
+dataset (see the ``data_ref`` attribute).
+
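+A sketch referencing a hypothetical tabular input named ``input``:
+
+```xml
+<param name="xcol" type="data_column" data_ref="input" numerical="true" label="Column for x axis" />
+```
+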
+$attribute_list:force_select,numerical,use_header_names:5
+
+#### ``drill_down``
+
+$attribute_list:hierarchy:5
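+
+A sketch of a hierarchical ``drill_down`` (option names and values are
+hypothetical):
+
+```xml
+<param name="category" type="drill_down" display="checkbox" hierarchy="recursive" multiple="true" label="Select categories">
+    <options>
+        <option name="Heading 1" value="heading1">
+            <option name="Option 1" value="option1" />
+            <option name="Option 2" value="option2" />
+        </option>
+        <option name="Option 3" value="option3" />
+    </options>
+</param>
+```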
+
+#### ``data_collection``
+
+##### Examples
+
+The following will create a parameter that only accepts paired FASTQ files grouped into a collection.
+
+```xml
+<param name="inputs" type="data_collection" collection_type="paired" label="Input FASTQs" format="fastq">
+</param>
+```
+
+More detailed information on writing tools that consume collections can be found
+in the [planemo documentation](https://planemo.readthedocs.io/en/latest/writing_advanced.html#collections).
+
+$attribute_list:format,collection_type:5
+
+#### ``color``
+
+##### Examples
+
+The following example will create a color selector parameter.
+
+```xml
+<param name="feature_color" type="color" label="Default feature color" value="#ff00ff">
+</param>
+```
+
+Given that the output includes a pound sign, it is often convenient to use a
+sanitizer to prevent Galaxy from escaping the result.
+
+```xml
+<param name="feature_color" type="color" label="Default feature color" value="#ff00ff">
+  <sanitizer>
+    <valid initial="string.letters,string.digits">
+      <add value="#" />
+    </valid>
+  </sanitizer>
+</param>
+```
+
+$attribute_list:value,rgb:5
+
+This covers examples of the most common parameter types; the remaining
+parameter types are more obscure and less likely to be useful for most tool
+authors.
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:complexContent>
+      <xs:extension base="InputType">
+        <xs:sequence>
+          <xs:group ref="ParamElement" minOccurs="0" maxOccurs="unbounded" />
+        </xs:sequence>
+        <xs:attribute name="type" type="ParamType" use="required">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[
+
+Describes the parameter type; each type has different semantics and a different
+tool form widget. Currently valid parameter types are:
+``text``,  ``integer``,  ``float``,  ``boolean``,  ``genomebuild``,  ``select``,
+``color``,  ``data_column``,  ``hidden``,  ``hidden_data``,  ``baseurl``,
+``file``,  ``ftpfile``,  ``data``,  ``data_collection``,  ``library_data``,
+``drill_down``. The supported parameter types are defined in the
+``parameter_types`` dictionary in
+[/lib/galaxy/tools/parameters/basic.py](https://github.com/galaxyproject/galaxy/blob/master/lib/galaxy/tools/parameters/basic.py).
+
+]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="name" type="xs:string">
+            <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[Name for this element. This ``name``
+is used as the Cheetah variable holding the user-supplied parameter value in
+``command`` and ``configfile`` elements. The name should not contain pipes or
+periods (e.g. ``.``). Some "reserved" names are ``REDIRECT_URL``,
+``DATA_URL``, ``GALAXY_URL``.]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <!-- TODO: add unique constraints... -->
+        <xs:attribute name="area" type="PermissiveBoolean">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Boolean indicating if this should be
+rendered as a one line text box (if ``false``) or a multi-line text area (if
+``true``).</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="argument" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[
+
+If the parameter reflects just one command line argument of a certain tool, this
+tag should be set to that particular argument. It is rendered in parentheses
+after the help section, and it will create the ``name`` attribute from the ``argument``
+attribute by stripping the dashes (e.g. if ``argument="--sensitive"`` then
+``name="sensitive"`` is implicit).
+
+]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="label" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">The attribute value will be
+displayed on the tool page as the label of the form field
+(``label="Sort Query"``).</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="help" type="xs:string">
+          <xs:annotation gxdocs:best_practices="parameter-help">
+            <xs:documentation xml:lang="en">Short bit of text, rendered on the
+tool form just below the associated field to provide information about the
+field.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="value" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">The default value for this
+parameter.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="default_value" type="xs:string" gxdocs:deprecated="true">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Deprecated way to specify default value for column parameters (use ``value`` instead).</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="optional" type="xs:string" default="false">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">If ``false``, parameter must have a
+value. Defaults to "false".</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="rgb" type="xs:string" default="false">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">If ``false``, the returned value will be in Hex color code. If ``true``
+it will be a RGB value e.g. 0,0,255. This attribute is only valid when ``type`` is ``color``.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="min" type="xs:float">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Minimum valid parameter value - only
+valid when ``type`` is ``integer`` or ``float``.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="max" type="xs:float">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Maximum valid parameter value - only
+valid when ``type`` is ``integer`` or ``float``.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="format" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Only if ``type`` attribute value is
+``data`` or ``data_collection`` - the list of supported data formats is
+contained in the
+[/config/datatypes_conf.xml.sample](https://github.com/galaxyproject/galaxy/blob/dev/config/datatypes_conf.xml.sample)
+file. Use the file extension.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="collection_type" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[
+
+This is only valid if ``type`` is ``data_collection``. Restrict the kind of
+collection that can be consumed by this parameter (e.g. ``paired``,
+``list:paired``, ``list``). Multiple such collection types can be specified here
+as a comma separated list.
+
+              ]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="data_ref" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[
+
+Only valid if ``type`` attribute value is ``select`` or ``data_column``. Used
+with select lists whose options are dynamically generated based on certain
+metadata attributes of the dataset upon which this parameter depends (usually
+but not always the tool's input dataset).
+
+            ]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="accept_default" type="PermissiveBoolean">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="force_select" type="PermissiveBoolean" gxdocs:deprecated="true">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Used only if the ``type`` attribute
+value is ``data_column``. This is deprecated and is the inverse of ``optional``.
+Set to ``false`` to avoid forcing the user to select an option in the list.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="use_header_names" type="PermissiveBoolean">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Used only if the ``type`` attribute
+value is ``data_column``, if ``true`` Galaxy assumes first row of ``data_ref``
+is a header and builds the select list with these values rather than the more
+generic ``c1`` ... ``cN``.
+</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="display" type="DisplayType">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">This attribute is used only if
+``type`` attribute value is ``select`` - render a select list as a set of check
+boxes or radio buttons. Defaults to a drop-down menu select list.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="multiple" type="PermissiveBoolean">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Allow multiple valus to be selected.
+Valid with ``data`` and ``select`` parameters.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="numerical" type="PermissiveBoolean">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Used only if the ``type`` attribute
+value is ``data_column``, if ``true`` the column will be treated as numerical
+when filtering columns based on metadata.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="hierarchy" type="HierarchyType">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Used only if the ``type`` attribute
+value is ``drill_down``, this attribute determines whether the drill down is
+``recursive`` or ``exact``.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="checked" type="PermissiveBoolean" default="false">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Set to ``true`` if the ``boolean``
+parameter should be checked (or ``true``) by default.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="truevalue" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">The parameter value in the Cheetah
+template if the parameter is ``true`` or checked by the user. Only valid if
+``type`` is ``boolean``.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="falsevalue" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">The parameter value in the Cheetah
+template if the parameter is ``false`` or not checked by the user. Only valid if
+``type`` is ``boolean``.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="size" type="xs:string">
+          <!-- TODO: can be integer or integerxinteger -->
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Used only if ``type`` attribute
+value is ``text``. To create a multi-line text box add an ``area="True"``
+attribute to the param tag. This can be one dimensional (e.g. ``size="40"``)
+or two dimensional (e.g. ``size="5x25"``).</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <!-- metadata_name appears in some wrappers but I think this is a copy
+             and paste problem and doesn't reflect something actually used by
+             Galaxy.
+        -->
+        <!--
+        <xs:attribute name="metadata_name" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Documentation for metadata_name</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        -->
+        <xs:attribute name="dynamic_options" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Deprecated/discouraged method to
+allow access to Python code to generate options for a select list. See
+``code``'s documentation for an example.
+</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:complexContent>
+  </xs:complexType>
+
+  <xs:group name="ParamElement">
+    <xs:choice>
+      <xs:element name="label" type="xs:string">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Documentation for label</xs:documentation>
+        </xs:annotation>
+      </xs:element>
+      <xs:element name="option" type="ParamOption" />
+      <xs:element name="options" type="ParamOptions"/>
+      <xs:element name="validator" type="Validator" />
+      <xs:element name="sanitizer" type="Sanitizer"/>
+      <xs:element name="help" type="xs:string">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Documentation for help</xs:documentation>
+        </xs:annotation>
+      </xs:element>
+    </xs:choice>
+  </xs:group>
+
+  <xs:simpleType name="ParamType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for ParamType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="text"/>
+      <xs:enumeration value="integer"/>
+      <xs:enumeration value="float"/>
+      <xs:enumeration value="color"/>
+      <xs:enumeration value="boolean"/>
+      <xs:enumeration value="genomebuild"/>
+      <xs:enumeration value="library_data"/>
+      <xs:enumeration value="select"/>
+      <xs:enumeration value="data_column"/>
+      <xs:enumeration value="hidden"/>
+      <xs:enumeration value="hidden_data"/>
+      <xs:enumeration value="baseurl"/>
+      <xs:enumeration value="file"/>
+      <xs:enumeration value="data"/>
+      <xs:enumeration value="drill_down"/>
+      <xs:enumeration value="data_collection"/>
+    </xs:restriction>
+  </xs:simpleType>
+
+
+  <xs:complexType name="Command">
+    <xs:annotation gxdocs:best_practices="command-tag">
+      <xs:documentation xml:lang="en"><![CDATA[
+This tag specifies how Galaxy should invoke the tool's executable, passing its
+required input parameter values (the command line specification links the
+parameters supplied in the form with the actual tool executable). Any word
+inside it starting with a dollar sign (``$``) will be treated as a variable whose
+values can be acquired from one of three sources: parameters, metadata, or
+output files. After the substitution of variables with their values, the content
+is interpreted with [Cheetah](https://pythonhosted.org/Cheetah/) and finally given
+to the interpreter specified in the corresponding attribute (if any).
+
+### Examples
+
+The following uses a compiled executable ([bedtools](https://bedtools.readthedocs.io/en/latest/)).
+
+```xml
+<command>bed12ToBed6 -i '$input' > '$output'</command>
+```
+
+A few things to note about even this simple example:
+
+* Input and output variables (boringly named ``input`` and ``output``)
+  are expanded into paths using the ``$`` Cheetah directive.
+* Paths should be quoted because Galaxy dataset file paths may contain spaces.
+* We are building up a shell script, so special characters like ``>`` can be used
+  (in this case the standard output of the bedtools call is written to the path
+  specified by ``'$output'``).
+
+The bed12ToBed6 tool can be found [here](https://github.com/galaxyproject/tools-iuc/blob/master/tools/bedtools/bed12ToBed6.xml).
+
+A more sophisticated bedtools example, which demonstrates the use of loops and
+conditionals and uses whitespace to make a complex command very readable, can
+be found in the
+[annotateBed](https://github.com/galaxyproject/tools-iuc/blob/master/tools/bedtools/annotateBed.xml)
+tool.
+
+```xml
+<command><![CDATA[
+bedtools annotate
+        -i "${inputA}"
+        #if $names.names_select == 'yes':
+            -files
+            #for $bed in $names.beds:
+                '${bed.input}'
+            #end for
+            -names
+            #for $bed in $names.beds:
+                '${bed.inputName}'
+            #end for
+        #else:
+            #set files = '" "'.join( [ str( $file ) for $file in $names.beds ] )
+            -files '${files}'
+            #set names = '" "'.join( [ str( $name.display_name ) for $name in $names.beds ] )
+            -names '${names}'
+        #end if
+        $strand
+        $counts
+        $both
+        > "${output}"
+]]]]><![CDATA[></command>
+```
+
+The following example (taken from [xpath](https://github.com/galaxyproject/tools-iuc/blob/master/tools/xpath/xpath.xml) tool)
+uses an interpreted executable. In this case a Perl script is shipped with the
+tool and the directory of the tool itself is referenced with ``$__tool_directory__``.
+
+```xml
+<command>
+  perl $__tool_directory__/xpath -q -e '$expression' '$input' > '$output'
+</command>
+```
+
+The following example demonstrates accessing metadata from datasets. Metadata values
+(e.g., ``${input.metadata.chromCol}``) are acquired from the ``Metadata`` model associated
+with the objects selected as the values of each of the relevant form field
+parameters in the tool form. Accessing this information is generally enabled using
+the following feature components.
+
+A set of "metadata information" is defined for each supported data type (see the
+``MetadataElement`` objects in the various data types classes in
+[/lib/galaxy/datatypes](https://github.com/galaxyproject/galaxy/tree/dev/lib/galaxy/datatypes)).
+The ``DatasetFilenameWrapper`` class in the
+[/lib/galaxy/tools/wrappers.py](https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/tools/wrappers.py)
+code file wraps a metadata collection to return metadata parameters wrapped
+according to the Metadata spec.
+
+```xml
+<command><![CDATA[
+        #set genome = $input.metadata.dbkey
+        #set datatype = $input.datatype
+        mkdir -p output_dir &&
+        python $__tool_directory__/extract_genomic_dna.py
+        --input '$input'
+        --genome '$genome'
+        #if $input.is_of_type("gff"):
+            --input_format "gff"
+            --columns "1,4,5,7"
+            --interpret_features $interpret_features
+        #else:
+            --input_format "interval"
+            --columns "${input.metadata.chromCol},${input.metadata.startCol},${input.metadata.endCol},${input.metadata.strandCol},${input.metadata.nameCol}"
+        #end if
+        --reference_genome_source $reference_genome_cond.reference_genome_source
+        #if str($reference_genome_cond.reference_genome_source) == "cached"
+            --reference_genome $reference_genome_cond.reference_genome.fields.path
+        #else:
+            --reference_genome $reference_genome_cond.reference_genome
+        #end if
+        --output_format $output_format_cond.output_format
+        #if str($output_format_cond.output_format) == "fasta":
+            --fasta_header_type $output_format_cond.fasta_header_type_cond.fasta_header_type
+            #if str($output_format_cond.fasta_header_type_cond.fasta_header_type) == "char_delimited":
+                --fasta_header_delimiter $output_format_cond.fasta_header_type_cond.fasta_header_delimiter
+            #end if
+        #end if
+        --output '$output'
+]]]]><![CDATA[></command>
+```
+
+In addition to accessing metadata, this example demonstrates:
+
+* ``$input.is_of_type("gff")`` which can be used to check if an input is of a
+  given datatype.
+* ``#set datatype = $input.datatype`` which is the syntax for defining variables
+  in Cheetah.
+
+### Reserved Variables
+
+Galaxy provides a few pre-defined variables which can be used in your command line,
+even though they don't appear in your tool's parameters.
+
+Name | Description
+---- | -----------
+``$__tool_directory__`` | The directory the tool description (XML file) currently resides in (new in 15.03)
+``$__new_file_path__`` | ``config/galaxy.ini``'s ``new_file_path`` value
+``$__tool_data_path__`` | ``config/galaxy.ini``'s tool_data_path value
+``$__root_dir__`` | Top-level Galaxy source directory made absolute via ``os.path.abspath()``
+``$__datatypes_config__`` | ``config/galaxy.ini``'s datatypes_config value
+``$__user_id__`` | User's numeric ID (``id`` column of the ``galaxy_user`` table in the database)
+``$__user_email__`` | User's email address
+``$__app__`` | The ``galaxy.app.UniverseApplication`` instance, gives access to all other configuration file variables (e.g. $__app__.config.output_size_limit). Should be used as a last resort, may go away in future releases.
+
+Additional runtime properties are available as environment variables. Since these
+are not Cheetah variables (the values aren't available until runtime), they should
+be escaped with a backslash (``\``) when appearing in ``command`` or ``configfile`` elements.
+
+Name | Description
+---- | -----------
+``\${GALAXY_SLOTS:-4}`` | Number of cores/threads allocated by the job runner or resource manager to the tool for the given job (here 4 is the default number of threads to use if running via custom runner that does not configure GALAXY_SLOTS or in an older Galaxy runtime).
+
+See the [Planemo docs](https://planemo.readthedocs.io/en/latest/writing_advanced.html#cluster-usage)
+on the topic of ``GALAXY_SLOTS`` for more information and examples.
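+
+For example, a hypothetical multi-threaded tool might be invoked as:
+
+```
+mytool --threads "\${GALAXY_SLOTS:-4}" --input '$input' --output '$output'
+```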
+
+### Attributes
+
+#### ``detect_errors``
+
+If present on the ``command`` tag, this attribute can be one of:
+
+* ``default`` no-op fallback to ``stdio`` tags and erroring on standard error output (for legacy tools).
+* ``exit_code`` error if tool exit code is not 0. (The @jmchilton recommendation).
+* ``aggressive`` error if tool exit code is not 0 or either ``Exception:`` or ``Error:``
+  appears in standard error/output. (The @bgruening recommendation).
+
+For newer tools with ``profile>=16.04``, the default behavior is ``exit_code``.
+Legacy tools default to ``default`` behavior described above (erroring if the tool
+produces any standard error output).
+
+See [PR 117](https://github.com/galaxyproject/galaxy/pull/117) for more implementation
+information and discussion on the ``detect_errors`` attribute.
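+
+For example (a sketch reusing the bed12ToBed6 command from above):
+
+```xml
+<command detect_errors="exit_code">bed12ToBed6 -i '$input' > '$output'</command>
+```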
+
+#### ``strict``
+
+This boolean forces the shell ``set -e`` directive on, so that in a multi-part
+command, if any part fails, the job exits with a non-zero exit code.
+This is enabled by default for tools with ``profile>=16.04`` and disabled on
+legacy tools.
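+
+A sketch of a multi-part command where ``strict`` matters (the tool name is
+hypothetical):
+
+```xml
+<command strict="true">mkdir outputs && mytool --out outputs '$input'</command>
+```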
+
+#### ``interpreter``
+
+Older tools may define an ``interpreter`` attribute on the command, but this is
+deprecated; using the ``$__tool_directory__`` variable is superior.
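+
+For example (the script name is hypothetical):
+
+```xml
+<!-- deprecated -->
+<command interpreter="python">my_script.py '$input' '$output'</command>
+<!-- preferred -->
+<command>python '$__tool_directory__/my_script.py' '$input' '$output'</command>
+```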
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="detect_errors" type="xs:string">
+          <xs:annotation>
+            <xs:documentation></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="interpreter" type="xs:string" gxdocs:deprecated="true">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">This attribute defines the programming language in which the tool's executable file is written. Any language can be used (tools can be written in Python, C, Perl, Java, etc.). The executable file must be in the same directory of the XML file. If instead this attribute is not specified, the tag content should be a Bash command calling executable(s) available in the $PATH. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="strict" type="xs:boolean">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+
+  <xs:complexType name="ParamOption">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+See [/tools/filters/sorter.xml](https://github.com/galaxyproject/galaxy/blob/master/tools/filters/sorter.xml)
+for typical examples of how to use this tag set. This directive is used to describe
+static lists of options and is contained
+within the [param](#tool-inputs-param) directive when the ``type`` attribute
+value is ``select`` (i.e. ``<param type="select" ...>``).
+
+### Example
+
+```xml
+<param name="style" type="select" label="with flavor">
+    <option value="num">Numerical sort</option>
+    <option value="gennum">General numeric sort</option>
+    <option value="alpha">Alphabetical sort</option>
+</param>
+```
+
+An option can also be annotated with ``selected="true"`` to specify a
+default option.
+
+```xml
+<param name="col" type="select" label="From">
+    <option value="0" selected="true">Column 1 / Sequence name</option>
+    <option value="1">Column 2 / Source</option>
+    <option value="2">Column 3 / Feature</option>
+    <option value="6">Column 7 / Strand</option>
+    <option value="7">Column 8 / Frame</option>
+</param>
+```
+]]>
+</xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="value" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[The value of the
+corresponding variable when used in the Cheetah template. Also the value that
+should be used in building test cases and used when building requests for the
+API.]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="selected" type="PermissiveBoolean" default="false">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">A boolean parameter indicating
+if the corresponding option is selected by default (the default is ``false``).
+</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+
+  <xs:complexType name="ParamOptions">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+See [/tools/extract/liftOver_wrapper.xml](https://github.com/galaxyproject/galaxy/blob/master/tools/extract/liftOver_wrapper.xml)
+for an example of how to use this tag set. This tag set is optionally contained
+within the ``<param>`` tag when the ``type`` attribute value is ``select`` or
+``data`` and is used to dynamically generate lists of options. This tag set
+dynamically creates a list of options whose values can be
+obtained from a predefined file stored locally or a dataset selected from the
+current history.
+
+There are at least five basic ways to use this tag: four of these correspond to
+a ``from_XXX`` attribute on the ``options`` directive, and the fifth is to
+exclusively use ``filter``s to populate options.
+
+* ``from_data_table`` - The options for the select list are dynamically obtained
+  from a file specified in the Galaxy configuration file
+  ``tool_data_table_conf.xml`` or from a Tool Shed installed data manager.
+* ``from_dataset`` - The options for the select list are dynamically obtained
+  from an input dataset selected for the tool from the current history.
+* ``from_file`` - The options for the select list are dynamically obtained from
+  a file. This mechanism is discouraged in favor of the more generic
+  ``from_data_table``.
+* ``from_parameter`` - The options for the select list are dynamically obtained
+  from a parameter.
+* Using ``filter``s - various filters can be used to populate options, see
+  examples in the [filter](#tool-inputs-param-options-filter) documentation.
+
+### ``from_data_table``
+
+See Galaxy's
+[data tables documentation](https://wiki.galaxyproject.org/Admin/Tools/Data%20Tables)
+for information on setting up data tables.
+
+Once a data table has been configured and populated, these can be easily
+leveraged via tools.
+
+This ``conditional`` block in the
+[bowtie2](https://github.com/galaxyproject/tools-devteam/blob/master/tools/bowtie2/bowtie2_wrapper.xml)
+wrapper demonstrates using ``from_data_table`` options as an
+alternative to local reference data.
+
+```xml
+<conditional name="reference_genome">
+  <param name="source" type="select" label="Will you select a reference genome from your history or use a built-in index?" help="Built-ins were indexed using default options. See `Indexes` section of help below">
+    <option value="indexed">Use a built-in genome index</option>
+    <option value="history">Use a genome from the history and build index</option>
+  </param>
+  <when value="indexed">
+    <param name="index" type="select" label="Select reference genome" help="If your genome of interest is not listed, contact the Galaxy team">
+      <options from_data_table="bowtie2_indexes">
+        <filter type="sort_by" column="2"/>
+        <validator type="no_options" message="No indexes are available for the selected input dataset"/>
+      </options>
+    </param>
+  </when>
+  <when value="history">
+    <param name="own_file" type="data" format="fasta" label="Select reference genome" />
+  </when>
+</conditional>
+```
+
+A minimal example wouldn't even need the ``filter`` or ``validator`` above, but
+they are frequently nice features to add to your wrapper and can improve the user
+experience of a tool.
+
+### ``from_dataset``
+
+The following example is taken from the Mothur tool
+[remove.lineage.xml](https://github.com/galaxyproject/tools-iuc/blob/master/tools/mothur/remove.lineage.xml)
+and demonstrates generating options from a dataset directly.
+
+```xml
+<param name="taxonomy" type="data" format="mothur.seq.taxonomy" label="taxonomy - Taxonomy" help="please make sure your file has no quotation marks in it"/>
+<param name="taxons" type="select" size="120" optional="true" multiple="true" label="Browse Taxons from Taxonomy">
+    <options from_dataset="taxonomy">
+        <column name="name" index="1"/>
+        <column name="value" index="1"/>
+        <filter type="unique_value" name="unique_taxon" column="1"/>
+        <filter type="sort_by" name="sorted_taxon" column="1"/>
+    </options>
+    <sanitizer>
+        <valid initial="default">
+            <add preset="string.printable"/>
+            <add value=";"/>
+            <remove value="&quot;"/>
+            <remove value="'"/>
+        </valid>
+    </sanitizer>
+</param>
+```
+
+Filters can also be used to generate options from a dataset directly, as the
+example below demonstrates (many more examples are present in the
+[filter](#tool-inputs-param-options-filter) documentation).
+
+```xml
+<param name="species1" type="select" label="When Species" multiple="false">
+    <options>
+        <filter type="data_meta" ref="input1" key="species" />
+    </options>
+</param>
+```
+
+### ``from_file``
+
+The following example is for Blast databases. In this example users may select
+a database that is pre-formatted and cached on Galaxy clusters. When a new
+database is available, admins must add it to the local file named
+``blastdb.loc``. All such databases in that file are included in the options of
+the select list. For a local instance, the file (e.g. ``blastdb.loc`` or
+``alignseq.loc``) must be stored in the configured
+[tool_data_path](https://github.com/galaxyproject/galaxy/tree/master/tool-data)
+directory. In this example, the option names and values are taken from column 0
+of the file.
+
+```xml
+<param name="source_select" type="select" display="radio" label="Choose target database">
+    <options from_file="blastdb.loc">
+        <column name="name" index="0"/>
+        <column name="value" index="0"/>
+    </options>
+</param>
+```
+
+In general, ``from_file`` should be considered deprecated and ``from_data_table``
+should be preferred.
+
+### ``from_parameter``
+
+This variant of the ``options`` directive is discouraged because it exposes
+internal Galaxy structures. See the older
+[bowtie](https://github.com/galaxyproject/tools-devteam/blob/master/tools/bowtie_wrappers/bowtie_wrapper.xml)
+wrappers for an example of these.
+
+### Other Ways to Dynamically Generate Options
+
+Though deprecated and discouraged, [code](#tool-code) blocks can also be
+used to generate dynamic options.
+
+]]>
+</xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="OptionsElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="from_dataset" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for from_dataset</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="from_file" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for from_file</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="from_data_table" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for from_data_table</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="from_parameter" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for from_parameter</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="options_filter_attribute" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for options_filter_attribute</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="transform_lines" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for transform_lines</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="startswith" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Documentation for startswith</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:group name="OptionsElement">
+    <xs:choice>
+      <xs:element name="filter" type="Filter" minOccurs="0" maxOccurs="unbounded"/>
+      <xs:element name="column" type="Column" minOccurs="0" maxOccurs="unbounded"/>
+      <xs:element name="validator" type="Validator" minOccurs="0" maxOccurs="1"/>
+      <xs:element name="file" type="xs:string" minOccurs="0" maxOccurs="unbounded">
+        <xs:annotation>
+          <xs:documentation xml:lang="en">Documentation for file</xs:documentation>
+        </xs:annotation>
+      </xs:element>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="Column">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Optionally contained within an
+``<options>`` tag set - specifies columns used in building select options from a
+file stored locally (i.e. index or tool data) or a dataset in the
+current history.
+
+Any number of columns may be described, but at least one must be given the name
+``value``; it will serve as the value of this parameter in the Cheetah
+template and elsewhere (e.g. in the API).
+
+If a column named ``name`` is defined, this too has special meaning and it will
+be the value the tool form user sees for each option. If no ``name`` column
+appears, ``value`` will serve as the name.
+
+### Examples
+
+The following fragment shows options from the dataset in the current history
+that has been selected as the value of the parameter named ``input1``.
+
+```xml
+<options from_dataset="input1">
+    <column name="name" index="0"/>
+    <column name="value" index="0"/>
+</options>
+```
+
+The [interval2maf](https://github.com/galaxyproject/galaxy/blob/dev/tools/maf/interval2maf.xml)
+tool makes use of this tag with files from a history, and the
+[star_fusion](https://github.com/galaxyproject/tools-iuc/blob/master/tools/star_fusion/star_fusion.xml)
+tool makes use of this to reference a data table.
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Name given to the column with index
+``index``, the names ``name`` and ``value`` have special meaning as described
+above.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="index" type="xs:decimal" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">0-based index of the column in the
+target file.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="Validator">
+    <xs:annotation>
+
+      <xs:documentation xml:lang="en"><![CDATA[
+
+See the
+[annotation_profiler](https://github.com/galaxyproject/tools-devteam/blob/master/tools/annotation_profiler/annotation_profiler.xml)
+tool for an example of how to use this tag set. This tag set is contained within
+the ``<param>`` tag set - it applies a validator to the containing parameter.
+
+### Examples
+
+The following demonstrates a simple validator ``unspecified_build`` ensuring
+that a dbkey is present on the selected dataset. This example is taken from the
+[extract_genomic_dna](https://github.com/galaxyproject/tools-iuc/blob/master/tools/extract_genomic_dna/extract_genomic_dna.xml#L42)
+tool.
+
+```xml
+<param name="input" type="data" format="gff,interval" label="Fetch sequences for intervals in">
+    <validator type="unspecified_build" />
+</param>
+```
+
+Along the same line, the following example taken from
+[samtools_mpileup](https://github.com/galaxyproject/tools-devteam/blob/master/tool_collections/samtools/samtools_mpileup/samtools_mpileup.xml)
+ensures that a dbkey is present and that FASTA indices in the ``fasta_indexes``
+tool data table are present.
+
+```xml
+<param format="bam" label="BAM file(s)" name="input_bam" type="data" min="1" multiple="True">
+    <validator type="unspecified_build" />
+    <validator type="dataset_metadata_in_data_table" metadata_name="dbkey" table_name="fasta_indexes" metadata_column="1"
+               message="Sequences are not currently available for the specified build." />
+</param>
+```
+
+In this older, somewhat deprecated example - the genome build of the dataset
+must be stored on the Galaxy server and the name of the genome (``dbkey``) must
+be one of the values in the first column of the file ``alignseq.loc`` - that
+requirement can be expressed with the validator below. In general,
+``dataset_metadata_in_file`` should be considered deprecated in favor of
+``dataset_metadata_in_data_table``.
+```xml
+<validator type="dataset_metadata_in_file"
+           filename="alignseq.loc"
+           metadata_name="dbkey"
+           metadata_column="1"
+           message="Sequences are not currently available for the specified build."
+           split=" "
+           line_startswith="seq" />
+```
+
+A very common validator simply ensures that a Python expression evaluates as
+true for a specified value. In the following example, paths/names that
+downstream tools use in filenames may not contain ``..``.
+
+```xml
+<validator type="expression" message="No two dots (..) allowed">'..' not in value</validator>
+```
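+
+Along similar lines, a minimal sketch of an ``in_range`` validator on a
+hypothetical integer parameter (the parameter name and bounds here are
+illustrative, not taken from an existing tool):
+
+```xml
+<param name="quality_cutoff" type="integer" value="20" label="Quality cutoff">
+    <validator type="in_range" min="0" max="93" message="Quality cutoff must be between 0 and 93"/>
+</param>
+```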
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="type" type="ValidatorType" use="required">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[The list of supported
+validators is in the ``validator_types`` dictionary in
+[/lib/galaxy/tools/parameters/validation.py](https://github.com/galaxyproject/galaxy/blob/dev/lib/galaxy/tools/parameters/validation.py).
+Valid values include: ``expression``, ``regex``, ``in_range``, ``length``,
+``metadata``, ``unspecified_build``, ``no_options``, ``empty_field``,
+``dataset_metadata_in_file``, ``dataset_metadata_in_data_table``,
+``dataset_ok_validator``]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="message" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">
+The error message displayed on the tool form if validation fails. </xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="check" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Comma-seperated list of metadata
+fields to check for if type is ``metadata``. If not specified, all non-optional
+metadata fields will be checked unless they appear in the list of fields
+specified by the ``skip`` attribute.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="table_name" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Tool data table name to check against
+if ``type`` is ``dataset_metadata_in_data_table``. See the documentation for
+[tool data tables](https://wiki.galaxyproject.org/Admin/Tools/Data%20Tables)
+and [data managers](https://wiki.galaxyproject.org/Admin/Tools/DataManagers) for
+more information.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="filename" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Tool data filename to check against
+if ``type`` is ``dataset_metadata_in_file``. File should be present Galaxy's
+``tool-data`` directory.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="metadata_name" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Target metadata attribute name for
+``dataset_metadata_in_data_table`` and ``dataset_metadata_in_file`` options.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="metadata_column" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Target column for metadata attribute
+in ``dataset_metadata_in_data_table`` and ``dataset_metadata_in_file`` options.
+This can be an integer index to the column or a column name.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="line_startswith" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Used to indicate lines in the file
+being used for validation start with a this attribute value.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="min" type="xs:decimal">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">When the ``type`` attribute value is
+``in_range`` - this is the minimum number allowed.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="max" type="xs:decimal">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">When the ``type`` attribute value is
+``in_range`` - this is the maximum number allowed.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="exclude_min" type="xs:boolean" default="false">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">When the ``type`` attribute value is
+``in_range`` - this boolean indicates if the ``min`` value is allowed.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="exclude_max" type="xs:boolean" default="false">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">When the ``type`` attribute value is
+``in_range`` - this boolean indicates if the ``max`` value is allowed.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="split" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">If ``type`` is `dataset_metadata_in_file``,
+this attribute is the column separator to use for values in the specified file.
+This default is ``\t`` and due to a bug in older versions of Galaxy, should
+not be modified.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="skip" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Comma-seperated list of metadata
+fields to skip if type is ``metadata``. If not specified, all non-optional
+metadata fields will be checked unless ``check`` attribute is specified.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+  <xs:complexType name="Sanitizer">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+See
+[/tools/filters/grep.xml](https://github.com/galaxyproject/galaxy/blob/dev/tools/filters/grep.xml)
+for a typical example of how to use this tag set. This tag set is used to
+replace the basic parameter sanitization with custom directives. This tag set is
+contained within the ``<param>`` tag set - it contains a set of ``<valid>`` and
+``<mapping>`` tags.
+
+### Examples
+
+This example replaces the invalid character default of ``X`` with the empty
+string (so invalid characters are effectively dropped instead of replaced with
+``X``) and indicates the only valid characters for this input are ASCII letters,
+ASCII digits, and ``_``.
+
+```xml
+<param name="mystring" type="text" label="Say something interesting">
+    <sanitizer invalid_char="">
+        <valid initial="string.letters,string.digits"><add value="_" /> </valid>
+    </sanitizer>
+</param>
+```
+
+This example allows many more valid characters and specifies that ``'`` will just
+be dropped from the input.
+
+```xml
+<sanitizer>
+    <valid initial="string.printable">
+        <remove value="'"/>
+    </valid>
+    <mapping initial="none">
+        <add source="'" target=""/>
+    </mapping>
+</sanitizer>
+```
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="SanitizerElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="sanitize" type="PermissiveBoolean" default="true">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This boolean parameter determines if the
+input is sanitized at all (the default is ``true``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="invalid_char" type="xs:string" default="X">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">The attribute specifies the character
+used as a replacement for invalid characters.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:group name="SanitizerElement">
+    <xs:choice>
+      <xs:element name="valid" type="SanitizerValid"/>
+      <xs:element name="mapping" type="SanitizerMapping"/>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="SanitizerValid">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Contained within the
+``<sanitizer>`` tag set, these are used to specify a list of allowed characters.
+Contains ``<add>`` and ``<remove>`` tags.]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="SanitizerValidElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="initial" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This describes the initial characters to
+allow as valid, the default is ``string.letters + string.digits + " -=_.()/+*^,:?!"``</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="SanitizerValidAdd">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">This directive is used to add individual
+characters or preset lists of characters. Character must not be allowed as a
+valid input for the mapping to occur. Preset lists include default and none as well as those available from string.* (e.g. ``string.printable``).</xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="preset" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Add target characters from the list of valid characters (e.g. ``string.printable``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Add a character to the list of valid characters.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="SanitizerValidRemove">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">This directive is used to remove
+individual characters or preset lists of characters.
+Character must not be allowed as a valid input for the mapping to occur.
+Preset lists include default and none as well as those available from string.* (e.g. ``string.printable``).</xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="preset" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Remove characters from the list of valid characters (e.g. ``string.printable``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">A character to remove from the list of valid characters.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:group name="SanitizerValidElement">
+    <xs:choice>
+      <xs:element name="add" type="SanitizerValidAdd"/>
+      <xs:element name="remove" type="SanitizerValidRemove"/>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="SanitizerMapping">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Contained within the <sanitizer> tag set. Used to specify a mapping of disallowed character to replacement string. Contains <add> and <remove> tags.]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="SanitizerMappingElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="initial" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">initial character mapping (default is ``galaxy.util.mapped_chars``)</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="SanitizerMappingAdd">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Use to add character mapping during sanitization. Character must not be allowed as a valid input for the mapping to occur.]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="source" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Replace all occurrences of this character with the string of ``target``.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="target" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Replace all occurrences of ``source`` with this string</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="SanitizerMappingRemove">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Use to remove character mapping during sanitization.]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="source" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Character to remove from mapping.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:group name="SanitizerMappingElement">
+    <xs:choice>
+      <xs:element name="add" type="SanitizerMappingAdd" minOccurs="0" maxOccurs="unbounded"/>
+      <xs:element name="remove" type="SanitizerMappingRemove" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="Filter">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Optionally contained within an
+``<options>`` tag set - filter out values obtained from a locally stored file (e.g.
+a tool data table) or a dataset in the current history.
+
+### Examples
+
+The following example from Mothur's
+[remove.groups.xml](https://github.com/galaxyproject/tools-iuc/blob/master/tools/mothur/remove.groups.xml)
+tool demonstrates filtering a select list based on the metadata of an input to
+the tool.
+
+```xml
+<param name="group_in" type="data" format="mothur.groups,mothur.count_table" label="group or count table - Groups"/>
+<param name="groups" type="select" label="groups - Pick groups to remove" multiple="true" optional="false">
+    <options>
+        <filter type="data_meta" ref="group_in" key="groups"/>
+    </options>
+</param>
+```
+
+This more advanced example, taken from Mothur's
+[remove.lineage.xml](https://github.com/galaxyproject/tools-iuc/blob/master/tools/mothur/remove.lineage.xml)
+tool, demonstrates using filters to sort a list and remove duplicate entries.
+
+```xml
+<param name="taxonomy" type="data" format="mothur.cons.taxonomy" label="constaxonomy - Constaxonomy file. Provide either a constaxonomy file or a taxonomy file" help="please make sure your file has no quotation marks in it"/>
+<param name="taxons" type="select" size="120" optional="true" multiple="true" label="Browse Taxons from Taxonomy">
+    <options from_dataset="taxonomy">
+        <column name="name" index="2"/>
+        <column name="value" index="2"/>
+        <filter type="unique_value" name="unique_taxon" column="2"/>
+        <filter type="sort_by" name="sorted_taxon" column="2"/>
+    </options>
+    <sanitizer>
+        <valid initial="default">
+            <add preset="string.printable"/>
+            <add value=";"/>
+            <remove value="&quot;"/>
+            <remove value="'"/>
+        </valid>
+    </sanitizer>
+</param>
+```
+
+This example taken from the
+[hisat2](https://github.com/galaxyproject/tools-iuc/blob/master/tools/hisat2/hisat2.xml)
+tool demonstrates filtering values from a tool data table.
+
+```xml
+<param help="If your genome of interest is not listed, contact the Galaxy team" label="Select a reference genome" name="index" type="select">
+    <options from_data_table="hisat2_indexes">
+        <filter column="2" type="sort_by" />
+        <validator message="No genomes are available for the selected input dataset" type="no_options" />
+    </options>
+</param>
+```
+
+The
+[gemini_load.xml](https://github.com/galaxyproject/tools-iuc/blob/master/tools/gemini/gemini_load.xml)
+tool demonstrates adding values to an option list using ``filter``s.
+
+```xml
+<param name="infile" type="data" format="vcf" label="VCF file to be loaded in the GEMINI database" help="Only build 37 (aka hg19) of the human genome is supported.">
+    <options>
+        <filter type="add_value" value="hg19" />
+        <filter type="add_value" value="Homo_sapiens_nuHg19_mtrCRS" />
+        <filter type="add_value" value="hg_g1k_v37" />
+    </options>
+</param>
+```
+
+This fragment from ``maf_to_interval.xml``, meanwhile, demonstrates removing items.
+
+```xml
+<param name="species" type="select" label="Select additional species"
+       display="checkboxes" multiple="true"
+       help="The species matching the dbkey of the alignment is always included.
+       A separate history item will be created for each species.">
+    <options>
+        <filter type="data_meta" ref="input1" key="species" />
+        <filter type="remove_value" meta_ref="input1" key="dbkey" />
+    </options>
+</param>
+```
+
+This example taken from
+[snpSift_dbnsfp.xml](https://github.com/galaxyproject/tools-iuc/blob/master/tool_collections/snpsift/snpsift_dbnsfp/snpSift_dbnsfp.xml)
+demonstrates splitting up strings into multiple values.
+
+```xml
+<param name="annotations" type="select" multiple="true" display="checkboxes" label="Annotate with">
+    <options from_data_table="snpsift_dbnsfps">
+        <column name="name" index="4"/>
+        <column name="value" index="4"/>
+        <filter type="param_value" ref="dbnsfp" column="3" />
+        <filter type="multiple_splitter" column="4" separator=","/>
+    </options>
+</param>
+```
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="type" type="FilterType" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+These values are defined in the module
+[/lib/galaxy/tools/parameters/dynamic_options.py](https://github.com/galaxyproject/galaxy/blob/master/lib/galaxy/tools/parameters/dynamic_options.py)
+in the ``filter_types`` dictionary. Currently defined values are: ``data_meta``,
+``param_value``, ``static_value``, ``unique_value``, ``multiple_splitter``,
+``attribute_value_splitter``, ``add_value``, ``remove_value``, and
+``sort_by``]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="column" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Column targeted by this filter - this
+attribute is unused and invalid if ``type`` is ``add_value`` or ``remove_value``.
+This can be a column index or a column name.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="name" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Name displayed for value to add (only
+used with ``type`` of ``add_value``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="ref" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">The attribute name of the reference file
+(tool data) or input dataset. Only used when ``type`` is
+``data_meta`` (required), ``param_value`` (required), or ``remove_value``
+(optional).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="key" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">When ``type`` is ``data_meta``, ``param_value``,
+or ``remove_value`` - this is the name of the metadata key of ref to filter by.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="multiple" type="PermissiveBoolean" default="false">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[For types ``data_meta`` and
+``remove_value``, whether option values are multiple. Columns will be split by
+separator. Defaults to ``false``.]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="separator" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[When ``type`` is ``data_meta``,
+``multiple_splitter``, or ``remove_value`` - this is used to split one value
+into multiple parts. When ``type`` is ``data_meta`` or ``remove_value`` this is
+only used if ``multiple`` is set to ``true``.]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="keep" type="PermissiveBoolean" default="true">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">If ``true``, keep columns matching the
+value, if ``false`` discard columns matching the value. Used when ``type`` is
+either ``static_value`` or ``param_value``.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Target value of the operations - has
+slightly different meanings depending on ``type``. For instance when ``type`` is
+``add_value`` it is the value to add to the list and when ``type`` is
+``static_value`` it is the value compared against.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="ref_attribute" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Only used when ``type`` is
+``param_value``. Period (``.``) separated attribute chain of input (``ref``)
+attributes to use as value for filter.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="index" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Used when ``type`` is ``add_value``, it
+is the index into the list to add the option to. If not set, the option will be
+added to the end of the list.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="Outputs">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+Container tag set for the ``<data>`` and ``<collection>`` tag sets.
+The files and collections created by tools as a result of their execution are
+named by Galaxy. You specify the number and type of your output files using the
+contained ``<data>`` and ``<collection>`` tags. These may be passed to your tool
+executable through command line variables just like the parameters described in
+the ``<inputs>`` documentation.
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="OutputsElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+  <xs:group name="OutputsElement">
+    <xs:choice>
+      <xs:element name="data" type="Data"/>
+      <xs:element name="collection" type="Collection" />
+    </xs:choice>
+  </xs:group>
+  <xs:group name="DataElement">
+    <xs:choice>
+      <xs:element name="change_format" type="ChangeFormat"/>
+      <xs:element name="filter" type="OutputFilter" />
+      <xs:element name="discover_datasets" type="OutputDiscoverDatasets" />
+      <xs:element name="actions" type="Actions"/>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="Data">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set is contained within the ``<outputs>`` tag set, and it defines the
+output data description for the files resulting from the tool's execution. The
+value of the attribute ``label`` can be acquired from input parameters or metadata
+in the same way that the command line parameters are (discussed in the
+``<command>`` tag set section above).
+
+### Examples
+
+The following will create a variable called ``$out_file1`` with data type
+``pdf``.
+
+```xml
+<outputs>
+    <data format="pdf" name="out_file1" />
+</outputs>
+```
+
+The valid values for format can be found in
+[/config/datatypes_conf.xml.sample](https://github.com/galaxyproject/galaxy/blob/dev/config/datatypes_conf.xml.sample).
+
+The following will create a dataset in the history panel whose data type is the
+same as that of the input dataset selected (and named ``input1``) for the tool.
+
+```xml
+<outputs>
+    <data format_source="input1" name="out_file1" metadata_source="input1"/>
+</outputs>
+```
+
+The following will create datasets in the history panel, setting the output data
+type to be the same as that of an input dataset named by the ``format_source``
+attribute. Note that a conditional name is not included (e.g. ``qfile`` rather
+than ``qual.qfile``), so two separate conditional blocks should not contain
+parameters with the same name.
+
+```xml
+<inputs>
+    <!-- fasta may be an aligned fasta that subclasses Fasta -->
+    <param name="fasta" type="data" format="fasta" label="fasta - Sequences"/>
+    <conditional name="qual">
+        <param name="add" type="select" label="Trim based on a quality file?" help="">
+            <option value="no">no</option>
+            <option value="yes">yes</option>
+        </param>
+        <when value="no"/>
+        <when value="yes">
+            <!-- qual454, qualsolid, qualillumina -->
+            <param name="qfile" type="data" format="qual" label="qfile - a quality file"/>
+        </when>
+    </conditional>
+</inputs>
+<outputs>
+    <data format_source="fasta" name="trim_fasta"
+          label="${tool.name} on ${on_string}: trim.fasta"/>
+    <data format_source="qfile" name="trim_qual"
+          label="${tool.name} on ${on_string}: trim.qual">
+        <filter>qual['add'] == 'yes'</filter>
+    </data>
+</outputs>
+```
+
+Assume that the tool includes an input parameter named ``database`` which is a
+select list (as shown below). Also assume that the user selects the first option
+in the ``$database`` select list. Then the following will ensure that the tool
+produces a tabular data set whose associated history item has the label ``Blat
+on Human (hg18)``.
+
+```xml
+<inputs>
+    <param format="tabular" name="input" type="data" label="Input stuff"/>
+    <param type="select" name="database" label="Database">
+        <option value="hg18">Human (hg18)</option>
+        <option value="dm3">Fly (dm3)</option>
+    </param>
+</inputs>
+<outputs>
+    <data format="input" name="output" label="Blat on ${database.value_label}" />
+</outputs>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="DataElement" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+    <!-- TODO: add a unique constraint for action. -->
+    <xs:attribute name="name" type="xs:string">
+        <xs:annotation>
+          <xs:documentation xml:lang="en"><![CDATA[Name for this output. This
+``name`` is used as the Cheetah variable containing the Galaxy assigned output
+path in ``command`` and ``configfile`` elements. The name should not contain
+pipes or periods (e.g. ``.``).]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="auto_format" type="PermissiveBoolean">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+If ``true``, this output will be sniffed and its format determined automatically by Galaxy.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="format" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">The short name for the output datatype.
+The valid values for format can be found in
+[/config/datatypes_conf.xml.sample](https://github.com/galaxyproject/galaxy/blob/dev/config/datatypes_conf.xml.sample)
+(e.g. ``format="pdf"`` or ``format="fastqsanger"``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="format_source" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This sets the data type of the output file to be the same format as that of a tool input dataset.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="metadata_source" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This copies the metadata information
+from the tool's input dataset. This is particularly useful for interval data
+types where the order of the columns is not set.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="label" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+This will be the name of the history item for the output data set. The string
+can include structure like ``${<some param name>.<some attribute>}``, as
+discussed for command line parameters in the ``<command>`` tag set section
+above. The default label is ``${tool.name} on ${on_string}``.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="from_work_dir" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Relative path to a file produced by the
+tool in its working directory. Output's contents are set to this file's
+contents.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="hidden" type="xs:boolean" default="false">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Boolean indicating whether to hide
+dataset in the history view. (Default is ``false``.)</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:group name="CollectionElement">
+    <xs:choice>
+      <xs:element name="data" type="Data" />
+      <xs:element name="discover_datasets" type="OutputCollectionDiscoverDatasets" />
+      <xs:element name="filter" type="OutputFilter" />
+    </xs:choice>
+  </xs:group>
+
+  <xs:complexType name="Collection">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set is contained within the ``<outputs>`` tag set, and it defines the
+output dataset collection description resulting from the tool's execution. The
+value of the attribute ``label`` can be acquired from input parameters or
+metadata in the same way that the command line parameters are (discussed in the
+[command](#tool-command) directive).
+
+Creating collections in tools is covered in-depth in
+[planemo's documentation](https://planemo.readthedocs.io/en/latest/writing_advanced.html#creating-collections).
+
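+As a minimal sketch (names here are illustrative, and ``input1`` is assumed to
+be an input dataset), the following defines a paired output collection whose
+elements are taken from the job's working directory:
+
+```xml
+<collection name="paired_output" type="paired" label="Split Pair">
+    <data name="forward" format="txt" from_work_dir="forward.txt" />
+    <data name="reverse" format_source="input1" from_work_dir="reverse.txt" />
+</collection>
+```
+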
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="CollectionElement" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+    <xs:attribute name="name" type="xs:string" use="required">
+        <xs:annotation>
+          <xs:documentation xml:lang="en"><![CDATA[Name for this output. This
+``name`` is used as the Cheetah variable containing the Galaxy assigned output
+path in ``command`` and ``configfile`` elements. The name should not contain
+pipes or periods (e.g. ``.``).]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="type" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Collection type for output (e.g. ``paired``, ``list``, or ``list:list``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="label" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+
+This will be the name of the history item for the output data set. The string
+can include structure like ``${<some param name>.<some attribute>}``, as
+discussed for command line parameters in the ``<command>`` tag set section
+above. The default label is ``${tool.name} on ${on_string}``.
+
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="format_source" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This is the name of input collection or
+dataset to derive output dataset collection's element's format/datatype from.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="type_source" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This is the name of input collection to
+derive collection's type (e.g. ``collection_type``) from.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="structured_like" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This is the name of input collection or
+dataset to derive "structure" of the output from (output element count and
+identifiers). For instance, if the referenced input has three ordered items with
+identifiers ``sample1``, ``sample2``,  and ``sample3``</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="inherit_format" type="xs:boolean">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">If ``structured_like`` is set, inherit
+format of outputs from format of corresponding input.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="OutputFilter">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+The ``<data>`` tag can contain a ``<filter>`` tag which includes a Python code
+block to be executed to test whether to include this output in the outputs the
+tool ultimately creates. If the code, when executed, returns ``True``,
+the output dataset is retained. In these code blocks the tool parameters appear
+as Python variables and are thus referred to without the $ used for the Cheetah
+template (used in the ``<command>`` tag). Variables that are part of
+conditionals are accessed using a hash named after the conditional.
+
+### Example
+
+```xml
+    <inputs>
+      <param type="data" format="fasta" name="reference_genome" label="Reference genome" />
+      <param type="data" format="bam" name="input_bam" label="Aligned reads" />
+      <conditional name="options">
+        <param label="Use advanced options" name="selection_mode" type="select">
+          <option selected="True" value="defaults">Use default options</option>
+          <option value="advanced">Use advanced options</option>
+        </param>
+        <when value="defaults"> </when>
+        <when value="advanced">
+          <param name="vcf_output" type="boolean" checked="false" label="VCF output"
+            truevalue="--vcf" falsevalue="" />
+        </when>
+      </conditional>
+    </inputs>
+    <outputs>
+      <data format="txt" label="Alignment report on ${on_string}" name="output_txt" />
+      <data format="vcf" label="Variant summary on ${on_string}" name="output_vcf">
+          <filter>options['selection_mode'] == 'advanced' and options['vcf_output']</filter>
+      </data>
+    </outputs>
+```
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="OutputDiscoverDatasets">
+     <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+Describes datasets to dynamically collect after the job is complete.
+
+There are many simple tools with examples of this element distributed with
+Galaxy, including:
+
+* https://github.com/galaxyproject/galaxy/tree/master/test/functional/tools/multi_output.xml
+* https://github.com/galaxyproject/galaxy/tree/master/test/functional/tools/multi_output_assign_primary.xml
+* https://github.com/galaxyproject/galaxy/tree/master/test/functional/tools/multi_output_configured.xml
+
+More information can be found on Planemo's documentation for
+[multiple output files](https://planemo.readthedocs.io/en/latest/writing_advanced.html#multiple-output-files).
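+
+As a minimal sketch (the pattern and the ``splits`` directory are illustrative),
+the following collects every tabular file found in a ``splits`` subdirectory of
+the working directory as an additional visible dataset:
+
+```xml
+<outputs>
+    <data format="tabular" name="report">
+        <discover_datasets pattern="(?P<designation>.+)\.tabular" directory="splits" ext="tabular" visible="true" />
+    </data>
+</outputs>
+```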
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="pattern" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Regular expression used to find filenames and parse dynamic properties.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="directory" type="xs:string" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Directory (relative to working directory) to search for files.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="format" type="xs:string" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Format (or datatype) of discovered datasets (an alias with ``ext``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="ext" type="xs:string" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Format (or datatype) of discovered datasets (an alias with ``format``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="visible" type="xs:boolean" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Indication if this dataset is visible in output history. This defaults to ``false``, but probably shouldn't - be sure to set to ``true`` if that is your intention.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="assign_primary_output" type="xs:boolean" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Replace the primary dataset described by the parameter ``data`` parameter with the first output discovered.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="OutputCollectionDiscoverDatasets">
+     <xs:annotation>
+       <xs:documentation xml:lang="en"><![CDATA[
+
+This tag allows one to describe the datasets contained within an output
+collection dynamically, such that the outputs are "discovered" based on regular
+expressions after the job is complete.
+
+There are many simple tools with examples of this element distributed with
+Galaxy, including:
+
+* https://github.com/galaxyproject/galaxy/blob/master/test/functional/tools/collection_split_on_column.xml
+* https://github.com/galaxyproject/galaxy/blob/master/test/functional/tools/collection_creates_dynamic_list_of_pairs.xml
+* https://github.com/galaxyproject/galaxy/blob/master/test/functional/tools/collection_creates_dynamic_nested.xml
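+
+As a minimal sketch (names here are illustrative), the following populates a
+list collection with one element per tabular file discovered in an ``outputs``
+subdirectory, using the named ``designation`` group as each element's
+identifier:
+
+```xml
+<collection name="split_output" type="list" label="Split output">
+    <discover_datasets pattern="(?P<designation>.+)\.tabular" directory="outputs" ext="tabular" />
+</collection>
+```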
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="pattern" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Regular expression used to find filenames and parse dynamic properties.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="directory" type="xs:string" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Directory (relative to working directory) to search for files.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="format" type="xs:string" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Format (or datatype) of discovered datasets (an alias with ``ext``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="ext" type="xs:string" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Format (or datatype) of discovered datasets (an alias with ``format``).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="visible" type="xs:boolean" use="optional">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Indication if this dataset is visible in output history. This defaults to ``false``, but probably shouldn't - be sure to set to ``true`` if that is your intention.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="Actions">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+The ``actions`` directive allows tools to dynamically take actions related to an
+``output`` either unconditionally or conditionally based on inputs. These
+actions currently include setting metadata values and the output's data format.
+
+The examples below demonstrate that the ``actions`` tag contains child
+``conditional`` tags. When these conditionals are met, the ``action``
+directives below the conditional are applied to the ``data`` output.
+
+### Metadata
+
+The ``<actions>`` in the Bowtie 2 wrapper is used in lieu of the deprecated
+``<code>`` tag to set the ``dbkey`` of the output dataset. In
+[bowtie2_wrapper.xml](https://github.com/galaxyproject/tools-devteam/blob/master/tools/bowtie2/bowtie2_wrapper.xml)
+(see below), according to the first action block, if the
+``reference_genome.source`` is ``indexed`` (not ``history``), then it will assign
+the ``dbkey`` of the output file to be the same as that of the reference file. It
+does this by looking through the data table and finding the entry whose column 1
+matches the value selected in the index dropdown box, then using the ``dbkey``
+from column 0 (ignoring comment lines, which start with ``#``, along the way).
+
+If ``reference_genome.source`` is ``history``, it pulls the ``dbkey`` from the
+supplied file.
+
+```xml
+<data format="bam" name="output" label="${tool.name} on ${on_string}: aligned reads (sorted BAM)">
+  <filter>analysis_type['analysis_type_selector'] == "simple" or analysis_type['sam_opt'] is False</filter>
+  <actions>
+    <conditional name="reference_genome.source">
+      <when value="indexed">
+        <action type="metadata" name="dbkey">
+          <option type="from_data_table" name="bowtie2_indexes" column="1" offset="0">
+            <filter type="param_value" column="0" value="#" compare="startswith" keep="False"/>
+            <filter type="param_value" ref="reference_genome.index" column="0"/>
+          </option>
+        </action>
+      </when>
+      <when value="history">
+        <action type="metadata" name="dbkey">
+          <option type="from_param" name="reference_genome.own_file" param_attribute="dbkey" />
+        </action>
+      </when>
+    </conditional>
+  </actions>
+</data>
+```
+
+### Format
+
+The Bowtie 2 example also demonstrates conditionally setting an output format
+based on inputs, as shown below:
+
+```xml
+<data format="fastqsanger" name="output_unaligned_reads_r" label="${tool.name} on ${on_string}: unaligned reads (R)">
+    <filter>( library['type'] == "paired" or library['type'] == "paired_collection" ) and library['unaligned_file'] is True</filter>
+    <actions>
+        <conditional name="library.type">
+            <when value="paired">
+                <action type="format">
+                    <option type="from_param" name="library.input_2" param_attribute="ext" />
+                </action>
+            </when>
+            <when value="paired_collection">
+                <action type="format">
+                    <option type="from_param" name="library.input_1" param_attribute="reverse.ext" />
+                </action>
+            </when>
+        </conditional>
+    </actions>
+</data>
+```
+
+### Unconditional Actions and Column Names
+
+For a static file that contains a fixed number of columns, this is straightforward:
+
+```xml
+<outputs>
+    <data format="tabular" name="table">
+        <actions>
+            <action name="column_names" type="metadata" default="Firstname,Lastname,Age" />
+        </actions>
+    </data>
+</outputs>
+```
+
+It may also be necessary to use column names based on a variable from another
+input file. This is implemented in the
+[htseq-count](https://github.com/galaxyproject/tools-iuc/blob/master/tools/htseq_count/htseq-count.xml)
+and
+[featureCounts](https://github.com/galaxyproject/tools-iuc/blob/master/tools/featurecounts/featurecounts.xml)
+wrappers:
+
+```xml
+<inputs>
+    <data name="input_file" type="data" multiple="false">
+</inputs>
+<outputs>
+    <data format="tabular" name="output_short">
+        <actions>
+            <action name="column_names" type="metadata" default="Geneid,${input_file.name}" />
+        </actions>
+    </data>
+</outputs>
+```
+
+Or in case of multiple files:
+
+```xml
+<inputs>
+    <data name="input_files" type="data" multiple="true">
+</inputs>
+<outputs>
+    <data format="tabular" name="output_short">
+        <actions>
+            <action name="column_names" type="metadata" default="Geneid,${','.join([ a.name for a in $input_files ])}" />
+        </actions>
+    </data>
+</outputs>
+```
+
+### Unconditional Actions - An Older Example
+
+The first approach above to setting ``dbkey`` based on tool data tables is
+preferred, but an older example using so-called "loc files" directly is found
+below.
+
+In addition to demonstrating this lower-level direct access of .loc files, it
+demonstrates an unconditional action. The second block would not be needed for
+most cases - it was required in this tool to handle the specific case of a small
+reference file used for functional testing. It says that if the ``dbkey`` has
+been set to ``equCab2chrM`` (which is what the ``<filter type="metadata_value"...
+column="1" />`` tag checks), then it should be changed to ``equCab2`` (which is
+what the ``<option type="from_param" ... column="0" ...>`` tag does).
+
+```xml
+<actions>
+   <conditional name="refGenomeSource.genomeSource">
+      <when value="indexed">
+           <action type="metadata" name="dbkey">
+            <option type="from_file" name="bowtie_indices.loc" column="0" offset="0">
+               <filter type="param_value" column="0" value="#" compare="startswith" keep="False"/>
+               <filter type="param_value" ref="refGenomeSource.index" column="1"/>
+            </option>
+         </action>
+       </when>
+    </conditional>
+    <!-- Special casing equCab2chrM to equCab2 -->
+    <action type="metadata" name="dbkey">
+        <option type="from_param" name="refGenomeSource.genomeSource" column="0" offset="0">
+            <filter type="insert_column" column="0" value="equCab2chrM"/>
+            <filter type="insert_column" column="0" value="equCab2"/>
+            <filter type="metadata_value" ref="output" name="dbkey" column="1" />
+        </option>
+    </action>
+</actions>
+```
+]]>
+</xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="ActionsElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+  <xs:group name="ActionsElement">
+    <xs:choice>
+      <xs:element name="action" type="Action"/>
+      <xs:element name="conditional" type="ActionsConditional"/>
+    </xs:choice>
+  </xs:group>
+  <xs:complexType name="Action">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This directive is contained within an output ``data``'s ``actions`` directive
+(either directly or beneath a parent ``conditional`` tag). This directive
+describes modifications to either the output's format or metadata (based on
+whether ``type`` is ``format`` or ``metadata``).
+
+See [actions](#tool-outputs-data-actions) documentation for examples
+of this directive.
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="option" type="ActionsOption" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="type" type="ActionType" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Type of action (either ``format`` or
+``metadata`` currently).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="name" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">If ``type="metadata"``, the name of the
+metadata element.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="default" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">If ``type="format"``, the default format
+if none of the nested options apply.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="ActionsOption">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="filter" type="ActionsConditionalFilter" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="type" type="ActionsOptionType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="name" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="column" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="offset" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="param_attribute" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="ActionsConditional">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This directive is contained within an output ``data``'s ``actions`` directive.
+This directive describes the state of the inputs required to apply an ``action``
+(specified as children of the child ``when`` directives to this element) to an
+output.
+
+See [actions](#tool-outputs-data-actions) documentation for examples
+of this directive.
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+        <xs:element name="when" type="ActionsConditionalWhen" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Name of the input parameter to base
+conditional logic on. The value of this parameter will be matched against nested
+``when`` directives.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="ActionsConditionalWhen">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+See [actions](#tool-outputs-data-actions) documentation for examples
+of this directive.
+
+      ]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="action" type="Action" minOccurs="1" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="value" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">Value to match conditional input value
+against.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="ActionsConditionalFilter">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="type" type="ActionsConditionalFilterType" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="compare" type="CompareType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="ref" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="column" type="xs:integer">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="keep" type="PermissiveBoolean">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="cast" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="param_attribute" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="separator" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="strip" type="PermissiveBoolean">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="old_column" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="old_value" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="new_column" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="new_value" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+  <xs:complexType name="EnvironmentVariables">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">
+This directive should contain one or more ``environment_variable`` definitions.
+      </xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="environment_variable" type="EnvironmentVariable" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:complexType name="EnvironmentVariable">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This directive defines an environment variable that will be available when the
+tool executes. The body should be a Cheetah template block that may reference
+the tool's inputs as demonstrated below.
+
+### Example
+
+The following demonstrates a couple of ``environment_variable`` definitions.
+
+```xml
+<environment_variables>
+    <environment_variable name="INTVAR">$inttest</environment_variable>
+    <environment_variable name="IFTEST">#if int($inttest) == 3
+ISTHREE
+#else#
+NOTTHREE
+#end if#</environment_variable>
+</environment_variables>
+```
+
+If these environment variables are used in another Cheetah context, such as in
+the ``command`` block, the ``$`` used to indicate shell expansion of a variable
+should be escaped with a ``\`` to prevent it from being evaluated as a Cheetah
+variable instead of a shell variable.
+
+```xml
+<command>
+    echo "\$INTVAR"  >  $out_file1;
+    echo "\$IFTEST"  >> $out_file1;
+</command>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="name" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Name of the environment variable to
+define.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="ConfigFiles">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[See
+[xy_plot.xml](https://github.com/galaxyproject/tools-devteam/blob/master/tools/xy_plot/xy_plot.xml)
+for an example of how this tag set is used in a tool. This tag set is a
+container for ``<configfile>`` and ``<inputs>`` tag sets - which can be used
+to set up configuration files for use by tools.]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="ConfigFilesElement" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:group name="ConfigFilesElement">
+    <xs:choice>
+      <xs:element name="inputs" type="ConfigInputs"/>
+      <xs:element name="configfile" type="ConfigFile"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:complexType name="ConfigFile">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set is contained within the ``<configfiles>`` tag set. It allows for
+the creation of a temporary file for file-based parameter transfer.
+
+*Example*
+
+The following is taken from the [xy_plot.xml](https://github.com/galaxyproject/tools-devteam/blob/master/tools/xy_plot/xy_plot.xml)
+tool config.
+
+```xml
+<configfiles>
+    <configfile name="script_file">
+      ## Setup R error handling to go to stderr
+      options( show.error.messages=F, error = function () { cat( geterrmessage(), file=stderr() ); q( "no", 1, F ) } )
+      ## Determine range of all series in the plot
+      xrange = c( NULL, NULL )
+      yrange = c( NULL, NULL )
+      #for $i, $s in enumerate( $series )
+          s${i} = read.table( "${s.input.file_name}" )
+          x${i} = s${i}[,${s.xcol}]
+          y${i} = s${i}[,${s.ycol}]
+          xrange = range( x${i}, xrange )
+          yrange = range( y${i}, yrange )
+      #end for
+      ## Open output PDF file
+      pdf( "${out_file1}" )
+      ## Dummy plot for axis / labels
+      plot( NULL, type="n", xlim=xrange, ylim=yrange, main="${main}", xlab="${xlab}", ylab="${ylab}" )
+      ## Plot each series
+      #for $i, $s in enumerate( $series )
+          #if $s.series_type['type'] == "line"
+              lines( x${i}, y${i}, lty=${s.series_type.lty}, lwd=${s.series_type.lwd}, col=${s.series_type.col} )
+          #elif $s.series_type.type == "points"
+              points( x${i}, y${i}, pch=${s.series_type.pch}, cex=${s.series_type.cex}, col=${s.series_type.col} )
+          #end if
+      #end for
+      ## Close the PDF file
+      devname = dev.off()
+    </configfile>
+</configfiles>
+```
+
+This file is then used in the ``command`` block of the tool as follows:
+
+```xml
+<command>bash "$__tool_directory__/r_wrapper.sh" "$script_file"</command>
+```
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="name" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Cheetah variable used to reference
+the path to the file created with this directive.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="ConfigInputs">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+This tag set is contained within the <configfiles> tag set. It tells Galaxy to
+write out a JSON representation of the tool parameters.
+
+*Example*
+
+The following will create a Cheetah variable, ``$inputs``, referencing a JSON
+file that will contain the tool parameter inputs.
+
+```xml
+<configfiles>
+    <inputs name="inputs" />
+</configfiles>
+```
+
+The following will instead write the inputs to the tool's working directory with
+the specified name (i.e. ``inputs.json``).
+
+```xml
+<configfiles>
+    <inputs name="inputs" filename="inputs.json" />
+</configfiles>
+```
+
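+The first variant is typically consumed by handing the file's path to a
+wrapper script; a hedged sketch (``parse_inputs.py`` is a hypothetical script
+shipped with the tool):
+
+```xml
+<command>python '$__tool_directory__/parse_inputs.py' --inputs '$inputs'</command>
+```
+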
+A contrived example of a tool that uses this is the test tool
+[inputs_as_json.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/inputs_as_json.xml).
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="name" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[
+Cheetah variable to populate the path to the inputs JSON file created in
+response to this directive.
+]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="filename" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Path relative to the working directory of the tool for the inputs JSON file created in response to this directive.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="VersionCommand">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Specifies the command to be run in
+order to get the tool's version string. The resulting value will be found in the
+"Info" field of the history dataset.
+
+Unlike the [command](#tool-command) tag, this value is taken as a literal
+(with the exception of the string ``$__tool_directory__``), so there is no
+need to escape values like ``$``, and command inputs are not available for
+variable substitution.
+
+### Examples
+
+A simple example for a [TopHat](https://ccb.jhu.edu/software/tophat/index.shtml)
+tool definition might just be:
+
+```xml
+<version_command>tophat -version</version_command>
+```
+
+An example that leverages a Python script (e.g. ``count_reads.py``) shipped with
+the tool might be:
+
+```xml
+<version_command>python $__tool_directory__/count_reads.py</version_command>
+```
+
+Examples are included in the test tools directory including:
+
+- [version_command_plain.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/version_command_plain.xml)
+- [version_command_tool_dir.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/version_command_tool_dir.xml)
+- [version_command_interpreter.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/version_command_interpreter.xml) (*deprecated*)
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="interpreter" type="xs:string">
+          <xs:annotation>
+            <xs:documentation xml:lang="en"><![CDATA[*Deprecated*: Prefix the version command with an interpreter and the tool directory in order to execute a script shipped with the tool. It is better to just use ``<interpreter> $__tool_directory__/<script_name>``.]]></xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="RequestParameterTranslation">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+        See ~/tools/data_source/ucsc_tablebrowser.xml for an example of how to use this tag set. This tag set is used only in "data_source" tools (the "tool_type" attribute value is "data_source"). This tag set is contained within the <tool> tag set - it contains a set of <request_param> tags.
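+
+A sketch in that style (the remote parameter names shown are illustrative):
+
+```xml
+<request_param_translation>
+    <request_param galaxy_name="URL" remote_name="URL" missing="" />
+    <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+    <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="bed" />
+</request_param_translation>
+```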
+      ]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="request_param" minOccurs="0" maxOccurs="unbounded" type="RequestParameter"/>
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:complexType name="RequestParameter">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Contained within the <request_param_translation> tag set ( used only in "data_source" tools ) - the external data source application may send back parameter names like "GENOME" which must be translated to "dbkey" in Galaxy.]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="RequestParameterElement" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+    <xs:attribute name="galaxy_name" type="RequestParameterGalaxyNameType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">
+          The Galaxy parameter name; each of these maps directly to a ``remote_name`` value
+        </xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="remote_name" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">
+          The string representing the name of the parameter in the remote data source
+        </xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="missing" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">
+          The default value to use for galaxy_name if the remote_name parameter is not included in the request
+        </xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:simpleType name="RequestParameterGalaxyNameType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"></xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="URL" />
+      <xs:enumeration value="dbkey" />
+      <xs:enumeration value="organism" />
+      <xs:enumeration value="table" />
+      <xs:enumeration value="description" />
+      <xs:enumeration value="name" />
+      <xs:enumeration value="info" />
+      <xs:enumeration value="data_type" />
+    </xs:restriction>
+  </xs:simpleType>
+
+  <xs:group name="RequestParameterElement">
+    <xs:choice>
+      <xs:element name="append_param" type="RequestParameterAppend" />
+      <xs:element name="value_translation" type="RequestParameterValueTranslation" />
+    </xs:choice>
+  </xs:group>
+
+  <xs:complexType name="RequestParameterAppend">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Optionally contained within the <request_param> tag set if galaxy_name="URL" - some remote data sources ( e.g., Gbrowse, Biomart ) send parameters back to Galaxy in the initial response that must be added to the value of "URL" prior to Galaxy sending the secondary request to the remote data source via URL.]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="value" minOccurs="0" maxOccurs="unbounded" type="RequestParameterAppendValue"/>
+    </xs:sequence>
+    <xs:attribute name="separator" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+The text to use to join the requested parameters together (example ``separator="&amp;"``).
+        ]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="first_separator" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+The text to use to join the request_param parameters to the first requested parameter (example ``first_separator="?"``).
+        ]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="join" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+The text to use to join the param name to its value (example ``join="="``).
+        ]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="RequestParameterAppendValue">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Contained within the <append_param> tag set - allows for appending a param name / value pair to the value of URL.
+
+Example:
+
+```xml
+<request_param_translation>
+    <request_param galaxy_name="URL" remote_name="URL" missing="">
+        <append_param separator="&amp;" first_separator="?" join="=">
+            <value name="_export" missing="1" />
+        </append_param>
+    </request_param>
+</request_param_translation>
+```
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="name" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+Any valid HTTP request parameter name. The name / value pair must be received from the remote data source and will be appended to the value of URL as something like "&_export=1" (e.g. ``name="_export"``).
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="missing" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[Must be a valid HTTP request parameter value (e.g. ``missing="1"``).]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="RequestParameterValueTranslation">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Optionally contained within the <request_param> tag set - the parameter value received from a remote data source may be named differently in Galaxy, and this tag set allows the value to be appropriately translated.]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="value" minOccurs="0" maxOccurs="unbounded" type="RequestParameterValueTranslationValue"/>
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:complexType name="RequestParameterValueTranslationValue">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Contained within the <value_translation> tag set - allows for changing the data type value to something supported by Galaxy.
+
+Example:
+
+```xml
+<request_param_translation>
+    <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="bed" >
+        <value_translation>
+            <value galaxy_value="tabular" remote_value="primaryTable" />
+        </value_translation>
+    </request_param>
+</request_param_translation>
+```
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="galaxy_value" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[
+The target value (e.g. for setting the data format; the list of supported
+data formats is contained in
+[/config/datatypes_conf.xml.sample](https://github.com/galaxyproject/galaxy/blob/dev/config/datatypes_conf.xml.sample)).
+]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="remote_value" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"><![CDATA[The value supplied by the remote data source application.]]></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="Stdio">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Tools write the bulk of useful data to datasets, but they can also write messages to standard I/O (stdio) channels known as standard output (stdout) and standard error (stderr). Both stdout and stderr are typically written to the executing program's console or terminal. Previous versions of Galaxy checked stderr for execution errors - if any text showed up on stderr, then the tool's execution was marked as failed. However, many tools write messages to stderr that are not errors, so relying on stderr alone produced many false-positive failures.
+
+Legacy tools (ones with ``profile`` unspecified or a ``profile`` of less than
+16.04) will default to checking stderr for errors as described above. Newer
+tools will instead treat an exit code other than 0 as an error. The
+``detect_errors`` attribute on ``command`` can swap between these behaviors, but the
+``stdio`` directive allows more options in defining error conditions (though
+these aren't always intuitive).
+
+With the ``stdio`` directive, Galaxy can use regular expressions to scan stdout and
+stderr, and it also allows exit codes to be scanned for ranges. The ``<stdio>``
+tag has two subtags, ``<regex>`` and ``<exit_code>``, to define regular
+expressions and exit code processing, respectively. They are defined below. If a
+tool does not have any valid ``<regex>`` or ``<exit_code>`` tags, then Galaxy
+will use the previous technique for finding errors.
+
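+A minimal sketch combining both subtags (the range, pattern, and descriptions
+here are illustrative; each subtag is described in detail below):
+
+```xml
+<stdio>
+    <exit_code range="1:" level="fatal" description="Tool failed" />
+    <regex match="out of memory"
+           source="both"
+           level="fatal"
+           description="Out of memory" />
+</stdio>
+```
+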
+A note should be made on the order in which exit codes and regular expressions
+are applied and how the processing stops. Exit code rules are applied before
+regular expression rules. The rationale is that exit codes are more clearly
+defined and are easier to check computationally, so they are applied first. Exit
+code rules are applied in the order in which they appear in the tool's
+configuration file, and regular expressions are also applied in the order in
+which they appear in the tool's configuration file. However, once a rule is
+triggered that causes a fatal error, no further rules are
+checked.]]></xs:documentation>
+
+    </xs:annotation>
+    <xs:sequence>
+      <xs:group ref="StdioElement" minOccurs="0" maxOccurs="unbounded" />
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:group name="StdioElement">
+    <xs:choice>
+      <xs:element name="regex" type="Regex"/>
+      <xs:element name="exit_code" type="ExitCode"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:complexType name="ExitCode">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+Tools may use exit codes to indicate specific execution errors. Many programs use 0 to indicate success and non-zero exit codes to indicate errors. Galaxy allows each tool to specify exit codes that indicate errors. Each <exit_code> tag defines a range of exit codes, and each range can be associated with a description of the error (e.g., "Out of Memory", "Invalid Sequence File") and an error level. The description just describes the condition and can be anything. The error level is either ``warning`` or ``fatal``, as described below.
+
+The exit code's range can be any consecutive group of integers. More advanced ranges, such as noncontiguous ranges, are currently not supported. Ranges can be specified in the form "m:n", where m is the start integer and n is the end integer. If ":n" is specified, then the exit code will be compared against all integers less than or equal to n. If "m:" is used, then the exit code will be compared against all integers greater than or equal to m. If the exit code matches, then the error level is acted upon as described below.
+
+Note that most Unix and Linux variants only support positive integers 0 to 255 for exit codes. If an exit code falls out of the range 0 to 255, the usual convention is to only use the lower 8 bits for the exit code. The only known exception is if a job is broken into subtasks using the tasks runner and one of those tasks is stopped with a POSIX signal. (Note that signals should be used as a last resort for terminating processes.) In those cases, the task will receive -1 times the signal number as its exit code.
+
+The <exit_code> tag's supported attributes are as follows:
+
+* ``range``: This indicates the range of exit codes to check. The range can be one of the following:
+  * ``n``: the exit code will only be compared to n;
+  * ``m:n``: the exit code must be greater than or equal to m and less than or equal to n;
+  * ``m:``: the exit code must be greater than or equal to m;
+  * ``:n``: the exit code must be less than or equal to n.
+* ``level``: This indicates the error level of the exit code. The level can have one of two values:
+  * ``warning``: If an exit code falls in the given range, then a description of the error will be added to the beginning of stderr. A warning-level error will not cause the tool to fail.
+  * ``fatal``: If an exit code falls in the given range, then a description of the error will be added to the beginning of stderr. A fatal-level error will cause the tool to fail. If no level is specified, ``fatal`` is assumed.
+* ``description``: This is an optional description of the error that corresponds to the exit code.
+
+The following is an example of the <exit_code> tag:
+
+```xml
+<stdio>
+    <exit_code range="2"   level="fatal"   description="Out of Memory" />
+    <exit_code range="3:5" level="warning" description="Low disk space" />
+    <exit_code range="6:"  level="fatal"   description="Bad input dataset" />
+</stdio>
+```
+
+If the tool returns 0 or 1, then the tool will not be marked as having an error.
+If the exit code is 2, then the tool will fail with the description ``Out of
+Memory`` added to stderr. If the tool returns 3, 4, or 5, then the tool will not
+be marked as having failed, but ``Low disk space`` will be added to stderr.
+Finally, if the tool returns any number greater than or equal to 6, then the
+description ``Bad input dataset`` will be added to stderr and the tool will be
+marked as having failed.
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="range" type="RangeType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="level" type="LevelType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="description" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en"></xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+
+  <xs:complexType name="Regex">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+A regular expression defines a pattern of characters. The patterns include the following:
+
+* ``GCTA``, which matches on the fixed string "GCTA";
+* ``[abcd]``, which matches on the characters a, b, c, or d;
+* ``[CG]{12}``, which matches on 12 consecutive characters that are C or G;
+* ``a.*z``, which matches on the character "a", followed by 0 or more characters of any type, followed by a "z";
+* ``^X``, which matches the letter X at the beginning of a string;
+* ``Y$``, which matches the letter Y at the end of a string.
+
+There are many more possible regular expressions. A reference to all supported
+regular expressions can be found under
+[Python Regular Expression Syntax](https://docs.python.org/3/library/re.html#regular-expression-syntax).
+
+A regular expression includes the following attributes:
+
+* ``source``: This tells whether the regular expression should be matched against stdout, stderr, or both. If this attribute is missing or is incorrect, then both stdout and stderr will be checked. The source can be one of the following values:
+  * ``stdout``: the regular expression will be applied to stdout;
+  * ``stderr``: the regular expression will be applied to stderr;
+  * ``both``: the regular expression will be applied to both stderr and stdout (which is the default case).
+* ``match``: This is the regular expression that will be used to match against stdout and/or stderr. If the <regex> tag does not contain the match attribute, then the <regex> tag will be ignored. The regular expression can be any valid Python regular expression. All regular expressions are performed case insensitively. For example, if match contains the regular expression "actg", then the regular expression will match against "actg", "ACTG", "AcTg", and so on. Also note that, if double quotes are needed inside the expression, they must be written as ``&quot;`` since the ``match`` value is itself an XML attribute.
+* ``level``: This works very similarly to the <exit_code> tag, except that, when a regular expression matches against its source, the description is added to the beginning of the source. For example, if stdout matches on a regular expression, then the regular expression's description is added to the beginning of stdout (instead of stderr). The level can be log, warning or fatal as described below.
+  * ``log`` and ``warning``: If the regular expression matches against its source input (i.e., stdout and/or stderr), then a description of the error will be added to the beginning of the source, prepended with either 'Log:' or 'Warning:'. A log-level/warning-level error will not cause the tool to fail.
+  * ``fatal``: If the regular expression matches against its source input, then a description of the error will be added to the beginning of the source. A fatal-level error will cause the tool to fail. If no level is specified, ``fatal`` is assumed.
+* ``description``: Just like its ``exit_code`` counterpart, this is an optional description of the regular expression that has matched.
+
+The following is an example of regular expressions that may be used:
+
+```xml
+<stdio>
+    <regex match="low space"
+           source="both"
+           level="warning"
+           description="Low space on device" />
+    <regex match="error"
+           source="stdout"
+           level="fatal"
+           description="Unknown error encountered" />
+    <regex match="[CG]{12}"
+           description="Fatal error - CG island 12 nts long found" />
+    <regex match="^Branch A"
+           level="warning"
+           description="Branch A was taken in execution" />
+</stdio>
+```
+
+The regular expression matching proceeds as follows. First, if either stdout or
+stderr match on ``low space``, then a warning is registered. If stdout contained
+the string ``---LOW SPACE---``, then stdout has the string ``Warning: Low space
+on device`` added to its beginning. The same applies if stderr had contained the
+string ``low space``. Since only a warning could have occurred, the processing
+continues.
+
+Next, the regular expression ``error`` is matched only against stdout. If stdout
+contains the string ``error`` regardless of its capitalization, then a fatal
+error has occurred and the processing stops. In that case, stdout would be
+prepended with the string ``Fatal: Unknown error encountered``. Note that, if
+stderr contained ``error``, ``ERROR``, or ``ErRor`` then it would not matter -
+stderr was not being scanned.
+
+If the second regular expression did not match, then the third regular
+expression is checked. The third regular expression does not contain an error
+level, so an error level of ``fatal`` is assumed. The third regular expression
+also does not contain a source, so both stdout and stderr are checked. The third
+regular expression looks for 12 consecutive "C"s or "G"s in any order and in
+uppercase or lowercase. If stdout contained ``cgccGGCCcGGcG`` or stderr
+contained ``CCCCCCgggGGG``, then the regular expression would match, the tool
+would be marked with a fatal error, and the stream that contained the
+12-nucleotide CG island would be prepended with ``Fatal: Fatal error - CG island
+12 nts long found``.
+
+Finally, if the tool did not match any of the fatal errors, then the fourth
+regular expression is checked. Since no source is specified, both stdout and
+stderr are checked. If ``Branch A`` is at the beginning of stdout or stderr, then
+a warning will be registered and the source that contained ``Branch A`` will be
+prepended with the warning ``Warning: Branch A was taken in execution``.
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:attribute name="source" type="SourceType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This tells whether the regular expression should be matched against stdout, stderr, or both. If this attribute is missing or is incorrect, then both stdout and stderr will be checked. The source can be ``stdout``, ``stderr``, or ``both``.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="match" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This is the regular expression that will be used to match against stdout and/or stderr. </xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="level" type="LevelType">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This works very similarly to the ``exit_code`` tag, except that, when a regular expression matches against its source, the description is added to the beginning of the source.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="description" type="xs:string">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">An optional description of the regular expression that has matched.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="ChangeFormat">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[See
+[extract_genomic_dna.xml](https://github.com/galaxyproject/tools-iuc/blob/master/tools/extract_genomic_dna/extract_genomic_dna.xml)
+or the test tool
+[output_action_change_format.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/output_action_change_format.xml)
+for simple examples of how this tag set is used in a tool. This tag set is
+optionally contained within the ``<data>`` tag set and is the container tag set
+for the following ``<when>`` tag set.]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="when" type="ChangeFormatWhen" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+  <xs:complexType name="ChangeFormatWhen">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[
+
+If the data type of the output dataset is the specified type, the data type is
+changed to the desired type.
+
+### Examples
+
+Assume that your tool config includes the following select list parameter
+structure:
+
+```xml
+<param name="out_format" type="select" label="Output data type">
+    <option value="fasta">FASTA</option>
+    <option value="interval">Interval</option>
+</param>
+```
+
+Then whenever the user selects the ``interval`` option from the select list, the
+following structure in your tool config will override the ``format="fasta"`` setting
+in the ``<data>`` tag set with ``format="interval"``.
+
+```xml
+<outputs>
+    <data format="fasta" name="out_file1">
+        <change_format>
+            <when input="out_format" value="interval" format="interval" />
+        </change_format>
+    </data>
+</outputs>
+```
+
+See
+[extract_genomic_dna.xml](https://github.com/galaxyproject/tools-iuc/blob/master/tools/extract_genomic_dna/extract_genomic_dna.xml)
+or the test tool
+[output_action_change_format.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/output_action_change_format.xml)
+for more examples.
+
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence/>
+    <xs:attribute name="input" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value must be the name of the
+desired input parameter (e.g. ``input="out_format"`` above).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="value" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value must be a possible value of
+the input parameter referenced by ``input`` (e.g. ``value="interval"`` above).</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute name="format" type="xs:string" use="required">
+      <xs:annotation>
+        <xs:documentation xml:lang="en">This value must be a supported data type
+(e.g. ``format="interval"``). See
+[/config/datatypes_conf.xml.sample](https://github.com/galaxyproject/galaxy/blob/dev/config/datatypes_conf.xml.sample)
+for a list of supported formats.</xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+  </xs:complexType>
+
+  <xs:complexType name="Citations">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"><![CDATA[Tool files may declare one
+citations element. Each citations element can contain one or more citation tag
+elements - each of which specifies tool citation information using either a DOI
+or a BibTeX entry.
+
+These citations will appear at the bottom of the tool form in a formatted way
+but the user will have the option to select raw BibTeX for copying and pasting as
+well. Likewise, the history menu includes an option allowing users to aggregate
+all such citations across an analysis in a list of citations.
+
+BibTeX entries for citations annotated with DOIs will be fetched by Galaxy from
+https://dx.doi.org/ and cached.
+
+```xml
+<citations>
+   <!-- Example of annotating a citation using a DOI. -->
+   <citation type="doi">10.1093/bioinformatics/btq281</citation>
+
+   <!-- Example of annotating a citation using a BibTex entry. -->
+   <citation type="bibtex">@ARTICLE{Kim07aninterior-point,
+   author = {Seung-jean Kim and Kwangmoo Koh and Michael Lustig and Stephen Boyd and Dimitry Gorinevsky},
+   title = {An interior-point method for large-scale l1-regularized logistic regression},
+   journal = {Journal of Machine Learning Research},
+   year = {2007},
+   volume = {8},
+   pages = {1519-1555}
+   }</citation>
+ </citations>
+```
+
+For more implementation information see the
+[pull request](https://bitbucket.org/galaxy/galaxy-central/pull-requests/440/initial-bibtex-doi-citation-support-in/diff)
+adding this feature. For more examples of how to add this to tools, check out the
+following commits adding this to the
+[NCBI BLAST+ suite](https://github.com/peterjc/galaxy_blast/commit/9d2e3906915895765ecc3f48421b91fabf2ccd8b),
+[phenotype association tools](https://bitbucket.org/galaxy/galaxy-central/commits/39c983151fe328ff5d415f6da81ce5b21a7e18a4),
+[MAF suite](https://bitbucket.org/galaxy/galaxy-central/commits/60f63d6d4cb7b73286f3c747e8acaa475e4b6fa8),
+and [MACS2 suite](https://github.com/jmchilton/galaxytools/commit/184971dea73e236f11e82b77adb5cab615b8391b).
+
+This feature was added in the August 2014 release of Galaxy; tools annotated
+with citations will work in older releases of Galaxy, but no citation
+information will be available to the end user.
+]]></xs:documentation>
+    </xs:annotation>
+    <xs:sequence>
+      <xs:element name="citation" type="Citation" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+
+  <xs:complexType name="Citation">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Each citations element can contain one or
+more ``citation`` tag elements - each of which specifies tool citation
+information using either a DOI or a BibTeX entry.</xs:documentation>
+    </xs:annotation>
+    <xs:simpleContent>
+      <xs:extension base="xs:string">
+        <xs:attribute name="type" type="CitationType" use="required">
+          <xs:annotation>
+            <xs:documentation xml:lang="en">Type of citation - currently ``doi``
+and ``bibtex`` are the only supported options.</xs:documentation>
+          </xs:annotation>
+        </xs:attribute>
+      </xs:extension>
+    </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:simpleType name="CitationType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Type of citation represented.</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="bibtex"/>
+      <xs:enumeration value="doi"/>
+    </xs:restriction>
+  </xs:simpleType>
+
+  <xs:simpleType name="RequirementType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for RequirementType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="python-module"/>
+      <xs:enumeration value="binary"/>
+      <xs:enumeration value="package"/>
+      <xs:enumeration value="set_environment"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="ContainerType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Type of container for tool execution.</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="docker"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="ToolTypeType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for ToolTypeType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="data_source"/>
+      <xs:enumeration value="manage_data"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="URLmethodType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for URLmethodType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="get"/>
+      <xs:enumeration value="post"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="TargetType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for TargetType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="_top"/>
+      <xs:enumeration value="_parent"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="MethodType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for MethodType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="basic"/>
+      <xs:enumeration value="multi"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="DisplayType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for DisplayType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="checkboxes"/>
+      <xs:enumeration value="radio"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="HierarchyType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for HierarchyType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="exact"/>
+      <xs:enumeration value="recurse"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="ValidatorType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for ValidatorType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="empty_dataset"/>
+      <xs:enumeration value="expression"/>
+      <xs:enumeration value="regex"/>
+      <xs:enumeration value="in_range"/>
+      <xs:enumeration value="length"/>
+      <xs:enumeration value="metadata"/>
+      <xs:enumeration value="unspecified_build"/>
+      <xs:enumeration value="no_options"/>
+      <xs:enumeration value="empty_field"/>
+      <xs:enumeration value="dataset_metadata_in_file"/>
+      <xs:enumeration value="dataset_metadata_in_data_table"/>
+      <xs:enumeration value="dataset_ok_validator"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="FilterType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"></xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="data_meta"/>
+      <xs:enumeration value="param_value"/>
+      <xs:enumeration value="static_value"/>
+      <xs:enumeration value="unique_value"/>
+      <xs:enumeration value="multiple_splitter"/>
+      <xs:enumeration value="add_value"/>
+      <xs:enumeration value="sort_by"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="ActionsConditionalFilterType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en"></xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="param_value"/>
+      <xs:enumeration value="insert_column"/>
+      <xs:enumeration value="column_strip"/>
+      <xs:enumeration value="multiple_splitter"/>
+      <xs:enumeration value="column_replace"/>
+      <xs:enumeration value="metadata_value"/>
+      <xs:enumeration value="boolean"/>
+      <xs:enumeration value="string_function"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="ActionType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for ActionType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="format"/>
+      <xs:enumeration value="metadata"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="ActionsOptionType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for ActionsOptionType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="from_data_table"/>
+      <xs:enumeration value="from_param"/>
+      <xs:enumeration value="from_file"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="CompareType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for CompareType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="startswith"/>
+      <xs:enumeration value="re_search"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="LevelType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for LevelType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="fatal"/>
+      <xs:enumeration value="warning"/>
+      <xs:enumeration value="log"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="RangeType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for RangeType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:pattern value="\-?(\d)*:?\-?(\d)*"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="SourceType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for SourceType</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="stdout"/>
+      <xs:enumeration value="stderr"/>
+      <xs:enumeration value="both"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="TestOutputCompareType">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Type of comparison to use when comparing
+test-generated output files to expected output files. Currently valid values are
+``diff`` (the default), ``re_match``, ``sim_size``, ``re_match_multiline``,
+and ``contains``.</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="diff"/>
+      <xs:enumeration value="re_match"/>
+      <xs:enumeration value="sim_size"/>
+      <xs:enumeration value="re_match_multiline"/>
+      <xs:enumeration value="contains"/>
+    </xs:restriction>
+  </xs:simpleType>
+  <xs:simpleType name="PermissiveBoolean">
+    <xs:annotation>
+      <xs:documentation xml:lang="en">Documentation for PermissiveBoolean</xs:documentation>
+    </xs:annotation>
+    <xs:restriction base="xs:string">
+      <xs:enumeration value="0"/>
+      <xs:enumeration value="1"/>
+      <xs:enumeration value="true"/>
+      <xs:enumeration value="false"/>
+      <xs:enumeration value="True"/>
+      <xs:enumeration value="False"/>
+      <xs:enumeration value="yes"/>
+      <xs:enumeration value="no"/>
+    </xs:restriction>
+  </xs:simpleType>
+</xs:schema>
diff --git a/lib/galaxy/tools/zip_collection.xml b/lib/galaxy/tools/zip_collection.xml
new file mode 100644
index 0000000..2eb6be3
--- /dev/null
+++ b/lib/galaxy/tools/zip_collection.xml
@@ -0,0 +1,26 @@
+<tool id="__ZIP_COLLECTION__"
+      name="Zip Collection"
+      version="1.0.0"
+      tool_type="zip_collection">
+  <type class="ZipCollectionTool" module="galaxy.tools" />
+  <action module="galaxy.tools.actions.model_operations"
+          class="ModelOperationToolAction"/>
+  <inputs>
+    <param type="data" name="input_forward" label="Input Dataset (Forward)" />
+    <param type="data" name="input_reverse" label="Input Dataset (Reverse)" />
+  </inputs>
+  <outputs>
+    <collection name="output" type="paired" label="${on_string} (zipped)">
+      <data name="forward" />
+      <data name="reverse" />
+    </collection>
+  </outputs>
+  <help>
+    This tool takes two datasets and creates a dataset pair from
+    them. Mapping over two lists, this tool can be used to build
+    a list of dataset pairs from two individual lists of datasets.
+    
+    This tool will create new history datasets for your collection
+    but your quota usage will not increase.
+  </help>
+</tool>
diff --git a/lib/galaxy/tours/__init__.py b/lib/galaxy/tours/__init__.py
new file mode 100644
index 0000000..c497f2d
--- /dev/null
+++ b/lib/galaxy/tours/__init__.py
@@ -0,0 +1,80 @@
+"""
+This module manages loading/etc of Galaxy interactive tours.
+"""
+
+import os
+import yaml
+import logging
+
+from galaxy import util
+
+log = logging.getLogger( __name__ )
+
+
+def tour_loader(contents_dict):
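+    # Normalize a tour's YAML for the client-side player: rename 'intro' ->
+    # 'content' and 'position' -> 'placement', flag steps without an 'element'
+    # selector as orphans, and fall back to 'title_default' for steps lacking
+    # a title.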
+    #  Some of this can be done on the clientside.  Maybe even should?
+    title_default = contents_dict.get('title_default', None)
+    for step in contents_dict['steps']:
+        if 'intro' in step:
+            step['content'] = step.pop('intro')
+        if 'position' in step:
+            step['placement'] = step.pop('position')
+        if 'element' not in step:
+            step['orphan'] = True
+        if title_default and 'title' not in step:
+            step['title'] = title_default
+    return contents_dict
+
+
+class ToursRegistry(object):
+
+    def __init__(self, tour_directories):
+        self.tour_directories = util.config_directories_from_setting( tour_directories )
+        self.load_tours()
+
+    def tours_by_id_with_description(self):
+        return [{'id': k,
+                 'description': self.tours[k].get('description', None),
+                 'name': self.tours[k].get('name', None)}
+                for k in self.tours.keys()]
+
+    def load_tour(self, tour_id):
+        # Return the first matching .yaml/.yml tour; returns None when nothing
+        # matches (or when no tour directories are configured at all).
+        for tour_dir in self.tour_directories:
+            tour_path = os.path.join(tour_dir, tour_id + ".yaml")
+            if not os.path.exists(tour_path):
+                tour_path = os.path.join(tour_dir, tour_id + ".yml")
+            if os.path.exists(tour_path):
+                return self._load_tour_from_path(tour_path)
+        return None
+
+    def load_tours(self):
+        self.tours = {}
+        for tour_dir in self.tour_directories:
+            for filename in os.listdir(tour_dir):
+                if filename.endswith('.yaml') or filename.endswith('.yml'):
+                    self._load_tour_from_path(os.path.join(tour_dir, filename))
+        return self.tours_by_id_with_description()
+
+    def tour_contents(self, tour_id):
+        # Extra format translation could happen here (like the previous intro_to_tour)
+        # For now just return the loaded contents.
+        return self.tours.get(tour_id, None)
+
+    def _load_tour_from_path(self, tour_path):
+        filename = os.path.basename(tour_path)
+        tour_id = os.path.splitext(filename)[0]
+        try:
+            with open(tour_path) as handle:
+                # safe_load avoids constructing arbitrary Python objects from YAML
+                conf = yaml.safe_load(handle)
+                tour = tour_loader(conf)
+                self.tours[tour_id] = tour
+                log.info("Loaded tour '%s'" % tour_id)
+                return tour
+        except IOError:
+            log.exception("Tour '%s' could not be loaded, error reading file." % tour_id)
+        except yaml.error.YAMLError:
+            log.exception("Tour '%s' could not be loaded, error within file.  Please check your yaml syntax." % tour_id)
+        return None
diff --git a/lib/galaxy/util/__init__.py b/lib/galaxy/util/__init__.py
new file mode 100644
index 0000000..90af2b7
--- /dev/null
+++ b/lib/galaxy/util/__init__.py
@@ -0,0 +1,1533 @@
+# -*- coding: utf-8 -*-
+"""
+Utility functions used systemwide.
+
+"""
+from __future__ import absolute_import
+
+import binascii
+import collections
+import errno
+import json
+import logging
+import os
+import random
+import re
+import shutil
+import smtplib
+import stat
+import string
+import sys
+import tempfile
+import threading
+import time
+try:
+    import grp
+except ImportError:
+    # For Pulsar on Windows (which does not use the function that uses grp)
+    grp = None
+
+from datetime import datetime
+from hashlib import md5
+from os.path import normpath, relpath
+from xml.etree import ElementInclude, ElementTree
+from xml.etree.ElementTree import ParseError
+
+from six import binary_type, iteritems, string_types, text_type
+from six.moves import email_mime_multipart, email_mime_text, xrange, zip
+from six.moves.urllib import (
+    parse as urlparse,
+    request as urlrequest
+)
+from six.moves.urllib.request import urlopen
+
+try:
+    import docutils.core as docutils_core
+    import docutils.writers.html4css1 as docutils_html4css1
+except ImportError:
+    docutils_core = None
+    docutils_html4css1 = None
+
+from .inflection import English, Inflector
+
+inflector = Inflector(English)
+
+log = logging.getLogger(__name__)
+_lock = threading.RLock()
+
+CHUNK_SIZE = 65536  # 64k
+
+DATABASE_MAX_STRING_SIZE = 32768
+DATABASE_MAX_STRING_SIZE_PRETTY = '32K'
+
+gzip_magic = '\037\213'
+bz2_magic = 'BZh'
+DEFAULT_ENCODING = os.environ.get('GALAXY_DEFAULT_ENCODING', 'utf-8')
+NULL_CHAR = '\000'
+BINARY_CHARS = [ NULL_CHAR ]
+FILENAME_VALID_CHARS = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+
+
+def remove_protocol_from_url( url ):
+    """ Supplied URL may be null; if not, ensure any protocol prefix
+    (http://, https://, etc.) and any trailing slash are stripped off.
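+
+    >>> remove_protocol_from_url( 'http://galaxyproject.org/' )
+    'galaxyproject.org'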
+    """
+    if url is None:
+        return url
+
+    # We have a URL
+    if url.find( '://' ) > 0:
+        new_url = url.split( '://' )[1]
+    else:
+        new_url = url
+    return new_url.rstrip( '/' )
+
+
+def is_binary( value, binary_chars=None ):
+    """
+    File is binary if it contains a null-byte by default (e.g. behavior of grep, etc.).
+    This may fail for utf-16 files, but so would ASCII encoding.
+    >>> is_binary( string.printable )
+    False
+    >>> is_binary( '\\xce\\x94' )
+    False
+    >>> is_binary( '\\000' )
+    True
+    """
+    if binary_chars is None:
+        binary_chars = BINARY_CHARS
+    for binary_char in binary_chars:
+        if binary_char in value:
+            return True
+    return False
+
+
+def is_uuid( value ):
+    """
+    This method returns True if value is a UUID, otherwise False.
+    >>> is_uuid( "123e4567-e89b-12d3-a456-426655440000" )
+    True
+    >>> is_uuid( "0x3242340298902834" )
+    False
+    """
+    uuid_re = re.compile( "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" )
+    if re.match( uuid_re, str( value ) ):
+        return True
+    else:
+        return False
+
+
+def directory_hash_id( id ):
+    """
+
+    >>> directory_hash_id( 100 )
+    ['000']
+    >>> directory_hash_id( "90000" )
+    ['090']
+    >>> directory_hash_id("777777777")
+    ['000', '777', '777']
+    >>> directory_hash_id("135ee48a-4f51-470c-ae2f-ce8bd78799e6")
+    ['1', '3', '5']
+    """
+    s = str( id )
+    l = len( s )
+    # Shortcut -- ids 0-999 go under ../000/
+    if l < 4:
+        return [ "000" ]
+    if not is_uuid(s):
+        # Pad with zeros until a multiple of three
+        padded = ( ( 3 - len( s ) % 3 ) * "0" ) + s
+        # Drop the last three digits -- 1000 files per directory
+        padded = padded[:-3]
+        # Break into chunks of three
+        return [ padded[ i * 3:(i + 1 ) * 3 ] for i in range( len( padded ) // 3 ) ]
+    else:
+        # assume it is a UUID
+        return list(iter(s[0:3]))
+
+
+def get_charset_from_http_headers( headers, default=None ):
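+    """Pull the ``charset`` token out of an HTTP ``content-type`` header,
+    returning ``default`` when no charset is present.
+
+    >>> get_charset_from_http_headers( { 'content-type': 'text/html; charset=utf-8' } )
+    'utf-8'
+    >>> get_charset_from_http_headers( {}, default='latin-1' )
+    'latin-1'
+    """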
+    rval = headers.get('content-type', None )
+    if rval and 'charset=' in rval:
+        rval = rval.split('charset=')[-1].split(';')[0].strip()
+        if rval:
+            return rval
+    return default
+
+
+def synchronized(func):
+    """This wrapper will serialize access to 'func' to a single thread. Use it as a decorator."""
+    def caller(*params, **kparams):
+        _lock.acquire(True)  # Wait
+        try:
+            return func(*params, **kparams)
+        finally:
+            _lock.release()
+    return caller
+
+
+def file_iter(fname, sep=None):
+    """
+    This generator iterates over a file and yields its lines
+    split via the C{sep} parameter. Skips empty lines and lines starting with
+    the C{#} character.
+
+    >>> lines = [ line for line in file_iter(__file__) ]
+    >>> len(lines) !=  0
+    True
+    """
+    for line in open(fname):
+        if line and line[0] != '#':
+            yield line.split(sep)
+
+
+def file_reader( fp, chunk_size=CHUNK_SIZE ):
+    """This generator yields the open fileobject in chunks (default 64k). Closes the file at the end"""
+    while True:
+        data = fp.read(chunk_size)
+        if not data:
+            break
+        yield data
+    fp.close()
+
+
+def unique_id(KEY_SIZE=128):
+    """
+    Generates a unique id
+
+    >>> ids = [ unique_id() for i in range(1000) ]
+    >>> len(set(ids))
+    1000
+    """
+    random_bits = text_type(random.getrandbits(KEY_SIZE)).encode("UTF-8")
+    return md5(random_bits).hexdigest()
+
+
+def parse_xml( fname ):
+    """Returns a parsed xml tree"""
+    # handle deprecation warning for XML-parsing a file with a DOCTYPE
+    class DoctypeSafeCallbackTarget( ElementTree.TreeBuilder ):
+        def doctype( *args ):
+            pass
+    tree = ElementTree.ElementTree()
+    try:
+        root = tree.parse( fname, parser=ElementTree.XMLParser( target=DoctypeSafeCallbackTarget() ) )
+    except ParseError:
+        log.exception("Error parsing file %s", fname)
+        raise
+    ElementInclude.include( root )
+    return tree
+
+
+def parse_xml_string(xml_string):
+    tree = ElementTree.fromstring(xml_string)
+    return tree
+
+
+def xml_to_string( elem, pretty=False ):
+    """Returns a string from an xml tree"""
+    if pretty:
+        elem = pretty_print_xml( elem )
+    try:
+        return ElementTree.tostring( elem )
+    except TypeError as e:
+        # we assume this is a comment
+        if hasattr( elem, 'text' ):
+            return "<!-- %s -->\n" % ( elem.text )
+        else:
+            raise e
+
+
+def xml_element_compare( elem1, elem2 ):
+    if not isinstance( elem1, dict ):
+        elem1 = xml_element_to_dict( elem1 )
+    if not isinstance( elem2, dict ):
+        elem2 = xml_element_to_dict( elem2 )
+    return elem1 == elem2
+
+
+def xml_element_list_compare( elem_list1, elem_list2 ):
+    return [ xml_element_to_dict( elem ) for elem in elem_list1  ] == [ xml_element_to_dict( elem ) for elem in elem_list2  ]
+
+
+def xml_element_to_dict( elem ):
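+    """Convert an XML element (with attributes and children) to a nested dict.
+
+    >>> elem = parse_xml_string( '<root r="0"><a x="1">text</a></root>' )
+    >>> xml_element_to_dict( elem ) == {'root': {'@r': '0', 'a': {'@x': '1', '#text': 'text'}}}
+    True
+    """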
+    rval = {}
+    # Initialize to a dict whenever there is content to nest (attributes or
+    # children); a childless, attribute-less element maps to None (or its text).
+    if elem.attrib or len( elem ):
+        rval[ elem.tag ] = {}
+    else:
+        rval[ elem.tag ] = None
+
+    sub_elems = list( elem )
+    if sub_elems:
+        sub_elem_dict = dict()
+        for sub_sub_elem_dict in map( xml_element_to_dict, sub_elems ):
+            for key, value in iteritems(sub_sub_elem_dict):
+                if key not in sub_elem_dict:
+                    sub_elem_dict[ key ] = []
+                sub_elem_dict[ key ].append( value )
+        for key, value in iteritems(sub_elem_dict):
+            if len( value ) == 1:
+                rval[ elem.tag ][ key ] = value[0]
+            else:
+                rval[ elem.tag ][ key ] = value
+    if elem.attrib:
+        for key, value in iteritems(elem.attrib):
+            rval[ elem.tag ][ "@%s" % key ] = value
+
+    if elem.text:
+        text = elem.text.strip()
+        if text and sub_elems or elem.attrib:
+            rval[ elem.tag ][ '#text' ] = text
+        else:
+            rval[ elem.tag ] = text
+
+    return rval
+
+
+def pretty_print_xml( elem, level=0 ):
+    pad = '    '
+    i = "\n" + level * pad
+    if len( elem ):
+        if not elem.text or not elem.text.strip():
+            elem.text = i + pad + pad
+        if not elem.tail or not elem.tail.strip():
+            elem.tail = i
+        for e in elem:
+            pretty_print_xml( e, level + 1 )
+        if not elem.tail or not elem.tail.strip():
+            elem.tail = i
+    else:
+        if level and ( not elem.tail or not elem.tail.strip() ):
+            elem.tail = i + pad
+    return elem
+
+
+def get_file_size( value, default=None ):
+    try:
+        # try built-in
+        return os.path.getsize( value )
+    except Exception:
+        try:
+            # try built-in via the name attribute
+            return os.path.getsize( value.name )
+        except Exception:
+            try:
+                # try tell() at the end of the object
+                offset = value.tell()
+                value.seek( 0, 2 )
+                rval = value.tell()
+                value.seek( offset )
+                return rval
+            except Exception:
+                # return default value
+                return default
+
+
+def shrink_stream_by_size( value, size, join_by="..", left_larger=True, beginning_on_size_error=False, end_on_size_error=False ):
+    rval = ''
+    if get_file_size( value ) > size:
+        start = value.tell()
+        len_join_by = len( join_by )
+        min_size = len_join_by + 2
+        if size < min_size:
+            if beginning_on_size_error:
+                rval = value.read( size )
+                value.seek( start )
+                return rval
+            elif end_on_size_error:
+                value.seek( -size, 2 )
+                rval = value.read( size )
+                value.seek( start )
+                return rval
+            raise ValueError( 'With the provided join_by value (%s), the minimum size value is %i.' % ( join_by, min_size ) )
+        left_index = right_index = int( ( size - len_join_by ) / 2 )
+        if left_index + right_index + len_join_by < size:
+            if left_larger:
+                left_index += 1
+            else:
+                right_index += 1
+        rval = value.read( left_index ) + join_by
+        value.seek( -right_index, 2 )
+        rval += value.read( right_index )
+    else:
+        while True:
+            data = value.read( CHUNK_SIZE )
+            if not data:
+                break
+            rval += data
+    return rval
+
+
+def shrink_string_by_size( value, size, join_by="..", left_larger=True, beginning_on_size_error=False, end_on_size_error=False ):
+    if len( value ) > size:
+        len_join_by = len( join_by )
+        min_size = len_join_by + 2
+        if size < min_size:
+            if beginning_on_size_error:
+                return value[:size]
+            elif end_on_size_error:
+                return value[-size:]
+            raise ValueError( 'With the provided join_by value (%s), the minimum size value is %i.' % ( join_by, min_size ) )
+        left_index = right_index = int( ( size - len_join_by ) / 2 )
+        if left_index + right_index + len_join_by < size:
+            if left_larger:
+                left_index += 1
+            else:
+                right_index += 1
+        value = "%s%s%s" % ( value[:left_index], join_by, value[-right_index:] )
+    return value
+
+
+def pretty_print_time_interval( time=False, precise=False ):
+    """
+    Get a datetime object or a int() Epoch timestamp and return a
+    pretty string like 'an hour ago', 'Yesterday', '3 months ago',
+    'just now', etc
+    credit: http://stackoverflow.com/questions/1551382/user-friendly-time-format-in-python
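+
+    >>> pretty_print_time_interval( datetime.now() )
+    'today'
+    >>> pretty_print_time_interval( datetime.now(), precise=True )
+    'just now'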
+    """
+    now = datetime.now()
+    if type( time ) is int:
+        diff = now - datetime.fromtimestamp( time )
+    elif isinstance( time, datetime ):
+        diff = now - time
+    elif isinstance( time, string_types ):
+        try:
+            time = datetime.strptime( time, "%Y-%m-%dT%H:%M:%S.%f" )
+        except ValueError:
+            # MySQL may not support microseconds precision
+            time = datetime.strptime( time, "%Y-%m-%dT%H:%M:%S" )
+        diff = now - time
+    else:
+        diff = now - now
+    second_diff = diff.seconds
+    day_diff = diff.days
+
+    if day_diff < 0:
+        return ''
+
+    if precise:
+        if day_diff == 0:
+            if second_diff < 10:
+                return "just now"
+            if second_diff < 60:
+                return str(second_diff) + " seconds ago"
+            if second_diff < 120:
+                return "a minute ago"
+            if second_diff < 3600:
+                return str(second_diff // 60) + " minutes ago"
+            if second_diff < 7200:
+                return "an hour ago"
+            if second_diff < 86400:
+                return str(second_diff // 3600) + " hours ago"
+        if day_diff == 1:
+            return "yesterday"
+        if day_diff < 7:
+            return str( day_diff ) + " days ago"
+        if day_diff < 31:
+            return str( day_diff // 7 ) + " weeks ago"
+        if day_diff < 365:
+            return str( day_diff // 30 ) + " months ago"
+        return str( day_diff // 365 ) + " years ago"
+    else:
+        if day_diff == 0:
+            return "today"
+        if day_diff == 1:
+            return "yesterday"
+        if day_diff < 7:
+            return "less than a week"
+        if day_diff < 31:
+            return "less than a month"
+        if day_diff < 365:
+            return "less than a year"
+        return "a few years ago"
+
+
+def pretty_print_json(json_data, is_json_string=False):
+    if is_json_string:
+        json_data = json.loads(json_data)
+    return json.dumps(json_data, sort_keys=True, indent=4)
+
+
+# characters that are valid
+valid_chars = set(string.ascii_letters + string.digits + " -=_.()/+*^,:?!")
+
+# characters that are allowed but need to be escaped
+mapped_chars = { '>': '__gt__',
+                 '<': '__lt__',
+                 "'": '__sq__',
+                 '"': '__dq__',
+                 '[': '__ob__',
+                 ']': '__cb__',
+                 '{': '__oc__',
+                 '}': '__cc__',
+                 '@': '__at__',
+                 '\n': '__cn__',
+                 '\r': '__cr__',
+                 '\t': '__tc__',
+                 '#': '__pd__'}
+
+
+def restore_text( text, character_map=mapped_chars ):
+    """Restores sanitized text"""
+    if not text:
+        return text
+    for key, value in character_map.items():
+        text = text.replace(value, key)
+    return text
+
+
+def sanitize_text( text, valid_characters=valid_chars, character_map=mapped_chars, invalid_character='X' ):
+    """
+    Restricts the characters that are allowed in text; accepts both strings
+    and lists of strings; non-string entities will be cast to strings.
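+
+    >>> sanitize_text( 'alpha' )
+    'alpha'
+    >>> sanitize_text( 'a<b' )
+    'a__lt__b'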
+    """
+    if isinstance( text, list ):
+        return [ sanitize_text( x, valid_characters=valid_characters, character_map=character_map, invalid_character=invalid_character ) for x in text ]
+    if not isinstance( text, string_types ):
+        text = smart_str( text )
+    return _sanitize_text_helper( text, valid_characters=valid_characters, character_map=character_map, invalid_character=invalid_character )
+
+
+def _sanitize_text_helper( text, valid_characters=valid_chars, character_map=mapped_chars, invalid_character='X' ):
+    """Restricts the characters that are allowed in a string"""
+
+    out = []
+    for c in text:
+        if c in valid_characters:
+            out.append(c)
+        elif c in character_map:
+            out.append( character_map[c] )
+        else:
+            out.append( invalid_character )  # makes debugging easier
+    return ''.join(out)
+
+
+def sanitize_lists_to_string( values, valid_characters=valid_chars, character_map=mapped_chars, invalid_character='X'  ):
+    if isinstance( values, list ):
+        rval = []
+        for value in values:
+            rval.append( sanitize_lists_to_string( value,
+                                                   valid_characters=valid_characters,
+                                                   character_map=character_map,
+                                                   invalid_character=invalid_character ) )
+        values = ",".join( rval )
+    else:
+        values = sanitize_text( values, valid_characters=valid_characters, character_map=character_map, invalid_character=invalid_character )
+    return values
+
+
+def sanitize_param( value, valid_characters=valid_chars, character_map=mapped_chars, invalid_character='X' ):
+    """Clean incoming parameters (strings or lists)"""
+    if isinstance( value, string_types ):
+        return sanitize_text( value, valid_characters=valid_characters, character_map=character_map, invalid_character=invalid_character )
+    elif isinstance( value, list ):
+        return [ sanitize_text( x, valid_characters=valid_characters, character_map=character_map, invalid_character=invalid_character ) for x in value ]
+    else:
+        raise Exception('Unknown parameter type (%s)' % ( type( value ) ))
+
+
+valid_filename_chars = set( string.ascii_letters + string.digits + '_.' )
+invalid_filenames = [ '', '.', '..' ]
+
+
+def sanitize_for_filename( text, default=None ):
+    """
+    Restricts the characters that are allowed in a filename portion; returns the default value or a unique id string if the result is not a valid name.
+    Method is overly aggressive to minimize possible complications, but a maximum length is not considered.
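+
+    >>> sanitize_for_filename( 'My Cool Object!' )
+    'My_Cool_Object_'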
+    """
+    out = []
+    for c in text:
+        if c in valid_filename_chars:
+            out.append( c )
+        else:
+            out.append( '_' )
+    out = ''.join( out )
+    if out in invalid_filenames:
+        if default is None:
+            return sanitize_for_filename( str( unique_id() ) )
+        return default
+    return out
+
+
+def mask_password_from_url( url ):
+    """
+    Masks out passwords from connection urls like the database connection in galaxy.ini
+
+    >>> mask_password_from_url( 'sqlite+postgresql://user:password@localhost/' )
+    'sqlite+postgresql://user:********@localhost/'
+    >>> mask_password_from_url( 'amqp://user:amqp@localhost' )
+    'amqp://user:********@localhost'
+    >>> mask_password_from_url( 'amqp://localhost')
+    'amqp://localhost'
+    """
+    split = urlparse.urlsplit(url)
+    if split.password:
+        if url.count(split.password) == 1:
+            url = url.replace(split.password, "********")
+        else:
+            # This can manipulate the input other than just masking password,
+            # so the previous string replace method is preferred when the
+            # password doesn't appear twice in the url
+            split = split._replace(netloc=split.netloc.replace("%s:%s" % (split.username, split.password), '%s:********' % split.username))
+            url = urlparse.urlunsplit(split)
+    return url
+
+
+def ready_name_for_url( raw_name ):
+    u""" General method to convert a string (i.e. object name) to a URL-ready
+    slug.
+
+    >>> ready_name_for_url( "My Cool Object" )
+    'My-Cool-Object'
+    >>> ready_name_for_url( "!My Cool Object!" )
+    'My-Cool-Object'
+    >>> ready_name_for_url( "Hello₩◎ґʟⅾ" )
+    'Hello'
+    """
+
+    # Replace whitespace with '-'
+    slug_base = re.sub( "\s+", "-", raw_name )
+    # Remove all non-alphanumeric characters.
+    slug_base = re.sub( "[^a-zA-Z0-9\-]", "", slug_base )
+    # Remove trailing '-'.
+    if slug_base.endswith('-'):
+        slug_base = slug_base[:-1]
+    return slug_base
+
+
+def which(file):
+    # http://stackoverflow.com/questions/5226958/which-equivalent-function-in-python
+    for path in os.environ["PATH"].split(":"):
+        if os.path.exists(path + "/" + file):
+                return path + "/" + file
+
+    return None
+
+
+def safe_makedirs(path):
+    """ Safely make a directory, do not fail if it already exist or
+    is created during execution.
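+
+    >>> import os, tempfile
+    >>> path = os.path.join( tempfile.mkdtemp(), 'a', 'b' )
+    >>> safe_makedirs( path )
+    >>> safe_makedirs( path )
+    >>> os.path.isdir( path )
+    True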
+    """
+    if not os.path.exists(path):
+        try:
+            os.makedirs(path)
+        except OSError as e:
+            # Reviewing the source for Python 2.7, this would only ever happen
+            # for the last path element anyway, so there is no need to recurse --
+            # this exception means the last part of the path already existed.
+            if e.errno != errno.EEXIST:
+                raise
+
+
+def in_directory( file, directory, local_path_module=os.path ):
+    """
+    Return True if the common prefix of both paths is equal to directory,
+    e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
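+
+    >>> in_directory( '/a/b/c/d.rst', '/a/b' )
+    True
+    >>> in_directory( '/e/f.rst', '/a/b' )
+    False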
+    """
+
+    # Make both absolute.
+    directory = local_path_module.abspath(directory)
+    file = local_path_module.abspath(file)
+    return local_path_module.commonprefix([file, directory]) == directory
+
+
+def merge_sorted_iterables( operator, *iterables ):
+    """
+
+    >>> operator = lambda x: x
+    >>> list( merge_sorted_iterables( operator, [1,2,3], [4,5] ) )
+    [1, 2, 3, 4, 5]
+    >>> list( merge_sorted_iterables( operator, [4, 5], [1,2,3] ) )
+    [1, 2, 3, 4, 5]
+    >>> list( merge_sorted_iterables( operator, [1, 4, 5], [2], [3] ) )
+    [1, 2, 3, 4, 5]
+    """
+    first_iterable = iterables[ 0 ]
+    if len( iterables ) == 1:
+        for el in first_iterable:
+            yield el
+    else:
+        for el in __merge_two_sorted_iterables(
+            operator,
+            iter( first_iterable ),
+            merge_sorted_iterables( operator, *iterables[ 1: ] )
+        ):
+            yield el
+
+
+def __merge_two_sorted_iterables( operator, iterable1, iterable2 ):
+    unset = object()
+    continue_merge = True
+    next_1 = unset
+    next_2 = unset
+    while continue_merge:
+        try:
+            if next_1 is unset:
+                next_1 = next( iterable1 )
+            if next_2 is unset:
+                next_2 = next( iterable2 )
+            if operator( next_2 ) < operator( next_1 ):
+                yield next_2
+                next_2 = unset
+            else:
+                yield next_1
+                next_1 = unset
+        except StopIteration:
+            continue_merge = False
+    if next_1 is not unset:
+        yield next_1
+    if next_2 is not unset:
+        yield next_2
+    for el in iterable1:
+        yield el
+    for el in iterable2:
+        yield el
+
+
+class Params( object ):
+    """
+    Stores and 'sanitizes' parameters. Alphanumeric characters and the
+    non-alphanumeric ones that are deemed safe are allowed to pass through (see L{valid_chars}).
+    Some non-safe characters are escaped to safe forms, for example C{>} becomes C{__gt__}
+    (see L{mapped_chars}). All other characters are replaced with C{X}.
+
+    Operates on string or list values only (HTTP parameters).
+
+    >>> values = { 'status':'on', 'symbols':[  'alpha', '<>', '$rm&#!' ]  }
+    >>> par = Params(values)
+    >>> par.status
+    'on'
+    >>> par.value == None      # missing attributes return None
+    True
+    >>> par.get('price', 0)
+    0
+    >>> par.symbols            # replaces unknown symbols with X
+    ['alpha', '__lt____gt__', 'XrmX__pd__!']
+    >>> sorted(par.flatten())  # flattening to a list
+    [('status', 'on'), ('symbols', 'XrmX__pd__!'), ('symbols', '__lt____gt__'), ('symbols', 'alpha')]
+    """
+
+    # is NEVER_SANITIZE required now that sanitizing for tool parameters can be controlled on a per parameter basis and occurs via InputValueWrappers?
+    NEVER_SANITIZE = ['file_data', 'url_paste', 'URL', 'filesystem_paths']
+
+    def __init__( self, params, sanitize=True ):
+        if sanitize:
+            for key, value in params.items():
+                # sanitize check both ungrouped and grouped parameters by
+                # name. Anything relying on NEVER_SANITIZE should be
+                # changed to not require this and NEVER_SANITIZE should be
+                # removed.
+                if (value is not None and
+                    key not in self.NEVER_SANITIZE and
+                    True not in [ key.endswith( "|%s" % nonsanitize_parameter ) for
+                                  nonsanitize_parameter in self.NEVER_SANITIZE ]):
+                        self.__dict__[ key ] = sanitize_param( value )
+                else:
+                    self.__dict__[ key ] = value
+        else:
+            self.__dict__.update(params)
+
+    def flatten(self):
+        """
+        Creates a tuple list from a dict with a tuple/value pair for every value that is a list
+        """
+        flat = []
+        for key, value in self.__dict__.items():
+            if isinstance(value, list):
+                for v in value:
+                    flat.append( (key, v) )
+            else:
+                flat.append( (key, value) )
+        return flat
+
+    def __getattr__(self, name):
+        """This is here to ensure that we get None for non existing parameters"""
+        return None
+
+    def get(self, key, default):
+        return self.__dict__.get(key, default)
+
+    def __str__(self):
+        return '%s' % self.__dict__
+
+    def __len__(self):
+        return len(self.__dict__)
+
+    def __iter__(self):
+        return iter(self.__dict__)
+
+    def update(self, values):
+        self.__dict__.update(values)
+
+
+def rst_to_html( s ):
+    """Convert a blob of reStructuredText to HTML"""
+    log = logging.getLogger( "docutils" )
+
+    if docutils_core is None:
+        raise Exception("Attempted to use rst_to_html but docutils unavailable.")
+
+    class FakeStream( object ):
+        def write( self, str ):
+            if len( str ) > 0 and not str.isspace():
+                log.warning( str )
+
+    settings_overrides = {
+        "embed_stylesheet": False,
+        "template": os.path.join(os.path.dirname(__file__), "docutils_template.txt"),
+        "warning_stream": FakeStream(),
+        "doctitle_xform": False,  # without option, very different rendering depending on
+                                  # number of sections in help content.
+    }
+
+    return unicodify( docutils_core.publish_string( s,
+                      writer=docutils_html4css1.Writer(),
+                      settings_overrides=settings_overrides ) )
+
+
+def xml_text(root, name=None):
+    """Returns the text inside an element"""
+    if name is not None:
+        # Try attribute first
+        val = root.get(name)
+        if val:
+            return val
+        # Then try as element
+        elem = root.find(name)
+    else:
+        elem = root
+    if elem is not None and elem.text:
+        text = ''.join(elem.text.splitlines())
+        return text.strip()
+    # No luck, return empty string
+    return ''
+
+
+# asbool implementation pulled from PasteDeploy
+truthy = frozenset(['true', 'yes', 'on', 'y', 't', '1'])
+falsy = frozenset(['false', 'no', 'off', 'n', 'f', '0'])
+
+
+def asbool(obj):
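+    """Coerce common string representations of truth to a boolean.
+
+    >>> asbool( 'yes' )
+    True
+    >>> asbool( 'off' )
+    False
+    """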
+    if isinstance(obj, string_types):
+        obj = obj.strip().lower()
+        if obj in truthy:
+            return True
+        elif obj in falsy:
+            return False
+        else:
+            raise ValueError("String is not true/false: %r" % obj)
+    return bool(obj)
+
+
+def string_as_bool( string ):
+    if str( string ).lower() in ( 'true', 'yes', 'on', '1' ):
+        return True
+    else:
+        return False
+
+
+def string_as_bool_or_none( string ):
+    """
+    Returns True, None or False based on the argument:
+        True if passed True, 'True', 'Yes', or 'On'
+        None if passed None or 'None'
+        False otherwise
+
+    Note: string comparison is case-insensitive so lowercase versions of those
+    function equivalently.
+    """
+    string = str( string ).lower()
+    if string in ( 'true', 'yes', 'on' ):
+        return True
+    elif string == 'none':
+        return None
+    else:
+        return False
+
+
+def listify( item, do_strip=False ):
+    """
+    Make a single item a single item list, or return a list if passed a
+    list.  Passing a None returns an empty list.
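+
+    >>> listify( 'a,b' )
+    ['a', 'b']
+    >>> listify( 'a, b', do_strip=True )
+    ['a', 'b']
+    >>> listify( None )
+    []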
+    """
+    if not item:
+        return []
+    elif isinstance( item, list ):
+        return item
+    elif isinstance( item, string_types ) and item.count( ',' ):
+        if do_strip:
+            return [token.strip() for token in item.split( ',' )]
+        else:
+            return item.split( ',' )
+    else:
+        return [ item ]
+
+
+def commaify(amount):
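+    """Insert commas as thousands separators into a numeric string.
+
+    >>> commaify( '1234567' )
+    '1,234,567'
+    >>> commaify( '-1000' )
+    '-1,000'
+    """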
+    orig = amount
+    new = re.sub("^(-?\d+)(\d{3})", '\g<1>,\g<2>', amount)
+    if orig == new:
+        return new
+    else:
+        return commaify(new)
+
+
+def roundify(amount, sfs=2):
+    """
+    Take a number in string form and truncate to 'sfs' significant figures.
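+
+    >>> roundify( '12345' )
+    '12000'
+    >>> roundify( '99' )
+    '99'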
+    """
+    if len(amount) <= sfs:
+        return amount
+    else:
+        return amount[0:sfs] + '0' * (len(amount) - sfs)
+
+
+def unicodify(value, encoding=DEFAULT_ENCODING, error='replace', default=None):
+    """
+    Returns a unicode string or None.
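+
+    >>> unicodify( b'abc' ) == u'abc'
+    True
+    >>> unicodify( 3 ) == u'3'
+    True
+    >>> unicodify( None ) is None
+    True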
+    """
+    if value is None:
+        return None
+    try:
+        if not isinstance(value, string_types) and not isinstance(value, binary_type):
+            # In Python 2, value is not an instance of basestring
+            # In Python 3, value is not an instance of bytes or str
+            value = str(value)
+        # Now in Python 2, value is an instance of basestring, but may be not unicode
+        # Now in Python 3, value is an instance of bytes or str
+        if not isinstance(value, text_type):
+            value = text_type(value, encoding, error)
+    except Exception:
+        log.exception("value %s could not be coerced to unicode" % value)
+        return default
+    return value
+
+
+def smart_str(s, encoding=DEFAULT_ENCODING, strings_only=False, errors='strict'):
+    u"""
+    Returns a bytestring version of 's', encoded as specified in 'encoding'.
+
+    If strings_only is True, don't convert (some) non-string-like objects.
+
+    Adapted from an older, simpler version of django.utils.encoding.smart_str.
+
+    >>> assert smart_str(None) == b'None'
+    >>> assert smart_str(None, strings_only=True) is None
+    >>> assert smart_str(3) == b'3'
+    >>> assert smart_str(3, strings_only=True) == 3
+    >>> assert smart_str(b'a bytes string') == b'a bytes string'
+    >>> assert smart_str(u'a simple unicode string') == b'a simple unicode string'
+    >>> assert smart_str(u'à strange ünicode ڃtring') == b'\\xc3\\xa0 strange \\xc3\\xbcnicode \\xda\\x83tring'
+    >>> assert smart_str(b'\\xc3\\xa0n \\xc3\\xabncoded utf-8 string', encoding='latin-1') == b'\\xe0n \\xebncoded utf-8 string'
+    """
+    if strings_only and isinstance(s, (type(None), int)):
+        return s
+    if not isinstance(s, string_types) and not isinstance(s, binary_type):
+        # In Python 2, s is not an instance of basestring
+        # In Python 3, s is not an instance of bytes or str
+        s = str(s)
+    if not isinstance(s, binary_type):
+        return s.encode(encoding, errors)
+    elif s and encoding != DEFAULT_ENCODING:
+        return s.decode(DEFAULT_ENCODING, errors).encode(encoding, errors)
+    else:
+        return s
+
+
+def object_to_string( obj ):
+    return binascii.hexlify( obj )
+
+
+def string_to_object( s ):
+    return binascii.unhexlify( s )
+
+
+class ParamsWithSpecs( collections.defaultdict ):
+    """
+    """
+
+    def __init__( self, specs=None, params=None ):
+        self.specs = specs or dict()
+        self.params = params or dict()
+        for name, value in self.params.items():
+            if name not in self.specs:
+                self._param_unknown_error( name )
+            if 'map' in self.specs[ name ]:
+                try:
+                    self.params[ name ] = self.specs[ name ][ 'map' ]( value )
+                except Exception:
+                    self._param_map_error( name, value )
+            if 'valid' in self.specs[ name ]:
+                if not self.specs[ name ][ 'valid' ]( value ):
+                    self._param_vaildation_error( name, value )
+
+        self.update( self.params )
+
+    def __missing__( self, name ):
+        return self.specs[ name ][ 'default' ]
+
+    def __getattr__( self, name ):
+        return self[ name ]
+
+    def _param_unknown_error( self, name ):
+        raise NotImplementedError()
+
+    def _param_map_error( self, name, value ):
+        raise NotImplementedError()
+
+    def _param_vaildation_error( self, name, value ):
+        raise NotImplementedError()
+
+
+def compare_urls( url1, url2, compare_scheme=True, compare_hostname=True, compare_path=True ):
+    url1 = urlparse.urlparse( url1 )
+    url2 = urlparse.urlparse( url2 )
+    if compare_scheme and url1.scheme and url2.scheme and url1.scheme != url2.scheme:
+        return False
+    if compare_hostname and url1.hostname and url2.hostname and url1.hostname != url2.hostname:
+        return False
+    if compare_path and url1.path and url2.path and url1.path != url2.path:
+        return False
+    return True
+
+
+def read_dbnames(filename):
+    """ Read build names from file """
+    class DBNames( list ):
+        default_value = "?"
+        default_name = "unspecified (?)"
+    db_names = DBNames()
+    try:
+        ucsc_builds = {}
+        man_builds = []  # assume these are integers
+        name_to_db_base = {}
+        if filename is None:
+            # Should only be happening with the galaxy.tools.parameters.basic:GenomeBuildParameter docstring unit test
+            filename = os.path.join( 'tool-data', 'shared', 'ucsc', 'builds.txt.sample' )
+        for line in open(filename):
+            try:
+                if line[0:1] == "#":
+                    continue
+                fields = line.replace("\r", "").replace("\n", "").split("\t")
+                # Special case of unspecified build is at top of list
+                if fields[0] == "?":
+                    db_names.insert(0, (fields[0], fields[1]))
+                    continue
+                try:  # manual build (i.e. microbes)
+                    int(fields[0])
+                    man_builds.append((fields[1], fields[0]))
+                except ValueError:  # UCSC build
+                    db_base = fields[0].rstrip('0123456789')
+                    if db_base not in ucsc_builds:
+                        ucsc_builds[db_base] = []
+                        name_to_db_base[fields[1]] = db_base
+                    # we want to sort within a species numerically by revision number
+                    build_rev = re.compile(r'\d+$')
+                    try:
+                        build_rev = int(build_rev.findall(fields[0])[0])
+                    except IndexError:
+                        build_rev = 0
+                    ucsc_builds[db_base].append((build_rev, fields[0], fields[1]))
+            except Exception:
+                continue
+        sort_names = sorted(name_to_db_base.keys())
+        for name in sort_names:
+            db_base = name_to_db_base[name]
+            ucsc_builds[db_base].sort()
+            ucsc_builds[db_base].reverse()
+            ucsc_builds[db_base] = [(build, name) for _, build, name in ucsc_builds[db_base]]
+            db_names = DBNames( db_names + ucsc_builds[db_base] )
+        if len( db_names ) > 1 and len( man_builds ) > 0:
+            db_names.append( ( db_names.default_value, '----- Additional Species Are Below -----' ) )
+        man_builds.sort()
+        man_builds = [(build, name) for name, build in man_builds]
+        db_names = DBNames( db_names + man_builds )
+    except Exception as e:
+        log.error( "ERROR: Unable to read builds file: %s", e )
+    if len(db_names) < 1:
+        db_names = DBNames( [( db_names.default_value, db_names.default_name )] )
+    return db_names
+
+
+def read_build_sites( filename, check_builds=True ):
+    """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
+    build_sites = []
+    try:
+        for line in open(filename):
+            try:
+                if line[0:1] == "#":
+                    continue
+                fields = line.replace("\r", "").replace("\n", "").split("\t")
+                site_name = fields[0]
+                site = fields[1]
+                if check_builds:
+                    site_builds = fields[2].split(",")
+                    site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
+                else:
+                    site_dict = {'name': site_name, 'url': site}
+                build_sites.append( site_dict )
+            except Exception:
+                continue
+    except Exception:
+        log.error( "ERROR: Unable to read builds for site file %s", filename )
+    return build_sites
+
+
+def relativize_symlinks( path, start=None, followlinks=False):
+    for root, dirs, files in os.walk( path, followlinks=followlinks ):
+        rel_start = None
+        for file_name in files:
+            symlink_file_name = os.path.join( root, file_name )
+            if os.path.islink( symlink_file_name ):
+                symlink_target = os.readlink( symlink_file_name )
+                if rel_start is None:
+                    if start is None:
+                        rel_start = root
+                    else:
+                        rel_start = start
+                rel_path = relpath( symlink_target, rel_start )
+                os.remove( symlink_file_name )
+                os.symlink( rel_path, symlink_file_name )
+
+
+def stringify_dictionary_keys( in_dict ):
+    # returns a new dictionary
+    # changes unicode keys into strings, only works on top level (does not recurse)
+    # unicode keys are not valid for expansion into keyword arguments on method calls
+    out_dict = {}
+    for key, value in iteritems(in_dict):
+        out_dict[ str( key ) ] = value
+    return out_dict
+
+
+def recursively_stringify_dictionary_keys( d ):
+    if isinstance(d, dict):
+        return dict([(k.encode( DEFAULT_ENCODING ), recursively_stringify_dictionary_keys(v)) for k, v in iteritems(d)])
+    elif isinstance(d, list):
+        return [recursively_stringify_dictionary_keys(x) for x in d]
+    else:
+        return d
+
+
+def mkstemp_ln( src, prefix='mkstemp_ln_' ):
+    """
+    From tempfile._mkstemp_inner, generate a hard link in the same dir with a
+    random name.  Created so we can persist the underlying file of a
+    NamedTemporaryFile upon its closure.
+    """
+    dir = os.path.dirname(src)
+    names = tempfile._get_candidate_names()
+    for seq in xrange(tempfile.TMP_MAX):
+        name = next(names)
+        file = os.path.join(dir, prefix + name)
+        try:
+            os.link( src, file )
+            return (os.path.abspath(file))
+        except OSError as e:
+            if e.errno == errno.EEXIST:
+                continue  # try again
+            raise
+    raise IOError(errno.EEXIST, "No usable temporary file name found")
+
+
+def umask_fix_perms( path, umask, unmasked_perms, gid=None ):
+    """
+    umask-friendly permissions fixing
+    """
+    perms = unmasked_perms & ~umask
+    try:
+        st = os.stat( path )
+    except OSError:
+        log.exception( 'Unable to set permissions or group on %s', path )
+        return
+    # fix modes
+    if stat.S_IMODE( st.st_mode ) != perms:
+        try:
+            os.chmod( path, perms )
+        except Exception as e:
+            log.warning( 'Unable to honor umask (%s) for %s, tried to set: %s but mode remains %s, error was: %s' % ( oct( umask ),
+                                                                                                                      path,
+                                                                                                                      oct( perms ),
+                                                                                                                      oct( stat.S_IMODE( st.st_mode ) ),
+                                                                                                                      e ) )
+    # fix group
+    if gid is not None and st.st_gid != gid:
+        try:
+            os.chown( path, -1, gid )
+        except Exception as e:
+            try:
+                desired_group = grp.getgrgid( gid )
+                current_group = grp.getgrgid( st.st_gid )
+            except KeyError:
+                desired_group = gid
+                current_group = st.st_gid
+            log.warning( 'Unable to honor primary group (%s) for %s, group remains %s, error was: %s' % ( desired_group,
+                                                                                                          path,
+                                                                                                          current_group,
+                                                                                                          e ) )
+
+
+def docstring_trim(docstring):
+    """Trimming python doc strings. Taken from: http://www.python.org/dev/peps/pep-0257/"""
+    if not docstring:
+        return ''
+    # Convert tabs to spaces (following the normal Python rules)
+    # and split into a list of lines:
+    lines = docstring.expandtabs().splitlines()
+    # Determine minimum indentation (first line doesn't count):
+    indent = sys.maxsize
+    for line in lines[1:]:
+        stripped = line.lstrip()
+        if stripped:
+            indent = min(indent, len(line) - len(stripped))
+    # Remove indentation (first line is special):
+    trimmed = [lines[0].strip()]
+    if indent < sys.maxsize:
+        for line in lines[1:]:
+            trimmed.append(line[indent:].rstrip())
+    # Strip off trailing and leading blank lines:
+    while trimmed and not trimmed[-1]:
+        trimmed.pop()
+    while trimmed and not trimmed[0]:
+        trimmed.pop(0)
+    # Return a single string:
+    return '\n'.join(trimmed)
+
+
+def nice_size(size):
+    """
+    Returns a readably formatted string with the size
+
+    >>> nice_size(100)
+    '100 bytes'
+    >>> nice_size(10000)
+    '9.8 KB'
+    >>> nice_size(1000000)
+    '976.6 KB'
+    >>> nice_size(100000000)
+    '95.4 MB'
+    """
+    words = [ 'bytes', 'KB', 'MB', 'GB', 'TB' ]
+    prefix = ''
+    try:
+        size = float( size )
+        if size < 0:
+            size = abs( size )
+            prefix = '-'
+    except (TypeError, ValueError):
+        return '??? bytes'
+    for ind, word in enumerate(words):
+        step = 1024 ** (ind + 1)
+        if step > size:
+            size = size / float(1024 ** ind)
+            if word == 'bytes':  # No decimals for bytes
+                return "%s%d bytes" % ( prefix, size )
+            return "%s%.1f %s" % ( prefix, size, word )
+    return '??? bytes'
+
+
+def size_to_bytes( size ):
+    """
+    Returns a number of bytes if given a reasonably formatted string with the size
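+
+    >>> size_to_bytes( '1024' )
+    1024
+    >>> size_to_bytes( '10 MB' )
+    10485760
+    >>> size_to_bytes( '2.5k' )
+    2560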
+    """
+    # Assume input in bytes if we can convert directly to an int
+    try:
+        return int( size )
+    except ValueError:
+        pass
+    # Otherwise it must have non-numeric characters
+    size_re = re.compile( r'([\d\.]+)\s*([tgmk]b?|b|bytes?)$' )
+    size_match = re.match( size_re, size.lower() )
+    assert size_match is not None
+    size = float( size_match.group(1) )
+    multiple = size_match.group(2)
+    if multiple.startswith( 't' ):
+        return int( size * 1024 ** 4 )
+    elif multiple.startswith( 'g' ):
+        return int( size * 1024 ** 3 )
+    elif multiple.startswith( 'm' ):
+        return int( size * 1024 ** 2 )
+    elif multiple.startswith( 'k' ):
+        return int( size * 1024 )
+    elif multiple.startswith( 'b' ):
+        return int( size )
+
+
+def send_mail( frm, to, subject, body, config, html=None ):
+    """
+    Sends an email.
+
+    :type  frm: str
+    :param frm: from address
+
+    :type  to: str
+    :param to: to address
+
+    :type  subject: str
+    :param subject: Subject line
+
+    :type  body: str
+    :param body: Body text (should be plain text)
+
+    :type  config: object
+    :param config: Galaxy configuration object
+
+    :type  html: str
+    :param html: Alternative HTML representation of the body content. If
+                 provided will convert the message to a MIMEMultipart. (Default 'None')
+    """
+
+    to = listify( to )
+    if html:
+        msg = email_mime_multipart.MIMEMultipart('alternative')
+    else:
+        msg = email_mime_text.MIMEText(  body.encode( 'ascii', 'replace' ) )
+
+    msg[ 'To' ] = ', '.join( to )
+    msg[ 'From' ] = frm
+    msg[ 'Subject' ] = subject
+
+    if config.smtp_server is None:
+        log.error( "Mail is not configured for this Galaxy instance." )
+        log.info( msg )
+        return
+
+    if html:
+        mp_text = email_mime_text.MIMEText( body.encode( 'ascii', 'replace' ), 'plain' )
+        mp_html = email_mime_text.MIMEText( html.encode( 'ascii', 'replace' ), 'html' )
+        msg.attach(mp_text)
+        msg.attach(mp_html)
+
+    smtp_ssl = asbool( getattr(config, 'smtp_ssl', False ) )
+    if smtp_ssl:
+        s = smtplib.SMTP_SSL()
+    else:
+        s = smtplib.SMTP()
+    s.connect( config.smtp_server )
+    if not smtp_ssl:
+        try:
+            s.starttls()
+            log.debug( 'Initiated SSL/TLS connection to SMTP server: %s' % config.smtp_server )
+        except RuntimeError as e:
+            log.warning( 'SSL/TLS support is not available to your Python interpreter: %s' % e )
+        except smtplib.SMTPHeloError as e:
+            log.error( "The server didn't reply properly to the HELO greeting: %s" % e )
+            s.close()
+            raise
+        except smtplib.SMTPException as e:
+            log.warning( 'The server does not support the STARTTLS extension: %s' % e )
+    if config.smtp_username and config.smtp_password:
+        try:
+            s.login( config.smtp_username, config.smtp_password )
+        except smtplib.SMTPHeloError as e:
+            log.error( "The server didn't reply properly to the HELO greeting: %s" % e )
+            s.close()
+            raise
+        except smtplib.SMTPAuthenticationError as e:
+            log.error( "The server didn't accept the username/password combination: %s" % e )
+            s.close()
+            raise
+        except smtplib.SMTPException as e:
+            log.error( "No suitable authentication method was found: %s" % e )
+            s.close()
+            raise
+    s.sendmail( frm, to, msg.as_string() )
+    s.quit()
+
+
+def force_symlink( source, link_name ):
+    try:
+        os.symlink( source, link_name )
+    except OSError as e:
+        if e.errno == errno.EEXIST:
+            os.remove( link_name )
+            os.symlink( source, link_name )
+        else:
+            raise e
+
+
+def move_merge( source, target ):
+    # when using shutil and moving a directory, if the target exists,
+    # then the directory is placed inside of it
+    # if the target doesn't exist, then the target is made into the directory
+    # this makes it so that the target is always the target, and if it exists,
+    # the source contents are moved into the target
+    if os.path.isdir( source ) and os.path.exists( target ) and os.path.isdir( target ):
+        for name in os.listdir( source ):
+            move_merge( os.path.join( source, name ), os.path.join( target, name ) )
+    else:
+        return shutil.move( source, target )
+
+
+def safe_str_cmp(a, b):
+    """safely compare two strings in a timing-attack-resistant manner
+    """
+    if len(a) != len(b):
+        return False
+    rv = 0
+    for x, y in zip(a, b):
+        rv |= ord(x) ^ ord(y)
+    return rv == 0
+
+
+galaxy_root_path = os.path.join(__path__[0], "..", "..", "..")
+
+
+def galaxy_directory():
+    return os.path.abspath(galaxy_root_path)
+
+
+def config_directories_from_setting( directories_setting, galaxy_root=galaxy_root_path ):
+    """
+    Parse the ``directories_setting`` into a list of relative or absolute
+    filesystem paths that will be searched to discover plugins.
+
+    :type   galaxy_root:    string
+    :param  galaxy_root:    the root path of this galaxy installation
+    :type   directories_setting: string (default: None)
+    :param  directories_setting: the filesystem path (or paths)
+        to search for plugins. Can be a CSV string of paths. Will be treated as
+        absolute if a path starts with '/', relative otherwise.
+    :rtype:                 list of strings
+    :returns:               list of filesystem paths
+    """
+    directories = []
+    if not directories_setting:
+        return directories
+
+    for directory in listify( directories_setting ):
+        directory = directory.strip()
+        if not directory.startswith( '/' ):
+            directory = os.path.join( galaxy_root, directory )
+        if not os.path.exists( directory ):
+            log.warning( 'directory not found: %s', directory )
+            continue
+        directories.append( directory )
+    return directories
+
+
+def parse_int(value, min_val=None, max_val=None, default=None, allow_none=False):
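+    """Parse a value as an int, clamping it to the optional min/max bounds.
+
+    >>> parse_int( '7' )
+    7
+    >>> parse_int( '7', max_val=5 )
+    5
+    >>> parse_int( 'foo', default=5 )
+    5
+    """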
+    try:
+        value = int(value)
+        if min_val is not None and value < min_val:
+            return min_val
+        if max_val is not None and value > max_val:
+            return max_val
+        return value
+    except (TypeError, ValueError):
+        if allow_none:
+            if default is None or value == "None":
+                return None
+        if default is not None:
+            return default
+        else:
+            raise
+
+
+def parse_non_hex_float( s ):
+    """
+    Parse string `s` into a float but throw a `ValueError` if the string is in
+    the otherwise acceptable format `\d+e\d+` (e.g. 40000000000000e5.)
+
+    This can be passed into `json.loads` to prevent a hex string in the above
+    format from being incorrectly parsed as a float in scientific notation.
+
+    >>> parse_non_hex_float( '123.4' )
+    123.4
+    >>> parse_non_hex_float( '2.45e+3' )
+    2450.0
+    >>> parse_non_hex_float( '2.45e-3' )
+    0.00245
+    >>> parse_non_hex_float( '40000000000000e5' )
+    Traceback (most recent call last):
+        ...
+    ValueError: could not convert string to float: 40000000000000e5
+    """
+    f = float( s )
+    # successfully parsed as float if here - check for format in original string
+    if 'e' in s and not ( '+' in s or '-' in s ):
+        raise ValueError( 'could not convert string to float: ' + s )
+    return f
+
+
+def build_url( base_url, port=80, scheme='http', pathspec=None, params=None, doseq=False ):
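+    """Build a URL, appending extra path segments and query parameters.
+
+    >>> build_url( 'http://example.org/base', pathspec=['api', 'tools'], params={'q': '1'} )
+    'http://example.org/base/api/tools?q=1'
+    """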
+    if params is None:
+        params = dict()
+    if pathspec is None:
+        pathspec = []
+    parsed_url = urlparse.urlparse( base_url )
+    if scheme != 'http':
+        # ParseResult is an immutable namedtuple; use _replace to swap the scheme
+        parsed_url = parsed_url._replace( scheme=scheme )
+    if port != 80:
+        url = '%s://%s:%d/%s' % ( parsed_url.scheme, parsed_url.netloc.rstrip( '/' ), int( port ), parsed_url.path )
+    else:
+        url = '%s://%s/%s' % ( parsed_url.scheme, parsed_url.netloc.rstrip( '/' ), parsed_url.path.lstrip( '/' ) )
+    if len( pathspec ) > 0:
+        url = '%s/%s' % ( url.rstrip( '/' ), '/'.join( pathspec ) )
+    if parsed_url.query:
+        for query_parameter in parsed_url.query.split( '&' ):
+            key, value = query_parameter.split( '=' )
+            params[ key ] = value
+    if params:
+        url += '?%s' % urlparse.urlencode( params, doseq=doseq )
+    return url
+
+
+def url_get( base_url, password_mgr=None, pathspec=None, params=None ):
+    """Make contact with the uri provided and return any contents."""
+    # Uses system proxy settings if they exist.
+    proxy = urlrequest.ProxyHandler()
+    if password_mgr is not None:
+        auth = urlrequest.HTTPDigestAuthHandler( password_mgr )
+        urlopener = urlrequest.build_opener( proxy, auth )
+    else:
+        urlopener = urlrequest.build_opener( proxy )
+    urlrequest.install_opener( urlopener )
+    full_url = build_url( base_url, pathspec=pathspec, params=params )
+    response = urlopener.open( full_url )
+    content = response.read()
+    response.close()
+    return content
+
+
+def download_to_file(url, dest_file_path, timeout=30, chunk_size=2 ** 20):
+    """Download a URL to a file in chunks."""
+    src = urlopen(url, timeout=timeout)
+    try:
+        with open(dest_file_path, 'wb') as f:
+            while True:
+                chunk = src.read(chunk_size)
+                if not chunk:
+                    break
+                f.write(chunk)
+    finally:
+        src.close()
+
+
+def safe_relpath(path):
+    """
+    Given what we expect to be a relative path, determine whether the path
+    would exist inside the current directory.
+
+    :type   path:   string
+    :param  path:   a path to check
+    :rtype:         bool
+    :returns:       ``True`` if path is relative and does not reference a path
+        in a parent directory, ``False`` otherwise.
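+
+    >>> safe_relpath( 'a/b/c' )
+    True
+    >>> safe_relpath( '../a' )
+    False
+    >>> safe_relpath( '/a/b' )
+    False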
+    """
+    if path.startswith(os.sep) or normpath(path).startswith(os.pardir):
+        return False
+    return True
+
+
+class ExecutionTimer(object):
+
+    def __init__(self):
+        self.begin = time.time()
+
+    def __str__(self):
+        elapsed = (time.time() - self.begin) * 1000.0
+        return "(%0.3f ms)" % elapsed
+
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod(sys.modules[__name__], verbose=False)
diff --git a/lib/galaxy/util/aliaspickler.py b/lib/galaxy/util/aliaspickler.py
new file mode 100644
index 0000000..d375a6a
--- /dev/null
+++ b/lib/galaxy/util/aliaspickler.py
@@ -0,0 +1,30 @@
+import pickle
+from six.moves import cStringIO as StringIO
+
+
+class AliasUnpickler( pickle.Unpickler ):
+    def __init__( self, aliases, *args, **kw):
+        pickle.Unpickler.__init__( self, *args, **kw )
+        self.aliases = aliases
+
+    def find_class( self, module, name ):
+        module, name = self.aliases.get((module, name), (module, name))
+        return pickle.Unpickler.find_class( self, module, name )
+
+
+class AliasPickleModule( object ):
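+    """Pickle-module lookalike that remaps (module, name) pairs while unpickling.
+
+    Illustrative sketch (module and class names here are hypothetical): to load
+    pickles of a class that moved from mod_old to mod_new::
+
+        aliases = { ('mod_old', 'MyClass'): ('mod_new', 'MyClass') }
+        pickler = AliasPickleModule( aliases )
+        obj = pickler.loads( data )
+    """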
+    def __init__( self, aliases ):
+        self.aliases = aliases
+
+    def dump( self, obj, fileobj, protocol=0):
+        return pickle.dump( obj, fileobj, protocol )
+
+    def dumps( self, obj, protocol=0 ):
+        return pickle.dumps( obj, protocol )
+
+    def load( self, fileobj ):
+        return AliasUnpickler( self.aliases, fileobj ).load()
+
+    def loads( self, string ):
+        fileobj = StringIO( string )
+        return AliasUnpickler( self.aliases, fileobj ).load()
diff --git a/lib/galaxy/util/backports/__init__.py b/lib/galaxy/util/backports/__init__.py
new file mode 100644
index 0000000..18f846f
--- /dev/null
+++ b/lib/galaxy/util/backports/__init__.py
@@ -0,0 +1,3 @@
+"""
+Modules for providing backward compatibility with future versions of Python
+"""
diff --git a/lib/galaxy/util/biostar.py b/lib/galaxy/util/biostar.py
new file mode 100644
index 0000000..278e949
--- /dev/null
+++ b/lib/galaxy/util/biostar.py
@@ -0,0 +1,181 @@
+"""
+Support for integration with the Biostar application
+"""
+
+import hmac
+import logging
+import re
+import urlparse
+
+from unicodedata import normalize
+
+from six import text_type
+
+from galaxy.tools.errors import ErrorReporter
+from galaxy.web.base.controller import url_for
+
+from . import smart_str
+
+log = logging.getLogger( __name__ )
+
+_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
+
+DEFAULT_GALAXY_TAG = ''
+
+# Default values for new posts to Biostar
+DEFAULT_PAYLOAD = {
+    'title': '',
+    'tag_val': DEFAULT_GALAXY_TAG,
+    'content': '',
+}
+
+BIOSTAR_ACTIONS = {
+    None: { 'url': lambda x: '', 'uses_payload': False },
+    'new_post': { 'url': lambda x: 'p/new/external/post/', 'uses_payload': True, 'hmac_values': {'content': 'digest'} },
+    'show_tags': { 'url': lambda x: 't/%s/' % ( "+".join( ( x.get( 'tag_val' ) or DEFAULT_GALAXY_TAG ).split( ',' ) ) ), 'uses_payload': False },
+    'log_out': { 'url': lambda x: 'site/logout/', 'uses_payload': False }
+}
+
+DEFAULT_BIOSTAR_COOKIE_AGE = 1
+
+
+def biostar_enabled( app ):
+    return bool( app.config.biostar_url )
+
+
+# Slugifying from Armin Ronacher (http://flask.pocoo.org/snippets/5/)
+def slugify(text, delim=u'-'):
+    """Generates an slightly worse ASCII-only slug."""
+    if not isinstance( text, text_type ):
+        text = text_type( text )
+    result = []
+    for word in _punct_re.split(text.lower()):
+        word = normalize('NFKD', word).encode('ascii', 'ignore')
+        if word:
+            result.append(word)
+    return text_type(delim.join(result))
+
+
+def get_biostar_url( app, payload=None, biostar_action=None ):
+    # Ensure biostar integration is enabled
+    if not biostar_enabled( app ):
+        raise Exception( "Biostar integration is not enabled" )
+    if biostar_action not in BIOSTAR_ACTIONS:
+        raise Exception( "Invalid action specified (%s)." % ( biostar_action ) )
+    biostar_action = BIOSTAR_ACTIONS[ biostar_action ]
+    # Start building up the payload
+    payload = payload or {}
+    payload = dict( DEFAULT_PAYLOAD, **payload )
+    payload[ 'name' ] = app.config.biostar_key_name
+    for hmac_value_name, hmac_parameter_name in biostar_action.get( 'hmac_values', {} ).items():
+        # Biostar requires ascii only on HMAC'd things
+        payload[ hmac_value_name ] = smart_str( payload.get( hmac_value_name, '' ), encoding='ascii', errors='replace' )
+        payload[ hmac_parameter_name ] = hmac.new( app.config.biostar_key, payload[ hmac_value_name ] ).hexdigest()
+    # generate the url; the action's url builder may use payload info
+    url = str( urlparse.urljoin( app.config.biostar_url, biostar_action.get( 'url' )( payload ) ) )
+    if not biostar_action.get( 'uses_payload' ):
+        payload = {}
+    url = url_for( url )
+    return url, payload
+
+
+def tag_for_tool( tool ):
+    """
+    Generate a reasonable biostar tag for a tool.
+    """
+    # Biostar can now handle tags with spaces, do we want to generate tags differently now?
+    return slugify( tool.name, delim='-' )
+
+
+def populate_tag_payload( payload=None, tool=None ):
+    if payload is None:
+        payload = {}
+    if DEFAULT_GALAXY_TAG:
+        tag_val = [ DEFAULT_GALAXY_TAG ]
+    else:
+        tag_val = []
+    if tool:
+        tag_val.append( tag_for_tool( tool ) )
+    payload[ 'tag_val' ] = ','.join( tag_val )
+    return payload
+
+
+def populate_tool_payload( payload=None, tool=None ):
+    payload = populate_tag_payload( payload=payload, tool=tool )
+    payload[ 'title' ] = 'Need help with "%s" tool' % ( tool.name )
+    tool_url = None
+    if tool.tool_shed_repository:
+        tool_url = tool.tool_shed_repository.get_sharable_url( tool.app )
+        if tool_url:
+            tool_url = '</br>ToolShed URL: <a href="%s">%s</a>' % ( tool_url, tool_url )
+    if not tool_url:
+        tool_url = ''
+    payload[ 'content' ] = '<br /><hr /><p>Tool name: %s</br>Tool version: %s</br>Tool ID: %s%s</p></br>' % ( tool.name, tool.version, tool.id, tool_url )
+    return payload
+
+
+def determine_cookie_domain( galaxy_hostname, biostar_hostname ):
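+    """Pick the broadest cookie domain shared by the Galaxy and Biostar hosts.
+
+    >>> determine_cookie_domain( 'usegalaxy.org', 'biostar.usegalaxy.org' )
+    'usegalaxy.org'
+    """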
+    if galaxy_hostname == biostar_hostname:
+        return galaxy_hostname
+
+    sub_biostar_hostname = biostar_hostname.split( '.', 1 )[-1]
+    if sub_biostar_hostname == galaxy_hostname:
+        return galaxy_hostname
+
+    sub_galaxy_hostname = galaxy_hostname.split( '.', 1 )[-1]
+    if sub_biostar_hostname == sub_galaxy_hostname:
+        return sub_galaxy_hostname
+
+    return galaxy_hostname
+
+
+def create_cookie( trans, key_name, key, email, age=DEFAULT_BIOSTAR_COOKIE_AGE, override_never_authenticate=False ):
+    if trans.app.config.biostar_never_authenticate and not override_never_authenticate:
+        log.debug( 'A BioStar link was clicked, but never authenticate has been enabled, so we will not create the login cookie.' )
+        return
+    digest = hmac.new( key, email ).hexdigest()
+    value = "%s:%s" % (email, digest)
+    trans.set_cookie( value, name=key_name, path='/', age=age, version='1' )
+    # We need to explicitly set the domain here, in order to allow for biostar in a subdomain to work
+    galaxy_hostname = urlparse.urlsplit( url_for( '/', qualified=True ) ).hostname
+    biostar_hostname = urlparse.urlsplit( trans.app.config.biostar_url ).hostname
+    trans.response.cookies[ key_name ][ 'domain' ] = determine_cookie_domain( galaxy_hostname, biostar_hostname )
+
+
+def delete_cookie( trans, key_name ):
+    # Set the cookie's expiration to a time in the past so the browser deletes it
+    if key_name in trans.request.cookies:
+        create_cookie( trans, key_name, '', '', age=-90, override_never_authenticate=True )
+
+
+def biostar_logged_in( trans ):
+    if biostar_enabled( trans.app ):
+        if trans.app.config.biostar_key_name in trans.request.cookies:
+            return True
+    return False
+
+
+def biostar_logout( trans ):
+    if biostar_enabled( trans.app ):
+        delete_cookie( trans, trans.app.config.biostar_key_name )
+        return get_biostar_url( trans.app, biostar_action='log_out' )[0]
+    return None
+
+
+class BiostarErrorReporter( ErrorReporter ):
+    def _send_report( self, user, email=None, message=None, **kwd ):
+        assert biostar_enabled( self.app ), ValueError( "Biostar is not configured for this galaxy instance" )
+        assert self.app.config.biostar_enable_bug_reports, ValueError( "Biostar is not configured to allow bug reporting for this galaxy instance" )
+        assert self._can_access_dataset( user ), Exception( "You are not allowed to access this dataset." )
+        tool_version_select_field, tools, tool = \
+            self.app.toolbox.get_tool_components( self.tool_id, tool_version=None, get_loaded_tools_by_lineage=False, set_selected=True )
+        payload = { 'title': 'Bug report on "%s" tool' % ( tool.name ), 'content': self.report.replace( '\n', '<br />' ).replace( '\r', '' ), 'tag_val': slugify( 'bug report' ) }
+        # Get footer for email from here
+        payload2 = populate_tool_payload( tool=tool )
+        if 'content' in payload2:
+            payload[ 'content' ] = "%s<br />%s" % ( payload['content'], payload2['content'] )
+        if 'tag_val' in payload2:
+            payload[ 'tag_val' ] = ','.join( [ payload2[ 'tag_val' ], payload[ 'tag_val' ] ] )
+        if 'action' not in payload:
+            payload[ 'action' ] = 1  # Automatically post bug reports to biostar
+        return payload
diff --git a/lib/galaxy/util/bunch.py b/lib/galaxy/util/bunch.py
new file mode 100644
index 0000000..2b1e49e
--- /dev/null
+++ b/lib/galaxy/util/bunch.py
@@ -0,0 +1,39 @@
+class Bunch( object ):
+    """
+    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52308
+
+    Often we want to just collect a bunch of stuff together, naming each item of
+    the bunch; a dictionary's OK for that, but a small do-nothing class is even handier, and prettier to use.
+    """
+    def __init__(self, **kwds):
+        self.__dict__.update(kwds)
+
+    def dict(self):
+        return self.__dict__
+
+    def get(self, key, default=None):
+        return self.__dict__.get(key, default)
+
+    def __iter__(self):
+        return iter(self.__dict__)
+
+    def items(self):
+        return self.__dict__.items()
+
+    def keys(self):
+        return self.__dict__.keys()
+
+    def values(self):
+        return self.__dict__.values()
+
+    def __str__(self):
+        return '%s' % self.__dict__
+
+    def __nonzero__(self):
+        return bool(self.__dict__)
+
+    def __setitem__(self, k, v):
+        self.__dict__.__setitem__(k, v)
+
+    def __contains__(self, item):
+        return item in self.__dict__
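+
+
+# Usage sketch (values illustrative): attribute access, dict-style access and
+# iteration all operate on the same underlying __dict__:
+#
+#     settings = Bunch( host='localhost', port=8080 )
+#     settings.host               # -> 'localhost'
+#     settings.get( 'port', 80 )  # -> 8080
+#     'host' in settings          # -> True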
diff --git a/lib/galaxy/util/checkers.py b/lib/galaxy/util/checkers.py
new file mode 100644
index 0000000..85110e6
--- /dev/null
+++ b/lib/galaxy/util/checkers.py
@@ -0,0 +1,139 @@
+import bz2
+import gzip
+import re
+import zipfile
+
+from six import StringIO
+
+from galaxy import util
+from galaxy.util.image_util import image_type
+
+HTML_CHECK_LINES = 100
+
+
+def check_html( file_path, chunk=None ):
+    if chunk is None:
+        temp = open( file_path, "U" )
+    else:
+        temp = chunk
+    regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
+    regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
+    regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
+    regexp4 = re.compile( "<META[\W][^>]*>", re.I )
+    regexp5 = re.compile( "<SCRIPT[^>]*>", re.I )
+    lineno = 0
+    # TODO: Potentially reading huge lines into string here, this should be
+    # reworked.
+    for line in temp:
+        lineno += 1
+        matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line ) or regexp5.search( line )
+        if matches:
+            if chunk is None:
+                temp.close()
+            return True
+        if HTML_CHECK_LINES and (lineno > HTML_CHECK_LINES):
+            break
+    if chunk is None:
+        temp.close()
+    return False
+
+
+def check_binary( name, file_path=True ):
+    # Handles files if file_path is True or text if file_path is False
+    is_binary = False
+    if file_path:
+        temp = open( name, "U" )
+    else:
+        temp = StringIO( name )
+    try:
+        for char in temp.read( 100 ):
+            if util.is_binary( char ):
+                is_binary = True
+                break
+    finally:
+        temp.close( )
+    return is_binary
+
+
+def check_gzip( file_path ):
+    # This method returns a tuple of booleans representing ( is_gzipped, is_valid )
+    # Make sure we have a gzipped file
+    try:
+        temp = open( file_path, "U" )
+        magic_check = temp.read( 2 )
+        temp.close()
+        if magic_check != util.gzip_magic:
+            return ( False, False )
+    except:
+        return ( False, False )
+    # We support some binary data types, so check if the compressed binary file is valid
+    # If the file is Bam, it should already have been detected as such, so we'll just check
+    # for sff format.
+    try:
+        header = gzip.open( file_path ).read(4)
+        if header == b'.sff':
+            return ( True, True )
+    except:
+        return ( False, False )
+    CHUNK_SIZE = 2 ** 15  # 32Kb
+    gzipped_file = gzip.GzipFile( file_path, mode='rb' )
+    chunk = gzipped_file.read( CHUNK_SIZE )
+    gzipped_file.close()
+    # See if we have a compressed HTML file
+    if check_html( file_path, chunk=chunk ):
+        return ( True, False )
+    return ( True, True )
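+
+# Usage sketch (path illustrative): the returned tuple distinguishes "not gzip"
+# from "gzip, but containing disallowed HTML":
+#
+#     is_gzipped, is_valid = check_gzip( '/tmp/upload.dat' )
+#     if is_gzipped and not is_valid:
+#         pass  # compressed HTML detected; treat the upload as unsafe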
+
+
+def check_bz2( file_path ):
+    try:
+        temp = open( file_path, "U" )
+        magic_check = temp.read( 3 )
+        temp.close()
+        if magic_check != util.bz2_magic:
+            return ( False, False )
+    except:
+        return ( False, False )
+    CHUNK_SIZE = 2 ** 15  # 32Kb
+    bzipped_file = bz2.BZ2File( file_path, mode='rb' )
+    chunk = bzipped_file.read( CHUNK_SIZE )
+    bzipped_file.close()
+    # See if we have a compressed HTML file
+    if check_html( file_path, chunk=chunk ):
+        return ( True, False )
+    return ( True, True )
+
+
+def check_zip( file_path ):
+    if zipfile.is_zipfile( file_path ):
+        return True
+    return False
+
+
+def is_bz2( file_path ):
+    is_bz2, is_valid = check_bz2( file_path )
+    return is_bz2
+
+
+def is_gzip( file_path ):
+    is_gzipped, is_valid = check_gzip( file_path )
+    return is_gzipped
+
+
+def check_image( file_path ):
+    """ Simple wrapper around image_type to yield a True/False verdict """
+    if image_type( file_path ):
+        return True
+    return False
+
+
+__all__ = (
+    'check_binary',
+    'check_bz2',
+    'check_gzip',
+    'check_html',
+    'check_image',
+    'check_zip',
+    'is_gzip',
+    'is_bz2',
+)
diff --git a/lib/galaxy/util/dbkeys.py b/lib/galaxy/util/dbkeys.py
new file mode 100644
index 0000000..9d95fd8
--- /dev/null
+++ b/lib/galaxy/util/dbkeys.py
@@ -0,0 +1,93 @@
+"""
+Functionality for dealing with dbkeys.
+"""
+# dbkeys read from disk using builds.txt
+from __future__ import absolute_import
+import os.path
+from json import loads
+
+from galaxy.util import read_dbnames
+from galaxy.util.object_wrapper import sanitize_lists_to_string
+
+
+class GenomeBuilds( object ):
+    default_value = "?"
+    default_name = "unspecified (?)"
+
+    def __init__( self, app, data_table_name="__dbkeys__", load_old_style=True ):
+        self._app = app
+        self._data_table_name = data_table_name
+        self._static_chrom_info_path = app.config.len_file_path
+        # A dbkey can be listed multiple times, but with different names, so we can't use dictionaries for lookups
+        if load_old_style:
+            self._static_dbkeys = list( read_dbnames( app.config.builds_file_path ) )
+        else:
+            self._static_dbkeys = []
+
+    def get_genome_build_names( self, trans=None ):
+        # FIXME: how to deal with key duplicates?
+        rval = []
+        # load user custom genome builds
+        if trans is not None:
+            if trans.history:
+                # This is a little bit odd. We are adding every .len file in the current history to the dbkey list,
+                # but this is previous behavior from trans.db_names, so we'll continue to do it.
+                # It does allow one-off, history specific dbkeys to be created by a user. But we are not filtering,
+                # so a len file will be listed twice (as the build name and again as dataset name),
+                # if custom dbkey creation/conversion occurred within the current history.
+                datasets = trans.sa_session.query( self._app.model.HistoryDatasetAssociation ) \
+                                .filter_by( deleted=False, history_id=trans.history.id, extension="len" )
+                for dataset in datasets:
+                    rval.append( ( dataset.dbkey, "%s (%s) [History]" % ( dataset.name, dataset.dbkey ) ) )
+            user = trans.user
+            if user and hasattr( user, 'preferences' ) and 'dbkeys' in user.preferences:
+                user_keys = loads( user.preferences['dbkeys'] )
+                for key, chrom_dict in user_keys.iteritems():
+                    rval.append( ( key, "%s (%s) [Custom]" % ( chrom_dict['name'], key ) ) )
+        # Load old builds.txt static keys
+        rval.extend( self._static_dbkeys )
+        # load dbkeys from dbkey data table
+        dbkey_table = self._app.tool_data_tables.get( self._data_table_name, None )
+        if dbkey_table is not None:
+            for field_dict in dbkey_table.get_named_fields_list():
+                rval.append( ( field_dict[ 'value' ], field_dict[ 'name' ] ) )
+        return rval
+
+    def get_chrom_info( self, dbkey, trans=None, custom_build_hack_get_len_from_fasta_conversion=True ):
+        # FIXME: flag to turn off custom_build_hack_get_len_from_fasta_conversion should not be required
+        chrom_info = None
+        db_dataset = None
+        # Collect chromInfo from custom builds
+        if trans:
+            db_dataset = trans.db_dataset_for( dbkey )
+            if db_dataset:
+                chrom_info = db_dataset.file_name
+            else:
+                # Do Custom Build handling
+                if trans.user and ( 'dbkeys' in trans.user.preferences ) and ( dbkey in loads( trans.user.preferences[ 'dbkeys' ] ) ):
+                    custom_build_dict = loads( trans.user.preferences[ 'dbkeys' ] )[ dbkey ]
+                    # HACK: the attempt to get chrom_info below will trigger the
+                    # fasta-to-len converter if the dataset is not available,
+                    # which will in turn create a recursive loop when
+                    # running the fasta-to-len tool. So, use a hack in the second
+                    # condition below to avoid getting chrom_info when running the
+                    # fasta-to-len converter.
+                    if 'fasta' in custom_build_dict and custom_build_hack_get_len_from_fasta_conversion:
+                        # Build is defined by fasta; get len file, which is obtained from converting fasta.
+                        build_fasta_dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'fasta' ] )
+                        chrom_info = build_fasta_dataset.get_converted_dataset( trans, 'len' ).file_name
+                    elif 'len' in custom_build_dict:
+                        # Build is defined by len file, so use it.
+                        chrom_info = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( custom_build_dict[ 'len' ] ).file_name
+        # Check Data table
+        if not chrom_info:
+            dbkey_table = self._app.tool_data_tables.get( self._data_table_name, None )
+            if dbkey_table is not None:
+                chrom_info = dbkey_table.get_entry( 'value', dbkey, 'len_path', default=None )
+        # use configured server len path
+        if not chrom_info:
+            # Default to built-in build.
+            # Since we are using an unverified dbkey, we will sanitize the dbkey before use
+            chrom_info = os.path.join( self._static_chrom_info_path, "%s.len" % sanitize_lists_to_string( dbkey ) )
+        chrom_info = os.path.abspath( chrom_info )
+        return ( chrom_info, db_dataset )
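+
+
+# Usage sketch (names illustrative): callers get back the resolved .len path
+# plus the history dataset it came from, if any:
+#
+#     chrom_info_path, db_dataset = genome_builds.get_chrom_info( 'hg19', trans=trans )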
diff --git a/lib/galaxy/util/dictifiable.py b/lib/galaxy/util/dictifiable.py
new file mode 100644
index 0000000..29051e8
--- /dev/null
+++ b/lib/galaxy/util/dictifiable.py
@@ -0,0 +1,62 @@
+import datetime
+import uuid
+
+
+class Dictifiable:
+    """ Mixin that enables objects to be converted to dictionaries. This is useful
+        for sharing objects across boundaries, such as the API, tool scripts,
+        and JavaScript code. """
+
+    def to_dict( self, view='collection', value_mapper=None ):
+        """
+        Return item dictionary.
+        """
+
+        if not value_mapper:
+            value_mapper = {}
+
+        def get_value( key, item ):
+            """
+            Recursive helper function to get item values.
+            """
+            # FIXME: why use exception here? Why not look for key in value_mapper
+            # first and then default to to_dict?
+            try:
+                return item.to_dict( view=view, value_mapper=value_mapper )
+            except:
+                if key in value_mapper:
+                    return value_mapper.get( key )( item )
+                if type(item) == datetime.datetime:
+                    return item.isoformat()
+                elif type(item) == uuid.UUID:
+                    return str(item)
+                # Leaving this for future reference, though we may want a more
+                # generic way to handle special type mappings going forward.
+                # If the item is of a class that needs to be 'stringified' before being put into a JSON data structure
+                # elif type(item) in []:
+                #    return str(item)
+                return item
+
+        # Create dict to represent item.
+        rval = dict(
+            model_class=self.__class__.__name__
+        )
+
+        # Fill item dict with visible keys.
+        try:
+            visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
+        except AttributeError:
+            raise Exception( 'Unknown Dictifiable view: %s' % view )
+        for key in visible_keys:
+            try:
+                item = self.__getattribute__( key )
+                if isinstance( item, list ):
+                    rval[ key ] = []
+                    for i in item:
+                        rval[ key ].append( get_value( key, i ) )
+                else:
+                    rval[ key ] = get_value( key, item )
+            except AttributeError:
+                rval[ key ] = None
+
+        return rval
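+
+
+# Usage sketch (class is illustrative): a model opts in by declaring the
+# per-view key tuples that to_dict() resolves via 'dict_<view>_visible_keys':
+#
+#     class Tag( Dictifiable ):
+#         dict_collection_visible_keys = ( 'id', 'name' )
+#         dict_element_visible_keys = ( 'id', 'name', 'user_id' )
+#
+#     tag.to_dict()                  # uses dict_collection_visible_keys
+#     tag.to_dict( view='element' )  # uses dict_element_visible_keys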
diff --git a/lib/galaxy/util/docutils_template.txt b/lib/galaxy/util/docutils_template.txt
new file mode 100644
index 0000000..92aee1b
--- /dev/null
+++ b/lib/galaxy/util/docutils_template.txt
@@ -0,0 +1 @@
+%(body)s
diff --git a/lib/galaxy/util/expressions.py b/lib/galaxy/util/expressions.py
new file mode 100644
index 0000000..2b106a8
--- /dev/null
+++ b/lib/galaxy/util/expressions.py
@@ -0,0 +1,56 @@
+"""
+Expression evaluation support.
+
+For the moment this depends on python's eval. In the future it should be
+replaced with a "safe" parser.
+"""
+
+from collections import MutableMapping
+from itertools import chain
+
+
+class ExpressionContext( MutableMapping ):
+    def __init__( self, dict, parent=None ):
+        """
+        Create a new expression context that looks for values in the
+        container object 'dict', and falls back to 'parent'
+        """
+        self.dict = dict
+        self.parent = parent
+
+    def __delitem__(self, key):
+        if key in self.dict:
+            del self.dict[key]
+        elif self.parent is not None and key in self.parent:
+            del self.parent[key]
+
+    def __iter__(self):
+        return chain(iter(self.dict), iter(self.parent or []))
+
+    def __len__(self):
+        return len(self.dict) + len(self.parent or [])
+
+    def __getitem__( self, key ):
+        if key in self.dict:
+            return self.dict[key]
+        if self.parent is not None and key in self.parent:
+            return self.parent[key]
+        raise KeyError( key )
+
+    def __setitem__( self, key, value ):
+        self.dict[key] = value
+
+    def __contains__( self, key ):
+        if key in self.dict:
+            return True
+        if self.parent is not None and key in self.parent:
+            return True
+        return False
+
+    def __str__( self ):
+        return str( self.dict )
+
+    def __nonzero__( self ):
+        if not self.dict and not self.parent:
+            return False
+        return True
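+
+
+# Usage sketch (values illustrative): lookups fall back to the parent context
+# when a key is missing locally:
+#
+#     parent = ExpressionContext( { 'a': 1 } )
+#     child = ExpressionContext( { 'b': 2 }, parent=parent )
+#     child['b']  # -> 2, found locally
+#     child['a']  # -> 1, found via the parent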
diff --git a/lib/galaxy/util/filelock.py b/lib/galaxy/util/filelock.py
new file mode 100644
index 0000000..be6dc51
--- /dev/null
+++ b/lib/galaxy/util/filelock.py
@@ -0,0 +1,82 @@
+"""Code obtained from https://github.com/dmfrey/FileLock.
+
+See full license at:
+
+https://github.com/dmfrey/FileLock/blob/master/LICENSE.txt
+
+"""
+import errno
+import os
+import time
+
+
+class FileLockException(Exception):
+    pass
+
+
+class FileLock(object):
+    """ A file locking mechanism that has context-manager support so
+        you can use it in a with statement. This should be relatively cross
+        compatible as it doesn't rely on msvcrt or fcntl for the locking.
+    """
+
+    def __init__(self, file_name, timeout=10, delay=.05):
+        """ Prepare the file locker. Specify the file to lock and optionally
+            the maximum timeout and the delay between each attempt to lock.
+        """
+        self.is_locked = False
+        full_path = os.path.abspath(file_name)
+        self.lockfile = "%s.lock" % full_path
+        self.file_name = full_path
+        self.timeout = timeout
+        self.delay = delay
+
+    def acquire(self):
+        """ Acquire the lock, if possible. If the lock is in use, it check again
+            every `wait` seconds. It does this until it either gets the lock or
+            exceeds `timeout` number of seconds, in which case it throws
+            an exception.
+        """
+        start_time = time.time()
+        while True:
+            try:
+                self.fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR)
+                break
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+                if (time.time() - start_time) >= self.timeout:
+                    raise FileLockException("Timeout occurred.")
+                time.sleep(self.delay)
+        self.is_locked = True
+
+    def release(self):
+        """ Get rid of the lock by deleting the lockfile.
+            When working in a `with` statement, this gets automatically
+            called at the end.
+        """
+        if self.is_locked:
+            os.close(self.fd)
+            os.unlink(self.lockfile)
+            self.is_locked = False
+
+    def __enter__(self):
+        """ Activated when used in the with statement.
+            Should automatically acquire a lock to be used in the with block.
+        """
+        if not self.is_locked:
+            self.acquire()
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """ Activated at the end of the with statement.
+            It automatically releases the lock if it is held.
+        """
+        if self.is_locked:
+            self.release()
+
+    def __del__(self):
+        """ Make sure that the FileLock instance doesn't leave a lockfile
+            lying around.
+        """
+        self.release()
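+
+
+# Usage sketch (file name illustrative): the context manager acquires the lock
+# on entry and removes the '<name>.lock' file on exit:
+#
+#     with FileLock( '/tmp/shared.dat', timeout=5 ):
+#         pass  # exclusive access while /tmp/shared.dat.lock exists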
diff --git a/lib/galaxy/util/hash_util.py b/lib/galaxy/util/hash_util.py
new file mode 100644
index 0000000..63efe64
--- /dev/null
+++ b/lib/galaxy/util/hash_util.py
@@ -0,0 +1,50 @@
+"""
+Utility functions for cross-version Python compatibility.  Python 2.5
+introduced hashlib, which replaced the sha module used in Python 2.4 and
+earlier versions.
+"""
+import hashlib
+import hmac
+import logging
+
+log = logging.getLogger( __name__ )
+
+sha1 = hashlib.sha1
+sha = sha1
+md5 = hashlib.md5
+
+
+def md5_hash_file(path):
+    """
+    Return the MD5 hex digest of a file.
+    """
+    hasher = hashlib.md5()
+    with open(path, 'rb') as afile:
+        buf = afile.read()
+        hasher.update(buf)
+        return hasher.hexdigest()
+
+
+def new_secure_hash( text_type=None ):
+    """
+    Returns either a sha1 hash object (if called with no arguments), or a
+    hexdigest of the sha1 hash of the argument `text_type`.
+    """
+    if text_type:
+        return sha1( text_type ).hexdigest()
+    else:
+        return sha1()
+
+
+def hmac_new( key, value ):
+    return hmac.new( key, value, sha ).hexdigest()
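+
+# Usage sketch (values illustrative):
+#
+#     new_secure_hash()                    # -> a fresh sha1 hash object
+#     new_secure_hash( 'some text' )       # -> 40-character sha1 hexdigest
+#     hmac_new( 'secret-key', 'payload' )  # -> sha1-based HMAC hexdigest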
+
+
+def is_hashable( value ):
+    try:
+        hash( value )
+    except:
+        return False
+    return True
+
+
+__all__ = ('md5', 'hashlib', 'sha1', 'sha', 'new_secure_hash', 'hmac_new', 'is_hashable')
diff --git a/lib/galaxy/util/heartbeat.py b/lib/galaxy/util/heartbeat.py
new file mode 100644
index 0000000..9026e01
--- /dev/null
+++ b/lib/galaxy/util/heartbeat.py
@@ -0,0 +1,192 @@
+import os
+import sys
+import threading
+import time
+import traceback
+
+from six import iteritems
+
+
+def get_current_thread_object_dict():
+    """
+    Get a dictionary of all 'Thread' objects created via the threading
+    module keyed by thread_id. Note that not all interpreter threads
+    have a Thread object; only the main thread and any created via the
+    'threading' module. Threads created via the low level 'thread' module
+    will not be in the returned dictionary.
+
+    HACK: This mucks with the internals of the threading module since that
+          module does not expose any way to match 'Thread' objects with
+          interpreter thread identifiers (though it should).
+    """
+    rval = dict()
+    # Acquire the lock and then union the contents of 'active' and 'limbo'
+    # threads into the return value.
+    threading._active_limbo_lock.acquire()
+    rval.update( threading._active )
+    rval.update( threading._limbo )
+    threading._active_limbo_lock.release()
+    return rval
+
+
+class Heartbeat( threading.Thread ):
+    """
+    Thread that periodically dumps the state of all threads to a file
+    """
+    def __init__( self, config, name="Heartbeat Thread", period=20, fname="heartbeat.log" ):
+        threading.Thread.__init__( self, name=name )
+        self.config = config
+        self.should_stop = False
+        self.period = period
+        self.fname = fname
+        self.file = None
+        self.fname_nonsleeping = None
+        self.file_nonsleeping = None
+        self.pid = None
+        self.nonsleeping_heartbeats = { }
+        # Event to wait on when sleeping, allows us to interrupt for shutdown
+        self.wait_event = threading.Event()
+
+    def run( self ):
+        self.pid = os.getpid()
+        self.fname = self.fname.format(
+            server_name=self.config.server_name,
+            pid=self.pid
+        )
+        fname, ext = os.path.splitext(self.fname)
+        self.fname_nonsleeping = fname + '.nonsleeping' + ext
+        wait = self.period
+        if self.period <= 0:
+            wait = 60
+        while not self.should_stop:
+            if self.period > 0:
+                self.dump()
+            self.wait_event.wait( wait )
+
+    def open_logs( self ):
+        if self.file is None or self.file.closed:
+            self.file = open( self.fname, "a" )
+            self.file_nonsleeping = open( self.fname_nonsleeping, "a" )
+            self.file.write( "Heartbeat for pid %d thread started at %s\n\n" % ( self.pid, time.asctime() ) )
+            self.file_nonsleeping.write( "Non-Sleeping-threads for pid %d thread started at %s\n\n" % ( self.pid, time.asctime() ) )
+
+    def close_logs( self ):
+        if self.file is not None and not self.file.closed:
+            self.file.write( "Heartbeat for pid %d thread stopped at %s\n\n" % ( self.pid, time.asctime() ) )
+            self.file_nonsleeping.write( "Non-Sleeping-threads for pid %d thread stopped at %s\n\n" % ( self.pid, time.asctime() ) )
+            self.file.close()
+            self.file_nonsleeping.close()
+
+    def dump( self ):
+        self.open_logs()
+        try:
+            # Print separator with timestamp
+            self.file.write( "Traceback dump for all threads at %s:\n\n" % time.asctime() )
+            # Print the thread states
+            threads = get_current_thread_object_dict()
+            for thread_id, frame in iteritems(sys._current_frames()):
+                if thread_id in threads:
+                    object = repr( threads[thread_id] )
+                else:
+                    object = "<No Thread object>"
+                self.file.write( "Thread %s, %s:\n\n" % ( thread_id, object ) )
+                traceback.print_stack( frame, file=self.file )
+                self.file.write( "\n" )
+            self.file.write( "End dump\n\n" )
+            self.file.flush()
+            self.print_nonsleeping(threads)
+        except:
+            self.file.write( "Caught exception attempting to dump thread states:" )
+            traceback.print_exc( None, self.file )
+            self.file.write( "\n" )
+
+    def shutdown( self ):
+        self.should_stop = True
+        self.wait_event.set()
+        self.close_logs()
+        self.join()
+
+    def thread_is_sleeping( self, last_stack_frame ):
+        """
+        Returns True if the given stack-frame represents a known
+        sleeper function (at least in python 2.5)
+        """
+        _filename = last_stack_frame[0]
+        # _line = last_stack_frame[1]
+        _funcname = last_stack_frame[2]
+        _text = last_stack_frame[3]
+        # Ugly hack to tell if a thread is supposedly sleeping or not
+        # These are the most common sleeping functions I've found.
+        # Is there a better way? (python interpreter internals?)
+        # Tested only with python 2.5
+        if _funcname == "wait" and _text == "waiter.acquire()":
+            return True
+        if _funcname == "wait" and _text == "_sleep(delay)":
+            return True
+        if _funcname == "accept" and _text[-14:] == "_sock.accept()":
+            return True
+        if _funcname in ("monitor", "__monitor", "app_loop", "check") \
+                and _text.startswith("time.sleep(") and _text.endswith(")"):
+            return True
+        if _funcname == "drain_events" and _text == "sleep(polling_interval)":
+            return True
+        # Ugly hack: always skip the heartbeat thread
+        # TODO: get the current thread-id in python
+        #   skip heartbeat thread by thread-id, not by filename
+        if _filename.find("/lib/galaxy/util/heartbeat.py") != -1:
+            return True
+        # By default, assume the thread is not sleeping
+        return False
+
+    def get_interesting_stack_frame( self, stack_frames ):
+        """
+        Scans the given backtrace's stack frames and returns a single
+        quadruple of [filename, line, function-name, text] for
+        the single, deepest, most interesting frame.
+
+        Interesting being::
+
+          inside the galaxy source code ("/lib/galaxy"),
+          preferably not an egg.
+        """
+        for _filename, _line, _funcname, _text in reversed(stack_frames):
+            idx = _filename.find("/lib/galaxy/")
+            if idx != -1:
+                relative_filename = _filename[idx:]
+                return ( relative_filename, _line, _funcname, _text )
+        # no "/lib/galaxy" code found, return the innermost frame
+        return stack_frames[-1]
+
+    def print_nonsleeping( self, threads_object_dict ):
+        self.file_nonsleeping.write( "Non-Sleeping threads at %s:\n\n" % time.asctime() )
+        all_threads_are_sleeping = True
+        threads = get_current_thread_object_dict()
+        for thread_id, frame in iteritems(sys._current_frames()):
+            if thread_id in threads:
+                object = repr( threads[thread_id] )
+            else:
+                object = "<No Thread object>"
+            tb = traceback.extract_stack(frame)
+            if self.thread_is_sleeping(tb[-1]):
+                if thread_id in self.nonsleeping_heartbeats:
+                    del self.nonsleeping_heartbeats[thread_id]
+                continue
+
+            # Count non-sleeping thread heartbeats
+            if thread_id in self.nonsleeping_heartbeats:
+                self.nonsleeping_heartbeats[thread_id] += 1
+            else:
+                self.nonsleeping_heartbeats[thread_id] = 1
+
+            good_frame = self.get_interesting_stack_frame(tb)
+            self.file_nonsleeping.write( "Thread %s\t%s\tnon-sleeping for %d heartbeat(s)\n  File %s:%d\n    Function \"%s\"\n      %s\n" %
+                ( thread_id, object, self.nonsleeping_heartbeats[thread_id], good_frame[0], good_frame[1], good_frame[2], good_frame[3] ) )
+            all_threads_are_sleeping = False
+
+        if all_threads_are_sleeping:
+            self.file_nonsleeping.write( "All threads are sleeping.\n" )
+        self.file_nonsleeping.write( "\n" )
+        self.file_nonsleeping.flush()
+
+    def dump_signal_handler( self, signum, frame ):
+        self.dump()
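+
+
+# Usage sketch (config is illustrative; it must provide a server_name used to
+# expand the log file name template):
+#
+#     heartbeat = Heartbeat( config, period=20, fname='heartbeat_{server_name}.log' )
+#     heartbeat.start()     # begin periodic thread dumps
+#     heartbeat.shutdown()  # stop the thread and close the log files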
diff --git a/lib/galaxy/util/image_util.py b/lib/galaxy/util/image_util.py
new file mode 100644
index 0000000..a54043b
--- /dev/null
+++ b/lib/galaxy/util/image_util.py
@@ -0,0 +1,75 @@
+"""Provides utilities for working with image files."""
+import imghdr
+import logging
+
+try:
+    import Image as PIL
+except ImportError:
+    try:
+        from PIL import Image as PIL
+    except:
+        PIL = None
+
+log = logging.getLogger(__name__)
+
+
+def image_type( filename ):
+    fmt = None
+    if PIL is not None:
+        try:
+            im = PIL.open( filename )
+            fmt = im.format
+            im.close()
+        except:
+            # We will fall back to imghdr below; this exception is expected
+            # to happen frequently, so we don't log it.
+            pass
+    if not fmt:
+        fmt = imghdr.what( filename )
+    if fmt:
+        return fmt.upper()
+    else:
+        return False
+
+
+def check_image_type( filename, types ):
+    fmt = image_type( filename )
+    if fmt in types:
+        return True
+    return False
+
+
+def get_image_ext( file_path ):
+    # Determine the extension from the detected image format. Every supported
+    # format maps to its lowercased name, except JPEG which maps to 'jpg'.
+    fmt = image_type( file_path )
+    if fmt in ( 'JPG', 'JPEG' ):
+        return 'jpg'
+    if fmt in ( 'PNG', 'TIFF', 'BMP', 'GIF', 'IM', 'PCD', 'PCX', 'PPM',
+                'PSD', 'XBM', 'XPM', 'RGB', 'PBM', 'PGM' ):
+        return fmt.lower()
+    return None
diff --git a/lib/galaxy/util/inflection.py b/lib/galaxy/util/inflection.py
new file mode 100644
index 0000000..58c22d0
--- /dev/null
+++ b/lib/galaxy/util/inflection.py
@@ -0,0 +1,375 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2006 Bermi Ferrer Martinez
+#
+# bermi a-t bermilabs - com
+# See the end of this file for the free software, open source license (BSD-style).
+
+import re
+
+
+class Base:
+    '''Locale inflectors must inherit from this base class in order to provide
+    the basic Inflector functionality'''
+
+    def cond_plural(self, number_of_records, word):
+        '''Returns the plural form of a word if first parameter is greater than 1'''
+
+        if number_of_records != 1:
+            return self.pluralize(word)
+        else:
+            return word
+
+    def titleize(self, word, uppercase=''):
+        '''Converts an underscored or CamelCase word into an English sentence.
+            The titleize function converts text like "WelcomePage",
+            "welcome_page" or "welcome page" to "Welcome Page".
+            If the second parameter is set to 'first' it will only
+            capitalize the first character of the title.'''
+
+        if(uppercase == 'first'):
+            return self.humanize(self.underscore(word)).capitalize()
+        else:
+            return self.humanize(self.underscore(word)).title()
+
+    def camelize(self, word):
+        ''' Returns given word as CamelCased
+        Converts a word like "send_email" to "SendEmail". It
+        will remove non-alphanumeric characters from the word, so
+        "who's online" will be converted to "WhoSOnline"'''
+        return ''.join(w[0].upper() + w[1:] for w in re.sub('[^A-Z^a-z^0-9^:]+', ' ', word).split(' '))
+
+    def underscore(self, word):
+        ''' Converts a word "into_it_s_underscored_version"
+        Convert any "CamelCased" or "ordinary Word" into an
+        "underscored_word".
+        This can be really useful for creating friendly URLs.'''
+
+        return re.sub('[^A-Z^a-z^0-9^\/]+', '_',
+                      re.sub('([a-z\d])([A-Z])', '\\1_\\2',
+                             re.sub('([A-Z]+)([A-Z][a-z])', '\\1_\\2', re.sub('::', '/', word)))).lower()
+
+    def humanize(self, word, uppercase=''):
+        '''Returns a human-readable string from word
+        Returns a human-readable string from word, by replacing
+        underscores with spaces and title-casing each word by default.
+        If you only want the first character capitalized, pass 'first'
+        as the second parameter.'''
+
+        if(uppercase == 'first'):
+            return re.sub('_id$', '', word).replace('_', ' ').capitalize()
+        else:
+            return re.sub('_id$', '', word).replace('_', ' ').title()
+
+    def variablize(self, word):
+        '''Same as camelize but first char is lowercased
+        Converts a word like "send_email" to "sendEmail". It
+        will remove non-alphanumeric characters from the word, so
+        "who's online" will be converted to "whoSOnline"'''
+        word = self.camelize(word)
+        return word[0].lower() + word[1:]
+
+    def tableize(self, class_name):
+        ''' Converts a class name to its table name according to rails
+        naming conventions. Example. Converts "Person" to "people" '''
+        return self.pluralize(self.underscore(class_name))
+
+    def classify(self, table_name):
+        '''Converts a table name to its class name according to rails
+        naming conventions. Example: Converts "people" to "Person" '''
+        return self.camelize(self.singularize(table_name))
+
+    def ordinalize(self, number):
+        '''Converts number to its ordinal English form.
+        This method converts 13 to 13th, 2 to 2nd ...'''
+        tail = 'th'
+        if number % 100 == 11 or number % 100 == 12 or number % 100 == 13:
+            tail = 'th'
+        elif number % 10 == 1:
+            tail = 'st'
+        elif number % 10 == 2:
+            tail = 'nd'
+        elif number % 10 == 3:
+            tail = 'rd'
+
+        return str(number) + tail
+
+    def unaccent(self, text):
+        '''Transforms a string to its unaccented version.
+        This might be useful for generating "friendly" URLs'''
+        find = u'\u00C0\u00C1\u00C2\u00C3\u00C4\u00C5\u00C6\u00C7\u00C8\u00C9\u00CA\u00CB\u00CC\u00CD\u00CE\u00CF\u00D0\u00D1\u00D2\u00D3\u00D4\u00D5\u00D6\u00D8\u00D9\u00DA\u00DB\u00DC\u00DD\u00DE\u00DF\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6\u00E7\u00E8\u00E9\u00EA\u00EB\u00EC\u00ED\u00EE\u00EF\u00F0\u00F1\u00F2\u00F3\u00F4\u00F5\u00F6\u00F8\u00F9\u00FA\u00FB\u00FC\u00FD\u00FE\u00FF'
+        replace = u'AAAAAAACEEEEIIIIDNOOOOOOUUUUYTsaaaaaaaceeeeiiiienoooooouuuuyty'
+        return self.string_replace(text, find, replace)
+
+    def string_replace(self, word, find, replace):
+        '''This function returns a copy of word, translating
+        all occurrences of each character in find to the
+        corresponding character in replace'''
+        for k in range(0, len(find)):
+            word = re.sub(find[k], replace[k], word)
+
+        return word
+
+    def urlize(self, text):
+        '''Transform a string to its unaccented and underscored
+        version ready to be inserted in friendly URLs'''
+        return re.sub('^_|_$', '', self.underscore(self.unaccent(text)))
+
+    def demodulize(self, module_name):
+        return self.humanize(self.underscore(re.sub('^.*::', '', module_name)))
+
+    def modulize(self, module_description):
+        return self.camelize(self.singularize(module_description))
+
+    def foreignKey(self, class_name, separate_class_name_and_id_with_underscore=1):
+        ''' Returns class_name in underscored form, with "_id" tacked on at the end.
+        This is for use in dealing with the database.'''
+        if separate_class_name_and_id_with_underscore:
+            tail = '_id'
+        else:
+            tail = 'id'
+        return self.underscore(self.demodulize(class_name)) + tail
+
+
+class English (Base):
+    """
+    Inflector for pluralize and singularize English nouns.
+
+    This is the default Inflector for the Inflector obj
+    """
+
+    def pluralize(self, word):
+        '''Pluralizes English nouns.'''
+
+        rules = [
+            ['(?i)(quiz)$', '\\1zes'],
+            ['^(?i)(ox)$', '\\1en'],
+            ['(?i)([ml])ouse$', '\\1ice'],
+            ['(?i)(matr|vert|ind)ix|ex$', '\\1ices'],
+            ['(?i)(x|ch|ss|sh)$', '\\1es'],
+            ['(?i)([^aeiouy]|qu)ies$', '\\1y'],
+            ['(?i)([^aeiouy]|qu)y$', '\\1ies'],
+            ['(?i)(hive)$', '\\1s'],
+            ['(?i)(?:([^f])fe|([lr])f)$', '\\1\\2ves'],
+            ['(?i)sis$', 'ses'],
+            ['(?i)([ti])um$', '\\1a'],
+            ['(?i)(buffal|tomat)o$', '\\1oes'],
+            ['(?i)(bu)s$', '\\1ses'],
+            ['(?i)(alias|status)$', '\\1es'],
+            ['(?i)(octop|vir)us$', '\\1i'],
+            ['(?i)(ax|test)is$', '\\1es'],
+            ['(?i)s$', 's'],
+            ['(?i)$', 's']
+        ]
+
+        uncountable_words = ['equipment', 'information', 'rice', 'money', 'species', 'series', 'fish', 'sheep']
+
+        irregular_words = {
+            'person': 'people',
+            'man': 'men',
+            'child': 'children',
+            'sex': 'sexes',
+            'move': 'moves'
+        }
+
+        lower_cased_word = word.lower()
+
+        for uncountable_word in uncountable_words:
+            if lower_cased_word[-1 * len(uncountable_word):] == uncountable_word:
+                return word
+
+        for irregular in irregular_words.keys():
+            match = re.search('(' + irregular + ')$', word, re.IGNORECASE)
+            if match:
+                return re.sub('(?i)' + irregular + '$', match.expand('\\1')[0] + irregular_words[irregular][1:], word)
+
+        for rule in range(len(rules)):
+            match = re.search(rules[rule][0], word, re.IGNORECASE)
+            if match:
+                groups = match.groups()
+                for k in range(0, len(groups)):
+                    if groups[k] is None:
+                        rules[rule][1] = rules[rule][1].replace('\\' + str(k + 1), '')
+
+                return re.sub(rules[rule][0], rules[rule][1], word)
+
+        return word
+
+    def singularize(self, word):
+        '''Singularizes English nouns.'''
+
+        rules = [
+            ['(?i)(quiz)zes$', '\\1'],
+            ['(?i)(matr)ices$', '\\1ix'],
+            ['(?i)(vert|ind)ices$', '\\1ex'],
+            ['(?i)^(ox)en', '\\1'],
+            ['(?i)(alias|status)es$', '\\1'],
+            ['(?i)(octop|vir)i$', '\\1us'],
+            ['(?i)(cris|ax|test)es$', '\\1is'],
+            ['(?i)(shoe)s$', '\\1'],
+            ['(?i)(o)es$', '\\1'],
+            ['(?i)(bus)es$', '\\1'],
+            ['(?i)([ml])ice$', '\\1ouse'],
+            ['(?i)(x|ch|ss|sh)es$', '\\1'],
+            ['(?i)(m)ovies$', '\\1ovie'],
+            ['(?i)(s)eries$', '\\1eries'],
+            ['(?i)([^aeiouy]|qu)ies$', '\\1y'],
+            ['(?i)([lr])ves$', '\\1f'],
+            ['(?i)(tive)s$', '\\1'],
+            ['(?i)(hive)s$', '\\1'],
+            ['(?i)([^f])ves$', '\\1fe'],
+            ['(?i)(^analy)ses$', '\\1sis'],
+            ['(?i)((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)ses$', '\\1\\2sis'],
+            ['(?i)([ti])a$', '\\1um'],
+            ['(?i)(n)ews$', '\\1ews'],
+            ['(?i)s$', ''],
+        ]
+
+        uncountable_words = ['equipment', 'information', 'rice', 'money', 'species', 'series', 'fish', 'sheep', 'sms']
+
+        irregular_words = {
+            'people': 'person',
+            'men': 'man',
+            'children': 'child',
+            'sexes': 'sex',
+            'moves': 'move'
+        }
+
+        lower_cased_word = word.lower()
+        for uncountable_word in uncountable_words:
+            if lower_cased_word[-1 * len(uncountable_word):] == uncountable_word:
+                return word
+
+        for irregular in irregular_words.keys():
+            match = re.search('(' + irregular + ')$', word, re.IGNORECASE)
+            if match:
+                return re.sub('(?i)' + irregular + '$', match.expand('\\1')[0] + irregular_words[irregular][1:], word)
+
+        for rule in range(len(rules)):
+            match = re.search(rules[rule][0], word, re.IGNORECASE)
+            if match:
+                groups = match.groups()
+                for k in range(0, len(groups)):
+                    if groups[k] is None:
+                        rules[rule][1] = rules[rule][1].replace('\\' + str(k + 1), '')
+
+                return re.sub(rules[rule][0], rules[rule][1], word)
+
+        return word
+
+
+class Inflector:
+    """
+    Inflector for pluralizing and singularizing nouns.
+
+    It provides methods for helping on creating programs
+    based on naming conventions like on Ruby on Rails.
+    """
+
+    def __init__( self, Inflector=English ):
+        assert callable(Inflector), "Inflector should be a callable obj"
+        self.Inflector = Inflector()
+
+    def pluralize(self, word):
+        '''Pluralizes nouns.'''
+        return self.Inflector.pluralize(word)
+
+    def singularize(self, word):
+        '''Singularizes nouns.'''
+        return self.Inflector.singularize(word)
+
+    def cond_plural(self, number_of_records, word):
+        '''Returns the plural form of a word if first parameter is greater than 1'''
+        return self.Inflector.cond_plural(number_of_records, word)
+
+    def titleize(self, word, uppercase=''):
+        '''Converts an underscored or CamelCase word into a sentence.
+            The titleize function converts text like "WelcomePage",
+            "welcome_page" or  "welcome page" to this "Welcome Page".
+            If the "uppercase" parameter is set to 'first' it will only
+            capitalize the first character of the title.'''
+        return self.Inflector.titleize(word, uppercase)
+
+    def camelize(self, word):
+        ''' Returns given word as CamelCased
+        Converts a word like "send_email" to "SendEmail". It
+        will remove non-alphanumeric characters from the word, so
+        "who's online" will be converted to "WhoSOnline"'''
+        return self.Inflector.camelize(word)
+
+    def underscore(self, word):
+        ''' Converts a word "into_it_s_underscored_version"
+        Convert any "CamelCased" or "ordinary Word" into an
+        "underscored_word".
+        This can be really useful for creating friendly URLs.'''
+        return self.Inflector.underscore(word)
+
+    def humanize(self, word, uppercase=''):
+        '''Returns a human-readable string from word
+        Returns a human-readable string from word, by replacing
+        underscores with spaces and title-casing each word by default.
+        If you only want the first character capitalized, pass 'first'
+        as the second parameter.'''
+        return self.Inflector.humanize(word, uppercase)
+
+    def variablize(self, word):
+        '''Same as camelize but first char is lowercased
+        Converts a word like "send_email" to "sendEmail". It
+        will remove non-alphanumeric characters from the word, so
+        "who's online" will be converted to "whoSOnline"'''
+        return self.Inflector.variablize(word)
+
+    def tableize(self, class_name):
+        ''' Converts a class name to its table name according to rails
+        naming conventions. Example. Converts "Person" to "people" '''
+        return self.Inflector.tableize(class_name)
+
+    def classify(self, table_name):
+        '''Converts a table name to its class name according to rails
+        naming conventions. Example: Converts "people" to "Person" '''
+        return self.Inflector.classify(table_name)
+
+    def ordinalize(self, number):
+        '''Converts number to its ordinal form.
+        This method converts 13 to 13th, 2 to 2nd ...'''
+        return self.Inflector.ordinalize(number)
+
+    def unaccent(self, text):
+        '''Transforms a string to its unaccented version.
+        This might be useful for generating "friendly" URLs'''
+        return self.Inflector.unaccent(text)
+
+    def urlize(self, text):
+        '''Transform a string to its unaccented and underscored
+        version ready to be inserted in friendly URLs'''
+        return self.Inflector.urlize(text)
+
+    def demodulize(self, module_name):
+        return self.Inflector.demodulize(module_name)
+
+    def modulize(self, module_description):
+        return self.Inflector.modulize(module_description)
+
+    def foreignKey(self, class_name, separate_class_name_and_id_with_underscore=1):
+        ''' Returns class_name in underscored form, with "_id" tacked on at the end.
+        This is for use in dealing with the database.'''
+        return self.Inflector.foreignKey(class_name, separate_class_name_and_id_with_underscore)
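+
+# Usage sketch (words illustrative):
+#
+#     inflector = Inflector()
+#     inflector.pluralize( 'person' )         # -> 'people'
+#     inflector.singularize( 'matrices' )     # -> 'matrix'
+#     inflector.tableize( 'HistoryDataset' )  # -> 'history_datasets'
+#     inflector.ordinalize( 2 )               # -> '2nd'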
+
+
+# Copyright (c) 2006 Bermi Ferrer Martinez
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software to deal in this software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of this software, and to permit
+# persons to whom this software is furnished to do so, subject to the following
+# condition:
+#
+# THIS SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THIS SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THIS SOFTWARE.
diff --git a/lib/galaxy/util/json.py b/lib/galaxy/util/json.py
new file mode 100644
index 0000000..d172151
--- /dev/null
+++ b/lib/galaxy/util/json.py
@@ -0,0 +1,169 @@
+from __future__ import absolute_import
+
+import collections
+import copy
+import json
+import logging
+import math
+import random
+import string
+
+from six import iteritems, string_types, text_type
+
+__all__ = ( "safe_dumps", "json_fix", "validate_jsonrpc_request", "validate_jsonrpc_response", "jsonrpc_request", "jsonrpc_response" )
+
+log = logging.getLogger( __name__ )
+
+to_json_string = json.dumps
+from_json_string = json.loads
+
+
+def json_fix( val ):
+    if isinstance( val, list ):
+        return [ json_fix( v ) for v in val ]
+    elif isinstance( val, dict ):
+        return dict( [ ( json_fix( k ), json_fix( v ) ) for ( k, v ) in iteritems(val) ] )
+    elif isinstance( val, text_type ):
+        return val.encode( "utf8" )
+    else:
+        return val
+
+
+def swap_inf_nan( val ):
+    """
+    This takes an arbitrary object and preps it for jsonifying safely, replacing Inf/NaN with string placeholders.
+    """
+    if isinstance(val, string_types):
+        # Check strings first, because a string is a sequence and would otherwise be caught by the Sequence branch below.
+        return val
+    elif isinstance( val, collections.Sequence ):
+        return [ swap_inf_nan( v ) for v in val ]
+    elif isinstance( val, collections.Mapping ):
+        return dict( [ ( swap_inf_nan( k ), swap_inf_nan( v ) ) for ( k, v ) in iteritems(val) ] )
+    elif isinstance(val, float):
+        if math.isnan(val):
+            return "__NaN__"
+        elif val == float("inf"):
+            return "__Infinity__"
+        elif val == float("-inf"):
+            return "__-Infinity__"
+        else:
+            return val
+    else:
+        return val
+
+
+def safe_dumps( *args, **kwargs ):
+    """
+    This is a wrapper around dumps that encodes Infinity and NaN values. This
+    is a fairly rare case, so the extra re-encoding pass is cheap in practice.
+    Basically, we tell json.dumps to blow up if it encounters Infinity/NaN,
+    and we 'fix' the object before re-encoding.
+    """
+    try:
+        dumped = json.dumps( *args, allow_nan=False, **kwargs )
+    except ValueError:
+        obj = swap_inf_nan( copy.deepcopy( args[0] ) )
+        dumped = json.dumps( obj, allow_nan=False, **kwargs )
+    if kwargs.get( 'escape_closing_tags', True ):
+        return dumped.replace( '</', '<\\/' )
+    return dumped
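+
+# Usage sketch (values illustrative): non-finite floats become placeholder
+# strings and closing tags are escaped for safe embedding in HTML:
+#
+#     safe_dumps( { 'x': float( 'nan' ) } )  # -> '{"x": "__NaN__"}'
+#     safe_dumps( '</script>' )              # -> '"<\\/script>"'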
+
+
+# Methods for handling JSON-RPC
+
+def validate_jsonrpc_request( request, regular_methods, notification_methods ):
+    try:
+        request = json.loads( request )
+    except Exception as e:
+        return False, request, jsonrpc_response( id=None,
+                                                 error=dict( code=-32700,
+                                                             message='Parse error',
+                                                             data=str( e ) ) )
+    try:
+        assert 'jsonrpc' in request, \
+            'This server requires JSON-RPC 2.0 and no "jsonrpc" member was sent with the Request object as per the JSON-RPC 2.0 Specification.'
+        assert request['jsonrpc'] == '2.0', \
+            'Requested JSON-RPC version "%s" != required version "2.0".' % request['jsonrpc']
+        assert 'method' in request, 'No "method" member was sent with the Request object'
+    except AssertionError as e:
+        return False, request, jsonrpc_response( request=request,
+                                                 error=dict( code=-32600,
+                                                             message='Invalid Request',
+                                                             data=str( e ) ) )
+    try:
+        assert request['method'] in ( regular_methods + notification_methods )
+    except AssertionError as e:
+        return False, request, jsonrpc_response( request=request,
+                                                 error=dict( code=-32601,
+                                                             message='Method not found',
+                                                             data='Valid methods are: %s' % ', '.join( regular_methods + notification_methods ) ) )
+    try:
+        if request['method'] in regular_methods:
+            assert 'id' in request, 'No "id" member was sent with the Request object and the requested method "%s" is not a notification method' % request['method']
+    except AssertionError as e:
+        return False, request, jsonrpc_response( request=request,
+                                                 error=dict( code=-32600,
+                                                             message='Invalid Request',
+                                                             data=str( e ) ) )
+    return True, request, None
+
+
+def validate_jsonrpc_response( response, id=None ):
+    try:
+        response = json.loads( response )
+    except Exception as e:
+        log.error( 'Response was not valid JSON: %s' % str( e ) )
+        log.debug( 'Response was: %s' % response )
+        return False, response
+    try:
+        assert 'jsonrpc' in response, \
+            'This server requires JSON-RPC 2.0 and no "jsonrpc" member was sent with the Response object as per the JSON-RPC 2.0 Specification.'
+        assert ( 'result' in response or 'error' in response ), \
+            'Neither of "result" or "error" members were sent with the Response object.'
+        if 'error' in response:
+            assert int( response['error']['code'] ), \
+                'The "code" member of the "error" object in the Response is missing or not an integer.'
+            assert 'message' in response, \
+                'The "message" member of the "error" object in the Response is missing.'
+    except Exception as e:
+        log.error( 'Response was not valid JSON-RPC: %s' % str( e ) )
+        log.debug( 'Response was: %s' % response )
+        return False, response
+    if id is not None:
+        try:
+            assert 'id' in response and response['id'] == id
+        except Exception as e:
+            log.error( 'The response id "%s" does not match the request id "%s"' % ( response.get( 'id' ), id ) )
+            return False, response
+    return True, response
+
+
+def jsonrpc_request( method, params=None, id=None, jsonrpc='2.0' ):
+    if method is None:
+        log.error( 'jsonrpc_request(): "method" parameter cannot be None' )
+        return None
+    request = dict( jsonrpc=jsonrpc, method=method )
+    if params:
+        request['params'] = params
+    if id is True:
+        request['id'] = ''.join( [ random.choice( string.hexdigits ) for i in range( 16 ) ] )
+    elif id is not None:
+        request['id'] = id
+    return request
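+
+# Usage sketch (method and params illustrative): passing id=True generates a
+# random 16-character hex id for the request:
+#
+#     jsonrpc_request( 'status', params={ 'job': 42 }, id=True )
+#     # -> { 'jsonrpc': '2.0', 'method': 'status', 'params': { 'job': 42 }, 'id': '...' }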
+
+
+def jsonrpc_response( request=None, id=None, result=None, error=None, jsonrpc='2.0' ):
+    if result:
+        rval = dict( jsonrpc=jsonrpc, result=result )
+    elif error:
+        rval = dict( jsonrpc=jsonrpc, error=error )
+    else:
+        msg = 'jsonrpc_response() called without a "result" or "error" parameter'
+        log.error( msg )
+        rval = dict( jsonrpc=jsonrpc, error=msg )
+    if id is not None:
+        rval['id'] = id
+    elif request is not None and 'id' in request:
+        rval['id'] = request['id']
+    return rval
diff --git a/lib/galaxy/util/jstree.py b/lib/galaxy/util/jstree.py
new file mode 100644
index 0000000..9f20d66
--- /dev/null
+++ b/lib/galaxy/util/jstree.py
@@ -0,0 +1,162 @@
+import os
+import copy
+import dictobj
+from collections import namedtuple
+
+Path = namedtuple('Path', ('path', 'id', 'options'))
+
+
+class Node(dictobj.DictionaryObject):
+  """
+  Copyright 2012 "Grim Apps"
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+  Helper class written by William Grim - grimwm
+  Original repo: https://github.com/grimwm/py-jstree
+  Code adjusted according to the idea of Frank Blechschmidt - FraBle
+  Thank you!
+  ************************
+  This class exists as a helper to the JSTree.  Its "jsonData" method can
+  generate sub-tree JSON without putting the logic directly into the JSTree.
+
+  This data structure is only semi-immutable.  The JSTree uses a directly
+  iterative (i.e. no stack is managed) builder pattern to construct a
+  tree out of paths.  Therefore, the children are not known in advance, and
+  we have to keep the children attribute mutable.
+  """
+
+  def __init__(self, path, oid, **kwargs):
+    """
+    kwargs allows users to pass arbitrary information into a Node that
+    will later be output in jsonData().  It allows for more advanced
+    configuration than the default path handling that JSTree currently allows.
+    For example, users may want to pass "attr" or some other valid jsTree options.
+
+    Example:
+      >>> import jstree
+      >>> node = jstree.Node('a', None)
+      >>> print node
+      Node({'text': 'a', 'children': MutableDictionaryObject({})})
+      >>> print node.jsonData()
+      {'text': 'a'}
+
+      >>> import jstree
+      >>> node = jstree.Node('a', 1)
+      >>> print node
+      Node({'text': 'a', 'children': MutableDictionaryObject({}), 'li_attr': DictionaryObject({'id': 1}), 'id': 1})
+      >>> print node.jsonData()
+      {'text': 'a', 'id': 1, 'li_attr': {'id': 1}}
+
+      >>> import jstree
+      >>> node = jstree.Node('a', 5, icon="folder", state = {'opened': True})
+      >>> print node
+      Node({'text': 'a', 'id': 5, 'state': DictionaryObject({'opened': True}), 'children': MutableDictionaryObject({}), 'li_attr': DictionaryObject({'id': 5}), 'icon': 'folder'})
+      >>> print node.jsonData()
+      {'text': 'a', 'state': {'opened': True}, 'id': 5, 'li_attr': {'id': 5}, 'icon': 'folder'}
+    """
+    super(Node, self).__init__()
+
+    children = kwargs.get('children', {})
+    if len(filter(lambda key: not isinstance(children[key], Node), children)):
+      raise TypeError(
+        "One or more children were not instances of '%s'" % Node.__name__)
+    if 'children' in kwargs:
+      del kwargs['children']
+    self._items['children'] = dictobj.MutableDictionaryObject(children)
+
+    if oid is not None:
+      li_attr = kwargs.get('li_attr', {})
+      li_attr['id'] = oid
+      kwargs['li_attr'] = li_attr
+      self._items['id'] = oid
+
+    self._items.update(dictobj.DictionaryObject(**kwargs))
+    self._items['text'] = path
+
+  def jsonData(self):
+    children = [self.children[k].jsonData() for k in sorted(self.children)]
+    output = {}
+    for k in self._items:
+      if 'children' == k:
+        continue
+      if isinstance(self._items[k], dictobj.DictionaryObject):
+        output[k] = self._items[k].asdict()
+      else:
+        output[k] = self._items[k]
+    if len(children):
+      output['children'] = children
+    return output
+
+
+class JSTree(dictobj.DictionaryObject):
+  """
+  An immutable dictionary-like object that converts a list of "paths"
+  into a tree structure suitable for jQuery's jsTree.
+  """
+
+  def __init__(self, paths, **kwargs):
+    """
+    Take a list of paths and put them into a tree.  Paths with the same prefix should
+    be at the same level in the tree.
+
+    kwargs may be standard jsTree options used at all levels in the tree.  These will be output
+    in the JSON.
+
+    """
+    if any(not isinstance(p, Path) for p in paths):
+      raise TypeError(
+        "All paths must be instances of '%s'" % Path.__name__)
+
+    super(JSTree, self).__init__()
+
+    root = Node('', None, **kwargs)
+    for path in sorted(paths):
+      curr = root
+      subpaths = path.path.split(os.path.sep)
+      for i, subpath in enumerate(subpaths):
+        if subpath not in curr.children:
+          opt = copy.deepcopy(kwargs)
+          if len(subpaths) - 1 == i:
+            oid = path.id
+            if path.options is not None:
+              opt.update(path.options)
+          else:
+            oid = None
+          curr.children[subpath] = Node(subpath, oid, **opt)
+          # oid = path.id if len(subpaths) - 1 == i else None
+          # curr.children[subpath] = Node(subpath, oid, **kwargs)
+        curr = curr.children[subpath]
+    self._items['_root'] = root
+
+  def pretty(self, root=None, depth=0, spacing=2):
+    """
+    Create a "pretty print" represenation of the tree with customized indentation at each
+    level of the tree.
+    """
+    if root is None:
+      root = self._root
+    fmt = "%s%s/" if root.children else "%s%s"
+    s = fmt % (" " * depth * spacing, root.text)
+    for child in root.children:
+      child = root.children[child]
+      s += "\n%s" % self.pretty(child, depth + 1, spacing)
+    return s
+
+  def jsonData(self):
+    """
+    Returns a copy of the internal tree in a JSON-friendly format,
+    ready for consumption by jsTree.  The data is represented as a
+    list of dictionaries, each of which represents one of our internal nodes.
+
+    """
+    return [self._root.children[k].jsonData() for k in sorted(self._root.children)]
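
A hypothetical usage sketch for the module above (file names and ids are invented; assumes POSIX path separators):

    paths = [Path('data/2016/sample.bam', 1, None),
             Path('data/2016/sample.fastq', 2, None),
             Path('README.txt', 3, None)]
    tree = JSTree(paths)
    print(tree.pretty())           # indented plain-text rendering of the tree
    jstree_json = tree.jsonData()  # nested list-of-dicts ready to hand to jsTree
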
diff --git a/lib/galaxy/util/lazy_process.py b/lib/galaxy/util/lazy_process.py
new file mode 100644
index 0000000..e979bc9
--- /dev/null
+++ b/lib/galaxy/util/lazy_process.py
@@ -0,0 +1,57 @@
+import subprocess
+import threading
+import time
+
+
+class LazyProcess( object ):
+    """ Abstraction describing a command line launching a service - probably
+    as needed as functionality is accessed in Galaxy.
+    """
+
+    def __init__( self, command_and_args ):
+        self.command_and_args = command_and_args
+        self.thread_lock = threading.Lock()
+        self.allow_process_request = True
+        self.process = None
+
+    def start_process( self ):
+        with self.thread_lock:
+            if self.allow_process_request:
+                self.allow_process_request = False
+                t = threading.Thread(target=self.__start)
+                t.daemon = True
+                t.start()
+
+    def __start(self):
+        with self.thread_lock:
+            self.process = subprocess.Popen( self.command_and_args, close_fds=True )
+
+    def shutdown( self ):
+        with self.thread_lock:
+            self.allow_process_request = False
+        if self.running:
+            self.process.terminate()
+            time.sleep(.01)
+            if self.running:
+                self.process.kill()
+
+    @property
+    def running( self ):
+        return self.process is not None and self.process.poll() is None
+
+
+class NoOpLazyProcess( object ):
+    """ LazyProcess abstraction meant to describe potentially optional
+    services; in those cases where one is not configured or valid, this
+    class can be used in place of LazyProcess.
+    """
+
+    def start_process( self ):
+        return
+
+    def shutdown( self ):
+        return
+
+    @property
+    def running( self ):
+        return False
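
A hypothetical usage sketch (the command line is an arbitrary placeholder for a long-running helper service):

    proxy = LazyProcess(['sleep', '60'])
    proxy.start_process()   # returns immediately; Popen runs on a daemon thread
    # ... use the service while proxy.running ...
    proxy.shutdown()        # terminate, then kill if the process survives

    unconfigured = NoOpLazyProcess()  # drop-in stand-in when no service is configured
    unconfigured.running              # always False
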
diff --git a/lib/galaxy/util/log/__init__.py b/lib/galaxy/util/log/__init__.py
new file mode 100644
index 0000000..c70627c
--- /dev/null
+++ b/lib/galaxy/util/log/__init__.py
@@ -0,0 +1,7 @@
+class TraceLogger( object ):
+
+    def __init__( self, name ):
+        self.name = name
+
+    def log( self, **kwargs ):
+        raise TypeError( "Abstract Method" )
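
A minimal, hypothetical subclass sketch showing the contract concrete trace loggers are expected to fulfil:

    class PrintTraceLogger(TraceLogger):
        def log(self, **kwargs):
            # a real implementation would ship the event to a collector
            print(self.name, kwargs)
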
diff --git a/lib/galaxy/util/log/fluent_log.py b/lib/galaxy/util/log/fluent_log.py
new file mode 100644
index 0000000..139c9c4
--- /dev/null
+++ b/lib/galaxy/util/log/fluent_log.py
@@ -0,0 +1,45 @@
+"""
+Provides a `TraceLogger` implementation that logs to a fluentd collector
+"""
+
+import json
+import threading
+import time
+
+try:
+    from fluent.sender import FluentSender
+except ImportError:
+    FluentSender = None
+
+
+FLUENT_IMPORT_MESSAGE = ('The Python fluent package is required to use this '
+                         'feature, please install it')
+
+
+class FluentTraceLogger( object ):
+    def __init__( self, name, host='localhost', port=24224 ):
+        assert FluentSender is not None, FLUENT_IMPORT_MESSAGE
+        self.lock = threading.Lock()
+        self.thread_local = threading.local()
+        self.name = name
+        self.sender = FluentSender( self.name, host=host, port=port )
+
+    def context_set( self, key, value ):
+        self.lock.acquire()
+        if not hasattr( self.thread_local, 'context' ):
+            self.thread_local.context = {}
+        self.thread_local.context[key] = value
+        self.lock.release()
+
+    def context_remove( self, key ):
+        self.lock.acquire()
+        if hasattr( self.thread_local, 'context' ):
+            self.thread_local.context.pop( key, None )
+        self.lock.release()
+
+    def log( self, label, event_time=None, **kwargs ):
+        self.lock.acquire()
+        if hasattr( self.thread_local, 'context' ):
+            kwargs.update( self.thread_local.context )
+        self.lock.release()
+        event_time = event_time or time.time()
+        self.sender.emit_with_time( label, int(event_time), json.dumps(kwargs, default=str))
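
A hypothetical usage sketch (requires the optional 'fluent' package and a fluentd collector on localhost:24224; the label and keys are invented):

    tracer = FluentTraceLogger('galaxy')
    tracer.context_set('request_id', 'abc123')  # attached to this thread's later events
    tracer.log('tool.execute', tool_id='upload1', user='someone@example.org')
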
diff --git a/lib/galaxy/util/multi_byte.py b/lib/galaxy/util/multi_byte.py
new file mode 100644
index 0000000..29cb796
--- /dev/null
+++ b/lib/galaxy/util/multi_byte.py
@@ -0,0 +1,25 @@
+try:
+    import wchartype
+except ImportError:
+    wchartype = None
+
+from six import text_type
+
+
+def is_multi_byte( chars ):
+    if wchartype is None:
+        message = "Attempted to use galaxy.util.multi_byte but dependency wchartype is unavailable."
+        raise Exception(message)
+    for char in chars:
+        try:
+            char = text_type( char )
+        except UnicodeDecodeError:
+            # Probably binary
+            return False
+        if ( wchartype.is_asian( char ) or wchartype.is_full_width( char ) or
+                wchartype.is_kanji( char ) or wchartype.is_hiragana( char ) or
+                wchartype.is_katakana( char ) or wchartype.is_half_katakana( char ) or
+                wchartype.is_hangul( char ) or wchartype.is_full_digit( char ) or
+                wchartype.is_full_letter( char )):
+            return True
+    return False
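
A hypothetical sketch of the intended behavior (requires the optional 'wchartype' package; file names are invented):

    is_multi_byte(u'data.txt')   # False - plain ASCII
    is_multi_byte(u'データ.txt')  # True - contains katakana characters
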
diff --git a/lib/galaxy/util/none_like.py b/lib/galaxy/util/none_like.py
new file mode 100644
index 0000000..3c0cf8f
--- /dev/null
+++ b/lib/galaxy/util/none_like.py
@@ -0,0 +1,41 @@
+"""
+Objects with None-like values
+"""
+
+from galaxy.datatypes.registry import Registry
+from galaxy.model.metadata import MetadataCollection
+
+
+class RecursiveNone:
+    def __str__( self ):
+        return "None"
+
+    def __repr__( self ):
+        return str( self )
+
+    def __getattr__( self, name ):
+        value = RecursiveNone()
+        setattr( self, name, value )
+        return value
+
+    def __nonzero__( self ):
+        return False
+
+
+class NoneDataset( RecursiveNone ):
+    def __init__( self, datatypes_registry=None, ext='data', dbkey='?' ):
+        self.ext = self.extension = ext
+        self.dbkey = dbkey
+        if datatypes_registry is None:
+            # Default Value Required for unit tests
+            datatypes_registry = Registry()
+            datatypes_registry.load_datatypes()
+        self.datatype = datatypes_registry.get_datatype_by_extension( ext )
+        self._metadata = None
+        self.metadata = MetadataCollection( self )
+
+    def __getattr__( self, name ):
+        return "None"
+
+    def missing_meta( self ):
+        return False
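
A hypothetical sketch of RecursiveNone behavior (the attribute names are arbitrary):

    n = RecursiveNone()
    str(n.metadata.dbkey)  # 'None' - attribute chains never fail
    bool(n)                # False, via __nonzero__
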
diff --git a/lib/galaxy/util/object_wrapper.py b/lib/galaxy/util/object_wrapper.py
new file mode 100644
index 0000000..3bcbc29
--- /dev/null
+++ b/lib/galaxy/util/object_wrapper.py
@@ -0,0 +1,493 @@
+"""
+Classes for wrapping Objects and Sanitizing string output.
+"""
+import collections
+import inspect
+import logging
+import string
+import sys
+
+from numbers import Number
+try:
+    from types import NoneType
+except ImportError:
+    NoneType = type(None)
+try:
+    from types import NotImplementedType
+except ImportError:
+    NotImplementedType = type(NotImplemented)
+
+try:
+    from types import EllipsisType
+except ImportError:
+    EllipsisType = type(Ellipsis)
+
+try:
+    from types import XRangeType
+except ImportError:
+    XRangeType = range
+
+try:
+    from types import SliceType
+except ImportError:
+    SliceType = slice
+
+try:
+    from types import (
+        BufferType,
+        DictProxyType
+    )
+except ImportError:
+    # Py3 doesn't have these concepts, just treat them like SliceType
+    # so that they are __WRAP_NO_SUBCLASS__.
+    BufferType = SliceType
+    DictProxyType = SliceType
+
+from types import (
+    BuiltinFunctionType,
+    BuiltinMethodType,
+    CodeType,
+    FrameType,
+    FunctionType,
+    GeneratorType,
+    GetSetDescriptorType,
+    MemberDescriptorType,
+    MethodType,
+    ModuleType,
+    TracebackType,
+)
+from six.moves import (
+    copyreg as copy_reg,
+    UserDict
+)
+
+from galaxy.util import sanitize_lists_to_string as _sanitize_lists_to_string
+
+log = logging.getLogger( __name__ )
+
+# Define different behaviors for different types, see also: https://docs.python.org/2/library/types.html
+
+# Known Callable types
+__CALLABLE_TYPES__ = ( FunctionType, MethodType, GeneratorType, CodeType, BuiltinFunctionType, BuiltinMethodType, )
+
+# Always wrap these types without attempting to subclass
+__WRAP_NO_SUBCLASS__ = ( ModuleType, XRangeType, SliceType, BufferType, TracebackType, FrameType, DictProxyType,
+                         GetSetDescriptorType, MemberDescriptorType ) + __CALLABLE_TYPES__
+
+# Don't wrap or sanitize.
+__DONT_SANITIZE_TYPES__ = ( Number, bool, NoneType, NotImplementedType, EllipsisType, bytearray, )
+
+# Don't wrap, but do sanitize.
+__DONT_WRAP_TYPES__ = tuple()  # previously ( basestring, ), so that the unsanitized string could be recovered; basestring instances are now wrapped
+
+# Wrap contents, but not the container
+__WRAP_SEQUENCES__ = ( tuple, list, )
+__WRAP_SETS__ = ( set, frozenset, )
+__WRAP_MAPPINGS__ = ( dict, UserDict, )
+
+
+# Define the set of characters that are not sanitized, and define a set of mappings for those that are.
+# characters that are valid
+VALID_CHARACTERS = set( string.ascii_letters + string.digits + " -=_.()/+*^,:?!@" )
+
+# characters that are allowed but need to be escaped
+CHARACTER_MAP = { '>': '__gt__',
+                  '<': '__lt__',
+                  "'": '__sq__',
+                  '"': '__dq__',
+                  '[': '__ob__',
+                  ']': '__cb__',
+                  '{': '__oc__',
+                  '}': '__cc__',
+                  '\n': '__cn__',
+                  '\r': '__cr__',
+                  '\t': '__tc__',
+                  '#': '__pd__'}
+
+INVALID_CHARACTER = "X"
+
+if sys.version_info > (3, 0):
+    # __coerce__ doesn't do anything under Python 3 anyway.
+    def coerce(x, y):
+        return x
+
+
+def cmp(x, y):
+    # Builtin in Python 2, but not Python 3.
+    return (x > y) - (x < y)
+
+
+def sanitize_lists_to_string( values, valid_characters=VALID_CHARACTERS, character_map=CHARACTER_MAP, invalid_character=INVALID_CHARACTER ):
+    return _sanitize_lists_to_string( values, valid_characters=valid_characters, character_map=character_map, invalid_character=invalid_character )
+
+
+def wrap_with_safe_string( value, no_wrap_classes=None ):
+    """
+    Recursively wrap values that should be wrapped.
+    """
+
+    def __do_wrap( value ):
+        if isinstance( value, SafeStringWrapper ):
+            # Only ever wrap one-layer
+            return value
+        if isinstance( value, collections.Callable ):
+            safe_class = CallableSafeStringWrapper
+        else:
+            safe_class = SafeStringWrapper
+        if isinstance( value, no_wrap_classes ):
+            return value
+        if isinstance( value, __DONT_WRAP_TYPES__ ):
+            return sanitize_lists_to_string( value, valid_characters=VALID_CHARACTERS, character_map=CHARACTER_MAP )
+        if isinstance( value, __WRAP_NO_SUBCLASS__ ):
+            return safe_class( value, safe_string_wrapper_function=__do_wrap )
+        for this_type in __WRAP_SEQUENCES__ + __WRAP_SETS__:
+            if isinstance( value, this_type ):
+                return this_type( map( __do_wrap, value ) )
+        for this_type in __WRAP_MAPPINGS__:
+            if isinstance( value, this_type ):
+                # Wrap both key and value
+                return this_type( ( __do_wrap( x[0] ), __do_wrap( x[1] ) ) for x in value.items() )
+        # Create a dynamic class that joins SafeStringWrapper with the object being wrapped.
+        # This allows e.g. isinstance to continue to work.
+        try:
+            wrapped_class_name = value.__name__
+            wrapped_class = value
+        except Exception:
+            wrapped_class_name = value.__class__.__name__
+            wrapped_class = value.__class__
+        value_mod = inspect.getmodule( value )
+        if value_mod:
+            wrapped_class_name = "%s.%s" % ( value_mod.__name__, wrapped_class_name )
+        wrapped_class_name = "SafeStringWrapper(%s:%s)" % ( wrapped_class_name, ",".join( sorted( map( str, no_wrap_classes ) ) ) )
+        do_wrap_func_name = "__do_wrap_%s" % ( wrapped_class_name )
+        do_wrap_func = __do_wrap
+        global_dict = globals()
+        if wrapped_class_name in global_dict:
+            # Check to see if we have created a wrapper for this class yet, if so, reuse
+            wrapped_class = global_dict.get( wrapped_class_name )
+            do_wrap_func = global_dict.get( do_wrap_func_name, __do_wrap )
+        else:
+            try:
+                wrapped_class = type( wrapped_class_name, ( safe_class, wrapped_class, ), {} )
+            except TypeError as e:
+                # Fail-safe for when a class cannot be dynamically subclassed.
+                log.warning( "Unable to create dynamic subclass for %s, %s: %s", type( value ), value, e )
+                wrapped_class = type( wrapped_class_name, ( safe_class, ), {} )
+            if wrapped_class not in ( SafeStringWrapper, CallableSafeStringWrapper ):
+                # Save this wrapper for reuse and pickling/copying
+                global_dict[ wrapped_class_name ] = wrapped_class
+                do_wrap_func.__name__ = do_wrap_func_name
+                global_dict[ do_wrap_func_name ] = do_wrap_func
+
+                def pickle_safe_object( safe_object ):
+                    return ( wrapped_class, ( safe_object.unsanitized, do_wrap_func, ) )
+                # Set pickle and copy properties
+                copy_reg.pickle( wrapped_class, pickle_safe_object, do_wrap_func )
+        return wrapped_class( value, safe_string_wrapper_function=do_wrap_func )
+    # Determine classes not to wrap
+    if no_wrap_classes:
+        if not isinstance( no_wrap_classes, ( tuple, list ) ):
+            no_wrap_classes = [ no_wrap_classes ]
+        no_wrap_classes = list( no_wrap_classes ) + list( __DONT_SANITIZE_TYPES__ ) + [ SafeStringWrapper ]
+    else:
+        no_wrap_classes = list( __DONT_SANITIZE_TYPES__ ) + [ SafeStringWrapper ]
+    no_wrap_classes = tuple( sorted( set( no_wrap_classes ), key=str ) )
+    return __do_wrap( value )
+
+
+# N.B. refer to e.g. https://docs.python.org/2/reference/datamodel.html for information on Python's Data Model.
+
+
+class SafeStringWrapper( object ):
+    """
+    Class that wraps and sanitizes any provided value's attributes
+    that will attempt to be cast into a string.
+
+    Attempts to mimic behavior of original class, including operands.
+
+    To ensure proper handling of e.g. subclass checks, the *wrap_with_safe_string()*
+    function should be used.
+
+    This wrapping occurs in a recursive/parasitic fashion, as all called attributes of
+    the originally wrapped object will also be wrapped and sanitized, unless the attribute
+    is of a type found in __DONT_SANITIZE_TYPES__ + __DONT_WRAP_TYPES__ (e.g. strings
+    will still be sanitized but not wrapped, and integers will be neither).
+    """
+    __UNSANITIZED_ATTRIBUTE_NAME__ = 'unsanitized'
+    __NO_WRAP_NAMES__ = [ '__safe_string_wrapper_function__', __UNSANITIZED_ATTRIBUTE_NAME__]
+
+    def __new__( cls, *arg, **kwd ):
+        # We need to define a __new__ since we may be subclassing from e.g. immutable str,
+        # which internally sets data that will be used when evaluating other + this (this + other is handled by __add__)
+        try:
+            return super( SafeStringWrapper, cls ).__new__( cls, sanitize_lists_to_string( arg[0], valid_characters=VALID_CHARACTERS, character_map=CHARACTER_MAP ) )
+        except Exception as e:
+            log.warning( "Could not provide an argument to %s.__new__: %s; will try without arguments.", cls, e )
+            return super( SafeStringWrapper, cls ).__new__( cls )
+
+    def __init__( self, value, safe_string_wrapper_function=wrap_with_safe_string ):
+        self.unsanitized = value
+        self.__safe_string_wrapper_function__ = safe_string_wrapper_function
+
+    def __str__( self ):
+        return sanitize_lists_to_string( self.unsanitized, valid_characters=VALID_CHARACTERS, character_map=CHARACTER_MAP )
+
+    def __repr__( self ):
+        return "%s object at %x on: %s" % ( sanitize_lists_to_string( self.__class__.__name__, valid_characters=VALID_CHARACTERS, character_map=CHARACTER_MAP ), id( self ), sanitize_lists_to_string( repr( self.unsanitized ), valid_characters=VALID_CHARACTERS, character_map=CHARACTER_MAP ) )
+
+    def __lt__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.unsanitized < other
+
+    def __le__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.unsanitized <= other
+
+    def __eq__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.unsanitized == other
+
+    def __ne__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.unsanitized != other
+
+    def __gt__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.unsanitized > other
+
+    def __ge__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.unsanitized >= other
+
+    def __cmp__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return cmp( self.unsanitized, other )
+
+    # Do not implement __rcmp__, python 2.2 < 2.6
+
+    def __hash__( self ):
+        return hash( self.unsanitized )
+
+    def __bool__( self ):
+        return bool( self.unsanitized )
+    __nonzero__ = __bool__
+
+    # Do not implement __unicode__, we will rely on __str__
+
+    def __getattr__( self, name ):
+        if name in SafeStringWrapper.__NO_WRAP_NAMES__:
+            # FIXME: is this ever reached?
+            return object.__getattr__( self, name )
+        return self.__safe_string_wrapper_function__( getattr( self.unsanitized, name ) )
+
+    def __setattr__( self, name, value ):
+        if name in SafeStringWrapper.__NO_WRAP_NAMES__:
+            return object.__setattr__( self, name, value )
+        return setattr( self.unsanitized, name, value )
+
+    def __delattr__( self, name ):
+        if name in SafeStringWrapper.__NO_WRAP_NAMES__:
+            return object.__delattr__( self, name )
+        return delattr( self.unsanitized, name )
+
+    def __getattribute__( self, name ):
+        if name in SafeStringWrapper.__NO_WRAP_NAMES__:
+            return object.__getattribute__( self, name )
+        return self.__safe_string_wrapper_function__( getattr( object.__getattribute__( self, 'unsanitized' ), name ) )
+
+    # Skip Descriptors
+
+    # Skip __slots__
+
+    # Don't need __metaclass__, we'll use the helper function to handle subclassing for e.g. isinstance()
+
+    # Revisit:
+    # __instancecheck__
+    # __subclasscheck__
+    # We are using a helper class to create dynamic subclasses to handle class checks
+
+    # We address __call__ as needed based upon unsanitized, through the use of a CallableSafeStringWrapper class
+
+    def __len__( self ):
+        original_value = self.unsanitized
+        while isinstance( original_value, SafeStringWrapper ):
+            original_value = original_value.unsanitized
+        return len( original_value )
+
+    def __getitem__( self, key ):
+        return self.__safe_string_wrapper_function__( self.unsanitized[ key ] )
+
+    def __setitem__( self, key, value ):
+        while isinstance( value, SafeStringWrapper ):
+            value = value.unsanitized
+        self.unsanitized[ key ] = value
+
+    def __delitem__( self, key ):
+        del self.unsanitized[ key ]
+
+    def __iter__( self ):
+        return iter( map( self.__safe_string_wrapper_function__, iter( self.unsanitized ) ) )
+
+    # Do not implement __reversed__
+
+    def __contains__( self, item ):
+        # FIXME: Do we need to consider if item is/isn't or does/doesn't contain SafeStringWrapper?
+        # When considering e.g. nested lists/dicts/etc, this gets complicated
+        while isinstance( item, SafeStringWrapper ):
+            item = item.unsanitized
+        return item in self.unsanitized
+
+    # Not sure that we need these slice methods, but will provide anyway
+    def __getslice__( self, i, j ):
+        return self.__safe_string_wrapper_function__( self.unsanitized[ i:j ] )
+
+    def __setslice__( self, i, j, value ):
+        self.unsanitized[ i:j ] = value
+
+    def __delslice__( self, i, j ):
+        del self.unsanitized[ i:j ]
+
+    def __add__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized + other )
+
+    def __sub__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized - other )
+
+    def __mul__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized * other )
+
+    def __floordiv__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized // other )
+
+    def __mod__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized % other )
+
+    def __divmod__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( divmod( self.unsanitized, other ) )
+
+    def __pow__( self, *other ):
+        # other is a tuple here (pow accepts an optional modulus), so unwrap each element
+        other = tuple( o.unsanitized if isinstance( o, SafeStringWrapper ) else o for o in other )
+        return self.__safe_string_wrapper_function__( pow( self.unsanitized, *other ) )
+
+    def __lshift__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized << other )
+
+    def __rshift__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized >> other )
+
+    def __and__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized & other )
+
+    def __xor__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized ^ other )
+
+    def __or__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized | other )
+
+    def __div__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized / other )
+
+    def __truediv__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( self.unsanitized / other )
+
+    # The only reflected operand that we will define is __rpow__, due to coercion rules complications as per docs
+    def __rpow__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return self.__safe_string_wrapper_function__( pow( other, self.unsanitized ) )
+
+    # Do not implement in-place operands
+
+    def __neg__( self ):
+        return self.__safe_string_wrapper_function__( -self.unsanitized )
+
+    def __pos__( self ):
+        return self.__safe_string_wrapper_function__( +self.unsanitized )
+
+    def __abs__( self ):
+        return self.__safe_string_wrapper_function__( abs( self.unsanitized ) )
+
+    def __invert__( self ):
+        return self.__safe_string_wrapper_function__( ~self.unsanitized )
+
+    def __complex__( self ):
+        return self.__safe_string_wrapper_function__( complex( self.unsanitized ) )
+
+    def __int__( self ):
+        return int( self.unsanitized )
+
+    def __float__( self ):
+        return float( self.unsanitized )
+
+    def __oct__( self ):
+        return oct( self.unsanitized )
+
+    def __hex__( self ):
+        return hex( self.unsanitized )
+
+    def __index__( self ):
+        return self.unsanitized.__index__()
+
+    def __coerce__( self, other ):
+        while isinstance( other, SafeStringWrapper ):
+            other = other.unsanitized
+        return coerce( self.unsanitized, other )
+
+    def __enter__( self ):
+        return self.unsanitized.__enter__()
+
+    def __exit__( self, *args ):
+        return self.unsanitized.__exit__( *args )
+
+
+class CallableSafeStringWrapper( SafeStringWrapper ):
+
+    def __call__( self, *args, **kwds ):
+        return self.__safe_string_wrapper_function__( self.unsanitized( *args, **kwds ) )
+
+
+# Enable pickling/deepcopy
+def pickle_SafeStringWrapper( safe_object ):
+    args = ( safe_object.unsanitized, )
+    cls = SafeStringWrapper
+    if isinstance( safe_object, CallableSafeStringWrapper ):
+        cls = CallableSafeStringWrapper
+    return ( cls, args )
+
+
+copy_reg.pickle( SafeStringWrapper, pickle_SafeStringWrapper, wrap_with_safe_string )
+copy_reg.pickle( CallableSafeStringWrapper, pickle_SafeStringWrapper, wrap_with_safe_string )
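
A hypothetical sketch of the wrapper in action (the markup string is an invented example):

    wrapped = wrap_with_safe_string('<script>alert(1)</script>')
    str(wrapped)         # '__lt__script__gt__alert(1)__lt__/script__gt__'
    wrapped.unsanitized  # the original, unescaped string
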
diff --git a/lib/galaxy/util/odict.py b/lib/galaxy/util/odict.py
new file mode 100644
index 0000000..e33013a
--- /dev/null
+++ b/lib/galaxy/util/odict.py
@@ -0,0 +1,94 @@
+"""
+Ordered dictionary implementation.
+"""
+
+from six.moves import UserDict
+dict_alias = dict
+
+
+class odict(UserDict):
+    """
+    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/107747
+
+    This dictionary class extends UserDict to record the order in which items are
+    added. Calling keys(), values(), items(), etc. will return results in this
+    order.
+    """
+    def __init__( self, dict=None ):
+        item = dict
+        self._keys = []
+        if isinstance(item, dict_alias):
+            UserDict.__init__( self, item )
+        else:
+            UserDict.__init__( self, None )
+        if isinstance(item, list):
+            for (key, value) in item:
+                self[key] = value
+
+    def __delitem__( self, key ):
+        UserDict.__delitem__( self, key )
+        self._keys.remove( key )
+
+    def __setitem__( self, key, item ):
+        UserDict.__setitem__( self, key, item )
+        if key not in self._keys:
+            self._keys.append( key )
+
+    def clear( self ):
+        UserDict.clear( self )
+        self._keys = []
+
+    def copy(self):
+        new = odict()
+        new.update( self )
+        return new
+
+    def items( self ):
+        return zip( self._keys, self.values() )
+
+    def keys( self ):
+        return self._keys[:]
+
+    def popitem( self ):
+        try:
+            key = self._keys[-1]
+        except IndexError:
+            raise KeyError( 'dictionary is empty' )
+        val = self[ key ]
+        del self[ key ]
+        return ( key, val )
+
+    def setdefault( self, key, failobj=None ):
+        if key not in self._keys:
+            self._keys.append( key )
+        return UserDict.setdefault( self, key, failobj )
+
+    def update( self, dict ):
+        for ( key, val ) in dict.items():
+            self.__setitem__( key, val )
+
+    def values( self ):
+        return map( self.get, self._keys )
+
+    def iterkeys( self ):
+        return iter( self._keys )
+
+    def itervalues( self ):
+        for key in self._keys:
+            yield self.get( key )
+
+    def iteritems( self ):
+        for key in self._keys:
+            yield key, self.get( key )
+
+    def __iter__( self ):
+        for key in self._keys:
+            yield key
+
+    def reverse( self ):
+        self._keys.reverse()
+
+    def insert( self, index, key, item ):
+        if key not in self._keys:
+            self._keys.insert( index, key )
+            UserDict.__setitem__( self, key, item )
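
A hypothetical sketch of the ordering behavior:

    d = odict()
    d['z'] = 1
    d['a'] = 2
    d.keys()             # ['z', 'a'] - insertion order, not sorted
    d.insert(1, 'm', 3)
    d.keys()             # ['z', 'm', 'a']
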
diff --git a/lib/galaxy/util/pastescript/__init__.py b/lib/galaxy/util/pastescript/__init__.py
new file mode 100644
index 0000000..45db1d6
--- /dev/null
+++ b/lib/galaxy/util/pastescript/__init__.py
@@ -0,0 +1,3 @@
+"""
+Command for loading and serving wsgi apps taken from PasteScript
+"""
diff --git a/lib/galaxy/util/pastescript/loadwsgi.py b/lib/galaxy/util/pastescript/loadwsgi.py
new file mode 100644
index 0000000..ab31383
--- /dev/null
+++ b/lib/galaxy/util/pastescript/loadwsgi.py
@@ -0,0 +1,776 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+# Mostly taken from PasteDeploy and stripped down for Galaxy
+
+import inspect
+import os
+import re
+import sys
+
+import pkg_resources
+
+from six import iteritems
+from six.moves.urllib.parse import unquote
+
+from galaxy.util.properties import NicerConfigParser
+
+
+__all__ = ('loadapp', 'loadserver', 'loadfilter', 'appconfig')
+
+# ---- from paste.deploy.compat --------------------------------------
+
+"""Python 2<->3 compatibility module"""
+
+
+def print_(template, *args, **kwargs):
+    template = str(template)
+    if args:
+        template = template % args
+    elif kwargs:
+        template = template % kwargs
+    sys.stdout.writelines(template)
+
+
+if sys.version_info < (3, 0):
+    def reraise(t, e, tb):
+        exec('raise t, e, tb', dict(t=t, e=e, tb=tb))
+else:
+    def reraise(t, e, tb):
+        exec('raise e from tb', dict(e=e, tb=tb))
+
+# ---- from paste.deploy.util ----------------------------------------
+
+
+def fix_type_error(exc_info, callable, varargs, kwargs):
+    """
+    Given an exception, this will test if the exception was due to a
+    signature error, and annotate the error with better information if
+    so.
+
+    Usage::
+
+      try:
+          val = callable(*args, **kw)
+      except TypeError:
+          exc_info = fix_type_error(None, callable, args, kw)
+          raise exc_info[0], exc_info[1], exc_info[2]
+    """
+    if exc_info is None:
+        exc_info = sys.exc_info()
+    if (exc_info[0] != TypeError or
+            str(exc_info[1]).find('arguments') == -1 or
+            getattr(exc_info[1], '_type_error_fixed', False)):
+        return exc_info
+    exc_info[1]._type_error_fixed = True
+    argspec = inspect.formatargspec(*inspect.getargspec(callable))
+    args = ', '.join(map(_short_repr, varargs))
+    if kwargs and args:
+        args += ', '
+    if kwargs:
+        kwargs = sorted(kwargs.keys())
+        args += ', '.join('%s=...' % n for n in kwargs)
+    gotspec = '(%s)' % args
+    msg = '%s; got %s, wanted %s' % (exc_info[1], gotspec, argspec)
+    exc_info[1].args = (msg,)
+    return exc_info
+
+
+def _short_repr(v):
+    v = repr(v)
+    if len(v) > 12:
+        v = v[:8] + '...' + v[-4:]
+    return v
+
+
+def fix_call(callable, *args, **kw):
+    """
+    Call ``callable(*args, **kw)`` fixing any type errors that come out.
+    """
+    try:
+        val = callable(*args, **kw)
+    except TypeError:
+        exc_info = fix_type_error(None, callable, args, kw)
+        reraise(*exc_info)
+    return val
+
+
+def lookup_object(spec):
+    """
+    Looks up a module or object from a ``some.module:func_name`` specification.
+    To just look up a module, omit the colon and everything after it.
+    """
+    parts, target = spec.split(':') if ':' in spec else (spec, None)
+    module = __import__(parts)
+
+    for part in parts.split('.')[1:] + ([target] if target else []):
+        module = getattr(module, part)
+
+    return module
+
+# ---- from paste.deploy.loadwsgi ------------------------------------
+
+############################################################
+# Utility functions
+############################################################
+
+
+def import_string(s):
+    return pkg_resources.EntryPoint.parse("x=" + s).load(False)
+
+
+def _aslist(obj):
+    """
+    Turn object into a list; lists and tuples are left as-is, None
+    becomes [], and everything else turns into a one-element list.
+    """
+    if obj is None:
+        return []
+    elif isinstance(obj, (list, tuple)):
+        return obj
+    else:
+        return [obj]
+
+
+def _flatten(lst):
+    """
+    Flatten a nested list.
+    """
+    if not isinstance(lst, (list, tuple)):
+        return [lst]
+    result = []
+    for item in lst:
+        result.extend(_flatten(item))
+    return result
+
+
+############################################################
+# Object types
+############################################################
+
+
+class _ObjectType(object):
+
+    name = None
+    egg_protocols = None
+    config_prefixes = None
+
+    def __init__(self):
+        # Normalize these variables:
+        self.egg_protocols = [_aslist(p) for p in _aslist(self.egg_protocols)]
+        self.config_prefixes = [_aslist(p) for p in _aslist(self.config_prefixes)]
+
+    def __repr__(self):
+        return '<%s protocols=%r prefixes=%r>' % (
+            self.name, self.egg_protocols, self.config_prefixes)
+
+    def invoke(self, context):
+        assert context.protocol in _flatten(self.egg_protocols)
+        return fix_call(context.object,
+                        context.global_conf, **context.local_conf)
+
+
+class _App(_ObjectType):
+
+    name = 'application'
+    egg_protocols = ['paste.app_factory', 'paste.composite_factory',
+                     'paste.composit_factory']
+    config_prefixes = [['app', 'application'], ['composite', 'composit'],
+                       'pipeline', 'filter-app']
+
+    def invoke(self, context):
+        if context.protocol in ('paste.composit_factory',
+                                'paste.composite_factory'):
+            return fix_call(context.object,
+                            context.loader, context.global_conf,
+                            **context.local_conf)
+        elif context.protocol == 'paste.app_factory':
+            return fix_call(context.object, context.global_conf, **context.local_conf)
+        else:
+            assert 0, "Protocol %r unknown" % context.protocol
+
+
+APP = _App()
+
+
+class _Filter(_ObjectType):
+    name = 'filter'
+    egg_protocols = [['paste.filter_factory', 'paste.filter_app_factory']]
+    config_prefixes = ['filter']
+
+    def invoke(self, context):
+        if context.protocol == 'paste.filter_factory':
+            return fix_call(context.object,
+                            context.global_conf, **context.local_conf)
+        elif context.protocol == 'paste.filter_app_factory':
+            def filter_wrapper(wsgi_app):
+                # This should be an object, so it has a nicer __repr__
+                return fix_call(context.object,
+                                wsgi_app, context.global_conf,
+                                **context.local_conf)
+            return filter_wrapper
+        else:
+            assert 0, "Protocol %r unknown" % context.protocol
+
+
+FILTER = _Filter()
+
+
+class _Server(_ObjectType):
+    name = 'server'
+    egg_protocols = [['paste.server_factory', 'paste.server_runner']]
+    config_prefixes = ['server']
+
+    def invoke(self, context):
+        if context.protocol == 'paste.server_factory':
+            return fix_call(context.object,
+                            context.global_conf, **context.local_conf)
+        elif context.protocol == 'paste.server_runner':
+            def server_wrapper(wsgi_app):
+                # This should be an object, so it has a nicer __repr__
+                return fix_call(context.object,
+                                wsgi_app, context.global_conf,
+                                **context.local_conf)
+            return server_wrapper
+        else:
+            assert 0, "Protocol %r unknown" % context.protocol
+
+
+SERVER = _Server()
+
+
+# Virtual type: (@@: There's clearly something crufty here;
+# this probably could be more elegant)
+class _PipeLine(_ObjectType):
+    name = 'pipeline'
+
+    def invoke(self, context):
+        app = context.app_context.create()
+        filters = [c.create() for c in context.filter_contexts]
+        filters.reverse()
+        for filter_ in filters:
+            app = filter_(app)
+        return app
+
+
+PIPELINE = _PipeLine()
+
+
+class _FilterApp(_ObjectType):
+    name = 'filter_app'
+
+    def invoke(self, context):
+        next_app = context.next_context.create()
+        filter_ = context.filter_context.create()
+        return filter_(next_app)
+
+
+FILTER_APP = _FilterApp()
+
+
+class _FilterWith(_App):
+    name = 'filtered_with'
+
+    def invoke(self, context):
+        filter_ = context.filter_context.create()
+        filtered = context.next_context.create()
+        if context.next_context.object_type is APP:
+            return filter_(filtered)
+        else:
+            # filtering a filter
+            def composed(app):
+                return filter_(filtered(app))
+            return composed
+
+
+FILTER_WITH = _FilterWith()
+
+############################################################
+# Loaders
+############################################################
+
+
+def loadapp(uri, name=None, **kw):
+    return loadobj(APP, uri, name=name, **kw)
+
+
+def loadfilter(uri, name=None, **kw):
+    return loadobj(FILTER, uri, name=name, **kw)
+
+
+def loadserver(uri, name=None, **kw):
+    return loadobj(SERVER, uri, name=name, **kw)
+
+
+def appconfig(uri, name=None, relative_to=None, global_conf=None):
+    context = loadcontext(APP, uri, name=name,
+                          relative_to=relative_to,
+                          global_conf=global_conf)
+    return context.config()
+
+
+_loaders = {}
+
+
+def loadobj(object_type, uri, name=None, relative_to=None,
+            global_conf=None):
+    context = loadcontext(
+        object_type, uri, name=name, relative_to=relative_to,
+        global_conf=global_conf)
+    return context.create()
+
+
+def loadcontext(object_type, uri, name=None, relative_to=None,
+                global_conf=None):
+    if '#' in uri:
+        if name is None:
+            uri, name = uri.split('#', 1)
+        else:
+            # @@: Ignore fragment or error?
+            uri = uri.split('#', 1)[0]
+    if name is None:
+        name = 'main'
+    if ':' not in uri:
+        raise LookupError("URI has no scheme: %r" % uri)
+    scheme, path = uri.split(':', 1)
+    scheme = scheme.lower()
+    if scheme not in _loaders:
+        raise LookupError(
+            "URI scheme not known: %r (from %s)"
+            % (scheme, ', '.join(_loaders.keys())))
+    return _loaders[scheme](
+        object_type,
+        uri, path, name=name, relative_to=relative_to,
+        global_conf=global_conf)
+
+
+def _loadconfig(object_type, uri, path, name, relative_to,
+                global_conf):
+    isabs = os.path.isabs(path)
+    # De-Windowsify the paths:
+    path = path.replace('\\', '/')
+    if not isabs:
+        if not relative_to:
+            raise ValueError(
+                "Cannot resolve relative uri %r; no relative_to keyword "
+                "argument given" % uri)
+        relative_to = relative_to.replace('\\', '/')
+        if relative_to.endswith('/'):
+            path = relative_to + path
+        else:
+            path = relative_to + '/' + path
+    if path.startswith('///'):
+        path = path[2:]
+    path = unquote(path)
+    loader = ConfigLoader(path)
+    if global_conf:
+        loader.update_defaults(global_conf, overwrite=False)
+    return loader.get_context(object_type, name, global_conf)
+
+
+_loaders['config'] = _loadconfig
+
+
+def _loadegg(object_type, uri, spec, name, relative_to,
+             global_conf):
+    loader = EggLoader(spec)
+    return loader.get_context(object_type, name, global_conf)
+
+
+_loaders['egg'] = _loadegg
+
+
+def _loadfunc(object_type, uri, spec, name, relative_to,
+              global_conf):
+
+    loader = FuncLoader(spec)
+    return loader.get_context(object_type, name, global_conf)
+
+
+_loaders['call'] = _loadfunc
+
+############################################################
+# Loaders
+############################################################
+
+
+class _Loader(object):
+
+    def get_app(self, name=None, global_conf=None):
+        return self.app_context(
+            name=name, global_conf=global_conf).create()
+
+    def get_filter(self, name=None, global_conf=None):
+        return self.filter_context(
+            name=name, global_conf=global_conf).create()
+
+    def get_server(self, name=None, global_conf=None):
+        return self.server_context(
+            name=name, global_conf=global_conf).create()
+
+    def app_context(self, name=None, global_conf=None):
+        return self.get_context(
+            APP, name=name, global_conf=global_conf)
+
+    def filter_context(self, name=None, global_conf=None):
+        return self.get_context(
+            FILTER, name=name, global_conf=global_conf)
+
+    def server_context(self, name=None, global_conf=None):
+        return self.get_context(
+            SERVER, name=name, global_conf=global_conf)
+
+    _absolute_re = re.compile(r'^[a-zA-Z]+:')
+
+    def absolute_name(self, name):
+        """
+        Returns true if the name includes a scheme
+        """
+        if name is None:
+            return False
+        return self._absolute_re.search(name)
+
+
+class ConfigLoader(_Loader):
+
+    def __init__(self, filename):
+        self.filename = filename = filename.strip()
+        defaults = {
+            'here': os.path.dirname(os.path.abspath(filename)),
+            '__file__': os.path.abspath(filename)
+        }
+        self.parser = NicerConfigParser(filename, defaults=defaults)
+        self.parser.optionxform = str  # Don't lower-case keys
+        with open(filename) as f:
+            self.parser.read_file(f)
+
+    def update_defaults(self, new_defaults, overwrite=True):
+        for key, value in iteritems(new_defaults):
+            if not overwrite and key in self.parser._defaults:
+                continue
+            self.parser._defaults[key] = value
+
+    def get_context(self, object_type, name=None, global_conf=None):
+        if self.absolute_name(name):
+            return loadcontext(object_type, name,
+                               relative_to=os.path.dirname(self.filename),
+                               global_conf=global_conf)
+        section = self.find_config_section(
+            object_type, name=name)
+        if global_conf is None:
+            global_conf = {}
+        else:
+            global_conf = global_conf.copy()
+        defaults = self.parser.defaults()
+        global_conf.update(defaults)
+        local_conf = {}
+        global_additions = {}
+        get_from_globals = {}
+        for option in self.parser.options(section):
+            if option.startswith('set '):
+                name = option[4:].strip()
+                global_additions[name] = global_conf[name] = (
+                    self.parser.get(section, option))
+            elif option.startswith('get '):
+                name = option[4:].strip()
+                get_from_globals[name] = self.parser.get(section, option)
+            else:
+                if option in defaults:
+                    # @@: It's a global option (?), so skip it
+                    continue
+                local_conf[option] = self.parser.get(section, option)
+        for local_var, glob_var in get_from_globals.items():
+            local_conf[local_var] = global_conf[glob_var]
+        if object_type in (APP, FILTER) and 'filter-with' in local_conf:
+            filter_with = local_conf.pop('filter-with')
+        else:
+            filter_with = None
+        if 'require' in local_conf:
+            for spec in local_conf['require'].split():
+                pkg_resources.require(spec)
+            del local_conf['require']
+        if section.startswith('filter-app:'):
+            context = self._filter_app_context(
+                object_type, section, name=name,
+                global_conf=global_conf, local_conf=local_conf,
+                global_additions=global_additions)
+        elif section.startswith('pipeline:'):
+            context = self._pipeline_app_context(
+                object_type, section, name=name,
+                global_conf=global_conf, local_conf=local_conf,
+                global_additions=global_additions)
+        elif 'use' in local_conf:
+            context = self._context_from_use(
+                object_type, local_conf, global_conf, global_additions,
+                section)
+        else:
+            context = self._context_from_explicit(
+                object_type, local_conf, global_conf, global_additions,
+                section)
+        if filter_with is not None:
+            filter_with_context = LoaderContext(
+                obj=None,
+                object_type=FILTER_WITH,
+                protocol=None,
+                global_conf=global_conf, local_conf=local_conf,
+                loader=self)
+            filter_with_context.filter_context = self.filter_context(
+                name=filter_with, global_conf=global_conf)
+            filter_with_context.next_context = context
+            return filter_with_context
+        return context
+
+    def _context_from_use(self, object_type, local_conf, global_conf,
+                          global_additions, section):
+        use = local_conf.pop('use')
+        context = self.get_context(
+            object_type, name=use, global_conf=global_conf)
+        context.global_conf.update(global_additions)
+        context.local_conf.update(local_conf)
+        if '__file__' in global_conf:
+            # use sections shouldn't overwrite the original __file__
+            context.global_conf['__file__'] = global_conf['__file__']
+        # @@: Should loader be overwritten?
+        context.loader = self
+
+        if context.protocol is None:
+            # Determine protocol from section type
+            section_protocol = section.split(':', 1)[0]
+            if section_protocol in ('application', 'app'):
+                context.protocol = 'paste.app_factory'
+            elif section_protocol in ('composit', 'composite'):
+                context.protocol = 'paste.composit_factory'
+            else:
+                # This will work with 'server' and 'filter'; otherwise it
+                # could fail, but there is already an error message for
+                # bad protocols
+                context.protocol = 'paste.%s_factory' % section_protocol
+
+        return context
+
+    def _context_from_explicit(self, object_type, local_conf, global_conf,
+                               global_addition, section):
+        possible = []
+        for protocol_options in object_type.egg_protocols:
+            for protocol in protocol_options:
+                if protocol in local_conf:
+                    possible.append((protocol, local_conf[protocol]))
+                    break
+        if len(possible) > 1:
+            raise LookupError(
+                "Multiple protocols given in section %r: %s"
+                % (section, possible))
+        if not possible:
+            raise LookupError(
+                "No loader given in section %r" % section)
+        found_protocol, found_expr = possible[0]
+        del local_conf[found_protocol]
+        value = import_string(found_expr)
+        context = LoaderContext(
+            value, object_type, found_protocol,
+            global_conf, local_conf, self)
+        return context
+
+    def _filter_app_context(self, object_type, section, name,
+                            global_conf, local_conf, global_additions):
+        if 'next' not in local_conf:
+            raise LookupError(
+                "The [%s] section in %s is missing a 'next' setting"
+                % (section, self.filename))
+        next_name = local_conf.pop('next')
+        context = LoaderContext(None, FILTER_APP, None, global_conf,
+                                local_conf, self)
+        context.next_context = self.get_context(
+            APP, next_name, global_conf)
+        if 'use' in local_conf:
+            context.filter_context = self._context_from_use(
+                FILTER, local_conf, global_conf, global_additions,
+                section)
+        else:
+            context.filter_context = self._context_from_explicit(
+                FILTER, local_conf, global_conf, global_additions,
+                section)
+        return context
+
+    def _pipeline_app_context(self, object_type, section, name,
+                              global_conf, local_conf, global_additions):
+        if 'pipeline' not in local_conf:
+            raise LookupError(
+                "The [%s] section in %s is missing a 'pipeline' setting"
+                % (section, self.filename))
+        pipeline = local_conf.pop('pipeline').split()
+        if local_conf:
+            raise LookupError(
+                "The [%s] pipeline section in %s has extra "
+                "(disallowed) settings: %s"
+                % (section, self.filename, ', '.join(local_conf.keys())))
+        context = LoaderContext(None, PIPELINE, None, global_conf,
+                                local_conf, self)
+        context.app_context = self.get_context(
+            APP, pipeline[-1], global_conf)
+        context.filter_contexts = [
+            self.get_context(FILTER, pname, global_conf)
+            for pname in pipeline[:-1]]
+        return context
+
+    def find_config_section(self, object_type, name=None):
+        """
+        Return the section name with the given name prefix (following the
+        same pattern as ``protocol_desc`` in ``config``).  It must have the
+        given name, or for ``'main'`` an empty name is allowed.  The
+        prefix must be followed by a ``:``.
+
+        Case is *not* ignored.
+        """
+        possible = []
+        for name_options in object_type.config_prefixes:
+            for name_prefix in name_options:
+                found = self._find_sections(
+                    self.parser.sections(), name_prefix, name)
+                if found:
+                    possible.extend(found)
+                    break
+        if not possible:
+            raise LookupError(
+                "No section %r (prefixed by %s) found in config %s"
+                % (name,
+                   ' or '.join(map(repr, _flatten(object_type.config_prefixes))),
+                   self.filename))
+        if len(possible) > 1:
+            raise LookupError(
+                "Ambiguous section names %r for section %r (prefixed by %s) "
+                "found in config %s"
+                % (possible, name,
+                   ' or '.join(map(repr, _flatten(object_type.config_prefixes))),
+                   self.filename))
+        return possible[0]
+
+    def _find_sections(self, sections, name_prefix, name):
+        found = []
+        if name is None:
+            if name_prefix in sections:
+                found.append(name_prefix)
+            name = 'main'
+        for section in sections:
+            if section.startswith(name_prefix + ':'):
+                if section[len(name_prefix) + 1:].strip() == name:
+                    found.append(section)
+        return found
+
+
+class EggLoader(_Loader):
+
+    def __init__(self, spec):
+        self.spec = spec
+
+    def get_context(self, object_type, name=None, global_conf=None):
+        if self.absolute_name(name):
+            return loadcontext(object_type, name,
+                               global_conf=global_conf)
+        entry_point, protocol, ep_name = self.find_egg_entry_point(
+            object_type, name=name)
+        return LoaderContext(
+            entry_point,
+            object_type,
+            protocol,
+            global_conf or {}, {},
+            self,
+            distribution=pkg_resources.get_distribution(self.spec),
+            entry_point_name=ep_name)
+
+    def find_egg_entry_point(self, object_type, name=None):
+        """
+        Returns the (entry_point, protocol, name) triple for the entry
+        point with the given ``name``.
+        """
+        if name is None:
+            name = 'main'
+        possible = []
+        for protocol_options in object_type.egg_protocols:
+            for protocol in protocol_options:
+                pkg_resources.require(self.spec)
+                entry = pkg_resources.get_entry_info(
+                    self.spec,
+                    protocol,
+                    name)
+                if entry is not None:
+                    possible.append((entry.load(), protocol, entry.name))
+                    break
+        if not possible:
+            # Better exception
+            dist = pkg_resources.get_distribution(self.spec)
+            raise LookupError(
+                "Entry point %r not found in egg %r (dir: %s; protocols: %s; "
+                "entry_points: %s)"
+                % (name, self.spec,
+                    dist.location,
+                    ', '.join(_flatten(object_type.egg_protocols)),
+                    ', '.join(_flatten([
+                        list((pkg_resources.get_entry_info(self.spec, prot, name) or {}).keys())
+                        for prot in protocol_options] or '(no entry points)'))))
+        if len(possible) > 1:
+            raise LookupError(
+                "Ambiguous entry points for %r in egg %r (protocols: %s)"
+                % (name, self.spec, ', '.join(_flatten(protocol_options))))
+        return possible[0]
+
+
+class FuncLoader(_Loader):
+    """ Loader that supports specifying functions inside modules, without
+    using eggs at all. Configuration should be in the format:
+        use = call:my.module.path:function_name
+
+    Dot notation is supported in both the module and function name, e.g.:
+        use = call:my.module.path:object.method
+    """
+    def __init__(self, spec):
+        self.spec = spec
+        if ':' not in spec:
+            raise LookupError("Configuration not in format module:function")
+
+    def get_context(self, object_type, name=None, global_conf=None):
+        obj = lookup_object(self.spec)
+        return LoaderContext(
+            obj,
+            object_type,
+            None,  # determine protocol from section type
+            global_conf or {},
+            {},
+            self,
+        )
+
+
+class LoaderContext(object):
+
+    def __init__(self, obj, object_type, protocol,
+                 global_conf, local_conf, loader,
+                 distribution=None, entry_point_name=None):
+        self.object = obj
+        self.object_type = object_type
+        self.protocol = protocol
+        # assert protocol in _flatten(object_type.egg_protocols), (
+        #    "Bad protocol %r; should be one of %s"
+        #    % (protocol, ', '.join(map(repr, _flatten(object_type.egg_protocols)))))
+        self.global_conf = global_conf
+        self.local_conf = local_conf
+        self.loader = loader
+        self.distribution = distribution
+        self.entry_point_name = entry_point_name
+
+    def create(self):
+        return self.object_type.invoke(self)
+
+    def config(self):
+        conf = AttrDict(self.global_conf)
+        conf.update(self.local_conf)
+        conf.local_conf = self.local_conf
+        conf.global_conf = self.global_conf
+        conf.context = self
+        return conf
+
+
+class AttrDict(dict):
+    """
+    A dictionary that also supports attribute assignment.
+    """
+    pass
diff --git a/lib/galaxy/util/pastescript/serve.py b/lib/galaxy/util/pastescript/serve.py
new file mode 100644
index 0000000..49fca8c
--- /dev/null
+++ b/lib/galaxy/util/pastescript/serve.py
@@ -0,0 +1,1072 @@
+# Most of this code is:
+
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+# The server command includes the additional header:
+
+# For discussion of daemonizing:
+#   http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/278731
+# Code taken also from QP:
+#   http://www.mems-exchange.org/software/qp/
+#   From lib/site.py
+
+# Galaxy originally used PasteScript and PasteDeploy for application
+# loading; to maintain compatibility we've internalized some of that
+# code here, stripping out unneeded functionality.
+
+# All top level imports from each package moved here and organized
+from __future__ import print_function
+
+import atexit
+import ConfigParser
+import errno
+import logging
+import optparse
+import os
+import re
+import subprocess
+import sys
+import textwrap
+import threading
+import time
+
+from logging.config import fileConfig
+
+from .loadwsgi import loadapp, loadserver
+
+
+difflib = None
+
+# ---- from paste.script.bool_optparse --------------------------------
+
+"""
+A subclass of ``optparse.OptionParser`` that allows boolean long
+options (like ``--verbose``) to also take arguments (like
+``--verbose=true``).  Arguments *must* use ``=``.
+"""
+
+try:
+    _ = optparse._
+except AttributeError:
+    from gettext import gettext as _
+
+
+class BoolOptionParser(optparse.OptionParser):
+
+    def _process_long_opt(self, rargs, values):
+        arg = rargs.pop(0)
+
+        # Value explicitly attached to arg?  Pretend it's the next
+        # argument.
+        if "=" in arg:
+            (opt, next_arg) = arg.split("=", 1)
+            rargs.insert(0, next_arg)
+            had_explicit_value = True
+        else:
+            opt = arg
+            had_explicit_value = False
+
+        opt = self._match_long_opt(opt)
+        option = self._long_opt[opt]
+        if option.takes_value():
+            nargs = option.nargs
+            if len(rargs) < nargs:
+                if nargs == 1:
+                    self.error(_("%s option requires an argument") % opt)
+                else:
+                    self.error(_("%s option requires %d arguments")
+                               % (opt, nargs))
+            elif nargs == 1:
+                value = rargs.pop(0)
+            else:
+                value = tuple(rargs[0:nargs])
+                del rargs[0:nargs]
+
+        elif had_explicit_value:
+            value = rargs[0].lower().strip()
+            del rargs[0:1]
+            if value in ('true', 'yes', 'on', '1', 'y', 't'):
+                value = None
+            elif value in ('false', 'no', 'off', '0', 'n', 'f'):
+                # Don't process
+                return
+            else:
+                self.error(_('%s option takes a boolean value only (true/false)') % opt)
+
+        else:
+            value = None
+
+        option.process(opt, value, values, self)
+
+# ---- from paste.script.command --------------------------------------
+
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+
+class BadCommand(Exception):
+
+    def __init__(self, message, exit_code=2):
+        self.message = message
+        self.exit_code = exit_code
+        Exception.__init__(self, message)
+
+    def _get_message(self):
+        """Getter for 'message'; needed only to override deprecation
+        in BaseException."""
+        return self.__message
+
+    def _set_message(self, value):
+        """Setter for 'message'; needed only to override deprecation
+        in BaseException."""
+        self.__message = value
+
+    # BaseException.message has been deprecated since Python 2.6.
+    # To prevent DeprecationWarning from popping up over this
+    # pre-existing attribute, use a new property that takes lookup
+    # precedence.
+    message = property(_get_message, _set_message)
+
+
+class NoDefault(object):
+    pass
+
+
+# run and invoke methods moved below ServeCommand
+class Command(object):
+
+    def __init__(self, name):
+        self.command_name = name
+
+    max_args = None
+    max_args_error = 'You must provide no more than %(max_args)s arguments'
+    min_args = None
+    min_args_error = 'You must provide at least %(min_args)s arguments'
+    # If this command takes a configuration file, set this to 1 or -1
+    # Then if invoked through #! the config file will be put into the positional
+    # arguments -- at the beginning with 1, at the end with -1
+    takes_config_file = None
+
+    # Grouped in help messages by this:
+    group_name = ''
+
+    required_args = ()
+    description = None
+    usage = ''
+    hidden = False
+    # This is the default verbosity level; --quiet subtracts,
+    # --verbose adds:
+    default_verbosity = 0
+    # This is the default interactive state:
+    default_interactive = 0
+    return_code = 0
+
+    BadCommand = BadCommand
+
+    # Must define:
+    #   parser
+    #   summary
+    #   command()
+
+    def run(self, args):
+        self.parse_args(args)
+
+        # Setup defaults:
+        for name, default in [('verbose', 0),
+                              ('quiet', 0),
+                              ('interactive', False),
+                              ('overwrite', False)]:
+            if not hasattr(self.options, name):
+                setattr(self.options, name, default)
+        if getattr(self.options, 'simulate', False):
+            self.options.verbose = max(self.options.verbose, 1)
+        self.interactive = self.default_interactive
+        if getattr(self.options, 'interactive', False):
+            self.interactive += self.options.interactive
+        if getattr(self.options, 'no_interactive', False):
+            self.interactive = False
+        self.verbose = self.default_verbosity
+        self.verbose += self.options.verbose
+        self.verbose -= self.options.quiet
+        self.simulate = getattr(self.options, 'simulate', False)
+
+        # For #! situations:
+        if (os.environ.get('PASTE_CONFIG_FILE') and self.takes_config_file is not None):
+            take = self.takes_config_file
+            filename = os.environ.get('PASTE_CONFIG_FILE')
+            if take == 1:
+                self.args.insert(0, filename)
+            elif take == -1:
+                self.args.append(filename)
+            else:
+                assert 0, (
+                    "Value takes_config_file must be None, 1, or -1 (not %r)"
+                    % take)
+
+        if (os.environ.get('PASTE_DEFAULT_QUIET')):
+            self.verbose = 0
+
+        # Validate:
+        if self.min_args is not None and len(self.args) < self.min_args:
+            raise BadCommand(
+                self.min_args_error % {'min_args': self.min_args,
+                                       'actual_args': len(self.args)})
+        if self.max_args is not None and len(self.args) > self.max_args:
+            raise BadCommand(
+                self.max_args_error % {'max_args': self.max_args,
+                                       'actual_args': len(self.args)})
+        for var_name, option_name in self.required_args:
+            if not getattr(self.options, var_name, None):
+                raise BadCommand(
+                    'You must provide the option %s' % option_name)
+        result = self.command()
+        if result is None:
+            return self.return_code
+        else:
+            return result
+
+    def parse_args(self, args):
+        if self.usage:
+            usage = ' ' + self.usage
+        else:
+            usage = ''
+        self.parser.usage = "%%prog [options]%s\n%s" % (
+            usage, self.summary)
+        self.parser.prog = self._prog_name()
+        if self.description:
+            desc = self.description
+            desc = textwrap.dedent(desc)
+            self.parser.description = desc
+        self.options, self.args = self.parser.parse_args(args)
+
+    def _prog_name(self):
+        return '%s %s' % (os.path.basename(sys.argv[0]), self.command_name)
+
+    ########################################
+    # Utility methods
+    ########################################
+
+    def pad(self, s, length, dir='left'):
+        if len(s) >= length:
+            return s
+        if dir == 'left':
+            return s + ' ' * (length - len(s))
+        else:
+            return ' ' * (length - len(s)) + s
+
+    def standard_parser(cls, verbose=True,
+                        interactive=False,
+                        no_interactive=False,
+                        simulate=False,
+                        quiet=False,
+                        overwrite=False):
+        """
+        Create a standard ``OptionParser`` instance.
+
+        Typically used like::
+
+            class MyCommand(Command):
+                parser = Command.standard_parser()
+
+        Subclasses may redefine ``standard_parser``, so use the
+        nearest superclass's class method.
+        """
+        parser = BoolOptionParser()
+        if verbose:
+            parser.add_option('-v', '--verbose',
+                              action='count',
+                              dest='verbose',
+                              default=0)
+        if quiet:
+            parser.add_option('-q', '--quiet',
+                              action='count',
+                              dest='quiet',
+                              default=0)
+        if no_interactive:
+            parser.add_option('--no-interactive',
+                              action="count",
+                              dest="no_interactive",
+                              default=0)
+        if interactive:
+            parser.add_option('-i', '--interactive',
+                              action='count',
+                              dest='interactive',
+                              default=0)
+        if simulate:
+            parser.add_option('-n', '--simulate',
+                              action='store_true',
+                              dest='simulate',
+                              default=False)
+        if overwrite:
+            parser.add_option('-f', '--overwrite',
+                              dest="overwrite",
+                              action="store_true",
+                              help="Overwrite files (warnings will be emitted for non-matching files otherwise)")
+        return parser
+
+    standard_parser = classmethod(standard_parser)
+
+    def quote_first_command_arg(self, arg):
+        """
+        There's a bug in Windows when running an executable that's
+        located inside a path with a space in it.  This method handles
+        that case, or on non-Windows systems or an executable with no
+        spaces, it just leaves well enough alone.
+        """
+        if (sys.platform != 'win32' or ' ' not in arg):
+            # Problem does not apply:
+            return arg
+        try:
+            import win32api
+        except ImportError:
+            raise ValueError(
+                "The executable %r contains a space, and in order to "
+                "handle this issue you must have the win32api module "
+                "installed" % arg)
+        arg = win32api.GetShortPathName(arg)
+        return arg
+
+    def parse_vars(self, args):
+        """
+        Given variables like ``['a=b', 'c=d']``, turn them into ``{'a':
+        'b', 'c': 'd'}``.
+        """
+        result = {}
+        for arg in args:
+            if '=' not in arg:
+                raise BadCommand(
+                    'Variable assignment %r invalid (no "=")'
+                    % arg)
+            name, value = arg.split('=', 1)
+            result[name] = value
+        return result
+
+    def logging_file_config(self, config_file):
+        """
+        Set up logging via the logging module's fileConfig function with the
+        specified ``config_file``, if applicable.
+
+        ConfigParser defaults are specified for the special ``__file__``
+        and ``here`` variables, similar to PasteDeploy config loading.
+        """
+        parser = ConfigParser.ConfigParser()
+        parser.read([config_file])
+        if parser.has_section('loggers'):
+            config_file = os.path.abspath(config_file)
+            fileConfig(config_file, dict(__file__=config_file,
+                                         here=os.path.dirname(config_file)))
+
+
+class NotFoundCommand(Command):
+
+    def run(self, args):
+        print('Command %r not known (you may need to run setup.py egg_info)'
+              % self.command_name)
+        commands = list()
+        commands.sort()
+        if not commands:
+            print('No commands registered.')
+            print('Have you installed Paste Script?')
+            print('(try running python setup.py develop)')
+            return 2
+        print('Known commands:')
+        longest = max([len(n) for n, c in commands])
+        for name, command in commands:
+            print('  %s  %s' % (self.pad(name, length=longest),
+                                command.load().summary))
+        return 2
+
+
+# ---- From paste.script.serve ----------------------------------------
+
+MAXFD = 1024
+
+jython = sys.platform.startswith('java')
+
+
+class DaemonizeException(Exception):
+    pass
+
+
+class ServeCommand(Command):
+
+    min_args = 0
+    usage = 'CONFIG_FILE [start|stop|restart|status] [var=value]'
+    takes_config_file = 1
+    summary = "Serve the described application"
+    description = """\
+    This command serves a web application that uses a paste.deploy
+    configuration file for the server and application.
+
+    If start/stop/restart is given, then --daemon is implied, and it will
+    start (normal operation), stop (--stop-daemon), or do both.
+
+    You can also include variable assignments like 'http_port=8080'
+    and then use %(http_port)s in your config files.
+    """
+
+    # used by subclasses that configure apps and servers differently
+    requires_config_file = True
+
+    parser = Command.standard_parser(quiet=True)
+    parser.add_option('-n', '--app-name',
+                      dest='app_name',
+                      metavar='NAME',
+                      help="Load the named application (default main)")
+    parser.add_option('-s', '--server',
+                      dest='server',
+                      metavar='SERVER_TYPE',
+                      help="Use the named server.")
+    parser.add_option('--server-name',
+                      dest='server_name',
+                      metavar='SECTION_NAME',
+                      help="Use the named server as defined in the configuration file (default: main)")
+    if hasattr(os, 'fork'):
+        parser.add_option('--daemon',
+                          dest="daemon",
+                          action="store_true",
+                          help="Run in daemon (background) mode")
+    parser.add_option('--pid-file',
+                      dest='pid_file',
+                      metavar='FILENAME',
+                      help="Save PID to file (default to paster.pid if running in daemon mode)")
+    parser.add_option('--log-file',
+                      dest='log_file',
+                      metavar='LOG_FILE',
+                      help="Save output to the given log file (redirects stdout)")
+    parser.add_option('--reload',
+                      dest='reload',
+                      action='store_true',
+                      help="Use auto-restart file monitor")
+    parser.add_option('--reload-interval',
+                      dest='reload_interval',
+                      default=1,
+                      help="Seconds between checking files (low number can cause significant CPU usage)")
+    parser.add_option('--monitor-restart',
+                      dest='monitor_restart',
+                      action='store_true',
+                      help="Auto-restart server if it dies")
+    parser.add_option('--status',
+                      action='store_true',
+                      dest='show_status',
+                      help="Show the status of the (presumably daemonized) server")
+
+    if hasattr(os, 'setuid'):
+        # I don't think these are available on Windows
+        parser.add_option('--user',
+                          dest='set_user',
+                          metavar="USERNAME",
+                          help="Set the user (usually only possible when run as root)")
+        parser.add_option('--group',
+                          dest='set_group',
+                          metavar="GROUP",
+                          help="Set the group (usually only possible when run as root)")
+
+    parser.add_option('--stop-daemon',
+                      dest='stop_daemon',
+                      action='store_true',
+                      help='Stop a daemonized server (given a PID file, or default paster.pid file)')
+
+    if jython:
+        parser.add_option('--disable-jython-reloader',
+                          action='store_true',
+                          dest='disable_jython_reloader',
+                          help="Disable the Jython reloader")
+
+    _scheme_re = re.compile(r'^[a-z][a-z]+:', re.I)
+
+    default_verbosity = 1
+
+    _reloader_environ_key = 'PYTHON_RELOADER_SHOULD_RUN'
+    _monitor_environ_key = 'PASTE_MONITOR_SHOULD_RUN'
+
+    possible_subcommands = ('start', 'stop', 'restart', 'status')
+
+    def command(self):
+        if self.options.stop_daemon:
+            return self.stop_daemon()
+
+        if not hasattr(self.options, 'set_user'):
+            # Windows case:
+            self.options.set_user = self.options.set_group = None
+        # @@: Is this the right stage to set the user at?
+        self.change_user_group(
+            self.options.set_user, self.options.set_group)
+
+        if self.requires_config_file:
+            if not self.args:
+                raise BadCommand('You must give a config file')
+            app_spec = self.args[0]
+            if (len(self.args) > 1 and self.args[1] in self.possible_subcommands):
+                cmd = self.args[1]
+                restvars = self.args[2:]
+            else:
+                cmd = None
+                restvars = self.args[1:]
+        else:
+            app_spec = ""
+            if (self.args and self.args[0] in self.possible_subcommands):
+                cmd = self.args[0]
+                restvars = self.args[1:]
+            else:
+                cmd = None
+                restvars = self.args[:]
+
+        if (getattr(self.options, 'daemon', False) and
+                getattr(self.options, 'reload', False)):
+            raise BadCommand('The --daemon and --reload options may not be used together')
+
+        jython_monitor = False
+        if self.options.reload:
+            if jython and not self.options.disable_jython_reloader:
+                # JythonMonitor raises the special SystemRestart
+                # exception that'll cause the Jython interpreter to
+                # reload in the existing Java process (avoiding
+                # subprocess startup time)
+                try:
+                    from paste.reloader import JythonMonitor
+                except ImportError:
+                    pass
+                else:
+                    jython_monitor = JythonMonitor(poll_interval=int(
+                        self.options.reload_interval))
+                    if self.requires_config_file:
+                        jython_monitor.watch_file(self.args[0])
+
+            if not jython_monitor:
+                if os.environ.get(self._reloader_environ_key):
+                    from paste import reloader
+                    if self.verbose > 1:
+                        print('Running reloading file monitor')
+                    reloader.install(int(self.options.reload_interval))
+                    if self.requires_config_file:
+                        reloader.watch_file(self.args[0])
+                else:
+                    return self.restart_with_reloader()
+
+        if cmd not in (None, 'start', 'stop', 'restart', 'status'):
+            raise BadCommand(
+                'Error: must give start|stop|restart|status (not %s)' % cmd)
+
+        if cmd == 'status' or self.options.show_status:
+            return self.show_status()
+
+        if cmd == 'restart' or cmd == 'stop':
+            result = self.stop_daemon()
+            if result:
+                print("Could not stop daemon")
+                # It's ok to continue trying to restart if stop_daemon returns
+                # a 1, otherwise shortcut and return.
+                if cmd == 'restart' and result != 1:
+                    return result
+            if cmd == 'stop':
+                return result
+            self.options.daemon = True
+
+        if cmd == 'start':
+            self.options.daemon = True
+
+        app_name = self.options.app_name
+        vars = self.parse_vars(restvars)
+        if not self._scheme_re.search(app_spec):
+            app_spec = 'config:' + app_spec
+        server_name = self.options.server_name
+        if self.options.server:
+            server_spec = 'egg:PasteScript'
+            assert server_name is None
+            server_name = self.options.server
+        else:
+            server_spec = app_spec
+        base = os.getcwd()
+
+        if getattr(self.options, 'daemon', False):
+            if not self.options.pid_file:
+                self.options.pid_file = 'paster.pid'
+            if not self.options.log_file:
+                self.options.log_file = 'paster.log'
+
+        # Ensure the log file is writeable
+        if self.options.log_file:
+            try:
+                writeable_log_file = open(self.options.log_file, 'a')
+            except IOError as ioe:
+                msg = 'Error: Unable to write to log file: %s' % ioe
+                raise BadCommand(msg)
+            writeable_log_file.close()
+
+        # Ensure the pid file is writeable
+        if self.options.pid_file:
+            try:
+                writeable_pid_file = open(self.options.pid_file, 'a')
+            except IOError as ioe:
+                msg = 'Error: Unable to write to pid file: %s' % ioe
+                raise BadCommand(msg)
+            writeable_pid_file.close()
+
+        if getattr(self.options, 'daemon', False):
+            try:
+                self.daemonize()
+            except DaemonizeException as ex:
+                if self.verbose > 0:
+                    print(str(ex))
+                return
+
+        if (self.options.monitor_restart and not
+                os.environ.get(self._monitor_environ_key)):
+            return self.restart_with_monitor()
+
+        if self.options.pid_file:
+            self.record_pid(self.options.pid_file)
+
+        if self.options.log_file:
+            stdout_log = LazyWriter(self.options.log_file, 'a')
+            sys.stdout = stdout_log
+            sys.stderr = stdout_log
+            logging.basicConfig(stream=stdout_log)
+
+        log_fn = app_spec
+        if log_fn.startswith('config:'):
+            log_fn = app_spec[len('config:'):]
+        elif log_fn.startswith('egg:'):
+            log_fn = None
+        if log_fn:
+            log_fn = os.path.join(base, log_fn)
+            self.logging_file_config(log_fn)
+
+        server = loadserver(server_spec, name=server_name, relative_to=base, global_conf=vars)
+
+        app = loadapp(app_spec, name=app_name, relative_to=base, global_conf=vars)
+
+        if self.verbose > 0:
+            if hasattr(os, 'getpid'):
+                msg = 'Starting server in PID %i.' % os.getpid()
+            else:
+                msg = 'Starting server.'
+            print(msg)
+
+        def serve():
+            try:
+                server(app)
+            except (SystemExit, KeyboardInterrupt) as e:
+                if self.verbose > 1:
+                    raise
+                if str(e):
+                    msg = ' ' + str(e)
+                else:
+                    msg = ''
+                print('Exiting%s (-v to see traceback)' % msg)
+            except AttributeError as e:
+                # Capturing bad error response from paste
+                if str(e) == "'WSGIThreadPoolServer' object has no attribute 'thread_pool'":
+                    import socket
+                    raise socket.error(98, 'Address already in use')
+                else:
+                    raise AttributeError(e)
+
+        if jython_monitor:
+            # JythonMonitor has to be run from the main thread
+            threading.Thread(target=serve).start()
+            print('Starting Jython file monitor')
+            jython_monitor.periodic_reload()
+        else:
+            serve()
+
+    def daemonize(self):
+        pid = live_pidfile(self.options.pid_file)
+        if pid:
+            raise DaemonizeException(
+                "Daemon is already running (PID: %s from PID file %s)"
+                % (pid, self.options.pid_file))
+
+        if self.verbose > 0:
+            print('Entering daemon mode')
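+        # What follows is the classic Unix double-fork daemonization recipe:
+        # fork and exit the parent, setsid() to detach from the controlling
+        # terminal, fork again so the daemon can never reacquire a terminal,
+        # then close all file descriptors and redirect stdio to /dev/null.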
+        pid = os.fork()
+        if pid:
+            # The forked process also has a handle on resources, so we
+            # *don't* want proper termination of the process; we just
+            # want to exit quickly (which os._exit() does)
+            os._exit(0)
+        # Make this the session leader
+        os.setsid()
+        # Fork again for good measure!
+        pid = os.fork()
+        if pid:
+            os._exit(0)
+
+        # @@: Should we set the umask and cwd now?
+
+        import resource  # Resource usage information.
+        maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
+        if (maxfd == resource.RLIM_INFINITY):
+            maxfd = MAXFD
+        # Iterate through and close all file descriptors.
+        for fd in range(0, maxfd):
+            try:
+                os.close(fd)
+            except OSError:  # ERROR, fd wasn't open to begin with (ignored)
+                pass
+
+        if (hasattr(os, "devnull")):
+            REDIRECT_TO = os.devnull
+        else:
+            REDIRECT_TO = "/dev/null"
+        os.open(REDIRECT_TO, os.O_RDWR)  # standard input (0)
+        # Duplicate standard input to standard output and standard error.
+        os.dup2(0, 1)  # standard output (1)
+        os.dup2(0, 2)  # standard error (2)
+
+    def record_pid(self, pid_file):
+        pid = os.getpid()
+        if self.verbose > 1:
+            print('Writing PID %s to %s' % (pid, pid_file))
+        f = open(pid_file, 'w')
+        f.write(str(pid))
+        f.close()
+        atexit.register(_remove_pid_file, pid, pid_file, self.verbose)
+
+    def stop_daemon(self):
+        pid_file = self.options.pid_file or 'paster.pid'
+        if not os.path.exists(pid_file):
+            print('No PID file exists in %s' % pid_file)
+            return 1
+        pid = read_pidfile(pid_file)
+        if not pid:
+            print("Not a valid PID file in %s" % pid_file)
+            return 1
+        pid = live_pidfile(pid_file)
+        if not pid:
+            print("PID in %s is not valid (deleting)" % pid_file)
+            try:
+                os.unlink(pid_file)
+            except (OSError, IOError) as e:
+                print("Could not delete: %s" % e)
+                return 2
+            return 1
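+        # Ask the process to exit with SIGTERM, retrying for up to ~10
+        # seconds before giving up.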
+        for j in range(10):
+            if not live_pidfile(pid_file):
+                break
+            import signal
+            os.kill(pid, signal.SIGTERM)
+            time.sleep(1)
+        else:
+            print("failed to kill web process %s" % pid)
+            return 3
+        if os.path.exists(pid_file):
+            os.unlink(pid_file)
+        return 0
+
+    def show_status(self):
+        pid_file = self.options.pid_file or 'paster.pid'
+        if not os.path.exists(pid_file):
+            print('No PID file %s' % pid_file)
+            return 1
+        pid = read_pidfile(pid_file)
+        if not pid:
+            print('No PID in file %s' % pid_file)
+            return 1
+        if not live_pidfile(pid_file):
+            print('PID %s in %s is not running' % (pid, pid_file))
+            return 1
+        print('Server running in PID %s' % pid)
+        return 0
+
+    def restart_with_reloader(self):
+        return self.restart_with_monitor(reloader=True)
+
+    def restart_with_monitor(self, reloader=False):
+        if self.verbose > 0:
+            if reloader:
+                print('Starting subprocess with file monitor')
+            else:
+                print('Starting subprocess with monitor parent')
+        while 1:
+            args = [self.quote_first_command_arg(sys.executable)] + sys.argv
+            new_environ = os.environ.copy()
+            if reloader:
+                new_environ[self._reloader_environ_key] = 'true'
+            else:
+                new_environ[self._monitor_environ_key] = 'true'
+            proc = None
+            try:
+                try:
+                    _turn_sigterm_into_systemexit()
+                    proc = subprocess.Popen(args, env=new_environ)
+                    exit_code = proc.wait()
+                    proc = None
+                except KeyboardInterrupt:
+                    print('^C caught in monitor process')
+                    if self.verbose > 1:
+                        raise
+                    return 1
+            finally:
+                if (proc is not None and
+                        hasattr(os, 'kill')):
+                    import signal
+                    try:
+                        os.kill(proc.pid, signal.SIGTERM)
+                    except (OSError, IOError):
+                        pass
+
+            if reloader:
+                # Reloader always exits with code 3; but if we are
+                # a monitor, any exit code will restart
+                if exit_code != 3:
+                    return exit_code
+            if self.verbose > 0:
+                print('-' * 20, 'Restarting', '-' * 20)
+
+    def change_user_group(self, user, group):
+        if not user and not group:
+            return
+        import pwd
+        import grp
+        uid = gid = None
+        if group:
+            try:
+                gid = int(group)
+                group = grp.getgrgid(gid).gr_name
+            except ValueError:
+                try:
+                    entry = grp.getgrnam(group)
+                except KeyError:
+                    raise BadCommand(
+                        "Bad group: %r; no such group exists" % group)
+                gid = entry.gr_gid
+        try:
+            uid = int(user)
+            user = pwd.getpwuid(uid).pw_name
+        except ValueError:
+            try:
+                entry = pwd.getpwnam(user)
+            except KeyError:
+                raise BadCommand(
+                    "Bad username: %r; no such user exists" % user)
+            if not gid:
+                gid = entry.pw_gid
+            uid = entry.pw_uid
+        if self.verbose > 0:
+            print('Changing user to %s:%s (%s:%s)' % (
+                user, group or '(unknown)', uid, gid))
+        if hasattr(os, 'initgroups'):
+            os.initgroups(user, gid)
+        else:
+            os.setgroups([e.gr_gid for e in grp.getgrall()
+                          if user in e.gr_mem] + [gid])
+        if gid:
+            os.setgid(gid)
+        if uid:
+            os.setuid(uid)
+
+
+class LazyWriter(object):
+
+    """
+    File-like object that opens a file lazily when it is first written
+    to.
+    """
+
+    def __init__(self, filename, mode='w'):
+        self.filename = filename
+        self.fileobj = None
+        self.lock = threading.Lock()
+        self.mode = mode
+
+    def open(self):
+        if self.fileobj is None:
+            self.lock.acquire()
+            try:
+                if self.fileobj is None:
+                    self.fileobj = open(self.filename, self.mode)
+            finally:
+                self.lock.release()
+        return self.fileobj
+
+    def write(self, text):
+        fileobj = self.open()
+        fileobj.write(text)
+        fileobj.flush()
+
+    def writelines(self, text):
+        fileobj = self.open()
+        fileobj.writelines(text)
+        fileobj.flush()
+
+    def flush(self):
+        self.open().flush()
+
+
+def live_pidfile(pidfile):
+    """(pidfile:str) -> int | None
+    Returns an int found in the named file, if there is one,
+    and if there is a running process with that process id.
+    Returns None if no such process exists.
+    """
+    pid = read_pidfile(pidfile)
+    if pid:
+        try:
+            os.kill(int(pid), 0)
+            return pid
+        except OSError as e:
+            if e.errno == errno.EPERM:
+                return pid
+    return None
+
+
+def read_pidfile(filename):
+    if os.path.exists(filename):
+        try:
+            f = open(filename)
+            content = f.read()
+            f.close()
+            return int(content.strip())
+        except (ValueError, IOError):
+            return None
+    else:
+        return None
+
+
+def _remove_pid_file(written_pid, filename, verbosity):
+    current_pid = os.getpid()
+    if written_pid != current_pid:
+        # A forked process must be exiting, not the process that
+        # wrote the PID file
+        return
+    if not os.path.exists(filename):
+        return
+    f = open(filename)
+    content = f.read().strip()
+    f.close()
+    try:
+        pid_in_file = int(content)
+    except ValueError:
+        pass
+    else:
+        if pid_in_file != current_pid:
+            print("PID file %s contains %s, not expected PID %s" % (
+                filename, pid_in_file, current_pid))
+            return
+    if verbosity > 0:
+        print("Removing PID file %s" % filename)
+    try:
+        os.unlink(filename)
+        return
+    except OSError as e:
+        # Record, but don't give traceback
+        print("Cannot remove PID file: %s" % e)
+    # well, at least let's not leave the invalid PID around...
+    try:
+        f = open(filename, 'w')
+        f.write('')
+        f.close()
+    except OSError as e:
+        print('Stale PID left in file: %s (%s)' % (filename, e))
+    else:
+        print('Stale PID removed')
+
+
+def ensure_port_cleanup(bound_addresses, maxtries=30, sleeptime=2):
+    """
+    This makes sure any open ports are closed.
+
+    It does this by connecting to them until the connection is
+    refused.  Servers should call it like::
+
+        from galaxy.util.pastescript.serve import ensure_port_cleanup
+        ensure_port_cleanup([('127.0.0.1', 8080)])
+    """
+    atexit.register(_cleanup_ports, bound_addresses, maxtries=maxtries,
+                    sleeptime=sleeptime)
+
+
+def _cleanup_ports(bound_addresses, maxtries=30, sleeptime=2):
+    # Wait for the server to bind to the port.
+    import socket
+    import errno
+    for bound_address in bound_addresses:
+        for attempt in range(maxtries):
+            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            try:
+                sock.connect(bound_address)
+            except socket.error as e:
+                if e.args[0] != errno.ECONNREFUSED:
+                    raise
+                break
+            else:
+                time.sleep(sleeptime)
+        else:
+            raise SystemExit('Timeout waiting for port.')
+        sock.close()
+
+
+def _turn_sigterm_into_systemexit():
+    """
+    Attempts to turn a SIGTERM exception into a SystemExit exception.
+    """
+    try:
+        import signal
+    except ImportError:
+        return
+
+    def handle_term(signo, frame):
+        raise SystemExit
+    signal.signal(signal.SIGTERM, handle_term)
+
+
+# ---- from paste.script.command --------------------------------------
+python_version = sys.version.splitlines()[0].strip()
+
+parser = optparse.OptionParser(add_help_option=False,
+                               # version='%s from %s (python %s)'
+                               # % (dist, dist.location, python_version),
+                               usage='%prog [paster_options] COMMAND [command_options]')
+
+parser.add_option(
+    '-h', '--help',
+    action='store_true',
+    dest='do_help',
+    help="Show this help message")
+parser.disable_interspersed_args()
+
+# @@: Add an option to run this in another Python interpreter
+
+commands = {
+    'serve': ServeCommand
+}
+
+
+def run(args=None):
+    if (not args and
+        len(sys.argv) >= 2 and
+            os.environ.get('_') and sys.argv[0] != os.environ['_'] and
+            os.environ['_'] == sys.argv[1]):
+        # probably it's an exe execution
+        args = ['exe', os.environ['_']] + sys.argv[2:]
+    if args is None:
+        args = sys.argv[1:]
+    options, args = parser.parse_args(args)
+    options.base_parser = parser
+    if options.do_help:
+        args = ['help'] + args
+    if not args:
+        print('Usage: %s COMMAND' % sys.argv[0])
+        args = ['help']
+    command_name = args[0]
+    if command_name not in commands:
+        command = NotFoundCommand
+    else:
+        command = commands[command_name]
+    invoke(command, command_name, options, args[1:])
+
+
+def invoke(command, command_name, options, args):
+    try:
+        runner = command(command_name)
+        exit_code = runner.run(args)
+    except BadCommand as e:
+        print(e.message)
+        exit_code = e.exit_code
+    sys.exit(exit_code)
diff --git a/lib/galaxy/util/permutations.py b/lib/galaxy/util/permutations.py
new file mode 100644
index 0000000..d78e569
--- /dev/null
+++ b/lib/galaxy/util/permutations.py
@@ -0,0 +1,114 @@
+""" There is some shared logic between matching/multiplying inputs in workflows
+and tools. This module is meant to capture some general permutation logic that
+applies to both cases but will initially be used only in the newer tools
+case.
+
+Maybe this doesn't make sense and maybe much of this stuff could be replaced
+with ``itertools.product`` and ``itertools.permutations``. These are open questions.
+"""
+from galaxy.exceptions import MessageException
+from galaxy.util.bunch import Bunch
+
+input_classification = Bunch(
+    SINGLE="single",
+    MATCHED="matched",
+    MULTIPLIED="multiplied",
+)
+
+
+class InputMatchedException( MessageException ):
+    """ Indicates problem matching inputs while building up inputs
+    permutations. """
+
+
+def expand_multi_inputs( inputs, classifier, key_filter=None ):
+    key_filter = key_filter or ( lambda x: True )
+
+    single_inputs, matched_multi_inputs, multiplied_multi_inputs = __split_inputs(
+        inputs,
+        classifier,
+        key_filter
+    )
+
+    # Build up every combination of inputs to be run together.
+    input_combos = __extend_with_matched_combos( single_inputs, matched_multi_inputs )
+    input_combos = __extend_with_multiplied_combos( input_combos, multiplied_multi_inputs )
+
+    return input_combos
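+
+# A minimal sketch of how expand_multi_inputs is driven (the classifier and
+# inputs below are illustrative, not part of this module):
+#
+#   example_inputs = { "a": 1, "b": [ 2, 3 ] }
+#
+#   def classifier( input_key ):
+#       value = example_inputs[ input_key ]
+#       if isinstance( value, list ):
+#           return input_classification.MATCHED, value
+#       return input_classification.SINGLE, value
+#
+#   expand_multi_inputs( example_inputs, classifier )
+#   # => [ { "a": 1, "b": 2 }, { "a": 1, "b": 3 } ]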
+
+
+def __split_inputs( inputs, classifier, key_filter ):
+    key_filter = key_filter or ( lambda x: True )
+    input_keys = filter( key_filter, inputs )
+
+    single_inputs = {}
+    matched_multi_inputs = {}
+    multiplied_multi_inputs = {}
+
+    for input_key in input_keys:
+        input_type, expanded_val = classifier( input_key )
+        if input_type == input_classification.SINGLE:
+            single_inputs[ input_key ] = expanded_val
+        elif input_type == input_classification.MATCHED:
+            matched_multi_inputs[ input_key ] = expanded_val
+        elif input_type == input_classification.MULTIPLIED:
+            multiplied_multi_inputs[ input_key ] = expanded_val
+
+    return ( single_inputs, matched_multi_inputs, multiplied_multi_inputs )
+
+
+def __extend_with_matched_combos( single_inputs, multi_inputs ):
+    """
+
+    {a => 1, b => 2} and {c => [3, 4], d => [5, 6]}
+
+    Becomes
+
+    [ {a => 1, b => 2, c => 3, d => 5}, {a => 1, b => 2, c => 4, d => 6}, ]
+
+    """
+
+    if len( multi_inputs ) == 0:
+        return [ single_inputs ]
+
+    matched_multi_inputs = []
+
+    first_multi_input_key = multi_inputs.keys()[ 0 ]
+    first_multi_value = multi_inputs.get(first_multi_input_key)
+
+    for value in first_multi_value:
+        new_inputs = __copy_and_extend_inputs( single_inputs, first_multi_input_key, value )
+        matched_multi_inputs.append( new_inputs )
+
+    for multi_input_key, multi_input_values in multi_inputs.iteritems():
+        if multi_input_key == first_multi_input_key:
+            continue
+        if len( multi_input_values ) != len( first_multi_value ):
+            raise InputMatchedException()
+
+        for index, value in enumerate( multi_input_values ):
+            matched_multi_inputs[ index ][ multi_input_key ] = value
+
+    return matched_multi_inputs
+
+
+def __extend_with_multiplied_combos( input_combos, multi_inputs ):
+    combos = input_combos
+
+    for multi_input_key, multi_input_value in multi_inputs.iteritems():
+        iter_combos = []
+
+        for combo in combos:
+            for input_value in multi_input_value:
+                iter_combo = __copy_and_extend_inputs( combo, multi_input_key, input_value )
+                iter_combos.append( iter_combo )
+
+        combos = iter_combos
+
+    return combos
+
+
+def __copy_and_extend_inputs( inputs, key, value ):
+    new_inputs = dict( inputs )
+    new_inputs[ key ] = value
+    return new_inputs
diff --git a/lib/galaxy/util/plugin_config.py b/lib/galaxy/util/plugin_config.py
new file mode 100644
index 0000000..c036336
--- /dev/null
+++ b/lib/galaxy/util/plugin_config.py
@@ -0,0 +1,79 @@
+from xml.etree import ElementTree
+
+try:
+    import yaml
+except ImportError:
+    yaml = None
+
+from galaxy.util.submodules import submodules
+
+
+def plugins_dict(module, plugin_type_identifier):
+    """ Walk through all classes in submodules of module and find ones labelled
+    with specified plugin_type_identifier and throw in a dictionary to allow
+    constructions from plugins by these types later on.
+    """
+    plugin_dict = {}
+
+    for plugin_module in submodules( module ):
+        # FIXME: this is not how one is supposed to use __all__; why did you
+        # do this, past John?
+        for clazz in getattr( plugin_module, "__all__", [] ):
+            try:
+                clazz = getattr( plugin_module, clazz )
+            except TypeError:
+                clazz = clazz
+            plugin_type = getattr( clazz, plugin_type_identifier, None )
+            if plugin_type:
+                plugin_dict[ plugin_type ] = clazz
+
+    return plugin_dict
+
+
+def load_plugins(plugins_dict, plugin_source, extra_kwds={}):
+    source_type, source = plugin_source
+    if source_type == "xml":
+        return __load_plugins_from_element(plugins_dict, source, extra_kwds)
+    else:
+        return __load_plugins_from_dicts(plugins_dict, source, extra_kwds)
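+
+# For illustration (hypothetical plugin class, not part of this module):
+# given
+#
+#   class FileAction( object ):
+#       plugin_type = "file"
+#       def __init__( self, path=None, **kwds ):
+#           self.path = path
+#
+# registered via plugins_dict( module, "plugin_type" ), both the XML element
+# <file path="/tmp/x" /> and the dict { "type": "file", "path": "/tmp/x" }
+# construct a FileAction with path "/tmp/x".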
+
+
+def __load_plugins_from_element(plugins_dict, plugins_element, extra_kwds):
+    plugins = []
+
+    for plugin_element in plugins_element.getchildren():
+        plugin_type = plugin_element.tag
+        plugin_kwds = dict( plugin_element.items() )
+        plugin_kwds.update( extra_kwds )
+        plugin = plugins_dict[ plugin_type ]( **plugin_kwds )
+        plugins.append( plugin )
+
+    return plugins
+
+
+def __load_plugins_from_dicts(plugins_dict, configs, extra_kwds):
+    plugins = []
+
+    for config in configs:
+        plugin_type = config[ "type" ]
+        plugin_kwds = config
+        plugin_kwds.update( extra_kwds )
+        plugin = plugins_dict[ plugin_type ]( **plugin_kwds )
+        plugins.append( plugin )
+
+    return plugins
+
+
+def plugin_source_from_path(path):
+    if path.endswith(".yaml") or path.endswith(".yml"):
+        return ('dict', __read_yaml(path))
+    else:
+        return ('xml', ElementTree.parse( path ).getroot())
+
+
+def __read_yaml(path):
+    if yaml is None:
+        raise ImportError("Attempting to read YAML configuration file - but PyYAML dependency unavailable.")
+
+    with open(path, "rb") as f:
+        return yaml.load(f)
diff --git a/lib/galaxy/util/postfork.py b/lib/galaxy/util/postfork.py
new file mode 100644
index 0000000..8cb7c86
--- /dev/null
+++ b/lib/galaxy/util/postfork.py
@@ -0,0 +1,43 @@
+"""
+Handle postfork functions under uWSGI
+"""
+
+# The uwsgi module is automatically injected by the parent uwsgi
+# process and only exists that way.  If the import succeeds, this is a
+# uwsgi-managed process.
+try:
+    import uwsgi
+    if hasattr(uwsgi, "numproc"):
+        process_is_uwsgi = True
+    else:
+        process_is_uwsgi = False
+except ImportError:
+    # This is not a uwsgi process, or something went horribly wrong.
+    process_is_uwsgi = False
+
+try:
+    from uwsgidecorators import postfork
+except:
+    def pf_dec(func):
+        return func
+    postfork = pf_dec
+    if process_is_uwsgi:
+        print("WARNING: This is a uwsgi process but the uwsgidecorators library"
+              " is unavailable.  This is likely due to using an external (not"
+              " in Galaxy's virtualenv) uwsgi and you may experience errors.")
+
+
+postfork_functions = []
+
+
+@postfork
+def do_postfork():
+    for f, args, kwargs in list(postfork_functions):
+        f(*args, **kwargs)
+
+
+def register_postfork_function(f, *args, **kwargs):
+    if process_is_uwsgi:
+        postfork_functions.append((f, args, kwargs))
+    else:
+        f(*args, **kwargs)
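+
+
+# Typical usage (illustrative; init_connection is hypothetical):
+#
+#   register_postfork_function( init_connection, database_url )
+#
+# Under uWSGI the registered call runs in each worker after fork; in any
+# other process it simply runs immediately.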
diff --git a/lib/galaxy/util/properties.py b/lib/galaxy/util/properties.py
new file mode 100644
index 0000000..f2de11b
--- /dev/null
+++ b/lib/galaxy/util/properties.py
@@ -0,0 +1,100 @@
+""" Module used to blend ini, environment, and explicit dictionary properties
+to determine application configuration. It has some hard-coded defaults for
+Galaxy, but should be reusable by the Tool Shed and Pulsar as well.
+"""
+import os
+import os.path
+import sys
+
+from six import iteritems
+from six.moves.configparser import ConfigParser
+
+
+def load_app_properties(
+    kwds={},
+    ini_file=None,
+    ini_section="app:main",
+    config_prefix="GALAXY_CONFIG_"
+):
+    properties = kwds.copy() if kwds else {}
+    if ini_file:
+        defaults = {
+            'here': os.path.dirname(os.path.abspath(ini_file)),
+            '__file__': os.path.abspath(ini_file)
+        }
+        parser = NicerConfigParser(ini_file, defaults=defaults)
+        parser.optionxform = str  # Don't lower-case keys
+        with open(ini_file) as f:
+            parser.read_file(f)
+
+        properties.update( dict( parser.items( ini_section ) ) )
+
+    override_prefix = "%sOVERRIDE_" % config_prefix
+    for key in os.environ:
+        if key.startswith( override_prefix ):
+            config_key = key[ len( override_prefix ): ].lower()
+            properties[ config_key ] = os.environ[ key ]
+        elif key.startswith( config_prefix ):
+            config_key = key[ len( config_prefix ): ].lower()
+            if config_key not in properties:
+                properties[ config_key ] = os.environ[ key ]
+
+    return properties
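+
+# For illustration: with the default config_prefix, the environment variable
+# GALAXY_CONFIG_OVERRIDE_FOO=bar forces properties['foo'] = 'bar' even over
+# the ini value, while GALAXY_CONFIG_FOO=bar is used only when neither the
+# explicit kwds nor the ini file set 'foo'.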
+
+
+class NicerConfigParser(ConfigParser):
+
+    def __init__(self, filename, *args, **kw):
+        ConfigParser.__init__(self, *args, **kw)
+        self.filename = filename
+        if hasattr(self, '_interpolation'):
+            self._interpolation = self.InterpolateWrapper(self._interpolation)
+
+    read_file = getattr(ConfigParser, 'read_file', ConfigParser.readfp)
+
+    def defaults(self):
+        """Return the defaults, with their values interpolated (with the
+        defaults dict itself)
+
+        Mainly to support defaults using values such as %(here)s
+        """
+        defaults = ConfigParser.defaults(self).copy()
+        for key, val in iteritems(defaults):
+            defaults[key] = self.get('DEFAULT', key) or val
+        return defaults
+
+    def _interpolate(self, section, option, rawval, vars):
+        # Python < 3.2
+        try:
+            return ConfigParser._interpolate(
+                self, section, option, rawval, vars)
+        except Exception:
+            e = sys.exc_info()[1]
+            args = list(e.args)
+            args[0] = 'Error in file %s: %s' % (self.filename, e)
+            e.args = tuple(args)
+            e.message = args[0]
+            raise
+
+    class InterpolateWrapper(object):
+        # Python >= 3.2
+        def __init__(self, original):
+            self._original = original
+
+        def __getattr__(self, name):
+            return getattr(self._original, name)
+
+        def before_get(self, parser, section, option, value, defaults):
+            try:
+                return self._original.before_get(parser, section, option,
+                                                 value, defaults)
+            except Exception:
+                e = sys.exc_info()[1]
+                args = list(e.args)
+                args[0] = 'Error in file %s: %s' % (parser.filename, e)
+                e.args = tuple(args)
+                e.message = args[0]
+                raise
+
+
+__all__ = ('load_app_properties', 'NicerConfigParser')
diff --git a/lib/galaxy/util/sanitize_html.py b/lib/galaxy/util/sanitize_html.py
new file mode 100644
index 0000000..aeaa80f
--- /dev/null
+++ b/lib/galaxy/util/sanitize_html.py
@@ -0,0 +1,441 @@
+"""
+HTML Sanitizer (ripped from feedparser)
+"""
+
+import re
+import sgmllib
+
+from six import unichr
+
+from galaxy.util import unicodify
+
+
+# reversible htmlentitydefs mappings for Python 2.2
+try:
+    from htmlentitydefs import name2codepoint, codepoint2name
+except:
+    import htmlentitydefs
+    name2codepoint = {}
+    codepoint2name = {}
+    for (name, codepoint) in htmlentitydefs.entitydefs.iteritems():
+        if codepoint.startswith('&#'):
+            codepoint = unichr(int(codepoint[2:-1]))
+        name2codepoint[name] = ord(codepoint)
+        codepoint2name[ord(codepoint)] = name
+
+
+_cp1252 = {
+    unichr(128): unichr(8364),  # euro sign
+    unichr(130): unichr(8218),  # single low-9 quotation mark
+    unichr(131): unichr( 402),  # latin small letter f with hook
+    unichr(132): unichr(8222),  # double low-9 quotation mark
+    unichr(133): unichr(8230),  # horizontal ellipsis
+    unichr(134): unichr(8224),  # dagger
+    unichr(135): unichr(8225),  # double dagger
+    unichr(136): unichr( 710),  # modifier letter circumflex accent
+    unichr(137): unichr(8240),  # per mille sign
+    unichr(138): unichr( 352),  # latin capital letter s with caron
+    unichr(139): unichr(8249),  # single left-pointing angle quotation mark
+    unichr(140): unichr( 338),  # latin capital ligature oe
+    unichr(142): unichr( 381),  # latin capital letter z with caron
+    unichr(145): unichr(8216),  # left single quotation mark
+    unichr(146): unichr(8217),  # right single quotation mark
+    unichr(147): unichr(8220),  # left double quotation mark
+    unichr(148): unichr(8221),  # right double quotation mark
+    unichr(149): unichr(8226),  # bullet
+    unichr(150): unichr(8211),  # en dash
+    unichr(151): unichr(8212),  # em dash
+    unichr(152): unichr( 732),  # small tilde
+    unichr(153): unichr(8482),  # trade mark sign
+    unichr(154): unichr( 353),  # latin small letter s with caron
+    unichr(155): unichr(8250),  # single right-pointing angle quotation mark
+    unichr(156): unichr( 339),  # latin small ligature oe
+    unichr(158): unichr( 382),  # latin small letter z with caron
+    unichr(159): unichr( 376)}  # latin capital letter y with diaeresis
+
+
+class _BaseHTMLProcessor(sgmllib.SGMLParser):
+    special = re.compile('''[<>'"]''')
+    bare_ampersand = re.compile("&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)")
+    elements_no_end_tag = ['area', 'base', 'basefont', 'br', 'col', 'frame', 'hr',
+                           'img', 'input', 'isindex', 'link', 'meta', 'param']
+
+    def __init__(self, encoding, type):
+        self.encoding = encoding
+        self.type = type
+        # if _debug: sys.stderr.write('entering BaseHTMLProcessor, encoding=%s\n' % self.encoding)
+        sgmllib.SGMLParser.__init__(self)
+
+    def reset(self):
+        self.pieces = []
+        sgmllib.SGMLParser.reset(self)
+
+    def _shorttag_replace(self, match):
+        tag = match.group(1)
+        if tag in self.elements_no_end_tag:
+            return '<' + tag + ' />'
+        else:
+            return '<' + tag + '></' + tag + '>'
+
+    def parse_starttag(self, i):
+        j = sgmllib.SGMLParser.parse_starttag(self, i)
+        if self.type == 'application/xhtml+xml':
+            if j > 2 and self.rawdata[j - 2:j] == '/>':
+                self.unknown_endtag(self.lasttag)
+        return j
+
+    def feed(self, data):
+        data = re.compile(r'<!((?!DOCTYPE|--|\[))', re.IGNORECASE).sub(r'<!\1', data)
+        data = re.sub(r'<([^<>\s]+?)\s*/>', self._shorttag_replace, data)
+        data = data.replace('&#39;', "'")
+        data = data.replace('&#34;', '"')
+        sgmllib.SGMLParser.feed(self, data)
+        sgmllib.SGMLParser.close(self)
+
+    def normalize_attrs(self, attrs):
+        if not attrs:
+            return attrs
+        # utility method to be called by descendants
+        attrs = dict([(k.lower(), v) for k, v in attrs]).items()
+        attrs = [(k, k in ('rel', 'type') and v.lower() or v) for k, v in attrs]
+        attrs.sort()
+        return attrs
+
+    def unknown_starttag(self, tag, attrs):
+        # called for each start tag
+        # attrs is a list of (attr, value) tuples
+        # e.g. for <pre class='screen'>, tag='pre', attrs=[('class', 'screen')]
+        uattrs = []
+        strattrs = ''
+        if attrs:
+            for key, value in attrs:
+                value = value.replace('>', '&gt;').replace('<', '&lt;').replace('"', '&quot;')
+                value = self.bare_ampersand.sub("&amp;", value)
+                uattrs.append((key, value))
+            strattrs = ''.join([' %s="%s"' % (k, v) for k, v in uattrs])
+        if tag in self.elements_no_end_tag:
+            self.pieces.append('<%s%s />' % (tag, strattrs))
+        else:
+            self.pieces.append('<%s%s>' % (tag, strattrs))
+
+    def unknown_endtag(self, tag):
+        # called for each end tag, e.g. for </pre>, tag will be 'pre'
+        # Reconstruct the original end tag.
+        if tag not in self.elements_no_end_tag:
+            self.pieces.append("</%(tag)s>" % locals())
+
+    def handle_charref(self, ref):
+        # called for each character reference, e.g. for '&#160;', ref will be '160'
+        # Reconstruct the original character reference.
+        if ref.startswith('x'):
+            value = unichr(int(ref[1:], 16))
+        else:
+            value = unichr(int(ref))
+
+        if value in _cp1252.keys():
+            self.pieces.append('&#%s;' % hex(ord(_cp1252[value]))[1:])
+        else:
+            self.pieces.append('&#%(ref)s;' % locals())
+
+    def handle_entityref(self, ref):
+        # called for each entity reference, e.g. for '&copy;', ref will be 'copy'
+        # Reconstruct the original entity reference.
+        if ref in name2codepoint:
+            self.pieces.append('&%(ref)s;' % locals())
+        else:
+            self.pieces.append('&%(ref)s' % locals())
+
+    def handle_data(self, text):
+        # called for each block of plain text, i.e. outside of any tag and
+        # not containing any character or entity references
+        # Store the original text verbatim.
+        self.pieces.append(text)
+
+    def handle_comment(self, text):
+        # called for each HTML comment, e.g. <!-- insert Javascript code here -->
+        # Reconstruct the original comment.
+        self.pieces.append('<!--%(text)s-->' % locals())
+
+    def handle_pi(self, text):
+        # called for each processing instruction, e.g. <?instruction>
+        # Reconstruct original processing instruction.
+        self.pieces.append('<?%(text)s>' % locals())
+
+    def handle_decl(self, text):
+        # called for the DOCTYPE, if present, e.g.
+        # <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+        #     "http://www.w3.org/TR/html4/loose.dtd">
+        # Reconstruct original DOCTYPE
+        self.pieces.append('<!%(text)s>' % locals())
+
+    _new_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9:]*\s*').match
+
+    def _scan_name(self, i, declstartpos):
+        rawdata = self.rawdata
+        n = len(rawdata)
+        if i == n:
+            return None, -1
+        m = self._new_declname_match(rawdata, i)
+        if m:
+            s = m.group()
+            name = s.strip()
+            if (i + len(s)) == n:
+                return None, -1  # end of buffer
+            return name.lower(), m.end()
+        else:
+            self.handle_data(rawdata)
+#            self.updatepos(declstartpos, i)
+            return None, -1
+
+    def convert_charref(self, name):
+        return '&#%s;' % name
+
+    def convert_entityref(self, name):
+        return '&%s;' % name
+
+    def output(self):
+        '''Return processed HTML as a single string'''
+        return ''.join(self.pieces)
+
+
+class _HTMLSanitizer(_BaseHTMLProcessor):
+    acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area', 'article',
+                           'aside', 'audio', 'b', 'big', 'blockquote', 'br', 'button', 'canvas',
+                           'caption', 'center', 'cite', 'code', 'col', 'colgroup', 'command',
+                           'datagrid', 'datalist', 'dd', 'del', 'details', 'dfn', 'dialog', 'dir',
+                           'div', 'dl', 'dt', 'em', 'event-source', 'fieldset', 'figure', 'footer',
+                           'font', 'form', 'header', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i',
+                           'img', 'input', 'ins', 'keygen', 'kbd', 'label', 'legend', 'li', 'm', 'map',
+                           'menu', 'meter', 'multicol', 'nav', 'nextid', 'ol', 'output', 'optgroup',
+                           'option', 'p', 'pre', 'progress', 'q', 's', 'samp', 'section', 'select',
+                           'small', 'sound', 'source', 'spacer', 'span', 'strike', 'strong', 'sub',
+                           'sup', 'table', 'tbody', 'td', 'textarea', 'time', 'tfoot', 'th', 'thead',
+                           'tr', 'tt', 'u', 'ul', 'var', 'video', 'noscript']
+
+    acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey',
+                             'action', 'align', 'alt', 'autocomplete', 'autofocus', 'axis',
+                             'background', 'balance', 'bgcolor', 'bgproperties', 'border',
+                             'bordercolor', 'bordercolordark', 'bordercolorlight', 'bottompadding',
+                             'cellpadding', 'cellspacing', 'ch', 'challenge', 'char', 'charoff',
+                             'choff', 'charset', 'checked', 'cite', 'class', 'clear', 'color', 'cols',
+                             'colspan', 'compact', 'contenteditable', 'controls', 'coords', 'data',
+                             'datafld', 'datapagesize', 'datasrc', 'datetime', 'default', 'delay',
+                             'dir', 'disabled', 'draggable', 'dynsrc', 'enctype', 'end', 'face', 'for',
+                             'form', 'frame', 'galleryimg', 'gutter', 'headers', 'height', 'hidefocus',
+                             'hidden', 'high', 'href', 'hreflang', 'hspace', 'icon', 'id', 'inputmode',
+                             'ismap', 'keytype', 'label', 'leftspacing', 'lang', 'list', 'longdesc',
+                             'loop', 'loopcount', 'loopend', 'loopstart', 'low', 'lowsrc', 'max',
+                             'maxlength', 'media', 'method', 'min', 'multiple', 'name', 'nohref',
+                             'noshade', 'nowrap', 'open', 'optimum', 'pattern', 'ping', 'point-size',
+                             'prompt', 'pqg', 'radiogroup', 'readonly', 'rel', 'repeat-max',
+                             'repeat-min', 'replace', 'required', 'rev', 'rightspacing', 'rows',
+                             'rowspan', 'rules', 'scope', 'selected', 'shape', 'size', 'span', 'src',
+                             'start', 'step', 'summary', 'suppress', 'tabindex', 'target', 'template',
+                             'title', 'toppadding', 'type', 'unselectable', 'usemap', 'urn', 'valign',
+                             'value', 'variable', 'volume', 'vspace', 'vrml', 'width', 'wrap',
+                             'xml:lang']
+
+    unacceptable_elements_with_end_tag = ['script', 'applet', 'style']
+
+    acceptable_css_properties = ['azimuth', 'background-color',
+                                 'border-bottom-color', 'border-collapse', 'border-color',
+                                 'border-left-color', 'border-right-color', 'border-top-color', 'clear',
+                                 'color', 'cursor', 'direction', 'display', 'elevation', 'float', 'font',
+                                 'font-family', 'font-size', 'font-style', 'font-variant', 'font-weight',
+                                 'height', 'letter-spacing', 'line-height', 'overflow', 'pause',
+                                 'pause-after', 'pause-before', 'pitch', 'pitch-range', 'richness',
+                                 'speak', 'speak-header', 'speak-numeral', 'speak-punctuation',
+                                 'speech-rate', 'stress', 'text-align', 'text-decoration', 'text-indent',
+                                 'unicode-bidi', 'vertical-align', 'voice-family', 'volume',
+                                 'white-space', 'width']
+
+    # survey of common keywords found in feeds
+    acceptable_css_keywords = ['auto', 'aqua', 'black', 'block', 'blue',
+                               'bold', 'both', 'bottom', 'brown', 'center', 'collapse', 'dashed',
+                               'dotted', 'fuchsia', 'gray', 'green', '!important', 'italic', 'left',
+                               'lime', 'maroon', 'medium', 'none', 'navy', 'normal', 'nowrap', 'olive',
+                               'pointer', 'purple', 'red', 'right', 'solid', 'silver', 'teal', 'top',
+                               'transparent', 'underline', 'white', 'yellow']
+
+    valid_css_values = re.compile('^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|' +
+                                  '\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$')
+
+    mathml_elements = ['annotation', 'annotation-xml', 'maction', 'math',
+                       'merror', 'mfenced', 'mfrac', 'mi', 'mmultiscripts', 'mn', 'mo', 'mover', 'mpadded',
+                       'mphantom', 'mprescripts', 'mroot', 'mrow', 'mspace', 'msqrt', 'mstyle',
+                       'msub', 'msubsup', 'msup', 'mtable', 'mtd', 'mtext', 'mtr', 'munder',
+                       'munderover', 'none', 'semantics']
+
+    mathml_attributes = ['actiontype', 'align', 'columnalign', 'columnalign',
+                         'columnalign', 'close', 'columnlines', 'columnspacing', 'columnspan', 'depth',
+                         'display', 'displaystyle', 'encoding', 'equalcolumns', 'equalrows',
+                         'fence', 'fontstyle', 'fontweight', 'frame', 'height', 'linethickness',
+                         'lspace', 'mathbackground', 'mathcolor', 'mathvariant', 'mathvariant',
+                         'maxsize', 'minsize', 'open', 'other', 'rowalign', 'rowalign', 'rowalign',
+                         'rowlines', 'rowspacing', 'rowspan', 'rspace', 'scriptlevel', 'selection',
+                         'separator', 'separators', 'stretchy', 'width', 'width', 'xlink:href',
+                         'xlink:show', 'xlink:type', 'xmlns', 'xmlns:xlink']
+
+    # svgtiny - foreignObject + linearGradient + radialGradient + stop
+    svg_elements = ['a', 'animate', 'animateColor', 'animateMotion',
+                    'animateTransform', 'circle', 'defs', 'desc', 'ellipse', 'foreignObject',
+                    'font-face', 'font-face-name', 'font-face-src', 'g', 'glyph', 'hkern',
+                    'linearGradient', 'line', 'marker', 'metadata', 'missing-glyph', 'mpath',
+                    'path', 'polygon', 'polyline', 'radialGradient', 'rect', 'set', 'stop',
+                    'svg', 'switch', 'text', 'title', 'tspan', 'use']
+
+    # svgtiny + class + opacity + offset + xmlns + xmlns:xlink
+    svg_attributes = ['accent-height', 'accumulate', 'additive', 'alphabetic',
+                      'arabic-form', 'ascent', 'attributeName', 'attributeType',
+                      'baseProfile', 'bbox', 'begin', 'by', 'calcMode', 'cap-height',
+                      'class', 'color', 'color-rendering', 'content', 'cx', 'cy', 'd', 'dx',
+                      'dy', 'descent', 'display', 'dur', 'end', 'fill', 'fill-opacity',
+                      'fill-rule', 'font-family', 'font-size', 'font-stretch', 'font-style',
+                      'font-variant', 'font-weight', 'from', 'fx', 'fy', 'g1', 'g2',
+                      'glyph-name', 'gradientUnits', 'hanging', 'height', 'horiz-adv-x',
+                      'horiz-origin-x', 'id', 'ideographic', 'k', 'keyPoints', 'keySplines',
+                      'keyTimes', 'lang', 'mathematical', 'marker-end', 'marker-mid',
+                      'marker-start', 'markerHeight', 'markerUnits', 'markerWidth', 'max',
+                      'min', 'name', 'offset', 'opacity', 'orient', 'origin',
+                      'overline-position', 'overline-thickness', 'panose-1', 'path',
+                      'pathLength', 'points', 'preserveAspectRatio', 'r', 'refX', 'refY',
+                      'repeatCount', 'repeatDur', 'requiredExtensions', 'requiredFeatures',
+                      'restart', 'rotate', 'rx', 'ry', 'slope', 'stemh', 'stemv',
+                      'stop-color', 'stop-opacity', 'strikethrough-position',
+                      'strikethrough-thickness', 'stroke', 'stroke-dasharray',
+                      'stroke-dashoffset', 'stroke-linecap', 'stroke-linejoin',
+                      'stroke-miterlimit', 'stroke-opacity', 'stroke-width', 'systemLanguage',
+                      'target', 'text-anchor', 'to', 'transform', 'type', 'u1', 'u2',
+                      'underline-position', 'underline-thickness', 'unicode', 'unicode-range',
+                      'units-per-em', 'values', 'version', 'viewBox', 'visibility', 'width',
+                      'widths', 'x', 'x-height', 'x1', 'x2', 'xlink:actuate', 'xlink:arcrole',
+                      'xlink:href', 'xlink:role', 'xlink:show', 'xlink:title', 'xlink:type',
+                      'xml:base', 'xml:lang', 'xml:space', 'xmlns', 'xmlns:xlink', 'y', 'y1',
+                      'y2', 'zoomAndPan']
+
+    svg_attr_map = None
+    svg_elem_map = None
+
+    acceptable_svg_properties = [ 'fill', 'fill-opacity', 'fill-rule',
+                                  'stroke', 'stroke-width', 'stroke-linecap', 'stroke-linejoin',
+                                  'stroke-opacity']
+
+    def reset(self):
+        _BaseHTMLProcessor.reset(self)
+        self.unacceptablestack = 0
+        self.mathmlOK = 0
+        self.svgOK = 0
+
+    def unknown_starttag(self, tag, attrs):
+        acceptable_attributes = self.acceptable_attributes
+        keymap = {}
+        if tag not in self.acceptable_elements or self.svgOK:
+            if tag in self.unacceptable_elements_with_end_tag:
+                self.unacceptablestack += 1
+
+            # not otherwise acceptable, perhaps it is MathML or SVG?
+            if tag == 'math' and ('xmlns', 'http://www.w3.org/1998/Math/MathML') in attrs:
+                self.mathmlOK += 1
+            if tag == 'svg' and ('xmlns', 'http://www.w3.org/2000/svg') in attrs:
+                self.svgOK += 1
+
+            # choose acceptable attributes based on tag class, else bail
+            if self.mathmlOK and tag in self.mathml_elements:
+                acceptable_attributes = self.mathml_attributes
+            elif self.svgOK and tag in self.svg_elements:
+                # for most vocabularies, lowercasing is a good idea.  Many
+                # svg elements, however, are camel case
+                if not self.svg_attr_map:
+                    lower = [attr.lower() for attr in self.svg_attributes]
+                    mix = [a for a in self.svg_attributes if a not in lower]
+                    self.svg_attributes = lower
+                    self.svg_attr_map = dict([(a.lower(), a) for a in mix])
+
+                    lower = [attr.lower() for attr in self.svg_elements]
+                    mix = [a for a in self.svg_elements if a not in lower]
+                    self.svg_elements = lower
+                    self.svg_elem_map = dict([(a.lower(), a) for a in mix])
+                acceptable_attributes = self.svg_attributes
+                tag = self.svg_elem_map.get(tag, tag)
+                keymap = self.svg_attr_map
+            elif tag not in self.acceptable_elements:
+                return
+
+        # declare xlink namespace, if needed
+        if self.mathmlOK or self.svgOK:
+            if any(n.startswith('xlink:') for n, v in attrs):
+                if not ('xmlns:xlink', 'http://www.w3.org/1999/xlink') in attrs:
+                    attrs.append(('xmlns:xlink', 'http://www.w3.org/1999/xlink'))
+
+        clean_attrs = []
+        for key, value in self.normalize_attrs(attrs):
+            if key == "href" and value.strip().startswith("javascript"):
+                pass
+            elif key in acceptable_attributes:
+                key = keymap.get(key, key)
+                clean_attrs.append((key, value))
+            elif key == 'style':
+                pass
+                # clean_value = self.sanitize_style(value)
+                # if clean_value: clean_attrs.append((key,clean_value))
+        _BaseHTMLProcessor.unknown_starttag(self, tag, clean_attrs)
+
+    def unknown_endtag(self, tag):
+        if tag not in self.acceptable_elements:
+            if tag in self.unacceptable_elements_with_end_tag:
+                self.unacceptablestack -= 1
+            if self.mathmlOK and tag in self.mathml_elements:
+                if tag == 'math' and self.mathmlOK:
+                    self.mathmlOK -= 1
+            elif self.svgOK and tag in self.svg_elements:
+                tag = self.svg_elem_map.get(tag, tag)
+                if tag == 'svg' and self.svgOK:
+                    self.svgOK -= 1
+            else:
+                return
+        _BaseHTMLProcessor.unknown_endtag(self, tag)
+
+    def handle_pi(self, text):
+        pass
+
+    def handle_decl(self, text):
+        pass
+
+    def handle_data(self, text):
+        if not self.unacceptablestack:
+            _BaseHTMLProcessor.handle_data(self, text)
+
+    def sanitize_style(self, style):
+        # disallow urls
+        style = re.compile('url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
+
+        # gauntlet
+        if not re.match("""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
+            return ''
+        if not re.match("^(\s*[-\w]+\s*:\s*[^:;]*(;|$))*$", style):
+            return ''
+
+        clean = []
+        for prop, value in re.findall("([-\w]+)\s*:\s*([^:;]*)", style):
+            if not value:
+                continue
+            if prop.lower() in self.acceptable_css_properties:
+                clean.append(prop + ': ' + value + ';')
+            elif prop.split('-')[0].lower() in ['background', 'border', 'margin', 'padding']:
+                for keyword in value.split():
+                    if keyword not in self.acceptable_css_keywords and \
+                            not self.valid_css_values.match(keyword):
+                        break
+                else:
+                    clean.append(prop + ': ' + value + ';')
+            elif self.svgOK and prop.lower() in self.acceptable_svg_properties:
+                clean.append(prop + ': ' + value + ';')
+        return ' '.join(clean)
+
+
+def sanitize_html(htmlSource, encoding="utf-8", type="text/html"):
+    p = _HTMLSanitizer(encoding, type)
+    p.feed(unicodify(htmlSource, encoding))
+    data = p.output()
+    data = data.strip().replace('\r\n', '\n')
+    return data
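
A quick usage sketch of the sanitizer above (assuming Galaxy's lib directory
is on sys.path); the onclick attribute is not in acceptable_attributes and
script is in unacceptable_elements_with_end_tag, so both are stripped:

    from galaxy.util.sanitize_html import sanitize_html

    markup = '<p onclick="evil()">Hi <script>alert(1)</script><b>there</b></p>'
    print(sanitize_html(markup))
    # -> <p>Hi <b>there</b></p>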
diff --git a/lib/galaxy/util/simplegraph.py b/lib/galaxy/util/simplegraph.py
new file mode 100644
index 0000000..280eb57
--- /dev/null
+++ b/lib/galaxy/util/simplegraph.py
@@ -0,0 +1,129 @@
+"""
+Fencepost-simple graph structure implementation.
+"""
+# Currently (2013.7.12) only used in easing the parsing of graph datatype data.
+
+from galaxy.util.odict import odict
+
+
+class SimpleGraphNode( object ):
+    """
+    Node representation.
+    """
+    def __init__( self, index, **data ):
+        """
+        :param index: index of this node in some parent list
+        :type index: int
+        :param data: any extra data that needs to be saved
+        :type data: (variadic dictionary)
+        """
+        # a bit application specific (could be 'id')
+        self.index = index
+        self.data = data
+
+
+class SimpleGraphEdge( object ):
+    """
+    Edge representation.
+    """
+    def __init__( self, source_index, target_index, **data ):
+        """
+        :param source_index: index of the edge's source node in some parent list
+        :type source_index: int
+        :param target_index: index of the edge's target node in some parent list
+        :type target_index: int
+        :param data: any extra data that needs to be saved
+        :type data: (variadic dictionary)
+        """
+        self.source_index = source_index
+        self.target_index = target_index
+        self.data = data
+
+
+class SimpleGraph( object ):
+    """
+    Each node is unique (by id) and stores its own index in the node list/odict.
+    Each edge is represented as two indices into the node list/odict.
+    Both nodes and edges allow storing extra information if needed.
+
+    Allows:
+        multiple edges between two nodes
+        self referential edges (an edge from a node to itself)
+
+    These graphs are not specifically directed but, since each edge lists a
+    source and a target, they can easily be used that way.
+    """
+    def __init__( self, nodes=None, edges=None ):
+        # use an odict so that edge indices actually match the final node list indices
+        self.nodes = nodes or odict()
+        self.edges = edges or []
+
+    def add_node( self, node_id, **data ):
+        """
+        Adds a new node only if it doesn't already exist.
+        :param node_id: some unique identifier
+        :type node_id: (hashable)
+        :param data: any extra data that needs to be saved
+        :type data: (variadic dictionary)
+        :returns: the new node
+        """
+        if node_id in self.nodes:
+            return self.nodes[ node_id ]
+        node_index = len( self.nodes )
+        new_node = SimpleGraphNode( node_index, **data )
+        self.nodes[ node_id ] = new_node
+        return new_node
+
+    def add_edge( self, source_id, target_id, **data ):
+        """
+        Adds a new edge between the given nodes, creating either node if it
+        doesn't already exist.
+        :param source_id: the id of the source node
+        :type source_id: (hashable)
+        :param target_id: the id of the target node
+        :type target_id: (hashable)
+        :param data: any extra data that needs to be saved for the edge
+        :type data: (variadic dictionary)
+        :returns: the new edge
+
+        .. note:: Although this will create new nodes if necessary, there's
+            no way to pass `data` to them - so if you need to associate more
+            data with the nodes, use `add_node` first.
+        """
+        # adds target_id to source_id's edge list
+        #   adding source_id and/or target_id to nodes if not there already
+        if source_id not in self.nodes:
+            self.add_node( source_id )
+        if target_id not in self.nodes:
+            self.add_node( target_id )
+        new_edge = SimpleGraphEdge( self.nodes[ source_id ].index, self.nodes[ target_id ].index, **data )
+        self.edges.append( new_edge )
+        return new_edge
+
+    def gen_node_dicts( self ):
+        """
+        Returns a generator that yields node dictionaries in the form::
+
+            { 'id': <the node's unique id>, 'data': <any additional node data> }
+        """
+        for node_id, node in self.nodes.items():
+            yield { 'id': node_id, 'data': node.data }
+
+    def gen_edge_dicts( self ):
+        """
+        Returns a generator that yields edge dictionaries in the form::
+
+            {
+                'source': <the index of the source node in the graph's node list>,
+                'target': <the index of the target node in the graph's node list>,
+                'data'  : <any additional edge data>
+            }
+        """
+        for edge in self.edges:
+            yield { 'source': edge.source_index, 'target': edge.target_index, 'data': edge.data }
+
+    def as_dict( self ):
+        """
+        Returns a dictionary of the form::
+
+            { 'nodes': <a list of node dictionaries>, 'edges': <a list of edge dictionaries> }
+        """
+        return { 'nodes': list( self.gen_node_dicts() ), 'edges': list( self.gen_edge_dicts() ) }
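
For illustration, a tiny hypothetical graph built with this API (node ids
and the extra data are invented):

    from galaxy.util.simplegraph import SimpleGraph

    graph = SimpleGraph()
    graph.add_node('a', label='first')
    graph.add_node('b', label='second')
    graph.add_edge('a', 'b', weight=1)
    print(graph.as_dict())
    # {'nodes': [{'id': 'a', 'data': {'label': 'first'}},
    #            {'id': 'b', 'data': {'label': 'second'}}],
    #  'edges': [{'source': 0, 'target': 1, 'data': {'weight': 1}}]}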
diff --git a/lib/galaxy/util/sleeper.py b/lib/galaxy/util/sleeper.py
new file mode 100644
index 0000000..f8d8a45
--- /dev/null
+++ b/lib/galaxy/util/sleeper.py
@@ -0,0 +1,22 @@
+import threading
+
+
+class Sleeper( object ):
+    """
+    Provides a 'sleep' method that sleeps for a number of seconds *unless*
+    the notify method is called (from a different thread).
+    """
+    def __init__( self ):
+        self.condition = threading.Condition()
+
+    def sleep( self, seconds ):
+        # Should this be in a try/finally block? -John
+        self.condition.acquire()
+        self.condition.wait( seconds )
+        self.condition.release()
+
+    def wake( self ):
+        # Should this be in a try/finally block? -John
+        self.condition.acquire()
+        self.condition.notify()
+        self.condition.release()
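
A minimal sketch of the intended use: one thread blocks in sleep() for up
to a long timeout, and another releases it early via wake():

    import threading
    import time

    from galaxy.util.sleeper import Sleeper

    sleeper = Sleeper()

    def worker():
        sleeper.sleep(60)  # blocks up to 60s unless another thread calls wake()
        print('woke up early')

    t = threading.Thread(target=worker)
    t.start()
    time.sleep(1)
    sleeper.wake()
    t.join()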
diff --git a/lib/galaxy/util/sockets.py b/lib/galaxy/util/sockets.py
new file mode 100644
index 0000000..b7946ce
--- /dev/null
+++ b/lib/galaxy/util/sockets.py
@@ -0,0 +1,43 @@
+import random
+import shlex
+import socket
+import subprocess
+
+
+def unused_port(range=None):
+    if range:
+        return __unused_port_on_range(range)
+    else:
+        return __unused_port_rangeless()
+
+
+def __unused_port_rangeless():
+    # TODO: Allow ranges (though then need to guess and check)...
+    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    s.bind(('localhost', 0))
+    addr, port = s.getsockname()
+    s.close()
+    return port
+
+
+def __unused_port_on_range(range):
+    assert range[0] and range[1]
+
+    # Find all ports that are already occupied
+    cmd_netstat = shlex.split("netstat -tuln")
+    p1 = subprocess.Popen(cmd_netstat, stdout=subprocess.PIPE)
+
+    occupied_ports = set()
+    for line in p1.stdout.read().split('\n'):
+        if line.startswith('tcp'):  # matches both tcp and tcp6 lines
+            col = line.split()
+            local_address = col[3]
+            # rsplit so IPv6 addresses (e.g. ':::8080') also yield the port
+            local_port = local_address.rsplit(':', 1)[1]
+            occupied_ports.add( int(local_port) )
+
+    # Generate random free port number.
+    while True:
+        port = random.randrange(range[0], range[1])
+        if port not in occupied_ports:
+            break
+    return port
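
Usage is straightforward (the port range is an arbitrary example; note the
range-based variant shells out to netstat, so it assumes netstat is
installed):

    from galaxy.util.sockets import unused_port

    any_port = unused_port()                 # let the OS pick a free port
    ranged_port = unused_port((8000, 9000))  # free port in [8000, 9000)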
diff --git a/lib/galaxy/util/specs.py b/lib/galaxy/util/specs.py
new file mode 100644
index 0000000..a9d35d8
--- /dev/null
+++ b/lib/galaxy/util/specs.py
@@ -0,0 +1,32 @@
+import functools
+import operator
+
+from galaxy import util
+
+
+# Utility methods for specifying maps.
+def to_str_or_none( value ):
+    if value is None:
+        return None
+    else:
+        return str( value )
+
+
+def to_bool_or_none( value ):
+    return util.string_as_bool_or_none( value )
+
+
+def to_bool( value ):
+    return util.asbool( value )
+
+
+def to_float_or_none( value ):
+    if value is None:
+        return None
+    else:
+        return float( value )
+
+
+# Utility methods for specifying valid...
+def is_in( *args ):
+    return functools.partial( operator.contains, args )
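
As a sketch of how these helpers compose, is_in builds a membership
predicate over its arguments:

    from galaxy.util.specs import is_in

    valid_state = is_in('new', 'queued', 'running')
    assert valid_state('queued')
    assert not valid_state('bogus')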
diff --git a/lib/galaxy/util/sqlite.py b/lib/galaxy/util/sqlite.py
new file mode 100644
index 0000000..7c275ae
--- /dev/null
+++ b/lib/galaxy/util/sqlite.py
@@ -0,0 +1,28 @@
+import re
+import sqlite3
+
+try:
+    import sqlparse
+
+    def is_read_only_query(query):
+        statements = sqlparse.parse(query)
+        for statement in statements:
+            if statement.get_type() != "SELECT":
+                return False
+        return True
+
+except ImportError:
+    # Without sqlparse we use a very weak regex check
+    def is_read_only_query(query):
+        if re.match("select ", query, re.IGNORECASE):
+            if re.search("^([^\"]|\"[^\"]*\")*?;", query) or re.search("^([^\']|\'[^\']*\')*?;", query):
+                return False
+            else:
+                return True
+        return False
+
+
+def connect(path):
+    connection = sqlite3.connect(path)
+    connection.row_factory = sqlite3.Row
+    return connection
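
A brief sketch (':memory:' is just an example path; the DROP assertion
holds for both the sqlparse and the regex fallback implementation):

    from galaxy.util import sqlite

    assert sqlite.is_read_only_query("SELECT * FROM foo")
    assert not sqlite.is_read_only_query("DROP TABLE foo")

    conn = sqlite.connect(':memory:')  # rows behave like dicts via sqlite3.Row
    conn.execute('CREATE TABLE t (x INTEGER)')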
diff --git a/lib/galaxy/util/streamball.py b/lib/galaxy/util/streamball.py
new file mode 100644
index 0000000..8445c22
--- /dev/null
+++ b/lib/galaxy/util/streamball.py
@@ -0,0 +1,59 @@
+"""
+A simple wrapper for writing tarballs as a stream.
+"""
+import logging
+import os
+import tarfile
+
+from galaxy.exceptions import ObjectNotFound
+
+log = logging.getLogger( __name__ )
+
+
+class StreamBall( object ):
+    def __init__( self, mode, members=None ):
+        self.members = members
+        if members is None:
+            self.members = []
+        self.mode = mode
+        self.wsgi_status = None
+        self.wsgi_headeritems = None
+
+    def add( self, file, relpath, check_file=False):
+        if check_file and len(file) > 0:
+            if not os.path.isfile(file):
+                raise ObjectNotFound
+            else:
+                self.members.append( ( file, relpath ) )
+        else:
+            self.members.append( ( file, relpath ) )
+
+    def stream( self, environ, start_response ):
+        response_write = start_response( self.wsgi_status, self.wsgi_headeritems )
+
+        class tarfileobj:
+            def write( self, *args, **kwargs ):
+                response_write( *args, **kwargs )
+        tf = tarfile.open( mode=self.mode, fileobj=tarfileobj() )
+        for (file, rel) in self.members:
+            tf.add( file, arcname=rel )
+        tf.close()
+        return []
+
+
+class ZipBall(object):
+    def __init__(self, tmpf, tmpd):
+        self._tmpf = tmpf
+        self._tmpd = tmpd
+
+    def stream(self, environ, start_response):
+        response_write = start_response( self.wsgi_status, self.wsgi_headeritems )
+        tmpfh = open( self._tmpf, 'rb' )
+        response_write(tmpfh.read())
+        tmpfh.close()
+        try:
+            os.unlink( self._tmpf )
+            os.rmdir( self._tmpd )
+        except OSError:
+            log.exception( "Unable to remove temporary library download archive and directory" )
+        return []
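
A hedged sketch of how a controller might drive StreamBall (paths and
headers are illustrative; in Galaxy the WSGI server ultimately calls
stream()):

    from galaxy.util.streamball import StreamBall

    ball = StreamBall('w|gz')
    ball.add('/tmp/data1.txt', 'archive/data1.txt')
    ball.wsgi_status = '200 OK'
    ball.wsgi_headeritems = [('Content-Type', 'application/x-gzip')]
    # the WSGI layer then invokes: ball.stream(environ, start_response)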
diff --git a/lib/galaxy/util/submodules.py b/lib/galaxy/util/submodules.py
new file mode 100644
index 0000000..135d68a
--- /dev/null
+++ b/lib/galaxy/util/submodules.py
@@ -0,0 +1,32 @@
+import logging
+
+from os import listdir
+
+log = logging.getLogger(__name__)
+
+
+def submodules(module):
+    unsorted_submodule_names = __submodule_names(module)
+    submodule_names = sorted(unsorted_submodule_names, reverse=True)
+    submodules = []
+    for submodule_name in submodule_names:
+        full_submodule = "%s.%s" % (module.__name__, submodule_name)
+        try:
+            __import__(full_submodule)
+            submodule = getattr(module, submodule_name)
+            submodules.append(submodule)
+        except BaseException as exception:
+            exception_str = str(exception)
+            message = "%s dynamic module could not be loaded: %s" % (full_submodule, exception_str)
+            log.debug(message)
+    return submodules
+
+
+def __submodule_names(module):
+    module_dir = module.__path__[0]
+    names = []
+    for fname in listdir(module_dir):
+        if not(fname.startswith("_")) and fname.endswith(".py"):
+            submodule_name = fname[:-len(".py")]
+            names.append(submodule_name)
+    return names
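
For example, running it against a stdlib package (any package with .py
submodules works the same way):

    import json

    from galaxy.util.submodules import submodules

    for mod in submodules(json):
        print(mod.__name__)  # e.g. json.tool, json.scanner, json.encoder, ...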
diff --git a/lib/galaxy/util/template.py b/lib/galaxy/util/template.py
new file mode 100644
index 0000000..ef62853
--- /dev/null
+++ b/lib/galaxy/util/template.py
@@ -0,0 +1,16 @@
+"""Entry point for the usage of Cheetah templating within Galaxy."""
+from Cheetah.Template import Template
+
+
+def fill_template( template_text, context=None, **kwargs ):
+    """Fill a cheetah template out for specified context.
+
+    If template_text is None, a TypeError is raised. If context is falsy
+    (the default is None), the keyword arguments to this function are used
+    as the context.
+    """
+    if template_text is None:
+        raise TypeError("Template text specified as None to fill_template.")
+    if not context:
+        context = kwargs
+    return str( Template( source=template_text, searchList=[context] ) )
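
For instance (Cheetah placeholder syntax):

    from galaxy.util.template import fill_template

    print(fill_template('Hello $name!', name='world'))               # Hello world!
    print(fill_template('Sum: ${a + b}', context={'a': 1, 'b': 2}))  # Sum: 3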
diff --git a/lib/galaxy/util/topsort.py b/lib/galaxy/util/topsort.py
new file mode 100644
index 0000000..306792d
--- /dev/null
+++ b/lib/galaxy/util/topsort.py
@@ -0,0 +1,210 @@
+"""
+Topological sort.
+
+From Tim Peters, see:
+   http://mail.python.org/pipermail/python-list/1999-July/006660.html
+
+topsort takes a list of pairs, where each pair (x, y) is taken to
+mean that x <= y wrt some abstract partial ordering.  The return
+value is a list, representing a total ordering that respects all
+the input constraints.
+E.g.,
+
+   topsort( [(1,2), (3,3)] )
+
+Valid topological sorts would be any of (but nothing other than)
+
+   [3, 1, 2]
+   [1, 3, 2]
+   [1, 2, 3]
+
+... however this variant ensures that 'key' order (first element of
+tuple) is preserved, so the following will be the result returned:
+
+   [1, 3, 2]
+
+because those are the permutations of the input elements that
+respect the "1 precedes 2" and "3 precedes 3" input constraints.
+Note that a constraint of the form (x, x) is really just a trick
+to make sure x appears *somewhere* in the output list.
+
+If there's a cycle in the constraints, say
+
+   topsort( [(1,2), (2,1)] )
+
+then CycleError is raised, and the exception object supports
+many methods to help analyze and break the cycles.  This requires
+a good deal more code than topsort itself!
+"""
+from galaxy.util.odict import odict as OrderedDict
+
+
+class CycleError(Exception):
+    def __init__(self, sofar, numpreds, succs):
+        Exception.__init__(self, "cycle in constraints",
+                           sofar, numpreds, succs)
+        self.preds = None
+
+    # return as much of the total ordering as topsort was able to
+    # find before it hit a cycle
+    def get_partial(self):
+        return self[1]
+
+    # return remaining elt -> count of predecessors map
+    def get_pred_counts(self):
+        return self[2]
+
+    # return remaining elt -> list of successors map
+    def get_succs(self):
+        return self[3]
+
+    # return remaining elements (== those that don't appear in
+    # get_partial())
+    def get_elements(self):
+        return self.get_pred_counts().keys()
+
+    # Return a list of pairs representing the full state of what's
+    # remaining (if you pass this list back to topsort, it will raise
+    # CycleError again, and if you invoke get_pairlist on *that*
+    # exception object, the result will be isomorphic to *this*
+    # invocation of get_pairlist).
+    # The idea is that you can use pick_a_cycle to find a cycle,
+    # through some means or another pick an (x,y) pair in the cycle
+    # you no longer want to respect, then remove that pair from the
+    # output of get_pairlist and try topsort again.
+    def get_pairlist(self):
+        succs = self.get_succs()
+        answer = []
+        for x in self.get_elements():
+            if x in succs:
+                for y in succs[x]:
+                    answer.append( (x, y) )
+            else:
+                # make sure x appears in topsort's output!
+                answer.append( (x, x) )
+        return answer
+
+    # return remaining elt -> list of predecessors map
+    def get_preds(self):
+        if self.preds is not None:
+            return self.preds
+        self.preds = preds = OrderedDict()
+        remaining_elts = self.get_elements()
+        for x in remaining_elts:
+            preds[x] = []
+        succs = self.get_succs()
+
+        for x in remaining_elts:
+            if x in succs:
+                for y in succs[x]:
+                    preds[y].append(x)
+
+        if __debug__:
+            for x in remaining_elts:
+                assert len(preds[x]) > 0
+        return preds
+
+    # return a cycle [x, ..., x] at random
+    def pick_a_cycle(self):
+        remaining_elts = self.get_elements()
+
+        # We know that everything in remaining_elts has a predecessor,
+        # but don't know that everything in it has a successor.  So
+        # crawling forward over succs may hit a dead end.  Instead we
+        # crawl backward over the preds until we hit a duplicate, then
+        # reverse the path.
+        preds = self.get_preds()
+        from random import choice
+        x = choice(remaining_elts)
+        answer = []
+        index = OrderedDict()
+        while x not in index:
+            index[x] = len(answer)  # index of x in answer
+            answer.append(x)
+            x = choice(preds[x])
+        answer.append(x)
+        answer = answer[index[x]:]
+        answer.reverse()
+        return answer
+
+
+def _numpreds_and_successors_from_pairlist(pairlist):
+    numpreds = OrderedDict()   # elt -> # of predecessors
+    successors = OrderedDict()  # elt -> list of successors
+    for first, second in pairlist:
+        # make sure every elt is a key in numpreds
+        if first not in numpreds:
+            numpreds[first] = 0
+        if second not in numpreds:
+            numpreds[second] = 0
+
+        # if they're the same, there's no real dependence
+        if first == second:
+            continue
+
+        # since first < second, second gains a pred ...
+        numpreds[second] = numpreds[second] + 1
+
+        # ... and first gains a succ
+        if first in successors:
+            successors[first].append(second)
+        else:
+            successors[first] = [second]
+    return numpreds, successors
+
+
+def topsort(pairlist):
+    numpreds, successors = _numpreds_and_successors_from_pairlist(pairlist)
+
+    # suck up everything without a predecessor
+    answer = [x for x in numpreds.keys() if numpreds[x] == 0]
+
+    # for everything in answer, knock down the pred count on
+    # its successors; note that answer grows *in* the loop
+    for x in answer:
+        assert numpreds[x] == 0
+        del numpreds[x]
+        if x in successors:
+            for y in successors[x]:
+                numpreds[y] = numpreds[y] - 1
+                if numpreds[y] == 0:
+                    answer.append(y)
+            # following "del" isn't needed; just makes
+            # CycleError details easier to grasp
+            del successors[x]
+
+    if numpreds:
+        # everything in numpreds has at least one predecessor ->
+        # there's a cycle
+        if __debug__:
+            for x in numpreds.keys():
+                assert numpreds[x] > 0
+        raise CycleError(answer, numpreds, successors)
+    return answer
+
+
+def topsort_levels(pairlist):
+    numpreds, successors = _numpreds_and_successors_from_pairlist(pairlist)
+
+    answer = []
+
+    while 1:
+        # Suck up everything without a predecessor.
+        levparents = [x for x in numpreds.keys() if numpreds[x] == 0]
+        if not levparents:
+            break
+        answer.append( levparents )
+        for levparent in levparents:
+            del numpreds[levparent]
+            if levparent in successors:
+                for levparentsucc in successors[levparent]:
+                    numpreds[levparentsucc] -= 1
+                del successors[levparent]
+
+    if numpreds:
+        # Everything in numpreds has at least one predecessor ->
+        # there's a cycle.
+        raise CycleError( answer, numpreds, successors )
+
+    return answer
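
A short demonstration, reusing the docstring's example plus a cycle:

    from galaxy.util.topsort import CycleError, topsort, topsort_levels

    print(topsort([(1, 2), (3, 3)]))                          # [1, 3, 2]
    print(topsort_levels([(1, 2), (1, 3), (2, 4), (3, 4)]))   # [[1], [2, 3], [4]]

    try:
        topsort([(1, 2), (2, 1)])
    except CycleError as e:
        print(e.pick_a_cycle())  # e.g. [1, 2, 1]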
diff --git a/lib/galaxy/util/ucsc.py b/lib/galaxy/util/ucsc.py
new file mode 100644
index 0000000..4869a6e
--- /dev/null
+++ b/lib/galaxy/util/ucsc.py
@@ -0,0 +1,35 @@
+"""
+Utilities for dealing with UCSC data.
+"""
+
+
+class UCSCLimitException( Exception ):
+    pass
+
+
+class UCSCOutWrapper( object ):
+    """File-like object that throws an exception if it encounters the UCSC limit error lines"""
+    def __init__( self, other ):
+        self.other = iter( other )
+        # Need one line of lookahead to be sure we are hitting the limit message
+        self.lookahead = None
+
+    def __iter__( self ):
+        return self
+
+    def next( self ):
+        if self.lookahead is None:
+            line = self.other.next()
+        else:
+            line = self.lookahead
+            self.lookahead = None
+        if line.startswith( "----------" ):
+            next_line = self.other.next()
+            if next_line.startswith( "Reached output limit" ):
+                raise UCSCLimitException( next_line.strip() )
+            else:
+                self.lookahead = next_line
+        return line
+
+    def readline(self):
+        return self.next()
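
A sketch of wrapping a UCSC response (the lines are made up; note the
wrapper is written against the Python 2 iterator protocol):

    from galaxy.util.ucsc import UCSCLimitException, UCSCOutWrapper

    lines = iter(['chr1\t100\t200\n',
                  '----------\n',
                  'Reached output limit of 100000 data values\n'])
    try:
        for line in UCSCOutWrapper(lines):
            print(line.rstrip())
    except UCSCLimitException as e:
        print('UCSC truncated the output: %s' % e)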
diff --git a/lib/galaxy/util/validation.py b/lib/galaxy/util/validation.py
new file mode 100644
index 0000000..2b27d1e
--- /dev/null
+++ b/lib/galaxy/util/validation.py
@@ -0,0 +1,38 @@
+""" Module for validation of incoming inputs.
+
+TODO: Refactor BaseController references to similar methods to use this module.
+"""
+from six import string_types
+
+from galaxy import exceptions
+from galaxy.util.sanitize_html import sanitize_html
+
+
+def validate_and_sanitize_basestring( key, val ):
+    if not isinstance( val, string_types ):
+        raise exceptions.RequestParameterInvalidException( '%s must be a string or unicode: %s'
+                                                           % ( key, str( type( val ) ) ) )
+    return sanitize_html( val, 'utf-8', 'text/html' )
+
+
+def validate_and_sanitize_basestring_list( key, val ):
+    try:
+        assert isinstance( val, list )
+        return [ sanitize_html( t, 'utf-8', 'text/html' ) for t in val ]
+    except ( AssertionError, TypeError ):
+        raise exceptions.RequestParameterInvalidException( '%s must be a list of strings: %s'
+                                                           % ( key, str( type( val ) ) ) )
+
+
+def validate_boolean( key, val ):
+    if not isinstance( val, bool ):
+        raise exceptions.RequestParameterInvalidException( '%s must be a boolean: %s'
+                                                           % ( key, str( type( val ) ) ) )
+    return val
+
+
+# TODO:
+# def validate_integer( self, key, val, min, max ):
+# def validate_float( self, key, val, min, max ):
+# def validate_number( self, key, val, min, max ):
+# def validate_genome_build( self, key, val ):
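
Typical use from a controller (the key name is illustrative):

    from galaxy.util.validation import validate_and_sanitize_basestring

    clean = validate_and_sanitize_basestring('comment', '<b>ok</b><script>x()</script>')
    # clean == '<b>ok</b>'; a non-string value raises
    # exceptions.RequestParameterInvalidException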
diff --git a/lib/galaxy/util/xml_macros.py b/lib/galaxy/util/xml_macros.py
new file mode 100644
index 0000000..b9a3eac
--- /dev/null
+++ b/lib/galaxy/util/xml_macros.py
@@ -0,0 +1,292 @@
+import os
+
+from copy import deepcopy
+from xml.etree import ElementInclude, ElementTree
+
+
+REQUIRED_PARAMETER = object()
+
+
+def load(path):
+    """
+    Loads tool from file system and preprocesses tool macros.
+    """
+    tree = raw_tool_xml_tree(path)
+    root = tree.getroot()
+
+    _import_macros(root, path)
+
+    # Collect tokens
+    tokens = _macros_of_type(root, 'token', lambda el: el.text or '')
+
+    # Expand xml macros
+    macro_dict = _macros_of_type(root, 'xml', lambda el: XmlMacroDef(el))
+    _expand_macros([root], macro_dict, tokens)
+
+    return tree
+
+
+def template_macro_params(root):
+    """
+    Look for template macros and populate param_dict (for cheetah)
+    with these.
+    """
+    param_dict = {}
+    macro_dict = _macros_of_type(root, 'template', lambda el: el.text)
+    for key, value in macro_dict.items():
+        param_dict[key] = value
+    return param_dict
+
+
+def raw_tool_xml_tree(path):
+    """ Load raw (no macro expansion) tree representation of tool represented
+    at the specified path.
+    """
+    tree = _parse_xml(path)
+    return tree
+
+
+def imported_macro_paths(root):
+    macros_el = _macros_el(root)
+    return _imported_macro_paths_from_el(macros_el)
+
+
+def _import_macros(root, path):
+    tool_dir = os.path.dirname(path)
+    macros_el = _macros_el(root)
+    if macros_el is not None:
+        macro_els = _load_macros(macros_el, tool_dir)
+        _xml_set_children(macros_el, macro_els)
+
+
+def _macros_el(root):
+    return root.find('macros')
+
+
+def _macros_of_type(root, type, el_func):
+    macros_el = root.find('macros')
+    macro_dict = {}
+    if macros_el is not None:
+        macro_els = macros_el.findall('macro')
+        filtered_els = [(macro_el.get("name"), el_func(macro_el))
+                        for macro_el in macro_els
+                        if macro_el.get('type') == type]
+        macro_dict = dict(filtered_els)
+    return macro_dict
+
+
+def _expand_tokens(elements, tokens):
+    if not tokens or not elements:
+        return
+
+    for element in elements:
+        _expand_tokens_for_el(element, tokens)
+
+
+def _expand_tokens_for_el(element, tokens):
+    value = element.text
+    if value:
+        new_value = _expand_tokens_str(element.text, tokens)
+        if not (new_value is value):
+            element.text = new_value
+    for key, value in element.attrib.items():
+        new_value = _expand_tokens_str(value, tokens)
+        if not (new_value is value):
+            element.attrib[key] = new_value
+    _expand_tokens(list(element), tokens)
+
+
+def _expand_tokens_str(str, tokens):
+    for key, value in tokens.items():
+        if str.find(key) > -1:
+            str = str.replace(key, value)
+    return str
+
+
+def _expand_macros(elements, macros, tokens):
+    if not macros and not tokens:
+        return
+
+    for element in elements:
+        while True:
+            expand_el = element.find('.//expand')
+            if expand_el is None:
+                break
+            _expand_macro(element, expand_el, macros, tokens)
+
+        _expand_tokens_for_el(element, tokens)
+
+
+def _expand_macro(element, expand_el, macros, tokens):
+    macro_name = expand_el.get('macro')
+    macro_def = macros[macro_name]
+    expanded_elements = deepcopy(macro_def.elements)
+
+    _expand_yield_statements(expanded_elements, expand_el)
+
+    # Recursively expand contained macros.
+    _expand_macros(expanded_elements, macros, tokens)
+    macro_tokens = macro_def.macro_tokens(expand_el)
+    if macro_tokens:
+        _expand_tokens(expanded_elements, macro_tokens)
+
+    # HACK for elementtree, newer implementations (etree/lxml) won't
+    # require this parent_map data structure but elementtree does not
+    # track parents or recognize .find('..').
+    # TODO fix this now that we're not using elementtree
+    parent_map = dict((c, p) for p in element.getiterator() for c in p)
+    _xml_replace(expand_el, expanded_elements, parent_map)
+
+
+def _expand_yield_statements(macro_def, expand_el):
+    yield_els = [yield_el for macro_def_el in macro_def for yield_el in macro_def_el.findall('.//yield')]
+
+    expand_el_children = list(expand_el)
+    macro_def_parent_map = \
+        dict((c, p) for macro_def_el in macro_def for p in macro_def_el.getiterator() for c in p)
+
+    for yield_el in yield_els:
+        _xml_replace(yield_el, expand_el_children, macro_def_parent_map)
+
+    # Replace yields at the top level of a macro, seems hacky approach
+    replace_yield = True
+    while replace_yield:
+        for i, macro_def_el in enumerate(macro_def):
+            if macro_def_el.tag == "yield":
+                for target in expand_el_children:
+                    i += 1
+                    macro_def.insert(i, deepcopy(target))
+                macro_def.remove(macro_def_el)
+                continue
+
+        replace_yield = False
+
+
+def _load_macros(macros_el, tool_dir):
+    macros = []
+    # Import macros from external files.
+    macros.extend(_load_imported_macros(macros_el, tool_dir))
+    # Load all directly defined macros.
+    macros.extend(_load_embedded_macros(macros_el, tool_dir))
+    return macros
+
+
+def _load_embedded_macros(macros_el, tool_dir):
+    macros = []
+
+    macro_els = []
+    # attribute typed macro
+    if macros_el is not None:
+        macro_els = macros_el.findall("macro")
+    for macro in macro_els:
+        if 'type' not in macro.attrib:
+            macro.attrib['type'] = 'xml'
+        macros.append(macro)
+
+    # type shortcuts (<xml> is a shortcut for <macro type="xml">,
+    # likewise for <template> and <token>).
+    typed_tag = ['template', 'xml', 'token']
+    for tag in typed_tag:
+        macro_els = []
+        if macros_el is not None:
+            macro_els = macros_el.findall(tag)
+        for macro_el in macro_els:
+            macro_el.attrib['type'] = tag
+            macro_el.tag = 'macro'
+            macros.append(macro_el)
+
+    return macros
+
+
+def _load_imported_macros(macros_el, tool_dir):
+    macros = []
+
+    for tool_relative_import_path in _imported_macro_paths_from_el(macros_el):
+        import_path = \
+            os.path.join(tool_dir, tool_relative_import_path)
+        file_macros = _load_macro_file(import_path, tool_dir)
+        macros.extend(file_macros)
+
+    return macros
+
+
+def _imported_macro_paths_from_el(macros_el):
+    imported_macro_paths = []
+    macro_import_els = []
+    if macros_el is not None:
+        macro_import_els = macros_el.findall("import")
+    for macro_import_el in macro_import_els:
+        raw_import_path = macro_import_el.text
+        tool_relative_import_path = \
+            os.path.basename(raw_import_path)  # Sanitize this
+        imported_macro_paths.append( tool_relative_import_path )
+    return imported_macro_paths
+
+
+def _load_macro_file(path, tool_dir):
+    tree = _parse_xml(path)
+    root = tree.getroot()
+    return _load_macros(root, tool_dir)
+
+
+def _xml_set_children(element, new_children):
+    for old_child in element:
+        element.remove(old_child)
+    for i, new_child in enumerate(new_children):
+        element.insert(i, new_child)
+
+
+def _xml_replace(query, targets, parent_map):
+    # parent_el = query.find('..') ## Something like this would be better with newer xml library
+    parent_el = parent_map[query]
+    matching_index = -1
+    # for index, el in enumerate(parent_el.iter('.')):  ## Something like this for newer implementation
+    for index, el in enumerate(list(parent_el)):
+        if el == query:
+            matching_index = index
+            break
+    assert matching_index >= 0
+    current_index = matching_index
+    for target in targets:
+        current_index += 1
+        parent_el.insert(current_index, deepcopy(target))
+    parent_el.remove(query)
+
+
+class XmlMacroDef(object):
+
+    def __init__(self, el):
+        self.elements = list(el)
+        parameters = {}
+        tokens = []
+        token_quote = "@"
+        for key, value in el.attrib.items():
+            if key == "token_quote":
+                token_quote = value
+            if key == "tokens":
+                for token in value.split(","):
+                    tokens.append((token, REQUIRED_PARAMETER))
+            elif key.startswith("token_"):
+                token = key[len("token_"):]
+                tokens.append((token, value))
+        for name, default in tokens:
+            parameters[name] = (token_quote, default)
+        self.parameters = parameters
+
+    def macro_tokens(self, expand_el):
+        tokens = {}
+        for key, (wrap_char, default_val) in self.parameters.items():
+            token_value = expand_el.attrib.get(key, default_val)
+            if token_value is REQUIRED_PARAMETER:
+                message = "Failed to expand macro - missing required parameter [%s]."
+                raise ValueError(message % key)
+            token_name = "%s%s%s" % (wrap_char, key.upper(), wrap_char)
+            tokens[token_name] = token_value
+        return tokens
+
+
+def _parse_xml(fname):
+    tree = ElementTree.parse(fname)
+    root = tree.getroot()
+    ElementInclude.include(root)
+    return tree
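
To make the expansion concrete, a hypothetical tool file (names and paths
invented) and how load() resolves it:

    <tool id="example" name="Example">
        <macros>
            <token name="@VERSION@">1.0</token>
            <xml name="common_inputs">
                <param name="input" type="data"/>
            </xml>
        </macros>
        <description>version @VERSION@</description>
        <inputs>
            <expand macro="common_inputs"/>
        </inputs>
    </tool>

    # assuming the XML above is saved as /tmp/tool.xml
    from galaxy.util.xml_macros import load

    tree = load('/tmp/tool.xml')
    # <expand macro="common_inputs"/> is replaced by the macro's <param>
    # element, and @VERSION@ in text is substituted with "1.0"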
diff --git a/lib/galaxy/version.py b/lib/galaxy/version.py
new file mode 100644
index 0000000..b462983
--- /dev/null
+++ b/lib/galaxy/version.py
@@ -0,0 +1,3 @@
+VERSION_MAJOR = "16.10"
+VERSION_MINOR = None
+VERSION = VERSION_MAJOR + ('.' + VERSION_MINOR if VERSION_MINOR else '')
diff --git a/lib/galaxy/visualization/__init__.py b/lib/galaxy/visualization/__init__.py
new file mode 100644
index 0000000..4174e93
--- /dev/null
+++ b/lib/galaxy/visualization/__init__.py
@@ -0,0 +1,3 @@
+"""
+Package for Galaxy visualization plugins.
+"""
diff --git a/lib/galaxy/visualization/data_providers/__init__.py b/lib/galaxy/visualization/data_providers/__init__.py
new file mode 100644
index 0000000..679ff01
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/__init__.py
@@ -0,0 +1,3 @@
+"""
+Galaxy visualization/visual analysis data providers.
+"""
diff --git a/lib/galaxy/visualization/data_providers/basic.py b/lib/galaxy/visualization/data_providers/basic.py
new file mode 100644
index 0000000..c394eb6
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/basic.py
@@ -0,0 +1,195 @@
+import sys
+from json import loads
+
+from galaxy.datatypes.tabular import Tabular
+
+
+class BaseDataProvider( object ):
+    """
+    Base class for data providers. Data providers (a) read and package data from datasets;
+    and (b) write subsets of data to new datasets.
+    """
+
+    def __init__( self, converted_dataset=None, original_dataset=None, dependencies=None,
+                  error_max_vals="Only the first %i values are returned." ):
+        """ Create basic data provider. """
+        self.converted_dataset = converted_dataset
+        self.original_dataset = original_dataset
+        self.dependencies = dependencies
+        self.error_max_vals = error_max_vals
+
+    def has_data( self, **kwargs ):
+        """
+        Returns true if dataset has data in the specified genome window, false
+        otherwise.
+        """
+        raise Exception( "Unimplemented Function" )
+
+    def get_iterator( self, **kwargs ):
+        """
+        Returns an iterator that provides data in the region chrom:start-end
+        """
+        raise Exception( "Unimplemented Function" )
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        """
+        Process data from an iterator to a format that can be provided to client.
+        """
+        raise Exception( "Unimplemented Function" )
+
+    def get_data( self, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
+        """
+        Returns data as specified by kwargs. start_val is the first element to
+        return and max_vals indicates the number of values to return.
+
+        Return value must be a dictionary with the following attributes:
+            dataset_type, data
+        """
+        iterator = self.get_iterator( chrom, start, end )
+        return self.process_data( iterator, start_val, max_vals, **kwargs )
+
+    def write_data_to_file( self, filename, **kwargs ):
+        """
+        Write data in region defined by chrom, start, and end to a file.
+        """
+        raise Exception( "Unimplemented Function" )
+
+
+class ColumnDataProvider( BaseDataProvider ):
+    """ Data provider for columnar data """
+    MAX_LINES_RETURNED = 30000
+
+    def __init__( self, original_dataset, max_lines_returned=MAX_LINES_RETURNED ):
+        # Compatibility check.
+        if not isinstance( original_dataset.datatype, Tabular ):
+            raise Exception( "Data provider can only be used with tabular data" )
+
+        # Attribute init.
+        self.original_dataset = original_dataset
+        # allow throttling
+        self.max_lines_returned = max_lines_returned
+
+    def get_data( self, columns=None, start_val=0, max_vals=None, skip_comments=True, **kwargs ):
+        """
+        Returns data from specified columns in dataset. Format is list of lists
+        where each list is a line of data.
+        """
+        if not columns:
+            raise TypeError( 'parameter required: columns' )
+
+        # TODO: validate kwargs
+        try:
+            max_vals = int( max_vals )
+            max_vals = min([ max_vals, self.max_lines_returned ])
+        except ( ValueError, TypeError ):
+            max_vals = self.max_lines_returned
+
+        try:
+            start_val = int( start_val )
+            start_val = max([ start_val, 0 ])
+        except ( ValueError, TypeError ):
+            start_val = 0
+
+        # skip comment lines (if any/avail)
+        # pre: should have original_dataset and
+        if( skip_comments and
+                self.original_dataset.metadata.comment_lines and
+                start_val < self.original_dataset.metadata.comment_lines ):
+            start_val = int( self.original_dataset.metadata.comment_lines )
+
+        # columns is an array of ints for now (should handle column names later)
+        columns = loads( columns )
+        for column in columns:
+            assert( ( column < self.original_dataset.metadata.columns ) and
+                    ( column >= 0 ) ), (
+                "column index (%d) must be positive and less" % ( column ) +
+                " than the number of columns: %d" % ( self.original_dataset.metadata.columns ) )
+        # print columns, start_val, max_vals, skip_comments, kwargs
+
+        # set up the response, column lists
+        response = {}
+        response[ 'data' ] = data = [ [] for column in columns ]
+        response[ 'meta' ] = meta = [{
+            'min'   : None,
+            'max'   : None,
+            'count' : 0,
+            'sum'   : 0
+        } for column in columns ]
+
+        column_types = [ self.original_dataset.metadata.column_types[ column ] for column in columns ]
+
+        # function for casting by column_types
+        def cast_val( val, type ):
+            """ Cast value based on type. Return None if can't be cast """
+            if type == 'int':
+                try:
+                    val = int( val )
+                except (ValueError, TypeError):
+                    return None
+            elif type == 'float':
+                try:
+                    val = float( val )
+                except:
+                    return None
+            return val
+
+        returning_data = False
+        f = open( self.original_dataset.file_name )
+        # TODO: add f.seek if given fptr in kwargs
+        for count, line in enumerate( f ):
+
+            # check line v. desired start, end
+            if count < start_val:
+                continue
+            if ( count - start_val ) >= max_vals:
+                break
+
+            returning_data = True
+
+            fields = line.split()
+            fields_len = len( fields )
+            # NOTE: this will return None/null for aberrant column values (including bad indices)
+            for index, column in enumerate( columns ):
+                column_val = None
+                column_type = column_types[ index ]
+                if column < fields_len:
+                    column_val = cast_val( fields[ column ], column_type )
+                    if column_val is not None:
+
+                        # if numeric, maintain min, max, sum
+                        if( column_type == 'float' or column_type == 'int' ):
+                            if( ( meta[ index ][ 'min' ] is None ) or ( column_val < meta[ index ][ 'min' ] ) ):
+                                meta[ index ][ 'min' ] = column_val
+
+                            if( ( meta[ index ][ 'max' ] is None ) or ( column_val > meta[ index ][ 'max' ] ) ):
+                                meta[ index ][ 'max' ] = column_val
+
+                            meta[ index ][ 'sum' ] += column_val
+
+                # maintain a count - for other stats
+                meta[ index ][ 'count' ] += 1
+                data[ index ].append( column_val )
+
+        response[ 'endpoint' ] = dict( last_line=( count - 1 ), file_ptr=f.tell() )
+        f.close()
+
+        if not returning_data:
+            return None
+
+        for index, meta in enumerate( response[ 'meta' ] ):
+            column_type = column_types[ index ]
+            count = meta[ 'count' ]
+
+            if( ( column_type == 'float' or column_type == 'int' ) and count ):
+                meta[ 'mean' ] = float( meta[ 'sum' ] ) / count
+
+                sorted_data = sorted( response[ 'data' ][ index ] )
+                middle_index = count // 2
+                if count % 2 == 0:
+                    meta[ 'median' ] = ( sorted_data[ middle_index - 1 ] + sorted_data[ middle_index ] ) / 2.0
+                else:
+                    meta[ 'median' ] = sorted_data[ middle_index ]
+
+        # NOTE: metadata data_lines is not a reliable source for the number of
+        # lines; an EOF marker would be needed to report it accurately.
+        return response
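+
+
+# A hedged usage sketch (dataset values are illustrative; column types are
+# assumed to be [ 'int', 'float' ] in the dataset metadata). For a tabular
+# dataset containing the lines
+#
+#     1   2.0
+#     2   4.0
+#     3   6.0
+#
+# provider.get_data( columns='[0, 1]' ) returns per-column value lists plus
+# summary stats, roughly:
+#
+#     { 'data': [ [ 1, 2, 3 ], [ 2.0, 4.0, 6.0 ] ],
+#       'meta': [ { 'min': 1, 'max': 3, 'count': 3, 'sum': 6,
+#                   'mean': 2.0, 'median': 2 },
+#                 ... ],
+#       'endpoint': { 'last_line': ..., 'file_ptr': ... } }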
diff --git a/lib/galaxy/visualization/data_providers/cigar.py b/lib/galaxy/visualization/data_providers/cigar.py
new file mode 100644
index 0000000..2ed6f28
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/cigar.py
@@ -0,0 +1,104 @@
+'''
+Functions for working with SAM/BAM CIGAR representation.
+'''
+
+import operator
+
+
+def get_ref_based_read_seq_and_cigar( read_seq, read_start, ref_seq, ref_seq_start, cigar ):
+    '''
+    Returns a ( new_read_seq, new_cigar ) that can be used with reference
+    sequence to reconstruct the read. The new read sequence includes only
+    bases that cannot be recovered from the reference: mismatches and
+    insertions (soft clipped bases are not included). The new cigar replaces
+    Ms with =s and Xs because the M operation can denote a sequence match or
+    mismatch.
+    '''
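+
+    # A hand-worked example (derived from the logic below, not from upstream
+    # docs): read 'ACGT' at position 0 against reference 'ACTT' with cigar
+    # [ (0, 4) ] (i.e. 4M) yields ( 'G', '2=1X1=' ) -- only the mismatched
+    # base is kept, and the M run is split into =/X/= operations.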
+
+    if not ref_seq:
+        return read_seq, cigar
+
+    # Set up position for reference, read.
+    ref_seq_pos = read_start - ref_seq_start
+    read_pos = 0
+
+    # Create new read sequence, cigar.
+    new_read_seq = ''
+    new_cigar = ''
+    cigar_ops = 'MIDNSHP=X'
+    for op_tuple in cigar:
+        op, op_len = op_tuple
+        # Op is index into string 'MIDNSHP=X'
+        if op == 0:  # Match
+            # Transform Ms to =s and Xs using reference.
+            new_op = ''
+            total_count = 0
+            while total_count < op_len and ref_seq_pos < len( ref_seq ):
+                match, count = _match_mismatch_counter( read_seq, read_pos, ref_seq, ref_seq_pos )
+                # Use min because count cannot exceed remainder of operation.
+                count = min( count, op_len - total_count )
+                if match:
+                    new_op = '='
+                else:
+                    new_op = 'X'
+                    # Include mismatched bases in new read sequence.
+                    new_read_seq += read_seq[ read_pos:read_pos + count ]
+                new_cigar += '%i%s' % ( count, new_op )
+                total_count += count
+                read_pos += count
+                ref_seq_pos += count
+
+            # If end of read falls outside of ref_seq data, leave as M.
+            if total_count < op_len:
+                new_cigar += '%iM' % ( op_len - total_count )
+        elif op == 1:  # Insertion
+            new_cigar += '%i%s' % ( op_len, cigar_ops[ op ] )
+            # Include insertion bases in new read sequence.
+            new_read_seq += read_seq[ read_pos:read_pos + op_len ]
+            read_pos += op_len
+        elif op in [ 2, 3, 6 ]:  # Deletion, Skip, or Padding
+            ref_seq_pos += op_len
+            new_cigar += '%i%s' % ( op_len, cigar_ops[ op ] )
+        elif op == 4:  # Soft clipping
+            read_pos += op_len
+            new_cigar += '%i%s' % ( op_len, cigar_ops[ op ] )
+        elif op == 5:  # Hard clipping
+            new_cigar += '%i%s' % ( op_len, cigar_ops[ op ] )
+        elif op in [ 7, 8 ]:  # Match or mismatch
+            if op == 8:
+                # Include mismatched bases in new read sequence.
+                new_read_seq += read_seq[ read_pos:read_pos + op_len ]
+            read_pos += op_len
+            ref_seq_pos += op_len
+            new_cigar += '%i%s' % ( op_len, cigar_ops[ op ] )
+
+    return ( new_read_seq, new_cigar )
+
+
+def _match_mismatch_counter( s1, p1, s2, p2 ):
+    '''
+    Count consecutive matches/mismatches between strings s1 and s2
+    starting at p1 and p2, respectively.
+    '''
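+
+    # Hand-worked example (illustrative): _match_mismatch_counter( 'ACGT', 0,
+    # 'ACTT', 0 ) returns ( True, 2 ) -- two consecutive matches before the
+    # first mismatch.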
+
+    # Do initial comparison to determine whether to count matches or
+    # mismatches.
+    if s1[ p1 ] == s2[ p2 ]:
+        cmp_fn = operator.eq
+        match = True
+    else:
+        cmp_fn = operator.ne
+        match = False
+
+    # Increment counts to move to next characters.
+    count = 1
+    p1 += 1
+    p2 += 1
+
+    # Count matches/mismatches.
+    while p1 < len( s1 ) and p2 < len( s2 ) and cmp_fn( s1[ p1 ], s2[ p2 ] ):
+        count += 1
+        p1 += 1
+        p2 += 1
+
+    return match, count
diff --git a/lib/galaxy/visualization/data_providers/genome.py b/lib/galaxy/visualization/data_providers/genome.py
new file mode 100644
index 0000000..c96b9f2
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -0,0 +1,1717 @@
+"""
+Data providers for genome visualizations.
+"""
+
+import itertools
+import math
+import os
+import random
+import re
+import sys
+from json import loads
+
+import pysam
+from bx.interval_index_file import Indexes
+from bx.bbi.bigbed_file import BigBedFile
+from bx.bbi.bigwig_file import BigWigFile
+from pysam import ctabix
+
+from galaxy.datatypes.interval import Bed, Gff, Gtf
+from galaxy.datatypes.util.gff_util import convert_gff_coords_to_bed, GFFFeature, GFFInterval, GFFReaderWrapper, parse_gff_attributes
+from galaxy.visualization.data_providers.basic import BaseDataProvider
+from galaxy.visualization.data_providers.cigar import get_ref_based_read_seq_and_cigar
+
+#
+# Utility functions.
+#
+
+
+def float_nan(n):
+    '''
+    Return None instead of NaN to pass jQuery 1.4's strict JSON
+    '''
+    if n != n:  # NaN != NaN
+        return None
+    else:
+        return float(n)
+
+
+def get_bounds( reads, start_pos_index, end_pos_index ):
+    '''
+    Returns the minimum and maximum position for a set of reads.
+    '''
+    max_low = sys.maxint
+    max_high = -sys.maxint
+    for read in reads:
+        if read[ start_pos_index ] < max_low:
+            max_low = read[ start_pos_index ]
+        if read[ end_pos_index ] > max_high:
+            max_high = read[ end_pos_index ]
+    return max_low, max_high
+
+
+def _convert_between_ucsc_and_ensemble_naming( chrom ):
+    '''
+    Convert between UCSC chromosome ('chr1') naming conventions and Ensembl
+    naming conventions ('1')
+    '''
+    if chrom.startswith( 'chr' ):
+        # Convert from UCSC to Ensembl
+        return chrom[ 3: ]
+    else:
+        # Convert from Ensembl to UCSC
+        return 'chr' + chrom
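+
+# Hedged examples (hand-derived from the branches above):
+#     _convert_between_ucsc_and_ensemble_naming( 'chr1' )  returns  '1'
+#     _convert_between_ucsc_and_ensemble_naming( 'MT' )    returns  'chrMT'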
+
+
+def _chrom_naming_matches( chrom1, chrom2 ):
+    return ( chrom1.startswith( 'chr' ) and chrom2.startswith( 'chr' ) ) or ( not chrom1.startswith( 'chr' ) and not chrom2.startswith( 'chr' ) )
+
+
+class FeatureLocationIndexDataProvider( BaseDataProvider ):
+    """
+    Reads/writes/queries feature location index (FLI) datasets.
+    """
+
+    def __init__( self, converted_dataset ):
+        self.converted_dataset = converted_dataset
+
+    def get_data( self, query ):
+        # Init.
+        textloc_file = open( self.converted_dataset.file_name, 'r' )
+        line_len = int( textloc_file.readline() )
+        file_len = os.path.getsize( self.converted_dataset.file_name )
+        query = query.lower()
+
+        # Find query in file using binary search.
+        low = 0
+        high = file_len // line_len
+        while low < high:
+            mid = ( low + high ) // 2
+            position = mid * line_len
+            textloc_file.seek( position )
+
+            # Compare line with query and update low, high.
+            line = textloc_file.readline()
+            if line < query:
+                low = mid + 1
+            else:
+                high = mid
+
+        # Need to move back one line because last line read may be included in
+        # results.
+        position = low * line_len
+        textloc_file.seek( position )
+
+        # At right point in file, generate hits.
+        result = []
+        while True:
+            line = textloc_file.readline()
+            if not line.startswith( query ):
+                break
+            if line[ -1: ] == '\n':
+                line = line[ :-1 ]
+            result.append( line.split()[1:] )
+
+        textloc_file.close()
+        return result
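+
+    # A hedged sketch (inferred from the code above, not from upstream docs)
+    # of the FLI layout the binary search assumes: a first line declaring a
+    # fixed record width, then fixed-width, lexicographically sorted records,
+    # e.g.
+    #
+    #     40
+    #     brca2  chr13  32889610  32973805
+    #
+    # with every line padded to the declared width so that record i starts at
+    # byte offset line_len * i, which is what makes seek()-based bisection
+    # valid.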
+
+
+class GenomeDataProvider( BaseDataProvider ):
+    """
+    Base class for genome data providers. All genome providers use BED coordinate
+    format (0-based, half-open coordinates) for both queries and returned data.
+    """
+
+    dataset_type = None
+
+    """
+    Mapping from column name to payload data; this mapping is used to create
+    filters. Key is column name, value is a dict with mandatory key 'index' and
+    optional key 'name'. E.g. this defines column 4
+
+    col_name_data_attr_mapping = {4 : { index: 5, name: 'Score' } }
+    """
+    col_name_data_attr_mapping = {}
+
+    def __init__( self, converted_dataset=None, original_dataset=None, dependencies=None,
+                  error_max_vals="Only the first %i %s in this region are displayed." ):
+        super( GenomeDataProvider, self ).__init__( converted_dataset=converted_dataset,
+                                                    original_dataset=original_dataset,
+                                                    dependencies=dependencies,
+                                                    error_max_vals=error_max_vals )
+
+    def write_data_to_file( self, regions, filename ):
+        """
+        Write data in region defined by chrom, start, and end to a file.
+        """
+        raise Exception( "Unimplemented Function" )
+
+    def valid_chroms( self ):
+        """
+        Returns chroms/contigs that the dataset contains
+        """
+        return None  # by default
+
+    def has_data( self, chrom, start, end, **kwargs ):
+        """
+        Returns true if dataset has data in the specified genome window, false
+        otherwise.
+        """
+        raise Exception( "Unimplemented Function" )
+
+    def open_data_file( self ):
+        """
+        Open data file for reading data.
+        """
+        raise Exception( "Unimplemented Function" )
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        """
+        Returns an iterator that provides data in the region chrom:start-end
+        """
+        raise Exception( "Unimplemented Function" )
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        """
+        Process data from an iterator to a format that can be provided to client.
+        """
+        raise Exception( "Unimplemented Function" )
+
+    def get_data( self, chrom=None, low=None, high=None, start_val=0, max_vals=sys.maxint, **kwargs ):
+        """
+        Returns data in region defined by chrom, start, and end. start_val and
+        max_vals are used to denote the data to return: start_val is the first element to
+        return and max_vals indicates the number of values to return.
+
+        Return value must be a dictionary with the following attributes:
+            dataset_type, data
+        """
+        start, end = int( low ), int( high )
+        data_file = self.open_data_file()
+        iterator = self.get_iterator( data_file, chrom, start, end, **kwargs )
+        data = self.process_data( iterator, start_val, max_vals, start=start, end=end, **kwargs )
+        try:
+            data_file.close()
+        except AttributeError:
+            # FIXME: some data providers do not have a close function implemented.
+            # Providers without a close function include:
+            #  bx IntervalIndex
+            pass
+
+        return data
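+
+    # A minimal sketch of the template-method contract above (illustrative
+    # only; the class below is an assumption, not upstream code): subclasses
+    # supply open_data_file, get_iterator and process_data, and inherit
+    # get_data's open/iterate/close flow.
+    #
+    #     class TsvRegionDataProvider( GenomeDataProvider ):
+    #         dataset_type = 'interval_index'
+    #
+    #         def open_data_file( self ):
+    #             return open( self.original_dataset.file_name )
+    #
+    #         def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+    #             return ( line for line in data_file
+    #                      if line.split()[0] == chrom )
+    #
+    #         def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+    #             return { 'data': [ line.split() for line in iterator ],
+    #                      'dataset_type': self.dataset_type }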
+
+    def get_genome_data( self, chroms_info, **kwargs ):
+        """
+        Returns data for complete genome.
+        """
+        genome_data = []
+        for chrom_info in chroms_info[ 'chrom_info' ]:
+            chrom = chrom_info[ 'chrom' ]
+            chrom_len = chrom_info[ 'len' ]
+            chrom_data = self.get_data( chrom, 0, chrom_len, **kwargs )
+            # FIXME: data providers probably should never return None.
+            # Some data providers return None when there's no data, so
+            # create a dummy dict if necessary.
+            if not chrom_data:
+                chrom_data = {
+                    'data': None
+                }
+            chrom_data[ 'region' ] = "%s:%i-%i" % ( chrom, 0, chrom_len )
+            genome_data.append( chrom_data )
+
+        return {
+            'data': genome_data,
+            'dataset_type': self.dataset_type
+        }
+
+    def get_filters( self ):
+        """
+        Returns filters for provider's data. Return value is a list of
+        filters; each filter is a dictionary with the keys 'name', 'index', 'type'.
+        NOTE: This method uses the original dataset's datatype and metadata to
+        create the filters.
+        """
+        # Get column names.
+        try:
+            column_names = self.original_dataset.datatype.column_names
+        except AttributeError:
+            try:
+                column_names = range( self.original_dataset.metadata.columns )
+            except Exception:  # Give up
+                return []
+
+        # Dataset must have column types; if not, cannot create filters.
+        try:
+            column_types = self.original_dataset.metadata.column_types
+        except AttributeError:
+            return []
+
+        # Create and return filters.
+        filters = []
+        if self.original_dataset.metadata.viz_filter_cols:
+            for viz_col_index in self.original_dataset.metadata.viz_filter_cols:
+                # Some columns are optional, so can't assume that a filter
+                # column is in dataset.
+                if viz_col_index >= len( column_names ):
+                    continue
+                col_name = column_names[ viz_col_index ]
+                # Make sure that column has a mapped index. If not, do not add filter.
+                try:
+                    attrs = self.col_name_data_attr_mapping[ col_name ]
+                except KeyError:
+                    continue
+                filters.append( { 'name': attrs[ 'name' ],
+                                  'type': column_types[viz_col_index],
+                                  'index': attrs[ 'index' ] } )
+        return filters
+
+    def get_default_max_vals( self ):
+        return 5000
+
+#
+# -- Base mixins and providers --
+#
+
+
+class FilterableMixin:
+    def get_filters( self ):
+        """ Returns a dataset's filters. """
+
+        # is_ functions taken from Tabular.set_meta
+        def is_int( column_text ):
+            try:
+                int( column_text )
+                return True
+            except ValueError:
+                return False
+
+        def is_float( column_text ):
+            try:
+                float( column_text )
+                return True
+            except ValueError:
+                if column_text.strip().lower() == 'na':
+                    return True  # na is special cased to be a float
+                return False
+
+        #
+        # Get filters.
+        # TODOs:
+        # (a) might be useful to move this into each datatype's set_meta method;
+        # (b) could look at first N lines to ensure GTF attribute types are consistent.
+        #
+        filters = []
+        # HACK: first 8 fields are for drawing, so start filter column index at 9.
+        filter_col = 8
+        if isinstance( self.original_dataset.datatype, Gff ):
+            # Can filter by score and GTF attributes.
+            filters = [ { 'name': 'Score',
+                          'type': 'number',
+                          'index': filter_col,
+                          'tool_id': 'Filter1',
+                          'tool_exp_name': 'c6' } ]
+            filter_col += 1
+            if isinstance( self.original_dataset.datatype, Gtf ):
+                # Create filters based on dataset metadata.
+                for name, a_type in self.original_dataset.metadata.attribute_types.items():
+                    if a_type in [ 'int', 'float' ]:
+                        filters.append(
+                            { 'name': name,
+                              'type': 'number',
+                              'index': filter_col,
+                              'tool_id': 'gff_filter_by_attribute',
+                              'tool_exp_name': name } )
+                        filter_col += 1
+        elif isinstance( self.original_dataset.datatype, Bed ):
+            # Can filter by score column only.
+            filters = [ { 'name': 'Score',
+                          'type': 'number',
+                          'index': filter_col,
+                          'tool_id': 'Filter1',
+                          'tool_exp_name': 'c5'
+                          } ]
+
+        return filters
+
+
+class TabixDataProvider( FilterableMixin, GenomeDataProvider ):
+    """
+    Tabix index data provider for the Galaxy track browser.
+    """
+
+    dataset_type = 'tabix'
+
+    col_name_data_attr_mapping = { 4: { 'index': 4, 'name': 'Score' } }
+
+    def open_data_file( self ):
+        return ctabix.Tabixfile(self.dependencies['bgzip'].file_name,
+                                index=self.converted_dataset.file_name)
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        # chrom must be a string, start/end integers.
+        # in previous versions of pysam, unicode was accepted for chrom, but not in 8.4
+        chrom = str(chrom)
+        start = int(start)
+        end = int(end)
+        if end >= (2 << 29):
+            end = (2 << 29) - 1  # Tabix-enforced maximum
+        # Get iterator using either naming scheme.
+        iterator = iter( [] )
+        if chrom in data_file.contigs:
+            iterator = data_file.fetch(reference=chrom, start=start, end=end)
+        else:
+            # Try alternative naming scheme.
+            chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+            if chrom in data_file.contigs:
+                iterator = data_file.fetch(reference=chrom, start=start, end=end)
+
+        return iterator
+
+    def write_data_to_file( self, regions, filename ):
+        out = open( filename, "w" )
+
+        data_file = self.open_data_file()
+        for region in regions:
+            # Write data in region.
+            iterator = self.get_iterator( data_file, region.chrom, region.start, region.end )
+            for line in iterator:
+                out.write( "%s\n" % line )
+
+        # TODO: once Pysam is updated and Tabixfile has a close() method,
+        # data_file.close()
+
+        out.close()
+
+#
+# -- Interval data providers --
+#
+
+
+class IntervalDataProvider( GenomeDataProvider ):
+    """
+    Processes interval data from native format to payload format.
+
+    Payload format: [ uid (offset), start, end, name, strand, thick_start, thick_end, blocks ]
+    """
+
+    dataset_type = 'interval_index'
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        raise Exception( "Unimplemented Function" )
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        """
+        Provides
+        """
+        # Build data to return. Payload format is:
+        # [ <guid/offset>, <start>, <end>, <name>, <strand> ]
+        #
+        # First three entries are mandatory, others are optional.
+        #
+        filter_cols = loads( kwargs.get( "filter_cols", "[]" ) )
+        no_detail = ( "no_detail" in kwargs )
+        rval = []
+        message = None
+
+        # Subtract one b/c columns are 1-based but indices are 0-based.
+        def col_fn(col):
+            return None if col is None else col - 1
+
+        start_col = self.original_dataset.metadata.startCol - 1
+        end_col = self.original_dataset.metadata.endCol - 1
+        strand_col = col_fn( self.original_dataset.metadata.strandCol )
+        name_col = col_fn( self.original_dataset.metadata.nameCol )
+        for count, line in enumerate( iterator ):
+            if count < start_val:
+                continue
+            if max_vals and count - start_val >= max_vals:
+                message = self.error_max_vals % ( max_vals, "features" )
+                break
+
+            feature = line.split()
+            length = len(feature)
+            # Unique id is just a hash of the line
+            payload = [ hash(line), int( feature[start_col] ), int( feature[end_col] ) ]
+
+            if no_detail:
+                rval.append( payload )
+                continue
+
+            # Name, strand.
+            if name_col:
+                payload.append( feature[name_col] )
+            if strand_col:
+                # Put empty name as placeholder.
+                if not name_col:
+                    payload.append( "" )
+                payload.append( feature[strand_col] )
+
+            # Score (filter data)
+            if length >= 5 and filter_cols and filter_cols[0] == "Score":
+                try:
+                    payload.append( float( feature[4] ) )
+                except ValueError:
+                    payload.append( feature[4] )
+
+            rval.append( payload )
+
+        return { 'data': rval, 'message': message }
+
+    def write_data_to_file( self, regions, filename ):
+        raise Exception( "Unimplemented Function" )
+
+
+class IntervalTabixDataProvider( TabixDataProvider, IntervalDataProvider ):
+    """
+    Provides data from a BED file indexed via tabix.
+    """
+    pass
+
+
+#
+# -- BED data providers --
+#
+
+class BedDataProvider( GenomeDataProvider ):
+    """
+    Processes BED data from native format to payload format.
+
+    Payload format: [ uid (offset), start, end, name, strand, thick_start, thick_end, blocks ]
+    """
+
+    dataset_type = 'interval_index'
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        raise Exception( "Unimplemented Method" )
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        """
+        Provides
+        """
+        # Build data to return. Payload format is:
+        # [ <guid/offset>, <start>, <end>, <name>, <strand>, <thick_start>,
+        #   <thick_end>, <blocks> ]
+        #
+        # First three entries are mandatory, others are optional.
+        #
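+        # A hand-worked example (BED line is illustrative, not from upstream
+        # docs): the 12-column line
+        #     chr1  100  200  feat1  0  +  100  200  0  2  10,20  0,80
+        # becomes the payload
+        #     [ <hash>, 100, 200, 'feat1', '+', 100, 200,
+        #       [ (100, 110), (180, 200) ] ]
+        # with the score appended only when 'Score' is the first filter column.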
+        filter_cols = loads( kwargs.get( "filter_cols", "[]" ) )
+        no_detail = ( "no_detail" in kwargs )
+        rval = []
+        message = None
+        for count, line in enumerate( iterator ):
+            if count < start_val:
+                continue
+            if max_vals and count - start_val >= max_vals:
+                message = self.error_max_vals % ( max_vals, "features" )
+                break
+            # TODO: can we use column metadata to fill out payload?
+            # TODO: use function to set payload data
+
+            feature = line.split()
+            length = len(feature)
+            # Unique id is just a hash of the line
+            payload = [ hash(line), int(feature[1]), int(feature[2]) ]
+
+            if no_detail:
+                rval.append( payload )
+                continue
+
+            # Name, strand, thick start, thick end.
+            if length >= 4:
+                payload.append(feature[3])
+            if length >= 6:
+                payload.append(feature[5])
+            if length >= 8:
+                payload.append(int(feature[6]))
+                payload.append(int(feature[7]))
+
+            # Blocks.
+            if length >= 12:
+                block_sizes = [ int(n) for n in feature[10].split(',') if n != '']
+                block_starts = [ int(n) for n in feature[11].split(',') if n != '' ]
+                blocks = zip( block_sizes, block_starts )
+                payload.append( [ ( int(feature[1]) + block[1], int(feature[1]) + block[1] + block[0] ) for block in blocks ] )
+
+            # Score (filter data)
+            if length >= 5 and filter_cols and filter_cols[0] == "Score":
+                # If dataset doesn't have name/strand/thick start/thick end/blocks,
+                # add placeholders. There should be 8 entries if all attributes
+                # are present.
+                payload.extend( [ None for i in range( 8 - len( payload ) ) ] )
+
+                try:
+                    payload.append( float( feature[4] ) )
+                except ValueError:
+                    payload.append( feature[4] )
+
+            rval.append( payload )
+
+        return { 'data': rval, 'dataset_type': self.dataset_type, 'message': message }
+
+    def write_data_to_file( self, regions, filename ):
+        out = open( filename, "w" )
+
+        for region in regions:
+            # Write data in region.
+            chrom = region.chrom
+            start = region.start
+            end = region.end
+            data_file = self.open_data_file()
+            iterator = self.get_iterator( data_file, chrom, start, end )
+            for line in iterator:
+                out.write( "%s\n" % line )
+            data_file.close()
+
+        out.close()
+
+
+class BedTabixDataProvider( TabixDataProvider, BedDataProvider ):
+    """
+    Provides data from a BED file indexed via tabix.
+    """
+    pass
+
+
+class RawBedDataProvider( BedDataProvider ):
+    """
+    Provide data from BED file.
+
+    NOTE: this data provider does not use indices, and hence will be very slow
+    for large datasets.
+    """
+
+    def get_iterator( self, data_file, chrom=None, start=None, end=None, **kwargs ):
+        # Read first line in order to match chrom naming format.
+        line = data_file.readline()
+        dataset_chrom = line.split()[0]
+        if not _chrom_naming_matches( chrom, dataset_chrom ):
+            chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+        # Undo read.
+        data_file.seek( 0 )
+
+        def line_filter_iter():
+            for line in open( self.original_dataset.file_name ):
+                if line.startswith( "track" ) or line.startswith( "browser" ):
+                    continue
+                feature = line.split()
+                feature_chrom = feature[0]
+                feature_start = int( feature[1] )
+                feature_end = int( feature[2] )
+                if ( chrom is not None and feature_chrom != chrom ) \
+                        or ( end is not None and feature_start > end ) \
+                        or ( start is not None and feature_end < start ):
+                    continue
+                yield line
+
+        return line_filter_iter()
+
+#
+# -- VCF data providers --
+#
+
+
+class VcfDataProvider( GenomeDataProvider ):
+    """
+    Abstract class that processes VCF data from native format to payload format.
+
+    Payload format: An array of entries for each locus in the file. Each array
+    has the following entries:
+        1. GUID (unused)
+        2. location (0-based)
+        3. reference base(s)
+        4. alternative base(s)
+        5. quality score
+        6. whether variant passed filter
+        7. sample genotypes -- a single string with samples separated by commas; empty string
+           denotes the reference genotype
+        8-end: allele counts for each alternative
+    """
+
+    col_name_data_attr_mapping = { 'Qual': { 'index': 6, 'name': 'Qual' } }
+
+    dataset_type = 'variant'
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        """
+        Returns a dict with the following attributes::
+
+            data - a list of variants with the format
+
+            .. raw:: text
+
+                [<guid>, <start>, <end>, <name>, cigar, seq]
+
+            message - error/informative message
+
+        """
+        data = []
+        message = None
+
+        def get_mapping( ref, alt ):
+            """
+            Returns ( offset, new_seq, cigar ) tuple that defines mapping of
+            alt to ref. Cigar format is an array of [ op_index, length ] pairs
+            where op_index is the 0-based index into the string "MIDNSHP=X"
+            """
+
+            cig_ops = "MIDNSHP=X"
+
+            ref_len = len( ref )
+            alt_len = len( alt )
+
+            # Substitutions?
+            if ref_len == alt_len:
+                return 0, alt, [ [ cig_ops.find( "M" ), ref_len ] ]
+
+            # Deletions?
+            alt_in_ref_index = ref.find( alt )
+            if alt_in_ref_index != -1:
+                return alt_in_ref_index, ref[ alt_in_ref_index + 1: ], [ [ cig_ops.find( "D" ), ref_len - alt_len ] ]
+
+            # Insertions?
+            ref_in_alt_index = alt.find( ref )
+            if ref_in_alt_index != -1:
+                return ref_in_alt_index, alt[ ref_in_alt_index + 1: ], [ [ cig_ops.find( "I" ), alt_len - ref_len ] ]
+
+        # Pack data.
+        genotype_re = re.compile( r'/|\|' )
+        for count, line in enumerate( iterator ):
+            if count < start_val:
+                continue
+            if max_vals and count - start_val >= max_vals:
+                message = self.error_max_vals % ( max_vals, "features" )
+                break
+
+            # Split line and aggregate data.
+            feature = line.split()
+            pos, c_id, ref, alt, qual, c_filter, info = feature[ 1:8 ]
+
+            # Format and samples data are optional.
+            samples_data = []
+            if len( feature ) > 8:
+                samples_data = feature[ 9: ]
+
+            # VCF is 1-based but provided position is 0-based.
+            pos = int( pos ) - 1
+
+            # FIXME: OK to skip?
+            if alt == '.':
+                count -= 1
+                continue
+
+            # Set up array to track allele counts.
+            allele_counts = [ 0 for i in range( alt.count( ',' ) + 1 ) ]
+            sample_gts = []
+
+            if samples_data:
+                # Process and pack samples' genotype and count alleles across samples.
+                alleles_seen = {}
+                has_alleles = False
+
+                for i, sample in enumerate( samples_data ):
+                    # Parse and count alleles.
+                    genotype = sample.split( ':' )[ 0 ]
+                    has_alleles = False
+                    alleles_seen.clear()
+                    for allele in genotype_re.split( genotype ):
+                        try:
+                            # This may throw a ValueError if allele is missing.
+                            allele = int( allele )
+
+                            # Only count allele if it hasn't been seen yet.
+                            if allele != 0 and allele not in alleles_seen:
+                                allele_counts[ allele - 1 ] += 1
+                                alleles_seen[ allele ] = True
+                                has_alleles = True
+                        except ValueError:
+                            pass
+
+                    # If no alleles, use empty string as proxy.
+                    if not has_alleles:
+                        genotype = ''
+
+                    sample_gts.append( genotype )
+            else:
+                # No samples, so set allele count and sample genotype manually.
+                allele_counts = [ 1 ]
+                sample_gts = [ '1/1' ]
+
+            # Add locus data.
+            locus_data = [
+                -1,
+                pos,
+                c_id,
+                ref,
+                alt,
+                qual,
+                c_filter,
+                ','.join( sample_gts )
+            ]
+            locus_data.extend( allele_counts )
+            data.append( locus_data )
+
+        return { 'data': data, 'message': message }
+
+    def write_data_to_file( self, regions, filename ):
+        out = open( filename, "w" )
+        data_file = self.open_data_file()
+
+        for region in regions:
+            # Write data in region.
+            iterator = self.get_iterator( data_file, region.chrom, region.start, region.end )
+            for line in iterator:
+                out.write( "%s\n" % line )
+        out.close()
+
+
+class VcfTabixDataProvider( TabixDataProvider, VcfDataProvider ):
+    """
+    Provides data from a VCF file indexed via tabix.
+    """
+
+    dataset_type = 'variant'
+
+
+class RawVcfDataProvider( VcfDataProvider ):
+    """
+    Provide data from VCF file.
+
+    NOTE: this data provider does not use indices, and hence will be very slow
+    for large datasets.
+    """
+
+    def open_data_file( self ):
+        return open( self.original_dataset.file_name )
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        # Skip comments.
+        line = None
+        for line in data_file:
+            if not line.startswith("#"):
+                break
+
+        # If the file is empty or the last line is a comment, there are no
+        # data lines.
+        if line is None or line.startswith( "#" ):
+            return []
+
+        # Match chrom naming format.
+        if line:
+            dataset_chrom = line.split()[0]
+            if not _chrom_naming_matches( chrom, dataset_chrom ):
+                chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+
+        def line_in_region( vcf_line, chrom, start, end ):
+            """ Returns true if line is in region. """
+            variant_chrom, variant_start = vcf_line.split()[ 0:2 ]
+            # VCF format is 1-based.
+            variant_start = int( variant_start ) - 1
+            return variant_chrom == chrom and variant_start >= start and variant_start <= end
+
+        def line_filter_iter():
+            """ Yields lines in data that are in region chrom:start-end """
+            # Yield data line read above.
+            if line_in_region( line, chrom, start, end ):
+                yield line
+
+            # Search for and yield other data lines.
+            for data_line in data_file:
+                if line_in_region( data_line, chrom, start, end ):
+                    yield data_line
+
+        return line_filter_iter()
+
+
+class BamDataProvider( GenomeDataProvider, FilterableMixin ):
+    """
+    Provides access to intervals from a sorted indexed BAM file. Coordinate
+    data is reported in BED format: 0-based, half-open.
+    """
+
+    dataset_type = 'bai'
+
+    def get_filters( self ):
+        """
+        Returns filters for dataset.
+        """
+        # HACK: first 7 fields are for drawing, so start filter column index at 7.
+        filter_col = 7
+        filters = []
+        filters.append( { 'name': 'Mapping Quality',
+                          'type': 'number',
+                          'index': filter_col } )
+        return filters
+
+    def write_data_to_file( self, regions, filename ):
+        """
+        Write reads in regions to file.
+        """
+
+        # Open current BAM file using index.
+        bamfile = pysam.AlignmentFile( self.original_dataset.file_name, mode='rb',
+                                       index_filename=self.converted_dataset.file_name )
+
+        # TODO: write headers as well?
+        new_bamfile = pysam.AlignmentFile( filename, template=bamfile, mode='wb' )
+
+        for region in regions:
+            # Write data from region.
+            chrom = region.chrom
+            start = region.start
+            end = region.end
+
+            try:
+                data = bamfile.fetch(start=start, end=end, reference=chrom)
+            except ValueError:
+                # Try alternative chrom naming.
+                chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+                try:
+                    data = bamfile.fetch( start=start, end=end, reference=chrom )
+                except ValueError:
+                    return None
+
+            # Write reads in region.
+            for read in data:
+                new_bamfile.write( read )
+
+        # Cleanup.
+        new_bamfile.close()
+        bamfile.close()
+
+    def open_data_file( self ):
+        # Attempt to open the BAM file with index
+        return pysam.AlignmentFile( self.original_dataset.file_name, mode='rb',
+                                    index_filename=self.converted_dataset.file_name )
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        """
+        Returns an iterator that provides data in the region chrom:start-end
+        """
+
+        # Fetch and return data.
+        chrom = str(chrom)
+        start = int(start)
+        end = int(end)
+        try:
+            data = data_file.fetch( start=start, end=end, reference=chrom )
+        except ValueError:
+            # Try alternative chrom naming.
+            chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+            try:
+                data = data_file.fetch( start=start, end=end, reference=chrom )
+            except ValueError:
+                return None
+        return data
+
+    def process_data( self, iterator, start_val=0, max_vals=None, ref_seq=None,
+                      iterator_type='nth', mean_depth=None, start=0, end=0, **kwargs ):
+        """
+        Returns a dict with the following attributes::
+
+            data - a list of reads with the format
+                [<guid>, <start>, <end>, <name>, <read_1>, <read_2>, [empty], <mapq_scores>]
+
+                where <read_1> has the format
+                    [<start>, <end>, <cigar>, <strand>, <read_seq>]
+
+                and <read_2> has the format
+                    [<start>, <end>, <cigar>, <strand>, <read_seq>]
+
+                Field 7 is empty so that mapq scores' location matches that in single-end reads.
+                For single-end reads, read has format:
+                    [<guid>, <start>, <end>, <name>, <cigar>, <strand>, <seq>, <mapq_score>]
+
+                NOTE: read end and sequence data are not valid for reads outside of
+                requested region and should not be used.
+
+            max_low - lowest coordinate for the returned reads
+            max_high - highest coordinate for the returned reads
+            message - error/informative message
+        """
+        # No iterator indicates no reads.
+        if iterator is None:
+            return { 'data': [], 'message': None }
+
+        #
+        # Helper functions.
+        #
+
+        def decode_strand( read_flag, mask ):
+            """ Decode strand from read flag. """
+
+            strand_flag = ( read_flag & mask == 0 )
+            if strand_flag:
+                return "+"
+            else:
+                return "-"
+
+        def _random_read_iterator( read_iterator, threshold ):
+            """
+            An iterator that returns a random stream of reads from the read_iterator
+            as well as corresponding pairs for returned reads.
+            threshold is a value in [0,1] that denotes the percentage of reads
+            to return.
+            """
+            for e in read_iterator:
+                if e.qname in paired_pending or random.uniform( 0, 1 ) <= threshold:
+                    yield e
+
+        def _nth_read_iterator( read_iterator, threshold ):
+            """
+            An iterator that returns every nth read.
+            """
+
+            # Convert threshold to N for stepping through iterator.
+            n = int( 1 / threshold )
+            return itertools.islice( read_iterator, None, None, n )
+
+        # -- Choose iterator. --
+
+        # Calculate threshold for non-sequential iterators based on mean_depth and read length.
+        try:
+            first_read = next( iterator )
+        except StopIteration:
+            # no reads.
+            return { 'data': [], 'message': None, 'max_low': start, 'max_high': start }
+
+        read_len = len( first_read.seq )
+        num_reads = max( ( end - start ) * mean_depth / float( read_len ), 1 )
+        threshold = float( max_vals ) / num_reads
+        iterator = itertools.chain( iter( [ first_read ] ), iterator )
+
+        # Use the specified iterator type, except when threshold is >= 1.
+        # A threshold >= 1 indicates that all reads are to be returned, so no
+        # sampling is needed and the sequential iterator is used.
+        if iterator_type == 'sequential' or threshold >= 1:
+            read_iterator = iterator
+        elif iterator_type == 'random':
+            read_iterator = _random_read_iterator( iterator, threshold )
+        elif iterator_type == 'nth':
+            read_iterator = _nth_read_iterator( iterator, threshold )
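+
+        # Worked numbers (illustrative): for a 10 kb window at mean_depth 30
+        # with 100 bp reads, num_reads ~= 10000 * 30 / 100 = 3000; with
+        # max_vals = 500 the threshold is ~0.17, so the 'nth' iterator keeps
+        # roughly every 6th read.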
+
+        #
+        # Encode reads as list of lists.
+        #
+        results = []
+        paired_pending = {}
+        unmapped = 0
+        message = None
+        count = 0
+        for read in read_iterator:
+            if count < start_val:
+                continue
+            if ( count - start_val - unmapped ) >= max_vals:
+                message = self.error_max_vals % ( max_vals, "reads" )
+                break
+
+            # If not mapped, skip read.
+            is_mapped = ( read.flag & 0x0004 == 0 )
+            if not is_mapped:
+                unmapped += 1
+                continue
+
+            qname = read.qname
+            seq = read.seq
+            strand = decode_strand( read.flag, 0x0010 )
+            if read.cigar is not None:
+                read_len = sum( [cig[1] for cig in read.cigar] )  # Use cigar to determine length
+            else:
+                read_len = len(seq)  # If no cigar, just use sequence length
+
+            if read.is_proper_pair:
+                if qname in paired_pending:
+                    # Found pair.
+                    pair = paired_pending[qname]
+                    results.append( [ hash( "%i_%s" % ( pair['start'], qname ) ),
+                                      pair['start'],
+                                      read.pos + read_len,
+                                      qname,
+                                      [ pair['start'], pair['end'], pair['cigar'], pair['strand'], pair['seq'] ],
+                                      [ read.pos, read.pos + read_len, read.cigar, strand, seq ],
+                                      None, [ pair['mapq'], read.mapq ]
+                                      ] )
+                    del paired_pending[qname]
+                else:
+                    # Insert first of pair.
+                    paired_pending[qname] = { 'start': read.pos, 'end': read.pos + read_len, 'seq': seq, 'mate_start': read.mpos,
+                                              'rlen': read_len, 'strand': strand, 'cigar': read.cigar, 'mapq': read.mapq }
+                    count += 1
+            else:
+                results.append( [ hash( "%i_%s" % ( read.pos, qname ) ),
+                                read.pos, read.pos + read_len, qname,
+                                read.cigar, strand, read.seq, read.mapq ] )
+                count += 1
+
+        # Take care of reads whose mates are out of range.
+        for qname, read in paired_pending.iteritems():
+            if read['mate_start'] < read['start']:
+                # Mate is before read.
+                read_start = read['mate_start']
+                read_end = read['end']
+                # Make read_1 start=end so that length is 0 b/c we don't know
+                # read length.
+                r1 = [ read['mate_start'], read['mate_start'] ]
+                r2 = [ read['start'], read['end'], read['cigar'], read['strand'], read['seq'] ]
+            else:
+                # Mate is after read.
+                read_start = read['start']
+                # Make read_2 start=end so that length is 0 b/c we don't know
+                # read length. Hence, end of read is start of read_2.
+                read_end = read['mate_start']
+                r1 = [ read['start'], read['end'], read['cigar'], read['strand'], read['seq'] ]
+                r2 = [ read['mate_start'], read['mate_start'] ]
+
+            results.append( [ hash( "%i_%s" % ( read_start, qname ) ), read_start, read_end, qname, r1, r2, [read[ 'mapq' ], 125] ] )
+
+        # Clean up. TODO: is this needed? If so, we'll need a cleanup function after processing the data.
+        # bamfile.close()
+
+        def compress_seq_and_cigar( read, start_field, cigar_field, seq_field ):
+            '''
+            Use reference-based compression to compress read sequence and cigar.
+            '''
+            read_seq, read_cigar = get_ref_based_read_seq_and_cigar( read[ seq_field ].upper(),
+                                                                     read[ start_field ],
+                                                                     ref_seq.sequence,
+                                                                     ref_seq.start,
+                                                                     read[ cigar_field ] )
+            read[ seq_field ] = read_seq
+            read[ cigar_field ] = read_cigar
+
+        def convert_cigar( read, start_field, cigar_field, seq_field ):
+            '''
+            Convert read cigar from pysam format to string format.
+            '''
+            cigar_ops = 'MIDNSHP=X'
+            read_cigar = ''
+            for op_tuple in read[ cigar_field ]:
+                read_cigar += '%i%s' % ( op_tuple[1], cigar_ops[ op_tuple[0] ] )
+            read[ cigar_field ] = read_cigar
+
+        # Choose method for processing reads. Use reference-based compression
+        # if possible. Otherwise, convert cigar.
+        if ref_seq:
+            # Uppercase for easy comparison.
+            ref_seq.sequence = ref_seq.sequence.upper()
+            process_read = compress_seq_and_cigar
+        else:
+            process_read = convert_cigar
+
+        # Process reads.
+        for read in results:
+            if isinstance( read[ 5 ], list ):
+                # Paired-end read.
+                if len( read[4] ) > 2:
+                    process_read( read[4], 0, 2, 4 )
+                if len( read[5] ) > 2:
+                    process_read( read[5], 0, 2, 4 )
+            else:
+                # Single-end read.
+                process_read( read, 1, 4, 6)
+
+        max_low, max_high = get_bounds( results, 1, 2 )
+
+        return { 'data': results, 'message': message, 'max_low': max_low, 'max_high': max_high }
+
+
+class SamDataProvider( BamDataProvider ):
+
+    dataset_type = 'bai'
+
+    def __init__( self, converted_dataset=None, original_dataset=None, dependencies=None ):
+        """ Create SamDataProvider. """
+        super( SamDataProvider, self ).__init__( converted_dataset=converted_dataset,
+                                                 original_dataset=original_dataset,
+                                                 dependencies=dependencies )
+
+        # To use BamDataProvider, original dataset must be BAM and
+        # converted dataset must be BAI. Use BAI from BAM metadata.
+        if converted_dataset:
+            self.original_dataset = converted_dataset
+            self.converted_dataset = converted_dataset.metadata.bam_index
+
+
+class BBIDataProvider( GenomeDataProvider ):
+    """
+    BBI data provider for the Galaxy track browser.
+    """
+
+    dataset_type = 'bigwig'
+
+    def valid_chroms( self ):
+        # No way to return this info as of now
+        return None
+
+    def has_data( self, chrom ):
+        f, bbi = self._get_dataset()
+        all_dat = bbi.query( chrom, 0, 2147483647, 1 ) or \
+            bbi.query( _convert_between_ucsc_and_ensemble_naming( chrom ), 0, 2147483647, 1 )
+        f.close()
+        return all_dat is not None
+
+    def get_data( self, chrom, start, end, start_val=0, max_vals=None, num_samples=1000, **kwargs ):
+        start = int( start )
+        end = int( end )
+
+        # Helper function for getting summary data regardless of chromosome
+        # naming convention.
+        def _summarize_bbi( bbi, chrom, start, end, num_points ):
+            return bbi.summarize( chrom, start, end, num_points ) or \
+                bbi.summarize( _convert_between_ucsc_and_ensemble_naming( chrom ), start, end, num_points )
+
+        # The bigwig can be a standalone bigwig file, in which case we use
+        # original_dataset, or it can come from a wig->bigwig conversion, in
+        # which case we use converted_dataset.
+        f, bbi = self._get_dataset()
+
+        # If stats requested, compute overall summary data for the range
+        # start:end but no reduced data. This is currently used by the client
+        # to determine the default range.
+        if 'stats' in kwargs:
+            summary = _summarize_bbi( bbi, chrom, start, end, 1 )
+            f.close()
+
+            min_val = 0
+            max_val = 0
+            mean = 0
+            sd = 0
+            if summary is not None:
+                # Does the summary contain any defined values?
+                valid_count = summary.valid_count[0]
+                if valid_count > 0:
+                    # Compute $\mu \pm 2\sigma$ to provide an estimate for upper and lower
+                    # bounds that contain ~95% of the data.
+                    mean = summary.sum_data[0] / valid_count
+                    var = max( summary.sum_squares[0] - mean, 0 )  # Prevent variance underflow.
+                    if valid_count > 1:
+                        var /= valid_count - 1
+                    sd = math.sqrt( var )
+                    min_val = summary.min_val[0]
+                    max_val = summary.max_val[0]
+
+            return dict( data=dict( min=min_val, max=max_val, mean=mean, sd=sd ) )
+
+        def summarize_region( bbi, chrom, start, end, num_points ):
+            '''
+            Returns results from summarizing a region using num_points.
+            NOTE: num_points cannot be greater than end - start or BBI
+            will return None for all positions.
+            '''
+            result = []
+
+            # Get summary; this samples at intervals of length
+            # (end - start)/num_points -- i.e. drops any fractional component
+            # of interval length.
+            summary = _summarize_bbi( bbi, chrom, start, end, num_points )
+            if summary:
+                # mean = summary.sum_data / summary.valid_count
+
+                # Standard deviation by bin, not yet used
+                # var = summary.sum_squares - mean
+                # var /= minimum( valid_count - 1, 1 )
+                # sd = sqrt( var )
+
+                pos = start
+                step_size = (end - start) / num_points
+
+                for i in range( num_points ):
+                    result.append( (pos, float_nan( summary.sum_data[i] / summary.valid_count[i] ) ) )
+                    pos += step_size
+
+            return result
+
+        # Approach is different depending on region size.
+        num_samples = int( num_samples )
+        if end - start < num_samples:
+            # Get values for individual bases in region, including start and end.
+            # To do this, need to increase end to next base and request number of points.
+            num_points = end - start + 1
+            end += 1
+        else:
+            #
+            # The goal is to sample the region between start and end uniformly
+            # using ~N (num_samples) data points. The challenge is that the
+            # size of sampled intervals is rarely a whole number of bases, so
+            # sampling using N points will leave the end of the region
+            # unsampled due to the remainder left by each interval. To rectify
+            # this, a new N is calculated based on the step size that covers
+            # as much of the region as possible.
+            #
+            # However, this still leaves some of the region unsampled. This
+            # could be addressed by repeatedly sampling the remainder using a
+            # smaller and smaller step_size, but that would require iteratively
+            # querying the BBI file, which could be time consuming.
+            #
+
+            # Start with N samples.
+            num_points = num_samples
+            step_size = ( end - start ) / num_points
+            # Add additional points to sample in the remainder not covered by
+            # the initial N samples.
+            remainder_start = start + step_size * num_points
+            additional_points = ( end - remainder_start ) / step_size
+            num_points += additional_points
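+
+            # Worked numbers (illustrative): for a 3700 bp region with
+            # num_samples = 1000, step_size = 3700 / 1000 = 3; the first 1000
+            # points cover 3000 bp and the remainder adds ( 3700 - 3000 ) / 3
+            # = 233 points, so 1233 points sample 3699 bp and 1 bp is left
+            # unsampled.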
+
+        result = summarize_region( bbi, chrom, start, end, num_points )
+
+        # Cleanup and return.
+        f.close()
+        return {
+            'data': result,
+            'dataset_type': self.dataset_type
+        }
+
+
+class BigBedDataProvider( BBIDataProvider ):
+    def _get_dataset( self ):
+        # Nothing converts to bigBed so we don't consider converted dataset
+        f = open( self.original_dataset.file_name )
+        return f, BigBedFile(file=f)
+
+
+class BigWigDataProvider( BBIDataProvider ):
+    """
+    Provides data from BigWig files; position data is reported in 1-based
+    coordinate system, i.e. wiggle format.
+    """
+    def _get_dataset( self ):
+        if self.converted_dataset is not None:
+            f = open( self.converted_dataset.file_name )
+        else:
+            f = open( self.original_dataset.file_name )
+        return f, BigWigFile(file=f)
+
+
+class IntervalIndexDataProvider( FilterableMixin, GenomeDataProvider ):
+    """
+    Interval index files used for GFF, Pileup files.
+    """
+    col_name_data_attr_mapping = { 4: { 'index': 4, 'name': 'Score' } }
+
+    dataset_type = 'interval_index'
+
+    def write_data_to_file( self, regions, filename ):
+        source = open( self.original_dataset.file_name )
+        index = Indexes( self.converted_dataset.file_name )
+        out = open( filename, 'w' )
+
+        for region in regions:
+            # Write data from region.
+            chrom = region.chrom
+            start = region.start
+            end = region.end
+            for start, end, offset in index.find( chrom, start, end ):
+                source.seek( offset )
+
+                # HACK: write differently depending on original dataset format.
+                if self.original_dataset.ext not in [ 'gff', 'gff3', 'gtf' ]:
+                    line = source.readline()
+                    out.write( line )
+                else:
+                    reader = GFFReaderWrapper( source, fix_strand=True )
+                    feature = reader.next()
+                    for interval in feature.intervals:
+                        out.write( '\t'.join( interval.fields ) + '\n' )
+
+        source.close()
+        out.close()
+
+    def open_data_file( self ):
+        return Indexes( self.converted_dataset.file_name )
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        """
+        Returns an iterator for data in data_file in chrom:start-end
+        """
+        if chrom not in data_file.indexes:
+            # Try alternative naming.
+            chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+
+        return data_file.find(chrom, start, end)
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        results = []
+        message = None
+        source = open( self.original_dataset.file_name )
+
+        #
+        # Build data to return. Payload format is:
+        # [ <guid/offset>, <start>, <end>, <name>, <score>, <strand>, <thick_start>,
+        #   <thick_end>, <blocks> ]
+        #
+        # First three entries are mandatory, others are optional.
+        #
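+        # For example (values illustrative only), a feature at chr1:100-200
+        # could be packaged as [ <offset>, 100, 200, ... ], with the optional
+        # entries following in the order listed above when available.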
+        filter_cols = loads( kwargs.get( "filter_cols", "[]" ) )
+        no_detail = ( "no_detail" in kwargs )
+        for count, val in enumerate( iterator ):
+            offset = val[2]
+            if count < start_val:
+                continue
+            if count - start_val >= max_vals:
+                message = self.error_max_vals % ( max_vals, "features" )
+                break
+            source.seek( offset )
+            # TODO: can we use column metadata to fill out payload?
+
+            # GFF dataset.
+            reader = GFFReaderWrapper( source, fix_strand=True )
+            feature = reader.next()
+            payload = package_gff_feature( feature, no_detail, filter_cols )
+            payload.insert( 0, offset )
+
+            results.append( payload )
+
+        return { 'data': results, 'message': message }
+
+
+class RawGFFDataProvider( GenomeDataProvider ):
+    """
+    Provide data from GFF file that has not been indexed.
+
+    NOTE: this data provider does not use indices, and hence will be very slow
+    for large datasets.
+    """
+
+    dataset_type = 'interval_index'
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        """
+        Returns an iterator that provides data in the region chrom:start-end as well as
+        a file offset.
+        """
+        source = open( self.original_dataset.file_name )
+
+        # Read first line in order to match chrom naming format.
+        line = source.readline()
+
+        # If line empty, assume file is empty and return empty iterator.
+        if len( line ) == 0:
+            return iter([])
+
+        # Determine chromosome naming format.
+        dataset_chrom = line.split()[0]
+        if not _chrom_naming_matches( chrom, dataset_chrom ):
+            chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+        # Undo read.
+        source.seek( 0 )
+
+        def features_in_region_iter():
+            offset = 0
+            for feature in GFFReaderWrapper( source, fix_strand=True ):
+                # Only provide features that are in region.
+                feature_start, feature_end = convert_gff_coords_to_bed( [ feature.start, feature.end ] )
+                if feature.chrom == chrom and feature_end > start and feature_start < end:
+                    yield feature, offset
+                offset += feature.raw_size
+
+        return features_in_region_iter()
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        """
+        Process data from an iterator to a format that can be provided to client.
+        """
+        filter_cols = loads( kwargs.get( "filter_cols", "[]" ) )
+        no_detail = ( "no_detail" in kwargs )
+        results = []
+        message = None
+
+        for count, ( feature, offset ) in enumerate( iterator ):
+            if count < start_val:
+                continue
+            if count - start_val >= max_vals:
+                message = self.error_max_vals % ( max_vals, "features" )
+                break
+
+            payload = package_gff_feature( feature, no_detail=no_detail, filter_cols=filter_cols )
+            payload.insert( 0, offset )
+            results.append( payload )
+
+        return { 'data': results, 'dataset_type': self.dataset_type, 'message': message }
+
+
+class GtfTabixDataProvider( TabixDataProvider ):
+    """
+    Returns data from GTF datasets that are indexed via tabix.
+    """
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        # Loop through lines and group by transcript_id; each group is a feature.
+
+        # TODO: extend this code or use code in gff_util to process GFF/3 as well
+        # and then create a generic GFFDataProvider that can be used with both
+        # raw and tabix datasets.
+        features = {}
+        for count, line in enumerate( iterator ):
+            line_attrs = parse_gff_attributes( line.split('\t')[8] )
+            transcript_id = line_attrs[ 'transcript_id' ]
+            if transcript_id in features:
+                feature = features[ transcript_id ]
+            else:
+                feature = []
+                features[ transcript_id ] = feature
+            feature.append( GFFInterval( None, line.split( '\t') ) )
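+        # E.g. (illustrative): two GTF lines sharing transcript_id "t1" are
+        # collapsed into a single feature with two GFFInterval entries.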
+
+        # Process data.
+        filter_cols = loads( kwargs.get( "filter_cols", "[]" ) )
+        no_detail = ( "no_detail" in kwargs )
+        results = []
+        message = None
+
+        for count, intervals in enumerate( features.values() ):
+            if count < start_val:
+                continue
+            if count - start_val >= max_vals:
+                message = self.error_max_vals % ( max_vals, "features" )
+                break
+
+            feature = GFFFeature( None, intervals=intervals )
+            payload = package_gff_feature( feature, no_detail=no_detail, filter_cols=filter_cols )
+            payload.insert( 0, feature.intervals[ 0 ].attributes[ 'transcript_id' ] )
+            results.append( payload )
+
+        return { 'data': results, 'message': message }
+
+#
+# -- ENCODE Peak data providers.
+#
+
+
+class ENCODEPeakDataProvider( GenomeDataProvider ):
+    """
+    Abstract class that processes ENCODEPeak data from native format to payload format.
+
+    Payload format: [ uid (offset), start, end, name, strand, thick_start, thick_end, blocks ]
+    """
+
+    def get_iterator( self, data_file, chrom, start, end, **kwargs ):
+        raise Exception( "Unimplemented Method" )
+
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        """
+        Provides
+        """
+
+        # FIXMEs:
+        # (1) should be able to unify some of this code with BedDataProvider.process_data
+        # (2) is an optional number of parameters supported?
+
+        # Build data to return. Payload format is:
+        # [ <guid/offset>, <start>, <end>, <name>, <strand>, <thick_start>,
+        #   <thick_end>, <blocks> ]
+        #
+        # First three entries are mandatory, others are optional.
+        #
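+        # The native line is assumed to follow the ENCODE narrowPeak layout
+        # (illustrative):
+        #   chrom  start  end  name  score  strand  signalValue  pValue  qValue  peak
+        # so feature[1]/feature[2] below are start/end and feature[4..8] carry
+        # the score and statistics.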
+        no_detail = ( "no_detail" in kwargs )
+        rval = []
+        message = None
+        for count, line in enumerate( iterator ):
+            if count < start_val:
+                continue
+            if max_vals and count - start_val >= max_vals:
+                message = self.error_max_vals % ( max_vals, "features" )
+                break
+
+            feature = line.split()
+
+            # Feature initialization.
+            payload = [
+                # GUID is just a hash of the line
+                hash( line ),
+                # Add start, end.
+                int( feature[1] ),
+                int( feature[2] )
+            ]
+
+            if no_detail:
+                rval.append( payload )
+                continue
+
+            # Extend with additional data.
+            payload.extend( [
+                # Add name, strand.
+                feature[3],
+                feature[5],
+                # Thick start, end are feature start, end for now.
+                int( feature[1] ),
+                int( feature[2] ),
+                # No blocks.
+                None,
+                # Filtering data: Score, signalValue, pValue, qValue.
+                float( feature[4] ),
+                float( feature[6] ),
+                float( feature[7] ),
+                float( feature[8] )
+            ] )
+
+            rval.append( payload )
+
+        return { 'data': rval, 'message': message }
+
+
+class ENCODEPeakTabixDataProvider( TabixDataProvider, ENCODEPeakDataProvider ):
+    """
+    Provides data from an ENCODEPeak dataset indexed via tabix.
+    """
+
+    def get_filters( self ):
+        """
+        Returns filters for dataset.
+        """
+        # HACK: first 8 fields are for drawing, so start filter column index at 9.
+        filter_col = 8
+        filters = []
+        filters.append( { 'name': 'Score',
+                          'type': 'number',
+                          'index': filter_col,
+                          'tool_id': 'Filter1',
+                          'tool_exp_name': 'c6' } )
+        filter_col += 1
+        filters.append( { 'name': 'Signal Value',
+                          'type': 'number',
+                          'index': filter_col,
+                          'tool_id': 'Filter1',
+                          'tool_exp_name': 'c7' } )
+        filter_col += 1
+        filters.append( { 'name': 'pValue',
+                          'type': 'number',
+                          'index': filter_col,
+                          'tool_id': 'Filter1',
+                          'tool_exp_name': 'c8' } )
+        filter_col += 1
+        filters.append( { 'name': 'qValue',
+                          'type': 'number',
+                          'index': filter_col,
+                          'tool_id': 'Filter1',
+                          'tool_exp_name': 'c9' } )
+        return filters
+
+#
+# -- ChromatinInteraction data providers --
+#
+
+
+class ChromatinInteractionsDataProvider( GenomeDataProvider ):
+    def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+        """
+        Provides
+        """
+
+        rval = []
+        message = None
+        for count, line in enumerate( iterator ):
+            if count < start_val:
+                continue
+            if max_vals and count - start_val >= max_vals:
+                message = self.error_max_vals % ( max_vals, "interactions" )
+                break
+
+            feature = line.split()
+
+            s1 = int( feature[1] )
+            e1 = int( feature[2] )
+            c = feature[3]
+            s2 = int( feature[4] )
+            e2 = int( feature[5] )
+            v = float( feature[6] )
+
+            # Feature initialization.
+            payload = [
+                # GUID is just a hash of the line
+                hash( line ),
+                # Add start1, end1, chr2, start2, end2, value.
+                s1, e1, c, s2, e2, v
+            ]
+
+            rval.append( payload )
+
+        return { 'data': rval, 'message': message }
+
+    def get_default_max_vals( self ):
+        return 100000
+
+
+class ChromatinInteractionsTabixDataProvider( TabixDataProvider, ChromatinInteractionsDataProvider ):
+    def get_iterator( self, data_file, chrom, start=0, end=sys.maxint, interchromosomal=False, **kwargs ):
+        """
+        """
+        # Modify start as needed to get earlier interactions with start region.
+        span = int( end ) - int( start )
+        filter_start = max( 0, int( start ) - span - span / 2 )
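+        # E.g. (illustrative): for start=10000, end=20000, span = 10000 and
+        # filter_start = max( 0, 10000 - 10000 - 5000 ) = 0, so interactions
+        # anchored well upstream of the view are still fetched and then
+        # filtered below.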
+
+        def filter( iter ):
+            for line in iter:
+                feature = line.split()
+                s1 = int( feature[1] )
+                e1 = int( feature[2] )
+                c = feature[3]
+                s2 = int( feature[4] )
+                e2 = int( feature[5] )
+                # Check for intrachromosomal interactions.
+                if ( ( s1 + s2 ) / 2 <= end ) and ( ( e1 + e2 ) / 2 >= start ) and ( c == chrom ):
+                    yield line
+                # Check for interchromosomal interactions.
+                if interchromosomal and c != chrom:
+                    yield line
+        return filter( TabixDataProvider.get_iterator( self, data_file, chrom, filter_start, end ) )
+
+#
+# -- Helper methods. --
+#
+
+
+def package_gff_feature( feature, no_detail=False, filter_cols=[] ):
+    """ Package a GFF feature in an array for data providers. """
+    feature = convert_gff_coords_to_bed( feature )
+
+    # No detail means only start, end.
+    if no_detail:
+        return [ feature.start, feature.end ]
+
+    # Return full feature.
+    payload = [ feature.start,
+                feature.end,
+                feature.name(),
+                feature.strand,
+                # No notion of thick start, end in GFF, so make everything
+                # thick.
+                feature.start,
+                feature.end
+                ]
+
+    # HACK: ignore interval with name 'transcript' from feature.
+    # Cufflinks puts this interval in each of its transcripts,
+    # and they mess up trackster by covering the feature's blocks.
+    # This interval will always be a feature's first interval,
+    # and the GFF's third column is its feature name.
+    feature_intervals = feature.intervals
+    if feature.intervals[0].fields[2] == 'transcript':
+        feature_intervals = feature.intervals[1:]
+    # Add blocks.
+    block_sizes = [ (interval.end - interval.start ) for interval in feature_intervals ]
+    block_starts = [ ( interval.start - feature.start ) for interval in feature_intervals ]
+    blocks = zip( block_sizes, block_starts )
+    payload.append( [ ( feature.start + block[1], feature.start + block[1] + block[0] ) for block in blocks ] )
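+    # Illustrative example: a feature at 100-200 with intervals 100-130 and
+    # 170-200 yields block_sizes [ 30, 30 ], block_starts [ 0, 70 ] and
+    # appends blocks [ (100, 130), (170, 200) ] to the payload.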
+
+    # Add filter data to payload.
+    for col in filter_cols:
+        if col == "Score":
+            if feature.score == 'nan':
+                payload.append( feature.score )
+            else:
+                try:
+                    f = float( feature.score )
+                    payload.append( f )
+                except:
+                    payload.append( feature.score )
+        elif col in feature.attributes:
+            if feature.attributes[col] == 'nan':
+                payload.append( feature.attributes[col] )
+            else:
+                try:
+                    f = float( feature.attributes[col] )
+                    payload.append( f )
+                except:
+                    payload.append( feature.attributes[col] )
+        else:
+            # Dummy value.
+            payload.append( 0 )
+    return payload
diff --git a/lib/galaxy/visualization/data_providers/phyloviz/__init__.py b/lib/galaxy/visualization/data_providers/phyloviz/__init__.py
new file mode 100644
index 0000000..c848078
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/__init__.py
@@ -0,0 +1,44 @@
+
+""" Data providers code for PhyloViz """
+
+from galaxy.visualization.data_providers.basic import BaseDataProvider
+from galaxy.visualization.data_providers.phyloviz.nexusparser import Nexus_Parser
+from galaxy.visualization.data_providers.phyloviz.newickparser import Newick_Parser
+from galaxy.visualization.data_providers.phyloviz.phyloxmlparser import Phyloxml_Parser
+
+
+class PhylovizDataProvider( BaseDataProvider ):
+
+    dataset_type = "phylo"
+
+    def __init__( self, original_dataset=None ):
+        super( PhylovizDataProvider, self ).__init__( original_dataset=original_dataset )
+
+    def get_data( self, tree_index=0 ):
+        """
+        Returns trees.
+        Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus, which can store multiple trees.
+        """
+
+        file_ext = self.original_dataset.datatype.file_ext
+        file_name = self.original_dataset.file_name
+        parseMsg = None
+        jsonDicts = []
+        rval = { 'dataset_type': self.dataset_type }
+
+        if file_ext == "nhx":  # parses newick files
+            newickParser = Newick_Parser()
+            jsonDicts, parseMsg = newickParser.parseFile( file_name )
+        elif file_ext == "phyloxml":  # parses phyloXML files
+            phyloxmlParser = Phyloxml_Parser()
+            jsonDicts, parseMsg = phyloxmlParser.parseFile( file_name )
+        elif file_ext == "nex":  # parses nexus files
+            nexusParser = Nexus_Parser()
+            jsonDicts, parseMsg = nexusParser.parseFile( file_name )
+            jsonDicts = jsonDicts[ int( tree_index ) ]
+            rval["trees"] = parseMsg
+
+        rval[ "data" ] = jsonDicts
+        rval[ "msg"] = parseMsg
+
+        return rval
diff --git a/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py b/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py
new file mode 100644
index 0000000..78767d7
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py
@@ -0,0 +1,121 @@
+import json
+
+
+class Node(object):
+    """Node class of PhyloTree, which represents a CLAUDE in a phylogenetic tree"""
+    def __init__(self, nodeName, **kwargs):
+        """Creates a node and adds in the typical annotations"""
+        self.name, self.id = nodeName, kwargs.get("id", 0)
+        self.depth = kwargs.get("depth", 0)
+        self.children = []
+
+        self.isInternal = kwargs.get("isInternal", 0)
+        self.length, self.bootstrap = kwargs.get("length", 0), kwargs.get("bootstrap", None)
+        self.events = kwargs.get("events", "")
+
+        # clean up bootstrap values
+        if self.bootstrap == -1:
+            self.bootstrap = None
+
+    def addChildNode(self, child):
+        """Adds a child node to the current node"""
+        if isinstance(child, Node):
+            self.children.append(child)
+        else:
+            self.children += child
+
+    def __str__(self):
+        return self.name + " id:" + str(self.id) + ", depth: " + str(self.depth)
+
+    def toJson(self):
+        """Converts the data in the node to a dict representation of json"""
+        thisJson = {
+            "name"      : self.name,
+            "id"        : self.id,
+            "depth"     : self.depth,
+            "dist"      : self.length
+        }
+        thisJson = self.addChildrenToJson(thisJson)
+        thisJson = self.addMiscToJson(thisJson)
+        return thisJson
+
+    def addChildrenToJson(self, jsonDict):
+        """Needs a special method to addChildren, such that the key does not appear in the Jsondict when the children is empty
+        this requirement is due to the layout algorithm used by d3 layout for hiding subtree """
+        if len(self.children) > 0:
+            children = [ node.toJson() for node in self.children]
+            jsonDict["children"] = children
+        return jsonDict
+
+    def addMiscToJson(self, jsonDict):
+        """Adds other misc attributes to json if they are present"""
+        if not self.events == "":
+            jsonDict["events"] = self.events
+        if self.bootstrap is not None:
+            jsonDict["bootstrap"] = self.bootstrap
+        return jsonDict
+
+
+class PhyloTree(object):
+    """Standardized python based class to represent the phylogenetic tree parsed from different
+    phylogenetic file formats."""
+
+    def __init__(self):
+        self.root, self.rootAttr = None, {}
+        self.nodes = {}
+        self.title = None
+        self.id = 1
+
+    def addAttributesToRoot(self, attrDict):
+        """Adds attributes to root, but first we put it in a temp store and bind it with root when .toJson is called"""
+        for key, value in attrDict.items():
+            self.rootAttr[key] = value
+
+    def makeNode(self, nodeName, **kwargs):
+        """Called to make a node within PhyloTree, arbitrary kwargs can be passed to annotate nodes
+        Tracks the number of nodes via internally incremented id"""
+        kwargs["id"] = self.id
+        self.id += 1
+        return Node(nodeName, **kwargs)
+
+    def addRoot(self, root):
+        """Creates a root for phyloTree"""
+        assert isinstance(root, Node)
+        root.parent = None
+        self.root = root
+
+    def generateJsonableDict(self):
+        """Changes itself into a dictonary by recurssively calling the tojson on all its nodes. Think of it
+        as a dict in an array of dict in an array of dict and so on..."""
+        jsonTree = ""
+        if self.root:
+            assert isinstance(self.root, Node)
+            jsonTree = self.root.toJson()
+            for key, value in self.rootAttr.items():
+                # transfer temporary stored attr to root
+                jsonTree[key] = value
+        else:
+            raise Exception("Root is not assigned!")
+        return jsonTree
+
+
+class Base_Parser(object):
+    """Base parsers contain all the methods to handle phylogeny tree creation and
+    converting the data to json that all parsers should have"""
+
+    def __init__(self):
+        self.phyloTrees = []
+
+    def parseFile(self, filePath):
+        """Base method that all phylogeny file parser should have"""
+        raise Exception("Base method for phylogeny file parsers is not implemented")
+
+    def toJson(self, jsonDict):
+        """Convenience method to get a json string from a python json dict"""
+        return json.dumps(jsonDict)
+
+    def _writeJsonToFile(self, filepath, json):
+        """Writes the file out to the system"""
+        f = open(filepath, "w")
+        f.writelines(json)
+        f.close()
diff --git a/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py b/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py
new file mode 100644
index 0000000..fe17cfc
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py
@@ -0,0 +1,182 @@
+
+from .baseparser import Base_Parser, PhyloTree
+import re
+
+
+class Newick_Parser(Base_Parser):
+    """For parsing trees stored in the newick format (.nhx)
+    It is necessarily more complex because this parser is later extended by Nexus for parsing newick as well.."""
+
+    def __init__(self):
+        super(Newick_Parser, self).__init__()
+
+    def parseFile(self, filePath):
+        """Parses a newick file to obtain the string inside. Returns: jsonableDict"""
+        with open(filePath, "r") as newickFile:
+            newickString = newickFile.read()
+            newickString = newickString.replace("\n", "").replace("\r", "")
+            return [self.parseData(newickString)], "Success"
+
+    def parseData(self, newickString):
+        """To be called on a newickString directly to parse it. Returns: jsonableDict"""
+        return self._parseNewickToJson(newickString)
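+    # Minimal usage sketch (hypothetical input, not from the original code):
+    #   parser = Newick_Parser()
+    #   tree_dict = parser.parseData( "(A:0.1,B:0.2)root;" )
+    # tree_dict is then a jsonable dict rooted at "root" with children A and B.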
+
+    def _parseNewickToJson(self, newickString, treeName=None, nameMap=None):
+        """parses a newick representation of a tree into a PhyloTree data structure,
+        which can be easily converted to json"""
+        self.phyloTree = PhyloTree()
+        newickString = self.cleanNewickString(newickString)
+        if nameMap:
+            newickString = self._mapName(newickString, nameMap)
+
+        self.phyloTree.root = self.parseNode(newickString, 0)
+        if nameMap:
+            self.phyloTree.addAttributesToRoot({"treeName": treeName})
+
+        return self.phyloTree.generateJsonableDict()
+
+    def cleanNewickString(self, rawNewick):
+        """removing semi colon, and illegal json characters (\,',") and white spaces"""
+        return re.sub(r'\s|;|\"|\'|\\', '', rawNewick)
+
+    def _makeNodesFromString(self, string, depth):
+        """elements separated by comma could be empty"""
+
+        if string.find("(") != -1:
+            raise Exception("Tree is not well form, location: " + string)
+
+        childrenString = string.split(",")
+        childrenNodes = []
+
+        for childString in childrenString:
+            if len(childString) == 0:
+                continue
+            nodeInfo = childString.split(":")
+            name, length, bootstrap = "", None, -1
+            if len(nodeInfo) == 2:  # has length info
+                length = nodeInfo[1]
+                # checking for bootstrap values
+                name = nodeInfo[0]
+                try:    # Nexus may put the bootstrap value in the name position
+                    name = float(name)
+                    if 0 <= name <= 1:
+                        bootstrap = name
+                    elif 1 <= name <= 100:
+                        bootstrap = name / 100
+                    name = ""
+                except ValueError:
+                    name = nodeInfo[0]
+            else:
+                name = nodeInfo[0]      # string only contains name
+            node = self.phyloTree.makeNode(name, length=length, depth=depth, bootstrap=bootstrap)
+            childrenNodes += [node]
+        return childrenNodes
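+    # E.g. (illustrative): "A:0.1" becomes a node named "A" with length 0.1,
+    # while "0.95:0.2" has no name, so 0.95 is read as a bootstrap value and
+    # an unnamed node with length 0.2 is created.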
+
+    def _mapName(self, newickString, nameMap):
+        """
+        Necessary to replace names of terms inside the nexus representation.
+        Also, it's here because Mailaud's doesn't deal with id_strings outside of quotes (" ")
+        """
+        newString = ""
+        start = 0
+        end = 0
+
+        for i in range(len(newickString)):
+            if newickString[i] == "(" or newickString[i] == ",":
+                if re.match(r"[,(]", newickString[i + 1:]):
+                    continue
+                else:
+                    end = i + 1
+                    # i now refers to the starting position of the term to be replaced,
+                    # we will next find j which is the ending pos of the term
+                    for j in range(i + 1, len(newickString)):
+                        enclosingSymbol = newickString[j]   # the immediate symbol after a comma or left bracket, which denotes the end of a term
+                        if enclosingSymbol == ")" or enclosingSymbol == ":" or enclosingSymbol == ",":
+                            termToReplace = newickString[end:j]
+
+                            newString += newickString[start : end] + nameMap[termToReplace]  # + "'"  "'" +
+                            start = j
+                            break
+
+        newString += newickString[start:]
+        return newString
+
+    def parseNode(self, string, depth):
+        """
+        Recursive method for parsing a newick string; works by stripping the string down into substrings
+        of newick contained within brackets, which are used to call itself.
+
+        Eg ... ( A, B, (D, E)C, F, G ) ...
+
+        We will make the preceding nodes A, B first, then the internal node C and its children D, E,
+        and finally the succeeding nodes F, G
+        """
+
+        # Base case where there is only an empty string
+        if string == "":
+            return
+        # Base case where it's only an internal clade
+        if string.find("(") == -1:
+            return self._makeNodesFromString(string, depth)
+
+        nodes = []      # nodes refer to the nodes on this level
+        start = 0
+        lenOfPreceedingInternalNodeString = 0
+        bracketStack = []
+
+        for j in range(len(string)):
+            if string[j] == "(":    # finding the positions of all the open brackets
+                bracketStack.append(j)
+                continue
+            if string[j] == ")":    # finding the positions of all the closed brackets to extract claude
+                i = bracketStack.pop()
+
+                if len(bracketStack) == 0:  # is child of current node
+
+                    InternalNode = None
+
+                    # First flat call to make nodes of the same depth but from the preceding string.
+                    startSubstring = string[start + lenOfPreceedingInternalNodeString: i]
+                    preceedingNodes = self._makeNodesFromString(startSubstring, depth)
+                    nodes += preceedingNodes
+
+                    # Then we will try to see if the substring has an internal node first; make it, then make the nodes preceding and succeeding it.
+                    if j + 1 < len(string):
+                        stringRightOfBracket = string[j + 1:]      # Eg. for '(b:0.4,a:0.3)c:0.3', stringRightOfBracket = 'c:0.3'
+                        match = re.search(r"[\)\,\(]", stringRightOfBracket)
+                        if match:
+                            indexOfNextSymbol = match.start()
+                            stringRepOfInternalNode = stringRightOfBracket[:indexOfNextSymbol]
+                            internalNodes = self._makeNodesFromString( stringRepOfInternalNode, depth)
+                            if len(internalNodes) > 0:
+                                InternalNode = internalNodes[0]
+                            lenOfPreceedingInternalNodeString = len(stringRepOfInternalNode)
+                        else:   # sometimes the node can be the last element of a string
+                            InternalNode = self._makeNodesFromString(string[j + 1:], depth)[0]
+                            lenOfPreceedingInternalNodeString = len(string) - j
+                    if InternalNode is None:       # creating a generic node if it is unnamed
+                        InternalNode = self.phyloTree.makeNode( "", depth=depth, isInternal=True )  # "internal-" + str(depth)
+                        lenOfPreceedingInternalNodeString = 0
+
+                    # recursive call to make the internal clade
+                    childSubString = string[ i + 1 : j ]
+                    InternalNode.addChildNode(self.parseNode(childSubString, depth + 1))
+
+                    nodes.append(InternalNode)  # we append the internal node later to preserve order
+
+                    start = j + 1
+                continue
+
+        if depth == 0:    # if it's the root node, we do nothing about it and return
+            return nodes[0]
+
+        # Adding last most set of children
+        endString = string[start:]
+        if string[start - 1] == ")":  # if the symbol belongs to an internal node which is created previously, then we remove it from the string left to parse
+            match = re.search(r"[\)\,\(]", endString)
+            if match:
+                endOfNodeName = start + match.start() + 1
+                endString = string[endOfNodeName:]
+                nodes += self._makeNodesFromString(endString, depth)
+
+        return nodes
diff --git a/lib/galaxy/visualization/data_providers/phyloviz/nexusparser.py b/lib/galaxy/visualization/data_providers/phyloviz/nexusparser.py
new file mode 100644
index 0000000..f18c2d2
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/nexusparser.py
@@ -0,0 +1,102 @@
+from __future__ import with_statement
+from newickparser import Newick_Parser
+import re
+
+MAX_READLINES = 200000
+
+
+class Nexus_Parser(Newick_Parser):
+
+    def __init__(self):
+        super(Nexus_Parser, self).__init__()
+
+    def parseFile(self, filePath):
+        """passes a file and extracts its Nexus content."""
+        return self.parseNexus(filePath)
+
+    def parseNexus(self, filename):
+        """ Nexus data is stored in blocks between a line starting with begin and another line starting with end;
+        Commends inside square brackets are to be ignored,
+        For more information: http://wiki.christophchamp.com/index.php/NEXUS_file_format
+        Nexus can store multiple trees
+        """
+
+        with open( filename, "rt") as nex_file:
+            nexlines = nex_file.readlines()
+
+        rowCount = 0
+        inTreeBlock = False         # sentinel to check if we are in a tree block
+        intranslateBlock = False    # sentinel to check if we are in the translate region of the tree. Stores synonyms of the labellings
+        self.inCommentBlock = False
+        self.nameMapping = None       # stores mapping representation used in nexus format
+        treeNames = []
+
+        for line in nexlines:
+            line = line.replace(";\n", "")
+            lline = line.lower()
+
+            if rowCount > MAX_READLINES or (not nex_file):
+                break
+            rowCount += 1
+            # We are only interested in the tree block.
+            if "begin" in lline and "tree" in lline and not inTreeBlock:
+                inTreeBlock = True
+                continue
+            if inTreeBlock and "end" in lline[:3]:
+                inTreeBlock, currPhyloTree = False, None
+                continue
+
+            if inTreeBlock:
+
+                if "title" in lline:        # Adding title to the tree
+                    continue
+
+                if "translate" in lline:
+                    intranslateBlock = True
+                    self.nameMapping = {}
+                    continue
+
+                if intranslateBlock:
+                    mappingLine = self.splitLinebyWhitespaces(line)
+                    key, value = mappingLine[1], mappingLine[2].replace(",", "").replace("'", "")    # replacing illegal json characters
+                    self.nameMapping[key] = value
+
+                # Extracting newick Trees
+                if "tree" in lline:
+                    intranslateBlock = False
+
+                    treeLineCols = self.splitLinebyWhitespaces(line)
+                    treeName, newick = treeLineCols[2], treeLineCols[-1]
+
+                    if newick == "":    # Empty lines can be found in tree blocks
+                        continue
+
+                    currPhyloTree = self._parseNewickToJson(newick, treeName, nameMap=self.nameMapping)
+
+                    self.phyloTrees.append(currPhyloTree)
+                    treeIndex = len(self.phyloTrees) - 1
+                    treeNames.append( (treeName, treeIndex) )    # appending name of tree, and its index
+                    continue
+
+        return self.phyloTrees, treeNames
+
+    def splitLinebyWhitespaces(self, line):
+        """replace tabs and write spaces to a single write space, so we can properly split it."""
+        return re.split(r"\s+", line)
+
+    def checkComments(self, line):
+        """Check to see if the line/lines is a comment."""
+        if not self.inCommentBlock:
+            if "[" in line:
+                if "]" not in line:
+                    self.inCommentBlock = True
+                else:
+                    return "Nextline"   # need to move on to the nextline after getting out of comment
+        else:
+            if "]" in line:
+                if line.rfind("[") > line.rfind("]"):
+                    pass                # a comment block is closed but another is open.
+                else:
+                    self.inCommentBlock = False
+                    return "Nextline"   # need to move on to the nextline after getting out of comment
+        return ""
diff --git a/lib/galaxy/visualization/data_providers/phyloviz/phyloxmlparser.py b/lib/galaxy/visualization/data_providers/phyloviz/phyloxmlparser.py
new file mode 100644
index 0000000..5b2a6a9
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/phyloxmlparser.py
@@ -0,0 +1,128 @@
+from baseparser import Base_Parser, PhyloTree, Node
+from xml.etree import ElementTree
+
+
+class Phyloxml_Parser(Base_Parser):
+    """Parses a phyloxml file into a json file that will be passed to PhyloViz for display"""
+
+    def __init__(self):
+        super(Phyloxml_Parser, self).__init__()
+        self.phyloTree = PhyloTree()
+        self.tagsOfInterest = {
+            "clade": "",
+            "name" : "name",
+            "branch_length" : "length",
+            "confidence"    : "bootstrap",
+            "events"        : "events"
+        }
+
+    def parseFile(self, filePath):
+        """passes a file and extracts its Phylogeny Tree content."""
+        phyloXmlFile = open(filePath, "r")
+
+        xmlTree = ElementTree.parse(phyloXmlFile)
+        xmlRoot = xmlTree.getroot()[0]
+        self.nameSpaceIndex = xmlRoot.tag.rfind("}") + 1  # used later by the clean tag method to remove the name space in every element.tag
+
+        phyloRoot = None
+        for child in xmlRoot:
+            childTag = self.cleanTag(child.tag)
+            if childTag == "clade":
+                phyloRoot = child
+            elif childTag == "name":
+                self.phyloTree.title = child.text
+
+        self.phyloTree.root = self.parseNode(phyloRoot, 0)
+        jsonDict = self.phyloTree.generateJsonableDict()
+        return [jsonDict], "Success"
+
+    def parseNode(self, node, depth):
+        """Parses any node within a phyloxml tree and looks out for claude, which signals the creation of
+        nodes - internal OR leaf"""
+
+        tag = self.cleanTag(node.tag)
+        if not tag == "clade":
+            return None
+        hasInnerClade = False
+
+        # peeking once for parent and once for child to check if the node is internal
+        for child in node:
+            childTag = self.cleanTag(child.tag)
+            if childTag == "clade":
+                hasInnerClade = True
+                break
+
+        if hasInnerClade:       # this node is an internal node
+            currentNode = self._makeInternalNode(node, depth=depth)
+            for child in node:
+                child = self.parseNode(child, depth + 1)
+                if isinstance(child, Node):
+                    currentNode.addChildNode(child)
+
+        else:                   # this node is a leaf node
+            currentNode = self._makeLeafNode(node, depth=depth + 1)
+
+        return currentNode
+
+    def _makeLeafNode(self, leafNode, depth=0 ):
+        """Makes leaf nodes by calling Phylotree methods"""
+        node = {}
+        for child in leafNode:
+            childTag = self.cleanTag(child.tag)
+            if childTag in self.tagsOfInterest:
+                key = self.tagsOfInterest[childTag]    # need to map phyloxml terms to ours
+                node[key] = child.text
+
+        node["depth"] = depth
+        return self.phyloTree.makeNode(self._getNodeName(leafNode), **node)
+
+    def _getNodeName(self, node, depth=-1):
+        """Gets the name of a claude. It handles the case where a taxonomy node is involved"""
+
+        def getTagFromTaxonomyNode(node):
+            """Returns the name of a taxonomy node. A taxonomy node have to be treated differently as the name
+            is embedded one level deeper"""
+            phyloxmlTaxoNames = {
+                "common_name" : "",
+                "scientific_name" : "",
+                "code"  : ""
+            }
+            for child in node:
+                childTag = self.cleanTag(child.tag)
+                if childTag in phyloxmlTaxoNames:
+                    return child.text
+            return ""
+
+        nodeName = ""
+        for child in node:
+            childTag = self.cleanTag(child.tag)
+            if childTag == "name" :
+                nodeName = child.text
+                break
+            elif childTag == "taxonomy":
+                nodeName = getTagFromTaxonomyNode(child)
+                break
+
+        return nodeName
+
+    def _makeInternalNode(self, internalNode, depth=0):
+        """ Makes an internal node from an element object that is guranteed to be a parent node.
+        Gets the value of interests like events and appends it to a custom node object that will be passed to PhyloTree to make nodes
+        """
+        node = {}
+        for child in internalNode:
+            childTag = self.cleanTag(child.tag)
+            if childTag == "clade":
+                continue
+            elif childTag in self.tagsOfInterest:
+                if childTag == "events":    # events is nested 1 more level deeper than others
+                    key, text = "events", self.cleanTag(child[0].tag)
+                else:
+                    key = self.tagsOfInterest[childTag]
+                    text = child.text
+                node[key] = text
+
+        return self.phyloTree.makeNode(self._getNodeName(internalNode, depth), **node)
+
+    def cleanTag(self, tagString):
+        return tagString[self.nameSpaceIndex:]
diff --git a/lib/galaxy/visualization/data_providers/registry.py b/lib/galaxy/visualization/data_providers/registry.py
new file mode 100644
index 0000000..4fd655f
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/registry.py
@@ -0,0 +1,113 @@
+from six import string_types
+
+from galaxy.visualization.data_providers.basic import ColumnDataProvider
+from galaxy.visualization.data_providers import genome
+from galaxy.model import NoConverterException
+from galaxy.visualization.data_providers.phyloviz import PhylovizDataProvider
+from galaxy.datatypes.tabular import Tabular, Vcf
+from galaxy.datatypes.interval import Interval, ENCODEPeak, ChromatinInteractions, Gtf, Gff, Bed
+from galaxy.datatypes.xml import Phyloxml
+from galaxy.datatypes.data import Newick, Nexus
+
+
+class DataProviderRegistry( object ):
+    """
+    Registry for data providers that enables listing and lookup.
+    """
+
+    def __init__( self ):
+        # Mapping from dataset type name to a class that can fetch data from a file of that
+        # type. First key is converted dataset type; if result is another dict, second key
+        # is original dataset type.
+        self.dataset_type_name_to_data_provider = {
+            "tabix": {
+                Vcf: genome.VcfTabixDataProvider,
+                Bed: genome.BedTabixDataProvider,
+                Gtf: genome.GtfTabixDataProvider,
+                ENCODEPeak: genome.ENCODEPeakTabixDataProvider,
+                Interval: genome.IntervalTabixDataProvider,
+                ChromatinInteractions: genome.ChromatinInteractionsTabixDataProvider,
+                "default" : genome.TabixDataProvider
+            },
+            "interval_index": genome.IntervalIndexDataProvider,
+            "bai": genome.BamDataProvider,
+            "bam": genome.SamDataProvider,
+            "bigwig": genome.BigWigDataProvider,
+            "bigbed": genome.BigBedDataProvider,
+
+            "column_with_stats": ColumnDataProvider
+        }
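+        # Lookup sketch (illustrative): a Bed dataset converted to tabix
+        # resolves via dataset_type_name_to_data_provider[ "tabix" ][ Bed ] to
+        # genome.BedTabixDataProvider; tabix datatypes without an entry fall
+        # back to the "default" provider.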
+
+    def get_data_provider( self, trans, name=None, source='data', raw=False, original_dataset=None ):
+        """
+        Returns data provider matching parameter values. For standalone data
+        sources, source parameter is ignored.
+        """
+
+        data_provider = None
+        if raw:
+            # Working with raw data.
+            if isinstance( original_dataset.datatype, Gff ):
+                data_provider_class = genome.RawGFFDataProvider
+            elif isinstance( original_dataset.datatype, Bed ):
+                data_provider_class = genome.RawBedDataProvider
+            elif isinstance( original_dataset.datatype, Vcf ):
+                data_provider_class = genome.RawVcfDataProvider
+            elif isinstance( original_dataset.datatype, Tabular ):
+                data_provider_class = ColumnDataProvider
+            elif isinstance( original_dataset.datatype, ( Nexus, Newick, Phyloxml ) ):
+                data_provider_class = PhylovizDataProvider
+
+            data_provider = data_provider_class( original_dataset=original_dataset )
+
+        else:
+            # Working with converted or standalone dataset.
+
+            if name:
+                # Provider requested by name; get from mappings.
+                value = self.dataset_type_name_to_data_provider[ name ]
+                if isinstance( value, dict ):
+                    # Get converter by dataset extension; if there is no data provider,
+                    # get the default.
+                    data_provider_class = value.get( original_dataset.datatype.__class__, value.get( "default" ) )
+                else:
+                    data_provider_class = value
+
+                # If name is the same as original dataset's type, dataset is standalone.
+                # Otherwise, a converted dataset is being used.
+                if name == original_dataset.ext:
+                    data_provider = data_provider_class( original_dataset=original_dataset )
+                else:
+                    converted_dataset = original_dataset.get_converted_dataset( trans, name )
+                    deps = original_dataset.get_converted_dataset_deps( trans, name )
+                    data_provider = data_provider_class( original_dataset=original_dataset,
+                                                         converted_dataset=converted_dataset,
+                                                         dependencies=deps )
+
+            elif original_dataset:
+                # No name, so look up a provider name from datatype's information.
+
+                # Dataset must have data sources to get data.
+                if not original_dataset.datatype.data_sources:
+                    return None
+
+                # Get data provider mapping and data provider.
+                data_provider_mapping = original_dataset.datatype.data_sources
+                if 'data_standalone' in data_provider_mapping:
+                    data_provider = self.get_data_provider( trans,
+                                                            name=data_provider_mapping[ 'data_standalone' ],
+                                                            original_dataset=original_dataset )
+                else:
+                    source_list = data_provider_mapping[ source ]
+                    if isinstance( source_list, string_types ):
+                        source_list = [ source_list ]
+
+                    # Find a valid data provider in the source list.
+                    for source in source_list:
+                        try:
+                            data_provider = self.get_data_provider( trans, name=source, original_dataset=original_dataset )
+                            break
+                        except NoConverterException:
+                            pass
+
+        return data_provider
diff --git a/lib/galaxy/visualization/genome/__init__.py b/lib/galaxy/visualization/genome/__init__.py
new file mode 100644
index 0000000..39810cc
--- /dev/null
+++ b/lib/galaxy/visualization/genome/__init__.py
@@ -0,0 +1,3 @@
+"""
+Code for Galaxy genome visualizations.
+"""
diff --git a/lib/galaxy/visualization/genomes.py b/lib/galaxy/visualization/genomes.py
new file mode 100644
index 0000000..9c73573
--- /dev/null
+++ b/lib/galaxy/visualization/genomes.py
@@ -0,0 +1,398 @@
+import logging
+import os
+import re
+import sys
+from json import loads
+
+from bx.seq.twobit import TwoBitFile
+
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+
+# FIXME: copied from tracks.py
+# Message strings returned to browser
+messages = Bunch(
+    PENDING="pending",
+    NO_DATA="no data",
+    NO_CHROMOSOME="no chromosome",
+    NO_CONVERTER="no converter",
+    NO_TOOL="no tool",
+    DATA="data",
+    ERROR="error",
+    OK="ok"
+)
+
+
+def decode_dbkey( dbkey ):
+    """ Decodes dbkey and returns tuple ( username, dbkey )"""
+    if ':' in dbkey:
+        return dbkey.split( ':' )
+    else:
+        return None, dbkey
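+# E.g. decode_dbkey( "someuser:hg19" ) -> [ "someuser", "hg19" ], while
+# decode_dbkey( "hg19" ) -> ( None, "hg19" ).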
+
+
+class GenomeRegion( object ):
+    """
+    A genomic region on an individual chromosome.
+    """
+
+    def __init__( self, chrom=None, start=0, end=0, sequence=None ):
+        self.chrom = chrom
+        self.start = int( start )
+        self.end = int( end )
+        self.sequence = sequence
+
+    def __str__( self ):
+        return self.chrom + ":" + str( self.start ) + "-" + str( self.end )
+
+    @staticmethod
+    def from_dict( obj_dict ):
+        return GenomeRegion( chrom=obj_dict[ 'chrom' ],
+                             start=obj_dict[ 'start' ],
+                             end=obj_dict[ 'end' ] )
+
+    @staticmethod
+    def from_str( obj_str ):
+        # check for gene region
+        gene_region = obj_str.split(':')
+
+        # split gene region into components
+        if (len(gene_region) == 2):
+            gene_interval = gene_region[1].split('-')
+
+            # check length
+            if (len(gene_interval) == 2):
+                return GenomeRegion(chrom=gene_region[0],
+                                    start=gene_interval[0],
+                                    end=gene_interval[1])
+
+        # return genome region instance
+        return GenomeRegion()
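+    # E.g. (illustrative): GenomeRegion.from_str( "chr1:100-200" ) yields a
+    # region with chrom "chr1", start 100, end 200; malformed strings fall
+    # through to the empty region above.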
+
+
+class Genome( object ):
+    """
+    Encapsulates information about a known genome/dbkey.
+    """
+    def __init__( self, key, description, len_file=None, twobit_file=None ):
+        self.key = key
+        self.description = description
+        self.len_file = len_file
+        self.twobit_file = twobit_file
+
+    def to_dict( self, num=None, chrom=None, low=None ):
+        """
+        Returns representation of self as a dictionary.
+        """
+
+        def check_int(s):
+            if s.isdigit():
+                return int(s)
+            else:
+                return s
+
+        def split_by_number(s):
+            return [ check_int(c) for c in re.split('([0-9]+)', s) ]
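+        # E.g. split_by_number( "chr10" ) -> [ 'chr', 10, '' ], so "chr2"
+        # sorts before "chr10" when these keys are compared below.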
+
+        #
+        # Parameter check, setting.
+        #
+        if num:
+            num = int( num )
+        else:
+            num = sys.maxsize  # just a big number
+
+        if low:
+            low = int( low )
+            if low < 0:
+                low = 0
+        else:
+            low = 0
+
+        #
+        # Get chroms data:
+        #   (a) chrom name, len;
+        #   (b) whether there are previous, next chroms;
+        #   (c) index of start chrom.
+        #
+        len_file_enumerate = enumerate( open( self.len_file ) )
+        chroms = {}
+        prev_chroms = False
+        start_index = 0
+        if chrom:
+            # Use starting chrom to start list.
+            found = False
+            count = 0
+            for line_num, line in len_file_enumerate:
+                if line.startswith("#"):
+                    continue
+                name, len = line.split("\t")
+                if found:
+                    chroms[ name ] = int( len )
+                    count += 1
+                elif name == chrom:
+                    # Found starting chrom.
+                    chroms[ name ] = int( len )
+                    count += 1
+                    found = True
+                    start_index = line_num
+                    if line_num != 0:
+                        prev_chroms = True
+                if count >= num:
+                    break
+        else:
+            # Use low to start list.
+            high = low + int( num )
+            prev_chroms = ( low != 0 )
+            start_index = low
+
+            # Read chrom data from len file.
+            for line_num, line in len_file_enumerate:
+                if line_num < low:
+                    continue
+                if line_num >= high:
+                    break
+                if line.startswith("#"):
+                    continue
+                # LEN files have format:
+                #   <chrom_name><tab><chrom_length>
+                fields = line.split("\t")
+                chroms[ fields[0] ] = int( fields[1] )
+
+        # Set flag to indicate whether there are more chroms after list.
+        next_chroms = False
+        try:
+            next(len_file_enumerate)
+            next_chroms = True
+        except:
+            # No more chroms to read.
+            pass
+
+        to_sort = [{ 'chrom': chrm, 'len': length } for chrm, length in chroms.items()]
+        to_sort.sort(key=lambda _: split_by_number(_['chrom']))
+        return {
+            'id': self.key,
+            'reference': self.twobit_file is not None,
+            'chrom_info': to_sort,
+            'prev_chroms' : prev_chroms,
+            'next_chroms' : next_chroms,
+            'start_index' : start_index
+        }
+
+
+class Genomes( object ):
+    """
+    Provides information about available genome data and methods for manipulating that data.
+    """
+
+    def __init__( self, app ):
+        self.app = app
+        # Create list of genomes from app.genome_builds
+        self.genomes = {}
+        # Store internal versions of data tables for twobit and __dbkey__
+        self._table_versions = { 'twobit': None, '__dbkeys__': None }
+        self.reload_genomes()
+
+    def reload_genomes( self ):
+        self.genomes = {}
+        # Store table versions for later
+        for table_name in self._table_versions.keys():
+            table = self.app.tool_data_tables.get( table_name, None )
+            if table is not None:
+                self._table_versions[ table_name ] = table._loaded_content_version
+
+        twobit_table = self.app.tool_data_tables.get( 'twobit', None )
+        twobit_fields = {}
+        if twobit_table is None:
+            # Add genome data (twobit files) to genomes, directly from twobit.loc
+            try:
+                for line in open( os.path.join( self.app.config.tool_data_path, "twobit.loc" ) ):
+                    if line.startswith("#"):
+                        continue
+                    val = line.split()
+                    if len( val ) == 2:
+                        key, path = val
+                        twobit_fields[ key ] = path
+            except IOError as e:
+                # Thrown if twobit.loc does not exist.
+                log.exception( "Error reading twobit.loc: %s", e )
+        for key, description in self.app.genome_builds.get_genome_build_names():
+            self.genomes[ key ] = Genome( key, description )
+            # Add len files to genomes.
+            self.genomes[ key ].len_file = self.app.genome_builds.get_chrom_info( key )[0]
+            if self.genomes[ key ].len_file:
+                if not os.path.exists( self.genomes[ key ].len_file ):
+                    self.genomes[ key ].len_file = None
+            # Add genome data (twobit files) to genomes.
+            if twobit_table is not None:
+                self.genomes[ key ].twobit_file = twobit_table.get_entry( 'value', key, 'path', default=None )
+            elif key in twobit_fields:
+                self.genomes[ key ].twobit_file = twobit_fields[ key ]
+
+    def check_and_reload( self ):
+        # Check if tables have been modified, if so reload
+        for table_name, table_version in self._table_versions.items():
+            table = self.app.tool_data_tables.get( table_name, None )
+            if table is not None and not table.is_current_version( table_version ):
+                return self.reload_genomes()
+
+    def get_build( self, dbkey ):
+        """ Returns build for the given key. """
+        self.check_and_reload()
+        rval = None
+        if dbkey in self.genomes:
+            rval = self.genomes[ dbkey ]
+        return rval
+
+    def get_dbkeys( self, trans, chrom_info=False, **kwd ):
+        """ Returns all known dbkeys. If chrom_info is True, only dbkeys with
+            chromosome lengths are returned. """
+        self.check_and_reload()
+        dbkeys = []
+
+        # Add user's custom keys to dbkeys.
+        user_keys_dict = {}
+        user = trans.get_user()
+        if user:
+            if 'dbkeys' in user.preferences:
+                user_keys_dict = loads( user.preferences[ 'dbkeys' ] )
+            dbkeys.extend( [ (attributes[ 'name' ], key ) for key, attributes in user_keys_dict.items() ] )
+
+        # Add app keys to dbkeys.
+
+        # If chrom_info is True, only include keys with len files (which contain chromosome info).
+        if chrom_info:
+            def filter_fn(b):
+                return b.len_file is not None
+        else:
+            def filter_fn(b):
+                return True
+
+        dbkeys.extend( [ ( genome.description, genome.key ) for key, genome in self.genomes.items() if filter_fn( genome ) ] )
+
+        return dbkeys
+
+    def chroms( self, trans, dbkey=None, num=None, chrom=None, low=None ):
+        """
+        Returns a naturally sorted list of chroms/contigs for a given dbkey.
+        Use either chrom or low to specify the starting chrom in the return list.
+        """
+        self.check_and_reload()
+        # If there is no dbkey owner, default to current user.
+        dbkey_owner, dbkey = decode_dbkey( dbkey )
+        if dbkey_owner:
+            dbkey_user = trans.sa_session.query( trans.app.model.User ).filter_by( username=dbkey_owner ).first()
+        else:
+            dbkey_user = trans.user
+
+        #
+        # Get/create genome object.
+        #
+        genome = None
+        twobit_file = None
+
+        # Look first in user's custom builds.
+        if dbkey_user and 'dbkeys' in dbkey_user.preferences:
+            user_keys = loads( dbkey_user.preferences['dbkeys'] )
+            if dbkey in user_keys:
+                dbkey_attributes = user_keys[ dbkey ]
+                dbkey_name = dbkey_attributes[ 'name' ]
+
+                # If there's a fasta for genome, convert to 2bit for later use.
+                if 'fasta' in dbkey_attributes:
+                    build_fasta = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dbkey_attributes[ 'fasta' ] )
+                    len_file = build_fasta.get_converted_dataset( trans, 'len' ).file_name
+                    build_fasta.get_converted_dataset( trans, 'twobit' )
+                    # HACK: set twobit_file to True rather than a file name because
+                    # get_converted_dataset returns None during conversion even though
+                    # there will eventually be a twobit file available for genome.
+                    twobit_file = True
+                # Backwards compatibility: look for len file directly.
+                elif 'len' in dbkey_attributes:
+                    len_file = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( user_keys[ dbkey ][ 'len' ] ).file_name
+                if len_file:
+                    genome = Genome( dbkey, dbkey_name, len_file=len_file, twobit_file=twobit_file )
+
+        # Look in history and system builds.
+        if not genome:
+            # Look in history for chromosome len file.
+            len_ds = trans.db_dataset_for( dbkey )
+            if len_ds:
+                genome = Genome( dbkey, dbkey_name, len_file=len_ds.file_name )
+            # Look in system builds.
+            elif dbkey in self.genomes:
+                genome = self.genomes[ dbkey ]
+
+        # Set up return value or log exception if genome not found for key.
+        rval = None
+        if genome:
+            rval = genome.to_dict( num=num, chrom=chrom, low=low )
+        else:
+            log.error( 'genome not found for key: %s', dbkey )
+
+        return rval
+
+    def has_reference_data( self, dbkey, dbkey_owner=None ):
+        """
+        Returns true if there is reference data for the specified dbkey. If dbkey is custom,
+        dbkey_owner is needed to determine if there is reference data.
+        """
+        self.check_and_reload()
+        # Look for key in built-in builds.
+        if dbkey in self.genomes and self.genomes[ dbkey ].twobit_file:
+            # There is built-in reference data.
+            return True
+
+        # Look for key in owner's custom builds.
+        if dbkey_owner and 'dbkeys' in dbkey_owner.preferences:
+            user_keys = loads( dbkey_owner.preferences[ 'dbkeys' ] )
+            if dbkey in user_keys:
+                dbkey_attributes = user_keys[ dbkey ]
+                if 'fasta' in dbkey_attributes:
+                    # Fasta + converted datasets can provide reference data.
+                    return True
+
+        return False
+
+    def reference( self, trans, dbkey, chrom, low, high ):
+        """
+        Return reference data for a build.
+        """
+        self.check_and_reload()
+        # If there is no dbkey owner, default to current user.
+        dbkey_owner, dbkey = decode_dbkey( dbkey )
+        if dbkey_owner:
+            dbkey_user = trans.sa_session.query( trans.app.model.User ).filter_by( username=dbkey_owner ).first()
+        else:
+            dbkey_user = trans.user
+
+        if not self.has_reference_data( dbkey, dbkey_user ):
+            return None
+
+        #
+        # Get twobit file with reference data.
+        #
+        twobit_file_name = None
+        if dbkey in self.genomes:
+            # Built-in twobit.
+            twobit_file_name = self.genomes[dbkey].twobit_file
+        else:
+            user_keys = loads( dbkey_user.preferences['dbkeys'] )
+            dbkey_attributes = user_keys[ dbkey ]
+            fasta_dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dbkey_attributes[ 'fasta' ] )
+            msg = fasta_dataset.convert_dataset( trans, 'twobit' )
+            if msg:
+                return msg
+            else:
+                twobit_dataset = fasta_dataset.get_converted_dataset( trans, 'twobit' )
+                twobit_file_name = twobit_dataset.file_name
+
+        # Read and return reference data.
+        try:
+            with open( twobit_file_name, 'rb' ) as twobit_fh:
+                twobit = TwoBitFile( twobit_fh )
+                if chrom in twobit:
+                    seq_data = twobit[chrom].get( int(low), int(high) )
+                    return GenomeRegion( chrom=chrom, start=low, end=high, sequence=seq_data )
+        except IOError:
+            return None
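+
+# Illustrative usage (an assumption, not from this changeset): a controller
+# would typically fetch reference sequence roughly like:
+#
+#   region = trans.app.genomes.reference( trans, dbkey='hg19', chrom='chr1',
+#                                         low=1000, high=2000 )
+#   if region is not None:
+#       sequence = region.sequence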
diff --git a/lib/galaxy/visualization/plugins/__init__.py b/lib/galaxy/visualization/plugins/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/visualization/plugins/config_parser.py b/lib/galaxy/visualization/plugins/config_parser.py
new file mode 100644
index 0000000..8898a4b
--- /dev/null
+++ b/lib/galaxy/visualization/plugins/config_parser.py
@@ -0,0 +1,461 @@
+from six import string_types
+
+import galaxy.model
+from galaxy import util
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class ParsingException( ValueError ):
+    """
+    An exception class for errors that occur during parsing of the visualizations
+    framework configuration XML file.
+    """
+    pass
+
+
+class VisualizationsConfigParser( object ):
+    """
+    Class that parses a visualizations configuration XML file.
+
+    Each visualization will get the following info:
+        - how to load a visualization:
+            -- how to find the proper template
+            -- how to convert query string into DB models
+        - when/how to generate a link to the visualization
+            -- what provides the data
+            -- what information needs to be added to the query string
+    """
+    #: what are the allowed 'entry_point_type' for entry_point elements
+    ALLOWED_ENTRY_POINT_TYPES = ['mako', 'html', 'script']
+    #: what are the allowed href targets when clicking on a visualization anchor
+    VALID_RENDER_TARGETS = [ 'galaxy_main', '_top', '_blank' ]
+
+    def __init__( self ):
+        # what parsers should be used for sub-components
+        self.data_source_parser = DataSourceParser()
+        self.param_parser = ParamParser()
+        self.param_modifier_parser = ParamModifierParser()
+
+    def parse_file( self, xml_filepath ):
+        """
+        Parse the given XML file for visualizations data.
+        :returns: visualization config dictionary
+        """
+        xml_tree = util.parse_xml( xml_filepath )
+        visualization = self.parse_visualization( xml_tree.getroot() )
+        return visualization
+
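+    # An illustrative (assumed) minimal config file that parse_file() would
+    # accept; the element details are handled by the methods and component
+    # parsers below:
+    #
+    #   <visualization name="scatterplot">
+    #       <data_sources>
+    #           <data_source>
+    #               <model_class>HistoryDatasetAssociation</model_class>
+    #               <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+    #               <to_param param_attr="id">dataset_id</to_param>
+    #           </data_source>
+    #       </data_sources>
+    #       <params>
+    #           <param type="hda" required="true">dataset_id</param>
+    #       </params>
+    #       <entry_point entry_point_type="mako">scatterplot.mako</entry_point>
+    #   </visualization>
+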
+    def parse_visualization( self, xml_tree ):
+        """
+        Parse the template, name, and any data_sources and params from the
+        given `xml_tree` for a visualization.
+        """
+        returned = {}
+
+        # main tag specifies plugin type (visualization or
+        # interactive_environment).
+        returned[ 'plugin_type' ] = xml_tree.tag
+
+        # a text display name for end user links
+        returned[ 'name' ] = xml_tree.attrib.get( 'name', None )
+        if not returned[ 'name' ]:
+            raise ParsingException( 'visualization needs a name attribute' )
+
+        # allow manually turning off a vis by checking for a disabled property
+        if 'disabled' in xml_tree.attrib:
+            log.info( 'Visualizations plugin disabled: %s. Skipping...', returned[ 'name' ] )
+            return None
+
+        # record the embeddable flag - defaults to False
+        #   this is a design-by-contract promise that the visualization can be rendered inside another page,
+        #   often by rendering only a DOM fragment. Since this is an advanced feature that requires a bit more
+        #   work from the creator's side, it defaults to False.
+        returned[ 'embeddable' ] = False
+        if 'embeddable' in xml_tree.attrib:
+            returned[ 'embeddable' ] = xml_tree.attrib.get( 'embeddable', False ) == 'true'
+
+        # a (for now) text description of what the visualization does
+        description = xml_tree.find( 'description' )
+        returned[ 'description' ] = description.text.strip() if description is not None else None
+
+        # data_sources are the kinds of objects/data associated with the visualization
+        #   e.g. views on HDAs can use this to find out what visualizations are applicable to them
+        data_sources = []
+        data_sources_confs = xml_tree.find( 'data_sources' )
+        for data_source_conf in data_sources_confs.findall( 'data_source' ):
+            data_source = self.data_source_parser.parse( data_source_conf )
+            if data_source:
+                data_sources.append( data_source )
+        # at least one valid data_source is required
+        if not data_sources:
+            raise ParsingException( 'No valid data_sources for visualization' )
+        returned[ 'data_sources' ] = data_sources
+
+        # TODO: this is effectively required due to param_confs.findall( 'param' )
+        # parameters spell out how to convert query string params into resources and data
+        #   that will be parsed, fetched, etc. and passed to the template
+        # list or dict? ordered or not?
+        params = {}
+        param_confs = xml_tree.find( 'params' )
+        param_elements = param_confs.findall( 'param' ) if param_confs is not None else []
+        for param_conf in param_elements:
+            param = self.param_parser.parse( param_conf )
+            if param:
+                params[ param_conf.text ] = param
+        # params are not required
+        if params:
+            returned[ 'params' ] = params
+
+        # param modifiers provide extra information for other params (e.g. hda_ldda='hda' -> dataset_id is an hda id)
+        # store these modifiers in a 2-level dictionary: { target_param: { param_modifier_key: param_modifier_data } }
+        # ugh - wish we didn't need these
+        param_modifiers = {}
+        param_modifier_elements = param_confs.findall( 'param_modifier' ) if param_confs is not None else []
+        for param_modifier_conf in param_modifier_elements:
+            param_modifier = self.param_modifier_parser.parse( param_modifier_conf )
+            # param modifiers are keyed by the param they modify (for faster lookup)
+            target_param = param_modifier_conf.get( 'modifies' )
+            param_modifier_key = param_modifier_conf.text
+            if param_modifier and target_param in params:
+                # multiple params can modify a single, other param,
+                #   so store in a sub-dict, initializing if this is the first
+                if target_param not in param_modifiers:
+                    param_modifiers[ target_param ] = {}
+                param_modifiers[ target_param ][ param_modifier_key ] = param_modifier
+
+        # not required
+        if param_modifiers:
+            returned[ 'param_modifiers' ] = param_modifiers
+
+        # entry_point: how will this plugin render/load? mako, script tag, or static html file?
+        returned[ 'entry_point' ] = self.parse_entry_point( xml_tree )
+
+        # link_text: the string to use for the text of any links/anchors to this visualization
+        link_text = xml_tree.find( 'link_text' )
+        if link_text is not None and link_text.text:
+            returned[ 'link_text' ] = link_text.text
+
+        # render_target: where in the browser to open the rendered visualization
+        # defaults to: galaxy_main
+        render_target = xml_tree.find( 'render_target' )
+        if ( render_target is not None and render_target.text and
+                render_target.text in self.VALID_RENDER_TARGETS ):
+            returned[ 'render_target' ] = render_target.text
+        else:
+            returned[ 'render_target' ] = 'galaxy_main'
+        # consider unifying the above into its own element and parsing method
+
+        return returned
+
+    def parse_entry_point( self, xml_tree ):
+        """
+        Parse the config file for an appropriate entry point: a mako template, a script tag,
+        or an html file, returning as dictionary with: `type`, `file`, and `attr`ibutes of
+        the element.
+        """
+        # (older) mako-only syntax: the template to use in rendering the visualization
+        template = xml_tree.find( 'template' )
+        if template is not None and template.text:
+            log.info( 'template syntax is deprecated: use entry_point instead' )
+            return {
+                'type' : 'mako',
+                'file' : template.text,
+                'attr' : {}
+            }
+
+        # need one of the two: (the deprecated) template or entry_point
+        entry_point = xml_tree.find( 'entry_point' )
+        if entry_point is None:
+            raise ParsingException( 'template or entry_point required' )
+
+        # parse by returning a sub-object and simply copying any attributes unused here
+        entry_point_attrib = entry_point.attrib.copy()
+        entry_point_type = entry_point_attrib.pop( 'entry_point_type', 'mako' )
+        if entry_point_type not in self.ALLOWED_ENTRY_POINT_TYPES:
+            raise ParsingException( 'Unknown entry_point type: ' + entry_point_type )
+        return {
+            'type' : entry_point_type,
+            'file' : entry_point.text,
+            'attr' : entry_point_attrib
+        }
+
+
+# -------------------------------------------------------------------
+class DataSourceParser( object ):
+    """
+    Component class of VisualizationsConfigParser that parses data_source elements
+    within visualization elements.
+
+    data_sources are (in the extreme) any object that can be used to produce
+    data for the visualization to consume (e.g. HDAs, LDDAs, Jobs, Users, etc.).
+    There can be more than one data_source associated with a visualization.
+    """
+    # these are the allowed classes to associate visualizations with (as strings)
+    #   any model_class element not in this list will raise a ParsingException
+    ALLOWED_MODEL_CLASSES = [
+        'Visualization',
+        'HistoryDatasetAssociation',
+        'LibraryDatasetDatasetAssociation'
+    ]
+    ATTRIBUTE_SPLIT_CHAR = '.'
+    # these are the allowed object attributes to use in data source tests
+    #   any attribute element not in this list will raise a ParsingException
+    ALLOWED_DATA_SOURCE_ATTRIBUTES = [
+        'datatype'
+    ]
+
+    def parse( self, xml_tree ):
+        """
+        Return a visualization data_source dictionary parsed from the given
+        XML element.
+        """
+        returned = {}
+        # model_class (required, only one) - look up and convert model_class to actual galaxy model class
+        model_class = self.parse_model_class( xml_tree.find( 'model_class' ) )
+        if not model_class:
+            raise ParsingException( 'data_source needs a model class' )
+        returned[ 'model_class' ] = model_class
+
+        # tests (optional, 0 or more) - data for boolean test: 'is the visualization usable by this object?'
+        # when no tests are given, default to isinstance( object, model_class )
+        returned[ 'tests' ] = self.parse_tests( xml_tree.findall( 'test' ) )
+
+        # to_params (optional, 0 or more) - tells the registry to set certain params based on the model_class, tests
+        returned[ 'to_params' ] = {}
+        to_params = self.parse_to_params( xml_tree.findall( 'to_param' ) )
+        if to_params:
+            returned[ 'to_params' ] = to_params
+
+        return returned
+
+    def parse_model_class( self, xml_tree ):
+        """
+        Convert xml model_class element to a galaxy model class
+        (or None if model class is not found).
+
+        This element is required and only the first element is used.
+        The model_class string must be in ALLOWED_MODEL_CLASSES.
+        """
+        if xml_tree is None or not xml_tree.text:
+            raise ParsingException( 'data_source entry requires a model_class' )
+
+        if xml_tree.text not in self.ALLOWED_MODEL_CLASSES:
+            # log.debug( 'available data_source model_classes: %s' %( str( self.ALLOWED_MODEL_CLASSES ) ) )
+            raise ParsingException( 'Invalid data_source model_class: %s' % ( xml_tree.text ) )
+
+        # look up the class in the galaxy.model module (None if not found there)
+        model_class = getattr( galaxy.model, xml_tree.text, None )
+        return model_class
+
+    def _build_getattr_lambda( self, attr_name_list ):
+        """
+        Recursively builds a compound lambda function of getattr's
+        from the attribute names given in `attr_name_list`.
+        """
+        if len( attr_name_list ) == 0:
+            # identity - if list is empty, return object itself
+            return lambda o: o
+
+        next_attr_name = attr_name_list[-1]
+        if len( attr_name_list ) == 1:
+            # recursive base case
+            return lambda o: getattr( o, next_attr_name )
+
+        # recursive case
+        return lambda o: getattr( self._build_getattr_lambda( attr_name_list[:-1] )( o ), next_attr_name )
+
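+    # For example (illustrative): _build_getattr_lambda( [ 'datatype', 'file_ext' ] )
+    # builds a function equivalent to
+    #
+    #   lambda o: getattr( getattr( o, 'datatype' ), 'file_ext' )
+    #
+    # so a dotted test_attr like "datatype.file_ext" can be evaluated against
+    # a dataset object.
+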
+    def parse_tests( self, xml_tree_list ):
+        """
+        Returns a list of test dictionaries that the registry can use
+        against a given object to determine if the visualization can be
+        used with the object.
+        """
+        # tests should NOT include expensive operations: reading file data, running jobs, etc.
+        # do as much here as possible to reduce the overhead of seeing if a visualization is applicable
+        # currently tests are or'd only (could be and'd or made into compound boolean tests)
+        tests = []
+        if not xml_tree_list:
+            return tests
+
+        for test_elem in xml_tree_list:
+            test_type = test_elem.get( 'type', 'eq' )
+            test_result = test_elem.text.strip() if test_elem.text else None
+            if not test_type or not test_result:
+                log.warning( 'Skipping test. Needs both a type attribute and a text node to be parsed: %s, %s',
+                             test_type, test_elem.text )
+                continue
+
+            # test_attr can be a dot separated chain of object attributes (e.g. dataset.datatype) - convert to list
+            # TODO: too dangerous - constrain these to some allowed list
+            # TODO: does this err if no test_attr - it should...
+            test_attr = test_elem.get( 'test_attr' )
+            test_attr = test_attr.split( self.ATTRIBUTE_SPLIT_CHAR ) if isinstance( test_attr, string_types ) else []
+            # log.debug( 'test_type: %s, test_attr: %s, test_result: %s', test_type, test_attr, test_result )
+
+            # build a lambda function that gets the desired attribute to test
+            getter = self._build_getattr_lambda( test_attr )
+            # result type should tell the registry how to convert the result before the test
+            test_result_type = test_elem.get( 'result_type', 'string' )
+
+            # test functions should be sent an object to test, and the parsed result expected from the test
+            # NOTE: bind the current getter as a default argument so that each
+            # test_fn keeps its own getter (otherwise all the closures built in
+            # this loop would share the last getter assigned)
+            if test_type == 'isinstance':
+                # is test_attr attribute an instance of result
+                # TODO: wish we could take this further but it would mean passing in the datatypes_registry
+                def test_fn(o, result, getter=getter):
+                    return isinstance( getter( o ), result )
+
+            elif test_type == 'has_dataprovider':
+                # does the object itself have a datatype attr and does that datatype have the given dataprovider
+                def test_fn(o, result, getter=getter):
+                    return (hasattr( getter( o ), 'has_dataprovider' ) and
+                            getter( o ).has_dataprovider( result ) )
+
+            elif test_type == 'has_attribute':
+                # does the object itself have attr in 'result' (no equivalence checking)
+                def test_fn(o, result, getter=getter):
+                    return hasattr( getter( o ), result )
+
+            elif test_type == 'not_eq':
+                def test_fn(o, result, getter=getter):
+                    return str( getter( o ) ) != result
+
+            else:
+                # default to simple (string) equivalence (coercing the test_attr to a string)
+                def test_fn(o, result, getter=getter):
+                    return str( getter( o ) ) == result
+
+            tests.append({
+                'type'          : test_type,
+                'result'        : test_result,
+                'result_type'   : test_result_type,
+                'fn'            : test_fn
+            })
+
+        return tests
+
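+    # Illustrative test elements (assumed syntax, matching the parsing above):
+    #
+    #   <test type="isinstance" test_attr="datatype" result_type="datatype">interval.Interval</test>
+    #   <test type="has_dataprovider" test_attr="datatype">genomic-region</test>
+    #   <test test_attr="ext">bam</test>  <!-- default 'eq': string equality -->
+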
+    def parse_to_params( self, xml_tree_list ):
+        """
+        Given a list of `to_param` elements, returns a dictionary that allows
+        the registry to convert the data_source into one or more appropriate
+        params for the visualization.
+        """
+        to_param_dict = {}
+        if not xml_tree_list:
+            return to_param_dict
+
+        for element in xml_tree_list:
+            # param_name required
+            param_name = element.text
+            if not param_name:
+                raise ParsingException( 'to_param requires text (the param name)' )
+
+            param = {}
+            # assign is a shortcut param_attr that assigns a value to a param (as text)
+            assign = element.get( 'assign' )
+            if assign is not None:
+                param[ 'assign' ] = assign
+
+            # param_attr is the attribute of the object (that the visualization will be applied to)
+            #   that should be converted into a query param (e.g. param_attr="id" -> dataset_id)
+            # TODO:?? use the build attr getter here?
+            # simple (1 lvl) attrs for now
+            param_attr = element.get( 'param_attr' )
+            if param_attr is not None:
+                param[ 'param_attr' ] = param_attr
+            # element must have either param_attr or assign? what about no params (the object itself)
+            if not param_attr and not assign:
+                raise ParsingException( 'to_param requires either assign or param_attr attributes: %s' % ( param_name ) )
+
+            # TODO: consider making the to_param name an attribute (param="hda_ldda") and the text what would
+            #           be used for the conversion - this would allow CDATA values to be passed
+            # <to_param param="json" type="assign"><![CDATA[{ "one": 1, "two": 2 }]]></to_param>
+
+            if param:
+                to_param_dict[ param_name ] = param
+
+        return to_param_dict
+
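+    # Illustrative to_param elements (assumed syntax, per the parsing above):
+    #
+    #   <to_param param_attr="id">dataset_id</to_param>
+    #   <to_param assign="hda">hda_ldda</to_param>
+    #
+    # would parse to: { 'dataset_id': { 'param_attr': 'id' },
+    #                   'hda_ldda': { 'assign': 'hda' } }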
+
+class ParamParser( object ):
+    """
+    Component class of VisualizationsConfigParser that parses param elements
+    within visualization elements.
+
+    params are parameters that will be parsed (based on their `type`, etc.)
+    and sent to the visualization template by controllers.visualization.render.
+    """
+    DEFAULT_PARAM_TYPE = 'str'
+
+    def parse( self, xml_tree ):
+        """
+        Parse a visualization parameter from the given `xml_tree`.
+        """
+        returned = {}
+
+        # don't store key, just check it
+        param_key = xml_tree.text
+        if not param_key:
+            raise ParsingException( 'Param entry requires text' )
+
+        returned[ 'type' ] = self.parse_param_type( xml_tree )
+
+        # is the parameter required in the template and,
+        #   if not, what is the default value?
+        required = xml_tree.get( 'required' ) == "true"
+        returned[ 'required' ] = required
+        if not required:
+            # default defaults to None
+            default = None
+            if 'default' in xml_tree.attrib:
+                default = xml_tree.get( 'default' )
+                # convert default based on param_type here
+            returned[ 'default' ] = default
+
+        # does the param have to be within a list of certain values
+        # NOTE: the interpretation of this list is deferred till parsing and based on param type
+        #   e.g. it could be 'val in constrain_to', or 'constrain_to is min, max for number', etc.
+        # TODO: currently unused
+        constrain_to = xml_tree.get( 'constrain_to' )
+        if constrain_to:
+            returned[ 'constrain_to' ] = constrain_to.split( ',' )
+
+        # is the param a comma-separated-value list?
+        returned[ 'csv' ] = xml_tree.get( 'csv' ) == "true"
+
+        # remap keys in the params/query string to the var names used in the template
+        var_name_in_template = xml_tree.get( 'var_name_in_template' )
+        if var_name_in_template:
+            returned[ 'var_name_in_template' ] = var_name_in_template
+
+        return returned
+
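+    # For example (illustrative), the element
+    #   <param type="hda" required="true" var_name_in_template="hda">dataset_id</param>
+    # parses to:
+    #   { 'type': 'hda', 'required': True, 'csv': False, 'var_name_in_template': 'hda' }
+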
+    def parse_param_type( self, xml_tree ):
+        """
+        Parse a param type from the given `xml_tree`.
+        """
+        # default to string as param_type
+        param_type = xml_tree.get( 'type' ) or self.DEFAULT_PARAM_TYPE
+        # TODO: set parsers and validaters, convert here
+        return param_type
+
+
+class ParamModifierParser( ParamParser ):
+    """
+    Component class of VisualizationsConfigParser that parses param_modifier
+    elements within visualization elements.
+
+    param_modifiers are params from a dictionary (such as a query string)
+    that are not standalone but modify the parsing/conversion of a separate
+    (normal) param (e.g. 'hda_ldda' can equal 'hda' or 'ldda' and control
+    whether a visualization's 'dataset_id' param is for an HDA or LDDA).
+    """
+    def parse( self, element ):
+        # modifies is required
+        modifies = element.get( 'modifies' )
+        if not modifies:
+            raise ParsingException( 'param_modifier entry requires a target param key (attribute "modifies")' )
+        returned = super( ParamModifierParser, self ).parse( element )
+        return returned
diff --git a/lib/galaxy/visualization/plugins/plugin.py b/lib/galaxy/visualization/plugins/plugin.py
new file mode 100644
index 0000000..9a485fe
--- /dev/null
+++ b/lib/galaxy/visualization/plugins/plugin.py
@@ -0,0 +1,342 @@
+"""
+Visualization plugins: instantiate/deserialize data and models
+from a query string and render a webpage based on those data.
+"""
+
+import os
+import copy
+
+import mako.lookup
+
+from galaxy.managers import api_keys
+from galaxy.web.base import pluginframework
+from galaxy.web.base import interactive_environments
+
+from galaxy.visualization.plugins import resource_parser
+from galaxy.visualization.plugins import utils
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# =============================================================================
+# TODO:
+# move mixins to facade'd objects
+# allow config to override static/template settings
+# allow config detection in alternate places: galaxy-visualization.xml
+# =============================================================================
+class ServesStaticPluginMixin( object ):
+    """
+    An object that serves static files from the server.
+    """
+
+    def _set_up_static_plugin( self, **kwargs ):
+        """
+        Detect and set up static paths and urls if needed.
+        """
+        # TODO: allow config override
+        self.serves_static = False
+        if self._is_static_plugin():
+            self.static_path = self._build_static_path()
+            self.static_url = self._build_static_url()
+            self.serves_static = True
+        return self.serves_static
+
+    def _is_static_plugin( self ):
+        """
+        Detect whether this plugin should serve static resources.
+        """
+        return os.path.isdir( self._build_static_path() )
+
+    def _build_static_path( self ):
+        return os.path.join( self.path, 'static' )
+
+    def _build_static_url( self ):
+        return '/'.join([ self.base_url, 'static' ])
+
+
+# =============================================================================
+class ServesTemplatesPluginMixin( object ):
+    """
+    An object that renders (mako) template files from the server.
+    """
+
+    #: default number of templates to search for plugin template lookup
+    DEFAULT_TEMPLATE_COLLECTION_SIZE = 10
+    #: default encoding of plugin templates
+    DEFAULT_TEMPLATE_ENCODING = 'utf-8'
+
+    def _set_up_template_plugin( self, template_cache_dir, additional_template_paths=None, **kwargs ):
+        """
+        Detect and set up template paths if the plugin serves templates.
+        """
+        self.serves_templates = False
+        if self._is_template_plugin():
+            self.template_path = self._build_template_path()
+            self.template_lookup = self._build_template_lookup( template_cache_dir,
+                                                                additional_template_paths=additional_template_paths )
+            self.serves_templates = True
+        return self.serves_templates
+
+    def _is_template_plugin( self ):
+        return os.path.isdir( self._build_template_path() )
+
+    def _build_template_path( self ):
+        return os.path.join( self.path, 'templates' )
+
+    def _build_template_lookup( self, template_cache_dir, additional_template_paths=None,
+                                collection_size=DEFAULT_TEMPLATE_COLLECTION_SIZE, output_encoding=DEFAULT_TEMPLATE_ENCODING ):
+        """
+        Build a mako template filename lookup for the plugin.
+        """
+        template_lookup_paths = self.template_path
+        if additional_template_paths:
+            template_lookup_paths = [ template_lookup_paths ] + additional_template_paths
+        return mako.lookup.TemplateLookup(
+            directories=template_lookup_paths,
+            module_directory=template_cache_dir,
+            collection_size=collection_size,
+            output_encoding=output_encoding )
+
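+    # Illustrative use of the lookup built above (standard mako API; the cache
+    # directory and template names are assumptions):
+    #
+    #   lookup = self._build_template_lookup( '/tmp/mako_cache' )
+    #   template = lookup.get_template( 'my_plugin.mako' )
+    #   html = template.render( **render_vars )
+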
+
+# =============================================================================
+class VisualizationPlugin( pluginframework.Plugin, ServesStaticPluginMixin, ServesTemplatesPluginMixin ):
+    """
+    A plugin that instantiates resources, serves static files, and uses mako
+    templates to render web pages.
+    """
+    # AKA: MakoVisualizationPlugin
+    # config[ 'entry_point' ][ 'type' ] == 'mako'
+    # TODO: concept/name collision between plugin config and visualization config
+
+    def __init__( self, app, path, name, config, context=None, **kwargs ):
+        super( VisualizationPlugin, self ).__init__( app, path, name, config, context=context, **kwargs )
+        context = context or {}
+        self.config = config
+
+        base_url = context.get( 'base_url', '' )
+        self.base_url = '/'.join([ base_url, self.name ]) if base_url else self.name
+        self._set_up_static_plugin()
+
+        template_cache_dir = context.get( 'template_cache_dir', None )
+        additional_template_paths = context.get( 'additional_template_paths', [] )
+        self._set_up_template_plugin( template_cache_dir, additional_template_paths=additional_template_paths )
+
+        self.resource_parser = resource_parser.ResourceParser( app )
+
+    def render( self, trans=None, embedded=None, **kwargs ):
+        """
+        Render and return the text of the non-saved plugin webpage/fragment.
+        """
+        # not saved - no existing config
+        config = {}
+        # set up render vars based on plugin.config and kwargs
+        render_vars = self._build_render_vars( config, trans=trans, **kwargs )
+        return self._render( render_vars, trans=trans, embedded=embedded )
+
+    def render_saved( self, visualization, trans=None, embedded=None, **kwargs ):
+        """
+        Render and return the text of the plugin webpage/fragment using the
+        config/data of a saved visualization.
+        """
+        config = self._get_saved_visualization_config( visualization, **kwargs )
+        # pass the saved visualization config for parsing into render vars
+        render_vars = self._build_render_vars( config, trans=trans, **kwargs )
+        # update any values that were loaded from the saved Visualization
+        render_vars.update( dict(
+            title=visualization.latest_revision.title,
+            saved_visualization=visualization,
+            visualization_id=trans.security.encode_id( visualization.id ),
+        ))
+        return self._render( render_vars, trans=trans, embedded=embedded )
+
+    def _get_saved_visualization_config( self, visualization, revision=None, **kwargs ):
+        """
+        Return the config of a saved visualization and revision.
+
+        If no revision given, default to latest revision.
+        """
+        # TODO: allow loading a specific revision - should be part of UsesVisualization
+        return copy.copy( visualization.latest_revision.config )
+
+    # ---- non-public
+    def _build_render_vars( self, config, trans=None, **kwargs ):
+        """
+        Build all the variables that will be passed into the renderer.
+        """
+        render_vars = {}
+        # Meta variables passed to the template/renderer to describe the visualization being rendered.
+        render_vars.update(
+            visualization_name=self.name,
+            visualization_display_name=self.config[ 'name' ],
+            title=kwargs.get( 'title', None ),
+            saved_visualization=None,
+            visualization_id=None,
+            # NOTE: passing *unparsed* kwargs as query
+            query=kwargs,
+        )
+        # config based on existing or kwargs
+        config = self._build_config( config, trans=trans, **kwargs )
+        render_vars[ 'config' ] = config
+        # further parse config to resources (models, etc.) used in template based on registry config
+        resources = self._config_to_resources( trans, config )
+        render_vars.update( resources )
+
+        return render_vars
+
+    def _build_config( self, config, trans=None, **kwargs ):
+        """
+        Build the configuration for this new/saved visualization by combining
+        any existing config and the kwargs (gen. from the url query).
+        """
+        # first, pull from any existing config
+        if config:
+            config = copy.copy( config )
+        else:
+            config = {}
+        # then, overwrite with keys/values from kwargs (gen. a query string)
+        config_from_kwargs = self._kwargs_to_config( trans, kwargs )
+        config.update( config_from_kwargs )
+        # to object format for easier querying
+        config = utils.OpenObject( **config )
+        return config
+
+    # TODO: the difference between config & resources is unclear in this section - is it needed?
+    def _kwargs_to_config( self, trans, kwargs ):
+        """
+        Given a kwargs dict (gen. a query string dict from a controller action), parse
+        and return any key/value pairs found in the plugin's `params` section.
+        """
+        expected_params = self.config.get( 'params', {} )
+        config = self.resource_parser.parse_config( trans, expected_params, kwargs )
+        return config
+
+    def _config_to_resources( self, trans, config ):
+        """
+        Instantiate/deserialize the resources (HDAs, LDDAs, etc.) given in a
+        visualization config into models/variables a visualization renderer can use.
+        """
+        expected_params = self.config.get( 'params', {} )
+        param_modifiers = self.config.get( 'param_modifiers', {} )
+        resources = self.resource_parser.parse_parameter_dictionary( trans, expected_params, config, param_modifiers )
+        return resources
+
+    def _render( self, render_vars, trans=None, embedded=None, **kwargs ):
+        """
+        Render the visualization via Mako and the plugin's template file.
+        """
+        render_vars[ 'embedded' ] = self._parse_embedded( embedded )
+        # NOTE: (mako specific) vars is a dictionary for shared data in the template
+        #   this feels hacky to me but it's what mako recommends:
+        #   http://docs.makotemplates.org/en/latest/runtime.html
+        render_vars.update( vars={} )
+        template_filename = self.config[ 'entry_point' ][ 'file' ]
+        return trans.fill_template( template_filename, template_lookup=self.template_lookup, **render_vars )
+
+    def _parse_embedded( self, embedded ):
+        """
+        Parse information on dimensions, readonly, etc. from the embedded query val.
+        """
+        # as is for now
+        return embedded
+
+
+# =============================================================================
+class InteractiveEnvironmentPlugin( VisualizationPlugin ):
+    """
+    Serves web-based REPLs such as IPython and RStudio.
+    """
+    INTENV_REQUEST_FACTORY = interactive_environments.InteractiveEnvironmentRequest
+
+    def __init__( self, app, path, name, config, context=None, **kwargs ):
+        # TODO: this is a hack until we can get int envs separated from the vis reg and into their own framework
+        context = context or {}
+        context[ 'base_url' ] = 'interactive_environments'
+        super( InteractiveEnvironmentPlugin, self ).__init__( app, path, name, config, context=context, **kwargs )
+
+    def _render( self, render_vars, trans=None, embedded=None, **kwargs ):
+        """
+        Override to add interactive environment specific template vars.
+        """
+        render_vars[ 'embedded' ] = self._parse_embedded( embedded )
+        # NOTE: (mako specific) vars is a dictionary for shared data in the template
+        #   this feels hacky to me but it's what mako recommends:
+        #   http://docs.makotemplates.org/en/latest/runtime.html
+        render_vars.update( vars={} )
+        # No longer needed, but left around for a few releases while ipython-galaxy
+        # as an external visualization plugin is deprecated in favor of the core
+        # interactive environment plugin.
+        if 'get_api_key' not in render_vars:
+            def get_api_key():
+                return api_keys.ApiKeyManager( trans.app ).get_or_create_api_key( trans.user )
+            render_vars[ 'get_api_key' ] = get_api_key
+
+        if 'plugin_path' not in render_vars:
+            render_vars[ 'plugin_path' ] = os.path.abspath( self.path )
+
+        if self.config.get( 'plugin_type', 'visualization' ) == "interactive_environment":
+            request = self.INTENV_REQUEST_FACTORY( trans, self )
+            render_vars[ "ie_request" ] = request
+
+        template_filename = self.config[ 'entry_point' ][ 'file' ]
+        return trans.fill_template( template_filename, template_lookup=self.template_lookup, **render_vars )
+
+
+# =============================================================================
+class ScriptVisualizationPlugin( VisualizationPlugin ):
+    """
+    A visualization plugin that starts by loading a single (js) script.
+
+    The script is loaded into a pre-defined mako template:
+        `config/plugins/visualizations/common/templates/script_entry_point.mako`
+    """
+    MAKO_TEMPLATE = 'script_entry_point.mako'
+
+    def _is_template_plugin( self ):
+        """
+        Override to always yield true since this plugin type always uses the
+        pre-determined mako template.
+        """
+        return True
+
+    def _render( self, render_vars, trans=None, embedded=None, **kwargs ):
+        """
+        Override to add script attributes and point mako at the script entry point
+        template.
+        """
+        render_vars[ 'embedded' ] = self._parse_embedded( embedded )
+        render_vars.update( vars={} )
+        render_vars.update({
+            "script_tag_attributes" : self.config[ 'entry_point' ][ 'attr' ]
+        })
+        template_filename = self.MAKO_TEMPLATE
+        return trans.fill_template( template_filename, template_lookup=self.template_lookup, **render_vars )
+
+
+# =============================================================================
+class StaticFileVisualizationPlugin( VisualizationPlugin ):
+    """
+    A visualization plugin that starts by loading a static html file defined
+    in the visualization's config file.
+    """
+    # TODO: these are not embeddable by their nature - update config
+    # TODO: should do render/render_saved here since most of the calc done there is unneeded in this case
+    def _render( self, render_vars, trans=None, embedded=None, **kwargs ):
+        """
+        Render the static file simply by reading and returning it.
+        """
+        render_vars[ 'embedded' ] = self._parse_embedded( embedded )
+        render_vars.update( vars={} )
+
+        static_file_path = self.config[ 'entry_point' ][ 'file' ]
+        static_file_path = os.path.join( self.path, static_file_path )
+        with open( static_file_path, 'r' ) as static_file:
+            return static_file.read()
+
+
+# # =============================================================================
+# class PyGeneratedVisualizationPlugin( VisualizationPlugin ):
+#     """
+#     Selectively import one module and call a specified fn within it to generate the
+#     HTML served.
+#     """
+#     pass
diff --git a/lib/galaxy/visualization/plugins/registry.py b/lib/galaxy/visualization/plugins/registry.py
new file mode 100644
index 0000000..e94d430
--- /dev/null
+++ b/lib/galaxy/visualization/plugins/registry.py
@@ -0,0 +1,266 @@
+"""
+The lower level of the visualization framework, which does three main things:
+    - associate visualizations with objects
+    - create urls to visualizations based on some target object(s)
+    - unpack a query string into the desired objects needed for rendering
+"""
+import os
+import weakref
+
+from galaxy.web import url_for
+import galaxy.exceptions
+
+from galaxy.web.base import pluginframework
+from galaxy.visualization.plugins import config_parser
+from galaxy.visualization.plugins import plugin as vis_plugins
+from galaxy.visualization.plugins import utils as vis_utils
+
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# -------------------------------------------------------------------
+class VisualizationsRegistry( pluginframework.PageServingPluginManager ):
+    """
+    Main responsibilities are:
+        - discovering visualization plugins in the filesystem
+        - testing if an object has a visualization that can be applied to it
+        - generating a link to controllers.visualization.render with
+            the appropriate params
+        - validating and parsing params into resources (based on a context)
+            used in the visualization template
+    """
+    NAMED_ROUTE = 'visualization_plugin'
+    DEFAULT_BASE_URL = 'visualizations'
+    # these should be handled somewhat differently - and be passed onto their resp. methods in ctrl.visualization
+    # TODO: change/remove if/when they can be updated to use this system
+    #: any built in visualizations that have their own render method in ctrls/visualization
+    BUILT_IN_VISUALIZATIONS = [
+        'trackster',
+        'circster',
+        'sweepster',
+        'phyloviz'
+    ]
+
+    def __str__( self ):
+        return self.__class__.__name__
+
+    def __init__( self, app, skip_bad_plugins=True, **kwargs ):
+        self.app = weakref.ref( app )
+        self.config_parser = config_parser.VisualizationsConfigParser()
+        super( VisualizationsRegistry, self ).__init__( app, skip_bad_plugins=skip_bad_plugins, **kwargs )
+
+    def is_plugin( self, plugin_path ):
+        """
+        Determines whether the given filesystem path contains a plugin.
+
+        Overrides the base class check: the directory must contain a 'config'
+        sub-directory holding an XML config file named after the plugin directory.
+
+        :type   plugin_path:    string
+        :param  plugin_path:    relative or absolute filesystem path to the
+            potential plugin
+        :rtype:                 bool
+        :returns:               True if the path contains a plugin
+        """
+        # plugin_path must be a directory, have a config dir, and a config file matching the plugin dir name
+        if not os.path.isdir( plugin_path ):
+            # super won't work here - different criteria
+            return False
+        if 'config' not in os.listdir( plugin_path ):
+            return False
+        expected_config_filename = '%s.xml' % ( os.path.split( plugin_path )[1] )
+        if not os.path.isfile( os.path.join( plugin_path, 'config', expected_config_filename ) ):
+            return False
+        return True
+
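+    # An illustrative plugin layout that satisfies is_plugin() (names assumed):
+    #
+    #   config/plugins/visualizations/scatterplot/
+    #       config/scatterplot.xml   <- required; must match the directory name
+    #       static/                  <- optional; served if present
+    #       templates/               <- optional mako templates
+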
+    def load_plugin( self, plugin_path ):
+        """
+        Create the visualization plugin object, parse its configuration file,
+        and return it.
+
+        :type   plugin_path:    string
+        :param  plugin_path:    relative or absolute filesystem path to the plugin
+        :rtype:                 ``VisualizationPlugin``
+        :returns:               the loaded plugin
+        """
+        plugin_name = os.path.split( plugin_path )[1]
+        # TODO: this is the standard/older way to config
+        config_file = os.path.join( plugin_path, 'config', ( plugin_name + '.xml' ) )
+        config = self.config_parser.parse_file( config_file )
+        # config file is required, otherwise skip this visualization
+        if not config:
+            return None
+        plugin = self._build_plugin( plugin_name, plugin_path, config )
+        return plugin
+
+    def _build_plugin( self, plugin_name, plugin_path, config ):
+        # TODO: as builder not factory
+
+        # default class
+        plugin_class = vis_plugins.VisualizationPlugin
+        # ipython, etc
+        if config[ 'plugin_type' ] == 'interactive_environment':
+            plugin_class = vis_plugins.InteractiveEnvironmentPlugin
+        # js only
+        elif config[ 'entry_point' ][ 'type' ] == 'script':
+            plugin_class = vis_plugins.ScriptVisualizationPlugin
+        # from a static file (html, etc)
+        elif config[ 'entry_point' ][ 'type' ] == 'html':
+            plugin_class = vis_plugins.StaticFileVisualizationPlugin
+
+        plugin = plugin_class( self.app(), plugin_path, plugin_name, config, context=dict(
+            base_url=self.base_url,
+            template_cache_dir=self.template_cache_dir,
+            additional_template_paths=self.additional_template_paths
+        ))
+        return plugin
+
+    def get_plugin( self, key ):
+        """
+        Wrap to throw error if plugin not in registry.
+        """
+        if key not in self.plugins:
+            raise galaxy.exceptions.ObjectNotFound( 'Unknown or invalid visualization: ' + key )
+        return self.plugins[ key ]
+
+    # -- building links to visualizations from objects --
+    def get_visualizations( self, trans, target_object ):
+        """
+        Get the names of visualizations usable on the `target_object` and
+        the urls to call in order to render the visualizations.
+        """
+        # TODO:?? a list of objects? YAGNI?
+        applicable_visualizations = []
+        for vis_name in self.plugins:
+            url_data = self.get_visualization( trans, vis_name, target_object )
+            if url_data:
+                applicable_visualizations.append( url_data )
+        return applicable_visualizations
+
+    def get_visualization( self, trans, visualization_name, target_object ):
+        """
+        Return data to build a url to the visualization with the given
+        `visualization_name` if it's applicable to `target_object` or
+        `None` if it's not.
+        """
+        # log.debug( 'VisReg.get_visualization: %s, %s', visualization_name, target_object )
+        visualization = self.plugins.get( visualization_name, None )
+        if not visualization:
+            return None
+
+        data_sources = visualization.config[ 'data_sources' ]
+        for data_source in data_sources:
+            # log.debug( 'data_source: %s', data_source )
+            # currently a model class is required
+            model_class = data_source[ 'model_class' ]
+            # log.debug( '\t model_class: %s', model_class )
+            if not isinstance( target_object, model_class ):
+                continue
+            # log.debug( '\t passed model_class' )
+
+            # TODO: not true: must have test currently
+            tests = data_source[ 'tests' ]
+            if tests and not self.is_object_applicable( trans, target_object, tests ):
+                continue
+            # log.debug( '\t passed tests' )
+
+            param_data = data_source[ 'to_params' ]
+            url = self.get_visualization_url( trans, target_object, visualization, param_data )
+            display_name = visualization.config.get( 'name', None )
+            render_target = visualization.config.get( 'render_target', 'galaxy_main' )
+            embeddable = visualization.config.get( 'embeddable', False )
+            # remap some of these vars for direct use in ui.js, PopupMenu (e.g. text->html)
+            return {
+                'href'      : url,
+                'html'      : display_name,
+                'target'    : render_target,
+                'embeddable': embeddable
+            }
+
+        return None
+
+    def is_object_applicable( self, trans, target_object, data_source_tests ):
+        """
+        Run a visualization's data_source tests to find out if
+        it can be applied to the target_object.
+        """
+        # log.debug( 'is_object_applicable( self, trans, %s, %s )', target_object, data_source_tests )
+        for test in data_source_tests:
+            test_type = test[ 'type' ]
+            result_type = test[ 'result_type' ]
+            test_result = test[ 'result' ]
+            test_fn = test[ 'fn' ]
+            # log.debug( '%s %s: %s, %s, %s, %s', str( target_object ), 'is_object_applicable',
+            #           test_type, result_type, test_result, test_fn )
+
+            if test_type == 'isinstance':
+                # parse test_result based on result_type (curr: only datatype has to do this)
+                if result_type == 'datatype':
+                    # convert datatypes to their actual classes (for use with isinstance)
+                    datatype_class_name = test_result
+                    test_result = trans.app.datatypes_registry.get_datatype_class_by_name( datatype_class_name )
+                    if not test_result:
+                        # but continue (with other tests) if can't find class by that name
+                        # if self.debug:
+                        #    log.warning( 'visualizations_registry cannot find class (%s)' +
+                        #              ' for applicability test on: %s, id: %s', datatype_class_name,
+                        #              target_object, getattr( target_object, 'id', '' ) )
+                        continue
+
+            # NOTE: tests are OR'd, if any test passes - the visualization can be applied
+            if test_fn( target_object, test_result ):
+                # log.debug( '\t test passed' )
+                return True
+
+        return False
+
+    def get_visualization_url( self, trans, target_object, visualization, param_data ):
+        """
+        Generates a url for the visualization with `visualization`
+        for use with the given `target_object` with a query string built
+        from the configuration data in `param_data`.
+        """
+        # precondition: the target_object should be usable by the visualization (accrd. to data_sources)
+        # convert params using vis.data_source.to_params
+        params = self.get_url_params( trans, target_object, param_data )
+
+        # we want existing visualizations to work as normal but still be part of the registry (without mod'ing)
+        #   so generate their urls differently
+        url = None
+        if visualization.name in self.BUILT_IN_VISUALIZATIONS:
+            url = url_for( controller='visualization', action=visualization.name, **params )
+        # TODO: needs to be split off as its own registry
+        elif isinstance( visualization, vis_plugins.InteractiveEnvironmentPlugin ):
+            url = url_for( 'interactive_environment_plugin', visualization_name=visualization.name, **params )
+        else:
+            url = url_for( self.NAMED_ROUTE, visualization_name=visualization.name, **params )
+
+        # TODO:?? not sure if embedded would fit/used here? or added in client...
+        return url
+
+    def get_url_params( self, trans, target_object, param_data ):
+        """
+        Convert the applicable objects and assoc. data into a param dict
+        for a url query string to add to the url that loads the visualization.
+        """
+        params = {}
+        for to_param_name, to_param_data in param_data.items():
+            # TODO??: look into params as well? what is required, etc.
+            target_attr = to_param_data.get( 'param_attr', None )
+            assign = to_param_data.get( 'assign', None )
+            # one or the other is needed
+            # assign takes precedence (goes last, overwrites)?
+            # NOTE this is only one level
+
+            if target_attr and vis_utils.hasattr_recursive( target_object, target_attr ):
+                params[ to_param_name ] = vis_utils.getattr_recursive( target_object, target_attr )
+
+            if assign:
+                params[ to_param_name ] = assign
+
+        # NOTE!: don't expose raw ids: encode id, _id
+        # TODO: double encodes if from config
+        if params:
+            params = trans.security.encode_dict_ids( params )
+        return params
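+
+# Illustrative result (an assumption): for an HDA target and the to_params
+# sketched in config_parser, get_url_params() would return something like
+# { 'dataset_id': '<encoded id>', 'hda_ldda': 'hda' }, which
+# get_visualization_url() then turns into the visualization url's query string.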
diff --git a/lib/galaxy/visualization/plugins/resource_parser.py b/lib/galaxy/visualization/plugins/resource_parser.py
new file mode 100644
index 0000000..c808ddf
--- /dev/null
+++ b/lib/galaxy/visualization/plugins/resource_parser.py
@@ -0,0 +1,229 @@
+"""
+Deserialize Galaxy resources (hdas, lddas, datasets, genomes, etc.) from
+a dictionary of string data/ids (often from a query string).
+"""
+import json
+import logging
+import weakref
+
+import galaxy.exceptions
+import galaxy.util
+from galaxy.util import bunch
+from galaxy.managers import visualizations as visualization_manager
+from galaxy.managers import hdas as hda_manager
+
+log = logging.getLogger( __name__ )
+
+
+class ResourceParser( object ):
+    """
+    Given a parameter dictionary (often a converted query string) and a
+    configuration dictionary (curr. only VisualizationsRegistry uses this),
+    convert the entries in the parameter dictionary into resources (Galaxy
+    models, primitive types, lists of either, etc.) and return
+    in a new dictionary.
+
+    The keys used to store the new values can optionally be re-mapped to
+    new keys (e.g. dataset_id="NNN" -> hda=<HistoryDatasetAssociation>).
+    """
+    primitive_parsers = {
+        'str'   : lambda param: galaxy.util.sanitize_html.sanitize_html( param, 'utf-8' ),
+        'bool'  : lambda param: galaxy.util.string_as_bool( param ),
+        'int'   : int,
+        'float' : float,
+        # 'date'  : lambda param: ,
+        'json'  : ( lambda param: json.loads(
+            galaxy.util.sanitize_html.sanitize_html( param ) ) ),
+    }
+
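+    # For example (illustrative), using the primitive parsers above:
+    #
+    #   ResourceParser.primitive_parsers[ 'int' ]( '42' )         # -> 42
+    #   ResourceParser.primitive_parsers[ 'bool' ]( 'True' )      # -> True
+    #   ResourceParser.primitive_parsers[ 'json' ]( '{"a": 1}' )  # -> { u'a': 1 }
+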
+    def __init__( self, app, *args, **kwargs ):
+        self.app = weakref.ref( app )
+        self.managers = self._init_managers( app )
+
+    def _init_managers( self, app ):
+        return bunch.Bunch(
+            visualization=visualization_manager.VisualizationManager( app ),
+            hda=hda_manager.HDAManager( app )
+        )
+
+    def parse_parameter_dictionary( self, trans, param_config_dict, query_params, param_modifiers=None ):
+        """
+        Parse all expected params from the query dictionary `query_params`.
+
+        If param is required and not present, raises a `KeyError`.
+        """
+        # log.debug( 'parse_parameter_dictionary, query_params:\n%s', query_params )
+
+        # parse the modifiers first since they modify the params coming next
+        # TODO: this is all really for hda_ldda - which we could replace with model polymorphism
+        params_that_modify_other_params = self.parse_parameter_modifiers(
+            trans, param_modifiers, query_params )
+
+        resources = {}
+        for param_name, param_config in param_config_dict.items():
+            # optionally rename the variable returned, defaulting to the original name
+            var_name_in_template = param_config.get( 'var_name_in_template', param_name )
+
+            # if the param is present, get its value, any param modifiers for that param, and parse it into a resource
+            # use try catch here and not caller to fall back on the default value or re-raise if required
+            resource = None
+            query_val = query_params.get( param_name, None )
+            if query_val is not None:
+                try:
+                    target_param_modifiers = params_that_modify_other_params.get( param_name, None )
+                    resource = self.parse_parameter( trans, param_config,
+                                                     query_val, param_modifiers=target_param_modifiers )
+
+                except Exception as exception:
+                    if trans.debug:
+                        raise
+                    else:
+                        log.warning( 'Exception parsing visualization param from query: %s, %s, (%s) %s',
+                                     param_name, query_val, str( type( exception ) ), str( exception ) )
+                    resource = None
+
+            # here - we've either had no value in the query_params or there was a failure to parse
+            #   so: error if required, otherwise get a default (which itself defaults to None)
+            if resource is None:
+                if param_config[ 'required' ]:
+                    raise KeyError( 'required param %s not found in URL' % ( param_name ) )
+                resource = self.parse_parameter_default( trans, param_config )
+
+            resources[ var_name_in_template ] = resource
+
+        return resources
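+        # A minimal usage sketch with a hypothetical config (see parse_parameter
+        # for the supported types):
+        #   param_config_dict = { 'dbkey' : { 'type' : 'str', 'required' : False, 'default' : 'hg19' } }
+        #   parse_parameter_dictionary( trans, param_config_dict, { 'dbkey' : 'mm9' } )
+        #   # -> { 'dbkey' : 'mm9' }; with an empty query dict it falls back to { 'dbkey' : 'hg19' }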
+
+    def parse_config( self, trans, param_config_dict, query_params ):
+        """
+        Return a dict of `query_params`, parsing only JSON-serializable
+        (primitive) params. Complex params, such as models, are left as their
+        original query values. Keys in `query_params` not found in
+        `param_config_dict` will not be returned.
+        """
+        # log.debug( 'parse_config, query_params:\n%s', query_params )
+        config = {}
+        for param_name, param_config in param_config_dict.items():
+            config_val = query_params.get( param_name, None )
+            if config_val is not None and param_config[ 'type' ] in self.primitive_parsers:
+                try:
+                    config_val = self.parse_parameter( trans, param_config, config_val )
+
+                except Exception as exception:
+                    log.warning( 'Exception parsing visualization param from query: ' +
+                                 '%s, %s, (%s) %s' % ( param_name, config_val, str( type( exception ) ), str( exception ) ) )
+                    config_val = None
+
+            # here - we've either had no value in the query_params or there was a failure to parse
+            #   so: if there's a default and it's not None, add it to the config
+            if config_val is None:
+                if param_config.get( 'default', None ) is None:
+                    continue
+                config_val = self.parse_parameter_default( trans, param_config )
+
+            config[ param_name ] = config_val
+
+        return config
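+        # For illustration (hypothetical config): only primitive-typed entries survive:
+        #   parse_config( trans, { 'chrom' : { 'type' : 'str', 'default' : None } }, { 'chrom' : 'chr1' } )
+        #   # -> { 'chrom' : 'chr1' }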
+
+    # TODO: I would LOVE to rip modifiers out completely
+    def parse_parameter_modifiers( self, trans, param_modifiers, query_params ):
+        """
+        Parse and return parameters that are meant to modify other parameters,
+        be grouped with them, or are needed to successfully parse other parameters.
+        """
+        # only one level of modification - down that road lies madness
+        # parse the modifiers out of query_params first since they modify the other params coming next
+        parsed_modifiers = {}
+        if not param_modifiers:
+            return parsed_modifiers
+        # precondition: expects a two level dictionary
+        # { target_param_name -> { param_modifier_name -> { param_modifier_data }}}
+        for target_param_name, modifier_dict in param_modifiers.items():
+            parsed_modifiers[ target_param_name ] = target_modifiers = {}
+
+            for modifier_name, modifier_config in modifier_dict.items():
+                query_val = query_params.get( modifier_name, None )
+                if query_val is not None:
+                    modifier = self.parse_parameter( trans, modifier_config, query_val )
+                    target_modifiers[ modifier_name ] = modifier
+                else:
+                    # TODO: required attr?
+                    target_modifiers[ modifier_name ] = self.parse_parameter_default( trans, modifier_config )
+
+        return parsed_modifiers
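+        # Shape sketch with hypothetical values: given
+        #   param_modifiers = { 'dataset_id' : { 'hda_ldda' : { 'type' : 'str', 'default' : 'hda' } } }
+        # and query_params = { 'hda_ldda' : 'ldda' }, this returns
+        #   { 'dataset_id' : { 'hda_ldda' : 'ldda' } }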
+
+    def parse_parameter_default( self, trans, param_config ):
+        """
+        Parse any default values for the given param, defaulting the default
+        to `None`.
+        """
+        # currently, the *default* default is None, so 'default' is guaranteed to be in the dictionary
+        default = param_config[ 'default' ]
+        # if default is None, do not attempt to parse it
+        if default is None:
+            return default
+        # otherwise, parse (currently param_config['default'] is a string just like query param and needs to be parsed)
+        #   this saves us the trouble of parsing the default when the config file is read
+        #   (and adding this code to the xml parser)
+        return self.parse_parameter( trans, param_config, default )
+
+    def parse_parameter( self, trans, expected_param_data, query_param,
+                         recurse=True, param_modifiers=None ):
+        """
+        Use data in `expected_param_data` to parse `query_param` from a string into
+        a resource usable directly by a template.
+        """
+        param_type = expected_param_data.get( 'type' )
+        # constrain_to = expected_param_data.get( 'constrain_to' )
+        csv = expected_param_data.get( 'csv' )
+
+        parsed_param = None
+
+        # handle recursion for csv values
+        if csv and recurse:
+            parsed_param = []
+            query_param_list = galaxy.util.listify( query_param )
+            for single_param in query_param_list:
+                # recurse=False prevents re-splitting the individual values
+                parsed_param.append( self.parse_parameter( trans, expected_param_data, single_param, recurse=False ) )
+            return parsed_param
+
+        if param_type in self.primitive_parsers:
+            # TODO: what about param modifiers on primitives?
+            parsed_param = self.primitive_parsers[ param_type ]( query_param )
+
+        # TODO: constrain_to: this gets complicated - remove?
+
+        # db models
+        elif param_type == 'visualization':
+            # ?: is this even used anymore/anywhere?
+            decoded_visualization_id = self._decode_id( query_param )
+            parsed_param = self.managers.visualization.get_accessible( decoded_visualization_id, trans.user )
+
+        elif param_type == 'dataset':
+            decoded_dataset_id = self._decode_id( query_param )
+            parsed_param = self.managers.hda.get_accessible( decoded_dataset_id, trans.user )
+
+        elif param_type == 'hda_or_ldda':
+            encoded_dataset_id = query_param
+            # needs info from another param...
+            hda_ldda = param_modifiers.get( 'hda_ldda' )
+            if hda_ldda == 'hda':
+                decoded_dataset_id = self._decode_id( encoded_dataset_id )
+                parsed_param = self.managers.hda.get_accessible( decoded_dataset_id, trans.user )
+            else:
+                parsed_param = self.managers.ldda.get( trans, encoded_dataset_id )
+
+        # TODO: ideally this would check against a list of valid dbkeys
+        elif param_type == 'dbkey':
+            dbkey = query_param
+            parsed_param = galaxy.util.sanitize_html.sanitize_html( dbkey, 'utf-8' )
+
+        return parsed_param
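+        # csv recursion sketch: with expected_param_data = { 'type' : 'int', 'csv' : True },
+        # parse_parameter( trans, expected_param_data, '1,2,3' ) returns [ 1, 2, 3 ]
+        # (galaxy.util.listify splits on commas; each piece is parsed with recurse=False)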
+
+    def _decode_id( self, id ):
+        try:
+            return self.app().security.decode_id( str( id ) )
+        except ( ValueError, TypeError ):
+            raise galaxy.exceptions.MalformedId(
+                "Malformed id ( %s ) specified, unable to decode" % ( str( id ) ),
+                id=str( id )
+            )
diff --git a/lib/galaxy/visualization/plugins/utils.py b/lib/galaxy/visualization/plugins/utils.py
new file mode 100644
index 0000000..a6db021
--- /dev/null
+++ b/lib/galaxy/visualization/plugins/utils.py
@@ -0,0 +1,75 @@
+"""
+Utilities for visualization plugins.
+"""
+
+
+# =============================================================================
+class OpenObject( dict ):
+    # note: not a Bunch
+    # TODO: move to util.data_structures
+    """
+    A dict that allows assignment and attribute retrieval using the dot
+    operator.
+
+    If an attribute isn't contained in the dict, `None` is returned (no
+    KeyError). JSON-serializable.
+    """
+    def __getitem__( self, key ):
+        if key not in self:
+            return None
+        return super( OpenObject, self ).__getitem__( key )
+
+    def __getattr__( self, key ):
+        return self.__getitem__( key )
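+
+# For illustration:
+#   obj = OpenObject( a=1 )
+#   obj.a        # -> 1
+#   obj.missing  # -> None (no KeyError/AttributeError)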
+
+
+# ------------------------------------------------------------------- misc
+# TODO: move to utils?
+def getattr_recursive( item, attr_key, *args ):
+    """
+    Allows dot member notation in attribute name when getting an item's attribute.
+
+    NOTE: also searches dictionaries
+    """
+    using_default = len( args ) >= 1
+    default = args[0] if using_default else None
+
+    for key in attr_key.split( '.' ):
+        try:
+            if isinstance( item, dict ):
+                item = item[ key ]
+            else:
+                item = getattr( item, key )
+
+        except ( KeyError, AttributeError ):
+            if using_default:
+                return default
+            raise
+
+    return item
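+
+# For illustration:
+#   getattr_recursive( { 'a' : { 'b' : 1 } }, 'a.b' )  # -> 1
+#   getattr_recursive( {}, 'a.b', 'fallback' )         # -> 'fallback' (default used)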
+
+
+def hasattr_recursive( item, attr_key ):
+    """
+    Allows dot member notation in attribute name when checking for an item's attribute.
+
+    NOTE: also searches dictionaries
+    """
+    if '.' in attr_key:
+        attr_key, last_key = attr_key.rsplit( '.', 1 )
+        item = getattr_recursive( item, attr_key, None )
+        if item is None:
+            return False
+        attr_key = last_key
+
+    try:
+        if isinstance( item, dict ):
+            return attr_key in item
+        else:
+            return hasattr( item, attr_key )
+
+    except ( KeyError, AttributeError ):
+        return False
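+
+# For illustration:
+#   hasattr_recursive( { 'a' : { 'b' : 1 } }, 'a.b' )  # -> True
+#   hasattr_recursive( { 'a' : {} }, 'a.b' )           # -> False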
diff --git a/lib/galaxy/visualization/tracks/__init__.py b/lib/galaxy/visualization/tracks/__init__.py
new file mode 100644
index 0000000..0e006ee
--- /dev/null
+++ b/lib/galaxy/visualization/tracks/__init__.py
@@ -0,0 +1 @@
+""" Summary.py required to be in this module due to pickling. """
diff --git a/lib/galaxy/web/__init__.py b/lib/galaxy/web/__init__.py
new file mode 100644
index 0000000..a522f38
--- /dev/null
+++ b/lib/galaxy/web/__init__.py
@@ -0,0 +1,38 @@
+"""
+The Galaxy web application framework
+"""
+
+from .framework import url_for
+from .framework.base import httpexceptions
+# TODO: Make _future_* the default.
+from .framework.decorators import (
+    _future_expose_api,
+    _future_expose_api_anonymous,
+    _future_expose_api_anonymous_and_sessionless,
+    _future_expose_api_raw,
+    _future_expose_api_raw_anonymous,
+    _future_expose_api_raw_anonymous_and_sessionless,
+    error,
+    expose,
+    expose_api,
+    expose_api_anonymous,
+    expose_api_raw,
+    expose_api_raw_anonymous,
+    json,
+    json_pretty,
+    require_admin,
+    require_login
+)
+from .framework.formbuilder import (
+    form,
+    FormBuilder
+)
+
+__all__ = ('url_for', 'error', 'expose', 'json', 'json_pretty',
+           'require_admin', 'require_login', 'expose_api', 'expose_api_anonymous',
+           'expose_api_raw', 'expose_api_raw_anonymous', '_future_expose_api',
+           '_future_expose_api_anonymous', '_future_expose_api_raw',
+           '_future_expose_api_raw_anonymous',
+           '_future_expose_api_anonymous_and_sessionless',
+           '_future_expose_api_raw_anonymous_and_sessionless', 'form',
+           'FormBuilder', 'httpexceptions')
diff --git a/lib/galaxy/web/base/__init__.py b/lib/galaxy/web/base/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/web/base/controller.py b/lib/galaxy/web/base/controller.py
new file mode 100644
index 0000000..51effd2
--- /dev/null
+++ b/lib/galaxy/web/base/controller.py
@@ -0,0 +1,2261 @@
+"""
+Contains functionality needed in every web interface
+"""
+import logging
+import operator
+import re
+
+from six import string_types, text_type
+from sqlalchemy import true
+
+from paste.httpexceptions import HTTPBadRequest, HTTPInternalServerError
+from paste.httpexceptions import HTTPNotImplemented, HTTPRequestRangeNotSatisfiable
+from galaxy.exceptions import ItemAccessibilityException, ItemDeletionException, ItemOwnershipException
+from galaxy.exceptions import MessageException
+
+from galaxy import web
+from galaxy import model
+from galaxy import security
+from galaxy import util
+
+from galaxy.web import error, url_for
+from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField
+from galaxy.web.form_builder import build_select_field, HistoryField, PasswordField, WorkflowField, WorkflowMappingField
+from galaxy.workflow.modules import WorkflowModuleInjector, MissingToolException
+from galaxy.security.validate_user_input import validate_publicname
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.util.dictifiable import Dictifiable
+
+from galaxy.datatypes.interval import ChromatinInteractions
+
+from galaxy.model import ExtendedMetadata, ExtendedMetadataIndex, LibraryDatasetDatasetAssociation, HistoryDatasetAssociation
+
+from galaxy.managers import api_keys
+from galaxy.managers import tags
+from galaxy.managers import workflows
+from galaxy.managers import base as managers_base
+from galaxy.managers import users
+from galaxy.managers import configuration
+
+
+log = logging.getLogger( __name__ )
+
+# States for passing messages
+SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
+
+
+def _is_valid_slug( slug ):
+    """ Returns true if slug is valid. """
+
+    VALID_SLUG_RE = re.compile( r"^[a-z0-9\-]+$" )
+    return VALID_SLUG_RE.match( slug )
+
+
+class BaseController( object ):
+    """
+    Base class for Galaxy web application controllers.
+    """
+
+    def __init__( self, app ):
+        """Initialize an interface for application 'app'"""
+        self.app = app
+        self.sa_session = app.model.context
+        self.user_manager = users.UserManager( app )
+
+    def get_toolbox(self):
+        """Returns the application toolbox"""
+        return self.app.toolbox
+
+    def get_class( self, class_name ):
+        """ Returns the class object that a string denotes. Without this method, we'd have to do eval(<class_name>). """
+        return managers_base.get_class( class_name )
+
+    def get_object( self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ):
+        """
+        Convenience method to get a model object with the specified checks.
+        """
+        return managers_base.get_object( trans, id, class_name, check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
+
+    # this should be here - but then catching errors from sharable item controllers that *should* have
+    #   SharableItemMixin but *don't* becomes difficult
+    # def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
+    #    log.warning( 'BaseController.security_check: %s, %b, %b', str( item ), check_ownership, check_accessible )
+    #    # meant to be overridden in SharableSecurityMixin
+    #    return item
+
+    def get_user( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
+        return self.get_object( trans, id, 'User', check_ownership=False, check_accessible=False, deleted=deleted )
+
+    def get_group( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
+        return self.get_object( trans, id, 'Group', check_ownership=False, check_accessible=False, deleted=deleted )
+
+    def get_role( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
+        return self.get_object( trans, id, 'Role', check_ownership=False, check_accessible=False, deleted=deleted )
+
+    # ---- parsing query params
+    def decode_id( self, id ):
+        return managers_base.decode_id( self.app, id )
+
+    def encode_all_ids( self, trans, rval, recursive=False ):
+        """
+        Encodes all integer values in the dict rval whose keys are 'id' or end with '_id'
+
+        It might be useful to turn this into a decorator.
+        """
+        return trans.security.encode_all_ids( rval, recursive=recursive )
+
+    def parse_filter_params( self, qdict, filter_attr_key='q', filter_value_key='qv', attr_op_split_char='-' ):
+        """
+        """
+        # TODO: import DEFAULT_OP from FilterParser
+        DEFAULT_OP = 'eq'
+        if filter_attr_key not in qdict:
+            return []
+        # precondition: attrs/value pairs are in-order in the qstring
+        attrs = qdict.get( filter_attr_key )
+        if not isinstance( attrs, list ):
+            attrs = [ attrs ]
+        # ops are strings placed after the attr strings and separated by a split char (e.g. 'create_time-lt')
+        # ops are optional and default to 'eq'
+        reparsed_attrs = []
+        ops = []
+        for attr in attrs:
+            op = DEFAULT_OP
+            if attr_op_split_char in attr:
+                # note: only split on the last separator (e.g. q=community-tags-in&qv=rna yields ( 'community-tags', 'in', 'rna' ))
+                attr, op = attr.rsplit( attr_op_split_char, 1 )
+            ops.append( op )
+            reparsed_attrs.append( attr )
+        attrs = reparsed_attrs
+
+        values = qdict.get( filter_value_key, [] )
+        if not isinstance( values, list ):
+            values = [ values ]
+        # TODO: it may be more helpful to the consumer if we error on incomplete 3-tuples
+        #   (instead of relying on zip to shorten)
+        return zip( attrs, ops, values )
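+        # For illustration, a query string like
+        #   ?q=name-contains&qv=rna&q=create_time-lt&qv=2016-01-01
+        # parses to [ ( 'name', 'contains', 'rna' ), ( 'create_time', 'lt', '2016-01-01' ) ];
+        # a bare ?q=name&qv=rna defaults the op: [ ( 'name', 'eq', 'rna' ) ]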
+
+    def parse_limit_offset( self, qdict ):
+        """
+        """
+        def _parse_pos_int( i ):
+            try:
+                new_val = int( i )
+                if new_val >= 0:
+                    return new_val
+            except ( TypeError, ValueError ):
+                pass
+            return None
+
+        limit = _parse_pos_int( qdict.get( 'limit', None ) )
+        offset = _parse_pos_int( qdict.get( 'offset', None ) )
+        return ( limit, offset )
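+        # For illustration:
+        #   parse_limit_offset( { 'limit' : '50', 'offset' : '0' } )  # -> ( 50, 0 )
+        #   parse_limit_offset( { 'limit' : '-1' } )                  # -> ( None, None )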
+
+
+Root = BaseController
+
+
+class BaseUIController( BaseController ):
+
+    def get_object( self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ):
+        try:
+            return BaseController.get_object( self, trans, id, class_name,
+                                              check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
+
+        except MessageException:
+            raise       # handled in the caller
+        except:
+            log.exception( "Exception in get_object check for %s %s:" % ( class_name, str( id ) ) )
+            raise Exception( 'Server error retrieving %s id ( %s ).' % ( class_name, str( id ) ) )
+
+
+class BaseAPIController( BaseController ):
+
+    def get_object( self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None ):
+        try:
+            return BaseController.get_object( self, trans, id, class_name,
+                                              check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted )
+
+        except ItemDeletionException as e:
+            raise HTTPBadRequest( detail="Invalid %s id ( %s ) specified: %s" % ( class_name, str( id ), str( e ) ) )
+        except MessageException as e:
+            raise HTTPBadRequest( detail=e.err_msg )
+        except Exception as e:
+            log.exception( "Exception in get_object check for %s %s." % ( class_name, str( id ) ) )
+            raise HTTPInternalServerError( comment=str( e ) )
+
+    def validate_in_users_and_groups( self, trans, payload ):
+        """
+        For convenience, in_users and in_groups can be encoded IDs or emails/group names in the API.
+        """
+        def get_id( item, model_class, column ):
+            try:
+                return trans.security.decode_id( item )
+            except:
+                pass  # maybe an email/group name
+            # this will raise if the item is invalid
+            return trans.sa_session.query( model_class ).filter( column == item ).first().id
+        new_in_users = []
+        new_in_groups = []
+        invalid = []
+        for item in util.listify( payload.get( 'in_users', [] ) ):
+            try:
+                new_in_users.append( get_id( item, trans.app.model.User, trans.app.model.User.table.c.email ) )
+            except:
+                invalid.append( item )
+        for item in util.listify( payload.get( 'in_groups', [] ) ):
+            try:
+                new_in_groups.append( get_id( item, trans.app.model.Group, trans.app.model.Group.table.c.name ) )
+            except:
+                invalid.append( item )
+        if invalid:
+            msg = "The following value(s) for associated users and/or groups could not be parsed: %s." % ', '.join( invalid )
+            msg += "  Valid values are email addresses of users, names of groups, or IDs of both."
+            raise Exception( msg )
+        payload['in_users'] = map( str, new_in_users )
+        payload['in_groups'] = map( str, new_in_groups )
+
+    def not_implemented( self, trans, **kwd ):
+        raise HTTPNotImplemented()
+
+    def _parse_serialization_params( self, kwd, default_view ):
+        view = kwd.get( 'view', None )
+        keys = kwd.get( 'keys' )
+        if isinstance( keys, string_types ):
+            keys = keys.split( ',' )
+        return dict( view=view, keys=keys, default_view=default_view )
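+        # For illustration, ?view=summary&keys=id,name yields
+        #   { 'view' : 'summary', 'keys' : [ 'id', 'name' ], 'default_view' : default_view }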
+
+
+class JSAppLauncher( BaseUIController ):
+    """
+    A controller that launches JavaScript web applications.
+    """
+
+    #: path to js app template
+    JS_APP_MAKO_FILEPATH = "/js-app.mako"
+    #: window-scoped js function to call to start the app (will be passed options, bootstrapped)
+    DEFAULT_ENTRY_FN = "app"
+    #: keys used when serializing current user for bootstrapped data
+    USER_BOOTSTRAP_KEYS = ( 'id', 'email', 'username', 'is_admin', 'tags_used', 'requests',
+                            'total_disk_usage', 'nice_total_disk_usage', 'quota_percent', 'preferences' )
+
+    def __init__( self, app ):
+        super( JSAppLauncher, self ).__init__( app )
+        self.user_manager = users.UserManager( app )
+        self.user_serializer = users.CurrentUserSerializer( app )
+        self.config_serializer = configuration.ConfigSerializer( app )
+        self.admin_config_serializer = configuration.AdminConfigSerializer( app )
+
+    def _get_js_options( self, trans, root=None ):
+        """
+        Return a dictionary of session/site configuration/options to jsonify
+        and pass onto the js app.
+
+        Defaults to `config`, `user`, and the root url; `template` merges any
+        additional options on top of these.
+        """
+        root = root or web.url_for( '/' )
+        js_options = {
+            'root'      : root,
+            'user'      : self.user_serializer.serialize( trans.user, self.USER_BOOTSTRAP_KEYS, trans=trans ),
+            'config'    : self._get_site_configuration( trans )
+        }
+        return js_options
+
+    def _get_site_configuration( self, trans ):
+        """
+        Return a dictionary representing Galaxy's current configuration.
+        """
+        try:
+            serializer = self.config_serializer
+            if self.user_manager.is_admin( trans.user ):
+                serializer = self.admin_config_serializer
+            return serializer.serialize_to_view( self.app.config, view='all' )
+        except Exception as exc:
+            log.exception( exc )
+            return {}
+
+    def template( self, trans, app_name, entry_fn='app', options=None, bootstrapped_data=None, masthead=True, **additional_options ):
+        """
+        Render and return the single page mako template that starts the app.
+
+        `app_name` (string): the first portion of the webpack bundle to load as the app.
+        `entry_fn` (string): the name of the window-scope function that starts the
+            app. Defaults to 'app'.
+        `bootstrapped_data` (dict): (optional) update containing any more data
+            the app may need.
+        `masthead` (boolean): (optional, default=True) include masthead elements in
+            the initial page dom.
+        `additional_options` (kwargs): update to the options sent to the app.
+        """
+        options = options or self._get_js_options( trans )
+        options.update( additional_options )
+        return trans.fill_template(
+            self.JS_APP_MAKO_FILEPATH,
+            js_app_name=app_name,
+            js_app_entry_fn=( entry_fn or self.DEFAULT_ENTRY_FN ),
+            options=options,
+            bootstrapped=( bootstrapped_data or {} ),
+            masthead=masthead
+        )
+
+
+class Datatype( object ):
+    """Used for storing in-memory list of datatypes currently in the datatypes registry."""
+
+    def __init__( self, extension, dtype, type_extension, mimetype, display_in_upload ):
+        self.extension = extension
+        self.dtype = dtype
+        self.type_extension = type_extension
+        self.mimetype = mimetype
+        self.display_in_upload = display_in_upload
+
+#
+# -- Mixins for working with Galaxy objects. --
+#
+
+
+class CreatesUsersMixin:
+    """
+    Mixin centralizing logic for user creation between web and API controller.
+
+    Web controller handles additional features such as e-mail subscription, activation,
+    user forms, etc. API-created users are much more vanilla for the time being.
+    """
+
+    def create_user( self, trans, email, username, password ):
+        user = trans.app.model.User( email=email )
+        user.set_password_cleartext( password )
+        user.username = username
+        if trans.app.config.user_activation_on:
+            user.active = False
+        else:
+            user.active = True  # Activation is off, every new user is active by default.
+        trans.sa_session.add( user )
+        trans.sa_session.flush()
+        trans.app.security_agent.create_private_user_role( user )
+        if trans.webapp.name == 'galaxy':
+            # We set default user permissions, before we log in and set the default history permissions
+            trans.app.security_agent.user_set_default_permissions( user,
+                                                                   default_access_private=trans.app.config.new_user_dataset_access_role_default_private )
+        return user
+
+
+class CreatesApiKeysMixin:
+    """
+    Mixin centralizing logic for creating API keys for user objects.
+
+    Deprecated - please use api_keys.ApiKeyManager for new development.
+    """
+
+    def create_api_key( self, trans, user ):
+        return api_keys.ApiKeyManager( trans.app ).create_api_key( user )
+
+
+class SharableItemSecurityMixin:
+    """ Mixin for handling security for sharable items. """
+
+    def security_check( self, trans, item, check_ownership=False, check_accessible=False ):
+        """ Security checks for an item: checks if (a) user owns item or (b) item is accessible to user. """
+        return managers_base.security_check( trans, item, check_ownership=check_ownership, check_accessible=check_accessible )
+
+
+class ExportsHistoryMixin:
+
+    def serve_ready_history_export( self, trans, jeha ):
+        assert jeha.ready
+        if jeha.compressed:
+            trans.response.set_content_type( 'application/x-gzip' )
+        else:
+            trans.response.set_content_type( 'application/x-tar' )
+        disposition = 'attachment; filename="%s"' % jeha.export_name
+        trans.response.headers["Content-Disposition"] = disposition
+        return open( trans.app.object_store.get_filename( jeha.dataset ) )
+
+    def queue_history_export( self, trans, history, gzip=True, include_hidden=False, include_deleted=False ):
+        # Convert options to booleans.
+        if isinstance( gzip, string_types ):
+            gzip = ( gzip in [ 'True', 'true', 'T', 't' ] )
+        if isinstance( include_hidden, string_types ):
+            include_hidden = ( include_hidden in [ 'True', 'true', 'T', 't' ] )
+        if isinstance( include_deleted, string_types ):
+            include_deleted = ( include_deleted in [ 'True', 'true', 'T', 't' ] )
+
+        # Run job to do export.
+        history_exp_tool = trans.app.toolbox.get_tool( '__EXPORT_HISTORY__' )
+        params = {
+            'history_to_export': history,
+            'compress': gzip,
+            'include_hidden': include_hidden,
+            'include_deleted': include_deleted
+        }
+
+        history_exp_tool.execute( trans, incoming=params, history=history, set_output_hid=True )
+
+
+class ImportsHistoryMixin:
+
+    def queue_history_import( self, trans, archive_type, archive_source ):
+        # Run job to do import.
+        history_imp_tool = trans.app.toolbox.get_tool( '__IMPORT_HISTORY__' )
+        incoming = { '__ARCHIVE_SOURCE__' : archive_source, '__ARCHIVE_TYPE__' : archive_type }
+        history_imp_tool.execute( trans, incoming=incoming )
+
+
+class UsesLibraryMixin:
+
+    def get_library( self, trans, id, check_ownership=False, check_accessible=True ):
+        library = self.get_object( trans, id, 'Library' )
+        if check_accessible and not ( trans.user_is_admin() or trans.app.security_agent.can_access_library( trans.get_current_user_roles(), library ) ):
+            error( "Library is not accessible to the current user" )
+        return library
+
+
+class UsesLibraryMixinItems( SharableItemSecurityMixin ):
+
+    def get_library_folder( self, trans, id, check_ownership=False, check_accessible=True ):
+        return self.get_object( trans, id, 'LibraryFolder',
+                                check_ownership=False, check_accessible=check_accessible )
+
+    def get_library_dataset_dataset_association( self, trans, id, check_ownership=False, check_accessible=True ):
+        # Deprecated in favor of galaxy.managers.lddas.LDDAManager.get() but not
+        # reusing that exactly because of subtle differences in exception handling
+        # logic (the API controller overrides get_object to behave slightly differently).
+        return self.get_object( trans, id, 'LibraryDatasetDatasetAssociation',
+                                check_ownership=False, check_accessible=check_accessible )
+
+    def get_library_dataset( self, trans, id, check_ownership=False, check_accessible=True ):
+        return self.get_object( trans, id, 'LibraryDataset',
+                                check_ownership=False, check_accessible=check_accessible )
+
+    # TODO: it makes no sense that I can get roles from a user but not user.is_admin()
+    # def can_user_add_to_library_item( self, trans, user, item ):
+    #    if not user: return False
+    #    return (  ( user.is_admin() )
+    #           or ( trans.app.security_agent.can_add_library_item( user.all_roles(), item ) ) )
+
+    def can_current_user_add_to_library_item( self, trans, item ):
+        if not trans.user:
+            return False
+        return ( trans.user_is_admin() or
+                 trans.app.security_agent.can_add_library_item( trans.get_current_user_roles(), item ) )
+
+    def check_user_can_add_to_library_item( self, trans, item, check_accessible=True ):
+        """
+        Raise exception if user cannot add to the specified library item (i.e.
+        Folder). Can set check_accessible to False if folder was loaded with
+        this check.
+        """
+        if not trans.user:
+            return False
+
+        current_user_roles = trans.get_current_user_roles()
+        if trans.user_is_admin():
+            return True
+
+        if check_accessible:
+            if not trans.app.security_agent.can_access_library_item( current_user_roles, item, trans.user ):
+                raise ItemAccessibilityException( )
+
+        if not trans.app.security_agent.can_add_library_item( current_user_roles, item ):
+            # Slight misuse of ItemOwnershipException?
+            raise ItemOwnershipException( "User cannot add to library item." )
+
+    def copy_hda_to_library_folder( self, trans, hda, library_folder, roles=None, ldda_message='' ):
+        # PRECONDITION: permissions for this action on hda and library_folder have been checked
+        roles = roles or []
+
+        # this code was extracted from library_common.add_history_datasets_to_library
+        # TODO: refactor library_common.add_history_datasets_to_library to use this for each hda to copy
+
+        # create the new ldda and apply the folder perms to it
+        ldda = hda.to_library_dataset_dataset_association( trans, target_folder=library_folder,
+                                                           roles=roles, ldda_message=ldda_message )
+        self._apply_library_folder_permissions_to_ldda( trans, library_folder, ldda )
+        self._apply_hda_permissions_to_ldda( trans, hda, ldda )
+        # TODO:?? not really clear on how permissions are being traded here
+        #   seems like hda -> ldda permissions should be set in to_library_dataset_dataset_association
+        #   then they get reset in _apply_library_folder_permissions_to_ldda
+        #   then finally, re-applies hda -> ldda for missing actions in _apply_hda_permissions_to_ldda??
+        return ldda
+
+    def _apply_library_folder_permissions_to_ldda( self, trans, library_folder, ldda ):
+        """
+        Copy actions/roles from library folder to an ldda (and its library_dataset).
+        """
+        # PRECONDITION: permissions for this action on library_folder and ldda have been checked
+        security_agent = trans.app.security_agent
+        security_agent.copy_library_permissions( trans, library_folder, ldda )
+        security_agent.copy_library_permissions( trans, library_folder, ldda.library_dataset )
+        return security_agent.get_permissions( ldda )
+
+    def _apply_hda_permissions_to_ldda( self, trans, hda, ldda ):
+        """
+        Copy actions/roles from hda to ldda.library_dataset (and then ldda) if ldda
+        doesn't already have roles for the given action.
+        """
+        # PRECONDITION: permissions for this action on hda and ldda have been checked
+        # Make sure to apply any defined dataset permissions, allowing the permissions inherited from the
+        #   library_dataset to over-ride the same permissions on the dataset, if they exist.
+        security_agent = trans.app.security_agent
+        dataset_permissions_dict = security_agent.get_permissions( hda.dataset )
+        library_dataset = ldda.library_dataset
+        library_dataset_actions = [ permission.action for permission in library_dataset.actions ]
+
+        # except that: if DATASET_MANAGE_PERMISSIONS exists in the hda.dataset permissions,
+        #   we need to instead apply those roles to the LIBRARY_MANAGE permission to the library dataset
+        dataset_manage_permissions_action = security_agent.get_action( 'DATASET_MANAGE_PERMISSIONS' ).action
+        library_manage_permissions_action = security_agent.get_action( 'LIBRARY_MANAGE' ).action
+        # TODO: test this and remove if in loop below
+        # TODO: doesn't handle action.action
+        # if dataset_manage_permissions_action in dataset_permissions_dict:
+        #    managing_roles = dataset_permissions_dict.pop( dataset_manage_permissions_action )
+        #    dataset_permissions_dict[ library_manage_permissions_action ] = managing_roles
+
+        flush_needed = False
+        for action, dataset_permissions_roles in dataset_permissions_dict.items():
+            if isinstance( action, security.Action ):
+                action = action.action
+
+            # alter : DATASET_MANAGE_PERMISSIONS -> LIBRARY_MANAGE (see above)
+            if action == dataset_manage_permissions_action:
+                action = library_manage_permissions_action
+
+            # TODO: generalize to util.update_dict_without_overwrite
+            # add the hda actions & roles to the library_dataset
+            # NOTE: only apply an hda perm if it's NOT set in the library_dataset perms (don't overwrite)
+            if action not in library_dataset_actions:
+                for role in dataset_permissions_roles:
+                    ldps = trans.model.LibraryDatasetPermissions( action, library_dataset, role )
+                    ldps = [ ldps ] if not isinstance( ldps, list ) else ldps
+                    for ldp in ldps:
+                        trans.sa_session.add( ldp )
+                        flush_needed = True
+
+        if flush_needed:
+            trans.sa_session.flush()
+
+        # finally, apply the new library_dataset to its associated ldda (must be the same)
+        security_agent.copy_library_permissions( trans, library_dataset, ldda )
+        return security_agent.get_permissions( ldda )
+
+
+class UsesVisualizationMixin( UsesLibraryMixinItems ):
+    """
+    Mixin for controllers that use Visualization objects.
+    """
+
+    viz_types = [ "trackster" ]
+
+    def get_visualization( self, trans, id, check_ownership=True, check_accessible=False ):
+        """
+        Get a Visualization from the database by id, verifying ownership.
+        """
+        # Load visualization from database
+        try:
+            visualization = trans.sa_session.query( trans.model.Visualization ).get( trans.security.decode_id( id ) )
+        except TypeError:
+            visualization = None
+        if not visualization:
+            error( "Visualization not found" )
+        else:
+            return self.security_check( trans, visualization, check_ownership, check_accessible )
+
+    def get_visualizations_by_user( self, trans, user, order_by=None, query_only=False ):
+        """
+        Return query or query results of visualizations filtered by a user.
+
+        Set `order_by` to a column or list of columns to change the order
+        returned. Defaults to `DEFAULT_ORDER_BY`.
+        Set `query_only` to return just the query for further filtering or
+        processing.
+        """
+        # TODO: move into model (as class attr)
+        DEFAULT_ORDER_BY = [ model.Visualization.title ]
+        if not order_by:
+            order_by = DEFAULT_ORDER_BY
+        if not isinstance( order_by, list ):
+            order_by = [ order_by ]
+        query = trans.sa_session.query( model.Visualization )
+        query = query.filter( model.Visualization.user == user )
+        if order_by:
+            query = query.order_by( *order_by )
+        if query_only:
+            return query
+        return query.all()
+
+    def get_visualizations_shared_with_user( self, trans, user, order_by=None, query_only=False ):
+        """
+        Return query or query results for visualizations shared with the given user.
+
+        Set `order_by` to a column or list of columns to change the order
+        returned. Defaults to `DEFAULT_ORDER_BY`.
+        Set `query_only` to return just the query for further filtering or
+        processing.
+        """
+        DEFAULT_ORDER_BY = [ model.Visualization.title ]
+        if not order_by:
+            order_by = DEFAULT_ORDER_BY
+        if not isinstance( order_by, list ):
+            order_by = [ order_by ]
+        query = trans.sa_session.query( model.Visualization ).join( model.VisualizationUserShareAssociation )
+        query = query.filter( model.VisualizationUserShareAssociation.user_id == user.id )
+        # remove duplicates when a user shares with themselves?
+        query = query.filter( model.Visualization.user_id != user.id )
+        if order_by:
+            query = query.order_by( *order_by )
+        if query_only:
+            return query
+        return query.all()
+
+    def get_published_visualizations( self, trans, exclude_user=None, order_by=None, query_only=False ):
+        """
+        Return query or query results for published visualizations optionally excluding
+        the user in `exclude_user`.
+
+        Set `order_by` to a column or list of columns to change the order
+        returned. Defaults to `DEFAULT_ORDER_BY`.
+        Set `query_only` to return just the query for further filtering or
+        processing.
+        """
+        DEFAULT_ORDER_BY = [ model.Visualization.title ]
+        if not order_by:
+            order_by = DEFAULT_ORDER_BY
+        if not isinstance( order_by, list ):
+            order_by = [ order_by ]
+        query = trans.sa_session.query( model.Visualization )
+        query = query.filter( model.Visualization.published == true() )
+        if exclude_user:
+            query = query.filter( model.Visualization.user != exclude_user )
+        if order_by:
+            query = query.order_by( *order_by )
+        if query_only:
+            return query
+        return query.all()
+
+    # TODO: move into model (to_dict)
+    def get_visualization_summary_dict( self, visualization ):
+        """
+        Return a set of summary attributes for a visualization in dictionary form.
+        NOTE: encoding ids isn't done here; that should happen at the caller level.
+        """
+        # TODO: deleted
+        # TODO: importable
+        return {
+            'id'        : visualization.id,
+            'title'     : visualization.title,
+            'type'      : visualization.type,
+            'dbkey'     : visualization.dbkey,
+        }
+
+    def get_visualization_dict( self, visualization ):
+        """
+        Return a set of detailed attributes for a visualization in dictionary form.
+        The visualization's latest_revision is returned in its own sub-dictionary.
+        NOTE: encoding ids isn't done here; that should happen at the caller level.
+        """
+        return {
+            'model_class': 'Visualization',
+            'id'         : visualization.id,
+            'title'      : visualization.title,
+            'type'       : visualization.type,
+            'user_id'    : visualization.user.id,
+            'dbkey'      : visualization.dbkey,
+            'slug'       : visualization.slug,
+            # to_dict only the latest revision (allow older to be fetched elsewhere)
+            'latest_revision' : self.get_visualization_revision_dict( visualization.latest_revision ),
+            'revisions' : [ r.id for r in visualization.revisions ],
+        }
+
+    def get_visualization_revision_dict( self, revision ):
+        """
+        Return a set of detailed attributes for a visualization in dictionary form.
+        NOTE: encoding ids isn't done here; that should happen at the caller level.
+        """
+        return {
+            'model_class'      : 'VisualizationRevision',
+            'id'               : revision.id,
+            'visualization_id' : revision.visualization.id,
+            'title'            : revision.title,
+            'dbkey'            : revision.dbkey,
+            'config'           : revision.config,
+        }
+
+    def import_visualization( self, trans, id, user=None ):
+        """
+        Copy the visualization with the given id and associate the copy
+        with the given user (defaults to trans.user).
+
+        Raises `ItemAccessibilityException` if `user` is not passed and
+        the current user is anonymous, or if the visualization is not `importable`.
+        Raises `ItemDeletionException` if the visualization has been deleted.
+        """
+        # default to trans.user, error if anon
+        if not user:
+            if not trans.user:
+                raise ItemAccessibilityException( "You must be logged in to import Galaxy visualizations" )
+            user = trans.user
+
+        # check accessibility
+        visualization = self.get_visualization( trans, id, check_ownership=False )
+        if not visualization.importable:
+            raise ItemAccessibilityException( "The owner of this visualization has disabled imports via this link." )
+        if visualization.deleted:
+            raise ItemDeletionException( "You can't import this visualization because it has been deleted." )
+
+        # copy vis and alter title
+        # TODO: need to handle custom db keys.
+        imported_visualization = visualization.copy( user=user, title="imported: " + visualization.title )
+        trans.sa_session.add( imported_visualization )
+        trans.sa_session.flush()
+        return imported_visualization
+
+    def create_visualization( self, trans, type, title="Untitled Visualization", slug=None,
+                              dbkey=None, annotation=None, config=None, save=True ):
+        """
+        Create visualization and first revision.
+        """
+        # avoid a mutable default argument: default config to a fresh empty dict here
+        config = config or {}
+        visualization = self._create_visualization( trans, title, type, dbkey, slug, annotation, save )
+        # TODO: handle this error structure better either in _create or here
+        if isinstance( visualization, dict ):
+            err_dict = visualization
+            raise ValueError( err_dict[ 'title_err' ] or err_dict[ 'slug_err' ] )
+
+        # Create and save first visualization revision
+        revision = trans.model.VisualizationRevision( visualization=visualization, title=title,
+                                                      config=config, dbkey=dbkey )
+        visualization.latest_revision = revision
+
+        if save:
+            session = trans.sa_session
+            session.add( revision )
+            session.flush()
+
+        return visualization
+
+    def add_visualization_revision( self, trans, visualization, config, title, dbkey ):
+        """
+        Adds a new `VisualizationRevision` to the given `visualization` with
+        the given parameters and sets the parent visualization's `latest_revision`
+        to the new revision.
+        """
+        # precondition: only add new revision on owned vis's
+        # TODO:?? should we default title, dbkey, config? to which: visualization or latest_revision?
+        revision = trans.model.VisualizationRevision( visualization, title, dbkey, config )
+        visualization.latest_revision = revision
+        # TODO:?? does this automatically add revision to visualization.revisions?
+        trans.sa_session.add( revision )
+        trans.sa_session.flush()
+        return revision
+
+    def save_visualization( self, trans, config, type, id=None, title=None, dbkey=None, slug=None, annotation=None ):
+        session = trans.sa_session
+
+        # Create/get visualization.
+        if not id:
+            # Create new visualization.
+            vis = self._create_visualization( trans, title, type, dbkey, slug, annotation )
+        else:
+            decoded_id = trans.security.decode_id( id )
+            vis = session.query( trans.model.Visualization ).get( decoded_id )
+            # TODO: security check?
+
+        # Create new VisualizationRevision that will be attached to the viz
+        vis_rev = trans.model.VisualizationRevision()
+        vis_rev.visualization = vis
+        # do NOT alter the dbkey
+        vis_rev.dbkey = vis.dbkey
+        # do alter the title and config
+        vis_rev.title = title
+
+        # -- Validate config. --
+
+        if vis.type == 'trackster':
+            def unpack_track( track_dict ):
+                """ Unpack a track from its json. """
+                dataset_dict = track_dict[ 'dataset' ]
+                return {
+                    "dataset_id": trans.security.decode_id( dataset_dict['id'] ),
+                    "hda_ldda": dataset_dict.get('hda_ldda', 'hda'),
+                    "track_type": track_dict['track_type'],
+                    "prefs": track_dict['prefs'],
+                    "mode": track_dict['mode'],
+                    "filters": track_dict['filters'],
+                    "tool_state": track_dict['tool_state']
+                }
+
+            def unpack_collection( collection_json ):
+                """ Unpack a collection from its json. """
+                unpacked_drawables = []
+                drawables = collection_json[ 'drawables' ]
+                for drawable_json in drawables:
+                    if 'track_type' in drawable_json:
+                        drawable = unpack_track( drawable_json )
+                    else:
+                        drawable = unpack_collection( drawable_json )
+                    unpacked_drawables.append( drawable )
+                return {
+                    "obj_type": collection_json[ 'obj_type' ],
+                    "drawables": unpacked_drawables,
+                    "prefs": collection_json.get( 'prefs', [] ),
+                    "filters": collection_json.get( 'filters', None )
+                }
+
+            # TODO: unpack and validate bookmarks:
+            def unpack_bookmarks( bookmarks_json ):
+                return bookmarks_json
+
+            # Unpack and validate view content.
+            view_content = unpack_collection( config[ 'view' ] )
+            bookmarks = unpack_bookmarks( config[ 'bookmarks' ] )
+            vis_rev.config = { "view": view_content, "bookmarks": bookmarks }
+            # Viewport from payload
+            if 'viewport' in config:
+                chrom = config['viewport']['chrom']
+                start = config['viewport']['start']
+                end = config['viewport']['end']
+                overview = config['viewport']['overview']
+                vis_rev.config[ "viewport" ] = { 'chrom': chrom, 'start': start, 'end': end, 'overview': overview }
+        else:
+            # Default action is to save the config as is with no validation.
+            vis_rev.config = config
+
+        vis.latest_revision = vis_rev
+        session.add( vis_rev )
+        session.flush()
+        encoded_id = trans.security.encode_id( vis.id )
+        return { "vis_id": encoded_id, "url": url_for( controller='visualization', action=vis.type, id=encoded_id ) }
+
+    def get_tool_def( self, trans, hda ):
+        """ Returns definition of an interactive tool for an HDA. """
+
+        # Get dataset's job.
+        job = None
+        for job_output_assoc in hda.creating_job_associations:
+            job = job_output_assoc.job
+            break
+        if not job:
+            return None
+
+        tool = trans.app.toolbox.get_tool( job.tool_id )
+        if not tool:
+            return None
+
+        # Tool must have a Trackster configuration.
+        if not tool.trackster_conf:
+            return None
+
+        # -- Get tool definition and add input values from job. --
+        tool_dict = tool.to_dict( trans, io_details=True )
+        tool_param_values = dict( [ ( p.name, p.value ) for p in job.parameters ] )
+        tool_param_values = tool.params_from_strings( tool_param_values, trans.app, ignore_errors=True )
+
+        # Only get values for simple inputs for now.
+        inputs_dict = [ i for i in tool_dict[ 'inputs' ] if i[ 'type' ] not in [ 'data', 'hidden_data', 'conditional' ] ]
+        for t_input in inputs_dict:
+            # Add value to tool.
+            if 'name' in t_input:
+                name = t_input[ 'name' ]
+                if name in tool_param_values:
+                    value = tool_param_values[ name ]
+                    if isinstance( value, Dictifiable ):
+                        value = value.to_dict()
+                    t_input[ 'value' ] = value
+
+        return tool_dict
+
+    def get_visualization_config( self, trans, visualization ):
+        """ Returns a visualization's configuration. Only works for trackster visualizations right now. """
+        config = None
+        if visualization.type in [ 'trackster', 'genome' ]:
+            # Unpack Trackster config.
+            latest_revision = visualization.latest_revision
+            bookmarks = latest_revision.config.get( 'bookmarks', [] )
+
+            def pack_track( track_dict ):
+                dataset_id = track_dict['dataset_id']
+                hda_ldda = track_dict.get('hda_ldda', 'hda')
+                if hda_ldda == 'ldda':
+                    # HACK: need to encode library dataset ID because get_hda_or_ldda
+                    # only works for encoded datasets.
+                    dataset_id = trans.security.encode_id( dataset_id )
+                dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
+
+                try:
+                    prefs = track_dict['prefs']
+                except KeyError:
+                    prefs = {}
+
+                track_data_provider = trans.app.data_provider_registry.get_data_provider( trans,
+                                                                                          original_dataset=dataset,
+                                                                                          source='data' )
+                return {
+                    "track_type": dataset.datatype.track_type,
+                    "dataset": trans.security.encode_dict_ids( dataset.to_dict() ),
+                    "prefs": prefs,
+                    "mode": track_dict.get( 'mode', 'Auto' ),
+                    "filters": track_dict.get( 'filters', { 'filters' : track_data_provider.get_filters() } ),
+                    "tool": self.get_tool_def( trans, dataset ),
+                    "tool_state": track_dict.get( 'tool_state', {} )
+                }
+
+            def pack_collection( collection_dict ):
+                drawables = []
+                for drawable_dict in collection_dict[ 'drawables' ]:
+                    if 'track_type' in drawable_dict:
+                        drawables.append( pack_track( drawable_dict ) )
+                    else:
+                        drawables.append( pack_collection( drawable_dict ) )
+                return {
+                    'obj_type': collection_dict[ 'obj_type' ],
+                    'drawables': drawables,
+                    'prefs': collection_dict.get( 'prefs', [] ),
+                    'filters': collection_dict.get( 'filters', {} )
+                }
+
+            def encode_dbkey( dbkey ):
+                """
+                Encodes dbkey as needed. For now, prepends user's public name
+                to custom dbkey keys.
+                """
+                encoded_dbkey = dbkey
+                user = visualization.user
+                if 'dbkeys' in user.preferences and dbkey in user.preferences[ 'dbkeys' ]:
+                    encoded_dbkey = "%s:%s" % ( user.username, dbkey )
+                return encoded_dbkey
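+            # e.g. a custom build 'myKey' owned by user 'bob' is encoded as 'bob:myKey';
+            # standard builds like 'hg19' are returned unchanged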
+
+            # Set tracks.
+            tracks = []
+            if 'tracks' in latest_revision.config:
+                # Legacy code.
+                for track_dict in visualization.latest_revision.config[ 'tracks' ]:
+                    tracks.append( pack_track( track_dict ) )
+            elif 'view' in latest_revision.config:
+                for drawable_dict in visualization.latest_revision.config[ 'view' ][ 'drawables' ]:
+                    if 'track_type' in drawable_dict:
+                        tracks.append( pack_track( drawable_dict ) )
+                    else:
+                        tracks.append( pack_collection( drawable_dict ) )
+
+            config = {  "title": visualization.title,
+                        "vis_id": trans.security.encode_id( visualization.id ),
+                        "tracks": tracks,
+                        "bookmarks": bookmarks,
+                        "chrom": "",
+                        "dbkey": encode_dbkey( visualization.dbkey ) }
+
+            if 'viewport' in latest_revision.config:
+                config['viewport'] = latest_revision.config['viewport']
+        else:
+            # Default action is to return config unaltered.
+            latest_revision = visualization.latest_revision
+            config = latest_revision.config
+
+        return config
+
+    def get_new_track_config( self, trans, dataset ):
+        """
+        Returns track configuration dict for a dataset.
+        """
+        # Get data provider.
+        track_data_provider = trans.app.data_provider_registry.get_data_provider( trans, original_dataset=dataset )
+
+        # Get track definition.
+        return {
+            "track_type": dataset.datatype.track_type,
+            "name": dataset.name,
+            "dataset": trans.security.encode_dict_ids( dataset.to_dict() ),
+            "prefs": {},
+            "filters": { 'filters' : track_data_provider.get_filters() },
+            "tool": self.get_tool_def( trans, dataset ),
+            "tool_state": {}
+        }
+
+    def get_hda_or_ldda( self, trans, hda_ldda, dataset_id ):
+        """ Returns either HDA or LDDA for hda/ldda and id combination. """
+        if hda_ldda == "hda":
+            return self.get_hda( trans, dataset_id, check_ownership=False, check_accessible=True )
+        else:
+            return self.get_library_dataset_dataset_association( trans, dataset_id )
+
+    def get_hda( self, trans, dataset_id, check_ownership=True, check_accessible=False, check_state=True ):
+        """
+        Get an HDA object by id performing security checks using
+        the current transaction.
+        """
+        try:
+            dataset_id = trans.security.decode_id( dataset_id )
+        except ( AttributeError, TypeError ):
+            # DEPRECATION: We still support unencoded ids for backward compatibility
+            try:
+                dataset_id = int( dataset_id )
+            except ValueError:
+                raise HTTPBadRequest( "Invalid dataset id: %s." % str( dataset_id ) )
+
+        try:
+            data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( int( dataset_id ) )
+        except Exception:
+            raise HTTPRequestRangeNotSatisfiable( "Invalid dataset id: %s." % str( dataset_id ) )
+
+        if check_ownership:
+            # Verify ownership.
+            user = trans.get_user()
+            if not user:
+                error( "Must be logged in to manage Galaxy items" )
+            if data.history.user != user:
+                error( "%s is not owned by current user" % data.__class__.__name__ )
+
+        if check_accessible:
+            current_user_roles = trans.get_current_user_roles()
+
+            if not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+                error( "You are not allowed to access this dataset" )
+
+            if check_state and data.state == trans.model.Dataset.states.UPLOAD:
+                return trans.show_error_message( "Please wait until this dataset finishes uploading " +
+                                                 "before attempting to view it." )
+        return data
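+
+    # Id handling above, sketched with hypothetical values: an encoded id
+    # such as 'f2db41e1fa331b3e' is decoded via trans.security, a legacy
+    # unencoded id such as '42' falls through to int(), and anything else
+    # raises HTTPBadRequest.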
+
+    # -- Helper functions --
+
+    def _create_visualization( self, trans, title, type, dbkey=None, slug=None, annotation=None, save=True ):
+        """ Create visualization but not first revision. Returns Visualization object. """
+        user = trans.get_user()
+
+        # Error checking.
+        title_err = slug_err = ""
+        if not title:
+            title_err = "visualization name is required"
+        elif slug and not _is_valid_slug( slug ):
+            slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+        elif slug and trans.sa_session.query( trans.model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first():
+            slug_err = "visualization identifier must be unique"
+
+        if title_err or slug_err:
+            return { 'title_err': title_err, 'slug_err': slug_err }
+
+        # Create visualization
+        visualization = trans.model.Visualization( user=user, title=title, dbkey=dbkey, type=type )
+        if slug:
+            visualization.slug = slug
+        else:
+            self.create_item_slug( trans.sa_session, visualization )
+        if annotation:
+            annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
+            # TODO: if this is to stay in the mixin, UsesAnnotations should be added to the superclasses
+            #   right now this is depending on the classes that include this mixin to have UsesAnnotations
+            self.add_item_annotation( trans.sa_session, trans.user, visualization, annotation )
+
+        if save:
+            session = trans.sa_session
+            session.add( visualization )
+            session.flush()
+
+        return visualization
+
+    def _get_genome_data( self, trans, dataset, dbkey=None ):
+        """
+        Returns genome-wide data for dataset if available; if not, message is returned.
+        """
+        rval = None
+
+        # Get data sources.
+        data_sources = dataset.get_datasources( trans )
+        query_dbkey = dataset.dbkey
+        if query_dbkey == "?":
+            query_dbkey = dbkey
+        chroms_info = self.app.genomes.chroms( trans, dbkey=query_dbkey )
+
+        # If there are no messages (messages indicate data is not ready/available), get data.
+        messages_list = [ data_source_dict[ 'message' ] for data_source_dict in data_sources.values() ]
+        message = self._get_highest_priority_msg( messages_list )
+        if message:
+            rval = message
+        else:
+            # HACK: chromatin interactions tracks use data as source.
+            source = 'index'
+            if isinstance( dataset.datatype, ChromatinInteractions ):
+                source = 'data'
+
+            data_provider = trans.app.data_provider_registry.get_data_provider( trans,
+                                                                                original_dataset=dataset,
+                                                                                source=source )
+            # HACK: pass in additional params which are used for only some
+            # types of data providers; level, cutoffs used for summary tree,
+            # num_samples for BBI, and interchromosomal used for chromatin interactions.
+            rval = data_provider.get_genome_data( chroms_info,
+                                                  level=4, detail_cutoff=0, draw_cutoff=0,
+                                                  num_samples=150,
+                                                  interchromosomal=True )
+
+        return rval
+
+    # FIXME: this method probably belongs down in the model.Dataset class.
+    def _get_highest_priority_msg( self, message_list ):
+        """
+        Returns highest priority message from a list of messages.
+        """
+        return_message = None
+
+        # For now, priority is: job error (dict), no converter, pending.
+        for message in message_list:
+            if message is not None:
+                if isinstance(message, dict):
+                    return_message = message
+                    break
+                elif message == "no converter":
+                    return_message = message
+                elif return_message is None and message == "pending":
+                    return_message = message
+        return return_message
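+
+    # A minimal illustration of the ordering above (hypothetical values):
+    #   _get_highest_priority_msg( [ "pending", "no converter" ] )         # -> "no converter"
+    #   _get_highest_priority_msg( [ "no converter", { "error": "x" } ] )  # -> the error dict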
+
+
+class UsesStoredWorkflowMixin( SharableItemSecurityMixin, UsesAnnotations ):
+    """ Mixin for controllers that use StoredWorkflow objects. """
+
+    def get_stored_workflow( self, trans, id, check_ownership=True, check_accessible=False ):
+        """ Get a StoredWorkflow from the database by id, verifying ownership. """
+        # Load workflow from database
+        try:
+            workflow = trans.sa_session.query( trans.model.StoredWorkflow ).get( trans.security.decode_id( id ) )
+        except TypeError:
+            workflow = None
+
+        if not workflow:
+            error( "Workflow not found" )
+        else:
+            self.security_check( trans, workflow, check_ownership, check_accessible )
+
+            # Older workflows may be missing slugs, so set them here.
+            if not workflow.slug:
+                self.create_item_slug( trans.sa_session, workflow )
+                trans.sa_session.flush()
+
+        return workflow
+
+    def get_stored_workflow_steps( self, trans, stored_workflow ):
+        """ Restores states for a stored workflow's steps. """
+        module_injector = WorkflowModuleInjector( trans )
+        for step in stored_workflow.latest_workflow.steps:
+            try:
+                module_injector.inject( step )
+            except MissingToolException:
+                # NOTE: upgrade_messages is normally a dict, but for a missing tool we store a plain string.
+                step.upgrade_messages = "Unknown Tool ID"
+
+    def _import_shared_workflow( self, trans, stored ):
+        """ Imports a shared StoredWorkflow, copying its latest workflow and annotations for the current user. """
+        # Copy workflow.
+        imported_stored = model.StoredWorkflow()
+        imported_stored.name = "imported: " + stored.name
+        workflow = stored.latest_workflow.copy()
+        workflow.stored_workflow = imported_stored
+        imported_stored.latest_workflow = workflow
+        imported_stored.user = trans.user
+        # Save new workflow.
+        session = trans.sa_session
+        session.add( imported_stored )
+        session.flush()
+
+        # Copy annotations.
+        self.copy_item_annotation( session, stored.user, stored, imported_stored.user, imported_stored )
+        for order_index, step in enumerate( stored.latest_workflow.steps ):
+            self.copy_item_annotation( session, stored.user, step,
+                                       imported_stored.user, imported_stored.latest_workflow.steps[order_index] )
+        session.flush()
+        return imported_stored
+
+    def _workflow_from_dict( self, trans, data, source=None, add_to_menu=False, publish=False ):
+        """
+        Creates a workflow from a dict. Created workflow is stored in the database and returned.
+        """
+        # TODO: replace this method with direct access to manager.
+        workflow_contents_manager = workflows.WorkflowContentsManager( self.app )
+        created_workflow = workflow_contents_manager.build_workflow_from_dict(
+            trans,
+            data,
+            source=source,
+            add_to_menu=add_to_menu,
+            publish=publish
+        )
+        return created_workflow.stored_workflow, created_workflow.missing_tools
+
+    def _workflow_to_dict( self, trans, stored ):
+        """
+        Converts a workflow to a dict of attributes suitable for exporting.
+        """
+        workflow_contents_manager = workflows.WorkflowContentsManager( self.app )
+        return workflow_contents_manager.workflow_to_dict(
+            trans,
+            stored,
+        )
+
+
+class UsesFormDefinitionsMixin:
+    """Mixin for controllers that use Galaxy form objects."""
+
+    def get_all_forms( self, trans, all_versions=False, filter=None, form_type='All' ):
+        """
+        Return all the versions of all the forms from the form_definition
+        table if all_versions is set to True. Otherwise return only the
+        latest version of each form, via the form_definition_current table.
+        """
+        if all_versions:
+            return trans.sa_session.query( trans.app.model.FormDefinition )
+        if filter:
+            fdc_list = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).filter_by( **filter )
+        else:
+            fdc_list = trans.sa_session.query( trans.app.model.FormDefinitionCurrent )
+        if form_type == 'All':
+            return [ fdc.latest_form for fdc in fdc_list ]
+        else:
+            return [ fdc.latest_form for fdc in fdc_list if fdc.latest_form.type == form_type ]
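+
+    # Typical usage (the form_type value is hypothetical):
+    #   forms = self.get_all_forms( trans, filter=dict( deleted=False ),
+    #                               form_type='Run Details' )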
+
+    def get_all_forms_by_type( self, trans, cntrller, form_type ):
+        forms = self.get_all_forms( trans,
+                                    filter=dict( deleted=False ),
+                                    form_type=form_type )
+        if not forms:
+            message = "There are no forms on which to base the template, so create a form and then add the template."
+            return trans.response.send_redirect( web.url_for( controller='forms',
+                                                              action='create_form_definition',
+                                                              cntrller=cntrller,
+                                                              message=message,
+                                                              status='done',
+                                                              form_type=form_type ) )
+        return forms
+
+    @web.expose
+    def add_template( self, trans, cntrller, item_type, form_type, **kwd ):
+        params = util.Params( kwd )
+        form_id = params.get( 'form_id', 'none' )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        action = ''
+        status = params.get( 'status', 'done' )
+        forms = self.get_all_forms_by_type( trans, cntrller, form_type )
+        # form_type must be one of: RUN_DETAILS_TEMPLATE, LIBRARY_INFO_TEMPLATE
+        in_library = form_type == trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        in_sample_tracking = form_type == trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE
+        if in_library:
+            show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
+            use_panels = util.string_as_bool( params.get( 'use_panels', False ) )
+            library_id = params.get( 'library_id', None )
+            folder_id = params.get( 'folder_id', None )
+            ldda_id = params.get( 'ldda_id', None )
+            is_admin = trans.user_is_admin() and cntrller in [ 'library_admin', 'requests_admin' ]
+            current_user_roles = trans.get_current_user_roles()
+        elif in_sample_tracking:
+            request_type_id = params.get( 'request_type_id', None )
+            sample_id = params.get( 'sample_id', None )
+        try:
+            if in_sample_tracking:
+                item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                       item_type=item_type,
+                                                                       request_type_id=request_type_id,
+                                                                       sample_id=sample_id )
+            elif in_library:
+                item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                       item_type=item_type,
+                                                                       library_id=library_id,
+                                                                       folder_id=folder_id,
+                                                                       ldda_id=ldda_id,
+                                                                       is_admin=is_admin )
+            if not item:
+                message = "Invalid %s id ( %s ) specified." % ( item_desc, str( id ) )
+                if in_sample_tracking:
+                    return trans.response.send_redirect( web.url_for( controller='request_type',
+                                                                      action='browse_request_types',
+                                                                      id=request_type_id,
+                                                                      message=util.sanitize_text( message ),
+                                                                      status='error' ) )
+                if in_library:
+                    return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                      action='browse_library',
+                                                                      cntrller=cntrller,
+                                                                      id=library_id,
+                                                                      show_deleted=show_deleted,
+                                                                      message=util.sanitize_text( message ),
+                                                                      status='error' ) )
+        except ValueError:
+            # At this point the client has already redirected, so this is just here to prevent an unnecessary traceback
+            return None
+        if in_library:
+            # Make sure the user is authorized to do what they are trying to do.
+            authorized = True
+            if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
+                authorized = False
+                unauthorized = 'modify'
+            if not ( is_admin or trans.app.security_agent.can_access_library_item( current_user_roles, item, trans.user ) ):
+                authorized = False
+                unauthorized = 'access'
+            if not authorized:
+                message = "You are not authorized to %s %s '%s'." % ( unauthorized, item_desc, item.name )
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='browse_library',
+                                                                  cntrller=cntrller,
+                                                                  id=library_id,
+                                                                  show_deleted=show_deleted,
+                                                                  message=util.sanitize_text( message ),
+                                                                  status='error' ) )
+            # If the inheritable checkbox is checked, the param will be in the request
+            inheritable = CheckboxField.is_checked( params.get( 'inheritable', '' ) )
+        if params.get( 'add_template_button', False ):
+            if form_id not in [ None, 'None', 'none' ]:
+                form = trans.sa_session.query( trans.app.model.FormDefinition ).get( trans.security.decode_id( form_id ) )
+                form_values = trans.app.model.FormValues( form, {} )
+                trans.sa_session.add( form_values )
+                trans.sa_session.flush()
+                if item_type == 'library':
+                    assoc = trans.model.LibraryInfoAssociation( item, form, form_values, inheritable=inheritable )
+                elif item_type == 'folder':
+                    assoc = trans.model.LibraryFolderInfoAssociation( item, form, form_values, inheritable=inheritable )
+                elif item_type == 'ldda':
+                    assoc = trans.model.LibraryDatasetDatasetInfoAssociation( item, form, form_values )
+                elif item_type in [ 'request_type', 'sample' ]:
+                    run = trans.model.Run( form, form_values )
+                    trans.sa_session.add( run )
+                    trans.sa_session.flush()
+                    if item_type == 'request_type':
+                        # Delete current RequestTypeRunAssociation, if one exists.
+                        rtra = item.run_details
+                        if rtra:
+                            trans.sa_session.delete( rtra )
+                            trans.sa_session.flush()
+                        # Add the new RequestTypeRunAssociation.  Templates associated with a RequestType
+                        # are automatically inherited to the samples.
+                        assoc = trans.model.RequestTypeRunAssociation( item, run )
+                    elif item_type == 'sample':
+                        assoc = trans.model.SampleRunAssociation( item, run )
+                trans.sa_session.add( assoc )
+                trans.sa_session.flush()
+                message = 'A template based on the form "%s" has been added to this %s.' % ( form.name, item_desc )
+                new_kwd = dict( action=action,
+                                cntrller=cntrller,
+                                message=util.sanitize_text( message ),
+                                status='done' )
+                if in_sample_tracking:
+                    new_kwd.update( dict( controller='request_type',
+                                          request_type_id=request_type_id,
+                                          sample_id=sample_id,
+                                          id=id ) )
+                    return trans.response.send_redirect( web.url_for( **new_kwd ) )
+                elif in_library:
+                    new_kwd.update( dict( controller='library_common',
+                                          use_panels=use_panels,
+                                          library_id=library_id,
+                                          folder_id=folder_id,
+                                          id=id,
+                                          show_deleted=show_deleted ) )
+                    return trans.response.send_redirect( web.url_for( **new_kwd ) )
+            else:
+                message = "Select a form on which to base the template."
+                status = "error"
+        form_id_select_field = self.build_form_id_select_field( trans, forms, selected_value=kwd.get( 'form_id', 'none' ) )
+        try:
+            decoded_form_id = trans.security.decode_id( form_id )
+        except Exception:
+            decoded_form_id = None
+        if decoded_form_id:
+            for form in forms:
+                if decoded_form_id == form.id:
+                    widgets = form.get_widgets( trans.user )
+                    break
+        else:
+            widgets = []
+        new_kwd = dict( cntrller=cntrller,
+                        item_name=item.name,
+                        item_desc=item_desc,
+                        item_type=item_type,
+                        form_type=form_type,
+                        widgets=widgets,
+                        form_id_select_field=form_id_select_field,
+                        message=message,
+                        status=status )
+        if in_sample_tracking:
+            new_kwd.update( dict( request_type_id=request_type_id,
+                                  sample_id=sample_id ) )
+        elif in_library:
+            new_kwd.update( dict( use_panels=use_panels,
+                                  library_id=library_id,
+                                  folder_id=folder_id,
+                                  ldda_id=ldda_id,
+                                  inheritable_checked=inheritable,
+                                  show_deleted=show_deleted ) )
+        return trans.fill_template( '/common/select_template.mako',
+                                    **new_kwd )
+
+    @web.expose
+    def edit_template( self, trans, cntrller, item_type, form_type, **kwd ):
+        # Edit the template itself, keeping existing field contents, if any.
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        edited = util.string_as_bool( params.get( 'edited', False ) )
+        action = ''
+        # form_type must be one of: RUN_DETAILS_TEMPLATE, LIBRARY_INFO_TEMPLATE
+        in_library = form_type == trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        in_sample_tracking = form_type == trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE
+        if in_library:
+            show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
+            use_panels = util.string_as_bool( params.get( 'use_panels', False ) )
+            library_id = params.get( 'library_id', None )
+            folder_id = params.get( 'folder_id', None )
+            ldda_id = params.get( 'ldda_id', None )
+            is_admin = trans.user_is_admin() and cntrller in [ 'library_admin', 'requests_admin' ]
+            current_user_roles = trans.get_current_user_roles()
+        elif in_sample_tracking:
+            request_type_id = params.get( 'request_type_id', None )
+            sample_id = params.get( 'sample_id', None )
+        try:
+            if in_library:
+                item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                       item_type=item_type,
+                                                                       library_id=library_id,
+                                                                       folder_id=folder_id,
+                                                                       ldda_id=ldda_id,
+                                                                       is_admin=is_admin )
+            elif in_sample_tracking:
+                item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                       item_type=item_type,
+                                                                       request_type_id=request_type_id,
+                                                                       sample_id=sample_id )
+        except ValueError:
+            return None
+        if in_library:
+            if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
+                message = "You are not authorized to modify %s '%s'." % ( item_desc, item.name )
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='browse_library',
+                                                                  cntrller=cntrller,
+                                                                  id=library_id,
+                                                                  show_deleted=show_deleted,
+                                                                  message=util.sanitize_text( message ),
+                                                                  status='error' ) )
+        # An info_association must exist at this point
+        if in_library:
+            info_association, inherited = item.get_info_association( restrict=True )
+        elif in_sample_tracking:
+            # Here run_details is a RequestTypeRunAssociation
+            rtra = item.run_details
+            info_association = rtra.run
+        template = info_association.template
+        if edited:
+            # The form on which the template is based has been edited, so we need to update the
+            # info_association with the current form
+            fdc = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( template.form_definition_current_id )
+            info_association.template = fdc.latest_form
+            trans.sa_session.add( info_association )
+            trans.sa_session.flush()
+            message = "The template for this %s has been updated with your changes." % item_desc
+            new_kwd = dict( action=action,
+                            cntrller=cntrller,
+                            id=id,
+                            message=util.sanitize_text( message ),
+                            status='done' )
+            if in_library:
+                new_kwd.update( dict( controller='library_common',
+                                      use_panels=use_panels,
+                                      library_id=library_id,
+                                      folder_id=folder_id,
+                                      show_deleted=show_deleted ) )
+                return trans.response.send_redirect( web.url_for( **new_kwd ) )
+            elif in_sample_tracking:
+                new_kwd.update( dict( controller='request_type',
+                                      request_type_id=request_type_id,
+                                      sample_id=sample_id ) )
+                return trans.response.send_redirect( web.url_for( **new_kwd ) )
+        # "template" is a FormDefinition, so since we're changing it, we need to use the latest version of it.
+        vars = dict( id=trans.security.encode_id( template.form_definition_current_id ),
+                     response_redirect=web.url_for( controller='request_type',
+                                                    action='edit_template',
+                                                    cntrller=cntrller,
+                                                    item_type=item_type,
+                                                    form_type=form_type,
+                                                    edited=True,
+                                                    **kwd ) )
+        return trans.response.send_redirect( web.url_for( controller='forms', action='edit_form_definition', **vars ) )
+
+    @web.expose
+    def edit_template_info( self, trans, cntrller, item_type, form_type, **kwd ):
+        # Edit the contents of the template fields without altering the template itself.
+        params = util.Params( kwd )
+        # form_type must be one of: RUN_DETAILS_TEMPLATE, LIBRARY_INFO_TEMPLATE
+        in_library = form_type == trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        in_sample_tracking = form_type == trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE
+        if in_library:
+            library_id = params.get( 'library_id', None )
+            folder_id = params.get( 'folder_id', None )
+            ldda_id = params.get( 'ldda_id', None )
+            show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
+            use_panels = util.string_as_bool( params.get( 'use_panels', False ) )
+            is_admin = ( trans.user_is_admin() and cntrller == 'library_admin' )
+            current_user_roles = trans.get_current_user_roles()
+        elif in_sample_tracking:
+            request_type_id = params.get( 'request_type_id', None )
+            sample_id = params.get( 'sample_id', None )
+            sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        try:
+            if in_library:
+                item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                       item_type=item_type,
+                                                                       library_id=library_id,
+                                                                       folder_id=folder_id,
+                                                                       ldda_id=ldda_id,
+                                                                       is_admin=is_admin )
+            elif in_sample_tracking:
+                item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                       item_type=item_type,
+                                                                       request_type_id=request_type_id,
+                                                                       sample_id=sample_id )
+        except ValueError:
+            if cntrller == 'api':
+                trans.response.status = 400
+            return None
+        if in_library:
+            if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
+                message = "You are not authorized to modify %s '%s'." % ( item_desc, item.name )
+                if cntrller == 'api':
+                    trans.response.status = 400
+                    return message
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='browse_library',
+                                                                  cntrller=cntrller,
+                                                                  id=library_id,
+                                                                  show_deleted=show_deleted,
+                                                                  message=util.sanitize_text( message ),
+                                                                  status='error' ) )
+        # We need the type of each template field widget
+        widgets = item.get_template_widgets( trans )
+        # The list of widgets may include an AddressField which we need to save if it is new
+        for widget_dict in widgets:
+            widget = widget_dict[ 'widget' ]
+            if isinstance( widget, AddressField ):
+                value = util.restore_text( params.get( widget.name, '' ) )
+                if value == 'new':
+                    if params.get( 'edit_info_button', False ):
+                        if self.field_param_values_ok( widget.name, 'AddressField', **kwd ):
+                            # Save the new address
+                            address = trans.app.model.UserAddress( user=trans.user )
+                            self.save_widget_field( trans, address, widget.name, **kwd )
+                            widget.value = str( address.id )
+                        else:
+                            message = 'Required fields are missing contents.'
+                            if cntrller == 'api':
+                                trans.response.status = 400
+                                return message
+                            new_kwd = dict( action=action,
+                                            id=id,
+                                            message=util.sanitize_text( message ),
+                                            status='error' )
+                            if in_library:
+                                new_kwd.update( dict( controller='library_common',
+                                                      cntrller=cntrller,
+                                                      use_panels=use_panels,
+                                                      library_id=library_id,
+                                                      folder_id=folder_id,
+                                                      show_deleted=show_deleted ) )
+                                return trans.response.send_redirect( web.url_for( **new_kwd ) )
+                            if in_sample_tracking:
+                                new_kwd.update( dict( controller='request_type',
+                                                      request_type_id=request_type_id,
+                                                      sample_id=sample_id ) )
+                                return trans.response.send_redirect( web.url_for( **new_kwd ) )
+                    else:
+                        # Form was submitted via refresh_on_change
+                        widget.value = 'new'
+                elif value == text_type( 'none' ):
+                    widget.value = ''
+                else:
+                    widget.value = value
+            elif isinstance( widget, CheckboxField ):
+                # We need to check the value from kwd since util.Params would have munged the list if
+                # the checkbox is checked.
+                value = kwd.get( widget.name, '' )
+                if CheckboxField.is_checked( value ):
+                    widget.value = 'true'
+            else:
+                widget.value = util.restore_text( params.get( widget.name, '' ) )
+        # Save updated template field contents
+        field_contents = self.clean_field_contents( widgets, **kwd )
+        if field_contents:
+            if in_library:
+                # In a library, since information templates are inherited, the template fields can be displayed
+                # on the information page for a folder or ldda even when it has no info_association object.  If the
+                # user has added field contents on an inherited template via a parent's info_association, we'll need
+                # to create a new form_values and info_association for the current object.  The value of the returned
+                # inherited variable is not applicable at this level.
+                info_association, inherited = item.get_info_association( restrict=True )
+            elif in_sample_tracking:
+                assoc = item.run_details
+                if item_type == 'request_type' and assoc:
+                    # If we're dealing with a RequestType, assoc will be a RequestTypeRunAssociation.
+                    info_association = assoc.run
+                elif item_type == 'sample' and assoc:
+                    # If we're dealing with a Sample, assoc will be a SampleRunAssociation if the
+                    # Sample has one.  If the Sample does not have a SampleRunAssociation, assoc will
+                    # be the Sample's RequestType RequestTypeRunAssociation, in which case we need to
+                    # create a SampleRunAssociation using the inherited template from the RequestType.
+                    if isinstance( assoc, trans.model.RequestTypeRunAssociation ):
+                        form_definition = assoc.run.template
+                        new_form_values = trans.model.FormValues( form_definition, {} )
+                        trans.sa_session.add( new_form_values )
+                        trans.sa_session.flush()
+                        new_run = trans.model.Run( form_definition, new_form_values )
+                        trans.sa_session.add( new_run )
+                        trans.sa_session.flush()
+                        sra = trans.model.SampleRunAssociation( item, new_run )
+                        trans.sa_session.add( sra )
+                        trans.sa_session.flush()
+                        info_association = sra.run
+                    else:
+                        info_association = assoc.run
+                else:
+                    info_association = None
+            if info_association:
+                template = info_association.template
+                info = info_association.info
+                form_values = trans.sa_session.query( trans.app.model.FormValues ).get( info.id )
+                # Update existing content only if it has changed
+                flush_required = False
+                for field_contents_key, field_contents_value in field_contents.items():
+                    if field_contents_key in form_values.content:
+                        if form_values.content[ field_contents_key ] != field_contents_value:
+                            flush_required = True
+                            form_values.content[ field_contents_key ] = field_contents_value
+                    else:
+                        flush_required = True
+                        form_values.content[ field_contents_key ] = field_contents_value
+                if flush_required:
+                    trans.sa_session.add( form_values )
+                    trans.sa_session.flush()
+            else:
+                if in_library:
+                    # Inherit the next available info_association so we can get the template
+                    info_association, inherited = item.get_info_association()
+                    template = info_association.template
+                    # Create a new FormValues object
+                    form_values = trans.app.model.FormValues( template, field_contents )
+                    trans.sa_session.add( form_values )
+                    trans.sa_session.flush()
+                    # Create a new info_association between the current library item and form_values
+                    if item_type == 'folder':
+                        # A LibraryFolder is a special case because if it inherited the template from its parent,
+                        # we want to set inheritable to True for its info_association.  This allows for the default
+                        # inheritance to be False for each level in the Library hierarchy unless we're creating a new
+                        # level in the hierarchy, in which case we'll inherit the "inheritable" setting from the parent
+                        # level.
+                        info_association = trans.app.model.LibraryFolderInfoAssociation( item, template, form_values, inheritable=inherited )
+                        trans.sa_session.add( info_association )
+                        trans.sa_session.flush()
+                    elif item_type == 'ldda':
+                        info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( item, template, form_values )
+                        trans.sa_session.add( info_association )
+                        trans.sa_session.flush()
+        message = 'The information has been updated.'
+        if cntrller == 'api':
+            return 200, message
+        new_kwd = dict( action=action,
+                        cntrller=cntrller,
+                        id=id,
+                        message=util.sanitize_text( message ),
+                        status='done' )
+        if in_library:
+            new_kwd.update( dict( controller='library_common',
+                                  use_panels=use_panels,
+                                  library_id=library_id,
+                                  folder_id=folder_id,
+                                  show_deleted=show_deleted ) )
+        if in_sample_tracking:
+            new_kwd.update( dict( controller='requests_common',
+                                  cntrller='requests_admin',
+                                  id=trans.security.encode_id( sample.id ),
+                                  sample_id=sample_id ) )
+        return trans.response.send_redirect( web.url_for( **new_kwd ) )
+
+    @web.expose
+    def delete_template( self, trans, cntrller, item_type, form_type, **kwd ):
+        params = util.Params( kwd )
+        # form_type must be one of: RUN_DETAILS_TEMPLATE, LIBRARY_INFO_TEMPLATE
+        in_library = form_type == trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        in_sample_tracking = form_type == trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE
+        if in_library:
+            is_admin = ( trans.user_is_admin() and cntrller == 'library_admin' )
+            current_user_roles = trans.get_current_user_roles()
+            show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
+            use_panels = util.string_as_bool( params.get( 'use_panels', False ) )
+            library_id = params.get( 'library_id', None )
+            folder_id = params.get( 'folder_id', None )
+            ldda_id = params.get( 'ldda_id', None )
+        elif in_sample_tracking:
+            request_type_id = params.get( 'request_type_id', None )
+            sample_id = params.get( 'sample_id', None )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        try:
+            if in_library:
+                item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                       item_type=item_type,
+                                                                       library_id=library_id,
+                                                                       folder_id=folder_id,
+                                                                       ldda_id=ldda_id,
+                                                                       is_admin=is_admin )
+            elif in_sample_tracking:
+                item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                       item_type=item_type,
+                                                                       request_type_id=request_type_id,
+                                                                       sample_id=sample_id )
+        except ValueError:
+            return None
+        if in_library:
+            if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
+                message = "You are not authorized to modify %s '%s'." % ( item_desc, item.name )
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='browse_library',
+                                                                  cntrller=cntrller,
+                                                                  id=library_id,
+                                                                  show_deleted=show_deleted,
+                                                                  message=util.sanitize_text( message ),
+                                                                  status='error' ) )
+        if in_library:
+            info_association, inherited = item.get_info_association()
+        elif in_sample_tracking:
+            info_association = item.run_details
+        if not info_association:
+            message = "There is no template for this %s" % item_type
+        else:
+            if in_library:
+                info_association.deleted = True
+                trans.sa_session.add( info_association )
+                trans.sa_session.flush()
+            elif in_sample_tracking:
+                trans.sa_session.delete( info_association )
+                trans.sa_session.flush()
+            message = 'The template for this %s has been deleted.' % item_type
+        new_kwd = dict( action=action,
+                        cntrller=cntrller,
+                        id=id,
+                        message=util.sanitize_text( message ),
+                        status='done' )
+        if in_library:
+            new_kwd.update( dict( controller='library_common',
+                                  use_panels=use_panels,
+                                  library_id=library_id,
+                                  folder_id=folder_id,
+                                  show_deleted=show_deleted ) )
+            return trans.response.send_redirect( web.url_for( **new_kwd ) )
+        if in_sample_tracking:
+            new_kwd.update( dict( controller='request_type',
+                                  request_type_id=request_type_id,
+                                  sample_id=sample_id ) )
+            return trans.response.send_redirect( web.url_for( **new_kwd ) )
+
+    def widget_fields_have_contents( self, widgets ):
+        # Return True if any of the fields in widgets contain contents.  widgets is a list of dictionaries that looks something like:
+        # [{'widget': <galaxy.web.form_builder.TextField object at 0x10867aa10>, 'helptext': 'Field 0 help (Optional)', 'label': 'Field 0'}]
+        for field in widgets:
+            if ( isinstance( field[ 'widget' ], TextArea ) or isinstance( field[ 'widget' ], TextField ) ) and field[ 'widget' ].value:
+                return True
+            if isinstance( field[ 'widget' ], SelectField ) and field[ 'widget' ].options:
+                for option_label, option_value, selected in field[ 'widget' ].options:
+                    if selected:
+                        return True
+            if isinstance( field[ 'widget' ], CheckboxField ) and field[ 'widget' ].checked:
+                return True
+            if isinstance( field[ 'widget' ], WorkflowField ) and str( field[ 'widget' ].value ).lower() not in [ 'none' ]:
+                return True
+            if isinstance( field[ 'widget' ], WorkflowMappingField ) and str( field[ 'widget' ].value ).lower() not in [ 'none' ]:
+                return True
+            if isinstance( field[ 'widget' ], HistoryField ) and str( field[ 'widget' ].value ).lower() not in [ 'none' ]:
+                return True
+            if isinstance( field[ 'widget' ], AddressField ) and str( field[ 'widget' ].value ).lower() not in [ 'none' ]:
+                return True
+        return False
+
+    def clean_field_contents( self, widgets, **kwd ):
+        field_contents = {}
+        for widget_dict in widgets:
+            widget = widget_dict[ 'widget' ]
+            value = kwd.get( widget.name, ''  )
+            if isinstance( widget, CheckboxField ):
+                # CheckboxField values are lists if the checkbox is checked
+                value = str( widget.is_checked( value ) ).lower()
+            elif isinstance( widget, AddressField ):
+                # If the address was new, it has already been saved and widget.value is the new address.id
+                value = widget.value
+            field_contents[ widget.name ] = util.restore_text( value )
+        return field_contents
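+
+    # Sketch of the normalization above (hypothetical kwd values): a checked
+    # CheckboxField arrives as a list and becomes the string 'true', an
+    # unchecked one becomes 'false', and other widgets keep their restored
+    # text value.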
+
+    def field_param_values_ok( self, widget_name, widget_type, **kwd ):
+        # Make sure required fields have contents, etc
+        params = util.Params( kwd )
+        if widget_type == 'AddressField':
+            if not util.restore_text( params.get( '%s_short_desc' % widget_name, '' ) ) \
+                    or not util.restore_text( params.get( '%s_name' % widget_name, '' ) ) \
+                    or not util.restore_text( params.get( '%s_institution' % widget_name, '' ) ) \
+                    or not util.restore_text( params.get( '%s_address' % widget_name, '' ) ) \
+                    or not util.restore_text( params.get( '%s_city' % widget_name, '' ) ) \
+                    or not util.restore_text( params.get( '%s_state' % widget_name, '' ) ) \
+                    or not util.restore_text( params.get( '%s_postal_code' % widget_name, '' ) ) \
+                    or not util.restore_text( params.get( '%s_country' % widget_name, '' ) ):
+                return False
+        return True
+
+    def save_widget_field( self, trans, field_obj, widget_name, **kwd ):
+        # Save a form_builder field object
+        params = util.Params( kwd )
+        if isinstance( field_obj, trans.model.UserAddress ):
+            field_obj.desc = util.restore_text( params.get( '%s_short_desc' % widget_name, '' ) )
+            field_obj.name = util.restore_text( params.get( '%s_name' % widget_name, '' ) )
+            field_obj.institution = util.restore_text( params.get( '%s_institution' % widget_name, '' ) )
+            field_obj.address = util.restore_text( params.get( '%s_address' % widget_name, '' ) )
+            field_obj.city = util.restore_text( params.get( '%s_city' % widget_name, '' ) )
+            field_obj.state = util.restore_text( params.get( '%s_state' % widget_name, '' ) )
+            field_obj.postal_code = util.restore_text( params.get( '%s_postal_code' % widget_name, '' ) )
+            field_obj.country = util.restore_text( params.get( '%s_country' % widget_name, '' ) )
+            field_obj.phone = util.restore_text( params.get( '%s_phone' % widget_name, '' ) )
+            trans.sa_session.add( field_obj )
+            trans.sa_session.flush()
+
+    def get_form_values( self, trans, user, form_definition, **kwd ):
+        """
+        Returns the name:value dictionary containing all the form values.
+        """
+        params = util.Params( kwd )
+        values = {}
+        for field in form_definition.fields:
+            field_type = field[ 'type' ]
+            field_name = field[ 'name' ]
+            input_value = params.get( field_name, '' )
+            if field_type == AddressField.__name__:
+                input_text_value = util.restore_text( input_value )
+                if input_text_value == 'new':
+                    # Save this new address in the list of this user's addresses
+                    user_address = trans.model.UserAddress( user=user )
+                    self.save_widget_field( trans, user_address, field_name, **kwd )
+                    trans.sa_session.refresh( user )
+                    field_value = int( user_address.id )
+                elif input_text_value in [ '', 'none', 'None', None ]:
+                    field_value = ''
+                else:
+                    field_value = int( input_text_value )
+            elif field_type == CheckboxField.__name__:
+                field_value = CheckboxField.is_checked( input_value )
+            elif field_type == PasswordField.__name__:
+                field_value = kwd.get( field_name, '' )
+            else:
+                field_value = util.restore_text( input_value )
+            values[ field_name ] = field_value
+        return values
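+
+    # Example of the mapping produced (field names and values hypothetical):
+    #   { 'address1': 12,         # AddressField -> id of a saved UserAddress
+    #     'confirmed': True,      # CheckboxField -> bool
+    #     'notes': 'free text' }  # everything else -> restored text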
+
+    def populate_widgets_from_kwd( self, trans, widgets, **kwd ):
+        # A form submitted via refresh_on_change requires us to populate the widgets with the contents of
+        # the form fields the user may have entered so that when the form refreshes the contents are retained.
+        params = util.Params( kwd )
+        populated_widgets = []
+        for widget_dict in widgets:
+            widget = widget_dict[ 'widget' ]
+            if params.get( widget.name, False ):
+                # The form included a field whose contents should be used to set the
+                # value of the current widget (widget.name is the name set by the
+                # user when they defined the FormDefinition).
+                if isinstance( widget, AddressField ):
+                    value = util.restore_text( params.get( widget.name, '' ) )
+                    if value == 'none':
+                        value = ''
+                    widget.value = value
+                    widget_dict[ 'widget' ] = widget
+                    # Populate the AddressField params with the form field contents
+                    widget_params_dict = {}
+                    for field_name, label, help_text in widget.fields():
+                        form_param_name = '%s_%s' % ( widget.name, field_name )
+                        widget_params_dict[ form_param_name ] = util.restore_text( params.get( form_param_name, '' ) )
+                    widget.params = widget_params_dict
+                elif isinstance( widget, CheckboxField ):
+                    # Check the value from kwd since util.Params would have
+                    # stringified the list if the checkbox is checked.
+                    value = kwd.get( widget.name, '' )
+                    if CheckboxField.is_checked( value ):
+                        widget.value = 'true'
+                        widget_dict[ 'widget' ] = widget
+                elif isinstance( widget, SelectField ):
+                    # Ensure the selected option remains selected.
+                    value = util.restore_text( params.get( widget.name, '' ) )
+                    processed_options = []
+                    for option_label, option_value, option_selected in widget.options:
+                        selected = value == option_value
+                        processed_options.append( ( option_label, option_value, selected ) )
+                    widget.options = processed_options
+                else:
+                    widget.value = util.restore_text( params.get( widget.name, '' ) )
+                    widget_dict[ 'widget' ] = widget
+            populated_widgets.append( widget_dict )
+        return populated_widgets
+
+    def get_item_and_stuff( self, trans, item_type, **kwd ):
+        # Return an item, description, action and an id based on the item_type.  Valid item_types are
+        # library, folder, ldda, request_type, sample.
+        if item_type == 'library':
+            library_id = kwd.get( 'library_id', None )
+            id = library_id
+            try:
+                item = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) )
+            except Exception:
+                item = None
+            item_desc = 'data library'
+            action = 'library_info'
+        elif item_type == 'folder':
+            folder_id = kwd.get( 'folder_id', None )
+            id = folder_id
+            try:
+                item = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( folder_id ) )
+            except Exception:
+                item = None
+            item_desc = 'folder'
+            action = 'folder_info'
+        elif item_type == 'ldda':
+            ldda_id = kwd.get( 'ldda_id', None )
+            id = ldda_id
+            try:
+                item = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( ldda_id ) )
+            except Exception:
+                item = None
+            item_desc = 'dataset'
+            action = 'ldda_edit_info'
+        elif item_type == 'request_type':
+            request_type_id = kwd.get( 'request_type_id', None )
+            id = request_type_id
+            try:
+                item = trans.sa_session.query( trans.app.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+            except Exception:
+                item = None
+            item_desc = 'request type'
+            action = 'view_editable_request_type'
+        elif item_type == 'sample':
+            sample_id = kwd.get( 'sample_id', None )
+            id = sample_id
+            try:
+                item = trans.sa_session.query( trans.app.model.Sample ).get( trans.security.decode_id( sample_id ) )
+            except Exception:
+                item = None
+            item_desc = 'sample'
+            action = 'view_sample'
+        else:
+            item = None
+            # message = "Invalid item type ( %s )" % str( item_type )
+            item_desc = None
+            action = None
+            id = None
+        return item, item_desc, action, id
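+        # For example (hypothetical encoded id), calling
+        #     get_item_and_stuff( trans, 'folder', folder_id=encoded_id )
+        # returns a tuple of the form
+        #     ( <LibraryFolder>, 'folder', 'folder_info', encoded_id ),
+        # with item set to None when decoding or the lookup fails.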
+
+    def build_form_id_select_field( self, trans, forms, selected_value='none' ):
+        return build_select_field( trans,
+                                   objs=forms,
+                                   label_attr='name',
+                                   select_field_name='form_id',
+                                   selected_value=selected_value,
+                                   refresh_on_change=True )
+
+
+class SharableMixin( object ):
+    """ Mixin for a controller that manages an item that can be shared. """
+
+    # -- Implemented methods. --
+
+    def _is_valid_slug( self, slug ):
+        """ Returns true if slug is valid. """
+        return _is_valid_slug( slug )
+
+    @web.expose
+    @web.require_login( "share Galaxy items" )
+    def set_public_username( self, trans, id, username, **kwargs ):
+        """ Set user's public username and delegate to sharing() """
+        user = trans.get_user()
+        # message from validate_publicname does not contain input, no need
+        # to escape.
+        message = validate_publicname( trans, username, user )
+        if message:
+            return trans.fill_template( '/sharing_base.mako', item=self.get_item( trans, id ), message=message, status='error' )
+        user.username = username
+        trans.sa_session.flush()
+        return self.sharing( trans, id, **kwargs )
+
+    @web.expose
+    @web.require_login( "modify Galaxy items" )
+    def set_slug_async( self, trans, id, new_slug ):
+        item = self.get_item( trans, id )
+        if item:
+            # Only update the slug if it is not already in use.
+            if trans.sa_session.query( item.__class__ ).filter_by( user=item.user, slug=new_slug ).count() == 0:
+                item.slug = new_slug
+                trans.sa_session.flush()
+            return item.slug
+
+    def _make_item_accessible( self, sa_session, item ):
+        """ Makes item accessible--viewable and importable--and sets item's slug.
+            Does not flush/commit changes, however. Item must have name, user,
+            importable, and slug attributes. """
+        item.importable = True
+        self.create_item_slug( sa_session, item )
+
+    def create_item_slug( self, sa_session, item ):
+        """ Create/set item slug. Slug is unique among user's importable items
+            for item's class. Returns true if item's slug was set/changed; false
+            otherwise.
+        """
+        cur_slug = item.slug
+
+        # Setup slug base.
+        if cur_slug is None or cur_slug == "":
+            # Item can have either a name or a title.
+            if hasattr( item, 'name' ):
+                item_name = item.name
+            elif hasattr( item, 'title' ):
+                item_name = item.title
+            slug_base = util.ready_name_for_url( item_name.lower() )
+        else:
+            slug_base = cur_slug
+
+        # Using slug base, find a slug that is not taken. If slug is taken,
+        # add integer to end.
+        new_slug = slug_base
+        count = 1
+        # Ensure unique across model class and user and don't include this item
+        # in the check in case it has previously been assigned a valid slug.
+        while sa_session.query( item.__class__ ).filter( item.__class__.user == item.user, item.__class__.slug == new_slug, item.__class__.id != item.id).count() != 0:
+            # Slug taken; choose a new slug based on count. This approach can
+            # handle numerous items with the same name gracefully.
+            new_slug = '%s-%i' % ( slug_base, count )
+            count += 1
+
+        # Set slug and return.
+        item.slug = new_slug
+        return item.slug != cur_slug
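+        # For example, assuming util.ready_name_for_url maps "My Page" to
+        # 'my-page': three pages belonging to one user and all named "My Page"
+        # end up with the slugs 'my-page', 'my-page-1' and 'my-page-2'.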
+
+    # -- Abstract methods. --
+
+    @web.expose
+    @web.require_login( "share Galaxy items" )
+    def sharing( self, trans, id, **kwargs ):
+        """ Handle item sharing. """
+        raise NotImplementedError()
+
+    @web.expose
+    @web.require_login( "share Galaxy items" )
+    def share( self, trans, id=None, email="", **kwd ):
+        """ Handle sharing an item with a particular user. """
+        raise NotImplementedError()
+
+    @web.expose
+    def display_by_username_and_slug( self, trans, username, slug ):
+        """ Display item by username and slug. """
+        raise NotImplementedError()
+
+    @web.json
+    @web.require_login( "get item name and link" )
+    def get_name_and_link_async( self, trans, id=None ):
+        """ Returns item's name and link. """
+        raise NotImplementedError()
+
+    @web.expose
+    @web.require_login("get item content asynchronously")
+    def get_item_content_async( self, trans, id ):
+        """ Returns item content in HTML format. """
+        raise NotImplementedError()
+
+    def get_item( self, trans, id ):
+        """ Return item based on id. """
+        raise NotImplementedError()
+
+
+class UsesQuotaMixin( object ):
+
+    def get_quota( self, trans, id, check_ownership=False, check_accessible=False, deleted=None ):
+        return self.get_object( trans, id, 'Quota', check_ownership=False, check_accessible=False, deleted=deleted )
+
+
+class UsesTagsMixin( SharableItemSecurityMixin ):
+
+    def get_tag_handler( self, trans ):
+        return trans.app.tag_handler
+
+    def _get_user_tags( self, trans, item_class_name, id ):
+        user = trans.user
+        tagged_item = self._get_tagged_item( trans, item_class_name, id )
+        return [ tag for tag in tagged_item.tags if tag.user == user ]
+
+    def _get_tagged_item( self, trans, item_class_name, id, check_ownership=True ):
+        tagged_item = self.get_object( trans, id, item_class_name, check_ownership=check_ownership, check_accessible=True )
+        return tagged_item
+
+    def _remove_items_tag( self, trans, item_class_name, id, tag_name ):
+        """Remove a tag from an item."""
+        user = trans.user
+        tagged_item = self._get_tagged_item( trans, item_class_name, id )
+        deleted = tagged_item and self.get_tag_handler( trans ).remove_item_tag( trans, user, tagged_item, tag_name )
+        trans.sa_session.flush()
+        return deleted
+
+    def _apply_item_tag( self, trans, item_class_name, id, tag_name, tag_value=None ):
+        user = trans.user
+        tagged_item = self._get_tagged_item( trans, item_class_name, id )
+        tag_assoc = self.get_tag_handler( trans ).apply_item_tag( user, tagged_item, tag_name, tag_value )
+        trans.sa_session.flush()
+        return tag_assoc
+
+    def _get_item_tag_assoc( self, trans, item_class_name, id, tag_name ):
+        user = trans.user
+        tagged_item = self._get_tagged_item( trans, item_class_name, id )
+        log.debug( "In get_item_tag_assoc with tagged_item %s", tagged_item )
+        return self.get_tag_handler( trans )._get_item_tag_assoc( user, tagged_item, tag_name )
+
+    def set_tags_from_list( self, trans, item, new_tags_list, user=None ):
+        # Method deprecated - try to use TagsHandler instead.
+        tags_manager = tags.GalaxyTagManager( trans.app )
+        return tags_manager.set_tags_from_list( user, item, new_tags_list )
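+        # Example of the deprecated call (hypothetical item):
+        #     self.set_tags_from_list( trans, history, [ 'rna', 'group:seq' ] )
+        # The list is handed straight to GalaxyTagManager.set_tags_from_list.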
+
+    def get_user_tags_used( self, trans, user=None ):
+        """
+        Return a list of distinct 'user_tname:user_value' strings that the
+        given user has used.
+
+        user defaults to trans.user.
+        Returns an empty list if no user is given and trans.user is anonymous.
+        """
+        # TODO: for lack of a UsesUserMixin, this lives here for now; it may
+        # belong in UsesTagsMixin instead.
+        user = user or trans.user
+        if not user:
+            return []
+
+        # get all the taggable model TagAssociations
+        tag_models = [ v.tag_assoc_class for v in trans.app.tag_handler.item_tag_assoc_info.values() ]
+        # create a union of subqueries, one per tag model, for this user,
+        # selecting only the tname and user_value columns
+        all_tags_query = None
+        for tag_model in tag_models:
+            subq = ( trans.sa_session.query( tag_model.user_tname, tag_model.user_value )
+                     .filter( tag_model.user == user ) )
+            all_tags_query = subq if all_tags_query is None else all_tags_query.union( subq )
+
+        # if nothing init'd the query, bail
+        if all_tags_query is None:
+            return []
+
+        # boil the tag tuples down into a sorted list of DISTINCT name:val strings
+        tags = all_tags_query.distinct().all()
+        tags = [( ( name + ':' + val ) if val else name ) for name, val in tags ]
+        return sorted( tags )
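+        # For example, a user who tagged one history 'group:rna' and another
+        # simply 'name' gets back the sorted list [ 'group:rna', 'name' ].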
+
+
+class UsesExtendedMetadataMixin( SharableItemSecurityMixin ):
+    """ Mixin for getting and setting item extended metadata. """
+
+    def get_item_extended_metadata_obj( self, trans, item ):
+        """
+        Given an item object (such as a LibraryDatasetDatasetAssociation), find the object
+        of the associated extended metadata
+        """
+        if item.extended_metadata:
+            return item.extended_metadata
+        return None
+
+    def set_item_extended_metadata_obj( self, trans, item, extmeta_obj, check_writable=False):
+        if item.__class__ == LibraryDatasetDatasetAssociation:
+            if not check_writable or trans.app.security_agent.can_modify_library_item( trans.get_current_user_roles(), item, trans.user ):
+                item.extended_metadata = extmeta_obj
+                trans.sa_session.flush()
+        if item.__class__ == HistoryDatasetAssociation:
+            history = None
+            if check_writable:
+                history = self.security_check( trans, item, check_ownership=True, check_accessible=True )
+            else:
+                history = self.security_check( trans, item, check_ownership=False, check_accessible=True )
+            if history:
+                item.extended_metadata = extmeta_obj
+                trans.sa_session.flush()
+
+    def unset_item_extended_metadata_obj( self, trans, item, check_writable=False):
+        if item.__class__ == LibraryDatasetDatasetAssociation:
+            if not check_writable or trans.app.security_agent.can_modify_library_item( trans.get_current_user_roles(), item, trans.user ):
+                item.extended_metadata = None
+                trans.sa_session.flush()
+        if item.__class__ == HistoryDatasetAssociation:
+            history = None
+            if check_writable:
+                history = self.security_check( trans, item, check_ownership=True, check_accessible=True )
+            else:
+                history = self.security_check( trans, item, check_ownership=False, check_accessible=True )
+            if history:
+                item.extended_metadata = None
+                trans.sa_session.flush()
+
+    def create_extended_metadata(self, trans, extmeta):
+        """
+        Create/index an extended metadata object. The returned object is
+        not associated with any items
+        """
+        ex_meta = ExtendedMetadata(extmeta)
+        trans.sa_session.add( ex_meta )
+        trans.sa_session.flush()
+        for path, value in self._scan_json_block(extmeta):
+            meta_i = ExtendedMetadataIndex(ex_meta, path, value)
+            trans.sa_session.add(meta_i)
+        trans.sa_session.flush()
+        return ex_meta
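+        # For example, create_extended_metadata( trans, { "data": [ 1 ] } )
+        # stores the ExtendedMetadata row plus one ExtendedMetadataIndex row
+        # with path '/data[0]' and value '1'.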
+
+    def delete_extended_metadata( self, trans, item):
+        if item.__class__ == ExtendedMetadata:
+            trans.sa_session.delete( item )
+            trans.sa_session.flush()
+
+    def _scan_json_block(self, meta, prefix=""):
+        """
+        Scan a json style data structure, and emit all fields and their values.
+        Example paths
+
+        Data
+        { "data" : [ 1, 2, 3 ] }
+
+        Path:
+        /data == [1,2,3]
+
+        /data/[0] == 1
+
+        """
+        if isinstance(meta, dict):
+            for a in meta:
+                for path, value in self._scan_json_block(meta[a], prefix + "/" + a):
+                    yield path, value
+        elif isinstance(meta, list):
+            for i, a in enumerate(meta):
+                for path, value in self._scan_json_block(a, prefix + "[%d]" % (i)):
+                    yield path, value
+        else:
+            # BUG: everything is cast to a string, which can lead to false
+            # positives for cross-type comparisons, e.g. "True" == True
+            yield prefix, ("%s" % (meta)).encode("utf8", errors='replace')
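+            # A nested example (hypothetical input): { "a": { "b": [ 7 ] } }
+            # yields the single pair ( '/a/b[0]', '7' ) with the value utf-8
+            # encoded; dict keys extend the path with '/key' and list items
+            # with '[index]'.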
+
+
+class ControllerUnavailable( Exception ):
+    """
+    Deprecated: `BaseController` used to be available under the name `Root`
+    """
+    pass
+
+# ---- Utility methods -------------------------------------------------------
+
+
+def sort_by_attr( seq, attr ):
+    """
+    Sort a sequence of objects by one of their attributes.
+    Arguments:
+    seq  - the list or any sequence (including immutable ones) of objects to sort.
+    attr - the name of the attribute to sort by.
+    """
+    # A key-based sort is stable and never compares the objects themselves
+    # (which can be expensive or prohibited when attribute values are equal);
+    # it replaces the older Schwartzian-transform idiom here, which relied on
+    # the Python 2-only map( None, ... ) and xrange.
+    return sorted( seq, key=operator.attrgetter( attr ) )
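+# Usage sketch (hypothetical objects): sort_by_attr( jobs, 'state' ) returns a
+# new list of the jobs ordered by their 'state' attribute; the input sequence
+# is left unmodified.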
diff --git a/lib/galaxy/web/base/controllers/__init__.py b/lib/galaxy/web/base/controllers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/web/base/controllers/admin.py b/lib/galaxy/web/base/controllers/admin.py
new file mode 100644
index 0000000..e2fdf0e
--- /dev/null
+++ b/lib/galaxy/web/base/controllers/admin.py
@@ -0,0 +1,1189 @@
+import logging
+import os
+from datetime import datetime, timedelta
+from six import string_types
+from string import punctuation as PUNCTUATION
+
+from sqlalchemy import and_, false, func, or_
+
+import galaxy.queue_worker
+from galaxy import util, web
+from galaxy.util import inflector
+from galaxy.web.form_builder import CheckboxField
+from tool_shed.util import repository_util
+from tool_shed.util.web_util import escape
+
+log = logging.getLogger( __name__ )
+
+
+class Admin( object ):
+    # Override these
+    user_list_grid = None
+    role_list_grid = None
+    group_list_grid = None
+    quota_list_grid = None
+    repository_list_grid = None
+    tool_version_list_grid = None
+    delete_operation = None
+    undelete_operation = None
+    purge_operation = None
+
+    @web.expose
+    @web.require_admin
+    def index( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', ''  ) )
+        status = kwd.get( 'status', 'done' )
+        if trans.webapp.name == 'galaxy':
+            is_repo_installed = trans.install_model.context.query( trans.install_model.ToolShedRepository ).first() is not None
+            installing_repository_ids = repository_util.get_ids_of_tool_shed_repositories_being_installed( trans.app, as_string=True )
+            return trans.fill_template( '/webapps/galaxy/admin/index.mako',
+                                        is_repo_installed=is_repo_installed,
+                                        installing_repository_ids=installing_repository_ids,
+                                        message=message,
+                                        status=status )
+        else:
+            return trans.fill_template( '/webapps/tool_shed/admin/index.mako',
+                                        message=message,
+                                        status=status )
+
+    @web.expose
+    @web.require_admin
+    def center( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', ''  ) )
+        status = kwd.get( 'status', 'done' )
+        if trans.webapp.name == 'galaxy':
+            is_repo_installed = trans.install_model.context.query( trans.install_model.ToolShedRepository ).first() is not None
+            installing_repository_ids = repository_util.get_ids_of_tool_shed_repositories_being_installed( trans.app, as_string=True )
+            return trans.fill_template( '/webapps/galaxy/admin/center.mako',
+                                        is_repo_installed=is_repo_installed,
+                                        installing_repository_ids=installing_repository_ids,
+                                        message=message,
+                                        status=status )
+        else:
+            return trans.fill_template( '/webapps/tool_shed/admin/center.mako',
+                                        message=message,
+                                        status=status )
+
+    @web.expose
+    @web.require_admin
+    def package_tool( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        toolbox = self.app.toolbox
+        tool_id = None
+        if params.get( 'package_tool_button', False ):
+            tool_id = params.get('tool_id', None)
+            try:
+                tool_tarball = trans.app.toolbox.package_tool( trans, tool_id )
+                trans.response.set_content_type( 'application/x-gzip' )
+                download_file = open( tool_tarball, 'rb' )
+                os.unlink( tool_tarball )
+                tarball_path, filename = os.path.split( tool_tarball )
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.tgz"' % ( tool_id )
+                return download_file
+            except Exception:
+                return trans.fill_template( '/admin/package_tool.mako',
+                                            tool_id=tool_id,
+                                            toolbox=toolbox,
+                                            message=message,
+                                            status='error' )
+        # Fall through and render the packaging form when the button has not
+        # been clicked yet (otherwise the method would return None).
+        return trans.fill_template( '/admin/package_tool.mako',
+                                    tool_id=tool_id,
+                                    toolbox=toolbox,
+                                    message=message,
+                                    status='done' )
+
+    @web.expose
+    @web.require_admin
+    def reload_tool( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        toolbox = self.app.toolbox
+        tool_id = None
+        if params.get( 'reload_tool_button', False ):
+            tool_id = kwd.get( 'tool_id', None )
+            galaxy.queue_worker.send_control_task( trans.app, 'reload_tool', noop_self=True, kwargs={ 'tool_id': tool_id } )
+            message, status = trans.app.toolbox.reload_tool_by_id( tool_id )
+        return trans.fill_template( '/admin/reload_tool.mako',
+                                    tool_id=tool_id,
+                                    toolbox=toolbox,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def tool_versions( self, trans, **kwd ):
+        if 'message' not in kwd or not kwd[ 'message' ]:
+            kwd[ 'message' ] = 'Tool ids for tools that are currently loaded into the tool panel are highlighted in green (click to display).'
+        return self.tool_version_list_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def roles( self, trans, **kwargs ):
+        if 'operation' in kwargs:
+            operation = kwargs[ 'operation' ].lower().replace( '+', ' ' )
+            if operation == "roles":
+                return self.role( trans, **kwargs )
+            if operation == "create":
+                return self.create_role( trans, **kwargs )
+            if operation == "delete":
+                return self.mark_role_deleted( trans, **kwargs )
+            if operation == "undelete":
+                return self.undelete_role( trans, **kwargs )
+            if operation == "purge":
+                return self.purge_role( trans, **kwargs )
+            if operation == "manage users and groups":
+                return self.manage_users_and_groups_for_role( trans, **kwargs )
+            if operation == "manage role associations":
+                # This is currently used only in the Tool Shed.
+                return self.manage_role_associations( trans, **kwargs )
+            if operation == "rename":
+                return self.rename_role( trans, **kwargs )
+        # Render the list view
+        return self.role_list_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_admin
+    def create_role( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        name = util.restore_text( params.get( 'name', '' ) )
+        description = util.restore_text( params.get( 'description', '' ) )
+        in_users = util.listify( params.get( 'in_users', [] ) )
+        out_users = util.listify( params.get( 'out_users', [] ) )
+        in_groups = util.listify( params.get( 'in_groups', [] ) )
+        out_groups = util.listify( params.get( 'out_groups', [] ) )
+        create_group_for_role = params.get( 'create_group_for_role', '' )
+        create_group_for_role_checked = CheckboxField.is_checked( create_group_for_role )
+        ok = True
+        if params.get( 'create_role_button', False ):
+            if not name or not description:
+                message = "Enter a valid name and a description."
+                status = 'error'
+                ok = False
+            elif trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name == name ).first():
+                message = "Role names must be unique and a role with that name already exists, so choose another name."
+                status = 'error'
+                ok = False
+            else:
+                # Create the role
+                role = trans.app.model.Role( name=name, description=description, type=trans.app.model.Role.types.ADMIN )
+                trans.sa_session.add( role )
+                # Create the UserRoleAssociations
+                for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
+                    ura = trans.app.model.UserRoleAssociation( user, role )
+                    trans.sa_session.add( ura )
+                # Create the GroupRoleAssociations
+                for group in [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in in_groups ]:
+                    gra = trans.app.model.GroupRoleAssociation( group, role )
+                    trans.sa_session.add( gra )
+                if create_group_for_role_checked:
+                    # Create the group
+                    group = trans.app.model.Group( name=name )
+                    trans.sa_session.add( group )
+                    # Associate the group with the role
+                    gra = trans.app.model.GroupRoleAssociation( group, role )
+                    trans.sa_session.add( gra )
+                    num_in_groups = len( in_groups ) + 1
+                else:
+                    num_in_groups = len( in_groups )
+                trans.sa_session.flush()
+                message = "Role '%s' has been created with %d associated users and %d associated groups.  " \
+                    % ( role.name, len( in_users ), num_in_groups )
+                if create_group_for_role_checked:
+                    message += 'One of the groups associated with this role is the newly created group with the same name.'
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='roles',
+                                                           message=util.sanitize_text( message ),
+                                                           status='done' ) )
+        if ok:
+            for user in trans.sa_session.query( trans.app.model.User ) \
+                                        .filter( trans.app.model.User.table.c.deleted == false() ) \
+                                        .order_by( trans.app.model.User.table.c.email ):
+                out_users.append( ( user.id, user.email ) )
+            for group in trans.sa_session.query( trans.app.model.Group ) \
+                                         .filter( trans.app.model.Group.table.c.deleted == false() ) \
+                                         .order_by( trans.app.model.Group.table.c.name ):
+                out_groups.append( ( group.id, group.name ) )
+        return trans.fill_template( '/admin/dataset_security/role/role_create.mako',
+                                    name=name,
+                                    description=description,
+                                    in_users=in_users,
+                                    out_users=out_users,
+                                    in_groups=in_groups,
+                                    out_groups=out_groups,
+                                    create_group_for_role_checked=create_group_for_role_checked,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def rename_role( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        id = params.get( 'id', None )
+        if not id:
+            message = "No role ids received for renaming"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='roles',
+                                                       message=message,
+                                                       status='error' ) )
+        role = get_role( trans, id )
+        if params.get( 'rename_role_button', False ):
+            old_name = role.name
+            new_name = util.restore_text( params.name )
+            new_description = util.restore_text( params.description )
+            if not new_name:
+                message = 'Enter a valid name'
+                status = 'error'
+            else:
+                existing_role = trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name == new_name ).first()
+                if existing_role and existing_role.id != role.id:
+                    message = 'A role with that name already exists'
+                    status = 'error'
+                else:
+                    if not ( role.name == new_name and role.description == new_description ):
+                        role.name = new_name
+                        role.description = new_description
+                        trans.sa_session.add( role )
+                        trans.sa_session.flush()
+                        message = "Role '%s' has been renamed to '%s'" % ( old_name, new_name )
+                    return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                      action='roles',
+                                                                      message=util.sanitize_text( message ),
+                                                                      status='done' ) )
+        return trans.fill_template( '/admin/dataset_security/role/role_rename.mako',
+                                    role=role,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def manage_users_and_groups_for_role( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        id = params.get( 'id', None )
+        if not id:
+            message = "No role ids received for managing users and groups"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='roles',
+                                                       message=message,
+                                                       status='error' ) )
+        role = get_role( trans, id )
+        if params.get( 'role_members_edit_button', False ):
+            in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
+            if trans.webapp.name == 'galaxy':
+                for ura in role.users:
+                    user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
+                    if user not in in_users:
+                        # Delete DefaultUserPermissions for previously associated users that have been removed from the role
+                        for dup in user.default_permissions:
+                            if role == dup.role:
+                                trans.sa_session.delete( dup )
+                        # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role
+                        for history in user.histories:
+                            for dhp in history.default_permissions:
+                                if role == dhp.role:
+                                    trans.sa_session.delete( dhp )
+                        trans.sa_session.flush()
+            in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
+            trans.app.security_agent.set_entity_role_associations( roles=[ role ], users=in_users, groups=in_groups )
+            trans.sa_session.refresh( role )
+            message = "Role '%s' has been updated with %d associated users and %d associated groups" % ( role.name, len( in_users ), len( in_groups ) )
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='roles',
+                                                       message=util.sanitize_text( message ),
+                                                       status=status ) )
+        in_users = []
+        out_users = []
+        in_groups = []
+        out_groups = []
+        for user in trans.sa_session.query( trans.app.model.User ) \
+                                    .filter( trans.app.model.User.table.c.deleted == false() ) \
+                                    .order_by( trans.app.model.User.table.c.email ):
+            if user in [ x.user for x in role.users ]:
+                in_users.append( ( user.id, user.email ) )
+            else:
+                out_users.append( ( user.id, user.email ) )
+        for group in trans.sa_session.query( trans.app.model.Group ) \
+                                     .filter( trans.app.model.Group.table.c.deleted == false() ) \
+                                     .order_by( trans.app.model.Group.table.c.name ):
+            if group in [ x.group for x in role.groups ]:
+                in_groups.append( ( group.id, group.name ) )
+            else:
+                out_groups.append( ( group.id, group.name ) )
+        library_dataset_actions = {}
+        if trans.webapp.name == 'galaxy' and len(role.dataset_actions) < 25:
+            # Build a nested mapping of the form
+            #     { library: { folder_path: [ action, action, ... ] } }
+            # covering each LibraryDatasetDatasetAssociation whose
+            # DatasetPermissions are associated with this Role.
+            for dp in role.dataset_actions:
+                for ldda in trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ) \
+                                            .filter( trans.app.model.LibraryDatasetDatasetAssociation.dataset_id == dp.dataset_id ):
+                    root_found = False
+                    folder_path = ''
+                    folder = ldda.library_dataset.folder
+                    while not root_found:
+                        folder_path = '%s / %s' % ( folder.name, folder_path )
+                        if not folder.parent:
+                            root_found = True
+                        else:
+                            folder = folder.parent
+                    folder_path = '%s %s' % ( folder_path, ldda.name )
+                    library = trans.sa_session.query( trans.app.model.Library ) \
+                                              .filter( trans.app.model.Library.table.c.root_folder_id == folder.id ) \
+                                              .first()
+                    if library not in library_dataset_actions:
+                        library_dataset_actions[ library ] = {}
+                    try:
+                        library_dataset_actions[ library ][ folder_path ].append( dp.action )
+                    except KeyError:
+                        library_dataset_actions[ library ][ folder_path ] = [ dp.action ]
+        else:
+            message = "Not showing associated datasets, there are too many."
+            status = 'info'
+        return trans.fill_template( '/admin/dataset_security/role/role.mako',
+                                    role=role,
+                                    in_users=in_users,
+                                    out_users=out_users,
+                                    in_groups=in_groups,
+                                    out_groups=out_groups,
+                                    library_dataset_actions=library_dataset_actions,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def mark_role_deleted( self, trans, **kwd ):
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No role ids received for deleting"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='roles',
+                                                       message=message,
+                                                       status='error' ) )
+        ids = util.listify( id )
+        message = "Deleted %d roles: " % len( ids )
+        for role_id in ids:
+            role = get_role( trans, role_id )
+            role.deleted = True
+            trans.sa_session.add( role )
+            trans.sa_session.flush()
+            message += " %s " % role.name
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='roles',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def undelete_role( self, trans, **kwd ):
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No role ids received for undeleting"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='roles',
+                                                       message=message,
+                                                       status='error' ) )
+        ids = util.listify( id )
+        count = 0
+        undeleted_roles = ""
+        for role_id in ids:
+            role = get_role( trans, role_id )
+            if not role.deleted:
+                message = "Role '%s' has not been deleted, so it cannot be undeleted." % role.name
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='roles',
+                                                           message=util.sanitize_text( message ),
+                                                           status='error' ) )
+            role.deleted = False
+            trans.sa_session.add( role )
+            trans.sa_session.flush()
+            count += 1
+            undeleted_roles += " %s" % role.name
+        message = "Undeleted %d roles: %s" % ( count, undeleted_roles )
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='roles',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def purge_role( self, trans, **kwd ):
+        # This method should only be called for a Role that has previously been deleted.
+        # Purging a deleted Role deletes all of the following from the database:
+        # - UserRoleAssociations where role_id == Role.id
+        # - DefaultUserPermissions where role_id == Role.id
+        # - DefaultHistoryPermissions where role_id == Role.id
+        # - GroupRoleAssociations where role_id == Role.id
+        # - DatasetPermissions where role_id == Role.id
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No role ids received for purging"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='roles',
+                                                       message=util.sanitize_text( message ),
+                                                       status='error' ) )
+        ids = util.listify( id )
+        message = "Purged %d roles: " % len( ids )
+        for role_id in ids:
+            role = get_role( trans, role_id )
+            if not role.deleted:
+                message = "Role '%s' has not been deleted, so it cannot be purged." % role.name
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='roles',
+                                                           message=util.sanitize_text( message ),
+                                                           status='error' ) )
+            # Delete UserRoleAssociations
+            for ura in role.users:
+                user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id )
+                # Delete DefaultUserPermissions for associated users
+                for dup in user.default_permissions:
+                    if role == dup.role:
+                        trans.sa_session.delete( dup )
+                # Delete DefaultHistoryPermissions for associated users
+                for history in user.histories:
+                    for dhp in history.default_permissions:
+                        if role == dhp.role:
+                            trans.sa_session.delete( dhp )
+                trans.sa_session.delete( ura )
+            # Delete GroupRoleAssociations
+            for gra in role.groups:
+                trans.sa_session.delete( gra )
+            # Delete DatasetPermissions
+            for dp in role.dataset_actions:
+                trans.sa_session.delete( dp )
+            trans.sa_session.flush()
+            message += " %s " % role.name
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='roles',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def groups( self, trans, **kwargs ):
+        if 'operation' in kwargs:
+            operation = kwargs[ 'operation' ].lower().replace( '+', ' ' )
+            if operation == "groups":
+                return self.group( trans, **kwargs )
+            if operation == "create":
+                return self.create_group( trans, **kwargs )
+            if operation == "delete":
+                return self.mark_group_deleted( trans, **kwargs )
+            if operation == "undelete":
+                return self.undelete_group( trans, **kwargs )
+            if operation == "purge":
+                return self.purge_group( trans, **kwargs )
+            if operation == "manage users and roles":
+                return self.manage_users_and_roles_for_group( trans, **kwargs )
+            if operation == "rename":
+                return self.rename_group( trans, **kwargs )
+        # Render the list view
+        return self.group_list_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_admin
+    def rename_group( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        id = params.get( 'id', None )
+        if not id:
+            message = "No group ids received for renaming"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='groups',
+                                                       message=message,
+                                                       status='error' ) )
+        group = get_group( trans, id )
+        if params.get( 'rename_group_button', False ):
+            old_name = group.name
+            new_name = util.restore_text( params.name )
+            if not new_name:
+                message = 'Enter a valid name'
+                status = 'error'
+            else:
+                existing_group = trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name == new_name ).first()
+                if existing_group and existing_group.id != group.id:
+                    message = 'A group with that name already exists'
+                    status = 'error'
+                else:
+                    if group.name != new_name:
+                        group.name = new_name
+                        trans.sa_session.add( group )
+                        trans.sa_session.flush()
+                        message = "Group '%s' has been renamed to '%s'" % ( old_name, new_name )
+                    return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                      action='groups',
+                                                                      message=util.sanitize_text( message ),
+                                                                      status='done' ) )
+        return trans.fill_template( '/admin/dataset_security/group/group_rename.mako',
+                                    group=group,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def manage_users_and_roles_for_group( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        group = get_group( trans, params.id )
+        if params.get( 'group_roles_users_edit_button', False ):
+            in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( params.in_roles ) ]
+            in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
+            trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=in_roles, users=in_users )
+            trans.sa_session.refresh( group )
+            message += "Group '%s' has been updated with %d associated roles and %d associated users" % ( group.name, len( in_roles ), len( in_users ) )
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='groups',
+                                                       message=util.sanitize_text( message ),
+                                                       status=status ) )
+        in_roles = []
+        out_roles = []
+        in_users = []
+        out_users = []
+        for role in trans.sa_session.query(trans.app.model.Role ) \
+                                    .filter( trans.app.model.Role.table.c.deleted == false() ) \
+                                    .order_by( trans.app.model.Role.table.c.name ):
+            if role in [ x.role for x in group.roles ]:
+                in_roles.append( ( role.id, role.name ) )
+            else:
+                out_roles.append( ( role.id, role.name ) )
+        for user in trans.sa_session.query( trans.app.model.User ) \
+                                    .filter( trans.app.model.User.table.c.deleted == false() ) \
+                                    .order_by( trans.app.model.User.table.c.email ):
+            if user in [ x.user for x in group.users ]:
+                in_users.append( ( user.id, user.email ) )
+            else:
+                out_users.append( ( user.id, user.email ) )
+        message += 'Group %s is currently associated with %d roles and %d users' % ( group.name, len( in_roles ), len( in_users ) )
+        return trans.fill_template( '/admin/dataset_security/group/group.mako',
+                                    group=group,
+                                    in_roles=in_roles,
+                                    out_roles=out_roles,
+                                    in_users=in_users,
+                                    out_users=out_users,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def create_group( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        name = util.restore_text( params.get( 'name', '' ) )
+        in_users = util.listify( params.get( 'in_users', [] ) )
+        out_users = util.listify( params.get( 'out_users', [] ) )
+        in_roles = util.listify( params.get( 'in_roles', [] ) )
+        out_roles = util.listify( params.get( 'out_roles', [] ) )
+        create_role_for_group = params.get( 'create_role_for_group', '' )
+        create_role_for_group_checked = CheckboxField.is_checked( create_role_for_group )
+        ok = True
+        if params.get( 'create_group_button', False ):
+            if not name:
+                message = "Enter a valid name."
+                status = 'error'
+                ok = False
+            elif trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name == name ).first():
+                message = "Group names must be unique and a group with that name already exists, so choose another name."
+                status = 'error'
+                ok = False
+            else:
+                # Create the group
+                group = trans.app.model.Group( name=name )
+                trans.sa_session.add( group )
+                trans.sa_session.flush()
+                # Create the UserRoleAssociations
+                for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
+                    uga = trans.app.model.UserGroupAssociation( user, group )
+                    trans.sa_session.add( uga )
+                # Create the GroupRoleAssociations
+                for role in [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in in_roles ]:
+                    gra = trans.app.model.GroupRoleAssociation( group, role )
+                    trans.sa_session.add( gra )
+                if create_role_for_group_checked:
+                    # Create the role
+                    role = trans.app.model.Role( name=name, description='Role for group %s' % name )
+                    trans.sa_session.add( role )
+                    # Associate the role with the group
+                    gra = trans.app.model.GroupRoleAssociation( group, role )
+                    trans.sa_session.add( gra )
+                    num_in_roles = len( in_roles ) + 1
+                else:
+                    num_in_roles = len( in_roles )
+                trans.sa_session.flush()
+                message = "Group '%s' has been created with %d associated users and %d associated roles.  " \
+                    % ( group.name, len( in_users ), num_in_roles )
+                if create_role_for_group_checked:
+                    message += 'One of the roles associated with this group is the newly created role with the same name.'
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='groups',
+                                                           message=util.sanitize_text( message ),
+                                                           status='done' ) )
+        if ok:
+            for user in trans.sa_session.query( trans.app.model.User ) \
+                                        .filter( trans.app.model.User.table.c.deleted == false() ) \
+                                        .order_by( trans.app.model.User.table.c.email ):
+                out_users.append( ( user.id, user.email ) )
+            for role in trans.sa_session.query( trans.app.model.Role ) \
+                                        .filter( trans.app.model.Role.table.c.deleted == false() ) \
+                                        .order_by( trans.app.model.Role.table.c.name ):
+                out_roles.append( ( role.id, role.name ) )
+        return trans.fill_template( '/admin/dataset_security/group/group_create.mako',
+                                    name=name,
+                                    in_users=in_users,
+                                    out_users=out_users,
+                                    in_roles=in_roles,
+                                    out_roles=out_roles,
+                                    create_role_for_group_checked=create_role_for_group_checked,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def mark_group_deleted( self, trans, **kwd ):
+        params = util.Params( kwd )
+        id = params.get( 'id', None )
+        if not id:
+            message = "No group ids received for marking deleted"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='groups',
+                                                       message=message,
+                                                       status='error' ) )
+        ids = util.listify( id )
+        message = "Deleted %d groups: " % len( ids )
+        for group_id in ids:
+            group = get_group( trans, group_id )
+            group.deleted = True
+            trans.sa_session.add( group )
+            trans.sa_session.flush()
+            message += " %s " % group.name
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='groups',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def undelete_group( self, trans, **kwd ):
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No group ids received for undeleting"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='groups',
+                                                       message=message,
+                                                       status='error' ) )
+        ids = util.listify( id )
+        count = 0
+        undeleted_groups = ""
+        for group_id in ids:
+            group = get_group( trans, group_id )
+            if not group.deleted:
+                message = "Group '%s' has not been deleted, so it cannot be undeleted." % group.name
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='groups',
+                                                           message=util.sanitize_text( message ),
+                                                           status='error' ) )
+            group.deleted = False
+            trans.sa_session.add( group )
+            trans.sa_session.flush()
+            count += 1
+            undeleted_groups += " %s" % group.name
+        message = "Undeleted %d groups: %s" % ( count, undeleted_groups )
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='groups',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def purge_group( self, trans, **kwd ):
+        # This method should only be called for a Group that has previously been deleted.
+        # Purging a deleted Group simply deletes all UserGroupAssociations and GroupRoleAssociations.
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No group ids received for purging"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='groups',
+                                                       message=util.sanitize_text( message ),
+                                                       status='error' ) )
+        ids = util.listify( id )
+        message = "Purged %d groups: " % len( ids )
+        for group_id in ids:
+            group = get_group( trans, group_id )
+            if not group.deleted:
+                # We should never reach here, but just in case there is a bug somewhere...
+                message = "Group '%s' has not been deleted, so it cannot be purged." % group.name
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='groups',
+                                                           message=util.sanitize_text( message ),
+                                                           status='error' ) )
+            # Delete UserGroupAssociations
+            for uga in group.users:
+                trans.sa_session.delete( uga )
+            # Delete GroupRoleAssociations
+            for gra in group.roles:
+                trans.sa_session.delete( gra )
+            trans.sa_session.flush()
+            message += " %s " % group.name
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='groups',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def create_new_user( self, trans, **kwd ):
+        return trans.response.send_redirect( web.url_for( controller='user',
+                                                          action='create',
+                                                          cntrller='admin' ) )
+
+    @web.expose
+    @web.require_admin
+    def reset_user_password( self, trans, **kwd ):
+        user_id = kwd.get( 'id', None )
+        if not user_id:
+            message = "No users received for resetting passwords."
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='users',
+                                                       message=message,
+                                                       status='error' ) )
+        user_ids = util.listify( user_id )
+        if 'reset_user_password_button' in kwd:
+            message = ''
+            status = ''
+            for user_id in user_ids:
+                user = get_user( trans, user_id )
+                password = kwd.get( 'password', '' )
+                confirm = kwd.get( 'confirm', '' )
+                if len( password ) < 6:
+                    message = "Use a password of at least 6 characters."
+                    status = 'error'
+                    break
+                elif password != confirm:
+                    message = "Passwords do not match."
+                    status = 'error'
+                    break
+                else:
+                    user.set_password_cleartext( password )
+                    trans.sa_session.add( user )
+                    trans.sa_session.flush()
+            if not message and not status:
+                message = "Passwords reset for %d %s." % ( len( user_ids ), inflector.cond_plural( len( user_ids ), 'user' ) )
+                status = 'done'
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='users',
+                                                       message=util.sanitize_text( message ),
+                                                       status=status ) )
+        users = [ get_user( trans, user_id ) for user_id in user_ids ]
+        if len( user_ids ) > 1:
+            user_id = ','.join( user_ids )
+        return trans.fill_template( '/admin/user/reset_password.mako',
+                                    id=user_id,
+                                    users=users,
+                                    password='',
+                                    confirm='' )
+
+    @web.expose
+    @web.require_admin
+    def mark_user_deleted( self, trans, **kwd ):
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No user ids received for deleting"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='users',
+                                                       message=message,
+                                                       status='error' ) )
+        ids = util.listify( id )
+        message = "Deleted %d users: " % len( ids )
+        for user_id in ids:
+            user = get_user( trans, user_id )
+            user.deleted = True
+            trans.sa_session.add( user )
+            trans.sa_session.flush()
+            message += " %s " % user.email
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='users',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def undelete_user( self, trans, **kwd ):
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No user ids received for undeleting"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='users',
+                                                       message=message,
+                                                       status='error' ) )
+        ids = util.listify( id )
+        count = 0
+        undeleted_users = ""
+        for user_id in ids:
+            user = get_user( trans, user_id )
+            if not user.deleted:
+                message = "User '%s' has not been deleted, so it cannot be undeleted." % user.email
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='users',
+                                                           message=util.sanitize_text( message ),
+                                                           status='error' ) )
+            user.deleted = False
+            trans.sa_session.add( user )
+            trans.sa_session.flush()
+            count += 1
+            undeleted_users += " %s" % user.email
+        message = "Undeleted %d users: %s" % ( count, undeleted_users )
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='users',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def purge_user( self, trans, **kwd ):
+        # This method should only be called for a User that has previously been deleted.
+        # We keep the User in the database ( marked as purged ), along with the user's
+        # private role and its associations, in case we want the ability to unpurge
+        # the user some time in the future.
+        # Purging a deleted User deletes all of the following:
+        # - History where user_id = User.id
+        #    - HistoryDatasetAssociation where history_id = History.id
+        #    - Dataset where HistoryDatasetAssociation.dataset_id = Dataset.id
+        # - UserGroupAssociation where user_id == User.id
+        # - UserRoleAssociation where user_id == User.id EXCEPT FOR THE PRIVATE ROLE
+        # - UserAddress where user_id == User.id
+        # Purging Histories and Datasets must be handled via the cleanup_datasets.py script
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No user ids received for purging"
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='users',
+                                                       message=util.sanitize_text( message ),
+                                                       status='error' ) )
+        ids = util.listify( id )
+        message = "Purged %d users: " % len( ids )
+        for user_id in ids:
+            user = get_user( trans, user_id )
+            if not user.deleted:
+                # We should never reach here, but just in case there is a bug somewhere...
+                message = "User '%s' has not been deleted, so it cannot be purged." % user.email
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='users',
+                                                           message=util.sanitize_text( message ),
+                                                           status='error' ) )
+            private_role = trans.app.security_agent.get_private_user_role( user )
+            # Delete History
+            for h in user.active_histories:
+                trans.sa_session.refresh( h )
+                for hda in h.active_datasets:
+                    # Mark the Dataset as deleted
+                    d = trans.sa_session.query( trans.app.model.Dataset ).get( hda.dataset_id )
+                    if not d.deleted:
+                        d.deleted = True
+                        trans.sa_session.add( d )
+                    # Mark the HistoryDatasetAssociation as deleted
+                    hda.deleted = True
+                    trans.sa_session.add( hda )
+                h.deleted = True
+                trans.sa_session.add( h )
+            # Delete UserGroupAssociations
+            for uga in user.groups:
+                trans.sa_session.delete( uga )
+            # Delete UserRoleAssociations EXCEPT FOR THE PRIVATE ROLE
+            for ura in user.roles:
+                if ura.role_id != private_role.id:
+                    trans.sa_session.delete( ura )
+            # Delete UserAddresses
+            for address in user.addresses:
+                trans.sa_session.delete( address )
+            # Purge the user
+            user.purged = True
+            trans.sa_session.add( user )
+            trans.sa_session.flush()
+            message += "%s " % user.email
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='users',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def users( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "roles":
+                return self.user( trans, **kwd )
+            elif operation == "reset password":
+                return self.reset_user_password( trans, **kwd )
+            elif operation == "delete":
+                return self.mark_user_deleted( trans, **kwd )
+            elif operation == "undelete":
+                return self.undelete_user( trans, **kwd )
+            elif operation == "purge":
+                return self.purge_user( trans, **kwd )
+            elif operation == "create":
+                return self.create_new_user( trans, **kwd )
+            elif operation == "information":
+                user_id = kwd.get( 'id', None )
+                if not user_id:
+                    kwd[ 'message' ] = util.sanitize_text( "Invalid user id (%s) received" % str( user_id ) )
+                    kwd[ 'status' ] = 'error'
+                else:
+                    return trans.response.send_redirect( web.url_for( controller='user',
+                                                                      action='manage_user_info',
+                                                                      cntrller='admin',
+                                                                      **kwd ) )
+            elif operation == "manage roles and groups":
+                return self.manage_roles_and_groups_for_user( trans, **kwd )
+        if trans.app.config.allow_user_deletion:
+            if self.delete_operation not in self.user_list_grid.operations:
+                self.user_list_grid.operations.append( self.delete_operation )
+            if self.undelete_operation not in self.user_list_grid.operations:
+                self.user_list_grid.operations.append( self.undelete_operation )
+            if self.purge_operation not in self.user_list_grid.operations:
+                self.user_list_grid.operations.append( self.purge_operation )
+        # Render the list view
+        return self.user_list_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
+        """Return autocomplete data for user emails"""
+        ac_data = ""
+        for user in trans.sa_session.query( trans.app.model.User ).filter_by( deleted=False ).filter( func.lower( trans.app.model.User.email ).like( q.lower() + "%" ) ):
+            ac_data = ac_data + user.email + "\n"
+        return ac_data
+
+    @web.expose
+    @web.require_admin
+    def manage_roles_and_groups_for_user( self, trans, **kwd ):
+        user_id = kwd.get( 'id', None )
+        message = ''
+        status = ''
+        if not user_id:
+            message += "Invalid user id (%s) received" % str( user_id )
+            trans.response.send_redirect( web.url_for( controller='admin',
+                                                       action='users',
+                                                       message=util.sanitize_text( message ),
+                                                       status='error' ) )
+        user = get_user( trans, user_id )
+        private_role = trans.app.security_agent.get_private_user_role( user )
+        if kwd.get( 'user_roles_groups_edit_button', False ):
+            # Make sure the user is not being dis-associated from their private role
+            out_roles = kwd.get( 'out_roles', [] )
+            if out_roles:
+                out_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( out_roles ) ]
+            if private_role in out_roles:
+                message += "You cannot eliminate a user's private role association.  "
+                status = 'error'
+            in_roles = kwd.get( 'in_roles', [] )
+            if in_roles:
+                in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( in_roles ) ]
+            out_groups = kwd.get( 'out_groups', [] )
+            if out_groups:
+                out_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( out_groups ) ]
+            in_groups = kwd.get( 'in_groups', [] )
+            if in_groups:
+                in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( in_groups ) ]
+            if in_roles:
+                trans.app.security_agent.set_entity_user_associations( users=[ user ], roles=in_roles, groups=in_groups )
+                trans.sa_session.refresh( user )
+                message += "User '%s' has been updated with %d associated roles and %d associated groups (private roles are not displayed)" % \
+                    ( user.email, len( in_roles ), len( in_groups ) )
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='users',
+                                                           message=util.sanitize_text( message ),
+                                                           status='done' ) )
+        in_roles = []
+        out_roles = []
+        in_groups = []
+        out_groups = []
+        for role in trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.deleted == false() ) \
+                                                                  .order_by( trans.app.model.Role.table.c.name ):
+            if role in [ x.role for x in user.roles ]:
+                in_roles.append( ( role.id, role.name ) )
+            elif role.type != trans.app.model.Role.types.PRIVATE:
+                # There is a 1 to 1 mapping between a user and a PRIVATE role, so private roles should
+                # not be listed in the roles form fields, except for the currently selected user's private
+                # role, which should always be in in_roles.  The check above is added as an additional
+                # precaution, since for a period of time we were including private roles in the form fields.
+                out_roles.append( ( role.id, role.name ) )
+        for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted == false() ) \
+                                                                    .order_by( trans.app.model.Group.table.c.name ):
+            if group in [ x.group for x in user.groups ]:
+                in_groups.append( ( group.id, group.name ) )
+            else:
+                out_groups.append( ( group.id, group.name ) )
+        message += "User '%s' is currently associated with %d roles and is a member of %d groups" % \
+            ( user.email, len( in_roles ), len( in_groups ) )
+        if not status:
+            status = 'done'
+        return trans.fill_template( '/admin/user/user.mako',
+                                    user=user,
+                                    in_roles=in_roles,
+                                    out_roles=out_roles,
+                                    in_groups=in_groups,
+                                    out_groups=out_groups,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def jobs( self, trans, stop=[], stop_msg=None, cutoff=180, job_lock=None, ajl_submit=None, **kwd ):
+        deleted = []
+        msg = None
+        status = None
+        job_ids = util.listify( stop )
+        if job_ids and stop_msg in [ None, '' ]:
+            msg = 'Please enter an error message to display to the user describing why the job was terminated'
+            status = 'error'
+        elif job_ids:
+            if stop_msg[-1] not in PUNCTUATION:
+                stop_msg += '.'
+            for job_id in job_ids:
+                error_msg = "This job was stopped by an administrator: %s  <a href='%s' target='_blank'>Contact support</a> for additional help." \
+                    % ( stop_msg, self.app.config.get("support_url", "https://wiki.galaxyproject.org/Support" ) )
+                if trans.app.config.track_jobs_in_database:
+                    job = trans.sa_session.query( trans.app.model.Job ).get( job_id )
+                    job.stderr = error_msg
+                    job.set_state( trans.app.model.Job.states.DELETED_NEW )
+                    trans.sa_session.add( job )
+                else:
+                    trans.app.job_manager.job_stop_queue.put( job_id, error_msg=error_msg )
+                deleted.append( str( job_id ) )
+        if deleted:
+            msg = 'Queued job'
+            if len( deleted ) > 1:
+                msg += 's'
+            msg += ' for deletion: '
+            msg += ', '.join( deleted )
+            status = 'done'
+            trans.sa_session.flush()
+        if ajl_submit:
+            if job_lock == 'on':
+                galaxy.queue_worker.send_control_task(trans.app, 'admin_job_lock',
+                                                      kwargs={'job_lock': True } )
+                job_lock = True
+            else:
+                galaxy.queue_worker.send_control_task(trans.app, 'admin_job_lock',
+                                                      kwargs={'job_lock': False } )
+                job_lock = False
+        else:
+            job_lock = trans.app.job_manager.job_lock
+        cutoff_time = datetime.utcnow() - timedelta( seconds=int( cutoff ) )
+        jobs = trans.sa_session.query( trans.app.model.Job ) \
+                               .filter( and_( trans.app.model.Job.table.c.update_time < cutoff_time,
+                                              or_( trans.app.model.Job.state == trans.app.model.Job.states.NEW,
+                                                   trans.app.model.Job.state == trans.app.model.Job.states.QUEUED,
+                                                   trans.app.model.Job.state == trans.app.model.Job.states.RUNNING,
+                                                   trans.app.model.Job.state == trans.app.model.Job.states.UPLOAD ) ) ) \
+                               .order_by( trans.app.model.Job.table.c.update_time.desc() ).all()
+        recent_jobs = trans.sa_session.query( trans.app.model.Job ) \
+            .filter( and_( trans.app.model.Job.table.c.update_time > cutoff_time,
+                           or_( trans.app.model.Job.state == trans.app.model.Job.states.ERROR,
+                                trans.app.model.Job.state == trans.app.model.Job.states.OK) ) ) \
+            .order_by( trans.app.model.Job.table.c.update_time.desc() ).all()
+        last_updated = {}
+        for job in jobs:
+            delta = datetime.utcnow() - job.update_time
+            if delta.days > 0:
+                last_updated[job.id] = '%s hours' % ( delta.days * 24 + int( delta.seconds / 60 / 60 ) )
+            elif delta > timedelta( minutes=59 ):
+                last_updated[job.id] = '%s hours' % int( delta.seconds / 60 / 60 )
+            else:
+                last_updated[job.id] = '%s minutes' % int( delta.seconds / 60 )
+        finished = {}
+        for job in recent_jobs:
+            delta = datetime.utcnow() - job.update_time
+            if delta.days > 0:
+                finished[job.id] = '%s hours' % ( delta.days * 24 + int( delta.seconds / 60 / 60 ) )
+            elif delta > timedelta( minutes=59 ):
+                finished[job.id] = '%s hours' % int( delta.seconds / 60 / 60 )
+            else:
+                finished[job.id] = '%s minutes' % int( delta.seconds / 60 )
+        return trans.fill_template( '/admin/jobs.mako',
+                                    jobs=jobs,
+                                    recent_jobs=recent_jobs,
+                                    last_updated=last_updated,
+                                    finished=finished,
+                                    cutoff=cutoff,
+                                    msg=msg,
+                                    status=status,
+                                    job_lock=job_lock)
+
+    @web.expose
+    @web.require_admin
+    def job_info( self, trans, jobid=None ):
+        job = None
+        if jobid is not None:
+            job = trans.sa_session.query( trans.app.model.Job ).get(jobid)
+        return trans.fill_template( '/webapps/reports/job_info.mako',
+                                    job=job,
+                                    message="<a href='jobs'>Back</a>" )
+
+    @web.expose
+    @web.require_admin
+    def sanitize_whitelist( self, trans, submit_whitelist=False, tools_to_whitelist=[]):
+        if submit_whitelist:
+            # write the configured sanitize_whitelist_file with new whitelist
+            # and update in-memory list.
+            with open(trans.app.config.sanitize_whitelist_file, 'wt') as f:
+                if isinstance(tools_to_whitelist, string_types):
+                    tools_to_whitelist = [tools_to_whitelist]
+                new_whitelist = sorted([tid for tid in tools_to_whitelist if tid in trans.app.toolbox.tools_by_id])
+                f.write("\n".join(new_whitelist))
+            trans.app.config.sanitize_whitelist = new_whitelist
+            # dispatch a message to reload list for other processes
+            galaxy.queue_worker.send_control_task(trans.app, 'reload_sanitize_whitelist', noop_self=True)
+        return trans.fill_template( '/webapps/galaxy/admin/sanitize_whitelist.mako',
+                                    sanitize_all=trans.app.config.sanitize_all_html,
+                                    tools=trans.app.toolbox.tools_by_id )
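+
+    # For illustration, the sanitize_whitelist_file written above is a plain
+    # newline-separated list of tool ids, e.g. (ids hypothetical):
+    #
+    #     upload1
+    #     toolshed.g2.bx.psu.edu/repos/devteam/fastqc/fastqc/0.65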
+
+
+# ---- Utility methods -------------------------------------------------------
+
+
+def get_user( trans, user_id ):
+    """Get a User from the database by id."""
+    user = trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( user_id ) )
+    if not user:
+        return trans.show_error_message( "User not found for id (%s)" % str( user_id ) )
+    return user
+
+
+def get_user_by_username( trans, username ):
+    """Get a user from the database by username"""
+    # TODO: Add exception handling here.
+    return trans.sa_session.query( trans.model.User ) \
+                           .filter( trans.model.User.table.c.username == username ) \
+                           .one()
+
+
+def get_role( trans, id ):
+    """Get a Role from the database by id."""
+    # Load role from database
+    id = trans.security.decode_id( id )
+    role = trans.sa_session.query( trans.model.Role ).get( id )
+    if not role:
+        return trans.show_error_message( "Role not found for id (%s)" % str( id ) )
+    return role
+
+
+def get_group( trans, id ):
+    """Get a Group from the database by id."""
+    # Load group from database
+    id = trans.security.decode_id( id )
+    group = trans.sa_session.query( trans.model.Group ).get( id )
+    if not group:
+        return trans.show_error_message( "Group not found for id (%s)" % str( id ) )
+    return group
+
+
+def get_quota( trans, id ):
+    """Get a Quota from the database by id."""
+    # Load quota from database
+    id = trans.security.decode_id( id )
+    quota = trans.sa_session.query( trans.model.Quota ).get( id )
+    return quota
diff --git a/lib/galaxy/web/base/interactive_environments.py b/lib/galaxy/web/base/interactive_environments.py
new file mode 100644
index 0000000..f9fdcf6
--- /dev/null
+++ b/lib/galaxy/web/base/interactive_environments.py
@@ -0,0 +1,411 @@
+import ConfigParser
+import json
+import os
+import random
+import stat
+import tempfile
+from subprocess import Popen, PIPE
+
+import yaml
+
+from galaxy.util.bunch import Bunch
+from galaxy import web, model
+from galaxy.managers import api_keys
+from galaxy.tools.deps.docker_util import DockerVolume
+
+import logging
+log = logging.getLogger(__name__)
+
+
+class InteractiveEnvironmentRequest(object):
+
+    def __init__(self, trans, plugin):
+        self.trans = trans
+        self.log = log
+
+        self.attr = Bunch()
+        self.attr.viz_id = plugin.name
+        self.attr.history_id = trans.security.encode_id( trans.history.id )
+        self.attr.galaxy_config = trans.app.config
+        self.attr.galaxy_root_dir = os.path.abspath(self.attr.galaxy_config.root)
+        self.attr.root = web.url_for("/")
+        self.attr.app_root = self.attr.root + "plugins/interactive_environments/" + self.attr.viz_id + "/static/"
+        self.attr.import_volume = True
+
+        plugin_path = os.path.abspath( plugin.path )
+
+        # Store our template and configuration path
+        self.attr.our_config_dir = os.path.join(plugin_path, "config")
+        self.attr.our_template_dir = os.path.join(plugin_path, "templates")
+        self.attr.HOST = trans.request.host.rsplit(':', 1)[0]
+
+        self.load_deploy_config()
+        self.load_allowed_images()
+        self.attr.docker_hostname = self.attr.viz_config.get("docker", "docker_hostname")
+
+        # Generate per-request passwords the IE plugin can use to configure
+        # the destination container.
+        self.notebook_pw_salt = self.generate_password(length=12)
+        self.notebook_pw = self.generate_password(length=24)
+
+        ie_parent_temp_dir = self.attr.viz_config.get("docker", "docker_galaxy_temp_dir") or None
+        self.temp_dir = os.path.abspath( tempfile.mkdtemp( dir=ie_parent_temp_dir ) )
+
+        if self.attr.viz_config.getboolean("docker", "wx_tempdir"):
+            # Ensure permissions are set
+            try:
+                os.chmod( self.temp_dir, os.stat(self.temp_dir).st_mode | stat.S_IXOTH )
+            except Exception:
+                log.error( "Could not change permissions of tmpdir %s" % self.temp_dir )
+                # continue anyway
+
+        # This duplicates the logic in the proxy manager
+        if self.attr.galaxy_config.dynamic_proxy_external_proxy:
+            self.attr.proxy_prefix = '/'.join(
+                (
+                    '',
+                    self.attr.galaxy_config.cookie_path.strip('/'),
+                    self.attr.galaxy_config.dynamic_proxy_prefix.strip('/'),
+                    self.attr.viz_id,
+                )
+            )
+        else:
+            self.attr.proxy_prefix = ''
+        # If cookie_path is unset (thus '/'), the proxy prefix ends up with
+        # multiple leading '/' characters, which will cause the client to
+        # request resources from http://dynamic_proxy_prefix
+        if self.attr.proxy_prefix.startswith('/'):
+            self.attr.proxy_prefix = '/' + self.attr.proxy_prefix.lstrip('/')
+
+    def load_allowed_images(self):
+        if os.path.exists(os.path.join(self.attr.our_config_dir, 'allowed_images.yml')):
+            fn = os.path.join(self.attr.our_config_dir, 'allowed_images.yml')
+        elif os.path.exists(os.path.join(self.attr.our_config_dir, 'allowed_images.yml.sample')):
+            fn = os.path.join(self.attr.our_config_dir, 'allowed_images.yml.sample')
+        else:
+            # If we don't have an allowed_images.yml, then we fall back to the
+            # image name specified in the .ini file
+            try:
+                self.allowed_images = [self.attr.viz_config.get('docker', 'image')]
+                self.default_image = self.attr.viz_config.get('docker', 'image')
+                return
+            except AttributeError:
+                raise Exception("[{0}] Could not find allowed_images.yml, or an image tag in the {0}.ini file".format(self.attr.viz_id))
+
+        with open(fn, 'r') as handle:
+            self.allowed_images = [x['image'] for x in yaml.load(handle)]
+
+            if len(self.allowed_images) == 0:
+                raise Exception("No allowed images specified for " + self.attr.viz_id)
+
+            self.default_image = self.allowed_images[0]
+
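+    # A minimal allowed_images.yml as read above might look like the
+    # following; the image names are hypothetical, not Galaxy defaults:
+    #
+    #     - image: quay.io/example/ie-notebook:16.10
+    #     - image: quay.io/example/ie-notebook:dev
+    #
+    # The first entry becomes self.default_image.
+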
+    def load_deploy_config(self):
+        # For backwards compat, any new variables added to the base .ini file
+        # will need to be recorded here. The ConfigParser doesn't provide a
+        # .get() that will ignore missing sections, so we must make use of
+        # their defaults dictionary instead.
+        default_dict = {
+            'command': 'docker {docker_args}',
+            'command_inject': '--sig-proxy=true -e DEBUG=false',
+            'docker_hostname': 'localhost',
+            'wx_tempdir': 'False',
+            'docker_galaxy_temp_dir': None
+        }
+        viz_config = ConfigParser.SafeConfigParser(default_dict)
+        conf_path = os.path.join( self.attr.our_config_dir, self.attr.viz_id + ".ini" )
+        if not os.path.exists( conf_path ):
+            conf_path = "%s.sample" % conf_path
+        viz_config.read( conf_path )
+        self.attr.viz_config = viz_config
+
+        def _boolean_option(option, default=False):
+            if self.attr.viz_config.has_option("main", option):
+                return self.attr.viz_config.getboolean("main", option)
+            else:
+                return default
+
+        # Older style port range proxying - not sure whether we want to keep these
+        # around, or whether we should always assume use of the Galaxy dynamic proxy.
+        # None of these need to be specified if using the Galaxy dynamic proxy.
+        self.attr.PASSWORD_AUTH = _boolean_option("password_auth")
+        self.attr.SSL_URLS = _boolean_option("ssl")
+
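+    # A sketch of the per-plugin .ini this method reads; section and option
+    # names follow the defaults above, while the values are illustrative:
+    #
+    #     [main]
+    #     password_auth = False
+    #     ssl = False
+    #
+    #     [docker]
+    #     command = docker {docker_args}
+    #     image = quay.io/example/ie-notebook:16.10
+    #     docker_hostname = localhost
+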
+    def get_conf_dict(self):
+        """
+            Build up a configuration dictionary that is standard for ALL IEs.
+
+            TODO: replace hashed password with plaintext.
+        """
+        trans = self.trans
+        request = trans.request
+        api_key = api_keys.ApiKeyManager( trans.app ).get_or_create_api_key( trans.user )
+        conf_file = {
+            'history_id': self.attr.history_id,
+            'api_key': api_key,
+            'remote_host': request.remote_addr,
+            # DOCKER_PORT is NO LONGER AVAILABLE. All IEs must update.
+            'cors_origin': request.host_url,
+            'user_email': self.trans.user.email,
+            'proxy_prefix': self.attr.proxy_prefix,
+        }
+
+        web_port = self.attr.galaxy_config.galaxy_infrastructure_web_port
+        conf_file['galaxy_web_port'] = web_port or self.attr.galaxy_config.guess_galaxy_port()
+
+        if self.attr.viz_config.has_option("docker", "galaxy_url"):
+            conf_file['galaxy_url'] = self.attr.viz_config.get("docker", "galaxy_url")
+        elif self.attr.galaxy_config.galaxy_infrastructure_url_set:
+            conf_file['galaxy_url'] = self.attr.galaxy_config.galaxy_infrastructure_url.rstrip('/') + '/'
+        else:
+            conf_file['galaxy_url'] = request.application_url.rstrip('/') + '/'
+            # Galaxy paster port is deprecated
+            conf_file['galaxy_paster_port'] = conf_file['galaxy_web_port']
+
+        return conf_file
+
+    def generate_hex(self, length):
+        return ''.join(random.choice('0123456789abcdef') for _ in range(length))
+
+    def generate_password(self, length):
+        """
+            Generate a random alphanumeric password
+        """
+        return ''.join(random.choice('0123456789abcdefghijklmnopqrstuvwxyz') for _ in range(length))
+
+    def javascript_boolean(self, python_boolean):
+        """
+            Convenience function to convert boolean for use in JS
+        """
+        if python_boolean:
+            return "true"
+        else:
+            return "false"
+
+    def url_template(self, url_template):
+        """Process a URL template
+
+        There are several variables accessible to the user:
+
+            - ${PROXY_URL} will be replaced with the dynamically created proxy's URL
+            - ${PROXY_PREFIX} will be replaced with the URL-encoded proxy prefix, if one is configured
+        """
+        # Next several lines for older style replacements (not used with Galaxy dynamic
+        # proxy)
+        if self.attr.SSL_URLS:
+            protocol = 'https'
+        else:
+            protocol = 'http'
+
+        url_template = url_template.replace('${PROTO}', protocol) \
+            .replace('${HOST}', self.attr.HOST)
+
+        # Only the following replacements are used with Galaxy dynamic proxy
+        # URLs
+        url = url_template.replace('${PROXY_URL}', str(self.attr.proxy_url)) \
+            .replace('${PROXY_PREFIX}', str(self.attr.proxy_prefix.replace('/', '%2F')))
+        return url
+
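+    # A worked example under assumed values (both attribute values are
+    # hypothetical):
+    #
+    #     self.attr.proxy_url = 'http://localhost:8800/gie_proxy'
+    #     self.attr.proxy_prefix = '/gie_proxy/jupyter'
+    #     self.url_template('${PROXY_URL}/login?prefix=${PROXY_PREFIX}')
+    #     # -> 'http://localhost:8800/gie_proxy/login?prefix=%2Fgie_proxy%2Fjupyter'
+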
+    def volume(self, host_path, container_path, **kwds):
+        return DockerVolume(host_path, container_path, **kwds)
+
+    def docker_cmd(self, image, env_override={}, volumes=[]):
+        """
+            Generate and return the docker command to execute
+        """
+        temp_dir = self.temp_dir
+        conf = self.get_conf_dict()
+        conf.update(env_override)
+        env_str = ' '.join(['-e "%s=%s"' % (key.upper(), item) for key, item in conf.items()])
+        volume_str = ' '.join(['-v "%s"' % volume for volume in volumes])
+        import_volume_str = '-v "{temp_dir}:/import/"'.format(temp_dir=temp_dir) if self.attr.import_volume else ''
+        # This is the basic docker command such as "sudo -u docker docker {docker_args}"
+        # or just "docker {docker_args}"
+        command = self.attr.viz_config.get("docker", "command")
+        # Then we format the full docker run invocation in place of
+        # {docker_args}, so the admin need not worry about which args are
+        # being passed
+        command = command.format(docker_args='run {command_inject} {environment} -d -P {import_volume_str} {volume_str} {image}')
+        # Once that's available, we format again with all of our arguments
+        command = command.format(
+            command_inject=self.attr.viz_config.get("docker", "command_inject"),
+            environment=env_str,
+            import_volume_str=import_volume_str,
+            volume_str=volume_str,
+            image=image,
+        )
+        return command
+
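+    # With the default command template above, the generated command looks
+    # roughly like this (image name and values hypothetical):
+    #
+    #     docker run --sig-proxy=true -e DEBUG=false \
+    #         -e "HISTORY_ID=..." -e "API_KEY=..." -d -P \
+    #         -v "/tmp/tmpXYZ:/import/" quay.io/example/ie-notebook:16.10
+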
+    def _idsToVolumes(self, ids):
+        if len(ids.strip()) == 0:
+            return []
+
+        # They come as a comma separated list
+        ids = ids.split(',')
+
+        # Next we need to turn these into volumes
+        volumes = []
+        for id in ids:
+            decoded_id = self.trans.security.decode_id(id)
+            dataset = self.trans.sa_session.query(model.HistoryDatasetAssociation).get(decoded_id)
+            # TODO: do we need to check if the user has access?
+            volumes.append(self.volume(dataset.get_file_name(), '/import/[{0}] {1}.{2}'.format(dataset.id, dataset.name, dataset.ext)))
+        return volumes
+
+    def launch(self, image=None, additional_ids=None, raw_cmd=None, env_override=None, volumes=None):
+        """Launch a docker image.
+
+        :type image: str
+        :param image: Optional image name. If not provided, self.default_image
+                      is used, which is the first image listed in the
+                      allowed_images.yml{,.sample} file.
+
+        :type additional_ids: str
+        :param additional_ids: comma separated list of encoded HDA IDs. These
+                               are transformed into Volumes and added to the
+                               ``volumes`` argument
+
+        :type raw_cmd: str
+        :param raw_cmd: raw docker command. Usually generated with self.docker_cmd()
+
+        :type env_override: dict
+        :param env_override: dictionary of environment variables to add.
+
+        :type volumes: list of galaxy.tools.deps.docker_util.DockerVolume
+        :param volumes: list of docker volume mounts
+
+        """
+        # Guard the mutable defaults (shared default lists would accumulate volumes across calls)
+        env_override = env_override or {}
+        volumes = volumes or []
+        if image is None:
+            image = self.default_image
+
+        if image not in self.allowed_images:
+            # Now that we're allowing users to specify images, we need to ensure that they aren't
+            # requesting images we have not specifically allowed.
+            raise Exception("Attempting to launch disallowed image! %s not in list of allowed images [%s]"
+                            % (image, ', '.join(self.allowed_images)))
+
+        if additional_ids is not None:
+            volumes += self._idsToVolumes(additional_ids)
+
+        if raw_cmd is None:
+            raw_cmd = self.docker_cmd(image, env_override=env_override, volumes=volumes)
+
+        log.info("Starting docker container for IE {0} with command [{1}]".format(
+            self.attr.viz_id,
+            raw_cmd
+        ))
+        p = Popen( raw_cmd, stdout=PIPE, stderr=PIPE, close_fds=True, shell=True)
+        stdout, stderr = p.communicate()
+        if p.returncode != 0:
+            log.error( "%s\n%s" % (stdout, stderr) )
+            return None
+        else:
+            container_id = stdout.strip()
+            log.debug( "Container id: %s" % container_id)
+            inspect_data = self.inspect_container(container_id)
+            port_mappings = self.get_container_port_mapping(inspect_data)
+            self.attr.docker_hostname = self.get_container_host(inspect_data)
+            log.debug( "Container host: %s", self.attr.docker_hostname )
+            if len(port_mappings) > 1:
+                log.warning("Don't know how to handle proxies to containers with multiple exposed ports. Arbitrarily choosing the first")
+            elif len(port_mappings) == 0:
+                log.warning("No exposed ports to map! Images MUST EXPOSE a port")
+                return None
+            # Fetch the first port_mapping
+            (service, host_ip, host_port) = port_mappings[0]
+
+            # Now we configure our proxy_request object, manually specify
+            # the port to map to, and ensure the proxy is available.
+            self.attr.proxy_request = self.trans.app.proxy_manager.setup_proxy(
+                self.trans,
+                host=self.attr.docker_hostname,
+                port=host_port,
+                proxy_prefix=self.attr.proxy_prefix,
+                route_name=self.attr.viz_id,
+                container_ids=[container_id],
+            )
+            # These variables then become available for use in templating URLs
+            self.attr.proxy_url = self.attr.proxy_request[ 'proxy_url' ]
+            # Commented out because it needs to be documented and visible that
+            # this variable was moved here. Usually would remove commented
+            # code, but again, needs to be clear where this went. Remove at a
+            # later time.
+            #
+            # PORT is no longer exposed internally. All requests are forced to
+            # go through the proxy we ship.
+            # self.attr.PORT = self.attr.proxy_request[ 'proxied_port' ]
+
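+    # Typical (hedged) use from an IE's entry-point code, assuming an
+    # InteractiveEnvironmentRequest named ie_request and a mounted directory:
+    #
+    #     ie_request.launch(
+    #         image=ie_request.default_image,
+    #         env_override={'notebook_password': ie_request.notebook_pw},
+    #         volumes=[ie_request.volume('/data/shared', '/import/shared')],
+    #     )
+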
+    def inspect_container(self, container_id):
+        """Runs docker inspect on a container and returns json response as python dictionary inspect_data.
+
+        :type container_id: str
+        :param container_id: a docker container ID
+
+        :returns: inspect_data, a dict of docker inspect output
+        """
+        command = self.attr.viz_config.get("docker", "command")
+        command = command.format(docker_args="inspect %s" % container_id)
+        log.info("Inspecting docker container {0} with command [{1}]".format(
+            container_id,
+            command
+        ))
+
+        p = Popen(command, stdout=PIPE, stderr=PIPE, close_fds=True, shell=True)
+        stdout, stderr = p.communicate()
+        if p.returncode != 0:
+            log.error( "%s\n%s" % (stdout, stderr) )
+            return None
+
+        inspect_data = json.loads(stdout)
+        # Example excerpt of the returned structure:
+        # [{
+        #     "NetworkSettings" : {
+        #         "Ports" : {
+        #             "3306/tcp" : [
+        #                 {
+        #                     "HostIp" : "127.0.0.1",
+        #                     "HostPort" : "3306"
+        #                 }
+        #             ]
+        #         }
+        #     }
+        # }]
+        return inspect_data
+
+    def get_container_host(self, inspect_data):
+        """
+        Determine the IP address of the host the container is running on. If
+        inspect_data contains Node.IP, return that (e.g. when running in
+        Docker Swarm). If the configured hostname is "localhost", return
+        NetworkSettings.Gateway. Otherwise, just return the configured
+        docker_hostname.
+
+        :type inspect_data: dict
+        :param inspect_data: output of docker inspect
+        :returns: IP address or hostname of the node the container is
+                  running on.
+        """
+        inspect_data = inspect_data[0]
+        if 'Node' in inspect_data:
+            return inspect_data['Node']['IP']
+        elif self.attr.docker_hostname == "localhost":
+            return inspect_data['NetworkSettings']['Gateway']
+        else:
+            return self.attr.docker_hostname
+
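+    # E.g. in the Swarm case the inspect data carries a Node section:
+    #
+    #     inspect_data = [{'Node': {'IP': '10.0.0.5'}, 'NetworkSettings': {}}]
+    #     self.get_container_host(inspect_data)  # -> '10.0.0.5'
+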
+    def get_container_port_mapping(self, inspect_data):
+        """
+        :type inspect_data: dict
+        :param inspect_data: output of docker inspect
+        :returns: a list of triples containing (internal_port, external_ip,
+                  external_port), of which the ports are probably the only
+                  useful information.
+
+        Someday the code that calls this should be refactored, once we get
+        containers with multiple exposed ports working.
+        """
+        mappings = []
+        port_mappings = inspect_data[0]['NetworkSettings']['Ports']
+        for port_name in port_mappings:
+            for binding in port_mappings[port_name]:
+                mappings.append((
+                    port_name.replace('/tcp', '').replace('/udp', ''),
+                    binding['HostIp'],
+                    binding['HostPort']
+                ))
+        return mappings
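+
+    # For the inspect excerpt shown in inspect_container() above, this
+    # method returns [('3306', '127.0.0.1', '3306')].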
diff --git a/lib/galaxy/web/base/pluginframework.py b/lib/galaxy/web/base/pluginframework.py
new file mode 100644
index 0000000..7048307
--- /dev/null
+++ b/lib/galaxy/web/base/pluginframework.py
@@ -0,0 +1,580 @@
+"""
+Base classes for plugin frameworks - systems that may:
+ * add code at startup
+ * allow hooks to be called
+and a base class for plugins that:
+ * serve static content
+ * serve templated html
+ * have some configuration at startup
+"""
+
+import os.path
+import sys
+import imp
+
+from galaxy import util
+from galaxy.util import odict
+from galaxy.util import bunch
+
+import mako.lookup
+import logging
+log = logging.getLogger( __name__ )
+
+
+class PluginManagerException( Exception ):
+    """Base exception for plugin frameworks.
+    """
+    pass
+
+
+class PluginManagerConfigException( PluginManagerException ):
+    """Exception for plugin framework configuration errors.
+    """
+    pass
+
+
+# ============================================================================= base
+class PluginManager( object ):
+    """
+    A plugin is a section of code that is not tracked in the Galaxy
+    repository, allowing the addition of custom code to a Galaxy
+    installation without changing the code base.
+
+    A PluginManager discovers and manages these plugins.
+
+    This is a non-abstract class, but its usefulness is limited and it is
+    meant to be inherited.
+    """
+
+    def __init__( self, app, directories_setting=None, skip_bad_plugins=True, **kwargs ):
+        """
+        Set up the manager and load all plugins.
+
+        :type   app:    UniverseApplication
+        :param  app:    the application (and its configuration) using this manager
+        :type   directories_setting: string (default: None)
+        :param  directories_setting: the filesystem path (or paths)
+            to search for plugins. Can be CSV string of paths. Will be treated as
+            absolute if a path starts with '/', relative otherwise.
+        :type   skip_bad_plugins:    boolean (default: True)
+        :param  skip_bad_plugins:    whether to skip plugins that cause
+            exceptions when loaded or to raise that exception
+        """
+        self.directories = []
+        self.skip_bad_plugins = skip_bad_plugins
+        self.plugins = odict.odict()
+
+        self.directories = util.config_directories_from_setting( directories_setting, app.config.root )
+
+        self.load_configuration()
+        self.load_plugins()
+
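+    # Illustrative (hypothetical) use - scan a CSV of root-relative and
+    # absolute paths for plugin sub-directories:
+    #
+    #     manager = PluginManager(app, directories_setting='plugins/custom,/srv/galaxy/plugins')
+    #     for name, plugin in manager.plugins.items():
+    #         print name, plugin.path
+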
+    def load_configuration( self ):
+        """
+        Override to load some framework/plugin specific configuration.
+        """
+        # Abstract method
+        return True
+
+    def load_plugins( self ):
+        """
+        Search ``self.directories`` for potential plugins, load them, and cache
+        in ``self.plugins``.
+
+        :rtype:                 odict
+        :returns:               ``self.plugins``
+        """
+        for plugin_path in self.find_plugins():
+            try:
+                plugin = self.load_plugin( plugin_path )
+
+                if plugin and plugin.name not in self.plugins:
+                    self.plugins[ plugin.name ] = plugin
+                    log.info( '%s, loaded plugin: %s', self, plugin.name )
+                # NOTE: prevent silent, implicit overwrite here (two plugins in two diff directories)
+                # TODO: overwriting may be desired
+                elif plugin and plugin.name in self.plugins:
+                    log.warning( '%s, plugin with name already exists: %s. Skipping...', self, plugin.name )
+
+            except Exception:
+                if not self.skip_bad_plugins:
+                    raise
+                log.exception( 'Plugin loading raised exception: %s. Skipping...', plugin_path )
+
+        return self.plugins
+
+    def find_plugins( self ):
+        """
+        Return the directory paths of plugins within ``self.directories``.
+
+        Paths are considered a plugin path if they pass ``self.is_plugin``.
+
+        :rtype:                 string generator
+        :returns:               paths of valid plugins
+        """
+        # due to the ordering of listdir, there is an implicit plugin loading order here
+        # could instead explicitly list on/off in master config file
+        for directory in self.directories:
+            for plugin_dir in sorted( os.listdir( directory ) ):
+                plugin_path = os.path.join( directory, plugin_dir )
+                if self.is_plugin( plugin_path ):
+                    yield plugin_path
+
+    def is_plugin( self, plugin_path ):
+        """
+        Determines whether the given filesystem path contains a plugin.
+
+        In this base class, all sub-directories are considered plugins.
+
+        :type   plugin_path:    string
+        :param  plugin_path:    relative or absolute filesystem path to the
+            potential plugin
+        :rtype:                 bool
+        :returns:               True if the path contains a plugin
+        """
+        if not os.path.isdir( plugin_path ):
+            return False
+        return True
+
+    def load_plugin( self, plugin_path ):
+        """
+        Create, load, and/or initialize the plugin and return it.
+
+        Plugin bunches are decorated with:
+            * name : the plugin name
+            * path : the plugin path
+
+        :type   plugin_path:    string
+        :param  plugin_path:    relative or absolute filesystem path to the plugin
+        :rtype:                 ``util.bunch.Bunch``
+        :returns:               the loaded plugin object
+        """
+        plugin = bunch.Bunch(
+            # TODO: need a better way to define plugin names
+            #   pro: filesystem name ensures uniqueness
+            #   con: rel. inflexible
+            name=os.path.split( plugin_path )[1],
+            path=plugin_path
+        )
+        return plugin
+
+
+# ============================================================================= plugin managers using hooks
+class HookPluginManager( PluginManager ):
+    """
+    A hook plugin is a directory containing python modules or packages that:
+        * allow creating, including, and running custom code at specific 'hook'
+            points/events
+        * are not tracked in the Galaxy repository and allow adding custom code
+            to a Galaxy installation
+
+    A HookPluginManager imports the plugin code needed and calls the plugin's
+    hook functions at the specified time.
+    """
+    #: the python file that will be imported - hook functions should be contained here
+    loading_point_filename = 'plugin.py'
+    hook_fn_prefix = 'hook_'
+
+    def is_plugin( self, plugin_path ):
+        """
+        Determines whether the given filesystem path contains a hookable plugin.
+
+        All sub-directories that contain ``loading_point_filename`` are considered
+        plugins.
+
+        :type   plugin_path:    string
+        :param  plugin_path:    relative or absolute filesystem path to the
+            potential plugin
+        :rtype:                 bool
+        :returns:               True if the path contains a plugin
+        """
+        if not super( HookPluginManager, self ).is_plugin( plugin_path ):
+            return False
+        # TODO: possibly switch to <plugin.name>.py or __init__.py
+        if self.loading_point_filename not in os.listdir( plugin_path ):
+            return False
+        return True
+
+    def load_plugin( self, plugin_path ):
+        """
+        Import the plugin ``loading_point_filename`` and attach to the plugin bunch.
+
+        Plugin bunches are decorated with:
+            * name : the plugin name
+            * path : the plugin path
+            * module : the plugin code
+
+        :type   plugin_path:    string
+        :param  plugin_path:    relative or absolute filesystem path to the plugin
+        :rtype:                 ``util.bunch.Bunch``
+        :returns:               the loaded plugin object
+        """
+        plugin = super( HookPluginManager, self ).load_plugin( plugin_path )
+
+        loading_point_name = self.loading_point_filename[:-3]
+        plugin[ 'module' ] = self.import_plugin_module( loading_point_name, plugin )
+        return plugin
+
+    def import_plugin_module( self, loading_point_name, plugin, import_as=None ):
+        """
+        Import the plugin code and cache the module in the plugin object.
+
+        :type   loading_point_name: string
+        :param  loading_point_name: name of the python file to import (w/o extension)
+        :type   plugin:             ``util.bunch.Bunch``
+        :param  plugin:             the plugin containing the template to render
+        :type   import_as:          string
+        :param  import_as:          namespace to use for imported module
+            This will be prepended with the ``__name__`` of this file.
+            Defaults to ``plugin.name``
+        :rtype:                     ``util.bunch.Bunch``
+        :returns:                   the loaded plugin object
+        """
+        # add this name to import_as (w/ default to plugin.name) to prevent namespace pollution in sys.modules
+        import_as = '%s.%s' % ( __name__, ( import_as or plugin.name ) )
+        module_file, pathname, description = imp.find_module( loading_point_name, [ plugin.path ] )
+        try:
+            # TODO: hate this hack but only way to get package imports inside the plugin to work?
+            sys.path.append( plugin.path )
+            # sys.modules will now have import_as in its list
+            module = imp.load_module( import_as, module_file, pathname, description )
+        finally:
+            module_file.close()
+            if plugin.path in sys.path:
+                sys.path.remove( plugin.path )
+        return module
+
+    def run_hook( self, hook_name, *args, **kwargs ):
+        """
+        Search all plugins for a function named ``hook_fn_prefix`` + ``hook_name``
+        and run it passing in args and kwargs.
+
+        Return values from each hook are returned in a dictionary keyed with the
+        plugin names.
+
+        :type   hook_name:  string
+        :param  hook_name:  name (suffix) of the hook to run
+        :rtype:             dictionary
+        :returns:           a dictionary where keys are plugin names and
+            values are the return values of the hooks
+        """
+        # TODO: is hook prefix necessary?
+        # TODO: could be made more efficient if cached by hook_name in the manager on load_plugin
+        #   (low maint. overhead since no dynamic loading/unloading of plugins)
+        hook_fn_name = ''.join([ self.hook_fn_prefix, hook_name ])
+        returned = {}
+        for plugin_name, plugin in self.plugins.items():
+            hook_fn = getattr( plugin.module, hook_fn_name, None )
+
+            if hook_fn and hasattr( hook_fn, '__call__' ):
+                try:
+                    fn_returned = hook_fn( *args, **kwargs )
+                    returned[ plugin.name ] = fn_returned
+                except Exception:
+                    # fail gracefully and continue with other plugins
+                    log.exception( 'Hook function "%s" failed for plugin "%s"', hook_name, plugin.name )
+
+        # not sure of utility of this - seems better to be fire-and-forget pub-sub
+        return returned
+
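+    # A minimal (hypothetical) plugin.py hook; given hook_fn_prefix = 'hook_',
+    # run_hook('tool_loaded', tool) would call it in every plugin that
+    # defines it and collect the return values keyed by plugin name:
+    #
+    #     def hook_tool_loaded(tool):
+    #         return tool.id
+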
+    def filter_hook( self, hook_name, hook_arg, *args, **kwargs ):
+        """
+        Search all plugins for a function named ``hook_fn_prefix`` + ``hook_name``
+        and run the first with ``hook_arg`` and every function after with the
+        return value of the previous.
+
+        .. note::
+            This makes plugin load order very important.
+
+        :type   hook_name:  string
+        :param  hook_name:  name (suffix) of the hook to run
+        :type   hook_arg:   any
+        :param  hook_arg:   the arg to be passed between hook functions
+        :rtype:             any
+        :returns:           the modified hook_arg
+        """
+        hook_fn_name = ''.join([ self.hook_fn_prefix, hook_name ])
+        for plugin_name, plugin in self.plugins.items():
+            hook_fn = getattr( plugin.module, hook_fn_name, None )
+
+            if hook_fn and hasattr( hook_fn, '__call__' ):
+                try:
+                    hook_arg = hook_fn( hook_arg, *args, **kwargs )
+
+                except Exception:
+                    # fail gracefully and continue with other plugins
+                    log.exception( 'Filter hook function "%s" failed for plugin "%s"', hook_name, plugin.name )
+
+        # may have been altered by hook fns, return
+        return hook_arg
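+
+    # Sketch of a (hypothetical) filter chain - each plugin's hook receives
+    # the previous plugin's return value, in plugin load order:
+    #
+    #     # plugin_a/plugin.py
+    #     def hook_filter_title(title):
+    #         return title.strip()
+    #
+    #     # plugin_b/plugin.py
+    #     def hook_filter_title(title):
+    #         return title.upper()
+    #
+    #     manager.filter_hook('filter_title', '  galaxy ')  # -> 'GALAXY'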
+
+
+class PluginManagerStaticException( PluginManagerException ):
+    """Exception for plugin framework static directory set up errors.
+    """
+    pass
+
+
+class PluginManagerTemplateException( PluginManagerException ):
+    """Exception for plugin framework template directory
+    and template rendering errors.
+    """
+    pass
+
+
+# ============================================================================= base
+class PageServingPluginManager( PluginManager ):
+    """
+    Page serving plugins are files/directories that:
+        * are not tracked in the Galaxy repository and allow adding custom code
+            to a Galaxy installation
+        * serve static files (css, js, images, etc.),
+        * render templates
+
+    A PageServingPluginManager sets up all the above components.
+    """
+    # TODO: I'm unclear of the utility of this class - it prob. will only have one subclass (vis reg). Fold into?
+
+    #: default static url base
+    DEFAULT_BASE_URL = ''
+    #: does the class need static files served?
+    serves_static = True
+    #: does the class need template files served?
+    serves_templates = True
+    #: default number of templates to search for plugin template lookup
+    DEFAULT_TEMPLATE_COLLECTION_SIZE = 10
+    #: default encoding of plugin templates
+    DEFAULT_TEMPLATE_ENCODING = 'utf-8'
+    #: name of files to search for additional template lookup directories
+    additional_template_paths_config_filename = 'additional_template_paths.xml'
+
+    def __init__( self, app, base_url='', template_cache_dir=None, **kwargs ):
+        """
+        Set up the manager and load all plugins.
+
+        :type   app:        UniverseApplication
+        :param  app:        the application (and its configuration) using this manager
+        :type   base_url:   string
+        :param  base_url:   url to prefix all plugin urls with
+        :type   template_cache_dir: string
+        :param  template_cache_dir: filesystem path to the directory where cached
+            templates are kept
+        """
+        self.base_url = base_url or self.DEFAULT_BASE_URL
+        if not self.base_url:
+            raise PluginManagerException( 'base_url or DEFAULT_BASE_URL required' )
+        self.template_cache_dir = template_cache_dir
+        self.additional_template_paths = []
+
+        super( PageServingPluginManager, self ).__init__( app, **kwargs )
+
+    def load_configuration( self ):
+        """
+        Load framework wide configuration, including:
+            additional template lookup directories
+        """
+        for directory in self.directories:
+            possible_path = os.path.join( directory, self.additional_template_paths_config_filename )
+            if os.path.exists( possible_path ):
+                added_paths = self.parse_additional_template_paths( possible_path, directory )
+                self.additional_template_paths.extend( added_paths )
+
+    def parse_additional_template_paths( self, config_filepath, base_directory ):
+        """
+        Parse an XML config file at `config_filepath` for template paths
+        (relative to `base_directory`) to add to each plugin's template lookup.
+
+        Allows having a set of common templates for import/inheritance in
+        plugin templates.
+
+        :type   config_filepath:    string
+        :param  config_filepath:    filesystem path to the config file
+        :type   base_directory:     string
+        :param  base_directory:     path prefixed to new, relative template paths
+        """
+        additional_paths = []
+        xml_tree = util.parse_xml( config_filepath )
+        paths_list = xml_tree.getroot()
+        for rel_path_elem in paths_list.findall( 'path' ):
+            if rel_path_elem.text is not None:
+                additional_paths.append( os.path.join( base_directory, rel_path_elem.text ) )
+        return additional_paths
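+
+    # A sketch of the config file this method parses; the root element name
+    # and the <path> values are illustrative, and each path is resolved
+    # against `base_directory`:
+    #
+    #     <?xml version="1.0"?>
+    #     <paths>
+    #         <path>common/templates</path>
+    #         <path>shared/macros</path>
+    #     </paths>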
+
+    def is_plugin( self, plugin_path ):
+        """
+        Determines whether the given filesystem path contains a plugin.
+
+        If the manager ``serves_templates`` and the directory contains a
+        sub-directory named 'templates', it's considered valid.
+        If the manager ``serves_static`` and the directory contains a
+        sub-directory named 'static', it's considered valid.
+
+        :type   plugin_path:    string
+        :param  plugin_path:    relative or absolute filesystem path to the
+            potential plugin
+        :rtype:                 bool
+        :returns:               True if the path contains a plugin
+        """
+        if not super( PageServingPluginManager, self ).is_plugin( plugin_path ):
+            return False
+        # reject only if we don't have either
+        listdir = os.listdir( plugin_path )
+        if( ( 'templates' not in listdir ) and ( 'static' not in listdir ) ):
+            return False
+        return True
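+
+    # A directory satisfying is_plugin might look like this (the plugin name
+    # is hypothetical):
+    #
+    #     my_plugin/
+    #         static/      # served at <base_url>/my_plugin/static
+    #         templates/   # Mako templates rendered via fill_template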
+
+    def load_plugin( self, plugin_path ):
+        """
+        Create the plugin and decorate with static and/or template paths and urls.
+
+        Plugin bunches are decorated with:
+            * name : the plugin name
+            * path : the plugin path
+            * base_url : a url to the plugin
+
+        :type   plugin_path:    string
+        :param  plugin_path:    relative or absolute filesystem path to the plugin
+        :rtype:                 ``util.bunch.Bunch``
+        :returns:               the loaded plugin object
+        """
+        plugin = super( PageServingPluginManager, self ).load_plugin( plugin_path )
+        # TODO: urlencode?
+        plugin[ 'base_url' ] = '/'.join([ self.base_url, plugin.name ])
+        plugin = self._set_up_static_plugin( plugin )
+        plugin = self._set_up_template_plugin( plugin )
+
+        return plugin
+
+    def _set_up_static_plugin( self, plugin ):
+        """
+        Decorate the plugin with paths and urls needed to serve static content.
+
+        Plugin bunches are decorated with:
+            * serves_static : whether this plugin will serve static content
+
+        If the plugin path contains a 'static' sub-dir, the following are added:
+            * static_path   : the filesystem path to the static content
+            * static_url    : the url to use when serving static content
+
+        :type   plugin: ``util.bunch.Bunch``
+        :param  plugin: the plugin to decorate
+        :rtype:         ``util.bunch.Bunch``
+        :returns:       the loaded plugin object
+        """
+        plugin[ 'serves_static' ] = False
+        static_path = os.path.join( plugin.path, 'static' )
+        if self.serves_static and os.path.isdir( static_path ):
+            plugin.serves_static = True
+            plugin[ 'static_path' ] = static_path
+            plugin[ 'static_url' ] = '/'.join([ plugin.base_url, 'static' ])
+        return plugin
+
+    def _set_up_template_plugin( self, plugin ):
+        """
+        Decorate the plugin with paths needed to fill templates.
+
+        Plugin bunches are decorated with:
+            * serves_templates :    whether this plugin will use templates
+
+        If the plugin path contains a 'templates' sub-dir, the following are added:
+            * template_path   : the filesystem path to the template sub-dir
+            * template_lookup : the (currently Mako) TemplateLookup used to search
+                for templates
+
+        :type   plugin: ``util.bunch.Bunch``
+        :param  plugin: the plugin to decorate
+        :rtype:         ``util.bunch.Bunch``
+        :returns:       the loaded plugin object
+        """
+        plugin[ 'serves_templates' ] = False
+        template_path = os.path.join( plugin.path, 'templates' )
+        if self.serves_templates and os.path.isdir( template_path ):
+            plugin.serves_templates = True
+            plugin[ 'template_path' ] = template_path
+            plugin[ 'template_lookup' ] = self.build_plugin_template_lookup( plugin )
+        return plugin
+
+    # ------------------------------------------------------------------------- serving static files
+    def get_static_urls_and_paths( self ):
+        """
+        For each plugin serving static content, return a 2-tuple whose first
+        element is a url path to the plugin's static files and whose second is
+        a filesystem path to those same files.
+
+        Meant to be passed to the static middleware's url map.
+
+        :rtype:         list of 2-tuples
+        :returns:       all urls and paths for each plugin serving static content
+        """
+        # called during the static middleware creation (buildapp.py, wrap_in_static)
+        urls_and_paths = []
+        for plugin in self.plugins.values():
+            if plugin.serves_static:
+                urls_and_paths.append( ( plugin.static_url, plugin.static_path ) )
+        return urls_and_paths
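+
+    # For a hypothetical plugin 'scatterplot' under a base_url of
+    # 'visualizations', the returned list would contain a tuple like:
+    #
+    #     ( 'visualizations/scatterplot/static',
+    #       '<plugin_path>/scatterplot/static' )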
+
+    # ------------------------------------------------------------------------- templates
+    def build_plugin_template_lookup( self, plugin ):
+        """
+        Builds the object that searches for templates (cached or not) when rendering.
+
+        :type   plugin: ``util.bunch.Bunch``
+        :param  plugin: the plugin containing the templates
+        :rtype:         ``Mako.lookup.TemplateLookup``
+        :returns:       template lookup for this plugin
+        """
+        if not plugin.serves_templates:
+            return None
+        template_lookup_paths = plugin.template_path
+        if self.additional_template_paths:
+            template_lookup_paths = [ template_lookup_paths ] + self.additional_template_paths
+        template_lookup = self._create_mako_template_lookup( self.template_cache_dir, template_lookup_paths )
+        return template_lookup
+
+    def _create_mako_template_lookup( self, cache_dir, paths,
+                                      collection_size=DEFAULT_TEMPLATE_COLLECTION_SIZE, output_encoding=DEFAULT_TEMPLATE_ENCODING ):
+        """
+        Create a ``TemplateLookup`` with defaults.
+
+        :rtype:         ``Mako.lookup.TemplateLookup``
+        :returns:       a TemplateLookup configured with the given directories and cache dir
+        """
+        # TODO: possible to add galaxy/templates into the lookup here?
+        return mako.lookup.TemplateLookup(
+            directories=paths,
+            module_directory=cache_dir,
+            collection_size=collection_size,
+            output_encoding=output_encoding )
+
+    def fill_template( self, trans, plugin, template_filename, **kwargs ):
+        """
+        Pass control over to trans and render ``template_filename``.
+
+        :type   trans:              ``galaxy.web.framework.webapp.GalaxyWebTransaction``
+        :param  trans:              transaction doing the rendering
+        :type   plugin:             ``util.bunch.Bunch``
+        :param  plugin:             the plugin containing the template to render
+        :type   template_filename:  string
+        :param  template_filename:  the path of the template to render relative to
+            ``plugin.template_path``
+        :returns:       rendered template
+        """
+        # defined here to be overridden
+        return trans.fill_template( template_filename, template_lookup=plugin.template_lookup, **kwargs )
+
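+    # Illustrative call (the plugin and template names are hypothetical):
+    #
+    #     plugin = manager.plugins[ 'scatterplot' ]
+    #     html = manager.fill_template( trans, plugin, 'plot.mako', title='My plot' )
+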
+    # TODO: add fill_template fn that is able to load extra libraries beforehand (and remove after)
+    # TODO: add template helpers specific to the plugins
+    # TODO: some sort of url_for for these plugins
+
+
+# =============================================================================
+class Plugin( object ):
+    """
+    Plugin as object/class.
+    """
+
+    def __init__( self, app, path, name, config, context=None, **kwargs ):
+        context = context or {}
+
+        self.app = app
+        self.path = path
+        self.name = name
+        self.config = config
diff --git a/lib/galaxy/web/buildapp.py b/lib/galaxy/web/buildapp.py
new file mode 100644
index 0000000..e6a04c8
--- /dev/null
+++ b/lib/galaxy/web/buildapp.py
@@ -0,0 +1,5 @@
+"""For backward compatibility only, pulls app_factor from galaxy.webapps.main"""
+
+from galaxy.webapps.galaxy.buildapp import app_factory
+
+__all__ = ('app_factory', )
diff --git a/lib/galaxy/web/form_builder.py b/lib/galaxy/web/form_builder.py
new file mode 100644
index 0000000..fbdaf72
--- /dev/null
+++ b/lib/galaxy/web/form_builder.py
@@ -0,0 +1,827 @@
+"""
+Classes for generating HTML forms
+"""
+import os
+import time
+import logging
+
+from six import string_types
+from operator import itemgetter
+from cgi import escape
+from galaxy.util import restore_text, relpath, nice_size, unicodify
+from galaxy.web import url_for
+from binascii import hexlify
+
+log = logging.getLogger(__name__)
+
+
+class BaseField(object):
+    def get_html( self, prefix="" ):
+        """Returns the html widget corresponding to the parameter"""
+        raise TypeError( "Abstract Method" )
+
+    def get_disabled_str( self, disabled=False ):
+        if disabled:
+            return ' disabled="disabled"'
+        else:
+            return ''
+
+
+class TextField(BaseField):
+    """
+    A standard text input box.
+
+    >>> print TextField( "foo" ).get_html()
+    <input type="text" name="foo" size="10" value="">
+    >>> print TextField( "bins", size=4, value="default" ).get_html()
+    <input type="text" name="bins" size="4" value="default">
+    """
+    def __init__( self, name, size=None, value=None ):
+        self.name = name
+        self.size = int( size or 10 )
+        self.value = value or ""
+
+    def get_html( self, prefix="", disabled=False ):
+        value = self.value
+        value = unicodify( value )
+        return unicodify( '<input type="text" name="%s%s" size="%d" value="%s"%s>'
+                          % ( prefix, self.name, self.size, escape( value, quote=True ), self.get_disabled_str( disabled ) ) )
+
+    def set_size(self, size):
+        self.size = int( size )
+
+
+class PasswordField(BaseField):
+    """
+    A password input box. Text appears as "******".
+
+    >>> print PasswordField( "foo" ).get_html()
+    <input type="password" name="foo" size="10" value="">
+    >>> print PasswordField( "bins", size=4, value="default" ).get_html()
+    <input type="password" name="bins" size="4" value="default">
+    """
+    def __init__( self, name, size=None, value=None ):
+        self.name = name
+        self.size = int( size or 10 )
+        self.value = value or ""
+
+    def get_html( self, prefix="", disabled=False  ):
+        return unicodify( '<input type="password" name="%s%s" size="%d" value="%s"%s>'
+                          % ( prefix, self.name, self.size, escape( str( self.value ), quote=True ), self.get_disabled_str( disabled ) ) )
+
+    def set_size(self, size):
+        self.size = int( size )
+
+
+class TextArea(BaseField):
+    """
+    A standard text area box.
+
+    >>> print TextArea( "foo" ).get_html()
+    <textarea name="foo" rows="5" cols="25"></textarea>
+    >>> print TextArea( "bins", size="4x5", value="default" ).get_html()
+    <textarea name="bins" rows="4" cols="5">default</textarea>
+    """
+    _DEFAULT_SIZE = "5x25"
+
+    def __init__( self, name, size=None, value=None ):
+        self.name = name
+        size = size or self._DEFAULT_SIZE
+        self.size = size.split("x")
+        self.rows = int(self.size[0])
+        self.cols = int(self.size[-1])
+        self.value = value or ""
+
+    def get_html( self, prefix="", disabled=False ):
+        return unicodify( '<textarea name="%s%s" rows="%d" cols="%d"%s>%s</textarea>'
+                          % ( prefix, self.name, self.rows, self.cols, self.get_disabled_str( disabled ), escape( str( self.value ), quote=True ) ) )
+
+    def set_size(self, rows, cols):
+        self.rows = rows
+        self.cols = cols
+
+
+class CheckboxField(BaseField):
+    """
+    A checkbox (boolean input)
+
+    >>> print CheckboxField( "foo" ).get_html()
+    <input type="checkbox" id="foo" name="foo" value="__CHECKED__"><input type="hidden" name="foo" value="__NOTHING__">
+    >>> print CheckboxField( "bar", checked="yes" ).get_html()
+    <input type="checkbox" id="bar" name="bar" value="__CHECKED__" checked="checked"><input type="hidden" name="bar" value="__NOTHING__">
+    """
+
+    def __init__( self, name, checked=None, refresh_on_change=False, refresh_on_change_values=None ):
+        self.name = name
+        self.checked = ( checked is True ) or ( isinstance( checked, string_types ) and ( checked.lower() in ( "yes", "true", "on" ) ) )
+        self.refresh_on_change = refresh_on_change
+        self.refresh_on_change_values = refresh_on_change_values or []
+        if self.refresh_on_change:
+            self.refresh_on_change_text = ' refresh_on_change="true" '
+            if self.refresh_on_change_values:
+                self.refresh_on_change_text = '%s refresh_on_change_values="%s" ' % ( self.refresh_on_change_text, ",".join( self.refresh_on_change_values ) )
+        else:
+            self.refresh_on_change_text = ''
+
+    def get_html( self, prefix="", disabled=False ):
+        if self.checked:
+            checked_text = ' checked="checked"'
+        else:
+            checked_text = ''
+        id_name = prefix + self.name
+        return unicodify( '<input type="checkbox" id="%s" name="%s" value="__CHECKED__"%s%s%s><input type="hidden" name="%s" value="__NOTHING__"%s>'
+                          % ( id_name, id_name, checked_text, self.get_disabled_str( disabled ), self.refresh_on_change_text, id_name, self.get_disabled_str( disabled ) ) )
+
+    @staticmethod
+    def is_checked( value ):
+        if value is True:
+            return True
+        return isinstance( value, list ) and ( '__CHECKED__' in value or len( value ) == 2 )
+
+    def set_checked(self, value):
+        if isinstance( value, string_types ):
+            self.checked = value.lower() in [ "yes", "true", "on" ]
+        else:
+            self.checked = value
+
+
+class FileField(BaseField):
+    """
+    A file upload input.
+
+    >>> print FileField( "foo" ).get_html()
+    <input type="file" name="foo">
+    >>> print FileField( "foo", ajax = True ).get_html()
+    <input type="file" name="foo" galaxy-ajax-upload="true">
+    """
+
+    def __init__( self, name, value=None, ajax=False ):
+        self.name = name
+        self.ajax = ajax
+        self.value = value
+
+    def get_html( self, prefix="" ):
+        value_text = ""
+        if self.value:
+            value_text = ' value="%s"' % escape( str( self.value ), quote=True )
+        ajax_text = ""
+        if self.ajax:
+            ajax_text = ' galaxy-ajax-upload="true"'
+        return unicodify( '<input type="file" name="%s%s"%s%s>' % ( prefix, self.name, ajax_text, value_text ) )
+
+
+class FTPFileField(BaseField):
+    """
+    An FTP file upload input.
+    """
+    thead = '''
+        <table id="grid-table" class="grid">
+            <thead id="grid-table-header">
+                <tr>
+                    <th id="select-header"></th>
+                    <th id="name-header">
+                        File
+                    </th>
+                    <th id="size-header">
+                        Size
+                    </th>
+                    <th id="date-header">
+                        Date
+                    </th>
+                </tr>
+            </thead>
+            <tbody id="grid-table-body">
+    '''
+    trow = '''
+                <tr>
+                    <td><input type="checkbox" name="%s%s" value="%s"/></td>
+                    <td>%s</td>
+                    <td>%s</td>
+                    <td>%s</td>
+                </tr>
+    '''
+    tfoot = '''
+            </tbody>
+        </table>
+    '''
+
+    def __init__( self, name, dir, ftp_site, value=None ):
+        self.name = name
+        self.dir = dir
+        self.ftp_site = ftp_site
+        self.value = value
+
+    def get_html( self, prefix="" ):
+        rval = FTPFileField.thead
+        if self.dir is None:
+            rval += '<tr><td colspan="4"><em>Please <a href="%s">create</a> or <a href="%s">log in to</a> a Galaxy account to view files uploaded via FTP.</em></td></tr>' % ( url_for( controller='user', action='create', cntrller='user', referer=url_for( controller='root' ) ), url_for( controller='user', action='login', cntrller='user', referer=url_for( controller='root' ) ) )
+        elif not os.path.exists( self.dir ):
+            rval += '<tr><td colspan="4"><em>Your FTP upload directory contains no files.</em></td></tr>'
+        else:
+            uploads = []
+            for ( dirpath, dirnames, filenames ) in os.walk( self.dir ):
+                for filename in filenames:
+                    path = relpath( os.path.join( dirpath, filename ), self.dir )
+                    statinfo = os.lstat( os.path.join( dirpath, filename ) )
+                    uploads.append( dict( path=path,
+                                          size=nice_size( statinfo.st_size ),
+                                          ctime=time.strftime( "%m/%d/%Y %I:%M:%S %p", time.localtime( statinfo.st_ctime ) ) ) )
+            if not uploads:
+                rval += '<tr><td colspan="4"><em>Your FTP upload directory contains no files.</em></td></tr>'
+            uploads = sorted(uploads, key=itemgetter("path"))
+            for upload in uploads:
+                rval += FTPFileField.trow % ( prefix, self.name, upload['path'], upload['path'], upload['size'], upload['ctime'] )
+        rval += FTPFileField.tfoot
+        rval += '<div class="toolParamHelp">This Galaxy server allows you to upload files via FTP.  To upload some files, log in to the FTP server at <strong>%s</strong> using your Galaxy credentials (email address and password).</div>' % self.ftp_site
+        return rval
+
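+# Illustrative construction of FTPFileField (the directory and site are
+# hypothetical):
+#
+#     field = FTPFileField( 'ftp_files', dir='/srv/ftp/user@example.org',
+#                           ftp_site='ftp.example.org' )
+#     html = field.get_html( prefix='upload_' )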
+
+class HiddenField(BaseField):
+    """
+    A hidden field.
+
+    >>> print HiddenField( "foo", 100 ).get_html()
+    <input type="hidden" name="foo" value="100">
+    """
+    def __init__( self, name, value=None ):
+        self.name = name
+        self.value = value or ""
+
+    def get_html( self, prefix="" ):
+        return unicodify( '<input type="hidden" name="%s%s" value="%s">' % ( prefix, self.name, escape( str( self.value ), quote=True ) ) )
+
+
+class SelectField(BaseField):
+    """
+    A select field.
+
+    >>> t = SelectField( "foo", multiple=True )
+    >>> t.add_option( "tuti", 1 )
+    >>> t.add_option( "fruity", "x" )
+    >>> print t.get_html()
+    <select name="foo" multiple>
+    <option value="1">tuti</option>
+    <option value="x">fruity</option>
+    </select>
+
+    >>> t = SelectField( "bar" )
+    >>> t.add_option( "automatic", 3 )
+    >>> t.add_option( "bazooty", 4, selected=True )
+    >>> print t.get_html()
+    <select name="bar" last_selected_value="4">
+    <option value="3">automatic</option>
+    <option value="4" selected>bazooty</option>
+    </select>
+
+    >>> t = SelectField( "foo", display="radio" )
+    >>> t.add_option( "tuti", 1 )
+    >>> t.add_option( "fruity", "x" )
+    >>> print t.get_html()
+    <div><input type="radio" name="foo" value="1" id="foo|1"><label class="inline" for="foo|1">tuti</label></div>
+    <div><input type="radio" name="foo" value="x" id="foo|x"><label class="inline" for="foo|x">fruity</label></div>
+
+    >>> t = SelectField( "bar", multiple=True, display="checkboxes" )
+    >>> t.add_option( "automatic", 3 )
+    >>> t.add_option( "bazooty", 4, selected=True )
+    >>> print t.get_html()
+    <div class="checkUncheckAllPlaceholder" checkbox_name="bar"></div>
+    <div><input type="checkbox" name="bar" value="3" id="bar|3"><label class="inline" for="bar|3">automatic</label></div>
+    <div><input type="checkbox" name="bar" value="4" id="bar|4" checked='checked'><label class="inline" for="bar|4">bazooty</label></div>
+    """
+    def __init__( self, name, multiple=None, display=None, refresh_on_change=False, refresh_on_change_values=None, size=None, field_id=None ):
+        self.name = name
+        self.field_id = field_id
+        self.multiple = multiple or False
+        self.size = size
+        self.options = list()
+        if display == "checkboxes":
+            assert multiple, "Checkbox display only supported for multiple select"
+        elif display == "radio":
+            assert not( multiple ), "Radio display only supported for single select"
+        elif display is not None:
+            raise Exception( "Unknown display type: %s" % display )
+        self.display = display
+        self.refresh_on_change = refresh_on_change
+        self.refresh_on_change_values = refresh_on_change_values or []
+        if self.refresh_on_change:
+            self.refresh_on_change_text = ' refresh_on_change="true"'
+            if self.refresh_on_change_values:
+                self.refresh_on_change_text = '%s refresh_on_change_values="%s"' % ( self.refresh_on_change_text, escape( ",".join( self.refresh_on_change_values ), quote=True ) )
+        else:
+            self.refresh_on_change_text = ''
+
+    def add_option( self, text, value, selected=False ):
+        self.options.append( ( text, value, selected ) )
+
+    def get_html( self, prefix="", disabled=False, extra_attr=None ):
+        if extra_attr is not None:
+            self.extra_attributes = ' %s' % ' '.join( [ '%s="%s"' % ( k, escape( v ) ) for k, v in extra_attr.items() ] )
+        else:
+            self.extra_attributes = ''
+        if self.display == "checkboxes":
+            return self.get_html_checkboxes( prefix, disabled )
+        elif self.display == "radio":
+            return self.get_html_radio( prefix, disabled )
+        else:
+            return self.get_html_default( prefix, disabled )
+
+    def get_html_checkboxes( self, prefix="", disabled=False ):
+        rval = []
+        ctr = 0
+        if len( self.options ) > 1:
+            rval.append( '<div class="checkUncheckAllPlaceholder" checkbox_name="%s%s"></div>' % ( prefix, self.name ) )  # placeholder for the insertion of the Select All/Unselect All buttons
+        for text, value, selected in self.options:
+            style = ""
+            text = unicodify( text )
+            escaped_value = escape( unicodify( value ), quote=True )
+            uniq_id = "%s%s|%s" % (prefix, self.name, escaped_value)
+            if len(self.options) > 2 and ctr % 2 == 1:
+                style = " class=\"odd_row\""
+            selected_text = ""
+            if selected:
+                selected_text = " checked='checked'"
+            rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s" id="%s"%s%s%s><label class="inline" for="%s">%s</label></div>'
+                         % ( style, prefix, self.name, escaped_value, uniq_id, selected_text, self.get_disabled_str( disabled ), self.extra_attributes, uniq_id, escape( text, quote=True ) ) )
+            ctr += 1
+        return unicodify( "\n".join( rval ) )
+
+    def get_html_radio( self, prefix="", disabled=False ):
+        rval = []
+        ctr = 0
+        for text, value, selected in self.options:
+            style = ""
+            escaped_value = escape( str( value ), quote=True )
+            uniq_id = "%s%s|%s" % (prefix, self.name, escaped_value)
+            if len(self.options) > 2 and ctr % 2 == 1:
+                style = " class=\"odd_row\""
+            selected_text = ""
+            if selected:
+                selected_text = " checked='checked'"
+            rval.append( '<div%s><input type="radio" name="%s%s"%s value="%s" id="%s"%s%s%s><label class="inline" for="%s">%s</label></div>'
+                         % ( style,
+                             prefix,
+                             self.name,
+                             self.refresh_on_change_text,
+                             escaped_value,
+                             uniq_id,
+                             selected_text,
+                             self.get_disabled_str( disabled ),
+                             self.extra_attributes,
+                             uniq_id,
+                             text ) )
+            ctr += 1
+        return unicodify( "\n".join( rval ) )
+
+    def get_html_default( self, prefix="", disabled=False ):
+        if self.multiple:
+            multiple = " multiple"
+        else:
+            multiple = ""
+        if self.size:
+            size = ' size="%s"' % str( self.size )
+        else:
+            size = ''
+        rval = []
+        last_selected_value = ""
+        for text, value, selected in self.options:
+            if selected:
+                selected_text = " selected"
+                last_selected_value = value
+                if not isinstance( last_selected_value, string_types ):
+                    last_selected_value = str( last_selected_value )
+            else:
+                selected_text = ""
+            rval.append( '<option value="%s"%s>%s</option>' % ( escape( unicodify( value ), quote=True ), selected_text, escape( unicodify( text ), quote=True ) ) )
+        if last_selected_value:
+            last_selected_value = ' last_selected_value="%s"' % escape( unicodify( last_selected_value ), quote=True )
+        if self.field_id is not None:
+            id_string = ' id="%s"' % self.field_id
+        else:
+            id_string = ''
+        rval.insert( 0, '<select name="%s%s"%s%s%s%s%s%s%s>'
+                     % ( prefix, self.name, multiple, size, self.refresh_on_change_text, last_selected_value, self.get_disabled_str( disabled ), id_string, self.extra_attributes ) )
+        rval.append( '</select>' )
+        return unicodify( "\n".join( rval ) )
+
+    def get_selected( self, return_label=False, return_value=False, multi=False ):
+        '''
+        Return the currently selected option's label, value or both as a tuple.  For
+        multi-select lists, a list is returned.
+        '''
+        if multi:
+            selected_options = []
+        for label, value, selected in self.options:
+            if selected:
+                if return_label and return_value:
+                    if multi:
+                        selected_options.append( ( label, value ) )
+                    else:
+                        return ( label, value )
+                elif return_label:
+                    if multi:
+                        selected_options.append( label )
+                    else:
+                        return label
+                elif return_value:
+                    if multi:
+                        selected_options.append( value )
+                    else:
+                        return value
+        if multi:
+            return selected_options
+        return None
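+
+    # Example, using the 'bar' select from the class doctest above (option
+    # value 4 selected): get_selected( return_value=True ) returns 4;
+    # get_selected( return_label=True, return_value=True ) returns
+    # ( 'bazooty', 4 ); with multi=True a list of matches is returned instead.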
+
+    def to_dict( self ):
+        return dict(
+            name=self.name,
+            multiple=self.multiple,
+            options=self.options
+        )
+
+
+class DrillDownField( BaseField ):
+    """
+    A hierarchical select field, which allows users to 'drill down' through a tree-like set of options.
+
+    >>> t = DrillDownField( "foo", multiple=True, display="checkbox", options=[{'name': 'Heading 1', 'value': 'heading1', 'options': [{'name': 'Option 1', 'value': 'option1', 'options': []}, {'name': 'Option 2', 'value': 'option2', 'options': []}, {'name': 'Heading 1', 'value': 'heading1', 'options': [{'name': 'Option 3', 'value': 'option3', 'options': []}, {'name': 'Option 4', 'value': 'option4', 'options': []}]}]}, {'name': 'Option 5', 'value': 'option5', 'options': []}] )
+    >>> print t.get_html()
+    <div class="form-row drilldown-container" id="drilldown--666f6f">
+    <div class="form-row-input">
+    <div><span class="form-toggle icon-button toggle-expand" id="drilldown--666f6f-68656164696e6731-click"></span>
+    <input type="checkbox" name="foo" value="heading1" >Heading 1
+    </div><div class="form-row" id="drilldown--666f6f-68656164696e6731-container" style="float: left; margin-left: 1em;">
+    <div class="form-row-input">
+    <input type="checkbox" name="foo" value="option1" >Option 1
+    </div>
+    <div class="form-row-input">
+    <input type="checkbox" name="foo" value="option2" >Option 2
+    </div>
+    <div class="form-row-input">
+    <div><span class="form-toggle icon-button toggle-expand" id="drilldown--666f6f-68656164696e6731-68656164696e6731-click"></span>
+    <input type="checkbox" name="foo" value="heading1" >Heading 1
+    </div><div class="form-row" id="drilldown--666f6f-68656164696e6731-68656164696e6731-container" style="float: left; margin-left: 1em;">
+    <div class="form-row-input">
+    <input type="checkbox" name="foo" value="option3" >Option 3
+    </div>
+    <div class="form-row-input">
+    <input type="checkbox" name="foo" value="option4" >Option 4
+    </div>
+    </div>
+    </div>
+    </div>
+    </div>
+    <div class="form-row-input">
+    <input type="checkbox" name="foo" value="option5" >Option 5
+    </div>
+    </div>
+    >>> t = DrillDownField( "foo", multiple=False, display="radio", options=[{'name': 'Heading 1', 'value': 'heading1', 'options': [{'name': 'Option 1', 'value': 'option1', 'options': []}, {'name': 'Option 2', 'value': 'option2', 'options': []}, {'name': 'Heading 1', 'value': 'heading1', 'options': [{'name': 'Option 3', 'value': 'option3', 'options': []}, {'name': 'Option 4', 'value': 'option4', 'options': []}]}]}, {'name': 'Option 5', 'value': 'option5', 'options': []}] )
+    >>> print t.get_html()
+    <div class="form-row drilldown-container" id="drilldown--666f6f">
+    <div class="form-row-input">
+    <div><span class="form-toggle icon-button toggle-expand" id="drilldown--666f6f-68656164696e6731-click"></span>
+    <input type="radio" name="foo" value="heading1" >Heading 1
+    </div><div class="form-row" id="drilldown--666f6f-68656164696e6731-container" style="float: left; margin-left: 1em;">
+    <div class="form-row-input">
+    <input type="radio" name="foo" value="option1" >Option 1
+    </div>
+    <div class="form-row-input">
+    <input type="radio" name="foo" value="option2" >Option 2
+    </div>
+    <div class="form-row-input">
+    <div><span class="form-toggle icon-button toggle-expand" id="drilldown--666f6f-68656164696e6731-68656164696e6731-click"></span>
+    <input type="radio" name="foo" value="heading1" >Heading 1
+    </div><div class="form-row" id="drilldown--666f6f-68656164696e6731-68656164696e6731-container" style="float: left; margin-left: 1em;">
+    <div class="form-row-input">
+    <input type="radio" name="foo" value="option3" >Option 3
+    </div>
+    <div class="form-row-input">
+    <input type="radio" name="foo" value="option4" >Option 4
+    </div>
+    </div>
+    </div>
+    </div>
+    </div>
+    <div class="form-row-input">
+    <input type="radio" name="foo" value="option5" >Option 5
+    </div>
+    </div>
+    """
+
+    def __init__( self, name, multiple=None, display=None, refresh_on_change=False, options=[], value=[], refresh_on_change_values=[] ):
+        self.name = name
+        self.multiple = multiple or False
+        self.options = options
+        if value and not isinstance( value, list ):
+            value = [ value ]
+        elif not value:
+            value = []
+        self.value = value
+        if display == "checkbox":
+            assert multiple, "Checkbox display only supported for multiple select"
+        elif display == "radio":
+            assert not( multiple ), "Radio display only supported for single select"
+        else:
+            raise Exception( "Unknown display type: %s" % display )
+        self.display = display
+        self.refresh_on_change = refresh_on_change
+        self.refresh_on_change_values = refresh_on_change_values
+        if self.refresh_on_change:
+            self.refresh_on_change_text = ' refresh_on_change="true"'
+            if self.refresh_on_change_values:
+                self.refresh_on_change_text = '%s refresh_on_change_values="%s"' % ( self.refresh_on_change_text, ",".join( self.refresh_on_change_values ) )
+        else:
+            self.refresh_on_change_text = ''
+
+    def get_html( self, prefix="" ):
+        def find_expanded_options( expanded_options, options, parent_options=[] ):
+            for option in options:
+                if option['value'] in self.value:
+                    expanded_options.extend( parent_options )
+                if option['options']:
+                    new_parents = list( parent_options ) + [ option['value'] ]
+                    find_expanded_options( expanded_options, option['options'], new_parents )
+
+        def recurse_options( html, options, base_id, expanded_options=[] ):
+            for option in options:
+                escaped_option_value = escape( str( option['value'] ), quote=True )
+                selected = ( option['value'] in self.value )
+                if selected:
+                    selected = ' checked'
+                else:
+                    selected = ''
+                span_class = 'form-toggle icon-button toggle'
+                if option['value'] not in expanded_options:
+                    span_class = "%s-expand" % ( span_class )
+                html.append( '<div class="form-row-input">')
+                drilldown_group_id = "%s-%s" % ( base_id, hexlify( option['value'] ) )
+                if option['options']:
+                    html.append( '<div><span class="%s" id="%s-click"></span>' % ( span_class, drilldown_group_id ) )
+                html.append( '<input type="%s" name="%s%s" value="%s" %s>%s' % ( self.display, prefix, self.name, escaped_option_value, selected, option['name']) )
+                if option['options']:
+                    html.append( '</div><div class="form-row" id="%s-container" style="float: left; margin-left: 1em;">' % ( drilldown_group_id )  )
+                    recurse_options( html, option['options'], drilldown_group_id, expanded_options )
+                    html.append( '</div>')
+                html.append( '</div>')
+        drilldown_id = "drilldown-%s-%s" % ( hexlify( prefix ), hexlify( self.name ) )
+        rval = []
+        rval.append( '<div class="form-row drilldown-container" id="%s">' % ( drilldown_id ) )
+        expanded_options = []
+        find_expanded_options( expanded_options, self.options )
+        recurse_options( rval, self.options, drilldown_id, expanded_options )
+        rval.append( '</div>' )
+        return unicodify( '\n'.join( rval ) )
+
+
+class AddressField(BaseField):
+    @staticmethod
+    def fields():
+        return [  ( "short_desc", "Short address description", "Required" ),
+                  ( "name", "Name", "Required" ),
+                  ( "institution", "Institution", "Required" ),
+                  ( "address", "Address", "Required" ),
+                  ( "city", "City", "Required" ),
+                  ( "state", "State/Province/Region", "Required" ),
+                  ( "postal_code", "Postal Code", "Required" ),
+                  ( "country", "Country", "Required" ),
+                  ( "phone", "Phone", "" )  ]
+
+    def __init__(self, name, user=None, value=None, params=None):
+        self.name = name
+        self.user = user
+        self.value = value
+        self.select_address = None
+        self.params = params
+
+    def get_html( self, disabled=False ):
+        address_html = ''
+        add_ids = ['none']
+        if self.user:
+            for a in self.user.addresses:
+                add_ids.append( str( a.id ) )
+        add_ids.append( 'new' )
+        self.select_address = SelectField( self.name,
+                                           refresh_on_change=True,
+                                           refresh_on_change_values=add_ids )
+        if self.value == 'none':
+            self.select_address.add_option( 'Select one', 'none', selected=True )
+        else:
+            self.select_address.add_option( 'Select one', 'none' )
+        if self.user:
+            for a in self.user.addresses:
+                if not a.deleted:
+                    if self.value == str( a.id ):
+                        self.select_address.add_option( a.desc, str( a.id ), selected=True )
+                        # Display this address
+                        address_html += '''
+                                        <div class="form-row">
+                                            %s
+                                        </div>
+                                        ''' % a.get_html()
+                    else:
+                        self.select_address.add_option( a.desc, str( a.id ) )
+        if self.value == 'new':
+            self.select_address.add_option( 'Add a new address', 'new', selected=True )
+            for field_name, label, help_text in self.fields():
+                add_field = TextField( self.name + '_' + field_name,
+                                       40,
+                                       restore_text( self.params.get( self.name + '_' + field_name, ''  ) ) )
+                address_html += '''
+                                <div class="form-row">
+                                    <label>%s</label>
+                                    %s
+                                    ''' % ( label, add_field.get_html( disabled=disabled ) )
+                if help_text:
+                    address_html += '''
+                                    <div class="toolParamHelp" style="clear: both;">
+                                        %s
+                                    </div>
+                                    ''' % help_text
+                address_html += '''
+                                </div>
+                                '''
+        else:
+            self.select_address.add_option( 'Add a new address', 'new' )
+        return self.select_address.get_html( disabled=disabled ) + address_html
+
+
+class WorkflowField( BaseField ):
+    def __init__( self, name, user=None, value=None, params=None ):
+        self.name = name
+        self.user = user
+        self.value = value
+        self.select_workflow = None
+        self.params = params
+
+    def get_html( self, disabled=False ):
+        self.select_workflow = SelectField( self.name )
+        if self.value == 'none':
+            self.select_workflow.add_option( 'Select one', 'none', selected=True )
+        else:
+            self.select_workflow.add_option( 'Select one', 'none' )
+        if self.user:
+            for a in self.user.stored_workflows:
+                if not a.deleted:
+                    if str( self.value ) == str( a.id ):
+                        self.select_workflow.add_option( a.name, str( a.id ), selected=True )
+                    else:
+                        self.select_workflow.add_option( a.name, str( a.id ) )
+        return self.select_workflow.get_html( disabled=disabled )
+
+
+class WorkflowMappingField( BaseField):
+    def __init__( self, name, user=None, value=None, params=None, **kwd ):
+        # DBTODO integrate this with the new __build_workflow approach in requests_common.  As it is, not particularly useful.
+        self.name = name
+        self.user = user
+        self.value = value
+        self.select_workflow = None
+        self.params = params
+        self.workflow_inputs = []
+
+    def get_html( self, disabled=False ):
+        self.select_workflow = SelectField( self.name, refresh_on_change=True )
+        workflow_inputs = []
+        if self.value == 'none':
+            self.select_workflow.add_option( 'Select one', 'none', selected=True )
+        else:
+            self.select_workflow.add_option( 'Select one', 'none' )
+        if self.user:
+            for a in self.user.stored_workflows:
+                if not a.deleted:
+                    if str( self.value ) == str( a.id ):
+                        self.select_workflow.add_option( a.name, str( a.id ), selected=True )
+                    else:
+                        self.select_workflow.add_option( a.name, str( a.id ) )
+            if self.value and self.value != 'none':
+                # Workflow selected.  Find all inputs.
+                for workflow in self.user.stored_workflows:
+                    if workflow.id == int(self.value):
+                        for step in workflow.latest_workflow.steps:
+                            if step.type == 'data_input':
+                                if step.tool_inputs and "name" in step.tool_inputs:
+                                    workflow_inputs.append((step.tool_inputs['name'], TextField( '%s_%s' % (self.name, step.id), 20)))
+        # Do something more appropriate here and allow selection of inputs
+        return self.select_workflow.get_html( disabled=disabled ) + ''.join(['<div class="form-row"><label>%s</label>%s</div>' % (s[0], s[1].get_html()) for s in workflow_inputs])
+
+    def get_display_text(self):
+        if self.value:
+            return self.value
+        else:
+            return '-'
+
+
+class HistoryField( BaseField ):
+    def __init__( self, name, user=None, value=None, params=None ):
+        self.name = name
+        self.user = user
+        self.value = value
+        self.select_history = None
+        self.params = params
+
+    def get_html( self, disabled=False ):
+        self.select_history = SelectField( self.name )
+        if self.value == 'none':
+            self.select_history.add_option( 'No Import', 'none', selected=True )
+            self.select_history.add_option( 'New History', 'new' )
+        else:
+            self.select_history.add_option( 'No Import', 'none' )
+            if self.value == 'new':
+                self.select_history.add_option( 'New History', 'new', selected=True )
+            else:
+                self.select_history.add_option( 'New History', 'new' )
+        if self.user:
+            for a in self.user.histories:
+                if not a.deleted:
+                    if str( self.value ) == str( a.id ):
+                        self.select_history.add_option( a.name, str( a.id ), selected=True )
+                    else:
+                        self.select_history.add_option( a.name, str( a.id ) )
+        return self.select_history.get_html( disabled=disabled )
+
+    def get_display_text(self):
+        if self.value:
+            return self.value
+        else:
+            return '-'
+
+
+class LibraryField( BaseField ):
+    def __init__( self, name, value=None, trans=None ):
+        self.name = name
+        self.lddas = value
+        self.trans = trans
+
+    def get_html( self, prefix="", disabled=False ):
+        if not self.lddas:
+            ldda_ids = ""
+            text = "Select library dataset(s)"
+        else:
+            ldda_ids = "||".join( [ self.trans.security.encode_id( ldda.id ) for ldda in self.lddas ] )
+            text = "<br />".join( [ "%s. %s" % (i + 1, ldda.name) for i, ldda in enumerate(self.lddas)] )
+        return unicodify( '<a href="javascript:void(0);" class="add-librarydataset">%s</a> \
+                <input type="hidden" name="%s%s" value="%s">' % ( text, prefix, self.name, escape( str(ldda_ids), quote=True ) ) )
+
+    def get_display_text(self):
+        # self.lddas (set in __init__) is a list of library datasets or None
+        if self.lddas:
+            return ", ".join( [ ldda.name for ldda in self.lddas ] )
+        else:
+            return 'None'
+
+
+def get_suite():
+    """Get unittest suite for this module"""
+    import doctest
+    import sys
+    return doctest.DocTestSuite( sys.modules[__name__] )
+
+
+# --------- Utility methods -----------------------------
+def build_select_field( trans, objs, label_attr, select_field_name, initial_value='none',
+                        selected_value='none', refresh_on_change=False, multiple=False, display=None, size=None ):
+    """
+    Build a SelectField given a set of objects.  The received params are:
+
+    - objs: the set of objects used to populate the option list
+    - label_attr: the attribute of each obj (e.g., name, email, etc ) whose value is used to populate each option label.
+
+        - If the string 'self' is passed as label_attr, each obj in objs is assumed to be a string, so the obj itself is used
+
+    - select_field_name: the name of the SelectField
+    - initial_value: the value of the first option in the SelectField - allows for an option telling the user to select something
+    - selected_value: the value of the currently selected option
+    - refresh_on_change: True if the SelectField should perform a refresh_on_change
+    """
+    if initial_value == 'none':
+        values = [ initial_value ]
+    else:
+        values = []
+    for obj in objs:
+        if label_attr == 'self':
+            # Each obj is a string
+            values.append( obj )
+        else:
+            values.append( trans.security.encode_id( obj.id ) )
+    if refresh_on_change:
+        refresh_on_change_values = values
+    else:
+        refresh_on_change_values = []
+    select_field = SelectField( name=select_field_name,
+                                multiple=multiple,
+                                display=display,
+                                refresh_on_change=refresh_on_change,
+                                refresh_on_change_values=refresh_on_change_values,
+                                size=size )
+    for obj in objs:
+        if label_attr == 'self':
+            # Each obj is a string
+            if str( selected_value ) == str( obj ):
+                select_field.add_option( obj, obj, selected=True )
+            else:
+                select_field.add_option( obj, obj )
+        else:
+            label = getattr( obj, label_attr )
+            if str( selected_value ) == str( obj.id ) or str( selected_value ) == trans.security.encode_id( obj.id ):
+                select_field.add_option( label, trans.security.encode_id( obj.id ), selected=True )
+            else:
+                select_field.add_option( label, trans.security.encode_id( obj.id ) )
+    return select_field
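+
+
+# Illustrative call, assuming `users` is a list of objects with an `email`
+# attribute (the variable names are hypothetical):
+#
+#     field = build_select_field( trans, users, 'email', 'user_id',
+#                                 selected_value='none', refresh_on_change=True )
+#     html = field.get_html()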
diff --git a/lib/galaxy/web/formatting.py b/lib/galaxy/web/formatting.py
new file mode 100644
index 0000000..d7b2151
--- /dev/null
+++ b/lib/galaxy/web/formatting.py
@@ -0,0 +1,32 @@
+import locale
+from string import Template
+
+DEFAULT_LOCALE_FORMAT = '%a %b %e %H:%M:%S %Y'
+ISO_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+
+def expand_pretty_datetime_format(value):
+    """
+
+    >>> expand_pretty_datetime_format("%H:%M:%S %Z")
+    '%H:%M:%S %Z'
+    >>> locale_format = expand_pretty_datetime_format("$locale (UTC)")
+    >>> import locale
+    >>> expected_format = '%s (UTC)' % locale.nl_langinfo(locale.D_T_FMT)
+    >>> locale_format == expected_format
+    True
+    >>> expand_pretty_datetime_format("$iso8601")
+    '%Y-%m-%d %H:%M:%S'
+    """
+    locale_format = None
+    try:
+        locale_format = locale.nl_langinfo(locale.D_T_FMT)
+    except AttributeError:  # nl_langinfo not available
+        pass
+    if not locale_format:
+        locale_format = DEFAULT_LOCALE_FORMAT
+    stock_formats = dict(
+        locale=locale_format,
+        iso8601=ISO_DATETIME_FORMAT,
+    )
+    return Template(value).safe_substitute(**stock_formats)
diff --git a/lib/galaxy/web/framework/__init__.py b/lib/galaxy/web/framework/__init__.py
new file mode 100644
index 0000000..4ea0d0b
--- /dev/null
+++ b/lib/galaxy/web/framework/__init__.py
@@ -0,0 +1,6 @@
+"""
+Galaxy web application framework
+"""
+
+from . import base
+url_for = base.routes.url_for
diff --git a/lib/galaxy/web/framework/base.py b/lib/galaxy/web/framework/base.py
new file mode 100644
index 0000000..e227a80
--- /dev/null
+++ b/lib/galaxy/web/framework/base.py
@@ -0,0 +1,478 @@
+"""
+A simple WSGI application/framework.
+"""
+import cgi  # For FieldStorage
+import logging
+import os.path
+import socket
+import tarfile
+import tempfile
+import time
+import types
+from Cookie import SimpleCookie
+
+import routes
+import webob
+# We will use some very basic HTTP/wsgi utilities from the paste library
+from paste import httpexceptions
+from paste.request import get_cookies
+from paste.response import HeaderDict
+
+from galaxy.util import smart_str
+
+log = logging.getLogger( __name__ )
+
+#: time of the most recent server startup
+server_starttime = int( time.time() )
+
+
+def __resource_with_deleted( self, member_name, collection_name, **kwargs ):
+    """
+    Method to monkeypatch onto routes.mapper.Mapper which does the same thing
+    as resource() with the addition of standardized routes for handling
+    elements in Galaxy's "deleted but not really deleted" fashion.
+    """
+    collection_path = kwargs.get( 'path_prefix', '' ) + '/' + collection_name + '/deleted'
+    member_path = collection_path + '/{id}'
+    self.connect( 'deleted_' + collection_name, collection_path, controller=collection_name, action='index', deleted=True, conditions=dict( method=['GET'] ) )
+    self.connect( 'deleted_' + member_name, member_path, controller=collection_name, action='show', deleted=True, conditions=dict( method=['GET'] ) )
+    self.connect( 'undelete_deleted_' + member_name, member_path + '/undelete', controller=collection_name, action='undelete',
+                  conditions=dict( method=['POST'] ) )
+    self.resource( member_name, collection_name, **kwargs )
+
+
+routes.Mapper.resource_with_deleted = __resource_with_deleted
+
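+# Illustrative effect (the member/collection names are hypothetical): after
+#
+#     mapper.resource_with_deleted( 'history', 'histories' )
+#
+# the mapper also answers GET /histories/deleted (index, deleted=True),
+# GET /histories/deleted/{id} (show, deleted=True) and
+# POST /histories/deleted/{id}/undelete, on top of the routes resource() adds.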
+
+class WebApplication( object ):
+    """
+    A simple web application which maps requests to objects using routes,
+    and to methods on those objects in the CherryPy style. Thus simple
+    argument mapping in the CherryPy style occurs automatically, but more
+    complicated encoding of arguments in the PATH_INFO can be performed
+    with routes.
+    """
+    def __init__( self ):
+        """
+        Create a new web application object. To actually connect some
+        controllers use `add_controller` and `add_route`. Call
+        `finalize_config` when all controllers and routes have been added
+        and `__call__` to handle a request (WSGI style).
+        """
+        self.controllers = dict()
+        self.api_controllers = dict()
+        self.mapper = routes.Mapper()
+        # FIXME: The following two options are deprecated and should be
+        # removed.  Consult the Routes documentation.
+        self.mapper.minimization = True
+        # self.mapper.explicit = False
+        self.transaction_factory = DefaultWebTransaction
+        # Set if trace logging is enabled
+        self.trace_logger = None
+
+    def add_ui_controller( self, controller_name, controller ):
+        """
+        Add a controller class to this application. A controller class has
+        methods which handle web requests. To connect a URL to a controller's
+        method use `add_route`.
+        """
+        log.debug( "Enabling '%s' controller, class: %s",
+                   controller_name, controller.__class__.__name__ )
+        self.controllers[ controller_name ] = controller
+
+    def add_api_controller( self, controller_name, controller ):
+        log.debug( "Enabling '%s' API controller, class: %s",
+                   controller_name, controller.__class__.__name__ )
+        self.api_controllers[ controller_name ] = controller
+
+    def add_route( self, route, **kwargs ):
+        """
+        Add a route to match a URL with a method. Accepts all keyword
+        arguments of `routes.Mapper.connect`. Every route should result in
+        at least a controller value which corresponds to one of the
+        objects added with `add_controller`. It optionally may yield an
+        `action` argument which will be used to locate the method to call
+        on the controller. Additional arguments will be passed to the
+        method as keyword args.
+        """
+        self.mapper.connect( route, **kwargs )
+
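+    # Illustrative wiring (the controller class is hypothetical):
+    #
+    #     webapp.add_ui_controller( 'history', HistoryController( app ) )
+    #     webapp.add_route( '/history/{action}', controller='history' )
+    #     webapp.finalize_config()
+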
+    def add_client_route( self, route ):
+        self.add_route(route, controller='root', action='client')
+
+    def set_transaction_factory( self, transaction_factory ):
+        """
+        Use the callable `transaction_factory` to create the transaction
+        which will be passed to requests.
+        """
+        self.transaction_factory = transaction_factory
+
+    def finalize_config( self ):
+        """
+        Call when application is completely configured and ready to serve
+        requests
+        """
+        # Create/compile the regular expressions for route mapping
+        self.mapper.create_regs( self.controllers.keys() )
+
+    def trace( self, **fields ):
+        if self.trace_logger:
+            self.trace_logger.log( "WebApplication", **fields )
+
+    def __call__( self, environ, start_response ):
+        """
+        Call interface as specified by WSGI. Wraps the environment in user
+        friendly objects, finds the appropriate method to handle the request
+        and calls it.
+        """
+        # Immediately create request_id which we will use for logging
+        request_id = environ.get( 'request_id', 'unknown' )
+        if self.trace_logger:
+            self.trace_logger.context_set( "request_id", request_id )
+        self.trace( message="Starting request" )
+        try:
+            return self.handle_request( environ, start_response )
+        finally:
+            self.trace( message="Handle request finished" )
+            if self.trace_logger:
+                self.trace_logger.context_remove( "request_id" )
+
+    def handle_request( self, environ, start_response, body_renderer=None ):
+        # Grab the request_id (should have been set by middleware)
+        request_id = environ.get( 'request_id', 'unknown' )
+        # Map url using routes
+        path_info = environ.get( 'PATH_INFO', '' )
+        map = self.mapper.match( path_info, environ )
+        if path_info.startswith('/api'):
+            environ[ 'is_api_request' ] = True
+            controllers = self.api_controllers
+        else:
+            environ[ 'is_api_request' ] = False
+            controllers = self.controllers
+        if map is None:
+            raise httpexceptions.HTTPNotFound( "No route for " + path_info )
+        self.trace( path_info=path_info, map=map )
+        # Setup routes
+        rc = routes.request_config()
+        rc.mapper = self.mapper
+        rc.mapper_dict = map
+        rc.environ = environ
+        # Setup the transaction
+        trans = self.transaction_factory( environ )
+        trans.request_id = request_id
+        rc.redirect = trans.response.send_redirect
+        # Get the controller class
+        controller_name = map.pop( 'controller', None )
+        controller = controllers.get( controller_name, None )
+        if controller_name is None:
+            raise httpexceptions.HTTPNotFound( "No controller for " + path_info )
+        # Resolve action method on controller
+        action = map.pop( 'action', 'index' )
+        # This is the easiest way to make the controller/action accessible for
+        # url_for invocations.  Specifically, grids.
+        trans.controller = controller_name
+        trans.action = action
+        method = getattr( controller, action, None )
+        if method is None:
+            method = getattr( controller, 'default', None )
+        if method is None:
+            raise httpexceptions.HTTPNotFound( "No action for " + path_info )
+        # Is the method exposed
+        if not getattr( method, 'exposed', False ):
+            raise httpexceptions.HTTPNotFound( "Action not exposed for " + path_info )
+        # Is the method callable
+        if not callable( method ):
+            raise httpexceptions.HTTPNotFound( "Action not callable for " + path_info )
+        environ['controller_action_key'] = "%s.%s.%s" % ('api' if environ['is_api_request'] else 'web', controller_name, action or 'default')
+        # Combine mapper args and query string / form args and call
+        kwargs = trans.request.params.mixed()
+        kwargs.update( map )
+        # Special key for AJAX debugging, remove to avoid confusing methods
+        kwargs.pop( '_', None )
+        try:
+            body = method( trans, **kwargs )
+        except Exception as e:
+            body = self.handle_controller_exception( e, trans, **kwargs )
+            if not body:
+                raise
+        body_renderer = body_renderer or self._render_body
+        return body_renderer( trans, body, environ, start_response )
+
+    def _render_body( self, trans, body, environ, start_response ):
+        # Now figure out what we got back and try to get it to the browser in
+        # a smart way
+        if callable( body ):
+            # Assume the callable is another WSGI application to run
+            return body( environ, start_response )
+        elif isinstance( body, types.FileType ):
+            # Stream the file back to the browser
+            return send_file( start_response, trans, body )
+        elif isinstance( body, tarfile.ExFileObject ):
+            # Stream the tarfile member back to the browser
+            body = iterate_file( body )
+            start_response( trans.response.wsgi_status(),
+                            trans.response.wsgi_headeritems() )
+            return body
+        else:
+            start_response( trans.response.wsgi_status(),
+                            trans.response.wsgi_headeritems() )
+            return self.make_body_iterable( trans, body )
+
+    def make_body_iterable( self, trans, body ):
+        if isinstance( body, ( types.GeneratorType, list, tuple ) ):
+            # Recursively stream the iterable
+            return flatten( body )
+        elif body is None:
+            # Returns an empty body
+            return []
+        else:
+            # Worst case scenario
+            return [ smart_str( body ) ]
+
+    def handle_controller_exception( self, e, trans, **kwargs ):
+        """
+        Allow handling of exceptions raised in controller methods.
+        """
+        return False
+
+
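+# Editor's note: the sketch below is hypothetical (not part of upstream
+# Galaxy, never called); it illustrates the return types that _render_body
+# and make_body_iterable know how to dispatch on.
+def _example_controller_return_types():
+    def returns_markup( trans ):
+        return "<html>...</html>"       # wrapped via make_body_iterable()
+
+    def returns_chunks( trans ):
+        yield "chunk 1"                 # generators are streamed by flatten()
+        yield "chunk 2"
+
+    def returns_wsgi_app( trans ):
+        def app( environ, start_response ):
+            start_response( "200 OK", [ ( "Content-Type", "text/plain" ) ] )
+            return [ "delegated" ]
+        return app                      # callables are run as nested WSGI apps
+    return returns_markup, returns_chunks, returns_wsgi_app
+
+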
+class WSGIEnvironmentProperty( object ):
+    """
+    Descriptor that delegates a property to a key in the environ member of the
+    associated object (provides property style access to keys in the WSGI
+    environment)
+    """
+    def __init__( self, key, default='' ):
+        self.key = key
+        self.default = default
+
+    def __get__( self, obj, type=None ):
+        if obj is None:
+            return self
+        return obj.environ.get( self.key, self.default )
+
+
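+# Editor's note: a minimal, hypothetical sketch (not called anywhere) of how
+# the descriptor proxies attribute access to a WSGI environ key.
+def _example_environment_property():
+    class Env( object ):
+        method = WSGIEnvironmentProperty( 'REQUEST_METHOD', default='GET' )
+
+        def __init__( self, environ ):
+            self.environ = environ
+    # ( 'POST', 'GET' ); the second instance falls back to the default
+    return Env( { 'REQUEST_METHOD': 'POST' } ).method, Env( {} ).method
+
+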
+class LazyProperty( object ):
+    """
+    Property that replaces itself with a calculated value the first time
+    it is used.
+    """
+    def __init__( self, func ):
+        self.func = func
+
+    def __get__(self, obj, type=None ):
+        if obj is None:
+            return self
+        value = self.func( obj )
+        setattr( obj, self.func.func_name, value )
+        return value
+
+
+lazy_property = LazyProperty
+
+
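+# Editor's note: a hypothetical sketch (not called anywhere); the first
+# access computes the value, then setattr() replaces the descriptor with the
+# plain result so later lookups skip the function entirely.
+def _example_lazy_property():
+    class Thing( object ):
+        @lazy_property
+        def answer( self ):
+            return 40 + 2
+    thing = Thing()
+    first = thing.answer    # computed once, cached on the instance
+    second = thing.answer   # plain attribute lookup, no recompute
+    return first, second
+
+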
+class DefaultWebTransaction( object ):
+    """
+    Wraps the state of a single web transaction (request/response cycle).
+
+    TODO: Provide hooks to allow application specific state to be included
+          in here.
+    """
+    def __init__( self, environ ):
+        self.environ = environ
+        self.request = Request( environ )
+        self.response = Response()
+
+    @lazy_property
+    def session( self ):
+        """
+        Get the user's session state. This is lazy since we rarely use it
+        and the creation/serialization cost is high.
+        """
+        if 'com.saddi.service.session' in self.environ:
+            return self.environ['com.saddi.service.session'].session
+        elif 'beaker.session' in self.environ:
+            return self.environ['beaker.session']
+        else:
+            return None
+
+
+class FieldStorage( cgi.FieldStorage ):
+    def make_file(self, binary=None):
+        # For request.params, override cgi.FieldStorage.make_file to create persistent
+        # tempfiles.  Necessary for externalizing the upload tool.  It's a little hacky
+        # but for performance reasons it's way better to use Paste's tempfile than to
+        # create a new one and copy.
+        return tempfile.NamedTemporaryFile()
+
+    def read_lines(self):
+        # Always make a new file
+        self.file = self.make_file()
+        self.__file = None
+        if self.outerboundary:
+            self.read_lines_to_outerboundary()
+        else:
+            self.read_lines_to_eof()
+
+
+cgi.FieldStorage = FieldStorage
+
+
+class Request( webob.Request ):
+    """
+    Encapsulates an HTTP request.
+    """
+    def __init__( self, environ ):
+        """
+        Create a new request wrapping the WSGI environment `environ`
+        """
+        #  self.environ = environ
+        webob.Request.__init__( self, environ, charset='utf-8', decode_param_names=False )
+    # Properties that are computed and cached on first use
+
+    @lazy_property
+    def remote_host( self ):
+        try:
+            return socket.gethostbyname( self.remote_addr )
+        except socket.error:
+            return self.remote_addr
+
+    @lazy_property
+    def remote_hostname( self ):
+        try:
+            return socket.gethostbyaddr( self.remote_addr )[0]
+        except socket.error:
+            return self.remote_addr
+
+    @lazy_property
+    def cookies( self ):
+        return get_cookies( self.environ )
+
+    @lazy_property
+    def base( self ):
+        return ( self.scheme + "://" + self.host )
+
+    # @lazy_property
+    # def params( self ):
+    #     return parse_formvars( self.environ )
+
+    @lazy_property
+    def path( self ):
+        return self.environ.get('SCRIPT_NAME', '') + self.environ['PATH_INFO']
+
+    @lazy_property
+    def browser_url( self ):
+        return self.base + self.path
+
+    # Descriptors that map properties to the associated environment
+
+    # scheme = WSGIEnvironmentProperty( 'wsgi.url_scheme' )
+    # remote_addr = WSGIEnvironmentProperty( 'REMOTE_ADDR' )
+
+    remote_port = WSGIEnvironmentProperty( 'REMOTE_PORT' )
+
+    # method = WSGIEnvironmentProperty( 'REQUEST_METHOD' )
+    # script_name = WSGIEnvironmentProperty( 'SCRIPT_NAME' )
+
+    protocol = WSGIEnvironmentProperty( 'SERVER_PROTOCOL' )
+
+    # query_string = WSGIEnvironmentProperty( 'QUERY_STRING' )
+    # path_info = WSGIEnvironmentProperty( 'PATH_INFO' )
+
+
+class Response( object ):
+    """
+    Describes an HTTP response. Currently very simple since the actual body
+    of the response is handled separately.
+    """
+    def __init__( self ):
+        """
+        Create a new Response defaulting to HTML content and "200 OK" status
+        """
+        self.status = "200 OK"
+        self.headers = HeaderDict( { "content-type": "text/html" } )
+        self.cookies = SimpleCookie()
+
+    def set_content_type( self, type ):
+        """
+        Sets the Content-Type header
+        """
+        self.headers[ "content-type" ] = type
+
+    def get_content_type( self ):
+        return self.headers[ "content-type" ]
+
+    def send_redirect( self, url ):
+        """
+        Send an HTTP redirect response to the target `url`
+        """
+        raise httpexceptions.HTTPFound( url.encode('utf-8'), headers=self.wsgi_headeritems() )
+
+    def wsgi_headeritems( self ):
+        """
+        Return headers in format appropriate for WSGI `start_response`
+        """
+        result = self.headers.headeritems()
+        # Add cookie to header
+        for name in self.cookies.keys():
+            crumb = self.cookies[name]
+            header, value = str( crumb ).split( ': ', 1 )
+            result.append( ( header, value ) )
+        return result
+
+    def wsgi_status( self ):
+        """
+        Return status line in format appropriate for WSGI `start_response`
+        """
+        if isinstance( self.status, int ):
+            exception = httpexceptions.get_exception( self.status )
+            return "%d %s" % ( exception.code, exception.title )
+        else:
+            return self.status
+
+
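+# Editor's note: a hypothetical sketch (not called anywhere) of building a
+# Response and deriving the status/header pair handed to `start_response`.
+def _example_response_usage():
+    response = Response()
+    response.set_content_type( "application/json" )
+    response.status = 404
+    status = response.wsgi_status()        # -> "404 Not Found"
+    headers = response.wsgi_headeritems()  # [ ( 'content-type', 'application/json' ) ]
+    return status, headers
+
+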
+# ---- Utilities ------------------------------------------------------------
+
+CHUNK_SIZE = 2 ** 16
+
+
+def send_file( start_response, trans, body ):
+    # If configured use X-Accel-Redirect header for nginx
+    base = trans.app.config.nginx_x_accel_redirect_base
+    apache_xsendfile = trans.app.config.apache_xsendfile
+    if base:
+        trans.response.headers['X-Accel-Redirect'] = \
+            base + os.path.abspath( body.name )
+        body = [ "" ]
+    elif apache_xsendfile:
+        trans.response.headers['X-Sendfile'] = os.path.abspath( body.name )
+        body = [ "" ]
+    # Fall back on sending the file in chunks
+    else:
+        body = iterate_file( body )
+    start_response( trans.response.wsgi_status(),
+                    trans.response.wsgi_headeritems() )
+    return body
+
+
+def iterate_file( file ):
+    """
+    Progressively return chunks from `file`.
+    """
+    while 1:
+        chunk = file.read( CHUNK_SIZE )
+        if not chunk:
+            break
+        yield chunk
+
+
+def flatten( seq ):
+    """
+    Flatten a possibly nested set of iterables
+    """
+    for x in seq:
+        if isinstance( x, ( types.GeneratorType, list, tuple ) ):
+            for y in flatten( x ):
+                yield smart_str( y )
+        else:
+            yield smart_str( x )
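+
+
+# Editor's note: a hypothetical sketch (not called anywhere); nested
+# generators, lists and tuples are walked recursively and every leaf is
+# passed through smart_str().
+def _example_flatten():
+    def inner():
+        yield "b"
+        yield [ "c", ( "d", ) ]
+    return list( flatten( [ "a", inner() ] ) )  # -> [ 'a', 'b', 'c', 'd' ]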
diff --git a/lib/galaxy/web/framework/decorators.py b/lib/galaxy/web/framework/decorators.py
new file mode 100644
index 0000000..94d639c
--- /dev/null
+++ b/lib/galaxy/web/framework/decorators.py
@@ -0,0 +1,404 @@
+import inspect
+import logging
+from functools import wraps
+from json import loads
+from traceback import format_exc
+
+import paste.httpexceptions
+from six import string_types
+
+from galaxy import util
+from galaxy.exceptions import error_codes, MessageException
+from galaxy.util.json import safe_dumps
+from galaxy.web.framework import url_for
+
+log = logging.getLogger( __name__ )
+
+JSON_CONTENT_TYPE = "application/json"
+JSONP_CONTENT_TYPE = "application/javascript"
+JSONP_CALLBACK_KEY = 'callback'
+
+
+def error( message ):
+    raise MessageException( message, type='error' )
+
+
+# ----------------------------------------------------------------------------- web controller decorators
+def _save_orig_fn( wrapped, orig ):
+    if not hasattr( orig, '_orig' ):
+        wrapped._orig = orig
+    return wrapped
+
+
+def expose( func ):
+    """
+    Decorator: mark a function as 'exposed' and thus web accessible
+    """
+    func.exposed = True
+    return func
+
+
+def json( func, pretty=False ):
+    """
+    Format the response as JSON and set the response content type to
+    JSON_CONTENT_TYPE.
+    """
+    @wraps(func)
+    def call_and_format( self, trans, *args, **kwargs ):
+        # pull out any callback argument to the api endpoint and set the content type to json or javascript
+        jsonp_callback = kwargs.pop( JSONP_CALLBACK_KEY, None )
+        if jsonp_callback:
+            trans.response.set_content_type( JSONP_CONTENT_TYPE )
+        else:
+            trans.response.set_content_type( JSON_CONTENT_TYPE )
+        rval = func( self, trans, *args, **kwargs )
+        return _format_return_as_json( rval, jsonp_callback, pretty=( pretty or trans.debug ) )
+
+    if not hasattr( func, '_orig' ):
+        call_and_format._orig = func
+    return expose( _save_orig_fn( call_and_format, func ) )
+
+
+def json_pretty( func ):
+    """
+    Indent and sort returned JSON.
+    """
+    return json( func, pretty=True )
+
+
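+# Editor's note: a hypothetical controller sketch (not called anywhere);
+# @json marks the method as exposed and serializes its return value when the
+# request is handled.
+def _example_json_controller():
+    class FakeController( object ):
+        @json
+        def status( self, trans ):
+            return { "state": "ok" }
+    return FakeController
+
+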
+def require_login( verb="perform this action", use_panels=False, webapp='galaxy' ):
+    def argcatcher( func ):
+        @wraps(func)
+        def decorator( self, trans, *args, **kwargs ):
+            if trans.get_user():
+                return func( self, trans, *args, **kwargs )
+            else:
+                return trans.show_error_message(
+                    'You must be <a target="galaxy_main" href="%s">logged in</a> to %s.'
+                    % ( url_for( controller='user', action='login', webapp=webapp ), verb ), use_panels=use_panels )
+        return decorator
+    return argcatcher
+
+
+def require_admin( func ):
+    @wraps(func)
+    def decorator( self, trans, *args, **kwargs ):
+        if not trans.user_is_admin():
+            msg = "You must be an administrator to access this feature."
+            user = trans.get_user()
+            if not trans.app.config.admin_users_list:
+                msg = "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration."
+            elif not user:
+                msg = "You must be logged in as an administrator to access this feature."
+            trans.response.status = 403
+            if trans.response.get_content_type() == 'application/json':
+                return msg
+            else:
+                return trans.show_error_message( msg )
+        return func( self, trans, *args, **kwargs )
+    return decorator
+
+
+# ----------------------------------------------------------------------------- (original) api decorators
+def expose_api( func, to_json=True, user_required=True ):
+    """
+    Expose this function via the API.
+    """
+    @wraps(func)
+    def decorator( self, trans, *args, **kwargs ):
+        def error( environ, start_response ):
+            start_response( error_status, [('Content-type', 'text/plain')] )
+            return error_message
+        error_status = '403 Forbidden'
+        if trans.error_message:
+            return trans.error_message
+        if user_required and trans.anonymous:
+            error_message = "API Authentication Required for this request"
+            return error
+        if trans.request.body:
+            try:
+                kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
+            except ValueError:
+                error_status = '400 Bad Request'
+                error_message = 'Your request did not appear to be valid JSON; please consult the API documentation'
+                return error
+
+        # pull out any callback argument to the api endpoint and set the content type to json or javascript
+        jsonp_callback = kwargs.pop( JSONP_CALLBACK_KEY, None )
+        if jsonp_callback:
+            trans.response.set_content_type( JSONP_CONTENT_TYPE )
+        else:
+            trans.response.set_content_type( JSON_CONTENT_TYPE )
+
+        # send 'do not cache' headers to handle IE's caching of ajax get responses
+        trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
+
+        # Perform api_run_as processing, possibly changing identity
+        if 'payload' in kwargs and 'run_as' in kwargs['payload']:
+            if not trans.user_can_do_run_as():
+                error_message = 'User does not have permissions to run jobs as another user'
+                return error
+            try:
+                decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
+            except TypeError:
+                trans.response.status = 400
+                return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
+            try:
+                user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+                trans.api_inherit_admin = trans.user_is_admin()
+                trans.set_user(user)
+            except:
+                trans.response.status = 400
+                return "That user does not exist."
+        try:
+            rval = func( self, trans, *args, **kwargs)
+            if to_json:
+                rval = _format_return_as_json( rval, jsonp_callback, pretty=trans.debug )
+            return rval
+        except paste.httpexceptions.HTTPException:
+            raise  # handled
+        except:
+            log.exception( 'Uncaught exception in exposed API method:' )
+            raise paste.httpexceptions.HTTPServerError()
+    return expose( _save_orig_fn( decorator, func ) )
+
+
+def __extract_payload_from_request( trans, func, kwargs ):
+
+    content_type = trans.request.headers[ 'content-type' ]
+    if content_type.startswith( 'application/x-www-form-urlencoded' ) or content_type.startswith( 'multipart/form-data' ):
+        # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
+        # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
+        # are not a part of the request body. This is a problem because it's not possible to differentiate between values
+        # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
+        # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
+        # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
+        payload = kwargs.copy()
+        named_args, _, _, _ = inspect.getargspec( func )
+        for arg in named_args:
+            payload.pop( arg, None )
+        for k, v in payload.iteritems():
+            if isinstance( v, string_types ):
+                try:
+                    # note: parse_non_hex_float only needed here for single string values where something like
+                    # 40000000000000e5 will be parsed as a scientific notation float. This is as opposed to hex strings
+                    # in larger JSON structures where quoting prevents this (further below)
+                    payload[ k ] = loads( v, parse_float=util.parse_non_hex_float )
+                except:
+                    # may not actually be json, just continue
+                    pass
+        payload = util.recursively_stringify_dictionary_keys( payload )
+    else:
+        # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
+        # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
+        # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
+        payload = util.recursively_stringify_dictionary_keys( loads( trans.request.body ) )
+    return payload
+
+
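+# Editor's note: a hypothetical sketch (not called anywhere) of the
+# de-jsonify step above: string values that parse as JSON are decoded,
+# everything else is kept verbatim.
+def _example_payload_value_parsing():
+    payload = { 'tags': '["a", "b"]', 'name': 'sample' }
+    for key, value in payload.items():
+        try:
+            payload[ key ] = loads( value, parse_float=util.parse_non_hex_float )
+        except ValueError:
+            pass  # not JSON; keep the raw string
+    return payload  # -> { 'tags': [ 'a', 'b' ], 'name': 'sample' }
+
+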
+def expose_api_raw( func ):
+    """
+    Expose this function via the API but don't dump the results
+    to JSON.
+    """
+    return expose_api( func, to_json=False )
+
+
+def expose_api_raw_anonymous( func ):
+    """
+    Expose this function via the API but don't dump the results
+    to JSON.
+    """
+    return expose_api( func, to_json=False, user_required=False )
+
+
+def expose_api_anonymous( func, to_json=True ):
+    """
+    Expose this function via the API but don't require a set user.
+    """
+    return expose_api( func, to_json=to_json, user_required=False )
+
+
+# ----------------------------------------------------------------------------- (new) api decorators
+# TODO: rename as expose_api and make default.
+def _future_expose_api( func, to_json=True, user_required=True, user_or_session_required=True, handle_jsonp=True ):
+    """
+    Expose this function via the API.
+    """
+    @wraps(func)
+    def decorator( self, trans, *args, **kwargs ):
+        # errors passed in from trans._authenticate_api
+        if trans.error_message:
+            return __api_error_response( trans, status_code=403, err_code=error_codes.USER_NO_API_KEY,
+                                         err_msg=trans.error_message )
+        if trans.anonymous:
+            # error if anon and user required
+            if user_required:
+                return __api_error_response( trans, status_code=403, err_code=error_codes.USER_NO_API_KEY,
+                                             err_msg="API authentication required for this request" )
+            # error if anon and no session
+            if not trans.galaxy_session and user_or_session_required:
+                return __api_error_response( trans, status_code=403, err_code=error_codes.USER_NO_API_KEY,
+                                             err_msg="API authentication required for this request" )
+
+        if trans.request.body:
+            try:
+                kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
+            except ValueError:
+                error_code = error_codes.USER_INVALID_JSON
+                return __api_error_response( trans, status_code=400, err_code=error_code )
+
+        # pull out any callback argument to the api endpoint and set the content type to json or javascript
+        # TODO: use handle_jsonp to NOT overwrite existing tool_shed JSONP
+        jsonp_callback = kwargs.pop( JSONP_CALLBACK_KEY, None ) if handle_jsonp else None
+        if jsonp_callback:
+            trans.response.set_content_type( JSONP_CONTENT_TYPE )
+        else:
+            trans.response.set_content_type( JSON_CONTENT_TYPE )
+
+        # send 'do not cache' headers to handle IE's caching of ajax get responses
+        trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
+
+        # TODO: Refactor next block out into a helper procedure.
+        # Perform api_run_as processing, possibly changing identity
+        if 'payload' in kwargs and 'run_as' in kwargs['payload']:
+            if not trans.user_can_do_run_as():
+                error_code = error_codes.USER_CANNOT_RUN_AS
+                return __api_error_response( trans, err_code=error_code, status_code=403 )
+            try:
+                decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
+            except TypeError:
+                error_message = "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
+                error_code = error_codes.USER_INVALID_RUN_AS
+                return __api_error_response( trans, err_code=error_code, err_msg=error_message, status_code=400)
+            try:
+                user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+                trans.api_inherit_admin = trans.user_is_admin()
+                trans.set_user(user)
+            except:
+                error_code = error_codes.USER_INVALID_RUN_AS
+                return __api_error_response( trans, err_code=error_code, status_code=400 )
+        try:
+            rval = func( self, trans, *args, **kwargs )
+            if to_json:
+                rval = _format_return_as_json( rval, jsonp_callback, pretty=trans.debug )
+            return rval
+        except MessageException as e:
+            traceback_string = format_exc()
+            return __api_error_response( trans, exception=e, traceback=traceback_string )
+        except paste.httpexceptions.HTTPException:
+            # TODO: Allow to pass or format for the API???
+            raise  # handled
+        except Exception as e:
+            traceback_string = format_exc()
+            error_message = 'Uncaught exception in exposed API method:'
+            log.exception( error_message )
+            return __api_error_response(
+                trans,
+                status_code=500,
+                exception=e,
+                traceback=traceback_string,
+                err_msg=error_message,
+                err_code=error_codes.UNKNOWN
+            )
+    if not hasattr(func, '_orig'):
+        decorator._orig = func
+    decorator.exposed = True
+    return decorator
+
+
+def _format_return_as_json( rval, jsonp_callback=None, pretty=False ):
+    """
+    Formats a return value as JSON or JSONP if `jsonp_callback` is present.
+
+    Use `pretty=True` to return pretty printed json.
+    """
+    dumps_kwargs = dict( indent=4, sort_keys=True ) if pretty else {}
+    json = safe_dumps( rval, **dumps_kwargs )
+    if jsonp_callback:
+        json = "{}({});".format( jsonp_callback, json )
+    return json
+
+
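+# Editor's note: a hypothetical sketch (not called anywhere) contrasting
+# plain JSON output with JSONP-wrapped output.
+def _example_jsonp_formatting():
+    plain = _format_return_as_json( { "ok": True } )         # '{"ok": true}'
+    padded = _format_return_as_json( { "ok": True }, "cb" )  # 'cb({"ok": true});'
+    return plain, padded
+
+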
+def __api_error_message( trans, **kwds ):
+    exception = kwds.get( "exception", None )
+    if exception:
+        # If we are passed a MessageException use err_msg.
+        default_error_code = getattr( exception, "err_code", error_codes.UNKNOWN )
+        default_error_message = getattr( exception, "err_msg", default_error_code.default_error_message )
+        extra_error_info = getattr( exception, 'extra_error_info', {} )
+        if not isinstance( extra_error_info, dict ):
+            extra_error_info = {}
+    else:
+        default_error_message = "Error processing API request."
+        default_error_code = error_codes.UNKNOWN
+        extra_error_info = {}
+    traceback_string = kwds.get( "traceback", "No traceback available." )
+    err_msg = kwds.get( "err_msg", default_error_message )
+    error_code_object = kwds.get( "err_code", default_error_code )
+    try:
+        error_code = error_code_object.code
+    except AttributeError:
+        # Some sort of bad error code was sent in: a logic failure on the
+        # part of the Galaxy developer.
+        error_code = error_codes.UNKNOWN.code
+    # Would prefer the terminology of error_code and error_message, but
+    # err_msg is already used in a good number of places. Might as well not
+    # change it?
+    error_response = dict( err_msg=err_msg, err_code=error_code, **extra_error_info )
+    if trans.debug:  # TODO: Should admins get to see traceback as well?
+        error_response[ "traceback" ] = traceback_string
+    return error_response
+
+
+def __api_error_response( trans, **kwds ):
+    error_dict = __api_error_message( trans, **kwds )
+    exception = kwds.get( "exception", None )
+    # If we are given a status code directly, use it; otherwise check
+    # the exception for a status_code attribute.
+    if "status_code" in kwds:
+        status_code = int( kwds.get( "status_code" ) )
+    elif hasattr( exception, "status_code" ):
+        status_code = int( exception.status_code )
+    else:
+        status_code = 500
+    response = trans.response
+    if not response.status or str(response.status).startswith("20"):
+        # An unset status code appears as the string '200 OK'; if anything
+        # non-successful (i.e. not 200 or 201) has already been set, do not
+        # override the underlying controller.
+        response.status = status_code
+    return safe_dumps( error_dict )
+
+
+def _future_expose_api_anonymous( func, to_json=True ):
+    """
+    Expose this function via the API but don't require a set user.
+    """
+    return _future_expose_api( func, to_json=to_json, user_required=False )
+
+
+def _future_expose_api_anonymous_and_sessionless( func, to_json=True ):
+    """
+    Expose this function via the API but don't require a user or a galaxy_session.
+    """
+    return _future_expose_api( func, to_json=to_json, user_required=False, user_or_session_required=False )
+
+
+def _future_expose_api_raw( func ):
+    return _future_expose_api( func, to_json=False, user_required=True )
+
+
+def _future_expose_api_raw_anonymous( func ):
+    return _future_expose_api( func, to_json=False, user_required=False )
+
+
+def _future_expose_api_raw_anonymous_and_sessionless( func ):
+    # TODO: tool_shed api implemented JSONP first on a method-by-method basis, don't overwrite that for now
+    return _future_expose_api(
+        func,
+        to_json=False,
+        user_required=False,
+        user_or_session_required=False,
+        handle_jsonp=False
+    )
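+
+
+# Editor's note: a hypothetical API controller sketch (not called anywhere);
+# the _future_* variants are applied exactly like the original decorators.
+def _example_future_api_controller():
+    class FakeDatasetsAPI( object ):
+        @_future_expose_api_anonymous
+        def index( self, trans, **kwd ):
+            return [ { "id": "abc123" } ]
+    return FakeDatasetsAPI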
diff --git a/lib/galaxy/web/framework/formbuilder.py b/lib/galaxy/web/framework/formbuilder.py
new file mode 100644
index 0000000..a1caf68
--- /dev/null
+++ b/lib/galaxy/web/framework/formbuilder.py
@@ -0,0 +1,86 @@
+from galaxy.util import bunch
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+def form( *args, **kwargs ):
+    return FormBuilder( *args, **kwargs )
+
+
+class FormBuilder( object ):
+    """
+    Simple class describing an HTML form
+    """
+    def __init__( self, action="", title="", name="form", submit_text="submit", use_panels=False ):
+        self.title = title
+        self.name = name
+        self.action = action
+        self.submit_text = submit_text
+        self.inputs = []
+        self.use_panels = use_panels
+
+    def add_input( self, type, name, label, value=None, error=None, help=None, use_label=True ):
+        # Pass arguments through in FormInput's (type, name, label) order
+        self.inputs.append( FormInput( type, name, label, value, error, help, use_label ) )
+        return self
+
+    def add_checkbox( self, name, label, value=None, error=None, help=None ):
+        return self.add_input( 'checkbox', name, label, value, error, help )
+
+    def add_text( self, name, label, value=None, error=None, help=None ):
+        return self.add_input( 'text', name, label, value, error, help )
+
+    def add_password( self, name, label, value=None, error=None, help=None ):
+        return self.add_input( 'password', name, label, value, error, help )
+
+    def add_select( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
+        self.inputs.append( SelectInput( name, label, value=value, options=options, error=error, help=help, use_label=use_label   ) )
+        return self
+
+
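+# Editor's note: a hypothetical sketch (not called anywhere); because every
+# add_* method returns self, a form can be declared as one chained expression.
+def _example_login_form():
+    return ( form( action="/user/login", title="Login" )
+             .add_text( "email", "Email address" )
+             .add_password( "password", "Password" )
+             .add_checkbox( "remember_me", "Remember me" ) )
+
+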
+class FormInput( object ):
+    """
+    Simple class describing a form input element
+    """
+    def __init__( self, type, name, label, value=None, error=None, help=None, use_label=True, extra_attributes={}, **kwargs ):
+        self.type = type
+        self.name = name
+        self.label = label
+        self.value = value
+        self.error = error
+        self.help = help
+        self.use_label = use_label
+        self.extra_attributes = extra_attributes
+
+
+class DatalistInput( FormInput ):
+    """ Data list input """
+
+    def __init__( self, name, *args, **kwargs ):
+        if 'extra_attributes' not in kwargs:
+            kwargs[ 'extra_attributes' ] = {}
+        kwargs[ 'extra_attributes' ][ 'list' ] = name
+        FormInput.__init__( self, None, name, *args, **kwargs )
+        self.options = kwargs.get( 'options', {} )
+
+    def body_html( self ):
+        options = "".join( [ "<option value='%s'>%s</option>" % ( key, value ) for key, value in self.options.iteritems() ] )
+        return """<datalist id="%s">%s</datalist>""" % ( self.name, options )
+
+
+class SelectInput( FormInput ):
+    """ A select form input. """
+    def __init__( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
+        FormInput.__init__( self, "select", name, label, value=value, error=error, help=help, use_label=use_label )
+        self.options = options
+
+
+class FormData( object ):
+    """
+    Class for passing data about a form to a template, very rudimentary, could
+    be combined with the tool form handling to build something more general.
+    """
+    def __init__( self ):
+        # TODO: galaxy's two Bunch classes are defined differently. Is this right?
+        self.values = bunch.Bunch()
+        self.errors = bunch.Bunch()
diff --git a/lib/galaxy/web/framework/helpers/__init__.py b/lib/galaxy/web/framework/helpers/__init__.py
new file mode 100644
index 0000000..2dfda1f
--- /dev/null
+++ b/lib/galaxy/web/framework/helpers/__init__.py
@@ -0,0 +1,113 @@
+"""
+Galaxy web framework helpers
+"""
+
+from ..base import server_starttime
+from datetime import datetime, timedelta
+
+from galaxy.util import hash_util
+from galaxy.util import unicodify
+from galaxy.util.json import safe_dumps as dumps  # Used by mako templates # noqa: F401
+from webhelpers import date
+from webhelpers.html.tags import stylesheet_link, javascript_link
+
+from routes import url_for
+
+
+def time_ago( x ):
+    """
+    Convert a datetime to a string.
+    """
+    delta = timedelta(weeks=1)
+
+    # If the date is more than one week ago, then display the actual date instead of in words
+    if (datetime.utcnow() - x) > delta:  # Greater than a week difference
+        return x.strftime("%b %d, %Y")
+    else:
+        date_array = date.distance_of_time_in_words( x, datetime.utcnow() ).replace(",", "").split(" ")
+        return "~%s %s ago" % (date_array[0], date_array[1])
+
+
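+# Editor's note: a hypothetical sketch (not called anywhere); dates within a
+# week render in words (exact wording comes from webhelpers), older dates as
+# a formatted date string.
+def _example_time_ago():
+    recent = time_ago( datetime.utcnow() - timedelta( hours=3 ) )  # e.g. '~3 hours ago'
+    older = time_ago( datetime.utcnow() - timedelta( days=30 ) )   # e.g. 'Dec 16, 2016'
+    return recent, older
+
+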
+def iff( a, b, c ):
+    """
+    Ternary shortcut
+    """
+    if a:
+        return b
+    else:
+        return c
+
+
+def truncate(content, length=100, suffix='...'):
+    """
+    Smart string truncation
+    """
+    if len(content) <= length:
+        return content
+    else:
+        return content[:length].rsplit(' ', 1)[0] + suffix
+
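+
+# Editor's note: a hypothetical sketch (not called anywhere); truncation
+# backs up to a word boundary before appending the suffix.
+def _example_truncate():
+    return truncate( "The quick brown fox jumps over the lazy dog", 15 )  # -> 'The quick...'
+
+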
+# Quick helpers for static content
+
+
+def css( *args ):
+    """
+    Take a list of stylesheet names (no extension) and return appropriate string
+    of link tags.
+
+    Cache-bust with time that server started running on
+    """
+    return "\n".join( [ stylesheet_link( url_for( "/static/style/%s.css?v=%s" % (name, server_starttime) ) ) for name in args ] )
+
+
+def js_helper( prefix, *args ):
+    """
+    Take a prefix and list of javascript names and return appropriate
+    string of script tags.
+
+    Cache-bust with time that server started running on
+    """
+    return "\n".join( [ javascript_link( url_for( "/%s%s.js?v=%s" % (prefix, name, server_starttime ) ) ) for name in args ] )
+
+
+def js( *args ):
+    """
+    Take a list of javascript names (no extension) and return appropriate
+    string of script tags.
+    """
+    return js_helper( 'static/scripts/', *args )
+
+
+def templates( *args ):
+    """
+    Take a list of template names (no extension) and return appropriate
+    string of script tags.
+    """
+    return js_helper( 'static/scripts/templates/compiled/', *args )
+
+# Hashes
+
+
+def md5( s ):
+    """
+    Return hex encoded md5 hash of string s
+    """
+    m = hash_util.md5()
+    m.update( s )
+    return m.hexdigest()
+
+
+# Unicode help
+def to_unicode( a_string ):
+    """
+    Convert a string to unicode in utf-8 format; if string is already unicode,
+    does nothing because string's encoding cannot be determined by introspection.
+    """
+    return unicodify( a_string, 'utf-8' )
+
+
+def is_true( val ):
+    """
+    Return True if the input is the boolean True, or is a string that looks like a true value.
+    """
+    return val is True or val in [ 'True', 'true', 'T', 't' ]
diff --git a/lib/galaxy/web/framework/helpers/grids.py b/lib/galaxy/web/framework/helpers/grids.py
new file mode 100644
index 0000000..bca528e
--- /dev/null
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -0,0 +1,860 @@
+import logging
+import math
+from json import dumps, loads
+
+from markupsafe import escape
+from six import string_types, text_type
+from sqlalchemy.sql.expression import and_, false, func, null, or_, true
+
+from galaxy.model.item_attrs import RuntimeException, UsesAnnotations, UsesItemRatings
+from galaxy.util import sanitize_text, unicodify
+from galaxy.util.odict import odict
+from galaxy.web.framework import decorators, url_for
+from galaxy.web.framework.helpers import iff
+
+
+log = logging.getLogger( __name__ )
+
+
+class Grid( object ):
+    """
+    Specifies the content and format of a grid (data table).
+    """
+    title = ""
+    exposed = True
+    model_class = None
+    show_item_checkboxes = False
+    template = "grid_base.mako"
+    async_template = "grid_base_async.mako"
+    use_async = False
+    use_hide_message = True
+    global_actions = []
+    columns = []
+    operations = []
+    standard_filters = []
+    # Any columns that are filterable (either standard or advanced) should have a default value set in the default filter.
+    default_filter = {}
+    default_sort_key = None
+    preserve_state = False
+    use_paging = False
+    num_rows_per_page = 25
+    num_page_links = 10
+    # Set preference names.
+    cur_filter_pref_name = ".filter"
+    cur_sort_key_pref_name = ".sort_key"
+    pass_through_operations = {}
+    legend = None
+    info_text = None
+
+    def __init__( self ):
+        # Determine if any multiple row operations are defined
+        self.has_multiple_item_operations = False
+        for operation in self.operations:
+            if operation.allow_multiple:
+                self.has_multiple_item_operations = True
+                break
+
+        # If a column does not have a model class, set the column's model class
+        # to be the grid's model class.
+        for column in self.columns:
+            if not column.model_class:
+                column.model_class = self.model_class
+
+    def __call__( self, trans, **kwargs ):
+        # Get basics.
+        # FIXME: pretty sure this is only here to pass along, can likely be eliminated
+        status = kwargs.get( 'status', None )
+        message = kwargs.get( 'message', None )
+        # Build a base filter and sort key that is the combination of the saved state and defaults.
+        # Saved state takes preference over defaults.
+        base_filter = {}
+        if self.default_filter:
+            # default_filter is a dictionary that provides a default set of filters based on the grid's columns.
+            base_filter = self.default_filter.copy()
+        base_sort_key = self.default_sort_key
+        if self.preserve_state:
+            pref_name = text_type( self.__class__.__name__ + self.cur_filter_pref_name )
+            if pref_name in trans.get_user().preferences:
+                saved_filter = loads( trans.get_user().preferences[pref_name] )
+                base_filter.update( saved_filter )
+            pref_name = text_type( self.__class__.__name__ + self.cur_sort_key_pref_name )
+            if pref_name in trans.get_user().preferences:
+                base_sort_key = loads( trans.get_user().preferences[pref_name] )
+        # Build initial query
+        query = self.build_initial_query( trans, **kwargs )
+        query = self.apply_query_filter( trans, query, **kwargs )
+        # Maintain sort state in generated urls
+        extra_url_args = {}
+        # Determine whether use_default_filter flag is set.
+        use_default_filter_str = kwargs.get( 'use_default_filter' )
+        use_default_filter = False
+        if use_default_filter_str:
+            use_default_filter = ( use_default_filter_str.lower() == 'true' )
+        # Process filtering arguments to (a) build a query that represents the filter and (b) build a
+        # dictionary that denotes the current filter.
+        cur_filter_dict = {}
+        for column in self.columns:
+            if column.key:
+                # Get the filter criterion for the column. Precedence is (a) if using default filter, only look there; otherwise,
+                # (b) look in kwargs; and (c) look in base filter.
+                column_filter = None
+                if use_default_filter:
+                    if self.default_filter:
+                        column_filter = self.default_filter.get( column.key )
+                elif "f-" + column.model_class.__name__ + ".%s" % column.key in kwargs:
+                    # Queries that include table joins cannot guarantee unique column names.  This problem is
+                    # handled by setting the column_filter value to <TableName>.<ColumnName>.
+                    column_filter = kwargs.get( "f-" + column.model_class.__name__ + ".%s" % column.key )
+                elif "f-" + column.key in kwargs:
+                    column_filter = kwargs.get( "f-" + column.key )
+                elif column.key in base_filter:
+                    column_filter = base_filter.get( column.key )
+
+                # This method (1) flattens a mix of strings and lists of strings into a single list and (2) attempts to de-jsonify each string.
+                def loads_recurse(item):
+                    decoded_list = []
+                    if isinstance( item, string_types):
+                        try:
+                            # Not clear what we're decoding, so recurse to ensure that we catch everything.
+                            decoded_item = loads( item )
+                            if isinstance( decoded_item, list):
+                                decoded_list = loads_recurse( decoded_item )
+                            else:
+                                decoded_list = [ text_type( decoded_item ) ]
+                        except ValueError:
+                            decoded_list = [ text_type( item ) ]
+                    elif isinstance( item, list):
+                        for element in item:
+                            a_list = loads_recurse( element )
+                            decoded_list = decoded_list + a_list
+                    return decoded_list
+                # If column filter found, apply it.
+                if column_filter is not None:
+                    # TextColumns may have a mix of json and strings.
+                    if isinstance( column, TextColumn ):
+                        column_filter = loads_recurse( column_filter )
+                        if len( column_filter ) == 1:
+                            column_filter = column_filter[0]
+                    # Interpret ',' as a separator for multiple terms.
+                    if isinstance( column_filter, string_types ) and column_filter.find(',') != -1:
+                        column_filter = column_filter.split(',')
+
+                    # Check if filter is empty
+                    if isinstance( column_filter, list ):
+                        # Remove empty strings from filter list
+                        column_filter = [x for x in column_filter if x != '']
+                        if len(column_filter) == 0:
+                            continue
+                    elif isinstance(column_filter, string_types):
+                        # If filter criterion is empty, do nothing.
+                        if column_filter == '':
+                            continue
+
+                    # Update query.
+                    query = column.filter( trans, trans.user, query, column_filter )
+                    # Update current filter dict.
+                    # Column filters are rendered in various places, sanitize them all here.
+                    cur_filter_dict[ column.key ] = sanitize_text(column_filter)
+                    # Carry filter along to newly generated urls; make sure filter is a string so
+                    # that we can encode to UTF-8 and thus handle user input to filters.
+                    if isinstance( column_filter, list ):
+                        # Filter is a list; process each item.
+                        column_filter = [ text_type(_).encode('utf-8') if not isinstance(_, string_types) else _ for _ in column_filter ]
+                        extra_url_args[ "f-" + column.key ] = dumps( column_filter )
+                    else:
+                        # Process singleton filter.
+                        if not isinstance( column_filter, string_types ):
+                            column_filter = text_type(column_filter)
+                        extra_url_args[ "f-" + column.key ] = column_filter.encode("utf-8")
+        # Process sort arguments.
+        sort_key = None
+        if 'sort' in kwargs:
+            sort_key = kwargs['sort']
+        elif base_sort_key:
+            sort_key = base_sort_key
+        if sort_key:
+            ascending = not( sort_key.startswith( "-" ) )
+            # Queries that include table joins cannot guarantee unique column names.  This problem is
+            # handled by setting the column_filter value to <TableName>.<ColumnName>.
+            table_name = None
+            if sort_key.find( '.' ) > -1:
+                a_list = sort_key.split( '.' )
+                if ascending:
+                    table_name = a_list[0]
+                else:
+                    table_name = a_list[0][1:]
+                column_name = a_list[1]
+            elif ascending:
+                column_name = sort_key
+            else:
+                column_name = sort_key[1:]
+            # Sort key is a column key.
+            for column in self.columns:
+                if column.key and column.key.find( '.' ) > -1:
+                    column_key = column.key.split( '.' )[1]
+                else:
+                    column_key = column.key
+                if ( table_name is None or table_name == column.model_class.__name__ ) and column_key == column_name:
+                    query = column.sort( trans, query, ascending, column_name=column_name )
+                    break
+            extra_url_args['sort'] = sort_key
+        # There might be a current row
+        current_item = self.get_current_item( trans, **kwargs )
+        # Process page number.
+        if self.use_paging:
+            if 'page' in kwargs:
+                if kwargs['page'] == 'all':
+                    page_num = 0
+                else:
+                    page_num = int( kwargs['page'] )
+            else:
+                page_num = 1
+            if page_num == 0:
+                # Show all rows in page.
+                total_num_rows = query.count()
+                page_num = 1
+                num_pages = 1
+            else:
+                # Show a limited number of rows. Before modifying query, get the total number of rows that query
+                # returns so that the total number of pages can be computed.
+                total_num_rows = query.count()
+                query = query.limit( self.num_rows_per_page ).offset( ( page_num - 1 ) * self.num_rows_per_page )
+                num_pages = int( math.ceil( float( total_num_rows ) / self.num_rows_per_page ) )
+        else:
+            # Defaults.
+            page_num = 1
+            num_pages = 1
+        # There are some places in grid templates where it's useful for a grid
+        # to have its current filter.
+        self.cur_filter_dict = cur_filter_dict
+        # Preserve grid state: save current filter and sort key.
+        if self.preserve_state:
+            pref_name = text_type( self.__class__.__name__ + self.cur_filter_pref_name )
+            trans.get_user().preferences[pref_name] = text_type( dumps( cur_filter_dict ) )
+            if sort_key:
+                pref_name = text_type( self.__class__.__name__ + self.cur_sort_key_pref_name )
+                trans.get_user().preferences[pref_name] = text_type( dumps( sort_key ) )
+            trans.sa_session.flush()
+        # Log grid view.
+        context = text_type( self.__class__.__name__ )
+        params = cur_filter_dict.copy()
+        params['sort'] = sort_key
+        params['async'] = ( 'async' in kwargs )
+
+        # TODO:??
+        # commenting this out; when this fn calls session.add( action ) and session.flush the query from this fn
+        # is effectively 'wiped' out. Nate believes it has something to do with our use of session( autocommit=True )
+        # in mapping.py. If you change that to False, the log_action doesn't affect the query
+        # Below, I'm rendering the template first (that uses query), then calling log_action, then returning the page
+        # trans.log_action( trans.get_user(), text_type( "grid.view" ), context, params )
+
+        # Render grid.
+        def url( *args, **kwargs ):
+            route_name = kwargs.pop( '__route_name__', None )
+            # Only include sort/filter arguments if not linking to another
+            # page. This is a bit of a hack.
+            if 'action' in kwargs:
+                new_kwargs = dict()
+            else:
+                new_kwargs = dict( extra_url_args )
+            # Extend new_kwargs with first argument if found
+            if len(args) > 0:
+                new_kwargs.update( args[0] )
+            new_kwargs.update( kwargs )
+            # We need to encode item ids
+            if 'id' in new_kwargs:
+                id = new_kwargs[ 'id' ]
+                if isinstance( id, list ):
+                    new_kwargs[ 'id' ] = [ trans.security.encode_id( i ) for i in id ]
+                else:
+                    new_kwargs[ 'id' ] = trans.security.encode_id( id )
+            # The url_for invocation *must* include a controller and action.
+            if 'controller' not in new_kwargs:
+                new_kwargs['controller'] = trans.controller
+            if 'action' not in new_kwargs:
+                new_kwargs['action'] = trans.action
+            if route_name:
+                return url_for( route_name, **new_kwargs )
+            return url_for( **new_kwargs )
+
+        self.use_panels = ( kwargs.get( 'use_panels', False ) in [ True, 'True', 'true' ] )
+        self.advanced_search = ( kwargs.get( 'advanced_search', False ) in [ True, 'True', 'true' ] )
+        async_request = ( ( self.use_async ) and ( kwargs.get( 'async', False ) in [ True, 'True', 'true'] ) )
+        # Currently, filling the template returns a str object; this requires decoding the string into a
+        # unicode object within mako templates. What probably should be done is to return the template as
+        # utf-8 unicode; however, this would require encoding the object as utf-8 before returning the grid
+        # results via a controller method, which would require substantial changes. Hence, for now, return grid
+        # as str.
+        page = trans.fill_template( iff( async_request, self.async_template, self.template ),
+                                    grid=self,
+                                    query=query,
+                                    cur_page_num=page_num,
+                                    num_pages=num_pages,
+                                    num_page_links=self.num_page_links,
+                                    default_filter_dict=self.default_filter,
+                                    cur_filter_dict=cur_filter_dict,
+                                    sort_key=sort_key,
+                                    current_item=current_item,
+                                    ids=kwargs.get( 'id', [] ),
+                                    url=url,
+                                    status=status,
+                                    message=message,
+                                    info_text=self.info_text,
+                                    use_panels=self.use_panels,
+                                    use_hide_message=self.use_hide_message,
+                                    advanced_search=self.advanced_search,
+                                    show_item_checkboxes=( self.show_item_checkboxes or
+                                                           kwargs.get( 'show_item_checkboxes', '' ) in [ 'True', 'true' ] ),
+                                    # Pass back kwargs so that grid template can set and use args without
+                                    # grid explicitly having to pass them.
+                                    kwargs=kwargs )
+        trans.log_action( trans.get_user(), text_type( "grid.view" ), context, params )
+        return page
+
+    def get_ids( self, **kwargs ):
+        id = []
+        if 'id' in kwargs:
+            id = kwargs['id']
+            # Coerce ids to list
+            if not isinstance( id, list ):
+                id = id.split( "," )
+            # Ensure ids are integers
+            try:
+                id = map( int, id )
+            except:
+                decorators.error( "Invalid id" )
+        return id
+
+    # ---- Override these ----------------------------------------------------
+    def handle_operation( self, trans, operation, ids, **kwargs ):
+        pass
+
+    def get_current_item( self, trans, **kwargs ):
+        return None
+
+    def build_initial_query( self, trans, **kwargs ):
+        return trans.sa_session.query( self.model_class )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        # Applies a database filter that holds for all items in the grid.
+        # (gvk) Is this method necessary?  Why not simply build the entire query,
+        # including applying filters in the build_initial_query() method?
+        return query
+
+
+class GridColumn( object ):
+    def __init__( self, label, key=None, model_class=None, method=None, format=None,
+                  link=None, attach_popup=False, visible=True, nowrap=False,
+                  # Valid values for filterable are ['standard', 'advanced', None]
+                  filterable=None, sortable=True, label_id_prefix=None, inbound=False ):
+        """Create a grid column."""
+        self.label = label
+        self.key = key
+        self.model_class = model_class
+        self.method = method
+        self.format = format
+        self.link = link
+        self.inbound = inbound
+        self.nowrap = nowrap
+        self.attach_popup = attach_popup
+        self.visible = visible
+        self.filterable = filterable
+        # Column must have a key to be sortable.
+        self.sortable = ( self.key is not None and sortable )
+        self.label_id_prefix = label_id_prefix or ''
+
+    def get_value( self, trans, grid, item ):
+        if self.method:
+            value = getattr( grid, self.method )( trans, item )
+        elif self.key:
+            value = getattr( item, self.key )
+        else:
+            value = None
+        if self.format:
+            value = self.format( value )
+        return escape(unicodify(value))
+
+    def get_link( self, trans, grid, item ):
+        if self.link and self.link( item ):
+            return self.link( item )
+        return None
+
+    def filter( self, trans, user, query, column_filter ):
+        """ Modify query to reflect the column filter. """
+        if column_filter == "All":
+            pass
+        if column_filter == "True":
+            query = query.filter_by( **{ self.key: True } )
+        elif column_filter == "False":
+            query = query.filter_by( **{ self.key: False } )
+        return query
+
+    def get_accepted_filters( self ):
+        """ Returns a list of accepted filters for this column. """
+        accepted_filters_vals = [ "False", "True", "All" ]
+        accepted_filters = []
+        for val in accepted_filters_vals:
+            args = { self.key: val }
+            accepted_filters.append( GridColumnFilter( val, args) )
+        return accepted_filters
+
+    def sort( self, trans, query, ascending, column_name=None ):
+        """Sort query using this column."""
+        if column_name is None:
+            column_name = self.key
+        if ascending:
+            query = query.order_by( self.model_class.table.c.get( column_name ).asc() )
+        else:
+            query = query.order_by( self.model_class.table.c.get( column_name ).desc() )
+        return query
+
+
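+# Editor's note: a hypothetical grid sketch (not called anywhere); a minimal
+# Grid subclass binds a model class to a list of column definitions.
+def _example_grid_subclass():
+    class ExampleGrid( Grid ):
+        title = "Examples"
+        model_class = None  # a real grid would use a mapped model class
+        default_sort_key = "-update_time"
+        columns = [
+            TextColumn( "Name", key="name", filterable="advanced" ),
+            GridColumn( "Last Updated", key="update_time" ),
+        ]
+    return ExampleGrid
+
+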
+class ReverseSortColumn( GridColumn ):
+    """ Column that reverses sorting; this is useful when the natural sort is descending. """
+    def sort( self, trans, query, ascending, column_name=None ):
+        return GridColumn.sort( self, trans, query, (not ascending), column_name=column_name )
+
+
+class TextColumn( GridColumn ):
+    """ Generic column that employs freetext and, hence, supports freetext, case-independent filtering. """
+    def filter( self, trans, user, query, column_filter ):
+        """ Modify query to filter using free text, case independence. """
+        if column_filter == "All":
+            pass
+        elif column_filter:
+            query = query.filter( self.get_filter( trans, user, column_filter ) )
+        return query
+
+    def get_filter( self, trans, user, column_filter ):
+        """ Returns a SQLAlchemy criterion derived from column_filter. """
+        if isinstance( column_filter, string_types ):
+            return self.get_single_filter( user, column_filter )
+        elif isinstance( column_filter, list ):
+            clause_list = []
+            for filter in column_filter:
+                clause_list.append( self.get_single_filter( user, filter ) )
+            return and_( *clause_list )
+
+    def get_single_filter( self, user, a_filter ):
+        """
+        Returns a SQLAlchemy criterion derived from a single filter. A single
+        filter is the most basic filter--usually a string--and cannot be a list.
+        """
+        # Queries that include table joins cannot guarantee that table column names will be
+        # unique, so check to see if a_filter is of type <TableName>.<ColumnName>.
+        if self.key.find( '.' ) > -1:
+            a_key = self.key.split( '.' )[1]
+        else:
+            a_key = self.key
+        model_class_key_field = getattr( self.model_class, a_key )
+        return func.lower( model_class_key_field ).like( "%" + a_filter.lower() + "%" )
+
+    def sort( self, trans, query, ascending, column_name=None ):
+        """Sort column using case-insensitive alphabetical sorting."""
+        if column_name is None:
+            column_name = self.key
+        if ascending:
+            query = query.order_by( func.lower( self.model_class.table.c.get( column_name ) ).asc() )
+        else:
+            query = query.order_by( func.lower( self.model_class.table.c.get( column_name ) ).desc() )
+        return query
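+
+# Illustrative note (not part of the class above): for a column with key
+# "name" and the filter string "rna", get_single_filter builds a clause
+# equivalent to func.lower( Model.name ).like( "%rna%" ).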
+
+
+class DateTimeColumn( TextColumn ):
+    def sort( self, trans, query, ascending, column_name=None ):
+        """Sort query using this column."""
+        return GridColumn.sort( self, trans, query, ascending, column_name=column_name )
+
+
+class BooleanColumn( TextColumn ):
+    def sort( self, trans, query, ascending, column_name=None ):
+        """Sort query using this column."""
+        return GridColumn.sort( self, trans, query, ascending, column_name=column_name )
+
+    def get_single_filter( self, user, a_filter ):
+        if self.key.find( '.' ) > -1:
+            a_key = self.key.split( '.' )[1]
+        else:
+            a_key = self.key
+        model_class_key_field = getattr( self.model_class, a_key )
+        return model_class_key_field == a_filter
+
+
+class IntegerColumn( TextColumn ):
+    """
+    Integer column that employs free text but checks that the text is an
+    integer, so as to support filtering on integer values.
+
+    IMPORTANT NOTE: grids that use this column type should not include the column
+    in the cols_to_filter list of MulticolFilterColumn ( i.e., searching on this
+    column type should not be performed in the grid's standard search - it won't
+    throw exceptions, but it also will not find what you're looking for ).  Grids
+    that search on this column should use 'filterable="advanced"' so that searching
+    is only performed in the advanced search component, restricting the search to
+    the specific column.
+
+    This is useful for searching on object ids or other integer columns.  See the
+    JobIdColumn column in the SpecifiedDateListGrid class in the jobs controller of
+    the reports webapp for an example.
+    """
+    def get_single_filter( self, user, a_filter ):
+        model_class_key_field = getattr( self.model_class, self.key )
+        # Convert explicitly so that non-integer input raises a clear
+        # ValueError and a legitimate value of 0 is still accepted.
+        try:
+            int_filter = int( a_filter )
+        except ValueError:
+            raise ValueError( "The search entry must be an integer" )
+        return model_class_key_field == int_filter
+
+    def sort( self, trans, query, ascending, column_name=None ):
+        """Sort query using this column."""
+        return GridColumn.sort( self, trans, query, ascending, column_name=column_name )
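+
+# A minimal sketch (hypothetical, not referenced elsewhere) of declaring this
+# column type with advanced-only filtering, per the note in the docstring
+# above; the label and key are assumptions for the example:
+_example_id_column = IntegerColumn( "Id", key="id", filterable="advanced" )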
+
+
+class CommunityRatingColumn( GridColumn, UsesItemRatings ):
+    """ Column that displays community ratings for an item. """
+    def get_value( self, trans, grid, item ):
+        ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, item, webapp_model=trans.model )
+        return trans.fill_template( "tool_shed_rating.mako",
+                                    ave_item_rating=ave_item_rating,
+                                    num_ratings=num_ratings,
+                                    item_id=trans.security.encode_id( item.id ) )
+
+    def sort( self, trans, query, ascending, column_name=None ):
+        def get_foreign_key( source_class, target_class ):
+            """ Returns foreign key in source class that references target class. """
+            target_fk = None
+            for fk in source_class.table.foreign_keys:
+                if fk.references( target_class.table ):
+                    target_fk = fk
+                    break
+            if not target_fk:
+                raise RuntimeError( "No foreign key found between objects: %s, %s" % ( source_class.table, target_class.table ) )
+            return target_fk
+        # Get the columns that connect item's table and item's rating association table.
+        item_rating_assoc_class = getattr( trans.model, '%sRatingAssociation' % self.model_class.__name__ )
+        foreign_key = get_foreign_key( item_rating_assoc_class, self.model_class )
+        fk_col = foreign_key.parent
+        referent_col = foreign_key.get_referent( self.model_class.table )
+        # Do sorting using a subquery.
+        # Subquery to get average rating for each item.
+        ave_rating_subquery = trans.sa_session.query( fk_col,
+                                                      func.avg( item_rating_assoc_class.table.c.rating ).label('avg_rating') ) \
+                                              .group_by( fk_col ) \
+                                              .subquery()
+        # Integrate subquery into main query.
+        query = query.outerjoin( (ave_rating_subquery, referent_col == ave_rating_subquery.columns[fk_col.name]) )
+        # Sort using subquery results; use coalesce to avoid null values.
+        if not ascending:  # TODO: for now, reverse sorting b/c first sort is ascending, and that should be the natural sort.
+            query = query.order_by( func.coalesce( ave_rating_subquery.c.avg_rating, 0 ).asc() )
+        else:
+            query = query.order_by( func.coalesce( ave_rating_subquery.c.avg_rating, 0 ).desc() )
+        return query
+
+
+class OwnerAnnotationColumn( TextColumn, UsesAnnotations ):
+    """ Column that displays and filters item owner's annotations. """
+    def __init__( self, col_name, key, model_class=None, model_annotation_association_class=None, filterable=None ):
+        GridColumn.__init__( self, col_name, key=key, model_class=model_class, filterable=filterable )
+        self.sortable = False
+        self.model_annotation_association_class = model_annotation_association_class
+
+    def get_value( self, trans, grid, item ):
+        """ Returns first 150 characters of annotation. """
+        annotation = self.get_item_annotation_str( trans.sa_session, item.user, item )
+        if annotation:
+            ann_snippet = annotation[:155]
+            if len( annotation ) > 155:
+                ann_snippet = ann_snippet[ :ann_snippet.rfind(' ') ]
+                ann_snippet += "..."
+        else:
+            ann_snippet = ""
+        return ann_snippet
+
+    def get_single_filter( self, user, a_filter ):
+        """ Filter by annotation and annotation owner. """
+        return self.model_class.annotations.any(
+            and_( func.lower( self.model_annotation_association_class.annotation ).like( "%" + a_filter.lower() + "%" ),
+               # TODO: not sure why, to filter by owner's annotations, we have to do this rather than
+               # 'self.model_class.user==self.model_annotation_association_class.user'
+               self.model_annotation_association_class.table.c.user_id == self.model_class.table.c.user_id ) )
+
+
+class CommunityTagsColumn( TextColumn ):
+    """ Column that supports community tags. """
+    def __init__( self, col_name, key, model_class=None, model_tag_association_class=None, filterable=None, grid_name=None ):
+        GridColumn.__init__( self, col_name, key=key, model_class=model_class, nowrap=True, filterable=filterable, sortable=False )
+        self.model_tag_association_class = model_tag_association_class
+        # Column-specific attributes.
+        self.grid_name = grid_name
+
+    def get_value( self, trans, grid, item ):
+        return trans.fill_template( "/tagging_common.mako", tag_type="community", trans=trans, user=trans.get_user(), tagged_item=item, elt_context=self.grid_name,
+                                    in_form=True, input_size="20", tag_click_fn="add_tag_to_grid_filter", use_toggle_link=True )
+
+    def filter( self, trans, user, query, column_filter ):
+        """ Modify query to filter model_class by tag. Multiple filters are ANDed. """
+        if column_filter == "All":
+            pass
+        elif column_filter:
+            query = query.filter( self.get_filter( trans, user, column_filter ) )
+        return query
+
+    def get_filter( self, trans, user, column_filter ):
+        # Parse filter to extract multiple tags.
+        if isinstance( column_filter, list ):
+            # Collapse list of tags into a single string; this is redundant but effective. TODO: fix this by iterating over tags.
+            column_filter = ",".join( column_filter )
+        raw_tags = trans.app.tag_handler.parse_tags( column_filter.encode( "utf-8" ) )
+        clause_list = []
+        for name, value in raw_tags.items():
+            if name:
+                # Filter by all tags.
+                clause_list.append( self.model_class.tags.any( func.lower( self.model_tag_association_class.user_tname ).like( "%" + name.lower() + "%" ) ) )
+                if value:
+                    # Filter by all values.
+                    clause_list.append( self.model_class.tags.any( func.lower( self.model_tag_association_class.user_value ).like( "%" + value.lower() + "%" ) ) )
+        return and_( *clause_list )
+
+
+class IndividualTagsColumn( CommunityTagsColumn ):
+    """ Column that supports individual tags. """
+    def get_value( self, trans, grid, item ):
+        return trans.fill_template( "/tagging_common.mako",
+                                    tag_type="individual",
+                                    user=trans.user,
+                                    tagged_item=item,
+                                    elt_context=self.grid_name,
+                                    in_form=True,
+                                    input_size="20",
+                                    tag_click_fn="add_tag_to_grid_filter",
+                                    use_toggle_link=True )
+
+    def get_filter( self, trans, user, column_filter ):
+        # Parse filter to extract multiple tags.
+        if isinstance( column_filter, list ):
+            # Collapse list of tags into a single string; this is redundant but effective. TODO: fix this by iterating over tags.
+            column_filter = ",".join( column_filter )
+        raw_tags = trans.app.tag_handler.parse_tags( column_filter.encode( "utf-8" ) )
+        clause_list = []
+        for name, value in raw_tags.items():
+            if name:
+                # Filter by individual's tag names.
+                clause_list.append( self.model_class.tags.any( and_( func.lower( self.model_tag_association_class.user_tname ).like( "%" + name.lower() + "%" ), self.model_tag_association_class.user == user ) ) )
+                if value:
+                    # Filter by individual's tag values.
+                    clause_list.append( self.model_class.tags.any( and_( func.lower( self.model_tag_association_class.user_value ).like( "%" + value.lower() + "%" ), self.model_tag_association_class.user == user ) ) )
+        return and_( *clause_list )
+
+
+class MulticolFilterColumn( TextColumn ):
+    """ Column that performs multicolumn filtering. """
+    def __init__( self, col_name, cols_to_filter, key, visible, filterable="default" ):
+        GridColumn.__init__( self, col_name, key=key, visible=visible, filterable=filterable)
+        self.cols_to_filter = cols_to_filter
+
+    def filter( self, trans, user, query, column_filter ):
+        """ Modify query to filter model_class by tag. Multiple filters are ANDed. """
+        if column_filter == "All":
+            return query
+        if isinstance( column_filter, list):
+            clause_list = []
+            for filter in column_filter:
+                part_clause_list = []
+                for column in self.cols_to_filter:
+                    part_clause_list.append( column.get_filter( trans, user, filter ) )
+                clause_list.append( or_( *part_clause_list ) )
+            complete_filter = and_( *clause_list )
+        else:
+            clause_list = []
+            for column in self.cols_to_filter:
+                clause_list.append( column.get_filter( trans, user, column_filter ) )
+            complete_filter = or_( *clause_list )
+        return query.filter( complete_filter )
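+
+# A minimal sketch (hypothetical, for illustration only) of a free-text
+# search column that fans a single filter string out over two TextColumns;
+# the labels, keys and filterable value here are assumptions:
+_example_search_column = MulticolFilterColumn(
+    "Search",
+    cols_to_filter=[ TextColumn( "Name", key="name" ),
+                     TextColumn( "Description", key="description" ) ],
+    key="free-text-search",
+    visible=False,
+    filterable="standard" )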
+
+
+class OwnerColumn( TextColumn ):
+    """ Column that lists item's owner. """
+    def get_value( self, trans, grid, item ):
+        return item.user.username
+
+    def sort( self, trans, query, ascending, column_name=None ):
+        """ Sort column using case-insensitive alphabetical sorting on item's username. """
+        if ascending:
+            query = query.order_by( func.lower( self.model_class.username ).asc() )
+        else:
+            query = query.order_by( func.lower( self.model_class.username ).desc() )
+        return query
+
+
+class PublicURLColumn( TextColumn ):
+    """ Column displays item's public URL based on username and slug. """
+    def get_link( self, trans, grid, item ):
+        if item.user.username and item.slug:
+            return dict( action='display_by_username_and_slug', username=item.user.username, slug=item.slug )
+        elif not item.user.username:
+            # TODO: provide link to set username.
+            return None
+        elif not item.slug:
+            # TODO: provide link to set slug.
+            return None
+
+
+class DeletedColumn( GridColumn ):
+    """ Column that tracks and filters for items with deleted attribute. """
+    def get_accepted_filters( self ):
+        """ Returns a list of accepted filters for this column. """
+        accepted_filter_labels_and_vals = { "active" : "False", "deleted" : "True", "all": "All" }
+        accepted_filters = []
+        for label, val in accepted_filter_labels_and_vals.items():
+            args = { self.key: val }
+            accepted_filters.append( GridColumnFilter( label, args) )
+        return accepted_filters
+
+    def filter( self, trans, user, query, column_filter ):
+        """Modify query to filter self.model_class by state."""
+        if column_filter == "All":
+            pass
+        elif column_filter in [ "True", "False" ]:
+            query = query.filter( self.model_class.deleted == ( column_filter == "True" ) )
+        return query
+
+
+class StateColumn( GridColumn ):
+    """
+    Column that tracks and filters for items with state attribute.
+
+    IMPORTANT NOTE: self.model_class must have a states Bunch or dict if
+    this column type is used in the grid.
+    """
+    def get_value( self, trans, grid, item ):
+        return item.state
+
+    def filter( self, trans, user, query, column_filter ):
+        """Modify query to filter self.model_class by state."""
+        if column_filter == "All":
+            pass
+        elif column_filter in [ v for k, v in self.model_class.states.items() ]:
+            query = query.filter( self.model_class.state == column_filter )
+        return query
+
+    def get_accepted_filters( self ):
+        """Returns a list of accepted filters for this column."""
+        all_filter = GridColumnFilter( 'all', { self.key : 'All' } )
+        accepted_filters = [ all_filter ]
+        for k, v in self.model_class.states.items():
+            args = { self.key: v }
+            accepted_filters.append( GridColumnFilter( v, args) )
+        return accepted_filters
+
+
+class SharingStatusColumn( GridColumn ):
+    """ Grid column to indicate sharing status. """
+    def get_value( self, trans, grid, item ):
+        # Deleted items cannot be shared.
+        if item.deleted:
+            return ""
+        # Build a list of sharing for this item.
+        sharing_statuses = []
+        if item.users_shared_with:
+            sharing_statuses.append( "Shared" )
+        if item.importable:
+            sharing_statuses.append( "Accessible" )
+        if item.published:
+            sharing_statuses.append( "Published" )
+        return ", ".join( sharing_statuses )
+
+    def get_link( self, trans, grid, item ):
+        if not item.deleted and ( item.users_shared_with or item.importable or item.published ):
+            return dict( operation="share or publish", id=item.id )
+        return None
+
+    def filter( self, trans, user, query, column_filter ):
+        """ Modify query to filter histories by sharing status. """
+        if column_filter == "All":
+            pass
+        elif column_filter:
+            if column_filter == "private":
+                query = query.filter( self.model_class.users_shared_with == null() )
+                query = query.filter( self.model_class.importable == false() )
+            elif column_filter == "shared":
+                query = query.filter( self.model_class.users_shared_with != null() )
+            elif column_filter == "accessible":
+                query = query.filter( self.model_class.importable == true() )
+            elif column_filter == "published":
+                query = query.filter( self.model_class.published == true() )
+        return query
+
+    def get_accepted_filters( self ):
+        """ Returns a list of accepted filters for this column. """
+        accepted_filter_labels_and_vals = odict()
+        accepted_filter_labels_and_vals["private"] = "private"
+        accepted_filter_labels_and_vals["shared"] = "shared"
+        accepted_filter_labels_and_vals["accessible"] = "accessible"
+        accepted_filter_labels_and_vals["published"] = "published"
+        accepted_filter_labels_and_vals["all"] = "All"
+        accepted_filters = []
+        for label, val in accepted_filter_labels_and_vals.items():
+            args = { self.key: val }
+            accepted_filters.append( GridColumnFilter( label, args) )
+        return accepted_filters
+
+
+class GridOperation( object ):
+    def __init__( self, label, key=None, condition=None, allow_multiple=True, allow_popup=True,
+                  target=None, url_args=None, async_compatible=False, confirm=None,
+                  global_operation=None, inbound=False ):
+        self.label = label
+        self.key = key
+        self.allow_multiple = allow_multiple
+        self.allow_popup = allow_popup
+        self.condition = condition
+        self.target = target
+        self.url_args = url_args
+        self.async_compatible = async_compatible
+        self.inbound = inbound
+        # if 'confirm' is set, then ask before completing the operation
+        self.confirm = confirm
+        # specify a general operation that acts on the full grid
+        # this should be a function returning a dictionary with parameters
+        # to pass to the URL, similar to GridColumn links:
+        # global_operation=(lambda: dict(operation="download")
+        self.global_operation = global_operation
+
+    def get_url_args( self, item ):
+        if self.url_args:
+            if hasattr( self.url_args, '__call__' ):
+                url_args = self.url_args( item )
+            else:
+                url_args = dict( self.url_args )
+            url_args['id'] = item.id
+            return url_args
+        else:
+            return dict( operation=self.label, id=item.id )
+
+    def allowed( self, item ):
+        if self.condition:
+            return bool(self.condition( item ))
+        else:
+            return True
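+
+# A minimal sketch (hypothetical, not referenced elsewhere) of an operation
+# that asks for confirmation and is only allowed on non-deleted items:
+_example_delete_operation = GridOperation(
+    "Delete",
+    condition=( lambda item: not item.deleted ),
+    confirm="Are you sure you want to delete this item?",
+    async_compatible=True )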
+
+
+class DisplayByUsernameAndSlugGridOperation( GridOperation ):
+    """ Operation to display an item by username and slug. """
+    def get_url_args( self, item ):
+        return { 'action' : 'display_by_username_and_slug', 'username' : item.user.username, 'slug' : item.slug }
+
+
+class GridAction( object ):
+    def __init__( self, label=None, url_args=None, inbound=False ):
+        self.label = label
+        self.url_args = url_args
+        self.inbound = inbound
+
+
+class GridColumnFilter( object ):
+    def __init__( self, label, args=None ):
+        self.label = label
+        self.args = args
+
+    def get_url_args( self ):
+        rval = {}
+        for k, v in self.args.items():
+            rval[ "f-" + k ] = v
+        return rval
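+
+
+# Illustrative note: get_url_args prefixes each argument with "f-" so the
+# filter survives a round trip through the query string, e.g. a filter
+# constructed as GridColumnFilter( "deleted", args={ "deleted": "True" } )
+# yields { "f-deleted": "True" }.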
diff --git a/lib/galaxy/web/framework/middleware/__init__.py b/lib/galaxy/web/framework/middleware/__init__.py
new file mode 100644
index 0000000..1aaf4f0
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/__init__.py
@@ -0,0 +1,3 @@
+"""
+WSGI Middleware.
+"""
diff --git a/lib/galaxy/web/framework/middleware/batch.py b/lib/galaxy/web/framework/middleware/batch.py
new file mode 100644
index 0000000..58dfd7b
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/batch.py
@@ -0,0 +1,185 @@
+"""
+Batch API middleware
+
+Adds a single route to the installation that:
+  1. accepts a POST call containing a JSON array of 'http-like' JSON
+     dictionaries;
+  2. routes each dictionary, which describes a single API call within the
+     batch, back to the application's `handle_request` as if it were a
+     separate request;
+  3. combines the responses into a final JSON list that is returned from
+     the POST call.
+
+In this way, API calls can be kept properly atomic and the endpoint can compose
+them into complex tasks using only one request.
+
+.. note:: This batch system is primarily designed for use by the UI, as these
+    types of batch operations *reduce the number of requests* for a given
+    group of API tasks. In other words, this is not about batching jobs.
+
+.. warning:: This endpoint is experimental and is likely to change.
+"""
+import io
+from urlparse import urlparse
+import json
+import re
+
+from paste import httpexceptions
+import routes
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class BatchMiddleware( object ):
+    """
+    Adds a URL endpoint for processing batch API calls formatted as a JSON
+    array of JSON dictionaries. These dictionaries are in the form:
+    [
+        {
+            "url": "/api/histories",
+            "type": "POST",
+            "body": "{ \"name\": \"New History Name\" }"
+        },
+        ...
+    ]
+
+    where:
+      * `url` is the url for the API call to be made including any query string
+      * `type` is the HTTP method used (e.g. 'POST', 'PUT') - defaults to 'GET'
+      * `body` is the text body of the request (optional)
+      * `contentType` content-type request header (defaults to application/json)
+    """
+    DEFAULT_CONFIG = {
+        'route' : '/api/batch',
+        'allowed_routes' : [
+            r'^api/users.*',
+            r'^api/histories.*',
+            r'^api/jobs.*',
+        ]
+    }
+
+    def __init__( self, galaxy, application, config=None ):
+        #: the original galaxy webapp
+        self.galaxy = galaxy
+        #: the wrapped webapp
+        self.application = application
+        self.config = self.DEFAULT_CONFIG.copy()
+        self.config.update( config or {} )
+        self.base_url = routes.url_for( '/' )
+        self.handle_request = self.galaxy.handle_request
+
+    def __call__( self, environ, start_response ):
+        if environ[ 'PATH_INFO' ] == self.config[ 'route' ]:
+            return self.process_batch_requests( environ, start_response )
+        return self.application( environ, start_response )
+
+    def process_batch_requests( self, batch_environ, start_response ):
+        """
+        Loops through any provided JSON formatted 'requests', aggregates their
+        JSON responses, and wraps them in the batch call response.
+        """
+        payload = self._read_post_payload( batch_environ )
+        requests = payload.get( 'batch', [] )
+
+        responses = []
+        for request in requests:
+            if not self._is_allowed_route( request[ 'url' ] ):
+                responses.append( self._disallowed_route_response( request[ 'url' ] ) )
+                continue
+
+            request_environ = self._build_request_environ( batch_environ, request )
+            response = self._process_batch_request( request, request_environ, start_response )
+            responses.append( response )
+
+        batch_response_body = json.dumps( responses )
+        start_response( '200 OK', [
+            ( 'Content-Length', str( len( batch_response_body ) ) ),
+            ( 'Content-Type', 'application/json' ),
+        ])
+        return batch_response_body
+
+    def _read_post_payload( self, environ ):
+        request_body_size = int( environ.get( 'CONTENT_LENGTH' ) or 0 )
+        request_body = environ[ 'wsgi.input' ].read( request_body_size ) or '{}'
+        # TODO: json decode error handling
+        # log.debug( 'request_body: (%s)\n%s', type( request_body ), request_body )
+        payload = json.loads( request_body )
+        return payload
+
+    def _is_allowed_route( self, route ):
+        if self.config.get( 'allowed_routes', None ):
+            shortened_route = route.replace( self.base_url, '', 1 )
+            matches = [ re.match( allowed, shortened_route ) for allowed in self.config[ 'allowed_routes' ] ]
+            return any( matches )
+        return True
+
+    def _disallowed_route_response( self, route ):
+        return dict( status=403, headers=self._default_headers(), body={
+            'err_msg'   : 'Disallowed route used for batch operation',
+            'route'     : route,
+            'allowed'   : self.config[ 'allowed_routes' ]
+        })
+
+    def _build_request_environ( self, original_environ, request ):
+        """
+        Given a request and the original environ used to call the batch, return
+        a new environ suitable for the individual API call.
+        """
+        # TODO: use a dict of defaults/config
+        # copy the original environ and reconstruct a fake version for each batched request
+        request_environ = original_environ.copy()
+        # TODO: for now, do not overwrite the other headers used in the main api/batch request
+        request_environ[ 'CONTENT_TYPE' ] = request.get( 'contentType', 'application/json' )
+        request_environ[ 'REQUEST_METHOD' ] = request.get( 'method', request.get( 'type', 'GET' ) )
+        url = '{0}://{1}{2}'.format( request_environ.get( 'wsgi.url_scheme' ),
+                                     request_environ.get( 'HTTP_HOST' ),
+                                     request[ 'url' ] )
+        parsed = urlparse( url )
+        request_environ[ 'PATH_INFO' ] = parsed.path
+        request_environ[ 'QUERY_STRING' ] = parsed.query
+
+        request_body = request.get( 'body', u'' )
+        # set this to None so webob/request will copy the body using the raw bytes
+        # if we set it, webob will try to use the buffer interface on a unicode string
+        request_environ[ 'CONTENT_LENGTH' ] = None
+        # this may well need to change in py3
+        request_body = io.BytesIO( bytearray( request_body, encoding='utf8' ) )
+        request_environ[ 'wsgi.input' ] = request_body
+        # log.debug( 'request_environ:\n%s', pprint.pformat( request_environ ) )
+
+        return request_environ
+
+    def _process_batch_request( self, request, environ, start_response ):
+        # We may need to include middleware to record various responses, but this way of doing that won't work:
+        # status, headers, body = self.application( environ, start_response, body_renderer=self.body_renderer )
+
+        # We have to re-create the handle request method here in order to bypass reusing the 'api/batch' request
+        #   because reuse will cause the paste error:
+        # File "./eggs/Paste-1.7.5.1-py2.7.egg/paste/httpserver.py", line 166, in wsgi_start_response
+        #     assert 0, "Attempt to set headers a second time w/o an exc_info"
+        try:
+            response = self.galaxy.handle_request( environ, start_response, body_renderer=self.body_renderer )
+        # handle errors from galaxy.handle_request (only 404s)
+        except httpexceptions.HTTPNotFound:
+            response = dict( status=404, headers=self._default_headers(), body={} )
+        return response
+
+    def body_renderer( self, trans, body, environ, start_response ):
+        # this is a dummy renderer that does not call start_response
+        # See 'We have to re-create the handle request method...' in _process_batch_request above
+        return dict(
+            status=trans.response.status,
+            headers=trans.response.headers,
+            body=json.loads( self.galaxy.make_body_iterable( trans, body )[0] )
+        )
+
+    def _default_headers( self ):
+        return {
+            'x-frame-options': 'SAMEORIGIN',
+            'content-type'   : 'application/json',
+            'cache-control'  : 'max-age=0,no-cache,no-store'
+        }
+
+    def handle_exception( self, environ ):
+        return False
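+
+
+# A minimal sketch (hypothetical helper, not used by the middleware) of a
+# payload a client might POST to the batch route configured above:
+def _example_batch_payload():
+    """Return a JSON string describing a two-call batch request."""
+    return json.dumps( { 'batch': [
+        { 'url': '/api/histories', 'type': 'GET' },
+        { 'url': '/api/histories', 'type': 'POST',
+          'body': json.dumps( { 'name': 'New History Name' } ) },
+    ] } )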
diff --git a/lib/galaxy/web/framework/middleware/error.py b/lib/galaxy/web/framework/middleware/error.py
new file mode 100644
index 0000000..581d989
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/error.py
@@ -0,0 +1,498 @@
+# Like many things, this started life as part of Paste
+
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+Error handler middleware
+
+When an exception is thrown from the wrapped application, this middleware
+logs the exception and displays an error page.
+"""
+import cgi
+import sys
+import traceback
+
+import six
+from paste import (
+    request,
+    wsgilib
+)
+from paste.exceptions import collector, formatter, reporter
+from six.moves import cStringIO as StringIO
+
+__all__ = ('ErrorMiddleware', 'handle_exception')
+
+
+class _NoDefault(object):
+    def __repr__(self):
+        return '<NoDefault>'
+
+
+NoDefault = _NoDefault()
+
+
+class ErrorMiddleware(object):
+
+    """
+    Error handling middleware
+
+    Usage::
+
+        error_catching_wsgi_app = ErrorMiddleware(wsgi_app)
+
+    Settings:
+
+      ``debug``:
+          If true, then tracebacks will be shown in the browser.
+
+      ``error_email``:
+          an email address (or list of addresses) to send exception
+          reports to
+
+      ``error_log``:
+          a filename to append tracebacks to
+
+      ``show_exceptions_in_wsgi_errors``:
+          If true, then errors will be printed to ``wsgi.errors``
+          (frequently a server error log, or stderr).
+
+      ``from_address``, ``smtp_server``, ``error_subject_prefix``, ``smtp_username``, ``smtp_password``, ``smtp_use_tls``:
+          variables to control the emailed exception reports
+
+      ``error_message``:
+          When debug mode is off, the error message to show to users.
+
+      ``xmlhttp_key``:
+          When this key (default ``_``) is in the request GET variables
+          (not POST!), expect that this is an XMLHttpRequest, and the
+          response should be more minimal; it should not be a complete
+          HTML page.
+
+    Environment Configuration:
+
+      ``paste.throw_errors``:
+          If this setting in the request environment is true, then this
+          middleware is disabled. This can be useful in a testing situation
+          where you don't want errors to be caught and transformed.
+
+      ``paste.expected_exceptions``:
+          When this middleware encounters an exception listed in this
+          environment variable and ``start_response`` has not yet been
+          called, the exception will be re-raised instead of being caught.
+          This should generally be set by middleware that may be (but
+          probably shouldn't be) installed above this middleware and that
+          wants to receive certain exceptions.  Exceptions raised after
+          ``start_response`` has been called are always caught, since by
+          definition they are no longer expected.
+
+    """
+
+    def __init__(self, application, global_conf=None,
+                 debug=NoDefault,
+                 error_email=None,
+                 error_log=None,
+                 show_exceptions_in_wsgi_errors=NoDefault,
+                 from_address=None,
+                 smtp_server=None,
+                 smtp_username=None,
+                 smtp_password=None,
+                 smtp_use_tls=False,
+                 error_subject_prefix=None,
+                 error_message=None,
+                 xmlhttp_key=None):
+        from paste.util import converters
+        self.application = application
+        # @@: global_conf should be handled elsewhere in a separate
+        # function for the entry point
+        if global_conf is None:
+            global_conf = {}
+        if debug is NoDefault:
+            debug = converters.asbool(global_conf.get('debug'))
+        if show_exceptions_in_wsgi_errors is NoDefault:
+            show_exceptions_in_wsgi_errors = converters.asbool(global_conf.get('show_exceptions_in_wsgi_errors'))
+        self.debug_mode = converters.asbool(debug)
+        if error_email is None:
+            error_email = (global_conf.get('error_email') or
+                           global_conf.get('admin_email') or
+                           global_conf.get('webmaster_email') or
+                           global_conf.get('sysadmin_email'))
+        self.error_email = converters.aslist(error_email)
+        self.error_log = error_log
+        self.show_exceptions_in_wsgi_errors = show_exceptions_in_wsgi_errors
+        if from_address is None:
+            from_address = global_conf.get('error_from_address', 'errors at localhost')
+        self.from_address = from_address
+        if smtp_server is None:
+            smtp_server = global_conf.get('smtp_server', 'localhost')
+        self.smtp_server = smtp_server
+        self.smtp_username = smtp_username or global_conf.get('smtp_username')
+        self.smtp_password = smtp_password or global_conf.get('smtp_password')
+        self.smtp_use_tls = smtp_use_tls or converters.asbool(global_conf.get('smtp_use_tls'))
+        self.error_subject_prefix = error_subject_prefix or ''
+        if error_message is None:
+            error_message = global_conf.get('error_message')
+        self.error_message = error_message
+        if xmlhttp_key is None:
+            xmlhttp_key = global_conf.get('xmlhttp_key', '_')
+        self.xmlhttp_key = xmlhttp_key
+
+    def __call__(self, environ, start_response):
+        """
+        The WSGI application interface.
+        """
+        # We want to be careful about not sending headers twice,
+        # and the content type that the app has committed to (if there
+        # is an exception in the iterator body of the response)
+        if environ.get('paste.throw_errors'):
+            return self.application(environ, start_response)
+        environ['paste.throw_errors'] = True
+
+        try:
+            __traceback_supplement__ = Supplement, self, environ
+            sr_checker = ResponseStartChecker(start_response)
+            app_iter = self.application(environ, sr_checker)
+            return self.make_catching_iter(app_iter, environ, sr_checker)
+        except:
+            exc_info = sys.exc_info()
+            try:
+                for expect in environ.get('paste.expected_exceptions', []):
+                    if isinstance(exc_info[1], expect):
+                        raise
+                start_response('500 Internal Server Error',
+                               [('content-type', 'text/html')],
+                               exc_info)
+                # @@: it would be nice to deal with bad content types here
+                response = self.exception_handler(exc_info, environ)
+                return [response]
+            finally:
+                # clean up locals...
+                exc_info = None
+
+    def make_catching_iter(self, app_iter, environ, sr_checker):
+        if isinstance(app_iter, (list, tuple)):
+            # These don't raise
+            return app_iter
+        return CatchingIter(app_iter, environ, sr_checker, self)
+
+    def exception_handler(self, exc_info, environ):
+        simple_html_error = False
+        if self.xmlhttp_key:
+            get_vars = wsgilib.parse_querystring(environ)
+            if dict(get_vars).get(self.xmlhttp_key):
+                simple_html_error = True
+        return handle_exception(
+            exc_info, environ['wsgi.errors'],
+            html=True,
+            debug_mode=self.debug_mode,
+            error_email=self.error_email,
+            error_log=self.error_log,
+            show_exceptions_in_wsgi_errors=self.show_exceptions_in_wsgi_errors,
+            error_email_from=self.from_address,
+            smtp_server=self.smtp_server,
+            smtp_username=self.smtp_username,
+            smtp_password=self.smtp_password,
+            smtp_use_tls=self.smtp_use_tls,
+            error_subject_prefix=self.error_subject_prefix,
+            error_message=self.error_message,
+            simple_html_error=simple_html_error,
+            environ=environ)
+
+
+class ResponseStartChecker(object):
+    def __init__(self, start_response):
+        self.start_response = start_response
+        self.response_started = False
+
+    def __call__(self, *args):
+        self.response_started = True
+        # Return whatever the wrapped start_response would have
+        # returned
+        return self.start_response(*args)
+
+
+class CatchingIter(six.Iterator):
+
+    """
+    A wrapper around the application iterator that will catch
+    exceptions raised by the generator, or by the close method, and
+    display or report them as necessary.
+    """
+
+    def __init__(self, app_iter, environ, start_checker, error_middleware):
+        self.app_iterable = app_iter
+        self.app_iterator = iter(app_iter)
+        self.environ = environ
+        self.start_checker = start_checker
+        self.error_middleware = error_middleware
+        self.closed = False
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        __traceback_supplement__ = (
+            Supplement, self.error_middleware, self.environ)
+        if self.closed:
+            raise StopIteration
+        try:
+            return next(self.app_iterator)
+        except StopIteration:
+            self.closed = True
+            close_response = self._close()
+            if close_response is not None:
+                return close_response
+            else:
+                raise StopIteration
+        except:
+            self.closed = True
+            close_response = self._close()
+            exc_info = sys.exc_info()
+            response = self.error_middleware.exception_handler(
+                exc_info, self.environ)
+            if close_response is not None:
+                response += (
+                    '<hr noshade>Error in .close():<br>%s'
+                    % close_response)
+
+            if not self.start_checker.response_started:
+                self.start_checker('500 Internal Server Error',
+                                   [('content-type', 'text/html')],
+                                   exc_info)
+
+            return response
+
+    def close(self):
+        # This should at least print something to stderr if the
+        # close method fails at this point
+        if not self.closed:
+            self._close()
+
+    def _close(self):
+        """Close and return any error message"""
+        if not hasattr(self.app_iterable, 'close'):
+            return None
+        try:
+            self.app_iterable.close()
+            return None
+        except:
+            close_response = self.error_middleware.exception_handler(
+                sys.exc_info(), self.environ)
+            return close_response
+
+
+class Supplement(object):
+
+    """
+    This is a supplement used to display standard WSGI information in
+    the traceback.
+    """
+
+    def __init__(self, middleware, environ):
+        self.middleware = middleware
+        self.environ = environ
+        self.source_url = request.construct_url(environ)
+
+    def extraData(self):
+        data = {}
+        cgi_vars = data[('extra', 'CGI Variables')] = {}
+        wsgi_vars = data[('extra', 'WSGI Variables')] = {}
+        hide_vars = ['paste.config', 'wsgi.errors', 'wsgi.input',
+                     'wsgi.multithread', 'wsgi.multiprocess',
+                     'wsgi.run_once', 'wsgi.version',
+                     'wsgi.url_scheme']
+        for name, value in self.environ.items():
+            if name.upper() == name:
+                if value:
+                    cgi_vars[name] = value
+            elif name not in hide_vars:
+                wsgi_vars[name] = value
+        if self.environ['wsgi.version'] != (1, 0):
+            wsgi_vars['wsgi.version'] = self.environ['wsgi.version']
+        proc_desc = tuple([int(bool(self.environ[key]))
+                           for key in ('wsgi.multiprocess',
+                                       'wsgi.multithread',
+                                       'wsgi.run_once')])
+        wsgi_vars['wsgi process'] = self.process_combos[proc_desc]
+        wsgi_vars['application'] = self.middleware.application
+        if 'paste.config' in self.environ:
+            data[('extra', 'Configuration')] = dict(self.environ['paste.config'])
+        return data
+
+    process_combos = {
+        # multiprocess, multithread, run_once
+        (0, 0, 0): 'Non-concurrent server',
+        (0, 1, 0): 'Multithreaded',
+        (1, 0, 0): 'Multiprocess',
+        (1, 1, 0): 'Multi process AND threads (?)',
+        (0, 0, 1): 'Non-concurrent CGI',
+        (0, 1, 1): 'Multithread CGI (?)',
+        (1, 0, 1): 'CGI',
+        (1, 1, 1): 'Multi thread/process CGI (?)',
+    }
+
+
+def handle_exception(exc_info, error_stream, html=True,
+                     debug_mode=False,
+                     error_email=None,
+                     error_log=None,
+                     show_exceptions_in_wsgi_errors=False,
+                     error_email_from='errors at localhost',
+                     smtp_server='localhost',
+                     smtp_username=None,
+                     smtp_password=None,
+                     smtp_use_tls=False,
+                     error_subject_prefix='',
+                     error_message=None,
+                     simple_html_error=False,
+                     environ=None
+                     ):
+    """
+    For exception handling outside of a web context
+
+    Use like::
+
+        import sys
+        from paste.exceptions.errormiddleware import handle_exception
+        try:
+            do stuff
+        except:
+            handle_exception(
+                sys.exc_info(), sys.stderr, html=False, ...other config...)
+
+    If you want to report, but not fully catch the exception, call
+    ``raise`` after ``handle_exception``, which (when given no argument)
+    will reraise the exception.
+    """
+    reported = False
+    exc_data = collector.collect_exception(*exc_info)
+    extra_data = ''
+    if error_email:
+        rep = reporter.EmailReporter(
+            to_addresses=error_email,
+            from_address=error_email_from,
+            smtp_server=smtp_server,
+            smtp_username=smtp_username,
+            smtp_password=smtp_password,
+            smtp_use_tls=smtp_use_tls,
+            subject_prefix=error_subject_prefix)
+        rep_err = send_report(rep, exc_data, html=html)
+        if rep_err:
+            extra_data += rep_err
+        else:
+            reported = True
+    if error_log:
+        rep = reporter.LogReporter(
+            filename=error_log)
+        rep_err = send_report(rep, exc_data, html=html)
+        if rep_err:
+            extra_data += rep_err
+        else:
+            reported = True
+    if show_exceptions_in_wsgi_errors:
+        rep = reporter.FileReporter(
+            file=error_stream)
+        rep_err = send_report(rep, exc_data, html=html)
+        if rep_err:
+            extra_data += rep_err
+        else:
+            reported = True
+    else:
+        error_stream.write('Error - %s: %s\n' % (
+            exc_data.exception_type, exc_data.exception_value))
+    if html:
+        if debug_mode and simple_html_error:
+            return_error = formatter.format_html(
+                exc_data, include_hidden_frames=False,
+                include_reusable=False, show_extra_data=False)
+            reported = True
+        elif debug_mode and not simple_html_error:
+            error_html = formatter.format_html(
+                exc_data,
+                include_hidden_frames=True,
+                include_reusable=False)
+            head_html = formatter.error_css + formatter.hide_display_js
+            return_error = error_template(
+                head_html, error_html, extra_data)
+            extra_data = ''
+            reported = True
+        else:
+            msg = error_message or '''
+            An error occurred.
+            '''
+            extra = "<p><b>The error has been logged to our team.</b>"
+            if 'sentry_event_id' in environ:
+                extra += " If you want to contact us about this error, please reference the following<br><br>"
+                extra += "<b><large>GURU MEDITATION: #" + environ['sentry_event_id'] + "</large></b>"
+            extra += "</p>"
+            return_error = error_template('', msg, extra)
+    else:
+        return_error = None
+    if not reported and error_stream:
+        err_report = formatter.format_text(exc_data, show_hidden_frames=True)
+        err_report += '\n' + '-' * 60 + '\n'
+        error_stream.write(err_report)
+    if extra_data:
+        error_stream.write(extra_data)
+    return return_error
+
+
+def send_report(rep, exc_data, html=True):
+    try:
+        rep.report(exc_data)
+    except:
+        output = StringIO()
+        traceback.print_exc(file=output)
+        if html:
+            return """
+            <p>Additionally an error occurred while sending the %s report:
+
+            <pre>%s</pre>
+            </p>""" % (
+                cgi.escape(str(rep)), output.getvalue())
+        else:
+            return (
+                "Additionally an error occurred while sending the "
+                "%s report:\n%s" % (str(rep), output.getvalue()))
+    else:
+        return ''
+
+
+def error_template(head_html, exception, extra):
+    return '''
+    <!DOCTYPE HTML>
+    <html>
+    <head>
+    <style type="text/css">
+    body { color: #303030; background: #dfe5f9; font-family:"Lucida Grande",verdana,arial,helvetica,sans-serif; font-size:12px; line-height:16px; }
+    .content { max-width: 720px; margin: auto; margin-top: 50px; }
+    </style>
+    <title>Internal Server Error</title>
+    %s
+    </head>
+    <body>
+    <div class="content">
+    <h1>Internal Server Error</h1>
+
+    <h2>Galaxy was unable to successfully complete your request</h2>
+
+    <p>%s</p>
+
+    This may be an intermittent problem due to load or other unpredictable factors; reloading the page may resolve it.
+
+    %s
+    </div>
+    </body>
+    </html>''' % (head_html, exception, extra)
+
+
+def make_error_middleware(app, global_conf, **kw):
+    return ErrorMiddleware(app, global_conf=global_conf, **kw)
+
+
+doc_lines = ErrorMiddleware.__doc__.splitlines(True)
+for i in range(len(doc_lines)):
+    if doc_lines[i].strip().startswith('Settings'):
+        make_error_middleware.__doc__ = ''.join(doc_lines[i:])
+        break
+del i, doc_lines
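+
+
+# A minimal sketch (assumed names, illustration only) of wiring this
+# middleware around a WSGI app with browser tracebacks enabled:
+#
+#     app = ErrorMiddleware( wsgi_app, global_conf={ 'debug': 'true' } )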
diff --git a/lib/galaxy/web/framework/middleware/profile.py b/lib/galaxy/web/framework/middleware/profile.py
new file mode 100644
index 0000000..d4fa6bc
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/profile.py
@@ -0,0 +1,175 @@
+"""
+Middleware that profiles the request with cProfile and displays profiling
+information at the bottom of each page.
+"""
+
+import threading
+import cgi
+import cProfile
+import pstats
+
+from paste import response
+
+
+template = """
+<script>
+function show_profile_output()
+{
+var win = window.open("", "win"); // a window object
+var doc = win.document;
+doc.open("text/html", "replace");
+doc.write("<HTML><HEAD><TITLE>Profiler output</TITLE></HEAD><BODY>")
+doc.write(document.getElementById( 'profile_output' ).innerHTML)
+doc.write("</BODY></HTML>");
+doc.close();
+}
+function show_inline()
+{
+document.getElementById( 'profile_output' ).style.display="block";
+}
+</script>
+<div style="background-color: #ff9; color: #000; border: 2px solid #000; padding: 5px;">
+show profile output: <a href="javascript:show_inline();">inline</a> | <a href="javascript:show_profile_output();">new window</a>
+<div id="profile_output" style="display: none">
+<hr />
+%s
+</div>
+</div>
+"""
+
+
+class ProfileMiddleware(object):
+
+    """
+    Middleware that profiles all requests.
+
+    All HTML pages will have profiling information appended to them.
+    The data is isolated to that single request, and does not include
+    data from previous requests.
+    """
+
+    def __init__( self, app, global_conf=None, limit=40 ):
+        self.app = app
+        self.lock = threading.Lock()
+        self.limit = limit
+
+    def __call__(self, environ, start_response):
+        catch_response = []
+        body = []
+
+        def replace_start_response(status, headers, exc_info=None):
+            catch_response.extend([status, headers])
+            start_response(status, headers, exc_info)
+            return body.append
+
+        def run_app():
+            body.extend(self.app(environ, replace_start_response))
+        # Run in profiler
+        prof = cProfile.Profile()
+        prof.runctx( "run_app()", globals(), locals() )
+        # Build up body with stats
+        body = ''.join(body)
+        headers = catch_response[1]
+        content_type = response.header_value(headers, 'content-type')
+        if not content_type.startswith('text/html'):
+            # We can't add info to non-HTML output
+            return [body]
+        stats = pstats.Stats( prof )
+        stats.strip_dirs()
+        stats.sort_stats( 'time', 'calls' )
+        output = pstats_as_html( stats, self.limit )
+        body += template % output
+        return [body]
+
+
+def pstats_as_html( stats, *sel_list ):
+    """
+    Return an HTML representation of a pstats.Stats object.
+    """
+    rval = []
+    # Number of function calls, primitive calls, total time
+    rval.append( "<div>%d function calls (%d primitive) in %0.3f CPU seconds</div>"
+                 % ( stats.total_calls, stats.prim_calls, stats.total_tt ) )
+    # Extract functions that match 'sel_list'
+    funcs, order_message, select_message = get_func_list( stats, sel_list )
+    # Deal with any ordering or selection messages
+    if order_message:
+        rval.append( "<div>%s</div>" % cgi.escape( order_message ) )
+    if select_message:
+        rval.append( "<div>%s</div>" % cgi.escape( select_message ) )
+    # Build a table for the functions
+    if funcs:
+        rval.append( "<table>" )
+        # Header
+        rval.append( "<tr><th>ncalls</th>"
+                     "<th>tottime</th>"
+                     "<th>percall</th>"
+                     "<th>cumtime</th>"
+                     "<th>percall</th>"
+                     "<th>filename:lineno(function)</th></tr>" )
+        for func in funcs:
+            rval.append( "<tr>" )
+            # Calculate each field
+            cc, nc, tt, ct, callers = stats.stats[ func ]
+            # ncalls
+            ncalls = str(nc)
+            if nc != cc:
+                ncalls = ncalls + '/' + str(cc)
+            rval.append( "<td>%s</td>" % cgi.escape( ncalls ) )
+            # tottime
+            rval.append( "<td>%0.8f</td>" % tt )
+            # percall
+            if nc == 0:
+                percall = ""
+            else:
+                percall = "%0.8f" % ( tt / nc )
+            rval.append( "<td>%s</td>" % cgi.escape( percall ) )
+            # cumtime
+            rval.append( "<td>%0.8f</td>" % ct )
+            # ctpercall
+            if cc == 0:
+                ctpercall = ""
+            else:
+                ctpercall = "%0.8f" % ( ct / cc )
+            rval.append( "<td>%s</td>" % cgi.escape( ctpercall ) )
+            # location
+            rval.append( "<td>%s</td>" % cgi.escape( func_std_string( func ) ) )
+            # row complete
+            rval.append( "</tr>" )
+        rval.append( "</table>")
+        # Concatenate result
+        return "".join( rval )
+
+
+def get_func_list( stats, sel_list ):
+    """
+    Use 'sel_list' to select a list of functions to display.
+    """
+    # Determine if an ordering was applied
+    if stats.fcn_list:
+        func_list = stats.fcn_list[:]
+        order_message = "Ordered by: " + stats.sort_type
+    else:
+        func_list = stats.stats.keys()
+        order_message = "Random listing order was used"
+    # Do the selection and accumulate messages
+    select_message = ""
+    for selection in sel_list:
+        func_list, select_message = stats.eval_print_amount( selection, func_list, select_message )
+    # Return the list of functions selected and the message
+    return func_list, order_message, select_message
+
+
+def func_std_string( func_name ):
+    """
+    Match what old profile produced
+    """
+    if func_name[:2] == ('~', 0):
+        # special case for built-in functions
+        name = func_name[2]
+        if name.startswith('<') and name.endswith('>'):
+            return '{%s}' % name[1:-1]
+        else:
+            return name
+    else:
+        return "%s:%d(%s)" % func_name
diff --git a/lib/galaxy/web/framework/middleware/remoteuser.py b/lib/galaxy/web/framework/middleware/remoteuser.py
new file mode 100644
index 0000000..5446718
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/remoteuser.py
@@ -0,0 +1,205 @@
+"""
+Middleware for handling $REMOTE_USER if use_remote_user is enabled.
+"""
+
+import socket
+from galaxy.util import safe_str_cmp
+import logging
+log = logging.getLogger(__name__)
+
+errorpage = """
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html lang="en">
+    <head>
+        <title>Galaxy</title>
+        <style type="text/css">
+        body {
+            min-width: 500px;
+            text-align: center;
+        }
+        .errormessage {
+            font: 75%% verdana, "Bitstream Vera Sans", geneva, arial, helvetica, helve, sans-serif;
+            padding: 10px;
+            margin: 100px auto;
+            min-height: 32px;
+            max-width: 500px;
+            border: 1px solid #AA6666;
+            background-color: #FFCCCC;
+            text-align: left;
+        }
+        </style>
+    </head>
+    <body>
+        <div class="errormessage">
+            <h4>%s</h4>
+            <p>%s</p>
+        </div>
+    </body>
+</html>
+"""
+
+
+class RemoteUser( object ):
+
+    def __init__( self, app, maildomain=None, display_servers=None, admin_users=None,
+                  single_user=None, remote_user_header=None, remote_user_secret_header=None,
+                  normalize_remote_user_email=False ):
+        self.app = app
+        self.maildomain = maildomain
+        self.display_servers = display_servers or []
+        self.admin_users = admin_users or []
+        self.remote_user_header = remote_user_header or 'HTTP_REMOTE_USER'
+        self.single_user = single_user
+        self.config_secret_header = remote_user_secret_header
+        self.normalize_remote_user_email = normalize_remote_user_email
+
+    def __call__( self, environ, start_response ):
+        # Allow display servers
+        if self.display_servers and 'REMOTE_ADDR' in environ:
+            try:
+                host = socket.gethostbyaddr( environ[ 'REMOTE_ADDR' ] )[0]
+            except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
+                # in the event of a lookup failure, deny access
+                host = None
+            if host in self.display_servers:
+                environ[ self.remote_user_header ] = 'remote_display_server@%s' % ( self.maildomain or 'example.org' )
+                return self.app( environ, start_response )
+
+        if self.single_user:
+            assert self.remote_user_header not in environ
+            environ[ self.remote_user_header ] = self.single_user
+
+        if self.remote_user_header in environ:
+            # process remote user with configuration options.
+            if self.normalize_remote_user_email:
+                environ[self.remote_user_header] = environ[self.remote_user_header].lower()
+            if self.maildomain and '@' not in environ[self.remote_user_header]:
+                environ[self.remote_user_header] = "%s@%s" % (environ[self.remote_user_header], self.maildomain)
+
+        path_info = environ.get('PATH_INFO', '')
+
+        # The API handles its own authentication via keys
+        # Check for API key before checking for header
+        if path_info.startswith( '/api/' ):
+            return self.app( environ, start_response )
+
+        # If the secret header is enabled, we expect upstream to send along some key
+        # in HTTP_GX_SECRET, so we'll need to compare that here to the correct value
+        #
+        # This is not an ideal location for this check: because it runs
+        # BEFORE the REMOTE_USER check, it is possible to probe the
+        # GX_SECRET key without valid credentials. It is still "good
+        # enough", though: the only users able to exploit this are ones
+        # with access to the local system (unless Galaxy is listening on
+        # 0.0.0.0....), and it seems improbable that an attacker with
+        # access to the server hosting Galaxy would not also have access
+        # to Galaxy itself.
+        if self.config_secret_header is not None:
+            if environ.get('HTTP_GX_SECRET') is None:
+                title = "Access to Galaxy is denied"
+                message = """
+                Galaxy is configured to authenticate users via an external
+                method (such as HTTP authentication in Apache), but
+                no shared secret key was provided by the
+                upstream (proxy) server.</p>
+                <p>Please contact your local Galaxy administrator.  The
+                <code>remote_user_secret</code> variable and the
+                <code>GX_SECRET</code> header must be set before you may
+                access Galaxy.
+                """
+                return self.error( start_response, title, message )
+            if not safe_str_cmp(environ.get('HTTP_GX_SECRET', ''), self.config_secret_header):
+                title = "Access to Galaxy is denied"
+                message = """
+                Galaxy is configured to authenticate users via an external
+                method (such as HTTP authentication in Apache), but an
+                incorrect shared secret key was provided by the
+                upstream (proxy) server.</p>
+                <p>Please contact your local Galaxy administrator.  The
+                <code>remote_user_secret</code> variable and the
+                <code>GX_SECRET</code> header must be set before you may
+                access Galaxy.
+                """
+                return self.error( start_response, title, message )
+
+        # Apache sets REMOTE_USER to the string '(null)' when using the
+        # Rewrite* method for passing REMOTE_USER and a user is
+        # un-authenticated.  Any other possible values need to go here as well.
+        if not environ.get(self.remote_user_header, '(null)').startswith('(null)'):
+            if '@' not in environ[ self.remote_user_header ]:
+                if self.maildomain is not None:
+                    environ[ self.remote_user_header ] += '@' + self.maildomain
+                else:
+                    title = "Access to Galaxy is denied"
+                    message = """
+                        Galaxy is configured to authenticate users via an external
+                        method (such as HTTP authentication in Apache), but only a
+                        username (not an email address) was provided by the
+                        upstream (proxy) server.  Since Galaxy usernames are email
+                        addresses, a default mail domain must be set.</p>
+                        <p>Please contact your local Galaxy administrator.  The
+                        variable <code>remote_user_maildomain</code> must be set
+                        before you may access Galaxy.
+                    """
+                    return self.error( start_response, title, message )
+            user_accessible_paths = (
+                '/user/api_keys',
+                '/user/edit_username',
+                '/user/dbkeys',
+                '/user/toolbox_filters',
+                '/user/set_default_permissions',
+                '/user/change_communication',
+            )
+
+            admin_accessible_paths = (
+                '/user/create',
+                '/user/logout',
+                '/user/manage_user_info',
+                '/user/edit_info',
+                '/userskeys/all_users',
+            )
+
+            if not path_info.startswith('/user'):
+                # shortcut the following whitelist for non-user-controller
+                # requests.
+                pass
+            elif environ[self.remote_user_header] in self.admin_users and \
+                    any([path_info.startswith(prefix) for prefix in admin_accessible_paths]):
+                # If the user is an admin and the path matches one of the
+                # admin-accessible paths, allow the request.
+                pass
+            elif any([path_info.startswith(prefix) for prefix in user_accessible_paths]):
+                # If the user is allowed to access the path, pass
+                pass
+            elif path_info == '/user' or path_info == '/user/':
+                pass  # We do allow access to the root user preferences page.
+            elif path_info.startswith( '/user' ):
+                # Any other endpoint in the user controller is off limits
+                title = "Access to Galaxy user controls is disabled"
+                message = """
+                    User controls are disabled when Galaxy is configured
+                    for external authentication.
+                """
+                return self.error( start_response, title, message )
+            return self.app( environ, start_response )
+        else:
+            log.debug("Unable to identify user.  %s not found" % self.remote_user_header)
+            for k, v in environ.iteritems():
+                log.debug("%s = %s", k, v)
+
+            title = "Access to Galaxy is denied"
+            message = """
+                Galaxy is configured to authenticate users via an external
+                method (such as HTTP authentication in Apache), but a username
+                was not provided by the upstream (proxy) server.  This is
+                generally due to a misconfiguration in the upstream server.</p>
+                <p>Please contact your local Galaxy administrator.
+            """
+            return self.error( start_response, title, message )
+
+    def error( self, start_response, title="Access denied", message="Please contact your local Galaxy administrator." ):
+        start_response( '403 Forbidden', [('Content-type', 'text/html')] )
+        return [errorpage % (title, message)]
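
A minimal sketch of how this middleware slots into a WSGI stack (the demo
app, header value, and mail domain below are illustrative assumptions, not
part of the patch):

    from galaxy.web.framework.middleware.remoteuser import RemoteUser

    def demo_app(environ, start_response):
        # The wrapped app only ever sees the normalized header.
        start_response('200 OK', [('Content-type', 'text/plain')])
        return ['Hello %s' % environ.get('HTTP_REMOTE_USER', 'anonymous')]

    # A bare username passed by the proxy is qualified with the mail
    # domain, so demo_app receives HTTP_REMOTE_USER == 'jdoe@example.org'.
    app = RemoteUser(demo_app, maildomain='example.org')
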
diff --git a/lib/galaxy/web/framework/middleware/request_id.py b/lib/galaxy/web/framework/middleware/request_id.py
new file mode 100644
index 0000000..1f7500b
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/request_id.py
@@ -0,0 +1,14 @@
+import uuid
+
+
+class RequestIDMiddleware( object ):
+    """
+    A WSGI middleware that creates a unique ID for the request and
+    puts it in the environment.
+    """
+    def __init__( self, app, global_conf=None ):
+        self.app = app
+
+    def __call__( self, environ, start_response ):
+        environ['request_id'] = uuid.uuid1().hex
+        return self.app( environ, start_response )
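
A short sketch of consuming the ID downstream (the echoing app is
hypothetical):

    from galaxy.web.framework.middleware.request_id import RequestIDMiddleware

    def app(environ, start_response):
        start_response('200 OK', [('Content-type', 'text/plain')])
        # Each request carries a fresh hex UUID, e.g. 'a2f06c5e...'.
        return ['request %s' % environ['request_id']]

    wrapped = RequestIDMiddleware(app)
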
diff --git a/lib/galaxy/web/framework/middleware/sentry.py b/lib/galaxy/web/framework/middleware/sentry.py
new file mode 100644
index 0000000..194ab08
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/sentry.py
@@ -0,0 +1,99 @@
+"""
+raven.middleware
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
+:license: BSD, see LICENSE for more details.
+"""
+
+try:
+    from raven import Client
+    from raven.utils.wsgi import get_current_url, get_headers, get_environ
+except ImportError:
+    Client = None
+
+from galaxy.util.postfork import register_postfork_function
+
+
+RAVEN_IMPORT_MESSAGE = ('The Python raven package is required to use this '
+                        'feature, please install it')
+
+
+class Sentry(object):
+    """
+    A WSGI middleware which will attempt to capture any
+    uncaught exceptions and send them to Sentry.
+    """
+    def __init__(self, application, dsn):
+        assert Client is not None, RAVEN_IMPORT_MESSAGE
+        self.application = application
+        self.client = None
+
+        def postfork_sentry_client():
+            self.client = Client( dsn )
+
+        register_postfork_function(postfork_sentry_client)
+
+    def __call__(self, environ, start_response):
+        try:
+            iterable = self.application(environ, start_response)
+        except Exception:
+            self.handle_exception(environ)
+            raise
+
+        try:
+            for event in iterable:
+                yield event
+        except Exception:
+            self.handle_exception(environ)
+            raise
+        finally:
+            # The WSGI spec requires the caller to invoke close() on the
+            # iterable if it provides one
+            # see http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html
+            if iterable and hasattr(iterable, 'close') and callable(iterable.close):
+                try:
+                    iterable.close()
+                except Exception:
+                    self.handle_exception(environ)
+
+    def handle_exception(self, environ):
+        headers = dict(get_headers(environ))
+        # Authorization header for REMOTE_USER sites consists of a base64() of
+        # their plaintext password. It is a security issue for this password to
+        # be exposed to a third party system which may or may not be under
+        # control of the same administrators as the local Authentication
+        # system. E.g. university LDAP systems.
+        if 'Authorization' in headers:
+            # Redact so the administrator knows that a value is indeed present.
+            headers['Authorization'] = 'redacted'
+        # Passing cookies allows for impersonation of users (depending on
+        # remote service) and can be considered a security risk as well. For
+        # multiple services running alongside Galaxy on the same host, this
+        # could allow a sentry user with access to logs to impersonate a user
+        # on another service. In the case of services like IPython, this can be
+        # a serious concern as that would allow for terminal access. Furthermore,
+        # very little debugging information is gained by having access to all
+        # of the user's cookies (including Galaxy cookies).
+        if 'Cookie' in headers:
+            headers['Cookie'] = 'redacted'
+        event_id = self.client.captureException(
+            data={
+                'sentry.interfaces.Http': {
+                    'method': environ.get('REQUEST_METHOD'),
+                    'url': get_current_url(environ, strip_querystring=True),
+                    'query_string': environ.get('QUERY_STRING'),
+                    # TODO
+                    # 'data': environ.get('wsgi.input'),
+                    'headers': headers,
+                    'env': dict(get_environ(environ)),
+                }
+            },
+            # Galaxy: add request id from environment if available
+            extra={
+                'request_id': environ.get( 'request_id', 'Unknown' )
+            }
+        )
+        # Galaxy: store event_id in environment so we can show it to the user
+        environ['sentry_event_id'] = event_id
+
+        return event_id
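
Wiring the middleware up is a one-liner; note that the raven Client is only
created by the registered postfork function, so a postfork-capable server
(e.g. uWSGI) is assumed. The DSN below is a placeholder:

    from galaxy.web.framework.middleware.sentry import Sentry

    # 'app' is an existing WSGI application.
    app = Sentry(app, dsn='https://public:secret@sentry.example.org/1')
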
diff --git a/lib/galaxy/web/framework/middleware/static.py b/lib/galaxy/web/framework/middleware/static.py
new file mode 100644
index 0000000..c68838d
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/static.py
@@ -0,0 +1,55 @@
+import os
+
+from paste import (
+    fileapp,
+    request
+)
+from paste.httpheaders import ETAG
+from paste.urlparser import StaticURLParser
+
+
+class CacheableStaticURLParser( StaticURLParser ):
+
+    def __init__( self, directory, cache_seconds=None ):
+        StaticURLParser.__init__( self, directory )
+        self.cache_seconds = cache_seconds
+
+    def __call__( self, environ, start_response ):
+        path_info = environ.get('PATH_INFO', '')
+        if not path_info:
+            # See if this is a static file hackishly mapped.
+            if os.path.exists(self.directory) and os.path.isfile(self.directory):
+                app = fileapp.FileApp(self.directory)
+                if self.cache_seconds:
+                    app.cache_control( max_age=int( self.cache_seconds ) )
+                return app(environ, start_response)
+            return self.add_slash(environ, start_response)
+        if path_info == '/':
+            # @@: This should obviously be configurable
+            filename = 'index.html'
+        else:
+            filename = request.path_info_pop(environ)
+        full = os.path.join(self.directory, filename)
+        if not os.path.exists(full):
+            return self.not_found(environ, start_response)
+        if os.path.isdir(full):
+            # @@: Cache?
+            return self.__class__(full, cache_seconds=self.cache_seconds)(environ, start_response)
+        if environ.get('PATH_INFO') and environ.get('PATH_INFO') != '/':
+            return self.error_extra_path(environ, start_response)
+        if_none_match = environ.get('HTTP_IF_NONE_MATCH')
+        if if_none_match:
+            mytime = os.stat(full).st_mtime
+            if str(mytime) == if_none_match:
+                headers = []
+                ETAG.update(headers, mytime)
+                start_response('304 Not Modified', headers)
+                return ['']  # empty body
+        app = fileapp.FileApp(full)
+        if self.cache_seconds:
+            app.cache_control( max_age=int( self.cache_seconds ) )
+        return app(environ, start_response)
+
+
+def make_static( global_conf, document_root, cache_seconds=None ):
+    return CacheableStaticURLParser( document_root, cache_seconds )
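
A sketch of standalone use (directory name and cache lifetime are
assumptions): files under ./static are served with a Cache-Control header,
and a repeat request whose If-None-Match matches the file's mtime-based
ETag gets a bodyless 304:

    from galaxy.web.framework.middleware.static import CacheableStaticURLParser

    # Serve ./static with a one-hour client-side cache.
    static_app = CacheableStaticURLParser('static', cache_seconds=3600)
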
diff --git a/lib/galaxy/web/framework/middleware/statsd.py b/lib/galaxy/web/framework/middleware/statsd.py
new file mode 100644
index 0000000..e831e31
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/statsd.py
@@ -0,0 +1,37 @@
+"""
+Middleware for sending request statistics to statsd
+"""
+from __future__ import absolute_import
+import time
+
+try:
+    import statsd
+except ImportError:
+    # This middleware will never be used without statsd.  This block allows
+    # unit tests to pass on systems without it.
+    statsd = None
+
+
+class StatsdMiddleware(object):
+    """
+    This middleware will log request durations to the configured statsd
+    instance.
+    """
+
+    def __init__(self,
+                 application,
+                 statsd_host,
+                 statsd_port,
+                 statsd_prefix):
+        if not statsd:
+            raise ImportError( "Statsd middleware configured, but no statsd python module found. "
+                               "Please install the python statsd module to use this functionality." )
+        self.application = application
+        self.statsd_client = statsd.StatsClient(statsd_host, statsd_port, prefix=statsd_prefix)
+
+    def __call__(self, environ, start_response):
+        start_time = time.time()
+        req = self.application(environ, start_response)
+        dt = int((time.time() - start_time) * 1000)
+        key = (environ.get('controller_action_key', None) or
+               environ.get('PATH_INFO', "NOPATH").strip('/').replace('/', '.'))
+        self.statsd_client.timing(key, dt)
+        return req
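
The timing key falls back from 'controller_action_key' to a dotted form of
the request path, for example:

    # '/api/histories/abc' is reported under the key 'api.histories.abc'.
    path = '/api/histories/abc'
    key = path.strip('/').replace('/', '.')
    assert key == 'api.histories.abc'
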
diff --git a/lib/galaxy/web/framework/middleware/translogger.py b/lib/galaxy/web/framework/middleware/translogger.py
new file mode 100644
index 0000000..286f39b
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/translogger.py
@@ -0,0 +1,121 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Middleware for logging requests, using Apache combined log format
+"""
+import logging
+import time
+import urllib
+
+from six import string_types
+
+
+class TransLogger(object):
+    """
+    This logging middleware will log all requests as they go through.
+    They are, by default, sent to a logger named ``'wsgi'`` at the
+    INFO level.
+
+    If ``setup_console_handler`` is true, then messages for the named
+    logger will be sent to the console.
+    """
+
+    format = ('%(REMOTE_ADDR)s - %(REMOTE_USER)s [%(time)s] '
+              '"%(REQUEST_METHOD)s %(REQUEST_URI)s %(HTTP_VERSION)s" '
+              '%(status)s %(bytes)s "%(HTTP_REFERER)s" "%(HTTP_USER_AGENT)s"')
+
+    def __init__(self, application,
+                 logger=None,
+                 format=None,
+                 logging_level=logging.INFO,
+                 logger_name='wsgi',
+                 setup_console_handler=True,
+                 set_logger_level=logging.DEBUG):
+        if format is not None:
+            self.format = format
+        self.application = application
+        self.logging_level = logging_level
+        self.logger_name = logger_name
+        if logger is None:
+            self.logger = logging.getLogger(self.logger_name)
+            if setup_console_handler:
+                console = logging.StreamHandler()
+                console.setLevel(logging.DEBUG)
+                # We need to control the exact format:
+                console.setFormatter(logging.Formatter('%(message)s'))
+                self.logger.addHandler(console)
+                self.logger.propagate = False
+            if set_logger_level is not None:
+                self.logger.setLevel(set_logger_level)
+        else:
+            self.logger = logger
+
+    def __call__(self, environ, start_response):
+        start = time.localtime()
+        req_uri = urllib.quote(environ.get('SCRIPT_NAME', '') +
+                               environ.get('PATH_INFO', ''))
+        if environ.get('QUERY_STRING'):
+            req_uri += '?' + environ['QUERY_STRING']
+        method = environ['REQUEST_METHOD']
+
+        def replacement_start_response(status, headers, exc_info=None):
+            # @@: Ideally we would count the bytes going by if no
+            # content-length header was provided; but that does add
+            # some overhead, so at least for now we'll be lazy.
+            bytes = None
+            for name, value in headers:
+                if name.lower() == 'content-length':
+                    bytes = value
+            self.write_log(environ, method, req_uri, start, status, bytes)
+            return start_response( status, headers, exc_info )
+        return self.application(environ, replacement_start_response)
+
+    def write_log(self, environ, method, req_uri, start, status, bytes):
+        if bytes is None:
+            bytes = '-'
+        if time.daylight:
+            offset = time.altzone / 60 / 60 * -100
+        else:
+            offset = time.timezone / 60 / 60 * -100
+        if offset >= 0:
+            offset = "+%0.4d" % (offset)
+        elif offset < 0:
+            offset = "%0.4d" % (offset)
+        d = {
+            'REMOTE_ADDR': environ.get('REMOTE_ADDR') or '-',
+            'REMOTE_USER': environ.get('REMOTE_USER') or '-',
+            'REQUEST_METHOD': method,
+            'REQUEST_URI': req_uri,
+            'HTTP_VERSION': environ.get('SERVER_PROTOCOL'),
+            'time': time.strftime('%d/%b/%Y:%H:%M:%S ', start) + offset,
+            'status': status.split(None, 1)[0],
+            'bytes': bytes,
+            'HTTP_REFERER': environ.get('HTTP_REFERER', '-'),
+            'HTTP_USER_AGENT': environ.get('HTTP_USER_AGENT', '-'),
+        }
+        message = self.format % d
+        self.logger.log(self.logging_level, message)
+
+
+def make_filter(
+        app, global_conf,
+        logger_name='wsgi',
+        format=None,
+        logging_level=logging.INFO,
+        setup_console_handler=True,
+        set_logger_level=logging.DEBUG):
+    from paste.util.converters import asbool
+    if isinstance(logging_level, string_types):
+        logging_level = logging._levelNames[logging_level]
+    if isinstance(set_logger_level, string_types):
+        set_logger_level = logging._levelNames[set_logger_level]
+    return TransLogger(
+        app,
+        format=format or None,
+        logging_level=logging_level,
+        logger_name=logger_name,
+        setup_console_handler=asbool(setup_console_handler),
+        set_logger_level=set_logger_level)
+
+
+make_filter.__doc__ = TransLogger.__doc__
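
For reference, the default format above produces standard Apache combined
log lines; an illustrative (not captured) example:

    127.0.0.1 - - [15/Jan/2017:12:48:26 +0000] "GET /history HTTP/1.1" 200 2326 "-" "Mozilla/5.0"
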
diff --git a/lib/galaxy/web/framework/middleware/xforwardedhost.py b/lib/galaxy/web/framework/middleware/xforwardedhost.py
new file mode 100644
index 0000000..8ea6fce
--- /dev/null
+++ b/lib/galaxy/web/framework/middleware/xforwardedhost.py
@@ -0,0 +1,22 @@
+class XForwardedHostMiddleware( object ):
+    """
+    A WSGI middleware that rewrites the host, remote address, and URL
+    scheme in the WSGI environ from the X-Forwarded-Host, X-Forwarded-For,
+    and X-Url-Scheme headers when present, preserving the original values
+    """
+    def __init__( self, app, global_conf=None ):
+        self.app = app
+
+    def __call__( self, environ, start_response ):
+        x_forwarded_host = environ.get( 'HTTP_X_FORWARDED_HOST', None )
+        if x_forwarded_host:
+            environ[ 'ORIGINAL_HTTP_HOST' ] = environ[ 'HTTP_HOST' ]
+            environ[ 'HTTP_HOST' ] = x_forwarded_host.split(', ', 1)[0]
+        x_forwarded_for = environ.get( 'HTTP_X_FORWARDED_FOR', None )
+        if x_forwarded_for:
+            environ[ 'ORIGINAL_REMOTE_ADDR' ] = environ[ 'REMOTE_ADDR' ]
+            environ[ 'REMOTE_ADDR' ] = x_forwarded_for.split(', ', 1)[0]
+        x_url_scheme = environ.get( 'HTTP_X_URL_SCHEME', None )
+        if x_url_scheme:
+            environ[ 'original_wsgi.url_scheme' ] = environ[ 'wsgi.url_scheme' ]
+            environ[ 'wsgi.url_scheme' ] = x_url_scheme
+        return self.app( environ, start_response )
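
A sketch of the rewrite (header values are illustrative): the first entry
of a comma-separated forwarded list wins, and the original values are
preserved under the ORIGINAL_* keys:

    from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware

    environ = {
        'HTTP_HOST': 'localhost:8080',
        'REMOTE_ADDR': '10.0.0.1',
        'HTTP_X_FORWARDED_HOST': 'galaxy.example.org',
        'HTTP_X_FORWARDED_FOR': '203.0.113.7, 10.0.0.1',
        'wsgi.url_scheme': 'http',
    }
    app = XForwardedHostMiddleware(lambda environ, start_response: [])
    app(environ, lambda status, headers: None)
    # environ['HTTP_HOST']          == 'galaxy.example.org'
    # environ['REMOTE_ADDR']        == '203.0.113.7'
    # environ['ORIGINAL_HTTP_HOST'] == 'localhost:8080'
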
diff --git a/lib/galaxy/web/framework/openid_manager.py b/lib/galaxy/web/framework/openid_manager.py
new file mode 100644
index 0000000..3856b22
--- /dev/null
+++ b/lib/galaxy/web/framework/openid_manager.py
@@ -0,0 +1,75 @@
+"""
+Manage the OpenID consumer and related data stores.
+"""
+
+import os
+import pickle
+import logging
+
+try:
+    from openid import oidutil
+    from openid.store import filestore
+    from openid.consumer import consumer
+    from openid.extensions import sreg
+except ImportError:
+    oidutil = None
+
+    class FakeConsumer( object ):
+        def __getattr__( self, name ):
+            return None
+    consumer = FakeConsumer()
+
+
+OPENID_IMPORT_MESSAGE = ('The Python openid package is required to use this '
+                         'feature, please install it')
+
+log = logging.getLogger( __name__ )
+
+
+def oidlog( message, level=0 ):
+    log.debug( message )
+
+
+if oidutil is not None:
+    oidutil.log = oidlog
+
+
+class OpenIDManager( object ):
+    def __init__( self, cache_path ):
+        assert oidutil is not None, OPENID_IMPORT_MESSAGE
+        self.session_path = os.path.join( cache_path, 'session' )
+        self.store_path = os.path.join( cache_path, 'store' )
+        for path in ( self.session_path, self.store_path ):
+            if not os.path.exists( path ):
+                os.makedirs( path )
+        self.store = filestore.FileOpenIDStore( self.store_path )
+
+    def get_session( self, trans ):
+        session_file = os.path.join( self.session_path, str( trans.galaxy_session.id ) )
+        if not os.path.exists( session_file ):
+            pickle.dump( dict(), open( session_file, 'w' ) )
+        return pickle.load( open( session_file ) )
+
+    def persist_session( self, trans, oidconsumer ):
+        session_file = os.path.join( self.session_path, str( trans.galaxy_session.id ) )
+        pickle.dump( oidconsumer.session, open( session_file, 'w' ) )
+
+    def get_consumer( self, trans ):
+        return consumer.Consumer( self.get_session( trans ), self.store )
+
+    def add_sreg( self, trans, request, required=None, optional=None ):
+        if required is None:
+            required = []
+        if optional is None:
+            optional = []
+        sreg_request = sreg.SRegRequest( required=required, optional=optional )
+        request.addExtension( sreg_request )
+
+    def get_sreg( self, info ):
+        return sreg.SRegResponse.fromSuccessResponse( info )
+
+    # so I don't have to expose all of openid.consumer.consumer
+    FAILURE = consumer.FAILURE
+    SUCCESS = consumer.SUCCESS
+    CANCEL = consumer.CANCEL
+    SETUP_NEEDED = consumer.SETUP_NEEDED
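
A sketch of the session round-trip (the stub transaction and cache path are
assumptions; the python-openid package must be installed):

    from galaxy.web.framework.openid_manager import OpenIDManager

    class StubGalaxySession(object):
        id = 42

    class StubTrans(object):
        galaxy_session = StubGalaxySession()

    manager = OpenIDManager('/tmp/openid-cache')
    oidconsumer = manager.get_consumer(StubTrans())  # backed by a pickled dict
    manager.persist_session(StubTrans(), oidconsumer)
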
diff --git a/lib/galaxy/web/framework/webapp.py b/lib/galaxy/web/framework/webapp.py
new file mode 100644
index 0000000..188df61
--- /dev/null
+++ b/lib/galaxy/web/framework/webapp.py
@@ -0,0 +1,943 @@
+"""
+"""
+import datetime
+import inspect
+import os
+import hashlib
+import random
+import socket
+import string
+import time
+import urlparse
+from Cookie import CookieError
+from importlib import import_module
+
+from Cheetah.Template import Template
+import mako.runtime
+import mako.lookup
+from babel.support import Translations
+from babel import Locale
+from six import string_types
+from sqlalchemy import and_, true
+from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.orm import joinedload
+
+from galaxy.exceptions import MessageException
+
+from galaxy import util
+from galaxy.util import asbool
+from galaxy.util import safe_str_cmp
+from galaxy.util.sanitize_html import sanitize_html
+
+from galaxy.managers import context
+from galaxy.web.framework import url_for
+from galaxy.web.framework import base
+from galaxy.web.framework import helpers
+from galaxy.web.framework import formbuilder
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+UCSC_SERVERS = (
+    'hgw1.cse.ucsc.edu',
+    'hgw2.cse.ucsc.edu',
+    'hgw3.cse.ucsc.edu',
+    'hgw4.cse.ucsc.edu',
+    'hgw5.cse.ucsc.edu',
+    'hgw6.cse.ucsc.edu',
+    'hgw7.cse.ucsc.edu',
+    'hgw8.cse.ucsc.edu',
+)
+
+
+class WebApplication( base.WebApplication ):
+    """
+    Base WSGI application instantiated for all Galaxy webapps.
+
+    A web application that:
+        * adds API and UI controllers by scanning given directories and
+          importing all modules found there.
+        * has a security object.
+        * builds mako template lookups.
+        * generates GalaxyWebTransactions.
+    """
+    def __init__( self, galaxy_app, session_cookie='galaxysession', name=None ):
+        self.name = name
+        base.WebApplication.__init__( self )
+        self.set_transaction_factory( lambda e: self.transaction_chooser( e, galaxy_app, session_cookie ) )
+        # Mako support
+        self.mako_template_lookup = self.create_mako_template_lookup( galaxy_app, name )
+        # Security helper
+        self.security = galaxy_app.security
+
+    def create_mako_template_lookup( self, galaxy_app, name ):
+        paths = []
+        # First look in webapp specific directory
+        if name is not None:
+            paths.append( os.path.join( galaxy_app.config.template_path, 'webapps', name ) )
+        # Then look in root directory
+        paths.append( galaxy_app.config.template_path )
+        # Create TemplateLookup with a small cache
+        return mako.lookup.TemplateLookup(directories=paths,
+                                          module_directory=galaxy_app.config.template_cache,
+                                          collection_size=500,
+                                          output_encoding='utf-8' )
+
+    def handle_controller_exception( self, e, trans, **kwargs ):
+        if isinstance( e, MessageException ):
+            # In the case of a controller exception, sanitize to make sure
+            # unsafe html input isn't reflected back to the user
+            return trans.show_message( sanitize_html(e.err_msg), e.type )
+
+    def make_body_iterable( self, trans, body ):
+        if isinstance( body, formbuilder.FormBuilder ):
+            body = trans.show_form( body )
+        return base.WebApplication.make_body_iterable( self, trans, body )
+
+    def transaction_chooser( self, environ, galaxy_app, session_cookie ):
+        return GalaxyWebTransaction( environ, galaxy_app, self, session_cookie )
+
+    def add_ui_controllers( self, package_name, app ):
+        """
+        Search for UI controllers in `package_name` and add
+        them to the webapp.
+        """
+        from galaxy.web.base.controller import BaseUIController
+        from galaxy.web.base.controller import ControllerUnavailable
+        package = import_module( package_name )
+        controller_dir = package.__path__[0]
+        for fname in os.listdir( controller_dir ):
+            if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
+                name = fname[:-3]
+                module_name = package_name + "." + name
+                try:
+                    module = import_module( module_name )
+                except ControllerUnavailable as exc:
+                    log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
+                    continue
+                # Look for a controller inside the module
+                for key in dir( module ):
+                    T = getattr( module, key )
+                    if inspect.isclass( T ) and T is not BaseUIController and issubclass( T, BaseUIController ):
+                        controller = self._instantiate_controller( T, app )
+                        self.add_ui_controller( name, controller )
+
+    def add_api_controllers( self, package_name, app ):
+        """
+        Search for API controllers in `package_name` and add
+        them to the webapp.
+        """
+        from galaxy.web.base.controller import BaseAPIController
+        from galaxy.web.base.controller import ControllerUnavailable
+        package = import_module( package_name )
+        controller_dir = package.__path__[0]
+        for fname in os.listdir( controller_dir ):
+            if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
+                name = fname[:-3]
+                module_name = package_name + "." + name
+                try:
+                    module = import_module( module_name )
+                except ControllerUnavailable as exc:
+                    log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
+                    continue
+                for key in dir( module ):
+                    T = getattr( module, key )
+                    # Exclude classes such as BaseAPIController and BaseTagItemsController
+                    if inspect.isclass( T ) and not key.startswith("Base") and issubclass( T, BaseAPIController ):
+                        # By default use module_name, but allow controller to override name
+                        controller_name = getattr( T, "controller_name", name )
+                        controller = self._instantiate_controller( T, app )
+                        self.add_api_controller( controller_name, controller )
+
+    def _instantiate_controller( self, T, app ):
+        """ Extension point, allow apps to contstruct controllers differently,
+        really just used to stub out actual controllers for routes testing.
+        """
+        return T( app )
+
+
+class GalaxyWebTransaction( base.DefaultWebTransaction,
+                            context.ProvidesAppContext, context.ProvidesUserContext, context.ProvidesHistoryContext ):
+    """
+    Encapsulates web transaction specific state for the Galaxy application
+    (specifically the user's "cookie" session and history)
+    """
+
+    def __init__( self, environ, app, webapp, session_cookie=None):
+        self.app = app
+        self.webapp = webapp
+        self.security = webapp.security
+        base.DefaultWebTransaction.__init__( self, environ )
+        self.setup_i18n()
+        self.expunge_all()
+        config = self.app.config
+        self.debug = asbool( config.get( 'debug', False ) )
+        x_frame_options = getattr( config, 'x_frame_options', None )
+        if x_frame_options:
+            self.response.headers['X-Frame-Options'] = x_frame_options
+        # Flag indicating whether we are in workflow building mode (means
+        # that the current history should not be used for parameter values
+        # and such).
+        self.workflow_building_mode = False
+        # Flag indicating whether this is an API call and the API key user is an administrator
+        self.api_inherit_admin = False
+        self.__user = None
+        self.galaxy_session = None
+        self.error_message = None
+
+        # set any cross origin resource sharing headers if configured to do so
+        self.set_cors_headers()
+
+        if self.environ.get('is_api_request', False):
+            # With API requests, if there's a key, use it and associate the
+            # user with the transaction.
+            # If not, check for an active session but do not create one.
+            # If an error message is set here, it's sent back using
+            # trans.show_error in the response -- in expose_api.
+            self.error_message = self._authenticate_api( session_cookie )
+        elif self.app.name == "reports":
+            self.galaxy_session = None
+        else:
+            # This is a web request, get or create session.
+            self._ensure_valid_session( session_cookie )
+        if self.galaxy_session:
+            # When we've authenticated by session, we have to check the
+            # following.
+            # Prevent deleted users from accessing Galaxy
+            if config.use_remote_user and self.galaxy_session.user.deleted:
+                self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
+            if config.require_login:
+                self._ensure_logged_in_user( environ, session_cookie )
+            if config.session_duration:
+                # TODO DBTODO All ajax calls from the client need to go through
+                # a single point of control where we can do things like
+                # redirect/etc.  This applies to API calls as well as the
+                # roughly 40 @web.json requests that might not be handled
+                # well on the client side.
+                #
+                # Make sure we're not past the duration, and either log out or
+                # update timestamp.
+                now = datetime.datetime.now()
+                if self.galaxy_session.last_action:
+                    expiration_time = self.galaxy_session.last_action + datetime.timedelta(minutes=config.session_duration)
+                else:
+                    expiration_time = now
+                    self.galaxy_session.last_action = now - datetime.timedelta(seconds=1)
+                    self.sa_session.add(self.galaxy_session)
+                    self.sa_session.flush()
+                if expiration_time < now:
+                    # Expiration time has passed.
+                    self.handle_user_logout()
+                    if self.environ.get('is_api_request', False):
+                        self.response.status = 401
+                        self.user = None
+                        self.galaxy_session = None
+                    else:
+                        self.response.send_redirect( url_for( controller='user',
+                                                     action='login',
+                                                     message="You have been logged out due to inactivity.  Please log in again to continue using Galaxy.",
+                                                     status='info',
+                                                     use_panels=True ) )
+                else:
+                    self.galaxy_session.last_action = now
+                    self.sa_session.add(self.galaxy_session)
+                    self.sa_session.flush()
+
+    def setup_i18n( self ):
+        locales = []
+        if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
+            # locales looks something like: ['en', 'en-us;q=0.7', 'ja;q=0.3']
+            client_locales = self.environ['HTTP_ACCEPT_LANGUAGE'].split( ',' )
+            for locale in client_locales:
+                try:
+                    locales.append( Locale.parse( locale.split( ';' )[0].strip(), sep='-' ).language )
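+                    # e.g. 'en-us;q=0.7' -> Locale.parse('en-us', sep='-').language == 'en'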
+                except Exception as e:
+                    log.debug( "Error parsing locale '%s'. %s: %s", locale, type( e ), e )
+        if not locales:
+            # Default to English
+            locales = 'en'
+        t = Translations.load( dirname='locale', locales=locales, domain='ginga' )
+        self.template_context.update( dict( _=t.ugettext, n_=t.ugettext, N_=t.ungettext ) )
+
+    def set_cors_headers( self ):
+        """Allow CORS requests if configured to do so by echoing back the request's
+        'Origin' header (if any) as the response header 'Access-Control-Allow-Origin'
+        """
+        # TODO: in order to use these, we need preflight to work, and to do that we
+        # need the OPTIONS method on all api calls (or everywhere we can POST/PUT)
+        # ALLOWED_METHODS = ( 'POST', 'PUT' )
+
+        # do not set any access control headers if not configured for it (common case)
+        if not self.app.config.get( 'allowed_origin_hostnames', None ):
+            return
+        # do not set any access control headers if there's no origin header on the request
+        origin_header = self.request.headers.get( "Origin", None )
+        if not origin_header:
+            return
+
+        # singular match
+        def matches_allowed_origin( origin, allowed_origin ):
+            if isinstance( allowed_origin, string_types ):
+                return origin == allowed_origin
+            match = allowed_origin.match( origin )
+            return match and match.group() == origin
+
+        # check for '*' or compare to list of allowed
+        def is_allowed_origin( origin ):
+            # localhost uses no origin header (== null)
+            if not origin:
+                return False
+            for allowed_origin in self.app.config.allowed_origin_hostnames:
+                if allowed_origin == '*' or matches_allowed_origin( origin, allowed_origin ):
+                    return True
+            return False
+
+        # boil origin header down to hostname
+        origin = urlparse.urlparse( origin_header ).hostname
+        # check against the list of allowed strings/regexp hostnames, echo original if cleared
+        if is_allowed_origin( origin ):
+            self.response.headers[ 'Access-Control-Allow-Origin' ] = origin_header
+            # TODO: see the to do on ALLOWED_METHODS above
+            # self.response.headers[ 'Access-Control-Allow-Methods' ] = ', '.join( ALLOWED_METHODS )
+
+        # NOTE: raising some errors (such as httpexceptions), will remove the header
+        # (e.g. client will get both cors error and 404 inside that)
+
+    def get_user( self ):
+        """Return the current user if logged in or None."""
+        if self.galaxy_session:
+            return self.galaxy_session.user
+        else:
+            return self.__user
+
+    def set_user( self, user ):
+        """Set the current user."""
+        if self.galaxy_session:
+            self.galaxy_session.user = user
+            self.sa_session.add( self.galaxy_session )
+            self.sa_session.flush()
+        self.__user = user
+
+    user = property( get_user, set_user )
+
+    def get_cookie( self, name='galaxysession' ):
+        """Convenience method for getting a session cookie"""
+        try:
+            # If we've changed the cookie during the request return the new value
+            if name in self.response.cookies:
+                return self.response.cookies[name].value
+            else:
+                return self.request.cookies[name].value
+        except KeyError:
+            return None
+
+    def set_cookie( self, value, name='galaxysession', path='/', age=90, version='1' ):
+        """Convenience method for setting a session cookie"""
+        # The galaxysession cookie value must be a high entropy 128 bit random number encrypted
+        # using a server secret key.  Any other value is invalid and could pose security issues.
+        self.response.cookies[name] = value
+        self.response.cookies[name]['path'] = path
+        self.response.cookies[name]['max-age'] = 3600 * 24 * age  # 'age' is in days (90 by default)
+        tstamp = time.localtime( time.time() + 3600 * 24 * age )
+        self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
+        self.response.cookies[name]['version'] = version
+        https = self.request.environ[ "wsgi.url_scheme" ] == "https"
+        if https:
+            self.response.cookies[name]['secure'] = True
+        try:
+            self.response.cookies[name]['httponly'] = True
+        except CookieError as e:
+            log.warning( "Error setting httponly attribute in cookie '%s': %s" % ( name, e ) )
+
+    def _authenticate_api( self, session_cookie ):
+        """
+        Authenticate for the API via key or session (if available).
+        """
+        api_key = self.request.params.get('key', None)
+        secure_id = self.get_cookie( name=session_cookie )
+        api_key_supplied = self.environ.get('is_api_request', False) and api_key
+        if api_key_supplied and self._check_master_api_key( api_key ):
+            self.api_inherit_admin = True
+            log.info( "Session authenticated using Galaxy master api key" )
+            self.user = None
+            self.galaxy_session = None
+        elif api_key_supplied:
+            # Sessionless API transaction, we just need to associate a user.
+            try:
+                provided_key = self.sa_session.query( self.app.model.APIKeys ).filter( self.app.model.APIKeys.table.c.key == api_key ).one()
+            except NoResultFound:
+                return 'Provided API key is not valid.'
+            if provided_key.user.deleted:
+                return 'User account is deactivated, please contact an administrator.'
+            newest_key = provided_key.user.api_keys[0]
+            if newest_key.key != provided_key.key:
+                return 'Provided API key has expired.'
+            self.set_user( provided_key.user )
+        elif secure_id:
+            # API authentication via active session
+            # Associate user using existing session
+            self._ensure_valid_session( session_cookie )
+        else:
+            # Anonymous API interaction -- anything but @expose_api_anonymous will fail past here.
+            self.user = None
+            self.galaxy_session = None
+
+    def _check_master_api_key( self, api_key ):
+        master_api_key = getattr( self.app.config, 'master_api_key', None )
+        if not master_api_key:
+            return False
+        # Hash keys to make them the same size, so we can do safe comparison.
+        master_hash = hashlib.sha256( master_api_key ).hexdigest()
+        provided_hash = hashlib.sha256( api_key ).hexdigest()
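+        # Both digests are fixed-length (64 hex characters), so the
+        # comparison time does not depend on the provided key.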
+        return safe_str_cmp( master_hash, provided_hash )
+
+    def _ensure_valid_session( self, session_cookie, create=True):
+        """
+        Ensure that a valid Galaxy session exists and is available as
+        trans.session (part of initialization)
+
+        Support for universe_session and universe_user cookies has been
+        removed as of 31 Oct 2008.
+        """
+        # Try to load an existing session
+        secure_id = self.get_cookie( name=session_cookie )
+        galaxy_session = None
+        prev_galaxy_session = None
+        user_for_new_session = None
+        invalidate_existing_session = False
+        # Track whether the session has changed so we can avoid calling flush
+        # in the most common case (session exists and is valid).
+        galaxy_session_requires_flush = False
+        if secure_id:
+            # Decode the cookie value to get the session_key
+            session_key = self.security.decode_guid( secure_id )
+            try:
+                # Make sure we have a valid UTF-8 string
+                session_key = session_key.encode( 'utf8' )
+            except UnicodeDecodeError:
+                # We'll end up creating a new galaxy_session
+                session_key = None
+            if session_key:
+                # Retrieve the galaxy_session id via the unique session_key
+                galaxy_session = self.sa_session.query( self.app.model.GalaxySession ) \
+                                                .filter( and_( self.app.model.GalaxySession.table.c.session_key == session_key,
+                                                               self.app.model.GalaxySession.table.c.is_valid == true() ) ).options( joinedload( "user" ) ).first()
+        # If remote user is in use it can invalidate the session and in some
+        # cases won't have a cookie set above, so we need to check some
+        # things now.
+        if self.app.config.use_remote_user:
+            # If this is an api request, and they've passed a key, we let this go.
+            assert self.app.config.remote_user_header in self.environ, \
+                "use_remote_user is set but %s header was not provided" % self.app.config.remote_user_header
+            remote_user_email = self.environ[ self.app.config.remote_user_header ]
+            if galaxy_session:
+                # An existing session, make sure correct association exists
+                if galaxy_session.user is None:
+                    # No user, associate
+                    galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
+                    galaxy_session_requires_flush = True
+                elif (not remote_user_email.startswith('(null)') and  # Apache does this, see remoteuser.py
+                      (galaxy_session.user.email != remote_user_email) and
+                      ((not self.app.config.allow_user_impersonation) or
+                       (remote_user_email not in self.app.config.admin_users_list))):
+                    # Session exists but is not associated with the correct
+                    # remote user, and the currently set remote_user is not a
+                    # potentially impersonating admin.
+                    invalidate_existing_session = True
+                    user_for_new_session = self.get_or_create_remote_user( remote_user_email )
+                    log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
+                                 remote_user_email, galaxy_session.user.email )
+            else:
+                # No session exists, get/create user for new session
+                user_for_new_session = self.get_or_create_remote_user( remote_user_email )
+        else:
+            if galaxy_session is not None and galaxy_session.user and galaxy_session.user.external:
+                # Remote user support is not enabled, but there is an existing
+                # session with an external user, invalidate
+                invalidate_existing_session = True
+                log.warning( "User '%s' is an external user with an existing session, invalidating session since external auth is disabled",
+                             galaxy_session.user.email )
+            elif galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted:
+                invalidate_existing_session = True
+                log.warning( "User '%s' is marked deleted, invalidating session" % galaxy_session.user.email )
+        # Do we need to invalidate the session for some reason?
+        if invalidate_existing_session:
+            prev_galaxy_session = galaxy_session
+            prev_galaxy_session.is_valid = False
+            galaxy_session = None
+        # No relevant cookies, or couldn't find, or invalid, so create a new session
+        if galaxy_session is None:
+            galaxy_session = self.__create_new_session( prev_galaxy_session, user_for_new_session )
+            galaxy_session_requires_flush = True
+            self.galaxy_session = galaxy_session
+            self.__update_session_cookie( name=session_cookie )
+        else:
+            self.galaxy_session = galaxy_session
+        # Do we need to flush the session?
+        if galaxy_session_requires_flush:
+            self.sa_session.add( galaxy_session )
+            # FIXME: If prev_session is a proper relation this would not
+            #        be needed.
+            if prev_galaxy_session:
+                self.sa_session.add( prev_galaxy_session )
+            self.sa_session.flush()
+        # If the old session was invalid, get a new (or existing default,
+        # unused) history with our new session
+        if invalidate_existing_session:
+            self.get_or_create_default_history()
+
+    def _ensure_logged_in_user( self, environ, session_cookie ):
+        # The value of session_cookie can be one of
+        # 'galaxysession' or 'galaxycommunitysession'
+        # Currently this method does nothing unless session_cookie is 'galaxysession'
+        if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
+            # TODO: re-engineer to eliminate the use of allowed_paths
+            # as maintenance overhead is far too high.
+            allowed_paths = [
+                # client app route
+                # TODO: might be better as '/:username/login', '/:username/logout'
+                url_for( controller='root', action='login' ),
+                # mako app routes
+                url_for( controller='user', action='login' ),
+                url_for( controller='user', action='logout' ),
+                url_for( controller='user', action='reset_password' ),
+                url_for( controller='user', action='change_password' ),
+                # required to log in w/ openid
+                url_for( controller='user', action='openid_auth' ),
+                url_for( controller='user', action='openid_process' ),
+                url_for( controller='user', action='openid_associate' ),
+                # TODO: do any of these still need to bypass require login?
+                url_for( controller='user', action='api_keys' ),
+                url_for( controller='user', action='create' ),
+                url_for( controller='user', action='index' ),
+                url_for( controller='user', action='manage_user_info' ),
+                url_for( controller='user', action='set_default_permissions' ),
+            ]
+            # append the welcome url to allowed paths if we'll show it at the login screen
+            if self.app.config.show_welcome_with_login:
+                allowed_paths.append( url_for( controller='root', action='welcome' ) )
+
+            # prevent redirect when UCSC server attempts to get dataset contents as 'anon' user
+            display_as = url_for( controller='root', action='display_as' )
+            if self.app.datatypes_registry.get_display_sites('ucsc') and self.request.path == display_as:
+                try:
+                    host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
+                except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
+                    host = None
+                if host in UCSC_SERVERS:
+                    return
+            # prevent redirect for external, enabled display applications getting dataset contents
+            external_display_path = url_for( controller='', action='display_application' )
+            if self.request.path.startswith( external_display_path ):
+                request_path_split = self.request.path.split( '/' )
+                try:
+                    if (self.app.datatypes_registry.display_applications.get( request_path_split[-5] ) and
+                            request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links and
+                            request_path_split[-3] != 'None'):
+                        return
+                except IndexError:
+                    pass
+            # redirect to root if the path is not in the list above
+            if self.request.path not in allowed_paths:
+                login_url = url_for( controller='root', action='login', redirect=self.request.path )
+                self.response.send_redirect( login_url )
+
+    def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
+        """
+        Create a new GalaxySession for this request, possibly with a connection
+        to a previous session (in `prev_galaxy_session`) and an existing user
+        (in `user_for_new_session`).
+
+        Caller is responsible for flushing the returned session.
+        """
+        session_key = self.security.get_new_guid()
+        galaxy_session = self.app.model.GalaxySession(
+            session_key=session_key,
+            is_valid=True,
+            remote_host=self.request.remote_host,
+            remote_addr=self.request.remote_addr,
+            referer=self.request.headers.get( 'Referer', None ) )
+        if prev_galaxy_session:
+            # Invalidated an existing session for some reason, keep track
+            galaxy_session.prev_session_id = prev_galaxy_session.id
+        if user_for_new_session:
+            # The new session should be associated with the user
+            galaxy_session.user = user_for_new_session
+        return galaxy_session
+
+    def get_or_create_remote_user( self, remote_user_email ):
+        """
+        Create a remote user with the email remote_user_email and return it
+        """
+        if not self.app.config.use_remote_user:
+            return None
+        if getattr( self.app.config, "normalize_remote_user_email", False ):
+            remote_user_email = remote_user_email.lower()
+        user = self.sa_session.query( self.app.model.User).filter( self.app.model.User.table.c.email == remote_user_email ).first()
+        if user:
+            # GVK: June 29, 2009 - This is to correct the behavior of a previous bug where a private
+            # role and default user / history permissions were not set for remote users.  When a
+            # remote user authenticates, we'll look for this information, and if missing, create it.
+            if not self.app.security_agent.get_private_user_role( user ):
+                self.app.security_agent.create_private_user_role( user )
+            if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
+                if not user.default_permissions:
+                    self.app.security_agent.user_set_default_permissions( user )
+                    self.app.security_agent.user_set_default_permissions( user, history=True, dataset=True )
+        else:
+            username = remote_user_email.split( '@', 1 )[0].lower()
+            random.seed()
+            user = self.app.model.User( email=remote_user_email )
+            user.set_password_cleartext( ''.join( random.sample( string.letters + string.digits, 12 ) ) )
+            user.external = True
+            # Replace invalid characters in the username
+            for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
+                username = username.replace( char, '-' )
+            # Find a unique username - user can change it later
+            if self.sa_session.query( self.app.model.User ).filter_by( username=username ).first():
+                i = 1
+                while self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first():
+                    i += 1
+                username += '-' + str(i)
+            user.username = username
+            self.sa_session.add( user )
+            self.sa_session.flush()
+            self.app.security_agent.create_private_user_role( user )
+            # We set default user permissions, before we log in and set the default history permissions
+            if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
+                self.app.security_agent.user_set_default_permissions( user )
+            # self.log_event( "Automatically created account '%s'", user.email )
+        return user
+
+    def __update_session_cookie( self, name='galaxysession' ):
+        """
+        Update the session cookie to match the current session.
+        """
+        self.set_cookie( self.security.encode_guid(self.galaxy_session.session_key ),
+                         name=name, path=self.app.config.cookie_path )
+
+    def handle_user_login( self, user ):
+        """
+        Login a new user (possibly newly created)
+
+           - create a new session
+           - associate new session with user
+           - if old session had a history and it was not associated with a user, associate it with the new session,
+             otherwise associate the current session's history with the user
+           - add the disk usage of the current session to the user's total disk usage
+        """
+        # Set the previous session
+        prev_galaxy_session = self.galaxy_session
+        prev_galaxy_session.is_valid = False
+        # Define a new current_session
+        self.galaxy_session = self.__create_new_session( prev_galaxy_session, user )
+        if self.webapp.name == 'galaxy':
+            cookie_name = 'galaxysession'
+            # Associate the current user's last-accessed history (if any) with their new session
+            history = None
+            try:
+                users_last_session = user.galaxy_sessions[0]
+                last_accessed = True
+            except IndexError:
+                users_last_session = None
+                last_accessed = False
+            if (prev_galaxy_session.current_history and not
+                    prev_galaxy_session.current_history.deleted and
+                    prev_galaxy_session.current_history.datasets):
+                if prev_galaxy_session.current_history.user is None or prev_galaxy_session.current_history.user == user:
+                    # If the previous galaxy session had a history, associate it with the new
+                    # session, but only if it didn't belong to a different user.
+                    history = prev_galaxy_session.current_history
+                    if prev_galaxy_session.user is None:
+                        # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
+                        for hda in history.datasets:
+                            user.adjust_total_disk_usage(hda.quota_amount(user))
+            elif self.galaxy_session.current_history:
+                history = self.galaxy_session.current_history
+            if (not history and users_last_session and
+                    users_last_session.current_history and not
+                    users_last_session.current_history.deleted):
+                history = users_last_session.current_history
+            elif not history:
+                history = self.get_history( create=True, most_recent=True )
+            if history not in self.galaxy_session.histories:
+                self.galaxy_session.add_history( history )
+            if history.user is None:
+                history.user = user
+            self.galaxy_session.current_history = history
+            if not last_accessed:
+                # Only set default history permissions if current history is not from a previous session
+                self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
+            self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
+        else:
+            cookie_name = 'galaxycommunitysession'
+            self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
+        self.sa_session.flush()
+        # Write the session cookie chosen above; the Galaxy reports webapp never calls this method
+        self.__update_session_cookie( name=cookie_name )
+
+    def handle_user_logout( self, logout_all=False ):
+        """
+        Logout the current user:
+           - invalidate the current session
+           - create a new session with no user associated
+        """
+        prev_galaxy_session = self.galaxy_session
+        prev_galaxy_session.is_valid = False
+        self.galaxy_session = self.__create_new_session( prev_galaxy_session )
+        self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
+        galaxy_user_id = prev_galaxy_session.user_id
+        if logout_all and galaxy_user_id is not None:
+            for other_galaxy_session in ( self.sa_session.query(self.app.model.GalaxySession)
+                                          .filter( and_( self.app.model.GalaxySession.table.c.user_id == galaxy_user_id,
+                                                         self.app.model.GalaxySession.table.c.is_valid == true(),
+                                                         self.app.model.GalaxySession.table.c.id != prev_galaxy_session.id ) ) ):
+                other_galaxy_session.is_valid = False
+                self.sa_session.add( other_galaxy_session )
+        self.sa_session.flush()
+        if self.webapp.name == 'galaxy':
+            # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
+            self.__update_session_cookie( name='galaxysession' )
+        elif self.webapp.name == 'tool_shed':
+            self.__update_session_cookie( name='galaxycommunitysession' )
+
+    def get_galaxy_session( self ):
+        """
+        Return the current galaxy session
+        """
+        return self.galaxy_session
+
+    def get_history( self, create=False, most_recent=False ):
+        """
+        Load the current history.
+            - If that isn't available, we find the most recently updated history.
+            - If *that* isn't available, we get or create the default history.
+        Transactions will not always have an active history (e.g. API
+        requests), so None is a valid response.
+        """
+        history = None
+        if self.galaxy_session:
+            if hasattr( self.galaxy_session, 'current_history' ):
+                history = self.galaxy_session.current_history
+        if not history and most_recent:
+            history = self.get_most_recent_history()
+        if not history and util.string_as_bool( create ):
+            history = self.get_or_create_default_history()
+        return history
+
+    def set_history( self, history ):
+        if history and not history.deleted:
+            self.galaxy_session.current_history = history
+        self.sa_session.add( self.galaxy_session )
+        self.sa_session.flush()
+
+    history = property( get_history, set_history )
+
+    def get_or_create_default_history( self ):
+        """
+        Gets or creates a default history and associates it with the current
+        session.
+        """
+
+        # There must be a user to fetch a default history.
+        if not self.galaxy_session.user:
+            return self.new_history()
+
+        # Look for a history that (a) has the default name and is not deleted
+        # and (b) contains no datasets. If a suitable history is found, use
+        # it; otherwise, create a new one.
+        unnamed_histories = self.sa_session.query( self.app.model.History ).filter_by(
+            user=self.galaxy_session.user,
+            name=self.app.model.History.default_name,
+            deleted=False )
+        default_history = None
+        for history in unnamed_histories:
+            if len( history.datasets ) == 0:
+                # Found suitable default history.
+                default_history = history
+                break
+
+        # Set or create history.
+        if default_history:
+            history = default_history
+            self.set_history( history )
+        else:
+            history = self.new_history()
+
+        return history
+
+    def get_most_recent_history( self ):
+        """
+        Gets the most recently updated history.
+        """
+        # There must be a user to fetch histories, and without a user we have
+        # no recent history.
+        if not self.galaxy_session.user:
+            return None
+        # Note: first() returns None rather than raising NoResultFound when no
+        # history matches.
+        recent_history = self.sa_session.query( self.app.model.History ).filter_by(
+            user=self.galaxy_session.user,
+            deleted=False ).order_by( self.app.model.History.update_time.desc() ).first()
+        if recent_history is None:
+            return None
+        self.set_history(recent_history)
+        return recent_history
+
+    def new_history( self, name=None ):
+        """
+        Create a new history and associate it with the current session and
+        its associated user (if set).
+        """
+        # Create new history
+        history = self.app.model.History()
+        if name:
+            history.name = name
+        # Associate with session
+        history.add_galaxy_session( self.galaxy_session )
+        # Make it the session's current history
+        self.galaxy_session.current_history = history
+        # Associate with user
+        if self.galaxy_session.user:
+            history.user = self.galaxy_session.user
+        # Track genome_build with history
+        history.genome_build = self.app.genome_builds.default_value
+        # Set the user's default history permissions
+        self.app.security_agent.history_set_default_permissions( history )
+        # Save
+        self.sa_session.add_all( ( self.galaxy_session, history ) )
+        self.sa_session.flush()
+        return history
+
+    @base.lazy_property
+    def template_context( self ):
+        return dict()
+
+    def set_message( self, message, type=None ):
+        """
+        Convenience method for setting the 'message' and 'status' elements
+        of the template context.
+        """
+        self.template_context['message'] = message
+        if type:
+            self.template_context['status'] = type
+
+    def get_message( self ):
+        """
+        Convenience method for getting the 'message' element of the template
+        context.
+        """
+        return self.template_context['message']
+
+    def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
+        """
+        Convenience method for displaying a simple page with a single message.
+
+        `type`: one of "error", "warning", "info", or "done"; determines the
+                type of dialog box and icon displayed with the message
+
+        `refresh_frames`: names of frames in the interface that should be
+                          refreshed when the message is displayed
+        """
+        return self.fill_template( "message.mako", status=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
+
+    def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
+        """
+        Convenience method for displaying an error message. See `show_message`.
+        """
+        return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
+
+    def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
+        """
+        Convenience method for displaying an ok message. See `show_message`.
+        """
+        return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
+
+    def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
+        """
+        Convenience method for displaying a warning message. See `show_message`.
+        """
+        return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
+
+    def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
+        """
+        Convenience method for displaying a simple page with a single HTML
+        form.
+        """
+        return self.fill_template( template, form=form, header=header,
+                                   use_panels=( form.use_panels or use_panels ),
+                                   active_view=active_view )
+
+    def fill_template(self, filename, **kwargs):
+        """
+        Fill in a template, putting any keyword arguments on the context.
+        """
+        # call get_user so we can invalidate sessions from external users,
+        # if external auth has been disabled.
+        self.get_user()
+        if filename.endswith( ".mako" ):
+            return self.fill_template_mako( filename, **kwargs )
+        else:
+            template = Template( file=os.path.join(self.app.config.template_path, filename),
+                                 searchList=[kwargs, self.template_context, dict(caller=self, t=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app)] )
+            return str( template )
+
+    def fill_template_mako( self, filename, template_lookup=None, **kwargs ):
+        template_lookup = template_lookup or self.webapp.mako_template_lookup
+        template = template_lookup.get_template( filename )
+        template.output_encoding = 'utf-8'
+
+        data = dict( caller=self, t=self, trans=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app )
+        data.update( self.template_context )
+        data.update( kwargs )
+        return template.render( **data )
+
+    def stream_template_mako( self, filename, **kwargs ):
+        template = self.webapp.mako_template_lookup.get_template( filename )
+        template.output_encoding = 'utf-8'
+        data = dict( caller=self, t=self, trans=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app )
+        data.update( self.template_context )
+        data.update( kwargs )
+
+        def render( environ, start_response ):
+            response_write = start_response( self.response.wsgi_status(), self.response.wsgi_headeritems() )
+
+            class StreamBuffer( object ):
+                def write( self, d ):
+                    response_write( d.encode( 'utf-8' ) )
+            buffer = StreamBuffer()
+            context = mako.runtime.Context( buffer, **data )
+            template.render_context( context )
+            return []
+        return render
+
+    def fill_template_string(self, template_string, context=None, **kwargs):
+        """
+        Fill in a template, putting any keyword arguments on the context.
+        """
+        template = Template( source=template_string,
+                             searchList=[context or kwargs, dict(caller=self)] )
+        return str(template)
+
+
+def build_url_map( app, global_conf, local_conf ):
+    from paste.urlmap import URLMap
+    from galaxy.web.framework.middleware.static import CacheableStaticURLParser as Static
+    urlmap = URLMap()
+    # Merge the global and local configurations
+    conf = global_conf.copy()
+    conf.update(local_conf)
+    # Get cache time in seconds
+    cache_time = conf.get( "static_cache_time", None )
+    if cache_time is not None:
+        cache_time = int( cache_time )
+    # Send to dynamic app by default
+    urlmap["/"] = app
+    # Define static mappings from config
+    urlmap["/static"] = Static( conf.get( "static_dir", "./static/" ), cache_time )
+    urlmap["/images"] = Static( conf.get( "static_images_dir", "./static/images" ), cache_time )
+    urlmap["/static/scripts"] = Static( conf.get( "static_scripts_dir", "./static/scripts/" ), cache_time )
+    urlmap["/static/style"] = Static( conf.get( "static_style_dir", "./static/style/blue" ), cache_time )
+    urlmap["/favicon.ico"] = Static( conf.get( "static_favicon_dir", "./static/favicon.ico" ), cache_time )
+    urlmap["/robots.txt"] = Static( conf.get( "static_robots_txt", "./static/robots.txt" ), cache_time )
+    return urlmap, cache_time
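+
+# A minimal sketch of the paste config keys consumed above (values are
+# illustrative, not required settings):
+#   static_cache_time = 360
+#   static_dir = static/
+#   static_style_dir = static/style/blue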
diff --git a/lib/galaxy/web/params.py b/lib/galaxy/web/params.py
new file mode 100644
index 0000000..b141f40
--- /dev/null
+++ b/lib/galaxy/web/params.py
@@ -0,0 +1,31 @@
+"""
+Mixins for parsing web form and API parameters
+"""
+from galaxy import util
+
+
+class BaseParamParser( object ):
+    def get_params( self, kwargs ):
+        params = util.Params( kwargs )
+        # set defaults if unset
+        updates = dict( webapp=params.get( 'webapp', 'galaxy' ),
+                        message=util.restore_text( params.get( 'message', '' ) ),
+                        status=util.restore_text( params.get( 'status', 'done' ) ) )
+        params.update( updates )
+        return params
+
+
+class QuotaParamParser( BaseParamParser ):
+    def get_quota_params( self, kwargs ):
+        params = self.get_params( kwargs )
+        updates = dict( name=util.restore_text( params.get( 'name', '' ) ),
+                        description=util.restore_text( params.get( 'description', '' ) ),
+                        amount=util.restore_text( params.get( 'amount', '' ).strip() ),
+                        operation=params.get( 'operation', '' ),
+                        default=params.get( 'default', '' ),
+                        in_users=util.listify( params.get( 'in_users', [] ) ),
+                        out_users=util.listify( params.get( 'out_users', [] ) ),
+                        in_groups=util.listify( params.get( 'in_groups', [] ) ),
+                        out_groups=util.listify( params.get( 'out_groups', [] ) ) )
+        params.update( updates )
+        return params
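+
+
+# Usage sketch (hypothetical kwargs, e.g. from a quota admin form):
+#   params = QuotaParamParser().get_quota_params( dict( name='default', amount='50 GB' ) )
+#   params.get( 'amount' ) -> '50 GB'; params.get( 'webapp' ) -> 'galaxy'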
diff --git a/lib/galaxy/web/proxy/__init__.py b/lib/galaxy/web/proxy/__init__.py
new file mode 100644
index 0000000..8db25a7
--- /dev/null
+++ b/lib/galaxy/web/proxy/__init__.py
@@ -0,0 +1,263 @@
+import logging
+import os
+import json
+
+from galaxy.util.filelock import FileLock
+from galaxy.util import sockets
+from galaxy.util.lazy_process import LazyProcess, NoOpLazyProcess
+from galaxy.util import sqlite
+from galaxy.util import unique_id
+import urllib2
+import time
+
+log = logging.getLogger( __name__ )
+
+
+DEFAULT_PROXY_TO_HOST = "localhost"
+SECURE_COOKIE = "galaxysession"
+# Note: the golang proxy API key below is randomly generated at every launch unless configured
+
+
+class ProxyManager(object):
+
+    def __init__( self, config ):
+        for option in ["manage_dynamic_proxy", "dynamic_proxy_bind_port",
+                       "dynamic_proxy_bind_ip", "dynamic_proxy_debug",
+                       "dynamic_proxy_external_proxy", "dynamic_proxy_prefix",
+                       "proxy_session_map",
+                       "dynamic_proxy", "cookie_path",
+                       "dynamic_proxy_golang_noaccess",
+                       "dynamic_proxy_golang_clean_interval",
+                       "dynamic_proxy_golang_docker_address",
+                       "dynamic_proxy_golang_api_key"]:
+
+            setattr( self, option, getattr( config, option ) )
+
+        if self.manage_dynamic_proxy:
+            self.lazy_process = self.__setup_lazy_process( config )
+        else:
+            self.lazy_process = NoOpLazyProcess()
+
+        if self.dynamic_proxy_golang_api_key is None:
+            self.dynamic_proxy_golang_api_key = unique_id()
+
+        self.proxy_ipc = proxy_ipc(config)
+
+    def shutdown( self ):
+        self.lazy_process.shutdown()
+
+    def setup_proxy( self, trans, host=DEFAULT_PROXY_TO_HOST, port=None, proxy_prefix="", route_name="", container_ids=None ):
+        if self.manage_dynamic_proxy:
+            log.info("Attempting to start dynamic proxy process")
+            log.debug("Cmd: " + ' '.join(self.lazy_process.command_and_args))
+            self.lazy_process.start_process()
+
+        if container_ids is None:
+            container_ids = []
+
+        authentication = AuthenticationToken(trans)
+        proxy_requests = ProxyRequests(host=host, port=port)
+        self.proxy_ipc.handle_requests(
+            authentication,
+            proxy_requests,
+            '/%s' % route_name,
+            container_ids
+        )
+        # TODO: These shouldn't need to be request.host and request.scheme -
+        # though they are reasonable defaults.
+        host = trans.request.host
+        if ':' in host:
+            host = host[0:host.index(':')]
+        scheme = trans.request.scheme
+        if not self.dynamic_proxy_external_proxy:
+            proxy_url = '%s://%s:%d' % (scheme, host, self.dynamic_proxy_bind_port)
+        else:
+            proxy_url = '%s://%s%s' % (scheme, host, proxy_prefix)
+        return {
+            'proxy_url': proxy_url,
+            'proxied_port': proxy_requests.port,
+            'proxied_host': proxy_requests.host,
+        }
+
+    def __setup_lazy_process( self, config ):
+        launcher = self.proxy_launcher()
+        command = launcher.launch_proxy_command(config)
+        return LazyProcess(command)
+
+    def proxy_launcher(self):
+        if self.dynamic_proxy == "node":
+            return NodeProxyLauncher()
+        elif self.dynamic_proxy == "golang":
+            return GolangProxyLauncher()
+        else:
+            raise Exception("Unknown proxy type")
+
+
+class ProxyLauncher(object):
+
+    def launch_proxy_command(self, config):
+        raise NotImplementedError()
+
+
+class NodeProxyLauncher(object):
+
+    def launch_proxy_command(self, config):
+        args = [
+            "--sessions", config.proxy_session_map,
+            "--ip", config.dynamic_proxy_bind_ip,
+            "--port", str(config.dynamic_proxy_bind_port),
+            "--reverseProxy",
+        ]
+        if config.dynamic_proxy_debug:
+            args.append("--verbose")
+
+        parent_directory = os.path.dirname( __file__ )
+        path_to_application = os.path.join( parent_directory, "js", "lib", "main.js" )
+        command = [ path_to_application ] + args
+        return command
+
+
+class GolangProxyLauncher(object):
+
+    def launch_proxy_command(self, config):
+        args = [
+            "gxproxy",  # Must be on path. TODO: wheel?
+            "--listenAddr", '%s:%d' % (
+                config.dynamic_proxy_bind_ip,
+                config.dynamic_proxy_bind_port,
+            ),
+            "--listenPath", "/".join((
+                config.cookie_path,
+                config.dynamic_proxy_prefix
+            )),
+            "--cookieName", "galaxysession",
+            "--storage", config.proxy_session_map.replace('.sqlite', '.xml'),  # just in case.
+            "--apiKey", config.dynamic_proxy_golang_api_key,
+            "--noAccess", config.dynamic_proxy_golang_noaccess,
+            "--cleanInterval", config.dynamic_proxy_golang_clean_interval,
+            "--dockerAddr", config.dynamic_proxy_golang_docker_address,
+        ]
+        if config.dynamic_proxy_debug:
+            args.append("--verbose")
+        return args
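+        # Produces a command line along the lines of (values illustrative):
+        #   gxproxy --listenAddr 0.0.0.0:8800 --listenPath /galaxy/gie_proxy \
+        #           --cookieName galaxysession --storage .../session_map.xml ...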
+
+
+class AuthenticationToken(object):
+
+    def __init__(self, trans):
+        self.cookie_name = SECURE_COOKIE
+        self.cookie_value = trans.get_cookie( self.cookie_name )
+
+
+class ProxyRequests(object):
+
+    def __init__(self, host=None, port=None):
+        if host is None:
+            host = DEFAULT_PROXY_TO_HOST
+        if port is None:
+            port = sockets.unused_port()
+            log.info("Obtained unused port %d" % port)
+        self.host = host
+        self.port = port
+
+
+def proxy_ipc(config):
+    proxy_session_map = config.proxy_session_map
+    if config.dynamic_proxy == "node":
+        if proxy_session_map.endswith(".sqlite"):
+            return SqliteProxyIpc(proxy_session_map)
+        else:
+            return JsonFileProxyIpc(proxy_session_map)
+    elif config.dynamic_proxy == "golang":
+        return RestGolangProxyIpc(config)
+
+
+class ProxyIpc(object):
+
+    def handle_requests(self, authentication, proxy_requests, route_name, container_ids):
+        raise NotImplementedError()
+
+
+class JsonFileProxyIpc(object):
+
+    def __init__(self, proxy_session_map):
+        self.proxy_session_map = proxy_session_map
+
+    def handle_requests(self, authentication, proxy_requests, route_name, container_ids):
+        key = "%s:%s" % ( proxy_requests.host, proxy_requests.port )
+        secure_id = authentication.cookie_value
+        with FileLock( self.proxy_session_map ):
+            if not os.path.exists( self.proxy_session_map ):
+                open( self.proxy_session_map, "w" ).write( "{}" )
+            json_data = open( self.proxy_session_map, "r" ).read()
+            session_map = json.loads( json_data )
+            to_remove = []
+            for k, value in session_map.items():
+                if value == secure_id:
+                    to_remove.append( k )
+            for k in to_remove:
+                del session_map[ k ]
+            session_map[ key ] = secure_id
+            new_json_data = json.dumps( session_map )
+            open( self.proxy_session_map, "w" ).write( new_json_data )
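+            # The file then maps "host:port" keys to the galaxysession cookie
+            # value allowed to reach that backend, e.g. (hypothetical values):
+            #   { "localhost:32768": "c6ca0ddb55be603a..." }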
+
+
+class SqliteProxyIpc(object):
+
+    def __init__(self, proxy_session_map):
+        self.proxy_session_map = proxy_session_map
+
+    def handle_requests(self, authentication, proxy_requests, route_name, container_ids):
+        key = "%s:%s" % ( proxy_requests.host, proxy_requests.port )
+        secure_id = authentication.cookie_value
+        with FileLock( self.proxy_session_map ):
+            conn = sqlite.connect(self.proxy_session_map)
+            try:
+                c = conn.cursor()
+                try:
+                    # Create the routes table on first use
+                    c.execute('''CREATE TABLE gxproxy
+                                 (key text PRIMARY KEY, secret text)''')
+                except Exception:
+                    pass
+                # Parameterized so host/cookie values cannot break (or inject
+                # into) the SQL; OR REPLACE keeps one row per host:port key.
+                c.execute('INSERT OR REPLACE INTO gxproxy (key, secret) VALUES (?, ?)',
+                          (key, secure_id))
+                conn.commit()
+            finally:
+                conn.close()
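+        # The gxproxy table holds the same "host:port" -> cookie mapping that
+        # the node proxy polls via js/lib/mapper.js.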
+
+
+class RestGolangProxyIpc(object):
+
+    def __init__(self, config):
+        self.config = config
+        self.api_url = 'http://127.0.0.1:%s/api?api_key=%s' % (self.config.dynamic_proxy_bind_port, self.config.dynamic_proxy_golang_api_key)
+
+    def handle_requests(self, authentication, proxy_requests, route_name, container_ids, sleep=1):
+        """Make a POST request to the GO proxy to register a route
+        """
+        values = {
+            'FrontendPath': route_name,
+            'BackendAddr': "%s:%s" % ( proxy_requests.host, proxy_requests.port ),
+            'AuthorizedCookie': authentication.cookie_value,
+            'ContainerIds': container_ids,
+        }
+
+        req = urllib2.Request(self.api_url)
+        req.add_header('Content-Type', 'application/json')
+
+        # Sometimes it takes our poor little proxy a second or two to get
+        # going, so if this fails, re-call ourselves with an increased delay.
+        try:
+            urllib2.urlopen(req, json.dumps(values))
+        except urllib2.URLError as err:
+            log.debug(err)
+            if sleep > 5:
+                excp = "Could not contact proxy after %s seconds" % sum(range(sleep + 1))
+                raise Exception(excp)
+            time.sleep(sleep)
+            self.handle_requests(authentication, proxy_requests, route_name, container_ids, sleep=sleep + 1)
+
+# TODO: MQ-driven proxy?
diff --git a/lib/galaxy/web/proxy/js/Dockerfile b/lib/galaxy/web/proxy/js/Dockerfile
new file mode 100644
index 0000000..05e057d
--- /dev/null
+++ b/lib/galaxy/web/proxy/js/Dockerfile
@@ -0,0 +1,18 @@
+# Have not yet gotten this to work - the goal was to launch the proxy in a
+# Docker container. Networking is a bit tricky, though - could not get the
+# child proxy to talk to the child IPython container.
+
+
+# sudo docker build --no-cache=true -t gxproxy .
+# sudo docker run --net host -v /home/john/workspace/galaxy-central/database:/var/gxproxy -p 8800:8800 -t gxproxy lib/main.js --sessions /var/gxproxy/session_map.json --ip 0.0.0.0 --port 8800
+
+FROM node:0.11.13
+
+RUN mkdir -p /usr/src/gxproxy
+WORKDIR /usr/src/gxproxy
+
+ADD package.json /usr/src/gxproxy/
+RUN npm install
+ADD . /usr/src/gxproxy
+
+CMD [ "lib/main.js" ]
diff --git a/lib/galaxy/web/proxy/js/README.md b/lib/galaxy/web/proxy/js/README.md
new file mode 100644
index 0000000..f397ef8
--- /dev/null
+++ b/lib/galaxy/web/proxy/js/README.md
@@ -0,0 +1,2 @@
+# A dynamic configurable reverse proxy for use within Galaxy
+
diff --git a/lib/galaxy/web/proxy/js/lib/main.js b/lib/galaxy/web/proxy/js/lib/main.js
new file mode 100755
index 0000000..fa10738
--- /dev/null
+++ b/lib/galaxy/web/proxy/js/lib/main.js
@@ -0,0 +1,47 @@
+#!/usr/bin/env node
+/*
+Inspiration taken from
+	https://github.com/jupyter/multiuser-server/blob/master/multiuser/js/main.js
+*/
+var fs = require('fs');
+var args = require('commander');
+
+var package_info = require('../package');
+
+args
+    .version(package_info.version)
+    .option('--ip <n>', 'Public-facing IP of the proxy', 'localhost')
+    .option('--port <n>', 'Public-facing port of the proxy', parseInt)
+    .option('--cookie <cookiename>', 'Cookie proving authentication', 'galaxysession')
+    .option('--sessions <file>', 'Routes file to monitor')
+    .option('--reverseProxy', 'Cause the proxy to rewrite location blocks with its own port')
+    .option('--verbose')
+
+args.parse(process.argv);
+
+var DynamicProxy = require('./proxy.js').DynamicProxy;
+var mapFor = require('./mapper.js').mapFor;
+
+var sessions = mapFor(args.sessions);
+
+var dynamic_proxy_options = {
+  sessionCookie: args['cookie'],
+  sessionMap: sessions,
+  verbose: args.verbose,
+  port: args['port']
+}
+
+if(args.reverseProxy){
+    dynamic_proxy_options.reverseProxy = true;
+}
+
+var dynamic_proxy = new DynamicProxy(dynamic_proxy_options);
+
+var listen = {};
+listen.port = args.port || 8000;
+listen.ip = args.ip;
+
+if(args.verbose) {
+	console.log("Listening on " + listen.ip + ":" + listen.port);
+}
+dynamic_proxy.proxy_server.listen(listen.port, listen.ip);
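+
+// Example invocation (paths and ports illustrative):
+//   node lib/main.js --sessions database/session_map.sqlite \
+//       --ip 0.0.0.0 --port 8800 --reverseProxy --verbose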
diff --git a/lib/galaxy/web/proxy/js/lib/mapper.js b/lib/galaxy/web/proxy/js/lib/mapper.js
new file mode 100644
index 0000000..20f0f14
--- /dev/null
+++ b/lib/galaxy/web/proxy/js/lib/mapper.js
@@ -0,0 +1,78 @@
+var fs = require('fs');
+var sqlite3 = require('sqlite3')
+
+
+var endsWith = function(subjectString, searchString) {
+    var position = subjectString.length;
+    position -= searchString.length;
+    var lastIndex = subjectString.indexOf(searchString, position);
+    return lastIndex !== -1 && lastIndex === position;
+};
+
+
+var updateFromJson = function(path, map) {
+    var content = fs.readFileSync(path, 'utf8');
+    var keyToSession = JSON.parse(content);
+    var newSessions = {};
+    for(var key in keyToSession) {
+        var hostAndPort = key.split(":");
+        // 'host': hostAndPort[0],
+        newSessions[keyToSession[key]] = {'target': {'host': hostAndPort[0], 'port': parseInt(hostAndPort[1])}};
+    }
+    for(var oldSession in map) {
+        if(!(oldSession in newSessions)) {
+            delete map[ oldSession ];
+        }
+    }
+    for(var newSession in newSessions) {
+        map[newSession] = newSessions[newSession];
+    }
+}
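+// The routes file maps "host:port" -> session secret; the in-memory map is
+// inverted to secret -> target, e.g. (hypothetical values):
+//   { "localhost:32768": "c6ca0ddb..." }
+//     becomes { "c6ca0ddb...": { target: { host: "localhost", port: 32768 } } }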
+
+var updateFromSqlite = function(path, map) {
+    var newSessions = {};
+    var loadSessions = function() {
+        db.each("SELECT key, secret FROM gxproxy", function(err, row) {
+            var key = row['key'];
+            var secret = row['secret'];
+            var hostAndPort = key.split(":");
+            var target = {'host': hostAndPort[0], 'port': parseInt(hostAndPort[1])};
+            newSessions[secret] = {'target': target};
+        }, finish);
+    };
+
+    var finish = function() {
+        for(var oldSession in map) {
+            if(!(oldSession in newSessions)) {
+                delete map[ oldSession ];
+            }
+        }
+        for(var newSession in newSessions) {
+            map[newSession] = newSessions[newSession];
+        }
+        db.close();
+    };
+
+    var db = new sqlite3.Database(path, loadSessions);
+};
+
+
+var mapFor = function(path) {
+    var map = {};
+    var loadMap;
+    if(endsWith(path, '.sqlite')) {
+        loadMap = function() {
+            updateFromSqlite(path, map);
+        }
+    } else {
+        loadMap = function() {
+            updateFromJson(path, map);
+        }
+    }
+    console.log("Watching path " + path);
+    loadMap();
+    fs.watch(path, loadMap);
+    return map;
+};
+
+exports.mapFor = mapFor;
\ No newline at end of file
diff --git a/lib/galaxy/web/proxy/js/lib/proxy.js b/lib/galaxy/web/proxy/js/lib/proxy.js
new file mode 100644
index 0000000..95ebd8e
--- /dev/null
+++ b/lib/galaxy/web/proxy/js/lib/proxy.js
@@ -0,0 +1,175 @@
+var http = require('http'),
+    httpProxy = require('http-proxy');
+
+var bound = function (that, method) {
+    // bind a method, to ensure `this=that` when it is called
+    // because prototype languages are bad
+    return function () {
+        method.apply(that, arguments);
+    };
+};
+
+var DynamicProxy = function(options) {
+    var dynamicProxy = this;
+    this.sessionCookie = options.sessionCookie;
+    this.sessionMap = options.sessionMap;
+    this.debug = options.verbose;
+    this.reverseProxy = options.reverseProxy;
+    this.port = options.port;
+
+    var log_errors = function(handler) {
+        return function (req, res) {
+            try {
+                return handler.apply(dynamicProxy, arguments);
+            } catch (e) {
+                console.log("Error in handler for " + req.method + ' ' + req.url + ': ', e);
+            }
+        };
+    };
+
+    var proxy = this.proxy = httpProxy.createProxyServer({
+        ws : true,
+    });
+
+    this.proxy_server = http.createServer(
+        log_errors(dynamicProxy.handleProxyRequest)
+    );
+    this.proxy_server.on('upgrade', bound(this, this.handleWs));
+};
+
+DynamicProxy.prototype.rewriteRequest = function(request) {
+    if(request.url.indexOf('rstudio') != -1){
+        var remap = {
+            'content-type': 'Content-Type',
+            'content-length': 'Content-Length',
+        }
+        // RStudio isn't spec compliant and pitches a fit on NodeJS's http module's lowercase HTTP headers
+        for(var i = 0; i<Object.keys(remap).length; i++){
+            var key = Object.keys(remap)[i];
+            if(key in request.headers){
+                request.headers[remap[key]] = request.headers[key];
+                delete request.headers[key];
+            }
+        }
+        if('Content-Type' in request.headers && request.headers['Content-Type'] == 'application/x-www-form-urlencoded; charset=UTF-8'){
+            request.headers['Content-Type'] = 'application/x-www-form-urlencoded';
+        }
+    }
+}
+
+DynamicProxy.prototype.targetForRequest = function(request) {
+    // return proxy target for a given url
+    var session = this.findSession(request);
+    for (var mappedSession in this.sessionMap) {
+        if(session == mappedSession) {
+            return this.sessionMap[session].target;
+        }
+    }
+
+    return null;
+};
+
+DynamicProxy.prototype.findSession = function(request) {
+    var sessionCookie = this.sessionCookie,
+        rc = request.headers.cookie;
+    if(!rc) {
+        return null;
+    }
+    var cookies = rc.split(';');
+    for(var cookieIndex in cookies) {
+        var cookie = cookies[cookieIndex];
+        var parts = cookie.split('=');
+        var partName = parts.shift().trim();
+        if(partName == sessionCookie) {
+            return unescape(parts.join('='))
+        }
+    }
+
+    return null;
+};
+
+DynamicProxy.prototype.handleProxyRequest = function(req, res) {
+    var othis = this;
+    var target = this.targetForRequest(req);
+    if(this.debug) {
+        console.log("PROXY " + req.method + " " + req.url + " to " + target.host + ':' + target.port);
+    }
+    var origin = req.headers.origin;
+    this.rewriteRequest(req);
+    res.oldWriteHead = res.writeHead;
+
+    res.writeHead = function(statusCode, headers) {
+        if(othis.reverseProxy && statusCode === 302){
+            if(res && res._headers){
+                if(othis.debug){
+                    console.log("Original Location Header: " + res._headers.location);
+                }
+                if(res._headers.location){
+                    res._headers.location = res._headers.location.replace('http://localhost/', 'http://localhost:' + othis.port + '/');
+                }
+                if(othis.debug){
+                    console.log("Rewritten Location Header: " + res._headers.location);
+                }
+            }
+        }
+        try {
+            if(origin){
+                res.setHeader('Access-Control-Allow-Origin', origin);
+            }
+            res.setHeader('Access-Control-Allow-Credentials', 'true');
+
+            if(!headers){
+                headers = {};
+            }
+            res.oldWriteHead(statusCode, headers);
+        }
+        catch (error) {
+          console.log("Header could not be modified.");
+          console.log(error);
+        }
+
+    }
+    this.proxy.web(req, res, {
+        target: target
+    }, function (e) {
+        console.log("Proxy error: ", e);
+        res.writeHead(502);
+        res.write("Proxy target missing");
+        res.end();
+    });
+};
+
+DynamicProxy.prototype.handleWs = function(req, res, head) {
+    // no local route found, time to proxy
+    var target = this.targetForRequest(req);
+    if(this.debug) {
+        console.log("PROXY WS " + req.url + " to " + target.host + ':' + target.port);
+    }
+    var origin = req.headers.origin;
+    this.rewriteRequest(req);
+    res.oldWriteHead = res.writeHead;
+    res.writeHead = function(statusCode, headers) {
+        try {
+            if(origin){
+                res.setHeader('Access-Control-Allow-Origin', origin);
+            }
+            res.setHeader('Access-Control-Allow-Credentials', 'true');
+            if(!headers){ headers = {}; }
+            res.oldWriteHead(statusCode, headers);
+        }
+        catch (error) {
+          console.log("Header could not be modified.");
+          console.log(error);
+        }
+    }
+    this.proxy.ws(req, res, head, {
+        target: target
+    }, function (e) {
+        console.log("Proxy error: ", e);
+        res.writeHead(502);
+        res.write("Proxy target missing");
+        res.end();
+    });
+};
+
+exports.DynamicProxy = DynamicProxy;
diff --git a/lib/galaxy/web/proxy/js/package.json b/lib/galaxy/web/proxy/js/package.json
new file mode 100644
index 0000000..4c0d1a9
--- /dev/null
+++ b/lib/galaxy/web/proxy/js/package.json
@@ -0,0 +1,19 @@
+{
+  "name": "galaxy-proxy",
+  "version": "0.0.1",
+  "description": "A dynamic reverse proxy for use within Galaxy",
+  "main": "index.js",
+  "author": "John Chilton",
+  "license": "AFL-3.0",
+  "readmeFilename": "README.md",
+  "repository": {
+    "type": "mercurial",
+    "url": "https://bitbucket.org/galaxy/galaxy-central"
+  },
+  "dependencies": {
+    "eventemitter3": "0.1.6",
+    "http-proxy": "1.6.0",
+    "commander": "~2.2",
+    "sqlite3": "3.1.3"
+  }
+}
diff --git a/lib/galaxy/web/security/__init__.py b/lib/galaxy/web/security/__init__.py
new file mode 100644
index 0000000..4a2ed33
--- /dev/null
+++ b/lib/galaxy/web/security/__init__.py
@@ -0,0 +1,130 @@
+import collections
+import os
+import os.path
+import logging
+
+import galaxy.exceptions
+
+from Crypto.Cipher import Blowfish
+from Crypto.Util.randpool import RandomPool
+from Crypto.Util import number
+
+log = logging.getLogger( __name__ )
+
+if os.path.exists( "/dev/urandom" ):
+    # We have urandom, use it as the source of random data
+    random_fd = os.open( "/dev/urandom", os.O_RDONLY )
+
+    def get_random_bytes( nbytes ):
+        value = os.read( random_fd, nbytes )
+        # Normally we should get as much as we need
+        if len( value ) == nbytes:
+            return value.encode( "hex" )
+        # If we don't, keep reading (this is slow and should never happen)
+        while len( value ) < nbytes:
+            value += os.read( random_fd, nbytes - len( value ) )
+        return value.encode( "hex" )
+else:
+    def get_random_bytes( nbytes ):
+        nbits = nbytes * 8
+        random_pool = RandomPool( 1064 )
+        while random_pool.entropy < nbits:
+            random_pool.add_event()
+        random_pool.stir()
+        return str( number.getRandomNumber( nbits, random_pool.get_bytes ) )
+
+
+class SecurityHelper( object ):
+
+    def __init__( self, **config ):
+        self.id_secret = config['id_secret']
+        self.id_cipher = Blowfish.new( self.id_secret )
+
+        per_kind_id_secret_base = config.get( 'per_kind_id_secret_base', self.id_secret )
+        self.id_ciphers_for_kind = _cipher_cache( per_kind_id_secret_base )
+
+    def encode_id( self, obj_id, kind=None ):
+        if obj_id is None:
+            raise galaxy.exceptions.MalformedId("Attempted to encode None id")
+        id_cipher = self.__id_cipher( kind )
+        # Convert to string
+        s = str( obj_id )
+        # Pad to a multiple of 8 with leading "!"
+        s = ( "!" * ( 8 - len(s) % 8 ) ) + s
+        # Encrypt
+        return id_cipher.encrypt( s ).encode( 'hex' )
+
+    def encode_dict_ids( self, a_dict, kind=None, skip_startswith=None ):
+        """
+        Encode all ids in dictionary. Ids are identified by (a) an 'id' key or
+        (b) a key that ends with '_id'
+        """
+        for key, val in a_dict.items():
+            if ( key == 'id' or key.endswith( '_id' ) ) and ( skip_startswith is None or not key.startswith( skip_startswith ) ):
+                a_dict[ key ] = self.encode_id( val, kind=kind )
+
+        return a_dict
+
+    def encode_all_ids( self, rval, recursive=False ):
+        """
+        Encodes all integer values in the dict rval whose keys are 'id' or end
+        with '_id', excluding `tool_id` and `external_id`, which are consumed
+        and produced as-is via the API.
+        """
+        if not isinstance( rval, dict ):
+            return rval
+        for k, v in rval.items():
+            if ( k == 'id' or k.endswith( '_id' ) ) and v is not None and k not in [ 'tool_id', 'external_id' ]:
+                try:
+                    rval[ k ] = self.encode_id( v )
+                except Exception:
+                    pass  # probably already encoded
+            if ( k.endswith( "_ids" ) and isinstance( v, list ) ):
+                try:
+                    o = []
+                    for i in v:
+                        o.append( self.encode_id( i ) )
+                    rval[ k ] = o
+                except Exception:
+                    pass
+            else:
+                if recursive and isinstance( v, dict ):
+                    rval[ k ] = self.encode_all_ids( v, recursive )
+                elif recursive and isinstance( v, list ):
+                    rval[ k ] = map( lambda el: self.encode_all_ids( el, True), v )
+        return rval
+
+    def decode_id( self, obj_id, kind=None ):
+        id_cipher = self.__id_cipher( kind )
+        return int( id_cipher.decrypt( obj_id.decode( 'hex' ) ).lstrip( "!" ) )
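+
+    # Round-trip sketch (ciphertext illustrative): encode_id( 42 ) pads the id
+    # to "!!!!!!42", Blowfish-encrypts the 8-byte block and hex-encodes it;
+    # decode_id() reverses those steps and strips the "!" padding.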
+
+    def encode_guid( self, session_key ):
+        # Session keys are strings
+        # Pad to a multiple of 8 with leading "!"
+        s = ( "!" * ( 8 - len( session_key ) % 8 ) ) + session_key
+        # Encrypt
+        return self.id_cipher.encrypt( s ).encode( 'hex' )
+
+    def decode_guid( self, session_key ):
+        # Session keys are strings
+        return self.id_cipher.decrypt( session_key.decode( 'hex' ) ).lstrip( "!" )
+
+    def get_new_guid( self ):
+        # Generate a unique, high entropy 128 bit random number
+        return get_random_bytes( 16 )
+
+    def __id_cipher( self, kind ):
+        if not kind:
+            id_cipher = self.id_cipher
+        else:
+            id_cipher = self.id_ciphers_for_kind[ kind ]
+        return id_cipher
+
+
+class _cipher_cache( collections.defaultdict ):
+
+    def __init__( self, secret_base ):
+        self.secret_base = secret_base
+
+    def __missing__( self, key ):
+        return Blowfish.new( self.secret_base + "__" + key )
diff --git a/lib/galaxy/webapps/__init__.py b/lib/galaxy/webapps/__init__.py
new file mode 100644
index 0000000..7bcb5ec
--- /dev/null
+++ b/lib/galaxy/webapps/__init__.py
@@ -0,0 +1,3 @@
+"""Galaxy webapps root package -- this is a namespace package."""
+
+__import__( "pkg_resources" ).declare_namespace( __name__ )
diff --git a/lib/galaxy/webapps/galaxy/__init__.py b/lib/galaxy/webapps/galaxy/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/webapps/galaxy/api/__init__.py b/lib/galaxy/webapps/galaxy/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/webapps/galaxy/api/annotations.py b/lib/galaxy/webapps/galaxy/api/annotations.py
new file mode 100644
index 0000000..8a96506
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/annotations.py
@@ -0,0 +1,91 @@
+"""
+API operations on annotations.
+"""
+
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesStoredWorkflowMixin
+from galaxy.model.item_attrs import UsesAnnotations
+
+from galaxy import managers
+
+from galaxy import exceptions
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.util import sanitize_html
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class BaseAnnotationsController( BaseAPIController, UsesStoredWorkflowMixin, UsesAnnotations ):
+
+    @expose_api
+    def index( self, trans, **kwd ):
+        idnum = kwd[self.tagged_item_id]
+        item = self._get_item_from_id(trans, idnum)
+        if item is not None:
+            return self.get_item_annotation_str( trans.sa_session, trans.get_user(), item )
+
+    @expose_api
+    def create( self, trans, payload, **kwd ):
+        if "text" not in payload:
+            return ""
+        idnum = kwd[self.tagged_item_id]
+        item = self._get_item_from_id(trans, idnum)
+        if item is not None:
+            new_annotation = payload.get("text")
+            # TODO: sanitize on display not entry
+            new_annotation = sanitize_html.sanitize_html( new_annotation, 'utf-8', 'text/html' )
+
+            self.add_item_annotation( trans.sa_session, trans.get_user(), item, new_annotation )
+            trans.sa_session.flush()
+            return new_annotation
+        return ""
+
+    @expose_api
+    def delete( self, trans, **kwd ):
+        idnum = kwd[self.tagged_item_id]
+        item = self._get_item_from_id(trans, idnum)
+        if item is not None:
+            return self.delete_item_annotation( trans.sa_session, trans.get_user(), item )
+
+    @expose_api
+    def undelete( self, trans, **kwd ):
+        raise exceptions.NotImplemented()
+
+
+class HistoryAnnotationsController(BaseAnnotationsController):
+    controller_name = "history_annotations"
+    tagged_item_id = "history_id"
+
+    def __init__( self, app ):
+        super( HistoryAnnotationsController, self ).__init__( app )
+        self.history_manager = managers.histories.HistoryManager( app )
+
+    def _get_item_from_id(self, trans, idstr):
+        decoded_idstr = self.decode_id( idstr )
+        history = self.history_manager.get_accessible( decoded_idstr, trans.user, current_history=trans.history )
+        return history
+
+
+class HistoryContentAnnotationsController( BaseAnnotationsController ):
+    controller_name = "history_content_annotations"
+    tagged_item_id = "history_content_id"
+
+    def __init__( self, app ):
+        super( HistoryContentAnnotationsController, self ).__init__( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    def _get_item_from_id(self, trans, idstr):
+        decoded_idstr = self.decode_id( idstr )
+        hda = self.hda_manager.get_accessible( decoded_idstr, trans.user )
+        hda = self.hda_manager.error_if_uploading( hda )
+        return hda
+
+
+class WorkflowAnnotationsController( BaseAnnotationsController ):
+    controller_name = "workflow_annotations"
+    tagged_item_id = "workflow_id"
+
+    def _get_item_from_id(self, trans, idstr):
+        stored_workflow = self.get_stored_workflow( trans, idstr )
+        return stored_workflow
diff --git a/lib/galaxy/webapps/galaxy/api/authenticate.py b/lib/galaxy/webapps/galaxy/api/authenticate.py
new file mode 100644
index 0000000..f07b40f
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/authenticate.py
@@ -0,0 +1,105 @@
+"""
+API key retrieval through BaseAuth
+Sample usage:
+
+curl --user zipzap@foo.com:password http://localhost:8080/api/authenticate/baseauth
+
+Returns:
+
+{
+    "api_key": "baa4d6e3a156d3033f05736255f195f9"
+}
+"""
+
+from base64 import b64decode
+from urllib import unquote
+
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.managers import api_keys
+from galaxy import exceptions
+from galaxy.web.base.controller import BaseAPIController
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class AuthenticationController( BaseAPIController ):
+
+    def __init__( self, app ):
+        super( AuthenticationController, self ).__init__( app )
+        self.api_keys_manager = api_keys.ApiKeyManager( app )
+
+    @expose_api_anonymous_and_sessionless
+    def get_api_key( self, trans, **kwd ):
+        """
+        def get_api_key( self, trans, **kwd )
+        * GET /api/authenticate/baseauth
+          returns an API key for authenticated user based on BaseAuth headers
+
+        :returns: api_key in json format
+        :rtype:   dict
+
+        :raises: ObjectNotFound, HTTPBadRequest
+        """
+        email, password = self._decode_baseauth( trans.environ.get( 'HTTP_AUTHORIZATION' ) )
+
+        user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email == email ).all()
+
+        if len( user ) == 0:
+            raise exceptions.ObjectNotFound( 'The user does not exist.' )
+        elif len( user ) > 1:
+            # DB is inconsistent - there are multiple users with the same email.
+            raise exceptions.InconsistentDatabase( 'An error occurred, please contact your administrator.' )
+        else:
+            user = user[0]
+            is_valid_user = self.app.auth_manager.check_password(user, password)
+        if is_valid_user:
+            key = self.api_keys_manager.get_or_create_api_key( user )
+            return dict( api_key=key )
+        else:
+            raise exceptions.AuthenticationFailed( 'Invalid password.' )
+
+    def _decode_baseauth( self, encoded_str ):
+        """
+        Decode a base64-encoded HTTP Basic authentication string. Returns a
+        tuple of the form (email, password), and raises an ActionInputError
+        exception if nothing could be decoded.
+
+        :param  encoded_str: BaseAuth string encoded base64
+        :type   encoded_str: string
+
+        :returns: email of the user
+        :rtype:   string
+        :returns: password of the user
+        :rtype:   string
+
+        :raises: ActionInputError
+        """
+        split = encoded_str.strip().split( ' ' )
+
+        # If split is only one element, try to decode the email and password
+        # directly.
+        if len( split ) == 1:
+            try:
+                email, password = b64decode( split[ 0 ] ).split( ':' )
+            except Exception:
+                raise exceptions.ActionInputError()
+
+        # If there are only two elements, check the first and ensure it says
+        # 'basic' so that we know we're about to decode the right thing. If not,
+        # bail out.
+        elif len( split ) == 2:
+            if split[ 0 ].strip().lower() == 'basic':
+                try:
+                    email, password = b64decode( split[ 1 ] ).split( ':' )
+                except Exception:
+                    raise exceptions.ActionInputError()
+            else:
+                raise exceptions.ActionInputError()
+
+        # If there are more than 2 elements, something crazy must be happening.
+        # Bail.
+        else:
+            raise exceptions.ActionInputError()
+
+        return unquote( email ), unquote( password )
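+
+    # Usage sketch (hypothetical header value):
+    #   _decode_baseauth( 'Basic ' + b64encode( 'user@example.org:secret' ) )
+    #   returns ( 'user@example.org', 'secret' )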
diff --git a/lib/galaxy/webapps/galaxy/api/configuration.py b/lib/galaxy/webapps/galaxy/api/configuration.py
new file mode 100644
index 0000000..b1188b0
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/configuration.py
@@ -0,0 +1,100 @@
+"""
+API operations allowing clients to determine Galaxy instance's capabilities
+and configuration settings.
+"""
+
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import require_admin
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.managers import configuration
+from galaxy.queue_worker import send_control_task
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class ConfigurationController( BaseAPIController ):
+
+    def __init__( self, app ):
+        super( ConfigurationController, self ).__init__( app )
+        self.config_serializer = configuration.ConfigSerializer( app )
+        self.admin_config_serializer = configuration.AdminConfigSerializer( app )
+
+    @expose_api_anonymous_and_sessionless
+    def index( self, trans, **kwd ):
+        """
+        GET /api/configuration
+        Return an object containing exposable configuration settings.
+
+        Note: a more complete list is returned if the user is an admin.
+        """
+        is_admin = self.user_manager.is_admin( trans.user )
+        serialization_params = self._parse_serialization_params( kwd, 'all' )
+        return self.get_config_dict( trans, is_admin, **serialization_params )
+
+    @expose_api_anonymous_and_sessionless
+    def version( self, trans, **kwds ):
+        """
+        GET /api/version
+        Return a description of the major version of Galaxy (e.g. 15.03).
+
+        :rtype:     dict
+        :returns:   dictionary with major version keyed on 'version_major'
+        """
+        return {"version_major": self.app.config.version_major }
+
+    def get_config_dict( self, trans, return_admin=False, view=None, keys=None, default_view='all' ):
+        """
+        Return a dictionary with (a subset of) current Galaxy settings.
+
+        If `return_admin` is true, also include a subset of more sensitive
+        keys. Pass in `view` (a string) and a comma-separated list of `keys`
+        to control which configuration settings are returned.
+        """
+        serializer = self.config_serializer
+        if return_admin:
+            # TODO: this should probably just be under a different route: 'admin/configuration'
+            serializer = self.admin_config_serializer
+
+        serialized = serializer.serialize_to_view( self.app.config, view=view, keys=keys, default_view=default_view )
+        return serialized
+
+    @expose_api
+    @require_admin
+    def dynamic_tool_confs(self, trans):
+        confs = self.app.toolbox.dynamic_confs(include_migrated_tool_conf=True)
+        return map(_tool_conf_to_dict, confs)
+
+    @expose_api
+    @require_admin
+    def tool_lineages(self, trans):
+        rval = []
+        for id, tool in self.app.toolbox.tools():
+            if hasattr( tool, 'lineage' ):
+                lineage_dict = tool.lineage.to_dict()
+            else:
+                lineage_dict = None
+
+            entry = dict(
+                id=id,
+                lineage=lineage_dict
+            )
+            rval.append(entry)
+        return rval
+
+    @expose_api
+    @require_admin
+    def reload_toolbox(self, trans):
+        """
+        PUT /api/configuration/toolbox
+        Reload the Galaxy toolbox (but not individual tools).
+        """
+        send_control_task(self.app.toolbox.app, 'reload_toolbox')
+
+
+def _tool_conf_to_dict(conf):
+    return dict(
+        config_filename=conf['config_filename'],
+        tool_path=conf['tool_path'],
+    )
diff --git a/lib/galaxy/webapps/galaxy/api/dataset_collections.py b/lib/galaxy/webapps/galaxy/api/dataset_collections.py
new file mode 100644
index 0000000..9d25976
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/dataset_collections.py
@@ -0,0 +1,85 @@
+from galaxy.web import _future_expose_api as expose_api
+
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesLibraryMixinItems
+
+from galaxy import managers
+from galaxy.managers.collections_util import api_payload_to_create_params, dictify_dataset_collection_instance
+
+from logging import getLogger
+log = getLogger( __name__ )
+
+
+class DatasetCollectionsController(
+    BaseAPIController,
+    UsesLibraryMixinItems,
+):
+
+    def __init__( self, app ):
+        super( DatasetCollectionsController, self ).__init__( app )
+        self.history_manager = managers.histories.HistoryManager( app )
+
+    @expose_api
+    def index( self, trans, **kwd ):
+        trans.response.status = 501
+        return 'not implemented'
+
+    @expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        * POST /api/dataset_collections:
+            create a new dataset collection instance.
+
+        :type   payload: dict
+        :param  payload: (optional) dictionary structure containing:
+            * collection_type: dataset collection type to create.
+            * instance_type:   Instance type - 'history' or 'library'.
+            * name:            the new dataset collection's name
+            * datasets:        object describing datasets for collection
+        :rtype:     dict
+        :returns:   element view of new dataset collection
+        """
+        # TODO: Error handling...
+        create_params = api_payload_to_create_params( payload )
+        instance_type = payload.pop( "instance_type", "history" )
+        if instance_type == "history":
+            history_id = payload.get( 'history_id' )
+            history_id = self.decode_id( history_id )
+            history = self.history_manager.get_owned( history_id, trans.user, current_history=trans.history )
+            create_params[ "parent" ] = history
+        elif instance_type == "library":
+            folder_id = payload.get( 'folder_id' )
+            library_folder = self.get_library_folder( trans, folder_id, check_accessible=True )
+            self.check_user_can_add_to_library_item( trans, library_folder, check_accessible=False )
+            create_params[ "parent" ] = library_folder
+        else:
+            trans.response.status = 501
+            return
+        dataset_collection_instance = self.__service( trans ).create( trans=trans, **create_params )
+        return dictify_dataset_collection_instance( dataset_collection_instance,
+                                                    security=trans.security, parent=create_params[ "parent" ] )
+
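+    # Illustrative only: a payload of the shape create() above expects, using
+    # the keys from its docstring and hypothetical encoded ids:
+    #   payload = {
+    #       'collection_type': 'list',
+    #       'instance_type': 'history',
+    #       'history_id': 'f2db41e1fa331b3e',
+    #       'name': 'my collection',
+    #       'datasets': [ ... ],  # element descriptions, see docstring above
+    #   }
+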
+    @expose_api
+    def show( self, trans, instance_type, id, **kwds ):
+        dataset_collection_instance = self.__service( trans ).get_dataset_collection_instance(
+            trans,
+            id=id,
+            instance_type=instance_type,
+        )
+        if instance_type == 'history':
+            parent = dataset_collection_instance.history
+        elif instance_type == 'library':
+            parent = dataset_collection_instance.folder
+        else:
+            trans.response.status = 501
+            return
+        return dictify_dataset_collection_instance(
+            dataset_collection_instance,
+            security=trans.security,
+            parent=parent,
+            view='element'
+        )
+
+    def __service( self, trans ):
+        service = trans.app.dataset_collections_service
+        return service
diff --git a/lib/galaxy/webapps/galaxy/api/datasets.py b/lib/galaxy/webapps/galaxy/api/datasets.py
new file mode 100644
index 0000000..b58ce40
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -0,0 +1,382 @@
+"""
+API operations on the contents of a history dataset.
+"""
+from six import string_types
+
+from galaxy import model
+from galaxy import exceptions as galaxy_exceptions
+from galaxy import web
+from galaxy.web.framework.helpers import is_true
+from galaxy import util
+
+from galaxy.visualization.data_providers.genome import FeatureLocationIndexDataProvider
+from galaxy.visualization.data_providers.genome import SamDataProvider
+from galaxy.visualization.data_providers.genome import BamDataProvider
+from galaxy.datatypes import dataproviders
+
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesVisualizationMixin
+from galaxy import managers
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class DatasetsController( BaseAPIController, UsesVisualizationMixin ):
+
+    def __init__( self, app ):
+        super( DatasetsController, self ).__init__( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+        self.hda_serializer = managers.hdas.HDASerializer( self.app )
+
+    def _parse_serialization_params( self, kwd, default_view ):
+        view = kwd.get( 'view', None )
+        keys = kwd.get( 'keys' )
+        if isinstance( keys, string_types ):
+            keys = keys.split( ',' )
+        return dict( view=view, keys=keys, default_view=default_view )
+
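+    # Illustrative only: given kwd = { 'view': 'summary', 'keys': 'id,name' },
+    # _parse_serialization_params( kwd, 'detailed' ) returns
+    #   { 'view': 'summary', 'keys': [ 'id', 'name' ], 'default_view': 'detailed' }
+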
+    @web.expose_api
+    def index( self, trans, **kwd ):
+        """
+        GET /api/datasets
+        Lists datasets.
+        """
+        trans.response.status = 501
+        return 'not implemented'
+
+    @web.expose_api_anonymous
+    def show( self, trans, id, hda_ldda='hda', data_type=None, provider=None, **kwd ):
+        """
+        GET /api/datasets/{encoded_dataset_id}
+        Displays information about and/or content of a dataset.
+        """
+        # Get dataset.
+        try:
+            dataset = self.get_hda_or_ldda( trans, hda_ldda=hda_ldda, dataset_id=id )
+        except Exception as e:
+            return str( e )
+
+        # Use data type to return particular type of data.
+        try:
+            if data_type == 'state':
+                rval = self._dataset_state( trans, dataset )
+            elif data_type == 'converted_datasets_state':
+                rval = self._converted_datasets_state( trans, dataset, kwd.get( 'chrom', None ),
+                                                       is_true( kwd.get( 'retry', False ) ) )
+            elif data_type == 'data':
+                rval = self._data( trans, dataset, **kwd )
+            elif data_type == 'features':
+                rval = self._search_features( trans, dataset, kwd.get( 'query' ) )
+            elif data_type == 'raw_data':
+                rval = self._raw_data( trans, dataset, provider, **kwd )
+            elif data_type == 'track_config':
+                rval = self.get_new_track_config( trans, dataset )
+            elif data_type == 'genome_data':
+                rval = self._get_genome_data( trans, dataset, kwd.get('dbkey', None) )
+            else:
+                # Default: return dataset as dict.
+                if hda_ldda == 'hda':
+                    return self.hda_serializer.serialize_to_view( dataset,
+                                                                  view=kwd.get( 'view', 'detailed' ), user=trans.user, trans=trans )
+                else:
+                    rval = dataset.to_dict()
+
+        except Exception as e:
+            rval = "Error in dataset API at listing contents: " + str( e )
+            log.error( rval, exc_info=True )
+            trans.response.status = 500
+        return rval
+
+    def _dataset_state( self, trans, dataset, **kwargs ):
+        """
+        Returns state of dataset.
+        """
+        msg = self.hda_manager.data_conversion_status( dataset )
+        if not msg:
+            msg = dataset.conversion_messages.DATA
+
+        return msg
+
+    def _converted_datasets_state( self, trans, dataset, chrom=None, retry=False ):
+        """
+        Init-like method that returns state of dataset's converted datasets.
+        Returns valid chroms for that dataset as well.
+        """
+        msg = self.hda_manager.data_conversion_status( dataset )
+        if msg:
+            return msg
+
+        # Get datasources and check for messages (which indicate errors). Retry if flag is set.
+        data_sources = dataset.get_datasources( trans )
+        messages_list = [ data_source_dict[ 'message' ] for data_source_dict in data_sources.values() ]
+        msg = self._get_highest_priority_msg( messages_list )
+        if msg:
+            if retry:
+                # Clear datasources and then try again.
+                dataset.clear_associated_files()
+                return self._converted_datasets_state( trans, dataset, chrom )
+            else:
+                return msg
+
+        # If there is a chrom, check for data on the chrom.
+        if chrom:
+            data_provider = trans.app.data_provider_registry.get_data_provider( trans,
+                                                                                original_dataset=dataset, source='index' )
+            if not data_provider.has_data( chrom ):
+                return dataset.conversion_messages.NO_DATA
+
+        # Have data if we get here
+        return { "status": dataset.conversion_messages.DATA, "valid_chroms": None }
+
+    def _search_features( self, trans, dataset, query ):
+        """
+        Returns features, locations in dataset that match query. Format is a
+        list of features; each feature is a list itself: [name, location]
+        """
+        if dataset.can_convert_to( "fli" ):
+            converted_dataset = dataset.get_converted_dataset( trans, "fli" )
+            if converted_dataset:
+                data_provider = FeatureLocationIndexDataProvider( converted_dataset=converted_dataset )
+                if data_provider:
+                    return data_provider.get_data( query )
+
+        return []
+
+    def _data( self, trans, dataset, chrom, low, high, start_val=0, max_vals=None, **kwargs ):
+        """
+        Provides a block of data from a dataset.
+        """
+        # Parameter check.
+        if not chrom:
+            return dataset.conversion_messages.NO_DATA
+
+        # Dataset check.
+        msg = self.hda_manager.data_conversion_status( dataset )
+        if msg:
+            return msg
+
+        # Get datasources and check for messages.
+        data_sources = dataset.get_datasources( trans )
+        messages_list = [ data_source_dict[ 'message' ] for data_source_dict in data_sources.values() ]
+        return_message = self._get_highest_priority_msg( messages_list )
+        if return_message:
+            return return_message
+
+        extra_info = None
+        mode = kwargs.get( "mode", "Auto" )
+        data_provider_registry = trans.app.data_provider_registry
+        indexer = None
+
+        # Coverage mode uses index data.
+        if mode == "Coverage":
+            # Get summary using minimal cutoffs.
+            indexer = data_provider_registry.get_data_provider( trans, original_dataset=dataset, source='index' )
+            return indexer.get_data( chrom, low, high, **kwargs )
+
+        # TODO:
+        # (1) add logic back in for no_detail
+        # (2) handle scenario where mode is Squish/Pack but data requested is large, so reduced data needed to be returned.
+
+        # If mode is Auto, need to determine what type of data to return.
+        if mode == "Auto":
+            # Get stats from indexer.
+            indexer = data_provider_registry.get_data_provider( trans, original_dataset=dataset, source='index' )
+            stats = indexer.get_data( chrom, low, high, stats=True )
+
+            # If stats were requested, return them.
+            if 'stats' in kwargs:
+                if stats[ 'data' ][ 'max' ] == 0:
+                    return { 'dataset_type': indexer.dataset_type, 'data': None }
+                else:
+                    return stats
+
+            # Stats provides features/base and resolution is bases/pixel, so
+            # multiplying them yields features/pixel.
+            features_per_pixel = stats[ 'data' ][ 'max' ] * float( kwargs[ 'resolution' ] )
+
+            # Use heuristic based on features/pixel and region size to determine whether to
+            # return coverage data. When zoomed out and region is large, features/pixel
+            # is determining factor. However, when sufficiently zoomed in and region is
+            # small, coverage data is no longer provided.
+            if int( high ) - int( low ) > 50000 and features_per_pixel > 1000:
+                return indexer.get_data( chrom, low, high )
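+            # Worked example (illustrative): a stats max of 0.03 features/base
+            # at a resolution of 40000 bases/pixel gives 1200 features/pixel;
+            # for a 100kb region both conditions hold and coverage is returned.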
+
+        #
+        # Provide individual data points.
+        #
+
+        # Get data provider.
+        data_provider = data_provider_registry.get_data_provider( trans, original_dataset=dataset, source='data' )
+
+        # Allow max_vals to be set by the data provider if not passed
+        if max_vals is None:
+            max_vals = data_provider.get_default_max_vals()
+
+        # Get reference sequence and mean depth for region; these are used by providers for aligned reads.
+        region = None
+        mean_depth = None
+        if isinstance( data_provider, (SamDataProvider, BamDataProvider ) ):
+            # Get reference sequence.
+            if dataset.dbkey:
+                # FIXME: increase region 1M each way to provide sequence for
+                # spliced/gapped reads. Probably should provide refseq object
+                # directly to data provider.
+                region = self.app.genomes.reference( trans, dbkey=dataset.dbkey, chrom=chrom,
+                                                     low=( max( 0, int( low  ) - 1000000 ) ),
+                                                     high=( int( high ) + 1000000 ) )
+
+            # Get mean depth.
+            if not indexer:
+                indexer = data_provider_registry.get_data_provider( trans, original_dataset=dataset, source='index' )
+            stats = indexer.get_data( chrom, low, high, stats=True )
+            mean_depth = stats[ 'data' ][ 'mean' ]
+
+        # Get and return data from data_provider.
+        result = data_provider.get_data( chrom, int( low ), int( high ), int( start_val ), int( max_vals ),
+                                         ref_seq=region, mean_depth=mean_depth, **kwargs )
+        result.update( { 'dataset_type': data_provider.dataset_type, 'extra_info': extra_info } )
+        return result
+
+    def _raw_data( self, trans, dataset, provider=None, **kwargs ):
+        """
+        Uses the original (raw) dataset to return data. This method is useful
+        when the dataset is not yet indexed, since serving data in that case
+        would first require the indexes to be created, which is slow.
+        """
+        # Dataset check.
+        msg = self.hda_manager.data_conversion_status( dataset )
+        if msg:
+            return msg
+
+        registry = trans.app.data_provider_registry
+
+        # allow the caller to specify which provider is used
+        #   pulling from the original providers if possible, then the new providers
+        if provider:
+            if provider in registry.dataset_type_name_to_data_provider:
+                data_provider = registry.dataset_type_name_to_data_provider[ provider ]( dataset )
+
+            elif dataset.datatype.has_dataprovider( provider ):
+                kwargs = dataset.datatype.dataproviders[ provider ].parse_query_string_settings( kwargs )
+                # use dictionary to allow more than the data itself to be returned (data totals, other meta, etc.)
+                return {
+                    'data': list( dataset.datatype.dataprovider( dataset, provider, **kwargs ) )
+                }
+
+            else:
+                raise dataproviders.exceptions.NoProviderAvailable( dataset.datatype, provider )
+
+        # no provider name: look up by datatype
+        else:
+            data_provider = registry.get_data_provider( trans, raw=True, original_dataset=dataset )
+
+        # Return data.
+        data = data_provider.get_data( **kwargs )
+
+        return data
+
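+    # Illustrative only: a request such as (hypothetical provider name)
+    #   GET /api/datasets/<id>?data_type=raw_data&provider=column
+    # is routed by _raw_data() above through the datatype's dataprovider, with
+    # any remaining query params parsed into provider kwargs by
+    # parse_query_string_settings().
+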
+    @web.expose_api_raw_anonymous
+    def display( self, trans, history_content_id, history_id,
+                 preview=False, filename=None, to_ext=None, raw=False, **kwd ):
+        """
+        GET /api/histories/{encoded_history_id}/contents/{encoded_content_id}/display
+        Displays history content (dataset).
+
+        The query parameter 'raw' should be considered experimental and may be dropped at
+        some point in the future without warning. Generally, data should be processed by its
+        datatype prior to display (the default if 'raw' is unspecified or explicitly false).
+        """
+        decoded_content_id = self.decode_id( history_content_id )
+        raw = util.string_as_bool_or_none( raw )
+
+        rval = ''
+        try:
+            hda = self.hda_manager.get_accessible( decoded_content_id, trans.user )
+
+            if raw:
+                if filename and filename != 'index':
+                    file_path = trans.app.object_store.get_filename( hda.dataset,
+                                                                     extra_dir=( 'dataset_%s_files' % hda.dataset.id ),
+                                                                     alt_name=filename)
+                else:
+                    file_path = hda.file_name
+                rval = open( file_path )
+
+            else:
+                display_kwd = kwd.copy()
+                if 'key' in display_kwd:
+                    del display_kwd["key"]
+                rval = hda.datatype.display_data( trans, hda, preview, filename, to_ext, **display_kwd )
+
+        except Exception as exception:
+            log.error( "Error getting display data for dataset (%s) from history (%s): %s",
+                       history_content_id, history_id, str( exception ), exc_info=True )
+            trans.response.status = 500
+            rval = ( "Could not get display data for dataset: " + str( exception ) )
+
+        return rval
+
+    @web.expose_api_raw_anonymous
+    def get_metadata_file( self, trans, history_content_id, history_id, metadata_file=None, **kwd ):
+        decoded_content_id = self.decode_id( history_content_id )
+        rval = ''
+        try:
+            hda = self.hda_manager.get_accessible( decoded_content_id, trans.user )
+            file_ext = hda.metadata.spec.get(metadata_file).get("file_ext", metadata_file)
+            fname = ''.join(c in util.FILENAME_VALID_CHARS and c or '_' for c in hda.name)[0:150]
+            trans.response.headers["Content-Type"] = "application/octet-stream"
+            trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (hda.hid, fname, file_ext)
+            return open(hda.metadata.get(metadata_file).file_name)
+        except Exception as exception:
+            log.error( "Error getting metadata_file (%s) for dataset (%s) from history (%s): %s",
+                       metadata_file, history_content_id, history_id, str( exception ), exc_info=True )
+            trans.response.status = 500
+            rval = ( "Could not get display data for dataset: " + str( exception ) )
+        return rval
+
+    @web._future_expose_api_anonymous
+    def converted( self, trans, dataset_id, ext, **kwargs ):
+        """
+        converted( self, trans, dataset_id, ext, **kwargs )
+        * GET /api/datasets/{dataset_id}/converted/{ext}
+            return information about datasets made by converting this dataset
+            to a new format
+
+        :type   dataset_id: str
+        :param  dataset_id: the encoded id of the original HDA to check
+        :type   ext:        str
+        :param  ext:        file extension of the target format or None.
+
+        If there is no existing converted dataset for the format in `ext`,
+        one will be created.
+
+        If `ext` is None, a dictionary will be returned of the form
+        { <converted extension> : <converted id>, ... } containing all the
+        *existing* converted datasets.
+
+        .. note:: `view` and `keys` are also available to control the serialization
+            of individual datasets. They have no effect when `ext` is None.
+
+        :rtype:     dict
+        :returns:   dictionary containing detailed HDA information
+                    or (if `ext` is None) an extension->dataset_id map
+        """
+        decoded_id = self.decode_id( dataset_id )
+        hda = self.hda_manager.get_accessible( decoded_id, trans.user )
+        if ext:
+            converted = self._get_or_create_converted( trans, hda, ext, **kwargs )
+            return self.hda_serializer.serialize_to_view( converted,
+                user=trans.user, trans=trans, **self._parse_serialization_params( kwargs, 'detailed' ) )
+
+        return self.hda_serializer.serialize_converted_datasets( hda, 'converted' )
+
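+    # Illustrative only: with `ext` unset, converted() above returns a map of
+    # existing conversions, e.g. (hypothetical ids)
+    #   { 'bam': '2f94e8ae9edff68a', 'bai': '5a1cff6882ddb5b2' }
+    # whereas .../converted/<ext> returns the serialized converted dataset.
+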
+    def _get_or_create_converted( self, trans, original, target_ext, **kwargs ):
+        try:
+            original.get_converted_dataset( trans, target_ext )
+            converted = original.get_converted_files_by_type( target_ext )
+            return converted
+
+        except model.NoConverterException:
+            exc_data = dict( source=original.ext, target=target_ext, available=original.get_converter_types().keys() )
+            raise galaxy_exceptions.RequestParameterInvalidException( 'Conversion not possible', **exc_data )
diff --git a/lib/galaxy/webapps/galaxy/api/datatypes.py b/lib/galaxy/webapps/galaxy/api/datatypes.py
new file mode 100644
index 0000000..ec0f6fa
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/datatypes.py
@@ -0,0 +1,132 @@
+"""
+API operations allowing clients to determine the datatypes supported by Galaxy.
+"""
+
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy import exceptions
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.util import asbool
+from galaxy.datatypes.data import Data
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class DatatypesController( BaseAPIController ):
+
+    @expose_api_anonymous_and_sessionless
+    def index( self, trans, **kwd ):
+        """
+        GET /api/datatypes
+        Return an object containing upload datatypes.
+        """
+        datatypes_registry = self._datatypes_registry
+        try:
+            extension_only = asbool( kwd.get( 'extension_only', True ) )
+            upload_only = asbool( kwd.get( 'upload_only', True ) )
+            if extension_only:
+                if upload_only:
+                    return datatypes_registry.upload_file_formats
+                else:
+                    return [ ext for ext in datatypes_registry.datatypes_by_extension ]
+            else:
+                rval = []
+                for elem in datatypes_registry.datatype_elems:
+                    if not asbool(elem.get('display_in_upload')) and upload_only:
+                        continue
+                    keys = ['extension', 'description', 'description_url']
+                    dictionary = {}
+                    for key in keys:
+                        dictionary[key] = elem.get(key)
+                    extension = elem.get('extension')
+                    if extension in datatypes_registry.datatypes_by_extension:
+                        composite_files = datatypes_registry.datatypes_by_extension[ extension ].composite_files
+                        if composite_files:
+                            dictionary['composite_files'] = [_.dict() for _ in composite_files.itervalues()]
+                    rval.append(dictionary)
+                return rval
+        except Exception as exception:
+            log.error( 'could not get datatypes: %s', str( exception ), exc_info=True )
+            if not isinstance( exception, exceptions.MessageException ):
+                raise exceptions.InternalServerError( str( exception ) )
+            else:
+                raise
+
+    @expose_api_anonymous_and_sessionless
+    def mapping( self, trans, **kwd ):
+        '''
+        GET /api/datatypes/mapping
+        Return a dictionary of class to class mappings.
+        '''
+        try:
+            ext_to_class_name = dict()
+            classes = []
+            for k, v in self._datatypes_registry.datatypes_by_extension.iteritems():
+                c = v.__class__
+                ext_to_class_name[k] = c.__module__ + "." + c.__name__
+                classes.append( c )
+            class_to_classes = dict()
+
+            def visit_bases( types, cls ):
+                for base in cls.__bases__:
+                    if issubclass( base, Data ):
+                        types.add( base.__module__ + "." + base.__name__ )
+                    visit_bases( types, base )
+            for c in classes:
+                n = c.__module__ + "." + c.__name__
+                types = set( [ n ] )
+                visit_bases( types, c )
+                class_to_classes[ n ] = dict( ( t, True ) for t in types )
+            return dict( ext_to_class_name=ext_to_class_name, class_to_classes=class_to_classes )
+
+        except Exception as exception:
+            log.error( 'could not get datatype mapping: %s', str( exception ), exc_info=True )
+            if not isinstance( exception, exceptions.MessageException ):
+                raise exceptions.InternalServerError( str( exception ) )
+            else:
+                raise
+
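+    # Illustrative only: for the 'bed' extension the mapping above might hold
+    #   ext_to_class_name[ 'bed' ] == 'galaxy.datatypes.interval.Bed'
+    # and class_to_classes for that name maps the class itself plus every base
+    # class collected by visit_bases() (down to Data) to True.
+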
+    @expose_api_anonymous_and_sessionless
+    def sniffers( self, trans, **kwd ):
+        '''
+        GET /api/datatypes/sniffers
+        Return a list of sniffers.
+        '''
+        try:
+            rval = []
+            for sniffer_elem in self._datatypes_registry.sniffer_elems:
+                datatype = sniffer_elem.get( 'type' )
+                if datatype is not None:
+                    rval.append( datatype )
+            return rval
+        except Exception as exception:
+            log.error( 'could not get datatypes: %s', str( exception ), exc_info=True )
+            if not isinstance( exception, exceptions.MessageException ):
+                raise exceptions.InternalServerError( str( exception ) )
+            else:
+                raise
+
+    @expose_api_anonymous_and_sessionless
+    def converters( self, trans, **kwd ):
+        converters = []
+        for (source_type, targets) in self._datatypes_registry.datatype_converters.iteritems():
+            for target_type in targets:
+                converters.append( {
+                    'source': source_type,
+                    'target': target_type,
+                    'tool_id': targets[ target_type ].id,
+                } )
+
+        return converters
+
+    @expose_api_anonymous_and_sessionless
+    def edam_formats( self, trans, **kwds ):
+        return self._datatypes_registry.edam_formats
+
+    @expose_api_anonymous_and_sessionless
+    def edam_data( self, trans, **kwds ):
+        return self._datatypes_registry.edam_data
+
+    @property
+    def _datatypes_registry( self ):
+        return self.app.datatypes_registry
diff --git a/lib/galaxy/webapps/galaxy/api/extended_metadata.py b/lib/galaxy/webapps/galaxy/api/extended_metadata.py
new file mode 100644
index 0000000..7c016bf
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/extended_metadata.py
@@ -0,0 +1,81 @@
+"""
+API operations on annotations.
+"""
+import logging
+from galaxy import web
+from galaxy import managers
+
+from galaxy.web.base.controller import BaseAPIController, UsesLibraryMixinItems, UsesStoredWorkflowMixin, UsesExtendedMetadataMixin, HTTPNotImplemented
+
+log = logging.getLogger( __name__ )
+
+
+class BaseExtendedMetadataController( BaseAPIController, UsesExtendedMetadataMixin, UsesLibraryMixinItems, UsesStoredWorkflowMixin ):
+
+    @web.expose_api
+    def index( self, trans, **kwd ):
+        idnum = kwd[self.exmeta_item_id]
+        item = self._get_item_from_id(trans, idnum, check_writable=False)
+        if item is not None:
+            ex_meta = self.get_item_extended_metadata_obj( trans, item )
+            if ex_meta is not None:
+                return ex_meta.data
+
+    @web.expose_api
+    def create( self, trans, payload, **kwd ):
+        idnum = kwd[self.exmeta_item_id]
+        item = self._get_item_from_id(trans, idnum, check_writable=True)
+        if item is not None:
+            ex_obj = self.get_item_extended_metadata_obj(trans, item)
+            if ex_obj is not None:
+                self.unset_item_extended_metadata_obj(trans, item)
+                self.delete_extended_metadata(trans, ex_obj)
+            ex_obj = self.create_extended_metadata(trans, payload)
+            self.set_item_extended_metadata_obj(trans, item, ex_obj)
+
+    @web.expose_api
+    def delete( self, trans, **kwd ):
+        idnum = kwd[self.tagged_item_id]
+        item = self._get_item_from_id(trans, idnum, check_writable=True)
+        if item is not None:
+            ex_obj = self.get_item_extended_metadata_obj(trans, item)
+            if ex_obj is not None:
+                self.unset_item_extended_metadata_obj(trans, item)
+                self.delete_extended_metadata(trans, ex_obj)
+
+    @web.expose_api
+    def undelete( self, trans, **kwd ):
+        raise HTTPNotImplemented()
+
+
+class LibraryDatasetExtendMetadataController(BaseExtendedMetadataController):
+    controller_name = "library_dataset_extended_metadata"
+    exmeta_item_id = "library_content_id"
+
+    def _get_item_from_id(self, trans, idstr, check_writable=True):
+        if check_writable:
+            item = self.get_library_dataset_dataset_association( trans, idstr)
+            if trans.app.security_agent.can_modify_library_item( trans.get_current_user_roles(), item ):
+                return item
+        else:
+            item = self.get_library_dataset_dataset_association( trans, idstr)
+            if trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), item, trans.user ):
+                return item
+        return None
+
+
+class HistoryDatasetExtendMetadataController(BaseExtendedMetadataController):
+    controller_name = "history_dataset_extended_metadata"
+    exmeta_item_id = "history_content_id"
+
+    def __init__( self, app ):
+        super( HistoryDatasetExtendMetadataController, self ).__init__( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    def _get_item_from_id(self, trans, idstr, check_writable=True):
+        decoded_idstr = self.decode_id( idstr )
+        if check_writable:
+            return self.hda_manager.get_owned( decoded_idstr, trans.user, current_history=trans.history )
+        else:
+            hda = self.hda_manager.get_accessible( decoded_idstr, trans.user )
+            return self.hda_manager.error_if_uploading( hda )
diff --git a/lib/galaxy/webapps/galaxy/api/folder_contents.py b/lib/galaxy/webapps/galaxy/api/folder_contents.py
new file mode 100644
index 0000000..5a1d14f
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/folder_contents.py
@@ -0,0 +1,332 @@
+"""
+API operations on the contents of a library folder.
+"""
+from galaxy import util
+from galaxy import exceptions
+from galaxy import managers
+from galaxy.managers import folders
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy.web.base.controller import BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class FolderContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems ):
+    """
+    Class controls retrieval, creation and updating of folder contents.
+    """
+
+    def __init__( self, app ):
+        super( FolderContentsController, self ).__init__( app )
+        self.folder_manager = folders.FolderManager()
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    @expose_api_anonymous
+    def index( self, trans, folder_id, **kwd ):
+        """
+        GET /api/folders/{encoded_folder_id}/contents
+
+        Displays a collection (list) of a folder's contents
+        (files and folders). The encoded folder ID is prefixed
+        with 'F' to distinguish folders from datasets, whose ids
+        carry no prefix. The full path is provided in the
+        response as a separate object supplying data for
+        breadcrumb path building.
+
+        :param  folder_id: encoded ID of the folder whose
+            contents should be displayed
+        :type   folder_id: encoded string
+
+        :param kwd: keyword dictionary with other params
+        :type  kwd: dict
+
+        :returns: dictionary containing all items and metadata
+        :type:    dict
+
+        :raises: MalformedId, InconsistentDatabase, ObjectNotFound,
+             InternalServerError
+        """
+        is_admin = trans.user_is_admin()
+        deleted = kwd.get( 'include_deleted', 'missing' )
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            deleted = util.asbool( deleted )
+        except ValueError:
+            deleted = False
+
+        decoded_folder_id = self.folder_manager.cut_and_decode( trans, folder_id )
+        folder = self.folder_manager.get( trans, decoded_folder_id )
+
+        # Special level of security on top of libraries.
+        if trans.app.security_agent.can_access_library( current_user_roles, folder.parent_library ) or is_admin:
+            pass
+        else:
+            if trans.user:
+                log.warning( "SECURITY: User (id: %s) without proper access rights is trying to load folder with ID of %s" % ( trans.user.id, decoded_folder_id ) )
+            else:
+                log.warning( "SECURITY: Anonymous user is trying to load restricted folder with ID of %s" % ( decoded_folder_id ) )
+            raise exceptions.ObjectNotFound( 'Folder with the id provided ( %s ) was not found' % str( folder_id ) )
+
+        folder_contents = []
+        update_time = ''
+        create_time = ''
+        #  Go through every accessible item (folders, datasets) in the folder and include its metadata.
+        for content_item in self._load_folder_contents( trans, folder, deleted ):
+            return_item = {}
+            encoded_id = trans.security.encode_id( content_item.id )
+            update_time = content_item.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+            create_time = content_item.create_time.strftime( "%Y-%m-%d %I:%M %p" )
+
+            if content_item.api_type == 'folder':
+                encoded_id = 'F' + encoded_id
+                can_modify = is_admin or ( trans.user and trans.app.security_agent.can_modify_library_item( current_user_roles, folder ) )
+                can_manage = is_admin or ( trans.user and trans.app.security_agent.can_manage_library_item( current_user_roles, folder ) )
+                return_item.update( dict( can_modify=can_modify, can_manage=can_manage ) )
+                if content_item.description:
+                    return_item.update( dict( description=content_item.description ) )
+
+            if content_item.api_type == 'file':
+                #  Is the dataset public or private?
+                #  When both are False the dataset is 'restricted'
+                #  Access rights are checked on the dataset level, not on the ld or ldda level to maintain consistency
+                is_unrestricted = trans.app.security_agent.dataset_is_public( content_item.library_dataset_dataset_association.dataset )
+                if trans.user and trans.app.security_agent.dataset_is_private_to_user( trans, content_item ):
+                    is_private = True
+                else:
+                    is_private = False
+
+                # Can user manage the permissions on the dataset?
+                can_manage = is_admin or (trans.user and trans.app.security_agent.can_manage_dataset( current_user_roles, content_item.library_dataset_dataset_association.dataset ) )
+
+                nice_size = util.nice_size( int( content_item.library_dataset_dataset_association.get_size() ) )
+
+                library_dataset_dict = content_item.to_dict()
+
+                return_item.update( dict( file_ext=library_dataset_dict[ 'file_ext' ],
+                                          date_uploaded=library_dataset_dict[ 'date_uploaded' ],
+                                          is_unrestricted=is_unrestricted,
+                                          is_private=is_private,
+                                          can_manage=can_manage,
+                                          file_size=nice_size
+                                          ) )
+                if content_item.library_dataset_dataset_association.message:
+                    return_item.update( dict( message=content_item.library_dataset_dataset_association.message ) )
+
+            # For every item include the default metadata
+            return_item.update( dict( id=encoded_id,
+                                      type=content_item.api_type,
+                                      name=content_item.name,
+                                      update_time=update_time,
+                                      create_time=create_time,
+                                      deleted=content_item.deleted
+                                      ) )
+            folder_contents.append( return_item )
+
+        # Return the reversed path so it starts with the library node.
+        full_path = self.build_path( trans, folder )[ ::-1 ]
+
+        # Check whether user can add items to the current folder
+        can_add_library_item = is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, folder )
+
+        # Check whether user can modify the current folder
+        can_modify_folder = is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, folder )
+
+        parent_library_id = None
+        if folder.parent_library is not None:
+            parent_library_id = trans.security.encode_id( folder.parent_library.id )
+
+        metadata = dict( full_path=full_path,
+                         can_add_library_item=can_add_library_item,
+                         can_modify_folder=can_modify_folder,
+                         folder_name=folder.name,
+                         folder_description=folder.description,
+                         parent_library_id=parent_library_id )
+        folder_container = dict( metadata=metadata, folder_contents=folder_contents )
+        return folder_container
+
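+    # Illustrative only: the container returned above has the shape
+    #   { 'metadata': { 'full_path': [ ... ], 'folder_name': ..., ... },
+    #     'folder_contents': [ { 'id': 'F...', 'type': 'folder', ... },
+    #                          { 'id': '...',  'type': 'file', ... } ] }
+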
+    def build_path( self, trans, folder ):
+        """
+        Search the path upwards recursively and load the whole route of
+        names and ids for breadcrumb building purposes.
+
+        :param folder: current folder for navigating up
+        :type  folder: Galaxy LibraryFolder
+
+        :returns:   list consisting of full path to the library
+        :type:      list
+        """
+        path_to_root = []
+        # We are almost in root
+        if folder.parent_id is None:
+            path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
+        else:
+            # We add the current folder and traverse up one folder.
+            path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
+            upper_folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder.parent_id )
+            path_to_root.extend( self.build_path( trans, upper_folder ) )
+        return path_to_root
+
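+    # Illustrative only: for a folder nested as root/sub1/sub2, build_path()
+    # returns, bottom-up (hypothetical encoded ids),
+    #   [ ( 'F<id_sub2>', 'sub2' ), ( 'F<id_sub1>', 'sub1' ), ( 'F<id_root>', 'root' ) ]
+    # which index() reverses so breadcrumbs start at the library root.
+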
+    def _load_folder_contents( self, trans, folder, include_deleted ):
+        """
+        Loads the first-level contents of the folder (subfolders and
+        datasets). Deleted items are included only when the flag is set
+        and the current user has permission to undelete them.
+
+        :param  folder:          the folder which contents are being loaded
+        :type   folder:          Galaxy LibraryFolder
+
+        :param  include_deleted: flag, when true the items that are deleted
+            and can be undeleted by current user are shown
+        :type   include_deleted: boolean
+
+        :returns:   a list containing the requested items
+        :type:      list
+        """
+        current_user_roles = trans.get_current_user_roles()
+        is_admin = trans.user_is_admin()
+        content_items = []
+        for subfolder in folder.folders:
+            if subfolder.deleted:
+                if include_deleted:
+                    if is_admin:
+                        # Admins can see all deleted folders.
+                        subfolder.api_type = 'folder'
+                        content_items.append( subfolder )
+                    else:
+                        # Users with MODIFY permissions can see deleted folders.
+                        can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, subfolder )
+                        if can_modify:
+                            subfolder.api_type = 'folder'
+                            content_items.append( subfolder )
+            else:
+                # Undeleted folders are non-restricted for now. The contents are not.
+                # TODO decide on restrictions
+                subfolder.api_type = 'folder'
+                content_items.append( subfolder )
+                # if is_admin:
+                #     subfolder.api_type = 'folder'
+                #     content_items.append( subfolder )
+                # else:
+                #     can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, subfolder )
+                #     if can_access:
+                #         subfolder.api_type = 'folder'
+                #         content_items.append( subfolder )
+
+        for dataset in folder.datasets:
+            if dataset.deleted:
+                if include_deleted:
+                    if is_admin:
+                        # Admins can see all deleted datasets.
+                        dataset.api_type = 'file'
+                        content_items.append( dataset )
+                    else:
+                        # Users with MODIFY permissions on the item can see the deleted item.
+                        can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, dataset )
+                        if can_modify:
+                            dataset.api_type = 'file'
+                            content_items.append( dataset )
+            else:
+                if is_admin:
+                    dataset.api_type = 'file'
+                    content_items.append( dataset )
+                else:
+                    can_access = trans.app.security_agent.can_access_dataset( current_user_roles, dataset.library_dataset_dataset_association.dataset )
+                    if can_access:
+                        dataset.api_type = 'file'
+                        content_items.append( dataset )
+
+        return content_items
+
+    @expose_api
+    def create( self, trans, encoded_folder_id, payload, **kwd ):
+        """
+        * POST /api/folders/{encoded_id}/contents
+            create a new library file from an HDA
+
+        :param  encoded_folder_id:      the encoded id of the folder to import dataset(s) to
+        :type   encoded_folder_id:      an encoded id string
+        :param  payload:    dictionary structure containing:
+            :param from_hda_id:         (optional) the id of an accessible HDA to copy into the library
+            :type  from_hda_id:         encoded id
+            :param ldda_message:        (optional) the new message attribute of the LDDA created
+            :type   ldda_message:       str
+            :param extended_metadata:   (optional) sub-dictionary containing any extended metadata to associate with the item
+            :type  extended_metadata:   dict
+        :type   payload:    dict
+
+        :returns:   a dictionary containing the id, name, and 'show' url of the new item
+        :rtype:     dict
+
+        :raises:    ObjectAttributeInvalidException,
+            InsufficientPermissionsException, ItemAccessibilityException,
+            InternalServerError
+        """
+        encoded_folder_id_16 = self.__decode_library_content_id( trans, encoded_folder_id )
+        from_hda_id, ldda_message = ( payload.pop( 'from_hda_id', None ), payload.pop( 'ldda_message', '' ) )
+        if ldda_message:
+            ldda_message = util.sanitize_html.sanitize_html( ldda_message, 'utf-8' )
+        rval = {}
+        try:
+            decoded_hda_id = self.decode_id( from_hda_id )
+            hda = self.hda_manager.get_owned( decoded_hda_id, trans.user, current_history=trans.history )
+            hda = self.hda_manager.error_if_uploading( hda )
+            folder = self.get_library_folder( trans, encoded_folder_id_16, check_accessible=True )
+
+            library = folder.parent_library
+            if library.deleted:
+                raise exceptions.ObjectAttributeInvalidException()
+            if not self.can_current_user_add_to_library_item( trans, folder ):
+                raise exceptions.InsufficientPermissionsException()
+
+            ldda = self.copy_hda_to_library_folder( trans, hda, folder, ldda_message=ldda_message )
+            update_time = ldda.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+            ldda_dict = ldda.to_dict()
+            rval = trans.security.encode_dict_ids( ldda_dict )
+            rval['update_time'] = update_time
+
+        except exceptions.ObjectAttributeInvalidException:
+            raise exceptions.ObjectAttributeInvalidException( 'You cannot add datasets into deleted library. Undelete it first.' )
+        except exceptions.InsufficientPermissionsException:
+            raise exceptions.InsufficientPermissionsException( 'You do not have proper permissions to add a dataset to a folder with id (%s)' % ( encoded_folder_id ) )
+        except Exception as exc:
+            # TODO handle exceptions better within the mixins
+            if ( ( 'not accessible to the current user' in str( exc ) ) or ( 'You are not allowed to access this dataset' in str( exc ) ) ):
+                raise exceptions.ItemAccessibilityException( 'You do not have access to the requested item' )
+            else:
+                log.exception( exc )
+                raise exceptions.InternalServerError( 'An unknown error occurred. Please try again.' )
+        return rval
+
+    def __decode_library_content_id( self, trans, encoded_folder_id ):
+        """
+        Verifies that the provided id is a properly encoded
+        LibraryFolder id.
+
+        :param  encoded_folder_id:  encoded id of Galaxy LibraryFolder
+        :type   encoded_folder_id:  encoded string
+
+        :returns:   encoded id of the folder with the leading 'F' stripped
+        :type:  string
+
+        :raises:    MalformedId
+        """
+        if ( ( len( encoded_folder_id ) % 16 == 1 ) and encoded_folder_id.startswith( 'F' ) ):
+            return encoded_folder_id[ 1: ]
+        else:
+            raise exceptions.MalformedId( 'Malformed folder id ( %s ) specified, unable to decode.' % str( encoded_folder_id ) )
+
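+    # Illustrative only: a valid value here is the 16-character encoded folder
+    # id with 'F' prepended, e.g. 'F' + 'f2db41e1fa331b3e' (17 characters, and
+    # 17 % 16 == 1), so the check above accepts it and returns the trailing 16.
+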
+    @expose_api
+    def show( self, trans, id, library_id, **kwd ):
+        """
+        GET /api/folders/{encoded_folder_id}/
+        """
+        raise exceptions.NotImplemented( 'Showing the library folder content is not implemented here.' )
+
+    @expose_api
+    def update( self, trans, id, library_id, payload, **kwd ):
+        """
+        PUT /api/folders/{encoded_folder_id}/contents
+        """
+        raise exceptions.NotImplemented( 'Updating the library folder content is not implemented here.' )
diff --git a/lib/galaxy/webapps/galaxy/api/folders.py b/lib/galaxy/webapps/galaxy/api/folders.py
new file mode 100644
index 0000000..21a019c
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/folders.py
@@ -0,0 +1,288 @@
+"""
+API operations on library folders.
+"""
+from galaxy import util
+from galaxy import exceptions
+from galaxy.managers import folders, roles
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web.base.controller import BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class FoldersController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems ):
+
+    def __init__( self, app ):
+        super( FoldersController, self ).__init__( app )
+        self.folder_manager = folders.FolderManager()
+        self.role_manager = roles.RoleManager( app )
+
+    @expose_api
+    def index( self, trans, **kwd ):
+        """
+        *GET /api/folders/
+        This would normally display a list of folders. However, that would
+        be across multiple libraries, so it's not implemented.
+        """
+        raise exceptions.NotImplemented( 'Listing all accessible library folders is not implemented.' )
+
+    @expose_api
+    def show( self, trans, id, **kwd ):
+        """
+        show( self, trans, id, **kwd )
+        *GET /api/folders/{encoded_folder_id}
+
+        Displays information about a folder.
+
+        :param  id:      the folder's encoded id (required)
+        :type   id:      an encoded id string (has to be prefixed by 'F')
+
+        :returns:   dictionary including details of the folder
+        :rtype:     dict
+        """
+        folder_id = self.folder_manager.cut_and_decode( trans, id )
+        folder = self.folder_manager.get( trans, folder_id, check_manageable=False, check_accessible=True )
+        return_dict = self.folder_manager.get_folder_dict( trans, folder )
+        return return_dict
+
+    @expose_api
+    def create( self, trans, encoded_parent_folder_id, payload=None, **kwd ):
+        """
+        *POST /api/folders/{encoded_parent_folder_id}
+            Create a new folder object underneath the one specified in the parameters.
+
+        :param  encoded_parent_folder_id:      (required) the parent folder's id
+        :type   encoded_parent_folder_id:      an encoded id string (should be prefixed by 'F')
+        :param   payload: dictionary structure containing:
+            :param  name:                          (required) the name of the new folder
+            :type   name:                          str
+            :param  description:                   the description of the new folder
+            :type   description:                   str
+        :type   payload:    dict
+        :returns:   information about newly created folder, notably including ID
+        :rtype:     dictionary
+        :raises: RequestParameterMissingException
+        """
+        if payload:
+            kwd.update(payload)
+        name = kwd.get( 'name', None )
+        if name is None:
+            raise exceptions.RequestParameterMissingException( "Missing required parameter 'name'." )
+        description = kwd.get( 'description', '' )
+        decoded_parent_folder_id = self.folder_manager.cut_and_decode( trans, encoded_parent_folder_id )
+        parent_folder = self.folder_manager.get( trans, decoded_parent_folder_id )
+        new_folder = self.folder_manager.create( trans, parent_folder.id, name, description )
+        return self.folder_manager.get_folder_dict( trans, new_folder )
+
+    @expose_api
+    def get_permissions( self, trans, encoded_folder_id, **kwd ):
+        """
+        * GET /api/folders/{id}/permissions
+
+        Load all permissions for the given folder id and return it.
+
+        :param  encoded_folder_id:     the encoded id of the folder
+        :type   encoded_folder_id:     an encoded id string
+
+        :param  scope:      either 'current' or 'available'
+        :type   scope:      string
+
+        :returns:   dictionary with all applicable permissions' values
+        :rtype:     dictionary
+
+        :raises: InsufficientPermissionsException
+        """
+        current_user_roles = trans.get_current_user_roles()
+        is_admin = trans.user_is_admin()
+        decoded_folder_id = self.folder_manager.cut_and_decode( trans, encoded_folder_id )
+        folder = self.folder_manager.get( trans, decoded_folder_id )
+
+        if not ( is_admin or trans.app.security_agent.can_manage_library_item( current_user_roles, folder ) ):
+            raise exceptions.InsufficientPermissionsException( 'You do not have proper permission to access permissions of this folder.' )
+
+        scope = kwd.get( 'scope', None )
+        if scope == 'current' or scope is None:
+            return self.folder_manager.get_current_roles( trans, folder )
+        #  Return roles that are available to select.
+        elif scope == 'available':
+            page = kwd.get( 'page', None )
+            if page is not None:
+                page = int( page )
+            else:
+                page = 1
+            page_limit = kwd.get( 'page_limit', None )
+            if page_limit is not None:
+                page_limit = int( page_limit )
+            else:
+                page_limit = 10
+            query = kwd.get( 'q', None )
+            roles, total_roles = trans.app.security_agent.get_valid_roles( trans, folder, query, page, page_limit )
+            return_roles = []
+            for role in roles:
+                role_id = trans.security.encode_id( role.id )
+                return_roles.append( dict( id=role_id, name=role.name, type=role.type ) )
+            return dict( roles=return_roles, page=page, page_limit=page_limit, total=total_roles )
+        else:
+            raise exceptions.RequestParameterInvalidException( "The value of 'scope' parameter is invalid. Alllowed values: current, available" )
+
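+    # Illustrative only: GET /api/folders/F<id>/permissions?scope=available&page=1&page_limit=10
+    # pages through the selectable roles and returns
+    #   { 'roles': [ { 'id': ..., 'name': ..., 'type': ... }, ... ],
+    #     'page': 1, 'page_limit': 10, 'total': <total_roles> }
+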
+    @expose_api
+    def set_permissions( self, trans, encoded_folder_id, payload=None, **kwd ):
+        """
+        *POST /api/folders/{encoded_folder_id}/permissions
+            Set permissions of the given folder to the given role ids.
+
+        :param  encoded_folder_id:      the encoded id of the folder to set the permissions of
+        :type   encoded_folder_id:      an encoded id string
+        :param   payload: dictionary structure containing:
+            :param  action:            (required) describes what action should be performed
+            :type   action:            string
+            :param  add_ids[]:         list of Role.id defining roles that should have add item permission on the folder
+            :type   add_ids[]:         string or list
+            :param  manage_ids[]:      list of Role.id defining roles that should have manage permission on the folder
+            :type   manage_ids[]:      string or list
+            :param  modify_ids[]:      list of Role.id defining roles that should have modify permission on the folder
+            :type   modify_ids[]:      string or list
+        :type   payload:    dict
+        :returns:   dict of current roles for all available permission types.
+        :rtype:     dictionary
+        :raises: RequestParameterInvalidException, InsufficientPermissionsException, RequestParameterMissingException
+        """
+        if payload:
+            kwd.update(payload)
+        is_admin = trans.user_is_admin()
+        current_user_roles = trans.get_current_user_roles()
+        decoded_folder_id = self.folder_manager.cut_and_decode( trans, encoded_folder_id )
+        folder = self.folder_manager.get( trans, decoded_folder_id )
+        if not ( is_admin or trans.app.security_agent.can_manage_library_item( current_user_roles, folder ) ):
+            raise exceptions.InsufficientPermissionsException( 'You do not have proper permission to modify permissions of this folder.' )
+
+        new_add_roles_ids = util.listify( kwd.get( 'add_ids[]', None ) )
+        new_manage_roles_ids = util.listify( kwd.get( 'manage_ids[]', None ) )
+        new_modify_roles_ids = util.listify( kwd.get( 'modify_ids[]', None ) )
+
+        action = kwd.get( 'action', None )
+        if action is None:
+            raise exceptions.RequestParameterMissingException( 'The mandatory parameter "action" is missing.' )
+        elif action == 'set_permissions':
+
+            # All three permission types share the same set of allowed roles for this folder.
+            valid_roles, total_roles = trans.app.security_agent.get_valid_roles( trans, folder )
+
+            # ADD TO LIBRARY ROLES
+            valid_add_roles = []
+            invalid_add_roles_names = []
+            for role_id in new_add_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                #  Check whether the role is in the set of allowed roles
+                if role in valid_roles:
+                    valid_add_roles.append( role )
+                else:
+                    invalid_add_roles_names.append( role_id )
+            if invalid_add_roles_names:
+                log.warning( "The following roles could not be granted the add library item permission: " + str( invalid_add_roles_names ) )
+
+            # MANAGE FOLDER ROLES
+            valid_manage_roles = []
+            invalid_manage_roles_names = []
+            for role_id in new_manage_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                #  Check whether the role is in the set of allowed roles
+                if role in valid_roles:
+                    valid_manage_roles.append( role )
+                else:
+                    invalid_manage_roles_names.append( role_id )
+            if invalid_manage_roles_names:
+                log.warning( "The following roles could not be granted the manage folder permission: " + str( invalid_manage_roles_names ) )
+
+            # MODIFY FOLDER ROLES
+            valid_modify_roles = []
+            invalid_modify_roles_names = []
+            for role_id in new_modify_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                #  Check whether the role is in the set of allowed roles
+                if role in valid_roles:
+                    valid_modify_roles.append( role )
+                else:
+                    invalid_modify_roles_names.append( role_id )
+            if invalid_modify_roles_names:
+                log.warning( "The following roles could not be granted the modify folder permission: " + str( invalid_modify_roles_names ) )
+
+            permissions = { trans.app.security_agent.permitted_actions.LIBRARY_ADD: valid_add_roles }
+            permissions.update( { trans.app.security_agent.permitted_actions.LIBRARY_MANAGE: valid_manage_roles } )
+            permissions.update( { trans.app.security_agent.permitted_actions.LIBRARY_MODIFY: valid_modify_roles } )
+
+            trans.app.security_agent.set_all_library_permissions( trans, folder, permissions )
+        else:
+            raise exceptions.RequestParameterInvalidException( 'The mandatory parameter "action" has an invalid value. '
+                                                               'Allowed values are: "set_permissions".' )
+        return self.folder_manager.get_current_roles( trans, folder )
+
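+    # A minimal client-side sketch of driving the permissions endpoint above
+    # with the `requests` library. The host, API key, and encoded ids are
+    # hypothetical, and the exact URL mapping (assumed here to be
+    # /api/folders/{encoded_folder_id}/permissions) depends on the deployment:
+    #
+    #     import requests
+    #     payload = { 'action': 'set_permissions',
+    #                 'add_ids[]': [ 'f2db41e1fa331b3e' ],
+    #                 'manage_ids[]': [ 'f2db41e1fa331b3e' ],
+    #                 'modify_ids[]': [ 'f2db41e1fa331b3e' ] }
+    #     requests.post( 'https://galaxy.example.org/api/folders/F0123/permissions',
+    #                    params={ 'key': API_KEY }, json=payload )
+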
+    @expose_api
+    def delete( self, trans, encoded_folder_id, **kwd ):
+        """
+        delete( self, trans, encoded_folder_id, **kwd )
+        * DELETE /api/folders/{encoded_folder_id}
+            marks the folder with the given ``encoded_folder_id`` as `deleted` (or removes the `deleted` mark if the `undelete` param is true)
+
+        .. note:: Currently, only admin users can un/delete folders.
+
+        :param  encoded_folder_id:     the encoded id of the folder to un/delete
+        :type   encoded_folder_id:     an encoded id string
+
+        :param  undelete:    (optional) flag specifying whether the item should be deleted or undeleted; defaults to false
+        :type   undelete:    bool
+
+        :returns:   detailed folder information
+        :rtype:     dictionary
+
+        """
+        folder = self.folder_manager.get( trans, self.folder_manager.cut_and_decode( trans, encoded_folder_id ), True )
+        undelete = util.string_as_bool( kwd.get( 'undelete', False ) )
+        folder = self.folder_manager.delete( trans, folder, undelete )
+        folder_dict = self.folder_manager.get_folder_dict( trans, folder )
+        return folder_dict
+
+    @expose_api
+    def update( self, trans, encoded_folder_id, payload=None, **kwd ):
+        """
+        * PATCH /api/folders/{encoded_folder_id}
+           Update the folder defined by an ``encoded_folder_id`` with the data in the payload.
+
+        .. note:: Currently, only admin users can update library folders. Also the folder must not be `deleted`.
+
+        :param  encoded_folder_id:      the encoded id of the folder
+        :type   encoded_folder_id:      an encoded id string
+        :param  payload: (required) dictionary structure containing::
+            'name':         new folder's name, cannot be empty
+            'description':  new folder's description
+        :type   payload: dict
+        :returns:   detailed folder information
+        :rtype:     dict
+        :raises: RequestParameterMissingException
+        """
+        decoded_folder_id = self.folder_manager.cut_and_decode( trans, encoded_folder_id )
+        folder = self.folder_manager.get( trans, decoded_folder_id )
+        if payload:
+            kwd.update(payload)
+        name = kwd.get( 'name', None )
+        if not name:
+            raise exceptions.RequestParameterMissingException( "Parameter 'name' of library folder is required. You cannot remove it." )
+        description = kwd.get( 'description', None )
+        updated_folder = self.folder_manager.update( trans, folder, name, description )
+        folder_dict = self.folder_manager.get_folder_dict( trans, updated_folder )
+        return folder_dict
+
+    def __decode_id( self, trans, encoded_id, object_name=None ):
+        """
+        Try to decode the id.
+
+        :param  object_name:      Name of the object the id belongs to. (optional)
+        :type   object_name:      str
+        """
+        try:
+            return trans.security.decode_id( encoded_id )
+        except TypeError:
+            raise exceptions.MalformedId( 'Malformed %s id specified, unable to decode.' % ( object_name or '' ) )
+        except ValueError:
+            raise exceptions.MalformedId( 'Wrong %s id specified, unable to decode.' % ( object_name or '' ) )
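+
+    # Note on the parentheses in the messages above: without them,
+    # `'Malformed %s id' % object_name if object_name is not None else ''`
+    # parses as `('Malformed %s id' % object_name) if object_name is not None
+    # else ''`, so a missing object_name would silently produce an empty
+    # error message. A quick illustrative check:
+    #
+    #     >>> name = None
+    #     >>> 'Malformed %s id' % name if name is not None else ''
+    #     ''
+    #     >>> 'Malformed %s id' % ( name or '' )
+    #     'Malformed  id'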
diff --git a/lib/galaxy/webapps/galaxy/api/forms.py b/lib/galaxy/webapps/galaxy/api/forms.py
new file mode 100644
index 0000000..17295d2
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/forms.py
@@ -0,0 +1,75 @@
+"""
+API operations on FormDefinition objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+from galaxy.forms.forms import form_factory
+from xml.etree.ElementTree import XML
+
+log = logging.getLogger( __name__ )
+
+
+class FormDefinitionAPIController( BaseAPIController ):
+
+    @web.expose_api
+    def index( self, trans, **kwd ):
+        """
+        GET /api/forms
+        Displays a collection (list) of forms.
+        """
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "You are not authorized to view the list of forms."
+        query = trans.sa_session.query( trans.app.model.FormDefinition )
+        rval = []
+        for form_definition in query:
+            item = form_definition.to_dict( value_mapper={ 'id': trans.security.encode_id, 'form_definition_current_id': trans.security.encode_id } )
+            item['url'] = url_for( 'form', id=trans.security.encode_id( form_definition.id ) )
+            rval.append( item )
+        return rval
+
+    @web.expose_api
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/forms/{encoded_form_id}
+        Displays information about a form.
+        """
+        form_definition_id = id
+        try:
+            decoded_form_definition_id = trans.security.decode_id( form_definition_id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed form definition id ( %s ) specified, unable to decode." % str( form_definition_id )
+        try:
+            form_definition = trans.sa_session.query( trans.app.model.FormDefinition ).get( decoded_form_definition_id )
+        except Exception:
+            form_definition = None
+        if not form_definition or not trans.user_is_admin():
+            trans.response.status = 400
+            return "Invalid form definition id ( %s ) specified." % str( form_definition_id )
+        item = form_definition.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id, 'form_definition_current_id': trans.security.encode_id } )
+        item['url'] = url_for( 'form', id=form_definition_id )
+        return item
+
+    @web.expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/forms
+        Creates a new form.
+        """
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "You are not authorized to create a new form."
+        xml_text = payload.get( 'xml_text', None )
+        if xml_text is None:
+            trans.response.status = 400
+            return "Missing required parameter 'xml_text'."
+        # TODO: enhance to allow creating from more than just xml
+        form_definition = form_factory.from_elem( XML( xml_text ) )
+        trans.sa_session.add( form_definition )
+        trans.sa_session.flush()
+        encoded_id = trans.security.encode_id( form_definition.id )
+        item = form_definition.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id, 'form_definition_current_id': trans.security.encode_id } )
+        item['url'] = url_for( 'form', id=encoded_id )
+        return [ item ]
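+
+    # A hedged usage sketch for the create endpoint above. The host, admin key,
+    # and form XML are placeholders; the actual form definition schema is
+    # defined elsewhere in Galaxy and is not shown here:
+    #
+    #     import requests
+    #     requests.post( 'https://galaxy.example.org/api/forms',
+    #                    params={ 'key': ADMIN_API_KEY },
+    #                    json={ 'xml_text': '<form ...>...</form>' } )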
diff --git a/lib/galaxy/webapps/galaxy/api/genomes.py b/lib/galaxy/webapps/galaxy/api/genomes.py
new file mode 100644
index 0000000..ba525db
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/genomes.py
@@ -0,0 +1,77 @@
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.framework.helpers import is_true
+
+
+def get_id( base, format ):
+    if format:
+        return "%s.%s" % ( base, format )
+    else:
+        return base
+
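+# For example (illustrative only): get_id( 'hg19', None ) returns 'hg19',
+# while get_id( 'hg19', 'json' ) returns 'hg19.json'.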
+
+class GenomesController( BaseAPIController ):
+    """
+    RESTful controller for interactions with genome data.
+    """
+
+    @web.expose_api_anonymous
+    def index( self, trans, **kwd ):
+        """
+        GET /api/genomes: returns a list of installed genomes
+        """
+
+        return self.app.genomes.get_dbkeys( trans, **kwd )
+
+    @web.json
+    def show( self, trans, id, num=None, chrom=None, low=None, high=None, **kwd ):
+        """
+        GET /api/genomes/{id}
+
+        Returns information about build <id>
+        """
+
+        # Process kwds.
+        id = get_id( id, kwd.get( 'format', None ) )
+        reference = is_true( kwd.get( 'reference', False ) )
+
+        # Return info.
+        rval = None
+        if reference:
+            region = self.app.genomes.reference( trans, dbkey=id, chrom=chrom, low=low, high=high )
+            rval = { 'dataset_type': 'refseq', 'data': region.sequence }
+        else:
+            rval = self.app.genomes.chroms( trans, dbkey=id, num=num, chrom=chrom, low=low )
+        return rval
+
+    @web.expose_api_raw_anonymous
+    def indexes(self, trans, id, **kwd):
+        """
+        GET /api/genomes/{id}/indexes?type={table name}
+
+        Returns all available indexes for a genome id for type={table name}
+        For instance, /api/genomes/hg19/indexes?type=fasta_indexes
+        """
+        index_extensions = {'fasta_indexes': '.fai'}
+        id = get_id( id, kwd.get( 'format', None ) )
+        index_type = kwd.get('type', None)
+
+        tbl_entries = self.app.tool_data_tables.data_tables[index_type].data
+        index_file_name = [x[-1] for x in tbl_entries if id in x].pop()
+
+        with open( index_file_name + index_extensions[ index_type ], mode='r' ) as if_open:
+            return if_open.read()
+
+    @web.expose_api_raw_anonymous
+    def sequences(self, trans, id, num=None, chrom=None, low=None, high=None, **kwd ):
+        """
+        GET /api/genomes/{id}/sequences
+
+        This is a wrapper for accepting sequence requests that
+        want a raw return, not json
+        """
+        id = get_id( id, kwd.get( 'format', None ) )
+        reference = is_true( kwd.get( 'reference', False ) )
+        assert reference
+        region = self.app.genomes.reference( trans, dbkey=id, chrom=chrom, low=low, high=high )
+        return region.sequence
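+
+    # Illustrative raw-sequence fetch (hypothetical host and coordinates),
+    # matching the route documented above:
+    #
+    #     import requests
+    #     r = requests.get( 'https://galaxy.example.org/api/genomes/hg19/sequences',
+    #                       params={ 'reference': 'true', 'chrom': 'chr1',
+    #                                'low': 100000, 'high': 100100 } )
+    #     print r.text  # raw sequence, not JSON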
diff --git a/lib/galaxy/webapps/galaxy/api/group_roles.py b/lib/galaxy/webapps/galaxy/api/group_roles.py
new file mode 100644
index 0000000..f9ac634
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/group_roles.py
@@ -0,0 +1,125 @@
+"""
+API operations on the roles associated with a Group object.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+log = logging.getLogger( __name__ )
+
+
+class GroupRolesAPIController( BaseAPIController ):
+
+    @web.expose_api
+    @web.require_admin
+    def index( self, trans, group_id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}/roles
+        Displays a collection (list) of the group's roles.
+        """
+        decoded_group_id = trans.security.decode_id( group_id )
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+        except Exception:
+            group = None
+        if not group:
+            trans.response.status = 400
+            return "Invalid group id ( %s ) specified." % str( group_id )
+        rval = []
+        try:
+            for gra in group.roles:
+                role = gra.role
+                encoded_id = trans.security.encode_id( role.id )
+                rval.append( dict( id=encoded_id,
+                                   name=role.name,
+                                   url=url_for( 'group_role', group_id=group_id, id=encoded_id, ) ) )
+        except Exception as e:
+            rval = "Error in group API at listing roles"
+            log.error( rval + ": %s" % str(e) )
+            trans.response.status = 500
+        return rval
+
+    @web.expose_api
+    @web.require_admin
+    def show( self, trans, id, group_id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+        Displays information about a group role.
+        """
+        role_id = id
+        decoded_group_id = trans.security.decode_id( group_id )
+        decoded_role_id = trans.security.decode_id( role_id )
+        item = None
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+            role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+            for gra in group.roles:
+                if gra.role == role:
+                    item = dict( id=role_id,
+                                 name=role.name,
+                                 url=url_for( 'group_role', group_id=group_id, id=role_id) )  # TODO Fix This
+            if not item:
+                item = "role %s not in group %s" % (role.name, group.name)
+        except Exception as e:
+            item = "Error in group_role API group %s role %s" % (group.name, role.name)
+            log.error(item + ": %s" % str(e))
+        return item
+
+    @web.expose_api
+    @web.require_admin
+    def update( self, trans, id, group_id, **kwd ):
+        """
+        PUT /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+        Adds a role to a group
+        """
+        role_id = id
+        decoded_group_id = trans.security.decode_id( group_id )
+        decoded_role_id = trans.security.decode_id( role_id )
+        item = None
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+            role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+            for gra in group.roles:
+                if gra.role == role:
+                    item = dict( id=role_id,
+                                 name=role.name,
+                                 url=url_for( 'group_role', group_id=group_id, id=role_id) )
+            if not item:
+                gra = trans.app.model.GroupRoleAssociation( group, role )
+                # Add GroupRoleAssociation
+                trans.sa_session.add( gra )
+                trans.sa_session.flush()
+                item = dict( id=role_id,
+                             name=role.name,
+                             url=url_for( 'group_role', group_id=group_id, id=role_id) )
+        except Exception as e:
+            item = "Error in group_role API Adding role %s to group %s" % (role.name, group.name)
+            log.error(item + ": %s" % str(e))
+        return item
+
+    @web.expose_api
+    @web.require_admin
+    def delete( self, trans, id, group_id, **kwd ):
+        """
+        DELETE /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+        Removes a role from a group
+        """
+        role_id = id
+        decoded_group_id = trans.security.decode_id( group_id )
+        decoded_role_id = trans.security.decode_id( role_id )
+        item = None
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+            role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+            for gra in group.roles:
+                if gra.role == role:
+                    trans.sa_session.delete( gra )
+                    trans.sa_session.flush()
+                    item = dict( id=role_id,
+                                 name=role.name,
+                                 url=url_for( 'group_role', group_id=group_id, id=role_id) )
+            if not item:
+                item = "role %s not in group %s" % (role.name, group.name)
+        except Exception as e:
+            item = "Error in group_role API Removing role %s from group %s" % (role.name, group.name)
+            log.error(item + ": %s" % str(e))
+        return item
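+
+    # Typical admin workflow against these routes (hypothetical host, key,
+    # and encoded ids): add a role to a group with PUT, then verify via the
+    # index listing.
+    #
+    #     import requests
+    #     base = 'https://galaxy.example.org/api/groups/GROUP_ID/roles'
+    #     requests.put( base + '/ROLE_ID', params={ 'key': ADMIN_API_KEY } )
+    #     print requests.get( base, params={ 'key': ADMIN_API_KEY } ).json()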
diff --git a/lib/galaxy/webapps/galaxy/api/group_users.py b/lib/galaxy/webapps/galaxy/api/group_users.py
new file mode 100644
index 0000000..64a107d
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/group_users.py
@@ -0,0 +1,125 @@
+"""
+API operations on the users associated with a Group object.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+log = logging.getLogger( __name__ )
+
+
+class GroupUsersAPIController( BaseAPIController ):
+
+    @web.expose_api
+    @web.require_admin
+    def index( self, trans, group_id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}/users
+        Displays a collection (list) of the group's users.
+        """
+        decoded_group_id = trans.security.decode_id( group_id )
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+        except Exception:
+            group = None
+        if not group:
+            trans.response.status = 400
+            return "Invalid group id ( %s ) specified." % str( group_id )
+        rval = []
+        try:
+            for uga in group.users:
+                user = uga.user
+                encoded_id = trans.security.encode_id( user.id )
+                rval.append( dict( id=encoded_id,
+                                   email=user.email,
+                                   url=url_for( 'group_user', group_id=group_id, id=encoded_id, ) ) )
+        except Exception as e:
+            rval = "Error in group API at listing users"
+            log.error( rval + ": %s" % str(e) )
+            trans.response.status = 500
+        return rval
+
+    @web.expose_api
+    @web.require_admin
+    def show( self, trans, id, group_id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}/users/{encoded_user_id}
+        Displays information about a group user.
+        """
+        user_id = id
+        decoded_group_id = trans.security.decode_id( group_id )
+        decoded_user_id = trans.security.decode_id( user_id )
+        item = None
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+            user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+            for uga in group.users:
+                if uga.user == user:
+                    item = dict( id=user_id,
+                                 email=user.email,
+                                 url=url_for( 'group_user', group_id=group_id, id=user_id) )  # TODO Fix This
+            if not item:
+                item = "user %s not in group %s" % (user.email, group.name)
+        except Exception as e:
+            item = "Error in group_user API group %s user %s" % (group.name, user.email)
+            log.error(item + ": %s" % str(e))
+        return item
+
+    @web.expose_api
+    @web.require_admin
+    def update( self, trans, id, group_id, **kwd ):
+        """
+        PUT /api/groups/{encoded_group_id}/users/{encoded_user_id}
+        Adds a user to a group
+        """
+        user_id = id
+        decoded_group_id = trans.security.decode_id( group_id )
+        decoded_user_id = trans.security.decode_id( user_id )
+        item = None
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+            user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+            for uga in group.users:
+                if uga.user == user:
+                    item = dict( id=user_id,
+                                 email=user.email,
+                                 url=url_for( 'group_user', group_id=group_id, id=user_id) )
+            if not item:
+                uga = trans.app.model.UserGroupAssociation( user, group )
+                # Add UserGroupAssociations
+                trans.sa_session.add( uga )
+                trans.sa_session.flush()
+                item = dict( id=user_id,
+                             email=user.email,
+                             url=url_for( 'group_user', group_id=group_id, id=user_id) )
+        except Exception as e:
+            item = "Error in group_user API Adding user %s to group %s" % (user.email, group.name)
+            log.error(item + ": %s" % str(e))
+        return item
+
+    @web.expose_api
+    @web.require_admin
+    def delete( self, trans, id, group_id, **kwd ):
+        """
+        DELETE /api/groups/{encoded_group_id}/users/{encoded_user_id}
+        Removes a user from a group
+        """
+        user_id = id
+        decoded_group_id = trans.security.decode_id( group_id )
+        decoded_user_id = trans.security.decode_id( user_id )
+        item = None
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+            user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+            for uga in group.users:
+                if uga.user == user:
+                    trans.sa_session.delete( uga )
+                    trans.sa_session.flush()
+                    item = dict( id=user_id,
+                                 email=user.email,
+                                 url=url_for( 'group_user', group_id=group_id, id=user_id) )
+            if not item:
+                item = "user %s not in group %s" % (user.email, group.name)
+        except Exception as e:
+            item = "Error in group_user API Removing user %s from group %s" % (user.email, group.name)
+            log.error(item + ": %s" % str(e))
+        return item
diff --git a/lib/galaxy/webapps/galaxy/api/groups.py b/lib/galaxy/webapps/galaxy/api/groups.py
new file mode 100644
index 0000000..0b362cc
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/groups.py
@@ -0,0 +1,129 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from sqlalchemy import false
+
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+
+log = logging.getLogger( __name__ )
+
+
+class GroupAPIController( BaseAPIController ):
+
+    @web.expose_api
+    @web.require_admin
+    def index( self, trans, **kwd ):
+        """
+        GET /api/groups
+        Displays a collection (list) of groups.
+        """
+        rval = []
+        for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted == false() ):
+            if trans.user_is_admin():
+                item = group.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+                encoded_id = trans.security.encode_id( group.id )
+                item['url'] = url_for( 'group', id=encoded_id )
+                rval.append( item )
+        return rval
+
+    @web.expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/groups
+        Creates a new group.
+        """
+        log.info("groups payload%s\n" % (payload))
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "You are not authorized to create a new group."
+        name = payload.get( 'name', None )
+        if not name:
+            trans.response.status = 400
+            return "Enter a valid name"
+        if trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name == name ).first():
+            trans.response.status = 400
+            return "A group with that name already exists"
+
+        group = trans.app.model.Group( name=name )
+        trans.sa_session.add( group )
+        user_ids = payload.get( 'user_ids', [] )
+        for i in user_ids:
+            log.debug( "user_id: %s (decoded: %s)", i, trans.security.decode_id( i ) )
+        users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+        role_ids = payload.get( 'role_ids', [] )
+        roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]
+        trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users )
+        """
+        # Create the UserGroupAssociations
+        for user in users:
+            trans.app.security_agent.associate_user_group( user, group )
+        # Create the GroupRoleAssociations
+        for role in roles:
+            trans.app.security_agent.associate_group_role( group, role )
+        """
+        trans.sa_session.flush()
+        encoded_id = trans.security.encode_id( group.id )
+        item = group.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id } )
+        item['url'] = url_for( 'group', id=encoded_id )
+        return [ item ]
+
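+    # Example payload for the create endpoint above (all ids are hypothetical
+    # encoded ids; 'user_ids' and 'role_ids' are optional and default to []):
+    #
+    #     { 'name': 'sequencing-core',
+    #       'user_ids': [ 'f2db41e1fa331b3e' ],
+    #       'role_ids': [ '4b186e1fa331b3e2' ] }
+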
+    @web.expose_api
+    @web.require_admin
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}
+        Displays information about a group.
+        """
+        group_id = id
+        try:
+            decoded_group_id = trans.security.decode_id( group_id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed group id ( %s ) specified, unable to decode." % str( group_id )
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+        except Exception:
+            group = None
+        if not group:
+            trans.response.status = 400
+            return "Invalid group id ( %s ) specified." % str( group_id )
+        item = group.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id } )
+        item['url'] = url_for( 'group', id=group_id )
+        item['users_url'] = url_for( 'group_users', group_id=group_id )
+        item['roles_url'] = url_for( 'group_roles', group_id=group_id )
+        return item
+
+    @web.expose_api
+    @web.require_admin
+    def update( self, trans, id, payload, **kwd ):
+        """
+        PUT /api/groups/{encoded_group_id}
+        Modifies a group.
+        """
+        group_id = id
+        try:
+            decoded_group_id = trans.security.decode_id( group_id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed group id ( %s ) specified, unable to decode." % str( group_id )
+        try:
+            group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+        except Exception:
+            group = None
+        if not group:
+            trans.response.status = 400
+            return "Invalid group id ( %s ) specified." % str( group_id )
+        name = payload.get( 'name', None )
+        if name:
+            group.name = name
+            trans.sa_session.add(group)
+        user_ids = payload.get( 'user_ids', [] )
+        users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+        role_ids = payload.get( 'role_ids', [] )
+        roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]
+        trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users, delete_existing_assocs=False )
+        trans.sa_session.flush()
diff --git a/lib/galaxy/webapps/galaxy/api/histories.py b/lib/galaxy/webapps/galaxy/api/histories.py
new file mode 100644
index 0000000..9b16fb0
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -0,0 +1,484 @@
+"""
+API operations on a history.
+
+.. seealso:: :class:`galaxy.model.History`
+"""
+
+from sqlalchemy import true, false
+
+from galaxy import exceptions
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web import _future_expose_api_raw as expose_api_raw
+
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import ExportsHistoryMixin
+from galaxy.web.base.controller import ImportsHistoryMixin
+
+from galaxy.managers import histories, citations, users
+
+from galaxy import util
+from galaxy.util import string_as_bool
+from galaxy.util import restore_text
+from galaxy.web import url_for
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class HistoriesController( BaseAPIController, ExportsHistoryMixin, ImportsHistoryMixin ):
+
+    def __init__( self, app ):
+        super( HistoriesController, self ).__init__( app )
+        self.citations_manager = citations.CitationsManager( app )
+        self.user_manager = users.UserManager( app )
+        self.history_manager = histories.HistoryManager( app )
+        self.history_serializer = histories.HistorySerializer( app )
+        self.history_deserializer = histories.HistoryDeserializer( app )
+        self.history_filters = histories.HistoryFilters( app )
+
+    @expose_api_anonymous
+    def index( self, trans, deleted='False', **kwd ):
+        """
+        index( trans, deleted='False' )
+        * GET /api/histories:
+            return undeleted histories for the current user
+        * GET /api/histories/deleted:
+            return deleted histories for the current user
+        .. note:: Anonymous users are allowed to get their current history
+
+        :type   deleted: boolean
+        :param  deleted: if True, show only deleted histories, if False, non-deleted
+
+        :rtype:     list
+        :returns:   list of dictionaries containing summary history information
+
+        The following are optional parameters:
+            view:   string, one of ('summary','detailed'), defaults to 'summary'
+                    controls which set of properties to return
+            keys:   comma separated strings, unused by default
+                    keys/names of individual properties to return
+
+        If neither keys nor view is sent, the default view (set of keys) is returned.
+        If both a view and keys are sent, the key list and the view's keys are
+        combined.
+        If keys are sent and no view, only those properties in keys are returned.
+
+        For which properties are available see:
+            galaxy/managers/histories/HistorySerializer
+
+        The list returned can be filtered by using two optional parameters:
+            q:      string, generally a property name to filter by followed
+                    by an (often optional) hyphen and operator string.
+            qv:     string, the value to filter by
+
+        ..example:
+            To filter the list to only those created after 2015-01-29,
+            the query string would look like:
+                '?q=create_time-gt&qv=2015-01-29'
+
+            Multiple filters can be sent in using multiple q/qv pairs:
+                '?q=create_time-gt&qv=2015-01-29&q=tag-has&qv=experiment-1'
+
+        The list returned can be paginated using two optional parameters:
+            limit:  integer, defaults to no value and no limit (return all)
+                    how many items to return
+            offset: integer, defaults to 0 and starts at the beginning
+                    skip the first ( offset - 1 ) items and begin returning
+                    at the Nth item
+
+        ..example:
+            limit and offset can be combined. Skip the first two and return five:
+                '?limit=5&offset=3'
+
+        The list returned can be ordered using the optional parameter:
+            order:  string containing one of the valid ordering attributes followed
+                    (optionally) by '-asc' or '-dsc' for ascending and descending
+                    order respectively. Orders can be stacked as a comma-
+                    separated list of values.
+
+        ..example:
+            To sort by name descending then create time descending:
+                '?order=name-dsc,create_time'
+
+        The ordering attributes and their default orders are:
+            create_time defaults to 'create_time-dsc'
+            update_time defaults to 'update_time-dsc'
+            name    defaults to 'name-asc'
+
+        'order' defaults to 'create_time-dsc'
+        """
+        serialization_params = self._parse_serialization_params( kwd, 'summary' )
+        limit, offset = self.parse_limit_offset( kwd )
+        filter_params = self.parse_filter_params( kwd )
+
+        # bail early with current history if user is anonymous
+        current_user = self.user_manager.current_user( trans )
+        if self.user_manager.is_anonymous( current_user ):
+            current_history = self.history_manager.get_current( trans )
+            if not current_history:
+                return []
+            # note: ignores filters, limit, offset
+            return [ self.history_serializer.serialize_to_view( current_history,
+                     user=current_user, trans=trans, **serialization_params ) ]
+
+        filters = []
+        # support the old default of not-returning/filtering-out deleted histories
+        filters += self._get_deleted_filter( deleted, filter_params )
+        # users are limited to requesting only their own histories (here)
+        filters += [ self.app.model.History.user == current_user ]
+        # and any sent in from the query string
+        filters += self.history_filters.parse_filters( filter_params )
+
+        order_by = self._parse_order_by( kwd.get( 'order', 'create_time-dsc' ) )
+        histories = self.history_manager.list( filters=filters, order_by=order_by, limit=limit, offset=offset )
+
+        rval = []
+        for history in histories:
+            history_dict = self.history_serializer.serialize_to_view( history, user=trans.user, trans=trans, **serialization_params )
+            rval.append( history_dict )
+        return rval
+
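+    # Putting the documented q/qv, pagination, and ordering parameters together
+    # in one illustrative request (hypothetical host):
+    #
+    #     import requests
+    #     requests.get( 'https://galaxy.example.org/api/histories',
+    #                   params=[ ( 'q', 'create_time-gt' ), ( 'qv', '2015-01-29' ),
+    #                            ( 'limit', 5 ), ( 'offset', 3 ),
+    #                            ( 'order', 'name-dsc,create_time' ) ] )
+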
+    def _get_deleted_filter( self, deleted, filter_params ):
+        # TODO: this should all be removed (along with the default) in v2
+        # support the old default of not-returning/filtering-out deleted histories
+        try:
+            # the consumer must explicitly ask for both deleted and non-deleted
+            #   but pull it from the parsed params (as the filter system will error on None)
+            deleted_filter_index = filter_params.index( ( 'deleted', 'eq', 'None' ) )
+            filter_params.pop( deleted_filter_index )
+            return []
+        except ValueError:
+            pass
+
+        # the deleted string bool was also used as an 'include deleted' flag
+        if deleted in ( 'True', 'true' ):
+            return [ self.app.model.History.deleted == true() ]
+
+        # the third option not handled here is 'return only deleted'
+        #   if this is passed in (in the form below), simply return and let the filter system handle it
+        if ( 'deleted', 'eq', 'True' ) in filter_params:
+            return []
+
+        # otherwise, do the default filter of removing the deleted histories
+        return [ self.app.model.History.deleted == false() ]
+
+    def _parse_order_by( self, order_by_string ):
+        ORDER_BY_SEP_CHAR = ','
+        manager = self.history_manager
+        if ORDER_BY_SEP_CHAR in order_by_string:
+            return [ manager.parse_order_by( o ) for o in order_by_string.split( ORDER_BY_SEP_CHAR ) ]
+        return manager.parse_order_by( order_by_string )
+
+    @expose_api_anonymous
+    def show( self, trans, id, deleted='False', **kwd ):
+        """
+        show( trans, id, deleted='False' )
+        * GET /api/histories/{id}:
+            return the history with ``id``
+        * GET /api/histories/deleted/{id}:
+            return the deleted history with ``id``
+        * GET /api/histories/most_recently_used:
+            return the most recently used history
+
+        :type   id:      an encoded id string
+        :param  id:      the encoded id of the history to query or the string 'most_recently_used'
+        :type   deleted: boolean
+        :param  deleted: if True, allow information on a deleted history to be shown.
+
+        :param  keys: same as the use of `keys` in the `index` function above
+        :param  view: same as the use of `view` in the `index` function above
+
+        :rtype:     dictionary
+        :returns:   detailed history information
+        """
+        history_id = id
+        deleted = string_as_bool( deleted )
+
+        if history_id == "most_recently_used":
+            history = self.history_manager.most_recent( trans.user,
+                filters=( self.app.model.History.deleted == false() ), current_history=trans.history )
+        else:
+            history = self.history_manager.get_accessible( self.decode_id( history_id ), trans.user, current_history=trans.history )
+
+        return self.history_serializer.serialize_to_view( history,
+            user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
+    @expose_api_anonymous
+    def citations( self, trans, history_id, **kwd ):
+        """
+        """
+        history = self.history_manager.get_accessible( self.decode_id( history_id ), trans.user, current_history=trans.history )
+        tool_ids = set()
+        for dataset in history.datasets:
+            job = dataset.creating_job
+            if not job:
+                continue
+            tool_id = job.tool_id
+            if not tool_id:
+                continue
+            tool_ids.add(tool_id)
+        return map( lambda citation: citation.to_dict( "bibtex" ),
+                    self.citations_manager.citations_for_tool_ids( tool_ids ) )
+
+    @expose_api_anonymous_and_sessionless
+    def published( self, trans, **kwd ):
+        """
+        published( self, trans, **kwd )
+        * GET /api/histories/published:
+            return all histories that are published
+
+        :rtype:     list
+        :returns:   list of dictionaries containing summary history information
+
+        Follows the same filtering logic as the index() method above.
+        """
+        limit, offset = self.parse_limit_offset( kwd )
+        filter_params = self.parse_filter_params( kwd )
+        filters = self.history_filters.parse_filters( filter_params )
+        order_by = self._parse_order_by( kwd.get( 'order', 'create_time-dsc' ) )
+        histories = self.history_manager.list_published( filters=filters, order_by=order_by, limit=limit, offset=offset )
+        rval = []
+        for history in histories:
+            history_dict = self.history_serializer.serialize_to_view( history, user=trans.user, trans=trans,
+                **self._parse_serialization_params( kwd, 'summary' ) )
+            rval.append( history_dict )
+        return rval
+
+    # TODO: does this need to be anonymous_and_sessionless? Not just expose_api?
+    @expose_api_anonymous_and_sessionless
+    def shared_with_me( self, trans, **kwd ):
+        """
+        shared_with_me( self, trans, **kwd )
+        * GET /api/histories/shared_with_me:
+            return all histories that are shared with the current user
+
+        :rtype:     list
+        :returns:   list of dictionaries containing summary history information
+
+        Follows the same filtering logic as the index() method above.
+        """
+        current_user = trans.user
+        limit, offset = self.parse_limit_offset( kwd )
+        filter_params = self.parse_filter_params( kwd )
+        filters = self.history_filters.parse_filters( filter_params )
+        order_by = self._parse_order_by( kwd.get( 'order', 'create_time-dsc' ) )
+        histories = self.history_manager.list_shared_with( current_user,
+            filters=filters, order_by=order_by, limit=limit, offset=offset )
+        rval = []
+        for history in histories:
+            history_dict = self.history_serializer.serialize_to_view( history, user=current_user, trans=trans,
+                **self._parse_serialization_params( kwd, 'summary' ) )
+            rval.append( history_dict )
+        return rval
+
+    @expose_api_anonymous
+    def create( self, trans, payload, **kwd ):
+        """
+        create( trans, payload )
+        * POST /api/histories:
+            create a new history
+
+        :type   payload: dict
+        :param  payload: (optional) dictionary structure containing:
+            * name:             the new history's name
+            * history_id:       the id of the history to copy
+            * all_datasets:     copy deleted hdas/hdcas? 'True' or 'False', defaults to True
+            * archive_source:   the url that will generate the archive to import
+            * archive_type:     'url' (default)
+
+        :param  keys: same as the use of `keys` in the `index` function above
+        :param  view: same as the use of `view` in the `index` function above
+
+        :rtype:     dict
+        :returns:   element view of new history
+        """
+        hist_name = None
+        if payload.get( 'name', None ):
+            hist_name = restore_text( payload['name'] )
+        copy_this_history_id = payload.get( 'history_id', None )
+
+        all_datasets = util.string_as_bool( payload.get( 'all_datasets', True ) )
+
+        if "archive_source" in payload:
+            archive_source = payload[ "archive_source" ]
+            archive_type = payload.get( "archive_type", "url" )
+            self.queue_history_import( trans, archive_type=archive_type, archive_source=archive_source )
+            return {}
+
+        new_history = None
+        # if a history id was passed, copy that history
+        if copy_this_history_id:
+            decoded_id = self.decode_id( copy_this_history_id )
+            original_history = self.history_manager.get_accessible( decoded_id, trans.user, current_history=trans.history )
+            hist_name = hist_name or ( "Copy of '%s'" % original_history.name )
+            new_history = original_history.copy( name=hist_name, target_user=trans.user, all_datasets=all_datasets )
+
+        # otherwise, create a new empty history
+        else:
+            new_history = self.history_manager.create( user=trans.user, name=hist_name )
+
+        trans.sa_session.add( new_history )
+        trans.sa_session.flush()
+
+        # an anonymous user can only have one history
+        if self.user_manager.is_anonymous( trans.user ):
+            self.history_manager.set_current( trans, new_history )
+
+        return self.history_serializer.serialize_to_view( new_history,
+            user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
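+    # The three creation modes sketched as payloads (illustrative values only):
+    #
+    #     { 'name': 'New analysis' }                          # empty history
+    #     { 'history_id': 'f2db41e1fa331b3e',
+    #       'all_datasets': 'False' }                         # copy a history
+    #     { 'archive_source': 'https://example.org/h.tar.gz',
+    #       'archive_type': 'url' }                           # import an archive
+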
+    @expose_api
+    def delete( self, trans, id, **kwd ):
+        """
+        delete( self, trans, id, **kwd )
+        * DELETE /api/histories/{id}
+            delete the history with the given ``id``
+        .. note:: Stops all active jobs in the history if purge is set.
+
+        :type   id:     str
+        :param  id:     the encoded id of the history to delete
+        :type   kwd:    dict
+        :param  kwd:    (optional) dictionary structure containing extra parameters
+
+        You can purge a history, removing all of its datasets from disk (if unshared),
+        by passing in ``purge=True`` in the url.
+
+        :param  keys: same as the use of `keys` in the `index` function above
+        :param  view: same as the use of `view` in the `index` function above
+
+        :rtype:     dict
+        :returns:   the deleted or purged history
+        """
+        history_id = id
+        # a request body is optional here
+        purge = False
+        if 'purge' in kwd:
+            purge = string_as_bool( kwd.get( 'purge' ) )
+        # for backwards compat, keep the payload sub-dictionary
+        if kwd.get( 'payload', None ):
+            purge = string_as_bool( kwd['payload'].get( 'purge', False ) )
+
+        history = self.history_manager.get_owned( self.decode_id( history_id ), trans.user, current_history=trans.history )
+        self.history_manager.delete( history )
+        if purge:
+            self.history_manager.purge( history )
+
+        return self.history_serializer.serialize_to_view( history,
+            user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
+    @expose_api
+    def undelete( self, trans, id, **kwd ):
+        """
+        undelete( self, trans, id, **kwd )
+        * POST /api/histories/deleted/{id}/undelete:
+            undelete history (that hasn't been purged) with the given ``id``
+
+        :type   id:     str
+        :param  id:     the encoded id of the history to undelete
+
+        :param  keys: same as the use of `keys` in the `index` function above
+        :param  view: same as the use of `view` in the `index` function above
+
+        :rtype:     str
+        :returns:   'OK' if the history was undeleted
+        """
+        # TODO: remove at v2
+        history_id = id
+        history = self.history_manager.get_owned( self.decode_id( history_id ), trans.user, current_history=trans.history )
+        self.history_manager.undelete( history )
+
+        return self.history_serializer.serialize_to_view( history,
+            user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
+    @expose_api
+    def update( self, trans, id, payload, **kwd ):
+        """
+        update( self, trans, id, payload, **kwd )
+        * PUT /api/histories/{id}
+            updates the values for the history with the given ``id``
+
+        :type   id:      str
+        :param  id:      the encoded id of the history to update
+        :type   payload: dict
+        :param  payload: a dictionary containing any or all the
+            fields in :func:`galaxy.model.History.to_dict` and/or the following:
+
+            * annotation: an annotation for the history
+
+        :param  keys: same as the use of `keys` in the `index` function above
+        :param  view: same as the use of `view` in the `index` function above
+
+        :rtype:     dict
+        :returns:   an error object if an error occurred or a dictionary containing
+            any values that were different from the original and, therefore, updated
+        """
+        # TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks)
+        history = self.history_manager.get_owned( self.decode_id( id ), trans.user, current_history=trans.history )
+
+        self.history_deserializer.deserialize( history, payload, user=trans.user, trans=trans )
+        return self.history_serializer.serialize_to_view( history,
+            user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
+    @expose_api
+    def archive_export( self, trans, id, **kwds ):
+        """
+        archive_export( self, trans, id, **kwds )
+        * PUT /api/histories/{id}/exports:
+            start job (if needed) to create history export for corresponding
+            history.
+
+        :type   id:     str
+        :param  id:     the encoded id of the history to export
+
+        :rtype:     dict
+        :returns:   object containing url to fetch export from.
+        """
+        # PUT instead of POST because multiple requests should just result
+        # in one object being created.
+        history = self.history_manager.get_accessible( self.decode_id( id ), trans.user, current_history=trans.history )
+        jeha = history.latest_export
+        up_to_date = jeha and jeha.up_to_date
+        if 'force' in kwds:
+            up_to_date = False  # Temp hack to force rebuild every time during dev
+        if not up_to_date:
+            # Need to create new JEHA + job.
+            gzip = kwds.get( "gzip", True )
+            include_hidden = kwds.get( "include_hidden", False )
+            include_deleted = kwds.get( "include_deleted", False )
+            self.queue_history_export( trans, history, gzip=gzip, include_hidden=include_hidden, include_deleted=include_deleted )
+
+        if up_to_date and jeha.ready:
+            jeha_id = trans.security.encode_id( jeha.id )
+            return dict( download_url=url_for( "history_archive_download", id=id, jeha_id=jeha_id ) )
+        else:
+            # Valid request, just resource is not ready yet.
+            trans.response.status = "202 Accepted"
+            return ''
+
+    @expose_api_raw
+    def archive_download( self, trans, id, jeha_id, **kwds ):
+        """
+        archive_download( self, trans, id, jeha_id, **kwds )
+        * GET /api/histories/{id}/exports/{jeha_id}:
+            If ready and available, return raw contents of exported history.
+            Use/poll "PUT /api/histories/{id}/exports" to initiate the creation
+            of such an export - when ready that route will return 200 status
+            code (instead of 202) with a JSON dictionary containing a
+            `download_url`.
+        """
+        # It seems silly to put jeha_id in here, but we want GET to be
+        # immutable, and this accomplishes that.
+        history = self.history_manager.get_accessible( self.decode_id( id ), trans.user, current_history=trans.history )
+        matching_exports = filter( lambda e: trans.security.encode_id( e.id ) == jeha_id, history.exports )
+        if not matching_exports:
+            raise exceptions.ObjectNotFound()
+
+        jeha = matching_exports[ 0 ]
+        if not jeha.ready:
+            # The user should not have been given this URL; the PUT export
+            # request should have returned a 202 instead.
+            raise exceptions.MessageException( "Export not available or not yet ready." )
+
+        return self.serve_ready_history_export( trans, jeha )
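+
+    # Export/download flow sketched end-to-end (hypothetical host and ids):
+    # poll PUT .../exports until it returns 200 with a download_url, then GET it.
+    #
+    #     import requests, time
+    #     url = 'https://galaxy.example.org/api/histories/HIST_ID/exports'
+    #     while True:
+    #         r = requests.put( url, params={ 'key': API_KEY } )
+    #         if r.status_code == 200:
+    #             break
+    #         time.sleep( 5 )  # 202 Accepted: export job still running
+    #     archive = requests.get( 'https://galaxy.example.org' +
+    #                             r.json()[ 'download_url' ] )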
diff --git a/lib/galaxy/webapps/galaxy/api/history_contents.py b/lib/galaxy/webapps/galaxy/api/history_contents.py
new file mode 100644
index 0000000..d7fc43b
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -0,0 +1,723 @@
+"""
+API operations on the contents of a history.
+"""
+import os
+import re
+
+from galaxy import exceptions
+from galaxy import util
+from galaxy.util.streamball import StreamBall
+from galaxy.util.json import safe_dumps
+
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_raw as expose_api_raw
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesLibraryMixin
+from galaxy.web.base.controller import UsesLibraryMixinItems
+from galaxy.web.base.controller import UsesTagsMixin
+
+from galaxy.managers import histories
+from galaxy.managers import history_contents
+from galaxy.managers import hdas
+from galaxy.managers import hdcas
+from galaxy.managers import folders
+from galaxy.managers.collections_util import api_payload_to_create_params
+from galaxy.managers.collections_util import dictify_dataset_collection_instance
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class HistoryContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems, UsesTagsMixin ):
+
+    def __init__( self, app ):
+        super( HistoryContentsController, self ).__init__( app )
+        self.hda_manager = hdas.HDAManager( app )
+        self.history_manager = histories.HistoryManager( app )
+        self.history_contents_manager = history_contents.HistoryContentsManager( app )
+        self.folder_manager = folders.FolderManager()
+        self.hda_serializer = hdas.HDASerializer( app )
+        self.hda_deserializer = hdas.HDADeserializer( app )
+        self.hdca_serializer = hdcas.HDCASerializer( app )
+        self.history_contents_filters = history_contents.HistoryContentsFilters( app )
+
+    @expose_api_anonymous
+    def index( self, trans, history_id, ids=None, v=None, **kwd ):
+        """
+        index( self, trans, history_id, ids=None, **kwd )
+        * GET /api/histories/{history_id}/contents
+            return a list of HDA data for the history with the given ``id``
+        .. note:: Anonymous users are allowed to get their current history contents
+
+        If ids is not given, index returns a list of *summary* objects for
+        every HDA associated with the given `history_id`.
+
+        If ids is given, index returns a *more complete* json object for each
+        HDA in the ids list.
+
+        :type   history_id: str
+        :param  history_id: encoded id string of the HDA's History
+        :type   ids:        str
+        :param  ids:        (optional) a comma separated list of encoded `HDA` ids
+        :param  types:      (optional) kinds of contents to index ('dataset'
+                            and/or 'dataset_collection').
+        :type   types:      str
+
+        :rtype:     list
+        :returns:   dictionaries containing summary or detailed HDA information
+        """
+        if v == 'dev':
+            return self.__index_v2( trans, history_id, **kwd )
+
+        rval = []
+
+        history = self.history_manager.get_accessible( self.decode_id( history_id ), trans.user, current_history=trans.history )
+
+        # Allow passing in type or types - for continuity rest of methods
+        # take in type - but this one can be passed multiple types and
+        # type=dataset,dataset_collection is a bit silly.
+        types = kwd.get( 'type', kwd.get( 'types', None ) ) or []
+        if types:
+            types = util.listify(types)
+        else:
+            types = [ 'dataset', "dataset_collection" ]
+
+        contents_kwds = { 'types': types }
+        if ids:
+            ids = map( lambda id: self.decode_id( id ), ids.split( ',' ) )
+            contents_kwds[ 'ids' ] = ids
+            # If explicit ids given, always used detailed result.
+            details = 'all'
+        else:
+            contents_kwds[ 'deleted' ] = kwd.get( 'deleted', None )
+            contents_kwds[ 'visible' ] = kwd.get( 'visible', None )
+            # details param allows a mixed set of summary and detailed hdas
+            # Ever more convoluted due to backwards compatibility; 'details'
+            # should be considered deprecated in favor of the more specific
+            # 'dataset_details' (and the to-be-implemented 'dataset_collection_details').
+            details = kwd.get( 'details', None ) or kwd.get( 'dataset_details', None ) or []
+            if details and details != 'all':
+                details = util.listify( details )
+
+        for content in history.contents_iter( **contents_kwds ):
+            encoded_content_id = trans.security.encode_id( content.id )
+            detailed = details == 'all' or ( encoded_content_id in details )
+
+            if isinstance( content, trans.app.model.HistoryDatasetAssociation ):
+                view = 'detailed' if detailed else 'summary'
+                hda_dict = self.hda_serializer.serialize_to_view( content, view=view, user=trans.user, trans=trans )
+                rval.append( hda_dict )
+
+            elif isinstance( content, trans.app.model.HistoryDatasetCollectionAssociation ):
+                view = 'element' if detailed else 'collection'
+                collection_dict = self.__collection_dict( trans, content, view=view )
+                rval.append( collection_dict )
+
+        return rval
+
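+    # Mixing summary and detailed results in one index call (illustrative
+    # encoded ids; 'dataset_details' selects which contents come back detailed):
+    #
+    #     GET /api/histories/f2db41e1fa331b3e/contents?dataset_details=bbd44e69cb8906b5
+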
+    def __collection_dict( self, trans, dataset_collection_instance, view="collection" ):
+        return dictify_dataset_collection_instance( dataset_collection_instance,
+            security=trans.security, parent=dataset_collection_instance.history, view=view )
+
+    @expose_api_anonymous
+    def show( self, trans, id, history_id, **kwd ):
+        """
+        show( self, trans, id, history_id, **kwd )
+        * GET /api/histories/{history_id}/contents/{id}
+            return detailed information about an HDA within a history
+        .. note:: Anonymous users are allowed to get their current history contents
+
+        :type   id:         str
+        :param  id:         the encoded id of the HDA to return
+        :type   history_id: str
+        :param  history_id: encoded id string of the HDA's History
+
+        :rtype:     dict
+        :returns:   dictionary containing detailed HDA information
+        """
+        contents_type = kwd.get('type', 'dataset')
+        if contents_type == 'dataset':
+            return self.__show_dataset( trans, id, **kwd )
+        elif contents_type == 'dataset_collection':
+            return self.__show_dataset_collection( trans, id, history_id, **kwd )
+        else:
+            return self.__handle_unknown_contents_type( trans, contents_type )
+
+    def __show_dataset( self, trans, id, **kwd ):
+        hda = self.hda_manager.get_accessible( self.decode_id( id ), trans.user )
+        return self.hda_serializer.serialize_to_view( hda,
+                                                      user=trans.user,
+                                                      trans=trans,
+                                                      **self._parse_serialization_params( kwd, 'detailed' ) )
+
+    def __show_dataset_collection( self, trans, id, history_id, **kwd ):
+        try:
+            service = trans.app.dataset_collections_service
+            dataset_collection_instance = service.get_dataset_collection_instance(
+                trans=trans,
+                instance_type='history',
+                id=id,
+            )
+            return self.__collection_dict( trans, dataset_collection_instance, view="element" )
+        except Exception as e:
+            log.exception( "Error in history API at listing dataset collection: %s", e )
+            trans.response.status = 500
+            return { 'error': str( e ) }
+
+    @expose_api_anonymous
+    def create( self, trans, history_id, payload, **kwd ):
+        """
+        create( self, trans, history_id, payload, **kwd )
+        * POST /api/histories/{history_id}/contents/{type}
+            create a new HDA by copying an accessible LibraryDataset
+
+        :type   history_id: str
+        :param  history_id: encoded id string of the new HDA's History
+        :type   type: str
+        :param  type: Type of history content - 'dataset' (default) or
+                      'dataset_collection'.
+        :type   payload:    dict
+        :param  payload:    dictionary structure containing::
+            copy from library (for type 'dataset'):
+            'source'    = 'library'
+            'content'   = [the encoded id from the library dataset]
+
+            copy from library folder (for type 'dataset'):
+            'source'    = 'library_folder'
+            'content'   = [the encoded id from the library folder]
+
+            copy from history dataset (for type 'dataset'):
+            'source'    = 'hda'
+            'content'   = [the encoded id from the HDA]
+
+            copy from history dataset collection (for type 'dataset_collection')
+            'source'    = 'hdca'
+            'content'   = [the encoded id from the HDCA]
+
+            create new history dataset collection (for type 'dataset_collection')
+            'source'              = 'new_collection' (default 'source' if type is
+                                    'dataset_collection' - no need to specify this)
+            'collection_type'     = For example, "list", "paired", "list:paired".
+            'name'                = Name of new dataset collection.
+            'element_identifiers' = Recursive list structure defining collection.
+                                    Each element must have 'src' which can be
+                                    'hda', 'ldda', 'hdca', or 'new_collection',
+                                    as well as a 'name' which is the name of
+                                    element (e.g. "forward" or "reverse" for
+                                    paired datasets, or arbitrary sample names
+                                    for instance for lists). For all src's except
+                                    'new_collection' - an encoded 'id' attribute
+                                    must be included with the element as well.
+                                    'new_collection' sources must define a
+                                    'collection_type' and their own list of
+                                    (potentially) nested 'element_identifiers'.
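+
+            For example (an illustrative payload; the encoded ids below are
+            placeholders), creating a new paired collection from two existing
+            HDAs might send::
+
+                {
+                    "type": "dataset_collection",
+                    "collection_type": "paired",
+                    "name": "my paired collection",
+                    "element_identifiers": [
+                        {"src": "hda", "id": "<encoded hda id>", "name": "forward"},
+                        {"src": "hda", "id": "<encoded hda id>", "name": "reverse"}
+                    ]
+                }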
+
+        .. note::
+            Currently, a user can only copy an HDA from a history that the user owns.
+
+        :rtype:     dict
+        :returns:   dictionary containing detailed information for the new HDA
+        """
+        # TODO: Flesh out the create new collection documentation above; see
+        # also bioblend and the API tests for more specific examples.
+
+        history = self.history_manager.get_owned( self.decode_id( history_id ), trans.user,
+                                                  current_history=trans.history )
+
+        type = payload.get( 'type', 'dataset' )
+        if type == 'dataset':
+            source = payload.get( 'source', None )
+            if source == 'library_folder':
+                return self.__create_datasets_from_library_folder( trans, history, payload, **kwd )
+            else:
+                return self.__create_dataset( trans, history, payload, **kwd )
+        elif type == 'dataset_collection':
+            return self.__create_dataset_collection( trans, history, payload, **kwd )
+        else:
+            return self.__handle_unknown_contents_type( trans, type )
+
+    def __create_dataset( self, trans, history, payload, **kwd ):
+        source = payload.get( 'source', None )
+        if source not in ( 'library', 'hda' ):
+            raise exceptions.RequestParameterInvalidException(
+                "'source' must be either 'library' or 'hda': %s" % ( source ) )
+        content = payload.get( 'content', None )
+        if content is None:
+            raise exceptions.RequestParameterMissingException( "'content' id of lda or hda is missing" )
+
+        # copy from library dataset
+        hda = None
+        if source == 'library':
+            ld = self.get_library_dataset( trans, content, check_ownership=False, check_accessible=False )
+            # TODO: why would get_library_dataset NOT return a library dataset?
+            if type( ld ) is not trans.app.model.LibraryDataset:
+                raise exceptions.RequestParameterInvalidException(
+                    "Library content id ( %s ) is not a dataset" % content )
+            # insert into history
+            hda = ld.library_dataset_dataset_association.to_history_dataset_association( history, add_to_history=True )
+
+        # copy an existing, accessible hda
+        elif source == 'hda':
+            unencoded_hda_id = self.decode_id( content )
+            original = self.hda_manager.get_accessible( unencoded_hda_id, trans.user )
+            # check for access on history that contains the original hda as well
+            self.history_manager.error_unless_accessible( original.history, trans.user, current_history=trans.history )
+            hda = self.hda_manager.copy( original, history=history )
+
+        trans.sa_session.flush()
+        if not hda:
+            return None
+        return self.hda_serializer.serialize_to_view( hda,
+            user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
+    def __create_datasets_from_library_folder( self, trans, history, payload, **kwd ):
+        rval = []
+
+        source = payload.get( 'source', None )
+        if source == 'library_folder':
+            content = payload.get( 'content', None )
+            if content is None:
+                raise exceptions.RequestParameterMissingException( "'content' id of lda or hda is missing" )
+
+            folder_id = self.folder_manager.cut_and_decode( trans, content )
+            folder = self.folder_manager.get( trans, folder_id )
+
+            current_user_roles = trans.get_current_user_roles()
+
+            def traverse( folder ):
+                admin = trans.user_is_admin()
+                rval = []
+                for subfolder in folder.active_folders:
+                    if not admin:
+                        can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, subfolder )
+                    if (admin or can_access) and not subfolder.deleted:
+                        rval.extend( traverse( subfolder ) )
+                for ld in folder.datasets:
+                    if not admin:
+                        can_access = trans.app.security_agent.can_access_dataset(
+                            current_user_roles,
+                            ld.library_dataset_dataset_association.dataset
+                        )
+                    if (admin or can_access) and not ld.deleted:
+                        rval.append( ld )
+                return rval
+
+            for ld in traverse( folder ):
+                hda = ld.library_dataset_dataset_association.to_history_dataset_association( history, add_to_history=True )
+                hda_dict = self.hda_serializer.serialize_to_view( hda,
+                    user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+                rval.append( hda_dict )
+        else:
+            message = "Invalid 'source' parameter in request %s" % source
+            raise exceptions.RequestParameterInvalidException(message)
+
+        trans.sa_session.flush()
+        return rval
+
+    def __create_dataset_collection( self, trans, history, payload, **kwd ):
+        source = kwd.get( "source", payload.get( "source", "new_collection" ) )
+        service = trans.app.dataset_collections_service
+        if source == "new_collection":
+            create_params = api_payload_to_create_params( payload )
+            dataset_collection_instance = service.create(
+                trans,
+                parent=history,
+                **create_params
+            )
+        elif source == "hdca":
+            content = payload.get( 'content', None )
+            if content is None:
+                raise exceptions.RequestParameterMissingException( "'content' id of target to copy is missing" )
+            dataset_collection_instance = service.copy(
+                trans=trans,
+                parent=history,
+                source="hdca",
+                encoded_source_id=content,
+            )
+        else:
+            message = "Invalid 'source' parameter in request %s" % source
+            raise exceptions.RequestParameterInvalidException(message)
+
+        # if the consumer specified keys or view, use the secondary serializer
+        if 'view' in kwd or 'keys' in kwd:
+            return self.hdca_serializer.serialize_to_view( dataset_collection_instance,
+                user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
+        return self.__collection_dict( trans, dataset_collection_instance, view="element" )
+
+    @expose_api_anonymous
+    def update( self, trans, history_id, id, payload, **kwd ):
+        """
+        update( self, trans, history_id, id, payload, **kwd )
+        * PUT /api/histories/{history_id}/contents/{id}
+            updates the values for the HDA with the given ``id``
+
+        :type   history_id: str
+        :param  history_id: encoded id string of the HDA's History
+        :type   id:         str
+        :param  id:         the encoded id of the HDA to update
+        :type   payload:    dict
+        :param  payload:    a dictionary containing any or all the
+            fields in :func:`galaxy.model.HistoryDatasetAssociation.to_dict`
+            and/or the following:
+
+            * annotation: an annotation for the HDA
+
+        :rtype:     dict
+        :returns:   an error object if an error occurred or a dictionary containing
+            any values that were different from the original and, therefore, updated
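+
+        For example, a payload renaming the HDA and hiding it (illustrative;
+        any serializable HDA field may be sent)::
+
+            {"name": "new name", "visible": false}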
+        """
+        # TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks)
+        contents_type = kwd.get('type', 'dataset')
+        if contents_type == "dataset":
+            return self.__update_dataset( trans, history_id, id, payload, **kwd )
+        elif contents_type == "dataset_collection":
+            return self.__update_dataset_collection( trans, history_id, id, payload, **kwd )
+        else:
+            return self.__handle_unknown_contents_type( trans, contents_type )
+
+    def __update_dataset( self, trans, history_id, id, payload, **kwd ):
+        # anon user: ensure that history ids match up and the history is the current,
+        #   check for uploading, and use only the subset of attribute keys manipulatable by anon users
+        if trans.user is None:
+            hda = self.hda_manager.by_id( self.decode_id( id ) )
+            if hda.history != trans.history:
+                raise exceptions.AuthenticationRequired( 'API authentication required for this request' )
+            hda = self.hda_manager.error_if_uploading( hda )
+
+            anon_allowed_payload = {}
+            if 'deleted' in payload:
+                anon_allowed_payload[ 'deleted' ] = payload[ 'deleted' ]
+            if 'visible' in payload:
+                anon_allowed_payload[ 'visible' ] = payload[ 'visible' ]
+            payload = anon_allowed_payload
+
+        # logged in user: use full payload, check state if deleting, and make sure the history is theirs
+        else:
+            hda = self.hda_manager.get_owned( self.decode_id( id ), trans.user, current_history=trans.history )
+
+            # only check_state if not deleting, otherwise cannot delete uploading files
+            check_state = not payload.get( 'deleted', False )
+            if check_state:
+                hda = self.hda_manager.error_if_uploading( hda )
+
+        # make the actual changes
+        # TODO: is this if still needed?
+        if hda and isinstance( hda, trans.model.HistoryDatasetAssociation ):
+            self.hda_deserializer.deserialize( hda, payload, user=trans.user, trans=trans )
+            # TODO: this should be an effect of deleting the hda
+            if payload.get( 'deleted', False ):
+                self.hda_manager.stop_creating_job( hda )
+            return self.hda_serializer.serialize_to_view( hda,
+                                                          user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
+        return {}
+
+    def __update_dataset_collection( self, trans, history_id, id, payload, **kwd ):
+        return trans.app.dataset_collections_service.update( trans, "history", id, payload )
+
+    # TODO: allow anonymous del/purge and test security on this
+    @expose_api
+    def delete( self, trans, history_id, id, purge=False, **kwd ):
+        """
+        delete( self, trans, history_id, id, **kwd )
+        * DELETE /api/histories/{history_id}/contents/{id}
+            delete the HDA with the given ``id``
+        .. note:: Currently does not stop any active jobs for which this dataset is an output.
+
+        :type   id:     str
+        :param  id:     the encoded id of the HDA to delete
+        :type   purge:  bool
+        :param  purge:  if True, purge the HDA
+        :type   kwd:    dict
+        :param  kwd:    (optional) dictionary structure containing:
+
+            * payload:     a dictionary itself containing:
+                * purge:   if True, purge the HDA
+
+        .. note:: The payload can optionally be placed in the query string of the request.
+            This allows clients that strip the request body to still purge the dataset.
+
+        :rtype:     dict
+        :returns:   an error object if an error occurred or a dictionary containing:
+            * id:         the encoded id of the HDA,
+            * deleted:    if the HDA was marked as deleted,
+            * purged:     if the HDA was purged
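+
+        For example, to delete and purge in a single request (the payload may
+        instead be placed in the query string, as noted above)::
+
+            DELETE /api/histories/{history_id}/contents/{id}?purge=True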
+        """
+        contents_type = kwd.get('type', 'dataset')
+        if contents_type == "dataset":
+            return self.__delete_dataset( trans, history_id, id, purge=purge, **kwd )
+        elif contents_type == "dataset_collection":
+            trans.app.dataset_collections_service.delete( trans, "history", id )
+            return { 'id' : id, "deleted": True }
+        else:
+            return self.__handle_unknown_contents_type( trans, contents_type )
+
+    def __delete_dataset( self, trans, history_id, id, purge, **kwd ):
+        # get purge from the query or from the request body payload (a request body is optional here)
+        purge = util.string_as_bool( purge )
+        if kwd.get( 'payload', None ):
+            # payload takes priority
+            purge = util.string_as_bool( kwd['payload'].get( 'purge', purge ) )
+
+        hda = self.hda_manager.get_owned( self.decode_id( id ), trans.user, current_history=trans.history )
+        self.hda_manager.error_if_uploading( hda )
+
+        if purge:
+            self.hda_manager.purge( hda )
+        else:
+            self.hda_manager.delete( hda )
+        return self.hda_serializer.serialize_to_view( hda,
+                                                      user=trans.user, trans=trans, **self._parse_serialization_params( kwd, 'detailed' ) )
+
+    def __handle_unknown_contents_type( self, trans, contents_type ):
+        raise exceptions.UnknownContentsType('Unknown contents type: %s' % contents_type)
+
+    def __index_v2( self, trans, history_id, **kwd ):
+        """
+        index( self, trans, history_id, **kwd )
+        * GET /api/histories/{history_id}/contents
+            return a list of HDA data for the history with the given ``id``
+        .. note:: Anonymous users are allowed to get their current history contents
+
+        If ids is given, index returns a *more complete* json object for each
+        HDA in the ids list.
+
+        :type   history_id: str
+        :param  history_id: encoded id string of the HDA's History
+
+        :rtype:     list
+        :returns:   dictionaries containing summary or detailed HDA information
+
+        The following are optional parameters:
+            view:   string, one of ('summary','detailed'), defaults to 'summary'
+                    controls which set of properties to return
+            keys:   comma separated strings, unused by default
+                    keys/names of individual properties to return
+
+        If neither keys nor view is sent, the default view (set of keys) is returned.
+        If both a view and keys are sent, the key list and the view's keys are
+        combined.
+        If keys are sent and no view, only those properties in keys are returned.
+
+        For which properties are available see:
+            galaxy/managers/hdas/HDASerializer
+        and:
+            galaxy/managers/collection_util
+
+        The list returned can be filtered by using two optional parameters:
+            q:      string, generally a property name to filter by followed
+                    by an (often optional) hyphen and operator string.
+            qv:     string, the value to filter by
+
+        For example, to filter the list to only those created after 2015-01-29,
+        the query string would look like:
+            '?q=create_time-gt&qv=2015-01-29'
+
+        Multiple filters can be sent in using multiple q/qv pairs:
+            '?q=create_time-gt&qv=2015-01-29&q=name-contains&qv=experiment-1'
+
+        The list returned can be paginated using two optional parameters:
+            limit:  integer, defaults to no value and no limit (return all)
+                    how many items to return
+            offset: integer, defaults to 0 and starts at the beginning
+                    skip the first ( offset - 1 ) items and begin returning
+                    at the Nth item
+
+        For example, limit and offset can be combined - skip the first two and
+        return five:
+            '?limit=5&offset=3'
+
+        The list returned can be ordered using the optional parameter:
+            order:  string containing one of the valid ordering attributes followed
+                    (optionally) by '-asc' or '-dsc' for ascending and descending
+                    order respectively. Orders can be stacked as a comma-
+                    separated list of values.
+
+        For example, to sort by name descending then create time descending:
+            '?order=name-dsc,create_time'
+
+        The ordering attributes and their default orders are:
+            hid defaults to 'hid-asc'
+            create_time defaults to 'create_time-dsc'
+            update_time defaults to 'update_time-dsc'
+            name    defaults to 'name-asc'
+
+        'order' defaults to 'hid-asc'
+        """
+        rval = []
+
+        history = self.history_manager.get_accessible( self.decode_id( history_id ), trans.user,
+            current_history=trans.history )
+
+        filter_params = self.parse_filter_params( kwd )
+        filters = self.history_contents_filters.parse_filters( filter_params )
+        limit, offset = self.parse_limit_offset( kwd )
+        order_by = self._parse_order_by( kwd.get( 'order', 'hid-asc' ) )
+        serialization_params = self._parse_serialization_params( kwd, 'summary' )
+        # TODO: > 16.04: remove these
+        # TODO: remove 'dataset_details' and the following section when the UI doesn't need it
+        # details param allows a mixed set of summary and detailed hdas
+        # Ever more convoluted due to backwards compatibility; 'details' should
+        # be considered deprecated in favor of the more specific
+        # 'dataset_details' (and the yet-to-be-implemented
+        # 'dataset_collection_details').
+        details = kwd.get( 'details', [] )
+        if details and details != 'all':
+            details = util.listify( details )
+        view = serialization_params.pop( 'view' )
+
+        contents = self.history_contents_manager.contents( history,
+            filters=filters, limit=limit, offset=offset, order_by=order_by )
+        for content in contents:
+
+            # TODO: remove split
+            if isinstance( content, trans.app.model.HistoryDatasetAssociation ):
+                if details == 'all' or trans.security.encode_id( content.id ) in details:
+                    rval.append( self.hda_serializer.serialize_to_view( content,
+                        user=trans.user, trans=trans, view='detailed', **serialization_params ) )
+                else:
+                    rval.append( self.hda_serializer.serialize_to_view( content,
+                        user=trans.user, trans=trans, view=view, **serialization_params ) )
+
+            elif isinstance( content, trans.app.model.HistoryDatasetCollectionAssociation ):
+                collection = self.hdca_serializer.serialize_to_view( content,
+                    user=trans.user, trans=trans, view=view, **serialization_params )
+                rval.append( collection )
+
+        return rval
+
+    def encode_type_id( self, type_id ):
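+        # type_ids look like 'dataset-<id>' or 'dataset_collection-<id>';
+        # split on the first separator and encode only the database id part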
+        TYPE_ID_SEP = '-'
+        split = type_id.split( TYPE_ID_SEP, 1 )
+        return TYPE_ID_SEP.join([ split[0], self.app.security.encode_id( split[1] )])
+
+    def _parse_order_by( self, order_by_string ):
+        ORDER_BY_SEP_CHAR = ','
+        manager = self.history_contents_manager
+        if ORDER_BY_SEP_CHAR in order_by_string:
+            return [ manager.parse_order_by( o ) for o in order_by_string.split( ORDER_BY_SEP_CHAR ) ]
+        return manager.parse_order_by( order_by_string )
+
+    @expose_api_raw
+    def archive( self, trans, history_id, filename='', format='tgz', dry_run=True, **kwd ):
+        """
+        archive( self, trans, history_id, filename='', format='tgz', dry_run=True, **kwd )
+        * GET /api/histories/{history_id}/contents/archive/{id}
+        * GET /api/histories/{history_id}/contents/archive/{filename}.{format}
+            build and return a compressed archive of the selected history contents
+
+        :type   filename:  string
+        :param  filename:  (optional) archive name (defaults to history name)
+        :type   dry_run:   boolean
+        :param  dry_run:   (optional) if True, return the archive and file paths only
+                           as json and not an archive file
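+
+        For example (illustrative), to preview the archive's layout as json
+        without building the archive::
+
+            GET /api/histories/{history_id}/contents/archive/my_archive.tgz?dry_run=True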
+
+        :returns:   archive file for download
+
+        .. note:: this is a volatile endpoint and settings and behavior may change.
+        """
+        # roughly from: http://stackoverflow.com/a/31976060 (windows, linux)
+        invalid_filename_char_regex = re.compile( r'[:<>|\\\/\?\* "]' )
+        # path format string - dot separator between id and name
+        id_name_format = u'{}.{}'
+
+        def name_to_filename( name, max_length=150, replace_with=u'_' ):
+            # TODO: seems like shortening unicode with [:] would cause unpredictable display strings
+            return invalid_filename_char_regex.sub( replace_with, name )[0:max_length]
+
+        # given a set of parents for a dataset (HDCAs, DC, DCEs, etc.) - build a directory structure that
+        # (roughly) recreates the nesting in the contents using the parent names and ids
+        def build_path_from_parents( parents ):
+            parent_names = []
+            for parent in parents:
+                # an HDCA
+                if hasattr( parent, 'hid' ):
+                    name = name_to_filename( parent.name )
+                    parent_names.append( id_name_format.format( parent.hid, name ) )
+                # a DCE
+                elif hasattr( parent, 'element_index' ):
+                    name = name_to_filename( parent.element_identifier )
+                    parent_names.append( id_name_format.format( parent.element_index, name ) )
+            # NOTE: DCs are skipped and use the wrapping DCE info instead
+            return parent_names
+
+        # get the history used for the contents query and check for accessibility
+        history = self.history_manager.get_accessible( trans.security.decode_id( history_id ), trans.user )
+        archive_base_name = filename or name_to_filename( history.name )
+
+        # this is the fn applied to each dataset contained in the query
+        paths_and_files = []
+
+        def build_archive_files_and_paths( content, *parents ):
+            archive_path = archive_base_name
+            if not self.hda_manager.is_accessible( content, trans.user ):
+                # if the underlying dataset is not accessible, skip it silently
+                return
+
+            content_container_id = content.hid
+            content_name = name_to_filename( content.name )
+            if parents:
+                if hasattr( parents[0], 'element_index' ):
+                    # if content is directly wrapped in a DCE, strip it from parents (and the resulting path)
+                    # and instead replace the content id and name with the DCE index and identifier
+                    parent_dce, parents = parents[0], parents[1:]
+                    content_container_id = parent_dce.element_index
+                    content_name = name_to_filename( parent_dce.element_identifier )
+                # reverse for path from parents: oldest parent first
+                archive_path = os.path.join( archive_path, *build_path_from_parents( parents )[::-1] )
+                # TODO: this is brute force - building the path each time instead of re-using it
+                # possibly cache
+
+            # add the name as the last element in the archive path
+            content_id_and_name = id_name_format.format( content_container_id, content_name )
+            archive_path = os.path.join( archive_path, content_id_and_name )
+
+            # ---- for composite files, we use id and name for a directory and, inside that, ...
+            if self.hda_manager.is_composite( content ):
+                # ...save the 'main' composite file (gen. html)
+                paths_and_files.append( ( content.file_name, os.path.join( archive_path, content.name + '.html' ) ) )
+                for extra_file in self.hda_manager.extra_files( content ):
+                    extra_file_basename = os.path.basename( extra_file )
+                    archive_extra_file_path = os.path.join( archive_path, extra_file_basename )
+                    # ...and one for each file in the composite
+                    paths_and_files.append( ( extra_file, archive_extra_file_path ) )
+
+            # ---- for single files, we add the true extension to id and name and store that single filename
+            else:
+                # some dataset names can contain their original file extensions, don't repeat
+                if not archive_path.endswith( '.' + content.extension ):
+                    archive_path += '.' + content.extension
+                paths_and_files.append( ( content.file_name, archive_path ) )
+
+        # filter the history contents down to datasets, using any of the filters available to the index endpoint above, then map over the datasets
+        filter_params = self.parse_filter_params( kwd )
+        filters = self.history_contents_filters.parse_filters( filter_params )
+        self.history_contents_manager.map_datasets( history, build_archive_files_and_paths, filters=filters )
+
+        # if dry_run (sent as the string 'True' in the query string), return the structure as json for debugging
+        if dry_run == 'True':
+            trans.response.headers['Content-Type'] = 'application/json'
+            return safe_dumps( paths_and_files )
+
+        # create the archive, add the dataset files, then stream the archive as a download
+        archive_type_string = 'w|gz'
+        archive_ext = 'tgz'
+        if self.app.config.upstream_gzip:
+            archive_type_string = 'w|'
+            archive_ext = 'tar'
+        archive = StreamBall( archive_type_string )
+
+        for file_path, archive_path in paths_and_files:
+            archive.add( file_path, archive_path )
+
+        archive_name = '.'.join([ archive_base_name, archive_ext ])
+        trans.response.set_content_type( "application/x-tar" )
+        trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="{}"'.format( archive_name )
+        archive.wsgi_status = trans.response.wsgi_status()
+        archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+        return archive.stream
diff --git a/lib/galaxy/webapps/galaxy/api/item_tags.py b/lib/galaxy/webapps/galaxy/api/item_tags.py
new file mode 100644
index 0000000..0e3cc1f
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/item_tags.py
@@ -0,0 +1,76 @@
+"""
+API operations related to tagging items.
+"""
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesTagsMixin
+
+from galaxy.web import _future_expose_api as expose_api
+from galaxy import exceptions
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class BaseItemTagsController( BaseAPIController, UsesTagsMixin ):
+    """
+    """
+    @expose_api
+    def index( self, trans, **kwd ):
+        """
+        """
+        tags = self._get_user_tags(trans, self.tagged_item_class, kwd[ self.tagged_item_id ])
+        return [ self._api_value( tag, trans, view='collection' ) for tag in tags ]
+
+    @expose_api
+    def show( self, trans, tag_name, **kwd ):
+        """
+        """
+        tag = self._get_item_tag_assoc( trans, self.tagged_item_class, kwd[ self.tagged_item_id ], tag_name )
+        if not tag:
+            raise exceptions.ObjectNotFound( "Failed to retrieve specified tag." )
+        return self._api_value( tag, trans )
+
+    @expose_api
+    def create( self, trans, tag_name, payload=None, **kwd ):
+        """
+        """
+        payload = payload or {}
+        value = payload.get( "value", None )
+        tag = self._apply_item_tag( trans, self.tagged_item_class, kwd[ self.tagged_item_id ], tag_name, value )
+        return self._api_value( tag, trans )
+
+    # Not handling these differently at this time
+    update = create
+
+    @expose_api
+    def delete( self, trans, tag_name, **kwd ):
+        """
+        """
+        deleted = self._remove_items_tag( trans, self.tagged_item_class, kwd[ self.tagged_item_id ], tag_name )
+        if not deleted:
+            raise exceptions.RequestParameterInvalidException( "Failed to delete specified tag." )
+        # TODO: ugh - 204 would be better
+        return 'OK'
+
+    def _api_value( self, tag, trans, view='element' ):
+        return tag.to_dict( view=view, value_mapper={ 'id': trans.security.encode_id } )
+
+
+class HistoryContentTagsController( BaseItemTagsController ):
+    controller_name = "history_content_tags"
+    tagged_item_class = "HistoryDatasetAssociation"
+    tagged_item_id = "history_content_id"
+
+
+class HistoryTagsController( BaseItemTagsController ):
+    controller_name = "history_tags"
+    tagged_item_class = "History"
+    tagged_item_id = "history_id"
+
+
+class WorkflowTagsController( BaseItemTagsController ):
+    controller_name = "workflow_tags"
+    tagged_item_class = "StoredWorkflow"
+    tagged_item_id = "workflow_id"
+
+# TODO: Visualization and Pages once APIs for those are available
diff --git a/lib/galaxy/webapps/galaxy/api/job_files.py b/lib/galaxy/webapps/galaxy/api/job_files.py
new file mode 100644
index 0000000..3abac0f
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/job_files.py
@@ -0,0 +1,166 @@
+""" API for asynchronous job running mechanisms can use to fetch or put files
+related to running and queued jobs.
+"""
+import os
+import shutil
+
+from galaxy import exceptions
+from galaxy import util
+from galaxy import model
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web import _future_expose_api_raw_anonymous_and_sessionless as expose_api_raw_anonymous_and_sessionless
+
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class JobFilesAPIController( BaseAPIController ):
+    """ This job files controller allows remote job running mechanisms to
+    read and modify the current state of files for queued and running jobs.
+    It is certainly not meant to represent part of Galaxy's stable, user
+    facing API.
+
+    Furthermore, even if a user key corresponds to the user running the job,
+    it should not be accepted for authorization - this API allows access to
+    low-level unfiltered files and such authorization would break Galaxy's
+    security model for tool execution.
+    """
+
+    @expose_api_raw_anonymous_and_sessionless
+    def index( self, trans, job_id, **kwargs ):
+        """
+        index( self, trans, job_id, **kwargs )
+        * GET /api/jobs/{job_id}/files
+            Get a file required to stage a job (proper datasets, extra inputs,
+            task-split inputs, working directory files).
+
+        :type   job_id: str
+        :param  job_id: encoded id string of the job
+        :type   path: str
+        :param  path: Path to file.
+        :type   job_key: str
+        :param  job_key: A key used to authenticate this request as acting on
+                         behalf of a job runner for the specified job.
+        .. note::
+            This API method is intended only for consumption by job runners,
+            not end users.
+
+        :rtype:     binary
+        :returns:   contents of file
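+
+        For example (illustrative; the path and key values are placeholders
+        minted by Galaxy for the job runner)::
+
+            GET /api/jobs/{job_id}/files?path=<output path>&job_key=<job key>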
+        """
+        self.__authorize_job_access( trans, job_id, **kwargs )
+        path = kwargs.get("path", None)
+        return open(path, 'rb')
+
+    @expose_api_anonymous_and_sessionless
+    def create( self, trans, job_id, payload, **kwargs ):
+        """
+        create( self, trans, job_id, payload, **kwargs )
+        * POST /api/jobs/{job_id}/files
+            Populate an output file (formal dataset, task split part, working
+            directory file (such as those related to metadata)). This should be
+            a multipart post with a 'file' parameter containing the contents of
+            the actual file to create.
+
+        :type   job_id: str
+        :param  job_id: encoded id string of the job
+        :type   payload:    dict
+        :param  payload:    dictionary structure containing::
+            'job_key'   = Key authenticating this request as acting on behalf of a job runner
+            'path'      = Path to file to create.
+
+        .. note::
+            This API method is intended only for consumption by job runners,
+            not end users.
+
+        :rtype:     dict
+        :returns:   an okay message
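+
+        For example (an illustrative sketch; the key, path and url values are
+        placeholders)::
+
+            curl -F "job_key=<job key>" -F "path=<output path>" \
+                 -F "file=@<local file>" <galaxy url>/api/jobs/{job_id}/files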
+        """
+        job = self.__authorize_job_access( trans, job_id, **payload )
+        path = payload.get( "path" )
+        self.__check_job_can_write_to_path( trans, job, path )
+
+        # Is this writing an unneeded file? Should this just copy in Python?
+        if '__file_path' in payload:
+            file_path = payload.get( '__file_path' )
+            upload_store = trans.app.config.nginx_upload_job_files_store
+            assert upload_store, ( "Request appears to have been processed by"
+                                   " nginx_upload_module but Galaxy is not"
+                                   " configured to recognize it" )
+            assert file_path.startswith( upload_store ), \
+                ( "Filename provided by nginx (%s) is not in correct"
+                  " directory (%s)" % ( file_path, upload_store ) )
+            input_file = open( file_path )
+        else:
+            input_file = payload.get( "file",
+                                      payload.get( "__file", None ) ).file
+        try:
+            shutil.move( input_file.name, path )
+        finally:
+            try:
+                input_file.close()
+            except OSError:
+                # Fails to close file if not using nginx upload because the
+                # tempfile has moved and Python wants to delete it.
+                pass
+        return {"message": "ok"}
+
+    def __authorize_job_access(self, trans, encoded_job_id, **kwargs):
+        for key in [ "path", "job_key" ]:
+            if key not in kwargs:
+                error_message = "Job files action requires a valid '%s'." % key
+                raise exceptions.ObjectAttributeMissingException( error_message )
+
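+        # recompute the key Galaxy minted for this job and compare it to the
+        # supplied job_key (safe_str_cmp compares in a timing-safe way)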
+        job_id = trans.security.decode_id( encoded_job_id )
+        job_key = trans.security.encode_id( job_id, kind="jobs_files" )
+        if not util.safe_str_cmp( kwargs[ "job_key" ], job_key ):
+            raise exceptions.ItemAccessibilityException("Invalid job_key supplied.")
+
+        # Verify job is active. Don't update the contents of complete jobs.
+        job = trans.sa_session.query( model.Job ).get( job_id )
+        if job.finished:
+            error_message = "Attempting to read or modify the files of a job that has already completed."
+            raise exceptions.ItemAccessibilityException( error_message )
+        return job
+
+    def __check_job_can_write_to_path( self, trans, job, path ):
+        """ Verify an idealized job runner should actually be able to write to
+        the specified path - it must be a dataset output, a dataset "extra
+        file", or a some place in the working directory of this job.
+
+        Would like similar checks for reading the unstructured nature of loc
+        files make this very difficult. (See abandoned work here
+        https://gist.github.com/jmchilton/9103619.)
+        """
+        in_work_dir = self.__in_working_directory( job, path, trans.app )
+        allow_temp_dir_file = self.__is_allowed_temp_dir_file( trans.app, job, path )
+        if not in_work_dir and not allow_temp_dir_file and not self.__is_output_dataset_path( job, path ):
+            raise exceptions.ItemAccessibilityException("Job is not authorized to write to supplied path.")
+
+    def __is_allowed_temp_dir_file( self, app, job, path ):
+        # grrr.. need to get away from new_file_path - these should be written
+        # to job working directory like metadata files.
+        in_temp_dir = util.in_directory( path, app.config.new_file_path )
+        return in_temp_dir and os.path.split( path )[ -1 ].startswith( "GALAXY_VERSION_")
+
+    def __is_output_dataset_path( self, job, path ):
+        """ Check if is an output path for this job or a file in the an
+        output's extra files path.
+        """
+        da_lists = [ job.output_datasets, job.output_library_datasets ]
+        for da_list in da_lists:
+            for job_dataset_association in da_list:
+                dataset = job_dataset_association.dataset
+                if not dataset:
+                    continue
+                if os.path.abspath( dataset.file_name ) == os.path.abspath( path ):
+                    return True
+                elif util.in_directory( path, dataset.extra_files_path ):
+                    return True
+        return False
+
+    def __in_working_directory( self, job, path, app ):
+        working_directory = app.object_store.get_filename(job, base_dir='job_work', dir_only=True, extra_dir=str(job.id))
+        return util.in_directory( path, working_directory )
diff --git a/lib/galaxy/webapps/galaxy/api/jobs.py b/lib/galaxy/webapps/galaxy/api/jobs.py
new file mode 100644
index 0000000..b873f50
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/jobs.py
@@ -0,0 +1,352 @@
+"""
+API operations on jobs.
+
+.. seealso:: :class:`galaxy.model.Job`
+"""
+
+import json
+import logging
+
+from six import string_types
+from sqlalchemy import and_, false, or_
+from sqlalchemy.orm import aliased
+
+from galaxy import exceptions
+from galaxy import managers
+from galaxy import model
+from galaxy import util
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesLibraryMixinItems
+
+log = logging.getLogger( __name__ )
+
+
+class JobController( BaseAPIController, UsesLibraryMixinItems ):
+
+    def __init__( self, app ):
+        super( JobController, self ).__init__( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+        self.dataset_manager = managers.datasets.DatasetManager( app )
+
+    @expose_api
+    def index( self, trans, **kwd ):
+        """
+        index( trans, state=None, tool_id=None, history_id=None, date_range_min=None, date_range_max=None, user_details=False )
+        * GET /api/jobs:
+            return jobs for current user
+
+            !! if the user is an admin and user_details is True, then
+                return jobs for all Galaxy users based on filtering - this is an extended service
+
+        :type   state: string or list
+        :param  state: limit listing of jobs to those that match one of the included states. If none, all are returned.
+        Valid Galaxy job states include:
+                'new', 'upload', 'waiting', 'queued', 'running', 'ok', 'error', 'paused', 'deleted', 'deleted_new'
+
+        :type   tool_id: string or list
+        :param  tool_id: limit listing of jobs to those that match one of the included tool_ids. If none, all are returned.
+
+        :type   user_details: boolean
+        :param  user_details: if true, and the requester is an admin, will return external job id and user email.
+
+        :type   date_range_min: string '2014-01-01'
+        :param  date_range_min: limit the listing of jobs to those updated on or after requested date
+
+        :type   date_range_max: string '2014-12-31'
+        :param  date_range_max: limit the listing of jobs to those updated on or before requested date
+
+        :type   history_id: string
+        :param  history_id: limit listing of jobs to those that match the history_id. If none, all are returned.
+
+        :rtype:     list
+        :returns:   list of dictionaries containing summary job information
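+
+        For example, to list completed jobs for a given tool (an illustrative
+        request; the filters above may be combined)::
+
+            GET /api/jobs?state=ok&tool_id=cat1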
+        """
+        state = kwd.get( 'state', None )
+        is_admin = trans.user_is_admin()
+        user_details = kwd.get('user_details', False)
+
+        if is_admin:
+            query = trans.sa_session.query( trans.app.model.Job )
+        else:
+            query = trans.sa_session.query( trans.app.model.Job ).filter(trans.app.model.Job.user == trans.user)
+
+        def build_and_apply_filters( query, objects, filter_func ):
+            if objects is not None:
+                if isinstance( objects, string_types ):
+                    query = query.filter( filter_func( objects ) )
+                elif isinstance( objects, list ):
+                    t = []
+                    for obj in objects:
+                        t.append( filter_func( obj ) )
+                    query = query.filter( or_( *t ) )
+            return query
+
+        query = build_and_apply_filters( query, state, lambda s: trans.app.model.Job.state == s )
+
+        query = build_and_apply_filters( query, kwd.get( 'tool_id', None ), lambda t: trans.app.model.Job.tool_id == t )
+        query = build_and_apply_filters( query, kwd.get( 'tool_id_like', None ), lambda t: trans.app.model.Job.tool_id.like(t) )
+
+        query = build_and_apply_filters( query, kwd.get( 'date_range_min', None ), lambda dmin: trans.app.model.Job.table.c.update_time >= dmin )
+        query = build_and_apply_filters( query, kwd.get( 'date_range_max', None ), lambda dmax: trans.app.model.Job.table.c.update_time <= dmax )
+
+        history_id = kwd.get( 'history_id', None )
+        if history_id is not None:
+            try:
+                decoded_history_id = self.decode_id(history_id)
+                query = query.filter( trans.app.model.Job.history_id == decoded_history_id )
+            except Exception:
+                raise exceptions.ObjectAttributeInvalidException()
+
+        out = []
+        if kwd.get( 'order_by' ) == 'create_time':
+            order_by = trans.app.model.Job.create_time.desc()
+        else:
+            order_by = trans.app.model.Job.update_time.desc()
+        for job in query.order_by( order_by ).all():
+            job_dict = job.to_dict( 'collection', system_details=is_admin )
+            j = self.encode_all_ids( trans, job_dict, True )
+            if user_details:
+                j['user_email'] = job.user.email
+            out.append(j)
+
+        return out
+
+    @expose_api
+    def show( self, trans, id, **kwd ):
+        """
+        show( trans, id )
+        * GET /api/jobs/{id}:
+            return jobs for current user
+
+        :type   id: string
+        :param  id: Specific job id
+
+        :type   full: boolean
+        :param  full: whether to return extra information
+
+        :rtype:     dictionary
+        :returns:   dictionary containing full description of job data
+        """
+        job = self.__get_job( trans, id )
+        is_admin = trans.user_is_admin()
+        job_dict = self.encode_all_ids( trans, job.to_dict( 'element', system_details=is_admin ), True )
+        full_output = util.asbool( kwd.get( 'full', 'false' ) )
+        if full_output:
+            job_dict.update( dict( stderr=job.stderr, stdout=job.stdout ) )
+            if is_admin:
+                job_dict['user_email'] = job.user.email
+
+                def metric_to_dict(metric):
+                    metric_name = metric.metric_name
+                    metric_value = metric.metric_value
+                    metric_plugin = metric.plugin
+                    title, value = trans.app.job_metrics.format(metric_plugin, metric_name, metric_value)
+                    return dict(
+                        title=title,
+                        value=value,
+                        plugin=metric_plugin,
+                        name=metric_name,
+                        raw_value=str(metric_value),
+                    )
+
+                job_dict['job_metrics'] = [metric_to_dict(metric) for metric in job.metrics]
+        return job_dict
+
+    @expose_api
+    def inputs( self, trans, id, **kwd ):
+        """
+        inputs( trans, id )
+        * GET /api/jobs/{id}/inputs
+            returns input datasets consumed by the job
+
+        :type   id: string
+        :param  id: Encoded job id
+
+        :rtype:     list
+        :returns:   list of dictionaries describing input dataset associations
+        """
+        job = self.__get_job( trans, id )
+        return self.__dictify_associations( trans, job.input_datasets, job.input_library_datasets )
+
+    @expose_api
+    def outputs( self, trans, id, **kwd ):
+        """
+        outputs( trans, id )
+        * GET /api/jobs/{id}/outputs
+            returns output datasets created by the job
+
+        :type   id: string
+        :param  id: Encoded job id
+
+        :rtype:     list
+        :returns:   list of dictionaries describing output dataset associations
+        """
+        job = self.__get_job( trans, id )
+        return self.__dictify_associations( trans, job.output_datasets, job.output_library_datasets )
+
+    @expose_api_anonymous
+    def build_for_rerun( self, trans, id, **kwd ):
+        """
+        * GET /api/jobs/{id}/build_for_rerun
+            returns a tool input/param template prepopulated with this job's
+            information, suitable for rerunning or rendering parameters of the
+            job.
+
+        :type   id: string
+        :param  id: Encoded job id
+
+        :rtype:     dictionary
+        :returns:   dictionary containing the tool's inputs, prepopulated with the job's parameters
+        """
+
+        job = self.__get_job(trans, id)
+        if not job:
+            raise exceptions.ObjectNotFound("Could not access job with id '%s'" % id)
+        tool = self.app.toolbox.get_tool( job.tool_id, job.tool_version )
+        if not tool.is_workflow_compatible:
+            raise exceptions.ConfigDoesNotAllowException( "Tool '%s' cannot be rerun." % ( job.tool_id ) )
+        return tool.to_json(trans, {}, job=job)
+
+    def __dictify_associations( self, trans, *association_lists ):
+        rval = []
+        for association_list in association_lists:
+            rval.extend( map( lambda a: self.__dictify_association( trans, a ), association_list ) )
+        return rval
+
+    def __dictify_association( self, trans, job_dataset_association ):
+        dataset_dict = None
+        dataset = job_dataset_association.dataset
+        if dataset:
+            if isinstance( dataset, model.HistoryDatasetAssociation ):
+                dataset_dict = dict( src="hda", id=trans.security.encode_id( dataset.id ) )
+            else:
+                dataset_dict = dict( src="ldda", id=trans.security.encode_id( dataset.id ) )
+        return dict( name=job_dataset_association.name, dataset=dataset_dict )
+
+    def __get_job( self, trans, id ):
+        try:
+            decoded_job_id = self.decode_id( id )
+        except Exception:
+            raise exceptions.MalformedId()
+        job = trans.sa_session.query( trans.app.model.Job ).filter( trans.app.model.Job.id == decoded_job_id ).first()
+        if job is None:
+            raise exceptions.ObjectNotFound()
+        if not trans.user_is_admin() and job.user != trans.user:
+            if not job.output_datasets:
+                raise exceptions.ItemAccessibilityException( "Job has no output datasets." )
+            for data_assoc in job.output_datasets:
+                if not self.dataset_manager.is_accessible( data_assoc.dataset.dataset, trans.user ):
+                    raise exceptions.ItemAccessibilityException( "You are not allowed to rerun this job." )
+        return job
+
+    @expose_api
+    def create( self, trans, payload, **kwd ):
+        """ See the create method in tools.py in order to submit a job. """
+        raise exceptions.NotImplemented( 'Please POST to /api/tools instead.' )
+
+    @expose_api
+    def search( self, trans, payload, **kwd ):
+        """
+        search( trans, payload )
+        * POST /api/jobs/search:
+            return jobs for current user
+
+        :type   payload: dict
+        :param  payload: Dictionary containing description of requested job. This is in the same format as
+            a request to POST /api/tools would take to initiate a job
+
+        :rtype:     list
+        :returns:   list of dictionaries containing summary job information of the jobs that match the requested job run
+
+        This method is designed to scan the list of previously run jobs and find records of jobs that had
+        the exact same input parameters and datasets. This can be used to minimize the amount of repeated work
+        by simply recycling the old results.
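+
+        For example (an illustrative payload; the tool input name and encoded
+        id are placeholders)::
+
+            {
+                "tool_id": "cat1",
+                "inputs": {"input1": {"src": "hda", "id": "<encoded hda id>"}},
+                "state": "ok"
+            }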
+        """
+
+        tool_id = None
+        if 'tool_id' in payload:
+            tool_id = payload.get( 'tool_id' )
+        if tool_id is None:
+            raise exceptions.ObjectAttributeMissingException( "No tool id" )
+
+        tool = trans.app.toolbox.get_tool( tool_id )
+        if tool is None:
+            raise exceptions.ObjectNotFound( "Requested tool not found" )
+        if 'inputs' not in payload:
+            raise exceptions.ObjectAttributeMissingException( "No inputs defined" )
+
+        inputs = payload[ 'inputs' ]
+
+        input_data = {}
+        input_param = {}
+        for k, v in inputs.items():
+            if isinstance( v, dict ):
+                if 'id' in v:
+                    if 'src' not in v or v[ 'src' ] == 'hda':
+                        hda_id = self.decode_id( v['id'] )
+                        dataset = self.hda_manager.get_accessible( hda_id, trans.user )
+                    else:
+                        dataset = self.get_library_dataset_dataset_association( trans, v['id'] )
+                    if dataset is None:
+                        raise exceptions.ObjectNotFound( "Dataset %s not found" % ( v[ 'id' ] ) )
+                    input_data[k] = dataset.dataset_id
+            else:
+                input_param[k] = json.dumps( str(v) )
+
+        query = trans.sa_session.query( trans.app.model.Job ).filter(
+            trans.app.model.Job.tool_id == tool_id,
+            trans.app.model.Job.user == trans.user
+        )
+
+        if 'state' not in payload:
+            query = query.filter(
+                or_(
+                    trans.app.model.Job.state == 'running',
+                    trans.app.model.Job.state == 'queued',
+                    trans.app.model.Job.state == 'waiting',
+                    trans.app.model.Job.state == 'ok',
+                )
+            )
+        else:
+            if isinstance( payload[ 'state' ], string_types ):
+                query = query.filter( trans.app.model.Job.state == payload[ 'state' ] )
+            elif isinstance( payload[ 'state' ], list ):
+                o = []
+                for s in payload[ 'state' ]:
+                    o.append( trans.app.model.Job.state == s )
+                query = query.filter(
+                    or_( *o )
+                )
+
+        for k, v in input_param.items():
+            a = aliased( trans.app.model.JobParameter )
+            query = query.filter( and_(
+                trans.app.model.Job.id == a.job_id,
+                a.name == k,
+                a.value == v
+            ) )
+
+        for k, v in input_data.items():
+            # Here we are attempting to link the inputs to the underlying
+            # dataset (not the dataset association).
+            # This way, if the calculation was done using a copied HDA
+            # (copied from the library or another history), the search will
+            # still find the job
+            a = aliased( trans.app.model.JobToInputDatasetAssociation )
+            b = aliased( trans.app.model.HistoryDatasetAssociation )
+            query = query.filter( and_(
+                trans.app.model.Job.id == a.job_id,
+                a.dataset_id == b.id,
+                b.deleted == false(),
+                b.dataset_id == v
+            ) )
+
+        out = []
+        for job in query.all():
+            # check to make sure none of the output files have been deleted
+            if all( a.dataset.deleted is False for a in job.output_datasets ):
+                out.append( self.encode_all_ids( trans, job.to_dict( 'element' ), True ) )
+        return out
diff --git a/lib/galaxy/webapps/galaxy/api/lda_datasets.py b/lib/galaxy/webapps/galaxy/api/lda_datasets.py
new file mode 100644
index 0000000..14dc751
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/lda_datasets.py
@@ -0,0 +1,756 @@
+"""
+API operations on the library datasets.
+"""
+import glob
+import logging
+import os
+import os.path
+import string
+import sys
+import tempfile
+import zipfile
+from json import dumps
+
+from paste.httpexceptions import HTTPBadRequest, HTTPInternalServerError
+
+from galaxy import exceptions
+from galaxy import util
+from galaxy import web
+from galaxy.exceptions import ObjectNotFound
+from galaxy.managers import folders, roles
+from galaxy.tools.actions import upload_common
+from galaxy.util.streamball import StreamBall
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy.web.base.controller import BaseAPIController, UsesVisualizationMixin
+
+log = logging.getLogger( __name__ )
+
+
+class LibraryDatasetsController( BaseAPIController, UsesVisualizationMixin ):
+
+    def __init__( self, app ):
+        super( LibraryDatasetsController, self ).__init__( app )
+        self.folder_manager = folders.FolderManager()
+        self.role_manager = roles.RoleManager( app )
+
+    @expose_api_anonymous
+    def show( self, trans, id, **kwd ):
+        """
+        show( self, trans, id, **kwd )
+        * GET /api/libraries/datasets/{encoded_dataset_id}
+            Displays information about the dataset identified by the encoded ID.
+
+        :param  id:      the encoded id of the dataset to query
+        :type   id:      an encoded id string
+
+        :returns:   detailed dataset information from base controller
+        :rtype:     dictionary
+
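+        example: ``GET localhost:8080/api/libraries/datasets/f2db41e1fa331b3e`` (the encoded id is illustrative)
+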
+        .. seealso:: :attr:`galaxy.web.base.controller.UsesLibraryMixinItems.get_library_dataset`
+        """
+        try:
+            library_dataset = self.get_library_dataset( trans, id=id, check_ownership=False, check_accessible=True )
+        except Exception:
+            raise exceptions.ObjectNotFound( 'Requested library_dataset was not found.' )
+
+        current_user_roles = trans.get_current_user_roles()
+
+        # Build the full path for breadcrumb purposes.
+        full_path = self._build_path( trans, library_dataset.folder )
+        dataset_item = ( trans.security.encode_id( library_dataset.id ), library_dataset.name )
+        full_path.insert(0, dataset_item)
+        full_path = full_path[ ::-1 ]
+
+        # Find expired versions of the library dataset
+        expired_ldda_versions = []
+        for expired_ldda in library_dataset.expired_datasets:
+            expired_ldda_versions.append( ( trans.security.encode_id( expired_ldda.id ), expired_ldda.name ) )
+
+        rval = trans.security.encode_all_ids( library_dataset.to_dict() )
+        if len(expired_ldda_versions) > 0:
+            rval[ 'has_versions' ] = True
+            rval[ 'expired_versions' ] = expired_ldda_versions
+        rval[ 'deleted' ] = library_dataset.deleted
+        rval[ 'folder_id' ] = 'F' + rval[ 'folder_id' ]
+        rval[ 'full_path' ] = full_path
+        rval[ 'file_size' ] = util.nice_size( int( library_dataset.library_dataset_dataset_association.get_size() ) )
+        rval[ 'date_uploaded' ] = library_dataset.library_dataset_dataset_association.create_time.strftime( "%Y-%m-%d %I:%M %p" )
+        rval[ 'can_user_modify' ] = trans.app.security_agent.can_modify_library_item( current_user_roles, library_dataset) or trans.user_is_admin()
+        rval[ 'is_unrestricted' ] = trans.app.security_agent.dataset_is_public( library_dataset.library_dataset_dataset_association.dataset )
+
+        #  Manage dataset permission is always attached to the dataset itself, not the ld or ldda, to maintain consistency.
+        rval[ 'can_user_manage' ] = trans.app.security_agent.can_manage_dataset( current_user_roles, library_dataset.library_dataset_dataset_association.dataset) or trans.user_is_admin()
+        return rval
+
+    @expose_api_anonymous
+    def show_version( self, trans, encoded_dataset_id, encoded_ldda_id, **kwd ):
+        """
+        show_version( self, trans, encoded_dataset_id, encoded_ldda_id, **kwd ):
+        * GET /api/libraries/datasets/{encoded_dataset_id}/versions/{encoded_ldda_id}
+            Displays information about specific version of the library_dataset (i.e. ldda).
+
+        :param  encoded_dataset_id:      the encoded id of the dataset to query
+        :type   encoded_dataset_id:      an encoded id string
+
+        :param  encoded_ldda_id:      the encoded id of the ldda to query
+        :type   encoded_ldda_id:      an encoded id string
+
+        :rtype:     dictionary
+        :returns:   dict of ldda's details
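+
+        example: ``GET localhost:8080/api/libraries/datasets/f2db41e1fa331b3e/versions/5a1cff6882ddb5b2`` (both encoded ids are illustrative)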
+        """
+        try:
+            library_dataset = self.get_library_dataset( trans, id=encoded_dataset_id, check_ownership=False, check_accessible=True )
+        except Exception:
+            raise exceptions.ObjectNotFound( 'Requested library_dataset was not found.' )
+
+        try:
+            ldda = self.get_library_dataset_dataset_association( trans, id=encoded_ldda_id, check_ownership=False, check_accessible=False )
+        except Exception as e:
+            raise exceptions.ObjectNotFound( 'Requested version of library dataset was not found. ' + str(e) )
+
+        if ldda not in library_dataset.expired_datasets:
+            raise exceptions.ObjectNotFound( 'Given library dataset does not have the requested version.' )
+
+        rval = trans.security.encode_all_ids( ldda.to_dict() )
+        return rval
+
+    @expose_api
+    def show_roles( self, trans, encoded_dataset_id, **kwd ):
+        """
+        show_roles( self, trans, encoded_dataset_id, **kwd ):
+        * GET /api/libraries/datasets/{encoded_dataset_id}/permissions
+            Displays information about current or available roles
+            for a given dataset permission.
+
+        :param  encoded_dataset_id:      the encoded id of the dataset to query
+        :type   encoded_dataset_id:      an encoded id string
+
+        :param  scope:      either 'current' or 'available'
+        :type   scope:      string
+
+        :rtype:     dictionary
+        :returns:   either dict of current roles for all permission types or
+                           dict of available roles to choose from (is the same for any permission type)
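+
+        example: ``GET localhost:8080/api/libraries/datasets/f2db41e1fa331b3e/permissions?scope=available&page=1&page_limit=10`` (the encoded id is illustrative)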
+        """
+
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            library_dataset = self.get_library_dataset( trans, id=encoded_dataset_id, check_ownership=False, check_accessible=False )
+        except Exception as e:
+            raise exceptions.ObjectNotFound( 'Requested dataset was not found. ' + str(e) )
+        dataset = library_dataset.library_dataset_dataset_association.dataset
+
+        # The user has to have the "manage permissions" permission in order to see the roles.
+        can_manage = trans.app.security_agent.can_manage_dataset( current_user_roles, dataset ) or trans.user_is_admin()
+        if not can_manage:
+            raise exceptions.InsufficientPermissionsException( 'You do not have proper permission to access permissions.' )
+
+        scope = kwd.get( 'scope', None )
+        if scope == 'current' or scope is None:
+            return self._get_current_roles( trans, library_dataset )
+
+        #  Return roles that are available to select.
+        elif scope == 'available':
+            page = kwd.get( 'page', None )
+            if page is not None:
+                page = int( page )
+            else:
+                page = 1
+
+            page_limit = kwd.get( 'page_limit', None )
+            if page_limit is not None:
+                page_limit = int( page_limit )
+            else:
+                page_limit = 10
+
+            query = kwd.get( 'q', None )
+
+            roles, total_roles = trans.app.security_agent.get_valid_roles( trans, dataset, query, page, page_limit )
+
+            return_roles = []
+            for role in roles:
+                role_id = trans.security.encode_id( role.id )
+                return_roles.append( dict( id=role_id, name=role.name, type=role.type ) )
+            return dict( roles=return_roles, page=page, page_limit=page_limit, total=total_roles )
+        else:
+            raise exceptions.RequestParameterInvalidException( "The value of 'scope' parameter is invalid. Alllowed values: current, available" )
+
+    def _get_current_roles( self, trans, library_dataset):
+        """
+        Find all roles currently connected to relevant permissions
+        on the library dataset and the underlying dataset.
+
+        :param  library_dataset:      the model object
+        :type   library_dataset:      LibraryDataset
+
+        :rtype:     dictionary
+        :returns:   dict of current roles for all available permission types
+        """
+        dataset = library_dataset.library_dataset_dataset_association.dataset
+
+        # Omit duplicated roles by converting to set
+        access_roles = set( dataset.get_access_roles( trans ) )
+        modify_roles = set( trans.app.security_agent.get_roles_for_action( library_dataset, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY ) )
+        manage_roles = set( dataset.get_manage_permissions_roles( trans ) )
+
+        access_dataset_role_list = [ ( access_role.name, trans.security.encode_id( access_role.id ) ) for access_role in access_roles ]
+        manage_dataset_role_list = [ ( manage_role.name, trans.security.encode_id( manage_role.id ) ) for manage_role in manage_roles ]
+        modify_item_role_list = [ ( modify_role.name, trans.security.encode_id( modify_role.id ) ) for modify_role in modify_roles ]
+
+        return dict( access_dataset_roles=access_dataset_role_list, modify_item_roles=modify_item_role_list, manage_dataset_roles=manage_dataset_role_list )
+
+    @expose_api
+    def update_permissions( self, trans, encoded_dataset_id, payload=None, **kwd ):
+        """
+        * POST /api/libraries/datasets/{encoded_dataset_id}/permissions
+            Set permissions of the given dataset to the given role ids.
+
+        :param  encoded_dataset_id:      the encoded id of the dataset to update permissions of
+        :type   encoded_dataset_id:      an encoded id string
+        :param   payload: dictionary structure containing:
+            :param  action:     (required) describes what action should be performed
+                                available actions: make_private, remove_restrictions, set_permissions
+            :type   action:     string
+            :param  access_ids[]:      list of Role.id defining roles that should have access permission on the dataset
+            :type   access_ids[]:      string or list
+            :param  manage_ids[]:      list of Role.id defining roles that should have manage permission on the dataset
+            :type   manage_ids[]:      string or list
+            :param  modify_ids[]:      list of Role.id defining roles that should have modify permission on the library dataset item
+            :type   modify_ids[]:      string or list
+        :type:      dictionary
+        :returns:   dict of current roles for all available permission types
+        :rtype:     dictionary
+
+        :raises: RequestParameterInvalidException, ObjectNotFound, InsufficientPermissionsException, InternalServerError
+                    RequestParameterMissingException
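+
+        example of a payload for the "set_permissions" action (the role ids are illustrative):
+            ``{"action": "set_permissions", "access_ids[]": ["f2db41e1fa331b3e"], "manage_ids[]": ["f2db41e1fa331b3e"], "modify_ids[]": ["f2db41e1fa331b3e"]}``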
+        """
+        if payload:
+            kwd.update(payload)
+        try:
+            library_dataset = self.get_library_dataset( trans, id=encoded_dataset_id, check_ownership=False, check_accessible=False )
+        except Exception as e:
+            raise exceptions.ObjectNotFound( 'Requested dataset was not found. ' + str(e) )
+        dataset = library_dataset.library_dataset_dataset_association.dataset
+        current_user_roles = trans.get_current_user_roles()
+        can_manage = trans.app.security_agent.can_manage_dataset( current_user_roles, dataset ) or trans.user_is_admin()
+        if not can_manage:
+            raise exceptions.InsufficientPermissionsException( 'You do not have proper permissions to manage permissions on this dataset.' )
+        new_access_roles_ids = util.listify( kwd.get( 'access_ids[]', None ) )
+        new_manage_roles_ids = util.listify( kwd.get( 'manage_ids[]', None ) )
+        new_modify_roles_ids = util.listify( kwd.get( 'modify_ids[]', None ) )
+        action = kwd.get( 'action', None )
+        if action is None:
+            raise exceptions.RequestParameterMissingException( 'The mandatory parameter "action" is missing.' )
+        elif action == 'remove_restrictions':
+            trans.app.security_agent.make_dataset_public( dataset )
+            if not trans.app.security_agent.dataset_is_public( dataset ):
+                raise exceptions.InternalServerError( 'An error occurred while making dataset public.' )
+        elif action == 'make_private':
+            if not trans.app.security_agent.dataset_is_private_to_user( trans, library_dataset ):
+                private_role = trans.app.security_agent.get_private_user_role( trans.user )
+                dp = trans.app.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action, dataset, private_role )
+                trans.sa_session.add( dp )
+                trans.sa_session.flush()
+            if not trans.app.security_agent.dataset_is_private_to_user( trans, library_dataset ):
+                # Check again and inform the user if dataset is not private.
+                raise exceptions.InternalServerError( 'An error occurred and the dataset is NOT private.' )
+        elif action == 'set_permissions':
+            # ACCESS DATASET ROLES
+            valid_access_roles = []
+            invalid_access_roles_ids = []
+            # util.listify() returns an empty list for None, so check for emptiness
+            if not new_access_roles_ids:
+                trans.app.security_agent.make_dataset_public( dataset )
+            else:
+                for role_id in new_access_roles_ids:
+                    role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                    #  Check whether role is in the set of allowed roles
+                    valid_roles, total_roles = trans.app.security_agent.get_valid_roles( trans, dataset )
+                    if role in valid_roles:
+                        valid_access_roles.append( role )
+                    else:
+                        invalid_access_roles_ids.append( role_id )
+                if len( invalid_access_roles_ids ) > 0:
+                    log.warning( "The following roles could not be added to the dataset access permission: " + str( invalid_access_roles_ids ) )
+
+                access_permission = dict( access=valid_access_roles )
+                trans.app.security_agent.set_dataset_permission( dataset, access_permission )
+
+            # MANAGE DATASET ROLES
+            valid_manage_roles = []
+            invalid_manage_roles_ids = []
+
+            #  Load all access roles to check
+            active_access_roles = dataset.get_access_roles( trans )
+
+            for role_id in new_manage_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                #  Check whether role is in the set of access roles
+                if role in active_access_roles:
+                    valid_manage_roles.append( role )
+                else:
+                    invalid_manage_roles_ids.append( role_id )
+
+            if len( invalid_manage_roles_ids ) > 0:
+                log.warning( "The following roles could not be added to the dataset manage permission: " + str( invalid_manage_roles_ids ) )
+
+            manage_permission = { trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS: valid_manage_roles }
+            trans.app.security_agent.set_dataset_permission( dataset, manage_permission )
+
+            # MODIFY LIBRARY ITEM ROLES
+            valid_modify_roles = []
+            invalid_modify_roles_ids = []
+
+            #  Load all access roles to check
+            active_access_roles = dataset.get_access_roles( trans )
+
+            for role_id in new_modify_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                #  Check whether role is in the set of access roles
+                if role in active_access_roles:
+                    valid_modify_roles.append( role )
+                else:
+                    invalid_modify_roles_ids.append( role_id )
+
+            if len( invalid_modify_roles_ids ) > 0:
+                log.warning( "The following roles could not be added to the dataset modify permission: " + str( invalid_modify_roles_ids ) )
+
+            modify_permission = { trans.app.security_agent.permitted_actions.LIBRARY_MODIFY: valid_modify_roles }
+            trans.app.security_agent.set_library_item_permission( library_dataset, modify_permission )
+
+        else:
+            raise exceptions.RequestParameterInvalidException( 'The mandatory parameter "action" has an invalid value. '
+                                                               'Allowed values are: "remove_restrictions", "make_private", "set_permissions"' )
+
+        return self._get_current_roles( trans, library_dataset )
+
+    @expose_api
+    def delete( self, trans, encoded_dataset_id, **kwd ):
+        """
+        delete( self, trans, encoded_dataset_id, **kwd ):
+        * DELETE /api/libraries/datasets/{encoded_dataset_id}
+            Marks the dataset deleted or undeleted based on the value
+            of the undelete flag.
+            If the flag is not present it is considered False and the
+            item is marked deleted.
+
+        :param  encoded_dataset_id:      the encoded id of the dataset to change
+        :type   encoded_dataset_id:      an encoded id string
+
+        :returns:   dict containing information about the dataset
+        :rtype:     dictionary
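+
+        example: ``DELETE localhost:8080/api/libraries/datasets/f2db41e1fa331b3e?undelete=true`` (the encoded id is illustrative)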
+        """
+        undelete = util.string_as_bool( kwd.get( 'undelete', False ) )
+        try:
+            dataset = self.get_library_dataset( trans, id=encoded_dataset_id, check_ownership=False, check_accessible=False )
+        except Exception as e:
+            raise exceptions.ObjectNotFound( 'Requested dataset was not found. ' + str(e) )
+        current_user_roles = trans.get_current_user_roles()
+        allowed = trans.app.security_agent.can_modify_library_item( current_user_roles, dataset )
+        if ( not allowed ) and ( not trans.user_is_admin() ):
+            raise exceptions.InsufficientPermissionsException( 'You do not have proper permissions to delete this dataset.')
+
+        if undelete:
+            dataset.deleted = False
+        else:
+            dataset.deleted = True
+
+        trans.sa_session.add( dataset )
+        trans.sa_session.flush()
+
+        rval = trans.security.encode_all_ids( dataset.to_dict() )
+        nice_size = util.nice_size( int( dataset.library_dataset_dataset_association.get_size() ) )
+        rval[ 'file_size' ] = nice_size
+        rval[ 'update_time' ] = dataset.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+        rval[ 'deleted' ] = dataset.deleted
+        rval[ 'folder_id' ] = 'F' + rval[ 'folder_id' ]
+        return rval
+
+    @expose_api
+    def load( self, trans, payload=None, **kwd ):
+        """
+        * POST /api/libraries/datasets
+        Load dataset from the given source into the library.
+        Source can be:
+            user directory - root folder specified in galaxy.ini as "$user_library_import_dir"
+                example path: path/to/galaxy/$user_library_import_dir/user@example.com/{user can browse everything here}
+                the folder with the user login has to be created beforehand
+            (admin) import directory - root folder specified in galaxy.ini as "$library_import_dir"
+                example path: path/to/galaxy/$library_import_dir/{admin can browse everything here}
+            (admin) any absolute or relative path - option allowed with "allow_library_path_paste" in galaxy.ini
+
+        :param   payload: dictionary structure containing:
+            :param  encoded_folder_id:      the encoded id of the folder to import dataset(s) to
+            :type   encoded_folder_id:      an encoded id string
+            :param  source:                 source the datasets should be loaded from
+            :type   source:                 str
+            :param  link_data:              flag whether to link the dataset to data or copy it to Galaxy, defaults to copy
+                                            when set to True all symlinks will be resolved _once_
+            :type   link_data:              bool
+            :param  preserve_dirs:          flag whether to preserve the directory structure when importing dir
+                                            if False only datasets will be imported
+            :type   preserve_dirs:          bool
+            :param  file_type:              file type of the loaded datasets, defaults to 'auto' (autodetect)
+            :type   file_type:              str
+            :param  dbkey:                  dbkey of the loaded genome, defaults to '?' (unknown)
+            :type   dbkey:                  str
+        :type:   dictionary
+        :returns:   dict containing information about the created upload job
+        :rtype:     dictionary
+        :raises: RequestParameterMissingException, AdminRequiredException, ConfigDoesNotAllowException, RequestParameterInvalidException
+                    InsufficientPermissionsException, ObjectNotFound
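+
+        example of a payload (the folder id and path are illustrative):
+            ``{"encoded_folder_id": "F94fbe88d297f4a7f", "source": "userdir_file", "path": "run1/sample.fastq", "file_type": "fastqsanger"}``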
+        """
+        if payload:
+            kwd.update(payload)
+        kwd['space_to_tab'] = False
+        kwd['to_posix_lines'] = True
+        kwd[ 'dbkey' ] = kwd.get( 'dbkey', '?' )
+        kwd[ 'file_type' ] = kwd.get( 'file_type', 'auto' )
+        kwd['link_data_only'] = 'link_to_files' if util.string_as_bool( kwd.get( 'link_data', False ) ) else 'copy_files'
+        encoded_folder_id = kwd.get( 'encoded_folder_id', None )
+        if encoded_folder_id is not None:
+            folder_id = self.folder_manager.cut_and_decode( trans, encoded_folder_id )
+        else:
+            raise exceptions.RequestParameterMissingException( 'The required attribute encoded_folder_id is missing.' )
+        path = kwd.get( 'path', None)
+        if path is None:
+            raise exceptions.RequestParameterMissingException( 'The required attribute path is missing.' )
+        folder = self.folder_manager.get( trans, folder_id )
+
+        source = kwd.get( 'source', None )
+        if source not in [ 'userdir_file', 'userdir_folder', 'importdir_file', 'importdir_folder', 'admin_path' ]:
+            raise exceptions.RequestParameterMissingException( 'You have to specify the "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder".' )
+        if source in [ 'importdir_file', 'importdir_folder' ]:
+            if not trans.user_is_admin:
+                raise exceptions.AdminRequiredException( 'Only admins can import from importdir.' )
+            if not trans.app.config.library_import_dir:
+                raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow admins to import into library from importdir.' )
+            import_base_dir = trans.app.config.library_import_dir
+            path = os.path.join( import_base_dir, path )
+        if source in [ 'userdir_file', 'userdir_folder' ]:
+            user_login = trans.user.email
+            user_base_dir = trans.app.config.user_library_import_dir
+            if user_base_dir is None:
+                raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow upload from user directories.' )
+            full_dir = os.path.join( user_base_dir, user_login )
+            if not path.lower().startswith( full_dir.lower() ):
+                path = os.path.join( full_dir, path )
+            if not os.path.exists( path ):
+                raise exceptions.RequestParameterInvalidException( 'Given path does not exist on the host.' )
+            if not self.folder_manager.can_add_item( trans, folder ):
+                raise exceptions.InsufficientPermissionsException( 'You do not have proper permission to add items to the given folder.' )
+        if source == 'admin_path':
+            if not trans.app.config.allow_library_path_paste:
+                raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow admins to import into library from path.' )
+            if not trans.user_is_admin:
+                raise exceptions.AdminRequiredException( 'Only admins can import from path.' )
+
+        # Set up the traditional tool state/params
+        tool_id = 'upload1'
+        tool = trans.app.toolbox.get_tool( tool_id )
+        state = tool.new_state( trans )
+        tool.populate_state( trans, tool.inputs, kwd, state.inputs )
+        tool_params = state.inputs
+        dataset_upload_inputs = []
+        for input in tool.inputs.itervalues():
+            if input.type == "upload_dataset":
+                dataset_upload_inputs.append( input )
+        library_bunch = upload_common.handle_library_params( trans, {}, trans.security.encode_id( folder.id ) )
+        abspath_datasets = []
+        kwd[ 'filesystem_paths' ] = path
+        if source in [ 'importdir_folder' ]:
+            kwd[ 'filesystem_paths' ] = os.path.join( import_base_dir, path )
+        # user wants to import one file only
+        if source in [ "userdir_file", "importdir_file" ]:
+            file = os.path.abspath( path )
+            abspath_datasets.append( trans.webapp.controllers[ 'library_common' ].make_library_uploaded_dataset(
+                trans, 'api', kwd, os.path.basename( file ), file, 'server_dir', library_bunch ) )
+        # user wants to import whole folder
+        if source == "userdir_folder":
+            uploaded_datasets_bunch = trans.webapp.controllers[ 'library_common' ].get_path_paste_uploaded_datasets(
+                trans, 'api', kwd, library_bunch, 200, '' )
+            uploaded_datasets = uploaded_datasets_bunch[ 0 ]
+            if uploaded_datasets is None:
+                raise exceptions.ObjectNotFound( 'Given folder does not contain any datasets.' )
+            for ud in uploaded_datasets:
+                ud.path = os.path.abspath( ud.path )
+                abspath_datasets.append( ud )
+        #  user wants to import from path
+        if source in [ "admin_path", "importdir_folder" ]:
+            # validate the path is within root
+            uploaded_datasets_bunch = trans.webapp.controllers[ 'library_common' ].get_path_paste_uploaded_datasets(
+                trans, 'api', kwd, library_bunch, 200, '' )
+            uploaded_datasets = uploaded_datasets_bunch[0]
+            if uploaded_datasets is None:
+                raise exceptions.ObjectNotFound( 'Given folder does not contain any datasets.' )
+            for ud in uploaded_datasets:
+                ud.path = os.path.abspath( ud.path )
+                abspath_datasets.append( ud )
+        json_file_path = upload_common.create_paramfile( trans, abspath_datasets )
+        data_list = [ ud.data for ud in abspath_datasets ]
+        job_params = {}
+        job_params['link_data_only'] = dumps( kwd.get( 'link_data_only', 'copy_files' ) )
+        job_params['uuid'] = dumps( kwd.get( 'uuid', None ) )
+        job, output = upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=folder, job_params=job_params )
+        trans.sa_session.add( job )
+        trans.sa_session.flush()
+        job_dict = job.to_dict()
+        job_dict[ 'id' ] = trans.security.encode_id( job_dict[ 'id' ] )
+        return job_dict
+
+    @web.expose
+    #  TODO convert to expose_api
+    def download( self, trans, format, **kwd ):
+        """
+        download( self, trans, format, **kwd )
+        * GET /api/libraries/datasets/download/{format}
+        * POST /api/libraries/datasets/download/{format}
+            Downloads requested datasets (identified by encoded IDs) in requested format.
+
+        example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ld_ids%255B%255D=a0d84b45643a2678&ld_ids%255B%255D=fe38c84dcd46c828``
+
+        .. note:: supported format values are: 'zip', 'tgz', 'tbz', 'uncompressed'
+
+        :param  format:      string representing requested archive format
+        :type   format:      string
+        :param  ld_ids[]:      an array of encoded dataset ids
+        :type   ld_ids[]:      an array
+        :param  folder_ids[]:      an array of encoded folder ids
+        :type   folder_ids[]:      an array
+
+        :rtype:   file
+        :returns: either archive with the requested datasets packed inside or a single uncompressed dataset
+
+        :raises: MessageException, ItemDeletionException, ItemAccessibilityException, HTTPBadRequest, OSError, IOError, ObjectNotFound
+        """
+        library_datasets = []
+        datasets_to_download = kwd.get( 'ld_ids%5B%5D', None )
+        if datasets_to_download is None:
+            datasets_to_download = kwd.get( 'ld_ids', None )
+        if datasets_to_download is not None:
+            datasets_to_download = util.listify( datasets_to_download )
+            for dataset_id in datasets_to_download:
+                try:
+                    library_dataset = self.get_library_dataset( trans, id=dataset_id, check_ownership=False, check_accessible=True )
+                    library_datasets.append( library_dataset )
+                except HTTPBadRequest:
+                    raise exceptions.RequestParameterInvalidException( 'Bad Request.' )
+                except HTTPInternalServerError:
+                    raise exceptions.InternalServerError( 'Internal error.' )
+                except Exception as e:
+                    raise exceptions.InternalServerError( 'Unknown error. ' + str(e) )
+
+        folders_to_download = kwd.get( 'folder_ids%5B%5D', None )
+        if folders_to_download is None:
+            folders_to_download = kwd.get( 'folder_ids', None )
+        if folders_to_download is not None:
+            folders_to_download = util.listify( folders_to_download )
+
+            current_user_roles = trans.get_current_user_roles()
+
+            def traverse( folder ):
+                admin = trans.user_is_admin()
+                rval = []
+                for subfolder in folder.active_folders:
+                    if not admin:
+                        can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, subfolder )
+                    if (admin or can_access) and not subfolder.deleted:
+                        rval.extend( traverse( subfolder ) )
+                for ld in folder.datasets:
+                    if not admin:
+                        can_access = trans.app.security_agent.can_access_dataset(
+                            current_user_roles,
+                            ld.library_dataset_dataset_association.dataset
+                        )
+                    if (admin or can_access) and not ld.deleted:
+                        rval.append( ld )
+                return rval
+
+            for encoded_folder_id in folders_to_download:
+                folder_id = self.folder_manager.cut_and_decode( trans, encoded_folder_id )
+                folder = self.folder_manager.get( trans, folder_id )
+                library_datasets.extend( traverse( folder ) )
+
+        if not library_datasets:
+            raise exceptions.RequestParameterMissingException( 'Request has to contain a list of dataset ids or folder ids to download.' )
+
+        if format in [ 'zip', 'tgz', 'tbz' ]:
+            # error = False
+            killme = string.punctuation + string.whitespace
+            trantab = string.maketrans( killme, '_' * len( killme ) )
+            try:
+                outext = 'zip'
+                if format == 'zip':
+                    # Can't use mkstemp - the file must not exist first
+                    tmpd = tempfile.mkdtemp()
+                    util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
+                    tmpf = os.path.join( tmpd, 'library_download.' + format )
+                    if trans.app.config.upstream_gzip:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+                    else:
+                        archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+                    archive.add = lambda x, y: archive.write( x, y.encode( 'CP437' ) )
+                elif format == 'tgz':
+                    if trans.app.config.upstream_gzip:
+                        archive = StreamBall( 'w|' )
+                        outext = 'tar'
+                    else:
+                        archive = StreamBall( 'w|gz' )
+                        outext = 'tgz'
+                elif format == 'tbz':
+                    archive = StreamBall( 'w|bz2' )
+                    outext = 'tbz2'
+            except ( OSError, zipfile.BadZipfile ):
+                log.exception( "Unable to create archive for download" )
+                raise exceptions.InternalServerError( "Unable to create archive for download." )
+            except Exception:
+                log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[ 0 ] )
+                raise exceptions.InternalServerError( "Unable to create archive for download." )
+            composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
+            seen = []
+            for ld in library_datasets:
+                ldda = ld.library_dataset_dataset_association
+                ext = ldda.extension
+                is_composite = ext in composite_extensions
+                path = ""
+                parent_folder = ldda.library_dataset.folder
+                while parent_folder is not None:
+                    # Exclude the now-hidden "root folder"
+                    if parent_folder.parent is None:
+                        path = os.path.join( parent_folder.library_root[ 0 ].name, path )
+                        break
+                    path = os.path.join( parent_folder.name, path )
+                    parent_folder = parent_folder.parent
+                path += ldda.name
+                while path in seen:
+                    path += '_'
+                seen.append( path )
+                zpath = os.path.split(path)[ -1 ]  # comes as base_name/fname
+                outfname, zpathext = os.path.splitext( zpath )
+
+                if is_composite:  # need to add all the components from the extra_files_path to the zip
+                    if zpathext == '':
+                        zpath = '%s.html' % zpath  # fake the real nature of the html file
+                    try:
+                        if format == 'zip':
+                            archive.add( ldda.dataset.file_name, zpath )  # add the primary of a composite set
+                        else:
+                            archive.add( ldda.dataset.file_name, zpath, check_file=True )  # add the primary of a composite set
+                    except IOError:
+                        log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to create archive for download." )
+                    except ObjectNotFound:
+                        log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                        raise exceptions.ObjectNotFound( "Requested dataset not found. " )
+                    except Exception as e:
+                        log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to add composite parent to temporary library download archive. " + str( e ) )
+
+                    flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
+                    for fpath in flist:
+                        efp, fname = os.path.split(fpath)
+                        if fname:
+                            fname = fname.translate(trantab)
+                        try:
+                            if format == 'zip':
+                                archive.add( fpath, fname )
+                            else:
+                                archive.add( fpath, fname, check_file=True )
+                        except IOError:
+                            log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname) )
+                            raise exceptions.InternalServerError( "Unable to create archive for download." )
+                        except ObjectNotFound:
+                            log.exception( "Requested dataset %s does not exist on the host." % fpath )
+                            raise exceptions.ObjectNotFound( "Requested dataset not found." )
+                        except Exception as e:
+                            log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
+                            raise exceptions.InternalServerError( "Unable to add dataset to temporary library download archive . " + str( e ) )
+
+                else:  # simple case
+                    try:
+                        if format == 'zip':
+                            archive.add( ldda.dataset.file_name, path )
+                        else:
+                            archive.add( ldda.dataset.file_name, path, check_file=True )
+                    except IOError:
+                        log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name )
+                        raise exceptions.InternalServerError( "Unable to create archive for download" )
+                    except ObjectNotFound:
+                        log.exception( "Requested dataset %s does not exist on the host." % ldda.dataset.file_name )
+                        raise exceptions.ObjectNotFound( "Requested dataset not found." )
+                    except Exception as e:
+                        log.exception( "Unable to add %s to temporary library download archive %s" % ( fname, outfname ) )
+                        raise exceptions.InternalServerError( "Unknown error. " + str( e ) )
+            lname = 'selected_dataset'
+            fname = lname.replace( ' ', '_' ) + '_files'
+            if format == 'zip':
+                archive.close()
+                trans.response.set_content_type( "application/octet-stream" )
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
+                archive = util.streamball.ZipBall( tmpf, tmpd )
+                archive.wsgi_status = trans.response.wsgi_status()
+                archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                return archive.stream
+            else:
+                trans.response.set_content_type( "application/x-tar" )
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % ( fname, outext )
+                archive.wsgi_status = trans.response.wsgi_status()
+                archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                return archive.stream
+        elif format == 'uncompressed':
+            if len(library_datasets) != 1:
+                raise exceptions.RequestParameterInvalidException( "You can download only one uncompressed file at once." )
+            else:
+                single_ld = library_datasets[ 0 ]
+                ldda = single_ld.library_dataset_dataset_association
+                dataset = ldda.dataset
+                fStat = os.stat( dataset.file_name )
+                trans.response.set_content_type( ldda.get_mime() )
+                trans.response.headers[ 'Content-Length' ] = int( fStat.st_size )
+                fname = ldda.name
+                fname = ''.join( c if c in util.FILENAME_VALID_CHARS else '_' for c in fname )[ 0:150 ]
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s"' % fname
+                try:
+                    return open( dataset.file_name )
+                except Exception:
+                    raise exceptions.InternalServerError( "Unable to open the requested dataset for download." )
+        else:
+            raise exceptions.RequestParameterInvalidException( "Wrong format parameter specified" )
+
+    def _build_path( self, trans, folder ):
+        """
+        Search the path upwards recursively and load the whole route of
+        names and ids for breadcrumb building purposes.
+
+        :param folder: current folder for navigating up
+        :type  folder: Galaxy LibraryFolder
+
+        :returns:   list consisting of the full path to the library
+        :rtype:     list
+        """
+        path_to_root = []
+        # We are almost in root
+        if folder.parent_id is None:
+            path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
+        else:
+            # We add the current folder and traverse up one folder.
+            path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
+            upper_folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder.parent_id )
+            path_to_root.extend( self._build_path( trans, upper_folder ) )
+        return path_to_root
+
+    def __decode_id( self, trans, encoded_id, object_name=None ):
+        """
+        Try to decode the id.
+
+        :param  object_name:      Name of the object the id belongs to. (optional)
+        :type   object_name:      str
+        """
+        try:
+            return trans.security.decode_id( encoded_id )
+        except TypeError:
+            raise exceptions.MalformedId( 'Malformed %s id specified, unable to decode.' % ( object_name if object_name is not None else '' ) )
+        except ValueError:
+            raise exceptions.MalformedId( 'Wrong %s id specified, unable to decode.' % ( object_name if object_name is not None else '' ) )
diff --git a/lib/galaxy/webapps/galaxy/api/libraries.py b/lib/galaxy/webapps/galaxy/api/libraries.py
new file mode 100644
index 0000000..90316c0
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/libraries.py
@@ -0,0 +1,377 @@
+"""
+API operations on a data library.
+"""
+from galaxy import util
+from galaxy import exceptions
+from galaxy.managers import libraries, folders, roles
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy.web.base.controller import BaseAPIController
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class LibrariesController( BaseAPIController ):
+
+    def __init__( self, app ):
+        super( LibrariesController, self ).__init__( app )
+        self.folder_manager = folders.FolderManager()
+        self.library_manager = libraries.LibraryManager()
+        self.role_manager = roles.RoleManager( app )
+
+    @expose_api_anonymous
+    def index( self, trans, **kwd ):
+        """
+        index( self, trans, **kwd )
+        * GET /api/libraries:
+            Returns a list of summary data for all libraries.
+
+        :param  deleted: if True, show only ``deleted`` libraries, if False show only ``non-deleted``
+        :type   deleted: boolean (optional)
+
+        :returns:   list of dictionaries containing library information
+        :rtype:     list
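+
+        example: ``GET localhost:8080/api/libraries?deleted=false``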
+
+        .. seealso:: :attr:`galaxy.model.Library.dict_collection_visible_keys`
+
+        """
+        deleted = util.string_as_bool_or_none( kwd.get( 'deleted', None ) )
+        query = self.library_manager.list( trans, deleted )
+        libraries = []
+        for library in query:
+            libraries.append( self.library_manager.get_library_dict( trans, library ) )
+        return libraries
+
+    def __decode_id( self, trans, encoded_id, object_name=None ):
+        """
+        Try to decode the id.
+
+        :param  object_name:      Name of the object the id belongs to. (optional)
+        :type   object_name:      str
+        """
+        try:
+            return trans.security.decode_id( encoded_id )
+        except TypeError:
+            raise exceptions.MalformedId( 'Malformed %s id specified, unable to decode.' % ( object_name if object_name is not None else '' ) )
+        except ValueError:
+            raise exceptions.MalformedId( 'Wrong %s id specified, unable to decode.' % ( object_name if object_name is not None else '' ) )
+
+    @expose_api_anonymous
+    def show( self, trans, id, deleted='False', **kwd ):
+        """
+        show( self, trans, id, deleted='False', **kwd )
+        * GET /api/libraries/{encoded_id}:
+            returns detailed information about a library
+        * GET /api/libraries/deleted/{encoded_id}:
+            returns detailed information about a ``deleted`` library
+
+        :param  id:      the encoded id of the library
+        :type   id:      an encoded id string
+        :param  deleted: if True, allow information on a ``deleted`` library
+        :type   deleted: boolean
+
+        :returns:   detailed library information
+        :rtype:     dictionary
+
+        .. seealso:: :attr:`galaxy.model.Library.dict_element_visible_keys`
+
+        :raises: MalformedId, ObjectNotFound
+        """
+        library = self.library_manager.get( trans, self.__decode_id( trans, id, 'library' ) )
+        library_dict = self.library_manager.get_library_dict( trans, library )
+        return library_dict
+
+    @expose_api
+    def create( self, trans, payload=None, **kwd ):
+        """
+        * POST /api/libraries:
+            Creates a new library.
+
+        .. note:: Currently, only admin users can create libraries.
+
+        :param  payload: dictionary structure containing::
+            :param name:         (required) the new library's name
+            :type  name:         str
+            :param description:  the new library's description
+            :type  description:  str
+            :param synopsis:     the new library's synopsis
+            :type  synopsis:     str
+        :type   payload: dict
+        :returns:   detailed library information
+        :rtype:     dict
+        :raises: RequestParameterMissingException
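+
+        example of a payload (the values are illustrative):
+            ``{"name": "Sequencing runs", "description": "Raw data from the sequencing facility", "synopsis": ""}``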
+        """
+        if payload:
+            kwd.update(payload)
+        name = kwd.get('name', None)
+        if not name:
+            raise exceptions.RequestParameterMissingException( "Missing required parameter 'name'." )
+        description = kwd.get( 'description', '' )
+        synopsis = kwd.get( 'synopsis', '' )
+        if synopsis in [ 'None', None ]:
+            synopsis = ''
+        library = self.library_manager.create( trans, name, description, synopsis )
+        library_dict = self.library_manager.get_library_dict( trans, library )
+        return library_dict
+
+    @expose_api
+    def update( self, trans, id, payload=None, **kwd ):
+        """
+        * PATCH /api/libraries/{encoded_id}
+            Updates the library defined by an ``encoded_id`` with the data in the payload.
+
+        .. note:: Currently, only admin users can update libraries. Also the library must not be `deleted`.
+
+        :param  id:      the encoded id of the library
+        :type   id:      an encoded id string
+        :param  payload: dictionary structure containing::
+            :param name:         new library's name, cannot be empty
+            :type  name:         str
+            :param description:  new library's description
+            :type  description:  str
+            :param synopsis:     new library's synopsis
+            :type  synopsis:     str
+        :type   payload: dict
+        :returns:   detailed library information
+        :rtype:     dict
+        :raises: RequestParameterMissingException
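+
+        example of a payload (the values are illustrative):
+            ``{"name": "Sequencing runs 2016", "description": "Updated description of the library"}``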
+        """
+        library = self.library_manager.get( trans, self.__decode_id( trans, id, 'library' ) )
+        if payload:
+            kwd.update(payload)
+        name = kwd.get( 'name', None )
+        if name == '':
+            raise exceptions.RequestParameterMissingException( "Parameter 'name' of library is required. You cannot remove it." )
+        description = kwd.get( 'description', None )
+        synopsis = kwd.get( 'synopsis', None )
+        updated_library = self.library_manager.update( trans, library, name, description, synopsis )
+        library_dict = self.library_manager.get_library_dict( trans, updated_library )
+        return library_dict
+
+    @expose_api
+    def delete( self, trans, id, payload=None, **kwd ):
+        """
+        * DELETE /api/libraries/{id}
+            marks the library with the given ``id`` as `deleted` (or removes the `deleted` mark if the `undelete` param is true)
+
+        .. note:: Currently, only admin users can un/delete libraries.
+
+        :param  id:     the encoded id of the library to un/delete
+        :type   id:     an encoded id string
+
+        :param   payload: dictionary structure containing:
+            :param  undelete:    (optional) flag specifying whether the item should be deleted or undeleted, defaults to false:
+            :type   undelete:    bool
+        :type:     dictionary
+        :returns:   detailed library information
+        :rtype:     dictionary
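+
+        example: ``DELETE localhost:8080/api/libraries/f2db41e1fa331b3e`` with payload ``{"undelete": true}`` (the encoded id is illustrative)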
+
+        .. seealso:: :attr:`galaxy.model.Library.dict_element_visible_keys`
+        """
+        if payload:
+            kwd.update(payload)
+        library = self.library_manager.get( trans, self.__decode_id( trans, id, 'library' ))
+        undelete = util.string_as_bool( kwd.get( 'undelete', False ) )
+        library = self.library_manager.delete( trans, library, undelete )
+        library_dict = self.library_manager.get_library_dict( trans, library )
+        return library_dict
+
+    @expose_api
+    def get_permissions( self, trans, encoded_library_id, **kwd ):
+        """
+        * GET /api/libraries/{id}/permissions
+
+        Load all permissions for the given library id and return it.
+
+        :param  encoded_library_id:     the encoded id of the library
+        :type   encoded_library_id:     an encoded id string
+
+        :param  scope:      either 'current' or 'available'
+        :type   scope:      string
+
+        :param  is_library_access:      indicates whether the roles available for the library access are requested
+        :type   is_library_access:      bool
+
+        :returns:   dictionary with all applicable permissions' values
+        :rtype:     dictionary
+
+        :raises: InsufficientPermissionsException
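+
+        example: ``GET localhost:8080/api/libraries/f2db41e1fa331b3e/permissions?scope=available&q=admin`` (the encoded id is illustrative)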
+        """
+        current_user_roles = trans.get_current_user_roles()
+        is_admin = trans.user_is_admin()
+        library = self.library_manager.get( trans, self.__decode_id( trans, encoded_library_id, 'library' ) )
+        if not ( is_admin or trans.app.security_agent.can_manage_library_item( current_user_roles, library ) ):
+            raise exceptions.InsufficientPermissionsException( 'You do not have proper permission to access permissions of this library.' )
+
+        scope = kwd.get( 'scope', None )
+        is_library_access = util.string_as_bool( kwd.get( 'is_library_access', False ) )
+
+        if scope == 'current' or scope is None:
+            roles = self.library_manager.get_current_roles( trans, library )
+            return roles
+
+        #  Return roles that are available to select.
+        elif scope == 'available':
+            page = kwd.get( 'page', None )
+            if page is not None:
+                page = int( page )
+            else:
+                page = 1
+
+            page_limit = kwd.get( 'page_limit', None )
+            if page_limit is not None:
+                page_limit = int( page_limit )
+            else:
+                page_limit = 10
+
+            query = kwd.get( 'q', None )
+
+            roles, total_roles = trans.app.security_agent.get_valid_roles( trans, library, query, page, page_limit, is_library_access )
+
+            return_roles = []
+            for role in roles:
+                role_id = trans.security.encode_id( role.id )
+                return_roles.append( dict( id=role_id, name=role.name, type=role.type ) )
+            return dict( roles=return_roles, page=page, page_limit=page_limit, total=total_roles )
+        else:
+            raise exceptions.RequestParameterInvalidException( "The value of 'scope' parameter is invalid. Alllowed values: current, available" )
+
+    @expose_api
+    def set_permissions( self, trans, encoded_library_id, payload=None, **kwd ):
+        """
+        * POST /api/libraries/{encoded_library_id}/permissions
+            Set permissions of the given library to the given role ids.
+
+        :param  encoded_library_id:      the encoded id of the library to set the permissions of
+        :type   encoded_library_id:      an encoded id string
+        :param   payload: dictionary structure containing:
+            :param  action:            (required) describes what action should be performed
+                                       available actions: remove_restrictions, set_permissions
+            :type   action:            str
+            :param  access_ids[]:      list of Role.id defining roles that should have access permission on the library
+            :type   access_ids[]:      string or list
+            :param  add_ids[]:         list of Role.id defining roles that should have add item permission on the library
+            :type   add_ids[]:         string or list
+            :param  manage_ids[]:      list of Role.id defining roles that should have manage permission on the library
+            :type   manage_ids[]:      string or list
+            :param  modify_ids[]:      list of Role.id defining roles that should have modify permission on the library
+            :type   modify_ids[]:      string or list
+        :type:      dictionary
+        :returns:   dict of current roles for all available permission types
+        :rtype:     dictionary
+        :raises: RequestParameterInvalidException, InsufficientPermissionsException, InternalServerError
+                    RequestParameterMissingException
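+
+        example of a payload (the role ids are illustrative):
+            ``{"action": "set_permissions", "access_ids[]": [], "add_ids[]": ["f2db41e1fa331b3e"], "manage_ids[]": ["f2db41e1fa331b3e"], "modify_ids[]": ["f2db41e1fa331b3e"]}``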
+        """
+        if payload:
+            kwd.update(payload)
+        is_admin = trans.user_is_admin()
+        current_user_roles = trans.get_current_user_roles()
+        library = self.library_manager.get( trans, self.__decode_id( trans, encoded_library_id, 'library' ) )
+
+        if not ( is_admin or trans.app.security_agent.can_manage_library_item( current_user_roles, library ) ):
+            raise exceptions.InsufficientPermissionsException( 'You do not have proper permission to modify permissions of this library.' )
+
+        new_access_roles_ids = util.listify( kwd.get( 'access_ids[]', None ) )
+        new_add_roles_ids = util.listify( kwd.get( 'add_ids[]', None ) )
+        new_manage_roles_ids = util.listify( kwd.get( 'manage_ids[]', None ) )
+        new_modify_roles_ids = util.listify( kwd.get( 'modify_ids[]', None ) )
+
+        action = kwd.get( 'action', None )
+        if action is None:
+            if payload is not None:
+                return self.set_permissions_old( trans, library, payload, **kwd )
+            else:
+                raise exceptions.RequestParameterMissingException( 'The mandatory parameter "action" is missing.' )
+        elif action == 'remove_restrictions':
+            is_public = self.library_manager.make_public( trans, library )
+            if not is_public:
+                raise exceptions.InternalServerError( 'An error occurred while making library public.' )
+        elif action == 'set_permissions':
+
+            # ACCESS LIBRARY ROLES
+            valid_access_roles = []
+            invalid_access_roles_names = []
+            for role_id in new_access_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                valid_roles, total_roles = trans.app.security_agent.get_valid_roles( trans, library, is_library_access=True )
+                if role in valid_roles:
+                    valid_access_roles.append( role )
+                else:
+                    invalid_access_roles_names.append( role_id )
+            if len( invalid_access_roles_names ) > 0:
+                log.warning( "The following roles could not be added to the library access permission: " + str( invalid_access_roles_names ) )
+
+            # ADD TO LIBRARY ROLES
+            valid_add_roles = []
+            invalid_add_roles_names = []
+            for role_id in new_add_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                valid_roles, total_roles = trans.app.security_agent.get_valid_roles( trans, library )
+                if role in valid_roles:
+                    valid_add_roles.append( role )
+                else:
+                    invalid_add_roles_names.append( role_id )
+            if len( invalid_add_roles_names ) > 0:
+                log.warning( "The following roles could not be added to the add library item permission: " + str( invalid_add_roles_names ) )
+
+            # MANAGE LIBRARY ROLES
+            valid_manage_roles = []
+            invalid_manage_roles_names = []
+            valid_roles, total_roles = trans.app.security_agent.get_valid_roles( trans, library )
+            for role_id in new_manage_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                if role in valid_roles:
+                    valid_manage_roles.append( role )
+                else:
+                    invalid_manage_roles_names.append( role_id )
+            if len( invalid_manage_roles_names ) > 0:
+                log.warning( "The following roles could not be added to the manage library permission: " + str( invalid_manage_roles_names ) )
+
+            # MODIFY LIBRARY ROLES
+            valid_modify_roles = []
+            invalid_modify_roles_names = []
+            valid_roles, total_roles = trans.app.security_agent.get_valid_roles( trans, library )
+            for role_id in new_modify_roles_ids:
+                role = self.role_manager.get( trans, self.__decode_id( trans, role_id, 'role' ) )
+                if role in valid_roles:
+                    valid_modify_roles.append( role )
+                else:
+                    invalid_modify_roles_names.append( role_id )
+            if len( invalid_modify_roles_names ) > 0:
+                log.warning( "The following roles could not be added to the modify library permission: " + str( invalid_modify_roles_names ) )
+
+            permissions = { trans.app.security_agent.permitted_actions.LIBRARY_ACCESS: valid_access_roles,
+                            trans.app.security_agent.permitted_actions.LIBRARY_ADD: valid_add_roles,
+                            trans.app.security_agent.permitted_actions.LIBRARY_MANAGE: valid_manage_roles,
+                            trans.app.security_agent.permitted_actions.LIBRARY_MODIFY: valid_modify_roles }
+
+            trans.app.security_agent.set_all_library_permissions( trans, library, permissions )
+            trans.sa_session.refresh( library )
+            # Copy the permissions to the root folder
+            trans.app.security_agent.copy_library_permissions( trans, library, library.root_folder )
+        else:
+            raise exceptions.RequestParameterInvalidException( 'The mandatory parameter "action" has an invalid value. '
+                                                               'Allowed values are: "remove_restrictions", "set_permissions".' )
+        roles = self.library_manager.get_current_roles( trans, library )
+        return roles
+
+    def set_permissions_old( self, trans, library, payload, **kwd ):
+        """
+        *** old implementation for backward compatibility ***
+
+        POST /api/libraries/{encoded_library_id}/permissions
+        Updates the library permissions.
+        """
+        params = util.Params( payload )
+        permissions = {}
+        for k, v in trans.app.model.Library.permitted_actions.items():
+            role_params = params.get( k + '_in', [] )
+            in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( trans.security.decode_id( x ) ) for x in util.listify( role_params ) ]
+            permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
+        trans.app.security_agent.set_all_library_permissions( trans, library, permissions )
+        trans.sa_session.refresh( library )
+        # Copy the permissions to the root folder
+        trans.app.security_agent.copy_library_permissions( trans, library, library.root_folder )
+        item = library.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id, 'root_folder_id': trans.security.encode_id } )
+        return item
diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py
new file mode 100644
index 0000000..bc58d2e
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/library_contents.py
@@ -0,0 +1,444 @@
+"""
+API operations on the contents of a data library.
+"""
+from galaxy import util
+from galaxy import web
+from galaxy import exceptions
+from galaxy import managers
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web.base.controller import BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems
+from galaxy.web.base.controller import HTTPBadRequest, url_for
+from galaxy.managers.collections_util import api_payload_to_create_params, dictify_dataset_collection_instance
+from galaxy.model import ExtendedMetadata, ExtendedMetadataIndex
+from sqlalchemy.orm.exc import MultipleResultsFound
+from sqlalchemy.orm.exc import NoResultFound
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class LibraryContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems ):
+
+    def __init__( self, app ):
+        super( LibraryContentsController, self ).__init__( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    @expose_api
+    def index( self, trans, library_id, **kwd ):
+        """
+        index( self, trans, library_id, **kwd )
+        * GET /api/libraries/{library_id}/contents:
+            Returns a list of library files and folders.
+
+        .. note:: May be slow! Returns all content traversing recursively through all folders.
+        .. seealso:: :class:`galaxy.webapps.galaxy.api.FolderContentsController.index` for a non-recursive solution
+
+        :param  library_id: the encoded id of the library
+        :type   library_id: str
+
+        :returns:   list of dictionaries of the form:
+            * id:   the encoded id of the library item
+            * name: the 'library path'
+                or relationship of the library item to the root
+            * type: 'file' or 'folder'
+            * url:  the url to get detailed information on the library item
+        :rtype:     list
+
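+        For example, a library whose only content is its root folder might
+        return (encoded ids illustrative)::
+
+            [ { "id": "F94fd0329e0822f2a", "type": "folder", "name": "/",
+                "url": "/api/libraries/94fd0329e0822f2a/contents/F94fd0329e0822f2a" } ]
+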
+        :raises:  MalformedId, InconsistentDatabase, RequestParameterInvalidException, InternalServerError
+        """
+        rval = []
+        current_user_roles = trans.get_current_user_roles()
+
+        def traverse( folder ):
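+            # recursively collect all accessible, non-deleted subfolders and
+            # datasets, annotating each with its api_path and api_type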
+            admin = trans.user_is_admin()
+            rval = []
+            for subfolder in folder.active_folders:
+                if not admin:
+                    can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, subfolder )
+                if (admin or can_access) and not subfolder.deleted:
+                    subfolder.api_path = folder.api_path + '/' + subfolder.name
+                    subfolder.api_type = 'folder'
+                    rval.append( subfolder )
+                    rval.extend( traverse( subfolder ) )
+            for ld in folder.datasets:
+                if not admin:
+                    can_access = trans.app.security_agent.can_access_dataset(
+                        current_user_roles,
+                        ld.library_dataset_dataset_association.dataset
+                    )
+                if (admin or can_access) and not ld.deleted:
+                    ld.api_path = folder.api_path + '/' + ld.name
+                    ld.api_type = 'file'
+                    rval.append( ld )
+            return rval
+        try:
+            decoded_library_id = self.decode_id( library_id )
+        except Exception:
+            raise exceptions.MalformedId( 'Malformed library id ( %s ) specified, unable to decode.' % library_id )
+        try:
+            library = trans.sa_session.query( trans.app.model.Library ).filter( trans.app.model.Library.table.c.id == decoded_library_id ).one()
+        except MultipleResultsFound:
+            raise exceptions.InconsistentDatabase( 'Multiple libraries found with the same id.' )
+        except NoResultFound:
+            raise exceptions.RequestParameterInvalidException( 'No library found with the id provided.' )
+        except Exception as e:
+            raise exceptions.InternalServerError( 'Error loading from the database: ' + str( e ) )
+        if not ( trans.user_is_admin() or trans.app.security_agent.can_access_library( current_user_roles, library ) ):
+            raise exceptions.RequestParameterInvalidException( 'No library found with the id provided.' )
+        encoded_id = 'F' + trans.security.encode_id( library.root_folder.id )
+        # appending root folder
+        rval.append( dict( id=encoded_id,
+                           type='folder',
+                           name='/',
+                           url=url_for( 'library_content', library_id=library_id, id=encoded_id ) ) )
+        library.root_folder.api_path = ''
+        # appending all other items in the library recursively
+        for content in traverse( library.root_folder ):
+            encoded_id = trans.security.encode_id( content.id )
+            if content.api_type == 'folder':
+                encoded_id = 'F' + encoded_id
+            rval.append( dict( id=encoded_id,
+                               type=content.api_type,
+                               name=content.api_path,
+                               url=url_for( 'library_content', library_id=library_id, id=encoded_id, ) ) )
+        return rval
+
+    @expose_api
+    def show( self, trans, id, library_id, **kwd ):
+        """
+        show( self, trans, id, library_id, **kwd )
+        * GET /api/libraries/{library_id}/contents/{id}
+            Returns information about library file or folder.
+
+        :param  id:         the encoded id of the library item to return
+        :type   id:         str
+
+        :param  library_id: the encoded id of the library that contains this item
+        :type   library_id: str
+
+        :returns:   detailed library item information
+        :rtype:     dict
+
+        .. seealso::
+            :func:`galaxy.model.LibraryDataset.to_dict` and
+            :attr:`galaxy.model.LibraryFolder.dict_element_visible_keys`
+        """
+        class_name, content_id = self.__decode_library_content_id( id )
+        if class_name == 'LibraryFolder':
+            content = self.get_library_folder( trans, content_id, check_ownership=False, check_accessible=True )
+            rval = content.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id } )
+            rval[ 'id' ] = 'F' + str( rval[ 'id' ] )
+            if rval[ 'parent_id' ] is not None:  # This can happen for root folders.
+                rval[ 'parent_id' ] = 'F' + str( trans.security.encode_id( rval[ 'parent_id' ] ) )
+            rval[ 'parent_library_id' ] = trans.security.encode_id( rval[ 'parent_library_id' ] )
+        else:
+            content = self.get_library_dataset( trans, content_id, check_ownership=False, check_accessible=True )
+            rval = content.to_dict( view='element')
+            rval[ 'id' ] = trans.security.encode_id( rval[ 'id' ] )
+            rval[ 'ldda_id' ] = trans.security.encode_id( rval[ 'ldda_id' ] )
+            rval[ 'folder_id' ] = 'F' + str( trans.security.encode_id( rval[ 'folder_id' ] ) )
+            rval[ 'parent_library_id' ] = trans.security.encode_id( rval[ 'parent_library_id' ] )
+        return rval
+
+    @web.expose_api
+    def create( self, trans, library_id, payload, **kwd ):
+        """
+        create( self, trans, library_id, payload, **kwd )
+        * POST /api/libraries/{library_id}/contents:
+            create a new library file or folder
+
+        To copy an HDA into a library send ``create_type`` of 'file' and
+        the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``).
+
+        :type   library_id: str
+        :param  library_id: the encoded id of the library where to create the new item
+        :type   payload:    dict
+        :param  payload:    dictionary structure containing:
+
+            * folder_id:    the encoded id of the parent folder of the new item
+            * create_type:  the type of item to create ('file', 'folder' or 'collection')
+            * from_hda_id:  (optional, only if create_type is 'file') the
+                encoded id of an accessible HDA to copy into the library
+            * ldda_message: (optional) the new message attribute of the LDDA created
+            * extended_metadata: (optional) sub-dictionary containing any extended
+                metadata to associate with the item
+            * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths'
+            * server_dir: (optional, only if upload_option is
+                'upload_directory') relative path of the subdirectory of Galaxy
+                ``library_import_dir`` to upload. All and only the files (i.e.
+                no subdirectories) contained in the specified directory will be
+                uploaded.
+            * filesystem_paths: (optional, only if upload_option is
+                'upload_paths' and the user is an admin) file paths on the
+                Galaxy server to upload to the library, one file per line
+            * link_data_only: (optional, only when upload_option is
+                'upload_directory' or 'upload_paths') either 'copy_files'
+                (default) or 'link_to_files'. Setting to 'link_to_files'
+                symlinks instead of copying the files
+            * name: (optional, only if create_type is 'folder') name of the
+                folder to create
+            * description: (optional, only if create_type is 'folder')
+                description of the folder to create
+
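+        For example, a minimal payload creating a subfolder (encoded folder
+        id illustrative)::
+
+            { "create_type": "folder",
+              "folder_id": "F94fd0329e0822f2a",
+              "name": "sequencing_run_1",
+              "description": "raw reads" }
+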
+        :rtype:     dict
+        :returns:   a dictionary containing the id, name,
+            and 'show' url of the new item
+        """
+        if 'create_type' not in payload:
+            trans.response.status = 400
+            return "Missing required 'create_type' parameter."
+        else:
+            create_type = payload.pop( 'create_type' )
+        if create_type not in ( 'file', 'folder', 'collection' ):
+            trans.response.status = 400
+            return "Invalid value for 'create_type' parameter ( %s ) specified." % create_type
+
+        if 'folder_id' not in payload:
+            trans.response.status = 400
+            return "Missing required 'folder_id' parameter."
+        else:
+            folder_id = payload.pop( 'folder_id' )
+            class_name, folder_id = self.__decode_library_content_id( folder_id )
+        try:
+            # security is checked in the downstream controller
+            parent = self.get_library_folder( trans, folder_id, check_ownership=False, check_accessible=False )
+        except Exception as e:
+            return str( e )
+        # The rest of the security happens in the library_common controller.
+        real_folder_id = trans.security.encode_id( parent.id )
+
+        # are we copying an HDA to the library folder?
+        #   we'll need the id and any message to attach, then branch to that private function
+        from_hda_id, ldda_message = ( payload.pop( 'from_hda_id', None ), payload.pop( 'ldda_message', '' ) )
+        if create_type == 'file' and from_hda_id:
+            return self._copy_hda_to_library_folder( trans, from_hda_id, library_id, real_folder_id, ldda_message )
+
+        # check for extended metadata, store it and pop it out of the param
+        # otherwise sanitize_param will have a fit
+        ex_meta_payload = payload.pop('extended_metadata', None)
+
+        # Now create the desired content object, either file or folder.
+        if create_type == 'file':
+            status, output = trans.webapp.controllers['library_common'].upload_library_dataset( trans, 'api', library_id, real_folder_id, **payload )
+        elif create_type == 'folder':
+            status, output = trans.webapp.controllers['library_common'].create_folder( trans, 'api', real_folder_id, library_id, **payload )
+        elif create_type == 'collection':
+            # Not delegating to library_common, so need to check access to parent
+            # folder here.
+            self.check_user_can_add_to_library_item( trans, parent, check_accessible=True )
+            create_params = api_payload_to_create_params( payload )
+            create_params[ 'parent' ] = parent
+            service = trans.app.dataset_collections_service
+            dataset_collection_instance = service.create( **create_params )
+            return [ dictify_dataset_collection_instance( dataset_collection_instance, security=trans.security, parent=parent ) ]
+        if status != 200:
+            trans.response.status = status
+            return output
+        else:
+            rval = []
+            for v in output.values():
+                if ex_meta_payload is not None:
+                    # If there is extended metadata, store it, attach it to the dataset, and index it
+                    ex_meta = ExtendedMetadata(ex_meta_payload)
+                    trans.sa_session.add( ex_meta )
+                    v.extended_metadata = ex_meta
+                    trans.sa_session.add(v)
+                    trans.sa_session.flush()
+                    for path, value in self._scan_json_block(ex_meta_payload):
+                        meta_i = ExtendedMetadataIndex(ex_meta, path, value)
+                        trans.sa_session.add(meta_i)
+                    trans.sa_session.flush()
+                if isinstance( v, trans.app.model.LibraryDatasetDatasetAssociation ):
+                    v = v.library_dataset
+                encoded_id = trans.security.encode_id( v.id )
+                if create_type == 'folder':
+                    encoded_id = 'F' + encoded_id
+                rval.append( dict( id=encoded_id,
+                                   name=v.name,
+                                   url=url_for( 'library_content', library_id=library_id, id=encoded_id ) ) )
+            return rval
+
+    def _scan_json_block(self, meta, prefix=""):
+        """
+        Scan a json style data structure, and emit all fields and their values.
+        Example paths
+
+        Data
+        { "data" : [ 1, 2, 3 ] }
+
+        Path:
+        /data == [1,2,3]
+
+        /data/[0] == 1
+
+        """
+        if isinstance(meta, dict):
+            for a in meta:
+                for path, value in self._scan_json_block(meta[a], prefix + "/" + a):
+                    yield path, value
+        elif isinstance(meta, list):
+            for i, a in enumerate(meta):
+                for path, value in self._scan_json_block(a, prefix + "[%d]" % (i)):
+                    yield path, value
+        else:
+            # BUG: Everything is cast to string, which can lead to false positives
+            # for cross-type comparisons, e.g. "True" == True
+            yield prefix, ("%s" % (meta)).encode("utf8", errors='replace')
+
+    def _copy_hda_to_library_folder( self, trans, from_hda_id, library_id, folder_id, ldda_message='' ):
+        """
+        Copies hda ``from_hda_id`` to library folder ``folder_id``, optionally
+        adding ``ldda_message`` to the new ldda's ``message``.
+
+        ``library_contents.create`` will branch to this if called with 'from_hda_id'
+        in its payload.
+        """
+        log.debug( '_copy_hda_to_library_folder: %s' % ( str(( from_hda_id, library_id, folder_id, ldda_message )) ) )
+        # PRECONDITION: folder_id has already been altered to remove the folder prefix ('F')
+        # TODO: allow name and other, editable ldda attrs?
+        if ldda_message:
+            ldda_message = util.sanitize_html.sanitize_html( ldda_message, 'utf-8' )
+
+        rval = {}
+        try:
+            # check permissions on (all three?) resources: hda, library, folder
+            # TODO: do we really need the library??
+            from_hda_id = self.decode_id( from_hda_id )
+            hda = self.hda_manager.get_owned( from_hda_id, trans.user, current_history=trans.history )
+            hda = self.hda_manager.error_if_uploading( hda )
+            # library = self.get_library( trans, library_id, check_accessible=True )
+            folder = self.get_library_folder( trans, folder_id, check_accessible=True )
+
+            # TODO: refactor to use check_user_can_add_to_library_item, eliminate boolean
+            # can_current_user_add_to_library_item.
+            if not self.can_current_user_add_to_library_item( trans, folder ):
+                trans.response.status = 403
+                return { 'error': 'user has no permission to add to library folder (%s)' % ( folder_id ) }
+
+            ldda = self.copy_hda_to_library_folder( trans, hda, folder, ldda_message=ldda_message )
+            ldda_dict = ldda.to_dict()
+            rval = trans.security.encode_dict_ids( ldda_dict )
+
+        except Exception as exc:
+            # TODO: grrr...
+            if 'not accessible to the current user' in str( exc ):
+                trans.response.status = 403
+                return { 'error': str( exc ) }
+            else:
+                log.exception( exc )
+                trans.response.status = 500
+                return { 'error': str( exc ) }
+
+        return rval
+
+    @web.expose_api
+    def update( self, trans, id, library_id, payload, **kwd ):
+        """
+        update( self, trans, id, library_id, payload, **kwd )
+        * PUT /api/libraries/{library_id}/contents/{id}
+            create an ImplicitlyConvertedDatasetAssociation
+        .. seealso:: :class:`galaxy.model.ImplicitlyConvertedDatasetAssociation`
+
+        :type   id:         str
+        :param  id:         the encoded id of the library item to return
+        :type   library_id: str
+        :param  library_id: the encoded id of the library that contains this item
+        :type   payload:    dict
+        :param  payload:    dictionary structure containing::
+            'converted_dataset_id': the encoded id of the library dataset holding the converted data
+
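+        For example (encoded id illustrative)::
+
+            { "converted_dataset_id": "94fd0329e0822f2a" }
+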
+        :rtype:     None
+        :returns:   None
+        """
+        if 'converted_dataset_id' in payload:
+            converted_id = payload.pop( 'converted_dataset_id' )
+            content = self.get_library_dataset( trans, id, check_ownership=False, check_accessible=False )
+            content_conv = self.get_library_dataset( trans, converted_id, check_ownership=False, check_accessible=False )
+            assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation( parent=content.library_dataset_dataset_association,
+                                                                           dataset=content_conv.library_dataset_dataset_association,
+                                                                           file_type=content_conv.library_dataset_dataset_association.extension,
+                                                                           metadata_safe=True )
+            trans.sa_session.add( assoc )
+            trans.sa_session.flush()
+
+    def __decode_library_content_id( self, content_id ):
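+        # Folder ids arrive prefixed with 'F' (e.g. 'F94fd0329e0822f2a'),
+        # while bare library dataset ids have a length that is a multiple
+        # of 16; that is how the two are told apart below.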
+        if len( content_id ) % 16 == 0:
+            return 'LibraryDataset', content_id
+        elif content_id.startswith( 'F' ):
+            return 'LibraryFolder', content_id[ 1: ]
+        else:
+            raise HTTPBadRequest( 'Malformed library content id ( %s ) specified, unable to decode.' % str( content_id ) )
+
+    @web.expose_api
+    def delete( self, trans, library_id, id, **kwd ):
+        """
+        delete( self, trans, library_id, id, **kwd )
+        * DELETE /api/libraries/{library_id}/contents/{id}
+            delete the LibraryDataset with the given ``id``
+
+        :type   id:     str
+        :param  id:     the encoded id of the library dataset to delete
+        :type   kwd:    dict
+        :param  kwd:    (optional) dictionary structure containing:
+
+            * payload:     a dictionary itself containing:
+                * purge:   if True, purge the LD
+
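+            For example, to purge the dataset as well as deleting it, send a
+            payload of::
+
+                { "purge": true }
+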
+        :rtype:     dict
+        :returns:   an error object if an error occurred or a dictionary containing:
+            * id:         the encoded id of the library dataset,
+            * deleted:    if the library dataset was marked as deleted,
+            * purged:     if the library dataset was purged
+        """
+        # a request body is optional here
+        purge = False
+        if kwd.get( 'payload', None ):
+            purge = util.string_as_bool( kwd['payload'].get( 'purge', False ) )
+
+        rval = { 'id': id }
+        try:
+            ld = self.get_library_dataset( trans, id, check_ownership=False, check_accessible=True )
+            user_is_admin = trans.user_is_admin()
+            can_modify = trans.app.security_agent.can_modify_library_item( trans.user.all_roles(), ld )
+            log.debug( 'is_admin: %s, can_modify: %s', user_is_admin, can_modify )
+            if not ( user_is_admin or can_modify ):
+                trans.response.status = 403
+                rval.update({ 'error': 'Unauthorized to delete or purge this library dataset' })
+                return rval
+
+            ld.deleted = True
+            if purge:
+                ld.purged = True
+                trans.sa_session.add( ld )
+                trans.sa_session.flush()
+
+                # TODO: had to change this up a bit from Dataset.user_can_purge
+                dataset = ld.library_dataset_dataset_association.dataset
+                no_history_assoc = len( dataset.history_associations ) == len( dataset.purged_history_associations )
+                no_library_assoc = dataset.library_associations == [ ld.library_dataset_dataset_association ]
+                can_purge_dataset = not dataset.purged and no_history_assoc and no_library_assoc
+
+                if can_purge_dataset:
+                    try:
+                        ld.library_dataset_dataset_association.dataset.full_delete()
+                        trans.sa_session.add( ld.dataset )
+                    except Exception:
+                        pass
+                    # flush now to preserve deleted state in case of later interruption
+                    trans.sa_session.flush()
+                rval[ 'purged' ] = True
+            trans.sa_session.flush()
+            rval[ 'deleted' ] = True
+
+        except exceptions.httpexceptions.HTTPInternalServerError as http_server_err:
+            log.exception( 'Library_contents API, delete: uncaught HTTPInternalServerError: %s, %s\n%s',
+                           id, str( kwd ), str( http_server_err ) )
+            raise
+        except exceptions.httpexceptions.HTTPException:
+            raise
+        except Exception as exc:
+            log.exception( 'library_contents API, delete: uncaught exception: %s, %s\n%s',
+                           id, str( kwd ), str( exc ) )
+            trans.response.status = 500
+            rval.update({ 'error': str( exc ) })
+        return rval
diff --git a/lib/galaxy/webapps/galaxy/api/metrics.py b/lib/galaxy/webapps/galaxy/api/metrics.py
new file mode 100644
index 0000000..dd6e727
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/metrics.py
@@ -0,0 +1,101 @@
+"""
+API operations for querying and recording user metrics from some client
+(typically a user's browser).
+"""
+# TODO: facade or adapter to fluentd
+
+import calendar
+import datetime
+
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+
+from galaxy.web.base.controller import BaseAPIController
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class MetricsController( BaseAPIController ):
+
+    def __init__( self, app ):
+        super( MetricsController, self ).__init__( app )
+        #: set to true to send additional debugging info to the log
+        self.debugging = True
+
+    def _deserialize_isoformat_date( self, datestring ):
+        """
+        Convert ISO formatted date string into python datetime.
+        """
+        return datetime.datetime.strptime( datestring, "%Y-%m-%dT%H:%M:%S.%fZ" )
+
+    @expose_api_anonymous
+    def create( self, trans, payload, **kwd ):
+        """
+        create( trans, payload )
+        * POST /api/metrics:
+            record any metrics sent and return some status object
+
+        .. note:: Anonymous users can post metrics
+
+        :type   payload: dict
+        :param  payload: (optional) dictionary structure containing:
+            * metrics:          a list containing dictionaries of the form:
+                ** namespace:       label indicating the source of the metric
+                ** time:            isoformat datetime when the metric was recorded
+                ** level:           an integer representing the metric's log level
+                ** args:            a json string containing an array of extra data
+
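+        For example (field values illustrative)::
+
+            { "metrics": [ {
+                "namespace": "history",
+                "time": "2016-10-10T14:30:00.000Z",
+                "level": 20,
+                "args": "[\"source\", \"detail\"]"
+            } ] }
+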
+        :rtype:     dict
+        :returns:   status object
+        """
+        user_id = trans.user.id if trans.user else None
+        session_id = trans.galaxy_session.id if trans.galaxy_session else None
+        parsed_gen = self._parse_metrics( payload.get( 'metrics', None ), user_id, session_id )
+        self._send_metrics( trans, parsed_gen )
+        response = self._get_server_pong( trans )
+        return response
+
+    # TODO: move the following to DAO/Manager object
+    def _parse_metrics( self, metrics, user_id=None, session_id=None ):
+        """
+        Return a generator yielding each given metric as a tuple:
+            * label:    the namespace of the metric
+            * time:     datetime of the metric's creation
+            * kwargs:   a dictionary containing:
+                ** level:   the log level of the metric
+                ** user:    the user associated with the metric
+                            (will be None if anonymous user)
+                ** session: the session of the current user
+        """
+        metrics = metrics or []
+        for metric in metrics:
+            label = metric[ 'namespace' ]
+            time = self._deserialize_isoformat_date( metric[ 'time' ] )
+            kwargs = {
+                'level'   : metric[ 'level' ],
+                'args'    : metric[ 'args' ],
+                'user'    : user_id,
+                'session' : session_id
+            }
+            yield ( label, time, kwargs )
+
+    def _send_metrics( self, trans, metrics ):
+        """
+        Send metrics to the app's `trace_logger` if set; otherwise, if
+        `self.debugging` is set on this controller, send them to `log.debug`.
+
+        Precondition: metrics are parsed and in proper format.
+        """
+        if trans.app.trace_logger:
+            for label, time, kwargs in metrics:
+                # convert the datetime to epoch seconds; int() on a datetime raises
+                # TypeError (this assumes the trace logger expects a Unix timestamp)
+                trans.app.trace_logger.log( label, event_time=calendar.timegm( time.utctimetuple() ), **kwargs )
+        elif self.debugging:
+            for label, time, kwargs in metrics:
+                log.debug( '%s %s %s', label, time, kwargs )
+
+    def _get_server_pong( self, trans ):
+        """
+        Return some status message or object.
+
+        For future use.
+        """
+        return {}
diff --git a/lib/galaxy/webapps/galaxy/api/page_revisions.py b/lib/galaxy/webapps/galaxy/api/page_revisions.py
new file mode 100644
index 0000000..56e3bde
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/page_revisions.py
@@ -0,0 +1,90 @@
+"""
+API for managing Galaxy Page revisions
+"""
+import logging
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy import exceptions
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.util.sanitize_html import sanitize_html
+
+log = logging.getLogger( __name__ )
+
+
+class PageRevisionsController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
+
+    @expose_api
+    def index( self, trans, page_id, **kwd ):
+        """
+        index( self, trans, page_id, **kwd )
+        * GET /api/pages/{page_id}/revisions
+            return a list of Page revisions
+
+        :param page_id: Display the revisions of Page with ID=page_id
+
+        :rtype:     list
+        :returns:   dictionaries containing different revisions of the page
+        """
+        page = self._get_page( trans, page_id )
+        self._verify_page_ownership( trans, page )
+
+        r = trans.sa_session.query( trans.app.model.PageRevision ).filter_by( page_id=trans.security.decode_id(page_id) )
+        out = []
+        for revision in r:
+            out.append( self.encode_all_ids( trans, revision.to_dict(), True ) )
+        return out
+
+    @expose_api
+    def create( self, trans, page_id, payload, **kwd ):
+        """
+        create( self, trans, page_id, payload, **kwd )
+        * POST /api/pages/{page_id}/revisions
+            Create a new revision for a page
+
+        :param page_id: Add revision to Page with ID=page_id
+        :param payload: A dictionary containing::
+            'title'     = New title of the page
+            'content'   = New content of the page
+
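+        For example (values illustrative)::
+
+            { "title": "Updated analysis notes", "content": "<p>New content.</p>" }
+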
+        :rtype:     dictionary
+        :returns:   Dictionary with 'success' or 'error' element to indicate the result of the request
+        """
+        content = payload.get("content", None)
+        if not content:
+            raise exceptions.ObjectAttributeMissingException("content undefined or empty")
+
+        page = self._get_page( trans, page_id )
+        self._verify_page_ownership( trans, page )
+
+        if 'title' in payload:
+            title = payload['title']
+        else:
+            title = page.title
+
+        content = sanitize_html( content, 'utf-8', 'text/html' )
+
+        page_revision = trans.app.model.PageRevision()
+        page_revision.title = title
+        page_revision.page = page
+        page.latest_revision = page_revision
+        page_revision.content = content
+
+        # Persist
+        session = trans.sa_session
+        session.flush()
+
+        return page_revision.to_dict( view="element" )
+
+    def _get_page( self, trans, page_id ):
+        page = None
+        try:
+            page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id(page_id) )
+        except Exception:
+            pass
+        if not page:
+            raise exceptions.ObjectNotFound()
+        return page
+
+    def _verify_page_ownership( self, trans, page ):
+        if not self.security_check( trans, page, True, True ):
+            raise exceptions.ItemOwnershipException()
diff --git a/lib/galaxy/webapps/galaxy/api/pages.py b/lib/galaxy/webapps/galaxy/api/pages.py
new file mode 100644
index 0000000..b69ea56
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -0,0 +1,151 @@
+"""
+API for managing Galaxy Pages
+"""
+import logging
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy import exceptions
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.util.sanitize_html import sanitize_html
+
+log = logging.getLogger( __name__ )
+
+
+class PagesController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
+
+    @expose_api
+    def index( self, trans, deleted=False, **kwd ):
+        """
+        index( self, trans, deleted=False, **kwd )
+        * GET /api/pages
+            return a list of Pages viewable by the user
+
+        :param deleted: Display deleted pages
+
+        :rtype:     list
+        :returns:   dictionaries containing summary or detailed Page information
+        """
+        out = []
+
+        if trans.user_is_admin():
+            queries = [ trans.sa_session.query( trans.app.model.Page ) ]
+        else:
+            user = trans.get_user()
+            # the user's own pages plus everyone else's published pages
+            queries = [ trans.sa_session.query( trans.app.model.Page ).filter_by( user=user ),
+                        trans.sa_session.query( trans.app.model.Page ).filter( trans.app.model.Page.user != user ).filter_by( published=True ) ]
+        for r in queries:
+            if not deleted:
+                r = r.filter_by( deleted=False )
+            for row in r:
+                out.append( self.encode_all_ids( trans, row.to_dict(), True ) )
+
+        return out
+
+    @expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        create( self, trans, payload, **kwd )
+        * POST /api/pages
+            Create a page and return dictionary containing Page summary
+
+        :param  payload:    dictionary structure containing::
+            'slug'       = The title slug for the page URL, must be unique
+            'title'      = Title of the page
+            'content'    = HTML contents of the page
+            'annotation' = Annotation that will be attached to the page
+
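+        For example (values illustrative)::
+
+            { "slug": "my-first-page", "title": "My First Page",
+              "content": "<p>Hello world.</p>", "annotation": "demo page" }
+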
+        :rtype:     dict
+        :returns:   Dictionary return of the Page.to_dict call
+        """
+        user = trans.get_user()
+
+        if not payload.get("title", None):
+            raise exceptions.ObjectAttributeMissingException( "Page name is required" )
+        elif not payload.get("slug", None):
+            raise exceptions.ObjectAttributeMissingException( "Page id is required" )
+        elif not self._is_valid_slug( payload["slug"] ):
+            raise exceptions.ObjectAttributeInvalidException( "Page identifier must consist of only lowercase letters, numbers, and the '-' character" )
+        elif trans.sa_session.query( trans.app.model.Page ).filter_by( user=user, slug=payload["slug"], deleted=False ).first():
+            raise exceptions.DuplicatedSlugException( "Page slug must be unique" )
+
+        content = payload.get("content", "")
+        content = sanitize_html( content, 'utf-8', 'text/html' )
+
+        # Create the new stored page
+        page = trans.app.model.Page()
+        page.title = payload['title']
+        page.slug = payload['slug']
+        page_annotation = sanitize_html( payload.get( "annotation", "" ), 'utf-8', 'text/html' )
+        self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
+        page.user = user
+        # And the first (empty) page revision
+        page_revision = trans.app.model.PageRevision()
+        page_revision.title = payload['title']
+        page_revision.page = page
+        page.latest_revision = page_revision
+        page_revision.content = content
+        # Persist
+        session = trans.sa_session
+        session.add( page )
+        session.flush()
+
+        rval = self.encode_all_ids( trans, page.to_dict(), True )
+        return rval
+
+    @expose_api
+    def delete( self, trans, id, **kwd ):
+        """
+        delete( self, trans, id, **kwd )
+        * DELETE /api/pages/{id}
+            Mark the page with the given ID as deleted
+
+        :param  id:    ID of page to be deleted
+
+        :rtype:     dict
+        :returns:   Dictionary with 'success' or 'error' element to indicate the result of the request
+        """
+        page = self._get_page( trans, id )
+
+        # Mark a page as deleted
+        page.deleted = True
+        trans.sa_session.flush()
+        return ''  # TODO: Figure out what to return on DELETE, document in guidelines!
+
+    @expose_api
+    def show( self, trans, id, **kwd ):
+        """
+        show( self, trans, id, **kwd )
+        * GET /api/pages/{id}
+            View a page summary and the content of the latest revision
+
+        :param  id:    ID of page to be displayed
+
+        :rtype:     dict
+        :returns:   Dictionary return of the Page.to_dict call with the 'content' field populated by the most recent revision
+        """
+        page = self._get_page( trans, id )
+        self.security_check( trans, page, check_ownership=False, check_accessible=True)
+        rval = self.encode_all_ids( trans, page.to_dict(), True )
+        rval['content'] = page.latest_revision.content
+        return rval
+
+    def _get_page( self, trans, id ):  # Fetches page object and verifies security.
+        try:
+            page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
+        except Exception:
+            page = None
+
+        if not page:
+            raise exceptions.ObjectNotFound()
+
+        if page.user != trans.user and not trans.user_is_admin():
+            raise exceptions.ItemOwnershipException()
+
+        return page
diff --git a/lib/galaxy/webapps/galaxy/api/provenance.py b/lib/galaxy/webapps/galaxy/api/provenance.py
new file mode 100644
index 0000000..4b95008
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/provenance.py
@@ -0,0 +1,98 @@
+"""
+API operations on provenance.
+"""
+import logging
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController
+from paste.httpexceptions import HTTPNotImplemented, HTTPBadRequest
+from galaxy import managers
+
+log = logging.getLogger( __name__ )
+
+
+class BaseProvenanceController( BaseAPIController ):
+    """
+    """
+    def __init__( self, app ):
+        super( BaseProvenanceController, self ).__init__( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    @web.expose_api
+    def index( self, trans, **kwd ):
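+        # 'follow' (default False) recursively expands the provenance of each
+        # input dataset; otherwise only its encoded id and uuid are included.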
+        follow = kwd.get('follow', False)
+        value = self._get_provenance( trans, self.provenance_item_class, kwd[self.provenance_item_id], follow )
+        return value
+
+    @web.expose_api
+    def show( self, trans, elem_name, **kwd ):
+        follow = kwd.get('follow', False)
+        value = self._get_provenance( trans, self.provenance_item_class, kwd[self.provenance_item_id], follow )
+        return value
+
+    @web.expose_api
+    def create( self, trans, tag_name, payload=None, **kwd ):
+        payload = payload or {}
+        raise HTTPNotImplemented()
+
+    @web.expose_api
+    def delete( self, trans, tag_name, **kwd ):
+        raise HTTPBadRequest("Cannot Delete Provenance")
+
+    def _get_provenance( self, trans, item_class_name, item_id, follow=True ):
+        provenance_item = self.get_object( trans, item_id, item_class_name, check_ownership=False, check_accessible=False)
+        if item_class_name == "HistoryDatasetAssociation":
+            self.hda_manager.error_unless_accessible( provenance_item, trans.user )
+        else:
+            self.security_check( trans, provenance_item, check_accessible=True )
+        out = self._get_record( trans, provenance_item, follow )
+        return out
+
+    def _get_record(self, trans, item, follow):
+        if item is not None:
+            if item.copied_from_library_dataset_dataset_association:
+                item = item.copied_from_library_dataset_dataset_association
+            job = item.creating_job
+            if job is not None:
+                return {
+                    "id": trans.security.encode_id(item.id),
+                    "uuid": ( lambda uuid: str( uuid ) if uuid else None )( item.dataset.uuid),
+                    "job_id": trans.security.encode_id( job.id ),
+                    "tool_id": job.tool_id,
+                    "parameters": self._get_job_record(trans, job, follow),
+                    "stderr": job.stderr,
+                    "stdout": job.stdout,
+                }
+            else:
+                return {
+                    "id": trans.security.encode_id(item.id),
+                    "uuid": ( lambda uuid: str( uuid ) if uuid else None )( item.dataset.uuid)
+                }
+        return None
+
+    def _get_job_record(self, trans, job, follow):
+        out = {}
+        for p in job.parameters:
+            out[p.name] = p.value
+        for in_d in job.input_datasets:
+            if not in_d.dataset:
+                continue
+            if follow:
+                out[in_d.name] = self._get_record(trans, in_d.dataset, follow)
+            else:
+                out[in_d.name] = {
+                    "id": trans.security.encode_id(in_d.dataset.id),
+                    "uuid": ( lambda uuid: str( uuid ) if uuid else None )( in_d.dataset.dataset.uuid ),
+                }
+        return out
+
+
+class HDAProvenanceController( BaseProvenanceController ):
+    controller_name = "history_content_provenance"
+    provenance_item_class = "HistoryDatasetAssociation"
+    provenance_item_id = "history_content_id"
+
+
+class LDDAProvenanceController( BaseProvenanceController ):
+    controller_name = "ldda_provenance"
+    provenance_item_class = "LibraryDatasetDatasetAssociation"
+    provenance_item_id = "library_content_id"
diff --git a/lib/galaxy/webapps/galaxy/api/quotas.py b/lib/galaxy/webapps/galaxy/api/quotas.py
new file mode 100644
index 0000000..2cca27d
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/quotas.py
@@ -0,0 +1,147 @@
+"""
+API operations on Quota objects.
+"""
+import logging
+
+from paste.httpexceptions import HTTPBadRequest
+from sqlalchemy import false, true
+
+from galaxy import web, util
+from galaxy.actions.admin import AdminActions
+from galaxy.exceptions import ActionInputError
+from galaxy.web.base.controller import BaseAPIController, UsesQuotaMixin, url_for
+from galaxy.web.base.controllers.admin import Admin
+from galaxy.web.params import QuotaParamParser
+
+log = logging.getLogger( __name__ )
+
+
+class QuotaAPIController( BaseAPIController, Admin, AdminActions, UsesQuotaMixin, QuotaParamParser ):
+    @web.expose_api
+    @web.require_admin
+    def index( self, trans, deleted='False', **kwd ):
+        """
+        GET /api/quotas
+        GET /api/quotas/deleted
+        Displays a collection (list) of quotas.
+        """
+        rval = []
+        deleted = util.string_as_bool( deleted )
+        query = trans.sa_session.query( trans.app.model.Quota )
+        if deleted:
+            route = 'deleted_quota'
+            query = query.filter( trans.app.model.Quota.table.c.deleted == true() )
+        else:
+            route = 'quota'
+            query = query.filter( trans.app.model.Quota.table.c.deleted == false() )
+        for quota in query:
+            item = quota.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+            encoded_id = trans.security.encode_id( quota.id )
+            item['url'] = url_for( route, id=encoded_id )
+            rval.append( item )
+        return rval
+
+    @web.expose_api
+    @web.require_admin
+    def show( self, trans, id, deleted='False', **kwd ):
+        """
+        GET /api/quotas/{encoded_quota_id}
+        GET /api/quotas/deleted/{encoded_quota_id}
+        Displays information about a quota.
+        """
+        quota = self.get_quota( trans, id, deleted=util.string_as_bool( deleted ) )
+        return quota.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id, 'total_disk_usage': float } )
+
+    @web.expose_api
+    @web.require_admin
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/quotas
+        Creates a new quota.
+        """
+        try:
+            self.validate_in_users_and_groups( trans, payload )
+        except Exception as e:
+            raise HTTPBadRequest( detail=str( e ) )
+        params = self.get_quota_params( payload )
+        try:
+            quota, message = self._create_quota( params )
+        except ActionInputError as e:
+            raise HTTPBadRequest( detail=str( e ) )
+        item = quota.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+        item['url'] = url_for( 'quota', id=trans.security.encode_id( quota.id ) )
+        item['message'] = message
+        return item
+
+    @web.expose_api
+    @web.require_admin
+    def update( self, trans, id, payload, **kwd ):
+        """
+        PUT /api/quotas/{encoded_quota_id}
+        Modifies a quota.
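+
+        For example, a sketch of a payload renaming a quota and adjusting its
+        amount (field names taken from the dispatch below; the amount string
+        must be in a form the quota parser accepts)::
+
+            { "name": "larger-quota", "description": "more space", "amount": "100 GB" }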
+        """
+        try:
+            self.validate_in_users_and_groups( trans, payload )
+        except Exception as e:
+            raise HTTPBadRequest( detail=str( e ) )
+
+        quota = self.get_quota( trans, id, deleted=False )
+
+        # FIXME: Doing it this way makes the update non-atomic if a method fails after an earlier one has succeeded.
+        payload['id'] = id
+        params = self.get_quota_params( payload )
+        methods = []
+        if payload.get( 'name', None ) or payload.get( 'description', None ):
+            methods.append( self._rename_quota )
+        if payload.get( 'amount', None ):
+            methods.append( self._edit_quota )
+        if payload.get( 'default', None ) == 'no':
+            methods.append( self._unset_quota_default )
+        elif payload.get( 'default', None ):
+            methods.append( self._set_quota_default )
+        if payload.get( 'in_users', None ) or payload.get( 'in_groups', None ):
+            methods.append( self._manage_users_and_groups_for_quota )
+
+        messages = []
+        for method in methods:
+            try:
+                message = method( quota, params )
+            except ActionInputError as e:
+                raise HTTPBadRequest( detail=str( e ) )
+            messages.append( message )
+        return '; '.join( messages )
+
+    @web.expose_api
+    @web.require_admin
+    def delete( self, trans, id, **kwd ):
+        """
+        DELETE /api/quotas/{encoded_quota_id}
+        Deletes a quota.
+        """
+        quota = self.get_quota( trans, id, deleted=False )  # deleted quotas are not technically members of this collection
+
+        # a request body is optional here
+        payload = kwd.get( 'payload', {} )
+        payload['id'] = id
+        params = self.get_quota_params( payload )
+
+        try:
+            message = self._mark_quota_deleted( quota, params )
+            if util.string_as_bool( payload.get( 'purge', False ) ):
+                message += self._purge_quota( quota, params )
+        except ActionInputError as e:
+            raise HTTPBadRequest( detail=str( e ) )
+        return message
+
+    @web.expose_api
+    @web.require_admin
+    def undelete( self, trans, id, **kwd ):
+        """
+        POST /api/quotas/deleted/{encoded_quota_id}/undelete
+        Undeletes a quota.
+        """
+        quota = self.get_quota( trans, id, deleted=True )
+        try:
+            return self._undelete_quota( quota )
+        except ActionInputError as e:
+            raise HTTPBadRequest( detail=str( e ) )
diff --git a/lib/galaxy/webapps/galaxy/api/remote_files.py b/lib/galaxy/webapps/galaxy/api/remote_files.py
new file mode 100644
index 0000000..28d42cd
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/remote_files.py
@@ -0,0 +1,149 @@
+"""
+API operations on remote files.
+"""
+import os
+import time
+import hashlib
+from galaxy import exceptions
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.util import jstree, unicodify
+from galaxy.web.base.controller import BaseAPIController
+from operator import itemgetter
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class RemoteFilesAPIController( BaseAPIController ):
+
+    @expose_api
+    def index( self, trans, **kwd ):
+        """
+        GET /api/remote_files/
+
+        Displays remote files.
+
+        :param  target:      target to load available datasets from, defaults to ftp
+            possible values: ftp, userdir, importdir
+        :type   target:      str
+
+        :param  format:      requested format of data, defaults to flat
+            possible values: flat, jstree, ajax
+        :type   format:      str
+
+        :returns:   list of available files
+        :rtype:     list
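+
+        For example::
+
+            GET /api/remote_files?target=userdir&format=jstree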
+        """
+        target = kwd.get( 'target', None )
+        format = kwd.get( 'format', None )
+
+        if target == 'userdir':
+            user_login = trans.user.email
+            user_base_dir = trans.app.config.user_library_import_dir
+            if user_base_dir is None:
+                raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow upload from user directories.' )
+            full_import_dir = os.path.join( user_base_dir, user_login )
+            if not os.path.exists(full_import_dir):
+                raise exceptions.ObjectNotFound('You do not have any files in your user directory. Use FTP to upload there.')
+            if full_import_dir is not None:
+                if format == 'jstree':
+                    disable = kwd.get( 'disable', 'folders')
+                    try:
+                        userdir_jstree = self.__create_jstree( full_import_dir, disable )
+                        response = userdir_jstree.jsonData()
+                    except Exception as exception:
+                        log.debug( str( exception ) )
+                        raise exceptions.InternalServerError( 'Could not create tree representation of the given folder: ' + str( full_import_dir ) )
+                    if not response:
+                        raise exceptions.ObjectNotFound('You do not have any files in your user directory. Use FTP to upload there.')
+                elif format == 'ajax':
+                    raise exceptions.NotImplemented( 'Not implemented yet. Sorry.' )
+                else:
+                    try:
+                        response = self.__load_all_filenames( full_import_dir )
+                    except Exception as exception:
+                        log.error( 'Could not get user import files: %s', str( exception ), exc_info=True )
+                        raise exceptions.InternalServerError( 'Could not get the files from your user directory folder.' )
+            else:
+                raise exceptions.InternalServerError( 'Could not get the files from your user directory folder.' )
+        elif target == 'importdir':
+            base_dir = trans.app.config.library_import_dir
+            if base_dir is None:
+                raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow usage of import directory.' )
+            if format == 'jstree':
+                disable = kwd.get( 'disable', 'folders' )
+                try:
+                    importdir_jstree = self.__create_jstree( base_dir, disable )
+                    response = importdir_jstree.jsonData()
+                except Exception as exception:
+                    log.debug( str( exception ) )
+                    raise exceptions.InternalServerError( 'Could not create tree representation of the given folder: ' + str( base_dir ) )
+            elif format == 'ajax':
+                raise exceptions.NotImplemented( 'Not implemented yet. Sorry.' )
+            else:
+                try:
+                    response = self.__load_all_filenames( base_dir )
+                except Exception as exception:
+                    log.error( 'Could not get user import files: %s', str( exception ), exc_info=True )
+                    raise exceptions.InternalServerError( 'Could not get the files from your import directory folder.' )
+        else:
+            user_ftp_base_dir = trans.app.config.ftp_upload_dir
+            if user_ftp_base_dir is None:
+                raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow upload from FTP directories.' )
+            try:
+                user_ftp_dir = trans.user_ftp_dir
+                if user_ftp_dir is not None:
+                    response = self.__load_all_filenames( user_ftp_dir )
+                else:
+                    log.warning( 'You do not have an FTP directory named as your login at this Galaxy instance.' )
+                    return None
+            except Exception as exception:
+                log.warning( 'Could not get ftp files: %s', str( exception ), exc_info=True )
+                return None
+        return response
+
+    def __load_all_filenames( self, directory ):
+        """
+        Loads recursively all files within the given folder and its
+        subfolders and returns a flat list.
+        """
+        response = []
+        if os.path.exists( directory ):
+            for ( dirpath, dirnames, filenames ) in os.walk( directory ):
+                for filename in filenames:
+                    path = os.path.relpath( os.path.join( dirpath, filename ), directory )
+                    statinfo = os.lstat( os.path.join( dirpath, filename ) )
+                    response.append( dict( path=path,
+                                           size=statinfo.st_size,
+                                           ctime=time.strftime( "%m/%d/%Y %I:%M:%S %p", time.localtime( statinfo.st_ctime ) ) ) )
+        else:
+            log.warning( "The directory \"%s\" does not exist." % directory )
+            return response
+        # sort by path
+        response = sorted(response, key=itemgetter("path"))
+        return response
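+    # Illustrative only: a sketch of the list __load_all_filenames returns
+    # (the paths, sizes and timestamps below are invented):
+    #
+    #   [ { 'path': 'reads/sample1.fastq', 'size': 1048576,
+    #       'ctime': '01/15/2017 12:45:32 PM' },
+    #     { 'path': 'reads/sample2.fastq', 'size': 2097152,
+    #       'ctime': '01/15/2017 12:46:10 PM' } ]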
+
+    def __create_jstree( self, directory, disable='folders' ):
+        """
+        Loads recursively all files and folders within the given folder
+        and its subfolders and returns jstree representation
+        of its structure.
+        """
+        userdir_jstree = None
+        jstree_paths = []
+        if os.path.exists( directory ):
+            for ( dirpath, dirnames, filenames ) in os.walk( directory ):
+                for dirname in dirnames:
+                    dir_path = os.path.relpath( os.path.join( dirpath, dirname ), directory )
+                    dir_path_hash = hashlib.sha1(unicodify(dir_path).encode('utf-8')).hexdigest()
+                    disabled = ( disable == 'folders' )
+                    jstree_paths.append( jstree.Path( dir_path, dir_path_hash, { 'type': 'folder', 'state': { 'disabled': disabled }, 'li_attr': { 'full_path': dir_path } } ) )
+
+                for filename in filenames:
+                    file_path = os.path.relpath( os.path.join( dirpath, filename ), directory )
+                    file_path_hash = hashlib.sha1(unicodify(file_path).encode('utf-8')).hexdigest()
+                    disabled = ( disable == 'files' )
+                    jstree_paths.append( jstree.Path( file_path, file_path_hash, { 'type': 'file', 'state': { 'disabled': disabled }, 'li_attr': { 'full_path': file_path } } ) )
+        else:
+            raise exceptions.ConfigDoesNotAllowException( 'The given directory does not exist.' )
+        userdir_jstree = jstree.JSTree( jstree_paths )
+        return userdir_jstree
diff --git a/lib/galaxy/webapps/galaxy/api/request_types.py b/lib/galaxy/webapps/galaxy/api/request_types.py
new file mode 100644
index 0000000..99cca3b
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/request_types.py
@@ -0,0 +1,102 @@
+"""
+API operations on RequestType objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+from galaxy.sample_tracking.request_types import request_type_factory
+from xml.etree.ElementTree import XML
+
+log = logging.getLogger( __name__ )
+
+
+class RequestTypeAPIController( BaseAPIController ):
+    @web.expose_api
+    def index( self, trans, **kwd ):
+        """
+        GET /api/request_types
+        Displays a collection (list) of request_types.
+        """
+        rval = []
+        for request_type in trans.app.security_agent.get_accessible_request_types( trans, trans.user ):
+            item = request_type.to_dict( value_mapper={ 'id': trans.security.encode_id, 'request_form_id': trans.security.encode_id, 'sample_form_id': trans.security.encode_id } )
+            encoded_id = trans.security.encode_id( request_type.id )
+            item['url'] = url_for( 'request_type', id=encoded_id )
+            rval.append( item )
+        return rval
+
+    @web.expose_api
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/request_types/{encoded_request_type_id}
+        Displays information about a request_type.
+        """
+        request_type_id = id
+        try:
+            decoded_request_type_id = trans.security.decode_id( request_type_id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed request type id ( %s ) specified, unable to decode." % str( request_type_id )
+        try:
+            request_type = trans.sa_session.query( trans.app.model.RequestType ).get( decoded_request_type_id )
+        except Exception:
+            request_type = None
+        if not request_type:  # or not trans.user_is_admin():
+            trans.response.status = 400
+            return "Invalid request_type id ( %s ) specified." % str( request_type_id )
+        if not trans.app.security_agent.can_access_request_type( trans.user.all_roles(), request_type ):
+            trans.response.status = 400
+            return "No permission to access request_type ( %s )." % str( request_type_id )
+        item = request_type.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id, 'request_form_id': trans.security.encode_id, 'sample_form_id': trans.security.encode_id } )
+        item['url'] = url_for( 'request_type', id=request_type_id )
+        return item
+
+    @web.expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/request_types
+        Creates a new request type (external_service configuration).
+        """
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "You are not authorized to create a new request type (external_service configuration)."
+        xml_text = payload.get( 'xml_text', None )
+        if xml_text is None:
+            trans.response.status = 400
+            return "Missing required parameter 'xml_text'."
+        elem = XML( xml_text )
+        request_form_id = payload.get( 'request_form_id', None )
+        if request_form_id is None:
+            trans.response.status = 400
+            return "Missing required parameter 'request_form_id'."
+        request_form = trans.sa_session.query( trans.app.model.FormDefinition ).get( trans.security.decode_id( request_form_id ) )
+        sample_form_id = payload.get( 'sample_form_id', None )
+        if sample_form_id is None:
+            trans.response.status = 400
+            return "Missing required parameter 'sample_form_id'."
+        sample_form = trans.sa_session.query( trans.app.model.FormDefinition ).get( trans.security.decode_id( sample_form_id ) )
+        external_service_id = payload.get( 'external_service_id', None )
+        if external_service_id is None:
+            trans.response.status = 400
+            return "Missing required parameter 'external_service_id'."
+        external_service = trans.sa_session.query( trans.app.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+        request_type = request_type_factory.from_elem( elem, request_form, sample_form, external_service )
+        # FIXME: move permission building/setting to separate abstract method call and
+        # allow setting individual permissions by role (currently only one action, so not strictly needed)
+        role_ids = payload.get( 'role_ids', [] )
+        roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]  # if trans.app.security_agent.ok_to_display( trans.user, i ) ]
+        permissions = {}
+        if roles:
+            # yikes, there has to be a better way?
+            for k, v in trans.model.RequestType.permitted_actions.items():
+                permissions[ trans.app.security_agent.get_action( v.action ) ] = roles
+        if permissions:
+            trans.app.security_agent.set_request_type_permissions( request_type, permissions )
+
+        # flush objects
+        trans.sa_session.add( request_type )
+        trans.sa_session.flush()
+        encoded_id = trans.security.encode_id( request_type.id )
+        item = request_type.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id, 'request_form_id': trans.security.encode_id, 'sample_form_id': trans.security.encode_id } )
+        item['url'] = url_for( 'request_type', id=encoded_id )
+        return [ item ]
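+    # A minimal sketch of a POST /api/request_types payload; the encoded ids and
+    # the XML are hypothetical and abbreviated:
+    #
+    #   payload = { 'xml_text': '<request_type name="Sequencing request" ...>...</request_type>',
+    #               'request_form_id': 'f2db41e1fa331b3e',
+    #               'sample_form_id': 'f597429621d6eb2b',
+    #               'external_service_id': '1cd8e2f6b131e891',
+    #               'role_ids': [] }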
diff --git a/lib/galaxy/webapps/galaxy/api/requests.py b/lib/galaxy/webapps/galaxy/api/requests.py
new file mode 100644
index 0000000..10fa82d
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/requests.py
@@ -0,0 +1,108 @@
+"""
+API operations on a sample tracking system.
+"""
+import logging
+
+from sqlalchemy import and_, false
+
+from galaxy import web
+from galaxy.util.bunch import Bunch
+from galaxy.web import url_for
+from galaxy.web.base.controller import BaseAPIController
+
+log = logging.getLogger( __name__ )
+
+
+class RequestsAPIController( BaseAPIController ):
+    _update_types = Bunch( REQUEST='request_state' )
+    _update_type_values = [ v for k, v in _update_types.items() ]
+
+    @web.expose_api
+    def index( self, trans, **kwd ):
+        """
+        GET /api/requests
+        Displays a collection (list) of sequencing requests.
+        """
+        # if admin user then return all requests
+        if trans.user_is_admin():
+            query = trans.sa_session.query( trans.app.model.Request ) \
+                .filter( trans.app.model.Request.table.c.deleted == false() ) \
+                .all()
+        else:
+            query = trans.sa_session.query( trans.app.model.Request ) \
+                .filter( and_( trans.app.model.Request.table.c.user_id == trans.user.id,
+                               trans.app.model.Request.table.c.deleted == false() ) ) \
+                .all()
+        rval = []
+        for request in query:
+            item = request.to_dict()
+            item['url'] = url_for( 'requests', id=trans.security.encode_id( request.id ) )
+            item['id'] = trans.security.encode_id( item['id'] )
+            if trans.user_is_admin():
+                item['user'] = request.user.email
+            rval.append( item )
+        return rval
+
+    @web.expose_api
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/requests/{encoded_request_id}
+        Displays details of a sequencing request.
+        """
+        try:
+            request_id = trans.security.decode_id( id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed id ( %s ) specified, unable to decode." % ( str( id ) )
+        try:
+            request = trans.sa_session.query( trans.app.model.Request ).get( request_id )
+        except Exception:
+            request = None
+        if not request or not ( trans.user_is_admin() or request.user.id == trans.user.id ):
+            trans.response.status = 400
+            return "Invalid request id ( %s ) specified." % str( request_id )
+        item = request.to_dict()
+        item['url'] = url_for( 'requests', id=trans.security.encode_id( request.id ) )
+        item['id'] = trans.security.encode_id( item['id'] )
+        item['user'] = request.user.email
+        item['num_of_samples'] = len(request.samples)
+        return item
+
+    @web.expose_api
+    def update( self, trans, id, key, payload, **kwd ):
+        """
+        PUT /api/requests/{encoded_request_id}
+        Updates a request state, sample state or sample dataset transfer status
+        depending on the update_type
+        """
+        update_type = None
+        if 'update_type' not in payload:
+            trans.response.status = 400
+            return "Missing required 'update_type' parameter.  Please consult the API documentation for help."
+        else:
+            update_type = payload.pop( 'update_type' )
+        if update_type not in self._update_type_values:
+            trans.response.status = 400
+            return "Invalid value for 'update_type' parameter ( %s ) specified.  Please consult the API documentation for help." % update_type
+        try:
+            request_id = trans.security.decode_id( id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed  request id ( %s ) specified, unable to decode." % str( id )
+        try:
+            request = trans.sa_session.query( trans.app.model.Request ).get( request_id )
+        except Exception:
+            request = None
+        if not request or not ( trans.user_is_admin() or request.user.id == trans.user.id ):
+            trans.response.status = 400
+            return "Invalid request id ( %s ) specified." % str( request_id )
+        # check update type
+        if update_type == 'request_state':
+            return self.__update_request_state( trans, encoded_request_id=id )
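+    # Sketch of a PUT /api/requests/{encoded_request_id} payload; 'request_state'
+    # is currently the only value listed in _update_types above:
+    #
+    #   payload = { 'update_type': 'request_state' }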
+
+    def __update_request_state( self, trans, encoded_request_id ):
+        requests_common_cntrller = trans.webapp.controllers['requests_common']
+        status, output = requests_common_cntrller.update_request_state( trans,
+                                                                        cntrller='api',
+                                                                        request_id=encoded_request_id )
+        return status, output
diff --git a/lib/galaxy/webapps/galaxy/api/roles.py b/lib/galaxy/webapps/galaxy/api/roles.py
new file mode 100644
index 0000000..d61bf37
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/roles.py
@@ -0,0 +1,93 @@
+"""
+API operations on Role objects.
+"""
+import logging
+
+from sqlalchemy import false
+
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController, url_for
+
+log = logging.getLogger( __name__ )
+
+
+class RoleAPIController( BaseAPIController ):
+
+    @web.expose_api
+    def index( self, trans, **kwd ):
+        """
+        GET /api/roles
+        Displays a collection (list) of roles.
+        """
+        rval = []
+        for role in trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.deleted == false() ):
+            if trans.user_is_admin() or trans.app.security_agent.ok_to_display( trans.user, role ):
+                item = role.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+                encoded_id = trans.security.encode_id( role.id )
+                item['url'] = url_for( 'role', id=encoded_id )
+                rval.append( item )
+        return rval
+
+    @web.expose_api
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/roles/{encoded_role_id}
+        Displays information about a role.
+        """
+        role_id = id
+        try:
+            decoded_role_id = trans.security.decode_id( role_id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed role id ( %s ) specified, unable to decode." % str( role_id )
+        try:
+            role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+        except Exception:
+            role = None
+        if not role or not (trans.user_is_admin() or trans.app.security_agent.ok_to_display( trans.user, role )):
+            trans.response.status = 400
+            return "Invalid role id ( %s ) specified." % str( role_id )
+        item = role.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id } )
+        item['url'] = url_for( 'role', id=role_id )
+        return item
+
+    @web.expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/roles
+        Creates a new role.
+        """
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "You are not authorized to create a new role."
+        name = payload.get( 'name', None )
+        description = payload.get( 'description', None )
+        if not name or not description:
+            trans.response.status = 400
+            return "Enter a valid name and a description"
+        if trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name == name ).first():
+            trans.response.status = 400
+            return "A role with that name already exists"
+
+        role_type = trans.app.model.Role.types.ADMIN  # TODO: allow non-admins to create roles
+
+        role = trans.app.model.Role( name=name, description=description, type=role_type )
+        trans.sa_session.add( role )
+        user_ids = payload.get( 'user_ids', [] )
+        users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+        group_ids = payload.get( 'group_ids', [] )
+        groups = [ trans.sa_session.query( trans.model.Group ).get( trans.security.decode_id( i ) ) for i in group_ids ]
+
+        # Create the UserRoleAssociations
+        for user in users:
+            trans.app.security_agent.associate_user_role( user, role )
+
+        # Create the GroupRoleAssociations
+        for group in groups:
+            trans.app.security_agent.associate_group_role( group, role )
+
+        trans.sa_session.flush()
+        encoded_id = trans.security.encode_id( role.id )
+        item = role.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id } )
+        item['url'] = url_for( 'role', id=encoded_id )
+        return [ item ]
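+    # A hypothetical POST /api/roles payload; 'user_ids' and 'group_ids' are
+    # optional lists of encoded ids:
+    #
+    #   payload = { 'name': 'sequencing-staff',
+    #               'description': 'Access to sequencing libraries',
+    #               'user_ids': [ 'f2db41e1fa331b3e' ],
+    #               'group_ids': [] }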
diff --git a/lib/galaxy/webapps/galaxy/api/samples.py b/lib/galaxy/webapps/galaxy/api/samples.py
new file mode 100644
index 0000000..cba2145
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/samples.py
@@ -0,0 +1,145 @@
+"""
+API operations for samples in the Galaxy sample tracking system.
+"""
+import logging
+
+from galaxy import util
+from galaxy.util.bunch import Bunch
+from galaxy.web import url_for
+from galaxy.web.base.controller import BaseAPIController, web
+
+log = logging.getLogger( __name__ )
+
+
+class SamplesAPIController( BaseAPIController ):
+    update_types = Bunch( SAMPLE=[ 'sample_state', 'run_details' ],
+                          SAMPLE_DATASET=[ 'sample_dataset_transfer_status' ] )
+    update_type_values = []
+    for k, v in update_types.items():
+        update_type_values.extend( v )
+
+    @web.expose_api
+    def index( self, trans, **kwd ):
+        """
+        GET /api/requests/{encoded_request_id}/samples
+        Displays a collection (list) of samples of a sequencing request.
+        """
+        try:
+            request_id = trans.security.decode_id( kwd[ 'request_id' ] )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed request id ( %s ) specified, unable to decode." % str( kwd[ 'request_id' ] )
+        try:
+            request = trans.sa_session.query( trans.app.model.Request ).get( request_id )
+        except Exception:
+            request = None
+        if not request or not ( trans.user_is_admin() or request.user.id == trans.user.id ):
+            trans.response.status = 400
+            return "Invalid request id ( %s ) specified." % str( request_id )
+        rval = []
+        for sample in request.samples:
+            item = sample.to_dict()
+            item['url'] = url_for( 'samples',
+                                   request_id=trans.security.encode_id( request_id ),
+                                   id=trans.security.encode_id( sample.id ) )
+            item['id'] = trans.security.encode_id( item['id'] )
+            rval.append( item )
+        return rval
+
+    @web.expose_api
+    def update( self, trans, id, payload, **kwd ):
+        """
+        PUT /api/samples/{encoded_sample_id}
+        Updates a sample or objects related ( mapped ) to a sample.
+        """
+        update_type = None
+        if 'update_type' not in payload:
+            trans.response.status = 400
+            return "Missing required 'update_type' parameter, consult the API documentation for help."
+        else:
+            update_type = payload.pop( 'update_type' )
+        if update_type not in self.update_type_values:
+            trans.response.status = 400
+            return "Invalid value for 'update_type' parameter (%s) specified, consult the API documentation for help." % update_type
+        sample_id = util.restore_text( id )
+        try:
+            decoded_sample_id = trans.security.decode_id( sample_id )
+        except TypeError:
+            trans.response.status = 400
+            return "Malformed sample_id (%s) specified, unable to decode." % str( sample_id )
+        try:
+            sample = trans.sa_session.query( trans.app.model.Sample ).get( decoded_sample_id )
+        except Exception:
+            sample = None
+        if not sample:
+            trans.response.status = 400
+            return "Invalid sample id ( %s ) specified." % str( sample_id )
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "You are not authorized to update samples."
+        requests_admin_controller = trans.webapp.controllers[ 'requests_admin' ]
+        if update_type == 'run_details':
+            deferred_plugin = payload.pop( 'deferred_plugin', None )
+            if deferred_plugin:
+                try:
+                    trans.app.job_manager.deferred_job_queue.plugins[deferred_plugin].create_job( trans, sample=sample, **payload )
+                except Exception:
+                    log.exception( 'update() called with a deferred job plugin (%s) but creating the deferred job failed:', deferred_plugin )
+            status, output = requests_admin_controller.edit_template_info( trans,
+                                                                           cntrller='api',
+                                                                           item_type='sample',
+                                                                           form_type=trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE,
+                                                                           sample_id=sample_id,
+                                                                           **payload )
+            return status, output
+        elif update_type == 'sample_state':
+            return self.__update_sample_state( trans, sample, sample_id, **payload )
+        elif update_type == 'sample_dataset_transfer_status':
+            # update sample_dataset transfer status
+            return self.__update_sample_dataset_status( trans, **payload )
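+    # Sketches of PUT /api/samples/{encoded_sample_id} payloads for the two
+    # state-changing update types; the state names and ids are hypothetical:
+    #
+    #   payload = { 'update_type': 'sample_state',
+    #               'new_state': 'Sample received',
+    #               'comment': 'Arrived intact.' }
+    #
+    #   payload = { 'update_type': 'sample_dataset_transfer_status',
+    #               'sample_dataset_ids': [ 'f2db41e1fa331b3e' ],
+    #               'new_status': 'Complete' }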
+
+    def __update_sample_state( self, trans, sample, encoded_sample_id, **payload ):
+        # only admin user may update sample state in Galaxy sample tracking
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "only an admin user may update sample state in Galaxy sample tracking."
+        if 'new_state' not in payload:
+            trans.response.status = 400
+            return "Missing required parameter: 'new_state'."
+        new_state_name = payload.pop( 'new_state' )
+        comment = payload.get( 'comment', '' )
+        # check if the new state is a valid sample state
+        possible_states = sample.request.type.states
+        new_state = None
+        for state in possible_states:
+            if state.name == new_state_name:
+                new_state = state
+        if not new_state:
+            trans.response.status = 400
+            return "Invalid sample state requested ( %s )." % new_state_name
+        requests_common_cntrller = trans.webapp.controllers[ 'requests_common' ]
+        status, output = requests_common_cntrller.update_sample_state( trans=trans,
+                                                                       cntrller='api',
+                                                                       sample_ids=[ encoded_sample_id ],
+                                                                       new_state=new_state,
+                                                                       comment=comment )
+        return status, output
+
+    def __update_sample_dataset_status( self, trans, **payload ):
+        # only admin user may transfer sample datasets in Galaxy sample tracking
+        if not trans.user_is_admin():
+            trans.response.status = 403
+            return "Only an admin user may transfer sample datasets in Galaxy sample tracking and thus update transfer status."
+        if 'sample_dataset_ids' not in payload or 'new_status' not in payload:
+            trans.response.status = 400
+            return "Missing one or more required parameters: 'sample_dataset_ids' and 'new_status'."
+        sample_dataset_ids = payload.pop( 'sample_dataset_ids' )
+        new_status = payload.pop( 'new_status' )
+        error_msg = payload.get( 'error_msg', '' )
+        requests_admin_cntrller = trans.webapp.controllers[ 'requests_admin' ]
+        status, output = requests_admin_cntrller.update_sample_dataset_status( trans=trans,
+                                                                               cntrller='api',
+                                                                               sample_dataset_ids=sample_dataset_ids,
+                                                                               new_status=new_status,
+                                                                               error_msg=error_msg )
+        return status, output
diff --git a/lib/galaxy/webapps/galaxy/api/search.py b/lib/galaxy/webapps/galaxy/api/search.py
new file mode 100644
index 0000000..c037562
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/search.py
@@ -0,0 +1,66 @@
+"""
+API for searching Galaxy Datasets
+"""
+import logging
+from galaxy import web
+from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController
+from galaxy.model.search import GalaxySearchEngine
+from galaxy.exceptions import ItemAccessibilityException
+
+log = logging.getLogger( __name__ )
+
+
+class SearchController( BaseAPIController, SharableItemSecurityMixin ):
+
+    @web.expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/search
+        Do a search of the various elements of Galaxy.
+        """
+        query_txt = payload.get("query", None)
+        out = []
+        if query_txt is not None:
+            se = GalaxySearchEngine()
+            try:
+                query = se.query(query_txt)
+            except Exception as e:
+                return {'error': str(e)}
+            if query is not None:
+                query.decode_query_ids(trans)
+                current_user_roles = trans.get_current_user_roles()
+                try:
+                    results = query.process(trans)
+                except Exception as e:
+                    return {'error': str(e)}
+                for item in results:
+                    append = False
+                    if trans.user_is_admin():
+                        append = True
+                    if not append:
+                        if type( item ) in [ trans.app.model.LibraryFolder, trans.app.model.LibraryDatasetDatasetAssociation, trans.app.model.LibraryDataset ]:
+                            if (trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), item, trans.user ) ):
+                                append = True
+                        elif type( item ) in [ trans.app.model.Job ]:
+                            if item.user == trans.user or trans.user_is_admin():
+                                append = True
+                        elif type( item ) in [ trans.app.model.Page, trans.app.model.StoredWorkflow ]:
+                            try:
+                                if self.security_check( trans, item, False, True):
+                                    append = True
+                            except ItemAccessibilityException:
+                                append = False
+                        elif type( item ) in [ trans.app.model.PageRevision ]:
+                            try:
+                                if self.security_check( trans, item.page, False, True):
+                                    append = True
+                            except ItemAccessibilityException:
+                                append = False
+                        elif hasattr(item, 'dataset'):
+                            if trans.app.security_agent.can_access_dataset( current_user_roles, item.dataset ):
+                                append = True
+
+                    if append:
+                        row = query.item_to_api_value(item)
+                        out.append( self.encode_all_ids( trans, row, True) )
+        return { 'results': out }
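+    # A minimal sketch of a POST /api/search payload; the query string syntax is
+    # whatever GalaxySearchEngine accepts, so this example is illustrative only:
+    #
+    #   payload = { 'query': "select * from history where name = 'RNA-seq run'" }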
diff --git a/lib/galaxy/webapps/galaxy/api/tool_data.py b/lib/galaxy/webapps/galaxy/api/tool_data.py
new file mode 100644
index 0000000..6e371e0
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/tool_data.py
@@ -0,0 +1,125 @@
+import os
+
+from galaxy import exceptions
+from galaxy import web
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_raw as expose_api_raw
+from galaxy.web.base.controller import BaseAPIController
+import galaxy.queue_worker
+
+
+class ToolData( BaseAPIController ):
+    """
+    RESTful controller for interactions with tool data
+    """
+
+    @web.require_admin
+    @expose_api
+    def index( self, trans, **kwds ):
+        """
+        GET /api/tool_data: returns a list of tool_data tables.
+        """
+        return list( a.to_dict() for a in self._data_tables.values() )
+
+    @web.require_admin
+    @expose_api
+    def show( self, trans, id, **kwds ):
+        return self._data_table(id).to_dict(view='element')
+
+    @web.require_admin
+    @expose_api
+    def reload( self, trans, id, **kwd ):
+        """
+        GET /api/tool_data/{id}/reload
+
+        Reloads a tool_data table.
+        """
+        decoded_tool_data_id = id
+        data_table = trans.app.tool_data_tables.data_tables.get(decoded_tool_data_id)
+        if data_table is None:
+            trans.response.status = 400
+            return "Invalid data table id ( %s ) specified." % str( decoded_tool_data_id )
+        data_table.reload_from_files()
+        galaxy.queue_worker.send_control_task( trans.app, 'reload_tool_data_tables',
+                                               noop_self=True,
+                                               kwargs={'table_name': decoded_tool_data_id} )
+        return self._data_table( decoded_tool_data_id ).to_dict( view='element' )
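+    # Illustrative only, assuming this instance defines a data table named
+    # 'all_fasta':
+    #
+    #   GET /api/tool_data/all_fasta/reload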
+
+    @web.require_admin
+    @expose_api
+    def delete( self, trans, id, **kwd ):
+        """
+        DELETE /api/tool_data/{id}
+        Removes an item from a data table
+
+        :type   id:     str
+        :param  id:     the id of the data table containing the item to delete
+        :type   kwd:    dict
+        :param  kwd:    (required) dictionary structure containing:
+
+            * payload:     a dictionary itself containing:
+                * values:   <TAB> separated list of column contents, there must be a value for all the columns of the data table
+        """
+        decoded_tool_data_id = id
+
+        try:
+            data_table = trans.app.tool_data_tables.data_tables.get(decoded_tool_data_id)
+        except Exception:
+            data_table = None
+        if not data_table:
+            trans.response.status = 400
+            return "Invalid data table id ( %s ) specified." % str( decoded_tool_data_id )
+
+        values = None
+        if kwd.get( 'payload', None ):
+            values = kwd['payload'].get( 'values', '' )
+
+        if not values:
+            trans.response.status = 400
+            return "Invalid data table item ( %s ) specified." % str( values )
+
+        split_values = values.split("\t")
+
+        if len(split_values) != len(data_table.get_column_name_list()):
+            trans.response.status = 400
+            return "Invalid data table item ( %s ) specified. Wrong number of columns (%s given, %s required)." % ( str( values ), str(len(split_values)), str(len(data_table.get_column_name_list())))
+
+        data_table.remove_entry(split_values)
+        galaxy.queue_worker.send_control_task( trans.app, 'reload_tool_data_tables',
+                                               noop_self=True,
+                                               kwargs={'table_name': decoded_tool_data_id} )
+        return self._data_table( decoded_tool_data_id ).to_dict( view='element' )
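+    # Sketch of a matching payload, assuming a four-column table such as
+    # all_fasta (value, dbkey, name, path); the fields must be joined with
+    # literal tab characters:
+    #
+    #   payload = { 'values': '\t'.join( [ 'hg19', 'hg19', 'Human (hg19)',
+    #                                      '/data/hg19/hg19.fa' ] ) }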
+
+    @web.require_admin
+    @expose_api
+    def show_field( self, trans, id, value, **kwds ):
+        """
+        GET /api/tool_data/<id>/fields/<value>
+
+        Get information about a particular field in a tool_data table
+        """
+        return self._data_table_field( id, value ).to_dict()
+
+    @web.require_admin
+    @expose_api_raw
+    def download_field_file( self, trans, id, value, path, **kwds ):
+        field_value = self._data_table_field( id, value )
+        base_dir = field_value.get_base_dir()
+        full_path = os.path.join( base_dir, path )
+        if full_path not in field_value.get_files():
+            raise exceptions.ObjectNotFound("No such path in data table field.")
+        return open(full_path, "r")
+
+    def _data_table_field( self, id, value ):
+        out = self._data_table(id).get_field(value)
+        if out is None:
+            raise exceptions.ObjectNotFound("No such field %s in data table %s." % (value, id))
+        return out
+
+    def _data_table( self, id ):
+        try:
+            return self._data_tables[id]
+        except KeyError:
+            raise exceptions.ObjectNotFound("No such data table %s." % id)
+
+    @property
+    def _data_tables( self ):
+        return self.app.tool_data_tables.data_tables
diff --git a/lib/galaxy/webapps/galaxy/api/tool_dependencies.py b/lib/galaxy/webapps/galaxy/api/tool_dependencies.py
new file mode 100644
index 0000000..ea5821d
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/tool_dependencies.py
@@ -0,0 +1,183 @@
+"""
+API operations allowing clients to manage tool dependencies.
+"""
+
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import require_admin
+from galaxy.web.base.controller import BaseAPIController
+
+from galaxy.tools.deps import views
+
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class ToolDependenciesAPIController( BaseAPIController ):
+
+    def __init__(self, app):
+        super(ToolDependenciesAPIController, self).__init__(app)
+        self._view = views.DependencyResolversView(app)
+
+    @expose_api
+    @require_admin
+    def index(self, trans, **kwd):
+        """
+        GET /api/dependencies_resolvers
+        """
+        return self._view.index()
+
+    @expose_api
+    @require_admin
+    def show(self, trans, id):
+        """
+        GET /api/dependencies_resolver/<id>
+        """
+        return self._view.show(id)
+
+    @expose_api
+    @require_admin
+    def update(self, trans):
+        """
+        PUT /api/dependencies_resolvers
+
+        Reload tool dependency resolution configuration.
+        """
+        return self._view.reload()
+
+    @expose_api
+    @require_admin
+    def resolver_dependency(self, trans, id, **kwds):
+        """
+        GET /api/dependencies_resolver/{index}/dependency
+
+        Resolve described requirement against specified dependency resolver.
+
+        :type   index:    int
+        :param  index:    index of the dependency resolver
+        :type   kwds:     dict
+        :param  kwds:     dictionary structure containing extra parameters
+        :type   name:     str
+        :param  name:     name of the requirement to find a dependency for (required)
+        :type   version:  str
+        :param  version:  version of the requirement to find a dependency for (required)
+        :type   exact:    bool
+        :param  exact:    require an exact match to specify requirement (do not discard
+                          version information to resolve dependency).
+
+        :rtype:     dict
+        :returns:   a dictified description of the dependency, with attribute
+                    ``dependency_type: None`` if no match was found.
+        """
+        return self._view.resolver_dependency(id, **kwds)
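+    # Hypothetical example: ask the resolver at index 0 whether it can resolve
+    # samtools 1.3.1 exactly:
+    #
+    #   GET /api/dependencies_resolver/0/dependency?name=samtools&version=1.3.1&exact=True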
+
+    @expose_api
+    @require_admin
+    def install_dependency(self, trans, id=None, **kwds):
+        """
+        POST /api/dependencies_resolver/{index}/dependency
+
+        Install described requirement against specified dependency resolver.
+
+        :type   index:    int
+        :param  index:    index of the dependency resolver
+        :type   kwds:     dict
+        :param  kwds:     dictionary structure containing extra parameters
+        :type   name:     str
+        :param  name:     name of the requirement to find a dependency for (required)
+        :type   version:  str
+        :param  version:  version of the requirement to find a dependency for (required)
+        :type   exact:    bool
+        :param  exact:    require an exact match to specify requirement (do not discard
+                          version information to resolve dependency).
+
+        :rtype:     dict
+        :returns:   a dictified description of the dependency, with attribute
+                    ``dependency_type: None`` if no match was found.
+        """
+        self._view.install_dependency(id, **kwds)
+        return self._view.manager_dependency(**kwds)
+
+    @expose_api
+    @require_admin
+    def manager_dependency(self, trans, **kwds):
+        """
+        GET /api/dependencies_resolvers/dependency
+
+        Resolve described requirement against all dependency resolvers, returning
+        the match with highest priority.
+
+        :type   index:    int
+        :param  index:    index of the dependency resolver
+        :type   kwds:     dict
+        :param  kwds:     dictionary structure containing extra parameters
+        :type   name:     str
+        :param  name:     name of the requirement to find a dependency for (required)
+        :type   version:  str
+        :param  version:  version of the requirement to find a dependency for (required)
+        :type   exact:    bool
+        :param  exact:    require an exact match to specify requirement (do not discard
+                          version information to resolve dependency).
+
+        :rtype:     dict
+        :returns:   a dictified description of the dependency, with type: None
+                    if no match was found.
+        """
+        return self._view.manager_dependency(**kwds)
+
+    @expose_api
+    @require_admin
+    def resolver_requirements(self, trans, id, **kwds):
+        """
+        GET /api/dependencies_resolver/{index}/requirements
+
+        Find all "simple" requirements that could be resolved "exactly"
+        by this dependency resolver. The dependency resolver must implement
+        ListDependencyResolver.
+
+        :type   index:    int
+        :param  index:    index of the dependency resolver
+
+        :rtype:     dict
+        :returns:   a dictified description of the requirement that could
+                    be resolved.
+        """
+        return self._view.resolver_requirements(id)
+
+    @expose_api
+    @require_admin
+    def manager_requirements(self, trans, **kwds):
+        """
+        GET /api/dependencies_resolver/requirements
+
+        Find all "simple" requirements that could be resolved "exactly"
+        by all dependency resolvers that support this operation.
+
+        :type   index:    int
+        :param  index:    index of the dependency resolver
+
+        :rtype:     dict
+        :returns:   a dictified description of the requirement that could
+                    be resolved (keyed on 'requirement') and the index of
+                    the corresponding resolver (keyed on 'index').
+        """
+        return self._view.manager_requirements()
+
+    @expose_api
+    @require_admin
+    def clean(self, trans, id=None, **kwds):
+        """
+        POST /api/dependencies_resolver/{index}/clean
+
+        Cleans up intermediate files created by resolvers during the dependency
+        installation.
+
+        :type   index:    int
+        :param  index:    index of the dependency resolver
+
+        :rtype:     dict
+        :returns:   a dictified description of the requirement that could
+                    be resolved (keyed on 'requirement') and the index of
+                    the corresponding resolver (keyed on 'index').
+        """
+        return self._view.clean(id, **kwds)
diff --git a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
new file mode 100644
index 0000000..00f9d60
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -0,0 +1,741 @@
+import json
+import logging
+import re
+from time import strftime
+
+from paste.httpexceptions import HTTPBadRequest, HTTPForbidden
+
+import tool_shed.util.shed_util_common as suc
+from galaxy import util
+from galaxy import web
+from galaxy import exceptions
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web.base.controller import BaseAPIController
+
+from tool_shed.galaxy_install.install_manager import InstallRepositoryManager
+from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
+from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager
+from tool_shed.util import common_util
+from tool_shed.util import encoding_util
+from tool_shed.util import hg_util
+from tool_shed.util import workflow_util
+from tool_shed.util import tool_util
+from tool_shed.util import repository_util
+
+from sqlalchemy import and_
+
+log = logging.getLogger( __name__ )
+
+
+def get_message_for_no_shed_tool_config():
+    # This Galaxy instance is not configured with a shed-related tool panel configuration file.
+    message = 'The tool_config_file setting in galaxy.ini must include at least one shed tool configuration file name with a <toolbox> '
+    message += 'tag that includes a tool_path attribute value which is a directory relative to the Galaxy installation directory in order to '
+    message += 'automatically install tools from a tool shed into Galaxy (e.g., the file name shed_tool_conf.xml whose <toolbox> tag is '
+    message += '<toolbox tool_path="../shed_tools">).  For details, see the "Installation of Galaxy tool shed repository tools into a local '
+    message += 'Galaxy instance" section of the Galaxy tool shed wiki at http://wiki.galaxyproject.org/InstallingRepositoriesToGalaxy#'
+    message += 'Installing_Galaxy_tool_shed_repository_tools_into_a_local_Galaxy_instance.'
+    return message
+
+
+class ToolShedRepositoriesController( BaseAPIController ):
+    """RESTful controller for interactions with tool shed repositories."""
+
+    def __ensure_can_install_repos( self, trans ):
+        # Make sure this Galaxy instance is configured with a shed-related tool panel configuration file.
+        if not suc.have_shed_tool_conf_for_install( self.app ):
+            message = get_message_for_no_shed_tool_config()
+            log.debug( message )
+            return dict( status='error', error=message )
+        # Make sure the current user's API key proves they are an admin user in this Galaxy instance.
+        if not trans.user_is_admin():
+            raise exceptions.AdminRequiredException( 'You are not authorized to install repositories into this Galaxy instance.' )
+
+    def __flatten_repository_dependency_list( self, trans, tool_shed_repository ):
+        '''
+        Recursively collect and return a flat, deduplicated list of all of this
+        tool_shed_repository's repository dependencies.
+        '''
+        dependencies = []
+        for dependency in tool_shed_repository.repository_dependencies:
+            if len( dependency.repository_dependencies ) > 0:
+                sub_dependencies = self.__flatten_repository_dependency_list( trans, dependency )
+                for sub_dependency in sub_dependencies:
+                    if sub_dependency not in dependencies:
+                        dependencies.append( sub_dependency )
+            if dependency not in dependencies:
+                dependencies.append( dependency.as_dict( value_mapper=self.__get_value_mapper( trans, tool_shed_repository ) ) )
+        return dependencies
+
+    def __get_repo_info_dict( self, trans, repositories, tool_shed_url ):
+        repo_ids = []
+        changesets = []
+        for repository_id, changeset in repositories:
+            repo_ids.append( repository_id )
+            changesets.append( changeset )
+        params = dict( repository_ids=str( ','.join( repo_ids ) ), changeset_revisions=str( ','.join( changesets ) ) )
+        pathspec = [ 'repository', 'get_repository_information' ]
+        raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        return json.loads( raw_text )
+
+    def __get_value_mapper( self, trans, tool_shed_repository ):
+        value_mapper = { 'id': trans.security.encode_id( tool_shed_repository.id ),
+                         'error_message': tool_shed_repository.error_message or '' }
+        return value_mapper
+
+    def __get_tool_dependencies( self, metadata, tool_dependencies=None ):
+        if tool_dependencies is None:
+            tool_dependencies = []
+        for key, dependency_dict in metadata[ 'tool_dependencies' ].items():
+            if 'readme' in dependency_dict:
+                del( dependency_dict[ 'readme' ] )
+            if dependency_dict not in tool_dependencies:
+                tool_dependencies.append( dependency_dict )
+        if metadata[ 'has_repository_dependencies' ]:
+            for dependency in metadata[ 'repository_dependencies' ]:
+                tool_dependencies = self.__get_tool_dependencies( dependency, tool_dependencies )
+        return tool_dependencies
+
+    def __get_tools( self, metadata, tools=None ):
+        if tools is None:
+            tools = []
+        if metadata[ 'includes_tools_for_display_in_tool_panel' ]:
+            for tool_dict in metadata[ 'tools' ]:
+                tool_info = dict( clean=re.sub( '[^a-zA-Z0-9]+', '_', tool_dict[ 'name' ] ).lower(),
+                                  name=tool_dict[ 'name' ],
+                                  version=tool_dict[ 'version' ],
+                                  description=tool_dict[ 'description' ] )
+                if tool_info not in tools:
+                    tools.append( tool_info )
+        if metadata[ 'has_repository_dependencies' ]:
+            for dependency in metadata[ 'repository_dependencies' ]:
+                tools = self.__get_tools( dependency, tools )
+        return tools
+
+    @expose_api
+    def check_for_updates( self, trans, **kwd ):
+        '''
+        GET /api/tool_shed_repositories/check_for_updates
+        Check for updates to the specified repository, or all installed repositories.
+
+        :param key: the current Galaxy admin user's API key
+        :param id: the galaxy-side encoded repository ID
+        '''
+        repository_id = kwd.get( 'id', None )
+        message, status = repository_util.check_for_updates( self.app, trans.install_model, repository_id )
+        return { 'status': status, 'message': message }
+
+    @expose_api
+    def exported_workflows( self, trans, id, **kwd ):
+        """
+        GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id}/exported_workflows
+
+        Display a list of dictionaries containing information about this tool shed repository's exported workflows.
+
+        :param id: the encoded id of the ToolShedRepository object
+        """
+        # Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_workflows
+        # Since exported workflows are dictionaries with very few attributes that differentiate them from each
+        # other, we'll build the list based on the following dictionary of those few attributes.
+        exported_workflows = []
+        repository = repository_util.get_tool_shed_repository_by_id( self.app, id )
+        metadata = repository.metadata
+        if metadata:
+            exported_workflow_tups = metadata.get( 'workflows', [] )
+        else:
+            exported_workflow_tups = []
+        for index, exported_workflow_tup in enumerate( exported_workflow_tups ):
+            # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of
+            # relative_path is the location on disk (relative to the root of the installed repository) where the
+            # exported_workflow_dict file (.ga file) is located.
+            exported_workflow_dict = exported_workflow_tup[ 1 ]
+            annotation = exported_workflow_dict.get( 'annotation', '' )
+            format_version = exported_workflow_dict.get( 'format-version', '' )
+            workflow_name = exported_workflow_dict.get( 'name', '' )
+            # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its
+            # location (i.e., index) in the list.
+            display_dict = dict( index=index, annotation=annotation, format_version=format_version, workflow_name=workflow_name )
+            exported_workflows.append( display_dict )
+        return exported_workflows
+
+    @expose_api
+    def get_latest_installable_revision( self, trans, payload, **kwd ):
+        """
+        POST /api/tool_shed_repositories/get_latest_installable_revision
+        Get the latest installable revision of a specified repository from a specified Tool Shed.
+
+        :param key: the current Galaxy admin user's API key
+
+        The following parameters are included in the payload.
+        :param tool_shed_url (required): the base URL of the Tool Shed from which to retrieve the Repository revision.
+        :param name (required): the name of the Repository
+        :param owner (required): the owner of the Repository
+        """
+        # Get the information about the repository to be installed from the payload.
+        tool_shed_url, name, owner = self.__parse_repository_from_payload( payload )
+        # Make sure the current user's API key proves they are an admin user in this Galaxy instance.
+        if not trans.user_is_admin():
+            raise exceptions.AdminRequiredException( 'You are not authorized to request the latest installable revision for a repository in this Galaxy instance.' )
+        params = dict( name=name, owner=owner )
+        pathspec = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
+        try:
+            raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        except Exception as e:
+            message = "Error attempting to retrieve the latest installable revision from tool shed %s for repository %s owned by %s: %s" % \
+                ( str( tool_shed_url ), str( name ), str( owner ), str( e ) )
+            log.debug( message )
+            return dict( status='error', error=message )
+        if raw_text:
+            # If successful, the response from get_ordered_installable_revisions will be a list of
+            # changeset_revision hash strings.
+            changeset_revisions = json.loads( raw_text )
+            if len( changeset_revisions ) >= 1:
+                return changeset_revisions[ -1 ]
+        return hg_util.INITIAL_CHANGELOG_HASH
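+    # A sketch of the payload; the URL below is the main public Tool Shed and the
+    # repository name/owner pair is only an example:
+    #
+    #   payload = { 'tool_shed_url': 'https://toolshed.g2.bx.psu.edu',
+    #               'name': 'fastqc',
+    #               'owner': 'devteam' }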
+
+    @expose_api
+    @web.require_admin
+    def shed_categories( self, trans, **kwd ):
+        """
+        GET /api/tool_shed_repositories/shed_categories
+
+        Display a list of categories in the selected toolshed.
+
+        :param tool_shed_url: the url of the toolshed to get categories from
+        """
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
+        url = util.build_url( tool_shed_url, pathspec=[ 'api', 'categories' ] )
+        categories = json.loads( util.url_get( url ) )
+        repositories = []
+        url = util.build_url( tool_shed_url, pathspec=[ 'api', 'repositories' ] )
+        for repo in json.loads( util.url_get( url ) ):
+            repositories.append( dict( value=repo[ 'id' ], label='%s/%s' % ( repo[ 'owner' ], repo[ 'name' ] ) ) )
+        return { 'categories': categories, 'repositories': repositories }
+
+    @expose_api
+    @web.require_admin
+    def shed_category( self, trans, **kwd ):
+        """
+        GET /api/tool_shed_repositories/shed_category
+
+        Display a list of repositories in the selected category.
+
+        :param tool_shed_url: the url of the toolshed to get repositories from
+        :param category_id: the category to get repositories from
+        """
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        category_id = kwd.get( 'category_id', '' )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
+        url = util.build_url( tool_shed_url, pathspec=[ 'api', 'categories', category_id, 'repositories' ] )
+        category = json.loads( util.url_get( url ) )
+        return category
+
+    @expose_api
+    @web.require_admin
+    def shed_repository( self, trans, **kwd ):
+        """
+        GET /api/tool_shed_repositories/shed_repository
+
+        Get details about the specified repository from its shed.
+
+        :param tsr_id: the tool_shed_repository_id
+        """
+        tool_dependencies = dict()
+        tools = dict()
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        tsr_id = kwd.get( 'tsr_id', '' )
+        tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans.app )
+        tool_panel_section_dict = { 'name': tool_panel_section_select_field.name,
+                                    'id': tool_panel_section_select_field.field_id,
+                                    'sections': [] }
+        for name, id, _ in tool_panel_section_select_field.options:
+            tool_panel_section_dict['sections'].append( dict( id=id, name=name ) )
+        repository_data = dict()
+        repository_data[ 'repository' ] = json.loads( util.url_get( tool_shed_url, pathspec=[ 'api', 'repositories', tsr_id ] ) )
+        repository_data[ 'repository' ][ 'metadata' ] = json.loads( util.url_get( tool_shed_url, pathspec=[ 'api', 'repositories', tsr_id, 'metadata' ] ) )
+        repository_data[ 'shed_conf' ] = tool_util.build_shed_tool_conf_select_field( trans.app ).get_html().replace('\n', '')
+        repository_data[ 'panel_section_html' ] = tool_panel_section_select_field.get_html( extra_attr={ 'style': 'width: 30em;' } ).replace( '\n', '' )
+        repository_data[ 'panel_section_dict' ] = tool_panel_section_dict
+        for changeset, metadata in repository_data[ 'repository' ][ 'metadata' ].items():
+            if changeset not in tool_dependencies:
+                tool_dependencies[ changeset ] = []
+            if metadata[ 'includes_tools_for_display_in_tool_panel' ]:
+                if changeset not in tools:
+                    tools[ changeset ] = []
+                for tool_dict in metadata[ 'tools' ]:
+                    tool_info = dict( clean=re.sub( '[^a-zA-Z0-9]+', '_', tool_dict[ 'name' ] ).lower(),
+                                      guid=tool_dict[ 'guid' ],
+                                      name=tool_dict[ 'name' ],
+                                      version=tool_dict[ 'version' ],
+                                      description=tool_dict[ 'description' ] )
+                    if tool_info not in tools[ changeset ]:
+                        tools[ changeset ].append( tool_info )
+                if metadata[ 'has_repository_dependencies' ]:
+                    for repository_dependency in metadata[ 'repository_dependencies' ]:
+                        tools[ changeset ] = self.__get_tools( repository_dependency, tools[ changeset ] )
+                repository_data[ 'tools' ] = tools
+            for key, dependency_dict in metadata[ 'tool_dependencies' ].items():
+                if 'readme' in dependency_dict:
+                    del( dependency_dict[ 'readme' ] )
+                if dependency_dict not in tool_dependencies[ changeset ]:
+                    tool_dependencies[ changeset ].append( dependency_dict )
+                    # log.debug(tool_dependencies)
+            if metadata[ 'has_repository_dependencies' ]:
+                for repository_dependency in metadata[ 'repository_dependencies' ]:
+                    tool_dependencies[ changeset ] = self.__get_tool_dependencies( repository_dependency, tool_dependencies[ changeset ] )
+        repository_data[ 'tool_dependencies' ] = tool_dependencies
+        return repository_data
+
+    @expose_api
+    def import_workflow( self, trans, payload, **kwd ):
+        """
+        POST /api/tool_shed_repositories/import_workflow
+
+        Import the specified exported workflow contained in the specified installed tool shed repository into Galaxy.
+
+        :param key: the API key of the Galaxy user with which the imported workflow will be associated.
+        :param id: the encoded id of the ToolShedRepository object
+
+        The following parameters are included in the payload.
+        :param index: the index location of the workflow tuple in the list of exported workflows stored in the metadata for the specified repository
+        """
+        api_key = kwd.get( 'key', None )
+        if api_key is None:
+            raise HTTPBadRequest( detail="Missing required parameter 'key' whose value is the API key for the Galaxy user importing the specified workflow." )
+        tool_shed_repository_id = kwd.get( 'id', '' )
+        if not tool_shed_repository_id:
+            raise HTTPBadRequest( detail="Missing required parameter 'id'." )
+        index = payload.get( 'index', None )
+        if index is None:
+            raise HTTPBadRequest( detail="Missing required parameter 'index'." )
+        repository = repository_util.get_tool_shed_repository_by_id( self.app, tool_shed_repository_id )
+        exported_workflows = json.loads( self.exported_workflows( trans, tool_shed_repository_id ) )
+        # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its location (i.e., index) in the list.
+        exported_workflow = exported_workflows[ int( index ) ]
+        workflow_name = exported_workflow[ 'workflow_name' ]
+        workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
+        if status == 'error':
+            log.debug( error_message )
+            return {}
+        return workflow.to_dict( view='element' )
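+
+    # An illustrative client call (a sketch, not part of the upstream source;
+    # assumes the `requests` library, a running Galaxy and a valid API key):
+    #
+    #     import requests
+    #     requests.post( 'http://localhost:8080/api/tool_shed_repositories/import_workflow',
+    #                    params={ 'key': API_KEY, 'id': ENCODED_REPOSITORY_ID },
+    #                    json={ 'index': 0 } )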
+
+    @expose_api
+    def import_workflows( self, trans, **kwd ):
+        """
+        POST /api/tool_shed_repositories/import_workflows
+
+        Import all of the exported workflows contained in the specified installed tool shed repository into Galaxy.
+
+        :param key: the API key of the Galaxy user with which the imported workflows will be associated.
+        :param id: the encoded id of the ToolShedRepository object
+        """
+        api_key = kwd.get( 'key', None )
+        if api_key is None:
+            raise HTTPBadRequest( detail="Missing required parameter 'key' whose value is the API key for the Galaxy user importing the specified workflows." )
+        tool_shed_repository_id = kwd.get( 'id', '' )
+        if not tool_shed_repository_id:
+            raise HTTPBadRequest( detail="Missing required parameter 'id'." )
+        repository = repository_util.get_tool_shed_repository_by_id( self.app, tool_shed_repository_id )
+        exported_workflows = json.loads( self.exported_workflows( trans, tool_shed_repository_id ) )
+        imported_workflow_dicts = []
+        for exported_workflow_dict in exported_workflows:
+            workflow_name = exported_workflow_dict[ 'workflow_name' ]
+            workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name )
+            if status == 'error':
+                log.debug( error_message )
+            else:
+                imported_workflow_dicts.append( workflow.to_dict( view='element' ) )
+        return imported_workflow_dicts
+
+    @expose_api
+    def index( self, trans, **kwd ):
+        """
+        GET /api/tool_shed_repositories
+        Display a list of dictionaries containing information about installed tool shed repositories.
+        """
+        # Example URL: http://localhost:8763/api/tool_shed_repositories
+        clause_list = []
+        if 'name' in kwd:
+            clause_list.append( self.app.install_model.ToolShedRepository.table.c.name == kwd.get( 'name', None ) )
+        if 'owner' in kwd:
+            clause_list.append( self.app.install_model.ToolShedRepository.table.c.owner == kwd.get( 'owner', None ) )
+        if 'changeset' in kwd:
+            clause_list.append( self.app.install_model.ToolShedRepository.table.c.changeset_revision == kwd.get( 'changeset', None ) )
+        tool_shed_repository_dicts = []
+        query = trans.install_model.context.query( self.app.install_model.ToolShedRepository ) \
+                                           .order_by( self.app.install_model.ToolShedRepository.table.c.name )
+        if len( clause_list ) > 0:
+            query = query.filter( and_( *clause_list ) )
+        for tool_shed_repository in query.all():
+            tool_shed_repository_dict = \
+                tool_shed_repository.to_dict( value_mapper=self.__get_value_mapper( trans, tool_shed_repository ) )
+            tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+                                                              action='show',
+                                                              id=trans.security.encode_id( tool_shed_repository.id ) )
+            tool_shed_repository_dicts.append( tool_shed_repository_dict )
+        return tool_shed_repository_dicts
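+
+    # An illustrative query (a sketch, not part of the upstream source); the
+    # name/owner/changeset filters map to the clauses built above:
+    #
+    #     import requests
+    #     repos = requests.get( 'http://localhost:8080/api/tool_shed_repositories',
+    #                           params={ 'key': API_KEY, 'name': 'fastqc', 'owner': 'devteam' } ).json()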
+
+    @expose_api
+    @web.require_admin
+    def install( self, trans, **kwd ):
+        """
+        POST /api/tool_shed_repositories/install
+        Initiate the installation of a repository.
+
+        :param install_resolver_dependencies: True to install resolvable dependencies.
+        :param install_tool_dependencies: True to install tool dependencies.
+        :param install_repository_dependencies: True to install repository dependencies.
+        :param tool_panel_section_id: The unique identifier for an existing tool panel section
+        :param new_tool_panel_section_label: Create a new tool panel section with this label
+        :param shed_tool_conf: The shed tool config file to use for this installation
+        :param tool_shed_url: The URL for the toolshed whence this repository is being installed
+        :param changeset: The changeset to update to after cloning the repository
+        """
+        irm = InstallRepositoryManager( self.app )
+        tool_shed_url = kwd.get( 'tool_shed_url', None )
+        repositories = json.loads( kwd.get( 'repositories', '[]' ) )
+        repo_info_dict = self.__get_repo_info_dict( trans, repositories, tool_shed_url )
+        includes_tools = False
+        includes_tools_for_display_in_tool_panel = False
+        has_repository_dependencies = False
+        includes_tool_dependencies = False
+        install_resolver_dependencies = util.asbool( kwd.get( 'install_resolver_dependencies', False ) )
+        for encoded_repo_info_dict in repo_info_dict.get( 'repo_info_dicts', [] ):
+            decoded_repo_info_dict = encoding_util.tool_shed_decode( encoded_repo_info_dict )
+            if not includes_tools:
+                includes_tools = util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools', False ) )
+            if not includes_tools_for_display_in_tool_panel:
+                includes_tools_for_display_in_tool_panel = \
+                    util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) )
+            if not has_repository_dependencies:
+                has_repository_dependencies = util.string_as_bool( repo_info_dict.get( 'has_repository_dependencies', False ) )
+            if not includes_tool_dependencies:
+                includes_tool_dependencies = util.string_as_bool( repo_info_dict.get( 'includes_tool_dependencies', False ) )
+        encoded_repo_info_dicts = util.listify( repo_info_dict.get( 'repo_info_dicts', [] ) )
+        repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
+        tool_panel_section_id = kwd.get( 'tool_panel_section_id', None )
+        new_tool_panel_section_label = kwd.get( 'new_tool_panel_section', None )
+        tool_panel_section_mapping = json.loads( kwd.get( 'tool_panel_section', '{}' ) )
+        install_tool_dependencies = util.asbool( kwd.get( 'install_tool_dependencies', False ) )
+        install_repository_dependencies = util.asbool( kwd.get( 'install_repository_dependencies', False ) )
+        shed_tool_conf = kwd.get( 'shed_tool_conf', None )
+        tool_path = suc.get_tool_path_by_shed_tool_conf_filename( self.app, shed_tool_conf )
+        installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
+                                  new_tool_panel_section_label=new_tool_panel_section_label,
+                                  no_changes_checked=False,
+                                  repo_info_dicts=repo_info_dicts,
+                                  tool_panel_section_id=tool_panel_section_id,
+                                  tool_path=tool_path,
+                                  tool_shed_url=tool_shed_url )
+        new_repositories, tool_panel_keys, repo_info_dicts, filtered_repos = irm.handle_tool_shed_repositories( installation_dict )
+        if new_repositories:
+            installation_dict = dict( created_or_updated_tool_shed_repositories=new_repositories,
+                                      filtered_repo_info_dicts=filtered_repos,
+                                      has_repository_dependencies=has_repository_dependencies,
+                                      includes_tool_dependencies=includes_tool_dependencies,
+                                      includes_tools=includes_tools,
+                                      includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                                      install_repository_dependencies=install_repository_dependencies,
+                                      install_tool_dependencies=install_tool_dependencies,
+                                      message='',
+                                      new_tool_panel_section_label=new_tool_panel_section_label,
+                                      tool_panel_section_mapping=tool_panel_section_mapping,
+                                      install_resolver_dependencies=install_resolver_dependencies,
+                                      shed_tool_conf=shed_tool_conf,
+                                      status='ok',
+                                      tool_panel_section_id=tool_panel_section_id,
+                                      tool_panel_section_keys=tool_panel_keys,
+                                      tool_path=tool_path,
+                                      tool_shed_url=tool_shed_url )
+            encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
+                irm.initiate_repository_installation( installation_dict )
+            return json.dumps( dict( operation='install',
+                                     api=True,
+                                     install_resolver_dependencies=install_resolver_dependencies,
+                                     install_tool_dependencies=install_tool_dependencies,
+                                     encoded_kwd=encoded_kwd,
+                                     reinstalling=False,
+                                     tool_shed_repository_ids=json.dumps( [ repo[0] for repo in repositories ] ),
+                                     repositories=[ trans.security.encode_id( repo.id ) for repo in new_repositories ] ) )
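+
+    # Note: 'repositories' arrives as a JSON-encoded list of lists; only the
+    # first element of each entry (apparently an encoded repository id) is
+    # consumed above when building tool_shed_repository_ids for the response.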
+
+    @expose_api
+    def install_repository_revision( self, trans, payload, **kwd ):
+        """
+        POST /api/tool_shed_repositories/install_repository_revision
+        Install a specified repository revision from a specified tool shed into Galaxy.
+
+        :param key: the current Galaxy admin user's API key
+
+        The following parameters are included in the payload.
+        :param tool_shed_url (required): the base URL of the Tool Shed from which to install the Repository
+        :param name (required): the name of the Repository
+        :param owner (required): the owner of the Repository
+        :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
+        :param new_tool_panel_section_label (optional): label of a new section to be added to the Galaxy tool panel in which to load
+                                                        tools contained in the Repository.  At most one of this parameter and
+                                                        tool_panel_section_id may be set; the other, or both, must be an empty
+                                                        string (both cannot be used simultaneously).
+        :param tool_panel_section_id (optional): id of the Galaxy tool panel section in which to load tools contained in the Repository.
+                                                 If both this parameter and the above new_tool_panel_section_label parameter are
+                                                 empty strings, tools will be loaded outside of any sections in the tool panel.
+                                                 At most one of this parameter and new_tool_panel_section_label may be set; the
+                                                 other, or both, must be an empty string (both cannot be used simultaneously).
+        :param install_repository_dependencies (optional): Set to True if you want to install repository dependencies defined for the specified
+                                                           repository being installed.  The default setting is False.
+        :param install_tool_dependencies (optional): Set to True if you want to install tool dependencies defined for the specified repository being
+                                                     installed.  The default setting is False.
+        :param shed_tool_conf (optional): The shed-related tool panel configuration file configured in the "tool_config_file" setting in the Galaxy config file
+                                          (e.g., galaxy.ini).  At least one shed-related tool panel config file is required to be configured. Setting
+                                          this parameter to a specific file enables you to choose where the specified repository will be installed because
+                                          the tool_path attribute of the <toolbox> from the specified file is used as the installation location
+                                          (e.g., <toolbox tool_path="../shed_tools">).  If this parameter is not set, a shed-related tool panel configuration
+                                          file will be selected automatically.
+        """
+        # Get the information about the repository to be installed from the payload.
+        tool_shed_url, name, owner, changeset_revision = self.__parse_repository_from_payload( payload, include_changeset=True )
+        self.__ensure_can_install_repos( trans )
+        irm = InstallRepositoryManager( self.app )
+        installed_tool_shed_repositories = irm.install( tool_shed_url,
+                                                        name,
+                                                        owner,
+                                                        changeset_revision,
+                                                        payload )
+
+        def to_dict( tool_shed_repository ):
+            tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans, tool_shed_repository ) )
+            tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+                                                              action='show',
+                                                              id=trans.security.encode_id( tool_shed_repository.id ) )
+            return tool_shed_repository_dict
+        if installed_tool_shed_repositories:
+            return map( to_dict, installed_tool_shed_repositories )
+        message = "No repositories were installed, possibly because the selected repository has already been installed."
+        return dict( status="ok", message=message )
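+
+    # An illustrative call (a sketch, not part of the upstream source; the
+    # repository name/owner and the changeset hash are placeholders):
+    #
+    #     import requests
+    #     requests.post( 'http://localhost:8080/api/tool_shed_repositories/install_repository_revision',
+    #                    params={ 'key': ADMIN_API_KEY },
+    #                    json={ 'tool_shed_url': 'https://toolshed.g2.bx.psu.edu',
+    #                           'name': 'fastqc',
+    #                           'owner': 'devteam',
+    #                           'changeset_revision': CHANGESET_HASH,
+    #                           'install_tool_dependencies': True } )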
+
+    @expose_api
+    def install_repository_revisions( self, trans, payload, **kwd ):
+        """
+        POST /api/tool_shed_repositories/install_repository_revisions
+        Install one or more specified repository revisions from one or more specified tool sheds into Galaxy.  The received parameters
+        must be ordered lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.
+
+        It's questionable whether this method is needed as the above method for installing a single repository can probably cover all
+        desired scenarios.  We'll keep this one around just in case...
+
+        :param key: the current Galaxy admin user's API key
+
+        The following parameters are included in the payload.
+        :param tool_shed_urls: the base URLs of the Tool Sheds from which to install a specified Repository
+        :param names: the names of the Repositories to be installed
+        :param owners: the owners of the Repositories to be installed
+        :param changeset_revisions: the changeset_revisions of each RepositoryMetadata object associated with each Repository to be installed
+        :param new_tool_panel_section_label: optional label of a new section to be added to the Galaxy tool panel in which to load
+                                             tools contained in the Repository.  Either this parameter must be an empty string or
+                                             the tool_panel_section_id parameter must be an empty string, as both cannot be used.
+        :param tool_panel_section_id: optional id of the Galaxy tool panel section in which to load tools contained in the Repository.
+                                      If not set, tools will be loaded outside of any sections in the tool panel.  Either this
+                                      parameter or the new_tool_panel_section_label parameter must be an empty string,
+                                      as both cannot be used simultaneously.
+        :param install_repository_dependencies (optional): Set to True if you want to install repository dependencies defined for the specified
+                                                           repository being installed.  The default setting is False.
+        :param install_tool_dependencies (optional): Set to True if you want to install tool dependencies defined for the specified repository being
+                                                     installed.  The default setting is False.
+        :param shed_tool_conf (optional): The shed-related tool panel configuration file configured in the "tool_config_file" setting in the Galaxy config file
+                                          (e.g., galaxy.ini).  At least one shed-related tool panel config file is required to be configured. Setting
+                                          this parameter to a specific file enables you to choose where the specified repository will be installed because
+                                          the tool_path attribute of the <toolbox> from the specified file is used as the installation location
+                                          (e.g., <toolbox tool_path="../shed_tools">).  If this parameter is not set, a shed-related tool panel configuration
+                                          file will be selected automatically.
+        """
+        self.__ensure_can_install_repos( trans )
+        # Get the information about all of the repositories to be installed.
+        tool_shed_urls = util.listify( payload.get( 'tool_shed_urls', '' ) )
+        names = util.listify( payload.get( 'names', '' ) )
+        owners = util.listify( payload.get( 'owners', '' ) )
+        changeset_revisions = util.listify( payload.get( 'changeset_revisions', '' ) )
+        num_specified_repositories = len( tool_shed_urls )
+        if len( names ) != num_specified_repositories or \
+                len( owners ) != num_specified_repositories or \
+                len( changeset_revisions ) != num_specified_repositories:
+            message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered '
+            message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.'
+            log.debug( message )
+            return dict( status='error', error=message )
+        # Get the information about the Galaxy components (e.g., tool panel section, tool config file, etc.) that will contain information
+        # about each of the repositories being installed.
+        # TODO: we may want to enhance this method to allow for each of the following to be associated with each repository instead of
+        # forcing all repositories to use the same settings.
+        install_repository_dependencies = payload.get( 'install_repository_dependencies', False )
+        install_resolver_dependencies = payload.get( 'install_resolver_dependencies', False )
+        install_tool_dependencies = payload.get( 'install_tool_dependencies', False )
+        new_tool_panel_section_label = payload.get( 'new_tool_panel_section_label', '' )
+        shed_tool_conf = payload.get( 'shed_tool_conf', None )
+        tool_panel_section_id = payload.get( 'tool_panel_section_id', '' )
+        all_installed_tool_shed_repositories = []
+        for tool_shed_url, name, owner, changeset_revision in zip( tool_shed_urls, names, owners, changeset_revisions ):
+            current_payload = dict( tool_shed_url=tool_shed_url,
+                                    name=name,
+                                    owner=owner,
+                                    changeset_revision=changeset_revision,
+                                    new_tool_panel_section_label=new_tool_panel_section_label,
+                                    tool_panel_section_id=tool_panel_section_id,
+                                    install_repository_dependencies=install_repository_dependencies,
+                                    install_resolver_dependencies=install_resolver_dependencies,
+                                    install_tool_dependencies=install_tool_dependencies,
+                                    shed_tool_conf=shed_tool_conf )
+            installed_tool_shed_repositories = self.install_repository_revision( trans, **current_payload )
+            if isinstance( installed_tool_shed_repositories, dict ):
+                # We encountered an error.
+                return installed_tool_shed_repositories
+            elif isinstance( installed_tool_shed_repositories, list ):
+                all_installed_tool_shed_repositories.extend( installed_tool_shed_repositories )
+        return all_installed_tool_shed_repositories
+
+    @expose_api
+    def repair_repository_revision( self, trans, payload, **kwd ):
+        """
+        POST /api/tool_shed_repositories/repair_repository_revision
+        Repair a specified repository revision previously installed into Galaxy.
+
+        :param key: the current Galaxy admin user's API key
+
+        The following parameters are included in the payload.
+        :param tool_shed_url (required): the base URL of the Tool Shed from which the Repository was installed
+        :param name (required): the name of the Repository
+        :param owner (required): the owner of the Repository
+        :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
+        """
+        # Get the information about the repository to be installed from the payload.
+        tool_shed_url, name, owner, changeset_revision = self.__parse_repository_from_payload( payload, include_changeset=True )
+        tool_shed_repositories = []
+        tool_shed_repository = repository_util.get_installed_repository( self.app,
+                                                                         tool_shed=tool_shed_url,
+                                                                         name=name,
+                                                                         owner=owner,
+                                                                         changeset_revision=changeset_revision )
+        rrm = RepairRepositoryManager( self.app )
+        repair_dict = rrm.get_repair_dict( tool_shed_repository )
+        ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] )
+        ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] )
+        if ordered_tsr_ids and ordered_repo_info_dicts:
+            for index, tsr_id in enumerate( ordered_tsr_ids ):
+                repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
+                repo_info_dict = ordered_repo_info_dicts[ index ]
+                # TODO: handle errors in repair_dict.
+                repair_dict = rrm.repair_tool_shed_repository( repository,
+                                                               encoding_util.tool_shed_encode( repo_info_dict ) )
+                repository_dict = repository.to_dict( value_mapper=self.__get_value_mapper( trans, repository ) )
+                repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+                                                        action='show',
+                                                        id=trans.security.encode_id( repository.id ) )
+                if repair_dict:
+                    errors = repair_dict.get( repository.name, [] )
+                    repository_dict[ 'errors_attempting_repair' ] = '  '.join( errors )
+                tool_shed_repositories.append( repository_dict )
+        # Display the list of repaired repositories.
+        return tool_shed_repositories
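+
+    # An illustrative call (a sketch, not part of the upstream source); the
+    # payload mirrors install_repository_revision minus the tool panel options:
+    #
+    #     requests.post( 'http://localhost:8080/api/tool_shed_repositories/repair_repository_revision',
+    #                    params={ 'key': ADMIN_API_KEY },
+    #                    json={ 'tool_shed_url': 'https://toolshed.g2.bx.psu.edu',
+    #                           'name': 'fastqc', 'owner': 'devteam',
+    #                           'changeset_revision': CHANGESET_HASH } )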
+
+    def __parse_repository_from_payload( self, payload, include_changeset=False ):
+        # Get the information about the repository to be installed from the payload.
+        tool_shed_url = payload.get( 'tool_shed_url', '' )
+        if not tool_shed_url:
+            raise exceptions.RequestParameterMissingException( "Missing required parameter 'tool_shed_url'." )
+        name = payload.get( 'name', '' )
+        if not name:
+            raise exceptions.RequestParameterMissingException( "Missing required parameter 'name'." )
+        owner = payload.get( 'owner', '' )
+        if not owner:
+            raise exceptions.RequestParameterMissingException( "Missing required parameter 'owner'." )
+        if not include_changeset:
+            return tool_shed_url, name, owner
+
+        changeset_revision = payload.get( 'changeset_revision', '' )
+        if not changeset_revision:
+            raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." )
+
+        return tool_shed_url, name, owner, changeset_revision
+
+    @expose_api
+    def reset_metadata_on_installed_repositories( self, trans, payload, **kwd ):
+        """
+        PUT /api/tool_shed_repositories/reset_metadata_on_installed_repositories
+
+        Resets all metadata on all repositories installed into Galaxy in an "orderly fashion".
+
+        :param key: the API key of the Galaxy admin user.
+        """
+        start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+        results = dict( start_time=start_time,
+                        successful_count=0,
+                        unsuccessful_count=0,
+                        repository_status=[] )
+        # Make sure the current user's API key indicates that they are an admin user in this Galaxy instance.
+        if not trans.user_is_admin():
+            raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' )
+        irmm = InstalledRepositoryMetadataManager( self.app )
+        query = irmm.get_query_for_setting_metadata_on_repositories( order=False )
+        # Reset metadata on each installed repository.
+        for repository in query:
+            try:
+                irmm.set_repository( repository )
+                irmm.reset_all_metadata_on_installed_repository()
+                irmm_invalid_file_tups = irmm.get_invalid_file_tups()
+                if irmm_invalid_file_tups:
+                    message = tool_util.generate_message_for_invalid_tools( self.app,
+                                                                            irmm_invalid_file_tups,
+                                                                            repository,
+                                                                            None,
+                                                                            as_html=False )
+                    results[ 'unsuccessful_count' ] += 1
+                else:
+                    message = "Successfully reset metadata on repository %s owned by %s" % \
+                        ( str( repository.name ), str( repository.owner ) )
+                    results[ 'successful_count' ] += 1
+            except Exception as e:
+                message = "Error resetting metadata on repository %s owned by %s: %s" % \
+                    ( str( repository.name ), str( repository.owner ), str( e ) )
+                results[ 'unsuccessful_count' ] += 1
+            results[ 'repository_status' ].append( message )
+        stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+        results[ 'stop_time' ] = stop_time
+        return json.dumps( results, sort_keys=True, indent=4 )
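+
+    # An illustrative call (a sketch, not part of the upstream source):
+    #
+    #     requests.put( 'http://localhost:8080/api/tool_shed_repositories/reset_metadata_on_installed_repositories',
+    #                   params={ 'key': ADMIN_API_KEY } )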
+
+    @expose_api
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id}
+        Display a dictionary containing information about a specified tool_shed_repository.
+
+        :param id: the encoded id of the ToolShedRepository object
+        """
+        # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
+        tool_shed_repository = repository_util.get_tool_shed_repository_by_id( self.app, id )
+        if tool_shed_repository is None:
+            log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) )
+            return {}
+        tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans, tool_shed_repository ) )
+        tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+                                                          action='show',
+                                                          id=trans.security.encode_id( tool_shed_repository.id ) )
+        return tool_shed_repository_dict
+
+    @expose_api
+    @web.require_admin
+    def status( self, trans, id, **kwd ):
+        """
+        GET /api/tool_shed_repositories/{id}/status
+        Display a dictionary containing information about a specified repository's installation
+        status and a list of its dependencies and the status of each.
+
+        :param id: the repository's encoded id
+        """
+        tool_shed_repository = repository_util.get_tool_shed_repository_by_id( self.app, id )
+        if tool_shed_repository is None:
+            log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) )
+            return {}
+        tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans, tool_shed_repository ) )
+        tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+                                                          action='show',
+                                                          id=trans.security.encode_id( tool_shed_repository.id ) )
+        tool_shed_repository_dict[ 'repository_dependencies' ] = self.__flatten_repository_dependency_list( trans, tool_shed_repository )
+        return tool_shed_repository_dict
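+
+    # An illustrative call (a sketch, not part of the upstream source):
+    #
+    #     requests.get( 'http://localhost:8080/api/tool_shed_repositories/%s/status' % ENCODED_REPOSITORY_ID,
+    #                   params={ 'key': ADMIN_API_KEY } ).json()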
diff --git a/lib/galaxy/webapps/galaxy/api/tools.py b/lib/galaxy/webapps/galaxy/api/tools.py
new file mode 100644
index 0000000..b76dd9a
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -0,0 +1,632 @@
+import logging
+import urllib
+from json import dumps
+
+import galaxy.queue_worker
+from galaxy import exceptions, managers, util, web
+from galaxy.managers.collections_util import dictify_dataset_collection_instance
+from galaxy.visualization.genomes import GenomeRegion
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesVisualizationMixin
+
+log = logging.getLogger( __name__ )
+
+
+class ToolsController( BaseAPIController, UsesVisualizationMixin ):
+    """
+    RESTful controller for interactions with tools.
+    """
+
+    def __init__( self, app ):
+        super( ToolsController, self ).__init__( app )
+        self.history_manager = managers.histories.HistoryManager( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    @expose_api_anonymous_and_sessionless
+    def index( self, trans, **kwds ):
+        """
+        GET /api/tools: returns a list of tools defined by parameters::
+
+            parameters:
+
+                in_panel  - if true, tools are returned in panel structure,
+                            including sections and labels
+                trackster - if true, only tools that are compatible with
+                            Trackster are returned
+                q         - if present, a search on the given query will be
+                            performed
+                tool_id   - if present, all installed versions of the given
+                            tool_id will be returned
+        """
+
+        # Read params.
+        in_panel = util.string_as_bool( kwds.get( 'in_panel', 'True' ) )
+        trackster = util.string_as_bool( kwds.get( 'trackster', 'False' ) )
+        q = kwds.get( 'q', '' )
+        tool_id = kwds.get( 'tool_id', '' )
+
+        # Find whether to search.
+        if q:
+            hits = self._search( q )
+            results = []
+            if hits:
+                for hit in hits:
+                    try:
+                        tool = self._get_tool( hit, user=trans.user )
+                        if tool:
+                            results.append( tool.id )
+                    except exceptions.AuthenticationFailed:
+                        pass
+                    except exceptions.ObjectNotFound:
+                        pass
+            return results
+
+        # Find whether to detect.
+        if tool_id:
+            detected_versions = self._detect( trans, tool_id )
+            return detected_versions
+
+        # Return everything.
+        try:
+            return self.app.toolbox.to_dict( trans, in_panel=in_panel, trackster=trackster)
+        except Exception:
+            raise exceptions.InternalServerError( "Error: Could not convert toolbox to dictionary" )
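+
+    # Illustrative queries (a sketch, not part of the upstream source; 'cat1'
+    # stands in for any installed tool id):
+    #
+    #     import requests
+    #     requests.get( 'http://localhost:8080/api/tools', params={ 'q': 'bwa' } ).json()
+    #     requests.get( 'http://localhost:8080/api/tools', params={ 'tool_id': 'cat1' } ).json()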
+
+    @expose_api_anonymous_and_sessionless
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/tools/{tool_id}
+        Returns tool information, including parameters and inputs.
+        """
+        io_details = util.string_as_bool( kwd.get( 'io_details', False ) )
+        link_details = util.string_as_bool( kwd.get( 'link_details', False ) )
+        tool = self._get_tool( id, user=trans.user )
+        return tool.to_dict( trans, io_details=io_details, link_details=link_details )
+
+    @expose_api_anonymous
+    def build( self, trans, id, **kwd ):
+        """
+        GET /api/tools/{tool_id}/build
+        Returns a tool model including dynamic parameters and updated values, repeats block etc.
+        """
+        if 'payload' in kwd:
+            kwd = kwd.get('payload')
+        tool_version = kwd.get( 'tool_version', None )
+        tool = self._get_tool( id, tool_version=tool_version, user=trans.user )
+        return tool.to_json(trans, kwd.get('inputs', kwd))
+
+    @expose_api
+    @web.require_admin
+    def reload( self, trans, id, **kwd ):
+        """
+        GET /api/tools/{tool_id}/reload
+        Reload specified tool.
+        """
+        galaxy.queue_worker.send_control_task( trans.app, 'reload_tool', noop_self=True, kwargs={ 'tool_id': id } )
+        message, status = trans.app.toolbox.reload_tool_by_id( id )
+        return { status: message }
+
+    @expose_api
+    @web.require_admin
+    def all_requirements(self, trans, **kwds):
+        """
+        GET /api/tools/all_requirements
+        Return list of unique requirements for all tools.
+        """
+
+        return trans.app.toolbox.all_requirements
+
+    @expose_api
+    @web.require_admin
+    def requirements(self, trans, id, **kwds):
+        """
+        GET /api/tools/{tool_id}/requirements
+        Return the resolver status for a specific tool id.
+        [{"status": "installed", "name": "hisat2", "versionless": false, "resolver_type": "conda", "version": "2.0.3", "type": "package"}]
+        """
+        tool = self._get_tool(id)
+        return tool.tool_requirements_status
+
+    @expose_api
+    @web.require_admin
+    def install_dependencies(self, trans, id, **kwds):
+        """
+        POST /api/tools/{tool_id}/install_dependencies
+        Attempts to install requirements via the dependency resolver
+
+        parameters:
+            build_dependency_cache:  If true, attempts to cache dependencies for this tool
+            force_rebuild:           If true and the cache dir exists, attempts to delete the cache dir
+        """
+        tool = self._get_tool(id)
+        for req in tool.requirements:
+            tool._view.install_dependency(id=None, **req.to_dict())
+        if kwds.get('build_dependency_cache'):
+            tool.build_dependency_cache(**kwds)
+        # TODO: rework resolver install system to log and report what has been done.
+        # _view.install_dependency should return a dict with stdout, stderr and success status
+        return tool.tool_requirements_status
+
+    @expose_api
+    @web.require_admin
+    def build_dependency_cache(self, trans, id, **kwds):
+        """
+        POST /api/tools/{tool_id}/build_dependency_cache
+        Attempts to cache installed dependencies.
+
+        parameters:
+            force_rebuild:           If true and the cache dir exists, attempts to delete the cache dir
+        """
+        tool = self._get_tool(id)
+        tool.build_dependency_cache(**kwds)
+        # TODO: Should also have a more meaningful return.
+        return tool.tool_requirements_status
+
+    @expose_api
+    @web.require_admin
+    def diagnostics( self, trans, id, **kwd ):
+        """
+        GET /api/tools/{tool_id}/diagnostics
+        Return diagnostic information to help debug panel
+        and dependency-related problems.
+        """
+        # TODO: Move this into tool.
+        def to_dict(x):
+            return x.to_dict()
+
+        tool = self._get_tool( id, user=trans.user )
+        if hasattr( tool, 'lineage' ):
+            lineage_dict = tool.lineage.to_dict()
+        else:
+            lineage_dict = None
+        tool_shed_dependencies = tool.installed_tool_dependencies
+        if tool_shed_dependencies:
+            tool_shed_dependencies_dict = map(to_dict, tool_shed_dependencies)
+        else:
+            tool_shed_dependencies_dict = None
+        return {
+            "tool_id": tool.id,
+            "tool_version": tool.version,
+            "dependency_shell_commands": tool.build_dependency_shell_commands(),
+            "lineage": lineage_dict,
+            "requirements": map(to_dict, tool.requirements),
+            "installed_tool_shed_dependencies": tool_shed_dependencies_dict,
+            "tool_dir": tool.tool_dir,
+            "tool_shed": tool.tool_shed,
+            "repository_name": tool.repository_name,
+            "repository_owner": tool.repository_owner,
+            "installed_changeset_revision": None,
+            "guid": tool.guid,
+        }
+
+    def _detect( self, trans, tool_id ):
+        """
+        Detect whether the tool with the given id is installed.
+
+        :param tool_id: exact id of the tool
+        :type tool_id:  str
+
+        :return:      list of available versions
+        :rtype:       list
+        """
+        tools = self.app.toolbox.get_tool( tool_id, get_all_versions=True )
+        detected_versions = []
+        if tools:
+            for tool in tools:
+                if tool and tool.allow_user_access( trans.user ):
+                    detected_versions.append( tool.version )
+        return detected_versions
+
+    def _search( self, q ):
+        """
+        Perform the search on the given query.
+        Boosts and the number of results are configurable in the galaxy.ini file.
+
+        :param q: the query to search with
+        :type  q: str
+
+        :return:      the ids of the tools that best match the query
+        :rtype:       list
+        """
+        tool_name_boost = self.app.config.get( 'tool_name_boost', 9 )
+        tool_section_boost = self.app.config.get( 'tool_section_boost', 3 )
+        tool_description_boost = self.app.config.get( 'tool_description_boost', 2 )
+        tool_label_boost = self.app.config.get( 'tool_label_boost', 1 )
+        tool_stub_boost = self.app.config.get( 'tool_stub_boost', 5 )
+        tool_help_boost = self.app.config.get( 'tool_help_boost', 0.5 )
+        tool_search_limit = self.app.config.get( 'tool_search_limit', 20 )
+
+        results = self.app.toolbox_search.search( q=q,
+                                                  tool_name_boost=tool_name_boost,
+                                                  tool_section_boost=tool_section_boost,
+                                                  tool_description_boost=tool_description_boost,
+                                                  tool_label_boost=tool_label_boost,
+                                                  tool_stub_boost=tool_stub_boost,
+                                                  tool_help_boost=tool_help_boost,
+                                                  tool_search_limit=tool_search_limit )
+        return results
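+
+    # The boosts read above correspond to settings in galaxy.ini; an
+    # illustrative snippet (a sketch; the values shown are the defaults this
+    # method falls back to):
+    #
+    #     [app:main]
+    #     tool_name_boost = 9
+    #     tool_section_boost = 3
+    #     tool_description_boost = 2
+    #     tool_search_limit = 20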
+
+    @expose_api_anonymous_and_sessionless
+    def citations( self, trans, id, **kwds ):
+        tool = self._get_tool( id, user=trans.user )
+        rval = []
+        for citation in tool.citations:
+            rval.append( citation.to_dict( 'bibtex' ) )
+        return rval
+
+    @web.expose_api_raw
+    @web.require_admin
+    def download( self, trans, id, **kwds ):
+        tool_tarball = trans.app.toolbox.package_tool(trans, id)
+        trans.response.set_content_type('application/x-gzip')
+        download_file = open(tool_tarball, "rb")
+        trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.tgz"' % (id)
+        return download_file
+
+    @expose_api_anonymous
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/tools
+        Executes tool using specified inputs and returns tool's outputs.
+        """
+        # HACK: for now, if action is rerun, rerun tool.
+        action = payload.get( 'action', None )
+        if action == 'rerun':
+            return self._rerun_tool( trans, payload, **kwd )
+
+        # -- Execute tool. --
+
+        # Get tool.
+        tool_version = payload.get( 'tool_version', None )
+        tool = trans.app.toolbox.get_tool( payload[ 'tool_id' ], tool_version ) if 'tool_id' in payload else None
+        if not tool or not tool.allow_user_access( trans.user ):
+            raise exceptions.MessageException( 'Tool not found or not accessible.' )
+        if trans.app.config.user_activation_on:
+            if not trans.user:
+                log.warning( "Anonymous user attempts to execute tool, but account activation is turned on." )
+            elif not trans.user.active:
+                log.warning( "User \"%s\" attempts to execute tool, but account activation is turned on and user account is not active." % trans.user.email )
+
+        # Set running history from payload parameters.
+        # History not set correctly as part of this API call for
+        # dataset upload.
+        history_id = payload.get('history_id', None)
+        if history_id:
+            decoded_id = self.decode_id( history_id )
+            target_history = self.history_manager.get_owned( decoded_id, trans.user, current_history=trans.history )
+        else:
+            target_history = None
+
+        # Set up inputs.
+        inputs = payload.get( 'inputs', {} )
+        # Find files coming in as multipart file data and add to inputs.
+        for k, v in payload.iteritems():
+            if k.startswith('files_') or k.startswith('__files_'):
+                inputs[k] = v
+
+        # for inputs that are coming from the Library, copy them into the history
+        input_patch = {}
+        for k, v in inputs.iteritems():
+            if isinstance(v, dict) and v.get('src', '') == 'ldda' and 'id' in v:
+                ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( self.decode_id(v['id']) )
+                if trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset ):
+                    input_patch[k] = ldda.to_history_dataset_association(target_history, add_to_history=True)
+
+        for k, v in input_patch.iteritems():
+            inputs[k] = v
+
+        # TODO: encode data ids and decode ids.
+        # TODO: handle dbkeys
+        params = util.Params( inputs, sanitize=False )
+        incoming = params.__dict__
+        vars = tool.handle_input( trans, incoming, history=target_history )
+
+        # TODO: check for errors and ensure that output dataset(s) are available.
+        output_datasets = vars.get( 'out_data', [] )
+        rval = { 'outputs': [], 'output_collections': [], 'jobs': [], 'implicit_collections': [] }
+
+        job_errors = vars.get( 'job_errors', [] )
+        if job_errors:
+            # If we are here - some jobs were successfully executed but some failed.
+            rval[ 'errors' ] = job_errors
+
+        outputs = rval[ 'outputs' ]
+        # TODO:?? poss. only return ids?
+        for output_name, output in output_datasets:
+            output_dict = output.to_dict()
+            # add the output name back into the output data structure
+            # so it's possible to figure out which newly created elements
+            # correspond with which tool file outputs
+            output_dict[ 'output_name' ] = output_name
+            outputs.append( trans.security.encode_dict_ids( output_dict, skip_startswith="metadata_" ) )
+
+        for job in vars.get('jobs', []):
+            rval[ 'jobs' ].append( self.encode_all_ids( trans, job.to_dict( view='collection' ), recursive=True ) )
+
+        for output_name, collection_instance in vars.get('output_collections', []):
+            history = target_history or trans.history
+            output_dict = dictify_dataset_collection_instance( collection_instance, security=trans.security, parent=history )
+            output_dict[ 'output_name' ] = output_name
+            rval[ 'output_collections' ].append( output_dict )
+
+        for output_name, collection_instance in vars.get( 'implicit_collections', {} ).iteritems():
+            history = target_history or trans.history
+            output_dict = dictify_dataset_collection_instance( collection_instance, security=trans.security, parent=history )
+            output_dict[ 'output_name' ] = output_name
+            rval[ 'implicit_collections' ].append( output_dict )
+
+        return rval
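+
+    # An illustrative execution request (a sketch, not part of the upstream
+    # source); dataset inputs are referenced with { 'src': ..., 'id': ... } dicts:
+    #
+    #     requests.post( 'http://localhost:8080/api/tools',
+    #                    params={ 'key': API_KEY },
+    #                    json={ 'tool_id': 'cat1',
+    #                           'history_id': ENCODED_HISTORY_ID,
+    #                           'inputs': { 'input1': { 'src': 'hda', 'id': ENCODED_DATASET_ID } } } )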
+
+    #
+    # -- Helper methods --
+    #
+    def _get_tool( self, id, tool_version=None, user=None ):
+        id = urllib.unquote_plus( id )
+        tool = self.app.toolbox.get_tool( id, tool_version )
+        if not tool:
+            raise exceptions.ObjectNotFound( "Could not find tool with id '%s'." % id )
+        if not tool.allow_user_access( user ):
+            raise exceptions.AuthenticationFailed( "Access denied, please login for tool with id '%s'." % id )
+        return tool
+
+    def _rerun_tool( self, trans, payload, **kwargs ):
+        """
+        Rerun a tool to produce a new output dataset that corresponds to a
+        dataset that a user is currently viewing.
+        """
+
+        #
+        # TODO: refactor to use same code as run_tool.
+        #
+
+        # Run the tool on a region if a region is specified.
+        run_on_regions = False
+        regions = payload.get( 'regions', None )
+        if regions:
+            if isinstance( regions, dict ):
+                # Regions is a single region.
+                regions = [ GenomeRegion.from_dict( regions ) ]
+            elif isinstance( regions, list ):
+                # There is a list of regions.
+                regions = [ GenomeRegion.from_dict( r ) for r in regions ]
+
+                if len( regions ) > 1:
+                    # Sort by chromosome name and start position so that data is not fetched out of order.
+                    regions = sorted(regions, key=lambda r: (r.chrom.lower(), r.start))
+
+                    # Merge overlapping regions so that regions do not overlap
+                    # and hence data is not included multiple times.
+                    prev = regions[0]
+                    cur = regions[1]
+                    index = 1
+                    while True:
+                        if cur.chrom == prev.chrom and cur.start <= prev.end:
+                            # Found overlapping regions, so join them into prev.
+                            prev.end = cur.end
+                            del regions[ index ]
+                        else:
+                            # No overlap, move to next region.
+                            prev = cur
+                            index += 1
+
+                        # Get next region or exit.
+                        if index == len( regions ):
+                            # Done.
+                            break
+                        else:
+                            cur = regions[ index ]
+
+            run_on_regions = True
+
+        # Dataset check.
+        decoded_dataset_id = self.decode_id( payload.get( 'target_dataset_id' ) )
+        original_dataset = self.hda_manager.get_accessible( decoded_dataset_id, user=trans.user )
+        original_dataset = self.hda_manager.error_if_uploading( original_dataset )
+        msg = self.hda_manager.data_conversion_status( original_dataset )
+        if msg:
+            return msg
+
+        # Set tool parameters--except non-hidden dataset parameters--using a combination of
+        # the job's previous parameters and incoming parameters. Incoming parameters
+        # have priority.
+        #
+        original_job = self.hda_manager.creating_job( original_dataset )
+        tool = trans.app.toolbox.get_tool( original_job.tool_id )
+        if not tool or not tool.allow_user_access( trans.user ):
+            return trans.app.model.Dataset.conversion_messages.NO_TOOL
+        tool_params = dict( [ ( p.name, p.value ) for p in original_job.parameters ] )
+
+        # TODO: rather than set new inputs using dict of json'ed value, unpack parameters and set using set_param_value below.
+        # TODO: need to handle updates to conditional parameters; conditional
+        # params are stored in dicts (and dicts within dicts).
+        new_inputs = payload[ 'inputs' ]
+        tool_params.update( dict( [ ( key, dumps( value ) ) for key, value in new_inputs.items() if key in tool.inputs and new_inputs[ key ] is not None ] ) )
+        tool_params = tool.params_from_strings( tool_params, self.app )
+
+        #
+        # If running the tool on a region, convert input datasets (create indices) so
+        # that regions of data can be extracted quickly.
+        #
+        data_provider_registry = trans.app.data_provider_registry
+        messages_list = []
+        if run_on_regions:
+            for jida in original_job.input_datasets:
+                input_dataset = jida.dataset
+                data_provider = data_provider_registry.get_data_provider( trans, original_dataset=input_dataset, source='data' )
+                if data_provider and ( not data_provider.converted_dataset or
+                                       data_provider.converted_dataset.state != trans.app.model.Dataset.states.OK ):
+                    # Can convert but no converted dataset yet, so return message about why.
+                    data_sources = input_dataset.datatype.data_sources
+                    msg = input_dataset.convert_dataset( trans, data_sources[ 'data' ] )
+                    if msg is not None:
+                        messages_list.append( msg )
+
+        # Return any messages generated during conversions.
+        return_message = self._get_highest_priority_msg( messages_list )
+        if return_message:
+            return return_message
+
+        #
+        # Set target history (the history that tool will use for inputs/outputs).
+        # If user owns dataset, put new data in original dataset's history; if
+        # user does not own dataset (and hence is accessing dataset via sharing),
+        # put new data in user's current history.
+        #
+        if original_dataset.history.user == trans.user:
+            target_history = original_dataset.history
+        else:
+            target_history = trans.get_history( create=True )
+        hda_permissions = trans.app.security_agent.history_get_default_permissions( target_history )
+
+        def set_param_value( param_dict, param_name, param_value ):
+            """
+            Set new parameter value in a tool's parameter dictionary.
+            """
+
+            # Recursive function to set param value.
+            def set_value( param_dict, group_name, group_index, param_name, param_value ):
+                if group_name in param_dict:
+                    param_dict[ group_name ][ group_index ][ param_name ] = param_value
+                    return True
+                elif param_name in param_dict:
+                    param_dict[ param_name ] = param_value
+                    return True
+                else:
+                    # Recursive search.
+                    return_val = False
+                    for value in param_dict.values():
+                        if isinstance( value, dict ):
+                            return_val = set_value( value, group_name, group_index, param_name, param_value)
+                            if return_val:
+                                return return_val
+                    return False
+
+            # Parse parameter name if necessary.
+            if param_name.find( "|" ) == -1:
+                # Non-grouping parameter.
+                group_name = group_index = None
+            else:
+                # Grouping parameter.
+                group, param_name = param_name.split( "|" )
+                index = group.rfind( "_" )
+                group_name = group[ :index ]
+                group_index = int( group[ index + 1: ] )
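+                # e.g. "conditional_2|param" yields group_name="conditional",
+                # group_index=2 and param_name="param".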
+
+            return set_value( param_dict, group_name, group_index, param_name, param_value )
+
+        # Set parameters based on the tool's trackster config.
+        params_set = {}
+        for action in tool.trackster_conf.actions:
+            success = False
+            for joda in original_job.output_datasets:
+                if joda.name == action.output_name:
+                    set_param_value( tool_params, action.name, joda.dataset )
+                    params_set[ action.name ] = True
+                    success = True
+                    break
+
+            if not success:
+                return trans.app.model.Dataset.conversion_messages.ERROR
+
+        #
+        # Set input datasets for tool. If running on regions, extract and use subset
+        # when possible.
+        #
+        if run_on_regions:
+            regions_str = ",".join( [ str( r ) for r in regions ] )
+        for jida in original_job.input_datasets:
+            # If param set previously by config actions, do nothing.
+            if jida.name in params_set:
+                continue
+
+            input_dataset = jida.dataset
+            if input_dataset is None:  # optional dataset and dataset wasn't selected
+                tool_params[ jida.name ] = None
+            elif run_on_regions and 'data' in input_dataset.datatype.data_sources:
+                # Dataset is indexed and hence a subset can be extracted and used
+                # as input.
+
+                # Look for subset.
+                subset_dataset_association = trans.sa_session.query( trans.app.model.HistoryDatasetAssociationSubset ) \
+                                                             .filter_by( hda=input_dataset, location=regions_str ) \
+                                                             .first()
+                if subset_dataset_association:
+                    # Data subset exists.
+                    subset_dataset = subset_dataset_association.subset
+                else:
+                    # Need to create subset.
+                    data_source = input_dataset.datatype.data_sources[ 'data' ]
+                    input_dataset.get_converted_dataset( trans, data_source )
+                    input_dataset.get_converted_dataset_deps( trans, data_source )
+
+                    # Create new HDA for input dataset's subset.
+                    new_dataset = trans.app.model.HistoryDatasetAssociation( extension=input_dataset.ext,
+                                                                             dbkey=input_dataset.dbkey,
+                                                                             create_dataset=True,
+                                                                             sa_session=trans.sa_session,
+                                                                             name="Subset [%s] of data %i" %
+                                                                                  ( regions_str, input_dataset.hid ),
+                                                                             visible=False )
+                    target_history.add_dataset( new_dataset )
+                    trans.sa_session.add( new_dataset )
+                    trans.app.security_agent.set_all_dataset_permissions( new_dataset.dataset, hda_permissions )
+
+                    # Write subset of data to new dataset
+                    data_provider = data_provider_registry.get_data_provider( trans, original_dataset=input_dataset, source='data' )
+                    trans.app.object_store.create( new_dataset.dataset )
+                    data_provider.write_data_to_file( regions, new_dataset.file_name )
+
+                    # TODO: (a) size not working; (b) need to set peek.
+                    new_dataset.set_size()
+                    new_dataset.info = "Data subset for trackster"
+                    new_dataset.set_dataset_state( trans.app.model.Dataset.states.OK )
+
+                    # Set metadata.
+                    # TODO: set meta internally if dataset is small enough?
+                    trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
+                                                                                                 trans, incoming={ 'input1': new_dataset },
+                                                                                                 overwrite=False, job_params={ "source": "trackster" } )
+                    # Add HDA subset association.
+                    subset_association = trans.app.model.HistoryDatasetAssociationSubset( hda=input_dataset, subset=new_dataset, location=regions_str )
+                    trans.sa_session.add( subset_association )
+
+                    subset_dataset = new_dataset
+
+                trans.sa_session.flush()
+
+                # Add dataset to tool's parameters.
+                if not set_param_value( tool_params, jida.name, subset_dataset ):
+                    return { "error": True, "message": "error setting parameter %s" % jida.name }
+
+        #
+        # Execute tool and handle outputs.
+        #
+        try:
+            subset_job, subset_job_outputs = tool.execute( trans, incoming=tool_params,
+                                                           history=target_history,
+                                                           job_params={ "source": "trackster" } )
+        except Exception as e:
+            # Lots of things can go wrong when trying to execute tool.
+            return { "error": True, "message": e.__class__.__name__ + ": " + str(e) }
+        if run_on_regions:
+            for output in subset_job_outputs.values():
+                output.visible = False
+            trans.sa_session.flush()
+
+        #
+        # Return new track that corresponds to the original dataset.
+        #
+        output_name = None
+        for joda in original_job.output_datasets:
+            if joda.dataset == original_dataset:
+                output_name = joda.name
+                break
+        for joda in subset_job.output_datasets:
+            if joda.name == output_name:
+                output_dataset = joda.dataset
+
+        dataset_dict = output_dataset.to_dict()
+        dataset_dict[ 'id' ] = trans.security.encode_id( dataset_dict[ 'id' ] )
+        dataset_dict[ 'track_config' ] = self.get_new_track_config( trans, output_dataset )
+        return dataset_dict
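
The grouped-parameter convention handled by set_param_value() above can be
exercised on its own. The following self-contained sketch (all parameter
names and values are hypothetical, not taken from Galaxy) mirrors the
recursive search that set_value() performs over a nested tool-state
dictionary:

    # Recursively find and set a (possibly grouped) parameter, as in
    # set_param_value() above; names and values here are hypothetical.
    def set_value(param_dict, group_name, group_index, param_name, param_value):
        if group_name in param_dict:
            param_dict[group_name][group_index][param_name] = param_value
            return True
        elif param_name in param_dict:
            param_dict[param_name] = param_value
            return True
        for value in param_dict.values():
            if isinstance(value, dict):
                if set_value(value, group_name, group_index, param_name, param_value):
                    return True
        return False

    params = {'cond': {'queries': [{'input2': None}]}}
    # "queries_0|input2" would parse to group "queries", index 0, name "input2".
    assert set_value(params, 'queries', 0, 'input2', 'dataset_42')
    assert params['cond']['queries'][0]['input2'] == 'dataset_42'
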
diff --git a/lib/galaxy/webapps/galaxy/api/tours.py b/lib/galaxy/webapps/galaxy/api/tours.py
new file mode 100644
index 0000000..c72e6c8
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/tours.py
@@ -0,0 +1,50 @@
+"""
+API Controller providing Galaxy Tours
+"""
+import logging
+
+from galaxy.web import (
+    _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless,
+    expose_api,
+    require_admin
+)
+from galaxy.web.base.controller import BaseAPIController
+
+log = logging.getLogger( __name__ )
+
+
+class ToursController( BaseAPIController ):
+
+    def __init__( self, app ):
+        super( ToursController, self ).__init__( app )
+
+    @expose_api_anonymous_and_sessionless
+    def index( self, trans, **kwd ):
+        """
+        *GET /api/tours/
+        Displays available tours
+        """
+        return self.app.tour_registry.tours_by_id_with_description()
+
+    @expose_api_anonymous_and_sessionless
+    def show( self, trans, tour_id, **kwd ):
+        """
+        * GET /api/tours/{tour_id}:
+            Read a YAML file containing the specified tour definition
+
+        :returns:   tour definition
+        :rtype:     dictionary
+        """
+        return self.app.tour_registry.tour_contents(tour_id)
+
+    @expose_api
+    @require_admin
+    def update_tour( self, trans, tour_id, **kwd ):
+        """
+        This simply reloads tours right now.  It's a quick hack.
+
+        TODO: allow creation of new tours (which get written to the
+        filesystem).
+        """
+        return self.app.tour_registry.load_tour(tour_id)
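
A minimal sketch of exercising the tours endpoints over HTTP (the base URL
and tour id are assumptions, and the requests library is not part of this
diff):

    import requests

    base = 'http://localhost:8080'
    # List available tours; anonymous, sessionless access is allowed.
    print(requests.get(base + '/api/tours').json())
    # Fetch one tour definition; 'core.galaxy_ui' is a hypothetical id.
    print(requests.get(base + '/api/tours/core.galaxy_ui').json())
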
diff --git a/lib/galaxy/webapps/galaxy/api/users.py b/lib/galaxy/webapps/galaxy/api/users.py
new file mode 100644
index 0000000..382cd28
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/users.py
@@ -0,0 +1,242 @@
+"""
+API operations on User objects.
+"""
+
+import logging
+
+from sqlalchemy import false, true, or_
+
+from galaxy import exceptions, util, web
+from galaxy.managers import users
+from galaxy.security.validate_user_input import validate_email
+from galaxy.security.validate_user_input import validate_password
+from galaxy.security.validate_user_input import validate_publicname
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import CreatesApiKeysMixin
+from galaxy.web.base.controller import CreatesUsersMixin
+from galaxy.web.base.controller import UsesTagsMixin
+
+log = logging.getLogger( __name__ )
+
+
+class UserAPIController( BaseAPIController, UsesTagsMixin, CreatesUsersMixin, CreatesApiKeysMixin ):
+
+    def __init__(self, app):
+        super(UserAPIController, self).__init__(app)
+        self.user_manager = users.UserManager(app)
+        self.user_serializer = users.UserSerializer( app )
+        self.user_deserializer = users.UserDeserializer( app )
+
+    @expose_api
+    def index( self, trans, deleted='False', f_email=None, f_name=None, f_any=None, **kwd ):
+        """
+        GET /api/users
+        GET /api/users/deleted
+        Displays a collection (list) of users.
+
+        :param deleted: (optional) If true, show deleted users
+        :type  deleted: bool
+
+        :param f_email: (optional) An email address to filter on. (Non-admin
+                        users can only use this if ``expose_user_email`` is ``True`` in
+                        galaxy.ini)
+        :type  f_email: str
+
+        :param f_name: (optional) A username to filter on. (Non-admin users
+                       can only use this if ``expose_user_name`` is ``True`` in
+                       galaxy.ini)
+        :type  f_name: str
+
+        :param f_any: (optional) Filter on username OR email. (Non-admin users
+                       can use this, the email filter and username filter will
+                       only be active if their corresponding ``expose_user_*`` is
+                       ``True`` in galaxy.ini)
+        :type  f_any: str
+        """
+        rval = []
+        query = trans.sa_session.query( trans.app.model.User )
+        deleted = util.string_as_bool( deleted )
+
+        if f_email and (trans.user_is_admin() or trans.app.config.expose_user_email):
+            query = query.filter( trans.app.model.User.email.like("%%%s%%" % f_email) )
+
+        if f_name and (trans.user_is_admin() or trans.app.config.expose_user_name):
+            query = query.filter( trans.app.model.User.username.like("%%%s%%" % f_name) )
+
+        if f_any:
+            if trans.user_is_admin():
+                query = query.filter(or_(
+                    trans.app.model.User.email.like("%%%s%%" % f_any),
+                    trans.app.model.User.username.like("%%%s%%" % f_any)
+                ))
+            else:
+                if trans.app.config.expose_user_email and trans.app.config.expose_user_name:
+                    query = query.filter(or_(
+                        trans.app.model.User.email.like("%%%s%%" % f_any),
+                        trans.app.model.User.username.like("%%%s%%" % f_any)
+                    ))
+                elif trans.app.config.expose_user_email:
+                    query = query.filter( trans.app.model.User.email.like("%%%s%%" % f_any) )
+                elif trans.app.config.expose_user_name:
+                    query = query.filter( trans.app.model.User.username.like("%%%s%%" % f_any) )
+
+        if deleted:
+            query = query.filter( trans.app.model.User.table.c.deleted == true() )
+            # only admins can see deleted users
+            if not trans.user_is_admin():
+                return []
+        else:
+            query = query.filter( trans.app.model.User.table.c.deleted == false() )
+            # special case #1: a user can see only their own user
+            # special case #2: if the Galaxy admin has exposed other users'
+            #   emails/names, special case #1 does not apply
+            if not trans.user_is_admin() and not trans.app.config.expose_user_name and not trans.app.config.expose_user_email:
+                item = trans.user.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+                return [item]
+        for user in query:
+            item = user.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+            # If NOT configured to expose usernames/emails, do not expose them
+            # UNLESS the user is self, or the user is an admin
+            if not trans.app.config.expose_user_name and user is not trans.user and not trans.user_is_admin():
+                del item['username']
+            if not trans.app.config.expose_user_email and user is not trans.user and not trans.user_is_admin():
+                del item['email']
+            # TODO: move into api_values
+            rval.append( item )
+        return rval
+
+    @expose_api_anonymous
+    def show( self, trans, id, deleted='False', **kwd ):
+        """
+        GET /api/users/{encoded_user_id}
+        GET /api/users/deleted/{encoded_user_id}
+        GET /api/users/current
+        Displays information about a user.
+        """
+        deleted = util.string_as_bool( deleted )
+        try:
+            # user is requesting data about themselves
+            if id == "current":
+                # ...and is anonymous - return usage and quota (if any)
+                if not trans.user:
+                    item = self.anon_user_api_value( trans )
+                    return item
+
+                # ...and is logged in - return full details
+                else:
+                    user = trans.user
+            else:
+                user = self.get_user( trans, id, deleted=deleted )
+            # check that the user is requesting themselves (and they aren't del'd) unless admin
+            if not trans.user_is_admin():
+                assert trans.user == user
+                assert not user.deleted
+        except Exception:
+            raise exceptions.RequestParameterInvalidException( 'Invalid user id specified', id=id )
+        return self.user_serializer.serialize_to_view(user, view='detailed')
+
+    @expose_api
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/users
+        Creates a new Galaxy user.
+        """
+        if not trans.app.config.allow_user_creation and not trans.user_is_admin():
+            raise exceptions.ConfigDoesNotAllowException( 'User creation is not allowed in this Galaxy instance' )
+        if trans.app.config.use_remote_user and trans.user_is_admin():
+            user = trans.get_or_create_remote_user( remote_user_email=payload['remote_user_email'] )
+        elif trans.user_is_admin():
+            username = payload[ 'username' ]
+            email = payload[ 'email' ]
+            password = payload[ 'password' ]
+            message = "\n".join( [ validate_email( trans, email ),
+                                   validate_password( trans, password, password ),
+                                   validate_publicname( trans, username ) ] ).rstrip()
+            if message:
+                raise exceptions.RequestParameterInvalidException( message )
+            else:
+                user = self.create_user( trans=trans, email=email, username=username, password=password )
+        else:
+            raise exceptions.NotImplemented()
+        item = user.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id,
+                                                            'total_disk_usage': float } )
+        return item
+
+    @expose_api
+    @web.require_admin
+    def api_key( self, trans, user_id, **kwd ):
+        """
+        POST /api/users/{encoded_user_id}/api_key
+        Creates a new API key for specified user.
+        """
+        user = self.get_user( trans, user_id )
+        key = self.create_api_key( trans, user )
+        return key
+
+    @expose_api
+    def update( self, trans, id, payload, **kwd ):
+        """
+        update( self, trans, id, payload, **kwd )
+        * PUT /api/users/{id}
+            updates the values for the item with the given ``id``
+
+        :type id: str
+        :param id: the encoded id of the item to update
+        :type payload: dict
+        :param payload: a dictionary of new attribute values
+
+        :rtype: dict
+        :returns: an error object if an error occurred or a dictionary containing
+            the serialized item after any changes
+        """
+        current_user = trans.user
+        user_to_update = self.user_manager.by_id( self.decode_id( id ) )
+
+        # only allow updating other users if they're admin
+        editing_someone_else = current_user != user_to_update
+        is_admin = trans.api_inherit_admin or self.user_manager.is_admin( current_user )
+        if editing_someone_else and not is_admin:
+            raise exceptions.InsufficientPermissionsException( 'you are not allowed to update that user', id=id )
+
+        self.user_deserializer.deserialize( user_to_update, payload, user=current_user, trans=trans )
+        return self.user_serializer.serialize_to_view( user_to_update, view='detailed' )
+
+    @expose_api
+    @web.require_admin
+    def delete( self, trans, id, **kwd ):
+        """
+        DELETE /api/users/{id}
+        delete the user with the given ``id``
+
+        :param id: the encoded id of the user to delete
+        :type  id: str
+
+        :param purge: (optional) if True, purge the user
+        :type  purge: bool
+        """
+        if not trans.app.config.allow_user_deletion:
+            raise exceptions.ConfigDoesNotAllowException( 'The configuration of this Galaxy instance does not allow admins to delete users.' )
+        purge = util.string_as_bool(kwd.get('purge', False))
+        if purge:
+            raise exceptions.NotImplemented('Purge option has not been implemented yet')
+        user = self.get_user(trans, id)
+        self.user_manager.delete(user)
+        return self.user_serializer.serialize_to_view(user, view='detailed')
+
+    @expose_api
+    @web.require_admin
+    def undelete( self, trans, **kwd ):
+        raise exceptions.NotImplemented()
+
+    # TODO: move to more basal, common resource than this
+    def anon_user_api_value( self, trans ):
+        """
+        Returns data for an anonymous user, truncated to only usage and quota_percent
+        """
+        usage = trans.app.quota_agent.get_usage( trans )
+        percent = trans.app.quota_agent.get_percent( trans=trans, usage=usage )
+        return {'total_disk_usage': int( usage ),
+                'nice_total_disk_usage': util.nice_size( usage ),
+                'quota_percent': percent}
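
A sketch of driving the users API over HTTP (the base URL and API key are
hypothetical placeholders; requests is assumed to be available):

    import requests

    base = 'http://localhost:8080/api/users'
    auth = {'key': 'HYPOTHETICAL_API_KEY'}

    # Filter users on an email substring; non-admins need
    # expose_user_email = True in galaxy.ini for this filter to apply.
    print(requests.get(base, params=dict(auth, f_email='example.org')).json())

    # Anonymous GET of /api/users/current returns only usage and quota.
    print(requests.get(base + '/current').json())
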
diff --git a/lib/galaxy/webapps/galaxy/api/visualizations.py b/lib/galaxy/webapps/galaxy/api/visualizations.py
new file mode 100644
index 0000000..90ee824
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/visualizations.py
@@ -0,0 +1,206 @@
+"""
+Visualizations resource control over the API.
+
+NOTE!: this is a work in progress; functionality and data structures
+may change often.
+"""
+from six import string_types
+
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesVisualizationMixin
+from galaxy.web.base.controller import SharableMixin
+from galaxy.model.item_attrs import UsesAnnotations
+
+from galaxy.web import _future_expose_api as expose_api
+from galaxy import web
+from galaxy import util
+from galaxy import exceptions
+import json
+import logging
+log = logging.getLogger( __name__ )
+
+
+class VisualizationsController( BaseAPIController, UsesVisualizationMixin, SharableMixin, UsesAnnotations ):
+    """
+    RESTful controller for interactions with visualizations.
+    """
+
+    @expose_api
+    def index( self, trans, **kwargs ):
+        """
+        GET /api/visualizations:
+        """
+        rval = []
+        user = trans.user
+
+        # TODO: search for: title, made by user, creation time range, type (vis name), dbkey, etc.
+        # TODO: limit, offset, order_by
+        # TODO: deleted
+
+        # this is the default search - user's vis, vis shared with user, published vis
+        visualizations = self.get_visualizations_by_user( trans, user )
+        visualizations += self.get_visualizations_shared_with_user( trans, user )
+        visualizations += self.get_published_visualizations( trans, exclude_user=user )
+        # TODO: the admin case - everything
+
+        for visualization in visualizations:
+            item = self.get_visualization_summary_dict( visualization )
+            item = trans.security.encode_dict_ids( item )
+            item[ 'url' ] = web.url_for( 'visualization', id=item[ 'id' ] )
+            rval.append( item )
+
+        return rval
+
+    @expose_api
+    def show( self, trans, id, **kwargs ):
+        """
+        GET /api/visualizations/{viz_id}
+        """
+        # TODO: revisions should be a contents/nested controller like viz/xxx/r/xxx)?
+        # the important thing is the config
+        rval = {}
+        # TODO:?? /api/visualizations/registry -> json of registry.listings?
+
+        visualization = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+        dictionary = trans.security.encode_dict_ids( self.get_visualization_dict( visualization ) )
+        dictionary[ 'url' ] = web.url_for( controller='visualization',
+                                           action="display_by_username_and_slug", username=visualization.user.username, slug=visualization.slug )
+        dictionary[ 'annotation' ] = self.get_item_annotation_str( trans.sa_session, trans.user, visualization )
+
+        # need to encode ids in revisions as well
+        encoded_revisions = []
+        for revision in dictionary[ 'revisions' ]:
+            # NOTE: does not encode ids inside the configs
+            encoded_revisions.append( trans.security.encode_id( revision ) )
+        dictionary[ 'revisions' ] = encoded_revisions
+        dictionary[ 'latest_revision' ] = trans.security.encode_dict_ids( dictionary[ 'latest_revision' ] )
+
+        rval = dictionary
+        return rval
+
+    @expose_api
+    def create( self, trans, payload, **kwargs ):
+        """
+        POST /api/visualizations
+        creates a new visualization using the given payload
+
+        POST /api/visualizations?import_id={encoded_visualization_id}
+        imports a copy of an existing visualization into the user's workspace
+        """
+        rval = None
+
+        if 'import_id' in payload:
+            import_id = payload[ 'import_id' ]
+            visualization = self.import_visualization( trans, import_id, user=trans.user )
+
+        else:
+            payload = self._validate_and_parse_payload( payload )
+            # must have a type (I've taken this to be the visualization name)
+            if 'type' not in payload:
+                raise exceptions.RequestParameterMissingException( "key/value 'type' is required" )
+            vis_type = payload.pop( 'type', False )
+
+            payload[ 'save' ] = True
+            try:
+                # generate defaults - this will err if given a weird key?
+                visualization = self.create_visualization( trans, vis_type, **payload )
+            except ValueError as val_err:
+                raise exceptions.RequestParameterMissingException( str( val_err ) )
+
+        rval = { 'id' : trans.security.encode_id( visualization.id ) }
+
+        return rval
+
+    @expose_api
+    def update( self, trans, id, payload, **kwargs ):
+        """
+        PUT /api/visualizations/{encoded_visualization_id}
+        """
+        rval = None
+
+        payload = self._validate_and_parse_payload( payload )
+
+        # there's a differentiation here between updating the visualization and creating a new revision
+        #   that needs to be handled clearly here
+        # or alternately, using a different controller like PUT /api/visualizations/{id}/r/{id}
+
+        # TODO: consider allowing direct alteration of a revision's title (without a new revision)
+        #   only create a new revision on a different config
+
+        # only update owned visualizations
+        visualization = self.get_visualization( trans, id, check_ownership=True )
+        title = payload.get( 'title', visualization.latest_revision.title )
+        dbkey = payload.get( 'dbkey', visualization.latest_revision.dbkey )
+        config = payload.get( 'config', visualization.latest_revision.config )
+
+        latest_config = visualization.latest_revision.config
+        if( ( title != visualization.latest_revision.title ) or
+                ( dbkey != visualization.latest_revision.dbkey ) or
+                ( json.dumps( config ) != json.dumps( latest_config ) ) ):
+            revision = self.add_visualization_revision( trans, visualization, config, title, dbkey )
+            rval = { 'id' : id, 'revision' : revision.id }
+
+        # allow updating vis title
+        visualization.title = title
+        trans.sa_session.flush()
+
+        return rval
+
+    def _validate_and_parse_payload( self, payload ):
+        """
+        Validate and parse incoming data payload for a visualization.
+        """
+        # This layer handles (most of) the stricter idiot proofing:
+        #   - unknown/unallowed keys
+        #   - changing data keys from api key to attribute name
+        #   - protection against bad data form/type
+        #   - protection against malicious data content
+        # all other conversions and processing (such as permissions, etc.) should happen down the line
+
+        # keys listed here don't error when attempting to set, but fail silently
+        #   this allows PUT'ing an entire model back to the server without attribute errors on uneditable attrs
+        valid_but_uneditable_keys = (
+            'id', 'model_class'
+            # TODO: fill out when we create to_dict, get_dict, whatevs
+        )
+        # TODO: deleted
+        # TODO: importable
+        ValidationError = exceptions.RequestParameterInvalidException
+
+        validated_payload = {}
+        for key, val in payload.items():
+            # TODO: validate types in VALID_TYPES/registry names at the mixin/model level?
+            if key == 'type':
+                if not isinstance( val, string_types ):
+                    raise ValidationError( '%s must be a string or unicode: %s' % ( key, str( type( val ) ) ) )
+                val = util.sanitize_html.sanitize_html( val, 'utf-8' )
+            elif key == 'config':
+                if not isinstance( val, dict ):
+                    raise ValidationError( '%s must be a dictionary: %s' % ( key, str( type( val ) ) ) )
+
+            elif key == 'annotation':
+                if not isinstance( val, string_types ):
+                    raise ValidationError( '%s must be a string or unicode: %s' % ( key, str( type( val ) ) ) )
+                val = util.sanitize_html.sanitize_html( val, 'utf-8' )
+
+            # these are keys that can actually only be *updated* at the revision level and not here
+            #   (they are still valid for create, though)
+            elif key == 'title':
+                if not isinstance( val, string_types ):
+                    raise ValidationError( '%s must be a string or unicode: %s' % ( key, str( type( val ) ) ) )
+                val = util.sanitize_html.sanitize_html( val, 'utf-8' )
+            elif key == 'slug':
+                if not isinstance( val, string_types ):
+                    raise ValidationError( '%s must be a string: %s' % ( key, str( type( val ) ) ) )
+                val = util.sanitize_html.sanitize_html( val, 'utf-8' )
+            elif key == 'dbkey':
+                if not isinstance( val, string_types ):
+                    raise ValidationError( '%s must be a string or unicode: %s' % ( key, str( type( val ) ) ) )
+                val = util.sanitize_html.sanitize_html( val, 'utf-8' )
+
+            elif key not in valid_but_uneditable_keys:
+                continue
+                # raise AttributeError( 'unknown key: %s' %( str( key ) ) )
+
+            validated_payload[ key ] = val
+        return validated_payload
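
A sketch of creating a visualization through this controller (URL, key and
payload values are hypothetical; per the validation above, 'type' is
required and 'config' must be a dictionary):

    import requests

    payload = {
        'type': 'trackster',    # visualization name; required by create()
        'title': 'My browser',
        'dbkey': 'hg19',
        'config': {},
    }
    r = requests.post('http://localhost:8080/api/visualizations',
                      params={'key': 'HYPOTHETICAL_API_KEY'},
                      json=payload)
    print(r.json())  # e.g. {'id': <encoded visualization id>}
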
diff --git a/lib/galaxy/webapps/galaxy/api/webhooks.py b/lib/galaxy/webapps/galaxy/api/webhooks.py
new file mode 100644
index 0000000..ec0bbda
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/webhooks.py
@@ -0,0 +1,71 @@
+"""
+API Controller providing Galaxy Webhooks
+"""
+
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as \
+    expose_api_anonymous_and_sessionless
+from galaxy.web.base.controller import BaseAPIController
+
+import logging
+import random
+import imp
+
+log = logging.getLogger(__name__)
+
+
+class WebhooksController(BaseAPIController):
+    def __init__(self, app):
+        super(WebhooksController, self).__init__(app)
+
+    @expose_api_anonymous_and_sessionless
+    def get_all(self, trans, **kwd):
+        """
+        *GET /api/webhooks/
+        Returns all webhooks
+        """
+        return [
+            webhook.to_dict()
+            for webhook in self.app.webhooks_registry.webhooks
+        ]
+
+    @expose_api_anonymous_and_sessionless
+    def get_random(self, trans, webhook_type, **kwd):
+        """
+        *GET /api/webhooks/{webhook_type}
+        Returns a random webhook for a given type
+        """
+        webhooks = [
+            webhook
+            for webhook in self.app.webhooks_registry.webhooks
+            if webhook_type in webhook.type and
+            webhook.activate is True
+        ]
+        return random.choice(webhooks).to_dict() if webhooks else {}
+
+    @expose_api_anonymous_and_sessionless
+    def get_all_by_type(self, trans, webhook_type, **kwd):
+        """
+        *GET /api/webhooks/{webhook_type}/all
+        Returns all webhooks for a given type
+        """
+        return [
+            webhook.to_dict()
+            for webhook in self.app.webhooks_registry.webhooks
+            if webhook_type in webhook.type
+        ]
+
+    @expose_api_anonymous_and_sessionless
+    def get_data(self, trans, webhook_name, **kwd):
+        """
+        *GET /api/webhooks/{webhook_name}/get_data
+        Returns the result of executing the webhook's helper function
+        """
+        webhook = [
+            webhook
+            for webhook in self.app.webhooks_registry.webhooks
+            if webhook.name == webhook_name
+        ]
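+        # Load the matched webhook's helper script as a module and call its
+        # main(); fall back to {} when no webhook matches or it has no helper.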
+        return imp.load_source('helper', webhook[0].helper).main(
+            trans,
+            webhook[0],
+        ) if webhook and webhook[0].helper != '' else {}
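
A sketch of the read-only webhook endpoints above (the base URL and the
'tool' webhook type are hypothetical):

    import requests

    base = 'http://localhost:8080/api/webhooks'
    print(requests.get(base).json())                # all webhooks
    print(requests.get(base + '/tool').json())      # one random 'tool' webhook
    print(requests.get(base + '/tool/all').json())  # all 'tool' webhooks
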
diff --git a/lib/galaxy/webapps/galaxy/api/workflows.py b/lib/galaxy/webapps/galaxy/api/workflows.py
new file mode 100644
index 0000000..a3ac111
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -0,0 +1,633 @@
+"""
+API operations for Workflows
+"""
+from __future__ import absolute_import
+
+import logging
+from six.moves.urllib.parse import unquote_plus
+
+from sqlalchemy import desc, false, or_, true
+
+from galaxy import (
+    exceptions,
+    model,
+    util
+)
+from galaxy.managers import (
+    histories,
+    workflows
+)
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web.base.controller import (
+    BaseAPIController,
+    SharableMixin,
+    url_for,
+    UsesStoredWorkflowMixin
+)
+from galaxy.workflow.extract import extract_workflow
+from galaxy.workflow.modules import module_factory
+from galaxy.workflow.run import invoke, queue_invoke
+from galaxy.workflow.run_request import build_workflow_run_configs
+
+log = logging.getLogger(__name__)
+
+
+class WorkflowsAPIController(BaseAPIController, UsesStoredWorkflowMixin, UsesAnnotations, SharableMixin):
+
+    def __init__( self, app ):
+        super( WorkflowsAPIController, self ).__init__( app )
+        self.history_manager = histories.HistoryManager( app )
+        self.workflow_manager = workflows.WorkflowsManager( app )
+        self.workflow_contents_manager = workflows.WorkflowContentsManager( app )
+
+    @expose_api
+    def index(self, trans, **kwd):
+        """
+        GET /api/workflows
+
+        Displays a collection of workflows.
+
+        :param  show_published:      if True, show also published workflows
+        :type   show_published:      boolean
+        """
+        show_published = util.string_as_bool( kwd.get( 'show_published', 'False' ) )
+        rval = []
+        filter1 = ( trans.app.model.StoredWorkflow.user == trans.user )
+        if show_published:
+            filter1 = or_( filter1, ( trans.app.model.StoredWorkflow.published == true() ) )
+        for wf in trans.sa_session.query( trans.app.model.StoredWorkflow ).filter(
+                filter1, trans.app.model.StoredWorkflow.table.c.deleted == false() ).order_by(
+                desc( trans.app.model.StoredWorkflow.table.c.update_time ) ).all():
+            item = wf.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+            encoded_id = trans.security.encode_id(wf.id)
+            item['url'] = url_for('workflow', id=encoded_id)
+            item['owner'] = wf.user.username
+            rval.append(item)
+        for wf_sa in trans.sa_session.query( trans.app.model.StoredWorkflowUserShareAssociation ).filter_by(
+                user=trans.user ).join( 'stored_workflow' ).filter(
+                trans.app.model.StoredWorkflow.deleted == false() ).order_by(
+                desc( trans.app.model.StoredWorkflow.update_time ) ).all():
+            item = wf_sa.stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+            encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
+            item['url'] = url_for( 'workflow', id=encoded_id )
+            item['owner'] = wf_sa.stored_workflow.user.username
+            rval.append(item)
+        return rval
+
+    @expose_api
+    def show(self, trans, id, **kwd):
+        """
+        GET /api/workflows/{encoded_workflow_id}
+
+        Displays information needed to run a workflow from the command line.
+        """
+        stored_workflow = self.__get_stored_workflow( trans, id )
+        if stored_workflow.importable is False and stored_workflow.user != trans.user and not trans.user_is_admin():
+            if trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation).filter_by(user=trans.user, stored_workflow=stored_workflow).count() == 0:
+                message = "Workflow is neither importable, nor owned by or shared with current user"
+                raise exceptions.ItemAccessibilityException( message )
+        if kwd.get("legacy", False):
+            style = "legacy"
+        else:
+            style = "instance"
+        return self.workflow_contents_manager.workflow_to_dict( trans, stored_workflow, style=style )
+
+    @expose_api
+    def create(self, trans, payload, **kwd):
+        """
+        POST /api/workflows
+
+        Run or create workflows from the api.
+
+        If installed_repository_file or from_history_id is specified a new
+        workflow will be created for this user. Otherwise, workflow_id must be
+        specified and this API method will cause a workflow to execute.
+
+        :param  installed_repository_file:   The path of a workflow to import. Either workflow_id, installed_repository_file or from_history_id must be specified
+        :type   installed_repository_file:   str
+
+        :param  workflow_id:                 An existing workflow id. Either workflow_id, installed_repository_file or from_history_id must be specified
+        :type   workflow_id:                 str
+
+        :param  parameters:                  If workflow_id is set - see _update_step_parameters()
+        :type   parameters:                  dict
+
+        :param  ds_map:                      If workflow_id is set - a dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
+        :type   ds_map:                      dict
+
+        :param  no_add_to_history:           If workflow_id is set - if present in the payload with any value, the input datasets will not be added to the selected history
+        :type   no_add_to_history:           str
+
+        :param  history:                     If workflow_id is set - optional history in which to run the workflow, either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history. If not specified, the workflow will be run in a new unnamed history
+        :type   history:                     str
+
+        :param  replacement_params:          If workflow_id is set - an optional dictionary used when renaming datasets
+        :type   replacement_params:          dict
+
+        :param  from_history_id:             Id of history to extract a workflow from. Either workflow_id, installed_repository_file or from_history_id must be specified
+        :type   from_history_id:             str
+
+        :param  job_ids:                     If from_history_id is set - optional list of jobs to include when extracting a workflow from history
+        :type   job_ids:                     str
+
+        :param  dataset_ids:                 If from_history_id is set - optional list of HDA `hid`s corresponding to workflow inputs when extracting a workflow from history
+        :type   dataset_ids:                 str
+
+        :param  dataset_collection_ids:      If from_history_id is set - optional list of HDCA `hid`s corresponding to workflow inputs when extracting a workflow from history
+        :type   dataset_collection_ids:      str
+
+        :param  workflow_name:               If from_history_id is set - name of the workflow to create when extracting a workflow from history
+        :type   workflow_name:               str
+
+        :param  allow_tool_state_corrections:  If set to True, any Tool parameter changes will not prevent running workflow, defaults to False
+        :type   allow_tool_state_corrections:  bool
+        """
+        ways_to_create = set( [
+            'workflow_id',
+            'installed_repository_file',
+            'from_history_id',
+            'shared_workflow_id',
+            'workflow',
+        ] )
+        if len( ways_to_create.intersection( payload ) ) == 0:
+            message = "One parameter among - %s - must be specified" % ", ".join( ways_to_create )
+            raise exceptions.RequestParameterMissingException( message )
+
+        if len( ways_to_create.intersection( payload ) ) > 1:
+            message = "Only one parameter among - %s - must be specified" % ", ".join( ways_to_create )
+            raise exceptions.RequestParameterInvalidException( message )
+
+        if 'installed_repository_file' in payload:
+            workflow_controller = trans.webapp.controllers[ 'workflow' ]
+            result = workflow_controller.import_workflow( trans=trans,
+                                                          cntrller='api',
+                                                          **payload)
+            return result
+
+        if 'from_history_id' in payload:
+            from_history_id = payload.get( 'from_history_id' )
+            from_history_id = self.decode_id( from_history_id )
+            history = self.history_manager.get_accessible( from_history_id, trans.user, current_history=trans.history )
+
+            job_ids = [ self.decode_id(_) for _ in payload.get( 'job_ids', [] ) ]
+            dataset_ids = payload.get( 'dataset_ids', [] )
+            dataset_collection_ids = payload.get( 'dataset_collection_ids', [] )
+            workflow_name = payload[ 'workflow_name' ]
+            stored_workflow = extract_workflow(
+                trans=trans,
+                user=trans.get_user(),
+                history=history,
+                job_ids=job_ids,
+                dataset_ids=dataset_ids,
+                dataset_collection_ids=dataset_collection_ids,
+                workflow_name=workflow_name,
+            )
+            item = stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+            item[ 'url' ] = url_for( 'workflow', id=item[ 'id' ] )
+            return item
+
+        if 'shared_workflow_id' in payload:
+            workflow_id = payload[ 'shared_workflow_id' ]
+            return self.__api_import_shared_workflow( trans, workflow_id, payload )
+
+        if 'workflow' in payload:
+            return self.__api_import_new_workflow( trans, payload, **kwd )
+
+        workflow_id = payload.get( 'workflow_id', None )
+        if not workflow_id:
+            message = "Invalid workflow_id specified."
+            raise exceptions.RequestParameterInvalidException( message )
+
+        # Get workflow + accessibility check.
+        stored_workflow = self.__get_stored_accessible_workflow( trans, workflow_id )
+        workflow = stored_workflow.latest_workflow
+
+        run_configs = build_workflow_run_configs( trans, workflow, payload )
+        assert len(run_configs) == 1
+        run_config = run_configs[0]
+        history = run_config.target_history
+
+        # invoke may throw MessageExceptions on tool errors, failure
+        # to match up inputs, etc...
+        outputs, invocation = invoke(
+            trans=trans,
+            workflow=workflow,
+            workflow_run_config=run_config,
+            populate_state=True,
+        )
+        trans.sa_session.flush()
+
+        # Build legacy output - should probably include more information from
+        # outputs.
+        rval = {}
+        rval['history'] = trans.security.encode_id( history.id )
+        rval['outputs'] = []
+        for step in workflow.steps:
+            if step.type == 'tool' or step.type is None:
+                for v in outputs[ step.id ].values():
+                    rval[ 'outputs' ].append( trans.security.encode_id( v.id ) )
+
+        # Newer version of this API just returns the invocation as a dict, to
+        # facilitate migration - produce the newer style response and blend in
+        # the older information.
+        invocation_response = self.__encode_invocation( trans, invocation, step_details=kwd.get('step_details', False) )
+        invocation_response.update( rval )
+        return invocation_response
+
+    @expose_api
+    def workflow_dict( self, trans, workflow_id, **kwd ):
+        """
+        GET /api/workflows/{encoded_workflow_id}/download
+        Returns a selected workflow as a json dictionary.
+        """
+        stored_workflow = self.__get_stored_accessible_workflow( trans, workflow_id )
+
+        style = kwd.get("style", "export")
+        ret_dict = self.workflow_contents_manager.workflow_to_dict( trans, stored_workflow, style=style )
+        if not ret_dict:
+            # This workflow has a tool that's missing from the distribution
+            message = "Workflow cannot be exported due to missing tools."
+            raise exceptions.MessageException( message )
+        return ret_dict
+
+    @expose_api
+    def delete( self, trans, id, **kwd ):
+        """
+        DELETE /api/workflows/{encoded_workflow_id}
+        Deletes a specified workflow
+        Author: rpark
+
+        copied from galaxy.web.controllers.workflows.py (delete)
+        """
+        workflow_id = id
+
+        try:
+            stored_workflow = trans.sa_session.query(self.app.model.StoredWorkflow).get(self.decode_id(workflow_id))
+        except Exception as e:
+            trans.response.status = 400
+            return ("Workflow with ID='%s' can not be found\n Exception: %s") % (workflow_id, str( e ))
+
+        # check to see if user has permissions to selected workflow
+        if stored_workflow.user != trans.user and not trans.user_is_admin():
+            trans.response.status = 403
+            return("Workflow is not owned by current user")
+
+        # Mark a workflow as deleted
+        stored_workflow.deleted = True
+        trans.sa_session.flush()
+
+        # TODO: Unsure of response message to let api know that a workflow was successfully deleted
+        return ( "Workflow '%s' successfully deleted" % stored_workflow.name )
+
+    @expose_api
+    def import_new_workflow_deprecated(self, trans, payload, **kwd):
+        """
+        POST /api/workflows/upload
+        Importing dynamic workflows from the api. Return newly generated workflow id.
+        Author: rpark
+
+        # currently assumes payload['workflow'] is a json representation of a workflow to be inserted into the database
+
+        Deprecated in favor of POST /api/workflows with the encoded 'workflow'
+        in the payload the same way.
+        """
+        return self.__api_import_new_workflow( trans, payload, **kwd )
+
+    @expose_api
+    def update( self, trans, id, payload, **kwds ):
+        """
+        * PUT /api/workflows/{id}
+            updates the workflow stored with ``id``
+
+        :type   id:      str
+        :param  id:      the encoded id of the workflow to update
+        :type   payload: dict
+        :param  payload: a dictionary containing any or all of the following:
+            * workflow   the json description of the workflow as would be
+                         produced by GET workflows/<id>/download or
+                         given to `POST workflows`
+
+                         The workflow contents will be updated to target
+                         this.
+
+            * name       optional string name for the workflow, if not present in payload,
+                         name defaults to existing name
+            * annotation optional string annotation for the workflow, if not present in payload,
+                         annotation defaults to existing annotation
+            * menu_entry optional boolean marking if the workflow should appear in the user's menu,
+                         if not present, workflow menu entries are not modified
+
+        :rtype:     dict
+        :returns:   serialized version of the workflow
+        """
+        stored_workflow = self.__get_stored_workflow( trans, id )
+        if 'workflow' in payload:
+            stored_workflow.name = sanitize_html(payload['name']) if ('name' in payload) else stored_workflow.name
+
+            if 'annotation' in payload:
+                newAnnotation = sanitize_html(payload['annotation'])
+                self.add_item_annotation(trans.sa_session, trans.get_user(), stored_workflow, newAnnotation)
+
+            if 'menu_entry' in payload:
+                if payload['menu_entry']:
+                    menuEntry = model.StoredWorkflowMenuEntry()
+                    menuEntry.stored_workflow = stored_workflow
+                    trans.get_user().stored_workflow_menu_entries.append(menuEntry)
+                else:
+                    # remove if in list
+                    entries = {x.stored_workflow_id: x for x in trans.get_user().stored_workflow_menu_entries}
+                    if (trans.security.decode_id(id) in entries):
+                        trans.get_user().stored_workflow_menu_entries.remove(entries[trans.security.decode_id(id)])
+
+            workflow, errors = self.workflow_contents_manager.update_workflow_from_dict(
+                trans,
+                stored_workflow,
+                payload['workflow'],
+            )
+        else:
+            message = "Updating workflow requires dictionary containing 'workflow' attribute with new JSON description."
+            raise exceptions.RequestParameterInvalidException( message )
+        return self.workflow_contents_manager.workflow_to_dict( trans, stored_workflow, style="instance" )
+
+    @expose_api
+    def build_module( self, trans, payload=None ):
+        """
+        POST /api/workflows/build_module
+        Builds module details including a tool model for the workflow editor.
+        """
+        payload = payload or {}
+        tool_id = payload.get( 'tool_id' )
+        tool_version = payload.get( 'tool_version' )
+        tool_inputs = payload.get( 'inputs', {} )
+        annotation = payload.get( 'annotation', tool_inputs.get( 'annotation', '' ) )
+
+        # load tool
+        tool = self._get_tool( tool_id, tool_version=tool_version, user=trans.user )
+
+        # initialize module
+        module = module_factory.from_dict( trans, {
+            'type'          : 'tool',
+            'tool_id'       : tool.id,
+            'tool_state'    : None
+        } )
+
+        # create tool model and default tool state (if missing)
+        tool_model = module.tool.to_json( trans, tool_inputs, workflow_building_mode=True )
+        module.update_state( tool_model[ 'state_inputs' ] )
+        return {
+            'tool_model'        : tool_model,
+            'tool_state'        : module.get_state(),
+            'data_inputs'       : module.get_data_inputs(),
+            'data_outputs'      : module.get_data_outputs(),
+            'tool_errors'       : module.get_errors(),
+            'form_html'         : module.get_config_form(),
+            'annotation'        : annotation,
+            'post_job_actions'  : module.get_post_job_actions(tool_inputs)
+        }
+
+    #
+    # -- Helper methods --
+    #
+    def _get_tool( self, id, tool_version=None, user=None ):
+        id = unquote_plus( id )
+        tool = self.app.toolbox.get_tool( id, tool_version )
+        if not tool or not tool.allow_user_access( user ):
+            raise exceptions.ObjectNotFound("Could not find tool with id '%s'" % id)
+        return tool
+
+    def __api_import_new_workflow( self, trans, payload, **kwd ):
+        data = payload['workflow']
+
+        publish = util.string_as_bool( payload.get( "publish", False ) )
+        # If 'publish' set, default to importable.
+        importable = util.string_as_bool( payload.get( "importable", publish ) )
+
+        if publish and not importable:
+            raise exceptions.RequestParameterInvalidException( "Published workflow must be importable." )
+
+        from_dict_kwds = dict(
+            source="API",
+            publish=publish,
+        )
+        workflow, missing_tool_tups = self._workflow_from_dict( trans, data, **from_dict_kwds )
+
+        if importable:
+            self._make_item_accessible( trans.sa_session, workflow )
+            trans.sa_session.flush()
+
+        # API-encoded id of the newly created workflow
+        encoded_id = trans.security.encode_id(workflow.id)
+
+        item = workflow.to_dict(value_mapper={'id': trans.security.encode_id})
+        item['url'] = url_for('workflow', id=encoded_id)
+        return item
+
+    @expose_api
+    def import_shared_workflow_deprecated(self, trans, payload, **kwd):
+        """
+        POST /api/workflows/import
+        Import a workflow shared by other users.
+
+        :param  workflow_id:      the workflow id (required)
+        :type   workflow_id:      str
+
+        :raises: exceptions.MessageException, exceptions.ObjectNotFound
+        """
+        # Pull parameters out of payload.
+        workflow_id = payload.get('workflow_id', None)
+        if workflow_id is None:
+            raise exceptions.ObjectAttributeMissingException( "Missing required parameter 'workflow_id'." )
+        self.__api_import_shared_workflow( trans, workflow_id, payload )
+
+    def __api_import_shared_workflow( self, trans, workflow_id, payload, **kwd ):
+        try:
+            stored_workflow = self.get_stored_workflow( trans, workflow_id, check_ownership=False )
+        except Exception:
+            raise exceptions.ObjectNotFound( "Malformed workflow id ( %s ) specified." % workflow_id )
+        if stored_workflow.importable is False:
+            raise exceptions.ItemAccessibilityException( 'The owner of this workflow has disabled imports via this link.' )
+        elif stored_workflow.deleted:
+            raise exceptions.ItemDeletionException( "You can't import this workflow because it has been deleted." )
+        imported_workflow = self._import_shared_workflow( trans, stored_workflow )
+        item = imported_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
+        encoded_id = trans.security.encode_id(imported_workflow.id)
+        item['url'] = url_for('workflow', id=encoded_id)
+        return item
+
+    @expose_api
+    def invoke( self, trans, workflow_id, payload, **kwd ):
+        """
+        POST /api/workflows/{encoded_workflow_id}/invocations
+
+        Schedule the workflow specified by `workflow_id` to run.
+        """
+        # /usage is awkward in this context but is consistent with the rest of
+        # this module. Would prefer to redo it all to use /invocation(s).
+        # Get workflow + accessibility check.
+        stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
+        workflow = stored_workflow.latest_workflow
+        run_configs = build_workflow_run_configs(trans, workflow, payload)
+        is_batch = payload.get('batch')
+        if not is_batch and len(run_configs) != 1:
+            raise exceptions.RequestParameterInvalidException("Must specify 'batch' to use batch parameters.")
+
+        invocations = []
+        for run_config in run_configs:
+            workflow_scheduler_id = payload.get('scheduler', None)
+            # TODO: workflow scheduler hints
+            work_request_params = dict(scheduler=workflow_scheduler_id)
+            workflow_invocation = queue_invoke(
+                trans=trans,
+                workflow=workflow,
+                workflow_run_config=run_config,
+                request_params=work_request_params
+            )
+            invocation = self.encode_all_ids(trans, workflow_invocation.to_dict(), recursive=True)
+            invocations.append(invocation)
+
+        if is_batch:
+            return invocations
+        else:
+            return invocations[0]
+
+    @expose_api
+    def index_invocations(self, trans, workflow_id, **kwd):
+        """
+        GET /api/workflows/{workflow_id}/invocations
+
+        Get the list of the workflow invocations
+
+        :param  workflow_id:      the workflow id (required)
+        :type   workflow_id:      str
+
+        :raises: exceptions.MessageException, exceptions.ObjectNotFound
+        """
+        stored_workflow = self.__get_stored_workflow( trans, workflow_id )
+        results = self.workflow_manager.build_invocations_query( trans, stored_workflow.id )
+        out = []
+        for r in results:
+            out.append( self.__encode_invocation( trans, r, view="collection" ) )
+        return out
+
+    @expose_api
+    def show_invocation(self, trans, workflow_id, invocation_id, **kwd):
+        """
+        GET /api/workflows/{workflow_id}/invocations/{invocation_id}
+        Get detailed description of workflow invocation
+
+        :param  workflow_id:        the workflow id (required)
+        :type   workflow_id:        str
+
+        :param  invocation_id:      the invocation id (required)
+        :type   invocation_id:      str
+
+        :raises: exceptions.MessageException, exceptions.ObjectNotFound
+        """
+        decoded_workflow_invocation_id = self.decode_id( invocation_id )
+        workflow_invocation = self.workflow_manager.get_invocation( trans, decoded_workflow_invocation_id )
+        if workflow_invocation:
+            return self.__encode_invocation( trans, workflow_invocation, step_details=kwd.get('step_details', False) )
+        return None
+
+    @expose_api
+    def cancel_invocation(self, trans, workflow_id, invocation_id, **kwd):
+        """
+        DELETE /api/workflows/{workflow_id}/invocations/{invocation_id}
+        Cancel the specified workflow invocation.
+
+        :param  workflow_id:      the workflow id (required)
+        :type   workflow_id:      str
+
+        :param  invocation_id:      the invocation id (required)
+        :type   invocation_id:      str
+
+        :raises: exceptions.MessageException, exceptions.ObjectNotFound
+        """
+        decoded_workflow_invocation_id = self.decode_id( invocation_id )
+        workflow_invocation = self.workflow_manager.cancel_invocation( trans, decoded_workflow_invocation_id )
+        return self.__encode_invocation( trans, workflow_invocation )
+
+    @expose_api
+    def invocation_step(self, trans, workflow_id, invocation_id, step_id, **kwd):
+        """
+        GET /api/workflows/{workflow_id}/invocations/{invocation_id}/steps/{step_id}
+
+        :param  workflow_id:        the workflow id (required)
+        :type   workflow_id:        str
+
+        :param  invocation_id:      the invocation id (required)
+        :type   invocation_id:      str
+
+        :param  step_id:      encoded id of the WorkflowInvocationStep (required)
+        :type   step_id:      str
+
+        :param  payload:       payload containing update action information
+                               for running workflow.
+
+        :raises: exceptions.MessageException, exceptions.ObjectNotFound
+        """
+        decoded_invocation_step_id = self.decode_id( step_id )
+        invocation_step = self.workflow_manager.get_invocation_step(
+            trans,
+            decoded_invocation_step_id
+        )
+        return self.__encode_invocation_step( trans, invocation_step )
+
+    @expose_api
+    def update_invocation_step(self, trans, workflow_id, invocation_id, step_id, payload, **kwd):
+        """
+        PUT /api/workflows/{workflow_id}/invocations/{invocation_id}/steps/{step_id}
+        Update state of running workflow step invocation - still very nebulous
+        but this would be for stuff like confirming paused steps can proceed
+        etc....
+
+        :param  workflow_id:      the workflow id (required)
+        :type   workflow_id:      str
+
+        :param  invocation_id:      the invocation id (required)
+        :type   invocation_id:      str
+
+        :param  step_id:      encoded id of the WorkflowInvocationStep (required)
+        :type   step_id:      str
+
+        :param  payload:      payload containing update action information
+                              for the running workflow step
+        :type   payload:      dict
+
+        :raises: exceptions.MessageException, exceptions.ObjectNotFound
+        """
+        decoded_invocation_step_id = self.decode_id( step_id )
+        action = payload.get( "action", None )
+
+        invocation_step = self.workflow_manager.update_invocation_step(
+            trans,
+            decoded_invocation_step_id,
+            action=action,
+        )
+        return self.__encode_invocation_step( trans, invocation_step )
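+
+    # A hedged sketch of a matching client request (hypothetical ids and key;
+    # the accepted values for "action" are defined by the workflow manager,
+    # e.g. a truthy value to let a paused step proceed):
+    #
+    #     import requests
+    #     resp = requests.put(
+    #         "https://galaxy.example.org/api/workflows/f2db41e1fa331b3e"
+    #         "/invocations/df7a1f0c02a5b08e/steps/1cd8e2f6b131e891",
+    #         params={"key": "YOUR_API_KEY"},
+    #         json={"action": True},
+    #     )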
+
+    def __encode_invocation_step( self, trans, invocation_step ):
+        return self.encode_all_ids(
+            trans,
+            invocation_step.to_dict( 'element' ),
+            True
+        )
+
+    def __get_stored_accessible_workflow( self, trans, workflow_id ):
+        return self.workflow_manager.get_stored_accessible_workflow( trans, workflow_id )
+
+    def __get_stored_workflow( self, trans, workflow_id ):
+        return self.workflow_manager.get_stored_workflow( trans, workflow_id )
+
+    def __encode_invocation( self, trans, invocation, view="element", step_details=False ):
+        return self.encode_all_ids(
+            trans,
+            invocation.to_dict( view, step_details=step_details ),
+            True
+        )
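+
+    # Note: encode_all_ids (inherited from the base API controller) walks the
+    # given dictionary and replaces database ids with their encoded string
+    # forms, e.g. (illustrative values) {"id": 42} -> {"id": "f2db41e1fa331b3e"};
+    # the trailing True enables recursion into nested structures.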
diff --git a/lib/galaxy/webapps/galaxy/buildapp.py b/lib/galaxy/webapps/galaxy/buildapp.py
new file mode 100644
index 0000000..8bee0a1
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -0,0 +1,877 @@
+"""
+Provides factory methods to assemble the Galaxy web application
+"""
+
+import os
+import sys
+import threading
+import atexit
+
+try:
+    import configparser
+except ImportError:
+    import ConfigParser as configparser
+
+
+import galaxy.app
+import galaxy.model
+import galaxy.model.mapping
+import galaxy.datatypes.registry
+import galaxy.web.framework
+import galaxy.web.framework.webapp
+from galaxy.webapps.util import build_template_error_formatters
+from galaxy import util
+from galaxy.util import asbool
+from galaxy.util.postfork import process_is_uwsgi, register_postfork_function
+from galaxy.util.properties import load_app_properties
+
+from paste import httpexceptions
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class GalaxyWebApplication( galaxy.web.framework.webapp.WebApplication ):
+    pass
+
+
+def app_factory( global_conf, **kwargs ):
+    return paste_app_factory( global_conf, **kwargs )
+
+
+def paste_app_factory( global_conf, **kwargs ):
+    """
+    Return a WSGI application serving the root object
+    """
+    kwargs = load_app_properties(
+        kwds=kwargs
+    )
+    # Create the Galaxy application unless passed in
+    if 'app' in kwargs:
+        app = kwargs.pop( 'app' )
+        galaxy.app.app = app
+    else:
+        try:
+            app = galaxy.app.UniverseApplication( global_conf=global_conf, **kwargs )
+            galaxy.app.app = app
+        except Exception:
+            import traceback
+            traceback.print_exc()
+            sys.exit( 1 )
+    # Call the app's shutdown method when the interpreter exits; this cleanly
+    # stops the various Galaxy application daemon threads
+    atexit.register( app.shutdown )
+    # Create the universe WSGI application
+    webapp = GalaxyWebApplication( app, session_cookie='galaxysession', name='galaxy' )
+
+    # CLIENTSIDE ROUTES
+    # The following routes are handled entirely on the client side.
+    # They don't bootstrap any information; they simply provide the base
+    # analysis interface, at which point the client application takes over.
+
+    webapp.add_client_route( '/tours' )
+    webapp.add_client_route( '/tours/{tour_id}' )
+
+    # STANDARD CONTROLLER ROUTES
+    webapp.add_ui_controllers( 'galaxy.webapps.galaxy.controllers', app )
+    # Force /history to go to view of current
+    webapp.add_route( '/history', controller='history', action='view' )
+    webapp.add_route( '/history/view/{id}', controller='history', action='view' )
+    # Force /activate to go to the controller
+    webapp.add_route( '/activate', controller='user', action='activate' )
+    webapp.add_route( '/login', controller='root', action='login' )
+
+    # These routes handle our simple needs at the moment
+    webapp.add_route( '/async/{tool_id}/{data_id}/{data_secret}', controller='async', action='index', tool_id=None, data_id=None, data_secret=None )
+    webapp.add_route( '/{controller}/{action}', action='index' )
+    webapp.add_route( '/{action}', controller='root', action='index' )
+
+    # allow for subdirectories in extra_files_path
+    webapp.add_route( '/datasets/{dataset_id}/display/{filename:.+?}', controller='dataset', action='display', dataset_id=None, filename=None)
+    webapp.add_route( '/datasets/{dataset_id}/{action}/{filename}', controller='dataset', action='index', dataset_id=None, filename=None)
+    webapp.add_route( '/display_application/{dataset_id}/{app_name}/{link_name}/{user_id}/{app_action}/{action_param}/{action_param_extra:.+?}',
+                      controller='dataset', action='display_application', dataset_id=None, user_id=None,
+                      app_name=None, link_name=None, app_action=None, action_param=None, action_param_extra=None )
+    webapp.add_route( '/u/{username}/d/{slug}/{filename}', controller='dataset', action='display_by_username_and_slug', filename=None )
+    webapp.add_route( '/u/{username}/p/{slug}', controller='page', action='display_by_username_and_slug' )
+    webapp.add_route( '/u/{username}/h/{slug}', controller='history', action='display_by_username_and_slug' )
+    webapp.add_route( '/u/{username}/w/{slug}', controller='workflow', action='display_by_username_and_slug' )
+    webapp.add_route( '/u/{username}/w/{slug}/{format}', controller='workflow', action='display_by_username_and_slug' )
+    webapp.add_route( '/u/{username}/v/{slug}', controller='visualization', action='display_by_username_and_slug' )
+    webapp.add_route( '/search', controller='search', action='index' )
+
+    # TODO: Refactor above routes into external method to allow testing in
+    # isolation as well.
+    populate_api_routes( webapp, app )
+
+    # ==== Done
+    # Indicate that all configuration settings have been provided
+    webapp.finalize_config()
+
+    # Wrap the webapp in some useful middleware
+    if kwargs.get( 'middleware', True ):
+        webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
+    if asbool( kwargs.get( 'static_enabled', True) ):
+        if process_is_uwsgi:
+            log.error("Static middleware is enabled in your configuration but this is a uwsgi process.  Refusing to wrap in static middleware.")
+        else:
+            webapp = wrap_in_static( webapp, global_conf, plugin_frameworks=[ app.visualizations_registry ], **kwargs )
+    # Close any pooled database connections before forking
+    try:
+        galaxy.model.mapping.metadata.bind.dispose()
+    except Exception:
+        log.exception("Unable to dispose of pooled galaxy model database connections.")
+    try:
+        # This model may not actually be bound.
+        if galaxy.model.tool_shed_install.mapping.metadata.bind:
+            galaxy.model.tool_shed_install.mapping.metadata.bind.dispose()
+    except Exception:
+        log.exception("Unable to dispose of pooled toolshed install model database connections.")
+
+    register_postfork_function(postfork_setup)
+
+    for th in threading.enumerate():
+        if th.is_alive():
+            log.debug("Prior to webapp return, Galaxy thread %s is alive.", th)
+    # Return
+    return webapp
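+
+# For orientation: a minimal, illustrative Paste Deploy stanza that would
+# reach this factory might look like the following (values are examples only):
+#
+#     [app:main]
+#     paste.app_factory = galaxy.webapps.galaxy.buildapp:app_factory
+#     static_enabled = True
+#     use_interactive = False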
+
+
+def uwsgi_app_factory():
+    import uwsgi
+    root = os.path.abspath(uwsgi.opt.get('galaxy_root', os.getcwd()))
+    config_file = uwsgi.opt.get('galaxy_config_file', os.path.join(root, 'config', 'galaxy.ini'))
+    global_conf = {
+        '__file__': config_file if os.path.exists(config_file) else None,
+        'here': root }
+    parser = configparser.ConfigParser()
+    parser.read(config_file)
+    try:
+        kwargs = dict(parser.items('app:main'))
+    except configparser.NoSectionError:
+        kwargs = {}
+    return app_factory(global_conf, **kwargs)
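+
+# A hedged sketch of the uwsgi options consumed above (paths are
+# hypothetical; any key set in the [uwsgi] ini section, or via --set on the
+# command line, is visible in ``uwsgi.opt``):
+#
+#     [uwsgi]
+#     galaxy_root = /srv/galaxy
+#     galaxy_config_file = /srv/galaxy/config/galaxy.ini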
+
+
+def postfork_setup():
+    from galaxy.app import app
+    if process_is_uwsgi:
+        import uwsgi
+        app.config.server_name += ".%s" % uwsgi.worker_id()
+    app.control_worker.bind_and_start()
+
+
+def populate_api_routes( webapp, app ):
+    webapp.add_api_controllers( 'galaxy.webapps.galaxy.api', app )
+
+    valid_history_contents_types = [
+        'dataset',
+        'dataset_collection',
+    ]
+
+    # Access HDA details via histories/{history_id}/contents/datasets/{hda_id}
+    webapp.mapper.resource( "content_typed",
+                            "{type:%s}s" % "|".join( valid_history_contents_types ),
+                            name_prefix="history_",
+                            controller='history_contents',
+                            path_prefix='/api/histories/{history_id}/contents',
+                            parent_resources=dict( member_name='history', collection_name='histories' ),
+                            )
+
+    contents_archive_mapper = webapp.mapper.submapper( action='archive', controller='history_contents' )
+    contents_archive_mapper.connect( '/api/histories/{history_id}/contents/archive' )
+    contents_archive_mapper.connect( '/api/histories/{history_id}/contents/archive/{filename}{.format}' )
+
+    # Legacy access to HDA details via histories/{history_id}/contents/{hda_id}
+    webapp.mapper.resource( 'content',
+                            'contents',
+                            controller='history_contents',
+                            name_prefix='history_',
+                            path_prefix='/api/histories/{history_id}',
+                            parent_resources=dict( member_name='history', collection_name='histories' ) )
+    webapp.mapper.connect( "history_contents_display",
+                           "/api/histories/{history_id}/contents/{history_content_id}/display",
+                           controller="datasets",
+                           action="display",
+                           conditions=dict(method=["GET"]))
+    webapp.mapper.connect( "history_contents_metadata_file",
+                           "/api/histories/{history_id}/contents/{history_content_id}/metadata_file",
+                           controller="datasets",
+                           action="get_metadata_file",
+                           conditions=dict(method=["GET"]))
+    webapp.mapper.resource( 'user',
+                            'users',
+                            controller='group_users',
+                            name_prefix='group_',
+                            path_prefix='/api/groups/{group_id}',
+                            parent_resources=dict( member_name='group', collection_name='groups' ) )
+    webapp.mapper.resource( 'role',
+                            'roles',
+                            controller='group_roles',
+                            name_prefix='group_',
+                            path_prefix='/api/groups/{group_id}',
+                            parent_resources=dict( member_name='group', collection_name='groups' ) )
+    _add_item_tags_controller( webapp,
+                               name_prefix="history_content_",
+                               path_prefix='/api/histories/{history_id}/contents/{history_content_id}' )
+    webapp.mapper.connect( '/api/histories/published', action='published', controller="histories", conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( '/api/histories/shared_with_me', action='shared_with_me', controller="histories" )
+    _add_item_tags_controller( webapp,
+                               name_prefix="history_",
+                               path_prefix='/api/histories/{history_id}' )
+    _add_item_tags_controller( webapp,
+                               name_prefix="workflow_",
+                               path_prefix='/api/workflows/{workflow_id}' )
+    _add_item_annotation_controller( webapp,
+                                     name_prefix="history_content_",
+                                     path_prefix='/api/histories/{history_id}/contents/{history_content_id}' )
+    _add_item_annotation_controller( webapp,
+                                     name_prefix="history_",
+                                     path_prefix='/api/histories/{history_id}' )
+    _add_item_annotation_controller( webapp,
+                                     name_prefix="workflow_",
+                                     path_prefix='/api/workflows/{workflow_id}' )
+    _add_item_provenance_controller( webapp,
+                                     name_prefix="history_content_",
+                                     path_prefix='/api/histories/{history_id}/contents/{history_content_id}' )
+
+    webapp.mapper.resource( 'dataset', 'datasets', path_prefix='/api' )
+    webapp.mapper.resource( 'tool_data', 'tool_data', path_prefix='/api' )
+    webapp.mapper.connect( '/api/tool_data/{id:.+?}/fields/{value:.+?}/files/{path:.+?}', action='download_field_file', controller="tool_data" )
+    webapp.mapper.connect( '/api/tool_data/{id:.+?}/fields/{value:.+?}', action='show_field', controller="tool_data" )
+    webapp.mapper.connect( '/api/tool_data/{id:.+?}/reload', action='reload', controller="tool_data" )
+    webapp.mapper.resource( 'dataset_collection', 'dataset_collections', path_prefix='/api' )
+    webapp.mapper.resource( 'sample', 'samples', path_prefix='/api' )
+    webapp.mapper.resource( 'request', 'requests', path_prefix='/api' )
+    webapp.mapper.resource( 'form', 'forms', path_prefix='/api' )
+    webapp.mapper.resource( 'request_type', 'request_types', path_prefix='/api' )
+    webapp.mapper.resource( 'role', 'roles', path_prefix='/api' )
+    webapp.mapper.connect( '/api/ftp_files', controller='remote_files' )
+    webapp.mapper.resource( 'remote_file', 'remote_files', path_prefix='/api' )
+    webapp.mapper.resource( 'group', 'groups', path_prefix='/api' )
+    webapp.mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
+
+    # =======================
+    # ====== TOOLS API ======
+    # =======================
+
+    webapp.mapper.connect( '/api/tools/all_requirements', action='all_requirements', controller="tools" )
+    webapp.mapper.connect( '/api/tools/{id:.+?}/build', action='build', controller="tools" )
+    webapp.mapper.connect( '/api/tools/{id:.+?}/reload', action='reload', controller="tools" )
+    webapp.mapper.connect( '/api/tools/{id:.+?}/diagnostics', action='diagnostics', controller="tools" )
+    webapp.mapper.connect( '/api/tools/{id:.+?}/citations', action='citations', controller="tools" )
+    webapp.mapper.connect( '/api/tools/{id:.+?}/download', action='download', controller="tools" )
+    webapp.mapper.connect( '/api/tools/{id:.+?}/requirements', action='requirements', controller="tools")
+    webapp.mapper.connect( '/api/tools/{id:.+?}/install_dependencies', action='install_dependencies', controller="tools", conditions=dict( method=[ "POST" ] ))
+    webapp.mapper.connect( '/api/tools/{id:.+?}/build_dependency_cache', action='build_dependency_cache', controller="tools", conditions=dict( method=[ "POST" ] ))
+    webapp.mapper.connect( '/api/tools/{id:.+?}', action='show', controller="tools" )
+    webapp.mapper.resource( 'tool', 'tools', path_prefix='/api' )
+
+    webapp.mapper.connect( '/api/dependency_resolvers/clean', action="clean", controller="tool_dependencies", conditions=dict( method=[ "POST" ]) )
+    webapp.mapper.connect( '/api/dependency_resolvers/dependency', action="manager_dependency", controller="tool_dependencies", conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( '/api/dependency_resolvers/dependency', action="install_dependency", controller="tool_dependencies", conditions=dict( method=[ "POST" ] ) )
+    webapp.mapper.connect( '/api/dependency_resolvers/requirements', action="manager_requirements", controller="tool_dependencies" )
+    webapp.mapper.connect( '/api/dependency_resolvers/{id}/clean', action="clean", controller="tool_dependencies", conditions=dict( method=[ "POST" ]) )
+    webapp.mapper.connect( '/api/dependency_resolvers/{id}/dependency', action="resolver_dependency", controller="tool_dependencies", conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( '/api/dependency_resolvers/{id}/dependency', action="install_dependency", controller="tool_dependencies", conditions=dict( method=[ "POST" ] ) )
+    webapp.mapper.connect( '/api/dependency_resolvers/{id}/requirements', action="resolver_requirements", controller="tool_dependencies" )
+    webapp.mapper.resource( 'dependency_resolver', 'dependency_resolvers', controller="tool_dependencies", path_prefix='/api' )
+
+    webapp.mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
+    webapp.mapper.resource( 'genome', 'genomes', path_prefix='/api' )
+    webapp.mapper.connect( '/api/genomes/{id}/indexes', controller='genomes', action='indexes' )
+    webapp.mapper.connect( '/api/genomes/{id}/sequences', controller='genomes', action='sequences' )
+    webapp.mapper.resource( 'visualization', 'visualizations', path_prefix='/api' )
+    webapp.mapper.connect( '/api/workflows/build_module', action='build_module', controller="workflows" )
+    webapp.mapper.resource( 'workflow', 'workflows', path_prefix='/api' )
+    webapp.mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' )
+    webapp.mapper.connect( '/api/histories/{history_id}/citations', action='citations', controller="histories" )
+    webapp.mapper.connect(
+        'dynamic_tool_confs',
+        '/api/configuration/dynamic_tool_confs',
+        controller="configuration",
+        action="dynamic_tool_confs"
+    )
+    webapp.mapper.connect(
+        'tool_lineages',
+        '/api/configuration/tool_lineages',
+        controller="configuration",
+        action="tool_lineages"
+    )
+    webapp.mapper.connect(
+        '/api/configuration/toolbox',
+        controller="configuration",
+        action="reload_toolbox",
+        conditions=dict( method=["PUT"] )
+    )
+    webapp.mapper.resource( 'configuration', 'configuration', path_prefix='/api' )
+    webapp.mapper.connect( "configuration_version",
+                           "/api/version", controller="configuration",
+                           action="version", conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.resource( 'datatype',
+                            'datatypes',
+                            path_prefix='/api',
+                            collection={ 'sniffers': 'GET', 'mapping': 'GET', 'converters': 'GET', 'edam_data': 'GET', 'edam_formats': 'GET' },
+                            parent_resources=dict( member_name='datatype', collection_name='datatypes' ) )
+    webapp.mapper.resource( 'search', 'search', path_prefix='/api' )
+    webapp.mapper.resource( 'page', 'pages', path_prefix="/api")
+    webapp.mapper.resource( 'revision', 'revisions',
+                            path_prefix='/api/pages/{page_id}',
+                            controller='page_revisions',
+                            parent_resources=dict( member_name='page', collection_name='pages' ) )
+
+    webapp.mapper.connect( "history_archive_export",
+                           "/api/histories/{id}/exports", controller="histories",
+                           action="archive_export", conditions=dict( method=[ "PUT" ] ) )
+    webapp.mapper.connect( "history_archive_download",
+                           "/api/histories/{id}/exports/{jeha_id}", controller="histories",
+                           action="archive_download", conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( "create_api_key", "/api/users/{user_id}/api_key",
+                           controller="users", action="api_key", user_id=None,
+                           conditions=dict( method=["POST"] ) )
+
+    # ---- visualizations registry ---- generic template renderer
+    # @deprecated: superseded by the visualization plugin routes connected below
+    webapp.add_route( '/visualization/show/{visualization_name}', controller='visualization', action='render', visualization_name=None )
+
+    # Provide an alternate route to visualization plugins that is closer to their static assets
+    # (/plugins/visualizations/{visualization_name}/static), allowing them to use relative URLs to those assets
+    webapp.mapper.connect( 'visualization_plugin', '/plugins/visualizations/{visualization_name}/show',
+        controller='visualization', action='render' )
+    webapp.mapper.connect( 'saved_visualization', '/plugins/visualizations/{visualization_name}/saved',
+        controller='visualization', action='saved' )
+    # same for interactive environments (IEs)
+    webapp.mapper.connect( 'interactive_environment_plugin', '/plugins/interactive_environments/{visualization_name}/show',
+        controller='visualization', action='render' )
+    webapp.mapper.connect( 'saved_interactive_environment', '/plugins/interactive_environments/{visualization_name}/saved',
+        controller='visualization', action='saved' )
+
+    # Deprecated in favor of POST /api/workflows with 'workflow' in payload.
+    webapp.mapper.connect( 'import_workflow_deprecated',
+                           '/api/workflows/upload',
+                           controller='workflows',
+                           action='import_new_workflow_deprecated',
+                           conditions=dict( method=['POST'] ) )
+    webapp.mapper.connect( 'workflow_dict',
+                           '/api/workflows/{workflow_id}/download',
+                           controller='workflows',
+                           action='workflow_dict',
+                           conditions=dict( method=['GET'] ) )
+    # Preserve the following download route for dependent applications for now; deprecate at some point
+    webapp.mapper.connect( 'workflow_dict',
+                           '/api/workflows/download/{workflow_id}',
+                           controller='workflows',
+                           action='workflow_dict',
+                           conditions=dict( method=['GET'] ) )
+    # Deprecated in favor of POST /api/workflows with shared_workflow_id in payload.
+    webapp.mapper.connect( 'import_shared_workflow_deprecated',
+                           '/api/workflows/import',
+                           controller='workflows',
+                           action='import_shared_workflow_deprecated',
+                           conditions=dict( method=['POST'] ) )
+
+    # route for creating/getting converted datasets
+    webapp.mapper.connect( '/api/datasets/{dataset_id}/converted', controller='datasets', action='converted', ext=None )
+    webapp.mapper.connect( '/api/datasets/{dataset_id}/converted/{ext}', controller='datasets', action='converted' )
+
+    # The API exposes both "usage" and "invocation" routes; they mean the
+    # same thing, but the "usage" routes should be considered deprecated.
+    invoke_names = {
+        "invocations": "",
+        "usage": "_deprecated",
+    }
+    for noun, suffix in invoke_names.items():
+        name = "%s%s" % (noun, suffix)
+        webapp.mapper.connect(
+            'list_workflow_%s' % name,
+            '/api/workflows/{workflow_id}/%s' % noun,
+            controller='workflows',
+            action='index_invocations',
+            conditions=dict(method=['GET'])
+        )
+
+        webapp.mapper.connect(
+            'workflow_%s_contents' % name,
+            '/api/workflows/{workflow_id}/%s/{invocation_id}' % noun,
+            controller='workflows',
+            action='show_invocation',
+            conditions=dict(method=['GET'])
+        )
+
+        webapp.mapper.connect(
+            'cancel_workflow_%s' % name,
+            '/api/workflows/{workflow_id}/%s/{invocation_id}' % noun,
+            controller='workflows',
+            action='cancel_invocation',
+            conditions=dict(method=['DELETE'])
+        )
+
+        webapp.mapper.connect(
+            'workflow_%s_step' % name,
+            '/api/workflows/{workflow_id}/%s/{invocation_id}/steps/{step_id}' % noun,
+            controller='workflows',
+            action='invocation_step',
+            conditions=dict(method=['GET'])
+        )
+
+        webapp.mapper.connect(
+            'workflow_%s_step_update' % name,
+            '/api/workflows/{workflow_id}/%s/{invocation_id}/steps/{step_id}' % noun,
+            controller='workflows',
+            action='update_invocation_step',
+            conditions=dict(method=['PUT'])
+        )
+
+        webapp.mapper.connect(
+            'workflow_%s' % name,
+            '/api/workflows/{workflow_id}/%s' % noun,
+            controller='workflows',
+            action='invoke',
+            conditions=dict( method=['POST'] )
+        )
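+
+    # For reference, the loop above wires each action to two equivalent URL
+    # families, for example (the "usage" spellings are the deprecated ones):
+    #
+    #     GET    /api/workflows/{workflow_id}/invocations
+    #     GET    /api/workflows/{workflow_id}/usage
+    #     DELETE /api/workflows/{workflow_id}/invocations/{invocation_id}
+    #     DELETE /api/workflows/{workflow_id}/usage/{invocation_id}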
+    # ============================
+    # ===== AUTHENTICATE API =====
+    # ============================
+
+    webapp.mapper.connect( 'api_key_retrieval',
+                           '/api/authenticate/baseauth/',
+                           controller='authenticate',
+                           action='get_api_key',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    # =====================
+    # ===== TOURS API =====
+    # =====================
+
+    webapp.mapper.connect( 'index',
+                           '/api/tours',
+                           controller='tours',
+                           action='index',
+                           conditions=dict( method=["GET"] ) )
+
+    webapp.mapper.connect( 'show',
+                           '/api/tours/{tour_id}',
+                           controller='tours',
+                           action='show',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'update_tour',
+                           '/api/tours/{tour_id}',
+                           controller='tours',
+                           action='update_tour',
+                           conditions=dict( method=[ "POST" ] ) )
+
+    # ========================
+    # ===== WEBHOOKS API =====
+    # ========================
+
+    webapp.mapper.connect( 'get_all',
+                           '/api/webhooks',
+                           controller='webhooks',
+                           action='get_all',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'get_random',
+                           '/api/webhooks/{webhook_type}',
+                           controller='webhooks',
+                           action='get_random',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'get_all_by_type',
+                           '/api/webhooks/{webhook_type}/all',
+                           controller='webhooks',
+                           action='get_all_by_type',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'get_data',
+                           '/api/webhooks/{webhook_name}/get_data',
+                           controller='webhooks',
+                           action='get_data',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    # =======================
+    # ===== LIBRARY API =====
+    # =======================
+
+    webapp.mapper.connect( 'update_library',
+                           '/api/libraries/{id}',
+                           controller='libraries',
+                           action='update',
+                           conditions=dict( method=[ "PATCH", "PUT" ] ) )
+
+    webapp.mapper.connect( 'show_library_permissions',
+                           '/api/libraries/{encoded_library_id}/permissions',
+                           controller='libraries',
+                           action='get_permissions',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'set_library_permissions',
+                           '/api/libraries/{encoded_library_id}/permissions',
+                           controller='libraries',
+                           action='set_permissions',
+                           conditions=dict( method=[ "POST" ] ) )
+
+    webapp.mapper.connect( 'show_ld_item',
+                           '/api/libraries/datasets/{id}',
+                           controller='lda_datasets',
+                           action='show',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'load_ld',
+                           '/api/libraries/datasets/',
+                           controller='lda_datasets',
+                           action='load',
+                           conditions=dict( method=[ "POST" ] ) )
+
+    webapp.mapper.connect( 'show_version_of_ld_item',
+                           '/api/libraries/datasets/{encoded_dataset_id}/versions/{encoded_ldda_id}',
+                           controller='lda_datasets',
+                           action='show_version',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'show_legitimate_lda_roles',
+                           '/api/libraries/datasets/{encoded_dataset_id}/permissions',
+                           controller='lda_datasets',
+                           action='show_roles',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'update_lda_permissions',
+                           '/api/libraries/datasets/{encoded_dataset_id}/permissions',
+                           controller='lda_datasets',
+                           action='update_permissions',
+                           conditions=dict( method=[ "POST" ] ) )
+
+    webapp.mapper.connect( 'delete_lda_item',
+                           '/api/libraries/datasets/{encoded_dataset_id}',
+                           controller='lda_datasets',
+                           action='delete',
+                           conditions=dict( method=[ "DELETE" ] ) )
+
+    webapp.mapper.connect( 'download_lda_items',
+                           '/api/libraries/datasets/download/{format}',
+                           controller='lda_datasets',
+                           action='download',
+                           conditions=dict( method=[ "POST", "GET" ] ) )
+
+    webapp.mapper.resource_with_deleted( 'library',
+                                         'libraries',
+                                         path_prefix='/api' )
+
+    webapp.mapper.resource( 'content',
+                            'contents',
+                            controller='library_contents',
+                            name_prefix='library_',
+                            path_prefix='/api/libraries/{library_id}',
+                            parent_resources=dict( member_name='library', collection_name='libraries' ) )
+
+    _add_item_extended_metadata_controller( webapp,
+                                            name_prefix="library_dataset_",
+                                            path_prefix='/api/libraries/{library_id}/contents/{library_content_id}' )
+
+    # =======================
+    # ===== FOLDERS API =====
+    # =======================
+
+    webapp.mapper.connect( 'add_history_datasets_to_library',
+                           '/api/folders/{encoded_folder_id}/contents',
+                           controller='folder_contents',
+                           action='create',
+                           conditions=dict( method=[ "POST" ] ) )
+
+    webapp.mapper.connect( 'create_folder',
+                           '/api/folders/{encoded_parent_folder_id}',
+                           controller='folders',
+                           action='create',
+                           conditions=dict( method=[ "POST" ] ) )
+
+    webapp.mapper.connect( 'delete_folder',
+                           '/api/folders/{encoded_folder_id}',
+                           controller='folders',
+                           action='delete',
+                           conditions=dict( method=[ "DELETE" ] ) )
+
+    webapp.mapper.connect( 'update_folder',
+                           '/api/folders/{encoded_folder_id}',
+                           controller='folders',
+                           action='update',
+                           conditions=dict( method=[ "PATCH", "PUT" ] ) )
+
+    webapp.mapper.resource( 'folder',
+                            'folders',
+                            path_prefix='/api' )
+
+    webapp.mapper.connect( 'show_folder_permissions',
+                           '/api/folders/{encoded_folder_id}/permissions',
+                           controller='folders',
+                           action='get_permissions',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'set_folder_permissions',
+                           '/api/folders/{encoded_folder_id}/permissions',
+                           controller='folders',
+                           action='set_permissions',
+                           conditions=dict( method=[ "POST" ] ) )
+
+    webapp.mapper.resource( 'content',
+                            'contents',
+                            controller='folder_contents',
+                            name_prefix='folder_',
+                            path_prefix='/api/folders/{folder_id}',
+                            parent_resources=dict( member_name='folder', collection_name='folders' ),
+                            conditions=dict( method=[ "GET" ] )  )
+
+    webapp.mapper.resource( 'job',
+                            'jobs',
+                            path_prefix='/api' )
+    webapp.mapper.connect( 'job_search', '/api/jobs/search', controller='jobs', action='search', conditions=dict( method=['POST'] ) )
+    webapp.mapper.connect( 'job_inputs', '/api/jobs/{id}/inputs', controller='jobs', action='inputs', conditions=dict( method=['GET'] ) )
+    webapp.mapper.connect( 'job_outputs', '/api/jobs/{id}/outputs', controller='jobs', action='outputs', conditions=dict( method=['GET'] ) )
+    webapp.mapper.connect( 'build_for_rerun', '/api/jobs/{id}/build_for_rerun', controller='jobs', action='build_for_rerun', conditions=dict( method=['GET'] ) )
+
+    # Job files controllers. Only for consumption by remote job runners.
+    webapp.mapper.resource( 'file',
+                            'files',
+                            controller="job_files",
+                            name_prefix="job_",
+                            path_prefix='/api/jobs/{job_id}',
+                            parent_resources=dict( member_name="job", collection_name="jobs" ) )
+
+    _add_item_extended_metadata_controller( webapp,
+                                            name_prefix="history_dataset_",
+                                            path_prefix='/api/histories/{history_id}/contents/{history_content_id}' )
+
+    # ====================
+    # ===== TOOLSHED =====
+    # ====================
+
+    # Handle displaying tool help images and README file images contained in repositories installed from the tool shed.
+    webapp.add_route( '/admin_toolshed/static/images/{repository_id}/{image_file:.+?}',
+                      controller='admin_toolshed',
+                      action='display_image_in_repository',
+                      repository_id=None,
+                      image_file=None )
+
+    webapp.mapper.connect( 'shed_category',
+                           '/api/tool_shed_repositories/shed_category',
+                           controller='tool_shed_repositories',
+                           action='shed_category',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'shed_repository',
+                           '/api/tool_shed_repositories/shed_repository',
+                           controller='tool_shed_repositories',
+                           action='shed_repository',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'shed_categories',
+                           '/api/tool_shed_repositories/shed_categories',
+                           controller='tool_shed_repositories',
+                           action='shed_categories',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'tool_shed_repository',
+                           '/api/tool_shed_repositories/{id}/status',
+                           controller='tool_shed_repositories',
+                           action='status',
+                           conditions=dict( method=[ "GET" ] ) )
+
+    webapp.mapper.connect( 'install_repository',
+                           '/api/tool_shed_repositories/install',
+                           controller='tool_shed_repositories',
+                           action='install',
+                           conditions=dict( method=[ 'POST' ] ) )
+
+    # Galaxy API for tool shed features.
+    webapp.mapper.resource( 'tool_shed_repository',
+                            'tool_shed_repositories',
+                            member={ 'repair_repository_revision': 'POST',
+                                     'exported_workflows': 'GET',
+                                     'import_workflow': 'POST',
+                                     'import_workflows': 'POST' },
+                            collection={ 'get_latest_installable_revision': 'POST',
+                                         'reset_metadata_on_installed_repositories': 'POST' },
+                            controller='tool_shed_repositories',
+                            name_prefix='tool_shed_repository_',
+                            path_prefix='/api',
+                            new={ 'install_repository_revision': 'POST' },
+                            parent_resources=dict( member_name='tool_shed_repository', collection_name='tool_shed_repositories' ) )
+
+    # ==== Trace/Metrics Logger
+    # Connect logger from app
+    if app.trace_logger:
+        webapp.trace_logger = app.trace_logger
+
+    # metrics logging API
+    # webapp.mapper.connect( "index", "/api/metrics",
+    #    controller="metrics", action="index", conditions=dict( method=["GET"] ) )
+    # webapp.mapper.connect( "show", "/api/metrics/{id}",
+    #    controller="metrics", action="show", conditions=dict( method=["GET"] ) )
+    webapp.mapper.connect( "create", "/api/metrics", controller="metrics",
+                           action="create", conditions=dict( method=["POST"] ) )
+
+
+def _add_item_tags_controller( webapp, name_prefix, path_prefix, **kwd ):
+    # Not simply using map.resource() because tag actions are keyed on name, not id
+    controller = "%stags" % name_prefix
+    name = "%stag" % name_prefix
+    path = "%s/tags" % path_prefix
+    map = webapp.mapper
+    # Allow viewing an item's tags.
+    map.connect(name, path,
+                controller=controller, action="index",
+                conditions=dict(method=["GET"]))
+    # Allow removing a tag from an item
+    map.connect("%s_delete" % name, "%s/tags/{tag_name}" % path_prefix,
+                controller=controller, action="delete",
+                conditions=dict(method=["DELETE"]))
+    # Allow creating a new tag by name
+    map.connect("%s_create" % name, "%s/tags/{tag_name}" % path_prefix,
+                controller=controller, action="create",
+                conditions=dict(method=["POST"]))
+    # Allow updating a tag's value
+    map.connect("%s_update" % name, "%s/tags/{tag_name}" % path_prefix,
+                controller=controller, action="update",
+                conditions=dict(method=["PUT"]))
+    # Allow showing a single tag by name
+    map.connect("%s_show" % name, "%s/tags/{tag_name}" % path_prefix,
+                controller=controller, action="show",
+                conditions=dict(method=["GET"]))
+
+
+def _add_item_extended_metadata_controller( webapp, name_prefix, path_prefix, **kwd ):
+    controller = "%sextended_metadata" % name_prefix
+    name = "%sextended_metadata" % name_prefix
+    webapp.mapper.resource(name, "extended_metadata", path_prefix=path_prefix, controller=controller)
+
+
+def _add_item_annotation_controller( webapp, name_prefix, path_prefix, **kwd ):
+    controller = "%sannotations" % name_prefix
+    name = "%sannotation" % name_prefix
+    webapp.mapper.resource(name, "annotation", path_prefix=path_prefix, controller=controller)
+
+
+def _add_item_provenance_controller( webapp, name_prefix, path_prefix, **kwd ):
+    controller = "%sprovenance" % name_prefix
+    name = "%sprovenance" % name_prefix
+    webapp.mapper.resource(name, "provenance", path_prefix=path_prefix, controller=controller)
+
+
+def wrap_in_middleware( app, global_conf, **local_conf ):
+    """
+    Based on the configuration, wrap `app` in a set of common and useful
+    middleware.
+    """
+    webapp = app
+
+    # Merge the global and local configurations
+    conf = global_conf.copy()
+    conf.update(local_conf)
+    debug = asbool( conf.get( 'debug', False ) )
+    # First put into place httpexceptions, which must be most closely
+    # wrapped around the application (it can interact poorly with
+    # other middleware):
+    app = httpexceptions.make_middleware( app, conf )
+    log.debug( "Enabling 'httpexceptions' middleware" )
+    # Statsd request timing and profiling
+    statsd_host = conf.get('statsd_host', None)
+    if statsd_host:
+        from galaxy.web.framework.middleware.statsd import StatsdMiddleware
+        app = StatsdMiddleware( app,
+                                statsd_host,
+                                conf.get('statsd_port', 8125),
+                                conf.get('statsd_prefix', 'galaxy') )
+        log.debug( "Enabling 'statsd' middleware" )
+    # If we're using remote_user authentication, add middleware that
+    # protects Galaxy from improperly configured authentication in the
+    # upstream server
+    single_user = conf.get( 'single_user', None )
+    use_remote_user = asbool(conf.get( 'use_remote_user', False )) or single_user
+    if use_remote_user:
+        from galaxy.web.framework.middleware.remoteuser import RemoteUser
+        app = RemoteUser( app, maildomain=conf.get( 'remote_user_maildomain', None ),
+                          display_servers=util.listify( conf.get( 'display_servers', '' ) ),
+                          single_user=single_user,
+                          admin_users=conf.get( 'admin_users', '' ).split( ',' ),
+                          remote_user_header=conf.get( 'remote_user_header', 'HTTP_REMOTE_USER' ),
+                          remote_user_secret_header=conf.get('remote_user_secret', None),
+                          normalize_remote_user_email=conf.get('normalize_remote_user_email', False))
+    # The recursive middleware allows for including requests in other
+    # requests or forwarding of requests, all on the server side.
+    if asbool(conf.get('use_recursive', True)):
+        from paste import recursive
+        app = recursive.RecursiveMiddleware( app, conf )
+        log.debug( "Enabling 'recursive' middleware" )
+    # If sentry logging is enabled, log here before propagating up to
+    # the error middleware
+    sentry_dsn = conf.get( 'sentry_dsn', None )
+    if sentry_dsn:
+        from galaxy.web.framework.middleware.sentry import Sentry
+        app = Sentry( app, sentry_dsn )
+    # Various debug middleware that can only be turned on if the debug
+    # flag is set, either because they are insecure or greatly hurt
+    # performance
+    if debug:
+        # Middleware to check for WSGI compliance
+        if asbool( conf.get( 'use_lint', False ) ):
+            from paste import lint
+            app = lint.make_middleware( app, conf )
+            log.debug( "Enabling 'lint' middleware" )
+        # Middleware to run the python profiler on each request
+        if asbool( conf.get( 'use_profile', False ) ):
+            from paste.debug import profile
+            app = profile.ProfileMiddleware( app, conf )
+            log.debug( "Enabling 'profile' middleware" )
+    if debug and asbool( conf.get( 'use_interactive', False ) ) and not process_is_uwsgi:
+        # Interactive exception debugging is dangerous if publicly
+        # accessible. If not enabled, we'll use the regular error printing
+        # middleware.
+        from weberror import evalexception
+        app = evalexception.EvalException( app, conf,
+                                           templating_formatters=build_template_error_formatters() )
+        log.debug( "Enabling 'eval exceptions' middleware" )
+    else:
+        if debug and asbool( conf.get( 'use_interactive', False ) ) and process_is_uwsgi:
+            log.error("Interactive debugging middleware is enabled in your configuration "
+                      "but this is a uwsgi process.  Refusing to wrap in interactive error middleware.")
+        # Not in interactive debug mode, just use the regular error middleware
+        import galaxy.web.framework.middleware.error
+        app = galaxy.web.framework.middleware.error.ErrorMiddleware( app, conf )
+        log.debug( "Enabling 'error' middleware" )
+    # Transaction logging (apache access.log style)
+    if asbool( conf.get( 'use_translogger', True ) ):
+        from galaxy.web.framework.middleware.translogger import TransLogger
+        app = TransLogger( app )
+        log.debug( "Enabling 'trans logger' middleware" )
+    # X-Forwarded-Host handling
+    from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware
+    app = XForwardedHostMiddleware( app )
+    log.debug( "Enabling 'x-forwarded-host' middleware" )
+    # Request ID middleware
+    from galaxy.web.framework.middleware.request_id import RequestIDMiddleware
+    app = RequestIDMiddleware( app )
+    log.debug( "Enabling 'Request ID' middleware" )
+
+    # api batch call processing middleware
+    from galaxy.web.framework.middleware.batch import BatchMiddleware
+    app = BatchMiddleware( webapp, app, {})
+    log.debug( "Enabling 'Batch' middleware" )
+
+    return app
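+
+# For orientation: with the defaults above, a request passes through the
+# resulting stack roughly outermost-to-innermost as follows (exact membership
+# depends on configuration; lint/profile appear only in debug mode):
+#
+#     Batch -> RequestID -> XForwardedHost -> TransLogger
+#         -> Error (or EvalException) -> Sentry -> Recursive
+#         -> RemoteUser -> Statsd -> httpexceptions -> webapp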
+
+
+def wrap_in_static( app, global_conf, plugin_frameworks=None, **local_conf ):
+    from galaxy.web.framework.middleware.static import CacheableStaticURLParser as Static
+    urlmap, cache_time = galaxy.web.framework.webapp.build_url_map( app, global_conf, local_conf )
+    # wrap any static dirs for plugins
+    plugin_frameworks = plugin_frameworks or []
+    for framework in plugin_frameworks:
+        if framework and framework.serves_static:
+            # invert control: let each plugin find its own static dirs
+            for plugin_url, plugin_static_path in framework.get_static_urls_and_paths():
+                plugin_url = '/plugins/' + plugin_url
+                urlmap[( plugin_url )] = Static( plugin_static_path, cache_time )
+                log.debug( 'added url, path to static middleware: %s, %s', plugin_url, plugin_static_path )
+
+    # URL mapper becomes the root webapp
+    return urlmap
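+
+# e.g. a visualization plugin named "scatterplot" that serves static content
+# would (illustratively) end up mapped as:
+#
+#     urlmap['/plugins/visualizations/scatterplot/static'] = Static( path, cache_time )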
diff --git a/lib/galaxy/webapps/galaxy/controllers/__init__.py b/lib/galaxy/webapps/galaxy/controllers/__init__.py
new file mode 100644
index 0000000..ba2d2e6
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/__init__.py
@@ -0,0 +1,3 @@
+"""
+Galaxy web controllers.
+"""
diff --git a/lib/galaxy/webapps/galaxy/controllers/admin.py b/lib/galaxy/webapps/galaxy/controllers/admin.py
new file mode 100644
index 0000000..d061506
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -0,0 +1,905 @@
+import imp
+import logging
+import os
+from sqlalchemy.sql import expression
+
+import galaxy.queue_worker
+import galaxy.util
+from galaxy import model
+from galaxy import web
+from galaxy.actions.admin import AdminActions
+from galaxy.exceptions import MessageException
+from galaxy.model import tool_shed_install as install_model
+from galaxy.model.util import pgcalc
+from galaxy.util import nice_size, sanitize_text, url_get
+from galaxy.util.odict import odict
+from galaxy.web import url_for
+from galaxy.web.base.controller import BaseUIController, UsesQuotaMixin
+from galaxy.web.base.controllers.admin import Admin
+from galaxy.web.framework.helpers import grids, time_ago
+from galaxy.web.params import QuotaParamParser
+from galaxy.tools import global_tool_errors
+from tool_shed.util import common_util
+from tool_shed.util import encoding_util
+from tool_shed.util.web_util import escape
+
+log = logging.getLogger( __name__ )
+
+
+class UserListGrid( grids.Grid ):
+
+    class EmailColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, user ):
+            return escape(user.email)
+
+    class UserNameColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, user ):
+            if user.username:
+                return escape(user.username)
+            return 'not set'
+
+    class StatusColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, user ):
+            if user.purged:
+                return "purged"
+            elif user.deleted:
+                return "deleted"
+            return ""
+
+    class GroupsColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, user ):
+            if user.groups:
+                return len( user.groups )
+            return 0
+
+    class RolesColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, user ):
+            if user.roles:
+                return len( user.roles )
+            return 0
+
+    class ExternalColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, user ):
+            if user.external:
+                return 'yes'
+            return 'no'
+
+    class LastLoginColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, user ):
+            if user.galaxy_sessions:
+                return self.format( user.galaxy_sessions[ 0 ].update_time )
+            return 'never'
+
+    class TimeCreatedColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, user ):
+            return user.create_time.strftime('%x')
+
+    class ActivatedColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, user ):
+            if user.active:
+                return 'Y'
+            else:
+                return 'N'
+
+    # Grid definition
+    title = "Users"
+    model_class = model.User
+    template = '/admin/user/grid.mako'
+    default_sort_key = "email"
+    columns = [
+        EmailColumn( "Email",
+                     key="email",
+                     model_class=model.User,
+                     link=( lambda item: dict( operation="information", id=item.id, webapp="galaxy" ) ),
+                     attach_popup=True,
+                     filterable="advanced" ),
+        UserNameColumn( "User Name",
+                        key="username",
+                        model_class=model.User,
+                        attach_popup=False,
+                        filterable="advanced" ),
+        GroupsColumn( "Groups", attach_popup=False ),
+        RolesColumn( "Roles", attach_popup=False ),
+        ExternalColumn( "External", attach_popup=False ),
+        LastLoginColumn( "Last Login", format=time_ago ),
+        StatusColumn( "Status", attach_popup=False ),
+        TimeCreatedColumn( "Created", attach_popup=False ),
+        ActivatedColumn( "Activated", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( "Create new user", dict( controller='admin', action='users', operation='create', webapp="galaxy" ) )
+    ]
+    operations = [
+        grids.GridOperation( "Manage Roles and Groups",
+                             condition=( lambda item: not item.deleted ),
+                             allow_multiple=False,
+                             url_args=dict( webapp="galaxy", action="manage_roles_and_groups_for_user" ) ),
+        grids.GridOperation( "Reset Password",
+                             condition=( lambda item: not item.deleted ),
+                             allow_multiple=True,
+                             allow_popup=False,
+                             url_args=dict( webapp="galaxy", action="reset_user_password" ) ),
+        grids.GridOperation( "Recalculate Disk Usage",
+                             condition=( lambda item: not item.deleted ),
+                             allow_multiple=False,
+                             url_args=dict( webapp="galaxy", action="recalculate_user_disk_usage" ) )
+    ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True, purged=False ) ),
+        grids.GridColumnFilter( "Purged", args=dict( purged=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+    ]
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+    def get_current_item( self, trans, **kwargs ):
+        return trans.user
+
+
+class RoleListGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, role ):
+            return escape(role.name)
+
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, role ):
+            if role.description:
+                return escape(role.description)
+            return ''
+
+    class TypeColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, role ):
+            return role.type
+
+    class StatusColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, role ):
+            if role.deleted:
+                return "deleted"
+            return ""
+
+    class GroupsColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, role ):
+            if role.groups:
+                return len( role.groups )
+            return 0
+
+    class UsersColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, role ):
+            if role.users:
+                return len( role.users )
+            return 0
+
+    # Grid definition
+    title = "Roles"
+    model_class = model.Role
+    template = '/admin/dataset_security/role/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: dict( operation="Manage users and groups", id=item.id, webapp="galaxy" ) ),
+                    model_class=model.Role,
+                    attach_popup=True,
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           key='description',
+                           model_class=model.Role,
+                           attach_popup=False,
+                           filterable="advanced" ),
+        TypeColumn( "Type",
+                    key='type',
+                    model_class=model.Role,
+                    attach_popup=False,
+                    filterable="advanced" ),
+        GroupsColumn( "Groups", attach_popup=False ),
+        UsersColumn( "Users", attach_popup=False ),
+        StatusColumn( "Status", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( "Add new role", dict( controller='admin', action='roles', operation='create' ) )
+    ]
+    operations = [ grids.GridOperation( "Edit",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( webapp="galaxy", action="rename_role" ) ),
+                   grids.GridOperation( "Delete",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="mark_role_deleted" ) ),
+                   grids.GridOperation( "Undelete",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="undelete_role" ) ),
+                   grids.GridOperation( "Purge",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="purge_role" ) ) ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+    ]
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+    def apply_query_filter( self, trans, query, **kwargs ):
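+        # Hide private (per-user) roles; Galaxy manages those automatically,
+        # so they should not be edited through this grid.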
+        return query.filter( model.Role.type != model.Role.types.PRIVATE )
+
+
+class GroupListGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, group ):
+            return escape(group.name)
+
+    class StatusColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, group ):
+            if group.deleted:
+                return "deleted"
+            return ""
+
+    class RolesColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, group ):
+            if group.roles:
+                return len( group.roles )
+            return 0
+
+    class UsersColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, group ):
+            if group.members:
+                return len( group.members )
+            return 0
+
+    # Grid definition
+    title = "Groups"
+    model_class = model.Group
+    template = '/admin/dataset_security/group/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: dict( operation="Manage users and roles", id=item.id, webapp="galaxy" ) ),
+                    model_class=model.Group,
+                    attach_popup=True,
+                    filterable="advanced" ),
+        UsersColumn( "Users", attach_popup=False ),
+        RolesColumn( "Roles", attach_popup=False ),
+        StatusColumn( "Status", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( "Add new group", dict( controller='admin', action='groups', operation='create', webapp="galaxy" ) )
+    ]
+    operations = [ grids.GridOperation( "Rename",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( webapp="galaxy", action="rename_group" ) ),
+                   grids.GridOperation( "Delete",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="mark_group_deleted" ) ),
+                   grids.GridOperation( "Undelete",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="undelete_group" ) ),
+                   grids.GridOperation( "Purge",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="purge_group" ) ) ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+    ]
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+
+class QuotaListGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, quota ):
+            return escape(quota.name)
+
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, quota ):
+            if quota.description:
+                return escape(quota.description)
+            return ''
+
+    class AmountColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, quota ):
+            return quota.operation + quota.display_amount
+
+    class StatusColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, quota ):
+            if quota.deleted:
+                return "deleted"
+            elif quota.default:
+                return "<strong>default for %s users</strong>" % quota.default[0].type
+            return ""
+
+    class UsersColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, quota ):
+            if quota.users:
+                return len( quota.users )
+            return 0
+
+    class GroupsColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, quota ):
+            if quota.groups:
+                return len( quota.groups )
+            return 0
+
+    # Grid definition
+    title = "Quotas"
+    model_class = model.Quota
+    template = '/admin/quota/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: dict( operation="Change amount", id=item.id, webapp="galaxy" ) ),
+                    model_class=model.Quota,
+                    attach_popup=True,
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           key='description',
+                           model_class=model.Quota,
+                           attach_popup=False,
+                           filterable="advanced" ),
+        AmountColumn( "Amount",
+                      key='amount',
+                      model_class=model.Quota,
+                      attach_popup=False,
+                      filterable="advanced" ),
+        UsersColumn( "Users", attach_popup=False ),
+        GroupsColumn( "Groups", attach_popup=False ),
+        StatusColumn( "Status", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( "Add new quota", dict( controller='admin', action='quotas', operation='create' ) )
+    ]
+    operations = [ grids.GridOperation( "Rename",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( webapp="galaxy", action="rename_quota" ) ),
+                   grids.GridOperation( "Change amount",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( webapp="galaxy", action="edit_quota" ) ),
+                   grids.GridOperation( "Manage users and groups",
+                                        condition=( lambda item: not item.default and not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( webapp="galaxy", action="manage_users_and_groups_for_quota" ) ),
+                   grids.GridOperation( "Set as different type of default",
+                                        condition=( lambda item: item.default ),
+                                        allow_multiple=False,
+                                        url_args=dict( webapp="galaxy", action="set_quota_default" ) ),
+                   grids.GridOperation( "Set as default",
+                                        condition=( lambda item: not item.default and not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( webapp="galaxy", action="set_quota_default" ) ),
+                   grids.GridOperation( "Unset as default",
+                                        condition=( lambda item: item.default and not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( webapp="galaxy", action="unset_quota_default" ) ),
+                   grids.GridOperation( "Delete",
+                                        condition=( lambda item: not item.deleted and not item.default ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="mark_quota_deleted" ) ),
+                   grids.GridOperation( "Undelete",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="undelete_quota" ) ),
+                   grids.GridOperation( "Purge",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( webapp="galaxy", action="purge_quota" ) ) ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+    ]
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+
+class ToolVersionListGrid( grids.Grid ):
+
+    class ToolIdColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, tool_version ):
+            toolbox = trans.app.toolbox
+            if toolbox.has_tool( tool_version.tool_id, exact=True ):
+                link = url_for( controller='tool_runner', tool_id=tool_version.tool_id )
+                link_str = '<a target="_blank" href="%s">' % link
+                return '<div class="count-box state-color-ok">%s%s</a></div>' % ( link_str, tool_version.tool_id )
+            return tool_version.tool_id
+
+    class ToolVersionsColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, tool_version ):
+            tool_ids_str = ''
+            toolbox = trans.app.toolbox
+            for tool_id in tool_version.get_version_ids( trans.app ):
+                if toolbox.has_tool( tool_id, exact=True ):
+                    link = url_for( controller='tool_runner', tool_id=tool_id )
+                    link_str = '<a target="_blank" href="%s">' % link
+                    tool_ids_str += '<div class="count-box state-color-ok">%s%s</a></div><br/>' % ( link_str, tool_id )
+                else:
+                    tool_ids_str += '%s<br/>' % tool_id
+            return tool_ids_str
+
+    # Grid definition
+    title = "Tool versions"
+    model_class = install_model.ToolVersion
+    template = '/admin/tool_version/grid.mako'
+    default_sort_key = "tool_id"
+    columns = [
+        ToolIdColumn( "Tool id",
+                      key='tool_id',
+                      attach_popup=False ),
+        ToolVersionsColumn( "Version lineage by tool id (parent/child ordered)" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search tool id",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = []
+    operations = []
+    standard_filters = []
+    default_filter = {}
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+    def build_initial_query( self, trans, **kwd ):
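+        # Tool versions live in the install model, so build the query against
+        # its session rather than the default model's.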
+        return trans.install_model.context.query( self.model_class )
+
+
+class AdminGalaxy( BaseUIController, Admin, AdminActions, UsesQuotaMixin, QuotaParamParser ):
+
+    user_list_grid = UserListGrid()
+    role_list_grid = RoleListGrid()
+    group_list_grid = GroupListGrid()
+    quota_list_grid = QuotaListGrid()
+    tool_version_list_grid = ToolVersionListGrid()
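+    # Reusable grid operations; the conditions key off each item's deleted and
+    # purged flags, so e.g. only deleted, unpurged items offer "Purge".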
+    delete_operation = grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), allow_multiple=True )
+    undelete_operation = grids.GridOperation( "Undelete", condition=( lambda item: item.deleted and not item.purged ), allow_multiple=True )
+    purge_operation = grids.GridOperation( "Purge", condition=( lambda item: item.deleted and not item.purged ), allow_multiple=True )
+
+    @web.expose
+    @web.require_admin
+    def quotas( self, trans, **kwargs ):
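+        # Grid operations arrive as an 'operation' keyword from the grid's
+        # popup menus; dispatch to the matching handler, or fall back to
+        # rendering the grid itself.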
+        if 'operation' in kwargs:
+            operation = kwargs.pop('operation').lower()
+            if operation == "quotas":
+                return self.quota( trans, **kwargs )
+            if operation == "create":
+                return self.create_quota( trans, **kwargs )
+            if operation == "delete":
+                return self.mark_quota_deleted( trans, **kwargs )
+            if operation == "undelete":
+                return self.undelete_quota( trans, **kwargs )
+            if operation == "purge":
+                return self.purge_quota( trans, **kwargs )
+            if operation == "change amount":
+                return self.edit_quota( trans, **kwargs )
+            if operation == "manage users and groups":
+                return self.manage_users_and_groups_for_quota( trans, **kwargs )
+            if operation == "rename":
+                return self.rename_quota( trans, **kwargs )
+            if operation == "edit":
+                return self.edit_quota( trans, **kwargs )
+        # Render the list view
+        return self.quota_list_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_admin
+    def create_quota( self, trans, **kwd ):
+        params = self.get_quota_params( kwd )
+        if params.get( 'create_quota_button', False ):
+            try:
+                quota, message = self._create_quota( params )
+                return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                  action='quotas',
+                                                                  webapp=params.webapp,
+                                                                  message=sanitize_text( message ),
+                                                                  status='done' ) )
+            except MessageException as e:
+                params.message = str( e )
+                params.status = 'error'
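+        # Build the in/out selection lists of users and groups for the form.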
+        in_users = map( int, params.in_users )
+        in_groups = map( int, params.in_groups )
+        new_in_users = []
+        new_in_groups = []
+        for user in trans.sa_session.query( trans.app.model.User ) \
+                                    .filter( trans.app.model.User.table.c.deleted == expression.false() ) \
+                                    .order_by( trans.app.model.User.table.c.email ):
+            if user.id in in_users:
+                new_in_users.append( ( user.id, user.email ) )
+            else:
+                params.out_users.append( ( user.id, user.email ) )
+        for group in trans.sa_session.query( trans.app.model.Group ) \
+                                     .filter( trans.app.model.Group.table.c.deleted == expression.false() ) \
+                                     .order_by( trans.app.model.Group.table.c.name ):
+            if group.id in in_groups:
+                new_in_groups.append( ( group.id, group.name ) )
+            else:
+                params.out_groups.append( ( group.id, group.name ) )
+        return trans.fill_template( '/admin/quota/quota_create.mako',
+                                    webapp=params.webapp,
+                                    name=params.name,
+                                    description=params.description,
+                                    amount=params.amount,
+                                    operation=params.operation,
+                                    default=params.default,
+                                    in_users=new_in_users,
+                                    out_users=params.out_users,
+                                    in_groups=new_in_groups,
+                                    out_groups=params.out_groups,
+                                    message=params.message,
+                                    status=params.status )
+
+    @web.expose
+    @web.require_admin
+    def rename_quota( self, trans, **kwd ):
+        quota, params = self._quota_op( trans, 'rename_quota_button', self._rename_quota, kwd )
+        if not quota:
+            return
+        return trans.fill_template( '/admin/quota/quota_rename.mako',
+                                    id=params.id,
+                                    name=params.name or quota.name,
+                                    description=params.description or quota.description,
+                                    webapp=params.webapp,
+                                    message=params.message,
+                                    status=params.status )
+
+    @web.expose
+    @web.require_admin
+    def manage_users_and_groups_for_quota( self, trans, **kwd ):
+        quota, params = self._quota_op( trans, 'quota_members_edit_button', self._manage_users_and_groups_for_quota, kwd )
+        if not quota:
+            return
+        in_users = []
+        out_users = []
+        in_groups = []
+        out_groups = []
+        for user in trans.sa_session.query( trans.app.model.User ) \
+                                    .filter( trans.app.model.User.table.c.deleted == expression.false() ) \
+                                    .order_by( trans.app.model.User.table.c.email ):
+            if user in [ x.user for x in quota.users ]:
+                in_users.append( ( user.id, user.email ) )
+            else:
+                out_users.append( ( user.id, user.email ) )
+        for group in trans.sa_session.query( trans.app.model.Group ) \
+                                     .filter( trans.app.model.Group.table.c.deleted == expression.false()) \
+                                     .order_by( trans.app.model.Group.table.c.name ):
+            if group in [ x.group for x in quota.groups ]:
+                in_groups.append( ( group.id, group.name ) )
+            else:
+                out_groups.append( ( group.id, group.name ) )
+        return trans.fill_template( '/admin/quota/quota.mako',
+                                    id=params.id,
+                                    name=quota.name,
+                                    in_users=in_users,
+                                    out_users=out_users,
+                                    in_groups=in_groups,
+                                    out_groups=out_groups,
+                                    webapp=params.webapp,
+                                    message=params.message,
+                                    status=params.status )
+
+    @web.expose
+    @web.require_admin
+    def edit_quota( self, trans, **kwd ):
+        quota, params = self._quota_op( trans, 'edit_quota_button', self._edit_quota, kwd )
+        if not quota:
+            return
+        return trans.fill_template( '/admin/quota/quota_edit.mako',
+                                    id=params.id,
+                                    operation=params.operation or quota.operation,
+                                    display_amount=params.amount or quota.display_amount,
+                                    webapp=params.webapp,
+                                    message=params.message,
+                                    status=params.status )
+
+    @web.expose
+    @web.require_admin
+    def set_quota_default( self, trans, **kwd ):
+        quota, params = self._quota_op( trans, 'set_default_quota_button', self._set_quota_default, kwd )
+        if not quota:
+            return
+        if params.default:
+            default = params.default
+        elif quota.default:
+            default = quota.default[0].type
+        else:
+            default = "no"
+        return trans.fill_template( '/admin/quota/quota_set_default.mako',
+                                    id=params.id,
+                                    default=default,
+                                    webapp=params.webapp,
+                                    message=params.message,
+                                    status=params.status )
+
+    @web.expose
+    @web.require_admin
+    def unset_quota_default( self, trans, **kwd ):
+        quota, params = self._quota_op( trans, True, self._unset_quota_default, kwd )
+        if not quota:
+            return
+        return trans.response.send_redirect( web.url_for( controller='admin',
+                                                          action='quotas',
+                                                          webapp=params.webapp,
+                                                          message=sanitize_text( params.message ),
+                                                          status='error' ) )
+
+    @web.expose
+    @web.require_admin
+    def mark_quota_deleted( self, trans, **kwd ):
+        quota, params = self._quota_op( trans, True, self._mark_quota_deleted, kwd, listify=True )
+        if not quota:
+            return
+        return trans.response.send_redirect( web.url_for( controller='admin',
+                                                          action='quotas',
+                                                          webapp=params.webapp,
+                                                          message=sanitize_text( params.message ),
+                                                          status='error' ) )
+
+    @web.expose
+    @web.require_admin
+    def undelete_quota( self, trans, **kwd ):
+        quota, params = self._quota_op( trans, True, self._undelete_quota, kwd, listify=True )
+        if not quota:
+            return
+        return trans.response.send_redirect( web.url_for( controller='admin',
+                                                          action='quotas',
+                                                          webapp=params.webapp,
+                                                          message=sanitize_text( params.message ),
+                                                          status='error' ) )
+
+    @web.expose
+    @web.require_admin
+    def purge_quota( self, trans, **kwd ):
+        quota, params = self._quota_op( trans, True, self._purge_quota, kwd, listify=True )
+        if not quota:
+            return
+        return trans.response.send_redirect( web.url_for( controller='admin',
+                                                          action='quotas',
+                                                          webapp=params.webapp,
+                                                          message=sanitize_text( params.message ),
+                                                          status='error' ) )
+
+    def _quota_op( self, trans, do_op, op_method, kwd, listify=False ):
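+        """
+        Shared plumbing for the quota views above.  Load the quota named by
+        params.id (or, when listify=True, every quota in the id list), run
+        op_method once do_op permits it, and return ( quota, params ) so the
+        caller can render a form, or ( None, <redirect> ) when the operation
+        completed or the quota could not be loaded.
+        """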
+        params = self.get_quota_params( kwd )
+        if listify:
+            quota = []
+            messages = []
+            for id in galaxy.util.listify( params.id ):
+                try:
+                    quota.append( self.get_quota( trans, id ) )
+                except MessageException as e:
+                    messages.append( str( e ) )
+            if messages:
+                return None, trans.response.send_redirect( web.url_for( controller='admin',
+                                                                        action='quotas',
+                                                                        webapp=params.webapp,
+                                                                        message=sanitize_text( ', '.join( messages ) ),
+                                                                        status='error' ) )
+        else:
+            try:
+                quota = self.get_quota( trans, params.id, deleted=False )
+            except MessageException as e:
+                return None, trans.response.send_redirect( web.url_for( controller='admin',
+                                                                        action='quotas',
+                                                                        webapp=params.webapp,
+                                                                        message=sanitize_text( str( e ) ),
+                                                                        status='error' ) )
+        if do_op is True or ( do_op is not False and params.get( do_op, False ) ):
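+            # do_op is either True (run unconditionally) or the name of the
+            # form's submit button, which must be present in the params.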
+            try:
+                message = op_method( quota, params )
+                return None, trans.response.send_redirect( web.url_for( controller='admin',
+                                                                        action='quotas',
+                                                                        webapp=params.webapp,
+                                                                        message=sanitize_text( message ),
+                                                                        status='done' ) )
+            except MessageException as e:
+                params.message = e.err_msg
+                params.status = e.type
+        return quota, params
+
+    @web.expose
+    @web.require_admin
+    def impersonate( self, trans, email=None, **kwd ):
+        if not trans.app.config.allow_user_impersonation:
+            return trans.show_error_message( "User impersonation is not enabled in this instance of Galaxy." )
+        message = ''
+        status = 'done'
+        emails = None
+        if email is not None:
+            user = trans.sa_session.query( trans.app.model.User ).filter_by( email=email ).first()
+            if user:
+                trans.handle_user_logout()
+                trans.handle_user_login(user)
+                message = 'You are now logged in as %s, <a target="_top" href="%s">return to the home page</a>' % ( email, url_for( controller='root' ) )
+                emails = []
+            else:
+                message = 'Invalid user selected'
+                status = 'error'
+        if emails is None:
+            emails = [ u.email for u in trans.sa_session.query( trans.app.model.User ).enable_eagerloads( False ).all() ]
+        return trans.fill_template( 'admin/impersonate.mako', emails=emails, message=message, status=status )
+
+    def check_for_tool_dependencies( self, trans, migration_stage ):
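+        """
+        Parse the tools XML file for migration_stage and ask the tool shed
+        which tool dependencies each listed repository defines, returning
+        ( repository_name, tool_dependencies ) tuples for display.
+        """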
+        # Get the 000x_tools.xml file associated with migration_stage.
+        tools_xml_file_path = os.path.abspath( os.path.join( trans.app.config.root, 'scripts', 'migrate_tools', '%04d_tools.xml' % migration_stage ) )
+        tree = galaxy.util.parse_xml( tools_xml_file_path )
+        root = tree.getroot()
+        tool_shed = root.get( 'name' )
+        shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed )
+        repo_name_dependency_tups = []
+        if shed_url:
+            for elem in root:
+                if elem.tag == 'repository':
+                    tool_dependencies = []
+                    tool_dependencies_dict = {}
+                    repository_name = elem.get( 'name' )
+                    changeset_revision = elem.get( 'changeset_revision' )
+                    params = dict( name=repository_name, owner='devteam', changeset_revision=changeset_revision )
+                    pathspec = [ 'repository', 'get_tool_dependencies' ]
+                    text = url_get( shed_url, password_mgr=self.app.tool_shed_registry.url_auth( shed_url ), pathspec=pathspec, params=params )
+                    if text:
+                        tool_dependencies_dict = encoding_util.tool_shed_decode( text )
+                        for dependency_key, requirements_dict in tool_dependencies_dict.items():
+                            tool_dependency_name = requirements_dict[ 'name' ]
+                            tool_dependency_version = requirements_dict[ 'version' ]
+                            tool_dependency_type = requirements_dict[ 'type' ]
+                            tool_dependency_readme = requirements_dict.get( 'readme', '' )
+                            tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
+                    repo_name_dependency_tups.append( ( repository_name, tool_dependencies ) )
+        return repo_name_dependency_tups
+
+    @web.expose
+    @web.require_admin
+    def review_tool_migration_stages( self, trans, **kwd ):
+        message = escape( galaxy.util.restore_text( kwd.get( 'message', '' ) ) )
+        status = galaxy.util.restore_text( kwd.get( 'status', 'done' ) )
+        migration_stages_dict = odict()
+        migration_modules = []
+        migration_scripts_dir = os.path.abspath( os.path.join( trans.app.config.root, 'lib', 'tool_shed', 'galaxy_install', 'migrate', 'versions' ) )
+        migration_scripts_dir_contents = os.listdir( migration_scripts_dir )
+        for item in migration_scripts_dir_contents:
+            if os.path.isfile( os.path.join( migration_scripts_dir, item ) ) and item.endswith( '.py' ):
+                module = item.replace( '.py', '' )
+                migration_modules.append( module )
+        if migration_modules:
+            migration_modules.sort()
+            # Remove the 0001_tools.py script since it is the seed.
+            migration_modules = migration_modules[ 1: ]
+            # Reverse the list so viewing will be newest to oldest.
+            migration_modules.reverse()
+        for migration_module in migration_modules:
+            migration_stage = int( migration_module.replace( '_tools', '' ) )
+            repo_name_dependency_tups = self.check_for_tool_dependencies( trans, migration_stage )
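+            # Import the migration script via imp so that its module docstring
+            # can be displayed as the description of this migration stage.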
+            open_file_obj, file_name, description = imp.find_module( migration_module, [ migration_scripts_dir ] )
+            imported_module = imp.load_module( 'upgrade', open_file_obj, file_name, description )
+            migration_info = imported_module.__doc__
+            open_file_obj.close()
+            migration_stages_dict[ migration_stage ] = ( migration_info, repo_name_dependency_tups )
+        return trans.fill_template( 'admin/review_tool_migration_stages.mako',
+                                    migration_stages_dict=migration_stages_dict,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def tool_errors( self, trans, **kwd ):
+        return trans.fill_template('admin/tool_errors.mako', tool_errors=global_tool_errors.error_stack)
+
+    @web.expose
+    @web.require_admin
+    def view_datatypes_registry( self, trans, **kwd ):
+        message = escape( galaxy.util.restore_text( kwd.get( 'message', '' ) ) )
+        status = galaxy.util.restore_text( kwd.get( 'status', 'done' ) )
+        return trans.fill_template( 'admin/view_datatypes_registry.mako', message=message, status=status )
+
+    @web.expose
+    @web.require_admin
+    def view_tool_data_tables( self, trans, **kwd ):
+        message = escape( galaxy.util.restore_text( kwd.get( 'message', '' ) ) )
+        status = galaxy.util.restore_text( kwd.get( 'status', 'done' ) )
+        return trans.fill_template( 'admin/view_data_tables_registry.mako', message=message, status=status )
+
+    @web.expose
+    @web.require_admin
+    def display_applications( self, trans, **kwd ):
+        return trans.fill_template( 'admin/view_display_applications.mako', display_applications=trans.app.datatypes_registry.display_applications )
+
+    @web.expose
+    @web.require_admin
+    def reload_display_application( self, trans, **kwd ):
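+        # Ask the other Galaxy processes to reload via the control queue
+        # (noop_self means this process skips the message), then reload here
+        # and report the outcome.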
+        galaxy.queue_worker.send_control_task(trans.app,
+                                              'reload_display_application',
+                                              noop_self=True,
+                                              kwargs={'display_application_ids': kwd.get( 'id' )} )
+        reloaded, failed = trans.app.datatypes_registry.reload_display_applications( kwd.get( 'id' ) )
+        if not reloaded and failed:
+            return trans.show_error_message( 'Unable to reload any of the %i requested display applications ("%s").'
+                                             % ( len( failed ), '", "'.join( failed ) ) )
+        if failed:
+            return trans.show_warn_message( 'Reloaded %i display applications ("%s"), but failed to reload %i display applications ("%s").'
+                                            % ( len( reloaded ), '", "'.join( reloaded ), len( failed ), '", "'.join( failed ) ) )
+        if not reloaded:
+            return trans.show_warn_message( 'You need to request at least one display application to reload.' )
+        return trans.show_ok_message( 'Reloaded %i requested display applications ("%s").' % ( len( reloaded ), '", "'.join( reloaded ) ) )
+
+    @web.expose
+    @web.require_admin
+    def recalculate_user_disk_usage( self, trans, **kwd ):
+        user_id = kwd.get( 'id', None )
+        user = trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( user_id ) )
+        if not user:
+            return trans.show_error_message( "User not found for id (%s)" % sanitize_text( str( user_id ) ) )
+        engine = None
+        if trans.app.config.database_connection:
+            engine = trans.app.config.database_connection.split(':')[0]
+        if engine not in ( 'postgres', 'postgresql' ):
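+            # Non-PostgreSQL backends: recalculate in Python, looping until
+            # the stored usage is stable in case a job updates it concurrently.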
+            done = False
+            while not done:
+                current = user.get_disk_usage()
+                new = user.calculate_disk_usage()
+                trans.sa_session.refresh( user )
+                # If usage didn't change while we were recalculating, the new
+                # value can be trusted and we're done; otherwise recalculate.
+                if user.get_disk_usage() == current:
+                    done = True
+                if new not in (current, None):
+                    user.set_disk_usage( new )
+                    trans.sa_session.add( user )
+                    trans.sa_session.flush()
+        else:
+            # We can use the lightning fast pgcalc!
+            current = user.get_disk_usage()
+            new = pgcalc( self.sa_session, user.id )
+        # yes, still a small race condition between here and the flush
+        if new in ( current, None ):
+            message = 'Usage is unchanged at %s.' % nice_size( current )
+        else:
+            message = 'Usage has changed by %s to %s.' % ( nice_size( new - current ), nice_size( new ) )
+        return trans.response.send_redirect( web.url_for( controller='admin',
+                                                          action='users',
+                                                          message=sanitize_text( message ),
+                                                          status='info' ) )
diff --git a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
new file mode 100644
index 0000000..1c18fa1
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -0,0 +1,2121 @@
+import json
+import logging
+import os
+import shutil
+
+from six import string_types
+from sqlalchemy import or_
+
+import tool_shed.repository_types.util as rt_util
+from admin import AdminGalaxy
+from galaxy import util, web
+from galaxy.tools.deps import views
+from galaxy.web.form_builder import CheckboxField
+from tool_shed.galaxy_install import dependency_display
+from tool_shed.galaxy_install import install_manager
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
+from tool_shed.galaxy_install.grids import admin_toolshed_grids
+from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
+from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager
+from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
+from tool_shed.galaxy_install.tools import data_manager
+from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.tools import tool_version_manager
+from tool_shed.util import common_util
+from tool_shed.util import encoding_util
+from tool_shed.util import hg_util
+from tool_shed.util import readme_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import tool_util
+from tool_shed.util import workflow_util
+from tool_shed.util.web_util import escape
+
+log = logging.getLogger( __name__ )
+
+
+class AdminToolshed( AdminGalaxy ):
+
+    installed_repository_grid = admin_toolshed_grids.InstalledRepositoryGrid()
+    repository_installation_grid = admin_toolshed_grids.RepositoryInstallationGrid()
+    tool_dependency_grid = admin_toolshed_grids.ToolDependencyGrid()
+
+    @web.expose
+    @web.require_admin
+    def activate_repository( self, trans, **kwd ):
+        """Activate a repository that was deactivated but not uninstalled."""
+        repository_id = kwd[ 'id' ]
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        try:
+            trans.app.installed_repository_manager.activate_repository( repository )
+        except Exception as e:
+            error_message = "Error activating repository %s: %s" % ( escape( repository.name ), str( e ) )
+            log.exception( error_message )
+            message = '%s.<br/>You may be able to resolve this by uninstalling and then reinstalling the repository.  Click <a href="%s">here</a> to uninstall the repository.' \
+                % ( error_message, web.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) ) )
+            status = 'error'
+            return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                              action='manage_repository',
+                                                              id=repository_id,
+                                                              message=message,
+                                                              status=status ) )
+        message = 'The <b>%s</b> repository has been activated.' % escape( repository.name )
+        status = 'done'
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='browse_repositories',
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def browse_repository( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+        return trans.fill_template( '/admin/tool_shed_repository/browse_repository.mako',
+                                    repository=repository,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def browse_repositories( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd.pop( 'operation' ).lower()
+            if operation == "manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='manage_repository',
+                                                                  **kwd ) )
+            if operation == "get updates":
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='check_for_updates',
+                                                                  **kwd ) )
+            if operation == "update tool shed status":
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='update_tool_shed_status_for_installed_repository',
+                                                                  **kwd ) )
+            if operation == "reset to install":
+                kwd[ 'reset_repository' ] = True
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='reset_to_install',
+                                                                  **kwd ) )
+            if operation == "purge":
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='purge_repository',
+                                                                  **kwd ) )
+            if operation == "activate or reinstall":
+                repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+                if repository.uninstalled:
+                    # Since we're reinstalling the repository we need to find the latest changeset revision to which it can
+                    # be updated so that we can reset the metadata if necessary.  This will ensure that information about
+                    # repository dependencies and tool dependencies will be current.  Only allow selecting a different section
+                    # in the tool panel if the repository was uninstalled and it contained tools that should be displayed in
+                    # the tool panel.
+                    changeset_revision_dict = \
+                        trans.app.update_repository_manager.get_update_to_changeset_revision_and_ctx_rev( repository )
+                    current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
+                    current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
+                    if current_changeset_revision and current_ctx_rev:
+                        if current_ctx_rev == repository.ctx_rev:
+                            # The uninstalled repository is current.
+                            return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                              action='reselect_tool_panel_section',
+                                                                              **kwd ) )
+                        else:
+                            # The uninstalled repository has updates available in the tool shed.
+                            updated_repo_info_dict = \
+                                self.get_updated_repository_information( trans=trans,
+                                                                         repository_id=trans.security.encode_id( repository.id ),
+                                                                         repository_name=repository.name,
+                                                                         repository_owner=repository.owner,
+                                                                         changeset_revision=current_changeset_revision )
+                            json_repo_info_dict = json.dumps( updated_repo_info_dict )
+                            encoded_repo_info_dict = encoding_util.tool_shed_encode( json_repo_info_dict )
+                            kwd[ 'latest_changeset_revision' ] = current_changeset_revision
+                            kwd[ 'latest_ctx_rev' ] = current_ctx_rev
+                            kwd[ 'updated_repo_info_dict' ] = encoded_repo_info_dict
+                            return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                              action='reselect_tool_panel_section',
+                                                                              **kwd ) )
+                    else:
+                        message = "Unable to get latest revision for repository <b>%s</b> from " % escape( str( repository.name ) )
+                        message += "the Tool Shed, so repository re-installation is not possible at this time."
+                        status = "error"
+                        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                          action='browse_repositories',
+                                                                          message=message,
+                                                                          status=status ) )
+                else:
+                    return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                      action='activate_repository',
+                                                                      **kwd ) )
+            if operation == "deactivate or uninstall":
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='deactivate_or_uninstall_repository',
+                                                                  **kwd ) )
+            if operation == "install latest revision":
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='install_latest_repository_revision',
+                                                                  **kwd ) )
+            if operation == 'install':
+                # The user is attempting to install a white ghost.
+                kwd[ 'status' ] = 'error'
+                kwd[ 'message' ] = 'It seems you are attempting to install a "white ghost", which should instead be purged.'
+        return self.installed_repository_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def browse_tool_dependency( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
+        tool_dependency = tool_dependency_util.get_tool_dependency( trans.app, tool_dependency_ids[ 0 ] )
+        if tool_dependency.in_error_state:
+            message = "This tool dependency is not installed correctly (see the <b>Tool dependency installation error</b> below).  "
+            message += "Choose <b>Uninstall this tool dependency</b> from the <b>Repository Actions</b> menu, correct problems "
+            message += "if necessary, and try installing the dependency again."
+            status = "error"
+        repository = tool_dependency.tool_shed_repository
+        return trans.fill_template( '/admin/tool_shed_repository/browse_tool_dependency.mako',
+                                    repository=repository,
+                                    tool_dependency=tool_dependency,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def browse_tool_shed( self, trans, **kwd ):
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
+        params = dict( galaxy_url=web.url_for( '/', qualified=True ) )
+        url = util.build_url( tool_shed_url, pathspec=[ 'repository', 'browse_valid_categories' ], params=params )
+        return trans.response.send_redirect( url )
+
+    @web.expose
+    @web.require_admin
+    def browse_toolsheds( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        return trans.fill_template( '/admin/tool_shed_repository/browse_toolsheds.mako',
+                                    message=message,
+                                    status='error' )
+
+    @web.expose
+    @web.require_admin
+    def browse_tool_sheds( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        return trans.fill_template( '/webapps/galaxy/admin/tool_sheds.mako',
+                                    message=message,
+                                    status='error' )
+
+    @web.expose
+    @web.require_admin
+    def check_for_updates( self, trans, **kwd ):
+        """Send a request to the relevant tool shed to see if there are any updates."""
+        repository_id = kwd.get( 'id', None )
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
+        params = dict( galaxy_url=web.url_for( '/', qualified=True ),
+                       name=str( repository.name ),
+                       owner=str( repository.owner ),
+                       changeset_revision=str( repository.changeset_revision ) )
+        pathspec = [ 'repository', 'check_for_updates' ]
+        url = util.build_url( tool_shed_url, pathspec=pathspec, params=params )
+        return trans.response.send_redirect( url )
+
+    @web.expose
+    @web.require_admin
+    def deactivate_or_uninstall_repository( self, trans, **kwd ):
+        """
+        Handle all changes when a tool shed repository is being deactivated or uninstalled.  Notice
+        that if the repository contents include a file named tool_data_table_conf.xml.sample, its
+        entries are not removed from the defined config.shed_tool_data_table_config.  This is because
+        it becomes a bit complex to determine if other installed repositories include tools that
+        require the same entry.  For now we'll never delete entries from config.shed_tool_data_table_config,
+        but we may choose to do so in the future if it becomes necessary.
+        """
+        message = escape( kwd.get( 'message', '' ) )
+        statuses = [ None, 'info', 'done', 'warning', 'error' ]
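+        # Statuses are ordered least to most severe; work with list indices so
+        # the max() calls below can only escalate the status as messages accrue.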
+        status = kwd.get( 'status', 'done' )
+        if status in statuses:
+            status = statuses.index( status )
+        else:
+            status = 1
+        remove_from_disk = kwd.get( 'remove_from_disk', '' )
+        remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
+        tool_shed_repositories = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+        if not isinstance( tool_shed_repositories, list ):
+            tool_shed_repositories = [tool_shed_repositories]
+        for tool_shed_repository in tool_shed_repositories:
+            shed_tool_conf, tool_path, relative_install_dir = \
+                suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+            if relative_install_dir:
+                if tool_path:
+                    relative_install_dir = os.path.join( tool_path, relative_install_dir )
+                repository_install_dir = os.path.abspath( relative_install_dir )
+            else:
+                repository_install_dir = None
+            errors = ''
+            if kwd.get( 'deactivate_or_uninstall_repository_button', False ):
+                if tool_shed_repository.includes_tools_for_display_in_tool_panel:
+                    # Handle tool panel alterations.
+                    tpm = tool_panel_manager.ToolPanelManager( trans.app )
+                    tpm.remove_repository_contents( tool_shed_repository,
+                                                    shed_tool_conf,
+                                                    uninstall=remove_from_disk_checked )
+                if tool_shed_repository.includes_data_managers:
+                    dmh = data_manager.DataManagerHandler( trans.app )
+                    dmh.remove_from_data_manager( tool_shed_repository )
+                if tool_shed_repository.includes_datatypes:
+                    # Deactivate proprietary datatypes.
+                    cdl = custom_datatype_manager.CustomDatatypeLoader( trans.app )
+                    installed_repository_dict = cdl.load_installed_datatypes( tool_shed_repository,
+                                                                              repository_install_dir,
+                                                                              deactivate=True )
+                    if installed_repository_dict:
+                        converter_path = installed_repository_dict.get( 'converter_path' )
+                        if converter_path is not None:
+                            cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=True )
+                        display_path = installed_repository_dict.get( 'display_path' )
+                        if display_path is not None:
+                            cdl.load_installed_display_applications( installed_repository_dict, deactivate=True )
+                if remove_from_disk_checked:
+                    try:
+                        # Remove the repository from disk.
+                        shutil.rmtree( repository_install_dir )
+                        log.debug( "Removed repository installation directory: %s" % str( repository_install_dir ) )
+                        removed = True
+                    except Exception as e:
+                        log.debug( "Error removing repository installation directory %s: %s" % ( str( repository_install_dir ), str( e ) ) )
+                        if isinstance( e, OSError ) and not os.path.exists( repository_install_dir ):
+                            removed = True
+                            log.debug( "Repository directory does not exist on disk, marking as uninstalled." )
+                        else:
+                            removed = False
+                    if removed:
+                        tool_shed_repository.uninstalled = True
+                        # Remove all installed tool dependencies and tool dependencies stuck in the INSTALLING state, but don't touch any
+                        # repository dependencies.
+                        tool_dependencies_to_uninstall = tool_shed_repository.tool_dependencies_installed_or_in_error
+                        tool_dependencies_to_uninstall.extend( tool_shed_repository.tool_dependencies_being_installed )
+                        for tool_dependency in tool_dependencies_to_uninstall:
+                            uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans.app, tool_dependency )
+                            if error_message:
+                                errors = '%s  %s' % ( errors, error_message )
+                tool_shed_repository.deleted = True
+                if remove_from_disk_checked:
+                    tool_shed_repository.status = trans.install_model.ToolShedRepository.installation_status.UNINSTALLED
+                    tool_shed_repository.error_message = None
+                    if trans.app.config.manage_dependency_relationships:
+                        # Remove the uninstalled repository and any tool dependencies from the in-memory dictionaries in the
+                        # installed_repository_manager.
+                        trans.app.installed_repository_manager.handle_repository_uninstall( tool_shed_repository )
+                else:
+                    tool_shed_repository.status = trans.install_model.ToolShedRepository.installation_status.DEACTIVATED
+                trans.install_model.context.add( tool_shed_repository )
+                trans.install_model.context.flush()
+                if remove_from_disk_checked:
+                    message += 'The repository named <b>%s</b> has been uninstalled.  ' % escape( tool_shed_repository.name )
+                    if errors:
+                        message += 'Attempting to uninstall tool dependencies resulted in errors: %s' % errors
+                        status = max( status, statuses.index( 'error' ) )
+                    else:
+                        status = max( status, statuses.index( 'done' ) )
+                else:
+                    message += 'The repository named <b>%s</b> has been deactivated.  ' % escape( tool_shed_repository.name )
+                    status = max( status, statuses.index( 'done' ) )
+        status = statuses[ status ]
+        if kwd.get( 'deactivate_or_uninstall_repository_button', False ):
+            return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                              action='browse_repositories',
+                                                              message=message,
+                                                              status=status ) )
+        remove_from_disk_check_box = CheckboxField( 'remove_from_disk', checked=remove_from_disk_checked )
+        return trans.fill_template( '/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako',
+                                    repository=tool_shed_repositories,
+                                    remove_from_disk_check_box=remove_from_disk_check_box,
+                                    message=message,
+                                    status=status )
+
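+    # Summary of the deactivate/uninstall flow above (descriptive only, no added
+    # behavior): tool panel entries, data managers and proprietary datatypes are
+    # unloaded first; only when the 'remove_from_disk' checkbox is checked is the
+    # cloned repository deleted and the record marked UNINSTALLED, otherwise the
+    # record is merely flagged DEACTIVATED.
+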
+    @web.expose
+    def display_image_in_repository( self, trans, **kwd ):
+        """
+        Open an image file that is contained in an installed tool shed repository, or that is referenced by a
+        URL, for display.  The image can be defined either in a README.rst file contained in the repository or
+        in the help section of a Galaxy tool config that is contained in the repository.  All of the following
+        image definitions are supported; the former $PATH_TO_IMAGES prefix is no longer required and is now ignored.
+
+        .. image:: https://raw.github.com/galaxy/some_image.png
+        .. image:: $PATH_TO_IMAGES/some_image.png
+        .. image:: /static/images/some_image.gif
+        .. image:: some_image.jpg
+        .. image:: /deep/some_image.png
+        """
+        repository_id = kwd.get( 'repository_id', None )
+        relative_path_to_image_file = kwd.get( 'image_file', None )
+        if repository_id and relative_path_to_image_file:
+            repository = repository_util.get_tool_shed_repository_by_id( trans.app, repository_id )
+            if repository:
+                repo_files_dir = repository.repo_files_directory( trans.app )
+                # The following line sometimes returns None.  TODO: Figure out why.
+                path_to_file = repository_util.get_absolute_path_to_file_in_repository( repo_files_dir, relative_path_to_image_file )
+                if path_to_file and os.path.exists( path_to_file ):
+                    file_name = os.path.basename( relative_path_to_image_file )
+                    try:
+                        extension = file_name.split( '.' )[ -1 ]
+                    except Exception:
+                        extension = None
+                    if extension:
+                        mimetype = trans.app.datatypes_registry.get_mimetype_by_extension( extension )
+                        if mimetype:
+                            trans.response.set_content_type( mimetype )
+                    # Image files are binary, so open in 'rb' mode to return the raw bytes unmodified.
+                    return open( path_to_file, 'rb' )
+        return None
+
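+    # An illustrative sketch (not part of the upstream module) of how the method
+    # above resolves and serves a relative image reference; the repository path
+    # and file name here are hypothetical.
+    #
+    #   repo_files_dir = '/srv/galaxy/shed_tools/.../example_repo'
+    #   path_to_file = repository_util.get_absolute_path_to_file_in_repository(
+    #       repo_files_dir, 'images/some_image.png' )
+    #   mimetype = trans.app.datatypes_registry.get_mimetype_by_extension( 'png' )
+    #   # typically 'image/png'; the open file handle returned by the method is
+    #   # streamed to the client by the framework.
+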
+    @web.expose
+    @web.require_admin
+    def find_tools_in_tool_shed( self, trans, **kwd ):
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
+        params = dict( galaxy_url=web.url_for( '/', qualified=True ) )
+        url = util.build_url( tool_shed_url, pathspec=[ 'repository', 'find_tools' ], params=params )
+        return trans.response.send_redirect( url )
+
+    @web.expose
+    @web.require_admin
+    def find_workflows_in_tool_shed( self, trans, **kwd ):
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
+        params = dict( galaxy_url=web.url_for( '/', qualified=True ) )
+        url = util.build_url( tool_shed_url, pathspec=[ 'repository', 'find_workflows' ], params=params )
+        return trans.response.send_redirect( url )
+
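+    # A minimal sketch of the redirect URL the two find_* methods above construct,
+    # assuming (hypothetically) the main public tool shed and a local Galaxy at
+    # http://localhost:8080:
+    #
+    #   params = dict( galaxy_url='http://localhost:8080/' )
+    #   url = util.build_url( 'https://toolshed.g2.bx.psu.edu',
+    #                         pathspec=[ 'repository', 'find_tools' ],
+    #                         params=params )
+    #   # -> https://toolshed.g2.bx.psu.edu/repository/find_tools?galaxy_url=...
+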
+    @web.expose
+    @web.require_admin
+    def generate_workflow_image( self, trans, workflow_name, repository_id=None ):
+        """Return an svg image representation of a workflow dictionary created when the workflow was exported."""
+        return workflow_util.generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=repository_id )
+
+    @web.json
+    @web.require_admin
+    def get_file_contents( self, trans, file_path, repository_id ):
+        # Avoid caching
+        trans.response.headers['Pragma'] = 'no-cache'
+        trans.response.headers['Expires'] = '0'
+        return suc.get_repository_file_contents( trans.app, file_path, repository_id, is_admin=True )
+
+    @web.expose
+    @web.require_admin
+    def get_tool_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
+        """
+        Send a request to the appropriate tool shed to retrieve the dictionary of tool dependencies defined for
+        the received repository name, owner and changeset revision.  The received repository_id is the encoded id
+        of the installed tool shed repository in Galaxy.  We need it so that we can derive the tool shed from which
+        it was installed.
+        """
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
+        if tool_shed_url is None or repository_name is None or repository_owner is None or changeset_revision is None:
+            message = "Unable to retrieve tool dependencies from the Tool Shed because one or more of the following required "
+            message += "parameters is None: tool_shed_url: %s, repository_name: %s, repository_owner: %s, changeset_revision: %s " % \
+                ( str( tool_shed_url ), str( repository_name ), str( repository_owner ), str( changeset_revision ) )
+            raise Exception( message )
+        params = dict( name=repository_name, owner=repository_owner, changeset_revision=changeset_revision )
+        pathspec = [ 'repository', 'get_tool_dependencies' ]
+        raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
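+        # A payload shorter than 3 characters can only be an empty JSON value
+        # (e.g. '""' or '{}'), meaning no tool dependencies are defined.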
+        if len( raw_text ) > 2:
+            encoded_text = json.loads( raw_text )
+            text = encoding_util.tool_shed_decode( encoded_text )
+        else:
+            text = ''
+        return text
+
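+    # Sketch of the decode round trip used above, with a made-up payload: the
+    # Tool Shed responds with a JSON string whose value is itself tool-shed-encoded.
+    #
+    #   raw_text = '"<encoded blob>"'                          # HTTP response body
+    #   encoded_text = json.loads( raw_text )                  # '<encoded blob>'
+    #   text = encoding_util.tool_shed_decode( encoded_text )  # dependency dict
+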
+    @web.expose
+    @web.require_admin
+    def get_updated_repository_information( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
+        """
+        Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall
+        an updated revision of an uninstalled tool shed repository.
+        """
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
+        if tool_shed_url is None or repository_name is None or repository_owner is None or changeset_revision is None:
+            message = "Unable to retrieve updated repository information from the Tool Shed because one or more of the following "
+            message += "required parameters is None: tool_shed_url: %s, repository_name: %s, repository_owner: %s, changeset_revision: %s " % \
+                ( str( tool_shed_url ), str( repository_name ), str( repository_owner ), str( changeset_revision ) )
+            raise Exception( message )
+        params = dict( name=str( repository_name ),
+                       owner=str( repository_owner ),
+                       changeset_revision=changeset_revision )
+        pathspec = [ 'repository', 'get_updated_repository_information' ]
+        raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        repo_information_dict = json.loads( raw_text )
+        return repo_information_dict
+
+    @web.expose
+    @web.require_admin
+    def import_workflow( self, trans, workflow_name, repository_id, **kwd ):
+        """Import a workflow contained in an installed tool shed repository into Galaxy."""
+        message = str( escape( kwd.get( 'message', '' ) ) )
+        status = kwd.get( 'status', 'done' )
+        if workflow_name:
+            workflow_name = encoding_util.tool_shed_decode( workflow_name )
+            repository = repository_util.get_tool_shed_repository_by_id( trans.app, repository_id )
+            if repository:
+                workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name )
+                if workflow:
+                    workflow_name = encoding_util.tool_shed_encode( str( workflow.name ) )
+                else:
+                    message += 'Unable to locate a workflow named <b>%s</b> within the installed tool shed repository named <b>%s</b>.' % \
+                        ( escape( str( workflow_name ) ), escape( str( repository.name ) ) )
+                    status = 'error'
+            else:
+                message = 'Invalid repository id <b>%s</b> received.' % str( repository_id )
+                status = 'error'
+        else:
+            message = 'The value of workflow_name is required, but was not received.'
+            status = 'error'
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='view_workflow',
+                                                          workflow_name=workflow_name,
+                                                          repository_id=repository_id,
+                                                          message=message,
+                                                          status=status ) )
+
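+    # Note on the workflow_name round trip above: the name arrives tool-shed-encoded,
+    # is decoded before the repository lookup, and is re-encoded before the redirect
+    # so that view_workflow receives it in the same encoded form.  Hypothetically:
+    #
+    #   encoded = encoding_util.tool_shed_encode( 'My Workflow' )
+    #   assert encoding_util.tool_shed_decode( encoded ) == 'My Workflow'
+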
+    @web.expose
+    @web.require_admin
+    def initiate_tool_dependency_installation( self, trans, tool_dependencies, **kwd ):
+        """
+        Install specified dependencies for repository tools.  The received list of tool_dependencies
+        are the database records for those dependencies defined in the tool_dependencies.xml file
+        (contained in the repository) that should be installed.  This allows for filtering out dependencies
+        that have not been checked for installation on the 'Manage tool dependencies' page for an installed
+        tool shed repository.
+        """
+        # Get the tool_shed_repository from one of the tool_dependencies.
+        message = str( escape( kwd.get( 'message', '' ) ) )
+        status = kwd.get( 'status', 'done' )
+        err_msg = ''
+        tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
+        # Get the tool_dependencies.xml file from the repository.
+        tool_dependencies_config = hg_util.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME,
+                                                                 tool_shed_repository.repo_path( trans.app ) )
+        itdm = install_manager.InstallToolDependencyManager( trans.app )
+        installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=tool_shed_repository,
+                                                                                tool_dependencies_config=tool_dependencies_config,
+                                                                                tool_dependencies=tool_dependencies,
+                                                                                from_tool_migration_manager=False )
+        for installed_tool_dependency in installed_tool_dependencies:
+            if installed_tool_dependency.status == trans.app.install_model.ToolDependency.installation_status.ERROR:
+                text = util.unicodify( installed_tool_dependency.error_message )
+                if text is not None:
+                    err_msg += '  %s' % text
+        if err_msg:
+            message += err_msg
+            status = 'error'
+        message += "Installed tool dependencies: %s" % ', '.join( td.name for td in installed_tool_dependencies )
+        td_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.tool_dependencies ]
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='manage_tool_dependencies',
+                                                          tool_dependency_ids=td_ids,
+                                                          message=message,
+                                                          status=status ) )
+
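+    # The method above is also invoked directly (not via a browser request) by the
+    # dependency management methods further down, e.g.:
+    #
+    #   self.initiate_tool_dependency_installation( trans,
+    #                                               tool_dependencies_for_installation,
+    #                                               message=message,
+    #                                               status=status )
+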
+    @web.expose
+    @web.require_admin
+    def install_latest_repository_revision( self, trans, **kwd ):
+        """Install the latest installable revision of a repository that has been previously installed."""
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_id = kwd.get( 'id', None )
+        if repository_id is not None:
+            repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+            if repository is not None:
+                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
+                name = str( repository.name )
+                owner = str( repository.owner )
+                params = dict( galaxy_url=web.url_for( '/', qualified=True ),
+                               name=name,
+                               owner=owner )
+                pathspec = [ 'repository', 'get_latest_downloadable_changeset_revision' ]
+                raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+                url = util.build_url( tool_shed_url, pathspec=pathspec, params=params )
+                latest_downloadable_revision = json.loads( raw_text )
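+                # hg_util.INITIAL_CHANGELOG_HASH is the all-zeroes changeset hash that
+                # an empty Mercurial changelog reports, i.e. no downloadable revision exists.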
+                if latest_downloadable_revision == hg_util.INITIAL_CHANGELOG_HASH:
+                    message = 'Error retrieving the latest downloadable revision for this repository via the url <b>%s</b>.' % url
+                    status = 'error'
+                else:
+                    # Make sure the latest changeset_revision of the repository has not already been installed.
+                    # Updates to installed repository revisions may have occurred, so make sure to locate the
+                    # appropriate repository revision if one exists.  We need to create a temporary repo_info_tuple
+                    # with the following entries to handle this.
+                    # ( description, clone_url, changeset_revision, ctx_rev, owner, repository_dependencies, tool_dependencies )
+                    tmp_clone_url = util.build_url( tool_shed_url, pathspec=[ 'repos', owner, name ] )
+                    tmp_repo_info_tuple = ( None, tmp_clone_url, latest_downloadable_revision, None, owner, None, None )
+                    installed_repository, installed_changeset_revision = \
+                        repository_util.repository_was_previously_installed( trans.app, tool_shed_url, name, tmp_repo_info_tuple, from_tip=False )
+                    if installed_repository:
+                        current_changeset_revision = str( installed_repository.changeset_revision )
+                        message = 'Revision <b>%s</b> of repository <b>%s</b> owned by <b>%s</b> has already been installed.' % \
+                            ( latest_downloadable_revision, name, owner )
+                        if current_changeset_revision != latest_downloadable_revision:
+                            message += '  The current changeset revision is <b>%s</b>.' % current_changeset_revision
+                        status = 'error'
+                    else:
+                        # Install the latest downloadable revision of the repository.
+                        params = dict( name=name,
+                                       owner=owner,
+                                       changeset_revisions=str( latest_downloadable_revision ),
+                                       galaxy_url=web.url_for( '/', qualified=True ) )
+                        pathspec = [ 'repository', 'install_repositories_by_revision' ]
+                        url = util.build_url( tool_shed_url, pathspec=pathspec, params=params )
+                        return trans.response.send_redirect( url )
+            else:
+                message = 'Cannot locate installed tool shed repository with encoded id <b>%s</b>.' % str( repository_id )
+                status = 'error'
+        else:
+            message = 'The request parameters did not include the required encoded <b>id</b> of installed repository.'
+            status = 'error'
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='browse_repositories',
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def install_tool_dependencies_with_update( self, trans, **kwd ):
+        """
+        Update an installed tool shed repository when the updated revision includes new tool dependencies
+        but no new repository dependencies.
+        """
+        updating_repository_id = kwd.get( 'updating_repository_id', None )
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, updating_repository_id )
+        # All received dependencies need to be installed - confirmed by the caller.
+        encoded_tool_dependencies_dict = kwd.get( 'encoded_tool_dependencies_dict', None )
+        if encoded_tool_dependencies_dict is not None:
+            tool_dependencies_dict = encoding_util.tool_shed_decode( encoded_tool_dependencies_dict )
+        else:
+            tool_dependencies_dict = {}
+        encoded_relative_install_dir = kwd.get( 'encoded_relative_install_dir', None )
+        if encoded_relative_install_dir is not None:
+            relative_install_dir = encoding_util.tool_shed_decode( encoded_relative_install_dir )
+        else:
+            relative_install_dir = ''
+        updating_to_changeset_revision = kwd.get( 'updating_to_changeset_revision', None )
+        updating_to_ctx_rev = kwd.get( 'updating_to_ctx_rev', None )
+        encoded_updated_metadata = kwd.get( 'encoded_updated_metadata', None )
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        if 'install_tool_dependencies_with_update_button' in kwd:
+            # Now that the user has chosen whether to install tool dependencies or not, we can
+            # update the repository record with the changes in the updated revision.
+            if encoded_updated_metadata:
+                updated_metadata = encoding_util.tool_shed_decode( encoded_updated_metadata )
+            else:
+                updated_metadata = None
+            repository = trans.app.update_repository_manager.update_repository_record( repository=repository,
+                                                                                       updated_metadata_dict=updated_metadata,
+                                                                                       updated_changeset_revision=updating_to_changeset_revision,
+                                                                                       updated_ctx_rev=updating_to_ctx_rev )
+            if tool_dependencies_dict:
+                tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app,
+                                                                                         repository,
+                                                                                         relative_install_dir,
+                                                                                         set_status=False )
+                message = "The installed repository named '%s' has been updated to change set revision '%s'.  " % \
+                    ( escape( str( repository.name ) ), updating_to_changeset_revision )
+                self.initiate_tool_dependency_installation( trans, tool_dependencies, message=message, status=status )
+        # Handle tool dependencies check box.
+        if trans.app.config.tool_dependency_dir is None:
+            if tool_dependencies_dict:
+                message = ("Tool dependencies defined in this repository can be automatically installed if you set "
+                           "the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file "
+                           "(galaxy.ini) and restart your Galaxy server.")
+                status = "warning"
+            install_tool_dependencies_check_box_checked = False
+        else:
+            install_tool_dependencies_check_box_checked = True
+        install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies',
+                                                             checked=install_tool_dependencies_check_box_checked )
+        view = views.DependencyResolversView(self.app)
+        if view.installable_resolvers:
+            install_resolver_dependencies_check_box = CheckboxField( 'install_resolver_dependencies', checked=True )
+        else:
+            install_resolver_dependencies_check_box = None
+        return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies_with_update.mako',
+                                    repository=repository,
+                                    updating_repository_id=updating_repository_id,
+                                    updating_to_ctx_rev=updating_to_ctx_rev,
+                                    updating_to_changeset_revision=updating_to_changeset_revision,
+                                    encoded_updated_metadata=encoded_updated_metadata,
+                                    encoded_relative_install_dir=encoded_relative_install_dir,
+                                    encoded_tool_dependencies_dict=encoded_tool_dependencies_dict,
+                                    install_resolver_dependencies_check_box=install_resolver_dependencies_check_box,
+                                    install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+                                    tool_dependencies_dict=tool_dependencies_dict,
+                                    message=message,
+                                    status=status )
+
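+    # Sketch of the CheckboxField pattern used throughout this controller: the field
+    # object renders a named checkbox in the Mako template, and on postback the class
+    # method CheckboxField.is_checked() interprets the submitted form value.
+    #
+    #   cb = CheckboxField( 'install_tool_dependencies', checked=True )
+    #   checked = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
+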
+    @web.expose
+    @web.require_admin
+    def manage_repositories( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        api_installation = util.asbool( kwd.get( 'api', 'false' ) )
+        if api_installation:
+            tsr_ids = json.loads( kwd.get( 'tool_shed_repository_ids', '[]' ) )
+            kwd[ 'tool_shed_repository_ids' ] = tsr_ids
+        tsridslist = common_util.get_tool_shed_repository_ids( **kwd )
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if not tsridslist:
+                message = 'Select at least 1 tool shed repository to %s.' % operation
+                kwd[ 'message' ] = message
+                kwd[ 'status' ] = 'error'
+                del kwd[ 'operation' ]
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='manage_repositories',
+                                                                  **kwd ) )
+            if operation == 'browse':
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='browse_repository',
+                                                                  **kwd ) )
+            elif operation == 'uninstall':
+                # TODO: I believe this block should be removed, but make sure...
+                repositories_for_uninstallation = []
+                for repository_id in tsridslist:
+                    repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+                                                            .get( trans.security.decode_id( repository_id ) )
+                    if repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED,
+                                              trans.install_model.ToolShedRepository.installation_status.ERROR ]:
+                        repositories_for_uninstallation.append( repository )
+                if repositories_for_uninstallation:
+                    return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                      action='uninstall_repositories',
+                                                                      **kwd ) )
+                else:
+                    kwd[ 'message' ] = 'All selected tool shed repositories are already uninstalled.'
+                    kwd[ 'status' ] = 'error'
+            elif operation == "install":
+                irm = install_manager.InstallRepositoryManager( trans.app )
+                reinstalling = util.string_as_bool( kwd.get( 'reinstalling', False ) )
+                encoded_kwd = kwd[ 'encoded_kwd' ]
+                decoded_kwd = encoding_util.tool_shed_decode( encoded_kwd )
+                install_resolver_dependencies = CheckboxField.is_checked( decoded_kwd.get( 'install_resolver_dependencies', '' ) )
+                install_tool_dependencies = CheckboxField.is_checked( decoded_kwd.get( 'install_tool_dependencies', '' ) )
+                tsr_ids = decoded_kwd[ 'tool_shed_repository_ids' ]
+                decoded_kwd['install_resolver_dependencies'] = install_resolver_dependencies
+                decoded_kwd['install_tool_dependencies'] = install_tool_dependencies
+                try:
+                    tool_shed_repositories = irm.install_repositories(
+                        tsr_ids=tsr_ids,
+                        decoded_kwd=decoded_kwd,
+                        reinstalling=reinstalling,
+                    )
+                    tsr_ids_for_monitoring = [ trans.security.encode_id( tsr.id ) for tsr in tool_shed_repositories ]
+                    if api_installation:
+                        return json.dumps( tsr_ids_for_monitoring )
+                    else:
+                        trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                   action='monitor_repository_installation',
+                                                                   tool_shed_repository_ids=tsr_ids_for_monitoring ) )
+                except install_manager.RepositoriesInstalledException as e:
+                    kwd[ 'message' ] = e.message
+                    kwd[ 'status' ] = 'error'
+        return self.repository_installation_grid( trans, **kwd )
+
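+    # When called with api=true (as by the Galaxy installation API), the method above
+    # expects tool_shed_repository_ids as a JSON list and, for an install operation,
+    # returns a JSON list of encoded repository ids that the caller can poll.  A
+    # hypothetical request payload:
+    #
+    #   operation=install&api=true
+    #   tool_shed_repository_ids=["f2db41e1fa331b3e"]      (id value is made up)
+    #   encoded_kwd=<tool-shed-encoded installation parameters>
+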
+    @web.expose
+    @web.require_admin
+    def manage_repository( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_id = kwd.get( 'id', None )
+        if repository_id is None:
+            return trans.show_error_message( 'Missing required encoded repository id.' )
+        operation = kwd.get( 'operation', None )
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        if repository is None:
+            return trans.show_error_message( 'Invalid repository specified.' )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
+        name = str( repository.name )
+        owner = str( repository.owner )
+        installed_changeset_revision = str( repository.installed_changeset_revision )
+        if repository.status in [ trans.install_model.ToolShedRepository.installation_status.CLONING ]:
+            tool_shed_repository_ids = [ repository_id ]
+            return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                              action='monitor_repository_installation',
+                                                              tool_shed_repository_ids=tool_shed_repository_ids ) )
+        if repository.can_install and operation == 'install':
+            # Send a request to the tool shed to install the repository.
+            params = dict( name=name,
+                           owner=owner,
+                           changeset_revisions=installed_changeset_revision,
+                           galaxy_url=web.url_for( '/', qualified=True ) )
+            pathspec = [ 'repository', 'install_repositories_by_revision' ]
+            url = util.build_url( tool_shed_url, pathspec=pathspec, params=params )
+            return trans.response.send_redirect( url )
+        description = kwd.get( 'description', repository.description )
+        shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+        if relative_install_dir:
+            repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
+        else:
+            repo_files_dir = None
+        if repository.in_error_state:
+            message = "This repository is not installed correctly (see the <b>Repository installation error</b> below).  Choose "
+            message += "<b>Reset to install</b> from the <b>Repository Actions</b> menu, correct problems if necessary and try "
+            message += "installing the repository again."
+            status = "error"
+        elif repository.can_install:
+            message = "This repository is not installed.  You can install it by choosing  <b>Install</b> from the <b>Repository Actions</b> menu."
+            status = "error"
+        elif kwd.get( 'edit_repository_button', False ):
+            if description != repository.description:
+                repository.description = description
+                trans.install_model.context.add( repository )
+                trans.install_model.context.flush()
+            message = "The repository information has been updated."
+        dd = dependency_display.DependencyDisplayer( trans.app )
+        containers_dict = dd.populate_containers_dict_from_repository_metadata( tool_shed_url=tool_shed_url,
+                                                                                tool_path=tool_path,
+                                                                                repository=repository,
+                                                                                reinstalling=False,
+                                                                                required_repo_info_dicts=None )
+        view = views.DependencyResolversView(self.app)
+        requirements = suc.get_unique_requirements_from_repository(repository)
+        requirements_status = view.get_requirements_status(requirements, repository.installed_tool_dependencies)
+        return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
+                                    repository=repository,
+                                    description=description,
+                                    repo_files_dir=repo_files_dir,
+                                    containers_dict=containers_dict,
+                                    requirements_status=requirements_status,
+                                    message=message,
+                                    status=status )
+
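+    # The requirements_status lookup above follows a pattern reused later in
+    # prepare_for_install: collect each tool's <requirement> entries from the
+    # repository, then ask the configured dependency resolvers whether each can be
+    # satisfied.  A rough sketch mirroring the calls above:
+    #
+    #   view = views.DependencyResolversView( self.app )
+    #   requirements = suc.get_unique_requirements_from_repository( repository )
+    #   status = view.get_requirements_status( requirements,
+    #                                          repository.installed_tool_dependencies )
+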
+    @web.expose
+    @web.require_admin
+    def manage_repository_tool_dependencies( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
+        if tool_dependency_ids:
+            # We need a tool_shed_repository, so get it from one of the tool_dependencies.
+            tool_dependency = tool_dependency_util.get_tool_dependency( trans.app, tool_dependency_ids[ 0 ] )
+            tool_shed_repository = tool_dependency.tool_shed_repository
+        else:
+            # The user must have been on the manage_repository_tool_dependencies page and clicked the button to either
+            # install or uninstall a tool dependency, but didn't check any of the available tool dependencies on which
+            # to perform the action.
+            repository_id = kwd.get( 'repository_id', None )
+            tool_shed_repository = repository_util.get_tool_shed_repository_by_id( trans.app, repository_id )
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if not tool_dependency_ids:
+                message = 'Select at least 1 tool dependency to %s.' % operation
+                kwd[ 'message' ] = message
+                kwd[ 'status' ] = 'error'
+                del kwd[ 'operation' ]
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='manage_repository_tool_dependencies',
+                                                                  **kwd ) )
+            if operation == 'browse':
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='browse_tool_dependency',
+                                                                  **kwd ) )
+            elif operation == 'uninstall':
+                tool_dependencies_for_uninstallation = []
+                for tool_dependency_id in tool_dependency_ids:
+                    tool_dependency = tool_dependency_util.get_tool_dependency( trans.app, tool_dependency_id )
+                    if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.INSTALLED,
+                                                   trans.install_model.ToolDependency.installation_status.ERROR ]:
+                        tool_dependencies_for_uninstallation.append( tool_dependency )
+                if tool_dependencies_for_uninstallation:
+                    return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                      action='uninstall_tool_dependencies',
+                                                                      **kwd ) )
+                else:
+                    message = 'No selected tool dependencies can be uninstalled, you may need to use the <b>Repair repository</b> feature.'
+                    status = 'error'
+            elif operation == "install":
+                if trans.app.config.tool_dependency_dir:
+                    tool_dependencies_for_installation = []
+                    for tool_dependency_id in tool_dependency_ids:
+                        tool_dependency = tool_dependency_util.get_tool_dependency( trans.app, tool_dependency_id )
+                        if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                                       trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                            tool_dependencies_for_installation.append( tool_dependency )
+                    if tool_dependencies_for_installation:
+                        self.initiate_tool_dependency_installation( trans,
+                                                                    tool_dependencies_for_installation,
+                                                                    message=message,
+                                                                    status=status )
+                    else:
+                        message = 'All selected tool dependencies are already installed.'
+                        status = 'error'
+                else:
+                    message = 'Set the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file (galaxy.ini) '
+                    message += 'and restart your Galaxy server to install tool dependencies.'
+                    status = 'error'
+        installed_tool_dependencies_select_field = \
+            tool_dependency_util.build_tool_dependencies_select_field( trans.app,
+                                                                       tool_shed_repository=tool_shed_repository,
+                                                                       name='inst_td_ids',
+                                                                       uninstalled_only=False )
+        uninstalled_tool_dependencies_select_field = \
+            tool_dependency_util.build_tool_dependencies_select_field( trans.app,
+                                                                       tool_shed_repository=tool_shed_repository,
+                                                                       name='uninstalled_tool_dependency_ids',
+                                                                       uninstalled_only=True )
+        return trans.fill_template( '/admin/tool_shed_repository/manage_repository_tool_dependencies.mako',
+                                    repository=tool_shed_repository,
+                                    installed_tool_dependencies_select_field=installed_tool_dependencies_select_field,
+                                    uninstalled_tool_dependencies_select_field=uninstalled_tool_dependencies_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def manage_tool_dependencies( self, trans, **kwd ):
+        # This method is called when tool dependencies are being installed.  See the related manage_repository_tool_dependencies
+        # method for managing the tool dependencies for a specified installed tool shed repository.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
+        repository_id = kwd.get( 'repository_id', None )
+        if tool_dependency_ids:
+            # We need a tool_shed_repository, so get it from one of the tool_dependencies.
+            tool_dependency = tool_dependency_util.get_tool_dependency( trans.app, tool_dependency_ids[ 0 ] )
+            tool_shed_repository = tool_dependency.tool_shed_repository
+        else:
+            # The user must have been on the manage_repository_tool_dependencies page and clicked the button to either
+            # install or uninstall a tool dependency, but didn't check any of the available tool dependencies on which
+            # to perform the action.
+            tool_shed_repository = repository_util.get_tool_shed_repository_by_id( trans.app, repository_id )
+        self.tool_dependency_grid.title = "Tool shed repository '%s' tool dependencies" % escape( tool_shed_repository.name )
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if not tool_dependency_ids:
+                message = 'Select at least 1 tool dependency to %s.' % operation
+                kwd[ 'message' ] = message
+                kwd[ 'status' ] = 'error'
+                del kwd[ 'operation' ]
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='manage_tool_dependencies',
+                                                                  **kwd ) )
+            if operation == 'browse':
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='browse_tool_dependency',
+                                                                  **kwd ) )
+            elif operation == "install":
+                if trans.app.config.tool_dependency_dir:
+                    tool_dependencies_for_installation = []
+                    for tool_dependency_id in tool_dependency_ids:
+                        tool_dependency = tool_dependency_util.get_tool_dependency( trans.app, tool_dependency_id )
+                        if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                                       trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                            tool_dependencies_for_installation.append( tool_dependency )
+                    if tool_dependencies_for_installation:
+                        self.initiate_tool_dependency_installation( trans,
+                                                                    tool_dependencies_for_installation,
+                                                                    message=message,
+                                                                    status=status )
+                    else:
+                        kwd[ 'message' ] = 'All selected tool dependencies are already installed.'
+                        kwd[ 'status' ] = 'error'
+                else:
+                    message = 'Set the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file (galaxy.ini) '
+                    message += 'and restart your Galaxy server to install tool dependencies.'
+                    kwd[ 'message' ] = message
+                    kwd[ 'status' ] = 'error'
+        # Show the installation grid while tool dependencies are being installed; otherwise redirect to the management page.
+        if tool_shed_repository.tool_dependencies_being_installed:
+            return self.tool_dependency_grid( trans, **kwd )
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='manage_repository_tool_dependencies',
+                                                          tool_dependency_ids=tool_dependency_ids,
+                                                          repository_id=repository_id,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def monitor_repository_installation( self, trans, **kwd ):
+        tsridslist = common_util.get_tool_shed_repository_ids( **kwd )
+        if not tsridslist:
+            tsridslist = repository_util.get_ids_of_tool_shed_repositories_being_installed( trans.app, as_string=False )
+        kwd[ 'tool_shed_repository_ids' ] = tsridslist
+        return self.repository_installation_grid( trans, **kwd )
+
+    @web.json
+    @web.require_admin
+    def open_folder( self, trans, folder_path, repository_id ):
+        # Avoid caching
+        trans.response.headers['Pragma'] = 'no-cache'
+        trans.response.headers['Expires'] = '0'
+        return suc.open_repository_files_folder( trans.app, folder_path, repository_id, is_admin=True )
+
+    @web.expose
+    @web.require_admin
+    def prepare_for_install( self, trans, **kwd ):
+        if not suc.have_shed_tool_conf_for_install( trans.app ):
+            message = 'The <b>tool_config_file</b> setting in <b>galaxy.ini</b> must include at least one '
+            message += 'shed tool configuration file name with a <b><toolbox></b> tag that includes a <b>tool_path</b> '
+            message += 'attribute value which is a directory relative to the Galaxy installation directory in order '
+            message += 'to automatically install tools from a Galaxy Tool Shed (e.g., the file name <b>shed_tool_conf.xml</b> '
+            message += 'whose <b><toolbox></b> tag is <b><toolbox tool_path="../shed_tools"></b>).<p/>See the '
+            message += '<a href="https://wiki.galaxyproject.org/InstallingRepositoriesToGalaxy" target="_blank">Installation '
+            message += 'of Galaxy Tool Shed repository tools into a local Galaxy instance</a> section of the Galaxy Tool '
+            message += 'Shed wiki for all of the details.'
+            return trans.show_error_message( message )
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        shed_tool_conf = kwd.get( 'shed_tool_conf', None )
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
+        # Handle repository dependencies, which do not include those that are required only for compiling a dependent
+        # repository's tool dependencies.
+        has_repository_dependencies = util.string_as_bool( kwd.get( 'has_repository_dependencies', False ) )
+        install_repository_dependencies = kwd.get( 'install_repository_dependencies', '' )
+        # Every repository will be installed into the same tool panel section or all will be installed outside of any sections.
+        new_tool_panel_section_label = kwd.get( 'new_tool_panel_section_label', '' )
+        tool_panel_section_id = kwd.get( 'tool_panel_section_id', '' )
+        tool_panel_section_keys = []
+        # One or more repositories may include tools, but not necessarily all of them.
+        includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
+        # Some tools should not be displayed in the tool panel (e.g., DataManager tools and datatype converters).
+        includes_tools_for_display_in_tool_panel = util.string_as_bool( kwd.get( 'includes_tools_for_display_in_tool_panel', False ) )
+        includes_tool_dependencies = util.string_as_bool( kwd.get( 'includes_tool_dependencies', False ) )
+        install_resolver_dependencies = kwd.get( 'install_resolver_dependencies', '' )
+        install_tool_dependencies = kwd.get( 'install_tool_dependencies', '' )
+        # In addition to installing new repositories, this method is called when updating an installed repository
+        # to a new changeset_revision where the update includes newly defined repository dependencies.
+        updating = util.asbool( kwd.get( 'updating', False ) )
+        updating_repository_id = kwd.get( 'updating_repository_id', None )
+        updating_to_changeset_revision = kwd.get( 'updating_to_changeset_revision', None )
+        updating_to_ctx_rev = kwd.get( 'updating_to_ctx_rev', None )
+        encoded_updated_metadata = kwd.get( 'encoded_updated_metadata', None )
+        encoded_repo_info_dicts = kwd.get( 'encoded_repo_info_dicts', '' )
+        if encoded_repo_info_dicts:
+            encoded_repo_info_dicts = encoded_repo_info_dicts.split( encoding_util.encoding_sep )
+        if not encoded_repo_info_dicts:
+            # The request originated in the tool shed via a tool search or from this controller's
+            # update_to_changeset_revision() method.
+            repository_ids = kwd.get( 'repository_ids', None )
+            if updating:
+                # We have updated an installed repository where the updates included newly defined repository
+                # and possibly tool dependencies.  We will have arrived here only if the updates include newly
+                # defined repository dependencies.  We're preparing to allow the user to elect to install these
+                # dependencies.  At this point, the repository has been updated to the latest changeset revision,
+                # but the received repository id is from the Galaxy side (the caller is this controller's
+                # update_to_changeset_revision() method).  We need to get the id of the same repository from the
+                # Tool Shed side.
+                repository = repository_util.get_tool_shed_repository_by_id( trans.app, updating_repository_id )
+                # For backward compatibility to the 12/20/12 Galaxy release.
+                try:
+                    params = dict( name=str( repository.name ), owner=str( repository.owner ) )
+                    pathspec = [ 'repository', 'get_repository_id' ]
+                    repository_ids = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+                except Exception as e:
+                    # The Tool Shed cannot handle the get_repository_id request, so the code must be older than the
+                    # 04/2014 Galaxy release when it was introduced.  It will be safest to error out and let the
+                    # Tool Shed admin update the Tool Shed to a later release.
+                    message = 'The updates available for the repository <b>%s</b> ' % escape( str( repository.name ) )
+                    message += 'include newly defined repository or tool dependency definitions, and attempting '
+                    message += 'to update the repository resulted in the following error.  Contact the Tool Shed '
+                    message += 'administrator if necessary.<br/>%s' % str( e )
+                    status = 'error'
+                    return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                      action='browse_repositories',
+                                                                      message=message,
+                                                                      status=status ) )
+                changeset_revisions = updating_to_changeset_revision
+            else:
+                changeset_revisions = kwd.get( 'changeset_revisions', None )
+            # Get the information necessary to install each repository.
+            params = dict( repository_ids=str( repository_ids ), changeset_revisions=str( changeset_revisions ) )
+            pathspec = [ 'repository', 'get_repository_information' ]
+            raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+            repo_information_dict = json.loads( raw_text )
+            for encoded_repo_info_dict in repo_information_dict.get( 'repo_info_dicts', [] ):
+                decoded_repo_info_dict = encoding_util.tool_shed_decode( encoded_repo_info_dict )
+                if not includes_tools:
+                    includes_tools = util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools', False ) )
+                if not includes_tools_for_display_in_tool_panel:
+                    includes_tools_for_display_in_tool_panel = \
+                        util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) )
+                if not has_repository_dependencies:
+                    has_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'has_repository_dependencies', False ) )
+                if not includes_tool_dependencies:
+                    includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) )
+            encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) )
+        repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
+        dd = dependency_display.DependencyDisplayer( trans.app )
+        install_repository_manager = install_manager.InstallRepositoryManager( trans.app )
+        if ( ( not includes_tools_for_display_in_tool_panel and kwd.get( 'select_shed_tool_panel_config_button', False ) ) or
+             ( includes_tools_for_display_in_tool_panel and kwd.get( 'select_tool_panel_section_button', False ) ) ):
+            if updating:
+                repository = repository_util.get_tool_shed_repository_by_id( trans.app, updating_repository_id )
+                decoded_updated_metadata = encoding_util.tool_shed_decode( encoded_updated_metadata )
+                # Now that the user has decided whether they will handle dependencies, we can update
+                # the repository to the latest revision.
+                repository = trans.app.update_repository_manager.update_repository_record( repository=repository,
+                                                                                           updated_metadata_dict=decoded_updated_metadata,
+                                                                                           updated_changeset_revision=updating_to_changeset_revision,
+                                                                                           updated_ctx_rev=updating_to_ctx_rev )
+            install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
+            if includes_tool_dependencies:
+                install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies )
+            else:
+                install_tool_dependencies = False
+            install_resolver_dependencies = CheckboxField.is_checked( install_resolver_dependencies )
+            tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans.app, shed_tool_conf )
+            installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
+                                      new_tool_panel_section_label=new_tool_panel_section_label,
+                                      no_changes_checked=False,
+                                      repo_info_dicts=repo_info_dicts,
+                                      tool_panel_section_id=tool_panel_section_id,
+                                      tool_path=tool_path,
+                                      tool_shed_url=tool_shed_url )
+            created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
+                install_repository_manager.handle_tool_shed_repositories( installation_dict )
+            if created_or_updated_tool_shed_repositories:
+                installation_dict = dict( created_or_updated_tool_shed_repositories=created_or_updated_tool_shed_repositories,
+                                          filtered_repo_info_dicts=filtered_repo_info_dicts,
+                                          has_repository_dependencies=has_repository_dependencies,
+                                          includes_tool_dependencies=includes_tool_dependencies,
+                                          includes_tools=includes_tools,
+                                          includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                                          install_repository_dependencies=install_repository_dependencies,
+                                          install_resolver_dependencies=install_resolver_dependencies,
+                                          install_tool_dependencies=install_tool_dependencies,
+                                          message=message,
+                                          new_tool_panel_section_label=new_tool_panel_section_label,
+                                          shed_tool_conf=shed_tool_conf,
+                                          status=status,
+                                          tool_panel_section_id=tool_panel_section_id,
+                                          tool_panel_section_keys=tool_panel_section_keys,
+                                          tool_path=tool_path,
+                                          tool_shed_url=tool_shed_url )
+
+                encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
+                    install_repository_manager.initiate_repository_installation( installation_dict )
+                return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako',
+                                            encoded_kwd=encoded_kwd,
+                                            query=query,
+                                            tool_shed_repositories=tool_shed_repositories,
+                                            initiate_repository_installation_ids=encoded_repository_ids,
+                                            reinstalling=False )
+            else:
+                kwd[ 'message' ] = message
+                kwd[ 'status' ] = status
+                return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                  action='manage_repositories',
+                                                                  **kwd ) )
+        shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans.app )
+        tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans.app, shed_tool_conf )
+        tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans.app )
+        tool_requirements = suc.get_tool_shed_repo_requirements(app=trans.app,
+                                                                tool_shed_url=tool_shed_url,
+                                                                repo_info_dicts=repo_info_dicts)
+        view = views.DependencyResolversView(self.app)
+        requirements_status = view.get_requirements_status(tool_requirements)
+        if len( repo_info_dicts ) == 1:
+            # If we're installing or updating a single repository, see if it contains a readme or
+            # dependencies that we can display.
+            repo_info_dict = repo_info_dicts[ 0 ]
+            dependencies_for_repository_dict = \
+                trans.app.installed_repository_manager.get_dependencies_for_repository( tool_shed_url,
+                                                                                        repo_info_dict,
+                                                                                        includes_tool_dependencies,
+                                                                                        updating=updating )
+            if not has_repository_dependencies:
+                has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
+            if not includes_tool_dependencies:
+                includes_tool_dependencies = dependencies_for_repository_dict.get( 'includes_tool_dependencies', False )
+            if not includes_tools:
+                includes_tools = dependencies_for_repository_dict.get( 'includes_tools', False )
+            if not includes_tools_for_display_in_tool_panel:
+                includes_tools_for_display_in_tool_panel = \
+                    dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+            installed_repository_dependencies = dependencies_for_repository_dict.get( 'installed_repository_dependencies', None )
+            installed_tool_dependencies = dependencies_for_repository_dict.get( 'installed_tool_dependencies', None )
+            missing_repository_dependencies = dependencies_for_repository_dict.get( 'missing_repository_dependencies', None )
+            missing_tool_dependencies = dependencies_for_repository_dict.get( 'missing_tool_dependencies', None )
+            readme_files_dict = readme_util.get_readme_files_dict_for_display( trans.app, tool_shed_url, repo_info_dict )
+            # We're handling 1 of 3 scenarios here: (1) we're installing a tool shed repository for the first time, so
+            # we've retrieved the list of installed and missing repository dependencies from the database; (2) an error
+            # occurred during the installation process, so we have a tool_shed_repository record in the database with
+            # associated repository dependency records.  Since we have the repository dependencies in both of these
+            # cases, we'll merge the list of missing repository dependencies into the list of installed repository
+            # dependencies since each displayed repository dependency will display a status, whether installed or
+            # missing.  The 3rd scenario is that we're updating an installed repository and the updates include newly
+            # defined repository (and possibly tool) dependencies.  In this case, merging would result in newly defined
+            # dependencies being lost.  We pass the updating parameter to make sure merging occurs only when appropriate.
+            containers_dict = \
+                dd.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url,
+                                                             tool_path=tool_path,
+                                                             readme_files_dict=readme_files_dict,
+                                                             installed_repository_dependencies=installed_repository_dependencies,
+                                                             missing_repository_dependencies=missing_repository_dependencies,
+                                                             installed_tool_dependencies=installed_tool_dependencies,
+                                                             missing_tool_dependencies=missing_tool_dependencies,
+                                                             updating=updating )
+        else:
+            # We're installing a list of repositories, each of which may have tool dependencies or repository dependencies.
+            containers_dicts = []
+            for repo_info_dict in repo_info_dicts:
+                dependencies_for_repository_dict = \
+                    trans.app.installed_repository_manager.get_dependencies_for_repository( tool_shed_url,
+                                                                                            repo_info_dict,
+                                                                                            includes_tool_dependencies,
+                                                                                            updating=updating )
+                if not has_repository_dependencies:
+                    has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
+                if not includes_tool_dependencies:
+                    includes_tool_dependencies = dependencies_for_repository_dict.get( 'includes_tool_dependencies', False )
+                if not includes_tools:
+                    includes_tools = dependencies_for_repository_dict.get( 'includes_tools', False )
+                if not includes_tools_for_display_in_tool_panel:
+                    includes_tools_for_display_in_tool_panel = \
+                        dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+                installed_repository_dependencies = dependencies_for_repository_dict.get( 'installed_repository_dependencies', None )
+                installed_tool_dependencies = dependencies_for_repository_dict.get( 'installed_tool_dependencies', None )
+                missing_repository_dependencies = dependencies_for_repository_dict.get( 'missing_repository_dependencies', None )
+                missing_tool_dependencies = dependencies_for_repository_dict.get( 'missing_tool_dependencies', None )
+                containers_dict = dd.populate_containers_dict_for_new_install(
+                    tool_shed_url=tool_shed_url,
+                    tool_path=tool_path,
+                    readme_files_dict=None,
+                    installed_repository_dependencies=installed_repository_dependencies,
+                    missing_repository_dependencies=missing_repository_dependencies,
+                    installed_tool_dependencies=installed_tool_dependencies,
+                    missing_tool_dependencies=missing_tool_dependencies,
+                    updating=updating
+                )
+                containers_dicts.append( containers_dict )
+            # Merge all containers into a single container.
+            containers_dict = dd.merge_containers_dicts_for_new_install( containers_dicts )
+        # Handle tool dependencies check box.
+        if trans.app.config.tool_dependency_dir is None:
+            if includes_tool_dependencies:
+                message = "Tool dependencies defined in this repository can be automatically installed if you set "
+                message += "the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file "
+                message += "(galaxy.ini) and restart your Galaxy server before installing the repository."
+                status = "warning"
+            install_tool_dependencies_check_box_checked = False
+        else:
+            install_tool_dependencies_check_box_checked = True
+        install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies',
+                                                             checked=install_tool_dependencies_check_box_checked )
+        # Handle repository dependencies check box.
+        install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
+        # Reuse the DependencyResolversView instantiated above rather than constructing a second one.
+        if view.installable_resolvers:
+            install_resolver_dependencies_check_box = CheckboxField( 'install_resolver_dependencies', checked=True )
+        else:
+            install_resolver_dependencies_check_box = None
+        encoded_repo_info_dicts = encoding_util.encoding_sep.join( encoded_repo_info_dicts )
+        tool_shed_url = kwd[ 'tool_shed_url' ]
+        if includes_tools_for_display_in_tool_panel:
+            return trans.fill_template( '/admin/tool_shed_repository/select_tool_panel_section.mako',
+                                        encoded_repo_info_dicts=encoded_repo_info_dicts,
+                                        updating=updating,
+                                        updating_repository_id=updating_repository_id,
+                                        updating_to_ctx_rev=updating_to_ctx_rev,
+                                        updating_to_changeset_revision=updating_to_changeset_revision,
+                                        encoded_updated_metadata=encoded_updated_metadata,
+                                        includes_tools=includes_tools,
+                                        includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                                        includes_tool_dependencies=includes_tool_dependencies,
+                                        install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+                                        install_resolver_dependencies_check_box=install_resolver_dependencies_check_box,
+                                        has_repository_dependencies=has_repository_dependencies,
+                                        install_repository_dependencies_check_box=install_repository_dependencies_check_box,
+                                        new_tool_panel_section_label=new_tool_panel_section_label,
+                                        containers_dict=containers_dict,
+                                        shed_tool_conf=shed_tool_conf,
+                                        shed_tool_conf_select_field=shed_tool_conf_select_field,
+                                        tool_panel_section_select_field=tool_panel_section_select_field,
+                                        tool_shed_url=tool_shed_url,
+                                        requirements_status=requirements_status,
+                                        message=message,
+                                        status=status )
+        else:
+            # If we're installing repositories that include no tools and have no repository dependencies, display a page
+            # allowing the Galaxy administrator to select a shed-related tool panel configuration file whose tool_path
+            # setting will determine the location in which the repositories will be installed.
+            return trans.fill_template( '/admin/tool_shed_repository/select_shed_tool_panel_config.mako',
+                                        encoded_repo_info_dicts=encoded_repo_info_dicts,
+                                        updating=updating,
+                                        updating_repository_id=updating_repository_id,
+                                        updating_to_ctx_rev=updating_to_ctx_rev,
+                                        updating_to_changeset_revision=updating_to_changeset_revision,
+                                        encoded_updated_metadata=encoded_updated_metadata,
+                                        includes_tools=includes_tools,
+                                        includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                                        includes_tool_dependencies=includes_tool_dependencies,
+                                        install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+                                        install_resolver_dependencies_check_box=install_resolver_dependencies_check_box,
+                                        has_repository_dependencies=has_repository_dependencies,
+                                        install_repository_dependencies_check_box=install_repository_dependencies_check_box,
+                                        new_tool_panel_section_label=new_tool_panel_section_label,
+                                        containers_dict=containers_dict,
+                                        shed_tool_conf=shed_tool_conf,
+                                        shed_tool_conf_select_field=shed_tool_conf_select_field,
+                                        tool_panel_section_select_field=tool_panel_section_select_field,
+                                        tool_shed_url=tool_shed_url,
+                                        tool_requirements=tool_requirements,
+                                        message=message,
+                                        status=status )
+
+    @web.expose
+    @web.require_admin
+    def purge_repository( self, trans, **kwd ):
+        """Purge a "white ghost" repository from the database."""
+        repository_id = kwd.get( 'id', None )
+        new_kwd = {}
+        if repository_id is not None:
+            repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+            if repository:
+                if repository.is_new:
+                    if kwd.get( 'purge_repository_button', False ):
+                        irm = trans.app.installed_repository_manager
+                        purge_status, purge_message = irm.purge_repository( repository )
+                        if purge_status == 'ok':
+                            new_kwd[ 'status' ] = "done"
+                        else:
+                            new_kwd[ 'status' ] = 'error'
+                        new_kwd[ 'message' ] = purge_message
+                    else:
+                        return trans.fill_template( 'admin/tool_shed_repository/purge_repository_confirmation.mako',
+                                                    repository=repository )
+                else:
+                    new_kwd[ 'status' ] = 'error'
+                    new_kwd[ 'message' ] = 'Repositories must have a <b>New</b> status in order to be purged.'
+            else:
+                new_kwd[ 'status' ] = 'error'
+                new_kwd[ 'message' ] = 'Cannot locate the database record for the repository with encoded id %s.' % str( repository_id )
+        else:
+            new_kwd[ 'status' ] = 'error'
+            new_kwd[ 'message' ] = 'Invalid repository id value "None" received for repository to be purged.'
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='browse_repositories',
+                                                          **new_kwd ) )
+
+    @web.expose
+    @web.require_admin
+    def reinstall_repository( self, trans, **kwd ):
+        """
+        Reinstall a tool shed repository that has been previously uninstalled, making sure to handle all repository
+        and tool dependencies of the repository.
+        """
+        rdim = repository_dependency_manager.RepositoryDependencyInstallManager( trans.app )
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_id = kwd[ 'id' ]
+        tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        no_changes = kwd.get( 'no_changes', '' )
+        no_changes_checked = CheckboxField.is_checked( no_changes )
+        install_repository_dependencies = CheckboxField.is_checked( kwd.get( 'install_repository_dependencies', '' ) )
+        install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
+        install_resolver_dependencies = CheckboxField.is_checked( kwd.get( 'install_resolver_dependencies', '' ) )
+        shed_tool_conf, tool_path, relative_install_dir = \
+            suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+        repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_repository.tool_shed )
+        tool_section = None
+        tool_panel_section_id = kwd.get( 'tool_panel_section_id', '' )
+        new_tool_panel_section_label = kwd.get( 'new_tool_panel_section_label', '' )
+        tool_panel_section_key = None
+        tool_panel_section_keys = []
+        metadata = tool_shed_repository.metadata
+        # Keep track of tool dependencies defined for the current repository or those defined for any of
+        # its repository dependencies.
+        includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies
+        if tool_shed_repository.includes_tools_for_display_in_tool_panel:
+            tpm = tool_panel_manager.ToolPanelManager( trans.app )
+            # Handle the selected tool panel location for loading tools included in the tool shed repository.
+            tool_section, tool_panel_section_key = \
+                tpm.handle_tool_panel_selection( toolbox=trans.app.toolbox,
+                                                 metadata=metadata,
+                                                 no_changes_checked=no_changes_checked,
+                                                 tool_panel_section_id=tool_panel_section_id,
+                                                 new_tool_panel_section_label=new_tool_panel_section_label )
+            if tool_section is not None:
+                # Just in case the tool_section.id differs from tool_panel_section_id, which it shouldn't...
+                tool_panel_section_id = str( tool_section.id )
+        if tool_shed_repository.status == trans.install_model.ToolShedRepository.installation_status.UNINSTALLED:
+            repository_type = suc.get_repository_type_from_tool_shed(trans.app,
+                                                                     tool_shed_url,
+                                                                     tool_shed_repository.name,
+                                                                     tool_shed_repository.owner)
+            if repository_type == rt_util.TOOL_DEPENDENCY_DEFINITION:
+                # Repositories of type tool_dependency_definition must get the latest
+                # metadata from the Tool Shed since they have only a single installable
+                # revision.
+                raw_text = suc.get_tool_dependency_definition_metadata_from_tool_shed(trans.app,
+                                                                                      tool_shed_url,
+                                                                                      tool_shed_repository.name,
+                                                                                      tool_shed_repository.owner)
+                new_meta = json.loads(raw_text)
+                # Clean up old repository dependency and tool dependency relationships.
+                suc.clean_dependency_relationships(trans, new_meta, tool_shed_repository, tool_shed_url)
+            # The repository's status must be updated from 'Uninstalled' to 'New' when initiating reinstall
+            # so the repository_installation_updater will function.
+            tool_shed_repository = repository_util.create_or_update_tool_shed_repository( trans.app,
+                                                                                          tool_shed_repository.name,
+                                                                                          tool_shed_repository.description,
+                                                                                          tool_shed_repository.installed_changeset_revision,
+                                                                                          tool_shed_repository.ctx_rev,
+                                                                                          repository_clone_url,
+                                                                                          metadata,
+                                                                                          trans.install_model.ToolShedRepository.installation_status.NEW,
+                                                                                          tool_shed_repository.changeset_revision,
+                                                                                          tool_shed_repository.owner,
+                                                                                          tool_shed_repository.dist_to_shed )
+        ctx_rev = suc.get_ctx_rev( trans.app,
+                                   tool_shed_url,
+                                   tool_shed_repository.name,
+                                   tool_shed_repository.owner,
+                                   tool_shed_repository.installed_changeset_revision )
+        repo_info_dicts = []
+        repo_info_dict = kwd.get( 'repo_info_dict', None )
+        if repo_info_dict:
+            if isinstance( repo_info_dict, string_types ):
+                repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+        else:
+            # We enter this else block only if the tool_shed_repository does not include any valid tools.
+            if install_repository_dependencies:
+                repository_dependencies = \
+                    rdim.get_repository_dependencies_for_installed_tool_shed_repository( trans.app,
+                                                                                         tool_shed_repository )
+            else:
+                repository_dependencies = None
+            if metadata:
+                tool_dependencies = metadata.get( 'tool_dependencies', None )
+            else:
+                tool_dependencies = None
+            repo_info_dict = repository_util.create_repo_info_dict( trans.app,
+                                                                    repository_clone_url=repository_clone_url,
+                                                                    changeset_revision=tool_shed_repository.changeset_revision,
+                                                                    ctx_rev=ctx_rev,
+                                                                    repository_owner=tool_shed_repository.owner,
+                                                                    repository_name=tool_shed_repository.name,
+                                                                    tool_dependencies=tool_dependencies,
+                                                                    repository_dependencies=repository_dependencies )
+        if repo_info_dict not in repo_info_dicts:
+            repo_info_dicts.append( repo_info_dict )
+        # Make sure all tool_shed_repository records exist.
+        created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
+            rdim.create_repository_dependency_objects( tool_path=tool_path,
+                                                       tool_shed_url=tool_shed_url,
+                                                       repo_info_dicts=repo_info_dicts,
+                                                       install_repository_dependencies=install_repository_dependencies,
+                                                       no_changes_checked=no_changes_checked,
+                                                       tool_panel_section_id=tool_panel_section_id )
+        # Default the selected tool panel location for loading tools included in each newly installed required
+        # tool shed repository to the location selected for the repository being reinstalled.
+        for index, tps_key in enumerate( tool_panel_section_keys ):
+            if tps_key is None:
+                tool_panel_section_keys[ index ] = tool_panel_section_key
+        encoded_repository_ids = [ trans.security.encode_id( r.id ) for r in created_or_updated_tool_shed_repositories ]
+        new_kwd = dict( includes_tool_dependencies=includes_tool_dependencies,
+                        includes_tools=tool_shed_repository.includes_tools,
+                        includes_tools_for_display_in_tool_panel=tool_shed_repository.includes_tools_for_display_in_tool_panel,
+                        install_tool_dependencies=install_tool_dependencies,
+                        install_resolver_dependencies=install_resolver_dependencies,
+                        repo_info_dicts=filtered_repo_info_dicts,
+                        message=message,
+                        new_tool_panel_section_label=new_tool_panel_section_label,
+                        shed_tool_conf=shed_tool_conf,
+                        status=status,
+                        tool_panel_section_id=tool_panel_section_id,
+                        tool_path=tool_path,
+                        tool_panel_section_keys=tool_panel_section_keys,
+                        tool_shed_repository_ids=encoded_repository_ids,
+                        tool_shed_url=tool_shed_url )
+        encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
+        tsr_ids = [ r.id for r in created_or_updated_tool_shed_repositories ]
+        tool_shed_repositories = []
+        for tsr_id in tsr_ids:
+            tsr = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( tsr_id )
+            tool_shed_repositories.append( tsr )
+        clause_list = []
+        for tsr_id in tsr_ids:
+            clause_list.append( trans.install_model.ToolShedRepository.table.c.id == tsr_id )
+        query = trans.install_model.context.current.query( trans.install_model.ToolShedRepository ) \
+                                           .filter( or_( *clause_list ) )
+        return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako',
+                                    encoded_kwd=encoded_kwd,
+                                    query=query,
+                                    tool_shed_repositories=tool_shed_repositories,
+                                    initiate_repository_installation_ids=encoded_repository_ids,
+                                    reinstalling=True )
+
+    @web.expose
+    @web.require_admin
+    def repair_repository( self, trans, **kwd ):
+        """
+        Inspect the repository dependency hierarchy for a specified repository and attempt to make sure all
+        repositories in the hierarchy, as well as each repository's tool dependencies, are properly installed.
+        """
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_id = kwd.get( 'id', None )
+        if not repository_id:
+            message = 'Invalid installed tool shed repository id %s received.' % str( repository_id )
+            status = 'error'
+            return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                              action='browse_repositories',
+                                                              message=message,
+                                                              status=status ) )
+        tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        rrm = RepairRepositoryManager( trans.app )
+        if kwd.get( 'repair_repository_button', False ):
+            encoded_repair_dict = kwd.get( 'repair_dict', None )
+            if encoded_repair_dict:
+                repair_dict = encoding_util.tool_shed_decode( encoded_repair_dict )
+            else:
+                repair_dict = None
+            if not repair_dict:
+                repair_dict = rrm.get_repair_dict( tool_shed_repository )
+            ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] )
+            ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] )
+            if ordered_tsr_ids and ordered_repo_info_dicts:
+                repositories_for_repair = []
+                for tsr_id in ordered_tsr_ids:
+                    repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
+                    repositories_for_repair.append( repository )
+                return self.repair_tool_shed_repositories( trans, rrm, repositories_for_repair, ordered_repo_info_dicts )
+        tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        repair_dict = rrm.get_repair_dict( tool_shed_repository )
+        encoded_repair_dict = encoding_util.tool_shed_encode( repair_dict )
+        ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] )
+        ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] )
+        return trans.fill_template( 'admin/tool_shed_repository/repair_repository.mako',
+                                    repository=tool_shed_repository,
+                                    encoded_repair_dict=encoded_repair_dict,
+                                    repair_dict=repair_dict,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def repair_tool_shed_repositories( self, trans, repair_repository_manager, tool_shed_repositories, repo_info_dicts, **kwd ):
+        """Repair specified tool shed repositories."""
+        # The received lists of tool_shed_repositories and repo_info_dicts are ordered.
+        for index, tool_shed_repository in enumerate( tool_shed_repositories ):
+            repo_info_dict = repo_info_dicts[ index ]
+            repair_repository_manager.repair_tool_shed_repository( tool_shed_repository,
+                                                                   encoding_util.tool_shed_encode( repo_info_dict ) )
+        tsr_ids_for_monitoring = [ trans.security.encode_id( tsr.id ) for tsr in tool_shed_repositories ]
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='monitor_repository_installation',
+                                                          tool_shed_repository_ids=tsr_ids_for_monitoring ) )
+
+    @web.json
+    def repository_installation_status_updates( self, trans, ids=None, status_list=None ):
+        # Avoid caching
+        trans.response.headers[ 'Pragma' ] = 'no-cache'
+        trans.response.headers[ 'Expires' ] = '0'
+        # Create new HTML for any ToolShedRepository records whose status has changed.
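+        # Each element of the returned list is shaped like the following (illustrative example; the
+        # status value is one of ToolShedRepository.installation_status):
+        #     { 'id': <encoded repository id>, 'status': 'Installed', 'html_status': <rendered mako> }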
+        rval = []
+        if ids is not None and status_list is not None:
+            ids = util.listify( ids )
+            status_list = util.listify( status_list )
+            for tup in zip( ids, status_list ):
+                id, status = tup
+                repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+                if repository.status != status:
+                    rval.append( dict( id=id,
+                                       status=repository.status,
+                                       html_status=unicode( trans.fill_template( "admin/tool_shed_repository/repository_installation_status.mako",
+                                                                                 repository=repository ),
+                                                            'utf-8' ) ) )
+        return rval
+
+    @web.expose
+    @web.require_admin
+    def reselect_tool_panel_section( self, trans, **kwd ):
+        """
+        Select or change the tool panel section to contain the tools included in the tool shed repository
+        being reinstalled.  If there are updates available for the repository in the tool shed, the
+        tool_dependencies and repository_dependencies associated with the updated changeset revision will
+        have been retrieved from the tool shed and passed in the received kwd.  In this case, the stored
+        tool shed repository metadata from the Galaxy database will not be used since it is outdated.
+        """
+        message = ''
+        status = 'done'
+        repository_id = kwd.get( 'id', None )
+        latest_changeset_revision = kwd.get( 'latest_changeset_revision', None )
+        latest_ctx_rev = kwd.get( 'latest_ctx_rev', None )
+        tool_shed_repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
+        metadata = tool_shed_repository.metadata
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( tool_shed_repository.tool_shed ) )
+        tool_path = tool_shed_repository.get_tool_relative_path( trans.app )[0]
+        if latest_changeset_revision and latest_ctx_rev:
+            # There are updates available in the tool shed for the repository, so use the received
+            # dependency information which was retrieved from the tool shed.
+            encoded_updated_repo_info_dict = kwd.get( 'updated_repo_info_dict', None )
+            updated_repo_info_dict = encoding_util.tool_shed_decode( encoded_updated_repo_info_dict )
+            readme_files_dict = updated_repo_info_dict.get( 'readme_files_dict', None )
+            includes_data_managers = updated_repo_info_dict.get( 'includes_data_managers', False )
+            includes_datatypes = updated_repo_info_dict.get( 'includes_datatypes', False )
+            includes_workflows = updated_repo_info_dict.get( 'includes_workflows', False )
+            includes_tool_dependencies = updated_repo_info_dict.get( 'includes_tool_dependencies', False )
+            repo_info_dict = updated_repo_info_dict[ 'repo_info_dict' ]
+        else:
+            # There are no updates available from the tool shed for the repository, so use its locally stored metadata.
+            includes_data_managers = False
+            includes_datatypes = False
+            includes_tool_dependencies = False
+            includes_workflows = False
+            readme_files_dict = None
+            tool_dependencies = None
+            if metadata:
+                if 'data_manager' in metadata:
+                    includes_data_managers = True
+                if 'datatypes' in metadata:
+                    includes_datatypes = True
+                if 'tool_dependencies' in metadata:
+                    includes_tool_dependencies = True
+                if 'workflows' in metadata:
+                    includes_workflows = True
+                # Since we're reinstalling, we need to send a request to the tool shed to get the README files.
+                params = dict( name=tool_shed_repository.name,
+                               owner=tool_shed_repository.owner,
+                               changeset_revision=tool_shed_repository.installed_changeset_revision )
+                pathspec = [ 'repository', 'get_readme_files' ]
+                raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+                readme_files_dict = json.loads( raw_text )
+                tool_dependencies = metadata.get( 'tool_dependencies', None )
+            rdim = repository_dependency_manager.RepositoryDependencyInstallManager( trans.app )
+            repository_dependencies = \
+                rdim.get_repository_dependencies_for_installed_tool_shed_repository( trans.app,
+                                                                                     tool_shed_repository )
+            repo_info_dict = repository_util.create_repo_info_dict( trans.app,
+                                                                    repository_clone_url=repository_clone_url,
+                                                                    changeset_revision=tool_shed_repository.installed_changeset_revision,
+                                                                    ctx_rev=tool_shed_repository.ctx_rev,
+                                                                    repository_owner=tool_shed_repository.owner,
+                                                                    repository_name=tool_shed_repository.name,
+                                                                    tool_dependencies=tool_dependencies,
+                                                                    repository_dependencies=repository_dependencies )
+        irm = trans.app.installed_repository_manager
+        dependencies_for_repository_dict = irm.get_dependencies_for_repository( tool_shed_url,
+                                                                                repo_info_dict,
+                                                                                includes_tool_dependencies,
+                                                                                updating=True )
+        includes_tool_dependencies = dependencies_for_repository_dict.get( 'includes_tool_dependencies', False )
+        includes_tools = dependencies_for_repository_dict.get( 'includes_tools', False )
+        includes_tools_for_display_in_tool_panel = dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+        installed_repository_dependencies = dependencies_for_repository_dict.get( 'installed_repository_dependencies', None )
+        installed_tool_dependencies = dependencies_for_repository_dict.get( 'installed_tool_dependencies', None )
+        missing_repository_dependencies = dependencies_for_repository_dict.get( 'missing_repository_dependencies', None )
+        missing_tool_dependencies = dependencies_for_repository_dict.get( 'missing_tool_dependencies', None )
+        if installed_repository_dependencies or missing_repository_dependencies:
+            has_repository_dependencies = True
+        else:
+            has_repository_dependencies = False
+        if includes_tools_for_display_in_tool_panel:
+            # Get the location in the tool panel into which the tools were originally loaded.
+            if 'tool_panel_section' in metadata:
+                tool_panel_dict = metadata[ 'tool_panel_section' ]
+                if tool_panel_dict:
+                    if tool_util.panel_entry_per_tool( tool_panel_dict ):
+                        # The following forces everything to be loaded into 1 section (or no section) in the tool panel.
+                        tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
+                        tool_section_dict = tool_section_dicts[ 0 ]
+                        original_section_name = tool_section_dict[ 'name' ]
+                    else:
+                        original_section_name = tool_panel_dict[ 'name' ]
+                else:
+                    original_section_name = ''
+            else:
+                original_section_name = ''
+            tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans.app )
+            no_changes_check_box = CheckboxField( 'no_changes', checked=True )
+            if original_section_name:
+                message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>.  " \
+                    % ( escape( tool_shed_repository.name ), original_section_name )
+                message += "Uncheck the <b>No changes</b> check box and select a different tool panel section to load the tools in a "
+                message += "different section in the tool panel.  "
+                status = 'warning'
+            else:
+                message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections.  " % escape( tool_shed_repository.name )
+                message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section.  "
+                status = 'warning'
+        else:
+            no_changes_check_box = None
+            original_section_name = ''
+            tool_panel_section_select_field = None
+        shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans.app )
+        dd = dependency_display.DependencyDisplayer( trans.app )
+        containers_dict = \
+            dd.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url,
+                                                         tool_path=tool_path,
+                                                         readme_files_dict=readme_files_dict,
+                                                         installed_repository_dependencies=installed_repository_dependencies,
+                                                         missing_repository_dependencies=missing_repository_dependencies,
+                                                         installed_tool_dependencies=installed_tool_dependencies,
+                                                         missing_tool_dependencies=missing_tool_dependencies,
+                                                         updating=False )
+        # Since we're reinstalling we'll merge the list of missing repository dependencies into the list of
+        # installed repository dependencies since each displayed repository dependency will display a status,
+        # whether installed or missing.
+        containers_dict = dd.merge_missing_repository_dependencies_to_installed_container( containers_dict )
+        # Handle repository dependencies check box.
+        install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
+        # Handle tool dependencies check box.
+        if trans.app.config.tool_dependency_dir is None:
+            if includes_tool_dependencies:
+                message += "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
+                message += "setting in your Galaxy config file (galaxy.ini) and restart your Galaxy server before installing the repository.  "
+                status = "warning"
+            install_tool_dependencies_check_box_checked = False
+        else:
+            install_tool_dependencies_check_box_checked = True
+        install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=install_tool_dependencies_check_box_checked )
+        view = views.DependencyResolversView(self.app)
+        if view.installable_resolvers:
+            install_resolver_dependencies_check_box = CheckboxField( 'install_resolver_dependencies', checked=True )
+        else:
+            install_resolver_dependencies_check_box = None
+        return trans.fill_template( '/admin/tool_shed_repository/reselect_tool_panel_section.mako',
+                                    repository=tool_shed_repository,
+                                    no_changes_check_box=no_changes_check_box,
+                                    original_section_name=original_section_name,
+                                    includes_data_managers=includes_data_managers,
+                                    includes_datatypes=includes_datatypes,
+                                    includes_tools=includes_tools,
+                                    includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                                    includes_tool_dependencies=includes_tool_dependencies,
+                                    includes_workflows=includes_workflows,
+                                    has_repository_dependencies=has_repository_dependencies,
+                                    install_repository_dependencies_check_box=install_repository_dependencies_check_box,
+                                    install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+                                    install_resolver_dependencies_check_box=install_resolver_dependencies_check_box,
+                                    containers_dict=containers_dict,
+                                    tool_panel_section_select_field=tool_panel_section_select_field,
+                                    shed_tool_conf_select_field=shed_tool_conf_select_field,
+                                    encoded_repo_info_dict=encoding_util.tool_shed_encode( repo_info_dict ),
+                                    repo_info_dict=repo_info_dict,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ):
+        irmm = InstalledRepositoryMetadataManager( trans.app )
+        if 'reset_metadata_on_selected_repositories_button' in kwd:
+            message, status = irmm.reset_metadata_on_selected_repositories( trans.user, **kwd )
+        else:
+            message = escape( kwd.get( 'message', '' ) )
+            status = kwd.get( 'status', 'done' )
+        repositories_select_field = irmm.build_repository_ids_select_field()
+        return trans.fill_template( '/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako',
+                                    repositories_select_field=repositories_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def reset_repository_metadata( self, trans, id ):
+        """Reset all metadata on a single installed tool shed repository."""
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, id )
+        repository_clone_url = common_util.generate_clone_url_for_installed_repository( trans.app, repository )
+        tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
+        if relative_install_dir:
+            original_metadata_dict = repository.metadata
+            irmm = InstalledRepositoryMetadataManager( app=trans.app,
+                                                       repository=repository,
+                                                       changeset_revision=repository.changeset_revision,
+                                                       repository_clone_url=repository_clone_url,
+                                                       shed_config_dict=repository.get_shed_config_dict( trans.app ),
+                                                       relative_install_dir=relative_install_dir,
+                                                       repository_files_dir=None,
+                                                       resetting_all_metadata_on_repository=False,
+                                                       updating_installed_repository=False,
+                                                       persist=False )
+            irmm.generate_metadata_for_changeset_revision()
+            irmm_metadata_dict = irmm.get_metadata_dict()
+            if irmm_metadata_dict != original_metadata_dict:
+                repository.metadata = irmm_metadata_dict
+                irmm.update_in_shed_tool_config()
+                trans.install_model.context.add( repository )
+                trans.install_model.context.flush()
+                message = 'Metadata has been reset on repository <b>%s</b>.' % escape( repository.name )
+                status = 'done'
+            else:
+                message = 'Metadata did not need to be reset on repository <b>%s</b>.' % escape( repository.name )
+                status = 'done'
+        else:
+            message = 'Error locating installation directory for repository <b>%s</b>.' % escape( repository.name )
+            status = 'error'
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='manage_repository',
+                                                          id=id,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def reset_to_install( self, trans, **kwd ):
+        """An error occurred while cloning the repository, so reset everything necessary to enable another attempt."""
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+        if kwd.get( 'reset_repository', False ):
+            repository_util.set_repository_attributes( trans.app,
+                                                       repository,
+                                                       status=trans.install_model.ToolShedRepository.installation_status.NEW,
+                                                       error_message=None,
+                                                       deleted=False,
+                                                       uninstalled=False,
+                                                       remove_from_disk=True )
+            new_kwd = {}
+            new_kwd[ 'message' ] = "You can now attempt to install the repository named <b>%s</b> again." % escape( str( repository.name ) )
+            new_kwd[ 'status' ] = "done"
+            return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                              action='browse_repositories',
+                                                              **new_kwd ) )
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='manage_repository',
+                                                          **kwd ) )
+
+    @web.expose
+    @web.require_admin
+    def set_tool_versions( self, trans, **kwd ):
+        """
+        Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed
+        repository and update the metadata for the repository's revision in the Galaxy database.
+        """
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, kwd[ 'id' ] )
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( repository.tool_shed ) )
+        params = dict( name=repository.name, owner=repository.owner, changeset_revision=repository.changeset_revision )
+        pathspec = [ 'repository', 'get_tool_versions' ]
+        text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        if text:
+            tool_version_dicts = json.loads( text )
+            tvm = tool_version_manager.ToolVersionManager( trans.app )
+            tvm.handle_tool_versions( tool_version_dicts, repository )
+            message = "Tool versions have been set for all included tools."
+            status = 'done'
+        else:
+            message = ("Version information for the tools included in the <b>%s</b> repository is missing.  "
+                       "Reset all of this reppository's metadata in the tool shed, then set the installed tool versions "
+                       "from the installed repository's <b>Repository Actions</b> menu.  " % escape( repository.name ))
+            status = 'error'
+        shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+        repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+        dd = dependency_display.DependencyDisplayer( trans.app )
+        containers_dict = dd.populate_containers_dict_from_repository_metadata( tool_shed_url=tool_shed_url,
+                                                                                tool_path=tool_path,
+                                                                                repository=repository,
+                                                                                reinstalling=False,
+                                                                                required_repo_info_dicts=None )
+        return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
+                                    repository=repository,
+                                    description=repository.description,
+                                    repo_files_dir=repo_files_dir,
+                                    containers_dict=containers_dict,
+                                    message=message,
+                                    status=status )
+
+    @web.json
+    def tool_dependency_status_updates( self, trans, ids=None, status_list=None ):
+        # Avoid caching
+        trans.response.headers[ 'Pragma' ] = 'no-cache'
+        trans.response.headers[ 'Expires' ] = '0'
+        # Create new HTML for any ToolDependency records whose status has changed.
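+        # Mirrors repository_installation_status_updates() above, but for ToolDependency records; each
+        # returned element carries the dependency's encoded id, its current status, and re-rendered
+        # status HTML.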
+        rval = []
+        if ids is not None and status_list is not None:
+            ids = util.listify( ids )
+            status_list = util.listify( status_list )
+            for tup in zip( ids, status_list ):
+                id, status = tup
+                tool_dependency = trans.install_model.context.query( trans.install_model.ToolDependency ).get( trans.security.decode_id( id ) )
+                if tool_dependency.status != status:
+                    rval.append( dict( id=id,
+                                       status=tool_dependency.status,
+                                       html_status=unicode( trans.fill_template( "admin/tool_shed_repository/tool_dependency_installation_status.mako",
+                                                                                 tool_dependency=tool_dependency ),
+                                                            'utf-8' ) ) )
+        return rval
+
+    @web.expose
+    @web.require_admin
+    def uninstall_tool_dependencies( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
+        if not tool_dependency_ids:
+            tool_dependency_ids = util.listify( kwd.get( 'id', None ) )
+        tool_dependencies = []
+        for tool_dependency_id in tool_dependency_ids:
+            tool_dependency = tool_dependency_util.get_tool_dependency( trans.app, tool_dependency_id )
+            tool_dependencies.append( tool_dependency )
+        tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
+        if kwd.get( 'uninstall_tool_dependencies_button', False ):
+            errors = False
+            # Filter the tool dependencies down to those that can actually be uninstalled.
+            tool_dependencies_for_uninstallation = []
+            for tool_dependency in tool_dependencies:
+                if tool_dependency.can_uninstall:
+                    tool_dependencies_for_uninstallation.append( tool_dependency )
+            for tool_dependency in tool_dependencies_for_uninstallation:
+                uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans.app, tool_dependency )
+                if error_message:
+                    errors = True
+                    message = '%s  %s' % ( message, error_message )
+            if errors:
+                message = "Error attempting to uninstall tool dependencies: %s" % message
+                status = 'error'
+            else:
+                message = "These tool dependencies have been uninstalled: %s" % \
+                    ','.join( td.name for td in tool_dependencies_for_uninstallation )
+            td_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.tool_dependencies ]
+            return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                              action='manage_repository_tool_dependencies',
+                                                              tool_dependency_ids=td_ids,
+                                                              status=status,
+                                                              message=message ) )
+        return trans.fill_template( '/admin/tool_shed_repository/uninstall_tool_dependencies.mako',
+                                    repository=tool_shed_repository,
+                                    tool_dependency_ids=tool_dependency_ids,
+                                    tool_dependencies=tool_dependencies,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def update_to_changeset_revision( self, trans, **kwd ):
+        """Update a cloned repository to the latest revision possible."""
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        # Handle protocol changes over time.
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        latest_changeset_revision = kwd.get( 'latest_changeset_revision', None )
+        latest_ctx_rev = kwd.get( 'latest_ctx_rev', None )
+        repository = repository_util.get_installed_repository( trans.app,
+                                                               tool_shed=tool_shed_url,
+                                                               name=name,
+                                                               owner=owner,
+                                                               changeset_revision=changeset_revision )
+        original_metadata_dict = repository.metadata
+        original_repository_dependencies_dict = original_metadata_dict.get( 'repository_dependencies', {} )
+        original_repository_dependencies = original_repository_dependencies_dict.get( 'repository_dependencies', [] )
+        original_tool_dependencies_dict = original_metadata_dict.get( 'tool_dependencies', {} )
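+        # Snapshot the pre-update dependency metadata so that newly introduced
+        # repository or tool dependencies can be detected (and offered for
+        # installation) after the metadata is regenerated below.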
+        if changeset_revision and latest_changeset_revision and latest_ctx_rev:
+            if changeset_revision == latest_changeset_revision:
+                message = "The installed repository named '%s' is current, there are no updates available.  " % name
+            else:
+                shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+                if relative_install_dir:
+                    if tool_path:
+                        repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
+                    else:
+                        repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
+                    repo = hg_util.get_repo_for_repository( trans.app,
+                                                            repository=None,
+                                                            repo_path=repo_files_dir,
+                                                            create=False )
+                    repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+                    hg_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
+                    hg_util.update_repository( repo, latest_ctx_rev )
+                    # Remove old Data Manager entries
+                    if repository.includes_data_managers:
+                        dmh = data_manager.DataManagerHandler( trans.app )
+                        dmh.remove_from_data_manager( repository )
+                    # Update the repository metadata.
+                    tpm = tool_panel_manager.ToolPanelManager( trans.app )
+                    irmm = InstalledRepositoryMetadataManager( app=trans.app,
+                                                               tpm=tpm,
+                                                               repository=repository,
+                                                               changeset_revision=latest_changeset_revision,
+                                                               repository_clone_url=repository_clone_url,
+                                                               shed_config_dict=repository.get_shed_config_dict( trans.app ),
+                                                               relative_install_dir=relative_install_dir,
+                                                               repository_files_dir=None,
+                                                               resetting_all_metadata_on_repository=False,
+                                                               updating_installed_repository=True,
+                                                               persist=True )
+                    irmm.generate_metadata_for_changeset_revision()
+                    irmm_metadata_dict = irmm.get_metadata_dict()
+                    if 'tools' in irmm_metadata_dict:
+                        tool_panel_dict = irmm_metadata_dict.get( 'tool_panel_section', None )
+                        if tool_panel_dict is None:
+                            tool_panel_dict = tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( repository )
+                        repository_tools_tups = irmm.get_repository_tools_tups()
+                        tpm.add_to_tool_panel( repository_name=str( repository.name ),
+                                               repository_clone_url=repository_clone_url,
+                                               changeset_revision=str( repository.installed_changeset_revision ),
+                                               repository_tools_tups=repository_tools_tups,
+                                               owner=str( repository.owner ),
+                                               shed_tool_conf=shed_tool_conf,
+                                               tool_panel_dict=tool_panel_dict,
+                                               new_install=False )
+                        # Add new Data Manager entries
+                        if 'data_manager' in irmm_metadata_dict:
+                            dmh = data_manager.DataManagerHandler( trans.app )
+                            dmh.install_data_managers( trans.app.config.shed_data_manager_config_file,
+                                                       irmm_metadata_dict,
+                                                       repository.get_shed_config_dict( trans.app ),
+                                                       os.path.join( relative_install_dir, name ),
+                                                       repository,
+                                                       repository_tools_tups )
+                    if 'repository_dependencies' in irmm_metadata_dict or 'tool_dependencies' in irmm_metadata_dict:
+                        new_repository_dependencies_dict = irmm_metadata_dict.get( 'repository_dependencies', {} )
+                        new_repository_dependencies = new_repository_dependencies_dict.get( 'repository_dependencies', [] )
+                        new_tool_dependencies_dict = irmm_metadata_dict.get( 'tool_dependencies', {} )
+                        if new_repository_dependencies:
+                            # e.g. [[ 'http://localhost:9009', 'package_picard_1_56_0', 'devteam', '910b0b056666', False, False ]]
+                            proceed_to_install = False
+                            if new_repository_dependencies == original_repository_dependencies:
+                                for new_repository_tup in new_repository_dependencies:
+                                    # Make sure all dependencies are installed.
+                                    # TODO: Repository dependencies that are not installed should be displayed to the user,
+                                    # giving them the option to install them or not. This is the same behavior as when initially
+                                    # installing and when re-installing.
+                                    new_tool_shed, new_name, new_owner, new_changeset_revision, new_pir, new_oicct = \
+                                        common_util.parse_repository_dependency_tuple( new_repository_tup )
+                                    # Mock up a repo_info_tuple that has the information needed to see if the repository dependency
+                                    # was previously installed.
+                                    repo_info_tuple = ( '', new_tool_shed, new_changeset_revision, '', new_owner, [], [] )
+                                    # Since the value of new_changeset_revision came from a repository dependency
+                                    # definition, it may occur earlier in the Tool Shed's repository changelog than
+                                    # the Galaxy tool_shed_repository.installed_changeset_revision record value, so
+                                    # we set from_tip to True to make sure we get the entire set of changeset revisions
+                                    # from the Tool Shed.
+                                    new_repository_db_record, installed_changeset_revision = \
+                                        repository_util.repository_was_previously_installed( trans.app,
+                                                                                             tool_shed_url,
+                                                                                             new_name,
+                                                                                             repo_info_tuple,
+                                                                                             from_tip=True )
+                                    if new_repository_db_record:
+                                        if new_repository_db_record.status in [ trans.install_model.ToolShedRepository.installation_status.ERROR,
+                                                                                trans.install_model.ToolShedRepository.installation_status.NEW,
+                                                                                trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+                                            proceed_to_install = True
+                                            break
+                                    else:
+                                        proceed_to_install = True
+                                        break
+                            if proceed_to_install:
+                                # Updates received include newly defined repository dependencies, so give the user
+                                # the option of installing them.  We cannot update the repository with the changes
+                                # until that happens, so we have to send them along.
+                                new_kwd = dict( tool_shed_url=tool_shed_url,
+                                                updating_repository_id=trans.security.encode_id( repository.id ),
+                                                updating_to_ctx_rev=latest_ctx_rev,
+                                                updating_to_changeset_revision=latest_changeset_revision,
+                                                encoded_updated_metadata=encoding_util.tool_shed_encode( irmm_metadata_dict ),
+                                                updating=True )
+                                return self.prepare_for_install( trans, **new_kwd )
+                        # Updates received did not include any newly defined repository dependencies but did include
+                        # newly defined tool dependencies.  If the newly defined tool dependencies are not the same
+                        # as the originally defined tool dependencies, we need to install them.
+                        proceed_to_install = False
+                        for new_key, new_val in new_tool_dependencies_dict.items():
+                            if new_key not in original_tool_dependencies_dict:
+                                proceed_to_install = True
+                                break
+                            original_val = original_tool_dependencies_dict[ new_key ]
+                            if new_val != original_val:
+                                proceed_to_install = True
+                                break
+                        if proceed_to_install:
+                            encoded_tool_dependencies_dict = encoding_util.tool_shed_encode( irmm_metadata_dict.get( 'tool_dependencies', {} ) )
+                            encoded_relative_install_dir = encoding_util.tool_shed_encode( relative_install_dir )
+                            new_kwd = dict( updating_repository_id=trans.security.encode_id( repository.id ),
+                                            updating_to_ctx_rev=latest_ctx_rev,
+                                            updating_to_changeset_revision=latest_changeset_revision,
+                                            encoded_updated_metadata=encoding_util.tool_shed_encode( irmm_metadata_dict ),
+                                            encoded_relative_install_dir=encoded_relative_install_dir,
+                                            encoded_tool_dependencies_dict=encoded_tool_dependencies_dict,
+                                            message=message,
+                                            status=status )
+                            return self.install_tool_dependencies_with_update( trans, **new_kwd )
+                    # Updates received did not include any newly defined repository dependencies or newly defined
+                    # tool dependencies that need to be installed.
+                    repository = trans.app.update_repository_manager.update_repository_record( repository=repository,
+                                                                                               updated_metadata_dict=irmm_metadata_dict,
+                                                                                               updated_changeset_revision=latest_changeset_revision,
+                                                                                               updated_ctx_rev=latest_ctx_rev )
+                    message = "The installed repository named '%s' has been updated to change set revision '%s'.  " % \
+                        ( name, latest_changeset_revision )
+                else:
+                    message = "The directory containing the installed repository named '%s' cannot be found.  " % name
+                    status = 'error'
+        else:
+            message = "The latest changeset revision could not be retrieved for the installed repository named '%s'.  " % name
+            status = 'error'
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='manage_repository',
+                                                          id=trans.security.encode_id( repository.id ),
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def update_tool_shed_status_for_installed_repository( self, trans, **kwd ):
+        message, status = repository_util.check_for_updates( trans.app, trans.install_model, kwd.get( 'id', None ) )
+        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                          action='browse_repositories',
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def view_tool_metadata( self, trans, repository_id, tool_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_installed_tool_shed_repository( trans.app, repository_id )
+        repository_metadata = repository.metadata
+        shed_config_dict = repository.get_shed_config_dict( trans.app )
+        tool_metadata = {}
+        tool_lineage = []
+        tool = None
+        if 'tools' in repository_metadata:
+            for tool_metadata_dict in repository_metadata[ 'tools' ]:
+                if tool_metadata_dict[ 'id' ] == tool_id:
+                    tool_metadata = tool_metadata_dict
+                    tool_config = tool_metadata[ 'tool_config' ]
+                    if shed_config_dict and shed_config_dict.get( 'tool_path' ):
+                        tool_config = os.path.join( shed_config_dict.get( 'tool_path' ), tool_config )
+                    tool = trans.app.toolbox.load_tool( os.path.abspath( tool_config ), guid=tool_metadata[ 'guid' ] )
+                    if tool:
+                        tvm = tool_version_manager.ToolVersionManager( trans.app )
+                        tool_version = tvm.get_tool_version( str( tool.id ) )
+                        tool_lineage = tool_version.get_version_ids( trans.app, reverse=True )
+                    break
+        return trans.fill_template( "/admin/tool_shed_repository/view_tool_metadata.mako",
+                                    repository=repository,
+                                    repository_metadata=repository_metadata,
+                                    tool=tool,
+                                    tool_metadata=tool_metadata,
+                                    tool_lineage=tool_lineage,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def view_workflow( self, trans, workflow_name=None, repository_id=None, **kwd ):
+        """Retrieve necessary information about a workflow from the database so that it can be displayed in an svg image."""
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        if workflow_name:
+            workflow_name = encoding_util.tool_shed_decode( workflow_name )
+        repository = repository_util.get_tool_shed_repository_by_id( trans.app, repository_id )
+        changeset_revision = repository.changeset_revision
+        metadata = repository.metadata
+        return trans.fill_template( "/admin/tool_shed_repository/view_workflow.mako",
+                                    repository=repository,
+                                    changeset_revision=changeset_revision,
+                                    repository_id=repository_id,
+                                    workflow_name=workflow_name,
+                                    metadata=metadata,
+                                    message=message,
+                                    status=status )
diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py
new file mode 100644
index 0000000..c0dca96
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/async.py
@@ -0,0 +1,145 @@
+"""
+Controller for asynchronous interactions with external tools.
+"""
+
+import logging
+import urllib
+
+from galaxy import jobs, web
+from galaxy.util import Params
+from galaxy.util.hash_util import hmac_new
+from galaxy.web.base.controller import BaseUIController
+
+log = logging.getLogger( __name__ )
+
+
+class ASync( BaseUIController ):
+
+    @web.expose
+    def default(self, trans, tool_id=None, data_id=None, data_secret=None, **kwd):
+        """Catches the tool id and redirects as needed"""
+        return self.index( trans, tool_id=tool_id, data_id=data_id, data_secret=data_secret, **kwd)
+
+    @web.expose
+    def index(self, trans, tool_id=None, data_secret=None, **kwd):
+        """Manages ascynchronous connections"""
+
+        if tool_id is None:
+            return "tool_id argument is required"
+        tool_id = str(tool_id)
+
+        # Redirect to the index page when no parameters are supplied.
+        if not kwd:
+            return trans.response.send_redirect( "/index" )
+
+        params = Params(kwd, sanitize=False)
+        STATUS = params.STATUS
+        URL = params.URL
+        data_id = params.data_id
+
+        log.debug('async dataid -> %s' % data_id)
+        trans.log_event( 'Async dataid -> %s' % str(data_id) )
+
+        # initialize the tool
+        toolbox = self.get_toolbox()
+        tool = toolbox.get_tool( tool_id )
+        if not tool:
+            return "Tool with id %s not found" % tool_id
+
+        #
+        # we have an incoming data_id
+        #
+        if data_id:
+            if not URL:
+                return "No URL parameter was submitted for data %s" % data_id
+            data = trans.sa_session.query( trans.model.HistoryDatasetAssociation ).get( data_id )
+
+            if not data:
+                return "Data %s does not exist or has already been deleted" % data_id
+
+            if STATUS == 'OK':
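+                # The submitted data_secret must match an HMAC over "dataset_id:history_id"
+                # keyed with this server's tool_secret, proving the callback URL was
+                # issued by this Galaxy instance.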
+                key = hmac_new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id ) )
+                if key != data_secret:
+                    return "You do not have permission to alter data %s." % data_id
+                # push the job into the queue
+                data.state = data.blurb = data.states.RUNNING
+                log.debug('executing tool %s' % tool.id)
+                trans.log_event( 'Async executing tool %s' % tool.id, tool_id=tool.id )
+                galaxy_url = trans.request.base + '/async/%s/%s/%s' % ( tool_id, data.id, key )
+                galaxy_url = params.get("GALAXY_URL", galaxy_url)
+                params = dict( URL=URL, GALAXY_URL=galaxy_url, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext )
+                # Assume there is exactly one output file possible
+                params[tool.outputs.keys()[0]] = data.id
+                tool.execute( trans, incoming=params )
+            else:
+                log.debug('async error -> %s' % STATUS)
+                trans.log_event( 'Async error -> %s' % STATUS )
+                data.state = data.blurb = jobs.JOB_ERROR
+                data.info = "Error -> %s" % STATUS
+
+            trans.sa_session.flush()
+
+            return "Data %s with status %s received. OK" % (data_id, STATUS)
+        else:
+            #
+            # no data_id, so this must be a parameter submission
+            #
+            if params.data_type:
+                GALAXY_TYPE = params.data_type
+            elif params.galaxyFileFormat == 'wig':  # this is an undocumented legacy special case
+                GALAXY_TYPE = 'wig'
+            else:
+                GALAXY_TYPE = params.GALAXY_TYPE or tool.outputs.values()[0].format
+
+            GALAXY_NAME = params.name or params.GALAXY_NAME or '%s query' % tool.name
+            GALAXY_INFO = params.info or params.GALAXY_INFO or params.galaxyDescription or ''
+            GALAXY_BUILD = params.dbkey or params.GALAXY_BUILD or params.galaxyFreeze or '?'
+
+            # data = datatypes.factory(ext=GALAXY_TYPE)()
+            # data.ext   = GALAXY_TYPE
+            # data.name  = GALAXY_NAME
+            # data.info  = GALAXY_INFO
+            # data.dbkey = GALAXY_BUILD
+            # data.state = jobs.JOB_OK
+            # history.datasets.add_dataset( data )
+
+            data = trans.app.model.HistoryDatasetAssociation( create_dataset=True, sa_session=trans.sa_session, extension=GALAXY_TYPE )
+            trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
+            data.name = GALAXY_NAME
+            data.dbkey = GALAXY_BUILD
+            data.info = GALAXY_INFO
+            trans.sa_session.add( data )  # Need to add data to session before setting state (setting state requires that the data object is in the session, but this may change)
+            data.state = data.states.NEW
+            open( data.file_name, 'wb' ).close()  # create the file
+            trans.history.add_dataset( data, genome_build=GALAXY_BUILD )
+            trans.sa_session.add( trans.history )
+            trans.sa_session.flush()
+            trans.log_event( "Added dataset %d to history %d" % (data.id, trans.history.id ), tool_id=tool_id )
+
+            try:
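+                # Issue the callback key the remote service must echo back; it is
+                # verified against the same HMAC in the data_id branch above.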
+                key = hmac_new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id ) )
+                galaxy_url = trans.request.base + '/async/%s/%s/%s' % ( tool_id, data.id, key )
+                params.update( { 'GALAXY_URL': galaxy_url } )
+                params.update( { 'data_id': data.id } )
+                # Use provided URL or fallback to tool action
+                url = URL or tool.action
+                # Does url already have query params?
+                if '?' in url:
+                    url_join_char = '&'
+                else:
+                    url_join_char = '?'
+                url = "%s%s%s" % ( url, url_join_char, urllib.urlencode( params.flatten() ) )
+                log.debug("connecting to -> %s" % url)
+                trans.log_event( "Async connecting to -> %s" % url )
+                text = urllib.urlopen(url).read(-1)
+                text = text.strip()
+                if not text.endswith('OK'):
+                    raise Exception( text )
+                data.state = data.blurb = data.states.RUNNING
+            except Exception as e:
+                data.info = str(e)
+                data.state = data.blurb = data.states.ERROR
+
+            trans.sa_session.flush()
+
+        return trans.fill_template( 'root/tool_runner.mako', out_data={}, num_jobs=1, job_errors=[] )
diff --git a/lib/galaxy/webapps/galaxy/controllers/biostar.py b/lib/galaxy/webapps/galaxy/controllers/biostar.py
new file mode 100644
index 0000000..bb5523e
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -0,0 +1,103 @@
+"""
+Controller for integration with the Biostar application
+"""
+
+from galaxy.web.base.controller import BaseUIController, error, web
+from galaxy.util import biostar
+
+
+class BiostarController( BaseUIController ):
+    """
+    Provides integration with Biostar through external authentication, see: http://liondb.com/help/x/
+    """
+
+    @web.expose
+    def biostar_redirect( self, trans, payload=None, biostar_action=None ):
+        """
+        Generate a redirect to a Biostar site using external authentication to
+        pass Galaxy user information and optional information about a specific tool.
+        """
+        try:
+            url, payload = biostar.get_biostar_url( trans.app, payload=payload, biostar_action=biostar_action )
+        except Exception as e:
+            return error( str( e ) )
+        # Only create/log in a Biostar user if this is a registered Galaxy user.
+        if trans.user:
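+            # The cookie is assumed to carry the user's email signed with biostar_key
+            # so the Biostar site can authenticate the Galaxy user (see biostar.create_cookie).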
+            biostar.create_cookie( trans, trans.app.config.biostar_key_name, trans.app.config.biostar_key, trans.user.email )
+        if payload:
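+            # A payload has to reach Biostar as a POST, so render a form that submits
+            # it (via post_redirect.mako) instead of issuing a plain redirect.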
+            return trans.fill_template( "biostar/post_redirect.mako", post_url=url, form_inputs=payload )
+        return trans.response.send_redirect( url )
+
+    @web.expose
+    def biostar_tool_tag_redirect( self, trans, tool_id=None ):
+        """
+        Generate a redirect to a Biostar site using tag for tool.
+        """
+        # tool_id is required
+        if tool_id is None:
+            return error( "No tool_id provided" )
+        # Load the tool
+        tool_version_select_field, tools, tool = \
+            self.app.toolbox.get_tool_components( tool_id, tool_version=None, get_loaded_tools_by_lineage=False, set_selected=True )
+        # No matching tool, unlikely
+        if not tool:
+            return error( "No tool found matching '%s'" % tool_id )
+        # Tool specific information for payload
+        payload = biostar.populate_tag_payload( tool=tool )
+        # Pass on to standard redirect method
+        return self.biostar_redirect( trans, payload=payload, biostar_action='show_tags' )
+
+    @web.expose
+    def biostar_question_redirect( self, trans, payload=None ):
+        """
+        Generate a redirect to a Biostar site using external authentication to
+        pass Galaxy user information and information about a specific tool.
+        """
+        # Pass on to standard redirect method
+        return self.biostar_redirect( trans, payload=payload, biostar_action='new_post' )
+
+    @web.expose
+    def biostar_tool_question_redirect( self, trans, tool_id=None ):
+        """
+        Generate a redirect to a Biostar site using external authentication to
+        pass Galaxy user information and information about a specific tool.
+        """
+        # tool_id is required
+        if tool_id is None:
+            return error( "No tool_id provided" )
+        # Load the tool
+        tool_version_select_field, tools, tool = \
+            self.app.toolbox.get_tool_components( tool_id, tool_version=None, get_loaded_tools_by_lineage=False, set_selected=True )
+        # No matching tool, unlikely
+        if not tool:
+            return error( "No tool found matching '%s'" % tool_id )
+        # Tool specific information for payload
+        payload = biostar.populate_tool_payload( tool=tool )
+        # Pass on to regular question method
+        return self.biostar_question_redirect( trans, payload )
+
+    @web.expose
+    def biostar_tool_bug_report( self, trans, hda=None, email=None, message=None ):
+        """
+        Generate a redirect to a Biostar site using external authentication to
+        pass Galaxy user information and information about a specific tool error.
+        """
+        try:
+            error_reporter = biostar.BiostarErrorReporter( hda, trans.app )
+            payload = error_reporter.send_report( trans.user, email=email, message=message )
+        except Exception as e:
+            return error( str( e ) )
+        return self.biostar_redirect( trans, payload=payload, biostar_action='new_post' )
+
+    @web.expose
+    def biostar_logout( self, trans ):
+        """
+        Log out of Biostar
+        """
+        try:
+            url = biostar.biostar_log_out( trans )
+        except Exception as e:
+            return error( str( e ) )
+        if url:
+            return trans.response.send_redirect( url )
+        return error( "Could not determine Biostar logout URL." )
diff --git a/lib/galaxy/webapps/galaxy/controllers/data_manager.py b/lib/galaxy/webapps/galaxy/controllers/data_manager.py
new file mode 100644
index 0000000..28056ee
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/data_manager.py
@@ -0,0 +1,118 @@
+import logging
+from json import loads
+
+import paste.httpexceptions
+from markupsafe import escape
+from six import string_types
+
+import galaxy.queue_worker
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+
+log = logging.getLogger( __name__ )
+
+
+class DataManager( BaseUIController ):
+
+    @web.expose
+    def index( self, trans, **kwd ):
+        not_is_admin = not trans.user_is_admin()
+        if not_is_admin and not trans.app.config.enable_data_manager_user_view:
+            raise paste.httpexceptions.HTTPUnauthorized( "This Galaxy instance is not configured to allow non-admins to view the data manager." )
+        message = escape( kwd.get( 'message', '' ) )
+        status = escape( kwd.get( 'status', 'info' ) )
+        return trans.fill_template( "data_manager/index.mako", data_managers=trans.app.data_managers, tool_data_tables=trans.app.tool_data_tables, view_only=not_is_admin, message=message, status=status )
+
+    @web.expose
+    def manage_data_manager( self, trans, **kwd ):
+        not_is_admin = not trans.user_is_admin()
+        if not_is_admin and not trans.app.config.enable_data_manager_user_view:
+            raise paste.httpexceptions.HTTPUnauthorized( "This Galaxy instance is not configured to allow non-admins to view the data manager." )
+        message = escape( kwd.get( 'message', '' ) )
+        status = escape( kwd.get( 'status', 'info' ) )
+        data_manager_id = kwd.get( 'id', None )
+        data_manager = trans.app.data_managers.get_manager( data_manager_id )
+        if data_manager is None:
+            return trans.response.send_redirect( web.url_for( controller="data_manager", action="index", message="Invalid Data Manager (%s) was requested" % data_manager_id, status="error" ) )
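+        # Collect this data manager's jobs; the association list is reversed so that
+        # the most recent runs appear first.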
+        jobs = list( reversed( [ assoc.job for assoc in trans.sa_session.query( trans.app.model.DataManagerJobAssociation ).filter_by( data_manager_id=data_manager_id ) ] ) )
+        return trans.fill_template( "data_manager/manage_data_manager.mako", data_manager=data_manager, jobs=jobs, view_only=not_is_admin, message=message, status=status )
+
+    @web.expose
+    def view_job( self, trans, **kwd ):
+        not_is_admin = not trans.user_is_admin()
+        if not_is_admin and not trans.app.config.enable_data_manager_user_view:
+            raise paste.httpexceptions.HTTPUnauthorized( "This Galaxy instance is not configured to allow non-admins to view the data manager." )
+        message = escape( kwd.get( 'message', '' ) )
+        status = escape( kwd.get( 'status', 'info' ) )
+        job_id = kwd.get( 'id', None )
+        try:
+            job_id = trans.security.decode_id( job_id )
+            job = trans.sa_session.query( trans.app.model.Job ).get( job_id )
+        except Exception as e:
+            job = None
+            log.error( "Bad job id (%s) passed to view_job: %s" % ( job_id, e ) )
+        if not job:
+            return trans.response.send_redirect( web.url_for( controller="data_manager", action="index", message="Invalid job (%s) was requested" % job_id, status="error" ) )
+        data_manager_id = job.data_manager_association.data_manager_id
+        data_manager = trans.app.data_managers.get_manager( data_manager_id )
+        hdas = [ assoc.dataset for assoc in job.get_output_datasets() ]
+        data_manager_output = []
+        error_messages = []
+        for hda in hdas:
+            try:
+                data_manager_json = loads( open( hda.get_file_name() ).read() )
+            except Exception as e:
+                data_manager_json = {}
+                error_messages.append( escape( "Unable to obtain data_table info for hda (%s): %s" % ( hda.id, e ) ) )
+            values = []
+            for key, value in data_manager_json.get( 'data_tables', {} ).iteritems():
+                values.append( ( key, value ) )
+            data_manager_output.append( values )
+        return trans.fill_template( "data_manager/view_job.mako", data_manager=data_manager, job=job, view_only=not_is_admin, hdas=hdas, data_manager_output=data_manager_output, message=message, status=status, error_messages=error_messages )
+
+    @web.expose
+    def manage_data_table( self, trans, **kwd ):
+        not_is_admin = not trans.user_is_admin()
+        if not_is_admin and not trans.app.config.enable_data_manager_user_view:
+            raise paste.httpexceptions.HTTPUnauthorized( "This Galaxy instance is not configured to allow non-admins to view the data manager." )
+        message = escape( kwd.get( 'message', '' ) )
+        status = escape( kwd.get( 'status', 'info' ) )
+        data_table_name = kwd.get( 'table_name', None )
+        if not data_table_name:
+            return trans.response.send_redirect( web.url_for( controller="data_manager", action="index" ) )
+        data_table = trans.app.tool_data_tables.get( data_table_name, None )
+        if data_table is None:
+            return trans.response.send_redirect( web.url_for( controller="data_manager", action="index", message="Invalid Data table (%s) was requested" % data_table_name, status="error" ) )
+        return trans.fill_template( "data_manager/manage_data_table.mako", data_table=data_table, view_only=not_is_admin, message=message, status=status )
+
+    @web.expose
+    @web.require_admin
+    def reload_tool_data_tables( self, trans, table_name=None, **kwd ):
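+        # table_name may name one table or several as a comma-separated string, e.g.
+        # table_name=all_fasta,bwa_indexes (table names illustrative).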
+        if table_name and isinstance( table_name, string_types ):
+            table_name = table_name.split( "," )
+        # Reload the tool data tables
+        table_names = self.app.tool_data_tables.reload_tables( table_names=table_name )
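+        # The reload above only affected this process; the control task below
+        # broadcasts the reload to any other Galaxy server processes (noop_self=True
+        # skips this one, which has already reloaded).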
+        galaxy.queue_worker.send_control_task(trans.app, 'reload_tool_data_tables',
+                                              noop_self=True,
+                                              kwargs={'table_name': table_name} )
+        redirect_url = None
+        if table_names:
+            status = 'done'
+            if len( table_names ) == 1:
+                message = "The data table '%s' has been reloaded." % table_names[0]
+                redirect_url = web.url_for( controller='data_manager',
+                                            action='manage_data_table',
+                                            table_name=table_names[0],
+                                            message=message,
+                                            status=status )
+            else:
+                message = "The data tables '%s' have been reloaded." % ', '.join( table_names )
+        else:
+            message = "No data tables have been reloaded."
+            status = 'error'
+        if redirect_url is None:
+            redirect_url = web.url_for( controller='admin',
+                                        action='view_tool_data_tables',
+                                        message=message,
+                                        status=status )
+        return trans.response.send_redirect( redirect_url )
diff --git a/lib/galaxy/webapps/galaxy/controllers/dataset.py b/lib/galaxy/webapps/galaxy/controllers/dataset.py
new file mode 100644
index 0000000..97033c4
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -0,0 +1,1174 @@
+import logging
+import os
+import urllib
+
+from markupsafe import escape
+import paste.httpexceptions
+from six import string_types, text_type
+from sqlalchemy import false, true
+
+from galaxy import datatypes, model, util, web
+from galaxy import managers
+from galaxy.datatypes.display_applications.util import decode_dataset_user, encode_dataset_user
+from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
+from galaxy.util import inflector, smart_str
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.web.base.controller import BaseUIController, ERROR, SUCCESS, url_for, UsesExtendedMetadataMixin
+from galaxy.web.framework.helpers import grids, iff, time_ago, to_unicode
+from galaxy.tools.errors import EmailErrorReporter
+
+log = logging.getLogger( __name__ )
+
+comptypes = []
+
+try:
+    import zlib  # noqa: F401
+    comptypes.append( 'zip' )
+except ImportError:
+    pass
+
+
+class HistoryDatasetAssociationListGrid( grids.Grid ):
+    # Custom columns for grid.
+    class HistoryColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, hda):
+            return escape(hda.history.name)
+
+    class StatusColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, hda ):
+            if hda.deleted:
+                return "deleted"
+            return ""
+
+        def get_accepted_filters( self ):
+            """ Returns a list of accepted filters for this column. """
+            accepted_filter_labels_and_vals = { "Active" : "False", "Deleted" : "True", "All": "All" }
+            accepted_filters = []
+            for label, val in accepted_filter_labels_and_vals.items():
+                args = { self.key: val }
+                accepted_filters.append( grids.GridColumnFilter( label, args) )
+            return accepted_filters
+
+    # Grid definition
+    title = "Saved Datasets"
+    model_class = model.HistoryDatasetAssociation
+    template = '/dataset/grid.mako'
+    default_sort_key = "-update_time"
+    columns = [
+        grids.TextColumn( "Name", key="name",
+                          # Link name to dataset's history.
+                          link=( lambda item: iff( item.history.deleted, None, dict( operation="switch", id=item.id ) ) ), filterable="advanced", attach_popup=True ),
+        HistoryColumn( "History", key="history", sortable=False, inbound=True,
+                       link=( lambda item: iff( item.history.deleted, None, dict( operation="switch_history", id=item.id ) ) ) ),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.HistoryDatasetAssociationTagAssociation, filterable="advanced", grid_name="HistoryDatasetAssocationListGrid" ),
+        StatusColumn( "Status", key="deleted", attach_popup=False ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search",
+            cols_to_filter=[ columns[0], columns[2] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+    operations = [
+        grids.GridOperation( "Copy to current history", condition=( lambda item: not item.deleted ), async_compatible=True ),
+    ]
+    standard_filters = []
+    default_filter = dict( name="All", deleted="False", tags="All" )
+    preserve_state = False
+    use_async = True
+    use_paging = True
+    num_rows_per_page = 50
+
+    def build_initial_query( self, trans, **kwargs ):
+        # Show user's datasets that are not deleted, not in deleted histories, and not hidden.
+        # To filter HDAs by user, the HDA table must be joined to the History table;
+        # however, for dictionary-based filtering to work, the query still needs a
+        # single primary table, hence the explicit select_from() below.
+        return trans.sa_session.query( self.model_class ).select_from( self.model_class.table.join( model.History.table ) ) \
+            .filter( model.History.user == trans.user ) \
+            .filter( self.model_class.deleted == false() ) \
+            .filter( model.History.deleted == false() ) \
+            .filter( self.model_class.visible == true() )
+
+
+class DatasetInterface( BaseUIController, UsesAnnotations, UsesItemRatings, UsesExtendedMetadataMixin ):
+
+    stored_list_grid = HistoryDatasetAssociationListGrid()
+
+    def __init__( self, app ):
+        super( DatasetInterface, self ).__init__( app )
+        self.history_manager = managers.histories.HistoryManager( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    def _get_job_for_dataset( self, trans, dataset_id ):
+        '''
+        Return the job for the given dataset. This will throw an error if the
+        dataset is either nonexistent or inaccessible to the user.
+        '''
+        hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( self.decode_id( dataset_id ) )
+        assert hda and self._can_access_dataset( trans, hda )
+        return hda.creating_job
+
+    def _can_access_dataset( self, trans, dataset_association, allow_admin=True, additional_roles=None ):
+        roles = trans.get_current_user_roles()
+        if additional_roles:
+            roles = roles + additional_roles
+        return ( allow_admin and trans.user_is_admin() ) or trans.app.security_agent.can_access_dataset( roles, dataset_association.dataset )
+
+    @web.expose
+    def errors( self, trans, id ):
+        hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( self.decode_id( id ) )
+
+        if not hda or not self._can_access_dataset( trans, hda ):
+            return trans.show_error_message( "Either this dataset does not exist or you do not have permission to access it." )
+        return trans.fill_template( "dataset/errors.mako", hda=hda )
+
+    @web.expose
+    def stdout( self, trans, dataset_id=None, **kwargs ):
+        trans.response.set_content_type( 'text/plain' )
+        stdout = ""
+        try:
+            job = self._get_job_for_dataset( trans, dataset_id )
+            stdout = job.stdout
+        except Exception:
+            stdout = "Invalid dataset ID or you are not allowed to access this dataset"
+        return smart_str( stdout )
+
+    @web.expose
+    # TODO: Migrate stderr and stdout to use _get_job_for_dataset; this has not been tested.
+    def stderr( self, trans, dataset_id=None, **kwargs ):
+        trans.response.set_content_type( 'text/plain' )
+        stderr = ""
+        try:
+            job = self._get_job_for_dataset( trans, dataset_id )
+            stderr = job.stderr
+        except Exception:
+            stderr = "Invalid dataset ID or you are not allowed to access this dataset"
+        return smart_str( stderr )
+
+    @web.expose
+    def exit_code( self, trans, dataset_id=None, **kwargs ):
+        trans.response.set_content_type( 'text/plain' )
+        exit_code = ""
+        try:
+            job = self._get_job_for_dataset( trans, dataset_id )
+            exit_code = job.exit_code
+        except Exception:
+            exit_code = "Invalid dataset ID or you are not allowed to access this dataset"
+        return exit_code
+
+    @web.expose
+    def report_error( self, trans, id, email='', message="", **kwd ):
+        biostar_report = 'biostar' in str( kwd.get( 'submit_error_report') ).lower()
+        if biostar_report:
+            return trans.response.send_redirect( url_for( controller='biostar', action='biostar_tool_bug_report', hda=id, email=email, message=message ) )
+        try:
+            error_reporter = EmailErrorReporter( id, trans.app )
+            error_reporter.send_report( user=trans.user, email=email, message=message )
+            return trans.show_ok_message( "Your error report has been sent" )
+        except Exception as e:
+            return trans.show_error_message( "An error occurred sending the report by email: %s" % str( e ) )
+
+    @web.expose
+    def default(self, trans, dataset_id=None, **kwd):
+        return 'This link may not be followed from within Galaxy.'
+
+    @web.expose
+    def get_metadata_file(self, trans, hda_id, metadata_name):
+        """ Allows the downloading of metadata files associated with datasets (eg. bai index for bam files) """
+        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( self.decode_id( hda_id ) )
+        if not data or not self._can_access_dataset( trans, data ):
+            return trans.show_error_message( "You are not allowed to access this dataset" )
+
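+        # Build a safe download filename: characters not in FILENAME_VALID_CHARS are
+        # replaced with '_' and the result is capped at 150 characters.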
+        fname = ''.join(c in util.FILENAME_VALID_CHARS and c or '_' for c in data.name)[0:150]
+
+        file_ext = data.metadata.spec.get(metadata_name).get("file_ext", metadata_name)
+        trans.response.headers["Content-Type"] = "application/octet-stream"
+        trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (data.hid, fname, file_ext)
+        return open(data.metadata.get(metadata_name).file_name)
+
+    def _check_dataset(self, trans, hda_id):
+        # DEPRECATION: We still support unencoded ids for backward compatibility
+        try:
+            data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( self.decode_id( hda_id) )
+            if data is None:
+                raise ValueError( 'Invalid reference dataset id: %s.' % hda_id)
+        except Exception:
+            try:
+                data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( int( hda_id ) )
+            except Exception:
+                data = None
+        if not data:
+            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( hda_id ) )
+        if not self._can_access_dataset( trans, data ):
+            return trans.show_error_message( "You are not allowed to access this dataset" )
+        if data.purged:
+            return trans.show_error_message( "The dataset you are attempting to view has been purged." )
+        if data.deleted and not ( trans.user_is_admin() or ( data.history and trans.get_user() == data.history.user ) ):
+            return trans.show_error_message( "The dataset you are attempting to view has been deleted." )
+        if data.state == trans.model.Dataset.states.UPLOAD:
+            return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to view it." )
+        return data
+
+    @web.expose
+    @web.json
+    def transfer_status(self, trans, dataset_id, filename=None):
+        """ Primarily used for the S3ObjectStore - get the status of data transfer
+        if the file is not in cache """
+        data = self._check_dataset(trans, dataset_id)
+        if isinstance( data, string_types ):
+            return data
+        log.debug( "Checking transfer status for dataset %s..." % data.dataset.id )
+
+        # Pulling files in extra_files_path into the cache is not handled via this
+        # method, primarily because those files are typically linked to through the
+        # tool's output page anyhow, so tying a JavaScript event to a call of this
+        # method does not seem feasible.
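+        # Datasets with an external_filename already live on ordinary disk and are
+        # always ready; otherwise ask the object store whether the file has been
+        # pulled into the local cache.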
+        if data.dataset.external_filename:
+            return True
+        else:
+            return trans.app.object_store.file_ready(data.dataset)
+
+    @web.expose
+    def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, offset=None, ck_size=None, **kwd):
+        data = self._check_dataset(trans, dataset_id)
+        if not isinstance( data, trans.app.model.DatasetInstance ):
+            return data
+        # Ensure offset is an integer before passing through to datatypes.
+        if offset:
+            offset = int(offset)
+        # Ensure ck_size is an integer before passing through to datatypes.
+        if ck_size:
+            ck_size = int(ck_size)
+        return data.datatype.display_data(trans, data, preview, filename, to_ext, offset=offset, ck_size=ck_size, **kwd)
+
+    @web.expose
+    def edit(self, trans, dataset_id=None, filename=None, hid=None, **kwd):
+        """Allows user to modify parameters of an HDA."""
+        message = None
+        status = 'done'
+        refresh_frames = []
+        error = False
+
+        def __ok_to_edit_metadata( dataset_id ):
+            # Prevent modifying metadata when the dataset is queued or running as an input/output.
+            # This code could be more efficient, e.g. by using mappers, but to avoid slowing down
+            # loading of the History panel we leave it here for now.
+            for job_to_dataset_association in trans.sa_session.query(
+                    self.app.model.JobToInputDatasetAssociation ) \
+                    .filter_by( dataset_id=dataset_id ) \
+                    .all() \
+                    + trans.sa_session.query( self.app.model.JobToOutputDatasetAssociation ) \
+                    .filter_by( dataset_id=dataset_id ) \
+                    .all():
+                if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
+                    return False
+            return True
+        if hid is not None:
+            history = trans.get_history()
+            # TODO: hid handling
+            data = history.datasets[ int( hid ) - 1 ]
+            id = None
+        elif dataset_id is not None:
+            id = self.decode_id( dataset_id )
+            data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+        else:
+            trans.log_event( "dataset_id and hid are both None, cannot load a dataset to edit" )
+            return trans.show_error_message( "You must provide a history dataset id to edit" )
+        if data is None:
+            trans.log_event( "Problem retrieving dataset (encoded: %s, decoded: %s) with history id %s." % ( str( dataset_id ), str( id ), str( hid ) ) )
+            return trans.show_error_message( "History dataset id is invalid" )
+        if dataset_id is not None and data.history.user is not None and data.history.user != trans.user:
+            trans.log_event( "User attempted to edit an HDA they do not own (encoded: %s, decoded: %s)" % ( dataset_id, id ) )
+            # Do not reveal the dataset's existence
+            return trans.show_error_message( "History dataset id is invalid" )
+        current_user_roles = trans.get_current_user_roles()
+        if data.history.user and not data.dataset.has_manage_permissions_roles( trans ):
+            # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
+            # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
+            # permission.  In this case, we'll reset this permission to the hda user's private role.
+            manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+            permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
+            trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
+        if self._can_access_dataset( trans, data ):
+            if data.state == trans.model.Dataset.states.UPLOAD:
+                return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
+            params = util.Params( kwd, sanitize=False )
+            if params.change:
+                # The user clicked the Save button on the 'Change data type' form
+                if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+                    # prevent modifying datatype when dataset is queued or running as input/output
+                    if not __ok_to_edit_metadata( data.id ):
+                        message = "This dataset is currently being used as input or output.  You cannot change datatype until the jobs have completed or you have canceled them."
+                        error = True
+                    else:
+                        trans.app.datatypes_registry.change_datatype( data, params.datatype )
+                        trans.sa_session.flush()
+                        trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming={ 'input1': data }, overwrite=False )  # overwrite is False as per existing behavior
+                        message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype )
+                        refresh_frames = ['history']
+                else:
+                    message = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype )
+                    error = True
+            elif params.save:
+                # The user clicked the Save button on the 'Edit Attributes' form
+                data.name = params.name if params.name else ''
+                data.info = params.info if params.info else ''
+                message = ''
+                if __ok_to_edit_metadata( data.id ):
+                    # The following for loop will save all metadata_spec items
+                    for name, spec in data.datatype.metadata_spec.items():
+                        if spec.get("readonly"):
+                            continue
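+                        # By convention the form supplies "is_<name>" for an optional
+                        # element's checkbox state and "or_<name>" for an alternative
+                        # user-supplied value that takes precedence over "<name>".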
+                        optional = params.get("is_" + name, None)
+                        other = params.get("or_" + name, None)
+                        if optional and optional == '__NOTHING__':
+                            # An optional element whose value is '__NOTHING__' was NOT checked on the form, so clear it.
+                            setattr(data.metadata, name, None)
+                        else:
+                            if other:
+                                setattr( data.metadata, name, other )
+                            else:
+                                setattr( data.metadata, name, spec.unwrap( params.get(name, None) ) )
+                    data.datatype.after_setting_metadata( data )
+                    # Sanitize annotation before adding it.
+                    if params.annotation:
+                        annotation = sanitize_html( params.annotation, 'utf-8', 'text/html' )
+                        self.add_item_annotation( trans.sa_session, trans.get_user(), data, annotation )
+                    # This block on controller code is inactive until the 'extended_metadata' edit box is added back into the UI
+                    # Add or delete extended metadata
+#                    if params.extended_metadata:
+#                        em_string = params.extended_metadata
+#                        if len(em_string):
+#                            em_payload = None
+#                            try:
+#                                em_payload = loads(em_string)
+#                            except Exception as e:
+#                                message = 'Invalid JSON input'
+#                                error = True
+#                            if em_payload is not None:
+#                                if data is not None:
+#                                    ex_obj = self.get_item_extended_metadata_obj(trans, data)
+#                                    if ex_obj is not None:
+#                                        self.unset_item_extended_metadata_obj(trans, data)
+#                                        self.delete_extended_metadata(trans, ex_obj)
+#                                    ex_obj = self.create_extended_metadata(trans, em_payload)
+#                                    self.set_item_extended_metadata_obj(trans, data, ex_obj)
+#                                    message = "Updated Extended metadata '%s'." % data.name
+#                                    status = 'done'
+#                                else:
+#                                    message = "data not found"
+#                                    error = True
+#                    else:
+#                        if data is not None:
+#                            ex_obj = self.get_item_extended_metadata_obj(trans, data)
+#                            if ex_obj is not None:
+#                                self.unset_item_extended_metadata_obj(trans, data)
+#                                self.delete_extended_metadata(trans, ex_obj)
+#                        message = "Deleted Extended metadata '%s'." % data.name
+#                        status = 'done'
+
+                    # If setting metadata previously failed and all required elements have now been set, clear the failed state.
+                    if data._state == trans.model.Dataset.states.FAILED_METADATA and not data.missing_meta():
+                        data._state = None
+                    trans.sa_session.flush()
+                    message = "Attributes updated%s" % message
+                    refresh_frames = ['history']
+                else:
+                    trans.sa_session.flush()
+                    message = "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata."
+                    status = "warning"
+                    refresh_frames = ['history']
+            elif params.detect:
+                # The user clicked the Auto-detect button on the 'Edit Attributes' form
+                # prevent modifying metadata when dataset is queued or running as input/output
+                if not __ok_to_edit_metadata( data.id ):
+                    message = "This dataset is currently being used as input or output.  You cannot change metadata until the jobs have completed or you have canceled them."
+                    error = True
+                else:
+                    for name, spec in data.metadata.spec.items():
+                        # We need to be careful about the attributes we are resetting
+                        if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
+                            if spec.get( 'default' ):
+                                setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+                    message = 'Attributes have been queued to be updated'
+                    trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming={ 'input1': data } )
+                    trans.sa_session.flush()
+                    refresh_frames = ['history']
+            elif params.convert_data:
+                target_type = kwd.get("target_type", None)
+                if target_type:
+                    message = data.datatype.convert_dataset(trans, data, target_type)
+                    refresh_frames = ['history']
+            elif params.update_roles_button:
+                if not trans.user:
+                    return trans.show_error_message( "You must be logged in if you want to change permissions." )
+                if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
+                    access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
+                    manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+                    # The user associated the DATASET_ACCESS permission on the dataset with 1 or more roles.  We
+                    # need to ensure that they did not associate roles that would cause accessibility problems.
+                    permissions, in_roles, error, message = \
+                        trans.app.security_agent.derive_roles_from_access( trans, data.dataset.id, 'root', **kwd )
+                    if error:
+                        # Keep the original role associations for the DATASET_ACCESS permission on the dataset.
+                        permissions[ access_action ] = data.dataset.get_access_roles( trans )
+                        status = 'error'
+                    else:
+                        error = trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+                        if error:
+                            message += error
+                            status = 'error'
+                        else:
+                            message = 'Your changes completed successfully.'
+                    trans.sa_session.refresh( data.dataset )
+                else:
+                    message = "You are not authorized to change this dataset's permissions"
+                    error = True
+            else:
+                if "dbkey" in data.datatype.metadata_spec and not data.metadata.dbkey:
+                    # Copy dbkey into metadata, for backwards compatibility.
+                    # This looks like it does nothing, but getting the dbkey
+                    # returns the metadata dbkey unless it is None, in which
+                    # case it falls back to the old dbkey.  Setting the dbkey
+                    # sets it properly in the metadata.
+                    # This is likely no longer required, since the dbkey exists entirely within metadata (the old_dbkey field is gone): REMOVE ME?
+                    data.metadata.dbkey = data.dbkey
+            # Avoid shadowing the imported datatypes module with a local variable
+            # (the built-in 'id' is already shadowed in lots of places here).
+            ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
+            ldatatypes.sort()
+            all_roles = trans.app.security_agent.get_legitimate_roles( trans, data.dataset, 'root' )
+            if error:
+                status = 'error'
+            return trans.fill_template( "/dataset/edit_attributes.mako",
+                                        data=data,
+                                        data_annotation=self.get_item_annotation_str( trans.sa_session, trans.user, data ),
+                                        datatypes=ldatatypes,
+                                        current_user_roles=current_user_roles,
+                                        all_roles=all_roles,
+                                        message=message,
+                                        status=status,
+                                        dataset_id=dataset_id,
+                                        refresh_frames=refresh_frames )
+        else:
+            return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( dataset_id ) )
+
+    @web.expose
+    @web.require_login( "see all available datasets" )
+    def list( self, trans, **kwargs ):
+        """List all available datasets"""
+        status = message = None
+
+        if 'operation' in kwargs:
+            operation = kwargs['operation'].lower()
+            hda_ids = util.listify( kwargs.get( 'id', [] ) )
+
+            # Display no message by default
+            status, message = None, None
+
+            # Load the hdas and ensure they all belong to the current user
+            hdas = []
+            for encoded_hda_id in hda_ids:
+                hda_id = self.decode_id( encoded_hda_id )
+                hda = trans.sa_session.query( model.HistoryDatasetAssociation ).filter_by( id=hda_id ).first()
+                if hda:
+                    # Ensure history is owned by current user
+                    if hda.history.user_id is not None and trans.user:
+                        assert trans.user.id == hda.history.user_id, "HistoryDatasetAssociation does not belong to current user"
+                    hdas.append( hda )
+                else:
+                    log.warning( "Invalid history_dataset_association id '%r' passed to list", hda_id )
+
+            if hdas:
+                if operation == "switch" or operation == "switch_history":
+                    # Switch to a history that the HDA resides in.
+
+                    # Convert hda to histories.
+                    histories = []
+                    for hda in hdas:
+                        histories.append( hda.history )
+
+                    # Use history controller to switch the history. TODO: is this reasonable?
+                    status, message = trans.webapp.controllers['history']._list_switch( trans, histories )
+
+                    # Current history changed, refresh history frame; if switching to a dataset, set hda seek.
+                    trans.template_context['refresh_frames'] = ['history']
+                    if operation == "switch":
+                        hda_ids = [ trans.security.encode_id( hda.id ) for hda in hdas ]
+                        trans.template_context[ 'seek_hda_ids' ] = hda_ids
+                elif operation == "copy to current history":
+                    #
+                    # Copy datasets to the current history.
+                    #
+
+                    target_histories = [ trans.get_history() ]
+
+                    # Reverse HDAs so that they appear in the history in the order they are provided.
+                    hda_ids.reverse()
+                    status, message = self._copy_datasets( trans, hda_ids, target_histories )
+
+                    # Current history changed, refresh history frame.
+                    trans.template_context['refresh_frames'] = ['history']
+
+        # Render the list view
+        return self.stored_list_grid( trans, status=status, message=message, **kwargs )
+
+    @web.expose
+    def imp( self, trans, dataset_id=None, **kwd ):
+        """ Import another user's dataset via a shared URL; dataset is added to user's current history. """
+        # Set referer message.
+        referer = trans.request.referer
+        if referer:
+            referer_message = "<a href='%s'>return to the previous page</a>" % escape(referer)
+        else:
+            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
+        # Error checking.
+        if not dataset_id:
+            return trans.show_error_message( "You must specify a dataset to import. You can %s." % referer_message, use_panels=True )
+        # Do import.
+        cur_history = trans.get_history( create=True )
+        status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ], imported=True )
+        if status != ERROR:
+            message = "Dataset imported. <br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'), referer_message )
+        return trans.show_message( message, type=status, use_panels=True )
+
+    @web.expose
+    @web.json
+    @web.require_login( "use Galaxy datasets" )
+    def get_name_and_link_async( self, trans, id=None ):
+        """ Returns dataset's name and link. """
+        decoded_id = self.decode_id( id )
+        dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
+        dataset = self.hda_manager.error_if_uploading( dataset )
+        return_dict = { "name" : dataset.name, "link" : url_for( controller='dataset', action="display_by_username_and_slug", username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) ) }
+        return return_dict
+
+    @web.expose
+    def get_embed_html_async( self, trans, id ):
+        """ Returns HTML for embedding a dataset in a page. """
+        decoded_id = self.decode_id( id )
+        dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
+        dataset = self.hda_manager.error_if_uploading( dataset )
+        if dataset:
+            return "Embedded Dataset '%s'" % dataset.name
+
+    @web.expose
+    @web.require_login( "use Galaxy datasets" )
+    def set_accessible_async( self, trans, id=None, accessible=False ):
+        """ Does nothing because datasets do not have an importable/accessible attribute. This method could potentially set another attribute. """
+        return
+
+    @web.expose
+    @web.require_login( "rate items" )
+    @web.json
+    def rate_async( self, trans, id, rating ):
+        """ Rate a dataset asynchronously and return updated community data. """
+
+        decoded_id = self.decode_id( id )
+        dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
+        dataset = self.hda_manager.error_if_uploading( dataset )
+        if not dataset:
+            return trans.show_error_message( "The specified dataset does not exist." )
+
+        # Rate dataset.
+        self.rate_item( trans.sa_session, trans.get_user(), dataset, rating )
+
+        return self.get_ave_item_rating_data( trans.sa_session, dataset )
+
+    @web.expose
+    def display_by_username_and_slug( self, trans, username, slug, filename=None, preview=True ):
+        """ Display dataset by username and slug; because datasets do not yet have slugs, the slug is the dataset's id. """
+        id = slug
+        decoded_id = self.decode_id( id )
+        dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
+        dataset = self.hda_manager.error_if_uploading( dataset )
+        if dataset:
+            # Filename used for composite types.
+            if filename:
+                return self.display( trans, dataset_id=slug, filename=filename )
+
+            truncated, dataset_data = self.hda_manager.text_data( dataset, preview )
+            dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
+
+            # If dataset is chunkable, get first chunk.
+            first_chunk = None
+            if dataset.datatype.CHUNKABLE:
+                first_chunk = dataset.datatype.get_chunk(trans, dataset, 0)
+
+            # If data is binary or an image, stream without template; otherwise, use display template.
+            # TODO: figure out a way to display images in display template.
+            if isinstance(dataset.datatype, datatypes.binary.Binary) or isinstance(dataset.datatype, datatypes.images.Image) or isinstance(dataset.datatype, datatypes.text.Html):
+                trans.response.set_content_type( dataset.get_mime() )
+                return open( dataset.file_name )
+            else:
+                # Get rating data.
+                user_item_rating = 0
+                if trans.get_user():
+                    user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), dataset )
+                    if user_item_rating:
+                        user_item_rating = user_item_rating.rating
+                    else:
+                        user_item_rating = 0
+                ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, dataset )
+
+                return trans.fill_template_mako( "/dataset/display.mako", item=dataset, item_data=dataset_data,
+                                                 truncated=truncated, user_item_rating=user_item_rating,
+                                                 ave_item_rating=ave_item_rating, num_ratings=num_ratings,
+                                                 first_chunk=first_chunk )
+        else:
+            raise web.httpexceptions.HTTPNotFound()
+
+    @web.expose
+    def get_item_content_async( self, trans, id ):
+        """ Returns item content in HTML format. """
+
+        decoded_id = self.decode_id( id )
+        dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
+        dataset = self.hda_manager.error_if_uploading( dataset )
+        if dataset is None:
+            raise web.httpexceptions.HTTPNotFound()
+        truncated, dataset_data = self.hda_manager.text_data( dataset, preview=True )
+        # Get annotation.
+        dataset.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
+        return trans.stream_template_mako( "/dataset/item_content.mako", item=dataset, item_data=dataset_data, truncated=truncated )
+
+    @web.expose
+    def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
+        # TODO: why is this an access check only?
+        decoded_id = self.decode_id( id )
+        dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
+        dataset = self.hda_manager.error_if_uploading( dataset )
+        if not dataset:
+            raise web.httpexceptions.HTTPNotFound()
+        if dataset and new_annotation:
+            # Sanitize annotation before adding it.
+            new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
+            self.add_item_annotation( trans.sa_session, trans.get_user(), dataset, new_annotation )
+            trans.sa_session.flush()
+            return new_annotation
+
+    @web.expose
+    def get_annotation_async( self, trans, id ):
+        decoded_id = self.decode_id( id )
+        dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
+        dataset = self.hda_manager.error_if_uploading( dataset )
+        if not dataset:
+            raise web.httpexceptions.HTTPNotFound()
+        annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
+        if annotation and isinstance( annotation, text_type ):
+            annotation = annotation.encode( 'ascii', 'replace' )  # paste needs ascii here
+        return annotation
+
+    @web.expose
+    def display_at( self, trans, dataset_id, filename=None, **kwd ):
+        """Sets up a dataset permissions so it is viewable at an external site"""
+        if not trans.app.config.enable_old_display_applications:
+            return trans.show_error_message( "This method of accessing external display applications has been disabled by a Galaxy administrator." )
+        site = filename
+        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id )
+        if not data:
+            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
+        if 'display_url' not in kwd or 'redirect_url' not in kwd:
+            return trans.show_error_message( 'Invalid parameters specified for "display at" link; please contact a Galaxy administrator' )
+        try:
+            redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
+        except Exception:
+            redirect_url = kwd['redirect_url']  # not all will need custom text
+        if trans.app.security_agent.dataset_is_public( data.dataset ):
+            return trans.response.send_redirect( redirect_url )  # anon access already permitted by rbac
+        if self._can_access_dataset( trans, data ):
+            trans.app.host_security_agent.set_dataset_permissions( data, trans.user, site )
+            return trans.response.send_redirect( redirect_url )
+        else:
+            return trans.show_error_message( "You are not allowed to view this dataset at external sites.  Please contact your Galaxy administrator to acquire management permissions for this dataset." )
+
+    @web.expose
+    def display_application( self, trans, dataset_id=None, user_id=None, app_name=None, link_name=None, app_action=None, action_param=None, action_param_extra=None, **kwds ):
+        """Access to external display applications"""
+        # Build list of parameters to pass in to display application logic (app_kwds)
+        app_kwds = {}
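+        # Keyword arguments prefixed with "app_" are reserved for the display
+        # application itself; they are stripped from kwds and passed separately.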
+        for name, value in dict(kwds).iteritems():  # clone kwds because we remove stuff as we go.
+            if name.startswith( "app_" ):
+                app_kwds[ name[ len( "app_" ): ] ] = value
+                del kwds[ name ]
+        if kwds:
+            log.debug( "Unexpected Keywords passed to display_application: %s" % kwds )  # route memory?
+        # decode ids
+        data, user = decode_dataset_user( trans, dataset_id, user_id )
+        if not data:
+            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
+        if user is None:
+            user = trans.user
+        if user:
+            user_roles = user.all_roles()
+        else:
+            user_roles = []
+        # Decode application name and link name
+        app_name = urllib.unquote_plus( app_name )
+        link_name = urllib.unquote_plus( link_name )
+        if None in [ app_name, link_name ]:
+            return trans.show_error_message( "A display application name and link name must be provided." )
+        if self._can_access_dataset( trans, data, additional_roles=user_roles ):
+            msg = []
+            preparable_steps = []
+            refresh = False
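+            # Resolve the display application and link, then either stream the
+            # requested data, redirect to the external site, or prepare any
+            # additional datasets the link still needs.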
+            display_app = trans.app.datatypes_registry.display_applications.get( app_name )
+            if not display_app:
+                log.debug( "Unknown display application has been requested: %s", app_name )
+                return paste.httpexceptions.HTTPNotFound( "The requested display application (%s) is not available." % ( app_name ) )
+            dataset_hash, user_hash = encode_dataset_user( trans, data, user )
+            try:
+                display_link = display_app.get_link( link_name, data, dataset_hash, user_hash, trans, app_kwds )
+            except Exception as e:
+                log.debug( "Error generating display_link: %s", e )
+                # Users can sometimes recover from errors (e.g. conversion errors) by fixing input metadata, so use conflict
+                return paste.httpexceptions.HTTPConflict( "Error generating display_link: %s" % e )
+            if not display_link:
+                log.debug( "Unknown display link has been requested: %s", link_name )
+                return paste.httpexceptions.HTTPNotFound( "Unknown display link has been requested: %s" % link_name )
+            if data.state == data.states.ERROR:
+                msg.append( ( 'This dataset is in an error state; you cannot view it at an external display application.', 'error' ) )
+            elif data.deleted:
+                msg.append( ( 'This dataset has been deleted; you cannot view it at an external display application.', 'error' ) )
+            elif data.state != data.states.OK:
+                msg.append( ( 'You must wait for this dataset to be created before you can view it at an external display application.', 'info' ) )
+                refresh = True
+            else:
+                # We have permissions, dataset is not deleted and is in OK state, allow access
+                if display_link.display_ready():
+                    if app_action in [ 'data', 'param' ]:
+                        assert action_param, "An action param must be provided for a data or param action"
+                        # 'data' is used for things with filenames that could be passed off to a proxy,
+                        # in case some display app wants all files to be in the same 'directory'.
+                        # A 'data' action can be forced to 'param', but not the other way around
+                        # (there is no filename for the other direction).
+                        # Get the param name from the URL param name.
+                        try:
+                            action_param = display_link.get_param_name_by_url( action_param )
+                        except ValueError as e:
+                            log.debug( e )
+                            return paste.httpexceptions.HTTPNotFound( str( e ) )
+                        value = display_link.get_param_value( action_param )
+                        assert value, "An invalid parameter name was provided: %s" % action_param
+                        assert value.parameter.viewable, "This parameter is not viewable."
+                        if value.parameter.type == 'data':
+                            try:
+                                if action_param_extra:
+                                    assert value.parameter.allow_extra_files_access, "Extra file content requested (%s), but allow_extra_files_access is False." % ( action_param_extra )
+                                    file_name = os.path.join( value.extra_files_path, action_param_extra )
+                                else:
+                                    file_name = value.file_name
+                                content_length = os.path.getsize( file_name )
+                                rval = open( file_name )
+                            except OSError as e:
+                                log.debug( "Unable to access requested file in display application: %s", e )
+                                return paste.httpexceptions.HTTPNotFound( "This file is no longer available." )
+                        else:
+                            rval = str( value )
+                            content_length = len( rval )
+                        trans.response.set_content_type( value.mime_type( action_param_extra=action_param_extra ) )
+                        trans.response.headers[ 'Content-Length' ] = content_length
+                        return rval
+                    elif app_action is None:
+                        # redirect user to url generated by display link
+                        # Work around Safari caching display links, which can change if the underlying dataset has an attribute change (e.g. name, metadata, etc.)
+                        trans.response.headers[ 'Cache-Control' ] = [ 'no-cache', 'max-age=0', 'no-store', 'must-revalidate' ]
+                        return trans.response.send_redirect( display_link.display_url() )
+                    else:
+                        msg.append( ( 'Invalid action provided: %s' % app_action, 'error' ) )
+                else:
+                    if app_action is None:
+                        if trans.history != data.history:
+                            msg.append( ( 'You must import this dataset into your current history before you can view it at the desired display application.', 'error' ) )
+                        else:
+                            refresh = True
+                            msg.append( ( 'Launching this display application required additional datasets to be generated; you can view the status of these jobs below.', 'info' ) )
+                            if not display_link.preparing_display():
+                                display_link.prepare_display()
+                            preparable_steps = display_link.get_prepare_steps()
+                    else:
+                        raise Exception( 'Attempted a view action (%s) on a non-ready display application' % app_action )
+            return trans.fill_template_mako( "dataset/display_application/display.mako",
+                                             msg=msg,
+                                             display_app=display_app,
+                                             display_link=display_link,
+                                             refresh=refresh,
+                                             preparable_steps=preparable_steps )
+        return trans.show_error_message( 'You do not have permission to view this dataset at an external display application.' )
+
+    def _delete( self, trans, dataset_id ):
+        message = None
+        status = 'done'
+        id = None
+        try:
+            id = self.decode_id( dataset_id )
+            hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+            assert hda, 'Invalid HDA: %s' % id
+            # Walk up parent datasets to find the containing history
+            topmost_parent = hda
+            while topmost_parent.parent:
+                topmost_parent = topmost_parent.parent
+            assert topmost_parent in trans.history.datasets, "Data does not belong to current history"
+            # Mark deleted and cleanup
+            hda.mark_deleted()
+            hda.clear_associated_files()
+            trans.log_event( "Dataset id %s marked as deleted" % str(id) )
+            self.hda_manager.stop_creating_job( hda )
+            trans.sa_session.flush()
+        except Exception as e:
+            msg = 'HDA deletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+            log.exception( msg + ': ' + str( e ) )
+            trans.log_event( msg )
+            message = 'Dataset deletion failed'
+            status = 'error'
+        return ( message, status )
+
+    def _undelete( self, trans, dataset_id ):
+        message = None
+        status = 'done'
+        id = None
+        try:
+            id = self.decode_id( dataset_id )
+            history = trans.get_history()
+            hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+            assert hda and hda.undeletable, 'Invalid HDA: %s' % id
+            # Walk up parent datasets to find the containing history
+            topmost_parent = hda
+            while topmost_parent.parent:
+                topmost_parent = topmost_parent.parent
+            assert topmost_parent in history.datasets, "Data does not belong to current history"
+            # Mark undeleted
+            hda.mark_undeleted()
+            trans.sa_session.flush()
+            trans.log_event( "Dataset id %s has been undeleted" % str(id) )
+        except Exception:
+            msg = 'HDA undeletion failed (encoded: %s, decoded: %s)' % ( dataset_id, id )
+            log.exception( msg )
+            trans.log_event( msg )
+            message = 'Dataset undeletion failed'
+            status = 'error'
+        return ( message, status )
+
+    def _unhide( self, trans, dataset_id ):
+        try:
+            id = self.decode_id( dataset_id )
+        except Exception:
+            return False
+        history = trans.get_history()
+        hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+        if hda:
+            # Walk up parent datasets to find the containing history
+            topmost_parent = hda
+            while topmost_parent.parent:
+                topmost_parent = topmost_parent.parent
+            assert topmost_parent in history.datasets, "Data does not belong to current history"
+            # Mark unhidden
+            hda.mark_unhidden()
+            trans.sa_session.flush()
+            trans.log_event( "Dataset id %s has been unhidden" % str(id) )
+            return True
+        return False
+
+    def _purge( self, trans, dataset_id ):
+        message = None
+        status = 'done'
+        id = None
+        try:
+            id = self.decode_id( dataset_id )
+            user = trans.get_user()
+            hda = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+            # Invalid HDA
+            assert hda, 'Invalid history dataset ID'
+
+            # Walk up parent datasets to find the containing history
+            topmost_parent = hda
+            while topmost_parent.parent:
+                topmost_parent = topmost_parent.parent
+            # If the user is anonymous, make sure the HDA is owned by the current session.
+            if not user:
+                current_history_id = trans.galaxy_session.current_history_id
+                assert topmost_parent.history.id == current_history_id, 'Data does not belong to current user'
+            # If the user is known, make sure the HDA is owned by the current user.
+            else:
+                assert topmost_parent.history.user == user, 'Data does not belong to current user'
+
+            # Ensure HDA is deleted
+            hda.deleted = True
+            # HDA is purgeable
+            # Decrease disk usage first
+            if user:
+                user.adjust_total_disk_usage(-hda.quota_amount(user))
+            # Mark purged
+            hda.purged = True
+            trans.sa_session.add( hda )
+            trans.log_event( "HDA id %s has been purged" % hda.id )
+            trans.sa_session.flush()
+            # Don't delete anything if there are active HDAs or any LDDAs, even if
+            # the LDDAs are deleted.  Let the cleanup scripts get it in the latter
+            # case.
+            if hda.dataset.user_can_purge:
+                try:
+                    hda.dataset.full_delete()
+                    trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
+                    trans.sa_session.add( hda.dataset )
+                except Exception:
+                    log.exception( 'Unable to purge dataset (%s) on purge of HDA (%s):' % ( hda.dataset.id, hda.id ) )
+            trans.sa_session.flush()
+        except Exception as exc:
+            msg = 'HDA purge failed (encoded: %s, decoded: %s): %s' % ( dataset_id, id, exc )
+            log.exception( msg )
+            trans.log_event( msg )
+            message = 'Dataset removal from disk failed'
+            status = 'error'
+        return ( message, status )
+
+    @web.expose
+    def delete( self, trans, dataset_id, filename, show_deleted_on_refresh=False ):
+        message, status = self._delete( trans, dataset_id )
+        return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
+
+    @web.expose
+    def delete_async( self, trans, dataset_id, filename ):
+        message, status = self._delete( trans, dataset_id )
+        if status == 'done':
+            return "OK"
+        else:
+            raise Exception( message )
+
+    @web.expose
+    def undelete( self, trans, dataset_id, filename ):
+        message, status = self._undelete( trans, dataset_id )
+        return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=True, message=message, status=status ) )
+
+    @web.expose
+    def undelete_async( self, trans, dataset_id, filename ):
+        message, status = self._undelete( trans, dataset_id )
+        if status == 'done':
+            return "OK"
+        else:
+            raise Exception( message )
+
+    @web.expose
+    def unhide( self, trans, dataset_id, filename ):
+        if self._unhide( trans, dataset_id ):
+            return trans.response.send_redirect( web.url_for( controller='root', action='history', show_hidden=True ) )
+        raise Exception( "Error unhiding" )
+
+    @web.expose
+    def purge( self, trans, dataset_id, filename, show_deleted_on_refresh=False ):
+        if trans.app.config.allow_user_dataset_purge:
+            message, status = self._purge( trans, dataset_id )
+        else:
+            message = "Removal of datasets by users is not allowed in this Galaxy instance.  Please contact your Galaxy administrator."
+            status = 'error'
+        return trans.response.send_redirect( web.url_for( controller='root', action='history', show_deleted=show_deleted_on_refresh, message=message, status=status ) )
+
+    @web.expose
+    def purge_async( self, trans, dataset_id, filename ):
+        if trans.app.config.allow_user_dataset_purge:
+            message, status = self._purge( trans, dataset_id )
+        else:
+            message = "Removal of datasets by users is not allowed in this Galaxy instance.  Please contact your Galaxy administrator."
+            status = 'error'
+        if status == 'done':
+            return "OK"
+        else:
+            raise Exception( message )
+
+    @web.expose
+    def show_params( self, trans, dataset_id=None, from_noframe=None, **kwd ):
+        """
+        Show the parameters used for the job associated with an HDA
+        """
+        try:
+            hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( self.decode_id( dataset_id ) )
+        except ValueError:
+            hda = None
+        if not hda:
+            raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % escape( str( dataset_id ) ) )
+        if not self._can_access_dataset( trans, hda ):
+            return trans.show_error_message( "You are not allowed to access this dataset" )
+
+        # Get the associated job, if any. If this hda was copied from another,
+        # we need to find the job that created the original dataset association.
+        params_objects = None
+        job = None
+        tool = None
+        upgrade_messages = {}
+        has_parameter_errors = False
+        inherit_chain = hda.source_dataset_chain
+        if inherit_chain:
+            job_dataset_association = inherit_chain[-1][0]
+        else:
+            job_dataset_association = hda
+        if job_dataset_association.creating_job_associations:
+            job = job_dataset_association.creating_job_associations[0].job
+            if job:
+                # Get the tool object
+                try:
+                    # Load the tool
+                    toolbox = self.get_toolbox()
+                    tool = toolbox.get_tool( job.tool_id )
+                    assert tool is not None, 'Requested tool has not been loaded.'
+                    # Load parameter objects, if a parameter type has changed, it's possible for the value to no longer be valid
+                    try:
+                        params_objects = job.get_param_values( trans.app, ignore_errors=False )
+                    except Exception:
+                        params_objects = job.get_param_values( trans.app, ignore_errors=True )
+                        # use different param_objects in the following line, since we want to display original values as much as possible
+                        upgrade_messages = tool.check_and_update_param_values( job.get_param_values( trans.app, ignore_errors=True ),
+                                                                               trans,
+                                                                               update_values=False )
+                        has_parameter_errors = True
+                except Exception:
+                    pass
+        if job is None:
+            return trans.show_error_message( "Job information is not available for this dataset." )
+        # TODO: we should provide the basic values along with the objects, in order to better handle reporting of old values during upgrade
+        return trans.fill_template( "show_params.mako",
+                                    inherit_chain=inherit_chain,
+                                    history=trans.get_history(),
+                                    hda=hda,
+                                    job=job,
+                                    tool=tool,
+                                    params_objects=params_objects,
+                                    upgrade_messages=upgrade_messages,
+                                    has_parameter_errors=has_parameter_errors )
+
+    @web.expose
+    def copy_datasets( self, trans, source_history=None, source_content_ids="", target_history_id=None, target_history_ids="", new_history_name="", do_copy=False, **kwd ):
+        user = trans.get_user()
+        if source_history is not None:
+            decoded_source_history_id = self.decode_id( source_history )
+            history = self.history_manager.get_owned( decoded_source_history_id, trans.user, current_history=trans.history )
+            current_history = trans.get_history()
+        else:
+            history = current_history = trans.get_history()
+        refresh_frames = []
+        if source_content_ids:
+            if not isinstance( source_content_ids, list ):
+                source_content_ids = source_content_ids.split(",")
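+            # Each source content id is prefixed with its content type,
+            # e.g. "dataset|<encoded id>" or "dataset_collection|<encoded id>".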
+            encoded_dataset_collection_ids = [ s[ len("dataset_collection|"): ] for s in source_content_ids if s.startswith("dataset_collection|") ]
+            encoded_dataset_ids = [ s[ len("dataset|"): ] for s in source_content_ids if s.startswith("dataset|") ]
+            decoded_dataset_collection_ids = set(map( self.decode_id, encoded_dataset_collection_ids ))
+            decoded_dataset_ids = set(map( self.decode_id, encoded_dataset_ids ))
+        else:
+            decoded_dataset_collection_ids = []
+            decoded_dataset_ids = []
+        if new_history_name:
+            target_history_ids = []
+        else:
+            if target_history_id:
+                target_history_ids = [ self.decode_id(target_history_id) ]
+            elif target_history_ids:
+                if not isinstance( target_history_ids, list ):
+                    target_history_ids = target_history_ids.split(",")
+                target_history_ids = list(set([ self.decode_id(h) for h in target_history_ids if h ]))
+            else:
+                target_history_ids = []
+        done_msg = error_msg = ""
+        new_history = None
+        if do_copy:
+            invalid_contents = 0
+            if not ( decoded_dataset_ids or decoded_dataset_collection_ids ) or not ( target_history_ids or new_history_name ):
+                error_msg = "You must provide both source datasets and target histories. "
+            else:
+                if new_history_name:
+                    new_history = trans.app.model.History()
+                    new_history.name = new_history_name
+                    new_history.user = user
+                    trans.sa_session.add( new_history )
+                    trans.sa_session.flush()
+                    target_history_ids.append( new_history.id )
+                if user:
+                    target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if hist is not None and hist.user == user ]
+                else:
+                    target_histories = [ history ]
+                if len( target_histories ) != len( target_history_ids ):
+                    error_msg = error_msg + "You do not have permission to add datasets to %i requested histories.  " % ( len( target_history_ids ) - len( target_histories ) )
+                source_contents = map( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get, decoded_dataset_ids )
+                source_contents.extend( map( trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get, decoded_dataset_collection_ids ) )
+                source_contents.sort(key=lambda content: content.hid)
+                for content in source_contents:
+                    if content is None:
+                        error_msg = error_msg + "You tried to copy a dataset that does not exist. "
+                        invalid_contents += 1
+                    elif content.history != history:
+                        error_msg = error_msg + "You tried to copy a dataset which is not in your current history. "
+                        invalid_contents += 1
+                    else:
+                        for hist in target_histories:
+                            if content.history_content_type == "dataset":
+                                hist.add_dataset( content.copy( copy_children=True ) )
+                            else:
+                                hist.add_dataset_collection( content.copy( element_destination=hist ) )
+                if current_history in target_histories:
+                    refresh_frames = ['history']
+                trans.sa_session.flush()
+                hist_names_str = ", ".join( ['<a href="%s" target="_top">%s</a>' %
+                                            ( url_for( controller="history", action="switch_to_history",
+                                                       hist_id=trans.security.encode_id( hist.id ) ), escape(hist.name) )
+                                            for hist in target_histories ] )
+                num_source = len( source_content_ids ) - invalid_contents
+                num_target = len(target_histories)
+                done_msg = "%i %s copied to %i %s: %s." % (num_source, inflector.cond_plural(num_source, "dataset"), num_target, inflector.cond_plural(num_target, "history"), hist_names_str )
+                trans.sa_session.refresh( history )
+        source_contents = history.active_contents
+        target_histories = [history]
+        if user:
+            target_histories = user.active_histories
+        return trans.fill_template( "/dataset/copy_view.mako",
+                                    source_history=history,
+                                    current_history=current_history,
+                                    source_content_ids=source_content_ids,
+                                    target_history_id=target_history_id,
+                                    target_history_ids=target_history_ids,
+                                    source_contents=source_contents,
+                                    target_histories=target_histories,
+                                    new_history_name=new_history_name,
+                                    done_msg=done_msg,
+                                    error_msg=error_msg,
+                                    refresh_frames=refresh_frames )
+
+    def _copy_datasets( self, trans, dataset_ids, target_histories, imported=False ):
+        """ Helper method for copying datasets. """
+        user = trans.get_user()
+        done_msg = error_msg = ""
+
+        invalid_datasets = 0
+        if not dataset_ids or not target_histories:
+            error_msg = "You must provide both source datasets and target histories."
+        else:
+            # User must own target histories to copy datasets to them.
+            unauthorized_histories = [ h for h in target_histories if user != h.user ]
+            if unauthorized_histories:
+                error_msg = error_msg + "You do not have permission to add datasets to %i requested histories.  " % len( unauthorized_histories )
+            for dataset_id in dataset_ids:
+                decoded_id = self.decode_id( dataset_id )
+                data = self.hda_manager.get_accessible( decoded_id, trans.user )
+                data = self.hda_manager.error_if_uploading( data )
+
+                if data is None:
+                    error_msg = error_msg + "You tried to copy a dataset that does not exist or that you do not have access to.  "
+                    invalid_datasets += 1
+                else:
+                    for hist in target_histories:
+                        dataset_copy = data.copy( copy_children=True )
+                        if imported:
+                            dataset_copy.name = "imported: " + dataset_copy.name
+                        hist.add_dataset( dataset_copy )
+            trans.sa_session.flush()
+            num_datasets_copied = len( dataset_ids ) - invalid_datasets
+            done_msg = "%i dataset%s copied to %i histor%s." % \
+                ( num_datasets_copied, iff( num_datasets_copied == 1, "", "s"), len( target_histories ), iff( len( target_histories ) == 1, "y", "ies") )
+            for target_history in target_histories:
+                trans.sa_session.refresh( target_history )
+
+        if error_msg != "":
+            status = ERROR
+            message = error_msg
+        else:
+            status = SUCCESS
+            message = done_msg
+        return status, message
diff --git a/lib/galaxy/webapps/galaxy/controllers/error.py b/lib/galaxy/webapps/galaxy/controllers/error.py
new file mode 100644
index 0000000..e0e8099
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/error.py
@@ -0,0 +1,7 @@
+from galaxy.web.base.controller import BaseUIController, web
+
+
+class Error( BaseUIController ):
+    @web.expose
+    def index( self, trans ):
+        raise Exception( "Fake error" )
diff --git a/lib/galaxy/webapps/galaxy/controllers/external_service.py b/lib/galaxy/webapps/galaxy/controllers/external_service.py
new file mode 100644
index 0000000..dbf1b7f
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/external_service.py
@@ -0,0 +1,369 @@
+from __future__ import absolute_import
+
+import logging
+
+from galaxy import model, util
+from markupsafe import escape
+from galaxy.web.base.controller import BaseUIController, web, UsesFormDefinitionsMixin
+from galaxy.web.form_builder import TextField, SelectField
+from galaxy.web.framework.helpers import time_ago, iff, grids
+from .requests_common import invalid_id_redirect
+
+log = logging.getLogger( __name__ )
+
+
+class ExternalServiceGrid( grids.Grid ):
+    # Custom column types
+    class NameColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, external_service):
+            return escape(external_service.name)
+
+    class ExternalServiceTypeColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, external_service):
+            try:
+                return trans.app.external_service_types.all_external_service_types[ external_service.external_service_type_id ].name
+            except KeyError:
+                return 'Error in loading external_service type: %s' % external_service.external_service_type_id
+
+    # Grid definition
+    title = "External Services"
+    template = "admin/external_service/grid.mako"
+    model_class = model.ExternalService
+    default_sort_key = "-create_time"
+    num_rows_per_page = 50
+    preserve_state = True
+    use_paging = True
+    default_filter = dict( deleted="False" )
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: iff( item.deleted, None, dict( operation="view", id=item.id ) ) ),
+                    attach_popup=True,
+                    filterable="advanced" ),
+        grids.TextColumn( "Description",
+                          key='description',
+                          filterable="advanced" ),
+        ExternalServiceTypeColumn( "External Service Type" ),
+        grids.GridColumn( "Last Updated",
+                          key="update_time",
+                          format=time_ago ),
+        grids.DeletedColumn( "Deleted",
+                             key="deleted",
+                             visible=False,
+                             filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [
+        grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted  )  ),
+        grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted  )  ),
+        grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
+    ]
+    global_actions = [
+        grids.GridAction( "Reload external service types", dict( controller='external_service', action='reload_external_service_types' ) ),
+        grids.GridAction( "Create new external service", dict( controller='external_service', action='create_external_service' ) )
+    ]
+
+
+class ExternalService( BaseUIController, UsesFormDefinitionsMixin ):
+    external_service_grid = ExternalServiceGrid()
+
+    @web.expose
+    @web.require_admin
+    def browse_external_services( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "view":
+                return self.view_external_service( trans, **kwd )
+            elif operation == "edit":
+                return self.edit_external_service( trans, **kwd )
+            elif operation == "delete":
+                return self.delete_external_service( trans, **kwd )
+            elif operation == "undelete":
+                return self.undelete_external_service( trans, **kwd )
+        # Render the grid view
+        return self.external_service_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def create_external_service( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        external_service_type_id = params.get( 'external_service_type_id', 'none' )
+        widgets = self.__build_external_service_widgets( trans, external_service=None, **kwd )
+        external_service_type = None
+        error = False
+        if not trans.app.external_service_types.visible_external_service_types:
+            error = True
+            message = 'There are no visible external service types in the external service types config file.'
+        elif params.get( 'create_external_service_button', False ):
+            if external_service_type_id == 'none':
+                error = True
+                message = 'Provide an external_service_type_id to create a new external service.'
+            else:
+                self.__save_external_service( trans, **kwd )
+                message = 'The external service has been created.'
+                return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                                  action='browse_external_services',
+                                                                  message=message,
+                                                                  status=status ) )
+        elif external_service_type_id != 'none':
+            # Form submission via refresh_on_change
+            trans.app.external_service_types.reload( external_service_type_id )
+            external_service_type = self.get_external_service_type( trans, external_service_type_id )
+            widgets.extend( external_service_type.form_definition.get_widgets( trans.user, **kwd ) )
+        if error:
+            return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                              action='browse_external_services',
+                                                              message=message,
+                                                              status='error' ) )
+        return trans.fill_template( '/admin/external_service/create_external_service.mako',
+                                    widgets=widgets,
+                                    message=message,
+                                    status=status,
+                                    external_service_type=external_service_type )
+
+    @web.expose
+    @web.require_admin
+    def view_external_service( self, trans, **kwd ):
+        external_service_id = kwd.get( 'id', None )
+        try:
+            external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+        except:
+            return invalid_id_redirect( trans, 'external_service', external_service_id, 'external_service', action='browse_external_services' )
+        external_service_type = self.get_external_service_type( trans, external_service.external_service_type_id )
+        return trans.fill_template( '/admin/external_service/view_external_service.mako',
+                                    external_service=external_service,
+                                    external_service_type=external_service_type )
+
+    @web.expose
+    @web.require_admin
+    def edit_external_service( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        external_service_id = params.get( 'id', None )
+        try:
+            external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+        except:
+            return invalid_id_redirect( trans, 'external_service', external_service_id, 'external_service', action='browse_external_services' )
+        if params.get( 'edit_external_service_button', False ):
+            external_service = self.__save_external_service( trans, **kwd )
+            trans.sa_session.refresh( external_service )
+            message = 'Changes made to external service (%s) have been saved' % external_service.name
+        widgets = self.__build_external_service_widgets( trans, external_service, **kwd )
+        widgets.extend( external_service.form_definition.get_widgets( trans.user, external_service.form_values.content, **kwd ) )
+        external_service_type = self.get_external_service_type( trans, external_service.external_service_type_id )
+        return trans.fill_template( '/admin/external_service/edit_external_service.mako',
+                                    external_service=external_service,
+                                    widgets=widgets,
+                                    message=message,
+                                    status=status,
+                                    external_service_type=external_service_type )
+
+    def __save_external_service( self, trans, **kwd ):
+        # Here we save a newly created external_service or save changed
+        # attributes of an existing external_service.
+        params = util.Params( kwd )
+        external_service_id = params.get( 'id', None )
+        name = util.restore_text( params.get( 'external_service_name', ''  ) )
+        description = util.restore_text( params.get( 'external_service_description', '' ) )
+        version = util.restore_text( params.get( 'external_service_version', '' ) )
+        external_service_type_id = params.get( 'external_service_type_id', '' )
+        if external_service_id:
+            # We're saving changed attributes of an existing external_service.
+            external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+            external_service.name = name
+            external_service.description = description
+            external_service.version = version
+            external_service.form_values.content = self.get_form_values( trans, trans.user, external_service.form_definition, **kwd )
+            trans.sa_session.add( external_service )
+            trans.sa_session.add( external_service.form_values )
+            trans.sa_session.flush()
+        else:
+            # We're saving a newly created external_service
+            external_service_type = self.get_external_service_type( trans, external_service_type_id )
+            external_service = trans.model.ExternalService( name, description, external_service_type_id, version )
+            external_service.form_definition = external_service_type.form_definition
+            # Get the form values from kwd, some of which may be different than the defaults in the external service
+            # type config because the user could have overwritten them.
+            values = self.get_form_values( trans, trans.user, external_service.form_definition, **kwd )
+            external_service.form_values = trans.model.FormValues( external_service.form_definition, values )
+            trans.sa_session.add( external_service )
+            trans.sa_session.add( external_service.form_definition )
+            trans.sa_session.add( external_service.form_values )
+            trans.sa_session.flush()
+        return external_service
+
+    @web.expose
+    @web.require_admin
+    def edit_external_service_form_definition( self, trans, **kwd ):
+        util.Params( kwd )
+        external_service_id = kwd.get( 'id', None )
+        try:
+            external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+        except:
+            return invalid_id_redirect( trans, 'external_service', external_service_id, 'external_service', action='browse_external_services' )
+        vars = dict( id=trans.security.encode_id( external_service.form_definition.form_definition_current_id ),
+                     response_redirect=web.url_for( controller='external_service',
+                                                    action='update_external_service_form_definition',
+                                                    **kwd ) )
+        return trans.response.send_redirect( web.url_for( controller='forms', action='edit_form_definition', **vars ) )
+
+    @web.expose
+    @web.require_admin
+    def update_external_service_form_definition( self, trans, **kwd ):
+        util.Params( kwd )
+        external_service_id = kwd.get( 'id', None )
+        try:
+            external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+        except:
+            return invalid_id_redirect( trans, 'external_service', external_service_id, 'external_service', action='browse_external_services' )
+        external_service.form_definition = external_service.form_definition.current.latest_form
+        trans.sa_session.add( external_service )
+        trans.sa_session.flush()
+        message = "The form definition for the '%s' external service has been updated with your changes." % external_service.name
+        return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                          action='edit_external_service',
+                                                          message=message,
+                                                          status='done',
+                                                          **kwd ) )
+
+    @web.expose
+    @web.require_admin
+    def delete_external_service( self, trans, **kwd ):
+        external_service_id = kwd.get( 'id', '' )
+        external_service_id_list = util.listify( external_service_id )
+        for external_service_id in external_service_id_list:
+            try:
+                external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+            except:
+                return invalid_id_redirect( trans, 'external_service', external_service_id, 'external_service', action='browse_external_services' )
+            external_service.deleted = True
+            trans.sa_session.add( external_service )
+            trans.sa_session.flush()
+        message = '%i external services have been deleted' % len( external_service_id_list )
+        return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                          action='browse_external_services',
+                                                          message=message,
+                                                          status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def undelete_external_service( self, trans, **kwd ):
+        external_service_id = kwd.get( 'id', '' )
+        external_service_id_list = util.listify( external_service_id )
+        for external_service_id in external_service_id_list:
+            try:
+                external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+            except:
+                return invalid_id_redirect( trans, 'external_service', external_service_id, 'external_service', action='browse_external_services' )
+            external_service.deleted = False
+            trans.sa_session.add( external_service )
+            trans.sa_session.flush()
+        status = 'done'
+        message = '%i external services have been undeleted' % len( external_service_id_list )
+        return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                          action='browse_external_services',
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def reload_external_service_types( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        external_service_type_id = params.get( 'external_service_type_id', 'none' )
+        if params.get( 'reload_external_service_type_button', False ):
+            new_external_service_type = trans.app.external_service_types.reload( external_service_type_id )
+            status = 'done'
+            message = 'Reloaded external service type: %s' % new_external_service_type.name
+        external_service_type_select_field = self.__build_external_service_type_select_field( trans,
+                                                                                              external_service_type_id,
+                                                                                              refresh_on_change=False,
+                                                                                              visible_external_service_types_only=False )
+        if not trans.app.external_service_types.visible_external_service_types:
+            message = 'There are no visible external service types in the external service types config file.'
+            status = 'error'
+            return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                              action='browse_external_services',
+                                                              message=message,
+                                                              status=status ) )
+        return trans.fill_template( '/admin/external_service/reload_external_service_types.mako',
+                                    external_service_type_select_field=external_service_type_select_field,
+                                    message=message,
+                                    status=status )
+
+    def get_external_service_type( self, trans, external_service_type_id, action='browse_external_services' ):
+        try:
+            return trans.app.external_service_types.all_external_service_types[ external_service_type_id ]
+        except KeyError:
+            message = 'Error in loading external service type: %s' % external_service_type_id
+            return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                              action=action,
+                                                              message=message,
+                                                              status='error' ) )
+
+    # ===== Methods for building SelectFields used on the various external service forms
+    def __build_external_service_widgets( self, trans, external_service=None, **kwd ):
+        params = util.Params( kwd )
+        if external_service:
+            name = external_service.name
+            description = external_service.description
+            version = external_service.version
+            seq_type = external_service.external_service_type_id
+        else:
+            name = util.restore_text( params.get( 'external_service_name', ''  ) )
+            description = util.restore_text( params.get( 'external_service_description', ''  ) )
+            version = util.restore_text( params.get( 'external_service_version', ''  ) )
+            selected_seq_type = params.get( 'external_service_type_id', ''  )
+            if selected_seq_type in trans.app.external_service_types.all_external_service_types:
+                seq_type = trans.app.external_service_types.all_external_service_types[ selected_seq_type ].id
+            else:
+                seq_type = 'none'
+        widgets = [ dict( label='Name',
+                          widget=TextField( 'external_service_name', 40, name ),
+                          helptext='' ),
+                    dict( label='Description',
+                          widget=TextField( 'external_service_description', 40, description ),
+                          helptext='' ),
+                    dict( label='Version',
+                          widget=TextField( 'external_service_version', 40, version ),
+                          helptext='' ) ]
+        # Do not show the external_service_type selectfield when editing an external service
+        if not external_service:
+            widgets.append( dict( label='External service type',
+                                  widget=self.__build_external_service_type_select_field( trans, seq_type, visible_external_service_types_only=True ),
+                                  helptext='') )
+        return widgets
+
+    def __build_external_service_type_select_field( self, trans, selected_value, refresh_on_change=True, visible_external_service_types_only=False ):
+        external_service_types = trans.app.external_service_types.all_external_service_types
+        if visible_external_service_types_only:
+            objs_list = [ external_service_types[ seq_type_id ] for seq_type_id in trans.app.external_service_types.visible_external_service_types ]
+        else:
+            objs_list = external_service_types.values()
+        refresh_on_change_values = [ 'none' ]
+        refresh_on_change_values.extend( [ trans.security.encode_id( obj.id ) for obj in objs_list] )
+        select_external_service_type = SelectField( 'external_service_type_id',
+                                                    refresh_on_change=refresh_on_change,
+                                                    refresh_on_change_values=refresh_on_change_values )
+        if selected_value == 'none':
+            select_external_service_type.add_option( 'Select one', 'none', selected=True )
+        else:
+            select_external_service_type.add_option( 'Select one', 'none' )
+        for seq_type in objs_list:
+            if seq_type.version:
+                option_name = " ".join( [ seq_type.name, "version", seq_type.version ] )
+            else:
+                option_name = seq_type.name
+            if selected_value == seq_type.id:
+                select_external_service_type.add_option( option_name, seq_type.id, selected=True )
+            else:
+                select_external_service_type.add_option( option_name, seq_type.id )
+        return select_external_service_type
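`__build_external_service_type_select_field` always leads with a 'Select one'/'none' sentinel option and marks at most one concrete type as selected. A self-contained sketch of that construction, using a toy select widget rather than Galaxy's form_builder SelectField:

    # Toy select widget: options are (label, value, selected) tuples.
    class ToySelect(object):
        def __init__(self, name):
            self.name = name
            self.options = []

        def add_option(self, label, value, selected=False):
            self.options.append((label, value, selected))

    def build_type_select(types, selected_value='none'):
        # types maps type id -> (name, version), mirroring all_external_service_types.
        select = ToySelect('external_service_type_id')
        select.add_option('Select one', 'none', selected=(selected_value == 'none'))
        for type_id, (name, version) in types.items():
            label = '%s version %s' % (name, version) if version else name
            select.add_option(label, type_id, selected=(selected_value == type_id))
        return select

    toy = build_type_select({'t1': ('LIMS', '1.0'), 't2': ('Sequencer', None)}, 't2')
    assert [opt for opt in toy.options if opt[2]] == [('Sequencer', 't2', True)]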
diff --git a/lib/galaxy/webapps/galaxy/controllers/external_services.py b/lib/galaxy/webapps/galaxy/controllers/external_services.py
new file mode 100644
index 0000000..638d52f
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/external_services.py
@@ -0,0 +1,27 @@
+import logging
+from galaxy import web
+from galaxy.model import ExternalService, Sample
+from galaxy.web.base.controller import BaseUIController
+
+log = logging.getLogger( __name__ )
+
+class_name_to_class = {}
+
+for model_class in [Sample]:
+    class_name_to_class[ model_class.__name__ ] = model_class
+
+
+class ExternalServiceController( BaseUIController ):
+    @web.expose
+    @web.require_admin
+    def access_action( self, trans, external_service_action, item, item_type, **kwd ):
+        if item_type in class_name_to_class:
+            item_type = class_name_to_class.get( item_type )
+            item = item_type.get( item )
+            external_service_action_parsed = external_service_action.split( '|' )
+            populated_external_service = ExternalService.get( external_service_action_parsed.pop( 0 ) ).populate_actions( trans, item )
+            populated_action = populated_external_service.perform_action_by_name( external_service_action_parsed )
+            results = populated_action.handle_results( trans )
+            return results
+        else:
+            raise Exception( 'Unknown item type: %s' % item_type )
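`access_action` above encodes its target in a single pipe-delimited string: the first token identifies the external service, and the remaining tokens name the (possibly nested) action to perform on the item. A minimal standalone sketch of that convention (names below are illustrative, not Galaxy's API):

    # First token: external service id; remaining tokens: nested action path.
    def parse_external_service_action(external_service_action):
        parts = external_service_action.split('|')
        return parts[0], parts[1:]

    service_id, action_path = parse_external_service_action('42|results|download')
    assert service_id == '42' and action_path == ['results', 'download']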
diff --git a/lib/galaxy/webapps/galaxy/controllers/forms.py b/lib/galaxy/webapps/galaxy/controllers/forms.py
new file mode 100644
index 0000000..27bdcac
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/forms.py
@@ -0,0 +1,676 @@
+import copy
+import csv
+import logging
+import re
+
+from markupsafe import escape
+from galaxy import model, util
+from galaxy.web.base.controller import BaseUIController, web
+from galaxy.web.form_builder import FileField, TextField, HiddenField, SelectField
+from galaxy.web.framework.helpers import iff, grids
+
+log = logging.getLogger( __name__ )
+
+VALID_FIELDNAME_RE = re.compile( r"^[a-zA-Z0-9_]+$" )
+
+
+class FormsGrid( grids.Grid ):
+    # Custom column types
+    class NameColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, form):
+            return escape(form.latest_form.name)
+
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, form):
+            return escape(form.latest_form.desc)
+
+    class TypeColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, form):
+            return form.latest_form.type
+    # Grid definition
+    title = "Forms"
+    template = "admin/forms/grid.mako"
+    model_class = model.FormDefinitionCurrent
+    default_sort_key = "-create_time"
+    num_rows_per_page = 50
+    preserve_state = True
+    use_paging = True
+    default_filter = dict( deleted="False" )
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    model_class=model.FormDefinition,
+                    link=( lambda item: iff( item.deleted, None, dict( operation="view_latest_form_definition",
+                                                                       id=item.id ) ) ),
+                    attach_popup=True,
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           key='desc',
+                           model_class=model.FormDefinition,
+                           filterable="advanced" ),
+        TypeColumn( "Type" ),
+        grids.DeletedColumn( "Deleted",
+                             key="deleted",
+                             visible=False,
+                             filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [
+        grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted )  ),
+        grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted )  ),
+        grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
+    ]
+    global_actions = [
+        grids.GridAction( "Create new form", dict( controller='forms', action='create_form_definition' ) )
+    ]
+
+    def build_initial_query( self, trans, **kwargs ):
+        return trans.sa_session.query( self.model_class ).join(model.FormDefinition, self.model_class.latest_form_id == model.FormDefinition.id)
+
+
+class Forms( BaseUIController ):
+    # Empty TextField
+    empty_field = { 'name': '',
+                    'label': '',
+                    'helptext': '',
+                    'visible': True,
+                    'required': False,
+                    'type': model.TextField.__name__,
+                    'selectlist': [],
+                    'layout': 'none',
+                    'default': '' }
+    forms_grid = FormsGrid()
+
+    @web.expose
+    @web.require_admin
+    def browse_form_definitions( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if not kwd.get( 'id', None ):
+                return trans.response.send_redirect( web.url_for( controller='forms',
+                                                                  action='browse_form_definitions',
+                                                                  status='error',
+                                                                  message="Invalid form ID") )
+            if operation == "view_latest_form_definition":
+                return self.view_latest_form_definition( trans, **kwd )
+            elif operation == "delete":
+                return self.delete_form_definition( trans, **kwd )
+            elif operation == "undelete":
+                return self.undelete_form_definition( trans, **kwd )
+            elif operation == "edit":
+                return self.edit_form_definition( trans, **kwd )
+        return self.forms_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def view_latest_form_definition( self, trans, **kwd ):
+        '''Displays the layout of the latest version of the form definition'''
+        form_definition_current_id = kwd.get( 'id', None )
+        try:
+            form_definition_current = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ) \
+                                                      .get( trans.security.decode_id( form_definition_current_id ) )
+        except:
+            return trans.response.send_redirect( web.url_for( controller='forms',
+                                                              action='browse_form_definitions',
+                                                              message='Invalid form',
+                                                              status='error' ) )
+        return trans.fill_template( '/admin/forms/view_form_definition.mako',
+                                    form_definition=form_definition_current.latest_form )
+
+    @web.expose
+    @web.require_admin
+    def create_form_definition( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        self.__imported_from_file = False
+        if params.get( 'create_form_button', False ):
+            form_definition, message = self.save_form_definition( trans, form_definition_current_id=None, **kwd )
+            if not form_definition:
+                return trans.response.send_redirect( web.url_for( controller='forms',
+                                                                  action='create_form_definition',
+                                                                  message=message,
+                                                                  status='error',
+                                                                  name=util.restore_text( params.get( 'name', '' ) ),
+                                                                  description=util.restore_text( params.get( 'description', '' ) ) ))
+            if self.__imported_from_file:
+                return trans.response.send_redirect( web.url_for( controller='forms',
+                                                                  action='edit_form_definition',
+                                                                  id=trans.security.encode_id( form_definition.current.id )) )
+            else:
+                return trans.response.send_redirect( web.url_for( controller='forms',
+                                                                  action='edit_form_definition',
+                                                                  id=trans.security.encode_id( form_definition.current.id ),
+                                                                  add_field_button='Add field',
+                                                                  name=form_definition.name,
+                                                                  description=form_definition.desc,
+                                                                  form_type_select_field=form_definition.type ) )
+        inputs = [ ( 'Name', TextField( 'name', 40, util.restore_text( params.get( 'name', '' ) ) ) ),
+                   ( 'Description', TextField( 'description', 40, util.restore_text( params.get( 'description', '' ) ) ) ),
+                   ( 'Type', self.__build_form_types_widget( trans, selected=params.get( 'form_type', 'none' ) ) ),
+                   ( 'Import from csv file (Optional)', FileField( 'file_data', 40, '' ) ) ]
+        return trans.fill_template( '/admin/forms/create_form.mako',
+                                    inputs=inputs,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def edit_form_definition( self, trans, response_redirect=None, **kwd ):
+        '''
+        This callback method is for handling form editing.  The value of response_redirect
+        should be a URL that is defined by the caller.  This allows for redirecting as desired
+        when the form changes have been saved.  For an example of how this works, see the
+        edit_template() method in the base controller.
+        '''
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        try:
+            form_definition_current = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( trans.security.decode_id(kwd['id']) )
+        except:
+            return trans.response.send_redirect( web.url_for( controller='forms',
+                                                              action='browse_form_definitions',
+                                                              message='Invalid form',
+                                                              status='error' ) )
+        form_definition = form_definition_current.latest_form
+        # TODO: eliminate the need for this refresh param.
+        if params.get( 'refresh', False ):
+            # Refresh
+            current_form = self.get_current_form( trans, **kwd )
+        else:
+            # Show the saved form for editing
+            current_form = self.get_saved_form( form_definition )
+        # Save changes
+        if params.get( 'save_changes_button', False ):
+            new_form_definition, message = self.save_form_definition( trans, form_definition_current_id=form_definition.form_definition_current.id, **kwd )
+            # if a validation error was encountered while saving the form, show
+            # the unsaved form with the error message
+            if not new_form_definition:
+                status = 'error'
+            else:
+                # everything went fine; the form was saved successfully. Show the
+                # saved form, or redirect to response_redirect if appropriate.
+                if response_redirect:
+                    return trans.response.send_redirect( response_redirect )
+                form_definition = new_form_definition
+                current_form = self.get_saved_form( form_definition )
+                message = "The form '%s' has been updated with the changes." % form_definition.name
+        # Add a layout grid
+        elif params.get( 'add_layout_grid_button', False ):
+            current_form[ 'layout' ].append( '' )
+        # Delete a layout grid
+        elif params.get( 'remove_layout_grid_button', False ):
+            index = int( kwd[ 'remove_layout_grid_button' ].split( ' ' )[2] ) - 1
+            del current_form[ 'layout' ][index]
+        # Add a field
+        elif params.get( 'add_field_button', False ):
+            field_index = len( current_form[ 'fields' ] ) + 1
+            # deep-copy the empty field template so appended fields do not share
+            # the class-level dict (or its mutable selectlist) between requests
+            new_field = copy.deepcopy( self.empty_field )
+            new_field[ 'name' ] = '%i_field_name' % field_index
+            new_field[ 'label' ] = 'Field label %i' % field_index
+            current_form[ 'fields' ].append( new_field )
+        # Delete a field
+        elif params.get( 'remove_button', False ):
+            # find the index of the field to be removed from the remove button label
+            index = int( kwd[ 'remove_button' ].split( ' ' )[2] ) - 1
+            del current_form[ 'fields' ][ index ]
+        # Add SelectField option
+        elif 'Add' in kwd.values():
+            current_form, status, message = self.__add_select_field_option( trans=trans,
+                                                                            current_form=current_form,
+                                                                            **kwd)
+        # Remove SelectField option
+        elif 'Remove' in kwd.values():
+            current_form, status, message = self.__remove_select_field_option( trans=trans,
+                                                                               current_form=current_form,
+                                                                               **kwd)
+        return self.show_editable_form_definition( trans=trans,
+                                                   form_definition=form_definition,
+                                                   current_form=current_form,
+                                                   message=message,
+                                                   status=status,
+                                                   response_redirect=response_redirect,
+                                                   **kwd )
+
+    def get_saved_form( self, form_definition ):
+        '''
+        This retrieves the saved form and returns a dictionary containing the name,
+        desc, type, layout & fields of the form
+        '''
+        if form_definition.type == form_definition.types.SAMPLE:
+            return dict( name=form_definition.name,
+                         desc=form_definition.desc,
+                         type=form_definition.type,
+                         layout=list( copy.deepcopy( form_definition.layout ) ),
+                         fields=list( copy.deepcopy( form_definition.fields ) ) )
+        return dict( name=form_definition.name,
+                     desc=form_definition.desc,
+                     type=form_definition.type,
+                     layout=[],
+                     fields=list( copy.deepcopy( form_definition.fields ) ) )
+
+    def get_current_form( self, trans, **kwd ):
+        '''
+        This method gets all the unsaved user-entered form details and returns a
+        dictionary containing the name, desc, type, layout & fields of the form
+        '''
+        params = util.Params( kwd )
+        name = util.restore_text( params.name )
+        desc = util.restore_text( params.description ) or ""
+        form_type = util.restore_text( params.form_type_select_field )
+        # get the user entered layout grids if it is a sample form definition
+        layout = []
+        if form_type == trans.model.FormDefinition.types.SAMPLE:
+            index = 0
+            while True:
+                if 'grid_layout%i' % index in kwd:
+                    grid_name = util.restore_text( params.get( 'grid_layout%i' % index, '' ) )
+                    layout.append( grid_name )
+                    index = index + 1
+                else:
+                    break
+        # for csv file import
+        csv_file = params.get( 'file_data', '' )
+        fields = []
+        if csv_file == '':
+            # get the user entered fields
+            index = 0
+            while True:
+                if 'field_label_%i' % index in kwd:
+                    fields.append( self.__get_field( index, **kwd ) )
+                    index = index + 1
+                else:
+                    break
+        else:
+            fields, layout = self.__import_fields(trans, csv_file, form_type)
+        return dict(name=name,
+                    desc=desc,
+                    type=form_type,
+                    layout=layout,
+                    fields=fields)
+
+    def save_form_definition( self, trans, form_definition_current_id=None, **kwd ):
+        '''
+        This method saves the current form
+        '''
+        # check the form for invalid inputs
+        flag, message = self.__validate_form( **kwd )
+        if not flag:
+            return None, message
+        current_form = self.get_current_form( trans, **kwd )
+        # validate fields
+        field_names_dict = {}
+        for field in current_form[ 'fields' ]:
+            if not field[ 'label' ]:
+                return None, "All the field labels must be completed."
+            if not VALID_FIELDNAME_RE.match( field[ 'name' ] ):
+                return None, "'%s' is not a valid field name." % field[ 'name' ]
+            if field[ 'name' ] in field_names_dict:
+                return None, "Each field name must be unique in the form definition. '%s' is not unique." % field[ 'name' ]
+            else:
+                field_names_dict[ field[ 'name' ] ] = 1
+        # if type is sample form, it should have at least one layout grid
+        if current_form[ 'type' ] == trans.app.model.FormDefinition.types.SAMPLE and not len( current_form[ 'layout' ] ):
+            current_form[ 'layout' ] = [ 'Layout1' ]
+        # create a new form definition
+        form_definition = trans.app.model.FormDefinition( name=current_form[ 'name' ],
+                                                          desc=current_form[ 'desc' ],
+                                                          fields=current_form[ 'fields' ],
+                                                          form_definition_current=None,
+                                                          form_type=current_form[ 'type' ],
+                                                          layout=current_form[ 'layout' ] )
+        if form_definition_current_id:  # save changes to the existing form
+            # change the pointer in the form_definition_current table to point
+            # to this new record
+            form_definition_current = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( form_definition_current_id )
+        else:  # create a new form
+            form_definition_current = trans.app.model.FormDefinitionCurrent()
+        # create corresponding row in the form_definition_current table
+        form_definition.form_definition_current = form_definition_current
+        form_definition_current.latest_form = form_definition
+        trans.sa_session.add( form_definition_current )
+        trans.sa_session.flush()
+        message = "The new form named '%s' has been created. " % (form_definition.name)
+        return form_definition, message
+
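`save_form_definition` never updates a form row in place: every save creates a new FormDefinition and repoints the FormDefinitionCurrent record's latest_form at it, so older versions remain reachable. A toy illustration of this version-pointer pattern (simplified stand-ins, not the Galaxy model classes):

    class ToyFormDefinition(object):
        def __init__(self, name, current):
            self.name = name
            self.form_definition_current = current

    class ToyFormDefinitionCurrent(object):
        def __init__(self):
            self.latest_form = None

    current = ToyFormDefinitionCurrent()
    v1 = ToyFormDefinition('intake form', current)
    current.latest_form = v1
    v2 = ToyFormDefinition('intake form (revised)', current)
    current.latest_form = v2  # v1 stays reachable through its own row
    assert current.latest_form is v2 and v1.form_definition_current is current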
+    def show_editable_form_definition( self, trans, form_definition, current_form, message='', status='done', response_redirect=None, **kwd ):
+        """
+        Displays the form and any of the changes made to it in edit mode. In this method
+        all the widgets are build for all name, description and all the fields of a form
+        definition.
+        """
+        util.Params( kwd )
+        # name & description
+        form_details = [ ( 'Name', TextField( 'name', 40, current_form[ 'name' ] ) ),
+                         ( 'Description', TextField( 'description', 40, current_form[ 'desc' ] ) ),
+                         ( 'Type', HiddenField( 'form_type_select_field', current_form['type']) ) ]
+        form_layout = []
+        if current_form[ 'type' ] == trans.app.model.FormDefinition.types.SAMPLE:
+            for index, layout_name in enumerate( current_form[ 'layout' ] ):
+                form_layout.append( TextField( 'grid_layout%i' % index, 40, layout_name ))
+        # fields
+        field_details = []
+        for field_index, field in enumerate( current_form[ 'fields' ] ):
+            field_widgets = self.build_form_definition_field_widgets( trans=trans,
+                                                                      layout_grids=current_form['layout'],
+                                                                      field_index=field_index,
+                                                                      field=field,
+                                                                      form_type=current_form['type'] )
+            field_details.append( field_widgets )
+        return trans.fill_template( '/admin/forms/edit_form_definition.mako',
+                                    form_details=form_details,
+                                    field_details=field_details,
+                                    form_definition=form_definition,
+                                    field_types=trans.model.FormDefinition.supported_field_types,
+                                    message=message,
+                                    status=status,
+                                    current_form_type=current_form[ 'type' ],
+                                    layout_grids=form_layout,
+                                    response_redirect=response_redirect )
+
+    @web.expose
+    @web.require_admin
+    def delete_form_definition( self, trans, **kwd ):
+        id_list = util.listify( kwd['id'] )
+        for id in id_list:
+            try:
+                form_definition_current = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( trans.security.decode_id(id) )
+            except:
+                return trans.response.send_redirect( web.url_for( controller='forms',
+                                                                  action='browse_form_definitions',
+                                                                  message='Invalid form',
+                                                                  status='error' ) )
+            form_definition_current.deleted = True
+            trans.sa_session.add( form_definition_current )
+            trans.sa_session.flush()
+        return trans.response.send_redirect( web.url_for( controller='forms',
+                                                          action='browse_form_definitions',
+                                                          message='%i forms have been deleted.' % len(id_list),
+                                                          status='done') )
+
+    @web.expose
+    @web.require_admin
+    def undelete_form_definition( self, trans, **kwd ):
+        id_list = util.listify( kwd['id'] )
+        for id in id_list:
+            try:
+                form_definition_current = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( trans.security.decode_id(id) )
+            except:
+                return trans.response.send_redirect( web.url_for( controller='forms',
+                                                                  action='browse_form_definitions',
+                                                                  message='Invalid form',
+                                                                  status='error' ) )
+            form_definition_current.deleted = False
+            trans.sa_session.add( form_definition_current )
+            trans.sa_session.flush()
+        return trans.response.send_redirect( web.url_for( controller='forms',
+                                                          action='browse_form_definitions',
+                                                          message='%i forms have been undeleted.' % len(id_list),
+                                                          status='done') )
+
+    def build_form_definition_field_widgets( self, trans, layout_grids, field_index, field, form_type ):
+        '''
+        This method returns a list of widgets which describe a form definition field,
+        including the field label, helptext, type, selectfield options, the
+        required/optional flag and the layout.
+        '''
+        # field label
+        label = TextField( 'field_label_' + str( field_index ), 40, field['label'] )
+        # help text
+        helptext = TextField( 'field_helptext_' + str( field_index ), 40, field['helptext'] )
+        # field type
+        field_type_select_field = SelectField( 'field_type_' + str( field_index ),
+                                               refresh_on_change=True,
+                                               refresh_on_change_values=[ SelectField.__name__ ] )
+        # fill up the field type selectfield options
+        field_type_options = []
+        # if the form is for defining samples, then use the sample field types
+        # which does not include TextArea & AddressField
+        if form_type == trans.model.FormDefinition.types.SAMPLE:
+            for supported_field_type in trans.model.Sample.supported_field_types:
+                if supported_field_type.__name__ == field[ 'type' ]:
+                    field_type_select_field.add_option( supported_field_type.__name__,
+                                                        supported_field_type.__name__,
+                                                        selected=True )
+                    if supported_field_type.__name__ == SelectField.__name__:
+                        # when the field type is SelectField, add a TextField per option
+                        field_type_options = self.__build_field_type_select_field_options( field, field_index )
+                else:
+                    field_type_select_field.add_option( supported_field_type.__name__,
+                                                        supported_field_type.__name__ )
+        else:
+            for supported_field_type in trans.model.FormDefinition.supported_field_types:
+                if supported_field_type.__name__ == field[ 'type' ]:
+                    field_type_select_field.add_option( supported_field_type.__name__,
+                                                        supported_field_type.__name__,
+                                                        selected=True )
+                    if supported_field_type.__name__ == SelectField.__name__:
+                        # when the field type is SelectField, add a TextField per option
+                        field_type_options = self.__build_field_type_select_field_options( field, field_index )
+                else:
+                    field_type_select_field.add_option( supported_field_type.__name__,
+                                                        supported_field_type.__name__ )
+        # required/optional radio button
+        required = SelectField( 'field_required_' + str(field_index), display='radio' )
+        if field[ 'required' ] == 'required':
+            required.add_option( 'Required', 'required', selected=True )
+            required.add_option( 'Optional', 'optional' )
+        else:
+            required.add_option( 'Required', 'required' )
+            required.add_option( 'Optional', 'optional', selected=True )
+        # layout grid option select_field
+        if layout_grids and form_type == trans.model.FormDefinition.types.SAMPLE:
+            layout_select_field = SelectField( 'field_layout_' + str( field_index ) )
+            for index, grid_name in enumerate( layout_grids ):
+                if str( field.get( 'layout', None ) ) == str( index ):  # existing behavior: integer indexes are stored as strings.
+                    grid_selected = True
+                else:
+                    grid_selected = False
+                layout_select_field.add_option("%i. %s" % ( index + 1, grid_name ), index, selected=grid_selected )
+        # default value
+        default_value = TextField( 'field_default_' + str(field_index),
+                                   40,
+                                   field.get( 'default', '' ) )
+        # field name
+        name = TextField( 'field_name_' + str( field_index ), 40, field[ 'name' ] )
+        name_helptext = "The field name must be unique for each field and must contain only alphanumeric characters and underscore ."
+        if layout_grids and form_type == trans.model.FormDefinition.types.SAMPLE:
+            return [ ( 'Field label', label ),
+                     ( 'Help text', helptext ),
+                     ( 'Type', field_type_select_field, "Add options below", field_type_options ),
+                     ( 'Default value', default_value ),
+                     ( '', required ),
+                     ( 'Select the grid layout to place this field', layout_select_field ),
+                     ( 'Field name', name, name_helptext ) ]
+        return [ ( 'Field label', label ),
+                 ( 'Help text', helptext ),
+                 ( 'Type', field_type_select_field, "Add options below", field_type_options),
+                 ( 'Default value', default_value ),
+                 ( '', required),
+                 ( 'Field name', name, name_helptext ) ]
+
+    def __build_field_type_select_field_options( self, field, field_index ):
+        '''
+        Returns a list of TextFields, one for each select field option
+        '''
+        field_type_options = []
+        if field[ 'selectlist' ]:
+            for ctr, option in enumerate( field[ 'selectlist' ] ):
+                option_textfield = TextField( 'field_' + str( field_index ) + '_option_' + str( ctr ), 40, option )
+                field_type_options.append( ( 'Option ' + str( ctr + 1 ), option_textfield ) )
+        return field_type_options
+
+    def __add_select_field_option( self, trans, current_form, **kwd ):
+        '''
+        This method adds a select_field option. The kwd dict is searched for
+        the index of the field to which the option should be added.
+        '''
+        message = ''
+        status = 'ok'
+        index = -1
+        for k, v in kwd.items():
+            if v == 'Add':
+                # extract the field index from the
+                # button name of format: 'addoption_<field>'
+                index = int(k.split('_')[1])
+                break
+        if index == -1:
+            # something went wrong
+            message = 'Error in adding selectfield option'
+            status = 'error'
+            return current_form, status, message
+        # add an empty option
+        current_form[ 'fields' ][ index ][ 'selectlist' ].append( '' )
+        return current_form, status, message
+
+    def __remove_select_field_option( self, trans, current_form, **kwd ):
+        '''
+        This method removes a select_field option. The kwd dict is searched for
+        the field index and the option index which need to be removed.
+        '''
+        message = ''
+        status = 'ok'
+        option = -1
+        for k, v in kwd.items():
+            if v == 'Remove':
+                # extract the field & option indices from the
+                # button name of format: 'removeoption_<field>_<option>'
+                index = int( k.split( '_' )[1] )
+                option = int( k.split( '_' )[2] )
+                break
+        if option == -1:
+            # something went wrong
+            message = 'Error in removing selectfield option'
+            status = 'error'
+            return current_form, status, message
+        # remove the option
+        del current_form[ 'fields' ][ index ][ 'selectlist' ][ option ]
+        return current_form, status, message
+
+    def __get_select_field_options( self, index, **kwd ):
+        '''
+        This method gets all the options entered by the user for a field when
+        the field type is SelectField.
+        '''
+        params = util.Params( kwd )
+        ctr = 0
+        sb_options = []
+        while True:
+            if 'field_%s_option_%s' % (index, ctr) in kwd:
+                option = params.get( 'field_%s_option_%s' % (index, ctr), None )
+                sb_options.append( util.restore_text( option ) )
+                ctr = ctr + 1
+            else:
+                return sb_options
+
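The form editor posts SelectField options back as numbered parameters, `field_<index>_option_0`, `field_<index>_option_1`, and so on, and `__get_select_field_options` walks the counter until the first gap. A standalone sketch of that convention (helper name is illustrative):

    # Collect field_<index>_option_<ctr> values until the first missing counter.
    def collect_options(index, kwd):
        options, ctr = [], 0
        while 'field_%s_option_%s' % (index, ctr) in kwd:
            options.append(kwd['field_%s_option_%s' % (index, ctr)])
            ctr += 1
        return options

    posted = {'field_2_option_0': '24 hours', 'field_2_option_1': '1 week'}
    assert collect_options(2, posted) == ['24 hours', '1 week']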
+    def __get_field( self, index, **kwd ):
+        '''
+        This method retrieves all the user-entered details of a field and
+        returns a dict.
+        '''
+        params = util.Params( kwd )
+        label = util.restore_text( params.get( 'field_label_%i' % index, '' ) )
+        name = util.restore_text( params.get( 'field_name_%i' % index, '' ) )
+        helptext = util.restore_text( params.get( 'field_helptext_%i' % index, '' ) )
+        required = params.get( 'field_required_%i' % index, False )
+        field_type = util.restore_text( params.get( 'field_type_%i' % index, '' ) )
+        layout = params.get( 'field_layout_%i' % index, '0' )
+        default = util.restore_text( params.get( 'field_default_%i' % index, '' ) )
+        if not name.strip():
+            name = '%i_field_name' % index
+        if field_type == 'SelectField':
+            options = self.__get_select_field_options(index, **kwd)
+            return { 'name': name,
+                     'label': label,
+                     'helptext': helptext,
+                     'visible': True,
+                     'required': required,
+                     'type': field_type,
+                     'selectlist': options,
+                     'layout': layout,
+                     'default': default }
+        return { 'name': name,
+                 'label': label,
+                 'helptext': helptext,
+                 'visible': True,
+                 'required': required,
+                 'type': field_type,
+                 'layout': layout,
+                 'default': default }
+
+    def __import_fields( self, trans, csv_file, form_type ):
+        '''
+        Reads form fields from an uploaded csv file, one field per row, e.g.:
+        "company","name of the company", "True", "required", "TextField",,
+        "due date","turnaround time", "True", "optional", "SelectField","24 hours, 1 week, 1 month"
+        '''
+        fields = []
+        layouts = set()
+        try:
+            reader = csv.reader(csv_file.file)
+            index = 1
+            for row in reader:
+                if len(row) < 7:  # ignore bogus rows
+                    continue
+                options = row[5].split(',')
+                if len(row) >= 8:
+                    fields.append( { 'name': '%i_field_name' % index,
+                                     'label': row[0],
+                                     'helptext': row[1],
+                                     'visible': row[2],
+                                     'required': row[3],
+                                     'type': row[4],
+                                     'selectlist': options,
+                                     'layout': row[6],
+                                     'default': row[7] } )
+                    layouts.add(row[6])
+                else:
+                    fields.append( { 'name': '%i_field_name' % index,
+                                     'label': row[0],
+                                     'helptext': row[1],
+                                     'visible': row[2],
+                                     'required': row[3],
+                                     'type': row[4],
+                                     'selectlist': options,
+                                     'default': row[6] } )
+                index = index + 1
+        except:
+            return trans.response.send_redirect( web.url_for( controller='forms',
+                                                              action='create_form_definition',
+                                                              status='error',
+                                                              message='Error importing file <b>%s</b>' % csv_file.file ) )
+        self.__imported_from_file = True
+        return fields, list(layouts)
+
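Each imported csv row carries label, helptext, visible, required, type and a comma-separated option list; sample-form rows insert a layout column before the default value, which is why `__import_fields` branches on the row length. A self-contained sketch of that row-to-field mapping (the helper name is illustrative):

    import csv

    # Columns: label, helptext, visible, required, type, options[, layout], default.
    def row_to_field(index, row):
        field = {'name': '%i_field_name' % index, 'label': row[0],
                 'helptext': row[1], 'visible': row[2], 'required': row[3],
                 'type': row[4], 'selectlist': row[5].split(',')}
        if len(row) >= 8:  # sample forms carry the extra layout column
            field['layout'], field['default'] = row[6], row[7]
        else:
            field['default'] = row[6]
        return field

    sample = '"due date","turnaround time","True","optional","SelectField","24 hours,1 week",""'
    row = next(csv.reader([sample]))
    assert row_to_field(1, row)['selectlist'] == ['24 hours', '1 week']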
+    def __validate_form( self, **kwd ):
+        '''
+        This method checks that the following text inputs have been filled out
+        by the user:
+        - the name of the form
+        - the form type
+        '''
+        params = util.Params( kwd )
+        # form name
+        if not util.restore_text( params.name ):
+            return None, 'Form name must be filled.'
+        # form type
+        if util.restore_text( params.form_type_select_field ) == 'none':
+            return None, 'Form type must be selected.'
+        return True, ''
+
+    def __build_form_types_widget( self, trans, selected='none' ):
+        form_type_select_field = SelectField( 'form_type_select_field' )
+        if selected == 'none':
+            form_type_select_field.add_option( 'Select one', 'none', selected=True )
+        else:
+            form_type_select_field.add_option( 'Select one', 'none' )
+        fd_types = sorted( trans.app.model.FormDefinition.types.items() )
+        for ft in fd_types:
+            if selected == ft[1]:
+                form_type_select_field.add_option( ft[1], ft[1], selected=True )
+            else:
+                form_type_select_field.add_option( ft[1], ft[1] )
+        return form_type_select_field
diff --git a/lib/galaxy/webapps/galaxy/controllers/history.py b/lib/galaxy/webapps/galaxy/controllers/history.py
new file mode 100644
index 0000000..9896328
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -0,0 +1,1364 @@
+import logging
+import urllib
+
+from markupsafe import escape
+from six import string_types
+from sqlalchemy import and_, false, func, null, true
+from sqlalchemy.orm import eagerload, eagerload_all
+
+import galaxy.util
+from galaxy import exceptions
+from galaxy import managers
+from galaxy import model
+from galaxy import web
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.model.item_attrs import UsesItemRatings
+from galaxy.util import nice_size, Params, parse_int
+from galaxy.util.odict import odict
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.web import url_for
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.base.controller import ERROR, INFO, SUCCESS, WARNING
+from galaxy.web.base.controller import ExportsHistoryMixin
+from galaxy.web.base.controller import ImportsHistoryMixin
+from galaxy.web.base.controller import SharableMixin
+from galaxy.web.framework.helpers import grids, iff, time_ago
+
+
+log = logging.getLogger( __name__ )
+
+
+class NameColumn( grids.TextColumn ):
+    def get_value( self, trans, grid, history ):
+        return escape(history.get_display_name())
+
+
+class HistoryListGrid( grids.Grid ):
+
+    # Custom column types
+    class DatasetsByStateColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, history ):
+            # States to show in column.
+            states_to_show = ( 'ok', 'running', 'queued', 'new', 'error' )
+
+            # Get dataset counts for each state in a state-count dictionary.
+            state_counts = dict( ( state, count ) for state, count in
+                                 trans.sa_session.query( model.Dataset.state, func.count(model.Dataset.state) )
+                                      .join( model.HistoryDatasetAssociation )
+                                      .group_by( model.Dataset.state )
+                                      .filter( model.HistoryDatasetAssociation.history_id == history.id,
+                                               model.HistoryDatasetAssociation.visible == true(),
+                                               model.HistoryDatasetAssociation.deleted == false(),
+                                               model.Dataset.state.in_(states_to_show) )
+                                 )
+
+            # Create HTML.
+            rval = ''
+            for state in states_to_show:
+                count = state_counts.get( state )
+                if count:
+                    rval += '<div class="count-box state-color-%s">%s</div> ' % (state, count)
+            return rval
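+        # Example output (hypothetical counts): state_counts == { 'ok': 3, 'error': 1 }
+        # renders, in states_to_show order, as
+        #   <div class="count-box state-color-ok">3</div> <div class="count-box state-color-error">1</div>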
+
+    class HistoryListNameColumn( NameColumn ):
+        def get_link( self, trans, grid, history ):
+            link = None
+            if not history.deleted:
+                link = dict( operation="Switch", id=history.id, use_panels=grid.use_panels, async_compatible=True )
+            return link
+
+    class DeletedColumn( grids.DeletedColumn ):
+        def get_value( self, trans, grid, history ):
+            if history == trans.history:
+                return "<strong>current history</strong>"
+            if history.purged:
+                return "deleted permanently"
+            elif history.deleted:
+                return "deleted"
+            return ""
+
+        def sort( self, trans, query, ascending, column_name=None ):
+            if ascending:
+                query = query.order_by( self.model_class.table.c.purged.asc(), self.model_class.table.c.update_time.desc() )
+            else:
+                query = query.order_by( self.model_class.table.c.purged.desc(), self.model_class.table.c.update_time.desc() )
+            return query
+
+    # Grid definition
+    title = "Saved Histories"
+    model_class = model.History
+    template = '/history/grid.mako'
+    default_sort_key = "-update_time"
+    columns = [
+        HistoryListNameColumn( "Name", key="name", attach_popup=True, filterable="advanced" ),
+        DatasetsByStateColumn( "Datasets", key="datasets_by_state", sortable=False, nowrap=True),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.HistoryTagAssociation,
+                                    filterable="advanced", grid_name="HistoryListGrid" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False ),
+        grids.GridColumn( "Size on Disk", key="disk_size", format=nice_size, sortable=False ),
+        grids.GridColumn( "Created", key="create_time", format=time_ago ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+        DeletedColumn( "Status", key="deleted", filterable="advanced" )
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "search history names and tags",
+            cols_to_filter=[ columns[0], columns[2] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+    operations = [
+        grids.GridOperation( "Switch", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=True ),
+        grids.GridOperation( "View", allow_multiple=False ),
+        grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
+        grids.GridOperation( "Copy", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
+        grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ), async_compatible=False, inbound=True  ),
+        grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), async_compatible=True ),
+        grids.GridOperation( "Delete Permanently", condition=( lambda item: not item.purged ), confirm="History contents will be removed from disk, this cannot be undone.  Continue?", async_compatible=True ),
+        grids.GridOperation( "Undelete", condition=( lambda item: item.deleted and not item.purged ), async_compatible=True ),
+    ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) ),
+    ]
+    default_filter = dict( name="All", deleted="False", tags="All", sharing="All" )
+    num_rows_per_page = 10
+    preserve_state = False
+    use_async = True
+    use_paging = True
+    info_text = "Histories that have been deleted for more than a time period specified by the Galaxy administrator(s) may be permanently deleted."
+
+    def get_current_item( self, trans, **kwargs ):
+        return trans.get_history()
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter_by( user=trans.user, importing=False )
+
+
+class SharedHistoryListGrid( grids.Grid ):
+
+    # Custom column types
+    class DatasetsByStateColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, history ):
+            rval = ''
+            for state in ( 'ok', 'running', 'queued', 'error' ):
+                total = sum( 1 for d in history.active_datasets if d.state == state )
+                if total:
+                    rval += '<div class="count-box state-color-%s">%s</div>' % ( state, total )
+            return rval
+
+    class SharedByColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, history ):
+            return escape(history.user.email)
+
+    # Grid definition
+    title = "Histories shared with you by others"
+    model_class = model.History
+    template = '/history/shared_grid.mako'
+    default_sort_key = "-update_time"
+    default_filter = {}
+    columns = [
+        grids.GridColumn( "Name", key="name", attach_popup=True ),  # link=( lambda item: dict( operation="View", id=item.id ) ), attach_popup=True ),
+        DatasetsByStateColumn( "Datasets", sortable=False ),
+        grids.GridColumn( "Created", key="create_time", format=time_ago ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+        SharedByColumn( "Shared by", key="user_id" )
+    ]
+    operations = [
+        grids.GridOperation( "View", allow_multiple=False, target="_top" ),
+        grids.GridOperation( "Copy" ),
+        # grids.GridOperation( "Copy", allow_multiple=False ),
+        grids.GridOperation( "Unshare" )
+    ]
+    standard_filters = []
+
+    def build_initial_query( self, trans, **kwargs ):
+        return trans.sa_session.query( self.model_class ).join( 'users_shared_with' )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter( model.HistoryUserShareAssociation.user == trans.user )
+
+
+class HistoryAllPublishedGrid( grids.Grid ):
+    class NameURLColumn( grids.PublicURLColumn, NameColumn ):
+        pass
+
+    title = "Published Histories"
+    model_class = model.History
+    default_sort_key = "update_time"
+    default_filter = dict( public_url="All", username="All", tags="All" )
+    use_paging = True
+    num_rows_per_page = 50
+    use_async = True
+    columns = [
+        NameURLColumn( "Name", key="name", filterable="advanced" ),
+        grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.HistoryAnnotationAssociation, filterable="advanced" ),
+        grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
+        grids.CommunityRatingColumn( "Community Rating", key="rating" ),
+        grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.HistoryTagAssociation, filterable="advanced", grid_name="PublicHistoryListGrid" ),
+        grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search name, annotation, owner, and tags",
+            cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+    operations = []
+
+    def build_initial_query( self, trans, **kwargs ):
+        # Join so that searching history.user makes sense.
+        return trans.sa_session.query( self.model_class ).join( model.User.table )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        # A public history is published, has a slug, and is not deleted.
+        return query.filter( self.model_class.published == true() ).filter( self.model_class.slug != null() ).filter( self.model_class.deleted == false() )
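+    # Note (sketch): true(), false() and null() here are the SQLAlchemy expression
+    # constants imported at the top of the module, so these comparisons compile to
+    # SQL predicates instead of being evaluated in Python.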
+
+
+class HistoryController( BaseUIController, SharableMixin, UsesAnnotations, UsesItemRatings,
+                         ExportsHistoryMixin, ImportsHistoryMixin ):
+
+    def __init__( self, app ):
+        super( HistoryController, self ).__init__( app )
+        self.history_manager = managers.histories.HistoryManager( app )
+        self.history_serializer = managers.histories.HistorySerializer( app )
+        # _get_users() below resolves sharing targets through this manager.
+        self.user_manager = managers.users.UserManager( app )
+
+    @web.expose
+    def index( self, trans ):
+        return ""
+
+    @web.expose
+    def list_as_xml( self, trans ):
+        """XML history list for functional tests"""
+        trans.response.set_content_type( 'text/xml' )
+        return trans.fill_template( "/history/list_as_xml.mako" )
+
+    # ......................................................................... lists
+    stored_list_grid = HistoryListGrid()
+    shared_list_grid = SharedHistoryListGrid()
+    published_list_grid = HistoryAllPublishedGrid()
+
+    @web.expose
+    def list_published( self, trans, **kwargs ):
+        if 'async' in kwargs:
+            kwargs[ 'embedded' ] = True
+            return self.published_list_grid( trans, **kwargs )
+
+        kwargs[ 'embedded' ] = True
+        grid = self.published_list_grid( trans, **kwargs )
+        return trans.fill_template( "history/list_published.mako", embedded_grid=grid )
+
+    @web.expose
+    @web.require_login( "work with multiple histories" )
+    def list( self, trans, **kwargs ):
+        """List all available histories"""
+        current_history = trans.get_history()
+        status = message = None
+        if 'operation' in kwargs:
+            operation = kwargs['operation'].lower()
+            if operation == "share or publish":
+                return self.sharing( trans, **kwargs )
+            if operation == "rename" and kwargs.get('id', None):  # Don't call rename if no ids
+                if 'name' in kwargs:
+                    del kwargs['name']  # Remove ajax name param that rename method uses
+                return self.rename( trans, **kwargs )
+            if operation == "view":
+                decoded_id = self.decode_id( kwargs.get( 'id', None ) )
+                history = self.history_manager.get_owned( decoded_id, trans.user, current_history=trans.history )
+                return trans.response.send_redirect( url_for( controller='history',
+                                                              action='view',
+                                                              id=kwargs['id'],
+                                                              show_deleted=history.deleted,
+                                                              use_panels=False ) )
+            history_ids = galaxy.util.listify( kwargs.get( 'id', [] ) )
+            # Display no message by default
+            status, message = None, None
+            # Load the histories and ensure they all belong to the current user
+            histories = []
+            for history_id in history_ids:
+                history = self.history_manager.get_owned( self.decode_id( history_id ), trans.user, current_history=trans.history )
+                if history:
+                    # Ensure history is owned by current user
+                    if history.user_id is not None and trans.user:
+                        assert trans.user.id == history.user_id, "History does not belong to current user"
+                    histories.append( history )
+                else:
+                    log.warning( "Invalid history id '%r' passed to list", history_id )
+            if histories:
+                if operation == "switch":
+                    status, message = self._list_switch( trans, histories )
+                    # Take action to update UI to reflect history switch. If
+                    # grid is using panels, it is standalone and hence a redirect
+                    # to root is needed; if grid is not using panels, it is nested
+                    # in the main Galaxy UI and refreshing the history frame
+                    # is sufficient.
+                    use_panels = kwargs.get('use_panels', False) == 'True'
+                    if use_panels:
+                        return trans.response.send_redirect( url_for( "/" ) )
+                    else:
+                        trans.template_context['refresh_frames'] = ['history']
+                elif operation in ( "delete", "delete permanently" ):
+                    if operation == "delete permanently":
+                        status, message = self._list_delete( trans, histories, purge=True )
+                    else:
+                        status, message = self._list_delete( trans, histories )
+                    if current_history in histories:
+                        # Deleted the current history, so a new, empty history was
+                        # created automatically, and we need to refresh the history frame
+                        trans.template_context['refresh_frames'] = ['history']
+                elif operation == "undelete":
+                    status, message = self._list_undelete( trans, histories )
+                elif operation == "unshare":
+                    for history in histories:
+                        for husa in trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+                                                    .filter_by( history=history ):
+                            trans.sa_session.delete( husa )
+                elif operation == "enable import via link":
+                    for history in histories:
+                        if not history.importable:
+                            self._make_item_importable( trans.sa_session, history )
+                elif operation == "disable import via link":
+                    if history_ids:
+                        histories = []
+                        for history_id in history_ids:
+                            history = self.history_manager.get_owned( self.decode_id( history_id ), trans.user, current_history=trans.history )
+                            if history.importable:
+                                history.importable = False
+                            histories.append( history )
+
+                trans.sa_session.flush()
+        # Render the list view
+        return self.stored_list_grid( trans, status=status, message=message, **kwargs )
+
+    def _list_delete( self, trans, histories, purge=False ):
+        """Delete histories"""
+        n_deleted = 0
+        deleted_current = False
+        message_parts = []
+        status = SUCCESS
+        for history in histories:
+            if history.users_shared_with:
+                message_parts.append( "History (%s) has been shared with others, unshare it before deleting it.  " % history.name )
+                status = ERROR
+            else:
+                if not history.deleted:
+                    # We'll not eliminate any DefaultHistoryPermissions in case we undelete the history later
+                    history.deleted = True
+                    # If deleting the current history, make a new current.
+                    if history == trans.get_history():
+                        deleted_current = True
+                    trans.log_event( "History (%s) marked as deleted" % history.name )
+                    n_deleted += 1
+                if purge and trans.app.config.allow_user_dataset_purge:
+                    for hda in history.datasets:
+                        if trans.user:
+                            trans.user.adjust_total_disk_usage(-hda.quota_amount(trans.user))
+                        hda.purged = True
+                        trans.sa_session.add( hda )
+                        trans.log_event( "HDA id %s has been purged" % hda.id )
+                        trans.sa_session.flush()
+                        if hda.dataset.user_can_purge:
+                            try:
+                                hda.dataset.full_delete()
+                                trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
+                                trans.sa_session.add( hda.dataset )
+                            except:
+                                log.exception( 'Unable to purge dataset (%s) on purge of hda (%s):' % ( hda.dataset.id, hda.id ) )
+                    history.purged = True
+                    self.sa_session.add( history )
+                    self.sa_session.flush()
+                for hda in history.datasets:
+                    # Not all datasets have jobs associated with them (e.g., datasets imported from libraries).
+                    if hda.creating_job_associations:
+                        # HDA has associated job, so try marking it deleted.
+                        job = hda.creating_job_associations[0].job
+                        if job.history_id == history.id and not job.finished:
+                            # No need to check other outputs since the job's parent history is this history
+                            job.mark_deleted( trans.app.config.track_jobs_in_database )
+                            trans.app.job_manager.job_stop_queue.put( job.id )
+        trans.sa_session.flush()
+        if n_deleted:
+            part = "Deleted %d %s" % ( n_deleted, iff( n_deleted != 1, "histories", "history" ) )
+            if purge and trans.app.config.allow_user_dataset_purge:
+                part += " and removed %s dataset%s from disk" % ( iff( n_deleted != 1, "their", "its" ), iff( n_deleted != 1, 's', '' ) )
+            elif purge:
+                part += " but the datasets were not removed from disk because that feature is not enabled in this Galaxy instance"
+            message_parts.append( "%s.  " % part )
+        if deleted_current:
+            # note: this needs to come after the commits above, or it will use an empty history that was deleted above
+            trans.get_or_create_default_history()
+            message_parts.append( "Your active history was deleted, a new empty history is now active.  " )
+            status = INFO
+        return ( status, " ".join( message_parts ) )
+
+    def _list_undelete( self, trans, histories ):
+        """Undelete histories"""
+        n_undeleted = 0
+        n_already_purged = 0
+        for history in histories:
+            if history.purged:
+                n_already_purged += 1
+            if history.deleted:
+                history.deleted = False
+                if not history.default_permissions:
+                    # For backward compatibility - for a while we were deleting all DefaultHistoryPermissions on
+                    # the history when we deleted the history.  We are no longer doing this.
+                    # Need to add default DefaultHistoryPermissions in case they were deleted when the history was deleted
+                    default_action = trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS
+                    private_user_role = trans.app.security_agent.get_private_user_role( history.user )
+                    default_permissions = {}
+                    default_permissions[ default_action ] = [ private_user_role ]
+                    trans.app.security_agent.history_set_default_permissions( history, default_permissions )
+                n_undeleted += 1
+                trans.log_event( "History (%s) %d marked as undeleted" % ( history.name, history.id ) )
+        status = SUCCESS
+        message_parts = []
+        if n_undeleted:
+            message_parts.append( "Undeleted %d %s.  " % ( n_undeleted, iff( n_undeleted != 1, "histories", "history" ) ) )
+        if n_already_purged:
+            message_parts.append( "%d histories have already been purged and cannot be undeleted." % n_already_purged )
+            status = WARNING
+        return status, "".join( message_parts )
+
+    def _list_switch( self, trans, histories ):
+        """Switch to a new different history"""
+        new_history = histories[0]
+        galaxy_session = trans.get_galaxy_session()
+        try:
+            association = trans.sa_session.query( trans.app.model.GalaxySessionToHistoryAssociation ) \
+                                          .filter_by( session_id=galaxy_session.id, history_id=new_history.id ) \
+                                          .first()
+        except Exception:
+            association = None
+        new_history.add_galaxy_session( galaxy_session, association=association )
+        trans.sa_session.add( new_history )
+        trans.sa_session.flush()
+        trans.set_history( new_history )
+        # No message
+        return None, None
+
+    @web.expose
+    @web.require_login( "work with shared histories" )
+    def list_shared( self, trans, **kwargs ):
+        """List histories shared with current user by others"""
+        status = message = None
+        if 'operation' in kwargs:
+            ids = galaxy.util.listify( kwargs.get( 'id', [] ) )
+            operation = kwargs['operation'].lower()
+            if operation == "view":
+                # Display history.
+                history = self.history_manager.get_accessible( self.decode_id( ids[0] ), trans.user, current_history=trans.history )
+                return self.display_by_username_and_slug( trans, history.user.username, history.slug )
+            elif operation == 'unshare':
+                if not ids:
+                    message = "Select a history to unshare"
+                    return self.shared_list_grid( trans, status='error', message=message, **kwargs )
+                for id in ids:
+                    # No need to check security; the association query below won't
+                    # yield a hit unless the history is actually shared with this user.
+                    history = self.history_manager.by_id( self.decode_id( id ) )
+                    # Current user is the user with which the histories were shared
+                    association = ( trans.sa_session.query( trans.app.model.HistoryUserShareAssociation )
+                                    .filter_by( user=trans.user, history=history ).one() )
+                    trans.sa_session.delete( association )
+                    trans.sa_session.flush()
+                message = "Unshared %d shared histories" % len( ids )
+                status = 'done'
+        # Render the list view
+        return self.shared_list_grid( trans, status=status, message=message, **kwargs )
+
+    # ......................................................................... html
+    @web.expose
+    def citations( self, trans ):
+        # Get history
+        history = trans.history
+        history_id = trans.security.encode_id( history.id )
+        return trans.fill_template( "history/citations.mako", history=history, history_id=history_id )
+
+    @web.expose
+    def as_xml( self, trans, id=None, show_deleted=None, show_hidden=None ):
+        """
+        Return a history in xml format.
+        """
+        if trans.app.config.require_login and not trans.user:
+            return trans.fill_template( '/no_access.mako', message='Please log in to access Galaxy histories.' )
+
+        if id:
+            history = self.history_manager.get_accessible( self.decode_id( id ), trans.user,
+                current_history=trans.history )
+        else:
+            history = trans.get_history( most_recent=True, create=True )
+
+        trans.response.set_content_type( 'text/xml' )
+        return trans.fill_template_mako(
+            "history/as_xml.mako",
+            history=history,
+            show_deleted=galaxy.util.string_as_bool( show_deleted ),
+            show_hidden=galaxy.util.string_as_bool( show_hidden ) )
+
+    @web.expose
+    def display_structured( self, trans, id=None ):
+        """
+        Display a history as a nested structure showing the jobs and workflow
+        invocations that created each dataset (if any).
+        """
+        # Get history
+        if id is None:
+            id = trans.history.id
+        else:
+            id = self.decode_id( id )
+        # Expunge history from the session to allow us to force a reload
+        # with a bunch of eager loaded joins
+        trans.sa_session.expunge( trans.history )
+        history = trans.sa_session.query( model.History ).options(
+            eagerload_all( 'active_datasets.creating_job_associations.job.workflow_invocation_step.workflow_invocation.workflow' ),
+            eagerload_all( 'active_datasets.children' )
+        ).get( id )
+        assert history
+        # TODO: formalize to trans.show_error
+        assert ( ( history.user and history.user.id == trans.user.id ) or
+                 ( history.id == trans.history.id ) or
+                 trans.user_is_admin() )
+        # Resolve jobs and workflow invocations for the datasets in the history
+        # items is filled with items (hdas, jobs, or workflows) that go at the
+        # top level
+        items = []
+        # First go through and group hdas by job, if there is no job they get
+        # added directly to items
+        jobs = odict()
+        for hda in history.active_datasets:
+            if hda.visible is False:
+                continue
+            # Follow "copied from ..." association until we get to the original
+            # instance of the dataset
+            original_hda = hda
+            # while original_hda.copied_from_history_dataset_association:
+            #     original_hda = original_hda.copied_from_history_dataset_association
+            # Check if the hda has a creating job; most should, but datasets from
+            # before jobs were tracked, or from the upload tool before it created
+            # a job, may not
+            if not original_hda.creating_job_associations:
+                items.append( ( hda, None ) )
+            # Attach hda to correct job
+            # -- there should only be one creating_job_association, so this
+            #    loop body should only be hit once
+            for assoc in original_hda.creating_job_associations:
+                job = assoc.job
+                if job in jobs:
+                    jobs[ job ].append( ( hda, None ) )
+                else:
+                    jobs[ job ] = [ ( hda, None ) ]
+        # Second, go through the jobs and connect to workflows
+        wf_invocations = odict()
+        for job, hdas in jobs.iteritems():
+            # Job is attached to a workflow step, follow it to the
+            # workflow_invocation and group
+            if job.workflow_invocation_step:
+                wf_invocation = job.workflow_invocation_step.workflow_invocation
+                if wf_invocation in wf_invocations:
+                    wf_invocations[ wf_invocation ].append( ( job, hdas ) )
+                else:
+                    wf_invocations[ wf_invocation ] = [ ( job, hdas ) ]
+            # Not attached to a workflow, add to items
+            else:
+                items.append( ( job, hdas ) )
+        # Finally, add workflow invocations to items, which should now
+        # contain all hdas with some level of grouping
+        items.extend( wf_invocations.items() )
+        # Sort items by age
+        items.sort( key=( lambda x: x[0].create_time ), reverse=True )
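+        # Resulting shape (illustrative): items mixes entries of three kinds --
+        #   ( wf_invocation, [ ( job, [ ( hda, None ) ] ) ] ),
+        #   ( job, [ ( hda, None ) ] ), and bare ( hda, None ) --
+        # ordered newest first.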
+        return trans.fill_template( "history/display_structured.mako", items=items, history=history )
+
+    @web.expose
+    def structure( self, trans, id=None, **kwargs ):
+        """
+        """
+        unencoded_history_id = trans.history.id
+        if id:
+            unencoded_history_id = self.decode_id( id )
+        history_to_view = self.history_manager.get_accessible( unencoded_history_id, trans.user,
+            current_history=trans.history )
+
+        history_dictionary = self.history_serializer.serialize_to_view( history_to_view,
+            view='dev-detailed', user=trans.user, trans=trans )
+        contents = self.history_serializer.serialize_contents( history_to_view,
+            'contents', trans=trans, user=trans.user )
+
+        jobs = ( trans.sa_session.query( trans.app.model.Job )
+            .filter( trans.app.model.Job.user == history_to_view.user )
+            .filter( trans.app.model.Job.history_id == unencoded_history_id ) ).all()
+        jobs = map( lambda j: self.encode_all_ids( trans, j.to_dict( 'element' ), True ), jobs )
+
+        tools = {}
+        for tool_id in set( map( lambda j: j[ 'tool_id' ], jobs ) ):
+            unquoted_id = urllib.unquote_plus( tool_id )
+            tool = self.app.toolbox.get_tool( unquoted_id )
+            if not tool:
+                raise exceptions.ObjectNotFound( "Could not find tool with id '%s'" % tool_id )
+                # TODO: some fallback for tool information
+            tools[ tool_id ] = tool.to_dict( trans, io_details=True, link_details=True )
+
+        return trans.fill_template( "history/structure.mako", historyId=history_dictionary[ 'id' ],
+            history=history_dictionary, contents=contents, jobs=jobs, tools=tools, **kwargs )
+
+    @web.expose
+    def view( self, trans, id=None, show_deleted=False, show_hidden=False, use_panels=True ):
+        """
+        View a history. If a history is importable, then it is viewable by any user.
+        """
+        show_deleted = galaxy.util.string_as_bool( show_deleted )
+        show_hidden = galaxy.util.string_as_bool( show_hidden )
+        use_panels = galaxy.util.string_as_bool( use_panels )
+
+        history_dictionary = {}
+        user_is_owner = False
+        try:
+            if id:
+                history_to_view = self.history_manager.get_accessible( self.decode_id( id ), trans.user,
+                    current_history=trans.history )
+                user_is_owner = history_to_view.user == trans.user
+                history_is_current = history_to_view == trans.history
+            else:
+                history_to_view = trans.history
+                user_is_owner = True
+                history_is_current = True
+
+            # include all datasets: hidden, deleted, and purged
+            history_dictionary = self.history_serializer.serialize_to_view( history_to_view,
+                view='dev-detailed', user=trans.user, trans=trans )
+
+        except Exception as exc:
+            user_id = str( trans.user.id ) if trans.user else '(anonymous)'
+            log.exception( 'Error bootstrapping history for user %s: %s', user_id, exc )
+            if isinstance( exc, exceptions.ItemAccessibilityException ):
+                error_msg = 'You do not have permission to view this history.'
+            else:
+                error_msg = ( 'An error occurred getting the history data from the server. ' +
+                              'Please contact a Galaxy administrator if the problem persists.' )
+            return trans.show_error_message( error_msg, use_panels=use_panels )
+
+        return trans.fill_template_mako( "history/view.mako",
+            history=history_dictionary,
+            user_is_owner=user_is_owner, history_is_current=history_is_current,
+            show_deleted=show_deleted, show_hidden=show_hidden, use_panels=use_panels )
+
+    @web.require_login( "use more than one Galaxy history" )
+    @web.expose
+    def view_multiple( self, trans, include_deleted_histories=False, order='update_time', limit=10 ):
+        """
+        """
+        current_history_id = trans.security.encode_id( trans.history.id )
+        # TODO: allow specifying user_id for admin?
+        include_deleted_histories = galaxy.util.string_as_bool( include_deleted_histories )
+        limit = parse_int( limit, min_val=1, default=10, allow_none=True)
+
+        return trans.fill_template_mako( "history/view_multiple.mako", current_history_id=current_history_id,
+            include_deleted_histories=include_deleted_histories, order=order, limit=limit )
+
+    @web.expose
+    def display_by_username_and_slug( self, trans, username, slug ):
+        """
+        Display history based on a username and slug.
+        """
+        # Get history.
+        session = trans.sa_session
+        user = session.query( model.User ).filter_by( username=username ).first()
+        history = trans.sa_session.query( model.History ) \
+            .options( eagerload( 'tags' ) ).options( eagerload( 'annotations' ) ) \
+            .filter_by( user=user, slug=slug, deleted=False ).first()
+        if history is None:
+            raise web.httpexceptions.HTTPNotFound()
+        # Security check raises error if user cannot access history.
+        self.history_manager.error_unless_accessible( history, trans.user, current_history=trans.history )
+
+        # Get rating data.
+        user_item_rating = 0
+        if trans.get_user():
+            user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), history )
+            if user_item_rating:
+                user_item_rating = user_item_rating.rating
+            else:
+                user_item_rating = 0
+        ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, history )
+
+        # create ownership flag for template, dictify models
+        user_is_owner = trans.user == history.user
+        history_dictionary = self.history_serializer.serialize_to_view( history,
+            view='dev-detailed', user=trans.user, trans=trans )
+        history_dictionary[ 'annotation' ] = self.get_item_annotation_str( trans.sa_session, history.user, history )
+
+        return trans.stream_template_mako( "history/display.mako", item=history, item_data=[],
+            user_is_owner=user_is_owner, history_dict=history_dictionary,
+            user_item_rating=user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
+
+    # ......................................................................... sharing & publishing
+    @web.expose
+    @web.require_login( "share Galaxy histories" )
+    def sharing( self, trans, id=None, histories=None, **kwargs ):
+        """ Handle history sharing. """
+
+        # Get session and histories.
+        session = trans.sa_session
+        # Id values take precedence over histories passed in; last resort is current history.
+        if id:
+            ids = galaxy.util.listify( id )
+            if ids:
+                histories = [ self.history_manager.get_accessible( self.decode_id( history_id ), trans.user, current_history=trans.history )
+                              for history_id in ids ]
+        elif not histories:
+            histories = [ trans.history ]
+
+        # Do operation on histories.
+        for history in histories:
+            if 'make_accessible_via_link' in kwargs:
+                self._make_item_accessible( trans.sa_session, history )
+            elif 'make_accessible_and_publish' in kwargs:
+                self._make_item_accessible( trans.sa_session, history )
+                history.published = True
+            elif 'publish' in kwargs:
+                if history.importable:
+                    history.published = True
+                else:
+                    # TODO: report error here.
+                    pass
+            elif 'disable_link_access' in kwargs:
+                history.importable = False
+            elif 'unpublish' in kwargs:
+                history.published = False
+            elif 'disable_link_access_and_unpublish' in kwargs:
+                history.importable = history.published = False
+            elif 'unshare_user' in kwargs:
+                user = trans.sa_session.query( trans.app.model.User ).get( self.decode_id( kwargs[ 'unshare_user' ] ) )
+                # Look for and delete sharing relation for history-user.
+                deleted_sharing_relation = False
+                husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ).filter_by( user=user, history=history ).all()
+                if husas:
+                    deleted_sharing_relation = True
+                    for husa in husas:
+                        trans.sa_session.delete( husa )
+                if not deleted_sharing_relation:
+                    history_name = escape( history.name )
+                    user_email = escape( user.email )
+                    message = "History '%s' does not seem to be shared with user '%s'" % ( history_name, user_email )
+                    return trans.fill_template( '/sharing_base.mako', item=history,
+                                                message=message, status='error' )
+
+        # Legacy issue: histories made accessible before recent updates may not have a slug. Create slug for any histories that need them.
+        for history in histories:
+            if history.importable and not history.slug:
+                self._make_item_accessible( trans.sa_session, history )
+
+        session.flush()
+
+        return trans.fill_template( "/sharing_base.mako", item=history )
+
+    @web.expose
+    @web.require_login( "share histories with other users" )
+    def share( self, trans, id=None, email="", **kwd ):
+        # If a history contains both datasets that can be shared and others that cannot be shared with the desired user,
+        # then the entire history is shared, and the protected datasets will be visible, but inaccessible ( greyed out )
+        # in the copied history
+        params = Params( kwd )
+        user = trans.get_user()
+        # TODO: we have too many error messages floating around in here - we need
+        # to incorporate the messaging system used by the libraries that will display
+        # a message on any page.
+        err_msg = galaxy.util.restore_text( params.get( 'err_msg', '' ) )
+        if not email:
+            if not id:
+                # Default to the current history
+                id = trans.security.encode_id( trans.history.id )
+            id = galaxy.util.listify( id )
+            send_to_err = err_msg
+            histories = []
+            for history_id in id:
+                history_id = self.decode_id( history_id )
+                history = self.history_manager.get_owned( history_id, trans.user, current_history=trans.history )
+                histories.append( history )
+            return trans.fill_template( "/history/share.mako",
+                                        histories=histories,
+                                        email=email,
+                                        send_to_err=send_to_err )
+
+        histories = self._get_histories( trans, id )
+        send_to_users, send_to_err = self._get_users( trans, user, email )
+        if not send_to_users:
+            if not send_to_err:
+                send_to_err += "%s is not a valid Galaxy user.  %s" % ( email, err_msg )
+            return trans.fill_template( "/history/share.mako",
+                                        histories=histories,
+                                        email=email,
+                                        send_to_err=send_to_err )
+
+        if params.get( 'share_button', False ):
+
+            # The user has not yet made a choice about how to share, so dictionaries will be built for display
+            can_change, cannot_change, no_change_needed, unique_no_change_needed, send_to_err = \
+                self._populate_restricted( trans, user, histories, send_to_users, None, send_to_err, unique=True )
+
+            send_to_err += err_msg
+            if cannot_change and not no_change_needed and not can_change:
+                send_to_err = "The histories you are sharing do not contain any datasets that can be accessed by the users with which you are sharing."
+                return trans.fill_template( "/history/share.mako",
+                                            histories=histories,
+                                            email=email,
+                                            send_to_err=send_to_err )
+
+            if can_change or cannot_change:
+                return trans.fill_template( "/history/share.mako",
+                                            histories=histories,
+                                            email=email,
+                                            send_to_err=send_to_err,
+                                            can_change=can_change,
+                                            cannot_change=cannot_change,
+                                            no_change_needed=unique_no_change_needed )
+
+            if no_change_needed:
+                return self._share_histories( trans, user, send_to_err, histories=no_change_needed )
+
+            elif not send_to_err:
+                # User seems to be sharing an empty history
+                send_to_err = "You cannot share an empty history.  "
+
+        return trans.fill_template( "/history/share.mako",
+                                    histories=histories,
+                                    email=email,
+                                    send_to_err=send_to_err )
+
+    @web.expose
+    @web.require_login( "share restricted histories with other users" )
+    def share_restricted( self, trans, id=None, email="", **kwd ):
+        if 'action' in kwd:
+            action = kwd[ 'action' ]
+        else:
+            err_msg = "Select an action.  "
+            return trans.response.send_redirect( url_for( controller='history',
+                                                          action='share',
+                                                          id=id,
+                                                          email=email,
+                                                          err_msg=err_msg,
+                                                          share_button=True ) )
+        user = trans.get_user()
+        user_roles = user.all_roles()
+        histories = self._get_histories( trans, id )
+        send_to_users, send_to_err = self._get_users( trans, user, email )
+        send_to_err = ''
+        # The user has made a choice, so dictionaries will be built for sharing
+        can_change, cannot_change, no_change_needed, unique_no_change_needed, send_to_err = \
+            self._populate_restricted( trans, user, histories, send_to_users, action, send_to_err )
+        # Now that we've populated the can_change, cannot_change, and no_change_needed dictionaries,
+        # we'll populate the histories_for_sharing dictionary from each of them.
+        histories_for_sharing = {}
+        if no_change_needed:
+            # Don't need to change anything in cannot_change, so populate as is
+            histories_for_sharing, send_to_err = \
+                self._populate( trans, histories_for_sharing, no_change_needed, send_to_err )
+        if cannot_change:
+            # Can't change anything in cannot_change, so populate as is
+            histories_for_sharing, send_to_err = \
+                self._populate( trans, histories_for_sharing, cannot_change, send_to_err )
+        # The action here is either 'public' or 'private', so we'll continue to populate the
+        # histories_for_sharing dictionary from the can_change dictionary.
+        for send_to_user, history_dict in can_change.items():
+            for history in history_dict:
+                # Make sure the current history has not already been shared with the current send_to_user
+                if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+                                   .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+                                                  trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
+                                   .count() > 0:
+                    send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
+                else:
+                    # Only deal with datasets that have not been purged
+                    for hda in history.activatable_datasets:
+                        # If the current dataset is not public, we may need to perform an action on it to
+                        # make it accessible by the other user.
+                        if not trans.app.security_agent.can_access_dataset( send_to_user.all_roles(), hda.dataset ):
+                            # The user with which we are sharing the history does not have access permission on the current dataset
+                            if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ) and not hda.dataset.library_associations:
+                                # The current user has authority to change permissions on the current dataset because
+                                # they have permission to manage permissions on the dataset and the dataset is not associated
+                                # with a library.
+                                if action == "private":
+                                    trans.app.security_agent.privately_share_dataset( hda.dataset, users=[ user, send_to_user ] )
+                                elif action == "public":
+                                    trans.app.security_agent.make_dataset_public( hda.dataset )
+                    # Populate histories_for_sharing with the history after performing any requested actions on
+                    # its datasets to make them accessible by the other user.
+                    if send_to_user not in histories_for_sharing:
+                        histories_for_sharing[ send_to_user ] = [ history ]
+                    elif history not in histories_for_sharing[ send_to_user ]:
+                        histories_for_sharing[ send_to_user ].append( history )
+        return self._share_histories( trans, user, send_to_err, histories=histories_for_sharing )
+
+    def _get_histories( self, trans, ids ):
+        if not ids:
+            # Default to the current history
+            ids = trans.security.encode_id( trans.history.id )
+        ids = galaxy.util.listify( ids )
+        histories = []
+        for history_id in ids:
+            history_id = self.decode_id( history_id )
+            history = self.history_manager.get_owned( history_id, trans.user, current_history=trans.history )
+            histories.append( history )
+        return histories
+
+    def _get_users( self, trans, user, emails_or_ids ):
+        send_to_users = []
+        send_to_err = ""
+        for string in galaxy.util.listify( emails_or_ids ):
+            string = string.strip()
+            if not string:
+                continue
+
+            send_to_user = None
+            if '@' in string:
+                email_address = string
+                send_to_user = self.user_manager.by_email( email_address,
+                    filters=[ trans.app.model.User.table.c.deleted == false() ] )
+
+            else:
+                try:
+                    decoded_user_id = self.decode_id( string )
+                    send_to_user = self.user_manager.by_id( decoded_user_id )
+                    if send_to_user.deleted:
+                        send_to_user = None
+                # TODO: in an ideal world, we would let this bubble up to web.expose which would handle it
+                except exceptions.MalformedId:
+                    send_to_user = None
+
+            if not send_to_user:
+                send_to_err += "%s is not a valid Galaxy user.  " % string
+            elif send_to_user == user:
+                send_to_err += "You cannot send histories to yourself.  "
+            else:
+                send_to_users.append( send_to_user )
+
+        return send_to_users, send_to_err
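+    # Illustrative call (hypothetical addresses, assuming alice exists and is not
+    # deleted): _get_users( trans, user, "alice@example.org,badid" ) returns
+    # ( [ <alice's User> ], "badid is not a valid Galaxy user.  " ) -- strings with
+    # an '@' are resolved by email, everything else as an encoded user id.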
+
+    def _populate( self, trans, histories_for_sharing, other, send_to_err ):
+        # This method will populate the histories_for_sharing dictionary with the users and
+        # histories in other, eliminating histories that have already been shared with the
+        # associated user.  No security checking on datasets is performed.
+        # If not empty, the histories_for_sharing dictionary looks like:
+        # { userA: [ historyX, historyY ], userB: [ historyY ] }
+        # other looks like:
+        # { userA: {historyX : [hda, hda], historyY : [hda]}, userB: {historyY : [hda]} }
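+        # Worked example (hypothetical data): with
+        #   other = { userA: { historyX: [hda1, hda2], historyY: [hda3] } }
+        # and historyX already shared with userA, this returns
+        #   ( { userA: [ historyY ] }, send_to_err + "History (historyX) already shared with user (userA).  " )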
+        for send_to_user, history_dict in other.items():
+            for history in history_dict:
+                # Make sure the current history has not already been shared with the current send_to_user
+                if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+                                   .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+                                                  trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
+                                   .count() > 0:
+                    send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
+                else:
+                    # Build the dict that will be used for sharing
+                    if send_to_user not in histories_for_sharing:
+                        histories_for_sharing[ send_to_user ] = [ history ]
+                    elif history not in histories_for_sharing[ send_to_user ]:
+                        histories_for_sharing[ send_to_user ].append( history )
+        return histories_for_sharing, send_to_err
+
+    def _populate_restricted( self, trans, user, histories, send_to_users, action, send_to_err, unique=False ):
+        # The user may be attempting to share histories whose datasets cannot all be accessed by other users.
+        # If this is the case, the user sharing the histories can:
+        # 1) action=='public': choose to make the datasets public if he is permitted to do so
+        # 2) action=='private': automatically create a new "sharing role" allowing protected
+        #    datasets to be accessed only by the desired users
+        # This method will populate the can_change, cannot_change and no_change_needed dictionaries, which
+        # are used for either displaying to the user, letting them make 1 of the choices above, or sharing
+        # after the user has made a choice.  They will be used for display if 'unique' is True, and will look
+        # like: {historyX : [hda, hda], historyY : [hda] }
+        # For sharing, they will look like:
+        # { userA: {historyX : [hda, hda], historyY : [hda]}, userB: {historyY : [hda]} }
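+        # Worked example (hypothetical): sharing historyX -- one public hda plus one
+        # private hda the owner can manage -- with userB and unique=True yields
+        #   unique_no_change_needed == { historyX: [ public_hda ] }
+        #   can_change == { historyX: [ private_hda ] }
+        # while cannot_change stays empty.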
+        can_change = {}
+        cannot_change = {}
+        no_change_needed = {}
+        unique_no_change_needed = {}
+        user_roles = user.all_roles()
+        for history in histories:
+            for send_to_user in send_to_users:
+                # Make sure the current history has not already been shared with the current send_to_user
+                if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+                                   .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id,
+                                                  trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \
+                                   .count() > 0:
+                    send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email )
+                else:
+                    # Only deal with datasets that have not been purged
+                    for hda in history.activatable_datasets:
+                        if trans.app.security_agent.can_access_dataset( send_to_user.all_roles(), hda.dataset ):
+                            # The no_change_needed dictionary is a special case.  If both of can_change
+                            # and cannot_change are empty, no_change_needed will be used for sharing.  Otherwise
+                            # unique_no_change_needed will be used for displaying, so we need to populate both.
+                            # Build the dictionaries for display, containing unique histories only
+                            if history not in unique_no_change_needed:
+                                unique_no_change_needed[ history ] = [ hda ]
+                            else:
+                                unique_no_change_needed[ history ].append( hda )
+                            # Build the dictionaries for sharing
+                            if send_to_user not in no_change_needed:
+                                no_change_needed[ send_to_user ] = {}
+                            if history not in no_change_needed[ send_to_user ]:
+                                no_change_needed[ send_to_user ][ history ] = [ hda ]
+                            else:
+                                no_change_needed[ send_to_user ][ history ].append( hda )
+                        else:
+                            # The user with which we are sharing the history does not have access permission on the current dataset
+                            if trans.app.security_agent.can_manage_dataset( user_roles, hda.dataset ):
+                                # The current user has authority to change permissions on the current dataset because
+                                # they have permission to manage permissions on the dataset.
+                                # NOTE: (gvk) There may be problems if the dataset also has an ldda, but I don't think so
+                                # because the user with which we are sharing will not have the "manage permission" permission
+                                # on the dataset in their history.  Keep an eye on this though...
+                                if unique:
+                                    # Build the dictionaries for display, containing unique histories only
+                                    if history not in can_change:
+                                        can_change[ history ] = [ hda ]
+                                    else:
+                                        can_change[ history ].append( hda )
+                                else:
+                                    # Build the dictionaries for sharing
+                                    if send_to_user not in can_change:
+                                        can_change[ send_to_user ] = {}
+                                    if history not in can_change[ send_to_user ]:
+                                        can_change[ send_to_user ][ history ] = [ hda ]
+                                    else:
+                                        can_change[ send_to_user ][ history ].append( hda )
+                            else:
+                                if action in [ "private", "public" ]:
+                                    # The user has made a choice, so 'unique' doesn't apply.  Don't change stuff
+                                    # that the user doesn't have permission to change
+                                    continue
+                                if unique:
+                                    # Build the dictionaries for display, containing unique histories only
+                                    if history not in cannot_change:
+                                        cannot_change[ history ] = [ hda ]
+                                    else:
+                                        cannot_change[ history ].append( hda )
+                                else:
+                                    # Build the dictionaries for sharing
+                                    if send_to_user not in cannot_change:
+                                        cannot_change[ send_to_user ] = {}
+                                    if history not in cannot_change[ send_to_user ]:
+                                        cannot_change[ send_to_user ][ history ] = [ hda ]
+                                    else:
+                                        cannot_change[ send_to_user ][ history ].append( hda )
+        return can_change, cannot_change, no_change_needed, unique_no_change_needed, send_to_err
+
+    def _share_histories( self, trans, user, send_to_err, histories=None ):
+        # histories looks like: { userA: [ historyX, historyY ], userB: [ historyY ] }
+        histories = histories or {}
+        msg = ""
+        # Initialize here ( not per-user ) so an empty dict doesn't raise a NameError below
+        # and so histories shared with multiple users are all reported.
+        shared_histories = []
+        if not histories:
+            send_to_err += "No users have been specified or no histories can be sent without changing permissions or associating a sharing role.  "
+        else:
+            for send_to_user, send_to_user_histories in histories.items():
+                for history in send_to_user_histories:
+                    share = trans.app.model.HistoryUserShareAssociation()
+                    share.history = history
+                    share.user = send_to_user
+                    trans.sa_session.add( share )
+                    self.create_item_slug( trans.sa_session, history )
+                    trans.sa_session.flush()
+                    if history not in shared_histories:
+                        shared_histories.append( history )
+        if send_to_err:
+            msg += send_to_err
+        return self.sharing( trans, histories=shared_histories, msg=msg )
+
+    # ......................................................................... actions/orig. async
+    @web.expose
+    def purge_deleted_datasets( self, trans ):
+        count = 0
+        if trans.app.config.allow_user_dataset_purge:
+            for hda in trans.history.datasets:
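+                # Only datasets that are deleted but not yet purged qualify.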
+                if not hda.deleted or hda.purged:
+                    continue
+                if trans.user:
+                    trans.user.adjust_total_disk_usage(-hda.quota_amount(trans.user))
+                hda.purged = True
+                trans.sa_session.add( hda )
+                trans.log_event( "HDA id %s has been purged" % hda.id )
+                trans.sa_session.flush()
+                if hda.dataset.user_can_purge:
+                    try:
+                        hda.dataset.full_delete()
+                        trans.log_event( "Dataset id %s has been purged upon the the purge of HDA id %s" % ( hda.dataset.id, hda.id ) )
+                        trans.sa_session.add( hda.dataset )
+                    except Exception:
+                        log.exception( 'Unable to purge dataset (%s) on purge of hda (%s):' % ( hda.dataset.id, hda.id ) )
+                count += 1
+        return trans.show_ok_message( "%d datasets have been deleted permanently" % count, refresh_frames=['history'] )
+
+    @web.expose
+    def delete( self, trans, id, purge=False ):
+        """Delete the history -- this does not require a logged in user."""
+        # TODO: use api instead
+        try:
+            # get the history with the given id, delete and optionally purge
+            current_history = self.history_manager.get_current( trans )
+            history = self.history_manager.get_owned( self.decode_id( id ), trans.user, current_history=current_history )
+            if history.users_shared_with:
+                raise exceptions.ObjectAttributeInvalidException(
+                    "History has been shared with others. Unshare it before deleting it."
+                )
+            self.history_manager.delete( history, flush=( not purge ) )
+            if purge:
+                self.history_manager.purge( history )
+
+            # if this history is the current history for this session,
+            # - attempt to find the most recently used, undeleted history and switch to it.
+            # - If no suitable recent history is found, create a new one and switch
+            if history == current_history:
+                not_deleted_or_purged = [ model.History.deleted == false(), model.History.purged == false() ]
+                most_recent_history = self.history_manager.most_recent( user=trans.user, filters=not_deleted_or_purged )
+                if most_recent_history:
+                    self.history_manager.set_current( trans, most_recent_history )
+                else:
+                    trans.get_or_create_default_history()
+
+        except Exception as exc:
+            return trans.show_error_message( exc )
+        return trans.show_ok_message( "History deleted", refresh_frames=['history'] )
+
+    @web.expose
+    def resume_paused_jobs( self, trans, current=False, ids=None ):
+        """Resume paused jobs the active history -- this does not require a logged in user."""
+        if not ids and galaxy.util.string_as_bool( current ):
+            histories = [ trans.get_history() ]
+            refresh_frames = ['history']
+        else:
+            raise NotImplementedError( "You can currently only resume all the datasets of the current history." )
+        for history in histories:
+            history.resume_paused_jobs()
+            trans.sa_session.add( history )
+        trans.sa_session.flush()
+        return trans.show_ok_message( "Your jobs have been resumed.", refresh_frames=refresh_frames )
+        # TODO: used in index.mako
+
+    @web.expose
+    @web.require_login( "rate items" )
+    @web.json
+    def rate_async( self, trans, id, rating ):
+        """ Rate a history asynchronously and return updated community data. """
+        history = self.history_manager.get_accessible( self.decode_id( id ), trans.user, current_history=trans.history )
+        if not history:
+            return trans.show_error_message( "The specified history does not exist." )
+        # Rate history.
+        self.rate_item( trans.sa_session, trans.get_user(), history, rating )
+        return self.get_ave_item_rating_data( trans.sa_session, history )
+        # TODO: used in display_base.mako
+
+    @web.expose
+    # TODO: Remove require_login when users are warned that, if they are not
+    # logged in, this will remove their current history.
+    @web.require_login( "use Galaxy histories" )
+    def import_archive( self, trans, **kwargs ):
+        """ Import a history from a file archive. """
+        # Set archive source and type.
+        archive_file = kwargs.get( 'archive_file', None )
+        archive_url = kwargs.get( 'archive_url', None )
+        archive_source = None
+        if archive_file:
+            archive_source = archive_file
+            archive_type = 'file'
+        elif archive_url:
+            archive_source = archive_url
+            archive_type = 'url'
+        # If no source to create archive from, show form to upload archive or specify URL.
+        if not archive_source:
+            return trans.show_form(
+                web.FormBuilder( web.url_for(controller='history', action='import_archive'), "Import a History from an Archive", submit_text="Submit" )
+                        .add_input( "text", "Archived History URL", "archive_url", value="", error=None )
+                # TODO: add support for importing via a file.
+                # .add_input( "file", "Archived History File", "archive_file", value=None, error=None )
+            )
+        self.queue_history_import( trans, archive_type=archive_type, archive_source=archive_source )
+        return trans.show_message( "Importing history from '%s'. \
+                                    This history will be visible when the import is complete" % archive_source )
+        # TODO: used in this file and index.mako
+
+    @web.expose
+    def export_archive( self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False, preview=False ):
+        """ Export a history to an archive. """
+        #
+        # Get history to export.
+        #
+        if id:
+            history = self.history_manager.get_accessible( self.decode_id( id ), trans.user, current_history=trans.history )
+        else:
+            # Use current history.
+            history = trans.history
+            id = trans.security.encode_id( history.id )
+        if not history:
+            return trans.show_error_message( "This history does not exist or you cannot export this history." )
+        # If history has already been exported and it has not changed since export, stream it.
+        jeha = history.latest_export
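+        # jeha is the history's most recent export job association ( a JobExportHistoryArchive ), if any.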
+        if jeha and jeha.up_to_date:
+            if jeha.ready:
+                if preview:
+                    url = url_for( controller='history', action="export_archive", id=id, qualified=True )
+                    return trans.show_message( "History Ready: '%(n)s'. Use this link to download "
+                                               "the archive or import it to another Galaxy server: "
+                                               "<a href='%(u)s'>%(u)s</a>" % ( { 'n': history.name, 'u': url } ) )
+                else:
+                    return self.serve_ready_history_export( trans, jeha )
+            elif jeha.preparing:
+                return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>"
+                                           % ( { 'n': history.name, 's': url_for( controller='history', action="export_archive", id=id, qualified=True ) } ) )
+        self.queue_history_export( trans, history, gzip=gzip, include_hidden=include_hidden, include_deleted=include_deleted )
+        url = url_for( controller='history', action="export_archive", id=id, qualified=True )
+        return trans.show_message( "Exporting History '%(n)s'. You will need to <a href='%(share)s'>make this history 'accessible'</a> in order to import this to another galaxy sever. <br/>"
+                                   "Use this link to download the archive or import it to another Galaxy server: "
+                                   "<a href='%(u)s'>%(u)s</a>" % ( { 'share': url_for(controller='history', action='sharing'), 'n': history.name, 'u': url } ) )
+        # TODO: used in this file and index.mako
+
+    @web.expose
+    @web.json
+    @web.require_login( "get history name and link" )
+    def get_name_and_link_async( self, trans, id=None ):
+        """ Returns history's name and link. """
+        history = self.history_manager.get_accessible( self.decode_id( id ), trans.user, current_history=trans.history )
+        if self.create_item_slug( trans.sa_session, history ):
+            trans.sa_session.flush()
+        return_dict = {
+            "name": history.name,
+            "link": url_for(controller='history', action="display_by_username_and_slug",
+                            username=history.user.username, slug=history.slug ) }
+        return return_dict
+        # TODO: used in page/editor.mako
+
+    @web.expose
+    @web.require_login( "set history's accessible flag" )
+    def set_accessible_async( self, trans, id=None, accessible=False ):
+        """ Set history's importable attribute and slug. """
+        history = self.history_manager.get_owned( self.decode_id( id ), trans.user, current_history=trans.history )
+        # Only set if the importable value would change; this prevents a change in the update_time unless the attribute really changed.
+        importable = accessible in ['True', 'true', 't', 'T']
+        if history and history.importable != importable:
+            if importable:
+                self._make_item_accessible( trans.sa_session, history )
+            else:
+                history.importable = importable
+            trans.sa_session.flush()
+        return
+        # TODO: used in page/editor.mako
+
+    @web.expose
+    def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
+        """Return autocomplete data for history names"""
+        user = trans.get_user()
+        if not user or not q:
+            return
+
+        ac_data = ""
+        for history in ( trans.sa_session.query( model.History )
+                         .filter_by( user=user )
+                         .filter( func.lower( model.History.name ).like(q.lower() + "%") ) ):
+            ac_data = ac_data + history.name + "\n"
+        return ac_data
+        # TODO: used in grid_base.mako
+
+    @web.expose
+    @web.require_login( "rename histories" )
+    def rename( self, trans, id=None, name=None, **kwd ):
+        user = trans.get_user()
+        if not id:
+            # Default to the current history
+            history = trans.get_history()
+            if not history.user:
+                return trans.show_error_message( "You must save your history before renaming it." )
+            id = trans.security.encode_id( history.id )
+        id = galaxy.util.listify( id )
+        name = galaxy.util.listify( name )
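+        # galaxy.util.listify normalizes single values and lists alike, e.g. ( hypothetical encoded
+        # ids ) listify( 'f2db41e1fa331b3e' ) and listify( [ 'f2db41e1fa331b3e' ] ) both yield
+        # [ 'f2db41e1fa331b3e' ], so the loops below can treat id and name uniformly.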
+        histories = []
+
+        for history_id in id:
+            history = self.history_manager.get_owned( self.decode_id( history_id ), trans.user, current_history=trans.history )
+            if history and history.user_id == user.id:
+                histories.append( history )
+        if not name or len( histories ) != len( name ):
+            return trans.fill_template( "/history/rename.mako", histories=histories )
+
+        change_msgs = []
+        for i in range( len( histories ) ):
+            cur_name = histories[i].get_display_name()
+            new_name = name[i]
+
+            # skip if name is empty
+            if not isinstance( new_name, string_types ) or not new_name.strip():
+                change_msgs.append( "You must specify a valid name for History: " + cur_name )
+                continue
+
+            # skip if not the owner
+            # ??: isn't this already handled in get_history/if statement above?
+            if histories[i].user_id != user.id:
+                change_msgs.append( "History: " + cur_name + " does not appear to belong to you." )
+                continue
+
+            # skip if it wouldn't be a change
+            if new_name == cur_name:
+                change_msgs.append( "History: " + cur_name + " is already named: " + new_name )
+                continue
+
+            # escape, sanitize, set, and log the change
+            new_name = escape( new_name )
+            histories[i].name = sanitize_html( new_name )
+            trans.sa_session.add( histories[i] )
+            trans.sa_session.flush()
+
+            trans.log_event( "History renamed: id: %s, renamed to: '%s'" % ( str( histories[i].id ), new_name ) )
+            change_msgs.append( "History: " + cur_name + " renamed to: " + new_name )
+
+        change_msg = '<br />'.join( change_msgs )
+        return trans.show_message( change_msg, refresh_frames=['history'] )
+
+    # ------------------------------------------------------------------------- current history
+    @web.expose
+    @web.require_login( "switch to a history" )
+    def switch_to_history( self, trans, hist_id=None ):
+        """Change the current user's current history to one with `hist_id`."""
+        # remains for backwards compat
+        self.set_as_current( trans, id=hist_id )
+        return trans.response.send_redirect( url_for( "/" ) )
+
+    def get_item( self, trans, id ):
+        return self.history_manager.get_owned( self.decode_id( id ), trans.user, current_history=trans.history )
+        # TODO: override of base ui controller?
+
+    def history_data( self, trans, history ):
+        """Return the given history in a serialized, dictionary form."""
+        return self.history_serializer.serialize_to_view( history, view='dev-detailed', user=trans.user, trans=trans )
+
+    # TODO: combine these next two - poss. with a redirect flag
+    # @web.require_login( "switch to a history" )
+    @web.json
+    def set_as_current( self, trans, id ):
+        """Change the current user's current history to one with `id`."""
+        # Prevent IE11 from caching this, since we actually use it via GET.
+        trans.response.headers[ 'Cache-Control' ] = ["max-age=0", "no-cache", "no-store"]
+        try:
+            history = self.history_manager.get_owned( self.decode_id( id ), trans.user, current_history=trans.history )
+            trans.set_history( history )
+            return self.history_data( trans, history )
+        except exceptions.MessageException as msg_exc:
+            trans.response.status = msg_exc.err_code.code
+            return { 'err_msg': msg_exc.err_msg, 'err_code': msg_exc.err_code.code }
+
+    @web.json
+    def current_history_json( self, trans ):
+        """Return the current user's current history in a serialized, dictionary form."""
+        # Prevent IE11 from caching this
+        trans.response.headers[ 'Cache-Control' ] = ["max-age=0", "no-cache", "no-store"]
+        history = trans.get_history( most_recent=True, create=True )
+        return self.history_data( trans, history )
+
+    @web.json
+    def create_new_current( self, trans, name=None ):
+        """Create a new, current history for the current user"""
+        new_history = trans.new_history( name )
+        return self.history_data( trans, new_history )
+    # TODO: /history/current to do all of the above: if ajax, return json; if post, read id and set to current
diff --git a/lib/galaxy/webapps/galaxy/controllers/library.py b/lib/galaxy/webapps/galaxy/controllers/library.py
new file mode 100644
index 0000000..3df63fa
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/library.py
@@ -0,0 +1,140 @@
+import logging
+
+from markupsafe import escape
+from sqlalchemy import and_, false, not_, or_
+
+from galaxy import model, util
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.framework.helpers import grids
+from library_common import get_comptypes, lucene_search, whoosh_search
+
+
+log = logging.getLogger( __name__ )
+
+
+class LibraryListGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, library ):
+            return escape(library.name)
+
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, library ):
+            if library.description:
+                return escape(library.description)
+            return ''
+
+    # Grid definition
+    title = "Data Libraries"
+    model_class = model.Library
+    template = '/library/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        NameColumn( "Data library name",
+                    key="name",
+                    link=( lambda library: dict( operation="browse", id=library.id ) ),
+                    attach_popup=False,
+                    filterable="advanced" ),
+        DescriptionColumn( "Data library description",
+                           key="description",
+                           attach_popup=False,
+                           filterable="advanced" ),
+    ]
+    columns.append( grids.MulticolFilterColumn( "search dataset name, info, message, dbkey",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    standard_filters = []
+    default_filter = dict( name="All", description="All", deleted="False", purged="False" )
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+    def build_initial_query( self, trans, **kwargs ):
+        return trans.sa_session.query( self.model_class ).filter( self.model_class.table.c.deleted == false() )
+
+    def apply_query_filter( self, trans, query, **kwd ):
+        current_user_role_ids = [ role.id for role in trans.get_current_user_roles() ]
+        library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
+        restricted_library_ids = [ lp.library_id for lp in trans.sa_session.query( trans.model.LibraryPermissions )
+                                                                           .filter( trans.model.LibraryPermissions.table.c.action == library_access_action )
+                                                                           .distinct() ]
+        accessible_restricted_library_ids = [ lp.library_id for lp in trans.sa_session.query( trans.model.LibraryPermissions )
+                                                                                      .filter( and_( trans.model.LibraryPermissions.table.c.action == library_access_action,
+                                                                                                     trans.model.LibraryPermissions.table.c.role_id.in_( current_user_role_ids ) ) ) ]
+        if not trans.user:
+            # Filter to get only public libraries, a library whose id
+            # is not in restricted_library_ids is a public library
+            return query.filter( not_( trans.model.Library.table.c.id.in_( restricted_library_ids ) ) )
+        else:
+            # Filter to get libraries accessible by the current user, get both
+            # public libraries and restricted libraries accessible by the current user.
+            return query.filter( or_( not_( trans.model.Library.table.c.id.in_( restricted_library_ids ) ),
+                                      trans.model.Library.table.c.id.in_( accessible_restricted_library_ids ) ) )
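+        # Example ( hypothetical ids ): with restricted_library_ids = [ 3, 7 ] and a user whose
+        # roles grant access only to library 7, an anonymous request sees every library whose id
+        # is not in ( 3, 7 ), while that user additionally sees library 7.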
+
+
+class Library( BaseUIController ):
+
+    library_list_grid = LibraryListGrid()
+
+    @web.expose
+    def list( self, trans, **kwd ):
+        # define app configuration for generic mako template
+        app = {
+            'jscript'       : "galaxy.library"
+        }
+        return trans.fill_template( 'galaxy.panels.mako',
+                                    config={
+                                        'title': 'Galaxy Data Libraries',
+                                        'app': app } )
+
+    @web.expose
+    def index( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = escape( kwd.get( 'status', 'done' ) )
+        default_action = kwd.get( 'default_action', None )
+        return trans.fill_template( "/library/index.mako",
+                                    default_action=default_action,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def browse_libraries( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "browse":
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='browse_library',
+                                                                  cntrller='library',
+                                                                  **kwd ) )
+        if 'f-free-text-search' in kwd:
+            search_term = kwd[ "f-free-text-search" ]
+            if trans.app.config.enable_lucene_library_search:
+                search_url = trans.app.config.config_dict.get( "fulltext_url", "" )
+                # Without a fulltext_url there is nothing to query, so fall back to the grid view
+                # instead of referencing status/message/lddas before assignment.
+                indexed_search_enabled = bool( search_url )
+                if indexed_search_enabled:
+                    status, message, lddas = lucene_search( trans, 'library', search_term, search_url, **kwd )
+            elif trans.app.config.enable_whoosh_library_search:
+                indexed_search_enabled = True
+                status, message, lddas = whoosh_search( trans, 'library', search_term, **kwd )
+            else:
+                indexed_search_enabled = False
+            if indexed_search_enabled:
+                comptypes = get_comptypes( trans )
+                show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+                use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+                return trans.fill_template( '/library/common/library_dataset_search_results.mako',
+                                            cntrller='library',
+                                            search_term=search_term,
+                                            comptypes=comptypes,
+                                            lddas=lddas,
+                                            current_user_roles=trans.get_current_user_roles(),
+                                            show_deleted=show_deleted,
+                                            use_panels=use_panels,
+                                            message=message,
+                                            status=status )
+        # Render the list view
+        return self.library_list_grid( trans, **kwd )
diff --git a/lib/galaxy/webapps/galaxy/controllers/library_admin.py b/lib/galaxy/webapps/galaxy/controllers/library_admin.py
new file mode 100644
index 0000000..48a5788
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/library_admin.py
@@ -0,0 +1,247 @@
+import logging
+
+from markupsafe import escape
+
+import galaxy.model
+import galaxy.util
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.framework.helpers import grids, time_ago
+from library_common import get_comptypes, lucene_search, whoosh_search
+
+log = logging.getLogger( __name__ )
+
+
+class LibraryListGrid( grids.Grid ):
+    class NameColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, library ):
+            return escape(library.name)
+
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, library ):
+            if library.description:
+                return escape(library.description)
+            return ''
+
+    class StatusColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, library ):
+            if library.purged:
+                return "purged"
+            elif library.deleted:
+                return "deleted"
+            return ""
+    # Grid definition
+    title = "Data Libraries"
+    model_class = galaxy.model.Library
+    template = '/admin/library/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        NameColumn( "Data library name",
+                    key="name",
+                    link=( lambda library: dict( operation="browse", id=library.id ) ),
+                    attach_popup=False,
+                    filterable="advanced" ),
+        DescriptionColumn( "Data library description",
+                           key="description",
+                           attach_popup=False,
+                           filterable="advanced" ),
+        grids.GridColumn( "Created", key="create_time", format=time_ago ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+        StatusColumn( "Status", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "search dataset name, info, message, dbkey",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( "Create new data library", dict( controller='library_admin', action='create_library' ) )
+    ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True, purged=False ) ),
+        grids.GridColumnFilter( "Purged", args=dict( purged=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+    ]
+    default_filter = dict( name="All", description="All", deleted="False", purged="False" )
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+
+class LibraryAdmin( BaseUIController ):
+
+    library_list_grid = LibraryListGrid()
+
+    @web.expose
+    @web.require_admin
+    def browse_libraries( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "browse":
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='browse_library',
+                                                                  cntrller='library_admin',
+                                                                  **kwd ) )
+            elif operation == "delete":
+                return self.delete_library( trans, **kwd )
+            elif operation == "undelete":
+                return self.undelete_library( trans, **kwd )
+        self.library_list_grid.operations = []
+        if 'f-deleted' in kwd:
+            if kwd[ 'f-deleted' ] != 'All':
+                if galaxy.util.string_as_bool( kwd[ 'f-deleted' ] ):
+                    # We're viewing deleted data libraries, so add a GridOperation
+                    # enabling one or more of them to be undeleted.
+                    self.library_list_grid.operations = [
+                        grids.GridOperation( "Undelete",
+                                             condition=( lambda item: item.deleted ),
+                                             allow_multiple=True,
+                                             allow_popup=False,
+                                             url_args=dict( webapp="galaxy" ) )
+                    ]
+                else:
+                    # We're viewing active data libraries, so add a GridOperation
+                    # enabling one or more of them to be deleted.
+                    self.library_list_grid.operations = [
+                        grids.GridOperation( "Delete",
+                                             condition=( lambda item: not item.deleted ),
+                                             allow_multiple=True,
+                                             allow_popup=False,
+                                             url_args=dict( webapp="galaxy" ) )
+                    ]
+        else:
+            # We're viewing active data libraries, so add a GridOperation
+            # enabling one or more of them to be deleted.
+            self.library_list_grid.operations = [
+                grids.GridOperation( "Delete",
+                                     condition=( lambda item: not item.deleted ),
+                                     allow_multiple=True,
+                                     allow_popup=False,
+                                     url_args=dict( webapp="galaxy" ) )
+            ]
+        if 'f-free-text-search' in kwd:
+            search_term = kwd[ "f-free-text-search" ]
+            if trans.app.config.enable_lucene_library_search:
+                search_url = trans.app.config.config_dict.get( "fulltext_find_url", "" )
+                # Without a fulltext_find_url there is nothing to query, so fall back to the grid view
+                # instead of referencing status/message/lddas before assignment.
+                indexed_search_enabled = bool( search_url )
+                if indexed_search_enabled:
+                    status, message, lddas = lucene_search( trans, 'library_admin', search_term, search_url, **kwd )
+            elif trans.app.config.enable_whoosh_library_search:
+                indexed_search_enabled = True
+                status, message, lddas = whoosh_search( trans, 'library_admin', search_term, **kwd )
+            else:
+                indexed_search_enabled = False
+            if indexed_search_enabled:
+                comptypes = get_comptypes( trans )
+                show_deleted = galaxy.util.string_as_bool( kwd.get( 'show_deleted', False ) )
+                use_panels = galaxy.util.string_as_bool( kwd.get( 'use_panels', False ) )
+                return trans.fill_template( '/library/common/library_dataset_search_results.mako',
+                                            cntrller='library_admin',
+                                            search_term=search_term,
+                                            comptypes=comptypes,
+                                            lddas=lddas,
+                                            show_deleted=show_deleted,
+                                            use_panels=use_panels,
+                                            message=escape( message ),
+                                            status=escape( status ) )
+        # Render the list view
+        return self.library_list_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def create_library( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        if kwd.get( 'create_library_button', False ):
+            name = kwd.get( 'name', 'No name' )
+            description = kwd.get( 'description', '' )
+            synopsis = kwd.get( 'synopsis', '' )
+            if synopsis in [ 'None', None ]:
+                synopsis = ''
+            library = trans.app.model.Library( name=name, description=description, synopsis=synopsis )
+            root_folder = trans.app.model.LibraryFolder( name=name, description='' )
+            library.root_folder = root_folder
+            trans.sa_session.add_all( ( library, root_folder ) )
+            trans.sa_session.flush()
+            message = "The new library named '%s' has been created" % escape( library.name )
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller='library_admin',
+                                                              id=trans.security.encode_id( library.id ),
+                                                              message=message,
+                                                              status='done' ) )
+        return trans.fill_template( '/admin/library/new_library.mako', message=message, status=escape( status ) )
+
+    @web.expose
+    @web.require_admin
+    def delete_library( self, trans, id, **kwd  ):
+        # Used by the Delete grid operation in the LibraryListGrid.
+        return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                          action='delete_library_item',
+                                                          cntrller='library_admin',
+                                                          library_id=id,
+                                                          item_id=id,
+                                                          item_type='library' ) )
+
+    @web.expose
+    @web.require_admin
+    def undelete_library( self, trans, id, **kwd  ):
+        # Used by the Undelete grid operation in the LibraryListGrid.
+        return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                          action='undelete_library_item',
+                                                          cntrller='library_admin',
+                                                          library_id=id,
+                                                          item_id=id,
+                                                          item_type='library' ) )
+
+    @web.expose
+    @web.require_admin
+    def purge_library( self, trans, **kwd ):
+        # TODO: change this function to purge_library_item, behaving similar to delete_library_item
+        # assuming we want the ability to purge libraries.
+        # This function is currently only used by the functional tests.
+        library = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( kwd.get( 'id' ) ) )
+
+        def purge_folder( library_folder ):
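+            # Recursively mark this folder, its subfolders, and their datasets and lddas as
+            # deleted; removal of the files from disk is left to the cleanup_datasets script.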
+            for lf in library_folder.folders:
+                purge_folder( lf )
+            trans.sa_session.refresh( library_folder )
+            for library_dataset in library_folder.datasets:
+                trans.sa_session.refresh( library_dataset )
+                ldda = library_dataset.library_dataset_dataset_association
+                if ldda:
+                    trans.sa_session.refresh( ldda )
+                    dataset = ldda.dataset
+                    trans.sa_session.refresh( dataset )
+                    # If the dataset is not associated with any additional undeleted folders, then we can delete it.
+                    # We don't set dataset.purged to True here because the cleanup_datasets script will do that for
+                    # us, as well as removing the file from disk.
+                    # if not dataset.deleted and len( dataset.active_library_associations ) <= 1: # This is our current ldda
+                    dataset.deleted = True
+                    ldda.deleted = True
+                    trans.sa_session.add_all( ( dataset, ldda ) )
+                library_dataset.deleted = True
+                trans.sa_session.add( library_dataset )
+            library_folder.deleted = True
+            library_folder.purged = True
+            trans.sa_session.add( library_folder )
+            trans.sa_session.flush()
+        if not library.deleted:
+            message = "Library '%s' has not been marked deleted, so it cannot be purged" % escape( library.name )
+            return trans.response.send_redirect( web.url_for( controller='library_admin',
+                                                              action='browse_libraries',
+                                                              message=message,
+                                                              status='error' ) )
+        else:
+            purge_folder( library.root_folder )
+            library.purged = True
+            trans.sa_session.add( library )
+            trans.sa_session.flush()
+            message = "Library '%s' and all of its contents have been purged, datasets will be removed from disk via the cleanup_datasets script" % escape( library.name )
+            return trans.response.send_redirect( web.url_for( controller='library_admin',
+                                                              action='browse_libraries',
+                                                              message=message,
+                                                              status='done' ) )
diff --git a/lib/galaxy/webapps/galaxy/controllers/library_common.py b/lib/galaxy/webapps/galaxy/controllers/library_common.py
new file mode 100644
index 0000000..680bb46
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -0,0 +1,2805 @@
+import glob
+import logging
+import operator
+import os
+import os.path
+import string
+import sys
+import tarfile
+import tempfile
+import urllib
+import urllib2
+import zipfile
+from json import dumps, loads
+
+from markupsafe import escape
+from sqlalchemy import and_, false
+from sqlalchemy.orm import eagerload_all
+
+from galaxy import util, web
+from galaxy.security import Action
+from galaxy.tools.actions import upload_common
+from galaxy.util import inflector, unicodify, FILENAME_VALID_CHARS
+from galaxy.util.streamball import StreamBall
+from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin, UsesExtendedMetadataMixin, UsesLibraryMixinItems
+from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, build_select_field
+
+# Whoosh is compatible with Python 2.5+.  Try to import Whoosh and set a flag to indicate whether library search is enabled.
+try:
+    import whoosh.index
+    from whoosh.fields import Schema, STORED, TEXT
+    from whoosh.scoring import BM25F
+    from whoosh.qparser import MultifieldParser
+    whoosh_search_enabled = True
+    # The following must be defined exactly like the
+    # schema in ~/scripts/data_libraries/build_whoosh_index.py
+    schema = Schema( id=STORED, name=TEXT, info=TEXT, dbkey=TEXT, message=TEXT )
+except ImportError as e:
+    whoosh_search_enabled = False
+    schema = None
+
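+# A minimal sketch of how this schema is typically queried ( index_dir and search_term are
+# hypothetical; the real search logic lives in whoosh_search() in this module ):
+#   ix = whoosh.index.open_dir( index_dir )
+#   parser = MultifieldParser( [ 'name', 'info', 'dbkey', 'message' ], schema=schema )
+#   with ix.searcher( weighting=BM25F() ) as searcher:
+#       hits = searcher.search( parser.parse( search_term ) )
+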
+log = logging.getLogger( __name__ )
+
+# Test for available compression types
+tmpd = tempfile.mkdtemp()
+comptypes = []
+for comptype in ( 'gz', 'bz2' ):
+    tmpf = os.path.join( tmpd, 'compression_test.tar.' + comptype )
+    try:
+        archive = tarfile.open( tmpf, 'w:' + comptype )
+        archive.close()
+        comptypes.append( comptype )
+    except tarfile.CompressionError:
+        log.exception( "Compression error when testing %s compression.  This option will be disabled for library downloads." % comptype )
+    try:
+        os.unlink( tmpf )
+    except OSError:
+        pass
+try:
+    import zlib  # noqa: F401
+    comptypes.append( 'zip' )
+except ImportError:
+    pass
+
+try:
+    os.rmdir( tmpd )
+except OSError:
+    pass
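+
+# comptypes now lists the archive formats this host can build, e.g. [ 'gz', 'bz2', 'zip' ].
+# A sketch of the assumed downstream use when streaming a library archive with a chosen
+# comptype ( StreamBall wraps a tarfile stream ):
+#   archive = StreamBall( 'w|gz' )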
+
+
+class LibraryCommon( BaseUIController, UsesFormDefinitionsMixin, UsesExtendedMetadataMixin, UsesLibraryMixinItems ):
+    @web.json
+    def library_item_updates( self, trans, ids=None, states=None ):
+        # Avoid caching
+        trans.response.headers['Pragma'] = 'no-cache'
+        trans.response.headers['Expires'] = '0'
+        # Create new HTML for any that have changed
+        rval = {}
+        if ids is not None and states is not None:
+            ids = map( int, ids.split( "," ) )
+            states = states.split( "," )
+            for id, state in zip( ids, states ):
+                data = trans.sa_session.query( self.app.model.LibraryDatasetDatasetAssociation ).get( id )
+                if data.state != state:
+                    job_ldda = data
+                    while job_ldda.copied_from_library_dataset_dataset_association:
+                        job_ldda = job_ldda.copied_from_library_dataset_dataset_association
+                    rval[id] = {
+                        "state": data.state,
+                        "html": unicodify( trans.fill_template( "library/common/library_item_info.mako", ldda=data ), 'utf-8' )
+                        # "force_history_refresh": force_history_refresh
+                    }
+        return rval
+
+    @web.expose
+    def browse_library( self, trans, cntrller='library', **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        # If use_panels is True, the library is being accessed via an external link
+        # which did not originate from within the Galaxy instance, and the library will
+        # be displayed correctly with the masthead.
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        library_id = kwd.get( 'id', None )
+        if not library_id:
+            # To handle bots
+            message = "You must specify a library id."
+            status = 'error'
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            library = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) )
+        except Exception:
+            # Protect against attempts to phish for valid keys that return libraries
+            library = None
+        # Most security for browsing libraries is handled in the template, but do a basic check here.
+        if not library or not ( is_admin or trans.app.security_agent.can_access_library( current_user_roles, library ) ):
+            message = "Invalid library id ( %s ) specified." % str( library_id )
+            status = 'error'
+        else:
+            show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+            created_ldda_ids = kwd.get( 'created_ldda_ids', '' )
+            hidden_folder_ids = util.listify( kwd.get( 'hidden_folder_ids', '' ) )
+            if created_ldda_ids and not message:
+                message = "%d datasets are uploading in the background to the library '%s' (each is selected).  " % \
+                    ( len( created_ldda_ids.split( ',' ) ), escape( library.name ) )
+                message += "Don't navigate away from Galaxy or use the browser's \"stop\" or \"reload\" buttons (on this tab) until the "
+                message += "message \"This job is running\" is cleared from the \"Information\" column below for each selected dataset."
+                status = "info"
+            comptypes = get_comptypes( trans )
+            try:
+                if self.app.config.new_lib_browse:
+                    return trans.fill_template( 'library/common/browse_library_opt.mako',
+                                                cntrller=cntrller,
+                                                use_panels=use_panels,
+                                                library=library,
+                                                created_ldda_ids=created_ldda_ids,
+                                                hidden_folder_ids=hidden_folder_ids,
+                                                show_deleted=show_deleted,
+                                                comptypes=comptypes,
+                                                current_user_roles=current_user_roles,
+                                                message=escape( message ),
+                                                status=escape( status ) )
+                else:
+                    return trans.fill_template( 'library/common/browse_library.mako',
+                                                cntrller=cntrller,
+                                                use_panels=use_panels,
+                                                library=library,
+                                                created_ldda_ids=created_ldda_ids,
+                                                hidden_folder_ids=hidden_folder_ids,
+                                                show_deleted=show_deleted,
+                                                comptypes=comptypes,
+                                                current_user_roles=current_user_roles,
+                                                message=escape( message ),
+                                                status=escape( status ) )
+            except Exception as e:
+                message = 'Error attempting to display contents of library (%s): %s.' % ( escape( str( library.name ) ), str( e ) )
+                status = 'error'
+        default_action = kwd.get( 'default_action', None )
+
+        return trans.response.send_redirect( web.url_for( use_panels=use_panels,
+                                                          controller=cntrller,
+                                                          action='browse_libraries',
+                                                          default_action=default_action,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    def library_info( self, trans, cntrller, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        library_id = kwd.get( 'id', None )
+        try:
+            library = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) )
+        except Exception:
+            library = None
+        self._check_access( trans, cntrller, is_admin, library, current_user_roles, use_panels, library_id, show_deleted )
+        if kwd.get( 'library_info_button', False ):
+            self._check_modify( trans, cntrller, is_admin, library, current_user_roles, use_panels, library_id, show_deleted )
+            new_name = kwd.get( 'name', 'No name' )
+            if not new_name:
+                message = 'Enter a valid name'
+                status = 'error'
+            else:
+                new_description = kwd.get( 'description', '' )
+                new_synopsis = kwd.get( 'synopsis', '' )
+                if new_synopsis in [ None, 'None' ]:
+                    new_synopsis = ''
+                library.name = new_name
+                library.description = new_description
+                library.synopsis = new_synopsis
+                # Rename the root_folder
+                library.root_folder.name = new_name
+                library.root_folder.description = new_description
+                trans.sa_session.add_all( ( library, library.root_folder ) )
+                trans.sa_session.flush()
+                message = "Information updated for library '%s'." % library.name
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='library_info',
+                                                                  cntrller=cntrller,
+                                                                  use_panels=use_panels,
+                                                                  id=trans.security.encode_id( library.id ),
+                                                                  show_deleted=show_deleted,
+                                                                  message=message,
+                                                                  status='done' ) )
+        # See if we have any associated templates
+        info_association, inherited = library.get_info_association()
+        widgets = library.get_template_widgets( trans )
+        widget_fields_have_contents = self.widget_fields_have_contents( widgets )
+        return trans.fill_template( '/library/common/library_info.mako',
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    library=library,
+                                    widgets=widgets,
+                                    widget_fields_have_contents=widget_fields_have_contents,
+                                    current_user_roles=current_user_roles,
+                                    show_deleted=show_deleted,
+                                    info_association=info_association,
+                                    inherited=inherited,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def library_permissions( self, trans, cntrller, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        library_id = kwd.get( 'id', None )
+        try:
+            library = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) )
+        except Exception:
+            library = None
+        self._check_access( trans, cntrller, is_admin, library, current_user_roles, use_panels, library_id, show_deleted )
+        self._check_manage( trans, cntrller, is_admin, library, current_user_roles, use_panels, library_id, show_deleted )
+        if kwd.get( 'update_roles_button', False ):
+            # The user clicked the Save button on the 'Associate With Roles' form
+            permissions = {}
+            for k, v in trans.app.model.Library.permitted_actions.items():
+                in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ]
+                permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
+            trans.app.security_agent.set_all_library_permissions( trans, library, permissions )
+            trans.sa_session.refresh( library )
+            # Copy the permissions to the root folder
+            trans.app.security_agent.copy_library_permissions( trans, library, library.root_folder )
+            message = "Permissions updated for library '%s'." % escape( library.name )
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='library_permissions',
+                                                              cntrller=cntrller,
+                                                              use_panels=use_panels,
+                                                              id=trans.security.encode_id( library.id ),
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status='done' ) )
+        roles = trans.app.security_agent.get_legitimate_roles( trans, library, cntrller )
+        all_roles = trans.app.security_agent.get_all_roles( trans, cntrller )
+        return trans.fill_template( '/library/common/library_permissions.mako',
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    library=library,
+                                    current_user_roles=current_user_roles,
+                                    roles=roles,
+                                    all_roles=all_roles,
+                                    show_deleted=show_deleted,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def create_folder( self, trans, cntrller, parent_id, library_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        is_admin = trans.user_is_admin() and cntrller in ( 'library_admin', 'api' )
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            parent_folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( parent_id ) )
+        except Exception:
+            parent_folder = None
+        # Check the library which actually contains the user-supplied parent folder, not the user-supplied
+        # library, which could be anything.
+        self._check_access( trans, cntrller, is_admin, parent_folder, current_user_roles, use_panels, library_id, show_deleted )
+        self._check_add( trans, cntrller, is_admin, parent_folder, current_user_roles, use_panels, library_id, show_deleted )
+        if kwd.get( 'new_folder_button', False ) or cntrller == 'api':
+            new_folder = trans.app.model.LibraryFolder( name=kwd.get( 'name', '' ),
+                                                        description=kwd.get( 'description', '' ) )
+            # We are associating the last used genome build with folders, so we will always
+            # initialize a new folder with the first dbkey in the genome builds list, which is
+            # currently '?' ( unspecified ).
+            new_folder.genome_build = trans.app.genome_builds.default_value
+            parent_folder.add_folder( new_folder )
+            trans.sa_session.add( new_folder )
+            trans.sa_session.flush()
+            # New folders default to having the same permissions as their parent folder
+            trans.app.security_agent.copy_library_permissions( trans, parent_folder, new_folder )
+            # If we're creating in the API, we're done
+            if cntrller == 'api':
+                return 200, dict( created=new_folder )
+            # If we have an inheritable template, redirect to the folder_info page so information
+            # can be filled in immediately.
+            widgets = []
+            info_association, inherited = new_folder.get_info_association()
+            if info_association and ( not inherited or info_association.inheritable ):
+                widgets = new_folder.get_template_widgets( trans )
+            if info_association:
+                message = "The new folder named '%s' has been added to the data library.  " % escape( new_folder.name )
+                message += "Additional information about this folder may be added using the inherited template."
+                return trans.fill_template( '/library/common/folder_info.mako',
+                                            cntrller=cntrller,
+                                            use_panels=use_panels,
+                                            folder=new_folder,
+                                            library_id=library_id,
+                                            widgets=widgets,
+                                            current_user_roles=current_user_roles,
+                                            show_deleted=show_deleted,
+                                            info_association=info_association,
+                                            inherited=inherited,
+                                            message=escape( message ),
+                                            status='done' )
+            # If not inheritable info_association, redirect to the library.
+            message = "The new folder named '%s' has been added to the data library." % escape( new_folder.name )
+            # Note: this is the second place where the API controller
+            # references the library id.
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              use_panels=use_panels,
+                                                              id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status='done' ) )
+        # We do not render any template widgets on creation pages since saving the info_association
+        # cannot occur before the associated item is saved.
+        return trans.fill_template( '/library/common/new_folder.mako',
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    library_id=library_id,
+                                    folder=parent_folder,
+                                    show_deleted=show_deleted,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def folder_info( self, trans, cntrller, id, library_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( id ) )
+        except Exception:
+            folder = None
+        self._check_access( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+        if kwd.get( 'rename_folder_button', False ):
+            self._check_modify( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+            new_name = kwd.get( 'name', '' )
+            new_description = kwd.get( 'description', '' )
+            if not new_name:
+                message = 'Enter a valid name'
+                status = 'error'
+            else:
+                folder.name = new_name
+                folder.description = new_description
+                trans.sa_session.add( folder )
+                trans.sa_session.flush()
+                message = "Information updated for folder '%s'." % escape( folder.name )
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='folder_info',
+                                                                  cntrller=cntrller,
+                                                                  use_panels=use_panels,
+                                                                  id=id,
+                                                                  library_id=library_id,
+                                                                  show_deleted=show_deleted,
+                                                                  message=message,
+                                                                  status='done' ) )
+        # See if we have any associated templates
+        widgets = []
+        widget_fields_have_contents = False
+        info_association, inherited = folder.get_info_association()
+        if info_association and ( not inherited or info_association.inheritable ):
+            widgets = folder.get_template_widgets( trans )
+            widget_fields_have_contents = self.widget_fields_have_contents( widgets )
+        return trans.fill_template( '/library/common/folder_info.mako',
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    folder=folder,
+                                    library_id=library_id,
+                                    widgets=widgets,
+                                    widget_fields_have_contents=widget_fields_have_contents,
+                                    current_user_roles=current_user_roles,
+                                    show_deleted=show_deleted,
+                                    info_association=info_association,
+                                    inherited=inherited,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def folder_permissions( self, trans, cntrller, id, library_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( id ) )
+        except Exception:
+            folder = None
+        self._check_access( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+        self._check_manage( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+        if kwd.get( 'update_roles_button', False ):
+            # The user clicked the Save button on the 'Associate With Roles' form
+            permissions = {}
+            for k, v in trans.app.model.Library.permitted_actions.items():
+                if k != 'LIBRARY_ACCESS':
+                    # LIBRARY_ACCESS is a special permission set only at the library level
+                    # and it is not inherited.
+                    in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( int( x ) ) for x in util.listify( kwd.get( k + '_in', [] ) ) ]
+                    permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
+            trans.app.security_agent.set_all_library_permissions( trans, folder, permissions )
+            trans.sa_session.refresh( folder )
+            message = "Permissions updated for folder '%s'." % escape( folder.name )
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='folder_permissions',
+                                                              cntrller=cntrller,
+                                                              use_panels=use_panels,
+                                                              id=trans.security.encode_id( folder.id ),
+                                                              library_id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status='done' ) )
+        # If the library is public all roles are legitimate, but if the library
+        # is restricted, only those roles associated with the LIBRARY_ACCESS
+        # permission are legitimate.
+        roles = trans.app.security_agent.get_legitimate_roles( trans, folder.parent_library, cntrller )
+        return trans.fill_template( '/library/common/folder_permissions.mako',
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    folder=folder,
+                                    library_id=library_id,
+                                    current_user_roles=current_user_roles,
+                                    roles=roles,
+                                    show_deleted=show_deleted,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def ldda_edit_info( self, trans, cntrller, library_id, folder_id, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( id ) )
+        except Exception:
+            ldda = None
+        self._check_access( trans, cntrller, is_admin, ldda, current_user_roles, use_panels, library_id, show_deleted )
+        self._check_modify( trans, cntrller, is_admin, ldda, current_user_roles, use_panels, library_id, show_deleted )
+        dbkey = kwd.get( 'dbkey', '?' )
+        if isinstance( dbkey, list ):
+            dbkey = dbkey[0]
+        file_formats = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
+        file_formats.sort()
+
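+        # Guard helper: metadata edits are blocked while any job still uses this
+        # dataset as an input or output, presumably because such jobs may still
+        # read or write the underlying file.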
+        def __ok_to_edit_metadata( ldda_id ):
+            # Prevent modifying metadata while the dataset is queued or running as a job
+            # input/output.  This could be more efficient ( e.g. by using mappers ), but to
+            # avoid slowing down history panel loads we leave it as-is for now.
+            for job_to_dataset_association in trans.sa_session.query(
+                    self.app.model.JobToInputLibraryDatasetAssociation ) \
+                    .filter_by( ldda_id=ldda_id ) \
+                    .all() \
+                    + trans.sa_session.query( self.app.model.JobToOutputLibraryDatasetAssociation ) \
+                    .filter_by( ldda_id=ldda_id ) \
+                    .all():
+                if job_to_dataset_association.job.state not in [ job_to_dataset_association.job.states.OK, job_to_dataset_association.job.states.ERROR, job_to_dataset_association.job.states.DELETED ]:
+                    return False
+            return True
+
+        # See if we have any associated templates
+        widgets = []
+        info_association, inherited = ldda.get_info_association()
+        if info_association and ( not inherited or info_association.inheritable ):
+            widgets = ldda.get_template_widgets( trans )
+        if kwd.get( 'change', False ):
+            # The user clicked the Save button on the 'Change data type' form
+            if __ok_to_edit_metadata( ldda.id ):
+                if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( kwd.get( 'datatype' ) ).allow_datatype_change:
+                    trans.app.datatypes_registry.change_datatype( ldda, kwd.get( 'datatype' ) )
+                    trans.sa_session.flush()
+                    message = "Data type changed for library dataset '%s'." % escape( ldda.name )
+                    status = 'done'
+                else:
+                    message = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( escape( ldda.extension ), escape( kwd.get( 'datatype' ) ) )
+                    status = 'error'
+            else:
+                message = "This dataset is currently being used as input or output.  You cannot change datatype until the jobs have completed or you have canceled them."
+                status = "error"
+        elif kwd.get( 'save', False ):
+            # The user clicked the Save button on the 'Edit Attributes' form
+            new_name = kwd.get( 'name', '' )
+            new_info = kwd.get( 'info', '' )
+            new_message = escape( kwd.get( 'message', '' ) )
+            if not new_name:
+                message = 'Enter a valid name'
+                status = 'error'
+            else:
+                ldda.name = new_name
+                ldda.info = new_info
+                ldda.message = new_message
+                if __ok_to_edit_metadata( ldda.id ):
+                    # The following for loop will save all metadata_spec items
+                    for name, spec in ldda.datatype.metadata_spec.items():
+                        if spec.get("readonly"):
+                            continue
+                        optional = kwd.get( "is_" + name, None )
+                        if optional and optional == '__NOTHING__':
+                            # A value of '__NOTHING__' for an optional element means it is NOT checked ( and therefore omitted )
+                            setattr( ldda.metadata, name, None )
+                        else:
+                            setattr( ldda.metadata, name, spec.unwrap( kwd.get( name, None ) ) )
+                    ldda.metadata.dbkey = dbkey
+                    ldda.datatype.after_setting_metadata( ldda )
+                    message = "Attributes updated for library dataset '%s'." % escape( ldda.name )
+                    status = 'done'
+                else:
+                    message = "Attributes updated, but metadata could not be changed because this dataset is currently being used as input or output. You must cancel or wait for these jobs to complete before changing metadata."
+                    status = 'warning'
+                trans.sa_session.flush()
+        elif kwd.get( 'detect', False ):
+            # The user clicked the Auto-detect button on the 'Edit Attributes' form
+            if __ok_to_edit_metadata( ldda.id ):
+                for name, spec in ldda.datatype.metadata_spec.items():
+                    # We need to be careful about the attributes we are resetting
+                    if name not in [ 'name', 'info', 'dbkey' ]:
+                        if spec.get( 'default' ):
+                            setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
+                message = "Attributes have been queued to be updated for library dataset '%s'." % escape( ldda.name )
+                status = 'done'
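+                # Re-detection happens via the set_external_metadata tool, which
+                # runs as a regular job, hence "queued" in the message above.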
+                trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming={ 'input1': ldda } )
+            else:
+                message = "This dataset is currently being used as input or output.  You cannot change metadata until the jobs have completed or you have canceled them."
+                status = 'error'
+            trans.sa_session.flush()
+        elif kwd.get( 'change_extended_metadata', False):
+            em_string = kwd.get("extended_metadata", "" )
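+            # An empty extended_metadata value clears any existing extended
+            # metadata object; a non-empty value must be valid JSON and replaces
+            # the existing object ( delete, then re-create ).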
+            if em_string:
+                payload = None
+                try:
+                    payload = loads(em_string)
+                except Exception:
+                    message = 'Invalid JSON input'
+                    status = 'error'
+                if payload is not None:
+                    if ldda is not None:
+                        ex_obj = self.get_item_extended_metadata_obj(trans, ldda)
+                        if ex_obj is not None:
+                            self.unset_item_extended_metadata_obj(trans, ldda)
+                            self.delete_extended_metadata(trans, ex_obj)
+                        ex_obj = self.create_extended_metadata(trans, payload)
+                        self.set_item_extended_metadata_obj(trans, ldda, ex_obj)
+                        message = "Updated Extended metadata '%s'." % escape( ldda.name )
+                        status = 'done'
+                    else:
+                        message = "LDDA not found"
+                        status = 'error'
+            else:
+                if ldda is not None:
+                    ex_obj = self.get_item_extended_metadata_obj(trans, ldda)
+                    if ex_obj is not None:
+                        self.unset_item_extended_metadata_obj(trans, ldda)
+                        self.delete_extended_metadata(trans, ex_obj)
+                message = "Deleted Extended metadata '%s'." % escape( ldda.name )
+                status = 'done'
+
+        if "dbkey" in ldda.datatype.metadata_spec and not ldda.metadata.dbkey:
+            # Copy dbkey into metadata for backward compatibility.  This looks
+            # like a no-op, but getting the dbkey returns the metadata dbkey
+            # unless it is None, in which case it falls back to the old dbkey;
+            # setting it here stores the value properly in the metadata.
+            ldda.metadata.dbkey = ldda.dbkey
+        return trans.fill_template( "/library/common/ldda_edit_info.mako",
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    ldda=ldda,
+                                    library_id=library_id,
+                                    file_formats=file_formats,
+                                    widgets=widgets,
+                                    current_user_roles=current_user_roles,
+                                    show_deleted=show_deleted,
+                                    info_association=info_association,
+                                    inherited=inherited,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def ldda_info( self, trans, cntrller, library_id, folder_id, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        show_associated_hdas_and_lddas = util.string_as_bool( kwd.get( 'show_associated_hdas_and_lddas', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( id ) )
+        self._check_access( trans, cntrller, is_admin, ldda, current_user_roles, use_panels, library_id, show_deleted )
+        if is_admin and show_associated_hdas_and_lddas:
+            # Get all associated hdas and lddas that use the same disk file.
+            associated_hdas = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
+                                              .filter( and_( trans.model.HistoryDatasetAssociation.deleted == false(),
+                                                             trans.model.HistoryDatasetAssociation.dataset_id == ldda.dataset_id ) ) \
+                                              .all()
+            associated_lddas = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ) \
+                                               .filter( and_( trans.model.LibraryDatasetDatasetAssociation.deleted == false(),
+                                                              trans.model.LibraryDatasetDatasetAssociation.dataset_id == ldda.dataset_id,
+                                                              trans.model.LibraryDatasetDatasetAssociation.id != ldda.id ) ) \
+                                               .all()
+        else:
+            associated_hdas = []
+            associated_lddas = []
+        # See if we have any associated templates
+        widgets = []
+        widget_fields_have_contents = False
+        info_association, inherited = ldda.get_info_association()
+        if info_association and ( not inherited or info_association.inheritable ):
+            widgets = ldda.get_template_widgets( trans )
+            widget_fields_have_contents = self.widget_fields_have_contents( widgets )
+        return trans.fill_template( '/library/common/ldda_info.mako',
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    ldda=ldda,
+                                    library=ldda.library_dataset.folder.parent_library,
+                                    show_associated_hdas_and_lddas=show_associated_hdas_and_lddas,
+                                    associated_hdas=associated_hdas,
+                                    associated_lddas=associated_lddas,
+                                    show_deleted=show_deleted,
+                                    widgets=widgets,
+                                    widget_fields_have_contents=widget_fields_have_contents,
+                                    current_user_roles=current_user_roles,
+                                    info_association=info_association,
+                                    inherited=inherited,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def ldda_permissions( self, trans, cntrller, library_id, folder_id, id, **kwd ):
+        message = str( escape( kwd.get( 'message', '' ) ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        ids = util.listify( id )
+        lddas = []
+        libraries = []
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        for id in ids:
+            try:
+                ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( id ) )
+            except Exception:
+                ldda = None
+            if ldda:
+                library = ldda.library_dataset.folder.parent_library
+            self._check_access( trans, cntrller, is_admin, ldda, current_user_roles, use_panels, library_id, show_deleted )
+            lddas.append( ldda )
+            libraries.append( library )
+        library = libraries[0]
+        if any( x != library for x in libraries ):
+            message = "Library datasets specified span multiple libraries."
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              id=library_id,
+                                                              cntrller=cntrller,
+                                                              use_panels=use_panels,
+                                                              message=message,
+                                                              status='error' ) )
+        # If access to the dataset is restricted, then use the roles associated with the DATASET_ACCESS permission to
+        # determine the legitimate roles.  If the dataset is public, see if access to the library is restricted.  If
+        # it is, use the roles associated with the LIBRARY_ACCESS permission to determine the legitimate roles.  If both
+        # the dataset and the library are public, all roles are legitimate.  All of the datasets will have the same
+        # permissions at this point.
+        ldda = lddas[0]
+        if trans.app.security_agent.dataset_is_public( ldda.dataset ):
+            # The dataset is public, so check access to the library
+            roles = trans.app.security_agent.get_legitimate_roles( trans, library, cntrller )
+        else:
+            roles = trans.app.security_agent.get_legitimate_roles( trans, ldda.dataset, cntrller )
+        if kwd.get( 'update_roles_button', False ):
+            # Dataset permissions
+            access_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action )
+            manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
+            permissions, in_roles, error, message = \
+                trans.app.security_agent.derive_roles_from_access( trans, trans.app.security.decode_id( library_id ), cntrller, library=True, **kwd )
+            # Keep roles for DATASET_MANAGE_PERMISSIONS on the dataset
+            if not ldda.has_manage_permissions_roles( trans ):
+                # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time,
+                # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS
+                # permission.  In this case, we'll reset this permission to the ldda user's private role.
+                # dataset_manage_permissions_roles = [ trans.app.security_agent.get_private_user_role( ldda.user ) ]
+                permissions[ manage_permissions_action ] = [ trans.app.security_agent.get_private_user_role( ldda.user ) ]
+            else:
+                permissions[ manage_permissions_action ] = ldda.get_manage_permissions_roles( trans )
+            for ldda in lddas:
+                # Set the DATASET permissions on the Dataset.
+                if error:
+                    # Keep the original role associations for the DATASET_ACCESS permission on the ldda.
+                    permissions[ access_action ] = ldda.get_access_roles( trans )
+                    status = 'error'
+                else:
+                    error = trans.app.security_agent.set_all_dataset_permissions( ldda.dataset, permissions )
+                    if error:
+                        message += error
+                        status = 'error'
+                    trans.sa_session.refresh( ldda.dataset )
+            if not error:
+                # Set the LIBRARY permissions on the LibraryDataset.  The LibraryDataset and
+                # LibraryDatasetDatasetAssociation will be set with the same permissions.
+                permissions = {}
+                for k, v in trans.app.model.Library.permitted_actions.items():
+                    if k != 'LIBRARY_ACCESS':
+                        # LIBRARY_ACCESS is a special permission set only at the library level and it is not inherited.
+                        in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ]
+                        permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
+                for ldda in lddas:
+                    error = trans.app.security_agent.set_all_library_permissions( trans, ldda.library_dataset, permissions )
+                    trans.sa_session.refresh( ldda.library_dataset )
+                    if error:
+                        message = error
+                    else:
+                        # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation
+                        trans.app.security_agent.set_all_library_permissions( trans, ldda, permissions )
+                        trans.sa_session.refresh( ldda )
+                if len( lddas ) == 1:
+                    message = "Permissions updated for dataset '%s'." % escape( ldda.name )
+                else:
+                    message = 'Permissions updated for %d datasets.' % len( lddas )
+                status = 'done'
+            return trans.fill_template( "/library/common/ldda_permissions.mako",
+                                        cntrller=cntrller,
+                                        use_panels=use_panels,
+                                        lddas=lddas,
+                                        library_id=library_id,
+                                        roles=roles,
+                                        show_deleted=show_deleted,
+                                        message=escape( message ),
+                                        status=escape( status ) )
+        if len( ids ) > 1:
+            # Ensure that the permissions across all library items are identical, otherwise we can't update them together.
+            check_list = []
+            for ldda in lddas:
+                permissions = []
+                # Check the library level permissions - the permissions on the LibraryDatasetDatasetAssociation
+                # will always be the same as the permissions on the associated LibraryDataset.
+                for library_permission in trans.app.security_agent.get_permissions( ldda.library_dataset ):
+                    if library_permission.action not in permissions:
+                        permissions.append( library_permission.action )
+                for dataset_permission in trans.app.security_agent.get_permissions( ldda.dataset ):
+                    if dataset_permission.action not in permissions:
+                        permissions.append( dataset_permission.action )
+                permissions.sort()
+                if not check_list:
+                    check_list = permissions
+                if permissions != check_list:
+                    message = 'The datasets you selected do not have identical permissions, so they cannot be updated together.'
+                    trans.response.send_redirect( web.url_for( controller='library_common',
+                                                               action='browse_library',
+                                                               cntrller=cntrller,
+                                                               use_panels=use_panels,
+                                                               id=library_id,
+                                                               show_deleted=show_deleted,
+                                                               message=message,
+                                                               status='error' ) )
+        # Display permission form, permissions will be updated for all lddas simultaneously.
+        return trans.fill_template( "/library/common/ldda_permissions.mako",
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    lddas=lddas,
+                                    library_id=library_id,
+                                    roles=roles,
+                                    show_deleted=show_deleted,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
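+    # upload_library_dataset renders and handles the library upload form.  The
+    # upload_option modes ( 'upload_file', 'upload_directory', 'upload_paths' )
+    # are processed by upload_dataset() below, and a replace_id selects the
+    # replace-dataset flow.  Like create_folder, it also serves the API path
+    # ( cntrller == 'api' ).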
+    @web.expose
+    def upload_library_dataset( self, trans, cntrller, library_id, folder_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        ldda_message = escape( kwd.get( 'ldda_message', '' ) )
+        # deleted = util.string_as_bool( kwd.get( 'deleted', False ) )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        replace_id = kwd.get( 'replace_id', None )
+        replace_dataset = None
+        upload_option = kwd.get( 'upload_option', 'upload_file' )
+        if kwd.get( 'files_0|to_posix_lines', False ):
+            to_posix_lines = kwd.get( 'files_0|to_posix_lines', '' )
+        else:
+            to_posix_lines = kwd.get( 'to_posix_lines', '' )
+        if kwd.get( 'files_0|space_to_tab', False ):
+            space_to_tab = kwd.get( 'files_0|space_to_tab', '' )
+        else:
+            space_to_tab = kwd.get( 'space_to_tab', '' )
+        link_data_only = kwd.get( 'link_data_only', 'copy_files' )
+        dbkey = kwd.get( 'dbkey', '?' )
+        if isinstance( dbkey, list ):
+            last_used_build = dbkey[0]
+        else:
+            last_used_build = dbkey
+        roles = kwd.get( 'roles', '' )
+        is_admin = trans.user_is_admin() and cntrller in ( 'library_admin', 'api' )
+        current_user_roles = trans.get_current_user_roles()
+        widgets = []
+        info_association, inherited = None, None
+        template_id = "None"
+        if replace_id not in [ '', None, 'None' ]:
+            replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( replace_id ) )
+            self._check_access( trans, cntrller, is_admin, replace_dataset, current_user_roles, use_panels, library_id, show_deleted )
+            self._check_modify( trans, cntrller, is_admin, replace_dataset, current_user_roles, use_panels, library_id, show_deleted )
+            library = replace_dataset.folder.parent_library
+            folder = replace_dataset.folder
+            info_association, inherited = replace_dataset.library_dataset_dataset_association.get_info_association()
+            if info_association and ( not inherited or info_association.inheritable ):
+                widgets = replace_dataset.library_dataset_dataset_association.get_template_widgets( trans )
+            # Store the name now; by the time the new ldda is created,
+            # replace_dataset.name will refer to the new ldda, not the one being replaced.
+            replace_dataset_name = replace_dataset.name
+            if not last_used_build:
+                last_used_build = replace_dataset.library_dataset_dataset_association.dbkey
+        else:
+            folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( folder_id ) )
+            self._check_access( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+            self._check_add( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+            library = folder.parent_library
+        if folder and last_used_build in [ 'None', None, '?' ]:
+            last_used_build = folder.genome_build
+        if kwd.get( 'runtool_btn', False ) or kwd.get( 'ajax_upload', False ) or cntrller == 'api':
+            error = False
+            if upload_option == 'upload_paths' and not trans.app.config.allow_library_path_paste:
+                error = True
+                message = '"allow_library_path_paste" is not defined in the Galaxy configuration file'
+            elif upload_option == 'upload_paths' and not is_admin:
+                error = True
+                message = 'Uploading files via filesystem paths can only be performed by administrators'
+            elif roles:
+                # Check to see if the user selected roles to associate with the DATASET_ACCESS permission
+                # on the dataset that would cause accessibility issues.
+                vars = dict( DATASET_ACCESS_in=roles )
+                permissions, in_roles, error, message = \
+                    trans.app.security_agent.derive_roles_from_access( trans, library.id, cntrller, library=True, **vars )
+            if error:
+                if cntrller == 'api':
+                    return 400, message
+                trans.response.send_redirect( web.url_for( controller='library_common',
+                                                           action='upload_library_dataset',
+                                                           cntrller=cntrller,
+                                                           library_id=library_id,
+                                                           folder_id=folder_id,
+                                                           replace_id=replace_id,
+                                                           upload_option=upload_option,
+                                                           show_deleted=show_deleted,
+                                                           message=message,
+                                                           status='error' ) )
+            else:
+                # See if we have any inherited templates.
+                if not info_association:
+                    info_association, inherited = folder.get_info_association( inherited=True )
+                if info_association and info_association.inheritable:
+                    template_id = str( info_association.template.id )
+                    widgets = folder.get_template_widgets( trans, get_contents=True )
+                    processed_widgets = []
+                    # The list of widgets may include an AddressField which we need to save if it is new
+                    for index, widget_dict in enumerate( widgets ):
+                        widget = widget_dict[ 'widget' ]
+                        if isinstance( widget, AddressField ):
+                            value = kwd.get( widget.name, '' )
+                            if value == 'new':
+                                if self.field_param_values_ok( widget.name, 'AddressField', **kwd ):
+                                    # Save the new address
+                                    address = trans.app.model.UserAddress( user=trans.user )
+                                    self.save_widget_field( trans, address, widget.name, **kwd )
+                                    widget.value = str( address.id )
+                                    widget_dict[ 'widget' ] = widget
+                                    processed_widgets.append( widget_dict )
+                                    # It is now critical to update the value of 'field_%i', replacing the string
+                                    # 'new' with the new address id.  This is necessary because the upload_dataset()
+                                    # method below calls the handle_library_params() method, which does not parse the
+                                    # widget fields, it instead pulls form values from kwd.  See the FIXME comments in the
+                                    # handle_library_params() method, and the CheckboxField code in the next conditional.
+                                    kwd[ widget.name ] = str( address.id )
+                                else:
+                                    # The invalid address won't be saved, but we cannot display error
+                                    # messages on the upload form due to the ajax upload already occurring.
+                                    # When we re-engineer the upload process ( currently under way ), we
+                                    # will be able to check the form values before the ajax upload occurs
+                                    # in the background.  For now, we'll do nothing...
+                                    pass
+                        elif isinstance( widget, CheckboxField ):
+                            # We need to check the value from kwd since util.Params would have munged the list if
+                            # the checkbox is checked.
+                            value = kwd.get( widget.name, '' )
+                            if CheckboxField.is_checked( value ):
+                                widget.value = 'true'
+                                widget_dict[ 'widget' ] = widget
+                                processed_widgets.append( widget_dict )
+                                kwd[ widget.name ] = 'true'
+                        else:
+                            processed_widgets.append( widget_dict )
+                    widgets = processed_widgets
+                created_outputs_dict = trans.webapp.controllers[ 'library_common' ].upload_dataset( trans,
+                                                                                                    cntrller=cntrller,
+                                                                                                    library_id=trans.security.encode_id( library.id ),
+                                                                                                    folder_id=trans.security.encode_id( folder.id ),
+                                                                                                    template_id=template_id,
+                                                                                                    widgets=widgets,
+                                                                                                    replace_dataset=replace_dataset,
+                                                                                                    **kwd )
+                if created_outputs_dict:
+                    if cntrller == 'api':
+                        # created_outputs_dict can be a string only if cntrller == 'api'
+                        if isinstance( created_outputs_dict, str ):
+                            return 400, created_outputs_dict
+                        elif isinstance( created_outputs_dict, tuple ):
+                            return created_outputs_dict[0], created_outputs_dict[1]
+                        return 200, created_outputs_dict
+                    total_added = len( created_outputs_dict )
+                    ldda_id_list = [ str( v.id ) for v in created_outputs_dict.values() ]
+                    created_ldda_ids = ",".join( ldda_id_list )
+                    if replace_dataset:
+                        message = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, escape( replace_dataset_name ), escape( folder.name ) )
+                    else:
+                        if not folder.parent:
+                            # Libraries have the same name as their root_folder
+                            message = "Added %d datasets to the library '%s' (each is selected).  " % ( total_added, escape( folder.name ) )
+                        else:
+                            message = "Added %d datasets to the folder '%s' (each is selected).  " % ( total_added, escape( folder.name ) )
+                        if cntrller == 'library_admin':
+                            message += "Click the Go button at the bottom of this page to edit the permissions on these datasets if necessary."
+                            status = 'done'
+                        else:
+                            # Since permissions on all LibraryDatasetDatasetAssociations must be the same at this point, we only need
+                            # to check one of them to see if the current user can manage permissions on them.
+                            check_ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id_list[0] )
+                            if trans.app.security_agent.can_manage_library_item( current_user_roles, check_ldda ):
+                                if replace_dataset:
+                                    default_action = ''
+                                else:
+                                    message += "Click the Go button at the bottom of this page to edit the permissions on these datasets if necessary."
+                                    default_action = 'manage_permissions'
+                            else:
+                                default_action = 'import_to_current_history'
+                            trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                       action='browse_library',
+                                                                       cntrller=cntrller,
+                                                                       id=library_id,
+                                                                       default_action=default_action,
+                                                                       created_ldda_ids=created_ldda_ids,
+                                                                       show_deleted=show_deleted,
+                                                                       message=message,
+                                                                       status='done' ) )
+                else:
+                    created_ldda_ids = ''
+                    message = "Upload failed"
+                    status = 'error'
+                    if cntrller == 'api':
+                        return 400, message
+                trans.response.send_redirect( web.url_for( controller='library_common',
+                                                           action='browse_library',
+                                                           cntrller=cntrller,
+                                                           id=library_id,
+                                                           created_ldda_ids=created_ldda_ids,
+                                                           show_deleted=show_deleted,
+                                                           message=message,
+                                                           status=status ) )
+        # Note: if the upload form was submitted due to refresh_on_change for a form field, we cannot re-populate
+        # the field for the selected file ( files_0|file_data ) if the user selected one.  This is because the value
+        # attribute of the html input file type field is typically ignored by browsers as a security precaution.
+
+        # See if we have any inherited templates.
+        if not info_association:
+            info_association, inherited = folder.get_info_association( inherited=True )
+            if info_association and info_association.inheritable:
+                widgets = folder.get_template_widgets( trans, get_contents=True )
+        if info_association:
+            # Retain contents of widget fields when form was submitted via refresh_on_change.
+            widgets = self.populate_widgets_from_kwd( trans, widgets, **kwd )
+            template_id = str( info_association.template.id )
+
+        # Send list of data formats to the upload form so the "extension" select list can be populated dynamically
+        file_formats = trans.app.datatypes_registry.upload_file_formats
+
+        dbkeys = trans.app.genomes.get_dbkeys( trans )
+        dbkeys.sort( key=lambda dbkey: dbkey[0].lower() )
+
+        # Send the current history to the form to enable importing datasets from history to library
+        history = trans.get_history()
+        if history is not None:
+            trans.sa_session.refresh( history )
+        if upload_option == 'upload_file' and trans.app.config.nginx_upload_path:
+            # If we're using nginx upload, override the form action -
+            # url_for is intentionally not used on the base URL here -
+            # nginx_upload_path is expected to include the proxy prefix if the
+            # administrator intends for it to be part of the URL.
+            action = trans.app.config.nginx_upload_path + '?nginx_redir=' + web.url_for( controller='library_common', action='upload_library_dataset' )
+        else:
+            action = web.url_for( controller='library_common', action='upload_library_dataset' )
+        do_not_display_options = []
+        if replace_dataset:
+            # TODO: Not sure why, but 'upload_paths' is not allowed if replacing a dataset.  See self.make_library_uploaded_dataset().
+            do_not_display_options = [ 'upload_directory', 'upload_paths' ]
+        upload_option_select_list = self._build_upload_option_select_list( trans, upload_option, is_admin, do_not_display_options )
+        roles_select_list = self._build_roles_select_list( trans, cntrller, library, util.listify( roles ) )
+        return trans.fill_template( '/library/common/upload.mako',
+                                    cntrller=cntrller,
+                                    upload_option_select_list=upload_option_select_list,
+                                    upload_option=upload_option,
+                                    action=action,
+                                    library_id=library_id,
+                                    folder_id=folder_id,
+                                    replace_dataset=replace_dataset,
+                                    file_formats=file_formats,
+                                    dbkeys=dbkeys,
+                                    last_used_build=last_used_build,
+                                    roles_select_list=roles_select_list,
+                                    history=history,
+                                    widgets=widgets,
+                                    template_id=template_id,
+                                    to_posix_lines=to_posix_lines,
+                                    space_to_tab=space_to_tab,
+                                    link_data_only=link_data_only,
+                                    show_deleted=show_deleted,
+                                    ldda_message=ldda_message,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
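+    # upload_dataset performs the actual upload by driving the standard
+    # 'upload1' tool: library-specific parameters are folded into the tool
+    # state, and on success the created job's output datasets are returned.
+    # On error it returns a ( response_code, message ) tuple for the API, or
+    # redirects back to the upload form for the web UI.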
+    def upload_dataset( self, trans, cntrller, library_id, folder_id, replace_dataset=None, **kwd ):
+        # Set up the traditional tool state/params
+        tool_id = 'upload1'
+        tool = trans.app.toolbox.get_tool( tool_id )
+        state = tool.new_state( trans )
+        tool.populate_state( trans, tool.inputs, kwd, state.inputs )
+        tool_params = state.inputs
+        dataset_upload_inputs = []
+        for input_name, input in tool.inputs.iteritems():
+            if input.type == "upload_dataset":
+                dataset_upload_inputs.append( input )
+        # Library-specific params
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        message = escape( kwd.get( 'message', '' ) )
+        server_dir = kwd.get( 'server_dir', '' )
+        if replace_dataset not in [ None, 'None' ]:
+            replace_id = trans.security.encode_id( replace_dataset.id )
+        else:
+            replace_id = None
+        upload_option = kwd.get( 'upload_option', 'upload_file' )
+        response_code = 200
+        if upload_option == 'upload_directory':
+            if server_dir in [ None, 'None', '' ]:
+                response_code = 400
+            if cntrller == 'library_admin' or ( cntrller == 'api' and trans.user_is_admin() ):
+                import_dir = trans.app.config.library_import_dir
+                import_dir_desc = 'library_import_dir'
+                full_dir = os.path.join( import_dir, server_dir )
+            else:
+                import_dir = trans.app.config.user_library_import_dir
+                import_dir_desc = 'user_library_import_dir'
+                if server_dir == trans.user.email:
+                    full_dir = os.path.join( import_dir, server_dir )
+                else:
+                    full_dir = os.path.join( import_dir, trans.user.email, server_dir )
+            if import_dir:
+                if response_code == 400:
+                    # No server_dir was supplied on the upload form.
+                    message = 'Select a directory'
+            else:
+                response_code = 403
+                message = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
+        elif upload_option == 'upload_paths':
+            if not trans.app.config.allow_library_path_paste:
+                response_code = 403
+                message = '"allow_library_path_paste" is not defined in the Galaxy configuration file'
+        # Some error handling should be added to this method.
+        try:
+            # FIXME: instead of passing params here ( which have been processed by util.Params(), the original kwd
+            # should be passed so that complex objects that may have been included in the initial request remain.
+            library_bunch = upload_common.handle_library_params( trans, kwd, folder_id, replace_dataset )
+        except Exception:
+            response_code = 500
+            message = "Unable to parse upload parameters, please report this error."
+        # Proceed with (mostly) regular upload processing if we're still errorless
+        if response_code == 200:
+            precreated_datasets = upload_common.get_precreated_datasets( trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=cntrller )
+            if upload_option == 'upload_file':
+                tool_params = upload_common.persist_uploads( tool_params )
+                uploaded_datasets = upload_common.get_uploaded_datasets( trans, cntrller, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch )
+            elif upload_option == 'upload_directory':
+                uploaded_datasets, response_code, message = self.get_server_dir_uploaded_datasets( trans, cntrller, kwd, full_dir, import_dir_desc, library_bunch, response_code, message )
+            elif upload_option == 'upload_paths':
+                uploaded_datasets, response_code, message = self.get_path_paste_uploaded_datasets( trans, cntrller, kwd, library_bunch, response_code, message )
+            upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
+            if upload_option == 'upload_file' and not uploaded_datasets:
+                response_code = 400
+                message = 'Select a file, enter a URL or enter text'
+        if response_code != 200:
+            if cntrller == 'api':
+                return ( response_code, message )
+            trans.response.send_redirect( web.url_for( controller='library_common',
+                                                       action='upload_library_dataset',
+                                                       cntrller=cntrller,
+                                                       library_id=library_id,
+                                                       folder_id=folder_id,
+                                                       replace_id=replace_id,
+                                                       upload_option=upload_option,
+                                                       show_deleted=show_deleted,
+                                                       message=message,
+                                                       status='error' ) )
+        json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
+        data_list = [ ud.data for ud in uploaded_datasets ]
+        job_params = {}
+        job_params['link_data_only'] = dumps( kwd.get( 'link_data_only', 'copy_files' ) )
+        job_params['uuid'] = dumps( kwd.get( 'uuid', None ) )
+        job, output = upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params )
+        trans.sa_session.add( job )
+        trans.sa_session.flush()
+        return output
+
+    def make_library_uploaded_dataset( self, trans, cntrller, params, name, path, type, library_bunch, in_folder=None ):
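+        """
+        Build the upload description ( a Bunch ) for a single file found via
+        the 'upload_directory' or 'upload_paths' options.  When the user
+        chose to link rather than copy the data, the dataset's file_name is
+        pointed directly at the existing path and the dataset is flagged so
+        it is never purged.
+        """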
+        link_data_only = params.get( 'link_data_only', 'copy_files' )
+        uuid_str = params.get( 'uuid', None )
+        file_type = params.get( 'file_type', None )
+        library_bunch.replace_dataset = None  # not valid for these types of upload
+        uploaded_dataset = util.bunch.Bunch()
+        new_name = name
+        # Remove a compressed file extension, if any, but only if we're
+        # copying files into Galaxy's file space.  Slice the suffix off
+        # rather than using str.rstrip(), which strips a character set, not
+        # a suffix ( e.g. 'foo.tagz'.rstrip( '.gz' ) yields 'foo.ta' ).
+        if link_data_only == 'copy_files':
+            if new_name.endswith( '.gz' ):
+                new_name = new_name[ :-len( '.gz' ) ]
+            elif new_name.endswith( '.zip' ):
+                new_name = new_name[ :-len( '.zip' ) ]
+        uploaded_dataset.name = new_name
+        uploaded_dataset.path = path
+        uploaded_dataset.type = type
+        uploaded_dataset.ext = None
+        uploaded_dataset.file_type = file_type
+        uploaded_dataset.dbkey = params.get( 'dbkey', None )
+        uploaded_dataset.to_posix_lines = params.get('to_posix_lines', None)
+        uploaded_dataset.space_to_tab = params.get( 'space_to_tab', None )
+        if in_folder:
+            uploaded_dataset.in_folder = in_folder
+        uploaded_dataset.data = upload_common.new_upload( trans, cntrller, uploaded_dataset, library_bunch )
+        uploaded_dataset.link_data_only = link_data_only
+        uploaded_dataset.uuid = uuid_str
+        if link_data_only == 'link_to_files':
+            uploaded_dataset.data.file_name = os.path.abspath( path )
+            # Since we are not copying the file into Galaxy's managed
+            # default file location, the dataset should never be purgeable.
+            uploaded_dataset.data.dataset.purgable = False
+            trans.sa_session.add_all( ( uploaded_dataset.data, uploaded_dataset.data.dataset ) )
+            trans.sa_session.flush()
+        return uploaded_dataset
+
+    def get_server_dir_uploaded_datasets( self, trans, cntrller, params, full_dir, import_dir_desc, library_bunch, response_code, message ):
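+        """
+        Wrap each regular file found under the selected server directory as a
+        library upload.  Returns a ( uploaded_datasets, response_code,
+        message ) triple, following the same error convention as the caller.
+        """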
+        dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc)
+        files = dir_response[0]
+        if not files:
+            return dir_response
+        uploaded_datasets = []
+        for file in files:
+            name = os.path.basename( file )
+            uploaded_datasets.append( self.make_library_uploaded_dataset( trans, cntrller, params, name, file, 'server_dir', library_bunch ) )
+        return uploaded_datasets, 200, None
+
+    def _get_server_dir_files( self, params, full_dir, import_dir_desc ):
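+        """
+        Return a ( files, response_code, message ) triple listing the regular
+        files directly under full_dir.  When linking rather than copying,
+        symlinked entries are dereferenced exactly one level so the library
+        records the real path instead of the link.
+        """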
+        files = []
+        try:
+            for entry in os.listdir( full_dir ):
+                # Only import regular files
+                path = os.path.join( full_dir, entry )
+                link_data_only = params.get( 'link_data_only', 'copy_files' )
+                if os.path.islink( full_dir ) and link_data_only == 'link_to_files':
+                    # If we're linking instead of copying and the
+                    # sub-"directory" in the import dir is actually a symlink,
+                    # dereference the symlink, but not any of its contents.
+                    link_path = os.readlink( full_dir )
+                    if os.path.isabs( link_path ):
+                        path = os.path.join( link_path, entry )
+                    else:
+                        path = os.path.abspath( os.path.join( link_path, entry ) )
+                elif os.path.islink( path ) and os.path.isfile( path ) and link_data_only == 'link_to_files':
+                    # If we're linking instead of copying and the "file" in the
+                    # sub-directory of the import dir is actually a symlink,
+                    # dereference the symlink (one dereference only, Vasili).
+                    link_path = os.readlink( path )
+                    if os.path.isabs( link_path ):
+                        path = link_path
+                    else:
+                        path = os.path.abspath( os.path.join( os.path.dirname( path ), link_path ) )
+                if os.path.isfile( path ):
+                    files.append( path )
+        except Exception as e:
+            message = "Unable to get file list for configured %s, error: %s" % ( import_dir_desc, str( e ) )
+            response_code = 500
+            return None, response_code, message
+        if not files:
+            message = "The directory '%s' contains no valid files" % full_dir
+            response_code = 400
+            return None, response_code, message
+        return files, None, None
+
+    def get_path_paste_uploaded_datasets( self, trans, cntrller, params, library_bunch, response_code, message ):
+        preserve_dirs = util.string_as_bool( params.get( 'preserve_dirs', False ) )
+        uploaded_datasets = []
+        (files_and_folders, _response_code, _message) = self._get_path_files_and_folders(params, preserve_dirs)
+        if _response_code:
+            return (uploaded_datasets, _response_code, _message)
+        for (path, name, folder) in files_and_folders:
+            uploaded_datasets.append( self.make_library_uploaded_dataset( trans, cntrller, params, name, path, 'path_paste', library_bunch, folder ) )
+        return uploaded_datasets, 200, None
+
+    def _get_path_files_and_folders( self, params, preserve_dirs ):
+        problem_response = self._check_path_paste_params( params )
+        if problem_response:
+            return problem_response
+        files_and_folders = []
+        for (line, path) in self._paths_list( params ):
+            line_files_and_folders = self._get_single_path_files_and_folders( line, path, preserve_dirs )
+            files_and_folders.extend( line_files_and_folders )
+        return files_and_folders, None, None
+
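+    # Expand one pasted path: a plain file contributes itself, while a
+    # directory is walked recursively.  With preserve_dirs enabled, each file
+    # also records its directory relative to the pasted path so the on-disk
+    # layout can be recreated as library folders.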
+    def _get_single_path_files_and_folders(self, line, path, preserve_dirs):
+        files_and_folders = []
+        if os.path.isfile( path ):
+            name = os.path.basename( path )
+            files_and_folders.append((path, name, None))
+        for basedir, dirs, files in os.walk( line ):
+            for file in files:
+                file_path = os.path.abspath( os.path.join( basedir, file ) )
+                if preserve_dirs:
+                    in_folder = os.path.dirname( file_path.replace( path, '', 1 ).lstrip( '/' ) )
+                else:
+                    in_folder = None
+                files_and_folders.append((file_path, file, in_folder))
+        return files_and_folders
+
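+    # Parse the 'filesystem_paths' textarea into ( raw_line, absolute_path )
+    # pairs, skipping blank lines.  For example ( hypothetical paths ), the
+    # submission:
+    #     /data/run1/reads.fastq
+    #     /data/run2
+    # yields [ ( '/data/run1/reads.fastq', '/data/run1/reads.fastq' ),
+    #          ( '/data/run2', '/data/run2' ) ].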
+    def _paths_list(self, params):
+        return [ ( line.strip(), os.path.abspath( line.strip() ) ) for line in params.get( 'filesystem_paths', '' ).splitlines() if line.strip() ]
+
+    def _check_path_paste_params(self, params):
+        if params.get( 'filesystem_paths', '' ) == '':
+            message = "No paths entered in the upload form"
+            response_code = 400
+            return None, response_code, message
+        bad_paths = []
+        for (_, path) in self._paths_list( params ):
+            if not os.path.exists( path ):
+                bad_paths.append( path )
+        if bad_paths:
+            message = 'Invalid paths: "%s".' % '", "'.join( bad_paths )
+            response_code = 400
+            return None, response_code, message
+        return None
+
+    @web.expose
+    def add_history_datasets_to_library( self, trans, cntrller, library_id, folder_id, hda_ids='', **kwd ):
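+        """
+        Copy the selected datasets ( hda_ids ) from the user's current
+        history into a library folder, either as new library datasets or, if
+        replace_id is set, as new versions of an existing library dataset.
+        """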
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        ldda_message = escape( kwd.get( 'ldda_message', '' ) )
+        show_deleted = kwd.get( 'show_deleted', False )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        replace_id = kwd.get( 'replace_id', None )
+        replace_dataset = None
+        upload_option = kwd.get( 'upload_option', 'import_from_history' )
+        if kwd.get( 'files_0|to_posix_lines', False ):
+            to_posix_lines = kwd.get( 'files_0|to_posix_lines', '' )
+        else:
+            to_posix_lines = kwd.get( 'to_posix_lines', '' )
+        if kwd.get( 'files_0|space_to_tab', False ):
+            space_to_tab = kwd.get( 'files_0|space_to_tab', '' )
+        else:
+            space_to_tab = kwd.get( 'space_to_tab', '' )
+        link_data_only = kwd.get( 'link_data_only', 'copy_files' )
+        dbkey = kwd.get( 'dbkey', '?' )
+        if isinstance( dbkey, list ):
+            last_used_build = dbkey[0]
+        else:
+            last_used_build = dbkey
+        roles = kwd.get( 'roles', '' )
+        is_admin = trans.user_is_admin() and cntrller in ( 'library_admin', 'api' )
+        current_user_roles = trans.get_current_user_roles()
+        info_association, inherited = None, None
+        template_id = "None"
+        if replace_id not in [ None, 'None' ]:
+            try:
+                replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( replace_id ) )
+            except Exception:
+                replace_dataset = None
+            self._check_access( trans, cntrller, is_admin, replace_dataset, current_user_roles, use_panels, library_id, show_deleted )
+            self._check_modify( trans, cntrller, is_admin, replace_dataset, current_user_roles, use_panels, library_id, show_deleted )
+            library = replace_dataset.folder.parent_library
+            folder = replace_dataset.folder
+            last_used_build = replace_dataset.library_dataset_dataset_association.dbkey
+            info_association, inherited = replace_dataset.library_dataset_dataset_association.get_info_association()
+            if info_association and ( not( inherited ) or info_association.inheritable ):
+                template_id = str( info_association.template.id )
+        else:
+            folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( folder_id ) )
+            self._check_access( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+            self._check_add( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+            library = folder.parent_library
+            last_used_build = folder.genome_build
+        # See if the current history is empty
+        history = trans.get_history()
+        trans.sa_session.refresh( history )
+        if not history.active_datasets:
+            message = 'Your current history is empty'
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status='error' ) )
+        if kwd.get( 'add_history_datasets_to_library_button', False ):
+            hda_ids = util.listify( hda_ids )
+            if hda_ids:
+                dataset_names = []
+                created_ldda_ids = ''
+                for hda_id in hda_ids:
+                    try:
+                        hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( hda_id ) )
+                    except Exception:
+                        hda = None
+                    self._check_access( trans, cntrller, is_admin, hda, current_user_roles, use_panels, library_id, show_deleted )
+                    if roles:
+                        role_ids = roles.split( ',' )
+                        role_obj_list = [ trans.sa_session.query( trans.model.Role ).get( role_id ) for role_id in role_ids ]
+                    else:
+                        role_obj_list = []
+                    ldda = hda.to_library_dataset_dataset_association( trans,
+                                                                       target_folder=folder,
+                                                                       replace_dataset=replace_dataset,
+                                                                       roles=role_obj_list,
+                                                                       ldda_message=ldda_message )
+                    created_ldda_ids = '%s,%s' % ( created_ldda_ids, str( ldda.id ) )
+                    dataset_names.append( ldda.name )
+                    if not replace_dataset:
+                        # If replace_dataset is None, the Library level permissions will be taken from the folder and applied to the new
+                        # LDDA and LibraryDataset.
+                        trans.app.security_agent.copy_library_permissions( trans, folder, ldda )
+                        trans.app.security_agent.copy_library_permissions( trans, folder, ldda.library_dataset )
+                    else:
+                        library_bunch = upload_common.handle_library_params( trans, kwd, folder_id, replace_dataset )
+                        if library_bunch.template and library_bunch.template_field_contents:
+                            # Since information templates are inherited, the template fields can be displayed on the upload form.
+                            # If the user has added field contents, we'll need to create a new form_values and info_association
+                            # for the new library_dataset_dataset_association object.
+                            # Create a new FormValues object, using the template we previously retrieved
+                            form_values = trans.app.model.FormValues( library_bunch.template, library_bunch.template_field_contents )
+                            trans.sa_session.add( form_values )
+                            trans.sa_session.flush()
+                            # Create a new info_association between the current ldda and form_values
+                            info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( ldda, library_bunch.template, form_values )
+                            trans.sa_session.add( info_association )
+                            trans.sa_session.flush()
+                    # Make sure to apply any defined dataset permissions, allowing the permissions inherited from the folder to
+                    # over-ride the same permissions on the dataset, if they exist.
+                    dataset_permissions_dict = trans.app.security_agent.get_permissions( hda.dataset )
+                    current_library_dataset_actions = [ permission.action for permission in ldda.library_dataset.actions ]
+                    # The DATASET_MANAGE_PERMISSIONS permission on a dataset is a special case because if
+                    # it exists, then we need to apply the LIBRARY_MANAGE permission to the library dataset.
+                    dataset_manage_permissions_action = trans.app.security_agent.get_action( 'DATASET_MANAGE_PERMISSIONS' ).action
+                    flush_needed = False
+                    for action, dataset_permissions_roles in dataset_permissions_dict.items():
+                        if isinstance( action, Action ):
+                            action = action.action
+                        if action == dataset_manage_permissions_action:
+                            # Apply the LIBRARY_MANAGE permission to the library dataset.
+                            action = trans.app.security_agent.get_action( 'LIBRARY_MANAGE' ).action
+                        # Allow the permissions inherited from the folder to over-ride the same permissions on the dataset.
+                        if action not in current_library_dataset_actions:
+                            for ldp in [ trans.model.LibraryDatasetPermissions( action, ldda.library_dataset, role ) for role in dataset_permissions_roles ]:
+                                trans.sa_session.add( ldp )
+                                flush_needed = True
+                    if flush_needed:
+                        trans.sa_session.flush()
+                    # Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
+                    trans.app.security_agent.copy_library_permissions( trans, ldda.library_dataset, ldda )
+                if created_ldda_ids:
+                    created_ldda_ids = created_ldda_ids.lstrip( ',' )
+                    ldda_id_list = created_ldda_ids.split( ',' )
+                    total_added = len( ldda_id_list )
+                    if replace_dataset:
+                        message = "Added %d dataset versions to the library dataset '%s' in the folder '%s'." % ( total_added, escape( replace_dataset.name ), escape( folder.name ) )
+                    else:
+                        if not folder.parent:
+                            # Libraries have the same name as their root_folder
+                            message = "Added %d datasets to the library '%s' (each is selected).  " % ( total_added, escape( folder.name ) )
+                        else:
+                            message = "Added %d datasets to the folder '%s' (each is selected).  " % ( total_added, escape( folder.name ) )
+                        if cntrller == 'library_admin':
+                            message += "Click the Go button at the bottom of this page to edit the permissions on these datasets if necessary."
+                        else:
+                            # Since permissions on all LibraryDatasetDatasetAssociations must be the same at this point, we only need
+                            # to check one of them to see if the current user can manage permissions on them.
+                            check_ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id_list[0] )
+                            if trans.app.security_agent.can_manage_library_item( current_user_roles, check_ldda ):
+                                if not replace_dataset:
+                                    message += "Click the Go button at the bottom of this page to edit the permissions on these datasets if necessary."
+                    return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                      action='browse_library',
+                                                                      cntrller=cntrller,
+                                                                      id=library_id,
+                                                                      created_ldda_ids=created_ldda_ids,
+                                                                      show_deleted=show_deleted,
+                                                                      message=message,
+                                                                      status='done' ) )
+            else:
+                message = 'Select at least one dataset from the list of active datasets in your current history'
+                status = 'error'
+                upload_option = kwd.get( 'upload_option', 'import_from_history' )
+                # Send list of data formats to the upload form so the "extension" select list can be populated dynamically
+                file_formats = trans.app.datatypes_registry.upload_file_formats
+                # Send list of genome builds to the form so the "dbkey" select list can be populated dynamically
+
+                def get_dbkey_options( last_used_build ):
+                    for dbkey, build_name in trans.app.genome_builds.get_genome_build_names( trans=trans ):
+                        yield build_name, dbkey, ( dbkey == last_used_build )
+                dbkeys = get_dbkey_options( last_used_build )
+                # Send the current history to the form to enable importing datasets from history to library
+                history = trans.get_history()
+                trans.sa_session.refresh( history )
+                action = 'add_history_datasets_to_library'
+                upload_option_select_list = self._build_upload_option_select_list( trans, upload_option, is_admin )
+                roles_select_list = self._build_roles_select_list( trans, cntrller, library, util.listify( roles ) )
+                return trans.fill_template( "/library/common/upload.mako",
+                                            cntrller=cntrller,
+                                            upload_option_select_list=upload_option_select_list,
+                                            upload_option=upload_option,
+                                            action=action,
+                                            library_id=library_id,
+                                            folder_id=folder_id,
+                                            replace_dataset=replace_dataset,
+                                            file_formats=file_formats,
+                                            dbkeys=dbkeys,
+                                            last_used_build=last_used_build,
+                                            roles_select_list=roles_select_list,
+                                            history=history,
+                                            widgets=[],
+                                            template_id=template_id,
+                                            to_posix_lines=to_posix_lines,
+                                            space_to_tab=space_to_tab,
+                                            link_data_only=link_data_only,
+                                            show_deleted=show_deleted,
+                                            ldda_message=ldda_message,
+                                            message=escape( message ),
+                                            status=escape( status ) )
+
+    def _build_roles_select_list( self, trans, cntrller, library, selected_role_ids=[] ):
+        # Get the list of legitimate roles to display on the upload form.  If the library is public,
+        # all active roles are legitimate.  If the library is restricted by the LIBRARY_ACCESS permission, only
+        # the set of all roles associated with users that have that permission are legitimate.
+        legitimate_roles = trans.app.security_agent.get_legitimate_roles( trans, library, cntrller )
+        if legitimate_roles:
+            # Build the roles multi-select list using the list of legitimate roles, making sure to select any that
+            # were selected before refresh_on_change, if one occurred.
+            roles_select_list = SelectField( "roles", multiple="true", size="5" )
+            for role in legitimate_roles:
+                selected = str( role.id ) in selected_role_ids
+                roles_select_list.add_option( text=role.name, value=str( role.id ), selected=selected )
+            return roles_select_list
+        else:
+            return None
+
+    def _build_upload_option_select_list( self, trans, upload_option, is_admin, do_not_include_values=[] ):
+        # Build the upload_option select list.  The do_not_include_values param can contain options that
+        # should not be included in the list.  For example, the 'upload_directory' option should not be
+        # included if uploading a new version of a library dataset.
+        upload_refresh_on_change_values = []
+        for option_value, option_label in trans.model.LibraryDataset.upload_options:
+            if option_value not in do_not_include_values:
+                upload_refresh_on_change_values.append( option_value )
+        upload_option_select_list = SelectField( 'upload_option',
+                                                 refresh_on_change=True,
+                                                 refresh_on_change_values=upload_refresh_on_change_values )
+        for option_value, option_label in trans.model.LibraryDataset.upload_options:
+            if option_value not in do_not_include_values:
+                if option_value == 'upload_directory':
+                    if is_admin and not trans.app.config.library_import_dir:
+                        continue
+                    elif not is_admin:
+                        if not trans.app.config.user_library_import_dir:
+                            continue
+                        path = os.path.join( trans.app.config.user_library_import_dir, trans.user.email )
+                        if not os.path.isdir( path ):
+                            try:
+                                os.makedirs( path )
+                            except Exception:
+                                continue
+                elif option_value == 'upload_paths':
+                    if not is_admin or not trans.app.config.allow_library_path_paste:
+                        continue
+                upload_option_select_list.add_option( option_label, option_value, selected=option_value == upload_option )
+        return upload_option_select_list
+
+    @web.expose
+    def download_dataset_from_folder( self, trans, cntrller, id, library_id=None, **kwd ):
+        """Catches the dataset id and displays file contents as directed"""
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( id ) )
+        except Exception:
+            ldda = None
+        self._check_access( trans, cntrller, is_admin, ldda, current_user_roles, use_panels, library_id, show_deleted )
+        composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+        ext = ldda.extension
+        if ext in composite_extensions:
+            # is composite - must return a zip of contents and the html file itself - ugh - should be reversible at upload!
+            # use act_on_multiple_datasets( self, trans, cntrller, library_id, ldda_ids='', **kwd ) since it does what we need
+            kwd['do_action'] = 'zip'
+            return self.act_on_multiple_datasets( trans, cntrller, library_id, ldda_ids=[id, ], **kwd )
+        else:
+            trans.response.set_content_type( ldda.get_mime() )
+            fStat = os.stat( ldda.file_name )
+            trans.response.headers[ 'Content-Length' ] = int( fStat.st_size )
+            fname = ldda.name
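+            # Replace any character not in FILENAME_VALID_CHARS with '_' and
+            # cap the suggested download filename at 150 characters.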
+            fname = ''.join( c if c in FILENAME_VALID_CHARS else '_' for c in fname )[ 0:150 ]
+            trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s"' % fname
+            try:
+                return open( ldda.file_name )
+            except Exception:
+                message = 'This dataset contains no content'
+        return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                          action='browse_library',
+                                                          cntrller=cntrller,
+                                                          use_panels=use_panels,
+                                                          id=library_id,
+                                                          show_deleted=show_deleted,
+                                                          message=message,
+                                                          status='error' ) )
+
+    @web.expose
+    def library_dataset_info( self, trans, cntrller, id, library_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            library_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( id ) )
+        except Exception:
+            library_dataset = None
+        self._check_access( trans, cntrller, is_admin, library_dataset, current_user_roles, use_panels, library_id, show_deleted )
+        if kwd.get( 'edit_attributes_button', False ):
+            self._check_modify( trans, cntrller, is_admin, library_dataset, current_user_roles, use_panels, library_id, show_deleted )
+            new_name = kwd.get( 'name', '' )
+            new_info = kwd.get( 'info', '' )
+            if not new_name:
+                message = 'Enter a valid name'
+                status = 'error'
+            else:
+                library_dataset.name = new_name
+                library_dataset.info = new_info
+                trans.sa_session.add( library_dataset )
+                trans.sa_session.flush()
+                message = "Information updated for library dataset '%s'." % escape( library_dataset.name )
+                status = 'done'
+        # See if we have any associated templates
+        widgets = []
+        widget_fields_have_contents = False
+        info_association, inherited = library_dataset.library_dataset_dataset_association.get_info_association()
+        if info_association and ( not( inherited ) or info_association.inheritable ):
+            widgets = library_dataset.library_dataset_dataset_association.get_template_widgets( trans )
+            widget_fields_have_contents = self.widget_fields_have_contents( widgets )
+        return trans.fill_template( '/library/common/library_dataset_info.mako',
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    library_dataset=library_dataset,
+                                    library_id=library_id,
+                                    current_user_roles=current_user_roles,
+                                    info_association=info_association,
+                                    inherited=inherited,
+                                    widgets=widgets,
+                                    widget_fields_have_contents=widget_fields_have_contents,
+                                    show_deleted=show_deleted,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def library_dataset_permissions( self, trans, cntrller, id, library_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            library_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( id ) )
+        except Exception:
+            library_dataset = None
+        self._check_access( trans, cntrller, is_admin, library_dataset, current_user_roles, use_panels, library_id, show_deleted )
+        self._check_manage( trans, cntrller, is_admin, library_dataset, current_user_roles, use_panels, library_id, show_deleted )
+        if kwd.get( 'update_roles_button', False ):
+            # The user clicked the Save button on the 'Associate With Roles' form
+            permissions = {}
+            for k, v in trans.app.model.Library.permitted_actions.items():
+                if k != 'LIBRARY_ACCESS':
+                    # LIBRARY_ACCESS is a special permission set only at the library level
+                    # and it is not inherited.
+                    in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ]
+                    permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
+            # Set the LIBRARY permissions on the LibraryDataset
+            # NOTE: the LibraryDataset and LibraryDatasetDatasetAssociation will be set with the same permissions
+            error = trans.app.security_agent.set_all_library_permissions( trans, library_dataset, permissions )
+            trans.sa_session.refresh( library_dataset )
+            if error:
+                message = error
+                status = 'error'
+            else:
+                # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation
+                trans.app.security_agent.set_all_library_permissions( trans, library_dataset.library_dataset_dataset_association, permissions )
+                trans.sa_session.refresh( library_dataset.library_dataset_dataset_association )
+                message = "Permisisons updated for library dataset '%s'." % escape( library_dataset.name )
+                status = 'done'
+        roles = trans.app.security_agent.get_legitimate_roles( trans, library_dataset, cntrller )
+        return trans.fill_template( '/library/common/library_dataset_permissions.mako',
+                                    cntrller=cntrller,
+                                    use_panels=use_panels,
+                                    library_dataset=library_dataset,
+                                    library_id=library_id,
+                                    roles=roles,
+                                    current_user_roles=current_user_roles,
+                                    show_deleted=show_deleted,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def make_library_item_public( self, trans, cntrller, library_id, item_type, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        current_user_roles = trans.get_current_user_roles()
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        if item_type == 'library':
+            library = trans.sa_session.query( trans.model.Library ).get( trans.security.decode_id( id ) )
+            self._check_access( trans, cntrller, is_admin, library, current_user_roles, use_panels, library_id, show_deleted )
+            self._check_manage( trans, cntrller, is_admin, library, current_user_roles, use_panels, library_id, show_deleted )
+            contents = util.string_as_bool( kwd.get( 'contents', 'False' ) )
+            trans.app.security_agent.make_library_public( library, contents=contents )
+            if contents:
+                message = "The data library (%s) and all its contents have been made publicly accessible." % escape( library.name )
+            else:
+                message = "The data library (%s) has been made publicly accessible, but access to its contents has been left unchanged." % escape( library.name )
+        elif item_type == 'folder':
+            folder = trans.sa_session.query( trans.model.LibraryFolder ).get( trans.security.decode_id( id ) )
+            self._check_access( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+            self._check_manage( trans, cntrller, is_admin, folder, current_user_roles, use_panels, library_id, show_deleted )
+            trans.app.security_agent.make_folder_public( folder )
+            message = "All of the contents of folder (%s) have been made publicly accessible." % escape( folder.name )
+        elif item_type == 'ldda':
+            ldda = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( id ) )
+            self._check_access( trans, cntrller, is_admin, ldda.library_dataset, current_user_roles, use_panels, library_id, show_deleted )
+            self._check_manage( trans, cntrller, is_admin, ldda.library_dataset, current_user_roles, use_panels, library_id, show_deleted )
+            trans.app.security_agent.make_dataset_public( ldda.dataset )
+            message = "The libary dataset (%s) has been made publicly accessible." % escape( ldda.name )
+        else:
+            message = "Invalid item_type (%s) received." % escape( str( item_type ) )
+            status = 'error'
+        return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                          action='browse_library',
+                                                          cntrller=cntrller,
+                                                          use_panels=use_panels,
+                                                          id=library_id,
+                                                          show_deleted=show_deleted,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    def act_on_multiple_datasets( self, trans, cntrller, library_id=None, ldda_ids='', **kwd ):
+        # This method is called from 1 of 3 places:
+        # - this controller's download_dataset_from_folder() method
+        # - the browse_library.mako template
+        # - the library_dataset_search_results.mako template
+        # In the last case above, we will not have a library_id
+        class NgxZip( object ):
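+            # Accumulates a manifest for the nginx mod_zip module: one
+            # "<crc32> <size> <url> <archive path>" line per file, where '-'
+            # tells nginx to compute the crc itself.  The manifest is served
+            # as the response body with the X-Archive-Files header set below.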
+            def __init__( self, url_base ):
+                self.files = {}
+                self.url_base = url_base
+
+            def add( self, file, relpath ):
+                self.files[file] = relpath
+
+            def __str__( self ):
+                rval = ''
+                for fname, relpath in self.files.items():
+                    crc = '-'
+                    size = os.stat( fname ).st_size
+                    quoted_fname = urllib.quote_plus( fname, '/' )
+                    rval += '%s %i %s%s %s\r\n' % ( crc, size, self.url_base, quoted_fname, relpath )
+                return rval
+        # Perform an action on a list of library datasets.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        action = kwd.get( 'do_action', None )
+        lddas = []
+        error = False
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_user_roles = trans.get_current_user_roles()
+        current_history = trans.get_history()
+        if not ldda_ids:
+            error = True
+            message = 'You must select at least one dataset.'
+        elif not action:
+            error = True
+            message = 'You must select an action to perform on the selected datasets.'
+        else:
+            if action in [ 'import_to_current_history', 'import_to_histories' ]:
+                new_kwd = {}
+                if current_history is not None and action == 'import_to_current_history':
+                    encoded_current_history_id = trans.security.encode_id( current_history.id )
+                    selected_history_id = encoded_current_history_id
+                    new_kwd[ 'do_action' ] = action
+                    new_kwd[ 'target_history_ids' ] = encoded_current_history_id
+                    new_kwd[ 'import_datasets_to_histories_button' ] = 'Import library datasets'
+                else:
+                    selected_history_id = ''
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='import_datasets_to_histories',
+                                                                  cntrller=cntrller,
+                                                                  library_id=library_id,
+                                                                  selected_history_id=selected_history_id,
+                                                                  ldda_ids=ldda_ids,
+                                                                  use_panels=use_panels,
+                                                                  show_deleted=show_deleted,
+                                                                  message=message,
+                                                                  status=status,
+                                                                  **new_kwd ) )
+            if action == 'move':
+                if library_id in [ 'none', 'None', None ]:
+                    source_library_id = ''
+                else:
+                    source_library_id = library_id
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='move_library_item',
+                                                                  cntrller=cntrller,
+                                                                  source_library_id=source_library_id,
+                                                                  item_type='ldda',
+                                                                  item_id=ldda_ids,
+                                                                  use_panels=use_panels,
+                                                                  show_deleted=show_deleted,
+                                                                  message=message,
+                                                                  status=status ) )
+            ldda_ids = util.listify( ldda_ids )
+            for ldda_id in ldda_ids:
+                try:
+                    # Load the requested ldda and check whether the user has access to it
+                    ldda = self.get_library_dataset_dataset_association( trans, ldda_id )
+                    assert not ldda.dataset.purged
+                    lddas.append( ldda )
+                except Exception:
+                    ldda = None
+                    message += "Invalid library dataset id (%s) specified.  " % str( ldda_id )
+        if not error:
+            if action == 'manage_permissions':
+                valid_ldda_ids = []
+                valid_lddas = []
+                invalid_lddas = []
+                for ldda in lddas:
+                    if is_admin or trans.app.security_agent.can_manage_library_item( current_user_roles, ldda ):
+                        valid_lddas.append( ldda )
+                        valid_ldda_ids.append( ldda.id )
+                    else:
+                        invalid_lddas.append( ldda )
+                if invalid_lddas:
+                    message += "You are not authorized to manage permissions on %s: " % inflector.cond_plural( len( invalid_lddas ), "dataset" )
+                    for ldda in invalid_lddas:
+                        message += '(%s)' % escape( ldda.name )
+                    message += '.  '
+                if valid_ldda_ids:
+                    encoded_ldda_ids = [ trans.security.encode_id( ldda_id ) for ldda_id in valid_ldda_ids ]
+                    folder_id = trans.security.encode_id( valid_lddas[0].library_dataset.folder.id )
+                    trans.response.send_redirect( web.url_for( controller='library_common',
+                                                               action='ldda_permissions',
+                                                               cntrller=cntrller,
+                                                               use_panels=use_panels,
+                                                               library_id=library_id,
+                                                               folder_id=folder_id,
+                                                               id=",".join( encoded_ldda_ids ),
+                                                               show_deleted=show_deleted,
+                                                               message=message,
+                                                               status=status ) )
+                else:
+                    message = "You are not authorized to manage permissions on any of the selected datasets."
+            elif action == 'delete':
+                valid_lddas = []
+                invalid_lddas = []
+                for ldda in lddas:
+                    if is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, ldda ):
+                        valid_lddas.append( ldda )
+                    else:
+                        invalid_lddas.append( ldda )
+                if invalid_lddas:
+                    message += "You are not authorized to delete %s: " % inflector.cond_plural( len( invalid_lddas ), "dataset" )
+                    for ldda in invalid_lddas:
+                        message += '(%s)' % ldda.name
+                    message += '.  '
+                if valid_lddas:
+                    for ldda in valid_lddas:
+                        # Do not delete the association, just delete the library_dataset.  The
+                        # cleanup_datasets.py script handles everything else.
+                        ld = ldda.library_dataset
+                        ld.deleted = True
+                        trans.sa_session.add( ld )
+                    trans.sa_session.flush()
+                    num_valid_lddas = len( valid_lddas )
+                    message += "Deleted %i %s." % ( num_valid_lddas, inflector.cond_plural( num_valid_lddas, "dataset" ) )
+                else:
+                    message = "You are not authorized to delete any of the selected datasets."
+            elif action in [ 'zip', 'tgz', 'tbz', 'ngxzip' ]:
+                error = False
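+                # Translation table mapping punctuation and whitespace to
+                # underscores, so extra-file names are safe archive member
+                # names ( string.maketrans is the Python 2 API ).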
+                killme = string.punctuation + string.whitespace
+                trantab = string.maketrans(killme, '_' * len(killme))
+                try:
+                    outext = 'zip'
+                    if action == 'zip':
+                        # Can't use mkstemp - the file must not exist first
+                        tmpd = tempfile.mkdtemp()
+                        util.umask_fix_perms( tmpd, trans.app.config.umask, 0777, self.app.config.gid )
+                        tmpf = os.path.join( tmpd, 'library_download.' + action )
+                        if trans.app.config.upstream_gzip:
+                            archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True )
+                        else:
+                            archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+                        archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
+                    elif action == 'tgz':
+                        if trans.app.config.upstream_gzip:
+                            archive = StreamBall( 'w|' )
+                            outext = 'tar'
+                        else:
+                            archive = StreamBall( 'w|gz' )
+                            outext = 'tgz'
+                    elif action == 'tbz':
+                        archive = StreamBall( 'w|bz2' )
+                        outext = 'tbz2'
+                    elif action == 'ngxzip':
+                        archive = NgxZip( trans.app.config.nginx_x_archive_files_base )
+                except ( OSError, zipfile.BadZipfile ):
+                    error = True
+                    log.exception( "Unable to create archive for download" )
+                    message = "Unable to create archive for download, please report this error"
+                    status = 'error'
+                except Exception:
+                    error = True
+                    log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[0] )
+                    message = "Unable to create archive for download, please report - %s" % sys.exc_info()[0]
+                    status = 'error'
+                if not error:
+                    composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
+                    seen = []
+                    for ldda in lddas:
+                        if ldda.dataset.state in [ 'new', 'upload', 'queued', 'running', 'empty', 'discarded' ]:
+                            continue
+                        ext = ldda.extension
+                        is_composite = ext in composite_extensions
+                        path = ""
+                        parent_folder = ldda.library_dataset.folder
+                        while parent_folder is not None:
+                            # Exclude the now-hidden "root folder"
+                            if parent_folder.parent is None:
+                                path = os.path.join( parent_folder.library_root[0].name, path )
+                                break
+                            path = os.path.join( parent_folder.name, path )
+                            parent_folder = parent_folder.parent
+                        path += ldda.name
+                        while path in seen:
+                            path += '_'
+                        seen.append( path )
+                        zpath = os.path.split(path)[-1]  # comes as base_name/fname
+                        outfname, zpathext = os.path.splitext(zpath)
+                        if is_composite:
+                            # need to add all the components from the extra_files_path to the zip
+                            if zpathext == '':
+                                zpath = '%s.html' % zpath  # fake the real nature of the html file
+                            try:
+                                archive.add(ldda.dataset.file_name, zpath)  # add the primary of a composite set
+                            except IOError:
+                                error = True
+                                log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name)
+                                message = "Unable to create archive for download, please report this error"
+                                status = 'error'
+                                continue
+                            flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
+                            for fpath in flist:
+                                efp, fname = os.path.split(fpath)
+                                if fname:
+                                    fname = fname.translate(trantab)
+                                try:
+                                    archive.add( fpath, fname )
+                                except IOError:
+                                    error = True
+                                    log.exception( "Unable to add %s to temporary library download archive %s" % (fname, outfname))
+                                    message = "Unable to create archive for download, please report this error"
+                                    status = 'error'
+                                    continue
+                        else:  # simple case
+                            try:
+                                archive.add( ldda.dataset.file_name, path )
+                            except IOError:
+                                error = True
+                                log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name)
+                                message = "Unable to create archive for download, please report this error"
+                                status = 'error'
+                    if not error:
+                        if library_id:
+                            lname = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) ).name
+                        else:
+                            # Request must have come from the library_dataset_search_results page.
+                            lname = 'selected_dataset'
+                        fname = lname.replace( ' ', '_' ) + '_files'
+                        if action == 'zip':
+                            archive.close()
+                            trans.response.set_content_type( "application/x-zip-compressed" )
+                            trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (fname, outext)
+                            archive = util.streamball.ZipBall(tmpf, tmpd)
+                            archive.wsgi_status = trans.response.wsgi_status()
+                            archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                            return archive.stream
+                        elif action == 'ngxzip':
+                            trans.response.set_content_type( "application/zip" )
+                            trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (fname, outext)
+                            trans.response.headers[ "X-Archive-Files" ] = "zip"
+                            return archive
+                        else:
+                            trans.response.set_content_type( "application/x-tar" )
+                            trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (fname, outext)
+                            archive.wsgi_status = trans.response.wsgi_status()
+                            archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+                            return archive.stream
+            else:
+                status = 'error'
+                message = 'Invalid action (%s) specified.' % escape( str( action ) )
+        if library_id:
+            # If we have a library_id, browse the associated library
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              current_user_roles=current_user_roles,
+                                                              use_panels=use_panels,
+                                                              id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status=status ) )
+        else:
+            # We arrived here from the library_dataset_search_results page, so redirect there.
+            search_term = kwd.get( 'search_term', '' )
+            comptypes = get_comptypes( trans )
+            return trans.fill_template( '/library/common/library_dataset_search_results.mako',
+                                        cntrller=cntrller,
+                                        current_user_roles=current_user_roles,
+                                        search_term=search_term,
+                                        comptypes=comptypes,
+                                        lddas=lddas,
+                                        show_deleted=show_deleted,
+                                        use_panels=use_panels,
+                                        message=escape( message ),
+                                        status=escape( status ) )
+
+    @web.expose
+    def import_datasets_to_histories( self, trans, cntrller, library_id='', folder_id='', ldda_ids='', target_history_id='', target_history_ids='', new_history_name='', **kwd ):
+        # This method is called from one of the following places:
+        # - a menu option for a library dataset ( ldda_ids is a single ldda id )
+        # - a menu option for a library folder ( folder_id has a value )
+        # - a select list option for acting on multiple selected datasets within a library
+        #   ( ldda_ids is a comma separated string of ldda ids )
+        # - a menu option for a library dataset search result set ( ldda_ids is a comma separated string of ldda ids )
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        action = kwd.get( 'do_action', None )
+        user = trans.get_user()
+        current_history = trans.get_history()
+        if library_id:
+            library = trans.sa_session.query( trans.model.Library ).get( trans.security.decode_id( library_id ) )
+        else:
+            library = None
+        if folder_id:
+            folder = trans.sa_session.query( trans.model.LibraryFolder ).get( trans.security.decode_id( folder_id ) )
+        else:
+            folder = None
+        ldda_ids = util.listify( ldda_ids )
+        if ldda_ids:
+            ldda_ids = map( trans.security.decode_id, ldda_ids )
+        if target_history_ids:
+            target_history_ids = util.listify( target_history_ids )
+            target_history_ids = set(
+                [ trans.security.decode_id( thid )
+                    for thid in target_history_ids if thid ] )
+        elif target_history_id:
+            target_history_ids = [ trans.security.decode_id( target_history_id ) ]
+        if kwd.get( 'import_datasets_to_histories_button', False ):
+            invalid_datasets = 0
+            if not ldda_ids or not ( target_history_ids or new_history_name ):
+                message = "You must provide one or more source library datasets and one or more target histories."
+                status = 'error'
+            else:
+                if new_history_name:
+                    new_history = trans.app.model.History()
+                    new_history.name = new_history_name
+                    new_history.user = user
+                    trans.sa_session.add( new_history )
+                    trans.sa_session.flush()
+                    target_history_ids = [ new_history.id ]
+                    target_histories = [ new_history ]
+                elif user:
+                    target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user )]
+                else:
+                    target_histories = [ current_history ]
+                if len( target_histories ) != len( target_history_ids ):
+                    message += "You do not have permission to add datasets to %i requested histories.  " % ( len( target_history_ids ) - len( target_histories ) )
+                    status = 'error'
+                flush_needed = False
+                for ldda in map( trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get, ldda_ids ):
+                    if ldda is None:
+                        message += "You tried to import a dataset that does not exist.  "
+                        status = 'error'
+                        invalid_datasets += 1
+                    elif ldda.dataset.state not in [ trans.model.Dataset.states.OK, trans.model.Dataset.states.ERROR ]:
+                        message += "You cannot import dataset '%s' since its state is '%s'.  " % ( escape( ldda.name ), ldda.dataset.state )
+                        status = 'error'
+                        invalid_datasets += 1
+                    elif not ldda.has_data():
+                        message += "You cannot import empty dataset '%s'.  " % escape( ldda.name )
+                        status = 'error'
+                        invalid_datasets += 1
+                    else:
+                        for target_history in target_histories:
+                            ldda.to_history_dataset_association( target_history=target_history, add_to_history=True )
+                            if not flush_needed:
+                                flush_needed = True
+                if flush_needed:
+                    trans.sa_session.flush()
+                    hist_names_str = ", ".join( [ target_history.name for target_history in target_histories ] )
+                    num_source = len( ldda_ids ) - invalid_datasets
+                    num_target = len( target_histories )
+                    message += "%i %s imported into %i %s: %s" % ( num_source,
+                                                                   inflector.cond_plural( num_source, "dataset" ),
+                                                                   num_target,
+                                                                   inflector.cond_plural( num_target, "history" ),
+                                                                   hist_names_str )
+                trans.sa_session.refresh( current_history )
+        current_user_roles = trans.get_current_user_roles()
+        source_lddas = []
+        if folder:
+            for library_dataset in folder.datasets:
+                ldda = library_dataset.library_dataset_dataset_association
+                if not ldda.deleted and trans.app.security_agent.can_access_library_item( current_user_roles, ldda, trans.user ):
+                    source_lddas.append( ldda )
+        elif ldda_ids:
+            for ldda_id in ldda_ids:
+                # A security access permission check is not needed here since the current user
+                # had access to the lddas in order for the menu option to be available.
+                ldda = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ).get( ldda_id )
+                source_lddas.append( ldda )
+        if current_history is None:
+            current_history = trans.get_history( create=True )
+        if current_history is not None:
+            target_histories = [ current_history ]
+        else:
+            target_histories = []
+            message = 'You must have a history before you can import datasets.  You can do this by loading the analysis interface.'
+            status = 'error'
+        if user:
+            target_histories = user.active_histories
+        if action == 'import_to_current_history' and library_id:
+            # To streamline this as much as possible, go back to browsing the library.
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              id=library_id,
+                                                              message=message,
+                                                              status=status ) )
+        return trans.fill_template( "/library/common/import_datasets_to_histories.mako",
+                                    cntrller=cntrller,
+                                    library=library,
+                                    current_history=current_history,
+                                    ldda_ids=ldda_ids,
+                                    target_history_id=target_history_id,
+                                    target_history_ids=target_history_ids,
+                                    source_lddas=source_lddas,
+                                    target_histories=target_histories,
+                                    new_history_name=new_history_name,
+                                    show_deleted=show_deleted,
+                                    use_panels=use_panels,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
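For reference, a minimal sketch of how the comma-separated ldda_ids parameter is
normalised at the top of the method above (the id strings are hypothetical;
util.listify splits a comma-separated string into a list and
trans.security.decode_id maps each hashed id back to its integer database id):

    ldda_ids = util.listify( 'abc123,def456' )            # -> [ 'abc123', 'def456' ]
    ldda_ids = map( trans.security.decode_id, ldda_ids )  # -> e.g. [ 42, 43 ]
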
+    @web.expose
+    def manage_template_inheritance( self, trans, cntrller, item_type, library_id, folder_id=None, ldda_id=None, **kwd ):
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        message = escape( kwd.get( 'message', '' ) )
+        is_admin = ( trans.user_is_admin() and cntrller == 'library_admin' )
+        current_user_roles = trans.get_current_user_roles()
+        try:
+            item, item_desc, action, id = self.get_item_and_stuff( trans,
+                                                                   item_type=item_type,
+                                                                   library_id=library_id,
+                                                                   folder_id=folder_id,
+                                                                   ldda_id=ldda_id,
+                                                                   is_admin=is_admin )
+        except ValueError:
+            return None
+        if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
+            message = "You are not authorized to modify %s '%s'." % ( escape( item_desc ), escape( item.name ) )
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status='error' ) )
+        info_association, inherited = item.get_info_association( restrict=True )
+        if info_association:
+            if info_association.inheritable:
+                message = "The template for this %s will no longer be inherited to contained folders and datasets." % escape( item_desc )
+            else:
+                message = "The template for this %s will now be inherited to contained folders and datasets." % escape( item_desc )
+            info_association.inheritable = not( info_association.inheritable )
+            trans.sa_session.add( info_association )
+            trans.sa_session.flush()
+        return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                          action=action,
+                                                          cntrller=cntrller,
+                                                          use_panels=use_panels,
+                                                          library_id=library_id,
+                                                          folder_id=folder_id,
+                                                          id=id,
+                                                          show_deleted=show_deleted,
+                                                          message=message,
+                                                          status='done' ) )
+
+    @web.expose
+    def move_library_item( self, trans, cntrller, item_type, item_id, source_library_id='', make_target_current=True, **kwd ):
+        # This method is called from one of the following places:
+        # - a menu option for a library dataset ( item_type is 'ldda' and item_id is a single ldda id )
+        # - a menu option for a library folder ( item_type is 'folder' and item_id is a single folder id )
+        # - a select list option for acting on multiple selected datasets within a library ( item_type is
+        #   'ldda' and item_id is a comma separated string of ldda ids )
+        # - a menu option for a library dataset search result set ( item_type is 'ldda' and item_id is a
+        #   comma separated string of ldda ids )
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        make_target_current = util.string_as_bool( make_target_current )
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        user = trans.get_user()
+        current_user_roles = trans.get_current_user_roles()
+        move_ldda_ids = []
+        move_lddas = []
+        move_folder_id = []
+        move_folder = None
+        if source_library_id:
+            source_library = trans.sa_session.query( trans.model.Library ).get( trans.security.decode_id( source_library_id ) )
+        else:
+            # Request sent from the library_dataset_search_results page.
+            source_library = None
+        target_library_id = kwd.get( 'target_library_id', '' )
+        if target_library_id not in [ '', 'none', None ]:
+            target_library = trans.sa_session.query( trans.model.Library ).get( trans.security.decode_id( target_library_id ) )
+        elif make_target_current:
+            target_library = source_library
+        else:
+            target_library = None
+        target_folder_id = kwd.get( 'target_folder_id', '' )
+        if target_folder_id not in [ '', 'none', None ]:
+            target_folder = trans.sa_session.query( trans.model.LibraryFolder ).get( trans.security.decode_id( target_folder_id ) )
+            if target_library is None:
+                target_library = target_folder.parent_library
+        else:
+            target_folder = None
+        if item_type == 'ldda':
+            # We've been called from a menu option for a library dataset search result set
+            move_ldda_ids = util.listify( item_id )
+            if move_ldda_ids:
+                move_ldda_ids = map( trans.security.decode_id, move_ldda_ids )
+        elif item_type == 'folder':
+            move_folder_id = item_id
+            move_folder = trans.sa_session.query( trans.model.LibraryFolder ).get( trans.security.decode_id( move_folder_id ) )
+        if kwd.get( 'move_library_item_button', False ):
+            if not ( move_ldda_ids or move_folder_id ) or target_folder_id in [ '', 'none', None ]:
+                message = "You must select a source folder or one or more source datasets, and a target folder."
+                status = 'error'
+            else:
+                valid_lddas = []
+                invalid_lddas = []
+                invalid_items = 0
+                flush_required = False
+                if item_type == 'ldda':
+                    for ldda in map( trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get, move_ldda_ids ):
+                        if ldda is None:
+                            message += "You tried to move a dataset that does not exist.  "
+                            status = 'error'
+                            invalid_items += 1
+                        elif ldda.dataset.state not in [ trans.model.Dataset.states.OK, trans.model.Dataset.states.ERROR ]:
+                            message += "You cannot move dataset '%s' since its state is '%s'.  " % ( escape( ldda.name ), ldda.dataset.state )
+                            status = 'error'
+                            invalid_items += 1
+                        elif not ldda.has_data():
+                            message += "You cannot move empty dataset '%s'.  " % escape( ldda.name )
+                            status = 'error'
+                            invalid_items += 1
+                        else:
+                            if is_admin:
+                                library_dataset = ldda.library_dataset
+                                library_dataset.folder = target_folder
+                                trans.sa_session.add( library_dataset )
+                                flush_required = True
+                            else:
+                                if trans.app.security_agent.can_modify_library_item( current_user_roles, ldda ):
+                                    valid_lddas.append( ldda )
+                                    library_dataset = ldda.library_dataset
+                                    library_dataset.folder = target_folder
+                                    trans.sa_session.add( library_dataset )
+                                    flush_required = True
+                                else:
+                                    invalid_items += 1
+                                    invalid_lddas.append( ldda )
+                    if not valid_lddas:
+                        message = "You are not authorized to move any of the selected datasets."
+                    elif invalid_lddas:
+                        message += "You are not authorized to move %s: " % inflector.cond_plural( len( invalid_lddas ), "dataset" )
+                        for ldda in invalid_lddas:
+                            message += '(%s)' % escape( ldda.name )
+                        message += '.  '
+                    num_source = len( move_ldda_ids ) - invalid_items
+                    message = "%i %s moved to folder (%s) within data library (%s)" % ( num_source,
+                                                                                        inflector.cond_plural( num_source, "dataset" ),
+                                                                                        escape( target_folder.name ),
+                                                                                        escape( target_library.name ) )
+                elif item_type == 'folder':
+                    move_folder = trans.sa_session.query( trans.app.model.LibraryFolder ) \
+                                                  .get( trans.security.decode_id( move_folder_id ) )
+                    if move_folder is None:
+                        message += "You tried to move a folder that does not exist.  "
+                        status = 'error'
+                        invalid_items += 1
+                    else:
+                        move_folder.parent = target_folder
+                        trans.sa_session.add( move_folder )
+                        flush_required = True
+                        message = "Moved folder (%s) to folder (%s) within data library (%s) " % ( escape( move_folder.name ),
+                                                                                                   escape( target_folder.name ),
+                                                                                                   escape( target_library.name ) )
+                if flush_required:
+                    trans.sa_session.flush()
+        if target_library:
+            if is_admin:
+                target_library_folders = target_library.get_active_folders( target_library.root_folder )
+            else:
+                folders_with_permission_to_add = []
+                for folder in target_library.get_active_folders( target_library.root_folder ):
+                    if trans.app.security_agent.can_add_library_item( current_user_roles, folder ):
+                        folders_with_permission_to_add.append( folder )
+                target_library_folders = folders_with_permission_to_add
+        else:
+            target_library_folders = []
+        if item_type == 'ldda':
+            for ldda_id in move_ldda_ids:
+                # TODO: It is difficult to filter out undesired folders (e.g. the ldda's current
+                # folder) if we have a list of lddas, but we may want to filter folders that
+                # are easily handled.
+                ldda = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ).get( ldda_id )
+                move_lddas.append( ldda )
+        elif item_type == 'folder':
+            def __is_contained_in( folder1, folder2 ):
+                # Return True if folder1 is contained in folder2
+                if folder1.parent:
+                    if folder1.parent == folder2:
+                        return True
+                    return __is_contained_in( folder1.parent, folder2 )
+                return False
+            filtered_folders = []
+            for folder in target_library_folders:
+                include = True
+                if move_folder:
+                    if __is_contained_in( folder, move_folder ):
+                        # Don't allow moving a folder to one of its sub-folders (circular issues in db)
+                        include = False
+                    if move_folder.id == folder.id:
+                        # Don't allow moving a folder to itself
+                        include = False
+                    if move_folder.parent and move_folder.parent.id == folder.id:
+                        # Don't allow moving a folder to its current parent folder
+                        include = False
+                if include:
+                    filtered_folders.append( folder )
+            target_library_folders = filtered_folders
+
+        def __build_target_library_id_select_field( trans, selected_value='none' ):
+            # Get all the libraries for which the current user can add items.
+            target_libraries = []
+            if is_admin:
+                for library in trans.sa_session.query( trans.model.Library ) \
+                                               .filter( trans.model.Library.deleted == false() ) \
+                                               .order_by( trans.model.Library.table.c.name ):
+                    if source_library is None or library.id != source_library.id:
+                        target_libraries.append( library )
+            else:
+                for library in trans.app.security_agent.get_accessible_libraries( trans, user ):
+                    if source_library is None:
+                        if trans.app.security_agent.can_add_library_item( current_user_roles, library ):
+                            target_libraries.append( library )
+                    elif library.id != source_library.id:
+                        if trans.app.security_agent.can_add_library_item( current_user_roles, library ):
+                            target_libraries.append( library )
+            # A refresh_on_change is required to display the selected library's folders
+            return build_select_field( trans,
+                                       objs=target_libraries,
+                                       label_attr='name',
+                                       select_field_name='target_library_id',
+                                       selected_value=selected_value,
+                                       refresh_on_change=True )
+
+        def __build_target_folder_id_select_field( trans, folders, selected_value='none' ):
+            for folder in folders:
+                if not folder.parent:
+                    folder.name = 'Data library root'
+                    break
+            return build_select_field( trans,
+                                       objs=folders,
+                                       label_attr='name',
+                                       select_field_name='target_folder_id',
+                                       selected_value=selected_value,
+                                       refresh_on_change=False )
+        if target_library:
+            selected_value = target_library.id
+        else:
+            selected_value = 'none'
+        target_library_id_select_field = __build_target_library_id_select_field( trans, selected_value=selected_value )
+        target_folder_id_select_field = __build_target_folder_id_select_field( trans, target_library_folders )
+        return trans.fill_template( "/library/common/move_library_item.mako",
+                                    cntrller=cntrller,
+                                    make_target_current=make_target_current,
+                                    source_library=source_library,
+                                    item_type=item_type,
+                                    item_id=item_id,
+                                    move_ldda_ids=move_ldda_ids,
+                                    move_lddas=move_lddas,
+                                    move_folder=move_folder,
+                                    target_library=target_library,
+                                    target_library_id_select_field=target_library_id_select_field,
+                                    target_folder_id_select_field=target_folder_id_select_field,
+                                    show_deleted=show_deleted,
+                                    use_panels=use_panels,
+                                    message=escape( message ),
+                                    status=escape( status ) )
+
+    @web.expose
+    def delete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
+        # This action will handle deleting all types of library items.  State is saved for libraries and
+        # folders ( i.e., if undeleted, the state of contents of the library or folder will remain, so previously
+        # deleted / purged contents will have the same state ).  When a library or folder has been deleted for
+        # the amount of time defined in the cleanup_datasets.py script, the library or folder and all of its
+        # contents will be purged.  The association between this method and the cleanup_datasets.py script
+        # enables clean maintenance of libraries and library dataset disk files.  This is also why the item_types
+        # are not any of the associations ( the cleanup_datasets.py script handles everything ).
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        item_types = { 'library': trans.app.model.Library,
+                       'folder': trans.app.model.LibraryFolder,
+                       'library_dataset': trans.app.model.LibraryDataset }
+        is_admin = ( trans.user_is_admin() and cntrller == 'library_admin' )
+        current_user_roles = trans.get_current_user_roles()
+        if item_type not in item_types:
+            message = 'Bad item_type specified: %s' % escape( str( item_type ) )
+            status = 'error'
+        else:
+            if item_type == 'library_dataset':
+                item_desc = 'Dataset'
+            else:
+                item_desc = item_type.capitalize()
+            library_item_ids = util.listify( item_id )
+            valid_items = 0
+            invalid_items = 0
+            not_authorized_items = 0
+            flush_needed = False
+            message = ''
+            for library_item_id in library_item_ids:
+                try:
+                    library_item = trans.sa_session.query( item_types[ item_type ] ).get( trans.security.decode_id( library_item_id ) )
+                except Exception:
+                    library_item = None
+                if not library_item or not ( is_admin or trans.app.security_agent.can_access_library_item( current_user_roles, library_item, trans.user ) ):
+                    invalid_items += 1
+                elif not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, library_item ) ):
+                    not_authorized_items += 1
+                else:
+                    valid_items += 1
+                    library_item.deleted = True
+                    trans.sa_session.add( library_item )
+                    flush_needed = True
+            if flush_needed:
+                trans.sa_session.flush()
+            if valid_items:
+                message += "%d %s marked deleted.  " % ( valid_items, escape( inflector.cond_plural( valid_items, item_desc ) ) )
+            if invalid_items:
+                message += '%d invalid %s specified.  ' % ( invalid_items, escape( inflector.cond_plural( invalid_items, item_desc ) ) )
+                status = 'error'
+            if not_authorized_items:
+                message += 'You are not authorized to delete %d %s.  ' % ( not_authorized_items, escape( inflector.cond_plural( not_authorized_items, item_desc ) ) )
+                status = 'error'
+        if item_type == 'library':
+            return trans.response.send_redirect( web.url_for( controller=cntrller,
+                                                              action='browse_libraries',
+                                                              message=message,
+                                                              status=status ) )
+        else:
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status=status ) )
+
+    @web.expose
+    def undelete_library_item( self, trans, cntrller, library_id, item_id, item_type, **kwd ):
+        # This action will handle undeleting all types of library items
+        status = kwd.get( 'status', 'done' )
+        show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) )
+        item_types = { 'library': trans.app.model.Library,
+                       'folder': trans.app.model.LibraryFolder,
+                       'library_dataset': trans.app.model.LibraryDataset }
+        is_admin = ( trans.user_is_admin() and cntrller == 'library_admin' )
+        current_user_roles = trans.get_current_user_roles()
+        if item_type not in item_types:
+            message = 'Bad item_type specified: %s' % escape( str( item_type ) )
+            status = 'error'
+        else:
+            if item_type == 'library_dataset':
+                item_desc = 'Dataset'
+            else:
+                item_desc = item_type.capitalize()
+
+            library_item_ids = util.listify( item_id )
+            valid_items = 0
+            invalid_items = 0
+            purged_items = 0
+            not_authorized_items = 0
+            flush_needed = False
+            message = ''
+            for library_item_id in library_item_ids:
+                try:
+                    library_item = trans.sa_session.query( item_types[ item_type ] ).get( trans.security.decode_id( library_item_id ) )
+                except Exception:
+                    library_item = None
+                if not library_item or not ( is_admin or trans.app.security_agent.can_access_library_item( current_user_roles, library_item, trans.user ) ):
+                    invalid_items += 1
+                elif library_item.purged:
+                    purged_items += 1
+                elif not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, library_item ) ):
+                    not_authorized_items += 1
+                else:
+                    valid_items += 1
+                    library_item.deleted = False
+                    trans.sa_session.add( library_item )
+                    flush_needed = True
+            if flush_needed:
+                trans.sa_session.flush()
+            if valid_items:
+                message += "%d %s marked undeleted.  " % ( valid_items, escape( inflector.cond_plural( valid_items, item_desc ) ) )
+            if invalid_items:
+                message += '%d invalid %s specified.  ' % ( invalid_items, escape( inflector.cond_plural( invalid_items, item_desc ) ) )
+                status = 'error'
+            if not_authorized_items:
+                message += 'You are not authorized to undelete %d %s.  ' % ( not_authorized_items, escape( inflector.cond_plural( not_authorized_items, item_desc ) ) )
+                status = 'error'
+            if purged_items:
+                message += '%d %s marked purged, so cannot be undeleted.  ' % ( purged_items, escape( inflector.cond_plural( purged_items, item_desc ) ) )
+                status = 'error'
+        if item_type == 'library':
+            return trans.response.send_redirect( web.url_for( controller=cntrller,
+                                                              action='browse_libraries',
+                                                              message=message,
+                                                              status=status ) )
+        else:
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status=status ) )
+
+    def _check_access( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
+        can_access = True
+        if isinstance( item, trans.model.HistoryDatasetAssociation ):
+            # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association.
+            if not item:
+                message = "Invalid history dataset (%s) specified." % escape( str( item ) )
+                can_access = False
+            elif not trans.app.security_agent.can_access_dataset( current_user_roles, item.dataset ) and item.history.user == trans.user:
+                message = "You do not have permission to access the history dataset with id (%s)." % str( item.id )
+                can_access = False
+        else:
+            # Make sure the user has the LIBRARY_ACCESS permission on the library item.
+            if not item:
+                message = "Invalid library item (%s) specified." % escape( str( item ) )
+                can_access = False
+            elif not ( is_admin or trans.app.security_agent.can_access_library_item( current_user_roles, item, trans.user ) ):
+                if isinstance( item, trans.model.Library ):
+                    item_type = 'data library'
+                elif isinstance( item, trans.model.LibraryFolder ):
+                    item_type = 'folder'
+                else:
+                    item_type = '(unknown item type)'
+                message = "You do not have permission to access the %s with id (%s)." % ( escape( item_type ), str( item.id ) )
+                can_access = False
+        if not can_access:
+            if cntrller == 'api':
+                return 400, message
+            if isinstance( item, trans.model.Library ):
+                return trans.response.send_redirect( web.url_for( controller=cntrller,
+                                                                  action='browse_libraries',
+                                                                  cntrller=cntrller,
+                                                                  use_panels=use_panels,
+                                                                  message=message,
+                                                                  status='error' ) )
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              use_panels=use_panels,
+                                                              id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status='error' ) )
+
+    def _check_add( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
+        # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
+        if not ( is_admin or trans.app.security_agent.can_add_library_item( current_user_roles, item ) ):
+            message = "You are not authorized to add an item to (%s)." % escape( item.name )
+            # Redirect to the real parent library since we know we have access to it.
+            if cntrller == 'api':
+                return 403, message
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              use_panels=use_panels,
+                                                              id=library_id,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status='error' ) )
+
+    def _check_manage( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
+        if isinstance( item, trans.model.LibraryDataset ):
+            # Deny access if the user is not an admin and does not have the LIBRARY_MANAGE and DATASET_MANAGE_PERMISSIONS permissions.
+            if not ( is_admin or
+                     ( trans.app.security_agent.can_manage_library_item( current_user_roles, item ) and
+                       trans.app.security_agent.can_manage_dataset( current_user_roles, item.library_dataset_dataset_association.dataset ) ) ):
+                message = "You are not authorized to manage permissions on library dataset (%s)." % escape( item.name )
+                if cntrller == 'api':
+                    return 403, message
+                return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                                  action='browse_library',
+                                                                  id=library_id,
+                                                                  cntrller=cntrller,
+                                                                  use_panels=use_panels,
+                                                                  message=message,
+                                                                  status='error' ) )
+        # Deny access if the user is not an admin and does not have the LIBRARY_MANAGE permission.
+        if not ( is_admin or trans.app.security_agent.can_manage_library_item( current_user_roles, item ) ):
+            message = "You are not authorized to manage permissions on (%s)." % escape( item.name )
+            if cntrller == 'api':
+                return 403, message
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              id=library_id,
+                                                              cntrller=cntrller,
+                                                              use_panels=use_panels,
+                                                              message=message,
+                                                              status='error' ) )
+
+    def _check_modify( self, trans, cntrller, is_admin, item, current_user_roles, use_panels, library_id, show_deleted ):
+        # Deny modification if the user is not an admin and does not have the LIBRARY_MODIFY permission.
+        if not ( is_admin or trans.app.security_agent.can_modify_library_item( current_user_roles, item ) ):
+            message = "You are not authorized to modify (%s)." % escape( item.name )
+            if cntrller == 'api':
+                return 403, message
+            return trans.response.send_redirect( web.url_for( controller='library_common',
+                                                              action='browse_library',
+                                                              cntrller=cntrller,
+                                                              id=library_id,
+                                                              use_panels=use_panels,
+                                                              show_deleted=show_deleted,
+                                                              message=message,
+                                                              status='error' ) )
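The four _check_* helpers above share a calling convention: they implicitly
return None when the operation is allowed, and a denial response otherwise (an
( HTTP status, message ) tuple when cntrller == 'api', a redirect for the web
UI). A minimal sketch of how a call site would consume that (illustrative only;
the real call sites live elsewhere in this controller):

    response = self._check_modify( trans, cntrller, is_admin, item,
                                   current_user_roles, use_panels, library_id, show_deleted )
    if response is not None:
        # Permission denied: propagate the tuple (API) or the redirect (web UI).
        return response
    # ...otherwise proceed with the modification...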
+
+# ---- Utility methods -------------------------------------------------------
+
+
+def active_folders( trans, folder ):
+    # Retrieves all active sub-folders within a given folder much faster than going
+    # through the mapper.  This query also eagerloads the permissions on each folder.
+    return trans.sa_session.query( trans.app.model.LibraryFolder ) \
+                           .filter_by( parent=folder, deleted=False ) \
+                           .options( eagerload_all( "actions" ) ) \
+                           .order_by( trans.app.model.LibraryFolder.table.c.name ) \
+                           .all()
+
+
+def activatable_folders( trans, folder ):
+    return trans.sa_session.query( trans.app.model.LibraryFolder ) \
+                           .filter_by( parent=folder, purged=False ) \
+                           .options( eagerload_all( "actions" ) ) \
+                           .order_by( trans.app.model.LibraryFolder.table.c.name ) \
+                           .all()
+
+
+def map_library_datasets_to_lddas( trans, lib_datasets ):
+    '''
+    Given a list of LibraryDatasets, return a map from the LibraryDatasets
+    to their LDDAs. If an LDDA does not exist for a LibraryDataset, then
+    there will be no entry in the return hash.
+    '''
+    # Get the list of LDDA ids referenced by the LibraryDatasets so that all of
+    # the LDDAs can be retrieved with a single query, rather than querying once
+    # per LibraryDataset.
+    ldda_ids = [ x.library_dataset_dataset_association_id for x in lib_datasets ]
+    lddas = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ) \
+                            .filter( trans.app.model.LibraryDatasetDatasetAssociation.id.in_( ldda_ids ) ) \
+                            .all()
+
+    # Map the LibraryDataset to the returned LDDAs:
+    ret_lddas = {}
+    for ldda in lddas:
+        ret_lddas[ldda.library_dataset_id] = ldda
+    return ret_lddas
+
+
+def datasets_for_lddas( trans, lddas ):
+    '''
+    Given a list of LDDAs, return a list of Datasets for them.
+    '''
+    dataset_ids = [ x.dataset_id for x in lddas ]
+    datasets = trans.sa_session.query( trans.app.model.Dataset ) \
+                               .filter( trans.app.model.Dataset.id.in_( dataset_ids ) ) \
+                               .all()
+    return datasets
+
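Taken together, the two helpers above let a caller fan out from a list of
LibraryDatasets to their underlying Datasets in two bulk queries. A hedged
usage sketch (variable names are illustrative):

    ld_to_ldda = map_library_datasets_to_lddas( trans, lib_datasets )
    lddas = ld_to_ldda.values()
    datasets = datasets_for_lddas( trans, lddas )
    # A LibraryDataset with no LDDA is simply absent from ld_to_ldda.
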
+
+def active_folders_and_library_datasets( trans, folder ):
+    folders = active_folders( trans, folder )
+    library_datasets = trans.sa_session.query( trans.model.LibraryDataset ).filter(
+        and_( trans.model.LibraryDataset.table.c.deleted == false(),
+        trans.model.LibraryDataset.table.c.folder_id == folder.id ) ) \
+        .order_by( trans.model.LibraryDataset.table.c._name ) \
+        .all()
+    return folders, library_datasets
+
+
+def activatable_folders_and_library_datasets( trans, folder ):
+    folders = activatable_folders( trans, folder )
+    library_datasets = trans.sa_session.query( trans.model.LibraryDataset ) \
+                                       .filter( trans.model.LibraryDataset.table.c.folder_id == folder.id ) \
+                                       .join( ( trans.model.LibraryDatasetDatasetAssociation.table,
+                                                trans.model.LibraryDataset.table.c.library_dataset_dataset_association_id == trans.model.LibraryDatasetDatasetAssociation.table.c.id ) ) \
+                                       .join( ( trans.model.Dataset.table,
+                                                trans.model.LibraryDatasetDatasetAssociation.table.c.dataset_id == trans.model.Dataset.table.c.id ) ) \
+                                       .filter( trans.model.Dataset.table.c.deleted == false() ) \
+                                       .order_by( trans.model.LibraryDataset.table.c._name ) \
+                                       .all()
+    return folders, library_datasets
+
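A minimal sketch (illustrative only, assuming library is a trans.model.Library
instance) of recursively walking the active contents of a library with the
first helper above, e.g. to print an indented tree:

    def walk_active( trans, folder, depth=0 ):
        folders, library_datasets = active_folders_and_library_datasets( trans, folder )
        for library_dataset in library_datasets:
            print '%s%s' % ( '  ' * depth, library_dataset.name )
        for sub_folder in folders:
            print '%s%s/' % ( '  ' * depth, sub_folder.name )
            walk_active( trans, sub_folder, depth + 1 )

    walk_active( trans, library.root_folder )
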
+
+def branch_deleted( folder ):
+    # Return True if a folder belongs to a branch that has been deleted
+    if folder.deleted:
+        return True
+    if folder.parent:
+        return branch_deleted( folder.parent )
+    return False
+
+
+def get_containing_library_from_library_dataset( trans, library_dataset ):
+    """Given a library_dataset, get the containing library"""
+    folder = library_dataset.folder
+    while folder.parent:
+        folder = folder.parent
+    # We have folder set to the library's root folder, which has the same name as the library
+    for library in trans.sa_session.query( trans.model.Library ).filter(
+        and_( trans.model.Library.table.c.deleted == false(),
+            trans.model.Library.table.c.name == folder.name ) ):
+        # Just to double-check
+        if library.root_folder == folder:
+            return library
+    return None
+
+
+def get_comptypes( trans ):
+    # Work on a copy so that removing disabled types below does not mutate the
+    # module-level comptypes list (mutating it would make the remove() call
+    # raise ValueError on every request after the first).
+    comptypes_t = list( comptypes )
+    if trans.app.config.nginx_x_archive_files_base:
+        comptypes_t = [ 'ngxzip' ]
+    for comptype in trans.app.config.disable_library_comptypes:
+        try:
+            comptypes_t.remove( comptype )
+        except ValueError:
+            # This comptype was not available to begin with.
+            pass
+    return comptypes_t
+
+
+def get_sorted_accessible_library_items( trans, cntrller, items, sort_attr ):
+    is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+    if is_admin:
+        accessible_items = items
+    else:
+        # Enforce access permission settings
+        current_user_roles = trans.get_current_user_roles()
+        accessible_items = []
+        for item in items:
+            if trans.app.security_agent.can_access_library_item( current_user_roles, item, trans.user ):
+                accessible_items.append( item )
+    # Sort the accessible items by the requested attribute
+    return sort_by_attr( accessible_items, sort_attr )
+
+
+def sort_by_attr( seq, attr ):
+    """
+    Sort a sequence of objects by one of the objects' attributes.
+    Arguments:
+    seq  - the list or any sequence (including an immutable one) of objects to sort
+    attr - the name of the attribute to sort by
+    """
+    # Use the "Schwartzian transform"
+    # Create the auxiliary list of tuples where every i-th tuple has form
+    # (seq[i].attr, i, seq[i]) and sort it. The second item of tuple is needed not
+    # only to provide stable sorting, but mainly to eliminate comparison of objects
+    # (which can be expensive or prohibited) in case of equal attribute values.
+    intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
+    intermed.sort()
+    return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
+
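To make the decorate-sort-undecorate idiom above concrete, a toy example
(hypothetical objects; Python 2 semantics, matching the code):

    class Obj( object ):
        def __init__( self, name ):
            self.name = name

    seq = [ Obj( 'b' ), Obj( 'a' ), Obj( 'a' ) ]
    # Decorate: map( None, ... ) zips in Python 2, yielding
    # [ ( 'b', 0, <Obj b> ), ( 'a', 1, <Obj a> ), ( 'a', 2, <Obj a> ) ].
    intermed = map( None, [ o.name for o in seq ], range( len( seq ) ), seq )
    # Sort: the two 'a' entries tie on the attribute, so the original index
    # breaks the tie and the Obj instances themselves are never compared.
    intermed.sort()
    # Undecorate: keep only the objects, now ordered 'a', 'a', 'b'.
    sorted_seq = [ t[ -1 ] for t in intermed ]
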
+
+def lucene_search( trans, cntrller, search_term, search_url, **kwd ):
+    """Return display of results from a full-text lucene search of data libraries."""
+    message = escape( kwd.get( 'message', '' ) )
+    status = kwd.get( 'status', 'done' )
+    full_url = "%s/find?%s" % ( search_url, urllib.urlencode( { "kwd" : search_term } ) )
+    response = urllib2.urlopen( full_url )
+    ldda_ids = loads( response.read() )[ "ids" ]
+    response.close()
+    lddas = [ trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) for ldda_id in ldda_ids ]
+    return status, message, get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' )
+
+
+def whoosh_search( trans, cntrller, search_term, **kwd ):
+    """Return display of results from a full-text whoosh search of data libraries."""
+    message = escape( kwd.get( 'message', '' ) )
+    status = kwd.get( 'status', 'done' )
+    ok = True
+    if whoosh_search_enabled:
+        whoosh_index_dir = trans.app.config.whoosh_index_dir
+        index_exists = whoosh.index.exists_in( whoosh_index_dir )
+        if index_exists:
+            index = whoosh.index.open_dir( whoosh_index_dir )
+            # Set field boosts so the searcher places roughly equal weight on all search fields.
+            searcher = index.searcher( weighting=BM25F( field_B={ 'name_B' : 3.4,
+                                                                  'info_B' : 3.2,
+                                                                  'dbkey_B' : 3.3,
+                                                                  'message_B' : 3.5 } ) )
+            # Perform search
+            parser = MultifieldParser( [ 'name', 'info', 'dbkey', 'message' ], schema=schema )
+            # Search term with wildcards may be slow...
+            results = searcher.search( parser.parse( '*' + search_term + '*' ), minscore=0.01 )
+            ldda_ids = [ result[ 'id' ] for result in results ]
+            lddas = []
+            for ldda_id in ldda_ids:
+                ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id )
+                if ldda:
+                    lddas.append( ldda )
+            lddas = get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' )
+        else:
+            message = "Tell your Galaxy administrator that the directory %s does not contain valid whoosh indexes" % str( whoosh_index_dir )
+            ok = False
+    else:
+        message = "Whoosh search requires Python version 2.5 or greater; your Python version is not compatible."
+        ok = False
+    if not ok:
+        status = 'error'
+        lddas = []
+    return status, message, lddas
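Both search helpers return the same ( status, message, lddas ) triple, so a
caller can treat the lucene and whoosh back ends interchangeably. A hedged
sketch of a call site (the config attribute names here are assumptions made
for illustration):

    if trans.app.config.fulltext_url:
        status, message, lddas = lucene_search( trans, cntrller, search_term,
                                                trans.app.config.fulltext_url )
    else:
        status, message, lddas = whoosh_search( trans, cntrller, search_term )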
diff --git a/lib/galaxy/webapps/galaxy/controllers/mobile.py b/lib/galaxy/webapps/galaxy/controllers/mobile.py
new file mode 100644
index 0000000..1065da3
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/mobile.py
@@ -0,0 +1,95 @@
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+
+
+class Mobile( BaseUIController ):
+
+    @web.expose
+    def index( self, trans, **kwargs ):
+        return trans.response.send_redirect( web.url_for(controller='root', action='index' ) )
+        # return trans.fill_template( "mobile/index.mako" )
+
+    @web.expose
+    def history_list( self, trans ):
+        return trans.response.send_redirect( web.url_for(controller='root', action='index' ) )
+        # return trans.fill_template( "mobile/history/list.mako" )
+
+    @web.expose
+    def history_detail( self, trans, id ):
+        return trans.response.send_redirect( web.url_for(controller='root', action='index' ) )
+        # history = trans.sa_session.query( trans.app.model.History ).get( id )
+        # assert history.user == trans.user
+        # return trans.fill_template( "mobile/history/detail.mako", history=history )
+
+    @web.expose
+    def dataset_detail( self, trans, id ):
+        return trans.response.send_redirect( web.url_for(controller='root', action='index' ) )
+        # dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
+        # assert dataset.history.user == trans.user
+        # return trans.fill_template( "mobile/dataset/detail.mako", dataset=dataset )
+
+    @web.expose
+    def dataset_peek( self, trans, id ):
+        return trans.response.send_redirect( web.url_for(controller='root', action='index' ) )
+        # dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
+        # assert dataset.history.user == trans.user
+        # return trans.fill_template( "mobile/dataset/peek.mako", dataset=dataset )
+
+    @web.expose
+    def settings( self, trans, email=None, password=None ):
+        return trans.response.send_redirect( web.url_for(controller='root', action='index' ) )
+        # message = None
+        # if email is not None and password is not None:
+        #     if email == "":
+        #         self.__logout( trans )
+        #         message = "Logged out"
+        #     else:
+        #         error = self.__login( trans, email, password )
+        #         message = error or "Login changed"
+        # return trans.fill_template( "mobile/settings.mako", message=message )
+
+    def __logout( self, trans ):
+        return trans.response.send_redirect( web.url_for(controller='root', action='index' ) )
+        # trans.log_event( "User logged out" )
+        # trans.handle_user_logout()
+
+    def __login(self, trans, login="", password=""):
+        return trans.response.send_redirect( web.url_for(controller='root', action='index' ) )
+        # error = password_error = None
+        # user = trans.sa_session.query( model.User ).filter(or_(
+        #    email == login,
+        #    username == login
+        # )).first()
+        # if not user:
+        #     autoreg = trans.app.auth_manager.check_auto_registration(trans, login, password)
+        #     if autoreg[0]:
+        #         kwd = {}
+        #         kwd['email'] = autoreg[1]
+        #         kwd['username'] = autoreg[2]
+        #         params = util.Params( kwd )
+        #         message = " ".join( [ validate_email( trans, kwd['email'] ),
+        #                               validate_publicname( trans, kwd['username'] ) ] ).rstrip()
+        #         if not message:
+        #             message, status, user, success = self.__register( trans, 'user', False, **kwd )
+        #             if success:
+        #                 # The handle_user_login() method has a call to the history_set_default_permissions() method
+        #                 # (needed when logging in with a history), user needs to have default permissions set before logging in
+        #                 trans.handle_user_login( user )
+        #                 trans.log_event( "User (auto) created a new account" )
+        #                 trans.log_event( "User logged in" )
+        #             else:
+        #                 message = "Auto-registration failed, contact your local Galaxy administrator. %s" % message
+        #         else:
+        #             message = "Auto-registration failed, contact your local Galaxy administrator. %s" % message
+        #     else:
+        #         message = "No such user (please note that login is case sensitive)"
+        # elif user.deleted:
+        #     error = "This account has been marked deleted, contact your Galaxy administrator to restore the account."
+        # elif user.external:
+        #     error = "This account was created for use with an external authentication method, contact your local Galaxy administrator to activate it."
+        # elif not trans.app.auth_manager.check_password(user, password):
+        #     error = "Invalid password"
+        # else:
+        #     trans.handle_user_login( user )
+        #     trans.log_event( "User logged in" )
+        # return error
diff --git a/lib/galaxy/webapps/galaxy/controllers/page.py b/lib/galaxy/webapps/galaxy/controllers/page.py
new file mode 100644
index 0000000..9aa9c19
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/page.py
@@ -0,0 +1,824 @@
+from json import loads
+
+from markupsafe import escape
+from sqlalchemy import and_, desc, false, true
+
+from galaxy import managers, model, util, web
+from galaxy.model.item_attrs import UsesItemRatings
+from galaxy.util import unicodify
+from galaxy.util.sanitize_html import _BaseHTMLProcessor, sanitize_html
+from galaxy.web import error, url_for
+from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesVisualizationMixin
+from galaxy.web.framework.helpers import grids, time_ago
+
+
+def format_bool( b ):
+    if b:
+        return "yes"
+    else:
+        return ""
+
+
+class PageListGrid( grids.Grid ):
+    # Custom column.
+    class URLColumn( grids.PublicURLColumn ):
+        def get_value( self, trans, grid, item ):
+            return url_for(controller='page', action='display_by_username_and_slug', username=item.user.username, slug=item.slug )
+
+    # Grid definition
+    use_panels = True
+    title = "Pages"
+    model_class = model.Page
+    default_filter = { "published": "All", "tags": "All", "title": "All", "sharing": "All" }
+    default_sort_key = "-update_time"
+    columns = [
+        grids.TextColumn( "Title", key="title", attach_popup=True, filterable="advanced" ),
+        URLColumn( "Public URL" ),
+        grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.PageAnnotationAssociation, filterable="advanced" ),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.PageTagAssociation, filterable="advanced", grid_name="PageListGrid" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False ),
+        grids.GridColumn( "Created", key="create_time", format=time_ago ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+    ]
+    columns.append( grids.MulticolFilterColumn(
+                    "Search",
+                    cols_to_filter=[ columns[0], columns[2] ],
+                    key="free-text-search", visible=False, filterable="standard" )
+                    )
+    global_actions = [
+        grids.GridAction( "Add new page", dict( action='create' ) )
+    ]
+    operations = [
+        grids.DisplayByUsernameAndSlugGridOperation( "View", allow_multiple=False ),
+        grids.GridOperation( "Edit content", allow_multiple=False, url_args=dict( action='edit_content') ),
+        grids.GridOperation( "Edit attributes", allow_multiple=False, url_args=dict( action='edit') ),
+        grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
+        grids.GridOperation( "Delete", confirm="Are you sure you want to delete this page?" ),
+    ]
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter_by( user=trans.user, deleted=False )
+
+
+class PageAllPublishedGrid( grids.Grid ):
+    # Grid definition
+    use_panels = True
+    use_async = True
+    title = "Published Pages"
+    model_class = model.Page
+    default_sort_key = "update_time"
+    default_filter = dict( title="All", username="All" )
+    columns = [
+        grids.PublicURLColumn( "Title", key="title", filterable="advanced" ),
+        grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.PageAnnotationAssociation, filterable="advanced" ),
+        grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
+        grids.CommunityRatingColumn( "Community Rating", key="rating" ),
+        grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.PageTagAssociation, filterable="advanced", grid_name="PageAllPublishedGrid" ),
+        grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search title, annotation, owner, and tags",
+            cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+
+    def build_initial_query( self, trans, **kwargs ):
+        # Join so that searching history.user makes sense.
+        return trans.sa_session.query( self.model_class ).join( model.User.table )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter( self.model_class.deleted == false() ).filter( self.model_class.published == true() )
+
+
+class ItemSelectionGrid( grids.Grid ):
+    """ Base class for pages' item selection grids. """
+    # Custom columns.
+    class NameColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, item):
+            if hasattr( item, "get_display_name" ):
+                return escape(item.get_display_name())
+            else:
+                return escape(item.name)
+
+    # Grid definition.
+    show_item_checkboxes = True
+    template = "/page/select_items_grid.mako"
+    default_filter = { "deleted": "False", "sharing": "All" }
+    default_sort_key = "-update_time"
+    use_async = True
+    use_paging = True
+    num_rows_per_page = 10
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter_by( user=trans.user )
+
+
+class HistorySelectionGrid( ItemSelectionGrid ):
+    """ Grid for selecting histories. """
+    # Grid definition.
+    title = "Saved Histories"
+    model_class = model.History
+    columns = [
+        ItemSelectionGrid.NameColumn( "Name", key="name", filterable="advanced" ),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.HistoryTagAssociation, filterable="advanced"),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False, visible=False ),
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search",
+            cols_to_filter=[ columns[0], columns[1] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter_by( user=trans.user, purged=False )
+
+
+class HistoryDatasetAssociationSelectionGrid( ItemSelectionGrid ):
+    """ Grid for selecting HDAs. """
+    # Grid definition.
+    title = "Saved Datasets"
+    model_class = model.HistoryDatasetAssociation
+    columns = [
+        ItemSelectionGrid.NameColumn( "Name", key="name", filterable="advanced" ),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.HistoryDatasetAssociationTagAssociation, filterable="advanced"),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False, visible=False ),
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search",
+            cols_to_filter=[ columns[0], columns[1] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        # To filter HDAs by user, join the HDA and History tables and then filter histories by user;
+        # this is necessary because HDAs have no direct user relation.
+        return query.select_from( model.HistoryDatasetAssociation.table.join( model.History.table ) ).filter( model.History.user == trans.user )
+
+
+class WorkflowSelectionGrid( ItemSelectionGrid ):
+    """ Grid for selecting workflows. """
+    # Grid definition.
+    title = "Saved Workflows"
+    model_class = model.StoredWorkflow
+    columns = [
+        ItemSelectionGrid.NameColumn( "Name", key="name", filterable="advanced" ),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.StoredWorkflowTagAssociation, filterable="advanced"),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False, visible=False ),
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search",
+            cols_to_filter=[ columns[0], columns[1] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+
+
+class PageSelectionGrid( ItemSelectionGrid ):
+    """ Grid for selecting pages. """
+    # Grid definition.
+    title = "Saved Pages"
+    model_class = model.Page
+    columns = [
+        grids.TextColumn( "Title", key="title", filterable="advanced" ),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.PageTagAssociation, filterable="advanced"),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False, visible=False ),
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search",
+            cols_to_filter=[ columns[0], columns[1] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+
+
+class VisualizationSelectionGrid( ItemSelectionGrid ):
+    """ Grid for selecting visualizations. """
+    # Grid definition.
+    title = "Saved Visualizations"
+    model_class = model.Visualization
+    columns = [
+        grids.TextColumn( "Title", key="title", filterable="advanced" ),
+        grids.TextColumn( "Type", key="type" ),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationListGrid" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search",
+            cols_to_filter=[ columns[0], columns[2] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+
+
+class _PageContentProcessor( _BaseHTMLProcessor ):
+    """ Processes page content to produce HTML that is suitable for display. For now, processor renders embedded objects. """
+
+    def __init__( self, trans, encoding, type, render_embed_html_fn ):
+        _BaseHTMLProcessor.__init__( self, encoding, type)
+        self.trans = trans
+        self.ignore_content = False
+        self.num_open_tags_for_ignore = 0
+        self.render_embed_html_fn = render_embed_html_fn
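+        # Assumed callback contract (see PageController._get_embed_html below):
+        # render_embed_html_fn( trans, item_class, item_id ) -> HTML for the embedded item.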
+
+    def unknown_starttag( self, tag, attrs ):
+        """ Called for each start tag; attrs is a list of (attr, value) tuples. """
+
+        # If ignoring content, just increment tag count and ignore.
+        if self.ignore_content:
+            self.num_open_tags_for_ignore += 1
+            return
+
+        # Not ignoring tag; look for embedded content.
+        embedded_item = False
+        for attribute in attrs:
+            if ( attribute[0] == "class" ) and ( "embedded-item" in attribute[1].split(" ") ):
+                embedded_item = True
+                break
+        # For embedded content, set ignore flag to ignore current content and add new content for embedded item.
+        if embedded_item:
+            # Set processing attributes to ignore content.
+            self.ignore_content = True
+            self.num_open_tags_for_ignore = 1
+
+            # Insert content for embedded element.
+            for attribute in attrs:
+                name = attribute[0]
+                if name == "id":
+                    # ID has form '<class_name>-<encoded_item_id>'
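+                    # e.g. id="History-<encoded_id>" embeds the corresponding history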
+                    item_class, item_id = attribute[1].split("-")
+                    embed_html = self.render_embed_html_fn( self.trans, item_class, item_id )
+                    self.pieces.append( embed_html )
+            return
+
+        # Default behavior: not ignoring and no embedded content.
+        _BaseHTMLProcessor.unknown_starttag( self, tag, attrs )
+
+    def handle_data( self, text ):
+        """ Called for each block of plain text. """
+        if self.ignore_content:
+            return
+        _BaseHTMLProcessor.handle_data( self, text )
+
+    def unknown_endtag( self, tag ):
+        """ Called for each end tag. """
+
+        # If ignoring content, see if current tag is the end of content to ignore.
+        if self.ignore_content:
+            self.num_open_tags_for_ignore -= 1
+            if self.num_open_tags_for_ignore == 0:
+                # Done ignoring content.
+                self.ignore_content = False
+            return
+
+        # Default behavior:
+        _BaseHTMLProcessor.unknown_endtag( self, tag )
+
+
+class PageController( BaseUIController, SharableMixin,
+                      UsesStoredWorkflowMixin, UsesVisualizationMixin, UsesItemRatings ):
+
+    _page_list = PageListGrid()
+    _all_published_list = PageAllPublishedGrid()
+    _history_selection_grid = HistorySelectionGrid()
+    _workflow_selection_grid = WorkflowSelectionGrid()
+    _datasets_selection_grid = HistoryDatasetAssociationSelectionGrid()
+    _page_selection_grid = PageSelectionGrid()
+    _visualization_selection_grid = VisualizationSelectionGrid()
+
+    def __init__( self, app ):
+        super( PageController, self ).__init__( app )
+        self.history_manager = managers.histories.HistoryManager( app )
+        self.history_serializer = managers.histories.HistorySerializer( self.app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    @web.expose
+    @web.require_login()
+    def list( self, trans, *args, **kwargs ):
+        """ List user's pages. """
+        # Handle operation
+        if 'operation' in kwargs and 'id' in kwargs:
+            session = trans.sa_session
+            operation = kwargs['operation'].lower()
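+            # Operation names are the grid operation labels, lower-cased (e.g. "share or publish").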
+            ids = util.listify( kwargs['id'] )
+            for id in ids:
+                item = session.query( model.Page ).get( self.decode_id( id ) )
+                if operation == "delete":
+                    item.deleted = True
+                if operation == "share or publish":
+                    return self.sharing( trans, **kwargs )
+            session.flush()
+
+        # HACK: render the grid as an embedded fragment so an entire HTML document is not nested inside another.
+        kwargs[ 'embedded' ] = True
+        # Build grid HTML.
+        grid = self._page_list( trans, *args, **kwargs )
+
+        # Build list of pages shared with user.
+        shared_by_others = trans.sa_session \
+            .query( model.PageUserShareAssociation ) \
+            .filter_by( user=trans.get_user() ) \
+            .join( model.Page.table ) \
+            .filter( model.Page.deleted == false() ) \
+            .order_by( desc( model.Page.update_time ) ) \
+            .all()
+
+        # Render grid wrapped in panels
+        return trans.fill_template( "page/index.mako", embedded_grid=grid, shared_by_others=shared_by_others )
+
+    @web.expose
+    def list_published( self, trans, *args, **kwargs ):
+        kwargs[ 'embedded' ] = True
+        grid = self._all_published_list( trans, *args, **kwargs )
+        if 'async' in kwargs:
+            return grid
+
+        # Render grid wrapped in panels
+        return trans.fill_template( "page/list_published.mako", embedded_grid=grid )
+
+    @web.expose
+    @web.require_login( "create pages" )
+    def create( self, trans, page_title="", page_slug="", page_annotation="" ):
+        """
+        Create a new page
+        """
+        user = trans.get_user()
+        page_title_err = page_slug_err = page_annotation_err = ""
+        if trans.request.method == "POST":
+            if not page_title:
+                page_title_err = "Page name is required"
+            elif not page_slug:
+                page_slug_err = "Page id is required"
+            elif not self._is_valid_slug( page_slug ):
+                page_slug_err = "Page identifier must consist of only lowercase letters, numbers, and the '-' character"
+            elif trans.sa_session.query( model.Page ).filter_by( user=user, slug=page_slug, deleted=False ).first():
+                page_slug_err = "Page id must be unique"
+            else:
+                # Create the new stored page
+                page = model.Page()
+                page.title = page_title
+                page.slug = page_slug
+                page_annotation = sanitize_html( page_annotation, 'utf-8', 'text/html' )
+                self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
+                page.user = user
+                # And the first (empty) page revision
+                page_revision = model.PageRevision()
+                page_revision.title = page_title
+                page_revision.page = page
+                page.latest_revision = page_revision
+                page_revision.content = ""
+                # Persist
+                session = trans.sa_session
+                session.add( page )
+                session.flush()
+                # Redirect to the page list.
+                # trans.set_message( "Page '%s' created" % page.title )
+                return trans.response.send_redirect( web.url_for(controller='page', action='list' ) )
+        return trans.show_form(
+            web.FormBuilder( web.url_for(controller='page', action='create'), "Create new page", submit_text="Submit" )
+            .add_text( "page_title", "Page title", value=page_title, error=page_title_err )
+            .add_text( "page_slug", "Page identifier", value=page_slug, error=page_slug_err,
+                       help="""A unique identifier that will be used for
+                            public links to this page. A default is generated
+                            from the page title, but can be edited. This field
+                            must contain only lowercase letters, numbers, and
+                            the '-' character.""" )
+            .add_text( "page_annotation", "Page annotation", value=page_annotation, error=page_annotation_err,
+                       help="A description of the page; annotation is shown alongside published pages."),
+            template="page/create.mako" )
+
+    @web.expose
+    @web.require_login( "edit pages" )
+    def edit( self, trans, id, page_title="", page_slug="", page_annotation="" ):
+        """
+        Edit a page's attributes.
+        """
+        encoded_id = id
+        id = self.decode_id( id )
+        session = trans.sa_session
+        page = session.query( model.Page ).get( id )
+        user = trans.user
+        assert page.user == user
+        page_title_err = page_slug_err = page_annotation_err = ""
+        if trans.request.method == "POST":
+            if not page_title:
+                page_title_err = "Page name is required"
+            elif not page_slug:
+                page_slug_err = "Page id is required"
+            elif not self._is_valid_slug( page_slug ):
+                page_slug_err = "Page identifier must consist of only lowercase letters, numbers, and the '-' character"
+            elif page_slug != page.slug and trans.sa_session.query( model.Page ).filter_by( user=user, slug=page_slug, deleted=False ).first():
+                page_slug_err = "Page id must be unique"
+            elif not page_annotation:
+                page_annotation_err = "Page annotation is required"
+            else:
+                page.title = page_title
+                page.slug = page_slug
+                page_annotation = sanitize_html( page_annotation, 'utf-8', 'text/html' )
+                self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
+                session.flush()
+                # Redirect to page list.
+                return trans.response.send_redirect( web.url_for(controller='page', action='list' ) )
+        else:
+            page_title = page.title
+            page_slug = page.slug
+            page_annotation = self.get_item_annotation_str( trans.sa_session, trans.user, page )
+            if not page_annotation:
+                page_annotation = ""
+        return trans.show_form(
+            web.FormBuilder( web.url_for(controller='page', action='edit', id=encoded_id ), "Edit page attributes", submit_text="Submit" )
+            .add_text( "page_title", "Page title", value=page_title, error=page_title_err )
+            .add_text( "page_slug", "Page identifier", value=page_slug, error=page_slug_err,
+                       help="""A unique identifier that will be used for
+                       public links to this page. A default is generated
+                       from the page title, but can be edited. This field
+                       must contain only lowercase letters, numbers, and
+                       the '-' character.""" )
+            .add_text( "page_annotation", "Page annotation", value=page_annotation, error=page_annotation_err,
+                       help="A description of the page; annotation is shown alongside published pages."),
+            template="page/create.mako" )
+
+    @web.expose
+    @web.require_login( "edit pages" )
+    def edit_content( self, trans, id ):
+        """
+        Render the main page editor interface.
+        """
+        id = self.decode_id( id )
+        page = trans.sa_session.query( model.Page ).get( id )
+        assert page.user == trans.user
+        return trans.fill_template( "page/editor.mako", page=page )
+
+    @web.expose
+    @web.require_login( "use Galaxy pages" )
+    def sharing( self, trans, id, **kwargs ):
+        """ Handle page sharing. """
+
+        # Get session and page.
+        session = trans.sa_session
+        page = trans.sa_session.query( model.Page ).get( self.decode_id( id ) )
+
+        # Do operation on page.
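+        # One action flag is expected per request; this chain applies the first one found.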
+        if 'make_accessible_via_link' in kwargs:
+            self._make_item_accessible( trans.sa_session, page )
+        elif 'make_accessible_and_publish' in kwargs:
+            self._make_item_accessible( trans.sa_session, page )
+            page.published = True
+        elif 'publish' in kwargs:
+            page.published = True
+        elif 'disable_link_access' in kwargs:
+            page.importable = False
+        elif 'unpublish' in kwargs:
+            page.published = False
+        elif 'disable_link_access_and_unpublish' in kwargs:
+            page.importable = page.published = False
+        elif 'unshare_user' in kwargs:
+            user = session.query( model.User ).get( self.decode_id( kwargs['unshare_user' ] ) )
+            if not user:
+                error( "User not found for provided id" )
+            association = session.query( model.PageUserShareAssociation ) \
+                                 .filter_by( user=user, page=page ).one()
+            session.delete( association )
+
+        session.flush()
+
+        return trans.fill_template( "/sharing_base.mako",
+                                    item=page, use_panels=True )
+
+    @web.expose
+    @web.require_login( "use Galaxy pages" )
+    def share( self, trans, id, email="", use_panels=False ):
+        """ Handle sharing with an individual user. """
+        msg = mtype = None
+        page = trans.sa_session.query( model.Page ).get( self.decode_id( id ) )
+        if email:
+            other = trans.sa_session.query( model.User ) \
+                                    .filter( and_( model.User.table.c.email == email,
+                                                   model.User.table.c.deleted == false() ) ) \
+                                    .first()
+            if not other:
+                mtype = "error"
+                msg = ( "User '%s' does not exist" % escape( email ) )
+            elif other == trans.get_user():
+                mtype = "error"
+                msg = ( "You cannot share a page with yourself" )
+            elif trans.sa_session.query( model.PageUserShareAssociation ) \
+                    .filter_by( user=other, page=page ).count() > 0:
+                mtype = "error"
+                msg = ( "Page already shared with '%s'" % escape( email ) )
+            else:
+                share = model.PageUserShareAssociation()
+                share.page = page
+                share.user = other
+                session = trans.sa_session
+                session.add( share )
+                self.create_item_slug( session, page )
+                session.flush()
+                page_title = escape( page.title )
+                other_email = escape( other.email )
+                trans.set_message( "Page '%s' shared with user '%s'" % ( page_title, other_email ) )
+                return trans.response.send_redirect( url_for( controller='page', action='sharing', id=id ) )
+        return trans.fill_template( "/ind_share_base.mako",
+                                    message=msg,
+                                    messagetype=mtype,
+                                    item=page,
+                                    email=email,
+                                    use_panels=use_panels )
+
+    @web.expose
+    @web.require_login()
+    def save( self, trans, id, content, annotations ):
+        id = self.decode_id( id )
+        page = trans.sa_session.query( model.Page ).get( id )
+        assert page.user == trans.user
+
+        # Sanitize content
+        content = sanitize_html( content, 'utf-8', 'text/html' )
+
+        # Add a new revision to the page with the provided content.
+        page_revision = model.PageRevision()
+        page_revision.title = page.title
+        page_revision.page = page
+        page.latest_revision = page_revision
+        page_revision.content = content
+
+        # Save annotations.
+        annotations = loads( annotations )
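+        # Each entry is expected to look like (shape inferred from the lookups below):
+        # { "item_id": "<encoded id>", "item_class": "History", "text": "<html>" }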
+        for annotation_dict in annotations:
+            item_id = self.decode_id( annotation_dict[ 'item_id' ] )
+            item_class = self.get_class( annotation_dict[ 'item_class' ] )
+            item = trans.sa_session.query( item_class ).filter_by( id=item_id ).first()
+            if not item:
+                raise RuntimeError( "cannot find annotated item" )
+            text = sanitize_html( annotation_dict[ 'text' ], 'utf-8', 'text/html' )
+
+            # Add/update annotation.
+            if item_id and item_class and text:
+                # Get annotation association.
+                annotation_assoc_class = getattr( model, "%sAnnotationAssociation" % item_class.__name__ )
+                annotation_assoc = trans.sa_session.query( annotation_assoc_class ).filter_by( user=trans.get_user() )
+                if item_class == model.History:
+                    annotation_assoc = annotation_assoc.filter_by( history=item )
+                elif item_class == model.HistoryDatasetAssociation:
+                    annotation_assoc = annotation_assoc.filter_by( hda=item )
+                elif item_class == model.StoredWorkflow:
+                    annotation_assoc = annotation_assoc.filter_by( stored_workflow=item )
+                elif item_class == model.WorkflowStep:
+                    annotation_assoc = annotation_assoc.filter_by( workflow_step=item )
+                annotation_assoc = annotation_assoc.first()
+                if not annotation_assoc:
+                    # Create association.
+                    annotation_assoc = annotation_assoc_class()
+                    item.annotations.append( annotation_assoc )
+                    annotation_assoc.user = trans.get_user()
+                # Set annotation user text.
+                annotation_assoc.annotation = text
+        trans.sa_session.flush()
+
+    @web.expose
+    @web.require_login()
+    def display( self, trans, id ):
+        id = self.decode_id( id )
+        page = trans.sa_session.query( model.Page ).get( id )
+        if not page:
+            raise web.httpexceptions.HTTPNotFound()
+        return self.display_by_username_and_slug( trans, page.user.username, page.slug )
+
+    @web.expose
+    def display_by_username_and_slug( self, trans, username, slug ):
+        """ Display page based on a username and slug. """
+
+        # Get page.
+        session = trans.sa_session
+        user = session.query( model.User ).filter_by( username=username ).first()
+        page = trans.sa_session.query( model.Page ).filter_by( user=user, slug=slug, deleted=False ).first()
+        if page is None:
+            raise web.httpexceptions.HTTPNotFound()
+        # Security check raises error if user cannot access page.
+        self.security_check( trans, page, False, True)
+
+        # Process page content.
+        processor = _PageContentProcessor( trans, 'utf-8', 'text/html', self._get_embed_html )
+        processor.feed( page.latest_revision.content )
+
+        # Get rating data.
+        user_item_rating = 0
+        if trans.get_user():
+            user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), page )
+            if user_item_rating:
+                user_item_rating = user_item_rating.rating
+            else:
+                user_item_rating = 0
+        ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, page )
+
+        # Output is string, so convert to unicode for display.
+        page_content = unicodify( processor.output(), 'utf-8' )
+        return trans.fill_template_mako( "page/display.mako", item=page,
+                                         item_data=page_content,
+                                         user_item_rating=user_item_rating,
+                                         ave_item_rating=ave_item_rating,
+                                         num_ratings=num_ratings,
+                                         content_only=True )
+
+    @web.expose
+    @web.require_login( "use Galaxy pages" )
+    def set_accessible_async( self, trans, id=None, accessible=False ):
+        """ Set page's importable attribute and slug. """
+        page = self.get_page( trans, id )
+
+        # Only set if the importable value would change; this prevents update_time from changing unless the attribute actually changed.
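+        # 'accessible' arrives as a string from the client, so compare against common truthy spellings.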
+        importable = accessible in ['True', 'true', 't', 'T']
+        if page.importable != importable:
+            if importable:
+                self._make_item_accessible( trans.sa_session, page )
+            else:
+                page.importable = importable
+            trans.sa_session.flush()
+        return
+
+    @web.expose
+    @web.require_login( "rate items" )
+    @web.json
+    def rate_async( self, trans, id, rating ):
+        """ Rate a page asynchronously and return updated community data. """
+
+        page = self.get_page( trans, id, check_ownership=False, check_accessible=True )
+        if not page:
+            return trans.show_error_message( "The specified page does not exist." )
+
+        # Rate page.
+        self.rate_item( trans.sa_session, trans.get_user(), page, rating )
+
+        return self.get_ave_item_rating_data( trans.sa_session, page )
+
+    @web.expose
+    def get_embed_html_async( self, trans, id ):
+        """ Returns HTML for embedding a workflow in a page. """
+
+        # TODO: a user should be able to embed any item they have access to; see display_by_username_and_slug for the security check.
+        page = self.get_page( trans, id )
+        if page:
+            return "Embedded Page '%s'" % page.title
+
+    @web.expose
+    @web.json
+    @web.require_login( "use Galaxy pages" )
+    def get_name_and_link_async( self, trans, id=None ):
+        """ Returns page's name and link. """
+        page = self.get_page( trans, id )
+
+        if self.create_item_slug( trans.sa_session, page ):
+            trans.sa_session.flush()
+        return_dict = { "name": page.title, "link": url_for(controller='page',
+                                                            action="display_by_username_and_slug",
+                                                            username=page.user.username,
+                                                            slug=page.slug ) }
+        return return_dict
+
+    @web.expose
+    @web.require_login("select a history from saved histories")
+    def list_histories_for_selection( self, trans, **kwargs ):
+        """ Returns HTML that enables a user to select one or more histories. """
+        # Render the list view
+        return self._history_selection_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_login("select a workflow from saved workflows")
+    def list_workflows_for_selection( self, trans, **kwargs ):
+        """ Returns HTML that enables a user to select one or more workflows. """
+        # Render the list view
+        return self._workflow_selection_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_login("select a visualization from saved visualizations")
+    def list_visualizations_for_selection( self, trans, **kwargs ):
+        """ Returns HTML that enables a user to select one or more visualizations. """
+        # Render the list view
+        return self._visualization_selection_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_login("select a page from saved pages")
+    def list_pages_for_selection( self, trans, **kwargs ):
+        """ Returns HTML that enables a user to select one or more pages. """
+        # Render the list view
+        return self._page_selection_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_login("select a dataset from saved datasets")
+    def list_datasets_for_selection( self, trans, **kwargs ):
+        """ Returns HTML that enables a user to select one or more datasets. """
+        # Render the list view
+        return self._datasets_selection_grid( trans, **kwargs )
+
+    @web.expose
+    def get_editor_iframe( self, trans ):
+        """ Returns the document for the page editor's iframe. """
+        return trans.fill_template( "page/wymiframe.mako" )
+
+    def get_page( self, trans, id, check_ownership=True, check_accessible=False ):
+        """Get a page from the database by id."""
+        # Load page from database.
+        id = self.decode_id( id )
+        page = trans.sa_session.query( model.Page ).get( id )
+        if not page:
+            error( "Page not found" )
+        else:
+            return self.security_check( trans, page, check_ownership, check_accessible )
+
+    def get_item( self, trans, id ):
+        return self.get_page( trans, id )
+
+    def _get_embedded_history_html( self, trans, id ):
+        """
+        Returns html suitable for embedding in another page.
+        """
+        # TODO: should be moved to history controller and/or called via ajax from the template
+        decoded_id = self.decode_id( id )
+        # histories embedded in pages are set to importable when embedded, check for access here
+        history = self.history_manager.get_accessible( decoded_id, trans.user, current_history=trans.history )
+
+        # create ownership flag for template, dictify models
+        # Note: add the original annotation since this is published; get_dict returns user-based annotations.
+        user_is_owner = trans.user == history.user
+        history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history )
+
+        # include all datasets: hidden, deleted, and purged
+        history_dictionary = self.history_serializer.serialize_to_view(
+            history, view='detailed', user=trans.user, trans=trans
+        )
+        contents = self.history_serializer.serialize_contents( history, 'contents', trans=trans, user=trans.user )
+        history_dictionary[ 'annotation' ] = history.annotation
+
+        filled = trans.fill_template( "history/embed.mako",
+                                      item=history,
+                                      user_is_owner=user_is_owner,
+                                      history_dict=history_dictionary,
+                                      content_dicts=contents )
+        return filled
+
+    def _get_embedded_visualization_html( self, trans, id ):
+        """
+        Returns html suitable for embedding visualizations in another page.
+        """
+        visualization = self.get_visualization( trans, id, False, True )
+        if not visualization:
+            return None
+        visualization.annotation = self.get_item_annotation_str( trans.sa_session, visualization.user, visualization )
+
+        # Fork to template based on visualization.type (registry or builtin).
+        if ( trans.app.visualizations_registry and
+                visualization.type in trans.app.visualizations_registry.plugins and
+                visualization.type not in trans.app.visualizations_registry.BUILT_IN_VISUALIZATIONS ):
+            # if a registry visualization, load a version into an iframe :(
+            # TODO: simplest path from A to B but not optimal - will be difficult to do reg visualizations any other way
+            # TODO: this will load the visualization twice (once above, once when the iframe src calls 'saved')
+            encoded_visualization_id = trans.security.encode_id( visualization.id )
+            return trans.fill_template( 'visualization/embed_in_frame.mako',
+                                        item=visualization,
+                                        encoded_visualization_id=encoded_visualization_id,
+                                        content_only=True )
+
+        return trans.fill_template( "visualization/embed.mako", item=visualization, item_data=None )
+
+    def _get_embed_html( self, trans, item_class, item_id ):
+        """ Returns HTML for embedding an item in a page. """
+        item_class = self.get_class( item_class )
+        if item_class == model.History:
+            return self._get_embedded_history_html( trans, item_id )
+
+        elif item_class == model.HistoryDatasetAssociation:
+            decoded_id = self.decode_id( item_id )
+            dataset = self.hda_manager.get_accessible( decoded_id, trans.user )
+            dataset = self.hda_manager.error_if_uploading( dataset )
+            if dataset:
+                dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
+                data = self.hda_manager.text_data( dataset )
+                return trans.fill_template( "dataset/embed.mako", item=dataset, item_data=data )
+
+        elif item_class == model.StoredWorkflow:
+            workflow = self.get_stored_workflow( trans, item_id, False, True )
+            if workflow:
+                workflow.annotation = self.get_item_annotation_str( trans.sa_session, workflow.user, workflow )
+                self.get_stored_workflow_steps( trans, workflow )
+                return trans.fill_template( "workflow/embed.mako", item=workflow, item_data=workflow.latest_workflow.steps )
+
+        elif item_class == model.Visualization:
+            return self._get_embedded_visualization_html( trans, item_id )
+
+        elif item_class == model.Page:
+            pass
diff --git a/lib/galaxy/webapps/galaxy/controllers/request_type.py b/lib/galaxy/webapps/galaxy/controllers/request_type.py
new file mode 100644
index 0000000..a82d515
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/request_type.py
@@ -0,0 +1,477 @@
+from __future__ import absolute_import
+
+import logging
+
+from sqlalchemy import false
+from markupsafe import escape
+
+from galaxy import model, util
+from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin, web
+from galaxy.web.form_builder import build_select_field, TextField
+from galaxy.web.framework.helpers import iff, grids
+from .requests_common import invalid_id_redirect
+
+log = logging.getLogger( __name__ )
+
+
+class RequestTypeGrid( grids.Grid ):
+    # Custom column types
+    class NameColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request_type):
+            return escape(request_type.name)
+
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request_type):
+            return escape(request_type.desc)
+
+    class RequestFormColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request_type):
+            return escape(request_type.request_form.name)
+
+    class SampleFormColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request_type):
+            return escape(request_type.sample_form.name)
+
+    class ExternalServiceColumn( grids.IntegerColumn ):
+        def get_value(self, trans, grid, request_type):
+            if request_type.external_services:
+                return len( request_type.external_services )
+            return 'No external service assigned'
+    # Grid definition
+    title = "Request Types"
+    template = "admin/request_type/grid.mako"
+    model_class = model.RequestType
+    default_sort_key = "-create_time"
+    num_rows_per_page = 50
+    preserve_state = True
+    use_paging = True
+    default_filter = dict( deleted="False" )
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: iff( item.deleted, None, dict( operation="view_request_type", id=item.id ) ) ),
+                    attach_popup=True,
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           key='desc',
+                           filterable="advanced" ),
+        RequestFormColumn( "Request Form",
+                           link=( lambda item: iff( item.deleted, None, dict( operation="view_form_definition", id=item.request_form.id ) ) ) ),
+        SampleFormColumn( "Sample Form",
+                          link=( lambda item: iff( item.deleted, None, dict( operation="view_form_definition", id=item.sample_form.id ) ) ) ),
+        ExternalServiceColumn( "External Services" ),
+        grids.DeletedColumn( "Deleted",
+                             key="deleted",
+                             visible=False,
+                             filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [
+        grids.GridOperation( "Edit request type", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
+        grids.GridOperation( "Edit permissions", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
+        grids.GridOperation( "Use run details template", allow_multiple=False, condition=( lambda item: not item.deleted and not item.run_details ) ),
+        grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted ) ),
+        grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
+    ]
+    global_actions = [
+        grids.GridAction( "Create new request type", dict( controller='request_type', action='create_request_type' ) )
+    ]
+
+
+class RequestType( BaseUIController, UsesFormDefinitionsMixin ):
+    request_type_grid = RequestTypeGrid()
+
+    @web.expose
+    @web.require_admin
+    def browse_request_types( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
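+            # Operation names are the grid operation labels, lower-cased (e.g. "edit request type").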
+            obj_id = kwd.get( 'id', None )
+            if operation == "view_form_definition":
+                return self.view_form_definition( trans, **kwd )
+            elif operation == "view_request_type":
+                return self.view_request_type( trans, **kwd )
+            elif operation == "use run details template":
+                return trans.response.send_redirect( web.url_for( controller='requests_admin',
+                                                                  action='add_template',
+                                                                  cntrller='requests_admin',
+                                                                  item_type='request_type',
+                                                                  form_type=trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE,
+                                                                  request_type_id=obj_id ) )
+            elif operation == "edit request type":
+                return self.view_editable_request_type( trans, **kwd )
+            elif operation == "delete":
+                return self.delete_request_type( trans, **kwd )
+            elif operation == "undelete":
+                return self.undelete_request_type( trans, **kwd )
+            elif operation == "edit permissions":
+                return self.request_type_permissions( trans, **kwd )
+            elif operation == "view_external_service":
+                return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                                  action='view_external_service',
+                                                                  **kwd ) )
+        # Render the grid view
+        return self.request_type_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def create_request_type( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        rt_info_widgets, rt_states_widgets = self.__get_populated_request_type_widgets( trans, **kwd )
+        external_service_select_fields_list = []
+        # Get all the external services selected so far.
+        external_services_list = self.__get_external_services( trans, **kwd )
+        for index, external_service in enumerate( external_services_list ):
+            external_service_select_field = self.__build_external_service_select_field( trans,
+                                                                                        'external_service_id_%i' % index,
+                                                                                        external_service )
+            external_service_select_fields_list.append( external_service_select_field )
+        if params.get( 'add_state_button', False ):
+            # Append a new tuple to the set of states which will result in
+            # empty state name and description TextFields being displayed on
+            # the form.
+            rt_states_widgets.append( ( "", "" ) )
+        elif params.get( 'remove_state_button', False ):
+            index = int( params.get( 'remove_state_button', '' ).split( " " )[2] )
+            del rt_states_widgets[ index - 1 ]
+        elif params.get( 'add_external_service_button', False ):
+            # create a new one
+            external_service_select_field = self.__build_external_service_select_field( trans,
+                                                                                        'external_service_id_%i' % len( external_services_list ) )
+            external_service_select_fields_list.append( external_service_select_field )
+        elif params.get( 'create_request_type_button', False ):
+            self.__save_request_type( trans, action='create_request_type', **kwd )
+            message = 'The request type has been created.'
+            return trans.response.send_redirect( web.url_for( controller='request_type',
+                                                              action='browse_request_types',
+                                                              message=message,
+                                                              status=status ) )
+        # A request_type requires at least one possible sample state so that
+        # it can be used to create a sequencing request
+        if not rt_states_widgets:
+            rt_states_widgets.append( ( "New", "First sample state" ) )
+        return trans.fill_template( '/admin/request_type/create_request_type.mako',
+                                    rt_info_widgets=rt_info_widgets,
+                                    rt_states_widgets=rt_states_widgets,
+                                    external_service_select_fields_list=external_service_select_fields_list,
+                                    message=message,
+                                    status=status )
+
+    def __get_external_services(self, trans, request_type=None, **kwd):
+        params = util.Params( kwd )
+        external_services_list = []
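+        # External services are submitted as sequentially numbered form fields
+        # (external_service_id_0, external_service_id_1, ...); stop at the first missing index.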
+        i = 0
+        while True:
+            if 'external_service_id_%i' % i in kwd:
+                id = params.get( 'external_service_id_%i' % i, '' )
+                try:
+                    external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( id ) )
+                except Exception:
+                    return invalid_id_redirect( trans, 'request_type', id, 'external service', action='browse_request_types' )
+                external_services_list.append( external_service )
+                i += 1
+            else:
+                break
+        return external_services_list
+
+    @web.expose
+    @web.require_admin
+    def view_editable_request_type( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        request_type_id = params.get( 'id', None )
+        try:
+            request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'request_type', request_type_id, 'request type', action='browse_request_types' )
+        # See if we have any associated templates
+        widgets = request_type.get_template_widgets( trans )
+        widget_fields_have_contents = self.widget_fields_have_contents( widgets )
+        # Get all the external services selected so far.
+        external_service_select_fields_list = []
+        for index, external_service in enumerate( request_type.external_services ):
+            external_service_select_field = self.__build_external_service_select_field( trans,
+                                                                                        'external_service_id_%i' % index,
+                                                                                        external_service )
+            external_service_select_fields_list.append( external_service_select_field )
+        return trans.fill_template( '/admin/request_type/edit_request_type.mako',
+                                    request_type=request_type,
+                                    widgets=widgets,
+                                    widget_fields_have_contents=widget_fields_have_contents,
+                                    external_service_select_fields_list=external_service_select_fields_list,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def edit_request_type( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        request_type_id = params.get( 'id', None )
+        try:
+            request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'request_type', request_type_id, 'request type', action='browse_request_types' )
+        # See if we have any associated templates
+        widgets = request_type.get_template_widgets( trans )
+        widget_fields_have_contents = self.widget_fields_have_contents( widgets )
+        # Get all the external services selected so far.
+        external_service_select_fields_list = []
+        external_services_list = self.__get_external_services( trans, request_type, **kwd )
+        if params.get( 'edit_request_type_button', False ):
+            request_type = self.__save_request_type( trans, action='edit_request_type', **kwd )
+            message = 'Changes made to request type (%s) have been saved' % request_type.name
+        elif params.get( 'add_external_service_button', False ):
+            external_services_list.append( None )
+        elif params.get( 'remove_external_service_button', False ):
+            index = int( kwd[ 'remove_external_service_button' ].split(' ')[3] ) - 1
+            del external_services_list[index]
+        for index, external_service in enumerate( external_services_list ):
+            external_service_select_field = self.__build_external_service_select_field( trans,
+                                                                                        'external_service_id_%i' % index,
+                                                                                        external_service )
+            external_service_select_fields_list.append( external_service_select_field )
+        return trans.fill_template( '/admin/request_type/edit_request_type.mako',
+                                    request_type=request_type,
+                                    widgets=widgets,
+                                    widget_fields_have_contents=widget_fields_have_contents,
+                                    external_service_select_fields_list=external_service_select_fields_list,
+                                    message=message,
+                                    status=status )
+
+    def __save_request_type( self, trans, action, **kwd ):
+        # Here we save a newly created request_type or save changed
+        # attributes of an existing request_type.
+        params = util.Params( kwd )
+        request_type_id = params.get( 'id', None )
+        name = util.restore_text( params.get( 'name', '' ) )
+        desc = util.restore_text( params.get( 'desc', '' ) )
+        request_form_id = params.get( 'request_form_id', 'none' )
+        sample_form_id = params.get( 'sample_form_id', 'none' )
+        # validate
+        if not name or request_form_id == 'none' or sample_form_id == 'none':
+            message = 'Enter the name, request form, sample form and at least one sample state associated with this request type.'
+            return trans.response.send_redirect( web.url_for( controller='request_type',
+                                                              action=action,
+                                                              message=message,
+                                                              status='error' ) )
+        try:
+            request_form = trans.sa_session.query( trans.model.FormDefinition ).get( trans.security.decode_id( request_form_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'request_type', request_type_id, 'form definition', action='browse_request_types' )
+
+        try:
+            sample_form = trans.sa_session.query( trans.model.FormDefinition ).get( trans.security.decode_id( sample_form_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'request_type', request_type_id, 'form definition', action='browse_request_types' )
+        if request_type_id:
+            # We're saving changed attributes of an existing request_type.
+            request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+            request_type.name = name
+            request_type.desc = desc
+            request_type.request_form = request_form
+            request_type.sample_form = sample_form
+            for sample_state in request_type.states:
+                sample_state_id = trans.security.encode_id( sample_state.id )
+                name = util.restore_text( params.get( 'state_name_%s' % sample_state_id, '' ) )
+                desc = util.restore_text( params.get( 'state_desc_%s' % sample_state_id, '' ) )
+                sample_state.name = name
+                sample_state.desc = desc
+                trans.sa_session.add( sample_state )
+                trans.sa_session.flush()
+            trans.sa_session.add( request_type )
+            trans.sa_session.flush()
+        else:
+            # We're saving a newly created request_type
+            request_type = trans.model.RequestType( name=name,
+                                                    desc=desc,
+                                                    request_form=request_form,
+                                                    sample_form=sample_form )
+            trans.sa_session.add( request_type )
+            trans.sa_session.flush()
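+            # Sample states arrive as indexed form fields ( state_name_0 /
+            # state_desc_0, state_name_1 / state_desc_1, ... ); read them in
+            # order until the next index is missing.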
+            i = 0
+            while True:
+                if 'state_name_%i' % i in kwd:
+                    name = util.restore_text( params.get( 'state_name_%i' % i, '' ) )
+                    desc = util.restore_text( params.get( 'state_desc_%i' % i, '' ) )
+                    sample_state = trans.model.SampleState( name, desc, request_type )
+                    trans.sa_session.add( sample_state )
+                    trans.sa_session.flush()
+                    i += 1
+                else:
+                    break
+        # delete existing associations
+        request_type.delete_external_service_associations( trans )
+        # save the external services associated with this request_type
+        external_services_list = self.__get_external_services( trans, **kwd )
+        for external_service in external_services_list:
+            request_type.add_external_service_association( trans, external_service )
+        return request_type
+
+    def __get_populated_request_type_widgets( self, trans, **kwd ):
+        request_form_definitions = self.get_all_forms( trans,
+                                                       filter=dict( deleted=False ),
+                                                       form_type=trans.model.FormDefinition.types.REQUEST )
+        sample_form_definitions = self.get_all_forms( trans,
+                                                      filter=dict( deleted=False ),
+                                                      form_type=trans.model.FormDefinition.types.SAMPLE )
+        if not request_form_definitions or not sample_form_definitions:
+            return [], []
+        params = util.Params( kwd )
+        request_form_id = params.get( 'request_form_id', 'none' )
+        sample_form_id = params.get( 'sample_form_id', 'none' )
+        request_form_id_select_field = build_select_field( trans,
+                                                           objs=request_form_definitions,
+                                                           label_attr='name',
+                                                           select_field_name='request_form_id',
+                                                           selected_value=request_form_id,
+                                                           refresh_on_change=False )
+        sample_form_id_select_field = build_select_field( trans,
+                                                          objs=sample_form_definitions,
+                                                          label_attr='name',
+                                                          select_field_name='sample_form_id',
+                                                          selected_value=sample_form_id,
+                                                          refresh_on_change=False )
+        rt_info_widgets = [ dict( label='Name',
+                                  widget=TextField( 'name', 40, util.restore_text( params.get( 'name', '' ) ) ) ),
+                            dict( label='Description',
+                                  widget=TextField( 'desc', 40, util.restore_text( params.get( 'desc', '' ) ) ) ),
+                            dict( label='Request form',
+                                  widget=request_form_id_select_field ),
+                            dict( label='Sample form',
+                                  widget=sample_form_id_select_field ) ]
+        # Unsaved sample states being defined for this request type
+        rt_states = []
+        i = 0
+        while True:
+            if 'state_name_%i' % i in kwd:
+                rt_states.append( ( util.restore_text( params.get( 'state_name_%i' % i, ''  ) ),
+                                    util.restore_text( params.get( 'state_desc_%i' % i, ''  ) ) ) )
+                i += 1
+            else:
+                break
+        return rt_info_widgets, rt_states
+
+    @web.expose
+    @web.require_admin
+    def view_request_type( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        request_type_id = kwd.get( 'id', None )
+        try:
+            request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'request_type', request_type_id, 'request type', action='browse_request_types' )
+        # See if we have any associated templates
+        widgets = request_type.get_template_widgets( trans )
+        widget_fields_have_contents = self.widget_fields_have_contents( widgets )
+        return trans.fill_template( '/admin/request_type/view_request_type.mako',
+                                    request_type=request_type,
+                                    widgets=widgets,
+                                    widget_fields_have_contents=widget_fields_have_contents,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def delete_request_type( self, trans, **kwd ):
+        request_type_id = kwd.get( 'id', '' )
+        request_type_id_list = util.listify( request_type_id )
+        for request_type_id in request_type_id_list:
+            try:
+                request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+            except Exception:
+                return invalid_id_redirect( trans, 'request_type', request_type_id, 'request type', action='browse_request_types' )
+            request_type.deleted = True
+            trans.sa_session.add( request_type )
+            trans.sa_session.flush()
+        message = '%i request types have been deleted' % len( request_type_id_list )
+        return trans.response.send_redirect( web.url_for( controller='request_type',
+                                                          action='browse_request_types',
+                                                          message=message,
+                                                          status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def undelete_request_type( self, trans, **kwd ):
+        request_type_id = kwd.get( 'id', '' )
+        request_type_id_list = util.listify( request_type_id )
+        for request_type_id in request_type_id_list:
+            try:
+                request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+            except Exception:
+                return invalid_id_redirect( trans, 'request_type', request_type_id, 'request type', action='browse_request_types' )
+            request_type.deleted = False
+            trans.sa_session.add( request_type )
+            trans.sa_session.flush()
+        status = 'done'
+        message = '%i request types have been undeleted' % len( request_type_id_list )
+        return trans.response.send_redirect( web.url_for( controller='request_type',
+                                                          action='browse_request_types',
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def request_type_permissions( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        request_type_id = kwd.get( 'id', '' )
+        try:
+            request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'request_type', request_type_id, 'request type', action='browse_request_types' )
+        roles = trans.sa_session.query( trans.model.Role ) \
+                                .filter( trans.model.Role.table.c.deleted == false() ) \
+                                .order_by( trans.model.Role.table.c.name )
+        if params.get( 'update_roles_button', False ):
+            permissions = {}
+            for k, v in trans.model.RequestType.permitted_actions.items():
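+                # Each permitted action is paired with the roles chosen in the
+                # corresponding '<action>_in' multi-select on the form.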
+                in_roles = [ trans.sa_session.query( trans.model.Role ).get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ]
+                permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
+            trans.app.security_agent.set_request_type_permissions( request_type, permissions )
+            trans.sa_session.refresh( request_type )
+            message = "Permissions updated for request type '%s'" % request_type.name
+        return trans.fill_template( '/admin/request_type/request_type_permissions.mako',
+                                    request_type=request_type,
+                                    roles=roles,
+                                    status=status,
+                                    message=message )
+
+    @web.expose
+    @web.require_admin
+    def view_form_definition( self, trans, **kwd ):
+        form_definition_id = kwd.get( 'id', None )
+        try:
+            form_definition = trans.sa_session.query( trans.model.FormDefinition ).get( trans.security.decode_id( form_definition_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'request_type', form_definition_id, 'form definition', action='browse_request_types' )
+        return trans.fill_template( '/admin/forms/view_form_definition.mako',
+                                    form_definition=form_definition )
+
+    # ===== Methods for building SelectFields used on various admin_requests forms
+    def __build_external_service_select_field( self, trans, select_field_name, external_service=None ):
+        if external_service:
+            selected_value = trans.security.encode_id( external_service.id )
+        else:
+            selected_value = 'none'
+        all_external_services = trans.sa_session.query( trans.model.ExternalService ).filter( trans.model.ExternalService.table.c.deleted == false() ).all()
+        for e in all_external_services:
+            external_service_type = e.get_external_service_type( trans )
+            e.label = '%s - %s' % ( e.name, external_service_type.name )
+        return build_select_field( trans,
+                                   objs=all_external_services,
+                                   label_attr='label',
+                                   select_field_name=select_field_name,
+                                   selected_value=selected_value,
+                                   refresh_on_change=False )
diff --git a/lib/galaxy/webapps/galaxy/controllers/requests.py b/lib/galaxy/webapps/galaxy/controllers/requests.py
new file mode 100644
index 0000000..e30e3d3
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/requests.py
@@ -0,0 +1,98 @@
+from __future__ import absolute_import
+
+import logging
+
+from sqlalchemy import false
+
+from galaxy.web.base.controller import BaseUIController, web
+from galaxy.web.framework.helpers import grids
+from .requests_common import RequestsGrid
+
+log = logging.getLogger( __name__ )
+
+
+class UserRequestsGrid( RequestsGrid ):
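+    # Copy the inherited operations so the user-specific additions below do
+    # not mutate the shared RequestsGrid.operations list.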
+    operations = [ operation for operation in RequestsGrid.operations ]
+    operations.append( grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: item.is_unsubmitted and not item.deleted ) ) )
+    operations.append( grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: item.is_new and not item.deleted ) ) )
+    operations.append( grids.GridOperation( "Undelete", allow_multiple=True, condition=( lambda item: item.deleted ) ) )
+
+    def apply_query_filter( self, trans, query, **kwd ):
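+        # Users may only see their own sequencing requests.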
+        return query.filter_by( user=trans.user )
+
+
+class Requests( BaseUIController ):
+    request_grid = UserRequestsGrid()
+
+    @web.expose
+    @web.require_login( "view sequencing requests" )
+    def index( self, trans ):
+        return trans.fill_template( "requests/index.mako" )
+
+    @web.expose
+    @web.require_login( "create/submit sequencing requests" )
+    def find_samples_index( self, trans ):
+        return trans.fill_template( "requests/find_samples_index.mako" )
+
+    @web.expose
+    def browse_requests( self, trans, **kwd ):
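+        # Grid operations are dispatched to the shared requests_common
+        # controller, passing cntrller='requests' so the user-facing versions
+        # of the pages are rendered.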
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "edit":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='edit_basic_request_info',
+                                                                  cntrller='requests',
+                                                                  **kwd ) )
+            if operation == "add_samples":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='add_samples',
+                                                                  cntrller='requests',
+                                                                  **kwd ) )
+            if operation == "edit_samples":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='edit_samples',
+                                                                  cntrller='requests',
+                                                                  **kwd ) )
+            if operation == "view_request":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='view_request',
+                                                                  cntrller='requests',
+                                                                  **kwd ) )
+            if operation == "delete":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='delete_request',
+                                                                  cntrller='requests',
+                                                                  **kwd ) )
+            if operation == "undelete":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='undelete_request',
+                                                                  cntrller='requests',
+                                                                  **kwd ) )
+            if operation == "view_request_history":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='view_request_history',
+                                                                  cntrller='requests',
+                                                                  **kwd ) )
+
+        # If there are requests that have been rejected, show a message as a reminder to the user
+        rejected = 0
+        for request in trans.sa_session.query( trans.app.model.Request ) \
+                .filter( trans.app.model.Request.table.c.deleted == false() ) \
+                .filter( trans.app.model.Request.table.c.user_id == trans.user.id ):
+            if request.is_rejected:
+                rejected += 1
+        if rejected:
+            status = 'warning'
+            message = "%d requests (highlighted in red) were rejected.  Click on the request name for details." % rejected
+            kwd[ 'status' ] = status
+            kwd[ 'message' ] = message
+        # Allow the user to create a new request only if they have permission to access a request type.
+        accessible_request_types = trans.app.security_agent.get_accessible_request_types( trans, trans.user )
+        if accessible_request_types:
+            self.request_grid.global_actions = [ grids.GridAction( "Create new request", dict( controller='requests_common',
+                                                                                               action='create_request',
+                                                                                               cntrller='requests' ) ) ]
+        else:
+            self.request_grid.global_actions = []
+        # Render the list view
+        return self.request_grid( trans, **kwd )
diff --git a/lib/galaxy/webapps/galaxy/controllers/requests_admin.py b/lib/galaxy/webapps/galaxy/controllers/requests_admin.py
new file mode 100644
index 0000000..6b2d9c8
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/requests_admin.py
@@ -0,0 +1,473 @@
+from __future__ import absolute_import
+import logging
+import os
+
+from galaxy import model, util
+from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin, web
+from galaxy.web.form_builder import build_select_field
+from galaxy.web.framework.helpers import time_ago, grids
+from .requests_common import RequestsGrid, invalid_id_redirect
+from markupsafe import escape
+
+
+log = logging.getLogger( __name__ )
+
+
+class AdminRequestsGrid( RequestsGrid ):
+    class UserColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, request ):
+            return escape(request.user.email)
+    # Grid definition
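+    # Copy, rather than alias, the inherited column and operation lists before
+    # extending them with admin-only entries.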
+    columns = [ col for col in RequestsGrid.columns ]
+    columns.append( UserColumn( "User",
+                                model_class=model.User,
+                                key='username' ) )
+    operations = [ operation for operation in RequestsGrid.operations ]
+    operations.append( grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted ) ) )
+    operations.append( grids.GridOperation( "Reject", allow_multiple=False, condition=( lambda item: not item.deleted and item.is_submitted ) ) )
+    operations.append( grids.GridOperation( "Delete", allow_multiple=True, condition=( lambda item: not item.deleted ) ) )
+    operations.append( grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ) )
+    global_actions = [
+        grids.GridAction( "Create new request", dict( controller='requests_common',
+                                                      action='create_request',
+                                                      cntrller='requests_admin' ) )
+    ]
+
+
+class DataTransferGrid( grids.Grid ):
+    # Custom column types
+    class NameColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, sample_dataset ):
+            return escape(sample_dataset.name)
+
+    class SizeColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, sample_dataset ):
+            return sample_dataset.size
+
+    class StatusColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, sample_dataset ):
+            return sample_dataset.status
+
+    class ExternalServiceColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, sample_dataset ):
+            try:
+                return escape(sample_dataset.external_service.name)
+            except Exception:
+                return 'None'
+    # Grid definition
+    title = "Sample Datasets"
+    template = "admin/requests/sample_datasets_grid.mako"
+    model_class = model.SampleDataset
+    default_sort_key = "-create_time"
+    num_rows_per_page = 50
+    preserve_state = True
+    use_paging = False
+    columns = [
+        NameColumn( "Name",
+                    link=( lambda item: dict( operation="view", id=item.id ) ),
+                    attach_popup=True,
+                    filterable="advanced" ),
+        SizeColumn( "Size",
+                    filterable="advanced" ),
+        grids.GridColumn( "Last Updated",
+                          key="update_time",
+                          format=time_ago ),
+        ExternalServiceColumn( 'External service',
+                               link=( lambda item: dict( operation="view_external_service", id=item.external_service.id ) ), ),
+        StatusColumn( "Transfer Status",
+                      filterable="advanced",
+                      label_id_prefix='datasetTransferStatus-' ),
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [
+        grids.GridOperation( "Transfer",
+                             allow_multiple=True,
+                             condition=( lambda item: item.status in [ model.SampleDataset.transfer_status.NOT_STARTED ] ) ),
+        grids.GridOperation( "Rename",
+                             allow_multiple=True,
+                             allow_popup=False,
+                             condition=( lambda item: item.status in [ model.SampleDataset.transfer_status.NOT_STARTED ] ) ),
+        grids.GridOperation( "Delete",
+                             allow_multiple=True,
+                             condition=( lambda item: item.status in [ model.SampleDataset.transfer_status.NOT_STARTED ] ) )
+    ]
+
+    def apply_query_filter( self, trans, query, **kwd ):
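+        # Limit the grid to the datasets of the sample being managed, when a
+        # sample_id is present in the request.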
+        sample_id = kwd.get( 'sample_id', None )
+        if not sample_id:
+            return query
+        return query.filter_by( sample_id=trans.security.decode_id( sample_id ) )
+
+
+class RequestsAdmin( BaseUIController, UsesFormDefinitionsMixin ):
+    request_grid = AdminRequestsGrid()
+    datatx_grid = DataTransferGrid()
+
+    @web.expose
+    @web.require_admin
+    def index( self, trans ):
+        return trans.fill_template( "/admin/requests/index.mako" )
+
+    @web.expose
+    @web.require_admin
+    def browse_requests( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "edit":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='edit_basic_request_info',
+                                                                  cntrller='requests_admin',
+                                                                  **kwd ) )
+            if operation == "add_samples":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='add_samples',
+                                                                  cntrller='requests_admin',
+                                                                  **kwd ) )
+            if operation == "edit_samples":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='edit_samples',
+                                                                  cntrller='requests_admin',
+                                                                  **kwd ) )
+            if operation == "view_request":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='view_request',
+                                                                  cntrller='requests_admin',
+                                                                  **kwd ) )
+            if operation == "view_request_history":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='view_request_history',
+                                                                  cntrller='requests_admin',
+                                                                  **kwd ) )
+            if operation == "reject":
+                return self.reject_request( trans, **kwd )
+            if operation == "view_type":
+                return trans.response.send_redirect( web.url_for( controller='request_type',
+                                                                  action='view_request_type',
+                                                                  **kwd ) )
+            if operation == "delete":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='delete_request',
+                                                                  cntrller='requests_admin',
+                                                                  **kwd ) )
+            if operation == "undelete":
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action='undelete_request',
+                                                                  cntrller='requests_admin',
+                                                                  **kwd ) )
+        # Render the list view
+        return self.request_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def reject_request( self, trans, **kwd ):
+        params = util.Params( kwd )
+        request_id = params.get( 'id', '' )
+        status = params.get( 'status', 'done' )
+        message = util.restore_text( params.get( 'message', '' ) )
+        if params.get( 'cancel_reject_button', False ):
+            return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                              action='view_request',
+                                                              cntrller='requests_admin',
+                                                              id=request_id ) )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'requests_admin', request_id )
+        # Validate
+        comment = util.restore_text( params.get( 'comment', '' ) )
+        if not comment:
+            status = 'error'
+            message = 'A reason for rejecting the request is required.'
+            return trans.fill_template( '/admin/requests/reject.mako',
+                                        cntrller='requests_admin',
+                                        request=request,
+                                        status=status,
+                                        message=message )
+        # Create an event with state 'Rejected' for this request
+        event_comment = "Sequencing request marked rejected by %s. Reason: %s " % ( trans.user.email, comment )
+        event = trans.model.RequestEvent( request, request.states.REJECTED, event_comment )
+        trans.sa_session.add( event )
+        trans.sa_session.flush()
+        message = 'Sequencing request (%s) has been rejected.' % request.name
+        return trans.response.send_redirect( web.url_for( controller='requests_admin',
+                                                          action='browse_requests',
+                                                          status=status,
+                                                          message=message,
+                                                          **kwd ) )
+
+    # Data transfer from sequencer/external_service
+    @web.expose
+    @web.require_admin
+    def manage_datasets( self, trans, **kwd ):
+        def handle_error( **kwd ):
+            kwd[ 'status' ] = 'error'
+            return trans.response.send_redirect( web.url_for( controller='requests_admin',
+                                                              action='manage_datasets',
+                                                              **kwd ) )
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        # When this method is called due to a grid operation, the sample ID
+        # will be in the param 'id'.  But when this method is called via a
+        # redirect from another method, the ID will be in 'sample_id'.  So,
+        # check for 'id' if 'sample_id' is not provided.
+        sample_id = params.get( 'sample_id', None )
+        if sample_id is None:
+            sample_id = params.get( 'id', None )
+        try:
+            sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'requests_admin', sample_id, 'sample' )
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            sample_dataset_id = params.get( 'id', None )
+            if not sample_dataset_id:
+                message = 'Select at least 1 dataset to %s.' % operation
+                kwd[ 'message' ] = message
+                del kwd[ 'operation' ]
+                return handle_error( **kwd )
+            id_list = util.listify( sample_dataset_id )
+            selected_sample_datasets = []
+            for sample_dataset_id in id_list:
+                try:
+                    sample_dataset = trans.sa_session.query( trans.model.SampleDataset ).get( trans.security.decode_id( sample_dataset_id ) )
+                except Exception:
+                    return invalid_id_redirect( trans, 'requests_admin', sample_dataset_id, 'sample dataset' )
+                selected_sample_datasets.append( sample_dataset )
+            if operation == "view":
+                return trans.fill_template( '/admin/requests/view_sample_dataset.mako',
+                                            cntrller='requests_admin',
+                                            sample_dataset=selected_sample_datasets[0] )
+            elif operation == "delete":
+                not_deleted = []
+                for sample_dataset in selected_sample_datasets:
+                    # Only delete datasets whose transfer has not yet started.
+                    if sample_dataset in sample_dataset.sample.untransferred_dataset_files:
+                        # Keep a reference to the parent sample for the redirect below.
+                        sample = sample_dataset.sample
+                        trans.sa_session.delete( sample_dataset )
+                        trans.sa_session.flush()
+                    else:
+                        not_deleted.append( sample_dataset.name )
+                message = '%i datasets have been deleted.' % ( len( id_list ) - len( not_deleted ) )
+                if not_deleted:
+                    status = 'warning'
+                    message = message + '  %s could not be deleted because their transfer status is not "Not Started".' % str( not_deleted )
+                return trans.response.send_redirect( web.url_for( controller='requests_admin',
+                                                                  action='manage_datasets',
+                                                                  sample_id=trans.security.encode_id( sample.id ),
+                                                                  status=status,
+                                                                  message=message ) )
+            elif operation == "rename":
+                # Renaming is allowed only while a dataset's transfer status is
+                # NOT_STARTED, i.e. before it has been transferred.  Display an
+                # error if none of the selected datasets are still untransferred.
+                all_datasets_transferred = True
+                for selected_sample_dataset in selected_sample_datasets:
+                    if selected_sample_dataset in selected_sample_dataset.sample.untransferred_dataset_files:
+                        all_datasets_transferred = False
+                        break
+                if all_datasets_transferred:
+                    status = 'error'
+                    message = 'A dataset can be renamed only if its transfer has not yet started.'
+                    return trans.response.send_redirect( web.url_for( controller='requests_admin',
+                                                                      action='manage_datasets',
+                                                                      sample_id=trans.security.encode_id( selected_sample_datasets[0].sample.id ),
+                                                                      status=status,
+                                                                      message=message ) )
+                return trans.fill_template( '/admin/requests/rename_datasets.mako',
+                                            sample=selected_sample_datasets[0].sample,
+                                            id_list=id_list )
+            elif operation == "transfer":
+                self.initiate_data_transfer( trans,
+                                             trans.security.encode_id( selected_sample_datasets[0].sample.id ),
+                                             sample_datasets=selected_sample_datasets )
+            elif operation == "view_external_service":
+                return trans.response.send_redirect( web.url_for( controller='external_service',
+                                                                  action='view_external_service',
+                                                                  **kwd ) )
+
+        # Render the grid view
+        request_id = trans.security.encode_id( sample.request.id )
+        library_id = trans.security.encode_id( sample.library.id )
+        self.datatx_grid.title = 'Manage "%s" datasets' % sample.name
+        self.datatx_grid.global_actions = [ grids.GridAction( "Browse target data library",
+                                                              dict( controller='library_common',
+                                                                    action='browse_library',
+                                                                    cntrller='library_admin',
+                                                                    id=library_id ) ),
+                                            grids.GridAction( "Browse this request",
+                                                              dict( controller='requests_common',
+                                                                    action='view_request',
+                                                                    cntrller='requests_admin',
+                                                                    id=request_id ) ) ]
+        return self.datatx_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def rename_datasets( self, trans, **kwd ):
+        # This method is called from the DataTransferGrid when a user is renaming 1 or more
+        # SampleDatasets.
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        sample_id = kwd.get( 'sample_id', None )
+        try:
+            sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'requests_admin', sample_id, 'sample' )
+        # id_list is list of SampleDataset ids, which is a subset of all
+        # of the SampleDatasets associated with the Sample.  The user may
+        # or may not have selected all of the SampleDatasets for renaming.
+        id_list = util.listify( kwd.get( 'id_list', [] ) )
+        # Get all of the SampleDatasets
+        sample_datasets = []
+        for sample_dataset_id in id_list:
+            sample_dataset = trans.sa_session.query( trans.app.model.SampleDataset ).get( trans.security.decode_id( sample_dataset_id ) )
+            sample_datasets.append( sample_dataset )
+        if params.get( 'rename_datasets_button', False ):
+            incorrect_dataset_names = []
+            for sample_dataset in sample_datasets:
+                encoded_id = trans.security.encode_id( sample_dataset.id )
+                selected_option = util.restore_text( params.get( 'rename_datasets_for_sample_%s' % encoded_id, '' ) )
+                new_name = util.restore_text( params.get( 'new_name_%s' % encoded_id, '' ) )
+                if not new_name:
+                    incorrect_dataset_names.append( sample_dataset.name )
+                    continue
+                new_name = util.sanitize_for_filename( new_name )
+                if selected_option == 'none':
+                    sample_dataset.name = new_name
+                else:
+                    sample_dataset.name = '%s_%s' % ( selected_option, new_name )
+                trans.sa_session.add( sample_dataset )
+                trans.sa_session.flush()
+            if len( sample_datasets ) == len( incorrect_dataset_names ):
+                status = 'error'
+                message = 'No datasets were renamed because no new names were entered.'
+            elif len( incorrect_dataset_names ):
+                status = 'done'
+                message = 'Changes saved successfully. The following datasets were not renamed because no new name was entered: %s.' % str( incorrect_dataset_names )
+            else:
+                message = 'Changes saved successfully.'
+            return trans.fill_template( '/admin/requests/rename_datasets.mako',
+                                        sample=sample,
+                                        id_list=id_list,
+                                        message=message,
+                                        status=status )
+        return trans.response.send_redirect( web.url_for( controller='requests_admin',
+                                                          action='manage_datasets',
+                                                          sample_id=sample_id ) )
+
+    def __ensure_library_add_permission( self, trans, target_library, target_folder ):
+        """
+        Ensures the current admin user has ADD_LIBRARY permission on the target data library and folder.
+        """
+        current_user_roles = trans.user.all_roles()
+        current_user_private_role = trans.app.security_agent.get_private_user_role( trans.user )
+        flush_needed = False
+        if not trans.app.security_agent.can_add_library_item( current_user_roles, target_library ):
+            lp = trans.model.LibraryPermissions( trans.app.security_agent.permitted_actions.LIBRARY_ADD.action,
+                                                 target_library,
+                                                 current_user_private_role )
+            trans.sa_session.add( lp )
+            flush_needed = True
+        if not trans.app.security_agent.can_add_library_item( current_user_roles, target_folder ):
+            lfp = trans.model.LibraryFolderPermissions( trans.app.security_agent.permitted_actions.LIBRARY_ADD.action,
+                                                        target_folder,
+                                                        current_user_private_role )
+            trans.sa_session.add( lfp )
+            flush_needed = True
+        if flush_needed:
+            trans.sa_session.flush()
+
+    @web.expose
+    @web.require_admin
+    def initiate_data_transfer( self, trans, sample_id, sample_datasets=None, sample_dataset_id='' ):
+        # Initiate the transfer of the datasets from the external service to the target Galaxy data library.
+        # The admin user must have LIBRARY_ADD permission for the target library and folder.
+        # Avoid a mutable default argument: the list below is appended to, so a
+        # shared default would leak sample datasets across requests.
+        if sample_datasets is None:
+            sample_datasets = []
+        try:
+            sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, 'requests_admin', sample_id, 'sample' )
+        message = ""
+        status = "done"
+        # Make sure the current admin user has LIBRARY_ADD permission on the target data library and folder.
+        self.__ensure_library_add_permission( trans, sample.library, sample.folder )
+        if sample_dataset_id and not sample_datasets:
+            # Either a list of SampleDataset objects or a comma-separated string of
+            # encoded SampleDataset ids can be received.  If the latter, parse the
+            # sample_dataset_id string to build the list of sample_datasets.
+            id_list = util.listify( sample_dataset_id )
+            for sample_dataset_id in id_list:
+                sample_dataset = trans.sa_session.query( trans.model.SampleDataset ).get( trans.security.decode_id( sample_dataset_id ) )
+                sample_datasets.append( sample_dataset )
+        if trans.app.config.enable_beta_job_managers:
+            # For now, assume that all SampleDatasets use the same external service ( this may not be optimal ).
+            if sample_datasets:
+                external_service_type_id = sample_datasets[0].external_service.external_service_type_id
+                # Here external_service_type_id will be something like '454_life_sciences'
+                external_service = sample.request.type.get_external_service( external_service_type_id )
+                external_service_type = external_service.get_external_service_type( trans )
+                external_service.load_data_transfer_settings( trans )
+                # For now only scp is supported.
+                scp_configs = external_service.data_transfer[ trans.model.ExternalService.data_transfer_protocol.SCP ]
+                if not scp_configs[ 'automatic_transfer' ]:
+                    deferred_plugin = 'ManualDataTransferPlugin'
+                else:
+                    raise Exception( "Automatic data transfer using scp is not yet supported." )
+            trans.app.job_manager.deferred_job_queue.plugins[ deferred_plugin ].create_job( trans,
+                                                                                            sample=sample,
+                                                                                            sample_datasets=sample_datasets,
+                                                                                            external_service=external_service,
+                                                                                            external_service_type=external_service_type )
+        else:
+            message = "Message queue transfer is no longer supported, please set enable_beta_job_managers = True in galaxy.ini"
+            status = "error"
+        return trans.response.send_redirect( web.url_for( controller='requests_admin',
+                                                          action='manage_datasets',
+                                                          sample_id=trans.security.encode_id( sample.id ),
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    def update_sample_dataset_status( self, trans, cntrller, sample_dataset_ids, new_status, error_msg=None ):
+        # check if the new status is a valid transfer status
+        possible_status_list = [ v[1] for v in trans.app.model.SampleDataset.transfer_status.items() ]
+        if new_status not in possible_status_list:
+            trans.response.status = 400
+            return 400, "The requested transfer status ( %s ) is not a valid transfer status." % new_status
+        for id in util.listify( sample_dataset_ids ):
+            try:
+                sd_id = trans.security.decode_id( id )
+                sample_dataset = trans.sa_session.query( trans.app.model.SampleDataset ).get( sd_id )
+            except Exception:
+                trans.response.status = 400
+                return 400, "Invalid sample dataset id ( %s ) specified." % str( id )
+            sample_dataset.status = new_status
+            sample_dataset.error_msg = error_msg
+            trans.sa_session.add( sample_dataset )
+            trans.sa_session.flush()
+        return 200, 'Done'
+
+    # Methods for building SelectFields used on various admin_requests forms
+    def __build_sample_id_select_field( self, trans, request, selected_value ):
+        return build_select_field( trans, request.samples, 'name', 'sample_id', selected_value=selected_value, refresh_on_change=False )
+
+
+# Methods for building SelectFields used on various admin_requests forms - used outside this controller =====
+def build_rename_datasets_for_sample_select_field( trans, sample_dataset, selected_value='none' ):
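+    # Offer each directory component of the dataset's file path as an optional
+    # prefix for the new dataset name ( see rename_datasets above ).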
+    options = []
+    for option in sample_dataset.file_path.split( os.sep )[ :-1 ]:
+        option = option.strip()
+        if option:
+            options.append( option )
+    return build_select_field( trans,
+                               objs=options,
+                               label_attr='self',
+                               select_field_name='rename_datasets_for_sample_%s' % trans.security.encode_id( sample_dataset.id ),
+                               selected_value=selected_value,
+                               refresh_on_change=False )
diff --git a/lib/galaxy/webapps/galaxy/controllers/requests_common.py b/lib/galaxy/webapps/galaxy/controllers/requests_common.py
new file mode 100644
index 0000000..2989f65
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/requests_common.py
@@ -0,0 +1,1960 @@
+import csv
+import logging
+import re
+
+from sqlalchemy import and_, false, func, select
+from markupsafe import escape
+
+from galaxy import model, util, web
+from galaxy.util import unicodify
+from galaxy.security.validate_user_input import validate_email
+from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin
+from galaxy.web.form_builder import build_select_field, CheckboxField, SelectField, TextField
+from galaxy.web.framework.helpers import grids, iff, time_ago
+
+log = logging.getLogger( __name__ )
+
+
+class RequestsGrid( grids.Grid ):
+    # Custom column types
+    class NameColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, request ):
+            return escape(request.name)
+
+    class DescriptionColumn( grids.TextColumn ):
+        def get_value(self, trans, grid, request):
+            return escape(request.desc)
+
+    class SamplesColumn( grids.GridColumn ):
+        def get_value(self, trans, grid, request):
+            return str( len( request.samples ) )
+
+    class TypeColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, request ):
+            return request.type.name
+
+    class StateColumn( grids.StateColumn ):
+        def get_value(self, trans, grid, request ):
+            state = request.state
+            if state == request.states.REJECTED:
+                state_color = 'error'
+            elif state == request.states.NEW:
+                state_color = 'new'
+            elif state == request.states.SUBMITTED:
+                state_color = 'running'
+            elif state == request.states.COMPLETE:
+                state_color = 'ok'
+            else:
+                state_color = state
+            return '<div class="count-box state-color-%s">%s</div>' % ( state_color, state )
+
+        def filter( self, trans, user, query, column_filter ):
+            """ Modify query to filter request by state. """
+            if column_filter == "All":
+                return query
+            if column_filter:
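+                # A request's current state is the state of its most recent
+                # event, so restrict the join to the latest RequestEvent per
+                # request via the max( id ) subselect.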
+                return query.join( model.RequestEvent.table ) \
+                            .filter( self.model_class.table.c.id == model.RequestEvent.table.c.request_id ) \
+                            .filter( model.RequestEvent.table.c.state == column_filter ) \
+                            .filter( model.RequestEvent.table.c.id.in_( select( columns=[ func.max( model.RequestEvent.table.c.id ) ],
+                                                                                from_obj=model.RequestEvent.table,
+                                                                                group_by=model.RequestEvent.table.c.request_id ) ) )
+
+    # Grid definition
+    title = "Sequencing Requests"
+    template = "requests/grid.mako"
+    model_class = model.Request
+    default_sort_key = "-update_time"
+    num_rows_per_page = 50
+    use_paging = True
+    default_filter = dict( state="All", deleted="False" )
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: dict( operation="view_request", id=item.id ) ),
+                    attach_popup=True,
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           key='desc',
+                           filterable="advanced" ),
+        SamplesColumn( "Samples",
+                       link=( lambda item: iff( item.deleted, None, dict( operation="edit_samples", id=item.id ) ) ) ),
+        TypeColumn( "Type",
+                    link=( lambda item: iff( item.deleted, None, dict( operation="view_type", id=item.type.id ) ) ) ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+        grids.DeletedColumn( "Deleted",
+                             key="deleted",
+                             visible=False,
+                             filterable="advanced" ),
+        StateColumn( "State",
+                     key='state',
+                     filterable="advanced",
+                     link=( lambda item: iff( item.deleted, None, dict( operation="view_request_history", id=item.id ) ) )
+                     )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [
+        grids.GridOperation( "Submit",
+                             allow_multiple=False,
+                             condition=( lambda item: not item.deleted and item.is_unsubmitted and item.samples ),
+                             confirm="Samples cannot be added to this request after it is submitted. Click OK to submit."  )
+    ]
+
+
+class RequestsCommon( BaseUIController, UsesFormDefinitionsMixin ):
+    @web.json
+    def sample_state_updates( self, trans, ids=None, states=None ):
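+        # ids and states are parallel comma-separated lists of sample ids and
+        # the states currently shown in the browser; only samples whose state
+        # has since changed are returned.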
+        # Avoid caching
+        trans.response.headers['Pragma'] = 'no-cache'
+        trans.response.headers['Expires'] = '0'
+        # Create new HTML for any that have changed
+        rval = {}
+        if ids is not None and states is not None:
+            ids = map( int, ids.split( "," ) )
+            states = states.split( "," )
+            for tup in zip( ids, states ):
+                id, state = tup
+                if id and state:
+                    sample = trans.sa_session.query( self.app.model.Sample ).get( id )
+                    if sample.state.name != state:
+                        rval[ id ] = { "state": sample.state.name,
+                                       "html_state": unicodify( trans.fill_template( "requests/common/sample_state.mako",
+                                                                                     sample=sample),
+                                                                'utf-8' ) }
+        return rval
+
+    @web.json
+    def sample_datasets_updates( self, trans, ids=None, datasets=None ):
+        # Avoid caching
+        trans.response.headers['Pragma'] = 'no-cache'
+        trans.response.headers['Expires'] = '0'
+        # Create new HTML for any that have changed
+        rval = {}
+        if ids is not None and datasets is not None:
+            ids = map( int, ids.split( "," ) )
+            number_of_datasets_list = map( int, datasets.split( "," ) )
+            for tup in zip( ids, number_of_datasets_list ):
+                id, number_of_datasets = tup
+                if id and number_of_datasets:
+                    sample = trans.sa_session.query( self.app.model.Sample ).get( id )
+                    if len( sample.datasets ) != number_of_datasets:
+                        rval[ id ] = { "datasets": len( sample.datasets ),
+                                       "html_datasets": unicodify( trans.fill_template( "requests/common/sample_datasets.mako",
+                                                                                        sample=sample),
+                                                                   'utf-8' ) }
+        return rval
+
+    @web.json
+    def dataset_transfer_status_updates( self, trans, ids=None, transfer_status_list=None ):
+        # Avoid caching
+        trans.response.headers['Pragma'] = 'no-cache'
+        trans.response.headers['Expires'] = '0'
+        # Create new HTML for any that have changed
+        rval = {}
+        if ids is not None and transfer_status_list is not None:
+            ids = ids.split( "," )
+            transfer_status_list = transfer_status_list.split( "," )
+            for tup in zip( ids, transfer_status_list ):
+                id, transfer_status = tup
+                if id and transfer_status:
+                    sample_dataset = trans.sa_session.query( self.app.model.SampleDataset ).get( trans.security.decode_id( id ) )
+                    if sample_dataset.status != transfer_status:
+                        rval[ id ] = { "status": sample_dataset.status,
+                                       "html_status": unicodify( trans.fill_template( "requests/common/sample_dataset_transfer_status.mako",
+                                                                                      sample_dataset=sample_dataset),
+                                                                 'utf-8' ) }
+        return rval
+
+    @web.expose
+    @web.require_login( "create sequencing requests" )
+    def create_request( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        request_type_id = params.get( 'request_type_id', 'none' )
+        if request_type_id != 'none':
+            request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+        else:
+            request_type = None
+        # user_id will not be 'none' if an admin user is submitting this request on behalf of another user
+        # and they selected that user's id from the user_id SelectField.
+        user_id_encoded = True
+        user_id = params.get( 'user_id', 'none' )
+        if user_id != 'none':
+            try:
+                user = trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( user_id ) )
+            except TypeError:
+                # We must have an email address rather than an encoded user id
+                # This is because the galaxy.base.js creates a search+select box
+                # when there are more than 20 items in a SelectField.
+                user = trans.sa_session.query( trans.model.User ) \
+                                       .filter( trans.model.User.table.c.email == util.restore_text( user_id ) ) \
+                                       .first()
+                user_id_encoded = False
+        elif not is_admin:
+            user = trans.user
+        else:
+            user = None
+        if params.get( 'create_request_button', False ) or params.get( 'add_sample_button', False ):
+            name = util.restore_text( params.get( 'name', '' ) )
+            if is_admin and user_id == 'none':
+                message = 'Select the user on behalf of whom you are submitting this request.'
+                status = 'error'
+            elif user is None:
+                message = 'Invalid user ID (%s)' % str(user_id)
+                status = 'error'
+            # when creating a request from the user perspective, check if the
+            # user has access permission to this request_type
+            elif cntrller == 'requests' and not trans.app.security_agent.can_access_request_type( user.all_roles(), request_type ):
+                message = '%s does not have access permission to the "%s" request type.' % ( user.email, request_type.name )
+                status = 'error'
+            elif not name:
+                message = 'Enter the name of the request.'
+                status = 'error'
+            else:
+                request = self.__save_request( trans, cntrller, **kwd )
+                message = 'The sequencing request has been created.'
+                if params.get( 'create_request_button', False ):
+                    return trans.response.send_redirect( web.url_for( controller=cntrller,
+                                                                      action='browse_requests',
+                                                                      message=message,
+                                                                      status='done' ) )
+                elif params.get( 'add_sample_button', False ):
+                    request_id = trans.security.encode_id( request.id )
+                    return self.add_sample( trans, cntrller, request_id, **kwd )
+        request_type_select_field = self.__build_request_type_id_select_field( trans, selected_value=request_type_id )
+        # Widgets to be rendered on the request form
+        widgets = []
+        if request_type is not None or status == 'error':
+            # Either the user selected a request_type or an error exists on the form.
+            widgets.append( dict( label='Name of the Experiment',
+                                  widget=TextField( 'name', 40, util.restore_text( params.get( 'name', '' ) ) ),
+                                  helptext='(Required)' ) )
+            widgets.append( dict( label='Description',
+                                  widget=TextField( 'desc', 40, util.restore_text( params.get( 'desc', '' ) ) ),
+                                  helptext='(Optional)' ) )
+            if request_type is not None:
+                widgets += request_type.request_form.get_widgets( user, **kwd )
+        # In case there is an error on the form, make sure to populate widget fields with anything the user
+        # may have already entered.
+        widgets = self.populate_widgets_from_kwd( trans, widgets, **kwd )
+        if request_type is not None or status == 'error':
+            # Either the user selected a request_type or an error exists on the form.
+            if is_admin:
+                if not user_id_encoded and user:
+                    selected_user_id = trans.security.encode_id( user.id )
+                else:
+                    selected_user_id = user_id
+                user_widget = dict( label='Select user',
+                                    widget=self.__build_user_id_select_field( trans, selected_value=selected_user_id ),
+                                    helptext='Submit the request on behalf of the selected user (Required)')
+                widgets = [ user_widget ] + widgets
+        return trans.fill_template( '/requests/common/create_request.mako',
+                                    cntrller=cntrller,
+                                    request_type_select_field=request_type_select_field,
+                                    request_type_select_field_selected=request_type_id,
+                                    widgets=widgets,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "view request" )
+    def view_request( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        request_id = params.get( 'id', None )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        # Build a list of sample widgets (based on the attributes of each sample) for display.
+        displayable_sample_widgets = self.__get_sample_widgets( trans, request, request.samples, **kwd )
+        request_widgets = self.__get_request_widgets( trans, request.id )
+        return trans.fill_template( '/requests/common/view_request.mako',
+                                    cntrller=cntrller,
+                                    request=request,
+                                    request_widgets=request_widgets,
+                                    displayable_sample_widgets=displayable_sample_widgets,
+                                    status=status,
+                                    message=message )
+
+    @web.expose
+    @web.require_login( "edit sequencing requests" )
+    def edit_basic_request_info( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        request_id = params.get( 'id', None )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        name = util.restore_text( params.get( 'name', '' ) )
+        if params.get( 'edit_basic_request_info_button', False ):
+            if not name:
+                status = 'error'
+                message = 'Enter the name of the request'
+            else:
+                request = self.__save_request( trans, cntrller, request=request, **kwd )
+                message = 'The changes made to request (%s) have been saved.' % request.name
+        # Widgets to be rendered on the request form
+        widgets = []
+        widgets.append( dict( label='Name',
+                              widget=TextField( 'name', 40, request.name ),
+                              helptext='(Required)' ) )
+        widgets.append( dict( label='Description',
+                              widget=TextField( 'desc', 40, request.desc ),
+                              helptext='(Optional)' ) )
+        widgets = widgets + request.type.request_form.get_widgets( request.user, request.values.content, **kwd )
+        # In case there is an error on the form, make sure to populate widget fields with anything the user
+        # may have already entered.
+        widgets = self.populate_widgets_from_kwd( trans, widgets, **kwd )
+        return trans.fill_template( 'requests/common/edit_basic_request_info.mako',
+                                    cntrller=cntrller,
+                                    request_type=request.type,
+                                    request=request,
+                                    widgets=widgets,
+                                    message=message,
+                                    status=status )
+
+    def __save_request( self, trans, cntrller, request=None, **kwd ):
+        """
+        Saves changes to an existing request, or creates a new
+        request if the received request is None.
+        """
+        params = util.Params( kwd )
+        request_type_id = params.get( 'request_type_id', None )
+        is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+        if request is None:
+            # We're creating a new request, so we need the associated request_type
+            request_type = trans.sa_session.query( trans.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+            if is_admin:
+                # The admin user is creating a request on behalf of another user
+                user_id = params.get( 'user_id', '' )
+                user = trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( user_id ) )
+            else:
+                user = trans.user
+        else:
+            # We're saving changes to an existing request
+            user = request.user
+            request_type = request.type
+        name = util.restore_text( params.get( 'name', '' ) )
+        desc = util.restore_text( params.get( 'desc', '' ) )
+        notification = dict( email=[ user.email ], sample_states=[ request_type.final_sample_state.id ], body='', subject='' )
+        values = self.get_form_values( trans, user, request_type.request_form, **kwd )
+        if request is None:
+            form_values = trans.model.FormValues( request_type.request_form, values )
+            trans.sa_session.add( form_values )
+            # We're creating a new request
+            request = trans.model.Request( name, desc, request_type, user, form_values, notification )
+            trans.sa_session.add( request )
+            trans.sa_session.flush()
+            trans.sa_session.refresh( request )
+            # Create an event with state 'New' for this new request
+            comment = "Sequencing request created by %s" % trans.user.email
+            if request.user != trans.user:
+                comment += " on behalf of %s." % request.user.email
+            event = trans.model.RequestEvent( request, request.states.NEW, comment )
+            trans.sa_session.add( event )
+            trans.sa_session.flush()
+        else:
+            # We're saving changes to an existing request
+            request.name = name
+            request.desc = desc
+            request.type = request_type
+            request.user = user
+            request.notification = notification
+            request.values.content = values
+            trans.sa_session.add( request )
+            trans.sa_session.add( request.values )
+            trans.sa_session.flush()
+        return request
+
+    @web.expose
+    @web.require_login( "submit sequencing requests" )
+    def submit_request( self, trans, cntrller, **kwd ):
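+        """
+        Validate the request, record a SUBMITTED event for it, and move each
+        of its samples into the first SampleState configured for the
+        request's RequestType.
+        """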
+        params = util.Params( kwd )
+        request_id = params.get( 'id', None )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = util.restore_text( params.get( 'status', 'done' ) )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        ok = True
+        if not request.samples:
+            message = 'Add at least 1 sample to this request before submitting.'
+            ok = False
+        if ok:
+            message = self.__validate_request( trans, cntrller, request )
+        if message or not ok:
+            return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                              action='edit_basic_request_info',
+                                                              cntrller=cntrller,
+                                                              id=request_id,
+                                                              status='error',
+                                                              message=message ) )
+        # Change the request state to 'Submitted'
+        comment = "Sequencing request submitted by %s" % trans.user.email
+        if request.user != trans.user:
+            comment += " on behalf of %s." % request.user.email
+        event = trans.model.RequestEvent( request, request.states.SUBMITTED, comment )
+        trans.sa_session.add( event )
+        trans.sa_session.flush()
+        # Change the state of each of the samples of this request.
+        # request.type.states is the list of SampleState objects configured
+        # by the admin for this RequestType.
+        # Samples will not have an associated SampleState until the request is submitted, at
+        # which time all of the request's samples are set to the first SampleState configured
+        # for the request's RequestType.
+        initial_sample_state_after_request_submitted = request.type.states[0]
+        for sample in request.samples:
+            event_comment = 'Sequencing request submitted and sample state set to %s.' % initial_sample_state_after_request_submitted.name
+            event = trans.model.SampleEvent( sample,
+                                             initial_sample_state_after_request_submitted,
+                                             event_comment )
+            trans.sa_session.add( event )
+        trans.sa_session.add( request )
+        trans.sa_session.flush()
+        request.send_email_notification( trans, initial_sample_state_after_request_submitted )
+        message = 'The sequencing request has been submitted.'
+        # show the request page after submitting the request
+        return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                          action='view_request',
+                                                          cntrller=cntrller,
+                                                          id=request_id,
+                                                          status=status,
+                                                          message=message ) )
+
+    @web.expose
+    @web.require_login( "edit samples" )
+    def edit_samples( self, trans, cntrller, **kwd ):
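+        """
+        Render the sample editing form for a request and, on save, hand the
+        (possibly partially selected) sample widgets off to __save_samples.
+        """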
+        params = util.Params( kwd )
+        is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        request_id = params.get( 'id', None )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        if params.get( 'cancel_changes_button', False ):
+            return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                              action='edit_samples',
+                                                              cntrller=cntrller,
+                                                              id=request_id ) )
+        libraries = trans.app.security_agent.get_accessible_libraries( trans, request.user )
+        # Build a list of sample widgets (based on the attributes of each sample) for display.
+        displayable_sample_widgets = self.__get_sample_widgets( trans, request, request.samples, **kwd )
+        encoded_selected_sample_ids = self.__get_encoded_selected_sample_ids( trans, request, **kwd )
+        sample_operation = params.get( 'sample_operation', 'none' )
+
+        def handle_error( **kwd ):
+            kwd[ 'status' ] = 'error'
+            return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                              action='edit_samples',
+                                                              cntrller=cntrller,
+                                                              **kwd ) )
+        if not encoded_selected_sample_ids and sample_operation != 'none':
+            # Probably occurred due to refresh_on_change...is there a better approach?
+            kwd[ 'sample_operation' ] = 'none'
+            message = 'Select at least one sample before selecting an operation.'
+            kwd[ 'message' ] = message
+            return handle_error( **kwd )
+        if params.get( 'save_samples_button', False ):
+            if encoded_selected_sample_ids:
+                # We need the list of displayable_sample_widgets to include the same number
+                # of objects that request.samples has so that we can enumerate over each
+                # list without problems.  We have to be careful here since the user may have
+                # used the multi-select check boxes when editing sample widgets, but didn't
+                # select all of them.  We'll first get the set of samples corresponding to the
+                # checked sample ids.
+                samples = []
+                selected_samples = []
+                for encoded_sample_id in encoded_selected_sample_ids:
+                    sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( encoded_sample_id ) )
+                    selected_samples.append( sample )
+                # Now build the list of samples, inserting None for samples that have not been checked.
+                for sample in request.samples:
+                    if sample in selected_samples:
+                        samples.append( sample )
+                    else:
+                        samples.append( None )
+                # The __save_samples method requires sample_widgets, not sample objects, so we'll get what we
+                # need by calling __get_sample_widgets().  However, we need to take care here because __get_sample_widgets()
+                # is used to populate the sample widget dicts from kwd, and the method assumes that a None object in the
+                # received list of samples should be populated from the db.  Since we're just re-using the method here to
+                # change our list of samples into a list of sample widgets, we'll need to make sure to keep track of our
+                # None objects.
+                sample_widgets = list( samples )
+                sample_widgets = self.__get_sample_widgets( trans, request, sample_widgets, **kwd )
+                # Replace each sample widget dict with a None object if necessary
+                for index, obj in enumerate( samples ):
+                    if obj is None:
+                        sample_widgets[ index ] = None
+            else:
+                sample_widgets = displayable_sample_widgets
+            return self.__save_samples( trans, cntrller, request, sample_widgets, saving_new_samples=False, **kwd )
+        request_widgets = self.__get_request_widgets( trans, request.id )
+        sample_copy_select_field = self.__build_copy_sample_select_field( trans, displayable_sample_widgets )
+        libraries_select_field, folders_select_field = self.__build_library_and_folder_select_fields( trans,
+                                                                                                      request.user,
+                                                                                                      'sample_operation',
+                                                                                                      libraries,
+                                                                                                      None,
+                                                                                                      **kwd )
+        sample_operation_select_field = self.__build_sample_operation_select_field( trans, is_admin, request, sample_operation )
+        sample_state_id = params.get( 'sample_state_id', None )
+        sample_state_id_select_field = self.__build_sample_state_id_select_field( trans, request, sample_state_id )
+        return trans.fill_template( '/requests/common/edit_samples.mako',
+                                    cntrller=cntrller,
+                                    request=request,
+                                    encoded_selected_sample_ids=encoded_selected_sample_ids,
+                                    request_widgets=request_widgets,
+                                    displayable_sample_widgets=displayable_sample_widgets,
+                                    sample_copy_select_field=sample_copy_select_field,
+                                    libraries=libraries,
+                                    sample_operation_select_field=sample_operation_select_field,
+                                    libraries_select_field=libraries_select_field,
+                                    folders_select_field=folders_select_field,
+                                    sample_state_id_select_field=sample_state_id_select_field,
+                                    status=status,
+                                    message=message )
+
+    @web.expose
+    def update_sample_state( self, trans, cntrller, sample_ids, new_state, comment=None ):
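+        """
+        Record a SampleEvent moving each of the given samples to new_state.
+        API callers receive a ( 200, 'Done' ) tuple; other callers get no
+        explicit response.
+        """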
+        for sample_id in sample_ids:
+            try:
+                sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
+            except Exception:
+                if cntrller == 'api':
+                    trans.response.status = 400
+                    return "Invalid sample id ( %s ) specified, unable to decode." % str( sample_id )
+                else:
+                    return invalid_id_redirect( trans, cntrller, sample_id, 'sample' )
+            if comment is None:
+                comment = 'Sample state set to %s' % str( new_state )
+            event = trans.model.SampleEvent( sample, new_state, comment )
+            trans.sa_session.add( event )
+            trans.sa_session.flush()
+        if cntrller == 'api':
+            return 200, 'Done'
+
+    @web.expose
+    @web.require_login( "delete sequencing requests" )
+    def delete_request( self, trans, cntrller, **kwd ):
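+        """
+        Mark the given requests and their samples as deleted.  A request that
+        has been submitted can only be deleted by an admin.
+        """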
+        params = util.Params( kwd )
+        id_list = util.listify( kwd.get( 'id', '' ) )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = util.restore_text( params.get( 'status', 'done' ) )
+        is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+        num_deleted = 0
+        not_deleted = []
+        for id in id_list:
+            ok_for_now = True
+            try:
+                # This block will handle bots that do not send valid request ids.
+                request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( id ) )
+            except Exception:
+                ok_for_now = False
+            if ok_for_now:
+                # A non-admin user may delete a request only if it has not yet been submitted.
+                if is_admin or not request.is_submitted:
+                    request.deleted = True
+                    trans.sa_session.add( request )
+                    # Delete all the samples belonging to this request
+                    for s in request.samples:
+                        s.deleted = True
+                        trans.sa_session.add( s )
+                    comment = "Sequencing request marked deleted by %s." % trans.user.email
+                    # There is no DELETED state for a request, so keep the current request state
+                    event = trans.model.RequestEvent( request, request.state, comment )
+                    trans.sa_session.add( event )
+                    trans.sa_session.flush()
+                    num_deleted += 1
+                else:
+                    not_deleted.append( request )
+        message += '%i requests have been deleted.' % num_deleted
+        if not_deleted:
+            message += '  Contact the administrator to delete the following submitted requests: '
+            for request in not_deleted:
+                message += '%s, ' % request.name
+            message = message.rstrip( ', ' )
+        return trans.response.send_redirect( web.url_for( controller=cntrller,
+                                                          action='browse_requests',
+                                                          status=status,
+                                                          message=message ) )
+
+    @web.expose
+    @web.require_login( "undelete sequencing requests" )
+    def undelete_request( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        id_list = util.listify( kwd.get( 'id', '' ) )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = util.restore_text( params.get( 'status', 'done' ) )
+        num_undeleted = 0
+        for id in id_list:
+            ok_for_now = True
+            try:
+                # This block will handle bots that do not send valid request ids.
+                request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( id ) )
+            except Exception:
+                ok_for_now = False
+            if ok_for_now:
+                request.deleted = False
+                trans.sa_session.add( request )
+                # Undelete all the samples belonging to this request
+                for s in request.samples:
+                    s.deleted = False
+                    trans.sa_session.add( s )
+                comment = "Sequencing request marked undeleted by %s." % trans.user.email
+                event = trans.model.RequestEvent( request, request.state, comment )
+                trans.sa_session.add( event )
+                trans.sa_session.flush()
+                num_undeleted += 1
+        message += '%i requests have been undeleted.' % num_undeleted
+        return trans.response.send_redirect( web.url_for( controller=cntrller,
+                                                          action='browse_requests',
+                                                          status=status,
+                                                          message=message ) )
+
+    @web.expose
+    @web.require_login( "sequencing request history" )
+    def view_request_history( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        request_id = params.get( 'id', None )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        return trans.fill_template( '/requests/common/view_request_history.mako',
+                                    cntrller=cntrller,
+                                    request=request )
+
+    @web.expose
+    @web.require_login( "edit email notification settings" )
+    def edit_email_settings( self, trans, cntrller, **kwd ):
+        """
+        Allow for changing the email notification settings where email is sent to a list of users
+        whenever the request state changes to one selected for notification.
+        """
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        request_id = params.get( 'id', None )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        email_address = CheckboxField.is_checked( params.get( 'email_address', '' ) )
+        additional_email_addresses = params.get( 'additional_email_addresses', '' )
+        # Get the list of checked sample state CheckBoxFields
+        checked_sample_states = []
+        for sample_state in request.type.states:
+            if CheckboxField.is_checked( params.get( 'sample_state_%i' % sample_state.id, '' ) ):
+                checked_sample_states.append( sample_state.id )
+        if additional_email_addresses:
+            additional_email_addresses = additional_email_addresses.split( '\r\n' )
+        if email_address or additional_email_addresses:
+            # The user added 1 or more email addresses
+            email_addresses = []
+            if email_address:
+                email_addresses.append( request.user.email )
+            for email in additional_email_addresses:
+                email_addresses.append( util.restore_text( email ) )
+            # Make sure the email addresses are valid.
+            err_msg = ''
+            for email in email_addresses:
+                err_msg += validate_email( trans, email, check_dup=False )
+            if err_msg:
+                status = 'error'
+                message += err_msg
+            else:
+                request.notification = dict( email=email_addresses,
+                                             sample_states=checked_sample_states,
+                                             body='',
+                                             subject='' )
+        else:
+            # The user may have eliminated email addresses that were previously set
+            request.notification = None
+            if checked_sample_states:
+                message = 'All sample states have been unchecked since no email addresses have been selected or entered.  '
+        trans.sa_session.add( request )
+        trans.sa_session.flush()
+        trans.sa_session.refresh( request )
+        message += 'The changes made to the email notification settings have been saved.'
+        return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                          action='edit_basic_request_info',
+                                                          cntrller=cntrller,
+                                                          id=request_id,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_login( "update sequencing request state" )
+    def update_request_state( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        message = params.get( 'message', '' )
+        status = params.get( 'status', 'done' )
+        request_id = params.get( 'request_id', None )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        # Make sure all the samples of the current request have the same state
+        common_state = request.samples_have_common_state
+        if not common_state:
+            # If the current request state is complete and one of its samples moved from
+            # the final sample state, then move the request state to In-progress
+            if request.is_complete:
+                message = "At least 1 sample state moved from the final sample state, so now the request's state is (%s)" % request.states.SUBMITTED
+                event = trans.model.RequestEvent( request, request.states.SUBMITTED, message )
+                trans.sa_session.add( event )
+                trans.sa_session.flush()
+            if cntrller == 'api':
+                return 200, message
+        else:
+            final_state = False
+            request_type_state = request.type.final_sample_state
+            if common_state.id == request_type_state.id:
+                # since all the samples are in the final state, change the request state to 'Complete'
+                comment = "All samples of this sequencing request are in the final sample state (%s). " % request_type_state.name
+                state = request.states.COMPLETE
+                final_state = True
+            else:
+                comment = "All samples of this sequencing request are in the (%s) sample state. " % common_state.name
+                state = request.states.SUBMITTED
+            event = trans.model.RequestEvent( request, state, comment )
+            trans.sa_session.add( event )
+            trans.sa_session.flush()
+            # See if an email notification is configured to be sent when the samples are in this state.
+            retval = request.send_email_notification( trans, common_state, final_state )
+            if retval:
+                message = comment + retval
+            else:
+                message = comment
+            if cntrller == 'api':
+                return 200, message
+        return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                          action='edit_samples',
+                                                          cntrller=cntrller,
+                                                          id=request_id,
+                                                          status=status,
+                                                          message=message ) )
+
+    @web.expose
+    @web.require_login( "find samples" )
+    def find_samples( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+        samples_list = []
+        results = ''
+        if params.get( 'find_samples_button', False ):
+            search_string = kwd.get( 'search_box', '' )
+            search_type = params.get( 'search_type', '' )
+            request_states = util.listify( params.get( 'request_states', '' ) )
+            samples = []
+            if search_type == 'bar_code':
+                samples = trans.sa_session.query( trans.model.Sample ) \
+                                          .filter( and_( trans.model.Sample.table.c.deleted == false(),
+                                                         func.lower( trans.model.Sample.table.c.bar_code ).like( "%" + search_string.lower() + "%" ) ) ) \
+                                          .order_by( trans.model.Sample.table.c.create_time.desc() )
+            elif search_type == 'sample name':
+                samples = trans.sa_session.query( trans.model.Sample ) \
+                                          .filter( and_( trans.model.Sample.table.c.deleted == false(),
+                                                         func.lower( trans.model.Sample.table.c.name ).like( "%" + search_string.lower() + "%" ) ) ) \
+                                          .order_by( trans.model.Sample.table.c.create_time.desc() )
+            elif search_type == 'dataset':
+                samples = trans.sa_session.query( trans.model.Sample ) \
+                                          .filter( and_( trans.model.Sample.table.c.deleted == false(),
+                                                         trans.model.SampleDataset.table.c.sample_id == trans.model.Sample.table.c.id,
+                                                         func.lower( trans.model.SampleDataset.table.c.name ).like( "%" + search_string.lower() + "%" ) ) ) \
+                                          .order_by( trans.model.Sample.table.c.create_time.desc() )
+            elif search_type == 'form value':
+                samples = []
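+                # A 'form value' search expects 'FieldLabel=value', e.g. the
+                # hypothetical 'Lane=3' matching samples whose 'Lane' form
+                # field equals '3'.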
+                if '=' in search_string:
+                    field_label, field_value = search_string.split( '=', 1 )
+                    all_samples = trans.sa_session.query( trans.model.Sample ) \
+                        .filter( trans.model.Sample.table.c.deleted == false() ) \
+                        .order_by( trans.model.Sample.table.c.create_time.desc() )
+                    for sample in all_samples:
+                        # find the field in the sample form with the given label
+                        for field in sample.request.type.sample_form.fields:
+                            if field_label == field['label']:
+                                # check if the value is equal to the value in the search string
+                                if sample.values.content[ field['name'] ] == field_value:
+                                    samples.append( sample )
+            if is_admin:
+                for s in samples:
+                    if not s.request.deleted and s.request.state in request_states:
+                        samples_list.append( s )
+            else:
+                for s in samples:
+                    if s.request.user.id == trans.user.id and s.request.state in request_states and not s.request.deleted:
+                        samples_list.append( s )
+            results = 'There are %i samples matching the search parameters.' % len( samples_list )
+        # Build the request_states SelectField
+        selected_value = kwd.get( 'request_states', trans.model.Request.states.SUBMITTED )
+        states = [ v for k, v in trans.model.Request.states.items() ]
+        request_states = build_select_field( trans,
+                                             states,
+                                             'self',
+                                             'request_states',
+                                             selected_value=selected_value,
+                                             refresh_on_change=False,
+                                             multiple=True,
+                                             display='checkboxes' )
+        # Build the search_type SelectField
+        selected_value = kwd.get( 'search_type', 'sample name' )
+        types = [ 'sample name', 'bar_code', 'dataset', 'form value' ]
+        search_type = build_select_field( trans, types, 'self', 'search_type', selected_value=selected_value, refresh_on_change=False )
+        # Build the search_box TextField
+        search_box = TextField( 'search_box', 50, kwd.get( 'search_box', '' ) )
+        return trans.fill_template( '/requests/common/find_samples.mako',
+                                    cntrller=cntrller,
+                                    request_states=request_states,
+                                    samples=samples_list,
+                                    search_type=search_type,
+                                    results=results,
+                                    search_box=search_box )
+
+    @web.expose
+    @web.require_login( "sample events" )
+    def view_sample_history( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        sample_id = params.get( 'sample_id', None )
+        try:
+            sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, sample_id, 'sample' )
+        return trans.fill_template( '/requests/common/view_sample_history.mako',
+                                    cntrller=cntrller,
+                                    sample=sample )
+
+    @web.expose
+    @web.require_login( "add samples" )
+    def add_samples( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        request_id = params.get( 'id', None )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        libraries = trans.app.security_agent.get_accessible_libraries( trans, request.user )
+        # Build a list of sample widgets (based on the attributes of each sample) for display.
+        displayable_sample_widgets = self.__get_sample_widgets( trans, request, request.samples, **kwd )
+        if params.get( 'import_samples_button', False ):
+            # Import sample field values from a csv file
+            # TODO: should this be a mapper?
+            workflows = [ w.latest_workflow for w in trans.user.stored_workflows if not w.deleted ]
+            return self.__import_samples( trans, cntrller, request, displayable_sample_widgets, libraries, workflows, **kwd )
+        elif params.get( 'add_sample_button', False ):
+            return self.add_sample( trans, cntrller, request_id, **kwd )
+        elif params.get( 'save_samples_button', False ):
+            return self.__save_samples( trans, cntrller, request, displayable_sample_widgets, saving_new_samples=True, **kwd )
+        request_widgets = self.__get_request_widgets( trans, request.id )
+        sample_copy_select_field = self.__build_copy_sample_select_field( trans, displayable_sample_widgets )
+        libraries_select_field, folders_select_field = self.__build_library_and_folder_select_fields( trans,
+                                                                                                      request.user,
+                                                                                                      'sample_operation',
+                                                                                                      libraries,
+                                                                                                      None,
+                                                                                                      **kwd )
+        return trans.fill_template( '/requests/common/add_samples.mako',
+                                    cntrller=cntrller,
+                                    request=request,
+                                    request_widgets=request_widgets,
+                                    displayable_sample_widgets=displayable_sample_widgets,
+                                    sample_copy_select_field=sample_copy_select_field,
+                                    libraries=libraries,
+                                    libraries_select_field=libraries_select_field,
+                                    folders_select_field=folders_select_field,
+                                    status=status,
+                                    message=message )
+
+    @web.expose
+    @web.require_login( "add sample" )
+    def add_sample( self, trans, cntrller, request_id, **kwd ):
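+        """
+        Append one or more new sample widgets (optionally copied from the
+        sample selected via copy_sample_index) to the request's displayed
+        sample list and re-render the add_samples form.
+        """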
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        # Get the widgets for rendering the request form
+        request_widgets = self.__get_request_widgets( trans, request.id )
+        displayable_sample_widgets = self.__get_sample_widgets( trans, request, request.samples, **kwd )
+        if params.get( 'add_sample_button', False ):
+            libraries = trans.app.security_agent.get_accessible_libraries( trans, request.user )
+            num_samples_to_add = int( params.get( 'num_sample_to_copy', 1 ) )
+            # See if the user has selected a sample to copy.
+            copy_sample_index = int( params.get( 'copy_sample_index', -1 ) )
+            for index in range( num_samples_to_add ):
+                field_values = {}
+                if copy_sample_index != -1:
+                    # The user has selected a sample to copy.
+                    library_id = displayable_sample_widgets[ copy_sample_index ][ 'library_select_field' ].get_selected( return_value=True )
+                    folder_id = displayable_sample_widgets[ copy_sample_index ][ 'folder_select_field' ].get_selected( return_value=True )
+                    name = displayable_sample_widgets[ copy_sample_index ][ 'name' ] + '_%i' % ( len( displayable_sample_widgets ) + 1 )
+                    history_id = displayable_sample_widgets[ copy_sample_index ][ 'history_select_field' ].get_selected( return_value=True )
+                    workflow_id = displayable_sample_widgets[ copy_sample_index ][ 'workflow_select_field' ][0].get_selected( return_value=True )
+                    # DBTODO Do something nicer with the workflow fieldset.  Remove [0] indexing and copy mappings as well.
+                    for field_name in displayable_sample_widgets[ copy_sample_index ][ 'field_values' ]:
+                        field_values[ field_name ] = ''
+                else:
+                    # The user has not selected a sample to copy, just adding a new generic sample.
+                    library_id = None
+                    folder_id = None
+                    history_id = None
+                    workflow_id = None
+                    name = 'Sample_%i' % ( len( displayable_sample_widgets ) + 1 )
+                    for field in request.type.sample_form.fields:
+                        field_values[ field[ 'name' ] ] = ''
+                # Build the library_select_field and folder_select_field for the new sample being added.
+                library_select_field, folder_select_field = self.__build_library_and_folder_select_fields( trans,
+                                                                                                           user=request.user,
+                                                                                                           sample_index=len( displayable_sample_widgets ),
+                                                                                                           libraries=libraries,
+                                                                                                           sample=None,
+                                                                                                           library_id=library_id,
+                                                                                                           folder_id=folder_id,
+                                                                                                           **kwd )
+                history_select_field = self.__build_history_select_field( trans=trans,
+                                                                          user=request.user,
+                                                                          sample_index=len( displayable_sample_widgets ),
+                                                                          history_id=history_id,
+                                                                          **kwd )
+                workflow_select_field = self.__build_workflow_select_field( trans=trans,
+                                                                            user=request.user,
+                                                                            request=request,
+                                                                            sample_index=len( displayable_sample_widgets ),
+                                                                            workflow_id=workflow_id,
+                                                                            history_id=history_id,
+                                                                            **kwd )
+                # Append the new sample to the current list of samples for the request
+                displayable_sample_widgets.append( dict( id=None,
+                                                         name=name,
+                                                         bar_code='',
+                                                         library=None,
+                                                         library_id=library_id,
+                                                         history=None,
+                                                         workflow=None,
+                                                         history_select_field=history_select_field,
+                                                         workflow_select_field=workflow_select_field,
+                                                         folder=None,
+                                                         folder_id=folder_id,
+                                                         field_values=field_values,
+                                                         library_select_field=library_select_field,
+                                                         folder_select_field=folder_select_field ) )
+        sample_copy_select_field = self.__build_copy_sample_select_field( trans, displayable_sample_widgets )
+        return trans.fill_template( '/requests/common/add_samples.mako',
+                                    cntrller=cntrller,
+                                    request=request,
+                                    request_widgets=request_widgets,
+                                    displayable_sample_widgets=displayable_sample_widgets,
+                                    sample_copy_select_field=sample_copy_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "view request" )
+    def view_sample( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        sample_id = params.get( 'id', None )
+        try:
+            sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, sample_id, 'sample' )
+        # See if we have any associated templates
+        widgets = sample.get_template_widgets( trans )
+        widget_fields_have_contents = self.widget_fields_have_contents( widgets )
+        if is_admin:
+            external_services = sample.populate_external_services( trans=trans )
+        else:
+            external_services = None
+        return trans.fill_template( '/requests/common/view_sample.mako',
+                                    cntrller=cntrller,
+                                    sample=sample,
+                                    widgets=widgets,
+                                    widget_fields_have_contents=widget_fields_have_contents,
+                                    status=status,
+                                    message=message,
+                                    external_services=external_services )
+
+    @web.expose
+    @web.require_login( "delete sample from sequencing request" )
+    def delete_sample( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        status = params.get( 'status', 'done' )
+        message = util.restore_text( params.get( 'message', '' ) )
+        request_id = params.get( 'request_id', None )
+        try:
+            request = trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( request_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, request_id )
+        displayable_sample_widgets = self.__get_sample_widgets( trans, request, request.samples, **kwd )
+        sample_index = int( params.get( 'sample_id', 0 ) )
+        sample_name = displayable_sample_widgets[ sample_index ][ 'name' ]
+        sample = request.get_sample( sample_name )
+        if sample:
+            trans.sa_session.delete( sample.values )
+            trans.sa_session.delete( sample )
+            trans.sa_session.flush()
+            message = 'Sample (%s) has been deleted.' % sample_name
+        else:
+            status = 'error'
+            message = 'Sample (%s) not found in this request.' % sample_name
+        return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                          action='edit_samples',
+                                                          cntrller=cntrller,
+                                                          id=trans.security.encode_id( request.id ),
+                                                          status=status,
+                                                          message=message ) )
+
+    @web.expose
+    @web.require_login( "view data transfer page" )
+    def view_sample_datasets( self, trans, cntrller, **kwd ):
+        # The link on the number of selected datasets will only appear if there is at least 1 selected dataset.
+        # If there are 0 selected datasets, there is no link, so this method will only be reached from the requests
+        # controller if there are selected datasets.
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+        sample_id = params.get( 'sample_id', None )
+        try:
+            sample = trans.sa_session.query( trans.model.Sample ).get( trans.security.decode_id( sample_id ) )
+        except Exception:
+            return invalid_id_redirect( trans, cntrller, sample_id, 'sample' )
+        external_service_id = params.get( 'external_service_id', None )
+        external_service = trans.sa_session.query( trans.model.ExternalService ).get( trans.security.decode_id( external_service_id ) )
+        # See if a library and folder have been set for this sample.
+        if is_admin and ( not sample.library or not sample.folder ):
+            status = 'error'
+            message = "Select a target data library and folder for the sample before selecting the datasets."
+            return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                              action='edit_samples',
+                                                              cntrller=cntrller,
+                                                              id=trans.security.encode_id( sample.request.id ),
+                                                              status=status,
+                                                              message=message ) )
+        transfer_status = params.get( 'transfer_status', None )
+        if transfer_status in [ None, 'None' ]:
+            title = 'All selected datasets for "%s"' % sample.name
+            sample_datasets = sample.datasets
+        elif transfer_status == trans.model.SampleDataset.transfer_status.IN_QUEUE:
+            title = 'Datasets of "%s" that are in the transfer queue' % sample.name
+            sample_datasets = sample.queued_dataset_files
+        elif transfer_status == trans.model.SampleDataset.transfer_status.TRANSFERRING:
+            title = 'Datasets of "%s" that are being transferred' % sample.name
+            sample_datasets = sample.transferring_dataset_files
+        elif transfer_status == trans.model.SampleDataset.transfer_status.ADD_TO_LIBRARY:
+            title = 'Datasets of "%s" that are being added to the target data library' % sample.name
+            sample_datasets = sample.adding_to_library_dataset_files
+        elif transfer_status == trans.model.SampleDataset.transfer_status.COMPLETE:
+            title = 'Datasets of "%s" that are available in the target data library' % sample.name
+            sample_datasets = sample.transferred_dataset_files
+        elif transfer_status == trans.model.SampleDataset.transfer_status.ERROR:
+            title = 'Datasets of "%s" that resulted in a transfer error' % sample.name
+            sample_datasets = sample.transfer_error_dataset_files
+        else:
+            # Fall back to all datasets so an unrecognized transfer_status
+            # cannot leave title and sample_datasets unbound.
+            title = 'All selected datasets for "%s"' % sample.name
+            sample_datasets = sample.datasets
+        return trans.fill_template( '/requests/common/view_sample_datasets.mako',
+                                    cntrller=cntrller,
+                                    title=title,
+                                    external_service=external_service,
+                                    sample=sample,
+                                    sample_datasets=sample_datasets,
+                                    transfer_status=transfer_status,
+                                    message=message,
+                                    status=status )
+
+    def __import_samples( self, trans, cntrller, request, displayable_sample_widgets, libraries, workflows, **kwd ):
+        """
+        Reads the samples csv file and imports all the samples.  The csv file must be in the following format; the [:FieldValue]
+        part is optional, and if included the form field will contain the value after the ':'.
+                        SampleName,DataLibraryName,DataLibraryFolderName,HistoryName,WorkflowName,Field1Name:Field1Value,Field2Name:Field2Value...
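+                        For example (hypothetical values): Sample_1,Library A,Folder 1,RNA history,QC workflow,Lane:3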
+        """
+        params = util.Params( kwd )
+        file_obj = params.get( 'file_data', '' )
+        try:
+            reader = csv.reader( file_obj.file )
+            for row in reader:
+                library_id = None
+                folder_id = None
+                folder = None
+                history_id = None
+                workflow_id = None
+                # Get the library
+                library = trans.sa_session.query( trans.model.Library ) \
+                                          .filter( and_( trans.model.Library.table.c.name == row[1],
+                                                         trans.model.Library.table.c.deleted == false() ) ) \
+                                          .first()
+                if library:
+                    # Get the folder.  The for/else ensures folder is reset to None when no
+                    # folder of the given name belongs to the selected library; otherwise the
+                    # loop variable would retain an unrelated folder from the query results.
+                    for folder in trans.sa_session.query( trans.model.LibraryFolder ) \
+                                                  .filter( and_( trans.model.LibraryFolder.table.c.name == row[2],
+                                                                 trans.model.LibraryFolder.table.c.deleted == false() ) ):
+                        if folder.parent_library == library:
+                            break
+                    else:
+                        folder = None
+                    if folder:
+                        library_id = trans.security.encode_id( library.id )
+                        folder_id = trans.security.encode_id( folder.id )
+                library_select_field, folder_select_field = self.__build_library_and_folder_select_fields( trans,
+                                                                                                           request.user,
+                                                                                                           len( displayable_sample_widgets ),
+                                                                                                           libraries,
+                                                                                                           None,
+                                                                                                           library_id,
+                                                                                                           folder_id,
+                                                                                                           **kwd )
+                # Get the history
+                history = trans.sa_session.query( trans.model.History ) \
+                                          .filter( and_( trans.model.History.table.c.name == row[3],
+                                                         trans.model.History.table.c.deleted == false(),
+                                                         trans.model.History.user_id == trans.user.id ) ) \
+                                          .first()
+                if history:
+                    history_id = trans.security.encode_id( history.id )
+                else:
+                    history_id = 'none'
+                history_select_field = self.__build_history_select_field( trans=trans,
+                                                                          user=request.user,
+                                                                          sample_index=len( displayable_sample_widgets ),
+                                                                          history_id=history_id )
+                # Get the workflow
+                workflow = trans.sa_session.query( trans.model.StoredWorkflow ) \
+                                           .filter( and_( trans.model.StoredWorkflow.table.c.name == row[4],
+                                                          trans.model.StoredWorkflow.table.c.deleted == false(),
+                                                          trans.model.StoredWorkflow.user_id == trans.user.id ) ) \
+                                           .first()
+                if workflow:
+                    workflow_id = trans.security.encode_id( workflow.id )
+                else:
+                    workflow_id = 'none'
+                workflow_select_field = self.__build_workflow_select_field( trans=trans,
+                                                                            user=request.user,
+                                                                            request=request,
+                                                                            sample_index=len( displayable_sample_widgets ),
+                                                                            workflow_id=workflow_id,
+                                                                            history_id=history_id )
+                field_values = {}
+                field_names = row[5:]
+                for field_name in field_names:
+                    if field_name.find( ':' ) >= 0:
+                        # Split on the first ':' only, so field values containing ':' survive intact.
+                        field_name, field_value = field_name.split( ':', 1 )
+                    else:
+                        field_value = ''
+                    field_values[ field_name ] = field_value
+                displayable_sample_widgets.append( dict( id=None,
+                                                         name=row[0],
+                                                         bar_code='',
+                                                         library=library,
+                                                         library_id=library_id,
+                                                         library_select_field=library_select_field,
+                                                         folder=folder,
+                                                         folder_id=folder_id,
+                                                         folder_select_field=folder_select_field,
+                                                         history=history,
+                                                         history_id=history_id,
+                                                         history_select_field=history_select_field,
+                                                         workflow=workflow,
+                                                         workflow_id=workflow_id,
+                                                         workflow_select_field=workflow_select_field,
+                                                         field_values=field_values ) )
+        except Exception as e:
+            # params.get( 'file_data', '' ) returns a (unicode) string rather than a
+            # FieldStorage when no file was selected, which triggers this AttributeError.
+            if str( e ) == "'unicode' object has no attribute 'file'":
+                message = "Select a file"
+            else:
+                message = 'Error attempting to create samples from selected file: %s.' % str( e )
+                message += '  Make sure the selected csv file uses the format: SampleName,DataLibraryName,DataLibraryFolderName,HistoryName,WorkflowName,Field1Name:Field1Value,Field2Name:Field2Value...'
+            return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                              action='add_sample',
+                                                              cntrller=cntrller,
+                                                              request_id=trans.security.encode_id( request.id ),
+                                                              add_sample_button='Add sample',
+                                                              status='error',
+                                                              message=message ) )
+        request_widgets = self.__get_request_widgets( trans, request.id )
+        sample_copy_select_field = self.__build_copy_sample_select_field( trans, displayable_sample_widgets )
+        return trans.fill_template( '/requests/common/add_samples.mako',
+                                    cntrller=cntrller,
+                                    request=request,
+                                    request_widgets=request_widgets,
+                                    displayable_sample_widgets=displayable_sample_widgets,
+                                    sample_copy_select_field=sample_copy_select_field )
+
+    def __save_samples( self, trans, cntrller, request, sample_widgets, saving_new_samples=False, **kwd ):
+        # Here we handle saving all new samples added by the user as well as saving
+        # changes to any subset of the request's samples.  A sample will not have an
+        # associated SampleState until the request is submitted, at which time the
+        # sample is automatically associated with the first SampleState configured by
+        # the admin for the request's RequestType.
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+        status = params.get( 'status', 'done' )
+        sample_operation = params.get( 'sample_operation', 'none' )
+        if saving_new_samples:
+            redirect_action = 'add_samples'
+        else:
+            redirect_action = 'edit_samples'
+        # Check for duplicate sample names within the request
+        self.__validate_sample_names( trans, cntrller, request, sample_widgets, **kwd )
+        log.debug( "SAVING SAMPLES!" )
+        log.debug( "saving_new_samples is %s" % saving_new_samples )
+        if not saving_new_samples:
+            library = None
+            folder = None
+
+            def handle_error( **kwd ):
+                kwd[ 'status' ] = 'error'
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  action=redirect_action,
+                                                                  cntrller=cntrller,
+                                                                  **kwd ) )
+            # Here we handle saving changes to single samples as well as saving changes to
+            # selected sets of samples.  If samples are selected, the sample_operation param
+            # will have a value other than 'none', and the samples param will be a list of
+            # encoded sample ids.  There are currently only 2 multi-select operations:
+            # model.Sample.bulk_operations.CHANGE_STATE and model.Sample.bulk_operations.SELECT_LIBRARY.
+            # If sample_operation is 'none', then the samples param will be a list of sample objects.
+            if sample_operation == trans.model.Sample.bulk_operations.CHANGE_STATE:
+                sample_state_id = params.get( 'sample_state_id', None )
+                if sample_state_id in [ None, 'none' ]:
+                    message = "Select a new state from the <b>Change current state</b> list before clicking the <b>Save</b> button."
+                    kwd[ 'message' ] = message
+                    del kwd[ 'save_samples_button' ]
+                    return handle_error( **kwd )
+                sample_event_comment = util.restore_text( params.get( 'sample_event_comment', '' ) )
+                new_state = trans.sa_session.query( trans.model.SampleState ).get( trans.security.decode_id( sample_state_id ) )
+                # Send the encoded sample_ids to update_sample_state.
+                # TODO: make changes necessary to just send the samples...
+                encoded_selected_sample_ids = self.__get_encoded_selected_sample_ids( trans, request, **kwd )
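+                # Selected samples arrive as checkbox params named 'select_sample_<sample.id>'
+                # (see __get_encoded_selected_sample_ids below).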
+                # Make sure all samples have a unique bar_code if the state is changing
+                for sample_index in range( len( sample_widgets ) ):
+                    current_sample = sample_widgets[ sample_index ]
+                    if current_sample is None:
+                        # We have a None value because the user did not select this sample
+                        # on which to perform the action.
+                        continue
+                    request_sample = request.samples[ sample_index ]
+                    bar_code = current_sample[ 'bar_code' ]
+                    if bar_code:
+                        # If the sample has a new bar_code, make sure it is unique.
+                        bc_message = self.__validate_bar_code( trans, request_sample, bar_code )
+                        if bc_message:
+                            message += bc_message
+                            kwd[ 'message' ] = message
+                            del kwd[ 'save_samples_button' ]
+                            return handle_error( **kwd )
+                self.update_sample_state( trans, cntrller, encoded_selected_sample_ids, new_state, comment=sample_event_comment )
+                return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                                  cntrller=cntrller,
+                                                                  action='update_request_state',
+                                                                  request_id=trans.security.encode_id( request.id ) ) )
+            elif sample_operation == trans.model.Sample.bulk_operations.SELECT_LIBRARY:
+                library_id = params.get( 'sample_operation_library_id', 'none' )
+                folder_id = params.get( 'sample_operation_folder_id', 'none' )
+                library, folder = self.__get_library_and_folder( trans, library_id, folder_id )
+                for sample_index in range( len( sample_widgets ) ):
+                    current_sample = sample_widgets[ sample_index ]
+                    if current_sample is None:
+                        # We have a None value because the user did not select this sample
+                        # on which to perform the action.
+                        continue
+                    current_sample[ 'library' ] = library
+                    current_sample[ 'folder' ] = folder
+            self.__update_samples( trans, cntrller, request, sample_widgets, **kwd )
+            message = 'Changes made to the samples have been saved. '
+        else:
+            # Saving a newly created sample.  The sample will not have an associated SampleState
+            # until the request is submitted, at which time all samples of the request will be
+            # set to the first SampleState configured for the request's RequestType configured
+            # by the admin ( i.e., the sample's SampleState would be set to request.type.states[0] ).
+            new_samples = []
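+            # Note: constructing trans.model.Sample with request=request below appends the
+            # new sample to request.samples (assuming the usual SQLAlchemy relationship
+            # backref), so len( request.samples ) advances on each iteration and sample_index
+            # always points at the next not-yet-saved widget.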
+            for index in range( len( sample_widgets ) - len( request.samples ) ):
+                sample_index = len( request.samples )
+                sample_widget = sample_widgets[ sample_index ]
+                form_values = trans.model.FormValues( request.type.sample_form, sample_widget[ 'field_values' ] )
+                trans.sa_session.add( form_values )
+                trans.sa_session.flush()
+                if request.is_submitted:
+                    bar_code = sample_widget[ 'bar_code' ]
+                else:
+                    bar_code = ''
+                sample = trans.model.Sample( name=sample_widget[ 'name' ],
+                                             desc='',
+                                             request=request,
+                                             form_values=form_values,
+                                             bar_code=bar_code,
+                                             library=sample_widget[ 'library' ],
+                                             folder=sample_widget[ 'folder' ],
+                                             history=sample_widget['history'],
+                                             workflow=sample_widget['workflow_dict'] )
+                trans.sa_session.add( sample )
+                trans.sa_session.flush()
+                new_samples.append( sample )
+            # If this sample is added when the request is already submitted then these new samples
+            # should be in the first sample state when saved
+            if request.is_submitted:
+                initial_sample_state_after_request_submitted = request.type.states[0]
+                for sample in new_samples:
+                    event_comment = 'Sample added and sample state set to %s.' % request.type.states[0].name
+                    event = trans.model.SampleEvent( sample,
+                                                     initial_sample_state_after_request_submitted,
+                                                     event_comment )
+                    trans.sa_session.add( event )
+                trans.sa_session.flush()
+        return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                          action=redirect_action,
+                                                          cntrller=cntrller,
+                                                          id=trans.security.encode_id( request.id ),
+                                                          status=status,
+                                                          message=message ) )
+
+    def __update_samples( self, trans, cntrller, request, sample_widgets, **kwd ):
+        # The list of sample_widgets must have the same number of objects as request.samples,
+        # but some of the objects can be None.  Those that are not None correspond to samples
+        # selected by the user for performing an action on multiple samples simultaneously.
+        # The items in the sample_widgets list have already been populated with any changed
+        # param values (changed implies the value in kwd is different from the attribute value
+        # in the database) in kwd before this method is reached.
+        def handle_error( **kwd ):
+            kwd[ 'status' ] = 'error'
+            return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                              action='edit_samples',
+                                                              cntrller=cntrller,
+                                                              **kwd ) )
+        for index, sample_widget in enumerate( sample_widgets ):
+            if sample_widget is not None:
+                # sample_widget will be None if the user checked sample check boxes and selected an action
+                # to perform on multiple samples, but did not select certain samples.
+                sample = request.samples[ index ]
+                # Get the sample's form values to see if they have changed.
+                form_values = trans.sa_session.query( trans.model.FormValues ).get( sample.values.id )
+                if sample.name != sample_widget[ 'name' ] or \
+                        sample.bar_code != sample_widget[ 'bar_code' ] or \
+                        sample.library != sample_widget[ 'library' ] or \
+                        sample.folder != sample_widget[ 'folder' ] or \
+                        sample.history != sample_widget[ 'history' ] or \
+                        sample.workflow != sample_widget[ 'workflow_dict' ] or \
+                        form_values.content != sample_widget[ 'field_values' ]:
+                    # Information about this sample has been changed.
+                    sample.name = sample_widget[ 'name' ]
+                    bar_code = sample_widget[ 'bar_code' ]
+                    # If the sample has a new bar_code, make sure it is unique.
+                    if bar_code:
+                        bc_message = self.__validate_bar_code( trans, sample, bar_code )
+                        if bc_message:
+                            kwd[ 'message' ] = bc_message
+                            del kwd[ 'save_samples_button' ]
+                            return handle_error( **kwd )
+                        if not sample.bar_code:
+                            # If the sample's associated SampleState is still the initial state
+                            # configured by the admin for the request's RequestType, this must be
+                            # the first time a bar code was added to the sample, so change its state
+                            # to the next associated SampleState.
+                            if sample.state.id == request.type.states[0].id:
+                                # Change the sample state only if its request_type
+                                # has at least 2 states
+                                if len( request.type.states ) >= 2:
+                                    next_sample_state = request.type.states[1]
+                                else:
+                                    next_sample_state = request.type.states[0]
+                                event = trans.model.SampleEvent( sample,
+                                                                 next_sample_state,
+                                                                 'Bar code associated with the sample' )
+                                trans.sa_session.add( event )
+                                trans.sa_session.flush()
+                                # Next step is to update the request event history if bar codes
+                                # have been assigned to all the samples of this request
+                                common_state = request.samples_have_common_state
+                                if request.is_submitted and common_state and len( request.type.states ) >= 2:
+                                    comment = "All samples of this request are in the (%s) sample state. " % common_state.name
+                                    event = trans.model.RequestEvent( request, request.states.SUBMITTED, comment )
+                                    trans.sa_session.add( event )
+                                    trans.sa_session.flush()
+                                    request.send_email_notification( trans, next_sample_state )
+
+                    sample.bar_code = bar_code
+                    sample.library = sample_widget[ 'library' ]
+                    sample.folder = sample_widget[ 'folder' ]
+                    sample.history = sample_widget[ 'history' ]
+                    sample.workflow = sample_widget[ 'workflow_dict' ]
+                    form_values.content = sample_widget[ 'field_values' ]
+                    trans.sa_session.add_all( ( sample, form_values ) )
+                    trans.sa_session.flush()
+
+    def __get_library_and_folder( self, trans, library_id, folder_id ):
+        try:
+            library = trans.sa_session.query( trans.model.Library ).get( trans.security.decode_id( library_id ) )
+        except Exception:
+            library = None
+        folder = None
+        if library and folder_id == 'none':
+            folder = library.root_folder
+        elif library:
+            try:
+                folder = trans.sa_session.query( trans.model.LibraryFolder ).get( trans.security.decode_id( folder_id ) )
+            except Exception:
+                # Fall back to the library's root folder when the folder id cannot be decoded.
+                folder = library.root_folder
+        return library, folder
+
+    def __get_history( self, trans, history_id ):
+        try:
+            return trans.sa_session.query( trans.model.History ).get( trans.security.decode_id( history_id ) )
+        except Exception:
+            return None
+
+    def __get_workflow( self, trans, workflow_id ):
+        try:
+            return trans.sa_session.query( trans.model.Workflow ).get( trans.security.decode_id( workflow_id ) )
+        except Exception:
+            return None
+
+    def __get_active_folders( self, folder, active_folders_list ):
+        """Return all of the active folders for the received library"""
+        active_folders_list.extend( folder.active_folders )
+        for sub_folder in folder.active_folders:
+            self.__get_active_folders( sub_folder, active_folders_list )
+        return active_folders_list
+
+    # ===== Methods for handling form definition widgets =====
+    def __get_request_widgets( self, trans, id ):
+        """Get the widgets for the request"""
+        request = trans.sa_session.query( trans.model.Request ).get( id )
+        # The request_widgets list is a list of dictionaries
+        request_widgets = []
+        for field in request.type.request_form.fields:
+            field_value = request.values.content[ field['name'] ]
+            if field[ 'required' ]:
+                required_label = 'Required'
+            else:
+                required_label = 'Optional'
+            if field[ 'type' ] == 'AddressField':
+                if field_value:
+                    request_widgets.append( dict( label=field[ 'label' ],
+                                                  value=trans.sa_session.query( trans.model.UserAddress ).get( int( field_value ) ).get_html(),
+                                                  helptext=field[ 'helptext' ] + ' (' + required_label + ')' ) )
+                else:
+                    request_widgets.append( dict( label=field[ 'label' ],
+                                                  value=None,
+                                                  helptext=field[ 'helptext' ] + ' (' + required_label + ')' ) )
+            else:
+                request_widgets.append( dict( label=field[ 'label' ],
+                                              value=field_value,
+                                              helptext=field[ 'helptext' ] + ' (' + required_label + ')' ) )
+        return request_widgets
+
+    def __get_sample_widgets( self, trans, request, samples, **kwd ):
+        """
+        Returns a list of dictionaries, each representing the widgets that define a sample on a form.
+        The widgets are populated from kwd based on the set of samples received.  The set of samples
+        corresponds to a request.samples list, but if the user checked specific check boxes on the form,
+        those samples that were not checked will have None objects in the list of samples.  In this case,
+        the corresponding sample_widget is populated from the db rather than kwd.
+        """
+        params = util.Params( kwd )
+        sample_operation = params.get( 'sample_operation', 'none' )
+        sample_widgets = []
+        if sample_operation != 'none':
+            # The sample_operation param has a value other than 'none', and a specified
+            # set of samples was received.
+            library_id = util.restore_text( params.get( 'sample_operation_library_id', 'none' ) )
+            folder_id = util.restore_text( params.get( 'sample_operation_folder_id', 'none' ) )
+        # Build the list of widgets which will be used to render each sample row on the request page
+        if not request:
+            return sample_widgets
+        libraries = trans.app.security_agent.get_accessible_libraries( trans, request.user )
+        # Build the list of sample widgets, populating the values from kwd.
+        for index, sample in enumerate( samples ):
+            if sample is None:
+                # Use the sample from the request object since it will not have updated values from kwd.
+                sample = request.samples[ index ]
+                sample_id = sample.id
+                name = sample.name
+                bar_code = sample.bar_code
+                library = sample.library
+                folder = sample.folder
+                history = sample.history
+                workflow = sample.workflow
+                field_values = sample.values.content
+                if not history:
+                    history_id = 'none'
+                else:
+                    history_id = history.id
+                if not workflow:
+                    workflow_id = 'none'
+                else:
+                    workflow_id = workflow.id
+                workflow_dict = sample.workflow
+            else:
+                # Update the sample attributes from kwd
+                sample_id = None
+                name = util.restore_text( params.get( 'sample_%i_name' % index, sample.name ) )
+                bar_code = util.restore_text( params.get( 'sample_%i_bar_code' % index, sample.bar_code ) )
+                library_id = util.restore_text( params.get( 'sample_%i_library_id' % index, '' ) )
+                if not library_id and sample.library:
+                    library_id = trans.security.encode_id( sample.library.id )
+                folder_id = util.restore_text( params.get( 'sample_%i_folder_id' % index, '' ) )
+                if not folder_id and sample.folder:
+                    folder_id = trans.security.encode_id( sample.folder.id )
+                library, folder = self.__get_library_and_folder( trans, library_id, folder_id )
+                history_id = util.restore_text( params.get( 'sample_%i_history_id' % index, '' ))
+                if not history_id and sample.history:
+                    history_id = trans.security.encode_id( sample.history.id )
+                history = self.__get_history(trans, history_id)
+                wf_tag = 'sample_%i_workflow_id' % index
+                workflow_id = util.restore_text( params.get( wf_tag, '' ) )
+                if not workflow_id and sample.workflow:
+                    workflow_id = trans.security.encode_id( sample.workflow['id'] )
+                    workflow_dict = sample.workflow
+                    workflow = self.__get_workflow(trans, workflow_id)
+                else:
+                    workflow_dict = None
+                    workflow = self.__get_workflow(trans, workflow_id)
+                    if workflow:
+                        workflow_dict = {'id': workflow.id,
+                                         'name': workflow.name,
+                                         'mappings': {}}
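+                        # Workflow input mappings arrive as kwd keys of the form
+                        # '<wf_tag>_<step_id>'; a hypothetical 'sample_0_workflow_id_7' would
+                        # map workflow step 7 to the selected dataset tag.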
+                        for k, v in kwd.iteritems():
+                            kwd_tag = "%s_" % wf_tag
+                            if k.startswith(kwd_tag):
+                                # DBTODO Don't need to store the whole mapping word in the dict, only the step.
+                                workflow_dict['mappings'][int(k[len(kwd_tag):])] = {'ds_tag': v}
+                field_values = {}
+                for field_index, field in enumerate( request.type.sample_form.fields ):
+                    field_name = field['name']
+                    input_value = params.get( 'sample_%i_field_%i' % ( index, field_index ), sample.values.content[ field_name ] )
+                    if field['type'] == CheckboxField.__name__:
+                        field_value = CheckboxField.is_checked( input_value )
+                    else:
+                        field_value = util.restore_text( input_value )
+                    field_values[ field_name ] = field_value
+            library_select_field, folder_select_field = self.__build_library_and_folder_select_fields( trans=trans,
+                                                                                                       user=request.user,
+                                                                                                       sample_index=index,
+                                                                                                       libraries=libraries,
+                                                                                                       sample=sample,
+                                                                                                       library_id=library_id,
+                                                                                                       folder_id=folder_id,
+                                                                                                       **kwd )
+            history_select_field = self.__build_history_select_field( trans=trans,
+                                                                      user=request.user,
+                                                                      sample_index=index,
+                                                                      sample=sample,
+                                                                      history_id=history_id,
+                                                                      **kwd)
+            workflow_select_field = self.__build_workflow_select_field( trans=trans,
+                                                                        user=request.user,
+                                                                        request=request,
+                                                                        sample_index=index,
+                                                                        sample=sample,
+                                                                        workflow_dict=workflow_dict,
+                                                                        history_id=history_id,
+                                                                        **kwd)
+            sample_widgets.append( dict( id=sample_id,
+                                         name=name,
+                                         bar_code=bar_code,
+                                         library=library,
+                                         folder=folder,
+                                         history=history,
+                                         workflow=workflow,
+                                         workflow_dict=workflow_dict,
+                                         field_values=field_values,
+                                         library_select_field=library_select_field,
+                                         folder_select_field=folder_select_field,
+                                         history_select_field=history_select_field,
+                                         workflow_select_field=workflow_select_field ) )
+        # There may be additional new samples on the form that have not yet been associated with the request.
+        # TODO: factor this code so it is not duplicating what's above.
+        index = len( samples )
+        while True:
+            name = util.restore_text( params.get( 'sample_%i_name' % index, '' ) )
+            if not name:
+                break
+            bar_code = util.restore_text( params.get( 'sample_%i_bar_code' % index, '' ) )
+            library_id = util.restore_text( params.get( 'sample_%i_library_id' % index, '' ) )
+            folder_id = util.restore_text( params.get( 'sample_%i_folder_id' % index, '' ) )
+            library, folder = self.__get_library_and_folder( trans, library_id, folder_id )
+            history_id = util.restore_text( params.get( 'sample_%i_history_id' % index, '' ) )
+            # These are new samples that are not yet associated with the request, so there is
+            # no existing sample whose history could serve as a fallback here.
+            history = self.__get_history( trans, history_id )
+            wf_tag = 'sample_%i_workflow_id' % index
+            workflow_id = util.restore_text( params.get( wf_tag, '' ) )
+            # As above, there is no existing sample whose stored workflow could serve as a
+            # fallback for these new samples.
+            workflow_dict = None
+            workflow = self.__get_workflow( trans, workflow_id )
+            if workflow:
+                workflow_dict = {'id': workflow.id,
+                                 'name': workflow.name,
+                                 'mappings': {}}
+                for k, v in kwd.iteritems():
+                    kwd_tag = "%s_" % wf_tag
+                    if k.startswith(kwd_tag):
+                        # DBTODO Change the key to include the dataset tag, not just the names.
+                        workflow_dict['mappings'][int(k[len(kwd_tag):])] = {'ds_tag': v}
+            field_values = {}
+            for field_index, field in enumerate( request.type.sample_form.fields ):
+                field_name = field['name']
+                input_value = params.get( 'sample_%i_field_%i' % ( index, field_index ), '' )
+                if field['type'] == CheckboxField.__name__:
+                    field_value = CheckboxField.is_checked( input_value )
+                else:
+                    field_value = util.restore_text( input_value )
+                field_values[ field_name ] = field_value
+            library_select_field, folder_select_field = self.__build_library_and_folder_select_fields( trans=trans,
+                                                                                                       user=request.user,
+                                                                                                       sample_index=index,
+                                                                                                       libraries=libraries,
+                                                                                                       sample=None,
+                                                                                                       library_id=library_id,
+                                                                                                       folder_id=folder_id,
+                                                                                                       **kwd )
+            history_select_field = self.__build_history_select_field( trans=trans,
+                                                                      user=request.user,
+                                                                      sample_index=index,
+                                                                      sample=None,
+                                                                      history_id=history_id,
+                                                                      **kwd)
+
+            workflow_select_field = self.__build_workflow_select_field( trans=trans,
+                                                                        user=request.user,
+                                                                        request=request,
+                                                                        sample_index=index,
+                                                                        sample=None,
+                                                                        workflow_dict=workflow_dict,
+                                                                        history_id=history_id,
+                                                                        **kwd)
+            sample_widgets.append( dict( id=None,
+                                         name=name,
+                                         bar_code=bar_code,
+                                         library=library,
+                                         folder=folder,
+                                         field_values=field_values,
+                                         history=history,
+                                         workflow=workflow,
+                                         workflow_dict=workflow_dict,
+                                         history_select_field=history_select_field,
+                                         workflow_select_field=workflow_select_field,
+                                         library_select_field=library_select_field,
+                                         folder_select_field=folder_select_field ) )
+            index += 1
+        return sample_widgets
+
+    # ===== Methods for building SelectFields used on various request forms =====
+    def __build_copy_sample_select_field( self, trans, displayable_sample_widgets ):
+        copy_sample_index_select_field = SelectField( 'copy_sample_index' )
+        copy_sample_index_select_field.add_option( 'None', -1, selected=True )
+        for index, sample_dict in enumerate( displayable_sample_widgets ):
+            copy_sample_index_select_field.add_option( sample_dict[ 'name' ], index )
+        return copy_sample_index_select_field
+
+    def __build_request_type_id_select_field( self, trans, selected_value='none' ):
+        accessible_request_types = trans.app.security_agent.get_accessible_request_types( trans, trans.user )
+        return build_select_field( trans, accessible_request_types, 'name', 'request_type_id', selected_value=selected_value, refresh_on_change=True )
+
+    def __build_user_id_select_field( self, trans, selected_value='none' ):
+        active_users = trans.sa_session.query( trans.model.User ) \
+                                       .filter( trans.model.User.table.c.deleted == false() ) \
+                                       .order_by( trans.model.User.email.asc() )
+        # A refresh_on_change is required so the user's set of addresses can be displayed.
+        return build_select_field( trans, active_users, 'email', 'user_id', selected_value=selected_value, refresh_on_change=True )
+
+    def __build_sample_operation_select_field( self, trans, is_admin, request, selected_value ):
+        # The sample_operation SelectField is displayed only after the request has been
+        # submitted.  Its label is "For selected samples".
+        if is_admin:
+            # Use elif so a completed request keeps CHANGE_STATE as its only operation
+            # instead of being overwritten by the final else branch.
+            if request.is_complete:
+                bulk_operations = [ trans.model.Sample.bulk_operations.CHANGE_STATE ]
+            elif request.is_rejected:
+                bulk_operations = [ trans.model.Sample.bulk_operations.SELECT_LIBRARY ]
+            else:
+                bulk_operations = [ s for i, s in trans.model.Sample.bulk_operations.items() ]
+        else:
+            if request.is_complete:
+                bulk_operations = []
+            else:
+                bulk_operations = [ trans.model.Sample.bulk_operations.SELECT_LIBRARY ]
+        return build_select_field( trans, bulk_operations, 'self', 'sample_operation', selected_value=selected_value, refresh_on_change=True )
+
+    def __build_library_and_folder_select_fields( self, trans, user, sample_index, libraries, sample=None, library_id=None, folder_id=None, **kwd ):
+        # Create the library_id SelectField for a specific sample. The received libraries param is a list of all the libraries
+        # accessible to the current user, and we add them as options to the library_select_field.  If the user has selected an
+        # existing library then display all the folders of the selected library in the folder_select_field.  Library folders do
+        # not have ACCESS permissions associated with them (only LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE), so all folders will
+        # be present in the folder_select_field for each library selected.
+        params = util.Params( kwd )
+        if sample_index == 'sample_operation':
+            # build the library selection widget for the bulk sample operation
+            library_select_field_name = "sample_operation_library_id"
+            folder_select_field_name = "sample_operation_folder_id"
+        else:
+            library_select_field_name = "sample_%i_library_id" % sample_index
+            folder_select_field_name = "sample_%i_folder_id" % sample_index
+        if not library_id:
+            library_id = params.get( library_select_field_name, None )
+        if not folder_id:
+            folder_id = params.get( folder_select_field_name, None )
+        selected_library = None
+        if library_id not in [ None, 'none' ]:
+            for library in libraries:
+                encoded_id = trans.security.encode_id( library.id )
+                if encoded_id == str( library_id ):
+                    selected_library = library
+                    break
+        elif sample and sample.library and library_id == 'none':
+            # The user previously selected a library but is now resetting the selection to 'none'
+            selected_library = None
+        elif sample and sample.library:
+            library_id = trans.security.encode_id( sample.library.id )
+            selected_library = sample.library
+        # Build the sample_%i_library_id SelectField with refresh on change enabled
+        library_select_field = build_select_field( trans,
+                                                   libraries,
+                                                   'name',
+                                                   library_select_field_name,
+                                                   initial_value='none',
+                                                   selected_value=str( library_id ).lower(),
+                                                   refresh_on_change=True )
+        # Get all folders for the selected library, if one is indeed selected
+        if selected_library:
+            folders = self.__get_active_folders( selected_library.root_folder, active_folders_list=[ selected_library.root_folder ] )
+            if folder_id:
+                selected_folder_id = folder_id
+            elif sample and sample.folder:
+                selected_folder_id = trans.security.encode_id( sample.folder.id )
+            else:
+                selected_folder_id = trans.security.encode_id( selected_library.root_folder.id )
+        else:
+            selected_folder_id = 'none'
+            folders = []
+        # Change the name of the library root folder to clarify that it is the root
+        for folder in folders:
+            if not folder.parent:
+                folder.name = 'Data library root'
+                break
+        folder_select_field = build_select_field( trans,
+                                                  folders,
+                                                  'name',
+                                                  folder_select_field_name,
+                                                  initial_value='none',
+                                                  selected_value=selected_folder_id )
+        return library_select_field, folder_select_field
+
+    def __build_history_select_field(self, trans, user, sample_index, sample=None, history_id=None, **kwd):
+        params = util.Params( kwd )
+        history_select_field_name = "sample_%i_history_id" % sample_index
+        if not history_id:
+            history_id = params.get( history_select_field_name, None )
+        if history_id not in [ None, 'none', 'new']:
+            for history in user.histories:
+                if not history.deleted:
+                    encoded_id = trans.security.encode_id(history.id)
+                    if encoded_id == str(history_id):
+                        break
+        elif sample and sample.history and history_id in [ 'none', 'new' ]:
+            # The user previously selected a history but is now resetting the selection
+            # to 'none' or asking for a new history.
+            pass
+        elif sample and sample.history:
+            history_id = trans.security.encode_id( sample.history.id )
+        # Build the sample_%i_history_id SelectField with refresh on change enabled
+        hsf = build_select_field( trans,
+                                  [h for h in user.histories if not h.deleted],
+                                  'name',
+                                  history_select_field_name,
+                                  initial_value='none',
+                                  selected_value=str( history_id ).lower(),
+                                  refresh_on_change=True )
+        # This is ugly, but allows for an explicit "New History", while still using build_select_field.
+        # hsf.options = hsf.options[:1] + [( "Create a New History", 'new', 'new'==str( history_id ).lower() )] + hsf.options[1:]
+        hsf.options = [( "Select one", 'none', 'none' == str( history_id ).lower() )] + hsf.options[1:]
+        return hsf
+
+    def __build_workflow_select_field(self, trans, user, request, sample_index, sample=None, workflow_id=None, workflow_dict=None, history_id=None, **kwd ):
+        params = util.Params( kwd )
+        workflow_select_field_name = "sample_%i_workflow_id" % sample_index
+        selected_workflow = None
+        if not workflow_id:
+            workflow_id = params.get( workflow_select_field_name, None )
+        if workflow_id not in [ None, 'none' ]:
+            selected_workflow = trans.sa_session.query( trans.model.Workflow ).get(trans.security.decode_id(workflow_id))
+        elif sample and sample.workflow and workflow_id == 'none':
+            selected_workflow = None
+        elif sample and sample.workflow:
+            workflow_id = sample.workflow['id']
+            selected_workflow = trans.sa_session.query( trans.model.Workflow ).get(sample.workflow['id'])
+        s_list = [w.latest_workflow for w in user.stored_workflows if not w.deleted]
+        if selected_workflow and selected_workflow not in s_list:
+            s_list.append(selected_workflow)
+        workflow_select_field = build_select_field(trans,
+                                                   s_list,
+                                                   'name',
+                                                   workflow_select_field_name,
+                                                   initial_value='none',
+                                                   selected_value=str( workflow_id ).lower(),
+                                                   refresh_on_change=True )
+        workflow_select_field.options = [( "Select one", 'none', 'none' == str( workflow_id ).lower() )] + workflow_select_field.options[1:]
+        wf_fieldset = [workflow_select_field]
+        if selected_workflow and request.type.external_services:
+            # DBTODO This will work for now, but should be handled more rigorously.
+            ds_list = []
+            external_service = request.type.external_services[0]
+            dataset_name_re = re.compile( r'(dataset\d+)_(name)' )
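+            # Matches keys such as 'dataset1_name' in the external service's form values;
+            # k[:-5] below strips the '_name' suffix, so the option value becomes e.g. 'ds|dataset1'.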
+            for k, v in external_service.form_values.content.items():
+                match = dataset_name_re.match( k )
+                if match:
+                    ds_list.append(("ds|%s" % k[:-5], v))
+            if history_id not in [None, 'none', 'new', '']:
+                hist = trans.sa_session.query( trans.model.History ).get(trans.security.decode_id(history_id))
+                h_inputs = [("hi|%s" % trans.security.encode_id(ds.id), ds.name) for ds in hist.datasets if not ds.deleted]
+                ds_list += h_inputs
+            for step in selected_workflow.steps:
+                if step.type == 'data_input':
+                    if step.tool_inputs and "name" in step.tool_inputs:
+                        sf_name = '%s_%s' % (workflow_select_field_name, step.id)
+                        select_field = SelectField( name=sf_name )
+                        sf = params.get( sf_name, None )
+                        if not sf and sample and sample.workflow:
+                            if str(step.id) in sample.workflow['mappings']:
+                                sf = sample.workflow['mappings'][str(step.id)]['ds_tag']
+                        for value, label in ds_list:
+                            if value == sf:
+                                select_field.add_option( label, value, selected=True)
+                            else:
+                                select_field.add_option( label, value )
+                        wf_fieldset.append((step.tool_inputs['name'], select_field))
+        return wf_fieldset
+
+    def __build_sample_state_id_select_field( self, trans, request, selected_value ):
+        if selected_value == 'none':
+            if request.samples:
+                selected_value = trans.security.encode_id( request.samples[0].state.id )
+            else:
+                selected_value = trans.security.encode_id( request.type.states[0].id )
+        return build_select_field( trans,
+                                   objs=request.type.states,
+                                   label_attr='name',
+                                   select_field_name='sample_state_id',
+                                   selected_value=selected_value,
+                                   refresh_on_change=False )
+
+    # ===== Methods for validation forms and fields =====
+    def __validate_request( self, trans, cntrller, request ):
+        """Validates the request entered by the user"""
+        # TODO: Add checks for required sample fields here.
+        empty_fields = []
+        # Make sure required form fields are filled in.
+        for field in request.type.request_form.fields:
+            if field[ 'required' ] == 'required' and request.values.content[ field[ 'name' ] ] in [ '', None ]:
+                empty_fields.append( field[ 'label' ] )
+        empty_sample_fields = []
+        for s in request.samples:
+            for field in request.type.sample_form.fields:
+                log.debug("field: %s", field)
+                log.debug("svc: %s", s.values.content)
+                if field['required'] == 'required' and s.values.content[field['name']] in ['', None]:
+                    empty_sample_fields.append((s.name, field['label']))
+        if empty_fields or empty_sample_fields:
+            message = 'Complete the following fields of the request before submitting: <br/>'
+            if empty_fields:
+                for ef in empty_fields:
+                    message += '<b>%s</b><br/>' % ef
+            if empty_sample_fields:
+                for sname, ef in empty_sample_fields:
+                    message += '<b>%s</b> field of sample <b>%s</b><br/>' % ( ef, sname )
+            return message
+        return None
+
+    def __validate_sample_names( self, trans, cntrller, request, displayable_sample_widgets, **kwd ):
+        # Check for duplicate sample names for all samples of the request.
+        message = ''
+        for index in range( len( displayable_sample_widgets ) - len( request.samples ) ):
+            sample_index = index + len( request.samples )
+            sample_widget = displayable_sample_widgets[ sample_index ]
+            sample_name = sample_widget[ 'name' ]
+            if not sample_name.strip():
+                message = 'Enter the name of sample number %i' % sample_index
+                break
+            count = 0
+            for i in range( len( displayable_sample_widgets ) ):
+                if sample_name == displayable_sample_widgets[ i ][ 'name' ]:
+                    count += 1
+            if count > 1:
+                message = "You tried to add %i samples with the name (%s).  Samples belonging to a request must have unique names." % ( count, sample_name )
+                break
+        if message:
+            del kwd[ 'save_samples_button' ]
+            kwd[ 'message' ] = message
+            kwd[ 'status' ] = 'error'
+            return trans.response.send_redirect( web.url_for( controller='requests_common',
+                                                              action='edit_samples',
+                                                              cntrller=cntrller,
+                                                              **kwd ) )
+
+    def __validate_bar_code( self, trans, sample, bar_code ):
+        """
+        Make sure that the bar_code about to be assigned to a sample is globally unique.
+        That is, bar_codes must be unique across requests in Galaxy sample tracking.
+        Bar codes are not required, but if used, they can only be added to a sample after
+        the request is submitted.
+        """
+        message = ''
+        # TODO: Add a unique constraint to the sample.bar_code table column.
+        # A single query over all samples suffices to determine whether any other
+        # sample already uses this bar code; there is no need to repeat the check
+        # once per sample of the request.
+        for sample_with_bar_code in trans.sa_session.query( trans.model.Sample ) \
+                .filter( trans.model.Sample.table.c.bar_code == bar_code ):
+            if sample_with_bar_code and sample_with_bar_code.id != sample.id:
+                message = '''The bar code (%s) associated with the sample (%s) belongs to another sample.
+                             Bar codes must be unique across all samples, so use a different bar code
+                             for this sample.''' % ( bar_code, sample.name )
+                break
+        return message
+
+    # ===== Other miscellaneous utility methods =====
+    def __get_encoded_selected_sample_ids( self, trans, request, **kwd ):
+        encoded_selected_sample_ids = []
+        for sample in request.samples:
+            if CheckboxField.is_checked( kwd.get( 'select_sample_%i' % sample.id, '' ) ):
+                encoded_selected_sample_ids.append( trans.security.encode_id( sample.id ) )
+        return encoded_selected_sample_ids
+
+
+# ===== Miscellaneous utility methods outside of the RequestsCommon class =====
+def invalid_id_redirect( trans, cntrller, obj_id, item='sequencing request', action='browse_requests' ):
+    status = 'error'
+    message = "Invalid %s id (%s)" % ( item, str( obj_id ) )
+    return trans.response.send_redirect( web.url_for( controller=cntrller,
+                                                      action=action,
+                                                      status=status,
+                                                      message=message ) )
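+
+# Example usage (illustrative only - 'request_id' is a hypothetical variable): a
+# controller action that cannot resolve an id would typically bail out with
+#     return invalid_id_redirect( trans, cntrller, request_id )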
diff --git a/lib/galaxy/webapps/galaxy/controllers/root.py b/lib/galaxy/webapps/galaxy/controllers/root.py
new file mode 100644
index 0000000..09eb7b4
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/root.py
@@ -0,0 +1,540 @@
+"""
+Contains the main (root) interface controller for the Galaxy web application
+"""
+import cgi
+import os
+import urllib
+
+from paste.httpexceptions import HTTPNotFound, HTTPBadGateway
+
+from galaxy import web
+from galaxy import util
+from galaxy.util import listify, Params, string_as_bool, FILENAME_VALID_CHARS
+
+from galaxy.web.base import controller
+from galaxy.model.item_attrs import UsesAnnotations
+from galaxy import managers
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+# =============================================================================
+class RootController( controller.JSAppLauncher, UsesAnnotations ):
+    """
+    Controller class that maps to the url root of Galaxy (i.e. '/').
+    """
+    def __init__( self, app ):
+        super( RootController, self ).__init__( app )
+        self.history_manager = managers.histories.HistoryManager( app )
+        self.history_serializer = managers.histories.HistorySerializer( app )
+
+    @web.expose
+    def default(self, trans, target1=None, target2=None, **kwd):
+        """
+        Called on any url that does not match a controller method.
+        """
+        raise HTTPNotFound( 'This link may not be followed from within Galaxy.' )
+
+    @web.expose
+    def client(self, trans, **kwd):
+        """
+        Endpoint for clientside routes.  Currently a passthrough to index
+        (minus kwargs) though we can differentiate it more in the future.
+        Should not be used with url_for -- see
+        (https://github.com/galaxyproject/galaxy/issues/1878) for why.
+        """
+        return self.index(trans)
+
+    def _get_extended_config( self, trans ):
+        app = trans.app
+        user_requests = bool( trans.user and ( trans.user.requests or app.security_agent.get_accessible_request_types( trans, trans.user ) ) )
+        config = {
+            'active_view'                   : 'analysis',
+            'params'                        : dict( trans.request.params ),
+            'enable_cloud_launch'           : app.config.get_bool( 'enable_cloud_launch', False ),
+            # TODO: next two should be redundant - why can't we build one from the other?
+            'toolbox'                       : app.toolbox.to_dict( trans, in_panel=False ),
+            'toolbox_in_panel'              : app.toolbox.to_dict( trans ),
+            'message_box_visible'           : app.config.message_box_visible,
+            'show_inactivity_warning'       : app.config.user_activation_on and trans.user and not trans.user.active,
+            # TODO: move to user
+            'user_requests'                 : user_requests
+        }
+
+        # TODO: move to user
+        stored_workflow_menu_entries = config[ 'stored_workflow_menu_entries' ] = []
+        for menu_item in getattr( trans.user, 'stored_workflow_menu_entries', [] ):
+            stored_workflow_menu_entries.append({
+                'encoded_stored_workflow_id': trans.security.encode_id( menu_item.stored_workflow_id ),
+                'stored_workflow': {
+                    'name': util.unicodify( menu_item.stored_workflow.name )
+                }
+            })
+
+        return config
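+
+    # The resulting config[ 'stored_workflow_menu_entries' ] has roughly this
+    # shape (hypothetical encoded id, shown for illustration only):
+    #   [ { 'encoded_stored_workflow_id': 'f2db41e1fa331b3e',
+    #       'stored_workflow': { 'name': u'My workflow' } } ]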
+
+    @web.expose
+    def index( self, trans, tool_id=None, workflow_id=None, history_id=None, m_c=None, m_a=None, **kwd ):
+        """
+        Root and entry point for client-side web app.
+
+        :type       tool_id: str or None
+        :param      tool_id: load center panel with given tool if not None
+        :type   workflow_id: encoded id or None
+        :param  workflow_id: load center panel with given workflow if not None
+        :type    history_id: encoded id or None
+        :param   history_id: switch current history to given history if not None
+        :type           m_c: str or None
+        :param          m_c: controller name (e.g. 'user')
+        :type           m_a: str or None
+        :param          m_a: controller method/action (e.g. 'dbkeys')
+
+        If m_c and m_a are present, the center panel will be loaded using the
+        controller and action as a url: (e.g. 'user/dbkeys').
+        """
+        if trans.app.config.require_login and self.user_manager.is_anonymous( trans.user ):
+            # TODO: this doesn't properly redirect when login is done
+            # (see webapp __ensure_logged_in_user for the initial redirect - not sure why it doesn't redirect to login?)
+            login_url = web.url_for( controller="root", action="login" )
+            return trans.response.send_redirect( login_url )
+
+        # if a history_id was sent, attempt to switch to that history
+        history = trans.history
+        if history_id:
+            unencoded_id = trans.security.decode_id( history_id )
+            history = self.history_manager.get_owned( unencoded_id, trans.user )
+            trans.set_history( history )
+
+        # index/analysis needs an extended configuration
+        js_options = self._get_js_options( trans )
+        config = js_options[ 'config' ]
+        config.update( self._get_extended_config( trans ) )
+
+        return self.template( trans, 'analysis', options=js_options )
+
+    @web.expose
+    def login( self, trans, redirect=None, **kwd ):
+        """
+        User login path for client-side.
+        """
+        return self.template( trans, 'login',
+                              redirect=redirect,
+                              # TODO: move into config
+                              openid_providers=[ p.name for p in trans.app.openid_providers ],
+                              # an installation may have its own welcome_url - show it here if one is set
+                              welcome_url=web.url_for( controller='root', action='welcome' ),
+                              show_welcome_with_login=trans.app.config.show_welcome_with_login )
+
+    # ---- Tool related -----------------------------------------------------
+
+    @web.json
+    def tool_search( self, trans, **kwd ):
+        """Searches the tool database and returns data for any tool
+        whose text matches the query.
+
+        Data are returned in JSON format.
+        """
+        query = kwd.get( 'query', '' )
+        tags = listify( kwd.get( 'tags[]', [] ) )
+        trans.log_action( trans.get_user(), "tool_search.search", "", { "query": query, "tags": tags } )
+        results = []
+        if tags:
+            tags = trans.sa_session.query( trans.app.model.Tag ).filter( trans.app.model.Tag.name.in_( tags ) ).all()
+            for tagged_tool_il in [ tag.tagged_tools for tag in tags ]:
+                for tagged_tool in tagged_tool_il:
+                    if tagged_tool.tool_id not in results:
+                        results.append( tagged_tool.tool_id )
+            if trans.user:
+                trans.user.preferences['selected_tool_tags'] = ','.join( [ tag.name for tag in tags ] )
+                trans.sa_session.flush()
+        elif trans.user:
+            trans.user.preferences['selected_tool_tags'] = ''
+            trans.sa_session.flush()
+        if len( query ) > 2:
+            search_results = trans.app.toolbox_search.search( query )
+            if 'tags[]' in kwd:
+                results = [ x for x in search_results if x in results ]
+            else:
+                results = search_results
+        return results
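+    # Example round trip (hypothetical tool ids, shown for illustration only):
+    #   GET /tool_search?query=fastq   =>   [ "fastq_groomer", "fastqc" ]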
+
+    @web.expose
+    def tool_help( self, trans, id ):
+        """Return help page for tool identified by 'id' if available
+        """
+        toolbox = self.get_toolbox()
+        tool = toolbox.get_tool( id )
+        yield "<html><body>"
+        if not tool:
+            # Tool ids are strings, so format with %s rather than %d.
+            yield "Unknown tool id '%s'" % id
+        elif tool.help:
+            yield tool.help
+        else:
+            yield "No additional help available for tool '%s'" % tool.name
+        yield "</body></html>"
+
+    # ---- Dataset display / editing ----------------------------------------
+    @web.expose
+    def display( self, trans, id=None, hid=None, tofile=None, toext=".txt", encoded_id=None, **kwd ):
+        """Returns data directly into the browser.
+
+        Sets the mime-type according to the extension.
+
+        Used by the twill tool test driver - is it used anywhere else? It would
+        be nice to drop the hid argument and path if they are no longer needed.
+        Likewise, it would be nice to drop encoded_id=XXX and assume id is always
+        encoded (although id likely would not arrive encoded if this is used
+        anywhere else).
+        """
+        # TODO: unencoded id
+        if hid is not None:
+            try:
+                hid = int( hid )
+            except ValueError:
+                return "hid '%s' is invalid" % str( hid )
+            history = trans.get_history()
+            for dataset in history.datasets:
+                if dataset.hid == hid:
+                    data = dataset
+                    break
+            else:
+                raise Exception( "No dataset with hid '%d'" % hid )
+        else:
+            if encoded_id and not id:
+                id = self.decode_id( encoded_id )
+            try:
+                data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+            except Exception:
+                return "Dataset id '%s' is invalid" % str( id )
+        if data:
+            current_user_roles = trans.get_current_user_roles()
+            if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+                trans.response.set_content_type(data.get_mime())
+                if tofile:
+                    fStat = os.stat(data.file_name)
+                    trans.response.headers['Content-Length'] = int(fStat.st_size)
+                    if toext[0:1] != ".":
+                        toext = "." + toext
+                    fname = data.name
+                    fname = ''.join( c if c in FILENAME_VALID_CHARS else '_' for c in fname )[0:150]
+                    trans.response.headers["Content-Disposition"] = 'attachment; filename="GalaxyHistoryItem-%s-[%s]%s"' % (data.hid, fname, toext)
+                trans.log_event( "Display dataset id: %s" % str(id) )
+                try:
+                    return open( data.file_name )
+                except Exception:
+                    return "This dataset contains no content"
+            else:
+                return "You are not allowed to access this dataset"
+        else:
+            return "No dataset with id '%s'" % str( id )
+
+    @web.expose
+    def display_child(self, trans, parent_id=None, designation=None, tofile=None, toext=".txt"):
+        """Returns child data directly into the browser, based upon parent_id and designation.
+        """
+        # TODO: unencoded id
+        try:
+            data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( parent_id )
+            if data:
+                child = data.get_child_by_designation( designation )
+                if child:
+                    current_user_roles = trans.get_current_user_roles()
+                    if trans.app.security_agent.can_access_dataset( current_user_roles, child ):
+                        return self.display( trans, id=child.id, tofile=tofile, toext=toext )
+                    else:
+                        return "You are not privileged to access this dataset."
+        except Exception:
+            pass
+        return "A child named %s could not be found for data %s" % ( designation, parent_id )
+
+    @web.expose
+    def display_as( self, trans, id=None, display_app=None, **kwd ):
+        """Returns a file in a format that can successfully be displayed in display_app.
+        """
+        # TODO: unencoded id
+        data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+        authz_method = 'rbac'
+        if 'authz_method' in kwd:
+            authz_method = kwd['authz_method']
+        if data:
+            current_user_roles = trans.get_current_user_roles()
+            if authz_method == 'rbac' and trans.app.security_agent.can_access_dataset( current_user_roles, data ):
+                trans.response.set_content_type( data.get_mime() )
+                trans.log_event( "Formatted dataset id %s for display at %s" % ( str( id ), display_app ) )
+                return data.as_display_type( display_app, **kwd )
+            elif authz_method == 'display_at' and trans.app.host_security_agent.allow_action( trans.request.remote_addr,
+                                                                                              data.permitted_actions.DATASET_ACCESS,
+                                                                                              dataset=data ):
+                trans.response.set_content_type( data.get_mime() )
+                return data.as_display_type( display_app, **kwd )
+            else:
+                return "You are not allowed to access this dataset."
+        else:
+            return "No data with id=%d" % id
+
+    @web.expose
+    def peek(self, trans, id=None):
+        """Returns a 'peek' at the data.
+        """
+        # TODO: unused?
+        # TODO: unencoded id
+        data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+        if data:
+            yield "<html><body><pre>"
+            yield data.peek
+            yield "</pre></body></html>"
+        else:
+            yield "No data with id=%d" % id
+
+    # ---- History management -----------------------------------------------
+    @web.expose
+    def history_delete( self, trans, id ):
+        """Backward compatibility with check_galaxy script.
+        """
+        # TODO: unused?
+        return trans.webapp.controllers['history'].list( trans, id, operation='delete' )
+
+    @web.expose
+    def clear_history( self, trans ):
+        """Clears the history for a user.
+        """
+        # TODO: unused? (seems to only be used in TwillTestCase)
+        history = trans.get_history()
+        for dataset in history.datasets:
+            dataset.deleted = True
+            dataset.clear_associated_files()
+        trans.sa_session.flush()
+        trans.log_event( "History id %s cleared" % (str(history.id)) )
+        trans.response.send_redirect( web.url_for("/index" ) )
+
+    @web.expose
+    def history_import( self, trans, id=None, confirm=False, **kwd ):
+        # TODO: unused?
+        # TODO: unencoded id
+        user = trans.get_user()
+        user_history = trans.get_history()
+        if not id:
+            return trans.show_error_message( "You must specify a history you want to import.")
+        import_history = trans.sa_session.query( trans.app.model.History ).get( id )
+        if not import_history:
+            return trans.show_error_message( "The specified history does not exist.")
+        if user:
+            if import_history.user_id == user.id:
+                return trans.show_error_message( "You cannot import your own history.")
+            new_history = import_history.copy( target_user=trans.user )
+            new_history.name = "imported: " + new_history.name
+            new_history.user_id = user.id
+            galaxy_session = trans.get_galaxy_session()
+            try:
+                association = trans.sa_session.query( trans.app.model.GalaxySessionToHistoryAssociation ) \
+                                              .filter_by( session_id=galaxy_session.id, history_id=new_history.id ) \
+                                              .first()
+            except Exception:
+                association = None
+            new_history.add_galaxy_session( galaxy_session, association=association )
+            trans.sa_session.add( new_history )
+            trans.sa_session.flush()
+            if not user_history.datasets:
+                trans.set_history( new_history )
+            trans.log_event( "History imported, id: %s, name: '%s': " % (str(new_history.id), new_history.name ) )
+            return trans.show_ok_message( """
+                History "%s" has been imported. Click <a href="%s">here</a>
+                to begin.""" % ( new_history.name, web.url_for( '/' ) ) )
+        elif not user_history.datasets or confirm:
+            new_history = import_history.copy()
+            new_history.name = "imported: " + new_history.name
+            new_history.user_id = None
+            galaxy_session = trans.get_galaxy_session()
+            try:
+                association = trans.sa_session.query( trans.app.model.GalaxySessionToHistoryAssociation ) \
+                                              .filter_by( session_id=galaxy_session.id, history_id=new_history.id ) \
+                                              .first()
+            except Exception:
+                association = None
+            new_history.add_galaxy_session( galaxy_session, association=association )
+            trans.sa_session.add( new_history )
+            trans.sa_session.flush()
+            trans.set_history( new_history )
+            trans.log_event( "History imported, id: %s, name: '%s': " % (str(new_history.id), new_history.name ) )
+            return trans.show_ok_message( """
+                History "%s" has been imported. Click <a href="%s">here</a>
+                to begin.""" % ( new_history.name, web.url_for( '/' ) ) )
+        return trans.show_warn_message( """
+            Warning! If you import this history, you will lose your current
+            history. Click <a href="%s">here</a> to confirm.
+            """ % web.url_for( controller='root', action='history_import', id=id, confirm=True ) )
+
+    @web.expose
+    def history_new( self, trans, name=None ):
+        """Create a new history with the given name
+        and refresh the history panel.
+        """
+        trans.new_history( name=name )
+        trans.log_event( "Created new History, id: %s." % str(trans.history.id) )
+        return trans.show_message( "New history created", refresh_frames=['history'] )
+
+    @web.expose
+    def history_add_to( self, trans, history_id=None, file_data=None,
+                        name="Data Added to History", info=None, ext="txt", dbkey="?", copy_access_from=None, **kwd ):
+        """Adds a POSTed file to a History.
+        """
+        # TODO: unencoded id
+        try:
+            history = trans.sa_session.query( trans.app.model.History ).get( history_id )
+            data = trans.app.model.HistoryDatasetAssociation( name=name,
+                                                              info=info,
+                                                              extension=ext,
+                                                              dbkey=dbkey,
+                                                              create_dataset=True,
+                                                              sa_session=trans.sa_session )
+            if copy_access_from:
+                copy_access_from = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( copy_access_from )
+                trans.app.security_agent.copy_dataset_permissions( copy_access_from.dataset, data.dataset )
+            else:
+                permissions = trans.app.security_agent.history_get_default_permissions( history )
+                trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+            trans.sa_session.add( data )
+            trans.sa_session.flush()
+            data_file = open( data.file_name, "wb" )
+            file_data.file.seek( 0 )
+            data_file.write( file_data.file.read() )
+            data_file.close()
+            data.state = data.states.OK
+            data.set_size()
+            data.init_meta()
+            data.set_meta()
+            trans.sa_session.flush()
+            history.add_dataset( data )
+            trans.sa_session.flush()
+            data.set_peek()
+            trans.sa_session.flush()
+            trans.log_event("Added dataset %d to history %d" % (data.id, trans.history.id))
+            return trans.show_ok_message( "Dataset " + str(data.hid) + " added to history " + str(history_id) + "." )
+        except Exception as e:
+            msg = "Failed to add dataset to history: %s" % ( e )
+            log.error( msg )
+            trans.log_event( msg )
+            return trans.show_error_message("Adding File to History has Failed")
+
+    @web.expose
+    def history_set_default_permissions( self, trans, id=None, **kwd ):
+        """Sets the permissions on a history.
+        """
+        # TODO: unencoded id
+        if trans.user:
+            if 'update_roles_button' in kwd:
+                history = None
+                if id:
+                    try:
+                        id = int( id )
+                    except ValueError:
+                        id = None
+                    if id:
+                        history = trans.sa_session.query( trans.app.model.History ).get( id )
+                if not history:
+                    # If we haven't retrieved a history, use the current one
+                    history = trans.get_history()
+                p = Params( kwd )
+                permissions = {}
+                for k, v in trans.app.model.Dataset.permitted_actions.items():
+                    in_roles = p.get( k + '_in', [] )
+                    if not isinstance( in_roles, list ):
+                        in_roles = [ in_roles ]
+                    in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in in_roles ]
+                    permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
+                dataset = 'dataset' in kwd
+                bypass_manage_permission = 'bypass_manage_permission' in kwd
+                trans.app.security_agent.history_set_default_permissions( history, permissions,
+                                                                          dataset=dataset, bypass_manage_permission=bypass_manage_permission )
+                return trans.show_ok_message( 'Default history permissions have been changed.' )
+            return trans.fill_template( 'history/permissions.mako' )
+        else:
+            # user not logged in, history group must be only public
+            return trans.show_error_message( "You must be logged in to change a history's default permissions." )
+
+    @web.expose
+    def dataset_make_primary( self, trans, id=None):
+        """Copies a dataset and makes primary.
+        """
+        # TODO: unused?
+        # TODO: unencoded id
+        try:
+            old_data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
+            new_data = old_data.copy()
+            # new_data.parent = None
+            history = trans.get_history()
+            history.add_dataset(new_data)
+            trans.sa_session.add( new_data )
+            trans.sa_session.flush()
+            return trans.show_message( "<p>Secondary dataset has been made primary.</p>", refresh_frames=['history'] )
+        except Exception:
+            return trans.show_error_message( "<p>Failed to make secondary dataset primary.</p>" )
+
+    @web.expose
+    def welcome( self, trans ):
+        welcome_url = trans.app.config.welcome_url
+        return trans.response.send_redirect( web.url_for( welcome_url ) )
+
+    @web.expose
+    def bucket_proxy( self, trans, bucket=None, **kwd):
+        if bucket:
+            trans.response.set_content_type( 'text/xml' )
+            b_list_xml = urllib.urlopen('http://s3.amazonaws.com/%s/' % bucket)
+            return b_list_xml.read()
+        raise Exception("You must specify a bucket")
+
+    # ---- Debug methods ----------------------------------------------------
+    @web.expose
+    def echo(self, trans, **kwd):
+        """Echos parameters (debugging).
+        """
+        rval = ""
+        for k in trans.request.headers:
+            rval += "%s: %s <br/>" % ( k, trans.request.headers[k] )
+        for k in kwd:
+            rval += "%s: %s <br/>" % ( k, kwd[k] )
+            if isinstance( kwd[k], cgi.FieldStorage ):
+                rval += "-> %s" % kwd[k].file.read()
+        return rval
+
+    @web.json
+    def echo_json( self, trans, **kwd ):
+        """Echos parameters as JSON (debugging).
+
+        Attempts to parse values passed as boolean, float, then int. Defaults
+        to string. Non-recursive (will not parse lists).
+        """
+        # TODO: use json
+        rval = {}
+        for k in kwd:
+            rval[ k ] = kwd[ k ]
+            if rval[ k ] in [ 'true', 'True', 'false', 'False' ]:
+                # Parse booleans first and skip the numeric conversions below,
+                # which would otherwise coerce True/False to 1/0.
+                rval[ k ] = string_as_bool( rval[ k ] )
+                continue
+            # Try int before float so that integral strings stay ints and
+            # non-integral strings like '3.5' are preserved as floats.
+            try:
+                rval[ k ] = int( rval[ k ] )
+            except ( TypeError, ValueError ):
+                try:
+                    rval[ k ] = float( rval[ k ] )
+                except ( TypeError, ValueError ):
+                    pass
+        return rval
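+    # Example round trip (illustrative only):
+    #   GET /echo_json?a=True&b=3&c=3.5&d=foo
+    #   =>  { "a": true, "b": 3, "c": 3.5, "d": "foo" }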
+
+    @web.expose
+    def generate_error( self, trans, code=500 ):
+        """Raises an exception (debugging).
+        """
+        trans.response.status = code
+        raise Exception( "Fake error!" )
+
+    @web.json
+    def generate_json_error( self, trans, code=500 ):
+        """Raises an exception (debugging).
+        """
+        try:
+            code = int( code )
+        except Exception:
+            code = 500
+
+        if code == 502:
+            raise HTTPBadGateway()
+        trans.response.status = code
+        return { 'error': 'Fake error!' }
diff --git a/lib/galaxy/webapps/galaxy/controllers/search.py b/lib/galaxy/webapps/galaxy/controllers/search.py
new file mode 100644
index 0000000..4b33deb
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/search.py
@@ -0,0 +1,19 @@
+"""
+Contains a basic search interface for Galaxy
+"""
+import logging
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+
+log = logging.getLogger( __name__ )
+
+
+class SearchController( BaseUIController ):
+
+    @web.expose
+    def index(self, trans):
+        """
+        As the message below says, the search interface is not ready for general
+        use yet.  Power users can still use the search API.
+        """
+        return trans.show_message("Sorry, the search interface isn't quite ready for use yet.  Watch the release notes and check back later!")
diff --git a/lib/galaxy/webapps/galaxy/controllers/tag.py b/lib/galaxy/webapps/galaxy/controllers/tag.py
new file mode 100644
index 0000000..509c6e8
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/tag.py
@@ -0,0 +1,210 @@
+"""
+Tags Controller: handles tagging/untagging of entities
+and provides autocomplete support.
+"""
+
+from six import text_type
+from sqlalchemy.sql import select
+from sqlalchemy.sql.expression import and_, func
+
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController, UsesTagsMixin
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class TagsController( BaseUIController, UsesTagsMixin ):
+
+    @web.expose
+    @web.require_login( "edit item tags" )
+    def get_tagging_elt_async( self, trans, item_id, item_class, elt_context="" ):
+        """
+        Returns HTML for editing an item's tags.
+        """
+        item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
+        if not item:
+            return trans.show_error_message( "No item of class %s with id %s " % ( item_class, item_id ) )
+        return trans.fill_template( "/tagging_common.mako",
+                                    tag_type="individual",
+                                    user=trans.user,
+                                    tagged_item=item,
+                                    elt_context=elt_context,
+                                    in_form=False,
+                                    input_size="22",
+                                    tag_click_fn="default_tag_click_fn",
+                                    use_toggle_link=False )
+
+    @web.expose
+    @web.require_login( "add tag to an item" )
+    def add_tag_async( self, trans, item_id=None, item_class=None, new_tag=None, context=None ):
+        """
+        Add tag to an item.
+        """
+        # Apply tag.
+        item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
+        user = trans.user
+        self.get_tag_handler( trans ).apply_item_tags( user, item, new_tag.encode( 'utf-8' ) )
+        trans.sa_session.flush()
+        # Log.
+        params = dict( item_id=item.id, item_class=item_class, tag=new_tag )
+        trans.log_action( user, text_type( "tag" ), context, params )
+
+    @web.expose
+    @web.require_login( "remove tag from an item" )
+    def remove_tag_async( self, trans, item_id=None, item_class=None, tag_name=None, context=None ):
+        """
+        Remove tag from an item.
+        """
+        # Remove tag.
+        item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
+        user = trans.user
+        self.get_tag_handler( trans ).remove_item_tag( user, item, tag_name.encode( 'utf-8' ) )
+        trans.sa_session.flush()
+        # Log.
+        params = dict( item_id=item.id, item_class=item_class, tag=tag_name )
+        trans.log_action( user, text_type( "untag" ), context, params )
+
+    # Retag an item. All previous tags are deleted and new tags are applied.
+    @web.expose
+    @web.require_login( "Apply a new set of tags to an item; previous tags are deleted." )
+    def retag_async( self, trans, item_id=None, item_class=None, new_tags=None ):
+        """
+        Apply a new set of tags to an item; previous tags are deleted.
+        """
+        # Apply tags.
+        item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
+        user = trans.user
+        self.get_tag_handler( trans ).delete_item_tags( user, item )
+        self.get_tag_handler( trans ).apply_item_tags( user, item, new_tags.encode( 'utf-8' ) )
+        trans.sa_session.flush()
+
+    @web.expose
+    @web.require_login( "get autocomplete data for an item's tags" )
+    def tag_autocomplete_data( self, trans, q=None, limit=None, timestamp=None, item_id=None, item_class=None ):
+        """
+        Get autocomplete data for an item's tags.
+        """
+        # Get item, do security check, and get autocomplete data.
+        item = None
+        if item_id is not None:
+            item = self._get_item( trans, item_class, trans.security.decode_id( item_id ) )
+        user = trans.user
+        item_class = self.get_class( item_class )
+        q = '' if q is None else q
+        q = q.encode( 'utf-8' )
+        if q.find( ":" ) == -1:
+            return self._get_tag_autocomplete_names( trans, q, limit, timestamp, user, item, item_class )
+        else:
+            return self._get_tag_autocomplete_values( trans, q, limit, timestamp, user, item, item_class )
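+    # Dispatch example (illustrative only): q='genome' completes tag names, while
+    # q='genome:hg' completes values for the existing 'genome' tag.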
+
+    def _get_tag_autocomplete_names( self, trans, q, limit, timestamp, user=None, item=None, item_class=None ):
+        """
+        Returns autocomplete data for tag names ordered from most frequently used to
+        least frequently used.
+        """
+        # Get user's item tags and usage counts.
+        # Get item's class object and item-tag association class.
+        if item is None and item_class is None:
+            raise RuntimeError( "Both item and item_class cannot be None" )
+        elif item is not None:
+            item_class = item.__class__
+        item_tag_assoc_class = self.get_tag_handler( trans ).get_tag_assoc_class( item_class )
+        # Build select statement.
+        cols_to_select = [ item_tag_assoc_class.table.c.tag_id, func.count( '*' ) ]
+        from_obj = item_tag_assoc_class.table.join( item_class.table ).join( trans.app.model.Tag.table )
+        where_clause = and_( trans.app.model.Tag.table.c.name.like( q + "%" ),
+                             item_tag_assoc_class.table.c.user_id == user.id )
+        order_by = [ func.count( "*" ).desc() ]
+        group_by = item_tag_assoc_class.table.c.tag_id
+        # Do query and get result set.
+        query = select( columns=cols_to_select,
+                        from_obj=from_obj,
+                        whereclause=where_clause,
+                        group_by=group_by,
+                        order_by=order_by,
+                        limit=limit )
+        result_set = trans.sa_session.execute( query )
+        # Create and return autocomplete data.
+        ac_data = "#Header|Your Tags\n"
+        for row in result_set:
+            tag = self.get_tag_handler( trans ).get_tag_by_id( row[0] )
+            # Exclude tags that are already applied to the item.
+            if ( item is not None ) and ( self.get_tag_handler( trans ).item_has_tag( trans.user, item, tag ) ):
+                continue
+            # Add tag to autocomplete data. Use the most frequent name that user
+            # has employed for the tag.
+            tag_names = self._get_usernames_for_tag( trans, trans.user, tag, item_class, item_tag_assoc_class )
+            ac_data += tag_names[0] + "|" + tag_names[0] + "\n"
+        return ac_data
+
+    def _get_tag_autocomplete_values( self, trans, q, limit, timestamp, user=None, item=None, item_class=None ):
+        """
+        Returns autocomplete data for tag values ordered from most frequently used to
+        least frequently used.
+        """
+        tag_name_and_value = q.split( ":" )
+        tag_name = tag_name_and_value[0]
+        tag_value = tag_name_and_value[1]
+        tag = self.get_tag_handler( trans ).get_tag_by_name( tag_name )
+        # Don't autocomplete if tag doesn't exist.
+        if tag is None:
+            return ""
+        # Get item's class object and item-tag association class.
+        if item is None and item_class is None:
+            raise RuntimeError( "Both item and item_class cannot be None" )
+        elif item is not None:
+            item_class = item.__class__
+        item_tag_assoc_class = self.get_tag_handler( trans ).get_tag_assoc_class( item_class )
+        # Build select statement.
+        cols_to_select = [ item_tag_assoc_class.table.c.value, func.count( '*' ) ]
+        from_obj = item_tag_assoc_class.table.join( item_class.table ).join( trans.app.model.Tag.table )
+        where_clause = and_( item_tag_assoc_class.table.c.user_id == user.id,
+                             trans.app.model.Tag.table.c.id == tag.id,
+                             item_tag_assoc_class.table.c.value.like( tag_value + "%" ) )
+        order_by = [ func.count("*").desc(), item_tag_assoc_class.table.c.value ]
+        group_by = item_tag_assoc_class.table.c.value
+        # Do query and get result set.
+        query = select( columns=cols_to_select,
+                        from_obj=from_obj,
+                        whereclause=where_clause,
+                        group_by=group_by,
+                        order_by=order_by,
+                        limit=limit )
+        result_set = trans.sa_session.execute( query )
+        # Create and return autocomplete data.
+        ac_data = "#Header|Your Values for '%s'\n" % ( tag_name )
+        tag_uname = self._get_usernames_for_tag( trans, trans.user, tag, item_class, item_tag_assoc_class )[0]
+        for row in result_set:
+            ac_data += tag_uname + ":" + row[0] + "|" + row[0] + "\n"
+        return ac_data
+
+    def _get_usernames_for_tag( self, trans, user, tag, item_class, item_tag_assoc_class ):
+        """
+        Returns an ordered list of the user names for a tag; list is ordered from
+        most popular to least popular name.
+        """
+        # Build select stmt.
+        cols_to_select = [ item_tag_assoc_class.table.c.user_tname, func.count( '*' ) ]
+        where_clause = and_( item_tag_assoc_class.table.c.user_id == user.id,
+                             item_tag_assoc_class.table.c.tag_id == tag.id )
+        group_by = item_tag_assoc_class.table.c.user_tname
+        order_by = [ func.count( "*" ).desc() ]
+        # Do query and get result set.
+        query = select( columns=cols_to_select,
+                        whereclause=where_clause,
+                        group_by=group_by,
+                        order_by=order_by )
+        result_set = trans.sa_session.execute( query )
+        return [ row[0] for row in result_set ]
+
+    def _get_item( self, trans, item_class_name, id ):
+        """
+        Get an item based on type and id.
+        """
+        item_class = self.get_tag_handler( trans ).item_tag_assoc_info[item_class_name].item_class
+        item = trans.sa_session.query( item_class ).filter( item_class.id == id )[0]
+        return item
diff --git a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
new file mode 100644
index 0000000..2c411bd
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -0,0 +1,147 @@
+"""
+Controller handles external tool related requests
+"""
+import logging
+
+from markupsafe import escape
+
+import galaxy.util
+from galaxy import web
+from galaxy.tools import DataSourceTool
+from galaxy.web import error, url_for
+from galaxy.web.base.controller import BaseUIController
+
+log = logging.getLogger( __name__ )
+
+
+class ToolRunner( BaseUIController ):
+
+    # Hack to get biomart to work; ideally, we could pass tool_id to biomart and receive it back.
+    @web.expose
+    def biomart(self, trans, tool_id='biomart', **kwd):
+        """Catches the tool id and redirects as needed"""
+        return self.index(trans, tool_id=tool_id, **kwd)
+
+    # Test to get hapmap to work; ideally, we could pass tool_id to the hapmap biomart and receive it back.
+    @web.expose
+    def hapmapmart(self, trans, tool_id='hapmapmart', **kwd):
+        """Catches the tool id and redirects as needed"""
+        return self.index(trans, tool_id=tool_id, **kwd)
+
+    @web.expose
+    def default(self, trans, tool_id=None, **kwd):
+        """Catches the tool id and redirects as needed"""
+        return self.index(trans, tool_id=tool_id, **kwd)
+
+    def __get_tool( self, tool_id, tool_version=None, get_loaded_tools_by_lineage=False, set_selected=False ):
+        tool_version_select_field, tools, tool = self.get_toolbox().get_tool_components( tool_id, tool_version, get_loaded_tools_by_lineage, set_selected )
+        return tool
+
+    @web.expose
+    def index( self, trans, tool_id=None, from_noframe=None, **kwd ):
+        def __tool_404__():
+            log.error( 'index called with tool id \'%s\' but no such tool exists', tool_id )
+            trans.log_event( 'Tool id \'%s\' does not exist' % tool_id )
+            trans.response.status = 404
+            return trans.show_error_message('Tool \'%s\' does not exist.' % ( escape(tool_id) ))
+        # tool id not available, redirect to main page
+        if tool_id is None:
+            return trans.response.send_redirect( url_for( controller='root', action='welcome' ) )
+        tool = self.__get_tool( tool_id )
+        # tool id is not matching, display an error
+        if not tool:
+            return __tool_404__()
+        if tool.require_login and not trans.user:
+            redirect = url_for( controller='tool_runner', action='index', tool_id=tool_id, **kwd )
+            return trans.response.send_redirect( url_for( controller='user',
+                                                          action='login',
+                                                          cntrller='user',
+                                                          status='info',
+                                                          message='You must be logged in to use this tool.',
+                                                          redirect=redirect ) )
+        if not tool.allow_user_access( trans.user ):
+            return __tool_404__()
+        if tool.tool_type == 'default':
+            return trans.response.send_redirect( url_for( controller='root', tool_id=tool_id ) )
+
+        # execute tool without displaying form (used for datasource tools)
+        params = galaxy.util.Params( kwd, sanitize=False )
+        # do param translation here, used by datasource tools
+        if tool.input_translator:
+            tool.input_translator.translate( params )
+        if 'runtool_btn' not in params.__dict__ and 'URL' not in params.__dict__:
+            error( 'Tool execution through the `tool_runner` requires a `runtool_btn` flag or `URL` parameter.' )
+        # We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
+        # so make sure to create a new history if we've never had one before.
+        history = tool.get_default_history_by_trans( trans, create=True )
+        try:
+            template_vars = tool.handle_input( trans, params.__dict__, history=history )
+        except Exception as e:
+            error( str( e ) )
+        if len( params ) > 0:
+            trans.log_event( 'Tool params: %s' % ( str( params ) ), tool_id=tool_id )
+        return trans.fill_template( 'root/tool_runner.mako', **template_vars )
+
+    @web.expose
+    def rerun( self, trans, id=None, job_id=None, **kwd ):
+        """
+        Given a HistoryDatasetAssociation id, find the job that created the
+        dataset, extract its parameters, and display the appropriate tool form
+        with the parameters already filled in.
+        """
+        if job_id is None:
+            if not id:
+                error( "'id' parameter is required" )
+            try:
+                id = int( id )
+            except ValueError:
+                # it's not an un-encoded id, try to parse as encoded
+                try:
+                    id = trans.security.decode_id( id )
+                except Exception:
+                    error( "Invalid value for 'id' parameter" )
+            # Get the dataset object
+            data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
+            # only allow rerunning if user is allowed access to the dataset.
+            if not ( trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ) ):
+                error( "You are not allowed to access this dataset" )
+            # Get the associated job, if any.
+            job = data.creating_job
+            if job:
+                job_id = trans.security.encode_id( job.id )
+            else:
+                raise Exception("Failed to get job information for dataset hid %d" % data.hid)
+        return trans.response.send_redirect( url_for( controller="root", job_id=job_id ) )
+
+    @web.expose
+    def data_source_redirect( self, trans, tool_id=None ):
+        """
+        Redirects a user accessing a Data Source tool to its target action link.
+        This method will subvert mixed-content blocking in several browsers when
+        accessing non-https data_source tools from an https Galaxy server.
+
+        Tested as working on Safari 7.0 and Firefox 26; subverting did not work
+        on Chrome 31.
+        """
+        if tool_id is None:
+            return trans.response.send_redirect( url_for( controller="root", action="welcome" ) )
+        tool = self.__get_tool( tool_id )
+        # No tool matching the tool id, display an error (shouldn't happen)
+        if not tool:
+            log.error( "data_source_redirect called with tool id '%s' but no such tool exists", tool_id )
+            trans.log_event( "Tool id '%s' does not exist" % tool_id )
+            trans.response.status = 404
+            return trans.show_error_message("Tool '%s' does not exist." % ( escape(tool_id) ))
+
+        if isinstance( tool, DataSourceTool ):
+            link = url_for( tool.action, **tool.get_static_param_values( trans ) )
+        else:
+            link = url_for( controller='tool_runner', tool_id=tool.id )
+        return trans.response.send_redirect( link )
+
+    @web.expose
+    def redirect( self, trans, redirect_url=None, **kwd ):
+        if not redirect_url:
+            return trans.show_error_message( "Required URL for redirection missing" )
+        trans.log_event( "Redirecting to: %s" % redirect_url )
+        return trans.fill_template( 'root/redirect.mako', redirect_url=redirect_url )
diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py
new file mode 100644
index 0000000..712bc49
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -0,0 +1,1879 @@
+"""
+Contains the controller for user account management
+"""
+
+import glob
+import logging
+import os
+import random
+import socket
+import urllib
+from datetime import datetime, timedelta
+from json import dumps, loads
+
+from markupsafe import escape
+from sqlalchemy import and_, or_, true, func
+
+from galaxy import model
+from galaxy import util
+from galaxy import web
+from galaxy.util import string_as_bool
+from galaxy.exceptions import ObjectInvalid
+from galaxy.security.validate_user_input import (transform_publicname,
+                                                 validate_email,
+                                                 validate_password,
+                                                 validate_publicname)
+from galaxy.tools.toolbox.filters import FilterFactory
+from galaxy.util import biostar, hash_util, docstring_trim, listify
+from galaxy.web import url_for
+from galaxy.web.base.controller import (BaseUIController,
+                                        CreatesApiKeysMixin,
+                                        CreatesUsersMixin,
+                                        UsesFormDefinitionsMixin)
+from galaxy.web.form_builder import build_select_field, CheckboxField
+from galaxy.web.framework.helpers import grids, time_ago
+
+log = logging.getLogger( __name__ )
+
+REQUIRE_LOGIN_TEMPLATE = """
+<p>
+    This %s has been configured such that only users who are logged in may use it.%s
+</p>
+"""
+
+PASSWORD_RESET_TEMPLATE = """
+To reset your Galaxy password for the instance at %s use the following link,
+which will expire %s.
+
+%s
+
+If you did not make this request, no action is necessary on your part, though
+you may want to notify an administrator.
+
+If you're having trouble using the link when clicking it from your email client,
+you can also copy and paste it into your browser.
+"""
+
+
+class UserOpenIDGrid( grids.Grid ):
+    use_panels = False
+    title = "OpenIDs linked to your account"
+    model_class = model.UserOpenID
+    template = '/user/openid_manage.mako'
+    default_filter = { "openid": "All" }
+    default_sort_key = "-create_time"
+    columns = [
+        grids.TextColumn( "OpenID URL", key="openid", link=( lambda x: dict( action='openid_auth', login_button="Login", openid_url=x.openid if not x.provider else '', openid_provider=x.provider, auto_associate=True ) ) ),
+        grids.GridColumn( "Created", key="create_time", format=time_ago ),
+    ]
+    operations = [
+        grids.GridOperation( "Delete", async_compatible=True ),
+    ]
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( self.model_class ).filter( self.model_class.user_id == trans.user.id )
+
+
+class User( BaseUIController, UsesFormDefinitionsMixin, CreatesUsersMixin, CreatesApiKeysMixin ):
+    user_openid_grid = UserOpenIDGrid()
+    installed_len_files = None
+
+    @web.expose
+    def index( self, trans, cntrller='user', **kwd ):
+        return trans.fill_template( '/user/index.mako', cntrller=cntrller )
+
+    @web.expose
+    def openid_auth( self, trans, **kwd ):
+        '''Handles user request to access an OpenID provider'''
+        if not trans.app.config.enable_openid:
+            return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
+        message = 'Unspecified failure authenticating via OpenID'
+        openid_url = kwd.get( 'openid_url', '' )
+        openid_provider = kwd.get( 'openid_provider', '' )
+        if not openid_provider or openid_url:
+            openid_provider = trans.app.openid_providers.NO_PROVIDER_ID  # empty fields cause validation errors
+        redirect = kwd.get( 'redirect', '' ).strip()
+        auto_associate = util.string_as_bool( kwd.get( 'auto_associate', False ) )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        action = 'login'
+        consumer = trans.app.openid_manager.get_consumer( trans )
+        if openid_url:
+            openid_provider_obj = trans.app.openid_providers.new_provider_from_identifier( openid_url )
+        else:
+            openid_provider_obj = trans.app.openid_providers.get( openid_provider )
+        if not openid_url and openid_provider == trans.app.openid_providers.NO_PROVIDER_ID:
+            message = 'An OpenID provider was not specified'
+        elif openid_provider_obj:
+            if not redirect:
+                redirect = ' '
+            process_url = trans.request.base.rstrip( '/' ) + url_for( controller='user', action='openid_process', redirect=redirect, openid_provider=openid_provider, auto_associate=auto_associate )  # None of these values can be empty, or else a verification error will occur
+            request = None
+            try:
+                request = consumer.begin( openid_provider_obj.op_endpoint_url )
+                if request is None:
+                    message = 'No OpenID services are available at %s' % openid_provider_obj.op_endpoint_url
+            except Exception as e:
+                message = 'Failed to begin OpenID authentication: %s' % str( e )
+            if request is not None:
+                trans.app.openid_manager.add_sreg( trans, request, required=openid_provider_obj.sreg_required, optional=openid_provider_obj.sreg_optional )
+                if request.shouldSendRedirect():
+                    redirect_url = request.redirectURL(
+                        trans.request.base, process_url )
+                    trans.app.openid_manager.persist_session( trans, consumer )
+                    return trans.response.send_redirect( redirect_url )
+                else:
+                    form = request.htmlMarkup( trans.request.base, process_url, form_tag_attrs={'id': 'openid_message', 'target': '_top'} )
+                    trans.app.openid_manager.persist_session( trans, consumer )
+                    return form
+        return trans.response.send_redirect( url_for( controller='user',
+                                                      action=action,
+                                                      redirect=redirect,
+                                                      use_panels=use_panels,
+                                                      message=message,
+                                                      status='error' ) )
+
+    @web.expose
+    def openid_process( self, trans, **kwd ):
+        '''Handles responses from OpenID providers'''
+        if not trans.app.config.enable_openid:
+            return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
+        auto_associate = util.string_as_bool( kwd.get( 'auto_associate', False ) )
+        action = 'login'
+        if trans.user:
+            action = 'openid_manage'
+        if trans.app.config.support_url is not None:
+            contact = '<a href="%s">support</a>' % trans.app.config.support_url
+        else:
+            contact = 'support'
+        message = 'Verification failed for an unknown reason.  Please contact %s for assistance.' % ( contact )
+        status = 'error'
+        consumer = trans.app.openid_manager.get_consumer( trans )
+        info = consumer.complete( kwd, trans.request.url )
+        display_identifier = info.getDisplayIdentifier()
+        redirect = kwd.get( 'redirect', '' ).strip()
+        openid_provider = kwd.get( 'openid_provider', None )
+        if info.status == trans.app.openid_manager.FAILURE and display_identifier:
+            message = "Login via OpenID failed.  The technical reason for this follows, please include this message in your email if you need to %s to resolve this problem: %s" % ( contact, info.message )
+            return trans.response.send_redirect( url_for( controller='user',
+                                                          action=action,
+                                                          use_panels=True,
+                                                          redirect=redirect,
+                                                          message=message,
+                                                          status='error' ) )
+        elif info.status == trans.app.openid_manager.SUCCESS:
+            if info.endpoint.canonicalID:
+                display_identifier = info.endpoint.canonicalID
+            openid_provider_obj = trans.app.openid_providers.get( openid_provider )
+            user_openid = trans.sa_session.query( trans.app.model.UserOpenID ).filter( trans.app.model.UserOpenID.table.c.openid == display_identifier ).first()
+            if not openid_provider_obj and user_openid and user_openid.provider:
+                openid_provider_obj = trans.app.openid_providers.get( user_openid.provider )
+            if not openid_provider_obj:
+                openid_provider_obj = trans.app.openid_providers.new_provider_from_identifier( display_identifier )
+            if not user_openid:
+                user_openid = trans.app.model.UserOpenID( session=trans.galaxy_session, openid=display_identifier )
+            if not user_openid.user:
+                user_openid.session = trans.galaxy_session
+            if not user_openid.provider and openid_provider:
+                user_openid.provider = openid_provider
+            if trans.user:
+                if user_openid.user and user_openid.user.id != trans.user.id:
+                    message = "The OpenID <strong>%s</strong> is already associated with another Galaxy account, <strong>%s</strong>.  Please disassociate it from that account before attempting to associate it with a new account." % ( escape( display_identifier ), escape( user_openid.user.email ) )
+                if not trans.user.active and trans.app.config.user_activation_on:  # Account activation is ON and the user is INACTIVE.
+                    if ( trans.app.config.activation_grace_period != 0 ):  # grace period is ON
+                        if self.is_outside_grace_period( trans, trans.user.create_time ):  # User is outside the grace period. Login is disabled and the activation email is resent.
+                            message, status = self.resend_verification_email( trans, trans.user.email, trans.user.username )
+                        else:  # User is within the grace period; let them log in.
+                            pass
+                    else:  # Grace period is off. Login is disabled and the activation email is resent.
+                        message, status = self.resend_verification_email( trans, trans.user.email, trans.user.username )
+                elif not user_openid.user or user_openid.user == trans.user:
+                    if openid_provider_obj.id:
+                        user_openid.provider = openid_provider_obj.id
+                    user_openid.session = trans.galaxy_session
+                    if not openid_provider_obj.never_associate_with_user:
+                        if not auto_associate and ( user_openid.user and user_openid.user.id == trans.user.id ):
+                            message = "The OpenID <strong>%s</strong> is already associated with your Galaxy account, <strong>%s</strong>." % ( escape( display_identifier ), escape( trans.user.email ) )
+                            status = "warning"
+                        else:
+                            message = "The OpenID <strong>%s</strong> has been associated with your Galaxy account, <strong>%s</strong>." % ( escape( display_identifier ), escape( trans.user.email ) )
+                            status = "done"
+                        user_openid.user = trans.user
+                        trans.sa_session.add( user_openid )
+                        trans.sa_session.flush()
+                        trans.log_event( "User associated OpenID: %s" % display_identifier )
+                    else:
+                        message = "The OpenID <strong>%s</strong> cannot be used to log into your Galaxy account, but any post authentication actions have been performed." % escape( openid_provider_obj.name )
+                        status = "info"
+                    openid_provider_obj.post_authentication( trans, trans.app.openid_manager, info )
+                    if redirect:
+                        message = '%s<br>Click <a href="%s"><strong>here</strong></a> to return to the page you were previously viewing.' % ( message, escape( self.__get_redirect_url( redirect ) ) )
+                if redirect and status != "error":
+                    return trans.response.send_redirect( self.__get_redirect_url( redirect ) )
+                return trans.response.send_redirect( url_for( controller='user',
+                                                     action='openid_manage',
+                                                     use_panels=True,
+                                                     redirect=redirect,
+                                                     message=message,
+                                                     status=status ) )
+            elif user_openid.user:
+                trans.handle_user_login( user_openid.user )
+                trans.log_event( "User logged in via OpenID: %s" % display_identifier )
+                openid_provider_obj.post_authentication( trans, trans.app.openid_manager, info )
+                if not redirect:
+                    redirect = url_for( '/' )
+                redirect = self.__get_redirect_url( redirect )
+                return trans.response.send_redirect( redirect )
+            trans.sa_session.add( user_openid )
+            trans.sa_session.flush()
+            message = "OpenID authentication was successful, but you need to associate your OpenID with a Galaxy account."
+            sreg_resp = trans.app.openid_manager.get_sreg( info )
+            try:
+                sreg_username_name = openid_provider_obj.use_for.get( 'username' )
+                username = sreg_resp.get( sreg_username_name, '' )
+            except AttributeError:
+                username = ''
+            try:
+                sreg_email_name = openid_provider_obj.use_for.get( 'email' )
+                email = sreg_resp.get( sreg_email_name, '' )
+            except AttributeError:
+                email = ''
+            # OpenID success, but user not logged in, and not previously associated
+            return trans.response.send_redirect( url_for( controller='user',
+                                                 action='openid_associate',
+                                                 use_panels=True,
+                                                 redirect=redirect,
+                                                 username=username,
+                                                 email=email,
+                                                 message=message,
+                                                 status='warning' ) )
+        elif info.status == trans.app.openid_manager.CANCEL:
+            message = "Login via OpenID was cancelled by an action at the OpenID provider's site."
+            status = "warning"
+        elif info.status == trans.app.openid_manager.SETUP_NEEDED:
+            if info.setup_url:
+                return trans.response.send_redirect( info.setup_url )
+            else:
+                message = "Unable to log in via OpenID.  Setup at the provider is required before this OpenID can be used.  Please visit your provider's site to complete this step."
+        return trans.response.send_redirect( url_for( controller='user',
+                                                      action=action,
+                                                      use_panels=True,
+                                                      redirect=redirect,
+                                                      message=message,
+                                                      status=status ) )
+
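+    # The SUCCESS / CANCEL / SETUP_NEEDED constants tested above correspond to
+    # the completion statuses of an OpenID consumer; openid_manager presumably
+    # wraps the python-openid package. A minimal sketch of the same dispatch,
+    # assuming python-openid (session, store, query and return_to are
+    # placeholders, not Galaxy names):
+    #
+    #     from openid.consumer import consumer
+    #     info = consumer.Consumer( session, store ).complete( query, return_to )
+    #     if info.status == consumer.SUCCESS:
+    #         pass  # identity verified; associate the OpenID or log the user in
+    #     elif info.status == consumer.CANCEL:
+    #         pass  # the user declined at the provider's site
+    #     elif info.status == consumer.SETUP_NEEDED:
+    #         pass  # immediate mode failed; redirect the user to info.setup_url
+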
+    @web.expose
+    def openid_associate( self, trans, cntrller='user', **kwd ):
+        '''Associate a user with an OpenID login.'''
+        if not trans.app.config.enable_openid:
+            return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        message = escape( kwd.get( 'message', ''  ) )
+        status = kwd.get( 'status', 'done' )
+        email = kwd.get( 'email', '' )
+        username = kwd.get( 'username', '' )
+        redirect = kwd.get( 'redirect', '' ).strip()
+        params = util.Params( kwd )
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        openids = trans.galaxy_session.openids
+        user = None
+        if not openids:
+            return trans.show_error_message( 'You have not successfully completed an OpenID authentication in this session.  You can do so on the <a href="%s">login</a> page.' % url_for( controller='user', action='login', use_panels=use_panels ) )
+        elif is_admin:
+            return trans.show_error_message( 'Associating OpenIDs with accounts cannot be done by administrators.' )
+        if kwd.get( 'login_button', False ):
+            message, status, user, success = self.__validate_login( trans, **kwd )
+            if success:
+                openid_objs = []
+                for openid in openids:
+                    openid_provider_obj = trans.app.openid_providers.get( openid.provider )
+                    if not openid_provider_obj or not openid_provider_obj.never_associate_with_user:
+                        openid.user = user
+                        trans.sa_session.add( openid )
+                        trans.log_event( "User associated OpenID: %s" % openid.openid )
+                    if openid_provider_obj and openid_provider_obj.has_post_authentication_actions():
+                        openid_objs.append( openid_provider_obj )
+                trans.sa_session.flush()
+                if len( openid_objs ) == 1:
+                    return trans.response.send_redirect( url_for( controller='user', action='openid_auth', openid_provider=openid_objs[0].id, redirect=redirect, auto_associate=True ) )
+                elif openid_objs:
+                    message = 'You have authenticated with several OpenID providers. Please click the following links to execute the post-authentication actions. '
+                    message = "%s<br/><ul>" % ( message )
+                    for openid in openid_objs:
+                        message = '%s<li><a href="%s" target="_blank">%s</a></li>' % ( message, url_for( controller='user', action='openid_auth', openid_provider=openid.id, redirect=redirect, auto_associate=True ), openid.name )
+                    message = "%s</ul>" % ( message )
+                    return trans.response.send_redirect( url_for( controller='user',
+                                                                  action='openid_manage',
+                                                                  use_panels=use_panels,
+                                                                  redirect=redirect,
+                                                                  message=message,
+                                                                  status='info' ) )
+                if redirect:
+                    return trans.response.send_redirect( redirect )
+                return trans.response.send_redirect( url_for( controller='user',
+                                                              action='openid_manage',
+                                                              use_panels=use_panels,
+                                                              redirect=redirect,
+                                                              message=message,
+                                                              status='info' ) )
+        if kwd.get( 'create_user_button', False ):
+            password = kwd.get( 'password', '' )
+            confirm = kwd.get( 'confirm', '' )
+            subscribe = params.get( 'subscribe', '' )
+            subscribe_checked = CheckboxField.is_checked( subscribe )
+            error = ''
+            if not trans.app.config.allow_user_creation and not trans.user_is_admin():
+                error = 'User registration is disabled.  Please contact your local Galaxy administrator for an account.'
+            else:
+                # Check email and password validity
+                error = self.__validate( trans, params, email, password, confirm, username )
+                if not error:
+                    # all the values are valid
+                    message, status, user, success = self.__register( trans,
+                                                                      cntrller,
+                                                                      subscribe_checked,
+                                                                      **kwd )
+                    if success:
+                        openid_objs = []
+                        for openid in openids:
+                            openid_provider_obj = trans.app.openid_providers.get( openid.provider )
+                            if not openid_provider_obj:
+                                openid_provider_obj = trans.app.openid_providers.new_provider_from_identifier( openid.identifier )
+                            if not openid_provider_obj.never_associate_with_user:
+                                openid.user = user
+                                trans.sa_session.add( openid )
+                                trans.log_event( "User associated OpenID: %s" % openid.openid )
+                            if openid_provider_obj.has_post_authentication_actions():
+                                openid_objs.append( openid_provider_obj )
+                        trans.sa_session.flush()
+                        if len( openid_objs ) == 1:
+                            return trans.response.send_redirect( url_for( controller='user', action='openid_auth', openid_provider=openid_objs[0].id, redirect=redirect, auto_associate=True ) )
+                        elif openid_objs:
+                            message = 'You have authenticated with several OpenID providers. Please click the following links to execute the post-authentication actions. '
+                            message = "%s<br/><ul>" % ( message )
+                            for openid in openid_objs:
+                                message = '%s<li><a href="%s" target="_blank">%s</a></li>' % ( message, url_for( controller='user', action='openid_auth', openid_provider=openid.id, redirect=redirect, auto_associate=True ), openid.name )
+                            message = "%s</ul>" % ( message )
+                            return trans.response.send_redirect( url_for( controller='user',
+                                                                          action='openid_manage',
+                                                                          use_panels=True,
+                                                                          redirect=redirect,
+                                                                          message=message,
+                                                                          status='info' ) )
+                        if redirect:
+                            return trans.response.send_redirect( redirect )
+                        return trans.response.send_redirect( url_for( controller='user',
+                                                                      action='openid_manage',
+                                                                      use_panels=use_panels,
+                                                                      redirect=redirect,
+                                                                      message=message,
+                                                                      status='info' ) )
+                else:
+                    message = error
+                    status = 'error'
+        if trans.webapp.name == 'galaxy':
+            user_type_form_definition = self.__get_user_type_form_definition( trans, user=user, **kwd )
+            user_type_fd_id = params.get( 'user_type_fd_id', 'none' )
+            if user_type_fd_id == 'none' and user_type_form_definition is not None:
+                user_type_fd_id = trans.security.encode_id( user_type_form_definition.id )
+            user_type_fd_id_select_field = self.__build_user_type_fd_id_select_field( trans, selected_value=user_type_fd_id )
+            widgets = self.__get_widgets( trans, user_type_form_definition, user=user, **kwd )
+        else:
+            user_type_fd_id_select_field = None
+            user_type_form_definition = None
+            widgets = []
+        return trans.fill_template( '/user/openid_associate.mako',
+                                    cntrller=cntrller,
+                                    email=email,
+                                    password='',
+                                    confirm='',
+                                    username=transform_publicname( trans, username ),
+                                    header='',
+                                    use_panels=use_panels,
+                                    redirect=redirect,
+                                    refresh_frames=[],
+                                    message=message,
+                                    status=status,
+                                    active_view="user",
+                                    subscribe_checked=False,
+                                    user_type_fd_id_select_field=user_type_fd_id_select_field,
+                                    user_type_form_definition=user_type_form_definition,
+                                    widgets=widgets,
+                                    openids=openids )
+
+    @web.expose
+    @web.require_login( 'manage OpenIDs' )
+    def openid_disassociate( self, trans, **kwd ):
+        '''Disassociate a user from an OpenID.'''
+        if not trans.app.config.enable_openid:
+            return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
+        params = util.Params( kwd )
+        ids = params.get( 'id', None )
+        message = params.get( 'message', None )
+        status = params.get( 'status', None )
+        use_panels = params.get( 'use_panels', False )
+        user_openids = []
+        if not ids:
+            message = 'You must select at least one OpenID to disassociate from your Galaxy account.'
+            status = 'error'
+        else:
+            ids = util.listify( params.id )
+            for id in ids:
+                id = trans.security.decode_id( id )
+                user_openid = trans.sa_session.query( trans.app.model.UserOpenID ).get( int( id ) )
+                if not user_openid or ( trans.user.id != user_openid.user_id ):
+                    message = 'The selected OpenID(s) are not associated with your Galaxy account.'
+                    status = 'error'
+                    user_openids = []
+                    break
+                user_openids.append( user_openid )
+            if user_openids:
+                deleted_urls = []
+                for user_openid in user_openids:
+                    trans.sa_session.delete( user_openid )
+                    deleted_urls.append( user_openid.openid )
+                trans.sa_session.flush()
+                for deleted_url in deleted_urls:
+                    trans.log_event( "User disassociated OpenID: %s" % deleted_url )
+                message = '%s OpenIDs were disassociated from your Galaxy account.' % len( ids )
+                status = 'done'
+        return trans.response.send_redirect( url_for( controller='user',
+                                                      action='openid_manage',
+                                                      use_panels=use_panels,
+                                                      message=message,
+                                                      status=status ) )
+
+    @web.expose
+    @web.require_login( 'manage OpenIDs' )
+    def openid_manage( self, trans, **kwd ):
+        '''Manage OpenIDs for user'''
+        if not trans.app.config.enable_openid:
+            return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
+        use_panels = kwd.get( 'use_panels', False )
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "delete":
+                return trans.response.send_redirect( url_for( controller='user',
+                                                              action='openid_disassociate',
+                                                              use_panels=use_panels,
+                                                              id=kwd['id'] ) )
+        kwd['redirect'] = kwd.get( 'redirect', url_for( controller='user', action='openid_manage', use_panels=True ) ).strip()
+        kwd['openid_providers'] = trans.app.openid_providers
+        return self.user_openid_grid( trans, **kwd )
+
+    @web.expose
+    def login( self, trans, refresh_frames=[], **kwd ):
+        '''Handle Galaxy login.'''
+        referer = trans.request.referer or ''
+        redirect = self.__get_redirect_url( kwd.get( 'redirect', referer ).strip() )
+        redirect_url = ''  # always start with redirect_url being empty
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
+        message = kwd.get( 'message', '' )
+        status = kwd.get( 'status', 'done' )
+        header = ''
+        user = trans.user
+        login = kwd.get( 'login', '' )
+        if user:
+            # Already logged in.
+            redirect_url = redirect
+            message = 'You are already logged in.'
+            status = 'info'
+        elif kwd.get( 'login_button', False ):
+            if trans.webapp.name == 'galaxy' and not refresh_frames:
+                if trans.app.config.require_login:
+                    refresh_frames = [ 'masthead', 'history', 'tools' ]
+                else:
+                    refresh_frames = [ 'masthead', 'history' ]
+            message, status, user, success = self.__validate_login( trans, **kwd )
+            if success:
+                redirect_url = redirect
+        if not user and trans.app.config.require_login:
+            if trans.app.config.allow_user_creation:
+                create_account_str = "  If you don't already have an account, <a href='%s'>you may create one</a>." % \
+                    web.url_for( controller='user', action='create', cntrller='user' )
+                if trans.webapp.name == 'galaxy':
+                    header = REQUIRE_LOGIN_TEMPLATE % ( "Galaxy instance", create_account_str )
+                else:
+                    header = REQUIRE_LOGIN_TEMPLATE % ( "Galaxy tool shed", create_account_str )
+            else:
+                if trans.webapp.name == 'galaxy':
+                    header = REQUIRE_LOGIN_TEMPLATE % ( "Galaxy instance", "" )
+                else:
+                    header = REQUIRE_LOGIN_TEMPLATE % ( "Galaxy tool shed", "" )
+        return trans.fill_template( '/user/login.mako',
+                                    login=login,
+                                    header=header,
+                                    use_panels=use_panels,
+                                    redirect_url=redirect_url,
+                                    redirect=redirect,
+                                    refresh_frames=refresh_frames,
+                                    message=message,
+                                    status=status,
+                                    openid_providers=trans.app.openid_providers,
+                                    form_input_auto_focus=True,
+                                    active_view="user" )
+
+    def __validate_login( self, trans, **kwd ):
+        """Validates numerous cases that might happen during the login time."""
+        status = kwd.get( 'status', 'error' )
+        login = kwd.get( 'login', '' )
+        password = kwd.get( 'password', '' )
+        referer = trans.request.referer or ''
+        redirect = kwd.get( 'redirect', referer ).strip()
+        success = False
+        user = trans.sa_session.query( trans.app.model.User ).filter(or_(
+            trans.app.model.User.table.c.email == login,
+            trans.app.model.User.table.c.username == login
+        )).first()
+        log.debug("trans.app.config.auth_config_file: %s" % trans.app.config.auth_config_file)
+        if not user:
+            autoreg = trans.app.auth_manager.check_auto_registration(trans, login, password)
+            if autoreg[0]:
+                kwd['email'] = autoreg[1]
+                kwd['username'] = autoreg[2]
+                message = " ".join( [ validate_email( trans, kwd['email'] ),
+                                      validate_publicname( trans, kwd['username'] ) ] ).rstrip()
+                if not message:
+                    message, status, user, success = self.__register( trans, 'user', False, **kwd )
+                    if success:
+                        # The handle_user_login() method has a call to the history_set_default_permissions() method
+                        # (needed when logging in with a history), user needs to have default permissions set before logging in
+                        trans.handle_user_login( user )
+                        trans.log_event( "User (auto) created a new account" )
+                        trans.log_event( "User logged in" )
+                    else:
+                        message = "Auto-registration failed; contact your local Galaxy administrator. %s" % message
+                else:
+                    message = "Auto-registration failed; contact your local Galaxy administrator. %s" % message
+            else:
+                message = "No such user or invalid password"
+        elif user.deleted:
+            message = "This account has been marked deleted, contact your local Galaxy administrator to restore the account."
+            if trans.app.config.error_email_to is not None:
+                message += ' Contact: %s' % trans.app.config.error_email_to
+        elif user.external:
+            message = "This account was created for use with an external authentication method, contact your local Galaxy administrator to activate it."
+            if trans.app.config.error_email_to is not None:
+                message += ' Contact: %s' % trans.app.config.error_email_to
+        elif not trans.app.auth_manager.check_password(user, password):
+            message = "Invalid password"
+        elif trans.app.config.user_activation_on and not user.active:  # activation is ON and the user is INACTIVE
+            if ( trans.app.config.activation_grace_period != 0 ):  # grace period is ON
+                if self.is_outside_grace_period( trans, user.create_time ):  # User is outside the grace period. Login is disabled and they will have the activation email resent.
+                    message, status = self.resend_verification_email( trans, user.email, user.username )
+                else:  # User is within the grace period, let them log in.
+                    message, success, status = self.proceed_login( trans, user, redirect )
+            else:  # Grace period is off. Login is disabled and the user will have the activation email resent.
+                message, status = self.resend_verification_email( trans, user.email, user.username )
+        else:  # activation is OFF
+            pw_expires = trans.app.config.password_expiration_period
+            if pw_expires and user.last_password_change < datetime.today() - pw_expires:
+                # Password is expired, we don't log them in.
+                message = 'Your password has expired. Please change it to access Galaxy.'
+                trans.response.send_redirect(web.url_for(controller='user',
+                                                         action='change_password',
+                                                         message=message,
+                                                         redirect_home=True,
+                                                         status='error'))
+                return ( message, 'error', user, False )
+            message, success, status = self.proceed_login( trans, user, redirect )
+            if pw_expires and user.last_password_change < datetime.today() - timedelta(days=pw_expires.days / 10):
+                # If password is about to expire, modify message to state that.
+                expiredate = user.last_password_change + pw_expires - datetime.today()
+                message = 'You are now logged in as %s. Your password will expire in %s days.<br>You can <a target="_top" href="%s">go back to the page you were visiting</a> or <a target="_top" href="%s">go to the home page</a>.' % \
+                          (user.email, expiredate.days, redirect, url_for('/'))
+                status = 'warning'
+        return ( message, status, user, success )
+
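+    # A self-contained sketch of the expiration arithmetic used above. The
+    # 90-day period is an assumed example value, not a Galaxy default; the
+    # 10%-of-period warning window comes from the code above:
+    #
+    #     from datetime import datetime, timedelta
+    #     pw_expires = timedelta( days=90 )
+    #     last_change = datetime.today() - timedelta( days=85 )
+    #     expired = last_change < datetime.today() - pw_expires  # False
+    #     warn = last_change < datetime.today() - timedelta( days=pw_expires.days / 10 )  # True
+    #     days_left = ( last_change + pw_expires - datetime.today() ).days  # ~5
+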
+    def proceed_login( self, trans, user, redirect ):
+        """
+        Process the user login. Called once all the login requirements are valid.
+        """
+        message = ''
+        trans.handle_user_login( user )
+        if trans.webapp.name == 'galaxy':
+            trans.log_event( "User logged in" )
+            message = 'You are now logged in as %s.<br>You can <a target="_top" href="%s">go back to the page you were visiting</a> or <a target="_top" href="%s">go to the home page</a>.' % \
+                ( user.email, redirect, url_for( '/' ) )
+            if trans.app.config.require_login:
+                message += '  <a target="_top" href="%s">Click here</a> to continue to the home page.' % web.url_for( controller="root", action="welcome" )
+        success = True
+        status = 'done'
+        return message, success, status
+
+    @web.expose
+    def resend_verification( self, trans ):
+        """
+        Exposed function for use outside of the class, e.g. when the user clicks the resend link in the masthead.
+        """
+        message, status = self.resend_verification_email( trans, None, None )
+        if status == 'done':
+            return trans.show_ok_message( message )
+        else:
+            return trans.show_error_message( message )
+
+    def resend_verification_email( self, trans, email, username ):
+        """
+        Resend the verification email when the user tries to log in with an inactive account or clicks the resend link.
+        """
+        if email is None:  # User is coming from outside registration form, load email from trans
+            email = trans.user.email
+        if username is None:  # User is coming from outside registration form, load username from trans
+            username = trans.user.username
+        is_activation_sent = self.send_verification_email( trans, email, username )
+        if is_activation_sent:
+            message = 'This account has not been activated yet. The activation link has been sent again. Please check your email address <b>%s</b> including the spam/trash folder.<br><a target="_top" href="%s">Return to the home page</a>.' % ( escape( email ), url_for( '/' ) )
+            status = 'error'
+        else:
+            message = 'This account has not been activated yet but we are unable to send the activation link. Please contact your local Galaxy administrator.<br><a target="_top" href="%s">Return to the home page</a>.' % url_for( '/' )
+            status = 'error'
+            if trans.app.config.error_email_to is not None:
+                message += '<br>Error contact: %s' % trans.app.config.error_email_to
+        return message, status
+
+    def is_outside_grace_period( self, trans, create_time ):
+        """
+        Check whether the user is outside the config-defined grace period for inactive accounts.
+        """
+        #  Activation is forced and the user is not active yet. Check the grace period.
+        activation_grace_period = trans.app.config.activation_grace_period
+        delta = timedelta( hours=int( activation_grace_period ) )
+        time_difference = datetime.utcnow() - create_time
+        return ( time_difference > delta or activation_grace_period == 0 )
+
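+    # Worked example of the check above (the 72-hour grace period is an
+    # assumed config value, not a Galaxy default):
+    #
+    #     from datetime import datetime, timedelta
+    #     grace_hours = 72
+    #     create_time = datetime.utcnow() - timedelta( hours=100 )
+    #     outside = ( datetime.utcnow() - create_time ) > timedelta( hours=grace_hours )
+    #     # outside is True: 100 hours elapsed exceeds the 72-hour grace
+    #     # period, so login is blocked and the activation email is resent.
+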
+    @web.expose
+    def logout( self, trans, logout_all=False ):
+        if trans.webapp.name == 'galaxy':
+            if trans.app.config.require_login:
+                refresh_frames = [ 'masthead', 'history', 'tools' ]
+            else:
+                refresh_frames = [ 'masthead', 'history' ]
+            # Since logging an event requires a session, we'll log prior to ending the session
+            trans.log_event( "User logged out" )
+        else:
+            refresh_frames = [ 'masthead' ]
+        trans.handle_user_logout( logout_all=logout_all )
+        message = 'You have been logged out.<br>To log in again <a target="_top" href="%s">go to the home page</a>.' % \
+            ( url_for( '/' ) )
+        if biostar.biostar_logged_in( trans ):
+            biostar_url = biostar.biostar_logout( trans )
+            if biostar_url:
+                # TODO: It would be better if we automatically logged this user out of biostar
+                message += '<br>To log out of Biostar, please click <a href="%s" target="_blank">here</a>.' % ( biostar_url )
+        if trans.app.config.use_remote_user and trans.app.config.remote_user_logout_href:
+            trans.response.send_redirect(trans.app.config.remote_user_logout_href)
+        else:
+            return trans.fill_template('/user/logout.mako',
+                                       refresh_frames=refresh_frames,
+                                       message=message,
+                                       status='done',
+                                       active_view="user" )
+
+    @web.expose
+    def create( self, trans, cntrller='user', redirect_url='', refresh_frames=[], **kwd ):
+        params = util.Params( kwd )
+        # If the honeypot field is not empty we are dealing with a bot.
+        honeypot_field = params.get( 'bear_field', '' )
+        if honeypot_field != '':
+            return trans.show_error_message( "You've been flagged as a possible bot. If you are not, please try registering again and fill the form out carefully. <a target=\"_top\" href=\"%s\">Go to the home page</a>." ) % url_for( '/' )
+
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        use_panels = util.string_as_bool( kwd.get( 'use_panels', True ) )
+        email = util.restore_text( params.get( 'email', '' ) )
+        # Do not sanitize passwords, so take from kwd
+        # instead of params ( which were sanitized )
+        password = kwd.get( 'password', '' )
+        confirm = kwd.get( 'confirm', '' )
+        username = util.restore_text( params.get( 'username', '' ) )
+        subscribe = params.get( 'subscribe', '' )
+        subscribe_checked = CheckboxField.is_checked( subscribe )
+        referer = trans.request.referer or ''
+        redirect = kwd.get( 'redirect', referer ).strip()
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        if not trans.app.config.allow_user_creation and not trans.user_is_admin():
+            message = 'User registration is disabled.  Please contact your local Galaxy administrator for an account.'
+            if trans.app.config.error_email_to is not None:
+                message += ' Contact: %s' % trans.app.config.error_email_to
+            status = 'error'
+        else:
+            # check user is allowed to register
+            message, status = trans.app.auth_manager.check_registration_allowed(email, username, password)
+            if message == '':
+                if not refresh_frames:
+                    if trans.webapp.name == 'galaxy':
+                        if trans.app.config.require_login:
+                            refresh_frames = [ 'masthead', 'history', 'tools' ]
+                        else:
+                            refresh_frames = [ 'masthead', 'history' ]
+                    else:
+                        refresh_frames = [ 'masthead' ]
+                # Create the user, save all the user info and login to Galaxy
+                if params.get( 'create_user_button', False ):
+                    # Check email and password validity
+                    message = self.__validate( trans, params, email, password, confirm, username )
+                    if not message:
+                        # All the values are valid
+                        message, status, user, success = self.__register( trans,
+                                                                          cntrller,
+                                                                          subscribe_checked,
+                                                                          **kwd )
+                        if trans.webapp.name == 'tool_shed':
+                            redirect_url = url_for( '/' )
+                        if success and not is_admin:
+                            # The handle_user_login() method has a call to the history_set_default_permissions() method
+                            # (needed when logging in with a history), user needs to have default permissions set before logging in
+                            trans.handle_user_login( user )
+                            trans.log_event( "User created a new account" )
+                            trans.log_event( "User logged in" )
+                        if success and is_admin:
+                            message = 'Created new user account (%s)' % escape( user.email )
+                            trans.response.send_redirect( web.url_for( controller='admin',
+                                                                       action='users',
+                                                                       cntrller=cntrller,
+                                                                       message=message,
+                                                                       status=status ) )
+                    else:
+                        status = 'error'
+        if trans.webapp.name == 'galaxy':
+            user_type_form_definition = self.__get_user_type_form_definition( trans, user=None, **kwd )
+            user_type_fd_id = params.get( 'user_type_fd_id', 'none' )
+            if user_type_fd_id == 'none' and user_type_form_definition is not None:
+                user_type_fd_id = trans.security.encode_id( user_type_form_definition.id )
+            user_type_fd_id_select_field = self.__build_user_type_fd_id_select_field( trans, selected_value=user_type_fd_id )
+            widgets = self.__get_widgets( trans, user_type_form_definition, user=None, **kwd )
+            #  Warning message that is shown on the registration page.
+            registration_warning_message = trans.app.config.registration_warning_message
+        else:
+            user_type_fd_id_select_field = None
+            user_type_form_definition = None
+            widgets = []
+            registration_warning_message = None
+        return trans.fill_template( '/user/register.mako',
+                                    cntrller=cntrller,
+                                    email=email,
+                                    username=transform_publicname( trans, username ),
+                                    subscribe_checked=subscribe_checked,
+                                    user_type_fd_id_select_field=user_type_fd_id_select_field,
+                                    user_type_form_definition=user_type_form_definition,
+                                    widgets=widgets,
+                                    use_panels=use_panels,
+                                    redirect=redirect,
+                                    redirect_url=redirect_url,
+                                    refresh_frames=refresh_frames,
+                                    registration_warning_message=registration_warning_message,
+                                    message=message,
+                                    status=status )
+
+    def __register( self, trans, cntrller, subscribe_checked, **kwd ):
+        email = util.restore_text( kwd.get( 'email', '' ) )
+        password = kwd.get( 'password', '' )
+        username = util.restore_text( kwd.get( 'username', '' ) )
+        message = escape( kwd.get( 'message', ''  ) )
+        status = kwd.get( 'status', 'done' )
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        user = self.create_user( trans=trans, email=email, username=username, password=password )
+        error = ''
+        success = True
+        if trans.webapp.name == 'galaxy':
+            # Save other information associated with the user, if any
+            user_info_forms = self.get_all_forms( trans,
+                                                  filter=dict( deleted=False ),
+                                                  form_type=trans.app.model.FormDefinition.types.USER_INFO )
+            # If there are no user forms available then there is nothing to save
+            if user_info_forms:
+                user_type_fd_id = kwd.get( 'user_type_fd_id', 'none' )
+                if user_type_fd_id not in [ 'none' ]:
+                    user_type_form_definition = trans.sa_session.query( trans.app.model.FormDefinition ).get( trans.security.decode_id( user_type_fd_id ) )
+                    values = self.get_form_values( trans, user, user_type_form_definition, **kwd )
+                    form_values = trans.app.model.FormValues( user_type_form_definition, values )
+                    trans.sa_session.add( form_values )
+                    trans.sa_session.flush()
+                    user.values = form_values
+                    trans.sa_session.add( user )
+                    trans.sa_session.flush()
+            if subscribe_checked:
+                # subscribe user to email list
+                if trans.app.config.smtp_server is None:
+                    error = "Now logged in as " + user.email + ". However, subscribing to the mailing list has failed because mail is not configured for this Galaxy instance. <br>Please contact your local Galaxy administrator."
+                else:
+                    body = 'Join Mailing list.\n'
+                    to = trans.app.config.mailing_join_addr
+                    frm = email
+                    subject = 'Join Mailing List'
+                    try:
+                        util.send_mail( frm, to, subject, body, trans.app.config )
+                    except Exception:
+                        log.exception( 'Subscribing to the mailing list has failed.' )
+                        error = "Now logged in as " + user.email + ". However, subscribing to the mailing list has failed."
+            if not error and not is_admin:
+                # The handle_user_login() method has a call to the history_set_default_permissions() method
+                # (needed when logging in with a history), user needs to have default permissions set before logging in
+                trans.handle_user_login( user )
+                trans.log_event( "User created a new account" )
+                trans.log_event( "User logged in" )
+            elif not error:
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='users',
+                                                           message='Created new user account (%s)' % user.email,
+                                                           status=status ) )
+        if error:
+            message = error
+            status = 'error'
+            success = False
+        else:
+            if trans.webapp.name == 'galaxy' and trans.app.config.user_activation_on:
+                is_activation_sent = self.send_verification_email( trans, email, username )
+                if is_activation_sent:
+                    message = 'Now logged in as %s.<br>Verification email has been sent to your email address. Please verify it by clicking the activation link in the email.<br>Please check your spam/trash folder in case you cannot find the message.<br><a target="_top" href="%s">Return to the home page.</a>' % ( escape( user.email ), url_for( '/' ) )
+                    success = True
+                else:
+                    message = 'Unable to send activation email, please contact your local Galaxy administrator.'
+                    if trans.app.config.error_email_to is not None:
+                        message += ' Contact: %s' % trans.app.config.error_email_to
+                    success = False
+            else:  # User activation is OFF, proceed without sending the activation email.
+                message = 'Now logged in as %s.<br><a target="_top" href="%s">Return to the home page.</a>' % ( escape( user.email ), url_for( '/' ) )
+                success = True
+        return ( message, status, user, success )
+
+    def send_verification_email( self, trans, email, username ):
+        """
+        Send the verification email containing the activation link to the user's email.
+        """
+        if username is None:
+            username = trans.user.username
+        activation_link = self.prepare_activation_link( trans, escape( email ) )
+
+        host = trans.request.host.split( ':' )[ 0 ]
+        if host in [ 'localhost', '127.0.0.1', '0.0.0.0' ]:
+            host = socket.getfqdn()
+        body = ("Hello %s,\n\n"
+                "In order to complete the activation process for %s begun on %s at %s, please click on the following link to verify your account:\n\n"
+                "%s \n\n"
+                "By clicking on the above link and opening a Galaxy account you are also confirming that you have read and agreed to Galaxy's Terms and Conditions for use of this service (%s). This includes a quota limit of one account per user. Attempts to subvert this limit by creating multiple accounts or through any other method may result in termination of all associated accounts and data.\n\n"
+                "Please contact us if you need help with your account at: %s. You can also browse resources available at: %s. \n\n"
+                "More about the Galaxy Project can be found at galaxyproject.org\n\n"
+                "Your Galaxy Team" % (escape( username ), escape( email ),
+                                      datetime.utcnow().strftime( "%D"),
+                                      trans.request.host, activation_link,
+                                      trans.app.config.terms_url,
+                                      trans.app.config.error_email_to,
+                                      trans.app.config.instance_resource_url))
+        to = email
+        frm = trans.app.config.email_from or 'galaxy-no-reply@' + host
+        subject = 'Galaxy Account Activation'
+        try:
+            util.send_mail( frm, to, subject, body, trans.app.config )
+            return True
+        except Exception:
+            log.exception( 'Unable to send the activation email.' )
+            return False
+
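+    # util.send_mail is Galaxy's mail helper. A rough standalone equivalent
+    # using only the standard library might look like the following (the
+    # localhost:25 SMTP endpoint is an assumption for illustration):
+    #
+    #     import smtplib
+    #     from email.mime.text import MIMEText
+    #     msg = MIMEText( body )
+    #     msg[ 'To' ] = to
+    #     msg[ 'From' ] = frm
+    #     msg[ 'Subject' ] = subject
+    #     server = smtplib.SMTP( 'localhost', 25 )
+    #     server.sendmail( frm, [ to ], msg.as_string() )
+    #     server.quit()
+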
+    def prepare_activation_link( self, trans, email ):
+        """
+        Prepare the account activation link for the user.
+        """
+        activation_token = self.get_activation_token( trans, email )
+        activation_link = url_for( controller='user', action='activate', activation_token=activation_token, email=email, qualified=True  )
+        return activation_link
+
+    def get_activation_token( self, trans, email ):
+        """
+        Check for an existing activation token. If none is found, create a new one and store it in the database.
+        """
+        user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email == email ).first()
+        activation_token = user.activation_token
+        if activation_token is None:
+            activation_token = hash_util.new_secure_hash( str( random.getrandbits( 256 ) ) )
+            user.activation_token = activation_token
+            trans.sa_session.add( user )
+            trans.sa_session.flush()
+        return activation_token
+
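+    # The token is simply a hash of 256 random bits. Assuming
+    # hash_util.new_secure_hash produces a hex digest, a stdlib-only
+    # equivalent would be:
+    #
+    #     import hashlib
+    #     import random
+    #     token = hashlib.sha1( str( random.getrandbits( 256 ) ) ).hexdigest()
+    #
+    # Note that random.getrandbits is not a cryptographically secure source;
+    # os.urandom would be the hardened choice for such tokens.
+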
+    @web.expose
+    def activate( self, trans, **kwd ):
+        """
+        Check whether the token matches the user and, if so, activate the user's account.
+        """
+        params = util.Params( kwd, sanitize=False )
+        email = params.get( 'email', None )
+        if email is not None:
+            email = urllib.unquote( email )
+        activation_token = params.get( 'activation_token', None )
+
+        if email is None or activation_token is None:
+            #  We don't have the email or activation_token, show error.
+            return trans.show_error_message( "You are using an invalid activation link. Try to log in and we will send you a new activation email. <br><a href='%s'>Go to login page.</a>" ) % web.url_for( controller="root", action="index" )
+        else:
+            # Find the user
+            user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email == email ).first()
+            if user is None:
+                #  No account exists for this email address, so the link cannot be valid.
+                return trans.show_error_message( "You are using an invalid activation link. Try to log in and we will send you a new activation email. <br><a href='%s'>Go to login page.</a>" % web.url_for( controller='root', action='index' ) )
+            # If the user is active already don't try to activate
+            if user.active is True:
+                return trans.show_ok_message( "Your account is already active. Nothing has changed. <br><a href='%s'>Go to login page.</a>" % web.url_for( controller='root', action='index' ) )
+            if user.activation_token == activation_token:
+                user.activation_token = None
+                user.active = True
+                trans.sa_session.add( user )
+                trans.sa_session.flush()
+                return trans.show_ok_message( "Your account has been successfully activated! <br><a href='%s'>Go to login page.</a>" % web.url_for( controller='root', action='index' ) )
+            else:
+                #  Tokens don't match. Activation is denied.
+                return trans.show_error_message( "You are using an invalid activation link. Try to log in and we will send you a new activation email. <br><a href='%s'>Go to login page.</a>" % web.url_for( controller='root', action='index' ) )
+
+    def __get_user_type_form_definition( self, trans, user=None, **kwd ):
+        params = util.Params( kwd )
+        if user and user.values:
+            user_type_fd_id = trans.security.encode_id( user.values.form_definition.id )
+        else:
+            user_type_fd_id = params.get( 'user_type_fd_id', 'none' )
+        if user_type_fd_id not in [ 'none' ]:
+            user_type_form_definition = trans.sa_session.query( trans.app.model.FormDefinition ).get( trans.security.decode_id( user_type_fd_id ) )
+        else:
+            user_type_form_definition = None
+        return user_type_form_definition
+
+    def __get_widgets( self, trans, user_type_form_definition, user=None, **kwd ):
+        widgets = []
+        if user_type_form_definition:
+            if user:
+                if user.values:
+                    widgets = user_type_form_definition.get_widgets( user=user,
+                                                                     contents=user.values.content,
+                                                                     **kwd )
+                else:
+                    widgets = user_type_form_definition.get_widgets( None, contents={}, **kwd )
+            else:
+                widgets = user_type_form_definition.get_widgets( None, contents={}, **kwd )
+        return widgets
+
+    @web.expose
+    def manage_user_info( self, trans, cntrller, **kwd ):
+        '''Manage a user's login, password, public username, type, addresses, etc.'''
+        params = util.Params( kwd )
+        user_id = params.get( 'id', None )
+        if user_id:
+            user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+        else:
+            user = trans.user
+        if not user:
+            raise AssertionError("The user id (%s) is not valid" % str( user_id ))
+        email = util.restore_text( params.get( 'email', user.email ) )
+        username = util.restore_text( params.get( 'username', '' ) )
+        if not username:
+            username = user.username
+        message = escape( util.restore_text( params.get( 'message', ''  ) ) )
+        status = params.get( 'status', 'done' )
+        if trans.webapp.name == 'galaxy':
+            user_type_form_definition = self.__get_user_type_form_definition( trans, user=user, **kwd )
+            user_type_fd_id = params.get( 'user_type_fd_id', 'none' )
+            if user_type_fd_id == 'none' and user_type_form_definition is not None:
+                user_type_fd_id = trans.security.encode_id( user_type_form_definition.id )
+            user_type_fd_id_select_field = self.__build_user_type_fd_id_select_field( trans, selected_value=user_type_fd_id )
+            widgets = self.__get_widgets( trans, user_type_form_definition, user=user, **kwd )
+            # user's addresses
+            show_filter = util.restore_text( params.get( 'show_filter', 'Active'  ) )
+            if show_filter == 'All':
+                addresses = [address for address in user.addresses]
+            elif show_filter == 'Deleted':
+                addresses = [address for address in user.addresses if address.deleted]
+            else:
+                addresses = [address for address in user.addresses if not address.deleted]
+            user_info_forms = self.get_all_forms( trans,
+                                                  filter=dict( deleted=False ),
+                                                  form_type=trans.app.model.FormDefinition.types.USER_INFO )
+            return trans.fill_template( '/webapps/galaxy/user/manage_info.mako',
+                                        cntrller=cntrller,
+                                        user=user,
+                                        email=email,
+                                        username=username,
+                                        user_type_fd_id_select_field=user_type_fd_id_select_field,
+                                        user_info_forms=user_info_forms,
+                                        user_type_form_definition=user_type_form_definition,
+                                        user_type_fd_id=user_type_fd_id,
+                                        widgets=widgets,
+                                        addresses=addresses,
+                                        show_filter=show_filter,
+                                        message=message,
+                                        status=status )
+        else:
+            return trans.fill_template( '/webapps/tool_shed/user/manage_info.mako',
+                                        cntrller=cntrller,
+                                        user=user,
+                                        email=email,
+                                        username=username,
+                                        message=message,
+                                        status=status )
+
+    # For REMOTE_USER, we need the ability to just edit the username
+    @web.expose
+    @web.require_login( "to manage the public name" )
+    def edit_username( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        user_id = params.get( 'user_id', None )
+        if user_id and is_admin:
+            user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+        else:
+            user = trans.user
+        if user and params.get( 'change_username_button', False ):
+            username = kwd.get( 'username', '' )
+            if username:
+                message = validate_publicname( trans, username, user )
+            if message:
+                status = 'error'
+            else:
+                user.username = username
+                trans.sa_session.add( user )
+                trans.sa_session.flush()
+                message = 'The username has been updated with the changes.'
+        return trans.fill_template( '/user/username.mako',
+                                    cntrller=cntrller,
+                                    user=user,
+                                    username=user.username,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def edit_info( self, trans, cntrller, **kwd ):
+        """
+        Edit user information: username, email, or password.
+        """
+        params = util.Params( kwd )
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        user_id = params.get( 'user_id', None )
+        if user_id and is_admin:
+            user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+        elif user_id and ( not trans.user or trans.user.id != trans.security.decode_id( user_id ) ):
+            message = 'Invalid user id'
+            status = 'error'
+            user = None
+        else:
+            user = trans.user
+        if user and params.get( 'login_info_button', False ):
+            # Editing email and username
+            email = util.restore_text( params.get( 'email', '' ) )
+            username = util.restore_text( params.get( 'username', '' ) ).lower()
+
+            # Validate the new values for email and username
+            message = validate_email( trans, email, user )
+            if not message and username:
+                message = validate_publicname( trans, username, user )
+            if message:
+                status = 'error'
+            else:
+                if ( user.email != email ):
+                    # The user's private role name must match the user's login ( email )
+                    private_role = trans.app.security_agent.get_private_user_role( user )
+                    private_role.name = email
+                    private_role.description = 'Private role for ' + email
+                    # Change the email itself
+                    user.email = email
+                    trans.sa_session.add_all( ( user, private_role ) )
+                    trans.sa_session.flush()
+                    if trans.webapp.name == 'galaxy' and trans.app.config.user_activation_on:
+                        user.active = False
+                        trans.sa_session.add( user )
+                        trans.sa_session.flush()
+                        is_activation_sent = self.send_verification_email( trans, user.email, user.username )
+                        if is_activation_sent:
+                            message = 'The login information has been updated with the changes.<br>Verification email has been sent to your new email address. Please verify it by clicking the activation link in the email.<br>Please check your spam/trash folder in case you cannot find the message.'
+                        else:
+                            message = 'Unable to send activation email, please contact your local Galaxy administrator.'
+                            if trans.app.config.error_email_to is not None:
+                                message += ' Contact: %s' % trans.app.config.error_email_to
+                if ( user.username != username ):
+                    user.username = username
+                    trans.sa_session.add( user )
+                    trans.sa_session.flush()
+                message = 'The login information has been updated with the changes.'
+        elif user and params.get( 'edit_user_info_button', False ):
+            # Edit user information - webapp MUST BE 'galaxy'
+            user_type_fd_id = params.get( 'user_type_fd_id', 'none' )
+            if user_type_fd_id not in [ 'none' ]:
+                user_type_form_definition = trans.sa_session.query( trans.app.model.FormDefinition ).get( trans.security.decode_id( user_type_fd_id ) )
+            elif user.values:
+                user_type_form_definition = user.values.form_definition
+            else:
+                # User was created before any of the user_info forms were created
+                user_type_form_definition = None
+            if user_type_form_definition:
+                values = self.get_form_values( trans, user, user_type_form_definition, **kwd )
+            else:
+                values = {}
+            flush_needed = False
+            if user.values:
+                # Editing the user info of an existing user with existing user info
+                user.values.content = values
+                trans.sa_session.add( user.values )
+                flush_needed = True
+            elif values:
+                form_values = trans.model.FormValues( user_type_form_definition, values )
+                trans.sa_session.add( form_values )
+                user.values = form_values
+                flush_needed = True
+            if flush_needed:
+                trans.sa_session.add( user )
+                trans.sa_session.flush()
+            message = "The user information has been updated with the changes."
+        if user and trans.webapp.name == 'galaxy' and is_admin:
+            kwd[ 'user_id' ] = trans.security.encode_id( user.id )
+        kwd[ 'id' ] = user_id
+        if message:
+            kwd[ 'message' ] = util.sanitize_text( message )
+        if status:
+            kwd[ 'status' ] = status
+        return trans.response.send_redirect( web.url_for( controller='user',
+                                                          action='manage_user_info',
+                                                          cntrller=cntrller,
+                                                          **kwd ) )
+
+    @web.expose
+    def change_password( self, trans, token=None, **kwd):
+        """
+        Provides a form with which the user can change their password. If a
+        token is provided, the current password is not required.
+        """
+        status = None
+        message = kwd.get( 'message', '' )
+        user = None
+        if kwd.get( 'change_password_button', False ):
+            password = kwd.get( 'password', '' )
+            confirm = kwd.get( 'confirm', '' )
+            current = kwd.get( 'current', '' )
+            token_result = None
+            if token:
+                # If a token was supplied, validate and set user
+                token_result = trans.sa_session.query( trans.app.model.PasswordResetToken ).get(token)
+                if token_result and token_result.expiration_time > datetime.utcnow():
+                    user = token_result.user
+                else:
+                    return trans.show_error_message("Invalid or expired password reset token, please request a new one.")
+            else:
+                # The user is changing their own password, validate their current password
+                (ok, message) = trans.app.auth_manager.check_change_password(trans.user, current )
+                if ok:
+                    user = trans.user
+                else:
+                    status = 'error'
+            if user:
+                # Validate the new password
+                message = validate_password( trans, password, confirm )
+                if message:
+                    status = 'error'
+                else:
+                    # Save new password
+                    user.set_password_cleartext( password )
+                    # if we used a token, invalidate it and log the user in.
+                    if token_result:
+                        trans.handle_user_login(token_result.user)
+                        token_result.expiration_time = datetime.utcnow()
+                        trans.sa_session.add(token_result)
+                    # Invalidate all other sessions
+                    for other_galaxy_session in trans.sa_session.query( trans.app.model.GalaxySession ) \
+                                                     .filter( and_( trans.app.model.GalaxySession.table.c.user_id == user.id,
+                                                                    trans.app.model.GalaxySession.table.c.is_valid == true(),
+                                                                    trans.app.model.GalaxySession.table.c.id != trans.galaxy_session.id ) ):
+                        other_galaxy_session.is_valid = False
+                        trans.sa_session.add( other_galaxy_session )
+                    trans.sa_session.add( user )
+                    trans.sa_session.flush()
+                    trans.log_event( "User change password" )
+                    if kwd.get('display_top', False) == 'True':
+                        return trans.response.send_redirect( url_for( '/', message='Password has been changed' ))
+                    else:
+                        return trans.show_ok_message('The password has been changed and any other existing Galaxy sessions have been logged out (but jobs in histories in those sessions will not be interrupted).')
+
+        return trans.fill_template( '/user/change_password.mako',
+                                    token=token,
+                                    status=status,
+                                    message=message,
+                                    display_top=kwd.get('redirect_home', False)
+                                    )
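+
+    # Usage note: password reset links generated by reset_password() below point
+    # here with a 'token' query parameter ( /user/change_password?token=<token> ),
+    # skipping the current-password check; the token is expired once used.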
+
+    @web.expose
+    def change_communication( self, trans, cntrller, **kwd):
+        """
+            Provides a form with which the user can activate/deactivate
+            the communication server.
+        """
+        params = util.Params( kwd )
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        user_id = params.get( 'user_id', None )
+        activated = ''
+
+        if user_id and is_admin:
+            user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+        else:
+            user = trans.user
+
+        if user and params.get( 'change_communication_button', False ):
+            communication_enabled = kwd.get( 'enable_communication_server', False )
+            if communication_enabled:
+                activated = 'checked'
+                user.preferences['communication_server'] = True
+                message = 'Your communication settings have been updated and activated.'
+            else:
+                activated = ''
+                user.preferences['communication_server'] = False
+                message = 'Your communication settings have been updated and deactivated.'
+            trans.sa_session.add( user )
+            trans.sa_session.flush()
+        else:
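+            # Preferences are persisted as strings, so read the flag back with string_as_bool().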
+            if string_as_bool( user.preferences.get('communication_server', '0') ):
+                activated = 'checked'
+
+        return trans.fill_template( '/user/communication_settings.mako',
+                                    cntrller=cntrller,
+                                    status=status,
+                                    activated=activated,
+                                    message=message )
+
+    @web.expose
+    def reset_password( self, trans, email=None, **kwd ):
+        """Reset the user's password. Send an email with token that allows a password change."""
+        if trans.app.config.smtp_server is None:
+            return trans.show_error_message( "Mail is not configured for this Galaxy instance "
+                                             "and password reset information cannot be sent. "
+                                             "Please contact your local Galaxy administrator." )
+        message = None
+        status = 'done'
+        if kwd.get( 'reset_password_button', False ):
+            message = validate_email(trans, email, check_dup=False)
+            if not message:
+                # Default to a non-userinfo-leaking response message
+                message = ( "Your reset request for %s has been received.  "
+                            "Please check your email account for more instructions.  "
+                            "If you do not receive an email shortly, please contact an administrator." % ( escape( email ) ) )
+                reset_user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email == email ).first()
+                if not reset_user:
+                    # Perform a case-insensitive check only if the user wasn't found
+                    reset_user = trans.sa_session.query( trans.app.model.User ).filter( func.lower(trans.app.model.User.table.c.email) == func.lower(email) ).first()
+                if reset_user:
+                    prt = trans.app.model.PasswordResetToken( reset_user )
+                    trans.sa_session.add( prt )
+                    trans.sa_session.flush()
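+                    # Replace loopback addresses with the FQDN so the emailed
+                    # link is reachable from the user's machine.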
+                    host = trans.request.host.split( ':' )[ 0 ]
+                    if host in [ 'localhost', '127.0.0.1', '0.0.0.0' ]:
+                        host = socket.getfqdn()
+                    reset_url = url_for( controller='user',
+                                         action="change_password",
+                                         token=prt.token, qualified=True)
+                    body = PASSWORD_RESET_TEMPLATE % ( host, prt.expiration_time.strftime(trans.app.config.pretty_datetime_format),
+                                                       reset_url )
+                    frm = trans.app.config.email_from or 'galaxy-no-reply@' + host
+                    subject = 'Galaxy Password Reset'
+                    try:
+                        util.send_mail( frm, email, subject, body, trans.app.config )
+                        trans.sa_session.add( reset_user )
+                        trans.sa_session.flush()
+                        trans.log_event( "User reset password: %s" % email )
+                    except Exception:
+                        log.exception( 'Unable to send password reset email.' )
+        return trans.fill_template( '/user/reset_password.mako',
+                                    message=message,
+                                    status=status )
+
+    def __validate( self, trans, params, email, password, confirm, username ):
+        # If coming from the tool shed webapp, we'll require a public user name
+        if trans.webapp.name == 'tool_shed':
+            if not username:
+                return "A public user name is required in the tool shed."
+            if username in [ 'repos' ]:
+                return "The term <b>%s</b> is a reserved word in the tool shed, so it cannot be used as a public user name." % escape( username )
+        message = "\n".join( [ validate_email( trans, email ),
+                               validate_password( trans, password, confirm ),
+                               validate_publicname( trans, username ) ] ).rstrip()
+        if not message:
+            if trans.webapp.name == 'galaxy':
+                if self.get_all_forms( trans,
+                                       filter=dict( deleted=False ),
+                                       form_type=trans.app.model.FormDefinition.types.USER_INFO ):
+                    user_type_fd_id = params.get( 'user_type_fd_id', 'none' )
+                    if user_type_fd_id in [ 'none' ]:
+                        return "Select the user's type and information"
+        return message
+
+    @web.expose
+    def set_default_permissions( self, trans, cntrller, **kwd ):
+        """Set the user's default permissions for the new histories"""
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        if trans.user:
+            if 'update_roles_button' in kwd:
+                p = util.Params( kwd )
+                permissions = {}
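+                # Map each permitted dataset action to the list of Role objects
+                # selected in the matching '<action>_in' form field.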
+                for k, v in trans.app.model.Dataset.permitted_actions.items():
+                    in_roles = p.get( k + '_in', [] )
+                    if not isinstance( in_roles, list ):
+                        in_roles = [ in_roles ]
+                    in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in in_roles ]
+                    action = trans.app.security_agent.get_action( v.action ).action
+                    permissions[ action ] = in_roles
+                trans.app.security_agent.user_set_default_permissions( trans.user, permissions )
+                message = 'Default new history permissions have been changed.'
+            return trans.fill_template( 'user/permissions.mako',
+                                        cntrller=cntrller,
+                                        message=message,
+                                        status=status )
+        else:
+            # User not logged in; default permissions apply only to logged-in users.
+            return trans.show_error_message( "You must be logged in to change your default permitted actions." )
+
+    @web.expose
+    @web.require_login()
+    def toolbox_filters( self, trans, cntrller, **kwd ):
+        """
+            Sets the user's default filters for the toolbox.
+            Toolbox filters are specified in galaxy.ini.
+            The user can activate them and the choice is stored in user_preferences.
+        """
+
+        def get_filter_mapping( db_filters, config_filters, factory ):
+            """
+                Compare the allowed filters from the galaxy.ini config file with the previously saved or default filters from the database.
+                We need this to toggle the checkboxes in the form correctly.
+                Furthermore, we extract all information associated with a filter in order to display it in the form.
+            """
+            filters = list()
+            for filter_name in config_filters:
+                function = factory._build_filter_function(filter_name)
+                doc_string = docstring_trim( function.__doc__ )
+                split = doc_string.split('\n\n')
+                if split:
+                    sdesc = split[0]
+                else:
+                    # Fall back to an empty summary so sdesc is always bound.
+                    sdesc = ''
+                    log.error( 'No description specified in the __doc__ string for %s.' % filter_name )
+                if len(split) > 1:
+                    description = split[1]
+                else:
+                    description = ''
+
+                if filter_name in db_filters:
+                    filters.append( dict( filterpath=filter_name, short_desc=sdesc, desc=description, checked=True ) )
+                else:
+                    filters.append( dict( filterpath=filter_name, short_desc=sdesc, desc=description, checked=False ) )
+            return filters
+
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+
+        user_id = params.get( 'user_id', False )
+        if user_id:
+            user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+        else:
+            user = trans.user
+
+        if user:
+            saved_user_tool_filters = list()
+            saved_user_section_filters = list()
+            saved_user_label_filters = list()
+
+            for name, value in user.preferences.items():
+                if name == 'toolbox_tool_filters':
+                    saved_user_tool_filters = listify( value, do_strip=True )
+                elif name == 'toolbox_section_filters':
+                    saved_user_section_filters = listify( value, do_strip=True )
+                elif name == 'toolbox_label_filters':
+                    saved_user_label_filters = listify( value, do_strip=True )
+
+            ff = FilterFactory(trans.app.toolbox)
+            tool_filters = get_filter_mapping( saved_user_tool_filters, trans.app.config.user_tool_filters, ff )
+            section_filters = get_filter_mapping( saved_user_section_filters, trans.app.config.user_section_filters, ff )
+            label_filters = get_filter_mapping( saved_user_label_filters, trans.app.config.user_label_filters, ff )
+
+            return trans.fill_template( 'user/toolbox_filters.mako',
+                                        cntrller=cntrller,
+                                        message=message,
+                                        tool_filters=tool_filters,
+                                        section_filters=section_filters,
+                                        label_filters=label_filters,
+                                        user=user,
+                                        status=status )
+        else:
+            # User not logged in; toolbox filters are stored per user.
+            return trans.show_error_message( "You must be logged in to change private toolbox filters." )
+
+    @web.expose
+    @web.require_login( "to change the private toolbox filters" )
+    def edit_toolbox_filters( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', '' ) )
+
+        if params.get( 'edit_toolbox_filter_button', False ):
+            tool_filters = list()
+            section_filters = list()
+            label_filters = list()
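+            # Checkbox names are prefixed by filter type: 't_' for tools,
+            # 's_' for sections and 'l_' for labels.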
+            for name, state in params.flatten():
+                if state == 'on':
+                    if name.startswith('t_'):
+                        tool_filters.append( name[2:] )
+                    elif name.startswith('l_'):
+                        label_filters.append( name[2:] )
+                    elif name.startswith('s_'):
+                        section_filters.append( name[2:] )
+            trans.user.preferences['toolbox_tool_filters'] = ','.join( tool_filters )
+            trans.user.preferences['toolbox_section_filters'] = ','.join( section_filters )
+            trans.user.preferences['toolbox_label_filters'] = ','.join( label_filters )
+
+            trans.sa_session.add( trans.user )
+            trans.sa_session.flush()
+            message = 'Toolbox filters have been updated.'
+            kwd = dict( message=message, status='done' )
+
+        # Display the ToolBox filters form with the current values filled in
+        return self.toolbox_filters( trans, cntrller, **kwd )
+
+    @web.expose
+    @web.require_login( "to get most recently used tool" )
+    @web.json_pretty
+    def get_most_recently_used_tool_async( self, trans ):
+        """ Returns information about the most recently used tool. """
+
+        # Get most recently used tool.
+        query = trans.sa_session.query( self.app.model.Job.tool_id ).join( self.app.model.History ) \
+                                .filter( self.app.model.History.user == trans.user ) \
+                                .order_by( self.app.model.Job.create_time.desc() ).limit(1)
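+        # NOTE: assumes the user has run at least one job; the subscript below
+        # raises IndexError otherwise.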
+        tool_id = query[0][0]  # Get first element in first row of query.
+        tool = self.get_toolbox().get_tool( tool_id )
+
+        # Return tool info.
+        tool_info = {"id": tool.id,
+                     "link": url_for( controller='tool_runner', tool_id=tool.id ),
+                     "target": tool.target,
+                     "name": tool.name,  # TODO: translate this using _()
+                     "minsizehint": tool.uihints.get( 'minwidth', -1 ),
+                     "description": tool.description}
+        return tool_info
+
+    @web.expose
+    def manage_addresses(self, trans, **kwd):
+        if trans.user:
+            params = util.Params( kwd )
+            message = util.restore_text( params.get( 'message', '' ) )
+            status = params.get( 'status', 'done' )
+            show_filter = util.restore_text( params.get( 'show_filter', 'Active' ) )
+            if show_filter == 'All':
+                addresses = [address for address in trans.user.addresses]
+            elif show_filter == 'Deleted':
+                addresses = [address for address in trans.user.addresses if address.deleted]
+            else:
+                addresses = [address for address in trans.user.addresses if not address.deleted]
+            return trans.fill_template( 'user/address.mako',
+                                        addresses=addresses,
+                                        show_filter=show_filter,
+                                        message=message,
+                                        status=status)
+        else:
+            # User not logged in; addresses can only be managed for a logged-in user.
+            return trans.show_error_message( "You must be logged in to manage your addresses." )
+
+    @web.require_login( "to add addresses" )
+    @web.expose
+    def new_address( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        user_id = params.get( 'id', False )
+        if is_admin:
+            if not user_id:
+                return trans.show_error_message( "You must specify a user to add a new address to." )
+            user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+        else:
+            user = trans.user
+        short_desc = util.restore_text( params.get( 'short_desc', ''  ) )
+        name = util.restore_text( params.get( 'name', ''  ) )
+        institution = util.restore_text( params.get( 'institution', ''  ) )
+        address = util.restore_text( params.get( 'address', ''  ) )
+        city = util.restore_text( params.get( 'city', ''  ) )
+        state = util.restore_text( params.get( 'state', ''  ) )
+        postal_code = util.restore_text( params.get( 'postal_code', ''  ) )
+        country = util.restore_text( params.get( 'country', ''  ) )
+        phone = util.restore_text( params.get( 'phone', ''  ) )
+        ok = True
+        if not trans.app.config.allow_user_creation and not is_admin:
+            return trans.show_error_message( 'User registration is disabled.  Please contact your local Galaxy administrator for an account.' )
+        if params.get( 'new_address_button', False ):
+            if not short_desc:
+                ok = False
+                message = 'Enter a short description for this address'
+            elif not name:
+                ok = False
+                message = 'Enter the name'
+            elif not institution:
+                ok = False
+                message = 'Enter the institution associated with the user'
+            elif not address:
+                ok = False
+                message = 'Enter the address'
+            elif not city:
+                ok = False
+                message = 'Enter the city'
+            elif not state:
+                ok = False
+                message = 'Enter the state/province/region'
+            elif not postal_code:
+                ok = False
+                message = 'Enter the postal code'
+            elif not country:
+                ok = False
+                message = 'Enter the country'
+            if ok:
+                user_address = trans.model.UserAddress( user=user,
+                                                        desc=short_desc,
+                                                        name=name,
+                                                        institution=institution,
+                                                        address=address,
+                                                        city=city,
+                                                        state=state,
+                                                        postal_code=postal_code,
+                                                        country=country,
+                                                        phone=phone )
+                trans.sa_session.add( user_address )
+                trans.sa_session.flush()
+                message = 'Address (%s) has been added' % escape( user_address.desc )
+                new_kwd = dict( message=message, status=status )
+                if is_admin:
+                    new_kwd[ 'id' ] = trans.security.encode_id( user.id )
+                return trans.response.send_redirect( web.url_for( controller='user',
+                                                                  action='manage_user_info',
+                                                                  cntrller=cntrller,
+                                                                  **new_kwd ) )
+        # Display the address form with the current values filled in
+        return trans.fill_template( 'user/new_address.mako',
+                                    cntrller=cntrller,
+                                    user=user,
+                                    short_desc=short_desc,
+                                    name=name,
+                                    institution=institution,
+                                    address=address,
+                                    city=city,
+                                    state=state,
+                                    postal_code=postal_code,
+                                    country=country,
+                                    phone=phone,
+                                    message=escape(message),
+                                    status=status )
+
+    @web.require_login( "to edit addresses" )
+    @web.expose
+    def edit_address( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        message = util.restore_text( params.get( 'message', ''  ) )
+        status = params.get( 'status', 'done' )
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        user_id = params.get( 'id', False )
+        if is_admin:
+            if not user_id:
+                return trans.show_error_message( "You must specify a user to add a new address to." )
+            user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+        else:
+            user = trans.user
+        address_id = params.get( 'address_id', None )
+        if not address_id:
+            return trans.show_error_message( "Invalid address id." )
+        address_obj = trans.sa_session.query( trans.app.model.UserAddress ).get( trans.security.decode_id( address_id ) )
+        if address_obj.user_id != user.id:
+            return trans.show_error_message( "Invalid address id." )
+        if params.get( 'edit_address_button', False  ):
+            short_desc = util.restore_text( params.get( 'short_desc', ''  ) )
+            name = util.restore_text( params.get( 'name', ''  ) )
+            institution = util.restore_text( params.get( 'institution', ''  ) )
+            address = util.restore_text( params.get( 'address', ''  ) )
+            city = util.restore_text( params.get( 'city', ''  ) )
+            state = util.restore_text( params.get( 'state', ''  ) )
+            postal_code = util.restore_text( params.get( 'postal_code', ''  ) )
+            country = util.restore_text( params.get( 'country', ''  ) )
+            phone = util.restore_text( params.get( 'phone', ''  ) )
+            ok = True
+            if not short_desc:
+                ok = False
+                message = 'Enter a short description for this address'
+            elif not name:
+                ok = False
+                message = 'Enter the name'
+            elif not institution:
+                ok = False
+                message = 'Enter the institution associated with the user'
+            elif not address:
+                ok = False
+                message = 'Enter the address'
+            elif not city:
+                ok = False
+                message = 'Enter the city'
+            elif not state:
+                ok = False
+                message = 'Enter the state/province/region'
+            elif not postal_code:
+                ok = False
+                message = 'Enter the postal code'
+            elif not country:
+                ok = False
+                message = 'Enter the country'
+            if ok:
+                address_obj.desc = short_desc
+                address_obj.name = name
+                address_obj.institution = institution
+                address_obj.address = address
+                address_obj.city = city
+                address_obj.state = state
+                address_obj.postal_code = postal_code
+                address_obj.country = country
+                address_obj.phone = phone
+                trans.sa_session.add( address_obj )
+                trans.sa_session.flush()
+                message = 'Address (%s) has been updated.' % escape( address_obj.desc )
+                new_kwd = dict( message=message, status=status )
+                if is_admin:
+                    new_kwd[ 'id' ] = trans.security.encode_id( user.id )
+                return trans.response.send_redirect( web.url_for( controller='user',
+                                                                  action='manage_user_info',
+                                                                  cntrller=cntrller,
+                                                                  **new_kwd ) )
+        # Display the address form with the current values filled in
+        return trans.fill_template( 'user/edit_address.mako',
+                                    cntrller=cntrller,
+                                    user=user,
+                                    address_obj=address_obj,
+                                    message=escape( message ),
+                                    status=status )
+
+    @web.require_login( "to delete addresses" )
+    @web.expose
+    def delete_address( self, trans, cntrller, address_id=None, **kwd ):
+        return self.__delete_undelete_address( trans, cntrller, 'delete', address_id=address_id, **kwd )
+
+    @web.require_login( "to undelete addresses" )
+    @web.expose
+    def undelete_address( self, trans, cntrller, address_id=None, **kwd ):
+        return self.__delete_undelete_address( trans, cntrller, 'undelete', address_id=address_id, **kwd )
+
+    def __delete_undelete_address( self, trans, cntrller, op, address_id=None, **kwd ):
+        is_admin = cntrller == 'admin' and trans.user_is_admin()
+        user_id = kwd.get( 'id', False )
+        if is_admin:
+            if not user_id:
+                return trans.show_error_message( "You must specify a user to %s an address from." % op )
+            user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( user_id ) )
+        else:
+            user = trans.user
+        # Initialize these so the redirect below never references unbound names.
+        message = 'Invalid address id.'
+        status = 'error'
+        try:
+            user_address = trans.sa_session.query( trans.app.model.UserAddress ).get( trans.security.decode_id( address_id ) )
+        except Exception:
+            return trans.show_error_message( "Invalid address id." )
+        if user_address:
+            if user_address.user_id != user.id:
+                return trans.show_error_message( "Invalid address id." )
+            user_address.deleted = ( op == 'delete' )
+            trans.sa_session.add( user_address )
+            trans.sa_session.flush()
+            message = 'Address (%s) %sd' % ( escape( user_address.desc ), op )
+            status = 'done'
+        return trans.response.send_redirect( web.url_for( controller='user',
+                                                          action='manage_user_info',
+                                                          cntrller=cntrller,
+                                                          id=trans.security.encode_id( user.id ),
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    def set_user_pref_async( self, trans, pref_name, pref_value ):
+        """ Set a user preference asynchronously. If user is not logged in, do nothing. """
+        if trans.user:
+            trans.log_action( trans.get_user(), "set_user_pref", "", { pref_name: pref_value } )
+            trans.user.preferences[pref_name] = pref_value
+            trans.sa_session.flush()
+
+    @web.expose
+    def log_user_action_async( self, trans, action, context, params ):
+        """ Log a user action asynchronously. If user is not logged in, do nothing. """
+        if trans.user:
+            trans.log_action( trans.get_user(), action, context, params )
+
+    @web.expose
+    @web.require_login()
+    def dbkeys( self, trans, **kwds ):
+        """ Handle custom builds. """
+
+        #
+        # Process arguments and add/delete build.
+        #
+        user = trans.user
+        message = None
+        lines_skipped = 0
+        if self.installed_len_files is None:
+            installed_builds = []
+            for build in glob.glob( os.path.join(trans.app.config.len_file_path, "*.len") ):
+                installed_builds.append( os.path.basename(build).split(".len")[0] )
+            self.installed_len_files = ", ".join(installed_builds)
+        if 'dbkeys' not in user.preferences:
+            dbkeys = {}
+        else:
+            dbkeys = loads(user.preferences['dbkeys'])
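+        # Custom builds live as JSON in user.preferences['dbkeys']: each entry
+        # maps a build key to a dict holding a 'name' plus either a 'len'
+        # dataset id or a 'fasta' dataset id (and a 'count' once computed).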
+        if 'delete' in kwds:
+            # Delete a build.
+            key = kwds.get('key', '')
+            if key and key in dbkeys:
+                del dbkeys[key]
+        elif 'add' in kwds:
+            # Add new custom build.
+            name = kwds.get('name', '')
+            key = kwds.get('key', '')
+
+            # Look for build's chrom info in len_file and len_text.
+            len_file = kwds.get( 'len_file', None )
+            if getattr( len_file, "file", None ):  # Check if it's a FieldStorage object
+                len_text = len_file.file.read()
+            else:
+                len_text = kwds.get( 'len_text', None )
+
+            if not len_text:
+                # Using FASTA from history.
+                dataset_id = kwds.get('dataset_id', '')
+
+            if not name or not key or not ( len_text or dataset_id ):
+                message = "You must specify values for all the fields."
+            elif key in dbkeys:
+                message = "There is already a custom build with that key. Delete it first if you want to replace it."
+            else:
+                # Have everything needed; create new build.
+                build_dict = { "name": name }
+                if len_text:
+                    # Create new len file
+                    new_len = trans.app.model.HistoryDatasetAssociation( extension="len", create_dataset=True, sa_session=trans.sa_session )
+                    trans.sa_session.add( new_len )
+                    new_len.name = name
+                    new_len.visible = False
+                    new_len.state = trans.app.model.Job.states.OK
+                    new_len.info = "custom build .len file"
+                    try:
+                        trans.app.object_store.create( new_len.dataset )
+                    except ObjectInvalid:
+                        raise Exception( 'Unable to create output dataset: object store is full' )
+                    trans.sa_session.flush()
+                    counter = 0
+                    f = open(new_len.file_name, "w")
+                    # LEN files have format:
+                    #   <chrom_name><tab><chrom_length>
+                    for line in len_text.split("\n"):
+                        lst = line.strip().rsplit(None, 1)  # Splits at the last whitespace in the line
+                        if not lst or len(lst) < 2:
+                            lines_skipped += 1
+                            continue
+                        chrom, length = lst[0], lst[1]
+                        try:
+                            length = int(length)
+                        except ValueError:
+                            lines_skipped += 1
+                            continue
+                        counter += 1
+                        f.write("%s\t%s\n" % (chrom, length))
+                    f.close()
+                    build_dict.update( { "len": new_len.id, "count": counter } )
+                else:
+                    dataset_id = trans.security.decode_id( dataset_id )
+                    build_dict[ "fasta" ] = dataset_id
+                dbkeys[key] = build_dict
+        # Save builds.
+        # TODO: use database table to save builds.
+        user.preferences['dbkeys'] = dumps(dbkeys)
+        trans.sa_session.flush()
+
+        #
+        # Display custom builds page.
+        #
+
+        # Add chrom/contig count to dbkeys dict.
+        updated = False
+        for key, attributes in dbkeys.items():
+            if 'count' in attributes:
+                # Already have count, so do nothing.
+                continue
+
+            # Get len file.
+            fasta_dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( attributes[ 'fasta' ] )
+            len_dataset = fasta_dataset.get_converted_dataset( trans, "len" )
+            # HACK: need to request dataset again b/c get_converted_dataset()
+            # doesn't return dataset (as it probably should).
+            len_dataset = fasta_dataset.get_converted_dataset( trans, "len" )
+            if len_dataset.state == trans.app.model.Job.states.ERROR:
+                # Can't use len dataset.
+                continue
+
+            # Get chrom count file.
+            chrom_count_dataset = len_dataset.get_converted_dataset( trans, "linecount" )
+            if not chrom_count_dataset or chrom_count_dataset.state != trans.app.model.Job.states.OK:
+                # No valid linecount dataset.
+                continue
+            else:
+                # Set chrom count.
+                try:
+                    chrom_count = int( open( chrom_count_dataset.file_name ).readline() )
+                    attributes[ 'count' ] = chrom_count
+                    updated = True
+                except Exception as e:
+                    log.error( "Failed to open chrom count dataset: %s", e )
+
+        if updated:
+            user.preferences['dbkeys'] = dumps(dbkeys)
+            trans.sa_session.flush()
+
+        # Potential genome data for custom builds is limited to fasta datasets in current history for now.
+        fasta_hdas = trans.sa_session.query( model.HistoryDatasetAssociation ) \
+                          .filter_by( history=trans.history, extension="fasta", deleted=False ) \
+                          .order_by( model.HistoryDatasetAssociation.hid.desc() )
+
+        return trans.fill_template( 'user/dbkeys.mako',
+                                    user=user,
+                                    dbkeys=dbkeys,
+                                    message=message,
+                                    installed_len_files=self.installed_len_files,
+                                    lines_skipped=lines_skipped,
+                                    fasta_hdas=fasta_hdas,
+                                    use_panels=kwds.get( 'use_panels', False ) )
+
+    @web.expose
+    @web.require_login()
+    def api_keys( self, trans, cntrller, **kwd ):
+        params = util.Params( kwd )
+        message = escape( util.restore_text( params.get( 'message', ''  ) ) )
+        status = params.get( 'status', 'done' )
+        if params.get( 'new_api_key_button', False ):
+            self.create_api_key( trans, trans.user )
+            message = "Generated a new web API key"
+            status = "done"
+        return trans.fill_template( 'webapps/galaxy/user/api_keys.mako',
+                                    cntrller=cntrller,
+                                    user=trans.user,
+                                    message=message,
+                                    status=status )
+
+    def __get_redirect_url( self, redirect ):
+        root_url = url_for( '/', qualified=True )
+        # compare urls, to prevent a redirect from pointing (directly) outside of galaxy
+        # or to enter a logout/login loop
+        if not util.compare_urls( root_url, redirect, compare_path=False ):
+            log.warning('Redirect URL is outside of Galaxy, will redirect to Galaxy root instead: %s', redirect)
+            redirect = root_url
+        elif util.compare_urls( url_for( controller='user', action='logout', qualified=True ), redirect ):
+            # Avoid entering a logout/login loop.
+            redirect = root_url
+        return redirect
+
+    # ===== Methods for building SelectFields  ================================
+    def __build_user_type_fd_id_select_field( self, trans, selected_value ):
+        # Get all the user information forms
+        user_info_forms = self.get_all_forms( trans,
+                                              filter=dict( deleted=False ),
+                                              form_type=trans.model.FormDefinition.types.USER_INFO )
+        return build_select_field( trans,
+                                   objs=user_info_forms,
+                                   label_attr='name',
+                                   select_field_name='user_type_fd_id',
+                                   initial_value='none',
+                                   selected_value=selected_value,
+                                   refresh_on_change=True )
diff --git a/lib/galaxy/webapps/galaxy/controllers/userskeys.py b/lib/galaxy/webapps/galaxy/controllers/userskeys.py
new file mode 100644
index 0000000..c1a95f9
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/userskeys.py
@@ -0,0 +1,79 @@
+"""
+Contains the user interface for listing users and managing their API keys.
+"""
+
+import logging
+
+from markupsafe import escape
+from sqlalchemy import false
+
+from galaxy import (
+    util,
+    web
+)
+from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin
+
+log = logging.getLogger( __name__ )
+
+require_login_template = """
+<p>
+    This %s has been configured such that only users who are logged in may use it.%s
+</p>
+"""
+
+# FIXME: This controller is using unencoded IDs, but I am not going to address
+# this now since it is admin-side and should be reimplemented in the API
+# anyway.
+
+
+class User( BaseUIController, UsesFormDefinitionsMixin ):
+    @web.expose
+    @web.require_login()
+    @web.require_admin
+    def index( self, trans, cntrller, **kwd ):
+        return trans.fill_template( 'webapps/galaxy/user/list_users.mako', action='all_users', cntrller=cntrller )
+
+    @web.expose
+    @web.require_login()
+    @web.require_admin
+    def admin_api_keys( self, trans, cntrller, uid, **kwd ):
+        params = util.Params( kwd )
+        message = escape( util.restore_text( params.get( 'message', ''  ) ) )
+        status = params.get( 'status', 'done' )
+        uid = params.get('uid', uid)
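+        # Per the FIXME above, 'uid' is a raw (unencoded) database id, e.g.
+        # admin_api_keys?uid=42&new_api_key_button=1 creates a key for user 42.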
+        if params.get( 'new_api_key_button', False ):
+            new_key = trans.app.model.APIKeys()
+            new_key.user_id = uid
+            new_key.key = trans.app.security.get_new_guid()
+            trans.sa_session.add( new_key )
+            trans.sa_session.flush()
+            message = "A new web API key has been generated for (%s)" % escape( new_key.user.email )
+            status = "done"
+        return trans.response.send_redirect( web.url_for( controller='userskeys',
+                                                          action='all_users',
+                                                          cntrller=cntrller,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_login()
+    @web.require_admin
+    def all_users( self, trans, cntrller="userskeys", **kwd ):
+        params = util.Params( kwd )
+        message = escape( util.restore_text( params.get( 'message', ''  ) ) )
+        status = params.get( 'status', 'done' )
+        users = []
+        for user in trans.sa_session.query( trans.app.model.User ) \
+                                    .filter( trans.app.model.User.table.c.deleted == false() ) \
+                                    .order_by( trans.app.model.User.table.c.email ):
+            uid = int(user.id)
+            userkey = ""
+            for api_user in trans.sa_session.query(trans.app.model.APIKeys) \
+                    .filter( trans.app.model.APIKeys.user_id == uid):
+                userkey = api_user.key
+            users.append({'uid': uid, 'email': user.email, 'key': userkey})
+        return trans.fill_template( 'webapps/galaxy/user/list_users.mako',
+                                    cntrller=cntrller,
+                                    users=users,
+                                    message=message,
+                                    status=status )
diff --git a/lib/galaxy/webapps/galaxy/controllers/visualization.py b/lib/galaxy/webapps/galaxy/controllers/visualization.py
new file mode 100644
index 0000000..f0195cf
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -0,0 +1,1079 @@
+from __future__ import absolute_import
+
+import logging
+from json import loads
+
+from markupsafe import escape
+from paste.httpexceptions import HTTPNotFound, HTTPBadRequest
+from six import string_types
+from sqlalchemy import and_, desc, false, or_, true
+
+from galaxy import managers, model, util, web
+from galaxy.datatypes.interval import Bed
+from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.visualization.data_providers.genome import RawBedDataProvider
+from galaxy.visualization.data_providers.phyloviz import PhylovizDataProvider
+from galaxy.visualization.genomes import decode_dbkey
+from galaxy.visualization.genomes import GenomeRegion
+from galaxy.visualization.plugins import registry
+from galaxy.web import error
+from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesVisualizationMixin
+from galaxy.web.framework.helpers import grids, time_ago
+
+from .library import LibraryListGrid
+import os
+import yaml
+
+log = logging.getLogger( __name__ )
+
+
+#
+# -- Grids --
+#
+
+class NameColumn( grids.TextColumn ):
+    def get_value( self, trans, grid, history ):
+        return escape(history.get_display_name())
+
+    def get_link( self, trans, grid, history ):
+        # Provide link to list all datasets in history that have a given dbkey.
+        # Right now, only dbkey needs to be passed through, but pass through
+        # all for now since it's cleaner.
+        d = dict( action=grid.datasets_action, show_item_checkboxes=True )
+        d[ grid.datasets_param ] = trans.security.encode_id( history.id )
+        for filter, value in grid.cur_filter_dict.iteritems():
+            d[ "f-" + filter ] = value
+        return d
+
+
+class DbKeyPlaceholderColumn( grids.GridColumn ):
+    """ Placeholder to keep track of dbkey. """
+    def filter( self, trans, user, query, dbkey ):
+        return query
+
+
+class HistorySelectionGrid( grids.Grid ):
+    """
+    Grid enables user to select a history, which is then used to display
+    datasets from the history.
+    """
+    title = "Add Track: Select History"
+    model_class = model.History
+    template = '/tracks/history_select_grid.mako'
+    default_sort_key = "-update_time"
+    datasets_action = 'list_history_datasets'
+    datasets_param = "f-history"
+    columns = [
+        NameColumn( "History Name", key="name", filterable="standard", inbound=True ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago, visible=False ),
+        DbKeyPlaceholderColumn( "Dbkey", key="dbkey", model_class=model.HistoryDatasetAssociation, visible=False )
+    ]
+    num_rows_per_page = 10
+    use_async = True
+    use_paging = True
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter_by( user=trans.user, purged=False, deleted=False, importing=False )
+
+
+class LibrarySelectionGrid( LibraryListGrid ):
+    """
+    Grid enables user to select a Library, which is then used to display
+    datasets from the library.
+    """
+    title = "Add Track: Select Library"
+    template = '/tracks/history_select_grid.mako'
+    model_class = model.Library
+    datasets_action = 'list_library_datasets'
+    datasets_param = "f-library"
+    columns = [
+        NameColumn( "Library Name", key="name", filterable="standard", inbound=True  )
+    ]
+    num_rows_per_page = 10
+    use_async = True
+    use_paging = True
+
+
+class DbKeyColumn( grids.GridColumn ):
+    """ Column for filtering by and displaying dataset dbkey. """
+    def filter( self, trans, user, query, dbkey ):
+        """ Filter by dbkey. """
+        # Use raw SQL b/c metadata is a BLOB.
+        dbkey_user, dbkey = decode_dbkey( dbkey )
+        dbkey = dbkey.replace("'", "\\'")
+        return query.filter( or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ) )
+
+        # Use this query when datasets with matching dbkey *or* no dbkey can be added to the visualization.
+        # return query.filter( or_( \
+        #                        or_( "metadata like '%%\"dbkey\": [\"%s\"]%%'" % dbkey, "metadata like '%%\"dbkey\": \"%s\"%%'" % dbkey ), \
+        #                        or_( "metadata like '%%\"dbkey\": [\"?\"]%%'", "metadata like '%%\"dbkey\": \"?\"%%'" ) \
+        #                        )
+        #                    )
+
+
+class HistoryColumn( grids.GridColumn ):
+    """ Column for filtering by history id. """
+    def filter( self, trans, user, query, history_id ):
+        return query.filter( model.History.id == trans.security.decode_id(history_id) )
+
+
+class HistoryDatasetsSelectionGrid( grids.Grid ):
+    # Grid definition.
+    available_tracks = None
+    title = "Add Datasets"
+    template = "tracks/history_datasets_select_grid.mako"
+    model_class = model.HistoryDatasetAssociation
+    default_filter = { "deleted": "False", "shared": "All" }
+    default_sort_key = "-hid"
+    use_async = True
+    use_paging = False
+    columns = [
+        grids.GridColumn( "Id", key="hid" ),
+        grids.TextColumn( "Name", key="name", model_class=model.HistoryDatasetAssociation ),
+        grids.TextColumn( "Filetype", key="extension", model_class=model.HistoryDatasetAssociation ),
+        HistoryColumn( "History", key="history", visible=False ),
+        DbKeyColumn( "Dbkey", key="dbkey", model_class=model.HistoryDatasetAssociation, visible=True, sortable=False )
+    ]
+    columns.append(
+        grids.MulticolFilterColumn( "Search name and filetype", cols_to_filter=[ columns[1], columns[2] ],
+                                    key="free-text-search", visible=False, filterable="standard" )
+    )
+
+    def get_current_item( self, trans, **kwargs ):
+        """
+        Current item for grid is the history being queried. This is a bit of a
+        hack since current_item typically means the current item in the grid.
+        """
+        return trans.sa_session.query( model.History ).get( trans.security.decode_id( kwargs[ 'f-history' ] ) )
+
+    def build_initial_query( self, trans, **kwargs ):
+        return trans.sa_session.query( self.model_class ).join( model.History.table ).join( model.Dataset.table )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        if self.available_tracks is None:
+            self.available_tracks = trans.app.datatypes_registry.get_available_tracks()
+        return query.filter( model.HistoryDatasetAssociation.extension.in_(self.available_tracks) ) \
+                    .filter( model.Dataset.state == model.Dataset.states.OK ) \
+                    .filter( model.HistoryDatasetAssociation.deleted == false() ) \
+                    .filter( model.HistoryDatasetAssociation.visible == true() )
+
+
+class TracksterSelectionGrid( grids.Grid ):
+    # Grid definition.
+    title = "Insert into visualization"
+    template = "/tracks/add_to_viz.mako"
+    model_class = model.Visualization
+    default_sort_key = "-update_time"
+    use_async = True
+    use_paging = False
+    show_item_checkboxes = True
+    columns = [
+        grids.TextColumn( "Title", key="title", model_class=model.Visualization, filterable="standard" ),
+        grids.TextColumn( "Dbkey", key="dbkey", model_class=model.Visualization ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago )
+    ]
+
+    def build_initial_query( self, trans, **kwargs ):
+        return trans.sa_session.query( self.model_class ).filter( self.model_class.deleted == false() )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter( self.model_class.user_id == trans.user.id )
+
+
+class VisualizationListGrid( grids.Grid ):
+    def get_url_args( item ):
+        """
+        Returns dictionary used to create item link.
+        """
+        url_kwargs = dict( controller='visualization', id=item.id )
+        # TODO: hack to build link to saved visualization - need trans in this function instead in order to do
+        # link_data = trans.app.visualizations_registry.get_visualizations( trans, item )
+        if item.type in registry.VisualizationsRegistry.BUILT_IN_VISUALIZATIONS:
+            url_kwargs[ 'action' ] = item.type
+        else:
+            url_kwargs[ '__route_name__' ] = 'saved_visualization'
+            url_kwargs[ 'visualization_name' ] = item.type
+            url_kwargs[ 'action' ] = 'saved'
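+        # e.g. a built-in type such as 'trackster' resolves to
+        #   dict( controller='visualization', id=..., action='trackster' ),
+        # while plugin types route through the 'saved_visualization' route.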
+        return url_kwargs
+
+    def get_display_name( self, trans, item ):
+        if trans.app.visualizations_registry and item.type in trans.app.visualizations_registry.plugins:
+            plugin = trans.app.visualizations_registry.plugins[ item.type ]
+            return plugin.config.get( 'name', item.type )
+        return item.type
+
+    # Grid definition
+    title = "Saved Visualizations"
+    model_class = model.Visualization
+    default_sort_key = "-update_time"
+    default_filter = dict( title="All", deleted="False", tags="All", sharing="All" )
+    columns = [
+        grids.TextColumn( "Title", key="title", attach_popup=True, link=get_url_args ),
+        grids.TextColumn( "Type", method='get_display_name' ),
+        grids.TextColumn( "Dbkey", key="dbkey" ),
+        grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationListGrid" ),
+        grids.SharingStatusColumn( "Sharing", key="sharing", filterable="advanced", sortable=False ),
+        grids.GridColumn( "Created", key="create_time", format=time_ago ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search",
+            cols_to_filter=[ columns[0], columns[2] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+    global_actions = [
+        grids.GridAction( "Create new visualization", dict( action='create' ), inbound=True )
+    ]
+    operations = [
+        grids.GridOperation( "Open", allow_multiple=False, url_args=get_url_args ),
+        grids.GridOperation( "Open in Circster", allow_multiple=False, condition=( lambda item: item.type == 'trackster' ), url_args=dict( action='circster' ) ),
+        grids.GridOperation( "Edit Attributes", allow_multiple=False, url_args=dict( action='edit'), inbound=True),
+        grids.GridOperation( "Copy", allow_multiple=False, condition=( lambda item: not item.deleted )),
+        grids.GridOperation( "Share or Publish", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
+        grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ), confirm="Are you sure you want to delete this visualization?" ),
+    ]
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter_by( user=trans.user, deleted=False )
+
+
+class VisualizationAllPublishedGrid( grids.Grid ):
+    # Grid definition
+    use_panels = True
+    use_async = True
+    title = "Published Visualizations"
+    model_class = model.Visualization
+    default_sort_key = "update_time"
+    default_filter = dict( title="All", username="All" )
+    columns = [
+        grids.PublicURLColumn( "Title", key="title", filterable="advanced" ),
+        grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.VisualizationAnnotationAssociation, filterable="advanced" ),
+        grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
+        grids.CommunityRatingColumn( "Community Rating", key="rating" ),
+        grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.VisualizationTagAssociation, filterable="advanced", grid_name="VisualizationAllPublishedGrid" ),
+        grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search title, annotation, owner, and tags",
+            cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+            key="free-text-search", visible=False, filterable="standard" )
+    )
+
+    def build_initial_query( self, trans, **kwargs ):
+        # Join so that searching visualization.user makes sense.
+        return trans.sa_session.query( self.model_class ).join( model.User.table )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter( self.model_class.deleted == false() ).filter( self.model_class.published == true() )
+
+
+class VisualizationController( BaseUIController, SharableMixin, UsesVisualizationMixin,
+                               UsesAnnotations, UsesItemRatings ):
+    _user_list_grid = VisualizationListGrid()
+    _published_list_grid = VisualizationAllPublishedGrid()
+    _libraries_grid = LibrarySelectionGrid()
+    _histories_grid = HistorySelectionGrid()
+    _history_datasets_grid = HistoryDatasetsSelectionGrid()
+    _tracks_grid = TracksterSelectionGrid()
+
+    def __init__( self, app ):
+        super( VisualizationController, self ).__init__( app )
+        self.hda_manager = managers.hdas.HDAManager( app )
+
+    #
+    # -- Functions for listing visualizations. --
+    #
+
+    @web.expose
+    @web.require_login( "see all available libraries" )
+    def list_libraries( self, trans, **kwargs ):
+        """List all libraries that can be used for selecting datasets."""
+
+        # Render the list view
+        return self._libraries_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_login( "see a library's datasets that can added to this visualization" )
+    def list_library_datasets( self, trans, **kwargs ):
+        """List a library's datasets that can be added to a visualization."""
+
+        library = trans.sa_session.query( trans.app.model.Library ).get( self.decode_id( kwargs.get('f-library') ) )
+        return trans.fill_template( '/tracks/library_datasets_select_grid.mako',
+                                    cntrller="library",
+                                    use_panels=False,
+                                    library=library,
+                                    created_ldda_ids='',
+                                    hidden_folder_ids='',
+                                    show_deleted=False,
+                                    comptypes=[],
+                                    current_user_roles=trans.get_current_user_roles(),
+                                    message='',
+                                    status="done" )
+
+    @web.expose
+    @web.require_login( "see all available histories" )
+    def list_histories( self, trans, **kwargs ):
+        """List all histories that can be used for selecting datasets."""
+
+        # Render the list view
+        return self._histories_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_login( "see current history's datasets that can added to this visualization" )
+    def list_current_history_datasets( self, trans, **kwargs ):
+        """ List a history's datasets that can be added to a visualization. """
+
+        kwargs[ 'f-history' ] = trans.security.encode_id( trans.get_history().id )
+        kwargs[ 'show_item_checkboxes' ] = 'True'
+        return self.list_history_datasets( trans, **kwargs )
+
+    @web.expose
+    @web.require_login( "see a history's datasets that can added to this visualization" )
+    def list_history_datasets( self, trans, **kwargs ):
+        """List a history's datasets that can be added to a visualization."""
+
+        # Render the list view
+        return self._history_datasets_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_login( "see all available datasets" )
+    def list_datasets( self, trans, **kwargs ):
+        """List all datasets that can be added as tracks"""
+        # Render the list view. Note: this controller defines no dedicated
+        # dataset grid, so the history datasets grid is used here.
+        return self._history_datasets_grid( trans, **kwargs )
+
+    @web.expose
+    def list_tracks( self, trans, **kwargs ):
+        return self._tracks_grid( trans, **kwargs )
+
+    @web.expose
+    def list_published( self, trans, *args, **kwargs ):
+        kwargs[ 'embedded' ] = True
+        grid = self._published_list_grid( trans, **kwargs )
+        if 'async' in kwargs:
+            return grid
+
+        # Render grid wrapped in panels
+        return trans.fill_template( "visualization/list_published.mako", embedded_grid=grid )
+
+    @web.expose
+    @web.require_login( "use Galaxy visualizations", use_panels=True )
+    def list( self, trans, *args, **kwargs ):
+
+        # Handle operation
+        if 'operation' in kwargs and 'id' in kwargs:
+            session = trans.sa_session
+            operation = kwargs['operation'].lower()
+            ids = util.listify( kwargs['id'] )
+            for id in ids:
+                item = session.query( model.Visualization ).get( self.decode_id( id ) )
+                if operation == "delete":
+                    item.deleted = True
+                if operation == "share or publish":
+                    return self.sharing( trans, **kwargs )
+                if operation == "copy":
+                    self.copy( trans, **kwargs )
+            session.flush()
+
+        # Build list of visualizations shared with user.
+        shared_by_others = trans.sa_session \
+            .query( model.VisualizationUserShareAssociation ) \
+            .filter_by( user=trans.get_user() ) \
+            .join( model.Visualization.table ) \
+            .filter( model.Visualization.deleted == false() ) \
+            .order_by( desc( model.Visualization.update_time ) ) \
+            .all()
+
+        kwargs[ 'embedded' ] = True
+        grid = self._user_list_grid( trans, *args, **kwargs )
+        return trans.fill_template( "visualization/list.mako", embedded_grid=grid, shared_by_others=shared_by_others )
+
+    #
+    # -- Functions for operating on visualizations. --
+    #
+
+    @web.expose
+    @web.require_login( "use Galaxy visualizations", use_panels=True )
+    def index( self, trans, *args, **kwargs ):
+        """ Lists user's saved visualizations. """
+        return self.list( trans, *args, **kwargs )
+
+    @web.expose
+    @web.require_login()
+    def copy(self, trans, id, **kwargs):
+        visualization = self.get_visualization( trans, id, check_ownership=False )
+        user = trans.get_user()
+        owner = ( visualization.user == user )
+        new_title = "Copy of '%s'" % visualization.title
+        if not owner:
+            new_title += " shared by %s" % visualization.user.email
+
+        copied_viz = visualization.copy( user=trans.user, title=new_title )
+
+        # Persist
+        session = trans.sa_session
+        session.add( copied_viz )
+        session.flush()
+
+        # Display the management page
+        trans.set_message( 'Created new visualization with name "%s"' % copied_viz.title )
+        return
+
+    @web.expose
+    @web.require_login( "use Galaxy visualizations" )
+    def set_accessible_async( self, trans, id=None, accessible=False ):
+        """ Set visualization's importable attribute and slug. """
+        visualization = self.get_visualization( trans, id )
+
+        # Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
+        importable = accessible in ['True', 'true', 't', 'T']
+        if visualization and visualization.importable != importable:
+            if importable:
+                self._make_item_accessible( trans.sa_session, visualization )
+            else:
+                visualization.importable = importable
+            trans.sa_session.flush()
+
+        return
+
+    @web.expose
+    @web.require_login( "rate items" )
+    @web.json
+    def rate_async( self, trans, id, rating ):
+        """ Rate a visualization asynchronously and return updated community data. """
+
+        visualization = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+        if not visualization:
+            return trans.show_error_message( "The specified visualization does not exist." )
+
+        # Rate visualization.
+        self.rate_item( trans.sa_session, trans.get_user(), visualization, rating )
+
+        return self.get_ave_item_rating_data( trans.sa_session, visualization )
+
+    @web.expose
+    @web.require_login( "share Galaxy visualizations" )
+    def imp( self, trans, id ):
+        """ Import a visualization into user's workspace. """
+        # Set referer message.
+        referer = trans.request.referer
+        if referer:
+            referer_message = "<a href='%s'>return to the previous page</a>" % escape(referer)
+        else:
+            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % web.url_for( '/' )
+
+        # Do import.
+        session = trans.sa_session
+        visualization = self.get_visualization( trans, id, check_ownership=False )
+        if visualization.importable is False:
+            return trans.show_error_message( "The owner of this visualization has disabled imports via this link.<br>You can %s" % referer_message, use_panels=True )
+        elif visualization.deleted:
+            return trans.show_error_message( "You can't import this visualization because it has been deleted.<br>You can %s" % referer_message, use_panels=True )
+        else:
+            # Create imported visualization via copy.
+            #   TODO: need to handle custom db keys.
+
+            imported_visualization = visualization.copy( user=trans.user, title="imported: " + visualization.title )
+
+            # Persist (session was obtained above)
+            session.add( imported_visualization )
+            session.flush()
+
+            # Redirect to load galaxy frames.
+            return trans.show_ok_message(
+                message="""Visualization "%s" has been imported. <br>You can <a href="%s">start using this visualization</a> or %s."""
+                % ( visualization.title, web.url_for( controller='visualization' ), referer_message ), use_panels=True )
+
+    @web.expose
+    @web.require_login( "share Galaxy visualizations" )
+    def sharing( self, trans, id, **kwargs ):
+        """ Handle visualization sharing. """
+
+        # Get session and visualization.
+        session = trans.sa_session
+        visualization = self.get_visualization( trans, id, check_ownership=True )
+
+        # Do operation on visualization.
+        if 'make_accessible_via_link' in kwargs:
+            self._make_item_accessible( trans.sa_session, visualization )
+        elif 'make_accessible_and_publish' in kwargs:
+            self._make_item_accessible( trans.sa_session, visualization )
+            visualization.published = True
+        elif 'publish' in kwargs:
+            visualization.published = True
+        elif 'disable_link_access' in kwargs:
+            visualization.importable = False
+        elif 'unpublish' in kwargs:
+            visualization.published = False
+        elif 'disable_link_access_and_unpublish' in kwargs:
+            visualization.importable = visualization.published = False
+        elif 'unshare_user' in kwargs:
+            user = session.query( model.User ).get( self.decode_id( kwargs[ 'unshare_user' ] ) )
+            if not user:
+                error( "User not found for provided id" )
+            association = session.query( model.VisualizationUserShareAssociation ) \
+                                 .filter_by( user=user, visualization=visualization ).one()
+            session.delete( association )
+
+        session.flush()
+
+        return trans.fill_template( "/sharing_base.mako", item=visualization, use_panels=True )
+
+    @web.expose
+    @web.require_login( "share Galaxy visualizations" )
+    def share( self, trans, id=None, email="", use_panels=False ):
+        """ Handle sharing a visualization with a particular user. """
+        msg = mtype = None
+        visualization = self.get_visualization( trans, id, check_ownership=True )
+        if email:
+            other = trans.sa_session.query( model.User ) \
+                                    .filter( and_( model.User.table.c.email == email,
+                                                   model.User.table.c.deleted == false() ) ) \
+                                    .first()
+            if not other:
+                mtype = "error"
+                msg = ( "User '%s' does not exist" % escape( email ) )
+            elif other == trans.get_user():
+                mtype = "error"
+                msg = ( "You cannot share a visualization with yourself" )
+            elif trans.sa_session.query( model.VisualizationUserShareAssociation ) \
+                    .filter_by( user=other, visualization=visualization ).count() > 0:
+                mtype = "error"
+                msg = ( "Visualization already shared with '%s'" % escape( email ) )
+            else:
+                share = model.VisualizationUserShareAssociation()
+                share.visualization = visualization
+                share.user = other
+                session = trans.sa_session
+                session.add( share )
+                self.create_item_slug( session, visualization )
+                session.flush()
+                viz_title = escape( visualization.title )
+                other_email = escape( other.email )
+                trans.set_message( "Visualization '%s' shared with user '%s'" % ( viz_title, other_email ) )
+                return trans.response.send_redirect( web.url_for(controller='visualization', action='sharing', id=id ) )
+        return trans.fill_template( "/ind_share_base.mako",
+                                    message=msg,
+                                    messagetype=mtype,
+                                    item=visualization,
+                                    email=email,
+                                    use_panels=use_panels )
+
+    @web.expose
+    def display_by_username_and_slug( self, trans, username, slug ):
+        """ Display visualization based on a username and slug. """
+
+        # Get visualization.
+        session = trans.sa_session
+        user = session.query( model.User ).filter_by( username=username ).first()
+        visualization = trans.sa_session.query( model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first()
+        if visualization is None:
+            raise web.httpexceptions.HTTPNotFound()
+
+        # Security check raises error if user cannot access visualization.
+        self.security_check( trans, visualization, check_ownership=False, check_accessible=True )
+
+        # Get rating data.
+        user_item_rating = 0
+        if trans.get_user():
+            user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), visualization )
+            if user_item_rating:
+                user_item_rating = user_item_rating.rating
+            else:
+                user_item_rating = 0
+        ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, visualization )
+
+        # Fork to template based on visualization.type (registry or builtin).
+        if ( ( trans.app.visualizations_registry and visualization.type in trans.app.visualizations_registry.plugins ) and
+                ( visualization.type not in trans.app.visualizations_registry.BUILT_IN_VISUALIZATIONS ) ):
+            # if a registry visualization, load a version of display.mako that will load the vis into an iframe :(
+            # TODO: simplest path from A to B but not optimal - will be difficult to do reg visualizations any other way
+            # TODO: this will load the visualization twice (once above, once when the iframe src calls 'saved')
+            encoded_visualization_id = trans.security.encode_id( visualization.id )
+            return trans.stream_template_mako( 'visualization/display_in_frame.mako',
+                                               item=visualization, encoded_visualization_id=encoded_visualization_id,
+                                               user_item_rating=user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings,
+                                               content_only=True )
+
+        visualization_config = self.get_visualization_config( trans, visualization )
+        return trans.stream_template_mako( "visualization/display.mako", item=visualization, item_data=visualization_config,
+                                           user_item_rating=user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings,
+                                           content_only=True )
+
+    @web.expose
+    @web.json
+    @web.require_login( "get item name and link" )
+    def get_name_and_link_async( self, trans, id=None ):
+        """ Returns visualization's name and link. """
+        visualization = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+
+        if self.create_item_slug( trans.sa_session, visualization ):
+            trans.sa_session.flush()
+        return_dict = { "name": visualization.title,
+                        "link": web.url_for(controller='visualization', action="display_by_username_and_slug", username=visualization.user.username, slug=visualization.slug ) }
+        return return_dict
+
+    @web.expose
+    def get_item_content_async( self, trans, id ):
+        """ Returns item content in HTML format. """
+
+        # Get visualization, making sure it's accessible.
+        visualization = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+        if visualization is None:
+            raise web.httpexceptions.HTTPNotFound()
+
+        # Return content.
+        visualization_config = self.get_visualization_config( trans, visualization )
+        return trans.fill_template_mako( "visualization/item_content.mako", encoded_id=trans.security.encode_id(visualization.id),
+                                         item=visualization, item_data=visualization_config, content_only=True )
+
+    @web.expose
+    @web.require_login( "create visualizations" )
+    def create( self, trans, visualization_title="", visualization_slug="", visualization_annotation="", visualization_dbkey="",
+                visualization_type="" ):
+        """
+        Creates a new visualization or returns a form for creating visualization.
+        """
+        visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
+        if trans.request.method == "POST":
+            rval = self.create_visualization( trans, title=visualization_title,
+                                              slug=visualization_slug,
+                                              annotation=visualization_annotation,
+                                              dbkey=visualization_dbkey,
+                                              type=visualization_type )
+            if isinstance( rval, dict ):
+                # Found error creating viz.
+                visualization_title_err = rval[ 'title_err' ]
+                visualization_slug_err = rval[ 'slug_err' ]
+            else:
+                # Successfully created viz.
+                return trans.response.send_redirect( web.url_for(controller='visualization', action='list' ) )
+
+        viz_type_options = [ ( t, t ) for t in self.viz_types ]
+        return trans.show_form(
+            web.FormBuilder( web.url_for(controller='visualization', action='create'), "Create new visualization", submit_text="Submit" )
+            .add_text( "visualization_title", "Visualization title", value=visualization_title, error=visualization_title_err )
+            .add_select( "visualization_type", "Type", options=viz_type_options, error=None )
+            .add_text( "visualization_slug", "Visualization identifier", value=visualization_slug, error=visualization_slug_err,
+                       help="""A unique identifier that will be used for
+                            public links to this visualization. A default is generated
+                            from the visualization title, but can be edited. This field
+                            must contain only lowercase letters, numbers, and
+                            the '-' character.""" )
+            .add_select( "visualization_dbkey", "Visualization DbKey/Build", value=visualization_dbkey, options=trans.app.genomes.get_dbkeys( trans, chrom_info=True ), error=None)
+            .add_text( "visualization_annotation", "Visualization annotation", value=visualization_annotation, error=visualization_annotation_err,
+                       help="A description of the visualization; annotation is shown alongside published visualizations."),
+            template="visualization/create.mako"
+        )
+
+    @web.json
+    def save( self, trans, vis_json=None, type=None, id=None, title=None, dbkey=None, annotation=None ):
+        """
+        Save a visualization; if visualization does not have an ID, a new
+        visualization is created. Returns JSON of visualization.
+        """
+        # Get visualization attributes from kwargs or from config.
+        vis_config = loads( vis_json )
+        vis_type = type or vis_config[ 'type' ]
+        vis_id = id or vis_config.get( 'id', None )
+        vis_title = title or vis_config.get( 'title', None )
+        vis_dbkey = dbkey or vis_config.get( 'dbkey', None )
+        vis_annotation = annotation or vis_config.get( 'annotation', None )
+        return self.save_visualization( trans, vis_config, vis_type, vis_id, vis_title, vis_dbkey, vis_annotation )
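+
+    # Illustrative only - a minimal `vis_json` payload this endpoint could
+    # receive (assumed shape, inferred from the keys read above):
+    #   { "type": "trackster", "id": null, "title": "My visualization",
+    #     "dbkey": "hg19", "annotation": "..." }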
+
+    @web.expose
+    @web.require_login( "edit visualizations" )
+    def edit( self, trans, id, visualization_title="", visualization_slug="", visualization_annotation="" ):
+        """
+        Edit a visualization's attributes.
+        """
+        visualization = self.get_visualization( trans, id, check_ownership=True )
+        session = trans.sa_session
+
+        visualization_title_err = visualization_slug_err = visualization_annotation_err = ""
+        if trans.request.method == "POST":
+            if not visualization_title:
+                visualization_title_err = "Visualization name is required"
+            elif not visualization_slug:
+                visualization_slug_err = "Visualization id is required"
+            elif not self._is_valid_slug( visualization_slug ):
+                visualization_slug_err = "Visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
+            elif visualization_slug != visualization.slug and trans.sa_session.query( model.Visualization ).filter_by( user=visualization.user, slug=visualization_slug, deleted=False ).first():
+                visualization_slug_err = "Visualization id must be unique"
+            else:
+                visualization.title = visualization_title
+                visualization.slug = visualization_slug
+                if visualization_annotation != "":
+                    visualization_annotation = sanitize_html( visualization_annotation, 'utf-8', 'text/html' )
+                    self.add_item_annotation( trans.sa_session, trans.get_user(), visualization, visualization_annotation )
+                session.flush()
+                # Redirect to visualization list.
+                return trans.response.send_redirect( web.url_for(controller='visualization', action='list' ) )
+        else:
+            visualization_title = visualization.title
+            # Create slug if it's not already set.
+            if visualization.slug is None:
+                self.create_item_slug( trans.sa_session, visualization )
+            visualization_slug = visualization.slug
+            visualization_annotation = self.get_item_annotation_str( trans.sa_session, trans.user, visualization )
+            if not visualization_annotation:
+                visualization_annotation = ""
+        return trans.show_form(
+            web.FormBuilder( web.url_for(controller='visualization', action='edit', id=id ), "Edit visualization attributes", submit_text="Submit" )
+            .add_text( "visualization_title", "Visualization title", value=visualization_title, error=visualization_title_err )
+            .add_text( "visualization_slug", "Visualization identifier", value=visualization_slug, error=visualization_slug_err,
+                       help="""A unique identifier that will be used for
+                            public links to this visualization. A default is generated
+                            from the visualization title, but can be edited. This field
+                            must contain only lowercase letters, numbers, and
+                            the '-' character.""" )
+            .add_text( "visualization_annotation", "Visualization annotation", value=visualization_annotation, error=visualization_annotation_err,
+                       help="A description of the visualization; annotation is shown alongside published visualizations."),
+            template="visualization/create.mako" )
+
+    # ------------------------- registry.
+    @web.expose
+    @web.require_login( "use Galaxy visualizations", use_panels=True )
+    def render( self, trans, visualization_name, embedded=None, **kwargs ):
+        """
+        Render the appropriate visualization template, parsing the `kwargs`
+        into appropriate variables and resources (such as ORM models)
+        based on this visualization's `param` data in visualizations_conf.xml.
+
+        URL: /visualization/show/{visualization_name}
+        """
+        plugin = self._get_plugin_from_registry( trans, visualization_name )
+        try:
+            return plugin.render( trans=trans, embedded=embedded, **kwargs )
+        except Exception as exception:
+            self._handle_plugin_error( trans, visualization_name, exception )
+
+    def _get_plugin_from_registry( self, trans, visualization_name ):
+        """
+        Get the named plugin from the registry.
+        :raises HTTPNotFound: if registry has been turned off in config.
+        :raises HTTPNotFound: if visualization_name isn't a registered plugin.
+        """
+        if not trans.app.visualizations_registry:
+            raise HTTPNotFound( 'No visualization registry (possibly disabled in galaxy.ini)' )
+        return trans.app.visualizations_registry.get_plugin( visualization_name )
+
+    def _handle_plugin_error( self, trans, visualization_name, exception ):
+        """
+        Log, raise if debugging; log and show html message if not.
+        """
+        log.exception( 'error rendering visualization (%s): %s', visualization_name, str( exception ) )
+        if trans.debug:
+            raise
+        return trans.show_error_message(
+            "There was an error rendering the visualization. " +
+            "Contact your Galaxy administrator if the problem persists." +
+            "<br/>Details: " + str( exception ), use_panels=False )
+
+    @web.expose
+    @web.require_login( "use Galaxy visualizations", use_panels=True )
+    def saved( self, trans, id=None, revision=None, type=None, config=None, title=None, **kwargs ):
+        """
+        Save (on POST) or load (on GET) a visualization then render.
+        """
+        # TODO: consider merging saved and render at this point (could break saved URLs, tho)
+        if trans.request.method == 'POST':
+            self._POST_to_saved( trans, id=id, revision=revision, type=type, config=config, title=title, **kwargs )
+
+        # check the id and load the saved visualization
+        if id is None:
+            return HTTPBadRequest( 'A valid visualization id is required to load a visualization' )
+        visualization = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+
+        # re-add title to kwargs for passing to render
+        if title:
+            kwargs[ 'title' ] = title
+        plugin = self._get_plugin_from_registry( trans, visualization.type )
+        try:
+            return plugin.render_saved( visualization, trans=trans, **kwargs )
+        except Exception as exception:
+            self._handle_plugin_error( trans, visualization.type, exception )
+
+    def _POST_to_saved( self, trans, id=None, revision=None, type=None, config=None, title=None, **kwargs ):
+        """
+        Save the visualization info (revision, type, config, title, etc.) to
+        the Visualization at `id` or to a new Visualization if `id` is None.
+
+        Uses the POST/redirect/GET pattern: after a successful save, redirects
+        to the GET view of the saved visualization.
+        """
+        DEFAULT_VISUALIZATION_NAME = 'Unnamed Visualization'
+
+        # post to saved in order to save a visualization
+        if type is None or config is None:
+            return HTTPBadRequest( 'A visualization type and config are required to save a visualization' )
+        if isinstance( config, string_types ):
+            config = loads( config )
+        title = title or DEFAULT_VISUALIZATION_NAME
+
+        # TODO: allow saving to (updating) a specific revision - should be part of UsesVisualization
+        # TODO: would be easier if this returned the visualization directly
+        # check security if posting to existing visualization
+        if id is not None:
+            self.get_visualization( trans, id, check_ownership=True, check_accessible=False )
+            # NOTE: if the user is not the owner, an error is raised but not returned (status stays 200)
+        # TODO: there's no security check in save visualization (if passed an id)
+        returned = self.save_visualization( trans, config, type, id, title )
+
+        # redirect to GET to prevent annoying 'Do you want to post again?' dialog on page reload
+        render_url = web.url_for( controller='visualization', action='saved', id=returned.get( 'vis_id' ) )
+        return trans.response.send_redirect( render_url )
+
+    #
+    # Visualizations.
+    #
+    @web.expose
+    @web.require_login()
+    def trackster(self, trans, **kwargs):
+        """
+        Display browser for the visualization denoted by id and add the datasets listed in `dataset_ids`.
+        """
+
+        # define app configuration
+        app = { 'jscript' : "viz/trackster" }
+
+        # get saved visualization id, if any
+        id = kwargs.get( "id", None )
+
+        # get dataset to add, if any
+        new_dataset_id = kwargs.get( "dataset_id", None )
+
+        # set up new browser if no id provided
+        if not id:
+            # use dbkey from dataset to be added or from incoming parameter
+            dbkey = None
+            if new_dataset_id:
+                decoded_id = self.decode_id( new_dataset_id )
+                hda = self.hda_manager.get_owned( decoded_id, trans.user, current_history=trans.history )
+                dbkey = hda.dbkey
+                if dbkey == '?':
+                    dbkey = kwargs.get( "dbkey", None )
+
+            # save database key
+            app['default_dbkey'] = dbkey
+        else:
+            # load saved visualization
+            vis = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+            app['viz_config'] = self.get_visualization_config( trans, vis )
+
+        # backup id
+        app['id'] = id
+
+        # add dataset id
+        app['add_dataset'] = new_dataset_id
+
+        # check for gene region
+        gene_region = GenomeRegion.from_str(kwargs.get("gene_region", ""))
+
+        # update gene region of saved visualization if user passes a new gene region in the url
+        if gene_region.chrom is not None:
+            app['gene_region'] = {
+                'chrom' : gene_region.chrom,
+                'start' : gene_region.start,
+                'end'   : gene_region.end
+            }
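+        # For reference: gene_region is parsed from strings such as
+        # "chr1:1000-2000" (assumed input format for GenomeRegion.from_str);
+        # chrom stays None when no region was supplied in the URL.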
+
+        # fill template
+        return trans.fill_template('galaxy.panels.mako', config={'right_panel': True, 'app': app})
+
+    @web.expose
+    def circster( self, trans, id=None, hda_ldda=None, dataset_id=None, dbkey=None ):
+        """
+        Display a circster visualization.
+        """
+
+        # Get dataset to add.
+        dataset = None
+        if dataset_id:
+            dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
+
+        # Get/create vis.
+        if id:
+            # Display existing viz.
+            vis = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+            dbkey = vis.dbkey
+        else:
+            # Create new viz.
+            if not dbkey:
+                # If dbkey not specified, use dataset's dbkey.
+                dbkey = dataset.dbkey
+                if not dbkey or dbkey == '?':
+                    # Circster requires a valid dbkey.
+                    return trans.show_error_message( "You must set the dataset's dbkey to view it. You can set "
+                                                     "a dataset's dbkey by clicking on the pencil icon and editing "
+                                                     "its attributes.", use_panels=True )
+
+            vis = self.create_visualization( trans, type="genome", dbkey=dbkey, save=False )
+
+        # Get the vis config and work with it from here on out. Working with the
+        # config is only possible because the config structure of trackster/genome
+        # visualizations is well known.
+        viz_config = self.get_visualization_config( trans, vis )
+
+        # Add dataset if specified.
+        if dataset:
+            viz_config[ 'tracks' ].append( self.get_new_track_config( trans, dataset ) )
+
+        # Get genome info.
+        chroms_info = self.app.genomes.chroms( trans, dbkey=dbkey )
+        genome = { 'dbkey': dbkey, 'chroms_info': chroms_info }
+
+        # Add genome-wide data to each track in viz.
+        tracks = viz_config.get( 'tracks', [] )
+        for track in tracks:
+            dataset_dict = track[ 'dataset' ]
+            dataset = self.get_hda_or_ldda( trans, dataset_dict[ 'hda_ldda'], dataset_dict[ 'id' ] )
+
+            genome_data = self._get_genome_data( trans, dataset, dbkey )
+            if not isinstance( genome_data, string_types ):
+                track[ 'preloaded_data' ] = genome_data
+
+        # define app configuration for generic mako template
+        app = {
+            'jscript'       : "viz/circster",
+            'viz_config'    : viz_config,
+            'genome'        : genome
+        }
+
+        # fill template
+        return trans.fill_template('galaxy.panels.mako', config={'app' : app})
+
+    @web.expose
+    def sweepster( self, trans, id=None, hda_ldda=None, dataset_id=None, regions=None ):
+        """
+        Displays a sweepster visualization using the incoming parameters. If id is available,
+        get the visualization with the given id; otherwise, create a new visualization using
+        a given dataset and regions.
+        """
+        regions = regions or '{}'
+        # Need to create history if necessary in order to create tool form.
+        trans.get_history( most_recent=True, create=True )
+
+        if id:
+            # Loading a shared visualization.
+            viz = self.get_visualization( trans, id )
+            viz_config = self.get_visualization_config( trans, viz )
+            decoded_id = self.decode_id( viz_config[ 'dataset_id' ] )
+            dataset = self.hda_manager.get_owned( decoded_id, trans.user, current_history=trans.history )
+        else:
+            # Loading new visualization.
+            dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
+            job = self.hda_manager.creating_job( dataset )
+            viz_config = {
+                'dataset_id': dataset_id,
+                'tool_id': job.tool_id,
+                'regions': loads( regions )
+            }
+
+        # Add tool, dataset attributes to config based on id.
+        tool = trans.app.toolbox.get_tool( viz_config[ 'tool_id' ] )
+        viz_config[ 'tool' ] = tool.to_dict( trans, io_details=True )
+        viz_config[ 'dataset' ] = trans.security.encode_dict_ids( dataset.to_dict() )
+
+        return trans.fill_template_mako( "visualization/sweepster.mako", config=viz_config )
+
+    def get_item( self, trans, id ):
+        return self.get_visualization( trans, id )
+
+    @web.expose
+    def phyloviz( self, trans, id=None, dataset_id=None, tree_index=0, **kwargs ):
+        config = None
+        data = None
+
+        # if id, then this is a saved visualization; get its config and the dataset_id from there
+        if id:
+            visualization = self.get_visualization( trans, id )
+            config = self.get_visualization_config( trans, visualization )
+            dataset_id = config.get( 'dataset_id', None )
+
+        # get the hda if we can, then its data using the phyloviz parsers
+        if dataset_id:
+            decoded_id = self.decode_id( dataset_id )
+            hda = self.hda_manager.get_accessible( decoded_id, trans.user )
+            hda = self.hda_manager.error_if_uploading( hda )
+        else:
+            return trans.show_message( "Phyloviz couldn't find a dataset_id" )
+
+        pd = PhylovizDataProvider( original_dataset=hda )
+        data = pd.get_data( tree_index=tree_index )
+
+        # ensure at least a default configuration (e.g. a new/unsaved visualization)
+        if not config:
+            config = {
+                'dataset_id': dataset_id,
+                'title'     : hda.display_name(),
+                'ext'       : hda.datatype.file_ext,
+                'treeIndex' : tree_index,
+                'saved_visualization' : False
+            }
+        return trans.fill_template_mako( "visualization/phyloviz.mako", data=data, config=config )
+
+    @web.expose
+    @web.require_login( "run Galaxy Interactive Environments" )
+    def gie_list( self, trans, **kwargs ):
+        if not hasattr( self, 'gie_image_map' ):
+            self.gie_image_map = {}
+
+            for gie_dir in self.app.config.gie_dirs:
+                gie_list = os.listdir( gie_dir )
+                for gie in gie_list:
+                    gie_path = os.path.join(gie_dir, gie)
+
+                    if not os.path.isdir(gie_path):
+                        continue
+
+                    if not os.path.exists(self._gie_config_dir(gie_path)):
+                        continue
+
+                    if os.path.exists( self._gie_config_dir( gie_path, 'allowed_images.yml' ) ):
+                        image_file = self._gie_config_dir( gie_path, 'allowed_images.yml' )
+                    elif os.path.exists( self._gie_config_dir( gie_path, 'allowed_images.yml.sample' ) ):
+                        image_file = self._gie_config_dir( gie_path, 'allowed_images.yml.sample' )
+                    else:
+                        continue
+
+                    with open( image_file, 'r' ) as handle:
+                        self.gie_image_map[gie] = yaml.safe_load( handle )
+
+        return trans.fill_template_mako(
+            "visualization/gie.mako",
+            gie_image_map=self.gie_image_map,
+            history=trans.get_history(),
+        )
+
+    def _gie_config_dir(self, gie_path, *args):
+        nargs = [gie_path, 'config']
+        if len(args) > 0:
+            nargs += args
+        return os.path.join(*nargs)
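+
+    # Example (paths hypothetical):
+    #   self._gie_config_dir( '/srv/gies/jupyter' )
+    #       -> '/srv/gies/jupyter/config'
+    #   self._gie_config_dir( '/srv/gies/jupyter', 'allowed_images.yml' )
+    #       -> '/srv/gies/jupyter/config/allowed_images.yml'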
+
+    @web.json
+    def bookmarks_from_dataset( self, trans, hda_id=None, ldda_id=None ):
+        if hda_id:
+            hda_ldda = "hda"
+            dataset_id = hda_id
+        elif ldda_id:
+            hda_ldda = "ldda"
+            dataset_id = ldda_id
+        else:
+            # neither id was provided; fail early rather than hit a NameError below
+            raise web.httpexceptions.HTTPBadRequest( "A hda_id or ldda_id is required" )
+        dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
+
+        rows = []
+        if isinstance( dataset.datatype, Bed ):
+            data = RawBedDataProvider( original_dataset=dataset ).get_iterator()
+            for i, line in enumerate( data ):
+                if ( i > 500 ):
+                    break
+                fields = line.split()
+                location = name = "%s:%s-%s" % ( fields[0], fields[1], fields[2] )
+                if len( fields ) > 3:
+                    # BED column 4 (index 3) is the feature name
+                    name = fields[3]
+                rows.append( [location, name] )
+        return { 'data': rows }
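+
+    # Illustrative only: given a BED line "chr1\t100\t200\tmy_feature", the
+    # row produced above is [ 'chr1:100-200', 'my_feature' ]; intervals
+    # without a name column reuse the location string as the name.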
diff --git a/lib/galaxy/webapps/galaxy/controllers/workflow.py b/lib/galaxy/webapps/galaxy/controllers/workflow.py
new file mode 100644
index 0000000..7d1e742
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -0,0 +1,1315 @@
+import base64
+import httplib
+import json
+import logging
+import os
+import sgmllib
+import urllib2
+
+from sqlalchemy import and_
+from sqlalchemy.sql import expression
+from markupsafe import escape
+
+from tool_shed.util import encoding_util
+
+from galaxy import model
+from galaxy import util
+from galaxy import web
+from galaxy import exceptions
+from galaxy.managers import workflows, histories
+from galaxy.model.item_attrs import UsesItemRatings
+from galaxy.model.mapping import desc
+from galaxy.util import unicodify, FILENAME_VALID_CHARS
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.web import error, url_for
+from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesStoredWorkflowMixin
+from galaxy.web.framework.formbuilder import form
+from galaxy.web.framework.helpers import grids, time_ago, to_unicode
+from galaxy.workflow.extract import extract_workflow
+from galaxy.workflow.extract import summarize
+from galaxy.workflow.modules import module_factory
+from galaxy.workflow.modules import WorkflowModuleInjector
+from galaxy.workflow.render import WorkflowCanvas, STANDALONE_SVG_TEMPLATE
+
+log = logging.getLogger( __name__ )
+
+
+class StoredWorkflowListGrid( grids.Grid ):
+
+    class StepsColumn( grids.GridColumn ):
+        def get_value(self, trans, grid, workflow):
+            return len( workflow.latest_workflow.steps )
+
+    # Grid definition
+    use_panels = True
+    title = "Saved Workflows"
+    model_class = model.StoredWorkflow
+    default_filter = { "name": "All", "tags": "All" }
+    default_sort_key = "-update_time"
+    columns = [
+        grids.TextColumn( "Name", key="name", attach_popup=True, filterable="advanced" ),
+        grids.IndividualTagsColumn( "Tags",
+                                    "tags",
+                                    model_tag_association_class=model.StoredWorkflowTagAssociation,
+                                    filterable="advanced",
+                                    grid_name="StoredWorkflowListGrid" ),
+        StepsColumn( "Steps" ),
+        grids.GridColumn( "Created", key="create_time", format=time_ago ),
+        grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search",
+            cols_to_filter=[ columns[0], columns[1] ],
+            key="free-text-search", visible=False, filterable="standard"
+        )
+    )
+    operations = [
+        grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted ), async_compatible=False ),
+        grids.GridOperation( "Run", condition=( lambda item: not item.deleted ), async_compatible=False ),
+        grids.GridOperation( "Copy", condition=( lambda item: not item.deleted ), async_compatible=False  ),
+        grids.GridOperation( "Rename", condition=( lambda item: not item.deleted ), async_compatible=False  ),
+        grids.GridOperation( "Sharing", condition=( lambda item: not item.deleted ), async_compatible=False ),
+        grids.GridOperation( "Delete", condition=( lambda item: item.deleted ), async_compatible=True ),
+    ]
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        return query.filter_by( user=trans.user, deleted=False )
+
+
+class StoredWorkflowAllPublishedGrid( grids.Grid ):
+    title = "Published Workflows"
+    model_class = model.StoredWorkflow
+    default_sort_key = "update_time"
+    default_filter = dict( public_url="All", username="All", tags="All" )
+    use_async = True
+    columns = [
+        grids.PublicURLColumn( "Name", key="name", filterable="advanced", attach_popup=True ),
+        grids.OwnerAnnotationColumn( "Annotation",
+                                     key="annotation",
+                                     model_annotation_association_class=model.StoredWorkflowAnnotationAssociation,
+                                     filterable="advanced" ),
+        grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
+        grids.CommunityRatingColumn( "Community Rating", key="rating" ),
+        grids.CommunityTagsColumn( "Community Tags", key="tags",
+                                   model_tag_association_class=model.StoredWorkflowTagAssociation,
+                                   filterable="advanced", grid_name="PublicWorkflowListGrid" ),
+        grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
+    ]
+    columns.append(
+        grids.MulticolFilterColumn(
+            "Search name, annotation, owner, and tags",
+            cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+            key="free-text-search", visible=False, filterable="standard"
+        )
+    )
+    operations = [
+        grids.GridOperation(
+            "Import",
+            condition=( lambda item: not item.deleted ),
+            allow_multiple=False,
+            url_args=dict( action="imp")
+        ),
+        grids.GridOperation(
+            "Save as File",
+            condition=( lambda item: not item.deleted ),
+            allow_multiple=False,
+            url_args=dict( action="export_to_file" )
+        ),
+    ]
+
+    def build_initial_query( self, trans, **kwargs ):
+        # Join so that searching stored_workflow.user makes sense.
+        return trans.sa_session.query( self.model_class ).join( model.User.table )
+
+    def apply_query_filter( self, trans, query, **kwargs ):
+        # A public workflow is published, has a slug, and is not deleted.
+        return query.filter(
+            self.model_class.published == expression.true() ).filter(
+            self.model_class.slug.isnot(None)).filter(
+            self.model_class.deleted == expression.false())
+
+
+# Simple SGML parser to get all content in a single tag.
+class SingleTagContentsParser( sgmllib.SGMLParser ):
+
+    def __init__( self, target_tag ):
+        sgmllib.SGMLParser.__init__( self )
+        self.target_tag = target_tag
+        self.cur_tag = None
+        self.tag_content = ""
+
+    def unknown_starttag( self, tag, attrs ):
+        """ Called for each start tag. """
+        self.cur_tag = tag
+
+    def handle_data( self, text ):
+        """ Called for each block of plain text. """
+        if self.cur_tag == self.target_tag:
+            self.tag_content += text
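+
+    # Minimal usage sketch (input HTML hypothetical):
+    #   parser = SingleTagContentsParser( 'title' )
+    #   parser.feed( '<html><title>My Workflow</title></html>' )
+    #   parser.tag_content  # -> 'My Workflow'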
+
+
+class WorkflowController( BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesItemRatings ):
+    stored_list_grid = StoredWorkflowListGrid()
+    published_list_grid = StoredWorkflowAllPublishedGrid()
+
+    __myexp_url = "www.myexperiment.org:80"
+
+    @web.expose
+    def index( self, trans ):
+        return self.list( trans )
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def list_grid( self, trans, **kwargs ):
+        """ List user's stored workflows. """
+        # status = message = None
+        if 'operation' in kwargs:
+            operation = kwargs['operation'].lower()
+            if operation == "rename":
+                return self.rename( trans, **kwargs )
+            history_ids = util.listify( kwargs.get( 'id', [] ) )
+            if operation == "sharing":
+                return self.sharing( trans, id=history_ids )
+        return self.stored_list_grid( trans, **kwargs )
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows", use_panels=True )
+    def list( self, trans ):
+        """
+        Render workflow main page (management of existing workflows)
+        """
+        user = trans.get_user()
+        workflows = trans.sa_session.query( model.StoredWorkflow ) \
+            .filter_by( user=user, deleted=False ) \
+            .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
+            .all()
+        shared_by_others = trans.sa_session \
+            .query( model.StoredWorkflowUserShareAssociation ) \
+            .filter_by( user=user ) \
+            .join( 'stored_workflow' ) \
+            .filter( model.StoredWorkflow.deleted == expression.false() ) \
+            .order_by( desc( model.StoredWorkflow.update_time ) ) \
+            .all()
+
+        # Legacy issue: all shared workflows must have slugs.
+        slug_set = False
+        for workflow_assoc in shared_by_others:
+            if self.create_item_slug( trans.sa_session, workflow_assoc.stored_workflow ):
+                slug_set = True
+        if slug_set:
+            trans.sa_session.flush()
+
+        return trans.fill_template( "workflow/list.mako",
+                                    workflows=workflows,
+                                    shared_by_others=shared_by_others )
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def list_for_run( self, trans ):
+        """
+        Render workflow list for analysis view (just allows running workflow
+        or switching to management view)
+        """
+        user = trans.get_user()
+        workflows = trans.sa_session.query( model.StoredWorkflow ) \
+            .filter_by( user=user, deleted=False ) \
+            .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
+            .all()
+        shared_by_others = trans.sa_session \
+            .query( model.StoredWorkflowUserShareAssociation ) \
+            .filter_by( user=user ) \
+            .join( 'stored_workflow' ) \
+            .filter( model.StoredWorkflow.deleted == expression.false() ) \
+            .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
+            .all()
+        return trans.fill_template( "workflow/list_for_run.mako",
+                                    workflows=workflows,
+                                    shared_by_others=shared_by_others )
+
+    @web.expose
+    def list_published( self, trans, **kwargs ):
+        kwargs[ 'embedded' ] = True
+        grid = self.published_list_grid( trans, **kwargs )
+        if 'async' in kwargs:
+            return grid
+
+        # Render grid wrapped in panels
+        return trans.fill_template( "workflow/list_published.mako", embedded_grid=grid )
+
+    @web.expose
+    def display_by_username_and_slug( self, trans, username, slug, format='html' ):
+        """
+        Display workflow based on a username and slug. Format can be html, json, or json-download.
+        """
+
+        # Get workflow by username and slug. Security is handled by the display methods below.
+        session = trans.sa_session
+        user = session.query( model.User ).filter_by( username=username ).first()
+        if not user:
+            raise web.httpexceptions.HTTPNotFound()
+        stored_workflow = trans.sa_session.query( model.StoredWorkflow ).filter_by( user=user, slug=slug, deleted=False ).first()
+        if not stored_workflow:
+            raise web.httpexceptions.HTTPNotFound()
+        encoded_id = trans.security.encode_id( stored_workflow.id )
+
+        # Display workflow in requested format.
+        if format == 'html':
+            return self._display( trans, stored_workflow )
+        elif format == 'json':
+            return self.for_direct_import( trans, encoded_id )
+        elif format == 'json-download':
+            return self.export_to_file( trans, encoded_id )
+
+    @web.expose
+    def display_by_id( self, trans, id ):
+        """ Display workflow based on id. """
+        # Get workflow.
+        stored_workflow = self.get_stored_workflow( trans, id )
+        return self._display(trans, stored_workflow)
+
+    def _display( self, trans, stored_workflow ):
+        """ Diplay workflow as HTML page. """
+
+        if stored_workflow is None:
+            raise web.httpexceptions.HTTPNotFound()
+        # Security check raises error if user cannot access workflow.
+        self.security_check( trans, stored_workflow, False, True )
+        # Get data for workflow's steps.
+        self.get_stored_workflow_steps( trans, stored_workflow )
+        # Get annotations.
+        stored_workflow.annotation = self.get_item_annotation_str( trans.sa_session, stored_workflow.user, stored_workflow )
+        for step in stored_workflow.latest_workflow.steps:
+            step.annotation = self.get_item_annotation_str( trans.sa_session, stored_workflow.user, step )
+        # Get rating data.
+        user_item_rating = 0
+        if trans.get_user():
+            user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), stored_workflow )
+            if user_item_rating:
+                user_item_rating = user_item_rating.rating
+            else:
+                user_item_rating = 0
+        ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, stored_workflow )
+        return trans.fill_template_mako( "workflow/display.mako", item=stored_workflow, item_data=stored_workflow.latest_workflow.steps,
+                                         user_item_rating=user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
+
+    @web.expose
+    def get_item_content_async( self, trans, id ):
+        """ Returns item content in HTML format. """
+
+        stored = self.get_stored_workflow( trans, id, False, True )
+        if stored is None:
+            raise web.httpexceptions.HTTPNotFound()
+
+        # Get data for workflow's steps.
+        self.get_stored_workflow_steps( trans, stored )
+        # Get annotations.
+        stored.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, stored )
+        for step in stored.latest_workflow.steps:
+            step.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, step )
+        return trans.stream_template_mako( "/workflow/item_content.mako", item=stored, item_data=stored.latest_workflow.steps )
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def share( self, trans, id, email="", use_panels=False ):
+        msg = mtype = None
+        # Load workflow from database
+        stored = self.get_stored_workflow( trans, id )
+        if email:
+            other = trans.sa_session.query( model.User ) \
+                                    .filter( and_( model.User.table.c.email == email,
+                                                   model.User.table.c.deleted == expression.false() ) ) \
+                                    .first()
+            if not other:
+                mtype = "error"
+                msg = ( "User '%s' does not exist" % escape( email ) )
+            elif other == trans.get_user():
+                mtype = "error"
+                msg = ( "You cannot share a workflow with yourself" )
+            elif trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
+                    .filter_by( user=other, stored_workflow=stored ).count() > 0:
+                mtype = "error"
+                msg = ( "Workflow already shared with '%s'" % escape( email ) )
+            else:
+                share = model.StoredWorkflowUserShareAssociation()
+                share.stored_workflow = stored
+                share.user = other
+                session = trans.sa_session
+                session.add( share )
+                session.flush()
+                trans.set_message( "Workflow '%s' shared with user '%s'" % ( escape( stored.name ), escape( other.email ) ) )
+                return trans.response.send_redirect( url_for( controller='workflow', action='sharing', id=id ) )
+        return trans.fill_template( "/ind_share_base.mako",
+                                    message=msg,
+                                    messagetype=mtype,
+                                    item=stored,
+                                    email=email,
+                                    use_panels=use_panels )
+
+    @web.expose
+    @web.require_login( "Share or export Galaxy workflows" )
+    def sharing( self, trans, id, **kwargs ):
+        """ Handle workflow sharing. """
+        session = trans.sa_session
+        if 'unshare_me' in kwargs:
+            # Remove self from shared associations with workflow.
+            stored = self.get_stored_workflow(trans, id, False, True)
+            association = session.query( model.StoredWorkflowUserShareAssociation ) \
+                                 .filter_by( user=trans.user, stored_workflow=stored ).one()
+            session.delete( association )
+            session.flush()
+            return self.list( trans )
+        else:
+            # Get session and workflow.
+            stored = self.get_stored_workflow( trans, id )
+            session.add( stored )
+
+            # Do operation on workflow.
+            if 'make_accessible_via_link' in kwargs:
+                self._make_item_accessible( trans.sa_session, stored )
+            elif 'make_accessible_and_publish' in kwargs:
+                self._make_item_accessible( trans.sa_session, stored )
+                stored.published = True
+            elif 'publish' in kwargs:
+                stored.published = True
+            elif 'disable_link_access' in kwargs:
+                stored.importable = False
+            elif 'unpublish' in kwargs:
+                stored.published = False
+            elif 'disable_link_access_and_unpublish' in kwargs:
+                stored.importable = stored.published = False
+            elif 'unshare_user' in kwargs:
+                user = session.query( model.User ).get( trans.security.decode_id( kwargs[ 'unshare_user' ] ) )
+                if not user:
+                    error( "User not found for provided id" )
+                association = session.query( model.StoredWorkflowUserShareAssociation ) \
+                                     .filter_by( user=user, stored_workflow=stored ).one()
+                session.delete( association )
+
+            # Legacy issue: workflows made accessible before recent updates may not have a slug. Create a slug for any workflow that needs one.
+            if stored.importable and not stored.slug:
+                self._make_item_accessible( trans.sa_session, stored )
+
+            session.flush()
+            return trans.fill_template( "/workflow/sharing.mako", use_panels=True, item=stored )
+
+    @web.expose
+    @web.require_login( "to import a workflow", use_panels=True )
+    def imp( self, trans, id, **kwargs ):
+        """Imports a workflow shared by other users."""
+        # Set referer message.
+        referer = trans.request.referer
+        if referer:
+            referer_message = "<a href='%s'>return to the previous page</a>" % escape(referer)
+        else:
+            referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
+
+        # Do import.
+        stored = self.get_stored_workflow( trans, id, check_ownership=False )
+        if stored.importable is False:
+            return trans.show_error_message( "The owner of this workflow has disabled imports via this link.<br>You can %s" % referer_message, use_panels=True )
+        elif stored.deleted:
+            return trans.show_error_message( "You can't import this workflow because it has been deleted.<br>You can %s" % referer_message, use_panels=True )
+        self._import_shared_workflow( trans, stored )
+
+        # Redirect to load galaxy frames.
+        return trans.show_ok_message(
+            message="""Workflow "%s" has been imported. <br>You can <a href="%s">start using this workflow</a> or %s."""
+            % ( stored.name, web.url_for( controller='workflow' ), referer_message ), use_panels=True )
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def rename( self, trans, id, new_name=None, **kwargs ):
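+        """ Rename a stored workflow and its latest revision, or render the rename form. """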
+        stored = self.get_stored_workflow( trans, id )
+        if new_name is not None:
+            san_new_name = sanitize_html( new_name )
+            stored.name = san_new_name
+            stored.latest_workflow.name = san_new_name
+            trans.sa_session.flush()
+            trans.set_message( "Workflow renamed to '%s'." % san_new_name )
+            return self.list( trans )
+        else:
+            return form( url_for(controller='workflow', action='rename', id=trans.security.encode_id(stored.id) ),
+                         "Rename workflow",
+                         submit_text="Rename",
+                         use_panels=True
+                         ).add_text( "new_name", "Workflow Name", value=to_unicode( stored.name ) )
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def rename_async( self, trans, id, new_name=None, **kwargs ):
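+        """ Rename a workflow asynchronously and return the new name. """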
+        stored = self.get_stored_workflow( trans, id )
+        if new_name:
+            san_new_name = sanitize_html( new_name )
+            stored.name = san_new_name
+            stored.latest_workflow.name = san_new_name
+            trans.sa_session.flush()
+            return stored.name
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
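+        """ Set a workflow's annotation asynchronously and return the sanitized annotation. """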
+        stored = self.get_stored_workflow( trans, id )
+        if new_annotation:
+            # Sanitize annotation before adding it.
+            new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
+            self.add_item_annotation( trans.sa_session, trans.get_user(), stored, new_annotation )
+            trans.sa_session.flush()
+            return new_annotation
+
+    @web.expose
+    @web.require_login( "rate items" )
+    @web.json
+    def rate_async( self, trans, id, rating ):
+        """ Rate a workflow asynchronously and return updated community data. """
+
+        stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
+        if not stored:
+            return trans.show_error_message( "The specified workflow does not exist." )
+
+        # Rate workflow.
+        self.rate_item( trans.sa_session, trans.get_user(), stored, rating )
+
+        return self.get_ave_item_rating_data( trans.sa_session, stored )
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def set_accessible_async( self, trans, id=None, accessible=False ):
+        """ Set workflow's importable attribute and slug. """
+        stored = self.get_stored_workflow( trans, id )
+
+        # Only set if importable value would change; this prevents a change in the update_time unless the attribute really changed.
+        importable = accessible in ['True', 'true', 't', 'T']
+        if stored and stored.importable != importable:
+            if importable:
+                self._make_item_accessible( trans.sa_session, stored )
+            else:
+                stored.importable = importable
+            trans.sa_session.flush()
+        return
+
+    @web.expose
+    def get_embed_html_async( self, trans, id ):
+        """ Returns HTML for embedding a workflow in a page. """
+
+        # TODO: a user should be able to embed any item they have access to; see display_by_username_and_slug for security code.
+        stored = self.get_stored_workflow( trans, id )
+        if stored:
+            return "Embedded Workflow '%s'" % stored.name
+
+    @web.expose
+    @web.json
+    @web.require_login( "use Galaxy workflows" )
+    def get_name_and_link_async( self, trans, id=None ):
+        """ Returns workflow's name and link. """
+        stored = self.get_stored_workflow( trans, id )
+
+        return_dict = { "name": stored.name,
+                        "link": url_for(controller='workflow',
+                                        action="display_by_username_and_slug",
+                                        username=stored.user.username,
+                                        slug=stored.slug ) }
+        return return_dict
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def gen_image( self, trans, id ):
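+        """ Render a workflow as a standalone SVG image. """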
+        stored = self.get_stored_workflow( trans, id, check_ownership=True )
+        try:
+            svg = self._workflow_to_svg_canvas( trans, stored )
+        except Exception:
+            status = 'error'
+            message = 'Galaxy is unable to create the SVG image. Please check your workflow, there might be missing tools.'
+            return trans.fill_template( "/workflow/sharing.mako", use_panels=True, item=stored, status=status, message=message )
+        trans.response.set_content_type("image/svg+xml")
+        s = STANDALONE_SVG_TEMPLATE % svg.tostring()
+        return s.encode('utf-8')
+
+    @web.expose
+    @web.require_login( "use Galaxy workflows" )
+    def copy( self, trans, id, save_as_name=None ):
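+        """ Create a copy of a workflow owned by or shared with the current user. """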
+        # Get workflow to copy.
+        stored = self.get_stored_workflow( trans, id, check_ownership=False )
+        user = trans.get_user()
+        if stored.user == user:
+            owner = True
+        else:
+            if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
+                    .filter_by( user=user, stored_workflow=stored ).count() == 0:
+                error( "Workflow is not owned by or shared with current user" )
+            owner = False
+
+        # Copy.
+        new_stored = model.StoredWorkflow()
+        if save_as_name:
+            new_stored.name = '%s' % save_as_name
+        else:
+            new_stored.name = "Copy of '%s'" % stored.name
+        new_stored.latest_workflow = stored.latest_workflow
+        # Copy annotation.
+        annotation_obj = self.get_item_annotation_obj( trans.sa_session, stored.user, stored )
+        if annotation_obj:
+            self.add_item_annotation( trans.sa_session, trans.get_user(), new_stored, annotation_obj.annotation )
+        new_stored.copy_tags_from(trans.user, stored)
+        if not owner:
+            new_stored.name += " shared by '%s'" % stored.user.email
+        new_stored.user = user
+        # Persist
+        session = trans.sa_session
+        session.add( new_stored )
+        session.flush()
+        # Display the management page
+        trans.set_message( 'Created new workflow with name "%s"' % escape( new_stored.name ) )
+        return self.list( trans )
+
+    @web.expose
+    @web.require_login( "create workflows" )
+    def create( self, trans, workflow_name=None, workflow_annotation="" ):
+        """
+        Create a new stored workflow with name `workflow_name`.
+        """
+        user = trans.get_user()
+        if workflow_name is not None:
+            # Create the new stored workflow
+            stored_workflow = model.StoredWorkflow()
+            stored_workflow.name = workflow_name
+            stored_workflow.user = user
+            self.create_item_slug( trans.sa_session, stored_workflow )
+            # And the first (empty) workflow revision
+            workflow = model.Workflow()
+            workflow.name = workflow_name
+            workflow.stored_workflow = stored_workflow
+            stored_workflow.latest_workflow = workflow
+            # Add annotation.
+            workflow_annotation = sanitize_html( workflow_annotation, 'utf-8', 'text/html' )
+            self.add_item_annotation( trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation )
+            # Persist
+            session = trans.sa_session
+            session.add( stored_workflow )
+            session.flush()
+            return self.editor( trans, id=trans.security.encode_id(stored_workflow.id ))
+        else:
+            return form( url_for(controller="workflow", action="create"), "Create New Workflow", submit_text="Create", use_panels=True ) \
+                .add_text( "workflow_name", "Workflow Name", value="Unnamed workflow" ) \
+                .add_text( "workflow_annotation",
+                           "Workflow Annotation",
+                           value="",
+                           help="A description of the workflow; annotation is shown alongside shared or published workflows." )
+
+    @web.json
+    def save_workflow_as(self, trans, workflow_name, workflow_data, workflow_annotation=""):
+        """
+            Creates a new workflow based on Save As command. It is a new workflow, but
+            is created with workflow_data already present.
+        """
+        user = trans.get_user()
+        if workflow_name is not None:
+            workflow_contents_manager = workflows.WorkflowContentsManager(trans.app)
+            stored_workflow = model.StoredWorkflow()
+            stored_workflow.name = workflow_name
+            stored_workflow.user = user
+            self.create_item_slug(trans.sa_session, stored_workflow)
+            workflow = model.Workflow()
+            workflow.name = workflow_name
+            workflow.stored_workflow = stored_workflow
+            stored_workflow.latest_workflow = workflow
+            # Add annotation.
+            workflow_annotation = sanitize_html( workflow_annotation, 'utf-8', 'text/html' )
+            self.add_item_annotation( trans.sa_session, trans.get_user(), stored_workflow, workflow_annotation )
+
+            # Persist
+            session = trans.sa_session
+            session.add( stored_workflow )
+            session.flush()
+
+            try:
+                workflow, errors = workflow_contents_manager.update_workflow_from_dict(
+                    trans,
+                    stored_workflow,
+                    workflow_data,
+                )
+            except workflows.MissingToolsException as e:
+                return dict(
+                    name=e.workflow.name,
+                    message=("This workflow includes missing or invalid tools. "
+                             "It cannot be saved until the following steps are removed or the missing tools are enabled."),
+                    errors=e.errors,
+                )
+            return (trans.security.encode_id(stored_workflow.id))
+        else:
+            # This is an error state: 'Save As' must be given a workflow_name.
+            log.error("Error in Save As workflow: no name.")
+
+    @web.expose
+    def delete( self, trans, id=None ):
+        """
+        Mark a workflow as deleted
+        """
+        # Load workflow from database
+        stored = self.get_stored_workflow( trans, id )
+        # Mark as deleted and save
+        stored.deleted = True
+        trans.user.stored_workflow_menu_entries = [entry for entry in trans.user.stored_workflow_menu_entries if entry.stored_workflow != stored]
+        trans.sa_session.add( stored )
+        trans.sa_session.flush()
+        # Display the management page
+        trans.set_message( "Workflow '%s' deleted" % escape( stored.name ) )
+        return self.list( trans )
+
+    @web.expose
+    @web.require_login( "edit workflows" )
+    def editor( self, trans, id=None ):
+        """
+        Render the main workflow editor interface. The canvas is embedded as
+        an iframe (necessary for scrolling to work properly), which is
+        rendered by `editor_canvas`.
+        """
+        if not id:
+            error( "Invalid workflow id" )
+        stored = self.get_stored_workflow( trans, id )
+        workflows = trans.sa_session.query( model.StoredWorkflow ) \
+            .filter_by( user=trans.user, deleted=False ) \
+            .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
+            .all()
+        return trans.fill_template( "workflow/editor.mako", workflows=workflows, stored=stored, annotation=self.get_item_annotation_str( trans.sa_session, trans.user, stored ) )
+
+    @web.json
+    def editor_form_post( self, trans, type=None, content_id=None, annotation=None, label=None, **incoming ):
+        """
+        Accepts a tool state and incoming values, and generates a new tool
+        form and some additional information, packed into a json dictionary.
+        This is used for the form shown in the right pane when a node
+        is selected.
+        """
+        tool_state = incoming.pop( 'tool_state' )
+        module = module_factory.from_dict( trans, {
+            'type': type,
+            'content_id': content_id,
+            'tool_state': tool_state,
+            'label': label or None
+        } )
+        module.update_state( incoming )
+        return {
+            'label': module.label,
+            'tool_state': module.get_state(),
+            'data_inputs': module.get_data_inputs(),
+            'data_outputs': module.get_data_outputs(),
+            'tool_errors': module.get_errors(),
+            'form_html': module.get_config_form(),
+            'annotation': annotation
+        }
+
+    @web.json
+    def get_new_module_info( self, trans, type, **kwargs ):
+        """
+        Get the info for a new instance of a module initialized with default
+        parameters (any keyword arguments will be passed along to the module).
+        Result includes data inputs and outputs, html representation
+        of the initial form, and the initial tool state (with default values).
+        This is called asynchronously whenever a new node is added.
+        """
+        trans.workflow_building_mode = True
+        module = module_factory.new( trans, type, **kwargs )
+        tool_model = None
+        return {
+            'type': module.type,
+            'name': module.get_name(),
+            'content_id': module.get_content_id(),
+            'tool_state': module.get_state(),
+            'tool_model': tool_model,
+            'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ),
+            'data_inputs': module.get_data_inputs(),
+            'data_outputs': module.get_data_outputs(),
+            'form_html': module.get_config_form(),
+            'annotation': ""
+        }
+
+    @web.json
+    def load_workflow( self, trans, id ):
+        """
+        Get the latest Workflow for the StoredWorkflow identified by `id` and
+        encode it as a json string that can be read by the workflow editor
+        web interface.
+        """
+        trans.workflow_building_mode = True
+        stored = self.get_stored_workflow( trans, id, check_ownership=True, check_accessible=False )
+        workflow_contents_manager = workflows.WorkflowContentsManager(trans.app)
+        return workflow_contents_manager.workflow_to_dict( trans, stored, style="editor" )
+
+    @web.json
+    def save_workflow( self, trans, id, workflow_data ):
+        """
+        Save the workflow described by `workflow_data` with id `id`.
+        """
+        # Get the stored workflow
+        stored = self.get_stored_workflow( trans, id )
+        workflow_contents_manager = workflows.WorkflowContentsManager(trans.app)
+        try:
+            workflow, errors = workflow_contents_manager.update_workflow_from_dict(
+                trans,
+                stored,
+                workflow_data,
+            )
+        except workflows.MissingToolsException as e:
+            return dict(
+                name=e.workflow.name,
+                message="This workflow includes missing or invalid tools. "
+                        "It cannot be saved until the following steps are removed or the missing tools are enabled.",
+                errors=e.errors,
+            )
+
+        if workflow.has_errors:
+            errors.append( "Some steps in this workflow have validation errors" )
+        if workflow.has_cycles:
+            errors.append( "This workflow contains cycles" )
+        if errors:
+            rval = dict( message="Workflow saved, but will not be runnable due to the following errors",
+                         errors=errors )
+        else:
+            rval = dict( message="Workflow saved" )
+        rval['name'] = workflow.name
+        return rval
+
+    @web.expose
+    @web.require_login( "use workflows" )
+    def export_to_myexp( self, trans, id, myexp_username, myexp_password ):
+        """
+        Exports a workflow to myExperiment website.
+        """
+        trans.workflow_building_mode = True
+        stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
+
+        # Convert workflow to dict.
+        workflow_dict = self._workflow_to_dict( trans, stored )
+
+        #
+        # Create and submit workflow myExperiment request.
+        #
+
+        # Create workflow content JSON.
+        workflow_content = json.dumps( workflow_dict, indent=4, sort_keys=True )
+
+        # Create myExperiment request.
+        request_raw = trans.fill_template(
+            "workflow/myexp_export.mako",
+            workflow_name=workflow_dict['name'],
+            workflow_description=workflow_dict['annotation'],
+            workflow_content=workflow_content,
+            workflow_svg=self._workflow_to_svg_canvas( trans, stored ).tostring()
+        )
+        # strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
+        request = unicodify( request_raw.strip(), 'utf-8' )
+
+        # Do request and get result.
+        auth_header = base64.b64encode( '%s:%s' % ( myexp_username, myexp_password ))
+        headers = { "Content-type": "text/xml", "Accept": "text/xml", "Authorization": "Basic %s" % auth_header }
+        myexp_url = trans.app.config.get( "myexperiment_url", self.__myexp_url )
+        conn = httplib.HTTPConnection( myexp_url )
+        # NOTE: blocks web thread.
+        conn.request("POST", "/workflow.xml", request, headers)
+        response = conn.getresponse()
+        response_data = response.read()
+        conn.close()
+
+        # Do simple parse of response to see if export successful and provide user feedback.
+        parser = SingleTagContentsParser( 'id' )
+        parser.feed( response_data )
+        myexp_workflow_id = parser.tag_content
+        workflow_list_str = " <br>Return to <a href='%s'>workflow list." % url_for( controller='workflow', action='list' )
+        if myexp_workflow_id:
+            return trans.show_message(
+                """Workflow '%s' successfully exported to myExperiment. <br/>
+                <a href="http://%s/workflows/%s">Click here to view the workflow on myExperiment</a> %s
+                """ % ( stored.name, myexp_url, myexp_workflow_id, workflow_list_str ),
+                use_panels=True )
+        else:
+            return trans.show_error_message(
+                "Workflow '%s' could not be exported to myExperiment. Error: %s %s" %
+                ( stored.name, response_data, workflow_list_str ), use_panels=True )
+
+    @web.json_pretty
+    def for_direct_import( self, trans, id ):
+        """
+        Get the latest Workflow for the StoredWorkflow identified by `id` and
+        encode it as a json string that can be imported back into Galaxy
+
+        This has slightly different information from the above. In particular,
+        it does not attempt to decode forms and build UIs, it just stores
+        the raw state.
+        """
+        stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
+        return self._workflow_to_dict( trans, stored )
+
+    @web.json_pretty
+    def export_to_file( self, trans, id ):
+        """
+        Get the latest Workflow for the StoredWorkflow identified by `id` and
+        encode it as a json string that can be imported back into Galaxy
+
+        This has slightly different information from the above. In particular,
+        it does not attempt to decode forms and build UIs, it just stores
+        the raw state.
+        """
+
+        # Get workflow.
+        stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
+
+        # Stream workflow to file.
+        stored_dict = self._workflow_to_dict( trans, stored )
+        if not stored_dict:
+            # This workflow has a tool that's missing from the distribution
+            trans.response.status = 400
+            return "Workflow cannot be exported due to missing tools."
+        sname = stored.name
+        sname = ''.join( c if c in FILENAME_VALID_CHARS else '_' for c in sname )[0:150]
+        trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy-Workflow-%s.ga"' % ( sname )
+        trans.response.set_content_type( 'application/galaxy-archive' )
+        return stored_dict
+
+    @web.expose
+    def import_workflow( self, trans, cntrller='workflow', **kwd ):
+        """
+        Import a workflow by reading a URL, uploading a file, opening and reading the contents
+        of a local file, or receiving the textual representation of a workflow via HTTP.
+        """
+        url = kwd.get( 'url', '' )
+        workflow_text = kwd.get( 'workflow_text', '' )
+        message = str( escape( kwd.get( 'message', '' ) ) )
+        status = kwd.get( 'status', 'done' )
+        import_button = kwd.get( 'import_button', False )
+        # The special Galaxy integration landing page's URL on myExperiment
+        myexperiment_target_url = 'http://%s/galaxy?galaxy_url=%s' % \
+            ( trans.app.config.get( "myexperiment_url", "www.myexperiment.org" ), url_for('/', qualified=True) )
+        # The source of the workflow, used by myExperiment to indicate the workflow came from there.
+        workflow_source = kwd.get( 'workflow_source', 'uploaded file' )
+        # The following parameters will have values only if the workflow
+        # is being imported from a Galaxy tool shed repository.
+        tool_shed_url = kwd.get( 'tool_shed_url', '' )
+        repository_metadata_id = kwd.get( 'repository_metadata_id', '' )
+        add_to_menu = util.string_as_bool( kwd.get( 'add_to_menu', False ) )
+        # The workflow_name parameter is in the request only if the import originated
+        # from a Galaxy tool shed, in which case the value was encoded.
+        workflow_name = kwd.get( 'workflow_name', '' )
+        if workflow_name:
+            workflow_name = encoding_util.tool_shed_decode( workflow_name )
+        # The following parameters will have a value only if the import originated
+        # from a tool shed repository installed locally or from the API.
+        installed_repository_file = kwd.get( 'installed_repository_file', '' )
+        repository_id = kwd.get( 'repository_id', '' )
+        if installed_repository_file and not import_button:
+            workflow_file = open( installed_repository_file, 'rb' )
+            workflow_text = workflow_file.read()
+            workflow_file.close()
+            import_button = True
+        if tool_shed_url and not import_button:
+            # Use urllib (send another request to the tool shed) to retrieve the workflow.
+            params = dict( repository_metadata_id=repository_metadata_id,
+                           workflow_name=encoding_util.tool_shed_encode( workflow_name ),
+                           open_for_url=True )
+            pathspec = [ 'workflow', 'import_workflow' ]
+            workflow_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+            import_button = True
+        if import_button:
+            workflow_data = None
+            if url:
+                # Load workflow from external URL
+                # NOTE: blocks the web thread.
+                try:
+                    workflow_data = urllib2.urlopen( url ).read()
+                except Exception as e:
+                    message = "Failed to open URL: <b>%s</b><br>Exception: %s" % ( escape( url ), escape( str( e ) ) )
+                    status = 'error'
+            elif workflow_text:
+                # This case occurs when the workflow_text was sent via http from the tool shed.
+                workflow_data = workflow_text
+            else:
+                # Load workflow from browsed file.
+                file_data = kwd.get( 'file_data', '' )
+                if file_data in ( '', None ):
+                    message = 'No exported Galaxy workflow files were selected.'
+                    status = 'error'
+                else:
+                    uploaded_file = file_data.file
+                    uploaded_file_name = uploaded_file.name
+                    # uploaded_file_filename = file_data.filename
+                    if os.path.getsize( os.path.abspath( uploaded_file_name ) ) > 0:
+                        # We're reading the file as text so we can re-use the existing code below.
+                        # This may not be ideal...
+                        workflow_data = uploaded_file.read()
+                    else:
+                        message = 'You attempted to upload an empty file.'
+                        status = 'error'
+            if workflow_data:
+                # Convert incoming workflow data from json
+                try:
+                    data = json.loads( workflow_data )
+                except Exception:
+                    data = None
+                    message = "The data content does not appear to be a Galaxy workflow."
+                    status = 'error'
+                    log.exception("Error importing workflow.")
+                if data:
+                    # Create workflow if possible.  If a required tool is not available in the local
+                    # Galaxy instance, the tool information will be available in the step_dict.
+                    src = None
+                    if cntrller != 'api':
+                        src = workflow_source
+                    workflow, missing_tool_tups = self._workflow_from_dict( trans, data, source=src, add_to_menu=add_to_menu )
+                    workflow = workflow.latest_workflow
+                    if workflow_name:
+                        workflow.name = workflow_name
+                    # Provide user feedback and show workflow list.
+                    if workflow.has_errors:
+                        message += "Imported, but some steps in this workflow have validation errors. "
+                        status = "error"
+                    if workflow.has_cycles:
+                        message += "Imported, but this workflow contains cycles.  "
+                        status = "error"
+                    if not ( workflow.has_errors or workflow.has_cycles ):
+                        message += "Workflow <b>%s</b> imported successfully.  " % escape( workflow.name )
+                    if missing_tool_tups:
+                        if trans.user_is_admin():
+                            # A required tool is not available in the local Galaxy instance.
+                            # TODO: It would sure be nice to be able to redirect to a mako template here that displays a nice
+                            # page including the links to the configured tool sheds instead of this message, but trying
+                            # to get the panels back is a nightmare since workflow eliminates the Galaxy panels.  Someone
+                            # involved in workflow development needs to figure out what it will take to be able to switch
+                            # back and forth between Galaxy (with panels) and the workflow view (without panels), having
+                            # the Galaxy panels displayed whenever in Galaxy.
+                            message += "The workflow requires the following tools that are not available in this Galaxy instance."
+                            message += "You can likely install the required tools from one of the Galaxy tool sheds listed below.<br/>"
+                            for missing_tool_tup in missing_tool_tups:
+                                missing_tool_id, missing_tool_name, missing_tool_version, step_id = missing_tool_tup
+                                message += "<b>Tool name</b> %s, <b>id</b> %s, <b>version</b> %s<br/>" % (
+                                           escape( missing_tool_name ),
+                                           escape( missing_tool_id ),
+                                           escape( missing_tool_version ) )
+                            message += "<br/>"
+                            for shed_name, shed_url in trans.app.tool_shed_registry.tool_sheds.items():
+                                if shed_url.endswith( '/' ):
+                                    shed_url = shed_url.rstrip( '/' )
+                                url = '%s/repository/find_tools?galaxy_url=%s' % ( shed_url, url_for( '/', qualified=True ) )
+                                if missing_tool_tups:
+                                    url += '&tool_id='
+                                for missing_tool_tup in missing_tool_tups:
+                                    missing_tool_id = missing_tool_tup[0]
+                                    url += '%s,' % escape( missing_tool_id )
+                                message += '<a href="%s">%s</a><br/>' % ( url, shed_name )
+                                status = 'error'
+                            if installed_repository_file or tool_shed_url:
+                                # Another Galaxy panels hack: The request did not originate from the Galaxy
+                                # workflow view, so we don't need to render the Galaxy panels.
+                                action = 'center'
+                            else:
+                                # Another Galaxy panels hack: The request originated from the Galaxy
+                                # workflow view, so we need to render the Galaxy panels.
+                                action = 'index'
+                            return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                              action=action,
+                                                                              message=message,
+                                                                              status=status ) )
+                        else:
+                            # TODO: Figure out what to do here...
+                            pass
+                    if tool_shed_url:
+                        # We've received the textual representation of a workflow from a Galaxy tool shed.
+                        message = "Workflow <b>%s</b> imported successfully." % escape( workflow.name )
+                        url = '%s/workflow/view_workflow?repository_metadata_id=%s&workflow_name=%s&message=%s' % \
+                            ( tool_shed_url, repository_metadata_id, encoding_util.tool_shed_encode( workflow_name ), message )
+                        return trans.response.send_redirect( url )
+                    elif installed_repository_file:
+                        # The workflow was read from a file included with an installed tool shed repository.
+                        message = "Workflow <b>%s</b> imported successfully." % escape( workflow.name )
+                        if cntrller == 'api':
+                            return status, message
+                        return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+                                                                          action='browse_repository',
+                                                                          id=repository_id,
+                                                                          message=message,
+                                                                          status=status ) )
+                    return self.list( trans )
+        if cntrller == 'api':
+            return status, message
+        return trans.fill_template( "workflow/import.mako",
+                                    url=url,
+                                    message=message,
+                                    status=status,
+                                    use_panels=True,
+                                    myexperiment_target_url=myexperiment_target_url )
+
+    @web.expose
+    def build_from_current_history( self, trans, job_ids=None, dataset_ids=None, dataset_collection_ids=None, workflow_name=None, dataset_names=None, dataset_collection_names=None ):
+        user = trans.get_user()
+        history = trans.get_history()
+        if not user:
+            return trans.show_error_message( "Must be logged in to create workflows" )
+        if ( job_ids is None and dataset_ids is None ) or workflow_name is None:
+            jobs, warnings = summarize( trans )
+            # Render
+            return trans.fill_template(
+                "workflow/build_from_current_history.mako",
+                jobs=jobs,
+                warnings=warnings,
+                history=history
+            )
+        else:
+            stored_workflow = extract_workflow(
+                trans,
+                user=user,
+                job_ids=job_ids,
+                dataset_ids=dataset_ids,
+                dataset_collection_ids=dataset_collection_ids,
+                workflow_name=workflow_name,
+                dataset_names=dataset_names,
+                dataset_collection_names=dataset_collection_names
+            )
+            # Index page with message
+            workflow_id = trans.security.encode_id( stored_workflow.id )
+            return trans.show_message( 'Workflow "%s" created from current history. '
+                                       'You can <a href="%s" target="_parent">edit</a> or <a href="%s">run</a> the workflow.'
+                                       % ( escape( workflow_name ), url_for( controller='workflow', action='editor', id=workflow_id ),
+                                           url_for( controller='workflow', action='run', id=workflow_id ) ) )
+
+    @web.expose
+    def run( self, trans, id, history_id=None, **kwargs ):
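+        """ Render the 'run workflow' form for the workflow identified by `id` in the given history. """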
+        history = None
+        try:
+            if history_id is not None:
+                history_manager = histories.HistoryManager( trans.app )
+                history = history_manager.get_owned( trans.security.decode_id( history_id ), trans.user, current_history=trans.history )
+            else:
+                history = trans.get_history()
+            if history is None:
+                raise exceptions.MessageException( 'History unavailable. Please specify a valid history id' )
+        except Exception as e:
+            raise exceptions.MessageException( '[history_id=%s] Failed to retrieve history. %s.' % ( history_id, str( e ) ) )
+        trans.history = history
+        workflow_manager = workflows.WorkflowsManager( trans.app )
+        workflow_contents_manager = workflows.WorkflowContentsManager( trans.app )
+        stored = workflow_manager.get_stored_accessible_workflow( trans, id )
+        workflow_dict = workflow_contents_manager.workflow_to_dict( trans, stored, style='run' )
+        return trans.fill_template( 'workflow/run.mako', workflow_dict=workflow_dict )
+
+    def get_item( self, trans, id ):
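+        """ Return the stored workflow identified by `id`. """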
+        return self.get_stored_workflow( trans, id )
+
+    @web.expose
+    def tag_outputs( self, trans, id, **kwargs ):
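+        """ Render and handle the form for marking step outputs as workflow outputs. """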
+        stored = self.get_stored_workflow( trans, id, check_ownership=False )
+        user = trans.get_user()
+        if stored.user != user:
+            if trans.sa_session.query( model.StoredWorkflowUserShareAssociation ) \
+                    .filter_by( user=user, stored_workflow=stored ).count() == 0:
+                error( "Workflow is not owned by or shared with current user" )
+        # Get the latest revision
+        workflow = stored.latest_workflow
+        # It is possible for a workflow to have 0 steps
+        if len( workflow.steps ) == 0:
+            error( "Workflow cannot be tagged for outputs because it does not have any steps" )
+        if workflow.has_cycles:
+            error( "Workflow cannot be tagged for outputs because it contains cycles" )
+        if workflow.has_errors:
+            error( "Workflow cannot be tagged for outputs because of validation errors in some steps" )
+        # Build the state for each step
+        errors = {}
+        has_upgrade_messages = False
+        # has_errors is never used
+        # has_errors = False
+        if kwargs:
+            # If kwargs were provided, the states for each step should have
+            # been POSTed
+            for step in workflow.steps:
+                if step.type == 'tool':
+                    # Extract just the output flags for this step.
+                    p = "%s|otag|" % step.id
+                    l = len(p)
+                    outputs = [k[l:] for ( k, v ) in kwargs.iteritems() if k.startswith( p )]
+                    if step.workflow_outputs:
+                        for existing_output in step.workflow_outputs:
+                            if existing_output.output_name not in outputs:
+                                trans.sa_session.delete(existing_output)
+                            else:
+                                outputs.remove(existing_output.output_name)
+                    for outputname in outputs:
+                        m = model.WorkflowOutput(workflow_step_id=int(step.id), output_name=outputname)
+                        trans.sa_session.add(m)
+        # Prepare each step
+        trans.sa_session.flush()
+        module_injector = WorkflowModuleInjector( trans )
+        for step in workflow.steps:
+            step.upgrade_messages = {}
+            # Construct modules
+            module_injector.inject( step )
+            if step.upgrade_messages:
+                has_upgrade_messages = True
+            if step.type == 'tool' or step.type is None:
+                # Error dict
+                if step.tool_errors:
+                    errors[step.id] = step.tool_errors
+        # Render the form
+        return trans.fill_template(
+            "workflow/tag_outputs.mako",
+            steps=workflow.steps,
+            workflow=stored,
+            has_upgrade_messages=has_upgrade_messages,
+            errors=errors,
+            incoming=kwargs
+        )
+
+    @web.expose
+    def configure_menu( self, trans, workflow_ids=None ):
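+        """ Configure which of the user's workflows appear in the tool panel menu. """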
+        user = trans.get_user()
+        if trans.request.method == "POST":
+            if workflow_ids is None:
+                workflow_ids = []
+            elif type( workflow_ids ) != list:
+                workflow_ids = [ workflow_ids ]
+            sess = trans.sa_session
+            # This explicit remove seems like a hack, need to figure out
+            # how to make the association do it automatically.
+            for m in user.stored_workflow_menu_entries:
+                sess.delete( m )
+            user.stored_workflow_menu_entries = []
+            q = sess.query( model.StoredWorkflow )
+            # To ensure id list is unique
+            seen_workflow_ids = set()
+            for id in workflow_ids:
+                if id in seen_workflow_ids:
+                    continue
+                else:
+                    seen_workflow_ids.add( id )
+                m = model.StoredWorkflowMenuEntry()
+                m.stored_workflow = q.get( id )
+                user.stored_workflow_menu_entries.append( m )
+            sess.flush()
+            message = "Menu updated"
+            refresh_frames = ['tools']
+        else:
+            message = None
+            refresh_frames = []
+        user = trans.get_user()
+        ids_in_menu = set( [ x.stored_workflow_id for x in user.stored_workflow_menu_entries ] )
+        workflows = trans.sa_session.query( model.StoredWorkflow ) \
+            .filter_by( user=user, deleted=False ) \
+            .order_by( desc( model.StoredWorkflow.table.c.update_time ) ) \
+            .all()
+        shared_by_others = trans.sa_session \
+            .query( model.StoredWorkflowUserShareAssociation ) \
+            .filter_by( user=user ) \
+            .filter( model.StoredWorkflow.deleted == expression.false() ) \
+            .all()
+        return trans.fill_template( "workflow/configure_menu.mako",
+                                    workflows=workflows,
+                                    shared_by_others=shared_by_others,
+                                    ids_in_menu=ids_in_menu,
+                                    message=message,
+                                    refresh_frames=refresh_frames )
+
+    def _workflow_to_svg_canvas( self, trans, stored ):
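+        """ Populate a WorkflowCanvas from the workflow's steps and return the rendered canvas. """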
+        workflow = stored.latest_workflow
+        workflow_canvas = WorkflowCanvas()
+        for step in workflow.steps:
+            # Load from database representation
+            module = module_factory.from_workflow_step( trans, step )
+            module_name = module.get_name()
+            module_data_inputs = module.get_data_inputs()
+            module_data_outputs = module.get_data_outputs()
+            workflow_canvas.populate_data_for_step(
+                step,
+                module_name,
+                module_data_inputs,
+                module_data_outputs,
+            )
+        workflow_canvas.add_steps()
+        return workflow_canvas.finish()
+
+
+def _build_workflow_on_str(instance_ds_names):
+    # Returns suffix for new histories based on multi input iteration
+    num_multi_inputs = len(instance_ds_names)
+    if num_multi_inputs == 0:
+        return ""
+    elif num_multi_inputs == 1:
+        return " on %s" % instance_ds_names[0]
+    else:
+        return " on %s and %s" % (", ".join(instance_ds_names[0:-1]), instance_ds_names[-1])
+
+
+def _expand_multiple_inputs(kwargs):
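+    """
+    Yield a (kwargs, multi_input_keys) tuple for each workflow invocation
+    implied by the multi-inputs: 'matched' multi-inputs are paired up
+    positionally, while all others are expanded into every combination.
+    """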
+    (single_inputs, matched_multi_inputs, multiplied_multi_inputs) = _split_inputs(kwargs)
+
+    # Build up every combination of inputs to be run together.
+    input_combos = _extend_with_matched_combos(single_inputs, matched_multi_inputs)
+    input_combos = _extend_with_multiplied_combos(input_combos, multiplied_multi_inputs)
+
+    # Input names that are specified multiple times
+    multi_input_keys = matched_multi_inputs.keys() + multiplied_multi_inputs.keys()
+
+    for input_combo in input_combos:
+        for key, value in input_combo.iteritems():
+            kwargs[key] = value
+        yield (kwargs, multi_input_keys)
+
+
+def _extend_with_matched_combos(single_inputs, multi_inputs):
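+    """
+    Extend the single inputs with the i-th value of every matched multi-input;
+    all matched multi-inputs must select the same number of values.
+    """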
+    if len(multi_inputs) == 0:
+        return [single_inputs]
+
+    matched_multi_inputs = []
+
+    first_multi_input_key = multi_inputs.keys()[0]
+    first_multi_value = multi_inputs.get(first_multi_input_key)
+
+    for value in first_multi_value:
+        new_inputs = _copy_and_extend_inputs(single_inputs, first_multi_input_key, value)
+        matched_multi_inputs.append(new_inputs)
+
+    for multi_input_key, multi_input_values in multi_inputs.iteritems():
+        if multi_input_key == first_multi_input_key:
+            continue
+        if len(multi_input_values) != len(first_multi_value):
+            raise Exception("Failed to match up multi-select inputs, must select equal number of data files in each multiselect")
+        for index, value in enumerate(multi_input_values):
+            matched_multi_inputs[index][multi_input_key] = value
+    return matched_multi_inputs
+
+
+def _extend_with_multiplied_combos(input_combos, multi_inputs):
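+    """ Extend each input combo with every value of every multiplied multi-input (Cartesian product). """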
+    combos = input_combos
+
+    for multi_input_key, multi_input_value in multi_inputs.iteritems():
+        iter_combos = []
+
+        for combo in combos:
+            for input_value in multi_input_value:
+                iter_combos.append(_copy_and_extend_inputs(combo, multi_input_key, input_value))
+
+        combos = iter_combos
+    return combos
+
+
+def _copy_and_extend_inputs(inputs, key, value):
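+    """ Return a shallow copy of `inputs` with `key` set to `value`. """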
+    new_inputs = dict(inputs)
+    new_inputs[key] = value
+    return new_inputs
+
+
+def _split_inputs(kwargs):
+    """
+    """
+    input_keys = filter(lambda a: a.endswith('|input'), kwargs)
+    single_inputs = {}
+    matched_multi_inputs = {}
+    multiplied_multi_inputs = {}
+    for input_key in input_keys:
+        input_val = kwargs[input_key]
+        if isinstance(input_val, list):
+            input_base = input_key[:-len("|input")]
+            mode_key = "%s|multi_mode" % input_base
+            mode = kwargs.get(mode_key, "matched")
+            if mode == "matched":
+                matched_multi_inputs[input_key] = input_val
+            else:
+                multiplied_multi_inputs[input_key] = input_val
+        else:
+            single_inputs[input_key] = input_val
+    return (single_inputs, matched_multi_inputs, multiplied_multi_inputs)
diff --git a/lib/galaxy/webapps/reports/__init__.py b/lib/galaxy/webapps/reports/__init__.py
new file mode 100644
index 0000000..44642ab
--- /dev/null
+++ b/lib/galaxy/webapps/reports/__init__.py
@@ -0,0 +1,6 @@
+"""The Galaxy Reports application."""
+
+from galaxy.web.framework import url_for
+from galaxy.web.framework.decorators import expose
+
+__all__ = ('url_for', 'expose')
diff --git a/lib/galaxy/webapps/reports/app.py b/lib/galaxy/webapps/reports/app.py
new file mode 100644
index 0000000..7476144
--- /dev/null
+++ b/lib/galaxy/webapps/reports/app.py
@@ -0,0 +1,42 @@
+import logging
+import sys
+import time
+
+import galaxy.model
+from galaxy.web import security
+
+import config
+
+log = logging.getLogger( __name__ )
+
+
+class UniverseApplication( object ):
+    """Encapsulates the state of a Universe application"""
+    def __init__( self, **kwargs ):
+        log.debug( "python path is: %s", ", ".join( sys.path ) )
+        self.name = "reports"
+        # Read config file and check for errors
+        self.config = config.Configuration( **kwargs )
+        self.config.check()
+        config.configure_logging( self.config )
+        # Determine the database url
+        if self.config.database_connection:
+            db_url = self.config.database_connection
+        else:
+            db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+        # Setup the database engine and ORM
+        self.model = galaxy.model.mapping.init( self.config.file_path,
+                                                db_url,
+                                                self.config.database_engine_options,
+                                                create_tables=True )
+        if not self.config.database_connection:
+            self.targets_mysql = False
+        else:
+            self.targets_mysql = 'mysql' in self.config.database_connection
+        # Security helper
+        self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+        # used for cachebusting -- refactor this into a *SINGLE* UniverseApplication base.
+        self.server_starttime = int(time.time())
+
+    def shutdown( self ):
+        pass
diff --git a/lib/galaxy/webapps/reports/buildapp.py b/lib/galaxy/webapps/reports/buildapp.py
new file mode 100644
index 0000000..bba669d
--- /dev/null
+++ b/lib/galaxy/webapps/reports/buildapp.py
@@ -0,0 +1,152 @@
+"""
+Provides factory methods to assemble the Galaxy web application
+"""
+
+import atexit
+import logging
+import os
+
+from inspect import isclass
+
+from paste import httpexceptions
+
+from galaxy.util import asbool
+from galaxy.util.postfork import process_is_uwsgi
+from galaxy.webapps.util import build_template_error_formatters
+
+import galaxy.model
+import galaxy.model.mapping
+import galaxy.web.framework.webapp
+from galaxy.util.properties import load_app_properties
+
+log = logging.getLogger( __name__ )
+
+
+class ReportsWebApplication( galaxy.web.framework.webapp.WebApplication ):
+    pass
+
+
+def add_ui_controllers( webapp, app ):
+    """
+    Search for controllers in the 'galaxy.webapps.reports.controllers' module and add
+    them to the webapp.
+    """
+    from galaxy.web.base.controller import BaseUIController
+    import galaxy.webapps.reports.controllers
+    controller_dir = galaxy.webapps.reports.controllers.__path__[0]
+    for fname in os.listdir( controller_dir ):
+        if not fname.startswith( "_" ) and fname.endswith( ".py" ):
+            name = fname[:-3]
+            module_name = "galaxy.webapps.reports.controllers." + name
+            module = __import__( module_name )
+            for comp in module_name.split( "." )[1:]:
+                module = getattr( module, comp )
+            # Look for a controller inside the modules
+            for key in dir( module ):
+                T = getattr( module, key )
+                if isclass( T ) and T is not BaseUIController and issubclass( T, BaseUIController ):
+                    webapp.add_ui_controller( name, T( app ) )
+
+
+def app_factory( global_conf, **kwargs ):
+    """Return a wsgi application serving the root object"""
+    # Create the Galaxy application unless passed in
+    kwargs = load_app_properties(
+        kwds=kwargs
+    )
+    if 'app' in kwargs:
+        app = kwargs.pop( 'app' )
+    else:
+        from galaxy.webapps.reports.app import UniverseApplication
+        app = UniverseApplication( global_conf=global_conf, **kwargs )
+    atexit.register( app.shutdown )
+    # Create the universe WSGI application
+    webapp = ReportsWebApplication( app, session_cookie='galaxyreportssession', name="reports" )
+    add_ui_controllers( webapp, app )
+    # These two routes handle our simple needs at the moment
+    webapp.add_route( '/{controller}/{action}', controller="root", action='index' )
+    webapp.add_route( '/{action}', controller='root', action='index' )
+    webapp.finalize_config()
+    # Wrap the webapp in some useful middleware
+    if kwargs.get( 'middleware', True ):
+        webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
+    if asbool( kwargs.get( 'static_enabled', True ) ):
+        webapp = wrap_in_static( webapp, global_conf, **kwargs )
+    # Close any pooled database connections before forking
+    try:
+        galaxy.model.mapping.metadata.bind.dispose()
+    except Exception:
+        log.exception("Unable to dispose of pooled galaxy model database connections.")
+    # Return
+    return webapp
+
+
+def wrap_in_middleware( app, global_conf, **local_conf ):
+    """Based on the configuration wrap `app` in a set of common and useful middleware."""
+    # Merge the global and local configurations
+    conf = global_conf.copy()
+    conf.update(local_conf)
+    debug = asbool( conf.get( 'debug', False ) )
+    # First put into place httpexceptions, which must be most closely
+    # wrapped around the application (it can interact poorly with
+    # other middleware):
+    app = httpexceptions.make_middleware( app, conf )
+    log.debug( "Enabling 'httpexceptions' middleware" )
+    # The recursive middleware allows for including requests in other
+    # requests or forwarding of requests, all on the server side.
+    if asbool(conf.get('use_recursive', True)):
+        from paste import recursive
+        app = recursive.RecursiveMiddleware( app, conf )
+        log.debug( "Enabling 'recursive' middleware" )
+    # Various debug middleware that can only be turned on if the debug
+    # flag is set, either because they are insecure or greatly hurt
+    # performance
+    if debug:
+        # Middleware to check for WSGI compliance
+        if asbool( conf.get( 'use_lint', True ) ):
+            from paste import lint
+            app = lint.make_middleware( app, conf )
+            log.debug( "Enabling 'lint' middleware" )
+        # Middleware to run the python profiler on each request
+        if asbool( conf.get( 'use_profile', False ) ):
+            import profile
+            app = profile.ProfileMiddleware( app, conf )
+            log.debug( "Enabling 'profile' middleware" )
+        # Middleware that intercepts print statements and shows them on the
+        # returned page
+        if asbool( conf.get( 'use_printdebug', True ) ):
+            from paste.debug import prints
+            app = prints.PrintDebugMiddleware( app, conf )
+            log.debug( "Enabling 'print debug' middleware" )
+    if debug and asbool( conf.get( 'use_interactive', False ) ) and not process_is_uwsgi:
+        # Interactive exception debugging, scary dangerous if publicly
+        # accessible, if not enabled we'll use the regular error printing
+        # middleware.
+        from weberror import evalexception
+        app = evalexception.EvalException( app, conf,
+                                           templating_formatters=build_template_error_formatters() )
+        log.debug( "Enabling 'eval exceptions' middleware" )
+    else:
+        if debug and asbool( conf.get( 'use_interactive', False ) ) and process_is_uwsgi:
+            log.error("Interactive debugging middleware is enabled in your configuration "
+                      "but this is a uwsgi process.  Refusing to wrap in interactive error middleware.")
+        # Not in interactive debug mode, just use the regular error middleware
+        from paste.exceptions import errormiddleware
+        app = errormiddleware.ErrorMiddleware( app, conf )
+        log.debug( "Enabling 'error' middleware" )
+    # Transaction logging (apache access.log style)
+    if asbool( conf.get( 'use_translogger', True ) ):
+        from paste.translogger import TransLogger
+        app = TransLogger( app )
+        log.debug( "Enabling 'trans logger' middleware" )
+    # X-Forwarded-Host handling
+    from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware
+    app = XForwardedHostMiddleware( app )
+    log.debug( "Enabling 'x-forwarded-host' middleware" )
+    return app
+
+
+def wrap_in_static( app, global_conf, **local_conf ):
+    urlmap, _ = galaxy.web.framework.webapp.build_url_map( app, global_conf, local_conf )
+    return urlmap
diff --git a/lib/galaxy/webapps/reports/config.py b/lib/galaxy/webapps/reports/config.py
new file mode 100644
index 0000000..a561684
--- /dev/null
+++ b/lib/galaxy/webapps/reports/config.py
@@ -0,0 +1,126 @@
+"""Universe configuration builder."""
+import logging
+import os
+import sys
+
+from six.moves import configparser
+
+from galaxy.util import string_as_bool
+
+log = logging.getLogger( __name__ )
+
+
+def resolve_path( path, root ):
+    """If 'path' is relative make absolute by prepending 'root'"""
+    if not( os.path.isabs( path ) ):
+        path = os.path.join( root, path )
+    return path
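+# Illustrative behaviour (example paths, not part of the upstream module):
+#   resolve_path('database/files', '/srv/galaxy') -> '/srv/galaxy/database/files'
+#   resolve_path('/data/files', '/srv/galaxy')    -> '/data/files'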
+
+
+class ConfigurationError( Exception ):
+    pass
+
+
+class Configuration( object ):
+    def __init__( self, **kwargs ):
+        self.config_dict = kwargs
+        self.root = kwargs.get( 'root_dir', '.' )
+        # Database related configuration
+        self.database = resolve_path( kwargs.get( "database_file", "database/universe.sqlite" ), self.root )
+        self.database_connection = kwargs.get( "database_connection", False )
+        self.database_engine_options = get_database_engine_options( kwargs )
+        # Where dataset files are stored
+        self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
+        self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
+        self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
+        self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
+        self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
+        self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
+        self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/reports" ), self.root )
+        self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
+        self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
+        self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
+        self.brand = kwargs.get( 'brand', None )
+        # Configuration for the message box directly below the masthead.
+        self.message_box_visible = string_as_bool( kwargs.get( 'message_box_visible', False ) )
+        self.message_box_content = kwargs.get( 'message_box_content', None )
+        self.message_box_class = kwargs.get( 'message_box_class', 'info' )
+        self.wiki_url = kwargs.get( 'wiki_url', 'https://wiki.galaxyproject.org/' )
+        self.blog_url = kwargs.get( 'blog_url', None )
+        self.screencasts_url = kwargs.get( 'screencasts_url', None )
+        self.log_events = False
+        self.cookie_path = kwargs.get( "cookie_path", "/" )
+        # Error logging with sentry
+        self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
+        # Parse global_conf
+        global_conf = kwargs.get( 'global_conf', None )
+        global_conf_parser = configparser.ConfigParser()
+        if global_conf and "__file__" in global_conf:
+            global_conf_parser.read(global_conf['__file__'])
+
+    def get( self, key, default ):
+        return self.config_dict.get( key, default )
+
+    def check( self ):
+        # Check that required directories exist
+        for path in self.root, self.file_path, self.template_path:
+            if not os.path.isdir( path ):
+                raise ConfigurationError("Directory does not exist: %s" % path )
+
+
+def get_database_engine_options( kwargs ):
+    """
+    Allow options for the SQLAlchemy database engine to be passed by using
+    the prefix "database_engine_option".
+    """
+    conversions = {
+        'convert_unicode': string_as_bool,
+        'pool_timeout': int,
+        'echo': string_as_bool,
+        'echo_pool': string_as_bool,
+        'pool_recycle': int,
+        'pool_size': int,
+        'max_overflow': int,
+        'pool_threadlocal': string_as_bool
+    }
+    prefix = "database_engine_option_"
+    prefix_len = len( prefix )
+    rval = {}
+    for key, value in kwargs.items():
+        if key.startswith( prefix ):
+            key = key[prefix_len:]
+            if key in conversions:
+                value = conversions[key](value)
+            rval[ key ] = value
+    return rval
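+# For example (illustrative values), a config containing
+#   database_engine_option_pool_size = 10
+#   database_engine_option_echo = True
+# yields {'pool_size': 10, 'echo': True} after the conversions above.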
+
+
+def configure_logging( config ):
+    """
+    Allow some basic logging configuration to be read from the cherrypy
+    config.
+    """
+    format = config.get( "log_format", "%(name)s %(levelname)s %(asctime)s %(message)s" )
+    level = logging._levelNames[ config.get( "log_level", "DEBUG" ) ]
+    destination = config.get( "log_destination", "stdout" )
+    log.info( "Logging at '%s' level to '%s'" % ( level, destination ) )
+    # Get root logger
+    root = logging.getLogger()
+    # Set level
+    root.setLevel( level )
+    # Turn down paste httpserver logging
+    if level <= logging.DEBUG:
+        logging.getLogger( "paste.httpserver.ThreadPool" ).setLevel( logging.WARN )
+    # Remove old handlers
+    for h in root.handlers[:]:
+        root.removeHandler(h)
+    # Create handler
+    if destination == "stdout":
+        handler = logging.StreamHandler( sys.stdout )
+    else:
+        handler = logging.FileHandler( destination )
+    # Create formatter
+    formatter = logging.Formatter( format )
+    # Hook everything up
+    handler.setFormatter( formatter )
+    root.addHandler( handler )
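+# Example of the config keys this reads (values shown are assumptions; the
+# defaults above apply when a key is absent):
+#   log_level = DEBUG
+#   log_destination = stdout   # or a file path for a FileHandler
+#   log_format = %(name)s %(levelname)s %(asctime)s %(message)s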
diff --git a/lib/galaxy/webapps/reports/controllers/__init__.py b/lib/galaxy/webapps/reports/controllers/__init__.py
new file mode 100644
index 0000000..6999eb1
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/__init__.py
@@ -0,0 +1 @@
+"""Galaxy reports controllers."""
diff --git a/lib/galaxy/webapps/reports/controllers/history.py b/lib/galaxy/webapps/reports/controllers/history.py
new file mode 100644
index 0000000..7a75c6a
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/history.py
@@ -0,0 +1,192 @@
+import collections
+import logging
+
+import sqlalchemy as sa
+from markupsafe import escape
+from sqlalchemy import and_
+
+import galaxy.model
+from galaxy import util
+from galaxy.web.base.controller import BaseUIController, web
+
+log = logging.getLogger( __name__ )
+
+
+def int_to_octet(size):
+    try:
+        size = float(size)
+    except ValueError:
+        return "???"
+    except TypeError:
+        if size is None:
+            return "0 o"
+        return "???"
+    units = ("o", "Ko", "Mo", "Go", "To")
+    no_unit = 0
+    while (size >= 1000):
+        size /= 1000.
+        no_unit += 1
+    try:
+        return "%.2f %s" % (size, units[no_unit])
+    except IndexError:
+        return "%.0f %s" % (size * ((no_unit - len(units) + 1) * 1000.), units[-1] )
+
+
+class History( BaseUIController ):
+    """
+    Class defining functions used by reports to make requests to get
+    informations and fill templates before being displayed.
+    The name of function must be the same as as the field "action" of
+    the "href" dict, in .mako templates (templates/webapps/reports).
+    """
+    @web.expose
+    def history_and_dataset_per_user( self, trans, **kwd ):
+        """
+        fill history_and_dataset_per_user.mako template with:
+            - user email
+            - the number of history and their size
+            - the number of dataset
+        """
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        user_cutoff = int( kwd.get( 'user_cutoff', 60 ) )
+
+        # sort by history space, by user email, or by number of histories/datasets
+        sort_by = kwd.get( 'sorting', 'User' )
+        sorting = 0 if sort_by == 'User' else 1 if sort_by == "HSort" else 2 if sort_by == "DSort" else 3
+        descending = 1 if kwd.get( 'descending', 'desc' ) == 'desc' else -1
+
+        # select count (h.id) as history, u.email as email
+        # from history h, galaxy_user u
+        # where h.user_id = u.id and h.deleted='f'
+        # group by email order by email desc
+        histories = sa.select(
+            (sa.func.count( galaxy.model.History.table.c.id ).label( 'history' ),
+             galaxy.model.User.table.c.email.label( 'email' )),
+            from_obj=[sa.outerjoin(galaxy.model.History.table, galaxy.model.User.table)],
+            whereclause=and_(galaxy.model.History.table.c.user_id == galaxy.model.User.table.c.id,
+                             galaxy.model.History.table.c.deleted == 'f'),
+            group_by=['email'],
+            order_by=[ sa.desc( 'email' ), 'history' ] )
+
+        # select u.email, count(d.id)
+        # from galaxy_user u, dataset d, history_dataset_association hd,history h
+        # where d.id=hd.dataset_id and h.id=hd.history_id and u.id = h.user_id and h.deleted='f'
+        # group by u.email;
+        datasets = sa.select(
+            (sa.func.count( galaxy.model.Dataset.table.c.id ).label( 'dataset' ),
+             sa.func.sum( galaxy.model.Dataset.table.c.total_size ).label( 'size' ),
+             galaxy.model.User.table.c.email.label( 'email' ) ),
+            from_obj=[ galaxy.model.User.table,
+                       galaxy.model.Dataset.table,
+                       galaxy.model.HistoryDatasetAssociation.table,
+                       galaxy.model.History.table],
+            whereclause=and_(galaxy.model.Dataset.table.c.id == galaxy.model.HistoryDatasetAssociation.table.c.dataset_id,
+                             galaxy.model.History.table.c.id == galaxy.model.HistoryDatasetAssociation.table.c.history_id,
+                             galaxy.model.History.table.c.user_id == galaxy.model.User.table.c.id,
+                             galaxy.model.History.table.c.deleted == 'f'),
+            group_by=[ 'email' ] )
+
+        # execute the requests, replacing None fields with "Unknown", and
+        # transform the lists into dicts keyed by email: histories maps each
+        # email to a history count, datasets to a (count, total size) tuple
+        histories = dict( [ ( _.email if _.email is not None else "Unknown", int( _.history ) )
+                            for _ in histories.execute() ] )
+        datasets = dict( [ ( _.email if _.email is not None else "Unknown", (int( _.dataset ), int( _.size )) )
+                           for _ in datasets.execute() ] )
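+        # e.g. (illustrative values) histories == {'user@example.org': 12,
+        # 'Unknown': 3} and datasets == {'user@example.org': (34, 1200000000)}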
+
+        sorting_functions = [
+            lambda first, second: descending if first[0].lower() > second[0].lower() else -descending,
+            lambda first, second: descending if histories.get(first, 0) < histories.get(second, 0) else -descending,
+            lambda first, second: descending if datasets.get(first, [0])[0] < datasets.get(second, [0])[0] else -descending,
+            lambda first, second: descending if datasets.get(first, [0, 0])[1] < datasets.get(second, [0, 0])[1] else -descending
+        ]
+
+        # fetch all users
+        users = list(set(histories.keys()) | set(datasets.keys()))
+
+        # sort users depending on sort function, defined by user choices
+        users.sort(sorting_functions[sorting])
+        if user_cutoff > 0:
+            users = users[:user_cutoff]
+
+        # an OrderedDict preserves the ordering computed above
+        data = collections.OrderedDict()
+        for user in users:
+            dataset = datasets.get(user, [0, 0])
+            history = histories.get(user, 0)
+            data[user] = ("%d (%s)" % ( history, int_to_octet( dataset[1] ) ), dataset[0] )
+
+        return trans.fill_template( '/webapps/reports/history_and_dataset_per_user.mako',
+                                    data=data,
+                                    user_cutoff=user_cutoff,
+                                    sorting=sorting,
+                                    descending=descending,
+                                    message=message )
+
+    @web.expose
+    def history_and_dataset_type( self, trans, **kwd ):
+        """
+        fill history_and_dataset_type.mako template with:
+            - the name of history
+            - the number of dataset foreach type
+        """
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        user_cutoff = int( kwd.get( 'user_cutoff', 60 ) )
+        descending = 1 if kwd.get( 'descending', 'desc' ) == 'desc' else -1
+        user_selection = kwd.get( 'user_selection', None)
+
+        # select d.state, h.name
+        # from dataset d, history h , history_dataset_association hda
+        # where hda.history_id=h.id and hda.dataset_id=d.id order by h.state;
+        from_obj = [ galaxy.model.Dataset.table, galaxy.model.History.table, galaxy.model.HistoryDatasetAssociation.table]
+        if user_selection is not None:
+            from_obj.append( galaxy.model.User.table )
+            whereclause = and_( galaxy.model.Dataset.table.c.id == galaxy.model.HistoryDatasetAssociation.table.c.dataset_id,
+                                galaxy.model.History.table.c.id == galaxy.model.HistoryDatasetAssociation.table.c.history_id,
+                                galaxy.model.User.table.c.id == galaxy.model.History.table.c.user_id,
+                                galaxy.model.User.table.c.email == user_selection )
+        else:
+            whereclause = and_(galaxy.model.Dataset.table.c.id == galaxy.model.HistoryDatasetAssociation.table.c.dataset_id,
+                               galaxy.model.History.table.c.id == galaxy.model.HistoryDatasetAssociation.table.c.history_id)
+        histories = sa.select( ( galaxy.model.Dataset.table.c.state.label( 'state' ),
+                                 galaxy.model.History.table.c.name.label( 'name' ) ),
+                               from_obj=from_obj,
+                               whereclause=whereclause,
+                               order_by=[ 'name' ] )
+
+        # execute requests, replace None fields by "Unknown"
+        data = [ ( _.name if _.name is not None else "NoNamedHistory", _.state )
+                 for _ in histories.execute() ]
+
+        # sort by names descending or ascending
+        data.sort(lambda first, second: descending if first[0].lower() > second[0].lower() else -descending)
+
+        # fetch names in the first list and status in the second
+        if data:
+            names, status = zip( *tuple(data) )
+        else:
+            names, status = [], []
+
+        possible_status = {"ok": 0, "upload": 1, "paused": 2, "queued": 3, "error": 4, "discarded": 5}
+        number_of_possible_status = len( possible_status ) + 1  # + 1 to handle unknown status!
+
+        # an OrderedDict preserves the ordering computed above
+        datas = collections.OrderedDict()
+        for no, name in enumerate(names):
+            if name not in datas:
+                if user_cutoff > 0:
+                    if len( datas ) == user_cutoff:
+                        break
+                # create a list holding the count for each possible status
+                datas[name] = ['-'] * number_of_possible_status
+            # cache the mapped status index so the lookup happens only once
+            no_status = possible_status.get(status[no], 6)
+            if datas[name][no_status] == '-':
+                datas[name][no_status] = 0
+            datas[name][no_status] += 1
+
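+        # e.g. (illustrative) datas['RNA-seq run'] == [3, '-', '-', 1, '-', '-', '-'],
+        # i.e. 3 datasets 'ok', 1 'queued' and none observed in the other states
+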
+        return trans.fill_template( '/webapps/reports/history_and_dataset_type.mako',
+                                    data=datas,
+                                    user_cutoff=user_cutoff,
+                                    descending=descending,
+                                    message=message )
diff --git a/lib/galaxy/webapps/reports/controllers/home.py b/lib/galaxy/webapps/reports/controllers/home.py
new file mode 100644
index 0000000..d91f9cd
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/home.py
@@ -0,0 +1,123 @@
+import calendar
+import logging
+from datetime import datetime, timedelta
+
+import sqlalchemy as sa
+
+from galaxy import model
+from galaxy.web.base.controller import BaseUIController, web
+from galaxy.webapps.reports.controllers.query import ReportQueryBuilder
+
+log = logging.getLogger( __name__ )
+
+
+class HomePage( BaseUIController, ReportQueryBuilder ):
+    @web.expose
+    def run_stats( self, trans, **kwd ):
+        message = ''
+        end_date = datetime.utcnow()
+        end_date = datetime(end_date.year, end_date.month, end_date.day, end_date.hour)
+        end_date_buffer = end_date + timedelta(hours=1)  # avoids ValueError when hour == 23
+        start_hours = end_date - timedelta(1)
+        start_days = end_date - timedelta(30)
+
+        jf_hr_data = [0] * 24
+        jf_dy_data = [0] * 30
+        jc_hr_data = [0] * 24
+        jc_dy_data = [0] * 30
+        et_hr_data = []
+        et_dy_data = []
+
+        recent_jobs = sa.select(
+            (
+                (model.Job.table.c.id),
+                (model.Job.table.c.create_time).label( 'create_time' ),
+                (model.Job.table.c.update_time).label( 'update_time' )
+            )
+        )
+
+        for job in recent_jobs.execute():
+            if(job.create_time >= start_days and
+               job.create_time < end_date_buffer ):
+                if(job.create_time >= start_hours and
+                   job.create_time < end_date_buffer ):
+                    # Get the creation time for the jobs in the past day
+                    end_day = end_date.day
+                    start_day = job.create_time.day
+                    end_hour = end_date.hour
+                    start_hour = job.create_time.hour
+
+                    if(end_day != start_day):
+                        hours = (end_hour + 24) - start_hour
+                    else:
+                        hours = end_hour - start_hour
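+                    # e.g. (illustrative) a job created at 23:00 yesterday with
+                    # the current hour at 01:00 gives hours = (1 + 24) - 23 = 2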
+
+                    if(hours < 24):
+                        jc_hr_data[int(hours)] += 1
+                    else:
+                        # clamp anything past the window into the last hourly bucket
+                        jc_hr_data[23] += 1
+                # Get the creation time for jobs in the past 30 days
+                end_month = end_date.month
+                start_month = job.create_time.month
+                end_day = end_date.day
+                start_day = job.create_time.day
+
+                if(end_month != start_month):
+                    month_weekday, month_range = calendar.monthrange(job.create_time.year, job.create_time.month)
+                    day = (end_day + month_range) - start_day
+                else:
+                    day = end_day - start_day
+
+                if(day < 30):
+                    jc_dy_data[int(day)] += 1
+
+            if(job.update_time >= start_days and
+               job.update_time < end_date_buffer ):
+                if(job.update_time >= start_hours and
+                   job.update_time < end_date_buffer ):
+                    # Get the finish time for the jobs in the past day
+                    end_day = end_date.day
+                    start_day = job.update_time.day
+                    end_hour = end_date.hour
+                    start_hour = job.update_time.hour
+
+                    if(end_day != start_day):
+                        hours = (end_hour + 24) - start_hour
+                    else:
+                        hours = end_hour - start_hour
+
+                    if(hours < 24):
+                        jf_hr_data[int(hours)] += 1
+
+                        # Get the Elapsed Time for said job
+                        time = (job.update_time - job.create_time)
+                        seconds = time.seconds
+                        minutes = seconds // 60
+                        et_hr_data.append(minutes)
+                # Get the time the job finished and run time in the 30 days
+                end_month = end_date.month
+                start_month = job.update_time.month
+                end_day = end_date.day
+                start_day = job.update_time.day
+
+                if(end_month != start_month):
+                    month_weekday, month_range = calendar.monthrange(job.update_time.year, job.update_time.month)
+                    day = (end_day + month_range) - start_day
+                else:
+                    day = end_day - start_day
+
+                if(day < 30):
+                    jf_dy_data[int(day)] += 1
+
+                    # Get the Elapsed Time for said job
+                    time = (job.update_time - job.create_time)
+                    seconds = time.seconds
+                    minutes = seconds // 60
+                    et_dy_data.append(minutes)
+
+        return trans.fill_template( '/webapps/reports/run_stats.mako',
+            jf_hr_data=jf_hr_data,
+            jf_dy_data=jf_dy_data,
+            jc_hr_data=jc_hr_data,
+            jc_dy_data=jc_dy_data,
+            et_hr_data=et_hr_data,
+            et_dy_data=et_dy_data,
+            message=message )
diff --git a/lib/galaxy/webapps/reports/controllers/jobs.py b/lib/galaxy/webapps/reports/controllers/jobs.py
new file mode 100644
index 0000000..8e1d66d
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/jobs.py
@@ -0,0 +1,1128 @@
+from __future__ import print_function
+
+import calendar
+import logging
+import re
+from collections import namedtuple
+from datetime import date, datetime, timedelta
+from math import ceil, floor
+
+import sqlalchemy as sa
+from markupsafe import escape
+from sqlalchemy import and_, not_, or_
+
+from galaxy import model, util
+from galaxy.web.base.controller import BaseUIController, web
+from galaxy.web.framework.helpers import grids
+from galaxy.webapps.reports.controllers.query import ReportQueryBuilder
+
+log = logging.getLogger( __name__ )
+
+
+class Timer(object):
+    def __init__(self):
+        self.start()
+        self.stop()
+        self.ERROR = self.time_elapsed()
+
+    def start(self):
+        self.start_time = datetime.now()
+
+    def stop(self):
+        try:
+            self.stop_time = datetime.now()
+            self.time_delta = self.stop_time - self.start_time
+            del self.stop_time
+            del self.start_time
+        except AttributeError:
+            # self.start_time only exists after start() has been called
+            print("You need to start before you can stop!")
+
+    def time_elapsed(self):
+        try:
+            return_time = self.time_delta - self.ERROR
+        except AttributeError:
+            # Either we are still inside __init__ calibrating self.ERROR, or
+            # start()/stop() have not both run and self.time_delta is missing.
+            if not hasattr(self, 'time_delta'):
+                print("You need to start and stop before there's an elapsed time!")
+                return None
+            return_time = self.time_delta
+
+        return return_time
+
+
+def sorter(default_sort_id, kwd):
+    """
+    Initialize sorting variables
+    """
+    SortSpec = namedtuple('SortSpec', ['sort_id', 'order', 'arrow', 'exc_order'])
+
+    sort_id = kwd.get('sort_id')
+    order = kwd.get('order')
+
+    # Parse the default value
+    if sort_id == "default":
+        sort_id = default_sort_id
+
+    # Create the sort
+    if order == "asc":
+        _order = sa.asc( sort_id )
+    elif order == "desc":
+        _order = sa.desc( sort_id )
+    else:
+        # In case of default
+        order = "desc"
+        _order = sa.desc( sort_id )
+
+    # Create an arrow icon to put beside the ordered column
+    up_arrow = "&#x2191;"
+    down_arrow = "&#x2193;"
+    arrow = " "
+
+    if order == "asc":
+        arrow += down_arrow
+    else:
+        arrow += up_arrow
+
+    return SortSpec(sort_id, order, arrow, _order)
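+# Illustrative call (assumed values): sorter('date', {'sort_id': 'default',
+# 'order': 'asc'}) returns SortSpec('date', 'asc', ' &#x2193;', sa.asc('date')).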
+
+
+def get_spark_time( time_period ):
+    _time_period = 0
+
+    if time_period == "days":
+        _time_period = 1.0
+    elif time_period == "weeks":
+        _time_period = 7.0
+    elif time_period == "months":
+        _time_period = 30.0
+    elif time_period == "years":
+        _time_period = 365.0
+    else:
+        time_period = "days"
+        _time_period = 1.0
+
+    return time_period, _time_period
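+# e.g. get_spark_time('weeks') -> ('weeks', 7.0); anything unrecognised,
+# including None, falls back to ('days', 1.0).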
+
+
+class SpecifiedDateListGrid( grids.Grid ):
+
+    class JobIdColumn( grids.IntegerColumn ):
+
+        def get_value( self, trans, grid, job ):
+            return job.id
+
+    class StateColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, job ):
+            return '<div class="count-box state-color-%s">%s</div>' % ( job.state, job.state )
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'Unfinished':
+                return query.filter( not_( or_( model.Job.table.c.state == model.Job.states.OK,
+                                                model.Job.table.c.state == model.Job.states.ERROR,
+                                                model.Job.table.c.state == model.Job.states.DELETED ) ) )
+            return query
+
+    class ToolColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, job ):
+            return job.tool_id
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter is not None:
+                query = query.filter( model.Job.table.c.tool_id == column_filter )
+
+            return query
+
+    class CreateTimeColumn( grids.DateTimeColumn ):
+
+        def get_value( self, trans, grid, job ):
+            return job.create_time.strftime("%b %d, %Y, %H:%M:%S")
+
+    class UserColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, job ):
+            if job.user:
+                return escape(job.user.email)
+            return 'anonymous'
+
+    class EmailColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'All':
+                return query
+            return query.filter( and_( model.Job.table.c.user_id == model.User.table.c.id,
+                                       model.User.table.c.email == column_filter ) )
+
+    class SpecifiedDateColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'All':
+                return query
+            # We are either filtering on a date like YYYY-MM-DD or on a month like YYYY-MM,
+            # so we need to figure out which type of date we have
+            if column_filter.count( '-' ) == 2:  # We are filtering on a date like YYYY-MM-DD
+                year, month, day = map( int, column_filter.split( "-" ) )
+                start_date = date( year, month, day )
+                end_date = start_date + timedelta( days=1 )
+            elif column_filter.count( '-' ) == 1:  # We are filtering on a month like YYYY-MM
+                year, month = map( int, column_filter.split( "-" ) )
+                start_date = date( year, month, 1 )
+                end_date = start_date + timedelta( days=calendar.monthrange( year, month )[1] )
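+            # e.g. '2016-10-31' filters a single day, '2016-10' the whole month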
+
+            return query.filter( and_( self.model_class.table.c.create_time >= start_date,
+                                       self.model_class.table.c.create_time < end_date ) )
+
+    # Grid definition
+    use_async = False
+    model_class = model.Job
+    title = "Jobs"
+    template = '/webapps/reports/grid.mako'
+    default_sort_key = "id"
+    columns = [
+        JobIdColumn( "Id",
+                     key="id",
+                     link=( lambda item: dict( operation="job_info", id=item.id, webapp="reports" ) ),
+                     attach_popup=False,
+                     filterable="advanced" ),
+        StateColumn( "State",
+                     key="state",
+                     attach_popup=False ),
+        ToolColumn( "Tool Id",
+                    key="tool_id",
+                    link=( lambda item: dict( operation="tool_per_month", id=item.id, webapp="reports" ) ),
+                    attach_popup=False ),
+        CreateTimeColumn( "Creation Time",
+                          key="create_time",
+                          attach_popup=False ),
+        UserColumn( "User",
+                    key="email",
+                    model_class=model.User,
+                    link=( lambda item: dict( operation="user_per_month", id=item.id, webapp="reports" ) ),
+                    attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        SpecifiedDateColumn( "Specified Date",
+                             key="specified_date",
+                             visible=False ),
+        EmailColumn( "Email",
+                     key="email",
+                     model_class=model.User,
+                     visible=False ),
+        grids.StateColumn( "State",
+                           key="state",
+                           visible=False,
+                           filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[1], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    standard_filters = []
+    default_filter = { 'specified_date': 'All' }
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+    def build_initial_query( self, trans, **kwd ):
+        params = util.Params( kwd )
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        monitor_user_id = get_monitor_id( trans, monitor_email )
+        return trans.sa_session.query( self.model_class ) \
+                               .join( model.User ) \
+                               .filter( model.Job.table.c.user_id != monitor_user_id )\
+                               .enable_eagerloads( False )
+
+
+class Jobs( BaseUIController, ReportQueryBuilder ):
+
+    """
+    Class contains functions for querying data requested by user via the webapp. It exposes the functions and
+    responds to requests with the filled .mako templates.
+    """
+
+    specified_date_list_grid = SpecifiedDateListGrid()
+
+    @web.expose
+    def specified_date_handler( self, trans, **kwd ):
+        # We add params to the keyword dict in this method in order to rename the param
+        # with an "f-" prefix, simulating filtering by clicking a search link.  We have
+        # to take this approach because the "-" character is illegal in HTTP requests.
+        kwd[ 'sort_id' ] = 'default'
+        kwd[ 'order' ] = 'default'
+
+        if 'f-specified_date' in kwd and 'specified_date' not in kwd:
+            # The user clicked a State link in the Advanced Search box, so 'specified_date'
+            # will have been eliminated.
+            pass
+        elif 'specified_date' not in kwd:
+            kwd[ 'f-specified_date' ] = 'All'
+        else:
+            kwd[ 'f-specified_date' ] = kwd[ 'specified_date' ]
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "job_info":
+                return trans.response.send_redirect( web.url_for( controller='jobs',
+                                                                  action='job_info',
+                                                                  **kwd ) )
+            elif operation == "tool_for_month":
+                kwd[ 'f-tool_id' ] = kwd[ 'tool_id' ]
+            elif operation == "tool_per_month":
+                # The received id is the job id, so we need to get the job's tool_id.
+                job_id = kwd.get( 'id', None )
+                job = get_job( trans, job_id )
+                kwd[ 'tool_id' ] = job.tool_id
+                return trans.response.send_redirect( web.url_for( controller='jobs',
+                                                                  action='tool_per_month',
+                                                                  **kwd ) )
+            elif operation == "user_for_month":
+                kwd[ 'f-email' ] = util.restore_text( kwd[ 'email' ] )
+            elif operation == "user_per_month":
+                # The received id is the job id, so we need to get the id of the user
+                # that submitted the job.
+                job_id = kwd.get( 'id', None )
+                job = get_job( trans, job_id )
+                if job.user:
+                    kwd[ 'email' ] = job.user.email
+                else:
+                    kwd[ 'email' ] = None  # For anonymous users
+                return trans.response.send_redirect( web.url_for( controller='jobs',
+                                                                  action='user_per_month',
+                                                                  **kwd ) )
+            elif operation == "specified_date_in_error":
+                kwd[ 'f-state' ] = 'error'
+            elif operation == "unfinished":
+                kwd[ 'f-state' ] = 'Unfinished'
+            elif operation == "specified_tool_in_error":
+                kwd[ 'f-state' ] = 'error'
+                kwd[ 'f-tool_id' ] = kwd[ 'tool_id' ]
+        return self.specified_date_list_grid( trans, **kwd )
+
+    def _calculate_trends_for_jobs( self, jobs_query ):
+        trends = dict()
+        for job in jobs_query.execute():
+            job_day = int(job.date.strftime("%-d")) - 1
+            job_month = int(job.date.strftime("%-m"))
+            job_month_name = job.date.strftime("%B")
+            job_year = job.date.strftime("%Y")
+            key = str( job_month_name + job_year)
+
+            try:
+                trends[key][job_day] += 1
+            except KeyError:
+                job_year = int(job_year)
+                wday, day_range = calendar.monthrange(job_year, job_month)
+                trends[key] = [0] * day_range
+                trends[key][job_day] += 1
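+        # e.g. (illustrative) trends['October2016'] is a 31-slot list holding
+        # one per-day job count for that month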
+        return trends
+
+    def _calculate_job_table( self, jobs_query ):
+        jobs = []
+        for row in jobs_query.execute():
+            month_name = row.date.strftime("%B")
+            year = int(row.date.strftime("%Y"))
+
+            jobs.append( (
+                row.date.strftime( "%Y-%m" ),
+                row.total_jobs,
+                month_name,
+                year
+            ) )
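+        # e.g. (illustrative) jobs ends up as [('2016-10', 1234, 'October', 2016), ...]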
+        return jobs
+
+    @web.expose
+    def specified_month_all( self, trans, **kwd ):
+        """
+        Queries the DB for all jobs in the given month, defaulting to the current month.
+        """
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        params = util.Params( kwd )
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        specs = sorter( 'date', kwd )
+        offset = 0
+        limit = 10
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+
+        if "entries" in kwd:
+            entries = int(kwd.get( 'entries' ))
+        else:
+            entries = 10
+        limit = entries * 4
+
+        if "offset" in kwd:
+            offset = int(kwd.get( 'offset' ))
+        else:
+            offset = 0
+
+        if "page" in kwd:
+            page = int(kwd.get( 'page' ))
+        else:
+            page = 1
+
+        # If we do not know who the monitor user is, we query for all jobs
+        monitor_user_id = get_monitor_id( trans, monitor_email )
+
+        # If specified_date is not received, we'll default to the current month
+        specified_date = kwd.get( 'specified_date', datetime.utcnow().strftime( "%Y-%m-%d" ) )
+        specified_month = specified_date[ :7 ]
+
+        year, month = map( int, specified_month.split( "-" ) )
+        start_date = date( year, month, 1 )
+        end_date = start_date + timedelta( days=calendar.monthrange( year, month )[1] )
+        month_label = start_date.strftime( "%B" )
+        year_label = start_date.strftime( "%Y" )
+
+        # Used to build the page table
+        month_jobs = sa.select( ( sa.func.date( model.Job.table.c.create_time ).label( 'date' ),
+                                  sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                                whereclause=sa.and_( model.Job.table.c.user_id != monitor_user_id,
+                                                     model.Job.table.c.create_time >= start_date,
+                                                     model.Job.table.c.create_time < end_date ),
+                                from_obj=[ model.Job.table ],
+                                group_by=[ 'date' ],
+                                order_by=[ _order ],
+                                offset=offset,
+                                limit=limit)
+
+        # Used to build the trend line
+        all_jobs = sa.select( ( model.Job.table.c.create_time.label('date'), model.Job.table.c.id.label('id') ),
+                             whereclause=sa.and_( model.Job.table.c.user_id != monitor_user_id,
+                                                  model.Job.table.c.create_time >= start_date,
+                                                  model.Job.table.c.create_time < end_date ) )
+
+        trends = dict()
+        for job in all_jobs.execute():
+            job_hour = int(job.date.strftime("%-H"))
+            job_day = job.date.strftime("%d")
+
+            try:
+                trends[job_day][job_hour] += 1
+            except KeyError:
+                trends[job_day] = [0] * 24
+                trends[job_day][job_hour] += 1
+
+        jobs = []
+        for row in month_jobs.execute():
+            row_dayname = row.date.strftime("%A")
+            row_day = row.date.strftime("%d")
+
+            jobs.append( ( row_dayname,
+                           row_day,
+                           row.total_jobs,
+                           row.date
+                           ) )
+
+        pages_found = ceil(len(jobs) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/jobs_specified_month_all.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    month_label=month_label,
+                                    year_label=year_label,
+                                    month=month,
+                                    page_specs=page_specs,
+                                    jobs=jobs,
+                                    trends=trends,
+                                    is_user_jobs_only=monitor_user_id,
+                                    message=message )
+
+    @web.expose
+    def specified_month_in_error( self, trans, **kwd ):
+        """
+        Queries the DB for user jobs in error during the specified month.
+        """
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        params = util.Params( kwd )
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        specs = sorter( 'date', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        offset = 0
+        limit = 10
+
+        if "entries" in kwd:
+            entries = int(kwd.get( 'entries' ))
+        else:
+            entries = 10
+        limit = entries * 4
+
+        if "offset" in kwd:
+            offset = int(kwd.get( 'offset' ))
+        else:
+            offset = 0
+
+        if "page" in kwd:
+            page = int(kwd.get( 'page' ))
+        else:
+            page = 1
+
+        # If we do not know who the monitor user is, we query for all jobs instead
+        monitor_user_id = get_monitor_id( trans, monitor_email )
+
+        # If specified_date is not received, we'll default to the current month
+        specified_date = kwd.get( 'specified_date', datetime.utcnow().strftime( "%Y-%m-%d" ) )
+        specified_month = specified_date[ :7 ]
+        year, month = map( int, specified_month.split( "-" ) )
+        start_date = date( year, month, 1 )
+        end_date = start_date + timedelta( days=calendar.monthrange( year, month )[1] )
+        month_label = start_date.strftime( "%B" )
+        year_label = start_date.strftime( "%Y" )
+
+        month_jobs_in_error = sa.select( ( sa.func.date( model.Job.table.c.create_time ).label( 'date' ),
+                                           sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                                         whereclause=sa.and_( model.Job.table.c.user_id != monitor_user_id,
+                                                              model.Job.table.c.state == 'error',
+                                                              model.Job.table.c.create_time >= start_date,
+                                                              model.Job.table.c.create_time < end_date ),
+                                         from_obj=[ model.Job.table ],
+                                         group_by=[ 'date' ],
+                                         order_by=[ _order ],
+                                         offset=offset,
+                                         limit=limit )
+
+        # Used to build the trend line
+        all_jobs_in_error = sa.select( ( model.Job.table.c.create_time.label('date'), model.Job.table.c.id.label('id') ),
+                                      whereclause=sa.and_( model.Job.table.c.user_id != monitor_user_id,
+                                                           model.Job.table.c.state == 'error',
+                                                           model.Job.table.c.create_time >= start_date,
+                                                           model.Job.table.c.create_time < end_date ) )
+
+        trends = dict()
+        for job in all_jobs_in_error.execute():
+            job_hour = int(job.date.strftime("%-H"))
+            job_day = job.date.strftime("%d")
+
+            try:
+                trends[job_day][job_hour] += 1
+            except KeyError:
+                trends[job_day] = [0] * 24
+                trends[job_day][job_hour] += 1
+
+        jobs = []
+        for row in month_jobs_in_error.execute():
+            row_dayname = row.date.strftime("%A")
+            row_day = row.date.strftime("%d")
+
+            jobs.append( ( row_dayname,
+                           row_day,
+                           row.total_jobs,
+                           row.date
+                           ) )
+
+        pages_found = ceil(len(jobs) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/jobs_specified_month_in_error.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    month_label=month_label,
+                                    year_label=year_label,
+                                    month=month,
+                                    jobs=jobs,
+                                    trends=trends,
+                                    message=message,
+                                    is_user_jobs_only=monitor_user_id,
+                                    page_specs=page_specs )
+
+    @web.expose
+    def per_month_all( self, trans, **kwd ):
+        """
+        Queries the DB for all jobs, excluding the monitor user's jobs.
+        """
+
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        params = util.Params( kwd )
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        specs = sorter( 'date', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        offset = 0
+        limit = 10
+
+        if "entries" in kwd:
+            entries = int(kwd.get( 'entries' ))
+        else:
+            entries = 10
+        limit = entries * 4
+
+        if "offset" in kwd:
+            offset = int(kwd.get( 'offset' ))
+        else:
+            offset = 0
+
+        if "page" in kwd:
+            page = int(kwd.get( 'page' ))
+        else:
+            page = 1
+
+        # If we do not know who the monitor user is, we query for all jobs
+        monitor_user_id = get_monitor_id( trans, monitor_email )
+
+        # Used to build the page table
+        jobs_by_month = sa.select( ( self.select_month( model.Job.table.c.create_time ).label( 'date' ),
+                                   sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                                   whereclause=model.Job.table.c.user_id != monitor_user_id,
+                                   from_obj=[ model.Job.table ],
+                                   group_by=self.group_by_month( model.Job.table.c.create_time ),
+                                   order_by=[ _order ],
+                                   offset=offset,
+                                   limit=limit )
+
+        # Used to build the sparkline
+        all_jobs = sa.select( ( self.select_day(model.Job.table.c.create_time).label('date'),
+                               model.Job.table.c.id.label('id') ) )
+
+        trends = self._calculate_trends_for_jobs( all_jobs )
+        jobs = self._calculate_job_table( jobs_by_month )
+
+        pages_found = ceil(len(jobs) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/jobs_per_month_all.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    trends=trends,
+                                    jobs=jobs,
+                                    is_user_jobs_only=monitor_user_id,
+                                    message=message,
+                                    page_specs=page_specs )
+
+    @web.expose
+    def per_month_in_error( self, trans, **kwd ):
+        """
+        Queries the DB for user jobs in error. Filters out monitor jobs.
+        """
+
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        params = util.Params( kwd )
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        specs = sorter( 'date', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        offset = 0
+        limit = 10
+
+        if "entries" in kwd:
+            entries = int(kwd.get( 'entries' ))
+        else:
+            entries = 10
+        limit = entries * 4
+
+        if "offset" in kwd:
+            offset = int(kwd.get( 'offset' ))
+        else:
+            offset = 0
+
+        if "page" in kwd:
+            page = int(kwd.get( 'page' ))
+        else:
+            page = 1
+
+        # If we do not know who the monitor user is, we query for all jobs
+        monitor_user_id = get_monitor_id( trans, monitor_email )
+
+        # Used to build the page table
+        jobs_in_error_by_month = sa.select( ( self.select_month( model.Job.table.c.create_time ).label( 'date' ),
+                                              sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                                            whereclause=sa.and_( model.Job.table.c.state == 'error',
+                                                                 model.Job.table.c.user_id != monitor_user_id ),
+                                            from_obj=[ model.Job.table ],
+                                            group_by=self.group_by_month( model.Job.table.c.create_time ),
+                                            order_by=[ _order ],
+                                            offset=offset,
+                                            limit=limit )
+
+        # Used to build the trend line
+        all_jobs = sa.select( ( self.select_day(model.Job.table.c.create_time).label('date'),
+                               model.Job.table.c.id.label('id') ),
+                             whereclause=sa.and_( model.Job.table.c.state == 'error',
+                                                 model.Job.table.c.user_id != monitor_user_id ) )
+
+        trends = self._calculate_trends_for_jobs( all_jobs )
+        jobs = self._calculate_job_table( jobs_in_error_by_month )
+
+        pages_found = ceil(len(jobs) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/jobs_per_month_in_error.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    trends=trends,
+                                    jobs=jobs,
+                                    message=message,
+                                    is_user_jobs_only=monitor_user_id,
+                                    page_specs=page_specs,
+                                    offset=offset,
+                                    limit=limit )
+
+    @web.expose
+    def per_user( self, trans, **kwd ):
+        total_time = Timer()
+        q_time = Timer()
+
+        total_time.start()
+        params = util.Params( kwd )
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        specs = sorter( 'user_email', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        time_period = kwd.get('spark_time')
+        time_period, _time_period = get_spark_time( time_period )
+        spark_limit = 30
+        offset = 0
+        limit = 10
+
+        if "entries" in kwd:
+            entries = int(kwd.get( 'entries' ))
+        else:
+            entries = 10
+        limit = entries * 4
+
+        if "offset" in kwd:
+            offset = int(kwd.get( 'offset' ))
+        else:
+            offset = 0
+
+        if "page" in kwd:
+            page = int(kwd.get( 'page' ))
+        else:
+            page = 1
+
+        jobs = []
+        jobs_per_user = sa.select( ( model.User.table.c.email.label( 'user_email' ),
+                                     sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                                   from_obj=[ sa.outerjoin( model.Job.table, model.User.table ) ],
+                                   group_by=[ 'user_email' ],
+                                   order_by=[ _order ],
+                                   offset=offset,
+                                   limit=limit )
+
+        q_time.start()
+        for row in jobs_per_user.execute():
+            if ( row.user_email is None ):
+                jobs.append( ( 'Anonymous',
+                               row.total_jobs ) )
+            elif ( row.user_email == monitor_email ):
+                continue
+            else:
+                jobs.append( ( row.user_email,
+                               row.total_jobs ) )
+        q_time.stop()
+        query1time = q_time.time_elapsed()
+
+        users = sa.select( [model.User.table.c.email],
+                           from_obj=[ model.User.table ] )
+
+        all_jobs_per_user = sa.select( ( model.Job.table.c.id.label( 'id' ),
+                                        model.Job.table.c.create_time.label( 'date' ),
+                                        model.User.table.c.email.label( 'user_email' ) ),
+                                      from_obj=[ sa.outerjoin( model.Job.table, model.User.table ) ],
+                                      whereclause=model.User.table.c.email.in_( users ) )
+
+        currday = datetime.today()
+        trends = dict()
+        q_time.start()
+        for job in all_jobs_per_user.execute():
+            if job.user_email is None:
+                curr_user = 'Anonymous'
+            else:
+                curr_user = re.sub(r'\W+', '', job.user_email)
+
+            try:
+                day = currday - job.date
+            except TypeError:
+                day = currday - datetime.date(job.date)
+
+            day = day.days
+            container = floor(day / _time_period)
+            container = int(container)
+            try:
+                if container < spark_limit:
+                    trends[curr_user][container] += 1
+            except KeyError:
+                trends[curr_user] = [0] * spark_limit
+                if container < spark_limit:
+                    trends[curr_user][container] += 1
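+            # e.g. (illustrative) a job 17 days old with spark_time 'weeks'
+            # lands in bucket int(floor(17 / 7.0)) == 2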
+        q_time.stop()
+        query2time = q_time.time_elapsed()
+
+        pages_found = ceil(len(jobs) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        total_time.stop()
+        ttime = total_time.time_elapsed()
+        return trans.fill_template( '/webapps/reports/jobs_per_user.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    spark_limit=spark_limit,
+                                    time_period=time_period,
+                                    q1time=query1time,
+                                    q2time=query2time,
+                                    ttime=ttime,
+                                    trends=trends,
+                                    jobs=jobs,
+                                    message=message,
+                                    page_specs=page_specs )
+
+    @web.expose
+    def user_per_month( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = ''
+
+        email = util.restore_text( params.get( 'email', '' ) )
+        specs = sorter( 'date', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+
+        q = sa.select( ( self.select_month( model.Job.table.c.create_time ).label( 'date' ),
+                         sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                       whereclause=model.User.table.c.email == email,
+                       from_obj=[ sa.join( model.Job.table, model.User.table ) ],
+                       group_by=self.group_by_month( model.Job.table.c.create_time ),
+                       order_by=[ _order ] )
+
+        all_jobs_per_user = sa.select( ( model.Job.table.c.create_time.label( 'date' ),
+                                       model.Job.table.c.id.label( 'job_id' ) ),
+                                       whereclause=sa.and_( model.User.table.c.email == email ),
+                                       from_obj=[ sa.join( model.Job.table, model.User.table ) ] )
+
+        trends = dict()
+        for job in all_jobs_per_user.execute():
+            job_day = int(job.date.strftime("%-d")) - 1
+            job_month = int(job.date.strftime("%-m"))
+            job_month_name = job.date.strftime("%B")
+            job_year = job.date.strftime("%Y")
+            key = str( job_month_name + job_year)
+
+            try:
+                trends[key][job_day] += 1
+            except KeyError:
+                job_year = int(job_year)
+                wday, day_range = calendar.monthrange(job_year, job_month)
+                trends[key] = [0] * day_range
+                trends[key][job_day] += 1
+
+        jobs = []
+        for row in q.execute():
+            jobs.append( ( row.date.strftime( "%Y-%m" ),
+                           row.total_jobs,
+                           row.date.strftime( "%B" ),
+                           row.date.strftime( "%Y" ) ) )
+        return trans.fill_template( '/webapps/reports/jobs_user_per_month.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    id=kwd.get('id'),
+                                    trends=trends,
+                                    email=util.sanitize_text( email ),
+                                    jobs=jobs, message=message )
+
+    @web.expose
+    def per_tool( self, trans, **kwd ):
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        params = util.Params( kwd )
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        specs = sorter( 'tool_id', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        time_period = kwd.get('spark_time')
+        time_period, _time_period = get_spark_time( time_period )
+        spark_limit = 30
+        offset = 0
+        limit = 10
+
+        if "entries" in kwd:
+            entries = int(kwd.get( 'entries' ))
+        else:
+            entries = 10
+        limit = entries * 4
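+        # Fetch up to four pages' worth of rows, presumably so that pages_found
+        # (computed below from len(jobs)) can indicate whether further pages exist.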
+
+        if "offset" in kwd:
+            offset = int(kwd.get( 'offset' ))
+        else:
+            offset = 0
+
+        if "page" in kwd:
+            page = int(kwd.get( 'page' ))
+        else:
+            page = 1
+
+        # If we cannot determine the monitor user, we query for all jobs.
+        monitor_user_id = get_monitor_id( trans, monitor_email )
+
+        jobs = []
+        q = sa.select( ( model.Job.table.c.tool_id.label( 'tool_id' ),
+                         sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                       whereclause=model.Job.table.c.user_id != monitor_user_id,
+                       from_obj=[ model.Job.table ],
+                       group_by=[ 'tool_id' ],
+                       order_by=[ _order ],
+                       offset=offset,
+                       limit=limit )
+
+        all_jobs_per_tool = sa.select( ( model.Job.table.c.tool_id.label( 'tool_id' ),
+                                        model.Job.table.c.id.label( 'id' ),
+                                        self.select_day( model.Job.table.c.create_time ).label( 'date' ) ),
+                                      whereclause=model.Job.table.c.user_id != monitor_user_id,
+                                      from_obj=[ model.Job.table ] )
+
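+        # trends maps a sanitized tool id to spark_limit counters, one per
+        # time-period bucket, used to draw the per-tool sparklines.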
+        currday = date.today()
+        trends = dict()
+        for job in all_jobs_per_tool.execute():
+            curr_tool = re.sub(r'\W+', '', str(job.tool_id))
+            try:
+                day = currday - job.date
+            except TypeError:
+                day = currday - datetime.date(job.date)
+
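+            # Convert the job's age to whole days, then to a sparkline bucket:
+            # the age divided by the period length chosen via spark_time.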
+            day = day.days
+            container = floor(day / _time_period)
+            container = int(container)
+            try:
+                if container < spark_limit:
+                    trends[curr_tool][container] += 1
+            except KeyError:
+                trends[curr_tool] = [0] * spark_limit
+                if container < spark_limit:
+                    trends[curr_tool][container] += 1
+
+        for row in q.execute():
+            jobs.append( ( row.tool_id,
+                           row.total_jobs ) )
+
+        pages_found = ceil(len(jobs) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/jobs_per_tool.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    spark_limit=spark_limit,
+                                    time_period=time_period,
+                                    trends=trends,
+                                    jobs=jobs,
+                                    message=message,
+                                    is_user_jobs_only=monitor_user_id,
+                                    page_specs=page_specs )
+
+    @web.expose
+    def errors_per_tool( self, trans, **kwd ):
+        """
+        Queries the DB for user jobs in error. Filters out monitor jobs.
+        """
+
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        params = util.Params( kwd )
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        specs = sorter( 'tool_id', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        time_period = kwd.get('spark_time')
+        time_period, _time_period = get_spark_time( time_period )
+        spark_limit = 30
+        offset = 0
+        limit = 10
+
+        if "entries" in kwd:
+            entries = int(kwd.get( 'entries' ))
+        else:
+            entries = 10
+        limit = entries * 4
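+        # Same four-page fetch window as in per_tool above.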
+
+        if "offset" in kwd:
+            offset = int(kwd.get( 'offset' ))
+        else:
+            offset = 0
+
+        if "page" in kwd:
+            page = int(kwd.get( 'page' ))
+        else:
+            page = 1
+
+        # If we cannot determine the monitor user, we query for all jobs.
+        monitor_user_id = get_monitor_id( trans, monitor_email )
+
+        jobs_in_error_per_tool = sa.select( ( model.Job.table.c.tool_id.label( 'tool_id' ),
+                                              sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                                            whereclause=sa.and_( model.Job.table.c.state == 'error',
+                                                                 model.Job.table.c.user_id != monitor_user_id ),
+                                            from_obj=[ model.Job.table ],
+                                            group_by=[ 'tool_id' ],
+                                            order_by=[ _order ],
+                                            offset=offset,
+                                            limit=limit )
+
+        all_jobs_per_tool_errors = sa.select( ( self.select_day( model.Job.table.c.create_time ).label( 'date' ),
+                                              model.Job.table.c.id.label( 'id' ),
+                                              model.Job.table.c.tool_id.label( 'tool_id' ) ),
+                                              whereclause=sa.and_( model.Job.table.c.state == 'error',
+                                                                   model.Job.table.c.user_id != monitor_user_id ),
+                                              from_obj=[ model.Job.table ]
+                                              )
+
+        currday = date.today()
+        trends = dict()
+        for job in all_jobs_per_tool_errors.execute():
+            curr_tool = re.sub(r'\W+', '', str(job.tool_id))
+            try:
+                day = currday - job.date
+            except TypeError:
+                day = currday - datetime.date(job.date)
+
+            # convert day into days/weeks/months/years
+            day = day.days
+            container = floor(day / _time_period)
+            container = int(container)
+            try:
+                if container < spark_limit:
+                    trends[curr_tool][container] += 1
+            except KeyError:
+                trends[curr_tool] = [0] * spark_limit
+                if container < spark_limit:
+                    trends[curr_tool][container] += 1
+        jobs = []
+        for row in jobs_in_error_per_tool.execute():
+            jobs.append( ( row.total_jobs, row.tool_id ) )
+
+        pages_found = ceil(len(jobs) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/jobs_errors_per_tool.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    spark_limit=spark_limit,
+                                    time_period=time_period,
+                                    trends=trends,
+                                    jobs=jobs,
+                                    message=message,
+                                    is_user_jobs_only=monitor_user_id,
+                                    page_specs=page_specs )
+
+    @web.expose
+    def tool_per_month( self, trans, **kwd ):
+        message = ''
+
+        params = util.Params( kwd )
+        monitor_email = params.get( 'monitor_email', 'monitor@bx.psu.edu' )
+        specs = sorter( 'date', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        # If we cannot determine the monitor user, we query for all jobs.
+        monitor_user_id = get_monitor_id( trans, monitor_email )
+
+        tool_id = params.get( 'tool_id', 'Add a column1' )
+        specified_date = params.get( 'specified_date', datetime.utcnow().strftime( "%Y-%m-%d" ) )
+        q = sa.select( ( self.select_month( model.Job.table.c.create_time ).label( 'date' ),
+                         sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+                       whereclause=sa.and_( model.Job.table.c.tool_id == tool_id,
+                                            model.Job.table.c.user_id != monitor_user_id ),
+                       from_obj=[ model.Job.table ],
+                       group_by=self.group_by_month( model.Job.table.c.create_time ),
+                       order_by=[ _order ] )
+
+        # Use to make sparkline
+        all_jobs_for_tool = sa.select( ( self.select_month(model.Job.table.c.create_time).label('month'),
+                                        self.select_day(model.Job.table.c.create_time).label('day'),
+                                        model.Job.table.c.id.label('id') ),
+                                      whereclause=sa.and_( model.Job.table.c.tool_id == tool_id,
+                                                          model.Job.table.c.user_id != monitor_user_id ),
+                             from_obj=[ model.Job.table ] )
+        trends = dict()
+        for job in all_jobs_for_tool.execute():
+            job_day = int(job.day.strftime("%-d")) - 1
+            job_month = int(job.month.strftime("%-m"))
+            job_month_name = job.month.strftime("%B")
+            job_year = job.month.strftime("%Y")
+            key = job_month_name + job_year
+
+            try:
+                trends[key][job_day] += 1
+            except KeyError:
+                job_year = int(job_year)
+                wday, day_range = calendar.monthrange(job_year, job_month)
+                trends[key] = [0] * day_range
+                trends[key][job_day] += 1
+
+        jobs = []
+        for row in q.execute():
+            jobs.append( ( row.date.strftime( "%Y-%m" ),
+                           row.total_jobs,
+                           row.date.strftime( "%B" ),
+                           row.date.strftime( "%Y" ) ) )
+        return trans.fill_template( '/webapps/reports/jobs_tool_per_month.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    specified_date=specified_date,
+                                    tool_id=tool_id,
+                                    trends=trends,
+                                    jobs=jobs,
+                                    message=message,
+                                    is_user_jobs_only=monitor_user_id )
+
+    @web.expose
+    def job_info( self, trans, **kwd ):
+        message = ''
+        job = trans.sa_session.query( model.Job ) \
+                              .get( trans.security.decode_id( kwd.get( 'id', '' ) ) )
+        return trans.fill_template( '/webapps/reports/job_info.mako',
+                                    job=job,
+                                    message=message )
+
+# ---- Utility methods -------------------------------------------------------
+
+
+def get_job( trans, id ):
+    return trans.sa_session.query( trans.model.Job ).get( trans.security.decode_id( id ) )
+
+
+def get_monitor_id( trans, monitor_email ):
+    """
+    A convenience method to obtain the monitor user id.
+    """
+    monitor_user_id = None
+    monitor_row = trans.sa_session.query( trans.model.User.table.c.id ) \
+        .filter( trans.model.User.table.c.email == monitor_email ) \
+        .first()
+    if monitor_row is not None:
+        monitor_user_id = monitor_row[0]
+    return monitor_user_id
diff --git a/lib/galaxy/webapps/reports/controllers/query.py b/lib/galaxy/webapps/reports/controllers/query.py
new file mode 100644
index 0000000..2bd3b1c
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/query.py
@@ -0,0 +1,30 @@
+""" Mixin to help build advanced queries for reports interface.
+"""
+import sqlalchemy as sa
+
+
+class ReportQueryBuilder:
+
+    def group_by_month(self, column):
+        if self.app.targets_mysql:
+            return [ sa.func.year( column ), sa.func.month( sa.func.date( column ) ) ]
+        else:
+            return [ sa.func.date_trunc( 'month', sa.func.date( column ) ) ]
+
+    def select_month(self, column):
+        if self.app.targets_mysql:
+            return sa.func.date( column )
+        else:
+            return sa.func.date_trunc( 'month', sa.func.date( column ) )
+
+    def group_by_day(self, column):
+        if self.app.targets_mysql:
+            return [ sa.func.day( sa.func.date( column ) ) ]
+        else:
+            return [ sa.func.date_trunc( 'day', sa.func.date( column ) ) ]
+
+    def select_day(self, column):
+        if self.app.targets_mysql:
+            return sa.func.date( column )
+        else:
+            return sa.func.date_trunc( 'day', sa.func.date( column ) )
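+
+# A minimal usage sketch (illustrative, not part of the upstream code): a
+# controller mixes this class in and feeds the helpers straight to SQLAlchemy,
+# e.g.
+#
+#     class Jobs( BaseUIController, ReportQueryBuilder ):
+#         def per_month( self, trans, **kwd ):
+#             q = sa.select( ( self.select_month( model.Job.table.c.create_time ).label( 'date' ),
+#                              sa.func.count( model.Job.table.c.id ).label( 'total_jobs' ) ),
+#                            from_obj=[ model.Job.table ],
+#                            group_by=self.group_by_month( model.Job.table.c.create_time ) )
+#
+# On PostgreSQL select_month() emits date_trunc('month', date(column)); on
+# MySQL it falls back to date(column), with year()/month() in the GROUP BY.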
diff --git a/lib/galaxy/webapps/reports/controllers/root.py b/lib/galaxy/webapps/reports/controllers/root.py
new file mode 100644
index 0000000..ff104ba
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/root.py
@@ -0,0 +1,9 @@
+from galaxy.web.base.controller import BaseUIController, web
+import logging
+log = logging.getLogger( __name__ )
+
+
+class Report( BaseUIController ):
+    @web.expose
+    def index( self, trans, **kwd ):
+        return trans.fill_template( '/webapps/reports/index.mako' )
diff --git a/lib/galaxy/webapps/reports/controllers/sample_tracking.py b/lib/galaxy/webapps/reports/controllers/sample_tracking.py
new file mode 100644
index 0000000..eb74fa1
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/sample_tracking.py
@@ -0,0 +1,206 @@
+import calendar
+import logging
+from datetime import date, timedelta
+
+import sqlalchemy as sa
+from sqlalchemy import and_
+from markupsafe import escape
+
+from galaxy import model, util
+from galaxy.web.base.controller import BaseUIController, web
+from galaxy.web.framework.helpers import grids
+from galaxy.webapps.reports.controllers.query import ReportQueryBuilder
+
+log = logging.getLogger( __name__ )
+
+
+class SpecifiedDateListGrid( grids.Grid ):
+
+    class RequestNameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, request ):
+            return escape(request.name)
+
+    class CreateTimeColumn( grids.DateTimeColumn ):
+
+        def get_value( self, trans, grid, request ):
+            return request.create_time
+
+    class UserColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, request ):
+            if request.user:
+                return escape(request.user.email)
+            return 'unknown'
+
+    class EmailColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'All':
+                return query
+            return query.filter( and_( model.Request.table.c.user_id == model.User.table.c.id,
+                                       model.User.table.c.email == column_filter ) )
+
+    class SpecifiedDateColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'All':
+                return query
+            # We are either filtering on a date like YYYY-MM-DD or on a month like YYYY-MM,
+            # so we need to figure out which type of date we have
+            if column_filter.count( '-' ) == 2:
+                # We are filtering on a date like YYYY-MM-DD
+                year, month, day = map( int, column_filter.split( "-" ) )
+                start_date = date( year, month, day )
+                end_date = start_date + timedelta( days=1 )
+                return query.filter( and_( self.model_class.table.c.create_time >= start_date,
+                                           self.model_class.table.c.create_time < end_date ) )
+            if column_filter.count( '-' ) == 1:
+                # We are filtering on a month like YYYY-MM
+                year, month = map( int, column_filter.split( "-" ) )
+                start_date = date( year, month, 1 )
+                end_date = start_date + timedelta( days=calendar.monthrange( year, month )[1] )
+                return query.filter( and_( self.model_class.table.c.create_time >= start_date,
+                                           self.model_class.table.c.create_time < end_date ) )
+
+    # Grid definition
+    use_async = False
+    model_class = model.Request
+    title = "Sequencing Requests"
+    template = '/webapps/reports/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        RequestNameColumn( "Name",
+                           key="name",
+                           attach_popup=False,
+                           filterable="advanced" ),
+        CreateTimeColumn( "Creation Time",
+                          key="create_time",
+                          attach_popup=False ),
+        UserColumn( "User",
+                    key="email",
+                    model_class=model.User,
+                    link=( lambda item: dict( operation="user_per_month", id=item.id, webapp="reports" ) ),
+                    attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        SpecifiedDateColumn( "Specified Date",
+                             key="specified_date",
+                             visible=False ),
+        EmailColumn( "Email",
+                     key="email",
+                     model_class=model.User,
+                     visible=False ),
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    standard_filters = []
+    default_filter = { 'specified_date': 'All' }
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( self.model_class ) \
+                               .join( model.User ) \
+                               .enable_eagerloads( False )
+
+
+class SampleTracking( BaseUIController, ReportQueryBuilder ):
+
+    specified_date_list_grid = SpecifiedDateListGrid()
+
+    @web.expose
+    def specified_date_handler( self, trans, **kwd ):
+        # We add params to the keyword dict in this method in order to rename the param
+        # with an "f-" prefix, simulating filtering by clicking a search link.  We have
+        # to take this approach because the "-" character is illegal in HTTP requests.
+        if 'f-specified_date' in kwd and 'specified_date' not in kwd:
+            # The user clicked a State link in the Advanced Search box, so 'specified_date'
+            # will have been eliminated.
+            pass
+        elif 'specified_date' not in kwd:
+            kwd[ 'f-specified_date' ] = 'All'
+        else:
+            kwd[ 'f-specified_date' ] = kwd[ 'specified_date' ]
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "request_per_month":
+                # The received id is the request id.
+                return trans.response.send_redirect( web.url_for( controller='sample_tracking',
+                                                                  action='request_per_month',
+                                                                  **kwd ) )
+            elif operation == "user_per_month":
+                request_id = kwd.get( 'id', None )
+                request = get_request( trans, request_id )
+                if request.user:
+                    kwd[ 'email' ] = request.user.email
+                else:
+                    kwd[ 'email' ] = None  # For anonymous users ( shouldn't happen with requests )
+                return trans.response.send_redirect( web.url_for( controller='sample_tracking',
+                                                                  action='user_per_month',
+                                                                  **kwd ) )
+        return self.specified_date_list_grid( trans, **kwd )
+
+    @web.expose
+    def per_month_all( self, trans, **kwd ):
+        message = ''
+        q = sa.select( ( self.select_month( model.Request.table.c.create_time ).label( 'date' ),
+                         sa.func.count( model.Request.table.c.id ).label( 'total' ) ),
+                       from_obj=[ sa.outerjoin( model.Request.table, model.User.table ) ],
+                       group_by=self.group_by_month( model.Request.table.c.create_time ),
+                       order_by=[ sa.desc( 'date' ) ] )
+        requests = []
+        for row in q.execute():
+            requests.append( ( row.date.strftime( "%Y-%m" ),
+                               row.total,
+                               row.date.strftime( "%B" ),
+                               row.date.strftime( "%Y" ) ) )
+        return trans.fill_template( '/webapps/reports/requests_per_month_all.mako',
+                                    requests=requests,
+                                    message=message )
+
+    @web.expose
+    def per_user( self, trans, **kwd ):
+        message = ''
+        requests = []
+        q = sa.select( ( model.User.table.c.email.label( 'user_email' ),
+                         sa.func.count( model.Request.table.c.id ).label( 'total' ) ),
+                       from_obj=[ sa.outerjoin( model.Request.table, model.User.table ) ],
+                       group_by=[ 'user_email' ],
+                       order_by=[ sa.desc( 'total' ), 'user_email' ] )
+        for row in q.execute():
+            requests.append( ( row.user_email,
+                               row.total ) )
+        return trans.fill_template( '/webapps/reports/requests_per_user.mako', requests=requests, message=message )
+
+    @web.expose
+    def user_per_month( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = ''
+        email = util.restore_text( params.get( 'email', '' ) )
+        user_id = trans.security.decode_id( params.get( 'id', '' ) )
+        q = sa.select( ( self.select_month( model.Request.table.c.create_time ).label( 'date' ),
+                         sa.func.count( model.Request.table.c.id ).label( 'total' ) ),
+                       whereclause=model.Request.table.c.user_id == user_id,
+                       from_obj=[ model.Request.table ],
+                       group_by=self.group_by_month( model.Request.table.c.create_time ),
+                       order_by=[ sa.desc( 'date' ) ] )
+        requests = []
+        for row in q.execute():
+            requests.append( ( row.date.strftime( "%Y-%m" ),
+                               row.total,
+                               row.date.strftime( "%B" ),
+                               row.date.strftime( "%Y" ) ) )
+        return trans.fill_template( '/webapps/reports/requests_user_per_month.mako',
+                                    email=util.sanitize_text( email ),
+                                    requests=requests,
+                                    message=message )
+
+# ---- Utility methods -------------------------------------------------------
+
+
+def get_request( trans, id ):
+    return trans.sa_session.query( trans.model.Request ).get( trans.security.decode_id( id ) )
diff --git a/lib/galaxy/webapps/reports/controllers/system.py b/lib/galaxy/webapps/reports/controllers/system.py
new file mode 100644
index 0000000..419701a
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/system.py
@@ -0,0 +1,214 @@
+import logging
+import os
+from datetime import datetime, timedelta
+from decimal import Decimal
+
+from sqlalchemy import and_, desc, false, null, true
+from sqlalchemy.orm import eagerload
+
+from galaxy.web.base.controller import BaseUIController, web
+from galaxy import model, util
+
+log = logging.getLogger( __name__ )
+
+
+class System( BaseUIController ):
+    @web.expose
+    def index( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = ''
+        if params.userless_histories_days:
+            userless_histories_days = params.userless_histories_days
+        else:
+            userless_histories_days = '60'
+        if params.deleted_histories_days:
+            deleted_histories_days = params.deleted_histories_days
+        else:
+            deleted_histories_days = '60'
+        if params.deleted_datasets_days:
+            deleted_datasets_days = params.deleted_datasets_days
+        else:
+            deleted_datasets_days = '60'
+        file_path, disk_usage, datasets, file_size_str = self.disk_usage( trans, **kwd )
+        if 'action' in kwd:
+            if kwd['action'] == "userless_histories":
+                userless_histories_days, message = self.userless_histories( trans, **kwd )
+            elif kwd['action'] == "deleted_histories":
+                deleted_histories_days, message = self.deleted_histories( trans, **kwd )
+            elif kwd['action'] == "deleted_datasets":
+                deleted_datasets_days, message = self.deleted_datasets( trans, **kwd )
+        return trans.fill_template( '/webapps/reports/system.mako',
+                                    file_path=file_path,
+                                    disk_usage=disk_usage,
+                                    datasets=datasets,
+                                    file_size_str=file_size_str,
+                                    userless_histories_days=userless_histories_days,
+                                    deleted_histories_days=deleted_histories_days,
+                                    deleted_datasets_days=deleted_datasets_days,
+                                    message=message,
+                                    nice_size=nice_size )
+
+    def userless_histories( self, trans, **kwd ):
+        """The number of userless histories and associated datasets that have not been updated for the specified number of days."""
+        params = util.Params( kwd )
+        message = ''
+        if params.userless_histories_days:
+            userless_histories_days = int( params.userless_histories_days )
+            cutoff_time = datetime.utcnow() - timedelta( days=userless_histories_days )
+            history_count = 0
+            dataset_count = 0
+            for history in trans.sa_session.query( model.History ) \
+                    .filter( and_( model.History.table.c.user_id == null(),
+                    model.History.table.c.deleted == true(),
+                    model.History.table.c.update_time < cutoff_time ) ):
+                for dataset in history.datasets:
+                    if not dataset.deleted:
+                        dataset_count += 1
+                history_count += 1
+            message = "%d userless histories ( including a total of %d datasets ) have not been updated for at least %d days." % ( history_count, dataset_count, userless_histories_days )
+        else:
+            message = "Enter the number of days."
+        return str( userless_histories_days ), message
+
+    def deleted_histories( self, trans, **kwd ):
+        """
+        The number of histories that were deleted more than the specified number of days ago, but have not yet been purged.
+        Also included is the number of datasets associated with the histories.
+        """
+        params = util.Params( kwd )
+        message = ''
+        if params.deleted_histories_days:
+            deleted_histories_days = int( params.deleted_histories_days )
+            cutoff_time = datetime.utcnow() - timedelta( days=deleted_histories_days )
+            history_count = 0
+            dataset_count = 0
+            disk_space = 0
+            histories = trans.sa_session.query( model.History ) \
+                .filter( and_( model.History.table.c.deleted == true(),
+                    model.History.table.c.purged == false(),
+                    model.History.table.c.update_time < cutoff_time ) ) \
+                .options( eagerload( 'datasets' ) )
+
+            for history in histories:
+                for hda in history.datasets:
+                    if not hda.dataset.purged:
+                        dataset_count += 1
+                        try:
+                            disk_space += hda.dataset.file_size
+                        except Exception:
+                            pass
+                history_count += 1
+            message = "%d histories ( including a total of %d datasets ) were deleted more than %d days ago, but have not yet been purged, " \
+                "disk space: %s." % ( history_count, dataset_count, deleted_histories_days, nice_size( disk_space, True ) )
+        else:
+            message = "Enter the number of days."
+        return str( deleted_histories_days ), message
+
+    def deleted_datasets( self, trans, **kwd ):
+        """The number of datasets that were deleted more than the specified number of days ago, but have not yet been purged."""
+        params = util.Params( kwd )
+        message = ''
+        if params.deleted_datasets_days:
+            deleted_datasets_days = int( params.deleted_datasets_days )
+            cutoff_time = datetime.utcnow() - timedelta( days=deleted_datasets_days )
+            dataset_count = 0
+            disk_space = 0
+            for dataset in trans.sa_session.query( model.Dataset ) \
+                .filter( and_( model.Dataset.table.c.deleted == true(),
+                    model.Dataset.table.c.purged == false(),
+                    model.Dataset.table.c.update_time < cutoff_time ) ):
+                dataset_count += 1
+                try:
+                    disk_space += dataset.file_size
+                except Exception:
+                    pass
+            message = "%d datasets were deleted more than %d days ago, but have not yet been purged," \
+                " disk space: %s." % ( dataset_count, deleted_datasets_days, nice_size( disk_space, True ))
+        else:
+            message = "Enter the number of days."
+        return str( deleted_datasets_days ), message
+
+    @web.expose
+    def dataset_info( self, trans, **kwd ):
+        message = ''
+        dataset = trans.sa_session.query( model.Dataset ).get( trans.security.decode_id( kwd.get( 'id', '' ) ) )
+        # Get all associated hdas and lddas that use the same disk file.
+        associated_hdas = trans.sa_session.query( trans.model.HistoryDatasetAssociation ) \
+            .filter( and_( trans.model.HistoryDatasetAssociation.deleted == false(),
+            trans.model.HistoryDatasetAssociation.dataset_id == dataset.id ) ) \
+            .all()
+        associated_lddas = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ) \
+            .filter( and_( trans.model.LibraryDatasetDatasetAssociation.deleted == false(),
+            trans.model.LibraryDatasetDatasetAssociation.dataset_id == dataset.id ) ) \
+            .all()
+        return trans.fill_template( '/webapps/reports/dataset_info.mako',
+                                    dataset=dataset,
+                                    associated_hdas=associated_hdas,
+                                    associated_lddas=associated_lddas,
+                                    message=message )
+
+    def get_disk_usage( self, file_path ):
+        df_cmd = 'df -h ' + file_path
+        is_sym_link = os.path.islink( file_path )
+        file_system = disk_size = disk_used = disk_avail = disk_cap_pct = mount = None
+        df_file = os.popen( df_cmd )
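+        # Typical `df -h` output line this parses (assuming a GNU coreutils layout):
+        #   /dev/sda1  100G  42G  58G  42%  /galaxy/database/files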
+        while True:
+            df_line = df_file.readline()
+            df_line = df_line.strip()
+            if df_line:
+                df_line = df_line.lower()
+                if 'filesystem' in df_line or 'proc' in df_line:
+                    continue
+                elif is_sym_link:
+                    if ':' in df_line and '/' in df_line:
+                        mount = df_line
+                    else:
+                        try:
+                            disk_size, disk_used, disk_avail, disk_cap_pct, file_system = df_line.split()
+                            break
+                        except Exception:
+                            pass
+                else:
+                    try:
+                        file_system, disk_size, disk_used, disk_avail, disk_cap_pct, mount = df_line.split()
+                        break
+                    except Exception:
+                        pass
+            else:
+                break  # EOF
+        df_file.close()
+        return ( file_system, disk_size, disk_used, disk_avail, disk_cap_pct, mount )
+
+    @web.expose
+    def disk_usage( self, trans, **kwd ):
+        file_path = trans.app.config.file_path
+        disk_usage = self.get_disk_usage( file_path )
+        min_file_size = 2 ** 32  # 4 GB
+        file_size_str = nice_size( min_file_size )
+        datasets = trans.sa_session.query( model.Dataset ) \
+                                   .filter( and_( model.Dataset.table.c.purged == false(),
+                                                  model.Dataset.table.c.file_size > min_file_size ) ) \
+                                   .order_by( desc( model.Dataset.table.c.file_size ) )
+        return file_path, disk_usage, datasets, file_size_str
+
+
+def nice_size(size, include_bytes=False):
+    """Returns a readably formatted string with the size"""
+    niced = False
+    nice_string = "%s bytes" % size
+    try:
+        nsize = Decimal(size)
+        for x in ['bytes', 'KB', 'MB', 'GB']:
+            if nsize.compare(Decimal("1024.0")) == Decimal("-1"):
+                nice_string = "%3.1f %s" % (nsize, x)
+                niced = True
+                break
+            nsize /= Decimal("1024.0")
+        if not niced:
+            nice_string = "%3.1f %s" % (nsize, 'TB')
+            niced = True
+        if include_bytes and x != 'bytes':
+            nice_string = "%s (%s bytes)" % (nice_string, size)
+    except Exception:
+        pass
+    return nice_string
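+
+# Illustrative behaviour (a sketch, assuming the 1024-based scaling coded above):
+#   nice_size(1024)          -> "1.0 KB"
+#   nice_size(2 ** 32)       -> "4.0 GB"
+#   nice_size(2 ** 32, True) -> "4.0 GB (4294967296 bytes)"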
diff --git a/lib/galaxy/webapps/reports/controllers/tools.py b/lib/galaxy/webapps/reports/controllers/tools.py
new file mode 100644
index 0000000..5612183
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/tools.py
@@ -0,0 +1,365 @@
+
+import collections
+import logging
+import galaxy.model
+import sqlalchemy as sa
+
+from galaxy import util
+from galaxy.web.base.controller import BaseUIController, web
+
+from sqlalchemy import and_
+from datetime import timedelta
+from markupsafe import escape
+
+log = logging.getLogger( __name__ )
+
+
+def int_to_octet(size):
+    try:
+        size = float(size)
+    except ValueError:
+        return "???"
+    except TypeError:
+        if size is None:
+            return "0 o"
+        return "???"
+    units = ("o", "Ko", "Mo", "Go", "To")
+    no_unit = 0
+    while (size >= 1000):
+        size /= 1000.
+        no_unit += 1
+    try:
+        return "%.2f %s" % ( size, units[no_unit] )
+    except IndexError:
+        return "%.0f %s" % ( size * ( ( no_unit - len( units ) + 1 ) * 1000. ), units[-1] )
+
+
+class Tools( BaseUIController ):
+    """
+    Class defining the functions the reports webapp uses to query the
+    database and fill templates before they are displayed.
+    The name of each function must match the "action" field of the
+    corresponding "href" dict in the .mako templates (templates/webapps/reports).
+    """
+
+    def formatted(self, date, colored=False):
+        """Render a stringified timedelta as a compact, human-readable duration."""
+        parts = str(date).split(',')
+        if len(parts) == 2:
+            returned = "%s %dH" % (parts[0], int(parts[1].split(':')[0]))
+            if colored:
+                return '<font color="red">' + returned + '</font>'
+            return returned
+        else:
+            parts = tuple([float(_) for _ in str(date).split(':')])
+            if parts[0]:
+                returned = '%d h. %d min.' % parts[:2]
+                if colored:
+                    return '<font color="orange">' + returned + '</font>'
+                return returned
+            if parts[1]:
+                return "%d min. %d sec." % parts[1:3]
+            return "%.1f sec." % parts[2]
+
+    @web.expose
+    def tools_and_job_state( self, trans, **kwd ):
+        """
+        Fill the tools_and_job_state.mako template with:
+            - the name of the tool
+            - the number of jobs using this tool in state 'ok'
+            - the number of jobs using this tool in error
+        """
+
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        user_cutoff = int( kwd.get( 'user_cutoff', 60 ) )
+
+        # Sort by tool name, or by the number of jobs in the 'ok' or 'error' state.
+        sort_by = kwd.get( 'sorting', 'Tool' )
+        sorting = 0 if sort_by == 'Tool' else 1 if sort_by == 'ok' else 2
+        descending = 1 if kwd.get( 'descending', 'desc' ) == 'desc' else -1
+        sort_functions = ( lambda first, second: descending if first.lower() > second.lower() else -descending,
+                           lambda first, second: -descending if tools_and_jobs_ok.get( first, 0 ) > tools_and_jobs_ok.get( second, 0 ) else descending,
+                           lambda first, second: -descending if tools_and_jobs_error.get( first, 0 ) > tools_and_jobs_error.get( second, 0 ) else descending )
+
+        data = collections.OrderedDict()
+
+        # select count(id), tool_id from job where state='ok' group by tool_id;
+        tools_and_jobs_ok = sa.select( ( galaxy.model.Job.table.c.tool_id.label( 'tool' ),
+                                         sa.func.count( galaxy.model.Job.table.c.id ).label( 'job' ) ),
+                                       from_obj=[ galaxy.model.Job.table],
+                                       whereclause=(galaxy.model.Job.table.c.state == 'ok'),
+                                       group_by=[ 'tool' ] )
+
+        # select count(id), tool_id from job where state='error' group by tool_id;
+        tools_and_jobs_error = sa.select( ( galaxy.model.Job.table.c.tool_id.label( 'tool' ),
+                                            sa.func.count( galaxy.model.Job.table.c.id ).label( 'job' ) ),
+                                          from_obj=[ galaxy.model.Job.table],
+                                          whereclause=(galaxy.model.Job.table.c.state == 'error'),
+                                          group_by=[ 'tool' ] )
+
+        tools_and_jobs_ok = dict( list( tools_and_jobs_ok.execute() ) )
+        tools_and_jobs_error = dict( list( tools_and_jobs_error.execute() ) )
+
+        # select each job name one time
+        tools = list(set(tools_and_jobs_ok.keys()) | set(tools_and_jobs_error.keys()))
+        tools.sort(sort_functions[ sorting ])
+
+        for tool in tools:
+            data[tool] = (str( tools_and_jobs_ok.get(tool, '-') ), str( tools_and_jobs_error.get(tool, '-') ) )
+
+        return trans.fill_template( '/webapps/reports/tools_and_job_state.mako',
+                                    data=data,
+                                    user_cutoff=user_cutoff,
+                                    sorting=sorting,
+                                    descending=descending,
+                                    message=message )
+
+    @web.expose
+    def tools_and_job_state_per_month(self, trans, **kwd ):
+        """
+        Fill the tools_and_job_state_per_month.mako template with:
+            - the name of the tool
+            - the number of jobs using this tool in state 'ok'
+            - the number of jobs using this tool in error
+        """
+
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        user_cutoff = int( kwd.get( 'user_cutoff', 60 ) )
+
+        tool = kwd.get( 'tool', None)
+
+        if tool is None:
+            raise ValueError("Tool can't be None")
+
+        data = collections.OrderedDict()
+
+        # select count(id), create_time from job where state='ok' and tool_id=$tool group by date;
+        date_and_jobs_ok = sa.select( ( sa.func.date( galaxy.model.Job.table.c.create_time ).label( 'date' ),
+                                        sa.func.count( galaxy.model.Job.table.c.id ).label( 'job' ) ),
+                                      from_obj=[ galaxy.model.Job.table],
+                                      whereclause=and_( galaxy.model.Job.table.c.state == 'ok', galaxy.model.Job.table.c.tool_id == tool ),
+                                      group_by=[ 'date' ] )
+
+        # select count(id), create_time from job where state='error' and tool_id=$tool group by date;
+        date_and_jobs_error = sa.select( ( sa.func.date( galaxy.model.Job.table.c.create_time ).label( 'date' ),
+                                           sa.func.count( galaxy.model.Job.table.c.id ).label( 'job' ) ),
+                                         from_obj=[ galaxy.model.Job.table],
+                                         whereclause=and_( galaxy.model.Job.table.c.state == 'error', galaxy.model.Job.table.c.tool_id == tool ),
+                                         group_by=[ 'date' ] )
+
+        date_and_jobs_ok = dict( list( date_and_jobs_ok.execute() ) )
+        date_and_jobs_error = dict( list( date_and_jobs_error.execute() ) )
+
+        # select each date
+        dates = list(set(date_and_jobs_ok.keys()) | set(date_and_jobs_error.keys()))
+        dates.sort(reverse=True)
+        for date in dates:
+            date_key = date.strftime( "%B %Y" )
+            if date_key not in data:
+                data[date_key] = [int( date_and_jobs_ok.get(date, 0) ), int( date_and_jobs_error.get(date, 0) ) ]
+            else:
+                data[date_key][0] += int( date_and_jobs_ok.get( date, 0 ) )
+                data[date_key][1] += int( date_and_jobs_error.get( date, 0 ) )
+
+        return trans.fill_template( '/webapps/reports/tools_and_job_state_per_month.mako',
+                                    data=data,
+                                    tool=tool,
+                                    user_cutoff=user_cutoff,
+                                    message=message )
+
+    @web.expose
+    def tool_execution_time(self, trans, **kwd):
+        """
+        Fill the tool_execution_time.mako template with the following information:
+            - Tool name
+            - Tool average execution time
+            - last job execution time
+            - min and max execution time
+        """
+
+        # List of tools + average job execution time + execution time of the last job + min/max times per month (?)
+        user_cutoff = int(kwd.get("user_cutoff", 60))
+        sort_by = kwd.get("sort_by", "tool")
+        descending = 1 if kwd.get( 'descending', 'desc' ) == 'desc' else -1
+        sort_by = 0 if sort_by == "tool" else 1 if sort_by == "avg" else 2 if sort_by == "min" else 3
+        color = kwd.get("color", '') == "True"
+
+        data = {}
+        ordered_data = collections.OrderedDict()
+
+        def field_sort(first, second, field):
+            return descending if data[first][field] < data[second][field] else -descending
+
+        sort_functions = [
+            lambda first, second: -descending if first.lower() < second.lower() else descending,
+            lambda first, second: field_sort(first, second, "avg"),
+            lambda first, second: field_sort(first, second, "min"),
+            lambda first, second: field_sort(first, second, "max")]
+
+        jobs_times = sa.select( ( galaxy.model.Job.table.c.tool_id.label( "name" ),
+                                  galaxy.model.Job.table.c.create_time.label("create_time"),
+                                  galaxy.model.Job.table.c.update_time.label("update_time"),
+                                  galaxy.model.Job.table.c.update_time - galaxy.model.Job.table.c.create_time ),
+                                from_obj=[galaxy.model.Job.table])
+
+        jobs_times = [(name, (create, update, time)) for name, create, update, time in jobs_times.execute()]
+        for tool, attr in jobs_times:
+            if tool not in data:
+                data[tool] = { "last": [(attr[1], attr[0])], "avg": [attr[2]] }
+            else:
+                data[tool]["last"].append((attr[1], attr[0]))
+                data[tool]["avg"].append(attr[2])
+
+        for tool in data:
+            data[tool]["min"] = min(data[tool]["avg"])
+            data[tool]["max"] = max(data[tool]["avg"])
+            last = max(data[tool]["last"])
+            data[tool]["last"] = last[0] - last[1]
+            data[tool]["avg"] = sum(data[tool]["avg"], timedelta()) / len(data[tool]["avg"])
+
+        tools = data.keys()
+        tools.sort(sort_functions[sort_by])
+        if user_cutoff:
+            tools = tools[:user_cutoff]
+        for tool in tools:
+            ordered_data[tool] = { "min": self.formatted(data[tool]["min"], color),
+                                   "max": self.formatted(data[tool]["max"], color),
+                                   "avg": self.formatted(data[tool]["avg"], color),
+                                   "last": self.formatted(data[tool]["last"], color) }
+
+        return trans.fill_template( '/webapps/reports/tool_execution_time.mako',
+                                    data=ordered_data,
+                                    descending=descending,
+                                    user_cutoff=user_cutoff,
+                                    sort_by=sort_by )
+
+    @web.expose
+    def tool_execution_time_per_month(self, trans, **kwd):
+        """
+        Fill the tool_execution_time_per_month.mako template with the following information:
+            - Tool average execution time
+            - last job execution time
+            - min and max execution time
+        """
+
+        # List of tools + average job execution time + execution time of the last job + min/max times per month (?)
+        user_cutoff = int(kwd.get("user_cutoff", 60))
+        sort_by = kwd.get("sort_by", "month")
+        descending = 1 if kwd.get( 'descending', 'desc' ) == 'desc' else -1
+        sort_by = 0 if sort_by == "month" else 1 if sort_by == "min" else 2 if sort_by == "max" else 3
+        tool = kwd.get("tool", None)
+        color = kwd.get("color", '') == "True"
+
+        if tool is None:
+            raise ValueError("Tool can't be None")
+
+        ordered_data = collections.OrderedDict()
+        sort_functions = [(lambda first, second, i=i: descending if first[i] < second[i] else -descending) for i in range(4)]
+
+        jobs_times = sa.select( ( sa.func.date_trunc('month', galaxy.model.Job.table.c.create_time ).label('date'),
+                                  sa.func.max(galaxy.model.Job.table.c.update_time - galaxy.model.Job.table.c.create_time),
+                                  sa.func.avg(galaxy.model.Job.table.c.update_time - galaxy.model.Job.table.c.create_time),
+                                  sa.func.min(galaxy.model.Job.table.c.update_time - galaxy.model.Job.table.c.create_time) ),
+                                from_obj=[galaxy.model.Job.table],
+                                whereclause=galaxy.model.Job.table.c.tool_id == tool,
+                                group_by=['date'] )
+
+        months = list(jobs_times.execute())
+        months.sort(sort_functions[sort_by])
+        if user_cutoff:
+            months = months[:user_cutoff]
+
+        for month in months:
+            ordered_data[str(month[0]).split(' ')[0][:-3]] = ( self.formatted(month[1], color),
+                                                               self.formatted(month[2], color),
+                                                               self.formatted(month[3], color) )
+
+        return trans.fill_template( '/webapps/reports/tool_execution_time_per_month.mako',
+                                    data=ordered_data,
+                                    tool=tool,
+                                    descending=descending,
+                                    user_cutoff=user_cutoff,
+                                    sort_by=sort_by )
+
+    @web.expose
+    def tool_error_messages(self, trans, **kwd):
+        tool_name = kwd.get("tool", None)
+        descending = 1 if kwd.get("descending", 'desc') == "desc" else -1
+        sort_by = 0 if kwd.get("sort_by", "time") == "time" else 1
+        cutoff = int(kwd.get("user_cutoff", 60))
+        sort_functions = ( lambda _a, _b: -descending if counter[_a][1] > counter[_b][1] else descending,
+                           lambda _a, _b: -descending if counter[_a][0] > counter[_b][0] else descending )
+
+        if tool_name is None:
+            raise ValueError("Tool can't be None")
+        tool_errors = [ [unicode(a), b] for a, b in
+                        sa.select((galaxy.model.Job.table.c.stderr, galaxy.model.Job.table.c.create_time),
+                        from_obj=[galaxy.model.Job.table],
+                        whereclause=and_( galaxy.model.Job.table.c.tool_id == tool_name,
+                                          galaxy.model.Job.table.c.state == 'error')).execute() ]
+
+        counter = {}
+        for error in tool_errors:
+            try:
+                error[0] = unicode(error[0].decode("utf-8"))
+                # encoding tested:
+                # latin-1 ; iso-8859-1 ; alien ; cenc ; cp037 ; cp437 ; base64 ; utf-8 ; utf-16 ; ascii ; hex
+            except UnicodeEncodeError:
+                for no, lettre in enumerate(error[0]):
+                    try:
+                        str(lettre.decode("utf-8"))
+                    except UnicodeEncodeError:
+                        try:
+                            error[0] = error[0].replace(error[0][no], '?')
+                        except UnicodeEncodeError:
+                            error[0] = "This error contains special character and can't be displayed."
+                            break
+            if error[0] in counter:
+                counter[error[0]][0] += 1
+            else:
+                counter[error[0]] = [1, error[1]]
+
+        data = collections.OrderedDict()
+        keys = counter.keys()
+        keys.sort(sort_functions[sort_by])
+        if cutoff:
+            keys = keys[:cutoff]
+
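+        # Collapse repetitive messages: when a long error repeats the same line
+        # many times (e.g. a stack trace), fold the run into one line plus a count.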
+        spaces = [' ', '\t', '    ']
+        for key in keys:
+            new_key = '<br/>'.join([_ for _ in key.split('\n') if _ and _ not in spaces])
+            if len(new_key) >= 100:
+                to_replace = []
+                words = key.split('\n')
+                for word in words:
+                    if word in to_replace:
+                        continue
+                    if words.count(word) > 1:
+                        to_replace.append(word)
+                for word in to_replace:
+                    sentence = ("<br/>" + word) * 2
+                    count = 2
+                    while sentence + "<br/>" + word in new_key:
+                        sentence += "<br/>" + word
+                        count += 1
+                    if sentence in new_key:
+                        new_key = new_key.replace(sentence, '<br/>' + word + " [this line appears %d times]" % count)
+            data[new_key] = counter[key]
+
+        return trans.fill_template( "/webapps/reports/tool_error_messages.mako",
+                                    data=data,
+                                    descending=descending,
+                                    tool_name=tool_name,
+                                    sort_by=sort_by,
+                                    user_cutoff=cutoff )
diff --git a/lib/galaxy/webapps/reports/controllers/users.py b/lib/galaxy/webapps/reports/controllers/users.py
new file mode 100644
index 0000000..b842637
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/users.py
@@ -0,0 +1,216 @@
+import calendar
+import logging
+import operator
+import galaxy.model
+import sqlalchemy as sa
+
+from galaxy import util
+from galaxy.web.base.controller import BaseUIController, web
+from galaxy.webapps.reports.controllers.jobs import sorter
+from galaxy.webapps.reports.controllers.query import ReportQueryBuilder
+
+from datetime import datetime, date, timedelta
+from markupsafe import escape
+from sqlalchemy import false
+
+log = logging.getLogger( __name__ )
+
+
+class Users( BaseUIController, ReportQueryBuilder ):
+
+    @web.expose
+    def registered_users( self, trans, **kwd ):
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        num_users = trans.sa_session.query( galaxy.model.User ).count()
+        return trans.fill_template( '/webapps/reports/registered_users.mako', num_users=num_users, message=message )
+
+    @web.expose
+    def registered_users_per_month( self, trans, **kwd ):
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        specs = sorter( 'date', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+
+        q = sa.select( ( self.select_month( galaxy.model.User.table.c.create_time ).label( 'date' ),
+                         sa.func.count( galaxy.model.User.table.c.id ).label( 'num_users' ) ),
+                       from_obj=[ galaxy.model.User.table ],
+                       group_by=self.group_by_month( galaxy.model.User.table.c.create_time ),
+                       order_by=[ _order ] )
+        users = []
+        for row in q.execute():
+            users.append( ( row.date.strftime( "%Y-%m" ),
+                            row.num_users,
+                            row.date.strftime( "%B" ),
+                            row.date.strftime( "%Y" ) ) )
+        return trans.fill_template( '/webapps/reports/registered_users_per_month.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    users=users,
+                                    message=message )
+
+    @web.expose
+    def specified_month( self, trans, **kwd ):
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        # If specified_date is not received, we'll default to the current month
+        specified_date = kwd.get( 'specified_date', datetime.utcnow().strftime( "%Y-%m-%d" ) )
+        specified_month = specified_date[ :7 ]
+        year, month = map( int, specified_month.split( "-" ) )
+        start_date = date( year, month, 1 )
+        end_date = start_date + timedelta( days=calendar.monthrange( year, month )[1] )
+        month_label = start_date.strftime( "%B" )
+        year_label = start_date.strftime( "%Y" )
+        q = sa.select( ( self.select_day( galaxy.model.User.table.c.create_time ).label( 'date' ),
+                         sa.func.count( galaxy.model.User.table.c.id ).label( 'num_users' ) ),
+                       whereclause=sa.and_( galaxy.model.User.table.c.create_time >= start_date,
+                                            galaxy.model.User.table.c.create_time < end_date ),
+                       from_obj=[ galaxy.model.User.table ],
+                       group_by=self.group_by_day( galaxy.model.User.table.c.create_time ),
+                       order_by=[ sa.desc( 'date' ) ] )
+        users = []
+        for row in q.execute():
+            users.append( ( row.date.strftime( "%Y-%m-%d" ),
+                            row.date.strftime( "%d" ),
+                            row.num_users,
+                            row.date.strftime( "%A" ) ) )
+        return trans.fill_template( '/webapps/reports/registered_users_specified_month.mako',
+                                    month_label=month_label,
+                                    year_label=year_label,
+                                    month=month,
+                                    users=users,
+                                    message=message )
+
+    @web.expose
+    def specified_date( self, trans, **kwd ):
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        # If specified_date is not received, we'll default to the current date
+        specified_date = kwd.get( 'specified_date', datetime.utcnow().strftime( "%Y-%m-%d" ) )
+        year, month, day = map( int, specified_date.split( "-" ) )
+        start_date = date( year, month, day )
+        end_date = start_date + timedelta( days=1 )
+        day_of_month = start_date.strftime( "%d" )
+        day_label = start_date.strftime( "%A" )
+        month_label = start_date.strftime( "%B" )
+        year_label = start_date.strftime( "%Y" )
+        q = sa.select( ( self.select_day( galaxy.model.User.table.c.create_time ).label( 'date' ),
+                         galaxy.model.User.table.c.email ),
+                       whereclause=sa.and_( galaxy.model.User.table.c.create_time >= start_date,
+                                            galaxy.model.User.table.c.create_time < end_date ),
+                       from_obj=[ galaxy.model.User.table ],
+                       order_by=[ galaxy.model.User.table.c.email ] )
+        users = []
+        for row in q.execute():
+            users.append( row.email )
+        return trans.fill_template( '/webapps/reports/registered_users_specified_date.mako',
+                                    specified_date=start_date,
+                                    day_label=day_label,
+                                    month_label=month_label,
+                                    year_label=year_label,
+                                    day_of_month=day_of_month,
+                                    users=users,
+                                    message=message )
+
+    @web.expose
+    def last_access_date( self, trans, **kwd ):
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        specs = sorter( 'one', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+
+        def name_to_num( name ):
+            # Map the sort id to a tuple index: 'zero' sorts on the email
+            # column (index 0); anything else sorts on the date (index 1).
+            if name is not None and name.lower() == 'zero':
+                return 0
+            return 1
+
+        if order == "desc":
+            _order = True
+        else:
+            _order = False
+
+        days_not_logged_in = kwd.get( 'days_not_logged_in', 90 )
+        if not days_not_logged_in:
+            days_not_logged_in = 0
+        cutoff_time = datetime.utcnow() - timedelta( days=int( days_not_logged_in ) )
+        users = []
+        for user in trans.sa_session.query( galaxy.model.User ) \
+                                    .filter( galaxy.model.User.table.c.deleted == false() ) \
+                                    .order_by( galaxy.model.User.table.c.email ):
+            if user.galaxy_sessions:
+                last_galaxy_session = user.galaxy_sessions[ 0 ]
+                if last_galaxy_session.update_time < cutoff_time:
+                    users.append( ( user.email, last_galaxy_session.update_time.strftime( "%Y-%m-%d" ) ) )
+            else:
+                # The user has never logged in
+                users.append( ( user.email, "never logged in" ) )
+        users = sorted( users, key=operator.itemgetter( name_to_num(sort_id) ), reverse=_order )
+        return trans.fill_template( '/webapps/reports/users_last_access_date.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    days_not_logged_in=days_not_logged_in,
+                                    users=users,
+                                    message=message )
+
+    @web.expose
+    def user_disk_usage( self, trans, **kwd ):
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        specs = sorter( 'disk_usage', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+
+        if order == "desc":
+            _order = True
+        else:
+            _order = False
+
+        user_cutoff = int( kwd.get( 'user_cutoff', 60 ) )
+        # disk_usage isn't indexed
+        users = sorted( trans.sa_session.query( galaxy.model.User ).all(), key=operator.attrgetter( str(sort_id) ), reverse=_order )
+        if user_cutoff:
+            users = users[:user_cutoff]
+        return trans.fill_template( '/webapps/reports/users_user_disk_usage.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    users=users,
+                                    user_cutoff=user_cutoff,
+                                    message=message )
+
+    @web.expose
+    def history_per_user( self, trans, **kwd ):
+        message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+        user_cutoff = int( kwd.get( 'user_cutoff', 60 ) )
+        sorting = 0 if kwd.get( 'sorting', 'User' ) == 'User' else 1
+        descending = 1 if kwd.get( 'descending', 'desc' ) == 'desc' else -1
+        # cmp-style comparators for list.sort() (Python 2): index 0 sorts by
+        # username, index 1 by history count; ``descending`` flips the order.
+        sorting_functions = [
+            lambda first, second: descending if first[0].lower() > second[0].lower() else -descending,
+            lambda first, second: descending if first[1] < second[1] else -descending ]
+
+        req = sa.select(
+            ( sa.func.count( galaxy.model.History.table.c.id ).label( 'history' ),
+              galaxy.model.User.table.c.username.label( 'username' ) ),
+            from_obj=[ sa.outerjoin( galaxy.model.History.table, galaxy.model.User.table ) ],
+            whereclause=galaxy.model.History.table.c.user_id == galaxy.model.User.table.c.id,
+            group_by=[ 'username' ],
+            order_by=[ sa.desc( 'username' ), 'history' ] )
+
+        histories = [ (_.username if _.username is not None else "Unknown", _.history) for _ in req.execute() ]
+        histories.sort( sorting_functions[ sorting ] )
+        if user_cutoff != 0:
+            histories = histories[:user_cutoff]
+
+        return trans.fill_template( '/webapps/reports/history_per_user.mako',
+                                    histories=histories,
+                                    user_cutoff=user_cutoff,
+                                    sorting=sorting,
+                                    descending=descending,
+                                    message=message )
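
A note for readers tracing the queries in users.py above: the helpers
select_month(), group_by_month(), select_day() and group_by_day() come from
ReportQueryBuilder (controllers/query.py), which is not part of this hunk. A
minimal sketch of what such helpers can look like, assuming a PostgreSQL
backend with date_trunc(); the class body below is illustrative only, not the
shipped implementation:

    import sqlalchemy as sa

    class ReportQueryBuilderSketch(object):
        def select_month(self, column):
            # Truncate a timestamp column to the first day of its month.
            return sa.func.date_trunc('month', column)

        def group_by_month(self, column):
            return [sa.func.date_trunc('month', column)]

        def select_day(self, column):
            # Truncate a timestamp column to midnight of its day.
            return sa.func.date_trunc('day', column)

        def group_by_day(self, column):
            return [sa.func.date_trunc('day', column)]
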
diff --git a/lib/galaxy/webapps/reports/controllers/workflows.py b/lib/galaxy/webapps/reports/controllers/workflows.py
new file mode 100644
index 0000000..314f3c6
--- /dev/null
+++ b/lib/galaxy/webapps/reports/controllers/workflows.py
@@ -0,0 +1,464 @@
+import calendar
+import logging
+import re
+from collections import namedtuple
+from datetime import datetime, date, timedelta
+from math import ceil, floor
+
+import sqlalchemy as sa
+from markupsafe import escape
+from sqlalchemy import and_
+
+from galaxy import model, util
+from galaxy.web.base.controller import BaseUIController, web
+from galaxy.web.framework.helpers import grids
+from galaxy.webapps.reports.controllers.jobs import sorter, get_spark_time
+from galaxy.webapps.reports.controllers.query import ReportQueryBuilder
+
+log = logging.getLogger( __name__ )
+
+
+class SpecifiedDateListGrid( grids.Grid ):
+
+    class WorkflowNameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, stored_workflow ):
+            return escape(stored_workflow.name)
+
+    class CreateTimeColumn( grids.DateTimeColumn ):
+
+        def get_value( self, trans, grid, stored_workflow ):
+            return stored_workflow.create_time
+
+    class UserColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, stored_workflow ):
+            if stored_workflow.user:
+                return escape(stored_workflow.user.email)
+            return 'unknown'
+
+    class EmailColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'All':
+                return query
+            return query.filter( and_( model.StoredWorkflow.table.c.user_id == model.User.table.c.id,
+                                       model.User.table.c.email == column_filter ) )
+
+    class SpecifiedDateColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'All':
+                return query
+            # We are either filtering on a date like YYYY-MM-DD or on a month like YYYY-MM,
+            # so we need to figure out which type of date we have
+            if column_filter.count( '-' ) == 2:
+                # We are filtering on a date like YYYY-MM-DD
+                year, month, day = map( int, column_filter.split( "-" ) )
+                start_date = date( year, month, day )
+                end_date = start_date + timedelta( days=1 )
+                return query.filter( and_( self.model_class.table.c.create_time >= start_date,
+                                           self.model_class.table.c.create_time < end_date ) )
+            if column_filter.count( '-' ) == 1:
+                # We are filtering on a month like YYYY-MM
+                year, month = map( int, column_filter.split( "-" ) )
+                start_date = date( year, month, 1 )
+                end_date = start_date + timedelta( days=calendar.monthrange( year, month )[1] )
+                return query.filter( and_( self.model_class.table.c.create_time >= start_date,
+                                           self.model_class.table.c.create_time < end_date ) )
+
+    # Grid definition
+    use_async = False
+    model_class = model.StoredWorkflow
+    title = "Workflows"
+    template = '/webapps/reports/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        WorkflowNameColumn( "Name",
+                            key="name",
+                            attach_popup=False,
+                            filterable="advanced" ),
+        CreateTimeColumn( "Creation Time",
+                          key="create_time",
+                          attach_popup=False ),
+        UserColumn( "User",
+                    key="email",
+                    model_class=model.User,
+                    link=( lambda item: dict( operation="user_per_month", id=item.id, webapp="reports" ) ),
+                    attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        SpecifiedDateColumn( "Specified Date",
+                             key="specified_date",
+                             visible=False ),
+        EmailColumn( "Email",
+                     key="email",
+                     model_class=model.User,
+                     visible=False ),
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    standard_filters = []
+    default_filter = { 'specified_date': 'All' }
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = True
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( self.model_class ) \
+                               .join( model.User ) \
+                               .enable_eagerloads( False )
+
+
+class Workflows( BaseUIController, ReportQueryBuilder ):
+
+    specified_date_list_grid = SpecifiedDateListGrid()
+
+    @web.expose
+    def specified_date_handler( self, trans, **kwd ):
+        # We add params to the keyword dict in this method in order to rename the param
+        # with an "f-" prefix, simulating filtering by clicking a search link.  We have
+        # to take this approach because the "-" character is illegal in HTTP requests.
+        if 'f-specified_date' in kwd and 'specified_date' not in kwd:
+            # The user clicked a State link in the Advanced Search box, so 'specified_date'
+            # will have been eliminated.
+            pass
+        elif 'specified_date' not in kwd:
+            kwd[ 'f-specified_date' ] = 'All'
+        else:
+            kwd[ 'f-specified_date' ] = kwd[ 'specified_date' ]
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "workflow_per_month":
+                # The received id is the stored_workflow id.
+                return trans.response.send_redirect( web.url_for( controller='workflows',
+                                                                  action='workflow_per_month',
+                                                                  **kwd ) )
+            elif operation == "user_per_month":
+                stored_workflow_id = kwd.get( 'id', None )
+                workflow = get_workflow( trans, stored_workflow_id )
+                if workflow.user:
+                    kwd[ 'email' ] = workflow.user.email
+                else:
+                    kwd[ 'email' ] = None  # For anonymous users ( shouldn't happen with workflows )
+                return trans.response.send_redirect( web.url_for( controller='workflows',
+                                                                  action='user_per_month',
+                                                                  **kwd ) )
+        return self.specified_date_list_grid( trans, **kwd )
+
+    @web.expose
+    def per_month_all( self, trans, **kwd ):
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        specs = sorter( 'date', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        # Paging parameters: ``entries`` rows per page; fetch four pages' worth.
+        entries = int( kwd.get( 'entries', 10 ) )
+        limit = entries * 4
+        offset = int( kwd.get( 'offset', 0 ) )
+        page = int( kwd.get( 'page', 1 ) )
+
+        q = sa.select( ( self.select_month( model.StoredWorkflow.table.c.create_time ).label( 'date' ),
+                         sa.func.count( model.StoredWorkflow.table.c.id ).label( 'total_workflows' ) ),
+                       from_obj=[ sa.outerjoin( model.StoredWorkflow.table, model.User.table ) ],
+                       group_by=self.group_by_month( model.StoredWorkflow.table.c.create_time ),
+                       order_by=[ _order ],
+                       offset=offset,
+                       limit=limit )
+
+        all_workflows = sa.select( ( self.select_day( model.StoredWorkflow.table.c.create_time ).label( 'date' ),
+                                     model.StoredWorkflow.table.c.id ) )
+
+        trends = dict()
+        for workflow in all_workflows.execute():
+            workflow_day = int(workflow.date.strftime("%-d")) - 1
+            workflow_month = int(workflow.date.strftime("%-m"))
+            workflow_month_name = workflow.date.strftime("%B")
+            workflow_year = workflow.date.strftime("%Y")
+            key = str( workflow_month_name + workflow_year)
+
+            try:
+                trends[key][workflow_day] += 1
+            except KeyError:
+                workflow_year = int(workflow_year)
+                wday, day_range = calendar.monthrange(workflow_year, workflow_month)
+                trends[key] = [0] * day_range
+                trends[key][workflow_day] += 1
+
+        workflows = []
+        for row in q.execute():
+            month_name = row.date.strftime("%B")
+            year = int(row.date.strftime("%Y"))
+
+            workflows.append( ( row.date.strftime( "%Y-%m" ),
+                                row.total_workflows,
+                                month_name,
+                                year ) )
+
+        pages_found = ceil(len(workflows) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/workflows_per_month_all.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    trends=trends,
+                                    workflows=workflows,
+                                    message=message,
+                                    page_specs=page_specs )
+
+    @web.expose
+    def per_user( self, trans, **kwd ):
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        specs = sorter( 'user_email', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        time_period = kwd.get('spark_time')
+        time_period, _time_period = get_spark_time( time_period )
+        spark_limit = 30
+        # Paging parameters: ``entries`` rows per page; fetch four pages' worth.
+        entries = int( kwd.get( 'entries', 10 ) )
+        limit = entries * 4
+        offset = int( kwd.get( 'offset', 0 ) )
+        page = int( kwd.get( 'page', 1 ) )
+
+        workflows = []
+        q = sa.select( ( model.User.table.c.email.label( 'user_email' ),
+                         sa.func.count( model.StoredWorkflow.table.c.id ).label( 'total_workflows' ) ),
+                       from_obj=[ sa.outerjoin( model.StoredWorkflow.table, model.User.table ) ],
+                       group_by=[ 'user_email' ],
+                       order_by=[ _order ],
+                       offset=offset,
+                       limit=limit )
+
+        all_workflows_per_user = sa.select( ( model.User.table.c.email.label( 'user_email' ),
+                                              self.select_day( model.StoredWorkflow.table.c.create_time ).label( 'date' ),
+                                              model.StoredWorkflow.table.c.id ),
+                                            from_obj=[ sa.outerjoin( model.StoredWorkflow.table,
+                                                                     model.User.table ) ] )
+        currday = datetime.today()
+        trends = dict()
+        for workflow in all_workflows_per_user.execute():
+            curr_user = re.sub(r'\W+', '', workflow.user_email)
+            try:
+                day = currday - workflow.date
+            except TypeError:
+                day = datetime.date(currday) - datetime.date(workflow.date)
+
+            day = day.days
+            container = floor(day / _time_period)
+            container = int(container)
+            try:
+                if container < spark_limit:
+                    trends[curr_user][container] += 1
+            except KeyError:
+                trends[curr_user] = [0] * spark_limit
+                if container < spark_limit:
+                    trends[curr_user][container] += 1
+
+        for row in q.execute():
+            workflows.append( ( row.user_email,
+                                row.total_workflows ) )
+
+        pages_found = ceil(len(workflows) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/workflows_per_user.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    spark_limit=spark_limit,
+                                    trends=trends,
+                                    time_period=time_period,
+                                    workflows=workflows,
+                                    message=message,
+                                    page_specs=page_specs )
+
+    @web.expose
+    def user_per_month( self, trans, **kwd ):
+        params = util.Params( kwd )
+        message = ''
+        specs = sorter( 'date', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        email = util.restore_text( params.get( 'email', '' ) )
+        user_id = trans.security.decode_id( params.get( 'id', '' ) )
+
+        q = sa.select( ( self.select_month( model.StoredWorkflow.table.c.create_time ).label( 'date' ),
+                         sa.func.count( model.StoredWorkflow.table.c.id ).label( 'total_workflows' ) ),
+                       whereclause=model.StoredWorkflow.table.c.user_id == user_id,
+                       from_obj=[ model.StoredWorkflow.table ],
+                       group_by=self.group_by_month( model.StoredWorkflow.table.c.create_time ),
+                       order_by=[ _order ] )
+
+        all_workflows_user_month = sa.select( ( self.select_day( model.StoredWorkflow.table.c.create_time ).label( 'date' ),
+                                               model.StoredWorkflow.table.c.id ),
+                                             whereclause=model.StoredWorkflow.table.c.user_id == user_id,
+                                             from_obj=[ model.StoredWorkflow.table ] )
+
+        trends = dict()
+        for workflow in all_workflows_user_month.execute():
+            workflow_day = int(workflow.date.strftime("%-d")) - 1
+            workflow_month = int(workflow.date.strftime("%-m"))
+            workflow_month_name = workflow.date.strftime("%B")
+            workflow_year = workflow.date.strftime("%Y")
+            key = str( workflow_month_name + workflow_year)
+
+            try:
+                trends[key][workflow_day] += 1
+            except KeyError:
+                workflow_year = int(workflow_year)
+                wday, day_range = calendar.monthrange(workflow_year, workflow_month)
+                trends[key] = [0] * day_range
+                trends[key][workflow_day] += 1
+
+        workflows = []
+        for row in q.execute():
+            workflows.append( ( row.date.strftime( "%Y-%m" ),
+                                row.total_workflows,
+                                row.date.strftime( "%B" ),
+                                row.date.strftime( "%Y" ) ) )
+        return trans.fill_template( '/webapps/reports/workflows_user_per_month.mako',
+                                    email=util.sanitize_text( email ),
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    trends=trends,
+                                    workflows=workflows,
+                                    message=message )
+
+    @web.expose
+    def per_workflow( self, trans, **kwd ):
+        message = ''
+        PageSpec = namedtuple('PageSpec', ['entries', 'offset', 'page', 'pages_found'])
+
+        specs = sorter( 'workflow_name', kwd )
+        sort_id = specs.sort_id
+        order = specs.order
+        arrow = specs.arrow
+        _order = specs.exc_order
+        time_period = kwd.get('spark_time')
+        time_period, _time_period = get_spark_time( time_period )
+        spark_limit = 30
+        # Paging parameters: ``entries`` rows per page; fetch four pages' worth.
+        entries = int( kwd.get( 'entries', 10 ) )
+        limit = entries * 4
+        offset = int( kwd.get( 'offset', 0 ) )
+        page = int( kwd.get( 'page', 1 ) )
+
+        # Count total runs per workflow; a second, unaggregated query below
+        # collects the per-day run dates used for the sparkline trends.
+
+        q = sa.select( ( model.Workflow.table.c.id.label( 'workflow_id' ),
+                         sa.func.min( model.Workflow.table.c.name ).label( 'workflow_name' ),
+                         sa.func.count( model.WorkflowInvocation.table.c.id ).label( 'total_runs' ) ),
+                       from_obj=[ model.Workflow.table,
+                                  model.WorkflowInvocation.table ],
+                       whereclause=sa.and_( model.WorkflowInvocation.table.c.workflow_id == model.Workflow.table.c.id ),
+                       group_by=[ model.Workflow.table.c.id ],
+                       order_by=[ _order ],
+                       offset=offset,
+                       limit=limit )
+
+        all_runs_per_workflow = sa.select( ( model.Workflow.table.c.id.label( 'workflow_id' ),
+                                             model.Workflow.table.c.name.label( 'workflow_name' ),
+                                             self.select_day( model.WorkflowInvocation.table.c.create_time ).label( 'date' ) ),
+                                           from_obj=[ model.Workflow.table,
+                                                      model.WorkflowInvocation.table ],
+                                           whereclause=sa.and_( model.WorkflowInvocation.table.c.workflow_id == model.Workflow.table.c.id ) )
+
+        currday = date.today()
+        trends = dict()
+        for run in all_runs_per_workflow.execute():
+            curr_tool = re.sub(r'\W+', '', str(run.workflow_id))
+            try:
+                day = currday - run.date
+            except TypeError:
+                day = currday - datetime.date(run.date)
+
+            day = day.days
+            container = floor(day / _time_period)
+            container = int(container)
+            try:
+                if container < spark_limit:
+                    trends[curr_tool][container] += 1
+            except KeyError:
+                trends[curr_tool] = [0] * spark_limit
+                if container < spark_limit:
+                    trends[curr_tool][container] += 1
+
+        runs = []
+        for row in q.execute():
+            runs.append( ( row.workflow_name,
+                           row.total_runs,
+                           row.workflow_id) )
+
+        pages_found = ceil(len(runs) / float(entries))
+        page_specs = PageSpec(entries, offset, page, pages_found)
+
+        return trans.fill_template( '/webapps/reports/workflows_per_workflow.mako',
+                                    order=order,
+                                    arrow=arrow,
+                                    sort_id=sort_id,
+                                    spark_limit=spark_limit,
+                                    time_period=time_period,
+                                    trends=trends,
+                                    runs=runs,
+                                    message=message,
+                                    page_specs=page_specs)
+
+# ---- Utility methods -------------------------------------------------------
+
+
+def get_workflow( trans, id ):
+    return trans.sa_session.query( trans.model.Workflow ).get( trans.security.decode_id( id ) )
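
The ``trends`` dictionaries built in per_user() and per_workflow() above
implement a simple sparkline binning: each event is assigned to a bucket of
width _time_period days counted back from today, and only the most recent
spark_limit buckets are kept. A self-contained sketch of the same arithmetic
(all names here are local to the example):

    from datetime import date, timedelta
    from math import floor

    def bin_events(event_dates, period_days=1, spark_limit=30, today=None):
        # Bucket 0 holds the most recent period; older events fall off the end.
        today = today or date.today()
        buckets = [0] * spark_limit
        for d in event_dates:
            container = int(floor((today - d).days / float(period_days)))
            if 0 <= container < spark_limit:
                buckets[container] += 1
        return buckets

    # Two runs today and one 40 days ago -> [2, 0, 0, ..., 0]
    print(bin_events([date.today(), date.today(),
                      date.today() - timedelta(days=40)]))
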
diff --git a/lib/galaxy/webapps/tool_shed/__init__.py b/lib/galaxy/webapps/tool_shed/__init__.py
new file mode 100644
index 0000000..1e1338b
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/__init__.py
@@ -0,0 +1,6 @@
+"""The Galaxy Tool Shed application."""
+
+from galaxy.web.framework import url_for
+from galaxy.web.framework.decorators import expose
+
+__all__ = ('url_for', 'expose')
diff --git a/lib/galaxy/webapps/tool_shed/api/__init__.py b/lib/galaxy/webapps/tool_shed/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/webapps/tool_shed/api/authenticate.py b/lib/galaxy/webapps/tool_shed/api/authenticate.py
new file mode 100644
index 0000000..38bfdba
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/authenticate.py
@@ -0,0 +1,34 @@
+"""
+API key retrieval through BaseAuth
+Sample usage:
+
+curl --user zipzap@foo.com:password http://localhost:9009/api/authenticate/baseauth
+
+Returns:
+{
+    "api_key": <some api key>
+}
+"""
+import logging
+
+from galaxy.webapps.galaxy.api.authenticate import AuthenticationController
+from galaxy.web import _future_expose_api_raw_anonymous_and_sessionless as expose_api_raw_anonymous_and_sessionless
+
+log = logging.getLogger( __name__ )
+
+
+class ToolShedAuthenticationController( AuthenticationController ):
+
+    @expose_api_raw_anonymous_and_sessionless
+    def get_tool_shed_api_key( self, trans, **kwd ):
+        """
+        def get_api_key( self, trans, **kwd )
+        * GET /api/authenticate/baseauth
+        returns an API key for authenticated user based on BaseAuth headers
+
+        :returns: api_key in json format
+        :rtype:   dict
+
+        :raises: ObjectNotFound, HTTPBadRequest
+        """
+        return self.get_api_key( trans, **kwd )
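
A hypothetical client-side counterpart of the endpoint above, using the
requests library; host, port, and credentials are placeholders taken from the
docstring:

    import requests

    # Exchange HTTP Basic credentials for a Tool Shed API key.
    response = requests.get('http://localhost:9009/api/authenticate/baseauth',
                            auth=('zipzap@foo.com', 'password'))
    response.raise_for_status()
    api_key = response.json()['api_key']
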
diff --git a/lib/galaxy/webapps/tool_shed/api/categories.py b/lib/galaxy/webapps/tool_shed/api/categories.py
new file mode 100644
index 0000000..4b5ab86
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/categories.py
@@ -0,0 +1,135 @@
+import logging
+
+from galaxy import util
+from galaxy import web
+from galaxy import exceptions
+from galaxy.web import require_admin
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web.base.controller import BaseAPIController
+from tool_shed.util import repository_util
+import tool_shed.util.shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+
+class CategoriesController( BaseAPIController ):
+    """RESTful controller for interactions with categories in the Tool Shed."""
+
+    def __get_repository_count( self, trans, category_name ):
+        return self.app.repository_registry.viewable_repositories_and_suites_by_category.get( category_name, 0 )
+
+    def __get_value_mapper( self, trans ):
+        value_mapper = { 'id': trans.security.encode_id }
+        return value_mapper
+
+    @expose_api
+    @require_admin
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/categories
+        Return a dictionary of information about the created category.
+        The following parameters are included in the payload:
+
+        :param name (required): the name of the category
+        :param description (optional): the description of the category (if not provided, the name will be used)
+
+        Example: POST /api/categories/?key=XXXYYYXXXYYY
+        Content-Disposition: form-data; name="name" Category_Name
+        Content-Disposition: form-data; name="description" Category_Description
+        """
+        category_dict = dict( message='', status='ok' )
+        name = payload.get( 'name', '' )
+        if name:
+            description = payload.get( 'description', '' )
+            if not description:
+                # Default the description to the name.
+                description = name
+            if suc.get_category_by_name( self.app, name ):
+                raise exceptions.Conflict( 'A category with that name already exists.' )
+            else:
+                # Create the category
+                category = self.app.model.Category( name=name, description=description )
+                trans.sa_session.add( category )
+                trans.sa_session.flush()
+                category_dict = category.to_dict( view='element',
+                                                  value_mapper=self.__get_value_mapper( trans ) )
+                category_dict[ 'message' ] = "Category '%s' has been created" % str( category.name )
+                category_dict[ 'url' ] = web.url_for( controller='categories',
+                                                      action='show',
+                                                      id=trans.security.encode_id( category.id ) )
+        else:
+            raise exceptions.RequestParameterMissingException( 'Missing required parameter "name".' )
+        return category_dict
+
+    @expose_api_anonymous_and_sessionless
+    def get_repositories( self, trans, category_id, **kwd ):
+        """
+        GET /api/categories/{encoded_category_id}/repositories
+        Return information about the provided category and the repositories in that category.
+
+        :param id: the encoded id of the Category object
+
+        Example: GET localhost:9009/api/categories/f9cad7b01a472135/repositories
+        """
+        category = suc.get_category( self.app, category_id )
+        if category is None:
+            category_dict = dict( message='Unable to locate category record for id %s.' % ( str( category_id ) ),
+                                  status='error' )
+            return category_dict
+        category_dict = category.to_dict( view='element',
+                                          value_mapper=self.__get_value_mapper( trans ) )
+        category_dict[ 'url' ] = web.url_for( controller='categories',
+                                              action='show',
+                                              id=trans.security.encode_id( category.id ) )
+        repositories = repository_util.get_repositories_by_category( self.app, category.id )
+        category_dict[ 'repositories' ] = repositories
+        return category_dict
+
+    @expose_api_anonymous_and_sessionless
+    def index( self, trans, deleted=False, **kwd ):
+        """
+        GET /api/categories
+        Return a list of dictionaries that contain information about each Category.
+
+        :param deleted: flag used to include deleted categories
+
+        Example: GET localhost:9009/api/categories
+        """
+        category_dicts = []
+        deleted = util.asbool( deleted )
+        if deleted and not trans.user_is_admin():
+            raise exceptions.AdminRequiredException( 'Only administrators can query deleted categories.' )
+        for category in trans.sa_session.query( self.app.model.Category ) \
+                                        .filter( self.app.model.Category.table.c.deleted == deleted ) \
+                                        .order_by( self.app.model.Category.table.c.name ):
+            category_dict = category.to_dict( view='collection',
+                                              value_mapper=self.__get_value_mapper( trans ) )
+            category_dict[ 'url' ] = web.url_for( controller='categories',
+                                                  action='show',
+                                                  id=trans.security.encode_id( category.id ) )
+            category_dict[ 'repositories' ] = self.app.repository_registry.viewable_repositories_and_suites_by_category.get( category.name, 0 )
+            category_dicts.append( category_dict )
+        return category_dicts
+
+    @expose_api_anonymous_and_sessionless
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/categories/{encoded_category_id}
+        Return a dictionary of information about a category.
+
+        :param id: the encoded id of the Category object
+
+        Example: GET localhost:9009/api/categories/f9cad7b01a472135
+        """
+        category = suc.get_category( self.app, id )
+        if category is None:
+            category_dict = dict( message='Unable to locate category record for id %s.' % ( str( id ) ),
+                                  status='error' )
+            return category_dict
+        category_dict = category.to_dict( view='element',
+                                          value_mapper=self.__get_value_mapper( trans ) )
+        category_dict[ 'url' ] = web.url_for( controller='categories',
+                                              action='show',
+                                              id=trans.security.encode_id( category.id ) )
+        return category_dict
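
A sketch of how a client might drive the category endpoints above; the host,
API key, and payload values are placeholders echoing the docstring examples:

    import requests

    TOOL_SHED = 'http://localhost:9009'   # placeholder instance
    API_KEY = 'XXXYYYXXXYYY'              # placeholder admin key

    # Create a category (admin only), then list all categories.
    created = requests.post('%s/api/categories' % TOOL_SHED,
                            params={'key': API_KEY},
                            json={'name': 'Category_Name',
                                  'description': 'Category_Description'}).json()
    categories = requests.get('%s/api/categories' % TOOL_SHED).json()
    print([c['name'] for c in categories])
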
diff --git a/lib/galaxy/webapps/tool_shed/api/configuration.py b/lib/galaxy/webapps/tool_shed/api/configuration.py
new file mode 100644
index 0000000..a133436
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/configuration.py
@@ -0,0 +1,28 @@
+"""
+API operations allowing clients to determine Tool Shed instance's
+capabilities and configuration settings.
+"""
+
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web.base.controller import BaseAPIController
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class ConfigurationController( BaseAPIController ):
+
+    def __init__( self, app ):
+        super( ConfigurationController, self ).__init__( app )
+
+    @expose_api_anonymous_and_sessionless
+    def version( self, trans, **kwds ):
+        """
+        GET /api/version
+        Return a description of the version_major and version of Galaxy Tool Shed
+        (e.g. 15.07 and 15.07.dev).
+
+        :rtype:     dict
+        :returns:   dictionary with versions keyed as 'version_major' and 'version'
+        """
+        return {"version_major": self.app.config.version_major, "version": self.app.config.version }
diff --git a/lib/galaxy/webapps/tool_shed/api/groups.py b/lib/galaxy/webapps/tool_shed/api/groups.py
new file mode 100644
index 0000000..6b0d5e1
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/groups.py
@@ -0,0 +1,149 @@
+import logging
+from galaxy import util
+from galaxy import web
+from galaxy.util import pretty_print_time_interval
+from galaxy.exceptions import RequestParameterMissingException
+from galaxy.exceptions import AdminRequiredException
+from galaxy.exceptions import ObjectNotFound
+from galaxy.web import require_admin
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web.base.controller import BaseAPIController
+from tool_shed.managers import groups
+
+log = logging.getLogger( __name__ )
+
+
+class GroupsController( BaseAPIController ):
+    """RESTful controller for interactions with groups in the Tool Shed."""
+
+    def __init__( self, app ):
+        super( GroupsController, self ).__init__( app )
+        self.group_manager = groups.GroupManager()
+
+    def __get_value_mapper( self, trans ):
+        value_mapper = { 'id' : trans.security.encode_id }
+        return value_mapper
+
+    @expose_api_anonymous_and_sessionless
+    def index( self, trans, deleted=False, **kwd ):
+        """
+        GET /api/groups
+        Return a list of dictionaries that contain information about each Group.
+
+        :param deleted: flag used to include deleted groups
+
+        Example: GET localhost:9009/api/groups
+        """
+        group_dicts = []
+        deleted = util.asbool( deleted )
+        if deleted and not trans.user_is_admin():
+            raise AdminRequiredException( 'Only administrators can query deleted groups.' )
+        for group in self.group_manager.list( trans, deleted ):
+            group_dicts.append( self._populate( trans, group ) )
+        return group_dicts
+
+    @expose_api
+    @require_admin
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/groups
+        Return a dictionary of information about the created group.
+        The following parameters are included in the payload:
+
+        :param name (required): the name of the group
+        :param description (optional): the description of the group
+
+        Example: POST /api/groups/?key=XXXYYYXXXYYY
+        Content-Disposition: form-data; name="name" Group_Name
+        Content-Disposition: form-data; name="description" Group_Description
+        """
+        group_dict = dict( message='', status='ok' )
+        name = payload.get( 'name', '' )
+        if name:
+            # A description may be supplied, but the Group model has no
+            # description field yet, so it is accepted and ignored for now.
+            # TODO: add a description field to the model.
+            group_dict = self.group_manager.create( trans, name=name ).to_dict( view='element', value_mapper=self.__get_value_mapper( trans ) )
+        else:
+            raise RequestParameterMissingException( 'Missing required parameter "name".' )
+        return group_dict
+
+    @expose_api_anonymous_and_sessionless
+    def show( self, trans, encoded_id, **kwd ):
+        """
+        GET /api/groups/{encoded_group_id}
+        Return a dictionary of information about a group.
+
+        :param id: the encoded id of the Group object
+
+        Example: GET localhost:9009/api/groups/f9cad7b01a472135
+        """
+        decoded_id = trans.security.decode_id( encoded_id )
+        group = self.group_manager.get( trans, decoded_id )
+        if group is None:
+            raise ObjectNotFound( 'Unable to locate group record for id %s.' % ( str( encoded_id ) ) )
+        return self._populate( trans, group )
+
+    def _populate( self, trans, group ):
+        """
+        Turn the given group information from DB into a dict
+        and add other characteristics like members and repositories.
+        """
+        model = trans.app.model
+        group_dict = group.to_dict( view='collection', value_mapper=self.__get_value_mapper( trans ) )
+        group_members = []
+        group_repos = []
+        total_downloads = 0
+        for uga in group.users:
+            user = trans.sa_session.query( model.User ).filter( model.User.table.c.id == uga.user_id ).one()
+            user_repos_count = 0
+            for repo in trans.sa_session.query( model.Repository ) \
+                    .filter( model.Repository.table.c.user_id == uga.user_id ) \
+                    .join( model.RepositoryMetadata.table ) \
+                    .join( model.User.table ) \
+                    .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                    .outerjoin( model.Category.table ):
+                categories = []
+                for rca in repo.categories:
+                    cat_dict = dict( name=rca.category.name, id=trans.app.security.encode_id( rca.category.id ) )
+                    categories.append( cat_dict )
+                time_repo_created_full = repo.create_time.strftime( "%Y-%m-%d %I:%M %p" )
+                time_repo_updated_full = repo.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+                time_repo_created = pretty_print_time_interval( repo.create_time, True )
+                time_repo_updated = pretty_print_time_interval( repo.update_time, True )
+                approved = ''
+                ratings = []
+                for review in repo.reviews:
+                    if review.rating:
+                        ratings.append( review.rating )
+                    if review.approved == 'yes':
+                        approved = 'yes'
+                # TODO add user ratings
+                ratings_mean = str( float( sum( ratings ) ) / len( ratings ) ) if len( ratings ) > 0 else ''
+                total_downloads += repo.times_downloaded
+                group_repos.append( {   'name': repo.name,
+                                        'times_downloaded': repo.times_downloaded,
+                                        'owner': repo.user.username,
+                                        'time_created_full': time_repo_created_full,
+                                        'time_created': time_repo_created,
+                                        'time_updated_full': time_repo_updated_full,
+                                        'time_updated': time_repo_updated,
+                                        'description': repo.description,
+                                        'approved': approved,
+                                        'ratings_mean': ratings_mean,
+                                        'categories' : categories } )
+                user_repos_count += 1
+            # Encode the member's own id; referencing ``repo`` here would raise
+            # a NameError for members that own no repositories.
+            encoded_user_id = trans.app.security.encode_id( user.id )
+            user_repos_url = web.url_for( controller='repository', action='browse_repositories_by_user', user_id=encoded_user_id )
+            time_created = pretty_print_time_interval( user.create_time, True )
+            member_dict = { 'id': encoded_user_id, 'username': user.username, 'user_repos_url': user_repos_url, 'user_repos_count': user_repos_count, 'user_tools_count': 'unknown', 'time_created': time_created }
+            group_members.append( member_dict )
+        group_dict[ 'members' ] = group_members
+        group_dict[ 'total_members' ] = len( group_members )
+        group_dict[ 'repositories' ] = group_repos
+        group_dict[ 'total_repos' ] = len( group_repos )
+        group_dict[ 'total_downloads' ] = total_downloads
+        return group_dict
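
The group endpoints can be exercised the same way; a hypothetical sketch with
a placeholder host, using the encoded ids returned by index():

    import requests

    groups = requests.get('http://localhost:9009/api/groups').json()
    if groups:
        detail = requests.get('http://localhost:9009/api/groups/%s'
                              % groups[0]['id']).json()
        print(detail['total_members'], detail['total_downloads'])
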
diff --git a/lib/galaxy/webapps/tool_shed/api/repositories.py b/lib/galaxy/webapps/tool_shed/api/repositories.py
new file mode 100644
index 0000000..2f3dbc2
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -0,0 +1,1098 @@
+import json
+import logging
+import os
+import tarfile
+import StringIO
+from cgi import FieldStorage
+from collections import namedtuple
+from time import strftime
+
+from sqlalchemy import and_, false
+
+from galaxy import util
+from galaxy import web
+from galaxy.util import checkers
+from galaxy.exceptions import ActionInputError
+from galaxy.exceptions import ConfigDoesNotAllowException
+from galaxy.exceptions import InsufficientPermissionsException
+from galaxy.exceptions import MalformedId
+from galaxy.exceptions import ObjectNotFound
+from galaxy.exceptions import RequestParameterInvalidException
+from galaxy.exceptions import RequestParameterMissingException
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web import _future_expose_api_raw_anonymous_and_sessionless as expose_api_raw_anonymous_and_sessionless
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import HTTPBadRequest
+from galaxy.webapps.tool_shed.search.repo_search import RepoSearch
+from tool_shed.capsule import capsule_manager
+from tool_shed.dependencies import attribute_handlers
+from tool_shed.metadata import repository_metadata_manager
+from tool_shed.repository_types import util as rt_util
+from tool_shed.util import basic_util
+from tool_shed.util import commit_util
+from tool_shed.util import encoding_util
+from tool_shed.util import hg_util
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_content_util
+from tool_shed.util import repository_util
+from tool_shed.util import tool_util
+
+log = logging.getLogger( __name__ )
+
+
+class RepositoriesController( BaseAPIController ):
+    """RESTful controller for interactions with repositories in the Tool Shed."""
+
+    @web.expose_api
+    def add_repository_registry_entry( self, trans, payload, **kwd ):
+        """
+        POST /api/repositories/add_repository_registry_entry
+        Adds appropriate entries to the repository registry for the repository defined by the received name and owner.
+
+        :param key: the user's API key
+
+        The following parameters are included in the payload.
+        :param tool_shed_url (required): the base URL of the Tool Shed containing the Repository
+        :param name (required): the name of the Repository
+        :param owner (required): the owner of the Repository
+        """
+        response_dict = {}
+        if not trans.user_is_admin():
+            response_dict[ 'status' ] = 'error'
+            response_dict[ 'message' ] = "You are not authorized to add entries to this Tool Shed's repository registry."
+            return response_dict
+        tool_shed_url = payload.get( 'tool_shed_url', '' )
+        if not tool_shed_url:
+            raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." )
+        tool_shed_url = tool_shed_url.rstrip( '/' )
+        name = payload.get( 'name', '' )
+        if not name:
+            raise HTTPBadRequest( detail="Missing required parameter 'name'." )
+        owner = payload.get( 'owner', '' )
+        if not owner:
+            raise HTTPBadRequest( detail="Missing required parameter 'owner'." )
+        repository = repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+        if repository is None:
+            error_message = 'Cannot locate repository with name %s and owner %s.' % ( str( name ), str( owner ) )
+            log.debug( error_message )
+            response_dict[ 'status' ] = 'error'
+            response_dict[ 'message' ] = error_message
+            return response_dict
+        # Update the repository registry.
+        self.app.repository_registry.add_entry( repository )
+        response_dict[ 'status' ] = 'ok'
+        response_dict[ 'message' ] = 'Entries for repository %s owned by %s have been added to the Tool Shed repository registry.' \
+            % ( name, owner )
+        return response_dict
+
+    @web.expose_api_anonymous
+    def get_ordered_installable_revisions( self, trans, name=None, owner=None, **kwd ):
+        """
+        GET /api/repositories/get_ordered_installable_revisions
+
+        :param name: the name of the Repository
+        :param owner: the owner of the Repository
+
+        Returns the ordered list of changeset revision hash strings that are associated with installable revisions.
+        As in the changelog, the list is ordered oldest to newest.
+        """
+        # Example URL: http://localhost:9009/api/repositories/get_ordered_installable_revisions?name=add_column&owner=test
+        if name is None:
+            name = kwd.get( 'name', None )
+        if owner is None:
+            owner = kwd.get( 'owner', None )
+        tsr_id = kwd.get( 'tsr_id', None )
+        if None not in [ name, owner ]:
+            # Get the repository information.
+            repository = repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+            if repository is None:
+                trans.response.status = 404
+                return { 'status': 'error', 'message': 'No repository named %s found with owner %s' % ( name, owner ) }
+        elif tsr_id is not None:
+            repository = repository_util.get_repository_in_tool_shed( self.app, tsr_id )
+        else:
+            error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
+            error_message += "invalid parameters received."
+            log.debug( error_message )
+            return []
+        return [ revision[ 1 ] for revision in repository.installable_revisions( self.app, sort_revisions=True ) ]
+
+    @web.expose_api_anonymous
+    def get_repository_revision_install_info( self, trans, name, owner, changeset_revision, **kwd ):
+        """
+        GET /api/repositories/get_repository_revision_install_info
+
+        :param name: the name of the Repository
+        :param owner: the owner of the Repository
+        :param changeset_revision: the changeset_revision of the RepositoryMetadata object associated with the Repository
+
+        Returns a list of the following dictionaries::
+        - a dictionary defining the Repository.  For example:
+        {
+            "deleted": false,
+            "deprecated": false,
+            "description": "add_column hello",
+            "id": "f9cad7b01a472135",
+            "long_description": "add_column hello",
+            "name": "add_column",
+            "owner": "test",
+            "private": false,
+            "times_downloaded": 6,
+            "url": "/api/repositories/f9cad7b01a472135",
+            "user_id": "f9cad7b01a472135"
+        }
+        - a dictionary defining the Repository revision (RepositoryMetadata).  For example:
+        {
+            "changeset_revision": "3a08cc21466f",
+            "downloadable": true,
+            "has_repository_dependencies": false,
+            "has_repository_dependencies_only_if_compiling_contained_td": false,
+            "id": "f9cad7b01a472135",
+            "includes_datatypes": false,
+            "includes_tool_dependencies": false,
+            "includes_tools": true,
+            "includes_tools_for_display_in_tool_panel": true,
+            "includes_workflows": false,
+            "malicious": false,
+            "repository_id": "f9cad7b01a472135",
+            "url": "/api/repository_revisions/f9cad7b01a472135",
+            "valid_tools": [{u'add_to_tool_panel': True,
+                u'description': u'data on any column using simple expressions',
+                u'guid': u'localhost:9009/repos/enis/sample_repo_1/Filter1/2.2.0',
+                u'id': u'Filter1',
+                u'name': u'Filter',
+                u'requirements': [],
+                u'tests': [{u'inputs': [[u'input', u'1.bed'], [u'cond', u"c1=='chr22'"]],
+                  u'name': u'Test-1',
+                  u'outputs': [[u'out_file1', u'filter1_test1.bed']],
+                  u'required_files': [u'1.bed', u'filter1_test1.bed']}],
+                u'tool_config': u'database/community_files/000/repo_1/filtering.xml',
+                u'tool_type': u'default',
+                u'version': u'2.2.0',
+                u'version_string_cmd': None}]
+        }
+        - a dictionary including the additional information required to install the repository.  For example:
+        {
+            "add_column": [
+                "add_column hello",
+                "http://test@localhost:9009/repos/test/add_column",
+                "3a08cc21466f",
+                "1",
+                "test",
+                {},
+                {}
+            ]
+        }
+        """
+        # Example URL:
+        # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<n>&owner=<o>&changeset_revision=<cr>
+        if name and owner and changeset_revision:
+            # Get the repository information.
+            repository = repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+            if repository is None:
+                log.debug( 'Cannot locate repository %s owned by %s' % ( str( name ), str( owner ) ) )
+                return {}, {}, {}
+            encoded_repository_id = trans.security.encode_id( repository.id )
+            repository_dict = repository.to_dict( view='element',
+                                                  value_mapper=self.__get_value_mapper( trans ) )
+            repository_dict[ 'url' ] = web.url_for( controller='repositories',
+                                                    action='show',
+                                                    id=encoded_repository_id )
+            # Get the repository_metadata information.
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                               encoded_repository_id,
+                                                                                               changeset_revision )
+            if repository_metadata is None:
+                # The changeset_revision column in the repository_metadata table has been updated with a new
+                # value, so find the changeset_revision to which we need to update.
+                repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
+                new_changeset_revision = metadata_util.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+                repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                                   encoded_repository_id,
+                                                                                                   new_changeset_revision )
+                changeset_revision = new_changeset_revision
+            if repository_metadata is not None:
+                encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id )
+                repository_metadata_dict = repository_metadata.to_dict( view='collection',
+                                                                        value_mapper=self.__get_value_mapper( trans ) )
+                repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+                                                                 action='show',
+                                                                 id=encoded_repository_metadata_id )
+                if 'tools' in repository_metadata.metadata:
+                    repository_metadata_dict[ 'valid_tools' ] = repository_metadata.metadata[ 'tools' ]
+                # Get the repo_info_dict for installing the repository.
+                repo_info_dict, \
+                    includes_tools, \
+                    includes_tool_dependencies, \
+                    includes_tools_for_display_in_tool_panel, \
+                    has_repository_dependencies, \
+                    has_repository_dependencies_only_if_compiling_contained_td = \
+                    repository_util.get_repo_info_dict( self.app,
+                                                        trans.user,
+                                                        encoded_repository_id,
+                                                        changeset_revision )
+                return repository_dict, repository_metadata_dict, repo_info_dict
+            else:
+                log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" %
+                           ( str( repository.id ), str( changeset_revision ) ) )
+                return repository_dict, {}, {}
+        else:
+            debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: "
+            debug_msg += "Invalid name %s or owner %s or changeset_revision %s received." % \
+                ( str( name ), str( owner ), str( changeset_revision ) )
+            log.debug( debug_msg )
+            return {}, {}, {}
+
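+    # A hedged usage sketch (not part of the upstream module): the endpoint above
+    # might be exercised from a client roughly as follows; the host, repository
+    # name, owner and changeset revision are illustrative assumptions.
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repositories/get_repository_revision_install_info'
+    #     params = dict( name='sample_repo_1', owner='enis', changeset_revision='9b5b20673b89' )
+    #     repository, metadata, install_info = requests.get( url, params=params ).json()
+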
+    @web.expose_api_anonymous
+    def get_installable_revisions( self, trans, **kwd ):
+        """
+        GET /api/repositories/get_installable_revisions
+
+        :param tsr_id: the encoded toolshed ID of the repository
+
+        Returns a list of lists of changesets, in the format [ [ 0, fbb391dc803c ], [ 1, 9d9ec4d9c03e ], [ 2, 9b5b20673b89 ], [ 3, e8c99ce51292 ] ].
+        """
+        # Example URL: http://localhost:9009/api/repositories/get_installable_revisions?tsr_id=9d37e53072ff9fa4
+        tsr_id = kwd.get( 'tsr_id', None )
+        if tsr_id is not None:
+            repository = repository_util.get_repository_in_tool_shed( self.app, tsr_id )
+        else:
+            error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
+            error_message += "missing or invalid parameter received."
+            log.debug( error_message )
+            return []
+        return repository.installable_revisions( self.app )
+
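+    # A minimal client-side sketch, assuming a local Tool Shed and the
+    # illustrative tsr_id from the example URL above, showing the
+    # [ numeric index, changeset hash ] pairs this endpoint returns:
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repositories/get_installable_revisions'
+    #     for index, changeset_hash in requests.get( url, params={ 'tsr_id': '9d37e53072ff9fa4' } ).json():
+    #         print index, changeset_hash
+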
+    def __get_value_mapper( self, trans ):
+        value_mapper = { 'id': trans.security.encode_id,
+                         'repository_id': trans.security.encode_id,
+                         'user_id': trans.security.encode_id }
+        return value_mapper
+
+    @web.expose_api
+    def import_capsule( self, trans, payload, **kwd ):
+        """
+        POST /api/repositories/new/import_capsule
+        Import a repository capsule into the Tool Shed.
+
+        :param key: the user's API key
+
+        The following parameters are included in the payload.
+        :param tool_shed_url (required): the base URL of the Tool Shed into which the capsule should be imported.
+        :param capsule_file_name (required): the name of the capsule file.
+        """
+        # Get the information about the capsule to be imported from the payload.
+        tool_shed_url = payload.get( 'tool_shed_url', '' )
+        if not tool_shed_url:
+            raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." )
+        capsule_file_name = payload.get( 'capsule_file_name', '' )
+        if not capsule_file_name:
+            raise HTTPBadRequest( detail="Missing required parameter 'capsule_file_name'." )
+        capsule_file_path = os.path.abspath( capsule_file_name )
+        capsule_dict = dict( error_message='',
+                             encoded_file_path=None,
+                             status='ok',
+                             tar_archive=None,
+                             uploaded_file=None,
+                             capsule_file_name=None )
+        if os.path.getsize( capsule_file_path ) == 0:
+            log.debug( 'Your capsule file %s is empty.' % str( capsule_file_name ) )
+            return {}
+        try:
+            # Open for reading with transparent compression.
+            tar_archive = tarfile.open( capsule_file_path, 'r:*' )
+        except tarfile.ReadError as e:
+            log.debug( 'Error opening capsule file %s: %s' % ( str( capsule_file_name ), str( e ) ) )
+            return {}
+        irm = capsule_manager.ImportRepositoryManager( self.app,
+                                                       trans.request.host,
+                                                       trans.user,
+                                                       trans.user_is_admin() )
+        capsule_dict[ 'tar_archive' ] = tar_archive
+        capsule_dict[ 'capsule_file_name' ] = capsule_file_name
+        capsule_dict = irm.extract_capsule_files( **capsule_dict )
+        capsule_dict = irm.validate_capsule( **capsule_dict )
+        status = capsule_dict.get( 'status', 'error' )
+        if status == 'error':
+            log.debug( 'The capsule contents are invalid and cannot be imported: %s' %
+                       str( capsule_dict.get( 'error_message', '' ) ) )
+            return {}
+        encoded_file_path = capsule_dict.get( 'encoded_file_path', None )
+        if encoded_file_path is None:
+            log.debug( 'The capsule_dict %s is missing the required encoded_file_path entry.' % str( capsule_dict ) )
+            return {}
+        file_path = encoding_util.tool_shed_decode( encoded_file_path )
+        manifest_file_path = os.path.join( file_path, 'manifest.xml' )
+        # The manifest.xml file has already been validated, so no error_message should be returned here.
+        repository_info_dicts, error_message = irm.get_repository_info_from_manifest( manifest_file_path )
+        # Determine the status for each exported repository archive contained within the capsule.
+        repository_status_info_dicts = irm.get_repository_status_from_tool_shed( repository_info_dicts )
+        # Generate a list of repository name / import results message tuples for display after the capsule is imported.
+        import_results_tups = []
+        # Only create repositories that do not yet exist and that the current user is authorized to create.  The
+        # status will be None for repositories that fall into the intersection of these two categories.
+        for repository_status_info_dict in repository_status_info_dicts:
+            # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
+            repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
+            repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
+            import_results_tups = irm.create_repository_and_import_archive( repository_status_info_dict,
+                                                                            import_results_tups )
+        irm.check_status_and_reset_downloadable( import_results_tups )
+        basic_util.remove_dir( file_path )
+        # NOTE: the order of installation is defined in import_results_tups, but order will be lost
+        # when transferred to return_dict.
+        return_dict = {}
+        for import_results_tup in import_results_tups:
+            ok, name_owner, message = import_results_tup
+            name, owner = name_owner
+            key = 'Archive of repository "%s" owned by "%s"' % ( str( name ), str( owner ) )
+            val = message.replace( '<b>', '"' ).replace( '</b>', '"' )
+            return_dict[ key ] = val
+        return return_dict
+
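+    # Sketch of a client-side call to the capsule import endpoint above; the API
+    # key and capsule file name are assumptions, and the capsule file must already
+    # be readable on the Tool Shed host because the path is resolved server side.
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repositories/new/import_capsule?key=<api key>'
+    #     payload = { 'tool_shed_url': 'http://localhost:9009',
+    #                 'capsule_file_name': 'exported_capsule.tar.gz' }
+    #     results = requests.post( url, json=payload ).json()
+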
+    @expose_api_raw_anonymous_and_sessionless
+    def index( self, trans, deleted=False, owner=None, name=None, **kwd ):
+        """
+        GET /api/repositories
+        Displays a collection of repositories with optional criteria.
+
+        :param q:        (optional) if present, a search with the given query will be performed
+        :type  q:        str
+
+        :param page:     (optional) the requested page of the search results
+        :type  page:     int
+
+        :param page_size:     (optional) the requested page size of the search results
+        :type  page_size:     int
+
+        :param jsonp:    (optional) flag indicating whether to return the response in JSONP format, defaults to False
+        :type  jsonp:    bool
+
+        :param callback: (optional) name of the function to wrap the response in;
+                         used only when jsonp is True, defaults to 'callback'
+        :type  callback: str
+
+        :param deleted:  (optional) displays repositories that are or are not set to deleted
+        :type  deleted:  bool
+
+        :param owner:    (optional) the owner's public username
+        :type  owner:    str
+
+        :param name:     (optional) the repository name
+        :type  name:     str
+
+        :returns dict:   object containing a list of results
+
+        Examples:
+            GET http://localhost:9009/api/repositories
+            GET http://localhost:9009/api/repositories?q=fastq
+        """
+        repository_dicts = []
+        deleted = util.asbool( deleted )
+        q = kwd.get( 'q', '' )
+        if q:
+            page = kwd.get( 'page', 1 )
+            page_size = kwd.get( 'page_size', 10 )
+            try:
+                page = int( page )
+                page_size = int( page_size )
+            except ValueError:
+                raise RequestParameterInvalidException( 'The "page" and "page_size" parameters have to be integers.' )
+            return_jsonp = util.asbool( kwd.get( 'jsonp', False ) )
+            callback = kwd.get( 'callback', 'callback' )
+            search_results = self._search( trans, q, page, page_size )
+            if return_jsonp:
+                response = '%s(%s);' % ( callback, json.dumps( search_results ) )
+            else:
+                response = json.dumps( search_results )
+            return response
+
+        clause_list = [ and_( self.app.model.Repository.table.c.deprecated == false(),
+                              self.app.model.Repository.table.c.deleted == deleted ) ]
+        if owner is not None:
+            clause_list.append( and_( self.app.model.User.table.c.username == owner,
+                                      self.app.model.Repository.table.c.user_id == self.app.model.User.table.c.id ) )
+        if name is not None:
+            clause_list.append( self.app.model.Repository.table.c.name == name )
+        for repository in trans.sa_session.query( self.app.model.Repository ) \
+                                          .filter( *clause_list ) \
+                                          .order_by( self.app.model.Repository.table.c.name ):
+            repository_dict = repository.to_dict( view='collection',
+                                                  value_mapper=self.__get_value_mapper( trans ) )
+            repository_dict[ 'category_ids' ] = \
+                [ trans.security.encode_id( x.category.id ) for x in repository.categories ]
+            repository_dicts.append( repository_dict )
+        return json.dumps( repository_dicts )
+
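+    # Illustrative queries against the index endpoint above, assuming a local
+    # Tool Shed; both the plain listing and the Whoosh-backed search are shown
+    # (the owner value is an assumption).
+    #
+    #     import json, requests
+    #     base = 'http://localhost:9009/api/repositories'
+    #     # Plain listing, optionally filtered by owner and/or name.
+    #     repos = json.loads( requests.get( base, params={ 'owner': 'devteam' } ).text )
+    #     # Paged full-text search; requires toolshed_search_on to be enabled.
+    #     hits = json.loads( requests.get( base, params={ 'q': 'fastq', 'page': 1, 'page_size': 10 } ).text )
+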
+    def _search( self, trans, q, page=1, page_size=10 ):
+        """
+        Perform the search over TS repositories.
+        Note that the search is performed over the Whoosh index, which has to be
+        pre-created manually with scripts/tool_shed/build_ts_whoosh_index.sh.
+        The TS config option toolshed_search_on also has to be True and
+        whoosh_index_dir has to be specified.
+        """
+        conf = self.app.config
+        if not conf.toolshed_search_on:
+            raise ConfigDoesNotAllowException( 'Searching the TS through the API is turned off for this instance.' )
+        if not conf.whoosh_index_dir:
+            raise ConfigDoesNotAllowException( 'There is no directory for the search index specified. Please contact the administrator.' )
+        search_term = q.strip()
+        if len( search_term ) < 3:
+            raise RequestParameterInvalidException( 'The search term has to be at least 3 characters long.' )
+
+        repo_search = RepoSearch()
+
+        Boosts = namedtuple( 'Boosts', [ 'repo_name_boost',
+                                         'repo_description_boost',
+                                         'repo_long_description_boost',
+                                         'repo_homepage_url_boost',
+                                         'repo_remote_repository_url_boost',
+                                         'repo_owner_username_boost' ] )
+        boosts = Boosts( float( conf.get( 'repo_name_boost', 0.9 ) ),
+                         float( conf.get( 'repo_description_boost', 0.6 ) ),
+                         float( conf.get( 'repo_long_description_boost', 0.5 ) ),
+                         float( conf.get( 'repo_homepage_url_boost', 0.3 ) ),
+                         float( conf.get( 'repo_remote_repository_url_boost', 0.2 ) ),
+                         float( conf.get( 'repo_owner_username_boost', 0.3 ) ) )
+
+        results = repo_search.search( trans,
+                                      search_term,
+                                      page,
+                                      page_size,
+                                      boosts )
+        results[ 'hostname' ] = web.url_for( '/', qualified=True )
+        return results
+
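+    # The boost values read above come from the Tool Shed configuration; a sketch
+    # of the corresponding config entries with the defaults this method falls back
+    # to (the whoosh_index_dir value shown is an assumption):
+    #
+    #     toolshed_search_on = True
+    #     whoosh_index_dir = database/toolshed_whoosh_indexes
+    #     repo_name_boost = 0.9
+    #     repo_description_boost = 0.6
+    #     repo_long_description_boost = 0.5
+    #     repo_homepage_url_boost = 0.3
+    #     repo_remote_repository_url_boost = 0.2
+    #     repo_owner_username_boost = 0.3
+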
+    @web.expose_api
+    def remove_repository_registry_entry( self, trans, payload, **kwd ):
+        """
+        POST /api/repositories/remove_repository_registry_entry
+        Removes appropriate entries from the repository registry for the repository defined by the received name and owner.
+
+        :param key: the user's API key
+
+        The following parameters are included in the payload.
+        :param tool_shed_url (required): the base URL of the Tool Shed containing the Repository
+        :param name (required): the name of the Repository
+        :param owner (required): the owner of the Repository
+        """
+        response_dict = {}
+        if not trans.user_is_admin():
+            response_dict[ 'status' ] = 'error'
+            response_dict[ 'message' ] = "You are not authorized to remove entries from this Tool Shed's repository registry."
+            return response_dict
+        tool_shed_url = payload.get( 'tool_shed_url', '' )
+        if not tool_shed_url:
+            raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." )
+        tool_shed_url = tool_shed_url.rstrip( '/' )
+        name = payload.get( 'name', '' )
+        if not name:
+            raise HTTPBadRequest( detail="Missing required parameter 'name'." )
+        owner = payload.get( 'owner', '' )
+        if not owner:
+            raise HTTPBadRequest( detail="Missing required parameter 'owner'." )
+        repository = repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+        if repository is None:
+            error_message = 'Cannot locate repository with name %s and owner %s.' % ( str( name ), str( owner ) )
+            log.debug( error_message )
+            response_dict[ 'status' ] = 'error'
+            response_dict[ 'message' ] = error_message
+            return response_dict
+        # Update the repository registry.
+        self.app.repository_registry.remove_entry( repository )
+        response_dict[ 'status' ] = 'ok'
+        response_dict[ 'message' ] = 'Entries for repository %s owned by %s have been removed from the Tool Shed repository registry.' \
+            % ( name, owner )
+        return response_dict
+
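+    # Admin-only usage sketch for the registry removal endpoint above; the API key
+    # must belong to an admin user, and the repository name/owner are illustrative.
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repositories/remove_repository_registry_entry?key=<admin api key>'
+    #     payload = { 'tool_shed_url': 'http://localhost:9009', 'name': 'add_column', 'owner': 'test' }
+    #     print requests.post( url, json=payload ).json()[ 'message' ]
+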
+    @web.expose_api
+    def repository_ids_for_setting_metadata( self, trans, my_writable=False, **kwd ):
+        """
+        GET /api/repository_ids_for_setting_metadata
+
+        Displays a collection (list) of repository ids ordered for setting metadata.
+
+        :param key: the API key of the Tool Shed user.
+        :param my_writable (optional): if the API key is associated with an admin user in the Tool Shed, setting this param value
+                                       to True will restrict resetting metadata to only repositories that are writable by the user
+                                       in addition to those repositories of type tool_dependency_definition.  This param is ignored
+                                       if the current user is not an admin user, in which case this same restriction is automatic.
+        """
+        if trans.user_is_admin():
+            my_writable = util.asbool( my_writable )
+        else:
+            my_writable = True
+        handled_repository_ids = []
+        repository_ids = []
+        rmm = repository_metadata_manager.RepositoryMetadataManager( self.app, trans.user )
+        query = rmm.get_query_for_setting_metadata_on_repositories( my_writable=my_writable, order=False )
+        # Make sure repositories of type tool_dependency_definition are first in the list.
+        for repository in query:
+            if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+                repository_ids.append( trans.security.encode_id( repository.id ) )
+                handled_repository_ids.append( repository.id )
+        # Now add all remaining repositories to the list.
+        for repository in query:
+            if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+                repository_ids.append( trans.security.encode_id( repository.id ) )
+        return repository_ids
+
+    @web.expose_api
+    def reset_metadata_on_repositories( self, trans, payload, **kwd ):
+        """
+        PUT /api/repositories/reset_metadata_on_repositories
+
+        Resets all metadata on all repositories in the Tool Shed in an "orderly fashion".  Since there are currently only two
+        repository types (tool_dependency_definition and unrestricted), the order in which metadata is reset is repositories of
+        type tool_dependency_definition first followed by repositories of type unrestricted, and only one pass is necessary.  If
+        a new repository type is introduced, the process will undoubtedly need to be revisited.  To facilitate this order, an
+        in-memory list of repository ids that have been processed is maintained.
+
+        :param key: the API key of the Tool Shed user.
+
+        The following parameters can optionally be included in the payload.
+        :param my_writable (optional): if the API key is associated with an admin user in the Tool Shed, setting this param value
+                                       to True will restrict resetting metadata to only repositories that are writable by the user
+                                       in addition to those repositories of type tool_dependency_definition.  This param is ignored
+                                       if the current user is not an admin user, in which case this same restriction is automatic.
+        :param encoded_ids_to_skip (optional): a list of encoded repository ids for repositories that should not be processed.
+        :param skip_file (optional): A local file name that contains the encoded repository ids associated with repositories to skip.
+                                     This param can be used as an alternative to the above encoded_ids_to_skip.
+        """
+
+        def handle_repository( trans, rmm, repository, results ):
+            log.debug( "Resetting metadata on repository %s" % str( repository.name ) )
+            try:
+                rmm.set_repository( repository )
+                rmm.reset_all_metadata_on_repository_in_tool_shed()
+                rmm_invalid_file_tups = rmm.get_invalid_file_tups()
+                if rmm_invalid_file_tups:
+                    message = tool_util.generate_message_for_invalid_tools( self.app,
+                                                                            rmm_invalid_file_tups,
+                                                                            repository,
+                                                                            None,
+                                                                            as_html=False )
+                    results[ 'unsuccessful_count' ] += 1
+                else:
+                    message = "Successfully reset metadata on repository %s owned by %s" % \
+                        ( str( repository.name ), str( repository.user.username ) )
+                    results[ 'successful_count' ] += 1
+            except Exception as e:
+                message = "Error resetting metadata on repository %s owned by %s: %s" % \
+                    ( str( repository.name ), str( repository.user.username ), str( e ) )
+                results[ 'unsuccessful_count' ] += 1
+            status = '%s : %s' % ( str( repository.name ), message )
+            results[ 'repository_status' ].append( status )
+            return results
+        rmm = repository_metadata_manager.RepositoryMetadataManager( app=self.app,
+                                                                     user=trans.user,
+                                                                     resetting_all_metadata_on_repository=True,
+                                                                     updating_installed_repository=False,
+                                                                     persist=False )
+        start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+        results = dict( start_time=start_time,
+                        repository_status=[],
+                        successful_count=0,
+                        unsuccessful_count=0 )
+        handled_repository_ids = []
+        encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] )
+        skip_file = payload.get( 'skip_file', None )
+        if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip:
+            # Load the list of encoded_ids_to_skip from the skip_file.
+            # Contents of the file must be 1 encoded repository id per line.
+            with open( skip_file ) as skip_fh:
+                for line in skip_fh:
+                    if line.startswith( '#' ):
+                        # Skip comments.
+                        continue
+                    encoded_ids_to_skip.append( line.rstrip( '\n' ) )
+        if trans.user_is_admin():
+            my_writable = util.asbool( payload.get( 'my_writable', False ) )
+        else:
+            my_writable = True
+        query = rmm.get_query_for_setting_metadata_on_repositories( my_writable=my_writable, order=False )
+        # First reset metadata on all repositories of type tool_dependency_definition.
+        for repository in query:
+            encoded_id = trans.security.encode_id( repository.id )
+            if encoded_id in encoded_ids_to_skip:
+                log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" %
+                           ( str( repository.id ), str( encoded_ids_to_skip ) ) )
+            elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+                results = handle_repository( trans, rmm, repository, results )
+                handled_repository_ids.append( repository.id )
+        # Now reset metadata on all remaining repositories.
+        for repository in query:
+            encoded_id = trans.security.encode_id( repository.id )
+            if encoded_id in encoded_ids_to_skip:
+                log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" %
+                           ( str( repository.id ), str( encoded_ids_to_skip ) ) )
+            elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
+                results = handle_repository( trans, rmm, repository, results )
+                handled_repository_ids.append( repository.id )
+        stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+        results[ 'stop_time' ] = stop_time
+        return json.dumps( results, sort_keys=True, indent=4 )
+
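+    # An illustrative skip_file for the endpoint above: one encoded repository id
+    # per line, with '#' lines treated as comments (the ids shown are assumptions).
+    #
+    #     # repositories already processed in a previous run
+    #     f9cad7b01a472135
+    #     9d37e53072ff9fa4
+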
+    @web.expose_api
+    def reset_metadata_on_repository( self, trans, payload, **kwd ):
+        """
+        PUT /api/repositories/reset_metadata_on_repository
+
+        Resets all metadata on a specified repository in the Tool Shed.
+
+        :param key: the API key of the Tool Shed user.
+
+        The following parameters must be included in the payload.
+        :param repository_id: the encoded id of the repository on which metadata is to be reset.
+        """
+
+        def handle_repository( trans, start_time, repository ):
+            results = dict( start_time=start_time,
+                            repository_status=[] )
+            try:
+                rmm = repository_metadata_manager.RepositoryMetadataManager( app=self.app,
+                                                                             user=trans.user,
+                                                                             repository=repository,
+                                                                             resetting_all_metadata_on_repository=True,
+                                                                             updating_installed_repository=False,
+                                                                             persist=False )
+                rmm.reset_all_metadata_on_repository_in_tool_shed()
+                rmm_invalid_file_tups = rmm.get_invalid_file_tups()
+                if rmm_invalid_file_tups:
+                    message = tool_util.generate_message_for_invalid_tools( self.app,
+                                                                            rmm_invalid_file_tups,
+                                                                            repository,
+                                                                            None,
+                                                                            as_html=False )
+                    results[ 'status' ] = 'warning'
+                else:
+                    message = "Successfully reset metadata on repository %s owned by %s" % \
+                        ( str( repository.name ), str( repository.user.username ) )
+                    results[ 'status' ] = 'ok'
+            except Exception as e:
+                message = "Error resetting metadata on repository %s owned by %s: %s" % \
+                    ( str( repository.name ), str( repository.user.username ), str( e ) )
+                results[ 'status' ] = 'error'
+            status = '%s : %s' % ( str( repository.name ), message )
+            results[ 'repository_status' ].append( status )
+            return results
+
+        repository_id = payload.get( 'repository_id', None )
+        if repository_id is not None:
+            repository = repository_util.get_repository_in_tool_shed( self.app, repository_id )
+            start_time = strftime( "%Y-%m-%d %H:%M:%S" )
+            log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) )
+            results = handle_repository( trans, start_time, repository )
+            stop_time = strftime( "%Y-%m-%d %H:%M:%S" )
+            results[ 'stop_time' ] = stop_time
+        return results
+
+    @expose_api_anonymous_and_sessionless
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/repositories/{encoded_repository_id}
+        Returns information about a repository in the Tool Shed.
+
+        Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135
+
+        :param id: the encoded id of the Repository object
+        :type  id: encoded str
+
+        :returns:   detailed repository information
+        :rtype:     dict
+
+        :raises:  ObjectNotFound, MalformedId
+        """
+        try:
+            trans.security.decode_id( id )
+        except Exception:
+            raise MalformedId( 'The given id is invalid.' )
+
+        repository = repository_util.get_repository_in_tool_shed( self.app, id )
+        if repository is None:
+            raise ObjectNotFound( 'Unable to locate repository for the given id.' )
+        repository_dict = repository.to_dict( view='element',
+                                              value_mapper=self.__get_value_mapper( trans ) )
+        # TODO the following property would be better suited in the to_dict method
+        repository_dict[ 'category_ids' ] = \
+            [ trans.security.encode_id( x.category.id ) for x in repository.categories ]
+        return repository_dict
+
+    @expose_api_raw_anonymous_and_sessionless
+    def updates( self, trans, **kwd ):
+        """
+        GET /api/repositories/updates
+        Return a dictionary with boolean values for whether there are updates available
+        for the repository revision, newer installable revisions available,
+        the revision is the latest installable revision, and if the repository is deprecated.
+
+        :param owner: owner of the repository
+        :type  owner: str
+        :param name: name of the repository
+        :type  name: str
+        :param changeset_revision: changeset of the repository
+        :type  changeset_revision: str
+        :param hexlify: flag indicating whether to hexlify the response (for backward compatibility)
+        :type  hexlify: bool
+
+        :returns:   information about repository deprecations, updates, and upgrades
+        :rtype:     dict
+        """
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        hexlify_this = util.asbool( kwd.get( 'hexlify', True ) )
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        if repository:
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                               trans.security.encode_id( repository.id ),
+                                                                                               changeset_revision )
+            repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+            tool_shed_status_dict = {}
+            # Handle repository deprecation.
+            tool_shed_status_dict[ 'repository_deprecated' ] = str( repository.deprecated )
+            # Handle latest installable revision.
+            if changeset_revision == repository.tip( trans.app ):
+                tool_shed_status_dict[ 'latest_installable_revision' ] = 'True'
+            else:
+                next_installable_revision = metadata_util.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+                if repository_metadata is None:
+                    if next_installable_revision and next_installable_revision != changeset_revision:
+                        tool_shed_status_dict[ 'latest_installable_revision' ] = 'True'
+                    else:
+                        tool_shed_status_dict[ 'latest_installable_revision' ] = 'False'
+                else:
+                    if next_installable_revision and next_installable_revision != changeset_revision:
+                        tool_shed_status_dict[ 'latest_installable_revision' ] = 'False'
+                    else:
+                        tool_shed_status_dict[ 'latest_installable_revision' ] = 'True'
+            # Handle revision updates.
+            if changeset_revision == repository.tip( trans.app ):
+                tool_shed_status_dict[ 'revision_update' ] = 'False'
+            else:
+                if repository_metadata is None:
+                    tool_shed_status_dict[ 'revision_update' ] = 'True'
+                else:
+                    tool_shed_status_dict[ 'revision_update' ] = 'False'
+            # Handle revision upgrades.
+            metadata_revisions = [ revision[ 1 ] for revision in metadata_util.get_metadata_revisions( repository, repo ) ]
+            num_metadata_revisions = len( metadata_revisions )
+            for index, metadata_revision in enumerate( metadata_revisions ):
+                if index == num_metadata_revisions - 1:
+                    # This is the last metadata revision, so no upgrade is available.
+                    tool_shed_status_dict[ 'revision_upgrade' ] = 'False'
+                    break
+                if metadata_revision == changeset_revision:
+                    if num_metadata_revisions - index > 1:
+                        tool_shed_status_dict[ 'revision_upgrade' ] = 'True'
+                    else:
+                        tool_shed_status_dict[ 'revision_upgrade' ] = 'False'
+                    break
+            return encoding_util.tool_shed_encode( tool_shed_status_dict ) if hexlify_this else json.dumps( tool_shed_status_dict )
+        return encoding_util.tool_shed_encode( {} ) if hexlify_this else json.dumps( {} )
+
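+    # Sketch of consuming the updates endpoint above from a client: with the
+    # default hexlify=True the body has to be decoded with the Tool Shed's
+    # encoding helper, otherwise plain JSON is returned (parameter values are
+    # illustrative).
+    #
+    #     import requests
+    #     from tool_shed.util import encoding_util
+    #     params = { 'name': 'add_column', 'owner': 'test', 'changeset_revision': '3a08cc21466f' }
+    #     raw = requests.get( 'http://localhost:9009/api/repositories/updates', params=params ).text
+    #     tool_shed_status_dict = encoding_util.tool_shed_decode( raw )
+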
+    @expose_api_anonymous_and_sessionless
+    def show_tools( self, trans, id, changeset, **kwd ):
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                           id,
+                                                                                           changeset )
+        if repository_metadata is not None:
+            encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id )
+            repository_metadata_dict = repository_metadata.to_dict( view='collection',
+                                                                    value_mapper=self.__get_value_mapper( trans ) )
+            repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+                                                             action='show',
+                                                             id=encoded_repository_metadata_id )
+            if 'tools' in repository_metadata.metadata:
+                repository_metadata_dict[ 'valid_tools' ] = repository_metadata.metadata[ 'tools' ]
+            # Get the repo_info_dict for installing the repository.
+            repo_info_dict, \
+                includes_tools, \
+                includes_tool_dependencies, \
+                includes_tools_for_display_in_tool_panel, \
+                has_repository_dependencies, \
+                has_repository_dependencies_only_if_compiling_contained_td = \
+                repository_util.get_repo_info_dict( self.app,
+                                                    trans.user,
+                                                    id,
+                                                    changeset )
+            return repository_metadata_dict
+        else:
+            log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" %
+                       ( str( id ), str( changeset ) ) )
+            return {}
+
+    @expose_api_anonymous_and_sessionless
+    def metadata( self, trans, id, **kwd ):
+        """
+        GET /api/repositories/{encoded_repository_id}/metadata
+        Returns information about a repository in the Tool Shed.
+
+        Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135/metadata
+
+        :param id: the encoded id of the Repository object
+
+        :returns:   A dictionary containing the specified repository's metadata, by changeset,
+                    recursively including dependencies and their metadata.
+
+        :not found:  Empty dictionary.
+        """
+        try:
+            trans.security.decode_id( id )
+        except Exception:
+            raise MalformedId( 'The given id is invalid.' )
+        recursive = util.asbool( kwd.get( 'recursive', 'True' ) )
+        all_metadata = {}
+        repository = repository_util.get_repository_in_tool_shed( self.app, id )
+        for changeset, changehash in repository.installable_revisions( self.app ):
+            metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( self.app, repository, changehash )
+            if metadata is None:
+                continue
+            metadata_dict = metadata.to_dict( value_mapper={ 'id': self.app.security.encode_id, 'repository_id': self.app.security.encode_id } )
+            metadata_dict[ 'repository' ] = repository.to_dict( value_mapper={ 'id': self.app.security.encode_id } )
+            if metadata.has_repository_dependencies and recursive:
+                metadata_dict[ 'repository_dependencies' ] = metadata_util.get_all_dependencies( self.app, metadata, processed_dependency_links=[] )
+            else:
+                metadata_dict[ 'repository_dependencies' ] = []
+            if metadata.includes_tool_dependencies and recursive:
+                metadata_dict[ 'tool_dependencies' ] = repository.get_tool_dependencies( self.app, changehash )
+            else:
+                metadata_dict[ 'tool_dependencies' ] = {}
+            if metadata.includes_tools:
+                metadata_dict[ 'tools' ] = metadata.metadata[ 'tools' ]
+            all_metadata[ '%s:%s' % ( int( changeset ), changehash ) ] = metadata_dict
+        return all_metadata
+
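+    # The dictionary returned above is keyed on '<numeric revision>:<changeset hash>';
+    # a sketch of walking it client side (host and encoded id are assumptions):
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repositories/f9cad7b01a472135/metadata'
+    #     for key, meta in requests.get( url ).json().items():
+    #         changeset, changehash = key.split( ':' )
+    #         print changeset, changehash, meta.get( 'includes_tools' )
+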
+    @expose_api
+    def update( self, trans, id, **kwd ):
+        """
+        PATCH /api/repositories/{encoded_repository_id}
+        Updates information about a repository in the Tool Shed.
+
+        :param id: the encoded id of the Repository object
+
+        :param payload: dictionary structure containing::
+            'name':                  repo's name (optional)
+            'synopsis':              repo's synopsis (optional)
+            'description':           repo's description (optional)
+            'remote_repository_url': repo's remote repo (optional)
+            'homepage_url':          repo's homepage url (optional)
+            'category_ids':          list of existing encoded TS category ids
+                                     the updated repo should be associated with (optional)
+        :type payload: dict
+
+        :returns:   detailed repository information
+        :rtype:     dict
+
+        :raises: RequestParameterInvalidException, InsufficientPermissionsException
+        """
+        payload = kwd.get( 'payload', None )
+        if not payload:
+            raise RequestParameterMissingException( "You did not specify any payload." )
+
+        name = payload.get( 'name', None )
+        synopsis = payload.get( 'synopsis', None )
+        description = payload.get( 'description', None )
+        remote_repository_url = payload.get( 'remote_repository_url', None )
+        homepage_url = payload.get( 'homepage_url', None )
+        category_ids = payload.get( 'category_ids', None )
+        if category_ids is not None:
+            # We need to know if it was actually passed, and listify turns None into []
+            category_ids = util.listify( category_ids )
+
+        update_kwds = dict(
+            name=name,
+            description=synopsis,
+            long_description=description,
+            remote_repository_url=remote_repository_url,
+            homepage_url=homepage_url,
+            category_ids=category_ids,
+        )
+
+        repo, message = repository_util.update_repository( app=self.app, trans=trans, id=id, **update_kwds )
+        if repo is None:
+            if "You are not the owner" in message:
+                raise InsufficientPermissionsException( message )
+            else:
+                raise ActionInputError( message )
+
+        repository_dict = repo.to_dict( view='element', value_mapper=self.__get_value_mapper( trans ) )
+        repository_dict[ 'category_ids' ] = \
+            [ trans.security.encode_id( x.category.id ) for x in repo.categories ]
+        return repository_dict
+
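+    # Sketch of a PATCH request against the update endpoint above; the encoded id,
+    # API key and field values are illustrative assumptions.
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repositories/f9cad7b01a472135?key=<api key>'
+    #     payload = { 'synopsis': 'Filters tabular data on simple expressions' }
+    #     repository_dict = requests.patch( url, json=payload ).json()
+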
+    @expose_api
+    def create( self, trans, **kwd ):
+        """
+        create( self, trans, **kwd )
+        * POST /api/repositories:
+            Creates a new repository.
+            Only ``name`` and ``synopsis`` parameters are required.
+
+        :param payload: dictionary structure containing::
+            'name':                  new repo's name (required)
+            'synopsis':              new repo's synopsis (required)
+            'description':           new repo's description (optional)
+            'remote_repository_url': new repo's remote repo (optional)
+            'homepage_url':          new repo's homepage url (optional)
+            'category_ids[]':        list of existing encoded TS category ids
+                                     the new repo should be associated with (optional)
+            'type':                  new repo's type, defaults to ``unrestricted`` (optional)
+
+        :type payload: dict
+
+        :returns:   detailed repository information
+        :rtype:     dict
+
+        :raises: RequestParameterMissingException, RequestParameterInvalidException
+        """
+        payload = kwd.get( 'payload', None )
+        if not payload:
+            raise RequestParameterMissingException( "You did not specify any payload." )
+        name = payload.get( 'name', None )
+        if not name:
+            raise RequestParameterMissingException( "Missing required parameter 'name'." )
+        synopsis = payload.get( 'synopsis', None )
+        if not synopsis:
+            raise RequestParameterMissingException( "Missing required parameter 'synopsis'." )
+
+        description = payload.get( 'description', '' )
+        remote_repository_url = payload.get( 'remote_repository_url', '' )
+        homepage_url = payload.get( 'homepage_url', '' )
+        category_ids = util.listify( payload.get( 'category_ids[]', '' ) )
+
+        repo_type = payload.get( 'type', rt_util.UNRESTRICTED )
+        if repo_type not in rt_util.types:
+            raise RequestParameterInvalidException( 'This repository type is not valid.' )
+
+        invalid_message = repository_util.validate_repository_name( self.app, name, trans.user )
+        if invalid_message:
+            raise RequestParameterInvalidException( invalid_message )
+
+        repo, message = repository_util.create_repository( app=self.app,
+                                                           name=name,
+                                                           type=repo_type,
+                                                           description=synopsis,
+                                                           long_description=description,
+                                                           user_id=trans.user.id,
+                                                           category_ids=category_ids,
+                                                           remote_repository_url=remote_repository_url,
+                                                           homepage_url=homepage_url )
+
+        repository_dict = repo.to_dict( view='element', value_mapper=self.__get_value_mapper( trans ) )
+        repository_dict[ 'category_ids' ] = \
+            [ trans.security.encode_id( x.category.id ) for x in repo.categories ]
+        return repository_dict
+
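+    # A hedged example of creating a repository through the endpoint above; only
+    # name and synopsis are required, and all values shown are illustrative.
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repositories?key=<api key>'
+    #     payload = { 'name': 'new_tool_wrappers',
+    #                 'synopsis': 'Wrappers for my new tool',
+    #                 'type': 'unrestricted' }
+    #     repository_dict = requests.post( url, json=payload ).json()
+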
+    @web.expose_api
+    def create_changeset_revision( self, trans, id, payload, **kwd ):
+        """
+        POST /api/repositories/{encoded_repository_id}/changeset_revision
+
+        Create a new tool shed repository commit - leaving PUT on parent
+        resource open for updating meta-attributes of the repository (and
+        Galaxy doesn't allow PUT multipart data anyway
+        https://trello.com/c/CQwmCeG6).
+
+        :param id: the encoded id of the Repository object
+
+        The following parameters may be included in the payload.
+        :param commit_message: hg commit message for update.
+        """
+
+        # Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135
+        rdah = attribute_handlers.RepositoryDependencyAttributeHandler( self.app, unpopulate=False )
+        tdah = attribute_handlers.ToolDependencyAttributeHandler( self.app, unpopulate=False )
+
+        repository = repository_util.get_repository_in_tool_shed( self.app, id )
+
+        if not ( trans.user_is_admin() or
+                 self.app.security_agent.user_can_administer_repository( trans.user, repository ) or
+                 self.app.security_agent.can_push( self.app, trans.user, repository ) ):
+            trans.response.status = 400
+            return {
+                "err_msg": "You do not have permission to update this repository.",
+            }
+
+        repo_dir = repository.repo_path( self.app )
+        repo = hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_dir, create=False )
+
+        upload_point = commit_util.get_upload_point( repository, **kwd )
+        tip = repository.tip( self.app )
+
+        file_data = payload.get('file')
+        # Adapted from Galaxy's upload_common.py.
+        if isinstance( file_data, FieldStorage ):
+            assert not isinstance( file_data.file, StringIO.StringIO )
+            assert file_data.file.name != '<fdopen>'
+            local_filename = util.mkstemp_ln( file_data.file.name, 'upload_file_data_' )
+            file_data.file.close()
+            file_data = dict( filename=file_data.filename,
+                              local_filename=local_filename )
+        elif isinstance( file_data, dict ) and 'local_filename' not in file_data:
+            raise Exception( 'Uploaded file was encoded in a way not understood.' )
+
+        commit_message = kwd.get( 'commit_message', 'Uploaded' )
+
+        uploaded_file = open(file_data['local_filename'], 'rb')
+        uploaded_file_name = file_data['local_filename']
+
+        isgzip = checkers.is_gzip( uploaded_file_name )
+        isbz2 = False
+        if not isgzip:
+            isbz2 = checkers.is_bz2( uploaded_file_name )
+        if isgzip or isbz2:
+            # Open for reading with transparent compression.
+            tar = tarfile.open( uploaded_file_name, 'r:*' )
+        else:
+            tar = tarfile.open( uploaded_file_name )
+
+        new_repo_alert = False
+        remove_repo_files_not_in_tar = True
+
+        ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
+            repository_content_util.upload_tar(
+                trans,
+                rdah,
+                tdah,
+                repository,
+                tar,
+                uploaded_file,
+                upload_point,
+                remove_repo_files_not_in_tar,
+                commit_message,
+                new_repo_alert
+            )
+        if ok:
+            # Update the repository files for browsing.
+            hg_util.update_repository( repo )
+            # Get the new repository tip.
+            if tip == repository.tip( self.app ):
+                trans.response.status = 400
+                message = 'No changes to repository.'
+                ok = False
+            else:
+                rmm = repository_metadata_manager.RepositoryMetadataManager( app=self.app,
+                                                                             user=trans.user,
+                                                                             repository=repository )
+                status, error_message = \
+                    rmm.set_repository_metadata_due_to_new_tip( trans.request.host,
+                                                                content_alert_str=content_alert_str,
+                                                                **kwd )
+                if error_message:
+                    ok = False
+                    trans.response.status = 500
+                    message = error_message
+        else:
+            trans.response.status = 500
+        if os.path.exists( uploaded_file_name ):
+            os.remove( uploaded_file_name )
+        if not ok:
+            return {
+                "err_msg": message,
+                "content_alert": content_alert_str
+            }
+        else:
+            return {
+                "message": message,
+                "content_alert": content_alert_str
+            }
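+
+    # Sketch of driving the commit endpoint above with a multipart upload; the
+    # encoded id, API key, tarball name and commit message are assumptions.
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repositories/f9cad7b01a472135/changeset_revision?key=<api key>'
+    #     with open( 'tool_update.tar.gz', 'rb' ) as fh:
+    #         response = requests.post( url, files={ 'file': fh },
+    #                                   data={ 'commit_message': 'Uploaded new version' } )
+    #     print response.json()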
diff --git a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
new file mode 100644
index 0000000..fb0428f
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -0,0 +1,244 @@
+import logging
+
+from sqlalchemy import and_
+
+from galaxy import util
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController, HTTPBadRequest
+from tool_shed.capsule import capsule_manager
+from tool_shed.util import hg_util
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_util
+
+log = logging.getLogger( __name__ )
+
+
+class RepositoryRevisionsController( BaseAPIController ):
+    """RESTful controller for interactions with tool shed repository revisions."""
+
+    @web.expose_api_anonymous
+    def export( self, trans, payload, **kwd ):
+        """
+        POST /api/repository_revisions/export
+        Creates and saves a gzip compressed tar archive of a repository and optionally all of its repository dependencies.
+
+        The following parameters are included in the payload.
+        :param tool_shed_url (required): the base URL of the Tool Shed from which the Repository is to be exported
+        :param name (required): the name of the Repository
+        :param owner (required): the owner of the Repository
+        :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
+        :param export_repository_dependencies (optional): whether to export repository dependencies - defaults to False
+        :param download_dir (optional): the local directory to which to download the archive - defaults to /tmp
+        """
+        tool_shed_url = payload.get( 'tool_shed_url', '' )
+        if not tool_shed_url:
+            raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." )
+        tool_shed_url = tool_shed_url.rstrip( '/' )
+        name = payload.get( 'name', '' )
+        if not name:
+            raise HTTPBadRequest( detail="Missing required parameter 'name'." )
+        owner = payload.get( 'owner', '' )
+        if not owner:
+            raise HTTPBadRequest( detail="Missing required parameter 'owner'." )
+        changeset_revision = payload.get( 'changeset_revision', '' )
+        if not changeset_revision:
+            raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." )
+        export_repository_dependencies = payload.get( 'export_repository_dependencies', False )
+        # We'll currently support only gzip-compressed tar archives.
+        export_repository_dependencies = util.asbool( export_repository_dependencies )
+        # Get the repository information.
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        if repository is None:
+            error_message = 'Cannot locate repository with name %s and owner %s.' % ( str( name ), str( owner ) )
+            log.debug( error_message )
+            return None, error_message
+        erm = capsule_manager.ExportRepositoryManager( app=trans.app,
+                                                       user=trans.user,
+                                                       tool_shed_url=tool_shed_url,
+                                                       repository=repository,
+                                                       changeset_revision=changeset_revision,
+                                                       export_repository_dependencies=export_repository_dependencies,
+                                                       using_api=True )
+        return erm.export_repository()
+
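+    # Client-side sketch for the export endpoint above; every payload value shown
+    # is illustrative.
+    #
+    #     import requests
+    #     url = 'http://localhost:9009/api/repository_revisions/export'
+    #     payload = { 'tool_shed_url': 'http://localhost:9009',
+    #                 'name': 'add_column', 'owner': 'test',
+    #                 'changeset_revision': '3a08cc21466f',
+    #                 'export_repository_dependencies': True }
+    #     archive_info = requests.post( url, json=payload ).json()
+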
+    def __get_value_mapper( self, trans ):
+        value_mapper = { 'id': trans.security.encode_id,
+                         'repository_id': trans.security.encode_id,
+                         'user_id': trans.security.encode_id }
+        return value_mapper
+
+    @web.expose_api_anonymous
+    def index( self, trans, **kwd ):
+        """
+        GET /api/repository_revisions
+        Displays a collection (list) of repository revisions.
+        """
+        # Example URL: http://localhost:9009/api/repository_revisions
+        repository_metadata_dicts = []
+        # Build up an AND-ed clause list of filters.
+        clause_list = []
+        # Filter by downloadable if received.
+        downloadable = kwd.get( 'downloadable', None )
+        if downloadable is not None:
+            clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.asbool( downloadable ) )
+        # Filter by malicious if received.
+        malicious = kwd.get( 'malicious', None )
+        if malicious is not None:
+            clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.asbool( malicious ) )
+        # Filter by missing_test_components if received.
+        missing_test_components = kwd.get( 'missing_test_components', None )
+        if missing_test_components is not None:
+            clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.asbool( missing_test_components ) )
+        # Filter by includes_tools if received.
+        includes_tools = kwd.get( 'includes_tools', None )
+        if includes_tools is not None:
+            clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.asbool( includes_tools ) )
+        for repository_metadata in trans.sa_session.query( trans.app.model.RepositoryMetadata ) \
+                                                   .filter( and_( *clause_list ) ) \
+                                                   .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ):
+            repository_metadata_dict = repository_metadata.to_dict( view='collection',
+                                                                    value_mapper=self.__get_value_mapper( trans ) )
+            repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+                                                             action='show',
+                                                             id=trans.security.encode_id( repository_metadata.id ) )
+            repository_metadata_dicts.append( repository_metadata_dict )
+        return repository_metadata_dicts
+
+    @web.expose_api_anonymous
+    def repository_dependencies( self, trans, id, **kwd ):
+        """
+        GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies
+
+        Returns a list of dictionaries that each define a specific downloadable revision of a
+        repository in the Tool Shed.  This method returns dictionaries with more information in
+        them than other methods in this controller.  The information about repository_metadata is
+        enhanced to include information about the repository (e.g., name, owner, etc.) associated
+        with the repository_metadata record.
+
+        :param id: the encoded id of the `RepositoryMetadata` object
+        """
+        # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb125606ff9ea620
+        repository_dependencies_dicts = []
+        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, id )
+        if repository_metadata is None:
+            log.debug( 'Invalid repository_metadata id received: %s' % str( id ) )
+            return repository_dependencies_dicts
+        metadata = repository_metadata.metadata
+        if metadata is None:
+            log.debug( 'The repository_metadata record with id %s has no metadata.' % str( id ) )
+            return repository_dependencies_dicts
+        if 'repository_dependencies' in metadata:
+            rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+            for rd_tup in rd_tups:
+                tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ]
+                repository_dependency = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+                if repository_dependency is None:
+                    log.debug( 'Cannot locate repository dependency %s owned by %s.' % ( name, owner ) )
+                    continue
+                repository_dependency_id = trans.security.encode_id( repository_dependency.id )
+                repository_dependency_repository_metadata = \
+                    metadata_util.get_repository_metadata_by_changeset_revision( trans.app, repository_dependency_id, changeset_revision )
+                if repository_dependency_repository_metadata is None:
+                    # The changeset_revision column in the repository_metadata table has been updated
+                    # with a new value, so find the changeset_revision to which we need to update.
+                    repo = hg_util.get_repo_for_repository( trans.app,
+                                                            repository=repository_dependency,
+                                                            repo_path=None,
+                                                            create=False )
+                    new_changeset_revision = metadata_util.get_next_downloadable_changeset_revision( repository_dependency,
+                                                                                                     repo,
+                                                                                                     changeset_revision )
+                    if new_changeset_revision != changeset_revision:
+                        repository_dependency_repository_metadata = \
+                            metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                         repository_dependency_id,
+                                                                                         new_changeset_revision )
+                        changeset_revision = new_changeset_revision
+                    else:
+                        decoded_repository_dependency_id = trans.security.decode_id( repository_dependency_id )
+                        debug_msg = 'Cannot locate repository_metadata with id %d for repository dependency %s owned by %s ' % \
+                            ( decoded_repository_dependency_id, str( name ), str( owner ) )
+                        debug_msg += 'using either of these changeset_revisions: %s, %s.' % \
+                            ( str( changeset_revision ), str( new_changeset_revision ) )
+                        log.debug( debug_msg )
+                        continue
+                repository_dependency_metadata_dict = \
+                    repository_dependency_repository_metadata.to_dict( view='element',
+                                                                       value_mapper=self.__get_value_mapper( trans ) )
+                repository_dependency_dict = repository_dependency.to_dict( view='element',
+                                                                            value_mapper=self.__get_value_mapper( trans ) )
+                # We need to be careful with the entries in our repository_dependency_dict here since this Tool Shed API
+                # controller is working with repository_metadata records.  The above to_dict() method returns a dictionary
+                # with an id entry for the repository record.  However, all of the other methods in this controller have
+                # the id entry associated with a repository_metadata record id.  To avoid confusion, we'll update the
+                # repository_dependency_metadata_dict with entries from the repository_dependency_dict without using the
+                # Python dictionary update() method because we do not want to overwrite existing entries.
+                for k, v in repository_dependency_dict.items():
+                    if k not in repository_dependency_metadata_dict:
+                        repository_dependency_metadata_dict[ k ] = v
+                repository_dependency_metadata_dict[ 'url' ] = web.url_for( controller='repositories',
+                                                                            action='show',
+                                                                            id=repository_dependency_id )
+                repository_dependencies_dicts.append( repository_dependency_metadata_dict )
+        return repository_dependencies_dicts
+
+    @web.expose_api_anonymous
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/repository_revisions/{encoded_repository_metadata_id}
+        Displays information about a repository_metadata record in the Tool Shed.
+
+        :param id: the encoded id of the `RepositoryMetadata` object
+        """
+        # Example URL: http://localhost:9009/api/repository_revisions/bb125606ff9ea620
+        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, id )
+        if repository_metadata is None:
+            log.debug( 'Cannot locate repository_metadata with id %s' % str( id ) )
+            return {}
+        encoded_repository_id = trans.security.encode_id( repository_metadata.repository_id )
+        repository_metadata_dict = repository_metadata.to_dict( view='element',
+                                                                value_mapper=self.__get_value_mapper( trans ) )
+        repository_metadata_dict[ 'url' ] = web.url_for( controller='repositories',
+                                                         action='show',
+                                                         id=encoded_repository_id )
+        return repository_metadata_dict
+
+    @web.expose_api
+    def update( self, trans, payload, **kwd ):
+        """
+        PUT /api/repository_revisions/{encoded_repository_metadata_id}/{payload}
+        Updates the values of the specified columns of the repository_metadata table based on the key/value pairs in the payload.
+
+        :param id: the encoded id of the `RepositoryMetadata` object
+        """
+        repository_metadata_id = kwd.get( 'id', None )
+        if repository_metadata_id is None:
+            raise HTTPBadRequest( detail="Missing required parameter 'id'." )
+        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+        if repository_metadata is None:
+            decoded_repository_metadata_id = trans.security.decode_id( repository_metadata_id )
+            log.debug( 'Cannot locate repository_metadata with id %s' % str( decoded_repository_metadata_id ) )
+            return {}
+        else:
+            decoded_repository_metadata_id = repository_metadata.id
+        flush_needed = False
+        for key, new_value in payload.items():
+            if hasattr( repository_metadata, key ):
+                # Log information when setting attributes associated with the Tool Shed's install and test framework.
+                if key in [ 'includes_tools', 'missing_test_components' ]:
+                    log.debug( 'Setting repository_metadata column %s to value %s for changeset_revision %s via the Tool Shed API.' %
+                               ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) )
+                setattr( repository_metadata, key, new_value )
+                flush_needed = True
+        if flush_needed:
+            log.debug( 'Updating repository_metadata record with id %s and changeset_revision %s.' %
+                       ( str( decoded_repository_metadata_id ), str( repository_metadata.changeset_revision ) ) )
+            trans.sa_session.add( repository_metadata )
+            trans.sa_session.flush()
+            trans.sa_session.refresh( repository_metadata )
+        repository_metadata_dict = repository_metadata.to_dict( view='element',
+                                                                value_mapper=self.__get_value_mapper( trans ) )
+        repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+                                                         action='show',
+                                                         id=repository_metadata_id )
+        return repository_metadata_dict
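
The routes for this controller are declared in buildapp.py below as a standard 'repository_revisions' REST resource. A minimal client sketch follows; it assumes a Tool Shed listening on localhost:9009, the third-party 'requests' library, and placeholder values for the encoded metadata id and the admin API key.

    # Client sketch for the repository_revisions API (all ids/keys are placeholders).
    import requests

    BASE = 'http://localhost:9009/api/repository_revisions'

    # GET /api/repository_revisions with the optional boolean filters handled by index().
    revisions = requests.get(BASE, params={'downloadable': 'true',
                                           'includes_tools': 'true'}).json()
    for revision in revisions:
        print(revision['url'])

    # GET /api/repository_revisions/{id}/repository_dependencies for one revision.
    deps = requests.get('%s/%s/repository_dependencies' % (BASE, 'ENCODED_METADATA_ID')).json()

    # PUT /api/repository_revisions/{id} updates the columns named in the payload
    # (requires an admin API key).
    requests.put('%s/%s' % (BASE, 'ENCODED_METADATA_ID'),
                 params={'key': 'ADMIN_API_KEY'},
                 json={'missing_test_components': False})
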
diff --git a/lib/galaxy/webapps/tool_shed/api/tools.py b/lib/galaxy/webapps/tool_shed/api/tools.py
new file mode 100644
index 0000000..d13f293
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/tools.py
@@ -0,0 +1,101 @@
+import json
+import logging
+from collections import namedtuple
+from galaxy import web
+from galaxy import util
+from galaxy.web import _future_expose_api_raw_anonymous_and_sessionless as expose_api_raw_anonymous_and_sessionless
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.webapps.tool_shed.search.tool_search import ToolSearch
+from galaxy.exceptions import NotImplemented
+from galaxy.exceptions import RequestParameterInvalidException
+from galaxy.exceptions import ConfigDoesNotAllowException
+
+log = logging.getLogger( __name__ )
+
+
+class ToolsController( BaseAPIController ):
+    """RESTful controller for interactions with tools in the Tool Shed."""
+
+    @expose_api_raw_anonymous_and_sessionless
+    def index( self, trans, **kwd ):
+        """
+        GET /api/tools
+        Displays a collection of tools with optional criteria.
+
+        :param q:        (optional) if present, a search with the given query will be performed
+        :type  q:        str
+
+        :param page:     (optional) requested page of the search
+        :type  page:     int
+
+        :param page_size:     (optional) requested page_size of the search
+        :type  page_size:     int
+
+        :param jsonp:    (optional) flag whether to use a JSONP-formatted response, defaults to False
+        :type  jsonp:    bool
+
+        :param callback: (optional) name of the JavaScript function to wrap the response in;
+                         used only when jsonp is true, defaults to 'callback'
+        :type  callback: str
+
+        :returns dict:   object containing list of results and metadata
+
+        Examples:
+            GET http://localhost:9009/api/tools
+            GET http://localhost:9009/api/tools?q=fastq
+        """
+        q = kwd.get( 'q', '' )
+        if not q:
+            raise NotImplemented( 'Listing of all the tools is not implemented. Provide parameter "q" to search instead.' )
+        else:
+            page = kwd.get( 'page', 1 )
+            page_size = kwd.get( 'page_size', 10 )
+            try:
+                page = int( page )
+                page_size = int( page_size )
+            except ValueError:
+                raise RequestParameterInvalidException( 'The "page" and "page_size" have to be integers.' )
+            return_jsonp = util.asbool( kwd.get( 'jsonp', False ) )
+            callback = kwd.get( 'callback', 'callback' )
+            search_results = self._search( trans, q, page, page_size )
+            if return_jsonp:
+                response = str( '%s(%s);' % ( callback, json.dumps( search_results ) ) )
+            else:
+                response = json.dumps( search_results )
+            return response
+
+    def _search( self, trans, q, page=1, page_size=10 ):
+        """
+        Perform the search over the Tool Shed's tool index.
+        Note that the search works over the Whoosh index, which has to be
+        pre-created manually with scripts/tool_shed/build_ts_whoosh_index.sh.
+        The Tool Shed config option toolshed_search_on also has to be True,
+        and whoosh_index_dir has to be specified.
+        """
+        conf = self.app.config
+        if not conf.toolshed_search_on:
+            raise ConfigDoesNotAllowException( 'Searching the TS through the API is turned off for this instance.' )
+        if not conf.whoosh_index_dir:
+            raise ConfigDoesNotAllowException( 'There is no directory for the search index specified. Please contact the administrator.' )
+        search_term = q.strip()
+        if len( search_term ) < 3:
+            raise RequestParameterInvalidException( 'The search term has to be at least 3 characters long.' )
+
+        tool_search = ToolSearch()
+
+        Boosts = namedtuple( 'Boosts', [ 'tool_name_boost',
+                                         'tool_description_boost',
+                                         'tool_help_boost',
+                                         'tool_repo_owner_username_boost' ] )
+        boosts = Boosts( float( conf.get( 'tool_name_boost', 1.2 ) ),
+                         float( conf.get( 'tool_description_boost', 0.6 ) ),
+                         float( conf.get( 'tool_help_boost', 0.4 ) ),
+                         float( conf.get( 'tool_repo_owner_username_boost', 0.3 ) ) )
+
+        results = tool_search.search( trans,
+                                      search_term,
+                                      page,
+                                      page_size,
+                                      boosts )
+        results[ 'hostname' ] = web.url_for( '/', qualified=True )
+        return results
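
The search endpoint can be exercised as below; a sketch assuming a Tool Shed on localhost:9009 with toolshed_search_on enabled and a pre-built Whoosh index ('requests' is a third-party library).

    # Search sketch for GET /api/tools.
    import requests

    resp = requests.get('http://localhost:9009/api/tools',
                        params={'q': 'fastq', 'page': 1, 'page_size': 10})
    results = resp.json()
    # 'hostname' is added by _search(); the rest of the layout comes from ToolSearch.search().
    print(results['hostname'])

    # JSONP variant: the body arrives wrapped as my_cb({...});
    jsonp = requests.get('http://localhost:9009/api/tools',
                         params={'q': 'fastq', 'jsonp': 'true', 'callback': 'my_cb'})
    print(jsonp.text[:60])
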
diff --git a/lib/galaxy/webapps/tool_shed/api/users.py b/lib/galaxy/webapps/tool_shed/api/users.py
new file mode 100644
index 0000000..274c135
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/users.py
@@ -0,0 +1,131 @@
+import logging
+
+from galaxy import util
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.security.validate_user_input import validate_email
+from galaxy.security.validate_user_input import validate_publicname
+from galaxy.security.validate_user_input import validate_password
+import tool_shed.util.shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+
+class UsersController( BaseAPIController ):
+    """RESTful controller for interactions with users in the Tool Shed."""
+
+    @web.expose_api
+    @web.require_admin
+    def create( self, trans, payload, **kwd ):
+        """
+        POST /api/users
+        Returns a dictionary of information about the created user.
+
+        :param key: the current Galaxy admin user's API key
+
+        The following parameters are included in the payload.
+        :param email (required): the email address of the user
+        :param password (required): the password of the user
+        :param username (required): the public username of the user
+        """
+        user_dict = dict( message='',
+                          status='ok' )
+        # Get the information about the user to be created from the payload.
+        email = payload.get( 'email', '' )
+        password = payload.get( 'password', '' )
+        username = payload.get( 'username', '' )
+        message = self.__validate( trans,
+                                   email=email,
+                                   password=password,
+                                   confirm=password,
+                                   username=username )
+        if message:
+            message = 'email: %s, username: %s - %s' % ( email, username, message )
+            user_dict[ 'message' ] = message
+            user_dict[ 'status' ] = 'error'
+        else:
+            # Create the user.
+            user = self.__create_user( trans, email, username, password )
+            user_dict = user.to_dict( view='element',
+                                      value_mapper=self.__get_value_mapper( trans ) )
+            user_dict[ 'message' ] = "User '%s' has been created." % str( user.username )
+            user_dict[ 'url' ] = web.url_for( controller='users',
+                                              action='show',
+                                              id=trans.security.encode_id( user.id ) )
+        return user_dict
+
+    def __create_user( self, trans, email, username, password ):
+        user = trans.app.model.User( email=email )
+        user.set_password_cleartext( password )
+        user.username = username
+        if trans.app.config.user_activation_on:
+            user.active = False
+        else:
+            user.active = True  # Activation is off, every new user is active by default.
+        trans.sa_session.add( user )
+        trans.sa_session.flush()
+        trans.app.security_agent.create_private_user_role( user )
+        return user
+
+    def __get_value_mapper( self, trans ):
+        value_mapper = { 'id' : trans.security.encode_id }
+        return value_mapper
+
+    @web.expose_api_anonymous
+    def index( self, trans, deleted=False, **kwd ):
+        """
+        GET /api/users
+        Returns a list of dictionaries that contain information about each user.
+        """
+        # Example URL: http://localhost:9009/api/users
+        user_dicts = []
+        deleted = util.asbool( deleted )
+        for user in trans.sa_session.query( trans.app.model.User ) \
+                                    .filter( trans.app.model.User.table.c.deleted == deleted ) \
+                                    .order_by( trans.app.model.User.table.c.username ):
+            user_dict = user.to_dict( view='collection',
+                                      value_mapper=self.__get_value_mapper( trans ) )
+            user_dict[ 'url' ] = web.url_for( controller='users',
+                                              action='show',
+                                              id=trans.security.encode_id( user.id ) )
+            user_dicts.append( user_dict )
+        return user_dicts
+
+    @web.expose_api_anonymous
+    def show( self, trans, id, **kwd ):
+        """
+        GET /api/users/{encoded_user_id}
+        GET /api/users/current
+        Returns a dictionary of information about a user.
+
+        :param id: the encoded id of the User object.
+        """
+        user = None
+        # The user is requesting data about themselves.
+        if id == "current" and trans.user:
+            user = trans.user
+        else:
+            user = suc.get_user( trans.app, id )
+
+        if user is None:
+            user_dict = dict( message='Unable to locate user record for id %s.' % ( str( id ) ),
+                              status='error' )
+            return user_dict
+        user_dict = user.to_dict( view='element',
+                                  value_mapper=self.__get_value_mapper( trans ) )
+        user_dict[ 'url' ] = web.url_for( controller='users',
+                                          action='show',
+                                          id=trans.security.encode_id( user.id ) )
+        return user_dict
+
+    def __validate( self, trans, email, password, confirm, username ):
+        if not username:
+            return "A public user name is required in the Tool Shed."
+        if username in [ 'repos' ]:
+            return "The term <b>%s</b> is a reserved word in the Tool Shed, so it cannot be used as a public user name." % username
+        message = validate_email( trans, email )
+        if not message:
+            message = validate_password( trans, password, confirm )
+        if not message and username:
+            message = validate_publicname( trans, username )
+        return message
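
A usage sketch for this controller, assuming a Tool Shed on localhost:9009 and the third-party 'requests' library; the admin API key and account details are placeholders.

    # User API sketch (all values are placeholders).
    import requests

    BASE = 'http://localhost:9009/api/users'

    # POST /api/users requires an admin API key (see @web.require_admin above).
    created = requests.post(BASE,
                            params={'key': 'ADMIN_API_KEY'},
                            json={'email': 'alice@example.org',
                                  'password': 'a-strong-password',
                                  'username': 'alice'}).json()
    print(created.get('message'))

    # GET /api/users and GET /api/users/current are exposed anonymously.
    for user in requests.get(BASE).json():
        print(user['url'])
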
diff --git a/lib/galaxy/webapps/tool_shed/app.py b/lib/galaxy/webapps/tool_shed/app.py
new file mode 100644
index 0000000..b05fb93
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/app.py
@@ -0,0 +1,85 @@
+import config
+import sys
+import time
+import galaxy.datatypes.registry
+import galaxy.quota
+import galaxy.tools.data
+import galaxy.webapps.tool_shed.model
+from galaxy import tools
+from galaxy.config import configure_logging
+from galaxy.managers.tags import CommunityTagManager
+from galaxy.openid.providers import OpenIDProviders
+from galaxy.util.dbkeys import GenomeBuilds
+from galaxy.web import security
+import tool_shed.repository_registry
+import tool_shed.repository_types.registry
+from tool_shed.grids.repository_grid_filter_manager import RepositoryGridFilterManager
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class UniverseApplication( object ):
+    """Encapsulates the state of a Universe application"""
+
+    def __init__( self, **kwd ):
+        log.debug( "python path is: %s", ", ".join( sys.path ) )
+        self.name = "tool_shed"
+        # Read the tool_shed.ini configuration file and check for errors.
+        self.config = config.Configuration( **kwd )
+        self.config.check()
+        configure_logging( self.config )
+        # Initialize the Galaxy datatypes registry.
+        self.datatypes_registry = galaxy.datatypes.registry.Registry()
+        self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
+        # Initialize the Tool Shed repository_types registry.
+        self.repository_types_registry = tool_shed.repository_types.registry.Registry()
+        # Initialize the RepositoryGridFilterManager.
+        self.repository_grid_filter_manager = RepositoryGridFilterManager()
+        # Determine the Tool Shed database connection string.
+        if self.config.database_connection:
+            db_url = self.config.database_connection
+        else:
+            db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+        # Initialize the Tool Shed database and check for appropriate schema version.
+        from galaxy.webapps.tool_shed.model.migrate.check import create_or_verify_database
+        create_or_verify_database( db_url, self.config.database_engine_options )
+        # Set up the Tool Shed database engine and ORM.
+        from galaxy.webapps.tool_shed.model import mapping
+        self.model = mapping.init( self.config.file_path,
+                                   db_url,
+                                   self.config.database_engine_options )
+        # Initialize the Tool Shed security helper.
+        self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+        # Initialize the Tool Shed tag handler.
+        self.tag_handler = CommunityTagManager( self )
+        # Initialize the Tool Shed tool data tables.  Never pass a configuration file here
+        # because the Tool Shed should always have an empty dictionary!
+        self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path )
+        self.genome_builds = GenomeBuilds( self )
+        from galaxy import auth
+        self.auth_manager = auth.AuthManager( self )
+        # Citation manager needed to load tools.
+        from galaxy.managers.citations import CitationsManager
+        self.citations_manager = CitationsManager( self )
+        # The Tool Shed makes no use of a Galaxy toolbox, but this attribute is still required.
+        self.toolbox = tools.ToolBox( [], self.config.tool_path, self )
+        # Initialize the Tool Shed security agent.
+        self.security_agent = self.model.security_agent
+        # The Tool Shed makes no use of a quota, but this attribute is still required.
+        self.quota_agent = galaxy.quota.NoQuotaAgent( self.model )
+        # TODO: Add OpenID support
+        self.openid_providers = OpenIDProviders()
+        # Initialize the baseline Tool Shed statistics component.
+        self.shed_counter = self.model.shed_counter
+        # Let the Tool Shed's HgwebConfigManager know where the hgweb.config file is located.
+        self.hgweb_config_manager = self.model.hgweb_config_manager
+        self.hgweb_config_manager.hgweb_config_dir = self.config.hgweb_config_dir
+        # Initialize the repository registry.
+        self.repository_registry = tool_shed.repository_registry.Registry( self )
+        # Used for cachebusting -- refactor this into a *SINGLE* UniverseApplication base.
+        self.server_starttime = int(time.time())
+        log.debug( "Tool shed hgweb.config file is: %s", self.hgweb_config_manager.hgweb_config )
+
+    def shutdown( self ):
+        pass
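
The constructor wires the whole Tool Shed together from keyword arguments mirroring tool_shed.ini. A boot sketch under heavy assumptions: it only starts cleanly from a Galaxy checkout where the default relative paths and sample config files referenced in config.py resolve, and the hgweb_config_dir value shown is hypothetical.

    # Boot sketch; must be run from a Galaxy checkout (see assumptions above).
    from galaxy.webapps.tool_shed.app import UniverseApplication

    app = UniverseApplication(
        # When omitted, the constructor falls back to the bundled SQLite database.
        database_connection='sqlite:///database/community.sqlite?isolation_level=IMMEDIATE',
        hgweb_config_dir='database/hgweb',  # hypothetical location
    )
    print(app.config.version)
    app.shutdown()
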
diff --git a/lib/galaxy/webapps/tool_shed/buildapp.py b/lib/galaxy/webapps/tool_shed/buildapp.py
new file mode 100644
index 0000000..59c09a1
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/buildapp.py
@@ -0,0 +1,313 @@
+"""
+Provides factory methods to assemble the Galaxy web application
+"""
+import atexit
+import logging
+import os
+import routes
+
+from six.moves.urllib.parse import parse_qs
+from inspect import isclass
+from paste import httpexceptions
+from galaxy.util import asbool
+
+import galaxy.webapps.tool_shed.model
+import galaxy.webapps.tool_shed.model.mapping
+import galaxy.web.framework.webapp
+from galaxy.webapps.util import build_template_error_formatters
+from galaxy import util
+from galaxy.util.postfork import process_is_uwsgi
+from galaxy.util.properties import load_app_properties
+from routes.middleware import RoutesMiddleware
+
+log = logging.getLogger( __name__ )
+
+
+class CommunityWebApplication( galaxy.web.framework.webapp.WebApplication ):
+    pass
+
+
+def add_ui_controllers( webapp, app ):
+    """
+    Search for controllers in the 'galaxy.webapps.tool_shed.controllers' module
+    and add them to the webapp.
+    """
+    from galaxy.web.base.controller import BaseUIController
+    import galaxy.webapps.tool_shed.controllers
+    controller_dir = galaxy.webapps.tool_shed.controllers.__path__[0]
+    for fname in os.listdir( controller_dir ):
+        if not fname.startswith( "_" ) and fname.endswith( ".py" ):
+            name = fname[:-3]
+            module_name = "galaxy.webapps.tool_shed.controllers." + name
+            module = __import__( module_name )
+            for comp in module_name.split( "." )[1:]:
+                module = getattr( module, comp )
+            # Look for a controller inside the modules
+            for key in dir( module ):
+                T = getattr( module, key )
+                if isclass( T ) and T is not BaseUIController and issubclass( T, BaseUIController ):
+                    webapp.add_ui_controller( name, T( app ) )
+
+
+def app_factory( global_conf, **kwargs ):
+    """Return a wsgi application serving the root object"""
+    # Create the Galaxy tool shed application unless passed in
+    kwargs = load_app_properties(
+        kwds=kwargs,
+        config_prefix='TOOL_SHED_CONFIG_'
+    )
+    if 'app' in kwargs:
+        app = kwargs.pop( 'app' )
+    else:
+        try:
+            from galaxy.webapps.tool_shed.app import UniverseApplication
+            app = UniverseApplication( global_conf=global_conf, **kwargs )
+        except:
+            import traceback
+            import sys
+            traceback.print_exc()
+            sys.exit( 1 )
+    atexit.register( app.shutdown )
+    # Create the universe WSGI application
+    webapp = CommunityWebApplication( app, session_cookie='galaxycommunitysession', name="tool_shed" )
+    add_ui_controllers( webapp, app )
+    webapp.add_route( '/view/{owner}', controller='repository', action='sharable_owner' )
+    webapp.add_route( '/view/{owner}/{name}', controller='repository', action='sharable_repository' )
+    webapp.add_route( '/view/{owner}/{name}/{changeset_revision}', controller='repository', action='sharable_repository_revision' )
+    # Handle displaying tool help images and README file images for tools contained in repositories.
+    webapp.add_route( '/repository/static/images/{repository_id}/{image_file:.+?}',
+                      controller='repository',
+                      action='display_image_in_repository',
+                      repository_id=None,
+                      image_file=None )
+    webapp.add_route( '/{controller}/{action}', action='index' )
+    webapp.add_route( '/{action}', controller='repository', action='index' )
+    # Enable 'hg clone' functionality on repos by letting hgwebapp handle the request
+    webapp.add_route( '/repos/*path_info', controller='hg', action='handle_request', path_info='/' )
+    # Add the web API.  A good resource for RESTful services: http://routes.readthedocs.org/en/latest/restful.html
+    webapp.add_api_controllers( 'galaxy.webapps.tool_shed.api', app )
+    webapp.mapper.connect( 'api_key_retrieval',
+                           '/api/authenticate/baseauth/',
+                           controller='authenticate',
+                           action='get_tool_shed_api_key',
+                           conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( 'group',
+                           '/api/groups/',
+                           controller='groups',
+                           action='index',
+                           conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( 'group',
+                           '/api/groups/',
+                           controller='groups',
+                           action='create',
+                           conditions=dict( method=[ "POST" ] ) )
+    webapp.mapper.connect( 'group',
+                           '/api/groups/{encoded_id}',
+                           controller='groups',
+                           action='show',
+                           conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.resource( 'category',
+                            'categories',
+                            controller='categories',
+                            name_prefix='category_',
+                            path_prefix='/api',
+                            parent_resources=dict( member_name='category', collection_name='categories' ) )
+    webapp.mapper.connect( 'repositories_in_category',
+                           '/api/categories/{category_id}/repositories',
+                           controller='categories',
+                           action='get_repositories',
+                           conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( 'show_updates_for_repository',
+                           '/api/repositories/updates',
+                           controller='repositories',
+                           action='updates',
+                           conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.resource( 'repository',
+                            'repositories',
+                            controller='repositories',
+                            collection={ 'add_repository_registry_entry': 'POST',
+                                         'get_repository_revision_install_info': 'GET',
+                                         'get_ordered_installable_revisions': 'GET',
+                                         'get_installable_revisions': 'GET',
+                                         'remove_repository_registry_entry': 'POST',
+                                         'repository_ids_for_setting_metadata': 'GET',
+                                         'reset_metadata_on_repositories': 'POST',
+                                         'reset_metadata_on_repository': 'POST' },
+                            name_prefix='repository_',
+                            path_prefix='/api',
+                            new={ 'import_capsule': 'POST' },
+                            parent_resources=dict( member_name='repository', collection_name='repositories' ) )
+    webapp.mapper.resource( 'repository_revision',
+                            'repository_revisions',
+                            member={ 'repository_dependencies': 'GET',
+                                     'export': 'POST' },
+                            controller='repository_revisions',
+                            name_prefix='repository_revision_',
+                            path_prefix='/api',
+                            parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) )
+    webapp.mapper.resource( 'user',
+                            'users',
+                            controller='users',
+                            name_prefix='user_',
+                            path_prefix='/api',
+                            parent_resources=dict( member_name='user', collection_name='users' ) )
+    webapp.mapper.connect( 'update_repository',
+                           '/api/repositories/{id}',
+                           controller='repositories',
+                           action='update',
+                           conditions=dict( method=[ "PATCH", "PUT" ] ) )
+    webapp.mapper.connect( 'repository_create_changeset_revision',
+                           '/api/repositories/{id}/changeset_revision',
+                           controller='repositories',
+                           action='create_changeset_revision',
+                           conditions=dict( method=[ "POST" ] ) )
+    webapp.mapper.connect( 'repository_get_metadata',
+                           '/api/repositories/{id}/metadata',
+                           controller='repositories',
+                           action='metadata',
+                           conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( 'repository_show_tools',
+                           '/api/repositories/{id}/{changeset}/show_tools',
+                           controller='repositories',
+                           action='show_tools',
+                           conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( 'create_repository',
+                           '/api/repositories',
+                           controller='repositories',
+                           action='create',
+                           conditions=dict( method=[ "POST" ] ) )
+    webapp.mapper.connect( 'tools',
+                           '/api/tools',
+                           controller='tools',
+                           action='index',
+                           conditions=dict( method=[ "GET" ] ) )
+    webapp.mapper.connect( "version", "/api/version", controller="configuration", action="version", conditions=dict( method=[ "GET" ] ) )
+
+    webapp.finalize_config()
+    # Wrap the webapp in some useful middleware
+    if kwargs.get( 'middleware', True ):
+        webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
+    if asbool( kwargs.get( 'static_enabled', True) ):
+        if process_is_uwsgi:
+            log.error("Static middleware is enabled in your configuration but this is a uwsgi process.  Refusing to wrap in static middleware.")
+        else:
+            webapp = wrap_in_static( webapp, global_conf, **kwargs )
+    # Close any pooled database connections before forking
+    try:
+        galaxy.webapps.tool_shed.model.mapping.metadata.bind.dispose()
+    except:
+        log.exception("Unable to dispose of pooled tool_shed model database connections.")
+    # Return
+    return webapp
+
+
+def wrap_in_middleware( app, global_conf, **local_conf ):
+    """Based on the configuration wrap `app` in a set of common and useful middleware."""
+    # Merge the global and local configurations
+    conf = global_conf.copy()
+    conf.update( local_conf )
+    debug = asbool( conf.get( 'debug', False ) )
+    # First put into place httpexceptions, which must be most closely
+    # wrapped around the application (it can interact poorly with
+    # other middleware):
+    app = httpexceptions.make_middleware( app, conf )
+    log.debug( "Enabling 'httpexceptions' middleware" )
+    # Create a separate mapper for redirects to prevent conflicts.
+    redirect_mapper = routes.Mapper()
+    redirect_mapper = _map_redirects( redirect_mapper )
+    # Load the Routes middleware which we use for redirecting
+    app = RoutesMiddleware( app, redirect_mapper )
+    log.debug( "Enabling 'routes' middleware" )
+    # If we're using remote_user authentication, add middleware that
+    # protects Galaxy from improperly configured authentication in the
+    # upstream server
+    if asbool(conf.get( 'use_remote_user', False )):
+        from galaxy.webapps.tool_shed.framework.middleware.remoteuser import RemoteUser
+        app = RemoteUser( app, maildomain=conf.get( 'remote_user_maildomain', None ),
+                          display_servers=util.listify( conf.get( 'display_servers', '' ) ),
+                          admin_users=conf.get( 'admin_users', '' ).split( ',' ),
+                          remote_user_secret_header=conf.get('remote_user_secret', None) )
+        log.debug( "Enabling 'remote user' middleware" )
+    # The recursive middleware allows for including requests in other
+    # requests or forwarding of requests, all on the server side.
+    if asbool(conf.get('use_recursive', True)):
+        from paste import recursive
+        app = recursive.RecursiveMiddleware( app, conf )
+        log.debug( "Enabling 'recursive' middleware" )
+    if debug and asbool( conf.get( 'use_interactive', False ) ) and not process_is_uwsgi:
+        # Interactive exception debugging, scary dangerous if publicly
+        # accessible, if not enabled we'll use the regular error printing
+        # middleware.
+        from weberror import evalexception
+        app = evalexception.EvalException( app, conf,
+                                           templating_formatters=build_template_error_formatters() )
+        log.debug( "Enabling 'eval exceptions' middleware" )
+    else:
+        if debug and asbool( conf.get( 'use_interactive', False ) ) and process_is_uwsgi:
+            log.error("Interactive debugging middleware is enabled in your configuration "
+                      "but this is a uwsgi process.  Refusing to wrap in interactive error middleware.")
+        # Not in interactive debug mode, just use the regular error middleware
+        import galaxy.web.framework.middleware.error
+        app = galaxy.web.framework.middleware.error.ErrorMiddleware( app, conf )
+        log.debug( "Enabling 'error' middleware" )
+    # Transaction logging (apache access.log style)
+    if asbool( conf.get( 'use_translogger', True ) ):
+        from paste.translogger import TransLogger
+        app = TransLogger( app )
+        log.debug( "Enabling 'trans logger' middleware" )
+    # If sentry logging is enabled, log here before propagating up to
+    # the error middleware.
+    # TODO sentry config is duplicated between tool_shed/galaxy, refactor this.
+    sentry_dsn = conf.get( 'sentry_dsn', None )
+    if sentry_dsn:
+        from galaxy.web.framework.middleware.sentry import Sentry
+        log.debug( "Enabling 'sentry' middleware" )
+        app = Sentry( app, sentry_dsn )
+    # X-Forwarded-Host handling
+    from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware
+    app = XForwardedHostMiddleware( app )
+    log.debug( "Enabling 'x-forwarded-host' middleware" )
+    # Various debug middleware that can only be turned on if the debug
+    # flag is set, either because they are insecure or greatly hurt
+    # performance. The print debug middleware needs to be loaded last,
+    # since there is a quirk in its behavior that breaks some (but not
+    # all) subsequent middlewares.
+    if debug:
+        # Middleware to check for WSGI compliance
+        if asbool( conf.get( 'use_lint', True ) ):
+            from paste import lint
+            app = lint.make_middleware( app, conf )
+            log.debug( "Enabling 'lint' middleware" )
+        # Middleware to run the python profiler on each request
+        if asbool( conf.get( 'use_profile', False ) ):
+            import profile
+            app = profile.ProfileMiddleware( app, conf )
+            log.debug( "Enabling 'profile' middleware" )
+        # Middleware that intercepts print statements and shows them on the
+        # returned page
+        if asbool( conf.get( 'use_printdebug', True ) ):
+            from paste.debug import prints
+            app = prints.PrintDebugMiddleware( app, conf )
+            log.debug( "Enabling 'print debug' middleware" )
+    return app
+
+
+def wrap_in_static( app, global_conf, **local_conf ):
+    urlmap, _ = galaxy.web.framework.webapp.build_url_map( app, global_conf, local_conf )
+    return urlmap
+
+
+def _map_redirects( mapper ):
+    """
+    Add redirect to the Routes mapper and forward the received query string.
+    Subsequently when the redirect is triggered in Routes middleware the request
+    will not even reach the webapp.
+    """
+    def forward_qs(environ, result):
+        qs_dict = parse_qs(environ['QUERY_STRING'])
+        for qs in qs_dict:
+            result[ qs ] = qs_dict[ qs ]
+        return True
+
+    mapper.redirect( "/repository/status_for_installed_repository", "/api/repositories/updates/", _redirect_code="301 Moved Permanently", conditions=dict( function=forward_qs ) )
+    return mapper
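
The redirect installed by _map_redirects can be exercised in isolation with Paste's test fixture (Paste and Routes are already dependencies of this module); a sketch:

    # Redirect sketch: RoutesMiddleware answers the 301 itself, so the dummy
    # app below is reached only when no redirect route matches.
    import routes
    from paste.fixture import TestApp
    from routes.middleware import RoutesMiddleware
    from galaxy.webapps.tool_shed.buildapp import _map_redirects

    def dummy_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['no redirect matched']

    app = RoutesMiddleware(dummy_app, _map_redirects(routes.Mapper()))
    resp = TestApp(app).get(
        '/repository/status_for_installed_repository?name=fastqc&owner=devteam',
        status=301)
    # forward_qs copies the original query string onto the redirect target.
    print(resp.header('location'))
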
diff --git a/lib/galaxy/webapps/tool_shed/config.py b/lib/galaxy/webapps/tool_shed/config.py
new file mode 100644
index 0000000..d787ace
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/config.py
@@ -0,0 +1,253 @@
+"""
+Universe configuration builder.
+"""
+import os
+import re
+import logging
+import logging.config
+import ConfigParser
+from datetime import timedelta
+from galaxy.util import string_as_bool
+from galaxy.web.formatting import expand_pretty_datetime_format
+from galaxy.version import VERSION, VERSION_MAJOR
+
+log = logging.getLogger( __name__ )
+
+
+def resolve_path( path, root ):
+    """If 'path' is relative make absolute by prepending 'root'"""
+    if not( os.path.isabs( path ) ):
+        path = os.path.join( root, path )
+    return path
+
+
+class ConfigurationError( Exception ):
+    pass
+
+
+class Configuration( object ):
+
+    def __init__( self, **kwargs ):
+        self.config_dict = kwargs
+        self.root = kwargs.get( 'root_dir', '.' )
+
+        # Resolve paths of other config files
+        self.__parse_config_file_options( kwargs )
+
+        # Collect the umask and primary gid from the environment
+        self.umask = os.umask( 077 )  # get the current umask
+        os.umask( self.umask )  # can't get w/o set, so set it back
+        self.gid = os.getgid()  # if running under newgrp(1) we'll need to fix the group of data created on the cluster
+        self.version_major = VERSION_MAJOR
+        self.version = VERSION
+        # Database related configuration
+        self.database = resolve_path( kwargs.get( "database_file", "database/community.sqlite" ), self.root )
+        self.database_connection = kwargs.get( "database_connection", False )
+        self.database_engine_options = get_database_engine_options( kwargs )
+        self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
+        # Repository and Tool search API
+        self.toolshed_search_on = string_as_bool( kwargs.get( "toolshed_search_on", True ) )
+        self.whoosh_index_dir = kwargs.get( "whoosh_index_dir", 'database/toolshed_whoosh_indexes' )
+        self.repo_name_boost = kwargs.get( "repo_name_boost", 0.9 )
+        self.repo_description_boost = kwargs.get( "repo_description_boost", 0.6 )
+        self.repo_long_description_boost = kwargs.get( "repo_long_description_boost", 0.5 )
+        self.repo_homepage_url_boost = kwargs.get( "repo_homepage_url_boost", 0.3 )
+        self.repo_remote_repository_url_boost = kwargs.get( "repo_remote_repository_url_boost", 0.2 )
+        self.repo_owner_username_boost = kwargs.get( "repo_owner_username_boost", 0.3 )
+        self.tool_name_boost = kwargs.get( "tool_name_boost", 1.2 )
+        self.tool_description_boost = kwargs.get( "tool_description_boost", 0.6 )
+        self.tool_help_boost = kwargs.get( "tool_help_boost", 0.4 )
+        self.tool_repo_owner_username_boost = kwargs.get( "tool_repo_owner_username_boost", 0.3 )
+        # Analytics
+        self.ga_code = kwargs.get( "ga_code", None )
+        self.session_duration = int(kwargs.get( 'session_duration', 0 ))
+        # Where dataset files are stored
+        self.file_path = resolve_path( kwargs.get( "file_path", "database/community_files" ), self.root )
+        self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
+        self.cookie_path = kwargs.get( "cookie_path", "/" )
+        self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
+        self.id_secret = kwargs.get( "id_secret", "changethisinproductiontoo")
+        # Tool stuff
+        self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
+        self.tool_secret = kwargs.get( "tool_secret", "" )
+        self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "shed-tool-data" ), os.getcwd() )
+        self.tool_data_table_config_path = None
+        self.integrated_tool_panel_config = resolve_path( kwargs.get( 'integrated_tool_panel_config', 'integrated_tool_panel.xml' ), self.root )
+        self.builds_file_path = resolve_path( kwargs.get( "builds_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'builds.txt') ), self.root )
+        self.len_file_path = resolve_path( kwargs.get( "len_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'chrom') ), self.root )
+        self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
+        self.update_integrated_tool_panel = False
+        # Galaxy flavor Docker Image
+        self.enable_galaxy_flavor_docker_image = string_as_bool( kwargs.get( "enable_galaxy_flavor_docker_image", "False" ) )
+        self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
+        self.user_activation_on = None
+        self.registration_warning_message = kwargs.get( 'registration_warning_message', None )
+        self.terms_url = kwargs.get( 'terms_url', None )
+        self.blacklist_location = kwargs.get( 'blacklist_file', None )
+        self.blacklist_content = None
+        self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
+        self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' )
+        self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
+        self.remote_user_secret = kwargs.get( "remote_user_secret", None )
+        self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
+        self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
+        self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
+        self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
+        self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
+        self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/community" ), self.root )
+        self.admin_users = kwargs.get( "admin_users", "" )
+        self.admin_users_list = [u.strip() for u in self.admin_users.split(',') if u]
+        self.mailing_join_addr = kwargs.get('mailing_join_addr', "galaxy-announce-join at bx.psu.edu")
+        self.error_email_to = kwargs.get( 'error_email_to', None )
+        self.smtp_server = kwargs.get( 'smtp_server', None )
+        self.smtp_username = kwargs.get( 'smtp_username', None )
+        self.smtp_password = kwargs.get( 'smtp_password', None )
+        self.smtp_ssl = kwargs.get( 'smtp_ssl', None )
+        self.start_job_runners = kwargs.get( 'start_job_runners', None )
+        self.email_from = kwargs.get( 'email_from', None )
+        self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
+        self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
+        self.brand = kwargs.get( 'brand', None )
+        self.pretty_datetime_format = expand_pretty_datetime_format( kwargs.get( 'pretty_datetime_format', '$locale (UTC)' ) )
+        # Configuration for the message box directly below the masthead.
+        self.message_box_visible = string_as_bool( kwargs.get( 'message_box_visible', False ) )
+        self.message_box_content = kwargs.get( 'message_box_content', None )
+        self.message_box_class = kwargs.get( 'message_box_class', 'info' )
+        self.support_url = kwargs.get( 'support_url', 'https://wiki.galaxyproject.org/Support' )
+        self.wiki_url = kwargs.get( 'wiki_url', 'https://wiki.galaxyproject.org/' )
+        self.blog_url = kwargs.get( 'blog_url', None )
+        self.biostar_url = kwargs.get( 'biostar_url', None )
+        self.screencasts_url = kwargs.get( 'screencasts_url', None )
+        self.log_events = False
+        self.cloud_controller_instance = False
+        self.server_name = ''
+        self.job_manager = ''
+        self.default_job_handlers = []
+        self.default_cluster_job_runner = 'local:///'
+        self.job_handlers = []
+        self.tool_handlers = []
+        self.tool_runners = []
+        # Error logging with sentry
+        self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
+        # Where the tool shed hgweb.config file is stored - the default is the Galaxy installation directory.
+        self.hgweb_config_dir = resolve_path( kwargs.get( 'hgweb_config_dir', '' ), self.root )
+        # Proxy features
+        self.apache_xsendfile = kwargs.get( 'apache_xsendfile', False )
+        self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
+        self.drmaa_external_runjob_script = kwargs.get('drmaa_external_runjob_script', None )
+        # Parse global_conf and save the parser
+        global_conf = kwargs.get( 'global_conf', None )
+        global_conf_parser = ConfigParser.ConfigParser()
+        self.global_conf_parser = global_conf_parser
+        if global_conf and "__file__" in global_conf:
+            global_conf_parser.read(global_conf['__file__'])
+        self.running_functional_tests = string_as_bool( kwargs.get( 'running_functional_tests', False ) )
+        self.citation_cache_type = kwargs.get( "citation_cache_type", "file" )
+        self.citation_cache_data_dir = resolve_path( kwargs.get( "citation_cache_data_dir", "database/tool_shed_citations/data" ), self.root )
+        self.citation_cache_lock_dir = resolve_path( kwargs.get( "citation_cache_lock_dir", "database/tool_shed_citations/locks" ), self.root )
+        self.password_expiration_period = timedelta(days=int(kwargs.get("password_expiration_period", 0)))
+
+    @property
+    def shed_tool_data_path( self ):
+        return self.tool_data_path
+
+    @property
+    def sentry_dsn_public( self ):
+        """
+        Sentry URL with the private key removed, for use in client-side scripts.
+        The Sentry server will need to be configured to accept events.
+        """
+        # TODO refactor this to a common place between toolshed/galaxy config, along
+        # with other duplicated methods.
+        if self.sentry_dsn:
+            return re.sub( r"^([^:/?#]+:)?//(\w+):(\w+)", r"\1//\2", self.sentry_dsn )
+        else:
+            return None
+
+    def __parse_config_file_options( self, kwargs ):
+        defaults = dict(
+            auth_config_file=[ 'config/auth_conf.xml', 'config/auth_conf.xml.sample' ],
+            datatypes_config_file=[ 'config/datatypes_conf.xml', 'datatypes_conf.xml', 'config/datatypes_conf.xml.sample' ],
+            shed_tool_data_table_config=[ 'shed_tool_data_table_conf.xml', 'config/shed_tool_data_table_conf.xml' ],
+        )
+
+        for var, var_defaults in defaults.items():
+            if kwargs.get( var, None ) is not None:
+                path = kwargs.get( var )
+            else:
+                for default in var_defaults:
+                    if os.path.exists( resolve_path( default, self.root ) ):
+                        path = default
+                        break
+                else:
+                    path = var_defaults[-1]
+            setattr( self, var, resolve_path( path, self.root ) )
+
+        # Backwards compatibility for names used in too many places to fix
+        self.datatypes_config = self.datatypes_config_file
+
+    def get( self, key, default ):
+        return self.config_dict.get( key, default )
+
+    def get_bool( self, key, default ):
+        if key in self.config_dict:
+            return string_as_bool( self.config_dict[key] )
+        else:
+            return default
+
+    def check( self ):
+        # Check that required directories exist.
+        paths_to_check = [ self.root, self.file_path, self.hgweb_config_dir, self.tool_data_path, self.template_path ]
+        for path in paths_to_check:
+            if path not in [ None, False ] and not os.path.isdir( path ):
+                try:
+                    os.makedirs( path )
+                except Exception as e:
+                    raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
+        # Create the directories that it makes sense to create.
+        for path in ( self.file_path,
+                      self.template_cache,
+                      os.path.join( self.tool_data_path, 'shared', 'jars' ) ):
+            if path not in [ None, False ] and not os.path.isdir( path ):
+                try:
+                    os.makedirs( path )
+                except Exception as e:
+                    raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
+        # Check that required files exist.
+        if not os.path.isfile( self.datatypes_config ):
+            raise ConfigurationError( "File not found: %s" % self.datatypes_config )
+
+    def is_admin_user( self, user ):
+        """
+        Determine if the provided user is listed in `admin_users`.
+        """
+        admin_users = self.get( "admin_users", "" ).split( "," )
+        return user is not None and user.email in admin_users
+
+
+def get_database_engine_options( kwargs ):
+    """
+    Allow options for the SQLAlchemy database engine to be passed by using
+    the prefix "database_engine_option".
+    """
+    conversions = {
+        'convert_unicode': string_as_bool,
+        'pool_timeout': int,
+        'echo': string_as_bool,
+        'echo_pool': string_as_bool,
+        'pool_recycle': int,
+        'pool_size': int,
+        'max_overflow': int,
+        'pool_threadlocal': string_as_bool,
+        'server_side_cursors': string_as_bool
+    }
+    prefix = "database_engine_option_"
+    prefix_len = len( prefix )
+    rval = {}
+    for key, value in kwargs.iteritems():
+        if key.startswith( prefix ):
+            key = key[prefix_len:]
+            if key in conversions:
+                value = conversions[key](value)
+            rval[ key ] = value
+    return rval
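
For example, a runnable sketch of the prefix handling above:

    # Prefix stripping and type conversion performed by get_database_engine_options.
    from galaxy.webapps.tool_shed.config import get_database_engine_options

    options = get_database_engine_options({
        'database_engine_option_pool_size': '10',
        'database_engine_option_echo': 'True',
        'unrelated_setting': 'ignored',
    })
    print(options)  # -> {'pool_size': 10, 'echo': True}
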
diff --git a/lib/galaxy/webapps/tool_shed/controllers/__init__.py b/lib/galaxy/webapps/tool_shed/controllers/__init__.py
new file mode 100644
index 0000000..551fc75
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/controllers/__init__.py
@@ -0,0 +1 @@
+"""Galaxy tool shed controllers."""
diff --git a/lib/galaxy/webapps/tool_shed/controllers/admin.py b/lib/galaxy/webapps/tool_shed/controllers/admin.py
new file mode 100644
index 0000000..06d1c75
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py
@@ -0,0 +1,494 @@
+import logging
+
+from galaxy import util
+from galaxy.util import inflector
+from galaxy import web
+from tool_shed.util.web_util import escape
+
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.base.controllers.admin import Admin
+
+import tool_shed.grids.admin_grids as admin_grids
+from tool_shed.metadata import repository_metadata_manager
+
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+
+class AdminController( BaseUIController, Admin ):
+
+    user_list_grid = admin_grids.UserGrid()
+    role_list_grid = admin_grids.RoleGrid()
+    group_list_grid = admin_grids.GroupGrid()
+    manage_category_grid = admin_grids.ManageCategoryGrid()
+    repository_grid = admin_grids.AdminRepositoryGrid()
+    repository_metadata_grid = admin_grids.RepositoryMetadataGrid()
+
+    @web.expose
+    @web.require_admin
+    def browse_repositories( self, trans, **kwd ):
+        # We add parameters to the keyword dict in this method in order to rename the param
+        # with an "f-" prefix, simulating filtering by clicking a search link.  We have
+        # to take this approach because the "-" character cannot appear in a Python keyword argument name.
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='browse_repositories',
+                                                                  **kwd ) )
+            elif operation == "edit_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='edit_repository',
+                                                                  **kwd ) )
+            elif operation == "repositories_by_user":
+                # Eliminate the current filters if any exist.
+                for k, v in kwd.items():
+                    if k.startswith( 'f-' ):
+                        del kwd[ k ]
+                if 'user_id' in kwd:
+                    user = suc.get_user( trans.app, kwd[ 'user_id' ] )
+                    kwd[ 'f-email' ] = user.email
+                    del kwd[ 'user_id' ]
+                else:
+                    # The received id is the repository id, so we need to get the email of
+                    # the user that uploaded the repository.
+                    repository_id = kwd.get( 'id', None )
+                    repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                    kwd[ 'f-email' ] = repository.user.email
+            elif operation == "repositories_by_category":
+                # Eliminate the current filters if any exist.
+                for k, v in kwd.items():
+                    if k.startswith( 'f-' ):
+                        del kwd[ k ]
+                category_id = kwd.get( 'id', None )
+                category = suc.get_category( trans.app, category_id )
+                kwd[ 'f-Category.name' ] = category.name
+            elif operation == "receive email alerts":
+                if kwd[ 'id' ]:
+                    kwd[ 'caller' ] = 'browse_repositories'
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action='set_email_alerts',
+                                                                      **kwd ) )
+                else:
+                    del kwd[ 'operation' ]
+            elif operation == 'delete':
+                return self.delete_repository( trans, **kwd )
+            elif operation == "undelete":
+                return self.undelete_repository( trans, **kwd )
+        # The changeset_revision_select_field in the RepositoryGrid performs a refresh_on_change
+        # which sends in request parameters like changeset_revision_1, changeset_revision_2, etc.  One
+        # of the many select fields on the grid performed the refresh_on_change, so we loop through
+        # all of the received values to see which value is not the repository tip.  If we find it, we
+        # know the refresh_on_change occurred, and we have the necessary repository id and changeset
+        # revision to pass on.
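+        # For example, a refresh_on_change might send (a sketch with made-up values):
+        #   kwd = { 'changeset_revision_42': '9b03e5d9a7c4', ... }
+        # from which we recover the repository with (decoded) id 42 and the selected
+        # revision 9b03e5d9a7c4.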
+        for k, v in kwd.items():
+            changeset_revision_str = 'changeset_revision_'
+            if k.startswith( changeset_revision_str ):
+                repository_id = trans.security.encode_id( int( k[ len( changeset_revision_str ): ] ) )
+                repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                if repository.tip( trans.app ) != v:
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action='browse_repositories',
+                                                                      operation='view_or_manage_repository',
+                                                                      id=trans.security.encode_id( repository.id ),
+                                                                      changeset_revision=v ) )
+        # Render the list view
+        return self.repository_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def browse_repository_metadata( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "delete":
+                return self.delete_repository_metadata( trans, **kwd )
+            if operation == "view_or_manage_repository_revision":
+                # The received id is a RepositoryMetadata object id, so we need to get the
+                # associated Repository and redirect to view_or_manage_repository with the
+                # changeset_revision.
+                repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, kwd[ 'id' ] )
+                repository = repository_metadata.repository
+                kwd[ 'id' ] = trans.security.encode_id( repository.id )
+                kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
+                kwd[ 'operation' ] = 'view_or_manage_repository'
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='browse_repositories',
+                                                                  **kwd ) )
+        return self.repository_metadata_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def create_category( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        name = kwd.get( 'name', '' ).strip()
+        description = kwd.get( 'description', '' ).strip()
+        if kwd.get( 'create_category_button', False ):
+            if not name or not description:
+                message = 'Enter a valid name and a description'
+                status = 'error'
+            elif suc.get_category_by_name( trans.app, name ):
+                message = 'A category with that name already exists'
+                status = 'error'
+            else:
+                # Create the category
+                category = trans.app.model.Category( name=name, description=description )
+                trans.sa_session.add( category )
+                trans.sa_session.flush()
+                # Update the Tool Shed's repository registry.
+                trans.app.repository_registry.add_category_entry( category )
+                message = "Category '%s' has been created" % escape( category.name )
+                status = 'done'
+                trans.response.send_redirect( web.url_for( controller='admin',
+                                                           action='manage_categories',
+                                                           message=message,
+                                                           status=status ) )
+        return trans.fill_template( '/webapps/tool_shed/category/create_category.mako',
+                                    name=name,
+                                    description=description,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def delete_repository( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        id = kwd.get( 'id', None )
+        if id:
+            # Deleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
+            ids = util.listify( id )
+            count = 0
+            deleted_repositories = ""
+            for repository_id in ids:
+                repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                if repository:
+                    if not repository.deleted:
+                        # Mark all installable repository_metadata records as not installable.
+                        for repository_metadata in repository.downloadable_revisions:
+                            repository_metadata.downloadable = False
+                            trans.sa_session.add( repository_metadata )
+                        # Mark the repository admin role as deleted.
+                        repository_admin_role = repository.admin_role
+                        if repository_admin_role is not None:
+                            repository_admin_role.deleted = True
+                            trans.sa_session.add( repository_admin_role )
+                        repository.deleted = True
+                        trans.sa_session.add( repository )
+                        trans.sa_session.flush()
+                        # Update the repository registry.
+                        trans.app.repository_registry.remove_entry( repository )
+                        count += 1
+                        deleted_repositories += " %s " % repository.name
+            if count:
+                message = "Deleted %d %s: %s" % ( count, inflector.cond_plural( len( ids ), "repository" ), escape( deleted_repositories ) )
+            else:
+                message = "All selected repositories were already marked deleted."
+        else:
+            message = "No repository ids received for deleting."
+            status = 'error'
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='browse_repositories',
+                                                   message=util.sanitize_text( message ),
+                                                   status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def delete_repository_metadata( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        id = kwd.get( 'id', None )
+        if id:
+            ids = util.listify( id )
+            count = 0
+            for repository_metadata_id in ids:
+                repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+                trans.sa_session.delete( repository_metadata )
+                trans.sa_session.flush()
+                count += 1
+            if count:
+                message = "Deleted %d repository metadata %s" % ( count, inflector.cond_plural( len( ids ), "record" ) )
+        else:
+            message = "No repository metadata ids received for deleting."
+            status = 'error'
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='browse_repository_metadata',
+                                                   message=util.sanitize_text( message ),
+                                                   status=status ) )
+
+    @web.expose
+    @web.require_admin
+    def edit_category( self, trans, **kwd ):
+        """Handle requests to edit a Tool Shed category's name or description."""
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No category ids received for editing"
+            return trans.response.send_redirect( web.url_for( controller='admin',
+                                                              action='manage_categories',
+                                                              message=message,
+                                                              status='error' ) )
+        category = suc.get_category( trans.app, id )
+        original_category_name = str( category.name )
+        original_category_description = str( category.description )
+        if kwd.get( 'edit_category_button', False ):
+            flush_needed = False
+            new_name = kwd.get( 'name', '' ).strip()
+            new_description = kwd.get( 'description', '' ).strip()
+            if original_category_name != new_name:
+                if not new_name:
+                    message = 'Enter a valid name'
+                    status = 'error'
+                elif suc.get_category_by_name( trans.app, new_name ):
+                    message = 'A category with that name already exists'
+                    status = 'error'
+                else:
+                    category.name = new_name
+                    flush_needed = True
+            if original_category_description != new_description:
+                category.description = new_description
+                flush_needed = True
+            if flush_needed:
+                trans.sa_session.add( category )
+                trans.sa_session.flush()
+                if original_category_name != new_name:
+                    # Update the Tool Shed's repository registry.
+                    trans.app.repository_registry.edit_category_entry( original_category_name, new_name )
+                message = "The information has been saved for category '%s'" % escape( category.name )
+                status = 'done'
+                return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                  action='manage_categories',
+                                                                  message=message,
+                                                                  status=status ) )
+        return trans.fill_template( '/webapps/tool_shed/category/edit_category.mako',
+                                    category=category,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def manage_categories( self, trans, **kwd ):
+        if 'f-free-text-search' in kwd:
+            # Trick to enable searching repository name and description from the CategoryGrid.
+            # What we've done is rendered the search box for the RepositoryGrid on the grid.mako
+            # template for the CategoryGrid.  See ~/templates/webapps/tool_shed/category/grid.mako.
+            # Since we are searching repositories and not categories, redirect to browse_repositories().
+            return trans.response.send_redirect( web.url_for( controller='admin',
+                                                              action='browse_repositories',
+                                                              **kwd ) )
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "create":
+                return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                  action='create_category',
+                                                                  **kwd ) )
+            elif operation == "delete":
+                return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                  action='mark_category_deleted',
+                                                                  **kwd ) )
+            elif operation == "undelete":
+                return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                  action='undelete_category',
+                                                                  **kwd ) )
+            elif operation == "purge":
+                return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                  action='purge_category',
+                                                                  **kwd ) )
+            elif operation == "edit":
+                return trans.response.send_redirect( web.url_for( controller='admin',
+                                                                  action='edit_category',
+                                                                  **kwd ) )
+        return self.manage_category_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_admin
+    def regenerate_statistics( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        if 'regenerate_statistics_button' in kwd:
+            trans.app.shed_counter.generate_statistics()
+            message = "Successfully regenerated statistics"
+        return trans.fill_template( '/webapps/tool_shed/admin/statistics.mako',
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def manage_role_associations( self, trans, **kwd ):
+        """Manage users, groups and repositories associated with a role."""
+        role_id = kwd.get( 'id', None )
+        role = repository_util.get_role_by_id( trans.app, role_id )
+        # We currently only have a single role associated with a repository, the repository admin role.
+        repository_role_association = role.repositories[ 0 ]
+        repository = repository_role_association.repository
+        associations_dict = repository_util.handle_role_associations( trans.app,
+                                                                      role,
+                                                                      repository,
+                                                                      **kwd )
+        in_users = associations_dict.get( 'in_users', [] )
+        out_users = associations_dict.get( 'out_users', [] )
+        in_groups = associations_dict.get( 'in_groups', [] )
+        out_groups = associations_dict.get( 'out_groups', [] )
+        message = associations_dict.get( 'message', '' )
+        status = associations_dict.get( 'status', 'done' )
+        return trans.fill_template( '/webapps/tool_shed/role/role.mako',
+                                    in_admin_controller=True,
+                                    repository=repository,
+                                    role=role,
+                                    in_users=in_users,
+                                    out_users=out_users,
+                                    in_groups=in_groups,
+                                    out_groups=out_groups,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ):
+        rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user )
+        if 'reset_metadata_on_selected_repositories_button' in kwd:
+            message, status = rmm.reset_metadata_on_selected_repositories( **kwd )
+        else:
+            message = escape( util.restore_text( kwd.get( 'message', '' ) ) )
+            status = kwd.get( 'status', 'done' )
+        repositories_select_field = rmm.build_repository_ids_select_field( name='repository_ids',
+                                                                           multiple=True,
+                                                                           display='checkboxes',
+                                                                           my_writable=False )
+        return trans.fill_template( '/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako',
+                                    repositories_select_field=repositories_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_admin
+    def undelete_repository( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        id = kwd.get( 'id', None )
+        if id:
+            # Undeleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
+            ids = util.listify( id )
+            count = 0
+            undeleted_repositories = ""
+            for repository_id in ids:
+                repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                if repository:
+                    if repository.deleted:
+                        # Inspect all repository_metadata records to determine those that are installable, and mark
+                        # them accordingly.
+                        for repository_metadata in repository.metadata_revisions:
+                            metadata = repository_metadata.metadata
+                            if metadata:
+                                if metadata_util.is_downloadable( metadata ):
+                                    repository_metadata.downloadable = True
+                                    trans.sa_session.add( repository_metadata )
+                        # Mark the repository admin role as not deleted.
+                        repository_admin_role = repository.admin_role
+                        if repository_admin_role is not None:
+                            repository_admin_role.deleted = False
+                            trans.sa_session.add( repository_admin_role )
+                        repository.deleted = False
+                        trans.sa_session.add( repository )
+                        trans.sa_session.flush()
+                        if not repository.deprecated:
+                            # Update the repository registry.
+                            trans.app.repository_registry.add_entry( repository )
+                        count += 1
+                        undeleted_repositories += " %s" % repository.name
+            if count:
+                message = "Undeleted %d %s: %s" % ( count, inflector.cond_plural( count, "repository" ), undeleted_repositories )
+            else:
+                message = "No selected repositories were marked deleted, so they could not be undeleted."
+        else:
+            message = "No repository ids received for undeleting."
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='browse_repositories',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def mark_category_deleted( self, trans, **kwd ):
+        # TODO: We should probably eliminate the Category.deleted column since it really makes no
+        # sense to mark a category as deleted (category names and descriptions can be changed instead).
+        # If we do this, the following 2 methods can be eliminated as well.
+        message = escape( kwd.get( 'message', '' ) )
+        id = kwd.get( 'id', None )
+        if id:
+            ids = util.listify( id )
+            message = "Deleted %d categories: " % len( ids )
+            for category_id in ids:
+                category = suc.get_category( trans.app, category_id )
+                category.deleted = True
+                trans.sa_session.add( category )
+                trans.sa_session.flush()
+                # Update the Tool Shed's repository registry.
+                trans.app.repository_registry.remove_category_entry( category )
+                message += " %s " % escape( category.name )
+        else:
+            message = "No category ids received for deleting."
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='manage_categories',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def purge_category( self, trans, **kwd ):
+        # This method should only be called for a Category that has previously been deleted.
+        # Purging a deleted Category deletes all of the following from the database:
+        # - RepositoryCategoryAssociations where category_id == Category.id
+        message = escape( kwd.get( 'message', '' ) )
+        id = kwd.get( 'id', None )
+        if id:
+            ids = util.listify( id )
+            count = 0
+            purged_categories = ""
+            message = "Purged %d categories: " % len( ids )
+            for category_id in ids:
+                category = suc.get_category( trans.app, category_id )
+                if category.deleted:
+                    # Delete RepositoryCategoryAssociations
+                    for rca in category.repositories:
+                        trans.sa_session.delete( rca )
+                    trans.sa_session.flush()
+                    purged_categories += " %s " % category.name
+            message = "Purged %d categories: %s" % ( count, escape( purged_categories ) )
+        else:
+            message = "No category ids received for purging."
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='manage_categories',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
+
+    @web.expose
+    @web.require_admin
+    def undelete_category( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        id = kwd.get( 'id', None )
+        if id:
+            ids = util.listify( id )
+            count = 0
+            undeleted_categories = ""
+            for category_id in ids:
+                category = suc.get_category( trans.app, category_id )
+                if category.deleted:
+                    category.deleted = False
+                    trans.sa_session.add( category )
+                    trans.sa_session.flush()
+                    # Update the Tool Shed's repository registry.
+                    trans.app.repository_registry.add_category_entry( category )
+                    count += 1
+                    undeleted_categories += " %s" % category.name
+            message = "Undeleted %d categories: %s" % ( count, escape( undeleted_categories ) )
+        else:
+            message = "No category ids received for undeleting."
+        trans.response.send_redirect( web.url_for( controller='admin',
+                                                   action='manage_categories',
+                                                   message=util.sanitize_text( message ),
+                                                   status='done' ) )
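
A note on the id handling used throughout the controller above: the grid
operations accept either a single encoded id or a comma-separated list of ids,
and util.listify normalizes both into a list (a sketch with hypothetical ids):

    util.listify( 'f2db41e1fa331b3e' )
    # -> [ 'f2db41e1fa331b3e' ]
    util.listify( 'f2db41e1fa331b3e,ebfb8f50c6abde6d' )
    # -> [ 'f2db41e1fa331b3e', 'ebfb8f50c6abde6d' ]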
diff --git a/lib/galaxy/webapps/tool_shed/controllers/groups.py b/lib/galaxy/webapps/tool_shed/controllers/groups.py
new file mode 100644
index 0000000..fde3885
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/controllers/groups.py
@@ -0,0 +1,19 @@
+import logging
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+
+log = logging.getLogger( __name__ )
+
+
+class Group( BaseUIController ):
+
+    @web.expose
+    def index( self, trans, **kwd ):
+        # define app configuration for generic mako template
+        app = {
+            'jscript'       : "../toolshed/scripts/toolshed.groups"
+        }
+        return trans.fill_template( '/webapps/tool_shed/group/index.mako',
+                                    config={
+                                        'title': 'Tool Shed Groups',
+                                        'app': app } )
diff --git a/lib/galaxy/webapps/tool_shed/controllers/hg.py b/lib/galaxy/webapps/tool_shed/controllers/hg.py
new file mode 100644
index 0000000..03a0633
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py
@@ -0,0 +1,36 @@
+import logging
+
+from mercurial.hgweb.hgwebdir_mod import hgwebdir
+from mercurial.hgweb.request import wsgiapplication
+
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+from tool_shed.util.repository_util import get_repository_by_name_and_owner
+
+log = logging.getLogger(__name__)
+
+
+class HgController( BaseUIController ):
+    @web.expose
+    def handle_request( self, trans, **kwd ):
+        # The shell command that results in this method being called will look something like:
+        # hg clone http://test@127.0.0.1:9009/repos/test/convert_characters1
+        hgweb_config = trans.app.hgweb_config_manager.hgweb_config
+        cmd = kwd.get( 'cmd', None )
+
+        def make_web_app():
+            hgwebapp = hgwebdir( hgweb_config )
+            return hgwebapp
+        wsgi_app = wsgiapplication( make_web_app )
+        if cmd == 'getbundle':
+            path_info = kwd.get( 'path_info', None )
+            if path_info:
+                owner, name = path_info.split( '/' )
+                repository = get_repository_by_name_and_owner( trans.app, name, owner )
+                if repository:
+                    repository.times_downloaded += 1
+                    trans.sa_session.add( repository )
+                    trans.sa_session.flush()
+        return wsgi_app
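
A sketch of the path_info handling above (hypothetical repository): a clone of
http://127.0.0.1:9009/repos/test/convert_characters1 would yield

    path_info = 'test/convert_characters1'
    owner, name = path_info.split( '/' )
    # owner -> 'test', name -> 'convert_characters1'

after which the download counter is bumped for the matching repository, if one
exists.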
diff --git a/lib/galaxy/webapps/tool_shed/controllers/repository.py b/lib/galaxy/webapps/tool_shed/controllers/repository.py
new file mode 100644
index 0000000..a50dafe
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -0,0 +1,2968 @@
+import json
+import logging
+import os
+import string
+import tempfile
+from datetime import date
+
+from mercurial import mdiff
+from mercurial import patch
+from sqlalchemy import and_, false, null
+
+import tool_shed.grids.repository_grids as repository_grids
+import tool_shed.grids.util as grids_util
+import tool_shed.repository_types.util as rt_util
+
+from galaxy import util
+from galaxy import web
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.form_builder import CheckboxField
+from galaxy.web.framework.helpers import grids
+from galaxy.webapps.tool_shed.util import ratings_util
+from tool_shed.capsule import capsule_manager
+from tool_shed.dependencies.repository import relation_builder
+from tool_shed.galaxy_install import dependency_display
+from tool_shed.metadata import repository_metadata_manager
+from tool_shed.tools import tool_validator
+from tool_shed.tools import tool_version_manager
+from tool_shed.util import basic_util
+from tool_shed.util import common_util
+from tool_shed.util import encoding_util
+from tool_shed.util import hg_util
+from tool_shed.util import metadata_util
+from tool_shed.util import readme_util
+from tool_shed.util import repository_util
+from tool_shed.util import search_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import tool_util
+from tool_shed.util import workflow_util
+from tool_shed.util.web_util import escape
+from tool_shed.utility_containers import ToolShedUtilityContainerManager
+
+log = logging.getLogger( __name__ )
+
+malicious_error = "  This changeset cannot be downloaded because it potentially produces malicious behavior or contains inappropriate content."
+malicious_error_can_push = "  Correct this changeset as soon as possible; it potentially produces malicious behavior or contains inappropriate content."
+
+
+class RepositoryController( BaseUIController, ratings_util.ItemRatings ):
+
+    category_grid = repository_grids.CategoryGrid()
+    datatypes_grid = repository_grids.DatatypesGrid()
+    deprecated_repositories_i_own_grid = repository_grids.DeprecatedRepositoriesIOwnGrid()
+    email_alerts_repository_grid = repository_grids.EmailAlertsRepositoryGrid()
+    docker_image_grid = repository_grids.DockerImageGrid()
+    install_matched_repository_grid = repository_grids.InstallMatchedRepositoryGrid()
+    matched_repository_grid = repository_grids.MatchedRepositoryGrid()
+    my_writable_repositories_grid = repository_grids.MyWritableRepositoriesGrid()
+    my_writable_repositories_missing_tool_test_components_grid = repository_grids.MyWritableRepositoriesMissingToolTestComponentsGrid()
+    my_writable_repositories_with_invalid_tools_grid = repository_grids.MyWritableRepositoriesWithInvalidToolsGrid()
+    repositories_by_user_grid = repository_grids.RepositoriesByUserGrid()
+    repositories_i_own_grid = repository_grids.RepositoriesIOwnGrid()
+    repositories_i_can_administer_grid = repository_grids.RepositoriesICanAdministerGrid()
+    repositories_in_category_grid = repository_grids.RepositoriesInCategoryGrid()
+    repositories_missing_tool_test_components_grid = repository_grids.RepositoriesMissingToolTestComponentsGrid()
+    repositories_with_invalid_tools_grid = repository_grids.RepositoriesWithInvalidToolsGrid()
+    repository_dependencies_grid = repository_grids.RepositoryDependenciesGrid()
+    repository_grid = repository_grids.RepositoryGrid()
+    # The repository_metadata_grid is not currently displayed, but is sub-classed by several grids.
+    repository_metadata_grid = repository_grids.RepositoryMetadataGrid()
+    tool_dependencies_grid = repository_grids.ToolDependenciesGrid()
+    tools_grid = repository_grids.ToolsGrid()
+    valid_category_grid = repository_grids.ValidCategoryGrid()
+    valid_repository_grid = repository_grids.ValidRepositoryGrid()
+
+    def _redirect_if_necessary( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+            elif operation == "repositories_by_user":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='browse_repositories_by_user',
+                                                                  **kwd ) )
+            elif operation in [ 'mark as deprecated', 'mark as not deprecated' ]:
+                kwd[ 'mark_deprecated' ] = operation == 'mark as deprecated'
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='deprecate',
+                                                                  **kwd ) )
+
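+    # Callers use the helper above as follows (a sketch mirroring its use
+    # throughout this controller); it returns None when no redirect applies:
+    #   _redir = self._redirect_if_necessary( trans, **kwd )
+    #   if _redir is not None:
+    #       return _redir
+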
+    @web.expose
+    def browse_categories( self, trans, **kwd ):
+        # The request came from the tool shed.
+        if 'f-free-text-search' in kwd:
+            # Trick to enable searching repository name and description from the CategoryGrid.
+            # What we've done is rendered the search box for the RepositoryGrid on the grid.mako
+            # template for the CategoryGrid.  See ~/templates/webapps/tool_shed/category/grid.mako.
+            # Since we are searching repositories and not categories, redirect to browse_repositories().
+            if 'id' in kwd and kwd[ 'id' ] == kwd[ 'f-free-text-search' ]:
+                # The value of 'id' has been set to the search string, which is a repository name.
+                # We'll try to get the desired encoded repository id to pass on.
+                try:
+                    repository_name = kwd[ 'id' ]
+                    repository = repository_util.get_repository_by_name( trans.app, repository_name )
+                    kwd[ 'id' ] = trans.security.encode_id( repository.id )
+                except Exception:
+                    pass
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              **kwd ) )
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation in [ "repositories_by_category", "repositories_by_user" ]:
+                # Eliminate the current filters if any exist.
+                for k, v in kwd.items():
+                    if k.startswith( 'f-' ):
+                        del kwd[ k ]
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='browse_repositories',
+                                                                  **kwd ) )
+        title = trans.app.repository_grid_filter_manager.get_grid_title( trans,
+                                                                         trailing_string='by Category',
+                                                                         default='Repositories' )
+        self.category_grid.title = title
+        return self.category_grid( trans, **kwd )
+
+    @web.expose
+    def browse_datatypes( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            # The received id is a RepositoryMetadata id.
+            repository_metadata_id = kwd[ 'id' ]
+            repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+            repository_id = trans.security.encode_id( repository_metadata.repository_id )
+            changeset_revision = repository_metadata.changeset_revision
+            new_kwd = dict( id=repository_id,
+                            changeset_revision=changeset_revision )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **new_kwd ) )
+        return self.datatypes_grid( trans, **kwd )
+
+    @web.expose
+    def browse_deprecated_repositories_i_own( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans.app, **kwd )
+        if repository:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              operation='view_or_manage_repository',
+                                                              id=trans.security.encode_id( repository.id ),
+                                                              changeset_revision=selected_changeset_revision ) )
+        return self.deprecated_repositories_i_own_grid( trans, **kwd )
+
+    @web.expose
+    def browse_my_writable_repositories( self, trans, **kwd ):
+        _redir = self._redirect_if_necessary( trans, **kwd )
+        if _redir is not None:
+            return _redir
+
+        selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans.app, **kwd )
+        if repository:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              operation='view_or_manage_repository',
+                                                              id=trans.security.encode_id( repository.id ),
+                                                              changeset_revision=selected_changeset_revision ) )
+        return self.my_writable_repositories_grid( trans, **kwd )
+
+    @web.expose
+    def browse_my_writable_repositories_missing_tool_test_components( self, trans, **kwd ):
+        _redir = self._redirect_if_necessary( trans, **kwd )
+        if _redir is not None:
+            return _redir
+
+        if 'message' not in kwd:
+            message = 'This list contains repositories that match the following criteria:<br>'
+            message += '<ul>'
+            message += '<li>you are authorized to update them</li>'
+            message += '<li>the latest installable revision contains at least 1 tool with no defined tests <b>OR</b></li>'
+            message += '<li>the latest installable revision contains at least 1 tool with a test that requires a missing test data file</li>'
+            message += '</ul>'
+            kwd[ 'message' ] = message
+            kwd[ 'status' ] = 'warning'
+        return self.my_writable_repositories_missing_tool_test_components_grid( trans, **kwd )
+
+    @web.expose
+    def browse_my_writable_repositories_with_invalid_tools( self, trans, **kwd ):
+        _redir = self._redirect_if_necessary( trans, **kwd )
+        if _redir is not None:
+            return _redir
+
+        if 'message' not in kwd:
+            message = 'This list contains repositories that match the following criteria:<br>'
+            message += '<ul>'
+            message += '<li>you are authorized to update them</li>'
+            message += '<li>the latest metadata revision contains at least 1 invalid tool</li>'
+            message += '</ul>'
+            message += 'Click the tool config file name to see why the tool is invalid.'
+            kwd[ 'message' ] = message
+            kwd[ 'status' ] = 'warning'
+        return self.my_writable_repositories_with_invalid_tools_grid( trans, **kwd )
+
+    @web.expose
+    def browse_repositories( self, trans, **kwd ):
+        # We add params to the keyword dict in this method in order to rename the param with an "f-" prefix,
+        # simulating filtering by clicking a search link.  We have to take this approach because the "-"
+        # character is illegal in HTTP requests.
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+            elif operation == "edit_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='edit_repository',
+                                                                  **kwd ) )
+            elif operation == "repositories_by_user":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='browse_repositories_by_user',
+                                                                  **kwd ) )
+            elif operation == "reviewed_repositories_i_own":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='reviewed_repositories_i_own' ) )
+            elif operation == "repositories_by_category":
+                category_id = kwd.get( 'id', None )
+                message = escape( kwd.get( 'message', '' ) )
+                status = kwd.get( 'status', 'done' )
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='browse_repositories_in_category',
+                                                                  id=category_id,
+                                                                  message=message,
+                                                                  status=status ) )
+            elif operation == "receive email alerts":
+                if trans.user:
+                    if kwd[ 'id' ]:
+                        kwd[ 'caller' ] = 'browse_repositories'
+                        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                          action='set_email_alerts',
+                                                                          **kwd ) )
+                else:
+                    kwd[ 'message' ] = 'You must be logged in to set email alerts.'
+                    kwd[ 'status' ] = 'error'
+                    del kwd[ 'operation' ]
+        selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans.app, **kwd )
+        if repository:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              operation='view_or_manage_repository',
+                                                              id=trans.security.encode_id( repository.id ),
+                                                              changeset_revision=selected_changeset_revision ) )
+        title = trans.app.repository_grid_filter_manager.get_grid_title( trans,
+                                                                         trailing_string='',
+                                                                         default='Repositories' )
+        self.repository_grid.title = title
+        return self.repository_grid( trans, **kwd )
+
+    @web.expose
+    def browse_repositories_by_user( self, trans, **kwd ):
+        """Display the list of repositories owned by a specified user."""
+        # Eliminate the current search filters if any exist.
+        for k, v in kwd.items():
+            if k.startswith( 'f-' ):
+                del kwd[ k ]
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        user_id = kwd.get( 'user_id', None )
+        if user_id is None:
+            # The received id is the repository id, so we need to get the id of the user that owns the repository.
+            repository_id = kwd.get( 'id', None )
+            if repository_id:
+                repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                user_id = trans.security.encode_id( repository.user.id )
+                kwd[ 'user_id' ] = user_id
+            else:
+                # The user selected a repository revision which results in a refresh_on_change.
+                selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans.app, **kwd )
+                if repository:
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action='view_or_manage_repository',
+                                                                      id=trans.security.encode_id( repository.id ),
+                                                                      changeset_revision=selected_changeset_revision ) )
+        if user_id:
+            user = suc.get_user( trans.app, user_id )
+            trailing_string = ''
+            default = 'Repositories Owned by %s' % str( user.username )
+        else:
+            trailing_string = ''
+            default = 'Repositories'
+        title = trans.app.repository_grid_filter_manager.get_grid_title( trans,
+                                                                         trailing_string=trailing_string,
+                                                                         default=default )
+        self.repositories_by_user_grid.title = title
+        return self.repositories_by_user_grid( trans, **kwd )
+
+    @web.expose
+    def browse_repositories_i_can_administer( self, trans, **kwd ):
+        _redir = self._redirect_if_necessary( trans, **kwd )
+        if _redir is not None:
+            return _redir
+
+        selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans.app, **kwd )
+        if repository:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              operation='view_or_manage_repository',
+                                                              id=trans.security.encode_id( repository.id ),
+                                                              changeset_revision=selected_changeset_revision ) )
+        return self.repositories_i_can_administer_grid( trans, **kwd )
+
+    @web.expose
+    def browse_repositories_i_own( self, trans, **kwd ):
+        _redir = self._redirect_if_necessary( trans, **kwd )
+        if _redir is not None:
+            return _redir
+
+        selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans.app, **kwd )
+        if repository:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              operation='view_or_manage_repository',
+                                                              id=trans.security.encode_id( repository.id ),
+                                                              changeset_revision=selected_changeset_revision ) )
+        return self.repositories_i_own_grid( trans, **kwd )
+
+    @web.expose
+    def browse_repositories_in_category( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+            if operation == 'repositories_by_user':
+                user_id = kwd.get( 'user_id', None )
+                if user_id is None:
+                    # The received id is the repository id, so we need to get the id of the user that owns the repository.
+                    repository_id = kwd.get( 'id', None )
+                    if repository_id:
+                        repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                        user_id = trans.security.encode_id( repository.user.id )
+                        user = suc.get_user( trans.app, user_id )
+                        self.repositories_by_user_grid.title = "Repositories owned by %s" % user.username
+                        kwd[ 'user_id' ] = user_id
+                        return self.repositories_by_user_grid( trans, **kwd )
+        selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans.app, **kwd )
+        if repository:
+            # The user selected a repository revision which results in a refresh_on_change.
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='view_or_manage_repository',
+                                                              id=trans.security.encode_id( repository.id ),
+                                                              changeset_revision=selected_changeset_revision ) )
+        category_id = kwd.get( 'id', None )
+        if category_id:
+            category = suc.get_category( trans.app, category_id )
+            if category:
+                trailing_string = 'in Category %s' % str( category.name )
+            else:
+                trailing_string = 'in Category'
+        else:
+            trailing_string = 'in Category'
+        title = trans.app.repository_grid_filter_manager.get_grid_title( trans,
+                                                                         trailing_string=trailing_string,
+                                                                         default='Repositories' )
+        self.repositories_in_category_grid.title = title
+        return self.repositories_in_category_grid( trans, **kwd )
+
+    @web.expose
+    def browse_repositories_missing_tool_test_components( self, trans, **kwd ):
+        _redir = self._redirect_if_necessary( trans, **kwd )
+        if _redir is not None:
+            return _redir
+
+        if 'message' not in kwd:
+            message = 'This list contains repositories that match the following criteria:<br>'
+            message += '<ul>'
+            message += '<li>the latest installable revision contains at least 1 tool with no defined tests <b>OR</b></li>'
+            message += '<li>the latest installable revision contains at least 1 tool with a test that requires a missing test data file</li>'
+            message += '</ul>'
+            kwd[ 'message' ] = message
+            kwd[ 'status' ] = 'warning'
+        return self.repositories_missing_tool_test_components_grid( trans, **kwd )
+
+    @web.expose
+    def browse_repositories_with_invalid_tools( self, trans, **kwd ):
+        _redir = self._redirect_if_necessary( trans, **kwd )
+        if _redir is not None:
+            return _redir
+
+        if 'message' not in kwd:
+            message = 'This list contains repositories that match the following criteria:<br>'
+            message += '<ul>'
+            message += '<li>the latest metadata revision contains at least 1 invalid tool</li>'
+            message += '</ul>'
+            message += 'Click the tool config file name to see why the tool is invalid.'
+            kwd[ 'message' ] = message
+            kwd[ 'status' ] = 'warning'
+        return self.repositories_with_invalid_tools_grid( trans, **kwd )
+
+    @web.expose
+    def browse_repository( self, trans, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        commit_message = escape( kwd.get( 'commit_message', 'Deleted selected files' ) )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        # Update repository files for browsing.
+        hg_util.update_repository( repo )
+        changeset_revision = repository.tip( trans.app )
+        metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                              id,
+                                                                                              changeset_revision,
+                                                                                              metadata_only=True )
+        repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository )
+        return trans.fill_template( '/webapps/tool_shed/repository/browse_repository.mako',
+                                    repository=repository,
+                                    changeset_revision=changeset_revision,
+                                    metadata=metadata,
+                                    commit_message=commit_message,
+                                    repository_type_select_field=repository_type_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def browse_repository_dependencies( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            # The received id is a RepositoryMetadata id.
+            repository_metadata_id = kwd[ 'id' ]
+            repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+            repository_id = trans.security.encode_id( repository_metadata.repository_id )
+            changeset_revision = repository_metadata.changeset_revision
+            new_kwd = dict( id=repository_id,
+                            changeset_revision=changeset_revision )
+            if operation == "browse_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='browse_repository',
+                                                                  **new_kwd ) )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **new_kwd ) )
+        return self.repository_dependencies_grid( trans, **kwd )
+
+    @web.expose
+    def browse_tools( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            # The received id is a RepositoryMetadata id.
+            repository_metadata_id = kwd[ 'id' ]
+            repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+            repository_id = trans.security.encode_id( repository_metadata.repository_id )
+            changeset_revision = repository_metadata.changeset_revision
+            new_kwd = dict( id=repository_id,
+                            changeset_revision=changeset_revision )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **new_kwd ) )
+        return self.tools_grid( trans, **kwd )
+
+    @web.expose
+    def browse_tool_dependencies( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            # The received id is a RepositoryMetadata id.
+            repository_metadata_id = kwd[ 'id' ]
+            repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+            repository_id = trans.security.encode_id( repository_metadata.repository_id )
+            changeset_revision = repository_metadata.changeset_revision
+            new_kwd = dict( id=repository_id,
+                            changeset_revision=changeset_revision )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **new_kwd ) )
+        return self.tool_dependencies_grid( trans, **kwd )
+
+    @web.expose
+    def browse_valid_categories( self, trans, **kwd ):
+        """Filter repositories per category by those that are valid for installing into Galaxy."""
+        # If the request came from Galaxy, restrict category links to display only valid repository changeset revisions.
+        galaxy_url = common_util.handle_galaxy_url( trans, **kwd )
+        if galaxy_url:
+            kwd[ 'galaxy_url' ] = galaxy_url
+        if 'f-free-text-search' in kwd:
+            if kwd[ 'f-free-text-search' ] == 'All':
+                # The user performed a search, then clicked the "x" to eliminate the search criteria.
+                new_kwd = {}
+                return self.valid_category_grid( trans, **new_kwd )
+            # Since we are searching valid repositories and not categories, redirect to browse_valid_repositories().
+            if 'id' in kwd and kwd[ 'id' ] == kwd[ 'f-free-text-search' ]:
+                # The value of 'id' has been set to the search string, which is a repository name.
+                # We'll try to get the desired encoded repository id to pass on.
+                try:
+                    name = kwd[ 'id' ]
+                    repository = repository_util.get_repository_by_name( trans.app, name )
+                    kwd[ 'id' ] = trans.security.encode_id( repository.id )
+                except Exception:
+                    pass
+            return self.browse_valid_repositories( trans, **kwd )
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation in [ "valid_repositories_by_category", "valid_repositories_by_user" ]:
+                # Eliminate the current filters if any exist.
+                for k in list( kwd.keys() ):
+                    if k.startswith( 'f-' ):
+                        del kwd[ k ]
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='browse_valid_repositories',
+                                                                  **kwd ) )
+        title = trans.app.repository_grid_filter_manager.get_grid_title( trans,
+                                                                         trailing_string='by Category',
+                                                                         default='Categories of Valid Repositories' )
+        self.valid_category_grid.title = title
+        return self.valid_category_grid( trans, **kwd )
+
+    @web.expose
+    def browse_valid_repositories( self, trans, **kwd ):
+        """Filter repositories to those that are installable into Galaxy."""
+        galaxy_url = common_util.handle_galaxy_url( trans, **kwd )
+        if galaxy_url:
+            kwd[ 'galaxy_url' ] = galaxy_url
+        repository_id = kwd.get( 'id', None )
+        if 'f-free-text-search' in kwd:
+            if 'f-Category.name' in kwd:
+                # The user browsed to a category and then entered a search string, so get the category associated with its value.
+                category_name = kwd[ 'f-Category.name' ]
+                category = suc.get_category_by_name( trans.app, category_name )
+                # Set the id value in kwd since it is required by the ValidRepositoryGrid.build_initial_query method.
+                kwd[ 'id' ] = trans.security.encode_id( category.id )
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "preview_tools_in_changeset":
+                repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                repository_metadata = metadata_util.get_latest_repository_metadata( trans.app, repository.id, downloadable=True )
+                latest_installable_changeset_revision = repository_metadata.changeset_revision
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='preview_tools_in_changeset',
+                                                                  repository_id=repository_id,
+                                                                  changeset_revision=latest_installable_changeset_revision ) )
+            elif operation == "valid_repositories_by_category":
+                # Eliminate the current filters if any exist.
+                for k in list( kwd.keys() ):
+                    if k.startswith( 'f-' ):
+                        del kwd[ k ]
+                category_id = kwd.get( 'id', None )
+                category = suc.get_category( trans.app, category_id )
+                kwd[ 'f-Category.name' ] = category.name
+        selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans.app, **kwd )
+        if repository:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='preview_tools_in_changeset',
+                                                              repository_id=trans.security.encode_id( repository.id ),
+                                                              changeset_revision=selected_changeset_revision ) )
+        url_args = dict( action='browse_valid_repositories',
+                         operation='preview_tools_in_changeset',
+                         repository_id=repository_id )
+        self.valid_repository_grid.operations = [ grids.GridOperation( "Preview and install",
+                                                                       url_args=url_args,
+                                                                       allow_multiple=False,
+                                                                       async_compatible=False ) ]
+        title = trans.app.repository_grid_filter_manager.get_grid_title( trans,
+                                                                         trailing_string='',
+                                                                         default='Valid Repositories' )
+        self.valid_repository_grid.title = title
+        return self.valid_repository_grid( trans, **kwd )
+
+    @web.expose
+    def check_for_updates( self, trans, **kwd ):
+        """Handle a request from a local Galaxy instance."""
+        message = escape( kwd.get( 'message', '' ) )
+        # If the request originated with the UpdateRepositoryManager, it will not include a galaxy_url.
+        galaxy_url = common_util.handle_galaxy_url( trans, **kwd )
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        # Default to the current changeset revision.
+        update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
+        latest_changeset_revision = changeset_revision
+        from_update_manager = kwd.get( 'from_update_manager', False )
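+        # When polled by Galaxy's UpdateRepositoryManager, this method must return a plain 'true' or
+        # 'false' string rather than redirecting, since the caller is a script and not a browser.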
+        if from_update_manager:
+            update = 'true'
+            no_update = 'false'
+        elif galaxy_url:
+            # Start building up the url to redirect back to the calling Galaxy instance.
+            params = dict( tool_shed_url=web.url_for( '/', qualified=True ),
+                           name=str( repository.name ),
+                           owner=str( repository.user.username ),
+                           changeset_revision=changeset_revision )
+            pathspec = [ 'admin_toolshed', 'update_to_changeset_revision' ]
+        else:
+            message = 'Unable to check for updates because the request did not include a valid Galaxy URL.  '
+            message += 'You may need to enable third-party cookies in your browser.  '
+            return trans.show_error_message( message )
+        if changeset_revision == repository.tip( trans.app ):
+            # If changeset_revision is the repository tip, there are no additional updates.
+            if from_update_manager:
+                return no_update
+            # Return the same value for changeset_revision and latest_changeset_revision.
+        else:
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                               trans.security.encode_id( repository.id ),
+                                                                                               changeset_revision )
+            if repository_metadata:
+                # If changeset_revision is in the repository_metadata table for this repository, there are no
+                # additional updates.
+                if from_update_manager:
+                    return no_update
+                # Return the same value for changeset_revision and latest_changeset_revision.
+            else:
+                # The changeset_revision column in the repository_metadata table has been updated with a new
+                # changeset_revision value since the repository was installed.  We need to find the changeset_revision
+                # to which we need to update.
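+                # Walk the changelog in order: first locate the installed changeset, then update to the
+                # next changeset that has an associated RepositoryMetadata record (or the repository tip).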
+                update_to_changeset_hash = None
+                for changeset in repo.changelog:
+                    changeset_hash = str( repo.changectx( changeset ) )
+                    hg_util.get_changectx_for_changeset( repo, changeset_hash )
+                    if update_to_changeset_hash:
+                        if changeset_hash == repository.tip( trans.app ):
+                            update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_hash )
+                            latest_changeset_revision = changeset_hash
+                            break
+                        else:
+                            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                                               trans.security.encode_id( repository.id ),
+                                                                                                               changeset_hash )
+                            if repository_metadata:
+                                # We found a RepositoryMetadata record.
+                                update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_hash )
+                                latest_changeset_revision = changeset_hash
+                                break
+                            else:
+                                update_to_changeset_hash = changeset_hash
+                    else:
+                        if changeset_hash == changeset_revision:
+                            # We've found the changeset in the changelog for which we need to get the next update.
+                            update_to_changeset_hash = changeset_hash
+                if from_update_manager:
+                    if latest_changeset_revision == changeset_revision:
+                        return no_update
+                    return update
+        params[ 'latest_changeset_revision' ] = str( latest_changeset_revision )
+        params[ 'latest_ctx_rev' ] = str( update_to_ctx.rev() )
+        url = util.build_url( galaxy_url, pathspec=pathspec, params=params )
+        return trans.response.send_redirect( url )
+
+    @web.expose
+    def contact_owner( self, trans, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                              id,
+                                                                                              repository.tip( trans.app ),
+                                                                                              metadata_only=True )
+        if trans.user and trans.user.email:
+            return trans.fill_template( "/webapps/tool_shed/repository/contact_owner.mako",
+                                        repository=repository,
+                                        metadata=metadata,
+                                        message=message,
+                                        status=status )
+        else:
+            # Do all we can to eliminate spam.
+            return trans.show_error_message( "You must be logged in to contact the owner of a repository." )
+
+    @web.expose
+    def create_galaxy_docker_image( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_ids = util.listify( kwd.get( 'id', '' ) )
+        if 'operation' in kwd:
+            if repository_ids:
+                operation = kwd[ 'operation' ].lower()
+                if operation == "include in docker image":
+                    repository_tups = []
+                    for repository_id in repository_ids:
+                        repository = repository_util.get_repository_by_id( trans.app, repository_id )
+                        repository_tups.append( ( str( repository.name ),
+                                                  str( repository.user.username ),
+                                                  str( repository.type ) ) )
+                    return trans.fill_template( "/webapps/tool_shed/repository/docker_image_repositories.mako",
+                                                id=','.join( repository_ids ),
+                                                repository_tups=repository_tups,
+                                                message=message,
+                                                status=status )
+            else:
+                # This can only occur when there is a multi-select grid with check boxes and an operation,
+                # and the user clicked the operation button without checking any of the check boxes.
+                kwd[ 'message' ] = "No items were selected."
+                kwd[ 'status' ] = 'error'
+        elif kwd.get( 'create_docker_image_button', False ):
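+            # Assemble a Dockerfile by rendering each selected repository through
+            # basic_util.SELECTED_REPOSITORIES_TEMPLATE and substituting the combined result into
+            # basic_util.DOCKER_IMAGE_TEMPLATE, then stream the generated file back as an attachment.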
+            tmp_image_dir = tempfile.mkdtemp( prefix="tmp-toolshed-cdidir" )
+            docker_file_name = 'Dockerfile'
+            docker_file_path = os.path.join( tmp_image_dir, docker_file_name )
+            tool_shed_url = web.url_for( '/', qualified=True )
+            repository_string = ''
+            for repository_id in repository_ids:
+                repository = repository_util.get_repository_by_id( trans.app, repository_id )
+                template = basic_util.SELECTED_REPOSITORIES_TEMPLATE
+                repository_template = \
+                    string.Template( template ).safe_substitute( tool_shed_url=tool_shed_url,
+                                                                 repository_owner=str( repository.user.username ),
+                                                                 repository_name=str( repository.name ) )
+                repository_string = '%s\n%s' % ( repository_string, repository_template )
+            template = basic_util.DOCKER_IMAGE_TEMPLATE
+            docker_image_string = \
+                string.Template( template ).safe_substitute( selected_repositories=repository_string )
+            trans.response.set_content_type( 'text/plain' )
+            trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s"' % docker_file_name
+            with open( docker_file_path, "w" ) as opened_file:
+                opened_file.write( docker_image_string )
+            opened_file = open( docker_file_path, "r" )
+            # Make sure the file is removed from disk after the contents have been downloaded.
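+            # On POSIX filesystems the unlinked file remains readable through the already-open handle,
+            # so its contents can still be streamed to the client after the path is removed.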
+            os.unlink( docker_file_path )
+            docker_file_path, docker_file_name = os.path.split( docker_file_path )
+            basic_util.remove_dir( docker_file_path )
+            return opened_file
+        return self.docker_image_grid( trans, **kwd )
+
+    @web.expose
+    def create_repository( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        categories = suc.get_categories( trans )
+        if not categories:
+            message = 'No categories have been configured in this instance of the Galaxy Tool Shed.  '
+            message += 'An administrator needs to create some via the Administrator control panel before creating repositories.'
+            status = 'error'
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              message=message,
+                                                              status=status ) )
+        name = kwd.get( 'name', '' ).strip()
+        remote_repository_url = kwd.get( 'remote_repository_url', '' )
+        homepage_url = kwd.get( 'homepage_url', '' )
+        description = kwd.get( 'description', '' )
+        long_description = kwd.get( 'long_description', '' )
+        category_ids = util.listify( kwd.get( 'category_id', '' ) )
+        selected_categories = [ trans.security.decode_id( category_id ) for category_id in category_ids ]
+        repository_type = kwd.get( 'repository_type', rt_util.UNRESTRICTED )
+        if kwd.get( 'create_repository_button', False ):
+            error = False
+            message = repository_util.validate_repository_name( trans.app, name, trans.user )
+            if message:
+                error = True
+            if not description:
+                message = 'Enter a description.'
+                error = True
+            if error:
+                status = 'error'
+            else:
+                repository, message = repository_util.create_repository( trans.app,
+                                                                         name,
+                                                                         repository_type,
+                                                                         description,
+                                                                         long_description,
+                                                                         user_id=trans.user.id,
+                                                                         category_ids=category_ids,
+                                                                         remote_repository_url=remote_repository_url,
+                                                                         homepage_url=homepage_url )
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='manage_repository',
+                                                                  message=message,
+                                                                  id=trans.security.encode_id( repository.id ) ) )
+        repository_type_select_field = rt_util.build_repository_type_select_field( trans )
+        return trans.fill_template( '/webapps/tool_shed/repository/create_repository.mako',
+                                    name=name,
+                                    remote_repository_url=remote_repository_url,
+                                    homepage_url=homepage_url,
+                                    description=description,
+                                    long_description=long_description,
+                                    selected_categories=selected_categories,
+                                    categories=categories,
+                                    repository_type_select_field=repository_type_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "deprecate repository" )
+    def deprecate( self, trans, **kwd ):
+        """Mark a repository in the tool shed as deprecated or not deprecated."""
+        # Marking a repository in the tool shed as deprecated has no effect on any downloadable changeset
+        # revisions that may be associated with the repository.  Revisions are not marked as not downloadable
+        # because users who have installed the repository must still be allowed to get updates.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_id = kwd.get( 'id', None )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+        mark_deprecated = util.string_as_bool( kwd.get( 'mark_deprecated', False ) )
+        repository.deprecated = mark_deprecated
+        trans.sa_session.add( repository )
+        trans.sa_session.flush()
+        if mark_deprecated:
+            # Update the repository registry.
+            trans.app.repository_registry.remove_entry( repository )
+            message = 'The repository <b>%s</b> has been marked as deprecated.' % escape( repository.name )
+        else:
+            # Update the repository registry.
+            trans.app.repository_registry.add_entry( repository )
+            message = 'The repository <b>%s</b> has been marked as not deprecated.' % escape( repository.name )
+        trans.response.send_redirect( web.url_for( controller='repository',
+                                                   action='browse_repositories',
+                                                   operation='repositories_i_own',
+                                                   message=message,
+                                                   status=status ) )
+
+    @web.expose
+    def display_image_in_repository( self, trans, **kwd ):
+        """
+        Open an image file that is contained in a repository or that is referenced by a URL for display.  The image can be
+        defined either in a README.rst file contained in the repository or in the help section of a Galaxy tool config that
+        is contained in the repository.  The following image definitions are all supported.  The former $PATH_TO_IMAGES
+        prefix is no longer required and is now ignored.
+
+        .. image:: https://raw.github.com/galaxy/some_image.png
+        .. image:: $PATH_TO_IMAGES/some_image.png
+        .. image:: /static/images/some_image.gif
+        .. image:: some_image.jpg
+        .. image:: /deep/some_image.png
+        """
+        repository_id = kwd.get( 'repository_id', None )
+        relative_path_to_image_file = kwd.get( 'image_file', None )
+        if repository_id and relative_path_to_image_file:
+            repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+            if repository:
+                repo_files_dir = repository.repo_path( trans.app )
+                path_to_file = repository_util.get_absolute_path_to_file_in_repository( repo_files_dir, relative_path_to_image_file )
+                if os.path.exists( path_to_file ):
+                    file_name = os.path.basename( relative_path_to_image_file )
+                    try:
+                        extension = file_name.split( '.' )[ -1 ]
+                    except Exception:
+                        extension = None
+                    if extension:
+                        mimetype = trans.app.datatypes_registry.get_mimetype_by_extension( extension )
+                        if mimetype:
+                            trans.response.set_content_type( mimetype )
+                    return open( path_to_file, 'rb' )
+        return None
+
+    @web.expose
+    def display_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        render_repository_actions_for = kwd.get( 'render_repository_actions_for', 'tool_shed' )
+        tv = tool_validator.ToolValidator( trans.app )
+        repository, tool, message = tv.load_tool_from_changeset_revision( repository_id,
+                                                                          changeset_revision,
+                                                                          tool_config )
+        if message:
+            status = 'error'
+        tool_state = tool_util.new_state( trans, tool, invalid=False )
+        metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                              repository_id,
+                                                                                              changeset_revision,
+                                                                                              metadata_only=True )
+        try:
+            return trans.fill_template( "/webapps/tool_shed/repository/tool_form.mako",
+                                        repository=repository,
+                                        render_repository_actions_for=render_repository_actions_for,
+                                        metadata=metadata,
+                                        changeset_revision=changeset_revision,
+                                        tool=tool,
+                                        tool_state=tool_state,
+                                        message=message,
+                                        status=status )
+        except Exception as e:
+            message = "Error displaying tool, probably due to a problem in the tool config.  The exception is: %s." % str( e )
+        if trans.webapp.name == 'galaxy' or render_repository_actions_for == 'galaxy':
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='preview_tools_in_changeset',
+                                                              repository_id=repository_id,
+                                                              changeset_revision=changeset_revision,
+                                                              message=message,
+                                                              status='error' ) )
+        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                          action='browse_repositories',
+                                                          operation='view_or_manage_repository',
+                                                          id=repository_id,
+                                                          changeset_revision=changeset_revision,
+                                                          message=message,
+                                                          status='error' ) )
+
+    @web.expose
+    def download( self, trans, repository_id, changeset_revision, file_type, **kwd ):
+        """Download an archive of the repository files compressed as zip, gz or bz2."""
+        # FIXME: this will currently only download the repository tip, no matter which installable changeset_revision
+        # is being viewed.  This should be enhanced to use the export method below, which accounts for the currently
+        # viewed changeset_revision.
+        repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+        # Allow hgweb to handle the download.  This requires the tool shed
+        # server account's .hgrc file to include the following setting:
+        # [web]
+        # allow_archive = bz2, gz, zip
+        file_type_str = basic_util.get_file_type_str( changeset_revision, file_type )
+        repository.times_downloaded += 1
+        trans.sa_session.add( repository )
+        trans.sa_session.flush()
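+        # The redirect target has the form <tool_shed_url>/repos/<owner>/<name>/archive/<revision>.<ext>,
+        # e.g. https://toolshed.example.org/repos/some_owner/some_repo/archive/tip.tar.gz (URL illustrative).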
+        tool_shed_url = web.url_for( '/', qualified=True )
+        pathspec = [ 'repos', str( repository.user.username ), str( repository.name ), 'archive', file_type_str ]
+        download_url = util.build_url( tool_shed_url, pathspec=pathspec )
+        return trans.response.send_redirect( download_url )
+
+    @web.expose
+    def export( self, trans, repository_id, changeset_revision, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        export_repository_dependencies = kwd.get( 'export_repository_dependencies', '' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+        if kwd.get( 'export_repository_button', False ):
+            # We'll currently support only gzip-compressed tar archives.
+            export_repository_dependencies = CheckboxField.is_checked( export_repository_dependencies )
+            tool_shed_url = web.url_for( '/', qualified=True )
+            erm = capsule_manager.ExportRepositoryManager( app=trans.app,
+                                                           user=trans.user,
+                                                           tool_shed_url=tool_shed_url,
+                                                           repository=repository,
+                                                           changeset_revision=changeset_revision,
+                                                           export_repository_dependencies=export_repository_dependencies,
+                                                           using_api=False )
+            repositories_archive, error_message = erm.export_repository()
+            repositories_archive_filename = os.path.basename( repositories_archive.name )
+            if error_message:
+                message = error_message
+                status = 'error'
+            else:
+                trans.response.set_content_type( 'application/x-gzip' )
+                trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s"' % ( repositories_archive_filename )
+                opened_archive = open( repositories_archive.name, 'rb' )
+                # Make sure the file is removed from disk after the contents have been downloaded.
+                os.unlink( repositories_archive.name )
+                repositories_archive_path, file_name = os.path.split( repositories_archive.name )
+                basic_util.remove_dir( repositories_archive_path )
+                return opened_archive
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
+        metadata = repository_metadata.metadata
+        toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+        # Initialize the repository dependency RelationBuilder.
+        rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
+        # Work-around to ensure repositories that contain packages needed only for compiling
+        # a dependent package are included in the capsule.
+        rb.set_filter_dependencies_needed_for_compiling( False )
+        # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
+        repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
+        if repository_dependencies:
+            # Only display repository dependencies if they exist.
+            exclude = [ 'datatypes', 'invalid_repository_dependencies', 'invalid_tool_dependencies', 'invalid_tools',
+                        'readme_files', 'tool_dependencies', 'tools', 'workflows', 'data_manager' ]
+            tsucm = ToolShedUtilityContainerManager( trans.app )
+            containers_dict = tsucm.build_repository_containers( repository,
+                                                                 changeset_revision,
+                                                                 repository_dependencies,
+                                                                 repository_metadata,
+                                                                 exclude=exclude )
+            export_repository_dependencies_check_box = CheckboxField( 'export_repository_dependencies', checked=True )
+        else:
+            containers_dict = None
+            export_repository_dependencies_check_box = None
+        revision_label = hg_util.get_revision_label( trans.app, repository, changeset_revision, include_date=True )
+        return trans.fill_template( "/webapps/tool_shed/repository/export_repository.mako",
+                                    changeset_revision=changeset_revision,
+                                    containers_dict=containers_dict,
+                                    export_repository_dependencies_check_box=export_repository_dependencies_check_box,
+                                    repository=repository,
+                                    repository_metadata=repository_metadata,
+                                    revision_label=revision_label,
+                                    metadata=metadata,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def export_via_api( self, trans, **kwd ):
+        """Return an exported gzip compressed repository archive file opened for reading."""
+        encoded_repositories_archive_name = kwd.get( 'encoded_repositories_archive_name', None )
+        if encoded_repositories_archive_name:
+            repositories_archive_name = encoding_util.tool_shed_decode( encoded_repositories_archive_name )
+            opened_archive = open( repositories_archive_name, 'rb' )
+            # Make sure the file is removed from disk after the contents have been downloaded.
+            os.unlink( repositories_archive_name )
+            return opened_archive
+        return ''
+
+    @web.expose
+    def find_tools( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        common_util.handle_galaxy_url( trans, **kwd )
+        if 'operation' in kwd:
+            item_id = kwd.get( 'id', '' )
+            if item_id:
+                operation = kwd[ 'operation' ].lower()
+                is_admin = trans.user_is_admin()
+                if operation == "view_or_manage_repository":
+                    # The received id is a RepositoryMetadata id, so we have to get the repository id.
+                    repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, item_id )
+                    repository_id = trans.security.encode_id( repository_metadata.repository.id )
+                    repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                    kwd[ 'id' ] = repository_id
+                    kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
+                    if trans.webapp.name == 'tool_shed' and ( is_admin or repository.user == trans.user ):
+                        a = 'manage_repository'
+                    else:
+                        a = 'view_repository'
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action=a,
+                                                                      **kwd ) )
+                if operation == "install to galaxy":
+                    # We've received a list of RepositoryMetadata ids, so we need to build a list of associated Repository ids.
+                    encoded_repository_ids = []
+                    changeset_revisions = []
+                    for repository_metadata_id in util.listify( item_id ):
+                        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+                        encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
+                        changeset_revisions.append( repository_metadata.changeset_revision )
+                    new_kwd = {}
+                    new_kwd[ 'repository_ids' ] = encoded_repository_ids
+                    new_kwd[ 'changeset_revisions' ] = changeset_revisions
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action='install_repositories_by_revision',
+                                                                      **new_kwd ) )
+            else:
+                # This can only occur when there is a multi-select grid with check boxes and an operation,
+                # and the user clicked the operation button without checking any of the check boxes.
+                return trans.show_error_message( "No items were selected." )
+        tool_ids = [ item.lower() for item in util.listify( kwd.get( 'tool_id', '' ) ) ]
+        tool_names = [ item.lower() for item in util.listify( kwd.get( 'tool_name', '' ) ) ]
+        tool_versions = [ item.lower() for item in util.listify( kwd.get( 'tool_version', '' ) ) ]
+        exact_matches = kwd.get( 'exact_matches', '' )
+        exact_matches_checked = CheckboxField.is_checked( exact_matches )
+        match_tuples = []
+        ok = True
+        if tool_ids or tool_names or tool_versions:
+            ok, match_tuples = search_util.search_repository_metadata( trans.app,
+                                                                       exact_matches_checked,
+                                                                       tool_ids=tool_ids,
+                                                                       tool_names=tool_names,
+                                                                       tool_versions=tool_versions )
+            if ok:
+                kwd[ 'match_tuples' ] = match_tuples
+                # Render the list view
+                if trans.webapp.name == 'galaxy':
+                    # Our initial request originated from a Galaxy instance.
+                    global_actions = [ grids.GridAction( "Browse valid repositories",
+                                                         dict( controller='repository', action='browse_valid_categories' ) ),
+                                       grids.GridAction( "Search for valid tools",
+                                                         dict( controller='repository', action='find_tools' ) ),
+                                       grids.GridAction( "Search for workflows",
+                                                         dict( controller='repository', action='find_workflows' ) ) ]
+                    self.install_matched_repository_grid.global_actions = global_actions
+                    install_url_args = dict( controller='repository', action='find_tools' )
+                    operations = [ grids.GridOperation( "Install", url_args=install_url_args, allow_multiple=True, async_compatible=False ) ]
+                    self.install_matched_repository_grid.operations = operations
+                    return self.install_matched_repository_grid( trans, **kwd )
+                else:
+                    kwd[ 'message' ] = "tool id: <b>%s</b><br/>tool name: <b>%s</b><br/>tool version: <b>%s</b><br/>exact matches only: <b>%s</b>" % \
+                        ( escape( basic_util.stringify( tool_ids ) ),
+                          escape( basic_util.stringify( tool_names ) ),
+                          escape( basic_util.stringify( tool_versions ) ),
+                          str( exact_matches_checked ) )
+                    self.matched_repository_grid.title = "Repositories with matching tools"
+                    return self.matched_repository_grid( trans, **kwd )
+            else:
+                message = "No search performed - each field must contain the same number of comma-separated items."
+                status = "error"
+        exact_matches_check_box = CheckboxField( 'exact_matches', checked=exact_matches_checked )
+        return trans.fill_template( '/webapps/tool_shed/repository/find_tools.mako',
+                                    tool_id=basic_util.stringify( tool_ids ),
+                                    tool_name=basic_util.stringify( tool_names ),
+                                    tool_version=basic_util.stringify( tool_versions ),
+                                    exact_matches_check_box=exact_matches_check_box,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def find_workflows( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        common_util.handle_galaxy_url( trans, **kwd )
+        if 'operation' in kwd:
+            item_id = kwd.get( 'id', '' )
+            if item_id:
+                operation = kwd[ 'operation' ].lower()
+                is_admin = trans.user_is_admin()
+                if operation == "view_or_manage_repository":
+                    # The received id is a RepositoryMetadata id, so we have to get the repository id.
+                    repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, item_id )
+                    repository_id = trans.security.encode_id( repository_metadata.repository.id )
+                    repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                    kwd[ 'id' ] = repository_id
+                    kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
+                    if trans.webapp.name == 'tool_shed' and ( is_admin or repository.user == trans.user ):
+                        a = 'manage_repository'
+                    else:
+                        a = 'view_repository'
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action=a,
+                                                                      **kwd ) )
+                if operation == "install to galaxy":
+                    # We've received a list of RepositoryMetadata ids, so we need to build a list of associated Repository ids.
+                    encoded_repository_ids = []
+                    changeset_revisions = []
+                    for repository_metadata_id in util.listify( item_id ):
+                        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+                        encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) )
+                        changeset_revisions.append( repository_metadata.changeset_revision )
+                    new_kwd = {}
+                    new_kwd[ 'repository_ids' ] = encoded_repository_ids
+                    new_kwd[ 'changeset_revisions' ] = changeset_revisions
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action='install_repositories_by_revision',
+                                                                      **new_kwd ) )
+            else:
+                # This can only occur when there is a multi-select grid with check boxes and an operation,
+                # and the user clicked the operation button without checking any of the check boxes.
+                return trans.show_error_message( "No items were selected." )
+        if 'find_workflows_button' in kwd:
+            workflow_names = [ item.lower() for item in util.listify( kwd.get( 'workflow_name', '' ) ) ]
+            exact_matches = kwd.get( 'exact_matches', '' )
+            exact_matches_checked = CheckboxField.is_checked( exact_matches )
+            match_tuples = []
+            ok = True
+            if workflow_names:
+                ok, match_tuples = search_util.search_repository_metadata( trans.app,
+                                                                           exact_matches_checked,
+                                                                           workflow_names=workflow_names )
+            else:
+                ok, match_tuples = search_util.search_repository_metadata( trans.app,
+                                                                           exact_matches_checked,
+                                                                           workflow_names=[],
+                                                                           all_workflows=True )
+            if ok:
+                kwd[ 'match_tuples' ] = match_tuples
+                if trans.webapp.name == 'galaxy':
+                    # Our initial request originated from a Galaxy instance.
+                    global_actions = [ grids.GridAction( "Browse valid repositories",
+                                                         dict( controller='repository', action='browse_valid_repositories' ) ),
+                                       grids.GridAction( "Search for valid tools",
+                                                         dict( controller='repository', action='find_tools' ) ),
+                                       grids.GridAction( "Search for workflows",
+                                                         dict( controller='repository', action='find_workflows' ) ) ]
+                    self.install_matched_repository_grid.global_actions = global_actions
+                    install_url_args = dict( controller='repository', action='find_workflows' )
+                    operations = [ grids.GridOperation( "Install", url_args=install_url_args, allow_multiple=True, async_compatible=False ) ]
+                    self.install_matched_repository_grid.operations = operations
+                    return self.install_matched_repository_grid( trans, **kwd )
+                else:
+                    kwd[ 'message' ] = "workflow name: <b>%s</b><br/>exact matches only: <b>%s</b>" % \
+                        ( escape( basic_util.stringify( workflow_names ) ), str( exact_matches_checked ) )
+                    self.matched_repository_grid.title = "Repositories with matching workflows"
+                    return self.matched_repository_grid( trans, **kwd )
+            else:
+                message = "No search performed - each field must contain the same number of comma-separated items."
+                status = "error"
+        else:
+            exact_matches_checked = False
+            workflow_names = []
+        exact_matches_check_box = CheckboxField( 'exact_matches', checked=exact_matches_checked )
+        return trans.fill_template( '/webapps/tool_shed/repository/find_workflows.mako',
+                                    workflow_name=basic_util.stringify( workflow_names ),
+                                    exact_matches_check_box=exact_matches_check_box,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def generate_workflow_image( self, trans, workflow_name, repository_metadata_id=None ):
+        """Return an svg image representation of a workflow dictionary created when the workflow was exported."""
+        return workflow_util.generate_workflow_image( trans, workflow_name, repository_metadata_id=repository_metadata_id, repository_id=None )
+
+    @web.expose
+    def get_changeset_revision_and_ctx_rev( self, trans, **kwd ):
+        """Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated."""
+        def has_galaxy_utilities( repository_metadata ):
+            has_galaxy_utilities_dict = dict( includes_data_managers=False,
+                                              includes_datatypes=False,
+                                              includes_tools=False,
+                                              includes_tools_for_display_in_tool_panel=False,
+                                              has_repository_dependencies=False,
+                                              has_repository_dependencies_only_if_compiling_contained_td=False,
+                                              includes_tool_dependencies=False,
+                                              includes_workflows=False )
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    if 'data_manager' in metadata:
+                        has_galaxy_utilities_dict[ 'includes_data_managers' ] = True
+                    if 'datatypes' in metadata:
+                        has_galaxy_utilities_dict[ 'includes_datatypes' ] = True
+                    if 'tools' in metadata:
+                        has_galaxy_utilities_dict[ 'includes_tools' ] = True
+                    if 'tool_dependencies' in metadata:
+                        has_galaxy_utilities_dict[ 'includes_tool_dependencies' ] = True
+                    repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+                    repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+                    has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+                        repository_util.get_repository_dependency_types( repository_dependencies )
+                    has_galaxy_utilities_dict[ 'has_repository_dependencies' ] = has_repository_dependencies
+                    has_galaxy_utilities_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = \
+                        has_repository_dependencies_only_if_compiling_contained_td
+                    if 'workflows' in metadata:
+                        has_galaxy_utilities_dict[ 'includes_workflows' ] = True
+            return has_galaxy_utilities_dict
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                           trans.security.encode_id( repository.id ),
+                                                                                           changeset_revision )
+        has_galaxy_utilities_dict = has_galaxy_utilities( repository_metadata )
+        includes_data_managers = has_galaxy_utilities_dict[ 'includes_data_managers' ]
+        includes_datatypes = has_galaxy_utilities_dict[ 'includes_datatypes' ]
+        includes_tools = has_galaxy_utilities_dict[ 'includes_tools' ]
+        includes_tools_for_display_in_tool_panel = has_galaxy_utilities_dict[ 'includes_tools_for_display_in_tool_panel' ]
+        includes_tool_dependencies = has_galaxy_utilities_dict[ 'includes_tool_dependencies' ]
+        has_repository_dependencies = has_galaxy_utilities_dict[ 'has_repository_dependencies' ]
+        has_repository_dependencies_only_if_compiling_contained_td = \
+            has_galaxy_utilities_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ]
+        includes_workflows = has_galaxy_utilities_dict[ 'includes_workflows' ]
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        # Default to the received changeset revision and ctx_rev.
+        update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
+        ctx_rev = str( update_to_ctx.rev() )
+        latest_changeset_revision = changeset_revision
+        update_dict = dict( changeset_revision=changeset_revision,
+                            ctx_rev=ctx_rev,
+                            includes_data_managers=includes_data_managers,
+                            includes_datatypes=includes_datatypes,
+                            includes_tools=includes_tools,
+                            includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                            includes_tool_dependencies=includes_tool_dependencies,
+                            has_repository_dependencies=has_repository_dependencies,
+                            has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
+                            includes_workflows=includes_workflows )
+        if changeset_revision == repository.tip( trans.app ):
+            # If changeset_revision is the repository tip, there are no additional updates.
+            return encoding_util.tool_shed_encode( update_dict )
+        else:
+            if repository_metadata:
+                # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
+                return encoding_util.tool_shed_encode( update_dict )
+            else:
+                # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
+                # repository was installed.  We need to find the changeset_revision to which we need to update.
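+                # As in check_for_updates() above, walk the changelog for the installed changeset and
+                # then update to the next changeset that has a RepositoryMetadata record (or the tip).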
+                update_to_changeset_hash = None
+                for changeset in repo.changelog:
+                    includes_tools = False
+                    has_repository_dependencies = False
+                    has_repository_dependencies_only_if_compiling_contained_td = False
+                    changeset_hash = str( repo.changectx( changeset ) )
+                    hg_util.get_changectx_for_changeset( repo, changeset_hash )
+                    if update_to_changeset_hash:
+                        update_to_repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                                                     trans.security.encode_id( repository.id ),
+                                                                                                                     changeset_hash )
+                        if update_to_repository_metadata:
+                            has_galaxy_utilities_dict = has_galaxy_utilities( update_to_repository_metadata )
+                            includes_data_managers = has_galaxy_utilities_dict[ 'includes_data_managers' ]
+                            includes_datatypes = has_galaxy_utilities_dict[ 'includes_datatypes' ]
+                            includes_tools = has_galaxy_utilities_dict[ 'includes_tools' ]
+                            includes_tools_for_display_in_tool_panel = has_galaxy_utilities_dict[ 'includes_tools_for_display_in_tool_panel' ]
+                            includes_tool_dependencies = has_galaxy_utilities_dict[ 'includes_tool_dependencies' ]
+                            has_repository_dependencies = has_galaxy_utilities_dict[ 'has_repository_dependencies' ]
+                            has_repository_dependencies_only_if_compiling_contained_td = has_galaxy_utilities_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ]
+                            includes_workflows = has_galaxy_utilities_dict[ 'includes_workflows' ]
+                            # We found a RepositoryMetadata record.
+                            if changeset_hash == repository.tip( trans.app ):
+                                # The current ctx is the repository tip, so use it.
+                                update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_hash )
+                                latest_changeset_revision = changeset_hash
+                            else:
+                                update_to_ctx = hg_util.get_changectx_for_changeset( repo, update_to_changeset_hash )
+                                latest_changeset_revision = update_to_changeset_hash
+                            break
+                    elif changeset_hash == changeset_revision:
+                        # We've found the changeset in the changelog for which we need to get the next update.
+                        update_to_changeset_hash = changeset_hash
+                update_dict[ 'includes_data_managers' ] = includes_data_managers
+                update_dict[ 'includes_datatypes' ] = includes_datatypes
+                update_dict[ 'includes_tools' ] = includes_tools
+                update_dict[ 'includes_tools_for_display_in_tool_panel' ] = includes_tools_for_display_in_tool_panel
+                update_dict[ 'includes_tool_dependencies' ] = includes_tool_dependencies
+                update_dict[ 'includes_workflows' ] = includes_workflows
+                update_dict[ 'has_repository_dependencies' ] = has_repository_dependencies
+                update_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ] = has_repository_dependencies_only_if_compiling_contained_td
+                update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
+        update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
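+        # The caller receives a tool_shed_encode()-d blob; decoded, it looks like
+        # this sketch (hypothetical values):
+        #   { 'changeset_revision': '<hash>', 'ctx_rev': '7',
+        #     'includes_tools': True, 'has_repository_dependencies': False, ... }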
+        return encoding_util.tool_shed_encode( update_dict )
+
+    @web.expose
+    def get_ctx_rev( self, trans, **kwd ):
+        """Given a repository and changeset_revision, return the correct ctx.rev() value."""
+        repository_name = kwd[ 'name' ]
+        repository_owner = kwd[ 'owner' ]
+        changeset_revision = kwd[ 'changeset_revision' ]
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
+        if ctx:
+            return str( ctx.rev() )
+        return ''
+
+    @web.json
+    def get_file_contents( self, trans, file_path, repository_id ):
+        # Avoid caching
+        trans.response.headers['Pragma'] = 'no-cache'
+        trans.response.headers['Expires'] = '0'
+        is_admin = trans.user_is_admin()
+        return suc.get_repository_file_contents( trans.app, file_path, repository_id, is_admin )
+
+    @web.json
+    def get_latest_downloadable_changeset_revision( self, trans, **kwd ):
+        """
+        Return the latest installable changeset revision for the repository associated with the received
+        name and owner.  This method is called from Galaxy when attempting to install the latest revision
+        of an installed repository.
+        """
+        repository_name = kwd.get( 'name', None )
+        repository_owner = kwd.get( 'owner', None )
+        if repository_name is not None and repository_owner is not None:
+            repository = repository_util.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+            if repository:
+                repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+                return metadata_util.get_latest_downloadable_changeset_revision( trans.app, repository, repo )
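+        # No matching repository or revision: fall back to the INITIAL_CHANGELOG_HASH
+        # sentinel so the caller can detect that nothing is installable.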
+        return hg_util.INITIAL_CHANGELOG_HASH
+
+    @web.json
+    def get_readme_files( self, trans, **kwd ):
+        """
+        This method is called when installing or re-installing a single repository into a Galaxy instance.
+        If the received changeset_revision includes one or more readme files, return them in a dictionary.
+        """
+        repository_name = kwd.get( 'name', None )
+        repository_owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        if repository_name is not None and repository_owner is not None and changeset_revision is not None:
+            repository = repository_util.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+            if repository:
+                repository_metadata = \
+                    metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                 trans.security.encode_id( repository.id ),
+                                                                                 changeset_revision )
+                if repository_metadata:
+                    metadata = repository_metadata.metadata
+                    if metadata:
+                        return readme_util.build_readme_files_dict( trans.app,
+                                                                    repository,
+                                                                    changeset_revision,
+                                                                    repository_metadata.metadata )
+        return {}
+
+    @web.json
+    def get_repository_dependencies( self, trans, **kwd ):
+        """
+        Return an encoded dictionary of all repositories upon which the contents of the received repository
+        depend.
+        """
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        # Repository.get_repository_dependencies( app, changeset, toolshed_url )
+        dependencies = repository.get_repository_dependencies( trans.app, changeset_revision, web.url_for( '/', qualified=True ) )
+        if dependencies:
+            return encoding_util.tool_shed_encode( dependencies )
+        return ''
+
+    @web.expose
+    def get_repository_id( self, trans, **kwd ):
+        """Given a repository name and owner, return the encoded repository id."""
+        repository_name = kwd[ 'name' ]
+        repository_owner = kwd[ 'owner' ]
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+        if repository:
+            return trans.security.encode_id( repository.id )
+        return ''
+
+    @web.json
+    def get_repository_information( self, trans, repository_ids, changeset_revisions, **kwd ):
+        """
+        Generate a list of dictionaries, each of which contains the information about a repository that will
+        be necessary for installing it into a local Galaxy instance.
+        """
+        includes_tools = False
+        includes_tools_for_display_in_tool_panel = False
+        has_repository_dependencies = False
+        has_repository_dependencies_only_if_compiling_contained_td = False
+        includes_tool_dependencies = False
+        repo_info_dicts = []
+        for repository_id, changeset_revision in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ):
+            repo_info_dict, cur_includes_tools, cur_includes_tool_dependencies, cur_includes_tools_for_display_in_tool_panel, \
+                cur_has_repository_dependencies, cur_has_repository_dependencies_only_if_compiling_contained_td = \
+                repository_util.get_repo_info_dict( trans.app, trans.user, repository_id, changeset_revision )
+            if cur_has_repository_dependencies and not has_repository_dependencies:
+                has_repository_dependencies = True
+            if cur_has_repository_dependencies_only_if_compiling_contained_td and not has_repository_dependencies_only_if_compiling_contained_td:
+                has_repository_dependencies_only_if_compiling_contained_td = True
+            if cur_includes_tools and not includes_tools:
+                includes_tools = True
+            if cur_includes_tool_dependencies and not includes_tool_dependencies:
+                includes_tool_dependencies = True
+            if cur_includes_tools_for_display_in_tool_panel and not includes_tools_for_display_in_tool_panel:
+                includes_tools_for_display_in_tool_panel = True
+            repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+        return dict( includes_tools=includes_tools,
+                     includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                     has_repository_dependencies=has_repository_dependencies,
+                     has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
+                     includes_tool_dependencies=includes_tool_dependencies,
+                     repo_info_dicts=repo_info_dicts )
+
+    @web.expose
+    def get_repository_type( self, trans, **kwd ):
+        """Given a repository name and owner, return the type."""
+        repository_name = kwd[ 'name' ]
+        repository_owner = kwd[ 'owner' ]
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+        return str( repository.type )
+
+    @web.json
+    def get_required_repo_info_dict( self, trans, encoded_str=None ):
+        """
+        Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the
+        information needed to install the list of repositories defined by the received encoded_str.
+        """
+        repo_info_dict = {}
+        if encoded_str:
+            encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
+            encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
+            decoded_required_repository_tups = []
+            for encoded_required_repository_tup in encoded_required_repository_tups:
+                decoded_required_repository_tups.append( encoded_required_repository_tup.split( encoding_util.encoding_sep ) )
+            encoded_repository_ids = []
+            changeset_revisions = []
+            for required_repository_tup in decoded_required_repository_tups:
+                tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                    common_util.parse_repository_dependency_tuple( required_repository_tup )
+                repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+                encoded_repository_ids.append( trans.security.encode_id( repository.id ) )
+                changeset_revisions.append( changeset_revision )
+            if encoded_repository_ids and changeset_revisions:
+                repo_info_dict = json.loads( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) )
+        return repo_info_dict
+
+    @web.expose
+    def get_tool_dependencies( self, trans, **kwd ):
+        """
+        Handle a request from a Galaxy instance to get the tool_dependencies entry from the metadata
+        for a specified changeset revision.
+        """
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        dependencies = repository.get_tool_dependencies( trans.app, changeset_revision )
+        if len( dependencies ) > 0:
+            return encoding_util.tool_shed_encode( dependencies )
+        return ''
+
+    @web.expose
+    def get_tool_dependencies_config_contents( self, trans, **kwd ):
+        """
+        Handle a request from a Galaxy instance to get the tool_dependencies.xml file contents for a
+        specified changeset revision.
+        """
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        # TODO: We're currently returning the tool_dependencies.xml file that is available on disk.  We need
+        # to enhance this process to retrieve older versions of the tool_dependencies.xml file from the repository
+        # manifest.
+        repo_dir = repository.repo_path( trans.app )
+        # Get the tool_dependencies.xml file from disk.
+        tool_dependencies_config = hg_util.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, repo_dir )
+        # Return the encoded contents of the tool_dependencies.xml file.
+        if tool_dependencies_config:
+            with open( tool_dependencies_config, 'rb' ) as tool_dependencies_config_file:
+                contents = tool_dependencies_config_file.read()
+            return contents
+        return ''
+
+    @web.json
+    def get_tool_dependency_definition_metadata( self, trans, **kwd ):
+        """
+        Given the name and owner of a repository whose type is
+        tool_dependency_definition, return the current metadata.
+        """
+        repository_name = kwd[ 'name' ]
+        repository_owner = kwd[ 'owner' ]
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+        encoded_id = trans.app.security.encode_id( repository.id )
+        repository_tip = repository.tip( trans.app )
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                           encoded_id,
+                                                                                           repository_tip )
+        return repository_metadata.metadata
+
+    @web.expose
+    def get_tool_versions( self, trans, **kwd ):
+        """
+        For each valid / downloadable changeset (up to and including the received changeset_revision) in the
+        repository's changelog, append the changeset's tool_versions dictionary to the list that will be returned.
+        """
+        name = kwd[ 'name' ]
+        owner = kwd[ 'owner' ]
+        changeset_revision = kwd[ 'changeset_revision' ]
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        tool_version_dicts = []
+        for changeset in repo.changelog:
+            current_changeset_revision = str( repo.changectx( changeset ) )
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                               trans.security.encode_id( repository.id ),
+                                                                                               current_changeset_revision )
+            if repository_metadata and repository_metadata.tool_versions:
+                tool_version_dicts.append( repository_metadata.tool_versions )
+                if current_changeset_revision == changeset_revision:
+                    break
+        if tool_version_dicts:
+            return json.dumps( tool_version_dicts )
+        return ''
+
+    @web.json
+    def get_updated_repository_information( self, trans, name, owner, changeset_revision, **kwd ):
+        """
+        Generate a dictionary that contains the information about a repository that is necessary for installing
+        it into a local Galaxy instance.
+        """
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        repository_id = trans.security.encode_id( repository.id )
+        repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans.user, repository )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
+        if not repository_metadata:
+            # The received changeset_revision is no longer associated with metadata, so get the next changeset_revision in the repository
+            # changelog that is associated with metadata.
+            changeset_revision = metadata_util.get_next_downloadable_changeset_revision( repository,
+                                                                                         repo,
+                                                                                         after_changeset_revision=changeset_revision )
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
+        ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
+        repo_info_dict = repository_util.create_repo_info_dict( app=trans.app,
+                                                                repository_clone_url=repository_clone_url,
+                                                                changeset_revision=changeset_revision,
+                                                                ctx_rev=str( ctx.rev() ),
+                                                                repository_owner=repository.user.username,
+                                                                repository_name=repository.name,
+                                                                repository=repository,
+                                                                repository_metadata=repository_metadata,
+                                                                tool_dependencies=None,
+                                                                repository_dependencies=None )
+        includes_data_managers = False
+        includes_datatypes = False
+        includes_tools = False
+        includes_tools_for_display_in_tool_panel = False
+        includes_workflows = False
+        readme_files_dict = None
+        metadata = repository_metadata.metadata
+        if metadata:
+            if 'data_manager' in metadata:
+                includes_data_managers = True
+            if 'datatypes' in metadata:
+                includes_datatypes = True
+            if 'tools' in metadata:
+                includes_tools = True
+                # Handle includes_tools_for_display_in_tool_panel.
+                tool_dicts = metadata[ 'tools' ]
+                for tool_dict in tool_dicts:
+                    if tool_dict.get( 'includes_tools_for_display_in_tool_panel', False ):
+                        includes_tools_for_display_in_tool_panel = True
+                        break
+            if 'workflows' in metadata:
+                includes_workflows = True
+            readme_files_dict = readme_util.build_readme_files_dict( trans.app, repository, changeset_revision, metadata )
+        # See if the repo_info_dict was populated with repository_dependencies or tool_dependencies.
+        has_repository_dependencies = False
+        has_repository_dependencies_only_if_compiling_contained_td = False
+        includes_tool_dependencies = False
+        for name, repo_info_tuple in repo_info_dict.items():
+            if not has_repository_dependencies or not has_repository_dependencies_only_if_compiling_contained_td or not includes_tool_dependencies:
+                description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+                    repository_util.get_repo_info_tuple_contents( repo_info_tuple )
+                for rd_key, rd_tups in repository_dependencies.items():
+                    if rd_key in [ 'root_key', 'description' ]:
+                        continue
+                    curr_has_repository_dependencies, curr_has_repository_dependencies_only_if_compiling_contained_td = \
+                        repository_util.get_repository_dependency_types( rd_tups )
+                    if curr_has_repository_dependencies and not has_repository_dependencies:
+                        has_repository_dependencies = True
+                    if curr_has_repository_dependencies_only_if_compiling_contained_td and not has_repository_dependencies_only_if_compiling_contained_td:
+                        has_repository_dependencies_only_if_compiling_contained_td = True
+                if tool_dependencies and not includes_tool_dependencies:
+                    includes_tool_dependencies = True
+        return dict( includes_data_managers=includes_data_managers,
+                     includes_datatypes=includes_datatypes,
+                     includes_tools=includes_tools,
+                     includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                     has_repository_dependencies=has_repository_dependencies,
+                     has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
+                     includes_tool_dependencies=includes_tool_dependencies,
+                     includes_workflows=includes_workflows,
+                     readme_files_dict=readme_files_dict,
+                     repo_info_dict=repo_info_dict )
+
+    @web.expose
+    def help( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        return trans.fill_template( '/webapps/tool_shed/repository/help.mako', message=message, status=status, **kwd )
+
+    @web.expose
+    def import_capsule( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        capsule_file_name = kwd.get( 'capsule_file_name', None )
+        encoded_file_path = kwd.get( 'encoded_file_path', None )
+        file_path = encoding_util.tool_shed_decode( encoded_file_path )
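+        # file_path points at the previously unpacked capsule directory, which
+        # contains export_info.xml and manifest.xml alongside the repository archives.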
+        export_info_file_path = os.path.join( file_path, 'export_info.xml' )
+        irm = capsule_manager.ImportRepositoryManager( trans.app,
+                                                       trans.request.host,
+                                                       trans.user,
+                                                       trans.user_is_admin() )
+        export_info_dict = irm.get_export_info_dict( export_info_file_path )
+        manifest_file_path = os.path.join( file_path, 'manifest.xml' )
+        # The manifest.xml file has already been validated, so no error_message should be returned here.
+        repository_info_dicts, error_message = irm.get_repository_info_from_manifest( manifest_file_path )
+        # Determine the status for each exported repository archive contained within the capsule.
+        repository_status_info_dicts = irm.get_repository_status_from_tool_shed( repository_info_dicts )
+        if 'import_capsule_button' in kwd:
+            # Generate a list of repository name / import results message tuples for display after the capsule is imported.
+            import_results_tups = []
+            # Only create repositories that do not yet exist and that the current user is authorized to create.  The
+            # status will be None for repositories that fall into the intersection of these 2 categories.
+            for repository_status_info_dict in repository_status_info_dicts:
+                # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
+                repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
+                repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
+                import_results_tups = irm.create_repository_and_import_archive( repository_status_info_dict,
+                                                                                import_results_tups )
+            irm.check_status_and_reset_downloadable( import_results_tups )
+            basic_util.remove_dir( file_path )
+            return trans.fill_template( '/webapps/tool_shed/repository/import_capsule_results.mako',
+                                        export_info_dict=export_info_dict,
+                                        import_results_tups=import_results_tups,
+                                        message=message,
+                                        status=status )
+        return trans.fill_template( '/webapps/tool_shed/repository/import_capsule.mako',
+                                    encoded_file_path=encoded_file_path,
+                                    export_info_dict=export_info_dict,
+                                    repository_status_info_dicts=repository_status_info_dicts,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def index( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        # See if there are any RepositoryMetadata records since menu items require them.
+        repository_metadata = trans.sa_session.query( trans.model.RepositoryMetadata ).first()
+        current_user = trans.user
+        # TODO: move the following to some in-memory register so these queries can be done once
+        # at startup.  The in-memory register can then be managed during the current session.
+        can_administer_repositories = False
+        has_reviewed_repositories = False
+        has_deprecated_repositories = False
+        if current_user:
+            # See if the current user owns any repositories that have been reviewed.
+            for repository in current_user.active_repositories:
+                if repository.reviews:
+                    has_reviewed_repositories = True
+                    break
+            # See if the current user has any repositories that have been marked as deprecated.
+            for repository in current_user.active_repositories:
+                if repository.deprecated:
+                    has_deprecated_repositories = True
+                    break
+            # See if the current user can administer any repositories, but only if not an admin user.
+            if not trans.user_is_admin():
+                if current_user.active_repositories:
+                    can_administer_repositories = True
+                else:
+                    for repository in trans.sa_session.query( trans.model.Repository ) \
+                                                      .filter( trans.model.Repository.table.c.deleted == false() ):
+                        if trans.app.security_agent.user_can_administer_repository( current_user, repository ):
+                            can_administer_repositories = True
+                            break
+        # The route in may have been from a sharable URL, in which case we'll have a user_id and possibly a name.
+        # The received user_id will be the id of the repository owner.
+        user_id = kwd.get( 'user_id', None )
+        repository_id = kwd.get( 'repository_id', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        return trans.fill_template( '/webapps/tool_shed/index.mako',
+                                    repository_metadata=repository_metadata,
+                                    can_administer_repositories=can_administer_repositories,
+                                    has_reviewed_repositories=has_reviewed_repositories,
+                                    has_deprecated_repositories=has_deprecated_repositories,
+                                    user_id=user_id,
+                                    repository_id=repository_id,
+                                    changeset_revision=changeset_revision,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def install_repositories_by_revision( self, trans, **kwd ):
+        """
+        Send the list of repository_ids and changeset_revisions to Galaxy so it can begin the installation
+        process.  If repository_ids is not received, then the name and owner of a single repository must be
+        received instead so that repository can be installed.
+        """
+        repository_ids = kwd.get( 'repository_ids', None )
+        changeset_revisions = kwd.get( 'changeset_revisions', None )
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        if not repository_ids:
+            repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+            repository_ids = trans.security.encode_id( repository.id )
+        galaxy_url = common_util.handle_galaxy_url( trans, **kwd )
+        if galaxy_url:
+            # Redirect back to local Galaxy to perform install.
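+            # The redirect target (hypothetical values) looks like:
+            #   <galaxy_url>/admin_toolshed/prepare_for_install?tool_shed_url=...
+            #     &repository_ids=<id1>,<id2>&changeset_revisions=<rev1>,<rev2>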
+            params = dict( tool_shed_url=web.url_for( '/', qualified=True ),
+                           repository_ids=','.join( util.listify( repository_ids ) ),
+                           changeset_revisions=','.join( util.listify( changeset_revisions ) ) )
+            pathspec = [ 'admin_toolshed', 'prepare_for_install' ]
+            url = util.build_url( galaxy_url, pathspec=pathspec, params=params )
+            return trans.response.send_redirect( url )
+        else:
+            message = 'Repository installation is not possible due to a missing or invalid Galaxy URL: <b>%s</b>.  ' % galaxy_url
+            message += 'You may need to enable third-party cookies in your browser.  '
+            status = 'error'
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_valid_categories',
+                                                              message=message,
+                                                              status=status ) )
+
+    @web.expose
+    def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        render_repository_actions_for = kwd.get( 'render_repository_actions_for', 'tool_shed' )
+        tv = tool_validator.ToolValidator( trans.app )
+        repository, tool, error_message = tv.load_tool_from_changeset_revision( repository_id,
+                                                                                changeset_revision,
+                                                                                tool_config )
+        tool_state = tool_util.new_state( trans, tool, invalid=True )
+        invalid_file_tups = []
+        if tool:
+            invalid_file_tups = tv.check_tool_input_params( repository.repo_path( trans.app ),
+                                                            tool_config,
+                                                            tool,
+                                                            [] )
+        if invalid_file_tups:
+            message = tool_util.generate_message_for_invalid_tools( trans.app,
+                                                                    invalid_file_tups,
+                                                                    repository,
+                                                                    {},
+                                                                    as_html=True,
+                                                                    displaying_invalid_tool=True )
+        elif error_message:
+            message = error_message
+        try:
+            return trans.fill_template( "/webapps/tool_shed/repository/tool_form.mako",
+                                        repository=repository,
+                                        render_repository_actions_for=render_repository_actions_for,
+                                        changeset_revision=changeset_revision,
+                                        tool=tool,
+                                        tool_state=tool_state,
+                                        message=message,
+                                        status='error' )
+        except Exception as e:
+            message = "Exception thrown attempting to display tool: %s." % str( e )
+        if trans.webapp.name == 'galaxy':
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='preview_tools_in_changeset',
+                                                              repository_id=repository_id,
+                                                              changeset_revision=changeset_revision,
+                                                              message=message,
+                                                              status='error' ) )
+        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                          action='browse_repositories',
+                                                          operation='view_or_manage_repository',
+                                                          id=repository_id,
+                                                          changeset_revision=changeset_revision,
+                                                          message=message,
+                                                          status='error' ) )
+
+    @web.expose
+    @web.require_login( "manage email alerts" )
+    def manage_email_alerts( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        new_repo_alert = kwd.get( 'new_repo_alert', '' )
+        new_repo_alert_checked = CheckboxField.is_checked( new_repo_alert )
+        user = trans.user
+        if kwd.get( 'new_repo_alert_button', False ):
+            user.new_repo_alert = new_repo_alert_checked
+            trans.sa_session.add( user )
+            trans.sa_session.flush()
+            if new_repo_alert_checked:
+                message = 'You will receive email alerts for all new valid tool shed repositories.'
+            else:
+                message = 'You will not receive any email alerts for new valid tool shed repositories.'
+        checked = new_repo_alert_checked or ( user and user.new_repo_alert )
+        new_repo_alert_check_box = CheckboxField( 'new_repo_alert', checked=checked )
+        email_alert_repositories = []
+        for repository in trans.sa_session.query( trans.model.Repository ) \
+                                          .filter( and_( trans.model.Repository.table.c.deleted == false(),
+                                                         trans.model.Repository.table.c.email_alerts != null() ) ) \
+                                          .order_by( trans.model.Repository.table.c.name ):
+            if user.email in repository.email_alerts:
+                email_alert_repositories.append( repository )
+        return trans.fill_template( "/webapps/tool_shed/user/manage_email_alerts.mako",
+                                    new_repo_alert_check_box=new_repo_alert_check_box,
+                                    email_alert_repositories=email_alert_repositories,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "manage repository" )
+    def manage_repository( self, trans, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        repository_type = kwd.get( 'repository_type', str( repository.type ) )
+        repo_dir = repository.repo_path( trans.app )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
+        repo_name = kwd.get( 'repo_name', repository.name )
+        changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
+        repository.share_url = repository_util.generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision=changeset_revision )
+        repository.clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans.user, repository )
+        remote_repository_url = kwd.get( 'remote_repository_url', repository.remote_repository_url )
+        homepage_url = kwd.get( 'homepage_url', repository.homepage_url )
+        description = kwd.get( 'description', repository.description )
+        long_description = kwd.get( 'long_description', repository.long_description )
+        avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
+        display_reviews = util.string_as_bool( kwd.get( 'display_reviews', False ) )
+        alerts = kwd.get( 'alerts', '' )
+        alerts_checked = CheckboxField.is_checked( alerts )
+        category_ids = util.listify( kwd.get( 'category_id', '' ) )
+        if repository.email_alerts:
+            email_alerts = json.loads( repository.email_alerts )
+        else:
+            email_alerts = []
+        allow_push = kwd.get( 'allow_push', '' )
+        error = False
+        user = trans.user
+        if kwd.get( 'edit_repository_button', False ):
+            update_kwds = dict(
+                name=repo_name,
+                description=description,
+                long_description=long_description,
+                remote_repository_url=remote_repository_url,
+                homepage_url=homepage_url,
+                type=repository_type,
+            )
+
+            repository, message = repository_util.update_repository( app=trans.app, trans=trans, id=id, **update_kwds )
+            if repository is None:
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_repository',
+                                                                  id=id,
+                                                                  message=message,
+                                                                  status='error' ) )
+
+        elif kwd.get( 'manage_categories_button', False ):
+            flush_needed = False
+            # Delete all currently existing categories.
+            for rca in repository.categories:
+                trans.sa_session.delete( rca )
+                trans.sa_session.flush()
+            if category_ids:
+                # Create category associations
+                for category_id in category_ids:
+                    category = trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( category_id ) )
+                    rca = trans.app.model.RepositoryCategoryAssociation( repository, category )
+                    trans.sa_session.add( rca )
+                    trans.sa_session.flush()
+            message = "The repository information has been updated."
+        elif kwd.get( 'user_access_button', False ):
+            if allow_push not in [ 'none' ]:
+                remove_auth = kwd.get( 'remove_auth', '' )
+                if remove_auth:
+                    usernames = ''
+                else:
+                    user_ids = util.listify( allow_push )
+                    usernames = []
+                    for user_id in user_ids:
+                        user = trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( user_id ) )
+                        usernames.append( user.username )
+                    usernames = ','.join( usernames )
+                repository.set_allow_push( trans.app, usernames, remove_auth=remove_auth )
+            message = "The repository information has been updated."
+        elif kwd.get( 'receive_email_alerts_button', False ):
+            flush_needed = False
+            if alerts_checked:
+                if user.email not in email_alerts:
+                    email_alerts.append( user.email )
+                    repository.email_alerts = json.dumps( email_alerts )
+                    flush_needed = True
+            else:
+                if user.email in email_alerts:
+                    email_alerts.remove( user.email )
+                    repository.email_alerts = json.dumps( email_alerts )
+                    flush_needed = True
+            if flush_needed:
+                trans.sa_session.add( repository )
+                trans.sa_session.flush()
+            message = "The repository information has been updated."
+        if error:
+            status = 'error'
+        current_allow_push = repository.allow_push( trans.app )
+        if current_allow_push:
+            current_allow_push_list = current_allow_push.split( ',' )
+        else:
+            current_allow_push_list = []
+        allow_push_select_field = repository_util.build_allow_push_select_field( trans, current_allow_push_list )
+        checked = alerts_checked or user.email in email_alerts
+        alerts_check_box = CheckboxField( 'alerts', checked=checked )
+        changeset_revision_select_field = grids_util.build_changeset_revision_select_field( trans,
+                                                                                            repository,
+                                                                                            selected_value=changeset_revision,
+                                                                                            add_id_to_name=False,
+                                                                                            downloadable=False )
+        revision_label = hg_util.get_revision_label( trans.app, repository, repository.tip( trans.app ), include_date=False )
+        repository_metadata = None
+        metadata = None
+        is_malicious = False
+        repository_dependencies = None
+        if changeset_revision != hg_util.INITIAL_CHANGELOG_HASH:
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision )
+            if repository_metadata:
+                revision_label = hg_util.get_revision_label( trans.app, repository, changeset_revision, include_date=False )
+                metadata = repository_metadata.metadata
+                is_malicious = repository_metadata.malicious
+            else:
+                # There is no repository_metadata defined for the changeset_revision, so see if it was defined in a previous
+                # changeset in the changelog.
+                previous_changeset_revision = \
+                    metadata_util.get_previous_metadata_changeset_revision( repository, repo, changeset_revision, downloadable=False )
+                if previous_changeset_revision != hg_util.INITIAL_CHANGELOG_HASH:
+                    repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, id, previous_changeset_revision )
+                    if repository_metadata:
+                        revision_label = hg_util.get_revision_label( trans.app, repository, previous_changeset_revision, include_date=False )
+                        metadata = repository_metadata.metadata
+                        is_malicious = repository_metadata.malicious
+                        changeset_revision = previous_changeset_revision
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
+                toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+                rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
+                repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
+                if str( repository.type ) != rt_util.REPOSITORY_SUITE_DEFINITION:
+                    # Handle messaging for resetting repository type to the optimal value.
+                    change_repository_type_message = rt_util.generate_message_for_repository_type_change( trans.app,
+                                                                                                          repository )
+                    if change_repository_type_message:
+                        message += change_repository_type_message
+                        status = 'warning'
+                elif str( repository.type ) != rt_util.TOOL_DEPENDENCY_DEFINITION:
+                    # Handle messaging for resetting repository type to the optimal value.
+                    change_repository_type_message = rt_util.generate_message_for_repository_type_change( trans.app,
+                                                                                                          repository )
+                    if change_repository_type_message:
+                        message += change_repository_type_message
+                        status = 'warning'
+                    else:
+                        # Handle messaging for orphan tool dependency definitions.
+                        dd = dependency_display.DependencyDisplayer( trans.app )
+                        orphan_message = dd.generate_message_for_orphan_tool_dependencies( repository, metadata )
+                        if orphan_message:
+                            message += orphan_message
+                            status = 'warning'
+        if is_malicious:
+            if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
+                message += malicious_error_can_push
+            else:
+                message += malicious_error
+            status = 'error'
+        repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository )
+        malicious_check_box = CheckboxField( 'malicious', checked=is_malicious )
+        categories = suc.get_categories( trans.app )
+        selected_categories = [ _rca.category_id for _rca in repository.categories ]
+        tsucm = ToolShedUtilityContainerManager( trans.app )
+        containers_dict = tsucm.build_repository_containers( repository,
+                                                             changeset_revision,
+                                                             repository_dependencies,
+                                                             repository_metadata )
+        heads = hg_util.get_repository_heads( repo )
+        deprecated_repository_dependency_tups = \
+            metadata_util.get_repository_dependency_tups_from_repository_metadata( trans.app,
+                                                                                   repository_metadata,
+                                                                                   deprecated_only=True )
+        return trans.fill_template( '/webapps/tool_shed/repository/manage_repository.mako',
+                                    repo_name=repo_name,
+                                    remote_repository_url=remote_repository_url,
+                                    homepage_url=homepage_url,
+                                    description=description,
+                                    long_description=long_description,
+                                    current_allow_push_list=current_allow_push_list,
+                                    allow_push_select_field=allow_push_select_field,
+                                    deprecated_repository_dependency_tups=deprecated_repository_dependency_tups,
+                                    repo=repo,
+                                    heads=heads,
+                                    repository=repository,
+                                    containers_dict=containers_dict,
+                                    repository_metadata=repository_metadata,
+                                    changeset_revision=changeset_revision,
+                                    changeset_revision_select_field=changeset_revision_select_field,
+                                    revision_label=revision_label,
+                                    selected_categories=selected_categories,
+                                    categories=categories,
+                                    metadata=metadata,
+                                    avg_rating=avg_rating,
+                                    display_reviews=display_reviews,
+                                    num_ratings=num_ratings,
+                                    alerts_check_box=alerts_check_box,
+                                    malicious_check_box=malicious_check_box,
+                                    repository_type_select_field=repository_type_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "manage repository administrators" )
+    def manage_repository_admins( self, trans, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
+        metadata = None
+        if changeset_revision != hg_util.INITIAL_CHANGELOG_HASH:
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision )
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+            else:
+                # There is no repository_metadata defined for the changeset_revision, so see if it was defined
+                # in a previous changeset in the changelog.
+                repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+                previous_changeset_revision = \
+                    metadata_util.get_previous_metadata_changeset_revision( repository,
+                                                                            repo,
+                                                                            changeset_revision,
+                                                                            downloadable=False )
+                if previous_changeset_revision != hg_util.INITIAL_CHANGELOG_HASH:
+                    repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                                       id,
+                                                                                                       previous_changeset_revision )
+                    if repository_metadata:
+                        metadata = repository_metadata.metadata
+        role = repository.admin_role
+        associations_dict = repository_util.handle_role_associations( trans.app,
+                                                                      role,
+                                                                      repository,
+                                                                      **kwd )
+        in_users = associations_dict.get( 'in_users', [] )
+        out_users = associations_dict.get( 'out_users', [] )
+        in_groups = associations_dict.get( 'in_groups', [] )
+        out_groups = associations_dict.get( 'out_groups', [] )
+        message = associations_dict.get( 'message', '' )
+        status = associations_dict.get( 'status', 'done' )
+        return trans.fill_template( '/webapps/tool_shed/role/role.mako',
+                                    in_admin_controller=False,
+                                    repository=repository,
+                                    metadata=metadata,
+                                    changeset_revision=changeset_revision,
+                                    role=role,
+                                    in_users=in_users,
+                                    out_users=out_users,
+                                    in_groups=in_groups,
+                                    out_groups=out_groups,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "review repository revision" )
+    def manage_repository_reviews_of_revision( self, trans, **kwd ):
+        return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                          action='manage_repository_reviews_of_revision',
+                                                          **kwd ) )
+
+    @web.expose
+    @web.require_login( "multi select email alerts" )
+    def multi_select_email_alerts( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "receive email alerts":
+                if trans.user:
+                    if kwd[ 'id' ]:
+                        kwd[ 'caller' ] = 'multi_select_email_alerts'
+                        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                          action='set_email_alerts',
+                                                                          **kwd ) )
+                else:
+                    kwd[ 'message' ] = 'You must be logged in to set email alerts.'
+                    kwd[ 'status' ] = 'error'
+                    del kwd[ 'operation' ]
+            elif operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        self.email_alerts_repository_grid.title = "Set email alerts for repository changes"
+        return self.email_alerts_repository_grid( trans, **kwd )
+
+    @web.expose
+    def next_installable_changeset_revision( self, trans, **kwd ):
+        """
+        Handle a request from a Galaxy instance where the changeset_revision defined for a repository
+        in a dependency definition file is older than the changeset_revision associated with the installed
+        repository.
+        """
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        # Get the next installable changeset_revision beyond the received changeset_revision.
+        next_changeset_revision = metadata_util.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+        if next_changeset_revision and next_changeset_revision != changeset_revision:
+            return next_changeset_revision
+        return ''
+
+    @web.json
+    def open_folder( self, trans, folder_path, repository_id ):
+        # Avoid caching
+        trans.response.headers['Pragma'] = 'no-cache'
+        trans.response.headers['Expires'] = '0'
+        is_admin = trans.user_is_admin()
+        return suc.open_repository_files_folder( trans.app, folder_path, repository_id, is_admin )
+
+    @web.expose
+    def preview_tools_in_changeset( self, trans, repository_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+        changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
+        if repository_metadata:
+            repository_metadata_id = trans.security.encode_id( repository_metadata.id )
+            metadata = repository_metadata.metadata
+            # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
+            toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+            rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
+            repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
+            if metadata:
+                if 'repository_dependencies' in metadata and not repository_dependencies:
+                    # See if we have an invalid repository dependency definition or if the repository dependency is required
+                    # only for compiling the repository's tool dependency.
+                    invalid = False
+                    repository_dependencies_dict = metadata[ 'repository_dependencies' ]
+                    rd_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
+                    for rd_tup in rd_tups:
+                        rd_toolshed, \
+                            rd_name, \
+                            rd_owner, \
+                            rd_changeset_revision, \
+                            rd_prior_installation_required, \
+                            rd_only_if_compiling_contained_td = \
+                            common_util.parse_repository_dependency_tuple( rd_tup )
+                        if not util.asbool( rd_only_if_compiling_contained_td ):
+                            invalid = True
+                            break
+                    if invalid:
+                        dd = dependency_display.DependencyDisplayer( trans.app )
+                        message = dd.generate_message_for_invalid_repository_dependencies( metadata,
+                                                                                           error_from_tuple=False )
+                        status = 'error'
+        else:
+            repository_metadata_id = None
+            metadata = None
+            repository_dependencies = None
+        revision_label = hg_util.get_revision_label( trans.app, repository, changeset_revision, include_date=True )
+        changeset_revision_select_field = grids_util.build_changeset_revision_select_field( trans,
+                                                                                            repository,
+                                                                                            selected_value=changeset_revision,
+                                                                                            add_id_to_name=False,
+                                                                                            downloadable=False )
+        tsucm = ToolShedUtilityContainerManager( trans.app )
+        containers_dict = tsucm.build_repository_containers( repository,
+                                                             changeset_revision,
+                                                             repository_dependencies,
+                                                             repository_metadata )
+        return trans.fill_template( '/webapps/tool_shed/repository/preview_tools_in_changeset.mako',
+                                    repository=repository,
+                                    containers_dict=containers_dict,
+                                    repository_metadata_id=repository_metadata_id,
+                                    changeset_revision=changeset_revision,
+                                    revision_label=revision_label,
+                                    changeset_revision_select_field=changeset_revision_select_field,
+                                    metadata=metadata,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def previous_changeset_revisions( self, trans, from_tip=False, **kwd ):
+        """
+        Handle a request from a local Galaxy instance.  This method will handle two scenarios: (1) the
+        repository was previously installed using an older changeset_revision, but later the repository
+        was updated in the tool shed and the Galaxy admin is trying to install the latest changeset
+        revision of the same repository instead of updating the one that was previously installed. (2)
+        the admin is attempting to get updates for an installed repository that has a repository dependency
+        and both the repository and its dependency have available updates.  In this case, the from_tip
+        parameter will be True because the repository dependency definition may define a changeset hash
+        for the dependency that is newer than the installed changeset revision of the dependency (this is
+        due to the behavior of "Tool dependency definition" repositories, whose metadata is always the tip),
+        so the complete list of changeset hashes in the changelog must be returned.
+        """
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        if name is not None and owner is not None:
+            repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+            from_tip = util.string_as_bool( from_tip )
+            if from_tip:
+                changeset_revision = repository.tip( trans.app )
+            else:
+                changeset_revision = kwd.get( 'changeset_revision', None )
+            if changeset_revision is not None:
+                repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+                # Get the lower bound changeset revision.
+                lower_bound_changeset_revision = metadata_util.get_previous_metadata_changeset_revision( repository,
+                                                                                                         repo,
+                                                                                                         changeset_revision,
+                                                                                                         downloadable=True )
+                # Build the list of changeset revision hashes.
+                changeset_hashes = []
+                for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo,
+                                                                                 lower_bound_changeset_revision,
+                                                                                 changeset_revision ):
+                    changeset_hashes.append( str( repo.changectx( changeset ) ) )
+                if changeset_hashes:
+                    changeset_hashes_str = ','.join( changeset_hashes )
+                    return changeset_hashes_str
+        return ''
+
+    @web.expose
+    @web.require_login( "rate repositories" )
+    def rate_repository( self, trans, **kwd ):
+        """ Rate a repository and return updated rating data. """
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        id = kwd.get( 'id', None )
+        if not id:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              message='Select a repository to rate',
+                                                              status='error' ) )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        changeset_revision = repository.tip( trans.app )
+        if repository.user == trans.user:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='browse_repositories',
+                                                              message="You are not allowed to rate your own repository",
+                                                              status='error' ) )
+        if kwd.get( 'rate_button', False ):
+            rating = int( kwd.get( 'rating', '0' ) )
+            comment = kwd.get( 'comment', '' )
+            rating = self.rate_item( trans, trans.user, repository, rating, comment )
+        avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
+        display_reviews = util.string_as_bool( kwd.get( 'display_reviews', False ) )
+        rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
+        metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                              id,
+                                                                                              changeset_revision,
+                                                                                              metadata_only=True )
+        repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository )
+        revision_label = hg_util.get_revision_label( trans.app, repository, changeset_revision, include_date=True )
+        return trans.fill_template( '/webapps/tool_shed/repository/rate_repository.mako',
+                                    repository=repository,
+                                    metadata=metadata,
+                                    revision_label=revision_label,
+                                    avg_rating=avg_rating,
+                                    display_reviews=display_reviews,
+                                    num_ratings=num_ratings,
+                                    rra=rra,
+                                    repository_type_select_field=repository_type_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def reset_all_metadata( self, trans, id, **kwd ):
+        """Reset all metadata on the complete changelog for a single repository in the tool shed."""
+        # This method is called only from the ~/templates/webapps/tool_shed/repository/manage_repository.mako template.
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                     user=trans.user,
+                                                                     repository=repository,
+                                                                     resetting_all_metadata_on_repository=True )
+        rmm.reset_all_metadata_on_repository_in_tool_shed()
+        rmm_metadata_dict = rmm.get_metadata_dict()
+        rmm_invalid_file_tups = rmm.get_invalid_file_tups()
+        if rmm_invalid_file_tups:
+            message = tool_util.generate_message_for_invalid_tools( trans.app,
+                                                                    rmm_invalid_file_tups,
+                                                                    repository,
+                                                                    rmm_metadata_dict )
+            status = 'error'
+        else:
+            message = "All repository metadata has been reset.  "
+            status = 'done'
+        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                          action='manage_repository',
+                                                          id=id,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    def reset_metadata_on_my_writable_repositories_in_tool_shed( self, trans, **kwd ):
+        rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user, resetting_all_metadata_on_repository=True )
+        if 'reset_metadata_on_selected_repositories_button' in kwd:
+            message, status = rmm.reset_metadata_on_selected_repositories( **kwd )
+        else:
+            message = escape( kwd.get( 'message', '' ) )
+            status = kwd.get( 'status', 'done' )
+        repositories_select_field = rmm.build_repository_ids_select_field( name='repository_ids',
+                                                                           multiple=True,
+                                                                           display='checkboxes',
+                                                                           my_writable=True )
+        return trans.fill_template( '/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako',
+                                    repositories_select_field=repositories_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def select_files_to_delete( self, trans, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        commit_message = escape( kwd.get( 'commit_message', 'Deleted selected files' ) )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        repo_dir = repository.repo_path( trans.app )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
+        selected_files_to_delete = kwd.get( 'selected_files_to_delete', '' )
+        if kwd.get( 'select_files_to_delete_button', False ):
+            if selected_files_to_delete:
+                selected_files_to_delete = selected_files_to_delete.split( ',' )
+                # Get the current repository tip.
+                tip = repository.tip( trans.app )
+                for selected_file in selected_files_to_delete:
+                    try:
+                        hg_util.remove_file( repo.ui, repo, selected_file, force=True )
+                    except Exception as e:
+                        log.debug( "Error removing the following file using the mercurial API:\n %s" % str( selected_file ) )
+                        log.debug( "The error was: %s" % str( e ))
+                        log.debug( "Attempting to remove the file using a different approach." )
+                        relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+                        repo.dirstate.remove( relative_selected_file )
+                        repo.dirstate.write()
+                        absolute_selected_file = os.path.abspath( selected_file )
+                        if os.path.isdir( absolute_selected_file ):
+                            try:
+                                os.rmdir( absolute_selected_file )
+                            except OSError:
+                                # The directory is not empty.
+                                pass
+                        elif os.path.isfile( absolute_selected_file ):
+                            os.remove( absolute_selected_file )
+                            dir_path = os.path.split( absolute_selected_file )[0]
+                            try:
+                                os.rmdir( dir_path )
+                            except OSError:
+                                # The directory is not empty.
+                                pass
+                # Commit the change set.
+                if not commit_message:
+                    commit_message = 'Deleted selected files'
+                hg_util.commit_changeset( repo.ui,
+                                          repo,
+                                          full_path_to_changeset=repo_dir,
+                                          username=trans.user.username,
+                                          message=commit_message )
+                suc.handle_email_alerts( trans.app, trans.request.host, repository )
+                # Update the repository files for browsing.
+                hg_util.update_repository( repo )
+                # Get the new repository tip.
+                if tip == repository.tip( trans.app ):
+                    message += 'No changes to repository.  '
+                else:
+                    rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                                 user=trans.user,
+                                                                                 repository=repository )
+                    status, error_message = rmm.set_repository_metadata_due_to_new_tip( trans.request.host, **kwd )
+                    if error_message:
+                        message = error_message
+                    else:
+                        message += 'The selected files were deleted from the repository.  '
+            else:
+                message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
+                status = "error"
+        repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository )
+        changeset_revision = repository.tip( trans.app )
+        metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                              id,
+                                                                                              changeset_revision,
+                                                                                              metadata_only=True )
+        return trans.fill_template( '/webapps/tool_shed/repository/browse_repository.mako',
+                                    repo=repo,
+                                    repository=repository,
+                                    changeset_revision=changeset_revision,
+                                    metadata=metadata,
+                                    commit_message=commit_message,
+                                    repository_type_select_field=repository_type_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def send_to_owner( self, trans, id, message='' ):
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        if not message:
+            message = 'Enter a message'
+            status = 'error'
+        elif trans.user and trans.user.email:
+            smtp_server = trans.app.config.smtp_server
+            from_address = trans.app.config.email_from
+            if smtp_server is None or from_address is None:
+                return trans.show_error_message( "Mail is not configured for this Galaxy tool shed instance" )
+            to_address = repository.user.email
+            # Get the name of the server hosting the tool shed instance.
+            host = trans.request.host
+            # Build the email message
+            body = string.Template( suc.contact_owner_template ) \
+                .safe_substitute( username=trans.user.username,
+                                  repository_name=repository.name,
+                                  email=trans.user.email,
+                                  message=message,
+                                  host=host )
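+            # suc.contact_owner_template is assumed to define $username,
+            # $repository_name, $email, $message and $host placeholders;
+            # safe_substitute() leaves any unmatched placeholder intact rather
+            # than raising a KeyError.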
+            subject = "Regarding your tool shed repository named %s" % repository.name
+            # Send it
+            try:
+                util.send_mail( from_address, to_address, subject, body, trans.app.config )
+                message = "Your message has been sent"
+                status = "done"
+            except Exception as e:
+                message = "An error occurred sending your message by email: %s" % str( e )
+                status = "error"
+        else:
+            # Do all we can to eliminate spam.
+            return trans.show_error_message( "You must be logged in to contact the owner of a repository." )
+        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                          action='contact_owner',
+                                                          id=id,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_login( "set email alerts" )
+    def set_email_alerts( self, trans, **kwd ):
+        """Set email alerts for selected repositories."""
+        # This method is called from multiple grids, so the caller must be passed.
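+        # This action toggles the alert setting for each selected repository: if
+        # the user's email is already in a repository's alert list it is removed,
+        # otherwise it is added.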
+        caller = kwd[ 'caller' ]
+        user = trans.user
+        if user:
+            repository_ids = util.listify( kwd.get( 'id', '' ) )
+            total_alerts_added = 0
+            total_alerts_removed = 0
+            flush_needed = False
+            for repository_id in repository_ids:
+                repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                if repository.email_alerts:
+                    email_alerts = json.loads( repository.email_alerts )
+                else:
+                    email_alerts = []
+                if user.email in email_alerts:
+                    email_alerts.remove( user.email )
+                    repository.email_alerts = json.dumps( email_alerts )
+                    trans.sa_session.add( repository )
+                    flush_needed = True
+                    total_alerts_removed += 1
+                else:
+                    email_alerts.append( user.email )
+                    repository.email_alerts = json.dumps( email_alerts )
+                    trans.sa_session.add( repository )
+                    flush_needed = True
+                    total_alerts_added += 1
+            if flush_needed:
+                trans.sa_session.flush()
+            message = 'Total alerts added: %d, total alerts removed: %d' % ( total_alerts_added, total_alerts_removed )
+            kwd[ 'message' ] = message
+            kwd[ 'status' ] = 'done'
+        # 'operation' is set by the grid that redirected here; pop it defensively
+        # so a direct request without it does not raise a KeyError.
+        kwd.pop( 'operation', None )
+        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                          action=caller,
+                                                          **kwd ) )
+
+    @web.expose
+    @web.require_login( "set repository as malicious" )
+    def set_malicious( self, trans, id, ctx_str, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        malicious = kwd.get( 'malicious', '' )
+        if kwd.get( 'malicious_button', False ):
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, id, ctx_str )
+            malicious_checked = CheckboxField.is_checked( malicious )
+            repository_metadata.malicious = malicious_checked
+            trans.sa_session.add( repository_metadata )
+            trans.sa_session.flush()
+            if malicious_checked:
+                message = "The repository tip has been defined as malicious."
+            else:
+                message = "The repository tip has been defined as <b>not</b> malicious."
+            status = 'done'
+        return trans.response.send_redirect( web.url_for( controller='repository',
+                                                          action='manage_repository',
+                                                          id=id,
+                                                          changeset_revision=ctx_str,
+                                                          malicious=malicious,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    def sharable_owner( self, trans, owner ):
+        """Support for sharable URL for each repository owner's tools, e.g. http://example.org/view/owner."""
+        try:
+            user = common_util.get_user_by_username( trans, owner )
+        except Exception:
+            user = None
+        if user:
+            user_id = trans.security.encode_id( user.id )
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='index',
+                                                              user_id=user_id ) )
+        else:
+            return trans.show_error_message( "The tool shed <b>%s</b> contains no repositories owned by <b>%s</b>." %
+                                             ( web.url_for( '/', qualified=True ).rstrip( '/' ), str( owner ) ) )
+
+    @web.expose
+    def sharable_repository( self, trans, owner, name ):
+        """Support for sharable URL for a specified repository, e.g. http://example.org/view/owner/name."""
+        try:
+            repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        except Exception:
+            repository = None
+        if repository:
+            repository_id = trans.security.encode_id( repository.id )
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='index',
+                                                              repository_id=repository_id ) )
+        else:
+            # If the owner is valid, then show all of their repositories.
+            try:
+                user = common_util.get_user_by_username( trans, owner )
+            except Exception:
+                user = None
+            if user:
+                user_id = trans.security.encode_id( user.id )
+                message = "This list of repositories owned by <b>%s</b>, does not include one named <b>%s</b>." % ( str( owner ), str( name ) )
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='index',
+                                                                  user_id=user_id,
+                                                                  message=message,
+                                                                  status='error' ) )
+            else:
+                return trans.show_error_message( "The tool shed <b>%s</b> contains no repositories named <b>%s</b> with owner <b>%s</b>." %
+                                                 ( web.url_for( '/', qualified=True ).rstrip( '/' ), str( name ), str( owner ) ) )
+
+    @web.expose
+    def sharable_repository_revision( self, trans, owner, name, changeset_revision ):
+        """Support for sharable URL for a specified repository revision, e.g. http://example.org/view/owner/name/changeset_revision."""
+        try:
+            repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+        except Exception:
+            repository = None
+        if repository:
+            repository_id = trans.security.encode_id( repository.id )
+            repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                                             repository_id,
+                                                                                                             changeset_revision )
+            if not repository_metadata:
+                # Get updates to the received changeset_revision if any exist.
+                repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+                upper_bound_changeset_revision = metadata_util.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+                if upper_bound_changeset_revision and upper_bound_changeset_revision != changeset_revision:
+                    changeset_revision = upper_bound_changeset_revision
+                    repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                                                     repository_id,
+                                                                                                                     changeset_revision )
+            if repository_metadata:
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='index',
+                                                                  repository_id=repository_id,
+                                                                  changeset_revision=changeset_revision ) )
+            else:
+                message = "The change log for the repository named <b>%s</b> owned by <b>%s</b> does not include revision <b>%s</b>." % \
+                    ( escape( str( name ) ), escape( str( owner ) ), escape( str( changeset_revision ) ) )
+                return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                  action='index',
+                                                                  repository_id=repository_id,
+                                                                  message=message,
+                                                                  status='error' ) )
+        else:
+            # See if the owner is valid.
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='sharable_owner',
+                                                              owner=owner ) )
+
+    @web.expose
+    def updated_changeset_revisions( self, trans, **kwd ):
+        """
+        Handle a request from a local Galaxy instance to retrieve the list of changeset revisions to which an
+        installed repository can be updated.  This method will return a string of comma-separated changeset revision
+        hashes for all available updates to the received changeset revision.  Among other things, this method
+        handles the scenario where an installed tool shed repository's tool_dependency definition file defines a
+        changeset revision for a complex repository dependency that is outdated.  In other words, a defined changeset
+        revision is older than the current changeset revision for the required repository, making it impossible to
+        discover the repository without knowledge of revisions to which it could have been updated.
+        """
+        name = kwd.get( 'name', None )
+        owner = kwd.get( 'owner', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        if name and owner and changeset_revision:
+            return metadata_util.get_updated_changeset_revisions( trans.app, name, owner, changeset_revision )
+        return ''
+
+    @web.expose
+    def upload_capsule( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        url = kwd.get( 'url', '' )
+        if 'upload_capsule_button' in kwd:
+            irm = capsule_manager.ImportRepositoryManager( trans.app,
+                                                           trans.request.host,
+                                                           trans.user,
+                                                           trans.user_is_admin() )
+            capsule_dict = irm.upload_capsule( **kwd )
+            status = capsule_dict.get( 'status', 'error' )
+            if status == 'error':
+                message = capsule_dict.get( 'error_message', '' )
+            else:
+                capsule_dict = irm.extract_capsule_files( **capsule_dict )
+                capsule_dict = irm.validate_capsule( **capsule_dict )
+                status = capsule_dict.get( 'status', 'error' )
+                if status == 'ok':
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action='import_capsule',
+                                                                      **capsule_dict ) )
+                else:
+                    message = 'The capsule contents are invalid and cannot be imported:<br/>%s' % \
+                        str( capsule_dict.get( 'error_message', '' ) )
+        return trans.fill_template( '/webapps/tool_shed/repository/upload_capsule.mako',
+                                    url=url,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def view_changelog( self, trans, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        changesets = []
+        for changeset in repo.changelog:
+            ctx = repo.changectx( changeset )
+            if metadata_util.get_repository_metadata_by_changeset_revision( trans.app, id, str( ctx ) ):
+                has_metadata = True
+            else:
+                has_metadata = False
+            change_dict = { 'ctx': ctx,
+                            'rev': str( ctx.rev() ),
+                            'date': ctx.date(),  # ( unixtime, timezone offset ) tuple from mercurial
+                            'display_date': hg_util.get_readable_ctx_date( ctx ),
+                            'description': ctx.description(),
+                            'files': ctx.files(),
+                            'user': ctx.user(),
+                            'parent': ctx.parents()[0],
+                            'has_metadata': has_metadata }
+            # Insert at the front so the latest changeset is displayed first.
+            changesets.insert( 0, change_dict )
+        metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                              id,
+                                                                                              repository.tip( trans.app ),
+                                                                                              metadata_only=True )
+        return trans.fill_template( '/webapps/tool_shed/repository/view_changelog.mako',
+                                    repository=repository,
+                                    metadata=metadata,
+                                    changesets=changesets,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def view_changeset( self, trans, id, ctx_str, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        ctx = hg_util.get_changectx_for_changeset( repo, ctx_str )
+        if ctx is None:
+            message = "Repository does not include changeset revision '%s'." % str( ctx_str )
+            status = 'error'
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='view_changelog',
+                                                              id=id,
+                                                              message=message,
+                                                              status=status ) )
+        ctx_parent = ctx.parents()[ 0 ]
+        if ctx.children():
+            ctx_child = ctx.children()[ 0 ]
+        else:
+            ctx_child = None
+        diffs = []
+        options_dict = hg_util.get_mercurial_default_options_dict( 'diff' )
+        # Not quite sure if the following settings make any difference, but with a combination of them and the size check on each
+        # diff, we don't run out of memory when viewing the changelog of the cisortho2 repository on the test tool shed.
+        options_dict[ 'maxfile' ] = basic_util.MAXDIFFSIZE
+        options_dict[ 'maxtotal' ] = basic_util.MAXDIFFSIZE
+        diffopts = mdiff.diffopts( **options_dict )
+        for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node(), opts=diffopts ):
+            if len( diff ) > basic_util.MAXDIFFSIZE:
+                diff = util.shrink_string_by_size( diff, basic_util.MAXDIFFSIZE )
+            diffs.append( basic_util.to_html_string( diff ) )
+        modified, added, removed, deleted, unknown, ignored, clean = repo.status( node1=ctx_parent.node(), node2=ctx.node() )
+        anchors = modified + added + removed + deleted + unknown + ignored + clean
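+        # 'anchors' collects every affected file name; the template presumably
+        # uses them as in-page anchors linking to each file's rendered diff.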
+        metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                              id,
+                                                                                              ctx_str,
+                                                                                              metadata_only=True )
+        # For rendering the prev button.
+        if ctx_parent:
+            ctx_parent_date = hg_util.get_readable_ctx_date( ctx_parent )
+            ctx_parent_rev = ctx_parent.rev()
+            if ctx_parent_rev < 0:
+                prev = None
+            else:
+                prev = "<b>%s:%s</b> <i>(%s)</i>" % ( ctx_parent_rev, ctx_parent, ctx_parent_date )
+        else:
+            prev = None
+        if ctx_child:
+            ctx_child_date = hg_util.get_readable_ctx_date( ctx_child )
+            ctx_child_rev = ctx_child.rev()
+            next = "<b>%s:%s</b> <i>(%s)</i>" % ( ctx_child_rev, ctx_child, ctx_child_date )
+        else:
+            next = None
+        return trans.fill_template( '/webapps/tool_shed/repository/view_changeset.mako',
+                                    repository=repository,
+                                    metadata=metadata,
+                                    prev=prev,
+                                    next=next,
+                                    ctx=ctx,
+                                    ctx_parent=ctx_parent,
+                                    ctx_child=ctx_child,
+                                    anchors=anchors,
+                                    modified=modified,
+                                    added=added,
+                                    removed=removed,
+                                    deleted=deleted,
+                                    unknown=unknown,
+                                    ignored=ignored,
+                                    clean=clean,
+                                    diffs=diffs,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def view_or_manage_repository( self, trans, **kwd ):
+        repository_id = kwd.get( 'id', None )
+        if repository_id:
+            repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+            user = trans.user
+            if repository:
+                if user is not None and ( trans.user_is_admin() or
+                                          trans.app.security_agent.user_can_administer_repository( user, repository ) ):
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action='manage_repository',
+                                                                      **kwd ) )
+                else:
+                    return trans.response.send_redirect( web.url_for( controller='repository',
+                                                                      action='view_repository',
+                                                                      **kwd ) )
+            return trans.show_error_message( "Invalid repository id '%s' received." % repository_id )
+        return trans.show_error_message( "The repository id was not received." )
+
+    @web.expose
+    def view_repository( self, trans, id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, id )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
+        changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
+        repository.share_url = repository_util.generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision=changeset_revision )
+        repository.clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans.user, repository )
+        display_reviews = kwd.get( 'display_reviews', False )
+        alerts = kwd.get( 'alerts', '' )
+        alerts_checked = CheckboxField.is_checked( alerts )
+        if repository.email_alerts:
+            email_alerts = json.loads( repository.email_alerts )
+        else:
+            email_alerts = []
+        repository_dependencies = None
+        user = trans.user
+        if user and kwd.get( 'receive_email_alerts_button', False ):
+            flush_needed = False
+            if alerts_checked:
+                if user.email not in email_alerts:
+                    email_alerts.append( user.email )
+                    repository.email_alerts = json.dumps( email_alerts )
+                    flush_needed = True
+            else:
+                if user.email in email_alerts:
+                    email_alerts.remove( user.email )
+                    repository.email_alerts = json.dumps( email_alerts )
+                    flush_needed = True
+            if flush_needed:
+                trans.sa_session.add( repository )
+                trans.sa_session.flush()
+        checked = alerts_checked or ( user and user.email in email_alerts )
+        alerts_check_box = CheckboxField( 'alerts', checked=checked )
+        changeset_revision_select_field = grids_util.build_changeset_revision_select_field( trans,
+                                                                                            repository,
+                                                                                            selected_value=changeset_revision,
+                                                                                            add_id_to_name=False,
+                                                                                            downloadable=False )
+        revision_label = hg_util.get_revision_label( trans.app, repository, changeset_revision, include_date=False )
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision )
+        if repository_metadata:
+            metadata = repository_metadata.metadata
+            # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
+            toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+            rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
+            repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
+            if str( repository.type ) != rt_util.TOOL_DEPENDENCY_DEFINITION:
+                # Handle messaging for orphan tool dependency definitions.
+                dd = dependency_display.DependencyDisplayer( trans.app )
+                orphan_message = dd.generate_message_for_orphan_tool_dependencies( repository, metadata )
+                if orphan_message:
+                    message += orphan_message
+                    status = 'warning'
+        else:
+            metadata = None
+        is_malicious = metadata_util.is_malicious( trans.app, id, repository.tip( trans.app ) )
+        if is_malicious:
+            if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
+                message += malicious_error_can_push
+            else:
+                message += malicious_error
+            status = 'error'
+        tsucm = ToolShedUtilityContainerManager( trans.app )
+        containers_dict = tsucm.build_repository_containers( repository,
+                                                             changeset_revision,
+                                                             repository_dependencies,
+                                                             repository_metadata )
+        repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository )
+        heads = hg_util.get_repository_heads( repo )
+        return trans.fill_template( '/webapps/tool_shed/repository/view_repository.mako',
+                                    repo=repo,
+                                    heads=heads,
+                                    repository=repository,
+                                    repository_metadata=repository_metadata,
+                                    metadata=metadata,
+                                    containers_dict=containers_dict,
+                                    avg_rating=avg_rating,
+                                    display_reviews=display_reviews,
+                                    num_ratings=num_ratings,
+                                    alerts_check_box=alerts_check_box,
+                                    changeset_revision=changeset_revision,
+                                    changeset_revision_select_field=changeset_revision_select_field,
+                                    revision_label=revision_label,
+                                    repository_type_select_field=repository_type_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def view_tool_metadata( self, trans, repository_id, changeset_revision, tool_id, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        render_repository_actions_for = kwd.get( 'render_repository_actions_for', 'tool_shed' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+        repo_files_dir = repository.repo_path( trans.app )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_files_dir, create=False )
+        tool_metadata_dict = {}
+        tool_lineage = []
+        tool = None
+        guid = None
+        original_tool_data_path = trans.app.config.tool_data_path
+        revision_label = hg_util.get_revision_label( trans.app, repository, changeset_revision, include_date=False )
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
+        if repository_metadata:
+            repository_metadata_id = trans.security.encode_id( repository_metadata.id )
+            metadata = repository_metadata.metadata
+            if metadata:
+                if 'tools' in metadata:
+                    tv = tool_validator.ToolValidator( trans.app )
+                    for tool_metadata_dict in metadata[ 'tools' ]:
+                        if tool_metadata_dict[ 'id' ] == tool_id:
+                            work_dir = tempfile.mkdtemp()
+                            relative_path_to_tool_config = tool_metadata_dict[ 'tool_config' ]
+                            guid = tool_metadata_dict[ 'guid' ]
+                            full_path_to_tool_config = os.path.abspath( relative_path_to_tool_config )
+                            full_path_to_dir, tool_config_filename = os.path.split( full_path_to_tool_config )
+                            can_use_disk_file = tv.can_use_tool_config_disk_file( repository,
+                                                                                  repo,
+                                                                                  full_path_to_tool_config,
+                                                                                  changeset_revision )
+                            if can_use_disk_file:
+                                trans.app.config.tool_data_path = work_dir
+                                tool, valid, message, sample_files = \
+                                    tv.handle_sample_files_and_load_tool_from_disk( repo_files_dir,
+                                                                                    repository_id,
+                                                                                    full_path_to_tool_config,
+                                                                                    work_dir )
+                                if message:
+                                    status = 'error'
+                            else:
+                                tool, message, sample_files = \
+                                    tv.handle_sample_files_and_load_tool_from_tmp_config( repo,
+                                                                                          repository_id,
+                                                                                          changeset_revision,
+                                                                                          tool_config_filename,
+                                                                                          work_dir )
+                                if message:
+                                    status = 'error'
+                            basic_util.remove_dir( work_dir )
+                            break
+                    if guid:
+                        tvm = tool_version_manager.ToolVersionManager( trans.app )
+                        tool_lineage = tvm.get_version_lineage_for_tool( repository_id,
+                                                                         repository_metadata,
+                                                                         guid )
+        else:
+            repository_metadata_id = None
+            metadata = None
+        changeset_revision_select_field = grids_util.build_changeset_revision_select_field( trans,
+                                                                                            repository,
+                                                                                            selected_value=changeset_revision,
+                                                                                            add_id_to_name=False,
+                                                                                            downloadable=False )
+        trans.app.config.tool_data_path = original_tool_data_path
+        return trans.fill_template( "/webapps/tool_shed/repository/view_tool_metadata.mako",
+                                    render_repository_actions_for=render_repository_actions_for,
+                                    repository=repository,
+                                    repository_metadata_id=repository_metadata_id,
+                                    metadata=metadata,
+                                    tool=tool,
+                                    tool_metadata_dict=tool_metadata_dict,
+                                    tool_lineage=tool_lineage,
+                                    changeset_revision=changeset_revision,
+                                    revision_label=revision_label,
+                                    changeset_revision_select_field=changeset_revision_select_field,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    def view_workflow( self, trans, workflow_name, repository_metadata_id, **kwd ):
+        """Retrieve necessary information about a workflow from the database so that it can be displayed in an svg image."""
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        render_repository_actions_for = kwd.get( 'render_repository_actions_for', 'tool_shed' )
+        if workflow_name:
+            workflow_name = encoding_util.tool_shed_decode( workflow_name )
+        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, trans.security.encode_id( repository_metadata.repository_id ) )
+        changeset_revision = repository_metadata.changeset_revision
+        metadata = repository_metadata.metadata
+        return trans.fill_template( "/webapps/tool_shed/repository/view_workflow.mako",
+                                    repository=repository,
+                                    render_repository_actions_for=render_repository_actions_for,
+                                    changeset_revision=changeset_revision,
+                                    repository_metadata_id=repository_metadata_id,
+                                    workflow_name=workflow_name,
+                                    metadata=metadata,
+                                    message=message,
+                                    status=status )
diff --git a/lib/galaxy/webapps/tool_shed/controllers/repository_review.py b/lib/galaxy/webapps/tool_shed/controllers/repository_review.py
new file mode 100644
index 0000000..ce53369
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository_review.py
@@ -0,0 +1,613 @@
+import logging
+
+from sqlalchemy import and_, func, false
+
+import tool_shed.grids.repository_review_grids as repository_review_grids
+import tool_shed.grids.util as grids_util
+from galaxy import util
+from galaxy import web
+from galaxy.util.odict import odict
+from galaxy.web.base.controller import BaseUIController
+from galaxy.web.form_builder import CheckboxField
+from galaxy.webapps.tool_shed.util import ratings_util
+from tool_shed.util import hg_util
+from tool_shed.util import review_util
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util.container_util import STRSEP
+from tool_shed.util.web_util import escape
+
+log = logging.getLogger( __name__ )
+
+
+class RepositoryReviewController( BaseUIController, ratings_util.ItemRatings ):
+
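+    # Grid definitions backing the browse and manage views below.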
+    component_grid = repository_review_grids.ComponentGrid()
+    repositories_ready_for_review_grid = repository_review_grids.RepositoriesReadyForReviewGrid()
+    repositories_reviewed_by_me_grid = repository_review_grids.RepositoriesReviewedByMeGrid()
+    repositories_with_reviews_grid = repository_review_grids.RepositoriesWithReviewsGrid()
+    repositories_without_reviews_grid = repository_review_grids.RepositoriesWithoutReviewsGrid()
+    repository_reviews_by_user_grid = repository_review_grids.RepositoryReviewsByUserGrid()
+    reviewed_repositories_i_own_grid = repository_review_grids.ReviewedRepositoriesIOwnGrid()
+    repositories_with_no_tool_tests_grid = repository_review_grids.RepositoriesWithNoToolTestsGrid()
+
+    @web.expose
+    @web.require_login( "approve repository review" )
+    def approve_repository_review( self, trans, **kwd ):
+        # The value of the received id is the encoded review id.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        encoded_review_id = kwd[ 'id' ]
+        review = review_util.get_review( trans.app, encoded_review_id )
+        if kwd.get( 'approve_repository_review_button', False ):
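+            # The selected approval value arrives in a form field named <encoded review id><STRSEP>approved.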
+            approved_select_field_name = '%s%sapproved' % ( encoded_review_id, STRSEP )
+            approved_select_field_value = str( kwd[ approved_select_field_name ] )
+            review.approved = approved_select_field_value
+            trans.sa_session.add( review )
+            trans.sa_session.flush()
+            message = 'Approved value <b>%s</b> saved for this revision.' % escape( approved_select_field_value )
+        repository_id = trans.security.encode_id( review.repository_id )
+        changeset_revision = review.changeset_revision
+        return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                          action='manage_repository_reviews_of_revision',
+                                                          id=repository_id,
+                                                          changeset_revision=changeset_revision,
+                                                          message=message,
+                                                          status=status ) )
+
+    @web.expose
+    @web.require_login( "browse components" )
+    def browse_components( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd[ 'operation' ].lower()
+            if operation == "create":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='create_component',
+                                                                  **kwd ) )
+        return self.component_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "browse review" )
+    def browse_review( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        review = review_util.get_review( trans.app, kwd[ 'id' ] )
+        repository = review.repository
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        rev, changeset_revision_label = hg_util.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+        return trans.fill_template( '/webapps/tool_shed/repository_review/browse_review.mako',
+                                    repository=repository,
+                                    changeset_revision_label=changeset_revision_label,
+                                    review=review,
+                                    message=message,
+                                    status=status )
+
+    def copy_review( self, trans, review_to_copy, review ):
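+        """Copy all component reviews, along with the overall approval and rating, from review_to_copy onto review."""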
+        for component_review in review_to_copy.component_reviews:
+            copied_component_review = trans.model.ComponentReview( repository_review_id=review.id,
+                                                                   component_id=component_review.component.id,
+                                                                   comment=component_review.comment,
+                                                                   private=component_review.private,
+                                                                   approved=component_review.approved,
+                                                                   rating=component_review.rating )
+            trans.sa_session.add( copied_component_review )
+            trans.sa_session.flush()
+        review.approved = review_to_copy.approved
+        review.rating = review_to_copy.rating
+        trans.sa_session.add( review )
+        trans.sa_session.flush()
+
+    @web.expose
+    @web.require_login( "create component" )
+    def create_component( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        name = kwd.get( 'name', '' )
+        description = kwd.get( 'description', '' )
+        if kwd.get( 'create_component_button', False ):
+            if not name or not description:
+                message = 'Enter a valid name and a description'
+                status = 'error'
+            elif review_util.get_component_by_name( trans.app, name ):
+                message = 'A component with that name already exists'
+                status = 'error'
+            else:
+                component = trans.app.model.Component( name=name, description=description )
+                trans.sa_session.add( component )
+                trans.sa_session.flush()
+                message = "Component '%s' has been created" % escape( component.name )
+                status = 'done'
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='manage_components',
+                                                                  message=message,
+                                                                  status=status ) )
+        return trans.fill_template( '/webapps/tool_shed/repository_review/create_component.mako',
+                                    name=name,
+                                    description=description,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "create review" )
+    def create_review( self, trans, **kwd ):
+        # The value of the received id is the encoded repository id.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_id = kwd.get( 'id', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        previous_review_id = kwd.get( 'previous_review_id', None )
+        create_without_copying = 'create_without_copying' in kwd
+        if repository_id:
+            if changeset_revision:
+                # Make sure there is not already a review of the revision by the user.
+                repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+                if review_util.get_review_by_repository_id_changeset_revision_user_id( app=trans.app,
+                                                                                       repository_id=repository_id,
+                                                                                       changeset_revision=changeset_revision,
+                                                                                       user_id=trans.security.encode_id( trans.user.id ) ):
+                    message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, escape( repository.name ) )
+                    status = "error"
+                else:
+                    # See if there are any reviews for previous changeset revisions that the user can copy.
+                    if not create_without_copying and \
+                            not previous_review_id and \
+                            review_util.has_previous_repository_reviews( trans.app, repository, changeset_revision ):
+                        return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                          action='select_previous_review',
+                                                                          **kwd ) )
+                    # A review can be initially performed only on an installable revision of a repository, so make sure we have metadata associated
+                    # with the received changeset_revision.
+                    repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
+                    if repository_metadata:
+                        metadata = repository_metadata.metadata
+                        if metadata:
+                            review = trans.app.model.RepositoryReview( repository_id=repository_metadata.repository_id,
+                                                                       changeset_revision=changeset_revision,
+                                                                       user_id=trans.user.id,
+                                                                       rating=None,
+                                                                       deleted=False )
+                            trans.sa_session.add( review )
+                            trans.sa_session.flush()
+                            if previous_review_id:
+                                review_to_copy = review_util.get_review( trans.app, previous_review_id )
+                                self.copy_review( trans, review_to_copy, review )
+                            review_id = trans.security.encode_id( review.id )
+                            message = "Begin your review of revision <b>%s</b> of repository <b>%s</b>." \
+                                % ( changeset_revision, repository.name )
+                            status = 'done'
+                            return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                              action='edit_review',
+                                                                              id=review_id,
+                                                                              message=message,
+                                                                              status=status ) )
+                    else:
+                        message = "A new review cannot be created for revision <b>%s</b> of repository <b>%s</b>.  Select a valid revision and try again." \
+                            % ( changeset_revision, escape( repository.name ) )
+                        kwd[ 'message' ] = message
+                        kwd[ 'status' ] = 'error'
+            else:
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='manage_repository_reviews',
+                                                                  **kwd ) )
+        return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                          action='view_or_manage_repository',
+                                                          **kwd ) )
+
+    @web.expose
+    @web.require_login( "edit component" )
+    def edit_component( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        id = kwd.get( 'id', None )
+        if not id:
+            message = "No component ids received for editing"
+            return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                              action='manage_components',
+                                                              message=message,
+                                                              status='error' ) )
+        component = review_util.get_component( trans.app, id )
+        if kwd.get( 'edit_component_button', False ):
+            new_description = kwd.get( 'description', '' ).strip()
+            if component.description != new_description:
+                component.description = new_description
+                trans.sa_session.add( component )
+                trans.sa_session.flush()
+                message = "The information has been saved for the component named <b>%s</b>" % escape( component.name )
+                status = 'done'
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='manage_components',
+                                                                  message=message,
+                                                                  status=status ) )
+        return trans.fill_template( '/webapps/tool_shed/repository_review/edit_component.mako',
+                                    component=component,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "edit review" )
+    def edit_review( self, trans, **kwd ):
+        # The value of the received id is the encoded review id.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        review_id = kwd.get( 'id', None )
+        review = review_util.get_review( trans.app, review_id )
+        components_dict = odict()
+        for component in review_util.get_components( trans.app ):
+            components_dict[ component.name ] = dict( component=component, component_review=None )
+        repository = review.repository
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        for component_review in review.component_reviews:
+            if component_review and component_review.component:
+                component_name = component_review.component.name
+                if component_name in components_dict:
+                    component_review_dict = components_dict[ component_name ]
+                    component_review_dict[ 'component_review' ] = component_review
+                    components_dict[ component_name ] = component_review_dict
+        # Handle a Save button click.
+        save_button_clicked = False
+        save_buttons = [ '%s%sreview_button' % ( comp_name, STRSEP ) for comp_name in components_dict.keys() ]
+        save_buttons.append( 'revision_approved_button' )
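+        # Each component has its own save button named <component name><STRSEP>review_button; the
+        # revision-level approval has its own button as well.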
+        for save_button in save_buttons:
+            if save_button in kwd:
+                save_button_clicked = True
+                break
+        if save_button_clicked:
+            # Handle the revision_approved_select_field value.
+            revision_approved = kwd.get( 'revision_approved', None )
+            revision_approved_setting_changed = False
+            if revision_approved:
+                revision_approved = str( revision_approved )
+                if review.approved != revision_approved:
+                    revision_approved_setting_changed = True
+                    review.approved = revision_approved
+                    trans.sa_session.add( review )
+                    trans.sa_session.flush()
+            saved_component_names = []
+            for component_name in components_dict.keys():
+                flushed = False
+                # Retrieve the review information from the form.  The star rating form field is a radio
+                # button list, so it will not be received if it was not clicked in the form.  Due to this
+                # behavior, default the value to 0.  Default the other optional fields as well so that an
+                # incomplete submission cannot trigger a NameError below.
+                rating = 0
+                comment = ''
+                private = False
+                approved = trans.model.ComponentReview.approved_states.NO
+                for k, v in kwd.items():
+                    if k.startswith( '%s%s' % ( component_name, STRSEP ) ):
+                        component_review_attr = k.replace( '%s%s' % ( component_name, STRSEP ), '' )
+                        if component_review_attr == 'component_id':
+                            component_id = str( v )
+                        elif component_review_attr == 'comment':
+                            comment = str( v )
+                        elif component_review_attr == 'private':
+                            private = CheckboxField.is_checked( v )
+                        elif component_review_attr == 'approved':
+                            approved = str( v )
+                        elif component_review_attr == 'rating':
+                            rating = int( str( v ) )
+                component = review_util.get_component( trans.app, component_id )
+                component_review = \
+                    review_util.get_component_review_by_repository_review_id_component_id( trans.app,
+                                                                                           review_id,
+                                                                                           component_id )
+                if component_review:
+                    # See if the existing component review should be updated.
+                    if component_review.comment != comment or \
+                            component_review.private != private or \
+                            component_review.approved != approved or \
+                            component_review.rating != rating:
+                        component_review.comment = comment
+                        component_review.private = private
+                        component_review.approved = approved
+                        component_review.rating = rating
+                        trans.sa_session.add( component_review )
+                        trans.sa_session.flush()
+                        flushed = True
+                        saved_component_names.append( component_name )
+                else:
+                    # See if a new component_review should be created.
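+                    # A row is created only when the form contains something other than the default values.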
+                    if comment or private or approved != trans.model.ComponentReview.approved_states.NO or rating:
+                        component_review = trans.model.ComponentReview( repository_review_id=review.id,
+                                                                        component_id=component.id,
+                                                                        comment=comment,
+                                                                        approved=approved,
+                                                                        rating=rating )
+                        trans.sa_session.add( component_review )
+                        trans.sa_session.flush()
+                        flushed = True
+                        saved_component_names.append( component_name )
+                if flushed:
+                    # Update the repository rating value to be the average of all component review ratings.
+                    average_rating = trans.sa_session.query( func.avg( trans.model.ComponentReview.table.c.rating ) ) \
+                                                     .filter( and_( trans.model.ComponentReview.table.c.repository_review_id == review.id,
+                                                                    trans.model.ComponentReview.table.c.deleted == false(),
+                                                                    trans.model.ComponentReview.table.c.approved != trans.model.ComponentReview.approved_states.NA ) ) \
+                                                     .scalar()
+                    if average_rating is not None:
+                        review.rating = int( average_rating )
+                    trans.sa_session.add( review )
+                    trans.sa_session.flush()
+                    # Update the information in components_dict.
+                    if component_name in components_dict:
+                        component_review_dict = components_dict[ component_name ]
+                        component_review_dict[ 'component_review' ] = component_review
+                        components_dict[ component_name ] = component_review_dict
+            if revision_approved_setting_changed:
+                message += 'Approved value <b>%s</b> saved for this revision.<br/>' % review.approved
+            if saved_component_names:
+                message += 'Reviews were saved for components: %s' % ', '.join( saved_component_names )
+            if not revision_approved_setting_changed and not saved_component_names:
+                message += 'No changes were made to this review, so nothing was saved.'
+        if review and review.approved:
+            selected_value = review.approved
+        else:
+            selected_value = trans.model.ComponentReview.approved_states.NO
+        revision_approved_select_field = grids_util.build_approved_select_field( trans,
+                                                                                 name='revision_approved',
+                                                                                 selected_value=selected_value,
+                                                                                 for_component=False )
+        rev, changeset_revision_label = hg_util.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+        return trans.fill_template( '/webapps/tool_shed/repository_review/edit_review.mako',
+                                    repository=repository,
+                                    review=review,
+                                    changeset_revision_label=changeset_revision_label,
+                                    revision_approved_select_field=revision_approved_select_field,
+                                    components_dict=components_dict,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "manage components" )
+    def manage_components( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "create":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='create_component',
+                                                                  **kwd ) )
+            elif operation == "edit":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='edit_component',
+                                                                  **kwd ) )
+        if 'message' not in kwd:
+            message = "This is a list of repository components (features) that can be reviewed.  You can add new components or change "
+            message += "the description of an existing component if appropriate.  Click on the name link to change the description."
+            status = "warning"
+            kwd[ 'message' ] = message
+            kwd[ 'status' ] = status
+        return self.component_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "manage repositories ready for review" )
+    def manage_repositories_ready_for_review( self, trans, **kwd ):
+        """
+        A repository is ready to be reviewed if one of the following conditions is met:
+        1) It contains no tools
+        2) It contains tools and the tools_functionally_correct flag is set to True.  This implies that the repository metadata revision was installed and tested
+           by the Tool Shed's install and test framework.
+        """
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "inspect repository revisions":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='create_review',
+                                                                  **kwd ) )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        message = 'Each of these repositories that contains tools has been installed into Galaxy and proven to be functionally correct by executing the tests '
+        message += 'defined for each tool.  Repositories that do not contain tools have not been installed into Galaxy. '
+        kwd[ 'message' ] = message
+        kwd[ 'status' ] = 'warning'
+        return self.repositories_ready_for_review_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "manage repositories reviewed by me" )
+    def manage_repositories_reviewed_by_me( self, trans, **kwd ):
+        # The value of the received id is the encoded repository id.
+        if 'operation' in kwd:
+            kwd[ 'mine' ] = True
+            return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                              action='manage_repositories_with_reviews',
+                                                              **kwd ) )
+        self.repositories_reviewed_by_me_grid.title = 'Repositories reviewed by me'
+        return self.repositories_reviewed_by_me_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "manage repositories with invalid tests" )
+    def manage_repositories_with_invalid_tests( self, trans, **kwd ):
+        """
+        Display a list of repositories that contain tools, have not yet been reviewed, and have invalid functional tests.  Tests are defined as
+        invalid if they are missing from the tool config or if defined test data is not included in the repository.
+        """
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "inspect repository revisions":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='create_review',
+                                                                  **kwd ) )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        message = 'These repositories contain tools with missing functional tests or test data.  '
+        kwd[ 'message' ] = message
+        kwd[ 'status' ] = 'warning'
+        return self.repositories_with_no_tool_tests_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "manage repositories with reviews" )
+    def manage_repositories_with_reviews( self, trans, **kwd ):
+        # The value of the received id is the encoded repository id.
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "inspect repository revisions":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='manage_repository_reviews',
+                                                                  **kwd ) )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        return self.repositories_with_reviews_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "manage repositories without reviews" )
+    def manage_repositories_without_reviews( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "inspect repository revisions":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='create_review',
+                                                                  **kwd ) )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        return self.repositories_without_reviews_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "manage repository reviews" )
+    def manage_repository_reviews( self, trans, mine=False, **kwd ):
+        # The value of the received id is the encoded repository id.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_id = kwd.get( 'id', None )
+        # Default these so that rendering the template cannot raise a NameError when no id is received.
+        repository = None
+        reviews_dict = odict()
+        if repository_id:
+            repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+            repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+            metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
+            reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
+            reviews_dict = odict()
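+            # Walk the changelog newest-first, keeping only revisions that are installable or already reviewed.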
+            for changeset in hg_util.get_reversed_changelog_changesets( repo ):
+                ctx = repo.changectx( changeset )
+                changeset_revision = str( ctx )
+                if changeset_revision in metadata_revision_hashes or changeset_revision in reviewed_revision_hashes:
+                    rev, changeset_revision_label = hg_util.get_rev_label_from_changeset_revision( repo, changeset_revision )
+                    if changeset_revision in reviewed_revision_hashes:
+                        # Find the review for this changeset_revision
+                        repository_reviews = \
+                            review_util.get_reviews_by_repository_id_changeset_revision( trans.app,
+                                                                                         repository_id,
+                                                                                         changeset_revision )
+                        # Determine if the current user can add a review to this revision.
+                        can_add_review = trans.user not in [ repository_review.user for repository_review in repository_reviews ]
+                        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
+                        if repository_metadata:
+                            repository_metadata_reviews = util.listify( repository_metadata.reviews )
+                        else:
+                            repository_metadata_reviews = []
+                    else:
+                        repository_reviews = []
+                        repository_metadata_reviews = []
+                        can_add_review = True
+                    installable = changeset_revision in metadata_revision_hashes
+                    revision_dict = dict( changeset_revision_label=changeset_revision_label,
+                                          repository_reviews=repository_reviews,
+                                          repository_metadata_reviews=repository_metadata_reviews,
+                                          installable=installable,
+                                          can_add_review=can_add_review )
+                    reviews_dict[ changeset_revision ] = revision_dict
+        return trans.fill_template( '/webapps/tool_shed/repository_review/reviews_of_repository.mako',
+                                    repository=repository,
+                                    reviews_dict=reviews_dict,
+                                    mine=mine,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "manage repository reviews of revision" )
+    def manage_repository_reviews_of_revision( self, trans, **kwd ):
+        # The value of the received id is the encoded repository id.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository_id = kwd.get( 'id', None )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
+        rev, changeset_revision_label = hg_util.get_rev_label_from_changeset_revision( repo, changeset_revision )
+        reviews = review_util.get_reviews_by_repository_id_changeset_revision( trans.app,
+                                                                               repository_id,
+                                                                               changeset_revision )
+        return trans.fill_template( '/webapps/tool_shed/repository_review/reviews_of_changeset_revision.mako',
+                                    repository=repository,
+                                    changeset_revision=changeset_revision,
+                                    changeset_revision_label=changeset_revision_label,
+                                    reviews=reviews,
+                                    installable=installable,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "repository reviews by user" )
+    def repository_reviews_by_user( self, trans, **kwd ):
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            # The value of the received id is the encoded review id.
+            review = review_util.get_review( trans.app, kwd[ 'id' ] )
+            repository = review.repository
+            kwd[ 'id' ] = trans.security.encode_id( repository.id )
+            if operation == "inspect repository revisions":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='manage_repository_reviews',
+                                                                  **kwd ) )
+            if operation == "view_or_manage_repository":
+                kwd[ 'changeset_revision' ] = review.changeset_revision
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        # The user may not be the current user.  The value of the received id is the encoded user id.
+        user = suc.get_user( trans.app, kwd[ 'id' ] )
+        self.repository_reviews_by_user_grid.title = "All repository revision reviews for user '%s'" % user.username
+        return self.repository_reviews_by_user_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "reviewed repositories i own" )
+    def reviewed_repositories_i_own( self, trans, **kwd ):
+        # The value of the received id is the encoded repository id.
+        if 'operation' in kwd:
+            operation = kwd['operation'].lower()
+            if operation == "inspect repository revisions":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='manage_repository_reviews',
+                                                                  **kwd ) )
+            if operation == "view_or_manage_repository":
+                return trans.response.send_redirect( web.url_for( controller='repository_review',
+                                                                  action='view_or_manage_repository',
+                                                                  **kwd ) )
+        return self.reviewed_repositories_i_own_grid( trans, **kwd )
+
+    @web.expose
+    @web.require_login( "select previous review" )
+    def select_previous_review( self, trans, **kwd ):
+        # The value of the received id is the encoded repository id.
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, kwd[ 'id' ] )
+        changeset_revision = kwd.get( 'changeset_revision', None )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+        previous_reviews_dict = review_util.get_previous_repository_reviews( trans.app,
+                                                                             repository,
+                                                                             changeset_revision )
+        rev, changeset_revision_label = hg_util.get_rev_label_from_changeset_revision( repo, changeset_revision )
+        return trans.fill_template( '/webapps/tool_shed/repository_review/select_previous_review.mako',
+                                    repository=repository,
+                                    changeset_revision=changeset_revision,
+                                    changeset_revision_label=changeset_revision_label,
+                                    previous_reviews_dict=previous_reviews_dict,
+                                    message=message,
+                                    status=status )
+
+    @web.expose
+    @web.require_login( "view or manage repository" )
+    def view_or_manage_repository( self, trans, **kwd ):
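+        """Redirect owners and admins to the repository management page and all other users to the read-only view."""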
+        repository = repository_util.get_repository_in_tool_shed( trans.app, kwd[ 'id' ] )
+        if trans.user_is_admin() or repository.user == trans.user:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='manage_repository',
+                                                              **kwd ) )
+        else:
+            return trans.response.send_redirect( web.url_for( controller='repository',
+                                                              action='view_repository',
+                                                              **kwd ) )
diff --git a/lib/galaxy/webapps/tool_shed/controllers/upload.py b/lib/galaxy/webapps/tool_shed/controllers/upload.py
new file mode 100644
index 0000000..1d005c7
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -0,0 +1,408 @@
+import logging
+import os
+import shutil
+import tarfile
+import tempfile
+import urllib
+
+from galaxy import util
+from galaxy import web
+from galaxy.util import checkers
+from galaxy.web.base.controller import BaseUIController
+from tool_shed.util.web_util import escape
+
+from tool_shed.dependencies import attribute_handlers
+from tool_shed.galaxy_install import dependency_display
+from tool_shed.metadata import repository_metadata_manager
+from tool_shed.repository_types import util as rt_util
+from tool_shed.tools import data_table_manager
+
+from tool_shed.util import basic_util
+from tool_shed.util import commit_util
+from tool_shed.util import hg_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import repository_content_util
+from tool_shed.util import xml_util
+
+from mercurial import commands
+
+log = logging.getLogger( __name__ )
+
+
+class UploadController( BaseUIController ):
+
+    @web.expose
+    @web.require_login( 'upload', use_panels=True )
+    def upload( self, trans, **kwd ):
+        message = escape( kwd.get( 'message', '' ) )
+        status = kwd.get( 'status', 'done' )
+        commit_message = escape( kwd.get( 'commit_message', 'Uploaded' ) )
+        repository_id = kwd.get( 'repository_id', '' )
+        repository = repository_util.get_repository_in_tool_shed( trans.app, repository_id )
+        repo_dir = repository.repo_path( trans.app )
+        repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
+        uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) )
+        remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) )
+        uploaded_file = None
+        upload_point = commit_util.get_upload_point( repository, **kwd )
+        tip = repository.tip( trans.app )
+        file_data = kwd.get( 'file_data', '' )
+        url = kwd.get( 'url', '' )
+        # Part of the upload process is sending email notifications to those that have registered to
+        # receive them.  One scenario occurs when the first changeset is produced for the repository.
+        # See the suc.handle_email_alerts() method for the definition of the scenarios.
+        new_repo_alert = repository.is_new( trans.app )
+        uploaded_directory = None
+        if kwd.get( 'upload_button', False ):
+            if file_data == '' and url == '':
+                message = 'No files were entered on the upload form.'
+                status = 'error'
+                uploaded_file = None
+            elif url and url.startswith( 'hg' ):
+                # Use mercurial clone to fetch the repository; its contents will then be copied over.
+                uploaded_directory = tempfile.mkdtemp()
+                repo_url = 'http%s' % url[ len( 'hg' ): ]
+                repo_url = repo_url.encode( 'ascii', 'replace' )
+                try:
+                    commands.clone( hg_util.get_configured_ui(), repo_url, uploaded_directory )
+                except Exception as e:
+                    message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string( str( e ) )
+                    status = 'error'
+                    basic_util.remove_dir( uploaded_directory )
+                    uploaded_directory = None
+            elif url:
+                valid_url = True
+                try:
+                    stream = urllib.urlopen( url )
+                except Exception as e:
+                    valid_url = False
+                    message = 'Error uploading file via http: %s' % str( e )
+                    status = 'error'
+                    uploaded_file = None
+                if valid_url:
+                    fd, uploaded_file_name = tempfile.mkstemp()
+                    uploaded_file = open( uploaded_file_name, 'wb' )
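+                    # Stream the remote file to a temp file in CHUNK_SIZE blocks rather than reading it all into memory.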
+                    while True:
+                        chunk = stream.read( util.CHUNK_SIZE )
+                        if not chunk:
+                            break
+                        uploaded_file.write( chunk )
+                    uploaded_file.flush()
+                    uploaded_file_filename = url.split( '/' )[ -1 ]
+                    isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
+            elif file_data not in ( '', None ):
+                uploaded_file = file_data.file
+                uploaded_file_name = uploaded_file.name
+                uploaded_file_filename = os.path.split( file_data.filename )[ -1 ]
+                isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
+            if uploaded_file or uploaded_directory:
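+                # Handlers that populate missing toolshed / changeset_revision attributes in dependency
+                # definition files, plus a manager for any sample tool data tables in the upload.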
+                rdah = attribute_handlers.RepositoryDependencyAttributeHandler( trans.app, unpopulate=False )
+                tdah = attribute_handlers.ToolDependencyAttributeHandler( trans.app, unpopulate=False )
+                tdtm = data_table_manager.ToolDataTableManager( trans.app )
+                ok = True
+                isgzip = False
+                isbz2 = False
+                if uploaded_file:
+                    if uncompress_file:
+                        isgzip = checkers.is_gzip( uploaded_file_name )
+                        if not isgzip:
+                            isbz2 = checkers.is_bz2( uploaded_file_name )
+                    if isempty:
+                        tar = None
+                        istar = False
+                    else:
+                        # Determine what we have - a single file or an archive
+                        try:
+                            if ( isgzip or isbz2 ) and uncompress_file:
+                                # Open for reading with transparent compression.
+                                tar = tarfile.open( uploaded_file_name, 'r:*' )
+                            else:
+                                tar = tarfile.open( uploaded_file_name )
+                            istar = True
+                        except tarfile.ReadError as e:
+                            tar = None
+                            istar = False
+                else:
+                    # Uploaded directory
+                    istar = False
+                if istar:
+                    ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
+                        repository_content_util.upload_tar(
+                            trans,
+                            rdah,
+                            tdah,
+                            repository,
+                            tar,
+                            uploaded_file,
+                            upload_point,
+                            remove_repo_files_not_in_tar,
+                            commit_message,
+                            new_repo_alert
+                        )
+                elif uploaded_directory:
+                    ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
+                        self.upload_directory( trans,
+                                               rdah,
+                                               tdah,
+                                               repository,
+                                               uploaded_directory,
+                                               upload_point,
+                                               remove_repo_files_not_in_tar,
+                                               commit_message,
+                                               new_repo_alert )
+                else:
+                    if ( isgzip or isbz2 ) and uncompress_file:
+                        uploaded_file_filename = commit_util.uncompress( repository,
+                                                                         uploaded_file_name,
+                                                                         uploaded_file_filename,
+                                                                         isgzip=isgzip,
+                                                                         isbz2=isbz2 )
+                    if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION and \
+                            uploaded_file_filename != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
+                        ok = False
+                        message = 'Repositories of type <b>Repository suite definition</b> can only contain a single file named '
+                        message += '<b>repository_dependencies.xml</b>.'
+                    elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and \
+                            uploaded_file_filename != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
+                        ok = False
+                        message = 'Repositories of type <b>Tool dependency definition</b> can only contain a single file named '
+                        message += '<b>tool_dependencies.xml</b>.'
+                    if ok:
+                        if upload_point is not None:
+                            full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
+                        else:
+                            full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
+                        # Move some version of the uploaded file to the upload_point within the repository hierarchy.
+                        if uploaded_file_filename in [ rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME ]:
+                            # Inspect the contents of the file to see if toolshed or changeset_revision attributes
+                            # are missing and if so, set them appropriately.
+                            altered, root_elem, error_message = rdah.handle_tag_attributes( uploaded_file_name )
+                            if error_message:
+                                ok = False
+                                message = error_message
+                                status = 'error'
+                            elif altered:
+                                tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
+                                shutil.move( tmp_filename, full_path )
+                            else:
+                                shutil.move( uploaded_file_name, full_path )
+                        elif uploaded_file_filename in [ rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME ]:
+                            # Inspect the contents of the file to see if changeset_revision values are
+                            # missing and if so, set them appropriately.
+                            altered, root_elem, error_message = tdah.handle_tag_attributes( uploaded_file_name )
+                            if error_message:
+                                ok = False
+                                message = error_message
+                                status = 'error'
+                            if ok:
+                                if altered:
+                                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
+                                    shutil.move( tmp_filename, full_path )
+                                else:
+                                    shutil.move( uploaded_file_name, full_path )
+                        else:
+                            shutil.move( uploaded_file_name, full_path )
+                        if ok:
+                            # See if any admin users have chosen to receive email alerts when a repository is updated.
+                            # If so, check every uploaded file to ensure content is appropriate.
+                            check_contents = commit_util.check_file_contents_for_email_alerts( trans.app )
+                            if check_contents and os.path.isfile( full_path ):
+                                content_alert_str = commit_util.check_file_content_for_html_and_images( full_path )
+                            else:
+                                content_alert_str = ''
+                            hg_util.add_changeset( repo.ui, repo, full_path )
+                            # Convert from unicode to prevent "TypeError: array item must be char"
+                            full_path = full_path.encode( 'ascii', 'replace' )
+                            hg_util.commit_changeset( repo.ui,
+                                                      repo,
+                                                      full_path_to_changeset=full_path,
+                                                      username=trans.user.username,
+                                                      message=commit_message )
+                            if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
+                                # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
+                                # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables
+                                # dictionary.
+                                error, error_message = tdtm.handle_sample_tool_data_table_conf_file( full_path, persist=False )
+                                if error:
+                                    message = '%s<br/>%s' % ( message, error_message )
+                            # Send email alerts to subscribed users.  Restrict the alert to admin users
+                            # unless this is the repository's first downloadable revision.
+                            admin_only = len( repository.downloadable_revisions ) != 1
+                            suc.handle_email_alerts( trans.app,
+                                                     trans.request.host,
+                                                     repository,
+                                                     content_alert_str=content_alert_str,
+                                                     new_repo_alert=new_repo_alert,
+                                                     admin_only=admin_only )
+                if ok:
+                    # Update the repository files for browsing.
+                    hg_util.update_repository( repo )
+                    # Get the new repository tip.
+                    if tip == repository.tip( trans.app ):
+                        message = 'No changes to repository.  '
+                        status = 'warning'
+                    else:
+                        if ( isgzip or isbz2 ) and uncompress_file:
+                            uncompress_str = ' uncompressed and '
+                        else:
+                            uncompress_str = ' '
+                        if uploaded_directory:
+                            source_type = "repository"
+                            source = url
+                        else:
+                            source_type = "file"
+                            source = uploaded_file_filename
+                        message = "The %s <b>%s</b> has been successfully%suploaded to the repository.  " % \
+                            ( source_type, escape( source ), uncompress_str )
+                        if istar and ( undesirable_dirs_removed or undesirable_files_removed ):
+                            items_removed = undesirable_dirs_removed + undesirable_files_removed
+                            message += "  %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " % items_removed
+                            message += "were removed from the archive.  "
+                        if istar and remove_repo_files_not_in_tar and files_to_remove:
+                            if upload_point is not None:
+                                message += "  %d files were removed from the repository relative to the selected upload point '%s'.  " % \
+                                    ( len( files_to_remove ), upload_point )
+                            else:
+                                message += "  %d files were removed from the repository root.  " % len( files_to_remove )
+                        rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                                     user=trans.user,
+                                                                                     repository=repository )
+                        status, error_message = \
+                            rmm.set_repository_metadata_due_to_new_tip( trans.request.host,
+                                                                        content_alert_str=content_alert_str,
+                                                                        **kwd )
+                        if error_message:
+                            message = error_message
+                        kwd[ 'message' ] = message
+                    if repository.metadata_revisions:
+                        # A repository's metadata revisions are ordered descending by update_time, so the zeroth revision
+                        # will be the tip just after an upload.
+                        metadata_dict = repository.metadata_revisions[ 0 ].metadata
+                    else:
+                        metadata_dict = {}
+                    dd = dependency_display.DependencyDisplayer( trans.app )
+                    if str( repository.type ) not in [ rt_util.REPOSITORY_SUITE_DEFINITION,
+                                                       rt_util.TOOL_DEPENDENCY_DEFINITION ]:
+                        change_repository_type_message = rt_util.generate_message_for_repository_type_change( trans.app,
+                                                                                                              repository )
+                        if change_repository_type_message:
+                            message += change_repository_type_message
+                            status = 'warning'
+                        else:
+                            # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies
+                            # weren't loaded due to a requirement tag mismatch or some other problem.  Tool dependency
+                            # definitions can define orphan tool dependencies (no relationship to any tools contained in the
+                            # repository), so warning messages are important because orphans are always valid.  The repository
+                            # owner must be warned in case they did not intend to define an orphan dependency, but simply
+                            # provided incorrect information (tool shed, name, owner, changeset_revision) for the definition.
+                            orphan_message = dd.generate_message_for_orphan_tool_dependencies( repository, metadata_dict )
+                            if orphan_message:
+                                message += orphan_message
+                                status = 'warning'
+                    # Handle messaging for invalid tool dependencies.
+                    invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies( metadata_dict )
+                    if invalid_tool_dependencies_message:
+                        message += invalid_tool_dependencies_message
+                        status = 'error'
+                    # Handle messaging for invalid repository dependencies.
+                    invalid_repository_dependencies_message = \
+                        dd.generate_message_for_invalid_repository_dependencies( metadata_dict,
+                                                                                 error_from_tuple=True )
+                    if invalid_repository_dependencies_message:
+                        message += invalid_repository_dependencies_message
+                        status = 'error'
+                    # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+                    tdtm.reset_tool_data_tables()
+                    if uploaded_directory:
+                        basic_util.remove_dir( uploaded_directory )
+                    trans.response.send_redirect( web.url_for( controller='repository',
+                                                               action='browse_repository',
+                                                               id=repository_id,
+                                                               commit_message='Deleted selected files',
+                                                               message=message,
+                                                               status=status ) )
+                else:
+                    if uploaded_directory:
+                        basic_util.remove_dir( uploaded_directory )
+                    status = 'error'
+                # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+                tdtm.reset_tool_data_tables()
+        return trans.fill_template( '/webapps/tool_shed/repository/upload.mako',
+                                    repository=repository,
+                                    changeset_revision=tip,
+                                    url=url,
+                                    commit_message=commit_message,
+                                    uncompress_file=uncompress_file,
+                                    remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
+                                    message=message,
+                                    status=status )
+
+    def upload_directory( self, trans, rdah, tdah, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar,
+                          commit_message, new_repo_alert ):
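+        """
+        Move the contents of an uploaded directory into the repository at the
+        upload point, pruning undesirable files and directories and filling in
+        missing toolshed / changeset_revision attributes in dependency
+        definition files before committing the changes.
+        """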
+        repo_dir = repository.repo_path( trans.app )
+        undesirable_dirs_removed = 0
+        undesirable_files_removed = 0
+        if upload_point is not None:
+            full_path = os.path.abspath( os.path.join( repo_dir, upload_point ) )
+        else:
+            full_path = os.path.abspath( repo_dir )
+        filenames_in_archive = []
+        for root, dirs, files in os.walk( uploaded_directory ):
+            for uploaded_file in files:
+                relative_path = os.path.normpath( os.path.join( os.path.relpath( root, uploaded_directory ), uploaded_file ) )
+                if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                    ok = os.path.basename( uploaded_file ) == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME
+                elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION:
+                    ok = os.path.basename( uploaded_file ) == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME
+                else:
+                    ok = os.path.basename( uploaded_file ) not in commit_util.UNDESIRABLE_FILES
+                if ok:
+                    for file_path_item in relative_path.split( '/' ):
+                        if file_path_item in commit_util.UNDESIRABLE_DIRS:
+                            undesirable_dirs_removed += 1
+                            ok = False
+                            break
+                else:
+                    undesirable_files_removed += 1
+                if ok:
+                    uploaded_file_name = os.path.abspath( os.path.join( root, uploaded_file ) )
+                    if os.path.split( uploaded_file_name )[ -1 ] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
+                        # Inspect the contents of the file to see if toolshed or changeset_revision
+                        # attributes are missing and if so, set them appropriately.
+                        altered, root_elem, error_message = rdah.handle_tag_attributes( uploaded_file_name )
+                        if error_message:
+                            return False, error_message, [], '', [], []
+                        elif altered:
+                            tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
+                            shutil.move( tmp_filename, uploaded_file_name )
+                    elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
+                        # Inspect the contents of the file to see if toolshed or changeset_revision
+                        # attributes are missing and if so, set them appropriately.
+                        altered, root_elem, error_message = tdah.handle_tag_attributes( uploaded_file_name )
+                        if error_message:
+                            return False, error_message, [], '', [], []
+                        if altered:
+                            tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
+                            shutil.move( tmp_filename, uploaded_file_name )
+                    repo_path = os.path.join( full_path, relative_path )
+                    repo_basedir = os.path.normpath( os.path.join( repo_path, os.path.pardir ) )
+                    if not os.path.exists( repo_basedir ):
+                        os.makedirs( repo_basedir )
+                    if os.path.exists( repo_path ):
+                        if os.path.isdir( repo_path ):
+                            shutil.rmtree( repo_path )
+                        else:
+                            os.remove( repo_path )
+                    shutil.move( os.path.join( uploaded_directory, relative_path ), repo_path )
+                    filenames_in_archive.append( relative_path )
+        return commit_util.handle_directory_changes( trans.app,
+                                                     trans.request.host,
+                                                     trans.user.username,
+                                                     repository,
+                                                     full_path,
+                                                     filenames_in_archive,
+                                                     remove_repo_files_not_in_tar,
+                                                     new_repo_alert,
+                                                     commit_message,
+                                                     undesirable_dirs_removed,
+                                                     undesirable_files_removed )
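
The path filtering in upload_directory above reduces to a relative-path walk that
drops any file whose name, or any directory component of whose path, appears on a
blacklist. A minimal, self-contained sketch of that pattern (the blacklist values
here are illustrative stand-ins for commit_util.UNDESIRABLE_FILES and
commit_util.UNDESIRABLE_DIRS):

    import os

    UNDESIRABLE_FILES = { '.hg_archival.txt', 'hgrc', '.DS_Store' }  # illustrative
    UNDESIRABLE_DIRS = { '.hg', '.svn', '.git', 'CVS' }              # illustrative

    def acceptable_relative_paths( uploaded_directory ):
        """Yield the relative paths under uploaded_directory that pass the blacklist."""
        for root, dirs, files in os.walk( uploaded_directory ):
            for name in files:
                relative_path = os.path.normpath(
                    os.path.join( os.path.relpath( root, uploaded_directory ), name ) )
                if name in UNDESIRABLE_FILES:
                    continue
                # Reject the file if any component of its path is a blacklisted directory.
                if any( part in UNDESIRABLE_DIRS for part in relative_path.split( os.sep ) ):
                    continue
                yield relative_path
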
diff --git a/lib/galaxy/webapps/tool_shed/controllers/user.py b/lib/galaxy/webapps/tool_shed/controllers/user.py
new file mode 100644
index 0000000..5591688
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/controllers/user.py
@@ -0,0 +1,3 @@
+from galaxy.webapps.galaxy.controllers.user import User
+
+__all__ = ('User', )
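
The controller above is a pure re-export, so both import paths resolve to the same
class object; a trivial sketch, assuming a Galaxy checkout on sys.path:

    from galaxy.webapps.galaxy.controllers.user import User as GalaxyUser
    from galaxy.webapps.tool_shed.controllers.user import User as ShedUser
    assert GalaxyUser is ShedUser
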
diff --git a/lib/galaxy/webapps/tool_shed/framework/__init__.py b/lib/galaxy/webapps/tool_shed/framework/__init__.py
new file mode 100644
index 0000000..e412be1
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/framework/__init__.py
@@ -0,0 +1,3 @@
+"""
+Galaxy tool shed web application framework
+"""
diff --git a/lib/galaxy/webapps/tool_shed/framework/middleware/__init__.py b/lib/galaxy/webapps/tool_shed/framework/middleware/__init__.py
new file mode 100644
index 0000000..5b709e9
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/framework/middleware/__init__.py
@@ -0,0 +1 @@
+"""WSGI Middleware."""
diff --git a/lib/galaxy/webapps/tool_shed/framework/middleware/remoteuser.py b/lib/galaxy/webapps/tool_shed/framework/middleware/remoteuser.py
new file mode 100644
index 0000000..5315529
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/framework/middleware/remoteuser.py
@@ -0,0 +1,126 @@
+"""
+Middleware for handling $REMOTE_USER if use_remote_user is enabled.
+"""
+
+import socket
+from galaxy.util import safe_str_cmp
+
+errorpage = """
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html lang="en">
+    <head>
+        <title>Galaxy</title>
+        <style type="text/css">
+        body {
+            min-width: 500px;
+            text-align: center;
+        }
+        .errormessage {
+            font: 75%% verdana, "Bitstream Vera Sans", geneva, arial, helvetica, helve, sans-serif;
+            padding: 10px;
+            margin: 100px auto;
+            min-height: 32px;
+            max-width: 500px;
+            border: 1px solid #AA6666;
+            background-color: #FFCCCC;
+            text-align: left;
+        }
+        </style>
+    </head>
+    <body>
+        <div class="errormessage">
+            <h4>%s</h4>
+            <p>%s</p>
+        </div>
+    </body>
+</html>
+"""
+
+
+class RemoteUser( object ):
+    def __init__( self, app, maildomain=None, display_servers=None, admin_users=None, remote_user_secret_header=None ):
+        self.app = app
+        self.maildomain = maildomain
+        self.display_servers = display_servers or []
+        self.admin_users = admin_users or []
+        self.config_secret_header = remote_user_secret_header
+
+    def __call__( self, environ, start_response ):
+        environ[ 'webapp' ] = 'tool_shed'
+        # Allow display servers
+        if self.display_servers and 'REMOTE_ADDR' in environ:
+            try:
+                host = socket.gethostbyaddr( environ[ 'REMOTE_ADDR' ] )[0]
+            except ( socket.error, socket.herror, socket.gaierror, socket.timeout ):
+                # In the event of a lookup failure, deny access.
+                host = None
+            if host in self.display_servers:
+                environ[ 'HTTP_REMOTE_USER' ] = 'remote_display_server@%s' % ( self.maildomain or 'example.org' )
+                return self.app( environ, start_response )
+
+        # If the secret header is enabled, we expect upstream to send along some key
+        # in HTTP_GX_SECRET, so we'll need to compare that here to the correct value
+        #
+        # This is not an ideal location for this check.  Because it is done
+        # BEFORE the REMOTE_USER check, it is possible to probe the GX_SECRET
+        # key without having correct credentials, which is why it is merely
+        # "good enough" rather than "ideal".  The only users able to exploit
+        # this are ones with access to the local system (unless Galaxy is
+        # listening on 0.0.0.0...).  It seems improbable that an attacker
+        # with access to the server hosting Galaxy would not have access to
+        # Galaxy itself, yet would be attempting to attack the system in
+        # this way.
+        if self.config_secret_header is not None:
+            if not safe_str_cmp(environ.get('HTTP_GX_SECRET'), self.config_secret_header):
+                title = "Access to Galaxy is denied"
+                message = """
+                Galaxy is configured to authenticate users via an external
+                method (such as HTTP authentication in Apache), but an
+                incorrect shared secret key was provided by the
+                upstream (proxy) server.</p>
+                <p>Please contact your local Galaxy administrator.  The
+                variable <code>remote_user_secret</code> and
+                <code>GX_SECRET</code> header must be set before you may
+                access Galaxy.
+                """
+                return self.error( start_response, title, message )
+
+        # Apache sets REMOTE_USER to the string '(null)' when using the Rewrite* method for passing REMOTE_USER and a user is
+        # un-authenticated.  Any other possible values need to go here as well.
+        path_info = environ.get('PATH_INFO', '')
+        if 'HTTP_REMOTE_USER' in environ and environ[ 'HTTP_REMOTE_USER' ] != '(null)':
+            if not environ[ 'HTTP_REMOTE_USER' ].count( '@' ):
+                if self.maildomain is not None:
+                    environ[ 'HTTP_REMOTE_USER' ] += '@' + self.maildomain
+                else:
+                    title = "Access to this Galaxy tool shed is denied"
+                    message = """
+                        This Galaxy tool shed is configured to authenticate users via an external
+                        method (such as HTTP authentication in Apache), but only a username (not
+                        an email address) was provided by the upstream (proxy) server.  Since tool
+                        shed usernames are email addresses, a default mail domain must be set.</p>
+                        <p>The variable <code>remote_user_maildomain</code> must be set before you
+                        can access this tool shed.  Contact your local tool shed administrator.
+                    """
+                    return self.error( start_response, title, message )
+            return self.app( environ, start_response )
+        elif path_info.startswith( '/api/' ):
+            # The API handles its own authentication via keys
+            return self.app( environ, start_response )
+        elif path_info.startswith( '/user/api_keys' ):
+            # api_keys can be managed when remote_user is in use; pass the request through.
+            return self.app( environ, start_response )
+        else:
+            title = "Access to this Galaxy tool shed is denied"
+            message = """
+                This Galaxy tool shed is configured to authenticate users via an external
+                method (such as HTTP authentication in Apache), but a username was not
+                provided by the upstream (proxy) server.  This is generally due to a
+                misconfiguration in the upstream server.</p>
+                <p>Contact your local Galaxy tool shed administrator.
+            """
+            return self.error( start_response, title, message )
+
+    def error( self, start_response, title="Access denied", message="Contact your local Galaxy tool shed administrator." ):
+        start_response( '403 Forbidden', [('Content-type', 'text/html')] )
+        return [errorpage % (title, message)]
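
The GX_SECRET check in RemoteUser deliberately uses galaxy.util.safe_str_cmp, a
constant-time comparison, so response timing cannot leak the shared secret
byte-by-byte. Outside Galaxy, the standard library offers the same guarantee via
hmac.compare_digest (Python 2.7.7+); a minimal sketch of the equivalent check
(the function name and the `or ''` guard are this sketch's own):

    import hmac

    def check_gx_secret( environ, configured_secret ):
        """Return True only if the upstream proxy supplied the configured shared secret."""
        supplied = environ.get( 'HTTP_GX_SECRET' ) or ''
        # compare_digest runs in time independent of where the strings first differ.
        return hmac.compare_digest( supplied, configured_secret )

    # check_gx_secret( { 'HTTP_GX_SECRET': 's3cr3t' }, 's3cr3t' )  ->  True
    # check_gx_secret( {}, 's3cr3t' )                              ->  False
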
diff --git a/lib/galaxy/webapps/tool_shed/model/__init__.py b/lib/galaxy/webapps/tool_shed/model/__init__.py
new file mode 100644
index 0000000..8935aa2
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/__init__.py
@@ -0,0 +1,511 @@
+import logging
+import operator
+import os
+from datetime import datetime, timedelta
+from galaxy import util
+from galaxy.model.orm.now import now
+from galaxy.util import unique_id
+from galaxy.util.bunch import Bunch
+from galaxy.util.hash_util import new_secure_hash
+from galaxy.util.dictifiable import Dictifiable
+import tool_shed.repository_types.util as rt_util
+from tool_shed.dependencies.repository import relation_builder
+from tool_shed.util import metadata_util
+
+from mercurial import hg
+from mercurial import ui
+
+log = logging.getLogger( __name__ )
+
+
+class APIKeys( object ):
+    pass
+
+
+class User( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'username' )
+    dict_element_visible_keys = ( 'id', 'username' )
+
+    def __init__( self, email=None, password=None ):
+        self.email = email
+        self.password = password
+        self.external = False
+        self.deleted = False
+        self.purged = False
+        self.username = None
+        self.new_repo_alert = False
+
+    def all_roles( self ):
+        roles = [ ura.role for ura in self.roles ]
+        for group in [ uga.group for uga in self.groups ]:
+            for role in [ gra.role for gra in group.roles ]:
+                if role not in roles:
+                    roles.append( role )
+        return roles
+
+    def check_password( self, cleartext ):
+        """Check if 'cleartext' matches 'self.password' when hashed."""
+        return self.password == new_secure_hash( text_type=cleartext )
+
+    def get_disk_usage( self, nice_size=False ):
+        return 0
+
+    @property
+    def nice_total_disk_usage( self ):
+        return 0
+
+    def set_disk_usage( self, bytes ):
+        pass
+
+    total_disk_usage = property( get_disk_usage, set_disk_usage )
+
+    def set_password_cleartext( self, cleartext ):
+        """Set 'self.password' to the digest of 'cleartext'."""
+        self.password = new_secure_hash( text_type=cleartext )
+
+
+class PasswordResetToken( object ):
+    def __init__( self, user, token=None):
+        if token:
+            self.token = token
+        else:
+            self.token = unique_id()
+        self.user = user
+        self.expiration_time = now() + timedelta(hours=24)
+
+
+class Group( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'name' )
+    dict_element_visible_keys = ( 'id', 'name' )
+
+    def __init__( self, name=None ):
+        self.name = name
+        self.deleted = False
+
+
+class Role( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'name' )
+    dict_element_visible_keys = ( 'id', 'name', 'description', 'type' )
+    private_id = None
+    types = Bunch( PRIVATE='private',
+                   SYSTEM='system',
+                   USER='user',
+                   ADMIN='admin',
+                   SHARING='sharing' )
+
+    def __init__( self, name="", description="", type="system", deleted=False ):
+        self.name = name
+        self.description = description
+        self.type = type
+        self.deleted = deleted
+
+    @property
+    def is_repository_admin_role( self ):
+        # A repository admin role must always be associated with a repository. The mapper returns an
+        # empty list for those roles that have no repositories.  This method will require changes if
+        # new features are introduced that result in more than one role per repository.
+        if self.repositories:
+            return True
+        return False
+
+
+class UserGroupAssociation( object ):
+    def __init__( self, user, group ):
+        self.user = user
+        self.group = group
+
+
+class UserRoleAssociation( object ):
+    def __init__( self, user, role ):
+        self.user = user
+        self.role = role
+
+
+class GroupRoleAssociation( object ):
+    def __init__( self, group, role ):
+        self.group = group
+        self.role = role
+
+
+class RepositoryRoleAssociation( object ):
+    def __init__( self, repository, role ):
+        self.repository = repository
+        self.role = role
+
+
+class GalaxySession( object ):
+
+    def __init__( self,
+                  id=None,
+                  user=None,
+                  remote_host=None,
+                  remote_addr=None,
+                  referer=None,
+                  current_history=None,
+                  session_key=None,
+                  is_valid=False,
+                  prev_session_id=None,
+                  last_action=None ):
+        self.id = id
+        self.user = user
+        self.remote_host = remote_host
+        self.remote_addr = remote_addr
+        self.referer = referer
+        self.current_history = current_history
+        self.session_key = session_key
+        self.is_valid = is_valid
+        self.prev_session_id = prev_session_id
+        self.last_action = last_action or datetime.now()
+
+
+class Repository( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'name', 'type', 'remote_repository_url', 'homepage_url', 'description', 'user_id', 'private', 'deleted',
+                                     'times_downloaded', 'deprecated' )
+    dict_element_visible_keys = ( 'id', 'name', 'type', 'remote_repository_url', 'homepage_url', 'description', 'long_description', 'user_id', 'private',
+                                  'deleted', 'times_downloaded', 'deprecated' )
+    file_states = Bunch( NORMAL='n',
+                         NEEDS_MERGING='m',
+                         MARKED_FOR_REMOVAL='r',
+                         MARKED_FOR_ADDITION='a',
+                         NOT_TRACKED='?' )
+
+    def __init__( self, id=None, name=None, type=None, remote_repository_url=None, homepage_url=None,
+                  description=None, long_description=None, user_id=None, private=False,
+                  deleted=None, email_alerts=None, times_downloaded=0, deprecated=False ):
+        self.id = id
+        self.name = name or "Unnamed repository"
+        self.type = type
+        self.remote_repository_url = remote_repository_url
+        self.homepage_url = homepage_url
+        self.description = description
+        self.long_description = long_description
+        self.user_id = user_id
+        self.private = private
+        self.deleted = deleted
+        self.email_alerts = email_alerts
+        self.times_downloaded = times_downloaded
+        self.deprecated = deprecated
+
+    @property
+    def admin_role( self ):
+        admin_role_name = '%s_%s_admin' % ( str( self.name ), str( self.user.username ) )
+        for rra in self.roles:
+            role = rra.role
+            if str( role.name ) == admin_role_name:
+                return role
+        raise Exception( 'Repository %s owned by %s is not associated with a required administrative role.' %
+                         ( str( self.name ), str( self.user.username ) ) )
+
+    def allow_push( self, app ):
+        repo = hg.repository( ui.ui(), self.repo_path( app ) )
+        return repo.ui.config( 'web', 'allow_push' )
+
+    def can_change_type( self, app ):
+        # Allow changing the type only if the repository has no contents, has never been installed, or has
+        # never been changed from the default type.
+        if self.is_new( app ):
+            return True
+        if self.times_downloaded == 0:
+            return True
+        if self.type == rt_util.UNRESTRICTED:
+            return True
+        return False
+
+    def can_change_type_to( self, app, new_type_label ):
+        if self.type == new_type_label:
+            return False
+        if self.can_change_type( app ):
+            new_type = app.repository_types_registry.get_class_by_label( new_type_label )
+            if new_type.is_valid_for_type( app, self ):
+                return True
+        return False
+
+    def get_changesets_for_setting_metadata( self, app ):
+        type_class = self.get_type_class( app )
+        return type_class.get_changesets_for_setting_metadata( app, self )
+
+    def get_repository_dependencies( self, app, changeset, toolshed_url ):
+        # We aren't concerned with repositories of type tool_dependency_definition here if a
+        # repository_metadata record is not returned because repositories of this type will never
+        # have repository dependencies. However, if a readme file is uploaded, or some other change
+        # is made that does not create a new downloadable changeset revision but updates the existing
+        # one, we still want to be able to get repository dependencies.
+        repository_metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( app,
+                                                                                                    self,
+                                                                                                    changeset )
+        if repository_metadata:
+            metadata = repository_metadata.metadata
+            if metadata:
+                rb = relation_builder.RelationBuilder( app, self, repository_metadata, toolshed_url )
+                repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
+                if repository_dependencies:
+                    return repository_dependencies
+        return None
+
+    def get_type_class( self, app ):
+        return app.repository_types_registry.get_class_by_label( self.type )
+
+    def get_tool_dependencies( self, app, changeset_revision ):
+        repo = hg.repository( ui.ui(), self.repo_path( app ) )
+        changeset_revision = metadata_util.get_next_downloadable_changeset_revision( self, repo, changeset_revision )
+        for downloadable_revision in self.downloadable_revisions:
+            if downloadable_revision.changeset_revision == changeset_revision:
+                return downloadable_revision.metadata.get( 'tool_dependencies', {} )
+        return {}
+
+    def installable_revisions( self, app, sort_revisions=True ):
+        return metadata_util.get_metadata_revisions( self,
+                                                     hg.repository( ui.ui(), self.repo_path( app ) ),
+                                                     sort_revisions=sort_revisions )
+
+    def is_new( self, app ):
+        repo = hg.repository( ui.ui(), self.repo_path( app ) )
+        tip_ctx = repo.changectx( repo.changelog.tip() )
+        return tip_ctx.rev() < 0
+
+    def repo_path( self, app ):
+        return app.hgweb_config_manager.get_entry( os.path.join( "repos", self.user.username, self.name ) )
+
+    def revision( self, app ):
+        repo = hg.repository( ui.ui(), self.repo_path( app ) )
+        tip_ctx = repo.changectx( repo.changelog.tip() )
+        return "%s:%s" % ( str( tip_ctx.rev() ), str( repo.changectx( repo.changelog.tip() ) ) )
+
+    def set_allow_push( self, app, usernames, remove_auth='' ):
+        allow_push = util.listify( self.allow_push( app ) )
+        if remove_auth:
+            allow_push.remove( remove_auth )
+        else:
+            for username in util.listify( usernames ):
+                if username not in allow_push:
+                    allow_push.append( username )
+        allow_push = '%s\n' % ','.join( allow_push )
+        repo = hg.repository( ui.ui(), path=self.repo_path( app ) )
+        # Why doesn't the following work?
+        # repo.ui.setconfig( 'web', 'allow_push', allow_push )
+        lines = repo.opener( 'hgrc', 'rb' ).readlines()
+        fp = repo.opener( 'hgrc', 'wb' )
+        for line in lines:
+            if line.startswith( 'allow_push' ):
+                fp.write( 'allow_push = %s' % allow_push )
+            else:
+                fp.write( line )
+        fp.close()
+
+    def tip( self, app ):
+        repo = hg.repository( ui.ui(), self.repo_path( app ) )
+        return str( repo.changectx( repo.changelog.tip() ) )
+
+    def to_dict( self, view='collection', value_mapper=None ):
+        rval = super( Repository, self ).to_dict( view=view, value_mapper=value_mapper )
+        if 'user_id' in rval:
+            rval[ 'owner' ] = self.user.username
+        return rval
+
+
+class RepositoryMetadata( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'missing_test_components',
+                                     'has_repository_dependencies', 'includes_datatypes', 'includes_tools', 'includes_tool_dependencies',
+                                     'includes_tools_for_display_in_tool_panel', 'includes_workflows' )
+    dict_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'missing_test_components',
+                                  'has_repository_dependencies', 'includes_datatypes', 'includes_tools', 'includes_tool_dependencies',
+                                  'includes_tools_for_display_in_tool_panel', 'includes_workflows', 'repository_dependencies' )
+
+    def __init__( self, id=None, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False,
+                  downloadable=False, missing_test_components=None, tools_functionally_correct=False, test_install_error=False,
+                  has_repository_dependencies=False, includes_datatypes=False, includes_tools=False, includes_tool_dependencies=False,
+                  includes_workflows=False ):
+        self.id = id
+        self.repository_id = repository_id
+        self.changeset_revision = changeset_revision
+        self.metadata = metadata
+        self.tool_versions = tool_versions
+        self.malicious = malicious
+        self.downloadable = downloadable
+        self.missing_test_components = missing_test_components
+        self.has_repository_dependencies = has_repository_dependencies
+        # We don't consider the special case has_repository_dependencies_only_if_compiling_contained_td here.
+        self.includes_datatypes = includes_datatypes
+        self.includes_tools = includes_tools
+        self.includes_tool_dependencies = includes_tool_dependencies
+        self.includes_workflows = includes_workflows
+
+    @property
+    def includes_tools_for_display_in_tool_panel( self ):
+        if self.metadata:
+            tool_dicts = self.metadata.get( 'tools', [] )
+            for tool_dict in tool_dicts:
+                if tool_dict.get( 'add_to_tool_panel', True ):
+                    return True
+        return False
+
+    @property
+    def repository_dependencies( self ):
+        if self.has_repository_dependencies:
+            return [ repository_dependency for repository_dependency in self.metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] ]
+        return []
+
+
+class RepositoryReview( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'user_id', 'rating', 'deleted' )
+    dict_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'user_id', 'rating', 'deleted' )
+    approved_states = Bunch( NO='no', YES='yes' )
+
+    def __init__( self, repository_id=None, changeset_revision=None, user_id=None, rating=None, deleted=False ):
+        self.repository_id = repository_id
+        self.changeset_revision = changeset_revision
+        self.user_id = user_id
+        self.rating = rating
+        self.deleted = deleted
+
+
+class ComponentReview( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'repository_review_id', 'component_id', 'private', 'approved', 'rating', 'deleted' )
+    dict_element_visible_keys = ( 'id', 'repository_review_id', 'component_id', 'private', 'approved', 'rating', 'deleted' )
+    approved_states = Bunch( NO='no', YES='yes', NA='not_applicable' )
+
+    def __init__( self, repository_review_id=None, component_id=None, comment=None, private=False, approved=False, rating=None, deleted=False ):
+        self.repository_review_id = repository_review_id
+        self.component_id = component_id
+        self.comment = comment
+        self.private = private
+        self.approved = approved
+        self.rating = rating
+        self.deleted = deleted
+
+
+class Component( object ):
+
+    def __init__( self, name=None, description=None ):
+        self.name = name
+        self.description = description
+
+
+class ItemRatingAssociation( object ):
+
+    def __init__( self, id=None, user=None, item=None, rating=0, comment='' ):
+        self.id = id
+        self.user = user
+        self.item = item
+        self.rating = rating
+        self.comment = comment
+
+    def set_item( self, item ):
+        """ Set association's item. """
+        pass
+
+
+class RepositoryRatingAssociation( ItemRatingAssociation ):
+
+    def set_item( self, repository ):
+        self.repository = repository
+
+
+class Category( object, Dictifiable ):
+    dict_collection_visible_keys = ( 'id', 'name', 'description', 'deleted' )
+    dict_element_visible_keys = ( 'id', 'name', 'description', 'deleted' )
+
+    def __init__( self, name=None, description=None, deleted=False ):
+        self.name = name
+        self.description = description
+        self.deleted = deleted
+
+
+class RepositoryCategoryAssociation( object ):
+
+    def __init__( self, repository=None, category=None ):
+        self.repository = repository
+        self.category = category
+
+
+class Tag( object ):
+
+    def __init__( self, id=None, type=None, parent_id=None, name=None ):
+        self.id = id
+        self.type = type
+        self.parent_id = parent_id
+        self.name = name
+
+    def __str__( self ):
+        return "Tag(id=%s, type=%s, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
+
+
+class ItemTagAssociation( object ):
+
+    def __init__( self, id=None, user=None, item_id=None, tag_id=None, user_tname=None, value=None ):
+        self.id = id
+        self.user = user
+        self.item_id = item_id
+        self.tag_id = tag_id
+        self.user_tname = user_tname
+        self.value = value
+        self.user_value = None
+
+
+class PostJobAction( object ):
+
+    def __init__( self, action_type, workflow_step, output_name=None, action_arguments=None):
+        self.action_type = action_type
+        self.output_name = output_name
+        self.action_arguments = action_arguments
+        self.workflow_step = workflow_step
+
+
+class StoredWorkflowAnnotationAssociation( object ):
+    pass
+
+
+class WorkflowStepAnnotationAssociation( object ):
+    pass
+
+
+class Workflow( object ):
+
+    def __init__( self ):
+        self.user = None
+        self.name = None
+        self.has_cycles = None
+        self.has_errors = None
+        self.steps = []
+
+
+class WorkflowStep( object ):
+
+    def __init__( self ):
+        self.id = None
+        self.type = None
+        self.name = None
+        self.tool_id = None
+        self.tool_inputs = None
+        self.tool_errors = None
+        self.position = None
+        self.input_connections = []
+        self.config = None
+        self.label = None
+
+
+class WorkflowStepConnection( object ):
+
+    def __init__( self ):
+        self.output_step = None
+        self.output_name = None
+        self.input_step = None
+        self.input_name = None
+
+
+# Utility methods
+def sort_by_attr( seq, attr ):
+    """
+    Sort a sequence of objects by the named attribute.
+    Arguments:
+    seq  - the list or any sequence (including immutable ones) of objects to sort.
+    attr - the name of the attribute to sort by.
+    """
+    # Use the "Schwartzian transform"
+    # Create the auxiliary list of tuples where every i-th tuple has form
+    # (seq[i].attr, i, seq[i]) and sort it. The second item of tuple is needed not
+    # only to provide stable sorting, but mainly to eliminate comparison of objects
+    # (which can be expensive or prohibited) in case of equal attribute values.
+    intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq )
+    intermed.sort()
+    return map( operator.getitem, intermed, ( -1, ) * len( intermed ) )
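
As written, sort_by_attr only runs on Python 2: map( None, ... ) and xrange do not
exist in Python 3, and sorting the intermediate tuples relies on Python 2's
tolerance for mixed-type comparison. The same stable, attribute-only sort is
available on both interpreter lines from sorted() with a key function; a minimal
sketch:

    import operator

    def sort_by_attr( seq, attr ):
        """Stable sort of seq by the named attribute, never comparing the objects themselves."""
        # sorted() is stable and, given key=, compares only the extracted attribute
        # values, which is exactly what the Schwartzian transform above emulates.
        return sorted( seq, key=operator.attrgetter( attr ) )
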
diff --git a/lib/galaxy/webapps/tool_shed/model/mapping.py b/lib/galaxy/webapps/tool_shed/model/mapping.py
new file mode 100644
index 0000000..6c71cff
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/mapping.py
@@ -0,0 +1,331 @@
+"""
+Details of how the data model objects are mapped onto the relational database
+are encapsulated here.
+"""
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, desc, false, ForeignKey, Integer, MetaData, not_, String, Table, TEXT, true, UniqueConstraint
+from sqlalchemy.orm import backref, mapper, relation
+
+import galaxy.webapps.tool_shed.model
+import galaxy.webapps.tool_shed.util.hgweb_config
+import galaxy.webapps.tool_shed.util.shed_statistics as shed_statistics
+from galaxy.model.base import ModelMapping
+from galaxy.model.custom_types import JSONType, TrimmedString
+from galaxy.model.orm.engine_factory import build_engine
+from galaxy.model.orm.now import now
+from galaxy.webapps.tool_shed.model import APIKeys, Category, Component, ComponentReview
+from galaxy.webapps.tool_shed.model import GalaxySession, Group, GroupRoleAssociation
+from galaxy.webapps.tool_shed.model import PasswordResetToken, Repository, RepositoryCategoryAssociation
+from galaxy.webapps.tool_shed.model import RepositoryMetadata, RepositoryRatingAssociation
+from galaxy.webapps.tool_shed.model import RepositoryReview, RepositoryRoleAssociation, Role
+from galaxy.webapps.tool_shed.model import Tag, User, UserGroupAssociation, UserRoleAssociation
+from galaxy.webapps.tool_shed.security import CommunityRBACAgent
+
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+APIKeys.table = Table( "api_keys", metadata,
+                       Column( "id", Integer, primary_key=True ),
+                       Column( "create_time", DateTime, default=now ),
+                       Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                       Column( "key", TrimmedString( 32 ), index=True, unique=True ) )
+
+User.table = Table( "galaxy_user", metadata,
+                    Column( "id", Integer, primary_key=True),
+                    Column( "create_time", DateTime, default=now ),
+                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                    Column( "email", TrimmedString( 255 ), nullable=False ),
+                    Column( "username", String( 255 ), index=True ),
+                    Column( "password", TrimmedString( 40 ), nullable=False ),
+                    Column( "external", Boolean, default=False ),
+                    Column( "new_repo_alert", Boolean, default=False ),
+                    Column( "deleted", Boolean, index=True, default=False ),
+                    Column( "purged", Boolean, index=True, default=False ) )
+
+PasswordResetToken.table = Table("password_reset_token", metadata,
+                                 Column( "token", String( 32 ), primary_key=True, unique=True, index=True ),
+                                 Column( "expiration_time", DateTime ),
+                                 Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ) )
+
+Group.table = Table( "galaxy_group", metadata,
+                     Column( "id", Integer, primary_key=True ),
+                     Column( "create_time", DateTime, default=now ),
+                     Column( "update_time", DateTime, default=now, onupdate=now ),
+                     Column( "name", String( 255 ), index=True, unique=True ),
+                     Column( "deleted", Boolean, index=True, default=False ) )
+
+Role.table = Table( "role", metadata,
+                    Column( "id", Integer, primary_key=True ),
+                    Column( "create_time", DateTime, default=now ),
+                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                    Column( "name", String( 255 ), index=True, unique=True ),
+                    Column( "description", TEXT ),
+                    Column( "type", String( 40 ), index=True ),
+                    Column( "deleted", Boolean, index=True, default=False ) )
+
+UserGroupAssociation.table = Table( "user_group_association", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+UserRoleAssociation.table = Table( "user_role_association", metadata,
+                                   Column( "id", Integer, primary_key=True ),
+                                   Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                   Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+                                   Column( "create_time", DateTime, default=now ),
+                                   Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+GroupRoleAssociation.table = Table( "group_role_association", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+                                    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+RepositoryRoleAssociation.table = Table( "repository_role_association", metadata,
+                                         Column( "id", Integer, primary_key=True ),
+                                         Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                         Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+                                         Column( "create_time", DateTime, default=now ),
+                                         Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+GalaxySession.table = Table( "galaxy_session", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
+                             Column( "remote_host", String( 255 ) ),
+                             Column( "remote_addr", String( 255 ) ),
+                             Column( "referer", TEXT ),
+                             Column( "session_key", TrimmedString( 255 ), index=True, unique=True ),  # unique 128 bit random number coerced to a string
+                             Column( "is_valid", Boolean, default=False ),
+                             Column( "prev_session_id", Integer ),  # saves a reference to the previous session so we have a way to chain them together
+                             Column( "last_action", DateTime) )
+
+Repository.table = Table( "repository", metadata,
+                          Column( "id", Integer, primary_key=True ),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "name", TrimmedString( 255 ), index=True ),
+                          Column( "type", TrimmedString( 255 ), index=True ),
+                          Column( "remote_repository_url", TrimmedString( 255 ) ),
+                          Column( "homepage_url", TrimmedString( 255 ) ),
+                          Column( "description", TEXT ),
+                          Column( "long_description", TEXT ),
+                          Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                          Column( "private", Boolean, default=False ),
+                          Column( "deleted", Boolean, index=True, default=False ),
+                          Column( "email_alerts", JSONType, nullable=True ),
+                          Column( "times_downloaded", Integer ),
+                          Column( "deprecated", Boolean, default=False ) )
+
+RepositoryMetadata.table = Table( "repository_metadata", metadata,
+                                  Column( "id", Integer, primary_key=True ),
+                                  Column( "create_time", DateTime, default=now ),
+                                  Column( "update_time", DateTime, default=now, onupdate=now ),
+                                  Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                  Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+                                  Column( "metadata", JSONType, nullable=True ),
+                                  Column( "tool_versions", JSONType, nullable=True ),
+                                  Column( "malicious", Boolean, default=False ),
+                                  Column( "downloadable", Boolean, default=True ),
+                                  Column( "missing_test_components", Boolean, default=False, index=True ),
+                                  Column( "has_repository_dependencies", Boolean, default=False, index=True ),
+                                  Column( "includes_datatypes", Boolean, default=False, index=True ),
+                                  Column( "includes_tools", Boolean, default=False, index=True ),
+                                  Column( "includes_tool_dependencies", Boolean, default=False, index=True ),
+                                  Column( "includes_workflows", Boolean, default=False, index=True ) )
+
+RepositoryReview.table = Table( "repository_review", metadata,
+                                Column( "id", Integer, primary_key=True ),
+                                Column( "create_time", DateTime, default=now ),
+                                Column( "update_time", DateTime, default=now, onupdate=now ),
+                                Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+                                Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                                Column( "approved", TrimmedString( 255 ) ),
+                                Column( "rating", Integer, index=True ),
+                                Column( "deleted", Boolean, index=True, default=False ) )
+
+ComponentReview.table = Table( "component_review", metadata,
+                               Column( "id", Integer, primary_key=True ),
+                               Column( "create_time", DateTime, default=now ),
+                               Column( "update_time", DateTime, default=now, onupdate=now ),
+                               Column( "repository_review_id", Integer, ForeignKey( "repository_review.id" ), index=True ),
+                               Column( "component_id", Integer, ForeignKey( "component.id" ), index=True ),
+                               Column( "comment", TEXT ),
+                               Column( "private", Boolean, default=False ),
+                               Column( "approved", TrimmedString( 255 ) ),
+                               Column( "rating", Integer ),
+                               Column( "deleted", Boolean, index=True, default=False ) )
+
+Component.table = Table( "component", metadata,
+                         Column( "id", Integer, primary_key=True ),
+                         Column( "name", TrimmedString( 255 ) ),
+                         Column( "description", TEXT ) )
+
+RepositoryRatingAssociation.table = Table( "repository_rating_association", metadata,
+                                           Column( "id", Integer, primary_key=True ),
+                                           Column( "create_time", DateTime, default=now ),
+                                           Column( "update_time", DateTime, default=now, onupdate=now ),
+                                           Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                           Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                           Column( "rating", Integer, index=True ),
+                                           Column( "comment", TEXT ) )
+
+RepositoryCategoryAssociation.table = Table( "repository_category_association", metadata,
+                                             Column( "id", Integer, primary_key=True ),
+                                             Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                             Column( "category_id", Integer, ForeignKey( "category.id" ), index=True ) )
+
+Category.table = Table( "category", metadata,
+                        Column( "id", Integer, primary_key=True ),
+                        Column( "create_time", DateTime, default=now ),
+                        Column( "update_time", DateTime, default=now, onupdate=now ),
+                        Column( "name", TrimmedString( 255 ), index=True, unique=True ),
+                        Column( "description", TEXT ),
+                        Column( "deleted", Boolean, index=True, default=False ) )
+
+Tag.table = Table( "tag", metadata,
+                   Column( "id", Integer, primary_key=True ),
+                   Column( "type", Integer ),
+                   Column( "parent_id", Integer, ForeignKey( "tag.id" ) ),
+                   Column( "name", TrimmedString(255) ),
+                   UniqueConstraint( "name" ) )
+
+# With the tables defined we can define the mappers and setup the relationships between the model objects.
+mapper( User, User.table,
+        properties=dict( active_repositories=relation( Repository, primaryjoin=( ( Repository.table.c.user_id == User.table.c.id ) & ( not_( Repository.table.c.deleted ) ) ), order_by=( Repository.table.c.name ) ),
+                         galaxy_sessions=relation( GalaxySession, order_by=desc( GalaxySession.table.c.update_time ) ),
+                         api_keys=relation( APIKeys, backref="user", order_by=desc( APIKeys.table.c.create_time ) ) ) )
+
+mapper( PasswordResetToken, PasswordResetToken.table,
+        properties=dict( user=relation( User, backref="reset_tokens") ) )
+
+mapper( APIKeys, APIKeys.table, properties={} )
+
+mapper( Group, Group.table,
+        properties=dict( users=relation( UserGroupAssociation ) ) )
+
+mapper( Role, Role.table,
+        properties=dict(
+            repositories=relation( RepositoryRoleAssociation,
+                                   primaryjoin=( ( Role.table.c.id == RepositoryRoleAssociation.table.c.role_id ) & ( RepositoryRoleAssociation.table.c.repository_id == Repository.table.c.id ) ) ),
+            users=relation( UserRoleAssociation,
+                            primaryjoin=( ( Role.table.c.id == UserRoleAssociation.table.c.role_id ) & ( UserRoleAssociation.table.c.user_id == User.table.c.id ) ) ),
+            groups=relation( GroupRoleAssociation,
+                             primaryjoin=( ( Role.table.c.id == GroupRoleAssociation.table.c.role_id ) & ( GroupRoleAssociation.table.c.group_id == Group.table.c.id ) ) ) ) )
+
+mapper( RepositoryRoleAssociation, RepositoryRoleAssociation.table,
+        properties=dict(
+            repository=relation( Repository ),
+            role=relation( Role ) ) )
+
+mapper( UserGroupAssociation, UserGroupAssociation.table,
+        properties=dict( user=relation( User, backref="groups" ),
+                         group=relation( Group, backref="members" ) ) )
+
+mapper( UserRoleAssociation, UserRoleAssociation.table,
+        properties=dict(
+            user=relation( User, backref="roles" ),
+            non_private_roles=relation( User,
+                                        backref="non_private_roles",
+                                        primaryjoin=( ( User.table.c.id == UserRoleAssociation.table.c.user_id ) & ( UserRoleAssociation.table.c.role_id == Role.table.c.id ) & not_( Role.table.c.name == User.table.c.email ) ) ),
+            role=relation( Role ) ) )
+
+mapper( GroupRoleAssociation, GroupRoleAssociation.table,
+        properties=dict(
+            group=relation( Group, backref="roles" ),
+            role=relation( Role ) ) )
+
+mapper( GalaxySession, GalaxySession.table,
+        properties=dict( user=relation( User ) ) )
+
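+# The Tag mapper below is self-referential: remote_side=[ Tag.table.c.id ] marks
+# tag.id as the parent side of the tag.parent_id join, which is what lets the
+# children / 'parent' backref pair resolve correctly.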
+mapper( Tag, Tag.table,
+        properties=dict( children=relation(Tag, backref=backref( 'parent', remote_side=[ Tag.table.c.id ] ) ) ) )
+
+mapper( Category, Category.table,
+        properties=dict( repositories=relation( RepositoryCategoryAssociation,
+                                                secondary=Repository.table,
+                                                primaryjoin=( Category.table.c.id == RepositoryCategoryAssociation.table.c.category_id ),
+                                                secondaryjoin=( RepositoryCategoryAssociation.table.c.repository_id == Repository.table.c.id ) ) ) )
+
+mapper( Repository, Repository.table,
+        properties=dict(
+            categories=relation( RepositoryCategoryAssociation ),
+            ratings=relation( RepositoryRatingAssociation, order_by=desc( RepositoryRatingAssociation.table.c.update_time ), backref="repositories" ),
+            user=relation( User ),
+            downloadable_revisions=relation( RepositoryMetadata,
+                                             primaryjoin=( ( Repository.table.c.id == RepositoryMetadata.table.c.repository_id ) & ( RepositoryMetadata.table.c.downloadable == true() ) ),
+                                             order_by=desc( RepositoryMetadata.table.c.update_time ) ),
+            metadata_revisions=relation( RepositoryMetadata,
+                                         order_by=desc( RepositoryMetadata.table.c.update_time ) ),
+            roles=relation( RepositoryRoleAssociation ),
+            reviews=relation( RepositoryReview,
+                              primaryjoin=( ( Repository.table.c.id == RepositoryReview.table.c.repository_id ) ) ),
+            reviewers=relation( User,
+                                secondary=RepositoryReview.table,
+                                primaryjoin=( Repository.table.c.id == RepositoryReview.table.c.repository_id ),
+                                secondaryjoin=( RepositoryReview.table.c.user_id == User.table.c.id ) ) ) )
+
+mapper( RepositoryMetadata, RepositoryMetadata.table,
+        properties=dict( repository=relation( Repository ),
+                         reviews=relation( RepositoryReview,
+                                           foreign_keys=[ RepositoryMetadata.table.c.repository_id, RepositoryMetadata.table.c.changeset_revision ],
+                                           primaryjoin=( ( RepositoryMetadata.table.c.repository_id == RepositoryReview.table.c.repository_id ) & ( RepositoryMetadata.table.c.changeset_revision == RepositoryReview.table.c.changeset_revision ) ) ) ) )
+
+mapper( RepositoryReview, RepositoryReview.table,
+        properties=dict( repository=relation( Repository,
+                                              primaryjoin=( RepositoryReview.table.c.repository_id == Repository.table.c.id ) ),
+                         # Take care when using the relation below!  It should be used only when a new review is being created for a repository changeset revision.
+                         # Keep in mind that repository_metadata records can be removed from the database for certain changeset revisions when metadata is being
+                         # reset on a repository!
+                         repository_metadata=relation( RepositoryMetadata,
+                                                       foreign_keys=[ RepositoryReview.table.c.repository_id, RepositoryReview.table.c.changeset_revision ],
+                                                       primaryjoin=( ( RepositoryReview.table.c.repository_id == RepositoryMetadata.table.c.repository_id ) & ( RepositoryReview.table.c.changeset_revision == RepositoryMetadata.table.c.changeset_revision ) ),
+                                                       backref='review' ),
+                         user=relation( User, backref="repository_reviews" ),
+                         component_reviews=relation( ComponentReview,
+                                                     primaryjoin=( ( RepositoryReview.table.c.id == ComponentReview.table.c.repository_review_id ) & ( ComponentReview.table.c.deleted == false() ) ) ),
+                         private_component_reviews=relation( ComponentReview,
+                                                             primaryjoin=( ( RepositoryReview.table.c.id == ComponentReview.table.c.repository_review_id ) & ( ComponentReview.table.c.deleted == false() ) & ( ComponentReview.table.c.private == true() ) ) ) ) )
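+# Note: component_reviews above excludes deleted component reviews, while
+# private_component_reviews additionally restricts the join to private ones.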
+
+mapper( ComponentReview, ComponentReview.table,
+        properties=dict( repository_review=relation( RepositoryReview ),
+                         component=relation( Component,
+                                             primaryjoin=( ComponentReview.table.c.component_id == Component.table.c.id ) ) ) )
+
+mapper( Component, Component.table )
+
+mapper( RepositoryRatingAssociation, RepositoryRatingAssociation.table,
+        properties=dict( repository=relation( Repository ), user=relation( User ) ) )
+
+mapper( RepositoryCategoryAssociation, RepositoryCategoryAssociation.table,
+        properties=dict(
+            category=relation( Category ),
+            repository=relation( Repository ) ) )
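+
+# An illustrative sketch (not part of the upstream mappings): once the mappers
+# above are configured, related rows are reachable as plain attributes. For a
+# Repository instance repo:
+#
+#     repo.user                    # the owning User
+#     repo.categories              # RepositoryCategoryAssociation rows
+#     repo.downloadable_revisions  # RepositoryMetadata, newest first
+#     repo.reviews                 # RepositoryReview rows for this repository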
+
+
+def init( file_path, url, engine_options={}, create_tables=False ):
+    """Connect mappings to the database"""
+    # Create the database engine
+    engine = build_engine( url, engine_options )
+    # Connect the metadata to the database.
+    metadata.bind = engine
+
+    result = ModelMapping([galaxy.webapps.tool_shed.model], engine=engine)
+
+    if create_tables:
+        metadata.create_all()
+
+    result.create_tables = create_tables
+
+    # Load local tool shed security policy
+    result.security_agent = CommunityRBACAgent( result )
+    result.shed_counter = shed_statistics.ShedCounter( result )
+    result.hgweb_config_manager = galaxy.webapps.tool_shed.util.hgweb_config.HgWebConfigManager()
+    return result
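+
+
+# A minimal sketch of wiring up this module at startup; the file path and URL
+# below are illustrative assumptions, not Galaxy defaults:
+#
+#     model = init( '/srv/tool_shed/files',
+#                   'sqlite:///community.sqlite',
+#                   engine_options={},
+#                   create_tables=True )
+#     rbac = model.security_agent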
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/__init__.py b/lib/galaxy/webapps/tool_shed/model/migrate/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/check.py b/lib/galaxy/webapps/tool_shed/model/migrate/check.py
new file mode 100644
index 0000000..ba7d479
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/check.py
@@ -0,0 +1,92 @@
+import logging
+import os.path
+import sys
+
+from migrate.versioning import repository, schema
+from sqlalchemy import create_engine, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+log = logging.getLogger( __name__ )
+
+# Path to this migrate repository, relative to the Galaxy root directory
+migrate_repository_directory = os.path.dirname( __file__ ).replace( os.getcwd() + os.path.sep, '', 1 )
+migrate_repository = repository.Repository( migrate_repository_directory )
+
+
+def create_or_verify_database( url, engine_options={} ):
+    """
+    Check that the database is usable, possibly creating it if empty (this is
+    the only time we automatically create tables; otherwise we force the
+    user to do it using the management script so they can create backups).
+
+    1) Empty database --> initialize with latest version and return
+    2) Database older than migration support --> fail and require manual update
+    3) Database at state where migrate support introduced --> add version control information but make no changes (might still require manual update)
+    4) Database versioned but out of date --> fail with informative message, user must run "sh manage_db.sh upgrade"
+
+    """
+    # Create engine and metadata
+    engine = create_engine( url, **engine_options )
+    meta = MetaData( bind=engine )
+    # Try to load the galaxy_user table
+    try:
+        Table( "galaxy_user", meta, autoload=True )
+    except NoSuchTableError:
+        # No 'galaxy_user' table means a completely uninitialized database, which
+        # is fine: initialize the database in a versioned state.
+        log.info( "No database, initializing" )
+        # Database might or might not be versioned
+        try:
+            # Declare the database to be under a repository's version control
+            db_schema = schema.ControlledSchema.create( engine, migrate_repository )
+        except Exception:
+            # The database is already under version control
+            db_schema = schema.ControlledSchema( engine, migrate_repository )
+        # Apply all scripts to get to current version
+        migrate_to_current_version( engine, db_schema )
+        return
+    try:
+        Table( "migrate_version", meta, autoload=True )
+    except NoSuchTableError:
+        # The database exists but is not yet under migrate version control;
+        # add version control, starting at version 1 or 2 depending on the existing schema
+        log.info( "Adding version control to existing database" )
+        try:
+            Table( "metadata_file", meta, autoload=True )
+            schema.ControlledSchema.create( engine, migrate_repository, version=2 )
+        except NoSuchTableError:
+            schema.ControlledSchema.create( engine, migrate_repository, version=1 )
+    # Verify that the code and the DB are in sync
+    db_schema = schema.ControlledSchema( engine, migrate_repository )
+    if migrate_repository.versions.latest != db_schema.version:
+        exception_msg = "Your database has version '%d' but this code expects version '%d'.  " % ( db_schema.version, migrate_repository.versions.latest )
+        exception_msg += "Back up your database and then migrate the schema by running the following from your Galaxy installation directory:"
+        exception_msg += "\n\nsh manage_db.sh upgrade tool_shed\n"
+        raise Exception( exception_msg )
+    else:
+        log.info( "At database version %d" % db_schema.version )
+
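+# For example (hypothetical version numbers), an out-of-date database fails
+# fast with a message like:
+#
+#     Your database has version '3' but this code expects version '13'.
+#     Back up your database and then migrate the schema by running the
+#     following from your Galaxy installation directory:
+#
+#     sh manage_db.sh upgrade tool_shed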
+
+def migrate_to_current_version( engine, schema ):
+    # Changes to get to current version
+    changeset = schema.changeset( None )
+    for ver, change in changeset:
+        nextver = ver + changeset.step
+        log.info( 'Migrating %s -> %s... ' % ( ver, nextver ) )
+        old_stdout = sys.stdout
+
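+        # migrate's change scripts print progress to stdout; capture it in a
+        # buffer so it can be re-emitted through the logger afterwards.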
+        class FakeStdout( object ):
+            def __init__( self ):
+                self.buffer = []
+
+            def write( self, s ):
+                self.buffer.append( s )
+
+            def flush( self ):
+                pass
+        sys.stdout = FakeStdout()
+        try:
+            schema.runchange( ver, change, changeset.step )
+        finally:
+            for message in "".join( sys.stdout.buffer ).split( "\n" ):
+                log.info( message )
+            sys.stdout = old_stdout
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/migrate.cfg b/lib/galaxy/webapps/tool_shed/model/migrate/migrate.cfg
new file mode 100644
index 0000000..3fd7400
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/migrate.cfg
@@ -0,0 +1,20 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=Galaxy
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to 
+# change the table name in each database too. 
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the 
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the 
+# commit continues, perhaps ending successfully. 
+# Databases in this list MUST compile successfully during a commit, or the 
+# entire commit will fail. List the databases your application will actually 
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0001_initial_tables.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0001_initial_tables.py
new file mode 100644
index 0000000..62f5e4a
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0001_initial_tables.py
@@ -0,0 +1,156 @@
+"""
+Migration script to create initial tables.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Index, Integer, MetaData, String, Table, TEXT, UniqueConstraint
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
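+# (Migration scripts may run outside Galaxy's normal logging configuration,
+# since they are driven by sqlalchemy-migrate, hence the local stdout handler.)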
+
+metadata = MetaData()
+
+User_table = Table( "galaxy_user", metadata,
+                    Column( "id", Integer, primary_key=True),
+                    Column( "create_time", DateTime, default=now ),
+                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                    Column( "email", TrimmedString( 255 ), nullable=False ),
+                    Column( "username", String( 255 ), index=True ),
+                    Column( "password", TrimmedString( 40 ), nullable=False ),
+                    Column( "external", Boolean, default=False ),
+                    Column( "deleted", Boolean, index=True, default=False ),
+                    Column( "purged", Boolean, index=True, default=False ) )
+
+Group_table = Table( "galaxy_group", metadata,
+                     Column( "id", Integer, primary_key=True ),
+                     Column( "create_time", DateTime, default=now ),
+                     Column( "update_time", DateTime, default=now, onupdate=now ),
+                     Column( "name", String( 255 ), index=True, unique=True ),
+                     Column( "deleted", Boolean, index=True, default=False ) )
+
+Role_table = Table( "role", metadata,
+                    Column( "id", Integer, primary_key=True ),
+                    Column( "create_time", DateTime, default=now ),
+                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                    Column( "name", String( 255 ), index=True, unique=True ),
+                    Column( "description", TEXT ),
+                    Column( "type", String( 40 ), index=True ),
+                    Column( "deleted", Boolean, index=True, default=False ) )
+
+UserGroupAssociation_table = Table( "user_group_association", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+UserRoleAssociation_table = Table( "user_role_association", metadata,
+                                   Column( "id", Integer, primary_key=True ),
+                                   Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                   Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+                                   Column( "create_time", DateTime, default=now ),
+                                   Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+GroupRoleAssociation_table = Table( "group_role_association", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+                                    Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+                                    Column( "create_time", DateTime, default=now ),
+                                    Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+GalaxySession_table = Table( "galaxy_session", metadata,
+                             Column( "id", Integer, primary_key=True ),
+                             Column( "create_time", DateTime, default=now ),
+                             Column( "update_time", DateTime, default=now, onupdate=now ),
+                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
+                             Column( "remote_host", String( 255 ) ),
+                             Column( "remote_addr", String( 255 ) ),
+                             Column( "referer", TEXT ),
+                             Column( "session_key", TrimmedString( 255 ), index=True, unique=True ),  # unique 128 bit random number coerced to a string
+                             Column( "is_valid", Boolean, default=False ),
+                             Column( "prev_session_id", Integer ) )  # saves a reference to the previous session so we have a way to chain them together
+
+Tool_table = Table( "tool", metadata,
+                    Column( "id", Integer, primary_key=True ),
+                    Column( "guid", TrimmedString( 255 ), index=True, unique=True ),
+                    Column( "tool_id", TrimmedString( 255 ), index=True ),
+                    Column( "create_time", DateTime, default=now ),
+                    Column( "update_time", DateTime, default=now, onupdate=now ),
+                    Column( "newer_version_id", Integer, ForeignKey( "tool.id" ), nullable=True ),
+                    Column( "name", TrimmedString( 255 ), index=True ),
+                    Column( "description", TEXT ),
+                    Column( "user_description", TEXT ),
+                    Column( "version", TrimmedString( 255 ) ),
+                    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                    Column( "external_filename", TEXT ),
+                    Column( "deleted", Boolean, index=True, default=False ) )
+
+Event_table = Table( 'event', metadata,
+                     Column( "id", Integer, primary_key=True ),
+                     Column( "create_time", DateTime, default=now ),
+                     Column( "update_time", DateTime, default=now, onupdate=now ),
+                     Column( "state", TrimmedString( 255 ), index=True ),
+                     Column( "comment", TEXT ) )
+
+ToolEventAssociation_table = Table( "tool_event_association", metadata,
+                                    Column( "id", Integer, primary_key=True ),
+                                    Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                    Column( "event_id", Integer, ForeignKey( "event.id" ), index=True ) )
+
+Category_table = Table( "category", metadata,
+                        Column( "id", Integer, primary_key=True ),
+                        Column( "create_time", DateTime, default=now ),
+                        Column( "update_time", DateTime, default=now, onupdate=now ),
+                        Column( "name", TrimmedString( 255 ), index=True, unique=True ),
+                        Column( "description", TEXT ),
+                        Column( "deleted", Boolean, index=True, default=False ) )
+
+ToolCategoryAssociation_table = Table( "tool_category_association", metadata,
+                                       Column( "id", Integer, primary_key=True ),
+                                       Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                       Column( "category_id", Integer, ForeignKey( "category.id" ), index=True ) )
+
+Tag_table = Table( "tag", metadata,
+                   Column( "id", Integer, primary_key=True ),
+                   Column( "type", Integer ),
+                   Column( "parent_id", Integer, ForeignKey( "tag.id" ) ),
+                   Column( "name", TrimmedString(255) ),
+                   UniqueConstraint( "name" ) )
+
+ToolTagAssociation_table = Table( "tool_tag_association", metadata,
+                                  Column( "id", Integer, primary_key=True ),
+                                  Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                  Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                  Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                  Column( "user_tname", TrimmedString(255), index=True),
+                                  Column( "value", TrimmedString(255), index=True),
+                                  Column( "user_value", TrimmedString(255), index=True) )
+
+ToolAnnotationAssociation_table = Table( "tool_annotation_association", metadata,
+                                         Column( "id", Integer, primary_key=True ),
+                                         Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                         Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                         Column( "annotation", TEXT ) )
+
+
+def upgrade( migrate_engine ):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.create_all()
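+    # MySQL cannot index an unbounded TEXT column; cap the indexed key length
+    # at 767 bytes, the InnoDB index-prefix limit on older row formats.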
+    Index( 'ix_tool_annotation_association_annotation', ToolAnnotationAssociation_table.c.annotation, mysql_length=767 ).create()
+
+
+def downgrade( migrate_engine ):
+    # Operations to reverse the above upgrade go here.
+    pass
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0002_add_tool_suite_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0002_add_tool_suite_column.py
new file mode 100644
index 0000000..6da8335
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0002_add_tool_suite_column.py
@@ -0,0 +1,51 @@
+"""
+Migration script to add the suite column to the tool table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    print __doc__
+    metadata.reflect()
+    # Create and initialize the suite column in the tool table.
+    Tool_table = Table( "tool", metadata, autoload=True )
+    c = Column( "suite", Boolean, default=False, index=True )
+    try:
+        # Create
+        c.create( Tool_table, index_name='ix_tool_suite')
+        assert c is Tool_table.c.suite
+        # Initialize.
+        if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+            default_false = "0"
+        elif migrate_engine.name in ['postgresql', 'postgres']:
+            default_false = "false"
+        else:
+            raise Exception( 'Unable to set default column value for unknown database type: %s' % migrate_engine.name )
+        migrate_engine.execute( "UPDATE tool SET suite=%s" % default_false )
+    except Exception as e:
+        print "Adding suite column to the tool table failed: %s" % str( e )
+        log.debug( "Adding suite column to the tool table failed: %s" % str( e ) )
+
+
+def downgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop suite column from tool table.
+    Tool_table = Table( "tool", metadata, autoload=True )
+    try:
+        Tool_table.c.suite.drop()
+    except Exception as e:
+        print "Dropping column suite from the tool table failed: %s" % str( e )
+        log.debug( "Dropping column suite from the tool table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0003_review_and_review_association_tables.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0003_review_and_review_association_tables.py
new file mode 100644
index 0000000..b58e6e6
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0003_review_and_review_association_tables.py
@@ -0,0 +1,49 @@
+"""
+Adds the tool_rating_association table, enabling tools to be rated along with review comments.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+ToolRatingAssociation_table = Table( "tool_rating_association", metadata,
+                                     Column( "id", Integer, primary_key=True ),
+                                     Column( "create_time", DateTime, default=now ),
+                                     Column( "update_time", DateTime, default=now, onupdate=now ),
+                                     Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                     Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                     Column( "rating", Integer, index=True ),
+                                     Column( "comment", TEXT ) )
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    # Load existing tables
+    metadata.reflect()
+    try:
+        ToolRatingAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_rating_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    # Load existing tables
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        ToolRatingAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_rating_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0004_repository_tables.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0004_repository_tables.py
new file mode 100644
index 0000000..c0527ee
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0004_repository_tables.py
@@ -0,0 +1,83 @@
+"""
+Adds the repository, repository_rating_association and repository_category_association tables.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+Repository_table = Table( "repository", metadata,
+                          Column( "id", Integer, primary_key=True ),
+                          Column( "create_time", DateTime, default=now ),
+                          Column( "update_time", DateTime, default=now, onupdate=now ),
+                          Column( "name", TrimmedString( 255 ), index=True ),
+                          Column( "description", TEXT ),
+                          Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                          Column( "private", Boolean, default=False ),
+                          Column( "deleted", Boolean, index=True, default=False ) )
+
+RepositoryRatingAssociation_table = Table( "repository_rating_association", metadata,
+                                           Column( "id", Integer, primary_key=True ),
+                                           Column( "create_time", DateTime, default=now ),
+                                           Column( "update_time", DateTime, default=now, onupdate=now ),
+                                           Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                           Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                           Column( "rating", Integer, index=True ),
+                                           Column( "comment", TEXT ) )
+
+RepositoryCategoryAssociation_table = Table( "repository_category_association", metadata,
+                                             Column( "id", Integer, primary_key=True ),
+                                             Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                             Column( "category_id", Integer, ForeignKey( "category.id" ), index=True ) )
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    # Load existing tables
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Repository_table.create()
+    except Exception as e:
+        log.debug( "Creating repository table failed: %s" % str( e ) )
+    try:
+        RepositoryRatingAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating repository_rating_association table failed: %s" % str( e ) )
+    try:
+        RepositoryCategoryAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating repository_category_association table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    # Load existing tables
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        Repository_table.drop()
+    except Exception as e:
+        log.debug( "Dropping repository table failed: %s" % str( e ) )
+    try:
+        RepositoryRatingAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping repository_rating_association table failed: %s" % str( e ) )
+    try:
+        RepositoryCategoryAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping repository_category_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0005_drop_tool_related_tables.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0005_drop_tool_related_tables.py
new file mode 100644
index 0000000..6fe967b
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0005_drop_tool_related_tables.py
@@ -0,0 +1,195 @@
+"""
+Drops the tool, tool_category_association, event, tool_event_association, tool_rating_association,
+tool_tag_association and tool_annotation_association tables since they are no longer used in the
+next-gen tool shed.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    # Load existing tables
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Load and then drop the tool_category_association table
+    try:
+        ToolCategoryAssociation_table = Table( "tool_category_association", metadata, autoload=True )
+    except NoSuchTableError:
+        log.debug( "Failed loading table tool_category_association" )
+    try:
+        ToolCategoryAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_category_association table failed: %s" % str( e ) )
+    # Load and then drop the tool_event_association table
+    try:
+        ToolEventAssociation_table = Table( "tool_event_association", metadata, autoload=True )
+    except NoSuchTableError:
+        log.debug( "Failed loading table tool_event_association" )
+    try:
+        ToolEventAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_event_association table failed: %s" % str( e ) )
+    # Load and then drop the tool_rating_association table
+    try:
+        ToolRatingAssociation_table = Table( "tool_rating_association", metadata, autoload=True )
+    except NoSuchTableError:
+        log.debug( "Failed loading table tool_rating_association" )
+    try:
+        ToolRatingAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_rating_association table failed: %s" % str( e ) )
+    # Load and then drop the tool_tag_association table
+    try:
+        ToolTagAssociation_table = Table( "tool_tag_association", metadata, autoload=True )
+    except NoSuchTableError:
+        log.debug( "Failed loading table tool_tag_association" )
+    try:
+        ToolTagAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_tag_association table failed: %s" % str( e ) )
+    # Load and then drop the tool_annotation_association table
+    try:
+        ToolAnnotationAssociation_table = Table( "tool_annotation_association", metadata, autoload=True )
+    except NoSuchTableError:
+        log.debug( "Failed loading table tool_annotation_association" )
+    try:
+        ToolAnnotationAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool_annotation_association table failed: %s" % str( e ) )
+    # Load and then drop the event table
+    try:
+        Event_table = Table( "event", metadata, autoload=True )
+    except NoSuchTableError:
+        log.debug( "Failed loading table event" )
+    try:
+        Event_table.drop()
+    except Exception as e:
+        log.debug( "Dropping event table failed: %s" % str( e ) )
+    # Load and then drop the tool table
+    try:
+        Tool_table = Table( "tool", metadata, autoload=True )
+    except NoSuchTableError:
+        log.debug( "Failed loading table tool" )
+    try:
+        Tool_table.drop()
+    except Exception as e:
+        log.debug( "Dropping tool table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    # Load existing tables
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # We've lost all of our data, so downgrading is useless. However, we'll
+    # at least re-create the dropped tables.
+    Event_table = Table( 'event', metadata,
+                         Column( "id", Integer, primary_key=True ),
+                         Column( "create_time", DateTime, default=now ),
+                         Column( "update_time", DateTime, default=now, onupdate=now ),
+                         Column( "state", TrimmedString( 255 ), index=True ),
+                         Column( "comment", TEXT ) )
+
+    Tool_table = Table( "tool", metadata,
+                        Column( "id", Integer, primary_key=True ),
+                        Column( "guid", TrimmedString( 255 ), index=True, unique=True ),
+                        Column( "tool_id", TrimmedString( 255 ), index=True ),
+                        Column( "create_time", DateTime, default=now ),
+                        Column( "update_time", DateTime, default=now, onupdate=now ),
+                        Column( "newer_version_id", Integer, ForeignKey( "tool.id" ), nullable=True ),
+                        Column( "name", TrimmedString( 255 ), index=True ),
+                        Column( "description", TEXT ),
+                        Column( "user_description", TEXT ),
+                        Column( "version", TrimmedString( 255 ) ),
+                        Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                        Column( "external_filename", TEXT ),
+                        Column( "deleted", Boolean, index=True, default=False ),
+                        Column( "suite", Boolean, default=False, index=True ) )
+
+    ToolCategoryAssociation_table = Table( "tool_category_association", metadata,
+                                           Column( "id", Integer, primary_key=True ),
+                                           Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                           Column( "category_id", Integer, ForeignKey( "category.id" ), index=True ) )
+
+    ToolEventAssociation_table = Table( "tool_event_association", metadata,
+                                        Column( "id", Integer, primary_key=True ),
+                                        Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                        Column( "event_id", Integer, ForeignKey( "event.id" ), index=True ) )
+
+    ToolRatingAssociation_table = Table( "tool_rating_association", metadata,
+                                         Column( "id", Integer, primary_key=True ),
+                                         Column( "create_time", DateTime, default=now ),
+                                         Column( "update_time", DateTime, default=now, onupdate=now ),
+                                         Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                         Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                         Column( "rating", Integer, index=True ),
+                                         Column( "comment", TEXT ) )
+
+    ToolTagAssociation_table = Table( "tool_tag_association", metadata,
+                                      Column( "id", Integer, primary_key=True ),
+                                      Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                      Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
+                                      Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                      Column( "user_tname", TrimmedString(255), index=True),
+                                      Column( "value", TrimmedString(255), index=True),
+                                      Column( "user_value", TrimmedString(255), index=True) )
+
+    ToolAnnotationAssociation_table = Table( "tool_annotation_association", metadata,
+                                             Column( "id", Integer, primary_key=True ),
+                                             Column( "tool_id", Integer, ForeignKey( "tool.id" ), index=True ),
+                                             Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                                             Column( "annotation", TEXT, index=True) )
+
+    # Create the event table
+    try:
+        Event_table.create()
+    except Exception as e:
+        log.debug( "Creating event table failed: %s" % str( e ) )
+    # Create the tool table
+    try:
+        Tool_table.create()
+    except Exception as e:
+        log.debug( "Creating tool table failed: %s" % str( e ) )
+    # Create the tool_category_association table
+    try:
+        ToolCategoryAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_category_association table failed: %s" % str( e ) )
+    # Create the tool_event_association table
+    try:
+        ToolEventAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_event_association table failed: %s" % str( e ) )
+    # Create the tool_rating_association table
+    try:
+        ToolRatingAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_rating_association table failed: %s" % str( e ) )
+    # Create the tool_tag_association table
+    try:
+        ToolTagAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_tag_association table failed: %s" % str( e ) )
+    # Create the tool_annotation_association table
+    try:
+        ToolAnnotationAssociation_table.create()
+    except Exception as e:
+        log.debug( "Creating tool_annotation_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0006_add_email_alerts_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0006_add_email_alerts_column.py
new file mode 100644
index 0000000..ef526a5
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0006_add_email_alerts_column.py
@@ -0,0 +1,48 @@
+"""
+Migration script to add the email_alerts column to the repository table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Column, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create and initialize the email_alerts column in the repository table.
+    Repository_table = Table( "repository", metadata, autoload=True )
+    c = Column( "email_alerts", JSONType, nullable=True )
+    try:
+        # Create
+        c.create( Repository_table )
+        assert c is Repository_table.c.email_alerts
+    except Exception as e:
+        print "Adding email_alerts column to the repository table failed: %s" % str( e )
+        log.debug( "Adding email_alerts column to the repository table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop email_alerts column from repository table.
+    Repository_table = Table( "repository", metadata, autoload=True )
+    try:
+        Repository_table.c.email_alerts.drop()
+    except Exception as e:
+        print "Dropping column email_alerts from the repository table failed: %s" % str( e )
+        log.debug( "Dropping column email_alerts from the repository table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0007_add_long_description_times_downloaded_columns.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0007_add_long_description_times_downloaded_columns.py
new file mode 100644
index 0000000..46d5e89
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0007_add_long_description_times_downloaded_columns.py
@@ -0,0 +1,64 @@
+"""
+Migration script to add the long_description and times_downloaded columns to the repository table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Column, Integer, MetaData, Table, TEXT
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create and initialize the long_description and times_downloaded columns in the repository table.
+    Repository_table = Table( "repository", metadata, autoload=True )
+    c = Column( "long_description", TEXT )
+    try:
+        # Create
+        c.create( Repository_table )
+        assert c is Repository_table.c.long_description
+    except Exception as e:
+        print "Adding long_description column to the repository table failed: %s" % str( e )
+        log.debug( "Adding long_description column to the repository table failed: %s" % str( e ) )
+
+    c = Column( "times_downloaded", Integer )
+    try:
+        # Create
+        c.create( Repository_table )
+        assert c is Repository_table.c.times_downloaded
+    except Exception as e:
+        print "Adding times_downloaded column to the repository table failed: %s" % str( e )
+        log.debug( "Adding times_downloaded column to the repository table failed: %s" % str( e ) )
+
+    cmd = "UPDATE repository SET long_description = ''"
+    migrate_engine.execute( cmd )
+    cmd = "UPDATE repository SET times_downloaded = 0"
+    migrate_engine.execute( cmd )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop the long_description and times_downloaded columns from the repository table.
+    Repository_table = Table( "repository", metadata, autoload=True )
+    try:
+        Repository_table.c.long_description.drop()
+    except Exception as e:
+        print "Dropping column long_description from the repository table failed: %s" % str( e )
+        log.debug( "Dropping column long_description from the repository table failed: %s" % str( e ) )
+    try:
+        Repository_table.c.times_downloaded.drop()
+    except Exception as e:
+        print "Dropping column times_downloaded from the repository table failed: %s" % str( e )
+        log.debug( "Dropping column times_downloaded from the repository table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0008_add_repository_metadata_table.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0008_add_repository_metadata_table.py
new file mode 100644
index 0000000..6810095
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0008_add_repository_metadata_table.py
@@ -0,0 +1,53 @@
+"""
+Migration script to add the repository_metadata table.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType, TrimmedString
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+RepositoryMetadata_table = Table( "repository_metadata", metadata,
+                                  Column( "id", Integer, primary_key=True ),
+                                  Column( "create_time", DateTime, default=now ),
+                                  Column( "update_time", DateTime, default=now, onupdate=now ),
+                                  Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                  Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+                                  Column( "metadata", JSONType, nullable=True ) )
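+# (Presumably the JSONType metadata column stores the metadata dictionary
+# generated for the changeset revision, e.g. the tools and workflows found in
+# the repository at that revision.)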
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create repository_metadata table.
+    try:
+        RepositoryMetadata_table.create()
+    except Exception as e:
+        print str(e)
+        log.debug( "Creating repository_metadata table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop repository_metadata table.
+    try:
+        RepositoryMetadata_table.drop()
+    except Exception as e:
+        print str(e)
+        log.debug( "Dropping repository_metadata table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0009_add_malicious_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0009_add_malicious_column.py
new file mode 100644
index 0000000..b447953
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0009_add_malicious_column.py
@@ -0,0 +1,51 @@
+"""
+Migration script to add the malicious column to the repository_metadata table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade( migrate_engine ):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create and initialize the malicious column in the repository_metadata table.
+    Repository_metadata_table = Table( "repository_metadata", metadata, autoload=True )
+    c = Column( "malicious", Boolean, default=False, index=True )
+    try:
+        # Create
+        c.create( Repository_metadata_table, index_name="ix_repository_metadata_malicious")
+        assert c is Repository_metadata_table.c.malicious
+        # Initialize.
+        if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+            default_false = "0"
+        elif migrate_engine.name in ['postgresql', 'postgres']:
+            default_false = "false"
+        else:
+            raise Exception( 'Unable to set default column value for unknown database type: %s' % migrate_engine.name )
+        migrate_engine.execute( "UPDATE repository_metadata SET malicious=%s" % default_false )
+    except Exception as e:
+        print "Adding malicious column to the repository_metadata table failed: %s" % str( e )
+        log.debug( "Adding malicious column to the repository_metadata table failed: %s" % str( e ) )
+
+
+def downgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop malicious column from repository_metadata table.
+    Repository_metadata_table = Table( "repository_metadata", metadata, autoload=True )
+    try:
+        Repository_metadata_table.c.malicious.drop()
+    except Exception as e:
+        print "Dropping column malicious from the repository_metadata table failed: %s" % str( e )
+        log.debug( "Dropping column malicious from the repository_metadata table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0010_add_new_repo_alert_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0010_add_new_repo_alert_column.py
new file mode 100644
index 0000000..fbbe62e
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0010_add_new_repo_alert_column.py
@@ -0,0 +1,53 @@
+"""
+Migration script to add the new_repo_alert column to the galaxy_user table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create and initialize the new_repo_alert column in the galaxy_user table.
+    User_table = Table( "galaxy_user", metadata, autoload=True )
+    c = Column( "new_repo_alert", Boolean, default=False, index=True )
+    try:
+        # Create
+        c.create( User_table, index_name="ix_galaxy_user_new_repo_alert")
+        assert c is User_table.c.new_repo_alert
+        # Initialize.
+        if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+            default_false = "0"
+        elif migrate_engine.name in ['postgresql', 'postgres']:
+            default_false = "false"
+        else:
+            raise Exception( 'Unable to set default column value for unknown database type: %s' % migrate_engine.name )
+        migrate_engine.execute( "UPDATE galaxy_user SET new_repo_alert=%s" % default_false )
+    except Exception as e:
+        print "Adding new_repo_alert column to the galaxy_user table failed: %s" % str( e )
+        log.debug( "Adding new_repo_alert column to the galaxy_user table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop new_repo_alert column from galaxy_user table.
+    User_table = Table( "galaxy_user", metadata, autoload=True )
+    try:
+        User_table.c.new_repo_alert.drop()
+    except Exception as e:
+        print "Dropping column new_repo_alert from the galaxy_user table failed: %s" % str( e )
+        log.debug( "Dropping column new_repo_alert from the galaxy_user table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0011_add_tool_versions_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0011_add_tool_versions_column.py
new file mode 100644
index 0000000..0303b68
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0011_add_tool_versions_column.py
@@ -0,0 +1,47 @@
+"""
+Migration script to add the tool_versions column to the repository_metadata table.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    c = Column( "tool_versions", JSONType, nullable=True )
+    try:
+        # Create
+        c.create( RepositoryMetadata_table )
+        assert c is RepositoryMetadata_table.c.tool_versions
+    except Exception as e:
+        print "Adding tool_versions column to the repository_metadata table failed: %s" % str( e )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop the tool_versions column from the repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    try:
+        RepositoryMetadata_table.c.tool_versions.drop()
+    except Exception as e:
+        print "Dropping column tool_versions from the repository_metadata table failed: %s" % str( e )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0012_add_downloadable_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0012_add_downloadable_column.py
new file mode 100644
index 0000000..6f0a840
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0012_add_downloadable_column.py
@@ -0,0 +1,49 @@
+"""
+Migration script to add the downloadable column to the repository_metadata table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create and initialize the downloadable column in the repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    c = Column( "downloadable", Boolean, default=True )
+    try:
+        # Create
+        c.create( RepositoryMetadata_table )
+        assert c is RepositoryMetadata_table.c.downloadable
+        # Initialize.
+        if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+            default_true = "1"
+        elif migrate_engine.name in ['postgresql', 'postgres']:
+            default_true = "true"
+        else:
+            raise Exception( 'Unable to set default column value for unknown database type: %s' % migrate_engine.name )
+        migrate_engine.execute( "UPDATE repository_metadata SET downloadable=%s" % default_true )
+    except Exception as e:
+        print "Adding downloadable column to the repository_metadata table failed: %s" % str( e )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop downloadable column from repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    try:
+        RepositoryMetadata_table.c.downloadable.drop()
+    except Exception as e:
+        print "Dropping column downloadable from the repository_metadata table failed: %s" % str( e )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0013_add_review_tables.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0013_add_review_tables.py
new file mode 100644
index 0000000..d153ff6
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0013_add_review_tables.py
@@ -0,0 +1,220 @@
+"""
+Migration script to add the repository_review, component_review and component tables and the Repository Reviewer group and role.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+IUC = 'Intergalactic Utilities Commission'
+NOW = datetime.datetime.utcnow
+REVIEWER = 'Repository Reviewer'
+ROLE_TYPE = 'system'
+
+
+def nextval(migrate_engine, table, col='id' ):
+    if migrate_engine.name in ['postgresql', 'postgres']:
+        return "nextval('%s_%s_seq')" % ( table, col )
+    elif migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+        return "null"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def localtimestamp(migrate_engine):
+    if migrate_engine.name in ['postgresql', 'postgres'] or migrate_engine.name == 'mysql':
+        return "LOCALTIMESTAMP"
+    elif migrate_engine.name == 'sqlite':
+        return "current_date || ' ' || current_time"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def boolean_false(migrate_engine):
+    if migrate_engine.name in ['postgresql', 'postgres'] or migrate_engine.name == 'mysql':
+        return False
+    elif migrate_engine.name == 'sqlite':
+        return 0
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
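+# For example, on PostgreSQL the INSERT built in upgrade() below becomes
+#   INSERT INTO component VALUES (nextval('component_id_seq'), ...)
+# while MySQL and SQLite receive "null" for the id column so that
+# autoincrement assigns it.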
+
+RepositoryReview_table = Table( "repository_review", metadata,
+                                Column( "id", Integer, primary_key=True ),
+                                Column( "create_time", DateTime, default=NOW ),
+                                Column( "update_time", DateTime, default=NOW, onupdate=NOW ),
+                                Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+                                Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
+                                Column( "approved", TrimmedString( 255 ) ),
+                                Column( "rating", Integer, index=True ),
+                                Column( "deleted", Boolean, index=True, default=False ) )
+
+ComponentReview_table = Table( "component_review", metadata,
+                               Column( "id", Integer, primary_key=True ),
+                               Column( "create_time", DateTime, default=NOW ),
+                               Column( "update_time", DateTime, default=NOW, onupdate=NOW ),
+                               Column( "repository_review_id", Integer, ForeignKey( "repository_review.id" ), index=True ),
+                               Column( "component_id", Integer, ForeignKey( "component.id" ), index=True ),
+                               Column( "comment", TEXT ),
+                               Column( "private", Boolean, default=False ),
+                               Column( "approved", TrimmedString( 255 ) ),
+                               Column( "rating", Integer ),
+                               Column( "deleted", Boolean, index=True, default=False ) )
+
+Component_table = Table( "component", metadata,
+                         Column( "id", Integer, primary_key=True ),
+                         Column( "name", TrimmedString( 255 ) ),
+                         Column( "description", TEXT ) )
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create new review tables.
+    try:
+        Component_table.create()
+    except Exception as e:
+        print str(e)
+        log.debug( "Creating component table failed: %s" % str( e ) )
+    try:
+        RepositoryReview_table.create()
+    except Exception as e:
+        print str(e)
+        log.debug( "Creating repository_review table failed: %s" % str( e ) )
+    try:
+        ComponentReview_table.create()
+    except Exception as e:
+        print str(e)
+        log.debug( "Creating component_review table failed: %s" % str( e ) )
+    # Insert default Component values.
+    names = [ 'Data types', 'Functional tests', 'README', 'Tool dependencies', 'Tools', 'Workflows' ]
+    descriptions = [ 'Proprietary datatypes defined in a file named datatypes_conf.xml included in the repository',
+                     'Functional tests defined in each tool config included in the repository along with test data files',
+                     'An appropriately named file included in the repository that contains installation information or 3rd-party tool dependency licensing information',
+                     'Tool dependencies defined in a file named tool_dependencies.xml included in the repository for contained tools',
+                     'Galaxy tools included in the repository',
+                     'Exported Galaxy workflows included in the repository' ]
+    for tup in zip( names, descriptions ):
+        name, description = tup
+        cmd = "INSERT INTO component VALUES ("
+        cmd += "%s, " % nextval(migrate_engine, 'component' )
+        cmd += "'%s', " % name
+        cmd += "'%s' " % description
+        cmd += ");"
+        migrate_engine.execute( cmd )
+    # Insert a REVIEWER role into the role table.
+    cmd = "INSERT INTO role VALUES ("
+    cmd += "%s, " % nextval(migrate_engine, 'role' )
+    cmd += "%s, " % localtimestamp(migrate_engine)
+    cmd += "%s, " % localtimestamp(migrate_engine)
+    cmd += "'%s', " % REVIEWER
+    cmd += "'A user or group member with this role can review repositories.', "
+    cmd += "'%s', " % ROLE_TYPE
+    cmd += "%s" % boolean_false(migrate_engine)
+    cmd += ");"
+    migrate_engine.execute( cmd )
+    # Get the id of the REVIEWER role.
+    cmd = "SELECT id FROM role WHERE name = '%s' and type = '%s';" % ( REVIEWER, ROLE_TYPE )
+    row = migrate_engine.execute( cmd ).fetchone()
+    if row:
+        role_id = row[ 0 ]
+    else:
+        role_id = None
+    # Insert an IUC group into the galaxy_group table.
+    cmd = "INSERT INTO galaxy_group VALUES ("
+    cmd += "%s, " % nextval(migrate_engine, 'galaxy_group' )
+    cmd += "%s, " % localtimestamp(migrate_engine)
+    cmd += "%s, " % localtimestamp(migrate_engine)
+    cmd += "'%s', " % IUC
+    cmd += "%s" % boolean_false(migrate_engine)
+    cmd += ");"
+    migrate_engine.execute( cmd )
+    # Get the id of the IUC group.
+    cmd = "SELECT id FROM galaxy_group WHERE name = '%s';" % ( IUC )
+    row = migrate_engine.execute( cmd ).fetchone()
+    if row:
+        group_id = row[ 0 ]
+    else:
+        group_id = None
+    if group_id and role_id:
+        # Insert a group_role_association for the IUC group and the REVIEWER role.
+        cmd = "INSERT INTO group_role_association VALUES ("
+        cmd += "%s, " % nextval(migrate_engine, 'group_role_association' )
+        cmd += "%d, " % int( group_id )
+        cmd += "%d, " % int( role_id )
+        cmd += "%s, " % localtimestamp(migrate_engine)
+        cmd += "%s " % localtimestamp(migrate_engine)
+        cmd += ");"
+        migrate_engine.execute( cmd )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop review tables.
+    try:
+        ComponentReview_table.drop()
+    except Exception as e:
+        print str(e)
+        log.debug( "Dropping component_review table failed: %s" % str( e ) )
+    try:
+        RepositoryReview_table.drop()
+    except Exception as e:
+        print str(e)
+        log.debug( "Dropping repository_review table failed: %s" % str( e ) )
+    try:
+        Component_table.drop()
+    except Exception as e:
+        print str(e)
+        log.debug( "Dropping component table failed: %s" % str( e ) )
+    # Get the id of the IUC group.
+    cmd = "SELECT id FROM galaxy_group WHERE name = '%s';" % ( IUC )
+    row = migrate_engine.execute( cmd ).fetchone()
+    if row:
+        group_id = row[ 0 ]
+    else:
+        group_id = None
+    # Get the id of the REVIEWER role.
+    cmd = "SELECT id FROM role WHERE name = '%s' and type = '%s';" % ( REVIEWER, ROLE_TYPE )
+    row = migrate_engine.execute( cmd ).fetchone()
+    if row:
+        role_id = row[ 0 ]
+    else:
+        role_id = None
+    # See if we have at least 1 user
+    cmd = "SELECT * FROM galaxy_user;"
+    users = migrate_engine.execute( cmd ).fetchall()
+    if role_id:
+        if users:
+            # Delete all UserRoleAssociations for the REVIEWER role.
+            cmd = "DELETE FROM user_role_association WHERE role_id = %d;" % int( role_id )
+            migrate_engine.execute( cmd )
+        if group_id:
+            # Delete all UserGroupAssociations for members of the IUC group.
+            cmd = "DELETE FROM user_group_association WHERE group_id = %d;" % int( group_id )
+            migrate_engine.execute( cmd )
+            # Delete all GroupRoleAssociations for the IUC group and the REVIEWER role.
+            cmd = "DELETE FROM group_role_association WHERE group_id = %d and role_id = %d;" % ( int( group_id ), int( role_id ) )
+            migrate_engine.execute( cmd )
+            # Delete the IUC group from the galaxy_group table.
+            cmd = "DELETE FROM galaxy_group WHERE id = %d;" % int( group_id )
+            migrate_engine.execute( cmd )
+        # Delete the REVIEWER role from the role table.
+        cmd = "DELETE FROM role WHERE id = %d;" % int( role_id )
+        migrate_engine.execute( cmd )
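
Because nextval() returns "null" for SQLite and MySQL, the component INSERT assembled in upgrade() above leaves the id to the autoincrement machinery. A small sketch reproducing the string assembly for one seeded row, with the engine-specific fragment hardcoded to the SQLite/MySQL case:

    name = 'Data types'
    description = 'Proprietary datatypes defined in a file named datatypes_conf.xml included in the repository'
    cmd = "INSERT INTO component VALUES ("
    cmd += "%s, " % "null"   # nextval() result on SQLite/MySQL
    cmd += "'%s', " % name
    cmd += "'%s' " % description
    cmd += ");"
    print(cmd)

Note that the values are interpolated directly into the SQL string, which is only safe here because none of the seeded names or descriptions contain a single quote.
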
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0014_add_deprecated_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0014_add_deprecated_column.py
new file mode 100644
index 0000000..6e227c3
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0014_add_deprecated_column.py
@@ -0,0 +1,51 @@
+"""
+Migration script to add the deprecated column to the repository table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create and initialize the deprecated column in the repository table.
+    Repository_table = Table( "repository", metadata, autoload=True )
+    c = Column( "deprecated", Boolean, default=False )
+    try:
+        # Create
+        c.create( Repository_table )
+        assert c is Repository_table.c.deprecated
+        # Initialize.
+        if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+            default_false = "0"
+        elif migrate_engine.name in ['postgresql', 'postgres']:
+            default_false = "false"
+        migrate_engine.execute( "UPDATE repository SET deprecated=%s" % default_false )
+    except Exception as e:
+        print "Adding deprecated column to the repository table failed: %s" % str( e )
+        log.debug( "Adding deprecated column to the repository table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop the deprecated column from the repository table.
+    Repository_table = Table( "repository", metadata, autoload=True )
+    try:
+        Repository_table.c.deprecated.drop()
+    except Exception as e:
+        print "Dropping column deprecated from the repository table failed: %s" % str( e )
+        log.debug( "Dropping column deprecated from the repository table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0015_add_api_keys_table.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0015_add_api_keys_table.py
new file mode 100644
index 0000000..e2ecd9c
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0015_add_api_keys_table.py
@@ -0,0 +1,49 @@
+"""
+Migration script to add the api_keys table.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+now = datetime.datetime.utcnow
+
+metadata = MetaData()
+
+APIKeys_table = Table( "api_keys", metadata,
+                       Column( "id", Integer, primary_key=True ),
+                       Column( "create_time", DateTime, default=now ),
+                       Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+                       Column( "key", TrimmedString( 32 ), index=True, unique=True ) )
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        APIKeys_table.create()
+    except Exception as e:
+        log.debug( "Creating api_keys table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    # Load existing tables
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        APIKeys_table.drop()
+    except Exception as e:
+        log.debug( "Dropping api_keys table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0016_add_do_not_test_tools_functionally_correct_errors_columns.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0016_add_do_not_test_tools_functionally_correct_errors_columns.py
new file mode 100644
index 0000000..d1af2ad
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0016_add_do_not_test_tools_functionally_correct_errors_columns.py
@@ -0,0 +1,99 @@
+"""
+Migration script to add the tool_test_errors, do_not_test, tools_functionally_correct, and time_last_tested columns to the repository_metadata table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create and initialize tools_functionally_correct, do_not_test, time_last_tested, and tool_test_errors columns in repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    c = Column( "tools_functionally_correct", Boolean, default=False, index=True )
+    try:
+        # Create tools_functionally_correct column
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_tfc" )
+        assert c is RepositoryMetadata_table.c.tools_functionally_correct
+        # Initialize.
+        if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+            default_false = "0"
+        elif migrate_engine.name in ['postgresql', 'postgres']:
+            default_false = "false"
+        migrate_engine.execute( "UPDATE repository_metadata SET tools_functionally_correct=%s" % default_false )
+    except Exception as e:
+        print "Adding tools_functionally_correct column to the repository_metadata table failed: %s" % str( e )
+        log.debug( "Adding tools_functionally_correct column to the repository_metadata table failed: %s" % str( e ) )
+    c = Column( "do_not_test", Boolean, default=False, index=True )
+    try:
+        # Create do_not_test column
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_dnt")
+        assert c is RepositoryMetadata_table.c.do_not_test
+        # Initialize.
+        if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+            default_false = "0"
+        elif migrate_engine.name in ['postgresql', 'postgres']:
+            default_false = "false"
+        migrate_engine.execute( "UPDATE repository_metadata SET do_not_test=%s" % default_false )
+    except Exception as e:
+        print "Adding do_not_test column to the repository_metadata table failed: %s" % str( e )
+        log.debug( "Adding do_not_test column to the repository_metadata table failed: %s" % str( e ) )
+    c = Column( "time_last_tested", DateTime, default=None, nullable=True )
+    try:
+        # Create time_last_tested column
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_tlt")
+        assert c is RepositoryMetadata_table.c.time_last_tested
+    except Exception as e:
+        print "Adding time_last_tested column to the repository_metadata table failed: %s" % str( e )
+        log.debug( "Adding time_last_tested column to the repository_metadata table failed: %s" % str( e ) )
+    c = Column( "tool_test_errors", JSONType, nullable=True )
+    try:
+        # Create tool_test_errors column
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_tte")
+        assert c is RepositoryMetadata_table.c.tool_test_errors
+    except Exception as e:
+        print "Adding tool_test_errors column to the repository_metadata table failed: %s" % str( e )
+        log.debug( "Adding tool_test_errors column to the repository_metadata table failed: %s" % str( e ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop tool_test_errors, time_last_tested, do_not_test, and tools_functionally_correct columns from repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    try:
+        RepositoryMetadata_table.c.tool_test_errors.drop()
+    except Exception as e:
+        print "Dropping column tool_test_errors from the repository_metadata table failed: %s" % str( e )
+        log.debug( "Dropping column tool_test_errors from the repository_metadata table failed: %s" % str( e ) )
+    try:
+        RepositoryMetadata_table.c.time_last_tested.drop()
+    except Exception as e:
+        print "Dropping column time_last_tested from the repository_metadata table failed: %s" % str( e )
+        log.debug( "Dropping column time_last_tested from the repository_metadata table failed: %s" % str( e ) )
+    try:
+        RepositoryMetadata_table.c.do_not_test.drop()
+    except Exception as e:
+        print "Dropping column do_not_test from the repository_metadata table failed: %s" % str( e )
+        log.debug( "Dropping column do_not_test from the repository_metadata table failed: %s" % str( e ) )
+    try:
+        RepositoryMetadata_table.c.tools_functionally_correct.drop()
+    except Exception as e:
+        print "Dropping column tools_functionally_correct from the repository_metadata table failed: %s" % str( e )
+        log.debug( "Dropping column tools_functionally_correct from the repository_metadata table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0017_add_galaxy_utility_columns_to_repository_metadata_table.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0017_add_galaxy_utility_columns_to_repository_metadata_table.py
new file mode 100644
index 0000000..5ee29e8
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0017_add_galaxy_utility_columns_to_repository_metadata_table.py
@@ -0,0 +1,113 @@
+"""
+Migration script to add the includes_datatypes, has_repository_dependencies, includes_tools, includes_tool_dependencies and includes_workflows
+columns to the repository_metadata table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Initialize.
+    if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+        default_false = "0"
+    elif migrate_engine.name in ['postgres', 'postgresql']:
+        default_false = "false"
+    # Create and initialize the includes_datatypes, has_repository_dependencies, includes_tools, includes_tool_dependencies and includes_workflows columns in the repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+
+    # Create the includes_datatypes column.
+    c = Column( "includes_datatypes", Boolean, default=False, index=True )
+    try:
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_inc_datatypes")
+        assert c is RepositoryMetadata_table.c.includes_datatypes
+        migrate_engine.execute( "UPDATE repository_metadata SET includes_datatypes=%s" % default_false )
+    except Exception as e:
+        print "Adding includes_datatypes column to the repository_metadata table failed: %s" % str( e )
+
+    # Create the has_repository_dependencies column.
+    c = Column( "has_repository_dependencies", Boolean, default=False, index=True )
+    try:
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_has_repo_deps")
+        assert c is RepositoryMetadata_table.c.has_repository_dependencies
+        migrate_engine.execute( "UPDATE repository_metadata SET has_repository_dependencies=%s" % default_false )
+    except Exception as e:
+        print "Adding has_repository_dependencies column to the repository_metadata table failed: %s" % str( e )
+
+    # Create includes_tools column
+    c = Column( "includes_tools", Boolean, default=False, index=True )
+    try:
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_inc_tools")
+        assert c is RepositoryMetadata_table.c.includes_tools
+        migrate_engine.execute( "UPDATE repository_metadata SET includes_tools=%s" % default_false )
+    except Exception as e:
+        print "Adding includes_tools column to the repository_metadata table failed: %s" % str( e )
+
+    # Create includes_tool_dependencies column
+    c = Column( "includes_tool_dependencies", Boolean, default=False, index=True )
+    try:
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_inc_tool_deps")
+        assert c is RepositoryMetadata_table.c.includes_tool_dependencies
+        migrate_engine.execute( "UPDATE repository_metadata SET includes_tool_dependencies=%s" % default_false )
+    except Exception as e:
+        print "Adding includes_tool_dependencies column to the repository_metadata table failed: %s" % str( e )
+
+    # Create includes_workflows column
+    c = Column( "includes_workflows", Boolean, default=False, index=True )
+    try:
+        c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_inc_workflows")
+        assert c is RepositoryMetadata_table.c.includes_workflows
+        migrate_engine.execute( "UPDATE repository_metadata SET includes_workflows=%s" % default_false )
+    except Exception as e:
+        print "Adding includes_workflows column to the repository_metadata table failed: %s" % str( e )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop the includes_workflows, includes_tool_dependencies, includes_tools, has_repository_dependencies and includes_datatypes columns from the repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+
+    # Drop the includes_workflows column.
+    try:
+        RepositoryMetadata_table.c.includes_workflows.drop()
+    except Exception as e:
+        print "Dropping column includes_workflows from the repository_metadata table failed: %s" % str( e )
+
+    # Drop the includes_tool_dependencies column.
+    try:
+        RepositoryMetadata_table.c.includes_tool_dependencies.drop()
+    except Exception as e:
+        print "Dropping column includes_tool_dependencies from the repository_metadata table failed: %s" % str( e )
+
+    # Drop the includes_tools column.
+    try:
+        RepositoryMetadata_table.c.includes_tools.drop()
+    except Exception as e:
+        print "Dropping column includes_tools from the repository_metadata table failed: %s" % str( e )
+
+    # Drop the has_repository_dependencies column.
+    try:
+        RepositoryMetadata_table.c.has_repository_dependencies.drop()
+    except Exception as e:
+        print "Dropping column has_repository_dependencies from the repository_metadata table failed: %s" % str( e )
+
+    # Drop the includes_datatypes column.
+    try:
+        RepositoryMetadata_table.c.includes_datatypes.drop()
+    except Exception as e:
+        print "Dropping column includes_datatypes from the repository_metadata table failed: %s" % str( e )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0018_add_repository_metadata_flag_columns.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0018_add_repository_metadata_flag_columns.py
new file mode 100644
index 0000000..c9e0fa7
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0018_add_repository_metadata_flag_columns.py
@@ -0,0 +1,92 @@
+"""
+Migration script to alter the repository_metadata table by dropping the tool_test_errors column and adding the
+tool_test_results and missing_test_components columns.
+"""
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import JSONType
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Initialize.
+    if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+        default_false = "0"
+    elif migrate_engine.name in ['postgresql', 'postgres']:
+        default_false = "false"
+
+    try:
+        RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    except NoSuchTableError:
+        RepositoryMetadata_table = None
+        log.debug( "Failed loading table repository_metadata." )
+
+    if RepositoryMetadata_table is not None:
+        # Drop the tool_test_errors column from the repository_metadata table as it is poorly named.  It will be replaced with the new
+        # tool_test_results column.
+        try:
+            col = RepositoryMetadata_table.c.tool_test_errors
+            col.drop()
+        except Exception as e:
+            log.debug( "Dropping column 'tool_test_errors' from repository_metadata table failed: %s" % ( str( e ) ) )
+
+        # Create the tool_test_results column to replace the ill-named tool_test_errors column just dropped above.
+        c = Column( "tool_test_results", JSONType, nullable=True )
+        try:
+            c.create( RepositoryMetadata_table )
+            assert c is RepositoryMetadata_table.c.tool_test_results
+        except Exception as e:
+            print "Adding tool_test_results column to the repository_metadata table failed: %s" % str( e )
+
+        # Create the missing_test_components column.
+        c = Column( "missing_test_components", Boolean, default=False, index=True )
+        try:
+            c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_mtc")
+            assert c is RepositoryMetadata_table.c.missing_test_components
+            migrate_engine.execute( "UPDATE repository_metadata SET missing_test_components=%s" % default_false )
+        except Exception as e:
+            print "Adding missing_test_components column to the repository_metadata table failed: %s" % str( e )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop missing_test_components and tool_test_results from the repository_metadata table and add tool_test_errors to the repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+
+    # Drop the missing_test_components column.
+    try:
+        RepositoryMetadata_table.c.missing_test_components.drop()
+    except Exception as e:
+        print "Dropping column missing_test_components from the repository_metadata table failed: %s" % str( e )
+
+    # Drop the tool_test_results column.
+    try:
+        RepositoryMetadata_table.c.tool_test_results.drop()
+    except Exception as e:
+        print "Dropping column tool_test_results from the repository_metadata table failed: %s" % str( e )
+
+    # Create the tool_test_errors column.
+    c = Column( "tool_test_errors", JSONType, nullable=True )
+    try:
+        c.create( RepositoryMetadata_table )
+        assert c is RepositoryMetadata_table.c.tool_test_errors
+    except Exception as e:
+        print "Adding tool_test_errors column to the repository_metadata table failed: %s" % str( e )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0019_add_skip_tool_test_table_and_test_install_error_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0019_add_skip_tool_test_table_and_test_install_error_column.py
new file mode 100644
index 0000000..d32d1bd
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0019_add_skip_tool_test_table_and_test_install_error_column.py
@@ -0,0 +1,83 @@
+"""
+Migration script to add the skip_tool_test table and add the test_install_error column to the repository_metadata table.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
+from sqlalchemy.exc import NoSuchTableError
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+now = datetime.datetime.utcnow
+
+metadata = MetaData()
+
+SkipToolTest_table = Table( "skip_tool_test", metadata,
+                            Column( "id", Integer, primary_key=True ),
+                            Column( "create_time", DateTime, default=now ),
+                            Column( "update_time", DateTime, default=now, onupdate=now ),
+                            Column( "repository_metadata_id", Integer, ForeignKey( "repository_metadata.id" ), index=True ),
+                            Column( "initial_changeset_revision", TrimmedString( 255 ), index=True ),
+                            Column( "comment", TEXT ) )
+
+
+def upgrade( migrate_engine ):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Initialize.
+    if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+        default_false = "0"
+    elif migrate_engine.name in [ 'postgresql', 'postgres' ]:
+        default_false = "false"
+
+    try:
+        RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    except NoSuchTableError:
+        RepositoryMetadata_table = None
+        log.debug( "Failed loading table repository_metadata." )
+
+    if RepositoryMetadata_table is not None:
+        # Create the test_install_error column.
+        c = Column( "test_install_error", Boolean, default=False, index=True )
+        try:
+            c.create( RepositoryMetadata_table, index_name="ix_repository_metadata_ttie")
+            assert c is RepositoryMetadata_table.c.test_install_error
+            migrate_engine.execute( "UPDATE repository_metadata SET test_install_error=%s" % default_false )
+        except Exception as e:
+            print "Adding test_install_error column to the repository_metadata table failed: %s" % str( e )
+
+    # Create skip_tool_test table.
+    try:
+        SkipToolTest_table.create()
+    except Exception as e:
+        print "Creating the skip_tool_test table failed: %s" % str( e )
+
+
+def downgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    # Drop the skip_tool_test table.
+    try:
+        SkipToolTest_table.drop()
+    except Exception as e:
+        print "Dropping the skip_tool_test table failed: %s" % str( e )
+
+    # Drop test_install_error column from the repository_metadata table.
+    RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+    try:
+        RepositoryMetadata_table.c.test_install_error.drop()
+    except Exception as e:
+        print "Dropping column test_install_error from the repository_metadata table failed: %s" % str( e )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0020_add_repository_type_column.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0020_add_repository_type_column.py
new file mode 100644
index 0000000..8936cc4
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0020_add_repository_type_column.py
@@ -0,0 +1,46 @@
+"""Migration script to add the type column to the repository table."""
+import logging
+import sys
+
+from sqlalchemy import Column, MetaData, Table
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade( migrate_engine ):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    Repository_table = Table( "repository", metadata, autoload=True )
+    c = Column( "type", TrimmedString( 255 ), index=True )
+    try:
+        # Create
+        c.create( Repository_table, index_name="ix_repository_type" )
+        assert c is Repository_table.c.type
+    except Exception as e:
+        print "Adding type column to the repository table failed: %s" % str( e )
+    # Update the type column to have the default unrestricted value.
+    cmd = "UPDATE repository SET type = 'unrestricted'"
+    migrate_engine.execute( cmd )
+
+
+def downgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop type column from repository table.
+    Repository_table = Table( "repository", metadata, autoload=True )
+    try:
+        Repository_table.c.type.drop()
+    except Exception as e:
+        print "Dropping column type from the repository table failed: %s" % str( e )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0021_change_repository_type_value.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0021_change_repository_type_value.py
new file mode 100644
index 0000000..ae7bfee
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0021_change_repository_type_value.py
@@ -0,0 +1,32 @@
+"""Migration script to change repository.type column value from generic to unrestricted."""
+import logging
+import sys
+
+from sqlalchemy import MetaData
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade( migrate_engine ):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Update the type column to have the default unrestricted value.
+    cmd = "UPDATE repository SET type = 'unrestricted' WHERE type = 'generic'"
+    migrate_engine.execute( cmd )
+
+
+def downgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Update the type column to have the default generic value.
+    cmd = "UPDATE repository SET type = 'generic' WHERE type = 'unrestricted'"
+    migrate_engine.execute( cmd )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0022_add_repository_admin_roles.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0022_add_repository_admin_roles.py
new file mode 100644
index 0000000..09440fe
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0022_add_repository_admin_roles.py
@@ -0,0 +1,163 @@
+"""
+Migration script to create the repository_role_association table, insert name-spaced
+repository administrative roles into the role table and associate each repository and
+owner with the appropriate name-spaced role.
+"""
+import datetime
+import logging
+import sys
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, Table
+from sqlalchemy.exc import NoSuchTableError
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+NOW = datetime.datetime.utcnow
+ROLE_TYPE = 'system'
+
+RepositoryRoleAssociation_table = Table( "repository_role_association", metadata,
+                                         Column( "id", Integer, primary_key=True ),
+                                         Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
+                                         Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
+                                         Column( "create_time", DateTime, default=NOW ),
+                                         Column( "update_time", DateTime, default=NOW, onupdate=NOW ) )
+
+
+def nextval( migrate_engine, table, col='id' ):
+    if migrate_engine.name in [ 'postgresql', 'postgres' ]:
+        return "nextval('%s_%s_seq')" % ( table, col )
+    elif migrate_engine.name in [ 'mysql', 'sqlite' ]:
+        return "null"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def localtimestamp( migrate_engine ):
+    if migrate_engine.name in [ 'postgresql', 'postgres', 'mysql' ]:
+        return "LOCALTIMESTAMP"
+    elif migrate_engine.name == 'sqlite':
+        return "current_date || ' ' || current_time"
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def boolean_false( migrate_engine ):
+    if migrate_engine.name in [ 'postgresql', 'postgres', 'mysql' ]:
+        return False
+    elif migrate_engine.name == 'sqlite':
+        return 0
+    else:
+        raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+
+def upgrade( migrate_engine ):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Create the new repository_role_association table.
+    try:
+        RepositoryRoleAssociation_table.create()
+    except Exception as e:
+        print str(e)
+        log.debug( "Creating repository_role_association table failed: %s" % str( e ) )
+    # Select the list of repositories and associated public user names for their owners.
+    user_ids = []
+    repository_ids = []
+    role_names = []
+    cmd = 'SELECT repository.id, repository.name, repository.user_id, galaxy_user.username FROM repository, galaxy_user WHERE repository.user_id = galaxy_user.id;'
+    for row in migrate_engine.execute( cmd ):
+        repository_id = row[ 0 ]
+        name = row[ 1 ]
+        user_id = row[ 2 ]
+        username = row[ 3 ]
+        repository_ids.append( int( repository_id ) )
+        role_names.append( '%s_%s_admin' % ( str( name ), str( username ) ) )
+        user_ids.append( int( user_id ) )
+    # Insert a new record into the role table for each new role.
+    for tup in zip( repository_ids, user_ids, role_names ):
+        repository_id, user_id, role_name = tup
+        cmd = "INSERT INTO role VALUES ("
+        cmd += "%s, " % nextval( migrate_engine, 'role' )
+        cmd += "%s, " % localtimestamp( migrate_engine )
+        cmd += "%s, " % localtimestamp( migrate_engine )
+        cmd += "'%s', " % role_name
+        cmd += "'A user or group member with this role can administer this repository.', "
+        cmd += "'%s', " % ROLE_TYPE
+        cmd += "%s" % boolean_false( migrate_engine )
+        cmd += ");"
+        migrate_engine.execute( cmd )
+        # Get the id of the new role.
+        cmd = "SELECT id FROM role WHERE name = '%s' and type = '%s';" % ( role_name, ROLE_TYPE )
+        row = migrate_engine.execute( cmd ).fetchone()
+        if row:
+            role_id = row[ 0 ]
+        else:
+            role_id = None
+        if role_id:
+            # Create a repository_role_association record to associate the repository with the new role.
+            cmd = "INSERT INTO repository_role_association VALUES ("
+            cmd += "%s, " % nextval( migrate_engine, 'repository_role_association' )
+            cmd += "%d, " % int( repository_id )
+            cmd += "%d, " % int( role_id )
+            cmd += "%s, " % localtimestamp( migrate_engine )
+            cmd += "%s " % localtimestamp( migrate_engine )
+            cmd += ");"
+            migrate_engine.execute( cmd )
+            # Create a user_role_association record to associate the repository owner with the new role.
+            cmd = "INSERT INTO user_role_association VALUES ("
+            cmd += "%s, " % nextval( migrate_engine, 'user_role_association' )
+            cmd += "%d, " % int( user_id )
+            cmd += "%d, " % int( role_id )
+            cmd += "%s, " % localtimestamp( migrate_engine )
+            cmd += "%s " % localtimestamp( migrate_engine )
+            cmd += ");"
+            migrate_engine.execute( cmd )
+
+
+def downgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Determine the list of roles to delete by first selecting the list of repositories and associated
+    # public user names for their owners.
+    role_names = []
+    cmd = 'SELECT name, username FROM repository, galaxy_user WHERE repository.user_id = galaxy_user.id;'
+    for row in migrate_engine.execute( cmd ):
+        name = row[ 0 ]
+        username = row[ 1 ]
+        role_names.append( '%s_%s_admin' % ( str( name ), str( username ) ) )
+    # Delete each role as well as all users associated with each role.
+    for role_name in role_names:
+        # Select the id of the record associated with the current role_name from the role table.
+        cmd = "SELECT id, name FROM role WHERE name = '%s';" % role_name
+        row = migrate_engine.execute( cmd ).fetchone()
+        if row:
+            role_id = row[ 0 ]
+        else:
+            role_id = None
+        if role_id:
+            # Delete all user_role_association records for the current role.
+            cmd = "DELETE FROM user_role_association WHERE role_id = %d;" % int( role_id )
+            migrate_engine.execute( cmd )
+            # Delete all repository_role_association records for the current role.
+            cmd = "DELETE FROM repository_role_association WHERE role_id = %d;" % int( role_id )
+            migrate_engine.execute( cmd )
+            # Delete the role from the role table.
+            cmd = "DELETE FROM role WHERE id = %d;" % int( role_id )
+            migrate_engine.execute( cmd )
+    # Drop the repository_role_association table.
+    try:
+        RepositoryRoleAssociation_table = Table( "repository_role_association", metadata, autoload=True )
+    except NoSuchTableError:
+        log.debug( "Failed loading table repository_role_association" )
+    try:
+        RepositoryRoleAssociation_table.drop()
+    except Exception as e:
+        log.debug( "Dropping repository_role_association table failed: %s" % str( e ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0023_add_repository_url_and_hompeage_url.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0023_add_repository_url_and_hompeage_url.py
new file mode 100644
index 0000000..dac6733
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0023_add_repository_url_and_hompeage_url.py
@@ -0,0 +1,49 @@
+"""
+Migration script to add the remote_repository_url and homepage_url
+columns to the repository table.
+"""
+import logging
+import sys
+
+from sqlalchemy import Column, MetaData, Table
+
+from galaxy.model.custom_types import TrimmedString
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData()
+
+
+def upgrade( migrate_engine ):
+    print __doc__
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    Repository_table = Table( "repository", metadata, autoload=True )
+    c_remote = Column( "remote_repository_url", TrimmedString( 255 ) )
+    c_homepage = Column( "homepage_url", TrimmedString( 255 ) )
+    try:
+        # Create
+        c_remote.create( Repository_table )
+        c_homepage.create( Repository_table )
+        assert c_remote is Repository_table.c.remote_repository_url
+        assert c_homepage is Repository_table.c.homepage_url
+    except Exception as e:
+        print "Adding remote_repository_url and homepage_url columns to the repository table failed: %s" % str( e )
+
+
+def downgrade( migrate_engine ):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    # Drop the remote_repository_url and homepage_url columns from the repository table.
+    Repository_table = Table( "repository", metadata, autoload=True )
+    try:
+        Repository_table.c.remote_repository_url.drop()
+        Repository_table.c.homepage_url.drop()
+    except Exception as e:
+        print "Dropping columns remote_repository_url and homepage_url from the repository table failed: %s" % str( e )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0024_password_reset.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0024_password_reset.py
new file mode 100644
index 0000000..63cacb8
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0024_password_reset.py
@@ -0,0 +1,37 @@
+"""
+Migration script for the password reset table
+"""
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, MetaData, String, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+PasswordResetToken_table = Table("password_reset_token", metadata,
+                                 Column( "token", String( 32 ), primary_key=True, unique=True, index=True ),
+                                 Column( "expiration_time", DateTime ),
+                                 Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ))
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print __doc__
+    metadata.reflect()
+    try:
+        PasswordResetToken_table.create()
+    except Exception as e:
+        print str(e)
+        log.exception("Creating %s table failed: %s" % (PasswordResetToken_table.name, str( e ) ) )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+    try:
+        PasswordResetToken_table.drop()
+    except Exception as e:
+        print str(e)
+        log.exception("Dropping %s table failed: %s" % (PasswordResetToken_table.name, str( e ) ) )
diff --git a/lib/galaxy/webapps/tool_shed/model/migrate/versions/0025_session_timeout.py b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0025_session_timeout.py
new file mode 100644
index 0000000..fc63674
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/model/migrate/versions/0025_session_timeout.py
@@ -0,0 +1,45 @@
+"""
+Migration script to add session update time (used for timeouts)
+"""
+import datetime
+import logging
+
+from sqlalchemy import Column, DateTime, MetaData, Table
+
+now = datetime.datetime.utcnow
+log = logging.getLogger( __name__ )
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    print __doc__
+    metadata.reflect()
+
+    lastaction_column = Column( "last_action", DateTime )
+    __add_column( lastaction_column, "galaxy_session", metadata )
+
+
+def downgrade(migrate_engine):
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    __drop_column( "last_action", "galaxy_session", metadata )
+
+
+def __add_column(column, table_name, metadata, **kwds):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        column.create( table, **kwds )
+    except Exception as e:
+        print str(e)
+        log.exception( "Adding column %s failed." % column)
+
+
+def __drop_column( column_name, table_name, metadata ):
+    try:
+        table = Table( table_name, metadata, autoload=True )
+        getattr( table.c, column_name ).drop()
+    except Exception as e:
+        print str(e)
+        log.exception( "Dropping column %s failed." % column_name )
diff --git a/lib/galaxy/webapps/tool_shed/search/__init__.py b/lib/galaxy/webapps/tool_shed/search/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/webapps/tool_shed/search/repo_search.py b/lib/galaxy/webapps/tool_shed/search/repo_search.py
new file mode 100644
index 0000000..1129ab2
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/search/repo_search.py
@@ -0,0 +1,125 @@
+"""Module for searching the toolshed repositories"""
+from galaxy import exceptions
+from galaxy.exceptions import ObjectNotFound
+import logging
+log = logging.getLogger( __name__ )
+
+import whoosh.index
+from whoosh import scoring
+from whoosh.fields import Schema, STORED, TEXT
+from whoosh.qparser import MultifieldParser
+
+schema = Schema(
+    id=STORED,
+    name=TEXT( field_boost=1.7, stored=True ),
+    description=TEXT( field_boost=1.5, stored=True ),
+    long_description=TEXT( stored=True ),
+    homepage_url=TEXT( stored=True ),
+    remote_repository_url=TEXT( stored=True ),
+    repo_owner_username=TEXT( stored=True ),
+    times_downloaded=STORED,
+    approved=STORED,
+    last_updated=STORED,
+    full_last_updated=STORED )
+
+
+class RepoWeighting( scoring.BM25F ):
+    """
+    Adjust the BM25F scoring model through the final method.
+    source: https://groups.google.com/forum/#!msg/whoosh/1AKNbW8R_l8/XySW0OecH6gJ
+    """
+    use_final = True
+
+    def final( self, searcher, docnum, score ):
+        # Arbitrary for now
+        reasonable_hits = 100.0
+
+        stored_times_downloaded = searcher.stored_fields( docnum )[ "times_downloaded" ]
+        if not isinstance( stored_times_downloaded, ( int, long ) ):
+            times_downloaded = int( stored_times_downloaded )
+        else:
+            times_downloaded = stored_times_downloaded
+        # Use at least 1 so a repository with zero downloads does not zero out its score.
+        if times_downloaded == 0:
+            times_downloaded = 1
+        popularity_modifier = ( times_downloaded / reasonable_hits )
+
+        cert_modifier = 2 if searcher.stored_fields( docnum )[ "approved" ] == 'yes' else 1
+
+        # Adjust the computed score for this document by the popularity
+        # and by the certification level.
+        final_score = score * popularity_modifier * cert_modifier
+        return final_score
+
+
+class RepoSearch( object ):
+
+    def search( self, trans, search_term, page, page_size, boosts ):
+        """
+        Perform the search on the given search_term
+
+        :param search_term: unicode encoded string with the search term(s)
+        :param boosts: namedtuple containing custom boosts for searchfields, see api/repositories.py
+
+        :returns results: dictionary containing number of hits, hits themselves and matched terms for each
+        """
+        whoosh_index_dir = trans.app.config.whoosh_index_dir
+        index_exists = whoosh.index.exists_in( whoosh_index_dir )
+        if index_exists:
+            index = whoosh.index.open_dir( whoosh_index_dir )
+            try:
+                # Some literature about BM25F:
+                # http://trec.nist.gov/pubs/trec13/papers/microsoft-cambridge.web.hard.pdf
+                # http://en.wikipedia.org/wiki/Okapi_BM25
+                # Basically, the higher the number, the bigger the weight.
+                repo_weighting = RepoWeighting( field_B={ 'name_B' : boosts.repo_name_boost,
+                                                          'description_B' : boosts.repo_description_boost,
+                                                          'long_description_B' : boosts.repo_long_description_boost,
+                                                          'homepage_url_B' : boosts.repo_homepage_url_boost,
+                                                          'remote_repository_url_B' : boosts.repo_remote_repository_url_boost,
+                                                          'repo_owner_username_B' : boosts.repo_owner_username_boost } )
+
+                searcher = index.searcher( weighting=repo_weighting )
+
+                parser = MultifieldParser( [
+                    'name',
+                    'description',
+                    'long_description',
+                    'homepage_url',
+                    'remote_repository_url',
+                    'repo_owner_username' ], schema=schema )
+
+                user_query = parser.parse( '*' + search_term + '*' )
+
+                try:
+                    hits = searcher.search_page( user_query, page, pagelen=page_size, terms=True )
+                except ValueError:
+                    raise ObjectNotFound( 'The requested page does not exist.' )
+
+                log.debug( 'searching for: #' + str( search_term ) )
+                log.debug( 'total hits: ' + str( len( hits ) ) )
+                log.debug( 'scored hits: ' + str( hits.scored_length() ) )
+                results = {}
+                results[ 'total_results'] = str( len( hits ) )
+                results[ 'page'] = str( page )
+                results[ 'page_size'] = str( page_size )
+                results[ 'hits' ] = []
+                for hit in hits:
+                    hit_dict = {}
+                    hit_dict[ 'id' ] = trans.security.encode_id( hit.get( 'id' ) )
+                    hit_dict[ 'repo_owner_username' ] = hit.get( 'repo_owner_username' )
+                    hit_dict[ 'name' ] = hit.get( 'name' )
+                    hit_dict[ 'long_description' ] = hit.get( 'long_description' )
+                    hit_dict[ 'remote_repository_url' ] = hit.get( 'remote_repository_url' )
+                    hit_dict[ 'homepage_url' ] = hit.get( 'homepage_url' )
+                    hit_dict[ 'description' ] = hit.get( 'description' )
+                    hit_dict[ 'last_updated' ] = hit.get( 'last_updated' )
+                    hit_dict[ 'full_last_updated' ] = hit.get( 'full_last_updated' )
+                    hit_dict[ 'approved' ] = hit.get( 'approved' )
+                    hit_dict[ 'times_downloaded' ] = hit.get( 'times_downloaded' )
+                    results[ 'hits' ].append( {'repository': hit_dict, 'matched_terms': hit.matched_terms(), 'score': hit.score } )
+                return results
+            finally:
+                searcher.close()
+        else:
+            raise exceptions.InternalServerError( 'The search index file is missing.' )
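
To see what RepoWeighting.final() does to a raw score, here is a back-of-the-envelope check with invented inputs: a repository downloaded 250 times whose stored "approved" field is 'yes':

    raw_score = 4.2                  # hypothetical BM25F score for the document
    reasonable_hits = 100.0
    times_downloaded = 250
    popularity_modifier = times_downloaded / reasonable_hits   # 2.5
    cert_modifier = 2                # doubled because the revision is approved
    print(raw_score * popularity_modifier * cert_modifier)     # 21.0

So certification doubles a hit's score, and popularity scales it linearly: 100 downloads leaves the score unchanged, 200 doubles it. This heavily favors popular, reviewed repositories over better textual matches.
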
diff --git a/lib/galaxy/webapps/tool_shed/search/tool_search.py b/lib/galaxy/webapps/tool_shed/search/tool_search.py
new file mode 100644
index 0000000..c04a8bf
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/search/tool_search.py
@@ -0,0 +1,84 @@
+"""Module for searching the toolshed tools within all repositories"""
+import os
+import logging
+from galaxy import exceptions
+from galaxy.exceptions import ObjectNotFound
+import whoosh.index
+from whoosh import scoring
+from whoosh.fields import Schema, STORED, TEXT
+from whoosh.qparser import MultifieldParser
+
+log = logging.getLogger( __name__ )
+
+tool_schema = Schema(
+    name=TEXT( stored=True ),
+    description=TEXT( stored=True ),
+    owner=TEXT( stored=True ),
+    id=TEXT( stored=True ),
+    help=TEXT( stored=True ),
+    version=TEXT( stored=True),
+    repo_name=TEXT( stored=True ),
+    repo_owner_username=TEXT( stored=True ),
+    repo_id=STORED )
+
+
+class ToolSearch( object ):
+
+    def search( self, trans, search_term, page, page_size, boosts ):
+        """
+        Perform the search on the given search_term
+
+        :param search_term: unicode encoded string with the search term(s)
+        :param boosts: namedtuple containing custom boosts for searchfields, see api/repositories.py
+
+        :returns results: dictionary containing number of hits, hits themselves and matched terms for each
+        """
+        tool_index_dir = os.path.join( trans.app.config.whoosh_index_dir, 'tools' )
+        index_exists = whoosh.index.exists_in( tool_index_dir )
+        if index_exists:
+            index = whoosh.index.open_dir( tool_index_dir )
+            try:
+                # Some literature about BM25F:
+                # http://trec.nist.gov/pubs/trec13/papers/microsoft-cambridge.web.hard.pdf
+                # http://en.wikipedia.org/wiki/Okapi_BM25
+                # Basically, the higher the number, the bigger the weight.
+                tool_weighting = scoring.BM25F( field_B={
+                                                'name_B' : boosts.tool_name_boost,
+                                                'description_B' : boosts.tool_description_boost,
+                                                'help_B' : boosts.tool_help_boost,
+                                                'repo_owner_username_B' : boosts.tool_repo_owner_username_boost } )
+                searcher = index.searcher( weighting=tool_weighting )
+
+                parser = MultifieldParser( [
+                    'name',
+                    'description',
+                    'help',
+                    'repo_owner_username' ], schema=tool_schema )
+
+                user_query = parser.parse( '*' + search_term + '*' )
+
+                try:
+                    hits = searcher.search_page( user_query, page, pagelen=page_size, terms=True )
+                except ValueError:
+                    raise ObjectNotFound( 'The requested page does not exist.' )
+
+                log.debug( 'searching tools for: #' + str( search_term ) )
+                log.debug( 'total hits: ' + str( len( hits ) ) )
+                log.debug( 'scored hits: ' + str( hits.scored_length() ) )
+                results = {}
+                results[ 'total_results' ] = str( len( hits ) )
+                results[ 'page' ] = str( page )
+                results[ 'page_size' ] = str( page_size )
+                results[ 'hits' ] = []
+                for hit in hits:
+                    hit_dict = {}
+                    hit_dict[ 'id' ] = hit.get( 'id' )
+                    hit_dict[ 'repo_owner_username' ] = hit.get( 'repo_owner_username' )
+                    hit_dict[ 'repo_name' ] = hit.get( 'repo_name' )
+                    hit_dict[ 'name' ] = hit.get( 'name' )
+                    hit_dict[ 'description' ] = hit.get( 'description' )
+                    results[ 'hits' ].append( {'tool': hit_dict, 'matched_terms': hit.matched_terms(), 'score': hit.score } )
+                return results
+            finally:
+                searcher.close()
+        else:
+            raise exceptions.InternalServerError( 'The search index file is missing.' )
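+
+# Illustrative sketch (not part of upstream): the `boosts` argument is any
+# object exposing the four attributes read above, so a Bunch works; the boost
+# values here are hypothetical and `trans` is a live transaction, so this only
+# runs inside the application.
+#
+#   from galaxy.util.bunch import Bunch
+#   boosts = Bunch( tool_name_boost=9.0,
+#                   tool_description_boost=1.0,
+#                   tool_help_boost=0.5,
+#                   tool_repo_owner_username_boost=1.0 )
+#   results = ToolSearch().search( trans, 'assembler', page=1, page_size=10, boosts=boosts )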
diff --git a/lib/galaxy/webapps/tool_shed/security/__init__.py b/lib/galaxy/webapps/tool_shed/security/__init__.py
new file mode 100644
index 0000000..46603ab
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/security/__init__.py
@@ -0,0 +1,279 @@
+"""Tool Shed Security"""
+import logging
+
+from sqlalchemy import and_, false
+
+from galaxy.util import listify
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger(__name__)
+
+
+class Action( object ):
+
+    def __init__( self, action, description, model ):
+        self.action = action
+        self.description = description
+        self.model = model
+
+
+class RBACAgent:
+    """Handle Galaxy Tool Shed security"""
+    permitted_actions = Bunch()
+
+    def associate_components( self, **kwd ):
+        raise Exception( 'No valid method of associating provided components: %s' % kwd )
+
+    def associate_user_role( self, user, role ):
+        raise Exception( 'No valid method of associating a user with a role' )
+
+    def convert_permitted_action_strings( self, permitted_action_strings ):
+        """
+        When getting permitted actions from an untrusted source like a
+        form, ensure that they match our actual permitted actions.
+        """
+        return filter( lambda x: x is not None, [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] )
+
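+    # Illustrative sketch (not part of upstream): given strings from an
+    # untrusted form, only those naming a key in permitted_actions survive;
+    # unknown values are silently dropped. 'EXAMPLE_ACTION' is hypothetical.
+    #
+    #   actions = agent.convert_permitted_action_strings( [ 'EXAMPLE_ACTION', 'bogus' ] )
+    #   # -> [ <Action for EXAMPLE_ACTION> ] if that key is registered, else []
+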
+    def create_private_user_role( self, user ):
+        raise Exception( "Unimplemented Method" )
+
+    def get_action( self, name, default=None ):
+        """Get a permitted action by its dict key or action name"""
+        for k, v in self.permitted_actions.items():
+            if k == name or v.action == name:
+                return v
+        return default
+
+    def get_actions( self ):
+        """Get all permitted actions as a list of Action objects"""
+        return self.permitted_actions.__dict__.values()
+
+    def get_item_actions( self, action, item ):
+        raise Exception( 'No valid method of retrieving action (%s) for item %s.' % ( action, item ) )
+
+    def get_private_user_role( self, user ):
+        raise Exception( "Unimplemented Method" )
+
+
+class CommunityRBACAgent( RBACAgent ):
+
+    def __init__( self, model, permitted_actions=None ):
+        self.model = model
+        if permitted_actions:
+            self.permitted_actions = permitted_actions
+
+    @property
+    def sa_session( self ):
+        """Returns a SQLAlchemy session"""
+        return self.model.context
+
+    def allow_action( self, roles, action, item ):
+        """
+        Method for checking a permission for the current user ( based on roles ) to perform a
+        specific action on an item
+        """
+        item_actions = self.get_item_actions( action, item )
+        if not item_actions:
+            return action.model == 'restrict'
+        ret_val = False
+        for item_action in item_actions:
+            if item_action.role in roles:
+                ret_val = True
+                break
+        return ret_val
+
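+    # Illustrative sketch (not part of upstream) of the semantics above: when
+    # no item-level permissions are recorded, a 'restrict'-model action is
+    # allowed by default while a 'grant'-model action is denied; once
+    # permissions exist, the user needs a role matching one of them.
+    #
+    #   if agent.allow_action( user.all_roles(), some_action, item ):
+    #       pass  # the user may perform some_action on item
+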
+    def associate_components( self, **kwd ):
+        if 'user' in kwd:
+            if 'group' in kwd:
+                return self.associate_user_group( kwd['user'], kwd['group'] )
+            elif 'role' in kwd:
+                return self.associate_user_role( kwd['user'], kwd['role'] )
+        elif 'role' in kwd:
+            if 'group' in kwd:
+                return self.associate_group_role( kwd['group'], kwd['role'] )
+        elif 'repository' in kwd:
+            return self.associate_repository_category( kwd[ 'repository' ], kwd[ 'category' ] )
+        raise Exception( 'No valid method of associating provided components: %s' % kwd )
+
+    def associate_group_role( self, group, role ):
+        assoc = self.model.GroupRoleAssociation( group, role )
+        self.sa_session.add( assoc )
+        self.sa_session.flush()
+        return assoc
+
+    def associate_user_group( self, user, group ):
+        assoc = self.model.UserGroupAssociation( user, group )
+        self.sa_session.add( assoc )
+        self.sa_session.flush()
+        return assoc
+
+    def associate_user_role( self, user, role ):
+        assoc = self.model.UserRoleAssociation( user, role )
+        self.sa_session.add( assoc )
+        self.sa_session.flush()
+        return assoc
+
+    def associate_repository_category( self, repository, category ):
+        assoc = self.model.RepositoryCategoryAssociation( repository, category )
+        self.sa_session.add( assoc )
+        self.sa_session.flush()
+        return assoc
+
+    def create_private_user_role( self, user ):
+        # Create private role
+        role = self.model.Role( name=user.email, description='Private Role for ' + user.email, type=self.model.Role.types.PRIVATE )
+        self.sa_session.add( role )
+        self.sa_session.flush()
+        # Add user to role
+        self.associate_components( role=role, user=user )
+        return role
+
+    def get_item_actions( self, action, item ):
+        # item must be one of: Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
+        return [ permission for permission in item.actions if permission.action == action.action ]
+
+    def get_private_user_role( self, user, auto_create=False ):
+        role = self.sa_session.query( self.model.Role ) \
+                              .filter( and_( self.model.Role.table.c.name == user.email,
+                                             self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ) \
+                              .first()
+        if not role:
+            if auto_create:
+                return self.create_private_user_role( user )
+            else:
+                return None
+        return role
+
+    def get_repository_reviewer_role( self ):
+        return self.sa_session.query( self.model.Role ) \
+                              .filter( and_( self.model.Role.table.c.name == 'Repository Reviewer',
+                                             self.model.Role.table.c.type == self.model.Role.types.SYSTEM ) ) \
+                              .first()
+
+    def set_entity_group_associations( self, groups=None, users=None, roles=None, delete_existing_assocs=True ):
+        if groups is None:
+            groups = []
+        if users is None:
+            users = []
+        if roles is None:
+            roles = []
+        for group in groups:
+            if delete_existing_assocs:
+                for a in group.roles + group.users:
+                    self.sa_session.delete( a )
+                    self.sa_session.flush()
+            for role in roles:
+                self.associate_components( group=group, role=role )
+            for user in users:
+                self.associate_components( group=group, user=user )
+
+    def set_entity_role_associations( self, roles=None, users=None, groups=None, repositories=None, delete_existing_assocs=True ):
+        if roles is None:
+            roles = []
+        if users is None:
+            users = []
+        if groups is None:
+            groups = []
+        if repositories is None:
+            repositories = []
+        for role in roles:
+            if delete_existing_assocs:
+                for a in role.users + role.groups:
+                    self.sa_session.delete( a )
+                    self.sa_session.flush()
+            for user in users:
+                self.associate_components( user=user, role=role )
+            for group in groups:
+                self.associate_components( group=group, role=role )
+
+    def set_entity_user_associations( self, users=None, roles=None, groups=None, delete_existing_assocs=True ):
+        if users is None:
+            users = []
+        if roles is None:
+            roles = []
+        if groups is None:
+            groups = []
+        for user in users:
+            if delete_existing_assocs:
+                for a in user.non_private_roles + user.groups:
+                    self.sa_session.delete( a )
+                    self.sa_session.flush()
+            self.sa_session.refresh( user )
+            for role in roles:
+                # Make sure we are not creating an additional association with a PRIVATE role
+                if role not in user.roles:
+                    self.associate_components( user=user, role=role )
+            for group in groups:
+                self.associate_components( user=user, group=group )
+
+    def can_push( self, app, user, repository ):
+        if user:
+            return user.username in listify( repository.allow_push( app ) )
+        return False
+
+    def user_can_administer_repository( self, user, repository ):
+        """Return True if the received user can administer the received repository."""
+        if user:
+            if repository:
+                repository_admin_role = repository.admin_role
+                for rra in repository.roles:
+                    role = rra.role
+                    if role.id == repository_admin_role.id:
+                        # We have the repository's admin role, so see if the user is associated with it.
+                        for ura in role.users:
+                            role_member = ura.user
+                            if role_member.id == user.id:
+                                return True
+                        # The user is not directly associated with the role, so see if they are a member
+                        # of a group that is associated with the role.
+                        for gra in role.groups:
+                            group = gra.group
+                            for uga in group.members:
+                                member = uga.user
+                                if member.id == user.id:
+                                    return True
+        return False
+
+    def user_can_import_repository_archive( self, user, archive_owner ):
+        # This method should be called only if the current user is not an admin.
+        if user.username == archive_owner:
+            return True
+        # A member of the IUC is authorized to create new repositories that are owned by another user.
+        iuc_group = self.sa_session.query( self.model.Group ) \
+                                   .filter( and_( self.model.Group.table.c.name == 'Intergalactic Utilities Commission',
+                                                  self.model.Group.table.c.deleted == false() ) ) \
+                                   .first()
+        if iuc_group is not None:
+            for uga in iuc_group.users:
+                if uga.user.id == user.id:
+                    return True
+        return False
+
+    def user_can_review_repositories( self, user ):
+        if user:
+            roles = user.all_roles()
+            if roles:
+                repository_reviewer_role = self.get_repository_reviewer_role()
+                if repository_reviewer_role:
+                    return repository_reviewer_role in roles
+        return False
+
+    def user_can_browse_component_review( self, app, repository, component_review, user ):
+        if component_review and user:
+            if self.can_push( app, user, repository ):
+                # A user with write permission on the repository can access private/public component reviews.
+                return True
+            else:
+                if self.user_can_review_repositories( user ):
+                    # Reviewers can access private/public component reviews.
+                    return True
+        return False
+
+
+def get_permitted_actions( filter=None ):
+    """Utility method to return a subset of RBACAgent's permitted actions"""
+    if filter is None:
+        return RBACAgent.permitted_actions
+    tmp_bunch = Bunch()
+    for k, v in RBACAgent.permitted_actions.items():
+        if k.startswith( filter ):
+            tmp_bunch.__dict__[ k ] = v
+    return tmp_bunch
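+
+# Illustrative sketch (not part of upstream): filtering by prefix returns a
+# Bunch holding only the matching actions, e.g. everything whose key starts
+# with 'REPOSITORY' (assuming such keys are registered in permitted_actions):
+#
+#   repo_actions = get_permitted_actions( filter='REPOSITORY' )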
diff --git a/lib/galaxy/webapps/tool_shed/util/__init__.py b/lib/galaxy/webapps/tool_shed/util/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/webapps/tool_shed/util/hgweb_config.py b/lib/galaxy/webapps/tool_shed/util/hgweb_config.py
new file mode 100644
index 0000000..05953a2
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/util/hgweb_config.py
@@ -0,0 +1,103 @@
+import os
+import ConfigParser
+import logging
+import shutil
+import threading
+from datetime import date
+
+log = logging.getLogger( __name__ )
+
+new_hgweb_config_template = """
+[paths]
+
+"""
+
+
+class HgWebConfigManager( object ):
+    # Use a single class-level lock; a fresh Lock created per call would
+    # provide no mutual exclusion between threads.
+    lock = threading.Lock()
+
+    def __init__( self ):
+        self.hgweb_config_dir = None
+        self.in_memory_config = None
+
+    def add_entry( self, lhs, rhs ):
+        """Add an entry in the hgweb.config file for a new repository."""
+        self.lock.acquire( True )
+        try:
+            # Since we're changing the config, make sure the latest is loaded into memory.
+            self.read_config( force_read=True )
+            # An entry looks something like: repos/test/mira_assembler = database/community_files/000/repo_123.
+            if rhs.startswith( './' ):
+                rhs = rhs.replace( './', '', 1 )
+            self.make_backup()
+            # Add the new entry into memory.
+            self.in_memory_config.set( 'paths', lhs, rhs )
+            # Persist our in-memory configuration.
+            self.write_config()
+        except Exception as e:
+            log.debug( "Exception in HgWebConfigManager.add_entry(): %s" % str( e ) )
+        finally:
+            self.lock.release()
+
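+    # Illustrative sketch (not part of upstream): after
+    #   add_entry( 'repos/test/mira_assembler', './database/community_files/000/repo_123' )
+    # the persisted hgweb.config contains (note the './' prefix is stripped):
+    #
+    #   [paths]
+    #   repos/test/mira_assembler = database/community_files/000/repo_123
+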
+    def change_entry( self, old_lhs, new_lhs, new_rhs ):
+        """Change an entry in the hgweb.config file for a repository - this only happens when the owner changes the name of the repository."""
+        self.lock.acquire( True )
+        try:
+            self.make_backup()
+            # Remove the old entry.
+            self.in_memory_config.remove_option( 'paths', old_lhs )
+            # Add the new entry.
+            self.in_memory_config.set( 'paths', new_lhs, new_rhs )
+            # Persist our in-memory configuration.
+            self.write_config()
+        except Exception as e:
+            log.debug( "Exception in HgWebConfigManager.change_entry(): %s" % str( e ) )
+        finally:
+            self.lock.release()
+
+    def get_entry( self, lhs ):
+        """Return an entry in the hgweb.config file for a repository"""
+        self.read_config()
+        try:
+            entry = self.in_memory_config.get( 'paths', lhs )
+        except ConfigParser.NoOptionError:
+            try:
+                # We have a multi-threaded front-end, so one of the threads may not have the latest version of the hgweb.config file.
+                self.read_config( force_read=True )
+                entry = self.in_memory_config.get( 'paths', lhs )
+            except ConfigParser.NoOptionError:
+                raise Exception( "Entry for repository %s missing in file %s." % ( lhs, self.hgweb_config ) )
+        return entry
+
+    @property
+    def hgweb_config( self ):
+        hgweb_config = os.path.join( self.hgweb_config_dir, 'hgweb.config' )
+        if not os.path.exists( hgweb_config ):
+            # We used to raise an exception here...
+            # raise Exception( "Required file %s does not exist - check config setting for hgweb_config_dir." % hgweb_config )
+            # ...but now we just log the missing file and create a new empty one.
+            log.debug( "Required file %s does not exist, so creating a new, empty file.  Check your config setting for hgweb_config_dir." % hgweb_config )
+            hgweb_config_file = open( hgweb_config, 'wb' )
+            hgweb_config_file.write( new_hgweb_config_template )
+            hgweb_config_file.close()
+        return os.path.abspath( hgweb_config )
+
+    def make_backup( self ):
+        # Make a backup of the hgweb.config file.
+        today = date.today()
+        backup_date = today.strftime( "%Y_%m_%d" )
+        hgweb_config_backup_filename = 'hgweb.config_%s_backup' % backup_date
+        hgweb_config_copy = os.path.join( self.hgweb_config_dir, hgweb_config_backup_filename )
+        shutil.copy( os.path.abspath( self.hgweb_config ), os.path.abspath( hgweb_config_copy ) )
+
+    def read_config( self, force_read=False ):
+        if force_read or self.in_memory_config is None:
+            config = ConfigParser.ConfigParser()
+            config.read( self.hgweb_config )
+            self.in_memory_config = config
+
+    def write_config( self ):
+        """Writing the in-memory configuration to the hgweb.config file on disk."""
+        config_file = open( self.hgweb_config, 'wb' )
+        self.in_memory_config.write( config_file )
+        config_file.close
diff --git a/lib/galaxy/webapps/tool_shed/util/ratings_util.py b/lib/galaxy/webapps/tool_shed/util/ratings_util.py
new file mode 100644
index 0000000..5f07fa9
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/util/ratings_util.py
@@ -0,0 +1,28 @@
+import logging
+from galaxy.model.item_attrs import UsesItemRatings
+
+log = logging.getLogger( __name__ )
+
+
+class ItemRatings( UsesItemRatings ):
+    """Overrides rate_item method since we also allow for comments"""
+    def rate_item( self, trans, user, item, rating, comment='' ):
+        """ Rate an item. Return type is <item_class>RatingAssociation. """
+        item_rating = self.get_user_item_rating( trans.sa_session, user, item, webapp_model=trans.model )
+        if not item_rating:
+            # User has not yet rated item; create rating.
+            item_rating_assoc_class = self._get_item_rating_assoc_class( item, webapp_model=trans.model )
+            item_rating = item_rating_assoc_class()
+            item_rating.user = trans.user
+            item_rating.set_item( item )
+            item_rating.rating = rating
+            item_rating.comment = comment
+            trans.sa_session.add( item_rating )
+            trans.sa_session.flush()
+        elif item_rating.rating != rating or item_rating.comment != comment:
+            # User has previously rated item; update rating.
+            item_rating.rating = rating
+            item_rating.comment = comment
+            trans.sa_session.add( item_rating )
+            trans.sa_session.flush()
+        return item_rating
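+
+# Illustrative sketch (not part of upstream): rate_item is create-or-update,
+# so calling it twice for the same user/item pair leaves a single association
+# holding the latest rating and comment. `trans` is a live transaction.
+#
+#   ratings = ItemRatings()
+#   assoc = ratings.rate_item( trans, trans.user, repository, 5, comment='Works well' )
+#   assoc = ratings.rate_item( trans, trans.user, repository, 4 )  # updated in place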
diff --git a/lib/galaxy/webapps/tool_shed/util/shed_statistics.py b/lib/galaxy/webapps/tool_shed/util/shed_statistics.py
new file mode 100644
index 0000000..9f23012
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/util/shed_statistics.py
@@ -0,0 +1,94 @@
+from time import gmtime
+from time import strftime
+
+
+class ShedCounter( object ):
+    def __init__( self, model ):
+        # TODO: Enhance the ShedCounter to retrieve information from the db instead of displaying what's currently in memory.
+        self.model = model
+        self.custom_datatypes = 0
+        self.generation_time = strftime( "%b %d, %Y", gmtime() )
+        self.deleted_repositories = 0
+        self.deprecated_repositories = 0
+        self.invalid_versions_of_tools = 0
+        self.repositories = 0
+        self.total_clones = 0
+        self.valid_versions_of_tools = 0
+        self.unique_owners = 0
+        self.unique_valid_tools = 0
+        self.workflows = 0
+        self.generate_statistics()
+
+    @property
+    def sa_session( self ):
+        """Returns a SQLAlchemy session"""
+        return self.model.context
+
+    def generate_statistics( self ):
+        self.custom_datatypes = 0
+        self.deleted_repositories = 0
+        self.deprecated_repositories = 0
+        self.invalid_versions_of_tools = 0
+        self.repositories = 0
+        self.total_clones = 0
+        self.unique_owners = 0
+        self.valid_versions_of_tools = 0
+        self.unique_valid_tools = 0
+        self.workflows = 0
+        unique_user_ids = []
+        for repository in self.sa_session.query( self.model.Repository ):
+            self.repositories += 1
+            self.total_clones += repository.times_downloaded
+            is_deleted = repository.deleted
+            if is_deleted:
+                self.deleted_repositories += 1
+            else:
+                if repository.deprecated:
+                    self.deprecated_repositories += 1
+                if repository.user_id not in unique_user_ids:
+                    self.unique_owners += 1
+                    unique_user_ids.append( repository.user_id )
+                processed_datatypes = []
+                processed_guids = []
+                processed_invalid_tool_configs = []
+                processed_relative_workflow_paths = []
+                processed_tool_ids = []
+                # A repository's metadata_revisions are those that ignore the value of the
+                # repository_metadata.downloadable column.
+                for metadata_revision in repository.metadata_revisions:
+                    metadata = metadata_revision.metadata
+                    if 'tools' in metadata:
+                        tool_dicts = metadata[ 'tools' ]
+                        for tool_dict in tool_dicts:
+                            if 'guid' in tool_dict:
+                                guid = tool_dict[ 'guid' ]
+                                if guid not in processed_guids:
+                                    self.valid_versions_of_tools += 1
+                                    processed_guids.append( guid )
+                            if 'id' in tool_dict:
+                                tool_id = tool_dict[ 'id' ]
+                                if tool_id not in processed_tool_ids:
+                                    self.unique_valid_tools += 1
+                                    processed_tool_ids.append( tool_id )
+                    if 'invalid_tools' in metadata:
+                        invalid_tool_configs = metadata[ 'invalid_tools' ]
+                        for invalid_tool_config in invalid_tool_configs:
+                            if invalid_tool_config not in processed_invalid_tool_configs:
+                                self.invalid_versions_of_tools += 1
+                                processed_invalid_tool_configs.append( invalid_tool_config )
+                    if 'datatypes' in metadata:
+                        datatypes = metadata[ 'datatypes' ]
+                        for datatypes_dict in datatypes:
+                            if 'extension' in datatypes_dict:
+                                extension = datatypes_dict[ 'extension' ]
+                                if extension not in processed_datatypes:
+                                    self.custom_datatypes += 1
+                                    processed_datatypes.append( extension )
+                    if 'workflows' in metadata:
+                        workflows = metadata[ 'workflows' ]
+                        for workflow_tup in workflows:
+                            relative_path, exported_workflow_dict = workflow_tup
+                            if relative_path not in processed_relative_workflow_paths:
+                                self.workflows += 1
+                                processed_relative_workflow_paths.append( relative_path )
+        self.generation_time = strftime( "%b %d, %Y", gmtime() )
diff --git a/lib/galaxy/webapps/util.py b/lib/galaxy/webapps/util.py
new file mode 100644
index 0000000..618f27d
--- /dev/null
+++ b/lib/galaxy/webapps/util.py
@@ -0,0 +1,21 @@
+from __future__ import absolute_import
+
+import mako.exceptions
+
+
+def build_template_error_formatters():
+    """
+    Build a list of template error formatters for WebError. When an error
+    occurs, WebError passes the exception to each function in this list until
+    one returns a value, which will be displayed on the error page.
+    """
+    formatters = []
+    # Formatter for mako
+
+    def mako_html_data( exc_value ):
+        if isinstance( exc_value, ( mako.exceptions.CompileException, mako.exceptions.SyntaxException ) ):
+            return mako.exceptions.html_error_template().render( full=False, css=False )
+        if isinstance( exc_value, AttributeError ) and exc_value.args[0].startswith( "'Undefined' object has no attribute" ):
+            return mako.exceptions.html_error_template().render( full=False, css=False )
+    formatters.append( mako_html_data )
+    return formatters
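+
+# Illustrative sketch (not part of upstream): the consumer contract described
+# in the docstring - try each formatter in order and stop at the first one
+# that returns a non-None value.
+#
+#   def format_exception( exc_value, formatters ):
+#       for formatter in formatters:
+#           html = formatter( exc_value )
+#           if html is not None:
+#               return html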
diff --git a/lib/galaxy/webhooks/__init__.py b/lib/galaxy/webhooks/__init__.py
new file mode 100644
index 0000000..9aaa2b8
--- /dev/null
+++ b/lib/galaxy/webhooks/__init__.py
@@ -0,0 +1,106 @@
+"""
+This module manages loading of Galaxy webhooks.
+"""
+
+import os
+import yaml
+import logging
+
+from galaxy.util import config_directories_from_setting
+
+log = logging.getLogger(__name__)
+
+
+class Webhook(object):
+    def __init__(self, w_name, w_type, w_activate, w_path):
+        self.name = w_name
+        self.type = w_type
+        self.activate = w_activate
+        self.path = w_path
+        self.styles = ''
+        self.script = ''
+        self.helper = ''
+        self.config = {}
+
+    def to_dict(self):
+        return {
+            'name': self.name,
+            'type': self.type,
+            'activate': self.activate,
+            'styles': self.styles,
+            'script': self.script,
+            'config': self.config
+        }
+
+
+class WebhooksRegistry(object):
+    def __init__(self, webhooks_directories):
+        self.webhooks = []
+        self.webhooks_directories = []
+
+        for webhook_dir in config_directories_from_setting(
+                webhooks_directories):
+            for plugin_dir in os.listdir(webhook_dir):
+                self.webhooks_directories.append(
+                    os.path.join(webhook_dir, plugin_dir)
+                )
+
+        self.load_webhooks()
+
+    def load_webhooks(self):
+        for directory in self.webhooks_directories:
+            config_dir = os.path.join(directory, 'config')
+
+            if not os.path.exists(config_dir):
+                log.warning('directory not found: %s', config_dir)
+                continue
+
+            config_file = os.listdir(config_dir)[0]
+            if not config_file.endswith(('.yml', '.yaml')):
+                config_file = ''
+
+            if config_file:
+                self.load_webhook_from_config(config_dir, config_file)
+
+    def load_webhook_from_config(self, config_dir, config_file):
+        try:
+            with open(os.path.join(config_dir, config_file)) as file:
+                config = yaml.load(file)
+                path = os.path.normpath(os.path.join(config_dir, '..'))
+                webhook = Webhook(
+                    config['name'],
+                    config['type'],
+                    config['activate'],
+                    path,
+                )
+
+                # Read styles into a string, assuming all styles are in a
+                # single file
+                try:
+                    styles_file = os.path.join(path, 'static/styles.css')
+                    with open(styles_file, 'r') as file:
+                        webhook.styles = file.read().replace('\n', '')
+                except IOError:
+                    pass
+
+                # Read script into a string, assuming everything is in a
+                # single file
+                try:
+                    script_file = os.path.join(path, 'static/script.js')
+                    with open(script_file, 'r') as file:
+                        webhook.script = file.read()
+                except IOError:
+                    pass
+
+                # Save helper function path if it exists
+                helper_path = os.path.join(path, 'helper/__init__.py')
+                if os.path.isfile(helper_path):
+                    webhook.helper = helper_path
+
+                webhook.config = config
+                self.webhooks.append(webhook)
+
+        except Exception as e:
+            log.exception(e)
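+
+# Illustrative sketch (not part of upstream): a minimal config/*.yml a plugin
+# directory could provide for the loader above. Only 'name', 'type' and
+# 'activate' are read explicitly; the whole mapping is kept as webhook.config.
+# The values shown are hypothetical.
+#
+#   name: my_webhook
+#   type: tool
+#   activate: true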
diff --git a/lib/galaxy/work/__init__.py b/lib/galaxy/work/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/work/context.py b/lib/galaxy/work/context.py
new file mode 100644
index 0000000..4252ef9
--- /dev/null
+++ b/lib/galaxy/work/context.py
@@ -0,0 +1,47 @@
+from galaxy.managers.context import (
+    ProvidesAppContext,
+    ProvidesUserContext,
+    ProvidesHistoryContext
+)
+
+
+class WorkRequestContext( ProvidesAppContext, ProvidesUserContext, ProvidesHistoryContext ):
+    """ Stripped down implementation of Galaxy web transaction god object for
+    work request handling outside of web threads - uses mix-ins shared with
+    GalaxyWebTransaction to provide app, user, and history context convience
+    methods - but nothing related to HTTP handling, mako views, etc....
+
+    Things that only need app shouldn't be consuming trans - but there is a
+    need for actions potentially tied to users and histories and  hopefully
+    this can define that stripped down interface providing access to user and
+    history information - but not dealing with web request and response
+    objects.
+    """
+
+    def __init__( self, app, user=None, history=None, workflow_building_mode=False ):
+        self.app = app
+        self.security = app.security
+        self.__user = user
+        self.__history = history
+        self.api_inherit_admin = False
+        self.workflow_building_mode = workflow_building_mode
+
+    def get_history( self, create=False ):
+        if create:
+            raise NotImplementedError( "Cannot create histories from a work request context." )
+        return self.__history
+
+    def set_history( self ):
+        raise NotImplementedError( "Cannot change histories from a work request context." )
+
+    history = property( get_history, set_history )
+
+    def get_user( self ):
+        """Return the current user if logged in or None."""
+        return self.__user
+
+    def set_user( self, user ):
+        """Set the current user."""
+        raise NotImplementedError( "Cannot change users from a work request context." )
+
+    user = property( get_user, set_user )
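+
+# Illustrative sketch (not part of upstream): building a minimal trans-like
+# context for background work, given a live `app`, `user` and `history`.
+#
+#   trans = WorkRequestContext( app, user=user, history=history )
+#   trans.user      # -> user (read-only; assignment raises NotImplementedError)
+#   trans.history   # -> history (likewise read-only)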
diff --git a/lib/galaxy/workflow/__init__.py b/lib/galaxy/workflow/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy/workflow/extract.py b/lib/galaxy/workflow/extract.py
new file mode 100644
index 0000000..8bf1624
--- /dev/null
+++ b/lib/galaxy/workflow/extract.py
@@ -0,0 +1,375 @@
+""" This module contains functionality to aid in extracting workflows from
+histories.
+"""
+import logging
+
+from galaxy import exceptions, model
+from galaxy.tools.parameters.basic import (
+    DataCollectionToolParameter,
+    DataToolParameter
+)
+from galaxy.tools.parameters.grouping import (
+    Conditional,
+    Repeat,
+    Section
+)
+from galaxy.tools.parser import ToolOutputCollectionPart
+from galaxy.util.odict import odict
+
+from .steps import (
+    attach_ordered_steps,
+    order_workflow_steps_with_levels
+)
+
+log = logging.getLogger( __name__ )
+
+WARNING_SOME_DATASETS_NOT_READY = "Some datasets still queued or running were ignored"
+
+
+def extract_workflow( trans, user, history=None, job_ids=None, dataset_ids=None, dataset_collection_ids=None, workflow_name=None, dataset_names=None, dataset_collection_names=None ):
+    steps = extract_steps( trans, history=history, job_ids=job_ids, dataset_ids=dataset_ids, dataset_collection_ids=dataset_collection_ids, dataset_names=dataset_names, dataset_collection_names=dataset_collection_names )
+    # Workflow to populate
+    workflow = model.Workflow()
+    workflow.name = workflow_name
+    # Order the steps if possible
+    attach_ordered_steps( workflow, steps )
+    # And let's try to set up some reasonable locations on the canvas
+    # (these are pretty arbitrary values)
+    levorder = order_workflow_steps_with_levels( steps )
+    base_pos = 10
+    for i, steps_at_level in enumerate( levorder ):
+        for j, index in enumerate( steps_at_level ):
+            step = steps[ index ]
+            step.position = dict( top=( base_pos + 120 * j ),
+                                  left=( base_pos + 220 * i ) )
+    # Store it
+    stored = model.StoredWorkflow()
+    stored.user = user
+    stored.name = workflow_name
+    workflow.stored_workflow = stored
+    stored.latest_workflow = workflow
+    trans.sa_session.add( stored )
+    trans.sa_session.flush()
+    return stored
+
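+# Illustrative worked example (not part of upstream) of the layout above:
+# with base_pos = 10, the step at level i=1, slot j=2 is placed at
+# top = 10 + 120 * 2 = 250 and left = 10 + 220 * 1 = 230, so levels fan out
+# horizontally and steps within a level stack vertically.
+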
+
+def extract_steps( trans, history=None, job_ids=None, dataset_ids=None, dataset_collection_ids=None, dataset_names=None, dataset_collection_names=None ):
+    # Ensure job_ids and dataset_ids are lists (possibly empty)
+    if job_ids is None:
+        job_ids = []
+    elif type( job_ids ) is not list:
+        job_ids = [ job_ids ]
+    if dataset_ids is None:
+        dataset_ids = []
+    elif type( dataset_ids ) is not list:
+        dataset_ids = [ dataset_ids ]
+    if dataset_collection_ids is None:
+        dataset_collection_ids = []
+    elif type( dataset_collection_ids ) is not list:
+        dataset_collection_ids = [ dataset_collection_ids ]
+    # Convert both sets of ids to integers
+    job_ids = [ int( _ ) for _ in job_ids ]
+    dataset_ids = [ int( _ ) for _ in dataset_ids ]
+    dataset_collection_ids = [ int( _ ) for _ in dataset_collection_ids ]
+    # Find each job; for security we (implicitly) check that each is
+    # associated with the current history.
+    summary = WorkflowSummary( trans, history )
+    jobs = summary.jobs
+    steps = []
+    hid_to_output_pair = {}
+    # Input dataset steps
+    for i, hid in enumerate( dataset_ids ):
+        step = model.WorkflowStep()
+        step.type = 'data_input'
+        if dataset_names:
+            name = dataset_names[i]
+        else:
+            name = "Input Dataset"
+        step.tool_inputs = dict( name=name )
+        hid_to_output_pair[ hid ] = ( step, 'output' )
+        steps.append( step )
+    for i, hid in enumerate( dataset_collection_ids ):
+        step = model.WorkflowStep()
+        step.type = 'data_collection_input'
+        if hid not in summary.collection_types:
+            raise exceptions.RequestParameterInvalidException( "hid %s does not appear to be a collection" % hid )
+        collection_type = summary.collection_types[ hid ]
+        if dataset_collection_names:
+            name = dataset_collection_names[i]
+        else:
+            name = "Input Dataset Collection"
+        step.tool_inputs = dict( name=name, collection_type=collection_type )
+        hid_to_output_pair[ hid ] = ( step, 'output' )
+        steps.append( step )
+    # Tool steps
+    for job_id in job_ids:
+        if job_id not in summary.job_id2representative_job:
+            log.warning( "job_id %s not found in job_id2representative_job %s" % ( job_id, summary.job_id2representative_job ) )
+            raise AssertionError( "Attempt to create workflow with job not connected to current history" )
+        job = summary.job_id2representative_job[job_id]
+        tool_inputs, associations = step_inputs( trans, job )
+        step = model.WorkflowStep()
+        step.type = 'tool'
+        step.tool_id = job.tool_id
+        step.tool_version = job.tool_version
+        step.tool_inputs = tool_inputs
+        # NOTE: We shouldn't need to do two passes here since only
+        #       an earlier job can be used as an input to a later
+        #       job.
+        for other_hid, input_name in associations:
+            if job in summary.implicit_map_jobs:
+                an_implicit_output_collection = jobs[ job ][ 0 ][ 1 ]
+                input_collection = an_implicit_output_collection.find_implicit_input_collection( input_name )
+                if input_collection:
+                    other_hid = input_collection.hid
+                else:
+                    log.info("Cannot find implicit input collection for %s" % input_name)
+            if other_hid in hid_to_output_pair:
+                other_step, other_name = hid_to_output_pair[ other_hid ]
+                conn = model.WorkflowStepConnection()
+                conn.input_step = step
+                conn.input_name = input_name
+                # Should always be connected to an earlier step
+                conn.output_step = other_step
+                conn.output_name = other_name
+        steps.append( step )
+        # Store created dataset hids
+        for assoc in (job.output_datasets + job.output_dataset_collection_instances):
+            assoc_name = assoc.name
+            if ToolOutputCollectionPart.is_named_collection_part_name( assoc_name ):
+                continue
+            if job in summary.implicit_map_jobs:
+                hid = None
+                for implicit_pair in jobs[ job ]:
+                    query_assoc_name, dataset_collection = implicit_pair
+                    if query_assoc_name == assoc_name:
+                        hid = dataset_collection.hid
+                if hid is None:
+                    template = "Failed to find matching implicit job - job id is %s, implicit pairs are %s, assoc_name is %s."
+                    message = template % ( job.id, jobs[job], assoc_name )
+                    log.warning( message )
+                    raise Exception( "Failed to extract job." )
+            else:
+                if hasattr( assoc, "dataset" ):
+                    hid = assoc.dataset.hid
+                else:
+                    hid = assoc.dataset_collection_instance.hid
+            hid_to_output_pair[ hid ] = ( step, assoc.name )
+    return steps
+
+
+class FakeJob( object ):
+    """
+    Fake job object for datasets that have no creating_job_associations;
+    they will be treated as "input" datasets.
+    """
+    def __init__( self, dataset ):
+        self.is_fake = True
+        self.id = "fake_%s" % dataset.id
+
+
+class DatasetCollectionCreationJob( object ):
+
+    def __init__( self, dataset_collection ):
+        self.is_fake = True
+        self.id = "fake_%s" % dataset_collection.id
+        self.from_jobs = None
+        self.name = "Dataset Collection Creation"
+        self.disabled_why = "Dataset collection created in a way not compatible with workflows"
+
+    def set_jobs( self, jobs ):
+        assert jobs is not None
+        self.from_jobs = jobs
+
+
+def summarize( trans, history=None ):
+    """ Return mapping of job description to datasets for active items in
+    supplied history - needed for building workflow from a history.
+
+    Formerly called get_job_dict in the workflow web controller.
+    """
+    summary = WorkflowSummary( trans, history )
+    return summary.jobs, summary.warnings
+
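+# Illustrative sketch (not part of upstream): the returned odict maps each
+# (possibly fake) job to a list of ( output_name, content ) pairs, plus a set
+# of warnings. `trans` is a live transaction.
+#
+#   jobs, warnings = summarize( trans )
+#   for job, outputs in jobs.items():
+#       for output_name, content in outputs:
+#           pass  # content is an HDA or HDCA from the history
+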
+
+class WorkflowSummary( object ):
+
+    def __init__( self, trans, history ):
+        if not history:
+            history = trans.get_history()
+        self.history = history
+        self.warnings = set()
+        self.jobs = odict()
+        self.job_id2representative_job = {}  # map a non-fake job id to its representative job
+        self.implicit_map_jobs = []
+        self.collection_types = {}
+
+        self.__summarize()
+
+    def __summarize( self ):
+        # Make a first pass to handle all singleton jobs, input datasets and dataset
+        # collections; just grab the implicitly mapped jobs and handle them in a second
+        # pass. The second pass is needed because we cannot allow selection of individual
+        # datasets from an implicit mapping during extraction - you get the collection or nothing.
+        for content in self.history.active_contents:
+            self.__summarize_content( content )
+
+    def __summarize_content( self, content ):
+        # Update internal state for history content (either an HDA or
+        # an HDCA).
+        if content.history_content_type == "dataset_collection":
+            self.__summarize_dataset_collection( content )
+        else:
+            self.__summarize_dataset( content )
+
+    def __summarize_dataset_collection( self, dataset_collection ):
+        dataset_collection = self.__original_hdca( dataset_collection )
+        hid = dataset_collection.hid
+        self.collection_types[ hid ] = dataset_collection.collection.collection_type
+        cja = dataset_collection.creating_job_associations
+        if cja:
+            # Use the "first" job to represent all mapped jobs.
+            representative_assoc = cja[0]
+            representative_job = representative_assoc.job
+            if representative_job not in self.jobs or self.jobs[ representative_job ][ 0 ][ 1 ].history_content_type == "dataset":
+                self.jobs[ representative_job ] = [ ( representative_assoc.name, dataset_collection ) ]
+                if dataset_collection.implicit_output_name:
+                    self.implicit_map_jobs.append( representative_job )
+            else:
+                self.jobs[ representative_job ].append( ( representative_assoc.name, dataset_collection ) )
+            for assoc in cja:
+                job = assoc.job
+                self.job_id2representative_job[job.id] = representative_job
+        # This whole elif condition may no longer be needed due to additional
+        # tracking with creating_job_associations. Will delete at some point.
+        elif dataset_collection.implicit_output_name:
+            # TODO: Optimize db call
+            dataset_instance = dataset_collection.collection.dataset_instances[ 0 ]
+            if not self.__check_state( dataset_instance ):
+                # Just checking the state of one instance; we don't need more, but it
+                # makes me wonder whether we even need this check at all.
+                return
+
+            original_hda = self.__original_hda( dataset_instance )
+            if not original_hda.creating_job_associations:
+                log.warning( "An implicitly create output dataset collection doesn't have a creating_job_association, should not happen!" )
+                job = DatasetCollectionCreationJob( dataset_collection )
+                self.jobs[ job ] = [ ( None, dataset_collection ) ]
+
+            for assoc in original_hda.creating_job_associations:
+                job = assoc.job
+                if job not in self.jobs or self.jobs[ job ][ 0 ][ 1 ].history_content_type == "dataset":
+                    self.jobs[ job ] = [ ( assoc.name, dataset_collection ) ]
+                    self.job_id2representative_job[job.id] = job
+                    self.implicit_map_jobs.append( job )
+                else:
+                    self.jobs[ job ].append( ( assoc.name, dataset_collection ) )
+        else:
+            job = DatasetCollectionCreationJob( dataset_collection )
+            self.jobs[ job ] = [ ( None, dataset_collection ) ]
+
+    def __summarize_dataset( self, dataset ):
+        if not self.__check_state( dataset ):
+            return
+
+        original_hda = self.__original_hda( dataset )
+
+        if not original_hda.creating_job_associations:
+            self.jobs[ FakeJob( dataset ) ] = [ ( None, dataset ) ]
+
+        for assoc in original_hda.creating_job_associations:
+            job = assoc.job
+            if job in self.jobs:
+                self.jobs[ job ].append( ( assoc.name, dataset ) )
+            else:
+                self.jobs[ job ] = [ ( assoc.name, dataset ) ]
+                self.job_id2representative_job[job.id] = job
+
+    def __original_hdca( self, hdca ):
+        while hdca.copied_from_history_dataset_collection_association:
+            hdca = hdca.copied_from_history_dataset_collection_association
+        return hdca
+
+    def __original_hda( self, hda ):
+        # if this hda was copied from another, we need to find the job that created the original hda
+        while hda.copied_from_history_dataset_association:
+            hda = hda.copied_from_history_dataset_association
+        return hda
+
+    def __check_state( self, hda ):
+        # FIXME: Create "Dataset.is_finished"
+        if hda.state in ( 'new', 'running', 'queued' ):
+            self.warnings.add( WARNING_SOME_DATASETS_NOT_READY )
+            return
+        return hda
+
+
+def step_inputs( trans, job ):
+    tool = trans.app.toolbox.get_tool( job.tool_id )
+    param_values = job.get_param_values( trans.app, ignore_errors=True )  # If a tool was updated and e.g. had a text value changed to an integer, we don't want a traceback here
+    associations = __cleanup_param_values( tool.inputs, param_values )
+    tool_inputs = tool.params_to_strings( param_values, trans.app )
+    return tool_inputs, associations
+
+
+def __cleanup_param_values( inputs, values ):
+    """
+    Remove 'Data' values from `param_values`, along with metadata cruft,
+    but track the associations.
+    """
+    associations = []
+    # dbkey is pushed in by the framework
+    if 'dbkey' in values:
+        del values['dbkey']
+    root_values = values
+
+    # Recursively clean data inputs and dynamic selects
+    def cleanup( prefix, inputs, values ):
+        for key, input in inputs.items():
+            if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
+                tmp = values[key]
+                values[key] = None
+                # HACK: Nested associations are not yet working, but we
+                #       still need to clean them up so we can serialize
+                # if not( prefix ):
+                if isinstance( tmp, model.DatasetCollectionElement ):
+                    tmp = tmp.first_dataset_instance()
+                if tmp:  # this is false for a non-set optional dataset
+                    if not isinstance(tmp, list):
+                        associations.append( ( tmp.hid, prefix + key ) )
+                    else:
+                        associations.extend( [ (t.hid, prefix + key) for t in tmp] )
+
+                # Cleanup the other deprecated crap associated with datasets
+                # as well. Worse, for nested datasets all the metadata is
+                # being pushed into the root. FIXME: MUST REMOVE SOON
+                key = prefix + key + "_"
+                for k in root_values.keys():
+                    if k.startswith( key ):
+                        del root_values[k]
+            elif isinstance( input, Repeat ):
+                if key in values:
+                    group_values = values[key]
+                    for i, rep_values in enumerate( group_values ):
+                        rep_index = rep_values['__index__']
+                        cleanup( "%s%s_%d|" % (prefix, key, rep_index ), input.inputs, group_values[i] )
+            elif isinstance( input, Conditional ):
+                # Scrub dynamic resource related parameters from workflows,
+                # they cause problems and the workflow probably should include
+                # their state in workflow encoding.
+                if input.name == '__job_resource':
+                    if input.name in values:
+                        del values[input.name]
+                    return
+                if input.name in values:
+                    group_values = values[input.name]
+                    current_case = group_values['__current_case__']
+                    cleanup( "%s%s|" % ( prefix, key ), input.cases[current_case].inputs, group_values )
+            elif isinstance( input, Section ):
+                if input.name in values:
+                    cleanup( "%s%s|" % ( prefix, key ), input.inputs, values[input.name] )
+    cleanup( "", inputs, values )
+    return associations
+
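+# Illustrative worked example (not part of upstream) of the association names
+# built above: a DataToolParameter 'input2' inside a Repeat 'queries' at
+# repeat index 0 yields the association ( <hid>, 'queries_0|input2' ), while
+# the same parameter at the top level would yield just ( <hid>, 'input2' ).
+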
+
+__all__ = ( 'summarize', 'extract_workflow' )
diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py
new file mode 100644
index 0000000..77dee25
--- /dev/null
+++ b/lib/galaxy/workflow/modules.py
@@ -0,0 +1,1367 @@
+"""
+Modules used in building workflows
+"""
+import logging
+from json import dumps, loads
+from xml.etree.ElementTree import Element
+
+from galaxy import exceptions, model, web
+from galaxy.dataset_collections import matching
+from galaxy.jobs.actions.post import ActionBox
+from galaxy.model import PostJobAction
+from galaxy.tools.execute import execute
+from galaxy.tools.parameters import check_param, visit_input_values
+from galaxy.tools.parameters.basic import (
+    parameter_types,
+    DataCollectionToolParameter,
+    DataToolParameter,
+    RuntimeValue,
+    workflow_building_modes
+)
+from galaxy.tools.parameters.wrapped import make_dict_copy
+from galaxy.tools import DefaultToolState
+from galaxy.tools import ToolInputsNotReadyException
+from galaxy.util import odict
+from galaxy.util.bunch import Bunch
+from galaxy.web.framework import formbuilder
+from tool_shed.util import common_util
+
+log = logging.getLogger( __name__ )
+
+# Key into Tool state to describe invocation-specific runtime properties.
+RUNTIME_STEP_META_STATE_KEY = "__STEP_META_STATE__"
+# Key into step runtime state dict describing invocation-specific post job
+# actions (i.e. PJA specified at runtime on top of the workflow-wide defined
+# ones.
+RUNTIME_POST_JOB_ACTIONS_KEY = "__POST_JOB_ACTIONS__"
+NO_REPLACEMENT = object()
+
+
+class WorkflowModule( object ):
+
+    def __init__( self, trans ):
+        self.trans = trans
+
+    # ---- Creating modules from various representations ---------------------
+
+    @classmethod
+    def new( Class, trans, content_id=None ):
+        """
+        Create a new instance of the module with default state
+        """
+        return Class( trans )
+
+    @classmethod
+    def from_dict( Class, trans, d ):
+        """
+        Create a new instance of the module initialized from values in the
+        dictionary `d`.
+        """
+        return Class( trans )
+
+    @classmethod
+    def from_workflow_step( Class, trans, step ):
+        return Class( trans )
+
+    # ---- Saving in various forms ------------------------------------------
+
+    def save_to_step( self, step ):
+        step.type = self.type
+
+    # ---- General attributes -----------------------------------------------
+
+    def get_type( self ):
+        return self.type
+
+    def get_name( self ):
+        return self.name
+
+    def get_content_id( self ):
+        """ If this component has an identifier external to the step (such
+        as a tool or another workflow) return the identifier for that content.
+        """
+        return None
+
+    def get_tooltip( self, static_path='' ):
+        return None
+
+    # ---- Configuration time -----------------------------------------------
+
+    def get_state( self, state=None ):
+        """ Return a serializable representation of the persistable state of
+        the step - for tools DefaultToolState.encode returns a string and
+        for simpler module types a json description is dumped out.
+        """
+        return None
+
+    def update_state( self, incoming ):
+        """ Update the current state of the module against the user supplied
+        parameters in the dict-like object `incoming`.
+        """
+        pass
+
+    def get_errors( self ):
+        """ It seems like this is effectively just used as boolean - some places
+        in the tool shed self.errors is set to boolean, other places 'unavailable',
+        likewise in Galaxy it stores a list containing a string with an unrecognized
+        tool id error message.
+        """
+        return None
+
+    def get_data_inputs( self ):
+        """ Get configure time data input descriptions. """
+        return []
+
+    def get_data_outputs( self ):
+        return []
+
+    def get_runtime_input_dicts( self, step_annotation ):
+        """ Get runtime inputs (inputs and parameters) as simple dictionary. """
+        return []
+
+    def get_config_form( self ):
+        """ Render form that is embedded in workflow editor for modifying the
+        step state of a node.
+        """
+        raise TypeError( "Abstract method" )
+
+    def check_and_update_state( self ):
+        """
+        If the state is not in sync with the current implementation of the
+        module, try to update. Returns a list of messages to be displayed
+        """
+        pass
+
+    def add_dummy_datasets( self, connections=None, steps=None ):
+        # Replace connected inputs with DummyDataset values.
+        pass
+
+    # ---- Run time ---------------------------------------------------------
+
+    def get_runtime_inputs( self, **kwds ):
+        """ Used internally by modules and when displaying inputs in workflow
+        editor and run workflow templates.
+
+        Note: The ToolModule doesn't implement this and these templates contain
+        specialized logic for dealing with the tool and state directly in the
+        case of ToolModules.
+        """
+        raise TypeError( "Abstract method" )
+
+    def compute_runtime_state( self, trans, step_updates=None ):
+        """ Determine the runtime state (potentially different from self.state
+        which describes configuration state). This (again unlike self.state) is
+        currently always a `DefaultToolState` object.
+
+        If `step_updates` is `None`, this is likely for rendering the run form
+        for instance and no runtime properties are available and state must be
+        solely determined by the default runtime state described by the step.
+
+        If `step_updates` are available they describe the runtime properties
+        supplied by the workflow runner.
+        """
+        raise TypeError( "Abstract method" )
+
+    def execute( self, trans, progress, invocation, step ):
+        """ Execute the given workflow step in the given workflow invocation.
+        Use the supplied workflow progress object to track outputs, find
+        inputs, etc...
+        """
+        raise TypeError( "Abstract method" )
+
+    def do_invocation_step_action( self, step, action ):
+        """ Update or set the workflow invocation state action - generic
+        extension point meant to allow users to interact with interactive
+        workflow modules. The action object returned from this method will
+        be attached to the WorkflowInvocationStep and be available the next
+        time the workflow scheduler visits the workflow.
+        """
+        raise exceptions.RequestParameterInvalidException( "Attempting to perform invocation step action on module that does not support actions." )
+
+    def recover_mapping( self, step, step_invocations, progress ):
+        """ Re-populate progress object with information about connections
+        from previously executed steps recorded via step_invocations.
+        """
+        raise TypeError( "Abstract method" )
+
+
+class SimpleWorkflowModule( WorkflowModule ):
+
+    @classmethod
+    def new( Class, trans, content_id=None ):
+        module = Class( trans )
+        module.state = Class.default_state()
+        module.label = None
+        return module
+
+    @classmethod
+    def from_dict( Class, trans, d ):
+        module = Class( trans )
+        state = loads( d["tool_state"] )
+        module.recover_state( state )
+        module.label = d.get("label", None) or None
+        return module
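+    # A hypothetical sketch of the round trip these classmethods support -
+    # the dict `d` mirrors what the workflow editor serializes for a step
+    # (key names beyond "tool_state" and "label" are assumptions here):
+    #
+    #   d = { "tool_state": dumps( { "name": "Input Dataset" } ),
+    #         "label": "my input" }
+    #   module = InputDataModule.from_dict( trans, d )
+    #   assert module.state[ "name" ] == "Input Dataset"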
+
+    @classmethod
+    def from_workflow_step( Class, trans, step ):
+        module = Class( trans )
+        module.recover_state( step.tool_inputs )
+        module.label = step.label
+        return module
+
+    @classmethod
+    def default_state( Class ):
+        """ This method should return a dictionary describing each
+        configuration property and its default value.
+        """
+        raise TypeError( "Abstract method" )
+
+    def save_to_step( self, step ):
+        step.type = self.type
+        step.tool_id = None
+        step.tool_version = None
+        step.tool_inputs = self.state
+
+    def get_state( self, state=None ):
+        if isinstance( state, DefaultToolState ):
+            fake_tool = Bunch( inputs=self.get_runtime_inputs() )
+            return state.encode( fake_tool, self.trans.app )
+        return dumps( self.state )
+
+    def update_state( self, incoming ):
+        self.recover_state( incoming )
+
+    def recover_runtime_state( self, runtime_state ):
+        """ Take runtime state from persisted invocation and convert it
+        into a DefaultToolState object for use during workflow invocation.
+        """
+        fake_tool = Bunch( inputs=self.get_runtime_inputs() )
+        state = DefaultToolState()
+        state.decode( runtime_state, fake_tool, self.trans.app )
+        return state
+
+    def compute_runtime_state( self, trans, step_updates=None ):
+        state = self.get_runtime_state()
+        step_errors = {}
+        if step_updates:
+            for name, param in self.get_runtime_inputs().iteritems():
+                value, error = check_param( trans, param, step_updates.get( name ), step_updates )
+                state.inputs[ name ] = value
+                if error:
+                    step_errors[ name ] = error
+        return state, step_errors
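+    # A minimal sketch of how `step_updates` flows into the runtime state,
+    # assuming a module with a single runtime input named "input":
+    #
+    #   state, errors = module.compute_runtime_state(
+    #       trans, step_updates={ "input": hda } )
+    #   # state.inputs[ "input" ] now holds the checked value of `hda`, and
+    #   # `errors` maps input names to validation messages, if any.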
+
+    def recover_state( self, state, **kwds ):
+        """ Recover state `dict` from simple dictionary describing configuration
+        state (potentially from persisted step state).
+
+        Sub-classes should supply `default_state` method and `state_fields`
+        attribute which are used to build up the state `dict`.
+        """
+        self.state = self.default_state()
+        for key in self.state_fields:
+            if state and key in state:
+                self.state[ key ] = state[ key ]
+
+    def get_config_form( self ):
+        form = self._abstract_config_form( )
+        return self.trans.fill_template( "workflow/editor_generic_form.mako",
+                                         module=self, form=form )
+
+
+class SubWorkflowModule( WorkflowModule ):
+    state_fields = [ ]
+    type = "subworkflow"
+    name = "Subworkflow"
+    default_name = "Subworkflow"
+
+    @classmethod
+    def new( Class, trans, content_id=None ):
+        module = Class( trans )
+        module.subworkflow = SubWorkflowModule.subworkflow_from_content_id( trans, content_id )
+        module.label = None
+        return module
+
+    @classmethod
+    def from_dict( Class, trans, d ):
+        module = Class( trans )
+        if "subworkflow" in d:
+            module.subworkflow = d["subworkflow"]
+        elif "content_id" in d:
+            content_id = d["content_id"]
+            module.subworkflow = SubWorkflowModule.subworkflow_from_content_id( trans, content_id )
+        module.label = d.get("label", None) or None
+        return module
+
+    @classmethod
+    def from_workflow_step( Class, trans, step ):
+        module = Class( trans )
+        module.subworkflow = step.subworkflow
+        module.label = step.label
+        return module
+
+    def save_to_step( self, step ):
+        step.type = self.type
+        step.subworkflow = self.subworkflow
+
+    @classmethod
+    def default_state( Class ):
+        return dict( )
+
+    def get_name( self ):
+        if hasattr( self, 'subworkflow' ) and hasattr( self.subworkflow, 'name' ):
+            return self.subworkflow.name
+        return self.name
+
+    def get_errors( self ):
+        return None
+
+    def get_data_inputs( self ):
+        """ Get configure time data input descriptions. """
+        # Filter subworkflow steps and get inputs
+        step_to_input_type = {
+            "data_input": "dataset",
+            "data_collection_input": "dataset_collection",
+        }
+        inputs = []
+        for step in self.subworkflow.input_steps:
+            name = step.label
+            if name is None:
+                # trans shouldn't really be needed for data inputs...
+                step_module = module_factory.from_workflow_step(self.trans, step)
+                name = step_module.get_runtime_input_dicts(None)[0]["name"]
+
+            if not name:
+                raise Exception("Failed to find name for workflow module.")
+            step_type = step.type
+            assert step_type in step_to_input_type
+            input = dict(
+                input_subworkflow_step_id=step.order_index,
+                name=name,
+                label=name,
+                multiple=False,
+                extensions="input",
+                input_type=step_to_input_type[step_type],
+            )
+            inputs.append(input)
+
+        return inputs
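+    # For illustration, each entry returned above has this shape (the values
+    # shown are hypothetical):
+    #
+    #   { "input_subworkflow_step_id": 0,
+    #     "name": "my input", "label": "my input", "multiple": False,
+    #     "extensions": "input", "input_type": "dataset" }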
+
+    def get_data_outputs( self ):
+        outputs = []
+        for workflow_output in self.subworkflow.workflow_outputs:
+            output_step = workflow_output.workflow_step
+            label = name = workflow_output.label
+            if name is None:
+                name = "%s:%s" % (output_step.order_index, workflow_output.output_name)
+                label = name
+            output = dict(
+                name=name,
+                label=label,
+                extensions=['input'],  # TODO
+            )
+            outputs.append(output)
+        return outputs
+
+    def get_runtime_input_dicts( self, step_annotation ):
+        """ Get runtime inputs (inputs and parameters) as simple dictionary. """
+        return []
+
+    def get_content_id( self ):
+        return self.trans.security.encode_id(self.subworkflow.id)
+
+    def recover_runtime_state( self, runtime_state ):
+        """ Take runtime state from persisted invocation and convert it
+        into a DefaultToolState object for use during workflow invocation.
+        """
+        fake_tool = Bunch( inputs=self.get_runtime_inputs() )
+        state = DefaultToolState()
+        state.decode( runtime_state, fake_tool, self.trans.app )
+        return state
+
+    def get_state( self, state=None ):
+        if isinstance( state, DefaultToolState ):
+            fake_tool = Bunch( inputs=self.get_runtime_inputs() )
+            return state.encode( fake_tool, self.trans.app )
+
+    def compute_runtime_state( self, trans, step_updates=None ):
+        state = self.get_runtime_state()
+        step_errors = {}
+        return state, step_errors
+
+    def recover_state( self, state, **kwds ):
+        """ Recover state `dict` from simple dictionary describing configuration
+        state (potentially from persisted step state).
+
+        Sub-classes should supply `default_state` method and `state_fields`
+        attribute which are used to build up the state `dict`.
+        """
+        self.state = self.default_state()
+        for key in self.state_fields:
+            if state and key in state:
+                self.state[ key ] = state[ key ]
+
+    def get_config_form( self ):
+        form = self._abstract_config_form( )
+        return self.trans.fill_template( "workflow/editor_generic_form.mako",
+                                         module=self, form=form )
+
+    def _abstract_config_form( self ):
+        form = formbuilder.FormBuilder( title=self.get_name() )
+        return form
+
+    def check_and_update_state( self ):
+        """
+        If the state is not in sync with the current implementation of the
+        module, try to update. Returns a list of messages to be displayed
+        """
+        return None
+
+    def add_dummy_datasets( self, connections=None, steps=None ):
+        # Replace connected inputs with DummyDataset values.
+        return None
+
+    def get_runtime_inputs( self, **kwds ):
+        # Two-step improvement to this...
+        # - First pass: verify the nested workflow doesn't have any RuntimeInputs.
+        # - Second pass: actually turn RuntimeInputs into inputs here if possible.
+        return {}
+
+    def execute( self, trans, progress, invocation, step ):
+        """ Execute the given workflow step in the given workflow invocation.
+        Use the supplied workflow progress object to track outputs, find
+        inputs, etc...
+        """
+        subworkflow_invoker = progress.subworkflow_invoker( trans, step )
+        subworkflow_invoker.invoke()
+        subworkflow = subworkflow_invoker.workflow
+        subworkflow_progress = subworkflow_invoker.progress
+        outputs = {}
+        for workflow_output in subworkflow.workflow_outputs:
+            workflow_output_label = workflow_output.label
+            replacement = subworkflow_progress.get_replacement_workflow_output( workflow_output )
+            outputs[ workflow_output_label ] = replacement
+
+        progress.set_step_outputs( step, outputs )
+        return None
+
+    def recover_mapping( self, step, step_invocations, progress ):
+        """ Re-populate progress object with information about connections
+        from previously executed steps recorded via step_invocations.
+        """
+        raise TypeError( "Abstract method" )
+
+    def get_runtime_state( self ):
+        state = DefaultToolState()
+        state.inputs = dict( )
+        return state
+
+    @classmethod
+    def subworkflow_from_content_id(clazz, trans, content_id):
+        from galaxy.managers.workflows import WorkflowsManager
+        workflow_manager = WorkflowsManager(trans.app)
+        subworkflow = workflow_manager.get_owned_workflow( trans, content_id )
+        return subworkflow
+
+
+class InputModule( SimpleWorkflowModule ):
+
+    def get_runtime_state( self ):
+        state = DefaultToolState()
+        state.inputs = dict( input=None )
+        return state
+
+    def get_runtime_input_dicts( self, step_annotation ):
+        name = self.state.get( "name", self.default_name )
+        return [ dict( name=name, description=step_annotation ) ]
+
+    def get_data_inputs( self ):
+        return []
+
+    def execute( self, trans, progress, invocation, step ):
+        job, step_outputs = None, dict( output=step.state.inputs[ 'input' ] )
+
+        # Web controller may set copy_inputs_to_history, API controller always sets
+        # inputs.
+        if invocation.copy_inputs_to_history:
+            for input_dataset_hda in step_outputs.values():
+                content_type = input_dataset_hda.history_content_type
+                if content_type == "dataset":
+                    new_hda = input_dataset_hda.copy( copy_children=True )
+                    invocation.history.add_dataset( new_hda )
+                    step_outputs[ 'input_ds_copy' ] = new_hda
+                elif content_type == "dataset_collection":
+                    new_hdca = input_dataset_hda.copy()
+                    invocation.history.add_dataset_collection( new_hdca )
+                    step_outputs[ 'input_ds_copy' ] = new_hdca
+                else:
+                    raise Exception("Unknown history content encountered")
+        # If coming from UI - we haven't registered invocation inputs yet,
+        # so do that now so dependent steps can be recalculated. In the future
+        # everything should come in from the API and this can be eliminated.
+        if not invocation.has_input_for_step( step.id ):
+            content = step_outputs.values()[ 0 ]
+            if content:
+                invocation.add_input( content, step.id )
+        progress.set_outputs_for_input( step, step_outputs )
+        return job
+
+    def recover_mapping( self, step, step_invocations, progress ):
+        progress.set_outputs_for_input( step )
+
+
+class InputDataModule( InputModule ):
+    type = "data_input"
+    name = "Input dataset"
+    default_name = "Input Dataset"
+    state_fields = [ "name" ]
+
+    @classmethod
+    def default_state( Class ):
+        return dict( name=Class.default_name )
+
+    def _abstract_config_form( self ):
+        form = formbuilder.FormBuilder( title=self.name ) \
+            .add_text( "name", "Name", value=self.state['name'] )
+        return form
+
+    def get_data_outputs( self ):
+        return [ dict( name='output', extensions=['input'] ) ]
+
+    def get_filter_set( self, connections=None ):
+        filter_set = []
+        if connections:
+            for oc in connections:
+                for ic in oc.input_step.module.get_data_inputs():
+                    if 'extensions' in ic and ic[ 'name' ] == oc.input_name:
+                        filter_set += ic[ 'extensions' ]
+        if not filter_set:
+            filter_set = [ 'data' ]
+        return ', '.join( filter_set )
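+    # A hypothetical example of the behavior above: with a connection whose
+    # target tool input declares extensions=[ 'bam' ], the input is filtered
+    # to that format; with no connections it falls back to the catch-all:
+    #
+    #   module.get_filter_set( connections )  # -> "bam"
+    #   module.get_filter_set( None )         # -> "data"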
+
+    def get_runtime_inputs( self, connections=None ):
+        label = self.state.get( "name", "Input Dataset" )
+        return dict( input=DataToolParameter( None, Element( "param", name="input", label=label, multiple=False, type="data", format=self.get_filter_set( connections ) ), self.trans ) )
+
+
+class InputDataCollectionModule( InputModule ):
+    default_name = "Input Dataset Collection"
+    default_collection_type = "list"
+    type = "data_collection_input"
+    name = "Input dataset collection"
+    collection_type = default_collection_type
+    state_fields = [ "name", "collection_type" ]
+
+    @classmethod
+    def default_state( Class ):
+        return dict( name=Class.default_name, collection_type=Class.default_collection_type )
+
+    def get_runtime_inputs( self, **kwds ):
+        label = self.state.get( "name", self.default_name )
+        collection_type = self.state.get( "collection_type", self.default_collection_type )
+        input_element = Element( "param", name="input", label=label, type="data_collection", collection_type=collection_type )
+        return dict( input=DataCollectionToolParameter( None, input_element, self.trans ) )
+
+    def _abstract_config_form( self ):
+        type_hints = odict.odict()
+        type_hints[ "list" ] = "List of Datasets"
+        type_hints[ "paired" ] = "Dataset Pair"
+        type_hints[ "list:paired" ] = "List of Dataset Pairs"
+
+        type_input = formbuilder.DatalistInput(
+            name="collection_type",
+            label="Collection Type",
+            value=self.state[ "collection_type" ],
+            extra_attributes=dict(refresh_on_change='true'),
+            options=type_hints
+        )
+        form = formbuilder.FormBuilder(
+            title=self.name
+        ).add_text(
+            "name", "Name", value=self.state['name']
+        )
+        form.inputs.append( type_input )
+        return form
+
+    def get_data_outputs( self ):
+        return [
+            dict(
+                name='output',
+                extensions=['input_collection'],
+                collection=True,
+                collection_type=self.state[ 'collection_type' ]
+            )
+        ]
+
+
+class InputParameterModule( SimpleWorkflowModule ):
+    default_name = "input_parameter"
+    default_parameter_type = "text"
+    default_optional = False
+    type = "parameter_input"
+    name = default_name
+    parameter_type = default_parameter_type
+    optional = default_optional
+    state_fields = [
+        "name",
+        "parameter_type",
+        "optional",
+    ]
+
+    @classmethod
+    def default_state( Class ):
+        return dict(
+            name=Class.default_name,
+            parameter_type=Class.default_parameter_type,
+            optional=Class.default_optional,
+        )
+
+    def _abstract_config_form( self ):
+        form = formbuilder.FormBuilder(
+            title=self.name
+        ).add_text(
+            "name", "Name", value=self.state['name']
+        ).add_select(
+            "parameter_type", "Parameter Type", value=self.state['parameter_type'],
+            options=[
+                ('text', "Text"),
+                ('integer', "Integer"),
+                ('float', "Float"),
+                ('boolean', "Boolean (True or False)"),
+                ('color', "Color"),
+            ]
+        ).add_checkbox(
+            "optional", "Optional", value=self.state['optional']
+        )
+
+        return form
+
+    def get_runtime_inputs( self, **kwds ):
+        label = self.state.get( "name", self.default_name )
+        parameter_type = self.state.get("parameter_type", self.default_parameter_type)
+        optional = self.state.get("optional", self.default_optional)
+        if parameter_type not in ["text", "boolean", "integer", "float", "color"]:
+            raise ValueError("Invalid parameter type for workflow parameters encountered.")
+        parameter_class = parameter_types[parameter_type]
+        parameter_kwds = {}
+        if parameter_type in ["integer", "float"]:
+            parameter_kwds["value"] = str(0)
+
+        # TODO: Use a dict-based description from YAML tool source
+        element = Element("param", name="input", label=label, type=parameter_type, optional=str(optional), **parameter_kwds )
+        input = parameter_class( None, element )
+        return dict( input=input )
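+    # The element built above is equivalent to this hypothetical tool XML
+    # (shown for parameter_type "integer" with the defaults applied):
+    #
+    #   <param name="input" label="input_parameter" type="integer"
+    #          optional="False" value="0" />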
+
+    def get_runtime_state( self ):
+        state = DefaultToolState()
+        state.inputs = dict( input=None )
+        return state
+
+    def get_runtime_input_dicts( self, step_annotation ):
+        name = self.state.get( "name", self.default_name )
+        return [ dict( name=name, description=step_annotation ) ]
+
+    def get_data_inputs( self ):
+        return []
+
+    def execute( self, trans, progress, invocation, step ):
+        job, step_outputs = None, dict( output=step.state.inputs[ 'input' ] )
+        progress.set_outputs_for_input( step, step_outputs )
+        return job
+
+
+class PauseModule( SimpleWorkflowModule ):
+    """ Initially this module will unconditionally pause a workflow - will aim
+    to allow conditional pausing later on.
+    """
+    type = "pause"
+    name = "Pause for dataset review"
+    default_name = "Pause for Dataset Review"
+    state_fields = [ "name" ]
+
+    @classmethod
+    def default_state( Class ):
+        return dict( name=Class.default_name )
+
+    def get_data_inputs( self ):
+        input = dict(
+            name="input",
+            label="Dataset for Review",
+            multiple=False,
+            extensions='input',
+            input_type="dataset",
+        )
+        return [ input ]
+
+    def get_data_outputs( self ):
+        return [ dict( name="output", label="Reviewed Dataset", extensions=['input'] ) ]
+
+    def _abstract_config_form( self ):
+        form = formbuilder.FormBuilder(
+            title=self.name
+        ).add_text( "name", "Name", value=self.state['name'] )
+        return form
+
+    def get_runtime_inputs( self, **kwds ):
+        return dict( )
+
+    def get_runtime_input_dicts( self, step_annotation ):
+        return []
+
+    def get_runtime_state( self ):
+        state = DefaultToolState()
+        state.inputs = dict( )
+        return state
+
+    def execute( self, trans, progress, invocation, step ):
+        progress.mark_step_outputs_delayed( step )
+        return None
+
+    def recover_mapping( self, step, step_invocations, progress ):
+        if step_invocations:
+            step_invocation = step_invocations[0]
+            action = step_invocation.action
+            if action:
+                connection = step.input_connections_by_name[ "input" ][ 0 ]
+                replacement = progress.replacement_for_connection( connection )
+                progress.set_step_outputs( step, { 'output': replacement } )
+                return
+            elif action is False:
+                raise CancelWorkflowEvaluation()
+        raise DelayedWorkflowEvaluation()
+
+    def do_invocation_step_action( self, step, action ):
+        """ Update or set the workflow invocation state action - generic
+        extension point meant to allow users to interact with interactive
+        workflow modules. The action object returned from this method will
+        be attached to the WorkflowInvocationStep and be available the next
+        time the workflow scheduler visits the workflow.
+        """
+        return bool( action )
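+    # A sketch of the review flow, under the assumption that the workflow
+    # scheduler re-visits the step on each pass (the action payload here is
+    # hypothetical):
+    #
+    #   module.execute( trans, progress, invocation, step )  # outputs delayed
+    #   module.do_invocation_step_action( step, action={ "approve": True } )
+    #   # -> True is attached to the WorkflowInvocationStep; the next
+    #   # recover_mapping() call forwards the reviewed dataset downstream.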
+
+
+class ToolModule( WorkflowModule ):
+
+    type = "tool"
+
+    def __init__( self, trans, tool_id, tool_version=None ):
+        self.trans = trans
+        self.tool_id = tool_id
+        self.tool = trans.app.toolbox.get_tool( tool_id, tool_version=tool_version )
+        self.post_job_actions = {}
+        self.runtime_post_job_actions = {}
+        self.workflow_outputs = []
+        self.state = None
+        self.version_changes = []
+        if self.tool:
+            self.errors = None
+        else:
+            self.errors = {}
+            self.errors[ tool_id ] = 'Tool unavailable'
+
+    @classmethod
+    def new( Class, trans, content_id=None ):
+        module = Class( trans, content_id )
+        if module.tool is None:
+            error_message = "Attempted to create new workflow module for invalid tool_id, no tool with id - %s." % content_id
+            raise Exception( error_message )
+        module.state = module.tool.new_state( trans )
+        module.label = None
+        return module
+
+    @classmethod
+    def from_dict( Class, trans, d ):
+        tool_id = d.get( 'content_id', None )
+        if tool_id is None:
+            tool_id = d.get( 'tool_id', None )  # Older workflows will have exported this as tool_id.
+        if tool_id is None:
+            raise exceptions.RequestParameterInvalidException("No content id could be located for for step [%s]" % d)
+        tool_version = str( d.get( 'tool_version', None ) )
+        module = Class( trans, tool_id, tool_version=tool_version )
+        module.state = DefaultToolState()
+        module.label = d.get("label", None) or None
+        if module.tool is not None:
+            message = ""
+            if tool_id != module.tool_id:
+                message += "The tool (id '%s') specified in this step is not available. Using the tool with id %s instead." % (tool_id, module.tool_id)
+            if d.get('tool_version', 'Unspecified') != module.get_tool_version():
+                message += "%s: using version '%s' instead of version '%s' specified in this workflow." % ( tool_id, module.get_tool_version(), d.get( 'tool_version', 'Unspecified' ) )
+            if message:
+                log.debug(message)
+                module.version_changes.append(message)
+            if d[ "tool_state" ]:
+                module.state.decode( d[ "tool_state" ], module.tool, module.trans.app )
+        module.errors = d.get( "tool_errors", None )
+        module.post_job_actions = d.get( "post_job_actions", {} )
+        module.workflow_outputs = d.get( "workflow_outputs", [] )
+        return module
+
+    @classmethod
+    def from_workflow_step( Class, trans, step ):
+        toolbox = trans.app.toolbox
+        tool_id = step.tool_id
+        if toolbox:
+            # See if we have access to a different version of the tool.
+            # TODO: If workflows are ever enhanced to use tool version
+            # in addition to tool id, enhance the selection process here
+            # to retrieve the correct version of the tool.
+            tool_id = toolbox.get_tool_id( tool_id )
+        if ( toolbox and tool_id ):
+            if step.config:
+                # This step has its state saved in the config field due to the
+                # tool being previously unavailable.
+                return module_factory.from_dict(trans, loads(step.config))
+            tool_version = step.tool_version
+            module = Class( trans, tool_id, tool_version=tool_version )
+            message = ""
+            if step.tool_id != module.tool_id:  # This means the exact version of the tool is not installed. We inform the user.
+                old_tool_shed = step.tool_id.split( "/repos/" )[0]
+                if old_tool_shed not in tool_id:  # Only display the following warning if the tool comes from a different tool shed
+                    old_tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, old_tool_shed )
+                    if not old_tool_shed_url:  # a tool from a different tool_shed has been found, but the original tool shed has been deactivated
+                        old_tool_shed_url = "http://" + old_tool_shed  # let's just assume it's either http, or a http is forwarded to https.
+                    old_url = old_tool_shed_url + "/view/%s/%s/" % (module.tool.repository_owner, module.tool.repository_name)
+                    new_url = module.tool.tool_shed_repository.get_sharable_url( module.tool.app ) + '/%s/' % module.tool.tool_shed_repository.changeset_revision
+                    new_tool_shed_url = new_url.split( "/view" )[0]
+                    message += "The tool \'%s\', version %s by the owner %s installed from <a href=\"%s\" target=\"_blank\">%s</a> is not available. " % (module.tool.name, tool_version, module.tool.repository_owner, old_url, old_tool_shed_url)
+                    message += "A derivation of this tool installed from <a href=\"%s\" target=\"_blank\">%s</a> will be used instead. " % (new_url, new_tool_shed_url)
+            if step.tool_version and (step.tool_version != module.tool.version):
+                message += "<span title=\"tool id '%s'\">Using version '%s' instead of version '%s' specified in this workflow. " % (tool_id, module.tool.version, step.tool_version)
+            if message:
+                log.debug(message)
+                module.version_changes.append(message)
+            module.recover_state( step.tool_inputs )
+            module.errors = step.tool_errors
+            module.workflow_outputs = step.workflow_outputs
+            module.label = step.label or None
+            pjadict = {}
+            for pja in step.post_job_actions:
+                pjadict[pja.action_type] = pja
+            module.post_job_actions = pjadict
+            return module
+        return None
+
+    def recover_state( self, state, **kwds ):
+        """ Recover module configuration state property (a `DefaultToolState`
+        object) using the tool's `params_from_strings` method.
+        """
+        app = self.trans.app
+        self.state = DefaultToolState()
+        params_from_kwds = dict(
+            ignore_errors=kwds.get( "ignore_errors", True )
+        )
+        self.state.inputs = self.tool.params_from_strings( state, app, **params_from_kwds )
+
+    def recover_runtime_state( self, runtime_state ):
+        """ Take runtime state from persisted invocation and convert it
+        into a DefaultToolState object for use during workflow invocation.
+        """
+        state = DefaultToolState()
+        app = self.trans.app
+        state.decode( runtime_state, self.tool, app )
+        state_dict = loads( runtime_state )
+        if RUNTIME_STEP_META_STATE_KEY in state_dict:
+            self.__restore_step_meta_runtime_state( loads( state_dict[ RUNTIME_STEP_META_STATE_KEY ] ) )
+        return state
+
+    def save_to_step( self, step ):
+        step.type = self.type
+        step.tool_id = self.tool_id
+        if self.tool:
+            step.tool_version = self.get_tool_version()
+            step.tool_inputs = self.tool.params_to_strings( self.state.inputs, self.trans.app )
+        else:
+            step.tool_version = None
+            step.tool_inputs = None
+        step.tool_errors = self.errors
+        for k, v in self.post_job_actions.iteritems():
+            pja = self.__to_pja( k, v, step )
+            self.trans.sa_session.add( pja )
+
+    def __to_pja( self, key, value, step ):
+        if 'output_name' in value:
+            output_name = value['output_name']
+        else:
+            output_name = None
+        if 'action_arguments' in value:
+            action_arguments = value['action_arguments']
+        else:
+            action_arguments = None
+        return PostJobAction(value['action_type'], step, output_name, action_arguments)
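+    # For illustration, a `value` handled by __to_pja might look like this
+    # hypothetical rename action:
+    #
+    #   { "action_type": "RenameDatasetAction",
+    #     "output_name": "out_file1",
+    #     "action_arguments": { "newname": "Filtered data" } }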
+
+    def get_name( self ):
+        if self.tool:
+            return self.tool.name
+        return 'unavailable'
+
+    def get_content_id( self ):
+        return self.tool_id
+
+    def get_tool_version( self ):
+        return self.tool.version
+
+    def get_state( self, state=None ):
+        state = state or self.state
+        return state.encode( self.tool, self.trans.app )
+
+    def get_errors( self ):
+        return self.errors
+
+    def get_tooltip( self, static_path='' ):
+        if self.tool.help:
+            return self.tool.help.render( host_url=web.url_for('/'), static_path=static_path )
+        else:
+            return None
+
+    def get_data_inputs( self ):
+        data_inputs = []
+
+        def callback( input, prefixed_name, prefixed_label, **kwargs ):
+            if not hasattr( input, 'hidden' ) or not input.hidden:
+                if isinstance( input, DataToolParameter ):
+                    data_inputs.append( dict(
+                        name=prefixed_name,
+                        label=prefixed_label,
+                        multiple=input.multiple,
+                        extensions=input.extensions,
+                        input_type="dataset", ) )
+                elif isinstance( input, DataCollectionToolParameter ):
+                    data_inputs.append( dict(
+                        name=prefixed_name,
+                        label=prefixed_label,
+                        multiple=input.multiple,
+                        input_type="dataset_collection",
+                        collection_types=input.collection_types,
+                        extensions=input.extensions,
+                    ) )
+
+        visit_input_values( self.tool.inputs, self.state.inputs, callback )
+        return data_inputs
+
+    def get_data_outputs( self ):
+        data_outputs = []
+        for name, tool_output in self.tool.outputs.iteritems():
+            extra_kwds = {}
+            if tool_output.collection:
+                extra_kwds["collection"] = True
+                extra_kwds["collection_type"] = tool_output.structure.collection_type
+                formats = [ 'input' ]  # TODO: fix
+            elif tool_output.format_source is not None:
+                formats = [ 'input' ]  # default to special name "input" which removes restrictions on connections
+            else:
+                formats = [ tool_output.format ]
+            for change_elem in tool_output.change_format:
+                for when_elem in change_elem.findall( 'when' ):
+                    format = when_elem.get( 'format', None )
+                    if format and format not in formats:
+                        formats.append( format )
+            data_outputs.append(
+                dict(
+                    name=name,
+                    extensions=formats,
+                    **extra_kwds
+                )
+            )
+        return data_outputs
+
+    def get_runtime_input_dicts( self, step_annotation ):
+        # Step is a tool and may have runtime inputs.
+        input_dicts = []
+        for name, val in self.state.inputs.items():
+            input_type = type( val )
+            if input_type == RuntimeValue:
+                input_dicts.append( { "name": name, "description": "runtime parameter for tool %s" % self.get_name() } )
+            elif input_type == dict:
+                # Input type is described by a dict, e.g. indexed parameters.
+                for partval in val.values():
+                    if type( partval ) == RuntimeValue:
+                        input_dicts.append( { "name": name, "description": "runtime parameter for tool %s" % self.get_name() } )
+        return input_dicts
+
+    def get_post_job_actions( self, incoming ):
+        return ActionBox.handle_incoming( incoming )
+
+    def get_config_form( self ):
+        self.add_dummy_datasets()
+        return self.trans.fill_template( "workflow/editor_tool_form.mako", module=self,
+                                         tool=self.tool, values=self.state.inputs, errors=( self.errors or {} ) )
+
+    def update_state( self, incoming ):
+        self.recover_state( incoming )
+
+    def check_and_update_state( self ):
+        inputs = self.state.inputs
+        return self.tool.check_and_update_param_values( inputs, self.trans, workflow_building_mode=True )
+
+    def compute_runtime_state( self, trans, step_updates=None ):
+        # Warning: This method destructively modifies existing step state.
+        step_errors = {}
+        state = self.state
+        self.runtime_post_job_actions = {}
+        if step_updates:
+
+            def update_value( input, context, prefixed_name, **kwargs ):
+                if prefixed_name in step_updates or '__force_update__' + prefixed_name in step_updates:
+                    value, error = check_param( trans, input, step_updates.get( prefixed_name ), context )
+                    if error is not None:
+                        step_errors[ prefixed_name ] = error
+                    return value
+                return NO_REPLACEMENT
+
+            self.runtime_post_job_actions = step_updates.get( RUNTIME_POST_JOB_ACTIONS_KEY, {} )
+            visit_input_values( self.tool.inputs, state.inputs, update_value, no_replacement_value=NO_REPLACEMENT )
+            step_metadata_runtime_state = self.__step_meta_runtime_state()
+            if step_metadata_runtime_state:
+                state.inputs[ RUNTIME_STEP_META_STATE_KEY ] = step_metadata_runtime_state
+        return state, step_errors
+
+    def __step_meta_runtime_state( self ):
+        """ Build a dictionary a of meta-step runtime state (state about how
+        the workflow step - not the tool state) to be serialized with the Tool
+        state.
+        """
+        return { RUNTIME_POST_JOB_ACTIONS_KEY: self.runtime_post_job_actions }
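+    # When serialized alongside the tool state, the meta state amounts to a
+    # small JSON blob keyed by RUNTIME_STEP_META_STATE_KEY, roughly:
+    #
+    #   { "__STEP_META_STATE__": { "__POST_JOB_ACTIONS__": { ... } } }
+    #
+    # (the literal key names above are assumptions - the real values come
+    # from the RUNTIME_* constants defined at module level).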
+
+    def __restore_step_meta_runtime_state( self, step_runtime_state ):
+        if RUNTIME_POST_JOB_ACTIONS_KEY in step_runtime_state:
+            self.runtime_post_job_actions = step_runtime_state[ RUNTIME_POST_JOB_ACTIONS_KEY ]
+
+    def execute( self, trans, progress, invocation, step ):
+        tool = trans.app.toolbox.get_tool( step.tool_id, tool_version=step.tool_version )
+        tool_state = step.state
+        # Not strictly needed - but keep Tool state clean by stripping runtime
+        # metadata parameters from it.
+        if RUNTIME_STEP_META_STATE_KEY in tool_state.inputs:
+            del tool_state.inputs[ RUNTIME_STEP_META_STATE_KEY ]
+        collections_to_match = self._find_collections_to_match( tool, progress, step )
+        # Have implicit collections...
+        if collections_to_match.has_collections():
+            collection_info = self.trans.app.dataset_collections_service.match_collections( collections_to_match )
+        else:
+            collection_info = None
+
+        param_combinations = []
+        if collection_info:
+            iteration_elements_iter = collection_info.slice_collections()
+        else:
+            iteration_elements_iter = [ None ]
+
+        for iteration_elements in iteration_elements_iter:
+            execution_state = tool_state.copy()
+            # TODO: Move next step into copy()
+            execution_state.inputs = make_dict_copy( execution_state.inputs )
+
+            # Connect up
+            def callback( input, prefixed_name, **kwargs ):
+                replacement = NO_REPLACEMENT
+                if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
+                    if iteration_elements and prefixed_name in iteration_elements:
+                        if isinstance( input, DataToolParameter ):
+                            # Pull out dataset instance from element.
+                            replacement = iteration_elements[ prefixed_name ].dataset_instance
+                            if hasattr(iteration_elements[ prefixed_name ], u'element_identifier') and iteration_elements[ prefixed_name ].element_identifier:
+                                replacement.element_identifier = iteration_elements[ prefixed_name ].element_identifier
+                        else:
+                            # If collection - just use element model object.
+                            replacement = iteration_elements[ prefixed_name ]
+                    else:
+                        replacement = progress.replacement_for_tool_input( step, input, prefixed_name )
+                else:
+                    replacement = progress.replacement_for_tool_input( step, input, prefixed_name )
+                return replacement
+
+            try:
+                # Replace DummyDatasets with historydatasetassociations
+                visit_input_values( tool.inputs, execution_state.inputs, callback, no_replacement_value=NO_REPLACEMENT )
+            except KeyError as k:
+                message_template = "Error due to input mapping of '%s' in '%s'.  A common cause of this is conditional outputs that cannot be determined until runtime, please review your workflow."
+                # The template names the missing input first and the tool second.
+                message = message_template % (k.message, tool.name)
+                raise exceptions.MessageException( message )
+            param_combinations.append( execution_state.inputs )
+
+        try:
+            execution_tracker = execute(
+                trans=self.trans,
+                tool=tool,
+                param_combinations=param_combinations,
+                history=invocation.history,
+                collection_info=collection_info,
+                workflow_invocation_uuid=invocation.uuid.hex
+            )
+        except ToolInputsNotReadyException:
+            raise DelayedWorkflowEvaluation()
+
+        if collection_info:
+            step_outputs = dict( execution_tracker.implicit_collections )
+        else:
+            step_outputs = dict( execution_tracker.output_datasets )
+            step_outputs.update( execution_tracker.output_collections )
+        progress.set_step_outputs( step, step_outputs )
+        jobs = execution_tracker.successful_jobs
+        for job in jobs:
+            self._handle_post_job_actions( step, job, invocation.replacement_dict )
+        if execution_tracker.execution_errors:
+            failed_count = len(execution_tracker.execution_errors)
+            success_count = len(execution_tracker.successful_jobs)
+            all_count = failed_count + success_count
+            message = "Failed to create %d out of %s job(s) for workflow step." % (failed_count, all_count)
+            raise Exception(message)
+        return jobs
+
+    def _find_collections_to_match( self, tool, progress, step ):
+        collections_to_match = matching.CollectionsToMatch()
+
+        def callback( input, prefixed_name, **kwargs ):
+            is_data_param = isinstance( input, DataToolParameter )
+            if is_data_param and not input.multiple:
+                data = progress.replacement_for_tool_input( step, input, prefixed_name )
+                if isinstance( data, model.HistoryDatasetCollectionAssociation ):
+                    collections_to_match.add( prefixed_name, data )
+
+            is_data_collection_param = isinstance( input, DataCollectionToolParameter )
+            if is_data_collection_param and not input.multiple:
+                data = progress.replacement_for_tool_input( step, input, prefixed_name )
+                history_query = input._history_query( self.trans )
+                subcollection_type_description = history_query.can_map_over( data )
+                if subcollection_type_description:
+                    collections_to_match.add( prefixed_name, data, subcollection_type=subcollection_type_description.collection_type )
+
+        visit_input_values( tool.inputs, step.state.inputs, callback )
+        return collections_to_match
+
+    def _handle_post_job_actions( self, step, job, replacement_dict ):
+        # Create new PJA associations with the created job, to be run on completion.
+        # PJA Parameter Replacement (only applies to immediate actions-- rename specifically, for now)
+        # Pass along replacement dict with the execution of the PJA so we don't have to modify the object.
+
+        # Combine workflow and runtime post job actions into the effective post
+        # job actions for this execution.
+        flush_required = False
+        effective_post_job_actions = step.post_job_actions[:]
+        for key, value in self.runtime_post_job_actions.iteritems():
+            effective_post_job_actions.append( self.__to_pja( key, value, None ) )
+        for pja in effective_post_job_actions:
+            if pja.action_type in ActionBox.immediate_actions:
+                ActionBox.execute( self.trans.app, self.trans.sa_session, pja, job, replacement_dict )
+            else:
+                pjaa = model.PostJobActionAssociation( pja, job_id=job.id )
+                self.trans.sa_session.add(pjaa)
+                flush_required = True
+        if flush_required:
+            self.trans.sa_session.flush()
+
+    def add_dummy_datasets( self, connections=None, steps=None ):
+        if connections:
+            # Store connections by input name
+            input_connections_by_name = dict( ( conn.input_name, conn ) for conn in connections )
+        else:
+            input_connections_by_name = {}
+
+        # Any input needs to have value RuntimeValue or obtain the value from connected steps
+        def callback( input, prefixed_name, context, **kwargs ):
+            if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
+                if connections is not None and steps is not None and self.trans.workflow_building_mode is workflow_building_modes.USE_HISTORY:
+                    if prefixed_name in input_connections_by_name:
+                        connection = input_connections_by_name[ prefixed_name ]
+                        output_step = next( output_step for output_step in steps if connection.output_step_id == output_step.id )
+                        if output_step.type.startswith( 'data' ):
+                            output_inputs = output_step.module.get_runtime_inputs( connections=connections )
+                            output_value = output_inputs[ 'input' ].get_initial_value( self.trans, context )
+                            if isinstance( input, DataToolParameter ) and isinstance( output_value, self.trans.app.model.HistoryDatasetCollectionAssociation ):
+                                return output_value.to_hda_representative()
+                            return output_value
+                        return RuntimeValue()
+                    else:
+                        return input.get_initial_value( self.trans, context )
+                elif connections is None or prefixed_name in input_connections_by_name:
+                    return RuntimeValue()
+
+        visit_input_values( self.tool.inputs, self.state.inputs, callback )
+
+    def recover_mapping( self, step, step_invocations, progress ):
+        # Grab a job representing this invocation - for normal workflows
+        # there will be just one job but if this step was mapped over there
+        # may be many.
+        job_0 = step_invocations[ 0 ].job
+
+        outputs = {}
+        for job_output in job_0.output_datasets:
+            replacement_name = job_output.name
+            replacement_value = job_output.dataset
+            # If was a mapping step, grab the output mapped collection for
+            # replacement instead.
+            if replacement_value.hidden_beneath_collection_instance:
+                replacement_value = replacement_value.hidden_beneath_collection_instance
+            outputs[ replacement_name ] = replacement_value
+        for job_output_collection in job_0.output_dataset_collection_instances:
+            replacement_name = job_output_collection.name
+            replacement_value = job_output_collection.dataset_collection_instance
+            outputs[ replacement_name ] = replacement_value
+
+        progress.set_step_outputs( step, outputs )
+
+
+class WorkflowModuleFactory( object ):
+
+    def __init__( self, module_types ):
+        self.module_types = module_types
+
+    def new( self, trans, type, content_id=None ):
+        """
+        Return module for type and (optional) content_id initialized with
+        new / default state.
+        """
+        assert type in self.module_types
+        return self.module_types[type].new( trans, content_id )
+
+    def from_dict( self, trans, d, **kwargs ):
+        """
+        Return module initialized from the data in dictionary `d`.
+        """
+        type = d['type']
+        assert type in self.module_types
+        return self.module_types[type].from_dict( trans, d, **kwargs )
+
+    def from_workflow_step( self, trans, step ):
+        """
+        Return module initialized from the WorkflowStep object `step`.
+        """
+        type = step.type
+        return self.module_types[type].from_workflow_step( trans, step )
+
+
+def is_tool_module_type( module_type ):
+    return not module_type or module_type == "tool"
+
+
+module_types = dict(
+    data_input=InputDataModule,
+    data_collection_input=InputDataCollectionModule,
+    parameter_input=InputParameterModule,
+    pause=PauseModule,
+    tool=ToolModule,
+    subworkflow=SubWorkflowModule,
+)
+module_factory = WorkflowModuleFactory( module_types )
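+# Typical use of the factory, sketched (the step and dict arguments are
+# hypothetical; a pause step's dict would also carry its "tool_state"):
+#
+#   module = module_factory.new( trans, "data_input" )
+#   module = module_factory.from_workflow_step( trans, step )
+#   module = module_factory.from_dict( trans, { "type": "pause", ... } )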
+
+
+def load_module_sections( trans ):
+    """ Get abstract description of the workflow modules this Galaxy instance
+    is configured with.
+    """
+    module_sections = {}
+    module_sections['inputs'] = {
+        "name": "inputs",
+        "title": "Inputs",
+        "modules": [
+            {
+                "name": "data_input",
+                "title": "Input Dataset",
+                "description": "Input dataset"
+            },
+            {
+                "name": "data_collection_input",
+                "title": "Input Dataset Collection",
+                "description": "Input dataset collection"
+            },
+        ],
+    }
+
+    if trans.app.config.enable_beta_workflow_modules:
+        module_sections['experimental'] = {
+            "name": "experimental",
+            "title": "Experimental",
+            "modules": [
+                {
+                    "name": "pause",
+                    "title": "Pause Workflow for Dataset Review",
+                    "description": "Pause for Review"
+                },
+                {
+                    "name": "parameter_input",
+                    "title": "Parameter Input",
+                    "description": "Simple inputs used for workflow logic"
+                },
+            ],
+        }
+
+    return module_sections
+
+
+class MissingToolException( Exception ):
+    """ WorkflowModuleInjector will raise this if the tool corresponding to the
+    module is missing. """
+
+
+class DelayedWorkflowEvaluation(Exception):
+    pass
+
+
+class CancelWorkflowEvaluation(Exception):
+    pass
+
+
+class WorkflowModuleInjector(object):
+    """ Injects workflow step objects from the ORM with appropriate module and
+    module generated/influenced state. """
+
+    def __init__( self, trans, allow_tool_state_corrections=False ):
+        self.trans = trans
+        self.allow_tool_state_corrections = allow_tool_state_corrections
+
+    def inject( self, step, step_args=None, steps=None ):
+        """ Pre-condition: `step` is an ORM object coming from the database, if
+        supplied `step_args` is the representation of the inputs for that step
+        supplied via web form.
+
+        Post-condition: The supplied `step` has new non-persistent attributes
+        useful during workflow invocation. These include 'upgrade_messages',
+        'state', 'input_connections_by_name', and 'module'.
+
+        If `step_args` is provided from a web form, it is applied to generate
+        'state'; otherwise the state is just obtained from the database.
+        """
+        step_errors = None
+        step.upgrade_messages = {}
+
+        # Make connection information available on each step by input name.
+        step.setup_input_connections_by_name()
+
+        # Populate module.
+        module = step.module = module_factory.from_workflow_step( self.trans, step )
+        if not module:
+            step.module = None
+            step.state = None
+            raise MissingToolException(step.tool_id)
+
+        # Fix any missing parameters
+        step.upgrade_messages = module.check_and_update_state()
+
+        # Any connected input needs to have value DummyDataset (these
+        # are not persisted so we need to do it every time)
+        module.add_dummy_datasets( connections=step.input_connections, steps=steps )
+        state, step_errors = module.compute_runtime_state( self.trans, step_args )
+        step.state = state
+        if step.type == "subworkflow":
+            subworkflow = step.subworkflow
+            populate_module_and_state( self.trans, subworkflow, param_map={}, )
+        return step_errors
+
+
+def populate_module_and_state( trans, workflow, param_map, allow_tool_state_corrections=False, module_injector=None ):
+    """ Used by API but not web controller, walks through a workflow's steps
+    and populates transient module and state attributes on each.
+    """
+    if module_injector is None:
+        module_injector = WorkflowModuleInjector( trans, allow_tool_state_corrections )
+    for step in workflow.steps:
+        step_args = param_map.get( step.id, {} )
+        step_errors = module_injector.inject( step, step_args=step_args )
+        if step.type == 'tool' or step.type is None:
+            if step_errors:
+                raise exceptions.MessageException( step_errors, err_data={ step.order_index: step_errors } )
+            if step.upgrade_messages:
+                if allow_tool_state_corrections:
+                    log.debug( 'Workflow step "%i" had upgrade messages: %s', step.id, step.upgrade_messages )
+                else:
+                    raise exceptions.MessageException( step.upgrade_messages, err_data={ step.order_index: step.upgrade_messages } )
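+
+
+# A sketch of how an API controller might drive the machinery above
+# (the argument values shown are hypothetical):
+#
+#   param_map = { step_id: { "input": hda_id } }
+#   populate_module_and_state( trans, workflow, param_map )
+#   # each step now carries .module and .state; MessageException is raised
+#   # if any tool step reported errors or unapproved upgrade messages.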
diff --git a/lib/galaxy/workflow/render.py b/lib/galaxy/workflow/render.py
new file mode 100644
index 0000000..1534d94
--- /dev/null
+++ b/lib/galaxy/workflow/render.py
@@ -0,0 +1,151 @@
+import svgwrite
+
+MARGIN = 5
+LINE_SPACING = 15
+STANDALONE_SVG_TEMPLATE = """<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+  "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+%s"""
+
+
+class WorkflowCanvas( object ):
+
+    def __init__( self ):
+        self.canvas = svgwrite.Drawing(profile='full')
+
+        self.connectors = []
+        self.boxes = []
+        self.text = []
+        self.in_pos = {}
+        self.out_pos = {}
+        self.widths = {}
+        self.max_x = 0
+        self.max_y = 0
+        self.max_width = 0
+        self.data = []
+
+    def finish( self ):
+        # max_x, max_y, max_width = self.max_x, self.max_y, self.max_width
+        for box in self.boxes:
+            self.canvas.add(box)
+        for connector in self.connectors:
+            self.canvas.add(connector)
+        text_style_layer = self.canvas.g(style="font-family: Helvetica, Arial, FreeSans, Sans, sans, sans-serif;")
+        for text in self.text:
+            text_style_layer.add(text)
+        self.canvas.add(text_style_layer)
+        return self.canvas
+
+    def add_boxes( self, step_dict, width, name_fill ):
+        x, y = step_dict[ 'position' ][ 'left' ], step_dict[ 'position' ][ 'top' ]
+        self.boxes.append(svgwrite.shapes.Rect((x - MARGIN, y), (width, 30), fill=name_fill, stroke='#000000'))
+        box_height = ( len( step_dict[ 'data_inputs' ] ) + len( step_dict[ 'data_outputs' ] ) ) * LINE_SPACING + MARGIN
+        # Draw separator line.
+        if len( step_dict[ 'data_inputs' ] ) > 0:
+            box_height += LINE_SPACING
+            sep_y = y + len( step_dict[ 'data_inputs' ] ) * LINE_SPACING + 40
+            self.text.append( svgwrite.shapes.Line( (x - MARGIN, sep_y), (x + width - MARGIN, sep_y ), stroke=svgwrite.rgb(0, 0, 0)) )
+        # Define an input/output box.
+        self.boxes.append( svgwrite.shapes.Rect( (x - MARGIN, y + 30), (width, box_height), fill="#ffffff", stroke=svgwrite.rgb(0, 0, 0) ))
+
+    def add_text( self, module_data_inputs, module_data_outputs, step, module_name ):
+        left, top = step.position[ 'left' ], step.position[ 'top' ]
+        x, y = left, top
+        order_index = step.order_index
+        max_len = len( module_name ) * 1.5
+        self.text.append(svgwrite.text.Text(module_name, (x, y + 20), style='font-size:14px'))
+        y += 45
+        count = 0
+        in_pos = self.in_pos
+        out_pos = self.out_pos
+        for di in module_data_inputs:
+            cur_y = y + count * LINE_SPACING
+            if order_index not in in_pos:
+                in_pos[ order_index ] = {}
+            in_pos[ order_index ][ di[ 'name' ] ] = ( x, cur_y )
+            self.text.append( svgwrite.text.Text( di[ 'label' ], (x, cur_y), style='font-size:10px' ) )
+            count += 1
+            max_len = max( max_len, len( di[ 'label' ] ) )
+        if len( module_data_inputs ) > 0:
+            y += LINE_SPACING
+        for do in module_data_outputs:
+            cur_y = y + count * LINE_SPACING
+            if order_index not in out_pos:
+                out_pos[ order_index ] = {}
+            out_pos[ order_index ][ do[ 'name' ] ] = ( x, cur_y )
+            self.text.append( svgwrite.text.Text( do[ 'name' ], (x, cur_y), style='font-size:10px' ))
+            count += 1
+            max_len = max( max_len, len( do['name' ] ) )
+        self.widths[ order_index ] = max_len * 5.5
+        self.max_x = max( self.max_x, left )
+        self.max_y = max( self.max_y, top )
+        self.max_width = max( self.max_width, self.widths[ order_index ] )
+
+    def add_connection( self, step_dict, conn, output_dict):
+        in_coords = self.in_pos[ step_dict[ 'id' ] ][ conn ]
+        # out_pos_index will be a step number like 1, 2, 3...
+        out_pos_index = output_dict[ 'id' ]
+        # out_pos_name will be a string like 'o', 'o2', etc.
+        out_pos_name = output_dict[ 'output_name' ]
+        if out_pos_index in self.out_pos:
+            # out_conn_index_dict will be something like:
+            # 7: {'o': (824.5, 618)}
+            out_conn_index_dict = self.out_pos[ out_pos_index ]
+            if out_pos_name in out_conn_index_dict:
+                out_conn_pos = out_conn_index_dict[ out_pos_name ]
+            else:
+                # Take any key / value pair available in out_conn_index_dict.
+                # If the dictionary is empty, out_conn_pos stays unbound and
+                # the code below will raise a NameError.
+                if out_conn_index_dict.keys():
+                    key = out_conn_index_dict.keys()[0]
+                    out_conn_pos = self.out_pos[ out_pos_index ][ key ]
+        adjusted = ( out_conn_pos[ 0 ] + self.widths[ output_dict[ 'id' ] ], out_conn_pos[ 1 ] )
+        self.text.append( svgwrite.shapes.Circle(center=(out_conn_pos[ 0 ] + self.widths[ output_dict[ 'id' ] ] - MARGIN,
+                                                         out_conn_pos[ 1 ] - MARGIN),
+                                                 r=5,
+                                                 fill="#ffffff",
+                                                 stroke="#000000" ) )
+        marker = self.canvas.marker(overflow='visible',
+                                    refX="0", refY="5",
+                                    viewBox="0 0 10 5",
+                                    markerWidth="8",
+                                    markerHeight="10",
+                                    markerUnits="strokeWidth",
+                                    orient="auto", stroke="none", fill="black")
+        marker.add(self.canvas.path(d="M 0 0 L 10 5 L 0 10 z"))
+        self.canvas.defs.add(marker)
+        conn = svgwrite.shapes.Line( (adjusted[ 0 ], adjusted[ 1 ] - MARGIN),
+                                     (in_coords[ 0 ] - 10, in_coords[ 1 ]),
+                                     stroke="#000000")
+        conn['marker-end'] = marker.get_funciri()
+        self.connectors.append( conn )
+
+    def add_steps( self, highlight_errors=False ):
+        # Only highlight missing tools if displaying in the tool shed.
+        for step_dict in self.data:
+            tool_unavailable = step_dict.get( 'tool_errors', False )
+            if highlight_errors and tool_unavailable:
+                fill = "#EBBCB2"
+            else:
+                fill = "#EBD9B2"
+            width = self.widths[ step_dict[ 'id' ] ]
+            self.add_boxes( step_dict, width, fill )
+            for conn, output_dict in step_dict[ 'input_connections' ].iteritems():
+                self.add_connection( step_dict, conn, output_dict )
+
+    def populate_data_for_step( self, step, module_name, module_data_inputs, module_data_outputs, tool_errors=None ):
+        step_dict = {
+            'id': step.order_index,
+            'data_inputs': module_data_inputs,
+            'data_outputs': module_data_outputs,
+            'position': step.position
+        }
+        if tool_errors:
+            step_dict[ 'tool_errors' ] = tool_errors
+        input_conn_dict = {}
+        for conn in step.input_connections:
+            input_conn_dict[ conn.input_name ] = \
+                dict( id=conn.output_step.order_index, output_name=conn.output_name )
+        step_dict['input_connections'] = input_conn_dict
+        self.data.append(step_dict)
+        self.add_text( module_data_inputs, module_data_outputs, step, module_name )
diff --git a/lib/galaxy/workflow/run.py b/lib/galaxy/workflow/run.py
new file mode 100644
index 0000000..3afb3ed
--- /dev/null
+++ b/lib/galaxy/workflow/run.py
@@ -0,0 +1,402 @@
+import logging
+import uuid
+
+from galaxy import model, util
+from galaxy.util import ExecutionTimer
+from galaxy.util.odict import odict
+from galaxy.workflow import modules
+from galaxy.workflow.run_request import (workflow_run_config_to_request,
+    WorkflowRunConfig)
+
+log = logging.getLogger( __name__ )
+
+
+# Entry point for direct invoke via controllers. Deprecated to some degree.
+def invoke( trans, workflow, workflow_run_config, workflow_invocation=None, populate_state=False ):
+    if force_queue( trans, workflow ):
+        invocation = queue_invoke( trans, workflow, workflow_run_config, populate_state=populate_state )
+        return [], invocation
+    else:
+        return __invoke( trans, workflow, workflow_run_config, workflow_invocation, populate_state )
+
+
+# Entry point for core workflow scheduler.
+def schedule( trans, workflow, workflow_run_config, workflow_invocation ):
+    return __invoke( trans, workflow, workflow_run_config, workflow_invocation )
+
+
+BASIC_WORKFLOW_STEP_TYPES = [ None, "tool", "data_input", "data_collection_input" ]
+
+
+def force_queue( trans, workflow ):
+    # Default behavior is still to just schedule workflows completely right
+    # away. This can be modified here in various ways.
+
+    # TODO: check for implicit connections - these should also force backgrounding
+    #       this would fix running Dan's data manager workflows via UI.
+    # TODO: ensure state is populated before calling force_queue from the old API
+    #       workflow endpoint so the has_module check below is unneeded and these
+    #       interesting workflows will work with the older endpoint.
+    config = trans.app.config
+    force_for_collection = config.force_beta_workflow_scheduled_for_collections
+    force_min_steps = config.force_beta_workflow_scheduled_min_steps
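+    # For example (illustrative only - the option names mirror the config
+    # attributes read above, the values are hypothetical), in the Galaxy config:
+    #   force_beta_workflow_scheduled_for_collections = True
+    #   force_beta_workflow_scheduled_min_steps = 250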
+
+    step_count = len( workflow.steps )
+    if step_count > force_min_steps:
+        log.info("Workflow has many steps %d, backgrounding execution" % step_count)
+        return True
+    for step in workflow.steps:
+        # State and module haven't been populated if the workflow was submitted via
+        # the API. API requests for "interesting" workflows should use the newer
+        # endpoint that skips this check entirely - POST /api/workflows/<id>/invocations
+        has_module = hasattr(step, "module")
+        if step.type not in BASIC_WORKFLOW_STEP_TYPES:
+            log.info("Found non-basic workflow step type - backgrounding execution")
+            # Force queueing of the workflow for all new beta module types.
+            return True
+        if step.type == "data_collection_input" and force_for_collection:
+            log.info("Found collection input step - backgrounding execution")
+            return True
+        if step.type == "tool" and has_module and step.module.tool.produces_collections_with_unknown_structure:
+            log.info("Found dynamically structured output collection - backgrounding execution")
+            return True
+    return False
+
+
+def __invoke( trans, workflow, workflow_run_config, workflow_invocation=None, populate_state=False ):
+    """ Run the supplied workflow in the supplied target_history.
+    """
+    if populate_state:
+        modules.populate_module_and_state( trans, workflow, workflow_run_config.param_map, allow_tool_state_corrections=workflow_run_config.allow_tool_state_corrections )
+
+    invoker = WorkflowInvoker(
+        trans,
+        workflow,
+        workflow_run_config,
+        workflow_invocation=workflow_invocation,
+    )
+    try:
+        outputs = invoker.invoke()
+    except modules.CancelWorkflowEvaluation:
+        if workflow_invocation:
+            if workflow_invocation.cancel():
+                trans.sa_session.add( workflow_invocation )
+        outputs = []
+    except Exception:
+        log.exception("Failed to execute scheduled workflow.")
+        if workflow_invocation:
+            # Running workflow invocation in background, just mark
+            # persistent workflow invocation as failed.
+            workflow_invocation.fail()
+            trans.sa_session.add( workflow_invocation )
+        else:
+            # Running new transient workflow invocation in legacy
+            # controller action - propagate the exception up.
+            raise
+        outputs = []
+
+    if workflow_invocation:
+        # Be sure to update state of workflow_invocation.
+        trans.sa_session.flush()
+
+    return outputs, invoker.workflow_invocation
+
+
+def queue_invoke( trans, workflow, workflow_run_config, request_params={}, populate_state=True ):
+    if populate_state:
+        modules.populate_module_and_state( trans, workflow, workflow_run_config.param_map, allow_tool_state_corrections=workflow_run_config.allow_tool_state_corrections )
+    workflow_invocation = workflow_run_config_to_request( trans, workflow_run_config, workflow )
+    workflow_invocation.workflow = workflow
+    return trans.app.workflow_scheduling_manager.queue( workflow_invocation, request_params )
+
+
+class WorkflowInvoker( object ):
+
+    def __init__( self, trans, workflow, workflow_run_config, workflow_invocation=None, progress=None ):
+        self.trans = trans
+        self.workflow = workflow
+        if progress is not None:
+            assert workflow_invocation is None
+            workflow_invocation = progress.workflow_invocation
+
+        if workflow_invocation is None:
+            invocation_uuid = uuid.uuid1()
+
+            workflow_invocation = model.WorkflowInvocation()
+            workflow_invocation.workflow = self.workflow
+
+            # One way or another, the following attributes will become persistent
+            # so they are available during delayed/revisited workflow scheduling.
+            workflow_invocation.uuid = invocation_uuid
+            workflow_invocation.history = workflow_run_config.target_history
+
+            self.workflow_invocation = workflow_invocation
+        else:
+            self.workflow_invocation = workflow_invocation
+
+        self.workflow_invocation.copy_inputs_to_history = workflow_run_config.copy_inputs_to_history
+        self.workflow_invocation.replacement_dict = workflow_run_config.replacement_dict
+
+        module_injector = modules.WorkflowModuleInjector( trans )
+        if progress is None:
+            progress = WorkflowProgress( self.workflow_invocation, workflow_run_config.inputs, module_injector )
+        self.progress = progress
+
+    def invoke( self ):
+        workflow_invocation = self.workflow_invocation
+        remaining_steps = self.progress.remaining_steps()
+        delayed_steps = False
+        for step in remaining_steps:
+            step_delayed = False
+            step_timer = ExecutionTimer()
+            jobs = None
+            try:
+                self.__check_implicitly_dependent_steps(step)
+
+                # TODO: step may fail to invoke, do something about that.
+                jobs = self._invoke_step( step )
+                for job in (util.listify( jobs ) or [None]):
+                    # Record invocation
+                    workflow_invocation_step = model.WorkflowInvocationStep()
+                    workflow_invocation_step.workflow_invocation = workflow_invocation
+                    workflow_invocation_step.workflow_step = step
+                    # Job may not be generated in this thread if bursting is enabled
+                    # https://github.com/galaxyproject/galaxy/issues/2259
+                    if job:
+                        workflow_invocation_step.job_id = job.id
+            except modules.DelayedWorkflowEvaluation:
+                step_delayed = delayed_steps = True
+                self.progress.mark_step_outputs_delayed( step )
+            except Exception:
+                log.exception(
+                    "Failed to schedule %s, problem occurred on %s.",
+                    self.workflow_invocation.workflow.log_str(),
+                    step.log_str(),
+                )
+                raise
+
+            step_verb = "invoked" if not step_delayed else "delayed"
+            log.debug("Workflow step %s of invocation %s %s %s" % (step.id, workflow_invocation.id, step_verb, step_timer))
+
+        if delayed_steps:
+            state = model.WorkflowInvocation.states.READY
+        else:
+            state = model.WorkflowInvocation.states.SCHEDULED
+        workflow_invocation.state = state
+
+        # All jobs ran successfully, so we can save now
+        self.trans.sa_session.add( workflow_invocation )
+
+        # Not flushing here, because the web controller may create multiple
+        # invocations.
+        return self.progress.outputs
+
+    def __check_implicitly_dependent_steps( self, step ):
+        """ Method will delay the workflow evaluation if implicitly dependent
+        steps (steps dependent but not through an input->output way) are not
+        yet complete.
+        """
+        for input_connection in step.input_connections:
+            if input_connection.non_data_connection:
+                output_id = input_connection.output_step.id
+                self.__check_implicitly_dependent_step( output_id )
+
+    def __check_implicitly_dependent_step( self, output_id ):
+        step_invocations = self.workflow_invocation.step_invocations_for_step_id( output_id )
+
+        # No steps created yet - have to delay evaluation.
+        if not step_invocations:
+            raise modules.DelayedWorkflowEvaluation()
+
+        for step_invocation in step_invocations:
+            job = step_invocation.job
+            if job:
+                # At least one job is incomplete.
+                if not job.finished:
+                    raise modules.DelayedWorkflowEvaluation()
+
+                if job.state != job.states.OK:
+                    raise modules.CancelWorkflowEvaluation()
+
+            else:
+                # TODO: Handle implicit dependency on stuff like
+                # pause steps.
+                pass
+
+    def _invoke_step( self, step ):
+        jobs = step.module.execute( self.trans, self.progress, self.workflow_invocation, step )
+        return jobs
+
+
+STEP_OUTPUT_DELAYED = object()
+
+
+class WorkflowProgress( object ):
+
+    def __init__( self, workflow_invocation, inputs_by_step_id, module_injector ):
+        self.outputs = odict()
+        self.module_injector = module_injector
+        self.workflow_invocation = workflow_invocation
+        self.inputs_by_step_id = inputs_by_step_id
+
+    def remaining_steps(self):
+        # Previously computed and persisted step states.
+        step_states = self.workflow_invocation.step_states_by_step_id()
+        steps = self.workflow_invocation.workflow.steps
+        remaining_steps = []
+        step_invocations_by_id = self.workflow_invocation.step_invocations_by_step_id()
+        for step in steps:
+            step_id = step.id
+            if not hasattr( step, 'module' ):
+                self.module_injector.inject( step )
+                if step_id not in step_states:
+                    template = "Workflow invocation [%s] has no step state for step id [%s]. States ids are %s."
+                    message = template % (self.workflow_invocation.id, step_id, list(step_states.keys()))
+                    raise Exception(message)
+                runtime_state = step_states[ step_id ].value
+                step.state = step.module.recover_runtime_state( runtime_state )
+
+            invocation_steps = step_invocations_by_id.get( step_id, None )
+            if invocation_steps:
+                self._recover_mapping( step, invocation_steps )
+            else:
+                remaining_steps.append( step )
+        return remaining_steps
+
+    def replacement_for_tool_input( self, step, input, prefixed_name ):
+        """ For given workflow 'step' that has had input_connections_by_name
+        populated fetch the actual runtime input for the given tool 'input'.
+        """
+        replacement = modules.NO_REPLACEMENT
+        if prefixed_name in step.input_connections_by_name:
+            connection = step.input_connections_by_name[ prefixed_name ]
+            if input.type == "data" and input.multiple:
+                replacement = [ self.replacement_for_connection( c ) for c in connection ]
+                # If replacement is just one dataset collection, replace tool
+                # input with dataset collection - tool framework will extract
+                # datasets properly.
+                if len( replacement ) == 1:
+                    if isinstance( replacement[ 0 ], model.HistoryDatasetCollectionAssociation ):
+                        replacement = replacement[ 0 ]
+            else:
+                is_data = input.type in ["data", "data_collection"]
+                replacement = self.replacement_for_connection( connection[ 0 ], is_data=is_data )
+        return replacement
+
+    def replacement_for_connection( self, connection, is_data=True ):
+        output_step_id = connection.output_step.id
+        if output_step_id not in self.outputs:
+            template = "No outputs found for step id %s, outputs are %s"
+            message = template % (output_step_id, self.outputs)
+            raise Exception(message)
+        step_outputs = self.outputs[ output_step_id ]
+        if step_outputs is STEP_OUTPUT_DELAYED:
+            raise modules.DelayedWorkflowEvaluation()
+        output_name = connection.output_name
+        try:
+            replacement = step_outputs[ output_name ]
+        except KeyError:
+            if is_data:
+                # Must resolve.
+                template = "Workflow evaluation problem - failed to find output_name %s in step_outputs %s"
+                message = template % ( output_name, step_outputs )
+                raise Exception( message )
+            else:
+                replacement = modules.NO_REPLACEMENT
+        if isinstance( replacement, model.HistoryDatasetCollectionAssociation ):
+            if not replacement.collection.populated:
+                if not replacement.collection.waiting_for_elements:
+                    # If we are not waiting for elements, there was some
+                    # problem creating the collection. Collection will never
+                    # be populated.
+                    # TODO: consider distinguishing between cancelled and failed?
+                    raise modules.CancelWorkflowEvaluation()
+
+                raise modules.DelayedWorkflowEvaluation()
+        return replacement
+
+    def get_replacement_workflow_output( self, workflow_output ):
+        step = workflow_output.workflow_step
+        output_name = workflow_output.output_name
+        return self.outputs[ step.id ][ output_name ]
+
+    def set_outputs_for_input( self, step, outputs=None ):
+        if outputs is None:
+            outputs = {}
+
+        if self.inputs_by_step_id:
+            step_id = step.id
+            if step_id not in self.inputs_by_step_id:
+                template = "Step with id %s not found in inputs_step_id (%s)"
+                message = template % (step_id, self.inputs_by_step_id)
+                raise ValueError(message)
+            outputs[ 'output' ] = self.inputs_by_step_id[ step_id ]
+
+        self.set_step_outputs( step, outputs )
+
+    def set_step_outputs(self, step, outputs):
+        self.outputs[ step.id ] = outputs
+
+    def mark_step_outputs_delayed(self, step):
+        self.outputs[ step.id ] = STEP_OUTPUT_DELAYED
+
+    def _subworkflow_invocation(self, step):
+        workflow_invocation = self.workflow_invocation
+        subworkflow_invocation = workflow_invocation.get_subworkflow_invocation_for_step(step)
+        if subworkflow_invocation is None:
+            raise Exception("Failed to find persisted workflow invocation for step [%s]" % step.id)
+        return subworkflow_invocation
+
+    def subworkflow_invoker(self, trans, step):
+        subworkflow_progress = self.subworkflow_progress(step)
+        subworkflow_invocation = subworkflow_progress.workflow_invocation
+        workflow_run_config = WorkflowRunConfig(
+            target_history=subworkflow_invocation.history,
+            replacement_dict={},
+            inputs={},
+            param_map={},
+            copy_inputs_to_history=False,
+        )
+        return WorkflowInvoker(
+            trans,
+            workflow=subworkflow_invocation.workflow,
+            workflow_run_config=workflow_run_config,
+            progress=subworkflow_progress,
+        )
+
+    def subworkflow_progress(self, step):
+        subworkflow_invocation = self._subworkflow_invocation(step)
+        subworkflow = subworkflow_invocation.workflow
+        subworkflow_inputs = {}
+        for input_subworkflow_step in subworkflow.input_steps:
+            connection_found = False
+            for input_connection in step.input_connections:
+                if input_connection.input_subworkflow_step == input_subworkflow_step:
+                    subworkflow_step_id = input_subworkflow_step.id
+                    is_data = input_connection.output_step.type != "parameter_input"
+                    replacement = self.replacement_for_connection(
+                        input_connection,
+                        is_data=is_data,
+                    )
+                    subworkflow_inputs[subworkflow_step_id] = replacement
+                    connection_found = True
+                    break
+
+            if not connection_found:
+                raise Exception("Could not find connections for all subworkflow inputs.")
+
+        return WorkflowProgress(
+            subworkflow_invocation,
+            subworkflow_inputs,
+            self.module_injector,
+        )
+
+    def _recover_mapping( self, step, step_invocations ):
+        try:
+            step.module.recover_mapping( step, step_invocations, self )
+        except modules.DelayedWorkflowEvaluation:
+            self.mark_step_outputs_delayed( step )
+
+
+__all__ = ( 'invoke', 'WorkflowRunConfig' )
diff --git a/lib/galaxy/workflow/run_request.py b/lib/galaxy/workflow/run_request.py
new file mode 100644
index 0000000..7a5f11e
--- /dev/null
+++ b/lib/galaxy/workflow/run_request.py
@@ -0,0 +1,422 @@
+import uuid
+
+from galaxy import exceptions
+from galaxy import model
+
+from galaxy.managers import histories
+from galaxy.tools.parameters.meta import expand_workflow_inputs
+
+INPUT_STEP_TYPES = [ 'data_input', 'data_collection_input', 'parameter_input' ]
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+class WorkflowRunConfig( object ):
+    """ Wrapper around all the ways a workflow execution can be parameterized.
+
+    :param target_history: History to execute workflow in.
+    :type target_history: galaxy.model.History.
+
+    :param replacement_dict: Workflow level parameters used for renaming post
+        job actions.
+    :type replacement_dict: dict
+
+    :param copy_inputs_to_history: Should input data parameters be copied to
+        target_history. (Defaults to False)
+    :type copy_inputs_to_history: bool
+
+    :param inputs: Map from step ids to dicts containing the HDA for these steps.
+    :type inputs: dict
+
+    :param inputs_by: How keys in ``inputs`` map inputs (datasets/collections) to
+                      workflow steps - by unencoded database id ('step_id'), by index
+                      in the workflow ('step_index', independent of the database), or
+                      by the input name for that step ('name').
+    :type inputs_by: str
+
+    :param param_map: Override step parameters - should be dict with step id keys and
+                      tool param name-value dicts as values.
+    :type param_map: dict
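+
+    Example (an illustrative sketch - the step ids, input reference and
+    parameter name below are hypothetical)::
+
+        run_config = WorkflowRunConfig(
+            target_history=history,
+            replacement_dict={'run_name': 'run1'},
+            inputs={10: {'src': 'hda', 'id': 5}},
+            param_map={12: {'num_lines': 3}},
+        )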
+    """
+
+    def __init__( self, target_history, replacement_dict, copy_inputs_to_history=False, inputs={}, param_map={}, allow_tool_state_corrections=False ):
+        self.target_history = target_history
+        self.replacement_dict = replacement_dict
+        self.copy_inputs_to_history = copy_inputs_to_history
+        self.inputs = inputs
+        self.param_map = param_map
+        self.allow_tool_state_corrections = allow_tool_state_corrections
+
+
+def _normalize_inputs(steps, inputs, inputs_by):
+    normalized_inputs = {}
+    for step in steps:
+        if step.type not in INPUT_STEP_TYPES:
+            continue
+        possible_input_keys = []
+        for inputs_by_el in inputs_by.split("|"):
+            if inputs_by_el == "step_id":
+                possible_input_keys.append(str( step.id ))
+            elif inputs_by_el == "step_index":
+                possible_input_keys.append(str( step.order_index ))
+            elif inputs_by_el == "step_uuid":
+                possible_input_keys.append(str( step.uuid ))
+            elif inputs_by_el == "name":
+                possible_input_keys.append(step.tool_inputs.get( 'name', None ))
+            else:
+                message = "Workflow cannot be run because unexpected inputs_by value specified."
+                raise exceptions.MessageException( message )
+        inputs_key = None
+        for possible_input_key in possible_input_keys:
+            if possible_input_key in inputs:
+                inputs_key = possible_input_key
+        if not inputs_key:
+            message = "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
+            raise exceptions.MessageException( message )
+        normalized_inputs[ step.id ] = inputs[ inputs_key ]
+    return normalized_inputs
+
+
+def _normalize_step_parameters(steps, param_map, legacy=False, already_normalized=False):
+    """ Take a complex param_map that can reference parameters by
+    step_id in the new flexible way or in the old one-parameter
+    per tep fashion or by tool id and normalize the parameters so
+    everything is referenced by a numeric step id.
+    """
+    normalized_param_map = {}
+    for step in steps:
+        if already_normalized:
+            param_dict = param_map.get(str(step.order_index), {})
+        else:
+            param_dict = _step_parameters(step, param_map, legacy=legacy)
+        if param_dict:
+            normalized_param_map[step.id] = param_dict
+    return normalized_param_map
+
+
+def _step_parameters(step, param_map, legacy=False):
+    """
+    Update ``step`` parameters based on the user-provided ``param_map`` dict.
+
+    ``param_map`` should be structured as follows::
+
+      PARAM_MAP = {STEP_ID_OR_UUID: PARAM_DICT, ...}
+      PARAM_DICT = {NAME: VALUE, ...}
+
+    For backwards compatibility, the following (deprecated) format is
+    also supported for ``param_map``::
+
+      PARAM_MAP = {TOOL_ID: PARAM_DICT, ...}
+
+    in which case PARAM_DICT affects all steps with the given tool id.
+    If both by-tool-id and by-step-id specifications are used, the
+    latter takes precedence.
+
+    Finally (again, for backwards compatibility), PARAM_DICT can also
+    be specified as::
+
+      PARAM_DICT = {'param': NAME, 'value': VALUE}
+
+    Note that this format allows only one parameter to be set per step.
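+
+    For example (illustrative step ids, tool id and parameter name)::
+
+      param_map = {'5': {'num_lines': 3}}                    # by step id/uuid
+      param_map = {'cat1': {'num_lines': 3}}                 # by tool id (deprecated)
+      param_map = {'5': {'param': 'num_lines', 'value': 3}}  # deprecated PARAM_DICT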
+    """
+    param_dict = param_map.get(step.tool_id, {}).copy()
+    if legacy:
+        param_dict.update(param_map.get(str(step.id), {}))
+    else:
+        param_dict.update(param_map.get(str(step.order_index), {}))
+    step_uuid = step.uuid
+    if step_uuid:
+        uuid_params = param_map.get(str(step_uuid), {})
+        param_dict.update(uuid_params)
+    if param_dict:
+        if 'param' in param_dict and 'value' in param_dict:
+            param_dict[param_dict['param']] = param_dict['value']
+            del param_dict[ 'param' ]
+            del param_dict[ 'value' ]
+    # Inputs can be nested dicts, but Galaxy tool code wants flattened keys (e.g.
+    # cond1|moo=4 instead of cond1: {moo: 4} ).
+    new_params = _flatten_step_params( param_dict )
+    return new_params
+
+
+def _flatten_step_params( param_dict, prefix="" ):
+    # TODO: Temporary workaround until tool code can process nested data
+    # structures. This should really happen in there so the tools API gets
+    # this functionality for free and so that repeats can be handled
+    # properly. Also, the tool code walks the tool inputs, so it knows what is
+    # a complex value object versus something that maps to child parameters
+    # better than the hack of searching for 'src' and 'id' here.
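+    # e.g. (illustrative): {'cond1': {'moo': 4}} flattens to {'cond1|moo': 4},
+    # while {'input': {'src': 'hda', 'id': 5}} is kept intact as a value object.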
+    new_params = {}
+    keys = param_dict.keys()[:]
+    for key in keys:
+        if prefix:
+            effective_key = "%s|%s" % ( prefix, key )
+        else:
+            effective_key = key
+        value = param_dict[key]
+        if isinstance(value, dict) and (not ('src' in value and 'id' in value) and key != "__POST_JOB_ACTIONS__"):
+            new_params.update(_flatten_step_params( value, effective_key) )
+        else:
+            new_params[effective_key] = value
+    return new_params
+
+
+def _get_target_history(trans, workflow, payload, param_keys=[], index=0):
+    history_name = payload.get('new_history_name', None)
+    history_id = payload.get('history_id', None)
+    history_param = payload.get('history', None)
+    if [ history_name, history_id, history_param ].count( None ) < 2:
+        raise exceptions.RequestParameterInvalidException("Specified workflow target history multiple ways - at most one of 'history', 'history_id', and 'new_history_name' may be specified.")
+    if history_param:
+        if history_param.startswith('hist_id='):
+            history_id = history_param[ 8: ]
+        else:
+            history_name = history_param
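+    # e.g. (illustrative values): history='hist_id=abc123' selects an existing
+    # history by its encoded id, while history='RNA-seq run' names a new
+    # history to create.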
+    if history_id:
+        history_manager = histories.HistoryManager( trans.app )
+        target_history = history_manager.get_owned( trans.security.decode_id(history_id), trans.user, current_history=trans.history )
+    else:
+        if history_name:
+            nh_name = history_name
+        else:
+            nh_name = 'History from %s workflow' % workflow.name
+        if len( param_keys ) <= index:
+            raise exceptions.MessageException("Incorrect expansion of workflow batch parameters.")
+        ids = param_keys[ index ]
+        nids = len( ids )
+        if nids == 1:
+            nh_name = '%s on %s' % ( nh_name, ids[ 0 ] )
+        elif nids > 1:
+            nh_name = '%s on %s and %s' % ( nh_name, ', '.join( ids[ 0:-1 ] ), ids[ -1 ] )
+        new_history = trans.app.model.History( user=trans.user, name=nh_name )
+        trans.sa_session.add( new_history )
+        target_history = new_history
+    return target_history
+
+
+def build_workflow_run_configs( trans, workflow, payload ):
+    app = trans.app
+    allow_tool_state_corrections = payload.get( 'allow_tool_state_corrections', False )
+
+    # Sanity checks.
+    if len( workflow.steps ) == 0:
+        raise exceptions.MessageException( "Workflow cannot be run because it does not have any steps" )
+    if workflow.has_cycles:
+        raise exceptions.MessageException( "Workflow cannot be run because it contains cycles" )
+    if workflow.has_errors:
+        raise exceptions.MessageException( "Workflow cannot be run because of validation errors in some steps" )
+
+    if 'step_parameters' in payload and 'parameters' in payload:
+        raise exceptions.RequestParameterInvalidException( "Cannot specify both legacy parameters and step_parameters attributes." )
+    if 'inputs' in payload and 'ds_map' in payload:
+        raise exceptions.RequestParameterInvalidException( "Cannot specify both legacy ds_map and input attributes." )
+
+    add_to_history = 'no_add_to_history' not in payload
+    legacy = payload.get( 'legacy', False )
+    already_normalized = payload.get( 'parameters_normalized', False )
+    raw_parameters = payload.get( 'parameters', {} )
+
+    run_configs = []
+    unexpanded_param_map = _normalize_step_parameters( workflow.steps, raw_parameters, legacy=legacy, already_normalized=already_normalized )
+    expanded_params, expanded_param_keys = expand_workflow_inputs( unexpanded_param_map )
+    for index, param_map in enumerate( expanded_params ):
+        history = _get_target_history(trans, workflow, payload, expanded_param_keys, index)
+        inputs = payload.get( 'inputs', None )
+        inputs_by = payload.get( 'inputs_by', None )
+        # New default is to reference steps by index of workflow step
+        # which is intrinsic to the workflow and independent of the state
+        # of Galaxy at the time of workflow import.
+        default_inputs_by = 'step_index|step_uuid'
+        if inputs is None:
+            # Default to legacy behavior - read ds_map and reference steps
+            # by unencoded step id (a raw database id).
+            inputs = payload.get( 'ds_map', {} )
+            if legacy:
+                default_inputs_by = 'step_id|step_uuid'
+            inputs_by = inputs_by or default_inputs_by
+        else:
+            inputs = inputs or {}
+        inputs_by = inputs_by or default_inputs_by
+        if inputs or not already_normalized:
+            normalized_inputs = _normalize_inputs( workflow.steps, inputs, inputs_by )
+        else:
+            # Only allow dumping IDs directly into the JSON database (instead of properly
+            # recording the inputs with referential integrity) if parameters are already
+            # normalized (coming from the tool form).
+            normalized_inputs = {}
+
+        steps_by_id = workflow.steps_by_id
+        # Set workflow inputs.
+        for key, input_dict in normalized_inputs.iteritems():
+            step = steps_by_id[key]
+            if step.type == 'parameter_input':
+                continue
+            if 'src' not in input_dict:
+                raise exceptions.RequestParameterInvalidException( "Not input source type defined for input '%s'." % input_dict )
+            if 'id' not in input_dict:
+                raise exceptions.RequestParameterInvalidException( "Not input id defined for input '%s'." % input_dict )
+            if 'content' in input_dict:
+                raise exceptions.RequestParameterInvalidException( "Input cannot specify explicit 'content' attribute %s'." % input_dict )
+            input_source = input_dict[ 'src' ]
+            input_id = input_dict[ 'id' ]
+            try:
+                if input_source == 'ldda':
+                    ldda = trans.sa_session.query( app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( input_id ) )
+                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset )
+                    content = ldda.to_history_dataset_association( history, add_to_history=add_to_history )
+                elif input_source == 'ld':
+                    ldda = trans.sa_session.query( app.model.LibraryDataset ).get( trans.security.decode_id( input_id ) ).library_dataset_dataset_association
+                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset )
+                    content = ldda.to_history_dataset_association( history, add_to_history=add_to_history )
+                elif input_source == 'hda':
+                    # Get dataset handle, add to dict and history if necessary
+                    content = trans.sa_session.query( app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( input_id ) )
+                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), content.dataset )
+                elif input_source == 'uuid':
+                    dataset = trans.sa_session.query( app.model.Dataset ).filter( app.model.Dataset.uuid == input_id ).first()
+                    if dataset is None:
+                        # This will need to be changed later. If federation code is available, then a missing UUID
+                        # could be found among federation partners.
+                        raise exceptions.RequestParameterInvalidException( "Input cannot find UUID: %s." % input_id )
+                    assert trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), dataset )
+                    content = history.add_dataset( dataset )
+                elif input_source == 'hdca':
+                    content = app.dataset_collections_service.get_dataset_collection_instance( trans, 'history', input_id )
+                else:
+                    raise exceptions.RequestParameterInvalidException( "Unknown workflow input source '%s' specified." % input_source )
+                if add_to_history and content.history != history:
+                    content = content.copy()
+                    if isinstance( content, app.model.HistoryDatasetAssociation ):
+                        history.add_dataset( content )
+                    else:
+                        history.add_dataset_collection( content )
+                input_dict[ 'content' ] = content
+            except AssertionError:
+                raise exceptions.ItemAccessibilityException( "Invalid workflow input '%s' specified" % input_id )
+        for key in set( normalized_inputs.keys() ):
+            value = normalized_inputs[ key ]
+            if isinstance( value, dict ) and 'content' in value:
+                normalized_inputs[ key ] = value[ 'content' ]
+            else:
+                normalized_inputs[ key ] = value
+        run_configs.append(WorkflowRunConfig(
+            target_history=history,
+            replacement_dict=payload.get( 'replacement_params', {} ),
+            inputs=normalized_inputs,
+            param_map=param_map,
+            allow_tool_state_corrections=allow_tool_state_corrections
+        ))
+
+    return run_configs
+
+
+def workflow_run_config_to_request( trans, run_config, workflow ):
+    param_types = model.WorkflowRequestInputParameter.types
+
+    workflow_invocation = model.WorkflowInvocation()
+    workflow_invocation.uuid = uuid.uuid1()
+    workflow_invocation.history = run_config.target_history
+
+    def add_parameter( name, value, type ):
+        parameter = model.WorkflowRequestInputParameter(
+            name=name,
+            value=value,
+            type=type,
+        )
+        workflow_invocation.input_parameters.append( parameter )
+
+    steps_by_id = {}
+    for step in workflow.steps:
+        steps_by_id[step.id] = step
+        serializable_runtime_state = step.module.get_state( step.state )
+        step_state = model.WorkflowRequestStepState()
+        step_state.workflow_step = step
+        log.info("Creating a step_state for step.id %s" % step.id)
+        step_state.value = serializable_runtime_state
+        workflow_invocation.step_states.append( step_state )
+
+        if step.type == "subworkflow":
+            subworkflow_run_config = WorkflowRunConfig(
+                target_history=run_config.target_history,
+                replacement_dict=run_config.replacement_dict,
+                copy_inputs_to_history=False,
+                inputs={},
+                param_map={},
+                allow_tool_state_corrections=run_config.allow_tool_state_corrections
+            )
+            subworkflow_invocation = workflow_run_config_to_request(
+                trans,
+                subworkflow_run_config,
+                step.subworkflow,
+            )
+            workflow_invocation.attach_subworkflow_invocation_for_step(
+                step,
+                subworkflow_invocation,
+            )
+
+    replacement_dict = run_config.replacement_dict
+    for name, value in replacement_dict.iteritems():
+        add_parameter(
+            name=name,
+            value=value,
+            type=param_types.REPLACEMENT_PARAMETERS,
+        )
+    for step_id, content in run_config.inputs.iteritems():
+        workflow_invocation.add_input( content, step_id )
+
+    add_parameter( "copy_inputs_to_history", "true" if run_config.copy_inputs_to_history else "false", param_types.META_PARAMETERS )
+    return workflow_invocation
+
+
+def workflow_request_to_run_config( work_request_context, workflow_invocation ):
+    param_types = model.WorkflowRequestInputParameter.types
+
+    history = workflow_invocation.history
+    replacement_dict = {}
+    inputs = {}
+    param_map = {}
+    copy_inputs_to_history = None
+
+    for parameter in workflow_invocation.input_parameters:
+        parameter_type = parameter.type
+
+        if parameter_type == param_types.REPLACEMENT_PARAMETERS:
+            replacement_dict[ parameter.name ] = parameter.value
+        elif parameter_type == param_types.META_PARAMETERS:
+            if parameter.name == "copy_inputs_to_history":
+                copy_inputs_to_history = (parameter.value == "true")
+
+    # for parameter in workflow_invocation.step_parameters:
+    #    step_id = parameter.workflow_step_id
+    #    if step_id not in param_map:
+    #        param_map[ step_id ] = {}
+    #    param_map[ step_id ][ parameter.name ] = parameter.value
+
+    for input_association in workflow_invocation.input_datasets:
+        inputs[ input_association.workflow_step_id ] = input_association.dataset
+
+    for input_association in workflow_invocation.input_dataset_collections:
+        inputs[ input_association.workflow_step_id ] = input_association.dataset_collection
+
+    for input_association in workflow_invocation.input_step_parameters:
+        inputs[ input_association.workflow_step_id ] = input_association.parameter_value
+
+    if copy_inputs_to_history is None:
+        raise exceptions.InconsistentDatabase("Failed to find copy_inputs_to_history parameter loading workflow_invocation from database.")
+
+    workflow_run_config = WorkflowRunConfig(
+        target_history=history,
+        replacement_dict=replacement_dict,
+        inputs=inputs,
+        param_map=param_map,
+        copy_inputs_to_history=copy_inputs_to_history,
+    )
+    return workflow_run_config
+
+
+def __decode_id( trans, workflow_id, model_type="workflow" ):
+    try:
+        return trans.security.decode_id( workflow_id )
+    except Exception:
+        message = "Malformed %s id ( %s ) specified, unable to decode" % ( model_type, workflow_id )
+        raise exceptions.MalformedId( message )
diff --git a/lib/galaxy/workflow/schedulers/__init__.py b/lib/galaxy/workflow/schedulers/__init__.py
new file mode 100644
index 0000000..a7836e1
--- /dev/null
+++ b/lib/galaxy/workflow/schedulers/__init__.py
@@ -0,0 +1,41 @@
+""" Module containing Galaxy workflow scheduling plugins. Galaxy's interface
+for workflow scheduling is highly experimental and the interface required for
+scheduling plugins will almost certainly change.
+"""
+from abc import ABCMeta
+from abc import abstractmethod
+
+
+class WorkflowSchedulingPlugin( object ):
+    """ A plugin defining how Galaxy should schedule plugins. By default
+    plugins are passive and should monitor Galaxy's work queue for
+    WorkflowRequests. Inherit from ActiveWorkflowSchedulingPlugin instead if
+    the scheduling plugin should be forced (i.e. if scheduling happen all at
+    once or the request will be stored and monitored outside of Galaxy.)
+    """
+    __metaclass__ = ABCMeta
+
+    @property
+    @abstractmethod
+    def plugin_type( self ):
+        """ Short string providing labelling this plugin """
+
+    def startup( self, app ):
+        """ Called when Galaxy starts up if the plugin is enabled.
+        """
+
+    def shutdown( self ):
+        """ Called when Galaxy is shutting down, workflow scheduling should
+        end.
+        """
+
+
+class ActiveWorkflowSchedulingPlugin( WorkflowSchedulingPlugin ):
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def schedule( self, workflow_invocation ):
+        """ Optionally return one or more commands to instrument job. These
+        commands will be executed on the compute server prior to the job
+        running.
+        """
diff --git a/lib/galaxy/workflow/schedulers/core.py b/lib/galaxy/workflow/schedulers/core.py
new file mode 100644
index 0000000..ec59029
--- /dev/null
+++ b/lib/galaxy/workflow/schedulers/core.py
@@ -0,0 +1,46 @@
+""" The class defines the stock Galaxy workflow scheduling plugin - currently
+it simply schedules the whole workflow up front when offered.
+"""
+import logging
+
+from galaxy.work import context
+from galaxy.workflow import run, run_request
+
+from ..schedulers import ActiveWorkflowSchedulingPlugin
+
+log = logging.getLogger( __name__ )
+
+
+class CoreWorkflowSchedulingPlugin( ActiveWorkflowSchedulingPlugin ):
+    plugin_type = "core"
+
+    def __init__( self, **kwds ):
+        pass
+
+    def startup( self, app ):
+        self.app = app
+
+    def shutdown( self ):
+        pass
+
+    def schedule( self, workflow_invocation ):
+        workflow = workflow_invocation.workflow
+        history = workflow_invocation.history
+        request_context = context.WorkRequestContext(
+            app=self.app,
+            history=history,
+            user=history.user
+        )  # trans-like object not tied to a web-thread.
+        workflow_run_config = run_request.workflow_request_to_run_config(
+            request_context,
+            workflow_invocation
+        )
+        run.schedule(
+            trans=request_context,
+            workflow=workflow,
+            workflow_run_config=workflow_run_config,
+            workflow_invocation=workflow_invocation,
+        )
+
+
+__all__ = ( 'CoreWorkflowSchedulingPlugin', )
diff --git a/lib/galaxy/workflow/scheduling_manager.py b/lib/galaxy/workflow/scheduling_manager.py
new file mode 100644
index 0000000..222b878
--- /dev/null
+++ b/lib/galaxy/workflow/scheduling_manager.py
@@ -0,0 +1,197 @@
+import os
+import time
+import logging
+import threading
+
+from xml.etree import ElementTree
+
+from galaxy import model
+from galaxy.util import plugin_config
+
+import galaxy.workflow.schedulers
+
+log = logging.getLogger( __name__ )
+
+DEFAULT_SCHEDULER_ID = "default"  # well actually this should be called DEFAULT_DEFAULT_SCHEDULER_ID...
+DEFAULT_SCHEDULER_PLUGIN_TYPE = "core"
+
+EXCEPTION_MESSAGE_SHUTDOWN = "Exception raised while attempting to shutdown workflow scheduler."
+EXCEPTION_MESSAGE_NO_SCHEDULERS = "Failed to define workflow schedulers - no workflow schedulers defined."
+EXCEPTION_MESSAGE_NO_DEFAULT_SCHEDULER = "Failed to define workflow schedulers - no workflow scheduler found for default id '%s'."
+EXCEPTION_MESSAGE_DUPLICATE_SCHEDULERS = "Failed to define workflow schedulers - workflow scheduling plugin id '%s' duplicated."
+
+
+class WorkflowSchedulingManager( object ):
+    """ A workflow scheduling manager based loosely on pattern established by
+    ``galaxy.manager.JobManager``. Only schedules workflows on handler
+    processes.
+    """
+
+    def __init__( self, app ):
+        self.app = app
+        self.__job_config = app.job_config
+        self.workflow_schedulers = {}
+        self.active_workflow_schedulers = {}
+        # Passive workflow schedulers do not need to be monitored.
+
+        self.request_monitor = None
+
+        self.__plugin_classes = self.__plugins_dict()
+        self.__init_schedulers()
+
+        if self._is_workflow_handler():
+            log.debug("Starting workflow schedulers")
+            self.__start_schedulers()
+            if self.active_workflow_schedulers:
+                self.__start_request_monitor()
+        else:
+            # Process should not schedule workflows - do nothing.
+            pass
+
+    # Provide a handler config-like interface by delegating to job handler
+    # config. Perhaps it makes sense to let there be explicit workflow
+    # handlers?
+    def _is_workflow_handler( self ):
+        return self.app.is_job_handler()
+
+    def _get_handler( self ):
+        return self.__job_config.get_handler( None )
+
+    def shutdown( self ):
+        for workflow_scheduler in self.workflow_schedulers.itervalues():
+            try:
+                workflow_scheduler.shutdown()
+            except Exception:
+                log.exception( EXCEPTION_MESSAGE_SHUTDOWN )
+        if self.request_monitor:
+            try:
+                self.request_monitor.shutdown()
+            except Exception:
+                log.exception( "Failed to shutdown workflow request monitor." )
+
+    def queue( self, workflow_invocation, request_params ):
+        workflow_invocation.state = model.WorkflowInvocation.states.NEW
+        scheduler = request_params.get( "scheduler", None ) or self.default_scheduler_id
+        handler = self._get_handler()
+        log.info("Queueing workflow invocation for handler [%s]" % handler)
+
+        workflow_invocation.scheduler = scheduler
+        workflow_invocation.handler = handler
+
+        sa_session = self.app.model.context
+        sa_session.add( workflow_invocation )
+        sa_session.flush()
+        return workflow_invocation
+
+    def __start_schedulers( self ):
+        for workflow_scheduler in self.workflow_schedulers.itervalues():
+            workflow_scheduler.startup( self.app )
+
+    def __plugins_dict( self ):
+        return plugin_config.plugins_dict( galaxy.workflow.schedulers, 'plugin_type' )
+
+    def __init_schedulers( self ):
+        config_file = self.app.config.workflow_schedulers_config_file
+        use_default_scheduler = False
+        if not config_file:
+            log.info( "Not workflow schedulers plugin config file defined, using default scheduler." )
+            use_default_scheduler = True
+        elif not os.path.exists( config_file ):
+            log.info( "Cannot find workflow schedulers plugin config file '%s', using default scheduler." % config_file )
+            use_default_scheduler = True
+
+        if use_default_scheduler:
+            self.__init_default_scheduler()
+        else:
+            plugins_element = ElementTree.parse( config_file ).getroot()
+            self.__init_schedulers_for_element( plugins_element )
+
+    def __init_default_scheduler( self ):
+        self.default_scheduler_id = DEFAULT_SCHEDULER_ID
+        self.__init_plugin( DEFAULT_SCHEDULER_PLUGIN_TYPE )
+
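+    # A minimal, hypothetical workflow_schedulers_config_file consumed by the
+    # method below - the root element's 'default' attribute selects the default
+    # plugin id, and each child element's tag names a plugin_type:
+    #
+    #   <schedulers default="core">
+    #       <core id="core" />
+    #   </schedulers>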
+    def __init_schedulers_for_element( self, plugins_element ):
+        plugins_kwds = dict( plugins_element.items() )
+        self.default_scheduler_id = plugins_kwds.get( 'default', DEFAULT_SCHEDULER_ID )
+        for plugin_element in plugins_element:
+            plugin_type = plugin_element.tag
+            plugin_kwds = dict( plugin_element.items() )
+            workflow_scheduler_id = plugin_kwds.get( 'id', None )
+            self.__init_plugin( plugin_type, workflow_scheduler_id, **plugin_kwds )
+
+        if not self.workflow_schedulers:
+            raise Exception( EXCEPTION_MESSAGE_NO_SCHEDULERS )
+        if self.default_scheduler_id not in self.workflow_schedulers:
+            raise Exception( EXCEPTION_MESSAGE_NO_DEFAULT_SCHEDULER % self.default_scheduler_id )
+
+    def __init_plugin( self, plugin_type, workflow_scheduler_id=None, **kwds ):
+        workflow_scheduler_id = workflow_scheduler_id or self.default_scheduler_id
+
+        if workflow_scheduler_id in self.workflow_schedulers:
+            raise Exception( EXCEPTION_MESSAGE_DUPLICATE_SCHEDULERS % workflow_scheduler_id )
+
+        workflow_scheduler = self.__plugin_classes[ plugin_type ]( **kwds )
+        self.workflow_schedulers[ workflow_scheduler_id ] = workflow_scheduler
+        if isinstance( workflow_scheduler, galaxy.workflow.schedulers.ActiveWorkflowSchedulingPlugin ):
+            self.active_workflow_schedulers[ workflow_scheduler_id ] = workflow_scheduler
+
+    def __start_request_monitor( self ):
+        self.request_monitor = WorkflowRequestMonitor( self.app, self )
+
+
+class WorkflowRequestMonitor( object ):
+
+    def __init__( self, app, workflow_scheduling_manager ):
+        self.app = app
+        self.active = True
+        self.workflow_scheduling_manager = workflow_scheduling_manager
+        self.monitor_thread = threading.Thread( name="WorkflowRequestMonitor.monitor_thread", target=self.__monitor )
+        self.monitor_thread.setDaemon( True )
+        self.monitor_thread.start()
+
+    def __monitor( self ):
+        to_monitor = self.workflow_scheduling_manager.active_workflow_schedulers
+        while self.active:
+            for workflow_scheduler_id, workflow_scheduler in to_monitor.iteritems():
+                if not self.active:
+                    return
+
+                self.__schedule( workflow_scheduler_id, workflow_scheduler )
+                # TODO: wake if stopped
+                time.sleep(1)
+
+    def __schedule( self, workflow_scheduler_id, workflow_scheduler ):
+        invocation_ids = self.__active_invocation_ids( workflow_scheduler_id )
+        for invocation_id in invocation_ids:
+            self.__attempt_schedule( invocation_id, workflow_scheduler )
+            if not self.active:
+                return
+
+    def __attempt_schedule( self, invocation_id, workflow_scheduler ):
+        sa_session = self.app.model.context
+        workflow_invocation = sa_session.query( model.WorkflowInvocation ).get( invocation_id )
+
+        if not workflow_invocation or not workflow_invocation.active:
+            return False
+
+        try:
+            workflow_scheduler.schedule( workflow_invocation )
+        except Exception:
+            # TODO: eventually fail this - or fail it right away?
+            log.exception( "Exception raised while attempting to schedule workflow request." )
+            return False
+
+        # A workflow was obtained and scheduled...
+        return True
+
+    def __active_invocation_ids( self, scheduler_id ):
+        sa_session = self.app.model.context
+        handler = self.app.config.server_name
+        return model.WorkflowInvocation.poll_active_workflow_ids(
+            sa_session,
+            scheduler=scheduler_id,
+            handler=handler,
+        )
+
+    def shutdown( self ):
+        self.active = False
diff --git a/lib/galaxy/workflow/steps.py b/lib/galaxy/workflow/steps.py
new file mode 100644
index 0000000..ba1ba48
--- /dev/null
+++ b/lib/galaxy/workflow/steps.py
@@ -0,0 +1,63 @@
+""" This module contains utility methods for reasoning about and ordering
+workflow steps.
+"""
+import math
+from galaxy.util.topsort import (
+    CycleError,
+    topsort,
+    topsort_levels
+)
+
+
+def attach_ordered_steps( workflow, steps ):
+    """ Attempt to topologically order steps and attach to workflow. If this
+    fails - the workflow contains cycles so it mark it as such.
+    """
+    ordered_steps = order_workflow_steps( steps )
+    if ordered_steps:
+        workflow.has_cycles = False
+        for i, step in enumerate( ordered_steps ):
+            step.order_index = i
+            workflow.steps.append( step )
+    else:
+        workflow.has_cycles = True
+        workflow.steps = steps
+
+
+def order_workflow_steps( steps ):
+    """
+    Perform topological sort of the steps, return ordered or None
+    """
+    position_data_available = True
+    for step in steps:
+        if not step.position or 'left' not in step.position or 'top' not in step.position:
+            position_data_available = False
+    if position_data_available:
+        steps.sort(key=lambda s: math.sqrt(s.position['left'] ** 2 + s.position['top'] ** 2))
+    try:
+        edges = edgelist_for_workflow_steps( steps )
+        node_order = topsort( edges )
+        return [ steps[i] for i in node_order ]
+    except CycleError:
+        return None
+
+
+def edgelist_for_workflow_steps( steps ):
+    """
+    Create a list of tuples representing edges between ``WorkflowSteps`` based
+    on associated ``WorkflowStepConnection``s
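+
+    For example, for two steps where step 0 feeds step 1, this yields
+    [(0, 0), (1, 1), (0, 1)] - each step gets a self-edge plus one edge
+    per input connection.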
+    """
+    edges = []
+    steps_to_index = dict( ( step, i ) for i, step in enumerate( steps ) )
+    for step in steps:
+        edges.append( ( steps_to_index[step], steps_to_index[step] ) )
+        for conn in step.input_connections:
+            edges.append( ( steps_to_index[conn.output_step], steps_to_index[conn.input_step] ) )
+    return edges
+
+
+def order_workflow_steps_with_levels( steps ):
+    try:
+        return topsort_levels( edgelist_for_workflow_steps( steps ) )
+    except CycleError:
+        return None
diff --git a/lib/galaxy_ext/__init__.py b/lib/galaxy_ext/__init__.py
new file mode 100644
index 0000000..3155ace
--- /dev/null
+++ b/lib/galaxy_ext/__init__.py
@@ -0,0 +1,4 @@
+""" Package for things which are loaded from outside Galaxy and which we can be
+sure will not conflict with the `galaxy` namespace, which may be provided by
+other packages (e.g. galaxy_ops in the toolshed).
+"""
diff --git a/lib/galaxy_ext/metadata/__init__.py b/lib/galaxy_ext/metadata/__init__.py
new file mode 100644
index 0000000..00e5bec
--- /dev/null
+++ b/lib/galaxy_ext/metadata/__init__.py
@@ -0,0 +1 @@
+""" Work with Galaxy metadata"""
diff --git a/lib/galaxy_ext/metadata/set_metadata.py b/lib/galaxy_ext/metadata/set_metadata.py
new file mode 100644
index 0000000..6f70a9d
--- /dev/null
+++ b/lib/galaxy_ext/metadata/set_metadata.py
@@ -0,0 +1,158 @@
+"""
+Execute an external process to set_meta() on a provided list of pickled datasets.
+
+This was formerly scripts/set_metadata.py and expects these arguments:
+
+    %prog datatypes_conf.xml job_metadata_file metadata_in,metadata_kwds,metadata_out,metadata_results_code,output_filename_override,metadata_override... max_metadata_value_size
+
+Galaxy should be importable on sys.path and output_filename_override should be
+set to the path of the dataset on which metadata is being set
+(output_filename_override could previously be left empty and the path would be
+constructed automatically).
+"""
+
+import cPickle
+import json
+import logging
+import os
+import sys
+
+# insert *this* galaxy before all others on sys.path
+sys.path.insert( 1, os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) ) )
+
+from sqlalchemy.orm import clear_mappers
+
+import galaxy.model.mapping  # need to load this before we unpickle, in order to setup properties assigned by the mappers
+from galaxy.model.custom_types import total_size
+from galaxy.util import stringify_dictionary_keys
+
+# ensure supported version
+assert sys.version_info[:2] >= ( 2, 6 ) and sys.version_info[:2] <= ( 2, 7 ), 'Python version must be 2.6 or 2.7, this is: %s' % sys.version
+
+logging.basicConfig()
+log = logging.getLogger( __name__ )
+
+galaxy.model.Job()  # this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here
+
+
+def set_meta_with_tool_provided( dataset_instance, file_dict, set_meta_kwds, datatypes_registry ):
+    # This method is somewhat odd, in that we set the metadata attributes from tool,
+    # then call set_meta, then set metadata attributes from tool again.
+    # This is intentional due to interplay of overwrite kwd, the fact that some metadata
+    # parameters may rely on the values of others, and that we are accepting the
+    # values provided by the tool as Truth.
+    extension = dataset_instance.extension
+    if extension == "_sniff_":
+        try:
+            from galaxy.datatypes import sniff
+            extension = sniff.handle_uploaded_dataset_file( dataset_instance.dataset.external_filename, datatypes_registry )
+            # We need to both set the extension so it is available to set_meta
+            # and record it in the metadata so it can be reloaded on the server
+            # side and the model updated (see MetadataCollection.{from,to}_JSON_dict)
+            dataset_instance.extension = extension
+            # Set special metadata property that will reload this on server side.
+            setattr( dataset_instance.metadata, "__extension__", extension )
+        except Exception:
+            # TODO: log this when metadata can log stuff...
+            # https://trello.com/c/Nrwodu9d
+            pass
+
+    for metadata_name, metadata_value in file_dict.get( 'metadata', {} ).iteritems():
+        setattr( dataset_instance.metadata, metadata_name, metadata_value )
+    dataset_instance.datatype.set_meta( dataset_instance, **set_meta_kwds )
+    for metadata_name, metadata_value in file_dict.get( 'metadata', {} ).iteritems():
+        setattr( dataset_instance.metadata, metadata_name, metadata_value )
+
+
+def set_metadata():
+    # locate galaxy_root for loading datatypes
+    galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
+    galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tool_job_working_directory = os.path.abspath(os.getcwd())
+
+    # This is ugly, but to transition smoothly from existing jobs that lack
+    # this parameter to ones that have it, it must be the last optional
+    # argument and we have to sniff it.
+    try:
+        max_metadata_value_size = int(sys.argv[-1])
+        sys.argv = sys.argv[:-1]
+    except ValueError:
+        max_metadata_value_size = 0
+        # max_metadata_value_size is unspecified and should be 0
+
+    # Set up datatypes registry
+    datatypes_config = sys.argv.pop( 1 )
+    datatypes_registry = galaxy.datatypes.registry.Registry()
+    datatypes_registry.load_datatypes( root_dir=galaxy_root, config=datatypes_config )
+    galaxy.model.set_datatypes_registry( datatypes_registry )
+
+    job_metadata = sys.argv.pop( 1 )
+    existing_job_metadata_dict = {}
+    new_job_metadata_dict = {}
+    if job_metadata != "None" and os.path.exists( job_metadata ):
+        for line in open( job_metadata, 'r' ):
+            try:
+                line = stringify_dictionary_keys( json.loads( line ) )
+                if line['type'] == 'dataset':
+                    existing_job_metadata_dict[ line['dataset_id'] ] = line
+                elif line['type'] == 'new_primary_dataset':
+                    new_job_metadata_dict[ line[ 'filename' ] ] = line
+            except Exception:
+                # skip malformed job metadata lines
+                continue
+
+    for filenames in sys.argv[1:]:
+        fields = filenames.split( ',' )
+        filename_in = fields.pop( 0 )
+        filename_kwds = fields.pop( 0 )
+        filename_out = fields.pop( 0 )
+        filename_results_code = fields.pop( 0 )
+        dataset_filename_override = fields.pop( 0 )
+        # Need to be careful with the way that these parameters are populated from the filename splitting,
+        # because if a job is running when the server is updated, any existing external metadata command-lines
+        # will not have info about the newly added override_metadata file
+        if fields:
+            override_metadata = fields.pop( 0 )
+        else:
+            override_metadata = None
+        set_meta_kwds = stringify_dictionary_keys( json.load( open( filename_kwds ) ) )  # load kwds; need to ensure our keywords are not unicode
+        try:
+            dataset = cPickle.load( open( filename_in ) )  # load DatasetInstance
+            dataset.dataset.external_filename = dataset_filename_override
+            files_path = os.path.abspath(os.path.join( tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id) ))
+            dataset.dataset.external_extra_files_path = files_path
+            if dataset.dataset.id in existing_job_metadata_dict:
+                dataset.extension = existing_job_metadata_dict[ dataset.dataset.id ].get( 'ext', dataset.extension )
+            # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles
+            if override_metadata:
+                override_metadata = json.load( open( override_metadata ) )
+                for metadata_name, metadata_file_override in override_metadata:
+                    if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override ):
+                        metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override )
+                    setattr( dataset.metadata, metadata_name, metadata_file_override )
+            file_dict = existing_job_metadata_dict.get( dataset.dataset.id, {} )
+            set_meta_with_tool_provided( dataset, file_dict, set_meta_kwds, datatypes_registry )
+            if max_metadata_value_size:
+                for k, v in dataset.metadata.items():
+                    if total_size(v) > max_metadata_value_size:
+                        log.info("Key %s too large for metadata, discarding" % k)
+                        dataset.metadata.remove_key(k)
+            dataset.metadata.to_JSON_dict( filename_out )  # write out results of set_meta
+            json.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) )  # setting metadata has succeeded
+        except Exception as e:
+            json.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) )  # setting metadata has failed somehow
+
+    for i, ( filename, file_dict ) in enumerate( new_job_metadata_dict.iteritems(), start=1 ):
+        new_dataset_filename = os.path.join( tool_job_working_directory, "working", file_dict[ 'filename' ] )
+        new_dataset = galaxy.model.Dataset( id=-i, external_filename=new_dataset_filename )
+        extra_files = file_dict.get( 'extra_files', None )
+        if extra_files is not None:
+            new_dataset._extra_files_path = os.path.join( tool_job_working_directory, "working", extra_files )
+        new_dataset.state = new_dataset.states.OK
+        new_dataset_instance = galaxy.model.HistoryDatasetAssociation( id=-i, dataset=new_dataset, extension=file_dict.get( 'ext', 'data' ) )
+        set_meta_with_tool_provided( new_dataset_instance, file_dict, set_meta_kwds, datatypes_registry )
+        file_dict[ 'metadata' ] = json.loads( new_dataset_instance.metadata.to_JSON_dict() )  # storing metadata in external form, need to turn back into dict, then later jsonify
+    if existing_job_metadata_dict or new_job_metadata_dict:
+        with open( job_metadata, 'wb' ) as job_metadata_fh:
+            for value in existing_job_metadata_dict.values() + new_job_metadata_dict.values():
+                job_metadata_fh.write( "%s\n" % ( json.dumps( value ) ) )
+
+    clear_mappers()
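+
+
+# Illustrative note on the per-dataset argument format (the concrete values
+# below are hypothetical): each positional argument after datatypes_conf.xml
+# and the job metadata file is a comma-separated tuple, e.g.
+#
+#     metadata_in,metadata_kwds,metadata_out,metadata_results_code,/path/to/output.dat,metadata_override
+#
+# where the trailing metadata_override member is optional so that jobs started
+# before an upgrade can still be processed (see the fields handling above).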
diff --git a/lib/galaxy_utils/__init__.py b/lib/galaxy_utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy_utils/sequence/__init__.py b/lib/galaxy_utils/sequence/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/galaxy_utils/sequence/fasta.py b/lib/galaxy_utils/sequence/fasta.py
new file mode 100644
index 0000000..fd078a3
--- /dev/null
+++ b/lib/galaxy_utils/sequence/fasta.py
@@ -0,0 +1,125 @@
+# Dan Blankenberg
+from six import string_types
+
+
+class fastaSequence( object ):
+    def __init__( self ):
+        self.identifier = None
+        self.sequence = ''  # holds raw sequence string: no whitespace
+
+    def __len__( self ):
+        return len( self.sequence )
+
+    def __str__( self ):
+        return "%s\n%s\n" % ( self.identifier, self.sequence )
+
+
+class fastaReader( object ):
+    def __init__( self, fh ):
+        self.file = fh
+
+    def close( self ):
+        return self.file.close()
+
+    def next( self ):
+        line = self.file.readline()
+        # remove header comment lines
+        while line and line.startswith( '#' ):
+            line = self.file.readline()
+        if not line:
+            raise StopIteration
+        assert line.startswith( '>' ), "FASTA headers must start with >"
+        rval = fastaSequence()
+        rval.identifier = line.strip()
+        offset = self.file.tell()
+        while True:
+            line = self.file.readline()
+            if not line or line.startswith( '>' ):
+                if line:
+                    # This causes sequence id lines to be read twice: once to
+                    # determine the previous sequence's end and again when the
+                    # next sequence is read; TODO: cache to avoid the re-read.
+                    self.file.seek( offset )
+                return rval
+            # The 454 qual test data used here has decimal scores without
+            # trailing spaces, so we parse and build these sequences without
+            # relying on de facto standards, i.e. in a less than ideal fashion.
+            line = line.rstrip()
+            if ' ' in rval.sequence or ' ' in line:
+                rval.sequence = "%s%s " % ( rval.sequence, line )
+            else:
+                rval.sequence += line
+            offset = self.file.tell()
+
+    def __iter__( self ):
+        while True:
+            yield self.next()
+
+
+class fastaNamedReader( object ):
+    def __init__( self, fh ):
+        self.file = fh
+        self.reader = fastaReader( self.file )
+        self.offset_dict = {}
+        self.eof = False
+
+    def close( self ):
+        return self.file.close()
+
+    def get( self, sequence_id ):
+        if not isinstance( sequence_id, string_types ):
+            sequence_id = sequence_id.identifier
+        rval = None
+        if sequence_id in self.offset_dict:
+            initial_offset = self.file.tell()
+            seq_offset = self.offset_dict[ sequence_id ].pop( 0 )
+            if not self.offset_dict[ sequence_id ]:
+                del self.offset_dict[ sequence_id ]
+            self.file.seek( seq_offset )
+            rval = self.reader.next()
+            self.file.seek( initial_offset )
+        else:
+            while True:
+                offset = self.file.tell()
+                try:
+                    fasta_seq = self.reader.next()
+                except StopIteration:
+                    self.eof = True
+                    break  # eof, id not found, will return None
+                if fasta_seq.identifier == sequence_id:
+                    rval = fasta_seq
+                    break
+                else:
+                    if fasta_seq.identifier not in self.offset_dict:
+                        self.offset_dict[ fasta_seq.identifier ] = []
+                    self.offset_dict[ fasta_seq.identifier ].append( offset )
+        return rval
+
+    def has_data( self ):
+        # returns a string representation of remaining data, or empty string (False) if no data remaining
+        eof = self.eof
+        count = 0
+        rval = ''
+        if self.offset_dict:
+            count = sum( map( len, self.offset_dict.values() ) )
+        if not eof:
+            offset = self.file.tell()
+            try:
+                self.reader.next()
+            except StopIteration:
+                eof = True
+            self.file.seek( offset )
+        if count:
+            rval = "There were %i known sequences not utilized. " % count
+        if not eof:
+            rval = "%s%s" % ( rval, "An additional unknown number of sequences exist in the input that were not utilized." )
+        return rval
+
+
+class fastaWriter( object ):
+    def __init__( self, fh ):
+        self.file = fh
+
+    def write( self, fastq_read ):
+        # this will include color space adapter base if applicable
+        self.file.write( ">%s\n%s\n" % ( fastq_read.identifier[1:], fastq_read.sequence ) )
+
+    def close( self ):
+        return self.file.close()
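+
+
+# Illustrative usage sketch (file name hypothetical; not part of the upstream
+# module): iterate a FASTA file and report sequence lengths; fastaNamedReader
+# additionally allows random access by identifier via get().
+#
+#     reader = fastaReader( open( 'in.fasta' ) )
+#     for seq in reader:
+#         print "%s: %i" % ( seq.identifier, len( seq ) )
+#     reader.close()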
diff --git a/lib/galaxy_utils/sequence/fastq.py b/lib/galaxy_utils/sequence/fastq.py
new file mode 100644
index 0000000..da47279
--- /dev/null
+++ b/lib/galaxy_utils/sequence/fastq.py
@@ -0,0 +1,838 @@
+# Dan Blankenberg
+from __future__ import print_function
+
+import math
+import string
+
+from six import Iterator, string_types
+
+from . import transform
+from .fasta import fastaSequence
+from .sequence import SequencingRead
+
+
+class fastqSequencingRead( SequencingRead ):
+    format = 'sanger'  # sanger is default
+    ascii_min = 33
+    ascii_max = 126
+    quality_min = 0
+    quality_max = 93
+    score_system = 'phred'  # phred or solexa
+    sequence_space = 'base'  # base or color
+
+    @classmethod
+    def get_class_by_format( cls, format ):
+        assert format in FASTQ_FORMATS, 'Unknown format type specified: %s' % format
+        return FASTQ_FORMATS[ format ]
+
+    @classmethod
+    def convert_score_phred_to_solexa( cls, decimal_score_list ):
+        def phred_to_solexa( score ):
+            if score <= 0:  # log10( 10 ** ( 0 / 10.0 ) - 1 ) = log10( 0 ) is undefined; clamp scores <= 0 to -5
+                return -5
+            return int( round( 10.0 * math.log10( math.pow( 10.0, ( float( score ) / 10.0 ) ) - 1.0 ) ) )
+        return [phred_to_solexa(_) for _ in decimal_score_list]
+
+    @classmethod
+    def convert_score_solexa_to_phred( cls, decimal_score_list ):
+        def solexa_to_phred( score ):
+            return int( round( 10.0 * math.log10( math.pow( 10.0, ( float( score ) / 10.0 ) ) + 1.0 ) ) )
+        return [solexa_to_phred(_) for _ in decimal_score_list]
+
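+    # Illustrative worked example of the two conversions above (values derived
+    # from the formulas; not part of the upstream module):
+    #     convert_score_phred_to_solexa( [ 0, 3, 10 ] )   -> [ -5, 0, 10 ]
+    #     convert_score_solexa_to_phred( [ -5, 0, 10 ] )  -> [ 1, 3, 10 ]
+    # The mappings agree closely except near zero, where phred scores <= 0 are
+    # clamped to the Solexa minimum of -5.
+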
+    @classmethod
+    def restrict_scores_to_valid_range( cls, decimal_score_list ):
+        def restrict_score( score ):
+            return max( min( score, cls.quality_max ), cls.quality_min )
+        return [restrict_score(_) for _ in decimal_score_list]
+
+    @classmethod
+    def transform_scores_to_valid_range( cls, decimal_score_list):
+        cls_quality_max = cls.quality_max
+        cls_quality_min = cls.quality_min
+        for i in range( len( decimal_score_list ) ):
+            score = decimal_score_list[i]
+            if(score > cls_quality_max):
+                transformed_score = cls_quality_max
+            elif( score < cls_quality_min ):
+                transformed_score = cls_quality_min
+            else:
+                transformed_score = score
+            decimal_score_list[i] = str(transformed_score)
+
+    @classmethod
+    def transform_scores_to_valid_range_ascii( cls, decimal_score_list ):
+        cls_quality_max = cls.quality_max
+        cls_quality_min = cls.quality_min
+        to_quality = cls.ascii_min - cls.quality_min
+        for i in range( len( decimal_score_list ) ):
+            score = decimal_score_list[i]
+            if(score > cls_quality_max):
+                transformed_score = cls_quality_max
+            elif( score < cls_quality_min ):
+                transformed_score = cls_quality_min
+            else:
+                transformed_score = score
+            transformed_score = chr(transformed_score + to_quality)
+            decimal_score_list[i] = transformed_score
+
+    @classmethod
+    def convert_base_to_color_space( cls, sequence ):
+        return cls.color_space_converter.to_color_space( sequence )
+
+    @classmethod
+    def convert_color_to_base_space( cls, sequence ):
+        return cls.color_space_converter.to_base_space( sequence )
+
+    def is_ascii_encoded( self ):
+        # as per fastq definition only decimal quality strings can have spaces (and TABs for our purposes) in them (and must have a trailing space)
+        if ' ' in self.quality:
+            return False
+        if '\t' in self.quality:
+            return False
+        return True
+
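+    # Illustrative note (sanger offset 33 assumed; not part of the upstream
+    # module): the same three scores may be stored ASCII-encoded, e.g.
+    # quality = 'II5' (decimal 40, 40, 20), or as space-separated decimals
+    # with a trailing space, e.g. quality = '40 40 20 '; is_ascii_encoded()
+    # distinguishes the two by the presence of whitespace.
+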
+    def get_ascii_quality_scores( self ):
+        if self.is_ascii_encoded():
+            return list( self.quality )
+        else:
+            quality = self.quality.rstrip()  # decimal scores should have a trailing space
+            if quality:
+                try:
+                    to_quality = self.ascii_min - self.quality_min
+                    return [ chr( int( val ) + to_quality ) for val in quality.split() ]
+                except ValueError as e:
+                    raise ValueError( 'Error parsing quality string. ASCII quality strings cannot contain spaces (%s): %s' % ( self.quality, e ) )
+            else:
+                return []
+
+    def get_ascii_quality_scores_len( self ):
+        """
+        Compute ascii quality score length, without generating relatively
+        expensive qualty score array.
+        """
+        if self.is_ascii_encoded():
+            return len( self.quality )
+        else:
+            quality = self.quality.rstrip()
+            if quality:
+                try:
+                    return len( quality.split() )
+                except ValueError as e:
+                    raise ValueError( 'Error parsing quality string. ASCII quality strings cannot contain spaces (%s): %s' % ( self.quality, e ) )
+            else:
+                return 0
+
+    def get_decimal_quality_scores( self ):
+        return self.__get_decimal_quality_scores(self.is_ascii_encoded())
+
+    def __get_decimal_quality_scores( self, ascii ):
+        if ascii:
+            to_quality = self.quality_min - self.ascii_min
+            return [ ord( val ) + to_quality for val in self.quality ]
+        else:
+            quality = self.quality.rstrip()  # decimal scores should have a trailing space
+            if quality:
+                return [ int( val ) for val in quality.split() if val.strip() ]
+            else:
+                return []
+
+    def convert_read_to_format( self, format, force_quality_encoding=None ):
+        assert format in FASTQ_FORMATS, 'Unknown format type specified: %s' % format
+        assert force_quality_encoding in [ None, 'ascii', 'decimal' ], 'Invalid force_quality_encoding: %s' % force_quality_encoding
+        new_class = FASTQ_FORMATS[ format ]
+        new_read = new_class()
+        new_read.identifier = self.identifier
+        if self.sequence_space == new_class.sequence_space:
+            new_read.sequence = self.sequence
+        else:
+            if self.sequence_space == 'base':
+                new_read.sequence = self.convert_base_to_color_space( self.sequence )
+            else:
+                new_read.sequence = self.convert_color_to_base_space( self.sequence )
+        new_read.description = self.description
+        is_ascii = self.is_ascii_encoded()
+        if self.score_system != new_read.score_system:
+            if self.score_system == 'phred':
+                score_list = self.convert_score_phred_to_solexa( self.__get_decimal_quality_scores(is_ascii) )
+            else:
+                score_list = self.convert_score_solexa_to_phred( self.__get_decimal_quality_scores(is_ascii) )
+        else:
+            score_list = self.__get_decimal_quality_scores(is_ascii)
+        if force_quality_encoding is None:
+            if is_ascii:
+                new_encoding = 'ascii'
+            else:
+                new_encoding = 'decimal'
+        else:
+            new_encoding = force_quality_encoding
+        if new_encoding == 'ascii':
+            new_class.transform_scores_to_valid_range_ascii( score_list )
+            new_read.quality = "".join( score_list )
+        else:  # decimal
+            new_class.transform_scores_to_valid_range( score_list )
+            new_read.quality = "%s " % " ".join( score_list )  # need trailing space to be valid decimal fastq
+        return new_read
+
+    def get_sequence( self ):
+        return self.sequence
+
+    def slice( self, left_column_offset, right_column_offset ):
+        new_read = fastqSequencingRead.get_class_by_format( self.format )()
+        new_read.identifier = self.identifier
+        new_read.sequence = self.get_sequence()[left_column_offset:right_column_offset]
+        new_read.description = self.description
+        if self.is_ascii_encoded():
+            new_read.quality = self.quality[left_column_offset:right_column_offset]
+        else:
+            quality = [str(_) for _ in self.get_decimal_quality_scores()[left_column_offset:right_column_offset]]
+            if quality:
+                new_read.quality = "%s " % " ".join( quality )
+            else:
+                new_read.quality = ''
+        return new_read
+
+    def is_valid_format( self ):
+        if self.is_ascii_encoded():
+            for val in self.get_ascii_quality_scores():
+                val = ord( val )
+                if val < self.ascii_min or val > self.ascii_max:
+                    return False
+        else:
+            for val in self.get_decimal_quality_scores():
+                if val < self.quality_min or val > self.quality_max:
+                    return False
+        if not self.is_valid_sequence():
+            return False
+        return True
+
+    def is_valid_sequence( self ):
+        for base in self.get_sequence():
+            if base not in self.valid_sequence_list:
+                return False
+        return True
+
+    def insufficient_quality_length( self ):
+        return self.get_ascii_quality_scores_len() < len( self.sequence )
+
+    def assert_sequence_quality_lengths( self ):
+        qual_len = self.get_ascii_quality_scores_len()
+        seq_len = len( self.sequence )
+        assert qual_len == seq_len, "Invalid FASTQ file: quality score length (%i) does not match sequence length (%i)" % ( qual_len, seq_len )
+
+    def reverse( self, clone=True ):
+        # need to override how decimal quality scores are reversed
+        if clone:
+            rval = self.clone()
+        else:
+            rval = self
+        rval.sequence = transform.reverse( self.sequence )
+        if rval.is_ascii_encoded():
+            rval.quality = rval.quality[::-1]
+        else:
+            rval.quality = reversed( rval.get_decimal_quality_scores() )
+            rval.quality = "%s " % " ".join( map( str, rval.quality ) )
+        return rval
+
+    def apply_galaxy_conventions( self ):
+        pass
+
+
+class fastqSangerRead( fastqSequencingRead ):
+    format = 'sanger'
+    ascii_min = 33
+    ascii_max = 126
+    quality_min = 0
+    quality_max = 93
+    score_system = 'phred'
+    sequence_space = 'base'
+
+
+class fastqIlluminaRead( fastqSequencingRead ):
+    format = 'illumina'
+    ascii_min = 64
+    ascii_max = 126
+    quality_min = 0
+    quality_max = 62
+    score_system = 'phred'
+    sequence_space = 'base'
+
+
+class fastqSolexaRead( fastqSequencingRead ):
+    format = 'solexa'
+    ascii_min = 59
+    ascii_max = 126
+    quality_min = -5
+    quality_max = 62
+    score_system = 'solexa'
+    sequence_space = 'base'
+
+
+class fastqCSSangerRead( fastqSequencingRead ):
+    format = 'cssanger'  # color space
+    ascii_min = 33
+    ascii_max = 126
+    quality_min = 0
+    quality_max = 93
+    score_system = 'phred'
+    sequence_space = 'color'
+    valid_sequence_list = [str(_) for _ in range(7)] + [ '.' ]
+
+    def __len__( self ):
+        if self.has_adapter_base():  # Adapter base is not counted in length of read
+            return len( self.sequence ) - 1
+        return fastqSequencingRead.__len__( self )
+
+    def has_adapter_base( self ):
+        if self.sequence and self.sequence[0] in string.ascii_letters:  # adapter base must be a letter; ascii_letters is locale-independent and Python 3 safe
+            return True
+        return False
+
+    def insufficient_quality_length( self ):
+        if self.has_adapter_base():
+            return self.get_ascii_quality_scores_len() + 1 < len( self.sequence )
+        return fastqSequencingRead.insufficient_quality_length( self )
+
+    def assert_sequence_quality_lengths( self ):
+        if self.has_adapter_base():
+            qual_len = self.get_ascii_quality_scores_len()
+            seq_len = len( self.sequence )
+            # SRA adds fake/dummy quality scores to the adapter base; we allow
+            # reading the improper score here, but remove it in the reader when
+            # apply_galaxy_conventions is set to True.
+            assert ( qual_len + 1 == seq_len ) or ( qual_len == seq_len ), "Invalid FASTQ file: quality score length (%i) does not match sequence length (%i with adapter base)" % ( qual_len, seq_len )
+        else:
+            return fastqSequencingRead.assert_sequence_quality_lengths( self )
+
+    def get_sequence( self ):
+        if self.has_adapter_base():
+            return self.sequence[1:]
+        return self.sequence
+
+    def reverse( self, clone=True ):
+        # need to override how color space is reversed
+        if clone:
+            rval = self.clone()
+        else:
+            rval = self
+        if rval.has_adapter_base():
+            adapter = rval.sequence[0]
+            # sequence = rval.sequence[1:]
+            rval.sequence = self.color_space_converter.to_color_space( transform.reverse( self.color_space_converter.to_base_space( rval.sequence ) ), adapter_base=adapter )
+        else:
+            rval.sequence = transform.reverse( rval.sequence )
+
+        if rval.is_ascii_encoded():
+            rval.quality = rval.quality[::-1]
+        else:
+            rval.quality = reversed( rval.get_decimal_quality_scores() )
+            rval.quality = "%s " % " ".join( map( str, rval.quality ) )
+        return rval
+
+    def complement( self, clone=True ):
+        # need to override how color space is complemented
+        if clone:
+            rval = self.clone()
+        else:
+            rval = self
+        if rval.has_adapter_base():
+            # Only the adapter base needs complementing: color space encodes
+            # base transitions, which are invariant under complement, so with
+            # no adapter base the sequence stays the same.
+            adapter = rval.sequence[0]
+            sequence = rval.sequence[1:]
+            if adapter.lower() != 'u':
+                adapter = transform.DNA_complement( adapter )
+            else:
+                adapter = transform.RNA_complement( adapter )
+            rval.sequence = "%s%s" % ( adapter, sequence )
+        return rval
+
+    def change_adapter( self, new_adapter, clone=True ):
+        # if new_adapter is empty, remove adapter, otherwise replace with new_adapter
+        if clone:
+            rval = self.clone()
+        else:
+            rval = self
+        if rval.has_adapter_base():
+            if new_adapter:
+                if new_adapter != rval.sequence[0]:
+                    rval.sequence = rval.color_space_converter.to_color_space( rval.color_space_converter.to_base_space( rval.sequence ), adapter_base=new_adapter )
+            else:
+                rval.sequence = rval.sequence[1:]
+        elif new_adapter:
+            rval.sequence = "%s%s" % ( new_adapter, rval.sequence )
+        return rval
+
+    def apply_galaxy_conventions( self ):
+        if self.has_adapter_base() and len( self.sequence ) == len( self.get_ascii_quality_scores() ):  # SRA adds FAKE/DUMMY quality scores to the adapter base, we remove them here
+            if self.is_ascii_encoded():
+                self.quality = self.quality[1:]
+            else:
+                self.quality = " ".join( map( str, self.get_decimal_quality_scores()[1:] ) )
+
+
+FASTQ_FORMATS = {}
+for format in [ fastqIlluminaRead, fastqSolexaRead, fastqSangerRead, fastqCSSangerRead ]:
+    FASTQ_FORMATS[ format.format ] = format
+
+
+class fastqAggregator( object ):
+    VALID_FORMATS = list(FASTQ_FORMATS.keys())
+
+    def __init__( self ):
+        self.ascii_values_used = []  # quick lookup of all ascii chars used
+        self.seq_lens = {}  # counts of seqs by read len
+        self.nuc_index_quality = []  # counts of scores by read column
+        self.nuc_index_base = []  # counts of bases by read column
+
+    def consume_read( self, fastq_read ):
+        # ascii values used
+        for val in fastq_read.get_ascii_quality_scores():
+            if val not in self.ascii_values_used:
+                self.ascii_values_used.append( val )
+        # lengths
+        seq_len = len( fastq_read )
+        self.seq_lens[ seq_len ] = self.seq_lens.get( seq_len, 0 ) + 1
+        # decimal qualities by column
+        for i, val in enumerate( fastq_read.get_decimal_quality_scores() ):
+            if i == len( self.nuc_index_quality ):
+                self.nuc_index_quality.append( {} )
+            self.nuc_index_quality[ i ][ val ] = self.nuc_index_quality[ i ].get( val, 0 ) + 1
+        # bases by column
+        for i, nuc in enumerate( fastq_read.get_sequence() ):
+            if i == len( self.nuc_index_base ):
+                self.nuc_index_base.append( {} )
+            nuc = nuc.upper()
+            self.nuc_index_base[ i ][ nuc ] = self.nuc_index_base[ i ].get( nuc, 0 ) + 1
+
+    def get_valid_formats( self, check_list=None ):
+        if not check_list:
+            check_list = self.VALID_FORMATS
+        rval = []
+        sequence = []
+        for nuc_dict in self.nuc_index_base:
+            for nuc in nuc_dict.keys():
+                if nuc not in sequence:
+                    sequence.append( nuc )
+        sequence = "".join( sequence )
+        quality = "".join( self.ascii_values_used )
+        for fastq_format in check_list:
+            fastq_read = fastqSequencingRead.get_class_by_format( fastq_format )()
+            fastq_read.quality = quality
+            fastq_read.sequence = sequence
+            if fastq_read.is_valid_format():
+                rval.append( fastq_format )
+        return rval
+
+    def get_ascii_range( self ):
+        if not self.ascii_values_used:
+            return None
+        return ( min( self.ascii_values_used ), max( self.ascii_values_used ) )
+
+    def get_decimal_range( self ):
+        if not self.nuc_index_quality:
+            return None
+        decimal_values_used = []
+        for scores in self.nuc_index_quality:
+            decimal_values_used.extend( scores.keys() )
+        return ( min( decimal_values_used ), max( decimal_values_used ) )
+
+    def get_length_counts( self ):
+        return self.seq_lens
+
+    def get_max_read_length( self ):
+        return len( self.nuc_index_quality )
+
+    def get_read_count_for_column( self, column ):
+        if column >= len( self.nuc_index_quality ):
+            return 0
+        return sum( self.nuc_index_quality[ column ].values() )
+
+    def get_read_count( self ):
+        return self.get_read_count_for_column( 0 )
+
+    def get_base_counts_for_column( self, column ):
+        return self.nuc_index_base[ column ]
+
+    def get_score_list_for_column( self, column ):
+        return list(self.nuc_index_quality[ column ].keys())
+
+    def get_score_min_for_column( self, column ):
+        return min( self.nuc_index_quality[ column ].keys() )
+
+    def get_score_max_for_column( self, column ):
+        return max( self.nuc_index_quality[ column ].keys() )
+
+    def get_score_sum_for_column( self, column ):
+        return sum( score * count for score, count in self.nuc_index_quality[ column ].items() )
+
+    def get_score_at_position_for_column( self, column, position ):
+        score_value_dict = self.nuc_index_quality[ column ]
+        scores = sorted( score_value_dict.keys() )
+        for score in scores:
+            if score_value_dict[ score ] <= position:
+                position -= score_value_dict[ score ]
+            else:
+                return score
+
+    def get_summary_statistics_for_column( self, i ):
+        def _get_med_pos( size ):
+            halved = int( size / 2 )
+            if size % 2 == 1:
+                return [ halved ]
+            return [ halved - 1, halved ]
+        read_count = self.get_read_count_for_column( i )
+
+        min_score = self.get_score_min_for_column( i )
+        max_score = self.get_score_max_for_column( i )
+        sum_score = self.get_score_sum_for_column( i )
+        mean_score = float( sum_score ) / float( read_count )
+        # get positions
+        med_pos = _get_med_pos( read_count )
+        if 0 in med_pos:
+            q1_pos = [ 0 ]
+            q3_pos = [ read_count - 1 ]
+        else:
+            q1_pos = _get_med_pos( min( med_pos ) )
+            q3_pos = []
+            for pos in q1_pos:
+                q3_pos.append( max( med_pos ) + 1 + pos )
+        # get scores at position
+        med_score = float( sum( [ self.get_score_at_position_for_column( i, pos ) for pos in med_pos ] ) ) / float( len( med_pos ) )
+        q1 = float( sum( [ self.get_score_at_position_for_column( i, pos ) for pos in q1_pos ] ) ) / float( len( q1_pos ) )
+        q3 = float( sum( [ self.get_score_at_position_for_column( i, pos ) for pos in q3_pos ] ) ) / float( len( q3_pos ) )
+        # determine iqr and step
+        iqr = q3 - q1
+        step = 1.5 * iqr
+
+        # Determine whiskers and outliers
+        outliers = []
+        score_list = sorted( self.get_score_list_for_column( i ) )
+        left_whisker = q1 - step
+        for score in score_list:
+            if left_whisker <= score:
+                left_whisker = score
+                break
+            else:
+                outliers.append( score )
+
+        right_whisker = q3 + step
+        score_list.reverse()
+        for score in score_list:
+            if right_whisker >= score:
+                right_whisker = score
+                break
+            else:
+                outliers.append( score )
+
+        column_stats = { 'read_count': read_count,
+                         'min_score': min_score,
+                         'max_score': max_score,
+                         'sum_score': sum_score,
+                         'mean_score': mean_score,
+                         'q1': q1,
+                         'med_score': med_score,
+                         'q3': q3,
+                         'iqr': iqr,
+                         'left_whisker': left_whisker,
+                         'right_whisker': right_whisker,
+                         'outliers': outliers }
+        return column_stats
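+
+    # Illustrative worked example of the quartile-position logic above (not
+    # part of the upstream module): for a column with 8 reads, med_pos is
+    # [ 3, 4 ], q1_pos is [ 1 ] and q3_pos is [ 6 ] (0-based ranks into the
+    # sorted per-column score distribution); q1, med_score and q3 average the
+    # scores found at those ranks.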
+
+
+class fastqReader( Iterator ):
+    def __init__( self, fh, format='sanger', apply_galaxy_conventions=False ):
+        self.file = fh
+        self.format = format
+        self.apply_galaxy_conventions = apply_galaxy_conventions
+
+    def close( self ):
+        return self.file.close()
+
+    def __next__(self):
+        while True:
+            fastq_header = self.file.readline()
+            if not fastq_header:
+                raise StopIteration
+            fastq_header = fastq_header.rstrip( '\n\r' )
+            # skip empty lines; extra newlines at the end of a file are apparently common
+            if fastq_header:
+                break
+
+        assert fastq_header.startswith( '@' ), 'Invalid fastq header: %s' % fastq_header
+        rval = fastqSequencingRead.get_class_by_format( self.format )()
+        rval.identifier = fastq_header
+        while True:
+            line = self.file.readline()
+            if not line:
+                raise Exception( 'Invalid FASTQ file: could not find quality score of sequence identifier %s.' % rval.identifier )
+            line = line.rstrip( '\n\r' )
+            if line.startswith( '+' ) and ( len( line ) == 1 or line[1:].startswith( fastq_header[1:] ) ):
+                rval.description = line
+                break
+            rval.append_sequence( line )
+        while rval.insufficient_quality_length():
+            line = self.file.readline()
+            if not line:
+                break
+            rval.append_quality( line )
+        rval.assert_sequence_quality_lengths()
+        if self.apply_galaxy_conventions:
+            rval.apply_galaxy_conventions()
+        return rval
+
+    def __iter__( self ):
+        while True:
+            yield next(self)
+
+
+class ReadlineCountFile( object ):
+    def __init__( self, f ):
+        self.__file = f
+        self.readline_count = 0
+
+    def readline( self, *args, **kwds ):
+        self.readline_count += 1
+        return self.__file.readline( *args, **kwds )
+
+    def __getattr__( self, name ):
+        return getattr( self.__file, name )
+
+
+class fastqVerboseErrorReader( fastqReader ):
+    MAX_PRINT_ERROR_BYTES = 1024
+
+    def __init__( self, fh, **kwds ):
+        super( fastqVerboseErrorReader, self ).__init__( ReadlineCountFile( fh ), **kwds )
+        self.last_good_identifier = None
+
+    def __next__( self ):
+        last_good_end_offset = self.file.tell()
+        last_readline_count = self.file.readline_count
+        try:
+            block = super( fastqVerboseErrorReader, self ).__next__()
+            self.last_good_identifier = block.identifier
+            return block
+        except StopIteration as e:
+            raise e
+        except Exception as e:
+            print("There was an error reading your input file. Your input file is likely malformed.\nIt is suggested that you double-check your original input file for errors -- helpful information for this purpose has been provided below.\nHowever, if you think that you have encountered an actual error with this tool, please do tell us by using the bug reporting mechanism.\n\nThe reported error is: '%s'." % e)
+            if self.last_good_identifier is not None:
+                print("The last valid FASTQ read had an identifier of '%s'." % self.last_good_identifier)
+            else:
+                print("The error occurred at the start of your file and no valid FASTQ reads were found.")
+            error_offset = self.file.tell()
+            error_byte_count = error_offset - last_good_end_offset
+            print_error_bytes = min( self.MAX_PRINT_ERROR_BYTES, error_byte_count )
+            print("The error in your file occurs between lines '%i' and '%i', which corresponds to byte-offsets '%i' and '%i', and contains the text (%i of %i bytes shown):\n" % ( last_readline_count + 1, self.file.readline_count, last_good_end_offset, error_offset, print_error_bytes, error_byte_count ))
+            self.file.seek( last_good_end_offset )
+            print(self.file.read( print_error_bytes ))
+            raise e
+
+
+class fastqNamedReader( object ):
+    def __init__( self, fh, format='sanger', apply_galaxy_conventions=False ):
+        self.file = fh
+        self.format = format
+        self.reader = fastqReader( self.file, self.format )
+        # self.last_offset = self.file.tell()
+        self.offset_dict = {}
+        self.eof = False
+        self.apply_galaxy_conventions = apply_galaxy_conventions
+
+    def close( self ):
+        return self.file.close()
+
+    def get( self, sequence_identifier ):
+        # Input is either a sequence ID or a sequence object
+        if not isinstance( sequence_identifier, string_types ):
+            # Input was a sequence object (not a sequence ID). Get the sequence ID
+            sequence_identifier = sequence_identifier.identifier
+        # Get only the ID part of the sequence header
+        sequence_id, sequence_sep, sequence_desc = sequence_identifier.partition(' ')
+        rval = None
+        if sequence_id in self.offset_dict:
+            initial_offset = self.file.tell()
+            seq_offset = self.offset_dict[ sequence_id ].pop( 0 )
+            if not self.offset_dict[ sequence_id ]:
+                del self.offset_dict[ sequence_id ]
+            self.file.seek( seq_offset )
+            rval = next(self.reader)
+            # assert rval.id == sequence_id, 'seq id mismatch' #should be able to remove this
+            self.file.seek( initial_offset )
+        else:
+            while True:
+                offset = self.file.tell()
+                try:
+                    fastq_read = next(self.reader)
+                except StopIteration:
+                    self.eof = True
+                    break  # eof, id not found, will return None
+                fastq_read_id, fastq_read_sep, fastq_read_desc = fastq_read.identifier.partition(' ')
+                if fastq_read_id == sequence_id:
+                    rval = fastq_read
+                    break
+                else:
+                    if fastq_read_id not in self.offset_dict:
+                        self.offset_dict[ fastq_read_id ] = []
+                    self.offset_dict[ fastq_read_id ].append( offset )
+        if rval is not None and self.apply_galaxy_conventions:
+            rval.apply_galaxy_conventions()
+        return rval
+
+    def has_data( self ):
+        # returns a string representation of remaining data, or empty string (False) if no data remaining
+        eof = self.eof
+        count = 0
+        rval = ''
+        if self.offset_dict:
+            count = sum( map( len, self.offset_dict.values() ) )
+        if not eof:
+            offset = self.file.tell()
+            try:
+                next(self.reader)
+            except StopIteration:
+                eof = True
+            self.file.seek( offset )
+        if count:
+            rval = "There were %i known sequence reads not utilized. " % count
+        if not eof:
+            rval = "%s%s" % ( rval, "An additional unknown number of reads exist in the input that were not utilized." )
+        return rval
+
+
+class fastqWriter( object ):
+    def __init__( self, fh, format=None, force_quality_encoding=None ):
+        self.file = fh
+        self.format = format
+        self.force_quality_encoding = force_quality_encoding
+
+    def write( self, fastq_read ):
+        if self.format:
+            fastq_read = fastq_read.convert_read_to_format( self.format, force_quality_encoding=self.force_quality_encoding )
+        self.file.write( str( fastq_read ) )
+
+    def close( self ):
+        return self.file.close()
+
+
+class fastqJoiner( object ):
+    def __init__( self, format, force_quality_encoding=None ):
+        self.format = format
+        self.force_quality_encoding = force_quality_encoding
+
+    def join( self, read1, read2 ):
+        read1_id, read1_sep, read1_desc = read1.identifier.partition(' ')
+        read2_id, read2_sep, read2_desc = read2.identifier.partition(' ')
+        if read1_id.endswith( '/2' ) and read2_id.endswith( '/1' ):
+            # swap 1 and 2, including the already-parsed identifier parts, so
+            # that the '/1' mate drives the joined identifier below
+            read1, read2 = read2, read1
+            read1_id, read2_id = read2_id, read1_id
+            read1_desc, read2_desc = read2_desc, read1_desc
+        if read1_id.endswith( '/1' ) and read2_id.endswith( '/2' ):
+            read1_id = read1_id[:-2]
+
+        identifier = read1_id
+        if read1_desc:
+            identifier = identifier + ' ' + read1_desc
+
+        # use force quality encoding, if not present force to encoding of first read
+        force_quality_encoding = self.force_quality_encoding
+        if not force_quality_encoding:
+            if read1.is_ascii_encoded():
+                force_quality_encoding = 'ascii'
+            else:
+                force_quality_encoding = 'decimal'
+
+        new_read1 = read1.convert_read_to_format( self.format, force_quality_encoding=force_quality_encoding )
+        new_read2 = read2.convert_read_to_format( self.format, force_quality_encoding=force_quality_encoding )
+        rval = FASTQ_FORMATS[ self.format ]()
+        rval.identifier = identifier
+        if len( read1.description ) > 1:
+            rval.description = "+%s" % ( identifier[1:] )
+        else:
+            rval.description = '+'
+        if rval.sequence_space == 'color':
+            # need to handle color space joining differently
+            # convert to nuc space, join, then convert back
+            rval.sequence = rval.convert_base_to_color_space( new_read1.convert_color_to_base_space( new_read1.sequence ) + new_read2.convert_color_to_base_space( new_read2.sequence ) )
+        else:
+            rval.sequence = new_read1.sequence + new_read2.sequence
+        if force_quality_encoding == 'ascii':
+            rval.quality = new_read1.quality + new_read2.quality
+        else:
+            rval.quality = "%s %s" % ( new_read1.quality.strip(), new_read2.quality.strip() )
+        return rval
+
+    def get_paired_identifier( self, fastq_read ):
+        read_id, read_sep, read_desc = fastq_read.identifier.partition(' ')
+        if len( read_id ) > 1 and read_id[-2] == '/':
+            if read_id[-1] == "1":
+                read_id = "%s2" % read_id[:-1]
+            elif read_id[-1] == "2":
+                read_id = "%s1" % read_id[:-1]
+        return read_id
+
+    def is_first_mate( self, sequence_id ):
+        is_first = None
+        if not isinstance( sequence_id, string_types ):
+            sequence_id = sequence_id.identifier
+        sequence_id, sequence_sep, sequence_desc = sequence_id.partition(' ')
+        if len( sequence_id ) > 1 and sequence_id[-2] == '/':
+            if sequence_id[-1] == "1":
+                is_first = True
+            else:
+                is_first = False
+        return is_first
+
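+    # Illustrative sketch of typical use (the reads r1 = '@id/1' and
+    # r2 = '@id/2' are hypothetical; not part of the upstream module):
+    #     joiner = fastqJoiner( 'sanger' )
+    #     joined = joiner.join( r1, r2 )        # joined.identifier -> '@id'
+    #     joiner.get_paired_identifier( r1 )    # -> '@id/2'
+    #     joiner.is_first_mate( r1 )            # -> True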
+
+class fastqSplitter( object ):
+    def split( self, fastq_read ):
+        length = len( fastq_read )
+        # Only reads of even lengths can be split
+        if length % 2 != 0:
+            return None, None
+        half = int( length / 2 )
+        read1 = fastq_read.slice( 0, half )
+        read1.identifier += "/1"
+        if len( read1.description ) > 1:
+            read1.description += "/1"
+        read2 = fastq_read.slice( half, None )
+        read2.identifier += "/2"
+        if len( read2.description ) > 1:
+            read2.description += "/2"
+        return read1, read2
+
+
+class fastqCombiner( object ):
+    def __init__( self, format ):
+        self.format = format
+
+    def combine(self, fasta_seq, quality_seq ):
+        fastq_read = fastqSequencingRead.get_class_by_format( self.format )()
+        fastq_read.identifier = "@%s" % fasta_seq.identifier[1:]
+        fastq_read.description = '+'
+        fastq_read.sequence = fasta_seq.sequence
+        fastq_read.quality = quality_seq.sequence
+        return fastq_read
+
+
+class fastqFakeFastaScoreReader( object ):
+    def __init__( self, format='sanger', quality_encoding=None ):
+        self.fastq_read = fastqSequencingRead.get_class_by_format( format )()
+        if quality_encoding != 'decimal':
+            quality_encoding = 'ascii'
+        self.quality_encoding = quality_encoding
+
+    def close( self ):
+        return  # nothing to close
+
+    def get( self, sequence ):
+        assert isinstance( sequence, fastaSequence ), 'fastqFakeFastaScoreReader requires a fastaSequence object as the parameter'
+        # add sequence to fastq_read, then get_sequence(), color space adapters do not have quality score values
+        self.fastq_read.sequence = sequence.sequence
+        new_sequence = fastaSequence()
+        new_sequence.identifier = sequence.identifier
+        if self.quality_encoding == 'ascii':
+            new_sequence.sequence = chr( self.fastq_read.ascii_max ) * len( self.fastq_read.get_sequence() )
+        else:
+            new_sequence.sequence = ( "%i " % self.fastq_read.quality_max ) * len( self.fastq_read.get_sequence() )
+        return new_sequence
+
+    def has_data( self ):
+        return ''  # No actual data exist, none can be remaining
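+
+
+# Illustrative usage sketch (file names hypothetical; not part of the upstream
+# module): a format-converting copy using the reader/writer classes above.
+#
+#     reader = fastqReader( open( 'in.fastq' ), format='illumina' )
+#     writer = fastqWriter( open( 'out.fastq', 'w' ), format='sanger' )
+#     for read in reader:
+#         writer.write( read )
+#     reader.close()
+#     writer.close()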
diff --git a/lib/galaxy_utils/sequence/sequence.py b/lib/galaxy_utils/sequence/sequence.py
new file mode 100644
index 0000000..37a0a83
--- /dev/null
+++ b/lib/galaxy_utils/sequence/sequence.py
@@ -0,0 +1,74 @@
+# Dan Blankenberg
+import string
+from copy import deepcopy
+
+from . import transform
+
+
+class SequencingRead( object ):
+    color_space_converter = transform.ColorSpaceConverter()
+    valid_sequence_list = string.ascii_letters
+
+    def __init__( self ):
+        self.identifier = None
+        self.sequence = ''  # holds raw sequence string: no whitespace
+        self.description = None
+        self.quality = ''  # holds raw quality string: no whitespace, unless this contains decimal scores
+
+    def __len__( self ):
+        return len( self.sequence )
+
+    def __str__( self ):
+        return "%s\n%s\n%s\n%s\n" % ( self.identifier, self.sequence, self.description, self.quality )
+
+    def append_sequence( self, sequence ):
+        self.sequence += sequence.rstrip( '\n\r' )
+
+    def append_quality( self, quality ):
+        self.quality += quality.rstrip( '\n\r' )
+
+    def is_DNA( self ):
+        return 'u' not in self.sequence.lower()
+
+    def clone( self ):
+        return deepcopy( self )
+
+    def reverse( self, clone=True ):
+        if clone:
+            rval = self.clone()
+        else:
+            rval = self
+        rval.sequence = transform.reverse( self.sequence )
+        rval.quality = rval.quality[::-1]
+        return rval
+
+    def complement( self, clone=True ):
+        if clone:
+            rval = self.clone()
+        else:
+            rval = self
+        if rval.is_DNA():
+            rval.sequence = transform.DNA_complement( rval.sequence )
+        else:
+            rval.sequence = transform.RNA_complement( rval.sequence )
+        return rval
+
+    def reverse_complement( self, clone=True ):
+        # need to reverse first, then complement
+        rval = self.reverse( clone=clone )
+        return rval.complement( clone=False )  # already working with a clone if requested
+
+    def sequence_as_DNA( self, clone=True ):
+        if clone:
+            rval = self.clone()
+        else:
+            rval = self
+        rval.sequence = transform.to_DNA( rval.sequence )
+        return rval
+
+    def sequence_as_RNA( self, clone=True ):
+        if clone:
+            rval = self.clone()
+        else:
+            rval = self
+        rval.sequence = transform.to_RNA( rval.sequence )
+        return rval
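+
+
+# Illustrative worked example (input assumed): for a read r with
+# r.sequence = 'ACGT', r.complement().sequence == 'TGCA',
+# r.reverse().sequence == 'TGCA', and r.reverse_complement().sequence == 'ACGT'
+# ('ACGT' is its own reverse complement).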
diff --git a/lib/galaxy_utils/sequence/transform.py b/lib/galaxy_utils/sequence/transform.py
new file mode 100644
index 0000000..70c501d
--- /dev/null
+++ b/lib/galaxy_utils/sequence/transform.py
@@ -0,0 +1,90 @@
+# Dan Blankenberg
+# Contains methods to transform sequence strings
+import string
+
+# Translation tables for complementing DNA/RNA, including IUPAC ambiguity codes
+DNA_COMPLEMENT = string.maketrans( "ACGTRYKMBDHVacgtrykmbdhv", "TGCAYRMKVHDBtgcayrmkvhdb" )
+RNA_COMPLEMENT = string.maketrans( "ACGURYKMBDHVacgurykmbdhv", "UGCAYRMKVHDBugcayrmkvhdb" )
+# Translation table for DNA <--> RNA
+DNA_TO_RNA = string.maketrans( "Tt", "Uu" )
+RNA_TO_DNA = string.maketrans( "Uu", "Tt" )
+
+
+# reverse sequence string
+def reverse( sequence ):
+    return sequence[::-1]
+
+
+# complement DNA sequence string
+def DNA_complement( sequence ):
+    return sequence.translate( DNA_COMPLEMENT )
+
+
+# complement RNA sequence string
+def RNA_complement( sequence ):
+    return sequence.translate( RNA_COMPLEMENT )
+
+
+# returns the reverse complement of the sequence
+def DNA_reverse_complement( sequence ):
+    sequence = reverse( sequence )
+    return DNA_complement( sequence )
+
+
+def RNA_reverse_complement( sequence ):
+    sequence = reverse( sequence )
+    return RNA_complement( sequence )
+
+
+def to_DNA( sequence ):
+    # U -> T (the original tables were applied in the wrong direction)
+    return sequence.translate( RNA_TO_DNA )
+
+
+def to_RNA( sequence ):
+    # T -> U
+    return sequence.translate( DNA_TO_RNA )
+
+
+class ColorSpaceConverter( object ):
+    unknown_base = 'N'
+    unknown_color = '.'
+    color_to_base_dict = {}
+    color_to_base_dict[ 'A' ] = { '0': 'A', '1': 'C', '2': 'G', '3': 'T', '4': 'N', '5': 'N', '6': 'N', '.': 'N' }
+    color_to_base_dict[ 'C' ] = { '0': 'C', '1': 'A', '2': 'T', '3': 'G', '4': 'N', '5': 'N', '6': 'N', '.': 'N' }
+    color_to_base_dict[ 'G' ] = { '0': 'G', '1': 'T', '2': 'A', '3': 'C', '4': 'N', '5': 'N', '6': 'N', '.': 'N' }
+    color_to_base_dict[ 'T' ] = { '0': 'T', '1': 'G', '2': 'C', '3': 'A', '4': 'N', '5': 'N', '6': 'N', '.': 'N' }
+    color_to_base_dict[ 'N' ] = { '0': 'N', '1': 'N', '2': 'N', '3': 'N', '4': 'N', '5': 'N', '6': 'N', '.': 'N' }
+    base_to_color_dict = {}
+    for base, color_dict in color_to_base_dict.iteritems():
+        base_to_color_dict[ base ] = {}
+        for key, value in color_dict.iteritems():
+            base_to_color_dict[ base ][ value ] = key
+        base_to_color_dict[ base ][ 'N' ] = '4'  # force ACGT followed by N to be '4', because this is now 'processed' data; we could force to '.' (non-processed data) also
+    base_to_color_dict[ 'N' ].update( { 'A': '5', 'C': '5', 'G': '5', 'T': '5', 'N': '6' } )
+
+    def __init__( self, fake_adapter_base='G' ):
+        assert fake_adapter_base in self.base_to_color_dict, 'A bad fake adapter base was provided: %s.' % fake_adapter_base
+        self.fake_adapter_base = fake_adapter_base
+
+    def to_color_space( self, sequence, adapter_base=None ):
+        if adapter_base is None:
+            adapter_base = self.fake_adapter_base
+        last_base = adapter_base  # we add a fake adapter base so that the sequence can be decoded properly again
+        rval = last_base
+        for base in sequence:
+            rval += self.base_to_color_dict.get( last_base, self.base_to_color_dict[ self.unknown_base ] ).get( base, self.unknown_color )
+            last_base = base
+        return rval
+
+    def to_base_space( self, sequence ):
+        if not isinstance( sequence, list ):
+            sequence = list( sequence )
+        if sequence:
+            last_base = sequence.pop( 0 )
+        else:
+            last_base = None
+        assert last_base in self.color_to_base_dict, 'A valid adapter base must be included when converting to base space from color space. Found: %s' % last_base
+        rval = ''
+        for color_val in sequence:
+            last_base = self.color_to_base_dict[ last_base ].get( color_val, self.unknown_base )
+            rval += last_base
+        return rval
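+
+
+# Illustrative worked example of the converter above (default fake adapter
+# base 'G'; not part of the upstream module):
+#
+#     converter = ColorSpaceConverter()
+#     converter.to_color_space( 'ACGT' )   # -> 'G2131'
+#     converter.to_base_space( 'G2131' )   # -> 'ACGT'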
diff --git a/lib/galaxy_utils/sequence/vcf.py b/lib/galaxy_utils/sequence/vcf.py
new file mode 100644
index 0000000..e4d93e5
--- /dev/null
+++ b/lib/galaxy_utils/sequence/vcf.py
@@ -0,0 +1,121 @@
+# Dan Blankenberg
+# See http://www.1000genomes.org/wiki/Analysis/variant-call-format
+
+NOT_A_NUMBER = float( 'NaN' )
+
+
+class VariantCall( object ):
+    version = None
+    header_startswith = None
+    required_header_fields = None
+    required_header_length = None
+
+    @classmethod
+    def get_class_by_format( cls, format ):
+        assert format in VCF_FORMATS, 'Unknown format type specified: %s' % format
+        return VCF_FORMATS[ format ]
+
+    def __init__( self, vcf_line, metadata, sample_names ):
+        raise NotImplementedError( 'VariantCall is abstract; instantiate a version-specific subclass via get_class_by_format()' )
+
+
+class VariantCall33( VariantCall ):
+    version = 'VCFv3.3'
+    header_startswith = '#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO'
+    required_header_fields = header_startswith.split( '\t' )
+    required_header_length = len( required_header_fields )
+
+    def __init__( self, vcf_line, metadata, sample_names ):
+        # Raw line is needed for indexing file.
+        self.raw_line = vcf_line
+        self.line = vcf_line.rstrip( '\n\r' )
+        self.metadata = metadata
+        self.sample_names = sample_names
+        self.format = None
+        self.sample_values = []
+
+        # parse line
+        self.fields = self.line.split( '\t' )
+        if sample_names:
+            assert len( self.fields ) == self.required_header_length + len( sample_names ) + 1, 'Provided VCF line (%s) has wrong length (expected: %i)' % ( self.line, self.required_header_length + len( sample_names ) + 1 )
+        else:
+            assert len( self.fields ) == self.required_header_length, 'Provided VCF line (%s) has wrong length (expected: %i)' % ( self.line, self.required_header_length)
+        self.chrom, self.pos, self.id, self.ref, self.alt, self.qual, self.filter, self.info = self.fields[ :self.required_header_length ]
+        self.pos = int( self.pos )
+        self.alt = self.alt.split( ',' )
+        try:
+            self.qual = float( self.qual )
+        except ValueError:
+            self.qual = NOT_A_NUMBER  # Missing data can be denoted as a '.'
+        if len( self.fields ) > self.required_header_length:
+            self.format = self.fields[ self.required_header_length ].split( ':' )
+            for sample_value in self.fields[ self.required_header_length + 1: ]:
+                self.sample_values.append( sample_value.split( ':' ) )
+
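+# Illustrative parse (a sketch; the column values are the canonical example
+# from the VCF specification):
+#
+#     line = '20\t14370\trs6054257\tG\tA\t29\tPASS\tNS=3\tGT:GQ\t0|0:48\n'
+#     vc = VariantCall33( line, metadata={}, sample_names=[ 'NA00001' ] )
+#     vc.pos   # 14370 (int)
+#     vc.alt   # [ 'A' ]
+#     vc.qual  # 29.0
+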
+
+class VariantCall40( VariantCall33 ):
+    version = 'VCFv4.0'
+
+    def __init__( self, vcf_line, metadata, sample_names ):
+        VariantCall33.__init__( self, vcf_line, metadata, sample_names)
+
+
+class VariantCall41( VariantCall40 ):
+    version = 'VCFv4.1'
+
+
+# VCF Format version lookup dict
+VCF_FORMATS = {}
+for format in [ VariantCall33, VariantCall40, VariantCall41 ]:
+    VCF_FORMATS[format.version] = format
+
+
+class Reader( object ):
+    def __init__( self, fh ):
+        self.vcf_file = fh
+        self.metadata = {}
+        self.header_fields = None
+        self.metadata_len = 0
+        self.sample_names = []
+        self.vcf_class = None
+
+        # Read file metadata.
+        while True:
+            line = self.vcf_file.readline()
+            self.metadata_len += len( line )
+            assert line, 'Invalid VCF file provided.'
+            line = line.rstrip( '\r\n' )
+            if self.vcf_class and line.startswith( self.vcf_class.header_startswith ):
+                # Read the header fields, ignoring any blank fields, which GATK
+                # VCF output produces after the sample columns.
+                self.header_fields = [l for l in line.split( '\t' ) if l]
+                if len( self.header_fields ) > self.vcf_class.required_header_length:
+                    for sample_name in self.header_fields[ self.vcf_class.required_header_length + 1: ]:
+                        self.sample_names.append( sample_name )
+                break
+            assert line.startswith( '##' ), 'Non-metadata line found before header'
+            line = line[2:]  # strip ##
+            metadata = line.split( '=', 1 )
+            metadata_name = metadata[ 0 ]
+            if len( metadata ) == 2:
+                metadata_value = metadata[ 1 ]
+            else:
+                metadata_value = None
+            if metadata_name in self.metadata:
+                if not isinstance( self.metadata[ metadata_name ], list ):
+                    self.metadata[ metadata_name ] = [ self.metadata[ metadata_name ] ]
+                self.metadata[ metadata_name ].append( metadata_value )
+            else:
+                self.metadata[ metadata_name ] = metadata_value
+            if metadata_name == 'fileformat':
+                self.vcf_class = VariantCall.get_class_by_format( metadata_value )
+
+    def next( self ):
+        line = self.vcf_file.readline()
+        if not line:
+            raise StopIteration
+        return self.vcf_class( line, self.metadata, self.sample_names )
+
+    def __iter__( self ):
+        while True:
+            yield self.next()
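+
+# Illustrative usage (a sketch): the Reader consumes the metadata and header
+# lines during construction, then yields one VariantCall object per data line.
+# 'calls.vcf' is a hypothetical path:
+#
+#     fh = open( 'calls.vcf' )
+#     reader = Reader( fh )
+#     for vc in reader:
+#         print vc.chrom, vc.pos, vc.id  # Python 2 print, matching this code base
+#     fh.close()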
diff --git a/lib/log_tempfile.py b/lib/log_tempfile.py
new file mode 100644
index 0000000..1c50aeb
--- /dev/null
+++ b/lib/log_tempfile.py
@@ -0,0 +1,31 @@
+# override tempfile methods for debugging
+
+import logging
+import tempfile
+import traceback
+
+log = logging.getLogger( __name__ )
+
+
+class TempFile( object ):
+    def __init__( self ):
+        tempfile._NamedTemporaryFile = tempfile.NamedTemporaryFile
+        tempfile._mkstemp = tempfile.mkstemp
+        tempfile.NamedTemporaryFile = self.NamedTemporaryFile
+        tempfile.mkstemp = self.mkstemp
+
+    def NamedTemporaryFile( self, *args, **kwargs ):
+        f = tempfile._NamedTemporaryFile( *args, **kwargs )
+        try:
+            log.debug( ( "Opened tempfile %s with NamedTemporaryFile:\n" % f.name ) + "".join( traceback.format_stack() ) )
+        except AttributeError:
+            pass
+        return f
+
+    def mkstemp( self, *args, **kwargs ):
+        f = tempfile._mkstemp( *args, **kwargs )
+        try:
+            log.debug( ( "Opened tempfile %s with mkstemp:\n" % f[1] ) + "".join( traceback.format_stack() ) )
+        except TypeError:
+            pass
+        return f
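+
+# Illustrative usage (a sketch): constructing TempFile once monkey-patches the
+# tempfile module in place, so subsequent callers are logged transparently:
+#
+#     import logging, tempfile
+#     import log_tempfile
+#     logging.basicConfig( level=logging.DEBUG )
+#     log_tempfile.TempFile()        # install the logging wrappers
+#     fd, path = tempfile.mkstemp()  # logs the path and a stack trace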
diff --git a/lib/mimeparse.py b/lib/mimeparse.py
new file mode 100755
index 0000000..39040af
--- /dev/null
+++ b/lib/mimeparse.py
@@ -0,0 +1,191 @@
+"""MIME-Type Parser
+
+This module provides basic functions for handling mime-types. It can handle
+matching mime-types against a list of media-ranges. See section 14.1 of
+the HTTP specification [RFC 2616] for a complete explanation.
+
+   http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+
+Contents:
+    - parse_mime_type():   Parses a mime-type into its component parts.
+    - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q' quality parameter.
+    - quality():           Determines the quality ('q') of a mime-type when compared against a list of media-ranges.
+    - quality_parsed():    Just like quality() except the second parameter must be pre-parsed.
+    - best_match():        Choose the mime-type with the highest quality ('q') from a list of candidates.
+"""
+
+__version__ = "0.1.2"
+__author__ = 'Joe Gregorio'
+__email__ = "joe at bitworking.org"
+__credits__ = ""
+
+
+def parse_mime_type(mime_type):
+    """Carves up a mime-type and returns a tuple of the
+       (type, subtype, params) where 'params' is a dictionary
+       of all the parameters for the media range.
+       For example, the media range 'application/xhtml;q=0.5' would
+       get parsed into:
+
+       ('application', 'xhtml', {'q': '0.5'})
+       """
+    parts = mime_type.split(";")
+    params = dict( [tuple([s.strip() for s in param.split("=")]) for param in parts[1:] ] )
+    full_type = parts[0].strip()
+    # Java URLConnection class sends an Accept header that includes a single "*"
+    # Turn it into a legal wildcard.
+    if full_type == '*':
+        full_type = '*/*'
+    (type, subtype) = full_type.split("/")
+    return (type.strip(), subtype.strip(), params)
+
+
+def parse_media_range(range):
+    r"""
+    Carves up a media range and returns a tuple of the
+    (type, subtype, params) where 'params' is a dictionary
+    of all the parameters for the media range.
+    For example, the media range 'application/*;q=0.5' would
+    get parsed into:
+
+    .. raw:: text
+
+        ('application', '*', {'q': '0.5'})
+
+    In addition this function also guarantees that there
+    is a value for 'q' in the params dictionary, filling it
+    in with a proper default if necessary.
+    """
+    (type, subtype, params) = parse_mime_type(range)
+    if 'q' not in params or not params['q'] or \
+            not float(params['q']) or float(params['q']) > 1\
+            or float(params['q']) < 0:
+        params['q'] = '1'
+    return (type, subtype, params)
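+
+# Illustrative default-filling (a sketch, consistent with the unit tests below):
+# a missing or out-of-range quality collapses to the default of '1':
+#
+#     >>> parse_media_range('application/xml;q=2')
+#     ('application', 'xml', {'q': '1'})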
+
+
+def fitness_and_quality_parsed(mime_type, parsed_ranges):
+    """Find the best match for a given mime-type against
+       a list of media_ranges that have already been
+       parsed by parse_media_range(). Returns a tuple of
+       the fitness value and the value of the 'q' quality
+       parameter of the best match, or (-1, 0) if no match
+       was found. Just as for quality_parsed(), 'parsed_ranges'
+       must be a list of parsed media ranges. """
+    best_fitness = -1
+    best_fit_q = 0
+    (target_type, target_subtype, target_params) =\
+        parse_media_range(mime_type)
+    for (type, subtype, params) in parsed_ranges:
+        if (type == target_type or type == '*' or target_type == '*') and \
+                (subtype == target_subtype or subtype == '*' or target_subtype == '*'):
+            param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in
+                                   target_params.iteritems() if key != 'q' and
+                                   key in params and value == params[key]], 0)
+            fitness = (type == target_type) and 100 or 0
+            fitness += (subtype == target_subtype) and 10 or 0
+            fitness += param_matches
+            if fitness > best_fitness:
+                best_fitness = fitness
+                best_fit_q = params['q']
+
+    return best_fitness, float(best_fit_q)
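+
+# Illustrative scoring (a sketch): an exact type match contributes 100, an exact
+# subtype match 10, and each matching non-'q' parameter 1, while the returned
+# quality comes from the winning media range:
+#
+#     >>> fitness_and_quality_parsed('text/html', [parse_media_range('text/html;q=0.7')])
+#     (110, 0.7)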
+
+
+def quality_parsed(mime_type, parsed_ranges):
+    """Find the best match for a given mime-type against
+    a list of media_ranges that have already been
+    parsed by parse_media_range(). Returns the
+    'q' quality parameter of the best match, 0 if no
+    match was found. This function behaves the same as quality()
+    except that 'parsed_ranges' must be a list of
+    parsed media ranges. """
+    return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
+
+
+def quality(mime_type, ranges):
+    """Returns the quality 'q' of a mime-type when compared
+    against the media-ranges in ranges. For example:
+
+    >>> quality('text/html','text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
+    0.7
+
+    """
+    parsed_ranges = [parse_media_range(r) for r in ranges.split(",")]
+    return quality_parsed(mime_type, parsed_ranges)
+
+
+def best_match(supported, header):
+    """Takes a list of supported mime-types and finds the best
+    match for all the media-ranges listed in header. The value of
+    header must be a string that conforms to the format of the
+    HTTP Accept: header. The value of 'supported' is a list of
+    mime-types.
+
+    >>> best_match(['application/xbel+xml', 'text/xml'], 'text/*;q=0.5,*/*; q=0.1')
+    'text/xml'
+    """
+    parsed_header = [parse_media_range(r) for r in header.split(",")]
+    weighted_matches = [(fitness_and_quality_parsed(mime_type, parsed_header), mime_type)
+                        for mime_type in supported]
+    weighted_matches.sort()
+    return weighted_matches[-1][0][1] and weighted_matches[-1][1] or ''
+
+
+if __name__ == "__main__":
+    import unittest
+
+    class TestMimeParsing(unittest.TestCase):
+
+        def test_parse_media_range(self):
+            self.assertEqual(('application', 'xml', {'q': '1'}), parse_media_range('application/xml;q=1'))
+            self.assertEqual(('application', 'xml', {'q': '1'}), parse_media_range('application/xml'))
+            self.assertEqual(('application', 'xml', {'q': '1'}), parse_media_range('application/xml;q='))
+            self.assertEqual(('application', 'xml', {'q': '1'}), parse_media_range('application/xml ; q='))
+            self.assertEqual(('application', 'xml', {'q': '1', 'b': 'other'}), parse_media_range('application/xml ; q=1;b=other'))
+            self.assertEqual(('application', 'xml', {'q': '1', 'b': 'other'}), parse_media_range('application/xml ; q=2;b=other'))
+            # Java URLConnection class sends an Accept header that includes a single *
+            self.assertEqual(('*', '*', {'q': '.2'}), parse_media_range(" *; q=.2"))
+
+        def test_rfc_2616_example(self):
+            accept = "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"
+            self.assertEqual(1, quality("text/html;level=1", accept))
+            self.assertEqual(0.7, quality("text/html", accept))
+            self.assertEqual(0.3, quality("text/plain", accept))
+            self.assertEqual(0.5, quality("image/jpeg", accept))
+            self.assertEqual(0.4, quality("text/html;level=2", accept))
+            self.assertEqual(0.7, quality("text/html;level=3", accept))
+
+        def test_best_match(self):
+            mime_types_supported = ['application/xbel+xml', 'application/xml']
+            # direct match
+            self.assertEqual(best_match(mime_types_supported, 'application/xbel+xml'), 'application/xbel+xml')
+            # direct match with a q parameter
+            self.assertEqual(best_match(mime_types_supported, 'application/xbel+xml; q=1'), 'application/xbel+xml')
+            # direct match of our second choice with a q parameter
+            self.assertEqual(best_match(mime_types_supported, 'application/xml; q=1'), 'application/xml')
+            # match using a subtype wildcard
+            self.assertEqual(best_match(mime_types_supported, 'application/*; q=1'), 'application/xml')
+            # match using a type wildcard
+            self.assertEqual(best_match(mime_types_supported, '*/*'), 'application/xml')
+
+            mime_types_supported = ['application/xbel+xml', 'text/xml']
+            # match using a type versus a lower weighted subtype
+            self.assertEqual(best_match(mime_types_supported, 'text/*;q=0.5,*/*; q=0.1'), 'text/xml')
+            # fail to match anything
+            self.assertEqual(best_match(mime_types_supported, 'text/html,application/atom+xml; q=0.9'), '')
+
+            # common AJAX scenario
+            mime_types_supported = ['application/json', 'text/html']
+            self.assertEqual(best_match(mime_types_supported, 'application/json, text/javascript, */*'), 'application/json')
+            # verify fitness ordering
+            self.assertEqual(best_match(mime_types_supported, 'application/json, text/html;q=0.9'), 'application/json')
+
+        def test_support_wildcards(self):
+            mime_types_supported = ['image/*', 'application/xml']
+            # match using a type wildcard
+            self.assertEqual(best_match(mime_types_supported, 'image/png'), 'image/*')
+            # match using a wildcard for both requested and supported
+            self.assertEqual(best_match(mime_types_supported, 'image/*'), 'image/*')
+
+    unittest.main()
diff --git a/lib/psyco_full.py b/lib/psyco_full.py
new file mode 100644
index 0000000..7b648de
--- /dev/null
+++ b/lib/psyco_full.py
@@ -0,0 +1,5 @@
+try:
+    import psyco
+    psyco.full()
+except:
+    pass
diff --git a/lib/tool_shed/__init__.py b/lib/tool_shed/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/capsule/__init__.py b/lib/tool_shed/capsule/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/capsule/capsule_manager.py b/lib/tool_shed/capsule/capsule_manager.py
new file mode 100644
index 0000000..de19843
--- /dev/null
+++ b/lib/tool_shed/capsule/capsule_manager.py
@@ -0,0 +1,932 @@
+import contextlib
+import logging
+import os
+import shutil
+import tarfile
+import tempfile
+import threading
+from time import gmtime, strftime
+
+from six.moves.urllib.request import urlopen
+from sqlalchemy import and_, false
+
+import tool_shed.repository_types.util as rt_util
+from galaxy import web
+from galaxy.util import asbool, build_url, CHUNK_SIZE, safe_relpath
+from galaxy.util.odict import odict
+from tool_shed.dependencies import attribute_handlers
+from tool_shed.dependencies.repository.relation_builder import RelationBuilder
+from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyInstallManager
+from tool_shed.metadata import repository_metadata_manager
+from tool_shed.util import (basic_util, commit_util, common_util, encoding_util,
+    hg_util, metadata_util, repository_util, shed_util_common as suc, xml_util)
+
+log = logging.getLogger( __name__ )
+
+
+class ExportedRepositoryRegistry( object ):
+
+    def __init__( self ):
+        self.exported_repository_elems = []
+
+
+class ExportRepositoryManager( object ):
+
+    def __init__( self, app, user, tool_shed_url, repository, changeset_revision, export_repository_dependencies, using_api ):
+        self.app = app
+        self.capsule_filename = 'capsule'
+        self.capsule_with_dependencies_filename = 'capsule_with_dependencies'
+        self.changeset_revision = changeset_revision
+        self.export_repository_dependencies = asbool( export_repository_dependencies )
+        self.file_type = 'gz'
+        self.repository = repository
+        self.repository_id = self.app.security.encode_id( repository.id )
+        self.tool_shed_url = tool_shed_url
+        self.user = user
+        self.using_api = using_api
+
+    def export_repository( self ):
+        repositories_archive_filename = self.generate_repository_archive_filename( use_tmp_archive_dir=True )
+        if self.export_repository_dependencies:
+            repo_info_dicts = self.get_repo_info_dicts()
+            repository_ids = self.get_repository_ids( repo_info_dicts )
+            ordered_repository_ids, ordered_repositories, ordered_changeset_revisions = \
+                self.order_components_for_import( repository_ids, repo_info_dicts )
+        else:
+            ordered_repository_ids = []
+            ordered_repositories = []
+            ordered_changeset_revisions = []
+            if self.repository:
+                repository_metadata = \
+                    metadata_util.get_current_repository_metadata_for_changeset_revision( self.app,
+                                                                                          self.repository,
+                                                                                          self.changeset_revision )
+                if repository_metadata:
+                    ordered_repository_ids = [ self.repository_id ]
+                    ordered_repositories = [ self.repository ]
+                    ordered_changeset_revisions = [ repository_metadata.changeset_revision ]
+        repositories_archive = None
+        error_messages = ''
+        # Initialize so the finally block below can safely test these names even
+        # if an exception occurs before the files are created.
+        tmp_export_info = None
+        tmp_manifest = None
+        lock = threading.Lock()
+        lock.acquire( True )
+        try:
+            repositories_archive = tarfile.open( repositories_archive_filename, "w:%s" % self.file_type )
+            exported_repository_registry = ExportedRepositoryRegistry()
+            for repository_id, ordered_repository, ordered_changeset_revision in zip( ordered_repository_ids,
+                                                                                      ordered_repositories,
+                                                                                      ordered_changeset_revisions ):
+                with self.__tempdir( prefix='tmp-toolshed-export-er' ) as work_dir:
+                    repository_archive, error_message = self.generate_repository_archive( ordered_repository,
+                                                                                          ordered_changeset_revision,
+                                                                                          work_dir )
+                    if error_message:
+                        error_messages = '%s  %s' % ( error_messages, error_message )
+                    else:
+                        archive_name = str( os.path.basename( repository_archive.name ) )
+                        repositories_archive.add( repository_archive.name, arcname=archive_name )
+                        attributes, sub_elements = self.get_repository_attributes_and_sub_elements( ordered_repository,
+                                                                                                    archive_name )
+                        elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=sub_elements )
+                        exported_repository_registry.exported_repository_elems.append( elem )
+            # Keep information about the export in a file named export_info.xml in the archive.
+            sub_elements = self.generate_export_elem()
+            export_elem = xml_util.create_element( 'export_info', attributes=None, sub_elements=sub_elements )
+            tmp_export_info = xml_util.create_and_write_tmp_file( export_elem, use_indent=True )
+            repositories_archive.add( tmp_export_info, arcname='export_info.xml' )
+            # Write the manifest, which must preserve the order in which the repositories should be imported.
+            exported_repository_root = xml_util.create_element( 'repositories' )
+            for exported_repository_elem in exported_repository_registry.exported_repository_elems:
+                exported_repository_root.append( exported_repository_elem )
+            tmp_manifest = xml_util.create_and_write_tmp_file( exported_repository_root, use_indent=True )
+            repositories_archive.add( tmp_manifest, arcname='manifest.xml' )
+        except Exception as e:
+            log.exception( str( e ) )
+        finally:
+            if tmp_export_info and os.path.exists( tmp_export_info ):
+                os.remove( tmp_export_info )
+            if tmp_manifest and os.path.exists( tmp_manifest ):
+                os.remove( tmp_manifest )
+            lock.release()
+        if repositories_archive is not None:
+            repositories_archive.close()
+        if self.using_api:
+            encoded_repositories_archive_name = encoding_util.tool_shed_encode( repositories_archive_filename )
+            params = dict( encoded_repositories_archive_name=encoded_repositories_archive_name )
+            pathspec = [ 'repository', 'export_via_api' ]
+            tool_shed_url = web.url_for( '/', qualified=True )
+            download_url = build_url( tool_shed_url, pathspec=pathspec, params=params )
+            return dict( download_url=download_url, error_messages=error_messages )
+        return repositories_archive, error_messages
+
+    def generate_export_elem( self ):
+        sub_elements = odict()
+        sub_elements[ 'export_time' ] = strftime( '%a, %d %b %Y %H:%M:%S +0000', gmtime() )
+        sub_elements[ 'tool_shed' ] = str( self.tool_shed_url.rstrip( '/' ) )
+        sub_elements[ 'repository_name' ] = str( self.repository.name )
+        sub_elements[ 'repository_owner' ] = str( self.repository.user.username )
+        sub_elements[ 'changeset_revision' ] = str( self.changeset_revision )
+        sub_elements[ 'export_repository_dependencies' ] = str( self.export_repository_dependencies )
+        sub_elements[ 'exported_via_api' ] = str( self.using_api )
+        return sub_elements
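+
+    # Illustrative output (a sketch; the values and host are hypothetical):
+    # serialized via xml_util, the ordered mapping above becomes an
+    # export_info.xml roughly of the form:
+    #
+    #     <export_info>
+    #         <export_time>Thu, 01 Jan 2015 00:00:00 +0000</export_time>
+    #         <tool_shed>toolshed.example.org</tool_shed>
+    #         <repository_name>package_lapack_3_4</repository_name>
+    #         ...
+    #     </export_info>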
+
+    def generate_repository_archive( self, repository, changeset_revision, work_dir ):
+        rdah = attribute_handlers.RepositoryDependencyAttributeHandler( self.app, unpopulate=True )
+        tdah = attribute_handlers.ToolDependencyAttributeHandler( self.app, unpopulate=True )
+        file_type_str = basic_util.get_file_type_str( changeset_revision, self.file_type )
+        file_name = '%s-%s' % ( repository.name, file_type_str )
+        return_code, error_message = hg_util.archive_repository_revision( self.app,
+                                                                          repository,
+                                                                          work_dir,
+                                                                          changeset_revision )
+        if return_code:
+            return None, error_message
+        repository_archive_name = os.path.join( work_dir, file_name )
+        # Create a compressed tar archive that will contain only valid files and possibly altered dependency definition files.
+        repository_archive = tarfile.open( repository_archive_name, "w:%s" % self.file_type )
+        for root, dirs, files in os.walk( work_dir ):
+            if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+                # Prune undesirable directories in place so os.walk() skips them;
+                # calling remove() while iterating the same list can skip entries.
+                dirs[:] = [ d for d in dirs if d not in commit_util.UNDESIRABLE_DIRS ]
+                for name in files:
+                    name = str( name )
+                    if name in commit_util.UNDESIRABLE_FILES:
+                        continue
+                    full_path = os.path.join( root, name )
+                    relative_path = full_path.replace( work_dir, '' ).lstrip( '/' )
+                    # See if we have repository dependencies defined.
+                    if name == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
+                        # Eliminate the toolshed and changeset_revision attributes from all <repository> tags.
+                        altered, root_elem, error_message = rdah.handle_tag_attributes( full_path )
+                        if error_message:
+                            return None, error_message
+                        if altered:
+                            tmp_filename = xml_util.create_and_write_tmp_file( root_elem, use_indent=True )
+                            shutil.move( tmp_filename, full_path )
+                    elif name == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
+                        # Eliminate the toolshed and changeset_revision attributes from all <repository> tags.
+                        altered, root_elem, error_message = tdah.handle_tag_attributes( full_path )
+                        if error_message:
+                            return None, error_message
+                        if altered:
+                            tmp_filename = xml_util.create_and_write_tmp_file( root_elem, use_indent=True )
+                            shutil.move( tmp_filename, full_path )
+                    repository_archive.add( full_path, arcname=relative_path )
+        repository_archive.close()
+        return repository_archive, error_message
+
+    def generate_repository_archive_filename( self, use_tmp_archive_dir=False ):
+        tool_shed = self.remove_protocol_from_tool_shed_url()
+        file_type_str = basic_util.get_file_type_str( self.changeset_revision, self.file_type )
+        if self.export_repository_dependencies:
+            repositories_archive_filename = '%s_%s_%s_%s_%s' % ( self.capsule_with_dependencies_filename,
+                                                                 tool_shed,
+                                                                 str( self.repository.name ),
+                                                                 str( self.repository.user.username ),
+                                                                 file_type_str )
+        else:
+            repositories_archive_filename = '%s_%s_%s_%s_%s' % ( self.capsule_filename,
+                                                                 tool_shed,
+                                                                 str( self.repository.name ),
+                                                                 str( self.repository.user.username ),
+                                                                 file_type_str )
+        if use_tmp_archive_dir:
+            tmp_archive_dir = tempfile.mkdtemp( prefix="tmp-toolshed-arcdir" )
+            repositories_archive_filename = os.path.join( tmp_archive_dir, repositories_archive_filename )
+        return repositories_archive_filename
+
+    def get_components_from_repo_info_dict( self, repo_info_dict ):
+        """
+        Return the repository and the associated latest installable changeset_revision
+        (including updates) for the repository defined by the received repo_info_dict.
+        """
+        for repository_name, repo_info_tup in repo_info_dict.items():
+            # There should only be one entry in the received repo_info_dict.
+            description, repository_clone_url, changeset_revision, ctx_rev, \
+                repository_owner, repository_dependencies, tool_dependencies = \
+                repository_util.get_repo_info_tuple_contents( repo_info_tup )
+            repository = repository_util.get_repository_by_name_and_owner( self.app, repository_name, repository_owner )
+            repository_metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( self.app,
+                                                                                                        repository,
+                                                                                                        changeset_revision )
+            if repository_metadata:
+                return repository, repository_metadata.changeset_revision
+        return None, None
+
+    def get_repo_info_dict_for_import( self, encoded_repository_id, encoded_repository_ids, repo_info_dicts ):
+        """
+        The received encoded_repository_ids and repo_info_dicts are lists that contain associated
+        elements at each location in the list.  This method will return the element from repo_info_dicts
+        associated with the received encoded_repository_id by determining its location in the received
+        encoded_repository_ids list.
+        """
+        for index, repository_id in enumerate( encoded_repository_ids ):
+            if repository_id == encoded_repository_id:
+                repo_info_dict = repo_info_dicts[ index ]
+                return repo_info_dict
+        return None
+
+    def get_repo_info_dicts( self ):
+        """
+        Return a list of dictionaries defining repositories that are required by the repository
+        associated with self.repository_id.
+        """
+        rdim = RepositoryDependencyInstallManager( self.app )
+        repository = repository_util.get_repository_in_tool_shed( self.app, self.repository_id )
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                           self.repository_id,
+                                                                                           self.changeset_revision )
+        # Get a dictionary of all repositories upon which the contents of the current
+        # repository_metadata record depend.
+        toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+        rb = RelationBuilder( self.app, repository, repository_metadata, toolshed_base_url )
+        # Work-around to ensure repositories that contain packages needed only for compiling
+        # a dependent package are included in the capsule.
+        rb.set_filter_dependencies_needed_for_compiling( False )
+        repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
+        repo = hg_util.get_repo_for_repository( self.app,
+                                                repository=self.repository,
+                                                repo_path=None,
+                                                create=False )
+        ctx = hg_util.get_changectx_for_changeset( repo, self.changeset_revision )
+        repo_info_dict = {}
+        # Cast unicode to string.
+        repo_info_dict[ str( repository.name ) ] = ( str( self.repository.description ),
+                                                     common_util.generate_clone_url_for_repository_in_tool_shed( self.user,
+                                                                                                                 self.repository ),
+                                                     str( self.changeset_revision ),
+                                                     str( ctx.rev() ),
+                                                     str( self.repository.user.username ),
+                                                     repository_dependencies,
+                                                     None )
+        all_required_repo_info_dict = rdim.get_required_repo_info_dicts( self.tool_shed_url, [ repo_info_dict ] )
+        all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
+        return all_repo_info_dicts
+
+    def get_repository_attributes_and_sub_elements( self, repository, archive_name ):
+        """
+        Get the information about a repository to create and populate an XML tag set.  The
+        generated attributes will be contained within the <repository> tag, while the sub_elements
+        will be tag sets contained within the <repository> tag set.
+        """
+        attributes = odict()
+        sub_elements = odict()
+        attributes[ 'name' ] = str( repository.name )
+        attributes[ 'type' ] = str( repository.type )
+        # We have to associate the public username since the user_id will be different between tool sheds.
+        attributes[ 'username' ] = str( repository.user.username )
+        # Don't coerce description or long description from unicode to string because the fields are free text.
+        sub_elements[ 'description' ] = repository.description
+        sub_elements[ 'long_description' ] = repository.long_description
+        sub_elements[ 'archive' ] = archive_name
+        # Keep track of Category associations.
+        categories = []
+        for rca in repository.categories:
+            category = rca.category
+            categories.append( ( 'category', str( category.name ) ) )
+        sub_elements[ 'categories' ] = categories
+        return attributes, sub_elements
+
+    def get_repository_ids( self, repo_info_dicts ):
+        """Return a list of repository ids associated with each dictionary in the received repo_info_dicts."""
+        repository_ids = []
+        for repo_info_dict in repo_info_dicts:
+            for repository_name, repo_info_tup in repo_info_dict.items():
+                description, repository_clone_url, changeset_revision, \
+                    ctx_rev, repository_owner, repository_dependencies, \
+                    tool_dependencies = \
+                    repository_util.get_repo_info_tuple_contents( repo_info_tup )
+                repository = repository_util.get_repository_by_name_and_owner( self.app, repository_name, repository_owner )
+                repository_ids.append( self.app.security.encode_id( repository.id ) )
+        return repository_ids
+
+    def order_components_for_import( self, repository_ids, repo_info_dicts ):
+        """
+        Some repositories may have repository dependencies that must be imported and have metadata set on
+        them before the dependent repository is imported.  This method will inspect the list of repositories
+        about to be exported and make sure to order them appropriately for proper import.  For each repository
+        about to be exported, if required repositories are not contained in the list of repositories about to
+        be exported, then they are not considered.  Repository dependency definitions that contain circular
+        dependencies should not result in an infinite loop, but in that case a correct import order cannot
+        be guaranteed for the repositories involved in the cycle.
+        """
+        # The received list of repository_ids are the ids of all of the primary exported repository's
+        # repository dependencies.  The primary repository will always be last in the returned lists.
+        ordered_repository_ids = []
+        ordered_repositories = []
+        ordered_changeset_revisions = []
+        # Create a dictionary whose keys are the received repository_ids and whose values are a list of
+        # repository_ids, each of which is contained in the received list of repository_ids and whose associated
+        # repository must be imported prior to the repository associated with the repository_id key.
+        prior_import_required_dict = repository_util.get_prior_import_or_install_required_dict( self.app,
+                                                                                                repository_ids,
+                                                                                                repo_info_dicts )
+        processed_repository_ids = []
+        # Process the list of repository dependencies defined for the primary exported repository.
+        while len( processed_repository_ids ) != len( prior_import_required_dict ):
+            repository_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_import_required_dict,
+                                                                                      processed_repository_ids )
+            if repository_id == self.repository_id:
+                # Append self.repository_id without processing it since it must be returned last in the order.
+                # It will be processed below after all dependencies are processed.
+                processed_repository_ids.append( self.repository_id )
+                continue
+            processed_repository_ids.append( repository_id )
+            if repository_id not in ordered_repository_ids:
+                prior_import_required_ids = prior_import_required_dict[ repository_id ]
+                for prior_import_required_id in prior_import_required_ids:
+                    if prior_import_required_id not in ordered_repository_ids:
+                        # Import the associated repository dependency first.
+                        prior_repo_info_dict = \
+                            self.get_repo_info_dict_for_import( prior_import_required_id,
+                                                                repository_ids,
+                                                                repo_info_dicts )
+                        prior_repository, prior_import_changeset_revision = \
+                            self.get_components_from_repo_info_dict( prior_repo_info_dict )
+                        if prior_repository and prior_import_changeset_revision:
+                            ordered_repository_ids.append( prior_import_required_id )
+                            ordered_repositories.append( prior_repository )
+                            ordered_changeset_revisions.append( prior_import_changeset_revision )
+                repo_info_dict = self.get_repo_info_dict_for_import( repository_id, repository_ids, repo_info_dicts )
+                repository, changeset_revision = self.get_components_from_repo_info_dict( repo_info_dict )
+                if repository and changeset_revision:
+                    ordered_repository_ids.append( repository_id )
+                    ordered_repositories.append( repository )
+                    ordered_changeset_revisions.append( changeset_revision )
+        # Process the repository associated with self.repository_id last.
+        repo_info_dict = self.get_repo_info_dict_for_import( self.repository_id, repository_ids, repo_info_dicts )
+        repository, changeset_revision = self.get_components_from_repo_info_dict( repo_info_dict )
+        if repository and changeset_revision:
+            ordered_repository_ids.append( self.repository_id )
+            ordered_repositories.append( repository )
+            ordered_changeset_revisions.append( changeset_revision )
+        return ordered_repository_ids, ordered_repositories, ordered_changeset_revisions
+
+    def remove_protocol_from_tool_shed_url( self ):
+        protocol, base = self.tool_shed_url.split( '://' )
+        base = base.replace( ':', '_colon_' )
+        base = base.rstrip( '/' )
+        return base
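+
+    # Illustrative results (a sketch): 'https://toolshed.example.org/' becomes
+    # 'toolshed.example.org', and a URL with an explicit port such as
+    # 'http://localhost:9009' becomes 'localhost_colon_9009', keeping the value
+    # safe for use in archive file names.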
+
+    @contextlib.contextmanager
+    def __tempdir( self, prefix=None ):
+        td = tempfile.mkdtemp( prefix=prefix )
+        try:
+            yield td
+        finally:
+            shutil.rmtree( td )
+
+
+class ImportRepositoryManager( object ):
+
+    def __init__( self, app, host, user, user_is_admin ):
+        self.app = app
+        self.host = host
+        self.user = user
+        self.user_is_admin = user_is_admin
+
+    def check_status_and_reset_downloadable( self, import_results_tups ):
+        """Check the status of each imported repository and set downloadable to False if errors."""
+        sa_session = self.app.model.context.current
+        flush = False
+        for import_results_tup in import_results_tups:
+            ok, name_owner, message = import_results_tup
+            name, owner = name_owner
+            if not ok:
+                repository = repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+                if repository is not None:
+                    # Do not allow the repository to be automatically installed if population resulted in errors.
+                    tip_changeset_revision = repository.tip( self.app )
+                    repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                                       self.app.security.encode_id( repository.id ),
+                                                                                                       tip_changeset_revision )
+                    if repository_metadata:
+                        if repository_metadata.downloadable:
+                            repository_metadata.downloadable = False
+                            sa_session.add( repository_metadata )
+                            if not flush:
+                                flush = True
+                        # Do not allow dependent repository revisions to be automatically installed if population
+                        # resulted in errors.
+                        dependent_downloadable_revisions = self.get_dependent_downloadable_revisions( repository_metadata )
+                        for dependent_downloadable_revision in dependent_downloadable_revisions:
+                            if dependent_downloadable_revision.downloadable:
+                                dependent_downloadable_revision.downloadable = False
+                                sa_session.add( dependent_downloadable_revision )
+                                if not flush:
+                                    flush = True
+        if flush:
+            sa_session.flush()
+
+    def create_repository_and_import_archive( self, repository_archive_dict, import_results_tups ):
+        """
+        Create a new repository in the tool shed and populate it with the contents of a gzip compressed
+        tar archive that was exported as part or all of the contents of a capsule.
+        """
+        results_message = ''
+        name = repository_archive_dict.get( 'name', None )
+        username = repository_archive_dict.get( 'owner', None )
+        if name is None or username is None:
+            ok = False
+            results_message += 'Import failed: required repository name <b>%s</b> or owner <b>%s</b> is missing.' % \
+                ( str( name ), str( username ))
+            import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
+        else:
+            status = repository_archive_dict.get( 'status', None )
+            if status is None:
+                # The repository does not yet exist in this Tool Shed and the current user is authorized to import
+                # the current archive file.
+                type = repository_archive_dict.get( 'type', 'unrestricted' )
+                description = repository_archive_dict.get( 'description', '' )
+                long_description = repository_archive_dict.get( 'long_description', '' )
+                # The owner entry in the repository_archive_dict is the public username of the user associated with
+                # the exported repository archive.
+                user = common_util.get_user_by_username( self.app, username )
+                if user is None:
+                    ok = False
+                    results_message += 'Import failed: repository owner <b>%s</b> does not have an account in this Tool Shed.' % \
+                        str( username )
+                    import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
+                else:
+                    user_id = user.id
+                    # The categories entry in the repository_archive_dict is a list of category names.  If a name does not
+                    # exist in the current Tool Shed, the category will not be created, so it will not be associated with
+                    # the repository.
+                    category_ids = []
+                    category_names = repository_archive_dict.get( 'category_names', [] )
+                    for category_name in category_names:
+                        category = suc.get_category_by_name( self.app, category_name )
+                        if category is None:
+                            results_message += 'This Tool Shed does not have the category <b>%s</b> so it ' % str( category_name )
+                            results_message += 'will not be associated with this repository.'
+                        else:
+                            category_ids.append( self.app.security.encode_id( category.id ) )
+                    # Create the repository record in the database.
+                    repository, create_message = repository_util.create_repository( self.app,
+                                                                                    name,
+                                                                                    type,
+                                                                                    description,
+                                                                                    long_description,
+                                                                                    user_id=user_id,
+                                                                                    category_ids=category_ids )
+                    if create_message:
+                        results_message += create_message
+                    # Populate the new repository with the contents of exported repository archive.
+                    results_dict = self.import_repository_archive( repository, repository_archive_dict )
+                    ok = results_dict.get( 'ok', False )
+                    error_message = results_dict.get( 'error_message', '' )
+                    if error_message:
+                        results_message += error_message
+                    import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
+            else:
+                # The repository either already exists in this Tool Shed or the current user is not authorized to create it.
+                ok = True
+                results_message += 'Import not necessary: repository status for this Tool Shed is: %s.' % str( status )
+                import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
+        return import_results_tups
+
+    def extract_capsule_files( self, **kwd ):
+        """
+        Extract the uploaded capsule archive into a temporary location for inspection, validation
+        and potential import.
+        """
+        return_dict = {}
+        tar_archive = kwd.get( 'tar_archive', None )
+        capsule_file_name = kwd.get( 'capsule_file_name', None )
+        if tar_archive is not None and capsule_file_name is not None:
+            return_dict.update( kwd )
+            extract_directory_path = tempfile.mkdtemp( prefix="tmp-capsule-ecf" )
+            if capsule_file_name.endswith( '.tar.gz' ):
+                extract_directory_name = capsule_file_name.replace( '.tar.gz', '' )
+            elif capsule_file_name.endswith( '.tar' ):
+                extract_directory_name = capsule_file_name.replace( '.tar', '' )
+            else:
+                extract_directory_name = capsule_file_name
+            file_path = os.path.join( extract_directory_path, extract_directory_name )
+            return_dict[ 'encoded_file_path' ] = encoding_util.tool_shed_encode( file_path )
+            tar_archive.extractall( path=file_path )
+            try:
+                tar_archive.close()
+            except Exception as e:
+                log.exception( "Cannot close tar_archive: %s" % str( e ) )
+            del return_dict[ 'tar_archive' ]
+        return return_dict
+
+    def get_archives_from_manifest( self, manifest_file_path ):
+        """
+        Return the list of archive names defined in the capsule manifest.  This method will validate
+        the manifest by ensuring all <repository> tag sets contain a valid <archive> sub-element.
+        """
+        archives = []
+        error_message = ''
+        manifest_tree, error_message = xml_util.parse_xml( manifest_file_path )
+        if error_message:
+            return archives, error_message
+        manifest_root = manifest_tree.getroot()
+        for elem in manifest_root:
+            # <repository name="package_lapack_3_4" type="tool_dependency_definition" username="test">
+            if elem.tag != 'repository':
+                error_message = 'All level one sub-elements in the manifest.xml file must be <repository> tag sets.  '
+                error_message += 'The tag <b><%s></b> is invalid.' % str( elem.tag )
+                return [], error_message
+            archive_file_name = None
+            for repository_elem in elem:
+                if repository_elem.tag == 'archive':
+                    # <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive>
+                    archive_file_name = repository_elem.text
+                    break
+            if archive_file_name is None:
+                error_message = 'The %s tag set is missing a required <archive> sub-element.' % str( elem.tag )
+                return [], error_message
+            archives.append( archive_file_name )
+        return archives, error_message
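+
+    # A minimal manifest.xml that passes this validation (illustrative, built
+    # from the tag examples in the comments above):
+    #
+    #     <repositories>
+    #         <repository name="package_lapack_3_4" type="tool_dependency_definition" username="test">
+    #             <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive>
+    #         </repository>
+    #     </repositories>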
+
+    def get_dependent_downloadable_revisions( self, repository_metadata ):
+        """
+        Return all repository_metadata records that are downloadable and that depend upon the received
+        repository_metadata record.
+        """
+        # This method is called only from the tool shed.
+        sa_session = self.app.model.context.current
+        rm_changeset_revision = repository_metadata.changeset_revision
+        rm_repository = repository_metadata.repository
+        rm_repository_name = str( rm_repository.name )
+        rm_repository_owner = str( rm_repository.user.username )
+        dependent_downloadable_revisions = []
+        for repository in sa_session.query( self.app.model.Repository ) \
+                                    .filter( and_( self.app.model.Repository.table.c.id != rm_repository.id,
+                                                   self.app.model.Repository.table.c.deleted == false(),
+                                                   self.app.model.Repository.table.c.deprecated == false() ) ):
+            downloadable_revisions = repository.downloadable_revisions
+            if downloadable_revisions:
+                for downloadable_revision in downloadable_revisions:
+                    if downloadable_revision.has_repository_dependencies:
+                        metadata = downloadable_revision.metadata
+                        if metadata:
+                            repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+                            repository_dependencies_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
+                            for repository_dependencies_tup in repository_dependencies_tups:
+                                tool_shed, name, owner, changeset_revision, \
+                                    prior_installation_required, \
+                                    only_if_compiling_contained_td = \
+                                    common_util.parse_repository_dependency_tuple( repository_dependencies_tup )
+                                if name == rm_repository_name and owner == rm_repository_owner:
+                                    # We've discovered a repository revision that depends upon the repository associated
+                                    # with the received repository_metadata record, but we need to make sure it depends
+                                    # upon the revision.
+                                    if changeset_revision == rm_changeset_revision:
+                                        dependent_downloadable_revisions.append( downloadable_revision )
+                                    else:
+                                        # Make sure the defined changeset_revision is current.
+                                        defined_repository_metadata = \
+                                            sa_session.query( self.app.model.RepositoryMetadata ) \
+                                                      .filter( self.app.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) \
+                                                      .first()
+                                        if defined_repository_metadata is None:
+                                            # The defined changeset_revision is not associated with a repository_metadata
+                                            # record, so updates must be necessary.
+                                            defined_repository = repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+                                            defined_repo = hg_util.get_repo_for_repository( self.app,
+                                                                                            repository=defined_repository,
+                                                                                            repo_path=None,
+                                                                                            create=False )
+                                            updated_changeset_revision = \
+                                                metadata_util.get_next_downloadable_changeset_revision( defined_repository,
+                                                                                                        defined_repo,
+                                                                                                        changeset_revision )
+                                            if updated_changeset_revision == rm_changeset_revision and updated_changeset_revision != changeset_revision:
+                                                dependent_downloadable_revisions.append( downloadable_revision )
+        return dependent_downloadable_revisions
+
+    def get_export_info_dict( self, export_info_file_path ):
+        """
+        Parse the export_info.xml file contained within the capsule and return a dictionary
+        containing its entries.
+        """
+        export_info_tree, error_message = xml_util.parse_xml( export_info_file_path )
+        export_info_root = export_info_tree.getroot()
+        export_info_dict = {}
+        for elem in export_info_root:
+            if elem.tag == 'export_time':
+                export_info_dict[ 'export_time' ] = elem.text
+            elif elem.tag == 'tool_shed':
+                export_info_dict[ 'tool_shed' ] = elem.text
+            elif elem.tag == 'repository_name':
+                export_info_dict[ 'repository_name' ] = elem.text
+            elif elem.tag == 'repository_owner':
+                export_info_dict[ 'repository_owner' ] = elem.text
+            elif elem.tag == 'changeset_revision':
+                export_info_dict[ 'changeset_revision' ] = elem.text
+            elif elem.tag == 'export_repository_dependencies':
+                if asbool( elem.text ):
+                    export_info_dict[ 'export_repository_dependencies' ] = 'Yes'
+                else:
+                    export_info_dict[ 'export_repository_dependencies' ] = 'No'
+        return export_info_dict
+
+    def get_repository_info_from_manifest( self, manifest_file_path ):
+        """
+        Parse the capsule manifest and return a list of dictionaries containing information about
+        each exported repository archive contained within the capsule.
+        """
+        repository_info_dicts = []
+        manifest_tree, error_message = xml_util.parse_xml( manifest_file_path )
+        if error_message:
+            return repository_info_dicts, error_message
+        manifest_root = manifest_tree.getroot()
+        for elem in manifest_root:
+            # <repository name="package_lapack_3_4" type="tool_dependency_definition" username="test">
+            if elem.tag != 'repository':
+                error_message = 'All level one sub-elements in the manifest.xml file must be <repository> tag sets.  '
+                error_message += 'The tag <b><%s></b> is invalid.' % str( elem.tag )
+                return [], error_message
+            name = elem.get( 'name', None )
+            owner = elem.get( 'username', None )
+            type = elem.get( 'type', None )
+            if name is None or owner is None or type is None:
+                error_message = 'Missing a required name, type, or owner attribute from the tag %s.' % str( elem.tag )
+                return [], error_message
+            repository_info_dict = dict( name=name, owner=owner, type=type )
+            for repository_elem in elem:
+                if repository_elem.tag == 'archive':
+                    # <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive>
+                    archive_file_name = repository_elem.text
+                    repository_info_dict[ 'archive_file_name' ] = archive_file_name
+                    items = archive_file_name.split( '-' )
+                    # rstrip() strips a character set rather than the '.tar.gz' suffix
+                    # and could eat trailing revision characters, so use replace().
+                    changeset_revision = items[ 1 ].replace( '.tar.gz', '' )
+                    repository_info_dict[ 'changeset_revision' ] = changeset_revision
+                elif repository_elem.tag == 'categories':
+                    category_names = []
+                    for category_elem in repository_elem:
+                        if category_elem.tag == 'category':
+                            category_names.append( category_elem.text )
+                    repository_info_dict[ 'category_names' ] = category_names
+                elif repository_elem.tag == 'description':
+                    repository_info_dict[ 'description' ] = repository_elem.text
+                elif repository_elem.tag == 'long_description':
+                    repository_info_dict[ 'long_description' ] = repository_elem.text
+            repository_info_dicts.append( repository_info_dict )
+        return repository_info_dicts, error_message
+
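+    # A minimal manifest.xml sketch (root tag name and values hypothetical) and
+    # the repository_info_dict this method would derive from it:
+    #
+    #   <repositories>
+    #       <repository name="package_lapack_3_4" type="tool_dependency_definition" username="test">
+    #           <description>Compiled LAPACK library</description>
+    #           <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive>
+    #       </repository>
+    #   </repositories>
+    #
+    #   => [ { 'name': 'package_lapack_3_4',
+    #          'owner': 'test',
+    #          'type': 'tool_dependency_definition',
+    #          'description': 'Compiled LAPACK library',
+    #          'archive_file_name': 'package_lapack_3_4-9e7a45ad3522.tar.gz',
+    #          'changeset_revision': '9e7a45ad3522' } ]
+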
+    def get_repository_status_from_tool_shed( self, repository_info_dicts ):
+        """
+        For each exported repository archive contained in the capsule, inspect the Tool Shed to
+        see if that repository already exists or if the current user is authorized to create the
+        repository and set a status appropriately.  If repository dependencies are included in the
+        capsule, repositories may have various owners.  We will keep repositories associated with
+        owners, so we need to restrict created repositories to those the current user can create.
+        If the current user is an admin or a member of the IUC, all repositories will be created
+        no matter the owner.  Otherwise only repositories whose associated owner is the current
+        user will be created.
+        """
+        repository_status_info_dicts = []
+        for repository_info_dict in repository_info_dicts:
+            repository = repository_util.get_repository_by_name_and_owner( self.app,
+                                                                           repository_info_dict[ 'name' ],
+                                                                           repository_info_dict[ 'owner' ] )
+            if repository:
+                if repository.deleted:
+                    repository_info_dict[ 'status' ] = 'Exists, deleted'
+                elif repository.deprecated:
+                    repository_info_dict[ 'status' ] = 'Exists, deprecated'
+                else:
+                    repository_info_dict[ 'status' ] = 'Exists'
+            else:
+                # No repository with the specified name and owner currently exists, so make sure
+                # the current user can create one.
+                if self.user_is_admin:
+                    repository_info_dict[ 'status' ] = None
+                elif self.app.security_agent.user_can_import_repository_archive( self.user,
+                                                                                 repository_info_dict[ 'owner' ] ):
+                    repository_info_dict[ 'status' ] = None
+                else:
+                    repository_info_dict[ 'status' ] = 'Not authorized to import'
+            repository_status_info_dicts.append( repository_info_dict )
+        return repository_status_info_dicts
+
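+    # Summary of the status values assigned above (sketch):
+    #
+    #   repository exists and is deleted         -> 'Exists, deleted'
+    #   repository exists and is deprecated      -> 'Exists, deprecated'
+    #   repository exists                        -> 'Exists'
+    #   missing, current user may create it      -> None
+    #   missing, current user may not create it  -> 'Not authorized to import'
+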
+    def import_repository_archive( self, repository, repository_archive_dict ):
+        """Import a repository archive contained within a repository capsule."""
+        rdah = attribute_handlers.RepositoryDependencyAttributeHandler( self.app, unpopulate=False )
+        tdah = attribute_handlers.ToolDependencyAttributeHandler( self.app, unpopulate=False )
+        archive_file_name = repository_archive_dict.get( 'archive_file_name', None )
+        capsule_file_name = repository_archive_dict[ 'capsule_file_name' ]
+        encoded_file_path = repository_archive_dict[ 'encoded_file_path' ]
+        file_path = encoding_util.tool_shed_decode( encoded_file_path )
+        results_dict = dict( ok=True, error_message='' )
+        archive_file_path = os.path.join( file_path, archive_file_name )
+        archive = tarfile.open( archive_file_path, 'r:*' )
+        repo_dir = repository.repo_path( self.app )
+        hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_dir, create=False )
+        undesirable_dirs_removed = 0
+        undesirable_files_removed = 0
+        check_results = commit_util.check_archive( repository, archive )
+        # We filter out undesirable files but fail on undesirable dirs.  Not
+        # sure why, just trying to maintain the same behavior as before. -nate
+        if not check_results.invalid and not check_results.undesirable_dirs:
+            full_path = os.path.abspath( repo_dir )
+            # Extract the uploaded archive to the repository root.
+            archive.extractall( path=full_path, members=check_results.valid )
+            archive.close()
+            for tar_member in check_results.valid:
+                filename = tar_member.name
+                uploaded_file_name = os.path.join( full_path, filename )
+                if os.path.split( uploaded_file_name )[ -1 ] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
+                    # Inspect the contents of the file to see if toolshed or changeset_revision attributes
+                    # are missing and if so, set them appropriately.
+                    altered, root_elem, error_message = rdah.handle_tag_attributes( uploaded_file_name )
+                    if error_message:
+                        results_dict[ 'ok' ] = False
+                        results_dict[ 'error_message' ] += error_message
+                    if altered:
+                        tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
+                        shutil.move( tmp_filename, uploaded_file_name )
+                elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
+                    # Inspect the contents of the file to see if toolshed or changeset_revision
+                    # attributes are missing and if so, set them appropriately.
+                    altered, root_elem, error_message = tdah.handle_tag_attributes( uploaded_file_name )
+                    if error_message:
+                        results_dict[ 'ok' ] = False
+                        results_dict[ 'error_message' ] += error_message
+                    if altered:
+                        tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
+                        shutil.move( tmp_filename, uploaded_file_name )
+            commit_message = 'Imported from capsule %s' % str( capsule_file_name )
+            # Send email notification to those that have registered to receive alerts for new repositories in this Tool Shed.
+            new_repo_alert = True
+            # Since the repository is new, the following must be False.
+            remove_repo_files_not_in_tar = False
+            filenames_in_archive = [ member.name for member in check_results.valid ]
+            undesirable_files_removed = len( check_results.undesirable_files )
+            undesirable_dirs_removed = 0
+            ok, error_message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
+                commit_util.handle_directory_changes( self.app,
+                                                      self.host,
+                                                      self.user.username,
+                                                      repository,
+                                                      full_path,
+                                                      filenames_in_archive,
+                                                      remove_repo_files_not_in_tar,
+                                                      new_repo_alert,
+                                                      commit_message,
+                                                      undesirable_dirs_removed,
+                                                      undesirable_files_removed )
+            if error_message:
+                results_dict[ 'ok' ] = False
+                results_dict[ 'error_message' ] += error_message
+            try:
+                rmm = repository_metadata_manager.RepositoryMetadataManager( app=self.app,
+                                                                             user=self.user,
+                                                                             repository=repository )
+                status, error_message = rmm.set_repository_metadata_due_to_new_tip( self.host,
+                                                                                    content_alert_str=content_alert_str )
+                if error_message:
+                    results_dict[ 'ok' ] = False
+                    results_dict[ 'error_message' ] += error_message
+            except Exception as e:
+                log.debug( "Error setting metadata on repository %s created from imported archive %s: %s" %
+                    ( str( repository.name ), str( archive_file_name ), str( e ) ) )
+        else:
+            archive.close()
+            results_dict[ 'ok' ] = False
+            results_dict[ 'error_message' ] += 'Capsule errors were found: '
+            if check_results.invalid:
+                results_dict[ 'error_message' ] += '%s Invalid files were: %s.' % (
+                    ' '.join( check_results.errors ), ', '.join( check_results.invalid ) )
+            if check_results.undesirable_dirs:
+                results_dict[ 'error_message' ] += ' Undesirable directories were: %s.' % (
+                    ', '.join( check_results.undesirable_dirs ) )
+        return results_dict
+
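+    # Shape of the results_dict returned above (sketch): the 'ok' flag is False
+    # whenever any error_message text was accumulated.
+    #
+    #   { 'ok': True, 'error_message': '' }
+    #   { 'ok': False, 'error_message': 'Capsule errors were found: ...' }
+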
+    def upload_capsule( self, **kwd ):
+        """Upload and prepare an exported repository capsule for validation."""
+        file_data = kwd.get( 'file_data', '' )
+        url = kwd.get( 'url', '' )
+        uploaded_file = None
+        return_dict = dict( error_message='',
+                            encoded_file_path=None,
+                            status='ok',
+                            tar_archive=None,
+                            uploaded_file=None,
+                            capsule_file_name=None )
+        if url:
+            valid_url = True
+            try:
+                stream = urlopen( url )
+            except Exception as e:
+                valid_url = False
+                return_dict[ 'error_message' ] = 'Error importing file via http: %s' % str( e )
+                return_dict[ 'status' ] = 'error'
+                return return_dict
+            if valid_url:
+                fd, uploaded_file_name = tempfile.mkstemp()
+                uploaded_file = open( uploaded_file_name, 'wb' )
+                while True:
+                    chunk = stream.read( CHUNK_SIZE )
+                    if not chunk:
+                        break
+                    uploaded_file.write( chunk )
+                uploaded_file.flush()
+                uploaded_file_filename = url.split( '/' )[ -1 ]
+        elif file_data not in ( '', None ):
+            uploaded_file = file_data.file
+            uploaded_file_name = uploaded_file.name
+            uploaded_file_filename = os.path.split( file_data.filename )[ -1 ]
+        if uploaded_file is not None:
+            if os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0:
+                uploaded_file.close()
+                return_dict[ 'error_message' ] = 'Your uploaded capsule file is empty.'
+                return_dict[ 'status' ] = 'error'
+                return return_dict
+            try:
+                # Open for reading with transparent compression.
+                tar_archive = tarfile.open( uploaded_file_name, 'r:*' )
+            except tarfile.ReadError as e:
+                error_message = 'Error opening file %s: %s' % ( str( uploaded_file_name ), str( e ) )
+                log.exception( error_message )
+                return_dict[ 'error_message' ] = error_message
+                return_dict[ 'status' ] = 'error'
+                uploaded_file.close()
+                return return_dict
+            if not self.validate_archive_paths( tar_archive ):
+                return_dict[ 'status' ] = 'error'
+                return_dict[ 'error_message' ] = ( 'This capsule contains an invalid member type '
+                    'or a file outside the archive path.' )
+                uploaded_file.close()
+                return return_dict
+            return_dict[ 'tar_archive' ] = tar_archive
+            return_dict[ 'capsule_file_name' ] = uploaded_file_filename
+            uploaded_file.close()
+        else:
+            return_dict[ 'error_message' ] = 'No files were entered on the import form.'
+            return_dict[ 'status' ] = 'error'
+            return return_dict
+        return return_dict
+
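+    # On success the dictionary returned above looks like this sketch
+    # (hypothetical file name):
+    #
+    #   { 'error_message': '',
+    #     'encoded_file_path': None,
+    #     'status': 'ok',
+    #     'tar_archive': <open tarfile.TarFile>,
+    #     'uploaded_file': None,
+    #     'capsule_file_name': 'capsule_with_dependencies.tar.gz' }
+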
+    def validate_archive_paths( self, tar_archive ):
+        """
+        Inspect the archive contents to ensure that every member is a regular file,
+        directory or link and that no member path escapes the archive root.
+        Return True only if all members are safe.
+        """
+        for member in tar_archive.getmembers():
+            if not ( member.isdir() or member.isfile() or member.islnk() ):
+                return False
+            elif not safe_relpath( member.name ):
+                return False
+        return True
+
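+    # Sketch of the behavior assumed of the safe_relpath helper used above: it
+    # rejects absolute paths and paths that climb out of the extraction root.
+    #
+    #   safe_relpath( 'tools/filter.xml' )  -> True
+    #   safe_relpath( '../outside.txt' )    -> False
+    #   safe_relpath( '/etc/passwd' )       -> False
+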
+    def validate_capsule( self, **kwd ):
+        """
+        Inspect the uploaded capsule's manifest and its contained files to ensure it is a valid
+        repository capsule.
+        """
+        capsule_dict = {}
+        capsule_dict.update( kwd )
+        encoded_file_path = capsule_dict.get( 'encoded_file_path', '' )
+        file_path = encoding_util.tool_shed_decode( encoded_file_path )
+        # The capsule must contain a valid XML file named export_info.xml.
+        export_info_file_path = os.path.join( file_path, 'export_info.xml' )
+        export_info_tree, error_message = xml_util.parse_xml( export_info_file_path )
+        if error_message:
+            capsule_dict[ 'error_message' ] = error_message
+            capsule_dict[ 'status' ] = 'error'
+            return capsule_dict
+        # The capsule must contain a valid XML file named manifest.xml.
+        manifest_file_path = os.path.join( file_path, 'manifest.xml' )
+        # Validate the capsule manifest by inspecting name, owner, changeset_revision and type
+        # information contained within each <repository> tag set.
+        repository_info_dicts, error_message = self.get_repository_info_from_manifest( manifest_file_path )
+        if error_message:
+            capsule_dict[ 'error_message' ] = error_message
+            capsule_dict[ 'status' ] = 'error'
+            return capsule_dict
+        # Validate the capsule manifest by ensuring all <repository> tag sets contain a valid
+        # <archive> sub-element.
+        archives, error_message = self.get_archives_from_manifest( manifest_file_path )
+        if error_message:
+            capsule_dict[ 'error_message' ] = error_message
+            capsule_dict[ 'status' ] = 'error'
+            return capsule_dict
+        # Validate the capsule manifest by ensuring each defined archive file name exists within
+        # the capsule.
+        error_message = self.verify_archives_in_capsule( file_path, archives )
+        if error_message:
+            capsule_dict[ 'error_message' ] = error_message
+            capsule_dict[ 'status' ] = 'error'
+            return capsule_dict
+        capsule_dict[ 'status' ] = 'ok'
+        return capsule_dict
+
+    def verify_archives_in_capsule( self, file_path, archives ):
+        """
+        Inspect the files contained within the capsule and make sure each is defined correctly
+        in the capsule manifest.
+        """
+        error_message = ''
+        for archive_file_name in archives:
+            full_path = os.path.join( file_path, archive_file_name )
+            if not os.path.exists( full_path ):
+                error_message = 'The uploaded capsule is invalid because the contained manifest.xml '
+                error_message += 'file defines an archive file named <b>%s</b> which ' % str( archive_file_name )
+                error_message += 'is not contained within the capsule.'
+                break
+        return error_message
diff --git a/lib/tool_shed/dependencies/__init__.py b/lib/tool_shed/dependencies/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/dependencies/attribute_handlers.py b/lib/tool_shed/dependencies/attribute_handlers.py
new file mode 100644
index 0000000..8b9a896
--- /dev/null
+++ b/lib/tool_shed/dependencies/attribute_handlers.py
@@ -0,0 +1,204 @@
+import copy
+import logging
+
+from galaxy.util import asbool
+from galaxy.util.odict import odict
+from galaxy.web import url_for
+
+from tool_shed.dependencies.tool import tag_attribute_handler
+from tool_shed.repository_types.util import REPOSITORY_DEPENDENCY_DEFINITION_FILENAME
+from tool_shed.repository_types.util import TOOL_DEPENDENCY_DEFINITION_FILENAME
+from tool_shed.util import hg_util
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_util
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class RepositoryDependencyAttributeHandler( object ):
+
+    def __init__( self, app, unpopulate ):
+        self.app = app
+        self.file_name = REPOSITORY_DEPENDENCY_DEFINITION_FILENAME
+        self.unpopulate = unpopulate
+
+    def check_tag_attributes( self, elem ):
+        # <repository name="molecule_datatypes" owner="test" />
+        error_message = ''
+        name = elem.get( 'name' )
+        if not name:
+            error_message += 'The tag is missing the required name attribute.  '
+        owner = elem.get( 'owner' )
+        if not owner:
+            error_message += 'The tag is missing the required owner attribute.  '
+        if error_message:
+            log.debug( error_message )
+        return error_message
+
+    def handle_complex_dependency_elem( self, parent_elem, elem_index, elem ):
+        """
+        Populate or unpopulate the toolshed and changeset_revision attributes of a
+        <repository> tag that defines a complex repository dependency.
+        """
+        # <repository name="package_eigen_2_0" owner="test" prior_installation_required="True" />
+        altered, new_elem, error_message = self.handle_elem( elem )
+        if error_message:
+            error_message += '  The %s file contains an invalid <repository> tag.' % TOOL_DEPENDENCY_DEFINITION_FILENAME
+        return altered, new_elem, error_message
+
+    def handle_elem( self, elem ):
+        """Populate or unpopulate the changeset_revision and toolshed attributes of repository tags."""
+        # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+        # <repository changeset_revision="xxx" name="package_xorg_macros_1_17_1" owner="test" toolshed="yyy">
+        #    <package name="xorg_macros" version="1.17.1" />
+        # </repository>
+        error_message = ''
+        name = elem.get( 'name' )
+        owner = elem.get( 'owner' )
+        # The name and owner attributes are always required, so if either are missing, return the error message.
+        if not name or not owner:
+            error_message = self.check_tag_attributes( elem )
+            return False, elem, error_message
+        altered = False
+        toolshed = elem.get( 'toolshed' )
+        changeset_revision = elem.get( 'changeset_revision' )
+        # For a short period of time a bug caused the prior_installation_required attribute
+        # to be set to False and included in the <repository> tag when a repository was exported
+        # along with its dependencies.  The following eliminates this problematic attribute upon import.
+        prior_installation_required = elem.get( 'prior_installation_required' )
+        if prior_installation_required is not None and not asbool( prior_installation_required ):
+            del elem.attrib[ 'prior_installation_required' ]
+        sub_elems = list( elem )
+        if len( sub_elems ) > 0:
+            # At this point, a <repository> tag will point only to a package.
+            # <package name="xorg_macros" version="1.17.1" />
+            # Coerce the list to an odict().
+            sub_elements = odict()
+            packages = []
+            for sub_elem in sub_elems:
+                sub_elem_type = sub_elem.tag
+                sub_elem_name = sub_elem.get( 'name' )
+                sub_elem_version = sub_elem.get( 'version' )
+                if sub_elem_type and sub_elem_name and sub_elem_version:
+                    packages.append( ( sub_elem_name, sub_elem_version ) )
+            sub_elements[ 'packages' ] = packages
+        else:
+            # Set to None.
+            sub_elements = None
+        if self.unpopulate:
+            # We're exporting the repository, so eliminate all toolshed and changeset_revision attributes
+            # from the <repository> tag.
+            # Default new_elem to the received elem so a valid element is returned even
+            # when no attributes need to be removed.
+            new_elem = elem
+            if toolshed or changeset_revision:
+                attributes = odict()
+                attributes[ 'name' ] = name
+                attributes[ 'owner' ] = owner
+                prior_installation_required = elem.get( 'prior_installation_required' )
+                if asbool( prior_installation_required ):
+                    attributes[ 'prior_installation_required' ] = 'True'
+                new_elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=sub_elements )
+                altered = True
+            return altered, new_elem, error_message
+        # From here on we're populating the toolshed and changeset_revision attributes if necessary.
+        if not toolshed:
+            # Default the setting to the current tool shed.
+            toolshed = str( url_for( '/', qualified=True ) ).rstrip( '/' )
+            elem.attrib[ 'toolshed' ] = toolshed
+            altered = True
+        if not changeset_revision:
+            # Populate the changeset_revision attribute with the latest installable metadata revision for
+            # the defined repository.  We use the latest installable revision instead of the latest metadata
+            # revision to ensure that the contents of the revision are valid.
+            repository = repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+            if repository:
+                repo = hg_util.get_repo_for_repository( self.app,
+                                                        repository=repository,
+                                                        repo_path=None,
+                                                        create=False )
+                latest_installable_changeset_revision = \
+                    metadata_util.get_latest_downloadable_changeset_revision( self.app, repository, repo )
+                if latest_installable_changeset_revision != hg_util.INITIAL_CHANGELOG_HASH:
+                    elem.attrib[ 'changeset_revision' ] = latest_installable_changeset_revision
+                    altered = True
+                else:
+                    error_message = 'Invalid latest installable changeset_revision %s ' % \
+                        str( latest_installable_changeset_revision )
+                    error_message += 'retrieved for repository %s owned by %s.  ' % ( str( name ), str( owner ) )
+            else:
+                error_message = 'Unable to locate repository with name %s and owner %s.  ' % ( str( name ), str( owner ) )
+        return altered, elem, error_message
+
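+    # Sketch of the two directions handle_elem() supports (hypothetical values):
+    #
+    #   populating (dependency definition uploaded to the tool shed):
+    #     <repository name="package_x" owner="test" />
+    #       -> <repository name="package_x" owner="test"
+    #                      toolshed="http://localhost:9009"
+    #                      changeset_revision="9e7a45ad3522" />
+    #
+    #   unpopulating (repository exported as a capsule):
+    #     <repository name="package_x" owner="test"
+    #                 toolshed="http://localhost:9009" changeset_revision="9e7a45ad3522" />
+    #       -> <repository name="package_x" owner="test" />
+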
+    def handle_sub_elem( self, parent_elem, elem_index, elem ):
+        """
+        Populate or unpopulate the toolshed and changeset_revision attributes for each of
+        the following tag sets.
+        <action type="set_environment_for_install">
+        <action type="setup_r_environment">
+        <action type="setup_ruby_environment">
+        """
+        sub_elem_altered = False
+        error_message = ''
+        for sub_index, sub_elem in enumerate( elem ):
+            # Make sure to skip comments and tags that are not <repository>.
+            if sub_elem.tag == 'repository':
+                altered, new_sub_elem, message = self.handle_elem( sub_elem )
+                if message:
+                    error_message += 'The %s file contains an invalid <repository> tag.  %s' % \
+                        ( TOOL_DEPENDENCY_DEFINITION_FILENAME, message )
+                if altered:
+                    if not sub_elem_altered:
+                        sub_elem_altered = True
+                    elem[ sub_index ] = new_sub_elem
+        if sub_elem_altered:
+            parent_elem[ elem_index ] = elem
+        return sub_elem_altered, parent_elem, error_message
+
+    def handle_tag_attributes( self, config ):
+        """
+        Populate or unpopulate the toolshed and changeset_revision attributes of a
+        <repository> tag.  Populating will occur when a dependency definition file
+        is being uploaded to the repository, while unpopulating will occur when the
+        repository is being exported.
+        """
+        # Make sure we're looking at a valid repository_dependencies.xml file.
+        tree, error_message = xml_util.parse_xml( config )
+        if tree is None:
+            return False, None, error_message
+        root = tree.getroot()
+        root_altered = False
+        new_root = copy.deepcopy( root )
+        for index, elem in enumerate( root ):
+            if elem.tag == 'repository':
+                # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+                altered, new_elem, error_message = self.handle_elem( elem )
+                if error_message:
+                    error_message = 'The %s file contains an invalid <repository> tag.  %s' % ( self.file_name, error_message )
+                    return False, None, error_message
+                if altered:
+                    if not root_altered:
+                        root_altered = True
+                    new_root[ index ] = new_elem
+        return root_altered, new_root, error_message
+
+
+class ToolDependencyAttributeHandler( object ):
+
+    def __init__( self, app, unpopulate ):
+        self.app = app
+        self.file_name = TOOL_DEPENDENCY_DEFINITION_FILENAME
+        self.unpopulate = unpopulate
+
+    def handle_tag_attributes( self, tool_dependencies_config ):
+        """
+        Populate or unpopulate the toolshed and changeset_revision attributes of each <repository>
+        tag defined within a tool_dependencies.xml file.
+        """
+        rdah = RepositoryDependencyAttributeHandler( self.app, self.unpopulate )
+        tah = tag_attribute_handler.TagAttributeHandler( self.app, rdah, self.unpopulate )
+        altered = False
+        error_message = ''
+        # Make sure we're looking at a valid tool_dependencies.xml file.
+        tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+        if tree is None:
+            return False, None, error_message
+        root = tree.getroot()
+        altered, new_root, error_message = tah.process_config( root, skip_actions_tags=False )
+        return altered, new_root, error_message
diff --git a/lib/tool_shed/dependencies/repository/__init__.py b/lib/tool_shed/dependencies/repository/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/dependencies/repository/relation_builder.py b/lib/tool_shed/dependencies/repository/relation_builder.py
new file mode 100644
index 0000000..5943800
--- /dev/null
+++ b/lib/tool_shed/dependencies/repository/relation_builder.py
@@ -0,0 +1,488 @@
+import logging
+
+import tool_shed.util.repository_util
+from galaxy.util import asbool, listify
+from tool_shed.util import (common_util, container_util, hg_util, metadata_util,
+    shed_util_common as suc)
+
+log = logging.getLogger( __name__ )
+
+
+class RelationBuilder( object ):
+
+    def __init__( self, app, repository, repository_metadata, tool_shed_url ):
+        self.all_repository_dependencies = {}
+        self.app = app
+        self.circular_repository_dependencies = []
+        self.repository = repository
+        self.repository_metadata = repository_metadata
+        self.handled_key_rd_dicts = []
+        self.key_rd_dicts_to_be_processed = []
+        self.tool_shed_url = tool_shed_url
+        # This is a temporary work-around for handling repository dependencies that are needed
+        # only if compiling a dependent package.  This value should be True unless exporting
+        # a repository capsule, in which case the set_filter_dependencies_needed_for_compiling()
+        # function is called.
+        self.filter_dependencies_needed_for_compiling = True
+
+    def can_add_to_key_rd_dicts( self, key_rd_dict, key_rd_dicts ):
+        """Handle the case where an update to the changeset revision was done."""
+        k = next(iter(key_rd_dict))
+        rd = key_rd_dict[ k ]
+        partial_rd = rd[ 0:3 ]
+        for kr_dict in key_rd_dicts:
+            key = next(iter(kr_dict))
+            if key == k:
+                repository_dependency = kr_dict[ key ]
+                if repository_dependency[ 0:3 ] == partial_rd:
+                    return False
+        return True
+
+    def filter_only_if_compiling_contained_td( self, key_rd_dict ):
+        """
+        Return a copy of the received key_rd_dict with repository dependencies that are needed
+        only_if_compiling_contained_td filtered out of the list of repository dependencies for
+        each rd_key.
+        """
+        filtered_key_rd_dict = {}
+        for rd_key, required_rd_tup in key_rd_dict.items():
+            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                common_util.parse_repository_dependency_tuple( required_rd_tup )
+            if not asbool( only_if_compiling_contained_td ):
+                filtered_key_rd_dict[ rd_key ] = required_rd_tup
+        return filtered_key_rd_dict
+
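+    # Filtering sketch (hypothetical key and tuples): the final tuple component
+    # is the only_if_compiling_contained_td flag.
+    #
+    #   { key: ( ts, name, owner, rev, 'False', 'True' ) }   -> {}
+    #   { key: ( ts, name, owner, rev, 'False', 'False' ) }  -> unchanged
+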
+    def get_prior_installation_required_and_only_if_compiling_contained_td( self ):
+        """
+        This method is called from the tool shed and never Galaxy.  If self.all_repository_dependencies
+        contains a repository dependency tuple that is associated with self.repository, return the
+        value of the tuple's prior_installation_required component.
+        """
+        cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url( self.tool_shed_url )
+        if self.all_repository_dependencies:
+            for rd_key, rd_tups in self.all_repository_dependencies.items():
+                if rd_key in [ 'root_key', 'description' ]:
+                    continue
+                for rd_tup in rd_tups:
+                    rd_toolshed, rd_name, rd_owner, rd_changeset_revision, \
+                        rd_prior_installation_required, \
+                        rd_only_if_compiling_contained_td = \
+                        common_util.parse_repository_dependency_tuple( rd_tup )
+                    cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
+                    if cleaned_rd_toolshed == cleaned_toolshed_base_url and \
+                            rd_name == self.repository.name and \
+                            rd_owner == self.repository.user.username and \
+                            rd_changeset_revision == self.repository_metadata.changeset_revision:
+                        return rd_prior_installation_required, rd_only_if_compiling_contained_td
+        elif self.repository_metadata:
+            # Get the list of changeset revisions from the tool shed to which self.repository may be updated.
+            metadata = self.repository_metadata.metadata
+            current_changeset_revision = str( self.repository_metadata.changeset_revision )
+            # Get the changeset revision to which the current value of required_repository_changeset_revision
+            # should be updated if it's not current.
+            text = metadata_util.get_updated_changeset_revisions( self.app,
+                                                                  name=str( self.repository.name ),
+                                                                  owner=str( self.repository.user.username ),
+                                                                  changeset_revision=current_changeset_revision )
+            if text:
+                valid_changeset_revisions = listify( text )
+                if current_changeset_revision not in valid_changeset_revisions:
+                    valid_changeset_revisions.append( current_changeset_revision )
+            else:
+                valid_changeset_revisions = [ current_changeset_revision ]
+            repository_dependencies_dict = metadata[ 'repository_dependencies' ]
+            rd_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
+            for rd_tup in rd_tups:
+                rd_toolshed, rd_name, rd_owner, rd_changeset_revision, \
+                    rd_prior_installation_required, \
+                    rd_only_if_compiling_contained_td = \
+                    common_util.parse_repository_dependency_tuple( rd_tup )
+                cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
+                if cleaned_rd_toolshed == cleaned_toolshed_base_url and \
+                        rd_name == self.repository.name and \
+                        rd_owner == self.repository.user.username and \
+                        rd_changeset_revision in valid_changeset_revisions:
+                    return rd_prior_installation_required, rd_only_if_compiling_contained_td
+        # Default both prior_installation_required and only_if_compiling_contained_td to False.
+        return 'False', 'False'
+
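+    # Sketch of the tuple layout assumed throughout this module:
+    # common_util.parse_repository_dependency_tuple() yields
+    #   ( toolshed, name, owner, changeset_revision,
+    #     prior_installation_required, only_if_compiling_contained_td )
+    # with the last two components defaulting to 'False' for older
+    # 4- or 5-component tuples.
+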
+    def get_key_for_repository_changeset_revision( self ):
+        # The value of self.tool_shed_url must include the port, but doesn't have to include the protocol.
+        prior_installation_required, only_if_compiling_contained_td = \
+            self.get_prior_installation_required_and_only_if_compiling_contained_td()
+        # Create a key with the value of prior_installation_required defaulted to False.
+        key = container_util.generate_repository_dependencies_key_for_repository( self.tool_shed_url,
+                                                                                  self.repository.name,
+                                                                                  self.repository.user.username,
+                                                                                  self.repository_metadata.changeset_revision,
+                                                                                  prior_installation_required,
+                                                                                  only_if_compiling_contained_td )
+        return key
+
+    def get_repository_dependencies_for_changeset_revision( self ):
+        """
+        Return a dictionary of all repositories upon which the contents of the
+        self.repository_metadata record depend.  The dictionary keys are name-spaced values
+        consisting of:
+        self.tool_shed_url/repository_name/repository_owner/changeset_revision
+        and the values are lists of repository_dependency tuples consisting of:
+        ( tool_shed_url, repository_name, repository_owner, changeset_revision,
+        prior_installation_required, only_if_compiling_contained_td ).
+        This method ensures that all required repositories to the nth degree are returned.
+        """
+        # Assume the current repository does not have repository dependencies defined for it.
+        current_repository_key = None
+        metadata = self.repository_metadata.metadata
+        if metadata:
+            # The value of self.tool_shed_url must include the port, but doesn't have to include
+            # the protocol.
+            if 'repository_dependencies' in metadata:
+                current_repository_key = self.get_key_for_repository_changeset_revision()
+                repository_dependencies_dict = metadata[ 'repository_dependencies' ]
+                if not self.all_repository_dependencies:
+                    self.initialize_all_repository_dependencies( current_repository_key, repository_dependencies_dict )
+                # Handle the repository dependencies defined in the current repository, if any, and populate
+                # the various repository dependency objects for this round of processing.
+                current_repository_key_rd_dicts = \
+                    self.populate_repository_dependency_objects_for_processing( current_repository_key,
+                                                                                repository_dependencies_dict )
+        if current_repository_key:
+            if current_repository_key_rd_dicts:
+                # There should be only a single current_repository_key_rd_dict in this list.
+                current_repository_key_rd_dict = current_repository_key_rd_dicts[ 0 ]
+                # Handle circular repository dependencies.
+                if not self.in_circular_repository_dependencies( current_repository_key_rd_dict ):
+                    if current_repository_key in self.all_repository_dependencies:
+                        self.handle_current_repository_dependency( current_repository_key )
+                elif self.key_rd_dicts_to_be_processed:
+                    self.handle_next_repository_dependency()
+            elif self.key_rd_dicts_to_be_processed:
+                self.handle_next_repository_dependency()
+        elif self.key_rd_dicts_to_be_processed:
+            self.handle_next_repository_dependency()
+        self.all_repository_dependencies = self.prune_invalid_repository_dependencies( self.all_repository_dependencies )
+        return self.all_repository_dependencies
+
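+    # Sketch of the returned structure (hypothetical values; the exact key string
+    # is generated by container_util.generate_repository_dependencies_key_for_repository):
+    #
+    #   { 'root_key': '<tool_shed_url>/package_x/test/9e7a45ad3522',
+    #     'description': 'Required packages',
+    #     '<tool_shed_url>/package_x/test/9e7a45ad3522': [
+    #         [ '<tool_shed_url>', 'package_y', 'test', '1a070566e9c6',
+    #           'False', 'False' ] ] }
+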
+    def get_repository_dependency_as_key( self, repository_dependency ):
+        tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+            common_util.parse_repository_dependency_tuple( repository_dependency )
+        return container_util.generate_repository_dependencies_key_for_repository( tool_shed,
+                                                                                   name,
+                                                                                   owner,
+                                                                                   changeset_revision,
+                                                                                   prior_installation_required,
+                                                                                   only_if_compiling_contained_td )
+
+    def get_updated_changeset_revisions_for_repository_dependencies( self, key_rd_dicts ):
+        updated_key_rd_dicts = []
+        for key_rd_dict in key_rd_dicts:
+            key = next(iter(key_rd_dict))
+            repository_dependency = key_rd_dict[ key ]
+            rd_toolshed, rd_name, rd_owner, rd_changeset_revision, \
+                rd_prior_installation_required, \
+                rd_only_if_compiling_contained_td = \
+                common_util.parse_repository_dependency_tuple( repository_dependency )
+            if suc.tool_shed_is_this_tool_shed( rd_toolshed ):
+                repository = tool_shed.util.repository_util.get_repository_by_name_and_owner( self.app, rd_name, rd_owner )
+                if repository:
+                    repository_id = self.app.security.encode_id( repository.id )
+                    repository_metadata = \
+                        metadata_util.get_repository_metadata_by_repository_id_changeset_revision( self.app,
+                                                                                                   repository_id,
+                                                                                                   rd_changeset_revision )
+                    if repository_metadata:
+                        # The repository changeset_revision is installable, so no updates are available.
+                        updated_key_rd_dicts.append( key_rd_dict )
+                    else:
+                        # The repository changeset_revision is no longer installable, so see if there's been an update.
+                        repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
+                        changeset_revision = metadata_util.get_next_downloadable_changeset_revision( repository, repo, rd_changeset_revision )
+                        if changeset_revision != rd_changeset_revision:
+                            repository_metadata = \
+                                metadata_util.get_repository_metadata_by_repository_id_changeset_revision( self.app,
+                                                                                                           repository_id,
+                                                                                                           changeset_revision )
+                        if repository_metadata:
+                            new_key_rd_dict = {}
+                            new_key_rd_dict[ key ] = \
+                                [ rd_toolshed,
+                                  rd_name,
+                                  rd_owner,
+                                  repository_metadata.changeset_revision,
+                                  rd_prior_installation_required,
+                                  rd_only_if_compiling_contained_td ]
+                            # We have the updated changeset revision.
+                            updated_key_rd_dicts.append( new_key_rd_dict )
+                        else:
+                            repository_components_tuple = container_util.get_components_from_key( key )
+                            components_list = tool_shed.util.repository_util.extract_components_from_tuple( repository_components_tuple )
+                            toolshed, repository_name, repository_owner, repository_changeset_revision = components_list[ 0:4 ]
+                            # For backward compatibility with the 12/20/12 Galaxy release.
+                            if len( components_list ) in ( 4, 5 ):
+                                rd_only_if_compiling_contained_td = 'False'
+                            message = "The revision %s defined for repository %s owned by %s is invalid, so repository " % \
+                                ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ) )
+                            message += "dependencies defined for repository %s will be ignored." % str( repository_name )
+                            log.debug( message )
+                else:
+                    repository_components_tuple = container_util.get_components_from_key( key )
+                    components_list = tool_shed.util.repository_util.extract_components_from_tuple( repository_components_tuple )
+                    toolshed, repository_name, repository_owner, repository_changeset_revision = components_list[ 0:4 ]
+                    message = "The revision %s defined for repository %s owned by %s is invalid, so repository " % \
+                        ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ) )
+                    message += "dependencies defined for repository %s will be ignored." % str( repository_name )
+                    log.debug( message )
+        return updated_key_rd_dicts
+
+    def handle_circular_repository_dependency( self, repository_key, repository_dependency ):
+        all_repository_dependencies_root_key = self.all_repository_dependencies[ 'root_key' ]
+        repository_dependency_as_key = self.get_repository_dependency_as_key( repository_dependency )
+        self.update_circular_repository_dependencies( repository_key,
+                                                      repository_dependency,
+                                                      self.all_repository_dependencies[ repository_dependency_as_key ] )
+        if all_repository_dependencies_root_key != repository_dependency_as_key:
+            self.all_repository_dependencies[ repository_key ] = [ repository_dependency ]
+
+    def handle_current_repository_dependency( self, current_repository_key ):
+        current_repository_key_rd_dicts = []
+        for rd in self.all_repository_dependencies[ current_repository_key ]:
+            rd_copy = [ str( item ) for item in rd ]
+            new_key_rd_dict = {}
+            new_key_rd_dict[ current_repository_key ] = rd_copy
+            current_repository_key_rd_dicts.append( new_key_rd_dict )
+        if current_repository_key_rd_dicts:
+            self.handle_key_rd_dicts_for_repository( current_repository_key, current_repository_key_rd_dicts )
+            return self.get_repository_dependencies_for_changeset_revision()
+
+    def handle_key_rd_dicts_for_repository( self, current_repository_key, repository_key_rd_dicts ):
+        key_rd_dict = repository_key_rd_dicts.pop( 0 )
+        repository_dependency = key_rd_dict[ current_repository_key ]
+        toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+            common_util.parse_repository_dependency_tuple( repository_dependency )
+        if suc.tool_shed_is_this_tool_shed( toolshed ):
+            required_repository = tool_shed.util.repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+            self.repository = required_repository
+            repository_id = self.app.security.encode_id( required_repository.id )
+            required_repository_metadata = \
+                metadata_util.get_repository_metadata_by_repository_id_changeset_revision( self.app,
+                                                                                           repository_id,
+                                                                                           changeset_revision )
+            self.repository_metadata = required_repository_metadata
+            if required_repository_metadata:
+                # The required_repository_metadata changeset_revision is installable.
+                required_metadata = required_repository_metadata.metadata
+                if required_metadata:
+                    for current_repository_key_rd_dict in repository_key_rd_dicts:
+                        if not self.in_key_rd_dicts( current_repository_key_rd_dict, self.key_rd_dicts_to_be_processed ):
+                            # Add the current repository_dependency into self.key_rd_dicts_to_be_processed.
+                            self.key_rd_dicts_to_be_processed.append( current_repository_key_rd_dict )
+            if not self.in_key_rd_dicts( key_rd_dict, self.handled_key_rd_dicts ):
+                # Add the current repository_dependency into self.handled_key_rd_dicts.
+                self.handled_key_rd_dicts.append( key_rd_dict )
+            if self.in_key_rd_dicts( key_rd_dict, self.key_rd_dicts_to_be_processed ):
+                # Remove the current repository from self.key_rd_dicts_to_be_processed.
+                self.key_rd_dicts_to_be_processed = self.remove_from_key_rd_dicts( key_rd_dict, self.key_rd_dicts_to_be_processed )
+        else:
+            # The repository is in a different tool shed, so build a URL and send a request.
+            error_message = "Repository dependencies are currently supported only within the same Tool Shed.  "
+            error_message += "Ignoring repository dependency definition for tool shed "
+            error_message += "%s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
+            log.debug( error_message )
+
+    def handle_next_repository_dependency( self ):
+        next_repository_key_rd_dict = self.key_rd_dicts_to_be_processed.pop( 0 )
+        next_repository_key_rd_dicts = [ next_repository_key_rd_dict ]
+        next_repository_key = next(iter(next_repository_key_rd_dict))
+        self.handle_key_rd_dicts_for_repository( next_repository_key, next_repository_key_rd_dicts )
+        return self.get_repository_dependencies_for_changeset_revision()
+
+    def in_all_repository_dependencies( self, repository_key, repository_dependency ):
+        """
+        Return True if { repository_key : repository_dependency } is in self.all_repository_dependencies.
+        """
+        for key, val in self.all_repository_dependencies.items():
+            if key != repository_key:
+                continue
+            if repository_dependency in val:
+                return True
+        return False
+
+    def in_circular_repository_dependencies( self, repository_key_rd_dict ):
+        """
+        Return True if either member of any circular dependency tuple, converted into a
+        key : value pair, matches the pair defined in the received repository_key_rd_dict.
+        """
+        for tup in self.circular_repository_dependencies:
+            rd_0, rd_1 = tup
+            rd_0_as_key = self.get_repository_dependency_as_key( rd_0 )
+            rd_1_as_key = self.get_repository_dependency_as_key( rd_1 )
+            if rd_0_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_0_as_key ] == rd_1:
+                return True
+            if rd_1_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_1_as_key ] == rd_0:
+                return True
+        return False
+
+    def in_key_rd_dicts( self, key_rd_dict, key_rd_dicts ):
+        """Return True if key_rd_dict is contained in the list of key_rd_dicts."""
+        k = next(iter(key_rd_dict))
+        v = key_rd_dict[ k ]
+        # Use a distinct loop variable so the received key_rd_dict is not shadowed.
+        for krd_dict in key_rd_dicts:
+            for key, val in krd_dict.items():
+                if key == k and val == v:
+                    return True
+        return False
+
+    def initialize_all_repository_dependencies( self, current_repository_key, repository_dependencies_dict ):
+        """Initialize the self.all_repository_dependencies dictionary."""
+        # It's safe to assume that current_repository_key in this case will have a value.
+        self.all_repository_dependencies[ 'root_key' ] = current_repository_key
+        self.all_repository_dependencies[ current_repository_key ] = []
+        # Store the value of the 'description' key only once, the first time through this recursive method.
+        description = repository_dependencies_dict.get( 'description', None )
+        self.all_repository_dependencies[ 'description' ] = description
+
+    def is_circular_repository_dependency( self, repository_key, repository_dependency ):
+        """
+        Return True if the received repository_dependency is a key in self.all_repository_dependencies
+        whose list of repository dependencies includes the received repository_key.
+        """
+        repository_dependency_as_key = self.get_repository_dependency_as_key( repository_dependency )
+        repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+        for key, val in self.all_repository_dependencies.items():
+            if key != repository_dependency_as_key:
+                continue
+            if repository_key_as_repository_dependency in val:
+                return True
+        return False
+
+    def populate_repository_dependency_objects_for_processing( self, current_repository_key, repository_dependencies_dict ):
+        """
+        The process that discovers all repository dependencies for a specified repository's changeset
+        revision uses this method to populate the following items for the current processing loop:
+        filtered_current_repository_key_rd_dicts, self.key_rd_dicts_to_be_processed,
+        self.handled_key_rd_dicts, self.all_repository_dependencies.  Each processing loop may discover
+        more repository dependencies, so this method is repeatedly called until all repository
+        dependencies have been discovered.
+        """
+        current_repository_key_rd_dicts = []
+        filtered_current_repository_key_rd_dicts = []
+        for rd_tup in repository_dependencies_dict[ 'repository_dependencies' ]:
+            new_key_rd_dict = {}
+            new_key_rd_dict[ current_repository_key ] = rd_tup
+            current_repository_key_rd_dicts.append( new_key_rd_dict )
+        if current_repository_key_rd_dicts and current_repository_key:
+            # Remove all repository dependencies that point to a revision within their own repository.
+            current_repository_key_rd_dicts = \
+                self.remove_repository_dependency_reference_to_self( current_repository_key_rd_dicts )
+        current_repository_key_rd_dicts = \
+            self.get_updated_changeset_revisions_for_repository_dependencies( current_repository_key_rd_dicts )
+        for key_rd_dict in current_repository_key_rd_dicts:
+            if self.filter_dependencies_needed_for_compiling:
+                # Filter out repository dependencies that are required only if compiling the dependent
+                # repository's tool dependency.
+                # TODO: this temporary work-around should be removed when the underlying framework
+                # support for handling only_if_compiling_contained_td-flagged repositories is completed.
+                key_rd_dict = self.filter_only_if_compiling_contained_td( key_rd_dict )
+            if key_rd_dict:
+                is_circular = False
+                in_handled_key_rd_dicts = self.in_key_rd_dicts( key_rd_dict, self.handled_key_rd_dicts )
+                in_key_rd_dicts_to_be_processed = self.in_key_rd_dicts( key_rd_dict, self.key_rd_dicts_to_be_processed )
+                if not in_handled_key_rd_dicts and not in_key_rd_dicts_to_be_processed:
+                    filtered_current_repository_key_rd_dicts.append( key_rd_dict )
+                    repository_dependency = key_rd_dict[ current_repository_key ]
+                    if current_repository_key in self.all_repository_dependencies:
+                        # Add all repository dependencies for the current repository into its entry
+                        # in self.all_repository_dependencies.
+                        all_repository_dependencies_val = self.all_repository_dependencies[ current_repository_key ]
+                        if repository_dependency not in all_repository_dependencies_val:
+                            all_repository_dependencies_val.append( repository_dependency )
+                            self.all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
+                    elif not self.in_all_repository_dependencies( current_repository_key, repository_dependency ):
+                        # Handle circular repository dependencies.
+                        if self.is_circular_repository_dependency( current_repository_key, repository_dependency ):
+                            is_circular = True
+                            self.handle_circular_repository_dependency( current_repository_key, repository_dependency )
+                        else:
+                            self.all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
+                    if not is_circular and self.can_add_to_key_rd_dicts( key_rd_dict, self.key_rd_dicts_to_be_processed ):
+                        new_key_rd_dict = {}
+                        new_key_rd_dict[ current_repository_key ] = repository_dependency
+                        self.key_rd_dicts_to_be_processed.append( new_key_rd_dict )
+        return filtered_current_repository_key_rd_dicts
+
+    def prune_invalid_repository_dependencies( self, repository_dependencies ):
+        """
+        Eliminate all invalid entries in the received repository_dependencies dictionary.  An
+        entry is invalid if the value list of the key/value pair is empty.  This occurs when an
+        invalid combination of tool shed, name, owner and changeset_revision is used and a
+        repository_metadata record is not found.
+        """
+        valid_repository_dependencies = {}
+        description = repository_dependencies.get( 'description', None )
+        root_key = repository_dependencies.get( 'root_key', None )
+        if root_key is None:
+            return valid_repository_dependencies
+        for key, value in repository_dependencies.items():
+            if key in [ 'description', 'root_key' ]:
+                continue
+            if value:
+                valid_repository_dependencies[ key ] = value
+        if valid_repository_dependencies:
+            valid_repository_dependencies[ 'description' ] = description
+            valid_repository_dependencies[ 'root_key' ] = root_key
+        return valid_repository_dependencies
+
+    def remove_from_key_rd_dicts( self, key_rd_dict, key_rd_dicts ):
+        """Eliminate the key_rd_dict from the list of key_rd_dicts if it is contained in the list."""
+        k = next(iter(key_rd_dict))
+        v = key_rd_dict[ k ]
+        clean_key_rd_dicts = []
+        for krd_dict in key_rd_dicts:
+            key = next(iter(krd_dict))
+            val = krd_dict[ key ]
+            if key == k and val == v:
+                continue
+            clean_key_rd_dicts.append( krd_dict )
+        return clean_key_rd_dicts
+
+    def remove_repository_dependency_reference_to_self( self, key_rd_dicts ):
+        """Remove all repository dependencies that point to a revision within its own repository."""
+        clean_key_rd_dicts = []
+        key = next(iter(key_rd_dicts[ 0 ]))
+        repository_tup = key.split( container_util.STRSEP )
+        rd_toolshed, rd_name, rd_owner, rd_changeset_revision, \
+            rd_prior_installation_required, \
+            rd_only_if_compiling_contained_td = \
+            common_util.parse_repository_dependency_tuple( repository_tup )
+        cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
+        for key_rd_dict in key_rd_dicts:
+            k = next(iter(key_rd_dict))
+            repository_dependency = key_rd_dict[ k ]
+            toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                common_util.parse_repository_dependency_tuple( repository_dependency )
+            cleaned_toolshed = common_util.remove_protocol_from_tool_shed_url( toolshed )
+            if cleaned_rd_toolshed == cleaned_toolshed and rd_name == name and rd_owner == owner:
+                debug_msg = "Removing repository dependency for repository %s owned by %s " % ( name, owner )
+                debug_msg += 'since it refers to a revision within itself.'
+                log.debug( debug_msg )
+            else:
+                new_key_rd_dict = {}
+                new_key_rd_dict[ key ] = repository_dependency
+                clean_key_rd_dicts.append( new_key_rd_dict )
+        return clean_key_rd_dicts
+
+    def set_filter_dependencies_needed_for_compiling( self, value ):
+        self.filter_dependencies_needed_for_compiling = asbool( value )
+
+    def update_circular_repository_dependencies( self, repository_key, repository_dependency, repository_dependencies ):
+        repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+        if repository_key_as_repository_dependency in repository_dependencies:
+            found = False
+            for tup in self.circular_repository_dependencies:
+                if repository_dependency in tup and repository_key_as_repository_dependency in tup:
+                    # The circular dependency has already been included.
+                    found = True
+            if not found:
+                new_circular_tup = [ repository_dependency, repository_key_as_repository_dependency ]
+                self.circular_repository_dependencies.append( new_circular_tup )
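
A standalone sketch of the pair-recording logic in update_circular_repository_dependencies
above; the function name and the repository tuples are illustrative only, not part of the
patch. It shows that a circular dependency between two repositories is recorded once,
regardless of the direction in which it is discovered:

def record_circular_dependency( circular_repository_dependencies, rd_a, rd_b ):
    # rd_a and rd_b are repository dependency lists such as
    # [ 'toolshed', 'name', 'owner', 'changeset_revision' ] (hypothetical values).
    for tup in circular_repository_dependencies:
        if rd_a in tup and rd_b in tup:
            # The circular dependency has already been included.
            return
    circular_repository_dependencies.append( [ rd_a, rd_b ] )

pairs = []
a = [ 'shed', 'repo_a', 'alice', 'rev1' ]
b = [ 'shed', 'repo_b', 'bob', 'rev2' ]
record_circular_dependency( pairs, a, b )
record_circular_dependency( pairs, b, a )  # Rediscovered in the other direction.
assert len( pairs ) == 1
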
diff --git a/lib/tool_shed/dependencies/tool/__init__.py b/lib/tool_shed/dependencies/tool/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/dependencies/tool/tag_attribute_handler.py b/lib/tool_shed/dependencies/tool/tag_attribute_handler.py
new file mode 100644
index 0000000..092bf12
--- /dev/null
+++ b/lib/tool_shed/dependencies/tool/tag_attribute_handler.py
@@ -0,0 +1,203 @@
+import copy
+import logging
+
+log = logging.getLogger( __name__ )
+
+
+class TagAttributeHandler( object ):
+
+    def __init__( self, app, rdd, unpopulate ):
+        self.app = app
+        self.altered = False
+        self.rdd = rdd
+        self.unpopulate = unpopulate
+
+    def process_action_tag_set( self, elem, message ):
+        # Here we're inside an <actions> tag set.  See http://localhost:9009/view/devteam/package_r_2_11_0 .
+        # <action>
+        #    <repository name="package_readline_6_2" owner="devteam">
+        #        <package name="readline" version="6.2" />
+        #    </repository>
+        # </action>
+        elem_altered = False
+        new_elem = copy.deepcopy( elem )
+        for sub_index, sub_elem in enumerate( elem ):
+            altered = False
+            error_message = ''
+            if sub_elem.tag == 'repository':
+                altered, new_sub_elem, error_message = \
+                    self.process_repository_tag_set( parent_elem=elem,
+                                                     elem_index=sub_index,
+                                                     elem=sub_elem,
+                                                     message=message )
+            if error_message and error_message not in message:
+                message += error_message
+            if altered:
+                if not self.altered:
+                    self.altered = True
+                if not elem_altered:
+                    elem_altered = True
+                new_elem[ sub_index ] = new_sub_elem
+        return elem_altered, new_elem, message
+
+    def process_actions_tag_set( self, elem, message, skip_actions_tags=True ):
+        # <actions>
+        #     <package name="libgtextutils" version="0.6">
+        #         <repository name="package_libgtextutils_0_6" owner="test" prior_installation_required="True" />
+        #     </package>
+        # </actions>
+        elem_altered = False
+        new_elem = copy.deepcopy( elem )
+        for sub_index, sub_elem in enumerate( elem ):
+            altered = False
+            error_message = ''
+            if sub_elem.tag == 'package':
+                altered, new_sub_elem, error_message = self.process_package_tag_set( elem=sub_elem,
+                                                                                     message=message,
+                                                                                     skip_actions_tags=skip_actions_tags )
+            elif sub_elem.tag == 'action':
+                # <action type="set_environment_for_install">
+                #    <repository name="package_readline_6_2" owner="devteam"">
+                #        <package name="readline" version="6.2" />
+                #    </repository>
+                # </action>
+                altered, new_sub_elem, error_message = self.process_action_tag_set( elem=sub_elem,
+                                                                                    message=message )
+            else:
+                # Inspect the sub elements of elem to locate all <repository> tags and
+                # populate them with toolshed and changeset_revision attributes if necessary.
+                altered, new_sub_elem, error_message = self.rdd.handle_sub_elem( parent_elem=elem,
+                                                                                 elem_index=sub_index,
+                                                                                 elem=sub_elem )
+            if error_message and error_message not in message:
+                message += error_message
+            if altered:
+                if not self.altered:
+                    self.altered = True
+                if not elem_altered:
+                    elem_altered = True
+                new_elem[ sub_index ] = new_sub_elem
+        return elem_altered, new_elem, message
+
+    def process_actions_group_tag_set( self, elem, message, skip_actions_tags=False ):
+        # Inspect all entries in the <actions_group> tag set, skipping <actions>
+        # tag sets that define os and architecture attributes.  We want to inspect
+        # only the last <actions> tag set contained within the <actions_group> tag
+        # set to see if a complex repository dependency is defined.
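+        # Illustration (hypothetical recipe): given
+        #   <actions_group>
+        #       <actions os="linux" architecture="x86_64"> ... </actions>
+        #       <actions> ... </actions>
+        #   </actions_group>
+        # only the final, os/architecture-free <actions> tag set is inspected
+        # when skip_actions_tags is True.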
+        elem_altered = False
+        new_elem = copy.deepcopy( elem )
+        for sub_index, sub_elem in enumerate( elem ):
+            altered = False
+            error_message = ''
+            if sub_elem.tag == 'actions':
+                if skip_actions_tags:
+                    # Skip all actions tags that include os or architecture attributes.
+                    system = sub_elem.get( 'os' )
+                    architecture = sub_elem.get( 'architecture' )
+                    if system or architecture:
+                        continue
+                altered, new_sub_elem, error_message = \
+                    self.process_actions_tag_set( elem=sub_elem,
+                                                  message=message,
+                                                  skip_actions_tags=skip_actions_tags )
+            if error_message and error_message not in message:
+                message += error_message
+            if altered:
+                if not self.altered:
+                    self.altered = True
+                if not elem_altered:
+                    elem_altered = True
+                new_elem[ sub_index ] = new_sub_elem
+        return elem_altered, new_elem, message
+
+    def process_config( self, root, skip_actions_tags=True ):
+        error_message = ''
+        new_root = copy.deepcopy( root )
+        if root.tag == 'tool_dependency':
+            for elem_index, elem in enumerate( root ):
+                altered = False
+                if elem.tag == 'package':
+                    # <package name="eigen" version="2.0.17">
+                    altered, new_elem, error_message = \
+                        self.process_package_tag_set( elem=elem,
+                                                      message=error_message,
+                                                      skip_actions_tags=skip_actions_tags )
+                if altered:
+                    if not self.altered:
+                        self.altered = True
+                    new_root[ elem_index ] = new_elem
+        else:
+            error_message = "Invalid tool_dependencies.xml file."
+        return self.altered, new_root, error_message
+
+    def process_install_tag_set( self, elem, message, skip_actions_tags=True ):
+        # <install version="1.0">
+        elem_altered = False
+        new_elem = copy.deepcopy( elem )
+        for sub_index, sub_elem in enumerate( elem ):
+            altered = False
+            error_message = ''
+            if sub_elem.tag == 'actions_group':
+                altered, new_sub_elem, error_message = \
+                    self.process_actions_group_tag_set( elem=sub_elem,
+                                                        message=message,
+                                                        skip_actions_tags=skip_actions_tags )
+            elif sub_elem.tag == 'actions':
+                altered, new_sub_elem, error_message = \
+                    self.process_actions_tag_set( elem=sub_elem,
+                                                  message=message,
+                                                  skip_actions_tags=skip_actions_tags )
+            else:
+                package_name = elem.get( 'name', '' )
+                package_version = elem.get( 'version', '' )
+                error_message += 'Version %s of the %s package cannot be installed because ' % \
+                    ( str( package_version ), str( package_name ) )
+                error_message += 'the recipe for installing the package is missing either an '
+                error_message += '<actions> tag set or an <actions_group> tag set.'
+            if error_message and error_message not in message:
+                message += error_message
+            if altered:
+                if not self.altered:
+                    self.altered = True
+                if not elem_altered:
+                    elem_altered = True
+                new_elem[ sub_index ] = new_sub_elem
+        return elem_altered, new_elem, message
+
+    def process_package_tag_set( self, elem, message, skip_actions_tags=True ):
+        elem_altered = False
+        new_elem = copy.deepcopy( elem )
+        for sub_index, sub_elem in enumerate( elem ):
+            altered = False
+            error_message = ''
+            if sub_elem.tag == 'install':
+                altered, new_sub_elem, error_message = \
+                    self.process_install_tag_set( elem=sub_elem,
+                                                  message=message,
+                                                  skip_actions_tags=skip_actions_tags )
+            elif sub_elem.tag == 'repository':
+                altered, new_sub_elem, error_message = \
+                    self.process_repository_tag_set( parent_elem=elem,
+                                                     elem_index=sub_index,
+                                                     elem=sub_elem,
+                                                     message=message )
+            if error_message and error_message not in message:
+                message += error_message
+            if altered:
+                if not self.altered:
+                    self.altered = True
+                if not elem_altered:
+                    elem_altered = True
+                new_elem[ sub_index ] = new_sub_elem
+        return elem_altered, new_elem, message
+
+    def process_repository_tag_set( self, parent_elem, elem_index, elem, message ):
+        # We have a complex repository dependency.
+        altered, new_elem, error_message = self.rdd.handle_complex_dependency_elem( parent_elem=parent_elem,
+                                                                                    elem_index=elem_index,
+                                                                                    elem=elem )
+        if error_message and error_message not in message:
+            message += error_message
+        if altered:
+            if not self.altered:
+                self.altered = True
+        return altered, new_elem, message
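
A minimal driver sketch for the TagAttributeHandler class above; the stub collaborator
and the XML are hypothetical, and in Galaxy the rdd argument is the repository dependency
attribute handler rather than a stub. With an empty <actions> tag set nothing is altered
and no error is reported:

import xml.etree.ElementTree as ET

from tool_shed.dependencies.tool.tag_attribute_handler import TagAttributeHandler


class StubRdd( object ):
    # Stand-in for the real rdd collaborator: both methods report that
    # nothing was altered, so process_config simply walks the tree.
    def handle_sub_elem( self, parent_elem, elem_index, elem ):
        return False, elem, ''

    def handle_complex_dependency_elem( self, parent_elem, elem_index, elem ):
        return False, elem, ''


root = ET.fromstring( '<tool_dependency>'
                      '<package name="eigen" version="2.0.17">'
                      '<install version="1.0"><actions /></install>'
                      '</package>'
                      '</tool_dependency>' )
handler = TagAttributeHandler( app=None, rdd=StubRdd(), unpopulate=False )
altered, new_root, error_message = handler.process_config( root, skip_actions_tags=True )
assert altered is False and error_message == ''
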
diff --git a/lib/tool_shed/galaxy_install/__init__.py b/lib/tool_shed/galaxy_install/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/galaxy_install/datatypes/__init__.py b/lib/tool_shed/galaxy_install/datatypes/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/galaxy_install/datatypes/custom_datatype_manager.py b/lib/tool_shed/galaxy_install/datatypes/custom_datatype_manager.py
new file mode 100644
index 0000000..3aebae9
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/datatypes/custom_datatype_manager.py
@@ -0,0 +1,214 @@
+import logging
+import os
+import tempfile
+
+from galaxy.util import asbool
+from tool_shed.util import basic_util, hg_util, shed_util_common as suc
+from tool_shed.util import tool_util, xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class CustomDatatypeLoader( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def alter_config_and_load_proprietary_datatypes( self, datatypes_config, relative_install_dir,
+                                                     deactivate=False, override=True ):
+        """
+        Parse a custom datatypes config (a datatypes_conf.xml file included in an installed
+        tool shed repository) and add information to appropriate element attributes that will
+        enable custom datatype class modules, datatype converters and display applications
+        to be discovered and properly imported by the datatypes registry.  The value of override
+        will be False when a tool shed repository is being installed.  Since installation is
+        occurring after the datatypes registry has been initialized, the registry's contents
+        cannot be overridden by conflicting data types.
+        """
+        tree, error_message = xml_util.parse_xml( datatypes_config )
+        if tree is None:
+            return None, None
+        datatypes_config_root = tree.getroot()
+        registration = datatypes_config_root.find( 'registration' )
+        if registration is None:
+            # We have valid XML, but not a valid custom datatypes definition.
+            return None, None
+        sniffers = datatypes_config_root.find( 'sniffers' )
+        converter_path, display_path = self.get_converter_and_display_paths( registration,
+                                                                             relative_install_dir )
+        if converter_path:
+            # Path to datatype converters
+            registration.attrib[ 'proprietary_converter_path' ] = converter_path
+        if display_path:
+            # Path to datatype display applications
+            registration.attrib[ 'proprietary_display_path' ] = display_path
+        relative_path_to_datatype_file_name = None
+        datatype_files = datatypes_config_root.find( 'datatype_files' )
+        datatype_class_modules = []
+        if datatype_files is not None:
+            # The <datatype_files> tag set contains any number of <datatype_file> tags.
+            # <datatype_files>
+            #    <datatype_file name="gmap.py"/>
+            #    <datatype_file name="metagenomics.py"/>
+            # </datatype_files>
+            # We'll add attributes to the datatype tag sets so that the modules can be properly imported
+            # by the datatypes registry.
+            for elem in datatype_files.findall( 'datatype_file' ):
+                datatype_file_name = elem.get( 'name', None )
+                if datatype_file_name:
+                    # Find the file in the installed repository.
+                    for root, dirs, files in os.walk( relative_install_dir ):
+                        if root.find( '.hg' ) < 0:
+                            for name in files:
+                                if name == datatype_file_name:
+                                    datatype_class_modules.append( os.path.join( root, name ) )
+                                    break
+                    break
+            if datatype_class_modules:
+                for relative_path_to_datatype_file_name in datatype_class_modules:
+                    datatype_file_name_path, datatype_file_name = os.path.split( relative_path_to_datatype_file_name )
+                    for elem in registration.findall( 'datatype' ):
+                        # Handle 'type' attribute which should be something like one of the following:
+                        # type="gmap:GmapDB"
+                        # type="galaxy.datatypes.gmap:GmapDB"
+                        dtype = elem.get( 'type', None )
+                        if dtype:
+                            fields = dtype.split( ':' )
+                            proprietary_datatype_module = fields[ 0 ]
+                            if proprietary_datatype_module.find( '.' ) >= 0:
+                                # Handle the case where datatype_module is "galaxy.datatypes.gmap".
+                                proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ]
+                            # The value of proprietary_path must be an absolute path due to job_working_directory.
+                            elem.attrib[ 'proprietary_path' ] = os.path.abspath( datatype_file_name_path )
+                            elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
+        # Temporarily persist the custom datatypes configuration file so it can be loaded into the
+        # datatypes registry.
+        fd, proprietary_datatypes_config = tempfile.mkstemp( prefix="tmp-toolshed-acalpd" )
+        os.write( fd, '<?xml version="1.0"?>\n' )
+        os.write( fd, '<datatypes>\n' )
+        os.write( fd, '%s' % xml_util.xml_to_string( registration ) )
+        if sniffers is not None:
+            os.write( fd, '%s' % xml_util.xml_to_string( sniffers ) )
+        os.write( fd, '</datatypes>\n' )
+        os.close( fd )
+        os.chmod( proprietary_datatypes_config, 0o644 )
+        # Load custom datatypes
+        self.app.datatypes_registry.load_datatypes( root_dir=self.app.config.root,
+                                                    config=proprietary_datatypes_config,
+                                                    deactivate=deactivate,
+                                                    override=override )
+        if deactivate:
+            # Reload the upload tool to eliminate deactivated datatype extensions from the file_type
+            # select list.
+            tool_util.reload_upload_tools( self.app )
+        else:
+            self.append_to_datatypes_registry_upload_file_formats( registration )
+            tool_util.reload_upload_tools( self.app )
+        if datatype_files is not None:
+            try:
+                os.unlink( proprietary_datatypes_config )
+            except OSError:
+                pass
+        return converter_path, display_path
+
+    def append_to_datatypes_registry_upload_file_formats( self, elem ):
+        # See if we have any datatypes that should be displayed in the upload tool's file_type select list.
+        for datatype_elem in elem.findall( 'datatype' ):
+            extension = datatype_elem.get( 'extension', None )
+            display_in_upload = datatype_elem.get( 'display_in_upload', None )
+            if extension is not None and display_in_upload is not None:
+                display_in_upload = asbool( str( display_in_upload ) )
+                if display_in_upload and extension not in self.app.datatypes_registry.upload_file_formats:
+                    self.app.datatypes_registry.upload_file_formats.append( extension )
+
+    def create_repository_dict_for_proprietary_datatypes( self, tool_shed, name, owner, installed_changeset_revision,
+                                                          tool_dicts, converter_path=None, display_path=None ):
+        return dict( tool_shed=tool_shed,
+                     repository_name=name,
+                     repository_owner=owner,
+                     installed_changeset_revision=installed_changeset_revision,
+                     tool_dicts=tool_dicts,
+                     converter_path=converter_path,
+                     display_path=display_path )
+
+    def get_converter_and_display_paths( self, registration_elem, relative_install_dir ):
+        """
+        Find the path to datatype converters and display applications included
+        in installed tool shed repositories.
+        """
+        converter_path = None
+        display_path = None
+        for elem in registration_elem.findall( 'datatype' ):
+            if not converter_path:
+                # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
+                # if it is not already set.  This requires developers to place all converters in the
+                # same subdirectory within the repository hierarchy.
+                for converter in elem.findall( 'converter' ):
+                    converter_config = converter.get( 'file', None )
+                    if converter_config:
+                        converter_config_file_name = basic_util.strip_path( converter_config )
+                        for root, dirs, files in os.walk( relative_install_dir ):
+                            if root.find( '.hg' ) < 0:
+                                for name in files:
+                                    if name == converter_config_file_name:
+                                        # The value of converter_path must be absolute due to job_working_directory.
+                                        converter_path = os.path.abspath( root )
+                                        break
+                    if converter_path:
+                        break
+            if not display_path:
+                # If any of the <datatype> tag sets contain <display> tags, set the display_path
+                # if it is not already set.  This requires developers to place all display applications
+                # in the same subdirectory within the repository hierarchy.
+                for display_app in elem.findall( 'display' ):
+                    display_config = display_app.get( 'file', None )
+                    if display_config:
+                        display_config_file_name = basic_util.strip_path( display_config )
+                        for root, dirs, files in os.walk( relative_install_dir ):
+                            if root.find( '.hg' ) < 0:
+                                for name in files:
+                                    if name == display_config_file_name:
+                                        # The value of display_path must be absolute due to job_working_directory.
+                                        display_path = os.path.abspath( root )
+                                        break
+                    if display_path:
+                        break
+            if converter_path and display_path:
+                break
+        return converter_path, display_path
+
+    def load_installed_datatype_converters( self, installed_repository_dict, deactivate=False ):
+        """Load or deactivate proprietary datatype converters."""
+        self.app.datatypes_registry.load_datatype_converters( self.app.toolbox,
+                                                              installed_repository_dict=installed_repository_dict,
+                                                              deactivate=deactivate )
+
+    def load_installed_datatypes( self, repository, relative_install_dir, deactivate=False ):
+        """
+        Load proprietary datatypes and return information needed for loading custom
+        datatypes converters and display applications later.
+        """
+        metadata = repository.metadata
+        repository_dict = None
+        datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, relative_install_dir )
+        if datatypes_config:
+            converter_path, display_path = \
+                self.alter_config_and_load_proprietary_datatypes( datatypes_config,
+                                                                  relative_install_dir,
+                                                                  deactivate=deactivate )
+            if converter_path or display_path:
+                # Create a dictionary of tool shed repository related information.
+                repository_dict = \
+                    self.create_repository_dict_for_proprietary_datatypes( tool_shed=repository.tool_shed,
+                                                                           name=repository.name,
+                                                                           owner=repository.owner,
+                                                                           installed_changeset_revision=repository.installed_changeset_revision,
+                                                                           tool_dicts=metadata.get( 'tools', [] ),
+                                                                           converter_path=converter_path,
+                                                                           display_path=display_path )
+        return repository_dict
+
+    def load_installed_display_applications( self, installed_repository_dict, deactivate=False ):
+        """Load or deactivate custom datatype display applications."""
+        self.app.datatypes_registry.load_display_applications( self.app, installed_repository_dict=installed_repository_dict,
+                                                               deactivate=deactivate )
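
A sketch of how get_converter_and_display_paths above locates converter configs; the
repository layout and file names are made up, and basic_util.strip_path is assumed to
reduce the file attribute to its basename. The app argument is unused on this code
path, so None suffices here (assumes Galaxy's lib directory is on sys.path):

import os
import tempfile
import xml.etree.ElementTree as ET

from tool_shed.galaxy_install.datatypes.custom_datatype_manager import CustomDatatypeLoader

# Fake installed repository: one converter config somewhere below the install dir.
install_dir = tempfile.mkdtemp()
converter_dir = os.path.join( install_dir, 'converters' )
os.makedirs( converter_dir )
open( os.path.join( converter_dir, 'fasta_to_tabular.xml' ), 'w' ).close()

registration = ET.fromstring( '<registration>'
                              '<datatype extension="fasta">'
                              '<converter file="fasta_to_tabular.xml" target_datatype="tabular"/>'
                              '</datatype>'
                              '</registration>' )

loader = CustomDatatypeLoader( app=None )
converter_path, display_path = loader.get_converter_and_display_paths( registration, install_dir )
assert converter_path == os.path.abspath( converter_dir )
assert display_path is None
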
diff --git a/lib/tool_shed/galaxy_install/dependency_display.py b/lib/tool_shed/galaxy_install/dependency_display.py
new file mode 100644
index 0000000..9978224
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/dependency_display.py
@@ -0,0 +1,619 @@
+import json
+import logging
+import os
+import threading
+
+from galaxy import util
+from tool_shed.galaxy_install.utility_containers import GalaxyUtilityContainerManager
+from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import readme_util
+from tool_shed.util import repository_util
+from tool_shed.util import tool_dependency_util
+from tool_shed.utility_containers import utility_container_manager
+
+log = logging.getLogger( __name__ )
+
+
+class DependencyDisplayer( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def add_installation_directories_to_tool_dependencies( self, tool_dependencies ):
+        """
+        Determine the path to the installation directory for each of the received
+        tool dependencies.  This path will be displayed within the tool dependencies
+        container on the select_tool_panel_section or reselect_tool_panel_section
+        pages when installing or reinstalling repositories that contain tools with
+        the defined tool dependencies.  The list of tool dependencies may be associated
+        with more than a single repository.
+        """
+        for dependency_key, requirements_dict in tool_dependencies.items():
+            if dependency_key in [ 'set_environment' ]:
+                continue
+            repository_name = requirements_dict.get( 'repository_name', 'unknown' )
+            repository_owner = requirements_dict.get( 'repository_owner', 'unknown' )
+            changeset_revision = requirements_dict.get( 'changeset_revision', 'unknown' )
+            dependency_name = requirements_dict[ 'name' ]
+            version = requirements_dict[ 'version' ]
+            if self.app.config.tool_dependency_dir:
+                root_dir = self.app.config.tool_dependency_dir
+            else:
+                root_dir = '<set your tool_dependency_dir in your Galaxy configuration file>'
+            install_dir = os.path.join( root_dir,
+                                        dependency_name,
+                                        version,
+                                        repository_owner,
+                                        repository_name,
+                                        changeset_revision )
+            requirements_dict[ 'install_dir' ] = install_dir
+            tool_dependencies[ dependency_key ] = requirements_dict
+        return tool_dependencies
+
+    def generate_message_for_invalid_repository_dependencies( self, metadata_dict, error_from_tuple=False ):
+        """
+        Get or generate and return an error message associated with an invalid repository dependency.
+        """
+        message = ''
+        if metadata_dict:
+            if error_from_tuple:
+                # Return the error messages associated with a set of one or more invalid repository
+                # dependency tuples.
+                invalid_repository_dependencies_dict = metadata_dict.get( 'invalid_repository_dependencies', None )
+                if invalid_repository_dependencies_dict is not None:
+                    invalid_repository_dependencies = \
+                        invalid_repository_dependencies_dict.get( 'invalid_repository_dependencies', [] )
+                    for repository_dependency_tup in invalid_repository_dependencies:
+                        toolshed, name, owner, changeset_revision, \
+                            prior_installation_required, \
+                            only_if_compiling_contained_td, error = \
+                            common_util.parse_repository_dependency_tuple( repository_dependency_tup, contains_error=True )
+                        if error:
+                            message += '%s  ' % str( error )
+            else:
+                # The complete dependency hierarchy could not be determined for a repository being installed into
+                # Galaxy.  This is likely due to invalid repository dependency definitions, so we'll get them from
+                # the metadata and parse them for display in an error message.  This will hopefully communicate the
+                # problem to the user in such a way that a resolution can be determined.
+                message += 'The complete dependency hierarchy could not be determined for this repository, so no required '
+                message += 'repositories will be installed.  This is likely due to invalid repository dependency definitions.  '
+                repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', None )
+                if repository_dependencies_dict is not None:
+                    rd_tups = repository_dependencies_dict.get( 'repository_dependencies', None )
+                    if rd_tups is not None:
+                        message += 'Here are the attributes of the dependencies defined for this repository to help determine the '
+                        message += 'cause of this problem.<br/>'
+                        message += '<table cellpadding="2" cellspacing="2">'
+                        message += '<tr><th>Tool shed</th><th>Repository name</th><th>Owner</th><th>Changeset revision</th>'
+                        message += '<th>Prior install required</th></tr>'
+                        for rd_tup in rd_tups:
+                            tool_shed, name, owner, changeset_revision, pir, oicct = \
+                                common_util.parse_repository_dependency_tuple( rd_tup )
+                            if util.asbool( pir ):
+                                pir_str = 'True'
+                            else:
+                                pir_str = ''
+                            message += '<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % \
+                                ( tool_shed, name, owner, changeset_revision, pir_str )
+                        message += '</table>'
+        return message
+
+    def generate_message_for_invalid_tool_dependencies( self, metadata_dict ):
+        """
+        Tool dependency definitions can only be invalid if they include a definition for a complex
+        repository dependency and the repository dependency definition is invalid.  This method
+        retrieves the error message associated with the invalid tool dependency for display in the
+        caller.
+        """
+        message = ''
+        if metadata_dict:
+            invalid_tool_dependencies = metadata_dict.get( 'invalid_tool_dependencies', None )
+            if invalid_tool_dependencies:
+                for td_key, requirement_dict in invalid_tool_dependencies.items():
+                    error = requirement_dict.get( 'error', None )
+                    if error:
+                        message = '%s  ' % str( error )
+        return message
+
+    def generate_message_for_orphan_tool_dependencies( self, repository, metadata_dict ):
+        """
+        The designation of a ToolDependency into the "orphan" category has evolved over time,
+        and is significantly restricted since the introduction of the TOOL_DEPENDENCY_DEFINITION
+        repository type.  This designation is still critical, however, in that it handles the
+        case where a repository contains both tools and a tool_dependencies.xml file, but the
+        definition in the tool_dependencies.xml file is in no way related to anything defined
+        by any of the contained tools' <requirements> tag sets.  This is important in that it is
+        often a result of a typo (e.g., dependency name or version) that differs between the tool
+        dependency definition within the tool_dependencies.xml file and what is defined in the
+        tool config's <requirements> tag sets.  In these cases, the user should be presented with
+        a warning message, and this warning message is in fact displayed if the following
+        is_orphan attribute is True.  This is tricky because in some cases it may be intentional,
+        and tool dependencies that are categorized as "orphan" are in fact valid.
+        """
+        has_orphan_package_dependencies = False
+        has_orphan_set_environment_dependencies = False
+        message = ''
+        package_orphans_str = ''
+        set_environment_orphans_str = ''
+        # Tool dependencies are categorized as orphan only if the repository contains tools.
+        if metadata_dict:
+            tools = metadata_dict.get( 'tools', [] )
+            tool_dependencies = metadata_dict.get( 'tool_dependencies', {} )
+            # The use of the orphan_tool_dependencies category in metadata has been deprecated,
+            # but we still need to check in case the metadata is out of date.
+            orphan_tool_dependencies = metadata_dict.get( 'orphan_tool_dependencies', {} )
+            # Updating should cause no problems here since a tool dependency cannot be included
+            # in both dictionaries.
+            tool_dependencies.update( orphan_tool_dependencies )
+            if tool_dependencies and tools:
+                for td_key, requirements_dict in tool_dependencies.items():
+                    if td_key == 'set_environment':
+                        # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
+                        for env_requirements_dict in requirements_dict:
+                            name = env_requirements_dict[ 'name' ]
+                            type = env_requirements_dict[ 'type' ]
+                            if self.tool_dependency_is_orphan( type, name, None, tools ):
+                                if not has_orphan_set_environment_dependencies:
+                                    has_orphan_set_environment_dependencies = True
+                                set_environment_orphans_str += "<b>* name:</b> %s, <b>type:</b> %s<br/>" % \
+                                    ( str( name ), str( type ) )
+                    else:
+                        # "R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
+                        name = requirements_dict[ 'name' ]
+                        type = requirements_dict[ 'type' ]
+                        version = requirements_dict[ 'version' ]
+                        if self.tool_dependency_is_orphan( type, name, version, tools ):
+                            if not has_orphan_package_dependencies:
+                                has_orphan_package_dependencies = True
+                            package_orphans_str += "<b>* name:</b> %s, <b>type:</b> %s, <b>version:</b> %s<br/>" % \
+                                ( str( name ), str( type ), str( version ) )
+        if has_orphan_package_dependencies:
+            message += "The settings for <b>name</b>, <b>version</b> and <b>type</b> from a "
+            message += "contained tool configuration file's <b>requirement</b> tag does not match "
+            message += "the information for the following tool dependency definitions in the "
+            message += "<b>tool_dependencies.xml</b> file, so these tool dependencies have no "
+            message += "relationship with any tools within this repository.<br/>"
+            message += package_orphans_str
+        if has_orphan_set_environment_dependencies:
+            message += "The settings for <b>name</b> and <b>type</b> from a contained tool "
+            message += "configuration file's <b>requirement</b> tag does not match the information "
+            message += "for the following tool dependency definitions in the <b>tool_dependencies.xml</b> "
+            message += "file, so these tool dependencies have no relationship with any tools within "
+            message += "this repository.<br/>"
+            message += set_environment_orphans_str
+        return message
+
+    def get_installed_and_missing_tool_dependencies_for_installed_repository( self, repository, all_tool_dependencies ):
+        """
+        Return the lists of installed tool dependencies and missing tool dependencies for a Tool Shed
+        repository that has been installed into Galaxy.
+        """
+        if all_tool_dependencies:
+            tool_dependencies = {}
+            missing_tool_dependencies = {}
+            for td_key, val in all_tool_dependencies.items():
+                if td_key in [ 'set_environment' ]:
+                    for index, td_info_dict in enumerate( val ):
+                        name = td_info_dict[ 'name' ]
+                        version = None
+                        type = td_info_dict[ 'type' ]
+                        tool_dependency = tool_dependency_util.get_tool_dependency_by_name_type_repository( self.app,
+                                                                                                            repository,
+                                                                                                            name,
+                                                                                                            type )
+                        if tool_dependency:
+                            td_info_dict[ 'repository_id' ] = repository.id
+                            td_info_dict[ 'tool_dependency_id' ] = tool_dependency.id
+                            if tool_dependency.status:
+                                tool_dependency_status = str( tool_dependency.status )
+                            else:
+                                tool_dependency_status = 'Never installed'
+                            td_info_dict[ 'status' ] = tool_dependency_status
+                            val[ index ] = td_info_dict
+                            if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED:
+                                tool_dependencies[ td_key ] = val
+                            else:
+                                missing_tool_dependencies[ td_key ] = val
+                else:
+                    name = val[ 'name' ]
+                    version = val[ 'version' ]
+                    type = val[ 'type' ]
+                    tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type_repository( self.app,
+                                                                                                                repository,
+                                                                                                                name,
+                                                                                                                version,
+                                                                                                                type )
+                    if tool_dependency:
+                        val[ 'repository_id' ] = repository.id
+                        val[ 'tool_dependency_id' ] = tool_dependency.id
+                        if tool_dependency.status:
+                            tool_dependency_status = str( tool_dependency.status )
+                        else:
+                            tool_dependency_status = 'Never installed'
+                        val[ 'status' ] = tool_dependency_status
+                        if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED:
+                            tool_dependencies[ td_key ] = val
+                        else:
+                            missing_tool_dependencies[ td_key ] = val
+        else:
+            tool_dependencies = None
+            missing_tool_dependencies = None
+        return tool_dependencies, missing_tool_dependencies
+
+    def merge_containers_dicts_for_new_install( self, containers_dicts ):
+        """
+        When installing one or more tool shed repositories for the first time, the received list of
+        containers_dicts contains a containers_dict for each repository being installed.  Since the
+        repositories are being installed for the first time, all entries are None except the repository
+        dependencies and tool dependencies.  The entries for missing dependencies are all None since
+        they have previously been merged into the installed dependencies.  This method will merge the
+        dependency entries into a single container and return it for display.
+        """
+        new_containers_dict = dict( readme_files=None,
+                                    datatypes=None,
+                                    missing_repository_dependencies=None,
+                                    repository_dependencies=None,
+                                    missing_tool_dependencies=None,
+                                    tool_dependencies=None,
+                                    invalid_tools=None,
+                                    valid_tools=None,
+                                    workflows=None )
+        if containers_dicts:
+            lock = threading.Lock()
+            lock.acquire( True )
+            try:
+                repository_dependencies_root_folder = None
+                tool_dependencies_root_folder = None
+                # Use a folder id that is hopefully unique.
+                folder_id = 867
+                for old_container_dict in containers_dicts:
+                    # Merge repository_dependencies.
+                    old_container_repository_dependencies_root = old_container_dict[ 'repository_dependencies' ]
+                    if old_container_repository_dependencies_root:
+                        if repository_dependencies_root_folder is None:
+                            repository_dependencies_root_folder = utility_container_manager.Folder( id=folder_id,
+                                                                                                    key='root',
+                                                                                                    label='root',
+                                                                                                    parent=None )
+                            folder_id += 1
+                            repository_dependencies_folder = utility_container_manager.Folder( id=folder_id,
+                                                                                               key='merged',
+                                                                                               label='Repository dependencies',
+                                                                                               parent=repository_dependencies_root_folder )
+                            folder_id += 1
+                        # The old_container_repository_dependencies_root will be a root folder containing a single sub_folder.
+                        old_container_repository_dependencies_folder = old_container_repository_dependencies_root.folders[ 0 ]
+                        # Change the folder id so it won't conflict with others being merged.
+                        old_container_repository_dependencies_folder.id = folder_id
+                        folder_id += 1
+                        repository_components_tuple = \
+                            container_util.get_components_from_key( old_container_repository_dependencies_folder.key )
+                        components_list = repository_util.extract_components_from_tuple( repository_components_tuple )
+                        name = components_list[ 1 ]
+                        # Generate the label by retrieving the repository name.
+                        old_container_repository_dependencies_folder.label = str( name )
+                        repository_dependencies_folder.folders.append( old_container_repository_dependencies_folder )
+                    # Merge tool_dependencies.
+                    old_container_tool_dependencies_root = old_container_dict[ 'tool_dependencies' ]
+                    if old_container_tool_dependencies_root:
+                        if tool_dependencies_root_folder is None:
+                            tool_dependencies_root_folder = utility_container_manager.Folder( id=folder_id,
+                                                                                              key='root',
+                                                                                              label='root',
+                                                                                              parent=None )
+                            folder_id += 1
+                            tool_dependencies_folder = utility_container_manager.Folder( id=folder_id,
+                                                                                         key='merged',
+                                                                                         label='Tool dependencies',
+                                                                                         parent=tool_dependencies_root_folder )
+                            folder_id += 1
+                        td_list = [ td.listify for td in tool_dependencies_folder.tool_dependencies ]
+                        # The old_container_tool_dependencies_root will be a root folder containing a single sub_folder.
+                        old_container_tool_dependencies_folder = old_container_tool_dependencies_root.folders[ 0 ]
+                        for td in old_container_tool_dependencies_folder.tool_dependencies:
+                            if td.listify not in td_list:
+                                tool_dependencies_folder.tool_dependencies.append( td )
+                if repository_dependencies_root_folder:
+                    repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
+                    new_containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+                if tool_dependencies_root_folder:
+                    tool_dependencies_root_folder.folders.append( tool_dependencies_folder )
+                    new_containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+            except Exception as e:
+                log.debug( "Exception in merge_containers_dicts_for_new_install: %s" % str( e ) )
+            finally:
+                lock.release()
+        return new_containers_dict
+
+    def merge_missing_repository_dependencies_to_installed_container( self, containers_dict ):
+        """
+        Merge the list of missing repository dependencies into the list of installed
+        repository dependencies.
+        """
+        missing_rd_container_root = containers_dict.get( 'missing_repository_dependencies', None )
+        if missing_rd_container_root:
+            # The missing_rd_container_root will be a root folder containing a single sub_folder.
+            missing_rd_container = missing_rd_container_root.folders[ 0 ]
+            installed_rd_container_root = containers_dict.get( 'repository_dependencies', None )
+            # The installed_rd_container_root will be a root folder containing a single sub_folder.
+            if installed_rd_container_root:
+                installed_rd_container = installed_rd_container_root.folders[ 0 ]
+                installed_rd_container.label = 'Repository dependencies'
+                for index, rd in enumerate( missing_rd_container.repository_dependencies ):
+                    # Skip the header row.
+                    if index == 0:
+                        continue
+                    installed_rd_container.repository_dependencies.append( rd )
+                installed_rd_container_root.folders = [ installed_rd_container ]
+                containers_dict[ 'repository_dependencies' ] = installed_rd_container_root
+            else:
+                # Change the folder label from 'Missing repository dependencies' to be
+                # 'Repository dependencies' for display.
+                root_container = containers_dict[ 'missing_repository_dependencies' ]
+                for sub_container in root_container.folders:
+                    # There should only be 1 sub-folder.
+                    sub_container.label = 'Repository dependencies'
+                containers_dict[ 'repository_dependencies' ] = root_container
+        containers_dict[ 'missing_repository_dependencies' ] = None
+        return containers_dict
+
+    def merge_missing_tool_dependencies_to_installed_container( self, containers_dict ):
+        """
+        Merge the list of missing tool dependencies into the list of installed tool
+        dependencies.
+        """
+        missing_td_container_root = containers_dict.get( 'missing_tool_dependencies', None )
+        if missing_td_container_root:
+            # The missing_td_container_root will be a root folder containing a single sub_folder.
+            missing_td_container = missing_td_container_root.folders[ 0 ]
+            installed_td_container_root = containers_dict.get( 'tool_dependencies', None )
+            # The installed_td_container_root will be a root folder containing a single sub_folder.
+            if installed_td_container_root:
+                installed_td_container = installed_td_container_root.folders[ 0 ]
+                installed_td_container.label = 'Tool dependencies'
+                for index, td in enumerate( missing_td_container.tool_dependencies ):
+                    # Skip the header row.
+                    if index == 0:
+                        continue
+                    installed_td_container.tool_dependencies.append( td )
+                installed_td_container_root.folders = [ installed_td_container ]
+                containers_dict[ 'tool_dependencies' ] = installed_td_container_root
+            else:
+                # Change the folder label from 'Missing tool dependencies' to be
+                # 'Tool dependencies' for display.
+                root_container = containers_dict[ 'missing_tool_dependencies' ]
+                for sub_container in root_container.folders:
+                    # There should only be 1 subfolder.
+                    sub_container.label = 'Tool dependencies'
+                containers_dict[ 'tool_dependencies' ] = root_container
+        containers_dict[ 'missing_tool_dependencies' ] = None
+        return containers_dict
+
+    def populate_containers_dict_for_new_install( self, tool_shed_url, tool_path, readme_files_dict,
+                                                  installed_repository_dependencies, missing_repository_dependencies,
+                                                  installed_tool_dependencies, missing_tool_dependencies,
+                                                  updating=False ):
+        """
+        Return the populated containers for a repository being installed for the first time
+        or for an installed repository that is being updated and the updates include newly
+        defined repository (and possibly tool) dependencies.
+        """
+        installed_tool_dependencies, missing_tool_dependencies = \
+            self.populate_tool_dependencies_dicts( tool_shed_url=tool_shed_url,
+                                                   tool_path=tool_path,
+                                                   repository_installed_tool_dependencies=installed_tool_dependencies,
+                                                   repository_missing_tool_dependencies=missing_tool_dependencies,
+                                                   required_repo_info_dicts=None )
+        # Most of the repository contents are set to None since we don't yet know what they are.
+        gucm = GalaxyUtilityContainerManager( self.app )
+        containers_dict = gucm.build_repository_containers( repository=None,
+                                                            datatypes=None,
+                                                            invalid_tools=None,
+                                                            missing_repository_dependencies=missing_repository_dependencies,
+                                                            missing_tool_dependencies=missing_tool_dependencies,
+                                                            readme_files_dict=readme_files_dict,
+                                                            repository_dependencies=installed_repository_dependencies,
+                                                            tool_dependencies=installed_tool_dependencies,
+                                                            valid_tools=None,
+                                                            workflows=None,
+                                                            valid_data_managers=None,
+                                                            invalid_data_managers=None,
+                                                            data_managers_errors=None,
+                                                            new_install=True,
+                                                            reinstalling=False )
+        if not updating:
+            # If we're installing a new repository and not updating an installed repository, we can merge
+            # the missing_repository_dependencies container contents to the installed_repository_dependencies
+            # container.  When updating an installed repository, merging will result in losing newly defined
+            # dependencies included in the updates.
+            containers_dict = self.merge_missing_repository_dependencies_to_installed_container( containers_dict )
+            # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
+            containers_dict = self.merge_missing_tool_dependencies_to_installed_container( containers_dict )
+        return containers_dict
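+
+    # Hedged usage sketch for the method above; the argument values are
+    # hypothetical placeholders, not taken from a real install request:
+    #
+    #     containers_dict = manager.populate_containers_dict_for_new_install(
+    #         tool_shed_url='https://toolshed.g2.bx.psu.edu',
+    #         tool_path='../shed_tools',
+    #         readme_files_dict=None,
+    #         installed_repository_dependencies=None,
+    #         missing_repository_dependencies=None,
+    #         installed_tool_dependencies=None,
+    #         missing_tool_dependencies=None,
+    #         updating=False )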
+
+    def populate_containers_dict_from_repository_metadata( self, tool_shed_url, tool_path, repository, reinstalling=False,
+                                                           required_repo_info_dicts=None ):
+        """
+        Retrieve necessary information from the received repository's metadata to populate the
+        containers_dict for display.  This method is called only from Galaxy (not the tool shed)
+        when displaying repository dependencies for installed repositories and when displaying
+        them for uninstalled repositories that are being reinstalled.
+        """
+        metadata = repository.metadata
+        if metadata:
+            # Handle proprietary datatypes.
+            datatypes = metadata.get( 'datatypes', None )
+            # Handle invalid tools.
+            invalid_tools = metadata.get( 'invalid_tools', None )
+            # Handle README files.
+            if repository.has_readme_files:
+                if reinstalling or repository.status not in \
+                    [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED,
+                      self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
+                    # Since we're reinstalling or the repository is neither deactivated nor fully
+                    # installed, send a request to the tool shed to get the README files.
+                    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
+                    params = dict( name=str( repository.name ),
+                                   owner=str( repository.owner ),
+                                   changeset_revision=str( repository.installed_changeset_revision ) )
+                    pathspec = [ 'repository', 'get_readme_files' ]
+                    raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+                    readme_files_dict = json.loads( raw_text )
+                else:
+                    readme_files_dict = readme_util.build_readme_files_dict( self.app,
+                                                                             repository,
+                                                                             repository.changeset_revision,
+                                                                             repository.metadata, tool_path )
+            else:
+                readme_files_dict = None
+            # Handle repository dependencies.
+            installed_repository_dependencies, missing_repository_dependencies = \
+                self.app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository )
+            # Handle the current repository's tool dependencies.
+            repository_tool_dependencies = metadata.get( 'tool_dependencies', None )
+            # Make sure to display missing tool dependencies as well.
+            repository_invalid_tool_dependencies = metadata.get( 'invalid_tool_dependencies', None )
+            if repository_invalid_tool_dependencies is not None:
+                if repository_tool_dependencies is None:
+                    repository_tool_dependencies = {}
+                repository_tool_dependencies.update( repository_invalid_tool_dependencies )
+            repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
+                self.get_installed_and_missing_tool_dependencies_for_installed_repository( repository,
+                                                                                           repository_tool_dependencies )
+            if reinstalling:
+                installed_tool_dependencies, missing_tool_dependencies = \
+                    self.populate_tool_dependencies_dicts( tool_shed_url,
+                                                           tool_path,
+                                                           repository_installed_tool_dependencies,
+                                                           repository_missing_tool_dependencies,
+                                                           required_repo_info_dicts )
+            else:
+                installed_tool_dependencies = repository_installed_tool_dependencies
+                missing_tool_dependencies = repository_missing_tool_dependencies
+            # Handle valid tools.
+            valid_tools = metadata.get( 'tools', None )
+            # Handle workflows.
+            workflows = metadata.get( 'workflows', None )
+            # Handle Data Managers.
+            valid_data_managers = None
+            invalid_data_managers = None
+            data_managers_errors = None
+            if 'data_manager' in metadata:
+                valid_data_managers = metadata['data_manager'].get( 'data_managers', None )
+                invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
+                data_managers_errors = metadata['data_manager'].get( 'messages', None )
+            gucm = GalaxyUtilityContainerManager( self.app )
+            containers_dict = gucm.build_repository_containers( repository=repository,
+                                                                datatypes=datatypes,
+                                                                invalid_tools=invalid_tools,
+                                                                missing_repository_dependencies=missing_repository_dependencies,
+                                                                missing_tool_dependencies=missing_tool_dependencies,
+                                                                readme_files_dict=readme_files_dict,
+                                                                repository_dependencies=installed_repository_dependencies,
+                                                                tool_dependencies=installed_tool_dependencies,
+                                                                valid_tools=valid_tools,
+                                                                workflows=workflows,
+                                                                valid_data_managers=valid_data_managers,
+                                                                invalid_data_managers=invalid_data_managers,
+                                                                data_managers_errors=data_managers_errors,
+                                                                new_install=False,
+                                                                reinstalling=reinstalling )
+        else:
+            containers_dict = dict( datatypes=None,
+                                    invalid_tools=None,
+                                    readme_files_dict=None,
+                                    repository_dependencies=None,
+                                    tool_dependencies=None,
+                                    valid_tools=None,
+                                    workflows=None )
+        return containers_dict
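+
+    # Note the asymmetry above: with metadata present the containers come from
+    # build_repository_containers(); without metadata the method falls back to a
+    # dict whose dependency-related keys are all None, e.g. (illustrative call,
+    # `manager` being a hypothetical instance of this class):
+    #
+    #     containers_dict = manager.populate_containers_dict_from_repository_metadata(
+    #         tool_shed_url, tool_path, repository, reinstalling=True )
+    #     containers_dict[ 'tool_dependencies' ]  # -> None when repository.metadata is empty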
+
+    def populate_tool_dependencies_dicts( self, tool_shed_url, tool_path, repository_installed_tool_dependencies,
+                                          repository_missing_tool_dependencies, required_repo_info_dicts ):
+        """
+        Return the populated installed_tool_dependencies and missing_tool_dependencies dictionaries
+        for all repositories defined by entries in the received required_repo_info_dicts.
+        """
+        installed_tool_dependencies = None
+        missing_tool_dependencies = None
+        if repository_installed_tool_dependencies is None:
+            repository_installed_tool_dependencies = {}
+        else:
+            # Add the install_dir attribute to the tool_dependencies.
+            repository_installed_tool_dependencies = \
+                self.add_installation_directories_to_tool_dependencies( repository_installed_tool_dependencies )
+        if repository_missing_tool_dependencies is None:
+            repository_missing_tool_dependencies = {}
+        else:
+            # Add the install_dir attribute to the tool_dependencies.
+            repository_missing_tool_dependencies = \
+                self.add_installation_directories_to_tool_dependencies( repository_missing_tool_dependencies )
+        if required_repo_info_dicts:
+            # Handle the tool dependencies defined for each of the repository's repository dependencies.
+            for rid in required_repo_info_dicts:
+                for name, repo_info_tuple in rid.items():
+                    description, repository_clone_url, changeset_revision, \
+                        ctx_rev, repository_owner, repository_dependencies, \
+                        tool_dependencies = \
+                        repository_util.get_repo_info_tuple_contents( repo_info_tuple )
+                    if tool_dependencies:
+                        # Add the install_dir attribute to the tool_dependencies.
+                        tool_dependencies = self.add_installation_directories_to_tool_dependencies( tool_dependencies )
+                        # The required_repository may have been installed with a different changeset revision.
+                        required_repository, installed_changeset_revision = \
+                            repository_util.repository_was_previously_installed( self.app,
+                                                                                 tool_shed_url,
+                                                                                 name,
+                                                                                 repo_info_tuple,
+                                                                                 from_tip=False )
+                        if required_repository:
+                            required_repository_installed_tool_dependencies, required_repository_missing_tool_dependencies = \
+                                self.get_installed_and_missing_tool_dependencies_for_installed_repository( required_repository,
+                                                                                                           tool_dependencies )
+                            if required_repository_installed_tool_dependencies:
+                                # Add the install_dir attribute to the tool_dependencies.
+                                required_repository_installed_tool_dependencies = \
+                                    self.add_installation_directories_to_tool_dependencies( required_repository_installed_tool_dependencies )
+                                for td_key, td_dict in required_repository_installed_tool_dependencies.items():
+                                    if td_key not in repository_installed_tool_dependencies:
+                                        repository_installed_tool_dependencies[ td_key ] = td_dict
+                            if required_repository_missing_tool_dependencies:
+                                # Add the install_dir attribute to the tool_dependencies.
+                                required_repository_missing_tool_dependencies = \
+                                    self.add_installation_directories_to_tool_dependencies( required_repository_missing_tool_dependencies )
+                                for td_key, td_dict in required_repository_missing_tool_dependencies.items():
+                                    if td_key not in repository_missing_tool_dependencies:
+                                        repository_missing_tool_dependencies[ td_key ] = td_dict
+        if repository_installed_tool_dependencies:
+            installed_tool_dependencies = repository_installed_tool_dependencies
+        if repository_missing_tool_dependencies:
+            missing_tool_dependencies = repository_missing_tool_dependencies
+        return installed_tool_dependencies, missing_tool_dependencies
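+
+    # For reference, get_repo_info_tuple_contents() above unpacks each
+    # repo_info_tuple into seven fields, in this order:
+    #
+    #     ( description, repository_clone_url, changeset_revision,
+    #       ctx_rev, repository_owner, repository_dependencies, tool_dependencies )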
+
+    def tool_dependency_is_orphan( self, type, name, version, tools ):
+        """
+        Determine if the combination of the received type, name and version is defined in the <requirement>
+        tag for at least one tool in the received list of tools.  If not, the tool dependency defined by the
+        combination is considered an orphan in its repository in the tool shed.
+        """
+        if type == 'package':
+            if name and version:
+                for tool_dict in tools:
+                    requirements = tool_dict.get( 'requirements', [] )
+                    for requirement_dict in requirements:
+                        req_name = requirement_dict.get( 'name', None )
+                        req_version = requirement_dict.get( 'version', None )
+                        req_type = requirement_dict.get( 'type', None )
+                        if req_name == name and req_version == version and req_type == type:
+                            return False
+        elif type == 'set_environment':
+            if name:
+                for tool_dict in tools:
+                    requirements = tool_dict.get( 'requirements', [] )
+                    for requirement_dict in requirements:
+                        req_name = requirement_dict.get( 'name', None )
+                        req_type = requirement_dict.get( 'type', None )
+                        if req_name == name and req_type == type:
+                            return False
+        return True
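+
+    # Example with hypothetical tool metadata: a 'package' dependency is an orphan
+    # only when no tool lists a matching <requirement> (`manager` is a hypothetical
+    # instance of this class):
+    #
+    #     tools = [ { 'requirements': [ { 'name': 'samtools', 'version': '0.1.19', 'type': 'package' } ] } ]
+    #     manager.tool_dependency_is_orphan( 'package', 'samtools', '0.1.19', tools )  # -> False
+    #     manager.tool_dependency_is_orphan( 'package', 'bwa', '0.7.12', tools )       # -> True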
diff --git a/lib/tool_shed/galaxy_install/grids/__init__.py b/lib/tool_shed/galaxy_install/grids/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
new file mode 100644
index 0000000..f3824bb
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
@@ -0,0 +1,431 @@
+import logging
+
+from sqlalchemy import false, or_
+
+from galaxy import util
+from galaxy.model import tool_shed_install
+from galaxy.web import url_for
+from galaxy.web.framework.helpers import grids, iff
+from tool_shed.util import repository_util, tool_dependency_util
+
+log = logging.getLogger( __name__ )
+
+
+def generate_deprecated_repository_img_str( include_mouse_over=False ):
+    if include_mouse_over:
+        deprecated_tip_str = 'class="icon-button" title="This repository is deprecated in the Tool Shed"'
+    else:
+        deprecated_tip_str = ''
+    return '<img src="%s/images/icon_error_sml.gif" %s/>' % ( url_for( '/static' ), deprecated_tip_str )
+
+
+def generate_includes_workflows_img_str( include_mouse_over=False ):
+    if include_mouse_over:
+        deprecated_tip_str = 'class="icon-button" title="This repository contains exported workflows"'
+    else:
+        deprecated_tip_str = ''
+    return '<img src="%s/images/fugue/gear.png" %s/>' % ( url_for( '/static' ), deprecated_tip_str )
+
+
+def generate_latest_revision_img_str( include_mouse_over=False ):
+    if include_mouse_over:
+        latest_revision_tip_str = 'class="icon-button" title="This is the latest installable revision of this repository"'
+    else:
+        latest_revision_tip_str = ''
+    return '<img src="%s/june_2007_style/blue/ok_small.png" %s/>' % ( url_for( '/static' ), latest_revision_tip_str )
+
+
+def generate_revision_updates_img_str( include_mouse_over=False ):
+    if include_mouse_over:
+        revision_updates_tip_str = 'class="icon-button" title="Updates are available in the Tool Shed for this revision"'
+    else:
+        revision_updates_tip_str = ''
+    return '<img src="%s/images/icon_warning_sml.gif" %s/>' % ( url_for( '/static' ), revision_updates_tip_str )
+
+
+def generate_revision_upgrades_img_str( include_mouse_over=False ):
+    if include_mouse_over:
+        revision_upgrades_tip_str = 'class="icon-button" title="A newer installable revision is available for this repository"'
+    else:
+        revision_upgrades_tip_str = ''
+    return '<img src="%s/images/up.gif" %s/>' % ( url_for( '/static' ), revision_upgrades_tip_str )
+
+
+def generate_unknown_img_str( include_mouse_over=False ):
+    if include_mouse_over:
+        unknown_tip_str = 'class="icon-button" title="Unable to get information from the Tool Shed"'
+    else:
+        unknown_tip_str = ''
+    return '<img src="%s/june_2007_style/blue/question-octagon-frame.png" %s/>' % ( url_for( '/static' ), unknown_tip_str )
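+
+# For example, assuming url_for( '/static' ) resolves to '/static', calling
+# generate_unknown_img_str( include_mouse_over=True ) returns:
+#     '<img src="/static/june_2007_style/blue/question-octagon-frame.png" class="icon-button" title="Unable to get information from the Tool Shed"/>'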
+
+
+class InstalledRepositoryGrid( grids.Grid ):
+
+    class ToolShedStatusColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            if tool_shed_repository.tool_shed_status:
+                tool_shed_status_str = ''
+                if tool_shed_repository.is_deprecated_in_tool_shed:
+                    tool_shed_status_str += generate_deprecated_repository_img_str( include_mouse_over=True )
+                if tool_shed_repository.is_latest_installable_revision:
+                    tool_shed_status_str += generate_latest_revision_img_str( include_mouse_over=True )
+                if tool_shed_repository.revision_update_available:
+                    tool_shed_status_str += generate_revision_updates_img_str( include_mouse_over=True )
+                if tool_shed_repository.upgrade_available:
+                    tool_shed_status_str += generate_revision_upgrades_img_str( include_mouse_over=True )
+                if tool_shed_repository.includes_workflows:
+                    tool_shed_status_str += generate_includes_workflows_img_str( include_mouse_over=True )
+            else:
+                tool_shed_status_str = generate_unknown_img_str( include_mouse_over=True )
+            return tool_shed_status_str
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return str( tool_shed_repository.name )
+
+    class DescriptionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return util.unicodify( tool_shed_repository.description )
+
+    class OwnerColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return str( tool_shed_repository.owner )
+
+    class RevisionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return str( tool_shed_repository.changeset_revision )
+
+    class StatusColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return repository_util.get_tool_shed_repository_status_label( trans.app, tool_shed_repository )
+
+    class ToolShedColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return tool_shed_repository.tool_shed
+
+    class DeletedColumn( grids.DeletedColumn ):
+
+        def get_accepted_filters( self ):
+            """ Returns a list of accepted filters for this column. """
+            accepted_filter_labels_and_vals = { "Active": "False", "Deactivated or uninstalled": "True", "All": "All" }
+            accepted_filters = []
+            for label, val in accepted_filter_labels_and_vals.items():
+                args = { self.key: val }
+                accepted_filters.append( grids.GridColumnFilter( label, args ) )
+            return accepted_filters
+
+    # Grid definition
+    title = "Installed tool shed repositories"
+    model_class = tool_shed_install.ToolShedRepository
+    template = '/admin/tool_shed_repository/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        ToolShedStatusColumn( label="" ),
+        NameColumn( label="Name",
+                    key="name",
+                    link=( lambda item: iff( item.status in [ tool_shed_install.ToolShedRepository.installation_status.CLONING ],
+                                             None,
+                                             dict( operation="manage_repository", id=item.id ) ) ),
+                    attach_popup=True ),
+        DescriptionColumn( label="Description" ),
+        OwnerColumn( label="Owner" ),
+        RevisionColumn( label="Revision" ),
+        StatusColumn( label="Installation Status",
+                      filterable="advanced" ),
+        ToolShedColumn( label="Tool shed" ),
+        # Columns that are valid for filtering but are not visible.
+        DeletedColumn( label="Status",
+                       key="deleted",
+                       visible=False,
+                       filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[ 1 ] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( label="Update tool shed status",
+                          url_args=dict( controller='admin_toolshed',
+                                         action='update_tool_shed_status_for_installed_repository',
+                                         all_installed_repositories=True ),
+                          inbound=False )
+    ]
+    operations = [ grids.GridOperation( label="Update tool shed status",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( controller='admin_toolshed',
+                                                       action='browse_repositories',
+                                                       operation='update tool shed status' ) ),
+                   grids.GridOperation( label="Get updates",
+                                        condition=( lambda item:
+                                                    not item.deleted and
+                                                    item.revision_update_available and
+                                                    item.status not in [
+                                                        tool_shed_install.ToolShedRepository.installation_status.ERROR,
+                                                        tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
+                                        allow_multiple=False,
+                                        url_args=dict( controller='admin_toolshed',
+                                                       action='browse_repositories',
+                                                       operation='get updates' ) ),
+                   grids.GridOperation( label="Install latest revision",
+                                        condition=( lambda item: item.upgrade_available ),
+                                        allow_multiple=False,
+                                        url_args=dict( controller='admin_toolshed',
+                                                       action='browse_repositories',
+                                                       operation='install latest revision' ) ),
+                   grids.GridOperation( label="Install",
+                                        condition=( lambda item:
+                                                    not item.deleted and
+                                                    item.status == tool_shed_install.ToolShedRepository.installation_status.NEW ),
+                                        allow_multiple=False,
+                                        url_args=dict( controller='admin_toolshed',
+                                                       action='manage_repository',
+                                                       operation='install' ) ),
+                   grids.GridOperation( label="Deactivate or uninstall",
+                                        condition=( lambda item:
+                                                    not item.deleted and
+                                                    item.status != tool_shed_install.ToolShedRepository.installation_status.NEW ),
+                                        allow_multiple=True,
+                                        url_args=dict( controller='admin_toolshed',
+                                                       action='browse_repositories',
+                                                       operation='deactivate or uninstall' ) ),
+                   grids.GridOperation( label="Reset to install",
+                                        condition=( lambda item:
+                                                    ( item.status == tool_shed_install.ToolShedRepository.installation_status.ERROR ) ),
+                                        allow_multiple=False,
+                                        url_args=dict( controller='admin_toolshed',
+                                                       action='browse_repositories',
+                                                       operation='reset to install' ) ),
+                   grids.GridOperation( label="Activate or reinstall",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=False,
+                                        target=None,
+                                        url_args=dict( controller='admin_toolshed',
+                                                       action='browse_repositories',
+                                                       operation='activate or reinstall' ) ),
+                   grids.GridOperation( label="Purge",
+                                        condition=( lambda item: item.is_new ),
+                                        allow_multiple=False,
+                                        target=None,
+                                        url_args=dict( controller='admin_toolshed',
+                                                       action='browse_repositories',
+                                                       operation='purge' ) ) ]
+    standard_filters = []
+    default_filter = dict( deleted="False" )
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.install_model.context.query( self.model_class ) \
+                                          .order_by( self.model_class.table.c.tool_shed,
+                                                     self.model_class.table.c.name,
+                                                     self.model_class.table.c.owner,
+                                                     self.model_class.table.c.ctx_rev )
+
+    @property
+    def legend( self ):
+        legend_str = '%s  Updates are available in the Tool Shed for this revision<br/>' % generate_revision_updates_img_str()
+        legend_str += '%s  A newer installable revision is available for this repository<br/>' % generate_revision_upgrades_img_str()
+        legend_str += '%s  This is the latest installable revision of this repository<br/>' % generate_latest_revision_img_str()
+        legend_str += '%s  This repository is deprecated in the Tool Shed<br/>' % generate_deprecated_repository_img_str()
+        legend_str += '%s  This repository contains exported workflows<br/>' % generate_includes_workflows_img_str()
+        legend_str += '%s  Unable to get information from the Tool Shed<br/>' % generate_unknown_img_str()
+        return legend_str
+
+
+class RepositoryInstallationGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return tool_shed_repository.name
+
+    class DescriptionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return tool_shed_repository.description
+
+    class OwnerColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return tool_shed_repository.owner
+
+    class RevisionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            return tool_shed_repository.changeset_revision
+
+    class StatusColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_shed_repository ):
+            status_label = tool_shed_repository.status
+            if tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.CLONING,
+                                                trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+                                                trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+                                                trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+                bgcolor = trans.install_model.ToolShedRepository.states.INSTALLING
+            elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
+                                                 trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+                bgcolor = trans.install_model.ToolShedRepository.states.UNINSTALLED
+            elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.ERROR ]:
+                bgcolor = trans.install_model.ToolShedRepository.states.ERROR
+            elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+                bgcolor = trans.install_model.ToolShedRepository.states.WARNING
+            elif tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED ]:
+                if tool_shed_repository.missing_tool_dependencies or tool_shed_repository.missing_repository_dependencies:
+                    bgcolor = trans.install_model.ToolShedRepository.states.WARNING
+                if tool_shed_repository.missing_tool_dependencies and not tool_shed_repository.missing_repository_dependencies:
+                    status_label = '%s, missing tool dependencies' % status_label
+                if tool_shed_repository.missing_repository_dependencies and not tool_shed_repository.missing_tool_dependencies:
+                    status_label = '%s, missing repository dependencies' % status_label
+                if tool_shed_repository.missing_tool_dependencies and tool_shed_repository.missing_repository_dependencies:
+                    status_label = '%s, missing both tool and repository dependencies' % status_label
+                if not tool_shed_repository.missing_tool_dependencies and not tool_shed_repository.missing_repository_dependencies:
+                    bgcolor = trans.install_model.ToolShedRepository.states.OK
+            else:
+                bgcolor = trans.install_model.ToolShedRepository.states.ERROR
+            rval = '<div class="count-box state-color-%s" id="RepositoryStatus-%s">%s</div>' % \
+                ( bgcolor, trans.security.encode_id( tool_shed_repository.id ), status_label )
+            return rval
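+
+    # The rendered value above is a small status div; for an installed repository
+    # with no missing dependencies it looks roughly like (identifiers illustrative):
+    #     '<div class="count-box state-color-ok" id="RepositoryStatus-f2db41e1fa331b3e">Installed</div>'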
+
+    title = "Monitor installing tool shed repositories"
+    template = "admin/tool_shed_repository/repository_installation_grid.mako"
+    model_class = tool_shed_install.ToolShedRepository
+    default_sort_key = "-create_time"
+    num_rows_per_page = 50
+    preserve_state = True
+    use_paging = False
+    columns = [
+        NameColumn( "Name",
+                    link=( lambda item: iff( item.status in
+                                             [ tool_shed_install.ToolShedRepository.installation_status.NEW,
+                                               tool_shed_install.ToolShedRepository.installation_status.CLONING,
+                                               tool_shed_install.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+                                               tool_shed_install.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+                                               tool_shed_install.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+                                               tool_shed_install.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES,
+                                               tool_shed_install.ToolShedRepository.installation_status.UNINSTALLED ],
+                                             None, dict( action="manage_repository", id=item.id ) ) ),
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           filterable="advanced" ),
+        OwnerColumn( "Owner",
+                     filterable="advanced" ),
+        RevisionColumn( "Revision",
+                        filterable="advanced" ),
+        StatusColumn( "Installation Status",
+                      filterable="advanced",
+                      label_id_prefix="RepositoryStatus-" )
+    ]
+    operations = []
+
+    def build_initial_query( self, trans, **kwd ):
+        clause_list = []
+        tool_shed_repository_ids = util.listify( kwd.get( 'tool_shed_repository_ids', None ) )
+        if tool_shed_repository_ids:
+            for tool_shed_repository_id in tool_shed_repository_ids:
+                clause_list.append( self.model_class.table.c.id == trans.security.decode_id( tool_shed_repository_id ) )
+            if clause_list:
+                return trans.install_model.context.query( self.model_class ) \
+                                                  .filter( or_( *clause_list ) )
+        for tool_shed_repository in trans.install_model.context.query( self.model_class ) \
+                                                               .filter( self.model_class.table.c.deleted == false() ):
+            if tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
+                                               trans.install_model.ToolShedRepository.installation_status.CLONING,
+                                               trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+                                               trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+                                               trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+                clause_list.append( self.model_class.table.c.id == tool_shed_repository.id )
+        if clause_list:
+            return trans.install_model.context.query( self.model_class ) \
+                                              .filter( or_( *clause_list ) )
+        return trans.install_model.context.query( self.model_class ) \
+                                          .filter( self.model_class.table.c.status == trans.install_model.ToolShedRepository.installation_status.NEW )
+
+    def apply_query_filter( self, trans, query, **kwd ):
+        tool_shed_repository_id = kwd.get( 'tool_shed_repository_id', None )
+        if tool_shed_repository_id:
+            return query.filter_by( id=trans.security.decode_id( tool_shed_repository_id ) )
+        return query
+
+
+class ToolDependencyGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_dependency ):
+            return tool_dependency.name
+
+    class VersionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_dependency ):
+            return tool_dependency.version
+
+    class TypeColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_dependency ):
+            return tool_dependency.type
+
+    class StatusColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, tool_dependency ):
+            if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.INSTALLING ]:
+                bgcolor = trans.install_model.ToolDependency.states.INSTALLING
+            elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                             trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                bgcolor = trans.install_model.ToolDependency.states.UNINSTALLED
+            elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.ERROR ]:
+                bgcolor = trans.install_model.ToolDependency.states.ERROR
+            elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
+                bgcolor = trans.install_model.ToolDependency.states.OK
+            else:
+                # Fall back to the error state so that bgcolor is always defined for the div below.
+                bgcolor = trans.install_model.ToolDependency.states.ERROR
+            rval = '<div class="count-box state-color-%s" id="ToolDependencyStatus-%s">%s</div>' % \
+                ( bgcolor, trans.security.encode_id( tool_dependency.id ), tool_dependency.status )
+            return rval
+
+    title = "Tool Dependencies"
+    template = "admin/tool_shed_repository/tool_dependencies_grid.mako"
+    model_class = tool_shed_install.ToolDependency
+    default_sort_key = "-create_time"
+    num_rows_per_page = 50
+    preserve_state = True
+    use_paging = False
+    columns = [
+        NameColumn( "Name",
+                    link=( lambda item: iff( item.status in [ tool_shed_install.ToolDependency.installation_status.NEVER_INSTALLED,
+                                                              tool_shed_install.ToolDependency.installation_status.INSTALLING,
+                                                              tool_shed_install.ToolDependency.installation_status.UNINSTALLED ],
+                                             None,
+                                             dict( action="manage_tool_dependencies", operation='browse', id=item.id ) ) ),
+                    filterable="advanced" ),
+        VersionColumn( "Version",
+                       filterable="advanced" ),
+        TypeColumn( "Type",
+                    filterable="advanced" ),
+        StatusColumn( "Installation Status",
+                      filterable="advanced" ),
+    ]
+
+    def build_initial_query( self, trans, **kwd ):
+        tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
+        if tool_dependency_ids:
+            clause_list = []
+            for tool_dependency_id in tool_dependency_ids:
+                clause_list.append( self.model_class.table.c.id == trans.security.decode_id( tool_dependency_id ) )
+            return trans.install_model.context.query( self.model_class ) \
+                                              .filter( or_( *clause_list ) )
+        return trans.install_model.context.query( self.model_class )
+
+    def apply_query_filter( self, trans, query, **kwd ):
+        tool_dependency_id = kwd.get( 'tool_dependency_id', None )
+        if tool_dependency_id:
+            return query.filter_by( id=trans.security.decode_id( tool_dependency_id ) )
+        return query
diff --git a/lib/tool_shed/galaxy_install/install_manager.py b/lib/tool_shed/galaxy_install/install_manager.py
new file mode 100644
index 0000000..7da8c6e
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -0,0 +1,1038 @@
+import json
+import logging
+import os
+import sys
+import tempfile
+import traceback
+
+from fabric.api import lcd
+from six import string_types
+from sqlalchemy import or_
+
+from galaxy import exceptions, util
+from galaxy.tools.deps import views
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
+from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
+from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
+from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder
+from tool_shed.galaxy_install.tool_dependencies.recipe.install_environment import InstallEnvironment
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import StepManager
+from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
+from tool_shed.galaxy_install.tools import data_manager, tool_panel_manager
+from tool_shed.tools import data_table_manager, tool_version_manager
+from tool_shed.util import basic_util, common_util, encoding_util, hg_util, repository_util
+from tool_shed.util import shed_util_common as suc, tool_dependency_util
+from tool_shed.util import tool_util, xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class InstallToolDependencyManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.install_model = self.app.install_model
+        self.INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file',
+                                 'setup_perl_environment', 'setup_python_environment',
+                                 'setup_r_environment', 'setup_ruby_environment', 'shell_command' ]
+
+    def format_traceback( self ):
+        ex_type, ex, tb = sys.exc_info()
+        return ''.join( traceback.format_tb( tb ) )
+
+    def get_tool_shed_repository_install_dir( self, tool_shed_repository ):
+        return os.path.abspath( tool_shed_repository.repo_files_directory( self.app ) )
+
+    def install_and_build_package( self, install_environment, tool_dependency, actions_dict ):
+        """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
+        install_dir = actions_dict[ 'install_dir' ]
+        package_name = actions_dict[ 'package_name' ]
+        actions = actions_dict.get( 'actions', None )
+        filtered_actions = []
+        env_file_builder = EnvFileBuilder( install_dir )
+        step_manager = StepManager( self.app )
+        if actions:
+            with install_environment.use_tmp_dir() as work_dir:
+                with lcd( work_dir ):
+                    # The first action in the list of actions will be the one that defines the initial download process.
+                    # There are currently three supported actions: download_binary, download_by_url, and clone via a
+                    # shell_command action type.  The recipe steps will be filtered at this stage in the process, with
+                    # the filtered actions being used in the next stage below.  The installation directory (i.e., dir)
+                    # is also defined in this stage and is used in the next stage below when defining current_dir.
+                    action_type, action_dict = actions[ 0 ]
+                    if action_type in self.INSTALL_ACTIONS:
+                        # Some of the parameters passed here are needed only by a subset of the step handler classes,
+                        # but to allow for a standard method signature we'll pass them along.  We don't check the
+                        # tool_dependency status in this stage because it should not have been changed based on a
+                        # download.
+                        tool_dependency, filtered_actions, dir = \
+                            step_manager.execute_step( tool_dependency=tool_dependency,
+                                                       package_name=package_name,
+                                                       actions=actions,
+                                                       action_type=action_type,
+                                                       action_dict=action_dict,
+                                                       filtered_actions=filtered_actions,
+                                                       env_file_builder=env_file_builder,
+                                                       install_environment=install_environment,
+                                                       work_dir=work_dir,
+                                                       current_dir=None,
+                                                       initial_download=True )
+                    else:
+                        # We're handling a complex repository dependency where we only have a set_environment tag set.
+                        # <action type="set_environment">
+                        #    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                        # </action>
+                        filtered_actions = list( actions )
+                        dir = install_dir
+                    # We're in stage 2 of the installation process.  The package has been downloaded, so we can
+                    # now perform all of the actions defined for building it.
+                    for action_tup in filtered_actions:
+                        if dir is None:
+                            dir = ''
+                        current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+                        with lcd( current_dir ):
+                            action_type, action_dict = action_tup
+                            tool_dependency, tmp_filtered_actions, tmp_dir = \
+                                step_manager.execute_step( tool_dependency=tool_dependency,
+                                                           package_name=package_name,
+                                                           actions=actions,
+                                                           action_type=action_type,
+                                                           action_dict=action_dict,
+                                                           filtered_actions=filtered_actions,
+                                                           env_file_builder=env_file_builder,
+                                                           install_environment=install_environment,
+                                                           work_dir=work_dir,
+                                                           current_dir=current_dir,
+                                                           initial_download=False )
+                            if tool_dependency.status in [ self.install_model.ToolDependency.installation_status.ERROR ]:
+                                # If the tool_dependency status is in an error state, return it with no additional
+                                # processing.
+                                return tool_dependency
+                            # Make sure to handle the special case where the value of dir is reset (this happens when
+                            # the action_type is change_directory).  In all other action types, dir will be returned as
+                            # None.
+                            if tmp_dir is not None:
+                                dir = tmp_dir
+        return tool_dependency
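+
+    # Illustrative shape of the actions_dict consumed above (the inner action_dict
+    # values are hypothetical); install_via_fabric() below assembles it from the
+    # parsed <actions> tag set:
+    #
+    #     actions_dict = {
+    #         'install_dir': '/deps/samtools/0.1.19/owner/package_samtools_0_1_19/<changeset>',
+    #         'package_name': 'samtools',
+    #         'actions': [ ( 'download_by_url', { 'url': 'https://example.org/samtools-0.1.19.tar.gz' } ),
+    #                      ( 'shell_command', { 'command': 'make' } ) ],
+    #     }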
+
+    def install_and_build_package_via_fabric( self, install_environment, tool_shed_repository, tool_dependency, actions_dict ):
+        try:
+            # There is currently only one fabric method.
+            tool_dependency = self.install_and_build_package( install_environment, tool_dependency, actions_dict )
+        except Exception as e:
+            log.exception( 'Error installing tool dependency %s version %s.', str( tool_dependency.name ), str( tool_dependency.version ) )
+            # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must
+            # be left False here.
+            error_message = '%s\n%s' % ( self.format_traceback(), str( e ) )
+            tool_dependency = tool_dependency_util.set_tool_dependency_attributes(self.app,
+                                                                                  tool_dependency=tool_dependency,
+                                                                                  status=self.app.install_model.ToolDependency.installation_status.ERROR,
+                                                                                  error_message=error_message)
+        tool_dependency = self.mark_tool_dependency_installed( tool_dependency )
+        return tool_dependency
+
+    def install_specified_tool_dependencies( self, tool_shed_repository, tool_dependencies_config, tool_dependencies,
+                                             from_tool_migration_manager=False ):
+        """
+        Follow the recipe in the received tool_dependencies_config to install specified packages for
+        repository tools.  The received list of tool_dependencies are the database records for those
+        dependencies defined in the tool_dependencies_config that are to be installed.  This list may
+        be a subset of the set of dependencies defined in the tool_dependencies_config.  This allows
+        for filtering out dependencies that have not been checked for installation on the 'Manage tool
+        dependencies' page for an installed Tool Shed repository.
+        """
+        attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in tool_dependencies ]
+        installed_packages = []
+        tag_manager = TagManager( self.app )
+        # Parse the tool_dependencies.xml config.
+        tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+        if tree is None:
+            log.debug( "The received tool_dependencies.xml file is likely invalid: %s" % str( error_message ) )
+            return installed_packages
+        root = tree.getroot()
+        elems = []
+        for elem in root:
+            if elem.tag == 'set_environment':
+                version = elem.get( 'version', '1.0' )
+                if version != '1.0':
+                    raise Exception( 'The <set_environment> tag must have a version attribute with value 1.0' )
+                for sub_elem in elem:
+                    elems.append( sub_elem )
+            else:
+                elems.append( elem )
+        for elem in elems:
+            name = elem.get( 'name', None )
+            version = elem.get( 'version', None )
+            type = elem.get( 'type', None )
+            if type is None:
+                if elem.tag in [ 'environment_variable', 'set_environment' ]:
+                    type = 'set_environment'
+                else:
+                    type = 'package'
+            if ( name and type == 'set_environment' ) or ( name and version ):
+                # elem is a package or set_environment tag set.
+                attr_tup = ( name, version, type )
+                try:
+                    index = attr_tups_of_dependencies_for_install.index( attr_tup )
+                except ValueError:
+                    index = None
+                if index is not None:
+                    tool_dependency = tool_dependencies[ index ]
+                    # If the tool_dependency.type is 'set_environment', then the call to process_tag_set() will
+                    # handle everything - no additional installation is necessary.
+                    tool_dependency, proceed_with_install, action_elem_tuples = \
+                        tag_manager.process_tag_set( tool_shed_repository,
+                                                     tool_dependency,
+                                                     elem,
+                                                     name,
+                                                     version,
+                                                     from_tool_migration_manager=from_tool_migration_manager,
+                                                     tool_dependency_db_records=tool_dependencies )
+                    if ( tool_dependency.type == 'package' and proceed_with_install ):
+                        try:
+                            tool_dependency = self.install_package( elem,
+                                                                    tool_shed_repository,
+                                                                    tool_dependencies=tool_dependencies,
+                                                                    from_tool_migration_manager=from_tool_migration_manager )
+                        except Exception as e:
+                            error_message = "Error installing tool dependency %s version %s: %s" % \
+                                ( str( name ), str( version ), str( e ) )
+                            log.exception( error_message )
+                            if tool_dependency:
+                                # Since there was an installation error, update the tool dependency status to Error. The
+                                # remove_installation_path option must be left False here.
+                                tool_dependency = \
+                                    tool_dependency_util.set_tool_dependency_attributes(self.app,
+                                                                                        tool_dependency=tool_dependency,
+                                                                                        status=self.app.install_model.ToolDependency.installation_status.ERROR,
+                                                                                        error_message=error_message)
+                        if tool_dependency and tool_dependency.status in [ self.install_model.ToolDependency.installation_status.INSTALLED,
+                                                                           self.install_model.ToolDependency.installation_status.ERROR ]:
+                            installed_packages.append( tool_dependency )
+                            if self.app.config.manage_dependency_relationships:
+                                # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager.
+                                self.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository,
+                                                                                                      tool_dependency )
+        return installed_packages
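+
+    # A minimal tool_dependencies.xml of the shape parsed above (illustrative):
+    #
+    #     <tool_dependency>
+    #         <package name="samtools" version="0.1.19">
+    #             <install version="1.0">...</install>
+    #         </package>
+    #         <set_environment version="1.0">
+    #             <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+    #         </set_environment>
+    #     </tool_dependency>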
+
+    def install_via_fabric( self, tool_shed_repository, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None,
+                            actions_elem=None, action_elem=None, **kwd ):
+        """
+        Parse a tool_dependency.xml file's <actions> tag set to gather information for installation using
+        self.install_and_build_package().  The use of fabric is being eliminated, so some of these functions
+        may need to be renamed at some point.
+        """
+        if not os.path.exists( install_dir ):
+            os.makedirs( install_dir )
+        actions_dict = dict( install_dir=install_dir )
+        if package_name:
+            actions_dict[ 'package_name' ] = package_name
+        actions = []
+        is_binary_download = False
+        if actions_elem is not None:
+            elems = actions_elem
+            if elems.get( 'os' ) is not None and elems.get( 'architecture' ) is not None:
+                is_binary_download = True
+        elif action_elem is not None:
+            # We were provided with a single <action> element to perform certain actions after a platform-specific tarball was downloaded.
+            elems = [ action_elem ]
+        else:
+            elems = []
+        step_manager = StepManager( self.app )
+        tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( tool_shed_repository )
+        install_environment = InstallEnvironment( self.app, tool_shed_repository_install_dir, install_dir )
+        for action_elem in elems:
+            # Make sure to skip all comments, since they are now included in the XML tree.
+            if action_elem.tag != 'action':
+                continue
+            action_dict = {}
+            action_type = action_elem.get( 'type', None )
+            if action_type is not None:
+                action_dict = step_manager.prepare_step( tool_dependency=tool_dependency,
+                                                         action_type=action_type,
+                                                         action_elem=action_elem,
+                                                         action_dict=action_dict,
+                                                         install_environment=install_environment,
+                                                         is_binary_download=is_binary_download )
+                action_tuple = ( action_type, action_dict )
+                if action_type == 'set_environment':
+                    if action_tuple not in actions:
+                        actions.append( action_tuple )
+                else:
+                    actions.append( action_tuple )
+        if actions:
+            actions_dict[ 'actions' ] = actions
+        if custom_fabfile_path is not None:
+            # TODO: this is not yet supported or functional; when it is, handle it using the fabric api.
+            raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' )
+        else:
+            tool_dependency = self.install_and_build_package_via_fabric( install_environment,
+                                                                         tool_shed_repository,
+                                                                         tool_dependency,
+                                                                         actions_dict )
+        return tool_dependency
+
+    def install_package( self, elem, tool_shed_repository, tool_dependencies=None, from_tool_migration_manager=False ):
+        """
+        Install a tool dependency package defined by the XML element elem.  The value of tool_dependencies is
+        a partial or full list of ToolDependency records associated with the tool_shed_repository.
+        """
+        tag_manager = TagManager( self.app )
+        # The value of package_name should match the name of a "package" type requirement in the
+        # tool config's <requirements> tag set, but this is not required.
+        package_name = elem.get( 'name', None )
+        package_version = elem.get( 'version', None )
+        if tool_dependencies and package_name and package_version:
+            tool_dependency = None
+            for tool_dependency in tool_dependencies:
+                if package_name == str( tool_dependency.name ) and package_version == str( tool_dependency.version ):
+                    break
+            if tool_dependency is not None:
+                for package_elem in elem:
+                    tool_dependency, proceed_with_install, actions_elem_tuples = \
+                        tag_manager.process_tag_set( tool_shed_repository,
+                                                     tool_dependency,
+                                                     package_elem,
+                                                     package_name,
+                                                     package_version,
+                                                     from_tool_migration_manager=from_tool_migration_manager,
+                                                     tool_dependency_db_records=None )
+                    if proceed_with_install and actions_elem_tuples:
+                        # Get the installation directory for tool dependencies that will be installed for the received
+                        # tool_shed_repository.
+                        install_dir = \
+                            tool_dependency_util.get_tool_dependency_install_dir( app=self.app,
+                                                                                  repository_name=tool_shed_repository.name,
+                                                                                  repository_owner=tool_shed_repository.owner,
+                                                                                  repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+                                                                                  tool_dependency_type='package',
+                                                                                  tool_dependency_name=package_name,
+                                                                                  tool_dependency_version=package_version )
+                        # At this point we have a list of <actions> elems that are either defined within an <actions_group>
+                        # tag set with <actions> sub-elements that contain os and architecture attributes filtered by the
+                        # platform onto which the appropriate compiled binary will be installed, or defined outside of an
+                        # <actions_group> tag set and therefore not filtered.  Here is an example actions_elem_tuple:
+                        # [ ( True, [ <Element 'actions' at 0x109293d10> ] ) ]
+                        binary_installed = False
+                        for actions_elem_tuple in actions_elem_tuples:
+                            in_actions_group, actions_elems = actions_elem_tuple
+                            if in_actions_group:
+                                # Platform matching is only performed inside <actions_group> tag sets, os and architecture
+                                # attributes are otherwise ignored.
+                                can_install_from_source = False
+                                for actions_elem in actions_elems:
+                                    system = actions_elem.get( 'os' )
+                                    architecture = actions_elem.get( 'architecture' )
+                                    # If this <actions> element has the os and architecture attributes defined, then we only
+                                    # want to process until a successful installation is achieved.
+                                    if system and architecture:
+                                        # If an <actions> tag has been defined that matches our current platform, and the
+                                        # recipe specified within that <actions> tag has been successfully processed, skip
+                                        # any remaining platform-specific <actions> tags.  We cannot break out of the loop
+                                        # here because there may be <action> tags at the end of the <actions_group> tag set
+                                        # that must be processed.
+                                        if binary_installed:
+                                            continue
+                                        # No platform-specific <actions> recipe has yet resulted in a successful installation.
+                                        tool_dependency = self.install_via_fabric( tool_shed_repository,
+                                                                                   tool_dependency,
+                                                                                   install_dir,
+                                                                                   package_name=package_name,
+                                                                                   actions_elem=actions_elem,
+                                                                                   action_elem=None )
+                                        if tool_dependency.status == self.install_model.ToolDependency.installation_status.INSTALLED:
+                                            # If an <actions> tag was found that matches the current platform, and
+                                            # self.install_via_fabric() did not result in an error state, set binary_installed
+                                            # to True in order to skip any remaining platform-specific <actions> tags.
+                                            binary_installed = True
+                                        else:
+                                            # Process the next matching <actions> tag, or any defined <actions> tags that do not
+                                            # contain platform-dependent recipes.
+                                            log.debug( 'Error downloading binary for tool dependency %s version %s: %s' %
+                                                       ( str( package_name ), str( package_version ), str( tool_dependency.error_message ) ) )
+                                    else:
+                                        if actions_elem.tag == 'actions':
+                                            # We've reached an <actions> tag that defines the recipe for installing and compiling from
+                                            # source.  If binary installation failed, we proceed with the recipe.
+                                            if not binary_installed:
+                                                installation_directory = tool_dependency.installation_directory( self.app )
+                                                if os.path.exists( installation_directory ):
+                                                    # Delete contents of installation directory if attempt at binary installation failed.
+                                                    installation_directory_contents = os.listdir( installation_directory )
+                                                    if installation_directory_contents:
+                                                        removed, error_message = \
+                                                            tool_dependency_util.remove_tool_dependency( self.app, tool_dependency )
+                                                        if removed:
+                                                            can_install_from_source = True
+                                                        else:
+                                                            log.debug( 'Error removing old files from installation directory %s: %s' %
+                                                                       ( str( installation_directory ), str( error_message ) ) )
+                                                    else:
+                                                        can_install_from_source = True
+                                                else:
+                                                    can_install_from_source = True
+                                            if can_install_from_source:
+                                                # We now know that binary installation was not successful, so proceed with the <actions>
+                                                # tag set that defines the recipe to install and compile from source.
+                                                log.debug( 'Proceeding with install and compile recipe for tool dependency %s.' %
+                                                           str( tool_dependency.name ) )
+                                                # Reset the error status set above back to INSTALLING before retrying from source.
+                                                tool_dependency.status = self.install_model.ToolDependency.installation_status.INSTALLING
+                                                tool_dependency = self.install_via_fabric( tool_shed_repository,
+                                                                                           tool_dependency,
+                                                                                           install_dir,
+                                                                                           package_name=package_name,
+                                                                                           actions_elem=actions_elem,
+                                                                                           action_elem=None )
+                                    if actions_elem.tag == 'action' and \
+                                            tool_dependency.status != self.install_model.ToolDependency.installation_status.ERROR:
+                                        # If the tool dependency is not in an error state, perform any final actions that have been
+                                        # defined within the actions_group tag set, but outside of an <actions> tag, which defines
+                                        # the recipe for installing and compiling from source.
+                                        tool_dependency = self.install_via_fabric( tool_shed_repository,
+                                                                                   tool_dependency,
+                                                                                   install_dir,
+                                                                                   package_name=package_name,
+                                                                                   actions_elem=None,
+                                                                                   action_elem=actions_elem )
+                            else:
+                                # The "os" and "architecture" attributes are only checked within <actions_group> tag sets;
+                                # if they are defined on <actions> tag sets outside of an <actions_group> tag set they are
+                                # ignored, and all such <actions> tags are always processed.
+                                tool_dependency = self.install_via_fabric( tool_shed_repository,
+                                                                           tool_dependency,
+                                                                           install_dir,
+                                                                           package_name=package_name,
+                                                                           actions_elem=actions_elems,
+                                                                           action_elem=None )
+                                if tool_dependency.status != self.install_model.ToolDependency.installation_status.ERROR:
+                                    log.debug( 'Tool dependency %s version %s has been installed in %s.' %
+                                               ( str( package_name ), str( package_version ), str( install_dir ) ) )
+        return tool_dependency
+
+    def mark_tool_dependency_installed( self, tool_dependency ):
+        if tool_dependency.status not in [ self.install_model.ToolDependency.installation_status.ERROR,
+                                           self.install_model.ToolDependency.installation_status.INSTALLED ]:
+            log.debug( 'Changing status for tool dependency %s from %s to %s.' %
+                       ( str( tool_dependency.name ),
+                         str( tool_dependency.status ),
+                         str( self.install_model.ToolDependency.installation_status.INSTALLED ) ) )
+            status = self.install_model.ToolDependency.installation_status.INSTALLED
+            tool_dependency = tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                   tool_dependency=tool_dependency,
+                                                                                   status=status )
+        return tool_dependency
+
+
+class InstallRepositoryManager( object ):
+
+    def __init__( self, app, tpm=None ):
+        self.app = app
+        self.install_model = self.app.install_model
+        self._view = views.DependencyResolversView(app)
+        if tpm is None:
+            self.tpm = tool_panel_manager.ToolPanelManager( self.app )
+        else:
+            self.tpm = tpm
+
+    def get_repository_components_for_installation( self, encoded_tsr_id, encoded_tsr_ids, repo_info_dicts,
+                                                    tool_panel_section_keys ):
+        """
+        The received encoded_tsr_ids, repo_info_dicts, and
+        tool_panel_section_keys are 3 lists that contain associated elements
+        at each location in the list.  This method will return the elements
+        from repo_info_dicts and tool_panel_section_keys associated with the
+        received encoded_tsr_id by determining its location in the received
+        encoded_tsr_ids list.
+        """
+        for tsr_id, repo_info_dict, tool_panel_section_key in zip( encoded_tsr_ids,
+                                                                   repo_info_dicts,
+                                                                   tool_panel_section_keys ):
+            if tsr_id == encoded_tsr_id:
+                return repo_info_dict, tool_panel_section_key
+        return None, None
+
+    def __get_install_info_from_tool_shed( self, tool_shed_url, name, owner, changeset_revision ):
+        params = dict( name=str( name ),
+                       owner=str( owner ),
+                       changeset_revision=str( changeset_revision ) )
+        pathspec = [ 'api', 'repositories', 'get_repository_revision_install_info' ]
+        try:
+            raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        except Exception as e:
+            message = "Error attempting to retrieve installation information from tool shed "
+            message += "%s for revision %s of repository %s owned by %s: %s" % \
+                ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) )
+            log.warning( message )
+            raise exceptions.InternalServerError( message )
+        if raw_text:
+            # If successful, the response from get_repository_revision_install_info will be a list
+            # of 3 dictionaries: a dictionary defining the Repository, a dictionary defining the
+            # repository revision (RepositoryMetadata), and a dictionary including the additional
+            # information required to install the repository.
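+            # Sketch of a hypothetical response, with the third dictionary keyed by repository
+            # name (see the repo_info_tuple unpacked in install_tool_shed_repository below):
+            #   [ { ...Repository fields... },
+            #     { ...RepositoryMetadata fields... },
+            #     { 'some_repo': ( description, clone_url, changeset_revision, ctx_rev, owner,
+            #                      repository_dependencies, tool_dependencies ) } ]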
+            items = json.loads( raw_text )
+            repository_revision_dict = items[ 1 ]
+            repo_info_dict = items[ 2 ]
+        else:
+            message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s" % \
+                ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ) )
+            log.warning( message )
+            raise exceptions.InternalServerError( message )
+        # Make sure the tool shed returned everything we need for installing the repository.
+        if not repository_revision_dict or not repo_info_dict:
+            invalid_parameter_message = "No information is available for the requested repository revision.\n"
+            invalid_parameter_message += "One or more of the following parameter values is likely invalid:\n"
+            invalid_parameter_message += "tool_shed_url: %s\n" % str( tool_shed_url )
+            invalid_parameter_message += "name: %s\n" % str( name )
+            invalid_parameter_message += "owner: %s\n" % str( owner )
+            invalid_parameter_message += "changeset_revision: %s\n" % str( changeset_revision )
+            raise exceptions.RequestParameterInvalidException( invalid_parameter_message )
+        repo_info_dicts = [ repo_info_dict ]
+        return repository_revision_dict, repo_info_dicts
+
+    def __handle_repository_contents( self, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir,
+                                      tool_shed=None, tool_section=None, shed_tool_conf=None, reinstalling=False,
+                                      tool_panel_section_mapping={} ):
+        """
+        Generate the metadata for the installed tool shed repository, among other things.
+        This method is called when an administrator is installing a new repository or
+        reinstalling an uninstalled repository.
+        """
+        shed_config_dict = self.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
+        tdtm = data_table_manager.ToolDataTableManager( self.app )
+        irmm = InstalledRepositoryMetadataManager( app=self.app,
+                                                   tpm=self.tpm,
+                                                   repository=tool_shed_repository,
+                                                   changeset_revision=tool_shed_repository.changeset_revision,
+                                                   repository_clone_url=repository_clone_url,
+                                                   shed_config_dict=shed_config_dict,
+                                                   relative_install_dir=relative_install_dir,
+                                                   repository_files_dir=None,
+                                                   resetting_all_metadata_on_repository=False,
+                                                   updating_installed_repository=False,
+                                                   persist=True )
+        irmm.generate_metadata_for_changeset_revision()
+        irmm_metadata_dict = irmm.get_metadata_dict()
+        tool_shed_repository.metadata = irmm_metadata_dict
+        # Update the tool_shed_repository.tool_shed_status column in the database.
+        tool_shed_status_dict = repository_util.get_tool_shed_status_for_installed_repository( self.app, tool_shed_repository )
+        if tool_shed_status_dict:
+            tool_shed_repository.tool_shed_status = tool_shed_status_dict
+        self.install_model.context.add( tool_shed_repository )
+        self.install_model.context.flush()
+        if 'tool_dependencies' in irmm_metadata_dict and not reinstalling:
+            tool_dependency_util.create_tool_dependency_objects( self.app,
+                                                                 tool_shed_repository,
+                                                                 relative_install_dir,
+                                                                 set_status=True )
+        if 'sample_files' in irmm_metadata_dict:
+            sample_files = irmm_metadata_dict.get( 'sample_files', [] )
+            tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
+            tool_data_table_conf_filename, tool_data_table_elems = \
+                tdtm.install_tool_data_tables( tool_shed_repository, tool_index_sample_files )
+            if tool_data_table_elems:
+                self.app.tool_data_tables.add_new_entries_from_config_file( tool_data_table_conf_filename,
+                                                                            None,
+                                                                            self.app.config.shed_tool_data_table_config,
+                                                                            persist=True )
+        if 'tools' in irmm_metadata_dict:
+            # Get the tool_versions from the Tool Shed for each tool in the installed change set.
+            self.update_tool_shed_repository_status( tool_shed_repository,
+                                                     self.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+            tool_version_dicts = fetch_tool_versions( self.app, tool_shed_repository )
+            tvm = tool_version_manager.ToolVersionManager( self.app )
+            tvm.handle_tool_versions( tool_version_dicts, tool_shed_repository )
+            tool_panel_dict = self.tpm.generate_tool_panel_dict_for_new_install( irmm_metadata_dict[ 'tools' ], tool_section )
+            sample_files = irmm_metadata_dict.get( 'sample_files', [] )
+            tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
+            tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
+            sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
+            repository_tools_tups = irmm.get_repository_tools_tups()
+            if repository_tools_tups:
+                # Handle missing data table entries for tool parameters that are dynamically generated select lists.
+                repository_tools_tups = tdtm.handle_missing_data_table_entry( relative_install_dir,
+                                                                              tool_path,
+                                                                              repository_tools_tups )
+                # Handle missing index files for tool parameters that are dynamically generated select lists.
+                repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( self.app,
+                                                                                                  tool_path,
+                                                                                                  sample_files,
+                                                                                                  repository_tools_tups,
+                                                                                                  sample_files_copied )
+                # Copy remaining sample files included in the repository to the ~/tool-data directory of the
+                # local Galaxy instance.
+                tool_util.copy_sample_files( self.app,
+                                             sample_files,
+                                             tool_path=tool_path,
+                                             sample_files_copied=sample_files_copied )
+                self.tpm.add_to_tool_panel( repository_name=tool_shed_repository.name,
+                                            repository_clone_url=repository_clone_url,
+                                            changeset_revision=tool_shed_repository.installed_changeset_revision,
+                                            repository_tools_tups=repository_tools_tups,
+                                            owner=tool_shed_repository.owner,
+                                            shed_tool_conf=shed_tool_conf,
+                                            tool_panel_dict=tool_panel_dict,
+                                            new_install=True,
+                                            tool_panel_section_mapping=tool_panel_section_mapping )
+        if 'data_manager' in irmm_metadata_dict:
+            dmh = data_manager.DataManagerHandler( self.app )
+            dmh.install_data_managers( self.app.config.shed_data_manager_config_file,
+                                       irmm_metadata_dict,
+                                       shed_config_dict,
+                                       relative_install_dir,
+                                       tool_shed_repository,
+                                       repository_tools_tups )
+        if 'datatypes' in irmm_metadata_dict:
+            self.update_tool_shed_repository_status( tool_shed_repository,
+                                                     self.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES )
+            if not tool_shed_repository.includes_datatypes:
+                tool_shed_repository.includes_datatypes = True
+            self.install_model.context.add( tool_shed_repository )
+            self.install_model.context.flush()
+            files_dir = relative_install_dir
+            if shed_config_dict.get( 'tool_path' ):
+                files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
+            datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir )
+            # Load data types required by tools.
+            cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
+            converter_path, display_path = \
+                cdl.alter_config_and_load_prorietary_datatypes( datatypes_config, files_dir, override=False )
+            if converter_path or display_path:
+                # Create a dictionary of tool shed repository related information.
+                repository_dict = \
+                    cdl.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
+                                                                          name=tool_shed_repository.name,
+                                                                          owner=tool_shed_repository.owner,
+                                                                          installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+                                                                          tool_dicts=irmm_metadata_dict.get( 'tools', [] ),
+                                                                          converter_path=converter_path,
+                                                                          display_path=display_path )
+            if converter_path:
+                # Load proprietary datatype converters
+                self.app.datatypes_registry.load_datatype_converters( self.app.toolbox, installed_repository_dict=repository_dict )
+            if display_path:
+                # Load proprietary datatype display applications
+                self.app.datatypes_registry.load_display_applications( self.app, installed_repository_dict=repository_dict )
+
+    def handle_tool_shed_repositories( self, installation_dict ):
+        # The following installation_dict entries are all required.
+        install_repository_dependencies = installation_dict[ 'install_repository_dependencies' ]
+        new_tool_panel_section_label = installation_dict[ 'new_tool_panel_section_label' ]
+        no_changes_checked = installation_dict[ 'no_changes_checked' ]
+        repo_info_dicts = installation_dict[ 'repo_info_dicts' ]
+        tool_panel_section_id = installation_dict[ 'tool_panel_section_id' ]
+        tool_path = installation_dict[ 'tool_path' ]
+        tool_shed_url = installation_dict[ 'tool_shed_url' ]
+        rdim = repository_dependency_manager.RepositoryDependencyInstallManager( self.app )
+        created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
+            rdim.create_repository_dependency_objects( tool_path=tool_path,
+                                                       tool_shed_url=tool_shed_url,
+                                                       repo_info_dicts=repo_info_dicts,
+                                                       install_repository_dependencies=install_repository_dependencies,
+                                                       no_changes_checked=no_changes_checked,
+                                                       tool_panel_section_id=tool_panel_section_id,
+                                                       new_tool_panel_section_label=new_tool_panel_section_label )
+        return created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts
+
+    def initiate_repository_installation( self, installation_dict ):
+        # The following installation_dict entries are all required.
+        created_or_updated_tool_shed_repositories = installation_dict[ 'created_or_updated_tool_shed_repositories' ]
+        filtered_repo_info_dicts = installation_dict[ 'filtered_repo_info_dicts' ]
+        has_repository_dependencies = installation_dict[ 'has_repository_dependencies' ]
+        includes_tool_dependencies = installation_dict[ 'includes_tool_dependencies' ]
+        includes_tools = installation_dict[ 'includes_tools' ]
+        includes_tools_for_display_in_tool_panel = installation_dict[ 'includes_tools_for_display_in_tool_panel' ]
+        install_repository_dependencies = installation_dict[ 'install_repository_dependencies' ]
+        install_resolver_dependencies = installation_dict['install_resolver_dependencies']
+        install_tool_dependencies = installation_dict[ 'install_tool_dependencies' ]
+        message = installation_dict[ 'message' ]
+        new_tool_panel_section_label = installation_dict[ 'new_tool_panel_section_label' ]
+        shed_tool_conf = installation_dict[ 'shed_tool_conf' ]
+        status = installation_dict[ 'status' ]
+        tool_panel_section_id = installation_dict[ 'tool_panel_section_id' ]
+        tool_panel_section_keys = installation_dict[ 'tool_panel_section_keys' ]
+        tool_panel_section_mapping = installation_dict.get( 'tool_panel_section_mapping', {} )
+        tool_path = installation_dict[ 'tool_path' ]
+        tool_shed_url = installation_dict[ 'tool_shed_url' ]
+        # Handle contained tools.
+        if includes_tools_for_display_in_tool_panel and ( new_tool_panel_section_label or tool_panel_section_id ):
+            self.tpm.handle_tool_panel_section( self.app.toolbox,
+                                                tool_panel_section_id=tool_panel_section_id,
+                                                new_tool_panel_section_label=new_tool_panel_section_label )
+        if includes_tools_for_display_in_tool_panel and ( tool_panel_section_mapping is not None ):
+            for tool_guid in tool_panel_section_mapping:
+                if tool_panel_section_mapping[ tool_guid ][ 'action' ] == 'create':
+                    new_tool_panel_section_name = tool_panel_section_mapping[ tool_guid ][ 'tool_panel_section' ]
+                    log.debug( 'Creating tool panel section "%s" for tool %s' % ( new_tool_panel_section_name, tool_guid ) )
+                    self.tpm.handle_tool_panel_section( self.app.toolbox, None, tool_panel_section_mapping[ tool_guid ][ 'tool_panel_section' ] )
+        encoded_repository_ids = [ self.app.security.encode_id( tsr.id ) for tsr in created_or_updated_tool_shed_repositories ]
+        new_kwd = dict( includes_tools=includes_tools,
+                        includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                        has_repository_dependencies=has_repository_dependencies,
+                        install_repository_dependencies=install_repository_dependencies,
+                        includes_tool_dependencies=includes_tool_dependencies,
+                        install_resolver_dependencies=install_resolver_dependencies,
+                        install_tool_dependencies=install_tool_dependencies,
+                        message=message,
+                        repo_info_dicts=filtered_repo_info_dicts,
+                        shed_tool_conf=shed_tool_conf,
+                        status=status,
+                        tool_path=tool_path,
+                        tool_panel_section_keys=tool_panel_section_keys,
+                        tool_shed_repository_ids=encoded_repository_ids,
+                        tool_shed_url=tool_shed_url )
+        encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
+        tsr_ids = [ r.id for r in created_or_updated_tool_shed_repositories ]
+        tool_shed_repositories = []
+        for tsr_id in tsr_ids:
+            tsr = self.install_model.context.query( self.install_model.ToolShedRepository ).get( tsr_id )
+            tool_shed_repositories.append( tsr )
+        clause_list = []
+        for tsr_id in tsr_ids:
+            clause_list.append( self.install_model.ToolShedRepository.table.c.id == tsr_id )
+        query = self.install_model.context.query( self.install_model.ToolShedRepository ).filter( or_( *clause_list ) )
+        return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids
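+
+    # An equivalent, more idiomatic SQLAlchemy form of the or_()-based filter built
+    # above would be (sketch only):
+    #   query = self.install_model.context.query( self.install_model.ToolShedRepository ) \
+    #       .filter( self.install_model.ToolShedRepository.table.c.id.in_( tsr_ids ) )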
+
+    def install( self, tool_shed_url, name, owner, changeset_revision, install_options ):
+        # Get all of the information necessary for installing the repository from the specified tool shed.
+        repository_revision_dict, repo_info_dicts = self.__get_install_info_from_tool_shed( tool_shed_url,
+                                                                                            name,
+                                                                                            owner,
+                                                                                            changeset_revision )
+        installed_tool_shed_repositories = self.__initiate_and_install_repositories(
+            tool_shed_url,
+            repository_revision_dict,
+            repo_info_dicts,
+            install_options
+        )
+        return installed_tool_shed_repositories
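+
+    # Usage sketch (hypothetical values; assumes a configured Galaxy app object):
+    #   irm = InstallRepositoryManager( app )
+    #   installed = irm.install( tool_shed_url='https://toolshed.g2.bx.psu.edu',
+    #                            name='package_samtools_0_1_19',
+    #                            owner='iuc',
+    #                            changeset_revision='abc123def456',
+    #                            install_options={ 'install_tool_dependencies': True } )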
+
+    def __initiate_and_install_repositories( self, tool_shed_url, repository_revision_dict, repo_info_dicts, install_options ):
+        try:
+            has_repository_dependencies = repository_revision_dict[ 'has_repository_dependencies' ]
+        except KeyError:
+            raise exceptions.InternalServerError( "Tool shed response missing required parameter 'has_repository_dependencies'." )
+        try:
+            includes_tools = repository_revision_dict[ 'includes_tools' ]
+        except KeyError:
+            raise exceptions.InternalServerError( "Tool shed response missing required parameter 'includes_tools'." )
+        try:
+            includes_tool_dependencies = repository_revision_dict[ 'includes_tool_dependencies' ]
+        except KeyError:
+            raise exceptions.InternalServerError( "Tool shed response missing required parameter 'includes_tool_dependencies'." )
+        try:
+            includes_tools_for_display_in_tool_panel = repository_revision_dict[ 'includes_tools_for_display_in_tool_panel' ]
+        except KeyError:
+            raise exceptions.InternalServerError( "Tool shed response missing required parameter 'includes_tools_for_display_in_tool_panel'." )
+        # Get the information about the Galaxy components (e.g., tool panel section, tool config file, etc.) that will contain the repository information.
+        install_repository_dependencies = install_options.get( 'install_repository_dependencies', False )
+        install_resolver_dependencies = install_options.get( 'install_resolver_dependencies', False)
+        install_tool_dependencies = install_options.get( 'install_tool_dependencies', False )
+        if install_tool_dependencies:
+            self.__assert_can_install_dependencies()
+        new_tool_panel_section_label = install_options.get( 'new_tool_panel_section_label', '' )
+        tool_panel_section_mapping = install_options.get( 'tool_panel_section_mapping', {} )
+        shed_tool_conf = install_options.get( 'shed_tool_conf', None )
+        if shed_tool_conf:
+            # Get the tool_path setting.
+            shed_conf_dict = self.tpm.get_shed_tool_conf_dict( shed_tool_conf )
+            tool_path = shed_conf_dict[ 'tool_path' ]
+        else:
+            # Don't use migrated_tools_conf.xml.
+            try:
+                shed_config_dict = self.app.toolbox.dynamic_confs( include_migrated_tool_conf=False )[ 0 ]
+            except IndexError:
+                raise exceptions.RequestParameterMissingException( "Missing required parameter 'shed_tool_conf'." )
+            shed_tool_conf = shed_config_dict[ 'config_filename' ]
+            tool_path = shed_config_dict[ 'tool_path' ]
+        tool_panel_section_id = self.app.toolbox.find_section_id( install_options.get( 'tool_panel_section_id', '' ) )
+        # Build the dictionary of information necessary for creating tool_shed_repository database records
+        # for each repository being installed.
+        installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
+                                  new_tool_panel_section_label=new_tool_panel_section_label,
+                                  tool_panel_section_mapping=tool_panel_section_mapping,
+                                  no_changes_checked=False,
+                                  repo_info_dicts=repo_info_dicts,
+                                  tool_panel_section_id=tool_panel_section_id,
+                                  tool_path=tool_path,
+                                  tool_shed_url=tool_shed_url )
+        # Create the tool_shed_repository database records and gather additional information for repository installation.
+        created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
+            self.handle_tool_shed_repositories( installation_dict )
+        if created_or_updated_tool_shed_repositories:
+            # Build the dictionary of information necessary for installing the repositories.
+            installation_dict = dict( created_or_updated_tool_shed_repositories=created_or_updated_tool_shed_repositories,
+                                      filtered_repo_info_dicts=filtered_repo_info_dicts,
+                                      has_repository_dependencies=has_repository_dependencies,
+                                      includes_tool_dependencies=includes_tool_dependencies,
+                                      includes_tools=includes_tools,
+                                      includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                                      install_repository_dependencies=install_repository_dependencies,
+                                      install_resolver_dependencies=install_resolver_dependencies,
+                                      install_tool_dependencies=install_tool_dependencies,
+                                      message='',
+                                      new_tool_panel_section_label=new_tool_panel_section_label,
+                                      shed_tool_conf=shed_tool_conf,
+                                      status='done',
+                                      tool_panel_section_id=tool_panel_section_id,
+                                      tool_panel_section_keys=tool_panel_section_keys,
+                                      tool_panel_section_mapping=tool_panel_section_mapping,
+                                      tool_path=tool_path,
+                                      tool_shed_url=tool_shed_url )
+            # Prepare the repositories for installation.  Even though this
+            # method receives a single combination of tool_shed_url, name,
+            # owner and changeset_revision, there may be multiple repositories
+            # for installation at this point because repository dependencies
+            # may have added additional repositories for installation along
+            # with the single specified repository.
+            encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
+                self.initiate_repository_installation( installation_dict )
+            # Some repositories may have repository dependencies that are
+            # required to be installed before the dependent repository, so
+            # we'll order the list of tsr_ids to ensure all repositories
+            # install in the required order.
+            tsr_ids = [ self.app.security.encode_id( tool_shed_repository.id ) for tool_shed_repository in tool_shed_repositories ]
+
+            decoded_kwd = dict(
+                shed_tool_conf=shed_tool_conf,
+                tool_path=tool_path,
+                tool_panel_section_keys=tool_panel_section_keys,
+                repo_info_dicts=filtered_repo_info_dicts,
+                install_resolver_dependencies=install_resolver_dependencies,
+                install_tool_dependencies=install_tool_dependencies,
+                tool_panel_section_mapping=tool_panel_section_mapping,
+            )
+            return self.install_repositories(tsr_ids, decoded_kwd, reinstalling=False)
+
+    def install_repositories( self, tsr_ids, decoded_kwd, reinstalling ):
+        shed_tool_conf = decoded_kwd.get( 'shed_tool_conf', '' )
+        tool_path = decoded_kwd[ 'tool_path' ]
+        tool_panel_section_keys = util.listify( decoded_kwd[ 'tool_panel_section_keys' ] )
+        tool_panel_section_mapping = decoded_kwd.get( 'tool_panel_section_mapping', {} )
+        repo_info_dicts = util.listify( decoded_kwd[ 'repo_info_dicts' ] )
+        install_resolver_dependencies = decoded_kwd['install_resolver_dependencies']
+        install_tool_dependencies = decoded_kwd['install_tool_dependencies']
+        filtered_repo_info_dicts = []
+        filtered_tool_panel_section_keys = []
+        repositories_for_installation = []
+        # Some repositories may have repository dependencies that are required to be installed before the
+        # dependent repository, so we'll order the list of tsr_ids to ensure all repositories install in the
+        # required order.
+        ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \
+            self.order_components_for_installation( tsr_ids,
+                                                    repo_info_dicts,
+                                                    tool_panel_section_keys=tool_panel_section_keys )
+        for tsr_id in ordered_tsr_ids:
+            repository = self.install_model.context.query( self.install_model.ToolShedRepository ) \
+                .get( self.app.security.decode_id( tsr_id ) )
+            if repository.status in [ self.install_model.ToolShedRepository.installation_status.NEW,
+                                      self.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+                repositories_for_installation.append( repository )
+                repo_info_dict, tool_panel_section_key = \
+                    self.get_repository_components_for_installation( tsr_id,
+                                                                     ordered_tsr_ids,
+                                                                     ordered_repo_info_dicts,
+                                                                     ordered_tool_panel_section_keys )
+                filtered_repo_info_dicts.append( repo_info_dict )
+                filtered_tool_panel_section_keys.append( tool_panel_section_key )
+
+        installed_tool_shed_repositories = []
+        if repositories_for_installation:
+            for tool_shed_repository, repo_info_dict, tool_panel_section_key in zip( repositories_for_installation,
+                                                                                     filtered_repo_info_dicts,
+                                                                                     filtered_tool_panel_section_keys ):
+                self.install_tool_shed_repository( tool_shed_repository,
+                                                   repo_info_dict=repo_info_dict,
+                                                   tool_panel_section_key=tool_panel_section_key,
+                                                   shed_tool_conf=shed_tool_conf,
+                                                   tool_path=tool_path,
+                                                   install_resolver_dependencies=install_resolver_dependencies,
+                                                   install_tool_dependencies=install_tool_dependencies,
+                                                   reinstalling=reinstalling,
+                                                   tool_panel_section_mapping=tool_panel_section_mapping )
+                installed_tool_shed_repositories.append( tool_shed_repository )
+        else:
+            raise RepositoriesInstalledException()
+        return installed_tool_shed_repositories
+
+    def install_tool_shed_repository( self, tool_shed_repository, repo_info_dict, tool_panel_section_key, shed_tool_conf, tool_path,
+                                      install_resolver_dependencies, install_tool_dependencies, reinstalling=False, tool_panel_section_mapping={} ):
+        self.app.install_model.context.flush()
+        if tool_panel_section_key:
+            _, tool_section = self.app.toolbox.get_section( tool_panel_section_key )
+            if tool_section is None:
+                log.debug( 'Invalid tool_panel_section_key "%s" specified.  Tools will be loaded outside of sections in the tool panel.',
+                           str( tool_panel_section_key ) )
+        else:
+            tool_section = None
+        if isinstance( repo_info_dict, string_types ):
+            repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+        # Clone the repository to the configured location.
+        self.update_tool_shed_repository_status( tool_shed_repository,
+                                                 self.install_model.ToolShedRepository.installation_status.CLONING )
+        repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
+        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+        relative_clone_dir = repository_util.generate_tool_shed_repository_install_dir( repository_clone_url,
+                                                                                        tool_shed_repository.installed_changeset_revision )
+        relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
+        install_dir = os.path.join( tool_path, relative_install_dir )
+        cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+        if cloned_ok:
+            if reinstalling:
+                # Since we're reinstalling the repository we need to find the latest changeset revision to
+                # which it can be updated.
+                changeset_revision_dict = self.app.update_repository_manager.get_update_to_changeset_revision_and_ctx_rev( tool_shed_repository )
+                current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
+                current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
+                if current_ctx_rev != ctx_rev:
+                    repo = hg_util.get_repo_for_repository( self.app,
+                                                            repository=None,
+                                                            repo_path=os.path.abspath( install_dir ),
+                                                            create=False )
+                    hg_util.pull_repository( repo, repository_clone_url, current_changeset_revision )
+                    hg_util.update_repository( repo, ctx_rev=current_ctx_rev )
+            self.__handle_repository_contents( tool_shed_repository=tool_shed_repository,
+                                               tool_path=tool_path,
+                                               repository_clone_url=repository_clone_url,
+                                               relative_install_dir=relative_install_dir,
+                                               tool_shed=tool_shed_repository.tool_shed,
+                                               tool_section=tool_section,
+                                               shed_tool_conf=shed_tool_conf,
+                                               reinstalling=reinstalling,
+                                               tool_panel_section_mapping=tool_panel_section_mapping )
+            self.install_model.context.refresh( tool_shed_repository )
+            metadata = tool_shed_repository.metadata
+            if 'tools' in metadata and install_resolver_dependencies:
+                self.update_tool_shed_repository_status( tool_shed_repository,
+                                                         self.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+                requirements = suc.get_unique_requirements_from_repository(tool_shed_repository)
+                for req in requirements:
+                    self._view.install_dependency(id=None, **req)
+                if self.app.config.use_cached_dependency_manager:
+                    cached_requirements = []
+                    for tool_d in metadata['tools']:
+                        tool = self.app.toolbox._tools_by_id.get(tool_d['guid'], None)
+                        if tool and tool.requirements not in cached_requirements:
+                            cached_requirements.append(tool.requirements)
+                            tool.build_dependency_cache()
+            if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
+                work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itsr" )
+                # Install tool dependencies.
+                self.update_tool_shed_repository_status( tool_shed_repository,
+                                                         self.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+                # Get the tool_dependencies.xml file from the repository.
+                tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', install_dir )
+                itdm = InstallToolDependencyManager( self.app )
+                itdm.install_specified_tool_dependencies( tool_shed_repository=tool_shed_repository,
+                                                          tool_dependencies_config=tool_dependencies_config,
+                                                          tool_dependencies=tool_shed_repository.tool_dependencies,
+                                                          from_tool_migration_manager=False )
+                basic_util.remove_dir( work_dir )
+            self.update_tool_shed_repository_status( tool_shed_repository,
+                                                     self.install_model.ToolShedRepository.installation_status.INSTALLED )
+            if self.app.config.manage_dependency_relationships:
+                # Add the installed repository and any tool dependencies to the in-memory dictionaries
+                # in the installed_repository_manager.
+                self.app.installed_repository_manager.handle_repository_install( tool_shed_repository )
+        else:
+            # An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
+            repository_util.set_repository_attributes( self.app,
+                                                       tool_shed_repository,
+                                                       status=self.install_model.ToolShedRepository.installation_status.ERROR,
+                                                       error_message=error_message,
+                                                       deleted=False,
+                                                       uninstalled=False,
+                                                       remove_from_disk=True )
+
+    def order_components_for_installation( self, tsr_ids, repo_info_dicts, tool_panel_section_keys ):
+        """
+        Some repositories may have repository dependencies that are required to be installed
+        before the dependent repository.  This method will inspect the list of repositories
+        about to be installed and make sure to order them appropriately.  For each repository
+        about to be installed, if required repositories are not contained in the list of repositories
+        about to be installed, then they are not considered.  Repository dependency definitions
+        that contain circular dependencies should not result in an infinite loop, but obviously
+        prior installation will not be handled for one or more of the repositories that require
+        prior installation.
+        """
+        ordered_tsr_ids = []
+        ordered_repo_info_dicts = []
+        ordered_tool_panel_section_keys = []
+        # Create a dictionary whose keys are the received tsr_ids and whose values are a list of
+        # tsr_ids, each of which is contained in the received list of tsr_ids and whose associated
+        # repository must be installed prior to the repository associated with the tsr_id key.
+        prior_install_required_dict = repository_util.get_prior_import_or_install_required_dict( self.app,
+                                                                                                 tsr_ids,
+                                                                                                 repo_info_dicts )
+        processed_tsr_ids = []
+        while len( processed_tsr_ids ) != len( prior_install_required_dict.keys() ):
+            tsr_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_install_required_dict,
+                                                                               processed_tsr_ids )
+            processed_tsr_ids.append( tsr_id )
+            # Create the ordered_tsr_ids, the ordered_repo_info_dicts and the ordered_tool_panel_section_keys lists.
+            if tsr_id not in ordered_tsr_ids:
+                prior_install_required_ids = prior_install_required_dict[ tsr_id ]
+                for prior_install_required_id in prior_install_required_ids:
+                    if prior_install_required_id not in ordered_tsr_ids:
+                        # Install the associated repository dependency first.
+                        prior_repo_info_dict, prior_tool_panel_section_key = \
+                            self.get_repository_components_for_installation( prior_install_required_id,
+                                                                             tsr_ids,
+                                                                             repo_info_dicts,
+                                                                             tool_panel_section_keys=tool_panel_section_keys )
+                        ordered_tsr_ids.append( prior_install_required_id )
+                        ordered_repo_info_dicts.append( prior_repo_info_dict )
+                        ordered_tool_panel_section_keys.append( prior_tool_panel_section_key )
+                repo_info_dict, tool_panel_section_key = \
+                    self.get_repository_components_for_installation( tsr_id,
+                                                                     tsr_ids,
+                                                                     repo_info_dicts,
+                                                                     tool_panel_section_keys=tool_panel_section_keys )
+                if tsr_id not in ordered_tsr_ids:
+                    ordered_tsr_ids.append( tsr_id )
+                    ordered_repo_info_dicts.append( repo_info_dict )
+                    ordered_tool_panel_section_keys.append( tool_panel_section_key )
+        return ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys
+
+    def update_tool_shed_repository_status( self, tool_shed_repository, status, error_message=None ):
+        """
+        Update the status of a tool shed repository in the process of being installed into Galaxy.
+        """
+        tool_shed_repository.status = status
+        if error_message:
+            tool_shed_repository.error_message = str( error_message )
+        self.install_model.context.add( tool_shed_repository )
+        self.install_model.context.flush()
+
+    def __assert_can_install_dependencies(self):
+        if self.app.config.tool_dependency_dir is None:
+            no_tool_dependency_dir_message = "Tool dependencies can be automatically installed only if you set "
+            no_tool_dependency_dir_message += "the value of your 'tool_dependency_dir' setting in your Galaxy "
+            no_tool_dependency_dir_message += "configuration file (galaxy.ini) and restart your Galaxy server.  "
+            raise exceptions.ConfigDoesNotAllowException( no_tool_dependency_dir_message )
+
+
+class RepositoriesInstalledException(exceptions.RequestParameterInvalidException):
+
+    def __init__(self):
+        super(RepositoriesInstalledException, self).__init__('All repositories that you are attempting to install have been previously installed.')
+
+
+def fetch_tool_versions( app, tool_shed_repository ):
+    """ Fetch a data structure describing tool shed versions from the tool shed
+    corresponding to a tool_shed_repository object.
+    """
+    try:
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( tool_shed_repository.tool_shed ) )
+        params = dict( name=str( tool_shed_repository.name ),
+                       owner=str( tool_shed_repository.owner ),
+                       changeset_revision=str( tool_shed_repository.changeset_revision ) )
+        pathspec = [ 'repository', 'get_tool_versions' ]
+        url = util.build_url( tool_shed_url, pathspec=pathspec, params=params )
+        text = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        if text:
+            return json.loads( text )
+        else:
+            raise Exception("No content returned from Tool Shed repository version request to %s" % url)
+    except Exception:
+        log.exception("Failed to fetch tool version information for Tool Shed repository.")
+        raise
diff --git a/lib/tool_shed/galaxy_install/installed_repository_manager.py b/lib/tool_shed/galaxy_install/installed_repository_manager.py
new file mode 100644
index 0000000..c541bd9
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -0,0 +1,1064 @@
+"""
+Class encapsulating the management of repositories installed into Galaxy from the Tool Shed.
+"""
+import copy
+import logging
+import os
+
+from sqlalchemy import and_, false, true
+
+from galaxy import util
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
+from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
+from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
+from tool_shed.galaxy_install.tools import data_manager
+from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class InstalledRepositoryManager( object ):
+
+    def __init__( self, app ):
+        """
+        Among other things, keep in-memory sets of tuples defining installed repositories and tool dependencies, along with
+        the relationships between each of them.  This will allow for quick discovery of those repositories or components that
+        can be uninstalled.  The feature allowing a Galaxy administrator to uninstall a repository should not be available to
+        repositories or tool dependency packages that are required by other repositories or their contents (packages). The
+        uninstall feature should be available only at the repository hierarchy level where every dependency will be uninstalled.
+        The exception to this is when an item (repository or tool dependency package) is not in an INSTALLED state - in these
+        cases, the specific item can be uninstalled in order to attempt re-installation.
+        """
+        self.app = app
+        self.install_model = self.app.install_model
+        self.context = self.install_model.context
+        self.tool_configs = self.app.config.tool_configs
+        if self.app.config.migrated_tools_config not in self.tool_configs:
+            self.tool_configs.append( self.app.config.migrated_tools_config )
+        self.installed_repository_dicts = []
+        # Keep an in-memory dictionary whose keys are tuples defining tool_shed_repository objects (whose status is 'Installed')
+        # and whose values are a list of tuples defining tool_shed_repository objects (whose status can be anything) required by
+        # the key.  The value defines the entire repository dependency tree.
+        self.repository_dependencies_of_installed_repositories = {}
+        # Keep an in-memory dictionary whose keys are tuples defining tool_shed_repository objects (whose status is 'Installed')
+        # and whose values are a list of tuples defining tool_shed_repository objects (whose status is 'Installed') required by
+        # the key.  The value defines the entire repository dependency tree.
+        self.installed_repository_dependencies_of_installed_repositories = {}
+        # Keep an in-memory dictionary whose keys are tuples defining tool_shed_repository objects (whose status is 'Installed')
+        # and whose values are a list of tuples defining tool_shed_repository objects (whose status is 'Installed') that require
+        # the key.
+        self.installed_dependent_repositories_of_installed_repositories = {}
+        # Keep an in-memory dictionary whose keys are tuples defining tool_shed_repository objects (whose status is 'Installed')
+        # and whose values are a list of tuples defining its immediate tool_dependency objects (whose status can be anything).
+        # The value defines only the immediate tool dependencies of the repository and does not include any dependencies of the
+        # tool dependencies.
+        self.tool_dependencies_of_installed_repositories = {}
+        # Keep an in-memory dictionary whose keys are tuples defining tool_shed_repository objects (whose status is 'Installed')
+        # and whose values are a list of tuples defining its immediate tool_dependency objects (whose status is 'Installed').
+        # The value defines only the immediate tool dependencies of the repository and does not include any dependencies of the
+        # tool dependencies.
+        self.installed_tool_dependencies_of_installed_repositories = {}
+        # Keep an in-memory dictionary whose keys are tuples defining tool_dependency objects (whose status is 'Installed') and
+        # whose values are a list of tuples defining tool_dependency objects (whose status can be anything) required by the
+        # installed tool dependency at runtime.  The value defines the entire tool dependency tree.
+        self.runtime_tool_dependencies_of_installed_tool_dependencies = {}
+        # Keep an in-memory dictionary whose keys are tuples defining tool_dependency objects (whose status is 'Installed') and
+        # whose values are a list of tuples defining tool_dependency objects (whose status is 'Installed') that require the key
+        # at runtime.  The value defines the entire tool dependency tree.
+        self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies = {}
+        if app.config.manage_dependency_relationships:
+            # Load defined dependency relationships for installed tool shed repositories and their contents.
+            self.load_dependency_relationships()
+
+    def activate_repository( self, repository ):
+        """Activate an installed tool shed repository that has been marked as deactivated."""
+        repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
+        shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
+        repository.deleted = False
+        repository.status = self.install_model.ToolShedRepository.installation_status.INSTALLED
+        if repository.includes_tools_for_display_in_tool_panel:
+            tpm = tool_panel_manager.ToolPanelManager( self.app )
+            irmm = InstalledRepositoryMetadataManager( app=self.app,
+                                                       tpm=tpm,
+                                                       repository=repository,
+                                                       changeset_revision=repository.changeset_revision,
+                                                       metadata_dict=repository.metadata )
+            repository_tools_tups = irmm.get_repository_tools_tups()
+            # Reload tools into the appropriate tool panel section.
+            tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
+            tpm.add_to_tool_panel( repository.name,
+                                   repository_clone_url,
+                                   repository.installed_changeset_revision,
+                                   repository_tools_tups,
+                                   repository.owner,
+                                   shed_tool_conf,
+                                   tool_panel_dict,
+                                   new_install=False )
+            if repository.includes_data_managers:
+                tp, data_manager_relative_install_dir = repository.get_tool_relative_path( self.app )
+                # Hack: append repository.name, which is actually the root directory of the installed repository.
+                data_manager_relative_install_dir = os.path.join( data_manager_relative_install_dir, repository.name )
+                dmh = data_manager.DataManagerHandler( self.app )
+                dmh.install_data_managers( self.app.config.shed_data_manager_config_file,
+                                           repository.metadata,
+                                           repository.get_shed_config_dict( self.app ),
+                                           data_manager_relative_install_dir,
+                                           repository,
+                                           repository_tools_tups )
+        self.install_model.context.add( repository )
+        self.install_model.context.flush()
+        if repository.includes_datatypes:
+            if tool_path:
+                repository_install_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir ) )
+            else:
+                repository_install_dir = os.path.abspath( relative_install_dir )
+            # Activate proprietary datatypes.
+            cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
+            installed_repository_dict = cdl.load_installed_datatypes( repository,
+                                                                      repository_install_dir,
+                                                                      deactivate=False )
+            if installed_repository_dict:
+                converter_path = installed_repository_dict.get( 'converter_path' )
+                if converter_path is not None:
+                    cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=False )
+                display_path = installed_repository_dict.get( 'display_path' )
+                if display_path is not None:
+                    cdl.load_installed_display_applications( installed_repository_dict, deactivate=False )
+
+    def add_entry_to_installed_repository_dependencies_of_installed_repositories( self, repository ):
+        """
+        Add an entry to self.installed_repository_dependencies_of_installed_repositories.  A side-effect of this method
+        is the population of self.installed_dependent_repositories_of_installed_repositories.  Since this method discovers
+        all repositories required by the received repository, it can use the list to add entries to the reverse dictionary.
+        """
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        tool_shed, name, owner, installed_changeset_revision = repository_tup
+        # Get the list of repository dependencies for this repository.
+        status = self.install_model.ToolShedRepository.installation_status.INSTALLED
+        repository_dependency_tups = self.get_repository_dependency_tups_for_installed_repository( repository, status=status )
+        # Add an entry to self.installed_repository_dependencies_of_installed_repositories.
+        if repository_tup not in self.installed_repository_dependencies_of_installed_repositories:
+            debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
+            debug_msg += "to installed_repository_dependencies_of_installed_repositories."
+            log.debug( debug_msg )
+            self.installed_repository_dependencies_of_installed_repositories[ repository_tup ] = repository_dependency_tups
+        # Use the repository_dependency_tups to add entries to the reverse dictionary
+        # self.installed_dependent_repositories_of_installed_repositories.
+        for required_repository_tup in repository_dependency_tups:
+            debug_msg = "Appending revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
+            debug_msg += "to all dependent repositories in installed_dependent_repositories_of_installed_repositories."
+            log.debug( debug_msg )
+            if required_repository_tup in self.installed_dependent_repositories_of_installed_repositories:
+                self.installed_dependent_repositories_of_installed_repositories[ required_repository_tup ].append( repository_tup )
+            else:
+                self.installed_dependent_repositories_of_installed_repositories[ required_repository_tup ] = [ repository_tup ]
+
+    def add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ):
+        """Add an entry to self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies."""
+        tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+        if tool_dependency_tup not in self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies:
+            tool_shed_repository_id, name, version, type = tool_dependency_tup
+            debug_msg = "Adding an entry for version %s of %s %s " % ( version, type, name )
+            debug_msg += "to installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies."
+            log.debug( debug_msg )
+            status = self.install_model.ToolDependency.installation_status.INSTALLED
+            installed_runtime_dependent_tool_dependency_tups = self.get_runtime_dependent_tool_dependency_tuples( tool_dependency,
+                                                                                                                  status=status )
+            self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies[ tool_dependency_tup ] = \
+                installed_runtime_dependent_tool_dependency_tups
+
+    def add_entry_to_installed_tool_dependencies_of_installed_repositories( self, repository ):
+        """Add an entry to self.installed_tool_dependencies_of_installed_repositories."""
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        if repository_tup not in self.installed_tool_dependencies_of_installed_repositories:
+            tool_shed, name, owner, installed_changeset_revision = repository_tup
+            debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
+            debug_msg += "to installed_tool_dependencies_of_installed_repositories."
+            log.debug( debug_msg )
+            installed_tool_dependency_tups = []
+            for tool_dependency in repository.tool_dependencies:
+                if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED:
+                    tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+                    installed_tool_dependency_tups.append( tool_dependency_tup )
+            self.installed_tool_dependencies_of_installed_repositories[ repository_tup ] = installed_tool_dependency_tups
+
+    def add_entry_to_repository_dependencies_of_installed_repositories( self, repository ):
+        """Add an entry to self.repository_dependencies_of_installed_repositories."""
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        if repository_tup not in self.repository_dependencies_of_installed_repositories:
+            tool_shed, name, owner, installed_changeset_revision = repository_tup
+            debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
+            debug_msg += "to repository_dependencies_of_installed_repositories."
+            log.debug( debug_msg )
+            repository_dependency_tups = self.get_repository_dependency_tups_for_installed_repository( repository, status=None )
+            self.repository_dependencies_of_installed_repositories[ repository_tup ] = repository_dependency_tups
+
+    def add_entry_to_runtime_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ):
+        """Add an entry to self.runtime_tool_dependencies_of_installed_tool_dependencies."""
+        tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+        if tool_dependency_tup not in self.runtime_tool_dependencies_of_installed_tool_dependencies:
+            tool_shed_repository_id, name, version, type = tool_dependency_tup
+            debug_msg = "Adding an entry for version %s of %s %s " % ( version, type, name )
+            debug_msg += "to runtime_tool_dependencies_of_installed_tool_dependencies."
+            log.debug( debug_msg )
+            runtime_dependent_tool_dependency_tups = self.get_runtime_dependent_tool_dependency_tuples( tool_dependency,
+                                                                                                        status=None )
+            self.runtime_tool_dependencies_of_installed_tool_dependencies[ tool_dependency_tup ] = \
+                runtime_dependent_tool_dependency_tups
+
+    def add_entry_to_tool_dependencies_of_installed_repositories( self, repository ):
+        """Add an entry to self.tool_dependencies_of_installed_repositories."""
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        if repository_tup not in self.tool_dependencies_of_installed_repositories:
+            tool_shed, name, owner, installed_changeset_revision = repository_tup
+            debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
+            debug_msg += "to tool_dependencies_of_installed_repositories."
+            log.debug( debug_msg )
+            tool_dependency_tups = []
+            for tool_dependency in repository.tool_dependencies:
+                tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+                tool_dependency_tups.append( tool_dependency_tup )
+            self.tool_dependencies_of_installed_repositories[ repository_tup ] = tool_dependency_tups
+
+    def get_containing_repository_for_tool_dependency( self, tool_dependency_tup ):
+        tool_shed_repository_id, name, version, type = tool_dependency_tup
+        return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ).get( tool_shed_repository_id )
+
+    def get_dependencies_for_repository( self, tool_shed_url, repo_info_dict, includes_tool_dependencies, updating=False ):
+        """
+        Return a dictionary containing the sets of installed and missing tool dependencies and repository
+        dependencies associated with the repository defined by the received repo_info_dict.
+        """
+        rdim = repository_dependency_manager.RepositoryDependencyInstallManager( self.app )
+        repository = None
+        installed_rd = {}
+        installed_td = {}
+        missing_rd = {}
+        missing_td = {}
+        name = next(iter(repo_info_dict))
+        repo_info_tuple = repo_info_dict[ name ]
+        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+            repository_util.get_repo_info_tuple_contents( repo_info_tuple )
+        if tool_dependencies:
+            if not includes_tool_dependencies:
+                includes_tool_dependencies = True
+            # Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies.
+            # It is safe to assign (rather than update) installed_td and missing_td here because both are still empty.
+            installed_td, missing_td = self.get_installed_and_missing_tool_dependencies_for_repository( tool_dependencies )
+        # In cases where a repository dependency is required only for compiling a dependent repository's
+        # tool dependency, the value of repository_dependencies will be an empty dictionary here.
+        if repository_dependencies:
+            # We have a repository with one or more defined repository dependencies.
+            if not repository:
+                repository = repository_util.get_repository_for_dependency_relationship( self.app,
+                                                                                         tool_shed_url,
+                                                                                         name,
+                                                                                         repository_owner,
+                                                                                         changeset_revision )
+            if not updating and repository and repository.metadata:
+                installed_rd, missing_rd = self.get_installed_and_missing_repository_dependencies( repository )
+            else:
+                installed_rd, missing_rd = \
+                    self.get_installed_and_missing_repository_dependencies_for_new_or_updated_install( repo_info_tuple )
+            # Discover all repository dependencies and retrieve information for installing them.
+            all_repo_info_dict = rdim.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+            has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
+            has_repository_dependencies_only_if_compiling_contained_td = \
+                all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
+            includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+            includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
+            includes_tools = all_repo_info_dict.get( 'includes_tools', False )
+            required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
+            # Display tool dependencies defined for each of the repository dependencies.
+            if required_repo_info_dicts:
+                required_tool_dependencies = {}
+                for rid in required_repo_info_dicts:
+                    for name, repo_info_tuple in rid.items():
+                        description, repository_clone_url, changeset_revision, ctx_rev, \
+                            repository_owner, rid_repository_dependencies, rid_tool_dependencies = \
+                            repository_util.get_repo_info_tuple_contents( repo_info_tuple )
+                        if rid_tool_dependencies:
+                            for td_key, td_dict in rid_tool_dependencies.items():
+                                if td_key not in required_tool_dependencies:
+                                    required_tool_dependencies[ td_key ] = td_dict
+                if required_tool_dependencies:
+                    # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
+                    required_installed_td, required_missing_td = \
+                        self.get_installed_and_missing_tool_dependencies_for_repository( required_tool_dependencies )
+                    if required_installed_td:
+                        if not includes_tool_dependencies:
+                            includes_tool_dependencies = True
+                        for td_key, td_dict in required_installed_td.items():
+                            if td_key not in installed_td:
+                                installed_td[ td_key ] = td_dict
+                    if required_missing_td:
+                        if not includes_tool_dependencies:
+                            includes_tool_dependencies = True
+                        for td_key, td_dict in required_missing_td.items():
+                            if td_key not in missing_td:
+                                missing_td[ td_key ] = td_dict
+        else:
+            # We have a single repository with (possibly) no defined repository dependencies.
+            all_repo_info_dict = rdim.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+            has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
+            has_repository_dependencies_only_if_compiling_contained_td = \
+                all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
+            includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+            includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
+            includes_tools = all_repo_info_dict.get( 'includes_tools', False )
+            required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
+        dependencies_for_repository_dict = \
+            dict( changeset_revision=changeset_revision,
+                  has_repository_dependencies=has_repository_dependencies,
+                  has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
+                  includes_tool_dependencies=includes_tool_dependencies,
+                  includes_tools=includes_tools,
+                  includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+                  installed_repository_dependencies=installed_rd,
+                  installed_tool_dependencies=installed_td,
+                  missing_repository_dependencies=missing_rd,
+                  missing_tool_dependencies=missing_td,
+                  name=name,
+                  repository_owner=repository_owner )
+        return dependencies_for_repository_dict
+
+    def get_installed_and_missing_repository_dependencies( self, repository ):
+        """
+        Return the installed and missing repository dependencies for a tool shed repository that has a record
+        in the Galaxy database, but may or may not be installed.  In this case, the repository dependencies are
+        associated with the repository in the database.  Do not include a repository dependency if it is required
+        only to compile a tool dependency defined for the dependent repository since these special kinds of repository
+        dependencies are really a dependency of the dependent repository's contained tool dependency, and only
+        if that tool dependency requires compilation.
+        """
+        missing_repository_dependencies = {}
+        installed_repository_dependencies = {}
+        has_repository_dependencies = repository.has_repository_dependencies
+        if has_repository_dependencies:
+            # The repository dependencies container will include only the immediate repository
+            # dependencies of this repository, so the container will be only a single level in depth.
+            metadata = repository.metadata
+            installed_rd_tups = []
+            missing_rd_tups = []
+            for tsr in repository.repository_dependencies:
+                prior_installation_required = self.set_prior_installation_required( repository, tsr )
+                only_if_compiling_contained_td = self.set_only_if_compiling_contained_td( repository, tsr )
+                rd_tup = [ tsr.tool_shed,
+                           tsr.name,
+                           tsr.owner,
+                           tsr.changeset_revision,
+                           prior_installation_required,
+                           only_if_compiling_contained_td,
+                           tsr.id,
+                           tsr.status ]
+                if tsr.status == self.app.install_model.ToolShedRepository.installation_status.INSTALLED:
+                    installed_rd_tups.append( rd_tup )
+                else:
+                    # We'll only add the rd_tup to the missing_rd_tups list if the received repository
+                    # has tool dependencies that are not correctly installed.  This may prove to be a
+                    # weak check since the repository in question may not have anything to do with
+                    # compiling the missing tool dependencies.  If we discover that this is a problem,
+                    # more granular checking will be necessary here.
+                    if repository.missing_tool_dependencies:
+                        if not self.repository_dependency_needed_only_for_compiling_tool_dependency( repository, tsr ):
+                            missing_rd_tups.append( rd_tup )
+                    else:
+                        missing_rd_tups.append( rd_tup )
+            if installed_rd_tups or missing_rd_tups:
+                # Get the description from the metadata in case it has a value.
+                repository_dependencies = metadata.get( 'repository_dependencies', {} )
+                description = repository_dependencies.get( 'description', None )
+                # We need to add a root_key entry to one or both of the installed_repository_dependencies
+                # and missing_repository_dependencies dictionaries for proper display parsing.
+                root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
+                                                                                               repository.name,
+                                                                                               repository.owner,
+                                                                                               repository.installed_changeset_revision,
+                                                                                               prior_installation_required,
+                                                                                               only_if_compiling_contained_td )
+                if installed_rd_tups:
+                    installed_repository_dependencies[ 'root_key' ] = root_key
+                    installed_repository_dependencies[ root_key ] = installed_rd_tups
+                    installed_repository_dependencies[ 'description' ] = description
+                if missing_rd_tups:
+                    missing_repository_dependencies[ 'root_key' ] = root_key
+                    missing_repository_dependencies[ root_key ] = missing_rd_tups
+                    missing_repository_dependencies[ 'description' ] = description
+        return installed_repository_dependencies, missing_repository_dependencies
+
+    def get_installed_and_missing_repository_dependencies_for_new_or_updated_install( self, repo_info_tuple ):
+        """
+        Parse the received repository_dependencies dictionary that is associated with a repository being
+        installed into Galaxy for the first time and attempt to determine repository dependencies that are
+        already installed and those that are not.
+        """
+        missing_repository_dependencies = {}
+        installed_repository_dependencies = {}
+        missing_rd_tups = []
+        installed_rd_tups = []
+        ( description, repository_clone_url, changeset_revision, ctx_rev,
+          repository_owner, repository_dependencies, tool_dependencies ) = repository_util.get_repo_info_tuple_contents( repo_info_tuple )
+        if repository_dependencies:
+            description = repository_dependencies[ 'description' ]
+            root_key = repository_dependencies[ 'root_key' ]
+            # The repository dependencies container will include only the immediate repository dependencies of
+            # this repository, so the container will be only a single level in depth.
+            for key, rd_tups in repository_dependencies.items():
+                if key in [ 'description', 'root_key' ]:
+                    continue
+                for rd_tup in rd_tups:
+                    tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                        common_util.parse_repository_dependency_tuple( rd_tup )
+                    # Updates to installed repository revisions may have occurred, so make sure to locate the
+                    # appropriate repository revision if one exists.  We need to create a temporary repo_info_tuple
+                    # that includes the correct repository owner which we get from the current rd_tup.  The current
+                    # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner,
+                    #                     repository_dependencies, installed_td )
+                    tmp_clone_url = common_util.generate_clone_url_from_repo_info_tup( self.app, rd_tup )
+                    tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None )
+                    repository, installed_changeset_revision = repository_util.repository_was_previously_installed( self.app,
+                                                                                                                    tool_shed,
+                                                                                                                    name,
+                                                                                                                    tmp_repo_info_tuple,
+                                                                                                                    from_tip=False )
+                    if repository:
+                        new_rd_tup = [ tool_shed,
+                                       name,
+                                       owner,
+                                       changeset_revision,
+                                       prior_installation_required,
+                                       only_if_compiling_contained_td,
+                                       repository.id,
+                                       repository.status ]
+                        if repository.status == self.install_model.ToolShedRepository.installation_status.INSTALLED:
+                            if new_rd_tup not in installed_rd_tups:
+                                installed_rd_tups.append( new_rd_tup )
+                        else:
+                            # A repository dependency that is not installed will not be considered missing if its value
+                            # for only_if_compiling_contained_td is True.  This is because this type of repository dependency
+                            # will only be considered at the time that the specified tool dependency is being installed, and
+                            # even then only if the compiled binary of the tool dependency could not be installed due to the
+                            # unsupported installation environment.
+                            if not util.asbool( only_if_compiling_contained_td ):
+                                if new_rd_tup not in missing_rd_tups:
+                                    missing_rd_tups.append( new_rd_tup )
+                    else:
+                        new_rd_tup = [ tool_shed,
+                                       name,
+                                       owner,
+                                       changeset_revision,
+                                       prior_installation_required,
+                                       only_if_compiling_contained_td,
+                                       None,
+                                       'Never installed' ]
+                        if not util.asbool( only_if_compiling_contained_td ):
+                            # A repository dependency that is not installed will not be considered missing if its value for
+                            # only_if_compiling_contained_td is True - see above...
+                            if new_rd_tup not in missing_rd_tups:
+                                missing_rd_tups.append( new_rd_tup )
+        if installed_rd_tups:
+            installed_repository_dependencies[ 'root_key' ] = root_key
+            installed_repository_dependencies[ root_key ] = installed_rd_tups
+            installed_repository_dependencies[ 'description' ] = description
+        if missing_rd_tups:
+            missing_repository_dependencies[ 'root_key' ] = root_key
+            missing_repository_dependencies[ root_key ] = missing_rd_tups
+            missing_repository_dependencies[ 'description' ] = description
+        return installed_repository_dependencies, missing_repository_dependencies
+
+    def get_installed_and_missing_tool_dependencies_for_repository( self, tool_dependencies_dict ):
+        """
+        Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories
+        being installed into Galaxy.
+        """
+        # FIXME: This implementation breaks when updates to a repository contain dependencies that result in
+        # multiple entries for a specific tool dependency.  A scenario where this can happen is where 2 repositories
+        # define the same dependency internally (not using the complex repository dependency definition to a separate
+        # package repository approach).  If 2 repositories contain the same tool_dependencies.xml file, one dependency
+        # will be lost since the values in these returned dictionaries are not lists.  All tool dependency dictionaries
+        # should have lists as values.  These scenarios are probably extreme corner cases, but still should be handled.
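+        # The received tool_dependencies_dict is assumed to be keyed either by 'set_environment'
+        # (whose value is a list of requirement dictionaries) or by per-dependency keys such as
+        # (hypothetically) 'bwa/0.5.9', whose values are single requirement dictionaries.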
+        installed_tool_dependencies = {}
+        missing_tool_dependencies = {}
+        if tool_dependencies_dict:
+            # Make sure not to change anything in the received tool_dependencies_dict as that would be a bad side-effect!
+            tmp_tool_dependencies_dict = copy.deepcopy( tool_dependencies_dict )
+            for td_key, val in tmp_tool_dependencies_dict.items():
+                # Default the status to NEVER_INSTALLED.
+                tool_dependency_status = self.install_model.ToolDependency.installation_status.NEVER_INSTALLED
+                # 'set_environment' tool dependencies are stored as a list of requirement dictionaries.
+                if td_key == 'set_environment':
+                    for requirement_dict in val:
+                        # Each requirement_dict looks something like this:
+                        # {'repository_name': 'xx',
+                        #  'name': 'bwa',
+                        #  'version': '0.5.9',
+                        #  'repository_owner': 'yy',
+                        #  'changeset_revision': 'zz',
+                        #  'type': 'package'}
+                        # Reset the default status for each requirement in the list.
+                        tool_dependency_status = self.install_model.ToolDependency.installation_status.NEVER_INSTALLED
+                        tool_dependency = \
+                            tool_dependency_util.get_tool_dependency_by_name_version_type( self.app,
+                                                                                           requirement_dict.get( 'name', None ),
+                                                                                           requirement_dict.get( 'version', None ),
+                                                                                           requirement_dict.get( 'type', 'package' ) )
+                        if tool_dependency:
+                            tool_dependency_status = tool_dependency.status
+                        requirement_dict[ 'status' ] = tool_dependency_status
+                        # Classify each requirement individually so that entries sharing td_key
+                        # accumulate in a single list instead of overwriting or duplicating one another.
+                        if tool_dependency_status in [ self.install_model.ToolDependency.installation_status.INSTALLED ]:
+                            installed_tool_dependencies.setdefault( td_key, [] ).append( requirement_dict )
+                        else:
+                            missing_tool_dependencies.setdefault( td_key, [] ).append( requirement_dict )
+                else:
+                    # The val dictionary looks something like this:
+                    # {'repository_name': 'xx',
+                    #  'name': 'bwa',
+                    #  'version': '0.5.9',
+                    #  'repository_owner': 'yy',
+                    #  'changeset_revision': 'zz',
+                    #  'type': 'package'}
+                    tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type( self.app,
+                                                                                                     val.get( 'name', None ),
+                                                                                                     val.get( 'version', None ),
+                                                                                                     val.get( 'type', 'package' ) )
+                    if tool_dependency:
+                        tool_dependency_status = tool_dependency.status
+                    val[ 'status' ] = tool_dependency_status
+                    # Classify the single requirement dictionary for this td_key.
+                    if tool_dependency_status in [ self.install_model.ToolDependency.installation_status.INSTALLED ]:
+                        installed_tool_dependencies[ td_key ] = val
+                    else:
+                        missing_tool_dependencies[ td_key ] = val
+        return installed_tool_dependencies, missing_tool_dependencies
+
+    def get_repository_dependency_tups_for_installed_repository( self, repository, dependency_tups=None, status=None ):
+        """
+        Return a list of tuples defining tool_shed_repository objects (whose status can be anything) required by the
+        received repository.  The returned list defines the entire repository dependency tree.  This method is called
+        only from Galaxy.
+        """
+        if dependency_tups is None:
+            dependency_tups = []
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        for rrda in repository.required_repositories:
+            repository_dependency = rrda.repository_dependency
+            required_repository = repository_dependency.repository
+            if status is None or required_repository.status == status:
+                required_repository_tup = self.get_repository_tuple_for_installed_repository_manager( required_repository )
+                if required_repository_tup == repository_tup:
+                    # We have a circular repository dependency relationship, skip this entry.
+                    continue
+                if required_repository_tup not in dependency_tups:
+                    dependency_tups.append( required_repository_tup )
+                    # Recurse into the required repository, preserving the status filter and the
+                    # shared dependency_tups accumulator, then continue with any remaining siblings.
+                    self.get_repository_dependency_tups_for_installed_repository( required_repository,
+                                                                                  dependency_tups=dependency_tups,
+                                                                                  status=status )
+        return dependency_tups
+
+    def get_repository_tuple_for_installed_repository_manager( self, repository ):
+        return ( str( repository.tool_shed ),
+                 str( repository.name ),
+                 str( repository.owner ),
+                 str( repository.installed_changeset_revision ) )
+
+    def get_repository_install_dir( self, tool_shed_repository ):
+        for tool_config in self.tool_configs:
+            tree, error_message = xml_util.parse_xml( tool_config )
+            if tree is None:
+                return None
+            root = tree.getroot()
+            tool_path = root.get( 'tool_path', None )
+            if tool_path:
+                ts = common_util.remove_port_from_tool_shed_url( str( tool_shed_repository.tool_shed ) )
+                relative_path = os.path.join( tool_path,
+                                              ts,
+                                              'repos',
+                                              str( tool_shed_repository.owner ),
+                                              str( tool_shed_repository.name ),
+                                              str( tool_shed_repository.installed_changeset_revision ) )
+                if os.path.exists( relative_path ):
+                    return relative_path
+        return None
+
+    def get_runtime_dependent_tool_dependency_tuples( self, tool_dependency, status=None ):
+        """
+        Return the list of tool dependency objects that require the received tool dependency at run time.  The returned
+        list will be filtered by the received status if it is not None.  This method is called only from Galaxy.
+        """
+        runtime_dependent_tool_dependency_tups = []
+        required_env_shell_file_path = tool_dependency.get_env_shell_file_path( self.app )
+        if required_env_shell_file_path:
+            required_env_shell_file_path = os.path.abspath( required_env_shell_file_path )
+        if required_env_shell_file_path is not None:
+            for td in self.app.install_model.context.query( self.app.install_model.ToolDependency ):
+                if status is None or td.status == status:
+                    env_shell_file_path = td.get_env_shell_file_path( self.app )
+                    if env_shell_file_path is not None:
+                        try:
+                            # Use a context manager so the file handle is always closed.
+                            with open( env_shell_file_path, 'r' ) as fh:
+                                contents = fh.read()
+                        except Exception as e:
+                            contents = None
+                            log.debug( 'Error reading file %s, so cannot determine if package %s requires package %s at run time: %s' %
+                                       ( str( env_shell_file_path ), str( td.name ), str( tool_dependency.name ), str( e ) ) )
+                        if contents is not None and contents.find( required_env_shell_file_path ) >= 0:
+                            td_tuple = self.get_tool_dependency_tuple_for_installed_repository_manager( td )
+                            runtime_dependent_tool_dependency_tups.append( td_tuple )
+        return runtime_dependent_tool_dependency_tups
+
+    def get_tool_dependency_tuple_for_installed_repository_manager( self, tool_dependency ):
+        if tool_dependency.type is None:
+            type = None
+        else:
+            type = str( tool_dependency.type )
+        return ( tool_dependency.tool_shed_repository_id, str( tool_dependency.name ), str( tool_dependency.version ), type )
+
+    def handle_existing_tool_dependencies_that_changed_in_update( self, repository, original_dependency_dict,
+                                                                  new_dependency_dict ):
+        """
+        This method is called when a Galaxy admin is getting updates for an installed tool shed
+        repository in order to cover the case where an existing tool dependency was changed (e.g.,
+        the version of the dependency was changed) but the tool version for which it is a dependency
+        was not changed.  In this case, we only want to determine if any of the dependency information
+        defined in original_dependency_dict was changed in new_dependency_dict.  We don't care if new
+        dependencies were added in new_dependency_dict since they will just be treated as missing
+        dependencies for the tool.
+        """
+        updated_tool_dependency_names = []
+        deleted_tool_dependency_names = []
+        for original_dependency_key, original_dependency_val_dict in original_dependency_dict.items():
+            if original_dependency_key not in new_dependency_dict:
+                updated_tool_dependency = self.update_existing_tool_dependency( repository,
+                                                                                original_dependency_val_dict,
+                                                                                new_dependency_dict )
+                if updated_tool_dependency:
+                    updated_tool_dependency_names.append( updated_tool_dependency.name )
+                else:
+                    deleted_tool_dependency_names.append( original_dependency_val_dict[ 'name' ] )
+        return updated_tool_dependency_names, deleted_tool_dependency_names
+
+    def handle_repository_install( self, repository ):
+        """Load the dependency relationships for a repository that was just installed or reinstalled."""
+        # Populate self.repository_dependencies_of_installed_repositories.
+        self.add_entry_to_repository_dependencies_of_installed_repositories( repository )
+        # Populate self.installed_repository_dependencies_of_installed_repositories.
+        self.add_entry_to_installed_repository_dependencies_of_installed_repositories( repository )
+        # Populate self.tool_dependencies_of_installed_repositories.
+        self.add_entry_to_tool_dependencies_of_installed_repositories( repository )
+        # Populate self.installed_tool_dependencies_of_installed_repositories.
+        self.add_entry_to_installed_tool_dependencies_of_installed_repositories( repository )
+        for tool_dependency in repository.tool_dependencies:
+            # Populate self.runtime_tool_dependencies_of_installed_tool_dependencies.
+            self.add_entry_to_runtime_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
+            # Populate self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.
+            self.add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
+
+    def handle_repository_uninstall( self, repository ):
+        """Remove the dependency relationships for a repository that was just uninstalled."""
+        for tool_dependency in repository.tool_dependencies:
+            tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+            # Remove this tool_dependency from all values in
+            # self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies
+            altered_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies = {}
+            for ( td_tup, installed_runtime_dependent_tool_dependency_tups ) in self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.items():
+                if tool_dependency_tup in installed_runtime_dependent_tool_dependency_tups:
+                    # Remove the tool_dependency from the list.
+                    installed_runtime_dependent_tool_dependency_tups.remove( tool_dependency_tup )
+                # Add the possibly altered list to the altered dictionary.
+                altered_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies[ td_tup ] = \
+                    installed_runtime_dependent_tool_dependency_tups
+            self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies = \
+                altered_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies
+            # Remove the entry for this tool_dependency from self.runtime_tool_dependencies_of_installed_tool_dependencies.
+            self.remove_entry_from_runtime_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
+            # Remove the entry for this tool_dependency from
+            # self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.
+            self.remove_entry_from_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
+        # Remove this repository's entry from self.installed_tool_dependencies_of_installed_repositories.
+        self.remove_entry_from_installed_tool_dependencies_of_installed_repositories( repository )
+        # Remove this repository's entry from self.tool_dependencies_of_installed_repositories
+        self.remove_entry_from_tool_dependencies_of_installed_repositories( repository )
+        # Remove this repository's entry from self.installed_repository_dependencies_of_installed_repositories.
+        self.remove_entry_from_installed_repository_dependencies_of_installed_repositories( repository )
+        # Remove this repository's entry from self.repository_dependencies_of_installed_repositories.
+        self.remove_entry_from_repository_dependencies_of_installed_repositories( repository )
+
+    def handle_tool_dependency_install( self, repository, tool_dependency ):
+        """Load the dependency relationships for a tool dependency that was just installed independently of its containing repository."""
+        # The received repository must have a status of 'Installed'.  The value of tool_dependency.status will either be
+        # 'Installed' or 'Error', but we only need to change the in-memory dictionaries if it is 'Installed'.
+        if tool_dependency.is_installed:
+            # Populate self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.
+            self.add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
+            # Populate self.installed_tool_dependencies_of_installed_repositories.
+            repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+            tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+            if repository_tup in self.installed_tool_dependencies_of_installed_repositories:
+                self.installed_tool_dependencies_of_installed_repositories[ repository_tup ].append( tool_dependency_tup )
+            else:
+                self.installed_tool_dependencies_of_installed_repositories[ repository_tup ] = [ tool_dependency_tup ]
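+            # Tool dependency tuples have the form ( tool_shed_repository_id, name, version, type ),
+            # e.g. with hypothetical values: ( 42, 'bwa', '0.6.2', 'package' ).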
+
+    def load_dependency_relationships( self ):
+        """Load relationships for all installed repositories and tool dependencies into in-memnory dictionaries."""
+        # Get the list of installed tool shed repositories.
+        for repository in self.context.query( self.app.install_model.ToolShedRepository ) \
+                                      .filter( self.app.install_model.ToolShedRepository.table.c.status ==
+                                               self.app.install_model.ToolShedRepository.installation_status.INSTALLED ):
+            # Populate self.repository_dependencies_of_installed_repositories.
+            self.add_entry_to_repository_dependencies_of_installed_repositories( repository )
+            # Populate self.installed_repository_dependencies_of_installed_repositories.
+            self.add_entry_to_installed_repository_dependencies_of_installed_repositories( repository )
+            # Populate self.tool_dependencies_of_installed_repositories.
+            self.add_entry_to_tool_dependencies_of_installed_repositories( repository )
+            # Populate self.installed_tool_dependencies_of_installed_repositories.
+            self.add_entry_to_installed_tool_dependencies_of_installed_repositories( repository )
+        # Get the list of installed tool dependencies.
+        for tool_dependency in self.context.query( self.app.install_model.ToolDependency ) \
+                                           .filter( self.app.install_model.ToolDependency.table.c.status ==
+                                                    self.app.install_model.ToolDependency.installation_status.INSTALLED ):
+            # Populate self.runtime_tool_dependencies_of_installed_tool_dependencies.
+            self.add_entry_to_runtime_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
+            # Populate self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.
+            self.add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
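+    # A minimal usage sketch (assuming irm is an InstalledRepositoryManager constructed with the
+    # Galaxy app): calling irm.load_dependency_relationships() once at startup populates the
+    # in-memory dictionaries above from the install database.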
+
+    def load_proprietary_datatypes( self ):
+        cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
+        for tool_shed_repository in self.context.query( self.install_model.ToolShedRepository ) \
+                                                .filter( and_( self.install_model.ToolShedRepository.table.c.includes_datatypes == true(),
+                                                               self.install_model.ToolShedRepository.table.c.deleted == false() ) ) \
+                                                .order_by( self.install_model.ToolShedRepository.table.c.id ):
+            relative_install_dir = self.get_repository_install_dir( tool_shed_repository )
+            if relative_install_dir:
+                installed_repository_dict = cdl.load_installed_datatypes( tool_shed_repository, relative_install_dir )
+                if installed_repository_dict:
+                    self.installed_repository_dicts.append( installed_repository_dict )
+
+    def load_proprietary_converters_and_display_applications( self, deactivate=False ):
+        cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
+        for installed_repository_dict in self.installed_repository_dicts:
+            if installed_repository_dict[ 'converter_path' ]:
+                cdl.load_installed_datatype_converters( installed_repository_dict, deactivate=deactivate )
+            if installed_repository_dict[ 'display_path' ]:
+                cdl.load_installed_display_applications( installed_repository_dict, deactivate=deactivate )
+
+    def purge_repository( self, repository ):
+        """Purge a repository with status New (a white ghost) from the database."""
+        sa_session = self.app.model.context.current
+        status = 'ok'
+        message = ''
+        purged_tool_versions = 0
+        purged_tool_dependencies = 0
+        purged_required_repositories = 0
+        purged_orphan_repository_repository_dependency_association_records = 0
+        purged_orphan_repository_dependency_records = 0
+        if repository.is_new:
+            # Purge this repository's associated tool versions.
+            if repository.tool_versions:
+                for tool_version in repository.tool_versions:
+                    if tool_version.parent_tool_association:
+                        for tool_version_association in tool_version.parent_tool_association:
+                            try:
+                                sa_session.delete( tool_version_association )
+                                sa_session.flush()
+                            except Exception as e:
+                                status = 'error'
+                                message = 'Error attempting to purge tool_version_association records for the repository named %s with status %s: %s.' % \
+                                    ( str( repository.name ), str( repository.status ), str( e ) )
+                                return status, message
+                    if tool_version.child_tool_association:
+                        for tool_version_association in tool_version.child_tool_association:
+                            try:
+                                sa_session.delete( tool_version_association )
+                                sa_session.flush()
+                            except Exception as e:
+                                status = 'error'
+                                message = 'Error attempting to purge tool_version_association records for the repository named %s with status %s: %s.' % \
+                                    ( str( repository.name ), str( repository.status ), str( e ) )
+                                return status, message
+                    try:
+                        sa_session.delete( tool_version )
+                        sa_session.flush()
+                        purged_tool_versions += 1
+                    except Exception as e:
+                        status = 'error'
+                        message = 'Error attempting to purge tool_versions for the repository named %s with status %s: %s.' % \
+                            ( str( repository.name ), str( repository.status ), str( e ) )
+                        return status, message
+            # Purge this repository's associated tool dependencies.
+            if repository.tool_dependencies:
+                for tool_dependency in repository.tool_dependencies:
+                    try:
+                        sa_session.delete( tool_dependency )
+                        sa_session.flush()
+                        purged_tool_dependencies += 1
+                    except Exception as e:
+                        status = 'error'
+                        message = 'Error attempting to purge tool_dependencies for the repository named %s with status %s: %s.' % \
+                            ( str( repository.name ), str( repository.status ), str( e ) )
+                        return status, message
+            # Purge this repository's associated required repositories.
+            if repository.required_repositories:
+                for rrda in repository.required_repositories:
+                    try:
+                        sa_session.delete( rrda )
+                        sa_session.flush()
+                        purged_required_repositories += 1
+                    except Exception as e:
+                        status = 'error'
+                        message = 'Error attempting to purge required_repositories for the repository named %s with status %s: %s.' % \
+                            ( str( repository.name ), str( repository.status ), str( e ) )
+                        return status, message
+            # Purge any "orphan" repository_dependency records associated with the repository, but not with any
+            # repository_repository_dependency_association records.
+            for orphan_repository_dependency in \
+                sa_session.query( self.app.install_model.RepositoryDependency ) \
+                          .filter( self.app.install_model.RepositoryDependency.table.c.tool_shed_repository_id == repository.id ):
+                # Purge any repository_repository_dependency_association records whose repository_dependency_id is
+                # the id of the orphan repository_dependency record.
+                for orphan_rrda in \
+                    sa_session.query( self.app.install_model.RepositoryRepositoryDependencyAssociation ) \
+                              .filter( self.app.install_model.RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == orphan_repository_dependency.id ):
+                    try:
+                        sa_session.delete( orphan_rrda )
+                        sa_session.flush()
+                        purged_orphan_repository_repository_dependency_association_records += 1
+                    except Exception as e:
+                        status = 'error'
+                        message = 'Error attempting to purge repository_repository_dependency_association records associated with '
+                        message += 'an orphan repository_dependency record for the repository named %s with status %s: %s.' % \
+                            ( str( repository.name ), str( repository.status ), str( e ) )
+                        return status, message
+                try:
+                    sa_session.delete( orphan_repository_dependency )
+                    sa_session.flush()
+                    purged_orphan_repository_dependency_records += 1
+                except Exception as e:
+                    status = 'error'
+                    message = 'Error attempting to purge orphan repository_dependency records for the repository named %s with status %s: %s.' % \
+                        ( str( repository.name ), str( repository.status ), str( e ) )
+                    return status, message
+            # Purge the repository.
+            sa_session.delete( repository )
+            sa_session.flush()
+            message = 'The repository named <b>%s</b> with status <b>%s</b> has been purged.<br/>' % \
+                ( str( repository.name ), str( repository.status ) )
+            message += 'Total associated tool_version records purged: %d<br/>' % purged_tool_versions
+            message += 'Total associated tool_dependency records purged: %d<br/>' % purged_tool_dependencies
+            message += 'Total associated repository_repository_dependency_association records purged: %d<br/>' % purged_required_repositories
+            message += 'Total associated orphan repository_repository_dependency_association records purged: %d<br/>' % \
+                purged_orphan_repository_repository_dependency_association_records
+            message += 'Total associated orphan repository_dependency records purged: %d<br/>' % purged_orphan_repository_dependency_records
+        else:
+            status = 'error'
+            message = 'A repository must have the status <b>New</b> in order to be purged.  This repository has '
+            message += 'the status %s.' % str( repository.status )
+        return status, message
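+    # A hedged sketch of the expected call pattern, with hypothetical handling:
+    #     status, message = self.purge_repository( repository )
+    #     if status == 'error':
+    #         ...  # surface the message to the administrator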
+
+    def remove_entry_from_installed_repository_dependencies_of_installed_repositories( self, repository ):
+        """
+        Remove an entry from self.installed_repository_dependencies_of_installed_repositories.  A side-effect of this method
+        is removal of appropriate value items from self.installed_dependent_repositories_of_installed_repositories.
+        """
+        # Remove tuples defining this repository from value lists in self.installed_dependent_repositories_of_installed_repositories.
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        tool_shed, name, owner, installed_changeset_revision = repository_tup
+        altered_installed_dependent_repositories_of_installed_repositories = {}
+        for r_tup, v_tups in self.installed_dependent_repositories_of_installed_repositories.items():
+            if repository_tup in v_tups:
+                debug_msg = "Removing entry for revision %s of repository %s owned by %s " % \
+                    ( installed_changeset_revision, name, owner )
+                r_tool_shed, r_name, r_owner, r_installed_changeset_revision = r_tup
+                debug_msg += "from the dependent list for revision %s of repository %s owned by %s " % \
+                    ( r_installed_changeset_revision, r_name, r_owner )
+                debug_msg += "in installed_repository_dependencies_of_installed_repositories."
+                log.debug( debug_msg )
+                v_tups.remove( repository_tup )
+            altered_installed_dependent_repositories_of_installed_repositories[ r_tup ] = v_tups
+        self.installed_dependent_repositories_of_installed_repositories = \
+            altered_installed_dependent_repositories_of_installed_repositories
+        # Remove this repository's entry from self.installed_repository_dependencies_of_installed_repositories.
+        if repository_tup in self.installed_repository_dependencies_of_installed_repositories:
+            debug_msg = "Removing entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
+            debug_msg += "from installed_repository_dependencies_of_installed_repositories."
+            log.debug( debug_msg )
+            del self.installed_repository_dependencies_of_installed_repositories[ repository_tup ]
+
+    def remove_entry_from_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ):
+        """Remove an entry from self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies."""
+        tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+        if tool_dependency_tup in self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies:
+            tool_shed_repository_id, name, version, type = tool_dependency_tup
+            debug_msg = "Removing entry for version %s of %s %s " % ( version, type, name )
+            debug_msg += "from installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies."
+            log.debug( debug_msg )
+            del self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies[ tool_dependency_tup ]
+
+    def remove_entry_from_installed_tool_dependencies_of_installed_repositories( self, repository ):
+        """Remove an entry from self.installed_tool_dependencies_of_installed_repositories."""
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        if repository_tup in self.installed_tool_dependencies_of_installed_repositories:
+            tool_shed, name, owner, installed_changeset_revision = repository_tup
+            debug_msg = "Removing entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
+            debug_msg += "from installed_tool_dependencies_of_installed_repositories."
+            log.debug( debug_msg )
+            del self.installed_tool_dependencies_of_installed_repositories[ repository_tup ]
+
+    def remove_entry_from_repository_dependencies_of_installed_repositories( self, repository ):
+        """Remove an entry from self.repository_dependencies_of_installed_repositories."""
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        if repository_tup in self.repository_dependencies_of_installed_repositories:
+            tool_shed, name, owner, installed_changeset_revision = repository_tup
+            debug_msg = "Removing entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
+            debug_msg += "from repository_dependencies_of_installed_repositories."
+            log.debug( debug_msg )
+            del self.repository_dependencies_of_installed_repositories[ repository_tup ]
+
+    def remove_entry_from_runtime_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ):
+        """Remove an entry from self.runtime_tool_dependencies_of_installed_tool_dependencies."""
+        tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+        if tool_dependency_tup in self.runtime_tool_dependencies_of_installed_tool_dependencies:
+            tool_shed_repository_id, name, version, type = tool_dependency_tup
+            debug_msg = "Removing entry for version %s of %s %s from runtime_tool_dependencies_of_installed_tool_dependencies." % \
+                ( version, type, name )
+            log.debug( debug_msg )
+            del self.runtime_tool_dependencies_of_installed_tool_dependencies[ tool_dependency_tup ]
+
+    def remove_entry_from_tool_dependencies_of_installed_repositories( self, repository ):
+        """Remove an entry from self.tool_dependencies_of_installed_repositories."""
+        repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+        if repository_tup in self.tool_dependencies_of_installed_repositories:
+            tool_shed, name, owner, installed_changeset_revision = repository_tup
+            debug_msg = "Removing entry for revision %s of repository %s owned by %s from tool_dependencies_of_installed_repositories." % \
+                ( installed_changeset_revision, name, owner )
+            log.debug( debug_msg )
+            del self.tool_dependencies_of_installed_repositories[ repository_tup ]
+
+    def repository_dependency_needed_only_for_compiling_tool_dependency( self, repository, repository_dependency ):
+        """Return True if the received repository_dependency is needed only to compile a tool dependency defined for the received repository."""
+        for rd_tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
+            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
+            # TODO: we may discover that we need to check more than just installed_changeset_revision and changeset_revision here, in which
+            # case we'll need to contact the tool shed to get the list of all possible changeset_revisions.
+            cleaned_tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( tool_shed )
+            cleaned_repository_dependency_tool_shed = \
+                common_util.remove_protocol_and_port_from_tool_shed_url( str( repository_dependency.tool_shed ) )
+            if cleaned_repository_dependency_tool_shed == cleaned_tool_shed and \
+                repository_dependency.name == name and \
+                repository_dependency.owner == owner and \
+                ( repository_dependency.installed_changeset_revision == changeset_revision or
+                  repository_dependency.changeset_revision == changeset_revision ):
+                return True
+        return False
+
+    def set_only_if_compiling_contained_td( self, repository, required_repository ):
+        """
+        Return True if the received required_repository is only needed to compile a tool
+        dependency defined for the received repository.
+        """
+        # This method is called only from Galaxy when rendering repository dependencies
+        # for an installed tool shed repository.
+        # TODO: Do we need to check more than changeset_revision here?
+        required_repository_tup = [ required_repository.tool_shed,
+                                    required_repository.name,
+                                    required_repository.owner,
+                                    required_repository.changeset_revision ]
+        for tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
+            partial_tup = tup[ 0:4 ]
+            if partial_tup == required_repository_tup:
+                return 'True'
+        return 'False'
+
+    def set_prior_installation_required( self, repository, required_repository ):
+        """
+        Return True if the received required_repository must be installed before the
+        received repository.
+        """
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app,
+                                                                               str( required_repository.tool_shed ) )
+        required_repository_tup = [ tool_shed_url,
+                                    str( required_repository.name ),
+                                    str( required_repository.owner ),
+                                    str( required_repository.changeset_revision ) ]
+        # Get the list of repository dependency tuples associated with the received repository
+        # where prior_installation_required is True.
+        required_rd_tups_that_must_be_installed = repository.requires_prior_installation_of
+        for required_rd_tup in required_rd_tups_that_must_be_installed:
+            # Repository dependency tuples in metadata include a prior_installation_required value,
+            # so strip it for comparison.
+            partial_required_rd_tup = required_rd_tup[ 0:4 ]
+            if partial_required_rd_tup == required_repository_tup:
+                # Return the string value of prior_installation_required, which defaults to 'False'.
+                return str( required_rd_tup[ 4 ] )
+        return 'False'
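+    # For illustration, repository dependency tuples in metadata look like (hypothetical values):
+    #     [ 'http://toolshed.g2.bx.psu.edu', 'package_zlib_1_2', 'iuc', 'abc123', 'True', 'False' ]
+    # where element 4 is prior_installation_required and element 5 is only_if_compiling_contained_td.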
+
+    def update_existing_tool_dependency( self, repository, original_dependency_dict, new_dependencies_dict ):
+        """
+        Update an existing tool dependency whose definition was updated in a changeset
+        pulled by a Galaxy administrator when getting updates to an installed tool shed
+        repository.  The original_dependency_dict is a single tool dependency definition,
+        an example of which is::
+
+            {"name": "bwa",
+             "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n        ",
+             "type": "package",
+             "version": "0.6.2"}
+
+        The new_dependencies_dict is the dictionary generated by the metadata_util.generate_tool_dependency_metadata method.
+        """
+        new_tool_dependency = None
+        original_name = original_dependency_dict[ 'name' ]
+        original_type = original_dependency_dict[ 'type' ]
+        original_version = original_dependency_dict[ 'version' ]
+        # Locate the appropriate tool_dependency associated with the repository.  Track the
+        # match explicitly so that tool_dependency is not left pointing at the last iterated
+        # record when no dependency matches.
+        tool_dependency = None
+        for candidate_tool_dependency in repository.tool_dependencies:
+            if candidate_tool_dependency.name == original_name and \
+                    candidate_tool_dependency.type == original_type and \
+                    candidate_tool_dependency.version == original_version:
+                tool_dependency = candidate_tool_dependency
+                break
+        if tool_dependency and tool_dependency.can_update:
+            dependency_install_dir = tool_dependency.installation_directory( self.app )
+            removed_from_disk, error_message = \
+                tool_dependency_util.remove_tool_dependency_installation_directory( dependency_install_dir )
+            if removed_from_disk:
+                context = self.app.install_model.context
+                new_dependency_name = None
+                new_dependency_type = None
+                new_dependency_version = None
+                for new_dependency_key, new_dependency_val_dict in new_dependencies_dict.items():
+                    # Match on name only; we assume the dependency name alone identifies it here.
+                    if original_name == new_dependency_val_dict[ 'name' ]:
+                        new_dependency_name = new_dependency_val_dict[ 'name' ]
+                        new_dependency_type = new_dependency_val_dict[ 'type' ]
+                        new_dependency_version = new_dependency_val_dict[ 'version' ]
+                        break
+                if new_dependency_name and new_dependency_type and new_dependency_version:
+                    # Update all attributes of the tool_dependency record in the database.
+                    log.debug( "Updating version %s of tool dependency %s %s to have new version %s and type %s."
+                               % ( str( tool_dependency.version ),
+                                   str( tool_dependency.type ),
+                                   str( tool_dependency.name ),
+                                   str( new_dependency_version ),
+                                   str( new_dependency_type ) ) )
+                    tool_dependency.type = new_dependency_type
+                    tool_dependency.version = new_dependency_version
+                    tool_dependency.status = self.app.install_model.ToolDependency.installation_status.UNINSTALLED
+                    tool_dependency.error_message = None
+                    context.add( tool_dependency )
+                    context.flush()
+                    new_tool_dependency = tool_dependency
+                else:
+                    # We have no new tool dependency definition based on a matching dependency name, so remove
+                    # the existing tool dependency record from the database.
+                    log.debug( "Deleting version %s of tool dependency %s %s from the database since it is no longer defined."
+                               % ( str( tool_dependency.version ), str( tool_dependency.type ), str( tool_dependency.name ) ) )
+                    context.delete( tool_dependency )
+                    context.flush()
+        return new_tool_dependency
diff --git a/lib/tool_shed/galaxy_install/metadata/__init__.py b/lib/tool_shed/galaxy_install/metadata/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
new file mode 100644
index 0000000..bfa7cc7
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
@@ -0,0 +1,189 @@
+import logging
+import os
+
+from sqlalchemy import false
+
+from galaxy import util
+from galaxy.util import inflector
+from galaxy.web.form_builder import SelectField
+from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.metadata import metadata_generator
+from tool_shed.util import common_util
+from tool_shed.util import repository_util
+from tool_shed.util import tool_util
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class InstalledRepositoryMetadataManager( metadata_generator.MetadataGenerator ):
+
+    def __init__( self, app, tpm=None, repository=None, changeset_revision=None, repository_clone_url=None,
+                  shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
+                  resetting_all_metadata_on_repository=False, updating_installed_repository=False,
+                  persist=False, metadata_dict=None ):
+        super( InstalledRepositoryMetadataManager, self ).__init__( app, repository, changeset_revision,
+                                                                    repository_clone_url, shed_config_dict,
+                                                                    relative_install_dir, repository_files_dir,
+                                                                    resetting_all_metadata_on_repository,
+                                                                    updating_installed_repository, persist,
+                                                                    metadata_dict=metadata_dict, user=None )
+        if tpm is None:
+            self.tpm = tool_panel_manager.ToolPanelManager( self.app )
+        else:
+            self.tpm = tpm
+
+    def build_repository_ids_select_field( self, name='repository_ids', multiple=True, display='checkboxes' ):
+        """Generate the current list of repositories for resetting metadata."""
+        repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
+        query = self.get_query_for_setting_metadata_on_repositories( order=True )
+        for repository in query:
+            owner = str( repository.owner )
+            option_label = '%s (%s)' % ( str( repository.name ), owner )
+            option_value = '%s' % self.app.security.encode_id( repository.id )
+            repositories_select_field.add_option( option_label, option_value )
+        return repositories_select_field
+
+    def get_query_for_setting_metadata_on_repositories( self, order=True ):
+        """
+        Return a query containing repositories for resetting metadata.  The order parameter
+        is used for displaying the list of repositories ordered alphabetically for display on
+        a page.  When called from the Galaxy API, order is False.
+        """
+        if order:
+            return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ) \
+                                                 .filter( self.app.install_model.ToolShedRepository.table.c.uninstalled == false() ) \
+                                                 .order_by( self.app.install_model.ToolShedRepository.table.c.name,
+                                                            self.app.install_model.ToolShedRepository.table.c.owner )
+        else:
+            return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ) \
+                                                 .filter( self.app.install_model.ToolShedRepository.table.c.uninstalled == false() )
+
+    def get_repository_tools_tups( self ):
+        """
+        Return a list of tuples of the form (relative_path, guid, tool) for each tool defined
+        in the received tool shed repository metadata.
+        """
+        repository_tools_tups = []
+        shed_conf_dict = self.tpm.get_shed_tool_conf_dict( self.metadata_dict.get( 'shed_config_filename' ) )
+        if 'tools' in self.metadata_dict:
+            for tool_dict in self.metadata_dict[ 'tools' ]:
+                load_relative_path = relative_path = tool_dict.get( 'tool_config', None )
+                if shed_conf_dict.get( 'tool_path' ):
+                    load_relative_path = os.path.join( shed_conf_dict.get( 'tool_path' ), relative_path )
+                guid = tool_dict.get( 'guid', None )
+                if relative_path and guid:
+                    tool = self.app.toolbox.load_tool( os.path.abspath( load_relative_path ), guid=guid, use_cached=False )
+                else:
+                    tool = None
+                if tool:
+                    repository_tools_tups.append( ( relative_path, guid, tool ) )
+        return repository_tools_tups
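+    # Each returned tuple pairs a tool config path with its guid and loaded Tool object, e.g.
+    # with hypothetical values:
+    #     ( 'tools/bwa_wrapper.xml', 'toolshed.g2.bx.psu.edu/repos/devteam/bwa/bwa_wrapper/1.2.3', <Tool> )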
+
+    def reset_all_metadata_on_installed_repository( self ):
+        """Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
+        if self.relative_install_dir:
+            original_metadata_dict = self.repository.metadata
+            self.generate_metadata_for_changeset_revision()
+            if self.metadata_dict != original_metadata_dict:
+                self.repository.metadata = self.metadata_dict
+                self.update_in_shed_tool_config()
+                self.app.install_model.context.add( self.repository )
+                self.app.install_model.context.flush()
+                log.debug( 'Metadata has been reset on repository %s.' % self.repository.name )
+            else:
+                log.debug( 'Metadata did not need to be reset on repository %s.' % self.repository.name )
+        else:
+            log.debug( 'Error locating installation directory for repository %s.' % self.repository.name )
+
+    def reset_metadata_on_selected_repositories( self, user, **kwd ):
+        """
+        Inspect the repository changelog to reset metadata for all appropriate changeset revisions.
+        This method is called from both Galaxy and the Tool Shed.
+        """
+        repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
+        message = ''
+        status = 'done'
+        if repository_ids:
+            successful_count = 0
+            unsuccessful_count = 0
+            for repository_id in repository_ids:
+                try:
+                    repository = repository_util.get_installed_tool_shed_repository( self.app, repository_id )
+                    self.set_repository( repository )
+                    self.reset_all_metadata_on_installed_repository()
+                    if self.invalid_file_tups:
+                        message = tool_util.generate_message_for_invalid_tools( self.app,
+                                                                                self.invalid_file_tups,
+                                                                                repository,
+                                                                                None,
+                                                                                as_html=False )
+                        log.debug( message )
+                        unsuccessful_count += 1
+                    else:
+                        log.debug( "Successfully reset metadata on repository %s owned by %s" %
+                            ( str( repository.name ), str( repository.owner ) ) )
+                        successful_count += 1
+                except Exception:
+                    log.exception( "Error attempting to reset metadata on repository with encoded id %s", str( repository_id ) )
+                    unsuccessful_count += 1
+            message = "Successfully reset metadata on %d %s.  " % \
+                ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
+            if unsuccessful_count:
+                message += "Error setting metadata on %d %s - see the paster log for details.  " % \
+                    ( unsuccessful_count, inflector.cond_plural( unsuccessful_count, "repository" ) )
+        else:
+            message = 'Select at least one repository on which to reset all metadata.'
+            status = 'error'
+        return message, status
+
+    def set_repository( self, repository ):
+        super( InstalledRepositoryMetadataManager, self ).set_repository( repository )
+        self.repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
+
+    def tool_shed_from_repository_clone_url( self ):
+        """Given a repository clone URL, return the tool shed that contains the repository."""
+        cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( self.repository_clone_url )
+        return cleaned_repository_clone_url.split( '/repos/' )[ 0 ].rstrip( '/' )
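+    # e.g. (hypothetical): 'https://user@toolshed.g2.bx.psu.edu/repos/devteam/bwa' -> 'toolshed.g2.bx.psu.edu'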
+
+    def update_in_shed_tool_config( self ):
+        """
+        A tool shed repository is being updated so change the shed_tool_conf file.  Parse the config
+        file to generate the entire list of config_elems instead of using the in-memory list.
+        """
+        shed_conf_dict = self.repository.get_shed_config_dict( self.app )
+        shed_tool_conf = shed_conf_dict[ 'config_filename' ]
+        tool_path = shed_conf_dict[ 'tool_path' ]
+        self.tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( self.repository )
+        repository_tools_tups = self.get_repository_tools_tups()
+        clone_url = common_util.generate_clone_url_for_installed_repository( self.app, self.repository )
+        tool_shed = self.tool_shed_from_repository_clone_url()
+        owner = self.repository.owner
+        if not owner:
+            cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( clone_url )
+            owner = repository_util.get_repository_owner( cleaned_repository_clone_url )
+        guid_to_tool_elem_dict = {}
+        for tool_config_filename, guid, tool in repository_tools_tups:
+            guid_to_tool_elem_dict[ guid ] = self.tpm.generate_tool_elem( tool_shed,
+                                                                          self.repository.name,
+                                                                          self.repository.changeset_revision,
+                                                                          self.repository.owner or '',
+                                                                          tool_config_filename,
+                                                                          tool,
+                                                                          None )
+        config_elems = []
+        tree, error_message = xml_util.parse_xml( shed_tool_conf )
+        if tree:
+            root = tree.getroot()
+            for elem in root:
+                if elem.tag == 'section':
+                    for i, tool_elem in enumerate( elem ):
+                        guid = tool_elem.attrib.get( 'guid' )
+                        if guid in guid_to_tool_elem_dict:
+                            elem[i] = guid_to_tool_elem_dict[ guid ]
+                elif elem.tag == 'tool':
+                    guid = elem.attrib.get( 'guid' )
+                    if guid in guid_to_tool_elem_dict:
+                        elem = guid_to_tool_elem_dict[ guid ]
+                config_elems.append( elem )
+            self.tpm.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
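+    # The net effect is a rewritten shed_tool_conf file in which each <tool> element whose guid
+    # matches a tool in this repository is replaced by a freshly generated element.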
diff --git a/lib/tool_shed/galaxy_install/migrate/__init__.py b/lib/tool_shed/galaxy_install/migrate/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/galaxy_install/migrate/check.py b/lib/tool_shed/galaxy_install/migrate/check.py
new file mode 100644
index 0000000..fc2ad61
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/check.py
@@ -0,0 +1,163 @@
+import logging
+import os
+import subprocess
+import sys
+
+from migrate.versioning import repository, schema
+from sqlalchemy import create_engine, MetaData, Table
+
+from galaxy.util.odict import odict
+from tool_shed.util import common_util
+
+log = logging.getLogger( __name__ )
+
+# Path relative to galaxy
+migrate_repository_directory = os.path.abspath(os.path.dirname( __file__ )).replace( os.getcwd() + os.path.sep, '', 1 )
+migrate_repository = repository.Repository( migrate_repository_directory )
+
+
+def verify_tools( app, url, galaxy_config_file=None, engine_options=None ):
+    # Check the value in the migrate_tools.version database table column to verify that the number is in
+    # sync with the number of version scripts in ~/lib/galaxy/tools/migrate/versions.
+    # Create engine and metadata
+    engine = create_engine( url, **( engine_options or {} ) )
+    meta = MetaData( bind=engine )
+    # The migrate_tools table was created in database version script 0092_add_migrate_tools_table.py.
+    Table( "migrate_tools", meta, autoload=True )
+    # Verify that the code and the database are in sync.
+    db_schema = schema.ControlledSchema( engine, migrate_repository )
+    latest_tool_migration_script_number = migrate_repository.versions.latest
+    if latest_tool_migration_script_number != db_schema.version:
+        # The default behavior is that the tool shed is down.
+        tool_shed_accessible = False
+        if app.new_installation:
+            # New installations will not be missing tools, so we don't need to worry about them.
+            missing_tool_configs_dict = odict()
+        else:
+            tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
+            if tool_panel_configs:
+                # The missing_tool_configs_dict contents are something like:
+                # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
+                tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app,
+                                                                                                       tool_panel_configs,
+                                                                                                       latest_tool_migration_script_number )
+            else:
+                # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
+                # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
+                # the database.
+                tool_shed_accessible = True
+                missing_tool_configs_dict = odict()
+        have_tool_dependencies = False
+        for k, v in missing_tool_configs_dict.items():
+            if v:
+                have_tool_dependencies = True
+                break
+        if not app.config.running_functional_tests:
+            if tool_shed_accessible:
+                # Automatically update the value of the migrate_tools.version database table column.
+                config_arg = ''
+                if galaxy_config_file:
+                    config_arg = " -c %s" % galaxy_config_file
+                cmd = 'sh manage_tools.sh%s upgrade' % config_arg
+                proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
+                return_code = proc.wait()
+                output = proc.stdout.read( 32768 )
+                if return_code != 0:
+                    raise Exception( "Error attempting to update the value of migrate_tools.version: %s" % output )
+                elif missing_tool_configs_dict:
+                    if len( tool_panel_configs ) == 1:
+                        plural = ''
+                        tool_panel_config_file_names = tool_panel_configs[ 0 ]
+                    else:
+                        plural = 's'
+                        tool_panel_config_file_names = ', '.join( tool_panel_configs )
+                    msg = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
+                    msg += "\n\nThe list of files at the end of this message refers to tools that are configured to load into the tool panel for\n"
+                    msg += "this Galaxy instance, but have been removed from the Galaxy distribution.  These tools and their dependencies can be\n"
+                    msg += "automatically installed from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n"
+                    msg += "To skip this process, attempt to start your Galaxy server again (e.g., sh run.sh or whatever you use).  If you do this,\n"
+                    msg += "be aware that these tools will no longer be available in your Galaxy tool panel, and entries for each of them should\n"
+                    msg += "be removed from your file%s named %s.\n\n" % ( plural, tool_panel_config_file_names )
+                    msg += "CRITICAL NOTE IF YOU PLAN TO INSTALL\n"
+                    msg += "The location in which the tool repositories will be installed is the value of the 'tool_path' attribute in the <tool>\n"
+                    msg += 'tag of the file named ./migrated_tool_conf.xml (i.e., <toolbox tool_path="../shed_tools">).  The default location\n'
+                    msg += "setting is '../shed_tools', which may be problematic for some cluster environments, so make sure to change it before\n"
+                    msg += "you execute the installation process if appropriate.  The configured location must be outside of the Galaxy installation\n"
+                    msg += "directory or it must be in a sub-directory protected by a properly configured .hgignore file if the directory is within\n"
+                    msg += "the Galaxy installation directory hierarchy.  This is because tool shed repositories will be installed using mercurial's\n"
+                    msg += "clone feature, which creates .hg directories and associated mercurial repository files.  Not having .hgignore properly\n"
+                    msg += "configured could result in undesired behavior when modifying or updating your local Galaxy instance or the tool shed\n"
+                    msg += "repositories if they are in directories that pose conflicts.  See mercurial's .hgignore documentation at the following\n"
+                    msg += "URL for details.\n\nhttp://mercurial.selenic.com/wiki/.hgignore\n\n"
+                    if have_tool_dependencies:
+                        msg += "The following tool dependencies can also optionally be installed (see the option flag in the command below).  If you\n"
+                        msg += "choose to install them (recommended), they will be installed within the location specified by the 'tool_dependency_dir'\n"
+                        msg += "setting in your main Galaxy configuration file (e.g., uninverse_wsgi.ini).\n"
+                        processed_tool_dependencies = []
+                        for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
+                            for tool_dependencies_tup in missing_tool_configs_dict[ missing_tool_config ][ 'tool_dependencies' ]:
+                                if tool_dependencies_tup not in processed_tool_dependencies:
+                                    msg += "------------------------------------\n"
+                                    msg += "Tool Dependency\n"
+                                    msg += "------------------------------------\n"
+                                    msg += "Name: %s, Version: %s, Type: %s\n" % ( tool_dependencies_tup[ 0 ],
+                                                                                   tool_dependencies_tup[ 1 ],
+                                                                                   tool_dependencies_tup[ 2 ] )
+                                    if len( tool_dependencies_tup ) >= 4:
+                                        msg += "Requirements and installation information:\n"
+                                        msg += "%s\n" % tool_dependencies_tup[ 3 ]
+                                    else:
+                                        msg += "\n"
+                                    msg += "------------------------------------\n"
+                                    processed_tool_dependencies.append( tool_dependencies_tup )
+                        msg += "\n"
+                    msg += "%s" % output.replace( 'done', '' )
+                    msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n"
+                    msg += "sh ./scripts/migrate_tools/%04d_tools.sh\n" % latest_tool_migration_script_number
+                    msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n"
+                    if have_tool_dependencies:
+                        msg += "The tool dependencies listed above will be installed along with the repositories if you add the 'install_dependencies'\n"
+                        msg += "option to the above command like this:\n\n"
+                        msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n"
+                        msg += "sh ./scripts/migrate_tools/%04d_tools.sh install_dependencies\n" % latest_tool_migration_script_number
+                        msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n"
+                        msg += "Tool dependencies can be installed after the repositories have been installed as well.\n\n"
+                    msg += "After the installation process finishes, you can start your Galaxy server.  As part of this installation process,\n"
+                    msg += "entries for each of the following tool config files will be added to the file named ./migrated_tool_conf.xml, so these\n"
+                    msg += "tools will continue to be loaded into your tool panel.  Because of this, existing entries for these tools have been\n"
+                    msg += "removed from your file%s named %s.\n\n" % ( plural, tool_panel_config_file_names )
+                    for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
+                        msg += "%s\n" % missing_tool_config
+                    msg += "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"
+                    raise Exception( msg )
+            else:
+                log.debug( "The main Galaxy tool shed is not currently available, so skipped tool migration %s until next server startup" % db_schema.version )
+    else:
+        log.info( "At migrate_tools version %d" % db_schema.version )
+
+
+def migrate_to_current_version( engine, schema ):
+    # Changes to get to current version.
+    changeset = schema.changeset( None )
+    for ver, change in changeset:
+        nextver = ver + changeset.step
+        log.info( 'Installing tools from version %s -> %s... ' % ( ver, nextver ) )
+        old_stdout = sys.stdout
+
+        class FakeStdout( object ):
+            def __init__( self ):
+                self.buffer = []
+
+            def write( self, s ):
+                self.buffer.append( s )
+
+            def flush( self ):
+                pass
+
+        sys.stdout = FakeStdout()
+        try:
+            schema.runchange( ver, change, changeset.step )
+        finally:
+            for message in "".join( sys.stdout.buffer ).split( "\n" ):
+                log.info( message )
+            sys.stdout = old_stdout
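+# A hedged usage sketch (assuming an engine bound to the install database, mirroring the
+# ControlledSchema construction in verify_tools above):
+#     db_schema = schema.ControlledSchema( engine, migrate_repository )
+#     migrate_to_current_version( engine, db_schema )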
diff --git a/lib/tool_shed/galaxy_install/migrate/common.py b/lib/tool_shed/galaxy_install/migrate/common.py
new file mode 100644
index 0000000..1c70512
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/common.py
@@ -0,0 +1,72 @@
+from __future__ import print_function
+
+import os
+import sys
+
+from six.moves import configparser
+
+import galaxy.config
+from tool_shed.galaxy_install import installed_repository_manager, tool_migration_manager
+
+
+class MigrateToolsApplication( object, galaxy.config.ConfiguresGalaxyMixin ):
+    """Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager"""
+
+    def __init__( self, tools_migration_config ):
+        install_dependencies = 'install_dependencies' in sys.argv
+        galaxy_config_file = 'galaxy.ini'
+        self.name = 'galaxy'
+        if '-c' in sys.argv:
+            pos = sys.argv.index( '-c' )
+            sys.argv.pop( pos )
+            galaxy_config_file = sys.argv.pop( pos )
+        if not os.path.exists( galaxy_config_file ):
+            print("Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % galaxy_config_file)
+            sys.exit( 1 )
+        config_parser = configparser.ConfigParser( { 'here': os.getcwd() } )
+        config_parser.read( galaxy_config_file )
+        galaxy_config_dict = {}
+        for key, value in config_parser.items( "app:main" ):
+            galaxy_config_dict[ key ] = value
+        self.config = galaxy.config.Configuration( **galaxy_config_dict )
+
+        self.config.update_integrated_tool_panel = True
+
+        self._configure_object_store()
+
+        self._configure_security()
+
+        self._configure_models()
+
+        self._configure_datatypes_registry( )
+
+        self._configure_tool_data_tables( from_shed_config=True )
+
+        self._configure_toolbox()
+
+        self._configure_tool_shed_registry()
+
+        self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager( self )
+
+        # Get the latest tool migration script number to send to the Install manager.
+        latest_migration_script_number = int( tools_migration_config.split( '_' )[ 0 ] )
+        # The value of migrated_tools_config is migrated_tools_conf.xml, and is reserved for
+        # containing only those tools that have been eliminated from the distribution and moved
+        # to the tool shed.  A side-effect of instantiating the ToolMigrationManager is the automatic
+        # installation of all appropriate tool shed repositories.
+        self.tool_migration_manager = \
+            tool_migration_manager.ToolMigrationManager( app=self,
+                                                         latest_migration_script_number=latest_migration_script_number,
+                                                         tool_shed_install_config=os.path.join( self.config.root,
+                                                                                                'scripts',
+                                                                                                'migrate_tools',
+                                                                                                tools_migration_config ),
+                                                         migrated_tools_config=self.config.migrated_tools_config,
+                                                         install_dependencies=install_dependencies )
+
+    @property
+    def sa_session( self ):
+        return self.model.context.current
+
+    def shutdown( self ):
+        self.object_store.shutdown()
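+# Hypothetical driver usage (the '0002_tools.xml' name merely illustrates the expected
+# '<number>_tools.xml' format parsed in __init__):
+#     app = MigrateToolsApplication( '0002_tools.xml' )
+#     ...  # instantiating ToolMigrationManager installs the appropriate repositories
+#     app.shutdown()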
diff --git a/lib/tool_shed/galaxy_install/migrate/migrate.cfg b/lib/tool_shed/galaxy_install/migrate/migrate.cfg
new file mode 100644
index 0000000..3240536
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/migrate.cfg
@@ -0,0 +1,20 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=GalaxyTools
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to 
+# change the table name in each database too. 
+version_table=migrate_tools
+
+# When committing a change script, Migrate will attempt to generate the 
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the 
+# commit continues, perhaps ending successfully. 
+# Databases in this list MUST compile successfully during a commit, or the 
+# entire commit will fail. List the databases your application will actually 
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
\ No newline at end of file
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0001_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0001_tools.py
new file mode 100644
index 0000000..40f651a
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0001_tools.py
@@ -0,0 +1,12 @@
+"""
+Initialize the version column of the migrate_tools database table to 1.  No tool migrations are handled in this version.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0002_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0002_tools.py
new file mode 100644
index 0000000..085f10c
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0002_tools.py
@@ -0,0 +1,15 @@
+"""
+The Emboss 5.0.0 tools have been eliminated from the distribution and the Emboss datatypes have been removed from
+datatypes_conf.xml.sample.  You should remove the Emboss datatypes from your version of datatypes_conf.xml.  The
+repositories named emboss_5 and emboss_datatypes from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
+will be installed into your local Galaxy instance at the location discussed above by running the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0003_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0003_tools.py
new file mode 100644
index 0000000..5b6a2f4
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0003_tools.py
@@ -0,0 +1,14 @@
+"""
+The freebayes tool has been eliminated from the distribution.  The repository named freebayes from the main
+Galaxy tool shed at http://toolshed.g2.bx.psu.edu will be installed into your local Galaxy instance at the
+location discussed above by running the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0004_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0004_tools.py
new file mode 100644
index 0000000..6df2fac
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0004_tools.py
@@ -0,0 +1,16 @@
+"""
+The NCBI BLAST+ tools have been eliminated from the distribution.  The tools and
+datatypes are now available in repositories named ncbi_blast_plus and
+blast_datatypes, in the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu.
+These repositories will be installed into your local Galaxy instance at the
+location discussed above by running the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0005_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0005_tools.py
new file mode 100644
index 0000000..78e13ab
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0005_tools.py
@@ -0,0 +1,17 @@
+"""
+The tools "Map with BWA for Illumina" and "Map with BWA for SOLiD" have
+been eliminated from the distribution.  The tools are now available
+in the repository named bwa_wrappers from the main Galaxy tool shed at
+http://toolshed.g2.bx.psu.edu, and will be installed into your local
+Galaxy instance at the location discussed above by running the following
+command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0006_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0006_tools.py
new file mode 100644
index 0000000..4e068e5
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0006_tools.py
@@ -0,0 +1,22 @@
+"""
+The following tools have been eliminated from the distribution:
+FASTQ to BAM, SAM to FASTQ, BAM Index Statistics, Estimate Library
+Complexity, Insertion size metrics for PAIRED data, SAM/BAM Hybrid
+Selection Metrics, bam/sam Cleaning, Add or Replace Groups, Replace
+SAM/BAM Header, Paired Read Mate Fixer, Mark Duplicate reads,
+SAM/BAM Alignment Summary Metrics, SAM/BAM GC Bias Metrics, and
+Reorder SAM/BAM.  The tools are now available in the repository
+named picard from the main Galaxy tool shed at
+http://toolshed.g2.bx.psu.edu, and will be installed into your
+local Galaxy instance at the location discussed above by running
+the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0007_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0007_tools.py
new file mode 100644
index 0000000..a66771b
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0007_tools.py
@@ -0,0 +1,19 @@
+"""
+The following tools have been eliminated from the distribution:
+Map with Bowtie for Illumina, Map with Bowtie for SOLiD, Lastz,
+and Lastz paired reads.  The tools are now available in the
+repositories named bowtie_wrappers, bowtie_color_wrappers, lastz,
+and lastz_paired_reads from the main Galaxy tool shed at
+http://toolshed.g2.bx.psu.edu, and will be installed into your
+local Galaxy instance at the location discussed above by running
+the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0008_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0008_tools.py
new file mode 100644
index 0000000..1422465
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0008_tools.py
@@ -0,0 +1,116 @@
+"""
+The following tools have been eliminated from the distribution:
+
+1:  BAM-to-SAM converts BAM format to SAM format
+2:  Categorize Elements satisfying criteria
+3:  Compute Motif Frequencies For All Motifs motif by motif
+4:  Compute Motif Frequencies in indel flanking regions
+5:  CTD analysis of chemicals, diseases, or genes
+6:  Cuffcompare
+7:  Cuffdiff
+8:  Cufflinks
+9:  Cuffmerge
+10: Delete Overlapping Indels from a chromosome indels file
+11: Separate pgSnp alleles into columns
+12: Draw Stacked Bar Plots for different categories and different criteria
+13: Length Distribution chart
+14: FASTA Width formatter
+15: RNA/DNA converter
+16: Draw quality score boxplot
+17: Quality format converter (ASCII-Numeric)
+18: Filter by quality
+19: FASTQ to FASTA converter
+20: Remove sequencing artifacts
+21: Barcode Splitter
+22: Clip adapter sequences
+23: Collapse sequences
+24: Draw nucleotides distribution chart
+25: Compute quality statistics
+26: Rename sequences
+27: Reverse-Complement
+28: Trim sequences
+29: FunDO human genes associated with disease terms
+30: HVIS visualization of genomic data with the Hilbert curve
+31: Fetch Indels from 3-way alignments
+32: Identify microsatellite births and deaths
+33: Extract orthologous microsatellites for multiple (>2) species alignments
+34: Mutate Codons with SNPs
+35: Pileup-to-Interval condenses pileup format into ranges of bases
+36: Filter pileup on coverage and SNPs
+37: Filter SAM on bitwise flag values
+38: Merge BAM Files merges BAM files together
+39: Generate pileup from BAM dataset
+40: SAM-to-BAM converts SAM format to BAM format
+41: Convert SAM to interval
+42: flagstat provides simple stats on BAM files
+43: MPileup SNP and indel caller
+44: rmdup remove PCR duplicates
+45: Slice BAM by provided regions
+46: Split paired end reads
+47: T Test for Two Samples
+48: Plotting tool for multiple series and graph types.
+
+The tools are now available in the following repositories, respectively:
+
+1:  bam_to_sam
+2:  categorize_elements_satisfying_criteria
+3:  compute_motif_frequencies_for_all_motifs
+4:  compute_motifs_frequency
+5:  ctd_batch
+6:  cuffcompare
+7:  cuffdiff
+8:  cufflinks
+9:  cuffmerge
+10: delete_overlapping_indels
+11: divide_pg_snp
+12: draw_stacked_barplots
+13: fasta_clipping_histogram
+14: fasta_formatter
+15: fasta_nucleotide_changer
+16: fastq_quality_boxplot
+17: fastq_quality_converter
+18: fastq_quality_filter
+19: fastq_to_fasta
+20: fastx_artifacts_filter
+21: fastx_barcode_splitter
+22: fastx_clipper
+23: fastx_collapser
+24: fastx_nucleotides_distribution
+25: fastx_quality_statistics
+26: fastx_renamer
+27: fastx_reverse_complement
+28: fastx_trimmer
+29: hgv_fundo
+30: hgv_hilbertvis
+31: indels_3way
+32: microsatellite_birthdeath
+33: multispecies_orthologous_microsats
+34: mutate_snp_codon
+35: pileup_interval
+36: pileup_parser
+37: sam_bitwise_flag_filter
+38: sam_merge
+39: sam_pileup
+40: sam_to_bam
+41: sam2interval
+42: samtools_flagstat
+43: samtools_mpileup
+44: samtools_rmdup
+45: samtools_slice_bam
+46: split_paired_reads
+47: t_test_two_samples
+48: xy_plot
+
+from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
+and will be installed into your local Galaxy instance at the
+location discussed above by running the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0009_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0009_tools.py
new file mode 100644
index 0000000..a2a3097
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0009_tools.py
@@ -0,0 +1,106 @@
+"""
+The following tools have been eliminated from the distribution:
+
+1:  Bowtie2
+2:  Control-based ChIP-seq Analysis Tool
+3:  ClustalW multiple sequence alignment program for DNA or proteins
+4:  Compute P-values and Correlation Coefficients for Feature Occurrences
+5:  Compute P-values and Correlation Coefficients for Occurrences of Two Sets of Features
+6:  Compute P-values and Second Moments for Feature Occurrences
+7:  Compute P-values and Max Variances for Feature Occurrences
+8:  Wavelet variance using Discrete Wavelet Transforms
+9:  Quantify the abundances of a set of target sequences from sampled subsequences
+10: Read QC reports using FastQC
+11: Combine FASTA and QUAL into FASTQ.
+12: Filter FASTQ reads by quality score and length
+13: Convert between various FASTQ quality formats.
+14: Manipulate FASTQ reads on various attributes.
+15: FASTQ Masker by quality score
+16: FASTQ de-interlacer on paired end reads.
+17: FASTQ interlacer on paired end reads
+18: FASTQ joiner on paired end reads
+19: FASTQ splitter on joined paired end reads
+20: FASTQ Summary Statistics by column
+21: FASTQ to FASTA converter
+22: FASTQ to Tabular converter
+23: FASTQ Trimmer by quality
+24: FASTQ Quality Trimmer by sliding window
+25: Filter Combined Transcripts
+26: find_diag_hits
+27: Call SNPs with Freebayes
+28: Fetch taxonomic representation
+29: GMAJ Multiple Alignment Viewer
+30: Find lowest diagnostic rank
+31: Model-based Analysis of ChIP-Seq
+32: Poisson two-sample test
+33: Statistical approach for the Identification of ChIP-Enriched Regions
+34: Draw phylogeny
+35: Summarize taxonomy
+36: Tabular to FASTQ converter
+37: Find splice junctions using RNA-seq data
+38: Gapped-read mapper for RNA-seq data
+39: Annotate a VCF file (dbSNP, hapmap)
+40: Extract reads from a specified region
+41: Filter a VCF file
+42: Generate the intersection of two VCF files
+43: Sequence Logo generator for fasta (e.g. Clustal alignments)
+
+The tools are now available in the following repositories, respectively:
+
+1:  bowtie2
+2:  ccat
+3:  clustalw
+4:  dwt_cor_ava_perclass
+5:  dwt_cor_avb_all
+6:  dwt_ivc_all
+7:  dwt_var_perclass
+8:  dwt_var_perfeature
+9:  express
+10: fastqc
+11: fastq_combiner
+12: fastq_filter
+13: fastq_groomer
+14: fastq_manipulation
+15: fastq_masker_by_quality
+16: fastq_paired_end_deinterlacer
+17: fastq_paired_end_interlacer
+18: fastq_paired_end_joiner
+19: fastq_paired_end_splitter
+20: fastq_stats
+21: fastqtofasta
+22: fastq_to_tabular
+23: fastq_trimmer
+24: fastq_trimmer_by_quality
+25: filter_transcripts_via_tracking
+26: find_diag_hits
+27: freebayes_wrapper
+28: gi2taxonomy
+29: gmaj
+30: lca_wrapper
+31: macs
+32: poisson2test
+33: sicer
+34: t2ps
+35: t2t_report
+36: tabular_to_fastq
+37: tophat
+38: tophat2
+39: vcf_annotate
+40: vcf_extract
+41: vcf_filter
+42: vcf_intersect
+43: weblogo3
+
+from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
+and will be installed into your local Galaxy instance at the
+location discussed above by running the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0010_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0010_tools.py
new file mode 100644
index 0000000..ca06923
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0010_tools.py
@@ -0,0 +1,112 @@
+"""
+The following tools have been eliminated from the distribution:
+
+1. Analyze Covariates
+2. Base Coverage of all intervals
+3. Perform Best-subsets Regression
+4. Cluster
+5. Complement intervals of a dataset
+6. Compute q-values based on multiple simultaneous tests p-values
+7. Concatenate two datasets into one dataset
+8. Count Covariates on BAM files
+9. Coverage of a set of intervals on second set of intervals
+10. Depth of Coverage on BAM files
+11. Feature coverage
+12. Fetch closest non-overlapping feature for every interval
+13. Get flanks - returns flanking region/s for every gene
+14. Estimate Indel Rates for 3-way alignments
+15. Fetch Indels from pairwise alignments
+16. Indel Realigner - perform local realignment
+17. Intersect the intervals of two datasets
+18. Join the intervals of two datasets side-by-side
+19. Perform Linear Regression
+20. Perform Logistic Regression with vif
+21. Mask CpG/non-CpG sites from MAF file
+22. Merge the overlapping intervals of a dataset
+23. Extract Orthologous Microsatellites from pair-wise alignments
+24. Estimate microsatellite mutability by specified attributes
+25. Compute partial R square
+26. Print Reads from BAM files
+27. Filter nucleotides based on quality scores
+28. Compute RCVE
+29. Realigner Target Creator for use in local realignment
+30. Estimate substitution rates for non-coding regions
+31. Fetch substitutions from pairwise alignments
+32. Subtract the intervals of two datasets
+33. Subtract Whole Dataset from another dataset
+34. Table Recalibration on BAM files
+35. Arithmetic Operations on tables
+36. Unified Genotyper SNP and indel caller
+37. Variant Annotator
+38. Apply Variant Recalibration
+39. Combine Variants
+40. Eval Variants
+41. Variant Filtration on VCF files
+42. Variant Recalibrator
+43. Select Variants from VCF files
+44. Validate Variants
+45. Assign weighted-average of the values of features overlapping an interval
+46. Make windows
+
+The tools are now available in the following repositories, respectively:
+
+1. analyze_covariates
+2. basecoverage
+3. best_regression_subsets
+4. cluster
+5. complement
+6. compute_q_values
+7. concat
+8. count_covariates
+9. coverage
+10. depth_of_coverage
+11. featurecounter
+12. flanking_features
+13. get_flanks
+14. getindelrates_3way
+15. getindels_2way
+16. indel_realigner
+17. intersect
+18. join
+19. linear_regression
+20. logistic_regression_vif
+21. maf_cpg_filter
+22. merge
+23. microsats_alignment_level
+24. microsats_mutability
+25. partialr_square
+26. print_reads
+27. quality_filter
+28. rcve
+29. realigner_target_creator
+30. substitution_rates
+31. substitutions
+32. subtract
+33. subtract_query
+34. table_recalibration
+35. tables_arithmetic_operations
+36. unified_genotyper
+37. variant_annotator
+38. variant_apply_recalibration
+39. variant_combine
+40. variant_eval
+41. variant_filtration
+42. variant_recalibrator
+43. variant_select
+44. variants_validate
+45. weightedaverage
+46. windowsplitter
+
+from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
+and will be installed into your local Galaxy instance at the
+location discussed above by running the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0011_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0011_tools.py
new file mode 100644
index 0000000..a9bf26f
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0011_tools.py
@@ -0,0 +1,64 @@
+"""
+The following tools have been eliminated from the distribution:
+
+1:  Profile Annotations for a set of genomic intervals
+2:  Polymorphism of the Reads
+3:  Coverage of the Reads in wiggle format
+4:  Canonical Correlation Analysis
+5:  Convert Color Space to Nucleotides
+6:  Compute sequence length
+7:  Concatenate FASTA alignment by species
+8:  Filter sequences by length
+9:  FASTA-to-Tabular converter
+10: FASTQSOLEXA-to-FASTA-QUAL extracts sequences and quality scores from FASTQSOLEXA data
+11: Kernel Canonical Correlation Analysis
+12: Kernel Principal Component Analysis
+13: Format mapping data as UCSC custom track
+14: Megablast compare short reads against htgs, nt, and wgs databases
+15: Parse blast XML output
+16: Principal Component Analysis
+17: RMAP for Solexa Short Reads Alignment
+18: RMAPQ for Solexa Short Reads Alignment with Quality Scores
+19: Histogram of high quality score reads
+20: Build base quality distribution
+21: Select high quality segments
+22: Tabular-to-FASTA
+
+The tools are now available in the following repositories, respectively:
+
+1:  annotation_profiler
+2:  blat_coverage_report
+3:  blat_mapping
+4:  canonical_correlation_analysis
+5:  convert_solid_color2nuc
+6:  fasta_compute_length
+7:  fasta_concatenate_by_species
+8:  fasta_filter_by_length
+9:  fasta_to_tabular
+10: fastqsolexa_to_fasta_qual
+11: kernel_canonical_correlation_analysis
+12: kernel_principal_component_analysis
+13: mapping_to_ucsc
+14: megablast_wrapper
+15: megablast_xml_parser
+16: principal_component_analysis
+17: rmap
+18: rmapq
+19: short_reads_figure_high_quality_length
+20: short_reads_figure_score
+21: short_reads_trim_seq
+22: tabular_to_fasta
+
+from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
+and will be installed into your local Galaxy instance at the
+location discussed above by running the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/migrate/versions/0012_tools.py b/lib/tool_shed/galaxy_install/migrate/versions/0012_tools.py
new file mode 100644
index 0000000..98dd979
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/migrate/versions/0012_tools.py
@@ -0,0 +1,50 @@
+"""
+The following tools have been eliminated from the distribution:
+
+1:  Compute an expression on every row
+2:  Correlation for numeric columns
+3:  Count GFF Features
+4:  Filter on ambiguities in polymorphism datasets
+5:  Generate A Matrix for using PC and LDA
+6:  Histogram of a numeric column
+7:  Perform Linear Discriminant Analysis
+8:  Maximal Information-based Nonparametric Exploration
+9:  Pearson's Correlation between any two numeric columns
+10: Convert from pgSnp to gd_snp
+11: Draw ROC plot on "Perform LDA" output
+12: Scatterplot of two numeric columns
+13: snpFreq significant SNPs in case-control data
+14: Build custom track for UCSC genome browser
+15: VCF to pgSnp
+
+The tools are now available in the following repositories, respectively:
+
+1:  column_maker
+2:  correlation
+3:  count_gff_features
+4:  dna_filtering
+5:  generate_pc_lda_matrix
+6:  histogram
+7:  lda_analysis
+8:  mine
+9:  pearson_correlation
+10: pgsnp2gd_snp
+11: plot_from_lda
+12: scatterplot
+13: snpfreq
+14: ucsc_custom_track
+15: vcf2pgsnp
+
+from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
+and will be installed into your local Galaxy instance at the
+location discussed above by running the following command.
+"""
+from __future__ import print_function
+
+
+def upgrade(migrate_engine):
+    print(__doc__)
+
+
+def downgrade(migrate_engine):
+    pass
diff --git a/lib/tool_shed/galaxy_install/repair_repository_manager.py b/lib/tool_shed/galaxy_install/repair_repository_manager.py
new file mode 100644
index 0000000..2c444ee
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/repair_repository_manager.py
@@ -0,0 +1,229 @@
+import logging
+import tempfile
+
+from tool_shed.galaxy_install import install_manager
+from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
+from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.util import basic_util
+from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import hg_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import tool_dependency_util
+
+log = logging.getLogger( __name__ )
+
+
+class RepairRepositoryManager():
+
+    def __init__( self, app ):
+        self.app = app
+
+    def get_installed_repositories_from_repository_dependencies( self, repository_dependencies_dict ):
+        installed_repositories = []
+        if repository_dependencies_dict and isinstance( repository_dependencies_dict, dict ):
+            for rd_key, rd_vals in repository_dependencies_dict.items():
+                if rd_key in [ 'root_key', 'description' ]:
+                    continue
+                # rd_key is something like: 'http://localhost:9009__ESEP__package_rdkit_2012_12__ESEP__test__ESEP__d635ffb9c665__ESEP__True'
+                # rd_val is something like: [['http://localhost:9009', 'package_numpy_1_7', 'test', 'cddd64ecd985', 'True']]
+                repository_components_tuple = container_util.get_components_from_key( rd_key )
+                components_list = repository_util.extract_components_from_tuple( repository_components_tuple )
+                tool_shed, name, owner, changeset_revision = components_list[ 0:4 ]
+                installed_repository = repository_util.get_installed_repository( self.app,
+                                                                                 tool_shed=tool_shed,
+                                                                                 name=name,
+                                                                                 owner=owner,
+                                                                                 changeset_revision=changeset_revision )
+                if installed_repository and installed_repository not in installed_repositories:
+                    installed_repositories.append( installed_repository )
+                for rd_val in rd_vals:
+                    tool_shed, name, owner, changeset_revision = rd_val[ 0:4 ]
+                    installed_repository = repository_util.get_installed_repository( self.app,
+                                                                                     tool_shed=tool_shed,
+                                                                                     name=name,
+                                                                                     owner=owner,
+                                                                                     changeset_revision=changeset_revision )
+                    if installed_repository and installed_repository not in installed_repositories:
+                        installed_repositories.append( installed_repository )
+        return installed_repositories
+
+    def get_repair_dict( self, repository ):
+        """
+        Inspect the installed repository dependency hierarchy for a specified repository
+        and attempt to make sure that every repository in that hierarchy is properly
+        installed, along with each repository's tool dependencies.  This method is called
+        only from Galaxy when attempting to correct issues with an installed repository
+        that has installation problems somewhere in its dependency hierarchy.  Problems
+        with dependencies that have never been installed cannot be resolved with a repair.
+        """
+        rdim = repository_dependency_manager.RepositoryDependencyInstallManager( self.app )
+        tsr_ids = []
+        repo_info_dicts = []
+        tool_panel_section_keys = []
+        repair_dict = {}
+        irm = install_manager.InstallRepositoryManager( self.app )
+        # Get a dictionary of all repositories upon which the contents of the current repository_metadata
+        # record depend.
+        repository_dependencies_dict = rdim.get_repository_dependencies_for_installed_tool_shed_repository( self.app,
+                                                                                                            repository )
+        if repository_dependencies_dict:
+            # Generate the list of installed repositories from the information contained in the
+            # repository_dependencies dictionary.
+            installed_repositories = self.get_installed_repositories_from_repository_dependencies( repository_dependencies_dict )
+            # Some repositories may have repository dependencies that are required to be installed before
+            # the dependent repository, so we'll order the list of tsr_ids to ensure all repositories are
+            # repaired in the required order.
+            installed_repositories.append(repository)
+            for installed_repository in installed_repositories:
+                tsr_ids.append( self.app.security.encode_id( installed_repository.id ) )
+                repo_info_dict, tool_panel_section_key = self.get_repo_info_dict_for_repair( rdim,
+                                                                                             installed_repository )
+                tool_panel_section_keys.append( tool_panel_section_key )
+                repo_info_dicts.append( repo_info_dict )
+        else:
+            # The received repository has no repository dependencies.
+            tsr_ids.append( self.app.security.encode_id( repository.id ) )
+            repo_info_dict, tool_panel_section_key = self.get_repo_info_dict_for_repair( rdim,
+                                                                                         repository )
+            tool_panel_section_keys.append( tool_panel_section_key )
+            repo_info_dicts.append( repo_info_dict )
+        ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \
+            irm.order_components_for_installation( tsr_ids,
+                                                   repo_info_dicts,
+                                                   tool_panel_section_keys=tool_panel_section_keys )
+        repair_dict[ 'ordered_tsr_ids' ] = ordered_tsr_ids
+        repair_dict[ 'ordered_repo_info_dicts' ] = ordered_repo_info_dicts
+        repair_dict[ 'ordered_tool_panel_section_keys' ] = ordered_tool_panel_section_keys
+        return repair_dict
+
+    def get_repo_info_dict_for_repair( self, rdim, repository ):
+        tool_panel_section_key = None
+        repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
+        repository_dependencies = rdim.get_repository_dependencies_for_installed_tool_shed_repository( self.app,
+                                                                                                       repository )
+        metadata = repository.metadata
+        if metadata:
+            tool_dependencies = metadata.get( 'tool_dependencies', None )
+            tool_panel_section_dict = metadata.get( 'tool_panel_section', None )
+            if tool_panel_section_dict:
+                # The repository must be in the uninstalled state.  The structure of tool_panel_section_dict is:
+                # {<tool guid> :
+                # [{ 'id':<section id>, 'name':<section name>, 'version':<section version>, 'tool_config':<tool config file name> }]}
+                # Here is an example:
+                # {"localhost:9009/repos/test/filter/Filter1/1.1.0":
+                # [{"id": "filter_and_sort", "name": "Filter and Sort", "tool_config": "filtering.xml", "version": ""}]}
+                # Currently all tools contained within an installed tool shed repository must be loaded into the same
+                # section in the tool panel, so we can get the section id of the first guid in the tool_panel_section_dict.
+                # In the future, we'll have to handle different sections per guid.
+                guid = next(iter(tool_panel_section_dict))
+                section_dicts = tool_panel_section_dict[ guid ]
+                section_dict = section_dicts[ 0 ]
+                tool_panel_section_id = section_dict[ 'id' ]
+                tool_panel_section_name = section_dict[ 'name' ]
+                if tool_panel_section_id:
+                    tpm = tool_panel_manager.ToolPanelManager( self.app )
+                    tool_panel_section_key, _ = \
+                        tpm.get_or_create_tool_section( self.app.toolbox,
+                                                        tool_panel_section_id=tool_panel_section_id,
+                                                        new_tool_panel_section_label=tool_panel_section_name )
+        else:
+            tool_dependencies = None
+        repo_info_dict = repository_util.create_repo_info_dict( app=self.app,
+                                                                repository_clone_url=repository_clone_url,
+                                                                changeset_revision=repository.changeset_revision,
+                                                                ctx_rev=repository.ctx_rev,
+                                                                repository_owner=repository.owner,
+                                                                repository_name=repository.name,
+                                                                repository=None,
+                                                                repository_metadata=None,
+                                                                tool_dependencies=tool_dependencies,
+                                                                repository_dependencies=repository_dependencies )
+        return repo_info_dict, tool_panel_section_key
+
+    def repair_tool_shed_repository( self, repository, repo_info_dict ):
+
+        def add_repair_dict_entry( repository_name, error_message ):
+            if repository_name in repair_dict:
+                repair_dict[ repository_name ].append( error_message )
+            else:
+                repair_dict[ repository_name ] = [ error_message ]
+            return repair_dict
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, repository.tool_shed )
+        metadata = repository.metadata
+        # The repository.metadata contains dependency information that corresponds to the current changeset revision,
+        # which may be different from what is stored in the database.
+        # If any of these repository-repository dependency associations are obsolete, clean_dependency_relationships removes them.
+        suc.clean_dependency_relationships(self.app, metadata, repository, tool_shed_url)
+        repair_dict = {}
+        tpm = tool_panel_manager.ToolPanelManager( self.app )
+        if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+            try:
+                self.app.installed_repository_manager.activate_repository( repository )
+            except Exception as e:
+                error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
+                log.debug( error_message )
+                repair_dict[ repository.name ] = error_message
+        elif repository.status not in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
+            shed_tool_conf, tool_path, relative_install_dir = \
+                suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
+            # Reset the repository attributes to the New state for installation.
+            if metadata:
+                _, tool_panel_section_key = \
+                    tpm.handle_tool_panel_selection( self.app.toolbox,
+                                                     metadata,
+                                                     no_changes_checked=True,
+                                                     tool_panel_section_id=None,
+                                                     new_tool_panel_section_label=None )
+            else:
+                # The tools will be loaded outside of any sections in the tool panel.
+                tool_panel_section_key = None
+            repository_util.set_repository_attributes( self.app,
+                                                       repository,
+                                                       status=self.app.install_model.ToolShedRepository.installation_status.NEW,
+                                                       error_message=None,
+                                                       deleted=False,
+                                                       uninstalled=False,
+                                                       remove_from_disk=True )
+            irm = install_manager.InstallRepositoryManager( self.app, tpm )
+            irm.install_tool_shed_repository( repository,
+                                              repo_info_dict,
+                                              tool_panel_section_key,
+                                              shed_tool_conf,
+                                              tool_path,
+                                              install_tool_dependencies=True,
+                                              install_resolver_dependencies=False,  # Assuming repairs are only necessary for tool shed packages
+                                              reinstalling=True )
+            if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.ERROR ]:
+                repair_dict = add_repair_dict_entry( repository.name, repository.error_message )
+        else:
+            irm = install_manager.InstallRepositoryManager( self.app, tpm )
+            # We have an installed tool shed repository, so handle tool dependencies if necessary.
+            if repository.missing_tool_dependencies and metadata and 'tool_dependencies' in metadata:
+                work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itdep" )
+                # Reset missing tool dependencies.
+                for tool_dependency in repository.missing_tool_dependencies:
+                    if tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR,
+                                                   self.app.install_model.ToolDependency.installation_status.INSTALLING ]:
+                        tool_dependency = \
+                            tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                 tool_dependency=tool_dependency,
+                                                                                 status=self.app.install_model.ToolDependency.installation_status.UNINSTALLED )
+                # Install tool dependencies.
+                irm.update_tool_shed_repository_status( repository,
+                                                        self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+                # Get the tool_dependencies.xml file from the repository.
+                tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( self.app ) )
+                itdm = install_manager.InstallToolDependencyManager( self.app )
+                installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=repository,
+                                                                                        tool_dependencies_config=tool_dependencies_config,
+                                                                                        tool_dependencies=repository.tool_dependencies,
+                                                                                        from_tool_migration_manager=False )
+                for installed_tool_dependency in installed_tool_dependencies:
+                    if installed_tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR ]:
+                        repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message )
+                basic_util.remove_dir( work_dir )
+            irm.update_tool_shed_repository_status( repository,
+                                                    self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
+        return repair_dict
diff --git a/lib/tool_shed/galaxy_install/repository_dependencies/__init__.py b/lib/tool_shed/galaxy_install/repository_dependencies/__init__.py
new file mode 100644
index 0000000..e69de29
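
Both RepairRepositoryManager above and the RepositoryDependencyInstallManager
below work with repository-dependency keys of the form shown in their inline
comments: tool shed URL, repository name, owner, changeset revision (plus flags)
joined by the '__ESEP__' separator. A minimal sketch of that decoding,
independent of container_util, using the example key from the comments above:

    ENCODED_SEP = "__ESEP__"
    rd_key = ("http://localhost:9009__ESEP__package_rdkit_2012_12"
              "__ESEP__test__ESEP__d635ffb9c665__ESEP__True")
    tool_shed, name, owner, changeset_revision = rd_key.split(ENCODED_SEP)[0:4]
    assert (tool_shed, name, owner, changeset_revision) == (
        "http://localhost:9009", "package_rdkit_2012_12", "test", "d635ffb9c665")
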
diff --git a/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py b/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
new file mode 100644
index 0000000..82accd5
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
@@ -0,0 +1,471 @@
+"""
+Class encapsulating the management of repository dependencies installed or being installed
+into Galaxy from the Tool Shed.
+"""
+import json
+import logging
+import os
+
+from six.moves.urllib.parse import urlencode
+from six.moves.urllib.request import Request, urlopen
+
+from galaxy.util import asbool, build_url, url_get
+from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import encoding_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+
+class RepositoryDependencyInstallManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def build_repository_dependency_relationships( self, repo_info_dicts, tool_shed_repositories ):
+        """
+        Build relationships between installed tool shed repositories and other installed
+        tool shed repositories upon which they depend.  These relationships are defined in
+        the repository_dependencies entry for each dictionary in the received list of repo_info_dicts.
+        Each of these dictionaries is associated with a repository in the received tool_shed_repositories
+        list.
+        """
+        install_model = self.app.install_model
+        log.debug( "Building repository dependency relationships..." )
+        for repo_info_dict in repo_info_dicts:
+            for name, repo_info_tuple in repo_info_dict.items():
+                description, \
+                    repository_clone_url, \
+                    changeset_revision, \
+                    ctx_rev, \
+                    repository_owner, \
+                    repository_dependencies, \
+                    tool_dependencies = \
+                    repository_util.get_repo_info_tuple_contents( repo_info_tuple )
+                if repository_dependencies:
+                    for key, val in repository_dependencies.items():
+                        if key in [ 'root_key', 'description' ]:
+                            continue
+                        d_repository = None
+                        repository_components_tuple = container_util.get_components_from_key( key )
+                        components_list = repository_util.extract_components_from_tuple( repository_components_tuple )
+                        d_toolshed, d_name, d_owner, d_changeset_revision = components_list[ 0:4 ]
+                        for tsr in tool_shed_repositories:
+                            # Get the tool_shed_repository defined by name, owner and changeset_revision.  This is
+                            # the repository that will be dependent upon each of the tool shed repositories contained in
+                            # val.  We'll need to check tool_shed_repository.tool_shed as well if/when repository dependencies
+                            # across tool sheds is supported.
+                            if tsr.name == d_name and tsr.owner == d_owner and tsr.changeset_revision == d_changeset_revision:
+                                d_repository = tsr
+                                break
+                        if d_repository is None:
+                            # The dependent repository is not in the received list so look in the database.
+                            d_repository = self.get_or_create_tool_shed_repository( d_toolshed,
+                                                                                    d_name,
+                                                                                    d_owner,
+                                                                                    d_changeset_revision )
+                        # Process each repository_dependency defined for the current dependent repository.
+                        for repository_dependency_components_list in val:
+                            required_repository = None
+                            rd_toolshed, \
+                                rd_name, \
+                                rd_owner, \
+                                rd_changeset_revision, \
+                                rd_prior_installation_required, \
+                                rd_only_if_compiling_contained_td = \
+                                common_util.parse_repository_dependency_tuple( repository_dependency_components_list )
+                            # Get the tool_shed_repository defined by rd_name, rd_owner and rd_changeset_revision.  This
+                            # is the repository that will be required by the current d_repository.
+                            # TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds is supported.
+                            for tsr in tool_shed_repositories:
+                                if tsr.name == rd_name and tsr.owner == rd_owner and tsr.changeset_revision == rd_changeset_revision:
+                                    required_repository = tsr
+                                    break
+                            if required_repository is None:
+                                # The required repository is not in the received list so look in the database.
+                                required_repository = self.get_or_create_tool_shed_repository( rd_toolshed,
+                                                                                               rd_name,
+                                                                                               rd_owner,
+                                                                                               rd_changeset_revision )
+                            # Ensure there is a repository_dependency relationship between d_repository and required_repository.
+                            rrda = None
+                            for rd in d_repository.repository_dependencies:
+                                if rd.id == required_repository.id:
+                                    rrda = rd
+                                    break
+                            if not rrda:
+                                # Make sure required_repository is in the repository_dependency table.
+                                repository_dependency = self.get_repository_dependency_by_repository_id( install_model,
+                                                                                                         required_repository.id )
+                                if not repository_dependency:
+                                    log.debug( 'Creating new repository_dependency record for installed revision %s of repository: %s owned by %s.' %
+                                               ( str( required_repository.installed_changeset_revision ),
+                                                 str( required_repository.name ),
+                                                 str( required_repository.owner ) ) )
+                                    repository_dependency = install_model.RepositoryDependency( tool_shed_repository_id=required_repository.id )
+                                    install_model.context.add( repository_dependency )
+                                    install_model.context.flush()
+                                # Build the relationship between the d_repository and the required_repository.
+                                rrda = install_model.RepositoryRepositoryDependencyAssociation( tool_shed_repository_id=d_repository.id,
+                                                                                                repository_dependency_id=repository_dependency.id )
+                                install_model.context.add( rrda )
+                                install_model.context.flush()
+
+    def create_repository_dependency_objects( self, tool_path, tool_shed_url, repo_info_dicts, install_repository_dependencies=False,
+                                              no_changes_checked=False, tool_panel_section_id=None, new_tool_panel_section_label=None ):
+        """
+        Discover all repository dependencies and make sure all tool_shed_repository and
+        associated repository_dependency records exist as well as the dependency relationships
+        between installed repositories.  This method is called when uninstalled repositories
+        are being reinstalled.  If the user elected to install repository dependencies, all
+        items in the all_repo_info_dicts list will be processed.  However, if repository
+        dependencies are not to be installed, only those items contained in the received
+        repo_info_dicts list will be processed.
+        """
+        install_model = self.app.install_model
+        log.debug( "Creating repository dependency objects..." )
+        # The following list will be maintained within this method to contain all created
+        # or updated tool shed repositories, including repository dependencies that may not
+        # be installed.
+        all_created_or_updated_tool_shed_repositories = []
+        # There will be a one-to-one mapping between items in 3 lists:
+        # created_or_updated_tool_shed_repositories, tool_panel_section_keys
+        # and filtered_repo_info_dicts.  The 3 lists will filter out repository
+        # dependencies that are not to be installed.
+        created_or_updated_tool_shed_repositories = []
+        tool_panel_section_keys = []
+        # Repositories will be filtered (e.g., if already installed, if elected
+        # to not be installed, etc), so filter the associated repo_info_dicts accordingly.
+        filtered_repo_info_dicts = []
+        # Discover all repository dependencies and retrieve information for installing
+        # them.  Even if the user elected to not install repository dependencies we have
+        # to make sure all repository dependency objects exist so that the appropriate
+        # repository dependency relationships can be built.
+        all_required_repo_info_dict = self.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
+        all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
+        if not all_repo_info_dicts:
+            # No repository dependencies were discovered so process the received repositories.
+            all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
+        for repo_info_dict in all_repo_info_dicts:
+            # If the user elected to install repository dependencies, all items in the
+            # all_repo_info_dicts list will be processed.  However, if repository dependencies
+            # are not to be installed, only those items contained in the received repo_info_dicts
+            # list will be processed but the all_repo_info_dicts list will be used to create all
+            # defined repository dependency relationships.
+            if self.is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
+                for name, repo_info_tuple in repo_info_dict.items():
+                    can_update_db_record = False
+                    description, \
+                        repository_clone_url, \
+                        changeset_revision, \
+                        ctx_rev, \
+                        repository_owner, \
+                        repository_dependencies, \
+                        tool_dependencies = \
+                        repository_util.get_repo_info_tuple_contents( repo_info_tuple )
+                    # See if the repository has an existing record in the database.
+                    repository_db_record, installed_changeset_revision = \
+                        repository_util.repository_was_previously_installed( self.app, tool_shed_url, name, repo_info_tuple, from_tip=False )
+                    if repository_db_record:
+                        if repository_db_record.status in [ install_model.ToolShedRepository.installation_status.INSTALLED,
+                                                            install_model.ToolShedRepository.installation_status.CLONING,
+                                                            install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+                                                            install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+                                                            install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+                                                            install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+                            debug_msg = "Skipping installation of revision %s of repository '%s' because it was installed " % \
+                                ( str( changeset_revision ), str( repository_db_record.name ) )
+                            debug_msg += "with the (possibly updated) revision %s and its current installation status is '%s'." % \
+                                ( str( installed_changeset_revision ), str( repository_db_record.status ) )
+                            log.debug( debug_msg )
+                            can_update_db_record = False
+                        else:
+                            if repository_db_record.status in [ install_model.ToolShedRepository.installation_status.ERROR,
+                                                                install_model.ToolShedRepository.installation_status.NEW,
+                                                                install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+                                # The current tool shed repository is not currently installed, so we can update its
+                                # record in the database.
+                                name = repository_db_record.name
+                                installed_changeset_revision = repository_db_record.installed_changeset_revision
+                                can_update_db_record = True
+                            elif repository_db_record.status in [ install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+                                # The current tool shed repository is deactivated, so updating its database record
+                                # is not necessary - just activate it.
+                                log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( repository_db_record.name ) )
+                                self.app.installed_repository_manager.activate_repository( repository_db_record )
+                                # No additional updates to the database record are necessary.
+                                can_update_db_record = False
+                            elif repository_db_record.status not in [ install_model.ToolShedRepository.installation_status.NEW ]:
+                                # Set changeset_revision here so repository_util.create_or_update_tool_shed_repository will find
+                                # the previously installed and uninstalled repository instead of creating a new record.
+                                changeset_revision = repository_db_record.installed_changeset_revision
+                                self.reset_previously_installed_repository( repository_db_record )
+                                can_update_db_record = True
+                    else:
+                        # No record exists in the database for the repository currently being processed.
+                        installed_changeset_revision = changeset_revision
+                        can_update_db_record = True
+                    if can_update_db_record:
+                        # The database record for the tool shed repository currently being processed can be updated.
+                        # Get the repository metadata to see where it was previously located in the tool panel.
+                        tpm = tool_panel_manager.ToolPanelManager( self.app )
+                        if repository_db_record and repository_db_record.metadata:
+                            _, tool_panel_section_key = \
+                                tpm.handle_tool_panel_selection( toolbox=self.app.toolbox,
+                                                                 metadata=repository_db_record.metadata,
+                                                                 no_changes_checked=no_changes_checked,
+                                                                 tool_panel_section_id=tool_panel_section_id,
+                                                                 new_tool_panel_section_label=new_tool_panel_section_label )
+                        else:
+                            # We're installing a new tool shed repository that does not yet have a database record.
+                            tool_panel_section_key, _ = \
+                                tpm.handle_tool_panel_section( self.app.toolbox,
+                                                               tool_panel_section_id=tool_panel_section_id,
+                                                               new_tool_panel_section_label=new_tool_panel_section_label )
+                        tool_shed_repository = \
+                            repository_util.create_or_update_tool_shed_repository( app=self.app,
+                                                                                   name=name,
+                                                                                   description=description,
+                                                                                   installed_changeset_revision=installed_changeset_revision,
+                                                                                   ctx_rev=ctx_rev,
+                                                                                   repository_clone_url=repository_clone_url,
+                                                                                   metadata_dict={},
+                                                                                   status=install_model.ToolShedRepository.installation_status.NEW,
+                                                                                   current_changeset_revision=changeset_revision,
+                                                                                   owner=repository_owner,
+                                                                                   dist_to_shed=False )
+                        if tool_shed_repository not in all_created_or_updated_tool_shed_repositories:
+                            all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+                        # Only append the tool shed repository to the list of created_or_updated_tool_shed_repositories if
+                        # it is supposed to be installed.
+                        if install_repository_dependencies or self.is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+                            if tool_shed_repository not in created_or_updated_tool_shed_repositories:
+                                # Keep the one-to-one mapping between items in 3 lists.
+                                created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+                                tool_panel_section_keys.append( tool_panel_section_key )
+                                filtered_repo_info_dicts.append( repo_info_dict )
+        # Build repository dependency relationships even if the user chose to not install repository dependencies.
+        self.build_repository_dependency_relationships( all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
+        return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts
+
+    def get_or_create_tool_shed_repository( self, tool_shed, name, owner, changeset_revision ):
+        """
+        Return a tool shed repository database record defined by the combination of
+        tool shed, repository name, repository owner and changeset_revision or
+        installed_changeset_revision.  A new tool shed repository record will be
+        created if one is not located.
+        """
+        install_model = self.app.install_model
+        # The protocol is not stored in the database, but the port is if it exists.
+        tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
+        # This method is used only in Galaxy, not the tool shed.
+        repository = repository_util.get_repository_for_dependency_relationship( self.app, tool_shed, name, owner, changeset_revision )
+        if not repository:
+            tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed )
+            repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+            ctx_rev = suc.get_ctx_rev( self.app, tool_shed_url, name, owner, changeset_revision )
+            repository = repository_util.create_or_update_tool_shed_repository( app=self.app,
+                                                                                name=name,
+                                                                                description=None,
+                                                                                installed_changeset_revision=changeset_revision,
+                                                                                ctx_rev=ctx_rev,
+                                                                                repository_clone_url=repository_clone_url,
+                                                                                metadata_dict={},
+                                                                                status=install_model.ToolShedRepository.installation_status.NEW,
+                                                                                current_changeset_revision=None,
+                                                                                owner=owner,
+                                                                                dist_to_shed=False )
+        return repository
+
+    def get_repository_dependencies_for_installed_tool_shed_repository( self, app, repository ):
+        """
+        Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
+        for the received repository which is installed into Galaxy.  This method is called only from Galaxy.
+        """
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( repository.tool_shed ) )
+        params = dict( name=str( repository.name ),
+                       owner=str( repository.owner ),
+                       changeset_revision=str( repository.changeset_revision ) )
+        pathspec = [ 'repository', 'get_repository_dependencies' ]
+        try:
+            raw_text = url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        except Exception as e:
+            log.error("The URL\n%s\nraised the exception:\n%s\n", build_url( tool_shed_url, pathspec=pathspec, params=params ), str( e ) )
+            return ''
+        if len( raw_text ) > 2:
+            encoded_text = json.loads( raw_text )
+            text = encoding_util.tool_shed_decode( encoded_text )
+        else:
+            text = ''
+        return text
+
+    def get_repository_dependency_by_repository_id( self, install_model, decoded_repository_id ):
+        return install_model.context.query( install_model.RepositoryDependency ) \
+                                    .filter( install_model.RepositoryDependency.table.c.tool_shed_repository_id == decoded_repository_id ) \
+                                    .first()
+
+    def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
+        """
+        Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
+        them to the list.  All repository_dependency entries in each of the received repo_info_dicts include
+        all required repositories, so only one pass through this method is required to retrieve all repository
+        dependencies.
+        """
+        all_required_repo_info_dict = {}
+        all_repo_info_dicts = []
+        if repo_info_dicts:
+            # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
+            # shed to discover repository ids.
+            required_repository_tups = []
+            for repo_info_dict in repo_info_dicts:
+                if repo_info_dict not in all_repo_info_dicts:
+                    all_repo_info_dicts.append( repo_info_dict )
+                for repository_name, repo_info_tup in repo_info_dict.items():
+                    description, \
+                        repository_clone_url, \
+                        changeset_revision, \
+                        ctx_rev, \
+                        repository_owner, \
+                        repository_dependencies, \
+                        tool_dependencies = \
+                        repository_util.get_repo_info_tuple_contents( repo_info_tup )
+                    if repository_dependencies:
+                        for key, val in repository_dependencies.items():
+                            if key in [ 'root_key', 'description' ]:
+                                continue
+                            repository_components_tuple = container_util.get_components_from_key( key )
+                            components_list = repository_util.extract_components_from_tuple( repository_components_tuple )
+                            # Skip listing a repository dependency if it is required only to compile a tool dependency
+                            # defined for the dependent repository since in this case, the repository dependency is really
+                            # a dependency of the dependent repository's contained tool dependency, and only if that
+                            # tool dependency requires compilation.
+                            # For backward compatibility to the 12/20/12 Galaxy release.
+                            # Older metadata (4- or 5-component keys) does not carry the flag,
+                            # so default it to 'False' in all cases.
+                            only_if_compiling_contained_td = 'False'
+                            if not asbool( only_if_compiling_contained_td ):
+                                if components_list not in required_repository_tups:
+                                    required_repository_tups.append( components_list )
+                            for components_list in val:
+                                try:
+                                    only_if_compiling_contained_td = components_list[ 5 ]
+                                except IndexError:
+                                    only_if_compiling_contained_td = 'False'
+                                # Skip listing a repository dependency if it is required only to compile a tool dependency
+                                # defined for the dependent repository (see above comment).
+                                if not asbool( only_if_compiling_contained_td ):
+                                    if components_list not in required_repository_tups:
+                                        required_repository_tups.append( components_list )
+                    else:
+                        # We have a single repository with no dependencies.
+                        components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
+                        required_repository_tups.append( components_list )
+                if required_repository_tups:
+                    # The value of required_repository_tups is a list of tuples, so we need to encode it.
+                    encoded_required_repository_tups = []
+                    for required_repository_tup in required_repository_tups:
+                        # Convert every item in required_repository_tup to a string.
+                        required_repository_tup = [ str( item ) for item in required_repository_tup ]
+                        encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
+                    encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
+                    encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
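+                    # Each required_repository_tup is now a single encoding_sep-joined string, and
+                    # the tuples are joined with encoding_sep2 and tool-shed-encoded so the whole
+                    # list can be sent to the tool shed as one request parameter.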
+                    if repository_util.is_tool_shed_client( self.app ):
+                        # Handle secure / insecure Tool Shed URL protocol changes and port changes.
+                        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
+                    pathspec = [ 'repository', 'get_required_repo_info_dict' ]
+                    url = build_url( tool_shed_url, pathspec=pathspec )
+                    # Work around urlopen() not handling a 307 redirect nicely when the Request()
+                    # has data: resolve the final URL with a plain GET first, then POST to it.
+                    url = urlopen( Request( url ) ).geturl()
+                    request = Request( url, data=urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
+                    response = urlopen( request ).read()
+                    if response:
+                        try:
+                            required_repo_info_dict = json.loads( response )
+                        except Exception as e:
+                            log.exception( e )
+                            return all_repo_info_dicts
+                        required_repo_info_dicts = []
+                        for k, v in required_repo_info_dict.items():
+                            if k == 'repo_info_dicts':
+                                encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+                                for encoded_dict_str in encoded_dict_strings:
+                                    decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+                                    required_repo_info_dicts.append( decoded_dict )
+                            else:
+                                if k not in all_required_repo_info_dict:
+                                    all_required_repo_info_dict[ k ] = v
+                                else:
+                                    if v and not all_required_repo_info_dict[ k ]:
+                                        all_required_repo_info_dict[ k ] = v
+                            if required_repo_info_dicts:
+                                for required_repo_info_dict in required_repo_info_dicts:
+                                    # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
+                                    # of dictionaries, each of which has a single entry.  We'll check keys here rather than
+                                    # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
+                                    # lists of discovered repository dependencies, but these lists will be empty in the
+                                    # required_repo_info_dict since dependency discovery has not yet been performed for these
+                                    # dictionaries.
+                                    required_repo_info_dict_key = next(iter(required_repo_info_dict))
+                                    all_repo_info_dicts_keys = [ next(iter(d)) for d in all_repo_info_dicts ]
+                                    if required_repo_info_dict_key not in all_repo_info_dicts_keys:
+                                        all_repo_info_dicts.append( required_repo_info_dict )
+                                    else:
+                                        # required_repo_info_dict_key corresponds to the repo name.
+                                        # A single install transaction might require the installation of 2 or more repos
+                                        # with the same repo name but different owners or versions.
+                                        # Therefore, if required_repo_info_dict_key is already in all_repo_info_dicts,
+                                        # check whether the tool id is also present.  If it is not, we are dealing with
+                                        # the same repo name but a different owner or changeset revision, so we add the
+                                        # repo to the list of repos to be installed.
+                                        tool_id = required_repo_info_dict[ required_repo_info_dict_key ][ 1 ]
+                                        is_present = False
+                                        for repo_info_dict in all_repo_info_dicts:
+                                            for k, v in repo_info_dict.items():
+                                                if required_repo_info_dict_key == k:
+                                                    if tool_id == v[1]:
+                                                        is_present = True
+                                                        break
+                                        if not is_present:
+                                            all_repo_info_dicts.append( required_repo_info_dict )
+                        all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
+        return all_required_repo_info_dict
+
+    def is_in_repo_info_dicts( self, repo_info_dict, repo_info_dicts ):
+        """Return True if the received repo_info_dict is contained in the list of received repo_info_dicts."""
+        for name, repo_info_tuple in repo_info_dict.items():
+            for rid in repo_info_dicts:
+                for rid_name, rid_repo_info_tuple in rid.items():
+                    if rid_name == name:
+                        if len( rid_repo_info_tuple ) == len( repo_info_tuple ):
+                            for item in rid_repo_info_tuple:
+                                if item not in repo_info_tuple:
+                                    return False
+                            return True
+            return False
+
+    def reset_previously_installed_repository( self, repository ):
+        """
+        Reset the attributes of a tool_shed_repository that was previously installed.
+        The repository will be in some state other than INSTALLED, so all attributes
+        will be set to the default NEW state.  This will enable the repository to be
+        freshly installed.
+        """
+        debug_msg = "Resetting tool_shed_repository '%s' for installation.\n" % str( repository.name )
+        debug_msg += "The current state of the tool_shed_repository is:\n"
+        debug_msg += "deleted: %s\n" % str( repository.deleted )
+        debug_msg += "tool_shed_status: %s\n" % str( repository.tool_shed_status )
+        debug_msg += "uninstalled: %s\n" % str( repository.uninstalled )
+        debug_msg += "status: %s\n" % str( repository.status )
+        debug_msg += "error_message: %s\n" % str( repository.error_message )
+        log.debug( debug_msg )
+        repository.deleted = False
+        repository.tool_shed_status = None
+        repository.uninstalled = False
+        repository.status = self.app.install_model.ToolShedRepository.installation_status.NEW
+        repository.error_message = None
+        self.app.install_model.context.add( repository )
+        self.app.install_model.context.flush()
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/__init__.py b/lib/tool_shed/galaxy_install/tool_dependencies/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/env_manager.py b/lib/tool_shed/galaxy_install/tool_dependencies/env_manager.py
new file mode 100644
index 0000000..9aa9de3
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/env_manager.py
@@ -0,0 +1,146 @@
+import logging
+import os
+
+from tool_shed.util import common_util
+from tool_shed.util import repository_util
+
+log = logging.getLogger( __name__ )
+
+
+class EnvManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def create_env_var_dict( self, elem, install_environment ):
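+        # Build a dict describing one <environment_variable> tag.  For example (illustrative):
+        #   <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+        # becomes dict( name='PATH', action='prepend_to', value='<install_dir>/bin' ).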
+        env_var_name = elem.get( 'name', 'PATH' )
+        env_var_action = elem.get( 'action', 'prepend_to' )
+        env_var_text = None
+        tool_dependency_install_dir = install_environment.install_dir
+        tool_shed_repository_install_dir = install_environment.tool_shed_repository_install_dir
+        if elem.text and elem.text.find( 'REPOSITORY_INSTALL_DIR' ) >= 0:
+            if tool_shed_repository_install_dir and elem.text.find( '$REPOSITORY_INSTALL_DIR' ) != -1:
+                env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_shed_repository_install_dir )
+                return dict( name=env_var_name, action=env_var_action, value=env_var_text )
+            else:
+                env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_dependency_install_dir )
+                return dict( name=env_var_name, action=env_var_action, value=env_var_text )
+        if elem.text and elem.text.find( 'INSTALL_DIR' ) >= 0:
+            if tool_dependency_install_dir:
+                env_var_text = elem.text.replace( '$INSTALL_DIR', tool_dependency_install_dir )
+                return dict( name=env_var_name, action=env_var_action, value=env_var_text )
+            else:
+                env_var_text = elem.text.replace( '$INSTALL_DIR', tool_shed_repository_install_dir )
+                return dict( name=env_var_name, action=env_var_action, value=env_var_text )
+        if elem.text:
+            # Allow for environment variables that contain neither REPOSITORY_INSTALL_DIR nor INSTALL_DIR
+            # since there may be command line parameters that are tuned for a Galaxy instance.  Allowing them
+            # to be set in one location rather than being hard coded into each tool config is the best approach.
+            # For example:
+            # <environment_variable name="GATK2_SITE_OPTIONS" action="set_to">
+            #    "--num_threads 4 --num_cpu_threads_per_data_thread 3 --phone_home STANDARD"
+            # </environment_variable>
+            return dict( name=env_var_name, action=env_var_action, value=elem.text)
+        return None
+
+    def get_env_shell_file_path( self, installation_directory ):
+        env_shell_file_name = 'env.sh'
+        default_location = os.path.abspath( os.path.join( installation_directory, env_shell_file_name ) )
+        if os.path.exists( default_location ):
+            return default_location
+        for root, dirs, files in os.walk( installation_directory ):
+            for name in files:
+                if name == env_shell_file_name:
+                    return os.path.abspath( os.path.join( root, name ) )
+        return None
+
+    def get_env_shell_file_paths( self, elem ):
+        # Currently only the following tag set is supported.
+        #    <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056">
+        #        <package name="numpy" version="1.7.1" />
+        #    </repository>
+        env_shell_file_paths = []
+        toolshed = elem.get( 'toolshed', None )
+        repository_name = elem.get( 'name', None )
+        repository_owner = elem.get( 'owner', None )
+        changeset_revision = elem.get( 'changeset_revision', None )
+        if toolshed and repository_name and repository_owner and changeset_revision:
+            # The protocol is not stored, but the port is if it exists.
+            toolshed = common_util.remove_protocol_from_tool_shed_url( toolshed )
+            repository = repository_util.get_repository_for_dependency_relationship( self.app,
+                                                                                     toolshed,
+                                                                                     repository_name,
+                                                                                     repository_owner,
+                                                                                     changeset_revision )
+            if repository:
+                for sub_elem in elem:
+                    tool_dependency_type = sub_elem.tag
+                    tool_dependency_name = sub_elem.get( 'name' )
+                    tool_dependency_version = sub_elem.get( 'version' )
+                    if tool_dependency_type and tool_dependency_name and tool_dependency_version:
+                        # Get the tool_dependency so we can get its installation directory.
+                        tool_dependency = None
+                        for candidate_tool_dependency in repository.tool_dependencies:
+                            if candidate_tool_dependency.type == tool_dependency_type and \
+                                    candidate_tool_dependency.name == tool_dependency_name and \
+                                    candidate_tool_dependency.version == tool_dependency_version:
+                                # Only treat the dependency as found on an exact match; previously
+                                # the loop variable kept the last record even when nothing matched.
+                                tool_dependency = candidate_tool_dependency
+                                break
+                        if tool_dependency:
+                            installation_directory = tool_dependency.installation_directory( self.app )
+                            env_shell_file_path = self.get_env_shell_file_path( installation_directory )
+                            if env_shell_file_path:
+                                env_shell_file_paths.append( env_shell_file_path )
+                            else:
+                                error_message = "Skipping tool dependency definition because unable to locate env.sh file for tool dependency "
+                                error_message += "type %s, name %s, version %s for repository %s" % \
+                                    ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ), str( repository.name ) )
+                                log.debug( error_message )
+                                continue
+                        else:
+                            error_message = "Skipping tool dependency definition because unable to locate tool dependency "
+                            error_message += "type %s, name %s, version %s for repository %s" % \
+                                ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ), str( repository.name ) )
+                            log.debug( error_message )
+                            continue
+                    else:
+                        error_message = "Skipping invalid tool dependency definition: type %s, name %s, version %s." % \
+                            ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ) )
+                        log.debug( error_message )
+                        continue
+            else:
+                error_message = "Skipping set_environment_for_install definition because unable to locate required installed tool shed repository: "
+                error_message += "toolshed %s, name %s, owner %s, changeset_revision %s." % \
+                    ( str( toolshed ), str( repository_name ), str( repository_owner ), str( changeset_revision ) )
+                log.debug( error_message )
+        else:
+            error_message = "Skipping invalid set_environment_for_install definition: toolshed %s, name %s, owner %s, changeset_revision %s." % \
+                ( str( toolshed ), str( repository_name ), str( repository_owner ), str( changeset_revision ) )
+            log.debug( error_message )
+        return env_shell_file_paths
+
+    def get_env_shell_file_paths_from_setup_environment_elem( self, all_env_shell_file_paths, elem, action_dict ):
+        """
+        Parse an XML tag set to discover all child repository dependency tags and define the path to an env.sh file associated
+        with the repository (this requires the repository dependency to be in an installed state).  The received action_dict
+        will be updated with these discovered paths and returned to the caller.  This method handles tool dependency definition
+        tag sets <setup_r_environment>, <setup_ruby_environment>, <setup_python_environment> and <setup_perl_environment>.
+        """
+        # An example elem is:
+        # <action type="setup_perl_environment">
+        #     <repository name="package_perl_5_18" owner="iuc">
+        #         <package name="perl" version="5.18.1" />
+        #     </repository>
+        #     <repository name="package_expat_2_1" owner="iuc" prior_installation_required="True">
+        #         <package name="expat" version="2.1.0" />
+        #     </repository>
+        #     <package>http://search.cpan.org/CPAN/authors/id/T/TO/TODDR/XML-Parser-2.41.tar.gz</package>
+        #     <package>http://search.cpan.org/CPAN/authors/id/L/LD/LDS/CGI.pm-3.43.tar.gz</package>
+        # </action>
+        for action_elem in elem:
+            if action_elem.tag == 'repository':
+                env_shell_file_paths = self.get_env_shell_file_paths( action_elem )
+                all_env_shell_file_paths.extend( env_shell_file_paths )
+        if all_env_shell_file_paths:
+            action_dict[ 'env_shell_file_paths' ] = all_env_shell_file_paths
+            action_dict[ 'action_shell_file_paths' ] = all_env_shell_file_paths
+        return action_dict
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/__init__.py b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/asynchronous_reader.py b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/asynchronous_reader.py
new file mode 100644
index 0000000..5eb6d5a
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/asynchronous_reader.py
@@ -0,0 +1,31 @@
+import logging
+import threading
+
+log = logging.getLogger( __name__ )
+
+
+class AsynchronousReader( threading.Thread ):
+    """
+    A helper class to implement asynchronous reading of a stream in a separate thread.  Read lines are pushed
+    onto a queue to be consumed in another thread.
+    """
+
+    def __init__( self, fd, queue ):
+        threading.Thread.__init__( self )
+        self._fd = fd
+        self._queue = queue
+        self.lines = []
+
+    def run( self ):
+        """Read lines and put them on the queue."""
+        thread_lock = threading.Lock()
+        thread_lock.acquire()
+        for line in iter( self._fd.readline, '' ):
+            stripped_line = line.rstrip()
+            self.lines.append( stripped_line )
+            self._queue.put( stripped_line )
+        thread_lock.release()
+
+    def installation_complete( self ):
+        """Make sure there is more installation and compilation logging content expected."""
+        return not self.is_alive() and self._queue.empty()
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/env_file_builder.py b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/env_file_builder.py
new file mode 100644
index 0000000..e62924d
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/env_file_builder.py
@@ -0,0 +1,96 @@
+import logging
+import os
+import stat
+from six import string_types
+
+log = logging.getLogger( __name__ )
+
+
+class EnvFileBuilder( object ):
+
+    def __init__( self, install_dir ):
+        self.install_dir = install_dir
+        self.return_code = 0
+
+    def append_line( self, make_executable=True, **kwd ):
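+        # Typical (illustrative) call:
+        #   env_file_builder.append_line( name='PATH', action='prepend_to', value='%s/bin' % install_dir )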
+        env_var_dict = dict( **kwd )
+        env_entry, env_file = self.create_or_update_env_shell_file( self.install_dir, env_var_dict )
+        return_code = self.file_append( env_entry, env_file, make_executable=make_executable )
+        self.return_code = self.return_code or return_code
+        return self.return_code
+
+    @staticmethod
+    def create_or_update_env_shell_file( install_dir, env_var_dict ):
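+        # Map an <environment_variable> action to a line of shell code.  For example
+        # (illustrative values): action 'prepend_to' with name PATH and value /opt/bin
+        # yields "PATH=/opt/bin:$PATH; export PATH".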
+        env_var_action = env_var_dict[ 'action' ]
+        env_var_value = env_var_dict[ 'value' ]
+        if env_var_action in [ 'prepend_to', 'set_to', 'append_to' ]:
+            env_var_name = env_var_dict[ 'name' ]
+            if env_var_action == 'prepend_to':
+                changed_value = '%s:$%s' % ( env_var_value, env_var_name )
+            elif env_var_action == 'set_to':
+                changed_value = '%s' % env_var_value
+            elif env_var_action == 'append_to':
+                changed_value = '$%s:%s' % ( env_var_name, env_var_value )
+            line = "%s=%s; export %s" % ( env_var_name, changed_value, env_var_name )
+        elif env_var_action == "source":
+            line = "if [ -f %s ] ; then . %s ; fi" % ( env_var_value, env_var_value )
+        else:
+            raise Exception( "Unknown shell file action %s" % env_var_action )
+        env_shell_file_path = os.path.join( install_dir, 'env.sh' )
+        return line, env_shell_file_path
+
+    def file_append( self, text, file_path, make_executable=True ):
+        """
+        Append a line to a file unless the line already exists in the file.  This method creates the file if
+        it doesn't exist.  If make_executable is True, the file's executable bits are set for the
+        owner, group and others.
+        """
+        file_dir = os.path.dirname( file_path )
+        if not os.path.exists( file_dir ):
+            try:
+                os.makedirs( file_dir )
+            except Exception as e:
+                log.exception( str( e ) )
+                return 1
+        if os.path.exists( file_path ):
+            try:
+                new_env_file_contents = []
+                env_file_contents = open( file_path, 'r' ).readlines()
+                # Clean out blank lines from the env.sh file.
+                for line in env_file_contents:
+                    line = line.rstrip()
+                    if line:
+                        new_env_file_contents.append( line )
+                env_file_contents = new_env_file_contents
+            except Exception as e:
+                log.exception( str( e ) )
+                return 1
+        else:
+            env_file_handle = open( file_path, 'w' )
+            env_file_handle.close()
+            env_file_contents = []
+        if make_executable:
+            # Explicitly set the file's executable bits.
+            try:
+                os.chmod( file_path, 0o111 | os.stat( file_path )[ stat.ST_MODE ] )
+            except Exception as e:
+                log.exception( str( e ) )
+                return 1
+        # Convert the received text to a list, in order to support adding one or more lines to the file.
+        if isinstance( text, string_types ):
+            text = [ text ]
+        for line in text:
+            line = line.rstrip()
+            if line and line not in env_file_contents:
+                env_file_contents.append( line )
+        try:
+            open( file_path, 'w' ).write( '\n'.join( env_file_contents ) )
+        except Exception as e:
+            log.exception( str( e ) )
+            return 1
+        return 0
+
+    def handle_action_shell_file_paths( self, action_dict ):
+        shell_file_paths = action_dict.get( 'action_shell_file_paths', [] )
+        for shell_file_path in shell_file_paths:
+            self.append_line( action="source", value=shell_file_path )
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/install_environment.py b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/install_environment.py
new file mode 100644
index 0000000..56c0dd2
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/install_environment.py
@@ -0,0 +1,275 @@
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+import threading
+import time
+from contextlib import contextmanager
+
+# TODO: eliminate the use of fabric here.
+from fabric import state
+from fabric.operations import _AttributeString
+from six.moves import queue
+
+from galaxy.util import DATABASE_MAX_STRING_SIZE
+from galaxy.util import DATABASE_MAX_STRING_SIZE_PRETTY
+from galaxy.util import shrink_string_by_size
+from galaxy.util import unicodify
+
+from tool_shed.galaxy_install.tool_dependencies.recipe import asynchronous_reader
+
+from tool_shed.util import basic_util, tool_dependency_util
+
+log = logging.getLogger( __name__ )
+
+
+class InstallEnvironment( object ):
+    """Object describing the environment built up as part of the process of building and installing a package."""
+
+    def __init__( self, app, tool_shed_repository_install_dir, install_dir ):
+        """
+        The value of the received tool_shed_repository_install_dir is the root installation directory
+        of the repository containing the tool dependency, and the value of the received install_dir is
+        the root installation directory of the tool dependency.
+        """
+        self.app = app
+        self.env_shell_file_paths = []
+        self.install_dir = install_dir
+        self.tool_shed_repository_install_dir = tool_shed_repository_install_dir
+        self.tmp_work_dir = os.path.abspath( tempfile.mkdtemp( prefix="tmp-toolshed-mtd" ) )
+
+    def add_env_shell_file_paths( self, paths ):
+        for path in paths:
+            self.env_shell_file_paths.append( str( path ) )
+
+    def build_command( self, command, action_type='shell_command' ):
+        """
+        Build a command line for execution from a simple command, first
+        configuring the environment described by this object.
+        """
+        env_cmds = self.environment_commands( action_type )
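+        # For example (illustrative): if env.sh sets PATH, the returned string is the
+        # export line(s) followed by the command, separated by newlines.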
+        return '\n'.join( env_cmds + [ command ] )
+
+    def close_file_descriptor( self, fd ):
+        """Attempt to close a file descriptor."""
+        start_timer = time.time()
+        error = ''
+        while True:
+            try:
+                fd.close()
+                break
+            except IOError as e:
+                # Most likely close() was called during a concurrent operation on the same file object.
+                log.debug( 'Error closing file descriptor: %s' % str( e ) )
+                time.sleep( .5 )
+                current_wait_time = time.time() - start_timer
+                if current_wait_time >= 600:
+                    error = 'Error closing file descriptor: %s' % str( e )
+                    break
+        return error
+
+    def enqueue_output( self, stdout, stdout_queue, stderr, stderr_queue ):
+        """
+        Read the streamed stdout and stderr of a subprocess, logging each line and placing it
+        on the corresponding thread-safe queue.  The calling thread can then drain the queues
+        to retrieve the output.
+        """
+        stdout_logger = logging.getLogger( 'install_environment.STDOUT' )
+        stderr_logger = logging.getLogger( 'install_environment.STDERR' )
+        for line in iter( stdout.readline, '' ):
+            output = line.rstrip()
+            stdout_logger.debug( output )
+            stdout_queue.put( output )
+        stdout_queue.put( None )
+        for line in iter( stderr.readline, '' ):
+            output = line.rstrip()
+            stderr_logger.debug( output )
+            stderr_queue.put( output )
+        stderr_queue.put( None )
+
+    def environment_commands( self, action_type ):
+        """Build a list of commands used to construct the environment described by this object."""
+        cmds = []
+        for env_shell_file_path in self.env_shell_file_paths:
+            if os.path.exists( env_shell_file_path ):
+                for env_setting in open( env_shell_file_path ):
+                    cmds.append( env_setting.strip( '\n' ) )
+            else:
+                log.debug( 'Invalid file %s specified, ignoring %s action.' % ( str( env_shell_file_path ), str( action_type ) ) )
+        return cmds
+
+    def environment_dict( self, action_type='template_command' ):
+        env_vars = dict()
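+        # Parse each env.sh line of the (illustrative) form "PATH=/opt/bin; export PATH"
+        # into a name -> value mapping, ignoring everything after the first ';'.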
+        for env_shell_file_path in self.env_shell_file_paths:
+            if os.path.exists( env_shell_file_path ):
+                for env_setting in open( env_shell_file_path ):
+                    env_string = env_setting.split( ';' )[ 0 ]
+                    env_name, env_path = env_string.split( '=' )
+                    env_vars[ env_name ] = env_path
+            else:
+                log.debug( 'Invalid file %s specified, ignoring template_command action.' % str( env_shell_file_path ) )
+        return env_vars
+
+    def handle_command( self, tool_dependency, cmd, return_output=False, job_name="" ):
+        """Handle a command and log the results."""
+        command = str( cmd )
+        output = self.handle_complex_command( command, job_name=job_name )
+        self.log_results( cmd, output, os.path.join( self.install_dir, basic_util.INSTALLATION_LOG ) )
+        stdout = output.stdout
+        stderr = output.stderr
+        if len( stdout ) > DATABASE_MAX_STRING_SIZE:
+            log.warning( "Length of stdout > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY ) )
+            stdout = shrink_string_by_size( stdout, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+        if len( stderr ) > DATABASE_MAX_STRING_SIZE:
+            log.warning( "Length of stderr > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY ) )
+            stderr = shrink_string_by_size( stderr, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True )
+        if output.return_code not in [ 0 ]:
+            status = self.app.install_model.ToolDependency.installation_status.ERROR
+            if stderr:
+                error_message = unicodify( stderr )
+            elif stdout:
+                error_message = unicodify( stdout )
+            else:
+                # We have a problem if there was no stdout and no stderr.
+                error_message = "Unknown error occurred executing shell command %s, return_code: %s" % \
+                    ( str( cmd ), str( output.return_code ) )
+            tool_dependency_util.set_tool_dependency_attributes(self.app,
+                                                                tool_dependency=tool_dependency,
+                                                                status=status,
+                                                                error_message=error_message)
+        if return_output:
+            return output
+        return output.return_code
+
+    def handle_complex_command( self, command, job_name="" ):
+        """
+        Wrap subprocess.Popen in such a way that the stderr and stdout from running a shell command will
+        be captured and logged in nearly real time.  This is similar to fabric.local, but allows us to
+        retain control over the process.  This method is named "complex" because it uses queues and
+        threads to execute a command while capturing and displaying the output.
+        """
+        # We define a "local logger" here such that we can give it a slightly
+        # different name. We use the package name as part of the logger to
+        # allow admins to easily distinguish which package is currently
+        # being installed.
+        llog_name = __name__
+        if len( job_name ) > 0:
+            llog_name += ':' + job_name
+        llog = logging.getLogger( llog_name )
+        # Print the command we're about to execute, ``set -x`` style.
+        llog.debug('+ ' + str( command ) )
+        # Launch the command as a subprocess.  A bufsize of 1 means line-buffered.
+        process_handle = subprocess.Popen( str( command ),
+                                           stdout=subprocess.PIPE,
+                                           stderr=subprocess.PIPE,
+                                           bufsize=1,
+                                           close_fds=False,
+                                           shell=True,
+                                           cwd=state.env[ 'lcwd' ] )
+        pid = process_handle.pid
+        # Launch the asynchronous readers of the process' stdout and stderr.
+        stdout_queue = queue.Queue()
+        stdout_reader = asynchronous_reader.AsynchronousReader( process_handle.stdout, stdout_queue )
+        stdout_reader.start()
+        stderr_queue = queue.Queue()
+        stderr_reader = asynchronous_reader.AsynchronousReader( process_handle.stderr, stderr_queue )
+        stderr_reader.start()
+        # Place streamed stdout and stderr into a threaded IPC queue target so it can
+        # be printed and stored for later retrieval when generating the INSTALLATION.log.
+        stdio_thread = threading.Thread( target=self.enqueue_output,
+                                         args=( process_handle.stdout,
+                                                stdout_queue,
+                                                process_handle.stderr,
+                                                stderr_queue ) )
+        thread_lock = threading.Lock()
+        thread_lock.acquire()
+        stdio_thread.start()
+        # Check the queues for output until there is nothing more to get.
+        start_timer = time.time()
+        while not stdout_reader.installation_complete() or not stderr_reader.installation_complete():
+            # Show what we received from standard output.
+            while not stdout_queue.empty():
+                try:
+                    line = stdout_queue.get()
+                except queue.Empty:
+                    line = None
+                    break
+                if line:
+                    llog.debug(line)
+                    start_timer = time.time()
+                else:
+                    break
+            # Show what we received from standard error.
+            while not stderr_queue.empty():
+                try:
+                    line = stderr_queue.get()
+                except queue.Empty:
+                    line = None
+                    break
+                if line:
+                    llog.debug(line)
+                    start_timer = time.time()
+                else:
+                    stderr_queue.task_done()
+                    break
+            # Sleep a bit before asking the readers again.
+            time.sleep( .1 )
+            current_wait_time = time.time() - start_timer
+            if stdout_queue.empty() and stderr_queue.empty() and current_wait_time > basic_util.NO_OUTPUT_TIMEOUT:
+                err_msg = "\nShutting down process id %s because it generated no output for the defined timeout period of %.1f seconds.\n" % \
+                          ( pid, basic_util.NO_OUTPUT_TIMEOUT )
+                stderr_reader.lines.append( err_msg )
+                process_handle.kill()
+                break
+        thread_lock.release()
+        # Wait until each of the threads we've started terminate.  The following calls will block each thread
+        # until it terminates either normally, through an unhandled exception, or until the timeout occurs.
+        stdio_thread.join( basic_util.NO_OUTPUT_TIMEOUT )
+        stdout_reader.join( basic_util.NO_OUTPUT_TIMEOUT )
+        stderr_reader.join( basic_util.NO_OUTPUT_TIMEOUT )
+        # Close subprocess' file descriptors.
+        self.close_file_descriptor( process_handle.stdout )
+        self.close_file_descriptor( process_handle.stderr )
+        stdout = '\n'.join( stdout_reader.lines )
+        stderr = '\n'.join( stderr_reader.lines )
+        # Handle error condition (deal with stdout being None, too)
+        output = _AttributeString( stdout.strip() if stdout else "" )
+        errors = _AttributeString( stderr.strip() if stderr else "" )
+        # Make sure the process has finished.
+        process_handle.poll()
+        output.return_code = process_handle.returncode
+        output.stderr = errors
+        return output
+
+    def log_results( self, command, fabric_AttributeString, file_path ):
+        """Write attributes of fabric.operations._AttributeString to a specified log file."""
+        if os.path.exists( file_path ):
+            logfile = open( file_path, 'ab' )
+        else:
+            logfile = open( file_path, 'wb' )
+        logfile.write( "\n#############################################\n" )
+        logfile.write( '%s\nSTDOUT\n' % command )
+        logfile.write( str( fabric_AttributeString.stdout ) )
+        logfile.write( "\n#############################################\n" )
+        logfile.write( "\n#############################################\n" )
+        logfile.write( '%s\nSTDERR\n' % command )
+        logfile.write( str( fabric_AttributeString.stderr ) )
+        logfile.write( "\n#############################################\n" )
+        logfile.close()
+
+    @contextmanager
+    def use_tmp_dir( self ):
+        work_dir = self.tmp_work_dir
+        yield work_dir
+        if os.path.exists( work_dir ):
+            try:
+                shutil.rmtree( work_dir )
+            except Exception as e:
+                log.exception( str( e ) )
+
+    def __setup_environment( self ):
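+        # Produces (illustrative) ". /deps/a/env.sh&&. /deps/b/env.sh" for each existing env.sh.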
+        return "&&".join( [ ". %s" % file for file in self.__valid_env_shell_file_paths() ] )
+
+    def __valid_env_shell_file_paths( self ):
+        return [ file for file in self.env_shell_file_paths if os.path.exists( file ) ]
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/recipe_manager.py b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/recipe_manager.py
new file mode 100644
index 0000000..91c7a26
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/recipe_manager.py
@@ -0,0 +1,107 @@
+import logging
+
+from tool_shed.galaxy_install.tool_dependencies.recipe import step_handler
+from tool_shed.galaxy_install.tool_dependencies.recipe import tag_handler
+
+log = logging.getLogger( __name__ )
+
+
+class StepManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.step_handlers_by_type = self.load_step_handlers()
+
+    def get_step_handler_by_type( self, type ):
+        return self.step_handlers_by_type.get( type, None )
+
+    def execute_step( self, tool_dependency, package_name, actions, action_type, action_dict, filtered_actions,
+                      env_file_builder, install_environment, work_dir, current_dir=None, initial_download=False ):
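+        # Dispatch to the step handler registered for this action_type; the handler map is
+        # built in load_step_handlers() below.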
+        if actions:
+            step_handler = self.get_step_handler_by_type( action_type )
+            tool_dependency, filtered_actions, dir = step_handler.execute_step( tool_dependency=tool_dependency,
+                                                                                package_name=package_name,
+                                                                                actions=actions,
+                                                                                action_dict=action_dict,
+                                                                                filtered_actions=filtered_actions,
+                                                                                env_file_builder=env_file_builder,
+                                                                                install_environment=install_environment,
+                                                                                work_dir=work_dir,
+                                                                                current_dir=current_dir,
+                                                                                initial_download=initial_download )
+        else:
+            dir = None
+        return tool_dependency, filtered_actions, dir
+
+    def load_step_handlers( self ):
+        step_handlers_by_type = dict( assert_directory_executable=step_handler.AssertDirectoryExecutable( self.app ),
+                                      assert_directory_exists=step_handler.AssertDirectoryExists( self.app ),
+                                      assert_file_executable=step_handler.AssertFileExecutable( self.app ),
+                                      assert_file_exists=step_handler.AssertFileExists( self.app ),
+                                      autoconf=step_handler.Autoconf( self.app ),
+                                      change_directory=step_handler.ChangeDirectory( self.app ),
+                                      chmod=step_handler.Chmod( self.app ),
+                                      download_binary=step_handler.DownloadBinary( self.app ),
+                                      download_by_url=step_handler.DownloadByUrl( self.app ),
+                                      download_file=step_handler.DownloadFile( self.app ),
+                                      make_directory=step_handler.MakeDirectory( self.app ),
+                                      make_install=step_handler.MakeInstall( self.app ),
+                                      move_directory_files=step_handler.MoveDirectoryFiles( self.app ),
+                                      move_file=step_handler.MoveFile( self.app ),
+                                      regex_replace=step_handler.RegexReplace( self.app ),
+                                      set_environment=step_handler.SetEnvironment( self.app ),
+                                      set_environment_for_install=step_handler.SetEnvironmentForInstall( self.app ),
+                                      setup_perl_environment=step_handler.SetupPerlEnvironment( self.app ),
+                                      setup_r_environment=step_handler.SetupREnvironment( self.app ),
+                                      setup_ruby_environment=step_handler.SetupRubyEnvironment( self.app ),
+                                      setup_python_environment=step_handler.SetupPythonEnvironment( self.app ),
+                                      setup_virtualenv=step_handler.SetupVirtualEnv( self.app ),
+                                      shell_command=step_handler.ShellCommand( self.app ),
+                                      template_command=step_handler.TemplateCommand( self.app ) )
+        return step_handlers_by_type
+
+    def prepare_step( self, tool_dependency, action_type, action_elem, action_dict, install_environment, is_binary_download ):
+        """
+        Prepare the recipe step for later execution.  This generally alters the received action_dict
+        with new information needed during this step's execution.
+        """
+        if action_elem is not None:
+            step_handler = self.get_step_handler_by_type( action_type )
+            action_dict = step_handler.prepare_step( tool_dependency=tool_dependency,
+                                                     action_elem=action_elem,
+                                                     action_dict=action_dict,
+                                                     install_environment=install_environment,
+                                                     is_binary_download=is_binary_download )
+        return action_dict
+
+
+class TagManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.tag_handlers = self.load_tag_handlers()
+
+    def get_tag_handler_by_tag( self, tag ):
+        return self.tag_handlers.get( tag, None )
+
+    def process_tag_set( self, tool_shed_repository, tool_dependency, package_elem, package_name, package_version,
+                         from_tool_migration_manager=False, tool_dependency_db_records=None ):
+        tag_handler = self.get_tag_handler_by_tag( package_elem.tag )
+        tool_dependency, proceed_with_install, action_elem_tuples = \
+            tag_handler.process_tag_set( tool_shed_repository,
+                                         tool_dependency,
+                                         package_elem,
+                                         package_name,
+                                         package_version,
+                                         from_tool_migration_manager=from_tool_migration_manager,
+                                         tool_dependency_db_records=tool_dependency_db_records )
+        return tool_dependency, proceed_with_install, action_elem_tuples
+
+    def load_tag_handlers( self ):
+        tag_handlers = dict( environment_variable=tag_handler.SetEnvironment( self.app ),
+                             install=tag_handler.Install( self.app ),
+                             package=tag_handler.Package( self.app ),
+                             readme=tag_handler.ReadMe( self.app ),
+                             repository=tag_handler.Repository( self.app ),
+                             set_environment=tag_handler.SetEnvironment( self.app ) )
+        return tag_handlers
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
new file mode 100644
index 0000000..df8ae9a
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
@@ -0,0 +1,1912 @@
+import hashlib
+import logging
+import os
+import re
+import shutil
+import stat
+import tarfile
+import tempfile
+import zipfile
+from string import Template
+
+# TODO: eliminate the use of fabric here.
+from fabric.api import lcd, settings
+
+from galaxy.util import (
+    asbool,
+    download_to_file
+)
+from galaxy.util.template import fill_template
+from tool_shed.galaxy_install.tool_dependencies.env_manager import EnvManager
+from tool_shed.util import basic_util, tool_dependency_util
+
+log = logging.getLogger( __name__ )
+
+VIRTUALENV_URL = 'https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.9.1.tar.gz'
+
+
+class CompressedFile( object ):
+
+    def __init__( self, file_path, mode='r' ):
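+        # Illustrative usage:
+        #   archive = CompressedFile( '/tmp/package.tar.gz' )
+        #   extraction_path = archive.extract( '/tmp/build' )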
+        if tarfile.is_tarfile( file_path ):
+            self.file_type = 'tar'
+        elif zipfile.is_zipfile( file_path ) and not file_path.endswith( '.jar' ):
+            self.file_type = 'zip'
+        else:
+            # Fail early instead of raising AttributeError below when file_type is unset.
+            raise NameError( 'Unable to determine the archive type of %s.' % file_path )
+        self.file_name = os.path.splitext( os.path.basename( file_path ) )[ 0 ]
+        if self.file_name.endswith( '.tar' ):
+            self.file_name = os.path.splitext( self.file_name )[ 0 ]
+        self.type = self.file_type
+        method = 'open_%s' % self.file_type
+        if hasattr( self, method ):
+            self.archive = getattr( self, method )( file_path, mode )
+        else:
+            raise NameError( 'File type %s specified, no open method found.' % self.file_type )
+
+    def extract( self, path ):
+        """Extract the archive contents under the received path and return the path to the extracted contents."""
+        contents = self.getmembers()
+        extraction_path = path
+        common_prefix = ''
+        if len( contents ) == 1:
+            # The archive contains a single file, return the extraction path.
+            if self.isfile( contents[ 0 ] ):
+                extraction_path = os.path.join( path, self.file_name )
+                if not os.path.exists( extraction_path ):
+                    os.makedirs( extraction_path )
+                self.archive.extractall( extraction_path )
+        else:
+            # Get the common prefix for all the files in the archive. If the common prefix ends with a slash,
+            # or self.isdir() returns True, the archive contains a single directory with the desired contents.
+            # Otherwise, it contains multiple files and/or directories at the root of the archive.
+            common_prefix = os.path.commonprefix( [ self.getname( item ) for item in contents ] )
+            if len( common_prefix ) >= 1 and not common_prefix.endswith( os.sep ) and self.isdir( self.getmember( common_prefix ) ):
+                common_prefix += os.sep
+            if not common_prefix.endswith( os.sep ):
+                common_prefix = ''
+                extraction_path = os.path.join( path, self.file_name )
+                if not os.path.exists( extraction_path ):
+                    os.makedirs( extraction_path )
+            self.archive.extractall( extraction_path )
+        # Since .zip files store unix permissions separately, we need to iterate through the zip file
+        # and set permissions on extracted members.
+        if self.file_type == 'zip':
+            for zipped_file in contents:
+                filename = self.getname( zipped_file )
+                absolute_filepath = os.path.join( extraction_path, filename )
+                external_attributes = self.archive.getinfo( filename ).external_attr
+                # The 2 least significant bytes are irrelevant, the next two contain unix permissions.
+                unix_permissions = external_attributes >> 16
+                if unix_permissions != 0:
+                    if os.path.exists( absolute_filepath ):
+                        os.chmod( absolute_filepath, unix_permissions )
+                    else:
+                        log.warning("Unable to change permission on extracted file '%s' as it does not exist" % absolute_filepath)
+        return os.path.abspath( os.path.join( extraction_path, common_prefix ) )
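+
+    # The three archive layouts handled by extract() above (illustrative summary):
+    #   - a single file                -> extracted into <path>/<file_name>/
+    #   - a single top-level directory -> extracted into <path>; the common prefix is kept
+    #   - multiple top-level entries   -> extracted into <path>/<file_name>/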
+
+    def getmembers_tar( self ):
+        return self.archive.getmembers()
+
+    def getmembers_zip( self ):
+        return self.archive.infolist()
+
+    def getname_tar( self, item ):
+        return item.name
+
+    def getname_zip( self, item ):
+        return item.filename
+
+    def getmember( self, name ):
+        for member in self.getmembers():
+            if self.getname( member ) == name:
+                return member
+
+    def getmembers( self ):
+        return getattr( self, 'getmembers_%s' % self.type )()
+
+    def getname( self, member ):
+        return getattr( self, 'getname_%s' % self.type )( member )
+
+    def isdir( self, member ):
+        return getattr( self, 'isdir_%s' % self.type )( member )
+
+    def isdir_tar( self, member ):
+        return member.isdir()
+
+    def isdir_zip( self, member ):
+        if member.filename.endswith( os.sep ):
+            return True
+        return False
+
+    def isfile( self, member ):
+        if not self.isdir( member ):
+            return True
+        return False
+
+    def open_tar( self, filepath, mode ):
+        return tarfile.open( filepath, mode, errorlevel=0 )
+
+    def open_zip( self, filepath, mode ):
+        return zipfile.ZipFile( filepath, mode )
+
+    def zipfile_ok( self, path_to_archive ):
+        """
+        This check is somewhat pedantic and not strictly necessary.  It verifies that no archive
+        member would be extracted outside of the extraction directory, because ZipFile.extractall()
+        has known path-traversal pitfalls.  See the Python zipfile documentation for more details.
+        """
+        basename = os.path.realpath( os.path.dirname( path_to_archive ) )
+        zip_archive = zipfile.ZipFile( path_to_archive )
+        for member in zip_archive.namelist():
+            member_path = os.path.realpath( os.path.join( basename, member ) )
+            if not member_path.startswith( basename ):
+                return False
+        return True
+
+
+class Download( object ):
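+    """
+    Mixin that provides download and checksum-verification helpers to the recipe
+    steps that fetch files (e.g. DownloadBinary, DownloadByUrl, DownloadFile below).
+    """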
+
+    def url_download( self, install_dir, downloaded_file_name, download_url, extract=True, checksums=None ):
+        """
+        The given download_url can carry a suffix like #md5# or #sha256# (or #md5= to support PyPI defaults), e.g.:
+
+            https://pypi.python.org/packages/source/k/khmer/khmer-1.0.tar.gz#md5#b60639a8b2939836f66495b9a88df757
+
+        Alternatively, to avoid breaking the HTTP spec, md5sum and sha256sum can be
+        specified as attributes on the <action /> element.
+
+        Either form declares a checksum that is verified after download; if the
+        checksum does not match, an exception is raised.
+        """
+        if checksums is None:
+            checksums = {}
+        file_path = os.path.join( install_dir, downloaded_file_name )
+        try:
+            download_to_file( download_url, file_path, chunk_size=basic_util.CHUNK_SIZE )
+        except Exception as e:
+            err_msg = 'Error downloading from URL %s : %s' % ( str( download_url ), str( e ) )
+            raise Exception( err_msg )
+
+        if 'sha256sum' in checksums or '#sha256#' in download_url:
+            with open( file_path, 'rb' ) as fh:
+                downloaded_checksum = hashlib.sha256( fh.read() ).hexdigest().lower()
+
+            # Determine the expected value.
+            if 'sha256sum' in checksums:
+                expected = checksums[ 'sha256sum' ].lower()
+            else:
+                expected = download_url.split( '#sha256#' )[ 1 ].lower()
+
+            if downloaded_checksum != expected:
+                raise Exception( 'The given sha256 checksum does not match the checksum computed from the downloaded file (%s != %s).' % ( downloaded_checksum, expected ) )
+
+        if 'md5sum' in checksums or '#md5#' in download_url or '#md5=' in download_url:
+            with open( file_path, 'rb' ) as fh:
+                downloaded_checksum = hashlib.md5( fh.read() ).hexdigest().lower()
+
+            # Determine the expected value.
+            if 'md5sum' in checksums:
+                expected = checksums[ 'md5sum' ].lower()
+            else:
+                expected = re.split( '#md5[#=]', download_url )[ 1 ].lower()
+
+            if downloaded_checksum != expected:
+                raise Exception( 'The given md5 checksum does not match the checksum computed from the downloaded file (%s != %s).' % ( downloaded_checksum, expected ) )
+
+        if extract:
+            if tarfile.is_tarfile( file_path ) or ( zipfile.is_zipfile( file_path ) and not file_path.endswith( '.jar' ) ):
+                archive = CompressedFile( file_path )
+                extraction_path = archive.extract( install_dir )
+            else:
+                extraction_path = os.path.abspath( install_dir )
+        else:
+            extraction_path = os.path.abspath( install_dir )
+
+        return extraction_path
+
+    def get_elem_checksums( self, elem ):
+        rval = {}
+        for hash_type in ('md5sum', 'sha256sum'):
+            if hash_type in elem.keys():
+                rval[hash_type] = elem.get(hash_type).lower()
+        return rval
+
+    def get_dict_checksums( self, dct ):
+        return dict(i for i in dct.items() if i[0] in ['md5sum', 'sha256sum'])
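+
+    # get_elem_checksums() reads md5sum/sha256sum attributes off an <action /> XML
+    # element, lowercasing the values; get_dict_checksums() filters those same keys
+    # out of an already-built dict (e.g. an action_dict) without modifying case.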
+
+
+class RecipeStep( object ):
+    """Abstract class that defines a standard format for handling recipe steps when installing packages."""
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        raise NotImplementedError( "Unimplemented method" )
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        raise NotImplementedError( "Unimplemented method" )
+
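+# The recipe protocol is two-phase: prepare_step() parses an <action /> XML element
+# into a plain action_dict, and execute_step() later consumes that dict.  A minimal
+# sketch of a custom step (illustrative only; not part of the upstream recipe set):
+#
+#     class Touch( RecipeStep ):
+#
+#         def __init__( self, app ):
+#             self.app = app
+#             self.type = 'touch'
+#
+#         def prepare_step( self, tool_dependency, action_elem, action_dict,
+#                           install_environment, is_binary_download ):
+#             # <action type="touch">$INSTALL_DIR/sentinel</action>
+#             action_dict[ 'path' ] = basic_util.evaluate_template( action_elem.text, install_environment )
+#             return action_dict
+#
+#         def execute_step( self, tool_dependency, package_name, actions, action_dict,
+#                           filtered_actions, env_file_builder, install_environment,
+#                           work_dir, current_dir=None, initial_download=False ):
+#             open( action_dict[ 'path' ], 'w' ).close()
+#             return tool_dependency, None, None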
+
+class AssertDirectoryExecutable( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'assert_directory_executable'
+
+    def assert_directory_executable( self, full_path ):
+        """
+        Return True if a symbolic link or directory exists and is executable, but if
+        full_path is a file, return False.
+        """
+        if full_path is None:
+            return False
+        if os.path.isfile( full_path ):
+            return False
+        if os.path.isdir( full_path ):
+            # Make sure the owner has execute permission on the directory.
+            # See https://docs.python.org/2/library/stat.html
+            if os.stat( full_path )[ stat.ST_MODE ] & stat.S_IXUSR:
+                return True
+        return False
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Make sure a symbolic link or directory on disk exists and is executable, but is not a file.
+        Since this class is not used in the initial download stage, no recipe step filtering is
+        performed here, and None values are always returned for filtered_actions and dir.
+        """
+        if os.path.isabs( action_dict[ 'full_path' ] ):
+            full_path = action_dict[ 'full_path' ]
+        else:
+            full_path = os.path.join( current_dir, action_dict[ 'full_path' ] )
+        if not self.assert_directory_executable( full_path=full_path ):
+            status = self.app.install_model.ToolDependency.installation_status.ERROR
+            error_message = 'The path %s is not a directory or is not executable by the owner.' % str( full_path )
+            tool_dependency = tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                   tool_dependency,
+                                                                                   status=status,
+                                                                                   error_message=error_message )
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="assert_executable">$INSTALL_DIR/mira/my_file</action>
+        if action_elem.text:
+            action_dict[ 'full_path' ] = basic_util.evaluate_template( action_elem.text, install_environment )
+        return action_dict
+
+
+class AssertDirectoryExists( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'assert_directory_exists'
+
+    def assert_directory_exists( self, full_path ):
+        """
+        Return True if a symbolic link or directory exists, but if full_path is a file,
+        return False.
+        """
+        if full_path is None:
+            return False
+        if os.path.isfile( full_path ):
+            return False
+        if os.path.isdir( full_path ):
+            return True
+        return False
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Make sure a symbolic link or directory on disk exists, but is not a file.  Since this
+        class is not used in the initial download stage, no recipe step filtering is performed
+        here, and None values are always returned for filtered_actions and dir.
+        """
+        if os.path.isabs( action_dict[ 'full_path' ] ):
+            full_path = action_dict[ 'full_path' ]
+        else:
+            full_path = os.path.join( current_dir, action_dict[ 'full_path' ] )
+        if not self.assert_directory_exists( full_path=full_path ):
+            status = self.app.install_model.ToolDependency.installation_status.ERROR
+            error_message = 'The path %s is not a directory or does not exist.' % str( full_path )
+            tool_dependency = tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                   tool_dependency,
+                                                                                   status=status,
+                                                                                   error_message=error_message )
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="make_directory">$INSTALL_DIR/mira</action>
+        if action_elem.text:
+            action_dict[ 'full_path' ] = basic_util.evaluate_template( action_elem.text, install_environment )
+        return action_dict
+
+
+class AssertFileExecutable( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'assert_file_executable'
+
+    def assert_file_executable( self, full_path ):
+        """
+        Return True if a symbolic link or file exists and is executable, but if full_path
+        is a directory, return False.
+        """
+        if full_path is None:
+            return False
+        if os.path.isdir( full_path ):
+            return False
+        if os.path.exists( full_path ):
+            # Make sure the owner has execute permission on the file.
+            # See https://docs.python.org/2/library/stat.html
+            if os.stat( full_path )[ stat.ST_MODE ] & stat.S_IXUSR:
+                return True
+        return False
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Make sure a symbolic link or file on disk exists and is executable, but is not a directory.
+        Since this class is not used in the initial download stage, no recipe step filtering is
+        performed here, and None values are always returned for filtered_actions and dir.
+        """
+        if os.path.isabs( action_dict[ 'full_path' ] ):
+            full_path = action_dict[ 'full_path' ]
+        else:
+            full_path = os.path.join( current_dir, action_dict[ 'full_path' ] )
+        if not self.assert_file_executable( full_path=full_path ):
+            status = self.app.install_model.ToolDependency.installation_status.ERROR
+            error_message = 'The path %s is not a file or is not executable by the owner.' % str( full_path )
+            tool_dependency = tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                   tool_dependency,
+                                                                                   status=status,
+                                                                                   error_message=error_message )
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="assert_executable">$INSTALL_DIR/mira/my_file</action>
+        if action_elem.text:
+            action_dict[ 'full_path' ] = basic_util.evaluate_template( action_elem.text, install_environment )
+        return action_dict
+
+
+class AssertFileExists( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'assert_file_exists'
+
+    def assert_file_exists( self, full_path ):
+        """
+        Return True if a symbolic link or file exists, but if full_path is a directory,
+        return False.
+        """
+        if full_path is None:
+            return False
+        if os.path.isdir( full_path ):
+            return False
+        if os.path.exists( full_path ):
+            return True
+        return False
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Make sure a symbolic link or file on disk exists, but is not a directory.  Since this
+        class is not used in the initial download stage, no recipe step filtering is performed
+        here, and None values are always returned for filtered_actions and dir.
+        """
+        if os.path.isabs( action_dict[ 'full_path' ] ):
+            full_path = action_dict[ 'full_path' ]
+        else:
+            full_path = os.path.join( current_dir, action_dict[ 'full_path' ] )
+        if not self.assert_file_exists( full_path=full_path ):
+            status = self.app.install_model.ToolDependency.installation_status.ERROR
+            error_message = 'The path %s is not a file or does not exist.' % str( full_path )
+            tool_dependency = tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                   tool_dependency,
+                                                                                   status=status,
+                                                                                   error_message=error_message )
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="assert_on_path">$INSTALL_DIR/mira/my_file</action>
+        if action_elem.text:
+            action_dict[ 'full_path' ] = basic_util.evaluate_template( action_elem.text, install_environment )
+        return action_dict
+
+
+class Autoconf( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'autoconf'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Handle configure, make and make install in a shell, allowing for configuration options.  Since this
+        class is not used in the initial download stage, no recipe step filtering is performed here, and None
+        values are always returned for filtered_actions and dir.
+        """
+        with settings( warn_only=True ):
+            configure_opts = action_dict.get( 'configure_opts', '' )
+            if 'prefix=' in configure_opts:
+                pre_cmd = './configure %s && make && make install' % configure_opts
+            else:
+                pre_cmd = './configure --prefix=$INSTALL_DIR %s && make && make install' % configure_opts
+            cmd = install_environment.build_command( basic_util.evaluate_template( pre_cmd, install_environment ) )
+            install_environment.handle_command( tool_dependency=tool_dependency,
+                                                cmd=cmd,
+                                                return_output=False,
+                                                job_name=package_name )
+            # The caller should check the status of the returned tool_dependency since this function
+            # does nothing with the return_code.
+            return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # Handle configure, make and make install, allowing configuration options to be provided.
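+        # Example (illustrative): <action type="autoconf">--enable-shared</action>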
+        if action_elem.text:
+            configure_opts = basic_util.evaluate_template( action_elem.text, install_environment )
+            action_dict[ 'configure_opts' ] = configure_opts
+        return action_dict
+
+
+class ChangeDirectory( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'change_directory'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Change the working directory in a shell.  Since this class is not used in the initial download stage,
+        no recipe step filtering is performed here, and a None value is returned for filtered_actions.  However,
+        the new dir value is returned since it is needed for later steps.
+        """
+
+        def dir_valid( test_dir ):
+            """
+            Make sure the defined directory is within current_dir or work_dir.
+            """
+            for valid_dir in [ os.path.realpath( current_dir ), os.path.realpath( work_dir ) ]:
+                if test_dir.startswith( valid_dir ) and os.path.exists( test_dir ):
+                    return True
+            return False
+
+        target_dir = os.path.realpath( os.path.normpath( action_dict[ 'directory' ] ) )
+        if dir_valid( target_dir ):
+            # The specified directory is already a full, valid path.
+            dir = target_dir
+        else:
+            target_dir = os.path.realpath( os.path.normpath( os.path.join( current_dir, action_dict[ 'directory' ] ) ) )
+            if dir_valid( target_dir ):
+                dir = target_dir
+            else:
+                log.debug( 'Invalid or nonexistent directory %s specified, ignoring change_directory action.', str( action_dict[ 'directory' ] ) )
+                dir = current_dir
+        return tool_dependency, None, dir
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="change_directory">PHYLIP-3.6b</action>
+        if action_elem.text:
+            action_dict[ 'directory' ] = basic_util.evaluate_template( action_elem.text, install_environment )
+        return action_dict
+
+
+class Chmod( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'chmod'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Change the mode setting for certain files in the installation environment.  Since this class is not
+        used in the initial download stage, no recipe step filtering is performed here, and None values are
+        always returned for filtered_actions and dir.
+        """
+        for target_file, mode in action_dict[ 'change_modes' ]:
+            if os.path.exists( target_file ):
+                os.chmod( target_file, mode )
+            else:
+                log.debug( 'Invalid file %s specified, ignoring %s action.', target_file, self.type )
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # Change the read, write, and execute bits on a file.
+        # <action type="chmod">
+        #   <file mode="750">$INSTALL_DIR/bin/faToTwoBit</file>
+        # </action>
+        file_elems = action_elem.findall( 'file' )
+        chmod_actions = []
+        # A unix octal mode is the sum of the following values:
+        # Owner:
+        # 400 Read    200 Write    100 Execute
+        # Group:
+        # 040 Read    020 Write    010 Execute
+        # World:
+        # 004 Read    002 Write    001 Execute
+        for file_elem in file_elems:
+            # By the above table, owner read/write/execute plus group read permission would be 740.
+            # os.chmod() expects an integer mode, so parse the received unix-style octal string.
+            received_mode = int( file_elem.get( 'mode', '600' ), base=8 )
+            # For added security, ensure that the setuid and setgid bits are not set.
+            mode = received_mode & ~( stat.S_ISUID | stat.S_ISGID )
+            file = basic_util.evaluate_template( file_elem.text, install_environment )
+            chmod_tuple = ( file, mode )
+            chmod_actions.append( chmod_tuple )
+        if chmod_actions:
+            action_dict[ 'change_modes' ] = chmod_actions
+        return action_dict
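+
+    # Worked example (illustrative): mode="4755" parses to 0o4755; masking the
+    # setuid/setgid bits yields 0o755, i.e. rwxr-xr-x with no privilege escalation.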
+
+
+class DownloadBinary( Download, RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'download_binary'
+
+    def download_binary( self, url, work_dir, checksums ):
+        """Download a pre-compiled binary from the specified URL."""
+        downloaded_filename = os.path.split( url )[ -1 ]
+        self.url_download( work_dir, downloaded_filename, url, extract=False, checksums=checksums )
+        return downloaded_filename
+
+    def filter_actions_after_binary_installation( self, actions ):
+        """Filter out actions that should not be processed if a binary download succeeded."""
+        filtered_actions = []
+        for action in actions:
+            action_type, action_dict = action
+            if action_type in [ 'set_environment', 'chmod', 'download_binary' ]:
+                filtered_actions.append( action )
+        return filtered_actions
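+
+    # e.g. (illustrative): after a successful binary download, remaining actions
+    #     [ ( 'shell_command', {...} ), ( 'chmod', {...} ), ( 'set_environment', {...} ) ]
+    # are filtered down to
+    #     [ ( 'chmod', {...} ), ( 'set_environment', {...} ) ]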
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Download a binary file.  If the value of initial_download is True, the recipe steps will be
+        filtered and returned and the installation directory (i.e., dir) will be defined and returned.
+        If we're not in the initial download stage, these actions will not occur, and None values will
+        be returned for them.
+        """
+        url = action_dict[ 'url' ]
+        # Get the target directory for this download if the user has specified one. Default to the root of $INSTALL_DIR.
+        target_directory = action_dict.get( 'target_directory', None )
+        # Attempt to download a binary from the specified URL.
+        downloaded_filename = None
+        try:
+            checksums = self.get_dict_checksums( action_dict )
+            log.debug( 'Attempting to download from %s to %s', url, str( target_directory ) )
+            downloaded_filename = self.download_binary( url, work_dir, checksums )
+            if initial_download:
+                # Filter out any actions that are not download_binary, chmod, or set_environment.
+                filtered_actions = self.filter_actions_after_binary_installation( actions[ 1: ] )
+                # Set actions to the same, so that the current download_binary doesn't get re-run in the
+                # next stage.  TODO: this may no longer be necessary...
+                actions = [ item for item in filtered_actions ]
+        except Exception as e:
+            log.exception( str( e ) )
+            if initial_download:
+                # No binary exists, or there was an error downloading the binary from the generated URL.
+                # Filter the actions so that stage 2 can proceed with the remaining actions.
+                filtered_actions = actions[ 1: ]
+                action_type, action_dict = filtered_actions[ 0 ]
+        # If the downloaded file exists, move it to $INSTALL_DIR. Put this outside the try/catch above so that
+        # any errors in the move step are correctly sent to the tool dependency error handler.
+        if downloaded_filename and os.path.exists( os.path.join( work_dir, downloaded_filename ) ):
+            if target_directory:
+                target_directory = os.path.realpath( os.path.normpath( os.path.join( install_environment.install_dir,
+                                                                                     target_directory ) ) )
+                # Make sure the target directory is not outside of $INSTALL_DIR.
+                if target_directory.startswith( os.path.realpath( install_environment.install_dir ) ):
+                    full_path_to_dir = os.path.abspath( os.path.join( install_environment.install_dir, target_directory ) )
+                else:
+                    full_path_to_dir = os.path.abspath( install_environment.install_dir )
+            else:
+                full_path_to_dir = os.path.abspath( install_environment.install_dir )
+            basic_util.move_file( current_dir=work_dir,
+                                  source=downloaded_filename,
+                                  destination=full_path_to_dir )
+        # Not sure why dir is ignored in this method, need to investigate...
+        dir = None
+        if initial_download:
+            return tool_dependency, filtered_actions, dir
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        platform_info_dict = tool_dependency_util.get_platform_info_dict()
+        platform_info_dict[ 'name' ] = str( tool_dependency.name )
+        platform_info_dict[ 'version' ] = str( tool_dependency.version )
+        url_template_elems = action_elem.findall( 'url_template' )
+        # Check if there are multiple url_template elements, each with attrib entries for a specific platform.
+        if len( url_template_elems ) > 1:
+            # <base_url os="darwin" extract="false">
+            #     http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.${architecture}/faToTwoBit
+            # </base_url>
+            # This method returns the url_elem that best matches the current platform as received from os.uname().
+            # Currently checked attributes are os and architecture.  These correspond to the values sysname and
+            # processor from the Python documentation for os.uname().
+            url_template_elem = tool_dependency_util.get_download_url_for_platform( url_template_elems, platform_info_dict )
+        else:
+            url_template_elem = url_template_elems[ 0 ]
+        action_dict[ 'url' ] = Template( url_template_elem.text.strip() ).safe_substitute( platform_info_dict )
+        action_dict[ 'target_directory' ] = action_elem.get( 'target_directory', None )
+        action_dict.update( self.get_elem_checksums( action_elem ) )
+        return action_dict
+
+
+class DownloadByUrl( Download, RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'download_by_url'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Download a file via HTTP.  If the value of initial_download is True, the recipe steps will be
+        filtered and returned and the installation directory (i.e., dir) will be defined and returned.
+        If we're not in the initial download stage, these actions will not occur, and None values will
+        be returned for them.
+        """
+        if initial_download:
+            # Eliminate the download_by_url action so remaining actions can be processed correctly.
+            filtered_actions = actions[ 1: ]
+        url = action_dict[ 'url' ]
+        is_binary = action_dict.get( 'is_binary', False )
+        if 'target_filename' in action_dict:
+            # Sometimes compressed archives extract their content to a folder named differently from
+            # the default download file name.  Using this attribute ensures that the file name is set
+            # appropriately and can be located after download, decompression and extraction.
+            downloaded_filename = action_dict[ 'target_filename' ]
+        else:
+            downloaded_filename = os.path.split( url )[ -1 ]
+
+        checksums = self.get_dict_checksums( action_dict )
+        log.debug( 'Attempting to download via url: %s', url )
+        dir = self.url_download( work_dir, downloaded_filename, url, extract=True, checksums=checksums )
+        if is_binary:
+            log_file = os.path.join( install_environment.install_dir, basic_util.INSTALLATION_LOG )
+            # Append mode creates the file if it does not yet exist, so no existence check is needed.
+            with open( log_file, 'ab' ) as logfile:
+                logfile.write( 'Successfully downloaded from url: %s\n' % action_dict[ 'url' ] )
+        log.debug( 'Successfully downloaded from url: %s', action_dict[ 'url' ] )
+        if initial_download:
+            return tool_dependency, filtered_actions, dir
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="download_by_url">
+        #     http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1.18.tar.bz2
+        # </action>
+        #
+        # <action type="download_by_url" md5sum="71dab132e21c0766f0de84c2371a9157" sha256sum="f3faaf34430d4782956562eb72906289e8e34d44d0c4d73837bdbeead7746b16">
+        #     http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1.18.tar.bz2
+        # </action>
+        if is_binary_download:
+            action_dict[ 'is_binary' ] = True
+        if action_elem.text:
+            action_dict[ 'url' ] = action_elem.text.strip()
+            target_filename = action_elem.get( 'target_filename', None )
+            if target_filename:
+                action_dict[ 'target_filename' ] = target_filename
+        action_dict.update( self.get_elem_checksums( action_elem ) )
+        return action_dict
+
+
+class DownloadFile( Download, RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'download_file'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Download a file.  If the value of initial_download is True, the recipe steps will be
+        filtered and returned and the installation directory (i.e., dir) will be defined and returned.
+        If we're not in the initial download stage, these actions will not occur, and None values will
+        be returned for them.
+        """
+        # <action type="download_file">http://effectors.org/download/version/TTSS_GUI-1.0.1.jar</action>
+        # Download a single file to the working directory.
+        if initial_download:
+            filtered_actions = actions[ 1: ]
+        url = action_dict[ 'url' ]
+        if 'target_filename' in action_dict:
+            # Sometimes compressed archives extract their content to a folder named differently from
+            # the default download file name.  Using this attribute ensures that the file name is set
+            # appropriately and can be located after download, decompression and extraction.
+            filename = action_dict[ 'target_filename' ]
+        else:
+            filename = url.split( '/' )[ -1 ]
+        if current_dir is not None:
+            work_dir = current_dir
+        checksums = self.get_dict_checksums( action_dict )
+        log.debug( 'Attempting to download via url: %s', url )
+        self.url_download( work_dir, filename, url, extract=action_dict[ 'extract' ], checksums=checksums )
+        if initial_download:
+            dir = os.path.curdir
+            return tool_dependency, filtered_actions, dir
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="download_file">http://effectors.org/download/version/TTSS_GUI-1.0.1.jar</action>
+        if action_elem.text:
+            action_dict[ 'url' ] = action_elem.text.strip()
+            target_filename = action_elem.get( 'target_filename', None )
+            if target_filename:
+                action_dict[ 'target_filename' ] = target_filename
+            action_dict[ 'extract' ] = asbool( action_elem.get( 'extract', False ) )
+        action_dict.update( self.get_elem_checksums( action_elem ) )
+        return action_dict
+
+
+class MakeDirectory( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'make_directory'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Make a directory on disk.  Since this class is not used in the initial download stage, no recipe step
+        filtering is performed here, and None values are always returned for filtered_actions and dir.
+        """
+        if os.path.isabs( action_dict[ 'full_path' ] ):
+            full_path = action_dict[ 'full_path' ]
+        else:
+            full_path = os.path.join( current_dir, action_dict[ 'full_path' ] )
+        self.make_directory( full_path=full_path )
+        return tool_dependency, None, None
+
+    def make_directory( self, full_path ):
+        if not os.path.exists( full_path ):
+            os.makedirs( full_path )
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="make_directory">$INSTALL_DIR/lib/python</action>
+        if action_elem.text:
+            action_dict[ 'full_path' ] = basic_util.evaluate_template( action_elem.text, install_environment )
+        return action_dict
+
+
+class MakeInstall( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'make_install'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Execute a make_install command in a shell.  Since this class is not used in the initial download stage,
+        no recipe step filtering is performed here, and None values are always returned for filtered_actions and dir.
+        """
+        # make; make install; allow providing make options
+        with settings( warn_only=True ):
+            make_opts = action_dict.get( 'make_opts', '' )
+            cmd = install_environment.build_command( 'make %s && make install' % make_opts )
+            install_environment.handle_command( tool_dependency=tool_dependency,
+                                                cmd=cmd,
+                                                return_output=False,
+                                                job_name=package_name )
+            # The caller should check the status of the returned tool_dependency since this function
+            # does nothing with the return_code.
+            return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # make; make install; allow providing make options
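+        # Example (illustrative): <action type="make_install">-j4</action>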
+        if action_elem.text:
+            make_opts = basic_util.evaluate_template( action_elem.text, install_environment )
+            action_dict[ 'make_opts' ] = make_opts
+        return action_dict
+
+
+class MoveDirectoryFiles( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'move_directory_files'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Move a directory of files.  Since this class is not used in the initial download stage, no recipe step
+        filtering is performed here, and None values are always returned for filtered_actions and dir.
+        """
+        self.move_directory_files( current_dir=current_dir,
+                                   source_dir=action_dict[ 'source_directory' ],
+                                   destination_dir=action_dict[ 'destination_directory' ] )
+        return tool_dependency, None, None
+
+    def move_directory_files( self, current_dir, source_dir, destination_dir ):
+        source_directory = os.path.abspath( os.path.join( current_dir, source_dir ) )
+        destination_directory = os.path.abspath( destination_dir )
+        if not os.path.isdir(destination_directory):
+            os.makedirs(destination_directory)
+        for dir_entry in os.listdir(source_directory):
+            source_entry = os.path.join(source_directory, dir_entry)
+            if os.path.islink(source_entry):
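+                # Re-create the link at the destination rather than moving the inode,
+                # preserving the original (possibly relative) link text.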
+                destination_entry = os.path.join(destination_directory, dir_entry)
+                os.symlink(os.readlink(source_entry), destination_entry)
+                os.remove(source_entry)
+            else:
+                shutil.move(source_entry, destination_directory)
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="move_directory_files">
+        #     <source_directory>bin</source_directory>
+        #     <destination_directory>$INSTALL_DIR/bin</destination_directory>
+        # </action>
+        for move_elem in action_elem:
+            move_elem_text = basic_util.evaluate_template( move_elem.text, install_environment )
+            if move_elem_text:
+                action_dict[ move_elem.tag ] = move_elem_text
+        return action_dict
+
+
+class MoveFile( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'move_file'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Move a file on disk.  Since this class is not used in the initial download stage, no recipe step
+        filtering is performed here, and None values are always returned for filtered_actions and dir.
+        """
+        basic_util.move_file( current_dir=current_dir,
+                              source=os.path.join( action_dict[ 'source' ] ),
+                              destination=os.path.join( action_dict[ 'destination' ] ),
+                              rename_to=action_dict[ 'rename_to' ] )
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="move_file" rename_to="new_file_name">
+        #     <source>misc/some_file</source>
+        #     <destination>$INSTALL_DIR/bin</destination>
+        # </action>
+        action_dict[ 'source' ] = basic_util.evaluate_template( action_elem.find( 'source' ).text, install_environment )
+        action_dict[ 'destination' ] = basic_util.evaluate_template( action_elem.find( 'destination' ).text, install_environment )
+        action_dict[ 'rename_to' ] = action_elem.get( 'rename_to' )
+        return action_dict
+
+
+class RegexReplace( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'regex_replace'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Search and replace text in a file using regular expressions. Since this class is not used in the initial
+        download stage, no recipe step filtering is performed here, and None values are always returned for
+        filtered_actions and dir.
+
+        This step supports the full range of Python's regular expression engine, including backreferences in
+        the replacement text. Example::
+
+            <action type="regex_replace" filename="Makefile">
+                <regex>^CFLAGS(\s*)=\s*-g\s*-Wall\s*-O2\s*$$</regex>
+                <replacement>CFLAGS\1= -g -Wall -O2 -I$$(NCURSES_INCLUDE_PATH)/ncurses/ -I$$(NCURSES_INCLUDE_PATH) -L$$(NCURSES_LIB_PATH)</replacement>
+            </action>
+
+        Before::
+
+            CFLAGS  = -g -Wall -O2
+
+        After::
+
+            CFLAGS  = -g -Wall -O2 -I$(NCURSES_INCLUDE_PATH)/ncurses/ -I$(NCURSES_INCLUDE_PATH) -L$(NCURSES_LIB_PATH)
+        """
+        log_file = os.path.join( install_environment.install_dir, basic_util.INSTALLATION_LOG )
+        # Append mode creates the file if it does not yet exist, so no existence check is needed.
+        logfile = open( log_file, 'ab' )
+        if os.path.isabs( action_dict[ 'filename' ] ):
+            filename = action_dict[ 'filename' ]
+            if not ( filename.startswith( current_dir ) or filename.startswith( install_environment.install_dir ) ):
+                return tool_dependency, None, None
+        else:
+            filename = os.path.abspath( os.path.join( current_dir, action_dict[ 'filename' ] ) )
+        regex = re.compile( action_dict[ 'regex' ] )
+        replacement = action_dict[ 'replacement' ]
+        temp_fh = tempfile.NamedTemporaryFile( dir=current_dir )
+        ofh = temp_fh.file
+        total_replacements = 0
+        with open( filename, 'r' ) as haystack:
+            for line in haystack:
+                altered_text, replacement_count = re.subn( regex, replacement, line )
+                if replacement_count > 0:
+                    ofh.write( altered_text )
+                    total_replacements += replacement_count
+                else:
+                    ofh.write( line )
+            ofh.flush()
+        shutil.copyfile( temp_fh.name, filename )
+        log_text = 'Successfully replaced pattern %s with text %s in file %s: %s replacements made\n'
+        log_text = log_text % ( action_dict[ 'regex' ], action_dict[ 'replacement' ], filename, total_replacements )
+        log.debug( log_text )
+        logfile.write( log_text )
+        logfile.close()
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        """Populate action_dict with the provided filename, regex, and replacement text."""
+        action_dict[ 'filename' ] = basic_util.evaluate_template( action_elem.get( 'filename' ), install_environment )
+        action_dict[ 'regex' ] = basic_util.evaluate_template( action_elem.find( 'regex' ).text, install_environment )
+        action_dict[ 'replacement' ] = basic_util.evaluate_template( action_elem.find( 'replacement' ).text, install_environment )
+        return action_dict
+
+
+class SetEnvironment( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'set_environment'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Configure an install environment.  Since this class is not used in the initial download stage,
+        no recipe step filtering is performed here, and None values are always returned for filtered_actions
+        and dir.
+        """
+        # Currently the only action supported in this category is "environment_variable".
+        cmds = install_environment.environment_commands( 'set_environment' )
+        env_var_dicts = action_dict.get( 'environment_variable', [] )
+        root_dir_dict = dict( action='set_to',
+                              name='%s_ROOT_DIR' % re.sub( r"[^A-Z0-9_]", "_", tool_dependency.name.upper() ),
+                              value=install_environment.install_dir )
+        env_var_dicts.append( root_dir_dict )
+        for env_var_dict in env_var_dicts:
+            # Check for the presence of the $ENV[] key string and populate it if possible.
+            env_var_dict = self.handle_environment_variables( install_environment=install_environment,
+                                                              tool_dependency=tool_dependency,
+                                                              env_var_dict=env_var_dict,
+                                                              set_prior_environment_commands=cmds )
+            env_file_builder.append_line( **env_var_dict )
+        # The caller should check the status of the returned tool_dependency since return_code is not
+        # returned by this function.
+        return tool_dependency, None, None
+
+    def handle_environment_variables( self, install_environment, tool_dependency, env_var_dict,
+                                      set_prior_environment_commands ):
+        """
+        This method works with a combination of three tool dependency definition tag sets, which are defined in
+        the tool_dependencies.xml file in the order discussed here.  The example for this discussion is the
+        tool_dependencies.xml file contained in the osra repository, which is available at:
+
+        https://testtoolshed.g2.bx.psu.edu/view/bgruening/osra
+
+        The first tag set defines a complex repository dependency like this.  This tag set ensures that changeset
+        revision XXX of the repository named package_graphicsmagick_1_3 owned by YYY in the tool shed ZZZ has been
+        previously installed::
+
+            <tool_dependency>
+                <package name="graphicsmagick" version="1.3.18">
+                    <repository changeset_revision="XXX" name="package_graphicsmagick_1_3" owner="YYY" prior_installation_required="True" toolshed="ZZZ" />
+                </package>
+                ...
+
+        By the way, there is an env.sh file associated with version 1.3.18 of the graphicsmagick package which looks
+        something like this (we'll reference this file later in this discussion)::
+
+            GRAPHICSMAGICK_ROOT_DIR=/<my configured tool dependency path>/graphicsmagick/1.3.18/YYY/package_graphicsmagick_1_3/XXX/gmagick;
+            export GRAPHICSMAGICK_ROOT_DIR
+
+        The second tag set defines a specific package dependency that has been previously installed (guaranteed by the
+        tag set discussed above) and compiled, where the compiled dependency is needed by the tool dependency currently
+        being installed (osra version 2.0.0 in this case) in order for its installation and compilation to
+        succeed.  This tag set is contained within the <package name="osra" version="2.0.0"> tag set, which implies that
+        version 2.0.0 of the osra package requires version 1.3.18 of the graphicsmagick package in order to successfully
+        compile.  When this tag set is handled, one of the effects is that the env.sh file associated with graphicsmagick
+        version 1.3.18 is "sourced", which undoubtedly sets or alters certain environment variables (e.g. PATH, PYTHONPATH,
+        etc)::
+
+            <!-- populate the environment variables from the dependent repositories -->
+            <action type="set_environment_for_install">
+                <repository changeset_revision="XXX" name="package_graphicsmagick_1_3" owner="YYY" toolshed="ZZZ">
+                    <package name="graphicsmagick" version="1.3.18" />
+                </repository>
+            </action>
+
+        The third tag set enables discovery of the same required package dependency discussed above for correctly compiling
+        the osra version 2.0.0 package, but in this case the package can be discovered at tool execution time.  Using the
+        $ENV[] option as shown in this example, the value of the environment variable named GRAPHICSMAGICK_ROOT_DIR (which
+        was set in the environment using the second tag set described above) will be used to automatically alter the env.sh
+        file associated with the osra version 2.0.0 tool dependency when it is installed into Galaxy (refer to the
+        env.sh file for version 1.3.18 of the graphicsmagick package discussed above)::
+
+            <action type="set_environment">
+                <environment_variable action="prepend_to" name="LD_LIBRARY_PATH">$ENV[GRAPHICSMAGICK_ROOT_DIR]/lib/</environment_variable>
+                <environment_variable action="prepend_to" name="LD_LIBRARY_PATH">$INSTALL_DIR/potrace/build/lib/</environment_variable>
+                <environment_variable action="prepend_to" name="PATH">$INSTALL_DIR/bin</environment_variable>
+                <!-- OSRA_DATA_FILES is only used by the galaxy wrapper and is not part of OSRA -->
+                <environment_variable action="set_to" name="OSRA_DATA_FILES">$INSTALL_DIR/share</environment_variable>
+            </action>
+
+        The above tag will produce an env.sh file for version 2.0.0 of the osra package when it is installed into Galaxy
+        that looks something like this.  Notice that the path to the gmagick binary is included here since it expands the
+        defined $ENV[GRAPHICSMAGICK_ROOT_DIR] value in the above tag set::
+
+            LD_LIBRARY_PATH=/<my configured tool dependency path>/graphicsmagick/1.3.18/YYY/package_graphicsmagick_1_3/XXX/gmagick/lib/:$LD_LIBRARY_PATH;
+            export LD_LIBRARY_PATH
+            LD_LIBRARY_PATH=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/potrace/build/lib/:$LD_LIBRARY_PATH;
+            export LD_LIBRARY_PATH
+            PATH=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/bin:$PATH;
+            export PATH
+            OSRA_DATA_FILES=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/share;
+            export OSRA_DATA_FILES
+        """
+        env_var_value = env_var_dict[ 'value' ]
+        # env_var_value is the text of an environment variable tag like this:
+        # <environment_variable action="prepend_to" name="LD_LIBRARY_PATH">
+        # Here is an example of what env_var_value could look like: $ENV[GRAPHICSMAGICK_ROOT_DIR]/lib/
+        if '$ENV[' in env_var_value and ']' in env_var_value:
+            # Pull out the name of the environment variable to populate.
+            inherited_env_var_name = env_var_value.split( '[' )[1].split( ']' )[0]
+            to_replace = '$ENV[%s]' % inherited_env_var_name
+            found = False
+            for env_cmd in set_prior_environment_commands:
+                # LD_LIBRARY_PATH=/<my configured tool dependency path>/<some path>; export LD_LIBRARY_PATH
+                if env_cmd.startswith( inherited_env_var_name ):
+                    env_val = env_cmd.split( '=' )[1]
+                    # /<my configured tool dependency path>/<some path>; export LD_LIBRARY_PATH
+                    env_val = env_val.split( ';' )[0]
+                    # /<my configured tool dependency path>/<some path>
+                    log.info( 'Replacing %s with %s in env.sh for this repository.', to_replace, env_val )
+                    env_var_value = env_var_value.replace( to_replace, env_val )
+                    found = True
+                    break
+            if not found:
+                # Fall back to a plain shell variable reference (e.g. $VAR) so the shell
+                # resolves it at runtime instead of leaving the unparseable $ENV[] syntax behind.
+                log.debug( 'Environment variable %s not found, replacing the $ENV[] reference with $%s.', inherited_env_var_name, inherited_env_var_name )
+                env_var_value = env_var_value.replace( to_replace, '$%s' % inherited_env_var_name )
+            env_var_dict[ 'value' ] = env_var_value
+        return env_var_dict
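+
+    # Worked example (illustrative): for env_var_dict
+    #     { 'action': 'prepend_to', 'name': 'LD_LIBRARY_PATH',
+    #       'value': '$ENV[GRAPHICSMAGICK_ROOT_DIR]/lib/' }
+    # and a prior environment command
+    #     'GRAPHICSMAGICK_ROOT_DIR=/deps/gmagick; export GRAPHICSMAGICK_ROOT_DIR'
+    # the returned dict's 'value' becomes '/deps/gmagick/lib/'.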
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # This function is only called for set environment actions as defined above, not within a <set_environment> tool
+        # dependency type. Here is an example of the tag set this function does handle:
+        # <action type="set_environment">
+        #     <environment_variable name="PYTHONPATH" action="append_to">$INSTALL_DIR/lib/python</environment_variable>
+        #     <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+        # </action>
+        # Here is an example of the tag set this function does not handle:
+        # <action type="set_environment">
+        #     <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR</environment_variable>
+        # </action>
+        env_manager = EnvManager( self.app )
+        env_var_dicts = []
+        for env_elem in action_elem:
+            if env_elem.tag == 'environment_variable':
+                env_var_dict = env_manager.create_env_var_dict( elem=env_elem,
+                                                                install_environment=install_environment )
+                if env_var_dict:
+                    env_var_dicts.append( env_var_dict )
+        if env_var_dicts:
+            # Only <environment_variable> children were collected above; any other children
+            # (e.g. XML comments) are ignored.
+            action_dict[ 'environment_variable' ] = env_var_dicts
+        return action_dict
+
+
+class SetEnvironmentForInstall( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'set_environment_for_install'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Configure an environment for compiling a package.  Since this class is not used in the initial
+        download stage, no recipe step filtering is performed here, and None values are always returned
+        for filtered_actions and dir.
+        """
+        # Currently the only action supported in this category is a list of paths to one or more tool
+        # dependency env.sh files, the environment setting in each of which will be injected into the
+        # environment for all <action type="shell_command"> tags that follow this
+        # <action type="set_environment_for_install"> tag set in the tool_dependencies.xml file.
+        env_shell_file_paths = action_dict.get( 'env_shell_file_paths', [] )
+        install_environment.add_env_shell_file_paths( env_shell_file_paths )
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="set_environment_for_install">
+        #    <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056">
+        #        <package name="numpy" version="1.7.1" />
+        #    </repository>
+        # </action>
+        # This action type allows for defining an environment that will properly compile a tool dependency.
+        # Currently, tag set definitions like that above are supported, but in the future other approaches
+        # to setting environment variables or other environment attributes can be supported.  The above tag
+        # set will result in the installed and compiled numpy version 1.7.1 binary being used when compiling
+        # the current tool dependency package.  See the package_matplotlib_1_2 repository in the test tool
+        # shed for a real-world example.
+        all_env_shell_file_paths = []
+        env_manager = EnvManager( self.app )
+        for env_elem in action_elem:
+            if env_elem.tag == 'repository':
+                env_shell_file_paths = env_manager.get_env_shell_file_paths( env_elem )
+                if env_shell_file_paths:
+                    all_env_shell_file_paths.extend( env_shell_file_paths )
+        action_dict[ 'env_shell_file_paths' ] = all_env_shell_file_paths
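+        # For illustration, with the XML example above this might be set to something like
+        # (the tool dependency directory layout is hypothetical):
+        #     [ '/tool_deps/numpy/1.7.1/test/package_numpy_1_7/c84c6a8be056/env.sh' ]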
+        return action_dict
+
+
+class SetupPerlEnvironment( Download, RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'setup_perl_environment'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Initialize the environment for installing Perl packages.  The class is called during the initial
+        download stage when installing packages, so the value of initial_download will generally be True.
+        However, the parameter value allows this class to also be used in the second stage of the installation,
+        although it may never be necessary.  If initial_download is True, the recipe steps will be filtered
+        and returned and the installation directory (i.e., dir) will be defined and returned.  If we're not
+        in the initial download stage, these actions will not occur, and None values will be returned for them.
+        """
+        # <action type="setup_perl_environment">
+        #       <repository name="package_perl_5_18" owner="bgruening">
+        #           <package name="perl" version="5.18.1" />
+        #       </repository>
+        #       <!-- allow downloading and installing a Perl package from cpan.org -->
+        #       <package>XML::Parser</package>
+        #       <package sha256sum="da8a88112bff0224bd17b74eb28f605f96db6481ed5c8c00ca7e851522deee2b">http://search.cpan.org/CPAN/authors/id/C/CJ/CJFIELDS/BioPerl-1.6.922.tar.gz</package>
+        # </action>
+        dir = None
+        if initial_download:
+            filtered_actions = actions[ 1: ]
+        env_shell_file_paths = action_dict.get( 'env_shell_file_paths', None )
+        if env_shell_file_paths is None:
+            log.debug( 'Missing Perl environment; make sure the specified Perl installation exists.' )
+            if initial_download:
+                return tool_dependency, filtered_actions, dir
+            return tool_dependency, None, None
+        else:
+            install_environment.add_env_shell_file_paths( env_shell_file_paths )
+        log.debug( 'Handling setup_perl_environment for tool dependency %s with install_environment.env_shell_file_paths:\n%s' %
+                   ( str( tool_dependency.name ), str( install_environment.env_shell_file_paths ) ) )
+        dir = os.path.curdir
+        current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+        with lcd( current_dir ):
+            with settings( warn_only=True ):
+                perl_packages = action_dict.get( 'perl_packages', [] )
+                for perl_package_dict in perl_packages:
+                    perl_package = perl_package_dict['package']
+                    # If set to a true value then MakeMaker's prompt function will always
+                    # return the default without waiting for user input.
+                    cmd = '''PERL_MM_USE_DEFAULT=1; export PERL_MM_USE_DEFAULT; '''
+                    cmd += 'HOME=%s; export HOME; ' % work_dir
+                    cmd += 'export PERL5LIB=$INSTALL_DIR/lib/perl5:$PERL5LIB;'
+                    cmd += 'export PATH=$INSTALL_DIR/bin:$PATH;'
+                    if perl_package.find( '://' ) != -1:
+                        # We assume a URL to a Perl package tarball.
+                        url = perl_package
+                        perl_package_name = url.split( '/' )[ -1 ]
+                        checksums = perl_package_dict.get('checksums', {})
+                        dir = self.url_download( work_dir, perl_package_name, url, extract=True, checksums=checksums )
+                        # Search for Makefile.PL (ExtUtils::MakeMaker) or Build.PL (Module::Build).
+                        tmp_work_dir = os.path.join( work_dir, dir )
+                        if os.path.exists( os.path.join( tmp_work_dir, 'Makefile.PL' ) ):
+                            cmd += '''perl Makefile.PL INSTALL_BASE=$INSTALL_DIR && make && make install'''
+                        elif os.path.exists( os.path.join( tmp_work_dir, 'Build.PL' ) ):
+                            cmd += '''perl Build.PL --install_base $INSTALL_DIR && perl Build && perl Build install'''
+                        else:
+                            log.debug( 'No Makefile.PL or Build.PL file found in the archive downloaded from %s. Skipping installation of %s.' %
+                                ( url, perl_package_name ) )
+                            if initial_download:
+                                return tool_dependency, filtered_actions, dir
+                            return tool_dependency, None, None
+                        with lcd( tmp_work_dir ):
+                            cmd = install_environment.build_command( basic_util.evaluate_template( cmd, install_environment ) )
+                            return_code = install_environment.handle_command( tool_dependency=tool_dependency,
+                                                                              cmd=cmd,
+                                                                              return_output=False,
+                                                                              job_name=package_name)
+                            if return_code:
+                                if initial_download:
+                                    return tool_dependency, filtered_actions, dir
+                                return tool_dependency, None, None
+                    else:
+                        # Perl package from CPAN without a version number.
+                        # cpanm must be provided by the parent Perl distribution, otherwise this will not work.
+                        cmd += '''cpanm --local-lib=$INSTALL_DIR %s''' % ( perl_package )
+                        cmd = install_environment.build_command( basic_util.evaluate_template( cmd, install_environment ) )
+                        return_code = install_environment.handle_command( tool_dependency=tool_dependency,
+                                                                          cmd=cmd,
+                                                                          return_output=False,
+                                                                          job_name=package_name )
+                        if return_code:
+                            if initial_download:
+                                return tool_dependency, filtered_actions, dir
+                            return tool_dependency, None, None
+                # Pull in perl dependencies (runtime).
+                env_file_builder.handle_action_shell_file_paths( action_dict )
+                # Recursively add dependent PERL5LIB and PATH to env.sh & anything else needed.
+                env_file_builder.append_line( name="PERL5LIB",
+                                              action="prepend_to",
+                                              value=os.path.join( install_environment.install_dir, 'lib', 'perl5' ) )
+                env_file_builder.append_line( name="PATH",
+                                              action="prepend_to",
+                                              value=os.path.join( install_environment.install_dir, 'bin' ) )
+                return_code = env_file_builder.return_code
+                if return_code:
+                    if initial_download:
+                        return tool_dependency, filtered_actions, dir
+                    return tool_dependency, None, None
+        if initial_download:
+            return tool_dependency, filtered_actions, dir
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # Set up a Perl environment.
+        # <action type="setup_perl_environment">
+        #       <repository name="package_perl_5_18" owner="bgruening">
+        #           <package name="perl" version="5.18.1" />
+        #       </repository>
+        #       <!-- allow downloading and installing a Perl package from cpan.org -->
+        #       <package>XML::Parser</package>
+        #       <package>http://search.cpan.org/CPAN/authors/id/C/CJ/CJFIELDS/BioPerl-1.6.922.tar.gz</package>
+        # </action>
+        # Discover all child repository dependency tags and define the path to an env.sh file associated
+        # with each repository.  This will potentially update the value of the 'env_shell_file_paths' entry
+        # in action_dict.
+        all_env_shell_file_paths = []
+        env_manager = EnvManager( self.app )
+        action_dict = env_manager.get_env_shell_file_paths_from_setup_environment_elem( all_env_shell_file_paths,
+                                                                                        action_elem,
+                                                                                        action_dict )
+        perl_packages = []
+        for env_elem in action_elem:
+            if env_elem.tag == 'package':
+                # A valid package definition can be:
+                #    XML::Parser
+                #     http://search.cpan.org/CPAN/authors/id/C/CJ/CJFIELDS/BioPerl-1.6.922.tar.gz
+                # Unfortunately CPAN does not support pinning package versions, so for real reproducibility
+                # you need to specify the tarball paths, in the correct installation order, manually.
+                perl_packages.append( dict( package=env_elem.text.strip(),
+                                            checksums=self.get_elem_checksums( env_elem ) ) )
+        if perl_packages:
+            action_dict[ 'perl_packages' ] = perl_packages
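+            # For illustration, the two package definitions above would yield entries like
+            # (the checksums dict shape is assumed from get_elem_checksums):
+            #     { 'package': 'XML::Parser', 'checksums': {} }
+            #     { 'package': 'http://search.cpan.org/CPAN/authors/id/C/CJ/CJFIELDS/BioPerl-1.6.922.tar.gz', 'checksums': {} }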
+        return action_dict
+
+
+class SetupREnvironment( Download, RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'setup_r_environment'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Initialize the environment for installing R packages.  The class is called during the initial
+        download stage when installing packages, so the value of initial_download will generally be True.
+        However, the parameter value allows this class to also be used in the second stage of the installation,
+        although it may never be necessary.  If initial_download is True, the recipe steps will be filtered
+        and returned and the installation directory (i.e., dir) will be defined and returned.  If we're not
+        in the initial download stage, these actions will not occur, and None values will be returned for them.
+        """
+        # <action type="setup_r_environment">
+        #       <repository name="package_r_3_0_1" owner="bgruening">
+        #           <package name="R" version="3.0.1" />
+        #       </repository>
+        #       <!-- allow installing one or more R packages -->
+        #       <package sha256sum="7056b06041fd96ebea9c74f445906f1a5cd784b2b1573c02fcaee86a40f3034d">https://github.com/bgruening/download_store/raw/master/DESeq2-1_0_18/BiocGenerics_0.6.0.tar.gz</package>
+        # </action>
+        dir = None
+        if initial_download:
+            filtered_actions = actions[ 1: ]
+        env_shell_file_paths = action_dict.get( 'env_shell_file_paths', None )
+        if env_shell_file_paths is None:
+            error_message = 'Missing R environment. Please verify that the specified R installation exists.'
+            log.error( error_message )
+            status = self.app.install_model.ToolDependency.installation_status.ERROR
+            tool_dependency = tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                   tool_dependency,
+                                                                                   status=status,
+                                                                                   error_message=error_message )
+            return tool_dependency, [], None
+        else:
+            install_environment.add_env_shell_file_paths( env_shell_file_paths )
+        log.debug( 'Handling setup_r_environment for tool dependency %s with install_environment.env_shell_file_paths:\n%s' %
+                   ( str( tool_dependency.name ), str( install_environment.env_shell_file_paths ) ) )
+        tarball_names = []
+        for r_package_dict in action_dict[ 'r_packages' ]:
+            url = r_package_dict['package']
+            filename = url.split( '/' )[ -1 ]
+            tarball_names.append( filename )
+            checksums = r_package_dict.get('checksums', {})
+            self.url_download( work_dir, filename, url, extract=False, checksums=checksums )
+        dir = os.path.curdir
+        current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+        with lcd( current_dir ):
+            with settings( warn_only=True ):
+                for tarball_name in tarball_names:
+                    # Use raw strings so that python won't automatically unescape the quotes before passing the command
+                    # to subprocess.Popen.
+                    cmd = r'''PATH=$PATH:$R_HOME/bin; export PATH; R_LIBS=$INSTALL_DIR:$R_LIBS; export R_LIBS;
+                        Rscript -e "tryCatch( { install.packages(c('%s'), lib = '$INSTALL_DIR', repos = NULL, dependencies = FALSE) }, error = function(e) { print(e); quit(status = 1) }, warning = function(w) { if ( grepl('had non-zero exit status|is not writable|installation of one of more packages failed', as.character(w)) ) { print(w); quit(status = 1) } } )"''' % \
+                        ( str( tarball_name ) )
+                    cmd = install_environment.build_command( basic_util.evaluate_template( cmd, install_environment ) )
+                    return_code = install_environment.handle_command( tool_dependency=tool_dependency,
+                                                                      cmd=cmd,
+                                                                      return_output=False,
+                                                                      job_name=package_name )
+                    if return_code:
+                        if initial_download:
+                            return tool_dependency, filtered_actions, dir
+                        return tool_dependency, None, None
+                # R libraries are installed to $INSTALL_DIR (install_dir); now set the R_LIBS path to that directory.
+                # Pull in R environment (runtime).
+                env_file_builder.handle_action_shell_file_paths( action_dict )
+                env_file_builder.append_line( name="R_LIBS", action="prepend_to", value=install_environment.install_dir )
+                return_code = env_file_builder.return_code
+                if return_code:
+                    if initial_download:
+                        return tool_dependency, filtered_actions, dir
+                    return tool_dependency, None, None
+        if initial_download:
+            return tool_dependency, filtered_actions, dir
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # Set up an R environment.
+        # <action type="setup_r_environment">
+        #       <repository name="package_r_3_0_1" owner="bgruening">
+        #           <package name="R" version="3.0.1" />
+        #       </repository>
+        #       <!-- allow installing one or more R packages -->
+        #       <package>https://github.com/bgruening/download_store/raw/master/DESeq2-1_0_18/BiocGenerics_0.6.0.tar.gz</package>
+        # </action>
+        # Discover all child repository dependency tags and define the path to an env.sh file
+        # associated with each repository.  This will potentially update the value of the
+        # 'env_shell_file_paths' entry in action_dict.
+        all_env_shell_file_paths = []
+        env_manager = EnvManager( self.app )
+        action_dict = env_manager.get_env_shell_file_paths_from_setup_environment_elem( all_env_shell_file_paths,
+                                                                                        action_elem,
+                                                                                        action_dict )
+        r_packages = list()
+        for env_elem in action_elem:
+            if env_elem.tag == 'package':
+                r_packages.append( dict( package=env_elem.text.strip(),
+                                         checksums=self.get_elem_checksums( env_elem ) ) )
+        if r_packages:
+            action_dict[ 'r_packages' ] = r_packages
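+            # For illustration, the package definition above would yield an entry like
+            # (the checksums dict shape is assumed from get_elem_checksums):
+            #     { 'package': 'https://github.com/bgruening/download_store/raw/master/DESeq2-1_0_18/BiocGenerics_0.6.0.tar.gz', 'checksums': {} }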
+        return action_dict
+
+
+class SetupRubyEnvironment( Download, RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'setup_ruby_environment'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Initialize the environment for installing Ruby packages.  The class is called during the initial
+        download stage when installing packages, so the value of initial_download will generally be True.
+        However, the parameter value allows this class to also be used in the second stage of the installation,
+        although it may never be necessary.  If initial_download is True, the recipe steps will be filtered
+        and returned and the installation directory (i.e., dir) will be defined and returned.  If we're not
+        in the initial download stage, these actions will not occur, and None values will be returned for them.
+        """
+        # <action type="setup_ruby_environment">
+        #       <repository name="package_ruby_2_0" owner="bgruening">
+        #           <package name="ruby" version="2.0" />
+        #       </repository>
+        #       <!-- allow downloading and installing a Ruby package from http://rubygems.org/ -->
+        #       <package>protk</package>
+        #       <package>protk=1.2.4</package>
+        #       <package>http://url-to-some-gem-file.de/protk.gem</package>
+        # </action>
+        dir = None
+        if initial_download:
+            filtered_actions = actions[ 1: ]
+        env_shell_file_paths = action_dict.get( 'env_shell_file_paths', None )
+        if env_shell_file_paths is None:
+            log.debug( 'Missing Ruby environment; make sure the specified Ruby installation exists.' )
+            if initial_download:
+                return tool_dependency, filtered_actions, dir
+            return tool_dependency, None, None
+        else:
+            install_environment.add_env_shell_file_paths( env_shell_file_paths )
+        log.debug( 'Handling setup_ruby_environment for tool dependency %s with install_environment.env_shell_file_paths:\n%s' %
+                   ( str( tool_dependency.name ), str( install_environment.env_shell_file_paths ) ) )
+        dir = os.path.curdir
+        current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+        with lcd( current_dir ):
+            with settings( warn_only=True ):
+                ruby_package_tups = action_dict.get( 'ruby_package_tups', [] )
+                for ruby_package_tup_dict in ruby_package_tups:
+                    ruby_package_tup = ruby_package_tup_dict['package']
+                    gem, gem_version, gem_parameters = ruby_package_tup
+                    if gem_parameters:
+                        gem_parameters = '-- %s' % gem_parameters
+                    else:
+                        gem_parameters = ''
+                    if os.path.isfile( gem ):
+                        # We assume a locally shipped gem file.
+                        cmd = '''PATH=$PATH:$RUBY_HOME/bin; export PATH; GEM_HOME=$INSTALL_DIR; export GEM_HOME;
+                                gem install --local %s %s''' % ( gem, gem_parameters )
+                    elif gem.find( '://' ) != -1:
+                        # We assume a URL to a gem file.
+                        url = gem
+                        gem_name = url.split( '/' )[ -1 ]
+                        checksums = ruby_package_tup_dict.get('checksums', {})
+                        self.url_download( work_dir, gem_name, url, extract=False, checksums=checksums )
+                        cmd = '''PATH=$PATH:$RUBY_HOME/bin; export PATH; GEM_HOME=$INSTALL_DIR; export GEM_HOME;
+                                gem install --local %s %s''' % ( gem_name, gem_parameters )
+                    else:
+                        # A gem from rubygems.org, with or without a version number.
+                        if gem_version:
+                            # Specific ruby gem version was requested.
+                            # Use raw strings so that python won't automatically unescape the quotes before passing the command
+                            # to subprocess.Popen.
+                            cmd = r'''PATH=$PATH:$RUBY_HOME/bin; export PATH; GEM_HOME=$INSTALL_DIR; export GEM_HOME;
+                                gem install %s --version "=%s" %s''' % ( gem, gem_version, gem_parameters)
+                        else:
+                            # no version number given
+                            cmd = '''PATH=$PATH:$RUBY_HOME/bin; export PATH; GEM_HOME=$INSTALL_DIR; export GEM_HOME;
+                                gem install %s %s''' % ( gem, gem_parameters )
+                    cmd = install_environment.build_command( basic_util.evaluate_template( cmd, install_environment ) )
+                    return_code = install_environment.handle_command( tool_dependency=tool_dependency,
+                                                                      cmd=cmd,
+                                                                      return_output=False,
+                                                                      job_name=package_name )
+                    if return_code:
+                        if initial_download:
+                            return tool_dependency, filtered_actions, dir
+                        return tool_dependency, None, None
+                # Pull in ruby dependencies (runtime).
+                env_file_builder.handle_action_shell_file_paths( action_dict )
+                env_file_builder.append_line( name="GEM_PATH",
+                                              action="prepend_to",
+                                              value=install_environment.install_dir )
+                env_file_builder.append_line( name="PATH",
+                                              action="prepend_to",
+                                              value=os.path.join( install_environment.install_dir, 'bin' ) )
+                return_code = env_file_builder.return_code
+                if return_code:
+                    if initial_download:
+                        return tool_dependency, filtered_actions, dir
+                    return tool_dependency, None, None
+        if initial_download:
+            return tool_dependency, filtered_actions, dir
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # Set up a Ruby environment.
+        # <action type="setup_ruby_environment">
+        #       <repository name="package_ruby_2_0" owner="bgruening">
+        #           <package name="ruby" version="2.0" />
+        #       </repository>
+        #       <!-- allow downloading and installing a Ruby package from http://rubygems.org/ -->
+        #       <package>protk</package>
+        #       <package>protk=1.2.4</package>
+        #       <package sha256sum="some_hash">http://url-to-some-gem-file.de/protk.gem</package>
+        # </action>
+        # Discover all child repository dependency tags and define the path to an env.sh file
+        # associated with each repository.  This will potentially update the value of the
+        # 'env_shell_file_paths' entry in action_dict.
+        all_env_shell_file_paths = []
+        env_manager = EnvManager( self.app )
+        action_dict = env_manager.get_env_shell_file_paths_from_setup_environment_elem( all_env_shell_file_paths,
+                                                                                        action_elem,
+                                                                                        action_dict )
+        ruby_package_tups = []
+        for env_elem in action_elem:
+            if env_elem.tag == 'package':
+                # A valid gem definition can be:
+                #    protk=1.2.4
+                #    protk
+                #    ftp://ftp.gruening.de/protk.gem
+                gem_token = env_elem.text.strip().split( '=' )
+                gem_parameters = env_elem.get( 'parameters', None)
+                if len( gem_token ) == 2:
+                    # version string
+                    gem_name = gem_token[ 0 ]
+                    gem_version = gem_token[ 1 ]
+                    tup = ( gem_name, gem_version, gem_parameters )
+                else:
+                    # gem name for rubygems.org without version number
+                    gem = env_elem.text.strip()
+                    tup = ( gem, None, gem_parameters )
+                ruby_package_tups.append( dict( package=tup,
+                                                checksums=self.get_elem_checksums( env_elem ) ) )
+        if ruby_package_tups:
+            action_dict[ 'ruby_package_tups' ] = ruby_package_tups
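+            # For illustration, the package definitions above parse to ( gem, version, parameters )
+            # tuples, e.g.:
+            #     { 'package': ( 'protk', None, None ), 'checksums': {} }
+            #     { 'package': ( 'protk', '1.2.4', None ), 'checksums': {} }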
+        return action_dict
+
+
+class SetupPythonEnvironment( Download, RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'setup_python_environment'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Initialize the environment for installing Python packages.  The class is called during the initial
+        download stage when installing packages, so the value of initial_download will generally be True.
+        However, the parameter value allows this class to also be used in the second stage of the installation,
+        although it may never be necessary.  If initial_download is True, the recipe steps will be filtered
+        and returned and the installation directory (i.e., dir) will be defined and returned.  If we're not
+        in the initial download stage, these actions will not occur, and None values will be returned for them.
+        """
+        # <action type="setup_python_environment">
+        #       <repository name="package_python_2_7" owner="bgruening">
+        #           <package name="python" version="2.7" />
+        #       </repository>
+        #       <!-- allow downloading and installing a Python package from https://pypi.python.org/ -->
+        #       <package>pysam.tar.gz</package>
+        #       <package sha256sum="some_hash">http://url-to-some-python-package.de/pysam.tar.gz</package>
+        # </action>
+        dir = None
+        if initial_download:
+            filtered_actions = actions[ 1: ]
+        env_shell_file_paths = action_dict.get( 'env_shell_file_paths', None )
+        if env_shell_file_paths is None:
+            log.debug( 'Missing Python environment; make sure the specified Python installation exists.' )
+            if initial_download:
+                return tool_dependency, filtered_actions, dir
+            return tool_dependency, None, None
+        else:
+            install_environment.add_env_shell_file_paths( env_shell_file_paths )
+        log.debug( 'Handling setup_python_environment for tool dependency %s with install_environment.env_shell_file_paths:\n%s' %
+                   ( str( tool_dependency.name ), str( install_environment.env_shell_file_paths ) ) )
+        dir = os.path.curdir
+        current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+        with lcd( current_dir ):
+            with settings( warn_only=True ):
+                python_package_tups = action_dict.get( 'python_package_tups', [] )
+                for python_package_tup_dict in python_package_tups:
+                    python_package_tup = python_package_tup_dict['package']
+                    package, package_version = python_package_tup
+                    package_path = os.path.join( install_environment.tool_shed_repository_install_dir, package )
+                    if os.path.isfile( package_path ):
+                        # We assume a locally shipped Python package.
+                        package_to_install = package_path
+                    elif package.find( '://' ) != -1:
+                        # We assume a URL to a python package.
+                        url = package
+                        package_name = url.split( '/' )[ -1 ]
+                        checksums = python_package_tup_dict.get('checksums', {})
+                        self.url_download( work_dir, package_name, url, extract=False, checksums=checksums )
+                        package_to_install = os.path.join( work_dir, package_name )
+                    else:
+                        # PyPI support is currently not working - pip cannot install wheels into
+                        # user-specified directories.  Skip this package, since there is neither a
+                        # local file nor a URL to install from (this also avoids referencing an
+                        # unbound package_to_install below).
+                        log.debug( 'Skipping Python package %s; only local files and URLs are currently supported.' % str( package ) )
+                        continue
+                    archive = CompressedFile(package_to_install)
+                    uncompressed_path = archive.extract( work_dir )
+                    cmd = r'''PATH=$PYTHONHOME/bin:$PATH; export PATH;
+                            mkdir -p $INSTALL_DIR/lib/python;
+                            export PYTHONPATH=$INSTALL_DIR/lib/python:$PYTHONPATH;
+                            cd %s;
+                            python setup.py install --install-lib $INSTALL_DIR/lib/python --install-scripts $INSTALL_DIR/bin
+                        ''' % ( uncompressed_path )
+
+                    cmd = install_environment.build_command( basic_util.evaluate_template( cmd, install_environment ) )
+                    return_code = install_environment.handle_command( tool_dependency=tool_dependency,
+                                                                      cmd=cmd,
+                                                                      return_output=False,
+                                                                      job_name=package_name )
+                    if return_code:
+                        if initial_download:
+                            return tool_dependency, filtered_actions, dir
+                        return tool_dependency, None, None
+                # Pull in python dependencies (runtime).
+                env_file_builder.handle_action_shell_file_paths( action_dict )
+                env_file_builder.append_line( name="PYTHONPATH",
+                                              action="prepend_to",
+                                              value=os.path.join( install_environment.install_dir, 'lib', 'python') )
+                env_file_builder.append_line( name="PYTHONPATH",
+                                              action="prepend_to",
+                                              value=os.path.join( install_environment.install_dir) )
+                env_file_builder.append_line( name="PATH",
+                                              action="prepend_to",
+                                              value=os.path.join( install_environment.install_dir, 'bin' ) )
+                return_code = env_file_builder.return_code
+                if return_code:
+                    if initial_download:
+                        return tool_dependency, filtered_actions, dir
+                    return tool_dependency, None, None
+        if initial_download:
+            return tool_dependency, filtered_actions, dir
+        return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # Set up a Python environment.
+        # <action type="setup_python_environment">
+        #       <repository name="package_python_2_7" owner="bgruening">
+        #           <package name="python" version="2.7" />
+        #       </repository>
+        #       <!-- allow downloading and installing a Python package from https://pypi.org/ -->
+        #       <package>pysam.tar.gz</package>
+        #       <package>http://url-to-some-python-package.de/pysam.tar.gz</package>
+        # </action>
+        # Discover all child repository dependency tags and define the path to an env.sh file
+        # associated with each repository.  This will potentially update the value of the
+        # 'env_shell_file_paths' entry in action_dict.
+        all_env_shell_file_paths = []
+        env_manager = EnvManager( self.app )
+        action_dict = env_manager.get_env_shell_file_paths_from_setup_environment_elem( all_env_shell_file_paths,
+                                                                                        action_elem,
+                                                                                        action_dict )
+        python_package_tups = []
+        for env_elem in action_elem:
+            if env_elem.tag == 'package':
+                # Valid package definitions can be:
+                #    pysam.tar.gz -> locally shipped tarball
+                #    ftp://ftp.gruening.de/pysam.tar.gz -> online tarball
+                python_token = env_elem.text.strip().split( '=' )
+                if len( python_token ) == 2:
+                    # version string
+                    package_name = python_token[ 0 ]
+                    package_version = python_token[ 1 ]
+                    tup = ( package_name, package_version )
+                else:
+                    # package name for pypi.org without version number
+                    package = env_elem.text.strip()
+                    tup = ( package, None )
+                python_package_tups.append( dict( package=tup,
+                                                  checksums=self.get_elem_checksums( env_elem ) ) )
+        if python_package_tups:
+            action_dict[ 'python_package_tups' ] = python_package_tups
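+            # For illustration, the package definitions above parse to ( package, version ) tuples, e.g.:
+            #     { 'package': ( 'pysam.tar.gz', None ), 'checksums': {} }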
+        action_dict.update( self.get_elem_checksums( action_elem ) )
+        return action_dict
+
+
+class SetupVirtualEnv( Download, RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'setup_virtualenv'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Initialize a virtual environment for installing packages.  If initial_download is True, the recipe
+        steps will be filtered and returned and the installation directory (i.e., dir) will be defined and
+        returned.  If we're not in the initial download stage, these actions will not occur, and None values
+        will be returned for them.
+        """
+        # This class is not currently used during stage 1 of the installation process, so filtered_actions
+        # is not affected, and dir is not set.  Enhancements can easily be made to this function if this
+        # class is needed in stage 1.
+        venv_src_directory = os.path.abspath( os.path.join( self.app.config.tool_dependency_dir, '__virtualenv_src' ) )
+        if not self.install_virtualenv( install_environment, venv_src_directory ):
+            log.debug( 'Unable to install virtualenv' )
+            return tool_dependency, None, None
+        requirements = action_dict[ 'requirements' ]
+        if os.path.exists( os.path.join( install_environment.install_dir, requirements ) ):
+            # requirements specified as path to a file
+            requirements_path = requirements
+        else:
+            # The requirements are specified directly in the XML, so write them to a file for pip.
+            requirements_path = os.path.join( install_environment.install_dir, "requirements.txt" )
+            with open( requirements_path, "w" ) as f:
+                f.write( requirements )
+        venv_directory = os.path.join( install_environment.install_dir, "venv" )
+        python_cmd = action_dict[ 'python' ]
+        # TODO: Consider making --no-site-packages optional.
+        setup_command = "%s %s/virtualenv.py --no-site-packages '%s'" % ( python_cmd, venv_src_directory, venv_directory )
+        # POSIXLY_CORRECT forces the shell commands . and source to have the same,
+        # well-defined behavior in bash/zsh.
+        activate_command = "POSIXLY_CORRECT=1; . %s" % os.path.join( venv_directory, "bin", "activate" )
+        if action_dict[ 'use_requirements_file' ]:
+            install_command = "python '%s' install -r '%s' --log '%s'" % \
+                ( os.path.join( venv_directory, "bin", "pip" ),
+                  requirements_path,
+                  os.path.join( install_environment.install_dir, 'pip_install.log' ) )
+        else:
+            install_command = ''
+            with open( requirements_path, "rb" ) as f:
+                while True:
+                    line = f.readline()
+                    if not line:
+                        break
+                    line = line.strip()
+                    if line:
+                        line_install_command = "python '%s' install %s --log '%s'" % \
+                            ( os.path.join( venv_directory, "bin", "pip" ),
+                              line,
+                              os.path.join( install_environment.install_dir, 'pip_install_%s.log' % ( line ) ) )
+                        if not install_command:
+                            install_command = line_install_command
+                        else:
+                            install_command = "%s && %s" % ( install_command, line_install_command )
+        full_setup_command = "%s; %s; %s" % ( setup_command, activate_command, install_command )
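+        # A hedged sketch of the composed command (all paths are hypothetical):
+        #     python /deps/__virtualenv_src/virtualenv.py --no-site-packages '/deps/pkg/venv';
+        #     POSIXLY_CORRECT=1; . /deps/pkg/venv/bin/activate;
+        #     python '/deps/pkg/venv/bin/pip' install -r '/deps/pkg/requirements.txt' --log '/deps/pkg/pip_install.log'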
+        return_code = install_environment.handle_command( tool_dependency=tool_dependency,
+                                                          cmd=full_setup_command,
+                                                          return_output=False,
+                                                          job_name=package_name)
+        if return_code:
+            log.error( "Failed to do setup_virtualenv install, exit code='%s'", return_code )
+            # Would it be better to try to set env variables anyway, instead of returning here?
+            return tool_dependency, None, None
+        site_packages_directory, site_packages_directory_list = \
+            self.__get_site_packages_directory( install_environment,
+                                                self.app,
+                                                tool_dependency,
+                                                python_cmd,
+                                                venv_directory )
+        env_file_builder.append_line( name="PATH", action="prepend_to", value=os.path.join( venv_directory, "bin" ) )
+        if site_packages_directory is None:
+            log.error( "virtualenv's site-packages directory '%s' does not exist", site_packages_directory_list )
+        else:
+            env_file_builder.append_line( name="PYTHONPATH", action="prepend_to", value=site_packages_directory )
+        # The caller should check the status of the returned tool_dependency since this function does nothing
+        # with the return_code.
+        return_code = env_file_builder.return_code
+        return tool_dependency, None, None
+
+    def install_virtualenv( self, install_environment, venv_dir ):
+        if not os.path.exists( venv_dir ):
+            with install_environment.use_tmp_dir() as work_dir:
+                downloaded_filename = VIRTUALENV_URL.rsplit('/', 1)[-1]
+                try:
+                    dir = self.url_download( work_dir, downloaded_filename, VIRTUALENV_URL )
+                except Exception:
+                    log.error( "Failed to download virtualenv: url_download( '%s', '%s', '%s' ) threw an exception",
+                               work_dir, downloaded_filename, VIRTUALENV_URL )
+                    return False
+                full_path_to_dir = os.path.abspath( os.path.join( work_dir, dir ) )
+                shutil.move( full_path_to_dir, venv_dir )
+        return True
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="setup_virtualenv" />
+        # Installs requirements from the file requirements.txt in the downloaded bundle - or -
+        # <action type="setup_virtualenv">tools/requirements.txt</action>
+        # Installs requirements from the specified file in the downloaded bundle - or -
+        # <action type="setup_virtualenv">pyyaml==3.2.0
+        # lxml==2.3.0</action>
+        # Manually specifies the contents of a requirements.txt file that is created dynamically.
+        action_dict[ 'use_requirements_file' ] = asbool( action_elem.get( 'use_requirements_file', True ) )
+        action_dict[ 'requirements' ] = basic_util.evaluate_template( action_elem.text or 'requirements.txt', install_environment )
+        action_dict[ 'python' ] = action_elem.get( 'python', 'python' )
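+        # For illustration, <action type="setup_virtualenv">pyyaml==3.2.0</action> would yield:
+        #     { 'use_requirements_file': True, 'requirements': 'pyyaml==3.2.0', 'python': 'python' }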
+        return action_dict
+
+    def __get_site_packages_directory( self, install_environment, app, tool_dependency, python_cmd, venv_directory ):
+        lib_dir = os.path.join( venv_directory, "lib" )
+        rval = os.path.join( lib_dir, python_cmd, 'site-packages' )
+        site_packages_directory_list = [ rval ]
+        if os.path.exists( rval ):
+            return ( rval, site_packages_directory_list )
+        for ( dirpath, dirnames, filenames ) in os.walk( lib_dir ):
+            for dirname in dirnames:
+                rval = os.path.join( lib_dir, dirname, 'site-packages' )
+                site_packages_directory_list.append( rval )
+                if os.path.exists( rval ):
+                    return ( rval, site_packages_directory_list )
+            break
+        # Fall back to asking Python itself for the site-packages directory.
+        # FIXME: This is probably more robust, but there is currently an issue with handling the output.stdout
+        # that prevents the entire path from being included (it gets truncated).
+        # Use raw strings so that python won't automatically unescape the quotes before passing the command
+        # to subprocess.Popen.
+        for site_packages_command in [ r"""%s -c 'import site; print( site.getsitepackages()[0] )'""" %
+                                       os.path.join( venv_directory, "bin", "python" ),
+                                       r"""%s -c 'import os, sys; print os.path.join( sys.prefix, "lib", "python" + sys.version[:3], "site-packages" )'""" %
+                                       os.path.join( venv_directory, "bin", "python" ) ]:
+            output = install_environment.handle_command( tool_dependency=tool_dependency,
+                                                         cmd=site_packages_command,
+                                                         return_output=True,
+                                                         job_name='_get_site_packages' )
+            site_packages_directory_list.append( output.stdout )
+            if not output.return_code and os.path.exists( output.stdout ):
+                return ( output.stdout, site_packages_directory_list )
+        return ( None, site_packages_directory_list )
+
+
+class ShellCommand( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'shell_command'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Execute a command in a shell.  If the value of initial_download is True, the recipe steps will
+        be filtered and returned and the installation directory (i.e., dir) will be defined and returned.
+        If we're not in the initial download stage, these actions will not occur, and None values will
+        be returned for them.
+        """
+        # <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
+        # Eliminate the shell_command clone action so remaining actions can be processed correctly.
+        if initial_download:
+            # I'm not sure why we build the cmd differently in stage 1 vs stage 2.  Should this process
+            # be the same no matter the stage?
+            dir = package_name
+            filtered_actions = actions[ 1: ]
+            cmd = action_dict[ 'command' ]
+        else:
+            cmd = install_environment.build_command( action_dict[ 'command' ] )
+        with settings( warn_only=True ):
+            # The caller should check the status of the returned tool_dependency since this function
+            # does nothing with return_code.
+            install_environment.handle_command( tool_dependency=tool_dependency,
+                                                cmd=cmd,
+                                                return_output=False,
+                                                job_name=package_name )
+            if initial_download:
+                return tool_dependency, filtered_actions, dir
+            return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # <action type="shell_command">make</action>
+        action_elem_text = basic_util.evaluate_template( action_elem.text, install_environment )
+        if action_elem_text:
+            action_dict[ 'command' ] = action_elem_text
+        return action_dict
+
+
+class TemplateCommand( RecipeStep ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.type = 'template_command'
+
+    def execute_step( self, tool_dependency, package_name, actions, action_dict, filtered_actions, env_file_builder,
+                      install_environment, work_dir, current_dir=None, initial_download=False ):
+        """
+        Execute a template command in a shell.  If the value of initial_download is True, the recipe steps
+        will be filtered and returned and the installation directory (i.e., dir) will be defined and returned.
+        If we're not in the initial download stage, these actions will not occur, and None values will be
+        returned for them.
+        """
+        env_vars = install_environment.environment_dict()
+        env_vars.update( basic_util.get_env_var_values( install_environment ) )
+        language = action_dict[ 'language' ]
+        with settings( warn_only=True, **env_vars ):
+            if language == 'cheetah':
+                # We need to import fabric.api.env so that we can access all collected environment variables.
+                cmd = fill_template( '#from fabric.api import env\n%s' % action_dict[ 'command' ], context=env_vars )
+                # The caller should check the status of the returned tool_dependency since this function
+                # does nothing with return_code.
+                install_environment.handle_command( tool_dependency=tool_dependency,
+                                                    cmd=cmd,
+                                                    return_output=False,
+                                                    job_name=package_name )
+            return tool_dependency, None, None
+
+    def prepare_step( self, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+        # Default to Cheetah as it's the first template language supported.
+        language = action_elem.get( 'language', 'cheetah' ).lower()
+        if language == 'cheetah':
+            # Cheetah template syntax.
+            # <action type="template_command" language="cheetah">
+            #     #if env.PATH:
+            #         make
+            #     #end if
+            # </action>
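+            # For illustration: with PATH present in the collected environment, fill_template
+            # (with '#from fabric.api import env' prepended) renders the template above to the
+            # shell command 'make', modulo whitespace; with PATH unset it renders to an empty command.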
+            action_elem_text = action_elem.text.strip()
+            if action_elem_text:
+                action_dict[ 'language' ] = language
+                action_dict[ 'command' ] = action_elem_text
+        else:
+            log.debug( "Unsupported template language '%s'. Not proceeding." % str( language ) )
+            raise Exception( "Unsupported template language '%s' in tool dependency definition." % str( language ) )
+        return action_dict
diff --git a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py
new file mode 100644
index 0000000..8265b9b
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/tag_handler.py
@@ -0,0 +1,636 @@
+import logging
+import os
+import tempfile
+
+from galaxy.tools.deps.resolvers import NullDependency
+from galaxy.util import listify, url_get
+from tool_shed.galaxy_install.tool_dependencies.env_manager import EnvManager
+from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder
+from tool_shed.galaxy_install.tool_dependencies.recipe.install_environment import InstallEnvironment
+from tool_shed.util import basic_util
+from tool_shed.util import common_util
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_util
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class RecipeTag( object ):
+    """Abstract class that defines a standard format for handling recipe tags when installing packages."""
+
+    def process_tag_set( self, tool_shed_repository, tool_dependency, package_elem, package_name, package_version,
+                         from_tool_migration_manager=False, tool_dependency_db_records=None ):
+        raise Exception( "Unimplemented Method" )
+
+
+class SyncDatabase( object ):
+
+    def sync_database_with_file_system( self, app, tool_shed_repository, tool_dependency_name, tool_dependency_version,
+                                        tool_dependency_install_dir, tool_dependency_type='package' ):
+        """
+        The installation directory defined by the received tool_dependency_install_dir exists, so check for
+        the presence of INSTALLATION_LOG.  If the file exists, we'll assume the tool dependency is installed,
+        but not necessarily successfully (it could be in an error state on disk).  However, we can justifiably
+        assume here that no matter the state, an associated database record will exist.
+        """
+        # This method should be reached very rarely.  It implies that either the Galaxy environment
+        # became corrupted (i.e., the database records for installed tool dependencies are not synchronized
+        # with tool dependencies on disk) or the Tool Shed's install and test framework is running.  The Tool
+        # Shed's install and test framework installs repositories in 2 stages, those of type tool_dependency_definition
+        # followed by those containing valid tools and tool functional test components.
+        log.debug( "Synchronizing the database with the file system..." )
+        try:
+            log.debug( "The value of app.config.running_functional_tests is: %s" %
+                str( app.config.running_functional_tests ) )
+        except Exception:
+            pass
+        sa_session = app.install_model.context
+        can_install_tool_dependency = False
+        tool_dependency = \
+            tool_dependency_util.get_tool_dependency_by_name_version_type_repository( app,
+                                                                                      tool_shed_repository,
+                                                                                      tool_dependency_name,
+                                                                                      tool_dependency_version,
+                                                                                      tool_dependency_type )
+        if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLING:
+            # The tool dependency is in an Installing state, so we don't want to do anything to it.  If the tool
+            # dependency is being installed by someone else, we don't want to interfere with that.  This assumes
+            # the installation by "someone else" is not hung in an Installing state, which is a weakness if that
+            # "someone else" never repaired it.
+            log.debug( 'Skipping installation of tool dependency %s version %s because it has a status of %s' %
+                ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency.status ) ) )
+        else:
+            # We have a pre-existing installation directory on the file system, but our associated database record is
+            # in a state that allowed us to arrive here.  At this point, we'll inspect the installation directory to
+            # see if we have a "proper installation" and if so, synchronize the database record rather than reinstalling
+            # the dependency if we're "running_functional_tests".  If we're not "running_functional_tests, we'll set
+            # the tool dependency's installation status to ERROR.
+            tool_dependency_installation_directory_contents = os.listdir( tool_dependency_install_dir )
+            if basic_util.INSTALLATION_LOG in tool_dependency_installation_directory_contents:
+                # Since this tool dependency's installation directory contains an installation log, we consider it to be
+                # installed.  In some cases the record may be missing from the database due to some activity outside of
+                # the control of the Tool Shed.  Since a new record was created for it and we don't know the state of the
+                # files on disk, we will set it to an error state (unless we are running Tool Shed functional tests - see
+                # below).
+                log.debug( 'Skipping installation of tool dependency %s version %s because it is installed in %s' %
+                    ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency_install_dir ) ) )
+                if app.config.running_functional_tests:
+                    # If we are running functional tests, the state will be set to Installed because previously compiled
+                    # tool dependencies are not deleted by default by the "install and test" framework.
+                    tool_dependency.status = app.install_model.ToolDependency.installation_status.INSTALLED
+                else:
+                    error_message = 'The installation directory for this tool dependency had contents but the database had no record. '
+                    error_message += 'The installation log may show this tool dependency to be correctly installed, but due to the '
+                    error_message += 'missing database record it is now being set to Error.'
+                    tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR
+                    tool_dependency.error_message = error_message
+            else:
+                error_message = '\nInstallation path %s for tool dependency %s version %s exists, but the expected file %s' % \
+                    ( str( tool_dependency_install_dir ),
+                      str( tool_dependency_name ),
+                      str( tool_dependency_version ),
+                      str( basic_util.INSTALLATION_LOG ) )
+                error_message += ' is missing.  This indicates an installation error, so the tool dependency is being'
+                error_message += ' prepared for re-installation.'
+                log.error( error_message )
+                tool_dependency.status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED
+                basic_util.remove_dir( tool_dependency_install_dir )
+                can_install_tool_dependency = True
+            sa_session.add( tool_dependency )
+            sa_session.flush()
+        try:
+            log.debug( "Returning from sync_database_with_file_system with tool_dependency %s, can_install_tool_dependency %s." %
+                ( str( tool_dependency.name ), str( can_install_tool_dependency ) ) )
+        except Exception as e:
+            log.debug( str( e ) )
+        return tool_dependency, can_install_tool_dependency
+
+
+class Install( RecipeTag, SyncDatabase ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.tag = 'install'
+
+    def process_tag_set( self, tool_shed_repository, tool_dependency, package_elem, package_name, package_version,
+                         from_tool_migration_manager=False, tool_dependency_db_records=None ):
+        # <install version="1.0">
+        # Get the installation directory for tool dependencies that will be installed for the received tool_shed_repository.
+        actions_elem_tuples = []
+        proceed_with_install = False
+        install_dir = \
+            tool_dependency_util.get_tool_dependency_install_dir( app=self.app,
+                                                                  repository_name=tool_shed_repository.name,
+                                                                  repository_owner=tool_shed_repository.owner,
+                                                                  repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+                                                                  tool_dependency_type='package',
+                                                                  tool_dependency_name=package_name,
+                                                                  tool_dependency_version=package_version )
+        if os.path.exists( install_dir ):
+            # The tool_migration_manager handles tool migration stages and the sync_database_with_file_system()
+            # method handles two scenarios: (1) where a Galaxy file system environment related to installed
+            # Tool Shed repositories and tool dependencies has somehow gotten out of sync with the Galaxy
+            # database tables associated with these installed items, and (2) the Tool Shed's install and test
+            # framework which installs repositories in 2 stages, those of type tool_dependency_definition
+            # followed by those containing valid tools and tool functional test components.  Neither of these
+            # scenarios apply when the install manager is running.
+            if from_tool_migration_manager:
+                proceed_with_install = True
+            else:
+                # Notice that we'll throw away the following tool_dependency if it can be installed.
+                tool_dependency, proceed_with_install = self.sync_database_with_file_system( self.app,
+                                                                                             tool_shed_repository,
+                                                                                             package_name,
+                                                                                             package_version,
+                                                                                             install_dir,
+                                                                                             tool_dependency_type='package' )
+                if not proceed_with_install:
+                    log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), so returning it." %
+                        ( str( tool_dependency.name ), str( tool_dependency.version ) ) )
+                    return tool_dependency, proceed_with_install, actions_elem_tuples
+        else:
+            proceed_with_install = True
+        if proceed_with_install:
+            package_install_version = package_elem.get( 'version', '1.0' )
+            status = self.app.install_model.ToolDependency.installation_status.INSTALLING
+            tool_dependency = \
+                tool_dependency_util.create_or_update_tool_dependency( app=self.app,
+                                                                       tool_shed_repository=tool_shed_repository,
+                                                                       name=package_name,
+                                                                       version=package_version,
+                                                                       type='package',
+                                                                       status=status,
+                                                                       set_status=True )
+            # Get the information about the current platform in case the tool dependency definition includes tag sets
+            # for installing compiled binaries.
+            platform_info_dict = tool_dependency_util.get_platform_info_dict()
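+            # platform_info_dict is expected to hold keys such as "os" and "architecture",
+            # e.g. {'os': 'linux', 'architecture': 'x86_64'} (illustrative values).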
+            if package_install_version == '1.0':
+                # Handle tool dependency installation using a fabric method included in the Galaxy framework.
+                actions_elem_tuples = tool_dependency_util.parse_package_elem( package_elem,
+                                                                               platform_info_dict=platform_info_dict,
+                                                                               include_after_install_actions=True )
+                if not actions_elem_tuples:
+                    proceed_with_install = False
+                    error_message = 'Version %s of the %s package cannot be installed because ' % ( str( package_version ), str( package_name ) )
+                    error_message += 'the recipe for installing the package is missing either an <actions> tag set or an <actions_group> '
+                    error_message += 'tag set.'
+                    # Since there was an installation error, update the tool dependency status to Error.
+                    # The remove_installation_path option must be left False here.
+                    tool_dependency = tool_dependency_util.set_tool_dependency_attributes(self.app,
+                                                                                          tool_dependency=tool_dependency,
+                                                                                          status=self.app.install_model.ToolDependency.installation_status.ERROR,
+                                                                                          error_message=error_message)
+            else:
+                raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' )
+        return tool_dependency, proceed_with_install, actions_elem_tuples
+
+
+class Package( RecipeTag ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.tag = 'package'
+
+    def process_tag_set( self, tool_shed_repository, tool_dependency, package_elem, package_name, package_version,
+                         from_tool_migration_manager=False, tool_dependency_db_records=None ):
+        action_elem_tuples = []
+        proceed_with_install = False
+        # Only install the tool_dependency if it is not already installed and it is associated with a database
+        # record in the received tool_dependencies.
+        if package_name and package_version:
+            dependencies_ignored = not self.app.toolbox.dependency_manager.uses_tool_shed_dependencies()
+            if dependencies_ignored:
+                log.debug( "Skipping installation of tool dependency package %s because tool shed dependency resolver not enabled." %
+                    str( package_name ) )
+                # Tool dependency resolves have been configured and they do not include the tool shed. Do not install package.
+                dep = self.app.toolbox.dependency_manager.find_dep( package_name, package_version, type='package')
+                if not isinstance( dep, NullDependency ):
+                    # TODO: Do something here such as marking it installed or configured externally.
+                    pass
+                tool_dependency = \
+                    tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                         tool_dependency=tool_dependency,
+                                                                         status=self.app.install_model.ToolDependency.installation_status.ERROR )
+            else:
+                proceed_with_install = True
+        return tool_dependency, proceed_with_install, action_elem_tuples
+
+
+class ReadMe( RecipeTag ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.tag = 'readme'
+
+    def process_tag_set( self, tool_shed_repository, tool_dependency, package_elem, package_name, package_version,
+                         from_tool_migration_manager=False, tool_dependency_db_records=None ):
+        # Nothing to be done.
+        action_elem_tuples = []
+        proceed_with_install = False
+        return tool_dependency, proceed_with_install, action_elem_tuples
+
+
+class Repository( RecipeTag, SyncDatabase ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.tag = 'repository'
+
+    def create_temporary_tool_dependencies_config( self, tool_shed_url, name, owner, changeset_revision ):
+        """Make a call to the tool shed to get the required repository's tool_dependencies.xml file."""
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
+        if tool_shed_url is None or name is None or owner is None or changeset_revision is None:
+            message = "Unable to retrieve required tool_dependencies.xml file from the Tool Shed because one or more of the "
+            message += "following required parameters is None: tool_shed_url: %s, name: %s, owner: %s, changeset_revision: %s " % \
+                ( str( tool_shed_url ), str( name ), str( owner ), str( changeset_revision ) )
+            raise Exception( message )
+        params = dict( name=name,
+                       owner=owner,
+                       changeset_revision=changeset_revision )
+        pathspec = [ 'repository', 'get_tool_dependencies_config_contents' ]
+        text = url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
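+        # The request above resolves to a URL of roughly this form (illustrative):
+        #   <tool_shed_url>/repository/get_tool_dependencies_config_contents?name=<name>&owner=<owner>&changeset_revision=<rev>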
+        if text:
+            # Write the contents to a temporary file on disk so it can be reloaded and parsed.
+            fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-cttdc"  )
+            tmp_filename = fh.name
+            fh.close()
+            fh = open( tmp_filename, 'wb' )
+            fh.write( text )
+            fh.close()
+            return tmp_filename
+        else:
+            message = "Unable to retrieve required tool_dependencies.xml file from the Tool Shed for revision "
+            message += "%s of installed repository %s owned by %s." % ( str( changeset_revision ), str( name ), str( owner ) )
+            raise Exception( message )
+
+    def create_tool_dependency_with_initialized_env_sh_file( self, dependent_install_dir, tool_shed_repository,
+                                                             required_repository, package_name, package_version,
+                                                             tool_dependencies_config ):
+        """
+        Create or get a tool_dependency record that is defined by the received package_name and package_version.
+        An env.sh file will be created for the tool_dependency in the received dependent_install_dir.
+        """
+        # The received required_repository refers to a tool_shed_repository record that is defined as a complex
+        # repository dependency for this tool_dependency.  The required_repository may or may not be currently
+        # installed (it doesn't matter).  If it is installed, it is associated with a tool_dependency that has
+        # an env.sh file that this new tool_dependency must be able to locate and "source".  If it is not installed,
+        # we can still determine where that env.sh file will be, so we'll initialize this new tool_dependency's env.sh
+        # file in either case.  If the required repository ends up with an installation error, this new tool
+        # dependency will still be fine because its containing repository will be defined as missing dependencies.
+        tool_dependencies = []
+        if not os.path.exists( dependent_install_dir ):
+            os.makedirs( dependent_install_dir )
+        required_tool_dependency_env_file_path = None
+        if tool_dependencies_config:
+            required_td_tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+            if required_td_tree:
+                required_td_root = required_td_tree.getroot()
+                for required_td_elem in required_td_root:
+                    # Find the appropriate package name and version.
+                    if required_td_elem.tag == 'package':
+                        # <package name="bwa" version="0.5.9">
+                        required_td_package_name = required_td_elem.get( 'name', None )
+                        required_td_package_version = required_td_elem.get( 'version', None )
+                        # Check the database to see if we have a record for the required tool dependency (we may not, which is ok).  If we
+                        # find a record, we need to see if it is in an error state and, if so, handle it appropriately.
+                        required_tool_dependency = \
+                            tool_dependency_util.get_tool_dependency_by_name_version_type_repository( self.app,
+                                                                                                      required_repository,
+                                                                                                      required_td_package_name,
+                                                                                                      required_td_package_version,
+                                                                                                      'package' )
+                        if required_td_package_name == package_name and required_td_package_version == package_version:
+                            # Get or create a database tool_dependency record with which the installed package on disk will be associated.
+                            tool_dependency = \
+                                tool_dependency_util.create_or_update_tool_dependency( app=self.app,
+                                                                                       tool_shed_repository=tool_shed_repository,
+                                                                                       name=package_name,
+                                                                                       version=package_version,
+                                                                                       type='package',
+                                                                                       status=self.app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                                                                       set_status=True )
+                            # Create an env.sh file for the tool_dependency whose first line will source the env.sh file located in
+                            # the path defined by required_tool_dependency_env_file_path.  It doesn't matter if the required env.sh
+                            # file currently exists.
+                            required_tool_dependency_env_file_path = \
+                                self.get_required_repository_package_env_sh_path( package_name,
+                                                                                  package_version,
+                                                                                  required_repository )
+                            env_file_builder = EnvFileBuilder( tool_dependency.installation_directory( self.app ) )
+                            env_file_builder.append_line( action="source", value=required_tool_dependency_env_file_path )
+                            return_code = env_file_builder.return_code
+                            if return_code:
+                                error_message = 'Error defining env.sh file for package %s, return_code: %s' % \
+                                    ( str( package_name ), str( return_code ) )
+                                tool_dependency = \
+                                    tool_dependency_util.set_tool_dependency_attributes(self.app,
+                                                                                        tool_dependency=tool_dependency,
+                                                                                        status=self.app.install_model.ToolDependency.installation_status.ERROR,
+                                                                                        error_message=error_message)
+                            elif required_tool_dependency is not None and required_tool_dependency.in_error_state:
+                                error_message = "This tool dependency's required tool dependency %s version %s has status %s." % \
+                                    ( str( required_tool_dependency.name ), str( required_tool_dependency.version ), str( required_tool_dependency.status ) )
+                                tool_dependency = \
+                                    tool_dependency_util.set_tool_dependency_attributes(self.app,
+                                                                                        tool_dependency=tool_dependency,
+                                                                                        status=self.app.install_model.ToolDependency.installation_status.ERROR,
+                                                                                        error_message=error_message)
+                            else:
+                                tool_dependency = \
+                                    tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                         tool_dependency=tool_dependency,
+                                                                                         status=self.app.install_model.ToolDependency.installation_status.INSTALLED )
+                            tool_dependencies.append( tool_dependency )
+        return tool_dependencies
+
+    def get_required_repository_package_env_sh_path( self, package_name, package_version, required_repository ):
+        """Return path to env.sh file in required repository if the required repository has been installed."""
+        env_sh_file_dir = \
+            tool_dependency_util.get_tool_dependency_install_dir( app=self.app,
+                                                                  repository_name=required_repository.name,
+                                                                  repository_owner=required_repository.owner,
+                                                                  repository_changeset_revision=required_repository.installed_changeset_revision,
+                                                                  tool_dependency_type='package',
+                                                                  tool_dependency_name=package_name,
+                                                                  tool_dependency_version=package_version )
+        env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
+        return env_sh_file_path
+
+    def handle_complex_repository_dependency_for_package( self, elem, package_name, package_version, tool_shed_repository,
+                                                          from_tool_migration_manager=False ):
+        """
+        Inspect the repository defined by a complex repository dependency definition and take certain steps to
+        enable installation of the received package name and version to proceed.  The received elem is the
+        <repository> tag set which defines the complex repository dependency.  The received tool_shed_repository
+        is the installed tool shed repository for which the tool dependency defined by the received package_name
+        and package_version is being installed.
+        """
+        handled_tool_dependencies = []
+        tool_shed_url = elem.attrib[ 'toolshed' ]
+        required_repository_name = elem.attrib[ 'name' ]
+        required_repository_owner = elem.attrib[ 'owner' ]
+        default_required_repository_changeset_revision = elem.attrib[ 'changeset_revision' ]
+        required_repository = repository_util.get_repository_for_dependency_relationship( self.app, tool_shed_url,
+                                                                                          required_repository_name,
+                                                                                          required_repository_owner,
+                                                                                          default_required_repository_changeset_revision )
+        tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url )
+        tmp_filename = None
+        if required_repository:
+            required_repository_changeset_revision = required_repository.installed_changeset_revision
+            # Define the installation directory for the required tool dependency package in the required repository.
+            required_repository_package_install_dir = \
+                tool_dependency_util.get_tool_dependency_install_dir( app=self.app,
+                                                                      repository_name=required_repository_name,
+                                                                      repository_owner=required_repository_owner,
+                                                                      repository_changeset_revision=required_repository_changeset_revision,
+                                                                      tool_dependency_type='package',
+                                                                      tool_dependency_name=package_name,
+                                                                      tool_dependency_version=package_version )
+            # Define this dependent repository's tool dependency installation directory that will contain
+            # the env.sh file with a path to the required repository's installed tool dependency package.
+            dependent_install_dir = \
+                tool_dependency_util.get_tool_dependency_install_dir( app=self.app,
+                                                                      repository_name=tool_shed_repository.name,
+                                                                      repository_owner=tool_shed_repository.owner,
+                                                                      repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+                                                                      tool_dependency_type='package',
+                                                                      tool_dependency_name=package_name,
+                                                                      tool_dependency_version=package_version )
+            if os.path.exists( dependent_install_dir ):
+                # The install manager handles tool migration stages and the sync_database_with_file_system()
+                # method handles two scenarios: (1) where a Galaxy file system environment related to installed
+                # Tool Shed repositories and tool dependencies has somehow gotten out of sync with the Galaxy
+                # database tables associated with these installed items, and (2) the Tool Shed's install and test
+                # framework which installs repositories in 2 stages, those of type tool_dependency_definition
+                # followed by those containing valid tools and tool functional test components.  Neither of these
+                # scenarios apply when the install manager is running.
+                if from_tool_migration_manager:
+                    can_install_tool_dependency = True
+                else:
+                    # Notice that we'll throw away the following tool_dependency if it can be installed.
+                    tool_dependency, can_install_tool_dependency = self.sync_database_with_file_system( self.app,
+                                                                                                        tool_shed_repository,
+                                                                                                        package_name,
+                                                                                                        package_version,
+                                                                                                        dependent_install_dir,
+                                                                                                        tool_dependency_type='package' )
+                    if not can_install_tool_dependency:
+                        log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), "
+                                   "so appending it to the list of handled tool dependencies.",
+                                   str( tool_dependency.name), str( tool_dependency.version ) )
+                        handled_tool_dependencies.append( tool_dependency )
+            else:
+                can_install_tool_dependency = True
+            if can_install_tool_dependency:
+                # Set this dependent repository's tool dependency env.sh file with a path to the required repository's
+                # installed tool dependency package.  We can get everything we need from the discovered installed
+                # required_repository.
+                if required_repository.is_deactivated_or_installed:
+                    if not os.path.exists( required_repository_package_install_dir ):
+                        log.error( 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir ) )
+                    repo_files_dir = required_repository.repo_files_directory( self.app )
+                    if not repo_files_dir:
+                        message = "Unable to locate the repository directory for revision %s of installed repository %s owned by %s." % \
+                            ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) )
+                        raise Exception( message )
+                    tool_dependencies_config = repository_util.get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' )
+                    if tool_dependencies_config:
+                        config_to_use = tool_dependencies_config
+                    else:
+                        message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." % \
+                            ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) )
+                        raise Exception( message )
+                else:
+                    # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision
+                    # should be updated if it's not current.
+                    text = metadata_util.get_updated_changeset_revisions_from_tool_shed( app=self.app,
+                                                                                         tool_shed_url=tool_shed,
+                                                                                         name=required_repository_name,
+                                                                                         owner=required_repository_owner,
+                                                                                         changeset_revision=required_repository_changeset_revision )
+                    if text:
+                        updated_changeset_revisions = listify( text )
+                        # The list of changeset revisions is in reverse order, so the newest will be first.
+                        required_repository_changeset_revision = updated_changeset_revisions[ 0 ]
+                    # Make a call to the tool shed to get the required repository's tool_dependencies.xml file.
+                    tmp_filename = self.create_temporary_tool_dependencies_config( tool_shed,
+                                                                                   required_repository_name,
+                                                                                   required_repository_owner,
+                                                                                   required_repository_changeset_revision )
+                    config_to_use = tmp_filename
+                handled_tool_dependencies = \
+                    self.create_tool_dependency_with_initialized_env_sh_file( dependent_install_dir=dependent_install_dir,
+                                                                              tool_shed_repository=tool_shed_repository,
+                                                                              required_repository=required_repository,
+                                                                              package_name=package_name,
+                                                                              package_version=package_version,
+                                                                              tool_dependencies_config=config_to_use )
+                self.remove_file( tmp_filename )
+        else:
+            message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." % \
+                ( str( required_repository_name ), str( required_repository_owner ), str( default_required_repository_changeset_revision ) )
+            raise Exception( message )
+        return handled_tool_dependencies
+
+    def process_tag_set( self, tool_shed_repository, tool_dependency, package_elem, package_name, package_version,
+                         from_tool_migration_manager=False, tool_dependency_db_records=None ):
+        # We have a complex repository dependency definition.
+        action_elem_tuples = []
+        proceed_with_install = False
+        rd_tool_dependencies = self.handle_complex_repository_dependency_for_package( package_elem,
+                                                                                      package_name,
+                                                                                      package_version,
+                                                                                      tool_shed_repository,
+                                                                                      from_tool_migration_manager=from_tool_migration_manager )
+        for rd_tool_dependency in rd_tool_dependencies:
+            if rd_tool_dependency.status == self.app.install_model.ToolDependency.installation_status.ERROR:
+                # We'll log the error here, but continue installing packages since some may not require this dependency.
+                log.error( "Error installing tool dependency for required repository: %s" % str( rd_tool_dependency.error_message ) )
+        return tool_dependency, proceed_with_install, action_elem_tuples
+
+    def remove_file( self, file_name ):
+        """Attempt to remove a file from disk."""
+        if file_name:
+            if os.path.exists( file_name ):
+                try:
+                    os.remove( file_name )
+                except OSError:
+                    pass
+
+
+class SetEnvironment( RecipeTag ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.tag = 'set_environment'
+
+    def process_tag_set( self, tool_shed_repository, tool_dependency, package_elem, package_name, package_version,
+                         from_tool_migration_manager=False, tool_dependency_db_records=None ):
+        # We need to handle two tag sets for package_elem here.  Either this:
+        # <set_environment version="1.0">
+        #    <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+        # </set_environment>
+        # or this:
+        # <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+        action_elem_tuples = []
+        proceed_with_install = False
+        if tool_dependency_db_records is None:
+            attr_tups_of_dependencies_for_install = []
+        else:
+            attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in tool_dependency_db_records ]
+        try:
+            self.set_environment( package_elem, tool_shed_repository, attr_tups_of_dependencies_for_install )
+        except Exception as e:
+            error_message = "Error setting environment for tool dependency: %s" % str( e )
+            log.debug( error_message )
+        return tool_dependency, proceed_with_install, action_elem_tuples
+
+    def set_environment( self, elem, tool_shed_repository, attr_tups_of_dependencies_for_install ):
+        """
+        Create a ToolDependency to set an environment variable.  This is different from the process used to
+        set an environment variable that is associated with a package.  An example entry in a tool_dependencies.xml
+        file is::
+
+            <set_environment version="1.0">
+                <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+            </set_environment>
+
+        This method must also handle the sub-element tag::
+
+            <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+        """
+        # TODO: Add support for a repository dependency definition within this tool dependency type's tag set, which
+        # would look something like the final example below.  See the implementation of support for this in the tool
+        # dependency package type's method above.
+        # This function is only called for set environment actions as defined below, not within an <install version="1.0"> tool
+        # dependency type. Here is an example of the tag set this function does handle:
+        # <action type="set_environment">
+        #     <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR</environment_variable>
+        # </action>
+        # Here is an example of the tag set this function does not handle:
+        # <set_environment version="1.0">
+        #    <repository toolshed="<tool shed>" name="<repository name>" owner="<repository owner>" changeset_revision="<changeset revision>" />
+        # </set_environment>
+        env_manager = EnvManager( self.app )
+        tool_dependencies = []
+        env_var_version = elem.get( 'version', '1.0' )
+        tool_shed_repository_install_dir = os.path.abspath( tool_shed_repository.repo_files_directory( self.app ) )
+        if elem.tag == 'environment_variable':
+            # <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+            elems = [ elem ]
+        else:
+            # <set_environment version="1.0">
+            #    <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+            # </set_environment>
+            elems = list( elem )
+        for env_var_elem in elems:
+            env_var_name = env_var_elem.get( 'name', None )
+            # The value of env_var_name must match the text value of at least 1 <requirement> tag in the
+            # tool config's <requirements> tag set whose "type" attribute is "set_environment" (e.g.,
+            # <requirement type="set_environment">R_SCRIPT_PATH</requirement>).
+            env_var_action = env_var_elem.get( 'action', None )
+            if env_var_name and env_var_action:
+                # Tool dependencies of type "set_environment" always have the version attribute set to None.
+                attr_tup = ( env_var_name, None, 'set_environment' )
+                if attr_tup in attr_tups_of_dependencies_for_install:
+                    install_dir = \
+                        tool_dependency_util.get_tool_dependency_install_dir( app=self.app,
+                                                                              repository_name=tool_shed_repository.name,
+                                                                              repository_owner=tool_shed_repository.owner,
+                                                                              repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+                                                                              tool_dependency_type='set_environment',
+                                                                              tool_dependency_name=env_var_name,
+                                                                              tool_dependency_version=None )
+                    install_environment = InstallEnvironment( app=self.app,
+                                                              tool_shed_repository_install_dir=tool_shed_repository_install_dir,
+                                                              install_dir=install_dir )
+                    env_var_dict = env_manager.create_env_var_dict( elem=env_var_elem,
+                                                                    install_environment=install_environment )
+                    if env_var_dict:
+                        if not os.path.exists( install_dir ):
+                            os.makedirs( install_dir )
+                        status = self.app.install_model.ToolDependency.installation_status.INSTALLING
+                        tool_dependency = \
+                            tool_dependency_util.create_or_update_tool_dependency( app=self.app,
+                                                                                   tool_shed_repository=tool_shed_repository,
+                                                                                   name=env_var_name,
+                                                                                   version=None,
+                                                                                   type='set_environment',
+                                                                                   status=status,
+                                                                                   set_status=True )
+                        if env_var_version == '1.0':
+                            # Create this tool dependency's env.sh file.
+                            env_file_builder = EnvFileBuilder( install_dir )
+                            return_code = env_file_builder.append_line( make_executable=True, **env_var_dict )
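+                            # For a "set_to" action the generated env.sh line should resemble
+                            # (illustrative): R_SCRIPT_PATH=/path/to/repo; export R_SCRIPT_PATH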
+                            if return_code:
+                                error_message = 'Error creating env.sh file for tool dependency %s, return_code: %s' % \
+                                    ( str( tool_dependency.name ), str( return_code ) )
+                                log.debug( error_message )
+                                status = self.app.install_model.ToolDependency.installation_status.ERROR
+                                tool_dependency = \
+                                    tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                         tool_dependency=tool_dependency,
+                                                                                         status=status,
+                                                                                         error_message=error_message )
+                            else:
+                                if tool_dependency.status not in [ self.app.install_model.ToolDependency.installation_status.ERROR,
+                                                                   self.app.install_model.ToolDependency.installation_status.INSTALLED ]:
+                                    status = self.app.install_model.ToolDependency.installation_status.INSTALLED
+                                    tool_dependency = \
+                                        tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                             tool_dependency=tool_dependency,
+                                                                                             status=status )
+                                    log.debug( 'Environment variable %s set in %s for tool dependency %s.' %
+                                        ( str( env_var_name ), str( install_dir ), str( tool_dependency.name ) ) )
+                        else:
+                            error_message = 'Only set_environment version 1.0 is currently supported (i.e., change your tag to be <set_environment version="1.0">).'
+                            status = self.app.install_model.ToolDependency.installation_status.ERROR
+                            tool_dependency = \
+                                tool_dependency_util.set_tool_dependency_attributes( self.app,
+                                                                                     tool_dependency=tool_dependency,
+                                                                                     status=status,
+                                                                                     error_message=error_message )
+                        # Only record a tool dependency that was actually created or updated above.
+                        tool_dependencies.append( tool_dependency )
+        return tool_dependencies
diff --git a/lib/tool_shed/galaxy_install/tool_migration_manager.py b/lib/tool_shed/galaxy_install/tool_migration_manager.py
new file mode 100644
index 0000000..fe00d90
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -0,0 +1,698 @@
+"""
+Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml).
+All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
+"""
+import json
+import logging
+import os
+import shutil
+import tempfile
+import threading
+
+from galaxy import util
+from galaxy.tools.toolbox import ToolSection
+from galaxy.tools.toolbox.parser import ensure_tool_conf_item
+from galaxy.util.odict import odict
+
+from tool_shed.galaxy_install import install_manager
+from tool_shed.galaxy_install.datatypes import custom_datatype_manager
+from tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import InstalledRepositoryMetadataManager
+from tool_shed.galaxy_install.tools import tool_panel_manager
+
+from tool_shed.tools import data_table_manager
+from tool_shed.tools import tool_version_manager
+
+from tool_shed.util import basic_util
+from tool_shed.util import common_util
+from tool_shed.util import hg_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import tool_util
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class ToolMigrationManager( object ):
+
+    def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config,
+                  install_dependencies ):
+        """
+        Check tool settings in tool_shed_install_config and install all repositories
+        that are not already installed.  The tool panel configuration file is the received
+        migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
+        """
+        self.app = app
+        self.toolbox = self.app.toolbox
+        self.migrated_tools_config = migrated_tools_config
+        # Initialize the ToolPanelManager.
+        self.tpm = tool_panel_manager.ToolPanelManager( self.app )
+        # If install_dependencies is True but tool_dependency_dir is not set, do not attempt
+        # to install but print informative error message.
+        if install_dependencies and app.config.tool_dependency_dir is None:
+            message = 'You are attempting to install tool dependencies but do not have a value '
+            message += 'for "tool_dependency_dir" set in your galaxy.ini file.  Set this '
+            message += 'location value to the path where you want tool dependencies installed and '
+            message += 'rerun the migration script.'
+            raise Exception( message )
+        # Get the local non-shed related tool panel configs (there can be more than one, and the
+        # default name is tool_conf.xml).
+        self.proprietary_tool_confs = self.non_shed_tool_panel_configs
+        self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
+        # Set the location where the repositories will be installed by retrieving the tool_path
+        # setting from migrated_tools_config.
+        tree, error_message = xml_util.parse_xml( migrated_tools_config )
+        if tree is None:
+            log.error( error_message )
+        else:
+            root = tree.getroot()
+            self.tool_path = root.get( 'tool_path' )
+            log.debug( "Repositories will be installed into configured tool_path location ", str( self.tool_path ) )
+            # Parse tool_shed_install_config to check each of the tools.
+            self.tool_shed_install_config = tool_shed_install_config
+            tree, error_message = xml_util.parse_xml( tool_shed_install_config )
+            if tree is None:
+                log.error( error_message )
+            else:
+                root = tree.getroot()
+                defined_tool_shed_url = root.get( 'name' )
+                self.tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, defined_tool_shed_url )
+                self.tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed_url )
+                self.repository_owner = common_util.REPOSITORY_OWNER
+                self.shed_config_dict = self.tpm.get_shed_tool_conf_dict( self.migrated_tools_config )
+                # Since tool migration scripts can be executed any number of times, we need to
+                # make sure the appropriate tools are defined in tool_conf.xml.  If no tools
+                # associated with the migration stage are defined, no repositories will be installed
+                # on disk.  We default to assuming the tool shed is not accessible until proven otherwise.
+                tool_shed_accessible = False
+                tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
+                if tool_panel_configs:
+                    # The missing_tool_configs_dict contents are something like:
+                    # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
+                    tool_shed_accessible, missing_tool_configs_dict = \
+                        common_util.check_for_missing_tools( app,
+                                                             tool_panel_configs,
+                                                             latest_migration_script_number )
+                else:
+                    # It doesn't matter if the tool shed is accessible since there are no migrated
+                    # tools defined in the local Galaxy instance, but we have to set the value of
+                    # tool_shed_accessible to True so that the value of migrate_tools.version can
+                    # be correctly set in the database.
+                    tool_shed_accessible = True
+                    missing_tool_configs_dict = odict()
+                if tool_shed_accessible:
+                    if len( self.proprietary_tool_confs ) == 1:
+                        plural = ''
+                        file_names = self.proprietary_tool_confs[ 0 ]
+                    else:
+                        plural = 's'
+                        file_names = ', '.join( self.proprietary_tool_confs )
+                    if missing_tool_configs_dict:
+                        for proprietary_tool_conf in self.proprietary_tool_confs:
+                            # Create a backup of the tool configuration in the un-migrated state.
+                            shutil.copy( proprietary_tool_conf, '%s-pre-stage-%04d' % ( proprietary_tool_conf,
+                                                                                        latest_migration_script_number ) )
+                        for repository_elem in root:
+                            # Make sure we have a valid repository tag.
+                            if self.__is_valid_repository_tag( repository_elem ):
+                                # Get all repository dependencies for the repository defined by the
+                                # current repository_elem.  Repository dependency definitions contained
+                                # in tool shed repositories with migrated tools must never define a
+                                # relationship to a repository dependency that contains a tool.  The
+                                # repository dependency can only contain items that are not loaded into
+                                # the Galaxy tool panel (e.g., tool dependency definitions, custom datatypes,
+                                # etc).  This restriction must be followed down the entire dependency hierarchy.
+                                name = repository_elem.get( 'name' )
+                                changeset_revision = repository_elem.get( 'changeset_revision' )
+                                tool_shed_accessible, repository_dependencies_dict = \
+                                    common_util.get_repository_dependencies( app,
+                                                                             self.tool_shed_url,
+                                                                             name,
+                                                                             self.repository_owner,
+                                                                             changeset_revision )
+                                # Make sure all repository dependency records exist (as tool_shed_repository
+                                # table rows) in the Galaxy database.
+                                created_tool_shed_repositories = \
+                                    self.create_or_update_tool_shed_repository_records( name,
+                                                                                        changeset_revision,
+                                                                                        repository_dependencies_dict )
+                                # Order the repositories for proper installation.  This process is similar to the
+                                # process used when installing tool shed repositories, but does not handle managing
+                                # tool panel sections and other components since repository dependency definitions
+                                # contained in tool shed repositories with migrated tools must never define a relationship
+                                # to a repository dependency that contains a tool.
+                                ordered_tool_shed_repositories = \
+                                    self.order_repositories_for_installation( created_tool_shed_repositories,
+                                                                              repository_dependencies_dict )
+
+                                for tool_shed_repository in ordered_tool_shed_repositories:
+                                    is_repository_dependency = self.__is_repository_dependency( name,
+                                                                                                changeset_revision,
+                                                                                                tool_shed_repository )
+                                    self.install_repository( repository_elem,
+                                                             tool_shed_repository,
+                                                             install_dependencies,
+                                                             is_repository_dependency=is_repository_dependency )
+                    else:
+                        message = "\nNo tools associated with migration stage %s are defined in your " % \
+                            str( latest_migration_script_number )
+                        message += "file%s named %s,\nso no repositories will be installed on disk.\n" % \
+                            ( plural, file_names )
+                        log.info( message )
+                else:
+                    message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % \
+                        str( latest_migration_script_number )
+                    message += "Try again later.\n"
+                    log.error( message )
+
+    def create_or_update_tool_shed_repository_record( self, name, owner, changeset_revision, description=None ):
+
+        # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
+        relative_clone_dir = os.path.join( self.tool_shed, 'repos', owner, name, changeset_revision )
+        clone_dir = os.path.join( self.tool_path, relative_clone_dir )
+        if not self.__iscloned( clone_dir ):
+            repository_clone_url = os.path.join( self.tool_shed_url, 'repos', owner, name )
+            ctx_rev = suc.get_ctx_rev( self.app, self.tool_shed_url, name, owner, changeset_revision )
+            tool_shed_repository = repository_util.create_or_update_tool_shed_repository( app=self.app,
+                                                                                          name=name,
+                                                                                          description=description,
+                                                                                          installed_changeset_revision=changeset_revision,
+                                                                                          ctx_rev=ctx_rev,
+                                                                                          repository_clone_url=repository_clone_url,
+                                                                                          metadata_dict={},
+                                                                                          status=self.app.install_model.ToolShedRepository.installation_status.NEW,
+                                                                                          current_changeset_revision=None,
+                                                                                          owner=self.repository_owner,
+                                                                                          dist_to_shed=True )
+            return tool_shed_repository
+        return None
+
+    def create_or_update_tool_shed_repository_records( self, name, changeset_revision, repository_dependencies_dict ):
+        """
+        Make sure the repository defined by name and changeset_revision and all of its repository dependencies have
+        associated tool_shed_repository table rows in the Galaxy database.
+        """
+        created_tool_shed_repositories = []
+        description = repository_dependencies_dict.get( 'description', None )
+        tool_shed_repository = self.create_or_update_tool_shed_repository_record( name,
+                                                                                  self.repository_owner,
+                                                                                  changeset_revision,
+                                                                                  description=description )
+        if tool_shed_repository:
+            created_tool_shed_repositories.append( tool_shed_repository )
+        for rd_key, rd_tups in repository_dependencies_dict.items():
+            if rd_key in [ 'root_key', 'description' ]:
+                continue
+            for rd_tup in rd_tups:
+                parsed_rd_tup = common_util.parse_repository_dependency_tuple( rd_tup )
+                rd_tool_shed, rd_name, rd_owner, rd_changeset_revision = parsed_rd_tup[ 0:4 ]
+                # TODO: Make sure the repository description is applied to the new repository record during installation.
+                tool_shed_repository = self.create_or_update_tool_shed_repository_record( rd_name,
+                                                                                          rd_owner,
+                                                                                          rd_changeset_revision,
+                                                                                          description=None )
+                if tool_shed_repository:
+                    created_tool_shed_repositories.append( tool_shed_repository )
+        return created_tool_shed_repositories
+
+    def filter_and_persist_proprietary_tool_panel_configs( self, tool_configs_to_filter ):
+        """Eliminate all entries in all non-shed-related tool panel configs for all tool config file names in the received tool_configs_to_filter."""
+        for proprietary_tool_conf in self.proprietary_tool_confs:
+            persist_required = False
+            tree, error_message = xml_util.parse_xml( proprietary_tool_conf )
+            if tree:
+                root = tree.getroot()
+                for elem in root:
+                    if elem.tag == 'tool':
+                        # Tools outside of sections.
+                        file_path = elem.get( 'file', None )
+                        if file_path:
+                            if file_path in tool_configs_to_filter:
+                                root.remove( elem )
+                                persist_required = True
+                    elif elem.tag == 'section':
+                        # Tools contained in a section.
+                        for section_elem in elem:
+                            if section_elem.tag == 'tool':
+                                file_path = section_elem.get( 'file', None )
+                                if file_path:
+                                    if file_path in tool_configs_to_filter:
+                                        elem.remove( section_elem )
+                                        persist_required = True
+            if persist_required:
+                fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-fapptpc" )
+                tmp_filename = fh.name
+                fh.close()
+                # tree.write() opens the target path itself; the temporary file only reserves a unique name.
+                tree.write( tmp_filename, encoding='utf-8', xml_declaration=True )
+                shutil.move( tmp_filename, os.path.abspath( proprietary_tool_conf ) )
+                os.chmod( proprietary_tool_conf, 0o644 )
+
+    def get_containing_tool_sections( self, tool_config ):
+        """
+        If tool_config is defined somewhere in self.proprietary_tool_panel_elems, return True and a list of ToolSections in which the
+        tool is displayed.  If the tool is displayed outside of any sections, None is appended to the list.
+        """
+        tool_sections = []
+        is_displayed = False
+        for proprietary_tool_panel_elem in self.proprietary_tool_panel_elems:
+            if proprietary_tool_panel_elem.tag == 'tool':
+                # The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
+                proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' )
+                if tool_config == proprietary_tool_config:
+                    # The tool is loaded outside of any sections.
+                    tool_sections.append( None )
+                    if not is_displayed:
+                        is_displayed = True
+            if proprietary_tool_panel_elem.tag == 'section':
+                # The proprietary_tool_panel_elem looks something like <section name="EMBOSS" id="EMBOSSLite">.
+                for section_elem in proprietary_tool_panel_elem:
+                    if section_elem.tag == 'tool':
+                        # The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
+                        proprietary_tool_config = section_elem.get( 'file' )
+                        if tool_config == proprietary_tool_config:
+                            # The tool is loaded inside of the section_elem.
+                            tool_sections.append( ToolSection( ensure_tool_conf_item( proprietary_tool_panel_elem ) ) )
+                            if not is_displayed:
+                                is_displayed = True
+        return is_displayed, tool_sections
+
+    def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
+        if self.shed_config_dict.get( 'tool_path' ):
+            relative_install_dir = os.path.join( self.shed_config_dict[ 'tool_path' ], relative_install_dir )
+        tool_config_filename = basic_util.strip_path( tool_config )
+        for root, dirs, files in os.walk( relative_install_dir ):
+            if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+                if '.hg' in dirs:
+                    dirs.remove( '.hg' )
+                for name in files:
+                    filename = basic_util.strip_path( name )
+                    if filename == tool_config_filename:
+                        full_path = str( os.path.abspath( os.path.join( root, name ) ) )
+                        tool = self.toolbox.load_tool( full_path, use_cached=False )
+                        return suc.generate_tool_guid( repository_clone_url, tool )
+        # No file matching tool_config was found under relative_install_dir, so no guid can be generated;
+        # return None and let the caller decide how to handle that.
+        return None
+
+    def get_prior_install_required_dict( self, tool_shed_repositories, repository_dependencies_dict ):
+        """
+        Return a dictionary whose keys are the ids of the received tool_shed_repositories.  Each value is a
+        list of ids (drawn from the same set of repositories) identifying the repositories that must be
+        installed prior to the repository associated with the key.
+        """
+        # Initialize the dictionary.
+        prior_install_required_dict = {}
+        tsr_ids = [ tool_shed_repository.id for tool_shed_repository in tool_shed_repositories ]
+        for tsr_id in tsr_ids:
+            prior_install_required_dict[ tsr_id ] = []
+        # Inspect the repository dependencies about to be installed and populate the dictionary.
+        for rd_key, rd_tups in repository_dependencies_dict.items():
+            if rd_key in [ 'root_key', 'description' ]:
+                continue
+            for rd_tup in rd_tups:
+                prior_install_ids = []
+                tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                    common_util.parse_repository_dependency_tuple( rd_tup )
+                if util.asbool( prior_installation_required ):
+                    for tsr in tool_shed_repositories:
+                        if tsr.name == name and tsr.owner == owner and tsr.changeset_revision == changeset_revision:
+                            prior_install_ids.append( tsr.id )
+                        prior_install_required_dict[ tsr.id ] = prior_install_ids
+        return prior_install_required_dict
+
+    def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ):
+        """
+        Parse each config in self.proprietary_tool_confs (the default is tool_conf.xml) and generate a list of Elements that are
+        either ToolSection elements or Tool elements.  These will be used to generate new entries in the migrated_tools_conf.xml
+        file for the installed tools.
+        """
+        tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
+        # Parse the XML and load the file attributes for later checking against the integrated elements from self.proprietary_tool_confs.
+        migrated_tool_configs = []
+        tree, error_message = xml_util.parse_xml( tools_xml_file_path )
+        if tree is None:
+            return []
+        root = tree.getroot()
+        for elem in root:
+            if elem.tag == 'repository':
+                for tool_elem in elem:
+                    migrated_tool_configs.append( tool_elem.get( 'file' ) )
+        # Parse each file in self.proprietary_tool_confs and generate the integrated list of tool panel Elements that contain them.
+        tool_panel_elems = []
+        for proprietary_tool_conf in self.proprietary_tool_confs:
+            tree, error_message = xml_util.parse_xml( proprietary_tool_conf )
+            if tree is None:
+                return []
+            root = tree.getroot()
+            for elem in root:
+                if elem.tag == 'tool':
+                    # Tools outside of sections.
+                    file_path = elem.get( 'file', None )
+                    if file_path:
+                        if file_path in migrated_tool_configs:
+                            if elem not in tool_panel_elems:
+                                tool_panel_elems.append( elem )
+                elif elem.tag == 'section':
+                    # Tools contained in a section.
+                    for section_elem in elem:
+                        if section_elem.tag == 'tool':
+                            file_path = section_elem.get( 'file', None )
+                            if file_path:
+                                if file_path in migrated_tool_configs:
+                                    # Append the section, not the tool.
+                                    if elem not in tool_panel_elems:
+                                        tool_panel_elems.append( elem )
+        return tool_panel_elems
+
+    def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem,
+                                    install_dependencies, is_repository_dependency=False ):
+        """
+        Generate the metadata for the installed tool shed repository and handle its contents (tools, tool
+        dependencies and proprietary datatypes).  If the installed tool_shed_repository
+        contains tools that are loaded into the Galaxy tool panel, this method will automatically eliminate all entries for each
+        of the tools defined in the received repository_elem from all non-shed-related tool panel configuration files since the
+        entries are automatically added to the reserved migrated_tools_conf.xml file as part of the migration process.
+        """
+        tool_configs_to_filter = []
+        tool_panel_dict_for_display = odict()
+        if self.tool_path:
+            repo_install_dir = os.path.join( self.tool_path, relative_install_dir )
+        else:
+            repo_install_dir = relative_install_dir
+        if not is_repository_dependency:
+            for tool_elem in repository_elem:
+                # The tool_elem looks something like this:
+                # <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
+                tool_config = tool_elem.get( 'file' )
+                guid = self.get_guid( repository_clone_url, relative_install_dir, tool_config )
+                # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
+                is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
+                if is_displayed:
+                    tool_panel_dict_for_tool_config = \
+                        self.tpm.generate_tool_panel_dict_for_tool_config( guid,
+                                                                           tool_config,
+                                                                           tool_sections=tool_sections )
+                    # The tool_panel_dict has the following structure.
+                    # {<Tool guid> : [{ tool_config : <tool_config_file>,
+                    #                   id: <ToolSection id>,
+                    #                   version : <ToolSection version>,
+                    #                   name : <ToolSection name>}]}
+                    for k, v in tool_panel_dict_for_tool_config.items():
+                        tool_panel_dict_for_display[ k ] = v
+                        for tool_panel_dict in v:
+                            # Keep track of tool config file names associated with entries that have been made to the
+                            # migrated_tools_conf.xml file so they can be eliminated from all non-shed-related tool panel configs.
+                            if tool_config not in tool_configs_to_filter:
+                                tool_configs_to_filter.append( tool_config )
+                else:
+                    log.error( 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).'
+                        % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) ) )
+            if tool_configs_to_filter:
+                lock = threading.Lock()
+                lock.acquire( True )
+                try:
+                    self.filter_and_persist_proprietary_tool_panel_configs( tool_configs_to_filter )
+                except Exception as e:
+                    log.exception( "Exception attempting to filter and persist non-shed-related tool panel configs:\n%s" % str( e ) )
+                finally:
+                    lock.release()
+        irmm = InstalledRepositoryMetadataManager( app=self.app,
+                                                   tpm=self.tpm,
+                                                   repository=tool_shed_repository,
+                                                   changeset_revision=tool_shed_repository.changeset_revision,
+                                                   repository_clone_url=repository_clone_url,
+                                                   shed_config_dict=self.shed_config_dict,
+                                                   relative_install_dir=relative_install_dir,
+                                                   repository_files_dir=None,
+                                                   resetting_all_metadata_on_repository=False,
+                                                   updating_installed_repository=False,
+                                                   persist=True )
+        irmm.generate_metadata_for_changeset_revision()
+        irmm_metadata_dict = irmm.get_metadata_dict()
+        tool_shed_repository.metadata = irmm_metadata_dict
+        self.app.install_model.context.add( tool_shed_repository )
+        self.app.install_model.context.flush()
+        has_tool_dependencies = self.__has_tool_dependencies( irmm_metadata_dict )
+        if has_tool_dependencies:
+            # All tool_dependency objects must be created before the tools are processed even if no
+            # tool dependencies will be installed.
+            tool_dependencies = tool_dependency_util.create_tool_dependency_objects( self.app,
+                                                                                     tool_shed_repository,
+                                                                                     relative_install_dir,
+                                                                                     set_status=True )
+        else:
+            tool_dependencies = None
+        if 'tools' in irmm_metadata_dict:
+            tdtm = data_table_manager.ToolDataTableManager( self.app )
+            sample_files = irmm_metadata_dict.get( 'sample_files', [] )
+            sample_files = [ str( s ) for s in sample_files ]
+            tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
+            tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
+            sample_files_copied = [ s for s in tool_index_sample_files ]
+            repository_tools_tups = irmm.get_repository_tools_tups()
+            if repository_tools_tups:
+                # Handle missing data table entries for tool parameters that are dynamically
+                # generated select lists.
+                repository_tools_tups = tdtm.handle_missing_data_table_entry( relative_install_dir,
+                                                                              self.tool_path,
+                                                                              repository_tools_tups )
+                # Handle missing index files for tool parameters that are dynamically generated select lists.
+                repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( self.app,
+                                                                                                  self.tool_path,
+                                                                                                  sample_files,
+                                                                                                  repository_tools_tups,
+                                                                                                  sample_files_copied )
+                # Copy remaining sample files included in the repository to the ~/tool-data
+                # directory of the local Galaxy instance.
+                tool_util.copy_sample_files( self.app,
+                                             sample_files,
+                                             tool_path=self.tool_path,
+                                             sample_files_copied=sample_files_copied )
+                if not is_repository_dependency:
+                    self.tpm.add_to_tool_panel( tool_shed_repository.name,
+                                                repository_clone_url,
+                                                tool_shed_repository.installed_changeset_revision,
+                                                repository_tools_tups,
+                                                self.repository_owner,
+                                                self.migrated_tools_config,
+                                                tool_panel_dict=tool_panel_dict_for_display,
+                                                new_install=True )
+        if install_dependencies and tool_dependencies and has_tool_dependencies:
+            # Install tool dependencies.
+            irm = install_manager.InstallRepositoryManager( self.app, self.tpm )
+            itdm = install_manager.InstallToolDependencyManager( self.app )
+            irm.update_tool_shed_repository_status( tool_shed_repository,
+                                                    self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+            # Get the tool_dependencies.xml file from disk.
+            tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
+            installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=tool_shed_repository,
+                                                                                    tool_dependencies_config=tool_dependencies_config,
+                                                                                    tool_dependencies=tool_dependencies,
+                                                                                    from_tool_migration_manager=True )
+            for installed_tool_dependency in installed_tool_dependencies:
+                if installed_tool_dependency.status == self.app.install_model.ToolDependency.installation_status.ERROR:
+                    log.error(
+                        'The ToolMigrationManager returned the following error while installing tool dependency %s: %s',
+                        installed_tool_dependency.name, installed_tool_dependency.error_message )
+        if 'datatypes' in irmm_metadata_dict:
+            cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
+            tool_shed_repository.status = self.app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
+            if not tool_shed_repository.includes_datatypes:
+                tool_shed_repository.includes_datatypes = True
+            self.app.install_model.context.add( tool_shed_repository )
+            self.app.install_model.context.flush()
+            work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-hrc" )
+            datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, repo_install_dir )
+            # Load proprietary data types required by tools.  The value of override is not
+            # important here since the Galaxy server will be started after this installation
+            # completes.
+            converter_path, display_path = \
+                cdl.alter_config_and_load_prorietary_datatypes( datatypes_config,
+                                                                repo_install_dir,
+                                                                override=False )
+            if converter_path or display_path:
+                # Create a dictionary of tool shed repository related information.
+                repository_dict = \
+                    cdl.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed_url,
+                                                                          name=tool_shed_repository.name,
+                                                                          owner=self.repository_owner,
+                                                                          installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+                                                                          tool_dicts=irmm_metadata_dict.get( 'tools', [] ),
+                                                                          converter_path=converter_path,
+                                                                          display_path=display_path )
+            if converter_path:
+                # Load proprietary datatype converters
+                self.app.datatypes_registry.load_datatype_converters( self.toolbox,
+                                                                      installed_repository_dict=repository_dict )
+            if display_path:
+                # Load proprietary datatype display applications
+                self.app.datatypes_registry.load_display_applications( self.app, installed_repository_dict=repository_dict )
+            basic_util.remove_dir( work_dir )
+
+    def install_repository( self, repository_elem, tool_shed_repository, install_dependencies, is_repository_dependency=False ):
+        """Install a single repository, loading contained tools into the tool panel."""
+        # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
+        relative_clone_dir = os.path.join( tool_shed_repository.tool_shed,
+                                           'repos',
+                                           tool_shed_repository.owner,
+                                           tool_shed_repository.name,
+                                           tool_shed_repository.installed_changeset_revision )
+        clone_dir = os.path.join( self.tool_path, relative_clone_dir )
+        cloned_ok = self.__iscloned( clone_dir )
+        is_installed = False
+        # Any of the following states should count as installed in this context.
+        if tool_shed_repository.status in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED,
+                                            self.app.install_model.ToolShedRepository.installation_status.ERROR,
+                                            self.app.install_model.ToolShedRepository.installation_status.UNINSTALLED,
+                                            self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+            is_installed = True
+        if cloned_ok and is_installed:
+            log.info( "Skipping automatic install of repository '%s' because it has already been installed in location %s",
+                     tool_shed_repository.name, clone_dir )
+        else:
+            irm = install_manager.InstallRepositoryManager( self.app, self.tpm )
+            repository_clone_url = os.path.join( self.tool_shed_url, 'repos', tool_shed_repository.owner, tool_shed_repository.name )
+            relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
+            install_dir = os.path.join( clone_dir, tool_shed_repository.name )
+            ctx_rev = suc.get_ctx_rev( self.app,
+                                       self.tool_shed_url,
+                                       tool_shed_repository.name,
+                                       tool_shed_repository.owner,
+                                       tool_shed_repository.installed_changeset_revision )
+            if not cloned_ok:
+                irm.update_tool_shed_repository_status( tool_shed_repository,
+                                                        self.app.install_model.ToolShedRepository.installation_status.CLONING )
+                cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+            if cloned_ok and not is_installed:
+                self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
+                                                 repository_clone_url=repository_clone_url,
+                                                 relative_install_dir=relative_install_dir,
+                                                 repository_elem=repository_elem,
+                                                 install_dependencies=install_dependencies,
+                                                 is_repository_dependency=is_repository_dependency )
+                self.app.install_model.context.refresh( tool_shed_repository )
+                metadata_dict = tool_shed_repository.metadata
+                if 'tools' in metadata_dict:
+                    # Initialize the ToolVersionManager.
+                    tvm = tool_version_manager.ToolVersionManager( self.app )
+                    irm.update_tool_shed_repository_status( tool_shed_repository,
+                                                            self.app.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+                    # Get the tool_versions from the tool shed for each tool in the installed change set.
+                    params = dict( name=tool_shed_repository.name,
+                                   owner=self.repository_owner,
+                                   changeset_revision=tool_shed_repository.installed_changeset_revision )
+                    pathspec = [ 'repository', 'get_tool_versions' ]
+                    text = util.url_get( self.tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( self.tool_shed_url ), pathspec=pathspec, params=params )
+                    if text:
+                        tool_version_dicts = json.loads( text )
+                        tvm.handle_tool_versions( tool_version_dicts, tool_shed_repository )
+                    else:
+                        # Set the tool versions since they seem to be missing
+                        # for this repository in the tool shed. CRITICAL NOTE:
+                        # These default settings may not properly handle all
+                        # parent/child associations.
+                        for tool_dict in metadata_dict[ 'tools' ]:
+                            tool_id = tool_dict[ 'guid' ]
+                            old_tool_id = tool_dict[ 'id' ]
+                            tool_version_using_old_id = tvm.get_tool_version( old_tool_id )
+                            tool_version_using_guid = tvm.get_tool_version( tool_id )
+                            if not tool_version_using_old_id:
+                                tool_version_using_old_id = self.app.install_model.ToolVersion( tool_id=old_tool_id,
+                                                                                                tool_shed_repository=tool_shed_repository )
+                                self.app.install_model.context.add( tool_version_using_old_id )
+                                self.app.install_model.context.flush()
+                            if not tool_version_using_guid:
+                                tool_version_using_guid = self.app.install_model.ToolVersion( tool_id=tool_id,
+                                                                                              tool_shed_repository=tool_shed_repository )
+                                self.app.install_model.context.add( tool_version_using_guid )
+                                self.app.install_model.context.flush()
+                            # Associate the two versions as parent / child.
+                            tool_version_association = tvm.get_tool_version_association( tool_version_using_old_id,
+                                                                                         tool_version_using_guid )
+                            if not tool_version_association:
+                                tool_version_association = \
+                                    self.app.install_model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
+                                                                                   parent_id=tool_version_using_old_id.id )
+                                self.app.install_model.context.add( tool_version_association )
+                                self.app.install_model.context.flush()
+                irm.update_tool_shed_repository_status( tool_shed_repository,
+                                                        self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
+            else:
+                log.error( 'Error attempting to clone repository %s: %s', str( tool_shed_repository.name ), str( error_message ) )
+                irm.update_tool_shed_repository_status( tool_shed_repository,
+                                                        self.app.install_model.ToolShedRepository.installation_status.ERROR,
+                                                        error_message=error_message )
+
+    @property
+    def non_shed_tool_panel_configs( self ):
+        return common_util.get_non_shed_tool_panel_configs( self.app )
+
+    def order_repositories_for_installation( self, tool_shed_repositories, repository_dependencies_dict ):
+        """
+        Some repositories may have repository dependencies that are required to be installed before the dependent
+        repository.  This method will inspect the list of repositories about to be installed and make sure to order
+        them appropriately.  For each repository about to be installed, if required repositories are not contained
+        in the list of repositories about to be installed, then they are not considered.  Repository dependency
+        definitions that contain circular dependencies should not result in an infinite loop, but obviously prior
+        installation will not be handled for one or more of the repositories that require prior installation.  This
+        process is similar to the process used when installing tool shed repositories, but does not handle managing
+        tool panel sections and other components since repository dependency definitions contained in tool shed
+        repositories with migrated tools must never define a relationship to a repository dependency that contains
+        a tool.
+        """
+        ordered_tool_shed_repositories = []
+        ordered_tsr_ids = []
+        processed_tsr_ids = []
+        prior_install_required_dict = self.get_prior_install_required_dict( tool_shed_repositories, repository_dependencies_dict )
+        while len( processed_tsr_ids ) != len( prior_install_required_dict.keys() ):
+            tsr_id = suc.get_next_prior_import_or_install_required_dict_entry( prior_install_required_dict, processed_tsr_ids )
+            processed_tsr_ids.append( tsr_id )
+            # Create the ordered_tsr_ids list, inserting required repository ids ahead of their dependents.
+            if tsr_id not in ordered_tsr_ids:
+                prior_install_required_ids = prior_install_required_dict[ tsr_id ]
+                for prior_install_required_id in prior_install_required_ids:
+                    if prior_install_required_id not in ordered_tsr_ids:
+                        # Install the associated repository dependency first.
+                        ordered_tsr_ids.append( prior_install_required_id )
+                ordered_tsr_ids.append( tsr_id )
+        for ordered_tsr_id in ordered_tsr_ids:
+            for tool_shed_repository in tool_shed_repositories:
+                if tool_shed_repository.id == ordered_tsr_id:
+                    ordered_tool_shed_repositories.append( tool_shed_repository )
+                    break
+        return ordered_tool_shed_repositories
+
+    def __has_tool_dependencies( self, metadata_dict ):
+        """Determine if the provided metadata_dict specifies tool dependencies."""
+        # The use of the orphan_tool_dependencies category in metadata has been deprecated, but we still need to check in case
+        # the metadata is out of date.
+        if 'tool_dependencies' in metadata_dict or 'orphan_tool_dependencies' in metadata_dict:
+            return True
+        return False
+
+    def __iscloned( self, clone_dir ):
+        full_path = os.path.abspath( clone_dir )
+        if os.path.exists( full_path ):
+            for root, dirs, files in os.walk( full_path ):
+                if '.hg' in dirs:
+                    # Assume that the repository has been installed if we find a .hg directory.
+                    return True
+        return False
+
+    def __is_repository_dependency( self, name, changeset_revision, tool_shed_repository ):
+        """Determine if the provided tool shed repository is a repository dependency."""
+        if str( tool_shed_repository.name ) != str( name ) or \
+                str( tool_shed_repository.owner ) != str( self.repository_owner ) or \
+                str( tool_shed_repository.changeset_revision ) != str( changeset_revision ):
+            return True
+        return False
+
+    def __is_valid_repository_tag( self, elem ):
+        # <repository name="emboss_datatypes" description="Datatypes for Emboss tools" changeset_revision="a89163f31369" />
+        if elem.tag != 'repository':
+            return False
+        if not elem.get( 'name' ):
+            return False
+        if not elem.get( 'changeset_revision' ):
+            return False
+        return True
diff --git a/lib/tool_shed/galaxy_install/tools/__init__.py b/lib/tool_shed/galaxy_install/tools/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/galaxy_install/tools/data_manager.py b/lib/tool_shed/galaxy_install/tools/data_manager.py
new file mode 100644
index 0000000..58906ac
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tools/data_manager.py
@@ -0,0 +1,166 @@
+import logging
+import os
+import threading
+import time
+
+from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class DataManagerHandler( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def data_manager_config_elems_to_xml_file( self, config_elems, config_filename ):
+        """
+        Persist the current in-memory list of config_elems to a file named by the value
+        of config_filename.
+        """
+        lock = threading.Lock()
+        lock.acquire( True )
+        try:
+            fh = open( config_filename, 'wb' )
+            fh.write( '<?xml version="1.0"?>\n<data_managers>\n' )
+            for elem in config_elems:
+                fh.write( xml_util.xml_to_string( elem ) )
+            fh.write( '</data_managers>\n' )
+            fh.close()
+        except Exception as e:
+            log.exception( "Exception in DataManagerHandler.data_manager_config_elems_to_xml_file: %s" % str( e ) )
+        finally:
+            lock.release()
+
+    def install_data_managers( self, shed_data_manager_conf_filename, metadata_dict, shed_config_dict,
+                               relative_install_dir, repository, repository_tools_tups ):
+        rval = []
+        if 'data_manager' in metadata_dict:
+            tpm = tool_panel_manager.ToolPanelManager( self.app )
+            repository_tools_by_guid = {}
+            for tool_tup in repository_tools_tups:
+                repository_tools_by_guid[ tool_tup[ 1 ] ] = dict( tool_config_filename=tool_tup[ 0 ], tool=tool_tup[ 2 ] )
+            # Load existing data managers.
+            tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
+            if tree is None:
+                return rval
+            config_elems = [ elem for elem in tree.getroot() ]
+            repo_data_manager_conf_filename = metadata_dict['data_manager'].get( 'config_filename', None )
+            if repo_data_manager_conf_filename is None:
+                log.debug( "No data_manager_conf.xml file has been defined." )
+                return rval
+            data_manager_config_has_changes = False
+            relative_repo_data_manager_dir = os.path.join( shed_config_dict.get( 'tool_path', '' ), relative_install_dir )
+            repo_data_manager_conf_filename = os.path.join( relative_repo_data_manager_dir, repo_data_manager_conf_filename )
+            tree, error_message = xml_util.parse_xml( repo_data_manager_conf_filename )
+            if tree is None:
+                return rval
+            root = tree.getroot()
+            for elem in root:
+                if elem.tag == 'data_manager':
+                    data_manager_id = elem.get( 'id', None )
+                    if data_manager_id is None:
+                        log.error( "A data manager was defined that does not have an id and will not be installed:\n%s" %
+                                   xml_util.xml_to_string( elem ) )
+                        continue
+                    data_manager_dict = metadata_dict['data_manager'].get( 'data_managers', {} ).get( data_manager_id, None )
+                    if data_manager_dict is None:
+                        log.error( "Data manager metadata is not defined properly for '%s'." % ( data_manager_id ) )
+                        continue
+                    guid = data_manager_dict.get( 'guid', None )
+                    if guid is None:
+                        log.error( "Data manager guid '%s' is not set in metadata for '%s'." % ( guid, data_manager_id ) )
+                        continue
+                    elem.set( 'guid', guid )
+                    tool_guid = data_manager_dict.get( 'tool_guid', None )
+                    if tool_guid is None:
+                        log.error( "Data manager tool guid '%s' is not set in metadata for '%s'." % ( tool_guid, data_manager_id ) )
+                        continue
+                    tool_dict = repository_tools_by_guid.get( tool_guid, None )
+                    if tool_dict is None:
+                        log.error( "Data manager tool guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" %
+                                   ( tool_guid, data_manager_id ) )
+                        continue
+                    tool = tool_dict.get( 'tool', None )
+                    if tool is None:
+                        log.error( "Data manager tool with guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" %
+                                   ( tool_guid, data_manager_id ) )
+                        continue
+                    tool_config_filename = tool_dict.get( 'tool_config_filename', None )
+                    if tool_config_filename is None:
+                        log.error( "Data manager metadata is missing 'tool_config_file' for '%s'." % ( data_manager_id ) )
+                        continue
+                    elem.set( 'shed_conf_file', shed_config_dict['config_filename'] )
+                    if elem.get( 'tool_file', None ) is not None:
+                        del elem.attrib[ 'tool_file' ]  # remove old tool_file info
+                    tool_elem = tpm.generate_tool_elem( repository.tool_shed,
+                                                        repository.name,
+                                                        repository.installed_changeset_revision,
+                                                        repository.owner,
+                                                        tool_config_filename,
+                                                        tool,
+                                                        None )
+                    elem.insert( 0, tool_elem )
+                    data_manager = \
+                        self.app.data_managers.load_manager_from_elem( elem,
+                                                                       tool_path=shed_config_dict.get( 'tool_path', '' ),
+                                                                       replace_existing=True )
+                    if data_manager:
+                        rval.append( data_manager )
+                else:
+                    log.warning( "Encountered unexpected element '%s':\n%s" % ( elem.tag, xml_util.xml_to_string( elem ) ) )
+                config_elems.append( elem )
+                data_manager_config_has_changes = True
+            # Persist the altered shed_data_manager_config file.
+            if data_manager_config_has_changes:
+                reload_count = self.app.data_managers._reload_count
+                self.data_manager_config_elems_to_xml_file( config_elems, shed_data_manager_conf_filename )
+                while self.app.data_managers._reload_count <= reload_count:
+                    time.sleep(1)  # Wait for shed_data_manager watcher thread to pick up changes
+        return rval
+
+    def remove_from_data_manager( self, repository ):
+        metadata_dict = repository.metadata
+        if metadata_dict and 'data_manager' in metadata_dict:
+            shed_data_manager_conf_filename = self.app.config.shed_data_manager_config_file
+            tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
+            if tree:
+                root = tree.getroot()
+                assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
+                guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).values() if 'guid' in data_manager_dict ]
+                load_old_data_managers_by_guid = {}
+                data_manager_config_has_changes = False
+                config_elems = []
+                for elem in root:
+                    # Match Data Manager elements by guid and installed_changeset_revision
+                    elem_matches_removed_data_manager = False
+                    if elem.tag == 'data_manager':
+                        guid = elem.get( 'guid', None )
+                        if guid in guids:
+                            tool_elem = elem.find( 'tool' )
+                            if tool_elem is not None:
+                                installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
+                                if installed_changeset_revision_elem is not None:
+                                    if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
+                                        elem_matches_removed_data_manager = True
+                                    else:
+                                        # This is a different version, which had been previously overridden
+                                        load_old_data_managers_by_guid[ guid ] = elem
+                    if elem_matches_removed_data_manager:
+                        data_manager_config_has_changes = True
+                    else:
+                        config_elems.append( elem )
+                # Remove the uninstalled data managers from memory.
+                self.app.data_managers.remove_manager( guids )
+                # Reload any other versions of the now-uninstalled data managers that they had overridden.
+                for elem in load_old_data_managers_by_guid.values():
+                    self.app.data_managers.load_manager_from_elem( elem )
+                # Persist the altered shed_data_manager_config file.
+                if data_manager_config_has_changes:
+                    self.data_manager_config_elems_to_xml_file( config_elems, shed_data_manager_conf_filename )
diff --git a/lib/tool_shed/galaxy_install/tools/tool_panel_manager.py b/lib/tool_shed/galaxy_install/tools/tool_panel_manager.py
new file mode 100644
index 0000000..89cad58
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/tools/tool_panel_manager.py
@@ -0,0 +1,476 @@
+import logging
+import threading
+
+from xml.etree import ElementTree as XmlET
+
+from tool_shed.util import basic_util
+from tool_shed.util import common_util
+from tool_shed.util import repository_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class ToolPanelManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def add_to_shed_tool_config( self, shed_tool_conf_dict, elem_list ):
+        """
+        "A tool shed repository is being installed so change the shed_tool_conf file.  Parse the
+        config file to generate the entire list of config_elems instead of using the in-memory list
+        since it will be a subset of the entire list if one or more repositories have been deactivated.
+        """
+        if not elem_list:
+            # We may have an empty elem_list in case a data manager is being installed.
+            # In that case we don't want to wait for a toolbox reload that will never happen.
+            return
+        shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
+        tool_path = shed_tool_conf_dict[ 'tool_path' ]
+        config_elems = []
+        tree, error_message = xml_util.parse_xml( shed_tool_conf )
+        if tree:
+            root = tree.getroot()
+            for elem in root:
+                config_elems.append( elem )
+            # Add the new elements to the in-memory list of config_elems.
+            for elem_entry in elem_list:
+                config_elems.append( elem_entry )
+            # Persist the altered shed_tool_config file.
+            toolbox = self.app.toolbox
+            self.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
+            self.app.wait_for_toolbox_reload(toolbox)
+
+    def add_to_tool_panel( self, repository_name, repository_clone_url, changeset_revision, repository_tools_tups, owner,
+                           shed_tool_conf, tool_panel_dict, new_install=True, tool_panel_section_mapping={} ):
+        """A tool shed repository is being installed or updated so handle tool panel alterations accordingly."""
+        # We need to change the in-memory version and the file system version of the shed_tool_conf file.
+        shed_tool_conf_dict = self.get_shed_tool_conf_dict( shed_tool_conf )
+        tool_panel_dict = self.update_tool_panel_dict( tool_panel_dict, tool_panel_section_mapping, repository_tools_tups )
+        # Generate the list of ElementTree Element objects for each section or tool.
+        elem_list = self.generate_tool_panel_elem_list( repository_name,
+                                                        repository_clone_url,
+                                                        changeset_revision,
+                                                        tool_panel_dict,
+                                                        repository_tools_tups,
+                                                        owner=owner )
+        if new_install:
+            tool_path = shed_tool_conf_dict['tool_path']
+            # Add the new elements to the shed_tool_conf file on disk.
+            config_elems = shed_tool_conf_dict['config_elems']
+            for config_elem in elem_list:
+                # Add the new elements to the in-memory list of config_elems.
+                config_elems.append(config_elem)
+                # Load the tools into the in-memory tool panel.
+                self.app.toolbox.load_item(
+                    config_elem,
+                    tool_path=tool_path,
+                    load_panel_dict=True,
+                    guid=config_elem.get('guid'),
+                )
+            # Replace the old list of in-memory config_elems with the new list for this shed_tool_conf_dict.
+            shed_tool_conf_dict['config_elems'] = config_elems
+            self.app.toolbox.update_shed_config( shed_tool_conf_dict )
+            self.add_to_shed_tool_config( shed_tool_conf_dict, elem_list )
+
+    def config_elems_to_xml_file( self, config_elems, config_filename, tool_path ):
+        """
+        Persist the current in-memory list of config_elems to a file named by the
+        value of config_filename.
+        """
+        lock = threading.Lock()
+        lock.acquire( True )
+        try:
+            fh = open( config_filename, 'wb' )
+            fh.write( '<?xml version="1.0"?>\n<toolbox tool_path="%s">\n' % str( tool_path ) )
+            for elem in config_elems:
+                fh.write( xml_util.xml_to_string( elem, use_indent=True ) )
+            fh.write( '</toolbox>\n' )
+            fh.close()
+        except Exception as e:
+            log.exception( "Exception in ToolPanelManager.config_elems_to_xml_file: %s" % str( e ) )
+        finally:
+            lock.release()
+
+    def generate_tool_elem( self, tool_shed, repository_name, changeset_revision, owner, tool_file_path,
+                            tool, tool_section ):
+        """Create and return an ElementTree tool Element."""
+        if tool_section is not None:
+            tool_elem = XmlET.SubElement( tool_section, 'tool' )
+        else:
+            tool_elem = XmlET.Element( 'tool' )
+        tool_elem.attrib[ 'file' ] = tool_file_path
+        if not tool.guid:
+            raise ValueError("tool has no guid")
+        tool_elem.attrib[ 'guid' ] = tool.guid
+        tool_shed_elem = XmlET.SubElement( tool_elem, 'tool_shed' )
+        tool_shed_elem.text = tool_shed
+        repository_name_elem = XmlET.SubElement( tool_elem, 'repository_name' )
+        repository_name_elem.text = repository_name
+        repository_owner_elem = XmlET.SubElement( tool_elem, 'repository_owner' )
+        repository_owner_elem.text = owner
+        changeset_revision_elem = XmlET.SubElement( tool_elem, 'installed_changeset_revision' )
+        changeset_revision_elem.text = changeset_revision
+        id_elem = XmlET.SubElement( tool_elem, 'id' )
+        id_elem.text = tool.id
+        version_elem = XmlET.SubElement( tool_elem, 'version' )
+        version_elem.text = tool.version
+        return tool_elem
+
+    def generate_tool_panel_dict_for_new_install( self, tool_dicts, tool_section=None ):
+        """
+        When installing a repository that contains tools, all tools must
+        currently be defined within the same tool section in the tool panel or
+        outside of any sections.
+        """
+        tool_panel_dict = {}
+        if tool_section:
+            section_id = tool_section.id
+            section_name = tool_section.name
+            section_version = tool_section.version or ''
+        else:
+            section_id = ''
+            section_name = ''
+            section_version = ''
+        for tool_dict in tool_dicts:
+            if tool_dict.get( 'add_to_tool_panel', True ):
+                guid = tool_dict[ 'guid' ]
+                tool_config = tool_dict[ 'tool_config' ]
+                tool_section_dict = dict( tool_config=tool_config, id=section_id, name=section_name, version=section_version )
+                if guid in tool_panel_dict:
+                    tool_panel_dict[ guid ].append( tool_section_dict )
+                else:
+                    tool_panel_dict[ guid ] = [ tool_section_dict ]
+        return tool_panel_dict
+
+    def generate_tool_panel_dict_for_tool_config( self, guid, tool_config, tool_sections=None ):
+        """
+        Create a dictionary of the following type for a single tool config file name.
+        The intent is to call this method for every tool config in a repository and
+        append each of these as entries to a tool panel dictionary for the repository.
+        This enables each tool to be loaded into a different section in the tool panel.
+        {<Tool guid> :
+           [{ tool_config : <tool_config_file>,
+              id: <ToolSection id>,
+              version : <ToolSection version>,
+              name : <TooSection name>}]}
+        """
+        tool_panel_dict = {}
+        file_name = basic_util.strip_path( tool_config )
+        tool_section_dicts = self.generate_tool_section_dicts( tool_config=file_name,
+                                                               tool_sections=tool_sections )
+        tool_panel_dict[ guid ] = tool_section_dicts
+        return tool_panel_dict
+
+    def generate_tool_panel_dict_from_shed_tool_conf_entries( self, repository ):
+        """
+        Keep track of the section in the tool panel in which this repository's
+        tools will be contained by parsing the shed_tool_conf in which the
+        repository's tools are defined and storing the tool panel definition
+        of each tool in the repository. This method is called only when the
+        repository is being deactivated or uninstalled and allows for
+        activation or re-installation using the original layout.
+        """
+        tool_panel_dict = {}
+        shed_tool_conf, tool_path, relative_install_dir = \
+            suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
+        metadata = repository.metadata
+        # Create a dictionary of tool guid and tool config file name for each tool in the repository.
+        guids_and_configs = {}
+        if 'tools' in metadata:
+            for tool_dict in metadata[ 'tools' ]:
+                guid = tool_dict[ 'guid' ]
+                tool_config = tool_dict[ 'tool_config' ]
+                file_name = basic_util.strip_path( tool_config )
+                guids_and_configs[ guid ] = file_name
+        # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
+        tree, error_message = xml_util.parse_xml( shed_tool_conf )
+        if tree is None:
+            return tool_panel_dict
+        root = tree.getroot()
+        for elem in root:
+            if elem.tag == 'tool':
+                guid = elem.get( 'guid' )
+                if guid in guids_and_configs:
+                    # The tool is displayed in the tool panel outside of any tool sections.
+                    tool_section_dict = dict( tool_config=guids_and_configs[ guid ], id='', name='', version='' )
+                    if guid in tool_panel_dict:
+                        tool_panel_dict[ guid ].append( tool_section_dict )
+                    else:
+                        tool_panel_dict[ guid ] = [ tool_section_dict ]
+            elif elem.tag == 'section':
+                section_id = elem.get( 'id' ) or ''
+                section_name = elem.get( 'name' ) or ''
+                section_version = elem.get( 'version' ) or ''
+                for section_elem in elem:
+                    if section_elem.tag == 'tool':
+                        guid = section_elem.get( 'guid' )
+                        if guid in guids_and_configs:
+                            # The tool is displayed in the tool panel inside the current tool section.
+                            tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
+                                                      id=section_id,
+                                                      name=section_name,
+                                                      version=section_version )
+                            if guid in tool_panel_dict:
+                                tool_panel_dict[ guid ].append( tool_section_dict )
+                            else:
+                                tool_panel_dict[ guid ] = [ tool_section_dict ]
+        return tool_panel_dict
+
+    def generate_tool_panel_elem_list( self, repository_name, repository_clone_url, changeset_revision,
+                                       tool_panel_dict, repository_tools_tups, owner='' ):
+        """Generate a list of ElementTree Element objects for each section or tool."""
+        elem_list = []
+        tool_elem = None
+        cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url )
+        if not owner:
+            owner = repository_util.get_repository_owner( cleaned_repository_clone_url )
+        tool_shed = cleaned_repository_clone_url.split( '/repos/' )[ 0 ].rstrip( '/' )
+        for guid, tool_section_dicts in tool_panel_dict.items():
+            for tool_section_dict in tool_section_dicts:
+                tool_section = None
+                inside_section = False
+                section_in_elem_list = False
+                if tool_section_dict[ 'id' ]:
+                    inside_section = True
+                    # Create a new section element only if we haven't already created it.
+                    for index, elem in enumerate( elem_list ):
+                        if elem.tag == 'section':
+                            section_id = elem.get( 'id', None )
+                            if section_id == tool_section_dict[ 'id' ]:
+                                section_in_elem_list = True
+                                tool_section = elem
+                                break
+                    if tool_section is None:
+                        tool_section = self.generate_tool_section_element_from_dict( tool_section_dict )
+                # Find the tuple containing the current guid from the list of repository_tools_tups.
+                for repository_tool_tup in repository_tools_tups:
+                    tool_file_path, tup_guid, tool = repository_tool_tup
+                    if tup_guid == guid:
+                        break
+                tool_elem = self.generate_tool_elem( tool_shed,
+                                                     repository_name,
+                                                     changeset_revision,
+                                                     owner,
+                                                     tool_file_path,
+                                                     tool,
+                                                     tool_section if inside_section else None )
+                if inside_section:
+                    if section_in_elem_list:
+                        elem_list[ index ] = tool_section
+                    else:
+                        elem_list.append( tool_section )
+                else:
+                    elem_list.append( tool_elem )
+        return elem_list
+
+    def generate_tool_section_dicts( self, tool_config=None, tool_sections=None ):
+        tool_section_dicts = []
+        if tool_config is None:
+            tool_config = ''
+        if tool_sections:
+            for tool_section in tool_sections:
+                # The value of tool_section will be None if the tool is displayed outside
+                # of any sections in the tool panel.
+                if tool_section:
+                    section_id = tool_section.id or ''
+                    section_version = tool_section.version or ''
+                    section_name = tool_section.name or ''
+                else:
+                    section_id = ''
+                    section_version = ''
+                    section_name = ''
+                tool_section_dicts.append( dict( tool_config=tool_config,
+                                                 id=section_id,
+                                                 version=section_version,
+                                                 name=section_name ) )
+        else:
+            tool_section_dicts.append( dict( tool_config=tool_config, id='', version='', name='' ) )
+        return tool_section_dicts
+
+    def generate_tool_section_element_from_dict( self, tool_section_dict ):
+        # The value of tool_section_dict looks like the following:
+        # { id: <ToolSection id>, version: <ToolSection version>, name: <ToolSection name> }
+        if tool_section_dict[ 'id' ]:
+            # Create a new tool section.
+            tool_section = XmlET.Element( 'section' )
+            tool_section.attrib[ 'id' ] = tool_section_dict[ 'id' ]
+            tool_section.attrib[ 'name' ] = tool_section_dict[ 'name' ]
+            tool_section.attrib[ 'version' ] = tool_section_dict[ 'version' ]
+        else:
+            tool_section = None
+        return tool_section
+
+    def get_or_create_tool_section( self, toolbox, tool_panel_section_id, new_tool_panel_section_label=None ):
+        return toolbox.get_section( section_id=tool_panel_section_id, new_label=new_tool_panel_section_label, create_if_needed=True )
+
+    def get_shed_tool_conf_dict( self, shed_tool_conf ):
+        """
+        Return the in-memory version of the shed_tool_conf file, which is stored in
+        the config_elems entry in the shed_tool_conf_dict associated with the file.
+        """
+        for shed_tool_conf_dict in self.app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
+            if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
+                return shed_tool_conf_dict
+            else:
+                file_name = basic_util.strip_path( shed_tool_conf_dict[ 'config_filename' ] )
+                if shed_tool_conf == file_name:
+                    return shed_tool_conf_dict
+
+    def handle_tool_panel_section( self, toolbox, tool_panel_section_id=None, new_tool_panel_section_label=None ):
+        """Return a ToolSection object retrieved from the current in-memory tool_panel."""
+        # If tool_panel_section_id is received, the section exists in the tool panel.  In this
+        # case, the value of the received tool_panel_section_id must be the id retrieved from a
+        # tool panel config (e.g., tool_conf.xml, which may have gettext).  If new_tool_panel_section_label
+        # is received, a new section will be added to the tool panel.
+        if new_tool_panel_section_label:
+            section_id = str( new_tool_panel_section_label.lower().replace( ' ', '_' ) )
+            tool_panel_section_key, tool_section = \
+                self.get_or_create_tool_section( toolbox,
+                                                 tool_panel_section_id=section_id,
+                                                 new_tool_panel_section_label=new_tool_panel_section_label )
+        elif tool_panel_section_id:
+            tool_panel_section_key, tool_section = toolbox.get_section( tool_panel_section_id )
+        else:
+            return None, None
+        return tool_panel_section_key, tool_section
+
+    def handle_tool_panel_selection( self, toolbox, metadata, no_changes_checked, tool_panel_section_id,
+                                     new_tool_panel_section_label ):
+        """
+        Handle the selected tool panel location for loading tools included in
+        tool shed repositories when installing or reinstalling them.
+        """
+        # Get the location in the tool panel in which each tool was originally loaded.
+        tool_section = None
+        tool_panel_section_key = None
+        if 'tools' in metadata:
+            # This forces everything to be loaded into the same section (or no section)
+            # in the tool panel.
+            if no_changes_checked:
+                # Make sure the no_changes check box overrides the new_tool_panel_section_label
+                # if the user checked the check box and entered something into the field.
+                new_tool_panel_section_label = None
+                if 'tool_panel_section' in metadata:
+                    tool_panel_dict = metadata[ 'tool_panel_section' ]
+                    if not tool_panel_dict:
+                        tool_panel_dict = self.generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+                else:
+                    tool_panel_dict = self.generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+                if tool_panel_dict:
+                    # The tool_panel_dict is empty only when tools exist but are not loaded into a tool panel section.
+                    tool_section_dicts = tool_panel_dict[ next(iter(tool_panel_dict)) ]
+                    tool_section_dict = tool_section_dicts[ 0 ]
+                    original_section_id = tool_section_dict[ 'id' ]
+                    if original_section_id:
+                        tool_panel_section_key, tool_section = \
+                            self.get_or_create_tool_section( toolbox,
+                                                             tool_panel_section_id=original_section_id,
+                                                             new_tool_panel_section_label=new_tool_panel_section_label )
+            else:
+                # The user elected to change the tool panel section to contain the tools.
+                tool_panel_section_key, tool_section = \
+                    self.handle_tool_panel_section( toolbox,
+                                                    tool_panel_section_id=tool_panel_section_id,
+                                                    new_tool_panel_section_label=new_tool_panel_section_label )
+        return tool_section, tool_panel_section_key
+
+    def remove_from_shed_tool_config( self, shed_tool_conf_dict, guids_to_remove ):
+        """
+        A tool shed repository is being uninstalled so change the
+        shed_tool_conf file. Parse the config file to generate the entire list
+        of config_elems instead of using the in-memory list since it will be a
+        subset of the entire list if one or more repositories have been
+        deactivated.
+        """
+        shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
+        tool_path = shed_tool_conf_dict[ 'tool_path' ]
+        config_elems = []
+        tree, error_message = xml_util.parse_xml( shed_tool_conf )
+        if tree:
+            root = tree.getroot()
+            for elem in root:
+                config_elems.append( elem )
+            config_elems_to_remove = []
+            for config_elem in config_elems:
+                if config_elem.tag == 'section':
+                    tool_elems_to_remove = []
+                    for tool_elem in config_elem:
+                        if tool_elem.get( 'guid' ) in guids_to_remove:
+                            tool_elems_to_remove.append( tool_elem )
+                    for tool_elem in tool_elems_to_remove:
+                        # Remove all of the appropriate tool sub-elements from the section element.
+                        config_elem.remove( tool_elem )
+                    if len( config_elem ) < 1:
+                        # Keep a list of all empty section elements so they can be removed.
+                        config_elems_to_remove.append( config_elem )
+                elif config_elem.tag == 'tool':
+                    if config_elem.get( 'guid' ) in guids_to_remove:
+                        config_elems_to_remove.append( config_elem )
+            for config_elem in config_elems_to_remove:
+                config_elems.remove( config_elem )
+            # Persist the altered in-memory version of the tool config.
+            self.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
+
+    def remove_repository_contents( self, repository, shed_tool_conf, uninstall ):
+        """
+        A tool shed repository is being deactivated or uninstalled, so handle
+        tool panel alterations accordingly.
+        """
+        # Determine where the tools are currently defined in the tool panel and store this
+        # information so the tools can be displayed in the same way when the repository is
+        # activated or reinstalled.
+        tool_panel_dict = self.generate_tool_panel_dict_from_shed_tool_conf_entries( repository )
+        repository.metadata[ 'tool_panel_section' ] = tool_panel_dict
+        self.app.install_model.context.add( repository )
+        self.app.install_model.context.flush()
+        # Create a list of guids for all tools that will be removed from the in-memory tool panel
+        # and config file on disk.
+        guids_to_remove = list(tool_panel_dict.keys())
+        self.remove_guids( guids_to_remove, shed_tool_conf, uninstall )
+
+    def remove_guids( self, guids_to_remove, shed_tool_conf, uninstall ):
+        toolbox = self.app.toolbox
+        # Remove the tools from the toolbox's tools_by_id dictionary.
+        for guid_to_remove in guids_to_remove:
+            # Pass remove_from_panel=False; that logic is handled below.
+            toolbox.remove_tool_by_id( guid_to_remove, remove_from_panel=False )
+        shed_tool_conf_dict = self.get_shed_tool_conf_dict( shed_tool_conf )
+        # Always remove the entries from the shed_tool_conf file on disk. This used to happen only on
+        # uninstall; it is unclear whether there is a legitimate use for skipping it.
+        self.remove_from_shed_tool_config( shed_tool_conf_dict, guids_to_remove )
+
+    def update_tool_panel_dict( self, tool_panel_dict, tool_panel_section_mapping, repository_tools_tups ):
+        for tool_guid in tool_panel_dict:
+            if tool_guid not in tool_panel_section_mapping:
+                continue
+            for idx, tool in enumerate( tool_panel_dict[ tool_guid ] ):
+                section_name = tool_panel_section_mapping[ tool_guid ][ 'tool_panel_section' ]
+                section_id = str( tool_panel_section_mapping[ tool_guid ][ 'tool_panel_section' ].lower().replace( ' ', '_' ) )
+                tool_panel_dict[ tool_guid ][ idx ][ 'name' ] = section_name
+                tool_panel_dict[ tool_guid ][ idx ][ 'id' ] = section_id
+        return tool_panel_dict
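
For orientation while reviewing this hunk: the dictionary these methods build maps
each tool GUID to a list of section descriptors, and generate_tool_elem() serializes
one such entry into a shed_tool_conf <tool> element. A minimal sketch with
hypothetical values (the GUID, file name, and revision below are illustrative only):

    tool_panel_dict = {
        'toolshed.g2.bx.psu.edu/repos/owner/repo/tool_id/1.0.0': [
            dict( tool_config='tool.xml',  # file name only; directories are stripped
                  id='get_data',           # '' when the tool lives outside any section
                  name='Get Data',
                  version='' ),
        ],
    }

    # Roughly the element generate_tool_elem() emits for that entry:
    # <tool file="tool.xml" guid="toolshed.g2.bx.psu.edu/repos/owner/repo/tool_id/1.0.0">
    #     <tool_shed>toolshed.g2.bx.psu.edu</tool_shed>
    #     <repository_name>repo</repository_name>
    #     <repository_owner>owner</repository_owner>
    #     <installed_changeset_revision>abc123</installed_changeset_revision>
    #     <id>tool_id</id>
    #     <version>1.0.0</version>
    # </tool>
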
diff --git a/lib/tool_shed/galaxy_install/update_repository_manager.py b/lib/tool_shed/galaxy_install/update_repository_manager.py
new file mode 100644
index 0000000..3846f30
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/update_repository_manager.py
@@ -0,0 +1,148 @@
+"""
+Determine if installed tool shed repositories have updates available in their respective tool sheds.
+"""
+import logging
+import threading
+
+from sqlalchemy import false
+
+import tool_shed.util.shed_util_common as suc
+from galaxy import util
+from galaxy.util.postfork import register_postfork_function
+from tool_shed.util import common_util
+from tool_shed.util import encoding_util
+from tool_shed.util import repository_util
+
+log = logging.getLogger( __name__ )
+
+
+class UpdateRepositoryManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.context = self.app.install_model.context
+        # Ideally only one Galaxy server process should be able to check for repository updates.
+        if self.app.config.enable_tool_shed_check:
+            self.running = True
+            self.sleeper = Sleeper()
+            self.restarter = threading.Thread( target=self.__restarter )
+            self.restarter.daemon = True
+            register_postfork_function(self.restarter.start)
+            self.seconds_to_sleep = int( app.config.hours_between_check * 3600 )
+
+    def get_update_to_changeset_revision_and_ctx_rev( self, repository ):
+        """Return the changeset revision hash to which the repository can be updated."""
+        changeset_revision_dict = {}
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, str( repository.tool_shed ) )
+        params = dict( name=str( repository.name ),
+                       owner=str( repository.owner ),
+                       changeset_revision=str( repository.installed_changeset_revision ) )
+        pathspec = [ 'repository', 'get_changeset_revision_and_ctx_rev' ]
+        default_values = dict( includes_data_managers=False,
+                               includes_datatypes=False,
+                               includes_tools=False,
+                               includes_tools_for_display_in_tool_panel=False,
+                               includes_tool_dependencies=False,
+                               includes_workflows=False,
+                               has_repository_dependencies=False,
+                               has_repository_dependencies_only_if_compiling_contained_td=False,
+                               changeset_revision=None,
+                               ctx_rev=None )
+        try:
+            encoded_update_dict = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+            # Guard against an empty response so every key is populated with its default
+            # instead of referencing variables that were never assigned.
+            if encoded_update_dict:
+                update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
+            else:
+                update_dict = {}
+            for key, default_value in default_values.items():
+                changeset_revision_dict[ key ] = update_dict.get( key, default_value )
+        except Exception as e:
+            log.debug( "Error getting changeset revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) )
+            changeset_revision_dict.update( default_values )
+        return changeset_revision_dict
+
+    def __restarter( self ):
+        log.info( 'Update repository manager restarter starting up...' )
+        while self.running:
+            # Make a call to the Tool Shed for each installed repository to get the latest
+            # status information in the Tool Shed for the repository.  This information includes
+            # items like newer installable repository revisions, current revision updates, whether
+            # the repository revision is the latest installable revision, and whether the repository
+            # has been deprecated in the Tool Shed.
+            for repository in self.context.query( self.app.install_model.ToolShedRepository ) \
+                                          .filter( self.app.install_model.ToolShedRepository.table.c.deleted == false() ):
+                tool_shed_status_dict = repository_util.get_tool_shed_status_for_installed_repository( self.app, repository )
+                if tool_shed_status_dict:
+                    if tool_shed_status_dict != repository.tool_shed_status:
+                        repository.tool_shed_status = tool_shed_status_dict
+                        self.context.flush()
+                else:
+                    # The received tool_shed_status_dict is an empty dictionary, so coerce to None.
+                    tool_shed_status_dict = None
+                    if tool_shed_status_dict != repository.tool_shed_status:
+                        repository.tool_shed_status = tool_shed_status_dict
+                        self.context.flush()
+            self.sleeper.sleep( self.seconds_to_sleep )
+        log.info( 'Update repository manager restarter shutting down...' )
+
+    def shutdown( self ):
+        if self.app.config.enable_tool_shed_check:
+            self.running = False
+            self.sleeper.wake()
+
+    def update_repository_record( self, repository, updated_metadata_dict, updated_changeset_revision, updated_ctx_rev ):
+        """
+        Update a tool_shed_repository database record with new information retrieved from the
+        Tool Shed.  This happens when updating an installed repository to a new changeset revision.
+        """
+        repository.metadata = updated_metadata_dict
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, repository.tool_shed )
+        suc.clean_dependency_relationships(self.app, updated_metadata_dict, repository, tool_shed_url)
+        # Update the repository.changeset_revision column in the database.
+        repository.changeset_revision = updated_changeset_revision
+        repository.ctx_rev = updated_ctx_rev
+        # Update the repository.tool_shed_status column in the database.
+        tool_shed_status_dict = repository_util.get_tool_shed_status_for_installed_repository( self.app, repository )
+        if tool_shed_status_dict:
+            repository.tool_shed_status = tool_shed_status_dict
+        else:
+            repository.tool_shed_status = None
+        self.app.install_model.context.add( repository )
+        self.app.install_model.context.flush()
+        self.app.install_model.context.refresh( repository )
+        return repository
+
+
+class Sleeper( object ):
+    """
+    Provides a 'sleep' method that sleeps for a number of seconds *unless* the 'wake' method
+    is called (from a different thread).
+    """
+
+    def __init__( self ):
+        self.condition = threading.Condition()
+
+    def sleep( self, seconds ):
+        self.condition.acquire()
+        self.condition.wait( seconds )
+        self.condition.release()
+
+    def wake( self ):
+        self.condition.acquire()
+        self.condition.notify()
+        self.condition.release()
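
The Sleeper above is a thin wrapper around threading.Condition: wait() returns early
when another thread calls notify(). A self-contained sketch of the same
interruptible-sleep pattern (the worker loop is illustrative, not part of this diff):

    import threading

    condition = threading.Condition()
    running = True

    def worker():
        while running:
            # ... do one cycle of periodic work here ...
            with condition:
                condition.wait( 3600 )  # returns early if notified

    thread = threading.Thread( target=worker )
    thread.daemon = True
    thread.start()

    # At shutdown, from another thread:
    running = False
    with condition:
        condition.notify()
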
diff --git a/lib/tool_shed/galaxy_install/utility_containers/__init__.py b/lib/tool_shed/galaxy_install/utility_containers/__init__.py
new file mode 100644
index 0000000..302679d
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/utility_containers/__init__.py
@@ -0,0 +1,144 @@
+import logging
+import threading
+
+from tool_shed.utility_containers import utility_container_manager
+
+log = logging.getLogger( __name__ )
+
+
+class GalaxyUtilityContainerManager( utility_container_manager.UtilityContainerManager ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def build_repository_containers( self, repository, datatypes, invalid_tools, missing_repository_dependencies,
+                                     missing_tool_dependencies, readme_files_dict, repository_dependencies,
+                                     tool_dependencies, valid_tools, workflows, valid_data_managers,
+                                     invalid_data_managers, data_managers_errors, new_install=False,
+                                     reinstalling=False ):
+        """
+        Return a dictionary of containers for the received repository's dependencies and readme files for
+        display during installation to Galaxy.
+        """
+        containers_dict = dict( datatypes=None,
+                                invalid_tools=None,
+                                missing_tool_dependencies=None,
+                                readme_files=None,
+                                repository_dependencies=None,
+                                missing_repository_dependencies=None,
+                                tool_dependencies=None,
+                                valid_tools=None,
+                                workflows=None,
+                                valid_data_managers=None,
+                                invalid_data_managers=None )
+        # Some of the tool dependency folders will include links to display tool dependency
+        # information, and some of these links require the repository id.  However, we need
+        # to be careful because the repository object is sometimes None.
+        if repository:
+            repository_id = repository.id
+            changeset_revision = repository.changeset_revision
+        else:
+            repository_id = None
+            changeset_revision = None
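+        # Note that this lock is local to the current call, so it does not serialize
+        # concurrent invocations; it only brackets the folder-building work below.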
+        lock = threading.Lock()
+        lock.acquire( True )
+        try:
+            folder_id = 0
+            # Datatypes container.
+            if datatypes:
+                folder_id, datatypes_root_folder = self.build_datatypes_folder( folder_id, datatypes )
+                containers_dict[ 'datatypes' ] = datatypes_root_folder
+            # Invalid tools container.
+            if invalid_tools:
+                folder_id, invalid_tools_root_folder = \
+                    self.build_invalid_tools_folder( folder_id,
+                                                     invalid_tools,
+                                                     changeset_revision,
+                                                     repository=repository,
+                                                     label='Invalid tools' )
+                containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
+            # Readme files container.
+            if readme_files_dict:
+                folder_id, readme_files_root_folder = self.build_readme_files_folder( folder_id, readme_files_dict )
+                containers_dict[ 'readme_files' ] = readme_files_root_folder
+            # Installed repository dependencies container.
+            if repository_dependencies:
+                if new_install:
+                    label = 'Repository dependencies'
+                else:
+                    label = 'Installed repository dependencies'
+                folder_id, repository_dependencies_root_folder = \
+                    self.build_repository_dependencies_folder( folder_id=folder_id,
+                                                               repository_dependencies=repository_dependencies,
+                                                               label=label,
+                                                               installed=True )
+                containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+            # Missing repository dependencies container.
+            if missing_repository_dependencies:
+                folder_id, missing_repository_dependencies_root_folder = \
+                    self.build_repository_dependencies_folder( folder_id=folder_id,
+                                                               repository_dependencies=missing_repository_dependencies,
+                                                               label='Missing repository dependencies',
+                                                               installed=False )
+                containers_dict[ 'missing_repository_dependencies' ] = missing_repository_dependencies_root_folder
+            # Installed tool dependencies container.
+            if tool_dependencies:
+                if new_install:
+                    label = 'Tool dependencies'
+                else:
+                    label = 'Installed tool dependencies'
+                # We only want to display the Status column if the tool_dependency is missing.
+                folder_id, tool_dependencies_root_folder = \
+                    self.build_tool_dependencies_folder( folder_id,
+                                                         tool_dependencies,
+                                                         label=label,
+                                                         missing=False,
+                                                         new_install=new_install,
+                                                         reinstalling=reinstalling )
+                containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+            # Missing tool dependencies container.
+            if missing_tool_dependencies:
+                # We only want to display the Status column if the tool_dependency is missing.
+                folder_id, missing_tool_dependencies_root_folder = \
+                    self.build_tool_dependencies_folder( folder_id,
+                                                         missing_tool_dependencies,
+                                                         label='Missing tool dependencies',
+                                                         missing=True,
+                                                         new_install=new_install,
+                                                         reinstalling=reinstalling )
+                containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
+            # Valid tools container.
+            if valid_tools:
+                folder_id, valid_tools_root_folder = self.build_tools_folder( folder_id,
+                                                                              valid_tools,
+                                                                              repository,
+                                                                              changeset_revision,
+                                                                              label='Valid tools' )
+                containers_dict[ 'valid_tools' ] = valid_tools_root_folder
+            # Workflows container.
+            if workflows:
+                folder_id, workflows_root_folder = \
+                    self.build_workflows_folder( folder_id=folder_id,
+                                                 workflows=workflows,
+                                                 repository_metadata_id=None,
+                                                 repository_id=repository_id,
+                                                 label='Workflows' )
+                containers_dict[ 'workflows' ] = workflows_root_folder
+            if valid_data_managers:
+                folder_id, valid_data_managers_root_folder = \
+                    self.build_data_managers_folder( folder_id=folder_id,
+                                                     data_managers=valid_data_managers,
+                                                     label='Valid Data Managers' )
+                containers_dict[ 'valid_data_managers' ] = valid_data_managers_root_folder
+            if invalid_data_managers or data_managers_errors:
+                folder_id, invalid_data_managers_root_folder = \
+                    self.build_invalid_data_managers_folder( folder_id=folder_id,
+                                                             data_managers=invalid_data_managers,
+                                                             error_messages=data_managers_errors,
+                                                             label='Invalid Data Managers' )
+                containers_dict[ 'invalid_data_managers' ] = invalid_data_managers_root_folder
+        except Exception as e:
+            log.debug( "Exception in build_repository_containers: %s" % str( e ) )
+        finally:
+            lock.release()
+        return containers_dict
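
The containers_dict returned above always carries the same eleven keys; only the
containers that were actually built are non-None. A hedged sketch of the consuming
side (the helper name below is ours, not Galaxy's):

    def iter_populated_containers( containers_dict ):
        """Yield ( name, root_folder ) pairs for containers that were built."""
        for container_name, root_folder in containers_dict.items():
            if root_folder is not None:
                yield container_name, root_folder
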
diff --git a/lib/tool_shed/grids/__init__.py b/lib/tool_shed/grids/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/grids/admin_grids.py b/lib/tool_shed/grids/admin_grids.py
new file mode 100644
index 0000000..da1c9f0
--- /dev/null
+++ b/lib/tool_shed/grids/admin_grids.py
@@ -0,0 +1,512 @@
+import logging
+
+from markupsafe import escape
+from sqlalchemy import and_
+
+from galaxy.web.framework.helpers import grids, time_ago
+from galaxy.webapps.tool_shed import model
+from tool_shed.grids.repository_grids import CategoryGrid, RepositoryGrid
+from tool_shed.util import hg_util
+
+log = logging.getLogger( __name__ )
+
+
+class UserGrid( grids.Grid ):
+
+    class UserLoginColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, user ):
+            return escape( user.email )
+
+    class UserNameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, user ):
+            if user.username:
+                return escape( user.username )
+            return 'not set'
+
+    class GroupsColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, user ):
+            if user.groups:
+                return len( user.groups )
+            return 0
+
+    class RolesColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, user ):
+            if user.roles:
+                return len( user.roles )
+            return 0
+
+    class ExternalColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, user ):
+            if user.external:
+                return 'yes'
+            return 'no'
+
+    class LastLoginColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, user ):
+            if user.galaxy_sessions:
+                return self.format( user.galaxy_sessions[ 0 ].update_time )
+            return 'never'
+
+    class StatusColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, user ):
+            if user.purged:
+                return "purged"
+            elif user.deleted:
+                return "deleted"
+            return ""
+
+    class EmailColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'All':
+                return query
+            return query.filter( and_( model.Tool.table.c.user_id == model.User.table.c.id,
+                                       model.User.table.c.email == column_filter ) )
+
+    title = "Users"
+    model_class = model.User
+    template = '/admin/user/grid.mako'
+    default_sort_key = "email"
+    columns = [
+        UserLoginColumn( "Email",
+                         key="email",
+                         link=( lambda item: dict( operation="information", id=item.id ) ),
+                         attach_popup=True,
+                         filterable="advanced" ),
+        UserNameColumn( "User Name",
+                        key="username",
+                        attach_popup=False,
+                        filterable="advanced" ),
+        GroupsColumn( "Groups", attach_popup=False ),
+        RolesColumn( "Roles", attach_popup=False ),
+        ExternalColumn( "External", attach_popup=False ),
+        LastLoginColumn( "Last Login", format=time_ago ),
+        StatusColumn( "Status", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        EmailColumn( "Email",
+                     key="email",
+                     visible=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( "Create new user",
+                          dict( controller='admin', action='users', operation='create' ) )
+    ]
+    operations = [
+        grids.GridOperation( "Manage Roles and Groups",
+                             condition=( lambda item: not item.deleted ),
+                             allow_multiple=False,
+                             url_args=dict( action="manage_roles_and_groups_for_user" ) ),
+        grids.GridOperation( "Reset Password",
+                             condition=( lambda item: not item.deleted ),
+                             allow_multiple=True,
+                             allow_popup=False,
+                             url_args=dict( action="reset_user_password" ) )
+    ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True, purged=False ) ),
+        grids.GridColumnFilter( "Purged", args=dict( purged=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+    ]
+
+    use_paging = False
+
+    def get_current_item( self, trans, **kwargs ):
+        return trans.user
+
+
+class RoleGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, role ):
+            return escape( str( role.name ) )
+
+    class DescriptionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, role ):
+            if role.description:
+                return str( role.description )
+            return ''
+
+    class TypeColumn( grids.TextColumn ):
+        def get_value( self, trans, grid, role ):
+            return str( role.type )
+
+    class StatusColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, role ):
+            if role.deleted:
+                return "deleted"
+            return ""
+
+    class GroupsColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, role ):
+            if role.groups:
+                return len( role.groups )
+            return 0
+
+    class RepositoriesColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, role ):
+            if role.repositories:
+                return len( role.repositories )
+            return 0
+
+    class UsersColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, role ):
+            if role.users:
+                return len( role.users )
+            return 0
+
+    title = "Roles"
+    model_class = model.Role
+    template = '/admin/dataset_security/role/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: dict( operation="Manage role associations", id=item.id ) ),
+                    attach_popup=True,
+                    filterable="advanced" ),
+        DescriptionColumn( "Description",
+                           key='description',
+                           attach_popup=False,
+                           filterable="advanced" ),
+        GroupsColumn( "Groups", attach_popup=False ),
+        RepositoriesColumn( "Repositories", attach_popup=False ),
+        UsersColumn( "Users", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted",
+                             key="deleted",
+                             visible=False,
+                             filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( "Add new role",
+                          dict( controller='admin', action='roles', operation='create' ) )
+    ]
+    # Repository admin roles currently do not have any operations since they are managed automatically based
+    # on other events.  For example, if a repository is renamed, its associated admin role is automatically
+    # renamed accordingly, and if a repository is deleted, its associated admin role is automatically deleted.
+    operations = [ grids.GridOperation( "Rename",
+                                        condition=( lambda item: not item.deleted and not item.is_repository_admin_role ),
+                                        allow_multiple=False,
+                                        url_args=dict( action="rename_role" ) ),
+                   grids.GridOperation( "Delete",
+                                        condition=( lambda item: not item.deleted and not item.is_repository_admin_role ),
+                                        allow_multiple=True,
+                                        url_args=dict( action="mark_role_deleted" ) ),
+                   grids.GridOperation( "Undelete",
+                                        condition=( lambda item: item.deleted and not item.is_repository_admin_role ),
+                                        allow_multiple=True,
+                                        url_args=dict( action="undelete_role" ) ),
+                   grids.GridOperation( "Purge",
+                                        condition=( lambda item: item.deleted and not item.is_repository_admin_role ),
+                                        allow_multiple=True,
+                                        url_args=dict( action="purge_role" ) ) ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+    ]
+
+    use_paging = False
+
+    def apply_query_filter( self, trans, query, **kwd ):
+        return query.filter( model.Role.type != model.Role.types.PRIVATE )
+
+
+class GroupGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, group ):
+            return str( group.name )
+
+    class StatusColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, group ):
+            if group.deleted:
+                return "deleted"
+            return ""
+
+    class RolesColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, group ):
+            if group.roles:
+                return len( group.roles )
+            return 0
+
+    class UsersColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, group ):
+            if group.members:
+                return len( group.members )
+            return 0
+
+    title = "Groups"
+    model_class = model.Group
+    template = '/admin/dataset_security/group/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: dict( operation="Manage users and roles", id=item.id ) ),
+                    attach_popup=True ),
+        UsersColumn( "Users", attach_popup=False ),
+        RolesColumn( "Roles", attach_popup=False ),
+        StatusColumn( "Status", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted",
+                             key="deleted",
+                             visible=False,
+                             filterable="advanced" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    global_actions = [
+        grids.GridAction( "Add new group",
+                          dict( controller='admin', action='groups', operation='create' ) )
+    ]
+    operations = [ grids.GridOperation( "Rename",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=False,
+                                        url_args=dict( action="rename_group" ) ),
+                   grids.GridOperation( "Delete",
+                                        condition=( lambda item: not item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( action="mark_group_deleted" ) ),
+                   grids.GridOperation( "Undelete",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( action="undelete_group" ) ),
+                   grids.GridOperation( "Purge",
+                                        condition=( lambda item: item.deleted ),
+                                        allow_multiple=True,
+                                        url_args=dict( action="purge_group" ) ) ]
+    standard_filters = [
+        grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+        grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
+        grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+    ]
+
+    use_paging = False
+
+
+class ManageCategoryGrid( CategoryGrid ):
+    columns = [ col for col in CategoryGrid.columns ]
+    # Override the NameColumn to include an Edit link
+    columns[ 0 ] = CategoryGrid.NameColumn( "Name",
+                                            key="Category.name",
+                                            link=( lambda item: dict( operation="Edit", id=item.id ) ),
+                                            model_class=model.Category,
+                                            attach_popup=False )
+    global_actions = [
+        grids.GridAction( "Add new category",
+                          dict( controller='admin', action='manage_categories', operation='create' ) )
+    ]
+
+
+class AdminRepositoryGrid( RepositoryGrid ):
+
+    class DeletedColumn( grids.BooleanColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            if repository.deleted:
+                return 'yes'
+            return ''
+
+    columns = [ RepositoryGrid.NameColumn( "Name",
+                                           key="name",
+                                           link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                           attach_popup=True ),
+                RepositoryGrid.HeadsColumn( "Heads" ),
+                RepositoryGrid.UserColumn( "Owner",
+                                           model_class=model.User,
+                                           link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+                                           attach_popup=False,
+                                           key="User.username" ),
+                RepositoryGrid.DeprecatedColumn( "Deprecated", key="deprecated", attach_popup=False ),
+                # Columns that are valid for filtering but are not visible.
+                DeletedColumn( "Deleted", key="deleted", attach_popup=False ) ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [ operation for operation in RepositoryGrid.operations ]
+    operations.append( grids.GridOperation( "Delete",
+                                            allow_multiple=False,
+                                            condition=( lambda item: not item.deleted ),
+                                            async_compatible=False ) )
+    operations.append( grids.GridOperation( "Undelete",
+                                            allow_multiple=False,
+                                            condition=( lambda item: item.deleted ),
+                                            async_compatible=False ) )
+    standard_filters = []
+    default_filter = {}
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.Repository ) \
+                               .join( model.User.table )
+
+
+class RepositoryMetadataGrid( grids.Grid ):
+
+    class IdColumn( grids.IntegerColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            return repository_metadata.id
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            return escape( repository_metadata.repository.name )
+
+    class OwnerColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            return escape( repository_metadata.repository.user.username )
+
+    class RevisionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            repository = repository_metadata.repository
+            return hg_util.get_revision_label( trans.app,
+                                               repository,
+                                               repository_metadata.changeset_revision,
+                                               include_date=True,
+                                               include_hash=True )
+
+    class ToolsColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            tools_str = '0'
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    if 'tools' in metadata:
+                        # We used to display the following, but grid was too cluttered.
+                        # for tool_metadata_dict in metadata[ 'tools' ]:
+                        #    tools_str += '%s <b>%s</b><br/>' % ( tool_metadata_dict[ 'id' ], tool_metadata_dict[ 'version' ] )
+                        return '%d' % len( metadata[ 'tools' ] )
+            return tools_str
+
+    class DatatypesColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            datatypes_str = '0'
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    if 'datatypes' in metadata:
+                        # We used to display the following, but grid was too cluttered.
+                        # for datatype_metadata_dict in metadata[ 'datatypes' ]:
+                        #    datatypes_str += '%s<br/>' % datatype_metadata_dict[ 'extension' ]
+                        return '%d' % len( metadata[ 'datatypes' ] )
+            return datatypes_str
+
+    class WorkflowsColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            workflows_str = '0'
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    if 'workflows' in metadata:
+                        # We used to display the following, but grid was too cluttered.
+                        # workflows_str += '<b>Workflows:</b><br/>'
+                        # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
+                        # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+                        # workflow_tups = metadata[ 'workflows' ]
+                        # workflow_metadata_dicts = [ workflow_tup[1] for workflow_tup in workflow_tups ]
+                        # for workflow_metadata_dict in workflow_metadata_dicts:
+                        #    workflows_str += '%s<br/>' % workflow_metadata_dict[ 'name' ]
+                        return '%d' % len( metadata[ 'workflows' ] )
+            return workflows_str
+
+    class DeletedColumn( grids.BooleanColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.repository.deleted:
+                return 'yes'
+            return ''
+
+    class DeprecatedColumn( grids.BooleanColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.repository.deprecated:
+                return 'yes'
+            return ''
+
+    class MaliciousColumn( grids.BooleanColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.malicious:
+                return 'yes'
+            return ''
+
+    # Grid definition
+    title = "Repository Metadata"
+    model_class = model.RepositoryMetadata
+    template = '/webapps/tool_shed/repository/grid.mako'
+    default_sort_key = "name"
+    use_hide_message = False
+    columns = [
+        IdColumn( "Id",
+                  visible=False,
+                  attach_popup=False ),
+        NameColumn( "Name",
+                    key="name",
+                    model_class=model.Repository,
+                    link=( lambda item: dict( operation="view_or_manage_repository_revision", id=item.id ) ),
+                    attach_popup=True ),
+        OwnerColumn( "Owner", attach_popup=False ),
+        RevisionColumn( "Revision", attach_popup=False ),
+        ToolsColumn( "Tools", attach_popup=False ),
+        DatatypesColumn( "Datatypes", attach_popup=False ),
+        WorkflowsColumn( "Workflows", attach_popup=False ),
+        DeletedColumn( "Deleted", attach_popup=False ),
+        DeprecatedColumn( "Deprecated", attach_popup=False ),
+        MaliciousColumn( "Malicious", attach_popup=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [ grids.GridOperation( "Delete",
+                                        allow_multiple=False,
+                                        allow_popup=True,
+                                        async_compatible=False,
+                                        confirm="Repository metadata records cannot be recovered after they are deleted. Click OK to delete the selected items." ) ]
+    standard_filters = []
+    default_filter = {}
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.RepositoryMetadata ) \
+                               .join( model.Repository.table )
diff --git a/lib/tool_shed/grids/repository_grid_filter_manager.py b/lib/tool_shed/grids/repository_grid_filter_manager.py
new file mode 100644
index 0000000..510e843
--- /dev/null
+++ b/lib/tool_shed/grids/repository_grid_filter_manager.py
@@ -0,0 +1,49 @@
+import logging
+
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger( __name__ )
+
+
+class RepositoryGridFilterManager( object ):
+    """Provides filtered views of the many Tool SHed repository grids."""
+
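+    # Bunch is a simple container that allows attribute-style access, so these
+    # constants can be referenced as, e.g., self.filters.SUITES.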
+    filters = Bunch( CERTIFIED_LEVEL_ONE='certified_level_one',
+                     CERTIFIED_LEVEL_TWO='certified_level_two',
+                     CERTIFIED_LEVEL_ONE_SUITES='certified_level_one_suites',
+                     CERTIFIED_LEVEL_TWO_SUITES='certified_level_two_suites',
+                     SUITES='suites' )
+
+    def get_grid_title( self, trans, trailing_string='', default='' ):
+        filter = self.get_filter( trans )
+        if filter == self.filters.CERTIFIED_LEVEL_ONE:
+            return "Certified 1 Repositories %s" % trailing_string
+        if filter == self.filters.CERTIFIED_LEVEL_TWO:
+            return "Certified 2 Repositories %s" % trailing_string
+        if filter == self.filters.CERTIFIED_LEVEL_ONE_SUITES:
+            return "Certified 1 Repository Suites %s" % trailing_string
+        if filter == self.filters.CERTIFIED_LEVEL_TWO_SUITES:
+            return "Certified 2 Repository Suites %s" % trailing_string
+        if filter == self.filters.SUITES:
+            return "Repository Suites %s" % trailing_string
+        return "%s %s" % ( default, trailing_string )
+
+    def get_filter( self, trans ):
+        filter = trans.get_cookie( name='toolshedrepogridfilter' )
+        return filter or None
+
+    def is_valid_filter( self, filter ):
+        if filter is None:
+            return True
+        for valid_key, valid_filter in self.filters.items():
+            if filter == valid_filter:
+                return True
+        return False
+
+    def set_filter( self, trans, **kwd ):
+        # Set a session cookie value with the selected filter.
+        filter = kwd.get( 'filter', None )
+        if filter is not None and self.is_valid_filter( filter ):
+            trans.set_cookie( value=filter, name='toolshedrepogridfilter' )
+        else:
+            # If the filter is not valid, expire the cookie.
+            trans.set_cookie( value=filter, name='toolshedrepogridfilter', age=-1 )
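+
+# A minimal usage sketch (hypothetical caller code, not part of this module),
+# assuming a transaction object that provides the get_cookie() / set_cookie()
+# methods used above:
+#
+#     filter_manager = RepositoryGridFilterManager()
+#     filter_manager.set_filter( trans, filter=filter_manager.filters.SUITES )
+#     title = filter_manager.get_grid_title( trans, trailing_string='by owner', default='Repositories' )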
diff --git a/lib/tool_shed/grids/repository_grids.py b/lib/tool_shed/grids/repository_grids.py
new file mode 100644
index 0000000..f74994d
--- /dev/null
+++ b/lib/tool_shed/grids/repository_grids.py
@@ -0,0 +1,1432 @@
+import json
+import logging
+
+from markupsafe import escape as escape_html
+from sqlalchemy import and_, false, or_, true
+
+import tool_shed.grids.util as grids_util
+import tool_shed.repository_types.util as rt_util
+import tool_shed.util.shed_util_common as suc
+from galaxy.web.framework.helpers import grids
+from galaxy.webapps.tool_shed import model
+from tool_shed.util import hg_util, metadata_util, repository_util
+
+log = logging.getLogger( __name__ )
+
+
+class CategoryGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, category ):
+            return category.name
+
+    class DescriptionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, category ):
+            return category.description
+
+    class RepositoriesColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, category ):
+            category_name = str( category.name )
+            filter = trans.app.repository_grid_filter_manager.get_filter( trans )
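+            # The repository registry is assumed to maintain precomputed per-category
+            # counts of viewable repositories, keyed by category name.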
+            if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE:
+                return trans.app.repository_registry.certified_level_one_viewable_repositories_and_suites_by_category.get( category_name, 0 )
+            elif filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
+                return trans.app.repository_registry.certified_level_one_viewable_suites_by_category.get( category_name, 0 )
+            elif filter == trans.app.repository_grid_filter_manager.filters.SUITES:
+                return trans.app.repository_registry.viewable_suites_by_category.get( category_name, 0 )
+            else:
+                # The value of filter is None.
+                return trans.app.repository_registry.viewable_repositories_and_suites_by_category.get( category_name, 0 )
+
+    title = "Categories"
+    model_class = model.Category
+    template = '/webapps/tool_shed/category/grid.mako'
+    default_sort_key = "name"
+    columns = [
+        NameColumn( "Name",
+                    key="Category.name",
+                    link=( lambda item: dict( operation="repositories_by_category", id=item.id ) ),
+                    attach_popup=False ),
+        DescriptionColumn( "Description",
+                           key="Category.description",
+                           attach_popup=False ),
+        RepositoriesColumn( "Repositories",
+                            model_class=model.Repository,
+                            attach_popup=False )
+    ]
+    # Override these
+    default_filter = {}
+    global_actions = []
+    operations = []
+    standard_filters = []
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+
+class RepositoryGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            return escape_html( repository.name )
+
+    class TypeColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            type_class = repository.get_type_class( trans.app )
+            return escape_html( type_class.label )
+
+    class HeadsColumn( grids.GridColumn ):
+
+        def __init__( self, col_name ):
+            grids.GridColumn.__init__( self, col_name )
+
+        def get_value( self, trans, grid, repository ):
+            """Display the current repository heads."""
+            repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+            heads = hg_util.get_repository_heads( repo )
+            multiple_heads = len( heads ) > 1
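+            # More than one head means the mercurial repository contains unmerged
+            # changesets; flag that case in red below.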
+            if multiple_heads:
+                heads_str = '<font color="red">'
+            else:
+                heads_str = ''
+            for ctx in heads:
+                heads_str += '%s<br/>' % hg_util.get_revision_label_from_ctx( ctx, include_date=True )
+            if heads_str.endswith( '<br/>' ):
+                heads_str = heads_str[ :-len( '<br/>' ) ]
+            if multiple_heads:
+                heads_str += '</font>'
+            return heads_str
+
+    class MetadataRevisionColumn( grids.GridColumn ):
+
+        def __init__( self, col_name ):
+            grids.GridColumn.__init__( self, col_name )
+
+        def get_value( self, trans, grid, repository ):
+            """Display a SelectField whose options are the changeset_revision strings of all metadata revisions of this repository."""
+            # A repository's metadata revisions may not all be installable, as some may contain only invalid tools.
+            select_field = grids_util.build_changeset_revision_select_field( trans, repository, downloadable=False )
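+            # Each option is assumed to be a tuple whose first element is the display
+            # label, formatted as '<revision label> <date>'.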
+            if len( select_field.options ) > 1:
+                return select_field.get_html()
+            elif len( select_field.options ) == 1:
+                option_items = select_field.options[ 0 ][ 0 ]
+                rev_label, rev_date = option_items.split( ' ', 1 )
+                rev_date = '<i><font color="#666666">%s</font></i>' % rev_date
+                return '%s %s' % ( rev_label, rev_date )
+            return ''
+
+    class LatestInstallableRevisionColumn( grids.GridColumn ):
+
+        def __init__( self, col_name ):
+            grids.GridColumn.__init__( self, col_name )
+
+        def get_value( self, trans, grid, repository ):
+            """Display the latest installable revision label (may not be the repository tip)."""
+            select_field = grids_util.build_changeset_revision_select_field( trans, repository, downloadable=False )
+            if select_field.options:
+                return select_field.options[ 0 ][ 0 ]
+            return ''
+
+    class TipRevisionColumn( grids.GridColumn ):
+
+        def __init__( self, col_name ):
+            grids.GridColumn.__init__( self, col_name )
+
+        def get_value( self, trans, grid, repository ):
+            """Display the repository tip revision label."""
+            return escape_html( repository.revision( trans.app ) )
+
+    class DescriptionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            return escape_html( repository.description )
+
+    class CategoryColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            rval = '<ul>'
+            if repository.categories:
+                for rca in repository.categories:
+                    rval += '<li><a href="browse_repositories?operation=repositories_by_category&id=%s">%s</a></li>' \
+                        % ( trans.security.encode_id( rca.category.id ), rca.category.name )
+            else:
+                rval += '<li>not set</li>'
+            rval += '</ul>'
+            return rval
+
+    class RepositoryCategoryColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            """Modify query to filter by category."""
+            if column_filter == "All":
+                return query
+            return query.filter( model.Category.name == column_filter )
+
+    class UserColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            if repository.user:
+                return escape_html( repository.user.username )
+            return 'no user'
+
+    class EmailColumn( grids.TextColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            if column_filter == 'All':
+                return query
+            return query.filter( and_( model.Repository.table.c.user_id == model.User.table.c.id,
+                                       model.User.table.c.email == column_filter ) )
+
+    class EmailAlertsColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            if trans.user and repository.email_alerts and trans.user.email in json.loads( repository.email_alerts ):
+                return 'yes'
+            return ''
+
+    class DeprecatedColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            if repository.deprecated:
+                return 'yes'
+            return ''
+
+    title = "Repositories"
+    model_class = model.Repository
+    template = '/webapps/tool_shed/repository/grid.mako'
+    default_sort_key = "name"
+    use_hide_message = False
+    columns = [
+        NameColumn( "Name",
+                    key="name",
+                    link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                    attach_popup=False ),
+        DescriptionColumn( "Synopsis",
+                           key="description",
+                           attach_popup=False ),
+        TypeColumn( "Type" ),
+        MetadataRevisionColumn( "Metadata<br/>Revisions" ),
+        UserColumn( "Owner",
+                    model_class=model.User,
+                    link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+                    attach_popup=False,
+                    key="User.username" ),
+        # Columns that are valid for filtering but are not visible.
+        EmailColumn( "Email",
+                     model_class=model.User,
+                     key="email",
+                     visible=False ),
+        RepositoryCategoryColumn( "Category",
+                                  model_class=model.Category,
+                                  key="Category.name",
+                                  visible=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = []
+    standard_filters = []
+    default_filter = dict( deleted="False" )
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        filter = trans.app.repository_grid_filter_manager.get_filter( trans )
+        if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE:
+            return trans.sa_session.query( model.Repository ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+        if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( model.Repository.type == rt_util.REPOSITORY_SUITE_DEFINITION ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+        else:
+            # The filter is None.
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( and_( model.Repository.table.c.deleted == false(),
+                                                  model.Repository.table.c.deprecated == false() ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+
+
+class DockerImageGrid( RepositoryGrid ):
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.DescriptionColumn( "Synopsis",
+                                          key="description",
+                                          attach_popup=False ),
+        RepositoryGrid.UserColumn( "Owner",
+                                   model_class=model.User,
+                                   link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+                                   attach_popup=False,
+                                   key="User.username" ),
+        RepositoryGrid.EmailAlertsColumn( "Alert", attach_popup=False ),
+    ]
+    operations = [ grids.GridOperation( "Include in Docker image", allow_multiple=True  ) ]
+    show_item_checkboxes = True
+
+
+class EmailAlertsRepositoryGrid( RepositoryGrid ):
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.DescriptionColumn( "Synopsis",
+                                          key="description",
+                                          attach_popup=False ),
+        RepositoryGrid.UserColumn( "Owner",
+                                   model_class=model.User,
+                                   link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+                                   attach_popup=False,
+                                   key="User.username" ),
+        RepositoryGrid.EmailAlertsColumn( "Alert", attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        grids.DeletedColumn( "Deleted",
+                             key="deleted",
+                             visible=False,
+                             filterable="advanced" )
+    ]
+    operations = [ grids.GridOperation( "Receive email alerts", allow_multiple=True  ) ]
+    global_actions = [
+        grids.GridAction( "User preferences", dict( controller='user', action='index', cntrller='repository' ) )
+    ]
+
+
+class MatchedRepositoryGrid( grids.Grid ):
+    # This grid filters out repositories that have been marked as deleted or deprecated.
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            return escape_html( repository_metadata.repository.name )
+
+    class DescriptionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            return escape_html( repository_metadata.repository.description )
+
+    class RevisionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            return repository_metadata.changeset_revision
+
+    class UserColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.repository.user:
+                return escape_html( repository_metadata.repository.user.username )
+            return 'no user'
+
+    # Grid definition
+    title = "Matching repositories"
+    model_class = model.RepositoryMetadata
+    template = '/webapps/tool_shed/repository/grid.mako'
+    default_sort_key = "Repository.name"
+    use_hide_message = False
+    columns = [
+        NameColumn( "Repository name",
+                    link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                    attach_popup=True ),
+        DescriptionColumn( "Synopsis",
+                           attach_popup=False ),
+        RevisionColumn( "Revision" ),
+        UserColumn( "Owner",
+                    model_class=model.User,
+                    attach_popup=False )
+    ]
+    operations = [ grids.GridOperation( "Install to Galaxy", allow_multiple=True  ) ]
+    standard_filters = []
+    default_filter = {}
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        match_tuples = kwd.get( 'match_tuples', [] )
+        clause_list = []
+        if match_tuples:
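+            # Each match_tuple is expected to be a ( repository_id, changeset_revision )
+            # pair; a textual SQL clause is built for each and or_'d together below.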
+            for match_tuple in match_tuples:
+                repository_id, changeset_revision = match_tuple
+                clause_list.append( "%s=%d and %s='%s'" % ( model.RepositoryMetadata.table.c.repository_id,
+                                                            int( repository_id ),
+                                                            model.RepositoryMetadata.table.c.changeset_revision,
+                                                            changeset_revision ) )
+            return trans.sa_session.query( model.RepositoryMetadata ) \
+                                   .join( model.Repository ) \
+                                   .filter( and_( model.Repository.table.c.deleted == false(),
+                                                  model.Repository.table.c.deprecated == false() ) ) \
+                                   .join( model.User.table ) \
+                                   .filter( or_( *clause_list ) ) \
+                                   .order_by( model.Repository.name )
+        # Return an empty query
+        return trans.sa_session.query( model.RepositoryMetadata ) \
+                               .filter( model.RepositoryMetadata.id < 0 )
+
+
+class InstallMatchedRepositoryGrid( MatchedRepositoryGrid ):
+    columns = [ col for col in MatchedRepositoryGrid.columns ]
+    # Override the NameColumn
+    columns[ 0 ] = MatchedRepositoryGrid.NameColumn( "Name",
+                                                     link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                     attach_popup=False )
+
+
+class MyWritableRepositoriesGrid( RepositoryGrid ):
+    # This grid filters out repositories that have been marked as either deprecated or deleted.
+    title = 'Repositories I can change'
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.TypeColumn( "Type" ),
+        RepositoryGrid.MetadataRevisionColumn( "Metadata<br/>Revisions" ),
+        RepositoryGrid.UserColumn( "Owner",
+                                   model_class=model.User,
+                                   link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+                                   attach_popup=False,
+                                   key="User.username" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[ 0 ] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        # TODO: improve performance by adding a db table associating users with repositories for which they have write access.
+        username = trans.user.username
+        clause_list = []
+        for repository in trans.sa_session.query( model.Repository ) \
+                                          .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                         model.Repository.table.c.deleted == false() ) ):
+            allow_push = repository.allow_push( trans.app )
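+            # allow_push is expected to be either None or a comma-separated string of
+            # usernames with write access to the repository.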
+            if allow_push:
+                allow_push_usernames = allow_push.split( ',' )
+                if username in allow_push_usernames:
+                    clause_list.append( model.Repository.table.c.id == repository.id )
+        if clause_list:
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( or_( *clause_list ) ) \
+                                   .join( model.User.table )
+        # Return an empty query.
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( model.Repository.table.c.id < 0 )
+
+
+class RepositoriesByUserGrid( RepositoryGrid ):
+    title = "Repositories by user"
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.DescriptionColumn( "Synopsis",
+                                          key="description",
+                                          attach_popup=False ),
+        RepositoryGrid.TypeColumn( "Type" ),
+        RepositoryGrid.MetadataRevisionColumn( "Metadata<br/>Revisions" ),
+        RepositoryGrid.CategoryColumn( "Category",
+                                       model_class=model.Category,
+                                       key="Category.name",
+                                       attach_popup=False )
+    ]
+    operations = []
+    standard_filters = []
+    default_filter = dict( deleted="False" )
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        decoded_user_id = trans.security.decode_id( kwd[ 'user_id' ] )
+        filter = trans.app.repository_grid_filter_manager.get_filter( trans )
+        if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE:
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( model.Repository.table.c.user_id == decoded_user_id ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+        if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( and_( model.Repository.type == rt_util.REPOSITORY_SUITE_DEFINITION,
+                                                  model.Repository.table.c.user_id == decoded_user_id ) ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+        else:
+            # The value of filter is None.
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( and_( model.Repository.table.c.deleted == false(),
+                                                  model.Repository.table.c.deprecated == false(),
+                                                  model.Repository.table.c.user_id == decoded_user_id ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+
+
+class RepositoriesInCategoryGrid( RepositoryGrid ):
+    title = "Category"
+
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( controller="repository", operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.DescriptionColumn( "Synopsis",
+                                          key="description",
+                                          attach_popup=False ),
+        RepositoryGrid.TypeColumn( "Type" ),
+        RepositoryGrid.MetadataRevisionColumn( "Metadata<br/>Revisions" ),
+        RepositoryGrid.UserColumn( "Owner",
+                                   model_class=model.User,
+                                   link=( lambda item: dict( controller="repository", operation="repositories_by_user", id=item.id ) ),
+                                   attach_popup=False,
+                                   key="User.username" ),
+        # Columns that are valid for filtering but are not visible.
+        RepositoryGrid.EmailColumn( "Email",
+                                    model_class=model.User,
+                                    key="email",
+                                    visible=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        category_id = kwd.get( 'id', None )
+        filter = trans.app.repository_grid_filter_manager.get_filter( trans )
+        if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE:
+            if category_id:
+                category = suc.get_category( trans.app, category_id )
+                if category:
+                    return trans.sa_session.query( model.Repository ) \
+                                           .join( model.RepositoryMetadata.table ) \
+                                           .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                           .join( model.User.table ) \
+                                           .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                           .outerjoin( model.Category.table ) \
+                                           .filter( model.Category.table.c.name == category.name )
+            return trans.sa_session.query( model.Repository ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+        if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
+            if category_id:
+                category = suc.get_category( trans.app, category_id )
+                if category:
+                    return trans.sa_session.query( model.Repository ) \
+                                           .filter( model.Repository.type == rt_util.REPOSITORY_SUITE_DEFINITION ) \
+                                           .join( model.RepositoryMetadata.table ) \
+                                           .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                           .join( model.User.table ) \
+                                           .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                           .outerjoin( model.Category.table ) \
+                                           .filter( model.Category.table.c.name == category.name )
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( model.Repository.type == rt_util.REPOSITORY_SUITE_DEFINITION ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+        else:
+            # The value of filter is None.
+            if category_id:
+                category = suc.get_category( trans.app, category_id )
+                if category:
+                    return trans.sa_session.query( model.Repository ) \
+                                           .filter( and_( model.Repository.table.c.deleted == false(),
+                                                          model.Repository.table.c.deprecated == false() ) ) \
+                                           .join( model.User.table ) \
+                                           .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                           .outerjoin( model.Category.table ) \
+                                           .filter( model.Category.table.c.name == category.name )
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( and_( model.Repository.table.c.deleted == false(),
+                                                  model.Repository.table.c.deprecated == false() ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table )
+
+
+class RepositoriesIOwnGrid( RepositoryGrid ):
+    title = "Repositories I own"
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.TypeColumn( "Type" ),
+        RepositoryGrid.MetadataRevisionColumn( "Metadata<br/>Revisions" ),
+        RepositoryGrid.DeprecatedColumn( "Deprecated" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( and_( model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.user_id == trans.user.id ) ) \
+                               .join( model.User.table ) \
+                               .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                               .outerjoin( model.Category.table )
+
+
+class RepositoriesICanAdministerGrid( RepositoryGrid ):
+    title = "Repositories I can administer"
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.UserColumn( "Owner" ),
+        RepositoryGrid.MetadataRevisionColumn( "Metadata<br/>Revisions" ),
+        RepositoryGrid.DeprecatedColumn( "Deprecated" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        """
+        Retrieve all repositories for which the current user has been granted administrative privileges.
+        """
+        current_user = trans.user
+        # Build up an or-based clause list containing role table records.
+        clause_list = []
+        # Include each of the user's roles.
+        for ura in current_user.roles:
+            clause_list.append( model.Role.table.c.id == ura.role_id )
+        # Include each role associated with each group of which the user is a member.
+        for uga in current_user.groups:
+            group = uga.group
+            for gra in group.roles:
+                clause_list.append( model.Role.table.c.id == gra.role_id )
+        # Filter out repositories for which the user does not have the administrative role either directly
+        # via a role association or indirectly via a group -> role association.
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( model.Repository.table.c.deleted == false() ) \
+                               .outerjoin( model.RepositoryRoleAssociation.table ) \
+                               .outerjoin( model.Role.table ) \
+                               .filter( or_( *clause_list ) ) \
+                               .join( model.User.table ) \
+                               .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                               .outerjoin( model.Category.table )
+
+
+class RepositoriesMissingToolTestComponentsGrid( RepositoryGrid ):
+    # This grid displays only the latest installable revision of each repository.
+    title = "Repositories with missing tool test components"
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.LatestInstallableRevisionColumn( "Latest Installable Revision" ),
+        RepositoryGrid.UserColumn( "Owner",
+                                   key="User.username",
+                                   model_class=model.User,
+                                   link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+                                   attach_popup=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        # Filter by latest installable revisions that contain tools with missing tool test components.
+        revision_clause_list = []
+        for repository in trans.sa_session.query( model.Repository ) \
+                                          .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                         model.Repository.table.c.deleted == false() ) ):
+            changeset_revision = \
+                grids_util.filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository )
+            if changeset_revision:
+                revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
+        if revision_clause_list:
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                  model.Repository.table.c.deleted == false() ) ) \
+                                   .join( model.RepositoryMetadata ) \
+                                   .filter( or_( *revision_clause_list ) ) \
+                                   .join( model.User.table )
+        # Return an empty query.
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( model.Repository.table.c.id < 0 )
+
+
+class MyWritableRepositoriesMissingToolTestComponentsGrid( RepositoriesMissingToolTestComponentsGrid ):
+    # This grid displays only the latest installable revision of each repository.
+    title = "Repositories I can change with missing tool test components"
+    columns = [ col for col in RepositoriesMissingToolTestComponentsGrid.columns ]
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        # First get all repositories that the current user is authorized to update.
+        username = trans.user.username
+        user_clause_list = []
+        for repository in trans.sa_session.query( model.Repository ) \
+                                          .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                         model.Repository.table.c.deleted == false() ) ):
+            allow_push = repository.allow_push( trans.app )
+            if allow_push:
+                allow_push_usernames = allow_push.split( ',' )
+                if username in allow_push_usernames:
+                    user_clause_list.append( model.Repository.table.c.id == repository.id )
+        if user_clause_list:
+            # We have the list of repositories that the current user is authorized to update, so filter
+            # further by latest installable revisions that contain tools with missing tool test components.
+            revision_clause_list = []
+            for repository in trans.sa_session.query( model.Repository ) \
+                                              .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                             model.Repository.table.c.deleted == false() ) ) \
+                                              .filter( or_( *user_clause_list ) ):
+                changeset_revision = \
+                    grids_util.filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository )
+                if changeset_revision:
+                    revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
+            if revision_clause_list:
+                return trans.sa_session.query( model.Repository ) \
+                                       .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                      model.Repository.table.c.deleted == false() ) ) \
+                                       .join( model.User.table ) \
+                                       .filter( or_( *user_clause_list ) ) \
+                                       .join( model.RepositoryMetadata ) \
+                                       .filter( or_( *revision_clause_list ) )
+        # Return an empty query.
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( model.Repository.table.c.id < 0 )
+
+
+class DeprecatedRepositoriesIOwnGrid( RepositoriesIOwnGrid ):
+    title = "Deprecated repositories I own"
+    columns = [
+        RepositoriesIOwnGrid.NameColumn( "Name",
+                                         key="name",
+                                         link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                         attach_popup=False ),
+        RepositoryGrid.TypeColumn( "Type" ),
+        RepositoriesIOwnGrid.MetadataRevisionColumn( "Metadata<br/>Revisions" ),
+        RepositoriesIOwnGrid.CategoryColumn( "Category",
+                                             model_class=model.Category,
+                                             key="Category.name",
+                                             attach_popup=False ),
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( and_( model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.user_id == trans.user.id,
+                                              model.Repository.table.c.deprecated == true() ) ) \
+                               .join( model.User.table ) \
+                               .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                               .outerjoin( model.Category.table )
+
+
+class RepositoriesWithInvalidToolsGrid( RepositoryGrid ):
+    # This grid displays only the latest installable revision of each repository.
+
+    class InvalidToolConfigColumn( grids.GridColumn ):
+
+        def __init__( self, col_name ):
+            grids.GridColumn.__init__( self, col_name )
+
+        def get_value( self, trans, grid, repository ):
+            # At the time this grid is displayed we know that the received repository will have invalid tools in its latest changeset revision
+            # that has associated metadata.
+            val = ''
+            repository_metadata = \
+                grids_util.get_latest_repository_metadata_if_it_includes_invalid_tools( trans, repository )
+            metadata = repository_metadata.metadata
+            invalid_tools = metadata.get( 'invalid_tools', [] )
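+            # invalid_tools is expected to be a list of tool config file names recorded
+            # in the repository's metadata.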
+            if invalid_tools:
+                for invalid_tool_config in invalid_tools:
+                    href_str = '<a href="load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s">%s</a>' % \
+                        ( trans.security.encode_id( repository.id ), invalid_tool_config, repository_metadata.changeset_revision, invalid_tool_config )
+                    val += href_str
+                    val += '<br/>'
+                if val.endswith( '<br/>' ):
+                    val = val[ :-len( '<br/>' ) ]
+            return val
+
+    title = "Repositories with invalid tools"
+    columns = [
+        InvalidToolConfigColumn( "Tool config" ),
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=False ),
+        RepositoryGrid.LatestInstallableRevisionColumn( "Latest Metadata Revision" ),
+        RepositoryGrid.UserColumn( "Owner",
+                                   key="User.username",
+                                   model_class=model.User,
+                                   link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+                                   attach_popup=False )
+    ]
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        # Filter by latest metadata revisions that contain invalid tools.
+        revision_clause_list = []
+        for repository in trans.sa_session.query( model.Repository ) \
+                                          .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                         model.Repository.table.c.deleted == false() ) ):
+            changeset_revision = \
+                grids_util.filter_by_latest_metadata_changeset_revision_that_has_invalid_tools( trans, repository )
+            if changeset_revision:
+                revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
+        if revision_clause_list:
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                  model.Repository.table.c.deleted == false() ) ) \
+                                   .join( model.RepositoryMetadata ) \
+                                   .filter( or_( *revision_clause_list ) ) \
+                                   .join( model.User.table )
+        # Return an empty query.
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( model.Repository.table.c.id < 0 )
+
+
+class MyWritableRepositoriesWithInvalidToolsGrid( RepositoriesWithInvalidToolsGrid ):
+    # This grid displays only the latest installable revision of each repository.
+    title = "Repositories I can change with invalid tools"
+    columns = [ col for col in RepositoriesWithInvalidToolsGrid.columns ]
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        # First get all repositories that the current user is authorized to update.
+        username = trans.user.username
+        user_clause_list = []
+        for repository in trans.sa_session.query( model.Repository ) \
+                                          .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                         model.Repository.table.c.deleted == false() ) ):
+            allow_push = repository.allow_push( trans.app )
+            if allow_push:
+                allow_push_usernames = allow_push.split( ',' )
+                if username in allow_push_usernames:
+                    user_clause_list.append( model.Repository.table.c.id == repository.id )
+        if user_clause_list:
+            # We have the list of repositories that the current user is authorized to update, so filter
+            # further by latest metadata revisions that contain invalid tools.
+            revision_clause_list = []
+            for repository in trans.sa_session.query( model.Repository ) \
+                                              .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                             model.Repository.table.c.deleted == false() ) ) \
+                                              .filter( or_( *user_clause_list ) ):
+                changeset_revision = \
+                    grids_util.filter_by_latest_metadata_changeset_revision_that_has_invalid_tools( trans, repository )
+                if changeset_revision:
+                    revision_clause_list.append( model.RepositoryMetadata.table.c.changeset_revision == changeset_revision )
+            if revision_clause_list:
+                return trans.sa_session.query( model.Repository ) \
+                                       .filter( and_( model.Repository.table.c.deprecated == false(),
+                                                      model.Repository.table.c.deleted == false() ) ) \
+                                       .join( model.User.table ) \
+                                       .filter( or_( *user_clause_list ) ) \
+                                       .join( model.RepositoryMetadata ) \
+                                       .filter( or_( *revision_clause_list ) )
+        # Return an empty query.
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( model.Repository.table.c.id < 0 )
+
+
+class RepositoryMetadataGrid( grids.Grid ):
+
+    class RepositoryNameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            repository = repository_metadata.repository
+            return escape_html( repository.name )
+
+    class RepositoryTypeColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            repository = repository_metadata.repository
+            type_class = repository.get_type_class( trans.app )
+            return escape_html( type_class.label )
+
+    class RepositoryOwnerColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            repository = repository_metadata.repository
+            return escape_html( repository.user.username )
+
+    class ChangesetRevisionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            repository = repository_metadata.repository
+            changeset_revision = repository_metadata.changeset_revision
+            changeset_revision_label = hg_util.get_revision_label( trans.app, repository, changeset_revision, include_date=True )
+            return changeset_revision_label
+
+    class MaliciousColumn( grids.BooleanColumn ):
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.malicious:
+                return 'yes'
+            return ''
+
+    class DownloadableColumn( grids.BooleanColumn ):
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.downloadable:
+                return 'yes'
+            return ''
+
+    class HasRepositoryDependenciesColumn( grids.BooleanColumn ):
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.has_repository_dependencies:
+                return 'yes'
+            return ''
+
+    class IncludesDatatypesColumn( grids.BooleanColumn ):
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.includes_datatypes:
+                return 'yes'
+            return ''
+
+    class IncludesToolsColumn( grids.BooleanColumn ):
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.includes_tools:
+                return 'yes'
+            return ''
+
+    class IncludesToolDependenciesColumn( grids.BooleanColumn ):
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.includes_tool_dependencies:
+                return 'yes'
+            return ''
+
+    class IncludesWorkflowsColumn( grids.BooleanColumn ):
+        def get_value( self, trans, grid, repository_metadata ):
+            if repository_metadata.includes_workflows:
+                return 'yes'
+            return ''
+
+    title = "Repository metadata"
+    model_class = model.RepositoryMetadata
+    template = '/webapps/tool_shed/repository/grid.mako'
+    default_sort_key = "Repository.name"
+    columns = [
+        RepositoryNameColumn( "Repository name",
+                              key="Repository.name",
+                              link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                              attach_popup=False ),
+        RepositoryNameColumn( "Type" ),
+        RepositoryOwnerColumn( "Owner",
+                               model_class=model.User,
+                               attach_popup=False,
+                               key="User.username" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = []
+    standard_filters = []
+    default_filter = dict( malicious="False" )
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.RepositoryMetadata ) \
+                               .join( model.Repository ) \
+                               .filter( and_( model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false() ) ) \
+                               .join( model.User.table )
+
+
+class RepositoryDependenciesGrid( RepositoryMetadataGrid ):
+
+    class RequiredRepositoryColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
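+            # Build one line per dependency definition, linking each required
+            # repository that still exists to its metadata record; the lines
+            # are joined with '<br />' below.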
+            rd_str = []
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    rd_dict = metadata.get( 'repository_dependencies', {} )
+                    if rd_dict:
+                        rd_tups = rd_dict[ 'repository_dependencies' ]
+                        # "repository_dependencies": [["http://localhost:9009", "bwa059", "test", "a07baa797d53"]]
+                        # Sort rd_tups by required repository name.
+                        sorted_rd_tups = sorted( rd_tups, key=lambda rd_tup: rd_tup[ 1 ] )
+                        for rd_tup in sorted_rd_tups:
+                            name, owner, changeset_revision = rd_tup[1:4]
+                            rd_line = ''
+                            required_repository = repository_util.get_repository_by_name_and_owner( trans.app, name, owner )
+                            if required_repository and not required_repository.deleted:
+                                required_repository_id = trans.security.encode_id( required_repository.id )
+                                required_repository_metadata = \
+                                    metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                                               required_repository_id,
+                                                                                                               changeset_revision )
+                                if not required_repository_metadata:
+                                    repo = hg_util.get_repo_for_repository( trans.app,
+                                                                            repository=required_repository,
+                                                                            repo_path=None,
+                                                                            create=False )
+                                    updated_changeset_revision = \
+                                        metadata_util.get_next_downloadable_changeset_revision( required_repository,
+                                                                                                repo,
+                                                                                                changeset_revision )
+                                    required_repository_metadata = \
+                                        metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans.app,
+                                                                                                                   required_repository_id,
+                                                                                                                   updated_changeset_revision )
+                                required_repository_metadata_id = trans.security.encode_id( required_repository_metadata.id )
+                                rd_line += '<a href="browse_repository_dependencies?operation=view_or_manage_repository&id=%s">' % ( required_repository_metadata_id )
+                            rd_line += 'Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>' % ( escape_html( name ), escape_html( changeset_revision ), escape_html( owner ) )
+                            if required_repository and not required_repository.deleted:
+                                rd_line += '</a>'
+                            rd_str.append( rd_line )
+            return '<br />'.join( rd_str )
+
+    title = "Valid repository dependency definitions in this tool shed"
+    default_sort_key = "Repository.name"
+    columns = [
+        RequiredRepositoryColumn( "Repository dependency",
+                                  attach_popup=False ),
+        RepositoryMetadataGrid.RepositoryNameColumn( "Repository name",
+                                                     model_class=model.Repository,
+                                                     link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                     attach_popup=False,
+                                                     key="Repository.name" ),
+        RepositoryMetadataGrid.RepositoryOwnerColumn( "Owner",
+                                                      model_class=model.User,
+                                                      attach_popup=False,
+                                                      key="User.username" ),
+        RepositoryMetadataGrid.ChangesetRevisionColumn( "Revision",
+                                                        attach_popup=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, owner",
+                                                cols_to_filter=[ columns[1], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+
+    def build_initial_query( self, trans, **kwd ):
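+        # Restrict the base query to metadata revisions that define repository dependencies.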
+        return trans.sa_session.query( model.RepositoryMetadata ) \
+                               .join( model.Repository ) \
+                               .filter( and_( model.RepositoryMetadata.table.c.has_repository_dependencies == true(),
+                                              model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false() ) ) \
+                               .join( model.User.table )
+
+
+class DatatypesGrid( RepositoryMetadataGrid ):
+
+    class DatatypesColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            datatype_list = []
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    datatype_dicts = metadata.get( 'datatypes', [] )
+                    if datatype_dicts:
+                        # Create tuples of the attributes we want so we can sort them by extension.
+                        datatype_tups = []
+                        for datatype_dict in datatype_dicts:
+                            # Example: {"display_in_upload": "true", "dtype": "galaxy.datatypes.blast:BlastXml", "extension": "blastxml", "mimetype": "application/xml"}
+                            extension = datatype_dict.get( 'extension', '' )
+                            dtype = datatype_dict.get( 'dtype', '' )
+                            # For now we'll just display extension and dtype.
+                            if extension and dtype:
+                                datatype_tups.append( ( extension, dtype ) )
+                        sorted_datatype_tups = sorted( datatype_tups, key=lambda datatype_tup: datatype_tup[ 0 ] )
+                        for datatype_tup in sorted_datatype_tups:
+                            extension, datatype = datatype_tup[:2]
+                            datatype_str = '<a href="browse_datatypes?operation=view_or_manage_repository&id=%s">' % trans.security.encode_id( repository_metadata.id )
+                            datatype_str += '<b>%s:</b> %s' % ( escape_html( extension ), escape_html( datatype ) )
+                            datatype_str += '</a>'
+                            datatype_list.append( datatype_str )
+            return '<br />'.join( datatype_list )
+
+    title = "Custom datatypes in this tool shed"
+    default_sort_key = "Repository.name"
+    columns = [
+        DatatypesColumn( "Datatype extension and class",
+                         attach_popup=False ),
+        RepositoryMetadataGrid.RepositoryNameColumn( "Repository name",
+                                                     model_class=model.Repository,
+                                                     link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                     attach_popup=False,
+                                                     key="Repository.name" ),
+        RepositoryMetadataGrid.RepositoryOwnerColumn( "Owner",
+                                                      model_class=model.User,
+                                                      attach_popup=False,
+                                                      key="User.username" ),
+        RepositoryMetadataGrid.ChangesetRevisionColumn( "Revision",
+                                                        attach_popup=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, owner",
+                                                cols_to_filter=[ columns[1], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+
+    def build_initial_query( self, trans, **kwd ):
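+        # Restrict the base query to metadata revisions that include custom datatypes.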
+        return trans.sa_session.query( model.RepositoryMetadata ) \
+                               .join( model.Repository ) \
+                               .filter( and_( model.RepositoryMetadata.table.c.includes_datatypes == true(),
+                                              model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false() ) ) \
+                               .join( model.User.table )
+
+
+class ToolDependenciesGrid( RepositoryMetadataGrid ):
+
+    class ToolDependencyColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            td_str = ''
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    tds_dict = metadata.get( 'tool_dependencies', {} )
+                    if tds_dict:
+                        # Example: {"bwa/0.5.9": {"name": "bwa", "type": "package", "version": "0.5.9"}}
+                        sorted_keys = sorted( tds_dict.keys() )
+                        num_keys = len( sorted_keys )
+                        # Handle environment settings first.
+                        if 'set_environment' in sorted_keys:
+                            # Example: "set_environment": [{"name": "JAVA_JAR_FILE", "type": "set_environment"}]
+                            env_dicts = tds_dict[ 'set_environment' ]
+                            num_env_dicts = len( env_dicts )
+                            if num_env_dicts > 0:
+                                td_str += '<a href="browse_tool_dependencies?operation=view_or_manage_repository&id=%s">' % trans.security.encode_id( repository_metadata.id )
+                                td_str += '<b>environment:</b> '
+                                td_str += ', '.join( [ escape_html( env_dict['name'] ) for env_dict in env_dicts ] )
+                                td_str += '</a><br/>'
+                        for index, key in enumerate( sorted_keys ):
+                            if key == 'set_environment':
+                                continue
+                            td_dict = tds_dict[ key ]
+                            # Example: {"name": "bwa", "type": "package", "version": "0.5.9"}
+                            name = td_dict[ 'name' ]
+                            version = td_dict[ 'version' ]
+                            td_str += '<a href="browse_tool_dependencies?operation=view_or_manage_repository&id=%s">' % trans.security.encode_id( repository_metadata.id )
+                            td_str += '<b>%s</b> version <b>%s</b>' % ( escape_html( name ), escape_html( version ) )
+                            td_str += '</a>'
+                            if index < num_keys - 1:
+                                td_str += '<br/>'
+            return td_str
+
+    title = "Tool dependency definitions in this tool shed"
+    default_sort_key = "Repository.name"
+    columns = [
+        ToolDependencyColumn( "Tool dependency",
+                              attach_popup=False ),
+        RepositoryMetadataGrid.RepositoryNameColumn( "Repository name",
+                                                     model_class=model.Repository,
+                                                     link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                     attach_popup=False,
+                                                     key="Repository.name" ),
+        RepositoryMetadataGrid.RepositoryOwnerColumn( "Owner",
+                                                      model_class=model.User,
+                                                      attach_popup=False,
+                                                      key="User.username" ),
+        RepositoryMetadataGrid.ChangesetRevisionColumn( "Revision",
+                                                        attach_popup=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, owner",
+                                                cols_to_filter=[ columns[1], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+
+    def build_initial_query( self, trans, **kwd ):
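+        # Restrict the base query to metadata revisions that include tool dependency definitions.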
+        return trans.sa_session.query( model.RepositoryMetadata ) \
+                               .join( model.Repository ) \
+                               .filter( and_( model.RepositoryMetadata.table.c.includes_tool_dependencies == true(),
+                                              model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false() ) ) \
+                               .join( model.User.table )
+
+
+class ToolsGrid( RepositoryMetadataGrid ):
+
+    class ToolsColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository_metadata ):
+            tool_line = []
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    tool_dicts = metadata.get( 'tools', [] )
+                    if tool_dicts:
+                        # Create tuples of the attributes we want so we can sort them by tool id.
+                        tool_tups = []
+                        for tool_dict in tool_dicts:
+                            tool_id = tool_dict.get( 'id', '' )
+                            version = tool_dict.get( 'version', '' )
+                            # For now we'll just display tool id and version.
+                            if tool_id and version:
+                                tool_tups.append( ( tool_id, version ) )
+                        sorted_tool_tups = sorted( tool_tups, key=lambda tool_tup: tool_tup[ 0 ] )
+                        for tool_tup in sorted_tool_tups:
+                            tool_id, version = tool_tup[ :2 ]
+                            tool_str = '<a href="browse_tools?operation=view_or_manage_repository&id=%s">' % trans.security.encode_id( repository_metadata.id )
+                            tool_str += '<b>%s:</b> %s' % ( escape_html( tool_id ), escape_html( version ) )
+                            tool_str += '</a>'
+                            tool_line.append( tool_str )
+            return '<br />'.join( tool_line )
+
+    title = "Valid tools in this tool shed"
+    default_sort_key = "Repository.name"
+    columns = [
+        ToolsColumn( "Tool id and version",
+                     attach_popup=False ),
+        RepositoryMetadataGrid.RepositoryNameColumn( "Repository name",
+                                                     model_class=model.Repository,
+                                                     link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                     attach_popup=False,
+                                                     key="Repository.name" ),
+        RepositoryMetadataGrid.RepositoryOwnerColumn( "Owner",
+                                                      model_class=model.User,
+                                                      attach_popup=False,
+                                                      key="User.username" ),
+        RepositoryMetadataGrid.ChangesetRevisionColumn( "Revision",
+                                                        attach_popup=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, owner",
+                                                cols_to_filter=[ columns[1], columns[2] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+
+    def build_initial_query( self, trans, **kwd ):
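+        # Restrict the base query to metadata revisions that include valid tools.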
+        return trans.sa_session.query( model.RepositoryMetadata ) \
+                               .join( model.Repository ) \
+                               .filter( and_( model.RepositoryMetadata.table.c.includes_tools == true(),
+                                              model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false() ) ) \
+                               .join( model.User.table )
+
+
+class ValidCategoryGrid( CategoryGrid ):
+
+    class RepositoriesColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, category ):
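+            # Return the pre-computed count of viewable repositories in this
+            # category that matches the active grid filter.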
+            category_name = str( category.name )
+            filter = trans.app.repository_grid_filter_manager.get_filter( trans )
+            if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE:
+                return trans.app.repository_registry.certified_level_one_viewable_repositories_and_suites_by_category.get( category_name, 0 )
+            elif filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
+                return trans.app.repository_registry.certified_level_one_viewable_suites_by_category.get( category_name, 0 )
+            elif filter == trans.app.repository_grid_filter_manager.filters.SUITES:
+                return trans.app.repository_registry.viewable_valid_suites_by_category.get( category_name, 0 )
+            else:
+                # The value of filter is None.
+                return trans.app.repository_registry.viewable_valid_repositories_and_suites_by_category.get( category_name, 0 )
+
+    title = "Categories of Valid Repositories"
+    model_class = model.Category
+    template = '/webapps/tool_shed/category/valid_grid.mako'
+    default_sort_key = "name"
+    columns = [
+        CategoryGrid.NameColumn( "Name",
+                                 key="Category.name",
+                                 link=( lambda item: dict( operation="valid_repositories_by_category", id=item.id ) ),
+                                 attach_popup=False ),
+        CategoryGrid.DescriptionColumn( "Description",
+                                        key="Category.description",
+                                        attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        RepositoriesColumn( "Valid repositories",
+                            model_class=model.Repository,
+                            attach_popup=False )
+    ]
+    # Override these
+    default_filter = {}
+    global_actions = []
+    operations = []
+    standard_filters = []
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+
+class ValidRepositoryGrid( RepositoryGrid ):
+    # This grid filters out repositories that have been marked as either deleted or deprecated.
+
+    class CategoryColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            rval = '<ul>'
+            if repository.categories:
+                for rca in repository.categories:
+                    rval += '<li><a href="browse_repositories?operation=valid_repositories_by_category&id=%s">%s</a></li>' \
+                        % ( trans.security.encode_id( rca.category.id ), rca.category.name )
+            else:
+                rval += '<li>not set</li>'
+            rval += '</ul>'
+            return rval
+
+    class RepositoryCategoryColumn( grids.GridColumn ):
+
+        def filter( self, trans, user, query, column_filter ):
+            """Modify query to filter by category."""
+            if column_filter == "All":
+                return query
+            return query.filter( model.Category.name == column_filter )
+
+    class InstallableRevisionColumn( grids.GridColumn ):
+
+        def __init__( self, col_name ):
+            grids.GridColumn.__init__( self, col_name )
+
+        def get_value( self, trans, grid, repository ):
+            """Display a SelectField whose options are the changeset_revision strings of all download-able revisions of this repository."""
+            select_field = grids_util.build_changeset_revision_select_field( trans, repository, downloadable=True )
+            if len( select_field.options ) > 1:
+                return select_field.get_html()
+            elif len( select_field.options ) == 1:
+                return select_field.options[ 0 ][ 0 ]
+            return ''
+
+    title = "Valid Repositories"
+    columns = [
+        RepositoryGrid.NameColumn( "Name",
+                                   key="name",
+                                   attach_popup=True ),
+        RepositoryGrid.DescriptionColumn( "Synopsis",
+                                          key="description",
+                                          attach_popup=False ),
+        RepositoryGrid.TypeColumn( "Type" ),
+        InstallableRevisionColumn( "Installable Revisions" ),
+        RepositoryGrid.UserColumn( "Owner",
+                                   model_class=model.User,
+                                   attach_popup=False ),
+        # Columns that are valid for filtering but are not visible.
+        RepositoryCategoryColumn( "Category",
+                                  model_class=model.Category,
+                                  key="Category.name",
+                                  visible=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+                                                cols_to_filter=[ columns[0], columns[1] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = []
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
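+        # The active grid filter (certified level one, certified level one suites,
+        # or None) determines the joins and clauses below; in every case only
+        # downloadable revisions are included.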
+        filter = trans.app.repository_grid_filter_manager.get_filter( trans )
+        if 'id' in kwd:
+            # The user is browsing categories of valid repositories, so filter the request by the received id,
+            # which is a category id.
+            if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE:
+                return trans.sa_session.query( model.Repository ) \
+                                       .join( model.RepositoryMetadata.table ) \
+                                       .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                       .join( model.User.table ) \
+                                       .join( model.RepositoryCategoryAssociation.table ) \
+                                       .join( model.Category.table ) \
+                                       .filter( and_( model.Category.table.c.id == trans.security.decode_id( kwd[ 'id' ] ),
+                                                      model.RepositoryMetadata.table.c.downloadable == true() ) )
+            if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
+                return trans.sa_session.query( model.Repository ) \
+                                       .filter( model.Repository.type == rt_util.REPOSITORY_SUITE_DEFINITION ) \
+                                       .join( model.RepositoryMetadata.table ) \
+                                       .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                       .join( model.User.table ) \
+                                       .join( model.RepositoryCategoryAssociation.table ) \
+                                       .join( model.Category.table ) \
+                                       .filter( and_( model.Category.table.c.id == trans.security.decode_id( kwd[ 'id' ] ),
+                                                      model.RepositoryMetadata.table.c.downloadable == true() ) )
+            else:
+                # The value of filter is None.
+                return trans.sa_session.query( model.Repository ) \
+                                       .filter( and_( model.Repository.table.c.deleted == false(),
+                                                      model.Repository.table.c.deprecated == false() ) ) \
+                                       .join( model.RepositoryMetadata.table ) \
+                                       .join( model.User.table ) \
+                                       .join( model.RepositoryCategoryAssociation.table ) \
+                                       .join( model.Category.table ) \
+                                       .filter( and_( model.Category.table.c.id == trans.security.decode_id( kwd[ 'id' ] ),
+                                                      model.RepositoryMetadata.table.c.downloadable == true() ) )
+        # The user performed a free text search on the ValidCategoryGrid.
+        if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE:
+            return trans.sa_session.query( model.Repository ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table ) \
+                                   .filter( model.RepositoryMetadata.table.c.downloadable == true() )
+        if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( model.Repository.type == rt_util.REPOSITORY_SUITE_DEFINITION ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .filter( or_( *trans.app.repository_registry.certified_level_one_clause_list ) ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table ) \
+                                   .filter( model.RepositoryMetadata.table.c.downloadable == true() )
+        else:
+            # The value of filter is None.
+            return trans.sa_session.query( model.Repository ) \
+                                   .filter( and_( model.Repository.table.c.deleted == false(),
+                                                  model.Repository.table.c.deprecated == false() ) ) \
+                                   .join( model.RepositoryMetadata.table ) \
+                                   .join( model.User.table ) \
+                                   .outerjoin( model.RepositoryCategoryAssociation.table ) \
+                                   .outerjoin( model.Category.table ) \
+                                   .filter( model.RepositoryMetadata.table.c.downloadable == true() )
diff --git a/lib/tool_shed/grids/repository_review_grids.py b/lib/tool_shed/grids/repository_review_grids.py
new file mode 100644
index 0000000..4f16e37
--- /dev/null
+++ b/lib/tool_shed/grids/repository_review_grids.py
@@ -0,0 +1,435 @@
+import logging
+
+from markupsafe import escape
+from sqlalchemy import and_, false, null, or_, true
+
+from galaxy.web.framework.helpers import grids
+from galaxy.webapps.tool_shed import model
+from tool_shed.grids.repository_grids import RepositoryGrid
+from tool_shed.util import hg_util, metadata_util
+
+log = logging.getLogger( __name__ )
+
+
+class ComponentGrid( grids.Grid ):
+
+    class NameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, component ):
+            return escape( component.name )
+
+    class DescriptionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, component ):
+            return escape( component.description )
+
+    title = "Repository review components"
+    model_class = model.Component
+    template = '/webapps/tool_shed/repository_review/grid.mako'
+    default_sort_key = "name"
+    use_hide_message = False
+    columns = [
+        NameColumn( "Name",
+                    key="Component.name",
+                    link=( lambda item: dict( operation="edit", id=item.id ) ),
+                    attach_popup=False ),
+        DescriptionColumn( "Description",
+                           key="Component.description",
+                           attach_popup=False )
+    ]
+    default_filter = {}
+    global_actions = [
+        grids.GridAction( "Add new component",
+                          dict( controller='repository_review', action='manage_components', operation='create' ) )
+    ]
+    operations = []
+    standard_filters = []
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+
+class RepositoriesWithReviewsGrid( RepositoryGrid ):
+    # This grid filters out repositories that have been marked as either deprecated or deleted.
+
+    class WithReviewsRevisionColumn( grids.GridColumn ):
+        def get_value( self, trans, grid, repository ):
+            # Restrict to revisions that have been reviewed.
+            if repository.reviews:
+                rval = ''
+                repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+                for review in repository.reviews:
+                    changeset_revision = review.changeset_revision
+                    rev, label = hg_util.get_rev_label_from_changeset_revision( repo, changeset_revision )
+                    rval += '<a href="manage_repository_reviews_of_revision?id=%s&changeset_revision=%s">%s</a><br/>' % \
+                        ( trans.security.encode_id( repository.id ), changeset_revision, label )
+                return rval
+            return ''
+
+    class WithoutReviewsRevisionColumn( grids.GridColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            # Restrict the options to revisions that have not yet been reviewed.
+            repository_metadata_revisions = metadata_util.get_repository_metadata_revisions_for_review( repository, reviewed=False )
+            if repository_metadata_revisions:
+                rval = ''
+                for repository_metadata in repository_metadata_revisions:
+                    rev, label, changeset_revision = \
+                        hg_util.get_rev_label_changeset_revision_from_repository_metadata( trans.app,
+                                                                                           repository_metadata,
+                                                                                           repository=repository,
+                                                                                           include_date=True,
+                                                                                           include_hash=False )
+                    rval += '<a href="manage_repository_reviews_of_revision?id=%s&changeset_revision=%s">%s</a><br/>' % \
+                        ( trans.security.encode_id( repository.id ), changeset_revision, label )
+                return rval
+            return ''
+
+    class ReviewersColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            rval = ''
+            if repository.reviewers:
+                for user in repository.reviewers:
+                    rval += '<a class="view-info" href="repository_reviews_by_user?id=%s">' % trans.security.encode_id( user.id )
+                    rval += '%s</a> | ' % user.username
+                rval = rval.rstrip( ' | ' )
+            return rval
+
+    class RatingColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
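+            # Render each review's rating as five disabled radio-button stars,
+            # checking the star nearest the stored rating.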
+            rval = ''
+            for review in repository.reviews:
+                if review.rating:
+                    for index in range( 1, 6 ):
+                        rval += '<input '
+                        rval += 'name="star1-%s" ' % trans.security.encode_id( review.id )
+                        rval += 'type="radio" '
+                        rval += 'class="community_rating_star star" '
+                        rval += 'disabled="disabled" '
+                        rval += 'value="%s" ' % str( review.rating )
+                        if review.rating > ( index - 0.5 ) and review.rating < ( index + 0.5 ):
+                            rval += 'checked="checked" '
+                        rval += '/>'
+                rval += '<br/>'
+            return rval
+
+    class ApprovedColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, repository ):
+            rval = ''
+            for review in repository.reviews:
+                if review.approved:
+                    rval += '%s<br/>' % review.approved
+            return rval
+
+    title = "All reviewed repositories"
+    model_class = model.Repository
+    template = '/webapps/tool_shed/repository_review/grid.mako'
+    default_sort_key = "Repository.name"
+    columns = [
+        RepositoryGrid.NameColumn( "Repository name",
+                                   key="name",
+                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                   attach_popup=True ),
+        RepositoryGrid.UserColumn( "Owner",
+                                   model_class=model.User,
+                                   attach_popup=False,
+                                   key="User.username" ),
+        WithReviewsRevisionColumn( "Reviewed revisions" ),
+        ReviewersColumn( "Reviewers", attach_popup=False ),
+        RatingColumn( "Rating", attach_popup=False ),
+        ApprovedColumn( "Approved", attach_popup=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[ 0 ] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [
+        grids.GridOperation( "Inspect repository revisions",
+                             allow_multiple=False,
+                             condition=( lambda item: not item.deleted ),
+                             async_compatible=False )
+    ]
+
+    def build_initial_query( self, trans, **kwd ):
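+        # Join reviewed repositories to their reviews and owners; the outer
+        # joins keep reviews that have no associated component reviews yet.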
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( and_( model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false() ) ) \
+                               .join( ( model.RepositoryReview.table, model.RepositoryReview.table.c.repository_id == model.Repository.table.c.id ) ) \
+                               .join( ( model.User.table, model.User.table.c.id == model.Repository.table.c.user_id ) ) \
+                               .outerjoin( ( model.ComponentReview.table, model.ComponentReview.table.c.repository_review_id == model.RepositoryReview.table.c.id ) ) \
+                               .outerjoin( ( model.Component.table, model.Component.table.c.id == model.ComponentReview.table.c.component_id ) )
+
+
+class RepositoriesWithoutReviewsGrid( RepositoriesWithReviewsGrid ):
+    # This grid filters out repositories that have been marked as either deprecated or deleted.
+    title = "Repositories with no reviews"
+    columns = [
+        RepositoriesWithReviewsGrid.NameColumn( "Repository name",
+                                                key="name",
+                                                link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                attach_popup=True ),
+        RepositoriesWithReviewsGrid.DescriptionColumn( "Synopsis",
+                                                       key="description",
+                                                       attach_popup=False ),
+        RepositoriesWithReviewsGrid.WithoutReviewsRevisionColumn( "Revisions for review" ),
+        RepositoriesWithReviewsGrid.UserColumn( "Owner",
+                                                model_class=model.User,
+                                                attach_popup=False,
+                                                key="User.username" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+                                                cols_to_filter=[ columns[ 0 ], columns[ 1 ] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [ grids.GridOperation( "Inspect repository revisions",
+                                        allow_multiple=False,
+                                        condition=( lambda item: not item.deleted ),
+                                        async_compatible=False ) ]
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( and_( model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false(),
+                                              model.Repository.reviews == null() ) ) \
+                               .join( model.User.table )
+
+
+class RepositoriesReadyForReviewGrid( RepositoriesWithoutReviewsGrid ):
+    # Repositories that are ready for human review are those that either:
+    # 1) Have no tools
+    # 2) Have tools that have been proven to be functionally correct within Galaxy.
+    # This grid filters out repositories that have been marked as either deprecated or deleted.
+    title = "Repositories ready for review"
+    columns = [
+        RepositoriesWithoutReviewsGrid.NameColumn( "Repository name",
+                                                   key="name",
+                                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                   attach_popup=True ),
+        RepositoriesWithoutReviewsGrid.DescriptionColumn( "Synopsis",
+                                                          key="description",
+                                                          attach_popup=False ),
+        RepositoriesWithoutReviewsGrid.WithoutReviewsRevisionColumn( "Revisions for review" ),
+        RepositoriesWithoutReviewsGrid.UserColumn( "Owner",
+                                                   model_class=model.User,
+                                                   attach_popup=False,
+                                                   key="User.username" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+                                                cols_to_filter=[ columns[ 0 ], columns[ 1 ] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [ grids.GridOperation( "Inspect repository revisions",
+                                        allow_multiple=False,
+                                        condition=( lambda item: not item.deleted ),
+                                        async_compatible=False ) ]
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( and_( model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false(),
+                                              model.Repository.reviews == null() ) ) \
+                               .join( model.RepositoryMetadata.table ) \
+                               .filter( and_( model.RepositoryMetadata.table.c.downloadable == true(),
+                                              or_( model.RepositoryMetadata.table.c.includes_tools == false(),
+                                                   and_( model.RepositoryMetadata.table.c.includes_tools == true(),
+                                                         model.RepositoryMetadata.table.c.tools_functionally_correct == true() ) ) ) ) \
+                               .join( model.User.table )
+
+
+class RepositoriesReviewedByMeGrid( RepositoriesWithReviewsGrid ):
+    # This grid filters out repositories that have been marked as either deprecated or deleted.
+
+    columns = [
+        RepositoriesWithReviewsGrid.NameColumn( "Repository name",
+                                                key="name",
+                                                link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                attach_popup=True ),
+        RepositoriesWithReviewsGrid.UserColumn( "Owner", attach_popup=False ),
+        RepositoriesWithReviewsGrid.WithReviewsRevisionColumn( "Reviewed revisions" ),
+        RepositoriesWithReviewsGrid.ReviewersColumn( "Reviewers", attach_popup=False ),
+        RepositoriesWithReviewsGrid.RatingColumn( "Rating", attach_popup=False ),
+        RepositoriesWithReviewsGrid.ApprovedColumn( "Approved", attach_popup=False )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[ 0 ] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( and_( model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false() ) ) \
+                               .join( ( model.RepositoryReview.table, model.RepositoryReview.table.c.repository_id == model.Repository.table.c.id ) ) \
+                               .filter( model.RepositoryReview.table.c.user_id == trans.user.id ) \
+                               .join( ( model.User.table, model.User.table.c.id == model.RepositoryReview.table.c.user_id ) ) \
+                               .outerjoin( ( model.ComponentReview.table, model.ComponentReview.table.c.repository_review_id == model.RepositoryReview.table.c.id ) ) \
+                               .outerjoin( ( model.Component.table, model.Component.table.c.id == model.ComponentReview.table.c.component_id ) )
+
+
+class RepositoryReviewsByUserGrid( grids.Grid ):
+    # This grid filters out repositories that have been marked as deprecated.
+
+    class RepositoryNameColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, review ):
+            return escape( review.repository.name )
+
+    class RepositoryDescriptionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, review ):
+            return escape( review.repository.description )
+
+    class RevisionColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, review ):
+            encoded_review_id = trans.security.encode_id( review.id )
+            rval = '<a class="action-button" href="'
+            if review.user == trans.user:
+                rval += 'edit_review'
+            else:
+                rval += 'browse_review'
+            revision_label = hg_util.get_revision_label( trans.app,
+                                                         review.repository,
+                                                         review.changeset_revision,
+                                                         include_date=True,
+                                                         include_hash=False )
+            rval += '?id=%s">%s</a>' % ( encoded_review_id, revision_label )
+            return rval
+
+    class RatingColumn( grids.TextColumn ):
+
+        def get_value( self, trans, grid, review ):
+            if review.rating:
+                rval = ''
+                for index in range( 1, 6 ):
+                    rval += '<input '
+                    rval += 'name="star1-%s" ' % trans.security.encode_id( review.id )
+                    rval += 'type="radio" '
+                    rval += 'class="community_rating_star star" '
+                    rval += 'disabled="disabled" '
+                    rval += 'value="%s" ' % str( review.rating )
+                    if review.rating > ( index - 0.5 ) and review.rating < ( index + 0.5 ):
+                        rval += 'checked="checked" '
+                    rval += '/>'
+                return rval
+            return ''
+
+    title = "Reviews by user"
+    model_class = model.RepositoryReview
+    template = '/webapps/tool_shed/repository_review/grid.mako'
+    default_sort_key = 'repository_id'
+    use_hide_message = False
+    columns = [
+        RepositoryNameColumn( "Repository Name",
+                              model_class=model.Repository,
+                              key="Repository.name",
+                              link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                              attach_popup=True ),
+        RepositoryDescriptionColumn( "Description",
+                                     model_class=model.Repository,
+                                     key="Repository.description",
+                                     attach_popup=False ),
+        RevisionColumn( "Revision", attach_popup=False ),
+        RatingColumn( "Rating", attach_popup=False ),
+    ]
+    # Override these
+    default_filter = {}
+    global_actions = []
+    operations = [
+        grids.GridOperation( "Inspect repository revisions",
+                             allow_multiple=False,
+                             condition=( lambda item: not item.deleted ),
+                             async_compatible=False )
+    ]
+    standard_filters = []
+    num_rows_per_page = 50
+    preserve_state = False
+    use_paging = False
+
+    def build_initial_query( self, trans, **kwd ):
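+        # kwd[ 'id' ] is the encoded id of the user whose reviews are being listed.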
+        user_id = trans.security.decode_id( kwd[ 'id' ] )
+        return trans.sa_session.query( model.RepositoryReview ) \
+                               .filter( and_( model.RepositoryReview.table.c.deleted == false(),
+                                              model.RepositoryReview.table.c.user_id == user_id ) ) \
+                               .join( ( model.Repository.table, model.RepositoryReview.table.c.repository_id == model.Repository.table.c.id ) ) \
+                               .filter( model.Repository.table.c.deprecated == false() )
+
+
+class ReviewedRepositoriesIOwnGrid( RepositoriesWithReviewsGrid ):
+    title = "Reviewed repositories I own"
+    columns = [
+        RepositoriesWithReviewsGrid.NameColumn( "Repository name",
+                                                key="name",
+                                                link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                attach_popup=True ),
+        RepositoriesWithReviewsGrid.WithReviewsRevisionColumn( "Reviewed revisions" ),
+        RepositoriesWithReviewsGrid.WithoutReviewsRevisionColumn( "Revisions for review" ),
+        RepositoriesWithReviewsGrid.ReviewersColumn( "Reviewers", attach_popup=False ),
+        RepositoryGrid.DeprecatedColumn( "Deprecated" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name",
+                                                cols_to_filter=[ columns[0] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [
+        grids.GridOperation( "Inspect repository revisions",
+                             allow_multiple=False,
+                             condition=( lambda item: not item.deleted ),
+                             async_compatible=False )
+    ]
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.Repository ) \
+                               .join( ( model.RepositoryReview.table, model.RepositoryReview.table.c.repository_id == model.Repository.table.c.id ) ) \
+                               .filter( model.Repository.table.c.user_id == trans.user.id ) \
+                               .join( ( model.User.table, model.User.table.c.id == model.RepositoryReview.table.c.user_id ) ) \
+                               .outerjoin( ( model.ComponentReview.table, model.ComponentReview.table.c.repository_review_id == model.RepositoryReview.table.c.id ) ) \
+                               .outerjoin( ( model.Component.table, model.Component.table.c.id == model.ComponentReview.table.c.component_id ) )
+
+
+class RepositoriesWithNoToolTestsGrid( RepositoriesWithoutReviewsGrid ):
+    # Repositories that contain tools with no tests or test data are those whose
+    # downloadable revisions include tools that have not been proven to be
+    # functionally correct within Galaxy.
+    # This grid filters out repositories that have been marked as either deprecated or deleted.
+    title = "Repositories that contain tools with no tests or test data"
+    columns = [
+        RepositoriesWithoutReviewsGrid.NameColumn( "Repository name",
+                                                   key="name",
+                                                   link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+                                                   attach_popup=True ),
+        RepositoriesWithoutReviewsGrid.DescriptionColumn( "Synopsis",
+                                                          key="description",
+                                                          attach_popup=False ),
+        RepositoriesWithoutReviewsGrid.WithoutReviewsRevisionColumn( "Revisions for review" ),
+        RepositoriesWithoutReviewsGrid.UserColumn( "Owner",
+                                                   model_class=model.User,
+                                                   attach_popup=False,
+                                                   key="User.username" )
+    ]
+    columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+                                                cols_to_filter=[ columns[ 0 ], columns[ 1 ] ],
+                                                key="free-text-search",
+                                                visible=False,
+                                                filterable="standard" ) )
+    operations = [ grids.GridOperation( "Inspect repository revisions",
+                                        allow_multiple=False,
+                                        condition=( lambda item: not item.deleted ),
+                                        async_compatible=False ) ]
+
+    def build_initial_query( self, trans, **kwd ):
+        return trans.sa_session.query( model.Repository ) \
+                               .filter( and_( model.Repository.table.c.deleted == false(),
+                                              model.Repository.table.c.deprecated == false() ) ) \
+                               .join( model.RepositoryMetadata.table ) \
+                               .filter( and_( model.RepositoryMetadata.table.c.downloadable == true(),
+                                              model.RepositoryMetadata.table.c.includes_tools == true(),
+                                              model.RepositoryMetadata.table.c.tools_functionally_correct == false() ) ) \
+                               .join( model.User.table )
diff --git a/lib/tool_shed/grids/util.py b/lib/tool_shed/grids/util.py
new file mode 100644
index 0000000..04e1b74
--- /dev/null
+++ b/lib/tool_shed/grids/util.py
@@ -0,0 +1,188 @@
+import logging
+
+from galaxy.web.form_builder import SelectField
+from tool_shed.util import hg_util, metadata_util
+
+log = logging.getLogger( __name__ )
+
+
+def build_approved_select_field( trans, name, selected_value=None, for_component=True ):
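+    """
+    Return a SelectField for a review's approved state.  Component reviews get an
+    additional 'Not applicable' option, which is the default when no value has
+    been selected.
+    """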
+    options = [ ( 'No', trans.model.ComponentReview.approved_states.NO ),
+                ( 'Yes', trans.model.ComponentReview.approved_states.YES ) ]
+    if for_component:
+        options.append( ( 'Not applicable', trans.model.ComponentReview.approved_states.NA ) )
+        if selected_value is None:
+            selected_value = trans.model.ComponentReview.approved_states.NA
+    select_field = SelectField( name=name )
+    for option_tup in options:
+        selected = selected_value and option_tup[ 1 ] == selected_value
+        select_field.add_option( option_tup[ 0 ], option_tup[ 1 ], selected=selected )
+    return select_field
+
+
+def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True,
+                                           downloadable=False, reviewed=False, not_reviewed=False ):
+    """
+    Build a SelectField whose options are the changeset_revision strings of certain revisions of the
+    received repository.
+    """
+    options = []
+    changeset_tups = []
+    refresh_on_change_values = []
+    if downloadable:
+        # Restrict the options to downloadable revisions.
+        repository_metadata_revisions = repository.downloadable_revisions
+    elif reviewed:
+        # Restrict the options to revisions that have been reviewed.
+        repository_metadata_revisions = []
+        metadata_changeset_revision_hashes = []
+        for metadata_revision in repository.metadata_revisions:
+            metadata_changeset_revision_hashes.append( metadata_revision.changeset_revision )
+        for review in repository.reviews:
+            if review.changeset_revision in metadata_changeset_revision_hashes:
+                repository_metadata_revisions.append( review.repository_metadata )
+    elif not_reviewed:
+        # Restrict the options to revisions that have not yet been reviewed.
+        repository_metadata_revisions = []
+        reviewed_metadata_changeset_revision_hashes = []
+        for review in repository.reviews:
+            reviewed_metadata_changeset_revision_hashes.append( review.changeset_revision )
+        for metadata_revision in repository.metadata_revisions:
+            if metadata_revision.changeset_revision not in reviewed_metadata_changeset_revision_hashes:
+                repository_metadata_revisions.append( metadata_revision )
+    else:
+        # Restrict the options to all revisions that have associated metadata.
+        repository_metadata_revisions = repository.metadata_revisions
+    for repository_metadata in repository_metadata_revisions:
+        rev, label, changeset_revision = \
+            hg_util.get_rev_label_changeset_revision_from_repository_metadata( trans.app,
+                                                                               repository_metadata,
+                                                                               repository=repository,
+                                                                               include_date=True,
+                                                                               include_hash=False )
+        changeset_tups.append( ( rev, label, changeset_revision ) )
+        refresh_on_change_values.append( changeset_revision )
+    # Sort options by the revision label.  Even though the downloadable_revisions query sorts by update_time,
+    # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time.
+    for changeset_tup in sorted( changeset_tups ):
+        # Display the latest revision first.
+        options.insert( 0, ( changeset_tup[ 1 ], changeset_tup[ 2 ] ) )
+    if add_id_to_name:
+        name = 'changeset_revision_%d' % repository.id
+    else:
+        name = 'changeset_revision'
+    select_field = SelectField( name=name,
+                                refresh_on_change=True,
+                                refresh_on_change_values=refresh_on_change_values )
+    for option_tup in options:
+        selected = selected_value and option_tup[ 1 ] == selected_value
+        select_field.add_option( option_tup[ 0 ], option_tup[ 1 ], selected=selected )
+    return select_field
+
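+# Illustrative call (names assumed): build a selector limited to the
+# downloadable revisions of a repository record:
+#
+#     field = build_changeset_revision_select_field( trans, repository,
+#                                                    downloadable=True )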
+
+def filter_by_latest_downloadable_changeset_revision_that_has_missing_tool_test_components( trans, repository ):
+    """
+    Inspect the latest downloadable changeset revision for the received repository to see if it
+    includes tools that are either missing functional tests or functional test data.  If the
+    changeset revision includes tools but is missing tool test components, return the changeset
+    revision hash.  This will filter out repositories of type repository_suite_definition and
+    tool_dependency_definition.
+    """
+    repository_metadata = get_latest_downloadable_repository_metadata_if_it_includes_tools( trans, repository )
+    if repository_metadata is not None \
+            and repository_metadata.missing_test_components:
+        return repository_metadata.changeset_revision
+    return None
+
+
+def filter_by_latest_metadata_changeset_revision_that_has_invalid_tools( trans, repository ):
+    """
+    Inspect the latest changeset revision with associated metadata for the received repository
+    to see if it has invalid tools.  This will filter out repositories of type repository_suite_definition
+    and tool_dependency_definition.
+    """
+    repository_metadata = get_latest_repository_metadata_if_it_includes_invalid_tools( trans, repository )
+    if repository_metadata is not None:
+        return repository_metadata.changeset_revision
+    return None
+
+
+def get_latest_downloadable_repository_metadata( trans, repository ):
+    """
+    Return the latest downloadable repository_metadata record for the received repository.  This will
+    return repositories of type unrestricted as well as types repository_suite_definition and
+    tool_dependency_definition.
+    """
+    encoded_repository_id = trans.security.encode_id( repository.id )
+    repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+    tip_ctx = str( repo.changectx( repo.changelog.tip() ) )
+    repository_metadata = None
+    try:
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, encoded_repository_id, tip_ctx )
+        if repository_metadata is not None and repository_metadata.downloadable:
+            return repository_metadata
+        return None
+    except Exception:
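+        # No metadata record could be loaded for the tip, so walk backwards
+        # to the most recent downloadable metadata revision, if any.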
+        latest_downloadable_revision = metadata_util.get_previous_metadata_changeset_revision( repository,
+                                                                                               repo,
+                                                                                               tip_ctx,
+                                                                                               downloadable=True )
+        if latest_downloadable_revision == hg_util.INITIAL_CHANGELOG_HASH:
+            return None
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                           encoded_repository_id,
+                                                                                           latest_downloadable_revision )
+        if repository_metadata is not None and repository_metadata.downloadable:
+            return repository_metadata
+        return None
+
+
+def get_latest_downloadable_repository_metadata_if_it_includes_tools( trans, repository ):
+    """
+    Return the latest downloadable repository_metadata record for the received repository if its
+    includes_tools attribute is True.  This will filter out repositories of type repository_suite_definition
+    and tool_dependency_definition.
+    """
+    repository_metadata = get_latest_downloadable_repository_metadata( trans, repository )
+    if repository_metadata is not None and repository_metadata.includes_tools:
+        return repository_metadata
+    return None
+
+
+def get_latest_repository_metadata( trans, repository ):
+    """
+    Return the latest repository_metadata record for the received repository if it exists.  This will
+    return repositories of type unrestricted as well as types repository_suite_definition and
+    tool_dependency_definition.
+    """
+    encoded_repository_id = trans.security.encode_id( repository.id )
+    repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+    tip_ctx = str( repo.changectx( repo.changelog.tip() ) )
+    try:
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app, encoded_repository_id, tip_ctx )
+        return repository_metadata
+    except Exception:
+        previous_metadata_revision = metadata_util.get_previous_metadata_changeset_revision( repository,
+                                                                                             repo,
+                                                                                             tip_ctx,
+                                                                                             downloadable=False )
+        if previous_metadata_revision == hg_util.INITIAL_CHANGELOG_HASH:
+            return None
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( trans.app,
+                                                                                           encoded_repository_id,
+                                                                                           previous_metadata_revision )
+        return repository_metadata
+
+
+def get_latest_repository_metadata_if_it_includes_invalid_tools( trans, repository ):
+    """
+    Return the latest repository_metadata record for the received repository that contains invalid
+    tools if one exists.  This will filter out repositories of type repository_suite_definition and
+    tool_dependency_definition.
+    """
+    repository_metadata = get_latest_repository_metadata( trans, repository )
+    if repository_metadata is not None:
+        metadata = repository_metadata.metadata
+        if metadata is not None and 'invalid_tools' in metadata:
+            return repository_metadata
+    return None
diff --git a/lib/tool_shed/managers/__init__.py b/lib/tool_shed/managers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/managers/groups.py b/lib/tool_shed/managers/groups.py
new file mode 100644
index 0000000..a85fde8
--- /dev/null
+++ b/lib/tool_shed/managers/groups.py
@@ -0,0 +1,123 @@
+"""
+Manager and Serializer for TS groups.
+"""
+import logging
+
+from sqlalchemy import false, true
+from sqlalchemy.orm.exc import MultipleResultsFound
+from sqlalchemy.orm.exc import NoResultFound
+
+from galaxy.exceptions import Conflict
+from galaxy.exceptions import InconsistentDatabase
+from galaxy.exceptions import InternalServerError
+from galaxy.exceptions import ItemAccessibilityException
+from galaxy.exceptions import ObjectNotFound
+from galaxy.exceptions import RequestParameterInvalidException
+
+log = logging.getLogger( __name__ )
+
+
+# =============================================================================
+class GroupManager( object ):
+    """
+    Interface/service object for interacting with TS groups.
+    """
+
+    def __init__( self, *args, **kwargs ):
+        super( GroupManager, self ).__init__( *args, **kwargs )
+
+    def get( self, trans, decoded_group_id=None, name=None ):
+        """
+        Get the group from the DB based on its ID or name.
+
+        :param  decoded_group_id:       decoded group id
+        :type   decoded_group_id:       int
+
+        :returns:   the requested group
+        :rtype:     Group
+        """
+        if decoded_group_id is None and name is None:
+            raise RequestParameterInvalidException( 'You must supply either the ID or the name of the group.' )
+
+        name_query = trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name == name )
+        id_query = trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.id == decoded_group_id )
+
+        try:
+            group = id_query.one() if decoded_group_id else name_query.one()
+        except MultipleResultsFound:
+            raise InconsistentDatabase( 'Multiple groups found with the same identifier.' )
+        except NoResultFound:
+            raise ObjectNotFound( 'No group found with the identifier provided.' )
+        except Exception:
+            raise InternalServerError( 'Error loading from the database.' )
+        return group
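+
+    # Lookup sketch (hypothetical group name): `get` raises ObjectNotFound
+    # instead of returning None, so absence must be handled by the caller:
+    #
+    #     try:
+    #         group = GroupManager().get( trans, name='devteam' )
+    #     except ObjectNotFound:
+    #         group = None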
+
+    def create( self, trans, name, description='' ):
+        """
+        Create a new group.
+        """
+        if not trans.user_is_admin():
+            raise ItemAccessibilityException( 'Only administrators can create groups.' )
+        else:
+            # self.get() raises ObjectNotFound when no group matches, so the
+            # existence check cannot be a simple truth test on its result.
+            try:
+                existing_group = self.get( trans, name=name )
+            except ObjectNotFound:
+                existing_group = None
+            if existing_group is not None:
+                raise Conflict( 'Group with the given name already exists. Name: ' + str( name ) )
+            # TODO add description field to the model
+            group = trans.app.model.Group( name=name )
+            trans.sa_session.add( group )
+            trans.sa_session.flush()
+            return group
+
+    def update( self, trans, group, name=None, description=None ):
+        """
+        Update the given group
+        """
+        changed = False
+        if not trans.user_is_admin():
+            raise ItemAccessibilityException( 'Only administrators can update groups.' )
+        if group.deleted:
+            raise RequestParameterInvalidException( 'You cannot modify a deleted group. Undelete it first.' )
+        if name is not None:
+            group.name = name
+            changed = True
+        if description is not None:
+            group.description = description
+            changed = True
+        if changed:
+            trans.sa_session.add( group )
+            trans.sa_session.flush()
+        return group
+
+    def delete( self, trans, group, undelete=False ):
+        """
+        Mark given group deleted/undeleted based on the flag.
+        """
+        if not trans.user_is_admin():
+            raise ItemAccessibilityException( 'Only administrators can delete and undelete groups.' )
+        if undelete:
+            group.deleted = False
+        else:
+            group.deleted = True
+        trans.sa_session.add( group )
+        trans.sa_session.flush()
+        return group
+
+    def list( self, trans, deleted=False ):
+        """
+        Return a list of groups from the DB.
+
+        :returns: query that will emit all groups
+        :rtype:   sqlalchemy query
+        """
+        is_admin = trans.user_is_admin()
+        query = trans.sa_session.query( trans.app.model.Group )
+        if is_admin:
+            if deleted is None:
+                #  Flag is not specified, do not filter on it.
+                pass
+            elif deleted:
+                query = query.filter( trans.app.model.Group.table.c.deleted == true() )
+            else:
+                query = query.filter( trans.app.model.Group.table.c.deleted == false() )
+        else:
+            query = query.filter( trans.app.model.Group.table.c.deleted == false() )
+        return query
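+
+    # The value returned by list() is a lazy SQLAlchemy query; a caller
+    # materializes it when needed, e.g. (sketch, names assumed):
+    #
+    #     group_names = [ g.name for g in GroupManager().list( trans ) ]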
diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py
new file mode 100644
index 0000000..d74845d
--- /dev/null
+++ b/lib/tool_shed/managers/repositories.py
@@ -0,0 +1,66 @@
+"""
+Manager and Serializer for TS repositories.
+"""
+import logging
+
+from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
+
+from galaxy.exceptions import (InconsistentDatabase, InternalServerError,
+    RequestParameterInvalidException)
+
+log = logging.getLogger( __name__ )
+
+
+# =============================================================================
+class RepoManager( object ):
+    """
+    Interface/service object for interacting with TS repositories.
+    """
+
+    def __init__( self, *args, **kwargs ):
+        super( RepoManager, self ).__init__( *args, **kwargs )
+
+    def get( self, trans, decoded_repo_id ):
+        """
+        Get the repo from the DB.
+
+        :param  decoded_repo_id:       decoded repo id
+        :type   decoded_repo_id:       int
+
+        :returns:   the requested repo
+        :rtype:     Repository
+        """
+        try:
+            repo = trans.sa_session.query( trans.app.model.Repository ).filter( trans.app.model.Repository.table.c.id == decoded_repo_id ).one()
+        except MultipleResultsFound:
+            raise InconsistentDatabase( 'Multiple repositories found with the same id.' )
+        except NoResultFound:
+            raise RequestParameterInvalidException( 'No repository found with the id provided.' )
+        except Exception:
+            raise InternalServerError( 'Error loading from the database.' )
+        return repo
+
+    def list_by_owner( self, trans, user_id ):
+        """
+        Return a list of repositories owned by a given TS user from the DB.
+
+        :returns: query that will emit repositories owned by given user
+        :rtype:   sqlalchemy query
+        """
+        query = trans.sa_session.query( trans.app.model.Repository ).filter( trans.app.model.Repository.table.c.user_id == user_id )
+        return query
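+
+    # Sketch (hypothetical user id): the returned query is lazy, so iterate
+    # or materialize it:
+    #
+    #     for repo in RepoManager().list_by_owner( trans, user_id=42 ):
+    #         log.debug( repo.name )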
+
+    def create( self, trans, name, description='' ):
+        """
+        Create a new repository.  Not yet implemented.
+        """
+
+    def update( self, trans, repo, name=None, description=None ):
+        """
+        Update the given repository.  Not yet implemented.
+        """
+
+    def delete( self, trans, repo, undelete=False ):
+        """
+        Mark the given repository deleted/undeleted based on the flag.  Not yet implemented.
+        """
diff --git a/lib/tool_shed/metadata/__init__.py b/lib/tool_shed/metadata/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/metadata/metadata_generator.py b/lib/tool_shed/metadata/metadata_generator.py
new file mode 100644
index 0000000..026af27
--- /dev/null
+++ b/lib/tool_shed/metadata/metadata_generator.py
@@ -0,0 +1,1122 @@
+import json
+import logging
+import os
+import tempfile
+
+from sqlalchemy import and_
+
+from galaxy import util
+from galaxy.tools.data_manager.manager import DataManager
+from galaxy.tools.loader_directory import looks_like_a_tool
+from galaxy.tools.parser.interface import TestCollectionDef
+from galaxy.web import url_for
+from tool_shed.repository_types import util as rt_util
+from tool_shed.tools import tool_validator
+from tool_shed.util import (
+    basic_util,
+    common_util,
+    hg_util,
+    metadata_util,
+    readme_util,
+    repository_util,
+    shed_util_common as suc,
+    tool_dependency_util,
+    tool_util,
+    xml_util
+)
+
+log = logging.getLogger( __name__ )
+
+
+class MetadataGenerator( object ):
+
+    def __init__( self, app, repository=None, changeset_revision=None, repository_clone_url=None,
+                  shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
+                  resetting_all_metadata_on_repository=False, updating_installed_repository=False,
+                  persist=False, metadata_dict=None, user=None ):
+        self.app = app
+        self.user = user
+        self.repository = repository
+        if self.app.name == 'galaxy':
+            if changeset_revision is None and self.repository is not None:
+                self.changeset_revision = self.repository.changeset_revision
+            else:
+                self.changeset_revision = changeset_revision
+
+            if repository_clone_url is None and self.repository is not None:
+                self.repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, self.repository )
+            else:
+                self.repository_clone_url = repository_clone_url
+            if shed_config_dict is None:
+                if self.repository is not None:
+                    self.shed_config_dict = self.repository.get_shed_config_dict( self.app )
+                else:
+                    self.shed_config_dict = {}
+            else:
+                self.shed_config_dict = shed_config_dict
+            if relative_install_dir is None and self.repository is not None:
+                tool_path, relative_install_dir = self.repository.get_tool_relative_path( self.app )
+            if repository_files_dir is None and self.repository is not None:
+                repository_files_dir = self.repository.repo_files_directory( self.app )
+            if metadata_dict is None:
+                # Shed related tool panel configs are only relevant to Galaxy.
+                self.metadata_dict = { 'shed_config_filename': self.shed_config_dict.get( 'config_filename', None ) }
+            else:
+                self.metadata_dict = metadata_dict
+        else:
+            # We're in the Tool Shed.
+            if changeset_revision is None and self.repository is not None:
+                self.changeset_revision = self.repository.tip( self.app )
+            else:
+                self.changeset_revision = changeset_revision
+            if repository_clone_url is None and self.repository is not None:
+                self.repository_clone_url = \
+                    common_util.generate_clone_url_for_repository_in_tool_shed( self.user, self.repository )
+            else:
+                self.repository_clone_url = repository_clone_url
+            if shed_config_dict is None:
+                self.shed_config_dict = {}
+            else:
+                self.shed_config_dict = shed_config_dict
+            if relative_install_dir is None and self.repository is not None:
+                relative_install_dir = self.repository.repo_path( self.app )
+            if repository_files_dir is None and self.repository is not None:
+                repository_files_dir = self.repository.repo_path( self.app )
+            if metadata_dict is None:
+                self.metadata_dict = {}
+            else:
+                self.metadata_dict = metadata_dict
+        self.relative_install_dir = relative_install_dir
+        self.repository_files_dir = repository_files_dir
+        self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository
+        self.updating_installed_repository = updating_installed_repository
+        self.persist = persist
+        self.invalid_file_tups = []
+        self.sa_session = app.model.context.current
+        self.NOT_TOOL_CONFIGS = [ suc.DATATYPES_CONFIG_FILENAME,
+                                  rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME,
+                                  rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME,
+                                  suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME ]
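+
+    # Typical construction (sketch; assumes a Tool Shed `app` and a
+    # Repository record).  Arguments that are not supplied default to values
+    # derived from the repository itself:
+    #
+    #     generator = MetadataGenerator( app, repository=repository )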
+
+    def generate_data_manager_metadata( self, repo_dir, data_manager_config_filename, metadata_dict,
+                                        shed_config_dict=None ):
+        """
+        Update the received metadata_dict with information from the parsed data_manager_config_filename.
+        """
+        if data_manager_config_filename is None:
+            return metadata_dict
+        repo_path = self.repository.repo_path( self.app )
+        try:
+            # Galaxy Side.
+            repo_files_directory = self.repository.repo_files_directory( self.app )
+            repo_dir = repo_files_directory
+        except AttributeError:
+            # Tool Shed side.
+            repo_files_directory = repo_path
+        relative_data_manager_dir = util.relpath( os.path.split( data_manager_config_filename )[0], repo_dir )
+        rel_data_manager_config_filename = os.path.join( relative_data_manager_dir,
+                                                         os.path.split( data_manager_config_filename )[1] )
+        data_managers = {}
+        invalid_data_managers = []
+        data_manager_metadata = { 'config_filename': rel_data_manager_config_filename,
+                                  'data_managers': data_managers,
+                                  'invalid_data_managers': invalid_data_managers,
+                                  'error_messages': [] }
+        metadata_dict[ 'data_manager' ] = data_manager_metadata
+        tree, error_message = xml_util.parse_xml( data_manager_config_filename )
+        if tree is None:
+            # We are not able to load any data managers.
+            data_manager_metadata[ 'error_messages' ].append( error_message )
+            return metadata_dict
+        tool_path = None
+        if shed_config_dict:
+            tool_path = shed_config_dict.get( 'tool_path', None )
+        tools = {}
+        for tool in metadata_dict.get( 'tools', [] ):
+            tool_conf_name = tool[ 'tool_config' ]
+            if tool_path:
+                tool_conf_name = os.path.join( tool_path, tool_conf_name )
+            tools[ tool_conf_name ] = tool
+        root = tree.getroot()
+        data_manager_tool_path = root.get( 'tool_path', None )
+        if data_manager_tool_path:
+            relative_data_manager_dir = os.path.join( relative_data_manager_dir, data_manager_tool_path )
+        for i, data_manager_elem in enumerate( root.findall( 'data_manager' ) ):
+            tool_file = data_manager_elem.get( 'tool_file', None )
+            data_manager_id = data_manager_elem.get( 'id', None )
+            if data_manager_id is None:
+                log.error( 'Data Manager entry is missing id attribute in "%s".' % ( data_manager_config_filename ) )
+                invalid_data_managers.append( { 'index': i,
+                                                'error_message': 'Data Manager entry is missing id attribute' } )
+                continue
+            # FIXME: default behavior is to fall back to tool.name.
+            data_manager_name = data_manager_elem.get( 'name', data_manager_id )
+            version = data_manager_elem.get( 'version', DataManager.DEFAULT_VERSION )
+            guid = self.generate_guid_for_object( DataManager.GUID_TYPE, data_manager_id, version )
+            data_tables = []
+            if tool_file is None:
+                log.error( 'Data Manager entry is missing tool_file attribute in "%s".' % ( data_manager_config_filename ) )
+                invalid_data_managers.append( { 'index': i,
+                                                'error_message': 'Data Manager entry is missing tool_file attribute' } )
+                continue
+            else:
+                bad_data_table = False
+                for data_table_elem in data_manager_elem.findall( 'data_table' ):
+                    data_table_name = data_table_elem.get( 'name', None )
+                    if data_table_name is None:
+                        log.error( 'Data Manager data_table entry is missing name attribute in "%s".' % ( data_manager_config_filename ) )
+                        invalid_data_managers.append( { 'index': i,
+                                                        'error_message': 'Data Manager data_table entry is missing name attribute' } )
+                        bad_data_table = True
+                        break
+                    else:
+                        data_tables.append( data_table_name )
+                if bad_data_table:
+                    continue
+            data_manager_metadata_tool_file = os.path.normpath( os.path.join( relative_data_manager_dir, tool_file ) )
+            tool_metadata_tool_file = os.path.join( repo_files_directory, data_manager_metadata_tool_file )
+            tool = tools.get( tool_metadata_tool_file, None )
+            if tool is None:
+                log.error( "Unable to determine tools metadata for '%s'." % ( data_manager_metadata_tool_file ) )
+                invalid_data_managers.append( { 'index': i,
+                                                'error_message': 'Unable to determine tools metadata' } )
+                continue
+            data_managers[ data_manager_id ] = { 'id': data_manager_id,
+                                                 'name': data_manager_name,
+                                                 'guid': guid,
+                                                 'version': version,
+                                                 'tool_config_file': data_manager_metadata_tool_file,
+                                                 'data_tables': data_tables,
+                                                 'tool_guid': tool[ 'guid' ] }
+            log.debug( 'Loaded Data Manager tool_files: %s' % ( tool_file ) )
+        return metadata_dict
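+
+    # For reference, a minimal data manager config that the parser above
+    # accepts (illustrative sketch; id, file and table names are hypothetical):
+    #
+    #     <data_managers>
+    #         <data_manager tool_file="data_manager/fetch_genome.xml" id="fetch_genome">
+    #             <data_table name="all_fasta" />
+    #         </data_manager>
+    #     </data_managers>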
+
+    def generate_datatypes_metadata( self, tv, repository_files_dir, datatypes_config, metadata_dict ):
+        """Update the received metadata_dict with information from the parsed datatypes_config."""
+        tree, error_message = xml_util.parse_xml( datatypes_config )
+        if tree is None:
+            return metadata_dict
+        root = tree.getroot()
+        repository_datatype_code_files = []
+        datatype_files = root.find( 'datatype_files' )
+        if datatype_files is not None:
+            for elem in datatype_files.findall( 'datatype_file' ):
+                name = elem.get( 'name', None )
+                repository_datatype_code_files.append( name )
+            metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
+        datatypes = []
+        registration = root.find( 'registration' )
+        if registration is not None:
+            for elem in registration.findall( 'datatype' ):
+                converters = []
+                display_app_containers = []
+                datatypes_dict = {}
+                # Handle defined datatype attributes.
+                display_in_upload = elem.get( 'display_in_upload', None )
+                if display_in_upload:
+                    datatypes_dict[ 'display_in_upload' ] = display_in_upload
+                dtype = elem.get( 'type', None )
+                if dtype:
+                    datatypes_dict[ 'dtype' ] = dtype
+                extension = elem.get( 'extension', None )
+                if extension:
+                    datatypes_dict[ 'extension' ] = extension
+                max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+                if max_optional_metadata_filesize:
+                    datatypes_dict[ 'max_optional_metadata_filesize' ] = max_optional_metadata_filesize
+                mimetype = elem.get( 'mimetype', None )
+                if mimetype:
+                    datatypes_dict[ 'mimetype' ] = mimetype
+                subclass = elem.get( 'subclass', None )
+                if subclass:
+                    datatypes_dict[ 'subclass' ] = subclass
+                # Handle defined datatype converters and display applications.
+                for sub_elem in elem:
+                    if sub_elem.tag == 'converter':
+                        # <converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
+                        tool_config = sub_elem.attrib[ 'file' ]
+                        target_datatype = sub_elem.attrib[ 'target_datatype' ]
+                        # Parse the tool_config to get the guid.
+                        tool_config_path = hg_util.get_config_from_disk( tool_config, repository_files_dir )
+                        full_path = os.path.abspath( tool_config_path )
+                        tool, valid, error_message = \
+                            tv.load_tool_from_config( self.app.security.encode_id( self.repository.id ), full_path )
+                        if tool is None:
+                            guid = None
+                        else:
+                            guid = suc.generate_tool_guid( self.repository_clone_url, tool )
+                        converter_dict = dict( tool_config=tool_config,
+                                               guid=guid,
+                                               target_datatype=target_datatype )
+                        converters.append( converter_dict )
+                    elif sub_elem.tag == 'display':
+                        # <display file="ucsc/bigwig.xml" />
+                        # Should we store more than this?
+                        display_file = sub_elem.attrib[ 'file' ]
+                        display_app_dict = dict( display_file=display_file )
+                        display_app_containers.append( display_app_dict )
+                if converters:
+                    datatypes_dict[ 'converters' ] = converters
+                if display_app_containers:
+                    datatypes_dict[ 'display_app_containers' ] = display_app_containers
+                if datatypes_dict:
+                    datatypes.append( datatypes_dict )
+            if datatypes:
+                metadata_dict[ 'datatypes' ] = datatypes
+        return metadata_dict
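+
+    # Sketch of the datatypes_conf.xml structure inspected above (element and
+    # attribute names follow the parser; the values are hypothetical):
+    #
+    #     <datatypes>
+    #         <datatype_files>
+    #             <datatype_file name="mydatatypes.py" />
+    #         </datatype_files>
+    #         <registration>
+    #             <datatype extension="myext" type="galaxy.datatypes.data:Text" display_in_upload="true">
+    #                 <converter file="myext_to_txt.xml" target_datatype="txt" />
+    #                 <display file="ucsc/bigwig.xml" />
+    #             </datatype>
+    #         </registration>
+    #     </datatypes>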
+
+    def generate_environment_dependency_metadata( self, elem, valid_tool_dependencies_dict ):
+        """
+        The value of env_var_name must match the value of the "set_environment" type
+        in the tool config's <requirements> tag set, or the tool dependency will be
+        considered an orphan.
+        """
+        # The value of the received elem looks something like this:
+        # <set_environment version="1.0">
+        #    <environment_variable name="JAVA_JAR_PATH" action="set_to">$INSTALL_DIR</environment_variable>
+        # </set_environment>
+        for env_elem in elem:
+            # <environment_variable name="JAVA_JAR_PATH" action="set_to">$INSTALL_DIR</environment_variable>
+            env_name = env_elem.get( 'name', None )
+            if env_name:
+                requirements_dict = dict( name=env_name, type='set_environment' )
+                if 'set_environment' in valid_tool_dependencies_dict:
+                    valid_tool_dependencies_dict[ 'set_environment' ].append( requirements_dict )
+                else:
+                    valid_tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
+        return valid_tool_dependencies_dict
+
+    def generate_guid_for_object( self, guid_type, obj_id, version ):
+        tmp_url = common_util.remove_protocol_and_user_from_clone_url( self.repository_clone_url )
+        return '%s/%s/%s/%s' % ( tmp_url, guid_type, obj_id, version )
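+
+    # The resulting GUID is a plain path-like string, e.g. (hypothetical
+    # values): 'toolshed.example.org/repos/jane/my_repo/data_manager/fetch_genome/1.0'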
+
+    def generate_metadata_for_changeset_revision( self ):
+        """
+        Generate metadata for a repository using its files on disk.  To generate metadata
+        for changeset revisions older than the repository tip, the repository will have been
+        cloned to a temporary location and updated to a specified changeset revision to access
+        that changeset revision's disk files, so the value of self.repository_files_dir will not
+        always be self.repository.repo_path( self.app ) (it could be an absolute path to a temporary
+        directory containing a clone).  If it is an absolute path, the value of self.relative_install_dir
+        must contain repository.repo_path( self.app ).
+
+        The value of self.persist will be True when the installed repository contains a valid
+        tool_data_table_conf.xml.sample file, in which case the entries should ultimately be
+        persisted to the file referred to by self.app.config.shed_tool_data_table_config.
+        """
+        tv = tool_validator.ToolValidator( self.app )
+        if self.shed_config_dict is None:
+            self.shed_config_dict = {}
+        if self.updating_installed_repository:
+            # Keep the original tool shed repository metadata if setting metadata on a repository
+            # installed into a local Galaxy instance for which we have pulled updates.
+            original_repository_metadata = self.repository.metadata
+        else:
+            original_repository_metadata = None
+        readme_file_names = readme_util.get_readme_file_names( str( self.repository.name ) )
+        if self.app.name == 'galaxy':
+            # Shed related tool panel configs are only relevant to Galaxy.
+            metadata_dict = { 'shed_config_filename': self.shed_config_dict.get( 'config_filename' ) }
+        else:
+            metadata_dict = {}
+        readme_files = []
+        invalid_tool_configs = []
+        tool_dependencies_config = None
+        original_tool_data_path = self.app.config.tool_data_path
+        original_tool_data_table_config_path = self.app.config.tool_data_table_config_path
+        if self.resetting_all_metadata_on_repository:
+            if not self.relative_install_dir:
+                raise Exception( "The value of self.repository.repo_path must be set when resetting all metadata on a repository." )
+            # Keep track of the location where the repository is temporarily cloned so that we can
+            # strip the path when setting metadata.  The value of self.repository_files_dir is the
+            # full path to the temporary directory to which self.repository was cloned.
+            work_dir = self.repository_files_dir
+            files_dir = self.repository_files_dir
+            # Since we're working from a temporary directory, we can safely copy sample files included
+            # in the repository to the repository root.
+            self.app.config.tool_data_path = self.repository_files_dir
+            self.app.config.tool_data_table_config_path = self.repository_files_dir
+        else:
+            # Use a temporary working directory to copy all sample files.
+            work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-gmfcr" )
+            # All other files are on disk in the repository's repo_path, which is the value of
+            # self.relative_install_dir.
+            files_dir = self.relative_install_dir
+            if self.shed_config_dict.get( 'tool_path' ):
+                files_dir = os.path.join( self.shed_config_dict[ 'tool_path' ], files_dir )
+            self.app.config.tool_data_path = work_dir  # FIXME: Thread safe?
+            self.app.config.tool_data_table_config_path = work_dir
+        # Handle proprietary datatypes, if any.
+        datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir )
+        if datatypes_config:
+            metadata_dict = self.generate_datatypes_metadata( tv,
+                                                              files_dir,
+                                                              datatypes_config,
+                                                              metadata_dict )
+        # Get the relative path to all sample files included in the repository for storage in
+        # the repository's metadata.
+        sample_file_metadata_paths, sample_file_copy_paths = \
+            self.get_sample_files_from_disk( repository_files_dir=files_dir,
+                                             tool_path=self.shed_config_dict.get( 'tool_path' ),
+                                             relative_install_dir=self.relative_install_dir )
+        if sample_file_metadata_paths:
+            metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
+        # Copy all sample files included in the repository to a single directory location so we
+        # can load tools that depend on them.
+        for sample_file in sample_file_copy_paths:
+            tool_util.copy_sample_file( self.app, sample_file, dest_path=work_dir )
+            # If the list of sample files includes a tool_data_table_conf.xml.sample file, load
+            # its table elements into memory.
+            relative_path, filename = os.path.split( sample_file )
+            if filename == 'tool_data_table_conf.xml.sample':
+                new_table_elems, error_message = \
+                    self.app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
+                                                                                tool_data_path=self.app.config.tool_data_path,
+                                                                                shed_tool_data_table_config=self.app.config.shed_tool_data_table_config,
+                                                                                persist=False )
+                if error_message:
+                    self.invalid_file_tups.append( ( filename, error_message ) )
+        for root, dirs, files in os.walk( files_dir ):
+            if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+                if '.hg' in dirs:
+                    dirs.remove( '.hg' )
+                for name in files:
+                    # See if we have repository dependencies defined.
+                    if name == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
+                        path_to_repository_dependencies_config = os.path.join( root, name )
+                        metadata_dict, error_message = \
+                            self.generate_repository_dependency_metadata( path_to_repository_dependencies_config,
+                                                                          metadata_dict )
+                        if error_message:
+                            self.invalid_file_tups.append( ( name, error_message ) )
+                    # See if we have one or more READ_ME files.
+                    elif name.lower() in readme_file_names:
+                        relative_path_to_readme = self.get_relative_path_to_repository_file( root,
+                                                                                             name,
+                                                                                             self.relative_install_dir,
+                                                                                             work_dir,
+                                                                                             self.shed_config_dict )
+                        readme_files.append( relative_path_to_readme )
+                    # See if we have a tool config.
+                    elif looks_like_a_tool( os.path.join( root, name ), invalid_names=self.NOT_TOOL_CONFIGS ):
+                        full_path = str( os.path.abspath( os.path.join( root, name ) ) )  # the necessity of the str() cast here is unclear
+                        element_tree, error_message = xml_util.parse_xml( full_path )
+                        if element_tree is None:
+                            is_tool = False
+                        else:
+                            element_tree_root = element_tree.getroot()
+                            is_tool = element_tree_root.tag == 'tool'
+                        if is_tool:
+                            tool, valid, error_message = \
+                                tv.load_tool_from_config( self.app.security.encode_id( self.repository.id ),
+                                                          full_path )
+                            if tool is None:
+                                if not valid:
+                                    invalid_tool_configs.append( name )
+                                    self.invalid_file_tups.append( ( name, error_message ) )
+                            else:
+                                invalid_files_and_errors_tups = \
+                                    tv.check_tool_input_params( files_dir,
+                                                                name,
+                                                                tool,
+                                                                sample_file_copy_paths )
+                                can_set_metadata = True
+                                for tup in invalid_files_and_errors_tups:
+                                    if name in tup:
+                                        can_set_metadata = False
+                                        invalid_tool_configs.append( name )
+                                        break
+                                if can_set_metadata:
+                                    relative_path_to_tool_config = \
+                                        self.get_relative_path_to_repository_file( root,
+                                                                                   name,
+                                                                                   self.relative_install_dir,
+                                                                                   work_dir,
+                                                                                   self.shed_config_dict )
+                                    metadata_dict = self.generate_tool_metadata( relative_path_to_tool_config,
+                                                                                 tool,
+                                                                                 metadata_dict )
+                                else:
+                                    for tup in invalid_files_and_errors_tups:
+                                        self.invalid_file_tups.append( tup )
+                    # Find all exported workflows.
+                    elif name.endswith( '.ga' ):
+                        relative_path = os.path.join( root, name )
+                        if os.path.getsize( os.path.abspath( relative_path ) ) > 0:
+                            valid_exported_galaxy_workflow = False
+                            exported_workflow_dict = None
+                            with open( relative_path, 'rb' ) as fp:
+                                workflow_text = fp.read()
+                            if workflow_text:
+                                valid_exported_galaxy_workflow = True
+                                try:
+                                    exported_workflow_dict = json.loads( workflow_text )
+                                except Exception as e:
+                                    log.exception( "Skipping file %s since it does not seem to be a valid exported Galaxy workflow: %s"
+                                                   % ( str( relative_path ), str( e ) ) )
+                                    valid_exported_galaxy_workflow = False
+                            if valid_exported_galaxy_workflow and \
+                                    exported_workflow_dict is not None and \
+                                    'a_galaxy_workflow' in exported_workflow_dict and \
+                                    exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+                                metadata_dict = self.generate_workflow_metadata( relative_path,
+                                                                                 exported_workflow_dict,
+                                                                                 metadata_dict )
+        # Handle any data manager entries
+        data_manager_config = hg_util.get_config_from_disk( suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME, files_dir )
+        metadata_dict = self.generate_data_manager_metadata( files_dir,
+                                                             data_manager_config,
+                                                             metadata_dict,
+                                                             shed_config_dict=self.shed_config_dict )
+
+        if readme_files:
+            metadata_dict[ 'readme_files' ] = readme_files
+        # This step must be done after metadata for tools has been defined.
+        tool_dependencies_config = hg_util.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, files_dir )
+        if tool_dependencies_config:
+            metadata_dict, error_message = \
+                self.generate_tool_dependency_metadata( tool_dependencies_config,
+                                                        metadata_dict,
+                                                        original_repository_metadata=original_repository_metadata )
+            if error_message:
+                self.invalid_file_tups.append( ( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, error_message ) )
+        if invalid_tool_configs:
+            metadata_dict[ 'invalid_tools' ] = invalid_tool_configs
+        self.metadata_dict = metadata_dict
+        # Reset the value of the app's tool_data_path and tool_data_table_config_path to their respective original values.
+        self.app.config.tool_data_path = original_tool_data_path
+        self.app.config.tool_data_table_config_path = original_tool_data_table_config_path
+        basic_util.remove_dir( work_dir )
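+
+    # Usage sketch (names assumed): after the method runs, results live on
+    # the instance rather than being returned:
+    #
+    #     generator.generate_metadata_for_changeset_revision()
+    #     metadata = generator.metadata_dict       # the generated metadata
+    #     problems = generator.invalid_file_tups   # ( filename, error_message ) tuples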
+
+    def generate_package_dependency_metadata( self, elem, valid_tool_dependencies_dict, invalid_tool_dependencies_dict ):
+        """
+        Generate the metadata for a tool dependencies package defined for a repository.  The
+        value of package_name must match the value of the "package" type in the tool config's
+        <requirements> tag set.
+        """
+        # TODO: make this function a class.
+        repository_dependency_is_valid = True
+        repository_dependency_tup = []
+        requirements_dict = {}
+        error_message = ''
+        package_name = elem.get( 'name', None )
+        package_version = elem.get( 'version', None )
+        if package_name and package_version:
+            requirements_dict[ 'name' ] = package_name
+            requirements_dict[ 'version' ] = package_version
+            requirements_dict[ 'type' ] = 'package'
+            for sub_elem in elem:
+                if sub_elem.tag == 'readme':
+                    requirements_dict[ 'readme' ] = sub_elem.text
+                elif sub_elem.tag == 'repository':
+                    # We have a complex repository dependency.  If the returned value of repository_dependency_is_valid
+                    # is False, the tool dependency definition will be set as invalid.  This is currently the only case
+                    # where a tool dependency definition is considered invalid.
+                    repository_dependency_tup, repository_dependency_is_valid, error_message = \
+                        self.handle_repository_elem( repository_elem=sub_elem,
+                                                     only_if_compiling_contained_td=False )
+                elif sub_elem.tag == 'install':
+                    package_install_version = sub_elem.get( 'version', '1.0' )
+                    if package_install_version == '1.0':
+                        # Complex repository dependencies can be defined within the last <actions> tag set contained in an
+                        # <actions_group> tag set.  Comments, <repository> tag sets and <readme> tag sets will be skipped
+                        # in tool_dependency_util.parse_package_elem().
+                        actions_elem_tuples = tool_dependency_util.parse_package_elem( sub_elem,
+                                                                                       platform_info_dict=None,
+                                                                                       include_after_install_actions=False )
+                        if actions_elem_tuples:
+                            # We now have a list of a single tuple that looks something like:
+                            # [(True, <Element 'actions' at 0x104017850>)]
+                            actions_elem_tuple = actions_elem_tuples[ 0 ]
+                            in_actions_group, actions_elem = actions_elem_tuple
+                            if in_actions_group:
+                                # Since we're inside an <actions_group> tag set, inspect the actions_elem to see if a complex
+                                # repository dependency is defined.  By definition, complex repository dependency definitions
+                                # contained within the last <actions> tag set within an <actions_group> tag set will have the
+                                # value of "only_if_compiling_contained_td" set to True in
+                                for action_elem in actions_elem:
+                                    if action_elem.tag == 'package':
+                                        # <package name="libgtextutils" version="0.6">
+                                        #    <repository name="package_libgtextutils_0_6" owner="test" prior_installation_required="True" />
+                                        # </package>
+                                        ae_package_name = action_elem.get( 'name', None )
+                                        ae_package_version = action_elem.get( 'version', None )
+                                        if ae_package_name and ae_package_version:
+                                            for sub_action_elem in action_elem:
+                                                if sub_action_elem.tag == 'repository':
+                                                    # We have a complex repository dependency.
+                                                    repository_dependency_tup, repository_dependency_is_valid, error_message = \
+                                                        self.handle_repository_elem( repository_elem=sub_action_elem,
+                                                                                     only_if_compiling_contained_td=True )
+                                    elif action_elem.tag == 'action':
+                                        # <action type="set_environment_for_install">
+                                        #    <repository changeset_revision="b107b91b3574" name="package_readline_6_2" owner="devteam" prior_installation_required="True" toolshed="http://localhost:9009">
+                                        #        <package name="readline" version="6.2" />
+                                        #    </repository>
+                                        # </action>
+                                        for sub_action_elem in action_elem:
+                                            if sub_action_elem.tag == 'repository':
+                                                # We have a complex repository dependency.
+                                                repository_dependency_tup, repository_dependency_is_valid, error_message = \
+                                                    self.handle_repository_elem( repository_elem=sub_action_elem,
+                                                                                 only_if_compiling_contained_td=True )
+        if requirements_dict:
+            dependency_key = '%s/%s' % ( package_name, package_version )
+            if repository_dependency_is_valid:
+                valid_tool_dependencies_dict[ dependency_key ] = requirements_dict
+            else:
+                # Append the error message to the requirements_dict.
+                requirements_dict[ 'error' ] = error_message
+                invalid_tool_dependencies_dict[ dependency_key ] = requirements_dict
+        return valid_tool_dependencies_dict, \
+            invalid_tool_dependencies_dict, \
+            repository_dependency_tup, \
+            repository_dependency_is_valid, \
+            error_message
+
+    def generate_repository_dependency_metadata( self, repository_dependencies_config, metadata_dict ):
+        """
+        Generate a repository dependencies dictionary based on valid information defined in the received
+        repository_dependencies_config.  This method is called from the tool shed as well as from Galaxy.
+        """
+        error_message = ''
+        # Make sure we're looking at a valid repository_dependencies.xml file.
+        tree, error_message = xml_util.parse_xml( repository_dependencies_config )
+        if tree is None:
+            xml_is_valid = False
+        else:
+            root = tree.getroot()
+            xml_is_valid = root.tag == 'repositories'
+        if xml_is_valid:
+            invalid_repository_dependencies_dict = dict( description=root.get( 'description' ) )
+            invalid_repository_dependency_tups = []
+            valid_repository_dependencies_dict = dict( description=root.get( 'description' ) )
+            valid_repository_dependency_tups = []
+            for repository_elem in root.findall( 'repository' ):
+                repository_dependency_tup, repository_dependency_is_valid, err_msg = \
+                    self.handle_repository_elem( repository_elem,
+                                                 only_if_compiling_contained_td=False )
+                if repository_dependency_is_valid:
+                    valid_repository_dependency_tups.append( repository_dependency_tup )
+                else:
+                    # Append the error_message to the repository dependencies tuple.
+                    toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                        repository_dependency_tup
+                    repository_dependency_tup = ( toolshed,
+                                                  name,
+                                                  owner,
+                                                  changeset_revision,
+                                                  prior_installation_required,
+                                                  only_if_compiling_contained_td,
+                                                  err_msg )
+                    invalid_repository_dependency_tups.append( repository_dependency_tup )
+                    error_message += err_msg
+            if invalid_repository_dependency_tups:
+                invalid_repository_dependencies_dict[ 'repository_dependencies' ] = invalid_repository_dependency_tups
+                metadata_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_dict
+            if valid_repository_dependency_tups:
+                valid_repository_dependencies_dict[ 'repository_dependencies' ] = valid_repository_dependency_tups
+                metadata_dict[ 'repository_dependencies' ] = valid_repository_dependencies_dict
+        return metadata_dict, error_message
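+
+    # A minimal repository_dependencies.xml accepted by this method (sketch;
+    # the tool shed URL, names and changeset revision are hypothetical):
+    #
+    #     <repositories description="Required packages.">
+    #         <repository toolshed="https://toolshed.example.org" name="package_zlib_1_2"
+    #                     owner="iuc" changeset_revision="0123456789ab" />
+    #     </repositories>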
+
+    def generate_tool_metadata( self, tool_config, tool, metadata_dict ):
+        """Update the received metadata_dict with changes that have been applied to the received tool."""
+        # Generate the guid.
+        guid = suc.generate_tool_guid( self.repository_clone_url, tool )
+        # Handle tool.requirements.
+        tool_requirements = []
+        for tool_requirement in tool.requirements:
+            name = str( tool_requirement.name )
+            tool_type = str( tool_requirement.type )
+            version = str( tool_requirement.version ) if tool_requirement.version else None
+            requirement_dict = dict( name=name,
+                                     type=tool_type,
+                                     version=version )
+            tool_requirements.append( requirement_dict )
+        # Handle tool.tests.
+        tool_tests = []
+        if tool.tests:
+            for ttb in tool.tests:
+                required_files = []
+                for required_file in ttb.required_files:
+                    value, extra = required_file
+                    required_files.append( value )
+                inputs = []
+                for param_name, values in ttb.inputs.items():
+                    # Handle improperly defined or strange test parameters and values.
+                    if param_name is not None:
+                        if values in [ None, False ]:
+                            # An example is the third test in http://testtoolshed.g2.bx.psu.edu/view/devteam/samtools_rmdup
+                            # which is defined as:
+                            # <test>
+                            #    <param name="input1" value="1.bam" ftype="bam" />
+                            #    <param name="bam_paired_end_type_selector" value="PE" />
+                            #    <param name="force_se" />
+                            #    <output name="output1" file="1.bam" ftype="bam" sort="True" />
+                            # </test>
+                            inputs.append( ( param_name, values ) )
+                        else:
+                            if isinstance( values, TestCollectionDef ):
+                                # Nested required files are being populated correctly;
+                                # it is not clear the value here needs to be anything else.
+                                collection_type = values.collection_type
+                                metadata_display_value = "%s collection" % collection_type
+                                inputs.append( ( param_name, metadata_display_value ) )
+                            else:
+                                try:
+                                    if len( values ) == 1:
+                                        inputs.append( ( param_name, values[ 0 ] ) )
+                                        continue
+                                except TypeError:
+                                    log.exception( 'Expected a list of values for tool "%s" parameter "%s", got %s: %s', tool.id, param_name, type( values ), values )
+                                inputs.append( ( param_name, values ) )
+                outputs = []
+                for output in ttb.outputs:
+                    name, file_name, extra = output
+                    outputs.append( ( name, basic_util.strip_path( file_name ) if file_name else None ) )
+                    if file_name not in required_files and file_name is not None:
+                        required_files.append( file_name )
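+                # The resulting test_dict looks something like (using the samtools_rmdup
+                # example above):
+                # { 'name': 'Test-1', 'required_files': [ '1.bam' ],
+                #   'inputs': [ ( 'input1', '1.bam' ), ( 'bam_paired_end_type_selector', 'PE' ), ( 'force_se', None ) ],
+                #   'outputs': [ ( 'output1', '1.bam' ) ] }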
+                test_dict = dict( name=str( ttb.name ),
+                                  required_files=required_files,
+                                  inputs=inputs,
+                                  outputs=outputs )
+                tool_tests.append( test_dict )
+        # Determine if the tool should be loaded into the tool panel.  Examples of valid tools that
+        # should not be displayed in the tool panel are datatype converters and DataManager tools
+        # (which are of type 'manage_data').
+        datatypes = metadata_dict.get( 'datatypes', None )
+        add_to_tool_panel_attribute = self.set_add_to_tool_panel_attribute_for_tool( tool=tool,
+                                                                                     guid=guid,
+                                                                                     datatypes=datatypes )
+        tool_dict = dict( id=tool.id,
+                          guid=guid,
+                          name=tool.name,
+                          version=tool.version,
+                          description=tool.description,
+                          version_string_cmd=tool.version_string_cmd,
+                          tool_config=tool_config,
+                          tool_type=tool.tool_type,
+                          requirements=tool_requirements,
+                          tests=tool_tests,
+                          add_to_tool_panel=add_to_tool_panel_attribute )
+        if 'tools' in metadata_dict:
+            metadata_dict[ 'tools' ].append( tool_dict )
+        else:
+            metadata_dict[ 'tools' ] = [ tool_dict ]
+        return metadata_dict
+
+    def generate_tool_dependency_metadata( self, tool_dependencies_config, metadata_dict, original_repository_metadata=None ):
+        """
+        If the combination of name, version and type of each element is defined in the <requirement> tag for
+        at least one tool in self.repository, then update the received metadata_dict with information from the
+        parsed tool_dependencies_config.
+        """
+        error_message = ''
+        if original_repository_metadata:
+            # Keep a copy of the original tool dependencies dictionary and the list of tool
+            # dictionaries in the metadata.
+            original_valid_tool_dependencies_dict = original_repository_metadata.get( 'tool_dependencies', None )
+        else:
+            original_valid_tool_dependencies_dict = None
+        tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+        if tree is None:
+            return metadata_dict, error_message
+        root = tree.getroot()
+
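+        # RecurserValueStore is a simple mutable namespace: the nested
+        # _check_elem_for_dep() closure below rebinds these dictionaries, which
+        # assignment to plain local variables would not allow here (this code
+        # predates reliance on Python 3's nonlocal).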
+        class RecurserValueStore( object ):
+            pass
+        rvs = RecurserValueStore()
+        rvs.valid_tool_dependencies_dict = {}
+        rvs.invalid_tool_dependencies_dict = {}
+        valid_repository_dependency_tups = []
+        invalid_repository_dependency_tups = []
+        description = root.get( 'description' )
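+        # A tool_dependencies.xml file defines <package> and <set_environment> elements,
+        # e.g. <package name="bwa" version="0.5.9">, possibly nesting further elements
+        # that are recursed into below.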
+
+        def _check_elem_for_dep( elems ):
+            error_messages = []
+            for elem in elems:
+                if elem.tag == 'package':
+                    rvs.valid_tool_dependencies_dict, rvs.invalid_tool_dependencies_dict, \
+                        repository_dependency_tup, repository_dependency_is_valid, \
+                        message = self.generate_package_dependency_metadata( elem,
+                                                                             rvs.valid_tool_dependencies_dict,
+                                                                             rvs.invalid_tool_dependencies_dict )
+                    if repository_dependency_is_valid:
+                        if repository_dependency_tup and repository_dependency_tup not in valid_repository_dependency_tups:
+                            # We have a valid complex repository dependency.
+                            valid_repository_dependency_tups.append( repository_dependency_tup )
+                    else:
+                        if repository_dependency_tup and repository_dependency_tup not in invalid_repository_dependency_tups:
+                            # We have an invalid complex repository dependency, so mark the tool dependency as invalid.
+                            # Append the error message to the invalid repository dependency tuple.
+                            toolshed, name, owner, changeset_revision, \
+                                prior_installation_required, \
+                                only_if_compiling_contained_td = \
+                                repository_dependency_tup
+                            repository_dependency_tup = \
+                                ( toolshed,
+                                  name,
+                                  owner,
+                                  changeset_revision,
+                                  prior_installation_required,
+                                  only_if_compiling_contained_td,
+                                  message )
+                            invalid_repository_dependency_tups.append( repository_dependency_tup )
+                            error_messages.append( '%s  %s' % ( error_message, message ) )
+                elif elem.tag == 'set_environment':
+                    rvs.valid_tool_dependencies_dict = \
+                        self.generate_environment_dependency_metadata( elem, rvs.valid_tool_dependencies_dict )
+                error_messages += _check_elem_for_dep( elem )
+            return error_messages
+        error_message = "\n".join([error_message] + _check_elem_for_dep( root ))
+        if rvs.valid_tool_dependencies_dict:
+            if original_valid_tool_dependencies_dict:
+                # We're generating metadata on an update pulled to a tool shed repository installed
+                # into a Galaxy instance, so handle changes to tool dependencies appropriately.
+                irm = self.app.installed_repository_manager
+                updated_tool_dependency_names, deleted_tool_dependency_names = \
+                    irm.handle_existing_tool_dependencies_that_changed_in_update( self.repository,
+                                                                                  original_valid_tool_dependencies_dict,
+                                                                                  rvs.valid_tool_dependencies_dict )
+            metadata_dict[ 'tool_dependencies' ] = rvs.valid_tool_dependencies_dict
+        if rvs.invalid_tool_dependencies_dict:
+            metadata_dict[ 'invalid_tool_dependencies' ] = rvs.invalid_tool_dependencies_dict
+        if valid_repository_dependency_tups:
+            metadata_dict = \
+                self.update_repository_dependencies_metadata( metadata=metadata_dict,
+                                                              repository_dependency_tups=valid_repository_dependency_tups,
+                                                              is_valid=True,
+                                                              description=description )
+        if invalid_repository_dependency_tups:
+            metadata_dict = \
+                self.update_repository_dependencies_metadata( metadata=metadata_dict,
+                                                              repository_dependency_tups=invalid_repository_dependency_tups,
+                                                              is_valid=False,
+                                                              description=description )
+        return metadata_dict, error_message
+
+    def generate_workflow_metadata( self, relative_path, exported_workflow_dict, metadata_dict ):
+        """
+        Update the received metadata_dict with changes that have been applied to the
+        received exported_workflow_dict.
+        """
+        if 'workflows' in metadata_dict:
+            metadata_dict[ 'workflows' ].append( ( relative_path, exported_workflow_dict ) )
+        else:
+            metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
+        return metadata_dict
+
+    def get_invalid_file_tups( self ):
+        return self.invalid_file_tups
+
+    def get_metadata_dict( self ):
+        return self.metadata_dict
+
+    def get_relative_path_to_repository_file( self, root, name, relative_install_dir, work_dir, shed_config_dict ):
+        if self.resetting_all_metadata_on_repository:
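+            # Strip the temporary clone directory from the path so the stored path is
+            # relative to the repository, e.g. (hypothetical paths)
+            # '/tmp/tmp_clone/tools/filter.xml' -> 'tools/filter.xml'.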
+            full_path_to_file = os.path.join( root, name )
+            stripped_path_to_file = full_path_to_file.replace( work_dir, '' )
+            if stripped_path_to_file.startswith( '/' ):
+                stripped_path_to_file = stripped_path_to_file[ 1: ]
+            relative_path_to_file = os.path.join( relative_install_dir, stripped_path_to_file )
+        else:
+            relative_path_to_file = os.path.join( root, name )
+            if relative_install_dir and \
+                shed_config_dict.get( 'tool_path' ) and \
+                    relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+                relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
+        return relative_path_to_file
+
+    def get_sample_files_from_disk( self, repository_files_dir, tool_path=None, relative_install_dir=None ):
+        if self.resetting_all_metadata_on_repository:
+            # Keep track of the location where the repository is temporarily cloned so that we can strip
+            # it when setting metadata.
+            work_dir = repository_files_dir
+        sample_file_metadata_paths = []
+        sample_file_copy_paths = []
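+        # Collect files ending in '.sample' (e.g. a tool_data_table_conf.xml.sample shipped
+        # with the repository).  The copy paths point at files on disk while the metadata
+        # paths are stored relative to the repository.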
+        for root, dirs, files in os.walk( repository_files_dir ):
+            if root.find( '.hg' ) < 0:
+                for name in files:
+                    if name.endswith( '.sample' ):
+                        if self.resetting_all_metadata_on_repository:
+                            full_path_to_sample_file = os.path.join( root, name )
+                            stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
+                            if stripped_path_to_sample_file.startswith( '/' ):
+                                stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ]
+                            relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file )
+                            if os.path.exists( relative_path_to_sample_file ):
+                                sample_file_copy_paths.append( relative_path_to_sample_file )
+                            else:
+                                sample_file_copy_paths.append( full_path_to_sample_file )
+                        else:
+                            relative_path_to_sample_file = os.path.join( root, name )
+                            sample_file_copy_paths.append( relative_path_to_sample_file )
+                            if tool_path and relative_install_dir:
+                                if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
+                                    relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1:]
+                        sample_file_metadata_paths.append( relative_path_to_sample_file )
+        return sample_file_metadata_paths, sample_file_copy_paths
+
+    def handle_repository_elem( self, repository_elem, only_if_compiling_contained_td=False ):
+        """
+        Process the received repository_elem which is a <repository> tag either from a
+        repository_dependencies.xml file or a tool_dependencies.xml file.  If the former,
+        we're generating repository dependencies metadata for a repository in the Tool Shed.
+        If the latter, we're generating package dependency metadata within Galaxy or the
+        Tool Shed.
+        """
+        is_valid = True
+        error_message = ''
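+        # A <repository> tag looks something like:
+        # <repository toolshed="http://localhost:9009" name="emboss_datatypes"
+        #             owner="test" changeset_revision="ab03a2a5f407" />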
+        toolshed = repository_elem.get( 'toolshed', None )
+        name = repository_elem.get( 'name', None )
+        owner = repository_elem.get( 'owner', None )
+        changeset_revision = repository_elem.get( 'changeset_revision', None )
+        prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
+        repository_dependency_tup = [ toolshed,
+                                      name,
+                                      owner,
+                                      changeset_revision,
+                                      prior_installation_required,
+                                      str( only_if_compiling_contained_td ) ]
+        if self.app.name == 'galaxy' and not self.updating_installed_repository:
+            # We're installing a repository into Galaxy, so make sure its contained repository
+            # dependency definition is valid.
+            if toolshed is None or name is None or owner is None or changeset_revision is None:
+                # Several packages exist in the Tool Shed that contain invalid repository
+                # dependency definitions, but will still install.  Report these errors to the
+                # installing user rather than raising an exception, which would keep the
+                # installation from proceeding (reaching this point would imply a bug in the
+                # Tool Shed framework).
+                error_message = 'Installation encountered an invalid repository dependency definition:\n'
+                error_message += xml_util.xml_to_string( repository_elem, use_indent=True )
+                log.error( error_message )
+                return repository_dependency_tup, False, error_message
+        if not toolshed:
+            # Default to the current tool shed.
+            toolshed = str( url_for( '/', qualified=True ) ).rstrip( '/' )
+            repository_dependency_tup[0] = toolshed
+        user = None
+        repository = None
+        toolshed = common_util.remove_protocol_from_tool_shed_url( toolshed )
+        if self.app.name == 'galaxy':
+            # We're in Galaxy.  We reach here when we're generating the metadata for a tool
+            # dependencies package defined for a repository or when we're generating metadata
+            # for an installed repository.  See if we can locate the installed repository via
+            # the changeset_revision defined in the repository_elem (it may be outdated).  If
+            # we're successful in locating an installed repository with the attributes defined
+            # in the repository_elem, we know it is valid.
+            repository = repository_util.get_repository_for_dependency_relationship( self.app,
+                                                                                     toolshed,
+                                                                                     name,
+                                                                                     owner,
+                                                                                     changeset_revision )
+            if repository:
+                return repository_dependency_tup, is_valid, error_message
+            else:
+                # Send a request to the tool shed to retrieve appropriate additional changeset
+                # revisions with which the repository may have been installed.
+                text = metadata_util.get_updated_changeset_revisions_from_tool_shed( self.app,
+                                                                                     toolshed,
+                                                                                     name,
+                                                                                     owner,
+                                                                                     changeset_revision )
+                if text:
+                    updated_changeset_revisions = util.listify( text )
+                    for updated_changeset_revision in updated_changeset_revisions:
+                        repository = repository_util.get_repository_for_dependency_relationship( self.app,
+                                                                                                 toolshed,
+                                                                                                 name,
+                                                                                                 owner,
+                                                                                                 updated_changeset_revision )
+                        if repository:
+                            return repository_dependency_tup, is_valid, error_message
+                        if self.updating_installed_repository:
+                            # The repository dependency was included in an update to the installed
+                            # repository, so it will not yet be installed.  Return the tuple for later
+                            # installation.
+                            return repository_dependency_tup, is_valid, error_message
+                if self.updating_installed_repository:
+                    # The repository dependency was included in an update to the installed repository,
+                    # so it will not yet be installed.  Return the tuple for later installation.
+                    return repository_dependency_tup, is_valid, error_message
+                # Don't generate an error message for missing repository dependencies that are required
+                # only if compiling the dependent repository's tool dependency.
+                if not only_if_compiling_contained_td:
+                    # We'll currently default to setting the repository dependency definition as invalid
+                    # if an installed repository cannot be found.  This may not be ideal because the tool
+                    # shed may have simply been inaccessible when metadata was being generated for the
+                    # installed tool shed repository.
+                    error_message = "Ignoring invalid repository dependency definition for tool shed %s, name %s, owner %s, " % \
+                        ( toolshed, name, owner )
+                    error_message += "changeset revision %s." % changeset_revision
+                    log.debug( error_message )
+                    is_valid = False
+                    return repository_dependency_tup, is_valid, error_message
+        else:
+            # We're in the tool shed.
+            if suc.tool_shed_is_this_tool_shed( toolshed ):
+                try:
+                    user = self.sa_session.query( self.app.model.User ) \
+                                          .filter( self.app.model.User.table.c.username == owner ) \
+                                          .one()
+                except Exception:
+                    error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " % \
+                        ( toolshed, name, owner )
+                    error_message += "changeset revision %s because the owner is invalid." % changeset_revision
+                    log.debug( error_message )
+                    is_valid = False
+                    return repository_dependency_tup, is_valid, error_message
+                try:
+                    repository = self.sa_session.query( self.app.model.Repository ) \
+                        .filter( and_( self.app.model.Repository.table.c.name == name,
+                                       self.app.model.Repository.table.c.user_id == user.id ) ) \
+                        .one()
+                except Exception:
+                    error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " % \
+                        ( toolshed, name, owner )
+                    error_message += "changeset revision %s because the name is invalid.  " % changeset_revision
+                    log.debug( error_message )
+                    is_valid = False
+                    return repository_dependency_tup, is_valid, error_message
+                repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
+
+                # The received changeset_revision may be None since defining it in the dependency definition is optional.
+                # If this is the case, the default will be to set its value to the repository dependency tip revision.
+                # This probably occurs only when handling circular dependency definitions.
+                tip_ctx = repo.changectx( repo.changelog.tip() )
+                # Make sure the repo.changelog includes at least 1 revision.
+                if changeset_revision is None and tip_ctx.rev() >= 0:
+                    changeset_revision = str( tip_ctx )
+                    repository_dependency_tup = [ toolshed,
+                                                  name,
+                                                  owner,
+                                                  changeset_revision,
+                                                  prior_installation_required,
+                                                  str( only_if_compiling_contained_td ) ]
+                    return repository_dependency_tup, is_valid, error_message
+                else:
+                    # Find the specified changeset revision in the repository's changelog to see if it's valid.
+                    found = False
+                    for changeset in repo.changelog:
+                        changeset_hash = str( repo.changectx( changeset ) )
+                        if changeset_hash == changeset_revision:
+                            found = True
+                            break
+                    if not found:
+                        error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, " % \
+                            ( toolshed, name, owner )
+                        error_message += "changeset revision %s because the changeset revision is invalid.  " % changeset_revision
+                        log.debug( error_message )
+                        is_valid = False
+                        return repository_dependency_tup, is_valid, error_message
+            else:
+                # Repository dependencies are currently supported within a single tool shed.
+                error_message = "Repository dependencies are currently supported only within the same tool shed.  Ignoring "
+                error_message += "repository dependency definition  for tool shed %s, name %s, owner %s, changeset revision %s.  " % \
+                    ( toolshed, name, owner, changeset_revision )
+                log.debug( error_message )
+                is_valid = False
+                return repository_dependency_tup, is_valid, error_message
+        return repository_dependency_tup, is_valid, error_message
+
+    def set_add_to_tool_panel_attribute_for_tool( self, tool, guid, datatypes ):
+        """
+        Determine if a tool should be loaded into the Galaxy tool panel.  Examples of valid tools that
+        should not be displayed in the tool panel are datatype converters and DataManager tools.
+        """
+        if hasattr( tool, 'tool_type' ):
+            if tool.tool_type in [ 'manage_data' ]:
+                # We have a DataManager tool.
+                return False
+        if datatypes:
+            for datatype_dict in datatypes:
+                converters = datatype_dict.get( 'converters', None )
+                # [{"converters":
+                #    [{"target_datatype": "gff",
+                #      "tool_config": "bed_to_gff_converter.xml",
+                #      "guid": "localhost:9009/repos/test/bed_to_gff_converter/CONVERTER_bed_to_gff_0/2.0.0"}],
+                #   "display_in_upload": "true",
+                #   "dtype": "galaxy.datatypes.interval:Bed",
+                #   "extension": "bed"}]
+                if converters:
+                    for converter_dict in converters:
+                        converter_guid = converter_dict.get( 'guid', None )
+                        if converter_guid:
+                            if converter_guid == guid:
+                                # We have a datatypes converter.
+                                return False
+        return True
+
+    def set_changeset_revision( self, changeset_revision ):
+        self.changeset_revision = changeset_revision
+
+    def set_relative_install_dir( self, relative_install_dir ):
+        self.relative_install_dir = relative_install_dir
+
+    def set_repository( self, repository, relative_install_dir=None, changeset_revision=None ):
+        self.repository = repository
+        # Shed related tool panel configs are only relevant to Galaxy.
+        if self.app.name == 'galaxy':
+            if relative_install_dir is None and self.repository is not None:
+                tool_path, relative_install_dir = self.repository.get_tool_relative_path( self.app )
+            if changeset_revision is None and self.repository is not None:
+                self.set_changeset_revision( self.repository.changeset_revision )
+            else:
+                self.set_changeset_revision( changeset_revision )
+            self.shed_config_dict = repository.get_shed_config_dict( self.app )
+            self.metadata_dict = { 'shed_config_filename': self.shed_config_dict.get( 'config_filename', None ) }
+        else:
+            if relative_install_dir is None and self.repository is not None:
+                relative_install_dir = repository.repo_path( self.app )
+            if changeset_revision is None and self.repository is not None:
+                self.set_changeset_revision( self.repository.tip( self.app ) )
+            else:
+                self.set_changeset_revision( changeset_revision )
+            self.shed_config_dict = {}
+            self.metadata_dict = {}
+        self.set_relative_install_dir( relative_install_dir )
+        self.set_repository_files_dir()
+        self.resetting_all_metadata_on_repository = False
+        self.updating_installed_repository = False
+        self.persist = False
+        self.invalid_file_tups = []
+
+    def set_repository_clone_url( self, repository_clone_url ):
+        self.repository_clone_url = repository_clone_url
+
+    def set_repository_files_dir( self, repository_files_dir=None ):
+        self.repository_files_dir = repository_files_dir
+
+    def update_repository_dependencies_metadata( self, metadata, repository_dependency_tups, is_valid, description ):
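+        """
+        Append each of the received repository_dependency_tups to the appropriate
+        (valid or invalid) repository dependencies dictionary in the received metadata.
+        """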
+        if is_valid:
+            repository_dependencies_dict = metadata.get( 'repository_dependencies', None )
+        else:
+            repository_dependencies_dict = metadata.get( 'invalid_repository_dependencies', None )
+        # Valid tuples have six entries; invalid tuples carry a seventh entry, the error_message.
+        if repository_dependencies_dict:
+            repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+            for repository_dependency_tup in repository_dependency_tups:
+                if repository_dependency_tup not in repository_dependencies:
+                    repository_dependencies.append( repository_dependency_tup )
+            repository_dependencies_dict[ 'repository_dependencies' ] = repository_dependencies
+        else:
+            repository_dependencies_dict = dict( description=description,
+                                                 repository_dependencies=repository_dependency_tups )
+        if repository_dependencies_dict:
+            if is_valid:
+                metadata[ 'repository_dependencies' ] = repository_dependencies_dict
+            else:
+                metadata[ 'invalid_repository_dependencies' ] = repository_dependencies_dict
+        return metadata
diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py
new file mode 100644
index 0000000..5e3f451
--- /dev/null
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -0,0 +1,999 @@
+import logging
+import tempfile
+
+from sqlalchemy import false, or_
+
+from galaxy import util
+from galaxy.util import inflector
+from galaxy.web.form_builder import SelectField
+from tool_shed.metadata import metadata_generator
+from tool_shed.repository_types import util as rt_util
+from tool_shed.repository_types.metadata import TipOnly
+from tool_shed.util import (basic_util, common_util, hg_util, metadata_util,
+    repository_util, shed_util_common as suc, tool_util)
+
+log = logging.getLogger( __name__ )
+
+
+class RepositoryMetadataManager( metadata_generator.MetadataGenerator ):
+
+    def __init__( self, app, user, repository=None, changeset_revision=None, repository_clone_url=None,
+                  shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
+                  resetting_all_metadata_on_repository=False, updating_installed_repository=False,
+                  persist=False, metadata_dict=None ):
+        super( RepositoryMetadataManager, self ).__init__( app, repository, changeset_revision,
+                                                           repository_clone_url, shed_config_dict,
+                                                           relative_install_dir, repository_files_dir,
+                                                           resetting_all_metadata_on_repository,
+                                                           updating_installed_repository, persist,
+                                                           metadata_dict=metadata_dict, user=user )
+        self.app = app
+        self.user = user
+        # Repository metadata comparisons for changeset revisions.
+        self.EQUAL = 'equal'
+        self.NO_METADATA = 'no metadata'
+        self.NOT_EQUAL_AND_NOT_SUBSET = 'not equal and not subset'
+        self.SUBSET = 'subset'
+        self.SUBSET_VALUES = [ self.EQUAL, self.SUBSET ]
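+        # compare_changeset_revisions() returns one of the above constants; a new
+        # repository_metadata record is created only when it returns NOT_EQUAL_AND_NOT_SUBSET.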
+
+    def add_tool_versions( self, id, repository_metadata, changeset_revisions ):
+        # Build a dictionary of { tool guid : parent tool id } pairs for each tool in repository_metadata.
+        metadata = repository_metadata.metadata
+        tool_versions_dict = {}
+        for tool_dict in metadata.get( 'tools', [] ):
+            # We have at least 2 changeset revisions to compare tool guids and tool ids.
+            parent_id = self.get_parent_id( id,
+                                            tool_dict[ 'id' ],
+                                            tool_dict[ 'version' ],
+                                            tool_dict[ 'guid' ],
+                                            changeset_revisions )
+            tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+        if tool_versions_dict:
+            repository_metadata.tool_versions = tool_versions_dict
+            self.sa_session.add( repository_metadata )
+            self.sa_session.flush()
+
+    def build_repository_ids_select_field( self, name='repository_ids', multiple=True, display='checkboxes',
+                                           my_writable=False ):
+        """Generate the current list of repositories for resetting metadata."""
+        repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
+        query = self.get_query_for_setting_metadata_on_repositories( my_writable=my_writable, order=True )
+        for repository in query:
+            owner = str( repository.user.username )
+            option_label = '%s (%s)' % ( str( repository.name ), owner )
+            option_value = '%s' % self.app.security.encode_id( repository.id )
+            repositories_select_field.add_option( option_label, option_value )
+        return repositories_select_field
+
+    def clean_repository_metadata( self, changeset_revisions ):
+        # Delete all repository_metadata records associated with the repository that have
+        # a changeset_revision that is not in changeset_revisions.  We sometimes see multiple
+        # records with the same changeset revision value - no idea how this happens. We'll
+        # assume we can delete the older records, so we'll order by update_time descending and
+        # delete records that have the same changeset_revision we come across later.
+        changeset_revisions_checked = []
+        for repository_metadata in \
+            self.sa_session.query( self.app.model.RepositoryMetadata ) \
+                           .filter( self.app.model.RepositoryMetadata.table.c.repository_id == self.repository.id ) \
+                           .order_by( self.app.model.RepositoryMetadata.table.c.changeset_revision,
+                                      self.app.model.RepositoryMetadata.table.c.update_time.desc() ):
+            changeset_revision = repository_metadata.changeset_revision
+            if changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions:
+                self.sa_session.delete( repository_metadata )
+                self.sa_session.flush()
+
+    def compare_changeset_revisions( self, ancestor_changeset_revision, ancestor_metadata_dict ):
+        """
+        Compare the contents of two changeset revisions to determine if a new repository
+        metadata revision should be created.
+        """
+        # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict.
+        # This changeset_revision is an ancestor of self.changeset_revision which is associated
+        # with self.metadata_dict.  A new repository_metadata record will be created only
+        # when this method returns the constant value self.NOT_EQUAL_AND_NOT_SUBSET.
+        ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
+        ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
+        ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ]
+        ancestor_guids.sort()
+        ancestor_readme_files = ancestor_metadata_dict.get( 'readme_files', [] )
+        ancestor_repository_dependencies_dict = ancestor_metadata_dict.get( 'repository_dependencies', {} )
+        ancestor_repository_dependencies = ancestor_repository_dependencies_dict.get( 'repository_dependencies', [] )
+        ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', {} )
+        ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] )
+        ancestor_data_manager = ancestor_metadata_dict.get( 'data_manager', {} )
+        current_datatypes = self.metadata_dict.get( 'datatypes', [] )
+        current_tools = self.metadata_dict.get( 'tools', [] )
+        current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ]
+        current_guids.sort()
+        current_readme_files = self.metadata_dict.get( 'readme_files', [] )
+        current_repository_dependencies_dict = self.metadata_dict.get( 'repository_dependencies', {} )
+        current_repository_dependencies = current_repository_dependencies_dict.get( 'repository_dependencies', [] )
+        current_tool_dependencies = self.metadata_dict.get( 'tool_dependencies', {} )
+        current_workflows = self.metadata_dict.get( 'workflows', [] )
+        current_data_manager = self.metadata_dict.get( 'data_manager', {} )
+        # Handle case where no metadata exists for either changeset.
+        no_datatypes = not ancestor_datatypes and not current_datatypes
+        no_readme_files = not ancestor_readme_files and not current_readme_files
+        no_repository_dependencies = not ancestor_repository_dependencies and not current_repository_dependencies
+        no_tool_dependencies = not ancestor_tool_dependencies and not current_tool_dependencies
+        no_tools = not ancestor_guids and not current_guids
+        no_workflows = not ancestor_workflows and not current_workflows
+        no_data_manager = not ancestor_data_manager and not current_data_manager
+        if no_datatypes and no_readme_files and no_repository_dependencies and \
+                no_tool_dependencies and no_tools and no_workflows and \
+                no_data_manager:
+            return self.NO_METADATA
+        # Uncomment the following if we decide that README files should affect how installable
+        # repository revisions are defined.  See the NOTE in self.compare_readme_files().
+        # readme_file_comparison = self.compare_readme_files( ancestor_readme_files, current_readme_files )
+        repository_dependency_comparison = self.compare_repository_dependencies( ancestor_repository_dependencies,
+                                                                                 current_repository_dependencies )
+        tool_dependency_comparison = self.compare_tool_dependencies( ancestor_tool_dependencies,
+                                                                     current_tool_dependencies )
+        workflow_comparison = self.compare_workflows( ancestor_workflows, current_workflows )
+        datatype_comparison = self.compare_datatypes( ancestor_datatypes, current_datatypes )
+        data_manager_comparison = self.compare_data_manager( ancestor_data_manager, current_data_manager )
+        # Handle case where all metadata is the same.
+        if ancestor_guids == current_guids and \
+                repository_dependency_comparison == self.EQUAL and \
+                tool_dependency_comparison == self.EQUAL and \
+                workflow_comparison == self.EQUAL and \
+                datatype_comparison == self.EQUAL and \
+                data_manager_comparison == self.EQUAL:
+            return self.EQUAL
+        # Handle case where ancestor metadata is a subset of current metadata.
+        # readme_file_is_subset = readme_file_comparison in [ self.EQUAL, self.SUBSET ]
+        repository_dependency_is_subset = repository_dependency_comparison in self.SUBSET_VALUES
+        tool_dependency_is_subset = tool_dependency_comparison in self.SUBSET_VALUES
+        workflow_dependency_is_subset = workflow_comparison in self.SUBSET_VALUES
+        datatype_is_subset = datatype_comparison in self.SUBSET_VALUES
+        datamanager_is_subset = data_manager_comparison in self.SUBSET_VALUES
+        if repository_dependency_is_subset and tool_dependency_is_subset and \
+                workflow_dependency_is_subset and datatype_is_subset and \
+                datamanager_is_subset:
+            is_subset = True
+            for guid in ancestor_guids:
+                if guid not in current_guids:
+                    is_subset = False
+                    break
+            if is_subset:
+                return self.SUBSET
+        return self.NOT_EQUAL_AND_NOT_SUBSET
+
+    def compare_data_manager( self, ancestor_metadata, current_metadata ):
+        """Determine if ancestor_metadata is the same as or a subset of current_metadata for data_managers."""
+        def __data_manager_dict_to_tuple_list( metadata_dict ):
+            # we do not check tool_guid or tool conf file name
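+            # Each data manager entry is converted to a hashable tuple, something like
+            # ( name, ( 'data_table_name', ), guid, version, name, id ), so that the
+            # set comparisons below can be used.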
+            return set( sorted( [ ( name,
+                                    tuple( sorted( value.get( 'data_tables', [] ) ) ),
+                                    value.get( 'guid' ),
+                                    value.get( 'version' ),
+                                    value.get( 'name' ),
+                                    value.get( 'id' ) ) for name, value in metadata_dict.items() ] ) )
+        # Only compare valid entries; any invalid entries are ignored.
+        ancestor_metadata = __data_manager_dict_to_tuple_list( ancestor_metadata.get( 'data_managers', {} ) )
+        current_metadata = __data_manager_dict_to_tuple_list( current_metadata.get( 'data_managers', {} ) )
+        # use set comparisons
+        if ancestor_metadata.issubset( current_metadata ):
+            if ancestor_metadata == current_metadata:
+                return self.EQUAL
+            return self.SUBSET
+        return self.NOT_EQUAL_AND_NOT_SUBSET
+
+    def compare_datatypes( self, ancestor_datatypes, current_datatypes ):
+        """Determine if ancestor_datatypes is the same as or a subset of current_datatypes."""
+        # Each datatype dict looks something like:
+        # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"}
+        if len( ancestor_datatypes ) <= len( current_datatypes ):
+            for ancestor_datatype in ancestor_datatypes:
+                # Currently the only way to differentiate datatypes is by name.
+                ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ]
+                ancestor_datatype_extension = ancestor_datatype[ 'extension' ]
+                ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None )
+                found_in_current = False
+                for current_datatype in current_datatypes:
+                    if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \
+                            current_datatype[ 'extension' ] == ancestor_datatype_extension and \
+                            current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype:
+                        found_in_current = True
+                        break
+                if not found_in_current:
+                    return self.NOT_EQUAL_AND_NOT_SUBSET
+            if len( ancestor_datatypes ) == len( current_datatypes ):
+                return self.EQUAL
+            else:
+                return self.SUBSET
+        return self.NOT_EQUAL_AND_NOT_SUBSET
+
+    def compare_readme_files( self, ancestor_readme_files, current_readme_files ):
+        """Determine if ancestor_readme_files is equal to or a subset of current_readme_files."""
+        # NOTE: Although repository README files are considered a Galaxy utility similar to tools,
+        # repository dependency definition files, etc., we don't define installable repository revisions
+        # based on changes to README files.  To understand why, consider the following scenario:
+        # 1. Upload the filtering tool to a new repository - this will result in installable revision 0.
+        # 2. Upload a README file to the repository - this will move the installable revision from revision
+        #    0 to revision 1.
+        # 3. Delete the README file from the repository - this will move the installable revision from
+        #    revision 1 to revision 2.
+        # The above scenario is the current behavior, and that is why this method is not currently called.
+        # This method exists only in case we decide to change this current behavior.
+        # The list of readme files looks something like: ["database/community_files/000/repo_2/readme.txt"]
+        if len( ancestor_readme_files ) <= len( current_readme_files ):
+            for ancestor_readme_file in ancestor_readme_files:
+                if ancestor_readme_file not in current_readme_files:
+                    return self.NOT_EQUAL_AND_NOT_SUBSET
+            if len( ancestor_readme_files ) == len( current_readme_files ):
+                return self.EQUAL
+            else:
+                return self.SUBSET
+        return self.NOT_EQUAL_AND_NOT_SUBSET
+
+    def compare_repository_dependencies( self, ancestor_repository_dependencies, current_repository_dependencies ):
+        """
+        Determine if ancestor_repository_dependencies is the same as or a subset of
+        current_repository_dependencies.
+        """
+        # The list of repository_dependencies looks something like:
+        # [["http://localhost:9009", "emboss_datatypes", "test", "ab03a2a5f407", "False", "False"]].
+        # Compare the fields of each tuple directly, normalizing the tool shed URL
+        # and the boolean flags before comparing.
+        if len( ancestor_repository_dependencies ) <= len( current_repository_dependencies ):
+            for ancestor_tup in ancestor_repository_dependencies:
+                a_tool_shed, a_repo_name, a_repo_owner, a_changeset_revision, \
+                    a_prior_installation_required, \
+                    a_only_if_compiling_contained_td = ancestor_tup
+                cleaned_a_tool_shed = common_util.remove_protocol_from_tool_shed_url( a_tool_shed )
+                found_in_current = False
+                for current_tup in current_repository_dependencies:
+                    c_tool_shed, c_repo_name, c_repo_owner, \
+                        c_changeset_revision, c_prior_installation_required, \
+                        c_only_if_compiling_contained_td = current_tup
+                    cleaned_c_tool_shed = common_util.remove_protocol_from_tool_shed_url( c_tool_shed )
+                    if cleaned_c_tool_shed == cleaned_a_tool_shed and \
+                            c_repo_name == a_repo_name and \
+                            c_repo_owner == a_repo_owner and \
+                            c_changeset_revision == a_changeset_revision and \
+                            util.string_as_bool( c_prior_installation_required ) == util.string_as_bool( a_prior_installation_required ) and \
+                            util.string_as_bool( c_only_if_compiling_contained_td ) == util.string_as_bool( a_only_if_compiling_contained_td ):
+                        found_in_current = True
+                        break
+                if not found_in_current:
+                    # In some cases, the only difference between a dependency definition in the lists
+                    # is the changeset_revision value.  We'll check to see if this is the case, and if
+                    # the defined dependency is a repository that has metadata set only on its tip.
+                    if not self.different_revision_defines_tip_only_repository_dependency( ancestor_tup,
+                                                                                           current_repository_dependencies ):
+                        return self.NOT_EQUAL_AND_NOT_SUBSET
+                    return self.SUBSET
+            if len( ancestor_repository_dependencies ) == len( current_repository_dependencies ):
+                return self.EQUAL
+            else:
+                return self.SUBSET
+        return self.NOT_EQUAL_AND_NOT_SUBSET
+
+    def compare_tool_dependencies( self, ancestor_tool_dependencies, current_tool_dependencies ):
+        """
+        Determine if ancestor_tool_dependencies is the same as or a subset of current_tool_dependencies.
+        """
+        # The tool_dependencies dictionary looks something like:
+        # {'bwa/0.5.9': {'readme': 'some string', 'version': '0.5.9', 'type': 'package', 'name': 'bwa'}}
+        if len( ancestor_tool_dependencies ) <= len( current_tool_dependencies ):
+            for ancestor_td_key, ancestor_requirements_dict in ancestor_tool_dependencies.items():
+                if ancestor_td_key in current_tool_dependencies:
+                    # The only values that could have changed between the 2 dictionaries are the
+                    # "readme" or "type" values.  Changing the readme value makes no difference.
+                    # Changing the type will change the installation process, but for now we'll
+                    # assume it was a typo, so new metadata shouldn't be generated.
+                    continue
+                else:
+                    return self.NOT_EQUAL_AND_NOT_SUBSET
+            # At this point we know that ancestor_tool_dependencies is at least a subset of current_tool_dependencies.
+            if len( ancestor_tool_dependencies ) == len( current_tool_dependencies ):
+                return self.EQUAL
+            else:
+                return self.SUBSET
+        return self.NOT_EQUAL_AND_NOT_SUBSET
+
+    def compare_workflows( self, ancestor_workflows, current_workflows ):
+        """
+        Determine if ancestor_workflows is the same as current_workflows or if ancestor_workflows
+        is a subset of current_workflows.
+        """
+        if len( ancestor_workflows ) <= len( current_workflows ):
+            for ancestor_workflow_tup in ancestor_workflows:
+                # ancestor_workflows is a list of tuples where each contained tuple is
+                # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+                ancestor_workflow_dict = ancestor_workflow_tup[1]
+                # Currently the only way to differentiate workflows is by name.
+                ancestor_workflow_name = ancestor_workflow_dict[ 'name' ]
+                num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] )
+                found_in_current = False
+                for current_workflow_tup in current_workflows:
+                    current_workflow_dict = current_workflow_tup[1]
+                    # Assume that if the name and number of steps are equal, then the workflows
+                    # are the same.  Of course, this may not be true...
+                    if current_workflow_dict[ 'name' ] == ancestor_workflow_name and \
+                            len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps:
+                        found_in_current = True
+                        break
+                if not found_in_current:
+                    return self.NOT_EQUAL_AND_NOT_SUBSET
+            if len( ancestor_workflows ) == len( current_workflows ):
+                return self.EQUAL
+            else:
+                return self.SUBSET
+        return self.NOT_EQUAL_AND_NOT_SUBSET
+
+    def create_or_update_repository_metadata( self, changeset_revision, metadata_dict ):
+        """Create or update a repository_metadata record in the tool shed."""
+        has_repository_dependencies = False
+        has_repository_dependencies_only_if_compiling_contained_td = False
+        includes_datatypes = False
+        includes_tools = False
+        includes_tool_dependencies = False
+        includes_workflows = False
+        if metadata_dict:
+            repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', {} )
+            repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+            has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+                repository_util.get_repository_dependency_types( repository_dependencies )
+            if 'datatypes' in metadata_dict:
+                includes_datatypes = True
+            if 'tools' in metadata_dict:
+                includes_tools = True
+            if 'tool_dependencies' in metadata_dict:
+                includes_tool_dependencies = True
+            if 'workflows' in metadata_dict:
+                includes_workflows = True
+        if has_repository_dependencies or \
+                has_repository_dependencies_only_if_compiling_contained_td or \
+                includes_datatypes or includes_tools or \
+                includes_tool_dependencies or includes_workflows:
+            downloadable = True
+        else:
+            downloadable = False
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                           self.app.security.encode_id( self.repository.id ),
+                                                                                           changeset_revision )
+        if repository_metadata:
+            repository_metadata.metadata = metadata_dict
+            repository_metadata.downloadable = downloadable
+            repository_metadata.has_repository_dependencies = has_repository_dependencies
+            repository_metadata.includes_datatypes = includes_datatypes
+            repository_metadata.includes_tools = includes_tools
+            repository_metadata.includes_tool_dependencies = includes_tool_dependencies
+            repository_metadata.includes_workflows = includes_workflows
+        else:
+            repository_metadata = \
+                self.app.model.RepositoryMetadata( repository_id=self.repository.id,
+                                                   changeset_revision=changeset_revision,
+                                                   metadata=metadata_dict,
+                                                   downloadable=downloadable,
+                                                   has_repository_dependencies=has_repository_dependencies,
+                                                   includes_datatypes=includes_datatypes,
+                                                   includes_tools=includes_tools,
+                                                   includes_tool_dependencies=includes_tool_dependencies,
+                                                   includes_workflows=includes_workflows )
+        # Always set the default values for the following columns.  When resetting all metadata
+        # on a repository this will reset the values.
+        repository_metadata.missing_test_components = False
+        self.sa_session.add( repository_metadata )
+        self.sa_session.flush()
+
+        return repository_metadata
+
+    def different_revision_defines_tip_only_repository_dependency( self, rd_tup, repository_dependencies ):
+        """
+        Determine if the only difference between rd_tup and a dependency definition in the list of
+        repository_dependencies is the changeset_revision value.
+        """
+        rd_tool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \
+            common_util.parse_repository_dependency_tuple( rd_tup )
+        cleaned_rd_tool_shed = common_util.remove_protocol_from_tool_shed_url( rd_tool_shed )
+        for repository_dependency in repository_dependencies:
+            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                common_util.parse_repository_dependency_tuple( repository_dependency )
+            cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
+            if cleaned_rd_tool_shed == cleaned_tool_shed and rd_name == name and rd_owner == owner:
+                # Determine if the repository represented by the dependency tuple is an instance of the repository type TipOnly.
+                required_repository = repository_util.get_repository_by_name_and_owner( self.app, name, owner )
+                repository_type_class = self.app.repository_types_registry.get_class_by_label( required_repository.type )
+                return isinstance( repository_type_class, TipOnly )
+        return False
+
+    def get_parent_id( self, id, old_id, version, guid, changeset_revisions ):
+        # Compare from most recent to oldest.
+        changeset_revisions.reverse()
+        for changeset_revision in changeset_revisions:
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app, id, changeset_revision )
+            metadata = repository_metadata.metadata
+            tools_dicts = metadata.get( 'tools', [] )
+            for tool_dict in tools_dicts:
+                if tool_dict[ 'guid' ] == guid:
+                    # The tool has not changed between the compared changeset revisions.
+                    continue
+                if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version:
+                    # The tool version is different, so we've found the parent.
+                    return tool_dict[ 'guid' ]
+        # The tool did not change through all of the changeset revisions.
+        return old_id
+
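+    # Walk-through of the lineage lookup above (hypothetical values): with
+    # changeset_revisions ordered oldest to newest, the reversed scan returns the
+    # guid of the first revision whose tool shares old_id but differs in version;
+    # when no such revision exists, the tool's original old_id is returned.
+    #
+    #     parent = rmm.get_parent_id( encoded_id, 'fastqc', '0.62', guid, [ 'rev0', 'rev1' ] )
+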
+    def get_query_for_setting_metadata_on_repositories( self, my_writable=False, order=True ):
+        """
+        Return a query containing repositories for resetting metadata.  The order parameter
+        is used for displaying the list of repositories ordered alphabetically for display on
+        a page.  When called from the Tool Shed API, order is False.
+        """
+        # When called from the Tool Shed API, the metadata is reset on all repositories of types
+        # repository_suite_definition and tool_dependency_definition in addition to other selected
+        # repositories.
+        if my_writable:
+            username = self.user.username
+            clause_list = []
+            for repository in self.sa_session.query( self.app.model.Repository ) \
+                                             .filter( self.app.model.Repository.table.c.deleted == false() ):
+                # Always reset metadata on all repositories of types repository_suite_definition and
+                # tool_dependency_definition.
+                if repository.type in [ rt_util.REPOSITORY_SUITE_DEFINITION, rt_util.TOOL_DEPENDENCY_DEFINITION ]:
+                    clause_list.append( self.app.model.Repository.table.c.id == repository.id )
+                else:
+                    allow_push = repository.allow_push( self.app )
+                    if allow_push:
+                        # Include all repositories that are writable by the current user.
+                        allow_push_usernames = allow_push.split( ',' )
+                        if username in allow_push_usernames:
+                            clause_list.append( self.app.model.Repository.table.c.id == repository.id )
+            if clause_list:
+                if order:
+                    return self.sa_session.query( self.app.model.Repository ) \
+                                          .filter( or_( *clause_list ) ) \
+                                          .order_by( self.app.model.Repository.table.c.name,
+                                                     self.app.model.Repository.table.c.user_id )
+                else:
+                    return self.sa_session.query( self.app.model.Repository ) \
+                                          .filter( or_( *clause_list ) )
+            else:
+                # Return an empty query.
+                return self.sa_session.query( self.app.model.Repository ) \
+                                      .filter( self.app.model.Repository.table.c.id == -1 )
+        else:
+            if order:
+                return self.sa_session.query( self.app.model.Repository ) \
+                                      .filter( self.app.model.Repository.table.c.deleted == false() ) \
+                                      .order_by( self.app.model.Repository.table.c.name,
+                                                 self.app.model.Repository.table.c.user_id )
+            else:
+                return self.sa_session.query( self.app.model.Repository ) \
+                                      .filter( self.app.model.Repository.table.c.deleted == false() )
+
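+    # Hedged example of iterating the query built above (assumes a manager "rmm"
+    # bound to the current user; my_writable=True restricts to pushable repositories):
+    #
+    #     for repository in rmm.get_query_for_setting_metadata_on_repositories( my_writable=True,
+    #                                                                           order=False ):
+    #         log.debug( "Will reset metadata on repository %s" % repository.name )
+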
+    def new_datatypes_metadata_required( self, repository_metadata ):
+        """
+        Compare the last saved metadata for each datatype in the repository with the new metadata
+        in self.metadata_dict to determine if a new repository_metadata table record is required
+        or if the last saved metadata record can be updated for datatypes instead.
+        """
+        # Datatypes are stored in metadata as a list of dictionaries that looks like:
+        # [{'dtype': 'galaxy.datatypes.data:Text', 'subclass': 'True', 'extension': 'acedb'}]
+        if 'datatypes' in self.metadata_dict:
+            current_datatypes = self.metadata_dict[ 'datatypes' ]
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    if 'datatypes' in metadata:
+                        ancestor_datatypes = metadata[ 'datatypes' ]
+                        # The saved metadata must be a subset of the new metadata.
+                        datatype_comparison = self.compare_datatypes( ancestor_datatypes, current_datatypes )
+                        if datatype_comparison == self.NOT_EQUAL_AND_NOT_SUBSET:
+                            return True
+                        else:
+                            return False
+                    else:
+                        # The new metadata includes datatypes, but the stored metadata does not,
+                        # so we can update the stored metadata.
+                        return False
+                else:
+                    # There is no stored metadata, so we can update the metadata column in the
+                    # repository_metadata table.
+                    return False
+            else:
+                # There is no stored repository metadata, so we need to create a new repository_metadata
+                # table record.
+                return True
+        # self.metadata_dict includes no metadata for datatypes, so a new repository_metadata
+        # table record is not needed.
+        return False
+
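+    # Sketch of the subset comparison the method above delegates to
+    # compare_datatypes() (dictionaries mirror the example in the comment above):
+    #
+    #     ancestor = [ { 'dtype': 'galaxy.datatypes.data:Text', 'subclass': 'True', 'extension': 'acedb' } ]
+    #     current = ancestor + [ { 'dtype': 'galaxy.datatypes.tabular:Tabular', 'extension': 'tsv' } ]
+    #     # ancestor is a subset of current, so the saved record can simply be updated.
+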
+    def new_metadata_required_for_utilities( self ):
+        """
+        Compare the last stored repository_metadata record associated with self.repository against the
+        contents of self.metadata_dict and return True if a new record is required for any of the Galaxy
+        utility types contained in either metadata dictionary.  The metadata contained in self.metadata_dict
+        may not be a subset of that contained in the last stored repository_metadata record because one or
+        more Galaxy utilities may have been deleted from self.repository in the new tip.
+        """
+        repository_metadata = metadata_util.get_latest_repository_metadata( self.app,
+                                                                            self.repository.id,
+                                                                            downloadable=False )
+        datatypes_required = self.new_datatypes_metadata_required( repository_metadata )
+        # Uncomment the following if we decide that README files should affect how installable
+        # repository revisions are defined.  See the NOTE in the compare_readme_files() method.
+        # readme_files_required = self.new_readme_files_metadata_required( repository_metadata )
+        repository_dependencies_required = \
+            self.new_repository_dependency_metadata_required( repository_metadata )
+        tools_required = self.new_tool_metadata_required( repository_metadata )
+        tool_dependencies_required = self.new_tool_dependency_metadata_required( repository_metadata )
+        workflows_required = self.new_workflow_metadata_required( repository_metadata )
+        if datatypes_required or repository_dependencies_required or \
+                tools_required or tool_dependencies_required or workflows_required:
+            return True
+        return False
+
+    def new_readme_files_metadata_required( self, repository_metadata ):
+        """
+        Compare the last saved metadata for each readme file in the repository with the new metadata
+        in self.metadata_dict to determine if a new repository_metadata table record is required or
+        if the last saved metadata record can be updated for readme files instead.
+        """
+        # Repository README files are kind of a special case because they have no effect on reproducibility.
+        # We'll simply inspect the file names to determine if any that exist in the saved metadata are
+        # eliminated from the new metadata in self.metadata_dict.
+        if 'readme_files' in self.metadata_dict:
+            current_readme_files = self.metadata_dict[ 'readme_files' ]
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    if 'readme_files' in metadata:
+                        ancestor_readme_files = metadata[ 'readme_files' ]
+                        # The saved metadata must be a subset of the new metadata.
+                        readme_file_comparison = self.compare_readme_files( ancestor_readme_files,
+                                                                            current_readme_files )
+                        if readme_file_comparison == self.NOT_EQUAL_AND_NOT_SUBSET:
+                            return True
+                        else:
+                            return False
+                    else:
+                        # The new metadata includes readme_files, but the stored metadata does not, so
+                        # we can update the stored metadata.
+                        return False
+                else:
+                    # There is no stored metadata, so we can update the metadata column in the repository_metadata
+                    # table.
+                    return False
+            else:
+                # There is no stored repository metadata, so we need to create a new repository_metadata
+                # table record.
+                return True
+        # self.metadata_dict includes no metadata for readme_files, so a new repository_metadata
+        # table record is not needed.
+        return False
+
+    def new_repository_dependency_metadata_required( self, repository_metadata ):
+        """
+        Compare the last saved metadata for each repository dependency in the repository
+        with the new metadata in self.metadata_dict to determine if a new repository_metadata
+        table record is required or if the last saved metadata record can be updated for
+        repository_dependencies instead.
+        """
+        if repository_metadata:
+            metadata = repository_metadata.metadata
+            if 'repository_dependencies' in metadata:
+                saved_repository_dependencies = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
+                new_repository_dependencies_metadata = self.metadata_dict.get( 'repository_dependencies', None )
+                if new_repository_dependencies_metadata:
+                    new_repository_dependencies = self.metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
+                    # TODO: We used to include the following here to handle the case where repository
+                    # dependency definitions were deleted.  However, this erroneously returned True in
+                    # cases where it should not have done so.  This usually occurred where multiple single
+                    # files were uploaded when a single tarball should have been.  We need to implement
+                    # support for handling deleted repository dependency definitions so that we can guarantee
+                    # reproducibility, but we need to do it in a way that is better than the following.
+                    # for new_repository_dependency in new_repository_dependencies:
+                    #     if new_repository_dependency not in saved_repository_dependencies:
+                    #         return True
+                    # The saved metadata must be a subset of the new metadata.
+                    for saved_repository_dependency in saved_repository_dependencies:
+                        if saved_repository_dependency not in new_repository_dependencies:
+                            # In some cases the only difference between a dependency definition in the two
+                            # lists is the changeset_revision value.  We'll check to see if this is the case,
+                            # and whether the defined dependency is a repository that has metadata set only
+                            # on its tip.
+                            if not self.different_revision_defines_tip_only_repository_dependency( saved_repository_dependency,
+                                                                                                   new_repository_dependencies ):
+                                return True
+                    return False
+                else:
+                    # The repository_dependencies.xml file must have been deleted, so create a new
+                    # repository_metadata record so we always have access to the deleted file.
+                    return True
+            else:
+                return False
+        else:
+            if 'repository_dependencies' in self.metadata_dict:
+                # There is no saved repository metadata, so we need to create a new repository_metadata record.
+                return True
+            else:
+                # self.metadata_dict includes no metadata for repository dependencies, so a new repository_metadata
+                # record is not needed.
+                return False
+
+    def new_tool_metadata_required( self, repository_metadata ):
+        """
+        Compare the last saved metadata for each tool in the repository with the new metadata in
+        self.metadata_dict to determine if a new repository_metadata table record is required, or if
+        the last saved metadata record can be updated instead.
+        """
+        if 'tools' in self.metadata_dict:
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    if 'tools' in metadata:
+                        saved_tool_ids = []
+                        # The metadata for one or more tools was successfully generated in the past
+                        # for this repository, so we first compare the version string for each tool id
+                        # in self.metadata_dict with what was previously saved to see if we need to create
+                        # a new table record or if we can simply update the existing record.
+                        for new_tool_metadata_dict in self.metadata_dict[ 'tools' ]:
+                            for saved_tool_metadata_dict in metadata[ 'tools' ]:
+                                if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
+                                    saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] )
+                                if new_tool_metadata_dict[ 'id' ] == saved_tool_metadata_dict[ 'id' ]:
+                                    if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]:
+                                        return True
+                        # So far, a new metadata record is not required, but we still have to check to see if
+                        # any new tool ids exist in self.metadata_dict that are not in the saved metadata.  We do
+                        # this because if a new tarball was uploaded to a repository that included tools, it
+                        # may have removed existing tool files if they were not included in the uploaded tarball.
+                        for new_tool_metadata_dict in self.metadata_dict[ 'tools' ]:
+                            if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
+                                return True
+                        return False
+                    else:
+                        # The new metadata includes tools, but the stored metadata does not, so we can
+                        # update the stored metadata.
+                        return False
+                else:
+                    # There is no stored metadata, so we can update the metadata column in the
+                    # repository_metadata table.
+                    return False
+            else:
+                # There is no stored repository metadata, so we need to create a new repository_metadata
+                # table record.
+                return True
+        # self.metadata_dict includes no metadata for tools, so a new repository_metadata table
+        # record is not needed.
+        return False
+
+    def new_tool_dependency_metadata_required( self, repository_metadata ):
+        """
+        Compare the last saved metadata for each tool dependency in the repository with the new
+        metadata in self.metadata_dict to determine if a new repository_metadata table record is
+        required or if the last saved metadata record can be updated for tool_dependencies instead.
+        """
+        if repository_metadata:
+            metadata = repository_metadata.metadata
+            if metadata:
+                if 'tool_dependencies' in metadata:
+                    saved_tool_dependencies = metadata[ 'tool_dependencies' ]
+                    new_tool_dependencies = self.metadata_dict.get( 'tool_dependencies', None )
+                    if new_tool_dependencies:
+                        # TODO: We used to include the following here to handle the case where
+                        # tool dependency definitions were deleted.  However, this erroneously
+                        # returned True in cases where it should not have done so.  This usually
+                        # occurred where multiple single files were uploaded when a single tarball
+                        # should have been.  We need to implement support for handling deleted
+                        # tool dependency definitions so that we can guarantee reproducibility,
+                        # but we need to do it in a way that is better than the following.
+                        # for new_tool_dependency in new_tool_dependencies:
+                        #     if new_tool_dependency not in saved_tool_dependencies:
+                        #         return True
+                        # The saved metadata must be a subset of the new metadata.
+                        for saved_tool_dependency in saved_tool_dependencies:
+                            if saved_tool_dependency not in new_tool_dependencies:
+                                return True
+                        return False
+                    else:
+                        # The tool_dependencies.xml file must have been deleted, so create a new
+                        # repository_metadata record so we always have
+                        # access to the deleted file.
+                        return True
+                else:
+                    return False
+            else:
+                # We have repository metadata that does not include metadata for any tool dependencies
+                # in the repository, so we can update the existing repository metadata.
+                return False
+        else:
+            if 'tool_dependencies' in self.metadata_dict:
+                # There is no saved repository metadata, so we need to create a new repository_metadata
+                # record.
+                return True
+            else:
+                # self.metadata_dict includes no metadata for tool dependencies, so a new repository_metadata
+                # record is not needed.
+                return False
+
+    def new_workflow_metadata_required( self, repository_metadata ):
+        """
+        Currently everything about an exported workflow except the name is hard-coded, so
+        there's no real way to differentiate versions of exported workflows.  If this changes
+        at some future time, this method should be enhanced accordingly.
+        """
+        if 'workflows' in self.metadata_dict:
+            if repository_metadata:
+                # The repository has metadata, so update the workflows value -
+                # no new record is needed.
+                return False
+            else:
+                # There is no saved repository metadata, so we need to create a
+                # new repository_metadata table record.
+                return True
+        # self.metadata_dict includes no metadata for workflows, so a new
+        # repository_metadata table record is not needed.
+        return False
+
+    def reset_all_metadata_on_repository_in_tool_shed( self ):
+        """Reset all metadata on a single repository in a tool shed."""
+        log.debug( "Resetting all metadata on repository: %s" % self.repository.name )
+        repo = hg_util.get_repo_for_repository( self.app,
+                                                repository=None,
+                                                repo_path=self.repository.repo_path( self.app ),
+                                                create=False )
+        # The list of changeset_revisions refers to repository_metadata records that have been created
+        # or updated.  When the following loop completes, we'll delete all repository_metadata records
+        # for this repository that do not have a changeset_revision value in this list.
+        changeset_revisions = []
+        # When a new repository_metadata record is created, it always uses the values of
+        # metadata_changeset_revision and metadata_dict.
+        metadata_changeset_revision = None
+        metadata_dict = None
+        ancestor_changeset_revision = None
+        ancestor_metadata_dict = None
+        for changeset in self.repository.get_changesets_for_setting_metadata( self.app ):
+            work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ramorits" )
+            ctx = repo.changectx( changeset )
+            log.debug( "Cloning repository changeset revision: %s", str( ctx.rev() ) )
+            cloned_ok, error_message = hg_util.clone_repository( self.repository_clone_url, work_dir, str( ctx.rev() ) )
+            if cloned_ok:
+                log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
+                self.set_changeset_revision( str( repo.changectx( changeset ) ) )
+                self.set_repository_files_dir( work_dir )
+                self.generate_metadata_for_changeset_revision()
+                if self.metadata_dict:
+                    if metadata_changeset_revision is None and metadata_dict is None:
+                        # We're at the first change set in the change log.
+                        metadata_changeset_revision = self.changeset_revision
+                        metadata_dict = self.metadata_dict
+                    if ancestor_changeset_revision:
+                        # Compare metadata from ancestor and current.  The value of comparison will be one of:
+                        # self.NO_METADATA - no metadata for either ancestor or current, so continue from current
+                        # self.EQUAL - ancestor metadata is equivalent to current metadata, so continue from current
+                        # self.SUBSET - ancestor metadata is a subset of current metadata, so continue from current
+                        # self.NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current
+                        # metadata, so persist ancestor metadata.
+                        comparison = self.compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict )
+                        if comparison in [ self.NO_METADATA, self.EQUAL, self.SUBSET ]:
+                            ancestor_changeset_revision = self.changeset_revision
+                            ancestor_metadata_dict = self.metadata_dict
+                        elif comparison == self.NOT_EQUAL_AND_NOT_SUBSET:
+                            metadata_changeset_revision = ancestor_changeset_revision
+                            metadata_dict = ancestor_metadata_dict
+                            self.create_or_update_repository_metadata( metadata_changeset_revision, metadata_dict )
+                            changeset_revisions.append( metadata_changeset_revision )
+                            ancestor_changeset_revision = self.changeset_revision
+                            ancestor_metadata_dict = self.metadata_dict
+                    else:
+                        # We're at the beginning of the change log.
+                        ancestor_changeset_revision = self.changeset_revision
+                        ancestor_metadata_dict = self.metadata_dict
+                    if not ctx.children():
+                        metadata_changeset_revision = self.changeset_revision
+                        metadata_dict = self.metadata_dict
+                        # We're at the end of the change log.
+                        self.create_or_update_repository_metadata( metadata_changeset_revision, metadata_dict )
+                        changeset_revisions.append( metadata_changeset_revision )
+                        ancestor_changeset_revision = None
+                        ancestor_metadata_dict = None
+                elif ancestor_metadata_dict:
+                    # We reach here only if self.metadata_dict is empty and ancestor_metadata_dict is not.
+                    if not ctx.children():
+                        # We're at the end of the change log.
+                        self.create_or_update_repository_metadata( metadata_changeset_revision, metadata_dict )
+                        changeset_revisions.append( metadata_changeset_revision )
+                        ancestor_changeset_revision = None
+                        ancestor_metadata_dict = None
+            basic_util.remove_dir( work_dir )
+        # Delete all repository_metadata records for this repository that do not have a changeset_revision
+        # value in changeset_revisions.
+        self.clean_repository_metadata( changeset_revisions )
+        # Set tool version information for all downloadable changeset revisions.  Get the list of changeset
+        # revisions from the changelog.
+        self.reset_all_tool_versions( repo )
+        # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+        self.app.tool_data_tables.data_tables = {}
+
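+    # Illustrative trace of the ancestor/current comparison loop above
+    # (hypothetical revisions and metadata values):
+    #
+    #     rev0 -> metadata A    first changeset; A becomes the ancestor
+    #     rev1 -> metadata A    comparison is EQUAL; ancestor advances to rev1
+    #     rev2 -> metadata B    NOT_EQUAL_AND_NOT_SUBSET; persist rev1/A, ancestor becomes rev2
+    #     tip  -> metadata B    end of the changelog; persist tip/B
+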
+    def reset_all_tool_versions( self, repo ):
+        """Reset tool version lineage for those changeset revisions that include valid tools."""
+        encoded_repository_id = self.app.security.encode_id( self.repository.id )
+        changeset_revisions_that_contain_tools = []
+        for changeset in repo.changelog:
+            changeset_revision = str( repo.changectx( changeset ) )
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                               encoded_repository_id,
+                                                                                               changeset_revision )
+            if repository_metadata:
+                metadata = repository_metadata.metadata
+                if metadata:
+                    if metadata.get( 'tools', None ):
+                        changeset_revisions_that_contain_tools.append( changeset_revision )
+        # The list of changeset_revisions_that_contain_tools is now filtered to contain only those that
+        # are downloadable and contain tools.  If a repository includes tools, build a dictionary of
+        # { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
+        for index, changeset_revision in enumerate( changeset_revisions_that_contain_tools ):
+            tool_versions_dict = {}
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                               encoded_repository_id,
+                                                                                               changeset_revision )
+            metadata = repository_metadata.metadata
+            tool_dicts = metadata[ 'tools' ]
+            if index == 0:
+                # The first changeset_revision is a special case because it will have no ancestor
+                # changeset_revisions in which to match tools.  The parent tool id for tools in the
+                # first changeset_revision will be the "old_id" in the tool config.
+                for tool_dict in tool_dicts:
+                    tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
+            else:
+                for tool_dict in tool_dicts:
+                    parent_id = self.get_parent_id( encoded_repository_id,
+                                                    tool_dict[ 'id' ],
+                                                    tool_dict[ 'version' ],
+                                                    tool_dict[ 'guid' ],
+                                                    changeset_revisions_that_contain_tools[ 0:index ] )
+                    tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+            if tool_versions_dict:
+                repository_metadata.tool_versions = tool_versions_dict
+                self.sa_session.add( repository_metadata )
+                self.sa_session.flush()
+
+    def reset_metadata_on_selected_repositories( self, **kwd ):
+        """
+        Inspect the repository changelog to reset metadata for all appropriate changeset revisions.
+        This method is called from both Galaxy and the Tool Shed.
+        """
+        repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
+        message = ''
+        status = 'done'
+        if repository_ids:
+            successful_count = 0
+            unsuccessful_count = 0
+            for repository_id in repository_ids:
+                try:
+                    repository = repository_util.get_repository_in_tool_shed( self.app, repository_id )
+                    self.set_repository( repository )
+                    self.resetting_all_metadata_on_repository = True
+                    self.reset_all_metadata_on_repository_in_tool_shed()
+                    if self.invalid_file_tups:
+                        message = tool_util.generate_message_for_invalid_tools( self.app,
+                                                                                self.invalid_file_tups,
+                                                                                repository,
+                                                                                None,
+                                                                                as_html=False )
+                        log.debug( message )
+                        unsuccessful_count += 1
+                    else:
+                        log.debug( "Successfully reset metadata on repository %s owned by %s" %
+                            ( str( repository.name ), str( repository.user.username ) ) )
+                        successful_count += 1
+                except Exception:
+                    log.exception( "Error attempting to reset metadata on repository %s" % str( repository.name ) )
+                    unsuccessful_count += 1
+            message = "Successfully reset metadata on %d %s.  " % \
+                ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
+            if unsuccessful_count:
+                message += "Error setting metadata on %d %s - see the paster log for details.  " % \
+                    ( unsuccessful_count, inflector.cond_plural( unsuccessful_count, "repository" ) )
+        else:
+            message = 'Select at least one repository on which to reset all metadata.'
+            status = 'error'
+        return message, status
+
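+    # Hedged usage sketch (encoded repository ids are illustrative):
+    #
+    #     message, status = rmm.reset_metadata_on_selected_repositories(
+    #         repository_ids=[ 'f2db41e1fa331b3e', '33b43b4e7093c91f' ] )
+    #     if status == 'error':
+    #         log.warning( message )
+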
+    def set_repository( self, repository ):
+        super( RepositoryMetadataManager, self ).set_repository( repository )
+        self.repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( self.user, repository )
+
+    def set_repository_metadata( self, host, content_alert_str='', **kwd ):
+        """
+        Set metadata using self.repository's current disk files, returning specific error
+        messages (if any) to alert the repository owner that the changeset has problems.
+        """
+        message = ''
+        status = 'done'
+        encoded_id = self.app.security.encode_id( self.repository.id )
+        repo_dir = self.repository.repo_path( self.app )
+        repo = hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_dir, create=False )
+        self.generate_metadata_for_changeset_revision()
+        if self.metadata_dict:
+            repository_metadata = None
+            repository_type_class = self.app.repository_types_registry.get_class_by_label( self.repository.type )
+            tip_only = isinstance( repository_type_class, TipOnly )
+            if not tip_only and self.new_metadata_required_for_utilities():
+                # Create a new repository_metadata table row.
+                repository_metadata = self.create_or_update_repository_metadata( self.repository.tip( self.app ),
+                                                                                 self.metadata_dict )
+                # If this is the first record stored for this repository, see if we need to send any email alerts.
+                if len( self.repository.downloadable_revisions ) == 1:
+                    suc.handle_email_alerts( self.app,
+                                             host,
+                                             self.repository,
+                                             content_alert_str='',
+                                             new_repo_alert=True,
+                                             admin_only=False )
+            else:
+                # Update the latest stored repository metadata with the contents and attributes of self.metadata_dict.
+                repository_metadata = metadata_util.get_latest_repository_metadata( self.app,
+                                                                                    self.repository.id,
+                                                                                    downloadable=False )
+                if repository_metadata:
+                    downloadable = metadata_util.is_downloadable( self.metadata_dict )
+                    # Update the last saved repository_metadata table row.
+                    repository_metadata.changeset_revision = self.repository.tip( self.app )
+                    repository_metadata.metadata = self.metadata_dict
+                    repository_metadata.downloadable = downloadable
+                    repository_metadata.includes_datatypes = 'datatypes' in self.metadata_dict
+                    # We don't store information about the special type of repository dependency that is needed only for
+                    # compiling a tool dependency defined for the dependent repository.
+                    repository_dependencies_dict = self.metadata_dict.get( 'repository_dependencies', {} )
+                    repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+                    has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+                        repository_util.get_repository_dependency_types( repository_dependencies )
+                    repository_metadata.has_repository_dependencies = has_repository_dependencies
+                    repository_metadata.includes_tool_dependencies = 'tool_dependencies' in self.metadata_dict
+                    repository_metadata.includes_tools = 'tools' in self.metadata_dict
+                    repository_metadata.includes_workflows = 'workflows' in self.metadata_dict
+                    repository_metadata.missing_test_components = False
+                    self.sa_session.add( repository_metadata )
+                    self.sa_session.flush()
+                else:
+                    # There are no metadata records associated with the repository.
+                    repository_metadata = self.create_or_update_repository_metadata( self.repository.tip( self.app ),
+                                                                                     self.metadata_dict )
+            if 'tools' in self.metadata_dict and repository_metadata and status != 'error':
+                # Set tool versions on the new downloadable change set.  The order of the list of changesets is
+                # critical, so we use the repo's changelog.
+                changeset_revisions = []
+                for changeset in repo.changelog:
+                    changeset_revision = str( repo.changectx( changeset ) )
+                    if metadata_util.get_repository_metadata_by_changeset_revision( self.app, encoded_id, changeset_revision ):
+                        changeset_revisions.append( changeset_revision )
+                self.add_tool_versions( encoded_id, repository_metadata, changeset_revisions )
+        elif len( repo ) == 1 and not self.invalid_file_tups:
+            message = "Revision <b>%s</b> includes no Galaxy utilities for which metadata can " % \
+                str( self.repository.tip( self.app ) )
+            message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
+            status = "error"
+        if self.invalid_file_tups:
+            message = tool_util.generate_message_for_invalid_tools( self.app,
+                                                                    self.invalid_file_tups,
+                                                                    self.repository,
+                                                                    self.metadata_dict )
+            status = 'error'
+        # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+        self.app.tool_data_tables.data_tables = {}
+        return message, status
+
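+    # Hedged example of the typical call (host string is illustrative):
+    #
+    #     message, status = rmm.set_repository_metadata( 'toolshed.example.org' )
+    #     if status == 'error':
+    #         log.debug( message )
+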
+    def set_repository_metadata_due_to_new_tip( self, host, content_alert_str=None, **kwd ):
+        """Set metadata on the tip of self.repository in the tool shed."""
+        error_message, status = self.set_repository_metadata( host, content_alert_str=content_alert_str, **kwd )
+        return status, error_message
diff --git a/lib/tool_shed/repository_registry.py b/lib/tool_shed/repository_registry.py
new file mode 100644
index 0000000..fce6b60
--- /dev/null
+++ b/lib/tool_shed/repository_registry.py
@@ -0,0 +1,378 @@
+import logging
+
+from sqlalchemy import and_, false, or_
+
+import tool_shed.repository_types.util as rt_util
+from galaxy.webapps.tool_shed import model
+from tool_shed.util import hg_util
+from tool_shed.util import metadata_util
+
+log = logging.getLogger( __name__ )
+
+
+class Registry( object ):
+
+    def __init__( self, app ):
+        log.debug( "Loading the repository registry..." )
+        self.app = app
+        self.certified_level_one_clause_list = self.get_certified_level_one_clause_list()
+        # The following lists contain tuples like ( repository.name, repository.user.username, changeset_revision )
+        # where the changeset_revision entry is always the latest installable changeset_revision.
+        self.certified_level_one_repository_and_suite_tuples = []
+        self.certified_level_one_suite_tuples = []
+        # These category dictionaries contain entries where the key is the category and the value is the integer count
+        # of viewable repositories within that category.
+        self.certified_level_one_viewable_repositories_and_suites_by_category = {}
+        self.certified_level_one_viewable_suites_by_category = {}
+        self.certified_level_two_repository_and_suite_tuples = []
+        self.certified_level_two_suite_tuples = []
+        self.certified_level_two_viewable_repositories_and_suites_by_category = {}
+        self.certified_level_two_viewable_suites_by_category = {}
+        self.repository_and_suite_tuples = []
+        self.suite_tuples = []
+        self.viewable_repositories_and_suites_by_category = {}
+        self.viewable_suites_by_category = {}
+        self.viewable_valid_repositories_and_suites_by_category = {}
+        self.viewable_valid_suites_by_category = {}
+        self.load_viewable_repositories_and_suites_by_category()
+        self.load_repository_and_suite_tuples()
+
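+    # A minimal construction sketch (assumes "app" is an initialized Tool Shed
+    # application object; the attribute name is illustrative):
+    #
+    #     app.repository_registry = Registry( app )
+    #     counts = app.repository_registry.viewable_repositories_and_suites_by_category
+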
+    def add_category_entry( self, category ):
+        category_name = str( category.name )
+        if category_name not in self.viewable_repositories_and_suites_by_category:
+            self.viewable_repositories_and_suites_by_category[ category_name ] = 0
+        if category_name not in self.viewable_suites_by_category:
+            self.viewable_suites_by_category[ category_name ] = 0
+        if category_name not in self.viewable_valid_repositories_and_suites_by_category:
+            self.viewable_valid_repositories_and_suites_by_category[ category_name ] = 0
+        if category_name not in self.viewable_valid_suites_by_category:
+            self.viewable_valid_suites_by_category[ category_name ] = 0
+        if category_name not in self.certified_level_one_viewable_repositories_and_suites_by_category:
+            self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] = 0
+        if category_name not in self.certified_level_one_viewable_suites_by_category:
+            self.certified_level_one_viewable_suites_by_category[ category_name ] = 0
+
+    def add_entry( self, repository ):
+        try:
+            if repository:
+                is_valid = self.is_valid( repository )
+                certified_level_one_tuple = self.get_certified_level_one_tuple( repository )
+                latest_installable_changeset_revision, is_level_one_certified = certified_level_one_tuple
+                for rca in repository.categories:
+                    category = rca.category
+                    category_name = str( category.name )
+                    if category_name in self.viewable_repositories_and_suites_by_category:
+                        self.viewable_repositories_and_suites_by_category[ category_name ] += 1
+                    else:
+                        self.viewable_repositories_and_suites_by_category[ category_name ] = 1
+                    if is_valid:
+                        if category_name in self.viewable_valid_repositories_and_suites_by_category:
+                            self.viewable_valid_repositories_and_suites_by_category[ category_name ] += 1
+                        else:
+                            self.viewable_valid_repositories_and_suites_by_category[ category_name ] = 1
+                    if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                        if category_name in self.viewable_suites_by_category:
+                            self.viewable_suites_by_category[ category_name ] += 1
+                        else:
+                            self.viewable_suites_by_category[ category_name ] = 1
+                        if is_valid:
+                            if category_name in self.viewable_valid_suites_by_category:
+                                self.viewable_valid_suites_by_category[ category_name ] += 1
+                            else:
+                                self.viewable_valid_suites_by_category[ category_name ] = 1
+                    if is_level_one_certified:
+                        if category_name in self.certified_level_one_viewable_repositories_and_suites_by_category:
+                            self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] += 1
+                        else:
+                            self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] = 1
+                        if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                            if category_name in self.certified_level_one_viewable_suites_by_category:
+                                self.certified_level_one_viewable_suites_by_category[ category_name ] += 1
+                            else:
+                                self.certified_level_one_viewable_suites_by_category[ category_name ] = 1
+                self.load_repository_and_suite_tuple( repository )
+                if is_level_one_certified:
+                    self.load_certified_level_one_repository_and_suite_tuple( repository )
+        except Exception as e:
+            # The viewable repository numbers and the categorized (filtered) lists of repository tuples
+            # may be slightly skewed, but that is preferable to raising a server error.  Everything will
+            # be corrected at the next server start.
+            log.exception( "Handled error adding entry to repository registry: %s." % str( e ) )
+
+    def edit_category_entry( self, old_name, new_name ):
+        if old_name in self.viewable_repositories_and_suites_by_category:
+            val = self.viewable_repositories_and_suites_by_category[ old_name ]
+            del self.viewable_repositories_and_suites_by_category[ old_name ]
+            self.viewable_repositories_and_suites_by_category[ new_name ] = val
+        else:
+            self.viewable_repositories_and_suites_by_category[ new_name ] = 0
+        if old_name in self.viewable_valid_repositories_and_suites_by_category:
+            val = self.viewable_valid_repositories_and_suites_by_category[ old_name ]
+            del self.viewable_valid_repositories_and_suites_by_category[ old_name ]
+            self.viewable_valid_repositories_and_suites_by_category[ new_name ] = val
+        else:
+            self.viewable_valid_repositories_and_suites_by_category[ new_name ] = 0
+        if old_name in self.viewable_suites_by_category:
+            val = self.viewable_suites_by_category[ old_name ]
+            del self.viewable_suites_by_category[ old_name ]
+            self.viewable_suites_by_category[ new_name ] = val
+        else:
+            self.viewable_suites_by_category[ new_name ] = 0
+        if old_name in self.viewable_valid_suites_by_category:
+            val = self.viewable_valid_suites_by_category[ old_name ]
+            del self.viewable_valid_suites_by_category[ old_name ]
+            self.viewable_valid_suites_by_category[ new_name ] = val
+        else:
+            self.viewable_valid_suites_by_category[ new_name ] = 0
+        if old_name in self.certified_level_one_viewable_repositories_and_suites_by_category:
+            val = self.certified_level_one_viewable_repositories_and_suites_by_category[ old_name ]
+            del self.certified_level_one_viewable_repositories_and_suites_by_category[ old_name ]
+            self.certified_level_one_viewable_repositories_and_suites_by_category[ new_name ] = val
+        else:
+            self.certified_level_one_viewable_repositories_and_suites_by_category[ new_name ] = 0
+        if old_name in self.certified_level_one_viewable_suites_by_category:
+            val = self.certified_level_one_viewable_suites_by_category[ old_name ]
+            del self.certified_level_one_viewable_suites_by_category[ old_name ]
+            self.certified_level_one_viewable_suites_by_category[ new_name ] = val
+        else:
+            self.certified_level_one_viewable_suites_by_category[ new_name ] = 0
+
+    def get_certified_level_one_clause_list( self ):
+        certified_level_one_tuples = []
+        clause_list = []
+        for repository in self.sa_session.query( model.Repository ) \
+                                         .filter( and_( model.Repository.table.c.deleted == false(),
+                                                        model.Repository.table.c.deprecated == false() ) ):
+            certified_level_one_tuple = self.get_certified_level_one_tuple( repository )
+            latest_installable_changeset_revision, is_level_one_certified = certified_level_one_tuple
+            if is_level_one_certified:
+                certified_level_one_tuples.append( certified_level_one_tuple )
+                clause_list.append( "%s=%d and %s='%s'" % ( model.RepositoryMetadata.table.c.repository_id,
+                                                            repository.id,
+                                                            model.RepositoryMetadata.table.c.changeset_revision,
+                                                            latest_installable_changeset_revision ) )
+        return clause_list
+
+    def get_certified_level_one_tuple( self, repository ):
+        """
+        Return a 2-tuple ( latest_installable_changeset_revision, is_level_one_certified ) whose
+        boolean member is True if the latest installable changeset_revision of the received
+        repository is level one certified.
+        """
+        if repository is None:
+            return ( None, False )
+        if repository.deleted or repository.deprecated:
+            return ( None, False )
+        repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
+        # Get the latest installable changeset revision since that is all that is currently configured for testing.
+        latest_installable_changeset_revision = metadata_util.get_latest_downloadable_changeset_revision( self.app, repository, repo )
+        if latest_installable_changeset_revision not in [ None, hg_util.INITIAL_CHANGELOG_HASH ]:
+            encoded_repository_id = self.app.security.encode_id( repository.id )
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                               encoded_repository_id,
+                                                                                               latest_installable_changeset_revision )
+            if repository_metadata:
+                return ( latest_installable_changeset_revision, True )
+            # No repository_metadata record exists for the revision, so it cannot be certified.
+            return ( latest_installable_changeset_revision, False )
+        else:
+            # No installable changeset_revision.
+            return ( None, False )
+
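+    # Hedged example of consuming the tuple returned above (names are illustrative):
+    #
+    #     latest_rev, certified = registry.get_certified_level_one_tuple( repository )
+    #     if certified:
+    #         registry.load_certified_level_one_repository_and_suite_tuple( repository )
+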
+    def is_level_one_certified( self, repository_metadata ):
+        if repository_metadata:
+            repository = repository_metadata.repository
+            if repository:
+                if repository.deprecated or repository.deleted:
+                    return False
+                repo_tuple = ( str( repository.name ), str( repository.user.username ), str( repository_metadata.changeset_revision ) )
+                if repository.type in [ rt_util.REPOSITORY_SUITE_DEFINITION ]:
+                    return repo_tuple in self.certified_level_one_suite_tuples
+                else:
+                    return repo_tuple in self.certified_level_one_repository_and_suite_tuples
+        return False
+
+    def is_valid( self, repository ):
+        if repository and not repository.deleted and not repository.deprecated and repository.downloadable_revisions:
+            return True
+        return False
+
+    def load_certified_level_one_repository_and_suite_tuple( self, repository ):
+        # The received repository has been determined to be level one certified.
+        name = str( repository.name )
+        owner = str( repository.user.username )
+        tip_changeset_hash = repository.tip( self.app )
+        if tip_changeset_hash != hg_util.INITIAL_CHANGELOG_HASH:
+            certified_level_one_tuple = ( name, owner, tip_changeset_hash )
+            if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                if certified_level_one_tuple not in self.certified_level_one_suite_tuples:
+                    self.certified_level_one_suite_tuples.append( certified_level_one_tuple )
+            else:
+                if certified_level_one_tuple not in self.certified_level_one_repository_and_suite_tuples:
+                    self.certified_level_one_repository_and_suite_tuples.append( certified_level_one_tuple )
+
+    def load_repository_and_suite_tuple( self, repository ):
+        name = str( repository.name )
+        owner = str( repository.user.username )
+        for repository_metadata in repository.metadata_revisions:
+            changeset_revision = str( repository_metadata.changeset_revision )
+            repo_tuple = ( name, owner, changeset_revision )
+            if repo_tuple not in self.repository_and_suite_tuples:
+                self.repository_and_suite_tuples.append( repo_tuple )
+            if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                if repo_tuple not in self.suite_tuples:
+                    self.suite_tuples.append( repo_tuple )
+
+    def load_repository_and_suite_tuples( self ):
+        # Load self.certified_level_one_repository_and_suite_tuples and self.certified_level_one_suite_tuples.
+        for repository in self.sa_session.query( model.Repository ) \
+                                         .join( model.RepositoryMetadata.table ) \
+                                         .filter( or_( *self.certified_level_one_clause_list ) ) \
+                                         .join( model.User.table ):
+            self.load_certified_level_one_repository_and_suite_tuple( repository )
+        # Load self.repository_and_suite_tuples and self.suite_tuples
+        for repository in self.sa_session.query( model.Repository ) \
+                                         .filter( and_( model.Repository.table.c.deleted == false(),
+                                                        model.Repository.table.c.deprecated == false() ) ) \
+                                         .join( model.User.table ):
+            self.load_repository_and_suite_tuple( repository )
+
+    def load_viewable_repositories_and_suites_by_category( self ):
+        # Clear all dictionaries just in case they were previously loaded.
+        self.certified_level_one_viewable_repositories_and_suites_by_category = {}
+        self.certified_level_one_viewable_suites_by_category = {}
+        self.certified_level_two_viewable_repositories_and_suites_by_category = {}
+        self.certified_level_two_viewable_suites_by_category = {}
+        self.viewable_repositories_and_suites_by_category = {}
+        self.viewable_suites_by_category = {}
+        self.viewable_valid_repositories_and_suites_by_category = {}
+        self.viewable_valid_suites_by_category = {}
+        for category in self.sa_session.query( model.Category ):
+            category_name = str( category.name )
+            if category_name not in self.certified_level_one_viewable_repositories_and_suites_by_category:
+                self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] = 0
+            if category_name not in self.certified_level_one_viewable_suites_by_category:
+                self.certified_level_one_viewable_suites_by_category[ category_name ] = 0
+            if category_name not in self.viewable_repositories_and_suites_by_category:
+                self.viewable_repositories_and_suites_by_category[ category_name ] = 0
+            if category_name not in self.viewable_suites_by_category:
+                self.viewable_suites_by_category[ category_name ] = 0
+            if category_name not in self.viewable_valid_repositories_and_suites_by_category:
+                self.viewable_valid_repositories_and_suites_by_category[ category_name ] = 0
+            if category_name not in self.viewable_valid_suites_by_category:
+                self.viewable_valid_suites_by_category[ category_name ] = 0
+            for rca in category.repositories:
+                repository = rca.repository
+                if not repository.deleted and not repository.deprecated:
+                    is_valid = self.is_valid( repository )
+                    encoded_repository_id = self.app.security.encode_id( repository.id )
+                    tip_changeset_hash = repository.tip( self.app )
+                    repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                                       encoded_repository_id,
+                                                                                                       tip_changeset_hash )
+                    self.viewable_repositories_and_suites_by_category[ category_name ] += 1
+                    if is_valid:
+                        self.viewable_valid_repositories_and_suites_by_category[ category_name ] += 1
+                    if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                        self.viewable_suites_by_category[ category_name ] += 1
+                        if is_valid:
+                            self.viewable_valid_suites_by_category[ category_name ] += 1
+                    if self.is_level_one_certified( repository_metadata ):
+                        self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] += 1
+                        if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                            self.certified_level_one_viewable_suites_by_category[ category_name ] += 1
+
+    def remove_category_entry( self, category ):
+        category_name = str( category.name )
+        if category_name in self.viewable_repositories_and_suites_by_category:
+            del self.viewable_repositories_and_suites_by_category[ category_name ]
+        if category_name in self.viewable_valid_repositories_and_suites_by_category:
+            del self.viewable_valid_repositories_and_suites_by_category[ category_name ]
+        if category_name in self.viewable_suites_by_category:
+            del self.viewable_suites_by_category[ category_name ]
+        if category_name in self.viewable_valid_suites_by_category:
+            del self.viewable_valid_suites_by_category[ category_name ]
+        if category_name in self.certified_level_one_viewable_repositories_and_suites_by_category:
+            del self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ]
+        if category_name in self.certified_level_one_viewable_suites_by_category:
+            del self.certified_level_one_viewable_suites_by_category[ category_name ]
+
+    def remove_entry( self, repository ):
+        try:
+            if repository:
+                is_valid = self.is_valid( repository )
+                certified_level_one_tuple = self.get_certified_level_one_tuple( repository )
+                latest_installable_changeset_revision, is_level_one_certified = certified_level_one_tuple
+                for rca in repository.categories:
+                    category = rca.category
+                    category_name = str( category.name )
+                    if category_name in self.viewable_repositories_and_suites_by_category:
+                        if self.viewable_repositories_and_suites_by_category[ category_name ] > 0:
+                            self.viewable_repositories_and_suites_by_category[ category_name ] -= 1
+                    else:
+                        self.viewable_repositories_and_suites_by_category[ category_name ] = 0
+                    if is_valid:
+                        if category_name in self.viewable_valid_repositories_and_suites_by_category:
+                            if self.viewable_valid_repositories_and_suites_by_category[ category_name ] > 0:
+                                self.viewable_valid_repositories_and_suites_by_category[ category_name ] -= 1
+                        else:
+                            self.viewable_valid_repositories_and_suites_by_category[ category_name ] = 0
+                    if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                        if category_name in self.viewable_suites_by_category:
+                            if self.viewable_suites_by_category[ category_name ] > 0:
+                                self.viewable_suites_by_category[ category_name ] -= 1
+                        else:
+                            self.viewable_suites_by_category[ category_name ] = 0
+                        if is_valid:
+                            if category_name in self.viewable_valid_suites_by_category:
+                                if self.viewable_valid_suites_by_category[ category_name ] > 0:
+                                    self.viewable_valid_suites_by_category[ category_name ] -= 1
+                            else:
+                                self.viewable_valid_suites_by_category[ category_name ] = 0
+                    if is_level_one_certified:
+                        if category_name in self.certified_level_one_viewable_repositories_and_suites_by_category:
+                            if self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] > 0:
+                                self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] -= 1
+                        else:
+                            self.certified_level_one_viewable_repositories_and_suites_by_category[ category_name ] = 0
+                        if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                            if category_name in self.certified_level_one_viewable_suites_by_category:
+                                if self.certified_level_one_viewable_suites_by_category[ category_name ] > 0:
+                                    self.certified_level_one_viewable_suites_by_category[ category_name ] -= 1
+                            else:
+                                self.certified_level_one_viewable_suites_by_category[ category_name ] = 0
+                self.unload_repository_and_suite_tuple( repository )
+                if is_level_one_certified:
+                    self.unload_certified_level_one_repository_and_suite_tuple( repository )
+        except Exception as e:
+            # The viewable repository numbers and the categorized (filtered) lists of repository tuples
+            # may be slightly skewed, but that is not worth raising a server error.  Everything will be
+            # corrected at the next server start.
+            log.exception( "Handled error removing entry from repository registry: %s." % str( e ) )
+
+    @property
+    def sa_session( self ):
+        return self.app.model.context.current
+
+    def unload_certified_level_one_repository_and_suite_tuple( self, repository ):
+        # The received repository has been determined to be level one certified.
+        name = str( repository.name )
+        owner = str( repository.user.username )
+        tip_changeset_hash = repository.tip( self.app )
+        if tip_changeset_hash != hg_util.INITIAL_CHANGELOG_HASH:
+            certified_level_one_tuple = ( name, owner, tip_changeset_hash )
+            if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                if certified_level_one_tuple in self.certified_level_one_suite_tuples:
+                    self.certified_level_one_suite_tuples.remove( certified_level_one_tuple )
+            else:
+                if certified_level_one_tuple in self.certified_level_one_repository_and_suite_tuples:
+                    self.certified_level_one_repository_and_suite_tuples.remove( certified_level_one_tuple )
+
+    def unload_repository_and_suite_tuple( self, repository ):
+        name = str( repository.name )
+        owner = str( repository.user.username )
+        for repository_metadata in repository.metadata_revisions:
+            changeset_revision = str( repository_metadata.changeset_revision )
+            repo_tuple = ( name, owner, changeset_revision )
+            if repo_tuple in self.repository_and_suite_tuples:
+                self.repository_and_suite_tuples.remove( repo_tuple )
+            if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION:
+                if repo_tuple in self.suite_tuples:
+                    self.suite_tuples.remove( repo_tuple )
diff --git a/lib/tool_shed/repository_types/__init__.py b/lib/tool_shed/repository_types/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/repository_types/metadata.py b/lib/tool_shed/repository_types/metadata.py
new file mode 100644
index 0000000..be011aa
--- /dev/null
+++ b/lib/tool_shed/repository_types/metadata.py
@@ -0,0 +1,29 @@
+import logging
+
+from mercurial import hg
+from mercurial import ui
+
+log = logging.getLogger( __name__ )
+
+
+class Metadata( object ):
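+    # Base class for repository types; subclasses define which changesets are inspected when setting metadata.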
+
+    def __init__( self ):
+        self.type = None
+
+    def get_changesets_for_setting_metadata( self, app, repository ):
+        repo = hg.repository( ui.ui(), repository.repo_path( app ) )
+        return repo.changelog
+
+    def is_valid_for_type( self, app, repository, revisions_to_check=None ):
+        raise Exception( "Unimplemented Method" )
+
+
+class TipOnly( Metadata ):
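+    # Restricts metadata generation to the repository tip; used by the definition repository types.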
+
+    def __init__( self ):
+        self.type = None
+
+    def get_changesets_for_setting_metadata( self, app, repository ):
+        repo = hg.repository( ui.ui(), repository.repo_path( app ) )
+        return [ repo.changelog.tip() ]
diff --git a/lib/tool_shed/repository_types/registry.py b/lib/tool_shed/repository_types/registry.py
new file mode 100644
index 0000000..722de90
--- /dev/null
+++ b/lib/tool_shed/repository_types/registry.py
@@ -0,0 +1,21 @@
+import logging
+
+from galaxy.util.odict import odict
+
+from . import repository_suite_definition
+from . import tool_dependency_definition
+from . import unrestricted
+
+log = logging.getLogger( __name__ )
+
+
+class Registry( object ):
+
+    def __init__( self ):
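+        # Use an ordered dict so repository types are presented in a stable, fixed order.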
+        self.repository_types_by_label = odict()
+        self.repository_types_by_label[ 'unrestricted' ] = unrestricted.Unrestricted()
+        self.repository_types_by_label[ 'repository_suite_definition' ] = repository_suite_definition.RepositorySuiteDefinition()
+        self.repository_types_by_label[ 'tool_dependency_definition' ] = tool_dependency_definition.ToolDependencyDefinition()
+
+    def get_class_by_label( self, label ):
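+        # Return the repository type instance registered under label, or None if the label is unknown.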
+        return self.repository_types_by_label.get( label, None )
diff --git a/lib/tool_shed/repository_types/repository_suite_definition.py b/lib/tool_shed/repository_types/repository_suite_definition.py
new file mode 100644
index 0000000..17069e8
--- /dev/null
+++ b/lib/tool_shed/repository_types/repository_suite_definition.py
@@ -0,0 +1,39 @@
+import logging
+
+from mercurial import hg, ui
+
+import tool_shed.repository_types.util as rt_util
+from tool_shed.repository_types.metadata import TipOnly
+from tool_shed.util import basic_util
+
+log = logging.getLogger( __name__ )
+
+
+class RepositorySuiteDefinition( TipOnly ):
+
+    def __init__( self ):
+        self.type = rt_util.REPOSITORY_SUITE_DEFINITION
+        self.label = 'Repository suite definition'
+        self.valid_file_names = [ 'repository_dependencies.xml' ]
+
+    def is_valid_for_type( self, app, repository, revisions_to_check=None ):
+        """
+        Inspect the received repository's contents to determine if they abide by the rules defined for
+        the contents of this type.  If the received revisions_to_check is a list of changeset revisions,
+        then inspection will be restricted to the revisions in the list.
+        """
+        repo = hg.repository( ui.ui(), repository.repo_path( app ) )
+        if revisions_to_check:
+            changeset_revisions = revisions_to_check
+        else:
+            changeset_revisions = repo.changelog
+        for changeset in changeset_revisions:
+            ctx = repo.changectx( changeset )
+            # Inspect each file changed in the changeset to make sure it is named
+            # repository_dependencies.xml.
+            files_changed_in_changeset = ctx.files()
+            for file_path in files_changed_in_changeset:
+                file_name = basic_util.strip_path( file_path )
+                if file_name not in self.valid_file_names:
+                    return False
+        return True
diff --git a/lib/tool_shed/repository_types/tool_dependency_definition.py b/lib/tool_shed/repository_types/tool_dependency_definition.py
new file mode 100644
index 0000000..37e9247
--- /dev/null
+++ b/lib/tool_shed/repository_types/tool_dependency_definition.py
@@ -0,0 +1,38 @@
+import logging
+
+from mercurial import hg, ui
+
+import tool_shed.repository_types.util as rt_util
+from tool_shed.repository_types.metadata import TipOnly
+from tool_shed.util import basic_util
+
+log = logging.getLogger( __name__ )
+
+
+class ToolDependencyDefinition( TipOnly ):
+
+    def __init__( self ):
+        self.type = rt_util.TOOL_DEPENDENCY_DEFINITION
+        self.label = 'Tool dependency definition'
+        self.valid_file_names = [ 'tool_dependencies.xml' ]
+
+    def is_valid_for_type( self, app, repository, revisions_to_check=None ):
+        """
+        Inspect the received repository's contents to determine if they abide by the rules defined for the contents of this type.
+        If the received revisions_to_check is a list of changeset revisions, then inspection will be restricted to the revisions
+        in the list.
+        """
+        repo = hg.repository( ui.ui(), repository.repo_path( app ) )
+        if revisions_to_check:
+            changeset_revisions = revisions_to_check
+        else:
+            changeset_revisions = repo.changelog
+        for changeset in changeset_revisions:
+            ctx = repo.changectx( changeset )
+            # Inspect each file changed in the changeset to make sure it is named tool_dependencies.xml.
+            files_changed_in_changeset = ctx.files()
+            for file_path in files_changed_in_changeset:
+                file_name = basic_util.strip_path( file_path )
+                if file_name not in self.valid_file_names:
+                    return False
+        return True
diff --git a/lib/tool_shed/repository_types/unrestricted.py b/lib/tool_shed/repository_types/unrestricted.py
new file mode 100644
index 0000000..05aa0de
--- /dev/null
+++ b/lib/tool_shed/repository_types/unrestricted.py
@@ -0,0 +1,20 @@
+import logging
+import tool_shed.repository_types.util as rt_util
+from tool_shed.repository_types.metadata import Metadata
+
+log = logging.getLogger( __name__ )
+
+
+class Unrestricted( Metadata ):
+
+    def __init__( self ):
+        self.type = rt_util.UNRESTRICTED
+        self.label = 'Unrestricted'
+
+    def is_valid_for_type( self, app, repository, revisions_to_check=None ):
+        """A repository's type can only be changed to the unrestricted type if it is new or has never been installed."""
+        if repository.is_new( app ):
+            return True
+        if repository.times_downloaded == 0:
+            return True
+        return False
diff --git a/lib/tool_shed/repository_types/util.py b/lib/tool_shed/repository_types/util.py
new file mode 100644
index 0000000..3447ec6
--- /dev/null
+++ b/lib/tool_shed/repository_types/util.py
@@ -0,0 +1,57 @@
+import logging
+
+from galaxy.web.form_builder import SelectField
+
+log = logging.getLogger( __name__ )
+
+REPOSITORY_DEPENDENCY_DEFINITION_FILENAME = 'repository_dependencies.xml'
+REPOSITORY_SUITE_DEFINITION = 'repository_suite_definition'
+TOOL_DEPENDENCY_DEFINITION = 'tool_dependency_definition'
+TOOL_DEPENDENCY_DEFINITION_FILENAME = 'tool_dependencies.xml'
+UNRESTRICTED = 'unrestricted'
+
+types = [ UNRESTRICTED, TOOL_DEPENDENCY_DEFINITION, REPOSITORY_SUITE_DEFINITION ]
+
+
+def build_repository_type_select_field( trans, repository=None, name='repository_type' ):
+    """Called from the Tool Shed to generate the current list of supported repository types."""
+    if repository:
+        selected_type = str( repository.type )
+    else:
+        selected_type = None
+    repository_type_select_field = SelectField( name=name )
+    for type_label, type_class in trans.app.repository_types_registry.repository_types_by_label.items():
+        option_label = str( type_class.label )
+        option_value = str( type_class.type )
+        if selected_type and selected_type == option_value:
+            selected = True
+        else:
+            selected = False
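+        # Offer a type option only if it is the repository's current type or one the repository may validly change to.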
+        if repository:
+            if repository.type == option_value:
+                repository_type_select_field.add_option( option_label, option_value, selected=selected )
+            elif type_class.is_valid_for_type( trans.app, repository ):
+                repository_type_select_field.add_option( option_label, option_value, selected=selected )
+        else:
+            repository_type_select_field.add_option( option_label, option_value, selected=selected )
+    return repository_type_select_field
+
+
+def generate_message_for_repository_type_change( app, repository ):
+    message = ''
+    if repository.can_change_type_to( app, REPOSITORY_SUITE_DEFINITION ):
+        repository_suite_definition_type_class = \
+            app.repository_types_registry.get_class_by_label( REPOSITORY_SUITE_DEFINITION )
+        message += "This repository currently contains a single file named <b>%s</b>.  If the intent of this repository is " % \
+            REPOSITORY_DEPENDENCY_DEFINITION_FILENAME
+        message += "to define relationships to a collection of repositories that contain related Galaxy utilities with "
+        message += "no plans to add additional files, consider setting its type to <b>%s</b>.<br/>" % \
+            repository_suite_definition_type_class.label
+    elif repository.can_change_type_to( app, TOOL_DEPENDENCY_DEFINITION ):
+        tool_dependency_definition_type_class = \
+            app.repository_types_registry.get_class_by_label( TOOL_DEPENDENCY_DEFINITION )
+        message += "This repository currently contains a single file named <b>%s</b>.  If additional files will " % \
+            TOOL_DEPENDENCY_DEFINITION_FILENAME
+        message += "not be added to this repository, consider setting its type to <b>%s</b>.<br/>" % \
+            tool_dependency_definition_type_class.label
+    return message
diff --git a/lib/tool_shed/tool_shed_registry.py b/lib/tool_shed/tool_shed_registry.py
new file mode 100644
index 0000000..83e2c39
--- /dev/null
+++ b/lib/tool_shed/tool_shed_registry.py
@@ -0,0 +1,61 @@
+import logging
+
+from six.moves.urllib import request as urlrequest
+
+from galaxy.util.odict import odict
+from tool_shed.util import common_util, xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class Registry( object ):
+
+    def __init__( self, root_dir=None, config=None ):
+        self.tool_sheds = odict()
+        self.tool_sheds_auth = odict()
+        if root_dir and config:
+            # Parse tool_sheds_conf.xml
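+            # Entries look like the following (user and pass are optional; the URL shown is the main Galaxy Tool Shed):
+            #   <tool_shed name="Galaxy Main Tool Shed" url="https://toolshed.g2.bx.psu.edu/"/>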
+            tree, error_message = xml_util.parse_xml( config )
+            if tree is None:
+                log.warning( "Unable to load references to tool sheds defined in file %s" % str( config ) )
+            else:
+                root = tree.getroot()
+                log.debug( 'Loading references to tool sheds from %s' % config )
+                for elem in root.findall( 'tool_shed' ):
+                    try:
+                        name = elem.get( 'name', None )
+                        url = elem.get( 'url', None )
+                        username = elem.get( 'user', None )
+                        password = elem.get( 'pass', None )
+                        if name and url:
+                            self.tool_sheds[ name ] = url
+                            self.tool_sheds_auth[ name ] = None
+                            log.debug( 'Loaded reference to tool shed: %s' % name )
+                        if name and url and username and password:
+                            pass_mgr = urlrequest.HTTPPasswordMgrWithDefaultRealm()
+                            pass_mgr.add_password( None, url, username, password )
+                            self.tool_sheds_auth[ name ] = pass_mgr
+                    except Exception as e:
+                        log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
+
+    def password_manager_for_url( self, url ):
+        """
+        If the tool shed is using external auth, the client to the tool shed must authenticate to that
+        as well.  This provides access to the six.moves.urllib.request.HTTPPasswordMgrWithDefaultRealm()
+        object for the url passed in.
+
+        Following what galaxy.demo_sequencer.controllers.common does more closely might be more
+        appropriate at some stage...
+        """
+        url_sans_protocol = common_util.remove_protocol_from_tool_shed_url( url )
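+        # Compare protocol-stripped URLs so http and https variants of the same shed still match.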
+        for shed_name, shed_url in self.tool_sheds.items():
+            shed_url_sans_protocol = common_util.remove_protocol_from_tool_shed_url( shed_url )
+            if url_sans_protocol.startswith( shed_url_sans_protocol ):
+                return self.tool_sheds_auth[ shed_name ]
+        log.debug( "Invalid url '%s' received by tool shed registry's password_manager_for_url method." % str( url ) )
+        return None
+
+    def url_auth( self, url ):
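+        # Return an HTTPBasicAuthHandler for sheds that require authentication; implicitly return None otherwise.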
+        password_manager = self.password_manager_for_url( url )
+        if password_manager is not None:
+            return urlrequest.HTTPBasicAuthHandler( password_manager )
diff --git a/lib/tool_shed/tools/__init__.py b/lib/tool_shed/tools/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/tools/data_table_manager.py b/lib/tool_shed/tools/data_table_manager.py
new file mode 100644
index 0000000..413ad25
--- /dev/null
+++ b/lib/tool_shed/tools/data_table_manager.py
@@ -0,0 +1,176 @@
+import logging
+import os
+import shutil
+
+from xml.etree import ElementTree as XmlET
+
+from tool_shed.util import hg_util, xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class ToolDataTableManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def generate_repository_info_elem( self, tool_shed, repository_name, changeset_revision, owner,
+                                       parent_elem=None, **kwd ):
+        """Create and return an ElementTree repository info Element."""
+        if parent_elem is None:
+            elem = XmlET.Element( 'tool_shed_repository' )
+        else:
+            elem = XmlET.SubElement( parent_elem, 'tool_shed_repository' )
+        tool_shed_elem = XmlET.SubElement( elem, 'tool_shed' )
+        tool_shed_elem.text = tool_shed
+        repository_name_elem = XmlET.SubElement( elem, 'repository_name' )
+        repository_name_elem.text = repository_name
+        repository_owner_elem = XmlET.SubElement( elem, 'repository_owner' )
+        repository_owner_elem.text = owner
+        changeset_revision_elem = XmlET.SubElement( elem, 'installed_changeset_revision' )
+        changeset_revision_elem.text = changeset_revision
+        # Add any additional values passed in as keyword arguments.
+        # TODO: enhance additional values to allow e.g. use of dict values that will recurse.
+        for key, value in kwd.items():
+            new_elem = XmlET.SubElement( elem, key )
+            new_elem.text = value
+        return elem
+
+    def generate_repository_info_elem_from_repository( self, tool_shed_repository, parent_elem=None, **kwd ):
+        return self.generate_repository_info_elem( tool_shed_repository.tool_shed,
+                                                   tool_shed_repository.name,
+                                                   tool_shed_repository.installed_changeset_revision,
+                                                   tool_shed_repository.owner,
+                                                   parent_elem=parent_elem,
+                                                   **kwd )
+
+    def get_tool_index_sample_files( self, sample_files ):
+        """
+        Try to return the list of all appropriate tool data sample files included
+        in the repository.
+        """
+        tool_index_sample_files = []
+        for s in sample_files:
+            # The problem with this is that Galaxy does not follow a standard naming
+            # convention for file names.
+            if s.endswith( '.loc.sample' ) or s.endswith( '.xml.sample' ) or s.endswith( '.txt.sample' ):
+                tool_index_sample_files.append( str( s ) )
+        return tool_index_sample_files
+
+    def handle_missing_data_table_entry( self, relative_install_dir, tool_path, repository_tools_tups ):
+        """
+        Inspect each tool to see if any have input parameters that are dynamically
+        generated select lists that require entries in the tool_data_table_conf.xml
+        file.  This method is called only from Galaxy (not the tool shed) when a
+        repository is being installed or reinstalled.
+        """
+        missing_data_table_entry = False
+        for index, repository_tools_tup in enumerate( repository_tools_tups ):
+            tup_path, guid, repository_tool = repository_tools_tup
+            if repository_tool.params_with_missing_data_table_entry:
+                missing_data_table_entry = True
+                break
+        if missing_data_table_entry:
+            # The repository must contain a tool_data_table_conf.xml.sample file that includes
+            # all required entries for all tools in the repository.
+            sample_tool_data_table_conf = hg_util.get_config_from_disk( 'tool_data_table_conf.xml.sample',
+                                                                        relative_install_dir )
+            if sample_tool_data_table_conf:
+                # Add entries to the ToolDataTableManager's in-memory data_tables dictionary.
+                error, message = self.handle_sample_tool_data_table_conf_file( sample_tool_data_table_conf,
+                                                                               persist=True )
+                if error:
+                    # TODO: Do more here than log the error message.
+                    log.debug( message )
+            # Reload the tool into the local list of repository_tools_tups.
+            repository_tool = self.app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid, use_cached=False )
+            repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
+            # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+            self.reset_tool_data_tables()
+        return repository_tools_tups
+
+    def handle_sample_tool_data_table_conf_file( self, filename, persist=False ):
+        """
+        Parse the incoming filename and add new entries to the in-memory
+        self.app.tool_data_tables dictionary.  If persist is True (should
+        only occur if call is from the Galaxy side, not the tool shed), the
+        new entries will be appended to Galaxy's shed_tool_data_table_conf.xml
+        file on disk.
+        """
+        error = False
+        message = ''
+        try:
+            new_table_elems, message = self.app.tool_data_tables \
+                .add_new_entries_from_config_file( config_filename=filename,
+                                                   tool_data_path=self.app.config.shed_tool_data_path,
+                                                   shed_tool_data_table_config=self.app.config.shed_tool_data_table_config,
+                                                   persist=persist )
+            if message:
+                error = True
+        except Exception as e:
+            message = str( e )
+            error = True
+        return error, message
+
+    def get_target_install_dir( self, tool_shed_repository ):
+        tool_path, relative_target_dir = tool_shed_repository.get_tool_relative_path( self.app )
+        # This is where index files will reside on a per repo/installed version basis.
+        target_dir = os.path.join( self.app.config.shed_tool_data_path, relative_target_dir )
+        if not os.path.exists( target_dir ):
+            os.makedirs( target_dir )
+        return target_dir, tool_path, relative_target_dir
+
+    def install_tool_data_tables( self, tool_shed_repository, tool_index_sample_files ):
+        TOOL_DATA_TABLE_FILE_NAME = 'tool_data_table_conf.xml'
+        TOOL_DATA_TABLE_FILE_SAMPLE_NAME = '%s.sample' % ( TOOL_DATA_TABLE_FILE_NAME )
+        SAMPLE_SUFFIX = '.sample'
+        SAMPLE_SUFFIX_OFFSET = -len( SAMPLE_SUFFIX )
+        target_dir, tool_path, relative_target_dir = self.get_target_install_dir( tool_shed_repository )
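+        # Copy each sample file into the target directory, stripping the .sample suffix from the name.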
+        for sample_file in tool_index_sample_files:
+            path, filename = os.path.split( sample_file )
+            target_filename = filename
+            if target_filename.endswith( SAMPLE_SUFFIX ):
+                target_filename = target_filename[ : SAMPLE_SUFFIX_OFFSET ]
+            source_file = os.path.join( tool_path, sample_file )
+            # We're not currently uninstalling index files, so do not overwrite existing files.
+            target_path_filename = os.path.join( target_dir, target_filename )
+            if not os.path.exists( target_path_filename ) or target_filename == TOOL_DATA_TABLE_FILE_NAME:
+                shutil.copy2( source_file, target_path_filename )
+            else:
+                log.debug( "Did not copy sample file '%s' to install directory '%s' because file already exists.", filename, target_dir )
+            # For provenance and to simplify introspection, let's keep the original data table sample file around.
+            if filename == TOOL_DATA_TABLE_FILE_SAMPLE_NAME:
+                shutil.copy2( source_file, os.path.join( target_dir, filename ) )
+        tool_data_table_conf_filename = os.path.join( target_dir, TOOL_DATA_TABLE_FILE_NAME )
+        elems = []
+        if os.path.exists( tool_data_table_conf_filename ):
+            tree, error_message = xml_util.parse_xml( tool_data_table_conf_filename )
+            if tree:
+                for elem in tree.getroot():
+                    # Append individual table elems or other elems, but not the enclosing tables elem.
+                    if elem.tag == 'tables':
+                        for table_elem in elem:
+                            elems.append( table_elem )
+                    else:
+                        elems.append( elem )
+        else:
+            log.debug( "The '%s' data table file was not found, but was expected to be copied from '%s' during repository installation.",
+                       tool_data_table_conf_filename, TOOL_DATA_TABLE_FILE_SAMPLE_NAME )
+        for elem in elems:
+            if elem.tag == 'table':
+                for file_elem in elem.findall( 'file' ):
+                    path = file_elem.get( 'path', None )
+                    if path:
+                        file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[1] ) ) )
+                # Store repository info in the table tag set for traceability.
+                self.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
+        if elems:
+            # Remove the old data table file.
+            os.unlink( tool_data_table_conf_filename )
+            # Persist the new data table content.
+            self.app.tool_data_tables.to_xml_file( tool_data_table_conf_filename, elems )
+        return tool_data_table_conf_filename, elems
+
+    def reset_tool_data_tables( self ):
+        # Reset the tool_data_tables to an empty dictionary.
+        self.app.tool_data_tables.data_tables = {}
diff --git a/lib/tool_shed/tools/tool_validator.py b/lib/tool_shed/tools/tool_validator.py
new file mode 100644
index 0000000..d6576c7
--- /dev/null
+++ b/lib/tool_shed/tools/tool_validator.py
@@ -0,0 +1,337 @@
+import filecmp
+import logging
+import os
+import tempfile
+
+from galaxy.tools import (
+    parameters,
+    Tool
+)
+from galaxy.tools.parameters import dynamic_options
+
+from tool_shed.tools import data_table_manager
+
+from tool_shed.util import basic_util
+from tool_shed.util import hg_util
+from tool_shed.util import repository_util
+from tool_shed.util import tool_util
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+class ToolValidator( object ):
+
+    def __init__( self, app ):
+        self.app = app
+        self.tdtm = data_table_manager.ToolDataTableManager( self.app )
+
+    def can_use_tool_config_disk_file( self, repository, repo, file_path, changeset_revision ):
+        """
+        Determine if repository's tool config file on disk can be used.  This method
+        is restricted to tool config files since, with the exception of tool config
+        files, multiple files with the same name will likely be in various directories
+        in the repository and we're comparing file names only (not relative paths).
+        """
+        if not file_path or not os.path.exists( file_path ):
+            # The file no longer exists on disk, so it must have been deleted at some previous
+            # point in the change log.
+            return False
+        if changeset_revision == repository.tip( self.app ):
+            return True
+        file_name = basic_util.strip_path( file_path )
+        latest_version_of_file = \
+            self.get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision )
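+        # The disk copy is usable only if it is byte-for-byte identical to the latest version in the manifest.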
+        can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file )
+        try:
+            os.unlink( latest_version_of_file )
+        except OSError:
+            pass
+        return can_use_disk_file
+
+    def check_tool_input_params( self, repo_dir, tool_config_name, tool, sample_files ):
+        """
+        Check all of the tool's input parameters, looking for any that are dynamically
+        generated using external data files to make sure the files exist.
+        """
+        invalid_files_and_errors_tups = []
+        correction_msg = ''
+        for input_param in tool.input_params:
+            if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
+                # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml,
+                # make sure all requirements exist.
+                options = input_param.dynamic_options or input_param.options
+                if options and isinstance( options, dynamic_options.DynamicOptions ):
+                    if options.tool_data_table or options.missing_tool_data_table_name:
+                        # Make sure the repository contains a tool_data_table_conf.xml.sample file.
+                        sample_tool_data_table_conf = hg_util.get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
+                        if sample_tool_data_table_conf:
+                            error, correction_msg = \
+                                self.tdtm.handle_sample_tool_data_table_conf_file( sample_tool_data_table_conf,
+                                                                                   persist=False )
+                            if error:
+                                invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
+                            else:
+                                options.missing_tool_data_table_name = None
+                        else:
+                            correction_msg = "This file requires an entry in the tool_data_table_conf.xml file.  "
+                            correction_msg += "Upload a file named tool_data_table_conf.xml.sample to the repository "
+                            correction_msg += "that includes the required entry to correct this error.<br/>"
+                            invalid_tup = ( tool_config_name, correction_msg )
+                            if invalid_tup not in invalid_files_and_errors_tups:
+                                invalid_files_and_errors_tups.append( invalid_tup )
+                    if options.index_file or options.missing_index_file:
+                        # Make sure the repository contains the required xxx.loc.sample file.
+                        index_file = options.index_file or options.missing_index_file
+                        index_file_name = basic_util.strip_path( index_file )
+                        sample_found = False
+                        for sample_file in sample_files:
+                            sample_file_name = basic_util.strip_path( sample_file )
+                            if sample_file_name == '%s.sample' % index_file_name:
+                                options.index_file = index_file_name
+                                options.missing_index_file = None
+                                if options.tool_data_table:
+                                    options.tool_data_table.missing_index_file = None
+                                sample_found = True
+                                break
+                        if not sample_found:
+                            correction_msg = "This file refers to a file named <b>%s</b>.  " % str( index_file_name )
+                            correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % \
+                                str( index_file_name )
+                            invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+        return invalid_files_and_errors_tups
+
+    def concat_messages( self, msg1, msg2 ):
+        if msg1:
+            if msg2:
+                message = '%s  %s' % ( msg1, msg2 )
+            else:
+                message = msg1
+        elif msg2:
+            message = msg2
+        else:
+            message = ''
+        return message
+
+    def copy_disk_sample_files_to_dir( self, repo_files_dir, dest_path ):
+        """
+        Copy all files currently on disk that end with the .sample extension to the
+        directory to which dest_path refers.
+        """
+        sample_files = []
+        for root, dirs, files in os.walk( repo_files_dir ):
+            if root.find( '.hg' ) < 0:
+                for name in files:
+                    if name.endswith( '.sample' ):
+                        relative_path = os.path.join( root, name )
+                        tool_util.copy_sample_file( self.app, relative_path, dest_path=dest_path )
+                        sample_files.append( name )
+        return sample_files
+
+    def get_latest_tool_config_revision_from_repository_manifest( self, repo, filename, changeset_revision ):
+        """
+        Get the latest revision of a tool config file named filename from the repository
+        manifest up to the value of changeset_revision.  This method is restricted to tool_config
+        files rather than any file since it is likely that, with the exception of tool config
+        files, multiple files will have the same name in various directories within the repository.
+        """
+        stripped_filename = basic_util.strip_path( filename )
+        for changeset in hg_util.reversed_upper_bounded_changelog( repo, changeset_revision ):
+            manifest_ctx = repo.changectx( changeset )
+            for ctx_file in manifest_ctx.files():
+                ctx_file_name = basic_util.strip_path( ctx_file )
+                if ctx_file_name == stripped_filename:
+                    try:
+                        fctx = manifest_ctx[ ctx_file ]
+                    except LookupError:
+                        # The ctx_file may have been moved in the change set.  For example,
+                        # 'ncbi_blastp_wrapper.xml' was moved to 'tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml',
+                        # so keep looking for the file until we find the new location.
+                        continue
+                    fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-gltcrfrm" )
+                    tmp_filename = fh.name
+                    fh.close()
+                    fh = open( tmp_filename, 'wb' )
+                    fh.write( fctx.data() )
+                    fh.close()
+                    return tmp_filename
+        return None
+
+    def get_list_of_copied_sample_files( self, repo, ctx, dir ):
+        """
+        Find all sample files (files in the repository with the special .sample extension)
+        in the reversed repository manifest up to ctx.  Copy each discovered file to dir and
+        return the list of filenames.  If a .sample file was added in a changeset and then
+        deleted in a later changeset, it will be returned in the deleted_sample_files list.
+        The caller will set the value of app.config.tool_data_path to dir in order to load
+        the tools and generate metadata for them.
+        """
+        deleted_sample_files = []
+        sample_files = []
+        for changeset in hg_util.reversed_upper_bounded_changelog( repo, ctx ):
+            changeset_ctx = repo.changectx( changeset )
+            for ctx_file in changeset_ctx.files():
+                ctx_file_name = basic_util.strip_path( ctx_file )
+                # If we decide in the future that files deleted later in the changelog should not be
+                # used, we can use the following condition instead:
+                # if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files and ctx_file_name not in deleted_sample_files:
+                if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files:
+                    fctx = hg_util.get_file_context_from_ctx( changeset_ctx, ctx_file )
+                    if fctx == 'DELETED':
+                        # Since the possibly future used if statement above is commented out, the
+                        # same file that was initially added will be discovered in an earlier changeset
+                        # in the change log and fall through to the else block below.  In other words,
+                        # if a file named blast2go.loc.sample was added in change set 0 and then deleted
+                        # in changeset 3, the deleted file in changeset 3 will be handled here, but the
+                        # later discovered file in changeset 0 will be handled in the else block below.
+                        # In this way, the file contents will always be found for future tools even though
+                        # the file was deleted.
+                        if ctx_file_name not in deleted_sample_files:
+                            deleted_sample_files.append( ctx_file_name )
+                    else:
+                        sample_files.append( ctx_file_name )
+                        tmp_ctx_file_name = os.path.join( dir, ctx_file_name.replace( '.sample', '' ) )
+                        fh = open( tmp_ctx_file_name, 'wb' )
+                        fh.write( fctx.data() )
+                        fh.close()
+        return sample_files, deleted_sample_files
+
+    def handle_sample_files_and_load_tool_from_disk( self, repo_files_dir, repository_id, tool_config_filepath, work_dir ):
+        """
+        Copy all sample files from disk to a temporary directory since the sample files may
+        be in multiple directories.
+        """
+        message = ''
+        sample_files = self.copy_disk_sample_files_to_dir( repo_files_dir, work_dir )
+        if sample_files:
+            if 'tool_data_table_conf.xml.sample' in sample_files:
+                # Load entries into the tool_data_tables if the tool requires them.
+                tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
+                error, message = self.tdtm.handle_sample_tool_data_table_conf_file( tool_data_table_config,
+                                                                                    persist=False )
+        tool, valid, message2 = self.load_tool_from_config( repository_id, tool_config_filepath )
+        message = self.concat_messages( message, message2 )
+        return tool, valid, message, sample_files
+
+    def handle_sample_files_and_load_tool_from_tmp_config( self, repo, repository_id, changeset_revision,
+                                                           tool_config_filename, work_dir ):
+        tool = None
+        message = ''
+        ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
+        # We're not currently doing anything with the returned list of deleted_sample_files here.  It is
+        # intended to help handle sample files that are in the manifest, but have been deleted from disk.
+        sample_files, deleted_sample_files = self.get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
+        if sample_files:
+            self.app.config.tool_data_path = work_dir
+            if 'tool_data_table_conf.xml.sample' in sample_files:
+                # Load entries into the tool_data_tables if the tool requires them.
+                tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
+                if os.path.exists( tool_data_table_config ):
+                    error, message = self.tdtm.handle_sample_tool_data_table_conf_file( tool_data_table_config,
+                                                                                        persist=False )
+                    if error:
+                        log.debug( message )
+        manifest_ctx, ctx_file = hg_util.get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
+        if manifest_ctx and ctx_file:
+            tool, message2 = self.load_tool_from_tmp_config( repo, repository_id, manifest_ctx, ctx_file, work_dir )
+            message = self.concat_messages( message, message2 )
+        return tool, message, sample_files
+
+    def load_tool_from_changeset_revision( self, repository_id, changeset_revision, tool_config_filename ):
+        """
+        Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value
+        of tool_config_filename.  The value of changeset_revision is a valid (downloadable)
+        changeset revision.  The tool config will be located in the repository manifest between
+        the received valid changeset revision and the first changeset revision in the repository,
+        searching backwards.
+        """
+        original_tool_data_path = self.app.config.tool_data_path
+        repository = repository_util.get_repository_in_tool_shed( self.app, repository_id )
+        repo_files_dir = repository.repo_path( self.app )
+        repo = hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_files_dir, create=False )
+        message = ''
+        tool = None
+        can_use_disk_file = False
+        tool_config_filepath = repository_util.get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename )
+        work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ltfcr" )
+        can_use_disk_file = self.can_use_tool_config_disk_file( repository,
+                                                                repo,
+                                                                tool_config_filepath,
+                                                                changeset_revision )
+        if can_use_disk_file:
+            self.app.config.tool_data_path = work_dir
+            tool, valid, message, sample_files = \
+                self.handle_sample_files_and_load_tool_from_disk( repo_files_dir,
+                                                                  repository_id,
+                                                                  tool_config_filepath,
+                                                                  work_dir )
+            if tool is not None:
+                invalid_files_and_errors_tups = \
+                    self.check_tool_input_params( repo_files_dir,
+                                                  tool_config_filename,
+                                                  tool,
+                                                  sample_files )
+                if invalid_files_and_errors_tups:
+                    message2 = tool_util.generate_message_for_invalid_tools( self.app,
+                                                                             invalid_files_and_errors_tups,
+                                                                             repository,
+                                                                             metadata_dict=None,
+                                                                             as_html=True,
+                                                                             displaying_invalid_tool=True )
+                    message = self.concat_messages( message, message2 )
+        else:
+            tool, message, sample_files = \
+                self.handle_sample_files_and_load_tool_from_tmp_config( repo,
+                                                                        repository_id,
+                                                                        changeset_revision,
+                                                                        tool_config_filename,
+                                                                        work_dir )
+        basic_util.remove_dir( work_dir )
+        self.app.config.tool_data_path = original_tool_data_path
+        # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+        self.tdtm.reset_tool_data_tables()
+        return repository, tool, message
+
+    def load_tool_from_config( self, repository_id, full_path ):
+        try:
+            tool = self.app.toolbox.load_tool( full_path, repository_id=repository_id, allow_code_files=False, use_cached=False )
+            valid = True
+            error_message = None
+        except KeyError as e:
+            tool = None
+            valid = False
+            error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file.  Upload a file ' % str( e )
+            error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
+            error_message += 'this error.  '
+        except Exception as e:
+            tool = None
+            valid = False
+            error_message = str( e )
+        return tool, valid, error_message
+
+    def load_tool_from_tmp_config( self, repo, repository_id, ctx, ctx_file, work_dir ):
+        tool = None
+        message = ''
+        tmp_tool_config = hg_util.get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
+        if tmp_tool_config:
+            tool_element, error_message = xml_util.parse_xml( tmp_tool_config )
+            if tool_element is None:
+                return tool, message
+            # Look for external files required by the tool config.
+            tmp_code_files = []
+            external_paths = Tool.get_externally_referenced_paths( tmp_tool_config )
+            for path in external_paths:
+                tmp_code_file_name = hg_util.copy_file_from_manifest( repo, ctx, path, work_dir )
+                if tmp_code_file_name:
+                    tmp_code_files.append( tmp_code_file_name )
+            tool, valid, message = self.load_tool_from_config( repository_id, tmp_tool_config )
+            for tmp_code_file in tmp_code_files:
+                try:
+                    os.unlink( tmp_code_file )
+                except OSError:
+                    pass
+            try:
+                os.unlink( tmp_tool_config )
+            except OSError:
+                pass
+        return tool, message
diff --git a/lib/tool_shed/tools/tool_version_manager.py b/lib/tool_shed/tools/tool_version_manager.py
new file mode 100644
index 0000000..7a99ccf
--- /dev/null
+++ b/lib/tool_shed/tools/tool_version_manager.py
@@ -0,0 +1,115 @@
+import logging
+
+from sqlalchemy import and_, or_
+
+from galaxy.tools.toolbox.lineages.tool_shed import ToolVersionCache
+from tool_shed.util import hg_util
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_util
+
+log = logging.getLogger( __name__ )
+
+
+class ToolVersionManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def get_tool_version( self, tool_id ):
+        context = self.app.install_model.context
+        return context.query( self.app.install_model.ToolVersion ) \
+                      .filter( self.app.install_model.ToolVersion.table.c.tool_id == tool_id ) \
+                      .first()
+
+    def get_tool_version_association( self, parent_tool_version, tool_version ):
+        """
+        Return a ToolVersionAssociation if one exists that associates the two
+        received tool_versions. This function is called only from Galaxy.
+        """
+        context = self.app.install_model.context
+        return context.query( self.app.install_model.ToolVersionAssociation ) \
+                      .filter( and_( self.app.install_model.ToolVersionAssociation.table.c.parent_id == parent_tool_version.id,
+                                     self.app.install_model.ToolVersionAssociation.table.c.tool_id == tool_version.id ) ) \
+                      .first()
+
+    def get_version_lineage_for_tool( self, repository_id, repository_metadata, guid ):
+        """
+        Return the tool version lineage chain in descendant order for the received
+        guid contained in the received repository_metadata.tool_versions.  This function
+        is called only from the Tool Shed.
+        """
+        repository = repository_util.get_repository_by_id( self.app, repository_id )
+        repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
+        # Initialize the tool lineage
+        version_lineage = [ guid ]
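+        # Ancestors are appended after the received guid and descendants are inserted at the front,
+        # so the finished list runs from the newest version to the oldest.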
+        # Get all ancestor guids of the received guid.
+        current_child_guid = guid
+        for changeset in hg_util.reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
+            ctx = repo.changectx( changeset )
+            rm = metadata_util.get_repository_metadata_by_changeset_revision( self.app, repository_id, str( ctx ) )
+            if rm:
+                parent_guid = rm.tool_versions.get( current_child_guid, None )
+                if parent_guid:
+                    version_lineage.append( parent_guid )
+                    current_child_guid = parent_guid
+        # Get all descendant guids of the received guid.
+        current_parent_guid = guid
+        for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo,
+                                                                         repository_metadata.changeset_revision,
+                                                                         repository.tip( self.app ) ):
+            ctx = repo.changectx( changeset )
+            rm = metadata_util.get_repository_metadata_by_changeset_revision( self.app, repository_id, str( ctx ) )
+            if rm:
+                tool_versions = rm.tool_versions
+                for child_guid, parent_guid in tool_versions.items():
+                    if parent_guid == current_parent_guid:
+                        version_lineage.insert( 0, child_guid )
+                        current_parent_guid = child_guid
+                        break
+        return version_lineage
+
+    def handle_tool_versions( self, tool_version_dicts, tool_shed_repository ):
+        """
+        Using the list of tool_version_dicts retrieved from the Tool Shed (one per changeset
+        revision up to the currently installed changeset revision), create the parent / child
+        pairs of tool versions.  Each dictionary contains { tool id : parent tool id } pairs.
+        This function is called only from Galaxy.
+        """
+        context = self.app.install_model.context
+        for tool_version_dict in tool_version_dicts:
+            for tool_guid, parent_id in tool_version_dict.items():
+                tool_version_using_tool_guid = self.get_tool_version( tool_guid )
+                tool_version_using_parent_id = self.get_tool_version( parent_id )
+                if not tool_version_using_tool_guid:
+                    tool_version_using_tool_guid = \
+                        self.app.install_model.ToolVersion( tool_id=tool_guid,
+                                                            tool_shed_repository=tool_shed_repository )
+                    context.add( tool_version_using_tool_guid )
+                    context.flush()
+                if not tool_version_using_parent_id:
+                    tool_version_using_parent_id = \
+                        self.app.install_model.ToolVersion( tool_id=parent_id,
+                                                            tool_shed_repository=tool_shed_repository )
+                    context.add( tool_version_using_parent_id )
+                    context.flush()
+                # Remove existing wrong tool version associations having
+                # tool_version_using_parent_id as parent or
+                # tool_version_using_tool_guid as child.
+                context.query( self.app.install_model.ToolVersionAssociation ) \
+                       .filter( or_( and_( self.app.install_model.ToolVersionAssociation.table.c.parent_id == tool_version_using_parent_id.id,
+                                           self.app.install_model.ToolVersionAssociation.table.c.tool_id != tool_version_using_tool_guid.id ),
+                                     and_( self.app.install_model.ToolVersionAssociation.table.c.parent_id != tool_version_using_parent_id.id,
+                                           self.app.install_model.ToolVersionAssociation.table.c.tool_id == tool_version_using_tool_guid.id ) ) ) \
+                       .delete()
+                context.flush()
+                tool_version_association = \
+                    self.get_tool_version_association( tool_version_using_parent_id,
+                                                       tool_version_using_tool_guid )
+                if not tool_version_association:
+                    # Associate the two versions as parent / child.
+                    tool_version_association = \
+                        self.app.install_model.ToolVersionAssociation( tool_id=tool_version_using_tool_guid.id,
+                                                                       parent_id=tool_version_using_parent_id.id )
+                    context.add( tool_version_association )
+                    context.flush()
+        self.app.tool_version_cache = ToolVersionCache( self.app )
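+
+
+# Editor's illustration (not upstream code): each tool_version_dict handled
+# above maps child guid -> parent tool id, so a version lineage can be
+# recovered by following parent links from the newest guid.  A minimal sketch:
+def _example_lineage_walk():
+    tool_versions = { 'toolshed/repos/dev/filtering/Filter1/1.0.2': 'toolshed/repos/dev/filtering/Filter1/1.0.1',
+                      'toolshed/repos/dev/filtering/Filter1/1.0.1': 'Filter1' }
+    lineage = [ 'toolshed/repos/dev/filtering/Filter1/1.0.2' ]
+    while lineage[ -1 ] in tool_versions:
+        lineage.append( tool_versions[ lineage[ -1 ] ] )
+    return lineage  # newest guid first, oldest tool id last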
diff --git a/lib/tool_shed/util/__init__.py b/lib/tool_shed/util/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/tool_shed/util/basic_util.py b/lib/tool_shed/util/basic_util.py
new file mode 100644
index 0000000..7f26b6c
--- /dev/null
+++ b/lib/tool_shed/util/basic_util.py
@@ -0,0 +1,150 @@
+import logging
+import os
+import shutil
+import sys
+from string import Template
+
+import markupsafe
+from six import text_type
+
+from galaxy.util import nice_size, unicodify
+
+log = logging.getLogger( __name__ )
+
+CHUNK_SIZE = 2 ** 20  # 1 MiB
+INSTALLATION_LOG = 'INSTALLATION.log'
+# Set the no-activity timeout to one hour.
+NO_OUTPUT_TIMEOUT = 3600.0
+MAXDIFFSIZE = 8000
+MAX_DISPLAY_SIZE = 32768
+
+DOCKER_IMAGE_TEMPLATE = '''
+# Galaxy Docker image
+
+FROM bgruening/galaxy-stable
+
+MAINTAINER Bjoern A. Gruning, bjoern.gruening@gmail.com
+
+WORKDIR /galaxy-central
+
+${selected_repositories}
+
+# Mark folders as imported from the host.
+VOLUME ["/export/", "/data/", "/var/lib/docker"]
+
+# Expose port 80 (webserver), 21 (FTP server), 8800 (Proxy), 9001 (Galaxy report app)
+EXPOSE 80
+EXPOSE 21
+EXPOSE 8800
+EXPOSE 9001
+
+# Autostart script that is invoked during container start
+CMD ["/usr/bin/startup"]
+'''
+
+SELECTED_REPOSITORIES_TEMPLATE = '''
+RUN install-repository "--url ${tool_shed_url} -o ${repository_owner} --name ${repository_name}"
+'''
+
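+# Editor's illustration (not upstream code): how the two templates above
+# compose -- each selected repository is rendered through
+# SELECTED_REPOSITORIES_TEMPLATE and the concatenated result is substituted
+# into DOCKER_IMAGE_TEMPLATE:
+def _example_render_dockerfile():
+    repo_lines = Template( SELECTED_REPOSITORIES_TEMPLATE ).safe_substitute(
+        tool_shed_url='https://toolshed.g2.bx.psu.edu',
+        repository_owner='devteam',
+        repository_name='bwa' )
+    return Template( DOCKER_IMAGE_TEMPLATE ).safe_substitute( selected_repositories=repo_lines )
+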
+
+def evaluate_template( text, install_environment ):
+    """
+    Substitute variables defined in XML blocks from dependencies file.  The value of the received
+    repository_install_dir is the root installation directory of the repository that contains the
+    tool dependency.  The value of the received install_dir is the root installation directory of
+    the tool_dependency.
+    """
+    return Template( text ).safe_substitute( get_env_var_values( install_environment ) )
+
+
+def get_env_var_values( install_environment ):
+    """
+    Return a dictionary of values, some of which enable substitution of reserved words for the values.
+    The received install_environment object has 3 important attributes for reserved word substitution:
+    install_environment.tool_shed_repository_install_dir is the root installation directory of the repository
+    that contains the tool dependency being installed, install_environment.install_dir is the root
+    installation directory of the tool dependency, and install_environment.tmp_work_dir is the
+    temporary directory where the tool dependency compilation/installation is being processed.
+    """
+    env_var_dict = {}
+    env_var_dict[ 'REPOSITORY_INSTALL_DIR' ] = install_environment.tool_shed_repository_install_dir
+    env_var_dict[ 'INSTALL_DIR' ] = install_environment.install_dir
+    env_var_dict[ 'TMP_WORK_DIR' ] = install_environment.tmp_work_dir
+    env_var_dict[ 'system_install' ] = install_environment.install_dir
+    # If the Python interpreter is 64bit then we can safely assume that the underlying system is also 64bit.
+    env_var_dict[ '__is64bit__' ] = sys.maxsize > 2 ** 32
+    return env_var_dict
+
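+# Editor's illustration (not upstream code): a minimal sketch of reserved word
+# substitution, assuming a stand-in object with the three InstallEnvironment
+# attributes documented above:
+def _example_evaluate_template():
+    class _StubInstallEnvironment( object ):
+        tool_shed_repository_install_dir = '/shed_tools/toolshed/repos/owner/name'
+        install_dir = '/deps/samtools/0.1.19'
+        tmp_work_dir = '/tmp/tmp-toolshed-mtd'
+    text = 'export PATH=$INSTALL_DIR/bin:$PATH'
+    # -> 'export PATH=/deps/samtools/0.1.19/bin:$PATH'
+    return evaluate_template( text, _StubInstallEnvironment() )
+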
+
+def get_file_type_str( changeset_revision, file_type ):
+    if file_type == 'zip':
+        file_type_str = '%s.zip' % changeset_revision
+    elif file_type == 'bz2':
+        file_type_str = '%s.tar.bz2' % changeset_revision
+    elif file_type == 'gz':
+        file_type_str = '%s.tar.gz' % changeset_revision
+    else:
+        file_type_str = ''
+    return file_type_str
+
+
+def move_file( current_dir, source, destination, rename_to=None ):
+    source_path = os.path.abspath( os.path.join( current_dir, source ) )
+    destination_directory = os.path.join( destination )
+    if rename_to is not None:
+        destination_path = os.path.join( destination_directory, rename_to )
+    else:
+        source_file = os.path.basename( source_path )
+        destination_path = os.path.join( destination_directory, source_file )
+    if not os.path.exists( destination_directory ):
+        os.makedirs( destination_directory )
+    shutil.move( source_path, destination_path )
+
+
+def remove_dir( dir ):
+    """Attempt to remove a directory from disk."""
+    if dir and os.path.exists( dir ):
+        try:
+            shutil.rmtree( dir )
+        except Exception:
+            # Removal is best effort, so errors (e.g. permissions) are ignored.
+            pass
+
+
+def size_string( raw_text, size=MAX_DISPLAY_SIZE ):
+    """Return a subset of a string (up to MAX_DISPLAY_SIZE) translated to a safe string for display in a browser."""
+    if raw_text and len( raw_text ) >= size:
+        large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % nice_size( size )
+        raw_text = '%s%s' % ( raw_text[ 0:size ], large_str )
+    return raw_text or ''
+
+
+def stringify( items ):
+    if items:
+        return ','.join( items )
+    return ''
+
+
+def strip_path( fpath ):
+    """Attempt to strip the path from a file name."""
+    if not fpath:
+        return fpath
+    try:
+        file_path, file_name = os.path.split( fpath )
+    except Exception:
+        file_name = fpath
+    return file_name
+
+
+def to_html_string( text ):
+    """Translates the characters in text to an html string"""
+    if text:
+        try:
+            text = unicodify( text )
+        except UnicodeDecodeError as e:
+            return "Error decoding string: %s" % str( e )
+        text = text_type( markupsafe.escape( text ) )
+        text = text.replace( '\n', '<br/>' )
+        text = text.replace( '    ', '&nbsp;&nbsp;&nbsp;&nbsp;' )
+        text = text.replace( ' ', '&nbsp;' )
+    return text
diff --git a/lib/tool_shed/util/commit_util.py b/lib/tool_shed/util/commit_util.py
new file mode 100644
index 0000000..ccceef6
--- /dev/null
+++ b/lib/tool_shed/util/commit_util.py
@@ -0,0 +1,257 @@
+import bz2
+import gzip
+import json
+import logging
+import os
+import shutil
+import tempfile
+from collections import namedtuple
+
+from sqlalchemy.sql.expression import null
+
+import tool_shed.repository_types.util as rt_util
+from galaxy.util import checkers, safe_relpath
+from tool_shed.tools import data_table_manager
+from tool_shed.util import basic_util, hg_util, shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+UNDESIRABLE_DIRS = [ '.hg', '.svn', '.git', '.cvs' ]
+UNDESIRABLE_FILES = [ '.hg_archival.txt', 'hgrc', '.DS_Store', 'tool_test_output.html', 'tool_test_output.json' ]
+
+
+def check_archive( repository, archive ):
+    valid = []
+    invalid = []
+    errors = []
+    undesirable_files = []
+    undesirable_dirs = []
+    for member in archive.getmembers():
+        # Allow regular files and directories only
+        if not ( member.isdir() or member.isfile() or member.islnk() ):
+            errors.append( "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc)." )
+            invalid.append( member )
+            continue
+        if not safe_relpath( member.name ):
+            errors.append( "Uploaded archives cannot contain files that would extract outside of the archive." )
+            invalid.append( member )
+            continue
+        if os.path.basename( member.name ) in UNDESIRABLE_FILES:
+            undesirable_files.append( member )
+            continue
+        head = tail = member.name
+        found_undesirable_dir = False
+        while tail:
+            head, tail = os.path.split( head )
+            if tail in UNDESIRABLE_DIRS:
+                undesirable_dirs.append( member )
+                found_undesirable_dir = True
+                break
+        if found_undesirable_dir:
+            continue
+        if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION and member.name != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
+            errors.append( 'Repositories of type <b>Repository suite definition</b> can contain only a single file named <b>repository_dependencies.xml</b>.' )
+            invalid.append( member )
+            continue
+        if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and member.name != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
+            errors.append( 'Repositories of type <b>Tool dependency definition</b> can contain only a single file named <b>tool_dependencies.xml</b>.' )
+            invalid.append( member )
+            continue
+        valid.append( member )
+    ArchiveCheckResults = namedtuple( 'ArchiveCheckResults', [ 'valid', 'invalid', 'undesirable_files', 'undesirable_dirs', 'errors' ] )
+    return ArchiveCheckResults( valid, invalid, undesirable_files, undesirable_dirs, errors )
+
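+# Editor's illustration (not upstream code): check_archive() expects a
+# tarfile.TarFile and returns a namedtuple of member lists; a hypothetical
+# caller might gate an upload like this:
+def _example_check_archive( repository, archive_path ):
+    import tarfile
+    archive = tarfile.open( archive_path, 'r' )
+    try:
+        results = check_archive( repository, archive )
+    finally:
+        archive.close()
+    return not results.errors and not results.invalid
+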
+
+def check_file_contents_for_email_alerts( app ):
+    """
+    See if any admin users have chosen to receive email alerts when a repository is updated.
+    If so, the file contents of the update must be checked for inappropriate content.
+    """
+    sa_session = app.model.context.current
+    admin_users = app.config.get( "admin_users", "" ).split( "," )
+    for repository in sa_session.query( app.model.Repository ) \
+                                .filter( app.model.Repository.table.c.email_alerts != null() ):
+        email_alerts = json.loads( repository.email_alerts )
+        for user_email in email_alerts:
+            if user_email in admin_users:
+                return True
+    return False
+
+
+def check_file_content_for_html_and_images( file_path ):
+    message = ''
+    if checkers.check_html( file_path ):
+        message = 'The file "%s" contains HTML content.\n' % str( file_path )
+    elif checkers.check_image( file_path ):
+        message = 'The file "%s" contains image content.\n' % str( file_path )
+    return message
+
+
+def get_change_lines_in_file_for_tag( tag, change_dict ):
+    """
+    The received change_dict is the jsonified version of the changes to a file in a
+    changeset being pushed to the Tool Shed from the command line. This method cleans
+    and returns appropriate lines for inspection.
+    """
+    cleaned_lines = []
+    data_list = change_dict.get( 'data', [] )
+    for data_dict in data_list:
+        block = data_dict.get( 'block', '' )
+        lines = block.split( '\\n' )
+        for line in lines:
+            index = line.find( tag )
+            if index > -1:
+                line = line[ index: ]
+                cleaned_lines.append( line )
+    return cleaned_lines
+
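+# Editor's illustration (not upstream code): an assumed minimal change_dict,
+# matching the fields read above -- a 'data' list of dicts whose 'block'
+# strings use literal backslash-n separators:
+def _example_change_lines():
+    change_dict = { 'data': [ { 'block': 'context\\n+<requirement type="package" />\\ncontext' } ] }
+    # -> [ '<requirement type="package" />' ]
+    return get_change_lines_in_file_for_tag( '<requirement', change_dict )
+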
+
+def get_upload_point( repository, **kwd ):
+    upload_point = kwd.get( 'upload_point', None )
+    if upload_point is not None:
+        # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
+        if os.path.exists( upload_point ):
+            if os.path.isfile( upload_point ):
+                # Get the parent directory
+                upload_point, not_needed = os.path.split( upload_point )
+                # Now the value of upload_point will be something like: database/community_files/000/repo_12/
+            upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
+            if upload_point:
+                upload_point = upload_point.lstrip( '/' )
+                upload_point = upload_point.rstrip( '/' )
+            # Now the value of upload_point will be something like: /
+            if upload_point == '/':
+                upload_point = None
+        else:
+            # Must have been an error selecting something that didn't exist, so default to repository root
+            upload_point = None
+    return upload_point
+
+
+def handle_bz2( repository, uploaded_file_name ):
+    fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id,
+                                         dir=os.path.dirname( uploaded_file_name ),
+                                         text=False )
+    bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
+    while True:
+        try:
+            chunk = bzipped_file.read( basic_util.CHUNK_SIZE )
+        except IOError:
+            os.close( fd )
+            os.remove( uncompressed )
+            log.exception( 'Problem uncompressing bz2 data "%s"' % uploaded_file_name )
+            return
+        if not chunk:
+            break
+        os.write( fd, chunk )
+    os.close( fd )
+    bzipped_file.close()
+    shutil.move( uncompressed, uploaded_file_name )
+
+
+def handle_directory_changes( app, host, username, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar,
+                              new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ):
+    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    content_alert_str = ''
+    files_to_remove = []
+    filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
+    if remove_repo_files_not_in_tar and not repository.is_new( app ):
+        # We have a repository that is not new (it contains files), so discover those files that are in the
+        # repository, but not in the uploaded archive.
+        for root, dirs, files in os.walk( full_path ):
+            if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+                for undesirable_dir in UNDESIRABLE_DIRS:
+                    if undesirable_dir in dirs:
+                        dirs.remove( undesirable_dir )
+                        undesirable_dirs_removed += 1
+                for undesirable_file in UNDESIRABLE_FILES:
+                    if undesirable_file in files:
+                        files.remove( undesirable_file )
+                        undesirable_files_removed += 1
+                for name in files:
+                    full_name = os.path.join( root, name )
+                    if full_name not in filenames_in_archive:
+                        files_to_remove.append( full_name )
+        for repo_file in files_to_remove:
+            # Remove files in the repository (relative to the upload point) that are not in
+            # the uploaded archive.
+            try:
+                hg_util.remove_file( repo.ui, repo, repo_file, force=True )
+            except Exception as e:
+                log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+                relative_selected_file = repo_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+                repo.dirstate.remove( relative_selected_file )
+                repo.dirstate.write()
+                absolute_selected_file = os.path.abspath( repo_file )
+                if os.path.isdir( absolute_selected_file ):
+                    try:
+                        os.rmdir( absolute_selected_file )
+                    except OSError:
+                        # The directory is not empty.
+                        pass
+                elif os.path.isfile( absolute_selected_file ):
+                    os.remove( absolute_selected_file )
+                    dir = os.path.split( absolute_selected_file )[0]
+                    try:
+                        os.rmdir( dir )
+                    except OSError:
+                        # The directory is not empty.
+                        pass
+    # See if any admin users have chosen to receive email alerts when a repository is updated.
+    # If so, check every uploaded file to ensure content is appropriate.
+    check_contents = check_file_contents_for_email_alerts( app )
+    for filename_in_archive in filenames_in_archive:
+        # Check file content to ensure it is appropriate.
+        if check_contents and os.path.isfile( filename_in_archive ):
+            content_alert_str += check_file_content_for_html_and_images( filename_in_archive )
+        hg_util.add_changeset( repo.ui, repo, filename_in_archive )
+        if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
+            # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
+            # by parsing the file and adding new entries to the in-memory app.tool_data_tables
+            # dictionary.
+            tdtm = data_table_manager.ToolDataTableManager( app )
+            error, message = tdtm.handle_sample_tool_data_table_conf_file( filename_in_archive, persist=False )
+            if error:
+                return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+    hg_util.commit_changeset( repo.ui,
+                              repo,
+                              full_path_to_changeset=full_path,
+                              username=username,
+                              message=commit_message )
+    admin_only = len( repository.downloadable_revisions ) != 1
+    suc.handle_email_alerts( app,
+                             host,
+                             repository,
+                             content_alert_str=content_alert_str,
+                             new_repo_alert=new_repo_alert,
+                             admin_only=admin_only )
+    return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+
+
+def handle_gzip( repository, uploaded_file_name ):
+    fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id,
+                                         dir=os.path.dirname( uploaded_file_name ),
+                                         text=False )
+    gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
+    while True:
+        try:
+            chunk = gzipped_file.read( basic_util.CHUNK_SIZE )
+        except IOError as e:
+            os.close( fd )
+            os.remove( uncompressed )
+            log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+            return
+        if not chunk:
+            break
+        os.write( fd, chunk )
+    os.close( fd )
+    gzipped_file.close()
+    shutil.move( uncompressed, uploaded_file_name )
+
+
+def uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip=False, isbz2=False ):
+    # Note: str.rstrip() strips a *set of characters*, not a suffix, so
+    # rstrip( '.gz' ) would also eat trailing 'g', 'z' and '.' characters
+    # from the stem; strip the extension by slicing instead.
+    if isgzip:
+        handle_gzip( repository, uploaded_file_name )
+        if uploaded_file_filename.endswith( '.gz' ):
+            return uploaded_file_filename[ :-len( '.gz' ) ]
+        return uploaded_file_filename
+    if isbz2:
+        handle_bz2( repository, uploaded_file_name )
+        if uploaded_file_filename.endswith( '.bz2' ):
+            return uploaded_file_filename[ :-len( '.bz2' ) ]
+        return uploaded_file_filename
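+
+
+# Editor's illustration (not upstream code): a hedged usage sketch.  The
+# compressed payload at uploaded_file_name is decompressed in place; only the
+# display filename is adjusted and returned:
+def _example_uncompress( repository ):
+    # Returns 'tools.tar'; /tmp/upload_1 afterwards holds the decompressed bytes.
+    return uncompress( repository, '/tmp/upload_1', 'tools.tar.gz', isgzip=True )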
diff --git a/lib/tool_shed/util/common_util.py b/lib/tool_shed/util/common_util.py
new file mode 100644
index 0000000..0302227
--- /dev/null
+++ b/lib/tool_shed/util/common_util.py
@@ -0,0 +1,350 @@
+import json
+import logging
+import os
+
+from galaxy import util
+from galaxy.util.odict import odict
+from galaxy.web import url_for
+from tool_shed.util import encoding_util, xml_util
+
+log = logging.getLogger( __name__ )
+
+REPOSITORY_OWNER = 'devteam'
+
+
+def accumulate_tool_dependencies( tool_shed_accessible, tool_dependencies, all_tool_dependencies ):
+    if tool_shed_accessible:
+        if tool_dependencies:
+            for tool_dependency in tool_dependencies:
+                if tool_dependency not in all_tool_dependencies:
+                    all_tool_dependencies.append( tool_dependency )
+    return all_tool_dependencies
+
+
+def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
+    # Get the 000x_tools.xml file associated with the current migrate_tools version number.
+    tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
+    # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
+    migrated_tool_configs_dict = odict()
+    tree, error_message = xml_util.parse_xml( tools_xml_file_path )
+    if tree is None:
+        return False, odict()
+    root = tree.getroot()
+    tool_shed = root.get( 'name' )
+    tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, tool_shed )
+    # The default behavior is that the tool shed is down.
+    tool_shed_accessible = False
+    missing_tool_configs_dict = odict()
+    if tool_shed_url:
+        for elem in root:
+            if elem.tag == 'repository':
+                repository_dependencies = []
+                all_tool_dependencies = []
+                repository_name = elem.get( 'name' )
+                changeset_revision = elem.get( 'changeset_revision' )
+                tool_shed_accessible, repository_dependencies_dict = get_repository_dependencies( app,
+                                                                                                  tool_shed_url,
+                                                                                                  repository_name,
+                                                                                                  REPOSITORY_OWNER,
+                                                                                                  changeset_revision )
+                if tool_shed_accessible:
+                    # Accumulate all tool dependencies defined for repository dependencies for display to the user.
+                    for rd_key, rd_tups in repository_dependencies_dict.items():
+                        if rd_key in [ 'root_key', 'description' ]:
+                            continue
+                        for rd_tup in rd_tups:
+                            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                                parse_repository_dependency_tuple( rd_tup )
+                        tool_shed_accessible, tool_dependencies = get_tool_dependencies( app,
+                                                                                         tool_shed_url,
+                                                                                         name,
+                                                                                         owner,
+                                                                                         changeset_revision )
+                        all_tool_dependencies = accumulate_tool_dependencies( tool_shed_accessible, tool_dependencies, all_tool_dependencies )
+                    tool_shed_accessible, tool_dependencies = get_tool_dependencies( app,
+                                                                                     tool_shed_url,
+                                                                                     repository_name,
+                                                                                     REPOSITORY_OWNER,
+                                                                                     changeset_revision )
+                    all_tool_dependencies = accumulate_tool_dependencies( tool_shed_accessible, tool_dependencies, all_tool_dependencies )
+                    for tool_elem in elem.findall( 'tool' ):
+                        tool_config_file_name = tool_elem.get( 'file' )
+                        if tool_config_file_name:
+                            # We currently do nothing with repository dependencies except install them (we do not display repositories that will be
+                            # installed to the user).  However, we'll store them in the following dictionary in case we choose to display them in the
+                            # future.
+                            dependencies_dict = dict( tool_dependencies=all_tool_dependencies,
+                                                      repository_dependencies=repository_dependencies )
+                            migrated_tool_configs_dict[ tool_config_file_name ] = dependencies_dict
+                else:
+                    break
+        if tool_shed_accessible:
+            # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
+            for tool_panel_config in tool_panel_configs:
+                tree, error_message = xml_util.parse_xml( tool_panel_config )
+                if tree:
+                    root = tree.getroot()
+                    for elem in root:
+                        if elem.tag == 'tool':
+                            missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+                        elif elem.tag == 'section':
+                            for section_elem in elem:
+                                if section_elem.tag == 'tool':
+                                    missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+    else:
+        exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file.  ' % ( tool_shed, app.config.tool_sheds_config )
+        exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
+        raise Exception( exception_msg )
+    return tool_shed_accessible, missing_tool_configs_dict
+
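+# Editor's illustration (not upstream code): the minimal 000x_tools.xml shape
+# implied by the parsing above -- a root tag naming the tool shed, repository
+# entries carrying name/changeset_revision, and tool entries carrying file:
+#
+#   <toolshed name="toolshed.g2.bx.psu.edu">
+#       <repository name="filtering" changeset_revision="abc123">
+#           <tool id="Filter1" version="1.1.0" file="filtering.xml" />
+#       </repository>
+#   </toolshed>
+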
+
+def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ):
+    file_path = elem.get( 'file', None )
+    if file_path:
+        name = os.path.basename( file_path )
+        for migrated_tool_config in migrated_tool_configs_dict.keys():
+            if migrated_tool_config in [ file_path, name ]:
+                missing_tool_configs_dict[ name ] = migrated_tool_configs_dict[ migrated_tool_config ]
+    return missing_tool_configs_dict
+
+
+def generate_clone_url_for_installed_repository( app, repository ):
+    """Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
+    tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, str( repository.tool_shed ) )
+    return util.build_url( tool_shed_url, pathspec=[ 'repos', str( repository.owner ), str( repository.name ) ] )
+
+
+def generate_clone_url_for_repository_in_tool_shed( user, repository ):
+    """Generate the URL for cloning a repository that is in the tool shed."""
+    base_url = url_for( '/', qualified=True ).rstrip( '/' )
+    if user:
+        protocol, base = base_url.split( '://' )
+        username = '%s@' % user.username
+        return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
+    else:
+        return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
+
+
+def generate_clone_url_from_repo_info_tup( app, repo_info_tup ):
+    """Generate the URL for cloning a repository given a tuple of toolshed, name, owner, changeset_revision."""
+    # Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False]
+    toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+        parse_repository_dependency_tuple( repo_info_tup )
+    tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, toolshed )
+    # Don't include the changeset_revision in clone urls.
+    return util.build_url( tool_shed_url, pathspec=[ 'repos', owner, name ] )
+
+
+def get_non_shed_tool_panel_configs( app ):
+    """Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml."""
+    config_filenames = []
+    for config_filename in app.config.tool_configs:
+        # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
+        # <toolbox tool_path="../shed_tools">
+        tree, error_message = xml_util.parse_xml( config_filename )
+        if tree is None:
+            continue
+        root = tree.getroot()
+        tool_path = root.get( 'tool_path', None )
+        if tool_path is None:
+            config_filenames.append( config_filename )
+    return config_filenames
+
+
+def get_repository_dependencies( app, tool_shed_url, repository_name, repository_owner, changeset_revision ):
+    repository_dependencies_dict = {}
+    tool_shed_accessible = True
+    params = dict( name=repository_name, owner=repository_owner, changeset_revision=changeset_revision )
+    pathspec = [ 'repository', 'get_repository_dependencies' ]
+    try:
+        raw_text = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        tool_shed_accessible = True
+    except Exception as e:
+        tool_shed_accessible = False
+        log.warning( "The URL\n%s\nraised the exception:\n%s\n", util.build_url( tool_shed_url, pathspec=pathspec, params=params ), e )
+    if tool_shed_accessible:
+        if len( raw_text ) > 2:
+            encoded_text = json.loads( raw_text )
+            repository_dependencies_dict = encoding_util.tool_shed_decode( encoded_text )
+    return tool_shed_accessible, repository_dependencies_dict
+
+
+def get_protocol_from_tool_shed_url( tool_shed_url ):
+    """Return the protocol from the received tool_shed_url if it exists."""
+    try:
+        if tool_shed_url.find( '://' ) > 0:
+            return tool_shed_url.split( '://' )[0].lower()
+    except Exception as e:
+        # We receive a lot of calls here where the tool_shed_url is None.  The container_util uses
+        # that value when creating a header row.  If the tool_shed_url is not None, we have a problem.
+        if tool_shed_url is not None:
+            log.exception( "Handled exception getting the protocol from Tool Shed URL %s:\n%s", str( tool_shed_url ), e )
+        # Default to HTTP protocol.
+        return 'http'
+
+
+def get_tool_dependencies( app, tool_shed_url, repository_name, repository_owner, changeset_revision ):
+    tool_dependencies = []
+    tool_shed_accessible = True
+    params = dict( name=repository_name, owner=repository_owner, changeset_revision=changeset_revision )
+    pathspec = [ 'repository', 'get_tool_dependencies' ]
+    try:
+        text = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        tool_shed_accessible = True
+    except Exception as e:
+        tool_shed_accessible = False
+        log.warning( "The URL\n%s\nraised the exception:\n%s\n", util.build_url( tool_shed_url, pathspec=pathspec, params=params ), e )
+    if tool_shed_accessible:
+        if text:
+            tool_dependencies_dict = encoding_util.tool_shed_decode( text )
+            for requirements_dict in tool_dependencies_dict.values():
+                tool_dependency_name = requirements_dict[ 'name' ]
+                tool_dependency_version = requirements_dict[ 'version' ]
+                tool_dependency_type = requirements_dict[ 'type' ]
+                tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type ) )
+    return tool_shed_accessible, tool_dependencies
+
+
+def get_tool_shed_repository_ids( as_string=False, **kwd ):
+    tsrid = kwd.get( 'tool_shed_repository_id', None )
+    tsridslist = util.listify( kwd.get( 'tool_shed_repository_ids', None ) )
+    if not tsridslist:
+        tsridslist = util.listify( kwd.get( 'id', None ) )
+    if tsridslist is not None:
+        if tsrid is not None and tsrid not in tsridslist:
+            tsridslist.append( tsrid )
+        if as_string:
+            return ','.join( tsridslist )
+        return tsridslist
+    else:
+        tsridslist = util.listify( kwd.get( 'ordered_tsr_ids', None ) )
+        if tsridslist is not None:
+            if as_string:
+                return ','.join( tsridslist )
+            return tsridslist
+    if as_string:
+        return ''
+    return []
+
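+# Editor's illustration (not upstream code): precedence of the keywords
+# inspected above -- an explicit id list wins and a single id is merged in:
+def _example_tool_shed_repository_ids():
+    # -> 'def,ghi,abc'
+    return get_tool_shed_repository_ids( as_string=True,
+                                         tool_shed_repository_id='abc',
+                                         tool_shed_repository_ids=[ 'def', 'ghi' ] )
+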
+
+def get_tool_shed_url_from_tool_shed_registry( app, tool_shed ):
+    """
+    The value of tool_shed is something like: toolshed.g2.bx.psu.edu.  We need the URL to this tool shed, which is
+    something like: http://toolshed.g2.bx.psu.edu/
+    """
+    cleaned_tool_shed = remove_protocol_from_tool_shed_url( tool_shed )
+    for shed_url in app.tool_shed_registry.tool_sheds.values():
+        if shed_url.find( cleaned_tool_shed ) >= 0:
+            if shed_url.endswith( '/' ):
+                shed_url = shed_url.rstrip( '/' )
+            return shed_url
+    # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
+    return None
+
+
+def get_user_by_username( app, username ):
+    """Get a user from the database by username."""
+    sa_session = app.model.context.current
+    try:
+        user = sa_session.query( app.model.User ) \
+                         .filter( app.model.User.table.c.username == username ) \
+                         .one()
+        return user
+    except Exception:
+        return None
+
+
+def handle_galaxy_url( trans, **kwd ):
+    galaxy_url = kwd.get( 'galaxy_url', None )
+    if galaxy_url:
+        trans.set_cookie( galaxy_url, name='toolshedgalaxyurl' )
+    else:
+        galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
+    return galaxy_url
+
+
+def handle_tool_shed_url_protocol( app, shed_url ):
+    """Handle secure and insecure HTTP protocol since they may change over time."""
+    try:
+        if app.name == 'galaxy':
+            url = remove_protocol_from_tool_shed_url( shed_url )
+            tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, url )
+        else:
+            tool_shed_url = str( url_for( '/', qualified=True ) ).rstrip( '/' )
+        return tool_shed_url
+    except Exception as e:
+        # We receive a lot of calls here where the tool_shed_url is None.  The container_util uses
+        # that value when creating a header row.  If the tool_shed_url is not None, we have a problem.
+        if shed_url is not None:
+            log.exception( "Handled exception removing protocol from URL %s:\n%s", str( shed_url ), e )
+        return shed_url
+
+
+def parse_repository_dependency_tuple( repository_dependency_tuple, contains_error=False ):
+    # Default both prior_installation_required and only_if_compiling_contained_td to False in cases where metadata should be reset on the
+    # repository containing the repository_dependency definition.
+    prior_installation_required = 'False'
+    only_if_compiling_contained_td = 'False'
+    if contains_error:
+        if len( repository_dependency_tuple ) == 5:
+            tool_shed, name, owner, changeset_revision, error = repository_dependency_tuple
+        elif len( repository_dependency_tuple ) == 6:
+            tool_shed, name, owner, changeset_revision, prior_installation_required, error = repository_dependency_tuple
+        elif len( repository_dependency_tuple ) == 7:
+            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error = \
+                repository_dependency_tuple
+        return tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error
+    else:
+        if len( repository_dependency_tuple ) == 4:
+            tool_shed, name, owner, changeset_revision = repository_dependency_tuple
+        elif len( repository_dependency_tuple ) == 5:
+            tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tuple
+        elif len( repository_dependency_tuple ) == 6:
+            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = repository_dependency_tuple
+        return tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td
+
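+# Editor's illustration (not upstream code): the backward-compatible tuple
+# lengths accepted above all normalize to a six-element result:
+def _example_parse_tuples():
+    four_tuple = ( 'toolshed.g2.bx.psu.edu', 'bwa', 'devteam', 'abc123' )
+    six_tuple = ( 'toolshed.g2.bx.psu.edu', 'bwa', 'devteam', 'abc123', 'True', 'False' )
+    # four_tuple -> ( ..., 'abc123', 'False', 'False' ); six_tuple is returned as-is.
+    return ( parse_repository_dependency_tuple( four_tuple ),
+             parse_repository_dependency_tuple( six_tuple ) )
+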
+
+def remove_port_from_tool_shed_url( tool_shed_url ):
+    """Return a partial Tool Shed URL, eliminating the port if it exists."""
+    try:
+        if tool_shed_url.find( ':' ) > 0:
+            # Eliminate the port, if any, since it will result in an invalid directory name.
+            new_tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+        else:
+            new_tool_shed_url = tool_shed_url
+        return new_tool_shed_url.rstrip( '/' )
+    except Exception as e:
+        # We receive a lot of calls here where the tool_shed_url is None.  The container_util uses
+        # that value when creating a header row.  If the tool_shed_url is not None, we have a problem.
+        if tool_shed_url is not None:
+            log.exception( "Handled exception removing the port from Tool Shed URL %s:\n%s", str( tool_shed_url ), e )
+        return tool_shed_url
+
+
+def remove_protocol_and_port_from_tool_shed_url( tool_shed_url ):
+    """Return a partial Tool Shed URL, eliminating the protocol and/or port if either exists."""
+    tool_shed = remove_protocol_from_tool_shed_url( tool_shed_url )
+    tool_shed = remove_port_from_tool_shed_url( tool_shed )
+    return tool_shed
+
+
+def remove_protocol_and_user_from_clone_url( repository_clone_url ):
+    """Return a URL that can be used to clone a repository, eliminating the protocol and user if either exists."""
+    if repository_clone_url.find( '@' ) > 0:
+        # We have a URL that includes an authenticated user, something like:
+        # http://test@bx.psu.edu:9009/repos/some_username/column
+        items = repository_clone_url.split( '@' )
+        tmp_url = items[ 1 ]
+    elif repository_clone_url.find( '//' ) > 0:
+        # We have a URL that includes only a protocol, something like:
+        # http://bx.psu.edu:9009/repos/some_username/column
+        items = repository_clone_url.split( '//' )
+        tmp_url = items[ 1 ]
+    else:
+        tmp_url = repository_clone_url
+    return tmp_url.rstrip( '/' )
+
+
+def remove_protocol_from_tool_shed_url( tool_shed_url ):
+    """Return a partial Tool Shed URL, eliminating the protocol if it exists."""
+    return util.remove_protocol_from_url( tool_shed_url )
diff --git a/lib/tool_shed/util/container_util.py b/lib/tool_shed/util/container_util.py
new file mode 100644
index 0000000..1f4ea75
--- /dev/null
+++ b/lib/tool_shed/util/container_util.py
@@ -0,0 +1,71 @@
+from __future__ import print_function
+
+import logging
+
+from tool_shed.util import common_util
+
+log = logging.getLogger( __name__ )
+
+# String separator
+STRSEP = '__ESEP__'
+
+
+def generate_repository_dependencies_key_for_repository( toolshed_base_url, repository_name, repository_owner,
+                                                         changeset_revision, prior_installation_required,
+                                                         only_if_compiling_contained_td ):
+    """
+    Assumes the tool shed is the current tool shed, since repository dependencies
+    across tool sheds are not yet supported.
+    """
+    # The tool_shed portion of the key must be the value that is stored in the tool_shed_repository.tool_shed column
+    # of the Galaxy database for an installed repository.  This value does not include the protocol, but does include
+    # the port if there is one.
+    tool_shed = common_util.remove_protocol_from_tool_shed_url( toolshed_base_url )
+    return '%s%s%s%s%s%s%s%s%s%s%s' % ( tool_shed,
+                                        STRSEP,
+                                        str( repository_name ),
+                                        STRSEP,
+                                        str( repository_owner ),
+                                        STRSEP,
+                                        str( changeset_revision ),
+                                        STRSEP,
+                                        str( prior_installation_required ),
+                                        STRSEP,
+                                        str( only_if_compiling_contained_td ) )
+
+
+def get_components_from_key( key ):
+    """
+    Assumes the tool shed is the current tool shed, since repository dependencies
+    across tool sheds are not yet supported.
+    """
+    items = key.split( STRSEP )
+    toolshed_base_url = items[ 0 ]
+    repository_name = items[ 1 ]
+    repository_owner = items[ 2 ]
+    changeset_revision = items[ 3 ]
+    if len( items ) == 5:
+        prior_installation_required = items[ 4 ]
+        return toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required
+    elif len( items ) == 6:
+        prior_installation_required = items[ 4 ]
+        only_if_compiling_contained_td = items[ 5 ]
+        return toolshed_base_url, repository_name, repository_owner, \
+            changeset_revision, prior_installation_required, \
+            only_if_compiling_contained_td
+    else:
+        # For backward compatibility to the 12/20/12 Galaxy release we have to return the following, and callers
+        # must handle exceptions.
+        return toolshed_base_url, repository_name, repository_owner, changeset_revision
+
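+# Editor's illustration (not upstream code): keys built by
+# generate_repository_dependencies_key_for_repository() round-trip through
+# get_components_from_key():
+def _example_key_round_trip():
+    key = generate_repository_dependencies_key_for_repository(
+        'http://toolshed.example.org', 'bwa', 'devteam', 'abc123', 'False', 'False' )
+    # -> ( 'toolshed.example.org', 'bwa', 'devteam', 'abc123', 'False', 'False' )
+    return get_components_from_key( key )
+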
+
+def print_folders( pad, folder ):
+    # For debugging...
+    pad_str = ''
+    for i in range( 1, pad ):
+        pad_str += ' '
+    print('%sid: %s key: %s' % ( pad_str, str( folder.id ), folder.key ))
+    for repository_dependency in folder.repository_dependencies:
+        print('    %s%s' % ( pad_str, repository_dependency.listify ))
+    for sub_folder in folder.folders:
+        print_folders( pad + 5, sub_folder )
diff --git a/lib/tool_shed/util/encoding_util.py b/lib/tool_shed/util/encoding_util.py
new file mode 100644
index 0000000..2f92a44
--- /dev/null
+++ b/lib/tool_shed/util/encoding_util.py
@@ -0,0 +1,37 @@
+import binascii
+import json
+import logging
+
+from galaxy.util.hash_util import hmac_new
+
+log = logging.getLogger( __name__ )
+
+encoding_sep = '__esep__'
+encoding_sep2 = '__esepii__'
+
+
+def tool_shed_decode( value ):
+    # Extract and verify hash
+    a, b = value.split( ":" )
+    value = binascii.unhexlify( b )
+    test = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value )
+    assert a == test
+    # Restore from string
+    values = None
+    try:
+        values = json.loads( value )
+    except Exception:
+        pass
+    if values is None:
+        values = value
+    return values
+
+
+def tool_shed_encode( val ):
+    if isinstance( val, dict ) or isinstance( val, list ):
+        value = json.dumps( val )
+    else:
+        value = val
+    a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value )
+    b = binascii.hexlify( value )
+    return "%s:%s" % ( a, b )
diff --git a/lib/tool_shed/util/hg_util.py b/lib/tool_shed/util/hg_util.py
new file mode 100644
index 0000000..b0afd91
--- /dev/null
+++ b/lib/tool_shed/util/hg_util.py
@@ -0,0 +1,462 @@
+import json
+import logging
+import os
+import struct
+import tempfile
+from datetime import datetime
+from time import gmtime
+
+from mercurial import cmdutil, commands, hg, ui
+from mercurial.changegroup import readexactly
+from mercurial.exchange import readbundle
+
+from galaxy.util import listify
+from tool_shed.util import basic_util
+
+log = logging.getLogger( __name__ )
+
+INITIAL_CHANGELOG_HASH = '000000000000'
+
+
+def add_changeset( repo_ui, repo, path_to_filename_in_archive ):
+    commands.add( repo_ui, repo, str( path_to_filename_in_archive ) )
+
+
+def archive_repository_revision( app, repository, archive_dir, changeset_revision ):
+    '''Create an un-versioned archive of a repository.'''
+    repo = get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    options_dict = get_mercurial_default_options_dict( 'archive' )
+    options_dict[ 'rev' ] = changeset_revision
+    error_message = ''
+    return_code = None
+    try:
+        return_code = commands.archive( get_configured_ui(), repo, archive_dir, **options_dict )
+    except Exception as e:
+        error_message = "Error attempting to archive revision <b>%s</b> of repository %s: %s\nReturn code: %s\n" % \
+            ( str( changeset_revision ), str( repository.name ), str( e ), str( return_code ) )
+        log.exception( error_message )
+    return return_code, error_message
+
+
+def bundle_to_json( fh ):
+    """
+    Convert the received HG10xx data stream (a mercurial 1.0 bundle created using hg push from the
+    command line) to a json object.
+    """
+    # See http://www.wstein.org/home/wstein/www/home/was/patches/hg_json
+    hg_unbundle10_obj = readbundle( get_configured_ui(), fh, None )
+    groups = list( unpack_groups( hg_unbundle10_obj ) )
+    return json.dumps( groups, indent=4 )
+
+
+def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
+    """
+    Clone the repository up to the specified changeset_revision.  No subsequent revisions will be
+    present in the cloned repository.
+    """
+    try:
+        commands.clone( get_configured_ui(),
+                        str( repository_clone_url ),
+                        dest=str( repository_file_dir ),
+                        pull=True,
+                        noupdate=False,
+                        rev=listify( str( ctx_rev ) ) )
+        return True, None
+    except Exception as e:
+        error_message = 'Error cloning repository: %s' % str( e )
+        log.debug( error_message )
+        return False, error_message
+
+
+def commit_changeset( repo_ui, repo, full_path_to_changeset, username, message ):
+    commands.commit( repo_ui, repo, full_path_to_changeset, user=username, message=message )
+
+
+def copy_file_from_manifest( repo, ctx, filename, dir ):
+    """
+    Copy the latest version of the file named filename from the repository manifest to the directory
+    to which dir refers.
+    """
+    for changeset in reversed_upper_bounded_changelog( repo, ctx ):
+        changeset_ctx = repo.changectx( changeset )
+        fctx = get_file_context_from_ctx( changeset_ctx, filename )
+        if fctx and fctx not in [ 'DELETED' ]:
+            file_path = os.path.join( dir, filename )
+            fh = open( file_path, 'wb' )
+            fh.write( fctx.data() )
+            fh.close()
+            return file_path
+    return None
+
+
+def create_hgrc_file( app, repository ):
+    # At this point, an entry for the repository is required to be in the hgweb.config
+    # file so we can call repository.repo_path( app ).  Since we support both
+    # http and https, we set push_ssl to False to override the default (which is True)
+    # in the mercurial api.  The hg purge extension purges all files and directories
+    # not being tracked by mercurial in the current repository.  It'll remove unknown
+    # files and empty directories.  This is not currently used because it is not supported
+    # in the mercurial API.
+    repo = get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    fp = repo.opener( 'hgrc', 'wb' )
+    fp.write( '[paths]\n' )
+    fp.write( 'default = .\n' )
+    fp.write( 'default-push = .\n' )
+    fp.write( '[web]\n' )
+    fp.write( 'allow_push = %s\n' % repository.user.username )
+    fp.write( 'name = %s\n' % repository.name )
+    fp.write( 'push_ssl = false\n' )
+    fp.write( '[extensions]\n' )
+    fp.write( 'hgext.purge=' )
+    fp.close()
+
+
+def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
+    """Retrieve a specified changectx from a repository."""
+    for changeset in repo.changelog:
+        ctx = repo.changectx( changeset )
+        if str( ctx ) == changeset_revision:
+            return ctx
+    return None
+
+
+def get_config( config_file, repo, ctx, dir ):
+    """Return the latest version of config_filename from the repository manifest."""
+    config_file = basic_util.strip_path( config_file )
+    for changeset in reversed_upper_bounded_changelog( repo, ctx ):
+        changeset_ctx = repo.changectx( changeset )
+        for ctx_file in changeset_ctx.files():
+            ctx_file_name = basic_util.strip_path( ctx_file )
+            if ctx_file_name == config_file:
+                return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
+    return None
+
+
+def get_config_from_disk( config_file, relative_install_dir ):
+    for root, dirs, files in os.walk( relative_install_dir ):
+        if root.find( '.hg' ) < 0:
+            for name in files:
+                if name == config_file:
+                    return os.path.abspath( os.path.join( root, name ) )
+    return None
+
+
+def get_configured_ui():
+    """Configure any desired ui settings."""
+    _ui = ui.ui()
+    # The following will suppress all messages.  This is
+    # the same as adding the following setting to the repo
+    # hgrc file's [ui] section:
+    # quiet = True
+    _ui.setconfig( 'ui', 'quiet', True )
+    return _ui
+
+
+def get_ctx_file_path_from_manifest( filename, repo, changeset_revision ):
+    """
+    Get the ctx file path for the latest revision of filename from the repository manifest up
+    to the value of changeset_revision.
+    """
+    stripped_filename = basic_util.strip_path( filename )
+    for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+        manifest_ctx = repo.changectx( changeset )
+        for ctx_file in manifest_ctx.files():
+            ctx_file_name = basic_util.strip_path( ctx_file )
+            if ctx_file_name == stripped_filename:
+                return manifest_ctx, ctx_file
+    return None, None
+
+
+def get_file_context_from_ctx( ctx, filename ):
+    """Return the mercurial file context for a specified file."""
+    # We have to be careful in determining if we found the correct file because multiple files with
+    # the same name may be in different directories within ctx if the files were moved within the change
+    # set.  For example, in the following ctx.files() list, the former may have been moved to the latter:
+    # ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample'].
+    # Another scenario is that the file has been deleted.
+    deleted = False
+    filename = basic_util.strip_path( filename )
+    for ctx_file in ctx.files():
+        ctx_file_name = basic_util.strip_path( ctx_file )
+        if filename == ctx_file_name:
+            try:
+                # If the file was moved, its destination will be returned here.
+                fctx = ctx[ ctx_file ]
+                return fctx
+            except LookupError:
+                # Set deleted for now, and continue looking in case the file was moved instead of deleted.
+                deleted = True
+    if deleted:
+        return 'DELETED'
+    return None
+
+
+def get_mercurial_default_options_dict( command, command_table=None, **kwd ):
+    '''Borrowed from repoman - get default parameters for a mercurial command.'''
+    if command_table is None:
+        command_table = commands.table
+    possible = cmdutil.findpossible( command, command_table )
+    # Mercurial >= 3.4 returns a tuple whose first element is the old return dict
+    if type(possible) is tuple:
+        possible = possible[0]
+    if len( possible ) != 1:
+        raise Exception('unable to find mercurial command "%s"' % command)
+    default_options_dict = dict( ( r[1].replace( '-', '_' ), r[2] ) for r in next(iter(possible.values()))[1][1] )
+    for option in kwd:
+        default_options_dict[ option ] = kwd[ option ]
+    return default_options_dict
+
+
+def get_named_tmpfile_from_ctx( ctx, filename, dir ):
+    """
+    Return a named temporary file created from a specified file with a given name included in a repository
+    changeset revision.
+    """
+    filename = basic_util.strip_path( filename )
+    for ctx_file in ctx.files():
+        ctx_file_name = basic_util.strip_path( ctx_file )
+        if filename == ctx_file_name:
+            try:
+                # If the file was moved, its destination file contents will be returned here.
+                fctx = ctx[ ctx_file ]
+            except LookupError:
+                # Continue looking in case the file was moved.
+                fctx = None
+                continue
+            if fctx:
+                fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-gntfc", dir=dir )
+                tmp_filename = fh.name
+                fh.close()
+                fh = open( tmp_filename, 'wb' )
+                fh.write( fctx.data() )
+                fh.close()
+                return tmp_filename
+    return None
+
+
+def get_readable_ctx_date( ctx ):
+    """Convert the date of the changeset (the received ctx) to a human-readable date."""
+    t, tz = ctx.date()
+    date = datetime( *gmtime( float( t ) - tz )[ :6 ] )
+    ctx_date = date.strftime( "%Y-%m-%d" )
+    return ctx_date
+
+
+def get_repo_for_repository( app, repository=None, repo_path=None, create=False ):
+    if repository is not None:
+        return hg.repository( get_configured_ui(), repository.repo_path( app ), create=create )
+    if repo_path is not None:
+        return hg.repository( get_configured_ui(), repo_path, create=create )
+
+
+def get_repository_heads( repo ):
+    """Return current repository heads, which are changesets with no child changesets."""
+    heads = [ repo[ h ] for h in repo.heads( None ) ]
+    return heads
+
+
+def get_reversed_changelog_changesets( repo ):
+    """Return a list of changesets in reverse order from that provided by the repository manifest."""
+    reversed_changelog = []
+    for changeset in repo.changelog:
+        reversed_changelog.insert( 0, changeset )
+    return reversed_changelog
+
+
+def get_revision_label( app, repository, changeset_revision, include_date=True, include_hash=True ):
+    """
+    Return a string consisting of the human-readable changeset rev and the changeset revision string,
+    which includes the revision date if the received include_date is True.
+    """
+    repo = get_repo_for_repository( app, repository=repository, repo_path=None )
+    ctx = get_changectx_for_changeset( repo, changeset_revision )
+    if ctx:
+        return get_revision_label_from_ctx( ctx, include_date=include_date, include_hash=include_hash )
+    else:
+        if include_hash:
+            return "-1:%s" % changeset_revision
+        else:
+            return "-1"
+
+
+def get_rev_label_changeset_revision_from_repository_metadata( app, repository_metadata, repository=None,
+                                                               include_date=True, include_hash=True ):
+    if repository is None:
+        repository = repository_metadata.repository
+    repo = hg.repository( get_configured_ui(), repository.repo_path( app ) )
+    changeset_revision = repository_metadata.changeset_revision
+    ctx = get_changectx_for_changeset( repo, changeset_revision )
+    if ctx:
+        rev = '%04d' % ctx.rev()
+        if include_date:
+            changeset_revision_date = get_readable_ctx_date( ctx )
+            if include_hash:
+                label = "%s:%s (%s)" % ( str( ctx.rev() ), changeset_revision, changeset_revision_date )
+            else:
+                label = "%s (%s)" % ( str( ctx.rev() ), changeset_revision_date )
+        else:
+            if include_hash:
+                label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+            else:
+                label = "%s" % str( ctx.rev() )
+    else:
+        rev = '-1'
+        if include_hash:
+            label = "-1:%s" % changeset_revision
+        else:
+            label = "-1"
+    return rev, label, changeset_revision
+
+
+def get_revision_label_from_ctx( ctx, include_date=True, include_hash=True ):
+    if include_date:
+        if include_hash:
+            return '%s:%s <i><font color="#666666">(%s)</font></i>' % \
+                ( str( ctx.rev() ), str( ctx ), str( get_readable_ctx_date( ctx ) ) )
+        else:
+            return '%s <i><font color="#666666">(%s)</font></i>' % \
+                ( str( ctx.rev() ), str( get_readable_ctx_date( ctx ) ) )
+    else:
+        if include_hash:
+            return '%s:%s' % ( str( ctx.rev() ), str( ctx ) )
+        else:
+            return '%s' % str( ctx.rev() )
+
+
+def get_rev_label_from_changeset_revision( repo, changeset_revision, include_date=True, include_hash=True ):
+    """
+    Given a changeset revision hash, return two strings: the changeset rev and a revision label that
+    includes the revision date if the received include_date is True.
+    """
+    ctx = get_changectx_for_changeset( repo, changeset_revision )
+    if ctx:
+        rev = '%04d' % ctx.rev()
+        label = get_revision_label_from_ctx( ctx, include_date=include_date, include_hash=include_hash )
+    else:
+        rev = '-1'
+        if include_hash:
+            label = "-1:%s" % changeset_revision
+        else:
+            label = "-1"
+    return rev, label
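+
+# Example (hypothetical hash; the label embeds HTML markup when include_date is True):
+#     rev, label = get_rev_label_from_changeset_revision( repo, 'ef37fc635cb9' )
+#     # rev is a zero-padded string such as '0042', or '-1' if the changeset
+#     # cannot be found in the repository changelog.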
+
+
+def pull_repository( repo, repository_clone_url, ctx_rev ):
+    """Pull changes from a remote repository to a local one."""
+    commands.pull( get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] )
+
+
+def remove_file( repo_ui, repo, selected_file, force=True ):
+    commands.remove( repo_ui, repo, selected_file, force=force )
+
+
+def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
+    """
+    Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision,
+    but up to and including the included_upper_bounds_changeset_revision.  The value of excluded_lower_bounds_changeset_revision
+    will be the value of INITIAL_CHANGELOG_HASH if no valid changesets exist before included_upper_bounds_changeset_revision.
+    """
+    # To set excluded_lower_bounds_changeset_revision, calling methods should do the following, where the value
+    # of changeset_revision is a downloadable changeset_revision.
+    # excluded_lower_bounds_changeset_revision = \
+    #     metadata_util.get_previous_metadata_changeset_revision( repository, repo, changeset_revision, downloadable=? )
+    if excluded_lower_bounds_changeset_revision == INITIAL_CHANGELOG_HASH:
+        appending_started = True
+    else:
+        appending_started = False
+    reversed_changelog = []
+    for changeset in repo.changelog:
+        changeset_hash = str( repo.changectx( changeset ) )
+        if appending_started:
+            reversed_changelog.insert( 0, changeset )
+        if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started:
+            appending_started = True
+        if changeset_hash == included_upper_bounds_changeset_revision:
+            break
+    return reversed_changelog
+
+
+def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
+    """
+    Return a reversed list of changesets in the repository changelog up to and including the
+    included_upper_bounds_changeset_revision.
+    """
+    return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
+
+
+def unpack_chunks( hg_unbundle10_obj ):
+    """
+    Generate parsed chunks of a "group" in a mercurial unbundle10 object.  Such an object is created
+    by readbundle when it reads a changeset that was pushed to a Tool Shed repository using hg push
+    from the command line.
+    """
+    while True:
+        length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) )
+        if length <= 4:
+            # We found a "null chunk", which ends the group.
+            break
+        if length < 84:
+            raise Exception( "invalid chunk length %d" % length )
+        node, p1, p2, cs = struct.unpack( '20s20s20s20s', readexactly( hg_unbundle10_obj, 80 ) )
+        yield { 'node': node.encode( 'hex' ),
+                'p1': p1.encode( 'hex' ),
+                'p2': p2.encode( 'hex' ),
+                'cs': cs.encode( 'hex' ),
+                'data': [ patch for patch in unpack_patches( hg_unbundle10_obj, length - 84 ) ] }
+
+
+def unpack_groups( hg_unbundle10_obj ):
+    """
+    Generate parsed groups from a mercurial unbundle10 object.  Such an object is created by
+    readbundle when it reads a changeset that was pushed to a Tool Shed repository using hg push
+    from the command line.
+    """
+    # Process the changelog group.
+    yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ]
+    # Process the manifest group.
+    yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ]
+    while True:
+        length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) )
+        if length <= 4:
+            # We found a "null meta chunk", which ends the changegroup.
+            break
+        filename = readexactly( hg_unbundle10_obj, length - 4 ).encode( 'string_escape' )
+        # Process the file group.
+        yield ( filename, [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] )
+
+
+def unpack_patches( hg_unbundle10_obj, remaining ):
+    """
+    Generate patches from the data field in a chunk.  Because the data field has no delimiter, its
+    length must be passed in as the remaining argument.
+    """
+    while remaining >= 12:
+        start, end, blocklen = struct.unpack( '>lll', readexactly( hg_unbundle10_obj, 12 ) )
+        remaining -= 12
+        if blocklen > remaining:
+            raise Exception( "unexpected end of patch stream" )
+        block = readexactly( hg_unbundle10_obj, blocklen )
+        remaining -= blocklen
+        yield { 'start': start,
+                'end': end,
+                'blocklen': blocklen,
+                'block': block.encode( 'string_escape' ) }
+    if remaining > 0:
+        log.error("Unexpected end of patch stream, %s remaining", remaining)
+        raise Exception( "unexpected end of patch stream" )
+
+
+def update_repository( repo, ctx_rev=None ):
+    """
+    Update the cloned repository to the received ctx_rev.  It is critical that the installed repository is updated to the desired
+    changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
+    """
+    # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
+    # The codes used to show the status of files are as follows.
+    # M = modified
+    # A = added
+    # R = removed
+    # C = clean
+    # ! = deleted, but still tracked
+    # ? = not tracked
+    # I = ignored
+    # It would be nice if we could use mercurial's purge extension to remove untracked files.  The problem is that
+    # purging is not supported by the mercurial API.
+    commands.update( get_configured_ui(), repo, rev=ctx_rev )
diff --git a/lib/tool_shed/util/metadata_util.py b/lib/tool_shed/util/metadata_util.py
new file mode 100644
index 0000000..f204058
--- /dev/null
+++ b/lib/tool_shed/util/metadata_util.py
@@ -0,0 +1,369 @@
+import logging
+from operator import itemgetter
+
+from sqlalchemy import and_
+
+import tool_shed.util.repository_util
+from galaxy import util
+from tool_shed.util import common_util
+from tool_shed.util import hg_util
+
+log = logging.getLogger( __name__ )
+
+
+def get_all_dependencies( app, metadata_entry, processed_dependency_links=None ):
+    # Use None rather than a mutable default argument so the processed list does not persist across calls.
+    if processed_dependency_links is None:
+        processed_dependency_links = []
+    encoder = app.security.encode_id
+    value_mapper = { 'repository_id': encoder, 'id': encoder, 'user_id': encoder }
+    metadata = metadata_entry.to_dict( value_mapper=value_mapper, view='element' )
+    db = app.model.context.current
+    returned_dependencies = []
+    required_metadata = get_dependencies_for_metadata_revision( app, metadata )
+    if required_metadata is None:
+        return metadata
+    for dependency_metadata in required_metadata:
+        dependency_dict = dependency_metadata.to_dict( value_mapper=value_mapper, view='element' )
+        dependency_link = ( metadata[ 'id' ], dependency_dict['id'] )
+        if dependency_link in processed_dependency_links:
+            continue
+        processed_dependency_links.append( dependency_link )
+        repository = db.query( app.model.Repository ).get( app.security.decode_id( dependency_dict[ 'repository_id' ] ) )
+        dependency_dict[ 'repository' ] = repository.to_dict( value_mapper=value_mapper )
+        if dependency_metadata.includes_tools:
+            dependency_dict[ 'tools' ] = dependency_metadata.metadata[ 'tools' ]
+        if dependency_dict['includes_tool_dependencies']:
+            dependency_dict['tool_dependencies'] = repository.get_tool_dependencies( app, dependency_dict['changeset_revision'] )
+        if dependency_dict['has_repository_dependencies']:
+            dependency_dict['repository_dependencies'] = get_all_dependencies( app, dependency_metadata, processed_dependency_links )
+        else:
+            dependency_dict['repository_dependencies'] = []
+        returned_dependencies.append( dependency_dict )
+    return returned_dependencies
+
+
+def get_current_repository_metadata_for_changeset_revision( app, repository, changeset_revision ):
+    encoded_repository_id = app.security.encode_id( repository.id )
+    repository_metadata = get_repository_metadata_by_changeset_revision( app,
+                                                                         encoded_repository_id,
+                                                                         changeset_revision )
+    if repository_metadata:
+        return repository_metadata
+    # The installable changeset_revision may have been changed because it was "moved ahead"
+    # in the repository changelog.
+    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    updated_changeset_revision = get_next_downloadable_changeset_revision( repository,
+                                                                           repo,
+                                                                           after_changeset_revision=changeset_revision )
+    if updated_changeset_revision and updated_changeset_revision != changeset_revision:
+        repository_metadata = get_repository_metadata_by_changeset_revision( app,
+                                                                             encoded_repository_id,
+                                                                             updated_changeset_revision )
+        if repository_metadata:
+            return repository_metadata
+    return None
+
+
+def get_dependencies_for_metadata_revision( app, metadata ):
+    dependencies = []
+    for shed, name, owner, changeset, prior, _ in metadata[ 'repository_dependencies' ]:
+        required_repository = tool_shed.util.repository_util.get_repository_by_name_and_owner( app, name, owner )
+        repo = hg_util.get_repo_for_repository( app, repository=required_repository, repo_path=None, create=False )
+        updated_changeset = get_next_downloadable_changeset_revision( required_repository, repo, changeset )
+        if updated_changeset is None:
+            continue
+        metadata_entry = get_repository_metadata_by_changeset_revision( app, app.security.encode_id( required_repository.id ), updated_changeset )
+        dependencies.append( metadata_entry )
+    return dependencies
+
+
+def get_latest_changeset_revision( app, repository, repo ):
+    repository_tip = repository.tip( app )
+    repository_metadata = get_repository_metadata_by_changeset_revision( app,
+                                                                         app.security.encode_id( repository.id ),
+                                                                         repository_tip )
+    if repository_metadata and repository_metadata.downloadable:
+        return repository_tip
+    changeset_revisions = [ revision[ 1 ] for revision in get_metadata_revisions( repository, repo ) ]
+    if changeset_revisions:
+        return changeset_revisions[ -1 ]
+    return hg_util.INITIAL_CHANGELOG_HASH
+
+
+def get_latest_downloadable_changeset_revision( app, repository, repo=None ):
+    if repo is None:
+        repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    repository_tip = repository.tip( app )
+    repository_metadata = get_repository_metadata_by_changeset_revision( app, app.security.encode_id( repository.id ), repository_tip )
+    if repository_metadata and repository_metadata.downloadable:
+        return repository_tip
+    changeset_revisions = [ revision[ 1 ] for revision in get_metadata_revisions( repository, repo ) ]
+    if changeset_revisions:
+        return changeset_revisions[ -1 ]
+    return hg_util.INITIAL_CHANGELOG_HASH
+
+
+def get_latest_repository_metadata( app, decoded_repository_id, downloadable=False ):
+    """Get last metadata defined for a specified repository from the database."""
+    sa_session = app.model.context.current
+    repository = sa_session.query( app.model.Repository ).get( decoded_repository_id )
+    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    if downloadable:
+        changeset_revision = get_latest_downloadable_changeset_revision( app, repository, repo )
+    else:
+        changeset_revision = get_latest_changeset_revision( app, repository, repo )
+    return get_repository_metadata_by_changeset_revision( app,
+                                                          app.security.encode_id( repository.id ),
+                                                          changeset_revision )
+
+
+def get_metadata_by_id( app, metadata_id ):
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.RepositoryMetadata ).filter( app.model.RepositoryMetadata.table.c.id == metadata_id ).one()
+
+
+def get_metadata_changeset_revisions( repository, repo ):
+    """
+    Return a sorted list of ( changeset rev, changeset_revision hash ) tuples for revisions that are defined as installable.
+    """
+    changeset_tups = []
+    for repository_metadata in repository.downloadable_revisions:
+        ctx = hg_util.get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
+        if ctx:
+            rev = ctx.rev()
+        else:
+            rev = -1
+        changeset_tups.append( ( rev, repository_metadata.changeset_revision ) )
+    return sorted( changeset_tups )
+
+
+def get_metadata_revisions( repository, repo, sort_revisions=True, reverse=False, downloadable=True ):
+    """
+    Return a list of ( changeset rev, changeset_revision hash ) tuples for the provided repository.
+    """
+    if downloadable:
+        metadata_revisions = repository.downloadable_revisions
+    else:
+        metadata_revisions = repository.metadata_revisions
+    changeset_tups = []
+    for repository_metadata in metadata_revisions:
+        ctx = hg_util.get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
+        if ctx:
+            rev = '%04d' % ctx.rev()
+        else:
+            rev = '-1'
+        changeset_tups.append( ( rev, repository_metadata.changeset_revision ) )
+    if sort_revisions:
+        changeset_tups.sort( key=itemgetter( 0 ), reverse=reverse )
+    return changeset_tups
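+
+# Example (hypothetical hashes):
+#     get_metadata_revisions( repository, repo )
+#     # might return [ ( '0001', 'aaa...' ), ( '0003', 'bbb...' ) ]: a list of
+#     # ( zero-padded changeset rev, changeset hash ) tuples sorted on rev.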
+
+
+def get_next_downloadable_changeset_revision( repository, repo, after_changeset_revision ):
+    """
+    Return the installable changeset_revision in the repository changelog after the changeset to which
+    after_changeset_revision refers.  If there isn't one, return None. If there is only one installable
+    changeset, and that matches the requested revision, return it.
+    """
+    changeset_revisions = [ revision[ 1 ] for revision in get_metadata_revisions( repository, repo ) ]
+    if len( changeset_revisions ) == 1:
+        changeset_revision = changeset_revisions[ 0 ]
+        if changeset_revision == after_changeset_revision:
+            return after_changeset_revision
+    found_after_changeset_revision = False
+    for changeset in repo.changelog:
+        changeset_revision = str( repo.changectx( changeset ) )
+        if found_after_changeset_revision:
+            if changeset_revision in changeset_revisions:
+                return changeset_revision
+        elif not found_after_changeset_revision and changeset_revision == after_changeset_revision:
+            # We've found the changeset in the changelog for which we need to get the next downloadable changeset.
+            found_after_changeset_revision = True
+    return None
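+
+# Example: if the downloadable revisions are [ 'aaa', 'bbb' ] (hypothetical hashes)
+# and the full changelog is aaa -> xxx -> bbb,
+#     get_next_downloadable_changeset_revision( repository, repo, 'aaa' )
+#     # returns 'bbb', the first downloadable revision after 'aaa'; it returns
+#     # None when no downloadable revision follows after_changeset_revision.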
+
+
+def get_previous_metadata_changeset_revision( repository, repo, before_changeset_revision, downloadable=True ):
+    """
+    Return the changeset_revision in the repository changelog that has associated metadata prior to
+    the changeset to which before_changeset_revision refers.  If there isn't one, return the hash value
+    of an empty repository changelog, hg_util.INITIAL_CHANGELOG_HASH.
+    """
+    changeset_revisions = [ revision[ 1 ] for revision in get_metadata_revisions( repository, repo ) ]
+    if len( changeset_revisions ) == 1:
+        changeset_revision = changeset_revisions[ 0 ]
+        if changeset_revision == before_changeset_revision:
+            return hg_util.INITIAL_CHANGELOG_HASH
+        return changeset_revision
+    previous_changeset_revision = None
+    for changeset_revision in changeset_revisions:
+        if changeset_revision == before_changeset_revision:
+            if previous_changeset_revision:
+                return previous_changeset_revision
+            else:
+                # Return the hash value of an empty repository changelog - note that this will not be a valid changeset revision.
+                return hg_util.INITIAL_CHANGELOG_HASH
+        else:
+            previous_changeset_revision = changeset_revision
+
+
+def get_repository_dependencies( app, metadata_id ):
+    '''Return a list of RepositoryDependency objects that specify the provided repository metadata record as the parent.'''
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.RepositoryDependency ).filter( app.model.RepositoryDependency.table.c.parent_metadata_id == metadata_id ).all()
+
+
+def get_repository_dependency_tups_from_repository_metadata( app, repository_metadata, deprecated_only=False ):
+    """
+    Return a list of tuples defining repository objects required by the received repository.  The returned
+    list defines the entire repository dependency tree.  This method is called only from the Tool Shed.
+    """
+    dependency_tups = []
+    if repository_metadata is not None:
+        metadata = repository_metadata.metadata
+        if metadata:
+            repository_dependencies_dict = metadata.get( 'repository_dependencies', None )
+            if repository_dependencies_dict is not None:
+                repository_dependency_tups = repository_dependencies_dict.get( 'repository_dependencies', None )
+                if repository_dependency_tups is not None:
+                    # The value of repository_dependency_tups is a list of repository dependency tuples like this:
+                    # ['http://localhost:9009', 'package_samtools_0_1_18', 'devteam', 'ef37fc635cb9', 'False', 'False']
+                    for repository_dependency_tup in repository_dependency_tups:
+                        toolshed, name, owner, changeset_revision, pir, oicct = \
+                            common_util.parse_repository_dependency_tuple( repository_dependency_tup )
+                        repository = tool_shed.util.repository_util.get_repository_by_name_and_owner( app, name, owner )
+                        if repository:
+                            if deprecated_only:
+                                if repository.deprecated:
+                                    dependency_tups.append( repository_dependency_tup )
+                            else:
+                                dependency_tups.append( repository_dependency_tup )
+                        else:
+                            log.debug( "Cannot locate repository %s owned by %s for inclusion in repository dependency tups." %
+                                ( name, owner ) )
+    return dependency_tups
+
+
+def get_repository_metadata_by_changeset_revision( app, id, changeset_revision ):
+    """Get metadata for a specified repository change set from the database."""
+    # Make sure there are no duplicate records, and return the single unique record for the changeset_revision.
+    # Duplicate records were somehow created in the past.  The cause of this issue has been resolved, but we'll
+    # leave this method as is for a while longer to ensure all duplicate records are removed.
+    sa_session = app.model.context.current
+    all_metadata_records = sa_session.query( app.model.RepositoryMetadata ) \
+                                     .filter( and_( app.model.RepositoryMetadata.table.c.repository_id == app.security.decode_id( id ),
+                                                    app.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+                                     .order_by( app.model.RepositoryMetadata.table.c.update_time.desc() ) \
+                                     .all()
+    if len( all_metadata_records ) > 1:
+        # Delete all records older than the last one updated.
+        for repository_metadata in all_metadata_records[ 1: ]:
+            sa_session.delete( repository_metadata )
+            sa_session.flush()
+        return all_metadata_records[ 0 ]
+    elif all_metadata_records:
+        return all_metadata_records[ 0 ]
+    return None
+
+
+def get_repository_metadata_by_id( app, id ):
+    """Get repository metadata from the database"""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.RepositoryMetadata ).get( app.security.decode_id( id ) )
+
+
+def get_repository_metadata_by_repository_id_changeset_revision( app, id, changeset_revision, metadata_only=False ):
+    """Get a specified metadata record for a specified repository in the tool shed."""
+    if metadata_only:
+        repository_metadata = get_repository_metadata_by_changeset_revision( app, id, changeset_revision )
+        if repository_metadata and repository_metadata.metadata:
+            return repository_metadata.metadata
+        return None
+    return get_repository_metadata_by_changeset_revision( app, id, changeset_revision )
+
+
+def get_repository_metadata_revisions_for_review( repository, reviewed=True ):
+    repository_metadata_revisions = []
+    metadata_changeset_revision_hashes = []
+    if reviewed:
+        for metadata_revision in repository.metadata_revisions:
+            metadata_changeset_revision_hashes.append( metadata_revision.changeset_revision )
+        for review in repository.reviews:
+            if review.changeset_revision in metadata_changeset_revision_hashes:
+                rmcr_hashes = [ rmr.changeset_revision for rmr in repository_metadata_revisions ]
+                if review.changeset_revision not in rmcr_hashes:
+                    repository_metadata_revisions.append( review.repository_metadata )
+    else:
+        for review in repository.reviews:
+            if review.changeset_revision not in metadata_changeset_revision_hashes:
+                metadata_changeset_revision_hashes.append( review.changeset_revision )
+        for metadata_revision in repository.metadata_revisions:
+            if metadata_revision.changeset_revision not in metadata_changeset_revision_hashes:
+                repository_metadata_revisions.append( metadata_revision )
+    return repository_metadata_revisions
+
+
+def get_updated_changeset_revisions( app, name, owner, changeset_revision ):
+    """
+    Return a string of comma-separated changeset revision hashes for all available updates to the received changeset
+    revision for the repository defined by the received name and owner.
+    """
+    repository = tool_shed.util.repository_util.get_repository_by_name_and_owner( app, name, owner )
+    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    # Get the upper bound changeset revision.
+    upper_bound_changeset_revision = get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+    # Build the list of changeset revision hashes defining each available update up to, but excluding
+    # upper_bound_changeset_revision.
+    changeset_hashes = []
+    for changeset in hg_util.reversed_lower_upper_bounded_changelog( repo, changeset_revision, upper_bound_changeset_revision ):
+        # Make sure to exclude upper_bound_changeset_revision.
+        if changeset != upper_bound_changeset_revision:
+            changeset_hashes.append( str( repo.changectx( changeset ) ) )
+    if changeset_hashes:
+        changeset_hashes_str = ','.join( changeset_hashes )
+        return changeset_hashes_str
+    return ''
+
+
+def get_updated_changeset_revisions_from_tool_shed( app, tool_shed_url, name, owner, changeset_revision ):
+    """
+    Get all appropriate newer changeset revisions for the repository defined by
+    the received tool_shed_url / name / owner combination.
+    """
+    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
+    if tool_shed_url is None or name is None or owner is None or changeset_revision is None:
+        message = "Unable to get updated changeset revisions from the Tool Shed because one or more of the following "
+        message += "required parameters is None: tool_shed_url: %s, name: %s, owner: %s, changeset_revision: %s " % \
+            ( str( tool_shed_url ), str( name ), str( owner ), str( changeset_revision ) )
+        raise Exception( message )
+    params = dict( name=name, owner=owner, changeset_revision=changeset_revision )
+    pathspec = [ 'repository', 'updated_changeset_revisions' ]
+    text = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+    return text
+
+
+def is_downloadable( metadata_dict ):
+    # NOTE: although repository README files are considered Galaxy utilities, they have no
+    # effect on determining if a revision is installable.  See the comments in the
+    # compare_readme_files() method.
+    if 'datatypes' in metadata_dict:
+        # We have proprietary datatypes.
+        return True
+    if 'repository_dependencies' in metadata_dict:
+        # We have repository_dependencies.
+        return True
+    if 'tools' in metadata_dict:
+        # We have tools.
+        return True
+    if 'tool_dependencies' in metadata_dict:
+        # We have tool dependencies, and perhaps only tool dependencies!
+        return True
+    if 'workflows' in metadata_dict:
+        # We have exported workflows.
+        return True
+    return False
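+
+# Example:
+#     is_downloadable( { 'tools': [] } )         # True - only the key's presence is checked.
+#     is_downloadable( { 'readme_files': [] } )  # False - README files alone do not
+#                                                # make a revision installable.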
+
+
+def is_malicious( app, id, changeset_revision, **kwd ):
+    """Check the malicious flag in repository metadata for a specified change set revision."""
+    repository_metadata = get_repository_metadata_by_changeset_revision( app, id, changeset_revision )
+    if repository_metadata:
+        return repository_metadata.malicious
+    return False
diff --git a/lib/tool_shed/util/readme_util.py b/lib/tool_shed/util/readme_util.py
new file mode 100644
index 0000000..59333f2
--- /dev/null
+++ b/lib/tool_shed/util/readme_util.py
@@ -0,0 +1,117 @@
+import json
+import logging
+import os
+import threading
+
+from mako.template import Template
+
+import tool_shed.util.shed_util_common as suc
+from galaxy import web
+from galaxy.util import rst_to_html, unicodify, url_get
+from tool_shed.util import basic_util
+from tool_shed.util import common_util
+from tool_shed.util import hg_util
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_util
+
+log = logging.getLogger( __name__ )
+
+
+def build_readme_files_dict( app, repository, changeset_revision, metadata, tool_path=None ):
+    """
+    Return a dictionary of valid readme file name <-> readme file content pairs for all readme files defined in the received metadata.  Since the
+    received changeset_revision (which is associated with the received metadata) may not be the latest installable changeset revision, the README
+    file contents may not be available on disk.  This method is used by both Galaxy and the Tool Shed.
+    """
+    if app.name == 'galaxy':
+        can_use_disk_files = True
+    else:
+        repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+        latest_downloadable_changeset_revision = metadata_util.get_latest_downloadable_changeset_revision( app, repository, repo )
+        can_use_disk_files = changeset_revision == latest_downloadable_changeset_revision
+    readme_files_dict = {}
+    if metadata:
+        if 'readme_files' in metadata:
+            for relative_path_to_readme_file in metadata[ 'readme_files' ]:
+                readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ]
+                if can_use_disk_files:
+                    if tool_path:
+                        full_path_to_readme_file = os.path.abspath( os.path.join( tool_path, relative_path_to_readme_file ) )
+                    else:
+                        full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
+                    text = None
+                    try:
+                        f = open( full_path_to_readme_file, 'r' )
+                        text = unicodify( f.read() )
+                        f.close()
+                    except Exception as e:
+                        log.exception( "Error reading README file '%s' from disk: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
+                        text = None
+                    if text:
+                        text_of_reasonable_length = basic_util.size_string( text )
+                        if text_of_reasonable_length.find( '.. image:: ' ) >= 0:
+                            # Handle image display for README files that are contained in repositories in the tool shed or installed into Galaxy.
+                            lock = threading.Lock()
+                            lock.acquire( True )
+                            try:
+                                text_of_reasonable_length = suc.set_image_paths( app,
+                                                                                 app.security.encode_id( repository.id ),
+                                                                                 text_of_reasonable_length )
+                            except Exception as e:
+                                log.exception( "Exception in build_readme_files_dict, so images may not be properly displayed:\n%s" % str( e ) )
+                            finally:
+                                lock.release()
+                        if readme_file_name.endswith( '.rst' ):
+                            text_of_reasonable_length = Template( rst_to_html( text_of_reasonable_length ),
+                                                                  input_encoding='utf-8',
+                                                                  output_encoding='utf-8',
+                                                                  default_filters=[ 'decode.utf8' ],
+                                                                  encoding_errors='replace' )
+                            text_of_reasonable_length = text_of_reasonable_length.render( static_path=web.url_for( '/static' ),
+                                                                                          host_url=web.url_for( '/', qualified=True ) )
+                            text_of_reasonable_length = unicodify( text_of_reasonable_length )
+                        else:
+                            text_of_reasonable_length = basic_util.to_html_string( text_of_reasonable_length )
+                        readme_files_dict[ readme_file_name ] = text_of_reasonable_length
+                else:
+                    # We must be in the tool shed and have an old changeset_revision, so we need to retrieve the file contents from the repository manifest.
+                    ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
+                    if ctx:
+                        fctx = hg_util.get_file_context_from_ctx( ctx, readme_file_name )
+                        if fctx and fctx not in [ 'DELETED' ]:
+                            try:
+                                text = unicodify( fctx.data() )
+                                readme_files_dict[ readme_file_name ] = basic_util.size_string( text )
+                            except Exception as e:
+                                log.exception( "Error reading README file '%s' from repository manifest: %s" %
+                                               ( str( relative_path_to_readme_file ), str( e ) ) )
+    return readme_files_dict
+
+
+def get_readme_files_dict_for_display( app, tool_shed_url, repo_info_dict ):
+    """
+    Return a dictionary of README files contained in the single repository being installed so they can be displayed on the tool panel section
+    selection page.
+    """
+    name = next(iter(repo_info_dict))
+    repo_info_tuple = repo_info_dict[ name ]
+    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
+        repository_util.get_repo_info_tuple_contents( repo_info_tuple )
+    # Handle changing HTTP protocols over time.
+    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
+    params = dict( name=name, owner=repository_owner, changeset_revision=changeset_revision )
+    pathspec = [ 'repository', 'get_readme_files' ]
+    raw_text = url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+    readme_files_dict = json.loads( raw_text )
+    return readme_files_dict
+
+
+def get_readme_file_names( repository_name ):
+    """Return a list of file names that will be categorized as README files for the received repository_name."""
+    readme_files = [ 'readme', 'read_me', 'install' ]
+    valid_filenames = ['%s.txt' % f for f in readme_files]
+    valid_filenames.extend( ['%s.rst' % f for f in readme_files] )
+    valid_filenames.extend( readme_files )
+    valid_filenames.append( '%s.txt' % repository_name )
+    valid_filenames.append( '%s.rst' % repository_name )
+    return valid_filenames
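+
+# Example (the repository name 'bwa' is hypothetical):
+#     get_readme_file_names( 'bwa' )
+#     # returns [ 'readme.txt', 'read_me.txt', 'install.txt',
+#     #           'readme.rst', 'read_me.rst', 'install.rst',
+#     #           'readme', 'read_me', 'install', 'bwa.txt', 'bwa.rst' ]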
diff --git a/lib/tool_shed/util/repository_content_util.py b/lib/tool_shed/util/repository_content_util.py
new file mode 100644
index 0000000..15e9f59
--- /dev/null
+++ b/lib/tool_shed/util/repository_content_util.py
@@ -0,0 +1,64 @@
+import os
+import shutil
+
+import tool_shed.repository_types.util as rt_util
+from tool_shed.util import commit_util, hg_util, xml_util
+
+
+def upload_tar( trans, rdah, tdah, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar,
+                commit_message, new_repo_alert ):
+    # Upload a tar archive of files.
+    repo_dir = repository.repo_path( trans.app )
+    hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
+    undesirable_dirs_removed = 0
+    undesirable_files_removed = 0
+    check_results = commit_util.check_archive( repository, tar )
+    if check_results.invalid:
+        tar.close()
+        uploaded_file.close()
+        message = '%s Invalid paths were: %s' % (
+            ' '.join( check_results.errors ), ', '.join( check_results.invalid ) )
+        return False, message, [], '', undesirable_dirs_removed, undesirable_files_removed
+    else:
+        if upload_point is not None:
+            full_path = os.path.abspath( os.path.join( repo_dir, upload_point ) )
+        else:
+            full_path = os.path.abspath( repo_dir )
+        undesirable_files_removed = len( check_results.undesirable_files )
+        undesirable_dirs_removed = len( check_results.undesirable_dirs )
+        filenames_in_archive = [ ti.name for ti in check_results.valid ]
+        # Extract the uploaded tar to the load_point within the repository hierarchy.
+        tar.extractall( path=full_path, members=check_results.valid )
+        tar.close()
+        uploaded_file.close()
+        for filename in filenames_in_archive:
+            uploaded_file_name = os.path.join( full_path, filename )
+            if os.path.split( uploaded_file_name )[ -1 ] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
+                # Inspect the contents of the file to see if toolshed or changeset_revision attributes
+                # are missing and if so, set them appropriately.
+                altered, root_elem, error_message = rdah.handle_tag_attributes( uploaded_file_name )
+                if error_message:
+                    return False, error_message, [], '', undesirable_dirs_removed, undesirable_files_removed
+                elif altered:
+                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
+                    shutil.move( tmp_filename, uploaded_file_name )
+            elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
+                # Inspect the contents of the file to see if toolshed or changeset_revision
+                # attributes are missing and if so, set them appropriately.
+                altered, root_elem, error_message = tdah.handle_tag_attributes( uploaded_file_name )
+                if error_message:
+                    return False, error_message, [], '', undesirable_dirs_removed, undesirable_files_removed
+                if altered:
+                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
+                    shutil.move( tmp_filename, uploaded_file_name )
+        return commit_util.handle_directory_changes( trans.app,
+                                                     trans.request.host,
+                                                     trans.user.username,
+                                                     repository,
+                                                     full_path,
+                                                     filenames_in_archive,
+                                                     remove_repo_files_not_in_tar,
+                                                     new_repo_alert,
+                                                     commit_message,
+                                                     undesirable_dirs_removed,
+                                                     undesirable_files_removed )
diff --git a/lib/tool_shed/util/repository_util.py b/lib/tool_shed/util/repository_util.py
new file mode 100644
index 0000000..b25b001
--- /dev/null
+++ b/lib/tool_shed/util/repository_util.py
@@ -0,0 +1,1043 @@
+import logging
+import os
+import re
+import shutil
+
+from six.moves import configparser
+from six.moves.urllib.error import HTTPError
+from sqlalchemy import and_, false, or_
+
+import tool_shed.dependencies.repository
+import tool_shed.util.metadata_util as metadata_util
+from galaxy import util
+from galaxy import web
+from galaxy.web.form_builder import build_select_field
+from tool_shed.util import basic_util, common_util, encoding_util, hg_util
+from tool_shed.util.web_util import escape
+
+log = logging.getLogger( __name__ )
+
+VALID_REPOSITORYNAME_RE = re.compile( "^[a-z0-9_]+$" )
+
+
+def build_allow_push_select_field( trans, current_push_list, selected_value='none' ):
+    options = []
+    for user in trans.sa_session.query( trans.model.User ):
+        if user.username not in current_push_list:
+            options.append( user )
+    return build_select_field( trans,
+                               objs=options,
+                               label_attr='username',
+                               select_field_name='allow_push',
+                               selected_value=selected_value,
+                               refresh_on_change=False,
+                               multiple=True )
+
+
+def change_repository_name_in_hgrc_file( hgrc_file, new_name ):
+    config = configparser.ConfigParser()
+    config.read( hgrc_file )
+    config.set( 'web', 'name', new_name )
+    new_file = open( hgrc_file, 'wb' )
+    config.write( new_file )
+    new_file.close()
+
+
+def check_for_updates( app, model, repository_id=None ):
+    message = ''
+    status = 'ok'
+    if repository_id is None:
+        success_count = 0
+        repository_names_not_updated = []
+        updated_count = 0
+        for repository in model.context.query( model.ToolShedRepository ) \
+                                       .filter( model.ToolShedRepository.table.c.deleted == false() ):
+            ok, updated = \
+                check_or_update_tool_shed_status_for_installed_repository( app, repository )
+            if ok:
+                success_count += 1
+            else:
+                repository_names_not_updated.append( '<b>%s</b>' % escape( str( repository.name ) ) )
+            if updated:
+                updated_count += 1
+        message = "Checked the status in the tool shed for %d repositories.  " % success_count
+        message += "Updated the tool shed status for %d repositories.  " % updated_count
+        if repository_names_not_updated:
+            message += "Unable to retrieve status from the tool shed for the following repositories:\n"
+            message += ", ".join( repository_names_not_updated )
+    else:
+        repository = get_tool_shed_repository_by_id( app, repository_id )
+        ok, updated = \
+            check_or_update_tool_shed_status_for_installed_repository( app, repository )
+        if ok:
+            if updated:
+                message = "The tool shed status for repository <b>%s</b> has been updated." % escape( str( repository.name ) )
+            else:
+                message = "The status has not changed in the tool shed for repository <b>%s</b>." % escape( str( repository.name ) )
+        else:
+            message = "Unable to retrieve status from the tool shed for repository <b>%s</b>." % escape( str( repository.name ) )
+            status = 'error'
+    return message, status
+
+
+def check_or_update_tool_shed_status_for_installed_repository( app, repository ):
+    updated = False
+    tool_shed_status_dict = get_tool_shed_status_for_installed_repository( app, repository )
+    if tool_shed_status_dict:
+        ok = True
+        if tool_shed_status_dict != repository.tool_shed_status:
+            repository.tool_shed_status = tool_shed_status_dict
+            app.install_model.context.add( repository )
+            app.install_model.context.flush()
+            updated = True
+    else:
+        ok = False
+    return ok, updated
+
+
+def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url,
+                                           metadata_dict, status, current_changeset_revision=None, owner='', dist_to_shed=False ):
+    """
+    Update a tool shed repository record in the Galaxy database with the new information received.
+    If a record defined by the received tool shed, repository name and owner does not exist, create
+    a new record with the received information.
+    """
+    # The received value for dist_to_shed will be True if the ToolMigrationManager is installing a repository
+    # that contains tools or datatypes that used to be in the Galaxy distribution, but have been moved
+    # to the main Galaxy tool shed.
+    if current_changeset_revision is None:
+        # The current_changeset_revision is not passed if a repository is being installed for the first
+        # time.  If a previously installed repository was later uninstalled, this value should be received
+        # as the changeset revision to which the repository had been updated just prior to being
+        # uninstalled.
+        current_changeset_revision = installed_changeset_revision
+    context = app.install_model.context
+    tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+    if not owner:
+        owner = get_repository_owner_from_clone_url( repository_clone_url )
+    includes_datatypes = 'datatypes' in metadata_dict
+    if status in [ app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+        deleted = True
+        uninstalled = False
+    elif status in [ app.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+        deleted = True
+        uninstalled = True
+    else:
+        deleted = False
+        uninstalled = False
+    tool_shed_repository = \
+        get_installed_repository( app, tool_shed=tool_shed, name=name, owner=owner, installed_changeset_revision=installed_changeset_revision )
+    if tool_shed_repository:
+        log.debug( "Updating an existing row for repository '%s' in the tool_shed_repository table, status set to '%s'." %
+                   ( str( name ), str( status ) ) )
+        tool_shed_repository.description = description
+        tool_shed_repository.changeset_revision = current_changeset_revision
+        tool_shed_repository.ctx_rev = ctx_rev
+        tool_shed_repository.metadata = metadata_dict
+        tool_shed_repository.includes_datatypes = includes_datatypes
+        tool_shed_repository.deleted = deleted
+        tool_shed_repository.uninstalled = uninstalled
+        tool_shed_repository.status = status
+    else:
+        log.debug( "Adding new row for repository '%s' in the tool_shed_repository table, status set to '%s'." %
+                   ( str( name ), str( status ) ) )
+        tool_shed_repository = \
+            app.install_model.ToolShedRepository( tool_shed=tool_shed,
+                                                  name=name,
+                                                  description=description,
+                                                  owner=owner,
+                                                  installed_changeset_revision=installed_changeset_revision,
+                                                  changeset_revision=current_changeset_revision,
+                                                  ctx_rev=ctx_rev,
+                                                  metadata=metadata_dict,
+                                                  includes_datatypes=includes_datatypes,
+                                                  dist_to_shed=dist_to_shed,
+                                                  deleted=deleted,
+                                                  uninstalled=uninstalled,
+                                                  status=status )
+    context.add( tool_shed_repository )
+    context.flush()
+    return tool_shed_repository
+
+
+def create_repo_info_dict( app, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None,
+                           repository=None, repository_metadata=None, tool_dependencies=None, repository_dependencies=None ):
+    """
+    Return a dictionary that includes all of the information needed to install a repository into a local
+    Galaxy instance.  The dictionary will also contain the recursive list of repository dependencies defined
+    for the repository, as well as the defined tool dependencies.
+
+    This method is called from Galaxy under four scenarios:
+    1. During the tool shed repository installation process via the tool shed's get_repository_information()
+    method.  In this case both the received repository and repository_metadata will be objects, but
+    tool_dependencies and repository_dependencies will be None.
+    2. When getting updates for an installed repository where the updates include newly defined repository
+    dependency definitions.  This scenario is similar to 1. above. The tool shed's get_repository_information()
+    method is the caller, and both the received repository and repository_metadata will be objects, but
+    tool_dependencies and repository_dependencies will be None.
+    3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
+    updates available.  In this case, both repository and repository_metadata will be None, but tool_dependencies
+    and repository_dependencies will be objects previously retrieved from the tool shed if the repository includes
+    definitions for them.
+    4. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates
+    available.  In this case, this method is reached via the tool shed's get_updated_repository_information()
+    method, and both repository and repository_metadata will be objects but tool_dependencies and
+    repository_dependencies will be None.
+    """
+    repo_info_dict = {}
+    repository = get_repository_by_name_and_owner( app, repository_name, repository_owner )
+    if app.name == 'tool_shed':
+        # We're in the tool shed.
+        repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( app,
+                                                                                           app.security.encode_id( repository.id ),
+                                                                                           changeset_revision )
+        if repository_metadata:
+            metadata = repository_metadata.metadata
+            if metadata:
+                tool_shed_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+                rb = tool_shed.dependencies.repository.relation_builder.RelationBuilder( app, repository, repository_metadata, tool_shed_url )
+                # Get a dictionary of all repositories upon which the contents of the received repository depends.
+                repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
+                tool_dependencies = metadata.get( 'tool_dependencies', {} )
+    if tool_dependencies:
+        new_tool_dependencies = {}
+        for dependency_key, requirements_dict in tool_dependencies.items():
+            if dependency_key in [ 'set_environment' ]:
+                new_set_environment_dict_list = []
+                for set_environment_dict in requirements_dict:
+                    set_environment_dict[ 'repository_name' ] = repository_name
+                    set_environment_dict[ 'repository_owner' ] = repository_owner
+                    set_environment_dict[ 'changeset_revision' ] = changeset_revision
+                    new_set_environment_dict_list.append( set_environment_dict )
+                new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list
+            else:
+                requirements_dict[ 'repository_name' ] = repository_name
+                requirements_dict[ 'repository_owner' ] = repository_owner
+                requirements_dict[ 'changeset_revision' ] = changeset_revision
+                new_tool_dependencies[ dependency_key ] = requirements_dict
+        tool_dependencies = new_tool_dependencies
+    # Cast unicode to string, with the exception of description, since it is free text and can contain special characters.
+    repo_info_dict[ str( repository.name ) ] = ( repository.description,
+                                                 str( repository_clone_url ),
+                                                 str( changeset_revision ),
+                                                 str( ctx_rev ),
+                                                 str( repository_owner ),
+                                                 repository_dependencies,
+                                                 tool_dependencies )
+    return repo_info_dict
+
+
+def create_repository_admin_role( app, repository ):
+    """
+    Create a new role with a name-spaced name based on the repository name and its owner's public user
+    name.  This will ensure that the role name is unique.
+    """
+    sa_session = app.model.context.current
+    name = get_repository_admin_role_name( str( repository.name ), str( repository.user.username ) )
+    description = 'A user or group member with this role can administer this repository.'
+    role = app.model.Role( name=name, description=description, type=app.model.Role.types.SYSTEM )
+    sa_session.add( role )
+    sa_session.flush()
+    # Associate the role with the repository owner.
+    app.model.UserRoleAssociation( repository.user, role )
+    # Associate the role with the repository.
+    rra = app.model.RepositoryRoleAssociation( repository, role )
+    sa_session.add( rra )
+    sa_session.flush()
+    return role
+
+
+def create_repository( app, name, type, description, long_description, user_id, category_ids=[], remote_repository_url=None, homepage_url=None ):
+    """Create a new ToolShed repository"""
+    sa_session = app.model.context.current
+    # Add the repository record to the database.
+    repository = app.model.Repository( name=name,
+                                       type=type,
+                                       remote_repository_url=remote_repository_url,
+                                       homepage_url=homepage_url,
+                                       description=description,
+                                       long_description=long_description,
+                                       user_id=user_id )
+    # Flush to get the id.
+    sa_session.add( repository )
+    sa_session.flush()
+    # Create an admin role for the repository.
+    create_repository_admin_role( app, repository )
+    # Determine the repository's repo_path on disk.
+    dir = os.path.join( app.config.file_path, *util.directory_hash_id( repository.id ) )
+    # Create directory if it does not exist.
+    if not os.path.exists( dir ):
+        os.makedirs( dir )
+    # Define repo name inside hashed directory.
+    repository_path = os.path.join( dir, "repo_%d" % repository.id )
+    # Create local repository directory.
+    if not os.path.exists( repository_path ):
+        os.makedirs( repository_path )
+    # Create the local repository.
+    hg_util.get_repo_for_repository( app, repository=None, repo_path=repository_path, create=True )
+    # Add an entry in the hgweb.config file for the local repository.
+    lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
+    app.hgweb_config_manager.add_entry( lhs, repository_path )
+    # Create a .hg/hgrc file for the local repository.
+    hg_util.create_hgrc_file( app, repository )
+    flush_needed = False
+    if category_ids:
+        # Create category associations
+        for category_id in category_ids:
+            category = sa_session.query( app.model.Category ) \
+                                 .get( app.security.decode_id( category_id ) )
+            rca = app.model.RepositoryCategoryAssociation( repository, category )
+            sa_session.add( rca )
+            flush_needed = True
+    if flush_needed:
+        sa_session.flush()
+    # Update the repository registry.
+    app.repository_registry.add_entry( repository )
+    message = "Repository <b>%s</b> has been created." % escape( str( repository.name ) )
+    return repository, message
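+
+# A usage sketch with hypothetical values (assumes a valid tool shed `app` and
+# an existing user record):
+#
+#     repository, message = create_repository( app,
+#                                               name='fastqc_wrapper',
+#                                               type='unrestricted',
+#                                               description='Short synopsis',
+#                                               long_description='Detailed description',
+#                                               user_id=user.id )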
+
+
+def extract_components_from_tuple( repository_components_tuple ):
+    """Extract the repository components from the provided tuple in a backward-compatible manner."""
+    toolshed = repository_components_tuple[ 0 ]
+    name = repository_components_tuple[ 1 ]
+    owner = repository_components_tuple[ 2 ]
+    changeset_revision = repository_components_tuple[ 3 ]
+    components_list = [ toolshed, name, owner, changeset_revision ]
+    if len( repository_components_tuple ) == 5:
+        toolshed, name, owner, changeset_revision, prior_installation_required = repository_components_tuple
+        components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
+    elif len( repository_components_tuple ) == 6:
+        toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = repository_components_tuple
+        components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ]
+    return components_list
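+
+# Illustrative behavior with hypothetical tuples of each supported length:
+#
+#     extract_components_from_tuple( ( 'toolshed.example.org', 'fastqc', 'alice', 'abc123' ) )
+#     # -> [ 'toolshed.example.org', 'fastqc', 'alice', 'abc123' ]
+#     extract_components_from_tuple( ( 'toolshed.example.org', 'fastqc', 'alice', 'abc123', 'True', 'False' ) )
+#     # -> the same four components plus prior_installation_required and only_if_compiling_contained_td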
+
+
+def generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision=None ):
+    """Generate the URL for sharing a repository that is in the tool shed."""
+    base_url = web.url_for( '/', qualified=True ).rstrip( '/' )
+    protocol, base = base_url.split( '://' )
+    sharable_url = '%s://%s/view/%s/%s' % ( protocol, base, repository.user.username, repository.name )
+    if changeset_revision:
+        sharable_url += '/%s' % changeset_revision
+    return sharable_url
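+
+# For example (hypothetical values), with a tool shed served at
+# https://toolshed.example.org, owner "alice" and repository "fastqc":
+#
+#     generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision='abc123' )
+#     # -> 'https://toolshed.example.org/view/alice/fastqc/abc123'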
+
+
+def generate_tool_shed_repository_install_dir( repository_clone_url, changeset_revision ):
+    """
+    Generate a repository installation directory that guarantees repositories with the same
+    name will always be installed in different directories.  The tool path will be of the form:
+    <tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision>
+    """
+    tmp_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url )
+    # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column
+    items = tmp_url.split( '/repos/' )
+    tool_shed_url = items[ 0 ]
+    repo_path = items[ 1 ]
+    tool_shed_url = common_util.remove_port_from_tool_shed_url( tool_shed_url )
+    return '/'.join( [ tool_shed_url, 'repos', repo_path, changeset_revision ] )
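+
+# A worked example with illustrative values: for
+# repository_clone_url = 'http://bx.psu.edu:9009/repos/some_username/column' and
+# changeset_revision = 'abc123', the protocol and port are stripped and the
+# result is 'bx.psu.edu/repos/some_username/column/abc123'.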
+
+
+def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ):
+    """Return the absolute path to a specified disk file contained in a repository."""
+    stripped_file_name = basic_util.strip_path( file_name )
+    file_path = None
+    for root, dirs, files in os.walk( repo_files_dir ):
+        if root.find( '.hg' ) < 0:
+            for name in files:
+                if name == stripped_file_name:
+                    return os.path.abspath( os.path.join( root, name ) )
+    return file_path
+
+
+def get_ids_of_tool_shed_repositories_being_installed( app, as_string=False ):
+    installing_repository_ids = []
+    new_status = app.install_model.ToolShedRepository.installation_status.NEW
+    cloning_status = app.install_model.ToolShedRepository.installation_status.CLONING
+    setting_tool_versions_status = app.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS
+    installing_dependencies_status = app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES
+    loading_datatypes_status = app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
+    for tool_shed_repository in \
+        app.install_model.context.query( app.install_model.ToolShedRepository ) \
+                                 .filter( or_( app.install_model.ToolShedRepository.status == new_status,
+                                               app.install_model.ToolShedRepository.status == cloning_status,
+                                               app.install_model.ToolShedRepository.status == setting_tool_versions_status,
+                                               app.install_model.ToolShedRepository.status == installing_dependencies_status,
+                                               app.install_model.ToolShedRepository.status == loading_datatypes_status ) ):
+        installing_repository_ids.append( app.security.encode_id( tool_shed_repository.id ) )
+    if as_string:
+        return ','.join( installing_repository_ids )
+    return installing_repository_ids
+
+
+def get_installed_repository( app, tool_shed, name, owner, changeset_revision=None, installed_changeset_revision=None ):
+    """
+    Return a tool shed repository database record defined by the combination of a toolshed, repository name,
+    repository owner and either current or originally installed changeset_revision.
+    """
+    query = app.install_model.context.query( app.install_model.ToolShedRepository )
+    # We store the port, if one exists, in the database.
+    tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
+    clause_list = [ app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
+                    app.install_model.ToolShedRepository.table.c.name == name,
+                    app.install_model.ToolShedRepository.table.c.owner == owner ]
+    if changeset_revision is not None:
+        clause_list.append( app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision )
+    if installed_changeset_revision is not None:
+        clause_list.append( app.install_model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision )
+    return query.filter( and_( *clause_list ) ).first()
+
+
+def get_installed_tool_shed_repository( app, id ):
+    """Get a tool shed repository record from the Galaxy database defined by the id."""
+    rval = []
+    if isinstance( id, list ):
+        return_list = True
+    else:
+        id = [ id ]
+        return_list = False
+    for i in id:
+        rval.append( app.install_model.context.query( app.install_model.ToolShedRepository ).get( app.security.decode_id( i ) ) )
+    if return_list:
+        return rval
+    return rval[0]
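+
+# Illustrative behavior (hypothetical encoded ids): a single id returns a single
+# record, while a list of ids returns a list of records.
+#
+#     get_installed_tool_shed_repository( app, 'f2db41e1fa331b3e' )
+#     get_installed_tool_shed_repository( app, [ 'f2db41e1fa331b3e', 'f597429621d6eb2b' ] )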
+
+
+def get_prior_import_or_install_required_dict( app, tsr_ids, repo_info_dicts ):
+    """
+    This method is used in the Tool Shed when exporting a repository and its dependencies,
+    and in Galaxy when a repository and its dependencies are being installed.  Return a
+    dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids,
+    each of which is contained in the received list of tsr_ids and whose associated repository
+    must be imported or installed prior to the repository associated with the tsr_id key.
+    """
+    # Initialize the dictionary.
+    prior_import_or_install_required_dict = {}
+    for tsr_id in tsr_ids:
+        prior_import_or_install_required_dict[ tsr_id ] = []
+    # Inspect the repository dependencies for each repository about to be installed and populate the dictionary.
+    for repo_info_dict in repo_info_dicts:
+        repository, repository_dependencies = get_repository_and_repository_dependencies_from_repo_info_dict( app, repo_info_dict )
+        if repository:
+            encoded_repository_id = app.security.encode_id( repository.id )
+            if encoded_repository_id in tsr_ids:
+                # We've located the database table record for one of the repositories we're about to install, so find out if it has any repository
+                # dependencies that require prior installation.
+                prior_import_or_install_ids = get_repository_ids_requiring_prior_import_or_install( app, tsr_ids, repository_dependencies )
+                prior_import_or_install_required_dict[ encoded_repository_id ] = prior_import_or_install_ids
+    return prior_import_or_install_required_dict
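+
+# Illustrative return shape (hypothetical encoded ids): for tsr_ids [ 'aaa', 'bbb' ]
+# where the repository encoded as 'bbb' must be imported or installed before 'aaa',
+# the result is { 'aaa': [ 'bbb' ], 'bbb': [] }.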
+
+
+def get_repo_info_dict( app, user, repository_id, changeset_revision ):
+    repository = get_repository_in_tool_shed( app, repository_id )
+    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( user, repository )
+    repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( app,
+                                                                                       repository_id,
+                                                                                       changeset_revision )
+    if not repository_metadata:
+        # The received changeset_revision is no longer installable, so get the next changeset_revision
+        # in the repository's changelog.  This generally occurs only with repositories of type
+        # repository_suite_definition or tool_dependency_definition.
+        next_downloadable_changeset_revision = \
+            metadata_util.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+        if next_downloadable_changeset_revision and next_downloadable_changeset_revision != changeset_revision:
+            repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( app,
+                                                                                               repository_id,
+                                                                                               next_downloadable_changeset_revision )
+    if repository_metadata:
+        # For now, we'll always assume that we'll get repository_metadata, but if we discover our assumption
+        # is not valid we'll have to enhance the callers to handle repository_metadata values of None in the
+        # returned repo_info_dict.
+        metadata = repository_metadata.metadata
+        includes_tools = 'tools' in metadata
+        includes_tools_for_display_in_tool_panel = repository_metadata.includes_tools_for_display_in_tool_panel
+        repository_dependencies_dict = metadata.get( 'repository_dependencies', {} )
+        repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+        has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
+            get_repository_dependency_types( repository_dependencies )
+        includes_tool_dependencies = 'tool_dependencies' in metadata
+    else:
+        # Here's where we may have to handle enhancements to the callers. See above comment.
+        includes_tools = False
+        has_repository_dependencies = False
+        has_repository_dependencies_only_if_compiling_contained_td = False
+        includes_tool_dependencies = False
+        includes_tools_for_display_in_tool_panel = False
+    ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
+    repo_info_dict = create_repo_info_dict( app=app,
+                                            repository_clone_url=repository_clone_url,
+                                            changeset_revision=changeset_revision,
+                                            ctx_rev=str( ctx.rev() ),
+                                            repository_owner=repository.user.username,
+                                            repository_name=repository.name,
+                                            repository=repository,
+                                            repository_metadata=repository_metadata,
+                                            tool_dependencies=None,
+                                            repository_dependencies=None )
+    return repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \
+        has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
+
+
+def get_repo_info_tuple_contents( repo_info_tuple ):
+    """Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced."""
+    if len( repo_info_tuple ) == 6:
+        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+        repository_dependencies = None
+    elif len( repo_info_tuple ) == 7:
+        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+    return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies
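+
+# Sketch of the two supported tuple shapes (hypothetical element names):
+#
+#     six_tuple = ( description, clone_url, changeset_revision, ctx_rev, owner, tool_dependencies )
+#     seven_tuple = ( description, clone_url, changeset_revision, ctx_rev, owner,
+#                     repository_dependencies, tool_dependencies )
+#     # Both forms yield a 7-tuple; for the six-element form, repository_dependencies is None.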
+
+
+def get_repositories_by_category( app, category_id ):
+    sa_session = app.model.context.current
+    resultset = sa_session.query( app.model.Category ).get( category_id )
+    repositories = []
+    default_value_mapper = { 'id': app.security.encode_id, 'user_id': app.security.encode_id }
+    for row in resultset.repositories:
+        repository_dict = row.repository.to_dict( value_mapper=default_value_mapper )
+        repository_dict[ 'metadata' ] = {}
+        for changeset, changehash in row.repository.installable_revisions( app ):
+            encoded_id = app.security.encode_id( row.repository.id )
+            metadata = metadata_util.get_repository_metadata_by_changeset_revision( app, encoded_id, changehash )
+            repository_dict[ 'metadata' ][ '%s:%s' % ( changeset, changehash ) ] = metadata.to_dict( value_mapper=default_value_mapper )
+        repositories.append( repository_dict )
+    return repositories
+
+
+def get_repository_admin_role_name( repository_name, repository_owner ):
+    return '%s_%s_admin' % ( str( repository_name ), str( repository_owner ) )
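+
+# e.g. (hypothetical values) get_repository_admin_role_name( 'fastqc', 'alice' ) -> 'fastqc_alice_admin'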
+
+
+def get_repository_and_repository_dependencies_from_repo_info_dict( app, repo_info_dict ):
+    """Return a tool_shed_repository or repository record defined by the information in the received repo_info_dict."""
+    repository_name = list(repo_info_dict.keys())[ 0 ]
+    repo_info_tuple = repo_info_dict[ repository_name ]
+    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+        get_repo_info_tuple_contents( repo_info_tuple )
+    if hasattr( app, "install_model" ):
+        # We are in a tool shed client (i.e. Galaxy, or another application that installs repositories from a tool shed).
+        tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+        repository = get_repository_for_dependency_relationship( app, tool_shed, repository_name, repository_owner, changeset_revision )
+    else:
+        # We're in the tool shed.
+        repository = get_repository_by_name_and_owner( app, repository_name, repository_owner )
+    return repository, repository_dependencies
+
+
+def get_repository_by_id( app, id ):
+    """Get a repository from the database via id."""
+    if is_tool_shed_client( app ):
+        return app.install_model.context.query( app.install_model.ToolShedRepository ).get( app.security.decode_id( id ) )
+    else:
+        sa_session = app.model.context.current
+        return sa_session.query( app.model.Repository ).get( app.security.decode_id( id ) )
+
+
+def get_repository_by_name_and_owner( app, name, owner ):
+    """Get a repository from the database via name and owner"""
+    repository_query = get_repository_query( app )
+    if is_tool_shed_client( app ):
+        return repository_query \
+            .filter( and_( app.install_model.ToolShedRepository.table.c.name == name,
+                           app.install_model.ToolShedRepository.table.c.owner == owner ) ) \
+            .first()
+    # We're in the tool shed.
+    user = common_util.get_user_by_username( app, owner )
+    if user:
+        return repository_query \
+            .filter( and_( app.model.Repository.table.c.name == name,
+                           app.model.Repository.table.c.user_id == user.id ) ) \
+            .first()
+    return None
+
+
+def get_repository_by_name( app, name ):
+    """Get a repository from the database via name."""
+    return get_repository_query( app ).filter_by( name=name ).first()
+
+
+def get_repository_dependency_types( repository_dependencies ):
+    """
+    Inspect the received list of repository_dependencies tuples and return boolean values
+    for has_repository_dependencies and has_repository_dependencies_only_if_compiling_contained_td.
+    """
+    # Set has_repository_dependencies, which will be True only if at least one repository_dependency
+    # is defined with the value of only_if_compiling_contained_td as False.
+    has_repository_dependencies = False
+    for rd_tup in repository_dependencies:
+        tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+            common_util.parse_repository_dependency_tuple( rd_tup )
+        if not util.asbool( only_if_compiling_contained_td ):
+            has_repository_dependencies = True
+            break
+    # Set has_repository_dependencies_only_if_compiling_contained_td, which will be True only if at
+    # least one repository_dependency is defined with the value of only_if_compiling_contained_td as True.
+    has_repository_dependencies_only_if_compiling_contained_td = False
+    for rd_tup in repository_dependencies:
+        tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+            common_util.parse_repository_dependency_tuple( rd_tup )
+        if util.asbool( only_if_compiling_contained_td ):
+            has_repository_dependencies_only_if_compiling_contained_td = True
+            break
+    return has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td
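+
+# Illustrative sketch with a single hypothetical dependency tuple:
+#
+#     rd_tups = [ ( 'toolshed.example.org', 'samtools', 'bob', 'def456', 'False', 'False' ) ]
+#     get_repository_dependency_types( rd_tups )
+#     # -> ( True, False ): at least one "real" repository dependency, none that apply
+#     #    only when compiling a contained tool dependency.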
+
+
+def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
+    """
+    Return an installed tool_shed_repository database record that is defined by either the current changeset
+    revision or the installed_changeset_revision.
+    """
+    # This method is used only in Galaxy, not the Tool Shed.  We store the port (if one exists) in the database.
+    tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
+    if tool_shed is None or name is None or owner is None or changeset_revision is None:
+        message = "Unable to retrieve the repository record from the database because one or more of the following "
+        message += "required parameters is None: tool_shed: %s, name: %s, owner: %s, changeset_revision: %s " % \
+            ( str( tool_shed ), str( name ), str( owner ), str( changeset_revision ) )
+        raise Exception( message )
+    repository = get_installed_repository( app=app,
+                                           tool_shed=tool_shed,
+                                           name=name,
+                                           owner=owner,
+                                           installed_changeset_revision=changeset_revision )
+    if not repository:
+        repository = get_installed_repository( app=app,
+                                               tool_shed=tool_shed,
+                                               name=name,
+                                               owner=owner,
+                                               changeset_revision=changeset_revision )
+    if not repository:
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed )
+        repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+        repo_info_tuple = (None, repository_clone_url, changeset_revision, None, owner, None, None)
+        repository, pcr = repository_was_previously_installed( app, tool_shed_url, name, repo_info_tuple )
+    if not repository:
+        # The received changeset_revision is no longer installable, so get the next changeset_revision
+        # in the repository's changelog in the tool shed that is associated with repository_metadata.
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed )
+        params = dict( name=name, owner=owner, changeset_revision=changeset_revision )
+        pathspec = [ 'repository', 'next_installable_changeset_revision' ]
+        text = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        if text:
+            repository = get_installed_repository( app=app,
+                                                   tool_shed=tool_shed,
+                                                   name=name,
+                                                   owner=owner,
+                                                   changeset_revision=text )
+    return repository
+
+
+def get_repository_ids_requiring_prior_import_or_install( app, tsr_ids, repository_dependencies ):
+    """
+    This method is used in the Tool Shed when exporting a repository and its dependencies,
+    and in Galaxy when a repository and its dependencies are being installed.  Inspect the
+    received repository_dependencies and determine if the encoded id of each required
+    repository is in the received tsr_ids.  If so, then determine whether that required
+    repository should be imported / installed prior to its dependent repository.  Return a
+    list of encoded repository ids, each of which is contained in the received list of tsr_ids,
+    and whose associated repositories must be imported / installed prior to the dependent
+    repository associated with the received repository_dependencies.
+    """
+    prior_tsr_ids = []
+    if repository_dependencies:
+        for key, rd_tups in repository_dependencies.items():
+            if key in [ 'description', 'root_key' ]:
+                continue
+            for rd_tup in rd_tups:
+                tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                    common_util.parse_repository_dependency_tuple( rd_tup )
+                # If only_if_compiling_contained_td is True, then the repository dependency
+                # is not required to be installed prior to the dependent repository even if
+                # prior_installation_required is True.  This is because the only meaningful
+                # content of the repository dependency is its contained tool dependency, which
+                # is required in order to compile the dependent repository's tool dependency.
+                # In the scenario where the repository dependency is not installed prior to the
+                # dependent repository's tool dependency compilation process, the tool dependency
+                # compilation framework will install the repository dependency prior to compilation
+                # of the dependent repository's tool dependency.
+                if not util.asbool( only_if_compiling_contained_td ):
+                    if util.asbool( prior_installation_required ):
+                        if is_tool_shed_client( app ):
+                            # We store the port, if one exists, in the database.
+                            tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
+                            repository = get_repository_for_dependency_relationship( app,
+                                                                                     tool_shed,
+                                                                                     name,
+                                                                                     owner,
+                                                                                     changeset_revision )
+                        else:
+                            repository = get_repository_by_name_and_owner( app, name, owner )
+                        if repository:
+                            encoded_repository_id = app.security.encode_id( repository.id )
+                            if encoded_repository_id in tsr_ids:
+                                prior_tsr_ids.append( encoded_repository_id )
+    return prior_tsr_ids
+
+
+def get_repository_in_tool_shed( app, id ):
+    """Get a repository on the tool shed side from the database via id."""
+    return get_repository_query( app ).get( app.security.decode_id( id ) )
+
+
+def get_repository_owner( cleaned_repository_url ):
+    """Gvien a "cleaned" repository clone URL, return the owner of the repository."""
+    items = cleaned_repository_url.split( '/repos/' )
+    repo_path = items[ 1 ]
+    if repo_path.startswith( '/' ):
+        repo_path = repo_path.replace( '/', '', 1 )
+    return repo_path.lstrip( '/' ).split( '/' )[ 0 ]
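+
+# e.g. (illustrative) get_repository_owner( 'bx.psu.edu:9009/repos/some_username/column' )
+# -> 'some_username'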
+
+
+def get_repository_owner_from_clone_url( repository_clone_url ):
+    """Given a repository clone URL, return the owner of the repository."""
+    tmp_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url )
+    return get_repository_owner( tmp_url )
+
+
+def get_repository_query( app ):
+    if is_tool_shed_client( app ):
+        query = app.install_model.context.query( app.install_model.ToolShedRepository )
+    else:
+        query = app.model.context.query( app.model.Repository )
+    return query
+
+
+def get_role_by_id( app, role_id ):
+    """Get a Role from the database by id."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.Role ).get( app.security.decode_id( role_id ) )
+
+
+def get_tool_shed_from_clone_url( repository_clone_url ):
+    tmp_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url )
+    return tmp_url.split( '/repos/' )[ 0 ].rstrip( '/' )
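+
+# e.g. (illustrative) get_tool_shed_from_clone_url( 'http://alice@bx.psu.edu:9009/repos/some_username/column' )
+# -> 'bx.psu.edu:9009' (any port is retained here; see remove_port_from_tool_shed_url for stripping it)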
+
+
+def get_tool_shed_repository_by_id( app, repository_id ):
+    """Return a tool shed repository database record defined by the id."""
+    # This method is used only in Galaxy, not the tool shed.
+    return app.install_model.context.query( app.install_model.ToolShedRepository ) \
+                                    .filter( app.install_model.ToolShedRepository.table.c.id == app.security.decode_id( repository_id ) ) \
+                                    .first()
+
+
+def get_tool_shed_repository_status_label( app, tool_shed_repository=None, name=None, owner=None, changeset_revision=None, repository_clone_url=None ):
+    """Return a color-coded label for the status of the received tool-shed_repository installed into Galaxy."""
+    if tool_shed_repository is None:
+        if name is not None and owner is not None and repository_clone_url is not None:
+            tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+            tool_shed_repository = get_installed_repository( app,
+                                                             tool_shed=tool_shed,
+                                                             name=name,
+                                                             owner=owner,
+                                                             installed_changeset_revision=changeset_revision )
+    if tool_shed_repository:
+        status_label = tool_shed_repository.status
+        if tool_shed_repository.status in [ app.install_model.ToolShedRepository.installation_status.CLONING,
+                                            app.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+                                            app.install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+                                            app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+                                            app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+            bgcolor = app.install_model.ToolShedRepository.states.INSTALLING
+        elif tool_shed_repository.status in [ app.install_model.ToolShedRepository.installation_status.NEW,
+                                              app.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+            bgcolor = app.install_model.ToolShedRepository.states.UNINSTALLED
+        elif tool_shed_repository.status in [ app.install_model.ToolShedRepository.installation_status.ERROR ]:
+            bgcolor = app.install_model.ToolShedRepository.states.ERROR
+        elif tool_shed_repository.status in [ app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+            bgcolor = app.install_model.ToolShedRepository.states.WARNING
+        elif tool_shed_repository.status in [ app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
+            if tool_shed_repository.repository_dependencies_being_installed:
+                bgcolor = app.install_model.ToolShedRepository.states.WARNING
+                status_label = '%s, %s' % ( status_label,
+                                            app.install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES )
+            elif tool_shed_repository.missing_repository_dependencies:
+                bgcolor = app.install_model.ToolShedRepository.states.WARNING
+                status_label = '%s, missing repository dependencies' % status_label
+            elif tool_shed_repository.tool_dependencies_being_installed:
+                bgcolor = app.install_model.ToolShedRepository.states.WARNING
+                status_label = '%s, %s' % ( status_label,
+                                            app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+            elif tool_shed_repository.missing_tool_dependencies:
+                bgcolor = app.install_model.ToolShedRepository.states.WARNING
+                status_label = '%s, missing tool dependencies' % status_label
+            else:
+                bgcolor = app.install_model.ToolShedRepository.states.OK
+        else:
+            bgcolor = app.install_model.ToolShedRepository.states.ERROR
+    else:
+        bgcolor = app.install_model.ToolShedRepository.states.WARNING
+        status_label = 'unknown status'
+    return '<div class="count-box state-color-%s">%s</div>' % ( bgcolor, status_label )
+
+
+def get_tool_shed_status_for_installed_repository( app, repository ):
+    """
+    Send a request to the tool shed to retrieve information about newer installable repository revisions,
+    current revision updates, whether the repository revision is the latest downloadable revision, and
+    whether the repository has been deprecated in the tool shed.  The received repository is a ToolShedRepository
+    object from Galaxy.
+    """
+    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, str( repository.tool_shed ) )
+    params = dict( name=repository.name, owner=repository.owner, changeset_revision=repository.changeset_revision )
+    pathspec = [ 'repository', 'status_for_installed_repository' ]
+    try:
+        encoded_tool_shed_status_dict = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+        tool_shed_status_dict = encoding_util.tool_shed_decode( encoded_tool_shed_status_dict )
+        return tool_shed_status_dict
+    except HTTPError as e:
+        # This should handle backward compatibility with the Galaxy 12/20/12 release, when updates for an
+        # installed revision were handled using only a boolean value.
+        log.debug( "Error attempting to get tool shed status for installed repository %s: %s\nAttempting older 'check_for_updates' method.\n" %
+                   ( str( repository.name ), str( e ) ) )
+        pathspec = [ 'repository', 'check_for_updates' ]
+        params[ 'from_update_manager' ] = True
+        try:
+            # The value of text will be 'true' or 'false', depending upon whether there is an update available for the installed revision.
+            text = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+            return dict( revision_update=text )
+        except Exception as e:
+            # The required tool shed may be unavailable, so default the revision_update value to 'false'.
+            return dict( revision_update='false' )
+    except Exception as e:
+        log.exception( "Error attempting to get tool shed status for installed repository %s: %s" % ( str( repository.name ), str( e ) ) )
+        return {}
+
+
+def handle_role_associations( app, role, repository, **kwd ):
+    sa_session = app.model.context.current
+    message = escape( kwd.get( 'message', '' ) )
+    status = kwd.get( 'status', 'done' )
+    repository_owner = repository.user
+    if kwd.get( 'manage_role_associations_button', False ):
+        in_users_list = util.listify( kwd.get( 'in_users', [] ) )
+        in_users = [ sa_session.query( app.model.User ).get( x ) for x in in_users_list ]
+        # Make sure the repository owner is always associated with the repository's admin role.
+        owner_associated = False
+        for user in in_users:
+            if user.id == repository_owner.id:
+                owner_associated = True
+                break
+        if not owner_associated:
+            in_users.append( repository_owner )
+            message += "The repository owner must always be associated with the repository's administrator role.  "
+            status = 'error'
+        in_groups_list = util.listify( kwd.get( 'in_groups', [] ) )
+        in_groups = [ sa_session.query( app.model.Group ).get( x ) for x in in_groups_list ]
+        in_repositories = [ repository ]
+        app.security_agent.set_entity_role_associations( roles=[ role ],
+                                                         users=in_users,
+                                                         groups=in_groups,
+                                                         repositories=in_repositories )
+        sa_session.refresh( role )
+        message += "Role <b>%s</b> has been associated with %d users, %d groups and %d repositories.  " % \
+            ( escape( str( role.name ) ), len( in_users ), len( in_groups ), len( in_repositories ) )
+    in_users = []
+    out_users = []
+    in_groups = []
+    out_groups = []
+    for user in sa_session.query( app.model.User ) \
+                          .filter( app.model.User.table.c.deleted == false() ) \
+                          .order_by( app.model.User.table.c.email ):
+        if user in [ x.user for x in role.users ]:
+            in_users.append( ( user.id, user.email ) )
+        else:
+            out_users.append( ( user.id, user.email ) )
+    for group in sa_session.query( app.model.Group ) \
+                           .filter( app.model.Group.table.c.deleted == false() ) \
+                           .order_by( app.model.Group.table.c.name ):
+        if group in [ x.group for x in role.groups ]:
+            in_groups.append( ( group.id, group.name ) )
+        else:
+            out_groups.append( ( group.id, group.name ) )
+    associations_dict = dict( in_users=in_users,
+                              out_users=out_users,
+                              in_groups=in_groups,
+                              out_groups=out_groups,
+                              message=message,
+                              status=status )
+    return associations_dict
+
+
+def is_tool_shed_client( app ):
+    """
+    The tool shed and its clients (i.e. Galaxy) require a lot of similar
+    functionality in this file, but with small differences.  This method
+    determines whether the app performing the action is the tool shed or a
+    client of the tool shed.
+    """
+    return hasattr( app, "install_model" )
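+
+# Galaxy application objects expose an install_model attribute while the tool
+# shed application does not, so (illustrative):
+#
+#     is_tool_shed_client( galaxy_app )     # -> True
+#     is_tool_shed_client( tool_shed_app )  # -> False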
+
+
+def repository_was_previously_installed( app, tool_shed_url, repository_name, repo_info_tuple, from_tip=False ):
+    """
+    Find out if a repository is already installed into Galaxy - there are several scenarios where this
+    is necessary.  For example, this method will handle the case where the repository was previously
+    installed using an older changeset_revision, but later the repository was updated in the tool shed
+    and now we're trying to install the latest changeset revision of the same repository instead of
+    updating the one that was previously installed.  We'll look in the database instead of on disk since
+    the repository may be currently uninstalled.
+    """
+    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
+    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+        get_repo_info_tuple_contents( repo_info_tuple )
+    tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+    # See if we can locate the repository using the value of changeset_revision.
+    tool_shed_repository = get_installed_repository( app,
+                                                     tool_shed=tool_shed,
+                                                     name=repository_name,
+                                                     owner=repository_owner,
+                                                     installed_changeset_revision=changeset_revision )
+    if tool_shed_repository:
+        return tool_shed_repository, changeset_revision
+    # Get all previous changeset revisions from the tool shed for the repository back to, but excluding,
+    # the previous valid changeset revision to see if it was previously installed using one of them.
+    params = dict( galaxy_url=web.url_for( '/', qualified=True ),
+                   name=repository_name,
+                   owner=repository_owner,
+                   changeset_revision=changeset_revision,
+                   from_tip=str( from_tip ) )
+    pathspec = [ 'repository', 'previous_changeset_revisions' ]
+    text = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+    if text:
+        changeset_revisions = util.listify( text )
+        for previous_changeset_revision in changeset_revisions:
+            tool_shed_repository = get_installed_repository( app,
+                                                             tool_shed=tool_shed,
+                                                             name=repository_name,
+                                                             owner=repository_owner,
+                                                             installed_changeset_revision=previous_changeset_revision )
+            if tool_shed_repository:
+                return tool_shed_repository, previous_changeset_revision
+    return None, None
+
+
+def set_repository_attributes( app, repository, status, error_message, deleted, uninstalled, remove_from_disk=False ):
+    if remove_from_disk:
+        relative_install_dir = repository.repo_path( app )
+        if relative_install_dir:
+            clone_dir = os.path.abspath( relative_install_dir )
+            try:
+                shutil.rmtree( clone_dir )
+                log.debug( "Removed repository installation directory: %s" % str( clone_dir ) )
+            except Exception as e:
+                log.debug( "Error removing repository installation directory %s: %s" % ( str( clone_dir ), str( e ) ) )
+    repository.error_message = error_message
+    repository.status = status
+    repository.deleted = deleted
+    repository.uninstalled = uninstalled
+    app.install_model.context.add( repository )
+    app.install_model.context.flush()
+
+
+def update_repository( app, trans, id, **kwds ):
+    """Update an existing ToolShed repository"""
+    message = None
+    flush_needed = False
+    sa_session = app.model.context.current
+    repository = sa_session.query( app.model.Repository ).get( app.security.decode_id( id ) )
+    if repository is None:
+        return None, "Unknown repository ID"
+
+    if not ( trans.user_is_admin() or
+            trans.app.security_agent.user_can_administer_repository( trans.user, repository ) ):
+        message = "You are not the owner of this repository, so you cannot administer it."
+        return None, message
+
+    # Whitelist properties that can be changed via this method
+    for key in ( 'type', 'description', 'long_description', 'remote_repository_url', 'homepage_url' ):
+        # If the key is present, not None, and different from what's in the model.
+        if key in kwds and kwds[ key ] is not None and kwds[ key ] != getattr( repository, key ):
+            setattr( repository, key, kwds[ key ] )
+            flush_needed = True
+
+    if 'category_ids' in kwds and isinstance( kwds[ 'category_ids' ], list ):
+        # Get existing category associations
+        category_associations = sa_session.query( app.model.RepositoryCategoryAssociation ) \
+                                          .filter( app.model.RepositoryCategoryAssociation.table.c.repository_id == app.security.decode_id( id ) )
+        # Remove all of them
+        for rca in category_associations:
+            sa_session.delete( rca )
+
+        # Then (re)create category associations
+        for category_id in kwds[ 'category_ids' ]:
+            category = sa_session.query( app.model.Category ) \
+                                 .get( app.security.decode_id( category_id ) )
+            if category:
+                rca = app.model.RepositoryCategoryAssociation( repository, category )
+                sa_session.add( rca )
+        flush_needed = True
+
+    # However, some properties are special, like 'name'.
+    if 'name' in kwds and kwds[ 'name' ] is not None and repository.name != kwds[ 'name' ]:
+        if repository.times_downloaded != 0:
+            message = "Repository names cannot be changed if the repository has been cloned."
+        else:
+            message = validate_repository_name( trans.app, kwds[ 'name' ], trans.user )
+        if message:
+            return None, message
+
+        repo_dir = repository.repo_path( app )
+        # Change the entry in the hgweb.config file for the repository.
+        old_lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
+        new_lhs = "repos/%s/%s" % ( repository.user.username, kwds[ 'name' ] )
+        trans.app.hgweb_config_manager.change_entry( old_lhs, new_lhs, repo_dir )
+
+        # Change the entry in the repository's hgrc file.
+        hgrc_file = os.path.join( repo_dir, '.hg', 'hgrc' )
+        change_repository_name_in_hgrc_file( hgrc_file, kwds[ 'name' ] )
+
+        # Rename the repository's admin role to match the new repository name.
+        repository_admin_role = repository.admin_role
+        repository_admin_role.name = get_repository_admin_role_name( str( kwds[ 'name' ] ), str( repository.user.username ) )
+        trans.sa_session.add( repository_admin_role )
+        repository.name = kwds[ 'name' ]
+        flush_needed = True
+
+    if flush_needed:
+        trans.sa_session.add( repository )
+        trans.sa_session.flush()
+        message = "The repository information has been updated."
+    else:
+        message = None
+    return repository, message
+
+
+def validate_repository_name( app, name, user ):
+    """
+    Validate whether the given name qualifies as a new tool shed repository name.
+    Repository names must be unique for each user, must be between 2 and 80 characters
+    in length and must contain only lower-case letters, numbers, and the '_' character.
+    """
+    if name in [ 'None', None, '' ]:
+        return 'Enter the required repository name.'
+    if name in [ 'repos' ]:
+        return "The term <b>%s</b> is a reserved word in the tool shed, so it cannot be used as a repository name." % name
+    check_existing = get_repository_by_name_and_owner( app, name, user.username )
+    if check_existing is not None:
+        if check_existing.deleted:
+            return 'You own a deleted repository named <b>%s</b>, please choose a different name.' % escape( name )
+        else:
+            return "You already own a repository named <b>%s</b>, please choose a different name." % escape( name )
+    if len( name ) < 2:
+        return "Repository names must be at least 2 characters in length."
+    if len( name ) > 80:
+        return "Repository names cannot be more than 80 characters in length."
+    if not VALID_REPOSITORYNAME_RE.match( name ):
+        return "Repository names must contain only lower-case letters, numbers and underscore."
+    return ''
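+
+# Illustrative checks (hypothetical user who owns no repositories):
+#
+#     validate_repository_name( app, 'repos', user )     # -> reserved-word message
+#     validate_repository_name( app, 'x', user )         # -> length message
+#     validate_repository_name( app, 'fastqc_1', user )  # -> '' (valid)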
diff --git a/lib/tool_shed/util/review_util.py b/lib/tool_shed/util/review_util.py
new file mode 100644
index 0000000..1d5cd06
--- /dev/null
+++ b/lib/tool_shed/util/review_util.py
@@ -0,0 +1,131 @@
+import logging
+
+from sqlalchemy import and_
+
+from galaxy.util.odict import odict
+from tool_shed.util import hg_util
+
+log = logging.getLogger( __name__ )
+
+
+def can_browse_repository_reviews( app, user, repository ):
+    """
+    Determine if there are any reviews of the received repository for which the
+    current user has permission to browse any component reviews.
+    """
+    if user:
+        for review in repository.reviews:
+            for component_review in review.component_reviews:
+                if app.security_agent.user_can_browse_component_review( app,
+                                                                        repository,
+                                                                        component_review, user ):
+                    return True
+    return False
+
+
+def changeset_revision_reviewed_by_user( user, repository, changeset_revision ):
+    """Determine if the current changeset revision has been reviewed by the current user."""
+    for review in repository.reviews:
+        if review.changeset_revision == changeset_revision and review.user == user:
+            return True
+    return False
+
+
+def get_component( app, id ):
+    """Get a component from the database."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.Component ).get( app.security.decode_id( id ) )
+
+
+def get_component_review( app, id ):
+    """Get a component_review from the database"""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.ComponentReview ).get( app.security.decode_id( id ) )
+
+
+def get_component_by_name( app, name ):
+    """Get a component from the database via a name."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.Component ) \
+                     .filter( app.model.Component.table.c.name == name ) \
+                     .first()
+
+
+def get_component_review_by_repository_review_id_component_id( app, repository_review_id, component_id ):
+    """Get a component_review from the database via repository_review_id and component_id."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.ComponentReview ) \
+                     .filter( and_( app.model.ComponentReview.table.c.repository_review_id == app.security.decode_id( repository_review_id ),
+                                    app.model.ComponentReview.table.c.component_id == app.security.decode_id( component_id ) ) ) \
+                     .first()
+
+
+def get_components( app ):
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.Component ) \
+                     .order_by( app.model.Component.name ) \
+                     .all()
+
+
+def get_previous_repository_reviews( app, repository, changeset_revision ):
+    """
+    Return an ordered dictionary of repository reviews up to and including the
+    received changeset revision.
+    """
+    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
+    previous_reviews_dict = odict()
+    for changeset in hg_util.reversed_upper_bounded_changelog( repo, changeset_revision ):
+        previous_changeset_revision = str( repo.changectx( changeset ) )
+        if previous_changeset_revision in reviewed_revision_hashes:
+            previous_rev, previous_changeset_revision_label = \
+                hg_util.get_rev_label_from_changeset_revision( repo, previous_changeset_revision )
+            revision_reviews = get_reviews_by_repository_id_changeset_revision( app,
+                                                                                app.security.encode_id( repository.id ),
+                                                                                previous_changeset_revision )
+            previous_reviews_dict[ previous_changeset_revision ] = \
+                dict( changeset_revision_label=previous_changeset_revision_label,
+                      reviews=revision_reviews )
+    return previous_reviews_dict
+
+
+def get_review( app, id ):
+    """Get a repository_review from the database via id."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.RepositoryReview ).get( app.security.decode_id( id ) )
+
+
+def get_review_by_repository_id_changeset_revision_user_id( app, repository_id, changeset_revision, user_id ):
+    """
+    Get a repository_review from the database via repository id, changeset_revision
+    and user_id.
+    """
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.RepositoryReview ) \
+                     .filter( and_( app.model.RepositoryReview.repository_id == app.security.decode_id( repository_id ),
+                                    app.model.RepositoryReview.changeset_revision == changeset_revision,
+                                    app.model.RepositoryReview.user_id == app.security.decode_id( user_id ) ) ) \
+                     .first()
+
+
+def get_reviews_by_repository_id_changeset_revision( app, repository_id, changeset_revision ):
+    """Get all repository_reviews from the database via repository id and changeset_revision."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.RepositoryReview ) \
+                     .filter( and_( app.model.RepositoryReview.repository_id == app.security.decode_id( repository_id ),
+                                    app.model.RepositoryReview.changeset_revision == changeset_revision ) ) \
+                     .all()
+
+
+def has_previous_repository_reviews( app, repository, changeset_revision ):
+    """
+    Determine if a repository has a changeset revision review prior to the
+    received changeset revision.
+    """
+    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    reviewed_revision_hashes = [ review.changeset_revision for review in repository.reviews ]
+    for changeset in hg_util.reversed_upper_bounded_changelog( repo, changeset_revision ):
+        previous_changeset_revision = str( repo.changectx( changeset ) )
+        if previous_changeset_revision in reviewed_revision_hashes:
+            return True
+    return False
diff --git a/lib/tool_shed/util/search_util.py b/lib/tool_shed/util/search_util.py
new file mode 100644
index 0000000..a3ac00b
--- /dev/null
+++ b/lib/tool_shed/util/search_util.py
@@ -0,0 +1,171 @@
+import logging
+
+from sqlalchemy import and_, false, true
+
+log = logging.getLogger( __name__ )
+
+
+def in_tool_dict( tool_dict, exact_matches_checked, tool_id=None, tool_name=None, tool_version=None ):
+    found = False
+    if tool_id and not tool_name and not tool_version:
+        tool_dict_tool_id = tool_dict[ 'id' ].lower()
+        found = ( tool_id == tool_dict_tool_id ) or \
+                ( not exact_matches_checked and tool_dict_tool_id.find( tool_id ) >= 0 )
+    elif tool_name and not tool_id and not tool_version:
+        tool_dict_tool_name = tool_dict[ 'name' ].lower()
+        found = ( tool_name == tool_dict_tool_name ) or \
+                ( not exact_matches_checked and tool_dict_tool_name.find( tool_name ) >= 0 )
+    elif tool_version and not tool_id and not tool_name:
+        tool_dict_tool_version = tool_dict[ 'version' ].lower()
+        found = ( tool_version == tool_dict_tool_version ) or \
+                ( not exact_matches_checked and tool_dict_tool_version.find( tool_version ) >= 0 )
+    elif tool_id and tool_name and not tool_version:
+        tool_dict_tool_id = tool_dict[ 'id' ].lower()
+        tool_dict_tool_name = tool_dict[ 'name' ].lower()
+        found = ( tool_id == tool_dict_tool_id and tool_name == tool_dict_tool_name ) or \
+                ( not exact_matches_checked and tool_dict_tool_id.find( tool_id ) >= 0 and tool_dict_tool_name.find( tool_name ) >= 0 )
+    elif tool_id and tool_version and not tool_name:
+        tool_dict_tool_id = tool_dict[ 'id' ].lower()
+        tool_dict_tool_version = tool_dict[ 'version' ].lower()
+        found = ( tool_id == tool_dict_tool_id and tool_version == tool_dict_tool_version ) or \
+                ( not exact_matches_checked and tool_dict_tool_id.find( tool_id ) >= 0 and tool_dict_tool_version.find( tool_version ) >= 0 )
+    elif tool_version and tool_name and not tool_id:
+        tool_dict_tool_version = tool_dict[ 'version' ].lower()
+        tool_dict_tool_name = tool_dict[ 'name' ].lower()
+        found = ( tool_version == tool_dict_tool_version and tool_name == tool_dict_tool_name ) or \
+                ( not exact_matches_checked and tool_dict_tool_version.find( tool_version ) >= 0 and tool_dict_tool_name.find( tool_name ) >= 0 )
+    elif tool_version and tool_name and tool_id:
+        tool_dict_tool_version = tool_dict[ 'version' ].lower()
+        tool_dict_tool_name = tool_dict[ 'name' ].lower()
+        tool_dict_tool_id = tool_dict[ 'id' ].lower()
+        found = ( tool_version == tool_dict_tool_version and
+                  tool_name == tool_dict_tool_name and
+                  tool_id == tool_dict_tool_id ) or \
+                ( not exact_matches_checked and
+                  tool_dict_tool_version.find( tool_version ) >= 0 and
+                  tool_dict_tool_name.find( tool_name ) >= 0 and
+                  tool_dict_tool_id.find( tool_id ) >= 0 )
+    return found
+
+
+def in_workflow_dict( workflow_dict, exact_matches_checked, workflow_name ):
+    workflow_dict_workflow_name = workflow_dict[ 'name' ].lower()
+    return ( workflow_name == workflow_dict_workflow_name ) or \
+           ( not exact_matches_checked and workflow_dict_workflow_name.find( workflow_name ) >= 0 )
+
+
+def make_same_length( list1, list2 ):
+    # If either list is 1 item, we'll append to it until its length is the same as the other.
+    if len( list1 ) == 1:
+        for i in range( 1, len( list2 ) ):
+            list1.append( list1[ 0 ] )
+    elif len( list2 ) == 1:
+        for i in range( 1, len( list1 ) ):
+            list2.append( list2[ 0 ] )
+    return list1, list2
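+
+# Example of the padding behavior (illustrative values):
+#
+#     make_same_length( [ 'fastqc' ], [ '1.0', '1.1', '2.0' ] )
+#     # -> ( [ 'fastqc', 'fastqc', 'fastqc' ], [ '1.0', '1.1', '2.0' ] )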
+
+
+def search_ids_names( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names ):
+    for i, tool_id in enumerate( tool_ids ):
+        tool_name = tool_names[ i ]
+        if in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_name=tool_name ):
+            match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+    return match_tuples
+
+
+def search_ids_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions ):
+    for i, tool_id in enumerate( tool_ids ):
+        tool_version = tool_versions[ i ]
+        if in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_version=tool_version ):
+            match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+    return match_tuples
+
+
+def search_names_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions ):
+    for i, tool_name in enumerate( tool_names ):
+        tool_version = tool_versions[ i ]
+        if in_tool_dict( tool_dict, exact_matches_checked, tool_name=tool_name, tool_version=tool_version ):
+            match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+    return match_tuples
+
+
+def search_repository_metadata( app, exact_matches_checked, tool_ids='', tool_names='', tool_versions='',
+                                workflow_names='', all_workflows=False ):
+    sa_session = app.model.context.current
+    match_tuples = []
+    ok = True
+    if tool_ids or tool_names or tool_versions:
+        for repository_metadata in sa_session.query( app.model.RepositoryMetadata ) \
+                                             .filter( app.model.RepositoryMetadata.table.c.includes_tools == true() ) \
+                                             .join( app.model.Repository ) \
+                                             .filter( and_( app.model.Repository.table.c.deleted == false(),
+                                                            app.model.Repository.table.c.deprecated == false() ) ):
+            metadata = repository_metadata.metadata
+            if metadata:
+                tools = metadata.get( 'tools', [] )
+                for tool_dict in tools:
+                    if tool_ids and not tool_names and not tool_versions:
+                        for tool_id in tool_ids:
+                            if in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id ):
+                                match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+                    elif tool_names and not tool_ids and not tool_versions:
+                        for tool_name in tool_names:
+                            if in_tool_dict( tool_dict, exact_matches_checked, tool_name=tool_name ):
+                                match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+                    elif tool_versions and not tool_ids and not tool_names:
+                        for tool_version in tool_versions:
+                            if in_tool_dict( tool_dict, exact_matches_checked, tool_version=tool_version ):
+                                match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+                    elif tool_ids and tool_names and not tool_versions:
+                        if len( tool_ids ) == len( tool_names ):
+                            match_tuples = search_ids_names( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names )
+                        elif len( tool_ids ) == 1 or len( tool_names ) == 1:
+                            tool_ids, tool_names = make_same_length( tool_ids, tool_names )
+                            match_tuples = search_ids_names( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names )
+                        else:
+                            ok = False
+                    elif tool_ids and tool_versions and not tool_names:
+                        if len( tool_ids ) == len( tool_versions ):
+                            match_tuples = search_ids_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions )
+                        elif len( tool_ids ) == 1 or len( tool_versions ) == 1:
+                            tool_ids, tool_versions = make_same_length( tool_ids, tool_versions )
+                            match_tuples = search_ids_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions )
+                        else:
+                            ok = False
+                    elif tool_versions and tool_names and not tool_ids:
+                        if len( tool_versions ) == len( tool_names ):
+                            match_tuples = search_names_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions )
+                        elif len( tool_versions ) == 1 or len( tool_names ) == 1:
+                            tool_versions, tool_names = make_same_length( tool_versions, tool_names )
+                            match_tuples = search_names_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions )
+                        else:
+                            ok = False
+                    elif tool_versions and tool_names and tool_ids:
+                        if len( tool_versions ) == len( tool_names ) and len( tool_names ) == len( tool_ids ):
+                            for i, tool_version in enumerate( tool_versions ):
+                                tool_name = tool_names[ i ]
+                                tool_id = tool_ids[ i ]
+                                if in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_name=tool_name, tool_version=tool_version ):
+                                    match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+                        else:
+                            ok = False
+    elif workflow_names or all_workflows:
+        for repository_metadata in sa_session.query( app.model.RepositoryMetadata ) \
+                                             .filter( app.model.RepositoryMetadata.table.c.includes_workflows == true() ) \
+                                             .join( app.model.Repository ) \
+                                             .filter( and_( app.model.Repository.table.c.deleted == false(),
+                                                            app.model.Repository.table.c.deprecated == false() ) ):
+            metadata = repository_metadata.metadata
+            if metadata:
+                # metadata[ 'workflows' ] is a list of 2-element lists of the form
+                # [ <relative path to the .ga file in the repository>, <exported workflow dict> ].
+                if workflow_names:
+                    workflow_tups = metadata.get( 'workflows', [] )
+                    workflows = [ workflow_tup[1] for workflow_tup in workflow_tups ]
+                    for workflow_dict in workflows:
+                        for workflow_name in workflow_names:
+                            if in_workflow_dict( workflow_dict, exact_matches_checked, workflow_name ):
+                                match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+                elif all_workflows:
+                    match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+    return ok, match_tuples
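+
+# Illustrative sketch (editorial): a typical call, assuming the caller passes
+# lists as the iteration above implies, returns an ok flag (False only when the
+# received lists cannot be reconciled to the same length) plus a list of
+# ( repository_id, changeset_revision ) match tuples:
+#     ok, match_tuples = search_repository_metadata( app, False, tool_ids=[ 'fastqc' ] )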
diff --git a/lib/tool_shed/util/shed_util_common.py b/lib/tool_shed/util/shed_util_common.py
new file mode 100644
index 0000000..dbf74d9
--- /dev/null
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -0,0 +1,652 @@
+import json
+import logging
+import os
+import re
+import socket
+import string
+
+import sqlalchemy.orm.exc
+from sqlalchemy import and_, false, true
+
+from galaxy import util
+from galaxy.util import checkers
+from galaxy.web import url_for
+from tool_shed.util import (
+    basic_util,
+    common_util,
+    hg_util,
+    repository_util
+)
+
+log = logging.getLogger( __name__ )
+
+MAX_CONTENT_SIZE = 1048576
+DATATYPES_CONFIG_FILENAME = 'datatypes_conf.xml'
+REPOSITORY_DATA_MANAGER_CONFIG_FILENAME = 'data_manager_conf.xml'
+
+new_repo_email_alert_template = """
+Sharable link:         ${sharable_link}
+Repository name:       ${repository_name}
+Revision:              ${revision}
+Change description:
+${description}
+
+Uploaded by:           ${username}
+Date content uploaded: ${display_date}
+
+${content_alert_str}
+
+-----------------------------------------------------------------------------
+This change alert was sent from the Galaxy tool shed hosted on the server
+"${host}"
+-----------------------------------------------------------------------------
+You received this alert because you registered to receive email when
+new repositories were created in the Galaxy tool shed named "${host}".
+-----------------------------------------------------------------------------
+"""
+
+email_alert_template = """
+Sharable link:         ${sharable_link}
+Repository name:       ${repository_name}
+Revision:              ${revision}
+Change description:
+${description}
+
+Changed by:     ${username}
+Date of change: ${display_date}
+
+${content_alert_str}
+
+-----------------------------------------------------------------------------
+This change alert was sent from the Galaxy tool shed hosted on the server
+"${host}"
+-----------------------------------------------------------------------------
+You received this alert because you registered to receive email whenever
+changes were made to the repository named "${repository_name}".
+-----------------------------------------------------------------------------
+"""
+
+contact_owner_template = """
+GALAXY TOOL SHED REPOSITORY MESSAGE
+------------------------
+
+The user '${username}' sent you the following message regarding your tool shed
+repository named '${repository_name}'.  You can respond by sending a reply to
+the user's email address: ${email}.
+-----------------------------------------------------------------------------
+${message}
+-----------------------------------------------------------------------------
+This message was sent from the Galaxy Tool Shed instance hosted on the server
+'${host}'
+"""
+
+
+def can_eliminate_repository_dependency(metadata_dict, tool_shed_url, name, owner):
+    """
+    Determine whether the relationship between a repository_dependency record
+    and its associated tool_shed_repository record on the Galaxy side
+    can be eliminated.
+    """
+    rd_dict = metadata_dict.get('repository_dependencies', {})
+    rd_tups = rd_dict.get( 'repository_dependencies', [] )
+    for rd_tup in rd_tups:
+        tsu, n, o, none1, none2, none3 = common_util.parse_repository_dependency_tuple(rd_tup)
+        if tsu == tool_shed_url and n == name and o == owner:
+            # The repository dependency is current, so keep it.
+            return False
+    return True
+
+
+def can_eliminate_tool_dependency(metadata_dict, name, dependency_type, version):
+    """
+    Determine whether the relationship between a tool_dependency record
+    and its associated tool_shed_repository record on the Galaxy side
+    can be eliminated.
+    """
+    td_dict = metadata_dict.get('tool_dependencies', {})
+    for td_key, td_val in td_dict.items():
+        if td_key == 'set_environment':
+            for td in td_val:
+                n = td.get('name', None)
+                t = td.get('type', None)
+                if n == name and t == dependency_type:
+                    # The tool dependency is current, so keep it.
+                    return False
+        else:
+            n = td_val.get('name', None)
+            t = td_val.get('type', None)
+            v = td_val.get('version', None)
+            if n == name and t == dependency_type and v == version:
+                # The tool dependency is current, so keep it.
+                return False
+    return True
+
+
+def clean_dependency_relationships(trans, metadata_dict, tool_shed_repository, tool_shed_url):
+    """
+    Repositories of type tool_dependency_definition allow for defining a
+    package dependency at some point in the change log and then removing the
+    dependency later in the change log.  This function keeps the dependency
+    relationships on the Galaxy side current by deleting database records
+    that defined the now-broken relationships.
+    """
+    for rrda in tool_shed_repository.required_repositories:
+        rd = rrda.repository_dependency
+        r = rd.repository
+        if can_eliminate_repository_dependency(metadata_dict, tool_shed_url, r.name, r.owner):
+            message = "Repository dependency %s by owner %s is not required by repository %s, owner %s, "
+            message += "removing from list of repository dependencies."
+            log.debug(message % (r.name, r.owner, tool_shed_repository.name, tool_shed_repository.owner))
+            trans.install_model.context.delete(rrda)
+            trans.install_model.context.flush()
+    for td in tool_shed_repository.tool_dependencies:
+        if can_eliminate_tool_dependency(metadata_dict, td.name, td.type, td.version):
+            message = "Tool dependency %s, version %s is not required by repository %s, owner %s, "
+            message += "removing from list of tool dependencies."
+            log.debug(message % (td.name, td.version, tool_shed_repository.name, tool_shed_repository.owner))
+            trans.install_model.context.delete(td)
+            trans.install_model.context.flush()
+
+
+def generate_tool_guid( repository_clone_url, tool ):
+    """
+    Generate a guid for the installed tool.  It is critical that this guid matches the guid for
+    the tool in the Galaxy tool shed from which it is being installed.  The form of the guid is
+    <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version>
+    """
+    tmp_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url )
+    return '%s/%s/%s' % ( tmp_url, tool.id, tool.version )
+
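+# Illustrative sketch (editorial): for a hypothetical clone URL
+# https://user@toolshed.g2.bx.psu.edu/repos/devteam/fastqc and a tool with
+# id 'fastqc' and version '0.65', the guid would be
+#     toolshed.g2.bx.psu.edu/repos/devteam/fastqc/fastqc/0.65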
+
+def get_categories( app ):
+    """Get all categories from the database."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.Category ) \
+                     .filter( app.model.Category.table.c.deleted == false() ) \
+                     .order_by( app.model.Category.table.c.name ) \
+                     .all()
+
+
+def get_category( app, id ):
+    """Get a category from the database."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.Category ).get( app.security.decode_id( id ) )
+
+
+def get_category_by_name( app, name ):
+    """Get a category from the database via name."""
+    sa_session = app.model.context.current
+    try:
+        return sa_session.query( app.model.Category ).filter_by( name=name ).one()
+    except sqlalchemy.orm.exc.NoResultFound:
+        return None
+
+
+def get_tool_shed_repo_requirements(app, tool_shed_url, repositories=None, repo_info_dicts=None):
+    """
+    Contact tool_shed_url for a list of requirements for a repository or a list of repositories.
+    Returns a list of requirements, where each requirement is a dictionary with name and version as keys.
+    """
+    if not repositories and not repo_info_dicts:
+        raise Exception("Need to pass either repository or repo_info_dicts")
+    if repositories:
+        if not isinstance(repositories, list):
+            repositories = [repositories]
+        repository_params = [{'name': repository.name,
+                             'owner': repository.owner,
+                             'changeset_revision': repository.changeset_revision} for repository in repositories]
+    else:
+        if not isinstance(repo_info_dicts, list):
+            repo_info_dicts = [repo_info_dicts]
+        repository_params = []
+        for repo_info_dict in repo_info_dicts:
+            for name, repo_info_tuple in repo_info_dict.items():
+                # repo_info_tuple is actually a list; the historical "tuple" name is kept.
+                owner = repo_info_tuple[4]
+                changeset_revision = repo_info_tuple[2]
+                repository_params.append({'name': name,
+                                          'owner': owner,
+                                          'changeset_revision': changeset_revision})
+    pathspec = ["api", "repositories", "get_repository_revision_install_info"]
+    tools = []
+    for params in repository_params:
+        response = util.url_get(tool_shed_url,
+                                password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ),
+                                pathspec=pathspec,
+                                params=params
+                                )
+        json_response = json.loads(response)
+        valid_tools = json_response[1].get('valid_tools', [])
+        if valid_tools:
+            tools.extend(valid_tools)
+    return get_unique_requirements_from_tools(tools)
+
+
+def get_unique_requirements_from_tools(tools):
+    requirements = []
+    for tool in tools:
+        if tool['requirements']:
+            requirements.append(tool['requirements'])
+    return get_unique_requirements(requirements)
+
+
+def get_unique_requirements(requirements):
+    uniq_reqs = dict()
+    for tool_requirements in requirements:
+        for req in tool_requirements:
+            name = req.get("name", None)
+            if not name:
+                continue  # A requirement without a name can't be resolved, so let's skip those
+            version = req.get("version", "versionless")
+            type = req.get("type", None)
+            if not type == "package":
+                continue
+            uniq_reqs["%s_%s" % (name, version)] = {'name': name, 'version': version, 'type': type}
+    return list(uniq_reqs.values())
+
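+# Illustrative sketch (editorial): requirements are deduplicated on a
+# "<name>_<version>" key and non-package types are dropped:
+#     get_unique_requirements( [ [ { 'name': 'samtools', 'version': '1.2', 'type': 'package' } ],
+#                                [ { 'name': 'samtools', 'version': '1.2', 'type': 'package' },
+#                                  { 'name': 'PERL5LIB', 'type': 'set_environment' } ] ] )
+#     # => [ { 'name': 'samtools', 'version': '1.2', 'type': 'package' } ]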
+
+def get_unique_requirements_from_repository(repository):
+    if not repository.includes_tools:
+        return []
+    else:
+        return get_unique_requirements_from_tools(repository.metadata.get('tools', []))
+
+
+def get_ctx_rev( app, tool_shed_url, name, owner, changeset_revision ):
+    """
+    Send a request to the tool shed to retrieve the ctx_rev for a repository defined by the
+    combination of a name, owner and changeset revision.
+    """
+    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
+    params = dict( name=name, owner=owner, changeset_revision=changeset_revision )
+    pathspec = [ 'repository', 'get_ctx_rev' ]
+    ctx_rev = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+    return ctx_rev
+
+
+def get_next_prior_import_or_install_required_dict_entry( prior_required_dict, processed_tsr_ids ):
+    """
+    This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy
+    when a repository and its dependencies are being installed.  The order in which the prior_required_dict
+    is processed is critical in order to ensure that the ultimate repository import or installation order is
+    correctly defined.  This method determines the next key / value pair from the received prior_required_dict
+    that should be processed.
+    """
+    # Return the first key / value pair that is not yet processed and whose value is an empty list.
+    for key, value in prior_required_dict.items():
+        if key in processed_tsr_ids:
+            continue
+        if not value:
+            return key
+    # Return the first key / value pair that is not yet processed and whose ids in value are all included
+    # in processed_tsr_ids.
+    for key, value in prior_required_dict.items():
+        if key in processed_tsr_ids:
+            continue
+        all_contained = True
+        for required_repository_id in value:
+            if required_repository_id not in processed_tsr_ids:
+                all_contained = False
+                break
+        if all_contained:
+            return key
+    # Return the first key / value pair that is not yet processed.  Hopefully this is all that is necessary
+    # at this point.
+    for key, value in prior_required_dict.items():
+        if key in processed_tsr_ids:
+            continue
+        return key
+
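+# Illustrative sketch (editorial): given a hypothetical
+#     prior_required_dict = { 'a': [], 'b': [ 'a' ], 'c': [ 'a', 'b' ] }
+# successive calls return 'a' (empty value), then 'b' once 'a' is in
+# processed_tsr_ids, then 'c' - i.e. dependencies are yielded before the
+# repositories that require them.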
+
+def get_repository_categories( app, id ):
+    """Get categories of a repository on the tool shed side from the database via id"""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.RepositoryCategoryAssociation ) \
+        .filter(app.model.RepositoryCategoryAssociation.table.c.repository_id == app.security.decode_id( id ))
+
+
+def get_repository_file_contents( app, file_path, repository_id, is_admin=False ):
+    """Return the display-safe contents of a repository file for display in a browser."""
+    safe_str = ''
+    if not is_path_browsable( app, file_path, repository_id, is_admin ):
+        log.warning( 'Request tries to access a file outside of the repository location. File path: %s', file_path )
+        return 'Invalid file path'
+    # Symlink targets are checked by is_path_browsable
+    if os.path.islink( file_path ):
+        safe_str = 'link to: ' + basic_util.to_html_string( os.readlink( file_path ) )
+        return safe_str
+    elif checkers.is_gzip( file_path ):
+        return '<br/>gzip compressed file<br/>'
+    elif checkers.is_bz2( file_path ):
+        return '<br/>bz2 compressed file<br/>'
+    elif checkers.check_zip( file_path ):
+        return '<br/>zip compressed file<br/>'
+    elif checkers.check_binary( file_path ):
+        return '<br/>Binary file<br/>'
+    else:
+        for i, line in enumerate( open( file_path ) ):
+            safe_str = '%s%s' % ( safe_str, basic_util.to_html_string( line ) )
+            # Stop reading after string is larger than MAX_CONTENT_SIZE.
+            if len( safe_str ) > MAX_CONTENT_SIZE:
+                large_str = \
+                    '<br/>File contents truncated because file size is larger than maximum viewing size of %s<br/>' % \
+                    util.nice_size( MAX_CONTENT_SIZE )
+                safe_str = '%s%s' % ( safe_str, large_str )
+                break
+
+        if len( safe_str ) > basic_util.MAX_DISPLAY_SIZE:
+            # Eliminate the middle of the file to display a file no larger than basic_util.MAX_DISPLAY_SIZE.
+            # This may not be ideal if the file is larger than MAX_CONTENT_SIZE.
+            join_by_str = \
+                "<br/><br/>...some text eliminated here because file size is larger than maximum viewing size of %s...<br/><br/>" % \
+                util.nice_size( basic_util.MAX_DISPLAY_SIZE )
+            safe_str = util.shrink_string_by_size( safe_str,
+                                                   basic_util.MAX_DISPLAY_SIZE,
+                                                   join_by=join_by_str,
+                                                   left_larger=True,
+                                                   beginning_on_size_error=True )
+        return safe_str
+
+
+def get_repository_files( folder_path ):
+    """Return the file hierarchy of a tool shed repository."""
+    contents = []
+    for item in os.listdir( folder_path ):
+        # Skip .hg directories
+        if item.startswith( '.hg' ):
+            continue
+        contents.append( item )
+    if contents:
+        contents.sort()
+    return contents
+
+
+def get_repository_from_refresh_on_change( app, **kwd ):
+    # The changeset_revision_select_field in several grids performs a refresh_on_change which sends in request parameters like
+    # changeset_revision_1, changeset_revision_2, etc.  One of the many select fields on the grid performed the refresh_on_change,
+    # so we loop through all of the received values to see which value is not the repository tip.  If we find it, we know the
+    # refresh_on_change occurred and we have the necessary repository id and change set revision to pass on.
+    repository_id = None
+    v = None
+    for k, v in kwd.items():
+        changeset_revision_str = 'changeset_revision_'
+        if k.startswith( changeset_revision_str ):
+            # Slice off the prefix; str.lstrip() strips a character set, not a prefix.
+            repository_id = app.security.encode_id( int( k[ len( changeset_revision_str ): ] ) )
+            repository = repository_util.get_repository_in_tool_shed( app, repository_id )
+            if repository.tip( app ) != v:
+                return v, repository
+    # This should never be reached; no received value differed from the repository tip.
+    return v, None
+
+
+def get_repository_type_from_tool_shed( app, tool_shed_url, name, owner ):
+    """
+    Send a request to the tool shed to retrieve the type for a repository defined by the
+    combination of a name and owner.
+    """
+    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
+    params = dict( name=name, owner=owner )
+    pathspec = [ 'repository', 'get_repository_type' ]
+    repository_type = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+    return repository_type
+
+
+def get_tool_dependency_definition_metadata_from_tool_shed( app, tool_shed_url, name, owner ):
+    """
+    Send a request to the tool shed to retrieve the current metadata for a
+    repository of type tool_dependency_definition defined by the combination
+    of a name and owner.
+    """
+    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
+    params = dict( name=name, owner=owner )
+    pathspec = [ 'repository', 'get_tool_dependency_definition_metadata' ]
+    metadata = util.url_get( tool_shed_url, password_mgr=app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
+    return metadata
+
+
+def get_tool_panel_config_tool_path_install_dir( app, repository ):
+    """
+    Return shed-related tool panel config, the tool_path configured in it, and the relative path to
+    the directory where the repository is installed.  This method assumes all repository tools are
+    defined in a single shed-related tool panel config.
+    """
+    tool_shed = common_util.remove_port_from_tool_shed_url( str( repository.tool_shed ) )
+    relative_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed,
+                                                   str( repository.owner ),
+                                                   str( repository.name ),
+                                                   str( repository.installed_changeset_revision ) )
+    # Get the relative tool installation paths from each of the shed tool configs.
+    shed_config_dict = repository.get_shed_config_dict( app )
+    if not shed_config_dict:
+        # Just pick a semi-random shed config.
+        for shed_config_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
+            if ( repository.dist_to_shed and shed_config_dict[ 'config_filename' ] == app.config.migrated_tools_config ) \
+                    or ( not repository.dist_to_shed and shed_config_dict[ 'config_filename' ] != app.config.migrated_tools_config ):
+                break
+    shed_tool_conf = shed_config_dict[ 'config_filename' ]
+    tool_path = shed_config_dict[ 'tool_path' ]
+    return shed_tool_conf, tool_path, relative_install_dir
+
+
+def get_tool_path_by_shed_tool_conf_filename( app, shed_tool_conf ):
+    """
+    Return the tool_path config setting for the received shed_tool_conf file by searching the tool box's in-memory list of shed_tool_confs for the
+    dictionary whose config_filename key has a value matching the received shed_tool_conf.
+    """
+    for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
+        config_filename = shed_tool_conf_dict[ 'config_filename' ]
+        if config_filename == shed_tool_conf:
+            return shed_tool_conf_dict[ 'tool_path' ]
+        else:
+            file_name = basic_util.strip_path( config_filename )
+            if file_name == shed_tool_conf:
+                return shed_tool_conf_dict[ 'tool_path' ]
+    return None
+
+
+def get_user( app, id ):
+    """Get a user from the database by id."""
+    sa_session = app.model.context.current
+    return sa_session.query( app.model.User ).get( app.security.decode_id( id ) )
+
+
+def handle_email_alerts( app, host, repository, content_alert_str='', new_repo_alert=False, admin_only=False ):
+    """
+    There are 2 complementary features that enable a tool shed user to receive email notification:
+    1. Within User Preferences, they can elect to receive email when the first (or first valid)
+       change set is produced for a new repository.
+    2. When viewing or managing a repository, they can check the box labeled "Receive email alerts"
+       which causes them to receive email alerts when updates to the repository occur.  This same feature
+       is available on a per-repository basis on the repository grid within the tool shed.
+
+    There are currently 4 scenarios for sending email notification when a change is made to a repository:
+    1. An admin user elects to receive email when the first change set is produced for a new repository
+       from User Preferences.  The change set does not have to include any valid content.  This allows for
+       the capture of inappropriate content being uploaded to new repositories.
+    2. A regular user elects to receive email when the first valid change set is produced for a new repository
+       from User Preferences.  This differs from 1 above in that the user will not receive email until a
+       change set that includes valid content is produced.
+    3. An admin user checks the "Receive email alerts" check box on the manage repository page.  Since the
+       user is an admin user, the email will include information about both HTML and image content that was
+       included in the change set.
+    4. A regular user checks the "Receive email alerts" check box on the manage repository page.  Since the
+       user is not an admin user, the email will not include any information about HTML or image content
+       that was included in the change set.
+    """
+    sa_session = app.model.context.current
+    repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+    sharable_link = repository_util.generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision=None )
+    smtp_server = app.config.smtp_server
+    if smtp_server and ( new_repo_alert or repository.email_alerts ):
+        # Send email alert to users that want them.
+        if app.config.email_from is not None:
+            email_from = app.config.email_from
+        elif host.split( ':' )[0] in [ 'localhost', '127.0.0.1', '0.0.0.0' ]:
+            email_from = 'galaxy-no-reply@' + socket.getfqdn()
+        else:
+            email_from = 'galaxy-no-reply@' + host.split( ':' )[0]
+        tip_changeset = repo.changelog.tip()
+        ctx = repo.changectx( tip_changeset )
+        try:
+            username = ctx.user().split()[0]
+        except Exception:
+            username = ctx.user()
+        # We'll use 2 template bodies because we only want to send content
+        # alerts to tool shed admin users.
+        if new_repo_alert:
+            template = new_repo_email_alert_template
+        else:
+            template = email_alert_template
+        display_date = hg_util.get_readable_ctx_date( ctx )
+        admin_body = string.Template( template ).safe_substitute( host=host,
+                                                                  sharable_link=sharable_link,
+                                                                  repository_name=repository.name,
+                                                                  revision='%s:%s' % ( str( ctx.rev() ), ctx ),
+                                                                  display_date=display_date,
+                                                                  description=ctx.description(),
+                                                                  username=username,
+                                                                  content_alert_str=content_alert_str )
+        body = string.Template( template ).safe_substitute( host=host,
+                                                            sharable_link=sharable_link,
+                                                            repository_name=repository.name,
+                                                            revision='%s:%s' % ( str( ctx.rev() ), ctx ),
+                                                            display_date=display_date,
+                                                            description=ctx.description(),
+                                                            username=username,
+                                                            content_alert_str='' )
+        admin_users = app.config.get( "admin_users", "" ).split( "," )
+        frm = email_from
+        if new_repo_alert:
+            subject = "Galaxy tool shed alert for new repository named %s" % str( repository.name )
+            subject = subject[ :80 ]
+            email_alerts = []
+            for user in sa_session.query( app.model.User ) \
+                                  .filter( and_( app.model.User.table.c.deleted == false(),
+                                                 app.model.User.table.c.new_repo_alert == true() ) ):
+                if admin_only:
+                    if user.email in admin_users:
+                        email_alerts.append( user.email )
+                else:
+                    email_alerts.append( user.email )
+        else:
+            subject = "Galaxy tool shed update alert for repository named %s" % str( repository.name )
+            email_alerts = json.loads( repository.email_alerts )
+        for email in email_alerts:
+            to = email.strip()
+            # Send it
+            try:
+                if to in admin_users:
+                    util.send_mail( frm, to, subject, admin_body, app.config )
+                else:
+                    util.send_mail( frm, to, subject, body, app.config )
+            except Exception:
+                log.exception( "An error occurred sending a tool shed repository update alert by email." )
+
+
+def have_shed_tool_conf_for_install( app ):
+    return bool( app.toolbox.dynamic_confs( include_migrated_tool_conf=False ) )
+
+
+def is_path_browsable( app, path, repository_id, is_admin=False ):
+    """
+    Detect whether the given path is browsable, i.e. within the
+    allowed repository folders. Admins can additionally browse folders
+    with tool dependencies.
+    """
+    if is_admin and is_path_within_dependency_dir( app, path ):
+        return True
+    return is_path_within_repo( app, path, repository_id)
+
+
+def is_path_within_dependency_dir( app, path ):
+    """
+    Detect whether the given path is within the tool_dependency_dir folder on the disk.
+    (Specified by the config option). Use to filter malicious symlinks targeting outside paths.
+    """
+    allowed = False
+    resolved_path = os.path.realpath( path )
+    tool_dependency_dir = app.config.get( 'tool_dependency_dir', None )
+    if tool_dependency_dir:
+        dependency_path = os.path.abspath( tool_dependency_dir )
+        allowed = os.path.commonprefix( [ dependency_path, resolved_path ] ) == dependency_path
+    return allowed
+
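+# Editorial note: os.path.commonprefix() compares strings character by
+# character, so a sibling such as '/deps-evil' shares the prefix '/deps' with a
+# tool_dependency_dir of '/deps'.  A stricter sketch (assuming the directory
+# itself need not match) would be:
+#     allowed = os.path.realpath( path ).startswith( dependency_path + os.sep )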
+
+def is_path_within_repo( app, path, repository_id ):
+    """
+    Detect whether the given path is within the repository folder on the disk.
+    Use to filter malicious symlinks targeting outside paths.
+    """
+    repo_path = os.path.abspath( repository_util.get_repository_by_id( app, repository_id ).repo_path( app ) )
+    resolved_path = os.path.realpath( path )
+    return os.path.commonprefix( [ repo_path, resolved_path ] ) == repo_path
+
+
+def open_repository_files_folder( app, folder_path, repository_id, is_admin=False ):
+    """
+    Return a list of dictionaries, each of which contains information for a file or directory contained
+    within a directory in a repository file hierarchy.
+    """
+    if not is_path_browsable( app, folder_path, repository_id, is_admin ):
+        log.warning( 'Request tries to access a folder outside of the allowed locations. Folder path: %s', folder_path )
+        return []
+    try:
+        files_list = get_repository_files( folder_path )
+    except OSError as e:
+        if str( e ).find( 'No such file or directory' ) >= 0:
+            # We have a repository with no contents.
+            return []
+        # Re-raise anything else so files_list is never referenced while unbound.
+        raise
+    folder_contents = []
+    for filename in files_list:
+        is_folder = False
+        full_path = os.path.join( folder_path, filename )
+        is_link = os.path.islink( full_path )
+        path_is_browsable = is_path_browsable( app, full_path, repository_id )
+        if is_link and not path_is_browsable:
+            log.warning( 'Valid folder contains a symlink outside of the repository location. Link found in: ' + str( full_path ) )
+        if filename:
+            if os.path.isdir( full_path ) and path_is_browsable:
+                # Append a '/' character so that our jquery dynatree will function properly.
+                filename = '%s/' % filename
+                full_path = '%s/' % full_path
+                is_folder = True
+            node = { "title": filename,
+                     "isFolder": is_folder,
+                     "isLazy": is_folder,
+                     "tooltip": full_path,
+                     "key": full_path }
+            folder_contents.append( node )
+    return folder_contents
+
+
+def set_image_paths( app, encoded_repository_id, text ):
+    """
+    Handle tool help image display for tools that are contained in repositories in
+    the tool shed or installed into Galaxy as well as image display in repository
+    README files.  This method will determine the location of the image file and
+    return the path to it that will enable the caller to open the file.
+    """
+    if text:
+        if repository_util.is_tool_shed_client( app ):
+            route_to_images = 'admin_toolshed/static/images/%s' % encoded_repository_id
+        else:
+            # We're in the tool shed.
+            route_to_images = '/repository/static/images/%s' % encoded_repository_id
+        # We used to require $PATH_TO_IMAGES and ${static_path}, but
+        # we now eliminate it if it's used.
+        text = text.replace( '$PATH_TO_IMAGES', '' )
+        text = text.replace( '${static_path}', '' )
+        # Use regex to instantiate routes into the defined image paths, but replace
+        # paths that start with neither http:// nor https://, which will allow for
+        # settings like .. image:: http_files/images/help.png
+        for match in re.findall( '.. image:: (?!http)/?(.+)', text ):
+            text = text.replace( match, match.replace( '/', '%2F' ) )
+        text = re.sub( r'\.\. image:: (?!https?://)/?(.+)', r'.. image:: %s/\1' % route_to_images, text )
+    return text
+
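+# Illustrative sketch (editorial): in the tool shed, a README line such as
+#     .. image:: images/workflow.png
+# is rewritten to
+#     .. image:: /repository/static/images/<encoded_repository_id>/images%2Fworkflow.png
+# while absolute http:// and https:// image URLs are left untouched.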
+
+def tool_shed_is_this_tool_shed( toolshed_base_url ):
+    """Determine if a tool shed is the current tool shed."""
+    cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url( toolshed_base_url )
+    cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url( str( url_for( '/', qualified=True ) ) )
+    return cleaned_toolshed_base_url == cleaned_tool_shed
diff --git a/lib/tool_shed/util/tool_dependency_util.py b/lib/tool_shed/util/tool_dependency_util.py
new file mode 100644
index 0000000..5239e4a
--- /dev/null
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -0,0 +1,391 @@
+import logging
+import os
+import shutil
+
+from sqlalchemy import and_
+
+from galaxy import util
+from galaxy.web.form_builder import SelectField
+from tool_shed.util import hg_util
+from tool_shed.util import xml_util
+
+log = logging.getLogger( __name__ )
+
+
+def build_tool_dependencies_select_field( app, tool_shed_repository, name, multiple=True, display='checkboxes',
+                                          uninstalled_only=False ):
+    """
+    Generate a SelectField consisting of the current list of tool dependency ids
+    for an installed tool shed repository.
+    """
+    tool_dependencies_select_field = SelectField( name=name, multiple=multiple, display=display )
+    for tool_dependency in tool_shed_repository.tool_dependencies:
+        if uninstalled_only:
+            if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                               app.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                continue
+        else:
+            if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                           app.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                continue
+        option_label = '%s version %s' % ( str( tool_dependency.name ), str( tool_dependency.version ) )
+        option_value = app.security.encode_id( tool_dependency.id )
+        tool_dependencies_select_field.add_option( option_label, option_value )
+    return tool_dependencies_select_field
+
+
+def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type, status, set_status=True ):
+    """Create or update a tool_dependency record in the Galaxy database."""
+    # Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled
+    # repository is being reinstalled.
+    context = app.install_model.context
+    # First see if an appropriate tool_dependency record exists for the received tool_shed_repository.
+    if version:
+        tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, name, version, type )
+    else:
+        tool_dependency = get_tool_dependency_by_name_type_repository( app, tool_shed_repository, name, type )
+    if tool_dependency:
+        # In some cases we should not override the current status of an existing tool_dependency, so do so only
+        # if set_status is True.
+        if set_status:
+            set_tool_dependency_attributes(app, tool_dependency=tool_dependency, status=status)
+    else:
+        # Create a new tool_dependency record for the tool_shed_repository.
+        debug_msg = 'Creating a new record for version %s of tool dependency %s for revision %s of repository %s.  ' % \
+            ( str( version ), str( name ), str( tool_shed_repository.changeset_revision ), str( tool_shed_repository.name ) )
+        debug_msg += 'The status is being set to %s.' % str( status )
+        log.debug( debug_msg )
+        tool_dependency = app.install_model.ToolDependency( tool_shed_repository.id, name, version, type, status )
+        context.add( tool_dependency )
+        context.flush()
+    return tool_dependency
+
+
+def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ):
+    """
+    Create or update a ToolDependency for each entry in tool_dependencies_config.  This method is called when
+    installing a new tool_shed_repository.
+    """
+    tool_dependency_objects = []
+    shed_config_dict = tool_shed_repository.get_shed_config_dict( app )
+    if shed_config_dict.get( 'tool_path' ):
+        relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir )
+    # Get the tool_dependencies.xml file from the repository.
+    tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+    tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+    if tree is None:
+        return tool_dependency_objects
+    root = tree.getroot()
+    for elem in root:
+        tool_dependency_type = elem.tag
+        if tool_dependency_type == 'package':
+            name = elem.get( 'name', None )
+            version = elem.get( 'version', None )
+            if name and version:
+                status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED
+                tool_dependency = create_or_update_tool_dependency( app,
+                                                                    tool_shed_repository,
+                                                                    name=name,
+                                                                    version=version,
+                                                                    type=tool_dependency_type,
+                                                                    status=status,
+                                                                    set_status=set_status )
+                tool_dependency_objects.append( tool_dependency )
+        elif tool_dependency_type == 'set_environment':
+            for env_elem in elem:
+                # <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+                name = env_elem.get( 'name', None )
+                action = env_elem.get( 'action', None )
+                if name and action:
+                    status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED
+                    tool_dependency = create_or_update_tool_dependency( app,
+                                                                        tool_shed_repository,
+                                                                        name=name,
+                                                                        version=None,
+                                                                        type=tool_dependency_type,
+                                                                        status=status,
+                                                                        set_status=set_status )
+                    tool_dependency_objects.append( tool_dependency )
+    return tool_dependency_objects
+
+
+def get_download_url_for_platform( url_templates, platform_info_dict ):
+    '''
+    Compare the dict returned by get_platform_info() with the values specified in the url_template element. Return
+    true if and only if all defined attributes match the corresponding dict entries. If an entry is not
+    defined in the url_template element, it is assumed to be irrelevant at this stage. For example,
+    <url_template os="darwin">http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.${architecture}/faToTwoBit</url_template>
+    where the OS must be 'darwin', but the architecture is filled in later using string.Template.
+    '''
+    os_ok = False
+    architecture_ok = False
+    for url_template in url_templates:
+        os_name = url_template.get( 'os', None )
+        architecture = url_template.get( 'architecture', None )
+        if os_name:
+            if os_name.lower() == platform_info_dict[ 'os' ]:
+                os_ok = True
+            else:
+                os_ok = False
+        else:
+            os_ok = True
+        if architecture:
+            if architecture.lower() == platform_info_dict[ 'architecture' ]:
+                architecture_ok = True
+            else:
+                architecture_ok = False
+        else:
+            architecture_ok = True
+        if os_ok and architecture_ok:
+            return url_template
+    return None
+
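+# Illustrative sketch (editorial): with platform_info_dict
+# { 'os': 'linux', 'architecture': 'x86_64' }, a template carrying
+# os="darwin" architecture="x86_64" is rejected, while the first template that
+# matches both attributes, or defines neither, is returned.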
+
+def get_platform_info_dict():
+    '''Return a dict with information about the current platform.'''
+    platform_dict = {}
+    sysname, nodename, release, version, machine = os.uname()
+    platform_dict[ 'os' ] = sysname.lower()
+    platform_dict[ 'architecture' ] = machine.lower()
+    return platform_dict
+
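+# Illustrative sketch (editorial): on a typical 64-bit Linux host this returns
+# { 'os': 'linux', 'architecture': 'x86_64' }, i.e. the lowercased sysname and
+# machine fields of os.uname().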
+
+def get_tool_dependency( app, id ):
+    """Get a tool_dependency from the database via id"""
+    return app.install_model.context.query( app.install_model.ToolDependency ).get( app.security.decode_id( id ) )
+
+
+def get_tool_dependency_by_name_type_repository( app, repository, name, type ):
+    context = app.install_model.context
+    return context.query( app.install_model.ToolDependency ) \
+                  .filter( and_( app.install_model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
+                                 app.install_model.ToolDependency.table.c.name == name,
+                                 app.install_model.ToolDependency.table.c.type == type ) ) \
+                  .first()
+
+
+def get_tool_dependency_by_name_version_type( app, name, version, type ):
+    context = app.install_model.context
+    return context.query( app.install_model.ToolDependency ) \
+                  .filter( and_( app.install_model.ToolDependency.table.c.name == name,
+                                 app.install_model.ToolDependency.table.c.version == version,
+                                 app.install_model.ToolDependency.table.c.type == type ) ) \
+                  .first()
+
+
+def get_tool_dependency_by_name_version_type_repository( app, repository, name, version, type ):
+    context = app.install_model.context
+    return context.query( app.install_model.ToolDependency ) \
+                  .filter( and_( app.install_model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
+                                 app.install_model.ToolDependency.table.c.name == name,
+                                 app.install_model.ToolDependency.table.c.version == version,
+                                 app.install_model.ToolDependency.table.c.type == type ) ) \
+                  .first()
+
+
+def get_tool_dependency_ids( as_string=False, **kwd ):
+    tool_dependency_id = kwd.get( 'tool_dependency_id', None )
+    if 'tool_dependency_ids' in kwd:
+        tool_dependency_ids = util.listify( kwd[ 'tool_dependency_ids' ] )
+    elif 'id' in kwd:
+        tool_dependency_ids = util.listify( kwd[ 'id' ] )
+    elif 'inst_td_ids' in kwd:
+        tool_dependency_ids = util.listify( kwd[ 'inst_td_ids' ] )
+    elif 'uninstalled_tool_dependency_ids' in kwd:
+        tool_dependency_ids = util.listify( kwd[ 'uninstalled_tool_dependency_ids' ] )
+    else:
+        tool_dependency_ids = []
+    if tool_dependency_id and tool_dependency_id not in tool_dependency_ids:
+        tool_dependency_ids.append( tool_dependency_id )
+    if as_string:
+        return ','.join( tool_dependency_ids )
+    return tool_dependency_ids
+
+
+def get_tool_dependency_install_dir( app, repository_name, repository_owner, repository_changeset_revision, tool_dependency_type,
+                                     tool_dependency_name, tool_dependency_version ):
+    if tool_dependency_type == 'package':
+        return os.path.abspath( os.path.join( app.config.tool_dependency_dir,
+                                              tool_dependency_name,
+                                              tool_dependency_version,
+                                              repository_owner,
+                                              repository_name,
+                                              repository_changeset_revision ) )
+    if tool_dependency_type == 'set_environment':
+        return os.path.abspath( os.path.join( app.config.tool_dependency_dir,
+                                              'environment_settings',
+                                              tool_dependency_name,
+                                              repository_owner,
+                                              repository_name,
+                                              repository_changeset_revision ) )
+
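+# Illustrative sketch (editorial): assuming a hypothetical tool_dependency_dir
+# of /deps, a package dependency samtools 1.2 owned by devteam in repository
+# 'samtools' at revision abc123 resolves to
+#     /deps/samtools/1.2/devteam/samtools/abc123
+# while set_environment dependencies live under /deps/environment_settings/.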
+
+def parse_package_elem( package_elem, platform_info_dict=None, include_after_install_actions=True ):
+    """
+    Parse a <package> element within a tool dependency definition and return a list of action tuples.
+    This method is called when setting metadata on a repository that includes a tool_dependencies.xml
+    file or when installing a repository that includes a tool_dependencies.xml file.  If installing,
+    platform_info_dict must be a valid dictionary and include_after_install_actions must be True.
+    """
+    # The actions_elem_tuples list contains <actions> tag sets (possibly inside of an <actions_group>
+    # tag set) to be processed in the order they are defined in the tool_dependencies.xml file.
+    actions_elem_tuples = []
+    # The tag sets that will go into the actions_elem_list are those that install a compiled binary if
+    # the architecture and operating system match its defined attributes.  If a compiled binary is not
+    # installed, the first <actions> tag set [following those that have the os and architecture attributes]
+    # that does not have os or architecture attributes will be processed.  This tag set must contain the
+    # recipe for downloading and compiling source.
+    actions_elem_list = []
+    for elem in package_elem:
+        if elem.tag == 'actions':
+            # We have an <actions> tag that should not be matched against a specific combination of
+            # architecture and operating system.
+            in_actions_group = False
+            actions_elem_tuples.append( ( in_actions_group, elem ) )
+        elif elem.tag == 'actions_group':
+            # We have an <actions_group> element, and its child <actions> elements should therefore be
+            # compared with the current operating system and processor architecture.
+            in_actions_group = True
+            # Record the number of <actions> elements so we can filter out any <action> elements that precede
+            # <actions> elements.
+            actions_elem_count = len( elem.findall( 'actions' ) )
+            # Record the number of <actions> elements that have both architecture and os specified, in order
+            # to filter out any platform-independent <actions> elements that come before platform-specific
+            # <actions> elements.
+            platform_actions_elements = []
+            for actions_elem in elem.findall( 'actions' ):
+                if actions_elem.get( 'architecture' ) is not None and actions_elem.get( 'os' ) is not None:
+                    platform_actions_elements.append( actions_elem )
+            platform_actions_element_count = len( platform_actions_elements )
+            platform_actions_elements_processed = 0
+            actions_elems_processed = 0
+            # The tag sets that will go into the after_install_actions list are <action> tags instead of <actions>
+            # tags.  These will be processed only if they are at the very end of the <actions_group> tag set (after
+            # all <actions> tag sets). See below for details.
+            after_install_actions = []
+            # Inspect the <actions_group> element and build the actions_elem_list and the after_install_actions list.
+            for child_element in elem:
+                if child_element.tag == 'actions':
+                    actions_elems_processed += 1
+                    system = child_element.get( 'os' )
+                    architecture = child_element.get( 'architecture' )
+                    # Skip <actions> tags that have only one of architecture or os specified, in order for the
+                    # count in platform_actions_elements_processed to remain accurate.
+                    if ( system and not architecture ) or ( architecture and not system ):
+                        log.debug( 'Error: Both architecture and os attributes must be specified in an <actions> tag.' )
+                        continue
+                    # Since we are inside an <actions_group> tag set, compare it with our current platform information
+                    # and filter the <actions> tag sets that don't match. Require both the os and architecture attributes
+                    # to be defined in order to find a match.
+                    if system and architecture:
+                        platform_actions_elements_processed += 1
+                        # If either the os or architecture do not match the platform, this <actions> tag will not be
+                        # considered a match. Skip it and proceed with checking the next one.
+                        if platform_info_dict:
+                            if platform_info_dict[ 'os' ] != system or platform_info_dict[ 'architecture' ] != architecture:
+                                continue
+                        else:
+                            # We must not be installing a repository into Galaxy, so determining if we can install a
+                            # binary is not necessary.
+                            continue
+                    else:
+                        # <actions> tags without both os and architecture attributes are only allowed to be specified
+                        # after platform-specific <actions> tags. If we find a platform-independent <actions> tag before
+                        # all platform-specific <actions> tags have been processed, skip it and log an error.
+                        if platform_actions_elements_processed < platform_actions_element_count:
+                            debug_msg = 'Error: <actions> tags without os and architecture attributes are only allowed '
+                            debug_msg += 'after all <actions> tags with os and architecture attributes have been defined.  '
+                            debug_msg += 'Skipping the <actions> tag set with no os or architecture attributes that has '
+                            debug_msg += 'been defined between two <actions> tag sets that have these attributes defined.  '
+                            log.debug( debug_msg )
+                            continue
+                    # If we reach this point, it means one of two things: 1) The system and architecture attributes are
+                    # not defined in this <actions> tag, or 2) The system and architecture attributes are defined, and
+                    # they are an exact match for the current platform. Append the child element to the list of elements
+                    # to process.
+                    actions_elem_list.append( child_element )
+                elif child_element.tag == 'action':
+                    # Any <action> tags within an <actions_group> tag set must come after all <actions> tags.
+                    if actions_elems_processed == actions_elem_count:
+                        # If all <actions> elements have been processed, then this <action> element can be appended to the
+                        # list of actions to execute within this group.
+                        after_install_actions.append( child_element )
+                    else:
+                        # If any <actions> elements remain to be processed, then log a message stating that <action>
+                        # elements are not allowed to precede any <actions> elements within an <actions_group> tag set.
+                        debug_msg = 'Error: <action> tags are only allowed at the end of an <actions_group> tag set after '
+                        debug_msg += 'all <actions> tags.  Skipping <%s> element with type %s.' % \
+                            ( child_element.tag, child_element.get( 'type', 'unknown' ) )
+                        log.debug( debug_msg )
+                        continue
+            if platform_info_dict is None and not include_after_install_actions:
+                # We must be setting metadata on a repository.
+                if len( actions_elem_list ) >= 1:
+                    actions_elem_tuples.append( ( in_actions_group, actions_elem_list[ 0 ] ) )
+                else:
+                    # We are processing a recipe that contains only an <actions_group> tag set for installing a binary,
+                    # but does not include an additional recipe for installing and compiling from source.
+                    actions_elem_tuples.append( ( in_actions_group, [] ) )
+            elif platform_info_dict is not None and include_after_install_actions:
+                # We must be installing a repository.
+                if after_install_actions:
+                    actions_elem_list.extend( after_install_actions )
+                actions_elem_tuples.append( ( in_actions_group, actions_elem_list ) )
+        else:
+            # Skip any element that is not <actions> or <actions_group> - this will skip comments, <repository> tags
+            # and <readme> tags.
+            in_actions_group = False
+            continue
+    return actions_elem_tuples
+
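+# For reference, a sketch of the <actions_group> layout the parser above expects (package
+# names and URLs are illustrative only, not from an actual repository): platform-specific
+# <actions> tag sets come first, an optional platform-independent <actions> fallback for
+# compiling from source follows, and trailing <action> tags are applied after installation.
+#
+#     <actions_group>
+#         <actions os="linux" architecture="x86_64">
+#             <action type="download_by_url">http://example.org/pkg-linux-x64.tar.gz</action>
+#         </actions>
+#         <actions os="darwin" architecture="x86_64">
+#             <action type="download_by_url">http://example.org/pkg-osx-x64.tar.gz</action>
+#         </actions>
+#         <actions>
+#             <action type="download_by_url">http://example.org/pkg-source.tar.gz</action>
+#             <action type="shell_command">make</action>
+#         </actions>
+#         <action type="set_environment">
+#             <environment_variable action="set_to" name="EXAMPLE_HOME">$INSTALL_DIR</environment_variable>
+#         </action>
+#     </actions_group>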
+
+def remove_tool_dependency( app, tool_dependency ):
+    """The received tool_dependency must be in an error state."""
+    context = app.install_model.context
+    dependency_install_dir = tool_dependency.installation_directory( app )
+    removed, error_message = remove_tool_dependency_installation_directory( dependency_install_dir )
+    if removed:
+        tool_dependency.status = app.install_model.ToolDependency.installation_status.UNINSTALLED
+        tool_dependency.error_message = None
+        context.add( tool_dependency )
+        context.flush()
+        # Since the received tool_dependency is in an error state, nothing will need to be changed in any
+        # of the in-memory dictionaries in the installed_repository_manager because changing the state from
+        # error to uninstalled requires no in-memory changes.
+    return removed, error_message
+
+
+def remove_tool_dependency_installation_directory( dependency_install_dir ):
+    if os.path.exists( dependency_install_dir ):
+        try:
+            shutil.rmtree( dependency_install_dir )
+            removed = True
+            error_message = ''
+            log.debug( "Removed tool dependency installation directory: %s" % str( dependency_install_dir ) )
+        except Exception as e:
+            removed = False
+            error_message = "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) )
+            log.warning( error_message )
+    else:
+        removed = True
+        error_message = ''
+    return removed, error_message
+
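+# Illustrative return-value handling for the helper above (the path is hypothetical):
+#
+#     removed, error_message = remove_tool_dependency_installation_directory( '/deps/samtools/0.1.19' )
+#     if not removed:
+#         log.debug( error_message )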
+
+def set_tool_dependency_attributes( app, tool_dependency, status, error_message=None ):
+    sa_session = app.install_model.context
+    if status == app.install_model.ToolDependency.installation_status.UNINSTALLED:
+        installation_directory = tool_dependency.installation_directory( app )
+        remove_tool_dependency_installation_directory( installation_directory )
+    tool_dependency.error_message = error_message
+    if str( tool_dependency.status ) != str( status ):
+        tool_shed_repository = tool_dependency.tool_shed_repository
+        debug_msg = 'Updating an existing record for version %s of tool dependency %s for revision %s of repository %s ' % \
+            ( str( tool_dependency.version ), str( tool_dependency.name ), str( tool_shed_repository.changeset_revision ), str( tool_shed_repository.name ) )
+        debug_msg += 'by updating the status from %s to %s.' % ( str( tool_dependency.status ), str( status ) )
+        log.debug( debug_msg )
+    tool_dependency.status = status
+    sa_session.add( tool_dependency )
+    sa_session.flush()
+    return tool_dependency
diff --git a/lib/tool_shed/util/tool_util.py b/lib/tool_shed/util/tool_util.py
new file mode 100644
index 0000000..e5e4b2c
--- /dev/null
+++ b/lib/tool_shed/util/tool_util.py
@@ -0,0 +1,279 @@
+import logging
+import os
+import shutil
+
+import galaxy.tools
+from galaxy import util
+from galaxy.util import checkers
+from galaxy.util import unicodify
+from galaxy.util.expressions import ExpressionContext
+from galaxy.web.form_builder import SelectField
+
+from tool_shed.util import basic_util
+
+log = logging.getLogger( __name__ )
+
+
+def build_shed_tool_conf_select_field( app ):
+    """Build a SelectField whose options are the keys in app.toolbox.shed_tool_confs."""
+    options = []
+    for dynamic_tool_conf_filename in app.toolbox.dynamic_conf_filenames():
+        if dynamic_tool_conf_filename.startswith( './' ):
+            option_label = dynamic_tool_conf_filename.replace( './', '', 1 )
+        else:
+            option_label = dynamic_tool_conf_filename
+        options.append( ( option_label, dynamic_tool_conf_filename ) )
+    select_field = SelectField( name='shed_tool_conf' )
+    for option_tup in options:
+        select_field.add_option( option_tup[ 0 ], option_tup[ 1 ] )
+    return select_field
+
+
+def build_tool_panel_section_select_field( app ):
+    """Build a SelectField whose options are the sections of the current in-memory toolbox."""
+    options = []
+    for section_id, section_name in app.toolbox.get_sections():
+        options.append( ( section_name, section_id ) )
+    select_field = SelectField( name='tool_panel_section_id', field_id='tool_panel_section_select' )
+    for option_tup in options:
+        select_field.add_option( option_tup[ 0 ], option_tup[ 1 ] )
+    return select_field
+
+
+def copy_sample_file( app, filename, dest_path=None ):
+    """
+    Copy xxx.sample to dest_path/xxx.sample and dest_path/xxx.  The default value for dest_path
+    is ~/tool-data.
+    """
+    if dest_path is None:
+        dest_path = os.path.abspath( app.config.tool_data_path )
+    sample_file_name = basic_util.strip_path( filename )
+    copied_file = sample_file_name.replace( '.sample', '' )
+    full_source_path = os.path.abspath( filename )
+    full_destination_path = os.path.join( dest_path, sample_file_name )
+    # Don't copy a file to itself - not sure how this happens, but sometimes it does...
+    if full_source_path != full_destination_path:
+        # It's ok to overwrite the .sample version of the file.
+        shutil.copy( full_source_path, full_destination_path )
+    # Only create the .loc file if it does not yet exist.  We don't overwrite it in case it
+    # contains stuff proprietary to the local instance.
+    if not os.path.lexists( os.path.join( dest_path, copied_file ) ):
+        shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
+
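+# Illustrative usage of copy_sample_file (the file name is hypothetical; app is a Galaxy
+# application object whose config.tool_data_path points at the local tool-data directory):
+#
+#     copy_sample_file( app, 'tool-data/blast_db.loc.sample' )
+#
+# This copies blast_db.loc.sample into tool_data_path and also creates blast_db.loc there,
+# unless a blast_db.loc already exists, in which case the existing .loc file is left untouched.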
+
+def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
+    """
+    Copy all appropriate files to dest_path in the local Galaxy environment that have not
+    already been copied.  Those that have been copied are contained in sample_files_copied.
+    The default value for dest_path is ~/tool-data.  We need to be careful to copy only
+    appropriate files here because tool shed repositories can contain files ending in .sample
+    that should not be copied to the ~/tool-data directory.
+    """
+    filenames_not_to_copy = [ 'tool_data_table_conf.xml.sample' ]
+    sample_files_copied = util.listify( sample_files_copied )
+    for filename in sample_files:
+        filename_sans_path = os.path.split( filename )[ 1 ]
+        if filename_sans_path not in filenames_not_to_copy and filename not in sample_files_copied:
+            if tool_path:
+                filename = os.path.join( tool_path, filename )
+            # Attempt to ensure we're copying an appropriate file.
+            if is_data_index_sample_file( filename ):
+                copy_sample_file( app, filename, dest_path=dest_path )
+
+
+def generate_message_for_invalid_tools( app, invalid_file_tups, repository, metadata_dict, as_html=True,
+                                        displaying_invalid_tool=False ):
+    if as_html:
+        new_line = '<br/>'
+        bold_start = '<b>'
+        bold_end = '</b>'
+    else:
+        new_line = '\n'
+        bold_start = ''
+        bold_end = ''
+    message = ''
+    if app.name == 'galaxy':
+        tip_rev = str( repository.changeset_revision )
+    else:
+        tip_rev = str( repository.tip( app ) )
+    if not displaying_invalid_tool:
+        if metadata_dict:
+            message += "Metadata may have been defined for some items in revision '%s'.  " % tip_rev
+            message += "Correct the following problems if necessary and reset metadata.%s" % new_line
+        else:
+            message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % tip_rev
+            message += "installed into a local Galaxy instance.  Correct the following problems and reset metadata.%s" % new_line
+    for itc_tup in invalid_file_tups:
+        tool_file, exception_msg = itc_tup
+        if exception_msg.find( 'No such file or directory' ) >= 0:
+            exception_items = exception_msg.split()
+            missing_file_items = exception_items[ 7 ].split( '/' )
+            missing_file = missing_file_items[ -1 ].rstrip( '\'' )
+            if missing_file.endswith( '.loc' ):
+                sample_ext = '%s.sample' % missing_file
+            else:
+                sample_ext = missing_file
+            correction_msg = "This file refers to a missing file %s%s%s.  " % \
+                ( bold_start, str( missing_file ), bold_end )
+            correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % \
+                ( bold_start, sample_ext, bold_end )
+        else:
+            if as_html:
+                correction_msg = exception_msg
+            else:
+                correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
+        message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
+    return message
+
+
+def get_headers( fname, sep, count=60, is_multi_byte=False ):
+    """Returns a list with the first 'count' lines split by 'sep'."""
+    headers = []
+    with open( fname ) as fh:
+        for idx, line in enumerate( fh ):
+            if idx == count:
+                break
+            line = line.rstrip( '\n\r' )
+            if is_multi_byte:
+                line = unicodify( line, 'utf-8' )
+                sep = sep.encode( 'utf-8' )
+            headers.append( line.split( sep ) )
+    return headers
+
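+# A minimal sketch of what get_headers returns, assuming a hypothetical tab-separated file
+# 'data.loc' whose first two lines are 'chr1\t10\t20' and 'chr2\t30\t40':
+#
+#     get_headers( 'data.loc', '\t' )  # -> [ [ 'chr1', '10', '20' ], [ 'chr2', '30', '40' ] ]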
+
+def get_tool_path_install_dir( partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
+    for elem in config_elems:
+        if elem.tag == 'tool':
+            if elem.get( 'guid' ) == tool_dict[ 'guid' ]:
+                tool_path = shed_tool_conf_dict[ 'tool_path' ]
+                relative_install_dir = os.path.join( tool_path, partial_install_dir )
+                return tool_path, relative_install_dir
+        elif elem.tag == 'section':
+            for section_elem in elem:
+                if section_elem.tag == 'tool':
+                    if section_elem.get( 'guid' ) == tool_dict[ 'guid' ]:
+                        tool_path = shed_tool_conf_dict[ 'tool_path' ]
+                        relative_install_dir = os.path.join( tool_path, partial_install_dir )
+                        return tool_path, relative_install_dir
+    return None, None
+
+
+def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups, sample_files_copied ):
+    """
+    Inspect each tool to see if it has any input parameters that are dynamically
+    generated select lists that depend on a .loc file.  This method is not called
+    from the tool shed, but from Galaxy when a repository is being installed.
+    """
+    for index, repository_tools_tup in enumerate( repository_tools_tups ):
+        tup_path, guid, repository_tool = repository_tools_tup
+        params_with_missing_index_file = repository_tool.params_with_missing_index_file
+        for param in params_with_missing_index_file:
+            options = param.options
+            missing_file_name = basic_util.strip_path( options.missing_index_file )
+            if missing_file_name not in sample_files_copied:
+                # The repository must contain the required xxx.loc.sample file.
+                for sample_file in sample_files:
+                    sample_file_name = basic_util.strip_path( sample_file )
+                    if sample_file_name == '%s.sample' % missing_file_name:
+                        copy_sample_file( app, sample_file )
+                        if options.tool_data_table and options.tool_data_table.missing_index_file:
+                            options.tool_data_table.handle_found_index_file( options.missing_index_file )
+                        sample_files_copied.append( options.missing_index_file )
+                        break
+        # Reload the tool into the local list of repository_tools_tups.
+        repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid, use_cached=False )
+        repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
+    return repository_tools_tups, sample_files_copied
+
+
+def is_column_based( fname, sep='\t', skip=0, is_multi_byte=False ):
+    """See if the file is column based with respect to a separator."""
+    headers = get_headers( fname, sep, is_multi_byte=is_multi_byte )
+    count = 0
+    if not headers:
+        return False
+    for hdr in headers[ skip: ]:
+        if hdr and hdr[ 0 ] and not hdr[ 0 ].startswith( '#' ):
+            if len( hdr ) > 1:
+                count = len( hdr )
+            break
+    if count < 2:
+        return False
+    for hdr in headers[ skip: ]:
+        if hdr and hdr[ 0 ] and not hdr[ 0 ].startswith( '#' ):
+            if len( hdr ) != count:
+                return False
+    return True
+
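+# For example (file names hypothetical): a .loc file whose non-comment lines all contain the
+# same number of tab-separated fields, with at least two columns, is considered column based,
+# while a free-text README is not:
+#
+#     is_column_based( 'blast_db.loc' )  # True for consistent tabular content
+#     is_column_based( 'README.txt' )    # False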
+
+def is_data_index_sample_file( file_path ):
+    """
+    Attempt to determine if a .sample file is appropriate for copying to ~/tool-data when
+    a tool shed repository is being installed into a Galaxy instance.
+    """
+    # Currently most data index files are tabular, so check that first.  We'll assume that
+    # if the file is tabular, it's ok to copy.
+    if is_column_based( file_path ):
+        return True
+    # If the file is any of the following, don't copy it.
+    if checkers.check_html( file_path ):
+        return False
+    if checkers.check_image( file_path ):
+        return False
+    if checkers.check_binary( name=file_path ):
+        return False
+    if checkers.is_bz2( file_path ):
+        return False
+    if checkers.is_gzip( file_path ):
+        return False
+    if checkers.check_zip( file_path ):
+        return False
+    # Default to copying the file if none of the above are true.
+    return True
+
+
+def new_state( trans, tool, invalid=False ):
+    """Create a new `DefaultToolState` for the received tool.  Only inputs on the first page will be initialized."""
+    state = galaxy.tools.DefaultToolState()
+    state.inputs = {}
+    if invalid:
+        # We're attempting to display a tool in the tool shed that has been determined to have errors, so is invalid.
+        return state
+    try:
+        # Attempt to generate the tool state using the standard Galaxy-side code
+        return tool.new_state( trans )
+    except Exception as e:
+        # Fall back to building tool state as below
+        log.debug( 'Failed to build tool state for tool "%s" using standard method, will try to fall back on custom method: %s', tool.id, e )
+    inputs = tool.inputs_by_page[ 0 ]
+    context = ExpressionContext( state.inputs, parent=None )
+    for input in inputs.values():
+        try:
+            state.inputs[ input.name ] = input.get_initial_value( trans, context )
+        except:
+            # FIXME: not all values should be an empty list
+            state.inputs[ input.name ] = []
+    return state
+
+
+def panel_entry_per_tool( tool_section_dict ):
+    # Return True if tool_section_dict looks like this:
+    # {<Tool guid> :
+    #    [{ tool_config : <tool_config_file>,
+    #       id: <ToolSection id>,
+    #       version : <ToolSection version>,
+    #       name : <ToolSection name>}]}
+    # But not like this:
+    # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+    if not tool_section_dict:
+        return False
+    if len( tool_section_dict ) != 3:
+        return True
+    for k, v in tool_section_dict.items():
+        if k not in [ 'id', 'version', 'name' ]:
+            return True
+    return False
+
+
+def reload_upload_tools( app ):
+    if hasattr( app, 'toolbox' ):
+        app.toolbox.handle_datatypes_changed()
diff --git a/lib/tool_shed/util/web_util.py b/lib/tool_shed/util/web_util.py
new file mode 100644
index 0000000..1f1c9a6
--- /dev/null
+++ b/lib/tool_shed/util/web_util.py
@@ -0,0 +1,22 @@
+from markupsafe import escape as raw_escape
+from galaxy.util import smart_str
+
+ALLOWED_ELEMENTS = ["<b>", "</b>", "<br/>"]
+ALLOWED_MAP = dict((x, raw_escape(x)) for x in ALLOWED_ELEMENTS)
+
+
+def escape( string ):
+    """ A tool shed variant of markupsafe.escape that allows a select few
+    HTML elements that are repeatedly used in messages created deep
+    in the toolshed components. Ideally abstract things would be produced
+    in these components and messages in the views or client side - this is
+    what should be worked toward - but for now - we have this hack.
+
+    >>> escape("A <b>repo</b>")
+    u'A <b>repo</b>'
+    """
+    escaped = smart_str( raw_escape( string ), encoding="ascii", errors="replace" )
+    # Unescape few selected tags.
+    for key, value in ALLOWED_MAP.items():
+        escaped = escaped.replace(value, key)
+    return escaped
diff --git a/lib/tool_shed/util/workflow_util.py b/lib/tool_shed/util/workflow_util.py
new file mode 100644
index 0000000..d25f563
--- /dev/null
+++ b/lib/tool_shed/util/workflow_util.py
@@ -0,0 +1,416 @@
+""" Tool shed helper methods for dealing with workflows - only two methods are
+utilized outside of this modules - generate_workflow_image and import_workflow.
+"""
+import json
+import logging
+import os
+
+import galaxy.tools
+import galaxy.tools.parameters
+from galaxy.util.sanitize_html import sanitize_html
+from galaxy.workflow.modules import (
+    module_types,
+    ToolModule,
+    WorkflowModuleFactory
+)
+from galaxy.workflow.render import WorkflowCanvas
+from galaxy.workflow.steps import attach_ordered_steps
+from tool_shed.tools import tool_validator
+from tool_shed.util import (
+    encoding_util,
+    metadata_util,
+    repository_util
+)
+
+log = logging.getLogger( __name__ )
+
+
+class RepoToolModule( ToolModule ):
+
+    type = "tool"
+
+    def __init__( self, trans, repository_id, changeset_revision, tools_metadata, tool_id ):
+        self.trans = trans
+        self.tools_metadata = tools_metadata
+        self.tool_id = tool_id
+        self.tool = None
+        self.errors = None
+        self.tv = tool_validator.ToolValidator( trans.app )
+        if trans.webapp.name == 'tool_shed':
+            # We're in the tool shed.
+            for tool_dict in tools_metadata:
+                if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
+                    repository, self.tool, message = self.tv.load_tool_from_changeset_revision( repository_id,
+                                                                                                changeset_revision,
+                                                                                                tool_dict[ 'tool_config' ] )
+                    if message and self.tool is None:
+                        self.errors = 'unavailable'
+                    break
+        else:
+            # We're in Galaxy.
+            self.tool = trans.app.toolbox.get_tool( self.tool_id )
+            if self.tool is None:
+                self.errors = 'unavailable'
+        self.post_job_actions = {}
+        self.workflow_outputs = []
+        self.state = None
+
+    @classmethod
+    def from_dict( Class, trans, step_dict, repository_id, changeset_revision, tools_metadata, secure=True ):
+        tool_id = step_dict[ 'tool_id' ]
+        module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
+        module.state = galaxy.tools.DefaultToolState()
+        if module.tool is not None:
+            module.state.decode( step_dict[ "tool_state" ], module.tool, module.trans.app )
+        module.errors = step_dict.get( "tool_errors", None )
+        return module
+
+    @classmethod
+    def from_workflow_step( Class, trans, step, repository_id, changeset_revision, tools_metadata ):
+        module = Class( trans, repository_id, changeset_revision, tools_metadata, step.tool_id )
+        module.state = galaxy.tools.DefaultToolState()
+        if module.tool:
+            module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
+        else:
+            module.state.inputs = {}
+        module.errors = step.tool_errors
+        return module
+
+    def get_data_inputs( self ):
+        data_inputs = []
+
+        def callback( input, prefixed_name, prefixed_label, **kwargs ):
+            if isinstance( input, galaxy.tools.parameters.basic.DataToolParameter ):
+                data_inputs.append( dict( name=prefixed_name,
+                                          label=prefixed_label,
+                                          extensions=input.extensions ) )
+        if self.tool:
+            try:
+                galaxy.tools.parameters.visit_input_values( self.tool.inputs, self.state.inputs, callback )
+            except:
+                # TODO have this actually use default parameters?  Fix at
+                # refactor, needs to be discussed wrt: reproducibility though.
+                log.exception("Tool parse failed for %s -- this indicates incompatibility of local tool version with expected version by the workflow." % self.tool.id)
+        return data_inputs
+
+    def get_data_outputs( self ):
+        data_outputs = []
+        if self.tool:
+            data_inputs = None
+            for name, tool_output in self.tool.outputs.items():
+                if tool_output.format_source is not None:
+                    # Default to the special name "input", which removes restrictions on connections.
+                    formats = [ 'input' ]
+                    if data_inputs is None:
+                        data_inputs = self.get_data_inputs()
+                    # Find the input parameter referenced by format_source
+                    for di in data_inputs:
+                        # Input names come prefixed with conditional and repeat names separated by '|',
+                        # so remove prefixes when comparing with format_source.
+                        if di[ 'name' ] is not None and di[ 'name' ].split( '|' )[ -1 ] == tool_output.format_source:
+                            formats = di[ 'extensions' ]
+                else:
+                    formats = [ tool_output.format ]
+                for change_elem in tool_output.change_format:
+                    for when_elem in change_elem.findall( 'when' ):
+                        format = when_elem.get( 'format', None )
+                        if format and format not in formats:
+                            formats.append( format )
+                data_outputs.append( dict( name=name, extensions=formats ) )
+        return data_outputs
+
+
+class RepoWorkflowModuleFactory( WorkflowModuleFactory ):
+
+    def __init__( self, module_types ):
+        self.module_types = module_types
+
+    def from_dict( self, trans, repository_id, changeset_revision, step_dict, tools_metadata, **kwd ):
+        """Return module initialized from the data in dictionary `step_dict`."""
+        type = step_dict[ 'type' ]
+        assert type in self.module_types
+        module_method_kwds = dict( **kwd )
+        if type == "tool":
+            module_method_kwds[ 'repository_id' ] = repository_id
+            module_method_kwds[ 'changeset_revision' ] = changeset_revision
+            module_method_kwds[ 'tools_metadata' ] = tools_metadata
+        return self.module_types[ type ].from_dict( trans, step_dict, **module_method_kwds )
+
+    def from_workflow_step( self, trans, repository_id, changeset_revision, tools_metadata, step ):
+        """Return module initialized from the WorkflowStep object `step`."""
+        type = step.type
+        module_method_kwds = dict( )
+        if type == "tool":
+            module_method_kwds[ 'repository_id' ] = repository_id
+            module_method_kwds[ 'changeset_revision' ] = changeset_revision
+            module_method_kwds[ 'tools_metadata' ] = tools_metadata
+        return self.module_types[ type ].from_workflow_step( trans, step, **module_method_kwds )
+
+
+tool_shed_module_types = module_types.copy()
+tool_shed_module_types[ 'tool' ] = RepoToolModule
+module_factory = RepoWorkflowModuleFactory( tool_shed_module_types )
+
+
+def generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=None ):
+    """
+    Return an svg image representation of a workflow dictionary created when the workflow was exported.  This method is called
+    from both Galaxy and the tool shed.  When called from the tool shed, repository_metadata_id will have a value and repository_id
+    will be None.  When called from Galaxy, repository_metadata_id will be None and repository_id will have a value.
+    """
+    workflow_name = encoding_util.tool_shed_decode( workflow_name )
+    if trans.webapp.name == 'tool_shed':
+        # We're in the tool shed.
+        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
+        repository_id = trans.security.encode_id( repository_metadata.repository_id )
+        changeset_revision = repository_metadata.changeset_revision
+        metadata = repository_metadata.metadata
+    else:
+        # We're in Galaxy.
+        repository = repository_util.get_tool_shed_repository_by_id( trans.app, repository_id )
+        changeset_revision = repository.changeset_revision
+        metadata = repository.metadata
+    # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
+    # ( <relative path to the .ga file in the repository>, <exported workflow dict> ).
+    for workflow_tup in metadata[ 'workflows' ]:
+        workflow_dict = workflow_tup[1]
+        if workflow_dict[ 'name' ] == workflow_name:
+            break
+    if 'tools' in metadata:
+        tools_metadata = metadata[ 'tools' ]
+    else:
+        tools_metadata = []
+    workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
+                                                          workflow_dict=workflow_dict,
+                                                          tools_metadata=tools_metadata,
+                                                          repository_id=repository_id,
+                                                          changeset_revision=changeset_revision )
+    workflow_canvas = WorkflowCanvas()
+    canvas = workflow_canvas.canvas
+    # Store px width for boxes of each step.
+    for step in workflow.steps:
+        step.upgrade_messages = {}
+        module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
+        tool_errors = module.type == 'tool' and not module.tool
+        module_data_inputs = get_workflow_data_inputs( step, module )
+        module_data_outputs = get_workflow_data_outputs( step, module, workflow.steps )
+        module_name = get_workflow_module_name( module, missing_tool_tups )
+        workflow_canvas.populate_data_for_step(
+            step,
+            module_name,
+            module_data_inputs,
+            module_data_outputs,
+            tool_errors=tool_errors
+        )
+    workflow_canvas.add_steps( highlight_errors=True )
+    workflow_canvas.finish( )
+    trans.response.set_content_type( "image/svg+xml" )
+    return canvas.tostring()
+
+
+def get_workflow_data_inputs( step, module ):
+    if module.type == 'tool':
+        if module.tool:
+            return module.get_data_inputs()
+        else:
+            data_inputs = []
+            for wfsc in step.input_connections:
+                data_inputs_dict = {}
+                data_inputs_dict[ 'extensions' ] = [ '' ]
+                data_inputs_dict[ 'name' ] = wfsc.input_name
+                data_inputs_dict[ 'label' ] = 'Unknown'
+                data_inputs.append( data_inputs_dict )
+            return data_inputs
+    return module.get_data_inputs()
+
+
+def get_workflow_data_outputs( step, module, steps ):
+    if module.type == 'tool':
+        if module.tool:
+            return module.get_data_outputs()
+        else:
+            data_outputs = []
+            data_outputs_dict = {}
+            data_outputs_dict[ 'extensions' ] = [ 'input' ]
+            found = False
+            for workflow_step in steps:
+                for wfsc in workflow_step.input_connections:
+                    if step.name == wfsc.output_step.name:
+                        data_outputs_dict[ 'name' ] = wfsc.output_name
+                        found = True
+                        break
+                if found:
+                    break
+            if not found:
+                # We're at the last step of the workflow.
+                data_outputs_dict[ 'name' ] = 'output'
+            data_outputs.append( data_outputs_dict )
+            return data_outputs
+    return module.get_data_outputs()
+
+
+def get_workflow_from_dict( trans, workflow_dict, tools_metadata, repository_id, changeset_revision ):
+    """
+    Return an in-memory Workflow object from the dictionary object created when it was exported.  This method is called from
+    both Galaxy and the tool shed to retrieve a Workflow object that can be displayed as an SVG image.  This method is also
+    called from Galaxy to retrieve a Workflow object that can be used for saving to the Galaxy database.
+    """
+    trans.workflow_building_mode = True
+    workflow = trans.model.Workflow()
+    workflow.name = workflow_dict[ 'name' ]
+    workflow.has_errors = False
+    steps = []
+    # Keep ids for each step that we need to use to make connections.
+    steps_by_external_id = {}
+    # Keep track of tools required by the workflow that are not available in
+    # the tool shed repository.  Each tuple in the list of missing_tool_tups
+    # will be ( tool_id, tool_name, tool_version ).
+    missing_tool_tups = []
+    # First pass to build step objects and populate basic values
+    for step_dict in workflow_dict[ 'steps' ].values():
+        # Create the model class for the step
+        step = trans.model.WorkflowStep()
+        step.label = step_dict.get('label', None)
+        step.name = step_dict[ 'name' ]
+        step.position = step_dict[ 'position' ]
+        module = module_factory.from_dict( trans, repository_id, changeset_revision, step_dict, tools_metadata=tools_metadata )
+        if module.type == 'tool' and module.tool is None:
+            # A required tool is not available in the current repository.
+            step.tool_errors = 'unavailable'
+            missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
+            if missing_tool_tup not in missing_tool_tups:
+                missing_tool_tups.append( missing_tool_tup )
+        module.save_to_step( step )
+        if step.tool_errors:
+            workflow.has_errors = True
+        # Stick this in the step temporarily.
+        step.temp_input_connections = step_dict[ 'input_connections' ]
+        if trans.webapp.name == 'galaxy':
+            annotation = step_dict.get( 'annotation', '')
+            if annotation:
+                annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
+                new_step_annotation = trans.model.WorkflowStepAnnotationAssociation()
+                new_step_annotation.annotation = annotation
+                new_step_annotation.user = trans.user
+                step.annotations.append( new_step_annotation )
+        # Unpack and add post-job actions.
+        post_job_actions = step_dict.get( 'post_job_actions', {} )
+        for pja_dict in post_job_actions.values():
+            trans.model.PostJobAction( pja_dict[ 'action_type' ],
+                                       step,
+                                       pja_dict[ 'output_name' ],
+                                       pja_dict[ 'action_arguments' ] )
+        steps.append( step )
+        steps_by_external_id[ step_dict[ 'id' ] ] = step
+    # Second pass to deal with connections between steps.
+    for step in steps:
+        # Input connections.
+        for input_name, conn_dict in step.temp_input_connections.items():
+            if conn_dict:
+                output_step = steps_by_external_id[ conn_dict[ 'id' ] ]
+                conn = trans.model.WorkflowStepConnection()
+                conn.input_step = step
+                conn.input_name = input_name
+                conn.output_step = output_step
+                conn.output_name = conn_dict[ 'output_name' ]
+                step.input_connections.append( conn )
+        del step.temp_input_connections
+    # Order the steps if possible.
+    attach_ordered_steps( workflow, steps )
+    # Return the in-memory Workflow object for display or later persistence to the Galaxy database.
+    return workflow, missing_tool_tups
+
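+# A sketch of the exported workflow dictionary shape consumed above; the values are
+# illustrative, and the real dictionaries are created when a workflow is exported from Galaxy:
+#
+#     { 'name': 'Example workflow',
+#       'annotation': '',
+#       'steps': { '0': { 'id': 0,
+#                         'type': 'data_input',
+#                         'name': 'Input dataset',
+#                         'position': { 'top': 10, 'left': 10 },
+#                         'input_connections': {} },
+#                  '1': { 'id': 1,
+#                         'type': 'tool',
+#                         'name': 'Filter',
+#                         'tool_id': 'Filter1',
+#                         'tool_version': '1.1.0',
+#                         'position': { 'top': 10, 'left': 230 },
+#                         'input_connections': { 'input': { 'id': 0, 'output_name': 'output' } } } } }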
+
+def get_workflow_module_name( module, missing_tool_tups ):
+    module_name = module.get_name()
+    if module.type == 'tool' and module_name == 'unavailable':
+        for missing_tool_tup in missing_tool_tups:
+            missing_tool_id, missing_tool_name, missing_tool_version = missing_tool_tup
+            if missing_tool_id == module.tool_id:
+                module_name = missing_tool_name
+                break
+    return module_name
+
+
+def import_workflow( trans, repository, workflow_name ):
+    """Import a workflow contained in an installed tool shed repository into Galaxy (this method is called only from Galaxy)."""
+    status = 'done'
+    message = ''
+    changeset_revision = repository.changeset_revision
+    metadata = repository.metadata
+    workflows = metadata.get( 'workflows', [] )
+    tools_metadata = metadata.get( 'tools', [] )
+    workflow_dict = None
+    for workflow_data_tuple in workflows:
+        # The value of workflow_data_tuple is ( relative_path_to_workflow_file, exported_workflow_dict ).
+        relative_path_to_workflow_file, exported_workflow_dict = workflow_data_tuple
+        if exported_workflow_dict[ 'name' ] == workflow_name:
+            # If the exported workflow is available on disk, import it.
+            if os.path.exists( relative_path_to_workflow_file ):
+                workflow_file = open( relative_path_to_workflow_file, 'rb' )
+                workflow_data = workflow_file.read()
+                workflow_file.close()
+                workflow_dict = json.loads( workflow_data )
+            else:
+                # Use the current exported_workflow_dict.
+                workflow_dict = exported_workflow_dict
+            break
+    if workflow_dict:
+        # Create workflow if possible.
+        workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
+                                                              workflow_dict=workflow_dict,
+                                                              tools_metadata=tools_metadata,
+                                                              repository_id=repository.id,
+                                                              changeset_revision=changeset_revision )
+        # Save the workflow in the Galaxy database.  Pass workflow_dict along to create annotation at this point.
+        stored_workflow = save_workflow( trans, workflow, workflow_dict )
+        # Use the latest version of the saved workflow.
+        workflow = stored_workflow.latest_workflow
+        if workflow_name:
+            workflow.name = workflow_name
+        # Provide user feedback and show workflow list.
+        if workflow.has_errors:
+            message += "Imported, but some steps in this workflow have validation errors. "
+            status = "error"
+        if workflow.has_cycles:
+            message += "Imported, but this workflow contains cycles.  "
+            status = "error"
+        else:
+            message += "Workflow <b>%s</b> imported successfully.  " % workflow.name
+        if missing_tool_tups:
+            missing_tool_strs = []
+            for missing_tool_tup in missing_tool_tups:
+                tool_id, tool_name, other = missing_tool_tup
+                missing_tool_strs.append( 'name: %s, id: %s' % ( str( tool_name ), str( tool_id ) ) )
+            message += "The following tools required by this workflow are missing from this Galaxy instance: %s.  " % '; '.join( missing_tool_strs )
+    else:
+        workflow = None
+        message += 'The workflow named %s is not included in the metadata for revision %s of repository %s' % \
+            ( str( workflow_name ), str( changeset_revision ), str( repository.name ) )
+        status = 'error'
+    return workflow, status, message
+
+
+def save_workflow( trans, workflow, workflow_dict=None):
+    """Use the received in-memory Workflow object for saving to the Galaxy database."""
+    stored = trans.model.StoredWorkflow()
+    stored.name = workflow.name
+    workflow.stored_workflow = stored
+    stored.latest_workflow = workflow
+    stored.user = trans.user
+    if workflow_dict and workflow_dict.get('annotation', ''):
+        annotation = sanitize_html( workflow_dict['annotation'], 'utf-8', 'text/html' )
+        new_annotation = trans.model.StoredWorkflowAnnotationAssociation()
+        new_annotation.annotation = annotation
+        new_annotation.user = trans.user
+        stored.annotations.append(new_annotation)
+    trans.sa_session.add( stored )
+    trans.sa_session.flush()
+    # Add a new entry to the Workflows menu.
+    if trans.user.stored_workflow_menu_entries is None:
+        trans.user.stored_workflow_menu_entries = []
+    menuEntry = trans.model.StoredWorkflowMenuEntry()
+    menuEntry.stored_workflow = stored
+    trans.user.stored_workflow_menu_entries.append( menuEntry )
+    trans.sa_session.flush()
+    return stored
diff --git a/lib/tool_shed/util/xml_util.py b/lib/tool_shed/util/xml_util.py
new file mode 100644
index 0000000..0e1e789
--- /dev/null
+++ b/lib/tool_shed/util/xml_util.py
@@ -0,0 +1,154 @@
+import logging
+import sys
+import tempfile
+import xml.etree.ElementTree
+from xml.etree import ElementTree as XmlET
+
+from galaxy.util import listify
+
+log = logging.getLogger( __name__ )
+using_python_27 = sys.version_info[ :2 ] >= ( 2, 7 )
+
+
+class Py26CommentedTreeBuilder( XmlET.XMLTreeBuilder ):
+    # Python 2.6 uses ElementTree 1.2.x.
+
+    def __init__( self, html=0, target=None ):
+        XmlET.XMLTreeBuilder.__init__( self, html, target )
+        self._parser.CommentHandler = self.handle_comment
+
+    def handle_comment( self, data ):
+        self._target.start( XmlET.Comment, {} )
+        self._target.data( data )
+        self._target.end( XmlET.Comment )
+
+
+class Py27CommentedTreeBuilder( XmlET.TreeBuilder ):
+    # Python 2.7 uses ElementTree 1.3.x.
+
+    def comment( self, data ):
+        self.start( XmlET.Comment, {} )
+        self.data( data )
+        self.end( XmlET.Comment )
+
+
+def create_and_write_tmp_file( elems, use_indent=False ):
+    tmp_str = ''
+    for elem in listify( elems ):
+        tmp_str += xml_to_string( elem, use_indent=use_indent )
+    fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-cawrf" )
+    tmp_filename = fh.name
+    fh.close()
+    fh = open( tmp_filename, 'wb' )
+    fh.write( '<?xml version="1.0"?>\n' )
+    fh.write( tmp_str )
+    fh.close()
+    return tmp_filename
+
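+# Illustrative usage (the element is hypothetical); the caller is responsible for removing
+# the returned temporary file when finished with it:
+#
+#     elem = XmlET.fromstring( '<tool file="filtering.xml" guid="example" />' )
+#     tmp_filename = create_and_write_tmp_file( [ elem ] )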
+
+def create_element( tag, attributes=None, sub_elements=None ):
+    """
+    Create a new element whose tag is the value of the received tag and whose attributes are the
+    key / value pairs in the received attributes; the received sub_elements become child elements.
+    """
+    if tag:
+        elem = XmlET.Element( tag )
+        if attributes:
+            # The received attributes is an odict to preserve ordering.
+            for k, v in attributes.items():
+                elem.set( k, v )
+        if sub_elements:
+            # The received sub_elements is an odict.  These entries handle information that tends
+            # to be long text, including paragraphs (e.g., description and long_description).
+            for k, v in sub_elements.items():
+                # Don't include fields that are blank.
+                if v:
+                    if k == 'packages':
+                        # The received sub_elements is an odict whose key is 'packages' and whose
+                        # value is a list of ( name, version ) tuples.
+                        for v_tuple in v:
+                            sub_elem = XmlET.SubElement( elem, 'package' )
+                            sub_elem_name, sub_elem_version = v_tuple
+                            sub_elem.set( 'name', sub_elem_name )
+                            sub_elem.set( 'version', sub_elem_version )
+                    elif isinstance( v, list ):
+                        sub_elem = XmlET.SubElement( elem, k )
+                        # If v is a list, then it must be a list of tuples where the first
+                        # item is the tag and the second item is the text value.
+                        for v_tuple in v:
+                            if len( v_tuple ) == 2:
+                                v_tag = v_tuple[ 0 ]
+                                v_text = v_tuple[ 1 ]
+                                # Don't include fields that are blank.
+                                if v_text:
+                                    v_elem = XmlET.SubElement( sub_elem, v_tag )
+                                    v_elem.text = v_text
+                    else:
+                        sub_elem = XmlET.SubElement( elem, k )
+                        sub_elem.text = v
+        return elem
+    return None
+
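+# A minimal sketch of create_element with hypothetical attribute values; a plain dict works
+# here, although callers pass an odict when attribute ordering matters:
+#
+#     elem = create_element( 'repository', attributes={ 'name': 'example', 'owner': 'devteam' } )
+#     xml_to_string( elem )  # -> '<repository name="example" owner="devteam" />\n'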
+
+def indent( elem, level=0 ):
+    """
+    Indent an XML tree in place so that each node appears on its own line according to its depth.  This
+    method is used to write the shed tool config (e.g., shed_tool_conf.xml) from the in-memory list of
+    config_elems, because each config_elem in the list may be a hierarchical structure that was not
+    created using the parse_xml() method below, and so will not be properly written with
+    xml.etree.ElementTree.tostring() without manually indenting the tree first.
+    """
+    i = "\n" + level * "    "
+    if len( elem ):
+        if not elem.text or not elem.text.strip():
+            elem.text = i + "  "
+        if not elem.tail or not elem.tail.strip():
+            elem.tail = i
+        for child in elem:
+            indent( child, level + 1 )
+        if not child.tail or not child.tail.strip():
+            child.tail = i
+        if not elem.tail or not elem.tail.strip():
+            elem.tail = i
+    else:
+        if level and ( not elem.tail or not elem.tail.strip() ):
+            elem.tail = i
+
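+# Illustrative effect of indent on a hypothetical tree: before the call, tostring() emits
+# everything on one line; afterwards each node gets its own line:
+#
+#     elem = XmlET.fromstring( '<toolbox><section id="a" /></toolbox>' )
+#     indent( elem )
+#     # xml.etree.ElementTree.tostring( elem ) now yields:
+#     # <toolbox>
+#     #   <section id="a" />
+#     # </toolbox>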
+
+def parse_xml( file_name ):
+    """Returns a parsed xml tree with comments intact."""
+    error_message = ''
+    fobj = open( file_name, 'r' )
+    if using_python_27:
+        try:
+            tree = XmlET.parse( fobj, parser=XmlET.XMLParser( target=Py27CommentedTreeBuilder() ) )
+        except Exception as e:
+            fobj.close()
+            error_message = "Exception attempting to parse %s: %s" % ( str( file_name ), str( e ) )
+            log.exception( error_message )
+            return None, error_message
+    else:
+        try:
+            tree = XmlET.parse( fobj, parser=Py26CommentedTreeBuilder() )
+        except Exception as e:
+            fobj.close()
+            error_message = "Exception attempting to parse %s: %s" % ( str( file_name ), str( e ) )
+            log.exception( error_message )
+            return None, error_message
+    fobj.close()
+    return tree, error_message
+
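+# Typical calling pattern for parse_xml (the file name is illustrative):
+#
+#     tree, error_message = parse_xml( 'shed_tool_conf.xml' )
+#     if tree is None:
+#         log.debug( error_message )
+#     else:
+#         root = tree.getroot()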
+
+def xml_to_string( elem, encoding='utf-8', use_indent=False, level=0 ):
+    if elem is not None:
+        if use_indent:
+            # We were called from ToolPanelManager.config_elems_to_xml_file(), so
+            # set the level to 1 since level 0 is the <toolbox> tag set.
+            indent( elem, level=level )
+        if using_python_27:
+            xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+        else:
+            xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+    else:
+        xml_str = ''
+    return xml_str
diff --git a/lib/tool_shed/utility_containers/__init__.py b/lib/tool_shed/utility_containers/__init__.py
new file mode 100644
index 0000000..d467f7d
--- /dev/null
+++ b/lib/tool_shed/utility_containers/__init__.py
@@ -0,0 +1,360 @@
+import logging
+import threading
+
+from galaxy import util
+from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import readme_util
+
+from . import utility_container_manager
+
+log = logging.getLogger( __name__ )
+
+
+class FailedTest( object ):
+    """Failed tool tests object"""
+
+    def __init__( self, id=None, stderr=None, test_id=None, tool_id=None, tool_version=None, traceback=None ):
+        self.id = id
+        self.stderr = stderr
+        self.test_id = test_id
+        self.tool_id = tool_id
+        self.tool_version = tool_version
+        self.traceback = traceback
+
+
+class InvalidRepositoryDependency( object ):
+    """Invalid repository dependency definition object"""
+
+    def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None,
+                  prior_installation_required=False, only_if_compiling_contained_td=False, error=None ):
+        self.id = id
+        self.toolshed = toolshed
+        self.repository_name = repository_name
+        self.repository_owner = repository_owner
+        self.changeset_revision = changeset_revision
+        self.prior_installation_required = prior_installation_required
+        self.only_if_compiling_contained_td = only_if_compiling_contained_td
+        self.error = error
+
+
+class InvalidToolDependency( object ):
+    """Invalid tool dependency definition object"""
+
+    def __init__( self, id=None, name=None, version=None, type=None, error=None ):
+        self.id = id
+        self.name = name
+        self.version = version
+        self.type = type
+        self.error = error
+
+
+class MissingTestComponent( object ):
+    """Missing tool test components object"""
+
+    def __init__( self, id=None, missing_components=None, tool_guid=None, tool_id=None, tool_version=None ):
+        self.id = id
+        self.missing_components = missing_components
+        self.tool_guid = tool_guid
+        self.tool_id = tool_id
+        self.tool_version = tool_version
+
+
+class NotTested( object ):
+    """NotTested object"""
+
+    def __init__( self, id=None, reason=None ):
+        self.id = id
+        self.reason = reason
+
+
+class PassedTest( object ):
+    """Passed tool tests object"""
+
+    def __init__( self, id=None, test_id=None, tool_id=None, tool_version=None ):
+        self.id = id
+        self.test_id = test_id
+        self.tool_id = tool_id
+        self.tool_version = tool_version
+
+
+class RepositoryInstallationError( object ):
+    """Repository installation error object"""
+
+    def __init__( self, id=None, tool_shed=None, name=None, owner=None, changeset_revision=None, error_message=None ):
+        self.id = id
+        self.tool_shed = tool_shed
+        self.name = name
+        self.owner = owner
+        self.changeset_revision = changeset_revision
+        self.error_message = error_message
+
+
+class RepositorySuccessfulInstallation( object ):
+    """Repository installation object"""
+
+    def __init__( self, id=None, tool_shed=None, name=None, owner=None, changeset_revision=None ):
+        self.id = id
+        self.tool_shed = tool_shed
+        self.name = name
+        self.owner = owner
+        self.changeset_revision = changeset_revision
+
+
+class ToolDependencyInstallationError( object ):
+    """Tool dependency installation error object"""
+
+    def __init__( self, id=None, type=None, name=None, version=None, error_message=None ):
+        self.id = id
+        self.type = type
+        self.name = name
+        self.version = version
+        self.error_message = error_message
+
+
+class ToolDependencySuccessfulInstallation( object ):
+    """Tool dependency installation object"""
+
+    def __init__( self, id=None, type=None, name=None, version=None, installation_directory=None ):
+        self.id = id
+        self.type = type
+        self.name = name
+        self.version = version
+        self.installation_directory = installation_directory
+
+
+class ToolShedUtilityContainerManager( utility_container_manager.UtilityContainerManager ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def build_invalid_repository_dependencies_root_folder( self, folder_id, invalid_repository_dependencies_dict ):
+        """Return a folder hierarchy containing invalid repository dependencies."""
+        label = 'Invalid repository dependencies'
+        if invalid_repository_dependencies_dict:
+            invalid_repository_dependency_id = 0
+            folder_id += 1
+            invalid_repository_dependencies_root_folder = \
+                utility_container_manager.Folder( id=folder_id,
+                                                  key='root',
+                                                  label='root',
+                                                  parent=None )
+            folder_id += 1
+            invalid_repository_dependencies_folder = \
+                utility_container_manager.Folder( id=folder_id,
+                                                  key='invalid_repository_dependencies',
+                                                  label=label,
+                                                  parent=invalid_repository_dependencies_root_folder )
+            invalid_repository_dependencies_root_folder.folders.append( invalid_repository_dependencies_folder )
+            invalid_repository_dependencies = invalid_repository_dependencies_dict[ 'repository_dependencies' ]
+            for invalid_repository_dependency in invalid_repository_dependencies:
+                folder_id += 1
+                invalid_repository_dependency_id += 1
+                toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error = \
+                    common_util.parse_repository_dependency_tuple( invalid_repository_dependency, contains_error=True )
+                key = container_util.generate_repository_dependencies_key_for_repository( toolshed,
+                                                                                          name,
+                                                                                          owner,
+                                                                                          changeset_revision,
+                                                                                          prior_installation_required,
+                                                                                          only_if_compiling_contained_td )
+                label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>" % ( name, changeset_revision, owner )
+                folder = utility_container_manager.Folder( id=folder_id,
+                                                           key=key,
+                                                           label=label,
+                                                           parent=invalid_repository_dependencies_folder )
+                ird = InvalidRepositoryDependency( id=invalid_repository_dependency_id,
+                                                   toolshed=toolshed,
+                                                   repository_name=name,
+                                                   repository_owner=owner,
+                                                   changeset_revision=changeset_revision,
+                                                   prior_installation_required=util.asbool( prior_installation_required ),
+                                                   only_if_compiling_contained_td=util.asbool( only_if_compiling_contained_td ),
+                                                   error=error )
+                folder.invalid_repository_dependencies.append( ird )
+                invalid_repository_dependencies_folder.folders.append( folder )
+        else:
+            invalid_repository_dependencies_root_folder = None
+        return folder_id, invalid_repository_dependencies_root_folder
+
+    def build_invalid_tool_dependencies_root_folder( self, folder_id, invalid_tool_dependencies_dict ):
+        """Return a folder hierarchy containing invalid tool dependencies."""
+        # Invalid tool dependencies are always packages, e.g.:
+        # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1", "error": "some string"}}
+        label = 'Invalid tool dependencies'
+        if invalid_tool_dependencies_dict:
+            invalid_tool_dependency_id = 0
+            folder_id += 1
+            invalid_tool_dependencies_root_folder = \
+                utility_container_manager.Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            invalid_tool_dependencies_folder = \
+                utility_container_manager.Folder( id=folder_id,
+                                                  key='invalid_tool_dependencies',
+                                                  label=label,
+                                                  parent=invalid_tool_dependencies_root_folder )
+            invalid_tool_dependencies_root_folder.folders.append( invalid_tool_dependencies_folder )
+            for td_key, requirements_dict in invalid_tool_dependencies_dict.items():
+                folder_id += 1
+                invalid_tool_dependency_id += 1
+                try:
+                    name = requirements_dict[ 'name' ]
+                    type = requirements_dict[ 'type' ]
+                    version = requirements_dict[ 'version' ]
+                    error = requirements_dict[ 'error' ]
+                except Exception as e:
+                    name = 'unknown'
+                    type = 'unknown'
+                    version = 'unknown'
+                    error = str( e )
+                key = self.generate_tool_dependencies_key( name, version, type )
+                label = "Version <b>%s</b> of the <b>%s</b> <b>%s</b>" % ( version, name, type )
+                folder = utility_container_manager.Folder( id=folder_id,
+                                                           key=key,
+                                                           label=label,
+                                                           parent=invalid_tool_dependencies_folder )
+                itd = InvalidToolDependency( id=invalid_tool_dependency_id,
+                                             name=name,
+                                             version=version,
+                                             type=type,
+                                             error=error )
+                folder.invalid_tool_dependencies.append( itd )
+                invalid_tool_dependencies_folder.folders.append( folder )
+        else:
+            invalid_tool_dependencies_root_folder = None
+        return folder_id, invalid_tool_dependencies_root_folder
+
+    def build_repository_containers( self, repository, changeset_revision, repository_dependencies, repository_metadata,
+                                     exclude=None ):
+        """
+        Return a dictionary of containers for the received repository's dependencies and
+        contents for display in the Tool Shed.
+        """
+        if exclude is None:
+            exclude = []
+        containers_dict = dict( datatypes=None,
+                                invalid_tools=None,
+                                readme_files=None,
+                                repository_dependencies=None,
+                                tool_dependencies=None,
+                                valid_tools=None,
+                                workflows=None,
+                                valid_data_managers=None
+                                )
+        if repository_metadata:
+            metadata = repository_metadata.metadata
+            lock = threading.Lock()
+            lock.acquire( True )
+            try:
+                folder_id = 0
+                # Datatypes container.
+                if metadata:
+                    if 'datatypes' not in exclude and 'datatypes' in metadata:
+                        datatypes = metadata[ 'datatypes' ]
+                        folder_id, datatypes_root_folder = self.build_datatypes_folder( folder_id, datatypes )
+                        containers_dict[ 'datatypes' ] = datatypes_root_folder
+                # Invalid repository dependencies container.
+                if metadata:
+                    if 'invalid_repository_dependencies' not in exclude and 'invalid_repository_dependencies' in metadata:
+                        invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ]
+                        folder_id, invalid_repository_dependencies_root_folder = \
+                            self.build_invalid_repository_dependencies_root_folder( folder_id,
+                                                                                    invalid_repository_dependencies )
+                        containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder
+                # Invalid tool dependencies container.
+                if metadata:
+                    if 'invalid_tool_dependencies' not in exclude and 'invalid_tool_dependencies' in metadata:
+                        invalid_tool_dependencies = metadata[ 'invalid_tool_dependencies' ]
+                        folder_id, invalid_tool_dependencies_root_folder = \
+                            self.build_invalid_tool_dependencies_root_folder( folder_id,
+                                                                              invalid_tool_dependencies )
+                        containers_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_root_folder
+                # Invalid tools container.
+                if metadata:
+                    if 'invalid_tools' not in exclude and 'invalid_tools' in metadata:
+                        invalid_tool_configs = metadata[ 'invalid_tools' ]
+                        folder_id, invalid_tools_root_folder = \
+                            self.build_invalid_tools_folder( folder_id,
+                                                             invalid_tool_configs,
+                                                             changeset_revision,
+                                                             repository=repository,
+                                                             label='Invalid tools' )
+                        containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
+                # Readme files container.
+                if metadata:
+                    if 'readme_files' not in exclude and 'readme_files' in metadata:
+                        readme_files_dict = readme_util.build_readme_files_dict( self.app, repository, changeset_revision, metadata )
+                        folder_id, readme_files_root_folder = self.build_readme_files_folder( folder_id, readme_files_dict )
+                        containers_dict[ 'readme_files' ] = readme_files_root_folder
+                if 'repository_dependencies' not in exclude:
+                    # Repository dependencies container.
+                    folder_id, repository_dependencies_root_folder = \
+                        self.build_repository_dependencies_folder( folder_id=folder_id,
+                                                                   repository_dependencies=repository_dependencies,
+                                                                   label='Repository dependencies',
+                                                                   installed=False )
+                    if repository_dependencies_root_folder:
+                        containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+                # Tool dependencies container.
+                if metadata:
+                    if 'tool_dependencies' not in exclude and 'tool_dependencies' in metadata:
+                        tool_dependencies = metadata[ 'tool_dependencies' ]
+                        if 'orphan_tool_dependencies' in metadata:
+                            # The use of the orphan_tool_dependencies category in metadata has been deprecated,
+                            # but we still need to check in case the metadata is out of date.
+                            orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
+                            tool_dependencies.update( orphan_tool_dependencies )
+                        # Tool dependencies can be categorized as orphans only if the repository contains tools.
+                        if 'tools' not in exclude:
+                            tools = metadata.get( 'tools', [] )
+                            tools.extend( metadata.get( 'invalid_tools', [] ) )
+                        folder_id, tool_dependencies_root_folder = \
+                            self.build_tool_dependencies_folder( folder_id,
+                                                                 tool_dependencies,
+                                                                 missing=False,
+                                                                 new_install=False )
+                        containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+                # Valid tools container.
+                if metadata:
+                    if 'tools' not in exclude and 'tools' in metadata:
+                        valid_tools = metadata[ 'tools' ]
+                        folder_id, valid_tools_root_folder = self.build_tools_folder( folder_id,
+                                                                                      valid_tools,
+                                                                                      repository,
+                                                                                      changeset_revision,
+                                                                                      label='Valid tools' )
+                        containers_dict[ 'valid_tools' ] = valid_tools_root_folder
+                # Workflows container.
+                if metadata:
+                    if 'workflows' not in exclude and 'workflows' in metadata:
+                        workflows = metadata[ 'workflows' ]
+                        folder_id, workflows_root_folder = \
+                            self.build_workflows_folder( folder_id=folder_id,
+                                                         workflows=workflows,
+                                                         repository_metadata_id=repository_metadata.id,
+                                                         repository_id=None,
+                                                         label='Workflows' )
+                        containers_dict[ 'workflows' ] = workflows_root_folder
+                # Valid Data Managers container
+                if metadata:
+                    if 'data_manager' not in exclude and 'data_manager' in metadata:
+                        data_managers = metadata['data_manager'].get( 'data_managers', None )
+                        folder_id, data_managers_root_folder = \
+                            self.build_data_managers_folder( folder_id, data_managers, label="Data Managers" )
+                        containers_dict[ 'valid_data_managers' ] = data_managers_root_folder
+                        error_messages = metadata['data_manager'].get( 'error_messages', None )
+                        data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
+                        folder_id, data_managers_root_folder = \
+                            self.build_invalid_data_managers_folder( folder_id,
+                                                                     data_managers,
+                                                                     error_messages,
+                                                                     label="Invalid Data Managers" )
+                        containers_dict[ 'invalid_data_managers' ] = data_managers_root_folder
+            except Exception as e:
+                log.exception( "Exception in build_repository_containers: %s" % str( e ) )
+            finally:
+                lock.release()
+        return containers_dict
+
+    def generate_tool_dependencies_key( self, name, version, type ):
+        return '%s%s%s%s%s' % ( str( name ), container_util.STRSEP, str( version ), container_util.STRSEP, str( type ) )
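For orientation, the key produced by generate_tool_dependencies_key is simply the three identifying components joined with container_util.STRSEP. A minimal standalone sketch of the same scheme (the separator value below is a placeholder, not the real STRSEP):

    # Standalone sketch; STRSEP stands in for tool_shed.util.container_util.STRSEP.
    STRSEP = '__STRSEP__'

    def generate_key(name, version, type_):
        # Flatten the identifying components into a single string key.
        return STRSEP.join([str(name), str(version), str(type_)])

    def split_key(key):
        # Recover the components, assuming none of them contains STRSEP.
        return key.split(STRSEP)

    assert split_key(generate_key('R', '2.15.1', 'package')) == ['R', '2.15.1', 'package']

The round trip only works while the separator never occurs inside a component, which is why a deliberately unusual separator string is used.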
diff --git a/lib/tool_shed/utility_containers/utility_container_manager.py b/lib/tool_shed/utility_containers/utility_container_manager.py
new file mode 100644
index 0000000..0000b97
--- /dev/null
+++ b/lib/tool_shed/utility_containers/utility_container_manager.py
@@ -0,0 +1,860 @@
+import logging
+
+from galaxy import util
+from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import repository_util
+
+log = logging.getLogger( __name__ )
+
+
+class Folder( object ):
+    """Container object."""
+
+    def __init__( self, id=None, key=None, label=None, parent=None ):
+        self.id = id
+        self.key = key
+        self.label = label
+        self.parent = parent
+        self.current_repository_installation_errors = []
+        self.current_repository_successful_installations = []
+        self.description = None
+        self.datatypes = []
+        self.folders = []
+        self.invalid_data_managers = []
+        self.invalid_repository_dependencies = []
+        self.invalid_tool_dependencies = []
+        self.invalid_tools = []
+        self.missing_test_components = []
+        self.readme_files = []
+        self.repository_dependencies = []
+        self.repository_installation_errors = []
+        self.repository_successful_installations = []
+        self.test_environments = []
+        self.tool_dependencies = []
+        self.valid_tools = []
+        self.valid_data_managers = []
+        self.workflows = []
+
+    def contains_folder( self, folder ):
+        for index, contained_folder in enumerate( self.folders ):
+            if folder == contained_folder:
+                return index, contained_folder
+        return 0, None
+
+    def contains_repository_dependency( self, repository_dependency ):
+        listified_repository_dependency = repository_dependency.listify
+        for contained_repository_dependency in self.repository_dependencies:
+            if contained_repository_dependency.listify == listified_repository_dependency:
+                return True
+        return False
+
+    def remove_repository_dependency( self, repository_dependency ):
+        listified_repository_dependency = repository_dependency.listify
+        for contained_repository_dependency in self.repository_dependencies:
+            if contained_repository_dependency.listify == listified_repository_dependency:
+                self.repository_dependencies.remove( contained_repository_dependency )
+
+
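Folder is a plain container tree whose membership checks compare listified components rather than object identity. A hedged usage sketch, assuming the new module is importable under the path shown in the diff header (RepositoryDependency is defined further below in the same file):

    # Illustrative only; values are invented.
    from tool_shed.utility_containers.utility_container_manager import (
        Folder,
        RepositoryDependency,
    )

    root = Folder(id=1, key='root', label='root', parent=None)
    deps = Folder(id=2, key='repository_dependencies',
                  label='Repository dependencies', parent=root)
    root.folders.append(deps)

    rd = RepositoryDependency(id=1, toolshed='toolshed.example.org',
                              repository_name='bwa', repository_owner='devteam',
                              changeset_revision='abc123')
    deps.repository_dependencies.append(rd)

    # A second object with the same components is considered contained,
    # because contains_repository_dependency compares listify results.
    rd_copy = RepositoryDependency(id=2, toolshed='toolshed.example.org',
                                   repository_name='bwa', repository_owner='devteam',
                                   changeset_revision='abc123')
    assert deps.contains_repository_dependency(rd_copy)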
+class DataManager( object ):
+    """Data Manager object"""
+
+    def __init__( self, id=None, name=None, version=None, data_tables=None ):
+        self.id = id
+        self.name = name
+        self.version = version
+        self.data_tables = data_tables
+
+
+class Datatype( object ):
+    """Datatype object"""
+
+    def __init__( self, id=None, extension=None, type=None, mimetype=None, subclass=None, converters=None, display_app_containers=None ):
+        self.id = id
+        self.extension = extension
+        self.type = type
+        self.mimetype = mimetype
+        self.subclass = subclass
+        self.converters = converters
+        self.display_app_containers = display_app_containers
+
+
+class InvalidDataManager( object ):
+    """Invalid data Manager object"""
+
+    def __init__( self, id=None, index=None, error=None ):
+        self.id = id
+        self.index = index
+        self.error = error
+
+
+class InvalidTool( object ):
+    """Invalid tool object"""
+
+    def __init__( self, id=None, tool_config=None, repository_id=None, changeset_revision=None, repository_installation_status=None ):
+        self.id = id
+        self.tool_config = tool_config
+        self.repository_id = repository_id
+        self.changeset_revision = changeset_revision
+        self.repository_installation_status = repository_installation_status
+
+
+class ReadMe( object ):
+    """Readme text object"""
+
+    def __init__( self, id=None, name=None, text=None ):
+        self.id = id
+        self.name = name
+        self.text = text
+
+
+class RepositoryDependency( object ):
+    """Repository dependency object"""
+
+    def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, prior_installation_required=False,
+                  only_if_compiling_contained_td=False, installation_status=None, tool_shed_repository_id=None ):
+        self.id = id
+        self.toolshed = toolshed
+        self.repository_name = repository_name
+        self.repository_owner = repository_owner
+        self.changeset_revision = changeset_revision
+        self.prior_installation_required = prior_installation_required
+        self.only_if_compiling_contained_td = only_if_compiling_contained_td
+        self.installation_status = installation_status
+        self.tool_shed_repository_id = tool_shed_repository_id
+
+    @property
+    def listify( self ):
+        return [ self.toolshed,
+                 self.repository_name,
+                 self.repository_owner,
+                 self.changeset_revision,
+                 self.prior_installation_required,
+                 self.only_if_compiling_contained_td ]
+
+
+class Tool( object ):
+    """Tool object"""
+
+    def __init__( self, id=None, tool_config=None, tool_id=None, name=None, description=None, version=None, requirements=None,
+                  repository_id=None, changeset_revision=None, repository_installation_status=None ):
+        self.id = id
+        self.tool_config = tool_config
+        self.tool_id = tool_id
+        self.name = name
+        self.description = description
+        self.version = version
+        self.requirements = requirements
+        self.repository_id = repository_id
+        self.changeset_revision = changeset_revision
+        self.repository_installation_status = repository_installation_status
+
+
+class ToolDependency( object ):
+    """Tool dependency object"""
+
+    def __init__( self, id=None, name=None, version=None, type=None, readme=None, installation_status=None, repository_id=None,
+                  tool_dependency_id=None ):
+        self.id = id
+        self.name = name
+        self.version = version
+        self.type = type
+        self.readme = readme
+        self.installation_status = installation_status
+        self.repository_id = repository_id
+        self.tool_dependency_id = tool_dependency_id
+
+    @property
+    def listify( self ):
+        return [ self.name, self.version, self.type ]
+
+
+class Workflow( object ):
+    """Workflow object."""
+
+    def __init__( self, id=None, workflow_name=None, steps=None, format_version=None, annotation=None,
+                  repository_metadata_id=None, repository_id=None ):
+        """
+        When rendered in the tool shed, repository_metadata_id will have a value and repository_id will
+        be None.  When rendered in Galaxy, repository_id will have a value and repository_metadata_id will
+        be None.
+        """
+        self.id = id
+        self.workflow_name = workflow_name
+        self.steps = steps
+        self.format_version = format_version
+        self.annotation = annotation
+        self.repository_metadata_id = repository_metadata_id
+        self.repository_id = repository_id
+
+
+class UtilityContainerManager( object ):
+
+    def __init__( self, app ):
+        self.app = app
+
+    def build_data_managers_folder( self, folder_id, data_managers, label=None ):
+        """Return a folder hierarchy containing Data Managers."""
+        if data_managers:
+            if label is None:
+                label = "Data Managers"
+            data_manager_id = 0
+            folder_id += 1
+            data_managers_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            key = "valid_data_managers"
+            folder = Folder( id=folder_id, key=key, label=label, parent=data_managers_root_folder )
+            data_managers_root_folder.folders.append( folder )
+            # Insert a header row.
+            data_manager_id += 1
+            data_manager = DataManager( id=data_manager_id,
+                                        name='Name',
+                                        version='Version',
+                                        data_tables='Data Tables' )
+            folder.valid_data_managers.append( data_manager )
+            for data_manager_dict in data_managers.values():
+                data_manager_id += 1
+                try:
+                    name = data_manager_dict.get( 'name', '' )
+                    version = data_manager_dict.get( 'version', '' )
+                    data_tables = ", ".join( data_manager_dict.get( 'data_tables', '' ) )
+                except Exception as e:
+                    name = str( e )
+                    version = 'unknown'
+                    data_tables = 'unknown'
+                data_manager = DataManager( id=data_manager_id,
+                                            name=name,
+                                            version=version,
+                                            data_tables=data_tables )
+                folder.valid_data_managers.append( data_manager )
+        else:
+            data_managers_root_folder = None
+        return folder_id, data_managers_root_folder
+
+    def build_datatypes_folder( self, folder_id, datatypes, label='Datatypes' ):
+        """Return a folder hierarchy containing datatypes."""
+        if datatypes:
+            datatype_id = 0
+            folder_id += 1
+            datatypes_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            folder = Folder( id=folder_id, key='datatypes', label=label, parent=datatypes_root_folder )
+            datatypes_root_folder.folders.append( folder )
+            # Insert a header row.
+            datatype_id += 1
+            datatype = Datatype( id=datatype_id,
+                                 extension='extension',
+                                 type='type',
+                                 mimetype='mimetype',
+                                 subclass='subclass' )
+            folder.datatypes.append( datatype )
+            for datatypes_dict in datatypes:
+                # {"converters":
+                #    [{"target_datatype": "gff",
+                #      "tool_config": "bed_to_gff_converter.xml",
+                #      "guid": "localhost:9009/repos/test/bed_to_gff_converter/CONVERTER_bed_to_gff_0/2.0.0"}],
+                # "display_in_upload": "true",
+                # "dtype": "galaxy.datatypes.interval:Bed",
+                # "extension": "bed"}
+                # TODO: converters and display_app information is not currently rendered.  Should it be?
+                # Handle defined converters, if any.
+                converters = datatypes_dict.get( 'converters', None )
+                if converters:
+                    num_converters = len( converters )
+                else:
+                    num_converters = 0
+                # Handle defined display applications, if any.
+                display_app_containers = datatypes_dict.get( 'display_app_containers', None )
+                if display_app_containers:
+                    num_display_app_containers = len( display_app_containers )
+                else:
+                    num_display_app_containers = 0
+                datatype_id += 1
+                try:
+                    extension = datatypes_dict.get( 'extension', '' )
+                    type = datatypes_dict.get( 'dtype', '' )
+                    mimetype = datatypes_dict.get( 'mimetype', '' )
+                    subclass = datatypes_dict.get( 'subclass', '' )
+                    converters = num_converters
+                    display_app_containers = num_display_app_containers
+                except Exception as e:
+                    extension = str( e )
+                    type = 'unknown'
+                    mimetype = 'unknown'
+                    subclass = 'unknown'
+                    converters = 'unknown'
+                    display_app_containers = 'unknown'
+                datatype = Datatype( id=datatype_id,
+                                     extension=extension,
+                                     type=type,
+                                     mimetype=mimetype,
+                                     subclass=subclass,
+                                     converters=converters,
+                                     display_app_containers=display_app_containers )
+                folder.datatypes.append( datatype )
+        else:
+            datatypes_root_folder = None
+        return folder_id, datatypes_root_folder
+
+    def build_invalid_data_managers_folder( self, folder_id, data_managers, error_messages=None, label=None ):
+        """Return a folder hierarchy containing invalid Data Managers."""
+        if data_managers or error_messages:
+            if label is None:
+                label = "Invalid Data Managers"
+            data_manager_id = 0
+            folder_id += 1
+            data_managers_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            key = "invalid_data_managers"
+            folder = Folder( id=folder_id, key=key, label=label, parent=data_managers_root_folder )
+            data_managers_root_folder.folders.append( folder )
+            # Insert a header row.
+            data_manager_id += 1
+            data_manager = InvalidDataManager( id=data_manager_id,
+                                               index='Element Index',
+                                               error='Error' )
+            folder.invalid_data_managers.append( data_manager )
+            if error_messages:
+                for error_message in error_messages:
+                    data_manager_id += 1
+                    data_manager = InvalidDataManager( id=data_manager_id,
+                                                       index=0,
+                                                       error=error_message )
+                    folder.invalid_data_managers.append( data_manager )
+            for data_manager_dict in data_managers:
+                data_manager_id += 1
+                data_manager = InvalidDataManager( id=data_manager_id,
+                                                   index=data_manager_dict.get( 'index', 0 ) + 1,
+                                                   error=data_manager_dict.get( 'error_message', '' ) )
+                folder.invalid_data_managers.append( data_manager )
+        else:
+            data_managers_root_folder = None
+        return folder_id, data_managers_root_folder
+
+    def build_invalid_tools_folder( self, folder_id, invalid_tool_configs, changeset_revision, repository=None,
+                                    label='Invalid tools' ):
+        """Return a folder hierarchy containing invalid tools."""
+        # TODO: Should we display invalid tools on the tool panel selection page when installing the
+        # repository into Galaxy?
+        if invalid_tool_configs:
+            invalid_tool_id = 0
+            folder_id += 1
+            invalid_tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            folder = Folder( id=folder_id, key='invalid_tools', label=label, parent=invalid_tools_root_folder )
+            invalid_tools_root_folder.folders.append( folder )
+            for invalid_tool_config in invalid_tool_configs:
+                invalid_tool_id += 1
+                if repository:
+                    repository_id = repository.id
+                    if self.app.name == 'galaxy':
+                        repository_installation_status = repository.status
+                    else:
+                        repository_installation_status = None
+                else:
+                    repository_id = None
+                    repository_installation_status = None
+                invalid_tool = InvalidTool( id=invalid_tool_id,
+                                            tool_config=invalid_tool_config,
+                                            repository_id=repository_id,
+                                            changeset_revision=changeset_revision,
+                                            repository_installation_status=repository_installation_status )
+                folder.invalid_tools.append( invalid_tool )
+        else:
+            invalid_tools_root_folder = None
+        return folder_id, invalid_tools_root_folder
+
+    def build_readme_files_folder( self, folder_id, readme_files_dict, label='Readme files' ):
+        """Return a folder hierarchy containing readme text files."""
+        if readme_files_dict:
+            readme_id = 0
+            folder_id += 1
+            readme_files_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            readme_files_folder = Folder( id=folder_id, key='readme_files', label=label, parent=readme_files_root_folder )
+            multiple_readme_files = len( readme_files_dict ) > 1
+            readme_files_root_folder.folders.append( readme_files_folder )
+            for readme_file_name, readme_file_text in readme_files_dict.items():
+                readme_id += 1
+                readme = ReadMe( id=readme_id, name=readme_file_name, text=readme_file_text )
+                if multiple_readme_files:
+                    folder_id += 1
+                    folder = Folder( id=folder_id, key=readme_file_name, label=readme_file_name, parent=readme_files_folder )
+                    folder.readme_files.append( readme )
+                    readme_files_folder.folders.append( folder )
+                else:
+                    readme_files_folder.readme_files.append( readme )
+        else:
+            readme_files_root_folder = None
+        return folder_id, readme_files_root_folder
+
+    def build_repository_dependencies_folder( self, folder_id, repository_dependencies, label='Repository dependencies',
+                                              installed=False ):
+        """Return a folder hierarchy containing repository dependencies."""
+        if repository_dependencies:
+            repository_dependency_id = 0
+            folder_id += 1
+            # Create the root folder.
+            repository_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            # Create the Repository dependencies folder and add it to the root folder.
+            repository_dependencies_folder_key = repository_dependencies[ 'root_key' ]
+            repository_dependencies_folder = Folder( id=folder_id,
+                                                     key=repository_dependencies_folder_key,
+                                                     label=label,
+                                                     parent=repository_dependencies_root_folder )
+            del repository_dependencies[ 'root_key' ]
+            # The received repository_dependencies is a dictionary with keys: 'root_key', 'description', and one or more
+            # repository_dependency keys.  We want the description value associated with the repository_dependencies_folder.
+            repository_dependencies_folder.description = repository_dependencies.get( 'description', None )
+            repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
+            repository_dependencies.pop( 'description', None )
+            repository_dependencies_folder, folder_id, repository_dependency_id = \
+                self.populate_repository_dependencies_container( repository_dependencies_folder,
+                                                                 repository_dependencies,
+                                                                 folder_id,
+                                                                 repository_dependency_id )
+            repository_dependencies_folder = self.prune_repository_dependencies( repository_dependencies_folder )
+        else:
+            repository_dependencies_root_folder = None
+        return folder_id, repository_dependencies_root_folder
+
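As the comments in build_repository_dependencies_folder note, the incoming dictionary carries a 'root_key', a 'description', and one or more dependency keys. A hedged sketch of that shape (the key layout and all values here are invented; real keys come from container_util.generate_repository_dependencies_key_for_repository):

    # Illustrative input only; the separator and key format are assumptions.
    repository_dependencies = {
        'root_key': 'toolshed.example.org__bwa__devteam__abc123__False__False',
        'description': 'Required packages for the bwa wrappers',
        'toolshed.example.org__bwa__devteam__abc123__False__False': [
            # Each entry is a repository dependency list:
            # [toolshed, name, owner, changeset_revision,
            #  prior_installation_required, only_if_compiling_contained_td]
            ['toolshed.example.org', 'samtools', 'devteam', 'def456', 'False', 'False'],
        ],
    }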
+    def build_tools_folder( self, folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ):
+        """Return a folder hierarchy containing valid tools."""
+        if tool_dicts:
+            container_object_tool_id = 0
+            folder_id += 1
+            tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            folder = Folder( id=folder_id, key='tools', label=label, parent=tools_root_folder )
+            if self.app.name == 'galaxy':
+                folder.description = 'click the name to inspect the tool metadata'
+            tools_root_folder.folders.append( folder )
+            # Insert a header row.
+            container_object_tool_id += 1
+            tool = Tool( id=container_object_tool_id,
+                         tool_config='',
+                         tool_id='',
+                         name='Name',
+                         description='Description',
+                         version='Version',
+                         requirements='',
+                         repository_id='',
+                         changeset_revision='' )
+            folder.valid_tools.append( tool )
+            if repository:
+                repository_id = repository.id
+                if self.app.name == 'galaxy':
+                    repository_installation_status = repository.status
+                else:
+                    repository_installation_status = None
+            else:
+                repository_id = None
+                repository_installation_status = None
+            for tool_dict in tool_dicts:
+                if not isinstance( tool_dict, dict ):
+                    # Due to some previous bug (hopefully not a current one), invalid tool strings may be included in the
+                    # received list of tool_dicts.  For example, the picard repository metadata has 2 invalid tools in its
+                    # list of supposedly valid tools: 'rgPicardASMetrics.xml', 'rgPicardGCBiasMetrics.xml'.
+                    continue
+                container_object_tool_id += 1
+                requirements = tool_dict.get( 'requirements', None )
+                if requirements is not None:
+                    # 'requirements': [{'version': '1.56.0', 'type': 'package', 'name': 'picard'}],
+                    requirements_str = ''
+                    for requirement_dict in requirements:
+                        try:
+                            requirement_name = str( requirement_dict.get( 'name', 'unknown' ) )
+                            requirement_type = str( requirement_dict.get( 'type', 'unknown' ) )
+                        except Exception as e:
+                            requirement_name = str( e )
+                            requirement_type = 'unknown'
+                        requirements_str += '%s (%s), ' % ( requirement_name, requirement_type )
+                    requirements_str = requirements_str.rstrip( ', ' )
+                else:
+                    requirements_str = 'none'
+                try:
+                    tool_config = str( tool_dict.get( 'tool_config', 'missing' ) )
+                    tool_id = str( tool_dict.get( 'id', 'unknown' ) )
+                    name = str( tool_dict.get( 'name', 'unknown' ) )
+                    description = str( tool_dict.get( 'description', '' ) )
+                    version = str( tool_dict.get( 'version', 'unknown' ) )
+                except Exception as e:
+                    tool_config = str( e )
+                    tool_id = 'unknown'
+                    name = 'unknown'
+                    description = ''
+                    version = 'unknown'
+                tool = Tool( id=container_object_tool_id,
+                             tool_config=tool_config,
+                             tool_id=tool_id,
+                             name=name,
+                             description=description,
+                             version=version,
+                             requirements=requirements_str,
+                             repository_id=repository_id,
+                             changeset_revision=changeset_revision,
+                             repository_installation_status=repository_installation_status )
+                folder.valid_tools.append( tool )
+        else:
+            tools_root_folder = None
+        return folder_id, tools_root_folder
+
+    def build_tool_dependencies_folder( self, folder_id, tool_dependencies, label='Tool dependencies', missing=False,
+                                        new_install=False, reinstalling=False ):
+        """Return a folder hierarchy containing tool dependencies."""
+        # When we're in Galaxy (not the tool shed) and the tool dependencies are not installed or are in an error state,
+        # they are considered missing.  The tool dependency status will be displayed only if a record exists for the tool
+        # dependency in the Galaxy database, but the tool dependency is not installed.  The value for new_install will be
+        # True only if the associated repository is being installed for the first time.  This value is used in setting the
+        # container description.
+        if tool_dependencies:
+            tool_dependency_id = 0
+            folder_id += 1
+            tool_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
+            if self.app.name == 'galaxy':
+                if new_install or reinstalling:
+                    folder.description = "repository tools require handling of these dependencies"
+                elif missing and not new_install and not reinstalling:
+                    folder.description = 'click the name to install the missing dependency'
+                else:
+                    folder.description = 'click the name to browse the dependency installation directory'
+            tool_dependencies_root_folder.folders.append( folder )
+            # Insert a header row.
+            tool_dependency_id += 1
+            if self.app.name == 'galaxy':
+                tool_dependency = ToolDependency( id=tool_dependency_id,
+                                                  name='Name',
+                                                  version='Version',
+                                                  type='Type',
+                                                  readme=None,
+                                                  installation_status='Installation status',
+                                                  repository_id=None,
+                                                  tool_dependency_id=None )
+            else:
+                tool_dependency = ToolDependency( id=tool_dependency_id,
+                                                  name='Name',
+                                                  version='Version',
+                                                  type='Type',
+                                                  readme=None,
+                                                  installation_status=None,
+                                                  repository_id=None,
+                                                  tool_dependency_id=None )
+            folder.tool_dependencies.append( tool_dependency )
+            for dependency_key, requirements_dict in tool_dependencies.items():
+                tool_dependency_id += 1
+                if dependency_key in [ 'set_environment' ]:
+                    for set_environment_dict in requirements_dict:
+                        try:
+                            name = set_environment_dict.get( 'name', None )
+                            type = set_environment_dict[ 'type' ]
+                            repository_id = set_environment_dict.get( 'repository_id', None )
+                            td_id = set_environment_dict.get( 'tool_dependency_id', None )
+                        except Exception as e:
+                            name = str( e )
+                            type = 'unknown'
+                            repository_id = 'unknown'
+                            td_id = 'unknown'
+                        if self.app.name == 'galaxy':
+                            try:
+                                installation_status = set_environment_dict.get( 'status', 'Never installed' )
+                            except Exception as e:
+                                installation_status = str( e )
+                        else:
+                            installation_status = None
+                        tool_dependency = ToolDependency( id=tool_dependency_id,
+                                                          name=name,
+                                                          version=None,
+                                                          type=type,
+                                                          readme=None,
+                                                          installation_status=installation_status,
+                                                          repository_id=repository_id,
+                                                          tool_dependency_id=td_id )
+                        folder.tool_dependencies.append( tool_dependency )
+                else:
+                    try:
+                        name = requirements_dict[ 'name' ]
+                        version = requirements_dict[ 'version' ]
+                        type = requirements_dict[ 'type' ]
+                        repository_id = requirements_dict.get( 'repository_id', None )
+                        td_id = requirements_dict.get( 'tool_dependency_id', None )
+                    except Exception as e:
+                        name = str( e )
+                        version = 'unknown'
+                        type = 'unknown'
+                        repository_id = 'unknown'
+                        td_id = 'unknown'
+                    if self.app.name == 'galaxy':
+                        try:
+                            installation_status = requirements_dict.get( 'status', 'Never installed' )
+                        except Exception as e:
+                            installation_status = str( e )
+                    else:
+                        installation_status = None
+                    tool_dependency = ToolDependency( id=tool_dependency_id,
+                                                      name=name,
+                                                      version=version,
+                                                      type=type,
+                                                      readme=None,
+                                                      installation_status=installation_status,
+                                                      repository_id=repository_id,
+                                                      tool_dependency_id=td_id )
+                    folder.tool_dependencies.append( tool_dependency )
+        else:
+            tool_dependencies_root_folder = None
+        return folder_id, tool_dependencies_root_folder
+
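The loop above treats the 'set_environment' key specially (its value is a list of dicts) and every other key as a single requirements dict. An illustrative input, with the shape inferred from the lookups in build_tool_dependencies_folder and the concrete values invented:

    tool_dependencies = {
        'set_environment': [
            # Each entry describes an environment variable to set.
            {'name': 'R_SCRIPT_PATH', 'type': 'set_environment'},
        ],
        # Ordinary entries are keyed like 'name/version' and carry a
        # name/version/type requirements dict.
        'R/2.15.1': {
            'name': 'R',
            'version': '2.15.1',
            'type': 'package',
        },
    }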
+    def build_workflows_folder( self, folder_id, workflows, repository_metadata_id=None, repository_id=None,
+                                label='Workflows' ):
+        """
+        Return a folder hierarchy containing workflow objects for each workflow dictionary in the
+        received workflows list.  When this method is called from the tool shed, repository_metadata_id
+        will have a value and repository_id will be None.  When this method is called from Galaxy,
+        repository_id will have a value only if the repository is not currently being installed and
+        repository_metadata_id will be None.
+        """
+        if workflows:
+            workflow_id = 0
+            folder_id += 1
+            workflows_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+            folder_id += 1
+            folder = Folder( id=folder_id, key='workflows', label=label, parent=workflows_root_folder )
+            workflows_root_folder.folders.append( folder )
+            # Insert a header row.
+            workflow_id += 1
+            workflow = Workflow( id=workflow_id,
+                                 workflow_name='Name',
+                                 steps='steps',
+                                 format_version='format-version',
+                                 annotation='annotation',
+                                 repository_metadata_id=repository_metadata_id,
+                                 repository_id=repository_id )
+            folder.workflows.append( workflow )
+            for workflow_tup in workflows:
+                workflow_dict = workflow_tup[ 1 ]
+                steps = workflow_dict.get( 'steps', [] )
+                if steps:
+                    steps = str( len( steps ) )
+                else:
+                    steps = 'unknown'
+                workflow_id += 1
+                workflow = Workflow( id=workflow_id,
+                                     workflow_name=workflow_dict.get( 'name', '' ),
+                                     steps=steps,
+                                     format_version=workflow_dict.get( 'format-version', '' ),
+                                     annotation=workflow_dict.get( 'annotation', '' ),
+                                     repository_metadata_id=repository_metadata_id,
+                                     repository_id=repository_id )
+                folder.workflows.append( workflow )
+        else:
+            workflows_root_folder = None
+        return folder_id, workflows_root_folder
+
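build_workflows_folder consumes a list of two-element tuples and only reads the second element, the workflow dictionary. A hedged sketch of that input (the first element is shown as a relative path purely as an assumption, since the code never touches it):

    workflows = [
        ('workflows/cleanup.ga', {
            'name': 'Cleanup',
            # 'steps' may be any sized container; only its length is rendered.
            'steps': {'0': {}, '1': {}, '2': {}},
            'format-version': '0.1',
            'annotation': 'Example annotation',
        }),
    ]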
+    def generate_repository_dependencies_folder_label_from_key( self, repository_name, repository_owner, changeset_revision,
+                                                                prior_installation_required, only_if_compiling_contained_td, key ):
+        """Return a repository dependency label based on the repository dependency key."""
+        if self.key_is_current_repositorys_key( repository_name,
+                                                repository_owner,
+                                                changeset_revision,
+                                                prior_installation_required,
+                                                only_if_compiling_contained_td, key ):
+            label = 'Repository dependencies'
+        else:
+            if util.asbool( prior_installation_required ):
+                prior_installation_required_str = " <i>(prior install required)</i>"
+            else:
+                prior_installation_required_str = ""
+            label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>%s" % \
+                ( repository_name, changeset_revision, repository_owner, prior_installation_required_str )
+        return label
+
+    def get_components_from_repository_dependency_for_installed_repository( self, repository_dependency ):
+        """
+        Parse a repository dependency and return components necessary for proper display
+        in Galaxy on the Manage repository page.
+        """
+        # Default prior_installation_required and only_if_compiling_contained_td to False.
+        prior_installation_required = 'False'
+        only_if_compiling_contained_td = 'False'
+        if len( repository_dependency ) == 6:
+            # Metadata should have been reset on this installed repository, but it wasn't.
+            tool_shed_repository_id = repository_dependency[ 4 ]
+            installation_status = repository_dependency[ 5 ]
+            tool_shed, name, owner, changeset_revision = repository_dependency[ 0:4 ]
+            repository_dependency = [ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ]
+        elif len( repository_dependency ) == 7:
+            # We have a repository dependency tuple that includes a prior_installation_required value but not an only_if_compiling_contained_td value.
+            tool_shed_repository_id = repository_dependency[ 5 ]
+            installation_status = repository_dependency[ 6 ]
+            tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency[ 0:5 ]
+            repository_dependency = \
+                [ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ]
+        elif len( repository_dependency ) == 8:
+            # We have a repository dependency tuple that includes both a prior_installation_required value
+            # and an only_if_compiling_contained_td value.
+            tool_shed_repository_id = repository_dependency[ 6 ]
+            installation_status = repository_dependency[ 7 ]
+            repository_dependency = repository_dependency[ 0:6 ]
+        else:
+            tool_shed_repository_id = None
+            installation_status = 'unknown'
+        if tool_shed_repository_id:
+            tool_shed_repository = repository_util.get_tool_shed_repository_by_id( self.app,
+                                                                                   self.app.security.encode_id( tool_shed_repository_id ) )
+            if tool_shed_repository:
+                if tool_shed_repository.missing_repository_dependencies:
+                    installation_status = '%s, missing repository dependencies' % installation_status
+                elif tool_shed_repository.missing_tool_dependencies:
+                    installation_status = '%s, missing tool dependencies' % installation_status
+        return tool_shed_repository_id, installation_status, repository_dependency
+
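The length-based branching above collapses three historical tuple layouts into one six-component list. The same normalization as a standalone sketch (field order taken from the code above; values are illustrative):

    def normalize(repository_dependency):
        # 6 components: shed, name, owner, changeset, tsr_id, status
        # 7 components: ... plus prior_installation_required before the last two
        # 8 components: ... plus only_if_compiling_contained_td as well
        prior_required = 'False'
        only_if_compiling = 'False'
        rd = list(repository_dependency)
        if len(rd) == 6:
            tsr_id, status = rd[4], rd[5]
            core = rd[0:4] + [prior_required, only_if_compiling]
        elif len(rd) == 7:
            tsr_id, status = rd[5], rd[6]
            core = rd[0:5] + [only_if_compiling]
        elif len(rd) == 8:
            tsr_id, status = rd[6], rd[7]
            core = rd[0:6]
        else:
            tsr_id, status = None, 'unknown'
            core = rd
        return tsr_id, status, core

    assert normalize(['shed', 'bwa', 'devteam', 'abc', 7, 'Installed'])[2] == \
        ['shed', 'bwa', 'devteam', 'abc', 'False', 'False']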
+    def get_folder( self, folder, key ):
+        if folder.key == key:
+            return folder
+        for sub_folder in folder.folders:
+            # Search every sub-folder, not just the first one.
+            found = self.get_folder( sub_folder, key )
+            if found:
+                return found
+        return None
+
+    def handle_repository_dependencies_container_entry( self, repository_dependencies_folder, rd_key, rd_value, folder_id,
+                                                        repository_dependency_id, folder_keys ):
+        repository_components_tuple = container_util.get_components_from_key( rd_key )
+        components_list = repository_util.extract_components_from_tuple( repository_components_tuple )
+        toolshed, repository_name, repository_owner, changeset_revision = components_list[ 0:4 ]
+        # For backward compatibility to the 12/20/12 Galaxy release, default the optional
+        # components and override them when they are present in the key.
+        prior_installation_required = 'False'
+        only_if_compiling_contained_td = 'False'
+        if len( components_list ) == 5:
+            prior_installation_required = components_list[ 4 ]
+        elif len( components_list ) == 6:
+            prior_installation_required = components_list[ 4 ]
+            only_if_compiling_contained_td = components_list[ 5 ]
+        folder = self.get_folder( repository_dependencies_folder, rd_key )
+        label = self.generate_repository_dependencies_folder_label_from_key( repository_name,
+                                                                             repository_owner,
+                                                                             changeset_revision,
+                                                                             prior_installation_required,
+                                                                             only_if_compiling_contained_td,
+                                                                             repository_dependencies_folder.key )
+        if folder:
+            if rd_key not in folder_keys:
+                folder_id += 1
+                sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=folder )
+                folder.folders.append( sub_folder )
+            else:
+                sub_folder = folder
+        else:
+            folder_id += 1
+            sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=repository_dependencies_folder )
+            repository_dependencies_folder.folders.append( sub_folder )
+        if self.app.name == 'galaxy':
+            # Insert a header row.
+            repository_dependency_id += 1
+            repository_dependency = RepositoryDependency( id=repository_dependency_id,
+                                                          repository_name='Name',
+                                                          changeset_revision='Revision',
+                                                          repository_owner='Owner',
+                                                          installation_status='Installation status' )
+            # Insert the header row into the folder.
+            sub_folder.repository_dependencies.append( repository_dependency )
+        for repository_dependency in rd_value:
+            if self.app.name == 'galaxy':
+                tool_shed_repository_id, installation_status, repository_dependency = \
+                    self.get_components_from_repository_dependency_for_installed_repository( repository_dependency )
+            else:
+                tool_shed_repository_id = None
+                installation_status = None
+            can_create_dependency = not self.is_subfolder_of( sub_folder, repository_dependency )
+            if can_create_dependency:
+                toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+                    common_util.parse_repository_dependency_tuple( repository_dependency )
+                repository_dependency_id += 1
+                repository_dependency = RepositoryDependency( id=repository_dependency_id,
+                                                              toolshed=toolshed,
+                                                              repository_name=repository_name,
+                                                              repository_owner=repository_owner,
+                                                              changeset_revision=changeset_revision,
+                                                              prior_installation_required=util.asbool( prior_installation_required ),
+                                                              only_if_compiling_contained_td=util.asbool( only_if_compiling_contained_td ),
+                                                              installation_status=installation_status,
+                                                              tool_shed_repository_id=tool_shed_repository_id )
+                # Insert the repository_dependency into the folder.
+                sub_folder.repository_dependencies.append( repository_dependency )
+        return repository_dependencies_folder, folder_id, repository_dependency_id
+
+    def is_subfolder_of( self, folder, repository_dependency ):
+        toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+            common_util.parse_repository_dependency_tuple( repository_dependency )
+        key = container_util.generate_repository_dependencies_key_for_repository( toolshed,
+                                                                                  repository_name,
+                                                                                  repository_owner,
+                                                                                  changeset_revision,
+                                                                                  prior_installation_required,
+                                                                                  only_if_compiling_contained_td )
+        for sub_folder in folder.folders:
+            if key == sub_folder.key:
+                return True
+        return False
+
+    def key_is_current_repositorys_key( self, repository_name, repository_owner, changeset_revision,
+                                        prior_installation_required, only_if_compiling_contained_td, key ):
+        repository_components_tuple = container_util.get_components_from_key( key )
+        components_list = repository_util.extract_components_from_tuple( repository_components_tuple )
+        toolshed, key_name, key_owner, key_changeset_revision = components_list[ 0:4 ]
+        # For backward compatibility to the 12/20/12 Galaxy release, default the optional
+        # components and override them when they are present in the key.
+        key_prior_installation_required = 'False'
+        key_only_if_compiling_contained_td = 'False'
+        if len( components_list ) == 5:
+            key_prior_installation_required = components_list[ 4 ]
+        elif len( components_list ) == 6:
+            key_prior_installation_required = components_list[ 4 ]
+            key_only_if_compiling_contained_td = components_list[ 5 ]
+        if ( repository_name == key_name and
+             repository_owner == key_owner and
+             changeset_revision == key_changeset_revision and
+             prior_installation_required == key_prior_installation_required and
+             only_if_compiling_contained_td == key_only_if_compiling_contained_td ):
+            return True
+        return False
+
+    def populate_repository_dependencies_container( self, repository_dependencies_folder, repository_dependencies, folder_id,
+                                                    repository_dependency_id ):
+        # Dictionary keys are already unique, so no membership check is needed.
+        folder_keys = list( repository_dependencies.keys() )
+        for key, value in repository_dependencies.items():
+            repository_dependencies_folder, folder_id, repository_dependency_id = \
+                self.handle_repository_dependencies_container_entry( repository_dependencies_folder,
+                                                                     key,
+                                                                     value,
+                                                                     folder_id,
+                                                                     repository_dependency_id,
+                                                                     folder_keys )
+        return repository_dependencies_folder, folder_id, repository_dependency_id
+
+    def prune_folder( self, folder, repository_dependency ):
+        listified_repository_dependency = repository_dependency.listify
+        if self.is_subfolder_of( folder, listified_repository_dependency ):
+            folder.repository_dependencies.remove( repository_dependency )
+
+    def prune_repository_dependencies( self, folder ):
+        """
+        Since the object used to generate a repository dependencies container is a
+        dictionary and not an odict() (it must be JSON-serializable), the order in
+        which the dictionary is processed to create the container sometimes leaves a
+        folder with a repository dependency entry that is also present as one of its
+        sub-folders (when the repository dependency has repository dependencies of
+        its own).  This method removes from a folder all repository dependencies
+        that are also sub-folders of that folder.
+        """
+        repository_dependencies = [ rd for rd in folder.repository_dependencies ]
+        for repository_dependency in repository_dependencies:
+            self.prune_folder( folder, repository_dependency )
+        # Recurse into every sub-folder; an early return inside this loop
+        # would prune only the first sub-folder and skip its siblings.
+        for sub_folder in folder.folders:
+            self.prune_repository_dependencies( sub_folder )
+        return folder
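For illustration, the pruning recursion can be reduced to a minimal, self-contained
sketch. The Folder stand-in below is hypothetical (not Galaxy's real container
classes) and dependencies are plain key strings for brevity:

    # Stand-in Folder type, for illustration only.
    class Folder( object ):
        def __init__( self, key ):
            self.key = key
            self.folders = []
            self.repository_dependencies = []

    def prune( folder ):
        # Drop entries that a sub-folder already represents, then recurse
        # into every sub-folder (an early return here would skip siblings).
        sub_keys = set( f.key for f in folder.folders )
        folder.repository_dependencies = [ rd for rd in folder.repository_dependencies
                                           if rd not in sub_keys ]
        for sub_folder in folder.folders:
            prune( sub_folder )
        return folder

    root = Folder( 'root' )
    root.folders.append( Folder( 'dep-a' ) )
    root.repository_dependencies = [ 'dep-a', 'dep-b' ]  # 'dep-a' duplicates the sub-folder
    prune( root )
    print( root.repository_dependencies )  # -> ['dep-b']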
diff --git a/locale/en/LC_MESSAGES/ginga.mo b/locale/en/LC_MESSAGES/ginga.mo
new file mode 100644
index 0000000..3ad6e60
Binary files /dev/null and b/locale/en/LC_MESSAGES/ginga.mo differ
diff --git a/locale/en/LC_MESSAGES/ginga.po b/locale/en/LC_MESSAGES/ginga.po
new file mode 100644
index 0000000..e9506ff
--- /dev/null
+++ b/locale/en/LC_MESSAGES/ginga.po
@@ -0,0 +1,299 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2009 THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the PACKAGE package.
+# FIRST AUTHOR <EMAIL at ADDRESS>, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2008-09-21 18:33+0900\n"
+"PO-Revision-Date: 2009-03-17 03:55-0400\n"
+"Last-Translator: FULL NAME <EMAIL at ADDRESS>\n"
+"Language-Team: en <LL at li.org>\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.4\n"
+
+#: templates/history/options.mako:17
+msgid " a new empty history"
+msgstr ""
+
+#: templates/history/options.mako:22 templates/history/options.mako:24
+msgid " current history"
+msgstr ""
+
+#: templates/history/options.mako:14
+#, python-format
+msgid " current history (stored as \"%s\")"
+msgstr ""
+
+#: templates/history/options.mako:20
+msgid " from the current history"
+msgstr ""
+
+#: templates/history/options.mako:15
+msgid " previously stored histories"
+msgstr ""
+
+#: templates/history/options.mako:8
+msgid " to store or switch histories."
+msgstr ""
+
+#: templates/user/index.mako:9 templates/user/index.mako:11
+msgid "Account settings"
+msgstr ""
+
+#: templates/root/masthead.mako:39
+msgid "Account: "
+msgstr ""
+
+#: templates/admin_main.mako:16
+msgid "Admin password: "
+msgstr ""
+
+#: templates/root/history_common.mako:41
+msgid "An error occurred running this job: "
+msgstr ""
+
+#: templates/history/options.mako:24
+msgid "Are you sure you want to delete the current history?"
+msgstr ""
+
+#: templates/user/index.mako:16
+msgid "Change your password"
+msgstr ""
+
+#: templates/history/options.mako:20
+msgid "Construct workflow"
+msgstr ""
+
+#: templates/history/options.mako:17
+msgid "Create"
+msgstr ""
+
+#: templates/user/index.mako:24
+msgid "Create new account"
+msgstr ""
+
+#: templates/history/rename.mako:9
+msgid "Current Name"
+msgstr ""
+
+#: templates/history/options.mako:24
+msgid "Delete"
+msgstr ""
+
+#: templates/history/share.mako:23
+msgid "Email of User to share with:"
+msgstr ""
+
+#: templates/root/history_common.mako:76
+#, python-format
+msgid "Error: unknown dataset state \"%s\"."
+msgstr ""
+
+#: templates/admin_main.mako:2 templates/admin_main.mako:7
+msgid "Galaxy Administration"
+msgstr ""
+
+#: templates/root/history.mako:12
+msgid "Galaxy History"
+msgstr ""
+
+#: templates/root/tool_menu.mako:11
+msgid "Galaxy Tools"
+msgstr ""
+
+#: templates/root/index.mako:34
+msgid "History"
+msgstr ""
+
+#: templates/history/share.mako:8
+msgid "History Name:"
+msgstr ""
+
+#: templates/history/options.mako:4
+msgid "History Options"
+msgstr ""
+
+#: templates/root/history_common.mako:57 templates/root/masthead.mako:25
+msgid "Info: "
+msgstr ""
+
+#: templates/root/history_common.mako:38
+msgid "Job is currently running"
+msgstr ""
+
+#: templates/root/history_common.mako:36
+msgid "Job is waiting to run"
+msgstr ""
+
+#: templates/history/options.mako:15
+msgid "List"
+msgstr ""
+
+#: templates/root/masthead.mako:36
+#, python-format
+msgid "Logged in as %s: "
+msgstr ""
+
+#: templates/user/index.mako:23
+msgid "Login"
+msgstr ""
+
+#: templates/user/index.mako:18
+msgid "Logout"
+msgstr ""
+
+#: templates/root/tool_menu.mako:100
+msgid "Manage"
+msgstr ""
+
+#: templates/history/rename.mako:9
+msgid "New Name"
+msgstr ""
+
+#: templates/root/history_common.mako:45
+msgid "No data: "
+msgstr ""
+
+#: templates/history/share.mako:8
+msgid "Number of Datasets:"
+msgstr ""
+
+#: templates/root/index.mako:32
+msgid "Options"
+msgstr ""
+
+#: templates/admin_main.mako:27
+msgid "Reload"
+msgstr ""
+
+#: templates/admin_main.mako:18
+msgid "Reload tool: "
+msgstr ""
+
+#: templates/history/options.mako:14
+msgid "Rename"
+msgstr ""
+
+#: templates/history/rename.mako:2 templates/history/rename.mako:5
+msgid "Rename History"
+msgstr ""
+
+#: templates/history/options.mako:22
+msgid "Share"
+msgstr ""
+
+#: templates/history/share.mako:5
+msgid "Share Histories"
+msgstr ""
+
+#: templates/history/share.mako:8
+msgid "Share Link"
+msgstr ""
+
+#: templates/history/share.mako:2
+msgid "Share histories"
+msgstr ""
+
+#: templates/history/share.mako:14
+msgid "This history contains no data."
+msgstr ""
+
+#: templates/root/index.mako:5
+msgid "Tools"
+msgstr ""
+
+#: templates/user/index.mako:17
+msgid "Update your email address"
+msgstr ""
+
+#: templates/root/tool_menu.mako:95
+msgid "Workflow"
+msgstr ""
+
+#: templates/user/index.mako:14
+#, python-format
+msgid "You are currently logged in as %s."
+msgstr ""
+
+#: templates/user/index.mako:21
+msgid "You are currently not logged in."
+msgstr ""
+
+#: templates/root/history.mako:223
+msgid "You are currently viewing a deleted history!"
+msgstr ""
+
+#: templates/history/options.mako:8
+msgid "You must be "
+msgstr ""
+
+#: templates/root/history.mako:252
+msgid "Your history is empty. Click 'Get Data' on the left pane to start"
+msgstr ""
+
+#: templates/root/masthead.mako:28
+msgid "blog"
+msgstr ""
+
+#: templates/history/share.mako:20
+msgid "copy link to share"
+msgstr ""
+
+#: templates/root/masthead.mako:39
+msgid "create"
+msgstr ""
+
+#: templates/root/history_common.mako:50
+msgid "database: "
+msgstr ""
+
+#: templates/root/history_common.mako:49
+msgid "format: "
+msgstr ""
+
+#: templates/history/options.mako:8
+msgid "logged in"
+msgstr ""
+
+#: templates/root/masthead.mako:40
+msgid "login"
+msgstr ""
+
+#: templates/root/masthead.mako:37
+msgid "logout"
+msgstr ""
+
+#: templates/root/masthead.mako:36
+msgid "manage"
+msgstr ""
+
+#: templates/root/history.mako:218
+msgid "refresh"
+msgstr ""
+
+#: templates/root/masthead.mako:25
+msgid "report bugs"
+msgstr ""
+
+#: templates/root/history_common.mako:42
+msgid "report this error"
+msgstr ""
+
+#: templates/root/masthead.mako:27
+msgid "screencasts"
+msgstr ""
+
+#: templates/root/masthead.mako:26
+msgid "wiki"
+msgstr ""
+
+#: templates/root/tool_menu.mako:100
+msgid "workflows"
+msgstr ""
+
diff --git a/locale/en/LC_MESSAGES/tools.mo b/locale/en/LC_MESSAGES/tools.mo
new file mode 100644
index 0000000..55de14f
Binary files /dev/null and b/locale/en/LC_MESSAGES/tools.mo differ
diff --git a/locale/en/LC_MESSAGES/tools.po b/locale/en/LC_MESSAGES/tools.po
new file mode 100644
index 0000000..f737022
--- /dev/null
+++ b/locale/en/LC_MESSAGES/tools.po
@@ -0,0 +1,85 @@
+# English translations for PROJECT.
+# Copyright (C) 2009 ORGANIZATION
+# This file is distributed under the same license as the PROJECT project.
+# FIRST AUTHOR <EMAIL at ADDRESS>, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PROJECT VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL at ADDRESS\n"
+"POT-Creation-Date: 2009-03-17 04:07-0400\n"
+"PO-Revision-Date: 2009-03-17 04:07-0400\n"
+"Last-Translator: FULL NAME <EMAIL at ADDRESS>\n"
+"Language-Team: en <LL at li.org>\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.4\n"
+
+msgid "Get Data"
+msgstr ""
+
+msgid "Get ENCODE Data"
+msgstr ""
+
+msgid "ENCODE Tools"
+msgstr ""
+
+msgid "Lift-Over"
+msgstr ""
+
+msgid "Text Manipulation"
+msgstr ""
+
+msgid "Filter and Sort"
+msgstr ""
+
+msgid "Join, Subtract and Group"
+msgstr ""
+
+msgid "Convert Formats"
+msgstr ""
+
+msgid "Extract Features"
+msgstr ""
+
+msgid "Fetch Sequences"
+msgstr ""
+
+msgid "Fetch Alignments"
+msgstr ""
+
+msgid "Get Genomic Scores"
+msgstr ""
+
+msgid "Operate on Genomic Intervals"
+msgstr ""
+
+msgid "Statistics"
+msgstr ""
+
+msgid "Graph/Display Data"
+msgstr ""
+
+msgid "Regional Variation"
+msgstr ""
+
+msgid "Evolution: HyPhy"
+msgstr ""
+
+msgid "Taxonomy manipulation"
+msgstr ""
+
+msgid "Solexa tools"
+msgstr ""
+
+msgid "FASTA manipulation"
+msgstr ""
+
+msgid "Short Read QC and Manipulation"
+msgstr ""
+
+msgid "Short Read Mapping"
+msgstr ""
+
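These catalogs are compiled to the .mo files added alongside them, and can be
loaded at runtime with Python's standard-library gettext. A sketch, assuming it
runs from the repository root so that 'locale' resolves:

    import gettext

    # Load the 'tools' catalog for Japanese; fall back to the msgid itself
    # when no translation exists (as in this English catalog, whose msgstr
    # entries are intentionally left empty).
    translation = gettext.translation( 'tools', localedir='locale',
                                       languages=[ 'ja' ], fallback=True )
    print( translation.gettext( 'Get Data' ) )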
diff --git a/locale/ginga.pot b/locale/ginga.pot
new file mode 100644
index 0000000..1703143
--- /dev/null
+++ b/locale/ginga.pot
@@ -0,0 +1,296 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the PACKAGE package.
+# FIRST AUTHOR <EMAIL at ADDRESS>, YEAR.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2008-09-21 18:33+0900\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL at ADDRESS>\n"
+"Language-Team: LANGUAGE <LL at li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#: templates/history/options.mako:17
+msgid " a new empty history"
+msgstr ""
+
+#: templates/history/options.mako:22 templates/history/options.mako:24
+msgid " current history"
+msgstr ""
+
+#: templates/history/options.mako:14
+#, python-format
+msgid " current history (stored as \"%s\")"
+msgstr ""
+
+#: templates/history/options.mako:20
+msgid " from the current history"
+msgstr ""
+
+#: templates/history/options.mako:15
+msgid " previously stored histories"
+msgstr ""
+
+#: templates/history/options.mako:8
+msgid " to store or switch histories."
+msgstr ""
+
+#: templates/user/index.mako:9 templates/user/index.mako:11
+msgid "Account settings"
+msgstr ""
+
+#: templates/root/masthead.mako:39
+msgid "Account: "
+msgstr ""
+
+#: templates/admin_main.mako:16
+msgid "Admin password: "
+msgstr ""
+
+#: templates/root/history_common.mako:41
+msgid "An error occurred running this job: "
+msgstr ""
+
+#: templates/history/options.mako:24
+msgid "Are you sure you want to delete the current history?"
+msgstr ""
+
+#: templates/user/index.mako:16
+msgid "Change your password"
+msgstr ""
+
+#: templates/history/options.mako:20
+msgid "Construct workflow"
+msgstr ""
+
+#: templates/history/options.mako:17
+msgid "Create"
+msgstr ""
+
+#: templates/user/index.mako:24
+msgid "Create new account"
+msgstr ""
+
+#: templates/history/rename.mako:9
+msgid "Current Name"
+msgstr ""
+
+#: templates/history/options.mako:24
+msgid "Delete"
+msgstr ""
+
+#: templates/history/share.mako:23
+msgid "Email of User to share with:"
+msgstr ""
+
+#: templates/root/history_common.mako:76
+#, python-format
+msgid "Error: unknown dataset state \"%s\"."
+msgstr ""
+
+#: templates/admin_main.mako:2 templates/admin_main.mako:7
+msgid "Galaxy Administration"
+msgstr ""
+
+#: templates/root/history.mako:12
+msgid "Galaxy History"
+msgstr ""
+
+#: templates/root/tool_menu.mako:11
+msgid "Galaxy Tools"
+msgstr ""
+
+#: templates/root/index.mako:34
+msgid "History"
+msgstr ""
+
+#: templates/history/share.mako:8
+msgid "History Name:"
+msgstr ""
+
+#: templates/history/options.mako:4
+msgid "History Options"
+msgstr ""
+
+#: templates/root/history_common.mako:57 templates/root/masthead.mako:25
+msgid "Info: "
+msgstr ""
+
+#: templates/root/history_common.mako:38
+msgid "Job is currently running"
+msgstr ""
+
+#: templates/root/history_common.mako:36
+msgid "Job is waiting to run"
+msgstr ""
+
+#: templates/history/options.mako:15
+msgid "List"
+msgstr ""
+
+#: templates/root/masthead.mako:36
+#, python-format
+msgid "Logged in as %s: "
+msgstr ""
+
+#: templates/user/index.mako:23
+msgid "Login"
+msgstr ""
+
+#: templates/user/index.mako:18
+msgid "Logout"
+msgstr ""
+
+#: templates/root/tool_menu.mako:100
+msgid "Manage"
+msgstr ""
+
+#: templates/history/rename.mako:9
+msgid "New Name"
+msgstr ""
+
+#: templates/root/history_common.mako:45
+msgid "No data: "
+msgstr ""
+
+#: templates/history/share.mako:8
+msgid "Number of Datasets:"
+msgstr ""
+
+#: templates/root/index.mako:32
+msgid "Options"
+msgstr ""
+
+#: templates/admin_main.mako:27
+msgid "Reload"
+msgstr ""
+
+#: templates/admin_main.mako:18
+msgid "Reload tool: "
+msgstr ""
+
+#: templates/history/options.mako:14
+msgid "Rename"
+msgstr ""
+
+#: templates/history/rename.mako:2 templates/history/rename.mako:5
+msgid "Rename History"
+msgstr ""
+
+#: templates/history/options.mako:22
+msgid "Share"
+msgstr ""
+
+#: templates/history/share.mako:5
+msgid "Share Histories"
+msgstr ""
+
+#: templates/history/share.mako:8
+msgid "Share Link"
+msgstr ""
+
+#: templates/history/share.mako:2
+msgid "Share histories"
+msgstr ""
+
+#: templates/history/share.mako:14
+msgid "This history contains no data."
+msgstr ""
+
+#: templates/root/index.mako:5
+msgid "Tools"
+msgstr ""
+
+#: templates/user/index.mako:17
+msgid "Update your email address"
+msgstr ""
+
+#: templates/root/tool_menu.mako:95
+msgid "Workflow"
+msgstr ""
+
+#: templates/user/index.mako:14
+#, python-format
+msgid "You are currently logged in as %s."
+msgstr ""
+
+#: templates/user/index.mako:21
+msgid "You are currently not logged in."
+msgstr ""
+
+#: templates/root/history.mako:223
+msgid "You are currently viewing a deleted history!"
+msgstr ""
+
+#: templates/history/options.mako:8
+msgid "You must be "
+msgstr ""
+
+#: templates/root/history.mako:252
+msgid "Your history is empty. Click 'Get Data' on the left pane to start"
+msgstr ""
+
+#: templates/root/masthead.mako:28
+msgid "blog"
+msgstr ""
+
+#: templates/history/share.mako:20
+msgid "copy link to share"
+msgstr ""
+
+#: templates/root/masthead.mako:39
+msgid "create"
+msgstr ""
+
+#: templates/root/history_common.mako:50
+msgid "database: "
+msgstr ""
+
+#: templates/root/history_common.mako:49
+msgid "format: "
+msgstr ""
+
+#: templates/history/options.mako:8
+msgid "logged in"
+msgstr ""
+
+#: templates/root/masthead.mako:40
+msgid "login"
+msgstr ""
+
+#: templates/root/masthead.mako:37
+msgid "logout"
+msgstr ""
+
+#: templates/root/masthead.mako:36
+msgid "manage"
+msgstr ""
+
+#: templates/root/history.mako:218
+msgid "refresh"
+msgstr ""
+
+#: templates/root/masthead.mako:25
+msgid "report bugs"
+msgstr ""
+
+#: templates/root/history_common.mako:42
+msgid "report this error"
+msgstr ""
+
+#: templates/root/masthead.mako:27
+msgid "screencasts"
+msgstr ""
+
+#: templates/root/masthead.mako:26
+msgid "wiki"
+msgstr ""
+
+#: templates/root/tool_menu.mako:100
+msgid "workflows"
+msgstr ""
diff --git a/locale/ja/LC_MESSAGES/ginga.mo b/locale/ja/LC_MESSAGES/ginga.mo
new file mode 100644
index 0000000..22e2807
Binary files /dev/null and b/locale/ja/LC_MESSAGES/ginga.mo differ
diff --git a/locale/ja/LC_MESSAGES/ginga.po b/locale/ja/LC_MESSAGES/ginga.po
new file mode 100644
index 0000000..8037ede
--- /dev/null
+++ b/locale/ja/LC_MESSAGES/ginga.po
@@ -0,0 +1,518 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the PACKAGE package.
+# FIRST AUTHOR <EMAIL at ADDRESS>, YEAR.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: ginga 1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2008-09-15 13:58+0900\n"
+"PO-Revision-Date: 2008-09-21 19:13+0900\n"
+"Last-Translator: Mitsuteru Nakao <mn at kazusa.or.jp>\n"
+"Language-Team: Japanese\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+
+#:
+msgid "iso-8859-1"
+msgstr "utf-8"
+
+#:
+msgid "lang=\"en\""
+msgstr "lang=\"ja\""
+
+
+#: templates/base_panels.mako:5
+msgid "Galaxy"
+msgstr "Galaxy"
+
+#: templates/history/options.mako:24
+msgid "Are you sure you want to delete the current history?"
+msgstr "現在のヒストリーを消すことに同意しますか?"
+
+#: templates/root/history.mako:38
+msgid "collapse all"
+msgstr "すべてをおりたたむ"
+
+#: templates/root/index.mako:5
+msgid "Tools"
+msgstr "ツール"
+
+
+
+#: tools/**.xml
+msgid "Get Data"
+msgstr "データ取得"
+
+msgid "Get ENCODE Data"
+msgstr "ENCODEデータ取得"
+
+msgid "ENCODE Tools"
+msgstr "ENCODEツール"
+
+msgid "Lift-Over"
+msgstr ""
+
+msgid "Text Manipulation"
+msgstr "テキスト操作"
+
+msgid "Filter and Sort"
+msgstr "フィルターとソート"
+
+msgid "Join, Subtract and Group"
+msgstr "結合、差分、集合演算"
+
+msgid "Convert Formats"
+msgstr "フォーマット変換"
+
+msgid "Extract Features"
+msgstr "Features抽出"
+
+msgid "Fetch Sequences"
+msgstr "配列の取得"
+
+msgid "Fetch Alignments"
+msgstr "アラインメントの取得"
+
+msgid "Get Genomic Scores"
+msgstr "ゲノムスコア取得"
+
+msgid "Operate on Genomic Intervals"
+msgstr "ゲノム間隔での操作"
+
+msgid "Statistics"
+msgstr "統計量"
+
+msgid "Graph/Display Data"
+msgstr "グラフ/データ表示"
+
+msgid "Regional Variation"
+msgstr "領域的多型"
+
+msgid "Evolution: HyPhy"
+msgstr "進化: HyPhy"
+
+msgid "Taxonomy manipulation"
+msgstr "系統学的操作"
+
+msgid "Solexa tools"
+msgstr "Solexaツールス"
+
+msgid "FASTA manipulation"
+msgstr "FASTA操作"
+
+msgid "Short Read QC and Manipulation"
+msgstr "Short Read QCと操作"
+
+msgid "Short Read Mapping"
+msgstr "Short Readマッピング"
+
+
+
+
+
+#: templates/admin_main.mako:3 templates/admin_main.mako:8
+msgid "Galaxy Administration"
+msgstr "Galaxy 管理"
+
+#: templates/admin_main.mako:17
+msgid "Admin password: "
+msgstr "管理者パスワード: "
+
+#: templates/admin_main.mako:19
+msgid "Reload tool: "
+msgstr ""
+
+#: templates/admin_main.mako:35
+msgid "Reload"
+msgstr "再読込する"
+
+#: templates/dataset/edit_attributes.mako:2
+msgid "History Item Attributes"
+msgstr "ヒストリーアイテム変数"
+
+#: templates/dataset/edit_attributes.mako:19
+msgid "Edit Attributes"
+msgstr "変数を編集する"
+
+#: templates/dataset/edit_attributes.mako:64
+msgid ""
+"This will inspect the dataset and attempt to correct the above column values "
+"if they are not accurate."
+msgstr "これはデータセットを調査して上記のカラムの値を修正することを試みます。"
+
+#: templates/dataset/edit_attributes.mako:68
+msgid ""
+"Required metadata values are missing. Some of these values may not be "
+"editable by the user. Selecting \"Auto-detect\" will attempt to fix these "
+"values."
+msgstr "必要なメタデータの値が不明です。それらのいくつかの値はユーザによって編集可能にはなっていません。「自動判定」を選択するとそれらの値をただしくできるかもしれません。"
+
+#: templates/dataset/edit_attributes.mako:78
+msgid "Convert to new format"
+msgstr "新しいフォーマットに変換する"
+
+#: templates/dataset/edit_attributes.mako:84
+msgid "Convert to"
+msgstr "変換する"
+
+#: templates/dataset/edit_attributes.mako:95
+msgid ""
+"This will create a new dataset with the contents of this dataset converted "
+"to a new format."
+msgstr "新しいフォーマットに変換したデータセットを新規作成します。"
+
+#: templates/dataset/edit_attributes.mako:111
+msgid "Change data type"
+msgstr "データタイプを変更する"
+
+#: templates/dataset/edit_attributes.mako:117
+msgid "New Type"
+msgstr "新しいタイプ"
+
+#: templates/dataset/edit_attributes.mako:124
+msgid ""
+"This will change the datatype of the existing dataset but <i>not</i> modify "
+"its contents. Use this if Galaxy has incorrectly guessed the type of your "
+"dataset."
+msgstr "これは既存のデータセットのデータタイプを変更します。しかしデータセットの中身は変更しません。データセットのタイプの誤判定があったときに使用します。"
+
+#: templates/dataset/edit_attributes.mako:137
+msgid "Copy History Item"
+msgstr "ヒストリーアイテムをコピーする"
+
+#: templates/history/list.mako:3
+msgid "Your saved histories"
+msgstr "保存したヒストリー"
+
+#: templates/history/list.mako:19
+msgid "Stored Histories"
+msgstr "格納してあるヒストリー"
+
+#: templates/history/list.mako:21 templates/root/history.mako:239
+msgid "hide deleted"
+msgstr "削除したヒストリーを隠す"
+
+#: templates/history/list.mako:23
+msgid "show deleted"
+msgstr "削除したヒストリーを表示する"
+
+#: templates/history/list.mako:27
+msgid "Name"
+msgstr "名前"
+
+#: templates/history/list.mako:27
+msgid "Size"
+msgstr "サイズ"
+
+#: templates/history/list.mako:27
+msgid "Last modified"
+msgstr "最終更新日"
+
+#: templates/history/list.mako:27
+msgid "Actions"
+msgstr "操作"
+
+#: templates/history/list.mako:45
+msgid "rename"
+msgstr "名称変更する"
+
+#: templates/history/list.mako:46
+msgid "switch to"
+msgstr "変更する"
+
+#: templates/history/list.mako:47
+msgid "delete"
+msgstr "削除する"
+
+#: templates/history/list.mako:49
+msgid "undelete"
+msgstr "削除から戻す"
+
+#: templates/history/list.mako:55
+msgid "Action"
+msgstr "操作"
+
+#: templates/history/list.mako:56 templates/history/options.mako:21
+msgid "Share"
+msgstr "共有"
+
+#: templates/history/list.mako:56 templates/history/options.mako:15
+msgid "Rename"
+msgstr "名称変更する"
+
+#: templates/history/list.mako:56 templates/history/options.mako:24
+msgid "Delete"
+msgstr "削除する"
+
+#: templates/history/list.mako:58
+msgid "Undelete"
+msgstr "削除から戻す"
+
+#: templates/history/list.mako:65
+msgid "You have no stored histories"
+msgstr "保管してあるヒストリーはありません"
+
+#: templates/history/options.mako:5
+msgid "History Options"
+msgstr "ヒストリーオプション"
+
+#: templates/history/options.mako:9
+msgid "You must be "
+msgstr "あなたは"
+
+#: templates/history/options.mako:9
+msgid "logged in"
+msgstr "ログイン"
+
+#: templates/history/options.mako:9
+msgid " to store or switch histories."
+msgstr "しないとヒストリーの保管や変更ができません。"
+
+#: templates/history/options.mako:15
+#, python-format
+msgid " current history (stored as \"%s\")"
+msgstr " 現在のヒストリー(\"%s\" として保管されています)"
+
+#: templates/history/options.mako:16
+msgid "List"
+msgstr "リストする"
+
+#: templates/history/options.mako:16
+msgid " previously stored histories"
+msgstr " 以前に保管したヒストリー"
+
+#: templates/history/options.mako:18
+msgid "Create"
+msgstr "作成する"
+
+#: templates/history/options.mako:18
+msgid " a new empty history"
+msgstr " 新規ヒストリー"
+
+#: templates/history/options.mako:20
+msgid "Construct workflow"
+msgstr "ワークフローを構築する"
+
+#: templates/history/options.mako:20
+msgid " from the current history"
+msgstr " 現在のヒストリーから"
+
+#: templates/history/options.mako:21 templates/history/options.mako:24
+msgid " current history"
+msgstr " 現在のヒストリー"
+
+#: templates/history/options.mako:23
+msgid "Show deleted"
+msgstr "削除したヒストリーを表示する"
+
+#: templates/history/options.mako:23
+msgid " datasets in history"
+msgstr " ヒストリーのデータセット"
+
+#: templates/history/rename.mako:3 templates/history/rename.mako:6
+msgid "Rename History"
+msgstr "ヒストリーの名称変更をする"
+
+
+msgid "Rename Histories"
+msgstr "名称変更する"
+
+msgid "Perform Action"
+msgstr "操作を実行する"
+
+msgid "Submit"
+msgstr "登録する"
+
+
+
+#: templates/history/rename.mako:10
+msgid "Current Name"
+msgstr "現在の名称"
+
+#: templates/history/rename.mako:10
+msgid "New Name"
+msgstr "新しい名称"
+
+#: templates/history/share.mako:3
+msgid "Share histories"
+msgstr "ヒストリーを共有する"
+
+#: templates/history/share.mako:6
+msgid "Share Histories"
+msgstr "ヒストリーを共有する"
+
+#: templates/history/share.mako:9
+msgid "History Name:"
+msgstr "ヒストリー名"
+
+#: templates/history/share.mako:9
+msgid "Number of Datasets:"
+msgstr "データセット数"
+
+#: templates/history/share.mako:9
+msgid "Share Link"
+msgstr "共有リンク"
+
+#: templates/history/share.mako:15
+msgid "This history contains no data."
+msgstr "このヒストリーにはデータがありません。"
+
+#: templates/history/share.mako:21
+msgid "copy link to share"
+msgstr "共有リンクをコピーする"
+
+#: templates/history/share.mako:24
+msgid "Email of User to share with:"
+msgstr "共有したいユーザのEメール:"
+#msgstr "つぎのヒストリーを共有するユーザのEメールアドレス:"
+
+#: templates/root/history.mako:7
+msgid "Galaxy History"
+msgstr "Galaxy ヒストリー"
+
+#: templates/root/history.mako:237
+msgid "refresh"
+msgstr "リフレッシュ"
+
+#: templates/root/history.mako:245
+msgid "You are currently viewing a deleted history!"
+msgstr "消去したヒストリーをみています。"
+
+#: templates/root/history.mako:289
+msgid "Your history is empty. Click 'Get Data' on the left pane to start"
+msgstr "ヒストリーは空です。解析をはじめるには、左パネルの 'データ取得' をクリック"
+
+#: templates/root/history_common.mako:41
+msgid "Job is waiting to run"
+msgstr "ジョブは実行待ちです"
+
+#: templates/root/history_common.mako:43
+msgid "Job is currently running"
+msgstr "ジョブは実行中です"
+
+#: templates/root/history_common.mako:46
+msgid "An error occurred running this job: "
+msgstr "このジョブの実行中に発生したエラー: "
+
+#: templates/root/history_common.mako:47
+msgid "report this error"
+msgstr "このエラーを報告する"
+
+#: templates/root/history_common.mako:54
+msgid "No data: "
+msgstr "データ無し: "
+
+#: templates/root/history_common.mako:58
+msgid "format: "
+msgstr "フォーマット: "
+
+#: templates/root/history_common.mako:59
+msgid "database: "
+msgstr "データベース: "
+
+#: templates/root/history_common.mako:66 templates/root/masthead.mako:20
+msgid "Info: "
+msgstr "情報: "
+
+#: templates/root/history_common.mako:85
+#, python-format
+msgid "Error: unknown dataset state \"%s\"."
+msgstr "エラー: 不明なデータ状態 \"%s\"。"
+
+#: templates/root/index.mako:32
+msgid "Options"
+msgstr "オプション"
+
+#: templates/root/index.mako:34
+msgid "History"
+msgstr "ヒストリー"
+
+#: templates/root/masthead.mako:20
+msgid "report bugs"
+msgstr "バグを報告する"
+
+#: templates/root/masthead.mako:21
+msgid "wiki"
+msgstr "wiki"
+
+#: templates/root/masthead.mako:22
+msgid "screencasts"
+msgstr "スクリーンキャスト"
+
+#: templates/root/masthead.mako:23
+msgid "blog"
+msgstr "ブログ"
+
+#: templates/root/masthead.mako:31
+#, python-format
+msgid "Logged in as %s: "
+msgstr "%s としてログイン中: "
+
+#: templates/root/masthead.mako:31
+msgid "manage"
+msgstr "管理"
+
+#: templates/root/masthead.mako:32
+msgid "logout"
+msgstr "ログアウト"
+
+#: templates/root/masthead.mako:34
+msgid "Account: "
+msgstr "アカウント: "
+
+#: templates/root/masthead.mako:34
+msgid "create"
+msgstr "作成"
+
+#: templates/root/masthead.mako:35
+msgid "login"
+msgstr "ログイン"
+
+#: templates/root/tool_menu.mako:52
+msgid "Galaxy Tools"
+msgstr "Galaxy ツール群"
+
+#: templates/root/tool_menu.mako:129
+msgid "Workflow"
+msgstr "ワークフロー"
+
+#: templates/root/tool_menu.mako:134
+msgid "Manage"
+msgstr "管理"
+
+#: templates/root/tool_menu.mako:134
+msgid "workflows"
+msgstr "ワークフロー"
+
+#: templates/user/index.mako:2 templates/user/index.mako:4
+msgid "Account settings"
+msgstr "アカウント設定"
+
+#: templates/user/index.mako:7
+#, python-format
+msgid "You are currently logged in as %s."
+msgstr "%s としてログイン中。"
+
+#: templates/user/index.mako:9
+msgid "Change your password"
+msgstr "パスワード変更"
+
+#: templates/user/index.mako:10
+msgid "Update your email address"
+msgstr "メールアドレス変更"
+
+#: templates/user/index.mako:11
+msgid "Logout"
+msgstr "ログアウト"
+
+#: templates/user/index.mako:16
+msgid "Login"
+msgstr "ログイン"
+
+#: templates/user/index.mako:17
+msgid "Create new account"
+msgstr "新規アカウントを作成する"
diff --git a/locale/ja/LC_MESSAGES/tools.mo b/locale/ja/LC_MESSAGES/tools.mo
new file mode 100644
index 0000000..f6769d5
Binary files /dev/null and b/locale/ja/LC_MESSAGES/tools.mo differ
diff --git a/locale/ja/LC_MESSAGES/tools.po b/locale/ja/LC_MESSAGES/tools.po
new file mode 100644
index 0000000..9e3960d
--- /dev/null
+++ b/locale/ja/LC_MESSAGES/tools.po
@@ -0,0 +1,85 @@
+# Japanese translations for PROJECT.
+# Copyright (C) 2009 ORGANIZATION
+# This file is distributed under the same license as the PROJECT project.
+# FIRST AUTHOR <EMAIL at ADDRESS>, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PROJECT VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL at ADDRESS\n"
+"POT-Creation-Date: 2009-03-17 04:06-0400\n"
+"PO-Revision-Date: 2009-03-17 04:06-0400\n"
+"Last-Translator: FULL NAME <EMAIL at ADDRESS>\n"
+"Language-Team: ja <LL at li.org>\n"
+"Plural-Forms: nplurals=1; plural=0\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.4\n"
+
+msgid "Get Data"
+msgstr ""
+
+msgid "Get ENCODE Data"
+msgstr ""
+
+msgid "ENCODE Tools"
+msgstr ""
+
+msgid "Lift-Over"
+msgstr ""
+
+msgid "Text Manipulation"
+msgstr ""
+
+msgid "Filter and Sort"
+msgstr ""
+
+msgid "Join, Subtract and Group"
+msgstr ""
+
+msgid "Convert Formats"
+msgstr ""
+
+msgid "Extract Features"
+msgstr ""
+
+msgid "Fetch Sequences"
+msgstr ""
+
+msgid "Fetch Alignments"
+msgstr ""
+
+msgid "Get Genomic Scores"
+msgstr ""
+
+msgid "Operate on Genomic Intervals"
+msgstr ""
+
+msgid "Statistics"
+msgstr ""
+
+msgid "Graph/Display Data"
+msgstr ""
+
+msgid "Regional Variation"
+msgstr ""
+
+msgid "Evolution: HyPhy"
+msgstr ""
+
+msgid "Taxonomy manipulation"
+msgstr ""
+
+msgid "Solexa tools"
+msgstr ""
+
+msgid "FASTA manipulation"
+msgstr ""
+
+msgid "Short Read QC and Manipulation"
+msgstr ""
+
+msgid "Short Read Mapping"
+msgstr ""
+
diff --git a/locale/tools.pot b/locale/tools.pot
new file mode 100644
index 0000000..480f557
--- /dev/null
+++ b/locale/tools.pot
@@ -0,0 +1,66 @@
+#: tools/**.xml
+msgid "Get Data"
+msgstr ""
+
+msgid "Get ENCODE Data"
+msgstr ""
+
+msgid "ENCODE Tools"
+msgstr ""
+
+msgid "Lift-Over"
+msgstr ""
+
+msgid "Text Manipulation"
+msgstr ""
+
+msgid "Filter and Sort"
+msgstr ""
+
+msgid "Join, Subtract and Group"
+msgstr ""
+
+msgid "Convert Formats"
+msgstr ""
+
+msgid "Extract Features"
+msgstr ""
+
+msgid "Fetch Sequences"
+msgstr ""
+
+msgid "Fetch Alignments"
+msgstr ""
+
+msgid "Get Genomic Scores"
+msgstr ""
+
+msgid "Operate on Genomic Intervals"
+msgstr ""
+
+msgid "Statistics"
+msgstr ""
+
+msgid "Graph/Display Data"
+msgstr ""
+
+msgid "Regional Variation"
+msgstr ""
+
+msgid "Evolution: HyPhy"
+msgstr ""
+
+msgid "Taxonomy manipulation"
+msgstr ""
+
+msgid "Solexa tools"
+msgstr ""
+
+msgid "FASTA manipulation"
+msgstr ""
+
+msgid "Short Read QC and Manipulation"
+msgstr ""
+
+msgid "Short Read Mapping"
+msgstr ""
diff --git a/locale/zh/LC_MESSAGES/ginga.mo b/locale/zh/LC_MESSAGES/ginga.mo
new file mode 100644
index 0000000..313beca
Binary files /dev/null and b/locale/zh/LC_MESSAGES/ginga.mo differ
diff --git a/locale/zh/LC_MESSAGES/ginga.po b/locale/zh/LC_MESSAGES/ginga.po
new file mode 100644
index 0000000..7f13341
--- /dev/null
+++ b/locale/zh/LC_MESSAGES/ginga.po
@@ -0,0 +1,603 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the PACKAGE package.
+# FIRST AUTHOR <EMAIL at ADDRESS>, YEAR.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: yinhe 1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2011-10-15 13:58+0800\n"
+"PO-Revision-Date: 2011-11-04 19:13+0900\n"
+"Last-Translator: Hanfei Sun <hfsun.tju at gmail.com> Juan Wang <jinling8 at gmail.com>\n"
+"Language-Team: Chinese\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+
+#:
+msgid "iso-8859-1"
+msgstr "utf-8"
+
+#:
+msgid "lang=\"en\""
+msgstr "lang=\"zh\""
+
+
+#: templates/base_panels.mako:5
+msgid "Galaxy"
+msgstr "Galaxy"
+
+#: templates/history/options.mako:24
+msgid "Are you sure you want to delete the current history?"
+msgstr "确认要删除当前的历史记录吗?"
+
+#: templates/root/history.mako:38
+msgid "collapse all"
+msgstr "全部收缩"
+
+#: templates/root/index.mako:5
+msgid "Tools"
+msgstr "工具"
+
+
+#: tools/**.xml
+msgid "Get Data"
+msgstr "获取数据"
+
+msgid "Get ENCODE Data"
+msgstr "获取ENCODE数据"
+
+msgid "ENCODE Tools"
+msgstr "ENCODE工具"
+
+msgid "Lift-Over"
+msgstr "版本转换"
+
+msgid "Text Manipulation"
+msgstr "文本操作"
+
+msgid "Filter and Sort"
+msgstr "过滤和排序"
+
+msgid "Join, Subtract and Group"
+msgstr "结合,差集与分组"
+
+msgid "Convert Formats"
+msgstr "格式转换"
+
+msgid "Extract Features"
+msgstr "特征提取"
+
+msgid "Fetch Sequences"
+msgstr "获取序列"
+
+msgid "Fetch Alignments"
+msgstr "获取比对上的序列"
+
+msgid "Get Genomic Scores"
+msgstr "获得基因组分数"
+
+msgid "Operate on Genomic Intervals"
+msgstr "基因组区间操作"
+
+msgid "Statistics"
+msgstr "统计量"
+
+msgid "Graph/Display Data"
+msgstr "图形/数据"
+
+msgid "Regional Variation"
+msgstr "区域多态性"
+
+msgid "Evolution: HyPhy"
+msgstr "进化: HyPhy"
+
+msgid "Taxonomy manipulation"
+msgstr "分类处理"
+
+msgid "Solexa tools"
+msgstr "Solexa工具"
+
+msgid "FASTA manipulation"
+msgstr "FASTA处理"
+
+msgid "Short Read QC and Manipulation"
+msgstr "短片段数据质量控制及处理"
+
+msgid "Short Read Mapping"
+msgstr "短片段回贴"
+
+
+#: templates/admin_main.mako:3 templates/admin_main.mako:8
+msgid "Galaxy Administration"
+msgstr "Galaxy 管理"
+
+#: templates/admin_main.mako:17
+msgid "Admin password: "
+msgstr "管理员密码: "
+
+#: templates/admin_main.mako:19
+msgid "Reload tool: "
+msgstr "重新载入工具"
+
+#: templates/admin_main.mako:35
+msgid "Reload"
+msgstr "重新载入"
+
+#: templates/dataset/edit_attributes.mako:2
+msgid "History Item Attributes"
+msgstr "历史项目属性"
+
+#: templates/dataset/edit_attributes.mako:19
+msgid "Edit attributes"
+msgstr "编辑属性"
+
+#: templates/dataset/edit_attributes.mako:64
+msgid ""
+"This will inspect the dataset and attempt to correct the above column values "
+"if they are not accurate."
+msgstr "数据集检查,若有错误,更正上述栏中的值。"
+
+#: templates/dataset/edit_attributes.mako:68
+msgid ""
+"Required metadata values are missing. Some of these values may not be "
+"editable by the user. Selecting \"Auto-detect\" will attempt to fix these "
+"values."
+msgstr "缺少所需的metadata的值。用户可能无法对这些值进行编辑。选择“自动检测”来尝试修正这些值。"
+
+#: templates/dataset/edit_attributes.mako:78
+msgid "Convert to new format"
+msgstr "转换为新格式"
+
+#: templates/dataset/edit_attributes.mako:84
+msgid "Convert to"
+msgstr "转换为"
+
+#: templates/dataset/edit_attributes.mako:95
+msgid ""
+"This will create a new dataset with the contents of this dataset converted "
+"to a new format."
+msgstr "这将产生一个转换格式后的新数据集,"
+
+#: templates/dataset/edit_attributes.mako:111
+msgid "Change data type"
+msgstr "改变数据类型"
+
+#: templates/dataset/edit_attributes.mako:117
+msgid "New Type"
+msgstr "新类型"
+
+#: templates/dataset/edit_attributes.mako:124
+msgid ""
+"This will change the datatype of the existing dataset but <i>not</i> modify "
+"its contents. Use this if Galaxy has incorrectly guessed the type of your "
+"dataset."
+msgstr "这将改变已有数据集的数据类型,但<i>不</i>改变其内容。当Galaxy不能正确判断你的数据类型时,设置该参数。"
+
+#: templates/dataset/edit_attributes.mako:137
+msgid "Copy History Item"
+msgstr "复制历史记录项"
+
+#: templates/history/list.mako:3
+msgid "Saved Histories"
+msgstr "已保存的历史"
+
+
+#: templates/history/list.mako:21 templates/root/history.mako:239
+msgid "hide deleted"
+msgstr "隐藏已删除的数据"
+
+#: templates/history/list.mako:23
+msgid "show deleted"
+msgstr "显示已删除的数据"
+
+#: templates/history/list.mako:27
+msgid "Name"
+msgstr "名称"
+
+#: templates/history/list.mako:27
+msgid "Size"
+msgstr "大小"
+
+#: templates/history/list.mako:27
+msgid "Last modified"
+msgstr "最后修改时间"
+
+#: templates/history/list.mako:27
+msgid "Actions"
+msgstr "操作"
+
+#: templates/history/list.mako:45
+msgid "rename"
+msgstr "重命名"
+
+#: templates/history/list.mako:46
+msgid "switch to"
+msgstr "切换到"
+
+#: templates/history/list.mako:47
+msgid "delete"
+msgstr "删除"
+
+#: templates/history/list.mako:49
+msgid "undelete"
+msgstr "还原"
+
+#: templates/history/list.mako:55
+msgid "Action"
+msgstr "操作"
+
+#: templates/history/list.mako:56 templates/history/options.mako:21
+msgid "Share"
+msgstr "共享"
+
+#: templates/history/list.mako:56 templates/history/options.mako:15
+msgid "Rename"
+msgstr "重命名"
+
+#: templates/history/list.mako:56 templates/history/options.mako:24
+msgid "Delete"
+msgstr "删除"
+
+#: templates/history/list.mako:58
+msgid "Undelete"
+msgstr "还原"
+
+#: templates/history/list.mako:65
+msgid "You have no stored histories"
+msgstr "没有存储的历史记录"
+
+#: templates/history/options.mako:5
+msgid "History Options"
+msgstr "历史记录选项"
+
+#: templates/history/options.mako:9
+msgid "You must be "
+msgstr "你必须成为"
+
+#: templates/history/options.mako:9
+msgid "logged in"
+msgstr "登录"
+
+#: templates/history/options.mako:9
+msgid " to store or switch histories."
+msgstr "以存储或切换历史记录"
+
+#: templates/history/options.mako:15
+#, python-format
+msgid " current history (stored as \"%s\")"
+msgstr " 当前历史(以\"%s\"形式存储)"
+
+#: templates/history/options.mako:16
+msgid "List"
+msgstr "列表"
+
+#: templates/history/options.mako:16
+msgid " previously stored histories"
+msgstr " 以前存储的历史记录"
+
+#: templates/history/options.mako:18
+msgid "Create"
+msgstr "创建"
+
+#: templates/history/options.mako:18
+msgid " a new empty history"
+msgstr " 一个新的空白历史记录"
+
+#: templates/history/options.mako:20
+msgid "Construct workflow"
+msgstr "构建工作流程"
+
+#: templates/history/options.mako:20
+msgid " from the current history"
+msgstr " 来源于当前历史"
+
+#: templates/history/options.mako:21 templates/history/options.mako:24
+msgid " current history"
+msgstr " 当前历史"
+
+#: templates/history/options.mako:23
+msgid "Show deleted"
+msgstr "显示已删除"
+
+#: templates/history/options.mako:23
+msgid " datasets in history"
+msgstr " 历史中的数据集"
+
+#: templates/history/rename.mako:3 templates/history/rename.mako:6
+msgid "Rename History"
+msgstr "重命名历史"
+
+
+msgid "Rename Histories"
+msgstr "重命名历史记录"
+
+msgid "Perform Action"
+msgstr "运行操作"
+
+msgid "Submit"
+msgstr "提交"
+
+
+
+#: templates/history/rename.mako:10
+msgid "Current Name"
+msgstr "当前名称"
+
+#: templates/history/rename.mako:10
+msgid "New Name"
+msgstr "新名称"
+
+#: templates/history/share.mako:3
+msgid "Share histories"
+msgstr "共享历史记录"
+
+#: templates/history/share.mako:6
+msgid "Share Histories"
+msgstr "共享历史记录"
+
+#: templates/history/share.mako:9
+msgid "History Name:"
+msgstr "历史名称"
+
+#: templates/history/share.mako:9
+msgid "Number of Datasets:"
+msgstr "数据集数量"
+
+#: templates/history/share.mako:9
+msgid "Share Link"
+msgstr "共享链接"
+
+#: templates/history/share.mako:15
+msgid "This history contains no data."
+msgstr "这项历史中没有数据"
+
+#: templates/history/share.mako:21
+msgid "copy link to share"
+msgstr "复制链接以共享"
+
+#: templates/history/share.mako:24
+msgid "Email of User to share with:"
+msgstr "发送到这些Email地址进行分享"
+
+#: templates/root/history.mako:7
+msgid "Galaxy History"
+msgstr "Galaxy 历史"
+
+#: templates/root/history.mako:237
+msgid "refresh"
+msgstr "刷新"
+
+#: templates/root/history.mako:245
+msgid "You are currently viewing a deleted history!"
+msgstr "正在查看已删除的历史"
+
+#: templates/root/history.mako:289
+msgid "Your history is empty. Click 'Get Data' on the left pane to start"
+msgstr "历史已空,请单击左边窗格中‘获取数据’"
+
+#: templates/root/history_common.mako:41
+msgid "Job is waiting to run"
+msgstr "等待运行的进程"
+
+#: templates/root/history_common.mako:43
+msgid "Job is currently running"
+msgstr "正在运行的进程"
+
+#: templates/root/history_common.mako:46
+msgid "An error occurred running this job: "
+msgstr "进程运行时出错 "
+
+#: templates/root/history_common.mako:47
+msgid "report this error"
+msgstr "报告错误"
+
+#: templates/root/history_common.mako:54
+msgid "No data: "
+msgstr "没有数据: "
+
+#: templates/root/history_common.mako:58
+msgid "format: "
+msgstr "格式: "
+
+#: templates/root/history_common.mako:59
+msgid "database: "
+msgstr "数据库: "
+
+#: templates/root/history_common.mako:66 templates/root/masthead.mako:20
+msgid "Info: "
+msgstr "信息: "
+
+#: templates/root/history_common.mako:85
+#, python-format
+msgid "Error: unknown dataset state \"%s\"."
+msgstr "错误:未知的数据集状态 \"%s\"。"
+
+
+msgid "Options"
+msgstr "选项"
+
+msgid "History"
+msgstr "历史"
+
+#: templates/root/masthead.mako:20
+msgid "report bugs"
+msgstr "错误报告"
+
+#: templates/root/masthead.mako:21
+msgid "wiki"
+msgstr "wiki"
+
+#: templates/root/masthead.mako:22
+msgid "screencasts"
+msgstr "演示视频"
+
+
+#: templates/root/masthead.mako:23
+msgid "blog"
+msgstr "博客"
+
+#: templates/root/masthead.mako:31
+#, python-format
+msgid "Logged in as %s: "
+msgstr "以%s的身份登录: "
+
+#: templates/root/masthead.mako:31
+msgid "manage"
+msgstr "管理"
+
+#: templates/root/masthead.mako:32
+msgid "logout"
+msgstr "注销"
+
+#: templates/root/masthead.mako:34
+msgid "Account: "
+msgstr "帐户: "
+
+#: templates/root/masthead.mako:34
+msgid "create"
+msgstr "创建"
+
+#: templates/root/masthead.mako:35
+msgid "login"
+msgstr "登录"
+
+#: templates/root/tool_menu.mako:52
+msgid "Galaxy Tools"
+msgstr "Galaxy 工具"
+
+#: templates/root/tool_menu.mako:129
+msgid "Workflow"
+msgstr "工作流程"
+
+#: templates/root/tool_menu.mako:134
+msgid "Manage"
+msgstr "管理"
+
+#: templates/root/tool_menu.mako:134
+msgid "workflows"
+msgstr "工作流程"
+
+#: templates/user/index.mako:2 templates/user/index.mako:4
+msgid "Account settings"
+msgstr "帐户设置"
+
+#: templates/user/index.mako:7
+#, python-format
+msgid "You are currently logged in as %s."
+msgstr "当前以%s的身份登录"
+
+#: templates/user/index.mako:9
+msgid "Change your password"
+msgstr "修改密码"
+
+#: templates/user/index.mako:10
+msgid "Update your email address"
+msgstr "更新电子邮件地址"
+
+#: templates/user/index.mako:11
+msgid "Logout"
+msgstr "注销"
+
+#: templates/user/index.mako:16
+msgid "Login"
+msgstr "登录"
+
+#: templates/user/index.mako:17
+msgid "Create new account"
+msgstr "创建新帐户"
+
+msgid "Show Tool Search"
+msgstr "显示工具搜索"
+
+msgid "Analyze Data"
+msgstr "分析数据"
+
+msgid "analysis"
+msgstr "分析"
+
+msgid "History Lists"
+msgstr "历史记录清单"
+
+msgid "Histories Shared with Me"
+msgstr "共享的数据"
+
+msgid "Current History"
+msgstr "当前历史记录"
+
+msgid "Create New"
+msgstr "创建"
+
+msgid "Clone"
+msgstr "复制"
+
+msgid "Share or Publish"
+msgstr "共享或发布"
+
+msgid "Extract Workflow"
+msgstr "提取工作流程"
+
+msgid "Dataset Security"
+msgstr "数据安全性"
+
+
+msgid "Show Deleted Datasets"
+msgstr "显示已删除的数据"
+
+msgid "Show Hidden Datasets"
+msgstr "显示隐藏的数据"
+
+msgid "Show Structure"
+msgstr "显示结构"
+
+msgid "Export to File"
+msgstr "导出为文件"
+
+msgid "Other Actions"
+msgstr "其他"
+
+msgid "Import from File"
+msgstr "导入文件"
+
+msgid "Shared Data"
+msgstr "数据共享"
+
+msgid "Data Libraries" 
+msgstr "数据仓库"
+
+msgid "Published Histories"
+msgstr "已发布的历史记录"
+
+msgid "Published Workflows"
+msgstr "已发布的工作流程"
+
+msgid "Published Pages"
+msgstr "已发布的页面"
+
+msgid "Help"
+msgstr "帮助"
+
+msgid "Email comments, bug reports, or suggestions"
+msgstr "发邮件进行意见反馈或错误报告"
+
+
+msgid "User"
+msgstr "用户"
+
+
+msgid "Register"
+msgstr "注册"
+
+msgid "Support"
+msgstr "技术支持"
+
+msgid "Galaxy Wiki"
+msgstr "Galaxy百科"
+
+msgid "Video tutorials (screencasts)"
+msgstr "视频教程(动画演示)"
+
+msgid "How to Cite Galaxy"
+msgstr "如何引用Galaxy"
diff --git a/locale/zh/LC_MESSAGES/tools.mo b/locale/zh/LC_MESSAGES/tools.mo
new file mode 100644
index 0000000..41ed000
Binary files /dev/null and b/locale/zh/LC_MESSAGES/tools.mo differ
diff --git a/locale/zh/LC_MESSAGES/tools.po b/locale/zh/LC_MESSAGES/tools.po
new file mode 100644
index 0000000..17466fe
--- /dev/null
+++ b/locale/zh/LC_MESSAGES/tools.po
@@ -0,0 +1,84 @@
+# Chinese translations for PROJECT.
+# Copyright (C) 2009 ORGANIZATION
+# This file is distributed under the same license as the PROJECT project.
+# FIRST AUTHOR <EMAIL at ADDRESS>, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PROJECT VERSION\n"
+"Report-Msgid-Bugs-To: EMAIL at ADDRESS\n"
+"POT-Creation-Date: 2009-03-17 04:06-0400\n"
+"PO-Revision-Date: 2009-03-17 04:06-0400\n"
+"Last-Translator: Hanfei Sun <hfsun.tju at gmail.com>\n"
+"Language-Team: zh <hfsun.tju at gmail.com>\n"
+"Plural-Forms: nplurals=1; plural=0\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 0.9.4\n"
+
+msgid "Get Data"
+msgstr "获取数据"
+
+msgid "Get ENCODE Data"
+msgstr ""
+
+msgid "ENCODE Tools"
+msgstr ""
+
+msgid "Lift-Over"
+msgstr ""
+
+msgid "Text Manipulation"
+msgstr ""
+
+msgid "Filter and Sort"
+msgstr ""
+
+msgid "Join, Subtract and Group"
+msgstr ""
+
+msgid "Convert Formats"
+msgstr ""
+
+msgid "Extract Features"
+msgstr ""
+
+msgid "Fetch Sequences"
+msgstr ""
+
+msgid "Fetch Alignments"
+msgstr ""
+
+msgid "Get Genomic Scores"
+msgstr ""
+
+msgid "Operate on Genomic Intervals"
+msgstr ""
+
+msgid "Statistics"
+msgstr ""
+
+msgid "Graph/Display Data"
+msgstr ""
+
+msgid "Regional Variation"
+msgstr ""
+
+msgid "Evolution: HyPhy"
+msgstr ""
+
+msgid "Taxonomy manipulation"
+msgstr ""
+
+msgid "Solexa tools"
+msgstr ""
+
+msgid "FASTA manipulation"
+msgstr ""
+
+msgid "Short Read QC and Manipulation"
+msgstr ""
+
+msgid "Short Read Mapping"
+msgstr ""
diff --git a/manage_db.sh b/manage_db.sh
new file mode 100755
index 0000000..6d4f342
--- /dev/null
+++ b/manage_db.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+#######
+# NOTE: To downgrade to a specific version, use something like:
+# sh manage_db.sh downgrade --version=3 <tool_shed if using that webapp - galaxy is the default>
+#######
+
+: ${GALAXY_VIRTUAL_ENV:=.venv}
+
+if [ -d "$GALAXY_VIRTUAL_ENV" ];
+then
+    printf "Activating virtualenv at $GALAXY_VIRTUAL_ENV\n"
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+
+cd "$(dirname "$0")"
+python ./scripts/manage_db.py "$@"
diff --git a/manage_tools.sh b/manage_tools.sh
new file mode 100644
index 0000000..67473f9
--- /dev/null
+++ b/manage_tools.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+: ${GALAXY_VIRTUAL_ENV:=.venv}
+
+if [ -d "$GALAXY_VIRTUAL_ENV" ];
+then
+    printf "Activating virtualenv at $GALAXY_VIRTUAL_ENV\n"
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+
+cd "$(dirname "$0")"
+python ./scripts/manage_tools.py "$@"
diff --git a/openid/aol.xml b/openid/aol.xml
new file mode 100644
index 0000000..3479c1a
--- /dev/null
+++ b/openid/aol.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0"?>
+<provider id="aol" name="AOL/AIM">
+    <op_endpoint_url>http://openid.aol.com</op_endpoint_url>
+</provider>
diff --git a/openid/genomespace.xml b/openid/genomespace.xml
new file mode 100644
index 0000000..65bf95e
--- /dev/null
+++ b/openid/genomespace.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<provider id="genomespace" name="GenomeSpace">
+    <op_endpoint_url>https://identity.genomespace.org/identityServer/xrd.jsp</op_endpoint_url>
+    <sreg>
+        <field name="nickname" required="True">
+            <use_for name="username"/>
+            <store_user_preference name="genomespace_username"/>
+        </field>
+        <field name="email" required="False">
+            <use_for name="email"/>
+        </field>
+        <field name="gender" required="True">
+            <store_user_preference name="genomespace_token"/>
+        </field>
+    </sreg>
+</provider>
diff --git a/openid/google.xml b/openid/google.xml
new file mode 100644
index 0000000..8ca2b9b
--- /dev/null
+++ b/openid/google.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0"?>
+<provider id="google" name="Google">
+    <op_endpoint_url>https://www.google.com/accounts/o8/id</op_endpoint_url>
+</provider>
diff --git a/openid/launchpad.xml b/openid/launchpad.xml
new file mode 100644
index 0000000..27b59c6
--- /dev/null
+++ b/openid/launchpad.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0"?>
+<provider id="launchpad" name="Launchpad">
+    <op_endpoint_url>http://login.launchpad.net</op_endpoint_url>
+</provider>
diff --git a/openid/yahoo.xml b/openid/yahoo.xml
new file mode 100644
index 0000000..2ae7696
--- /dev/null
+++ b/openid/yahoo.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0"?>
+<provider id="yahoo" name="Yahoo!">
+    <op_endpoint_url>http://yahoo.com</op_endpoint_url>
+</provider>
diff --git a/requirements.txt b/requirements.txt
new file mode 120000
index 0000000..8a4bb84
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+lib/galaxy/dependencies/pinned-requirements.txt
\ No newline at end of file
diff --git a/rolling_restart.sh b/rolling_restart.sh
new file mode 100755
index 0000000..9ad6613
--- /dev/null
+++ b/rolling_restart.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+cd "$(dirname "$0")"
+
+GALAXY_RUN_ALL=1 ./run.sh restart --wait
diff --git a/run.sh b/run.sh
new file mode 100755
index 0000000..039047a
--- /dev/null
+++ b/run.sh
@@ -0,0 +1,133 @@
+#!/bin/sh
+
+cd "$(dirname "$0")"
+
+# If there is a file that defines a shell environment specific to this
+# instance of Galaxy, source the file.
+if [ -z "$GALAXY_LOCAL_ENV_FILE" ];
+then
+    GALAXY_LOCAL_ENV_FILE='./config/local_env.sh'
+fi
+
+if [ -f "$GALAXY_LOCAL_ENV_FILE" ];
+then
+    . "$GALAXY_LOCAL_ENV_FILE"
+fi
+
+# Pop args meant for common_startup.sh
+while :
+do
+    case "$1" in
+        --skip-eggs|--skip-wheels|--skip-samples|--dev-wheels|--no-create-venv|--no-replace-pip|--replace-pip)
+            common_startup_args="$common_startup_args $1"
+            shift
+            ;;
+        --skip-venv)
+            skip_venv=1
+            common_startup_args="$common_startup_args $1"
+            shift
+            ;;
+        --stop-daemon)
+            common_startup_args="$common_startup_args $1"
+            paster_args="$paster_args $1"
+            stop_daemon_arg_set=1
+            shift
+            ;;
+        --daemon|--restart|restart)
+            if [ "$1" = "--restart" ]
+            then
+                paster_args="$paster_args restart"
+            else
+                paster_args="$paster_args $1"
+            fi
+
+            daemon_or_restart_arg_set=1
+            shift
+            ;;
+        --wait)
+            wait_arg_set=1
+            shift
+            ;;
+        "")
+            break
+            ;;
+        *)
+            paster_args="$paster_args $1"
+            shift
+            ;;
+    esac
+done
+
+./scripts/common_startup.sh $common_startup_args || exit 1
+
+# If there is a .venv/ directory, assume it contains a virtualenv that we
+# should run this instance in.
+GALAXY_VIRTUAL_ENV="${GALAXY_VIRTUAL_ENV:-.venv}"
+if [ -d "$GALAXY_VIRTUAL_ENV" -a -z "$skip_venv" ];
+then
+    [ -n "$PYTHONPATH" ] && { echo 'Unsetting $PYTHONPATH'; unset PYTHONPATH; }
+    echo "Activating virtualenv at $GALAXY_VIRTUAL_ENV"
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+
+# If you are using --skip-venv we assume you know what you are doing but warn
+# in case you don't.
+[ -n "$PYTHONPATH" ] && echo 'WARNING: $PYTHONPATH is set, this can cause problems importing Galaxy dependencies'
+
+python ./scripts/check_python.py || exit 1
+
+if [ ! -z "$GALAXY_RUN_WITH_TEST_TOOLS" ];
+then
+    export GALAXY_CONFIG_OVERRIDE_TOOL_CONFIG_FILE="test/functional/tools/samples_tool_conf.xml"
+    export GALAXY_CONFIG_ENABLE_BETA_WORKFLOW_MODULES="true"
+    export GALAXY_CONFIG_OVERRIDE_ENABLE_BETA_TOOL_FORMATS="true"
+fi
+
+if [ -n "$GALAXY_UNIVERSE_CONFIG_DIR" ]; then
+    python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
+fi
+
+if [ -z "$GALAXY_CONFIG_FILE" ]; then
+    if [ -f universe_wsgi.ini ]; then
+        GALAXY_CONFIG_FILE=universe_wsgi.ini
+    elif [ -f config/galaxy.ini ]; then
+        GALAXY_CONFIG_FILE=config/galaxy.ini
+    else
+        GALAXY_CONFIG_FILE=config/galaxy.ini.sample
+    fi
+    export GALAXY_CONFIG_FILE
+fi
+
+if [ -n "$GALAXY_RUN_ALL" ]; then
+    servers=$(sed -n 's/^\[server:\(.*\)\]/\1/  p' "$GALAXY_CONFIG_FILE" | xargs echo)
+    if [ -z "$stop_daemon_arg_set" -a -z "$daemon_or_restart_arg_set" ]; then
+        echo "ERROR: \$GALAXY_RUN_ALL cannot be used without the '--daemon', '--stop-daemon' or 'restart' arguments to run.sh"
+        exit 1
+    fi
+    for server in $servers; do
+        if [ -n "$wait_arg_set" -a -n "$daemon_or_restart_arg_set" ]; then
+            python ./scripts/paster.py serve "$GALAXY_CONFIG_FILE" --server-name="$server" --pid-file="$server.pid" --log-file="$server.log" $paster_args
+            while true; do
+                sleep 1
+                printf "."
+                # Grab the current pid from the pid file
+                if ! current_pid_in_file=$(cat "$server.pid"); then
+                    echo "A Galaxy process died, interrupting" >&2
+                    exit 1
+                fi
+                # Search for all pids in the logs and tail for the last one
+                latest_pid=$(egrep '^Starting server in PID [0-9]+\.$' "$server.log" -o | sed 's/Starting server in PID //g;s/\.$//g' | tail -n 1)
+                # If they're equivalent, then the current pid file agrees with our logs
+                # and we've succesfully started
+                [ -n "$latest_pid" ] && [ "$latest_pid" -eq "$current_pid_in_file" ] && break
+            done
+            echo
+        else
+            echo "Handling $server with log file $server.log..."
+            python ./scripts/paster.py serve "$GALAXY_CONFIG_FILE" --server-name="$server" --pid-file="$server.pid" --log-file="$server.log" $paster_args
+        fi
+    done
+else
+    # Handle only 1 server, whose name can be specified with --server-name parameter (defaults to "main")
+    python ./scripts/paster.py serve "$GALAXY_CONFIG_FILE" $paster_args
+fi
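For reference, the sed expression in run.sh above simply enumerates the
[server:NAME] sections of the config file. The same listing in Python (a sketch,
not part of run.sh; on the Python 2 line that Galaxy 16.10 targets the module is
named ConfigParser):

    from configparser import ConfigParser  # 'import ConfigParser' on Python 2

    config = ConfigParser()
    config.read( 'config/galaxy.ini' )  # one of the fallbacks listed above
    servers = [ s[ len( 'server:' ): ] for s in config.sections()
                if s.startswith( 'server:' ) ]
    print( servers )  # e.g. ['main'] for the sample configuration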
diff --git a/run_reports.sh b/run_reports.sh
new file mode 100755
index 0000000..eb2de06
--- /dev/null
+++ b/run_reports.sh
@@ -0,0 +1,50 @@
+#!/bin/sh
+
+
+# Usage: ./run_reports.sh [--sync-config] <start|stop>
+#
+#
+# Description: This script can be used to start or stop the galaxy
+# reports web application. Passing in --sync-config as the first
+# argument to this will cause Galaxy's database and path parameters
+# from galaxy.ini to be copied over into reports.ini.
+
+cd "$(dirname "$0")"
+
+./scripts/common_startup.sh --skip-samples
+
+: ${GALAXY_VIRTUAL_ENV:=.venv}
+
+if [ -d "$GALAXY_VIRTUAL_ENV" ];
+then
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+
+if [ -z "$GALAXY_REPORTS_CONFIG" ]; then
+    if [ -f reports_wsgi.ini ]; then
+        GALAXY_REPORTS_CONFIG=reports_wsgi.ini
+    elif [ -f config/reports_wsgi.ini ]; then
+        GALAXY_REPORTS_CONFIG=config/reports_wsgi.ini
+    elif [ -f config/reports.ini ]; then
+        GALAXY_REPORTS_CONFIG=config/reports.ini
+    else
+        GALAXY_REPORTS_CONFIG=config/reports.ini.sample
+    fi
+    export GALAXY_REPORTS_CONFIG
+fi
+
+GALAXY_REPORTS_PID=${GALAXY_REPORTS_PID:-reports_webapp.pid}
+GALAXY_REPORTS_LOG=${GALAXY_REPORTS_LOG:-reports_webapp.log}
+
+if [ -n "$GALAXY_REPORTS_CONFIG_DIR" ]; then
+    python ./scripts/build_universe_config.py "$GALAXY_REPORTS_CONFIG_DIR" "$GALAXY_REPORTS_CONFIG"
+fi
+
+
+if [ "$1" = "--sync-config" ];
+then
+    python ./scripts/sync_reports_config.py
+    shift
+fi
+
+python ./scripts/paster.py serve "$GALAXY_REPORTS_CONFIG" --pid-file="$GALAXY_REPORTS_PID" --log-file="$GALAXY_REPORTS_LOG" "$@"
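The general idea behind --sync-config can be sketched as follows; this is an
illustration only, not the actual scripts/sync_reports_config.py, and the copied
option names (standard Galaxy settings) plus the file paths are assumptions:

    from configparser import ConfigParser

    galaxy, reports = ConfigParser(), ConfigParser()
    galaxy.read( 'config/galaxy.ini' )
    reports.read( 'config/reports.ini' )
    # Keep both webapps pointed at the same database and file store.
    for option in ( 'database_connection', 'file_path', 'new_file_path' ):
        if galaxy.has_option( 'app:main', option ):
            reports.set( 'app:main', option, galaxy.get( 'app:main', option ) )
    with open( 'config/reports.ini', 'w' ) as fh:
        reports.write( fh )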
diff --git a/run_tests.sh b/run_tests.sh
new file mode 100755
index 0000000..609653e
--- /dev/null
+++ b/run_tests.sh
@@ -0,0 +1,533 @@
+#!/bin/sh
+
+pwd_dir=$(pwd)
+cd "$(dirname "$0")"
+
+# A good place to look for nose info: http://somethingaboutorange.com/mrl/projects/nose/
+rm -f run_functional_tests.log
+
+show_help() {
+cat <<EOF
+'${0##*/} (test_path)'              for testing all the tools in functional directory
+'${0##*/} -id bbb'                  for testing one tool with id 'bbb' ('bbb' is the tool id)
+'${0##*/} -sid ccc'                 for testing one section with sid 'ccc' ('ccc' is the string after 'section::')
+'${0##*/} -list'                    for listing all the tool ids
+'${0##*/} -api (test_path)'         for running all the test scripts in the ./test/api directory
+'${0##*/} -toolshed (test_path)'    for running all the test scripts in the ./test/shed_functional/functional directory
+'${0##*/} -workflow test.xml'       for running a workflow test case as defined by supplied workflow xml test file (experimental)
+'${0##*/} -installed'               for running tests of Tool Shed installed tools
+'${0##*/} -framework'               for running example tool tests that exercise framework features in test/functional/tools
+'${0##*/} -framework -id toolid'    for testing one framework tool (in test/functional/tools/) with id 'toolid'
+'${0##*/} -data_managers -id data_manager_id'    for testing one Data Manager with id 'data_manager_id'
+'${0##*/} -unit (test_path)'        for running all unit tests (doctests in lib and tests in test/unit)
+'${0##*/} -qunit'                   for running qunit JavaScript tests
+'${0##*/} -qunit testname'          for running single JavaScript test with given name
+'${0##*/} -casperjs (py_test_path)' for running casperjs JavaScript tests using a Python wrapper for consistency. py_test_path in casperjs_runner.py e.g. 'Test_04_HDAs' or 'Test_04_HDAs.test_00_HDA_states'.
+
+Nose tests will allow specific tests to be selected per the documentation at
+https://nose.readthedocs.org/en/latest/usage.html#selecting-tests.  These are
+indicated with the optional parameter (test_path).  A few examples are:
+
+Run all TestUserInfo functional tests:
+    ./run_tests.sh test/functional/test_user_info.py:TestUserInfo
+
+Run a specific API test:
+    ./run_tests.sh -api test/api/test_tools.py:ToolsTestCase.test_map_over_with_output_format_actions
+
+
+External Tests:
+
+A small subset of tests can be run against an existing Galaxy
+instance. The external Galaxy instance URL can be configured with
+--external_url. If this is set, either --external_master_key or
+--external_user_key must be set as well - more tests can be executed
+with --external_master_key than with a user key.
+
+Extra options:
+
+ --verbose_errors      Force some tests to produce more verbose error reporting.
+ --no_cleanup          Do not delete temp files for Python functional tests
+                       (-toolshed, -framework, etc...)
+ --debug               On Python test error or failure, invoke a pdb shell
+                       for interactive debugging of the test.
+ --report_file         Path of HTML report to produce (for Python Galaxy
+                       functional tests). If not given, a default filename will
+                       be used, and reported on stderr at the end of the run.
+ --xunit_report_file   Path of XUnit report to produce (for Python Galaxy
+                       functional tests).
+ --skip-venv           Do not create .venv (passes this flag to
+                       common_startup.sh)
+ --dockerize           Run tests in a pre-configured Docker container (must be
+                       first argument if present).
+ --db <type>           For use with --dockerize, run tests using partially
+                       migrated 'postgres', 'mysql', or 'sqlite' databases.
+ --external_url        External URL to use for Galaxy testing (only certain
+                       tests).
+ --external_master_key Master API key used to configure external tests.
+ --external_user_key   User API key used for external tests - not required if
+                       external_master_key is specified.
+
+Environment Variables:
+
+In addition to the above command-line options, many environment variables
+can be used to control the Galaxy functional testing process. Command-line
+options above (like --external_url) set environment variables - in such
+cases the command-line argument takes precedence over environment variables
+set at the time of running this script.
+
+Functional Test Environment Variables
+
+GALAXY_TEST_DBURI               Database connection string used for functional
+                                test database for Galaxy.
+GALAXY_TEST_INSTALL_DBURI       Database connection string used for functional
+                                test database for Galaxy's install framework.
+GALAXY_TEST_INSTALL_DB_MERGED   Set to use the same database for Galaxy and the
+                                install framework; this defaults to True for
+                                Galaxy tests and False for shed tests.
+GALAXY_TEST_DB_TEMPLATE         If GALAXY_TEST_DBURI is unset, this URL can be
+                                retrieved and should be an sqlite database that
+                                will be upgraded and tested against.
+GALAXY_TEST_TMP_DIR             Temp directory used for files required by
+                                Galaxy server setup for Galaxy functional tests.
+GALAXY_TEST_SAVE                Location to save certain test files (such as
+                                tool outputs).
+GALAXY_TEST_EXTERNAL            Target an external Galaxy as part of testing.
+GALAXY_TEST_JOB_CONFIG_FILE     Job config file to use for the test.
+GALAXY_CONFIG_MASTER_KEY        Master or admin API key to use as part of
+                                testing with GALAXY_TEST_EXTERNAL.
+GALAXY_TEST_USER_API_KEY        User API key to use as part of testing with
+                                GALAXY_TEST_EXTERNAL.
+GALAXY_TEST_HISTORY_ID          Point casperjs tests at specific external
+                                history for testing.
+GALAXY_TEST_WORKFLOW_FILE       Point casperjs tests at specific workflow
+                                file for testing.
+GALAXY_TEST_VERBOSE_ERRORS      Enable more verbose errors during API tests.
+GALAXY_TEST_UPLOAD_ASYNC        Upload tool test inputs asynchronously (may
+                                overwhelm sqlite database).
+GALAXY_TEST_RAW_DIFF            Don't slice up tool test diffs to keep output
+                                manageable - print all output. (default off)
+GALAXY_TEST_DEFAULT_WAIT        Max time allowed for a tool test before Galaxy
+                                gives up (default 86400) - tools may define a
+                                maxseconds attribute to extend this.
+GALAXY_TEST_TOOL_DEPENDENCY_DIR Tool dependency dir to use for Galaxy during
+                                functional tests.
+GALAXY_TEST_FILE_DIR            Test data sources (defaults to
+              test-data,https://github.com/galaxyproject/galaxy-test-data.git)
+GALAXY_TEST_DIRECTORY           $GALAXY_ROOT/test
+GALAXY_TEST_TOOL_DATA_PATH      Set to override tool data path during tool
+                                shed tests.
+GALAXY_TEST_FETCH_DATA          Fetch remote test data to
+                                GALAXY_TEST_DATA_REPO_CACHE as part of tool
+                                tests if it is not available locally (defaults
+                                to True). Requires git to be available on the
+                                command-line.
+GALAXY_TEST_DATA_REPO_CACHE     Where to cache remote test data (defaults to
+                                test-data-cache).
+HTTP_ACCEPT_LANGUAGE            Defaults to 'en'
+GALAXY_TEST_NO_CLEANUP          Do not clean up the main test directory after
+                                tests; the deprecated option
+                                TOOL_SHED_TEST_NO_CLEANUP does the same thing.
+GALAXY_TEST_HOST                Host to use for Galaxy server setup for
+                                testing.
+GALAXY_TEST_PORT                Port to use for Galaxy server setup for
+                                testing.
+GALAXY_TEST_TOOL_PATH           Path defaulting to 'tools'.
+GALAXY_TEST_SHED_TOOL_CONF      Shed toolbox conf (defaults to
+                                config/shed_tool_conf.xml) used when testing
+                                installed tools with -installed.
+TOOL_SHED_TEST_HOST             Host to use for shed server setup for testing.
+TOOL_SHED_TEST_PORT             Port to use for shed server setup for testing.
+TOOL_SHED_TEST_FILE_DIR         Defaults to test/shed_functional/test_data.
+TOOL_SHED_TEST_TMP_DIR          Defaults to a random /tmp directory - where
+                                tool shed test server files are placed.
+TOOL_SHED_TEST_OMIT_GALAXY      Do not launch a Galaxy server for tool shed
+                                testing.
+
+Unit Test Environment Variables
+
+GALAXY_TEST_INCLUDE_SLOW - Used in unit tests to trigger slower tests that
+                           aren't included by default with --unit/-u.
+
+EOF
+}
+
+show_list() {
+    python tool_list.py
+    echo "==========================================================================================================================================="
+    echo "'${0##*/} -id bbb'               for testing one tool with id 'bbb' ('bbb' is the tool id)"
+    echo "'${0##*/} -sid ccc'              for testing one section with sid 'ccc' ('ccc' is the string after 'section::')"
+}
+
+exists() {
+    type "$1" >/dev/null 2>/dev/null
+}
+
+ensure_grunt() {
+    if ! exists "grunt";
+    then
+        echo "Grunt not on path, cannot run these tests."
+        exit 1
+    fi
+}
+
+
+DOCKER_DEFAULT_IMAGE='galaxy/testing-base:15.10.3'
+
+test_script="./scripts/functional_tests.py"
+report_file="run_functional_tests.html"
+xunit_report_file=""
+structured_data_report_file=""
+with_framework_test_tools_arg=""
+
+driver="python"
+
+if [ "$1" = "--dockerize" ];
+then
+    shift
+    DOCKER_EXTRA_ARGS=${DOCKER_ARGS:-""}
+    DOCKER_RUN_EXTRA_ARGS=${DOCKER_RUN_EXTRA_ARGS:-""}
+    DOCKER_IMAGE=${DOCKER_IMAGE:-${DOCKER_DEFAULT_IMAGE}}
+    if [ "$1" = "--db" ]; then
+       db_type=$2
+       shift 2
+    else
+       db_type="sqlite"
+    fi
+    if [ "$1" = "--external_tmp" ]; then
+       # If /tmp is a tmpfs there may be better performance by reusing
+       # the parent's temp file system. Also, it seems to decrease the
+       # frequency of errors such as the following:
+       # /bin/sh: 1: /tmp/tmpiWU3kJ/tmp_8zLxx/job_working_directory_mwwDmg/000/274/galaxy_274.sh: Text file busy
+       tmp=$(mktemp -d)
+       chmod 1777 $tmp
+       DOCKER_RUN_EXTRA_ARGS="-v ${tmp}:/tmp ${DOCKER_RUN_EXTRA_ARGS}"
+       shift
+    fi
+    docker $DOCKER_EXTRA_ARGS run $DOCKER_RUN_EXTRA_ARGS -e "GALAXY_TEST_DATABASE_TYPE=$db_type" --rm -v `pwd`:/galaxy $DOCKER_IMAGE "$@"
+    exit $?
+fi
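+
+# Example dockerized run (per the help text above, --dockerize must be the
+# first argument; the database type and suite selection are illustrative):
+#   ./run_tests.sh --dockerize --db postgres -api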
+
+# Loop through and consume the main arguments.
+# Some loops will consume more than one argument (there are extra "shift"s in some cases).
+while :
+do
+    case "$1" in
+      -h|--help|-\?)
+          show_help
+          exit 0
+          ;;
+      -l|-list|--list)
+          show_list
+          exit 0
+          ;;
+      -id|--id)
+          if [ $# -gt 1 ]; then
+              test_id=$2;
+              shift 2
+          else
+              echo "--id requires an argument" 1>&2
+              exit 1
+          fi
+          ;;
+      -s|-sid|--sid)
+          if [ $# -gt 1 ]; then
+              section_id=$2
+              shift 2
+          else
+              echo "--sid requires an argument" 1>&2
+              exit 1
+          fi
+          ;;
+      -a|-api|--api)
+          with_framework_test_tools_arg="-with_framework_test_tools"
+          test_script="./scripts/functional_tests.py"
+          report_file="./run_api_tests.html"
+          if [ $# -gt 1 ]; then
+              api_script=$2
+              shift 2
+          else
+              api_script="./test/api"
+              shift 1
+          fi
+          ;;
+      -t|-toolshed|--toolshed)
+          test_script="./test/shed_functional/functional_tests.py"
+          report_file="run_toolshed_tests.html"
+          if [ $# -gt 1 ]; then
+              toolshed_script=$2
+              shift 2
+          else
+              toolshed_script="./test/shed_functional/functional"
+              shift 1
+          fi
+          ;;
+      -with_framework_test_tools|--with_framework_test_tools)
+          with_framework_test_tools_arg="-with_framework_test_tools"
+          shift
+          ;;
+      --external_url)
+          GALAXY_TEST_EXTERNAL=$2
+          shift 2
+          ;;
+      --external_master_key)
+          GALAXY_CONFIG_MASTER_KEY=$2
+          shift 2
+          ;;
+      --external_user_key)
+          GALAXY_TEST_USER_API_KEY=$2
+          shift 2
+          ;;
+      -w|-workflow|--workflow)
+          if [ $# -gt 1 ]; then
+              workflow_file=$2
+              workflow_test=1
+              shift 2
+          else
+              echo "--workflow requires an argument" 1>&2
+              exit 1
+          fi
+          ;;
+      -f|-framework|--framework)
+          report_file="run_framework_tests.html"
+          framework_test=1;
+          shift 1
+          ;;
+      -d|-data_managers|--data_managers)
+          data_managers_test=1;
+          shift 1
+          ;;
+      -j|-casperjs|--casperjs)
+          # TODO: Support running casper tests against existing
+          # Galaxy instances.
+          with_framework_test_tools_arg="-with_framework_test_tools"
+          if [ $# -gt 1 ]; then
+              casperjs_test_name=$2
+              shift 2
+          else
+              shift 1
+          fi
+          report_file="run_casperjs_tests.html"
+          casperjs_test=1;
+          ;;
+      -m|-migrated|--migrated)
+          migrated_test=1;
+          shift
+          ;;
+      -i|-installed|--installed)
+          installed_test=1;
+          shift
+          ;;
+      -r|--report_file)
+          if [ $# -gt 1 ]; then
+              report_file=$2
+              shift 2
+          else
+              echo "--report_file requires an argument" 1>&2
+              exit 1
+          fi
+          ;;
+      --xunit_report_file)
+          if [ $# -gt 1 ]; then
+              xunit_report_file=$2
+              shift 2
+          else
+              echo "--xunit_report_file requires an argument" 1>&2
+              exit 1
+          fi
+          ;;
+      --structured_data_report_file)
+          if [ $# -gt 1 ]; then
+              structured_data_report_file=$2
+              shift 2
+          else
+              echo "--structured_data_report_file requires an argument" 1>&2
+              exit 1
+          fi
+          ;;
+      --verbose_errors)
+          GALAXY_TEST_VERBOSE_ERRORS=True
+          export GALAXY_TEST_VERBOSE_ERRORS
+          shift
+          ;;
+      -c|--coverage)
+          # Must have coverage installed (try `which coverage`) - only valid with --unit
+          # for now. Would be great to get this to work with functional tests though.
+          coverage_arg="--with-coverage"
+          NOSE_WITH_COVERAGE=true
+          shift
+          ;;
+      --debug)
+          #TODO ipdb would be nicer.
+          NOSE_PDB=True
+          export NOSE_PDB
+          shift
+          ;;
+      -u|-unit|--unit)
+          report_file="run_unit_tests.html"
+          test_script="./scripts/nosetests.py"
+          if [ $# -gt 1 ]; then
+              unit_extra=$2
+              shift 2
+          else
+              unit_extra='--exclude=functional --exclude="^get" --exclude=controllers --exclude=runners --exclude dictobj --exclude=jstree lib test/unit'
+              shift 1
+          fi
+          ;;
+      -q|-qunit|--qunit)
+          # Requires grunt installed and dependencies configured see
+          # test/qunit/README.txt for more information.
+          driver="grunt"
+          gruntfile="./test/qunit/Gruntfile.js"
+          if [ $# -gt 1 ]; then
+              qunit_name=$2
+              shift 2
+          else
+              shift 1
+          fi
+          ;;
+      --no_cleanup)
+          GALAXY_TEST_NO_CLEANUP=1
+          export GALAXY_TEST_NO_CLEANUP
+          TOOL_SHED_TEST_NO_CLEANUP=1
+          export TOOL_SHED_TEST_NO_CLEANUP
+          GALAXY_INSTALL_TEST_NO_CLEANUP=1
+          export GALAXY_INSTALL_TEST_NO_CLEANUP
+          echo "Skipping Python test clean up."
+          shift
+          ;;
+      -watch|--watch)
+          # Have grunt watch test or directory for changes, only
+          # valid for javascript testing.
+          watch=1
+          shift
+          ;;
+      --skip-venv)
+          skip_venv='--skip-venv'
+          shift
+          ;;
+      --no-create-venv)
+          no_create_venv='--no-create-venv'
+          shift
+          ;;
+      --no-replace-pip)
+          no_replace_pip='--no-replace-pip'
+          shift
+          ;;
+      --replace-pip)
+          replace_pip='--replace-pip'
+          shift
+          ;;
+      --skip-common-startup)
+          # Don't run ./scripts/common_startup.sh (presumably it has already
+          # been done, or you know what you're doing).
+          skip_common_startup=1
+          shift
+          ;;
+      --)
+          shift
+          break
+          ;;
+      -*)
+          echo "invalid option: $1" 1>&2;
+          show_help
+          exit 1
+          ;;
+      *)
+          break;
+          ;;
+    esac
+done
+
+if [ -z "$skip_common_startup" ]; then
+    if [ -n "$GALAXY_TEST_DBURI" ]; then
+            GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION=$GALAXY_TEST_DBURI
+            export GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION
+    fi
+    ./scripts/common_startup.sh $skip_venv $no_create_venv $no_replace_pip $replace_pip --dev-wheels || exit 1
+fi
+
+GALAXY_VIRTUAL_ENV="${GALAXY_VIRTUAL_ENV:-.venv}"
+if [ -z "$skip_venv" -a -d "$GALAXY_VIRTUAL_ENV" ];
+then
+    printf "Activating virtualenv at $GALAXY_VIRTUAL_ENV\n"
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+
+if [ -n "$migrated_test" ] ; then
+    [ -n "$test_id" ] && class=":TestForTool_$test_id" || class=""
+    extra_args="functional.test_toolbox$class -migrated"
+elif [ -n "$installed_test" ] ; then
+    [ -n "$test_id" ] && class=":TestForTool_$test_id" || class=""
+    extra_args="functional.test_toolbox$class -installed"
+elif [ -n "$framework_test" ] ; then
+    [ -n "$test_id" ] && class=":TestForTool_$test_id" || class=""
+    extra_args="functional.test_toolbox$class -framework"
+elif [ -n "$data_managers_test" ] ; then
+    [ -n "$test_id" ] && class=":TestForDataManagerTool_$test_id" || class=""
+    extra_args="functional.test_data_managers$class -data_managers"
+elif [ -n "$workflow_test" ]; then
+    GALAXY_TEST_WORKFLOW_FILE="$workflow_file"
+    extra_args="functional.workflow:WorkflowTestCase"
+elif [ -n "$toolshed_script" ]; then
+    extra_args="$toolshed_script"
+elif [ -n "$api_script" ]; then
+    extra_args="$api_script"
+elif [ -n "$casperjs_test" ]; then
+    # TODO: Ensure specific versions of casperjs and phantomjs are
+    # available. Some option for leveraging npm to automatically
+    # install these dependencies would be nice as well.
+    if [ -n "$casperjs_test_name" ]; then
+        extra_args="test/casperjs/casperjs_runner.py:$casperjs_test_name"
+    else
+        extra_args="test/casperjs/casperjs_runner.py"
+    fi
+elif [ -n "$section_id" ]; then
+    extra_args=`python tool_list.py $section_id`
+elif [ -n "$test_id" ]; then
+    class=":TestForTool_$test_id"
+    extra_args="functional.test_toolbox$class"
+elif [ -n "$unit_extra" ]; then
+    extra_args="--with-doctest $unit_extra"
+elif [ -n "$1" ] ; then
+    extra_args="$1"
+else
+    extra_args='--exclude="^get" functional'
+fi
+
+if [ "$driver" = "python" ]; then
+    if [ -n "$xunit_report_file" ]; then
+        xunit_args="--with-xunit --xunit-file $xunit_report_file"
+    else
+        xunit_args=""
+    fi
+    if [ -n "$structured_data_report_file" ]; then
+        structured_data_args="--with-structureddata --structured-data-file $structured_data_report_file"
+    else
+        structured_data_args=""
+    fi
+    if [ -n "$with_framework_test_tools_arg" ]; then
+        GALAXY_TEST_TOOL_CONF="config/tool_conf.xml.sample,test/functional/tools/samples_tool_conf.xml"
+        export GALAXY_TEST_TOOL_CONF
+    fi
+    python $test_script $coverage_arg -v --with-nosehtml --html-report-file $report_file $xunit_args $structured_data_args $extra_args
+    exit_status=$?
+    echo "Testing complete. HTML report is in \"$report_file\"." 1>&2
+    exit ${exit_status}
+else
+    ensure_grunt
+    if [ -n "$watch" ]; then
+        grunt_task="watch"
+    else
+        grunt_task=""
+    fi
+    if [ -n "$qunit_name" ]; then
+        grunt_args="--test=$qunit_name"
+    else
+        grunt_args=""
+    fi
+    # TODO: Expand javascript helpers to include setting up
+    # grunt deps in npm, "watch"ing directory, and running casper
+    # functional tests.
+    grunt --gruntfile=$gruntfile $grunt_task $grunt_args
+fi
+
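+# A few more example invocations, using only flags documented in the help
+# text above (paths are illustrative):
+#   ./run_tests.sh -unit
+#   ./run_tests.sh -framework --xunit_report_file xunit.xml
+#   ./run_tests.sh -toolshed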
diff --git a/run_tool_shed.sh b/run_tool_shed.sh
new file mode 100755
index 0000000..a69e08d
--- /dev/null
+++ b/run_tool_shed.sh
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+cd `dirname $0`
+
+./scripts/common_startup.sh
+
+: ${GALAXY_VIRTUAL_ENV:=.venv}
+
+if [ -d "$GALAXY_VIRTUAL_ENV" ];
+then
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+
+
+tool_shed=`./scripts/tool_shed/bootstrap_tool_shed/parse_run_sh_args.sh $@`
+# capture the parse status before the assignment below resets $?
+parse_args_status=$?
+args=$@
+
+if [ $parse_args_status -eq 0 ] ; then
+	bash ./scripts/tool_shed/bootstrap_tool_shed/bootstrap_tool_shed.sh $@
+	args=`echo $@ | sed "s#-\?-bootstrap_from_tool_shed $tool_shed##"`
+fi
+
+if [ -z "$TOOL_SHED_CONFIG_FILE" ]; then
+    if [ -f tool_shed_wsgi.ini ]; then
+        TOOL_SHED_CONFIG_FILE=tool_shed_wsgi.ini
+    elif [ -f config/tool_shed.ini ]; then
+        TOOL_SHED_CONFIG_FILE=config/tool_shed.ini
+    else
+        TOOL_SHED_CONFIG_FILE=config/tool_shed.ini.sample
+    fi
+    export TOOL_SHED_CONFIG_FILE
+fi
+
+python ./scripts/paster.py serve $TOOL_SHED_CONFIG_FILE --pid-file=tool_shed_webapp.pid --log-file=tool_shed_webapp.log $args
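+
+# For reference (parse_run_sh_args.sh above extracts the bootstrap flag;
+# the tool shed URL is a placeholder):
+#   sh run_tool_shed.sh --bootstrap_from_tool_shed http://localhost:9009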
diff --git a/scripts/__init__.py b/scripts/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scripts/api/README b/scripts/api/README
new file mode 100644
index 0000000..1ade28a
--- /dev/null
+++ b/scripts/api/README
@@ -0,0 +1,116 @@
+This is not documentation.  These are hints and examples to get you started
+until the documentation is written.
+
+Set these options in galaxy.ini and start the server:
+
+admin_users = you at example.org
+library_import_dir = /path/to/some/directory
+
+In the directory you specified for 'library_import_dir', create some
+subdirectories, and put (or symlink) files to import into Galaxy into those
+subdirectories.
+
+In Galaxy, create an account that matches the address you put in 'admin_users',
+then browse to that user's preferences and generate a new API Key.  Copy the
+key to your clipboard and then use these scripts:
+
+% ./display.py my_key http://localhost:4096/api/libraries
+Collection Members
+------------------
+
+0 elements in collection
+
+% ./library_create_library.py my_key http://localhost:4096/api/libraries api_test 'API Test Library'
+Response
+--------
+/api/libraries/f3f73e481f432006
+  name: api_test
+  id: f3f73e481f432006
+
+% ./display.py my_key http://localhost:4096/api/libraries
+Collection Members
+------------------
+/api/libraries/f3f73e481f432006
+  name: api_test
+  id: f3f73e481f432006
+
+% ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006
+Member Information
+------------------
+synopsis: None
+contents_url: /api/libraries/f3f73e481f432006/contents
+description: API Test Library
+name: api_test
+
+% ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 
+Collection Members
+------------------
+/api/libraries/f3f73e481f432006/contents/28202595c0d2591f61ddda595d2c3670
+  name: /
+  type: folder
+  id: 28202595c0d2591f61ddda595d2c3670
+
+% ./library_create_folder.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 28202595c0d2591f61ddda595d2c3670 api_test_folder1 'API Test Folder 1'
+Response
+--------
+/api/libraries/f3f73e481f432006/contents/28202595c0d2591fa4f9089d2303fd89
+  name: api_test_folder1
+  id: 28202595c0d2591fa4f9089d2303fd89
+
+% ./library_upload_from_import_dir.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 28202595c0d2591fa4f9089d2303fd89 bed bed hg19
+Response
+--------
+/api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+  name: 2.bed
+  id: e9ef7fdb2db87d7b
+/api/libraries/f3f73e481f432006/contents/3b7f6a31f80a5018
+  name: 3.bed
+  id: 3b7f6a31f80a5018
+
+% ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents 
+Collection Members
+------------------
+/api/libraries/f3f73e481f432006/contents/28202595c0d2591f61ddda595d2c3670
+  name: / 
+  type: folder
+  id: 28202595c0d2591f61ddda595d2c3670
+/api/libraries/f3f73e481f432006/contents/28202595c0d2591fa4f9089d2303fd89
+  name: /api_test_folder1
+  type: folder
+  id: 28202595c0d2591fa4f9089d2303fd89
+/api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+  name: /api_test_folder1/2.bed
+  type: file
+  id: e9ef7fdb2db87d7b
+/api/libraries/f3f73e481f432006/contents/3b7f6a31f80a5018
+  name: /api_test_folder1/3.bed
+  type: file
+  id: 3b7f6a31f80a5018
+
+% ./display.py my_key http://localhost:4096/api/libraries/f3f73e481f432006/contents/e9ef7fdb2db87d7b
+Member Information
+------------------
+misc_blurb: 68 regions
+metadata_endCol: 3
+data_type: bed
+metadata_columns: 6
+metadata_nameCol: 4
+uploaded_by: nate at ...
+metadata_strandCol: 6
+name: 2.bed
+genome_build: hg19
+metadata_comment_lines: None
+metadata_startCol: 2
+metadata_chromCol: 1
+file_size: 4272
+metadata_data_lines: 68
+message:
+metadata_dbkey: hg19
+misc_info: uploaded bed file
+date_uploaded: 2010-06-22T17:01:51.266119
+metadata_column_types: str, int, int, str, int, str
+
+Other parameters are valid when uploading; they are the same parameters as are
+used in the web form, such as 'link_data_only'.
+
+The request and response formats should be considered alpha and are subject to change.
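+
+Because these scripts simply append your key as a query parameter, any of
+the GET examples above can also be reproduced with a plain HTTP client
+such as curl (key and URL are placeholders):
+
+% curl 'http://localhost:4096/api/libraries?key=my_key'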
diff --git a/scripts/api/common.py b/scripts/api/common.py
new file mode 100644
index 0000000..63ad99d
--- /dev/null
+++ b/scripts/api/common.py
@@ -0,0 +1,200 @@
+"""
+Common methods used by the API sample scripts.
+"""
+from __future__ import print_function
+
+import json
+import logging
+import sys
+
+from Crypto.Cipher import Blowfish
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import Request, urlopen
+
+log = logging.getLogger( __name__ )
+
+
+def make_url( api_key, url, args=None ):
+    """
+    Adds the API Key to the URL if it's not already there.
+    """
+    if args is None:
+        args = []
+    argsep = '&'
+    if '?' not in url:
+        argsep = '?'
+    if '?key=' not in url and '&key=' not in url:
+        args.insert( 0, ( 'key', api_key ) )
+    return url + argsep + '&'.join( [ '='.join( t ) for t in args ] )
+
+
+def get( api_key, url ):
+    """
+    Do the actual GET.
+    """
+    url = make_url( api_key, url )
+    try:
+        return json.loads( urlopen( url ).read() )
+    except ValueError as e:
+        print("URL did not return JSON data: %s" % e)
+        sys.exit(1)
+
+
+def post( api_key, url, data ):
+    """
+    Do the actual POST.
+    """
+    url = make_url( api_key, url )
+    req = Request( url, headers={ 'Content-Type': 'application/json' }, data=json.dumps( data ) )
+    return json.loads( urlopen( req ).read() )
+
+
+def put( api_key, url, data ):
+    """
+    Do the actual PUT
+    """
+    url = make_url( api_key, url )
+    req = Request( url, headers={ 'Content-Type': 'application/json' }, data=json.dumps( data ))
+    req.get_method = lambda: 'PUT'
+    return json.loads( urlopen( req ).read() )
+
+
+def __del( api_key, url, data ):
+    """
+    Do the actual DELETE
+    """
+    url = make_url( api_key, url )
+    req = Request( url, headers={ 'Content-Type': 'application/json' }, data=json.dumps( data ))
+    req.get_method = lambda: 'DELETE'
+    return json.loads( urlopen( req ).read() )
+
+
+def display( api_key, url, return_formatted=True ):
+    """
+    Sends an API GET request and acts as a generic formatter for the JSON response.
+    """
+    try:
+        r = get( api_key, url )
+    except HTTPError as e:
+        print(e)
+        print(e.read( 1024 ))  # Only return the first 1K of errors.
+        sys.exit( 1 )
+    if not return_formatted:
+        return r
+    elif type( r ) == list:
+        # Response is a collection as defined in the REST style.
+        print('Collection Members')
+        print('------------------')
+        for n, i in enumerate(r):
+            # All collection members should have a name in the response.
+            # url is optional
+            if 'url' in i:
+                print('#%d: %s' % (n + 1, i.pop( 'url' ) ))
+            if 'name' in i:
+                print('  name: %s' % i.pop( 'name' ))
+            for k, v in i.items():
+                print('  %s: %s' % ( k, v ))
+        print('')
+        print('%d element(s) in collection' % len( r ))
+    elif type( r ) == dict:
+        # Response is an element as defined in the REST style.
+        print('Member Information')
+        print('------------------')
+        for k, v in r.items():
+            print('%s: %s' % ( k, v ))
+    elif type( r ) == str:
+        print(r)
+    else:
+        print('response is unknown type: %s' % type( r ))
+
+
+def submit( api_key, url, data, return_formatted=True ):
+    """
+    Sends an API POST request and acts as a generic formatter for the JSON response.
+    'data' will become the JSON payload read by Galaxy.
+    """
+    try:
+        r = post( api_key, url, data )
+    except HTTPError as e:
+        if return_formatted:
+            print(e)
+            print(e.read( 1024 ))
+            sys.exit( 1 )
+        else:
+            return 'Error. ' + str( e.read( 1024 ) )
+    if not return_formatted:
+        return r
+    print('Response')
+    print('--------')
+    if type( r ) == list:
+        # Currently the only implemented responses are lists of dicts, because
+        # submission creates some number of collection elements.
+        for i in r:
+            if type( i ) == dict:
+                if 'url' in i:
+                    print(i.pop( 'url' ))
+                else:
+                    print('----')
+                if 'name' in i:
+                    print('  name: %s' % i.pop( 'name' ))
+                for k, v in i.items():
+                    print('  %s: %s' % ( k, v ))
+            else:
+                print(i)
+    else:
+        print(r)
+
+
+def update( api_key, url, data, return_formatted=True ):
+    """
+    Sends an API PUT request and acts as a generic formatter for the JSON response.
+    'data' will become the JSON payload read by Galaxy.
+    """
+    try:
+        r = put( api_key, url, data )
+    except HTTPError as e:
+        if return_formatted:
+            print(e)
+            print(e.read( 1024 ))
+            sys.exit( 1 )
+        else:
+            return 'Error. ' + str( e.read( 1024 ) )
+    if not return_formatted:
+        return r
+    print('Response')
+    print('--------')
+    print(r)
+
+
+def delete( api_key, url, data, return_formatted=True ):
+    """
+    Sends an API DELETE request and acts as a generic formatter for the JSON response.
+    'data' will become the JSON payload read by Galaxy.
+    """
+    try:
+        r = __del( api_key, url, data )
+    except HTTPError as e:
+        if return_formatted:
+            print(e)
+            print(e.read( 1024 ))
+            sys.exit( 1 )
+        else:
+            return 'Error. ' + str( e.read( 1024 ) )
+    if not return_formatted:
+        return r
+    print('Response')
+    print('--------')
+    print(r)
+
+
+def encode_id( config_id_secret, obj_id ):
+    """
+    Utility method to encode IDs.
+    """
+    id_cipher = Blowfish.new( config_id_secret )
+    # Convert to string
+    s = str( obj_id )
+    # Pad to a multiple of 8 with leading "!"
+    s = ( "!" * ( 8 - len(s) % 8 ) ) + s
+    # Encrypt
+    return id_cipher.encrypt( s ).encode( 'hex' )
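+
+
+# These helpers back the thin CLI wrappers in this directory, for example
+# (key and URL are placeholders):
+#   % ./display.py my_key http://localhost:8080/api/histories
+#   % ./create.py my_key http://localhost:8080/api/libraries name=api_test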
diff --git a/scripts/api/copy_hda_to_library_folder.py b/scripts/api/copy_hda_to_library_folder.py
new file mode 100755
index 0000000..f838fef
--- /dev/null
+++ b/scripts/api/copy_hda_to_library_folder.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+import sys
+
+from common import submit
+
+usage = "USAGE: copy_hda_to_library_folder.py <base url> <api key> <hda id> <library id> <folder id> [ message ]"
+
+
+def copy_hda_to_library_folder( base_url, key, hda_id, library_id, folder_id, message='' ):
+    url = 'http://%s/api/libraries/%s/contents' % ( base_url, library_id )
+    payload = {
+        'folder_id'     : folder_id,
+        'create_type'   : 'file',
+        'from_hda_id'   : hda_id,
+    }
+    if message:
+        payload.update( dict( ldda_message=message ) )
+
+    return submit( key, url, payload )
+
+
+if __name__ == '__main__':
+    num_args = len( sys.argv )
+    if num_args < 6:
+        print >> sys.stderr, usage
+        sys.exit( 1 )
+
+    ( base_url, key, hda_id, library_id, folder_id ) = sys.argv[1:6]
+
+    message = ''
+    if num_args >= 7:
+        message = sys.argv[6]
+
+    print >> sys.stderr, base_url, key, hda_id, library_id, folder_id, message
+    returned = copy_hda_to_library_folder( base_url, key, hda_id, library_id, folder_id, message )
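+
+# Example (note <base url> is host[:port] only - the script prepends
+# http:// itself; the ids and message are placeholders):
+#   python copy_hda_to_library_folder.py localhost:8080 my_key \
+#       <hda id> <library id> <folder id> 'copied via API'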
diff --git a/scripts/api/create.py b/scripts/api/create.py
new file mode 100644
index 0000000..99d1c2e
--- /dev/null
+++ b/scripts/api/create.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+"""
+Generic POST/create script
+
+usage: create.py key url [key=value ...]
+"""
+import sys
+
+from common import submit
+
+data = {}
+for k, v in [ kwarg.split('=', 1) for kwarg in sys.argv[3:]]:
+    data[k] = v
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/data_manager_example_execute.py b/scripts/api/data_manager_example_execute.py
new file mode 100644
index 0000000..cfdc85b
--- /dev/null
+++ b/scripts/api/data_manager_example_execute.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+# Very simple example of using the API to run Data Managers
+# Script makes the naive assumption that dbkey == sequence id, which in many cases is neither true nor desired.
+# *** This script is not recommended for use as-is on a production server ***
+from __future__ import print_function
+import optparse
+import time
+from six.moves.urllib.parse import urljoin
+
+from common import get, post
+
+DEFAULT_SLEEP_TIME = 3
+FETCH_GENOME_TOOL_ID = 'testtoolshed.g2.bx.psu.edu/repos/blankenberg/data_manager_fetch_genome_all_fasta/data_manager_fetch_genome_all_fasta/0.0.1'
+BUILD_INDEX_TOOLS_ID = [ 'testtoolshed.g2.bx.psu.edu/repos/blankenberg/data_manager_bwa_index_builder/bwa_index_builder_data_manager/0.0.1',
+                        'testtoolshed.g2.bx.psu.edu/repos/blankenberg/data_manager_bwa_index_builder/bwa_color_space_index_builder_data_manager/0.0.1' ]
+
+
+def run_tool( tool_id, history_id, params, api_key, galaxy_url, wait=True, sleep_time=None, **kwargs ):
+    sleep_time = sleep_time or DEFAULT_SLEEP_TIME
+    tools_url = urljoin( galaxy_url, 'api/tools' )
+    payload = {
+        'tool_id': tool_id,
+    }
+    if history_id:
+        payload['history_id'] = history_id
+    payload[ 'inputs' ] = params
+    rval = post( api_key, tools_url, payload )
+    if wait:
+        outputs = list( rval['outputs'] )
+        while outputs:
+            finished_datasets = []
+            for i, dataset_dict in enumerate( outputs ):
+                if dataset_is_terminal( dataset_dict['id'], api_key=api_key, galaxy_url=galaxy_url ):
+                    finished_datasets.append( i )
+            # pop finished indices in reverse so earlier indices remain valid
+            for i in reversed( finished_datasets ):
+                outputs.pop( i )
+            if wait and outputs:
+                time.sleep( sleep_time )
+
+    return rval
+
+
+def get_dataset_state( hda_id, api_key, galaxy_url ):
+    datasets_url = urljoin( galaxy_url, 'api/datasets/%s' % hda_id )
+    dataset_info = get( api_key, datasets_url )
+    return dataset_info['state']
+
+
+def dataset_is_terminal( hda_id, api_key, galaxy_url ):
+    dataset_state = get_dataset_state( hda_id, api_key, galaxy_url )
+    return dataset_state in [ 'ok', 'error' ]
+
+
+if __name__ == '__main__':
+    parser = optparse.OptionParser()
+    parser.add_option( '-k', '--key', dest='api_key', action='store', type="string", default=None, help='API Key.' )
+    parser.add_option( '-u', '--url', dest='base_url', action='store', type="string", default='http://localhost:8080', help='Base URL of Galaxy Server' )
+    parser.add_option( '-d', '--dbkey', dest='dbkeys', action='append', type="string", default=[], help='List of dbkeys to download and Index' )
+    parser.add_option( '-s', '--sleep_time', dest='sleep_time', action='store', type="int", default=DEFAULT_SLEEP_TIME, help='How long to sleep between check loops' )
+    (options, args) = parser.parse_args()
+
+    # check options
+    assert options.api_key is not None, ValueError( 'You must specify an API key.' )
+    assert options.dbkeys, ValueError( 'You must specify at least one dbkey to use.' )
+
+    # check user is admin
+    configuration_options = get( options.api_key, urljoin( options.base_url, 'api/configuration' ) )
+    if 'library_import_dir' not in configuration_options:  # hack to check if is admin user
+        print("Warning: Data Managers are only available to admin users. The API Key provided does not appear to belong to an admin user. Will attempt to run anyway.")
+
+    # Fetch Genomes
+    dbkeys = {}
+    for dbkey in options.dbkeys:
+        if dbkey not in dbkeys:
+            dbkeys[ dbkey ] = run_tool( FETCH_GENOME_TOOL_ID, None, { 'dbkey': dbkey, 'reference_source|reference_source_selector': 'ucsc', 'reference_source|requested_dbkey': dbkey }, options.api_key, options.base_url, wait=False )
+        else:
+            "dbkey (%s) was specified more than once, skipping additional specification." % ( dbkey )
+
+    print('Genomes Queued for downloading.')
+
+    # Start indexers
+    indexing_tools = []
+    while dbkeys:
+        # iterate over a copy since entries are deleted inside the loop
+        for dbkey, value in list( dbkeys.items() ):
+            if dataset_is_terminal( value['outputs'][0]['id'], options.api_key, options.base_url ):
+                del dbkeys[ dbkey ]
+                for tool_id in BUILD_INDEX_TOOLS_ID:
+                    indexing_tools.append( run_tool( tool_id, None, { 'all_fasta_source': dbkey }, options.api_key, options.base_url, wait=False ) )
+        if dbkeys:
+            time.sleep( options.sleep_time )
+
+    print('All genomes downloaded and indexers now queued.')
+
+    # Wait for indexers to finish
+    while indexing_tools:
+        for i, indexing_tool_value in enumerate( indexing_tools ):
+            if dataset_is_terminal( indexing_tool_value['outputs'][0]['id'], options.api_key, options.base_url ):
+                print('Finished:', indexing_tool_value)
+                del indexing_tools[i]
+                break
+        if indexing_tools:
+            time.sleep( options.sleep_time )
+
+    print('All indexers have been run, please check results.')
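+
+# Example run against a local server (the dbkeys are illustrative; see the
+# optparse definitions above for all flags):
+#   python data_manager_example_execute.py -k my_key \
+#       -u http://localhost:8080 -d hg19 -d mm9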
diff --git a/scripts/api/delete.py b/scripts/api/delete.py
new file mode 100644
index 0000000..20d644c
--- /dev/null
+++ b/scripts/api/delete.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+"""
+Generic DELETE/delete script
+
+usage: delete.py key url
+"""
+import sys
+
+from common import delete
+
+data = {}
+for k, v in [ kwarg.split('=', 1) for kwarg in sys.argv[3:]]:
+    data[k] = v
+
+delete( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/display.py b/scripts/api/display.py
new file mode 100755
index 0000000..afc8555
--- /dev/null
+++ b/scripts/api/display.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import os
+import sys
+
+from six.moves.urllib.error import URLError
+
+from common import display
+
+try:
+    display( *sys.argv[1:3] )
+except TypeError as e:
+    print('usage: %s key url' % os.path.basename( sys.argv[0] ))
+    print(e)
+    sys.exit( 1 )
+except URLError as e:
+    print(e)
+    sys.exit( 1 )
diff --git a/scripts/api/example_watch_folder.py b/scripts/api/example_watch_folder.py
new file mode 100755
index 0000000..50dfd17
--- /dev/null
+++ b/scripts/api/example_watch_folder.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+"""
+Simple example script that watches a folder for new files, imports that data to a data library, and then
+executes a workflow on it, creating a new history for each workflow invocation.
+
+This assumes a workflow with only one input, though it could be adapted to many.
+
+Sample call:
+python example_watch_folder.py <api_key> <api_url> /tmp/g_inbox/ /tmp/g_inbox/done/ "API Imports" f2db41e1fa331b3e
+
+NOTE:  The upload method used requires the data library filesystem upload option allow_library_path_paste to be enabled.
+"""
+from __future__ import print_function
+import os
+import shutil
+import sys
+import time
+
+from common import display, submit
+
+
+def main(api_key, api_url, in_folder, out_folder, data_library, workflow):
+    # Find/Create data library with the above name.  Assume we're putting datasets in the root folder '/'
+    libs = display(api_key, api_url + 'libraries', return_formatted=False)
+    library_id = None
+    for library in libs:
+        if library['name'] == data_library:
+            library_id = library['id']
+    if not library_id:
+        lib_create_data = {'name': data_library}
+        library = submit(api_key, api_url + 'libraries', lib_create_data, return_formatted=False)
+        library_id = library[0]['id']
+    folders = display(api_key, api_url + "libraries/%s/contents" % library_id, return_formatted=False)
+    library_folder_id = None
+    for f in folders:
+        if f['name'] == "/":
+            library_folder_id = f['id']
+    workflow = display(api_key, api_url + 'workflows/%s' % workflow, return_formatted=False)
+    if not workflow:
+        print("Workflow %s not found, terminating.")
+        sys.exit(1)
+    if not library_id or not library_folder_id:
+        print("Failure to configure library destination.")
+        sys.exit(1)
+    while 1:
+        # Watch in_folder, upload anything that shows up there to data library and get ldda,
+        # invoke workflow, move file to out_folder.
+        for fname in os.listdir(in_folder):
+            fullpath = os.path.join(in_folder, fname)
+            if os.path.isfile(fullpath):
+                data = {}
+                data['folder_id'] = library_folder_id
+                data['file_type'] = 'auto'
+                data['dbkey'] = ''
+                data['upload_option'] = 'upload_paths'
+                data['filesystem_paths'] = fullpath
+                data['create_type'] = 'file'
+                libset = submit(api_key, api_url + "libraries/%s/contents" % library_id, data, return_formatted=False)
+                # TODO Handle this better, but the datatype isn't always
+                # set for the followup workflow execution without this
+                # pause.
+                time.sleep(5)
+                for ds in libset:
+                    if 'id' in ds:
+                        # Successful upload of dataset, we have the ldda now.  Run the workflow.
+                        wf_data = {}
+                        wf_data['workflow_id'] = workflow['id']
+                        wf_data['history'] = "%s - %s" % (fname, workflow['name'])
+                        wf_data['ds_map'] = {}
+                        for step_id, ds_in in workflow['inputs'].items():
+                            wf_data['ds_map'][step_id] = {'src': 'ld', 'id': ds['id']}
+                        res = submit( api_key, api_url + 'workflows', wf_data, return_formatted=False)
+                        if res:
+                            print(res)
+                            # Successful workflow execution, safe to move dataset.
+                            shutil.move(fullpath, os.path.join(out_folder, fname))
+        time.sleep(10)
+
+
+if __name__ == '__main__':
+    try:
+        api_key = sys.argv[1]
+        api_url = sys.argv[2]
+        in_folder = sys.argv[3]
+        out_folder = sys.argv[4]
+        data_library = sys.argv[5]
+        workflow = sys.argv[6]
+    except IndexError:
+        print('usage: %s key url in_folder out_folder data_library workflow' % os.path.basename( sys.argv[0] ))
+        sys.exit( 1 )
+    main(api_key, api_url, in_folder, out_folder, data_library, workflow )
diff --git a/scripts/api/filter_failed_datasets_from_collection.py b/scripts/api/filter_failed_datasets_from_collection.py
new file mode 100644
index 0000000..af1c722
--- /dev/null
+++ b/scripts/api/filter_failed_datasets_from_collection.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Given a history name and a collection integer id in that history, this script will split the collection into the failed/pending/empty
+datasets "(not ok)" and the successfully finished datasets "(ok)".
+
+Sample call:
+python filter_failed_datasets_from_collection.py <GalaxyUrl> <ApiKey> MySpecialHistory 1234
+"""
+from __future__ import print_function
+
+import sys
+
+from bioblend.galaxy import (
+    dataset_collections as collections,
+    GalaxyInstance
+)
+
+if len(sys.argv) < 5:
+    print("Usage: %s <GalaxyUrl> <ApiKey> <HistoryName (must be unique)> <CollectionHistoryId (i.e. the simple integer id)>" % sys.argv[0])
+    exit(0)
+
+galaxyUrl = sys.argv[1]
+galaxyApiKey = sys.argv[2]
+historyName = sys.argv[3]
+collectionHistoryId = int(sys.argv[4])
+
+gi = GalaxyInstance(url=galaxyUrl, key=galaxyApiKey)
+
+historyMatches = gi.histories.get_histories(name=historyName)
+if len(historyMatches) == 0:
+    print("Error: no history matches that name.")
+    exit(1)
+if len(historyMatches) > 1:
+    print("Error: more than one history matches that name.")
+    exit(1)
+
+historyId = historyMatches[0]['id']
+historyContents = gi.histories.show_history(historyId, contents=True, deleted=False, visible=True, details=False)
+matchingCollections = [x for x in historyContents if x['hid'] == collectionHistoryId]
+
+if len(matchingCollections) == 0:
+    print("Error: no collections matching that id found.")
+    exit(1)
+
+if len(matchingCollections) > 1:
+    print("Error: more than one collection matching that id found (WTF?)")
+    exit(1)
+
+collectionId = matchingCollections[0]['id']
+failedCollection = gi.histories.show_dataset_collection(historyId, collectionId)
+okDatasets = [d for d in failedCollection['elements'] if d['object']['state'] == 'ok' and d['object']['file_size'] > 0]
+notOkDatasets = [d for d in failedCollection['elements'] if d['object']['state'] != 'ok' or d['object']['file_size'] == 0]
+okCollectionName = failedCollection['name'] + " (ok)"
+notOkCollectionName = failedCollection['name'] + " (not ok)"
+
+gi.histories.create_dataset_collection(
+    history_id=historyId,
+    collection_description=collections.CollectionDescription(
+        name=okCollectionName,
+        elements=[collections.HistoryDatasetElement(d['object']['name'], d['object']['id']) for d in okDatasets]))
+
+gi.histories.create_dataset_collection(
+    history_id=historyId,
+    collection_description=collections.CollectionDescription(
+        name=notOkCollectionName,
+        elements=[collections.HistoryDatasetElement(d['object']['name'], d['object']['id']) for d in notOkDatasets]))
diff --git a/scripts/api/form_create_from_xml.py b/scripts/api/form_create_from_xml.py
new file mode 100644
index 0000000..c395a82
--- /dev/null
+++ b/scripts/api/form_create_from_xml.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+try:
+    data = {}
+    data[ 'xml_text' ] = open( sys.argv[3] ).read()
+except IndexError:
+    print('usage: %s key url form_xml_description_file' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/history_create_history.py b/scripts/api/history_create_history.py
new file mode 100644
index 0000000..4f9be80
--- /dev/null
+++ b/scripts/api/history_create_history.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+try:
+    assert sys.argv[2]
+except IndexError:
+    print('usage: %s key url [name] ' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data = {}
+    data[ 'name' ] = sys.argv[3]
+except IndexError:
+    pass
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/history_delete_history.py b/scripts/api/history_delete_history.py
new file mode 100644
index 0000000..be3d117
--- /dev/null
+++ b/scripts/api/history_delete_history.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import delete
+
+try:
+    assert sys.argv[2]
+except IndexError:
+    print('usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data = {}
+    data[ 'purge' ] = sys.argv[3]
+except IndexError:
+    pass
+
+delete( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/import_library_dataset_to_history.py b/scripts/api/import_library_dataset_to_history.py
new file mode 100644
index 0000000..7b5776b
--- /dev/null
+++ b/scripts/api/import_library_dataset_to_history.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+try:
+    assert sys.argv[3]
+    data = {}
+    data['from_ld_id'] = sys.argv[3]
+except IndexError:
+    print('usage: %s key url library_file_id' % os.path.basename( sys.argv[0] ))
+    print('    library_file_id is from /api/libraries/<library_id>/contents/<library_file_id>')
+    sys.exit( 1 )
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/import_workflows_from_installed_tool_shed_repository.py b/scripts/api/import_workflows_from_installed_tool_shed_repository.py
new file mode 100644
index 0000000..a3d59fc
--- /dev/null
+++ b/scripts/api/import_workflows_from_installed_tool_shed_repository.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+"""
+Import one or more exported workflows contained within a specified tool shed repository installed into Galaxy.
+
+Here is a working example of how to use this script to import the workflows from a repository installed into Galaxy.
+python ./import_workflows_from_installed_tool_shed_repository.py -a 22be3b -l http://localhost:8763/ -n workflow_with_tools -o test -r ef45bb64237e -u http://localhost:9009/
+"""
+from __future__ import print_function
+import argparse
+
+from common import display, submit
+
+
+def clean_url( url ):
+    if url.find( '//' ) > 0:
+        # We have a URL that includes a protocol, something like: http://localhost:9009
+        items = url.split( '//' )
+        return items[ 1 ].rstrip( '/' )
+    return url.rstrip( '/' )
+
+
+def main( options ):
+    api_key = options.api
+    base_galaxy_url = options.local_url.rstrip( '/' )
+    base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+    cleaned_tool_shed_url = clean_url( base_tool_shed_url )
+    installed_tool_shed_repositories_url = '%s/api/tool_shed_repositories' % base_galaxy_url
+    tool_shed_repository_id = None
+    installed_tool_shed_repositories = display( api_key, installed_tool_shed_repositories_url, return_formatted=False )
+    for installed_tool_shed_repository in installed_tool_shed_repositories:
+        tool_shed = str( installed_tool_shed_repository[ 'tool_shed' ] )
+        name = str( installed_tool_shed_repository[ 'name' ] )
+        owner = str( installed_tool_shed_repository[ 'owner' ] )
+        changeset_revision = str( installed_tool_shed_repository[ 'changeset_revision' ] )
+        if tool_shed == cleaned_tool_shed_url and name == options.name and owner == options.owner and changeset_revision == options.changeset_revision:
+            tool_shed_repository_id = installed_tool_shed_repository[ 'id' ]
+            break
+    if tool_shed_repository_id:
+        # Get the list of exported workflows contained in the installed repository.
+        url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/exported_workflows' % str( tool_shed_repository_id ) )
+        exported_workflows = display( api_key, url, return_formatted=False )
+        if exported_workflows:
+            # Import all of the workflows in the list of exported workflows.
+            data = {}
+            # NOTE: to import a single workflow, add an index to data (e.g.,
+            # data[ 'index' ] = 0
+            # and change the url to be ~/import_workflow (singular).  For example,
+            # url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/import_workflow' % str( tool_shed_repository_id ) )
+            url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/import_workflows' % str( tool_shed_repository_id ) )
+            submit( options.api, url, data )
+    else:
+        print("Invalid tool_shed / name / owner / changeset_revision.")
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Import workflows contained in an installed tool shed repository via the Galaxy API.' )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    parser.add_argument( "-l", "--local", dest="local_url", required=True, help="URL of the galaxy instance." )
+    parser.add_argument( "-n", "--name", required=True, help="Repository name." )
+    parser.add_argument( "-o", "--owner", required=True, help="Repository owner." )
+    parser.add_argument( "-r", "--revision", dest="changeset_revision", required=True, help="Repository owner." )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/api/install_tool_shed_repositories.py b/scripts/api/install_tool_shed_repositories.py
new file mode 100644
index 0000000..133381f
--- /dev/null
+++ b/scripts/api/install_tool_shed_repositories.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+"""
+If a repository name, owner and revision are specified, install that revision from a specified tool shed into Galaxy.
+Specifying a revision is optional; if it is not specified, the latest installable revision will automatically be installed.
+However, the name and owner are required.
+
+This example demonstrates installation of a repository that contains valid tools, loading them into a section of the
+Galaxy tool panel or creating a new tool panel section.  To choose whether tool dependencies or repository dependencies
+should be installed, use --tool-deps or --repository-deps.
+
+This example requires a tool panel config file (e.g., tool_conf.xml, shed_tool_conf.xml, etc.) to contain a tool panel section like the following:
+
+<section id="from_test_tool_shed" name="From Test Tool Shed" version="">
+</section>
+
+Here is a working example of how to use this script to install a repository from the test tool shed.
+./install_tool_shed_repositories.py --api <api key> --local <galaxy base url> --url http://testtoolshed.g2.bx.psu.edu --name gregs_filter --owner greg --tool-deps
+"""
+import argparse
+
+from common import submit
+
+
+def main( options ):
+    """Collect all user data and install the tools via the Galaxy API."""
+    data = {}
+    data[ 'tool_shed_url' ] = options.tool_shed_url
+    data[ 'name' ] = options.name
+    data[ 'owner' ] = options.owner
+    if options.changeset_revision:
+        data[ 'changeset_revision' ] = options.changeset_revision
+    else:
+        # If the changeset_revision is not specified, default to the latest installable revision.
+        revision_data = {}
+        revision_data[ 'tool_shed_url' ] = options.tool_shed_url.rstrip( '/' )
+        revision_data[ 'name' ] = options.name
+        revision_data[ 'owner' ] = options.owner
+        revision_url = '%s%s' % ( options.local_url.rstrip( '/' ), '/api/tool_shed_repositories/get_latest_installable_revision' )
+        latest_installable_revision = submit( options.api,
+                                              revision_url,
+                                              revision_data,
+                                              return_formatted=False )
+        data[ 'changeset_revision' ] = latest_installable_revision
+    if options.tool_panel_section_id:
+        data[ 'tool_panel_section_id' ] = options.tool_panel_section_id
+    elif options.new_tool_panel_section_label:
+        data[ 'new_tool_panel_section_label' ] = options.new_tool_panel_section_label
+    if options.install_repository_dependencies:
+        data[ 'install_repository_dependencies' ] = options.install_repository_dependencies
+    if options.install_tool_dependencies:
+        data[ 'install_tool_dependencies' ] = options.install_tool_dependencies
+    submit( options.api, '%s%s' % ( options.local_url.rstrip( '/' ), '/api/tool_shed_repositories/new/install_repository_revision' ), data )
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Installation of tool shed repositories via the Galaxy API.' )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+    parser.add_argument( "-l", "--local", dest="local_url", required=True, help="URL of the galaxy instance." )
+    parser.add_argument( "-n", "--name", required=True, help="Repository name." )
+    parser.add_argument( "-o", "--owner", required=True, help="Repository owner." )
+    parser.add_argument( "-r", "--revision", dest="changeset_revision", help="Repository revision." )
+    parser.add_argument( "--panel-section-id", dest="tool_panel_section_id", help="Tool panel section id if you want to add your repository to an existing tool section." )
+    parser.add_argument( "--panel-section-name", dest="new_tool_panel_section_label", help="New tool panel section label. If specified a new tool section will be created." )
+    parser.add_argument( "--repository-deps", dest="install_repository_dependencies", action="store_true", default=False, help="Install repository dependencies. [False]")
+    parser.add_argument( "--tool-deps", dest="install_tool_dependencies", action="store_true", default=False, help="Install tool dependencies. [False]" )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/api/library_create_folder.py b/scripts/api/library_create_folder.py
new file mode 100755
index 0000000..077a5e4
--- /dev/null
+++ b/scripts/api/library_create_folder.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+try:
+    data = {}
+    data[ 'folder_id' ] = sys.argv[3]
+    data[ 'name' ] = sys.argv[4]
+    data[ 'create_type' ] = 'folder'
+except IndexError:
+    print('usage: %s key url folder_id name [description]' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data[ 'description' ] = sys.argv[5]
+except IndexError:
+    print("Unable to set description; using empty description in its place")
+    data[ 'description' ] = ''
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/library_create_library.py b/scripts/api/library_create_library.py
new file mode 100755
index 0000000..73be35d
--- /dev/null
+++ b/scripts/api/library_create_library.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+try:
+    data = {}
+    data[ 'name' ] = sys.argv[3]
+except IndexError:
+    print('usage: %s key url name [description] [synopsis]' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data[ 'description' ] = sys.argv[4]
+    data[ 'synopsis' ] = sys.argv[5]
+except IndexError:
+    pass
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/library_upload_dir.py b/scripts/api/library_upload_dir.py
new file mode 100644
index 0000000..c4b6ad8
--- /dev/null
+++ b/scripts/api/library_upload_dir.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+import argparse
+import os
+import sys
+
+from bioblend import galaxy
+
+
+class Uploader:
+
+    def __init__(self, url, api, library_id, folder_id, should_link,
+                 non_local):
+        self.gi = galaxy.GalaxyInstance(url=url, key=api)
+        self.library_id = library_id
+        self.folder_id = folder_id
+        self.should_link = should_link
+        self.non_local = non_local
+
+        self.memo_path = {}
+        self.prepopulate_memo()
+
+    def prepopulate_memo(self):
+        """
+        Because the Galaxy Data Libraries API/system does not act like any
+        other file system in existence, and allows multiple files/folders with
+        identical names in the same parent directory, we have to prepopulate
+        the memoization cache with everything currently in the target
+        directory.
+
+        Because the Galaxy Data Libraries API does not work from a perspective
+        of "show me what is in this directory", we are forced to get the entire
+        contents of the data library, and then filter out things that are
+        interesting to us based on a folder prefix.
+        """
+        existing = self.gi.libraries.show_library(self.library_id, contents=True)
+
+        uploading_to = [x for x in existing if x['id'] == self.folder_id]
+        if len(uploading_to) == 0:
+            raise Exception("Unknown folder [%s] in library [%s]" %
+                            (self.folder_id, self.library_id))
+        else:
+            uploading_to = uploading_to[0]
+
+        for x in existing:
+            # We only care if it's a subdirectory of where we're uploading to
+            if not x['name'].startswith(uploading_to['name']):
+                continue
+
+            name_part = x['name'].split(uploading_to['name'], 1)[-1]
+            if name_part.startswith('/'):
+                name_part = name_part[1:]
+            self.memo_path[name_part] = x['id']
+
+    def memoized_path(self, path_parts, base_folder=None):
+        """Get the folder ID for a given folder path specified by path_parts.
+
+        If the folder does not exist, it will be created ONCE (during the
+        instantiation of this Uploader object). After that it is stored and
+        recycled. If the Uploader object is re-created, it is not aware of
+        previously existing paths and will not respect those. TODO: handle
+        existing paths.
+        """
+        if base_folder is None:
+            base_folder = self.folder_id
+        dropped_prefix = []
+
+        fk = '/'.join(path_parts)
+        if fk in self.memo_path:
+            # print "Cache hit %s" % fk
+            return self.memo_path[fk]
+        else:
+            # print "Cache miss %s" % fk
+            for i in reversed(range(len(path_parts))):
+                fk = '/'.join(path_parts[0:i + 1])
+                if fk in self.memo_path:
+                    # print "Parent folder hit %s" % fk
+                    dropped_prefix = path_parts[0:i + 1]
+                    path_parts = path_parts[i + 1:]
+                    base_folder = self.memo_path[fk]
+                    break
+
+        nfk = []
+        for i in range(len(path_parts)):
+            nfk.append('/'.join(list(dropped_prefix) + list(path_parts[0:i + 1])))
+
+        # Recursively create the path from our base_folder starting points,
+        # getting the IDs of each folder per path component
+        ids = self.recursively_build_path(path_parts, base_folder)
+
+        # These are then associated with the paths.
+        for (key, fid) in zip(nfk, ids):
+            self.memo_path[key] = fid
+        return ids[-1]
+
+    def recursively_build_path(self, path_parts, parent_folder_id, ids=None):
+        """Given an iterable of path components and a parent folder id, recursively
+        create directories below parent_folder_id"""
+        if ids is None:
+            ids = []
+        if len(path_parts) == 0:
+            return ids
+        else:
+            pf = self.gi.libraries.create_folder(self.library_id, path_parts[0], base_folder_id=parent_folder_id)
+            ids.append(pf[0]['id'])
+            # print "create_folder(%s, %s, %s) = %s" % (self.library_id, path_parts[0], parent_folder_id, pf[0]['id'])
+            return self.recursively_build_path(path_parts[1:], pf[0]['id'], ids=ids)
+
+    # http://stackoverflow.com/questions/13505819/python-split-path-recursively/13505966#13505966
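+    # e.g. rec_split('foo/bar/baz.txt') == ('foo', 'bar', 'baz.txt')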
+    def rec_split(self, s):
+        if s == '/':
+            return ()
+
+        rest, tail = os.path.split(s)
+        if tail == '.':
+            return ()
+        if rest == '':
+            return tail,
+        return self.rec_split(rest) + (tail,)
+
+    def upload(self):
+        all_files = [x.strip() for x in sys.stdin.readlines()]
+
+        for idx, path in enumerate(all_files):
+            (dirName, fname) = path.rsplit(os.path.sep, 1)
+            if not os.path.exists(os.path.join(dirName, fname)):
+                continue
+            # Figure out what the memo key will be early
+            basepath = self.rec_split(dirName)
+            if len(basepath) == 0:
+                memo_key = fname
+            else:
+                memo_key = os.path.join(os.path.join(*basepath), fname)
+
+            # So that we can check if it really needs to be uploaded.
+            already_uploaded = memo_key in self.memo_path
+            fid = self.memoized_path(basepath, base_folder=self.folder_id)
+            print('[%s/%s] %s/%s uploaded=%s' % (idx + 1, len(all_files), fid, fname, already_uploaded))
+
+            if not already_uploaded:
+                if self.non_local:
+                    self.gi.libraries.upload_file_from_local_path(
+                        self.library_id,
+                        os.path.join(dirName, fname),
+                        folder_id=fid,
+                    )
+                else:
+                    self.gi.libraries.upload_from_galaxy_filesystem(
+                        self.library_id,
+                        os.path.join(dirName, fname),
+                        folder_id=fid,
+                        link_data_only='link_to_files' if self.should_link else 'copy_files',
+                    )
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Upload a directory into a data library')
+    parser.add_argument( "-u", "--url", dest="url", required=True, help="Galaxy URL" )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+
+    parser.add_argument( "-l", "--lib", dest="library_id", required=True, help="Library ID" )
+    parser.add_argument( "-f", "--folder", dest="folder_id", help="Folder ID. If not specified, will go to root of library." )
+
+    parser.add_argument( "--nonlocal", dest="non_local", action="store_true", default=False,
+                        help="Set this flag if you are NOT running this script on your Galaxy head node with access to the full filesystem" )
+    parser.add_argument( "--link", dest="should_link", action="store_true", default=False,
+                        help="Link datasets only, do not upload to Galaxy. ONLY Avaialble if you run 'locally' relative to your Galaxy head node/filesystem ")
+    args = parser.parse_args()
+
+    u = Uploader(**vars(args))
+    u.upload()
diff --git a/scripts/api/library_upload_from_import_dir.py b/scripts/api/library_upload_from_import_dir.py
new file mode 100755
index 0000000..f5ae2cf
--- /dev/null
+++ b/scripts/api/library_upload_from_import_dir.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+"""
+Example usage:
+./library_upload_from_import_dir.py <key> http://127.0.0.1:8080/api/libraries/dda47097d9189f15/contents Fdda47097d9189f15 auto /Users/EnisAfgan/projects/pprojects/galaxy/lib_upload_dir ?
+"""
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+try:
+    data = {}
+    data[ 'folder_id' ] = sys.argv[3]
+    data[ 'file_type' ] = sys.argv[4]
+    data[ 'server_dir' ] = sys.argv[5]
+    data[ 'dbkey' ] = sys.argv[6]
+    data[ 'upload_option' ] = 'upload_directory'
+    data[ 'create_type' ] = 'file'
+except IndexError:
+    print('usage: %s key url folder_id file_type server_dir dbkey' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/load_data_with_metadata.py b/scripts/api/load_data_with_metadata.py
new file mode 100755
index 0000000..036eb78
--- /dev/null
+++ b/scripts/api/load_data_with_metadata.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+"""
+This script scans a directory for files with companion '.json' files, then loads
+the data from each file and attaches the .json contents using the 'extended_metadata'
+system in the data library.
+
+Sample call:
+python load_data_with_metadata.py <api_key> <api_url> /data/folder "API Imports"
+
+NOTE: The upload method used requires the data library filesystem upload option
+allow_library_path_paste to be enabled in Galaxy's configuration.
+"""
+from __future__ import print_function
+import argparse
+import json
+import os
+import sys
+
+from common import display, submit
+
+
+def load_file(fullpath, api_key, api_url, library_id, library_folder_id, uuid_field=None):
+    data = {}
+    data['folder_id'] = library_folder_id
+    data['file_type'] = 'auto'
+    data['dbkey'] = ''
+    data['upload_option'] = 'upload_paths'
+    data['filesystem_paths'] = fullpath
+    data['create_type'] = 'file'
+    data['link_data_only'] = 'link_to_files'
+
+    handle = open( fullpath + ".json" )
+    smeta = handle.read()
+    handle.close()
+    ext_meta = json.loads(smeta)
+    data['extended_metadata'] = ext_meta
+    if uuid_field is not None and uuid_field in ext_meta:
+        data['uuid'] = ext_meta[uuid_field]
+
+    libset = submit(api_key, api_url + "libraries/%s/contents" % library_id, data, return_formatted=True)
+    print(libset)
+
+
+def main(api_key, api_url, in_folder, data_library, uuid_field=None):
+    # Find/Create data library with the above name.  Assume we're putting datasets in the root folder '/'
+    libs = display(api_key, api_url + 'libraries', return_formatted=False)
+    library_id = None
+    for library in libs:
+        if library['name'] == data_library:
+            library_id = library['id']
+    if not library_id:
+        lib_create_data = {'name': data_library}
+        library = submit(api_key, api_url + 'libraries', lib_create_data, return_formatted=False)
+        library_id = library['id']
+    folders = display(api_key, api_url + "libraries/%s/contents" % library_id, return_formatted=False)
+    library_folder_id = None
+    for f in folders:
+        if f['name'] == "/":
+            library_folder_id = f['id']
+    if not library_id or not library_folder_id:
+        print("Failure to configure library destination.")
+        sys.exit(1)
+
+    if os.path.isfile(in_folder):
+        if os.path.exists(in_folder + ".json"):
+            fullpath = os.path.abspath(in_folder)
+            print("Loading", fullpath)
+            load_file(fullpath, api_key, api_url, library_id, library_folder_id, uuid_field)
+    else:
+        for fname in os.listdir(in_folder):
+            fullpath = os.path.join(in_folder, fname)
+            if os.path.isfile(fullpath) and os.path.exists(fullpath + ".json"):
+                print("Loading", fullpath)
+                load_file(fullpath, api_key, api_url, library_id, library_folder_id, uuid_field)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("api_key", help="API KEY")
+    parser.add_argument('api_url', help='API URL')
+    parser.add_argument("in_folder", help="Input Folder")
+    parser.add_argument("data_library", help="Data Library")
+    parser.add_argument("--uuid_field", help="UUID Field", default=None)
+    args = parser.parse_args()
+    main(args.api_key, args.api_url, args.in_folder, args.data_library, args.uuid_field)
diff --git a/scripts/api/repair_tool_shed_repository.py b/scripts/api/repair_tool_shed_repository.py
new file mode 100644
index 0000000..4926c50
--- /dev/null
+++ b/scripts/api/repair_tool_shed_repository.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+"""
+Repair a specified repository revision previously installed into Galaxy.
+
+Here is a working example of how to use this script to repair a repository installed into Galaxy.
+./repair_tool_shed_repository.py --api <api key> --local <galaxy base url> --url http://testtoolshed.g2.bx.psu.edu --name gregs_filter --owner greg --revision f28d5018f9cb
+"""
+from __future__ import print_function
+import argparse
+
+from common import display, submit
+
+
+def clean_url( url ):
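+    # Strip any protocol prefix and trailing slash,
+    # e.g. 'http://localhost:9009/' -> 'localhost:9009'.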
+    if url.find( '//' ) > 0:
+        # We have an url that includes a protocol, something like: http://localhost:9009
+        items = url.split( '//' )
+        return items[ 1 ].rstrip( '/' )
+    return url.rstrip( '/' )
+
+
+def main( options ):
+    """Collect all user data and install the tools via the Galaxy API."""
+    api_key = options.api
+    base_galaxy_url = options.local_url.rstrip( '/' )
+    base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+    cleaned_tool_shed_url = clean_url( base_tool_shed_url )
+    installed_tool_shed_repositories_url = '%s/api/%s' % ( base_galaxy_url, 'tool_shed_repositories' )
+    data = {}
+    data[ 'tool_shed_url' ] = cleaned_tool_shed_url
+    data[ 'name' ] = options.name
+    data[ 'owner' ] = options.owner
+    data[ 'changeset_revision' ] = options.changeset_revision
+    tool_shed_repository_id = None
+    installed_tool_shed_repositories = display( api_key, installed_tool_shed_repositories_url, return_formatted=False )
+    for installed_tool_shed_repository in installed_tool_shed_repositories:
+        tool_shed = str( installed_tool_shed_repository[ 'tool_shed' ] )
+        name = str( installed_tool_shed_repository[ 'name' ] )
+        owner = str( installed_tool_shed_repository[ 'owner' ] )
+        changeset_revision = str( installed_tool_shed_repository[ 'changeset_revision' ] )
+        if tool_shed == cleaned_tool_shed_url and name == options.name and owner == options.owner and changeset_revision == options.changeset_revision:
+            tool_shed_repository_id = installed_tool_shed_repository[ 'id' ]
+            break
+    if tool_shed_repository_id:
+        url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/repair_repository_revision' % str( tool_shed_repository_id ) )
+        submit( options.api, url, data )
+    else:
+        print("Invalid tool_shed / name / owner / changeset_revision.")
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Repair a tool shed repository revision previously installed into Galaxy via the Galaxy API.' )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+    parser.add_argument( "-l", "--local", dest="local_url", required=True, help="URL of the galaxy instance." )
+    parser.add_argument( "-n", "--name", required=True, help="Repository name." )
+    parser.add_argument( "-o", "--owner", required=True, help="Repository owner." )
+    parser.add_argument( "-r", "--revision", dest="changeset_revision", required=True, help="Repository owner." )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/api/request_type_create_from_xml.py b/scripts/api/request_type_create_from_xml.py
new file mode 100755
index 0000000..5bf98ba
--- /dev/null
+++ b/scripts/api/request_type_create_from_xml.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+try:
+    data = {}
+    data[ 'request_form_id' ] = sys.argv[3]
+    data[ 'sample_form_id' ] = sys.argv[4]
+    data[ 'sequencer_id' ] = sys.argv[5]
+    data[ 'xml_text' ] = open( sys.argv[6] ).read()
+except IndexError:
+    print('usage: %s key url request_form_id sample_form_id sequencer_id request_type_xml_description_file [access_role_ids,]' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data[ 'role_ids' ] = [ i for i in sys.argv[7].split( ',' ) if i ]
+except IndexError:
+    data[ 'role_ids' ] = []
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/requests_update_state.py b/scripts/api/requests_update_state.py
new file mode 100755
index 0000000..6a0cb8b
--- /dev/null
+++ b/scripts/api/requests_update_state.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import update
+
+try:
+    data = {}
+    data[ 'update_type' ] = 'request_state'
+except IndexError:
+    print('usage: %s key url' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+
+update( sys.argv[1], sys.argv[2], data, return_formatted=True )
diff --git a/scripts/api/reset_metadata_on_installed_repositories.py b/scripts/api/reset_metadata_on_installed_repositories.py
new file mode 100644
index 0000000..e7c5b96
--- /dev/null
+++ b/scripts/api/reset_metadata_on_installed_repositories.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+"""
+Script to reset metadata on all Tool Shed repositories installed into a Galaxy instance.  The received
+API key must be associated with a Galaxy admin user.
+
+usage: reset_metadata_on_installed_repositories.py -a <api_key> -u <galaxy_url>
+
+Here is a working example of how to use this script.
+python ./reset_metadata_on_installed_repositories.py -a 22be3b -u http://localhost:8763/
+"""
+import argparse
+
+from common import submit
+
+
+def main( options ):
+    base_galaxy_url = options.galaxy_url.rstrip( '/' )
+    url = '%s/api/tool_shed_repositories/reset_metadata_on_installed_repositories' % base_galaxy_url
+    submit( options.api, url, {} )
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Reset metadata on all Tool Shed repositories installed into Galaxy via the Galaxy API.' )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+    parser.add_argument( "-u", "--url", dest="galaxy_url", required=True, help="Galaxy URL" )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/api/sample_configurations/sample_tracking/request_form.xml b/scripts/api/sample_configurations/sample_tracking/request_form.xml
new file mode 100644
index 0000000..a22c20a
--- /dev/null
+++ b/scripts/api/sample_configurations/sample_tracking/request_form.xml
@@ -0,0 +1,15 @@
+<form type="request" name="Sample Request Form" description="Sample Request Form description">
+    <fields>
+        <field name="field1" type="text" label="Request form field1" description="Description of request form field1" value="" required="True"/>
+        <field name="field2" type="text" area="True" label="Request form field2" description="Description of request form field2" value=""/>
+        <field name="field3" type="select" label="Request form field3" description="Description of request form field3" value="">
+            <option value="option1" />
+            <option value="option2" />
+        </field>
+        <field name="field4" type="select" label="Request form field4" description="Description of request form field4" value="" checkboxes="True">
+            <option value="option1" />
+            <option value="option2" />
+        </field>
+        <field name="field5" type="address" label="Request form field5" description="Description of request form field5"/>
+    </fields>
+</form>
\ No newline at end of file
diff --git a/scripts/api/sample_configurations/sample_tracking/request_type.xml b/scripts/api/sample_configurations/sample_tracking/request_type.xml
new file mode 100644
index 0000000..412d008
--- /dev/null
+++ b/scripts/api/sample_configurations/sample_tracking/request_type.xml
@@ -0,0 +1,9 @@
+<sequencer name="Sample Sequencer configuration" description="Sample Sequencer configuration description">
+    <sample_states>
+        <state name="New" description="Sample entered into the system"/>
+        <state name="Received" description="Sample tube received"/>
+        <state name="Library Started" description="Sample library preparation"/>
+        <state name="Run Started" description="Sequence run in progress"/>
+        <state name="Done" description="Sequence run complete"/>
+    </sample_states>
+</sequencer>
\ No newline at end of file
diff --git a/scripts/api/sample_configurations/sample_tracking/sample_form.xml b/scripts/api/sample_configurations/sample_tracking/sample_form.xml
new file mode 100644
index 0000000..da41054
--- /dev/null
+++ b/scripts/api/sample_configurations/sample_tracking/sample_form.xml
@@ -0,0 +1,18 @@
+<form type="sample" name="Sample Sample Form" description="Sample Sample Form description">
+    <layout>
+        <grid name="Run details" />
+    </layout>
+    <fields>
+        <field name="field1" type="text" label="Sample form field1" description="Description of sample form field1" value="" required="True" layout="Run details"/>
+        <field name="field2" type="text" area="True" label="Sample form field2" description="Description of sample form field2" value="" layout="Run details"/>
+        <field name="field3" type="select" label="Sample form field3" description="Description of sample form field3" value="" layout="Run details">
+            <option value="option1" />
+            <option value="option2" />
+        </field>
+        <field name="field4" type="select" label="Sample form field4" description="Description of sample form field4" value="" checkboxes="True" layout="Run details">
+            <option value="option1" />
+            <option value="option2" />
+        </field>
+        <field name="field5" type="address" label="Sample form field5" description="Description of sample form field5" layout="Run details"/>
+    </fields>
+</form>
\ No newline at end of file
diff --git a/scripts/api/sample_dataset_update_status.py b/scripts/api/sample_dataset_update_status.py
new file mode 100644
index 0000000..e60bb90
--- /dev/null
+++ b/scripts/api/sample_dataset_update_status.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import update
+
+try:
+    data = {}
+    data[ 'update_type' ] = 'sample_dataset_transfer_status'
+    data[ 'sample_dataset_ids' ] = sys.argv[3].split(',')
+    data[ 'new_status' ] = sys.argv[4]
+except IndexError:
+    print('usage: %s key url sample_dataset_ids new_state [error msg]' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data[ 'error_msg' ] = sys.argv[5]
+except IndexError:
+    data[ 'error_msg' ] = ''
+print(data)
+update( sys.argv[1], sys.argv[2], data, return_formatted=True )
diff --git a/scripts/api/sample_update_state.py b/scripts/api/sample_update_state.py
new file mode 100755
index 0000000..fc5939d
--- /dev/null
+++ b/scripts/api/sample_update_state.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import update
+
+try:
+    data = {}
+    data[ 'update_type' ] = 'sample_state'
+    data[ 'new_state' ] = sys.argv[3]
+except IndexError:
+    print('usage: %s key url new_state [comment]' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data[ 'comment' ] = sys.argv[4]
+except IndexError:
+    data[ 'comment' ] = ''
+
+update( sys.argv[1], sys.argv[2], data, return_formatted=True )
diff --git a/scripts/api/search.py b/scripts/api/search.py
new file mode 100644
index 0000000..e503db4
--- /dev/null
+++ b/scripts/api/search.py
@@ -0,0 +1,59 @@
+"""
+Sample script for Galaxy Search API
+"""
+from __future__ import print_function
+import json
+import requests
+import sys
+
+
+class RemoteGalaxy(object):
+
+    def __init__(self, url, api_key):
+        self.url = url
+        self.api_key = api_key
+
+    def get(self, path):
+        c_url = self.url + path
+        params = {}
+        params['key'] = self.api_key
+        req = requests.get(c_url, params=params)
+        return req.json()
+
+    def post(self, path, payload):
+        c_url = self.url + path
+        params = {}
+        params['key'] = self.api_key
+        req = requests.post(c_url, data=json.dumps(payload), params=params, headers={'Content-Type': 'application/json'} )
+        return req.json()
+
+
+if __name__ == "__main__":
+    server = sys.argv[1]
+    api_key = sys.argv[2]
+
+    rg = RemoteGalaxy(server, api_key)
+
+    print("select name, id, file_size from hda")
+    print(rg.post("/api/search", { "query": "select name, id, file_size from hda" }))
+
+    print("select name from hda")
+    print(rg.post("/api/search", { "query": "select name from hda" }))
+
+    print("select name, model_class from ldda")
+    print(rg.post("/api/search", { "query": "select name, model_class from ldda" }))
+
+    print("select * from history")
+    print(rg.post("/api/search", { "query": "select * from history" }))
+
+    print("select * from tool")
+    print(rg.post("/api/search", { "query": "select * from tool" }))
+
+    print("select * from workflow")
+    print(rg.post("/api/search", { "query": "select * from workflow" }))
+
+    print("select id, name from history where name='Unnamed history'")
+    print(rg.post("/api/search", {"query": "select id, name from history where name='Unnamed history'"}))
+
+    print("select * from history where name='Unnamed history'")
+    print(rg.post("/api/search", {"query": "select * from history where name='Unnamed history'"}))
diff --git a/scripts/api/sequencer_configuration_create.py b/scripts/api/sequencer_configuration_create.py
new file mode 100755
index 0000000..7fbf6ed
--- /dev/null
+++ b/scripts/api/sequencer_configuration_create.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import os
+import sys
+
+from common import get, submit
+
+
+def create_sequencer_configuration( key, base_url, request_form_filename, sample_form_filename, request_type_filename, email_addresses, return_formatted=True ):
+    # create request_form
+    data = {}
+    data[ 'xml_text' ] = open( request_form_filename ).read()
+    request_form = submit( key, "%sforms" % base_url, data, return_formatted=False )[0]
+    # create sample_form
+    data = {}
+    data[ 'xml_text' ] = open( sample_form_filename ).read()
+    sample_form = submit( key, "%sforms" % base_url, data, return_formatted=False )[0]
+    # get user ids
+    user_ids = [ user['id'] for user in get( key, "%susers" % base_url ) if user['email'] in email_addresses ]
+    # create role, assign to user
+    data = {}
+    data[ 'name' ] = "request_type_role_%s_%s_%s name" % ( request_form['id'], sample_form['id'], '_'.join( email_addresses ) )
+    data[ 'description' ] = "request_type_role_%s_%s_%s description" % ( request_form['id'], sample_form['id'], '_'.join( email_addresses ) )
+    data[ 'user_ids' ] = user_ids
+    role_ids = [ role[ 'id' ] for role in submit( key, "%sroles" % base_url, data, return_formatted=False ) ]
+    # create request_type
+    data = {}
+    data[ 'request_form_id' ] = request_form[ 'id' ]
+    data[ 'sample_form_id' ] = sample_form[ 'id' ]
+    data[ 'role_ids' ] = role_ids
+    data[ 'xml_text' ] = open( request_type_filename ).read()
+    return submit( key, "%srequest_types" % base_url, data, return_formatted=return_formatted )  # create and print out results for request type
+
+
+def main():
+    try:
+        key = sys.argv[1]
+        base_url = sys.argv[2]
+        request_form_filename = sys.argv[3]
+        sample_form_filename = sys.argv[4]
+        request_type_filename = sys.argv[5]
+        email_addresses = sys.argv[6].split( ',' )
+    except IndexError:
+        print('usage: %s key base_url request_form_xml_description_file sample_form_xml_description_file request_type_xml_description_file email_address1[,email_address2]' % os.path.basename( sys.argv[0] ))
+        sys.exit( 1 )
+    return create_sequencer_configuration( key, base_url, request_form_filename, sample_form_filename, request_type_filename, email_addresses, return_formatted=True )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/api/update.py b/scripts/api/update.py
new file mode 100644
index 0000000..5ebbe49
--- /dev/null
+++ b/scripts/api/update.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+"""
+Generic PUT/update script
+
+usage: update.py key url [key=value ...]
+"""
+import sys
+
+from common import update
+
+data = {}
+for k, v in [ kwarg.split('=', 1) for kwarg in sys.argv[3:]]:
+    data[k] = v
+
+update( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/upload_to_history.py b/scripts/api/upload_to_history.py
new file mode 100755
index 0000000..1216cf9
--- /dev/null
+++ b/scripts/api/upload_to_history.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+"""
+Upload a file to the desired history.
+"""
+from __future__ import print_function
+import json
+import os
+import sys
+
+try:
+    import requests
+except ImportError:
+    print("Could not import the requests module. See http://docs.python-requests.org/en/latest/" +
+          " or install with 'pip install requests'")
+    raise
+
+
+def upload_file( base_url, api_key, history_id, filepath, **kwargs ):
+    full_url = base_url + '/api/tools'
+
+    payload = {
+        'key'           : api_key,
+        'tool_id'       : 'upload1',
+        'history_id'    : history_id,
+    }
+    inputs = {
+        'files_0|NAME'  : kwargs.get( 'filename', os.path.basename( filepath ) ),
+        'files_0|type'  : 'upload_dataset',
+        # TODO: the following doesn't work with tools.py
+        'dbkey'         : '?',
+        'file_type'     : kwargs.get( 'file_type', 'auto' ),
+        'ajax_upload'   : 'true',
+    }
+    payload[ 'inputs' ] = json.dumps( inputs )
+
+    response = None
+    with open( filepath, 'rb' ) as file_to_upload:
+        files = { 'files_0|file_data': file_to_upload }
+        response = requests.post( full_url, data=payload, files=files )
+    return response.json()
+
+
+if __name__ == '__main__':
+    if len( sys.argv ) < 5:
+        print("history_upload.py <api key> <galaxy base url> <history id> <filepath to upload>\n" +
+              "  (where galaxy base url is just the root url where your Galaxy is served; e.g. 'localhost:8080')")
+        sys.exit( 1 )
+
+    api_key, base_url, history_id, filepath = sys.argv[1:5]
+    kwargs = dict([ kwarg.split('=', 1) for kwarg in sys.argv[5:]])
+
+    response = upload_file( base_url, api_key, history_id, filepath, **kwargs )
+    # upload_file returns the parsed JSON response (a dict), not a requests.Response
+    print(json.dumps(response, indent=2))
diff --git a/scripts/api/workflow_delete.py b/scripts/api/workflow_delete.py
new file mode 100644
index 0000000..cc68c93
--- /dev/null
+++ b/scripts/api/workflow_delete.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+"""
+# ---------------------------------------------- #
+# PARKLAB, Author: RPARK
+API example script for deleting workflows
+# ---------------------------------------------- #
+
+Example calls:
+python workflow_delete.py <api_key> <galaxy_url>/api/workflows/<workflow id> True
+"""
+from __future__ import print_function
+import os
+import sys
+
+from common import delete
+
+try:
+    assert sys.argv[2]
+except IndexError:
+    print('usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data = {}
+    data[ 'purge' ] = sys.argv[3]
+except IndexError:
+    pass
+
+delete( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/api/workflow_execute.py b/scripts/api/workflow_execute.py
new file mode 100755
index 0000000..d56b88f
--- /dev/null
+++ b/scripts/api/workflow_execute.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+"""
+Execute workflows from the command line.
+Example calls:
+python workflow_execute.py <api_key> <galaxy_url>/api/workflows f2db41e1fa331b3e 'Test API History' '38=ldda=0qr350234d2d192f'
+python workflow_execute.py <api_key> <galaxy_url>/api/workflows f2db41e1fa331b3e 'hist_id=a912e9e5d84530d4' '38=hda=03501d7626bd192f'
+"""
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+
+def main():
+    try:
+        data = {}
+        data['workflow_id'] = sys.argv[3]
+        data['history'] = sys.argv[4]
+        data['ds_map'] = {}
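+        # Each positional arg maps a workflow step to an input dataset as
+        # 'step=src=dataset_id' (src e.g. 'hda' or 'ldda', as in the examples above).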
+        # DBTODO If only one input is given, don't require a step
+        # mapping, just use it for everything?
+        for v in sys.argv[5:]:
+            step, src, ds_id = v.split('=')
+            data['ds_map'][step] = {'src': src, 'id': ds_id}
+    except IndexError:
+        print('usage: %s key url workflow_id history step=src=dataset_id' % os.path.basename(sys.argv[0]))
+        sys.exit(1)
+    submit( sys.argv[1], sys.argv[2], data )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/api/workflow_execute_parameters.py b/scripts/api/workflow_execute_parameters.py
new file mode 100644
index 0000000..f54ebb4
--- /dev/null
+++ b/scripts/api/workflow_execute_parameters.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+"""
+# ---------------------------------------------- #
+# PARKLAB, Author: RPARK
+# ---------------------------------------------- #
+
+Execute workflows from the command line.
+Example calls:
+python workflow_execute_parameters.py <api_key> <galaxy_url>/api/workflows <workflow_id> 'hist_id=<history_id>' '38=hda=<file_id>' 'param=tool=name=value'
+python workflow_execute_parameters.py <api_key> http://localhost:8080/api/workflows 1cd8e2f6b131e891 'Test API' '69=ld=a799d38679e985db' '70=ld=33b43b4e7093c91f' 'param=peakcalling_spp=aligner=bowtie' 'param=bowtie_wrapper=suppressHeader=True' 'param=peakcalling_spp=window_size=1000'
+"""
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+
+def main():
+    try:
+        print("workflow_execute:py:")
+        data = {}
+        data['workflow_id'] = sys.argv[3]
+        data['history'] = sys.argv[4]
+        data['ds_map'] = {}
+
+        # Trying to pass in parameter for my own dictionary
+        data['parameters'] = {}
+
+        # DBTODO If only one input is given, don't require a step
+        # mapping, just use it for everything?
+        for v in sys.argv[5:]:
+            print("Multiple arguments ")
+            print(v)
+
+            try:
+                step, src, ds_id = v.split('=')
+                data['ds_map'][step] = {'src': src, 'id': ds_id}
+
+            except ValueError:
+                print("VALUE ERROR:")
+                wtype, wtool, wparam, wvalue = v.split('=')
+                try:
+                    data['parameters'][wtool] = {'param': wparam, 'value': wvalue}
+                except ValueError:
+                    print("TOOL ID ERROR:")
+
+    except IndexError:
+        print('usage: %s key url workflow_id history step=src=dataset_id' % os.path.basename(sys.argv[0]))
+        sys.exit(1)
+    submit( sys.argv[1], sys.argv[2], data )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/api/workflow_import.py b/scripts/api/workflow_import.py
new file mode 100755
index 0000000..bf83ed9
--- /dev/null
+++ b/scripts/api/workflow_import.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+"""
+Import workflows from the command line.
+Example calls:
+python workflow_import.py <api_key> <galaxy_url> '/path/to/workflow/file [--add_to_menu]'
+"""
+from __future__ import print_function
+import os
+import sys
+
+from common import submit
+
+
+def main():
+    api_key = sys.argv[1]
+    api_base_url = sys.argv[2]
+    api_url = "%s/api/workflows" % api_base_url
+    try:
+        data = {}
+        data['installed_repository_file'] = sys.argv[3]
+        if len(sys.argv) > 4 and sys.argv[4] == "--add_to_menu":
+            data['add_to_menu'] = True
+    except IndexError:
+        print('usage: %s key galaxy_url workflow_file' % os.path.basename(sys.argv[0]))
+        sys.exit(1)
+    # print display( api_key, api_base_url + "/api/workflows" )
+    submit( api_key, api_url, data, return_formatted=False )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/api/workflow_import_from_file_rpark.py b/scripts/api/workflow_import_from_file_rpark.py
new file mode 100644
index 0000000..9ee5a16
--- /dev/null
+++ b/scripts/api/workflow_import_from_file_rpark.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+"""
+Example call:
+python workflow_import_from_file_rpark.py 35a24ae2643785ff3d046c98ea362c7f http://localhost:8080/api/workflows/import 'spp_submodule.ga'
+"""
+from __future__ import print_function
+import json
+import os
+import sys
+
+from common import submit
+
+
+def openWorkflow(in_file):
+    with open(in_file) as f:
+        temp_data = json.load(f)
+    return temp_data
+
+
+try:
+    assert sys.argv[2]
+except IndexError:
+    print('usage: %s key url workflow_file' % os.path.basename( sys.argv[0] ))
+    sys.exit( 1 )
+try:
+    data = {}
+    workflow_dict = openWorkflow(sys.argv[3])
+    data['workflow'] = workflow_dict
+except IndexError:
+    pass
+
+submit( sys.argv[1], sys.argv[2], data )
diff --git a/scripts/auth/pam_auth_helper.py b/scripts/auth/pam_auth_helper.py
new file mode 100755
index 0000000..14d5e50
--- /dev/null
+++ b/scripts/auth/pam_auth_helper.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+import logging
+import signal
+import sys
+
+log = logging.getLogger(__name__)
+
+TIMEOUT = 5
+
+try:
+    import pam
+except ImportError:
+    log.debug('PAM auth helper: Could not import pam module')
+    sys.exit(1)
+
+
+def handle_timeout(signum, stack):
+    raise IOError("Timed out reading input")
+
+
+# set timeout so we don't block on reading stdin
+signal.alarm(TIMEOUT)
+
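+# The caller supplies three newline-terminated values on stdin, in order:
+# PAM service name, username, password.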
+pam_service = sys.stdin.readline().strip()
+pam_username = sys.stdin.readline().strip()
+pam_password = sys.stdin.readline().strip()
+
+# cancel the alarm
+signal.alarm(0)
+
+p_auth = pam.pam()
+authenticated = p_auth.authenticate(pam_username, pam_password, service=pam_service)
+if authenticated:
+    log.debug('PAM auth helper: authentication successful for {}'.format(pam_username))
+    sys.stdout.write('True\n')
+    sys.exit(0)
+else:
+    log.debug('PAM auth helper: authentication failed for {}'.format(pam_username))
+    sys.stdout.write('False\n')
+    sys.exit(1)
diff --git a/scripts/binary_compatibility.py b/scripts/binary_compatibility.py
new file mode 100644
index 0000000..29141d1
--- /dev/null
+++ b/scripts/binary_compatibility.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+"""
+Creates a Python binary-compatibility.cfg as described here:
+
+    https://mail.python.org/pipermail/distutils-sig/2015-July/026617.html
+"""
+from __future__ import print_function
+
+# Vaguely Python 2.6 compatible ArgumentParser import
+try:
+    from argparse import ArgumentParser
+except ImportError:
+    from optparse import OptionParser
+
+    class ArgumentParser(OptionParser):
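+        # Minimal shim: expose an argparse-like interface on top of optparse
+        # by delegating to an internal OptionParser instance.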
+
+        def __init__(self, **kwargs):
+            self.delegate = OptionParser(**kwargs)
+
+        def add_argument(self, *args, **kwargs):
+            if "required" in kwargs:
+                del kwargs["required"]
+            return self.delegate.add_option(*args, **kwargs)
+
+        def parse_args(self, args=None):
+            (options, args) = self.delegate.parse_args(args)
+            return options
+
+import sys
+import json
+
+from pip.pep425tags import get_supported
+from pip.platform import get_specific_platform
+
+
+compatible_platforms = {
+    'centos': 'rhel',
+    'scientific': 'rhel',
+}
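+# i.e. a host detected as 'centos' or 'scientific' is treated as able to
+# install wheels built for the corresponding 'rhel' platform tag.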
+
+
+def install_compat():
+    spec_plat = get_specific_platform()
+    if spec_plat is None:
+        return None
+    this_plat = spec_plat[0]
+    compat_plat = compatible_platforms.get(this_plat, None)
+    rval = {}
+    if compat_plat:
+        print('{0} is binary compatible with {1} (and can install {2} wheels)'
+              .format(this_plat, compat_plat, compat_plat),
+              file=sys.stderr)
+        for py, abi, plat in get_supported():
+            if this_plat in plat:
+                rval[plat] = {'install': [plat.replace(this_plat, compat_plat)]}
+    return rval
+
+
+def main():
+    arg_parser = ArgumentParser()
+    arg_parser.add_argument('-o', '--output', default=None, help='Output to file')
+    args = arg_parser.parse_args()
+
+    compat = install_compat()
+
+    if compat:
+        if args.output is not None:
+            with open(args.output, 'w') as out:
+                json.dump(compat, out)
+        else:
+            print(json.dumps(install_compat()))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/bootstrap_history.py b/scripts/bootstrap_history.py
new file mode 100644
index 0000000..8f4ad47
--- /dev/null
+++ b/scripts/bootstrap_history.py
@@ -0,0 +1,668 @@
+#!/usr/bin/env python
+# Little script to make HISTORY.rst easier to format properly; lots TODO:
+# pull message down and embed, use argparse, handle multiple, etc...
+from __future__ import print_function
+
+import ast
+import calendar
+import datetime
+import json
+import os
+import re
+import string
+import sys
+import textwrap
+
+try:
+    import requests
+except ImportError:
+    requests = None
+try:
+    from pygithub3 import Github
+except ImportError:
+    Github = None
+from six import string_types
+from six.moves.urllib.parse import urljoin
+
+PROJECT_DIRECTORY = os.path.join(os.path.dirname(__file__), os.pardir)
+SOURCE_DIR = os.path.join(PROJECT_DIRECTORY, "lib")
+GALAXY_SOURCE_DIR = os.path.join(SOURCE_DIR, "galaxy")
+GALAXY_VERSION_FILE = os.path.join(GALAXY_SOURCE_DIR, "version.py")
+PROJECT_OWNER = "galaxyproject"
+PROJECT_NAME = "galaxy"
+PROJECT_URL = "https://github.com/%s/%s" % (PROJECT_OWNER, PROJECT_NAME)
+PROJECT_API = "https://api.github.com/repos/%s/%s/" % (PROJECT_OWNER, PROJECT_NAME)
+RELEASES_PATH = os.path.join(PROJECT_DIRECTORY, "doc", "source", "releases")
+
+# Pull requestors to leave uncredited (devteam members)... kind of arbitrary at this point.
+DEVTEAM = [
+    "afgane", "dannon", "blankenberg",
+    "davebx", "martenson", "jmchilton",
+    "tnabtaf", "natefoo", "carlfeberhard",
+    "jgoecks", "guerler", "jennaj",
+    "nekrut", "jxtx", "nitesh1989"
+]
+
+TEMPLATE = """
+.. to_doc
+
+%s
+===============================
+
+.. announce_start
+
+Enhancements
+-------------------------------
+
+.. major_feature
+
+
+.. feature
+
+
+.. enhancement
+
+
+.. small_enhancement
+
+
+
+Fixes
+-------------------------------
+
+.. major_bug
+
+
+.. bug
+
+
+.. github_links
+
+"""
+
+ANNOUNCE_TEMPLATE = string.Template("""
+===========================================================
+${month_name} 20${year} Galaxy Release (v ${release})
+===========================================================
+
+.. include:: _header.rst
+
+Highlights
+===========================================================
+
+**Feature1**
+  Feature description.
+
+**Feature2**
+  Feature description.
+
+**Feature3**
+  Feature description.
+
+`Github <https://github.com/galaxyproject/galaxy>`__
+===========================================================
+
+New Galaxy repository
+  .. code-block:: shell
+
+      $$ git clone -b release_${release} https://github.com/galaxyproject/galaxy.git
+
+Update of existing Galaxy repository
+  .. code-block:: shell
+
+      $$ git checkout release_${release} && git pull --ff-only origin release_${release}
+
+See `our wiki <https://wiki.galaxyproject.org/Develop/SourceCode>`__ for additional details regarding the source code locations.
+
+Release Notes
+===========================================================
+
+.. include:: ${release}.rst
+   :start-after: announce_start
+
+.. include:: _thanks.rst
+""")
+
+NEXT_TEMPLATE = string.Template("""
+===========================================================
+${month_name} 20${year} Galaxy Release (v ${version})
+===========================================================
+
+
+Schedule
+===========================================================
+ * Planned Freeze Date: ${freeze_date}
+ * Planned Release Date: ${release_date}
+""")
+
+
+RELEASE_ISSUE_TEMPLATE = string.Template("""
+
+- [X] **Prep**
+
+      - [X] ~~Create this release issue ``make release-issue RELEASE_CURR=${version}``.~~
+      - [X] ~~Set freeze date (${freeze_date}).~~
+
+- [ ] **Branch Release (on or around ${freeze_date})**
+
+      - [ ] Ensure all [blocking milestone PRs](https://github.com/galaxyproject/galaxy/pulls?q=is%3Aopen+is%3Apr+milestone%3A${version}) have been merged, delayed, or closed.
+
+            make release-check-blocking-prs RELEASE_CURR=${version}
+      - [ ] Merge the latest release into dev and push upstream.
+
+            make release-merge-stable-to-next RELEASE_PREVIOUS=release_${previous_version}
+            make release-push-dev
+
+      - [ ] Create and push release branch:
+
+            make release-create-rc RELEASE_CURR=${version} RELEASE_NEXT=${next_version}
+
+      - [ ] Open PRs from your fork of branch ``version-${version}`` to upstream ``release_${version}`` and of ``version-${next_version}.dev`` to ``dev``.
+
+      - [ ] Open PR against ``release_${version}`` branch to pin flake8 deps in tox.ini to the latest available version.
+
+      - [ ] Update ``next_milestone`` in [P4's configuration](https://github.com/galaxyproject/p4) to `${version}` so it properly tags new PRs.
+
+- [ ] **Deploy and Test Release**
+
+      - [ ] Update test.galaxyproject.org to ensure it is running a dev at or past branch point (${freeze_date} + 1 day).
+      - [ ] Update testtoolshed.g2.bx.psu.edu to ensure it is running a dev at or past branch point (${freeze_date} + 1 day).
+      - [ ] Deploy to usegalaxy.org (${freeze_date} + 1 week).
+      - [ ] Deploy to toolshed.g2.bx.psu.edu (${freeze_date} + 1 week).
+      - [ ] [Update bioblend testing](https://github.com/galaxyproject/bioblend/commit/b74b1c302a1b8fed86786b40d7ecc3520cbadcd3) to include a ``release_${version}`` target - add ``env`` target ``- TOX_ENV=py27 GALAXY_VERSION=release_${version}`` to ``tox.ini``.
+
+- [ ] **Create Release Notes**
+
+      - [ ] Review merged PRs and ensure they all have a milestone attached. [Link](https://github.com/galaxyproject/galaxy/pulls?q=is%3Apr+is%3Amerged+no%3Amilestone)
+      - [ ] Checkout release branch
+
+            git checkout release_${version} -b ${version}_release_notes
+      - [ ] Check for obvious missing metadata in release PRs
+
+            make release-check-metadata RELEASE_CURR=${version}
+      - [ ] Bootstrap the release notes
+
+            make release-bootstrap-history RELEASE_CURR=${version}
+      - [ ] Open newly created files and manually curate major topics and release notes.
+
+            - [ ] inject 3 witty comments
+            - [ ] inject one whimsical story
+            - [ ] inject one topical reference (preferably satirical in nature) to contemporary world event
+      - [ ] Commit release notes.
+
+            git add docs/; git commit -m "Release notes for $version"; git push upstream ${version}_release_notes
+      - [ ] Open a pull request for new release note branch.
+      - [ ] Merge release note pull request.
+
+- [ ] **Do Release**
+
+      - [ ] Ensure all [blocking milestone issues](https://github.com/galaxyproject/galaxy/issues?q=is%3Aopen+is%3Aissue+milestone%3A${version}) have been resolved.
+
+            make release-check-blocking-issues RELEASE_CURR=${version}
+      - [ ] Ensure all [blocking milestone PRs](https://github.com/galaxyproject/galaxy/pulls?q=is%3Aopen+is%3Apr+milestone%3A${version}) have been merged or closed.
+
+            make release-check-blocking-prs RELEASE_CURR=${version}
+      - [ ] Ensure previous release is merged into current. (TODO: Add Makefile target for this.)
+      - [ ] Create and push release tag:
+
+            make release-create RELEASE_CURR=${version}
+
+- [ ] **Do Docker Release**
+
+      - [ ] Change the [dev branch](https://github.com/bgruening/docker-galaxy-stable/tree/dev) of the Galaxy Docker container to ${next_version}
+      - [ ] Merge dev into master
+
+- [ ] **Ensure Tool Tests use Latest Release**
+
+      - [ ]  Update GALAXY_RELEASE in https://github.com/galaxyproject/tools-iuc/blob/master/.travis.yml#L6
+
+      - [ ]  Update GALAXY_RELEASE in https://github.com/galaxyproject/tools-devteam/blob/master/.travis.yml#L6
+
+- [ ] **Announce Release**
+
+      - [ ] Verify release included in https://docs.galaxyproject.org/en/master/releases/index.html
+      - [ ] Review announcement in https://github.com/galaxyproject/galaxy/blob/dev/doc/source/releases/${version}_announce.rst
+      - [ ] Stage announcement content (Wiki, Biostars, Bit.ly link) on the announce date to capture date tags. Note: not all final content needs to be completed to do this.
+      - [ ] Create wiki *highlights* and post to http://galaxyproject.org News (w/ RSS) and NewsBriefs. [An Example](https://wiki.galaxyproject.org/News/2016_04_GalaxyRelease).
+      - [ ] Tweet docs news *highlights* via bit.ly link to https://twitter.com/galaxyproject/ (As user ``galaxyproject``, password in Galaxy password store under ``twitter.com / galaxyproject`` ). [An Example](https://twitter.com/galaxyproject/status/733029921316986881).
+      - [ ] Post *highlights* type News to Galaxy Biostars https://biostar.usegalaxy.org. [An Example](https://biostar.usegalaxy.org/p/17712/).
+      - [ ] Email *highlights* to [galaxy-dev](http://dev.list.galaxyproject.org/) and [galaxy-announce](http://announce.list.galaxyproject.org/) @lists.galaxyproject.org. [An Example](http://dev.list.galaxyproject.org/The-Galaxy-release-16-04-is-out-tp4669419.html)
+      - [ ] Adjust http://getgalaxy.org text and links to match current master branch (TODO: describe how to do this)
+
+- [ ] **Prepare for next release**
+
+      - [ ] Ensure milestone ``${next_version}`` exists.
+      - [ ] Create release issue for next version ``make release-issue RELEASE_CURR=${next_version}``.
+      - [ ] Schedule committer meeting to discuss re-alignment of priorities.
+      - [ ] Close this issue.
+
+""")
+
+# https://api.github.com/repos/galaxyproject/galaxy/pulls?base=dev&state=closed
+# https://api.github.com/repos/galaxyproject/galaxy/pulls?base=release_15.07&state=closed
+# https://api.github.com/repos/galaxyproject/galaxy/compare/release_15.05...dev
+
+
+def commit_time(commit_hash):
+    api_url = urljoin(PROJECT_API, "commits/%s" % commit_hash)
+    req = requests.get(api_url).json()
+    return datetime.datetime.strptime(req["commit"]["committer"]["date"], "%Y-%m-%dT%H:%M:%SZ")
+
+
+def release_issue(argv):
+    release_name = argv[2]
+    previous_release = _previous_release(release_name)
+    new_version_params = _next_version_params(release_name)
+    next_version = new_version_params["version"]
+    freeze_date, release_date = _release_dates(release_name)
+    release_issue_template_params = dict(
+        version=release_name,
+        next_version=next_version,
+        previous_version=previous_release,
+        freeze_date=freeze_date,
+    )
+    release_issue_contents = RELEASE_ISSUE_TEMPLATE.safe_substitute(**release_issue_template_params)
+    github = _github_client()
+    github.issues.create(
+        data=dict(
+            title="Publication of Galaxy Release v %s" % release_name,
+            body=release_issue_contents,
+        ),
+        user=PROJECT_OWNER,
+        repo=PROJECT_NAME,
+    )
+    return release_issue
+
+
+def do_release(argv):
+    release_name = argv[2]
+    release_file = _release_file(release_name + ".rst")
+    release_info = TEMPLATE % release_name
+    open(release_file, "w").write(release_info.encode("utf-8"))
+    month = int(release_name.split(".")[1])
+    month_name = calendar.month_name[month]
+    year = release_name.split(".")[0]
+
+    announce_info = ANNOUNCE_TEMPLATE.substitute(
+        month_name=month_name,
+        year=year,
+        release=release_name
+    )
+    announce_file = _release_file(release_name + "_announce.rst")
+    open(announce_file, "w").write(announce_info.encode("utf-8"))
+
+    next_version_params = _next_version_params(release_name)
+    next_version = next_version_params["version"]
+    next_release_file = _release_file(next_version + "_announce.rst")
+
+    next_announce = NEXT_TEMPLATE.substitute(**next_version_params)
+    open(next_release_file, "w").write(next_announce.encode("utf-8"))
+    releases_index = _release_file("index.rst")
+    releases_index_contents = open(releases_index, "r").read()
+    releases_index_contents = releases_index_contents.replace(".. annoucements\n", ".. annoucements\n   " + next_version + "_announce\n" )
+    with open(releases_index, "w") as f:
+        f.write(releases_index_contents)
+
+    for pr in _get_prs(release_name):
+        # 2015-06-29 18:32:13 2015-04-22 19:11:53 2015-08-12 21:15:45
+        as_dict = {
+            "title": pr.title,
+            "number": pr.number,
+            "head": pr.head,
+        }
+        main([argv[0], "--release_file", "%s.rst" % release_name, "--request", as_dict, "pr" + str(pr.number)])
+
+
+def check_release(argv):
+    github = _github_client()
+    release_name = argv[2]
+    for pr in _get_prs(release_name):
+        _text_target(github, pr)
+
+
+def check_blocking_prs(argv):
+    release_name = argv[2]
+    block = 0
+    for pr in _get_prs(release_name, state="open"):
+        print("WARN: Blocking PR| %s" % _pr_to_str(pr))
+        block = 1
+
+    sys.exit(block)
+
+
+def check_blocking_issues(argv):
+    release_name = argv[2]
+    block = 0
+    github = _github_client()
+    issues = github.issues.list_by_repo(
+        user='galaxyproject',
+        repo='galaxy',
+        state="open"
+    )
+    for page in issues:
+        for issue in page:
+            if issue.milestone and issue.milestone.title == release_name and "Publication of Galaxy Release" not in issue.title:
+                print("WARN: Blocking issue| %s" % _issue_to_str(issue))
+                block = 1
+
+    sys.exit(block)
+
+
+def _pr_to_str(pr):
+    if isinstance(pr, string_types):
+        return pr
+    return "PR #%s (%s) %s" % (pr.number, pr.title, pr.html_url)
+
+
+def _issue_to_str(pr):
+    if isinstance(pr, string_types):
+        return pr
+    return "Issue #%s (%s) %s" % (pr.number, pr.title, pr.html_url)
+
+
+def _next_version_params(release_name):
+    month = int(release_name.split(".")[1])
+    year = release_name.split(".")[0]
+    next_month = (((month - 1) + 3) % 12) + 1
+    next_month_name = calendar.month_name[next_month]
+    if next_month <= month:
+        # adding three months wrapped past December into the next year
+        next_year = int(year) + 1
+    else:
+        next_year = year
+    next_version = "%s.%02d" % (next_year, next_month)
+    freeze_date, release_date = _release_dates(next_version)
+    return dict(
+        version=next_version,
+        year=next_year,
+        month_name=next_month_name,
+        freeze_date=freeze_date,
+        release_date=release_date,
+    )
+
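+# Sanity check of the arithmetic above, worked by hand: for release "16.10",
+# month is 10, so next_month = ((9 + 3) % 12) + 1 = 1; January wraps into the
+# following year, giving next_version "17.01".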
+
+def _release_dates(version):
+    year, month = version.split(".")
+    first_of_month = datetime.date(int(year) + 2000, int(month), 1)
+    freeze_date = next_weekday(first_of_month, 0)
+    release_date = freeze_date + datetime.timedelta(21)
+    return freeze_date, release_date
+
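+# Worked example (dates computed by hand): for version "17.01", first_of_month
+# is 2017-01-01, a Sunday, so freeze_date is Monday 2017-01-02 and
+# release_date is three weeks later, 2017-01-23.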
+
+def _get_prs(release_name, state="closed"):
+    github = _github_client()
+    pull_requests = github.pull_requests.list(
+        state=state,
+        user=PROJECT_OWNER,
+        repo=PROJECT_NAME,
+    )
+    for page in pull_requests:
+        for pr in page:
+            merged_at = pr.merged_at
+            milestone = pr.milestone
+            proper_state = state != "closed" or merged_at
+            if not proper_state or not milestone or milestone['title'] != release_name:
+                continue
+            yield pr
+
+
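+# Sketch of the sub-commands main() dispatches on (first positional argument):
+#   --check-blocking-prs <release>     --check-blocking-issues <release>
+#   --create-release-issue <release>   --release <release>
+#   --check-release <release>
+# Anything else is treated as a changelog identifier (a commit hash, "pr<N>"
+# or "issue<N>"), optionally preceded by --release_file and --request.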
+def main(argv):
+    if requests is None:
+        raise Exception("Requests library not found, please pip install requests")
+    github = _github_client()
+    newest_release = None
+
+    if argv[1] == "--check-blocking-prs":
+        check_blocking_prs(argv)
+        return
+
+    if argv[1] == "--check-blocking-issues":
+        check_blocking_issues(argv)
+        return
+
+    if argv[1] == "--create-release-issue":
+        release_issue(argv)
+        return
+
+    if argv[1] == "--release":
+        do_release(argv)
+        return
+
+    if argv[1] == "--check-release":
+        check_release(argv)
+        return
+
+    if argv[1] == "--release_file":
+        newest_release = argv[2]
+        argv = [argv[0]] + argv[3:]
+
+    if argv[1] == "--request":
+        req = argv[2]
+        argv = [argv[0]] + argv[3:]
+    else:
+        req = None
+
+    if newest_release is None:
+        newest_release = sorted(os.listdir(RELEASES_PATH))[-1]
+    history_path = os.path.join(RELEASES_PATH, newest_release)
+    history = open(history_path, "r").read().decode("utf-8")
+
+    def extend(from_str, line, source=history):
+        from_str += "\n"
+        return source.replace(from_str, from_str + line + "\n")
+
+    ident = argv[1]
+
+    message = ""
+    if len(argv) > 2:
+        message = argv[2]
+    elif not (ident.startswith("pr") or ident.startswith("issue")):
+        api_url = urljoin(PROJECT_API, "commits/%s" % ident)
+        if req is None:
+            req = requests.get(api_url).json()
+        commit = req["commit"]
+        message = commit["message"]
+        message = get_first_sentence(message)
+    elif requests is not None and ident.startswith("pr"):
+        pull_request = ident[len("pr"):]
+        api_url = urljoin(PROJECT_API, "pulls/%s" % pull_request)
+        if req is None:
+            req = requests.get(api_url).json()
+        message = req["title"]
+    elif requests is not None and ident.startswith("issue"):
+        issue = ident[len("issue"):]
+        api_url = urljoin(PROJECT_API, "issues/%s" % issue)
+        if req is None:
+            req = requests.get(api_url).json()
+        message = req["title"]
+    else:
+        message = ""
+
+    text_target = "to_doc"
+    to_doc = message + " "
+
+    owner = None
+    if ident.startswith("pr"):
+        pull_request = ident[len("pr"):]
+        user = req["head"]["user"]
+        owner = user["login"]
+        if owner in DEVTEAM:
+            owner = None
+        text = ".. _Pull Request {0}: {1}/pull/{0}".format(pull_request, PROJECT_URL)
+        history = extend(".. github_links", text)
+        if owner:
+            to_doc += "\n(thanks to `@%s <https://github.com/%s>`__)." % (
+                owner, owner,
+            )
+        to_doc += "\n`Pull Request {0}`_".format(pull_request)
+        if github:
+            text_target = _text_target(github, pull_request)
+    elif ident.startswith("issue"):
+        issue = ident[len("issue"):]
+        text = ".. _Issue {0}: {1}/issues/{0}".format(issue, PROJECT_URL)
+        history = extend(".. github_links", text)
+        to_doc += "`Issue {0}`_".format(issue)
+    else:
+        short_rev = ident[:7]
+        text = ".. _{0}: {1}/commit/{0}".format(short_rev, PROJECT_URL)
+        history = extend(".. github_links", text)
+        to_doc += "{0}_".format(short_rev)
+
+    to_doc = wrap(to_doc)
+    history = extend(".. %s\n" % text_target, to_doc, history)
+    open(history_path, "w").write(history.encode("utf-8"))
+
+
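+# _text_target maps a PR's GitHub labels to the release-notes section marker
+# its entry is filed under: "major" plus an enhancement-type label gives
+# "major_feature"; kind/feature -> "feature"; kind/enhancement ->
+# "enhancement"; kind/testing or kind/refactoring -> "small_enhancement";
+# "major" plus kind/bug -> "major_bug"; kind/bug alone -> "bug". PRs labeled
+# "minor" or "merge" are skipped entirely.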
+def _text_target(github, pull_request):
+    labels = []
+    pr_number = None
+    if isinstance(pull_request, string_types):
+        pr_number = pull_request
+    else:
+        pr_number = pull_request.number
+
+    try:
+        labels = github.issues.labels.list_by_issue(int(pr_number), user=PROJECT_OWNER, repo=PROJECT_NAME)
+    except Exception as e:
+        print(e)
+    is_bug = is_enhancement = is_feature = is_minor = is_major = is_merge = is_small_enhancement = False
+    if len(labels) == 0:
+        print('No labels found for %s' % pr_number)
+        return None
+    for label in labels:
+        label_name = label.name.lower()
+        if label_name == "minor":
+            is_minor = True
+        elif label_name == "major":
+            is_major = True
+        elif label_name == "merge":
+            is_merge = True
+        elif label_name == "kind/bug":
+            is_bug = True
+        elif label_name == "kind/feature":
+            is_feature = True
+        elif label_name == "kind/enhancement":
+            is_enhancement = True
+        elif label_name in ["kind/testing", "kind/refactoring"]:
+            is_small_enhancement = True
+
+    is_some_kind_of_enhancement = is_enhancement or is_feature or is_small_enhancement
+
+    if not (is_bug or is_some_kind_of_enhancement or is_minor or is_merge):
+        print("No kind/ or minor or merge label found for %s" % _pr_to_str(pull_request))
+        return None
+
+    if is_minor or is_merge:
+        return None
+
+    if is_some_kind_of_enhancement and is_major:
+        text_target = "major_feature"
+    elif is_feature:
+        text_target = "feature"
+    elif is_enhancement:
+        text_target = "enhancement"
+    elif is_some_kind_of_enhancement:
+        text_target = "small_enhancement"
+    elif is_major:
+        text_target = "major_bug"
+    elif is_bug:
+        text_target = "bug"
+    else:
+        print("Logic problem, cannot determine section for %s" % _pr_to_str(pull_request))
+        text_target = None
+    return text_target
+
+
+def _previous_release(to):
+    previous_release = None
+    for release in _releases():
+        if release == to:
+            break
+
+        previous_release = release
+
+    return previous_release
+
+
+def _latest_release():
+    return _releases()[-1]
+
+
+def _releases():
+    all_files = sorted(os.listdir(RELEASES_PATH))
+    release_note_file_pattern = re.compile(r"\d+\.\d+\.rst$")
+    release_note_files = [f for f in all_files if release_note_file_pattern.match(f)]
+    return sorted(f[:-len(".rst")] for f in release_note_files)
+
+
+def _get_major_version():
+    with open(GALAXY_VERSION_FILE, 'rb') as f:
+        init_contents = f.read().decode('utf-8')
+
+        def get_var(var_name):
+            pattern = re.compile(r'%s\s+=\s+(.*)' % var_name)
+            match = pattern.search(init_contents).group(1)
+            return str(ast.literal_eval(match))
+        return get_var("VERSION_MAJOR")
+
+
+def _get_release_name(argv):
+    if len(argv) > 2:
+        return argv[2]
+    else:
+        return _get_major_version()
+
+
+def _github_client():
+    if Github:
+        github_json = os.path.expanduser("~/.github.json")
+        github = Github(**json.load(open(github_json, "r")))
+    else:
+        github = None
+    return github
+
+
+def _release_file(release):
+    releases_path = os.path.join(PROJECT_DIRECTORY, "doc", "source", "releases")
+    if release is None:
+        release = sorted(os.listdir(releases_path))[-1]
+    history_path = os.path.join(releases_path, release)
+    return history_path
+
+
+def get_first_sentence(message):
+    first_line = message.split("\n")[0]
+    return first_line
+
+
+def process_sentence(message):
+    # Strip tags like [15.07].
+    message = re.sub(r"^\s*\[.*\]\s*", "", message)
+    # Link issues and pull requests...
+    issue_url = "https://github.com/%s/%s/issues" % (PROJECT_OWNER, PROJECT_NAME)
+    message = re.sub(r'#(\d+)', r'`#\1 <%s/\1>`__' % issue_url, message)
+    return message
+
+
+def wrap(message):
+    message = process_sentence(message)
+    wrapper = textwrap.TextWrapper(initial_indent="* ")
+    wrapper.subsequent_indent = '  '
+    wrapper.width = 78
+    message_lines = message.splitlines()
+    first_lines = "\n".join(wrapper.wrap(message_lines[0]))
+    wrapper.initial_indent = "  "
+    rest_lines = "\n".join(["\n".join(wrapper.wrap(m)) for m in message_lines[1:]])
+    return first_lines + ("\n" + rest_lines if rest_lines else "")
+
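+# As an illustration (output reconstructed by hand, owner/repo elided):
+# wrap("Fixed a thing. See #123") yields a 78-column bulleted block like
+#   * Fixed a thing. See `#123 <https://github.com/<owner>/<repo>/issues/123>`__
+# with any continuation lines indented by two spaces.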
+
+def next_weekday(d, weekday):
+    """ Return the next week day (0 for Monday, 6 for Sunday) starting from ``d``. """
+    days_ahead = weekday - d.weekday()
+    if days_ahead <= 0:  # Target day already happened this week
+        days_ahead += 7
+    return d + datetime.timedelta(days_ahead)
+
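+# e.g. next_weekday(datetime.date(2016, 10, 1), 0) -> datetime.date(2016, 10, 3),
+# since 2016-10-01 is a Saturday and the next Monday is two days later. Note
+# that if ``d`` is already the requested weekday, the following week's day is
+# returned.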
+
+if __name__ == "__main__":
+    main(sys.argv)
diff --git a/scripts/build_toolbox.py b/scripts/build_toolbox.py
new file mode 100644
index 0000000..7de2176
--- /dev/null
+++ b/scripts/build_toolbox.py
@@ -0,0 +1,172 @@
+from __future__ import print_function
+import os
+from xml.etree import ElementTree as ET
+
+
+def prettify(elem):
+    from xml.dom import minidom
+    rough_string = ET.tostring(elem, 'utf-8')
+    repaired = minidom.parseString(rough_string)
+    return repaired.toprettyxml(indent='  ')
+
+
+# Build a list of all toolconf xml files in the tools directory
+def getfilenamelist(startdir):
+    filenamelist = []
+    for root, dirs, files in os.walk(startdir):
+        for fn in files:
+            fullfn = os.path.join(root, fn)
+            if fn.endswith('toolconf.xml'):
+                filenamelist.append(fullfn)
+            elif fn.endswith('.xml'):
+                try:
+                    doc = ET.parse(fullfn)
+                except Exception:
+                    print("An OOPS on", fullfn)
+                    raise
+                rootelement = doc.getroot()
+                # Only interpret those 'tool' XML files that have
+                # the 'toolboxposition' element.
+                if rootelement.tag == 'tool':
+                    if rootelement.findall('toolboxposition'):
+                        filenamelist.append(fullfn)
+                    else:
+                        print("DBG> tool config does not have a <toolboxposition>:", fullfn)
+    return filenamelist
+
+
+class ToolBox(object):
+    def __init__(self):
+        from collections import defaultdict
+        self.tools = defaultdict(list)
+        self.sectionorders = {}
+
+    def add(self, toolelement, toolboxpositionelement):
+        section = toolboxpositionelement.attrib.get('section', '')
+        label = toolboxpositionelement.attrib.get('label', '')
+        order = int(toolboxpositionelement.attrib.get('order', '0'))
+        sectionorder = int(toolboxpositionelement.attrib.get('sectionorder', '0'))
+
+        # If this is the first time we encounter the section, store its order
+        # number. If we have seen it before, ignore the given order and use
+        # the stored one instead
+        if section not in self.sectionorders:
+            self.sectionorders[section] = sectionorder
+        else:
+            sectionorder = self.sectionorders[section]
+
+        # Sort key: the zero-padded section order is prefixed to the section
+        # name so that plain tuple sorting puts sections in their configured
+        # order before comparing labels and tool order.
+        self.tools[("%05d-%s" % (sectionorder, section), label, order, section)].append(toolelement)
+
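+    # For instance, a toolboxposition with section="Get Data",
+    # sectionorder="1", label="" and order="0" files its tool under the key
+    # ("00001-Get Data", "", 0, "Get Data"), so plain tuple sorting in
+    # addElementsTo() orders by section order, then label, then tool order.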
+    def addElementsTo(self, rootelement):
+        toolkeys = list(self.tools.keys())
+        toolkeys.sort()
+
+        # Initialize the loop: IDs to zero, current section and label to ''
+        currentsection = ''
+        sectionnumber = 0
+        currentlabel = ''
+        labelnumber = 0
+        for toolkey in toolkeys:
+            section = toolkey[3]
+            # If we change sections, add the new section to the XML tree,
+            # and start adding stuff to the new section. If the new section
+            # is '', start adding stuff to the root again.
+            if currentsection != section:
+                currentsection = section
+                # Start the section with empty label
+                currentlabel = ''
+                if section:
+                    sectionnumber += 1
+                    attrib = {'name': section,
+                              'id': "section%d" % sectionnumber}
+                    sectionelement = ET.Element('section', attrib)
+                    rootelement.append(sectionelement)
+                    currentelement = sectionelement
+                else:
+                    currentelement = rootelement
+            label = toolkey[1]
+
+            # If we change labels, add the new label to the XML tree
+            if currentlabel != label:
+                currentlabel = label
+                if label:
+                    labelnumber += 1
+                    attrib = {'text': label,
+                              'id': "label%d" % labelnumber}
+                    labelelement = ET.Element('label', attrib)
+                    currentelement.append(labelelement)
+
+            # Add the tools that are in this place
+            for toolelement in self.tools[toolkey]:
+                currentelement.append(toolelement)
+
+
+# Analyze all the toolconf xml files given in the filenamelist
+# Build a list of all sections
+def scanfiles(filenamelist):
+    # Build an empty tool box
+    toolbox = ToolBox()
+
+    # Read each of the files in the list
+    for fn in filenamelist:
+        doc = ET.parse(fn)
+        root = doc.getroot()
+
+        if root.tag == 'tool':
+            toolelements = [root]
+        else:
+            toolelements = doc.findall('tool')
+
+        for toolelement in toolelements:
+            # Figure out where the tool XML file is, absolute path.
+            if 'file' in toolelement.attrib:
+                # It is mentioned, we need to make it absolute
+                fileattrib = os.path.join(os.getcwd(),
+                                          os.path.dirname(fn),
+                                          toolelement.attrib['file'])
+            else:
+                # It is the current file
+                fileattrib = os.path.join(os.getcwd(), fn)
+
+            # Store the file in the attributes of the new tool element
+            attrib = {'file': fileattrib}
+
+            # Add the tags into the attributes
+            tags = toolelement.find('tags')
+            if tags:
+                tagarray = []
+                for tag in tags.findall('tag'):
+                    tagarray.append(tag.text)
+                attrib['tags'] = ",".join(tagarray)
+            else:
+                print("DBG> No tags in", fn)
+
+            # Build the tool element
+            newtoolelement = ET.Element('tool', attrib)
+            toolboxpositionelements = toolelement.findall('toolboxposition')
+            if not toolboxpositionelements:
+                print("DBG> %s has no toolboxposition" % fn)
+            else:
+                for toolboxpositionelement in toolboxpositionelements:
+                    toolbox.add(newtoolelement, toolboxpositionelement)
+    return toolbox
+
+
+def assemble():
+    filenamelist = []
+    for directorytree in ['tools']:
+        filenamelist.extend(getfilenamelist(directorytree))
+    filenamelist.sort()
+
+    toolbox = scanfiles(filenamelist)
+
+    toolboxelement = ET.Element('toolbox')
+
+    toolbox.addElementsTo(toolboxelement)
+
+    print(prettify(toolboxelement))
+
+
+if __name__ == "__main__":
+    assemble()
diff --git a/scripts/build_universe_config.py b/scripts/build_universe_config.py
new file mode 100644
index 0000000..da54f11
--- /dev/null
+++ b/scripts/build_universe_config.py
@@ -0,0 +1,34 @@
+from ConfigParser import ConfigParser
+from os import listdir
+from os.path import join
+from re import match
+from sys import argv
+
+
+def merge():
+    """
+    Merges all .ini files in a specified directory into a single file
+    (defaults to ./config/galaxy.ini).
+    """
+    if len(argv) < 2:
+        message = "%s: Must specify directory to merge configuration files from." % argv[0]
+        raise Exception(message)
+    conf_directory = argv[1]
+    conf_files = [f for f in listdir(conf_directory) if match(r'.*\.ini', f)]
+    conf_files.sort()
+
+    parser = ConfigParser()
+    for conf_file in conf_files:
+        parser.read([join(conf_directory, conf_file)])
+    # TODO: Expand environment variables here; that would
+    # also make Galaxy much easier to configure.
+
+    destination = "config/galaxy.ini"
+    if len(argv) > 2:
+        destination = argv[2]
+
+    parser.write(open(destination, 'w'))
+
+
+if __name__ == '__main__':
+    merge()
diff --git a/scripts/check_eggs.py b/scripts/check_eggs.py
new file mode 120000
index 0000000..9ed1ea6
--- /dev/null
+++ b/scripts/check_eggs.py
@@ -0,0 +1 @@
+fetch_eggs.py
\ No newline at end of file
diff --git a/scripts/check_galaxy.py b/scripts/check_galaxy.py
new file mode 100755
index 0000000..5e3a4a9
--- /dev/null
+++ b/scripts/check_galaxy.py
@@ -0,0 +1,415 @@
+#!/usr/bin/env python
+"""
+check_galaxy can be run by hand, although it is meant to run from cron
+via the check_galaxy.sh script in Galaxy's cron/ directory.
+"""
+from __future__ import print_function
+import filecmp
+import formatter
+import getopt
+import htmllib
+import os
+import socket
+import sys
+import tempfile
+import time
+
+import twill
+import twill.commands as tc
+
+# options
+if "DEBUG" in os.environ:
+    debug = os.environ["DEBUG"]
+else:
+    debug = False
+scripts_dir = os.path.abspath( os.path.dirname( sys.argv[0] ) )
+test_data_dir = os.path.join( scripts_dir, os.pardir, "test-data" )
+# What tools to run; the structure of each entry is described just after the dict.
+tools = {
+    "gops_intersect_1":
+    [
+        {
+            "inputs":
+            (
+                os.path.join( test_data_dir, "1.bed" ),
+                os.path.join( test_data_dir, "2.bed" )
+            )
+        },
+        { "check_file": os.path.join( test_data_dir, "gops_intersect_out.bed" ) },
+        {
+            "tool_run_options":
+            {
+                "input1": "1.bed",
+                "input2": "2.bed",
+                "min": "1",
+                "returntype": ""
+            }
+        }
+    ]
+}
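+# Each entry above maps a tool id to a list of single-key dicts: "inputs"
+# (files uploaded before the run), "check_file" (the expected output to diff
+# against) and "tool_run_options" (form fields set on the tool's run page).
+# Additional tools could be checked by appending entries of the same shape.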
+
+
+# handle arg(s)
+def usage():
+    sys.exit("usage: check_galaxy.py <server>")
+
+
+try:
+    opts, args = getopt.getopt( sys.argv[1:], 'n' )
+except getopt.GetoptError as e:
+    print(str(e))
+    usage()
+if len( args ) < 1:
+    usage()
+server = args[0]
+if server.endswith(".g2.bx.psu.edu"):
+    if debug:
+        print("Checking a PSU Galaxy server, using maint file")
+    maint = "/errordocument/502/%s/maint" % args[0].split('.', 1)[0]
+else:
+    maint = None
+new_history = False
+for o, a in opts:
+    if o == "-n":
+        if debug:
+            print("Specified -n, will create a new history")
+        new_history = True
+    else:
+        usage()
+
+# state information
+var_dir = os.path.join( os.path.expanduser('~'), ".check_galaxy", server )
+if not os.access( var_dir, os.F_OK ):
+    os.makedirs( var_dir, 0o700 )
+
+# get user/pass
+login_file = os.path.join( var_dir, "login" )
+try:
+    f = open( login_file, 'r' )
+except IOError:
+    message = """Please create the file:
+%s
+This should contain a username and password to log in to Galaxy with,
+on one line, separated by whitespace, e.g.:
+
+check_galaxy at example.com password
+
+If the user does not exist, check_galaxy will create it for you.""" % login_file
+    sys.exit(message)
+( username, password ) = f.readline().split()
+
+# the twill browser has no default socket timeout, so set one
+socket.setdefaulttimeout(300)
+
+# user-agent
+tc.agent("Mozilla/5.0 (compatible; check_galaxy/0.1)")
+tc.config('use_tidy', 0)
+
+
+class Browser:
+
+    def __init__(self):
+        self.server = server
+        self.maint = maint
+        self.tool = None
+        self.tool_opts = None
+        self.id = None
+        self.status = None
+        self.check_file = None
+        self.hid = None
+        self.cookie_jar = os.path.join( var_dir, "cookie_jar" )
+        dprint("cookie jar path: %s" % self.cookie_jar)
+        if not os.access(self.cookie_jar, os.R_OK):
+            dprint("no cookie jar at above path, creating")
+            tc.save_cookies(self.cookie_jar)
+        tc.load_cookies(self.cookie_jar)
+
+    def get(self, path):
+        tc.go("http://%s%s" % (self.server, path))
+        tc.code(200)
+
+    def reset(self):
+        self.tool = None
+        self.tool_opts = None
+        self.id = None
+        self.status = None
+        self.check_file = None
+        self.delete_datasets()
+        self.get("/root/history")
+        p = didParser()
+        p.feed(tc.browser.get_html())
+        if len(p.dids) > 0:
+            print("Remaining datasets ids:", " ".join( p.dids ))
+            raise Exception("History still contains datasets after attempting to delete them")
+        if new_history:
+            self.get("/history/delete_current")
+            tc.save_cookies(self.cookie_jar)
+
+    def check_redir(self, url):
+        try:
+            tc.get_browser()._browser.set_handle_redirect(False)
+            tc.go(url)
+            tc.code(302)
+            tc.get_browser()._browser.set_handle_redirect(True)
+            dprint( "%s is returning redirect (302)" % url )
+            return(True)
+        except twill.errors.TwillAssertionError as e:
+            tc.get_browser()._browser.set_handle_redirect(True)
+            dprint( "%s is not returning redirect (302): %s" % (url, e) )
+            code = tc.browser.get_code()
+            if code == 502:
+                is_maint = self.check_maint()
+                if is_maint:
+                    dprint( "Galaxy is down, but a maint file was found, so not sending alert" )
+                    sys.exit(0)
+                else:
+                    sys.exit("Galaxy is down (code 502)")
+            return(False)
+
+    # checks for a maint file
+    def check_maint(self):
+        if self.maint is None:
+            # dprint( "Warning: unable to check maint file for %s" % self.server )
+            return(False)
+        try:
+            self.get(self.maint)
+            return(True)
+        except twill.errors.TwillAssertionError:
+            return(False)
+
+    def login(self, user, pw):
+        self.get("/user/login")
+        tc.fv("1", "email", user)
+        tc.fv("1", "password", pw)
+        tc.submit("Login")
+        tc.code(200)
+        if len(tc.get_browser().get_all_forms()) > 0:
+            # uh ohs, fail
+            p = userParser()
+            p.feed(tc.browser.get_html())
+            if p.no_user:
+                dprint("user does not exist, will try creating")
+                self.create_user(user, pw)
+            elif p.bad_pw:
+                raise Exception("Password is incorrect")
+            else:
+                raise Exception("Unknown error logging in")
+        tc.save_cookies(self.cookie_jar)
+
+    def create_user(self, user, pw):
+        self.get("/user/create")
+        tc.fv("1", "email", user)
+        tc.fv("1", "password", pw)
+        tc.fv("1", "confirm", pw)
+        tc.submit("Submit")
+        tc.code(200)
+        if len(tc.get_browser().get_all_forms()) > 0:
+            p = userParser()
+            p.feed(tc.browser.get_html())
+            if p.already_exists:
+                raise Exception('The user you were trying to create already exists')
+
+    def upload(self, file):
+        self.get("/tool_runner/index?tool_id=upload1")
+        tc.fv("1", "file_type", "bed")
+        tc.formfile("1", "file_data", file)
+        tc.submit("runtool_btn")
+        tc.code(200)
+
+    def runtool(self):
+        self.get("/tool_runner/index?tool_id=%s" % self.tool)
+        for k, v in self.tool_opts.items():
+            tc.fv("1", k, v)
+        tc.submit("runtool_btn")
+        tc.code(200)
+
+    def wait(self):
+        sleep_amount = 1
+        count = 0
+        maxiter = 16
+        while count < maxiter:
+            count += 1
+            self.get("/root/history")
+            page = tc.browser.get_html()
+            if page.find( '<!-- running: do not change this comment, used by TwillTestCase.wait -->' ) > -1:
+                time.sleep( sleep_amount )
+                sleep_amount += 1
+            else:
+                break
+        if count == maxiter:
+            raise Exception("Tool never finished")
+
+    def check_status(self):
+        self.get("/root/history")
+        p = historyParser()
+        p.feed(tc.browser.get_html())
+        if p.status != "ok":
+            raise Exception("JOB %s NOT OK: %s" % (p.id, p.status))
+        self.id = p.id
+        self.status = p.status
+        # return((p.id, p.status))
+
+    def diff(self):
+        self.get("/datasets/%s/display/display?to_ext=bed" % self.id)
+        data = tc.browser.get_html()
+        tmp = tempfile.mkstemp()
+        dprint("tmp file: %s" % tmp[1])
+        tmpfh = os.fdopen(tmp[0], 'w')
+        tmpfh.write(data)
+        tmpfh.close()
+        if filecmp.cmp(tmp[1], self.check_file):
+            dprint("Tool output is as expected")
+        else:
+            if not debug:
+                os.remove(tmp[1])
+            raise Exception("Tool output differs from expected")
+        if not debug:
+            os.remove(tmp[1])
+
+    def delete_datasets(self):
+        self.get("/root/history")
+        p = didParser()
+        p.feed(tc.browser.get_html())
+        dids = p.dids
+        for did in dids:
+            self.get("/datasets/%s/delete" % did)
+
+    def check_if_logged_in(self):
+        self.get("/user?cntrller=user")
+        p = loggedinParser()
+        p.feed(tc.browser.get_html())
+        return p.logged_in
+
+
+class userParser(htmllib.HTMLParser):
+    def __init__(self):
+        htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+        self.in_span = False
+        self.in_div = False
+        self.no_user = False
+        self.bad_pw = False
+        self.already_exists = False
+
+    def start_span(self, attrs):
+        self.in_span = True
+
+    def start_div(self, attrs):
+        self.in_div = True
+
+    def end_span(self):
+        self.in_span = False
+
+    def end_div(self):
+        self.in_div = False
+
+    def handle_data(self, data):
+        if self.in_span or self.in_div:
+            if data == "No such user (please note that login is case sensitive)":
+                self.no_user = True
+            elif data == "Invalid password":
+                self.bad_pw = True
+            elif data == "User with that email already exists":
+                self.already_exists = True
+
+
+class historyParser(htmllib.HTMLParser):
+    def __init__(self):
+        htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+        self.status = None
+        self.id = None
+
+    def start_div(self, attrs):
+        # find the top history item
+        for i in attrs:
+            if i[0] == "class" and i[1].startswith("historyItemWrapper historyItem historyItem-"):
+                self.status = i[1].rsplit("historyItemWrapper historyItem historyItem-", 1)[1]
+                dprint("status: %s" % self.status)
+            if i[0] == "id" and i[1].startswith("historyItem-"):
+                self.id = i[1].rsplit("historyItem-", 1)[1]
+                dprint("id: %s" % self.id)
+        if self.status is not None:
+            self.reset()
+
+
+class didParser(htmllib.HTMLParser):
+    def __init__(self):
+        htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+        self.dids = []
+
+    def start_div(self, attrs):
+        for i in attrs:
+            if i[0] == "id" and i[1].startswith("historyItemContainer-"):
+                self.dids.append( i[1].rsplit("historyItemContainer-", 1)[1] )
+                dprint("got a dataset id: %s" % self.dids[-1])
+
+
+class loggedinParser(htmllib.HTMLParser):
+    def __init__(self):
+        htmllib.HTMLParser.__init__(self, formatter.NullFormatter())
+        self.in_p = False
+        self.logged_in = False
+
+    def start_p(self, attrs):
+        self.in_p = True
+
+    def end_p(self):
+        self.in_p = False
+
+    def handle_data(self, data):
+        if self.in_p:
+            if data == "You are currently not logged in.":
+                self.logged_in = False
+            elif data.startswith( "You are currently logged in as " ):
+                self.logged_in = True
+
+
+def dprint(message):
+    if debug:
+        print(message)
+
+
+if __name__ == "__main__":
+    dprint("checking %s" % server)
+
+    b = Browser()
+
+    # login (or not)
+    if b.check_if_logged_in():
+        dprint("we are already logged in (via cookies), hooray!")
+    else:
+        dprint("not logged in... logging in")
+        b.login(username, password)
+
+    for tool, params in tools.items():
+
+        check_file = ""
+
+        # make sure history and state is clean
+        b.reset()
+        b.tool = tool
+
+        # get all the tool run conditions
+        for param_dict in params:
+            for k, v in param_dict.items():
+                if k == 'inputs':
+                    for input_file in v:
+                        b.upload(input_file)
+                elif k == 'check_file':
+                    b.check_file = v
+                elif k == 'tool_run_options':
+                    b.tool_opts = v
+                else:
+                    raise Exception("Unknown key in tools dict: %s" % k)
+
+        b.runtool()
+        b.wait()
+        b.check_status()
+        b.diff()
+        b.delete_datasets()
+
+        # by this point, everything else has succeeded.  there should be no maint.
+        is_maint = b.check_maint()
+        if is_maint:
+            sys.exit("Galaxy is up and fully functional, but a maint file is in place.")
+
+    sys.exit(0)
diff --git a/scripts/check_python.py b/scripts/check_python.py
new file mode 100644
index 0000000..f23a12f
--- /dev/null
+++ b/scripts/check_python.py
@@ -0,0 +1,29 @@
+"""
+If the currently installed Python version is not 2.7, prints an error
+message to stderr and exits with status 1.
+"""
+from __future__ import print_function
+import sys
+
+msg = """ERROR: Your Python version is: %s
+Galaxy is currently supported on Python 2.7 only.  To run Galaxy,
+please download and install a supported version from python.org.  If a
+supported version is installed but is not your default, getgalaxy.org
+contains instructions on how to force Galaxy to use a different version.""" % sys.version[:3]
+
+
+def check_python():
+    try:
+        assert sys.version_info[:2] == ( 2, 7 )
+    except AssertionError:
+        print(msg, file=sys.stderr)
+        raise
+
+
+if __name__ == '__main__':
+    rval = 0
+    try:
+        check_python()
+    except Exception:
+        rval = 1
+    sys.exit( rval )
diff --git a/scripts/cleanup_datasets/admin_cleanup_datasets.py b/scripts/cleanup_datasets/admin_cleanup_datasets.py
new file mode 100755
index 0000000..4033f2c
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_datasets.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python
+"""
+Mark datasets as deleted that are older than specified cutoff
+and (optionally) with a tool_id that matches the specified search
+string.
+
+This script is useful for administrators to cleanup after users who
+leave many old datasets around.  It was modeled after the cleanup_datasets.py
+script originally distributed with Galaxy.
+
+Basic Usage:
+    admin_cleanup_datasets.py galaxy.ini -d 60 \
+        --template=email_template.txt
+
+Required Arguments:
+    config_file - the Galaxy configuration file (galaxy.ini)
+
+Optional Arguments:
+    -d --days - number of days old the dataset must be (default: 60)
+    --tool_id - string to search for in dataset tool_id (default: all)
+    --template - Mako template file to use for email notification
+    -i --info_only - Print results, but don't email or delete anything
+    -e --email_only - Email notifications, but don't delete anything
+        Useful for notifying users of pending deletion
+
+    --smtp - Specify smtp server
+        If not specified, use smtp settings specified in config file
+    --fromaddr - Specify from address
+        If not specified, use email_from specified in config file
+
+Email Template Variables:
+   cutoff - the cutoff in days
+   email - the user's email address
+   datasets - a list of tuples containing 'dataset' and 'history' names
+
+
+Author: Lance Parsons (lparsons at princeton.edu)
+"""
+from __future__ import print_function
+
+import logging
+import os
+import shutil
+import sys
+import time
+from collections import defaultdict
+from datetime import datetime, timedelta
+from optparse import OptionParser
+from time import strftime
+
+import sqlalchemy as sa
+from mako.template import Template
+from six.moves import configparser
+from sqlalchemy import and_, false
+
+import galaxy.config
+import galaxy.model.mapping
+import galaxy.util
+
+from cleanup_datasets import CleanupDatasetsApplication
+
+log = logging.getLogger()
+log.setLevel(10)
+log.addHandler(logging.StreamHandler(sys.stdout))
+
+assert sys.version_info[:2] >= (2, 4)
+
+
+def main():
+    """
+    Datasets that are older than the specified cutoff and for which the tool_id
+    contains the specified text will be marked as deleted in user's history and
+    the user will be notified by email using the specified template file.
+    """
+    usage = "usage: %prog [options] galaxy.ini"
+    parser = OptionParser(usage=usage)
+    parser.add_option("-d", "--days", dest="days", action="store",
+                      type="int", help="number of days (60)", default=60)
+    parser.add_option("--tool_id", default=None,
+                      help="Text to match against tool_id"
+                      "Default: match all")
+    parser.add_option("--template", default=None,
+                      help="Mako Template file to use as email "
+                      "Variables are 'cutoff' for the cutoff in days, "
+                      "'email' for users email and "
+                      "'datasets' which is a list of tuples "
+                      "containing 'dataset' and 'history' names. "
+                      "Default: admin_cleanup_deletion_template.txt")
+    parser.add_option("-i", "--info_only", action="store_true",
+                      dest="info_only", help="info about the requested action",
+                      default=False)
+    parser.add_option("-e", "--email_only", action="store_true",
+                      dest="email_only", help="Send emails only, don't delete",
+                      default=False)
+    parser.add_option("--smtp", default=None,
+                      help="SMTP Server to use to send email. "
+                      "Default: [read from galaxy ini file]")
+    parser.add_option("--fromaddr", default=None,
+                      help="From address to use to send email. "
+                      "Default: [read from galaxy ini file]")
+    (options, args) = parser.parse_args()
+    if len(args) != 1:
+        parser.print_help()
+        sys.exit()
+    ini_file = args[0]
+
+    config_parser = configparser.ConfigParser({'here': os.getcwd()})
+    config_parser.read(ini_file)
+    config_dict = {}
+    for key, value in config_parser.items("app:main"):
+        config_dict[key] = value
+
+    if options.smtp is not None:
+        config_dict['smtp_server'] = options.smtp
+    if config_dict.get('smtp_server') is None:
+        parser.error("SMTP Server must be specified as an option (--smtp) "
+                     "or in the config file (smtp_server)")
+
+    if options.fromaddr is not None:
+        config_dict['email_from'] = options.fromaddr
+    if config_dict.get('email_from') is None:
+        parser.error("From address must be specified as an option "
+                     "(--fromaddr) or in the config file "
+                     "(email_from)")
+
+    scriptdir = os.path.dirname(os.path.abspath(__file__))
+    template_file = options.template
+    if template_file is None:
+        default_template = os.path.join(scriptdir,
+                                        'admin_cleanup_deletion_template.txt')
+        sample_template_file = "%s.sample" % default_template
+        if os.path.exists(default_template):
+            template_file = default_template
+        elif os.path.exists(sample_template_file):
+            print("Copying %s to %s" % (sample_template_file, default_template))
+            shutil.copyfile(sample_template_file, default_template)
+            template_file = default_template
+        else:
+            parser.error("Default template (%s) or sample template (%s) not "
+                         "found, please specify template as an option "
+                         "(--template)." % default_template,
+                         sample_template_file)
+    elif not os.path.exists(template_file):
+        parser.error("Specified template file (%s) not found." % template_file)
+
+    config = galaxy.config.Configuration(**config_dict)
+
+    app = CleanupDatasetsApplication(config)
+    cutoff_time = datetime.utcnow() - timedelta(days=options.days)
+    now = strftime("%Y-%m-%d %H:%M:%S")
+
+    print("##########################################")
+    print("\n# %s - Handling stuff older than %i days" % (now, options.days))
+
+    if options.info_only:
+        print("# Displaying info only ( --info_only )\n")
+    elif options.email_only:
+        print("# Sending emails only, not deleting ( --email_only )\n")
+
+    administrative_delete_datasets(
+        app, cutoff_time, options.days, tool_id=options.tool_id,
+        template_file=template_file, config=config,
+        email_only=options.email_only, info_only=options.info_only)
+    app.shutdown()
+    sys.exit(0)
+
+
+def administrative_delete_datasets(app, cutoff_time, cutoff_days,
+                                   tool_id, template_file,
+                                   config, email_only=False,
+                                   info_only=False):
+    # Marks dataset history association deleted and email users
+    start = time.time()
+    # Get HDAs older than cutoff time (ignore tool_id at this point)
+    # We really only need the id column here, but sqlalchemy barfs when
+    # trying to select only 1 column
+    hda_ids_query = sa.select(
+        (app.model.HistoryDatasetAssociation.table.c.id,
+         app.model.HistoryDatasetAssociation.table.c.deleted),
+        whereclause=and_(
+            app.model.Dataset.table.c.deleted == false(),
+            app.model.HistoryDatasetAssociation.table.c.update_time < cutoff_time,
+            app.model.HistoryDatasetAssociation.table.c.deleted == false()),
+        from_obj=[sa.outerjoin(
+                  app.model.Dataset.table,
+                  app.model.HistoryDatasetAssociation.table)])
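+    # Roughly the following SQL, assuming Galaxy's default table names:
+    #   SELECT hda.id, hda.deleted
+    #   FROM dataset LEFT OUTER JOIN history_dataset_association AS hda
+    #     ON dataset.id = hda.dataset_id
+    #   WHERE dataset.deleted = false AND hda.deleted = false
+    #     AND hda.update_time < :cutoff_time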
+
+    # Add all datasets associated with Histories to our list
+    hda_ids = []
+    hda_ids.extend(
+        [row.id for row in hda_ids_query.execute()])
+
+    # Now find the tool_id that generated the dataset (even if it was copied)
+    tool_matched_ids = []
+    if tool_id is not None:
+        for hda_id in hda_ids:
+            this_tool_id = _get_tool_id_for_hda(app, hda_id)
+            if this_tool_id is not None and tool_id in this_tool_id:
+                tool_matched_ids.append(hda_id)
+        hda_ids = tool_matched_ids
+
+    deleted_instance_count = 0
+    user_notifications = defaultdict(list)
+
+    # Process each of the Dataset objects
+    for hda_id in hda_ids:
+        user_query = sa.select(
+            [app.model.HistoryDatasetAssociation.table,
+             app.model.History.table,
+             app.model.User.table],
+            whereclause=and_(
+                app.model.HistoryDatasetAssociation.table.c.id == hda_id),
+            from_obj=[sa.join(app.model.User.table,
+                              app.model.History.table)
+                      .join(app.model.HistoryDatasetAssociation.table)],
+            use_labels=True)
+        for result in user_query.execute():
+            user_notifications[result[app.model.User.table.c.email]].append(
+                (result[app.model.HistoryDatasetAssociation.table.c.name],
+                 result[app.model.History.table.c.name]))
+            deleted_instance_count += 1
+            if not info_only and not email_only:
+                # Get the HistoryDatasetAssociation objects
+                hda = app.sa_session.query(
+                    app.model.HistoryDatasetAssociation).get(hda_id)
+                if not hda.deleted:
+                    # Mark the HistoryDatasetAssociation as deleted
+                    hda.deleted = True
+                    app.sa_session.add(hda)
+                    print("Marked HistoryDatasetAssociation id %d as "
+                          "deleted" % hda.id)
+                app.sa_session.flush()
+
+    emailtemplate = Template(filename=template_file)
+    for (email, dataset_list) in user_notifications.items():
+        msgtext = emailtemplate.render(email=email,
+                                       datasets=dataset_list,
+                                       cutoff=cutoff_days)
+        subject = "Galaxy Server Cleanup " \
+            "- %d datasets DELETED" % len(dataset_list)
+        fromaddr = config.email_from
+        print()
+        print("From: %s" % fromaddr)
+        print("To: %s" % email)
+        print("Subject: %s" % subject)
+        print("----------")
+        print(msgtext)
+        if not info_only:
+            galaxy.util.send_mail(fromaddr, email, subject,
+                                  msgtext, config)
+
+    stop = time.time()
+    print()
+    print("Marked %d dataset instances as deleted" % deleted_instance_count)
+    print("Total elapsed time: ", stop - start)
+    print("##########################################")
+
+
+def _get_tool_id_for_hda(app, hda_id):
+    # TODO: some datasets don't seem to have a JobToOutputDatasetAssociation
+    # entry or a copied_from association
+    if hda_id is None:
+        return None
+    job = app.sa_session.query(app.model.Job).\
+        join(app.model.JobToOutputDatasetAssociation).\
+        filter(app.model.JobToOutputDatasetAssociation.table.c.dataset_id ==
+               hda_id).first()
+    if job is not None:
+        return job.tool_id
+    else:
+        hda = app.sa_session.query(app.model.HistoryDatasetAssociation).\
+            get(hda_id)
+        return _get_tool_id_for_hda(
+            app, hda.copied_from_history_dataset_association_id)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/cleanup_datasets/admin_cleanup_deletion_template.txt.sample b/scripts/cleanup_datasets/admin_cleanup_deletion_template.txt.sample
new file mode 100644
index 0000000..7763f06
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_deletion_template.txt.sample
@@ -0,0 +1,11 @@
+Galaxy Server Cleanup
+---------------------
+The following datasets you own on Galaxy are older than ${cutoff} days and have been DELETED:
+
+% for dataset, history in datasets:
+    "${dataset}" in history "${history}"
+% endfor
+
+You may be able to undelete them by logging into Galaxy, navigating to the appropriate history, selecting "Include Deleted Datasets" from the history options menu, and clicking on the link to undelete each dataset that you want to keep.  You can then download the datasets.  Thank you for your understanding and cooperation in this necessary cleanup in order to keep the Galaxy resource available.  Please don't hesitate to contact us if you have any questions.
+
+ -- Galaxy Administrators
diff --git a/scripts/cleanup_datasets/admin_cleanup_warning_template.txt.sample b/scripts/cleanup_datasets/admin_cleanup_warning_template.txt.sample
new file mode 100644
index 0000000..7a17156
--- /dev/null
+++ b/scripts/cleanup_datasets/admin_cleanup_warning_template.txt.sample
@@ -0,0 +1,11 @@
+Galaxy Server Cleanup
+---------------------
+The following datasets you own on Galaxy are older than ${cutoff} days and will be deleted soon. Be sure to download any datasets you need to keep.
+
+% for dataset, history in datasets:
+    "${dataset}" in history "${history}"
+% endfor
+
+Please contact us if you have any questions.
+
+ -- Galaxy Administrators
diff --git a/scripts/cleanup_datasets/cleanup_datasets.py b/scripts/cleanup_datasets/cleanup_datasets.py
new file mode 100755
index 0000000..c4ce168
--- /dev/null
+++ b/scripts/cleanup_datasets/cleanup_datasets.py
@@ -0,0 +1,548 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import logging
+import os
+import shutil
+import sys
+import time
+from datetime import datetime, timedelta
+from optparse import OptionParser
+from time import strftime
+
+import sqlalchemy as sa
+from six.moves import configparser
+from sqlalchemy import and_, false, null, true
+from sqlalchemy.orm import eagerload
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+import galaxy.config
+import galaxy.model.mapping
+from galaxy.exceptions import ObjectNotFound
+from galaxy.objectstore import build_object_store_from_config
+from galaxy.util import unicodify
+
+log = logging.getLogger()
+log.setLevel( 10 )
+log.addHandler( logging.StreamHandler( sys.stdout ) )
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def main():
+    """
+    Managing library datasets is a bit complex, so here is a scenario that hopefully provides clarification.  The complexities
+    of handling library datasets are mostly contained in the delete_datasets() method in this script.
+
+    Assume we have 1 library dataset with: LibraryDatasetDatasetAssociation -> LibraryDataset and Dataset
+    At this point, we have the following database column values:
+
+    LibraryDatasetDatasetAssociation deleted: False
+    LibraryDataset deleted: False, purged: False
+    Dataset deleted: False, purged: False
+
+    1. A user deletes the assumed dataset above from a data library via a UI menu option.
+    This action results in the following database column values (changes from previous step marked with *):
+
+    LibraryDatasetDatasetAssociation deleted: False
+    LibraryDataset deleted: True*, purged: False
+    Dataset deleted: False, purged: False
+
+    2. After the number of days configured for the delete_datasets() method (option -6 below) have passed, execution
+    of the delete_datasets() method results in the following database column values (changes from previous step marked with *):
+
+    LibraryDatasetDatasetAssociation deleted: True*
+    LibraryDataset deleted: True, purged: True*
+    Dataset deleted: True*, purged: False
+
+    3. After the number of days configured for the purge_datasets() method (option -3 below) have passed, execution
+    of the purge_datasets() method results in the following database column values (changes from previous step marked with *):
+
+    LibraryDatasetDatasetAssociation deleted: True
+    LibraryDataset deleted: True, purged: True
+    Dataset deleted: True, purged: True* (dataset file removed from disk if -r flag is used)
+
+    This scenario is about as simple as it gets.  Keep in mind that a Dataset object can have many HistoryDatasetAssociations
+    and many LibraryDatasetDatasetAssociations, and a LibraryDataset can have many LibraryDatasetDatasetAssociations.
+    Another way of stating it is: LibraryDatasetDatasetAssociation objects map LibraryDataset objects to Dataset objects,
+    and Dataset objects may be mapped to History objects via HistoryDatasetAssociation objects.
+    """
+    usage = "usage: %prog [options] galaxy.ini"
+    parser = OptionParser(usage=usage)
+    parser.add_option( "-d", "--days", dest="days", action="store", type="int", help="number of days (60)", default=60 )
+    parser.add_option( "-r", "--remove_from_disk", action="store_true", dest="remove_from_disk", help="remove datasets from disk when purged", default=False )
+    parser.add_option( "-i", "--info_only", action="store_true", dest="info_only", help="info about the requested action", default=False )
+    parser.add_option( "-f", "--force_retry", action="store_true", dest="force_retry", help="performs the requested actions, but ignores whether it might have been done before. Useful when -r wasn't used, but should have been", default=False )
+    parser.add_option( "-1", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
+    parser.add_option( "-2", "--purge_histories", action="store_true", dest="purge_histories", default=False, help="purge deleted histories" )
+    parser.add_option( "-3", "--purge_datasets", action="store_true", dest="purge_datasets", default=False, help="purge deleted datasets" )
+    parser.add_option( "-4", "--purge_libraries", action="store_true", dest="purge_libraries", default=False, help="purge deleted libraries" )
+    parser.add_option( "-5", "--purge_folders", action="store_true", dest="purge_folders", default=False, help="purge deleted library folders" )
+    parser.add_option( "-6", "--delete_datasets", action="store_true", dest="delete_datasets", default=False, help="mark deletable datasets as deleted and purge associated dataset instances" )
+
+    ( options, args ) = parser.parse_args()
+    if len(args) != 1:
+        parser.print_help()
+        sys.exit()
+    ini_file = args[0]
+
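+    # NB: this xor chain only ensures an odd number of the action flags was
+    # given (1, 3 or 5), not exactly one; in practice exactly one is expected.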
+    if not ( options.purge_folders ^ options.delete_userless_histories ^
+             options.purge_libraries ^ options.purge_histories ^
+             options.purge_datasets ^ options.delete_datasets ):
+        parser.print_help()
+        sys.exit(0)
+
+    if options.remove_from_disk and options.info_only:
+        parser.error( "remove_from_disk and info_only are mutually exclusive" )
+
+    config_parser = configparser.ConfigParser( {'here': os.getcwd()} )
+    config_parser.read( ini_file )
+    config_dict = {}
+    for key, value in config_parser.items( "app:main" ):
+        config_dict[key] = value
+
+    config = galaxy.config.Configuration( **config_dict )
+
+    app = CleanupDatasetsApplication( config )
+    cutoff_time = datetime.utcnow() - timedelta( days=options.days )
+    now = strftime( "%Y-%m-%d %H:%M:%S" )
+
+    print("##########################################")
+    print("\n# %s - Handling stuff older than %i days" % ( now, options.days ))
+
+    if options.info_only:
+        print("# Displaying info only ( --info_only )\n")
+    elif options.remove_from_disk:
+        print("Datasets will be removed from disk.\n")
+    else:
+        print("Datasets will NOT be removed from disk.\n")
+
+    if options.delete_userless_histories:
+        delete_userless_histories( app, cutoff_time, info_only=options.info_only, force_retry=options.force_retry )
+    elif options.purge_histories:
+        purge_histories( app, cutoff_time, options.remove_from_disk, info_only=options.info_only, force_retry=options.force_retry )
+    elif options.purge_datasets:
+        purge_datasets( app, cutoff_time, options.remove_from_disk, info_only=options.info_only, force_retry=options.force_retry )
+    elif options.purge_libraries:
+        purge_libraries( app, cutoff_time, options.remove_from_disk, info_only=options.info_only, force_retry=options.force_retry )
+    elif options.purge_folders:
+        purge_folders( app, cutoff_time, options.remove_from_disk, info_only=options.info_only, force_retry=options.force_retry )
+    elif options.delete_datasets:
+        delete_datasets( app, cutoff_time, options.remove_from_disk, info_only=options.info_only, force_retry=options.force_retry )
+
+    app.shutdown()
+    sys.exit(0)
+
+
+def delete_userless_histories( app, cutoff_time, info_only=False, force_retry=False ):
+    # Deletes userless histories whose update_time value is older than the cutoff_time.
+    # The purge history script will handle marking DatasetInstances as deleted.
+    # Nothing is removed from disk yet.
+    history_count = 0
+    start = time.time()
+    if force_retry:
+        histories = app.sa_session.query( app.model.History ) \
+                                  .filter( and_( app.model.History.table.c.user_id == null(),
+                                                 app.model.History.table.c.update_time < cutoff_time ) )
+    else:
+        histories = app.sa_session.query( app.model.History ) \
+                                  .filter( and_( app.model.History.table.c.user_id == null(),
+                                                 app.model.History.table.c.deleted == false(),
+                                                 app.model.History.table.c.update_time < cutoff_time ) )
+    for history in histories:
+        if not info_only:
+            print("Deleting history id ", history.id)
+            history.deleted = True
+            app.sa_session.add( history )
+            app.sa_session.flush()
+        history_count += 1
+    stop = time.time()
+    print("Deleted %d histories" % history_count)
+    print("Elapsed time: ", stop - start)
+    print("##########################################")
+
+
+def purge_histories( app, cutoff_time, remove_from_disk, info_only=False, force_retry=False ):
+    # Purges deleted histories whose update_time is older than the cutoff_time.
+    # The dataset associations of each history are also marked as deleted.
+    # The Purge Dataset method will purge each Dataset as necessary
+    # history.purged == True simply means that it can no longer be undeleted
+    # i.e. all associated datasets are marked as deleted
+    history_count = 0
+    start = time.time()
+    if force_retry:
+        histories = app.sa_session.query( app.model.History ) \
+                                  .filter( and_( app.model.History.table.c.deleted == true(),
+                                                 app.model.History.table.c.update_time < cutoff_time ) ) \
+                                  .options( eagerload( 'datasets' ) )
+    else:
+        histories = app.sa_session.query( app.model.History ) \
+                                  .filter( and_( app.model.History.table.c.deleted == true(),
+                                                 app.model.History.table.c.purged == false(),
+                                                 app.model.History.table.c.update_time < cutoff_time ) ) \
+                                  .options( eagerload( 'datasets' ) )
+    for history in histories:
+        print("### Processing history id %d (%s)" % (history.id, unicodify(history.name)))
+        for dataset_assoc in history.datasets:
+            _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only=info_only )  # mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
+        if not info_only:
+            # TODO: should the Delete DefaultHistoryPermissions be deleted here?  This was incorrectly
+            # done in the _list_delete() method of the history controller, so copied it here.  Not sure
+            # if we should ever delete info like this from the db though, so commented out for now...
+            # for dhp in history.default_permissions:
+            #     dhp.delete()
+            print("Purging history id ", history.id)
+            history.purged = True
+            app.sa_session.add( history )
+            app.sa_session.flush()
+        else:
+            print("History id %d will be purged (without 'info_only' mode)" % history.id)
+        history_count += 1
+    stop = time.time()
+    print('Purged %d histories.' % history_count)
+    print("Elapsed time: ", stop - start)
+    print("##########################################")
+
+
+def purge_libraries( app, cutoff_time, remove_from_disk, info_only=False, force_retry=False ):
+    # Purges deleted libraries whose update_time is older than the cutoff_time.
+    # The dataset associations of each library are also marked as deleted.
+    # The Purge Dataset method will purge each Dataset as necessary
+    # library.purged == True simply means that it can no longer be undeleted
+    # i.e. all associated LibraryDatasets/folders are marked as deleted
+    library_count = 0
+    start = time.time()
+    if force_retry:
+        libraries = app.sa_session.query( app.model.Library ) \
+                                  .filter( and_( app.model.Library.table.c.deleted == true(),
+                                                 app.model.Library.table.c.update_time < cutoff_time ) )
+    else:
+        libraries = app.sa_session.query( app.model.Library ) \
+                                  .filter( and_( app.model.Library.table.c.deleted == true(),
+                                                 app.model.Library.table.c.purged == false(),
+                                                 app.model.Library.table.c.update_time < cutoff_time ) )
+    for library in libraries:
+        _purge_folder( library.root_folder, app, remove_from_disk, info_only=info_only )
+        if not info_only:
+            print("Purging library id ", library.id)
+            library.purged = True
+            app.sa_session.add( library )
+            app.sa_session.flush()
+        library_count += 1
+    stop = time.time()
+    print('# Purged %d libraries.' % library_count)
+    print("Elapsed time: ", stop - start)
+    print("##########################################")
+
+
+def purge_folders( app, cutoff_time, remove_from_disk, info_only=False, force_retry=False ):
+    # Purges deleted folders whose update_time is older than the cutoff_time.
+    # The dataset associations of each folder are also marked as deleted.
+    # The Purge Dataset method will purge each Dataset as necessary
+    # libraryFolder.purged == True simply means that it can no longer be undeleted
+    # i.e. all associated LibraryDatasets/folders are marked as deleted
+    folder_count = 0
+    start = time.time()
+    if force_retry:
+        folders = app.sa_session.query( app.model.LibraryFolder ) \
+                                .filter( and_( app.model.LibraryFolder.table.c.deleted == true(),
+                                               app.model.LibraryFolder.table.c.update_time < cutoff_time ) )
+    else:
+        folders = app.sa_session.query( app.model.LibraryFolder ) \
+                                .filter( and_( app.model.LibraryFolder.table.c.deleted == true(),
+                                               app.model.LibraryFolder.table.c.purged == false(),
+                                               app.model.LibraryFolder.table.c.update_time < cutoff_time ) )
+    for folder in folders:
+        _purge_folder( folder, app, remove_from_disk, info_only=info_only )
+        folder_count += 1
+    stop = time.time()
+    print('# Purged %d folders.' % folder_count)
+    print("Elapsed time: ", stop - start)
+    print("##########################################")
+
+
+def delete_datasets( app, cutoff_time, remove_from_disk, info_only=False, force_retry=False ):
+    # Marks datasets as deleted if associated items are all deleted.
+    start = time.time()
+    if force_retry:
+        history_dataset_ids_query = sa.select( ( app.model.Dataset.table.c.id,
+                                                 app.model.Dataset.table.c.state ),
+                                               whereclause=app.model.HistoryDatasetAssociation.table.c.update_time < cutoff_time,
+                                               from_obj=[ sa.outerjoin( app.model.Dataset.table,
+                                                                        app.model.HistoryDatasetAssociation.table ) ] )
+        library_dataset_ids_query = sa.select( ( app.model.LibraryDataset.table.c.id,
+                                                 app.model.LibraryDataset.table.c.deleted ),
+                                               whereclause=app.model.LibraryDataset.table.c.update_time < cutoff_time,
+                                               from_obj=[ app.model.LibraryDataset.table ] )
+    else:
+        # We really only need the id column here, but sqlalchemy barfs when trying to select only 1 column
+        history_dataset_ids_query = sa.select( ( app.model.Dataset.table.c.id,
+                                                 app.model.Dataset.table.c.state ),
+                                               whereclause=and_( app.model.Dataset.table.c.deleted == false(),
+                                                                 app.model.HistoryDatasetAssociation.table.c.update_time < cutoff_time,
+                                                                 app.model.HistoryDatasetAssociation.table.c.deleted == true() ),
+                                               from_obj=[ sa.outerjoin( app.model.Dataset.table,
+                                                                        app.model.HistoryDatasetAssociation.table ) ] )
+        library_dataset_ids_query = sa.select( ( app.model.LibraryDataset.table.c.id,
+                                                 app.model.LibraryDataset.table.c.deleted ),
+                                               whereclause=and_( app.model.LibraryDataset.table.c.deleted == true(),
+                                                                 app.model.LibraryDataset.table.c.purged == false(),
+                                                                 app.model.LibraryDataset.table.c.update_time < cutoff_time ),
+                                               from_obj=[ app.model.LibraryDataset.table ] )
+    deleted_dataset_count = 0
+    deleted_instance_count = 0
+    skip = []
+    # Handle library datasets.  This is a bit tricky, so here's some clarification.  We have a list of all
+    # LibraryDatasets that were marked deleted before our cutoff_time, but have not yet been marked purged.
+    # A LibraryDataset object is marked purged when all of its LibraryDatasetDatasetAssociations have been
+    # marked deleted.  When a LibraryDataset has been marked purged, it can never be undeleted in the data
+    # library.  We have several steps to complete here.  For each LibraryDataset, get its associated Dataset
+    # and add it to our accrued list of Datasets for later processing.  We mark all of its
+    # LibraryDatasetDatasetAssociations as deleted.  Then we mark the LibraryDataset as purged.  We then process our
+    # list of Datasets.
+    library_dataset_ids = [ row.id for row in library_dataset_ids_query.execute() ]
+    dataset_ids = []
+    for library_dataset_id in library_dataset_ids:
+        print("######### Processing LibraryDataset id:", library_dataset_id)
+        # Get the LibraryDataset and the current LibraryDatasetDatasetAssociation objects
+        ld = app.sa_session.query( app.model.LibraryDataset ).get( library_dataset_id )
+        ldda = ld.library_dataset_dataset_association
+        # Append the associated Dataset object's id to our list of dataset_ids
+        dataset_ids.append( ldda.dataset_id )
+        # Mark all of the LibraryDataset's associated LibraryDatasetDatasetAssociation objects' as deleted
+        if not ldda.deleted:
+            ldda.deleted = True
+            app.sa_session.add( ldda )
+            print("Marked associated LibraryDatasetDatasetAssociation id %d as deleted" % ldda.id)
+        for expired_ldda in ld.expired_datasets:
+            if not expired_ldda.deleted:
+                expired_ldda.deleted = True
+                app.sa_session.add( expired_ldda )
+                print("Marked associated expired LibraryDatasetDatasetAssociation id %d as deleted" % ldda.id)
+        # Mark the LibraryDataset as purged
+        ld.purged = True
+        app.sa_session.add( ld )
+        print("Marked LibraryDataset id %d as purged" % ld.id)
+        app.sa_session.flush()
+    # Add all datasets associated with Histories to our list
+    dataset_ids.extend( [ row.id for row in history_dataset_ids_query.execute() ] )
+    # Process each of the Dataset objects
+    for dataset_id in dataset_ids:
+        dataset = app.sa_session.query( app.model.Dataset ).get( dataset_id )
+        if dataset.id in skip:
+            continue
+        skip.append( dataset.id )
+        print("######### Processing dataset id:", dataset_id)
+        if not _dataset_is_deletable( dataset ):
+            print("Dataset is not deletable (shared between multiple histories/libraries, at least one is not deleted)")
+            continue
+        deleted_dataset_count += 1
+        for dataset_instance in dataset.history_associations + dataset.library_associations:
+            # Mark each associated HDA as deleted
+            _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children=True, info_only=info_only, is_deletable=True )
+            deleted_instance_count += 1
+    stop = time.time()
+    print("Examined %d datasets, marked %d datasets and %d dataset instances (HDA) as deleted" % ( len( skip ), deleted_dataset_count, deleted_instance_count ))
+    print("Total elapsed time: ", stop - start)
+    print("##########################################")
+
+
+def purge_datasets( app, cutoff_time, remove_from_disk, info_only=False, force_retry=False ):
+    # Purges deleted datasets whose update_time is older than cutoff_time.  Files may or may
+    # not be removed from disk.
+    dataset_count = 0
+    disk_space = 0
+    start = time.time()
+    if force_retry:
+        datasets = app.sa_session.query( app.model.Dataset ) \
+                                 .filter( and_( app.model.Dataset.table.c.deleted == true(),
+                                                app.model.Dataset.table.c.purgable == true(),
+                                                app.model.Dataset.table.c.update_time < cutoff_time ) )
+    else:
+        datasets = app.sa_session.query( app.model.Dataset ) \
+                                 .filter( and_( app.model.Dataset.table.c.deleted == true(),
+                                                app.model.Dataset.table.c.purgable == true(),
+                                                app.model.Dataset.table.c.purged == false(),
+                                                app.model.Dataset.table.c.update_time < cutoff_time ) )
+    for dataset in datasets:
+        file_size = dataset.file_size
+        _purge_dataset( app, dataset, remove_from_disk, info_only=info_only )
+        dataset_count += 1
+        try:
+            disk_space += file_size
+        except TypeError:
+            # file_size may be None if it was never set
+            pass
+    stop = time.time()
+    print('Purged %d datasets' % dataset_count)
+    if remove_from_disk:
+        print('Freed disk space: ', disk_space)
+    print("Elapsed time: ", stop - start)
+    print("##########################################")
+
+
+def _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children=True, info_only=False, is_deletable=False ):
+    # A dataset_instance is either a HDA or an LDDA.  Purging a dataset instance marks the instance as deleted,
+    # and marks the associated dataset as deleted if it is not associated with another active DatasetInstance.
+    if not info_only:
+        print("Marking as deleted: %s id %d (for dataset id %d)" %
+              ( dataset_instance.__class__.__name__, dataset_instance.id, dataset_instance.dataset.id ))
+        dataset_instance.mark_deleted( include_children=include_children )
+        dataset_instance.clear_associated_files()
+        app.sa_session.add( dataset_instance )
+        app.sa_session.flush()
+        app.sa_session.refresh( dataset_instance.dataset )
+    else:
+        print("%s id %d (for dataset id %d) will be marked as deleted (without 'info_only' mode)" %
+              ( dataset_instance.__class__.__name__, dataset_instance.id, dataset_instance.dataset.id ))
+    if is_deletable or _dataset_is_deletable( dataset_instance.dataset ):
+        # Calling methods may have already checked _dataset_is_deletable, if so, is_deletable should be True
+        _delete_dataset( dataset_instance.dataset, app, remove_from_disk, info_only=info_only, is_deletable=is_deletable )
+    else:
+        if info_only:
+            print("Not deleting dataset ", dataset_instance.dataset.id, " (will be possibly deleted without 'info_only' mode)")
+        else:
+            print("Not deleting dataset %d (shared between multiple histories/libraries, at least one not deleted)" % dataset_instance.dataset.id)
+    # need to purge children here
+    if include_children:
+        for child in dataset_instance.children:
+            _purge_dataset_instance( child, app, remove_from_disk, include_children=include_children, info_only=info_only )
+
+
+def _dataset_is_deletable( dataset ):
+    # a dataset is deletable when it no longer has any non-deleted associations
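+    # (e.g. a Dataset shared by two histories becomes deletable only after
+    # every HDA pointing at it has been marked deleted)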
+    return not bool( dataset.active_history_associations or dataset.active_library_associations )
+
+
+def _delete_dataset( dataset, app, remove_from_disk, info_only=False, is_deletable=False ):
+    # Marks a base dataset as deleted, hdas/lddas associated with dataset can no longer be undeleted.
+    # Metadata files attached to associated dataset instances are removed now.
+    if not is_deletable and not _dataset_is_deletable( dataset ):
+        print("This Dataset (%i) is not deletable, associated Metadata Files will not be removed.\n" % ( dataset.id ))
+    else:
+        # Mark all associated MetadataFiles as deleted and purged and remove them from disk
+        metadata_files = []
+        # let's create a list of metadata files, then perform actions on them
+        for hda in dataset.history_associations:
+            for metadata_file in app.sa_session.query( app.model.MetadataFile ) \
+                                               .filter( app.model.MetadataFile.table.c.hda_id == hda.id ):
+                metadata_files.append( metadata_file )
+        for ldda in dataset.library_associations:
+            for metadata_file in app.sa_session.query( app.model.MetadataFile ) \
+                                               .filter( app.model.MetadataFile.table.c.lda_id == ldda.id ):
+                metadata_files.append( metadata_file )
+        for metadata_file in metadata_files:
+            op_description = "marked as deleted"
+            if remove_from_disk:
+                op_description = op_description + " and purged from disk"
+            if info_only:
+                print("The following metadata files attached to associations of Dataset '%s' will be %s (without 'info_only' mode):" % ( dataset.id, op_description ))
+            else:
+                print("The following metadata files attached to associations of Dataset '%s' have been %s:" % ( dataset.id, op_description ))
+                if remove_from_disk:
+                    try:
+                        print("Removing disk file ", metadata_file.file_name)
+                        os.unlink( metadata_file.file_name )
+                    except Exception as e:
+                        print("Error, exception: %s caught attempting to purge metadata file %s\n" % ( str( e ), metadata_file.file_name ))
+                    metadata_file.purged = True
+                    app.sa_session.add( metadata_file )
+                    app.sa_session.flush()
+                metadata_file.deleted = True
+                app.sa_session.add( metadata_file )
+                app.sa_session.flush()
+            print("%s" % metadata_file.file_name)
+        if not info_only:
+            print("Deleting dataset id", dataset.id)
+            dataset.deleted = True
+            app.sa_session.add( dataset )
+            app.sa_session.flush()
+        else:
+            print("Dataset %i will be deleted (without 'info_only' mode)" % ( dataset.id ))
+
+
+def _purge_dataset( app, dataset, remove_from_disk, info_only=False ):
+    if dataset.deleted:
+        try:
+            if dataset.purgable and _dataset_is_deletable( dataset ):
+                if not info_only:
+                    # Remove files from disk and update the database
+                    if remove_from_disk:
+                        # TODO: should permissions on the dataset be deleted here?
+                        print("Removing disk, file ", dataset.file_name)
+                        os.unlink( dataset.file_name )
+                        # Remove associated extra files from disk if they exist
+                        if dataset.extra_files_path and os.path.exists( dataset.extra_files_path ):
+                            shutil.rmtree( dataset.extra_files_path )  # we need to delete the directory and its contents; os.unlink would always fail on a directory
+                        usage_users = []
+                        for hda in dataset.history_associations:
+                            if not hda.purged:
+                                hda.purged = True
+                                if hda.history.user is not None and hda.history.user not in usage_users:
+                                    usage_users.append( hda.history.user )
+                        for user in usage_users:
+                            user.adjust_total_disk_usage(-dataset.get_total_size())
+                            app.sa_session.add( user )
+                    print("Purging dataset id", dataset.id)
+                    dataset.purged = True
+                    app.sa_session.add( dataset )
+                    app.sa_session.flush()
+                else:
+                    print("Dataset %i will be purged (without 'info_only' mode)" % (dataset.id))
+            else:
+                print("This dataset (%i) is not purgable, the file (%s) will not be removed.\n" % ( dataset.id, dataset.file_name ))
+        except OSError as exc:
+            print("Error, dataset file has already been removed: %s" % str( exc ))
+            print("Purging dataset id", dataset.id)
+            dataset.purged = True
+            app.sa_session.add( dataset )
+            app.sa_session.flush()
+        except ObjectNotFound:
+            print("Dataset %i cannot be found in the object store" % dataset.id)
+        except Exception as exc:
+            print("Error attempting to purge data file: ", dataset.file_name, " error: ", str( exc ))
+    else:
+        print("Error: '%s' has not previously been deleted, so it cannot be purged\n" % dataset.file_name)
+
+
+def _purge_folder( folder, app, remove_from_disk, info_only=False ):
+    """Purges a folder and its contents, recursively"""
+    for ld in folder.datasets:
+        print("Deleting library dataset id ", ld.id)
+        ld.deleted = True
+        for ldda in [ld.library_dataset_dataset_association] + ld.expired_datasets:
+            _purge_dataset_instance( ldda, app, remove_from_disk, info_only=info_only )  # mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
+    for sub_folder in folder.folders:
+        _purge_folder( sub_folder, app, remove_from_disk, info_only=info_only )
+    if not info_only:
+        # TODO: should the folder permissions be deleted here?
+        print("Purging folder id ", folder.id)
+        folder.purged = True
+        app.sa_session.add( folder )
+        app.sa_session.flush()
+
+
+class CleanupDatasetsApplication( object ):
+    """Encapsulates the state of a Universe application"""
+    def __init__( self, config ):
+        if config.database_connection is False:
+            config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % config.database
+        self.object_store = build_object_store_from_config( config )
+        # Setup the database engine and ORM
+        self.model = galaxy.model.mapping.init( config.file_path, config.database_connection, engine_options={}, create_tables=False, object_store=self.object_store )
+
+    @property
+    def sa_session( self ):
+        """
+        Returns a SQLAlchemy session -- currently just gets the current
+        session from the threadlocal session context, but this is provided
+        to allow migration toward a more SQLAlchemy 0.4 style of use.
+        """
+        return self.model.context.current
+
+    def shutdown( self ):
+        self.object_store.shutdown()
+
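+# A minimal usage sketch (hypothetical; main(), defined earlier in this script,
+# normally builds the config from the galaxy.ini given on the command line and
+# picks the action from its numeric flags):
+#
+#     app = CleanupDatasetsApplication( config )
+#     cutoff_time = datetime.utcnow() - timedelta( days=10 )
+#     delete_userless_histories( app, cutoff_time, info_only=True )
+#     app.shutdown()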
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/cleanup_datasets/delete_datasets.sh b/scripts/cleanup_datasets/delete_datasets.sh
new file mode 100644
index 0000000..7aaf360
--- /dev/null
+++ b/scripts/cleanup_datasets/delete_datasets.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
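+# Marks Datasets as deleted once all of their associations are deleted; the -6
+# flag appears to select the delete_datasets action, -d 10 sets the day cutoff
+# and -r requests removal of files from disk.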
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./config/galaxy.ini -d 10 -6 -r $@ >> ./scripts/cleanup_datasets/delete_datasets.log
diff --git a/scripts/cleanup_datasets/delete_userless_histories.sh b/scripts/cleanup_datasets/delete_userless_histories.sh
new file mode 100644
index 0000000..34ab1b6
--- /dev/null
+++ b/scripts/cleanup_datasets/delete_userless_histories.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
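+# Marks anonymous (userless) histories as deleted; -1 appears to select the
+# delete_userless_histories action and -d 10 sets the day cutoff.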
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./config/galaxy.ini -d 10 -1 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
diff --git a/scripts/cleanup_datasets/pgcleanup.py b/scripts/cleanup_datasets/pgcleanup.py
new file mode 100755
index 0000000..ee16591
--- /dev/null
+++ b/scripts/cleanup_datasets/pgcleanup.py
@@ -0,0 +1,781 @@
+#!/usr/bin/env python
+"""
+pgcleanup.py - A script for cleaning up datasets in Galaxy efficiently, by
+    bypassing the Galaxy model and operating directly on the database.
+    PostgreSQL 9.1 or greater is required.
+"""
+from __future__ import print_function
+
+import datetime
+import inspect
+import logging
+import os
+import shutil
+import sys
+from optparse import OptionParser
+
+from six.moves.configparser import ConfigParser
+
+galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+sys.path.insert(1, os.path.join(galaxy_root, 'lib'))
+
+import psycopg2
+from sqlalchemy.engine.url import make_url
+
+import galaxy.config
+from galaxy.exceptions import ObjectNotFound
+from galaxy.objectstore import build_object_store_from_config
+from galaxy.util.bunch import Bunch
+
+log = logging.getLogger()
+
+
+class MetadataFile(Bunch):
+    pass
+
+
+class Dataset(Bunch):
+    pass
+
+
+class Cleanup(object):
+    def __init__(self):
+        self.options = None
+        self.args = None
+        self.config = None
+        self.conn = None
+        self.action_names = []
+        self.logs = {}
+        self.disk_accounting_user_ids = []
+        self.object_store = None
+
+        self.__cache_action_names()
+        self.__parse_args()
+        self.__setup_logging()
+        self.__load_config()
+        self.__connect_db()
+        self.__load_object_store()
+
+    def __cache_action_names(self):
+        for name, value in inspect.getmembers(self):
+            if not name.startswith('_') and inspect.ismethod(value):
+                self.action_names.append(name)
+
+    def __parse_args(self):
+        default_config = os.path.abspath(os.path.join(galaxy_root, 'config', 'galaxy.ini'))
+
+        parser = OptionParser()
+        parser.add_option('-c', '--config', dest='config', help='Path to Galaxy config file (config/galaxy.ini)', default=default_config)
+        parser.add_option('-d', '--debug', action='store_true', dest='debug', help='Enable debug logging', default=False)
+        parser.add_option('--dry-run', action='store_true', dest='dry_run', help="Dry run (rollback all transactions)", default=False)
+        parser.add_option('--force-retry', action='store_true', dest='force_retry', help="Retry file removals (on applicable actions)", default=False)
+        parser.add_option('-o', '--older-than', type='int', dest='days', help='Only perform action(s) on objects that have not been updated since the specified number of days', default=14)
+        parser.add_option('-U', '--no-update-time', action='store_false', dest='update_time', help="Don't set update_time on updated objects", default=True)
+        parser.add_option('-s', '--sequence', dest='sequence', help='Comma-separated sequence of actions, chosen from: %s' % self.action_names, default='')
+        parser.add_option('-w', '--work-mem', dest='work_mem', help='Set PostgreSQL work_mem for this connection', default=None)
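+        # Example invocation (hypothetical):
+        #   pgcleanup.py -o 30 -s delete_userless_histories,purge_deleted_histories,purge_datasets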
+        ( self.options, self.args ) = parser.parse_args()
+
+        self.options.sequence = [ x.strip() for x in self.options.sequence.split(',') ]
+
+        if self.options.sequence == ['']:
+            print("Error: At least one action must be specified in the action sequence\n")
+            parser.print_help()
+            sys.exit(1)
+
+    def __setup_logging(self):
+        format = "%(funcName)s %(levelname)s %(asctime)s %(message)s"
+        if self.options.debug:
+            logging.basicConfig(level=logging.DEBUG, format=format)
+        else:
+            logging.basicConfig(level=logging.INFO, format=format)
+
+    def __load_config(self):
+        log.info('Reading config from %s' % self.options.config)
+        config_parser = ConfigParser(dict(here=os.getcwd(),
+                                          database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
+        config_parser.read(self.options.config)
+
+        config_dict = {}
+        for key, value in config_parser.items('app:main'):
+            config_dict[key] = value
+        config_dict['root_dir'] = galaxy_root
+
+        self.config = galaxy.config.Configuration(**config_dict)
+
+    def __connect_db(self):
+        url = make_url(self.config.database_connection)
+
+        log.info('Connecting to database with URL: %s' % url)
+        args = url.translate_connect_args( username='user' )
+        args.update(url.query)
+
+        assert url.get_dialect().name == 'postgresql', 'This script can only be used with PostgreSQL.'
+
+        self.conn = psycopg2.connect(**args)
+
+    def __load_object_store(self):
+        self.object_store = build_object_store_from_config(self.config)
+
+    def _open_logfile(self):
+        action_name = inspect.stack()[1][3]
+        logname = os.path.join(galaxy_root, 'scripts', 'cleanup_datasets', action_name + '.log')
+
+        if self.options.dry_run:
+            log.debug('--dry-run specified, logging changes to stdout instead of log file: %s' % logname)
+            self.logs[action_name] = sys.stdout
+        else:
+            log.debug('Opening log file: %s' % logname)
+            self.logs[action_name] = open(logname, 'a')
+
+        message = '==== Log opened: %s ' % datetime.datetime.now().isoformat()
+        self.logs[action_name].write(message.ljust(72, '='))
+        self.logs[action_name].write('\n')
+
+    def _log(self, message, action_name=None):
+        if action_name is None:
+            action_name = inspect.stack()[1][3]
+        if not message.endswith('\n'):
+            message += '\n'
+        self.logs[action_name].write(message)
+
+    def _close_logfile(self):
+        action_name = inspect.stack()[1][3]
+
+        message = '==== Log closed: %s ' % datetime.datetime.now().isoformat()
+        self.logs[action_name].write(message.ljust(72, '='))
+        self.logs[action_name].write('\n')
+
+        if self.options.dry_run:
+            log.debug('--dry-run specified, changes were logged to stdout instead of log file')
+        else:
+            log.debug('Closing log file: %s' % self.logs[action_name].name)
+            self.logs[action_name].close()
+
+        del self.logs[action_name]
+
+    def _run(self):
+        ok = True
+        for name in self.options.sequence:
+            if name not in self.action_names:
+                log.error('Unknown action in sequence: %s' % name)
+                ok = False
+        if not ok:
+            log.critical('Exiting due to previous error(s)')
+            sys.exit(1)
+        for name in self.options.sequence:
+            log.info('Calling %s' % name)
+            self.__getattribute__(name)()
+            log.info('Finished %s' % name)
+
+    def _create_event(self, message=None):
+        """
+        Create a new event in the cleanup_event table.
+        """
+
+        if message is None:
+            message = inspect.stack()[1][3]
+
+        sql = """
+            INSERT INTO cleanup_event
+                        (create_time, message)
+                 VALUES (NOW(), %s)
+              RETURNING id;
+        """
+
+        log.debug("SQL is: %s" % sql % ("'" + message + "'"))
+
+        args = (message,)
+
+        cur = self.conn.cursor()
+
+        if self.options.dry_run:
+            sql = "SELECT MAX(id) FROM cleanup_event;"
+            cur.execute(sql)
+            max_id = cur.fetchone()[0]
+            if max_id is None:
+                # there has to be at least one event in the table, if there are none just create a fake one.
+                sql = "INSERT INTO cleanup_event (create_time, message) VALUES (NOW(), 'dry_run_event') RETURNING id;"
+                cur.execute(sql)
+                max_id = cur.fetchone()[0]
+                self.conn.commit()
+                log.info("An event must exist for the subsequent query to succeed, so a dummy event has been created")
+            else:
+                log.info("Not executing event creation (increments sequence even when rolling back), using an old event ID (%i) for dry run" % max_id)
+            return max_id
+
+        log.info("Executing SQL")
+        cur.execute(sql, args)
+        log.info('Database status: %s' % cur.statusmessage)
+
+        return cur.fetchone()[0]
+
+    def _update(self, sql, args):
+        if args is not None:
+            log.debug('SQL is: %s' % sql % args)
+        else:
+            log.debug('SQL is: %s' % sql)
+
+        cur = self.conn.cursor()
+
+        if self.options.work_mem is not None:
+            log.info('Setting work_mem to %s' % self.options.work_mem)
+            cur.execute('SET work_mem TO %s', (self.options.work_mem,))
+
+        log.info('Executing SQL')
+        cur.execute(sql, args)
+        log.info('Database status: %s' % cur.statusmessage)
+
+        return cur
+
+    def _flush(self):
+        if self.options.dry_run:
+            self.conn.rollback()
+            log.info("--dry-run specified, all changes rolled back")
+        else:
+            self.conn.commit()
+            log.info("All changes committed")
+
+    def _remove_metadata_file(self, id, object_store_id, action_name):
+        metadata_file = MetadataFile(id=id, object_store_id=object_store_id)
+
+        try:
+            filename = self.object_store.get_filename(metadata_file, extra_dir='_metadata_files', extra_dir_at_root=True, alt_name="metadata_%d.dat" % id)
+            self._log('Removing from disk: %s' % filename, action_name)
+        except (ObjectNotFound, AttributeError) as e:
+            log.error('Unable to get MetadataFile %s filename: %s' % (id, e))
+            return
+
+        if not self.options.dry_run:
+            try:
+                os.unlink(filename)
+            except Exception as e:
+                self._log('Removal of %s failed with error: %s' % (filename, e), action_name)
+
+    def _update_user_disk_usage(self):
+        """
+        Any operation that purges a HistoryDatasetAssociation may require
+        updating a user's disk usage.  Rather than attempt to resolve dataset
+        copies at purge-time, simply maintain a list of users that have had
+        HDAs purged, and update their usages once all updates are complete.
+
+        This could probably be done more efficiently.
+        """
+        log.info('Recalculating disk usage for users whose HistoryDatasetAssociations were purged')
+
+        for user_id in self.disk_accounting_user_ids:
+
+            # TODO: h.purged = false should be unnecessary once all hdas in purged histories are purged.
+            sql = """
+                   UPDATE galaxy_user
+                      SET disk_usage = (SELECT COALESCE(SUM(total_size), 0)
+                                          FROM (  SELECT d.total_size
+                                                    FROM history_dataset_association hda
+                                                         JOIN history h ON h.id = hda.history_id
+                                                         JOIN dataset d ON hda.dataset_id = d.id
+                                                   WHERE h.user_id = %s
+                                                         AND h.purged = false
+                                                         AND hda.purged = false
+                                                         AND d.purged = false
+                                                         AND d.id NOT IN (SELECT dataset_id
+                                                                            FROM library_dataset_dataset_association)
+                                                GROUP BY d.id) sizes)
+                    WHERE id = %s
+                RETURNING disk_usage;
+            """
+
+            args = (user_id, user_id)
+            cur = self._update(sql, args)
+            self._flush()
+
+            for tup in cur:
+                # disk_usage might be None (e.g. user has purged all data)
+                log.debug('Updated disk usage for user id %i to %s bytes' % (user_id, tup[0]))
+
+    def _shutdown(self):
+        self.object_store.shutdown()
+        self.conn.close()
+        for handle in self.logs.values():
+            message = '==== Log closed at shutdown: %s ' % datetime.datetime.now().isoformat()
+            handle.write(message.ljust(72, '='))
+            handle.write('\n')
+            handle.close()
+
+    def update_hda_purged_flag(self):
+        """
+        The old cleanup script does not mark HistoryDatasetAssociations as purged when deleted Histories are purged.  This method can be used to rectify that situation.
+        """
+        log.info('Marking purged all HistoryDatasetAssociations associated with purged Datasets')
+
+        event_id = self._create_event()
+
+        # update_time is intentionally left unmodified.
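+        # The statement relies on writable CTEs (the reason for the PostgreSQL
+        # 9.1+ requirement): the UPDATE ... RETURNING feeds both the
+        # cleanup_event_hda_association INSERT and the final SELECT.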
+        sql = """
+                WITH purged_hda_ids
+                  AS (     UPDATE history_dataset_association
+                              SET purged = true
+                             FROM dataset
+                            WHERE history_dataset_association.dataset_id = dataset.id
+                                  AND dataset.purged
+                                  AND NOT history_dataset_association.purged
+                        RETURNING history_dataset_association.id),
+                     hda_events
+                  AS (INSERT INTO cleanup_event_hda_association
+                                  (create_time, cleanup_event_id, hda_id)
+                           SELECT NOW(), %s, id
+                             FROM purged_hda_ids)
+              SELECT id
+                FROM purged_hda_ids
+            ORDER BY id;
+        """
+
+        args = (event_id,)
+        cur = self._update(sql, args)
+        self._flush()
+
+        self._open_logfile()
+        for tup in cur:
+            self._log('Marked HistoryDatasetAssociation purged: %s' % tup[0])
+        self._close_logfile()
+
+    def delete_userless_histories(self):
+        """
+        Mark deleted all "anonymous" Histories (not owned by a registered user) that are older than the specified number of days.
+        """
+        log.info('Marking deleted all userless Histories older than %i days' % self.options.days)
+
+        event_id = self._create_event()
+
+        sql = """
+                WITH deleted_history_ids
+                  AS (     UPDATE history
+                              SET deleted = true%s
+                            WHERE user_id is null
+                                  AND NOT deleted
+                                  AND update_time < (NOW() - interval '%s days')
+                        RETURNING id),
+                     history_events
+                  AS (INSERT INTO cleanup_event_history_association
+                                  (create_time, cleanup_event_id, history_id)
+                           SELECT NOW(), %s, id
+                             FROM deleted_history_ids)
+              SELECT id
+                FROM deleted_history_ids
+            ORDER BY id;
+        """
+
+        update_time_sql = ''
+        if self.options.update_time:
+            update_time_sql = """,
+                                  update_time = NOW()"""
+
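+        # Two-stage substitution: Python %-formatting first splices in the
+        # optional fragment while re-emitting literal '%s' placeholders, which
+        # psycopg2 then fills with the real arguments in cur.execute().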
+        sql = sql % (update_time_sql, '%s', '%s')
+        args = (self.options.days, event_id)
+        cur = self._update(sql, args)
+        self._flush()
+
+        self._open_logfile()
+        for tup in cur:
+            self._log('Marked userless History deleted: %s' % tup[0])
+        self._close_logfile()
+
+    def purge_deleted_hdas(self):
+        """
+        Mark purged all HistoryDatasetAssociations currently marked deleted that are older than the specified number of days.
+        Mark deleted all MetadataFiles whose hda_id is purged in this step.
+        Mark deleted all ImplicitlyConvertedDatasetAssociations whose hda_parent_id is purged in this step.
+        Mark purged all HistoryDatasetAssociations for which an ImplicitlyConvertedDatasetAssociation with matching hda_id is deleted in this step.
+        """
+        log.info('Marking purged all deleted HistoryDatasetAssociations older than %i days' % self.options.days)
+
+        event_id = self._create_event()
+
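+        # The CTE chain below implements the docstring steps in order:
+        # purged_hda_ids, then deleted_metadata_file_ids and deleted_icda_ids
+        # derived from it, then deleted_icda_purged_child_hda_ids, with one
+        # *_events CTE per cleanup_event_* association table.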
+        sql = """
+              WITH purged_hda_ids
+                AS (     UPDATE history_dataset_association
+                            SET purged = true%s
+                          WHERE deleted%s
+                                AND update_time < (NOW() - interval '%s days')
+                      RETURNING id,
+                                history_id),
+                   deleted_metadata_file_ids
+                AS (     UPDATE metadata_file
+                            SET deleted = true%s
+                           FROM purged_hda_ids
+                          WHERE purged_hda_ids.id = metadata_file.hda_id
+                      RETURNING metadata_file.hda_id AS hda_id,
+                                metadata_file.id AS id,
+                                metadata_file.object_store_id AS object_store_id),
+                   deleted_icda_ids
+                AS (     UPDATE implicitly_converted_dataset_association
+                            SET deleted = true%s
+                           FROM purged_hda_ids
+                          WHERE purged_hda_ids.id = implicitly_converted_dataset_association.hda_parent_id
+                      RETURNING implicitly_converted_dataset_association.hda_id AS hda_id,
+                                implicitly_converted_dataset_association.hda_parent_id AS hda_parent_id,
+                                implicitly_converted_dataset_association.id AS id),
+                   deleted_icda_purged_child_hda_ids
+                AS (     UPDATE history_dataset_association
+                            SET purged = true%s
+                           FROM deleted_icda_ids
+                          WHERE deleted_icda_ids.hda_id = history_dataset_association.id),
+                   hda_events
+                AS (INSERT INTO cleanup_event_hda_association
+                                (create_time, cleanup_event_id, hda_id)
+                         SELECT NOW(), %s, id
+                           FROM purged_hda_ids),
+                   metadata_file_events
+                AS (INSERT INTO cleanup_event_metadata_file_association
+                                (create_time, cleanup_event_id, metadata_file_id)
+                         SELECT NOW(), %s, id
+                           FROM deleted_metadata_file_ids),
+                   icda_events
+                AS (INSERT INTO cleanup_event_icda_association
+                                (create_time, cleanup_event_id, icda_id)
+                         SELECT NOW(), %s, id
+                           FROM deleted_icda_ids),
+                   icda_hda_events
+                AS (INSERT INTO cleanup_event_hda_association
+                                (create_time, cleanup_event_id, hda_id)
+                         SELECT NOW(), %s, hda_id
+                           FROM deleted_icda_ids)
+            SELECT purged_hda_ids.id,
+                   history.user_id,
+                   deleted_metadata_file_ids.id,
+                   deleted_metadata_file_ids.object_store_id,
+                   deleted_icda_ids.id,
+                   deleted_icda_ids.hda_id
+              FROM purged_hda_ids
+                   LEFT OUTER JOIN deleted_metadata_file_ids
+                                   ON deleted_metadata_file_ids.hda_id = purged_hda_ids.id
+                   LEFT OUTER JOIN deleted_icda_ids
+                                   ON deleted_icda_ids.hda_parent_id = purged_hda_ids.id
+                   LEFT OUTER JOIN history
+                                   ON purged_hda_ids.history_id = history.id;
+        """
+
+        force_retry_sql = """
+                                AND NOT purged"""
+        update_time_sql = ""
+
+        if self.options.force_retry:
+            force_retry_sql = ""
+        else:
+            # only update update_time when not doing a force retry; otherwise objects that were actually purged long ago would all have their update times reset
+            if self.options.update_time:
+                update_time_sql = """,
+                              update_time = NOW()"""
+
+        sql = sql % (update_time_sql, force_retry_sql, '%s', update_time_sql, update_time_sql, update_time_sql, '%s', '%s', '%s', '%s')
+        args = (self.options.days, event_id, event_id, event_id, event_id)
+        cur = self._update(sql, args)
+        self._flush()
+
+        self._open_logfile()
+        for tup in cur:
+            self._log('Marked HistoryDatasetAssociations purged: %s' % tup[0])
+            if tup[1] is not None and tup[1] not in self.disk_accounting_user_ids:
+                self.disk_accounting_user_ids.append(int(tup[1]))
+            if tup[2] is not None:
+                self._log('Purge of HDA %s caused deletion of MetadataFile: %s in Object Store: %s' % (tup[0], tup[2], tup[3]))
+                self._remove_metadata_file(tup[2], tup[3], inspect.stack()[0][3])
+            if tup[4] is not None:
+                self._log('Purge of HDA %s caused deletion of ImplicitlyConvertedDatasetAssociation: %s and converted HistoryDatasetAssociation: %s' % (tup[0], tup[4], tup[5]))
+        self._close_logfile()
+
+    def purge_deleted_histories(self):
+        """
+        Mark purged all Histories marked deleted that are older than the specified number of days.
+        Mark purged all HistoryDatasetAssociations in Histories marked purged in this step (if not already purged).
+        """
+        log.info('Marking purged all deleted histories that are older than the specified number of days.')
+
+        event_id = self._create_event()
+
+        sql = """
+              WITH purged_history_ids
+                AS (     UPDATE history
+                            SET purged = true%s
+                          WHERE deleted%s
+                                AND update_time < (NOW() - interval '%s days')
+                      RETURNING id,
+                                user_id),
+                   purged_hda_ids
+                AS (     UPDATE history_dataset_association
+                            SET purged = true%s
+                           FROM purged_history_ids
+                          WHERE purged_history_ids.id = history_dataset_association.history_id
+                                AND NOT history_dataset_association.purged
+                      RETURNING history_dataset_association.history_id AS history_id,
+                                history_dataset_association.id AS id),
+                   deleted_metadata_file_ids
+                AS (     UPDATE metadata_file
+                            SET deleted = true%s
+                           FROM purged_hda_ids
+                          WHERE purged_hda_ids.id = metadata_file.hda_id
+                      RETURNING metadata_file.hda_id AS hda_id,
+                                metadata_file.id AS id,
+                                metadata_file.object_store_id AS object_store_id),
+                   deleted_icda_ids
+                AS (     UPDATE implicitly_converted_dataset_association
+                            SET deleted = true%s
+                           FROM purged_hda_ids
+                          WHERE purged_hda_ids.id = implicitly_converted_dataset_association.hda_parent_id
+                      RETURNING implicitly_converted_dataset_association.hda_id AS hda_id,
+                                implicitly_converted_dataset_association.hda_parent_id AS hda_parent_id,
+                                implicitly_converted_dataset_association.id AS id),
+                   deleted_icda_purged_child_hda_ids
+                AS (     UPDATE history_dataset_association
+                            SET purged = true%s
+                           FROM deleted_icda_ids
+                          WHERE deleted_icda_ids.hda_id = history_dataset_association.id),
+                   history_events
+                AS (INSERT INTO cleanup_event_history_association
+                                (create_time, cleanup_event_id, history_id)
+                         SELECT NOW(), %s, id
+                           FROM purged_history_ids),
+                   hda_events
+                AS (INSERT INTO cleanup_event_hda_association
+                                (create_time, cleanup_event_id, hda_id)
+                         SELECT NOW(), %s, id
+                           FROM purged_hda_ids),
+                   metadata_file_events
+                AS (INSERT INTO cleanup_event_metadata_file_association
+                                (create_time, cleanup_event_id, metadata_file_id)
+                         SELECT NOW(), %s, id
+                           FROM deleted_metadata_file_ids),
+                   icda_events
+                AS (INSERT INTO cleanup_event_icda_association
+                                (create_time, cleanup_event_id, icda_id)
+                         SELECT NOW(), %s, id
+                           FROM deleted_icda_ids),
+                   icda_hda_events
+                AS (INSERT INTO cleanup_event_hda_association
+                                (create_time, cleanup_event_id, hda_id)
+                         SELECT NOW(), %s, hda_id
+                           FROM deleted_icda_ids)
+            SELECT purged_history_ids.id,
+                   purged_history_ids.user_id,
+                   purged_hda_ids.id,
+                   deleted_metadata_file_ids.id,
+                   deleted_metadata_file_ids.object_store_id,
+                   deleted_icda_ids.id,
+                   deleted_icda_ids.hda_id
+              FROM purged_history_ids
+                   LEFT OUTER JOIN purged_hda_ids
+                                   ON purged_history_ids.id = purged_hda_ids.history_id
+                   LEFT OUTER JOIN deleted_metadata_file_ids
+                                   ON deleted_metadata_file_ids.hda_id = purged_hda_ids.id
+                   LEFT OUTER JOIN deleted_icda_ids
+                                   ON deleted_icda_ids.hda_parent_id = purged_hda_ids.id;
+        """
+
+        force_retry_sql = """
+                                AND NOT purged"""
+        update_time_sql = ""
+
+        if self.options.force_retry:
+            force_retry_sql = ""
+        else:
+            if self.options.update_time:
+                update_time_sql += """,
+                                update_time = NOW()"""
+
+        sql = sql % (update_time_sql, force_retry_sql, '%s', update_time_sql, update_time_sql, update_time_sql, update_time_sql, '%s', '%s', '%s', '%s', '%s')
+        args = (self.options.days, event_id, event_id, event_id, event_id, event_id)
+        cur = self._update(sql, args)
+        self._flush()
+
+        self._open_logfile()
+        for tup in cur:
+            self._log('Marked History purged: %s' % tup[0])
+            if tup[1] is not None and tup[1] not in self.disk_accounting_user_ids:
+                self.disk_accounting_user_ids.append(int(tup[1]))
+            if tup[2] is not None:
+                self._log('Purge of History %s caused deletion of HistoryDatasetAssociation: %s' % (tup[0], tup[2]))
+            if tup[3] is not None:
+                self._log('Purge of HDA %s caused deletion of MetadataFile: %s in Object Store: %s' % (tup[1], tup[3], tup[4]))
+                self._remove_metadata_file(tup[3], tup[4], inspect.stack()[0][3])
+            if tup[5] is not None:
+                self._log('Purge of HDA %s caused deletion of ImplicitlyConvertedDatasetAssociation: %s and converted HistoryDatasetAssociation: %s' % (tup[1], tup[5], tup[6]))
+        self._close_logfile()
+
+    def delete_exported_histories(self):
+        """
+        Mark deleted all Datasets that are derivative of JobExportHistoryArchives that are older than the specified number of days.
+        """
+        log.info('Marking deleted all Datasets that are derivative of JobExportHistoryArchives that are older than the specified number of days.')
+
+        event_id = self._create_event()
+
+        sql = """
+                WITH deleted_dataset_ids
+                  AS (     UPDATE dataset
+                              SET deleted = true%s
+                             FROM job_export_history_archive
+                            WHERE job_export_history_archive.dataset_id = dataset.id
+                                  AND NOT deleted
+                                  AND dataset.update_time <= (NOW() - interval '%s days')
+                        RETURNING dataset.id),
+                     dataset_events
+                  AS (INSERT INTO cleanup_event_dataset_association
+                                  (create_time, cleanup_event_id, dataset_id)
+                           SELECT NOW(), %s, id
+                             FROM deleted_dataset_ids)
+              SELECT id
+                FROM deleted_dataset_ids
+            ORDER BY id;
+        """
+
+        update_time_sql = ""
+        if self.options.update_time:
+            update_time_sql += """,
+                                  update_time = NOW()"""
+
+        sql = sql % (update_time_sql, '%s', '%s')
+        args = (self.options.days, event_id)
+        cur = self._update(sql, args)
+        self._flush()
+
+        self._open_logfile()
+        for tup in cur:
+            self._log('Marked Dataset deleted: %s' % tup[0])
+        self._close_logfile()
+
+    def delete_datasets(self):
+        """
+        Mark deleted all Datasets whose associations are all marked as deleted (LDDA) or purged (HDA) that are older than the specified number of days.
+        """
+        log.info('Marking deleted all Datasets whose associations are all marked as deleted/purged that are older than the specified number of days.')
+
+        event_id = self._create_event()
+
+        sql = """
+                WITH deleted_dataset_ids
+                  AS (     UPDATE dataset
+                              SET deleted = true%s
+                            WHERE NOT deleted
+                                  AND NOT EXISTS (SELECT true
+                                                    FROM library_dataset_dataset_association
+                                                   WHERE (NOT deleted
+                                                          OR update_time >= (NOW() - interval '%s days'))
+                                                         AND dataset.id = dataset_id)
+                                  AND NOT EXISTS (SELECT true
+                                                    FROM history_dataset_association
+                                                   WHERE (NOT purged
+                                                          OR update_time >= (NOW() - interval '%s days'))
+                                                         AND dataset.id = dataset_id)
+                        RETURNING id),
+                     dataset_events
+                  AS (INSERT INTO cleanup_event_dataset_association
+                                  (create_time, cleanup_event_id, dataset_id)
+                           SELECT NOW(), %s, id
+                             FROM deleted_dataset_ids)
+              SELECT id
+                FROM deleted_dataset_ids
+            ORDER BY id;
+        """
+
+        update_time_sql = ""
+        if self.options.update_time:
+            update_time_sql += """,
+                                  update_time = NOW()"""
+
+        sql = sql % (update_time_sql, '%s', '%s', '%s')
+        args = (self.options.days, self.options.days, event_id)
+        cur = self._update(sql, args)
+        self._flush()
+
+        self._open_logfile()
+        for tup in cur:
+            self._log('Marked Dataset deleted: %s' % tup[0])
+        self._close_logfile()
+
+    def purge_datasets(self):
+        """
+        Mark purged all Datasets marked deleted that are older than the specified number of days.
+        """
+        log.info('Marking purged all Datasets marked deleted that are older than the specified number of days.')
+
+        event_id = self._create_event()
+
+        sql = """
+                WITH purged_dataset_ids
+                  AS (     UPDATE dataset
+                              SET purged = true%s
+                            WHERE deleted%s
+                                  AND update_time < (NOW() - interval '%s days')
+                        RETURNING id,
+                                  object_store_id),
+                     dataset_events
+                  AS (INSERT INTO cleanup_event_dataset_association
+                                  (create_time, cleanup_event_id, dataset_id)
+                           SELECT NOW(), %s, id
+                             FROM purged_dataset_ids)
+              SELECT id,
+                     object_store_id
+                FROM purged_dataset_ids
+            ORDER BY id;
+        """
+
+        force_retry_sql = """
+                                  AND NOT purged"""
+        update_time_sql = ""
+
+        if self.options.force_retry:
+            force_retry_sql = ""
+        else:
+            if self.options.update_time:
+                update_time_sql = """,
+                                  update_time = NOW()"""
+
+        sql = sql % (update_time_sql, force_retry_sql, '%s', '%s')
+        args = (self.options.days, event_id)
+        cur = self._update(sql, args)
+        self._flush()
+
+        self._open_logfile()
+        for tup in cur:
+            self._log('Marked Dataset purged: %s in Object Store: %s' % (tup[0], tup[1]))
+
+            # always try to remove the "object store path" - if it's at an external_filename, that file will be untouched anyway (which is what we want)
+            dataset = Dataset(id=tup[0], object_store_id=tup[1])
+            try:
+                filename = self.object_store.get_filename(dataset)
+            except (ObjectNotFound, AttributeError) as e:
+                log.error('Unable to get Dataset %s filename: %s' % (tup[0], e))
+                continue
+
+            try:
+                extra_files_dir = self.object_store.get_filename(dataset, dir_only=True, extra_dir="dataset_%d_files" % tup[0])
+            except (ObjectNotFound, AttributeError):
+                extra_files_dir = None
+
+            # don't check for existence of the dataset, it should exist
+            self._log('Removing from disk: %s' % filename)
+            if not self.options.dry_run:
+                try:
+                    os.unlink(filename)
+                except Exception as e:
+                    self._log('Removal of %s failed with error: %s' % (filename, e))
+
+            # extra_files_dir is optional so it's checked first
+            if extra_files_dir is not None and os.path.exists(extra_files_dir):
+                self._log('Removing from disk: %s' % extra_files_dir)
+                if not self.options.dry_run:
+                    try:
+                        shutil.rmtree(extra_files_dir)
+                    except Exception as e:
+                        self._log('Removal of %s failed with error: %s' % (extra_files_dir, e))
+
+        self._close_logfile()
+
+
+if __name__ == '__main__':
+    cleanup = Cleanup()
+    try:
+        cleanup._run()
+        if cleanup.disk_accounting_user_ids:
+            cleanup._update_user_disk_usage()
+    except Exception:
+        log.exception('Caught exception in run sequence:')
+    cleanup._shutdown()
diff --git a/scripts/cleanup_datasets/populate_uuid.py b/scripts/cleanup_datasets/populate_uuid.py
new file mode 100755
index 0000000..dd1e64e
--- /dev/null
+++ b/scripts/cleanup_datasets/populate_uuid.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+
+"""
+Populates blank uuid fields in datasets and workflows with randomly generated values.
+
+Going forward, these IDs will be generated for all new datasets and workflows. This
+script fixes rows that were created before the change.
+"""
+from __future__ import print_function
+import sys
+import uuid
+
+from galaxy.model import mapping
+from galaxy.model.orm.scripts import get_config
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def usage(prog):
+    print("usage: %s galaxy.ini" % prog)
+    print("""
+Populates blank uuid fields in datasets and workflows with randomly generated values.
+
+Going forward, these IDs will be generated for all new datasets and workflows. This
+script fixes rows that were created before the change.
+    """)
+
+
+def main():
+    if len(sys.argv) != 2 or sys.argv[1] == "-h" or sys.argv[1] == "--help":
+        usage(sys.argv[0])
+        sys.exit()
+    ini_file = sys.argv.pop(1)
+    config = get_config(ini_file)
+
+    model = mapping.init( ini_file, config['db_url'], create_tables=False )
+
+    for row in model.context.query( model.Dataset ):
+        if row.uuid is None:
+            row.uuid = uuid.uuid4()
+            print("Setting dataset:", row.id, " UUID to ", row.uuid)
+    model.context.flush()
+
+    for row in model.context.query( model.Workflow ):
+        if row.uuid is None:
+            row.uuid = uuid.uuid4()
+            print("Setting Workflow:", row.id, " UUID to ", row.uuid)
+    model.context.flush()
+
+
+if __name__ == "__main__":
+    main()
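+
+# A quick post-run sanity check (a SQL sketch; table and column names as used
+# by galaxy.model in this tree):
+#   SELECT count(*) FROM dataset WHERE uuid IS NULL;   -- expect 0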
diff --git a/scripts/cleanup_datasets/populate_uuid.sh b/scripts/cleanup_datasets/populate_uuid.sh
new file mode 100644
index 0000000..ac20284
--- /dev/null
+++ b/scripts/cleanup_datasets/populate_uuid.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+cd "$(dirname "$0")/../.."
+export PYTHONPATH=./lib/
+python ./scripts/cleanup_datasets/populate_uuid.py ./config/galaxy.ini "$@"
diff --git a/scripts/cleanup_datasets/purge_datasets.sh b/scripts/cleanup_datasets/purge_datasets.sh
new file mode 100644
index 0000000..63ae40c
--- /dev/null
+++ b/scripts/cleanup_datasets/purge_datasets.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
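+# Flag meanings, inferred from the companion purge_* wrappers in this directory:
+#   -d 10  act on objects older than 10 days
+#   -3     select the purge-datasets action
+#   -r     also remove the files from disk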
+cd "$(dirname "$0")/../.."
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./config/galaxy.ini -d 10 -3 -r "$@" >> ./scripts/cleanup_datasets/purge_datasets.log
diff --git a/scripts/cleanup_datasets/purge_folders.sh b/scripts/cleanup_datasets/purge_folders.sh
new file mode 100644
index 0000000..a3965ff
--- /dev/null
+++ b/scripts/cleanup_datasets/purge_folders.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd "$(dirname "$0")/../.."
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./config/galaxy.ini -d 10 -5 -r "$@" >> ./scripts/cleanup_datasets/purge_folders.log
diff --git a/scripts/cleanup_datasets/purge_histories.sh b/scripts/cleanup_datasets/purge_histories.sh
new file mode 100644
index 0000000..486ec60
--- /dev/null
+++ b/scripts/cleanup_datasets/purge_histories.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd "$(dirname "$0")/../.."
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./config/galaxy.ini -d 10 -2 -r "$@" >> ./scripts/cleanup_datasets/purge_histories.log
diff --git a/scripts/cleanup_datasets/purge_libraries.sh b/scripts/cleanup_datasets/purge_libraries.sh
new file mode 100644
index 0000000..ed676c4
--- /dev/null
+++ b/scripts/cleanup_datasets/purge_libraries.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd "$(dirname "$0")/../.."
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./config/galaxy.ini -d 10 -4 -r "$@" >> ./scripts/cleanup_datasets/purge_libraries.log
diff --git a/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py b/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py
new file mode 100755
index 0000000..2967dfb
--- /dev/null
+++ b/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+"""
+Removes dataset files (previously renamed by appending _purged to the file name) from disk.
+Usage: python remove_renamed_datasets_from_disk.py renamed.log
+"""
+from __future__ import print_function
+import os
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def usage(prog):
+    print("usage: %s file" % prog)
+    print("""
+Removes a set of files from disk. The input file should contain a list of files
+to be deleted, one per line. The full path must be specified and must begin
+with /var/opt/galaxy.
+
+A log of files deleted is created in a file with the same name as that input but
+with .removed.log appended.
+    """)
+
+
+def main():
+    if len(sys.argv) != 2 or sys.argv[1] == "-h" or sys.argv[1] == "--help":
+        usage(sys.argv[0])
+        sys.exit()
+    infile = sys.argv[1]
+    outfile = infile + ".removed.log"
+    out = open( outfile, 'w' )
+
+    print("# The following renamed datasets have been removed from disk", file=out)
+    removed_files = 0
+    for line in open( infile ):
+        line = line.rstrip( '\r\n' )
+        if line and line.startswith( '/var/opt/galaxy' ):
+            try:
+                os.unlink( line )
+                print(line, file=out)
+                removed_files += 1
+            except Exception as exc:
+                print("# Error, exception " + str( exc ) + " caught attempting to remove " + line, file=out)
+    print("# Removed " + str( removed_files ) + " files", file=out)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/cleanup_datasets/rename_purged_datasets.py b/scripts/cleanup_datasets/rename_purged_datasets.py
new file mode 100755
index 0000000..f9d888d
--- /dev/null
+++ b/scripts/cleanup_datasets/rename_purged_datasets.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+"""
+Renames a dataset file by appending _purged to the file name so that it can later be removed from disk.
+Usage: python rename_purged_datasets.py purge.log
+"""
+from __future__ import print_function
+import os
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def usage(prog):
+    print("usage: %s file" % prog)
+    print("""
+Marks a set of files as purged and renames them. The input file should contain a
+list of files to be purged, one per line. The full path must be specified and
+must begin with /var/opt/galaxy.
+A log of files marked as purged is created in a file with the same name as that
+input but with .renamed.log appended. The renamed files can finally be removed
+from disk with remove_renamed_datasets_from_disk.py, by supplying it with the
+resulting log.
+    """)
+
+
+def main():
+    if len(sys.argv) != 2 or sys.argv[1] == "-h" or sys.argv[1] == "--help":
+        usage(sys.argv[0])
+        sys.exit()
+    infile = sys.argv[1]
+    outfile = infile + ".renamed.log"
+    out = open( outfile, 'w' )
+
+    print("# The following renamed datasets can be removed from disk", file=out)
+    renamed_files = 0
+    for line in open( infile ):
+        line = line.rstrip( '\r\n' )
+        if line and line.startswith( '/var/opt/galaxy' ):
+            try:
+                purged_filename = line + "_purged"
+                os.rename( line, purged_filename )
+                print(purged_filename, file=out)
+                renamed_files += 1
+            except Exception as exc:
+                print("# Error, exception " + str( exc ) + " caught attempting to rename " + purged_filename, file=out)
+    print("# Renamed " + str( renamed_files ) + " files", file=out)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/cleanup_datasets/update_dataset_size.py b/scripts/cleanup_datasets/update_dataset_size.py
new file mode 100755
index 0000000..15e4b5d
--- /dev/null
+++ b/scripts/cleanup_datasets/update_dataset_size.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+"""
+Updates dataset.size column.
+Remember to backup your database before running.
+"""
+from __future__ import print_function
+import os
+import sys
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+import galaxy.app
+from six.moves import configparser
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def usage(prog):
+    print("usage: %s galaxy.ini" % prog)
+    print("""
+Updates the dataset.size column. Users are advised to backup the database before
+running.
+    """)
+
+
+def main():
+    if len(sys.argv) != 2 or sys.argv[1] == "-h" or sys.argv[1] == "--help":
+        usage(sys.argv[0])
+        sys.exit()
+    ini_file = sys.argv.pop(1)
+    conf_parser = configparser.ConfigParser( {'here': os.getcwd()} )
+    conf_parser.read( ini_file )
+    configuration = {}
+    for key, value in conf_parser.items( "app:main" ):
+        configuration[key] = value
+    app = galaxy.app.UniverseApplication( global_conf=ini_file, **configuration )
+
+    # Step through Datasets, determining size on disk for each.
+    print("Determining the size of each dataset...")
+    for row in app.model.Dataset.table.select().execute():
+        purged = app.model.Dataset.get( row.id ).purged
+        file_size = app.model.Dataset.get( row.id ).file_size
+        if file_size is None and not purged:
+            size_on_disk = app.model.Dataset.get( row.id ).get_size()
+            print("Updating Dataset.%d with file_size: %d" % ( row.id, size_on_disk ))
+            app.model.Dataset.table.update( app.model.Dataset.table.c.id == row.id ).execute( file_size=size_on_disk )
+    app.shutdown()
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/cleanup_datasets/update_metadata.py b/scripts/cleanup_datasets/update_metadata.py
new file mode 100755
index 0000000..c275807
--- /dev/null
+++ b/scripts/cleanup_datasets/update_metadata.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+"""
+Updates metadata in the database to match rev 1891.
+
+Remember to backup your database before running.
+"""
+from __future__ import print_function
+import os
+import sys
+from six.moves import configparser
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+import galaxy.app
+import galaxy.datatypes.tabular
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def usage(prog):
+    print("usage: %s galaxy.ini" % prog)
+    print("""
+Updates the metadata in the database to match rev 1891.
+
+Remember to backup your database before running.
+    """)
+
+
+def main():
+    if len(sys.argv) != 2 or sys.argv[1] == "-h" or sys.argv[1] == "--help":
+        usage(sys.argv[0])
+        sys.exit()
+    ini_file = sys.argv.pop(1)
+    conf_parser = configparser.ConfigParser({'here': os.getcwd()})
+    conf_parser.read(ini_file)
+    configuration = {}
+    for key, value in conf_parser.items("app:main"):
+        configuration[key] = value
+    app = galaxy.app.UniverseApplication( global_conf=ini_file, **configuration )
+
+    # Search out tabular datatypes (and subclasses) and initialize metadata
+    print("Seeking out tabular based files and initializing metadata")
+    for row in app.model.Dataset.table.select().execute():
+        data = app.model.Dataset.get(row.id)
+        if issubclass(type(data.datatype), type(app.datatypes_registry.get_datatype_by_extension('tabular'))):
+            print(row.id, data.extension)
+            # Call meta_data for all tabular files
+            # special case interval type where we do not want to overwrite chr, start, end, etc assignments
+            if issubclass(type(data.datatype), type(app.datatypes_registry.get_datatype_by_extension('interval'))):
+                galaxy.datatypes.tabular.Tabular().set_meta(data)
+            else:
+                data.set_meta()
+            app.model.context.add( data )
+            app.model.context.flush()
+
+    app.shutdown()
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/cleanup_datasets/update_metadata.sh b/scripts/cleanup_datasets/update_metadata.sh
new file mode 100644
index 0000000..7933a0d
--- /dev/null
+++ b/scripts/cleanup_datasets/update_metadata.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+# This script must be executed from the $UNIVERSE_HOME directory
+# e.g., sh ./scripts/cleanup_datasets/update_metadata.sh
+
+. ./scripts/get_python.sh
+. ./setup_paths.sh
+
+$GALAXY_PYTHON ./scripts/cleanup_datasets/update_metadata.py ./config/galaxy.ini "$@"
diff --git a/scripts/common_startup.sh b/scripts/common_startup.sh
new file mode 100755
index 0000000..ab913cf
--- /dev/null
+++ b/scripts/common_startup.sh
@@ -0,0 +1,153 @@
+#!/bin/sh
+set -e
+
+SET_VENV=1
+for arg in "$@"; do
+    [ "$arg" = "--skip-venv" ] && SET_VENV=0
+done
+
+# Detect whether Conda's Python is in use; if so, do not create a virtualenv
+if python -V 2>&1 | grep -q -e 'Anaconda' -e 'Continuum Analytics' ; then
+    CONDA_ALREADY_INSTALLED=1
+else
+    CONDA_ALREADY_INSTALLED=0
+fi
+
+DEV_WHEELS=0
+FETCH_WHEELS=1
+CREATE_VENV=1
+REPLACE_PIP=$SET_VENV
+COPY_SAMPLE_FILES=1
+
+for arg in "$@"; do
+    [ "$arg" = "--skip-eggs" ] && FETCH_WHEELS=0
+    [ "$arg" = "--skip-wheels" ] && FETCH_WHEELS=0
+    [ "$arg" = "--dev-wheels" ] && DEV_WHEELS=1
+    [ "$arg" = "--no-create-venv" ] && CREATE_VENV=0
+    [ "$arg" = "--no-replace-pip" ] && REPLACE_PIP=0
+    [ "$arg" = "--replace-pip" ] && REPLACE_PIP=1
+    [ "$arg" = "--stop-daemon" ] && FETCH_WHEELS=0
+    [ "$arg" = "--skip-samples" ] && COPY_SAMPLE_FILES=0
+done
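+
+# Typical invocations (illustrative):
+#   sh scripts/common_startup.sh --dev-wheels
+#   sh scripts/common_startup.sh --no-create-venv --no-replace-pip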
+
+SAMPLES="
+    config/migrated_tools_conf.xml.sample
+    config/shed_tool_conf.xml.sample
+    config/shed_tool_data_table_conf.xml.sample
+    config/shed_data_manager_conf.xml.sample
+    lib/tool_shed/scripts/bootstrap_tool_shed/user_info.xml.sample
+    tool-data/shared/ucsc/builds.txt.sample
+    tool-data/shared/ucsc/manual_builds.txt.sample
+    tool-data/shared/ucsc/ucsc_build_sites.txt.sample
+    tool-data/shared/igv/igv_build_sites.txt.sample
+    tool-data/shared/rviewer/rviewer_build_sites.txt.sample
+    static/welcome.html.sample
+"
+
+RMFILES="
+    lib/pkg_resources.pyc
+"
+
+if [ $COPY_SAMPLE_FILES -eq 1 ]; then
+    # Create any missing config/location files
+    for sample in $SAMPLES; do
+        file=${sample%.sample}
+        if [ ! -f "$file" -a -f "$sample" ]; then
+            echo "Initializing $file from $(basename "$sample")"
+            cp "$sample" "$file"
+        fi
+    done
+fi
+
+# remove problematic cached files
+for rmfile in $RMFILES; do
+    [ -f "$rmfile" ] && rm -f "$rmfile"
+done
+
+: ${GALAXY_CONFIG_FILE:=config/galaxy.ini}
+if [ ! -f "$GALAXY_CONFIG_FILE" ]; then
+    GALAXY_CONFIG_FILE=universe_wsgi.ini
+fi
+if [ ! -f "$GALAXY_CONFIG_FILE" ]; then
+    GALAXY_CONFIG_FILE=config/galaxy.ini.sample
+fi
+
+: ${GALAXY_VIRTUAL_ENV:=.venv}
+
+if [ $SET_VENV -eq 1 -a $CREATE_VENV -eq 1 ]; then
+    if [ ! -d "$GALAXY_VIRTUAL_ENV" ]
+    then
+        if [ $CONDA_ALREADY_INSTALLED -eq 1 ]; then
+            echo "There is no existing Galaxy virtualenv and Conda is available, so we are skipping virtualenv creation.  Please be aware that this may cause missing dependencies."
+            SET_VENV=0
+        else
+            # If .venv does not exist, and there is no conda available, attempt to create it.
+            # Ensure Python is a supported version before creating .venv
+            python ./scripts/check_python.py || exit 1
+            if command -v virtualenv >/dev/null; then
+                virtualenv -p python2.7 "$GALAXY_VIRTUAL_ENV"
+            else
+                vvers=13.1.2
+                vurl="https://pypi.python.org/packages/source/v/virtualenv/virtualenv-${vvers}.tar.gz"
+                vsha="aabc8ef18cddbd8a2a9c7f92bc43e2fea54b1147330d65db920ef3ce9812e3dc"
+                vtmp=$(mktemp -d -t galaxy-virtualenv-XXXXXX)
+                vsrc="$vtmp/$(basename $vurl)"
+                # SSL certificates are not checked to prevent problems with messed
+                # up client cert environments. We verify the download using a known
+                # good sha256 sum instead.
+                echo "Fetching $vurl"
+                if command -v curl >/dev/null; then
+                    curl --insecure -L -o "$vsrc" "$vurl"
+                elif command -v wget >/dev/null; then
+                    wget --no-check-certificate -O "$vsrc" "$vurl"
+                else
+                    python -c "import urllib; urllib.urlretrieve('$vurl', '$vsrc')"
+                fi
+                echo "Verifying $vsrc checksum is $vsha"
+                python -c "import hashlib; assert hashlib.sha256(open('$vsrc', 'rb').read()).hexdigest() == '$vsha', '$vsrc: invalid checksum'"
+                tar zxf "$vsrc" -C "$vtmp"
+                python "$vtmp/virtualenv-$vvers/virtualenv.py" "$GALAXY_VIRTUAL_ENV"
+                rm -rf "$vtmp"
+            fi
+        fi
+    fi
+fi
+
+if [ $SET_VENV -eq 1 ]; then
+    # If there is a .venv/ directory, assume it contains a virtualenv that we
+    # should run this instance in.
+    if [ -d "$GALAXY_VIRTUAL_ENV" ];
+    then
+        echo "Activating virtualenv at $GALAXY_VIRTUAL_ENV"
+        . "$GALAXY_VIRTUAL_ENV/bin/activate"
+        # Because it's a virtualenv, we assume $PYTHONPATH is unnecessary for
+        # anything in the venv to work correctly, and having it set can cause
+        # problems when there are conflicts with Galaxy's dependencies outside
+        # the venv (e.g. virtualenv-burrito's pip and six).
+        #
+        # If you are skipping the venv setup we shall assume you know what
+        # you're doing and will deal with any conflicts.
+        unset PYTHONPATH
+    fi
+
+    if [ -z "$VIRTUAL_ENV" ]; then
+        echo "ERROR: A virtualenv cannot be found. Please create a virtualenv in $GALAXY_VIRTUAL_ENV, or activate one."
+        exit 1
+    fi
+fi
+
+: ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
+if [ $REPLACE_PIP -eq 1 ]; then
+    pip install 'pip>=8.1'
+fi
+
+if [ $FETCH_WHEELS -eq 1 ]; then
+    pip install -r requirements.txt --index-url "${GALAXY_WHEELS_INDEX_URL}"
+    GALAXY_CONDITIONAL_DEPENDENCIES=$(PYTHONPATH=lib python -c "import galaxy.dependencies; print '\n'.join(galaxy.dependencies.optional('$GALAXY_CONFIG_FILE'))")
+    [ -z "$GALAXY_CONDITIONAL_DEPENDENCIES" ] || echo "$GALAXY_CONDITIONAL_DEPENDENCIES" | pip install -r /dev/stdin --index-url "${GALAXY_WHEELS_INDEX_URL}"
+fi
+
+if [ $FETCH_WHEELS -eq 1 -a $DEV_WHEELS -eq 1 ]; then
+    dev_requirements='./lib/galaxy/dependencies/dev-requirements.txt'
+    [ -f $dev_requirements ] && pip install -r $dev_requirements --index-url "${GALAXY_WHEELS_INDEX_URL}"
+fi
diff --git a/scripts/communication/communication_server.py b/scripts/communication/communication_server.py
new file mode 100755
index 0000000..53b394e
--- /dev/null
+++ b/scripts/communication/communication_server.py
@@ -0,0 +1,212 @@
+#!/usr/bin/env python
+
+"""
+Server for real-time communication between Galaxy users.
+
+First, install a few requirements:
+
+. GALAXY_ROOT/.venv/bin/activate                        # activate Galaxy's virtualenv
+pip install flask flask-login flask-socketio eventlet   # install the requirements
+
+Next, start the communication server with something like this:
+
+./scripts/communication/communication_server.py --port 7070 --host localhost
+
+Please make sure the host and port match the ones in ./config/galaxy.ini, and
+set the `id_secret`.
+
+This communication server can be controlled on three different levels:
+  1. The admin can activate/deactivate the communication server via ./config/galaxy.ini. [off by default]
+  2. The user can activate/deactivate it in their own personal settings via Galaxy user preferences. [off by default]
+  3. The user can activate/deactivate communications for a session directly in the communication window. [on by default]
+
+"""
+
+import argparse
+import os
+import sys
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+import hashlib
+
+import logging
+logging.basicConfig()
+log = logging.getLogger(__name__)
+
+from flask import Flask, request, make_response, current_app, send_file
+from flask_socketio import SocketIO, emit, disconnect, join_room, leave_room
+import flask_login
+from flask_login import current_user
+from datetime import timedelta
+from functools import update_wrapper
+
+from galaxy.model.orm.scripts import get_config
+from galaxy.model import mapping
+from galaxy.util.properties import load_app_properties
+from galaxy.web.security import SecurityHelper
+from galaxy.util.sanitize_html import sanitize_html
+
+# Get config file and load up SA session
+config = get_config( sys.argv )
+model = mapping.init( '/tmp/', config['db_url'] )
+sa_session = model.context.current
+
+# With the config file we can load the full app properties
+app_properties = load_app_properties(ini_file=config['config_file'])
+
+# We need the ID secret for configuring the security helper to decrypt
+# galaxysession cookies.
+if "id_secret" not in app_properties:
+    log.warning('No id_secret specified. Please set the "id_secret" in your galaxy.ini.')
+
+id_secret = app_properties.get('id_secret', 'dangerous_default')
+
+security_helper = SecurityHelper(id_secret=id_secret)
+# And get access to the models
+# Login manager to manage current_user functionality
+login_manager = flask_login.LoginManager()
+
+app = Flask(__name__)
+app.config['SECRET_KEY'] = id_secret
+login_manager.init_app(app)
+socketio = SocketIO(app)
+
+
+@login_manager.request_loader
+def findUserByCookie(request):
+    cookie_value = request.cookies.get('galaxysession')
+    if not cookie_value:
+        return None
+
+    session_key = security_helper.decode_guid(cookie_value)
+    user_session = sa_session.query(model.GalaxySession).filter_by(session_key=session_key).first()
+
+    if user_session:
+        return user_session.user
+
+    return None
+
+
+# Taken from flask.pocoo.org/snippets/56/
+def crossdomain(origin=None, methods=None, headers=None,
+                max_age=21600, attach_to_all=True,
+                automatic_options=True):
+    if methods is not None:
+        methods = ', '.join(sorted(x.upper() for x in methods))
+    if headers is not None and not isinstance(headers, basestring):
+        headers = ', '.join(x.upper() for x in headers)
+    if not isinstance(origin, basestring):
+        origin = ', '.join(origin)
+    if isinstance(max_age, timedelta):
+        max_age = max_age.total_seconds()
+
+    def get_methods():
+        if methods is not None:
+            return methods
+
+        options_resp = current_app.make_default_options_response()
+        return options_resp.headers['allow']
+
+    def decorator(f):
+        def wrapped_function(*args, **kwargs):
+            if automatic_options and request.method == 'OPTIONS':
+                resp = current_app.make_default_options_response()
+            else:
+                resp = make_response(f(*args, **kwargs))
+            if not attach_to_all and request.method != 'OPTIONS':
+                return resp
+
+            h = resp.headers
+
+            h['Access-Control-Allow-Origin'] = origin
+            h['Access-Control-Allow-Methods'] = get_methods()
+            h['Access-Control-Max-Age'] = str(max_age)
+            if headers is not None:
+                h['Access-Control-Allow-Headers'] = headers
+            return resp
+
+        f.provide_automatic_options = False
+        return update_wrapper(wrapped_function, f)
+    return decorator
+
+
+script_dir = os.path.dirname(os.path.realpath( __file__))
+communication_directory = os.path.join( script_dir, 'template' )
+
+
+@app.route('/')
+@crossdomain(origin='*')
+def index():
+    return send_file(os.path.join(communication_directory, 'communication.html'))
+
+
+@app.route('/communication.js')
+def static_script():
+    return send_file(os.path.join(communication_directory, 'communication.js'))
+
+
+@app.route('/communication.css')
+def static_style():
+    return send_file(os.path.join(communication_directory, 'communication.css'))
+
+
+@socketio.on('event connect', namespace='/chat')
+def event_connect(message):
+    log.info("%s connected" % (current_user.username,))
+
+
+@socketio.on('event broadcast', namespace='/chat')
+def event_broadcast(message):
+    message = sanitize_html(message['data'])
+
+    log.debug("%s broadcast '%s'" % (current_user.username, message))
+
+    emit('event response',
+        {'data': message, 'user': current_user.username, 'gravatar': hashlib.md5(current_user.email).hexdigest()}, broadcast=True)
+
+
+@socketio.on('event room', namespace='/chat')
+def send_room_message(message):
+    data = sanitize_html(message['data'])
+    room = sanitize_html(message['room'])
+
+    log.debug("%s sent '%s' to %s" % (current_user.username, message, room))
+
+    emit('event response room',
+        {'data': data, 'user': current_user.username, 'gravatar': hashlib.md5(current_user.email).hexdigest(), 'chatroom': room}, room=room)
+
+
+@socketio.on('event disconnect', namespace='/chat')
+def event_disconnect(message):
+    log.info("%s disconnected" % current_user.username)
+    disconnect()
+
+
+@socketio.on('join', namespace='/chat')
+def join(message):
+    room = sanitize_html(message['room'])
+
+    log.debug("%s joined %s" % (current_user.username, room))
+    join_room(room)
+
+    emit('event response room',
+        {'data': room, 'userjoin': current_user.username}, broadcast=True)
+
+
+@socketio.on('leave', namespace='/chat')
+def leave(message):
+    room = sanitize_html(message['room'])
+
+    log.debug("%s left %s" % (current_user.username, room))
+    leave_room(room)
+
+    emit('event response room',
+        {'data': room, 'userleave': current_user.username}, broadcast=True)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Real-time communication server for Galaxy.')
+    parser.add_argument('--port', type=int, default=7070, help='Port number on which the server should run.')
+    parser.add_argument('--host', default='localhost', help='Hostname of the communication server.')
+
+    args = parser.parse_args()
+    socketio.run(app, host=args.host, port=args.port)
diff --git a/scripts/communication/template/communication.css b/scripts/communication/template/communication.css
new file mode 100644
index 0000000..22d78d6
--- /dev/null
+++ b/scripts/communication/template/communication.css
@@ -0,0 +1,180 @@
+html, body {
+    height: 100%;
+    font-size: 12px;
+}
+
+body {
+    overflow: hidden;
+}
+
+.body-container {
+    opacity: 0;
+    transition: opacity 1s;
+}
+
+.body-container-loaded {
+    opacity: 1;
+}
+
+/* Styles for message text box */
+.message-textarea {
+    height: 50px !important;
+    width: 99.5%;
+    padding: 6px 3px;
+    font-size: inherit;
+}
+
+/* Styles for top right icons */
+.right_icons {
+    margin-left: 95%;
+}
+
+.chat-room-textbx {
+    width: 34%;
+    height: 10%;
+    padding: 6px 3px;
+    font-size: inherit;
+}
+
+ul, li,
+.anchor {
+    cursor: pointer;
+    color: black;
+}
+
+.nav>li>a:hover, .nav>li>a:focus {
+    background-color: #F7F8FA; 
+}
+
+.messages {
+    overflow-y: auto;
+    height: 100%;
+    overflow-x: hidden;
+    /*background-color: #F7F8FA;*/
+}
+
+.send_message {
+    margin-top: 5px;
+    margin-left: 2px;
+}
+
+.user_message {
+    background-color: #F7F8FA;
+    margin: 2px 0px 4px 2px;
+    padding: 2px;
+    box-shadow: 0px 1px 1px rgba(0, 0, 0, 0.5);
+    border-radius: 2px;
+}
+
+.user_name {
+    font-weight: 900;
+    padding-left: 0px;
+}
+
+.date_time {
+    font-style: italic;
+    padding-right: 0px;
+    font-size: 11px;
+}
+
+.message {
+    padding-top: 2px;
+}
+
+.tab-content {
+    height: 64%;
+}
+
+.tab-pane {
+    height: 100%;
+    padding: 5px 20px 5px 5px;
+}
+
+#chat_room_tab {
+    margin-left: 2%;
+}
+
+.row {
+    /* Override the margins, otherwise goes outside of bounds */
+    margin-right: 0px;
+    margin-left: 0px;
+}
+
+.nav li a {
+    padding: 7px 7px;
+}
+
+ul > li i {
+    padding-left: 4px;
+}
+
+.join-room {
+    margin-left: -2.3%;
+}
+
+.global-rooms {
+    overflow-y: auto;
+    height: 200px;
+    margin-top: 1%;
+}
+
+.global-room {
+    margin: 0px 2px 2px 0px;
+    padding: 3px 6px;
+    font-size: inherit; 
+}
+
+a:hover, a:visited {
+    text-decoration: none;
+}
+
+/****styles for overflow tabs****/
+.wrapper {
+    position: relative;
+    margin: 0 auto;
+    overflow: hidden;
+    height: 45px;
+}
+
+.list {
+    position: absolute;
+    left: 0px;
+    top: 0px;
+    min-width: 3000px;
+    margin-top: 0px;
+}
+
+.list li {
+    display: table-cell;
+    position: relative;
+    text-align: center;
+    color: #efefef;
+    vertical-align: middle;
+}
+
+.scroller {
+    text-align: center;
+    cursor: pointer;
+    display: none;
+    padding: 5px;
+    padding-top: 9px;
+    vertical-align: middle;
+    background-color: #fff;
+}
+
+.scroller-right {
+    float: right;
+}
+
+.scroller-left {
+    float: left;
+}
+
+.message-height, 
+.notification-padding {
+    padding-right: 0px;
+}
diff --git a/scripts/communication/template/communication.html b/scripts/communication/template/communication.html
new file mode 100644
index 0000000..1a65271
--- /dev/null
+++ b/scripts/communication/template/communication.html
@@ -0,0 +1,58 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+    <title>Chat</title>
+    <link rel="stylesheet" type="text/css" href="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css">
+    <link rel="stylesheet" type="text/css" href="//cdn.jsdelivr.net/jquery.mcustomscrollbar/3.0.6/jquery.mCustomScrollbar.min.css">
+    <link rel="stylesheet" type="text/css" href="communication.css">
+</head>
+<body class="body-container">
+<div class="right_icons">
+    <i id="online_status" class="anchor fa fa-comments" aria-hidden="true" title=""></i>
+    <i id="chat_history" class="anchor fa fa-history" aria-hidden="true" title="Show chat history"></i>
+</div>
+
+<div class="scroller scroller-left"><i class="fa fa-chevron-left" aria-hidden="true" title="Move left"></i></div>
+<div class="scroller scroller-right"><i class="fa fa-chevron-right" aria-hidden="true" title="Move right"></i></div>
+
+<div class="wrapper">
+    <ul class="nav nav-tabs list" id="chat_tabs">
+        <li class="active">
+            <a data-target="#all_chat_tab" data-toggle="tab" aria-expanded='true' title="All Chats">All Chats</a>
+        </li>
+        <li class="create-room">
+            <a data-target="#chat_room_tab" data-toggle="tab" aria-expanded='false' title="Chat Room">Chat Room</a>
+        </li>
+    </ul>
+</div>
+<div class="tab-content">
+    <div class="tab-pane active fade in" id="all_chat_tab">
+        <div id="all_messages" class="row mCustomScrollbar">
+        </div>
+    </div>
+    <div class="tab-pane fade" id="chat_room_tab">
+        <div id="join_room" class="join-room">
+            <input id="txtbox_chat_room" type="text" class="form-control chat-room-textbx" value=""
+                   placeholder="Type a room name to join...">
+            <br>
+            <div class="persistent-rooms-visibility">
+                <div class="room-text">Or click any available room to join</div>
+                <div id="global_rooms" class="global-rooms"></div>
+            </div>
+        </div>
+    </div>
+</div>
+<div class="send_message">
+    <textarea id="send_data" class="form-control clearable message-textarea" placeholder="Type your message...">
+    </textarea>
+</div>
+
+<script type="text/javascript" src="https://code.jquery.com/jquery-1.10.2.js"></script>
+<script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js"></script>
+<script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/socket.io/1.3.5/socket.io.min.js"></script>
+<script type="text/javascript" src="http://malihu.github.io/custom-scrollbar/jquery.mCustomScrollbar.concat.min.js"></script>
+<script src="https://use.fontawesome.com/89a733ecb7.js"></script>
+<script type="text/javascript" src="communication.js"></script>
+
+</body>
+</html>
diff --git a/scripts/communication/template/communication.js b/scripts/communication/template/communication.js
new file mode 100644
index 0000000..4f23f73
--- /dev/null
+++ b/scripts/communication/template/communication.js
@@ -0,0 +1,759 @@
+// the namespace chat events connect to
+var namespace = '/chat',
+    socket = io.connect(window.location.protocol + '//' + document.domain + ':' + location.port + namespace);
+
+// socketio events
+var events_module = {
+    // event handler for sent data from server
+    event_response: function (socket) {
+        socket.on('event response', function ( msg ) {
+            var message = "",
+                uid = utils.get_userid(),
+                $el_all_messages = $( '#all_messages' ),
+                $el_tab_li = $( "a[data-target='#all_chat_tab']" );
+
+            // builds the message to be displayed
+            message = utils.build_message( msg );
+            
+            // append only for non empty messages
+            if (msg.data.length > 0) {
+                utils.append_message( $el_all_messages, message );
+                utils.vertical_center_align_gravatar( $( '#all_messages .message' ) );
+                // adding message to build full chat history
+                utils.store_append_message( uid, $el_all_messages.html() );
+            }
+            // updates the user session storage with all the messages
+            sessionStorage[uid] = $el_all_messages.html();
+            // show the last item by scrolling to the end
+            utils.fancyscroll_to_last( $("#all_chat_tab") );
+            // Alert user if needed
+            if ( uid !== msg.user ) {
+                utils.show_notification( $el_tab_li );
+            }
+        });
+    },
+    // event handler for room messages
+    event_response_room: function (socket) {
+        socket.on('event response room', function ( msg ) {
+            var $el_all_messages = $( '#all_messages' ),
+                    message = {},
+                    uid = utils.get_userid(),
+                    tab_counter = 0,
+                    $el_tab_li = null,
+                    server_text_name = 'Notification';
+            // response when user joins
+            if (msg.userjoin) {
+                message = {
+                    data: msg.userjoin + " has joined " + msg.data,
+                    user: server_text_name,
+                };
+                utils.append_message( $el_all_messages, utils.build_message( message ) );
+                utils.fancyscroll_to_last( $("#all_chat_tab") );
+                // shows notification when message is from other user
+                if ( uid !== msg.userjoin ) {
+                    $el_tab_li = $( "a[data-target='#all_chat_tab']" );
+                    utils.show_notification( $el_tab_li );
+                }
+            } // response when user leaves
+            else if ( msg.userleave ) {
+                message = {
+                    data: msg.userleave + " has left " + msg.data,
+                    user: server_text_name,
+                };
+                utils.append_message( $el_all_messages, utils.build_message( message ) );
+                utils.fancyscroll_to_last( $("#all_chat_tab") );
+                // shows notification when message is from other user
+                if ( uid !== msg.userleave ) {
+                    $el_tab_li = $( "a[data-target='#all_chat_tab']" );
+                    utils.show_notification( $el_tab_li );
+                }
+            }
+            else { // normal message sharing when connected
+                var room = utils.check_room_by_roomname( click_events.connected_room, msg.chatroom ),
+                    $el_room_msg = $( '#all_messages_' + room.id );
+                utils.append_message( $el_room_msg, utils.build_message( msg ) );
+                utils.vertical_center_align_gravatar( $( '#all_messages_' + room.id + ' .message' ) );
+                utils.fancyscroll_to_last( $( "#galaxy_tabroom_" + room.id ) );
+                // if the pushed message is for some other user, show notification
+                if ( uid !== msg.user ) {
+                    $el_tab_li = $( "a[data-target='#galaxy_tabroom_" + room.id + "'" + " ]" );
+                    utils.show_notification( $el_tab_li );
+                }
+            }
+        });
+    },
+    // event handler for new connections
+    event_connect: function ( socket ) {
+        socket.on('connect', function () {
+            var send_data = {};
+            send_data.data = 'connected';
+            socket.emit( 'event connect', send_data );
+        });
+    }
+}
+// all the click events of buttons
+var click_events = {
+    // on form load, user is connected, so the value is true
+    is_connected: true,
+    active_tab: "#all_chat_tab",
+    connected_room: [],
+    tab_counter : 0,
+    broadcast_data: function ( socket ) {
+        $('#send_data').keydown(function ( e ) {
+            var $el_active_li = $( '.nav-tabs>li.active' ),
+                $el_send_data = $( '#send_data' ),
+                message = "";
+            message = $el_send_data.val();
+            message = message.trim(); // removes whitespaces
+            // return false if entered is pressed without any message
+            if( message.length == 0 && ( e.keyCode == 13 || e.which == 13 ) ) {
+                return false;
+            }
+            if( click_events.is_connected ) {
+                var send_data = {},
+                    event_name = "";
+                if ( e.keyCode == 13 || e.which == 13 ) { // if enter is pressed
+                    // if the tab is all chats
+                    if ( click_events.active_tab === '#all_chat_tab' ) {
+                        send_data.data = message;
+                        event_name = 'event broadcast';
+                    }
+                    else { // if the tab belongs to a room
+                        send_data.data = message;
+                        send_data.room = $el_active_li.children().attr( "title" );
+                        event_name = 'event room';
+                    }
+                    socket.emit( event_name, send_data );
+                    $el_send_data.val('');
+                    return false;
+                }
+            }
+        });
+    },
+    // sets the current active tab
+    active_element: function () {
+        $('.nav-tabs>li').click(function ( e ) {
+            var tab_content_id = null,
+                message_area_div_id = "",
+                message_area_id = "",
+                $el_create_textbox = $('#txtbox_chat_room');
+            if( e.target.attributes['data-target'] ) {
+                // sets the active tab
+                click_events.active_tab = e.target.attributes['data-target'].nodeValue;
+                // removes the background color
+                $( this ).children().css( 'background-color', '' );
+                // hides the message textarea for create room tab
+                utils.show_hide_textarea( $( this ).children() );
+                // finds the tab content and scrolls to last
+                message_area_div_id = $( this ).find('a').attr( 'data-target' );
+                if( message_area_div_id ) { // if it has message area
+                    utils.fancyscroll_to_last( $( message_area_div_id ) );
+                }
+            }
+        });
+    },
+    create_room: function() {
+       $(".create-room").click(function( e ) {
+           // creates global chat room links and
+           // registers their click events
+           $( ".global-rooms" ).html( "" );
+           utils.create_global_chatroom_links();
+           click_events.open_global_chat_room();
+       });
+    },
+    // event for connected and disconnected states
+    // toggles the connection status icon
+    connect_disconnect: function ( socket ) {
+        $('#online_status').click(function () {
+            var $el_online_status = $( '#online_status' ),
+                    $el_input_text = $( '#send_data' ),
+                    send_data = {},
+                    connected_message = 'Type your message...',
+                    uid = utils.get_userid();
+            if ( click_events.is_connected ) { // if connected, disconnect
+                click_events.make_disconnect( uid, $el_input_text, $el_online_status );
+            }
+            else { // if disconnected, reconnect
+                socket.connect();
+                click_events.is_connected = true;
+                sessionStorage['connected'] = true;
+                utils.update_online_status( $el_online_status, click_events.is_connected );
+                $el_input_text.prop( 'disabled', false );
+                $el_input_text.val('');
+                $el_input_text.prop( 'placeholder', connected_message );
+            }
+        });
+    },
+    // shows full chat history
+    show_chat_history: function () {
+        $('#chat_history').click(function ( e ) {
+            utils.fill_messages( localStorage[ utils.get_userid() ] );
+        });
+    },
+    // makes disconnect and raises disconnect event at the server
+    make_disconnect: function ( uid, $el_input_text, $el_online_status ) {
+        var send_data = {},
+            disconnected_message = 'You are now disconnected. To send/receive messages, please connect';
+        click_events.is_connected = false;
+        socket.emit( 'event disconnect', send_data );
+        sessionStorage.removeItem( uid );
+        sessionStorage[ 'connected' ] = false;
+        utils.update_online_status( $el_online_status, click_events.is_connected );
+        $el_input_text.val( '' );
+        $el_input_text.prop( 'placeholder', disconnected_message );
+        $el_input_text.prop( 'disabled', true );
+    },
+    // for creating/joining user created chat room
+    create_chat_room: function () {
+        var $el_txtbox_chat_room = $( '#txtbox_chat_room' ),
+            $el_chat_room_tab = $( '#chat_room_tab' ),
+            $el_chat_tabs = $( '#chat_tabs' ),
+            $el_tab_content = $( '.tab-content' ),
+            $el_msg_box = $( '#send_data' );
+        $el_txtbox_chat_room.keydown(function ( e ) {
+            var chat_room_name = $el_txtbox_chat_room.val();
+            if ( ( e.which === 13 || e.keyCode === 13 ) && chat_room_name.length > 0 ) { // if enter is pressed
+                chat_room_name = chat_room_name.trim(); // removes the leading and trailing whitespaces
+                utils.create_new_tab( chat_room_name, $el_txtbox_chat_room, $el_chat_room_tab, $el_chat_tabs, $el_tab_content, $el_msg_box );
+                $el_txtbox_chat_room.val( "" ) // clears create room textbox
+                return false;
+            }
+        });
+    },
+    // for chat rooms/ group chat
+    leave_close_room: function () {
+        var tab_counter = "",
+            room_name = "",
+            room_index = 0,
+            self = click_events;
+        $('.close-room').click(function ( e ) {
+            e.stopPropagation();
+            // gets room name from the title of anchor tag
+            room_name = e.target.parentElement.title;
+            // leaves room
+            socket.emit( 'leave', { room: room_name } );
+            // removes tab and its content
+            $( '.tab-content ' + $( this ).parent().attr( 'data-target' ) ).remove();
+            $( this ).parent().parent().remove();
+            room_index = utils.delete_from_rooms( self.connected_room, room_name );
+            delete self.connected_room[ room_index ];
+            // selects the last tab and makes it active
+            $('#chat_tabs a:last').tab( 'show' );
+            // hides or shows textarea
+            utils.show_hide_textarea( $('#chat_tabs a:last') );
+            // adjusts the left/right scrollers
+            // when a tab is deleted
+            utils.adjust_scrollers();
+            return false;
+        });
+    },
+    // registers overflow tabs left/right click events
+    overflow_left_right_scroll: function() {
+        var $el_scroll_right = $('.scroller-right'),
+            $el_scroll_left = $('.scroller-left');
+        $('.scroller-right').click(function() {
+            $el_scroll_left.fadeIn('slow');
+            $el_scroll_right.fadeOut('slow');
+            // performs animation when the scroller is clicked
+            // and shifts the tab list to the right end
+            $('.list').animate( {left: "+=" + utils.get_width_hidden_list() + "px"}, 'slow', function() { } );
+        });
+        $el_scroll_left.click(function() {
+            $el_scroll_right.fadeIn('slow');
+            $el_scroll_left.fadeOut('slow');
+            // performs animation when the scroller is clicked
+            // and shifts the tab list to the left end
+            $('.list').animate( {left: "-=" + utils.get_list_left_position() + "px"}, 'slow', function() { } );
+        });
+    },
+    // opens a global chat room
+    open_global_chat_room: function() {
+        var $el_txtbox_chat_room = $( '#txtbox_chat_room' ),
+            $el_chat_room_tab = $( '#chat_room_tab' ),
+            $el_chat_tabs = $( '#chat_tabs' ),
+            $el_tab_content = $( '.tab-content' ),
+            $el_msg_box = $( '#send_data' ),
+            chat_room_name = '';
+        // creates a tab for persistent chat room upon clicking
+        $('.global-room').click(function( e ) {
+            e.stopPropagation();
+            chat_room_name = $(this)[0].title;
+            utils.create_new_tab( chat_room_name, $el_txtbox_chat_room, $el_chat_room_tab, $el_chat_tabs, $el_tab_content, $el_msg_box );
+            return false;
+        });
+    },
+    // deletes chat log on keypress ctrl + l
+    // for the active chat window
+    delete_chat: function() {
+        $(document).keydown(function( e ) {
+             var message_area_div_id = "",
+                 message_area_id = "",
+                 $el_active_li = $( '.nav-tabs>li.active' );
+            // detects keypress of ctrl+l
+            if ( ( e.which == 76 || e.keyCode == 76 ) && e.ctrlKey ) {
+                message_area_div_id = $el_active_li.find('a').attr( 'data-target' );
+                if( message_area_div_id === "#all_chat_tab" ) {
+                    // deletes log for the main chat tab
+                    $('#all_chat_tab').find('#all_messages').html('');
+                }
+                else {
+                    // deletes log for the other tabs
+                    message_area_id = $( message_area_div_id + ' .messages' ).attr( 'id' );
+                    if( message_area_id ) { // deletes the chat log
+                        utils.clear_message_area( $( '#' + message_area_id ) );
+                    }
+                }
+            }
+        });
+    },
+    // event for body resize
+    event_window_resize: function() {
+        $( window ).resize(function() {
+            var body_width = $('.body-container').width();
+            if ( body_width > 600 ) {
+                $('.right_icons').css('margin-left', '97%');
+                $('.tab-content').height( '79%' );
+            }
+            else {
+                $('.right_icons').css('margin-left', '95%');
+                $('.tab-content').height( '65%' );
+            }
+            // adjusts the vertical alignment of the gravatars
+            utils.gravatar_align();
+            
+        });
+    }
+}
+// utility methods
+var utils = {
+    notification_color_code: '#FFD700',
+    connected_color_code: "#00FF00",
+    disconnected_color_code: "#FF0000",
+    // fill in all messages
+    fill_messages: function ( collection ) {
+        var uid = utils.get_userid(),
+            message_html = $.parseHTML( collection ),
+            $el_all_messages = $( '#all_messages' );
+        // clears the previous items
+        this.clear_message_area( $el_all_messages );
+        if ( collection ) {
+            $el_all_messages.append( $( '<div/>' ).html( message_html ) );
+        }
+        // show the last item by scrolling to the end
+        utils.fancyscroll_to_last( $("#all_chat_tab") );
+        utils.gravatar_align();
+    },
+    // get the current username of logged in user
+    get_userid: function () {
+        var query_string_start = location.search.indexOf( '?' ) + 1,
+            query_string_list = location.search.slice( query_string_start ).split( '&' );
+        return query_string_list[0].split( '=' )[1];
+    },
+    // append message
+    append_message: function ( $el, message ) {
+        $el.append( message );
+    },
+    // builds message for self
+    build_message: function ( original_message ) {
+        var from_uid = original_message.user,
+            message_data = original_message.data,
+            uid = utils.get_userid();
+
+        var user = {
+            username: original_message.user,
+            gravatar: original_message.gravatar,
+        }
+
+        if (from_uid === uid) {
+            // for our own user we override the text_name
+            user.username = "me";
+        }
+
+        return this.build_message_from_template( user, message_data );
+    },
+    // builds template for message display
+    build_message_from_template: function ( user, original_message ) {
+        var gravatar_col_content = '<img src="https://s.gravatar.com/avatar/' + user.gravatar + '?s=32&d=identicon" />';
+
+        var message_col_content = '<div class="row">'+
+            '   <div class="col-xs-6 user_name">' +
+                    user.username +
+            '   </div>' +
+            '   <div class="col-xs-6 text-right date_time" title="'+ this.get_date() +'">' +
+                    this.get_time() +
+            '   </div>' +
+            '</div>' +
+            '<div class="row user_message">' +
+                unescape( original_message ) +
+            '</div>';
+        if ( user.username === "me" ){
+            return '<div class="row message">' +
+            '<div class="col-xs-11 col-md-12 message-height">' +
+                message_col_content +
+            '</div>' +
+            '<div class="col-xs-1 col-md vertical-align">' +
+                gravatar_col_content +
+            '</div>' +
+            '</div>';
+        } else if ( user.username === "Notification" ){
+            return '<div class="row message">' +
+            '<div class="col-xs-11 col-md-12 notification-padding">' +
+                message_col_content +
+            '</div>' +
+            '</div>';
+        } else {
+            return '<div class="row message">' +
+            '<div class="col-xs-1 col-md vertical-align">' +
+                gravatar_col_content +
+            '</div>' +
+            '<div class="col-xs-11 col-md-12 message-height">' +
+                message_col_content +
+            '</div>' +
+            '</div>';
+        }
+    },
+    // adds an information about the online status
+    update_online_status: function ( $el, connected ) {
+        var connected_message = "You are online. Press to be offline.",
+            disconnected_message = "You are offline. Press to be online.";
+        if ( connected ) {
+            $el.prop( "title", connected_message ).css( "color", this.connected_color_code );
+        }
+        else {
+            $el.prop( "title", disconnected_message ).css( "color", this.disconnected_color_code );
+        }
+    },
+    // gets the current time
+    get_time: function () {
+        var currentdate = new Date(),
+                datetime = "",
+                hours = 0,
+                minutes = 0;
+        hours = ( currentdate.getHours() < 10 ) ? ( "0" + currentdate.getHours() ) : currentdate.getHours();
+        minutes = ( currentdate.getMinutes() < 10 ) ? ( "0" + currentdate.getMinutes() ) : currentdate.getMinutes();
+        datetime = hours + ":" + minutes;
+        return datetime;
+    },
+    // gets the current date
+    get_date: function () {
+        var currentdate = new Date(),
+                day,
+                month;
+        month = ( (currentdate.getMonth() + 1 ) < 10) ? ( "0" + ( currentdate.getMonth() + 1 ) ) : ( currentdate.getMonth() + 1 );
+        day = ( currentdate.getDate() < 10 ) ? ( "0" + currentdate.getDate() ) : currentdate.getDate();
+        return month + "/" + day + "/" + currentdate.getFullYear();
+    },
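+    // e.g. at 9:05 on July 4 these return "09:05" and "07/04/<year>"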
+    // clears the message area or chat logs
+    // from the currently active tab
+    clear_message_area: function ( $el ) {
+        $el.html( "" );
+    },
+    // shows notification and animating when a message is being
+    // pushed to an inactive tab(s)
+    show_notification: function ( $el ) {
+        if ( !$el.parent().hasClass( 'active' ) ) {
+            $el.css( 'background-color', this.notification_color_code );
+            for (var i = 2; i >= 1; i--) {
+                // shows the animation
+                $el.fadeOut( 150 ).fadeIn( 150 );
+            }
+        }
+    },
+    // adjusts the scrollers when a
+    // tab is added or deleted
+    adjust_scrollers: function() {
+        if ( $( '.wrapper' ).width() < this.get_list_width() ) {
+            $( '.scroller-right' ).show();
+        }
+        else {
+            $( '.scroller-right' ).hide();
+        }
+
+        if ( this.get_list_left_position() < 0 ) {
+            $( '.scroller-left' ).show();
+        }
+        else {
+            $( '.item' ).animate( {left: "-=" + this.get_list_left_position() + "px"}, 'slow' );
+            $( '.scroller-left'  ).hide();
+        }
+    },
+    // gets the width of all li elements
+    get_list_width: function() {
+        var total_width = 0;
+        $( '.list li' ).each(function() {
+            var li_width = $( this ).width();
+            total_width += li_width;
+        });
+        return total_width;
+    },
+    // gets the hidden width of the tab
+    get_width_hidden_list: function() {
+        var scroll_icon_width = 50;
+        return ( $( '.wrapper' ).width() - this.get_list_width() - this.get_list_left_position() - scroll_icon_width );
+    },
+    get_list_left_position: function() {
+        return $( '.list' ).position().left;
+    },
+    // checks the session storage on page load
+    // and updates connected status
+    checks_session_storage: function() {
+        var uid = utils.get_userid(),
+            $el_all_messages = $( '#all_messages' ),
+            $el_online_status = $( '#online_status' ),
+            $el_textarea = $( '#send_data' );
+        if ( sessionStorage[ 'connected' ] ) {
+            if ( sessionStorage[ 'connected' ] === 'true' || sessionStorage[ 'connected' ] === true ) {
+                utils.update_online_status( $el_online_status, true );
+                click_events.is_connected = true;
+            }
+            else {
+                click_events.make_disconnect( uid, $el_textarea, $el_online_status );
+                utils.clear_message_area( $el_all_messages );
+            }
+        }
+        else {
+            utils.update_online_status( $el_online_status, true );
+            click_events.is_connected = true;
+        }
+    },
+    // show/hide textarea message box
+    show_hide_textarea: function( $selector ) {
+         var $el_textarea = $( '#send_data' );
+         if ( $selector.attr( "data-target" ) === "#chat_room_tab" ) {
+             $el_textarea.css( 'display', 'none' );
+         }
+         else {
+             $el_textarea.css('display', 'block');
+             utils.set_focus_textarea( $el_textarea );
+         }
+    },
+    // save chat logs
+    store_append_message: function( key, data ) {
+        localStorage[key] = data;
+    },
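+    // (localStorage holds the full per-user chat history across browser
+    // sessions, while sessionStorage under the same uid key mirrors only the
+    // current session; see event_response above)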
+    create_global_chatroom_links: function() {
+        // global_rooms
+        var $el_room_container = $('#global_rooms'),
+            room_name = "",
+            room_template = "",
+            persistent_communication_rooms = [],
+            persistent_rooms_length = 0,
+            $el_persistent_rooms_visible = $(".persistent-rooms-visibility");
+        // gets an array of persistent communication rooms
+        persistent_communication_rooms = this.get_persistent_rooms();
+        persistent_rooms_length = persistent_communication_rooms.length;
+        if( persistent_rooms_length > 0 ) {
+            $el_persistent_rooms_visible.css('display', 'block');
+            // creates html template for persistent rooms
+            for(var room_counter = 0; room_counter < persistent_rooms_length; room_counter++ ) {
+                room_name = persistent_communication_rooms[room_counter];
+                room_template = "<button type='button' class='btn btn-primary global-room' title='" + room_name +
+                                "'>" + room_name + "</button>";
+                $el_room_container.append(room_template);
+            }
+        }
+        else {
+            $el_persistent_rooms_visible.css('display', 'none');
+        }
+    },
+    // creates a new tab and joins a chat room
+    create_new_tab: function( chat_room_name, $el_txtbox_chat_room, $el_chat_room_tab, $el_chat_tabs, $el_tab_content, $el_msg_box) {
+        var self = click_events,
+            tab_room_header_template = "",
+            tab_room_body_template = "",
+            tab_id = "",
+            message_area_id = "",
+            $el_active_li = $('ul>li.active'),
+            $el_textarea = $('#send_data'),
+            is_room = utils.check_room_by_roomname( self.connected_room, chat_room_name);
+        // checks if the room is already open and displays it if so,
+        // otherwise creates a new one
+        if( is_room ) {
+            $el_chat_tabs.find( "a[data-target='#galaxy_tabroom_" + is_room.id + "']" ).tab('show');
+        }
+        else {
+            // emits join room event
+            socket.emit('join', {room: chat_room_name});
+            // removes the active class from the chat creating tab
+            $el_chat_room_tab.removeClass('fade active in').addClass('fade');
+            $el_active_li.removeClass('active');
+            // create tab and message area ids for the
+            // new tab header and tab content elements
+            tab_id = "galaxy_tabroom_" + self.tab_counter;
+            message_area_id = "all_messages_" + self.tab_counter;
+            self.connected_room.push({
+                id: self.tab_counter,
+                name: chat_room_name
+            });
+            // create chat room tab header for new room
+            tab_room_header_template = "<li class='active'><a title='" + chat_room_name + "' data-target='#" + tab_id +
+                       "' data-toggle='tab' aria-expanded='false'>" + chat_room_name +
+                       "<i class='fa fa-times anchor close-room' title='Close room'></i></a></li>";
+            $el_chat_tabs.append( tab_room_header_template );
+            // create chat room tab body for new room
+            tab_room_body_template = "<div class='tab-pane active fade in' id='" + tab_id +
+                       "'><div id='" + message_area_id + "'" + " class='messages'></div></div>";
+            $el_tab_content.append(tab_room_body_template);
+            // registers leave room event
+            self.leave_close_room();
+            utils.create_fancy_scroll( $( '#' + tab_id ), "#"+tab_id );
+            click_events.tab_counter++;
+        }
+        self.active_element();
+        // displays the textarea
+        $el_msg_box.css('display', 'block');
+        utils.set_focus_textarea( $el_textarea );
+        // adjusts the left/right scrollers when a tab is created
+        utils.adjust_scrollers();
+    },
+    // returns persistent communication rooms from the iframe url
+    get_persistent_rooms: function() {
+        var query_string_start = location.search.indexOf('?') + 1,
+            query_string_list = location.search.slice(query_string_start).split('&'),
+            persistent_rooms_list = [];
+        if( query_string_list[1] ) {
+            // unescapes the list
+            query_string_list = unescape(query_string_list[1]);
+            persistent_rooms_list = query_string_list.split('=')[1];
+            if( persistent_rooms_list && persistent_rooms_list.length > 0 ) {
+                return query_string_list.split('=')[1].split(',');
+            }
+            else {
+                return [];
+            }
+        }
+        else {
+            return [];
+        }
+    },
+    // returns the room information if it exists
+    check_room_by_roomname: function( dictionary, room_name ) {
+        for(var ctr = 0; ctr < dictionary.length; ctr++) {
+            if( dictionary[ctr] ) {
+                if( room_name === dictionary[ctr].name ) {
+                    return dictionary[ctr];
+                }
+            }
+        }
+    },
+    // returns the index of the room to be deleted
+    // from the connected room list if the room is left by the user
+    delete_from_rooms: function( dictionary, item ) {
+        for(var ctr = 0; ctr < dictionary.length; ctr++) {
+            if( dictionary[ctr] ) {
+                if( item === dictionary[ctr].name ) {
+                    return ctr;
+                }
+            }
+        }
+    },
+    // clears the textarea and sets focus inside it
+    set_focus_textarea: function( $el ) {
+        $el.val("");
+        $el.focus();
+    },
+    // smooth transition of body element upon first load
+    load_transition: function() {
+        $('.body-container').addClass('body-container-loaded');
+    },
+    // creates fancy scroll for the messages area
+    create_fancy_scroll: function( $el, element_id ) {
+        $el.mCustomScrollbar({
+            theme:"minimal"
+        });
+        $( element_id + ' .mCSB_dragger_bar' ).css( 'background-color', 'black' );
+    },
+    // scrolls the fancy scroll to the last element
+    fancyscroll_to_last: function( $el ) {
+        $el.mCustomScrollbar( "scrollTo", "bottom" );
+    },
+    // vertically aligns the gravatar icon in the center
+    vertical_center_align_gravatar: function( $el ) {
+        var usermsg_length = $el.find('.message-height').length,
+            usermsg_height = ( $( $el.find('.message-height')[ usermsg_length - 1 ] ).height() / 2 ),
+            gravatar_height = ( $( $el.find( '.vertical-align img' )[0] ).height() / 2 );
+
+        if( gravatar_height === 0 ) {
+            gravatar_height = 16;
+        }
+        $( $el.find( '.vertical-align img' )[ usermsg_length - 1 ] ).css( 'padding-top', usermsg_height - gravatar_height );
+    },
+    
+    gravatar_align: function() {
+        var active_tab_target = $('li.active a').attr('data-target').split('_'),
+            tab_number = "",
+            selector = "", 
+            scroll_selector = "",
+            usermsg_height = 0,
+            gravatar_height = 0;
+        // finds the active tab's suffix number
+        tab_number = active_tab_target[active_tab_target.length - 1];
+        if( isNaN( tab_number ) ) {
+            selector = "#all_messages";
+            scroll_selector = "#all_chat_tab";
+        }
+        else {
+            selector = "#all_messages_" + tab_number;
+            scroll_selector = "#galaxy_tabroom_" + tab_number;
+        }
+        $( selector + ' .message' ).each(function( index ) {
+            usermsg_height = ( $( this ).find( '.message-height' ).height() / 2 ),
+            gravatar_height = ( $( this ).find( '.vertical-align img' ).height() / 2 );
+            $( this ).find( '.vertical-align img' ).css( 'padding-top', (usermsg_height - gravatar_height) );
+        });
+        // scrolls to the last element
+        utils.fancyscroll_to_last( $( scroll_selector ) );
+    }
+    
+}
+// this event fires when all the resources of the page
+// have been loaded
+$(window).load(function() {
+    var uid = utils.get_userid(),
+        $el_textarea = $('#send_data'),
+        $el_all_messages = $('#all_messages'),
+        $el_chat_tabs = $('#chat_tabs'),
+        main_tab_id = "#all_chat_tab",
+        $el_persistent_rooms_visible = $(".persistent-rooms-visibility");
+    // build tabs
+    $el_chat_tabs.tab();
+    // registers response event
+    events_module.event_response(socket);
+    // registers room response event
+    events_module.event_response_room(socket);
+    // registers connect event
+    events_module.event_connect(socket);
+    // registers create room event
+    click_events.create_chat_room(socket);
+    // broadcasts the data
+    click_events.broadcast_data(socket);
+    // connects/disconnects the user from the chat server
+    click_events.connect_disconnect(socket);
+    // registers event for showing chat log
+    click_events.show_chat_history();
+    click_events.active_element();
+    click_events.leave_close_room();
+    // register tab overflow scroll events
+    click_events.overflow_left_right_scroll();
+    // registers event for creating persistent rooms
+    click_events.create_room();
+    // registers event for deleting chat log
+    click_events.delete_chat();
+    // updates online status text
+    // by checking if user was connected or not
+    utils.checks_session_storage();
+    utils.fill_messages(sessionStorage[uid]);
+    // sets focus to the textarea
+    utils.set_focus_textarea( $el_textarea );
+    // sets smooth transition
+    utils.load_transition();
+    utils.create_fancy_scroll( $( main_tab_id ), main_tab_id );
+    // scrolls to the last element
+    utils.fancyscroll_to_last( $( main_tab_id ) );
+    $el_persistent_rooms_visible.css('display', 'none');
+    click_events.event_window_resize();
+});
diff --git a/scripts/create_db.py b/scripts/create_db.py
new file mode 100755
index 0000000..0d73b29
--- /dev/null
+++ b/scripts/create_db.py
@@ -0,0 +1,40 @@
+"""
+Creates the initial galaxy database schema using the settings defined in
+config/galaxy.ini.
+
+This script is also wrapped by create_db.sh.
+
+.. note: pass '-c /location/to/your_config.ini' for non-standard ini file
+locations.
+
+.. note: if no database_connection is set in galaxy.ini, the default SQLite
+database will be constructed.
+    Using the database_file setting in galaxy.ini will create the file at the
+    settings location (??)
+
+.. seealso: galaxy.ini, specifically the settings: database_connection and
+database_file
+"""
+import os.path
+import sys
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+from galaxy.model.migrate.check import create_or_verify_database as create_db
+from galaxy.model.orm.scripts import get_config
+from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as create_install_db
+from galaxy.webapps.tool_shed.model.migrate.check import create_or_verify_database as create_tool_shed_db
+
+
+def invoke_create():
+    config = get_config(sys.argv)
+    if config['database'] == 'galaxy':
+        create_db(config['db_url'], config['config_file'])
+    elif config['database'] == 'tool_shed':
+        create_tool_shed_db(config['db_url'])
+    elif config['database'] == 'install':
+        create_install_db(config['db_url'])
+
+
+if __name__ == "__main__":
+    invoke_create()
diff --git a/scripts/data_libraries/build_lucene_index.py b/scripts/data_libraries/build_lucene_index.py
new file mode 100644
index 0000000..5ffec9c
--- /dev/null
+++ b/scripts/data_libraries/build_lucene_index.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+"""
+Build index for full-text lucene search of files in data libraries.
+
+Requires a full text search server and configuration settings in
+galaxy.ini. See the lucene settings in the data library search section for more
+details.
+
+Run from the ~/scripts/data_libraries directory:
+%sh build_lucene_index.sh
+"""
+import ConfigParser
+import csv
+import os
+import sys
+import urllib
+import urllib2
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+import galaxy.model.mapping
+from galaxy import config, model
+
+
+def main( ini_file ):
+    sa_session, gconfig = get_sa_session( ini_file )
+    max_size = float( gconfig.get( "fulltext_max_size", 100 ) ) * 1048576
+    ignore_exts = gconfig.get( "fulltext_noindex_filetypes", "" ).split( "," )
+    search_url = gconfig.get( "fulltext_url", None )
+    if not search_url:
+        raise ValueError( "Need to specify search functionality in galaxy.ini" )
+    dataset_file = create_dataset_file( get_lddas( sa_session, max_size, ignore_exts ) )
+    try:
+        build_index( search_url, dataset_file )
+    finally:
+        if os.path.exists( dataset_file ):
+            os.remove( dataset_file )
+
+
+def build_index( search_url, dataset_file ):
+    url = "%s/index?%s" % ( search_url, urllib.urlencode( { "docfile": dataset_file } ) )
+    request = urllib2.Request( url )
+    request.get_method = lambda: "PUT"
+    urllib2.urlopen( request )
+
+
+def create_dataset_file( dataset_iter ):
+    dataset_file = os.path.join( os.getcwd(), "full-text-search-files.csv" )
+    out_handle = open( dataset_file, "w" )
+    writer = csv.writer( out_handle )
+    for did, dfile, dname in dataset_iter:
+        writer.writerow( [ did, dfile, dname ] )
+    out_handle.close()
+    return dataset_file
+
+
+def get_lddas( sa_session, max_size, ignore_exts ):
+    for ldda in sa_session.query( model.LibraryDatasetDatasetAssociation ).filter_by( deleted=False ):
+        if ( float( ldda.dataset.get_size() ) > max_size or ldda.extension in ignore_exts ):
+            fname = ""
+        else:
+            fname = ldda.dataset.get_file_name()
+        yield ldda.id, fname, _get_dataset_metadata(ldda).replace("\n", " ")
+
+
+def _get_dataset_metadata(ldda):
+    """Retrieve descriptions and information associated with a dataset.
+    """
+    lds = ldda.library_dataset
+    folder_info = _get_folder_info(lds.folder)
+    lds_info = lds.get_info()
+    if lds_info and not lds_info.startswith("upload"):
+        lds_info = lds_info.replace("no info", "")
+    else:
+        lds_info = ""
+    return "%s %s %s %s %s" % (lds.name or "", lds_info, ldda.metadata.dbkey,
+                               ldda.message, folder_info)
+
+
+def _get_folder_info(folder):
+    """Get names and descriptions for all parent folders except top level.
+    """
+    folder_info = ""
+    if folder and folder.parent:
+        folder_info = _get_folder_info(folder.parent)
+        folder_info += " %s %s" % (
+            folder.name.replace("Unnamed folder", ""),
+            folder.description or "")
+    return folder_info
+
+
+def get_sa_session( ini_file ):
+    conf_parser = ConfigParser.ConfigParser( { 'here': os.getcwd() } )
+    conf_parser.read( ini_file )
+    kwds = dict()
+    for key, value in conf_parser.items( "app:main" ):
+        kwds[ key ] = value
+    ini_config = config.Configuration( **kwds )
+    db_con = ini_config.database_connection
+    if not db_con:
+        db_con = "sqlite:///%s?isolation_level=IMMEDIATE" % ini_config.database
+    model = galaxy.model.mapping.init( ini_config.file_path,
+                                       db_con,
+                                       engine_options={},
+                                       create_tables=False )
+    return model.context.current, ini_config
+
+
+if __name__ == "__main__":
+    main( *sys.argv[1:] )
diff --git a/scripts/data_libraries/build_lucene_index.sh b/scripts/data_libraries/build_lucene_index.sh
new file mode 100644
index 0000000..0eb63d3
--- /dev/null
+++ b/scripts/data_libraries/build_lucene_index.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/data_libraries/build_lucene_index.py ./config/galaxy.ini
diff --git a/scripts/data_libraries/build_whoosh_index.py b/scripts/data_libraries/build_whoosh_index.py
new file mode 100644
index 0000000..811ccdb
--- /dev/null
+++ b/scripts/data_libraries/build_whoosh_index.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+"""
+Build index for full-text whoosh search of files in data libraries.
+
+Requires configuration settings in galaxy.ini. See the whoosh settings in the
+data library search section for more details.
+
+Run from the ~/scripts/data_libraries directory:
+%sh build_whoosh_index.sh
+"""
+from __future__ import print_function
+
+import os
+import sys
+
+from six import text_type
+from six.moves import configparser
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+# Whoosh is compatible with Python 2.5+. Try to import Whoosh and set a flag to indicate whether search is enabled.
+try:
+    from whoosh.filedb.filestore import FileStorage
+    from whoosh.fields import Schema, STORED, TEXT
+    whoosh_search_enabled = True
+    schema = Schema( id=STORED, name=TEXT, info=TEXT, dbkey=TEXT, message=TEXT )
+    import galaxy.model.mapping
+    from galaxy import config, model
+except ImportError:
+    whoosh_search_enabled = False
+    schema = None
+
+
+def build_index( sa_session, whoosh_index_dir ):
+    storage = FileStorage( whoosh_index_dir )
+    index = storage.create_index( schema )
+    writer = index.writer()
+
+    def to_unicode( a_basestr ):
+        if not isinstance(a_basestr, text_type):
+            return text_type( a_basestr, 'utf-8' )
+        else:
+            return a_basestr
+    lddas_indexed = 0
+    for id, name, info, dbkey, message in get_lddas( sa_session ):
+        writer.add_document( id=id,
+                             name=to_unicode( name ),
+                             info=to_unicode( info ),
+                             dbkey=to_unicode( dbkey ),
+                             message=to_unicode( message ) )
+        lddas_indexed += 1
+    writer.commit()
+    print("Number of active library datasets indexed: ", lddas_indexed)
+
+
+def get_lddas( sa_session ):
+    for ldda in sa_session.query( model.LibraryDatasetDatasetAssociation ).filter_by( deleted=False ):
+        id = ldda.id
+        name = ldda.name
+        info = ldda.library_dataset.get_info()
+        if info and not info.startswith( 'upload' ):
+            info = info.replace( 'no info', '' )
+        else:
+            info = ''
+        dbkey = ldda.metadata.dbkey
+        if ldda.message:
+            message = ldda.message
+        else:
+            message = ''
+        yield id, name, info, dbkey, message
+
+
+def get_sa_session_and_needed_config_settings( ini_file ):
+    conf_parser = configparser.ConfigParser( { 'here': os.getcwd() } )
+    conf_parser.read( ini_file )
+    kwds = dict()
+    for key, value in conf_parser.items( "app:main" ):
+        kwds[ key ] = value
+    config_settings = config.Configuration( **kwds )
+    db_con = config_settings.database_connection
+    if not db_con:
+        db_con = "sqlite:///%s?isolation_level=IMMEDIATE" % config_settings.database
+    model = galaxy.model.mapping.init( config_settings.file_path, db_con, engine_options={}, create_tables=False )
+    return model.context.current, config_settings
+
+
+if __name__ == "__main__":
+    if whoosh_search_enabled:
+        ini_file = sys.argv[1]
+        sa_session, config_settings = get_sa_session_and_needed_config_settings( ini_file )
+        whoosh_index_dir = config_settings.whoosh_index_dir
+        build_index( sa_session, whoosh_index_dir )
diff --git a/scripts/data_libraries/build_whoosh_index.sh b/scripts/data_libraries/build_whoosh_index.sh
new file mode 100644
index 0000000..ac60d14
--- /dev/null
+++ b/scripts/data_libraries/build_whoosh_index.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/data_libraries/build_whoosh_index.py ./config/galaxy.ini
diff --git a/scripts/db_shell.py b/scripts/db_shell.py
new file mode 100644
index 0000000..e701439
--- /dev/null
+++ b/scripts/db_shell.py
@@ -0,0 +1,101 @@
+# This script allows easy access to Galaxy's database layer via the
+# Galaxy models. For example:
+# % python -i scripts/db_shell.py
+# >>> new_user = User("admin@gmail.com")
+# >>> new_user.set_password_cleartext('changeme')
+# >>> sa_session.add(new_user)
+# >>> sa_session.commit()
+# >>> sa_session.query(User).all()
+#
+# You can also use this script as a library, for instance see https://gist.github.com/1979583
+# TODO: This script overlaps a lot with manage_db.py and create_db.py;
+# these should maybe be refactored to remove duplication.
+from __future__ import print_function
+
+import datetime
+import decimal
+import os.path
+import sys
+
+from six import string_types
+
+# Setup DB scripting environment
+from sqlalchemy import *  # noqa
+from sqlalchemy.orm import *  # noqa
+from sqlalchemy.exc import *  # noqa
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+from galaxy.model import *  # noqa
+from galaxy.model.mapping import init
+from galaxy.model.orm.scripts import get_config
+
+if sys.version_info > (3,):
+    long = int
+
+db_url = get_config( sys.argv )['db_url']
+sa_session = init( '/tmp/', db_url ).context
+
+
+# Helper function for debugging sqlalchemy queries...
+# http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query
+def printquery(statement, bind=None):
+    """
+    print a query, with values filled in
+    for debugging purposes *only*
+    for security, you should always separate queries from their values
+    please also note that this function is quite slow
+    """
+    import sqlalchemy.orm
+    if isinstance(statement, sqlalchemy.orm.Query):
+        if bind is None:
+            bind = statement.session.get_bind(
+                statement._mapper_zero_or_none() )
+        statement = statement.statement
+    elif bind is None:
+        bind = statement.bind
+
+    dialect = bind.dialect
+    compiler = statement._compiler(dialect)
+
+    class LiteralCompiler(compiler.__class__):
+        def visit_bindparam(
+                self, bindparam, within_columns_clause=False,
+                literal_binds=False, **kwargs
+        ):
+            return super(LiteralCompiler, self).render_literal_bindparam(
+                bindparam,
+                within_columns_clause=within_columns_clause,
+                literal_binds=literal_binds,
+                **kwargs
+            )
+
+        def render_literal_value(self, value, type_):
+            """Render the value of a bind parameter as a quoted literal.
+
+            This is used for statement sections that do not accept bind parameters
+            on the target driver/database.
+
+            This should be implemented by subclasses using the quoting services
+            of the DBAPI.
+
+            """
+            if isinstance(value, string_types):
+                value = value.replace("'", "''")
+                return "'%s'" % value
+            elif value is None:
+                return "NULL"
+            elif isinstance(value, (float, int, long)):
+                return repr(value)
+            elif isinstance(value, decimal.Decimal):
+                return str(value)
+            elif isinstance(value, datetime.datetime):
+                return "TO_DATE('%s','YYYY-MM-DD HH24:MI:SS')" % value.strftime("%Y-%m-%d %H:%M:%S")
+
+            else:
+                raise NotImplementedError(
+                    "Don't know how to literal-quote value %r" % value
+                )
+
+    compiler = LiteralCompiler(dialect, statement)
+    print(compiler.process(statement))
diff --git a/scripts/drmaa_external_killer.py b/scripts/drmaa_external_killer.py
new file mode 100755
index 0000000..bdac661
--- /dev/null
+++ b/scripts/drmaa_external_killer.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+
+"""
+Terminates a DRMAA job if given a job id and (appropriate) user id.
+"""
+
+import errno
+import os
+import pwd
+import sys
+
+import drmaa
+
+
+def validate_parameters():
+    if len(sys.argv) < 3:
+        sys.stderr.write("usage: %s [job ID] [user uid]\n" % sys.argv[0])
+        exit(1)
+
+    jobID = sys.argv[1]
+    uid = int(sys.argv[2])
+    return jobID, uid
+
+
+def set_user(uid):
+    try:
+        gid = pwd.getpwuid(uid).pw_gid
+        os.setgid(gid)
+        os.setuid(uid)
+    except OSError as e:
+        if e.errno == errno.EPERM:
+            sys.stderr.write("error: setuid(%d) failed: permission denied. Did you setup 'sudo' correctly for this script?\n" % uid )
+            exit(1)
+        else:
+            pass
+    if os.getuid() == 0:
+        sys.stderr.write("error: UID is 0 (root) after changing user. This script should not be run as root. aborting.\n" )
+        exit(1)
+    if os.geteuid() == 0:
+        sys.stderr.write("error: EUID is 0 (root) after changing user. This script should not be run as root. aborting.\n" )
+        exit(1)
+
+
+def main():
+    jobID, uid = validate_parameters()
+    set_user(uid)
+    s = drmaa.Session()
+    s.initialize()
+    s.control(jobID, drmaa.JobControlAction.TERMINATE)
+    s.exit()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/drmaa_external_runner.py b/scripts/drmaa_external_runner.py
new file mode 100755
index 0000000..1231af9
--- /dev/null
+++ b/scripts/drmaa_external_runner.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python
+
+"""
+Submit a DRMAA job given a user id and a job template file (in JSON format)
+defining any or all of the following: args, remoteCommand, outputPath,
+errorPath, nativeSpecification, workingDirectory, jobName, email, project
+"""
+from __future__ import print_function
+import errno
+import json
+import os
+import pwd
+import sys
+
+import drmaa
+
+DRMAA_jobTemplate_attributes = [ 'args', 'remoteCommand', 'outputPath', 'errorPath', 'nativeSpecification',
+                                 'workingDirectory', 'jobName', 'email', 'project' ]
+
+
+def load_job_template_from_file(jt, filename):
+    f = open(filename, 'r')
+    data = json.load(f)
+    for attr in DRMAA_jobTemplate_attributes:
+        if attr in data:
+            setattr(jt, attr, data[attr])
+
+
+def valid_numeric_userid(userid):
+    try:
+        uid = int(userid)
+    except ValueError:
+        return False
+    try:
+        pwd.getpwuid(uid)
+    except KeyError:
+        sys.stderr.write("error: User-ID (%d) is not valid.\n" % uid)
+        exit(1)
+    return True
+
+
+def get_user_id_by_name(username):
+    try:
+        pw = pwd.getpwnam(username)
+    except KeyError:
+        sys.stderr.write("error: User name (%s) is not valid.\n" % username)
+        exit(1)
+    return pw.pw_uid
+
+
+def json_file_exists(json_filename):
+    if not os.path.exists(json_filename):
+        sys.stderr.write("error: JobTemplate file (%s) doesn't exist\n" % ( json_filename ) )
+        exit(1)
+
+    return True
+
+
+def validate_parameters():
+    assign_all_groups = False
+    if "--assign_all_groups" in sys.argv:
+        assign_all_groups = True
+        sys.argv.remove("--assign_all_groups")
+
+    if len(sys.argv) < 3:
+        sys.stderr.write("usage: %s [USER-ID] [JSON-JOB-TEMPLATE-FILE]\n" % sys.argv[0])
+        exit(1)
+
+    userid = sys.argv[1]
+    json_filename = sys.argv[2]
+
+    if valid_numeric_userid(userid):
+        uid = int(userid)
+    else:
+        uid = get_user_id_by_name(userid)
+
+    if uid == 0:
+        sys.stderr.write("error: userid must not be 0 (root)\n")
+        exit(1)
+
+    return uid, json_filename, assign_all_groups
+
+
+def set_user(uid, assign_all_groups):
+    try:
+        # Get user's default group and set it to current process to make sure file permissions are inherited correctly
+        # Solves issue with permission denied for JSON files
+        gid = pwd.getpwuid(uid).pw_gid
+        import grp
+        os.setgid(gid)
+        if assign_all_groups:
+            # Added lines to assure read/write permission for groups
+            user = pwd.getpwuid(uid).pw_name
+            groups = [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]
+
+            os.setgroups(groups)
+        os.setuid(uid)
+
+    except OSError as e:
+        if e.errno == errno.EPERM:
+            sys.stderr.write( "error: setuid(%d) failed: permission denied. Did you setup 'sudo' correctly for this script?\n" % uid )
+            exit(1)
+        else:
+            pass
+
+    if os.getuid() == 0:
+        sys.stderr.write( "error: UID is 0 (root) after changing user. This script should not be run as root. aborting.\n" )
+        exit(1)
+
+    if os.geteuid() == 0:
+        sys.stderr.write( "error: EUID is 0 (root) after changing user. This script should not be run as root. aborting.\n" )
+        exit(1)
+
+
+def main():
+    userid, json_filename, assign_all_groups = validate_parameters()
+    set_user(userid, assign_all_groups)
+    json_file_exists(json_filename)
+    # Added to disable LSF-generated messages that would interfere with this
+    # script. Fix thanks to Chong Chen at IBM.
+    os.environ['BSUB_QUIET'] = 'Y'
+    s = drmaa.Session()
+    s.initialize()
+    jt = s.createJobTemplate()
+    load_job_template_from_file(jt, json_filename)
+    # runJob will raise if there's a submission error
+    jobId = s.runJob(jt)
+    s.deleteJobTemplate(jt)
+    s.exit()
+
+    # Print the Job-ID and exit. Galaxy will pick it up from there.
+    print(jobId)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/edam_mapping.py b/scripts/edam_mapping.py
new file mode 100644
index 0000000..f82685a
--- /dev/null
+++ b/scripts/edam_mapping.py
@@ -0,0 +1,61 @@
+"""This script loads a Galaxy datatypes registry against Galaxy's datatypes_conf.xml.sample file
+and uses it to generate a tabular file with four columns
+
+ - Galaxy datatype as short extension (e.g. bam)
+ - EDAM format (e.g. format_XXXX)
+ - EDAM label.
+ - EDAM definition.
+
+This file is printed to standard out. This script is designed to be
+run from the Galaxy root.
+
+ % python script/edam_mapping.py > edam_mapping.tsv
+"""
+from __future__ import absolute_import
+from __future__ import print_function
+
+import os
+import sys
+import urllib2
+from xml import etree
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+import galaxy.model
+import galaxy.datatypes.registry
+
+SCRIPTS_DIR = os.path.dirname(__file__)
+PROJECT_DIR = os.path.abspath(os.path.join(SCRIPTS_DIR, os.pardir))
+CONFIG_FILE = os.path.join(PROJECT_DIR, "config", "datatypes_conf.xml.sample")
+
+datatypes_registry = galaxy.datatypes.registry.Registry()
+datatypes_registry.load_datatypes(root_dir=PROJECT_DIR, config=CONFIG_FILE)
+
+EDAM_OWL_URL = "http://data.bioontology.org/ontologies/EDAM/submissions/25/download?apikey=8b5b7825-538d-40e0-9e9e-5ab9274a9aeb"
+
+
+if not os.path.exists("/tmp/edam.owl"):
+    open("/tmp/edam.owl", "w").write( urllib2.urlopen( EDAM_OWL_URL ).read() )
+
+
+owl_xml_tree = etree.ElementTree.parse("/tmp/edam.owl")
+format_info = {}
+for child in owl_xml_tree.getroot().findall('{http://www.w3.org/2002/07/owl#}Class'):
+    about = child.attrib.get("{http://www.w3.org/1999/02/22-rdf-syntax-ns#}about")
+    if not about:
+        continue
+    if not about.startswith("http://edamontology.org/format_"):
+        continue
+    the_format = about[len("http://edamontology.org/"):]
+    label = child.find("{http://www.w3.org/2000/01/rdf-schema#}label").text
+    definition = ""
+    def_el = child.find("{http://www.geneontology.org/formats/oboInOwl#}hasDefinition")
+    if def_el is not None:
+        definition = def_el.text
+    format_info[the_format] = {"label": label, "definition": definition}
+
+for ext, edam_format in sorted(datatypes_registry.edam_formats.items()):
+    edam_info = format_info[edam_format]
+    edam_label = edam_info["label"]
+    edam_definition = edam_info["definition"]
+    print("%s\t%s\t%s\t%s" % (ext, edam_format, edam_label, edam_definition))
diff --git a/scripts/external_chown_script.py b/scripts/external_chown_script.py
new file mode 100755
index 0000000..cf47e55
--- /dev/null
+++ b/scripts/external_chown_script.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+import os
+import sys
+
+
+def validate_parameters():
+    if len(sys.argv) < 4:
+        sys.stderr.write("usage: %s path user_name gid\n" % sys.argv[0])
+        exit(1)
+
+    path = sys.argv[1]
+    galaxy_user_name = sys.argv[2]
+    gid = sys.argv[3]
+
+    return path, galaxy_user_name, gid
+
+
+def main():
+    path, galaxy_user_name, gid = validate_parameters()
+    os.system('chown -Rh %s %s' % (galaxy_user_name, path))
+    os.system('chgrp -Rh %s %s' % (gid, path))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/extract_dataset_part.py b/scripts/extract_dataset_part.py
new file mode 100644
index 0000000..26bcc4e
--- /dev/null
+++ b/scripts/extract_dataset_part.py
@@ -0,0 +1,48 @@
+"""
+Reads a JSON file and uses it to call into a datatype class to extract
+a subset of a dataset for processing.
+
+Used by jobs that split large files into pieces to be processed concurrently
+on a grid in a scatter-gather mode. This does part of the scatter.
+
+"""
+import json
+import logging
+import os
+import sys
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+# This junk is here to prevent loading errors
+import galaxy.model.mapping  # need to load this before we unpickle, in order to setup properties assigned by the mappers
+
+galaxy.model.Job()  # this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here
+
+logging.basicConfig()
+log = logging.getLogger( __name__ )
+
+
+def __main__():
+    """
+    Argument: a JSON file
+    """
+    file_path = sys.argv.pop( 1 )
+    if not os.path.isfile(file_path):
+        # Nothing to do - some splitters don't write a JSON file
+        sys.exit(0)
+    data = json.load(open(file_path, 'r'))
+    try:
+        class_name_parts = data['class_name'].split('.')
+        module_name = '.'.join(class_name_parts[:-1])
+        class_name = class_name_parts[-1]
+        mod = __import__(module_name, globals(), locals(), [class_name])
+        cls = getattr(mod, class_name)
+        if not cls.process_split_file(data):
+            sys.stderr.write('Writing split file failed\n')
+            sys.exit(1)
+    except Exception as e:
+        sys.stderr.write(str(e))
+        sys.exit(1)
+
+
+__main__()
diff --git a/scripts/extract_toolbox_sections.py b/scripts/extract_toolbox_sections.py
new file mode 100644
index 0000000..89a898a
--- /dev/null
+++ b/scripts/extract_toolbox_sections.py
@@ -0,0 +1,137 @@
+import os
+from collections import defaultdict
+from xml.etree import ElementTree as ET
+
+# Todo: ""
+# execute from galaxy root dir
+
+tooldict = defaultdict(list)
+
+
+def main():
+    doc = ET.parse("tool_conf.xml")
+    root = doc.getroot()
+
+    # index range 1-1000, current sections/tools divided between 250-750
+    sectionindex = 250
+    sectionfactor = int( 500 / len( root.getchildren() ) )
+
+    for rootchild in root.getchildren():
+        currentsectionlabel = ""
+        if ( rootchild.tag == "section" ):
+            sectionname = rootchild.attrib['name']
+            # per section tool index range 1-1000, current labels/tools
+            # divided between 20 and 750
+            toolindex = 250
+            toolfactor = int( 500 / len( rootchild.getchildren() ) )
+            currentlabel = ""
+            for sectionchild in rootchild.getchildren():
+                if ( sectionchild.tag == "tool" ):
+                    addToToolDict(sectionchild, sectionname, sectionindex, toolindex, currentlabel)
+                    toolindex += toolfactor
+                elif ( sectionchild.tag == "label" ):
+                    currentlabel = sectionchild.attrib["text"]
+            sectionindex += sectionfactor
+        elif ( rootchild.tag == "tool" ):
+            addToToolDict(rootchild, "", sectionindex, None, currentsectionlabel)
+            sectionindex += sectionfactor
+        elif ( rootchild.tag == "label" ):
+            currentsectionlabel = rootchild.attrib["text"]
+            sectionindex += sectionfactor
+
+    # scan galaxy root tools dir for tool-specific xmls
+    toolconffilelist = getfnl( os.path.join(os.getcwd(), "tools" ) )
+
+    # foreach tool xml:
+    #   check if the tags element exists in the tool xml (as child of <tool>)
+    #   if not, add empty tags element for later use
+    #   if this tool is in the above tooldict, add the toolboxposition element to the tool xml
+    #   if not, then nothing.
+    for toolconffile in toolconffilelist:
+        hastags = False
+        hastoolboxpos = False
+
+        # parse tool config file into a document structure as defined by the ElementTree
+        tooldoc = ET.parse(toolconffile)
+        # get the root element of the toolconfig file
+        tooldocroot = tooldoc.getroot()
+        # check tags element, set flag
+        tagselement = tooldocroot.find("tags")
+        # (ElementTree elements with no children are falsy, so compare to None)
+        if tagselement is not None:
+            hastags = True
+        # check if toolboxposition element already exists in this toolconfig file
+        toolboxposelement = tooldocroot.find("toolboxposition")
+        if toolboxposelement is not None:
+            hastoolboxpos = True
+
+        if ( not ( hastags and hastoolboxpos ) ):
+            original = open( toolconffile, 'r' )
+            contents = original.readlines()
+            original.close()
+
+            # the new elements will be added directly below the root tool element
+            addelementsatposition = 1
+            # but what's on the first line? Root or not?
+            if ( contents[0].startswith("<?") ):
+                addelementsatposition = 2
+            newelements = []
+            if ( not hastoolboxpos ):
+                if ( toolconffile in tooldict ):
+                    for attributes in tooldict[toolconffile]:
+                        # create toolboxposition element
+                        sectionelement = ET.Element("toolboxposition")
+                        sectionelement.attrib = attributes
+                        sectionelement.tail = "\n  "
+                        newelements.append( ET.tostring(sectionelement, 'utf-8') )
+
+            if ( not hastags ):
+                # create empty tags element
+                newelements.append( "<tags/>\n  " )
+
+            contents = ( contents[ 0:addelementsatposition ] + newelements +
+                         contents[ addelementsatposition: ] )
+
+            # write the updated contents back to the tool config file in place
+            newtoolconffile = open( toolconffile, 'w' )
+            newtoolconffile.writelines( contents )
+            newtoolconffile.close()
+
+
+def addToToolDict(tool, sectionname, sectionindex, toolindex, currentlabel):
+    toolfile = tool.attrib["file"]
+    realtoolfile = os.path.join(os.getcwd(), "tools", toolfile)
+
+    # define attributes for the toolboxposition xml-tag
+    attribdict = {}
+    if ( sectionname ):
+        attribdict[ "section" ] = sectionname
+    if ( currentlabel ):
+        attribdict[ "label" ] = currentlabel
+    if ( sectionindex ):
+        attribdict[ "sectionorder" ] = str(sectionindex)
+    if ( toolindex ):
+        attribdict[ "order" ] = str(toolindex)
+    tooldict[ realtoolfile ].append(attribdict)
+
+
+# Build a list of all toolconf xml files in the tools directory
+def getfnl(startdir):
+    filenamelist = []
+    for root, dirs, files in os.walk(startdir):
+        for fn in files:
+            fullfn = os.path.join(root, fn)
+            if fn.endswith('.xml'):
+                try:
+                    doc = ET.parse(fullfn)
+                except ET.ParseError:
+                    print "Oops, bad xml in:", fullfn
+                    raise
+                rootelement = doc.getroot()
+                # here we check if this xml file actually is a tool conf xml!
+                if rootelement.tag == 'tool':
+                    filenamelist.append(fullfn)
+    return filenamelist
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/fetch_eggs.py b/scripts/fetch_eggs.py
new file mode 100755
index 0000000..5236a73
--- /dev/null
+++ b/scripts/fetch_eggs.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+from __future__ import print_function
+from os import pardir
+from os.path import join, abspath, dirname
+from sys import exit
+
+msg = """
+Eggs in this release of Galaxy have been replaced by Python's newer packaging
+format, wheels. Please use scripts/common_startup.sh to set up your
+environment:
+
+cd {dir} && ./scripts/common_startup.sh
+
+This will create a Python virtualenv and install Galaxy's dependencies into it.
+
+If you start Galaxy using means other than run.sh (as you probably do if you
+are seeing this message), be sure to activate the virtualenv before starting,
+using:
+
+. {venv}/bin/activate
+
+If you already run Galaxy in its own virtualenv, you can reuse your existing
+virtualenv with:
+
+cd {dir} && ./scripts/common_startup.sh --skip-venv
+"""
+
+galaxy = abspath(join(dirname(__file__), pardir))
+venv = join(galaxy, '.venv')
+print(msg.format(dir=abspath(join(dirname(__file__), pardir)),
+                 venv=venv))
+exit(1)
diff --git a/scripts/functional_tests.py b/scripts/functional_tests.py
new file mode 100644
index 0000000..b7b056c
--- /dev/null
+++ b/scripts/functional_tests.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+"""Test driver for many Galaxy Python functional tests.
+
+Launch this script by running ``run_tests.sh`` from GALAXY_ROOT, see
+that script for a list of options.
+"""
+
+import os
+import os.path
+import sys
+
+galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
+sys.path[1:1] = [ os.path.join( galaxy_root, "lib" ), os.path.join( galaxy_root, "test" ) ]
+
+from base import driver_util
+log = driver_util.build_logger()
+
+from base.api_util import get_master_api_key, get_user_api_key
+
+
+class MigratedToolsGalaxyTestDriver(driver_util.GalaxyTestDriver):
+    """Instantiate a Galaxy-style nose TestDriver for testing migrated Galaxy tools."""
+
+    testing_shed_tools = True
+
+    def build_tests(self):
+        """Build migrated tool test methods."""
+        self.setup_shed_tools(
+            testing_migrated_tools=True,
+        )
+        self.build_tool_tests()
+
+
+class InstalledToolsGalaxyTestDriver(driver_util.GalaxyTestDriver):
+    """Galaxy-style nose TestDriver for testing installed Galaxy tools."""
+
+    testing_shed_tools = True
+
+    def build_tests(self):
+        """Build installed tool test methods."""
+        self.setup_shed_tools(
+            testing_installed_tools=True,
+        )
+        self.build_tool_tests()
+
+
+class DefaultGalaxyTestDriver(driver_util.GalaxyTestDriver):
+    """Default Galaxy-style nose test driver.
+
+    Just populate non-shed tool tests and run tests. Works
+    for tool tests, regular twill tests, and API testing.
+    """
+
+    def build_tests(self):
+        """Build framework tool test methods."""
+        self.build_tool_tests()
+
+
+class FrameworkToolsGalaxyTestDriver(DefaultGalaxyTestDriver):
+    """Galaxy-style nose TestDriver for testing framework Galaxy tools."""
+
+    framework_tool_and_types = True
+
+
+class DataManagersGalaxyTestDriver(driver_util.GalaxyTestDriver):
+    """Galaxy-style nose TestDriver for testing framework Galaxy tools."""
+
+    def build_tests(self):
+        """Build data manager test methods."""
+        import functional.test_data_managers
+        functional.test_data_managers.data_managers = self.app.data_managers
+        functional.test_data_managers.build_tests(
+            tmp_dir=self.galaxy_test_tmp_dir,
+            testing_shed_tools=self.testing_shed_tools,
+            master_api_key=get_master_api_key(),
+            user_api_key=get_user_api_key(),
+        )
+
+
+class WorkflowGalaxyTestDriver(driver_util.GalaxyTestDriver):
+    """Galaxy-style nose TestDriver for testing a Galaxy workflow."""
+
+    def build_tests(self):
+        """Setup WorkflowTestCase for test execution."""
+        import functional.workflow
+        functional.workflow.WorkflowTestCase.master_api_key = get_master_api_key()
+        functional.workflow.WorkflowTestCase.user_api_key = get_user_api_key()
+
+
+TEST_DRIVERS = {
+    '-migrated': MigratedToolsGalaxyTestDriver,
+    '-installed': InstalledToolsGalaxyTestDriver,
+    '-framework': FrameworkToolsGalaxyTestDriver,
+    '-data_managers': DataManagersGalaxyTestDriver,
+    '-workflow': WorkflowGalaxyTestDriver,
+}
+
+
+def find_test_driver():
+    """Look at command-line args and find the correct Galaxy test driver."""
+    test_driver = DefaultGalaxyTestDriver
+
+    for key in TEST_DRIVERS.keys():
+        if _check_arg(key):
+            test_driver = TEST_DRIVERS[key]
+
+    return test_driver
+
+
+def _check_arg( name ):
+    try:
+        index = sys.argv.index( name )
+        del sys.argv[ index ]
+        ret_val = True
+    except ValueError:
+        ret_val = False
+    return ret_val
+
+
+if __name__ == "__main__":
+    driver_util.drive_test(find_test_driver())
diff --git a/scripts/galaxy-main b/scripts/galaxy-main
new file mode 100755
index 0000000..7523c54
--- /dev/null
+++ b/scripts/galaxy-main
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+""" Entry point for starting Galaxy without starting as part of a web server.
+
+Example Usage: Start a job/workflow handler without a web server and with
+a given name:
+
+galaxy-main --server-name handler0
+
+Start as a daemon with (requires daemonize - install with 'pip install daemonize'):
+
+galaxy-main -d --daemon-log-file=handler0-daemon.log --pid-file handler0.pid --server-name handler0
+
+In daemon mode, logging of Galaxy (as opposed to this script) is configured via
+a loggers section in Galaxy's ini file; this can be overridden, with sensible
+defaults logging to a single file, using the following:
+
+galaxy-main -d --server-name handler0 --daemon-log-file=handler0-daemon.log --pid-file handler0.pid --log-file handler0.log
+"""
+import functools
+import logging
+import os
+import sys
+import time
+from logging.config import fileConfig
+
+try:
+    import ConfigParser as configparser
+except ImportError:
+    import configparser
+
+try:
+    from daemonize import Daemonize
+except ImportError:
+    Daemonize = None
+
+# Vaguely Python 2.6 compatible ArgumentParser import
+try:
+    from argparse import ArgumentParser
+except ImportError:
+    from optparse import OptionParser
+
+    class ArgumentParser(OptionParser):
+
+        def __init__(self, **kwargs):
+            self.delegate = OptionParser(**kwargs)
+
+        def add_argument(self, *args, **kwargs):
+            if "required" in kwargs:
+                del kwargs["required"]
+            return self.delegate.add_option(*args, **kwargs)
+
+        def parse_args(self, args=None):
+            (options, args) = self.delegate.parse_args(args)
+            return options
+
+REQUIRES_DAEMONIZE_MESSAGE = "Attempted to use Galaxy in daemon mode, but daemonize is unavailable."
+
+log = logging.getLogger(__name__)
+
+real_file = os.path.realpath(__file__)
+GALAXY_ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(real_file), os.pardir))
+GALAXY_LIB_DIR = os.path.join(GALAXY_ROOT_DIR, "lib")
+DEFAULT_INI_APP = "main"
+DEFAULT_INIS = ["config/galaxy.ini", "universe_wsgi.ini", "config/galaxy.ini.sample"]
+
+DEFAULT_PID = "galaxy.pid"
+DEFAULT_VERBOSE = True
+DESCRIPTION = "Daemonized entry point for Galaxy."
+
+
+def load_galaxy_app(
+    config_builder,
+    config_env=False,
+    log=None,
+    **kwds
+):
+    # Allow specification of log so daemon can reuse properly configured one.
+    if log is None:
+        log = logging.getLogger(__name__)
+
+    # If called in daemon mode, set the ROOT directory and ensure Galaxy is on
+    # sys.path.
+    if config_env:
+        try:
+            os.chdir(GALAXY_ROOT_DIR)
+        except Exception:
+            log.exception("Failed to chdir")
+            raise
+        try:
+            sys.path.insert(1, GALAXY_LIB_DIR)
+        except Exception:
+            log.exception("Failed to add Galaxy to sys.path")
+            raise
+
+    config_builder.setup_logging()
+    from galaxy.util.properties import load_app_properties
+    kwds = config_builder.app_kwds()
+    kwds = load_app_properties(**kwds)
+    from galaxy.app import UniverseApplication
+    app = UniverseApplication(
+        global_conf={"__file__": config_builder.ini_path},
+        **kwds
+    )
+    app.control_worker.bind_and_start()
+    return app
+
+
+def app_loop(args, log):
+    try:
+        config_builder = GalaxyConfigBuilder(args)
+        galaxy_app = load_galaxy_app(
+            config_builder,
+            config_env=True,
+            log=log,
+        )
+    except BaseException:
+        log.exception("Failed to initialize Galaxy application")
+        raise
+    sleep = True
+    while sleep:
+        try:
+            time.sleep(5)
+        except KeyboardInterrupt:
+            sleep = False
+        except SystemExit:
+            sleep = False
+        except Exception:
+            pass
+    try:
+        galaxy_app.shutdown()
+    except Exception:
+        log.exception("Failed to shutdown Galaxy application")
+        raise
+
+
+def absolute_config_path(path, galaxy_root):
+    if path and not os.path.isabs(path):
+        path = os.path.join(galaxy_root, path)
+    return path
+
+
+def find_ini(supplied_ini, galaxy_root):
+    if supplied_ini:
+        return supplied_ini
+
+    # If not explicitly supplied an ini, check the default locations and then
+    # just resort to the sample if none has been configured.
+    for guess in DEFAULT_INIS:
+        ini_path = os.path.join(galaxy_root, guess)
+        if os.path.exists(ini_path):
+            return ini_path
+
+    return guess
+
+
+class GalaxyConfigBuilder(object):
+    """ Generate paste-like configuration from supplied command-line arguments.
+    """
+
+    def __init__(self, args=None, **kwds):
+        ini_path = kwds.get("ini_path", None) or (args and args.ini_path)
+        # If given app_conf_path - use that - else we need to ensure we have an
+        # ini path.
+        if not ini_path:
+            galaxy_root = kwds.get("galaxy_root", GALAXY_ROOT_DIR)
+            ini_path = find_ini(ini_path, galaxy_root)
+            ini_path = absolute_config_path(ini_path, galaxy_root=galaxy_root)
+        self.ini_path = ini_path
+        self.app_name = kwds.get("app") or (args and args.app) or DEFAULT_INI_APP
+        self.log_file = (args and args.log_file)
+
+    @classmethod
+    def populate_options(cls, arg_parser):
+        arg_parser.add_argument("-c", "--ini-path", default=None, help="Galaxy ini config file (defaults to config/galaxy.ini)")
+        arg_parser.add_argument("--app", default=DEFAULT_INI_APP, help="app section in ini file (defaults to main)")
+        arg_parser.add_argument("-d", "--daemonize", default=False, help="Daemonzie process", action="store_true")
+        arg_parser.add_argument("--daemon-log-file", default=None, help="log file for daemon script ")
+        arg_parser.add_argument("--log-file", default=None, help="Galaxy log file (overrides log configuration in ini_path if set)")
+        arg_parser.add_argument("--pid-file", default=DEFAULT_PID, help="pid file (default is %s)" % DEFAULT_PID)
+        arg_parser.add_argument("--server-name", default=None, help="set a galaxy server name")
+
+    def app_kwds(self):
+        config = dict(
+            ini_file=self.ini_path,
+            ini_section="app:%s" % self.app_name,
+        )
+        return config
+
+    def setup_logging(self):
+        # Galaxy will attempt to setup logging if loggers is not present in
+        # ini config file - this handles that loggers block however if present
+        # (the way paste normally would)
+        if not self.ini_path:
+            return
+        raw_config = configparser.ConfigParser()
+        raw_config.read([self.ini_path])
+        if raw_config.has_section('loggers'):
+            config_file = os.path.abspath(self.ini_path)
+            fileConfig(
+                config_file,
+                dict(__file__=config_file, here=os.path.dirname(config_file))
+            )
+
+
+def main():
+    arg_parser = ArgumentParser(description=DESCRIPTION)
+    GalaxyConfigBuilder.populate_options(arg_parser)
+    args = arg_parser.parse_args()
+    if args.log_file:
+        os.environ["GALAXY_CONFIG_LOG_DESTINATION"] = os.path.abspath(args.log_file)
+    if args.server_name:
+        os.environ["GALAXY_CONFIG_SERVER_NAME"] = args.server_name
+    pid_file = args.pid_file
+
+    log.setLevel(logging.DEBUG)
+    log.propagate = False
+    if args.daemonize:
+        if Daemonize is None:
+            raise ImportError(REQUIRES_DAEMONIZE_MESSAGE)
+
+        keep_fds = []
+        if args.daemon_log_file:
+            fh = logging.FileHandler(args.daemon_log_file, "w")
+            fh.setLevel(logging.DEBUG)
+            log.addHandler(fh)
+            keep_fds.append(fh.stream.fileno())
+        else:
+            fh = logging.StreamHandler(sys.stderr)
+            fh.setLevel(logging.DEBUG)
+            log.addHandler(fh)
+
+        daemon = Daemonize(
+            app="galaxy",
+            pid=pid_file,
+            action=functools.partial(app_loop, args, log),
+            verbose=DEFAULT_VERBOSE,
+            logger=log,
+            keep_fds=keep_fds,
+        )
+        daemon.start()
+    else:
+        app_loop(args, log)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/get_platforms.py b/scripts/get_platforms.py
new file mode 100755
index 0000000..8e9b1dd
--- /dev/null
+++ b/scripts/get_platforms.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+import os
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+lib = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, "lib" ) )
+sys.path.insert( 1, lib )
+
+import pkg_resources
+print pkg_resources.get_platform()
diff --git a/scripts/grt.py b/scripts/grt.py
new file mode 100644
index 0000000..a87ea5e
--- /dev/null
+++ b/scripts/grt.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python
+"""Script for uploading Galaxy statistics to the Galactic radio telescope.
+
+See doc/source/admin/grt.rst for more detailed usage information.
+"""
+from __future__ import print_function
+
+import os
+import sys
+import json
+import urllib2
+import argparse
+import sqlalchemy as sa
+import yaml
+import re
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+from galaxy.util.properties import load_app_properties
+import galaxy.config
+from galaxy.objectstore import build_object_store_from_config
+from galaxy.model import mapping
+
+sample_config = os.path.abspath(os.path.join(os.path.dirname(__file__), 'grt.yml.sample'))
+default_config = os.path.abspath(os.path.join(os.path.dirname(__file__), 'grt.yml'))
+
+
+def _init(config):
+    if config.startswith('/'):
+        config = os.path.abspath(config)
+    else:
+        config = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, config))
+
+    properties = load_app_properties(ini_file=config)
+    config = galaxy.config.Configuration(**properties)
+    object_store = build_object_store_from_config(config)
+
+    return (
+        mapping.init(
+            config.file_path,
+            config.database_connection,
+            create_tables=False,
+            object_store=object_store
+        ),
+        object_store,
+        config.database_connection.split(':')[0]
+    )
+
+
+def _sanitize_dict(unsanitized_dict):
+    sanitized_dict = dict()
+
+    for key in unsanitized_dict:
+        if key == 'values' and type(unsanitized_dict[key]) is list:
+            sanitized_dict[key] = None
+        else:
+            sanitized_dict[key] = _sanitize_value(unsanitized_dict[key])
+
+        if sanitized_dict[key] is None:
+            del sanitized_dict[key]
+
+    if len(sanitized_dict) == 0:
+        return None
+    else:
+        return sanitized_dict
+
+
+def _sanitize_list(unsanitized_list):
+    sanitized_list = list()
+
+    for item in unsanitized_list:
+        sanitized_value = _sanitize_value(item)
+        if sanitized_value is not None:
+            sanitized_list.append(sanitized_value)
+
+    if len(sanitized_list) == 0:
+        return None
+    else:
+        return sanitized_list
+
+
+def _sanitize_value(unsanitized_value):
+    sanitized_value = None
+
+    fp_regex = re.compile(r'^(/[^/]+)+$')
+
+    if type(unsanitized_value) is dict:
+        sanitized_value = _sanitize_dict(unsanitized_value)
+    elif type(unsanitized_value) is list:
+        sanitized_value = _sanitize_list(unsanitized_value)
+    else:
+        if fp_regex.match(str(unsanitized_value)):
+            sanitized_value = None
+        else:
+            sanitized_value = unsanitized_value
+
+    return sanitized_value
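+
+# Illustrative sanitization behavior (inputs below are hypothetical):
+#   _sanitize_value('/data/galaxy/input.fa')  ->  None  (absolute paths are stripped)
+#   _sanitize_value({'values': [1, 2]})       ->  None  ('values' lists are dropped)
+#   _sanitize_value({'min': 3, 'max': 7})     ->  {'min': 3, 'max': 7}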
+
+
+def main(argv):
+    """Entry point for GRT statistics collection."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument('instance_id', help='Galactic Radio Telescope Instance ID')
+    parser.add_argument('api_key', help='Galactic Radio Telescope API Key')
+
+    parser.add_argument('-c', '--config', dest='config', help='Path to GRT config file (scripts/grt.yml)', default=default_config)
+    parser.add_argument('--dry-run', dest='dryrun', help='Dry run (show data to be sent, but do not send)', action='store_true', default=False)
+    parser.add_argument('--grt-url', dest='grt_url', help='GRT Server (You can run your own!)')
+    args = parser.parse_args(argv[1:])
+
+    print('Loading GRT config...')
+    try:
+        with open(args.config) as f:
+            config_dict = yaml.load(f)
+    except Exception:
+        with open(sample_config) as f:
+            config_dict = yaml.load(f)
+
+    # set to 0 by default
+    if 'last_job_id_sent' not in config_dict:
+        config_dict['last_job_id_sent'] = 0
+
+    if args.instance_id:
+        config_dict['instance_id'] = args.instance_id
+    if args.api_key:
+        config_dict['api_key'] = args.api_key
+    if args.grt_url:
+        config_dict['grt_url'] = args.grt_url
+
+    print('Loading Galaxy...')
+    model, object_store, engine = _init(config_dict['galaxy_config'])
+    sa_session = model.context.current
+
+    # Fetch completed jobs (state "ok") that have not yet been sent.
+    jobs = sa_session.query(model.Job)\
+        .filter(sa.and_(
+            model.Job.table.c.state == "ok",
+            model.Job.table.c.id > config_dict['last_job_id_sent']
+        ))\
+        .all()
+
+    # Set up our arrays
+    active_users = []
+    grt_tool_data = []
+    grt_jobs_data = []
+
+    def kw_metrics(job):
+        return {
+            '%s_%s' % (metric.plugin, metric.metric_name): metric.metric_value
+            for metric in job.metrics
+        }
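+    # e.g. a metric emitted by the 'core' plugin as 'runtime_seconds' becomes
+    # the key 'core_runtime_seconds' (cf. wanted_metrics below).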
+
+    # For every job
+    for job in jobs:
+        if job.tool_id in config_dict['tool_blacklist']:
+            continue
+
+        # Append an active user; we'll deduplicate at the end.
+        active_users.append(job.user_id)
+
+        # Find the tool in our normalized tool table.
+        if (job.tool_id, job.tool_version) not in grt_tool_data:
+            grt_tool_idx = len(grt_tool_data)
+            grt_tool_data.append((job.tool_id, job.tool_version))
+        else:
+            grt_tool_idx = grt_tool_data.index((job.tool_id, job.tool_version))
+
+        metrics = kw_metrics(job)
+
+        wanted_metrics = ('core_galaxy_slots', 'core_runtime_seconds')
+
+        grt_metrics = {
+            k: int(metrics.get(k, 0))
+            for k in wanted_metrics
+        }
+
+        params = job.raw_param_dict()
+        for key in params:
+            params[key] = json.loads(params[key])
+
+        job_data = {
+            'tool': grt_tool_idx,
+            'date': job.update_time.strftime('%s'),
+            'metrics': grt_metrics,
+            'params': _sanitize_dict(params)
+        }
+        grt_jobs_data.append(job_data)
+
+    if len(jobs) > 0:
+        config_dict['last_job_id_sent'] = jobs[-1].id
+
+    grt_report_data = {
+        'meta': {
+            'version': 1,
+            'instance_uuid': config_dict['instance_id'],
+            'instance_api_key': config_dict['api_key'],
+            # We do not record ANYTHING about your users other than count.
+            'active_users': len(set(active_users)),
+            'total_users': sa_session.query(model.User).count(),
+            'recent_jobs': len(jobs),
+        },
+        'tools': [
+            {
+                'tool_id': a,
+                'tool_version': b,
+            }
+            for (a, b) in grt_tool_data
+        ],
+        'jobs': grt_jobs_data,
+    }
+
+    if args.dryrun:
+        print(json.dumps(grt_report_data, indent=2))
+    else:
+        try:
+            urllib2.urlopen(config_dict['grt_url'], data=json.dumps(grt_report_data))
+        except urllib2.HTTPError as htpe:
+            print(htpe.read())
+            exit(1)
+
+        # Update the GRT config file with the last job id sent (prevents duplicates from being resent).
+        with open(args.config, 'w') as f:
+            yaml.dump(config_dict, f, default_flow_style=False)
+
+
+if __name__ == '__main__':
+    main(sys.argv)
diff --git a/scripts/grt.yml.sample b/scripts/grt.yml.sample
new file mode 100644
index 0000000..d837681
--- /dev/null
+++ b/scripts/grt.yml.sample
@@ -0,0 +1,7 @@
+galaxy_config: config/galaxy.ini
+#instance_id: blah
+#api_key: blah
+grt_url: https://radio-telescope.galaxyproject.org/api/v1/upload
+tool_blacklist:
+  - __SET_METADATA__
+  - upload1
diff --git a/scripts/helper.py b/scripts/helper.py
new file mode 100644
index 0000000..42402c2
--- /dev/null
+++ b/scripts/helper.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+A command line helper for common operations performed by Galaxy maintainers.
+Encodes and decodes IDs, returns Dataset IDs if provided an HDA or LDDA id,
+returns the disk path of a dataset.
+"""
+import os
+import sys
+from ConfigParser import ConfigParser
+from optparse import OptionParser
+
+sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), os.pardir, 'lib' ) )
+
+from galaxy.model import mapping
+from galaxy.web import security
+
+default_config = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, 'config/galaxy.ini') )
+
+parser = OptionParser()
+parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (config/galaxy.ini)', default=default_config )
+parser.add_option( '-e', '--encode-id', dest='encode_id', help='Encode an ID' )
+parser.add_option( '-d', '--decode-id', dest='decode_id', help='Decode an ID' )
+parser.add_option( '--hda', dest='hda_id', help='Display HistoryDatasetAssociation info' )
+parser.add_option( '--ldda', dest='ldda_id', help='Display LibraryDatasetDatasetAssociation info' )
+( options, args ) = parser.parse_args()
+
+try:
+    assert options.encode_id or options.decode_id or options.hda_id or options.ldda_id
+except AssertionError:
+    parser.print_help()
+    sys.exit( 1 )
+
+options.config = os.path.abspath( options.config )
+
+config = ConfigParser( dict( file_path='database/files',
+                             id_secret='USING THE DEFAULT IS NOT SECURE!',
+                             database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
+config.read( options.config )
+
+helper = security.SecurityHelper( id_secret=config.get( 'app:main', 'id_secret' ) )
+model = mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables=False )
+
+if options.encode_id:
+    print 'Encoded "%s": %s' % ( options.encode_id, helper.encode_id( options.encode_id ) )
+
+if options.decode_id:
+    print 'Decoded "%s": %s' % ( options.decode_id, helper.decode_id( options.decode_id ) )
+
+if options.hda_id:
+    try:
+        hda_id = int( options.hda_id )
+    except ValueError:
+        hda_id = int( helper.decode_id( options.hda_id ) )
+    hda = model.context.current.query( model.HistoryDatasetAssociation ).get( hda_id )
+    print 'HDA "%s" is Dataset "%s" at: %s' % ( hda.id, hda.dataset.id, hda.file_name )
+
+if options.ldda_id:
+    try:
+        ldda_id = int( options.ldda_id )
+    except ValueError:
+        ldda_id = int( helper.decode_id( options.ldda_id ) )
+    ldda = model.context.current.query( model.LibraryDatasetDatasetAssociation ).get( ldda_id )
+    print 'LDDA "%s" is Dataset "%s" at: %s' % ( ldda.id, ldda.dataset.id, ldda.file_name )
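+
+# Illustrative invocations (IDs below are placeholders):
+#   python scripts/helper.py -e 42                   # encode the database ID 42
+#   python scripts/helper.py --hda <encoded_or_integer_id>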
diff --git a/scripts/loc_files/create_all_fasta_loc.py b/scripts/loc_files/create_all_fasta_loc.py
new file mode 100644
index 0000000..946499c
--- /dev/null
+++ b/scripts/loc_files/create_all_fasta_loc.py
@@ -0,0 +1,303 @@
+"""
+Generates a loc file containing names of all the fasta files that match the
+name of the genome subdirectory they're in.
+Assumptions:
+    - fasta files should be named the same as the genome subdirectory they're
+      in, with the possible addition of a recognized variant (canon, full, etc.)
+    - for "variants" (like full, canon[ical], chrM, etc.) the naming needs to be
+      consistent and specific:
+        - <genome_name><variant>, like hg19canon, hg19full, or hg19chrM
+Normal usage:
+create_all_fasta_loc.py -f unmatching_fasta.txt -i seq
+
+usage: %prog [options]
+   -d, --data-table-xml=d: The name of the data table configuration file to get format of loc file
+   -t, --data-table=t: The name of the data table listed in the data table XML file
+   -g, --genome-dir=g: Genome directory to look in
+   -e, --exemptions=e: Comma-separated list of genome dir subdirectories to not look in
+   -i, --inspect-dir=i: Comma-separated list of subdirectories inside genome dirs to look in (default is all)
+   -x, --fasta-exts=x: Comma-separated list of all fasta extensions to list
+   -s, --loc-sample=s: The name of the sample loc file (to copy text into top of output loc file)
+   -f, --unmatching-fasta=f: Name of file to output non-matching fasta files to, if included
+   -v, --variants=v: Comma-separated list of recognized variants of fasta file names
+   -n, --variant-exclusions=n: List of variant files to exclude because a kept variant duplicates them (default ':full')
+   -a, --append=a: Append to existing all_fasta.loc file rather than create new
+   -p, --sample-text=p: Copy over text from all_fasta.loc.sample file (false if set to append)
+"""
+import optparse
+import os
+import sys
+from xml.etree.ElementTree import parse
+
+DEFAULT_TOOL_DATA_TABLE_CONF = 'tool_data_table_conf.xml'
+DEFAULT_ALL_FASTA_LOC_BASE = 'all_fasta'
+DEFAULT_BASE_GENOME_DIR = '/afs/bx.psu.edu/depot/data/genome'
+EXEMPTIONS = 'bin,tmp,lengths,equCab2_chrM,microbes'
+INSPECT_DIR = None
+FASTA_EXTS = '.fa,.fasta,.fna'
+VARIANTS = 'chrM,chr21,full,canon,female,male,haps,nohaps'
+
+VARIANT_EXCLUSIONS = ':full'
+
+DBKEY_DESCRIPTION_MAP = { 'AaegL1': 'Mosquito (Aedes aegypti): AaegL1',
+                          'AgamP3': 'Mosquito (Anopheles gambiae): AgamP3',
+                          'anoCar1': 'Lizard (Anolis carolinensis): anoCar1',
+                          'anoGam1': 'Mosquito (Anopheles gambiae): anoGam1',
+                          'apiMel1': 'Honeybee (Apis mellifera): apiMel1',
+                          'apiMel2': 'Honeybee (Apis mellifera): apiMel2',
+                          'apiMel3': 'Honeybee (Apis mellifera): apiMel3',
+                          'Arabidopsis_thaliana_TAIR9': '',
+                          'borEut13': 'Boreoeutherian: borEut13',
+                          'bosTau2': 'Cow (Bos taurus): bosTau2',
+                          'bosTau3': 'Cow (Bos taurus): bosTau3',
+                          'bosTau4': 'Cow (Bos taurus): bosTau4',
+                          'bosTauMd3': 'Cow (Bos taurus): bosTauMd3',
+                          'calJac1': 'Marmoset (Callithrix jacchus): calJac1',
+                          'canFam1': 'Dog (Canis lupus familiaris): canFam1',
+                          'canFam2': 'Dog (Canis lupus familiaris): canFam2',
+                          'cavPor3': 'Guinea Pig (Cavia porcellus): cavPor3',
+                          'ce2': 'Caenorhabditis elegans: ce2',
+                          'ce4': 'Caenorhabditis elegans: ce4',
+                          'ce5': 'Caenorhabditis elegans: ce5',
+                          'ce6': 'Caenorhabditis elegans: ce6',
+                          'CpipJ1': 'Mosquito (Culex quinquefasciatus): CpipJ1',
+                          'danRer2': 'Zebrafish (Danio rerio): danRer2',
+                          'danRer3': 'Zebrafish (Danio rerio): danRer3',
+                          'danRer4': 'Zebrafish (Danio rerio): danRer4',
+                          'danRer5': 'Zebrafish (Danio rerio): danRer5',
+                          'danRer6': 'Zebrafish (Danio rerio): danRer6',
+                          'dm1': 'Fruit Fly (Drosophila melanogaster): dm1',
+                          'dm2': 'Fruit Fly (Drosophila melanogaster): dm2',
+                          'dm3': 'Fruit Fly (Drosophila melanogaster): dm3',
+                          'dm4': 'Fruit Fly (Drosophila melanogaster): dm4',
+                          'dp3': 'Fruit Fly (Drosophila pseudoobscura): dp3',
+                          'dp4': 'Fruit Fly (Drosophila pseudoobscura): dp4',
+                          'droAna1': 'Fruit Fly (Drosophila ananassae): droAna1',
+                          'droAna2': 'Fruit Fly (Drosophila ananassae): droAna2',
+                          'droAna3': 'Fruit Fly (Drosophila ananassae): droAna3',
+                          'droEre1': 'Fruit Fly (Drosophila erecta): droEre1',
+                          'droEre2': 'Fruit Fly (Drosophila erecta): droEre2',
+                          'droGri1': 'Fruit Fly (Drosophila grimshawi): droGri1',
+                          'droGri2': 'Fruit Fly (Drosophila grimshawi): droGri2',
+                          'droMoj1': 'Fruit Fly (Drosophila mojavensis): droMoj1',
+                          'droMoj2': 'Fruit Fly (Drosophila mojavensis): droMoj2',
+                          'droMoj3': 'Fruit Fly (Drosophila mojavensis): droMoj3',
+                          'droPer1': 'Fruit Fly (Drosophila persimilis): droPer1',
+                          'droSec1': 'Fruit Fly (Drosophila sechellia): droSec1',
+                          'droSim1': 'Fruit Fly (Drosophila simulans): droSim1',
+                          'droVir1': 'Fruit Fly (Drosophila virilis): droVir1',
+                          'droVir2': 'Fruit Fly (Drosophila virilis): droVir2',
+                          'droVir3': 'Fruit Fly (Drosophila virilis): droVir3',
+                          'droYak1': 'Fruit Fly (Drosophila yakuba): droYak1',
+                          'droYak2': 'Fruit Fly (Drosophila yakuba): droYak2',
+                          'echTel1': 'Tenrec (Echinops telfairi): echTel1',
+                          'equCab1': 'Horse (Equus caballus): equCab1',
+                          'equCab2': 'Horse (Equus caballus): equCab2',
+                          'eriEur1': 'Hedgehog (Erinaceus europaeus): eriEur1',
+                          'felCat3': 'Cat (Felis catus): felCat3',
+                          'fr1': 'Fugu (Takifugu rubripes): fr1',
+                          'fr2': 'Fugu (Takifugu rubripes): fr2',
+                          'galGal2': 'Chicken (Gallus gallus): galGal2',
+                          'galGal3': 'Chicken (Gallus gallus): galGal3',
+                          'gasAcu1': 'Stickleback (Gasterosteus aculeatus): gasAcu1',
+                          'hg16': 'Human (Homo sapiens): hg16',
+                          'hg17': 'Human (Homo sapiens): hg17',
+                          'hg18': 'Human (Homo sapiens): hg18',
+                          'hg19': 'Human (Homo sapiens): hg19',
+                          'IscaW1': 'Deer Tick (Ixodes scapularis): IscaW1',
+                          'lMaj5': 'Leishmania major: lMaj5',
+                          'mm5': 'Mouse (Mus musculus): mm5',
+                          'mm6': 'Mouse (Mus musculus): mm6',
+                          'mm7': 'Mouse (Mus musculus): mm7',
+                          'mm8': 'Mouse (Mus musculus): mm8',
+                          'mm9': 'Mouse (Mus musculus): mm9',
+                          'monDom4': 'Opossum (Monodelphis domestica): monDom4',
+                          'monDom5': 'Opossum (Monodelphis domestica): monDom5',
+                          'ornAna1': 'Platypus (Ornithorhynchus anatinus): ornAna1',
+                          'oryCun1': 'Rabbit (Oryctolagus cuniculus): oryCun1',
+                          'oryLat1': 'Medaka (Oryzias latipes): oryLat1',
+                          'oryLat2': 'Medaka (Oryzias latipes): oryLat2',
+                          'oryza_sativa_japonica_nipponbare_IRGSP4.0': 'Rice (Oryza sativa L. ssp. japonica var. Nipponbare): IRGSP4.0',
+                          'otoGar1': 'Bushbaby (Otolemur garnetti): otoGar1',
+                          'panTro1': 'Chimpanzee (Pan troglodytes): panTro1',
+                          'panTro2': 'Chimpanzee (Pan troglodytes): panTro2',
+                          'petMar1': 'Lamprey (Petromyzon marinus): petMar1',
+                          'phiX': 'phiX174 (AF176034)',
+                          'PhumU1': 'Head Louse (Pediculus humanus): PhumU1',
+                          'ponAbe2': 'Orangutan (Pongo pygmaeus abelii): ponAbe2',
+                          'pUC18': 'pUC18 (L09136)',
+                          'rheMac2': 'Rhesus Macaque (Macaca mulatta): rheMac2',
+                          'rn3': 'Rat (Rattus norvegicus): rn3',
+                          'rn4': 'Rat (Rattus norvegicus): rn4',
+                          'sacCer1': 'Yeast (Saccharomyces cerevisiae): sacCer1',
+                          'sacCer2': 'Yeast (Saccharomyces cerevisiae): sacCer2',
+                          'sorAra1': 'Common Shrew (Sorex araneus): sorAra1',
+                          'Sscrofa9.58': 'Pig (Sus scrofa): Sscrofa9.58',
+                          'strPur2': 'Purple Sea Urchin (Strongylocentrotus purpuratus): strPur2',
+                          'susScr2': 'Pig (Sus scrofa): susScr2',
+                          'taeGut1': 'Zebra Finch (Taeniopygia guttata): taeGut1',
+                          'tetNig1': 'Tetraodon (Tetraodon nigroviridis): tetNig1',
+                          'tetNig2': 'Tetraodon (Tetraodon nigroviridis): tetNig2',
+                          'tupBel1': 'Tree Shrew (Tupaia belangeri): tupBel1',
+                          'venter1': 'Human (J. Craig Venter): venter1',
+                          'xenTro2': 'Frog (Xenopus tropicalis): xenTro2' }
+
+VARIANT_MAP = { 'canon': 'Canonical',
+               'full': 'Full',
+               'female': 'Female',
+               'male': 'Male' }
+
+
+# alphabetize ignoring case
+def caseless_compare( a, b ):
+    au = a.upper()
+    bu = b.upper()
+    if au > bu:
+        return 1
+    elif au == bu:
+        return 0
+    elif au < bu:
+        return -1
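+# Illustrative: sorted( ['hg19', 'Hg18', 'dm3'], cmp=caseless_compare )
+# returns ['dm3', 'Hg18', 'hg19'], ignoring case.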
+
+
+def __main__():
+    # command line variables
+    parser = optparse.OptionParser()
+    parser.add_option( '-d', '--data-table-xml', dest='data_table_xml', type='string', default=DEFAULT_TOOL_DATA_TABLE_CONF, help='The name of the data table configuration file to get format of loc file' )
+    parser.add_option( '-t', '--data-table', dest='data_table_name', type='string', default=DEFAULT_ALL_FASTA_LOC_BASE, help='The name of the data table listed in the data table XML file' )
+    parser.add_option( '-g', '--genome-dir', dest='genome_dir', type='string', default=DEFAULT_BASE_GENOME_DIR, help='Genome directory to look in' )
+    parser.add_option( '-e', '--exemptions', dest='exemptions', type='string', default=EXEMPTIONS, help='Comma-separated list of subdirectories in genome dir to not look in' )
+    parser.add_option( '-i', '--inspect-dir', dest='inspect_dir', type='string', default=INSPECT_DIR, help='Comma-separated list of subdirectories inside genome dirs to look in (default is all)' )
+    parser.add_option( '-x', '--fasta-exts', dest='fasta_exts', type='string', default=FASTA_EXTS, help='Comma-separated list of all fasta extensions to list' )
+    parser.add_option( '-s', '--loc-sample', dest='loc_sample_name', type='string', help='The name of the sample loc file (to copy text into top of output loc file)' )
+    parser.add_option( '-f', '--unmatching-fasta', dest='unmatching_fasta', type='string', default=None, help='Name of file to output non-matching fasta files to' )
+    parser.add_option( '-v', '--variants', dest='variants', type='string', default=VARIANTS, help='Comma-separated list of recognized variants of fasta file names' )
+    parser.add_option( '-n', '--variant-exclusions', dest='variant_exclusions', type='string', default=VARIANT_EXCLUSIONS, help="List of files to exclude because they're duplicated by a variant; of the format: '<variant_to_keep_1>:<variant_to_remove_1>[,<variant_to_remove_2>[,...]][;<variant_to_keep_2>:<variant_to_remove_1>[,<variant_to_remove_2>[,...]]]'; default ':full' (if the non-variant version is present (like 'hg19'), the full version (like 'hg19full') will be thrown out)" )
+    parser.add_option( '-a', '--append', dest='append', action='store_true', default=False, help='Append to existing all_fasta.loc file rather than create new' )
+    parser.add_option( '-p', '--sample-text', dest='sample_text', action='store_true', default=True, help='Copy over text from all_fasta.loc.sample file (false if set to append)' )
+    (options, args) = parser.parse_args()
+
+    exemptions = [ e.strip() for e in options.exemptions.split( ',' ) ]
+    fasta_exts = [ x.strip() for x in options.fasta_exts.split( ',' ) ]
+    variants = [ v.strip() for v in options.variants.split( ',' ) ]
+    variant_exclusions = {}
+    try:
+        for ve in options.variant_exclusions.split( ';' ):
+            v, e = ve.split( ':' )
+            variant_exclusions[ v ] = e.split( ',' )
+    except:
+        sys.stderr.write( 'Problem parsing the variant exclusion parameter (-n/--variant-exclusion). Make sure it follows the expected format\n' )
+        sys.exit( 1 )
+    if options.append:
+        sample_text = False
+    else:
+        sample_text = options.sample_text
+
+    # all paths to look in
+    if options.inspect_dir:
+        paths_to_look_in = [ os.path.join( options.genome_dir, '%s', id ) for id in options.inspect_dir.split( ',' ) ]
+    else:
+        paths_to_look_in = [os.path.join( options.genome_dir, '%s' )]
+
+    # say what we're looking in
+    print '\nLooking in:\n\t%s' % '\n\t'.join( [ p % '<build_name>' for p in paths_to_look_in ] )
+    poss_names = [ '<build_name>%s' % _ for _ in variants ]
+    print 'for files that are named %s' % ', '.join( poss_names[:-1] ),
+    if len( poss_names ) > 1:
+        print 'or %s' % poss_names[-1],
+    if len( fasta_exts ) == 1:
+        print 'with the extension %s.' % fasta_exts[0]
+    else:
+        print 'with the extension %s or %s.' % ( ', '.join( fasta_exts[:-1] ), fasta_exts[-1] )
+    print '\nSkipping the following:\n\t%s' % '\n\t'.join( exemptions )
+
+    # get column names
+    col_values = []
+    loc_path = None
+    tree = parse( options.data_table_xml )
+    tables = tree.getroot()
+    for table in tables.getiterator():
+        name = table.attrib.get( 'name' )
+        if name == options.data_table_name:
+            cols = None
+            for node in table.getiterator():
+                if node.tag == 'columns':
+                    cols = node.text
+                elif node.tag == 'file':
+                    loc_path = node.attrib.get( 'path' )
+            if cols:
+                col_values = [ col.strip() for col in cols.split( ',' ) ]
+    if not col_values or not loc_path:
+        raise Exception( 'No columns can be found for this data table (%s) in %s' % ( options.data_table_name, options.data_table_xml ) )
+
+    # get all fasta paths under genome directory
+    fasta_locs = {}
+    unmatching_fasta_paths = []
+    genome_subdirs = [ dr for dr in os.listdir( options.genome_dir ) if dr not in exemptions ]
+    for genome_subdir in genome_subdirs:
+        possible_names = [ genome_subdir ]
+        possible_names.extend( [ '%s%s' % ( genome_subdir, _ ) for _ in variants ] )
+        # get paths to all fasta files
+        for path_to_look_in in paths_to_look_in:
+            for dirpath, dirnames, filenames in os.walk( path_to_look_in % genome_subdir ):
+                for fn in filenames:
+                    ext = os.path.splitext( fn )[-1]
+                    fasta_base = os.path.splitext( fn )[0]
+                    if ext in fasta_exts:
+                        if fasta_base in possible_names:
+                            if fasta_base == genome_subdir:
+                                name = DBKEY_DESCRIPTION_MAP[ genome_subdir ]
+                            else:
+                                try:
+                                    name = '%s %s' % ( DBKEY_DESCRIPTION_MAP[ genome_subdir ], VARIANT_MAP[ fasta_base.replace( genome_subdir, '' ) ] )
+                                except KeyError:
+                                    name = '%s %s' % ( DBKEY_DESCRIPTION_MAP[ genome_subdir ], fasta_base.replace( genome_subdir, '' ) )
+                            fasta_locs[ fasta_base ] = { 'value': fasta_base, 'dbkey': genome_subdir, 'name': name, 'path': os.path.join( dirpath, fn ) }
+                        else:
+                            unmatching_fasta_paths.append( os.path.join( dirpath, fn ) )
+        # remove fasta files made redundant by a kept variant
+        for kept_variant in variant_exclusions.keys():
+            leave_in = '%s%s' % ( genome_subdir, kept_variant )
+            if leave_in in fasta_locs:
+                to_remove = [ '%s%s' % ( genome_subdir, excl ) for excl in variant_exclusions[ kept_variant ] ]
+                for tr in to_remove:
+                    if tr in fasta_locs:
+                        del fasta_locs[ tr ]
+
+    # output results
+    print '\nThere were %s fasta files found that were not included because they did not have the expected file names.' % len( unmatching_fasta_paths )
+    print '%s fasta files were found and listed.\n' % len( fasta_locs.keys() )
+
+    # output unmatching fasta files
+    if options.unmatching_fasta and unmatching_fasta_paths:
+        open( options.unmatching_fasta, 'wb' ).write( '%s\n' % '\n'.join( unmatching_fasta_paths ) )
+
+    # output loc file
+    if options.append:
+        all_fasta_loc = open( loc_path, 'ab' )
+    else:
+        all_fasta_loc = open( loc_path, 'wb' )
+    # put sample loc file text at top of file if appropriate
+    if sample_text:
+        if options.loc_sample_name:
+            all_fasta_loc.write( '%s\n' % open( options.loc_sample_name, 'rb' ).read().strip() )
+        else:
+            all_fasta_loc.write( '%s\n' % open( '%s.sample' % loc_path, 'rb' ).read().strip() )
+    # output list of fasta files in alphabetical order
+    fasta_bases = fasta_locs.keys()
+    fasta_bases.sort( caseless_compare )
+    for fb in fasta_bases:
+        out_line = []
+        for col in col_values:
+            try:
+                out_line.append( fasta_locs[ fb ][ col ] )
+            except KeyError:
+                raise Exception( 'Unexpected column (%s) encountered' % col )
+        if out_line:
+            all_fasta_loc.write( '%s\n' % '\t'.join( out_line ) )
+    # close up output loc file
+    all_fasta_loc.close()
+
+
+if __name__ == '__main__':
+    __main__()
diff --git a/scripts/manage_db.py b/scripts/manage_db.py
new file mode 100644
index 0000000..7d1bfb1
--- /dev/null
+++ b/scripts/manage_db.py
@@ -0,0 +1,23 @@
+""" This script parses Galaxy or Tool Shed config file for database connection
+and then delegates to sqlalchemy_migrate shell main function in
+migrate.versioning.shell. """
+import os.path
+import sys
+
+from migrate.versioning.shell import main
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+from galaxy.model.orm.scripts import get_config
+
+
+def invoke_migrate_main():
+    config = get_config( sys.argv )
+    db_url = config['db_url']
+    repo = config['repo']
+
+    main( repository=repo, url=db_url )
+
+
+if __name__ == "__main__":
+    invoke_migrate_main()
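+
+# Illustrative invocations (standard sqlalchemy-migrate subcommands):
+#   python scripts/manage_db.py version     # latest schema version in the repository
+#   python scripts/manage_db.py db_version  # current version of the database
+#   python scripts/manage_db.py upgrade     # migrate the database up to the latest version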
diff --git a/scripts/manage_tools.py b/scripts/manage_tools.py
new file mode 100644
index 0000000..5d17e41
--- /dev/null
+++ b/scripts/manage_tools.py
@@ -0,0 +1,37 @@
+import logging
+import os.path
+import sys
+from ConfigParser import SafeConfigParser
+
+from migrate.versioning.shell import main
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+from galaxy.model.orm.scripts import read_config_file_arg
+from galaxy.util.properties import load_app_properties
+
+log = logging.getLogger( __name__ )
+
+config_file = read_config_file_arg( sys.argv, 'config/galaxy.ini', 'universe_wsgi.ini' )
+if not os.path.exists( config_file ):
+    print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file
+    sys.exit( 1 )
+repo = 'lib/tool_shed/galaxy_install/migrate'
+
+properties = load_app_properties( ini_file=config_file )
+cp = SafeConfigParser()
+cp.read( config_file )
+
+if config_file == 'config/galaxy.ini.sample' and 'GALAXY_TEST_DBURI' in os.environ:
+    # Running functional tests.
+    db_url = os.environ[ 'GALAXY_TEST_DBURI' ]
+elif "install_database_connection" in properties:
+    db_url = properties[ "install_database_connection" ]
+elif "database_connection" in properties:
+    db_url = properties[ "database_connection" ]
+elif "database_file" in properties:
+    db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % properties[ "database_file" ]
+else:
+    db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
+
+main( repository=repo, url=db_url )
diff --git a/scripts/metagenomics/convert_title.py b/scripts/metagenomics/convert_title.py
new file mode 100644
index 0000000..64c0ff8
--- /dev/null
+++ b/scripts/metagenomics/convert_title.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+"""
+convert nt and wgs data (fasta format) to giNumber_seqLen
+run formatdb in the command line: gunzip -c nt.gz |formatdb -i stdin -p F -n "nt.chunk" -v 2000
+"""
+
+import sys
+
+if __name__ == '__main__':
+    seq = []
+    len_seq = 0
+    invalid_lines = 0
+    gi = None
+
+    for i, line in enumerate(sys.stdin):
+        line = line.rstrip('\r\n')
+        if line.startswith('>'):
+            if len_seq > 0:
+                if gi is None:
+                    raise Exception('The first sequence does not have a header.')
+                print ">%s_%d" % (gi, len_seq)
+                print "\n".join(seq)
+            title = line
+            fields = title.split('|')
+            if len(fields) >= 2 and fields[0] == '>gi':
+                gi = fields[1]
+            else:
+                gi = 'giunknown'
+                invalid_lines += 1
+            len_seq = 0
+            seq = []
+        else:
+            seq.append(line)
+            len_seq += len(line)
+    if len_seq > 0:
+        print ">%s_%d" % (gi, len_seq)
+        print "\n".join(seq)
+
+    print >> sys.stderr, "Unable to find a gi number for %d sequences; their titles were replaced with giunknown" % (invalid_lines)
diff --git a/scripts/metagenomics/process_BLAST_db.sh b/scripts/metagenomics/process_BLAST_db.sh
new file mode 100644
index 0000000..445bab9
--- /dev/null
+++ b/scripts/metagenomics/process_BLAST_db.sh
@@ -0,0 +1,18 @@
+echo "This will take several hours to finish due to the size of the databases (about 30GB)..." 
+echo "Getting nt database from NCBI..."
+wget ftp://ftp.ncbi.nlm.nih.gov/blast/db/FASTA/nt.gz
+echo "Changing fasta title lines to >ginumber_seqlength..."
+echo "Formatting nt database to chunks of 2GB each..."
+gunzip -c nt.gz | python convert_title.py | formatdb -i stdin -p F -n "nt.chunk" -v 2000
+echo "Remove the zip file, keep the formatted files."
+rm nt.gz
+
+echo "Getting wgs database from NCBI..."
+wget ftp://ftp.ncbi.nlm.nih.gov/blast/db/FASTA/wgs.gz
+echo "Changing fasta title lines to >ginumber_seqlength..."
+echo "Formatting wgs database to chunks of 2GB each..."
+gunzip -c wgs.gz | python convert_title.py | formatdb -i stdin -p F -n "wgs.chunk" -v 2000
+echo "Remove the zip file, keep the formatted files."
+rm wgs.gz
+
+echo "Job finished"
diff --git a/scripts/microbes/BeautifulSoup.py b/scripts/microbes/BeautifulSoup.py
new file mode 100644
index 0000000..5ff9204
--- /dev/null
+++ b/scripts/microbes/BeautifulSoup.py
@@ -0,0 +1,1808 @@
+"""Beautiful Soup
+Elixir and Tonic
+"The Screen-Scraper's Friend"
+http://www.crummy.com/software/BeautifulSoup/
+
+Beautiful Soup parses a (possibly invalid) XML or HTML document into a
+tree representation. It provides methods and Pythonic idioms that make
+it easy to navigate, search, and modify the tree.
+
+A well-structured XML/HTML document yields a well-behaved data
+structure. An ill-structured XML/HTML document yields a
+correspondingly ill-behaved data structure. If your document is only
+locally well-structured, you can use this library to find and process
+the well-structured part of it.
+
+Beautiful Soup works with Python 2.2 and up. It has no external
+dependencies, but you'll have more success at converting data to UTF-8
+if you also install these three packages:
+
+* chardet, for auto-detecting character encodings
+  http://chardet.feedparser.org/
+* cjkcodecs and iconv_codec, which add more encodings to the ones supported
+  by stock Python.
+  http://cjkpython.i18n.org/
+
+Beautiful Soup defines classes for two main parsing strategies:
+
+ * BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific
+   language that kind of looks like XML.
+
+ * BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid
+   or invalid. This class has web browser-like heuristics for
+   obtaining a sensible parse tree in the face of common HTML errors.
+
+Beautiful Soup also defines a class (UnicodeDammit) for autodetecting
+the encoding of an HTML or XML document, and converting it to
+Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed
+Parser.
+
+For more than you ever wanted to know about Beautiful Soup, see the
+documentation:
+http://www.crummy.com/software/BeautifulSoup/documentation.html
+"""
+from __future__ import generators
+
+import codecs
+import re
+import string
+import sys
+import types
+import sgmllib
+from htmlentitydefs import name2codepoint
+from sgmllib import SGMLParser, SGMLParseError
+
+__author__ = "Leonard Richardson (crummy.com)"
+__contributors__ = ["Sam Ruby (intertwingly.net)",
+                    "the unwitting Mark Pilgrim (diveintomark.org)",
+                    "http://www.crummy.com/software/BeautifulSoup/AUTHORS.html"]
+__version__ = "3.0.3"
+__copyright__ = "Copyright (c) 2004-2006 Leonard Richardson"
+__license__ = "PSF"
+
+# This RE makes Beautiful Soup able to parse XML with namespaces.
+sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')
+
+# This RE makes Beautiful Soup capable of recognizing numeric character
+# references that use hexadecimal.
+sgmllib.charref = re.compile('&#(\d+|x[0-9a-fA-F]+);')
+
+DEFAULT_OUTPUT_ENCODING = "utf-8"
+
+
+# First, the classes that represent markup elements.
+class PageElement:
+    """Contains the navigational information for some part of the page
+    (either a tag or a piece of text)"""
+
+    def setup(self, parent=None, previous=None):
+        """Sets up the initial relations between this element and
+        other elements."""
+        self.parent = parent
+        self.previous = previous
+        self.next = None
+        self.previousSibling = None
+        self.nextSibling = None
+        if self.parent and self.parent.contents:
+            self.previousSibling = self.parent.contents[-1]
+            self.previousSibling.nextSibling = self
+
+    def replaceWith(self, replaceWith):
+        oldParent = self.parent
+        myIndex = self.parent.contents.index(self)
+        if hasattr(replaceWith, 'parent') and replaceWith.parent == self.parent:
+            # We're replacing this element with one of its siblings.
+            index = self.parent.contents.index(replaceWith)
+            if index and index < myIndex:
+                # Furthermore, it comes before this element. That
+                # means that when we extract it, the index of this
+                # element will change.
+                myIndex = myIndex - 1
+        self.extract()
+        oldParent.insert(myIndex, replaceWith)
+
+    def extract(self):
+        """Destructively rips this element out of the tree."""
+        if self.parent:
+            try:
+                self.parent.contents.remove(self)
+            except ValueError:
+                pass
+
+        # Find the two elements that would be next to each other if
+        # this element (and any children) hadn't been parsed. Connect
+        # the two.
+        lastChild = self._lastRecursiveChild()
+        nextElement = lastChild.next
+
+        if self.previous:
+            self.previous.next = nextElement
+        if nextElement:
+            nextElement.previous = self.previous
+        self.previous = None
+        lastChild.next = None
+
+        self.parent = None
+        if self.previousSibling:
+            self.previousSibling.nextSibling = self.nextSibling
+        if self.nextSibling:
+            self.nextSibling.previousSibling = self.previousSibling
+        self.previousSibling = self.nextSibling = None
+
+    def _lastRecursiveChild(self):
+        "Finds the last element beneath this object to be parsed."
+        lastChild = self
+        while hasattr(lastChild, 'contents') and lastChild.contents:
+            lastChild = lastChild.contents[-1]
+        return lastChild
+
+    def insert(self, position, newChild):
+        if (isinstance(newChild, basestring) or
+                isinstance(newChild, unicode)) and \
+                not isinstance(newChild, NavigableString):
+            newChild = NavigableString(newChild)
+
+        position = min(position, len(self.contents))
+        if hasattr(newChild, 'parent') and newChild.parent is not None:
+            # We're 'inserting' an element that's already one
+            # of this object's children.
+            if newChild.parent == self:
+                index = self.find(newChild)
+                if index and index < position:
+                    # Furthermore we're moving it further down the
+                    # list of this object's children. That means that
+                    # when we extract this element, our target index
+                    # will jump down one.
+                    position = position - 1
+            newChild.extract()
+
+        newChild.parent = self
+        previousChild = None
+        if position == 0:
+            newChild.previousSibling = None
+            newChild.previous = self
+        else:
+            previousChild = self.contents[position - 1]
+            newChild.previousSibling = previousChild
+            newChild.previousSibling.nextSibling = newChild
+            newChild.previous = previousChild._lastRecursiveChild()
+        if newChild.previous:
+            newChild.previous.next = newChild
+
+        newChildsLastElement = newChild._lastRecursiveChild()
+
+        if position >= len(self.contents):
+            newChild.nextSibling = None
+
+            parent = self
+            parentsNextSibling = None
+            while not parentsNextSibling:
+                parentsNextSibling = parent.nextSibling
+                parent = parent.parent
+                if not parent:  # This is the last element in the document.
+                    break
+            if parentsNextSibling:
+                newChildsLastElement.next = parentsNextSibling
+            else:
+                newChildsLastElement.next = None
+        else:
+            nextChild = self.contents[position]
+            newChild.nextSibling = nextChild
+            if newChild.nextSibling:
+                newChild.nextSibling.previousSibling = newChild
+            newChildsLastElement.next = nextChild
+
+        if newChildsLastElement.next:
+            newChildsLastElement.next.previous = newChildsLastElement
+        self.contents.insert(position, newChild)
+
+    def findNext(self, name=None, attrs={}, text=None, **kwargs):
+        """Returns the first item that matches the given criteria and
+        appears after this Tag in the document."""
+        return self._findOne(self.findAllNext, name, attrs, text, **kwargs)
+
+    def findAllNext(self, name=None, attrs={}, text=None, limit=None,
+                    **kwargs):
+        """Returns all items that match the given criteria and appear
+        before after Tag in the document."""
+        return self._findAll(name, attrs, text, limit, self.nextGenerator)
+
+    def findNextSibling(self, name=None, attrs={}, text=None, **kwargs):
+        """Returns the closest sibling to this Tag that matches the
+        given criteria and appears after this Tag in the document."""
+        return self._findOne(self.findNextSiblings, name, attrs, text,
+                             **kwargs)
+
+    def findNextSiblings(self, name=None, attrs={}, text=None, limit=None,
+                         **kwargs):
+        """Returns the siblings of this Tag that match the given
+        criteria and appear after this Tag in the document."""
+        return self._findAll(name, attrs, text, limit,
+                             self.nextSiblingGenerator, **kwargs)
+    fetchNextSiblings = findNextSiblings  # Compatibility with pre-3.x
+
+    def findPrevious(self, name=None, attrs={}, text=None, **kwargs):
+        """Returns the first item that matches the given criteria and
+        appears before this Tag in the document."""
+        return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs)
+
+    def findAllPrevious(self, name=None, attrs={}, text=None, limit=None,
+                        **kwargs):
+        """Returns all items that match the given criteria and appear
+        before this Tag in the document."""
+        return self._findAll(name, attrs, text, limit, self.previousGenerator,
+                             **kwargs)
+    fetchPrevious = findAllPrevious  # Compatibility with pre-3.x
+
+    def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs):
+        """Returns the closest sibling to this Tag that matches the
+        given criteria and appears before this Tag in the document."""
+        return self._findOne(self.findPreviousSiblings, name, attrs, text,
+                             **kwargs)
+
+    def findPreviousSiblings(self, name=None, attrs={}, text=None,
+                             limit=None, **kwargs):
+        """Returns the siblings of this Tag that match the given
+        criteria and appear before this Tag in the document."""
+        return self._findAll(name, attrs, text, limit,
+                             self.previousSiblingGenerator, **kwargs)
+    fetchPreviousSiblings = findPreviousSiblings  # Compatibility with pre-3.x
+
+    def findParent(self, name=None, attrs={}, **kwargs):
+        """Returns the closest parent of this Tag that matches the given
+        criteria."""
+        # NOTE: We can't use _findOne because findParents takes a different
+        # set of arguments.
+        r = None
+        l = self.findParents(name, attrs, 1)
+        if l:
+            r = l[0]
+        return r
+
+    def findParents(self, name=None, attrs={}, limit=None, **kwargs):
+        """Returns the parents of this Tag that match the given
+        criteria."""
+
+        return self._findAll(name, attrs, None, limit, self.parentGenerator,
+                             **kwargs)
+    fetchParents = findParents  # Compatibility with pre-3.x
+
+    # These methods do the real heavy lifting.
+
+    def _findOne(self, method, name, attrs, text, **kwargs):
+        r = None
+        l = method(name, attrs, text, 1, **kwargs)
+        if l:
+            r = l[0]
+        return r
+
+    def _findAll(self, name, attrs, text, limit, generator, **kwargs):
+        "Iterates over a generator looking for things that match."
+
+        if isinstance(name, SoupStrainer):
+            strainer = name
+        else:
+            # Build a SoupStrainer
+            strainer = SoupStrainer(name, attrs, text, **kwargs)
+        results = ResultSet(strainer)
+        g = generator()
+        while True:
+            try:
+                i = g.next()
+            except StopIteration:
+                break
+            if i:
+                found = strainer.search(i)
+                if found:
+                    results.append(found)
+                    if limit and len(results) >= limit:
+                        break
+        return results
+
+    # These Generators can be used to navigate starting from both
+    # NavigableStrings and Tags.
+    def nextGenerator(self):
+        i = self
+        while i:
+            i = i.next
+            yield i
+
+    def nextSiblingGenerator(self):
+        i = self
+        while i:
+            i = i.nextSibling
+            yield i
+
+    def previousGenerator(self):
+        i = self
+        while i:
+            i = i.previous
+            yield i
+
+    def previousSiblingGenerator(self):
+        i = self
+        while i:
+            i = i.previousSibling
+            yield i
+
+    def parentGenerator(self):
+        i = self
+        while i:
+            i = i.parent
+            yield i
+
+    # Utility methods
+    def substituteEncoding(self, str, encoding=None):
+        encoding = encoding or "utf-8"
+        return str.replace("%SOUP-ENCODING%", encoding)
+
+    def toEncoding(self, s, encoding=None):
+        """Encodes an object to a string in some encoding, or to Unicode.
+        ."""
+        if isinstance(s, unicode):
+            if encoding:
+                s = s.encode(encoding)
+        elif isinstance(s, str):
+            if encoding:
+                s = s.encode(encoding)
+            else:
+                s = unicode(s)
+        else:
+            if encoding:
+                s = self.toEncoding(str(s), encoding)
+            else:
+                s = unicode(s)
+        return s
+
+
+class NavigableString(unicode, PageElement):
+
+    def __getattr__(self, attr):
+        """text.string gives you text. This is for backwards
+        compatibility for Navigable*String, but for CData* it lets you
+        get the string without the CData wrapper."""
+        if attr == 'string':
+            return self
+        else:
+            raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, attr))
+
+    def __unicode__(self):
+        return self.__str__()
+
+    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
+        if encoding:
+            return self.encode(encoding)
+        else:
+            return self
+
+
+class CData(NavigableString):
+
+    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
+        return "<![CDATA[%s]]>" % NavigableString.__str__(self, encoding)
+
+
+class ProcessingInstruction(NavigableString):
+    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
+        output = self
+        if "%SOUP-ENCODING%" in output:
+            output = self.substituteEncoding(output, encoding)
+        return "<?%s?>" % self.toEncoding(output, encoding)
+
+
+class Comment(NavigableString):
+    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
+        return "<!--%s-->" % NavigableString.__str__(self, encoding)
+
+
+class Declaration(NavigableString):
+    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
+        return "<!%s>" % NavigableString.__str__(self, encoding)
+
+
+class Tag(PageElement):
+    """Represents a found HTML tag with its attributes and contents."""
+
+    XML_ENTITIES_TO_CHARS = { 'apos': "'",
+                              "quot": '"',
+                              "amp": "&",
+                              "lt": "<",
+                              "gt": ">"
+                              }
+    # An RE for finding ampersands that aren't the start of a
+    # numeric entity.
+    BARE_AMPERSAND = re.compile("&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)")
+
+    def __init__(self, parser, name, attrs=None, parent=None,
+                 previous=None):
+        "Basic constructor."
+
+        # We don't actually store the parser object: that lets extracted
+        # chunks be garbage-collected
+        self.parserClass = parser.__class__
+        self.isSelfClosing = parser.isSelfClosingTag(name)
+        self.convertHTMLEntities = parser.convertHTMLEntities
+        self.name = name
+        if attrs is None:
+            attrs = []
+        self.attrs = attrs
+        self.contents = []
+        self.setup(parent, previous)
+        self.hidden = False
+        self.containsSubstitutions = False
+
+    def get(self, key, default=None):
+        """Returns the value of the 'key' attribute for the tag, or
+        the value given for 'default' if it doesn't have that
+        attribute."""
+        return self._getAttrMap().get(key, default)
+
+    def has_key(self, key):
+        return key in self._getAttrMap()
+
+    def __getitem__(self, key):
+        """tag[key] returns the value of the 'key' attribute for the tag,
+        and throws an exception if it's not there."""
+        return self._getAttrMap()[key]
+
+    def __iter__(self):
+        "Iterating over a tag iterates over its contents."
+        return iter(self.contents)
+
+    def __len__(self):
+        "The length of a tag is the length of its list of contents."
+        return len(self.contents)
+
+    def __contains__(self, x):
+        return x in self.contents
+
+    def __nonzero__(self):
+        "A tag is non-None even if it has no contents."
+        return True
+
+    def __setitem__(self, key, value):
+        """Setting tag[key] sets the value of the 'key' attribute for the
+        tag."""
+        self._getAttrMap()
+        self.attrMap[key] = value
+        found = False
+        for i in range(0, len(self.attrs)):
+            if self.attrs[i][0] == key:
+                self.attrs[i] = (key, value)
+                found = True
+        if not found:
+            self.attrs.append((key, value))
+        self._getAttrMap()[key] = value
+
+    def __delitem__(self, key):
+        "Deleting tag[key] deletes all 'key' attributes for the tag."
+        for item in self.attrs:
+            if item[0] == key:
+                self.attrs.remove(item)
+                # We don't break because bad HTML can define the same
+                # attribute multiple times.
+            self._getAttrMap()
+            if key in self.attrMap:
+                del self.attrMap[key]
+
+    def __call__(self, *args, **kwargs):
+        """Calling a tag like a function is the same as calling its
+        findAll() method. Eg. tag('a') returns a list of all the A tags
+        found within this tag."""
+        return apply(self.findAll, args, kwargs)
+
+    def __getattr__(self, tag):
+        if len(tag) > 3 and tag.rfind('Tag') == len(tag) - 3:
+            return self.find(tag[:-3])
+        elif tag.find('__') != 0:
+            return self.find(tag)
+
+    def __eq__(self, other):
+        """Returns true iff this tag has the same name, the same attributes,
+        and the same contents (recursively) as the given tag.
+
+        NOTE: right now this will return false if two tags have the
+        same attributes in a different order. Should this be fixed?"""
+        if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other):
+            return False
+        for i in range(0, len(self.contents)):
+            if self.contents[i] != other.contents[i]:
+                return False
+        return True
+
+    def __ne__(self, other):
+        """Returns true iff this tag is not identical to the other tag,
+        as defined in __eq__."""
+        return not self == other
+
+    def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
+        """Renders this tag as a string."""
+        return self.__str__(encoding)
+
+    def __unicode__(self):
+        return self.__str__(None)
+
+    def _convertEntities(self, match):
+        x = match.group(1)
+        if x in name2codepoint:
+            return unichr(name2codepoint[x])
+        elif x in self.XML_ENTITIES_TO_CHARS:
+            # Leave the predefined XML entities (apos, quot, amp, lt, gt) intact.
+            return '&%s;' % x
+        else:
+            # Escape the ampersand of an unrecognized entity.
+            return '&amp;%s;' % x
+
+    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING,
+                prettyPrint=False, indentLevel=0):
+        """Returns a string or Unicode representation of this tag and
+        its contents. To get Unicode, pass None for encoding.
+
+        NOTE: since Python's HTML parser consumes whitespace, this
+        method is not certain to reproduce the whitespace present in
+        the original string."""
+
+        encodedName = self.toEncoding(self.name, encoding)
+
+        attrs = []
+        if self.attrs:
+            for key, val in self.attrs:
+                fmt = '%s="%s"'
+                if isString(val):
+                    if self.containsSubstitutions and '%SOUP-ENCODING%' in val:
+                        val = self.substituteEncoding(val, encoding)
+
+                    # The attribute value either:
+                    #
+                    # * Contains no embedded double quotes or single quotes.
+                    #   No problem: we enclose it in double quotes.
+                    # * Contains embedded single quotes. No problem:
+                    #   double quotes work here too.
+                    # * Contains embedded double quotes. No problem:
+                    #   we enclose it in single quotes.
+                    # * Embeds both single _and_ double quotes. This
+                    #   can't happen naturally, but it can happen if
+                    #   you modify an attribute value after parsing
+                    #   the document. Now we have a bit of a
+                    #   problem. We solve it by enclosing the
+                    #   attribute in single quotes, and escaping any
+                    #   embedded single quotes to XML entities.
+                    if '"' in val:
+                        # This can't happen naturally, but it can happen
+                        # if you modify an attribute value after parsing.
+                        if "'" in val:
+                            val = val.replace('"', """)
+                        else:
+                            fmt = "%s='%s'"
+
+                    # Optionally convert any HTML entities
+                    if self.convertHTMLEntities:
+                        val = re.sub("&(\w+);", self._convertEntities, val)
+
+                    # Now we're okay w/r/t quotes. But the attribute
+                    # value might also contain angle brackets, or
+                    # ampersands that aren't part of entities. We need
+                    # to escape those to XML entities too.
+                    val = val.replace("<", "<").replace(">", ">")
+                    val = self.BARE_AMPERSAND.sub("&", val)
+
+                attrs.append(fmt % (self.toEncoding(key, encoding),
+                                    self.toEncoding(val, encoding)))
+        close = ''
+        closeTag = ''
+        if self.isSelfClosing:
+            close = ' /'
+        else:
+            closeTag = '</%s>' % encodedName
+
+        indentTag, indentContents = 0, 0
+        if prettyPrint:
+            indentTag = indentLevel
+            space = (' ' * (indentTag - 1))
+            indentContents = indentTag + 1
+        contents = self.renderContents(encoding, prettyPrint, indentContents)
+        if self.hidden:
+            s = contents
+        else:
+            s = []
+            attributeString = ''
+            if attrs:
+                attributeString = ' ' + ' '.join(attrs)
+            if prettyPrint:
+                s.append(space)
+            s.append('<%s%s%s>' % (encodedName, attributeString, close))
+            if prettyPrint:
+                s.append("\n")
+            s.append(contents)
+            if prettyPrint and contents and contents[-1] != "\n":
+                s.append("\n")
+            if prettyPrint and closeTag:
+                s.append(space)
+            s.append(closeTag)
+            if prettyPrint and closeTag and self.nextSibling:
+                s.append("\n")
+            s = ''.join(s)
+        return s
+
+    def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING):
+        return self.__str__(encoding, True)
+
+    def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
+                       prettyPrint=False, indentLevel=0):
+        """Renders the contents of this tag as a string in the given
+        encoding. If encoding is None, returns a Unicode string.."""
+        s = []
+        for c in self:
+            text = None
+            if isinstance(c, NavigableString):
+                text = c.__str__(encoding)
+            elif isinstance(c, Tag):
+                s.append(c.__str__(encoding, prettyPrint, indentLevel))
+            if text and prettyPrint:
+                text = text.strip()
+            if text:
+                if prettyPrint:
+                    s.append(" " * (indentLevel - 1))
+                s.append(text)
+                if prettyPrint:
+                    s.append("\n")
+        return ''.join(s)
+
+    # Soup methods
+
+    def find(self, name=None, attrs={}, recursive=True, text=None,
+             **kwargs):
+        """Return only the first child of this Tag matching the given
+        criteria."""
+        r = None
+        l = self.findAll(name, attrs, recursive, text, 1, **kwargs)
+        if l:
+            r = l[0]
+        return r
+    findChild = find
+
+    def findAll(self, name=None, attrs={}, recursive=True, text=None,
+                limit=None, **kwargs):
+        """Extracts a list of Tag objects that match the given
+        criteria.  You can specify the name of the Tag and any
+        attributes you want the Tag to have.
+
+        The value of a key-value pair in the 'attrs' map can be a
+        string, a list of strings, a regular expression object, or a
+        callable that takes a string and returns whether or not the
+        string matches for some custom definition of 'matches'. The
+        same is true of the tag name."""
+        generator = self.recursiveChildGenerator
+        if not recursive:
+            generator = self.childGenerator
+        return self._findAll(name, attrs, text, limit, generator, **kwargs)
+    findChildren = findAll
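+
+    # Illustrative usage (an assumed sketch, not doctests):
+    #   soup = BeautifulSoup('<p class="a">One</p><p class="b">Two</p>')
+    #   soup.find('p')                         # the first <p> Tag
+    #   soup.findAll('p', {'class': 'b'})      # every <p> with class="b"
+    #   soup.findAll(text=re.compile('^T'))    # strings starting with 'T'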
+
+    # Pre-3.x compatibility methods
+    first = find
+    fetch = findAll
+
+    def fetchText(self, text=None, recursive=True, limit=None):
+        return self.findAll(text=text, recursive=recursive, limit=limit)
+
+    def firstText(self, text=None, recursive=True):
+        return self.find(text=text, recursive=recursive)
+
+    # Utility methods
+
+    def append(self, tag):
+        """Appends the given tag to the contents of this tag."""
+        self.contents.append(tag)
+
+    # Private methods
+
+    def _getAttrMap(self):
+        """Initializes a map representation of this tag's attributes,
+        if not already initialized."""
+        if not getattr(self, 'attrMap'):
+            self.attrMap = {}
+            for (key, value) in self.attrs:
+                self.attrMap[key] = value
+        return self.attrMap
+
+    # Generator methods
+    def childGenerator(self):
+        for i in range(0, len(self.contents)):
+            yield self.contents[i]
+        raise StopIteration
+
+    def recursiveChildGenerator(self):
+        stack = [(self, 0)]
+        while stack:
+            tag, start = stack.pop()
+            if isinstance(tag, Tag):
+                for i in range(start, len(tag.contents)):
+                    a = tag.contents[i]
+                    yield a
+                    if isinstance(a, Tag) and tag.contents:
+                        if i < len(tag.contents) - 1:
+                            stack.append((tag, i + 1))
+                        stack.append((a, 0))
+                        break
+        raise StopIteration
+
+
+# Next, a couple classes to represent queries and their results.
+class SoupStrainer:
+    """Encapsulates a number of ways of matching a markup element (tag or
+    text)."""
+
+    def __init__(self, name=None, attrs={}, text=None, **kwargs):
+        self.name = name
+        if isString(attrs):
+            kwargs['class'] = attrs
+            attrs = None
+        if kwargs:
+            if attrs:
+                attrs = attrs.copy()
+                attrs.update(kwargs)
+            else:
+                attrs = kwargs
+        self.attrs = attrs
+        self.text = text
+
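+    # Illustrative construction (an assumed sketch): these strainers match
+    # <b class="x"> tags and any tag whose name starts with 't':
+    #   SoupStrainer('b', {'class': 'x'})
+    #   SoupStrainer(re.compile('^t'))
+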
+    def __str__(self):
+        if self.text:
+            return self.text
+        else:
+            return "%s|%s" % (self.name, self.attrs)
+
+    def searchTag(self, markupName=None, markupAttrs={}):
+        found = None
+        markup = None
+        if isinstance(markupName, Tag):
+            markup = markupName
+            markupAttrs = markup
+        callFunctionWithTagData = callable(self.name) and \
+            not isinstance(markupName, Tag)
+
+        if not self.name or callFunctionWithTagData or \
+                (markup and self._matches(markup, self.name)) or \
+                (not markup and self._matches(markupName, self.name)):
+            if callFunctionWithTagData:
+                match = self.name(markupName, markupAttrs)
+            else:
+                match = True
+                markupAttrMap = None
+                for attr, matchAgainst in self.attrs.items():
+                    if not markupAttrMap:
+                        if hasattr(markupAttrs, 'get'):
+                            markupAttrMap = markupAttrs
+                        else:
+                            markupAttrMap = {}
+                            for k, v in markupAttrs:
+                                markupAttrMap[k] = v
+                    attrValue = markupAttrMap.get(attr)
+                    if not self._matches(attrValue, matchAgainst):
+                        match = False
+                        break
+            if match:
+                if markup:
+                    found = markup
+                else:
+                    found = markupName
+        return found
+
+    def search(self, markup):
+        found = None
+        # If given a list of items, scan it for a text element that
+        # matches.
+        if isList(markup) and not isinstance(markup, Tag):
+            for element in markup:
+                if isinstance(element, NavigableString) and \
+                        self.search(element):
+                    found = element
+                    break
+        # If it's a Tag, make sure its name or attributes match.
+        # Don't bother with Tags if we're searching for text.
+        elif isinstance(markup, Tag):
+            if not self.text:
+                found = self.searchTag(markup)
+        # If it's text, make sure the text matches.
+        elif isinstance(markup, NavigableString) or isString(markup):
+            if self._matches(markup, self.text):
+                found = markup
+        else:
+            raise Exception("I don't know how to match against a %s" %
+                            markup.__class__)
+        return found
+
+    def _matches(self, markup, matchAgainst):
+        result = False
+        if matchAgainst is True:
+            result = markup is not None
+        elif callable(matchAgainst):
+            result = matchAgainst(markup)
+        else:
+            # Custom match methods take the tag as an argument, but all
+            # other ways of matching match the tag name as a string.
+            if isinstance(markup, Tag):
+                markup = markup.name
+            if markup and not isString(markup):
+                markup = unicode(markup)
+            # Now we know that markup is either a string, or None.
+            if hasattr(matchAgainst, 'match'):
+                # It's a regexp object.
+                result = markup and matchAgainst.search(markup)
+            elif isList(matchAgainst):
+                result = markup in matchAgainst
+            elif hasattr(matchAgainst, 'items'):
+                result = matchAgainst in markup
+            elif matchAgainst and isString(markup):
+                if isinstance(markup, unicode):
+                    matchAgainst = unicode(matchAgainst)
+                else:
+                    matchAgainst = str(matchAgainst)
+
+            if not result:
+                result = matchAgainst == markup
+        return result
+
+
+class ResultSet(list):
+    """A ResultSet is just a list that keeps track of the SoupStrainer
+    that created it."""
+
+    def __init__(self, source):
+        list.__init__([])
+        self.source = source
+
+# Now, some helper functions.
+
+
+def isList(l):
+    """Convenience method that works with all 2.x versions of Python
+    to determine whether or not something is listlike."""
+    return hasattr(l, '__iter__') or \
+        type(l) in (types.ListType, types.TupleType)
+
+
+def isString(s):
+    """Convenience method that works with all 2.x versions of Python
+    to determine whether or not something is stringlike."""
+    try:
+        return isinstance(s, unicode) or isinstance(s, basestring)
+    except NameError:
+        return isinstance(s, str)
+
+
+def buildTagMap(default, *args):
+    """Turns a list of maps, lists, or scalars into a single map.
+    Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and
+    NESTING_RESET_TAGS maps out of lists and partial maps."""
+    built = {}
+    for portion in args:
+        if hasattr(portion, 'items'):
+            # It's a map. Merge it.
+            for k, v in portion.items():
+                built[k] = v
+        elif isList(portion):
+            # It's a list. Map each item to the default.
+            for k in portion:
+                built[k] = default
+        else:
+            # It's a scalar. Map it to the default.
+            built[portion] = default
+    return built
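+
+# For illustration (assumed output): entries from a partial map are kept,
+# and every item in a list is mapped to the default:
+#   buildTagMap([], {'li': ['ul', 'ol']}, ['table', 'tr'])
+#   -> {'li': ['ul', 'ol'], 'table': [], 'tr': []}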
+
+# Now, the parser classes.
+
+
+class BeautifulStoneSoup(Tag, SGMLParser):
+
+    """This class contains the basic parser and search code. It defines
+    a parser that knows nothing about tag behavior except for the
+    following:
+
+      You can't close a tag without closing all the tags it encloses.
+      That is, "<foo><bar></foo>" actually means
+      "<foo><bar></bar></foo>".
+
+    [Another possible explanation is "<foo><bar /></foo>", but since
+    this class defines no SELF_CLOSING_TAGS, it will never use that
+    explanation.]
+
+    This class is useful for parsing XML or made-up markup languages,
+    or when BeautifulSoup makes an assumption counter to what you were
+    expecting."""
+
+    SELF_CLOSING_TAGS = {}
+    NESTABLE_TAGS = {}
+    RESET_NESTING_TAGS = {}
+    QUOTE_TAGS = {}
+
+    MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'),
+                       lambda x: x.group(1) + ' />'),
+                      (re.compile('<!\s+([^<>]*)>'),
+                       lambda x: '<!' + x.group(1) + '>')
+                      ]
+
+    ROOT_TAG_NAME = u'[document]'
+
+    HTML_ENTITIES = "html"
+    XML_ENTITIES = "xml"
+    ALL_ENTITIES = [HTML_ENTITIES, XML_ENTITIES]
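+
+    # An illustrative sketch of the closing rule above (assumed behavior):
+    #   BeautifulStoneSoup("<foo><bar></foo>").renderContents()
+    #   -> '<foo><bar></bar></foo>'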
+
+    def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None,
+                 markupMassage=True, smartQuotesTo=XML_ENTITIES,
+                 convertEntities=None, selfClosingTags=None):
+        """The Soup object is initialized as the 'root tag', and the
+        provided markup (which can be a string or a file-like object)
+        is fed into the underlying parser.
+
+        sgmllib will process most bad HTML, and the BeautifulSoup
+        class has some tricks for dealing with some HTML that kills
+        sgmllib, but Beautiful Soup can nonetheless choke or lose data
+        if your data uses self-closing tags or declarations
+        incorrectly.
+
+        By default, Beautiful Soup uses regexes to sanitize input,
+        avoiding the vast majority of these problems. If the problems
+        don't apply to you, pass in False for markupMassage, and
+        you'll get better performance.
+
+        The default parser massage techniques fix the two most common
+        instances of invalid HTML that choke sgmllib:
+
+         <br/> (No space between name of closing tag and tag close)
+         <! --Comment--> (Extraneous whitespace in declaration)
+
+        You can pass in a custom list of (RE object, replace method)
+        tuples to get Beautiful Soup to scrub your input the way you
+        want."""
+
+        self.parseOnlyThese = parseOnlyThese
+        self.fromEncoding = fromEncoding
+        self.smartQuotesTo = smartQuotesTo
+
+        if convertEntities:
+            # It doesn't make sense to convert encoded characters to
+            # entities even while you're converting entities to Unicode.
+            # Just convert it all to Unicode.
+            self.smartQuotesTo = None
+
+        if isList(convertEntities):
+            self.convertHTMLEntities = self.HTML_ENTITIES in convertEntities
+            self.convertXMLEntities = self.XML_ENTITIES in convertEntities
+        else:
+            self.convertHTMLEntities = self.HTML_ENTITIES == convertEntities
+            self.convertXMLEntities = self.XML_ENTITIES == convertEntities
+
+        self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags)
+        SGMLParser.__init__(self)
+
+        if hasattr(markup, 'read'):        # It's a file-type object.
+            markup = markup.read()
+        self.markup = markup
+        self.markupMassage = markupMassage
+        try:
+            self._feed()
+        except StopParsing:
+            pass
+        self.markup = None                 # The markup can now be GCed
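+
+    # A hypothetical custom massage pass (assumed usage): extend the
+    # defaults with an extra (regexp, replacement) tuple, e.g. to strip
+    # NUL bytes before parsing:
+    #   myMassage = BeautifulStoneSoup.MARKUP_MASSAGE + \
+    #       [(re.compile('\x00'), lambda match: '')]
+    #   soup = BeautifulStoneSoup(markup, markupMassage=myMassage)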
+
+    def _feed(self, inDocumentEncoding=None):
+        # Convert the document to Unicode.
+        markup = self.markup
+        if isinstance(markup, unicode):
+            if not hasattr(self, 'originalEncoding'):
+                self.originalEncoding = None
+        else:
+            dammit = UnicodeDammit(markup,
+                                   [self.fromEncoding, inDocumentEncoding],
+                                   smartQuotesTo=self.smartQuotesTo)
+            markup = dammit.unicode
+            self.originalEncoding = dammit.originalEncoding
+        if markup:
+            if self.markupMassage:
+                if not isList(self.markupMassage):
+                    self.markupMassage = self.MARKUP_MASSAGE
+                for fix, m in self.markupMassage:
+                    markup = fix.sub(m, markup)
+        self.reset()
+
+        SGMLParser.feed(self, markup or "")
+        SGMLParser.close(self)
+        # Close out any unfinished strings and close all the open tags.
+        self.endData()
+        while self.currentTag.name != self.ROOT_TAG_NAME:
+            self.popTag()
+
+    def __getattr__(self, methodName):
+        """This method routes method call requests to either the SGMLParser
+        superclass or the Tag superclass, depending on the method name."""
+        if methodName.find('start_') == 0 or methodName.find('end_') == 0 or \
+                methodName.find('do_') == 0:
+            return SGMLParser.__getattr__(self, methodName)
+        elif methodName.find('__') != 0:
+            return Tag.__getattr__(self, methodName)
+        else:
+            raise AttributeError
+
+    def isSelfClosingTag(self, name):
+        """Returns true iff the given string is the name of a
+        self-closing tag according to this parser."""
+        return name in self.SELF_CLOSING_TAGS or \
+            name in self.instanceSelfClosingTags
+
+    def reset(self):
+        Tag.__init__(self, self, self.ROOT_TAG_NAME)
+        self.hidden = 1
+        SGMLParser.reset(self)
+        self.currentData = []
+        self.currentTag = None
+        self.tagStack = []
+        self.quoteStack = []
+        self.pushTag(self)
+
+    def popTag(self):
+        self.tagStack.pop()
+        # Tags with just one string-owning child get the child as a
+        # 'string' property, so that soup.tag.string is shorthand for
+        # soup.tag.contents[0]
+        if len(self.currentTag.contents) == 1 and \
+                isinstance(self.currentTag.contents[0], NavigableString):
+            self.currentTag.string = self.currentTag.contents[0]
+
+        if self.tagStack:
+            self.currentTag = self.tagStack[-1]
+        return self.currentTag
+
+    def pushTag(self, tag):
+        if self.currentTag:
+            self.currentTag.append(tag)
+        self.tagStack.append(tag)
+        self.currentTag = self.tagStack[-1]
+
+    def endData(self, containerClass=NavigableString):
+        if self.currentData:
+            currentData = ''.join(self.currentData)
+            if currentData.endswith('<') and self.convertHTMLEntities:
+                currentData = currentData[:-1] + '&lt;'
+            if not currentData.strip():
+                if '\n' in currentData:
+                    currentData = '\n'
+                else:
+                    currentData = ' '
+            self.currentData = []
+            if self.parseOnlyThese and len(self.tagStack) <= 1 and \
+                    (not self.parseOnlyThese.text or
+                     not self.parseOnlyThese.search(currentData)):
+                return
+            o = containerClass(currentData)
+            o.setup(self.currentTag, self.previous)
+            if self.previous:
+                self.previous.next = o
+            self.previous = o
+            self.currentTag.contents.append(o)
+
+    def _popToTag(self, name, inclusivePop=True):
+        """Pops the tag stack up to and including the most recent
+        instance of the given tag. If inclusivePop is false, pops the tag
+        stack up to but *not* including the most recent instance of
+        the given tag."""
+        if name == self.ROOT_TAG_NAME:
+            return
+
+        numPops = 0
+        mostRecentTag = None
+        for i in range(len(self.tagStack) - 1, 0, -1):
+            if name == self.tagStack[i].name:
+                numPops = len(self.tagStack) - i
+                break
+        if not inclusivePop:
+            numPops = numPops - 1
+
+        for i in range(0, numPops):
+            mostRecentTag = self.popTag()
+        return mostRecentTag
+
+    def _smartPop(self, name):
+        """We need to pop up to the previous tag of this type, unless
+        one of this tag's nesting reset triggers comes between this
+        tag and the previous tag of this type, OR unless this tag is a
+        generic nesting trigger and another generic nesting trigger
+        comes between this tag and the previous tag of this type.
+
+        Examples:
+         <p>Foo<b>Bar<p> should pop to 'p', not 'b'.
+         <p>Foo<table>Bar<p> should pop to 'table', not 'p'.
+         <p>Foo<table><tr>Bar<p> should pop to 'tr', not 'p'.
+         <p>Foo<b>Bar<p> should pop to 'p', not 'b'.
+
+         <li><ul><li> *<li>* should pop to 'ul', not the first 'li'.
+         <tr><table><tr> *<tr>* should pop to 'table', not the first 'tr'
+         <td><tr><td> *<td>* should pop to 'tr', not the first 'td'
+        """
+
+        nestingResetTriggers = self.NESTABLE_TAGS.get(name)
+        isNestable = nestingResetTriggers is not None
+        isResetNesting = name in self.RESET_NESTING_TAGS
+        popTo = None
+        inclusive = True
+        for i in range(len(self.tagStack) - 1, 0, -1):
+            p = self.tagStack[i]
+            if (not p or p.name == name) and not isNestable:
+                # Non-nestable tags get popped to the top or to their
+                # last occurrence.
+                popTo = name
+                break
+            if (nestingResetTriggers is not None and
+                    p.name in nestingResetTriggers) or \
+                    (nestingResetTriggers is None and isResetNesting and
+                     p.name in self.RESET_NESTING_TAGS):
+                # If we encounter one of the nesting reset triggers
+                # peculiar to this tag, or we encounter another tag
+                # that causes nesting to reset, pop up to but not
+                # including that tag.
+                popTo = p.name
+                inclusive = False
+                break
+            p = p.parent
+        if popTo:
+            self._popToTag(popTo, inclusive)
+
+    def unknown_starttag(self, name, attrs, selfClosing=0):
+        if self.quoteStack:
+            # This is not a real tag.
+            attrs = ''.join(map(lambda(x, y): ' %s="%s"' % (x, y), attrs))
+            self.currentData.append('<%s%s>' % (name, attrs))
+            return
+        self.endData()
+
+        if not self.isSelfClosingTag(name) and not selfClosing:
+            self._smartPop(name)
+
+        if self.parseOnlyThese and len(self.tagStack) <= 1 and \
+                (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)):
+            return
+
+        tag = Tag(self, name, attrs, self.currentTag, self.previous)
+        if self.previous:
+            self.previous.next = tag
+        self.previous = tag
+        self.pushTag(tag)
+        if selfClosing or self.isSelfClosingTag(name):
+            self.popTag()
+        if name in self.QUOTE_TAGS:
+            self.quoteStack.append(name)
+            self.literal = 1
+        return tag
+
+    def unknown_endtag(self, name):
+        if self.quoteStack and self.quoteStack[-1] != name:
+            # This is not a real end tag.
+            self.currentData.append('</%s>' % name)
+            return
+        self.endData()
+        self._popToTag(name)
+        if self.quoteStack and self.quoteStack[-1] == name:
+            self.quoteStack.pop()
+            self.literal = (len(self.quoteStack) > 0)
+
+    def handle_data(self, data):
+        if self.convertHTMLEntities:
+            if data[0] == '&':
+                data = self.BARE_AMPERSAND.sub("&amp;", data)
+            else:
+                data = data.replace('&', '&amp;') \
+                           .replace('<', '&lt;') \
+                           .replace('>', '&gt;')
+        self.currentData.append(data)
+
+    def _toStringSubclass(self, text, subclass):
+        """Adds a certain piece of text to the tree as a NavigableString
+        subclass."""
+        self.endData()
+        self.handle_data(text)
+        self.endData(subclass)
+
+    def handle_pi(self, text):
+        """Handle a processing instruction as a ProcessingInstruction
+        object, possibly one with a %SOUP-ENCODING% slot into which an
+        encoding will be plugged later."""
+        if text[:3] == "xml":
+            text = "xml version='1.0' encoding='%SOUP-ENCODING%'"
+        self._toStringSubclass(text, ProcessingInstruction)
+
+    def handle_comment(self, text):
+        "Handle comments as Comment objects."
+        self._toStringSubclass(text, Comment)
+
+    def handle_charref(self, ref):
+        "Handle character references as data."
+        if ref[0] == 'x':
+            data = unichr(int(ref[1:], 16))
+        else:
+            data = unichr(int(ref))
+
+        if u'\x80' <= data <= u'\x9F':
+            data = UnicodeDammit.subMSChar(chr(ord(data)), self.smartQuotesTo)
+        elif not self.convertHTMLEntities and not self.convertXMLEntities:
+            data = '&#%s;' % ref
+
+        self.handle_data(data)
+
+    def handle_entityref(self, ref):
+        """Handle entity references as data, possibly converting known
+        HTML entity references to the corresponding Unicode
+        characters."""
+        replaceWithXMLEntity = self.convertXMLEntities and \
+            ref in self.XML_ENTITIES_TO_CHARS
+        if self.convertHTMLEntities or replaceWithXMLEntity:
+            try:
+                data = unichr(name2codepoint[ref])
+            except KeyError:
+                if replaceWithXMLEntity:
+                    data = self.XML_ENTITIES_TO_CHARS.get(ref)
+                else:
+                    data = "&%s" % ref
+        else:
+            data = '&%s;' % ref
+        self.handle_data(data)
+
+    def handle_decl(self, data):
+        "Handle DOCTYPEs and the like as Declaration objects."
+        self._toStringSubclass(data, Declaration)
+
+    def parse_declaration(self, i):
+        """Treat a bogus SGML declaration as raw data. Treat a CDATA
+        declaration as a CData object."""
+        j = None
+        if self.rawdata[i:i + 9] == '<![CDATA[':
+            k = self.rawdata.find(']]>', i)
+            if k == -1:
+                k = len(self.rawdata)
+            data = self.rawdata[i + 9:k]
+            j = k + 3
+            self._toStringSubclass(data, CData)
+        else:
+            try:
+                j = SGMLParser.parse_declaration(self, i)
+            except SGMLParseError:
+                toHandle = self.rawdata[i:]
+                self.handle_data(toHandle)
+                j = i + len(toHandle)
+        return j
+
+
+class BeautifulSoup(BeautifulStoneSoup):
+
+    """This parser knows the following facts about HTML:
+
+    * Some tags have no closing tag and should be interpreted as being
+      closed as soon as they are encountered.
+
+    * The text inside some tags (i.e. 'script') may contain tags which
+      are not really part of the document and which should be parsed
+      as text, not tags. If you want to parse the text as tags, you can
+      always fetch it and parse it explicitly.
+
+    * Tag nesting rules:
+
+      Most tags can't be nested at all. For instance, the occurrence of
+      a <p> tag should implicitly close the previous <p> tag.
+
+       <p>Para1<p>Para2
+        should be transformed into:
+       <p>Para1</p><p>Para2
+
+      Some tags can be nested arbitrarily. For instance, the occurrence
+      of a <blockquote> tag should _not_ implicitly close the previous
+      <blockquote> tag.
+
+       Alice said: <blockquote>Bob said: <blockquote>Blah
+        should NOT be transformed into:
+       Alice said: <blockquote>Bob said: </blockquote><blockquote>Blah
+
+      Some tags can be nested, but the nesting is reset by the
+      interposition of other tags. For instance, a <tr> tag should
+      implicitly close the previous <tr> tag within the same <table>,
+      but not close a <tr> tag in another table.
+
+       <table><tr>Blah<tr>Blah
+        should be transformed into:
+       <table><tr>Blah</tr><tr>Blah
+        but,
+       <tr>Blah<table><tr>Blah
+        should NOT be transformed into
+       <tr>Blah<table></tr><tr>Blah
+
+    Differing assumptions about tag nesting rules are a major source
+    of problems with the BeautifulSoup class. If BeautifulSoup is not
+    treating as nestable a tag your page author treats as nestable,
+    try ICantBelieveItsBeautifulSoup, MinimalSoup, or
+    BeautifulStoneSoup before writing your own subclass."""
+
+    def __init__(self, *args, **kwargs):
+        if 'smartQuotesTo' not in kwargs:
+            kwargs['smartQuotesTo'] = self.HTML_ENTITIES
+        BeautifulStoneSoup.__init__(self, *args, **kwargs)
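+
+    # An illustrative sketch of the <p> rule described above (assumed
+    # behavior):
+    #   BeautifulSoup("<p>Para1<p>Para2").renderContents()
+    #   -> '<p>Para1</p><p>Para2</p>'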
+
+    SELF_CLOSING_TAGS = buildTagMap(None,
+                                    ['br', 'hr', 'input', 'img', 'meta',
+                                    'spacer', 'link', 'frame', 'base'])
+
+    QUOTE_TAGS = {'script': None}
+
+    # According to the HTML standard, each of these inline tags can
+    # contain another tag of the same type. Furthermore, it's common
+    # to actually use these tags this way.
+    NESTABLE_INLINE_TAGS = ['span', 'font', 'q', 'object', 'bdo', 'sub', 'sup',
+                            'center']
+
+    # According to the HTML standard, these block tags can contain
+    # another tag of the same type. Furthermore, it's common
+    # to actually use these tags this way.
+    NESTABLE_BLOCK_TAGS = ['blockquote', 'div', 'fieldset', 'ins', 'del']
+
+    # Lists can contain other lists, but there are restrictions.
+    NESTABLE_LIST_TAGS = { 'ol': [],
+                           'ul': [],
+                           'li': ['ul', 'ol'],
+                           'dl': [],
+                           'dd': ['dl'],
+                           'dt': ['dl'] }
+
+    # Tables can contain other tables, but there are restrictions.
+    NESTABLE_TABLE_TAGS = {'table': [],
+                           'tr': ['table', 'tbody', 'tfoot', 'thead'],
+                           'td': ['tr'],
+                           'th': ['tr'],
+                           'thead': ['table'],
+                           'tbody': ['table'],
+                           'tfoot': ['table'],
+                           }
+
+    NON_NESTABLE_BLOCK_TAGS = ['address', 'form', 'p', 'pre']
+
+    # If one of these tags is encountered, all tags up to the next tag of
+    # this type are popped.
+    RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript',
+                                     NON_NESTABLE_BLOCK_TAGS,
+                                     NESTABLE_LIST_TAGS,
+                                     NESTABLE_TABLE_TAGS)
+
+    NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS,
+                                NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS)
+
+    # Used to detect the charset in a META tag; see start_meta
+    CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)")
+
+    def start_meta(self, attrs):
+        """Beautiful Soup can detect a charset included in a META tag,
+        try to convert the document to that charset, and re-parse the
+        document from the beginning."""
+        httpEquiv = None
+        contentType = None
+        contentTypeIndex = None
+        tagNeedsEncodingSubstitution = False
+
+        for i in range(0, len(attrs)):
+            key, value = attrs[i]
+            key = key.lower()
+            if key == 'http-equiv':
+                httpEquiv = value
+            elif key == 'content':
+                contentType = value
+                contentTypeIndex = i
+
+        if httpEquiv and contentType:  # It's an interesting meta tag.
+            match = self.CHARSET_RE.search(contentType)
+            if match:
+                if getattr(self, 'declaredHTMLEncoding') or \
+                        self.originalEncoding == self.fromEncoding:
+                    # This is our second pass through the document, or
+                    # else an encoding was specified explicitly and it
+                    # worked. Rewrite the meta tag.
+                    newAttr = self.CHARSET_RE.sub(
+                        lambda(match): match.group(1) + "%SOUP-ENCODING%",
+                        value)
+                    attrs[contentTypeIndex] = (attrs[contentTypeIndex][0],
+                                               newAttr)
+                    tagNeedsEncodingSubstitution = True
+                else:
+                    # This is our first pass through the document.
+                    # Go through it again with the new information.
+                    newCharset = match.group(3)
+                    if newCharset and newCharset != self.originalEncoding:
+                        self.declaredHTMLEncoding = newCharset
+                        self._feed(self.declaredHTMLEncoding)
+                        raise StopParsing
+        tag = self.unknown_starttag("meta", attrs)
+        if tag and tagNeedsEncodingSubstitution:
+            tag.containsSubstitutions = True
+
+
+class StopParsing(Exception):
+    pass
+
+
+class ICantBelieveItsBeautifulSoup(BeautifulSoup):
+
+    """The BeautifulSoup class is oriented towards skipping over
+    common HTML errors like unclosed tags. However, sometimes it makes
+    errors of its own. For instance, consider this fragment:
+
+     <b>Foo<b>Bar</b></b>
+
+    This is perfectly valid (if bizarre) HTML. However, the
+    BeautifulSoup class will implicitly close the first b tag when it
+    encounters the second 'b'. It will think the author wrote
+    "<b>Foo<b>Bar", and didn't close the first 'b' tag, because
+    there's no real-world reason to bold something that's already
+    bold. When it encounters '</b></b>' it will close two more 'b'
+    tags, for a grand total of three tags closed instead of two. This
+    can throw off the rest of your document structure. The same is
+    true of a number of other tags, listed below.
+
+    It's much more common for someone to forget to close a 'b' tag
+    than to actually use nested 'b' tags, and the BeautifulSoup class
+    handles the common case. This class handles the not-so-common
+    case: where you can't believe someone wrote what they did, but
+    it's valid HTML and BeautifulSoup screwed up by assuming it
+    wouldn't be."""
+
+    I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = ['em', 'big', 'i', 'small',
+        'tt', 'abbr', 'acronym', 'strong', 'cite', 'code', 'dfn', 'kbd', 'samp',
+        'strong', 'var', 'b', 'big']
+
+    I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ['noscript']
+
+    NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS,
+                                I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS,
+                                I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS)
+
+
+class MinimalSoup(BeautifulSoup):
+    """The MinimalSoup class is for parsing HTML that contains
+    pathologically bad markup. It makes no assumptions about tag
+    nesting, but it does know which tags are self-closing, that
+    <script> tags contain Javascript and should not be parsed, that
+    META tags may contain encoding information, and so on.
+
+    This also makes it better for subclassing than BeautifulStoneSoup
+    or BeautifulSoup."""
+
+    RESET_NESTING_TAGS = buildTagMap('noscript')
+    NESTABLE_TAGS = {}
+
+
+class BeautifulSOAP(BeautifulStoneSoup):
+    """This class will push a tag with only a single string child into
+    the tag's parent as an attribute. The attribute's name is the tag
+    name, and the value is the string child. An example should give
+    the flavor of the change:
+
+    <foo><bar>baz</bar></foo>
+     =>
+    <foo bar="baz"><bar>baz</bar></foo>
+
+    You can then access fooTag['bar'] instead of fooTag.barTag.string.
+
+    This is, of course, useful for scraping structures that tend to
+    use subelements instead of attributes, such as SOAP messages. Note
+    that it modifies its input, so don't print the modified version
+    out.
+
+    I'm not sure how many people really want to use this class; let me
+    know if you do. Mainly I like the name."""
+
+    def popTag(self):
+        if len(self.tagStack) > 1:
+            tag = self.tagStack[-1]
+            parent = self.tagStack[-2]
+            parent._getAttrMap()
+            if isinstance(tag, Tag) and len(tag.contents) == 1 and \
+                    isinstance(tag.contents[0], NavigableString) and \
+                    tag.name not in parent.attrMap:
+                parent[tag.name] = tag.contents[0]
+        BeautifulStoneSoup.popTag(self)
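+
+    # Illustrative usage (assumed): the single string child also becomes
+    # an attribute of the parent tag:
+    #   soup = BeautifulSOAP("<foo><bar>baz</bar></foo>")
+    #   soup.foo['bar']   # -> u'baz'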
+
+# Enterprise class names! It has come to our attention that some people
+# think the names of the Beautiful Soup parser classes are too silly
+# and "unprofessional" for use in enterprise screen-scraping. We feel
+# your pain! For such-minded folk, the Beautiful Soup Consortium And
+# All-Night Kosher Bakery recommends renaming this file to
+# "RobustParser.py" (or, in cases of extreme enterprisitude,
+# "RobustParserBeanInterface.class") and using the following
+# enterprise-friendly class aliases:
+
+
+class RobustXMLParser(BeautifulStoneSoup):
+    pass
+
+
+class RobustHTMLParser(BeautifulSoup):
+    pass
+
+
+class RobustWackAssHTMLParser(ICantBelieveItsBeautifulSoup):
+    pass
+
+
+class RobustInsanelyWackAssHTMLParser(MinimalSoup):
+    pass
+
+
+class SimplifyingSOAPParser(BeautifulSOAP):
+    pass
+
+
+######################################################
+#
+# Bonus library: Unicode, Dammit
+#
+# This class forces XML data into a standard format (usually UTF-8 or
+# Unicode).  It is heavily based on code from Mark Pilgrim's
+# Universal Feed Parser. It does not rewrite the XML or HTML to
+# reflect a new encoding: that happens in BeautifulStoneSoup.handle_pi
+# (XML) and BeautifulSoup.start_meta (HTML).
+
+# Autodetects character encodings.
+# Download from http://chardet.feedparser.org/
+try:
+    import chardet
+#    import chardet.constants
+#    chardet.constants._debug = 1
+except:
+    chardet = None
+chardet = None
+
+
+class UnicodeDammit:
+    """A class for detecting the encoding of a *ML document and
+    converting it to a Unicode string. If the source encoding is
+    windows-1252, can replace MS smart quotes with their HTML or XML
+    equivalents."""
+
+    # This dictionary maps commonly seen values for "charset" in HTML
+    # meta tags to the corresponding Python codec names. It only covers
+    # values that aren't in Python's aliases and can't be determined
+    # by the heuristics in find_codec.
+    CHARSET_ALIASES = { "macintosh": "mac-roman",
+                        "x-sjis": "shift-jis" }
+
+    def __init__(self, markup, overrideEncodings=[],
+                 smartQuotesTo='xml'):
+        self.markup, documentEncoding, sniffedEncoding = \
+            self._detectEncoding(markup)
+        self.smartQuotesTo = smartQuotesTo
+        self.triedEncodings = []
+        if isinstance(markup, unicode):
+            # Already Unicode; nothing to decode.
+            self.unicode = markup
+            self.originalEncoding = None
+            return
+
+        u = None
+        for proposedEncoding in overrideEncodings:
+            u = self._convertFrom(proposedEncoding)
+            if u:
+                break
+        if not u:
+            for proposedEncoding in (documentEncoding, sniffedEncoding):
+                u = self._convertFrom(proposedEncoding)
+                if u:
+                    break
+
+        # If no luck and we have auto-detection library, try that:
+        if not u and chardet and not isinstance(self.markup, unicode):
+            u = self._convertFrom(chardet.detect(self.markup)['encoding'])
+
+        # As a last resort, try utf-8 and windows-1252:
+        if not u:
+            for proposed_encoding in ("utf-8", "windows-1252"):
+                u = self._convertFrom(proposed_encoding)
+                if u:
+                    break
+        self.unicode = u
+        if not u:
+            self.originalEncoding = None
+
+    def subMSChar(orig, smartQuotesTo):
+        """Changes a MS smart quote character to an XML or HTML
+        entity."""
+        sub = UnicodeDammit.MS_CHARS.get(orig)
+        if isinstance(sub, types.TupleType):
+            if smartQuotesTo == 'xml':
+                sub = '&#x%s;' % sub[1]
+            elif smartQuotesTo == 'html':
+                sub = '&%s;' % sub[0]
+            else:
+                sub = unichr(int(sub[1], 16))
+        return sub
+    subMSChar = staticmethod(subMSChar)
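+
+    # For illustration (values assumed from the MS_CHARS map below):
+    #   UnicodeDammit.subMSChar('\x93', 'xml')    # -> '&#x201C;'
+    #   UnicodeDammit.subMSChar('\x93', 'html')   # -> '&ldquo;'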
+
+    def _convertFrom(self, proposed):
+        proposed = self.find_codec(proposed)
+        if not proposed or proposed in self.triedEncodings:
+            return None
+        self.triedEncodings.append(proposed)
+        markup = self.markup
+
+        # Convert smart quotes to HTML if coming from an encoding
+        # that might have them.
+        if self.smartQuotesTo and proposed in ("windows-1252", "ISO-8859-1",
+                                               "ISO-8859-2"):
+            markup = re.compile("([\x80-\x9f])").sub(
+                lambda(x): self.subMSChar(x.group(1), self.smartQuotesTo),
+                markup)
+
+        try:
+            u = self._toUnicode(markup, proposed)
+            self.markup = u
+            self.originalEncoding = proposed
+        except Exception:
+            return None
+        return self.markup
+
+    def _toUnicode(self, data, encoding):
+        '''Given a string and its encoding, decodes the string into Unicode.
+        %encoding is a string recognized by encodings.aliases'''
+
+        # strip Byte Order Mark (if present)
+        if len(data) >= 4 and data[:2] == '\xfe\xff' and \
+                data[2:4] != '\x00\x00':
+            encoding = 'utf-16be'
+            data = data[2:]
+        elif len(data) >= 4 and data[:2] == '\xff\xfe' and \
+                data[2:4] != '\x00\x00':
+            encoding = 'utf-16le'
+            data = data[2:]
+        elif data[:3] == '\xef\xbb\xbf':
+            encoding = 'utf-8'
+            data = data[3:]
+        elif data[:4] == '\x00\x00\xfe\xff':
+            encoding = 'utf-32be'
+            data = data[4:]
+        elif data[:4] == '\xff\xfe\x00\x00':
+            encoding = 'utf-32le'
+            data = data[4:]
+        newdata = unicode(data, encoding)
+        return newdata
+
+    def _detectEncoding(self, xml_data):
+        """Given a document, tries to detect its XML encoding."""
+        xml_encoding = sniffed_xml_encoding = None
+        try:
+            if xml_data[:4] == '\x4c\x6f\xa7\x94':
+                # EBCDIC
+                xml_data = self._ebcdic_to_ascii(xml_data)
+            elif xml_data[:4] == '\x00\x3c\x00\x3f':
+                # UTF-16BE
+                sniffed_xml_encoding = 'utf-16be'
+                xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
+            elif len(xml_data) >= 4 and xml_data[:2] == '\xfe\xff' and \
+                    xml_data[2:4] != '\x00\x00':
+                # UTF-16BE with BOM
+                sniffed_xml_encoding = 'utf-16be'
+                xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
+            elif xml_data[:4] == '\x3c\x00\x3f\x00':
+                # UTF-16LE
+                sniffed_xml_encoding = 'utf-16le'
+                xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
+            elif len(xml_data) >= 4 and xml_data[:2] == '\xff\xfe' and \
+                    xml_data[2:4] != '\x00\x00':
+                # UTF-16LE with BOM
+                sniffed_xml_encoding = 'utf-16le'
+                xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
+            elif xml_data[:4] == '\x00\x00\x00\x3c':
+                # UTF-32BE
+                sniffed_xml_encoding = 'utf-32be'
+                xml_data = unicode(xml_data, 'utf-32be').encode('utf-8')
+            elif xml_data[:4] == '\x3c\x00\x00\x00':
+                # UTF-32LE
+                sniffed_xml_encoding = 'utf-32le'
+                xml_data = unicode(xml_data, 'utf-32le').encode('utf-8')
+            elif xml_data[:4] == '\x00\x00\xfe\xff':
+                # UTF-32BE with BOM
+                sniffed_xml_encoding = 'utf-32be'
+                xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8')
+            elif xml_data[:4] == '\xff\xfe\x00\x00':
+                # UTF-32LE with BOM
+                sniffed_xml_encoding = 'utf-32le'
+                xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8')
+            elif xml_data[:3] == '\xef\xbb\xbf':
+                # UTF-8 with BOM
+                sniffed_xml_encoding = 'utf-8'
+                xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8')
+            else:
+                sniffed_xml_encoding = 'ascii'
+            xml_encoding_match = re.compile('^<\?.*encoding=[\'"](.*?)[\'"].*\?>')\
+                                   .match(xml_data)
+        except:
+            xml_encoding_match = None
+        if xml_encoding_match:
+            xml_encoding = xml_encoding_match.groups()[0].lower()
+            if sniffed_xml_encoding and \
+               (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode',
+                                 'iso-10646-ucs-4', 'ucs-4', 'csucs4',
+                                 'utf-16', 'utf-32', 'utf_16', 'utf_32',
+                                 'utf16', 'u16')):
+                xml_encoding = sniffed_xml_encoding
+        return xml_data, xml_encoding, sniffed_xml_encoding
+
+    def find_codec(self, charset):
+        return self._codec(self.CHARSET_ALIASES.get(charset, charset)) or \
+            (charset and self._codec(charset.replace("-", ""))) or \
+            (charset and self._codec(charset.replace("-", "_"))) or charset
+
+    def _codec(self, charset):
+        if not charset:
+            return charset
+        codec = None
+        try:
+            codecs.lookup(charset)
+            codec = charset
+        except LookupError:
+            pass
+        return codec
+
+    EBCDIC_TO_ASCII_MAP = None
+
+    def _ebcdic_to_ascii(self, s):
+        c = self.__class__
+        if not c.EBCDIC_TO_ASCII_MAP:
+            emap = (0, 1, 2, 3, 156, 9, 134, 127, 151, 141, 142, 11, 12, 13, 14, 15,
+                    16, 17, 18, 19, 157, 133, 8, 135, 24, 25, 146, 143, 28, 29, 30, 31,
+                    128, 129, 130, 131, 132, 10, 23, 27, 136, 137, 138, 139, 140, 5, 6, 7,
+                    144, 145, 22, 147, 148, 149, 150, 4, 152, 153, 154, 155, 20, 21, 158, 26,
+                    32, 160, 161, 162, 163, 164, 165, 166, 167, 168, 91, 46, 60, 40, 43, 33,
+                    38, 169, 170, 171, 172, 173, 174, 175, 176, 177, 93, 36, 42, 41, 59, 94,
+                    45, 47, 178, 179, 180, 181, 182, 183, 184, 185, 124, 44, 37, 95, 62, 63,
+                    186, 187, 188, 189, 190, 191, 192, 193, 194, 96, 58, 35, 64, 39, 61, 34,
+                    195, 97, 98, 99, 100, 101, 102, 103, 104, 105, 196, 197, 198, 199, 200,
+                    201, 202, 106, 107, 108, 109, 110, 111, 112, 113, 114, 203, 204, 205,
+                    206, 207, 208, 209, 126, 115, 116, 117, 118, 119, 120, 121, 122, 210,
+                    211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+                    225, 226, 227, 228, 229, 230, 231, 123, 65, 66, 67, 68, 69, 70, 71, 72,
+                    73, 232, 233, 234, 235, 236, 237, 125, 74, 75, 76, 77, 78, 79, 80, 81,
+                    82, 238, 239, 240, 241, 242, 243, 92, 159, 83, 84, 85, 86, 87, 88, 89,
+                    90, 244, 245, 246, 247, 248, 249, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+                    250, 251, 252, 253, 254, 255)
+            c.EBCDIC_TO_ASCII_MAP = string.maketrans(
+                ''.join(map(chr, range(256))), ''.join(map(chr, emap)))
+        return s.translate(c.EBCDIC_TO_ASCII_MAP)
+
+    MS_CHARS = { '\x80': ('euro', '20AC'),
+                 '\x81': ' ',
+                 '\x82': ('sbquo', '201A'),
+                 '\x83': ('fnof', '192'),
+                 '\x84': ('bdquo', '201E'),
+                 '\x85': ('hellip', '2026'),
+                 '\x86': ('dagger', '2020'),
+                 '\x87': ('Dagger', '2021'),
+                 '\x88': ('circ', '2C6'),
+                 '\x89': ('permil', '2030'),
+                 '\x8A': ('Scaron', '160'),
+                 '\x8B': ('lsaquo', '2039'),
+                 '\x8C': ('OElig', '152'),
+                 '\x8D': '?',
+                 '\x8E': ('#x17D', '17D'),
+                 '\x8F': '?',
+                 '\x90': '?',
+                 '\x91': ('lsquo', '2018'),
+                 '\x92': ('rsquo', '2019'),
+                 '\x93': ('ldquo', '201C'),
+                 '\x94': ('rdquo', '201D'),
+                 '\x95': ('bull', '2022'),
+                 '\x96': ('ndash', '2013'),
+                 '\x97': ('mdash', '2014'),
+                 '\x98': ('tilde', '2DC'),
+                 '\x99': ('trade', '2122'),
+                 '\x9a': ('scaron', '161'),
+                 '\x9b': ('rsaquo', '203A'),
+                 '\x9c': ('oelig', '153'),
+                 '\x9d': '?',
+                 '\x9e': ('#x17E', '17E'),
+                 '\x9f': ('Yuml', '178') }
+
+#######################################################################
+
+
+# By default, act as an HTML pretty-printer.
+if __name__ == '__main__':
+    soup = BeautifulSoup(sys.stdin.read())
+    print soup.prettify()
diff --git a/scripts/microbes/README.txt b/scripts/microbes/README.txt
new file mode 100644
index 0000000..7904f20
--- /dev/null
+++ b/scripts/microbes/README.txt
@@ -0,0 +1,24 @@
+The scripts found in this directory are used to download and generate the files relating to Galaxy as a microbial resource.
+This includes scripts to access the NCBI Genome Projects site, download the relevant data, and convert it to a form usable in Galaxy. Data is generated for the Microbial Datasource tool, for Galaxy interval operations (chromosome names and lengths), and for the Extract Genomic DNA tool.  Information about each organism is also written to a '.info' file in that organism's directory.
+
+Step 3 requires a binary 'faToNib' to properly generate sequence files.
+
+Steps should be performed in the order they appear here.
+
+(1.) To download and process Genome Projects from NCBI into a form usable by Galaxy:
+	python /GALAXY_ROOT/scripts/microbes/harvest_bacteria.py /OUTPUT/DIRECTORY/microbes/ > /OUTPUT/DIRECTORY/harvest.txt
+
+(2.) To walk the downloaded Genome Projects and convert IDs, in place, to match the UCSC Archaea browser, where applicable:
+	python /GALAXY_ROOT/scripts/microbes/ncbi_to_ucsc.py /OUTPUT/DIRECTORY/microbes/ > /OUTPUT/DIRECTORY/ncbi_to_ucsc.txt
+
+(3.) To create nib files (for extraction) and to generate the location file content for Microbes used for extracting Genomic DNA:
+	python /GALAXY_ROOT/scripts/microbes/create_nib_seq_loc_file.py /OUTPUT/DIRECTORY/microbes/ seq.loc > /OUTPUT/DIRECTORY/sequence.txt
+
+(4.) To create the location file for the Microbial Data Resource tool in Galaxy:
+	python /GALAXY_ROOT/scripts/microbes/create_bacteria_loc_file.py /OUTPUT/DIRECTORY/microbes/ > /OUTPUT/DIRECTORY/microbial_data.loc
+
+(5.) To generate a single file containing the length of each chromosome for each species, to be added to 'manual_builds.txt':
+	python /GALAXY_ROOT/scripts/microbes/get_builds_lengths.py /OUTPUT/DIRECTORY/microbes/ > /OUTPUT/DIRECTORY/microbes.len
+
+(6.) To create the wiki table listing available microbial data in Galaxy:
+	python /GALAXY_ROOT/scripts/microbes/create_bacteria_table.py /OUTPUT/DIRECTORY/microbes/ > /OUTPUT/DIRECTORY/microbes.table
\ No newline at end of file
diff --git a/scripts/microbes/create_bacteria_loc_file.py b/scripts/microbes/create_bacteria_loc_file.py
new file mode 100644
index 0000000..8123170
--- /dev/null
+++ b/scripts/microbes/create_bacteria_loc_file.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+import os
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    base_dir = os.path.join( os.getcwd(), "bacteria" )
+    try:
+        base_dir = sys.argv[1]
+    except:
+        pass
+        # print "using default base_dir:", base_dir
+
+    organisms = {}
+    for result in os.walk(base_dir):
+        this_base_dir, sub_dirs, files = result
+        for file in files:
+            if file[-5:] == ".info":
+                dict = {}
+                info_file = open(os.path.join(this_base_dir, file), 'r')
+                info = info_file.readlines()
+                info_file.close()
+                for line in info:
+                    fields = line.replace("\n", "").split("=")
+                    dict[fields[0]] = "=".join(fields[1:])
+                if 'genome project id' in dict.keys():
+                    name = dict['genome project id']
+                    if 'build' in dict.keys():
+                        name = dict['build']
+                    if name not in organisms.keys():
+                        organisms[name] = {'chrs': {}, 'base_dir': this_base_dir}
+                    for key in dict.keys():
+                        organisms[name][key] = dict[key]
+                else:
+                    if dict['organism'] not in organisms.keys():
+                        organisms[dict['organism']] = {'chrs': {}, 'base_dir': this_base_dir}
+                    organisms[dict['organism']]['chrs'][dict['chromosome']] = dict
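+
+    # For illustration, a hypothetical '.info' file as parsed by the walk
+    # above (one key=value pair per line; the values here are made up):
+    #   genome project id=12345
+    #   build=eschColi_K12
+    #   name=Escherichia coli K12
+    #   kingdom=Bacteria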
+    for org in organisms:
+        org = organisms[org]
+        # if no gpi, then must be a ncbi chr which corresponds to a UCSC org, w/o matching UCSC designation
+        try:
+            build = org['genome project id']
+        except:
+            continue
+        if 'build' in org:
+            build = org['build']
+            print "ORG\t%s\t%s\t%s\t%s\t%s\t%s\tUCSC" % ( build, org['name'], org['kingdom'], org['group'], org['chromosomes'], org['info url'] )
+        else:
+            print "ORG\t%s\t%s\t%s\t%s\t%s\t%s\tNone" % ( build, org['name'], org['kingdom'], org['group'], org['chromosomes'], org['info url'] )
+
+        for chr in org['chrs']:
+            chr = org['chrs'][chr]
+            print "CHR\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % ( build, chr['chromosome'], chr['name'], chr['length'], chr['gi'], chr['gb'], "http://www.ncbi.nlm.nih.gov/entrez/viewer.fcgi?db=nucleotide&val=" + chr['refseq'] )
+            for feature in ['CDS', 'tRNA', 'rRNA']:
+                print "DATA\t%s_%s_%s\t%s\t%s\t%s\t%s\t%s" % ( build, chr['chromosome'], feature, build, chr['chromosome'], feature, "bed", os.path.join( org['base_dir'], "%s.%s.bed" % ( chr['chromosome'], feature ) ) )
+            # FASTA
+            print "DATA\t%s_%s_%s\t%s\t%s\t%s\t%s\t%s" % ( build, chr['chromosome'], "seq", build, chr['chromosome'], "sequence", "fasta", os.path.join( org['base_dir'], "%s.fna" % chr['chromosome'] ) )
+            # GeneMark
+            if os.path.exists( os.path.join( org['base_dir'], "%s.GeneMark.bed" % chr['chromosome'] ) ):
+                print "DATA\t%s_%s_%s\t%s\t%s\t%s\t%s\t%s" % ( build, chr['chromosome'], "GeneMark", build, chr['chromosome'], "GeneMark", "bed", os.path.join( org['base_dir'], "%s.GeneMark.bed" % chr['chromosome'] ) )
+            # GenMarkHMM
+            if os.path.exists( os.path.join( org['base_dir'], "%s.GeneMarkHMM.bed" % chr['chromosome'] ) ):
+                print "DATA\t%s_%s_%s\t%s\t%s\t%s\t%s\t%s" % ( build, chr['chromosome'], "GeneMarkHMM", build, chr['chromosome'], "GeneMarkHMM", "bed", os.path.join( org['base_dir'], "%s.GeneMarkHMM.bed" % chr['chromosome'] ) )
+            # Glimmer3
+            if os.path.exists( os.path.join( org['base_dir'], "%s.Glimmer3.bed" % chr['chromosome'] ) ):
+                print "DATA\t%s_%s_%s\t%s\t%s\t%s\t%s\t%s" % ( build, chr['chromosome'], "Glimmer3", build, chr['chromosome'], "Glimmer3", "bed", os.path.join( org['base_dir'], "%s.Glimmer3.bed" % chr['chromosome'] ) )
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/scripts/microbes/create_bacteria_table.py b/scripts/microbes/create_bacteria_table.py
new file mode 100644
index 0000000..a8a237e
--- /dev/null
+++ b/scripts/microbes/create_bacteria_table.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+import os
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    base_dir = os.path.join( os.getcwd(), "bacteria" )
+    try:
+        base_dir = sys.argv[1]
+    except:
+        pass
+        # print "using default base_dir:", base_dir
+
+    organisms = {}
+    for result in os.walk(base_dir):
+        this_base_dir, sub_dirs, files = result
+        for file in files:
+            if file[-5:] == ".info":
+                dict = {}
+                info_file = open(os.path.join(this_base_dir, file), 'r')
+                info = info_file.readlines()
+                info_file.close()
+                for line in info:
+                    fields = line.replace("\n", "").split("=")
+                    dict[fields[0]] = "=".join(fields[1:])
+                if 'genome project id' in dict.keys():
+                    name = dict['genome project id']
+                    if 'build' in dict.keys():
+                        name = dict['build']
+                    if name not in organisms.keys():
+                        organisms[name] = {'chrs': {}, 'base_dir': this_base_dir}
+                    for key in dict.keys():
+                        organisms[name][key] = dict[key]
+                else:
+                    if dict['organism'] not in organisms.keys():
+                        organisms[dict['organism']] = {'chrs': {}, 'base_dir': this_base_dir}
+                    organisms[dict['organism']]['chrs'][dict['chromosome']] = dict
+
+    orgs = organisms.keys()
+    for org in orgs:
+        if 'name' not in organisms[org]:
+            del organisms[org]
+
+    orgs = organisms.keys()
+    # need to sort by name
+    for i in range(0, len(orgs) - 1):
+        swap_test = False
+        for j in range(0, len(orgs) - i - 1):
+            if organisms[orgs[j]]['name'] > organisms[orgs[j + 1]]['name']:
+                orgs[j], orgs[j + 1] = orgs[j + 1], orgs[j]
+                swap_test = True
+        if swap_test is False:
+            break
+
+    print "||'''Organism'''||'''Kingdom'''||'''Group'''||'''Links to UCSC Archaea Browser'''||"
+
+    for org in orgs:
+        org = organisms[org]
+        at_ucsc = False
+        # if no gpi, then must be a ncbi chr which corresponds to a UCSC org, w/o matching UCSC designation
+        try:
+            org['genome project id']
+        except:
+            continue
+        if 'build' in org:
+            at_ucsc = True
+
+        out_str = "||" + org['name'] + "||" + org['kingdom'] + "||" + org['group'] + "||"
+        if at_ucsc:
+            out_str = out_str + "Yes"
+        out_str = out_str + "||"
+        print out_str
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/scripts/microbes/create_nib_seq_loc_file.py b/scripts/microbes/create_nib_seq_loc_file.py
new file mode 100644
index 0000000..5b4e924
--- /dev/null
+++ b/scripts/microbes/create_nib_seq_loc_file.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+import os
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    base_dir = os.path.join( os.getcwd(), "bacteria" )
+    try:
+        base_dir = sys.argv[1]
+    except:
+        print "using default base_dir:", base_dir
+
+    loc_out = os.path.join( base_dir, "seq.loc" )
+    try:
+        loc_out = os.path.join( base_dir, sys.argv[2] )
+    except:
+        print "using default seq.loc:", loc_out
+
+    organisms = {}
+
+    loc_out = open( loc_out, 'wb' )
+
+    for result in os.walk( base_dir ):
+        this_base_dir, sub_dirs, files = result
+        for file in files:
+            if file[-5:] == ".info":
+                dict = {}
+                info_file = open( os.path.join( this_base_dir, file ), 'r' )
+                info = info_file.readlines()
+                info_file.close()
+                for line in info:
+                    fields = line.replace( "\n", "" ).split( "=" )
+                    dict[fields[0]] = "=".join(fields[1:])
+                if 'genome project id' in dict.keys():
+                    name = dict['genome project id']
+                    if 'build' in dict.keys():
+                        name = dict['build']
+                    if name not in organisms.keys():
+                        organisms[name] = {'chrs': {}, 'base_dir': this_base_dir}
+                    for key in dict.keys():
+                        organisms[name][key] = dict[key]
+                else:
+                    if dict['organism'] not in organisms.keys():
+                        organisms[dict['organism']] = {'chrs': {}, 'base_dir': this_base_dir}
+                    organisms[dict['organism']]['chrs'][dict['chromosome']] = dict
+
+    for org in organisms:
+        org = organisms[org]
+        try:
+            build = org['genome project id']
+        except KeyError:
+            continue
+        if 'build' in org:
+            build = org['build']
+
+        seq_path = os.path.join( org['base_dir'], "seq" )
+
+        # create the seq dir; if it already exists, report it but continue anyway
+        # TODO: add better checking, i.e. for updating
+        try:
+            os.mkdir( seq_path )
+        except OSError:
+            print "seq dir already exists for", build
+            # continue
+
+        loc_out.write( "seq %s %s\n" % ( build, seq_path ) )
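+        # the line just written is a Galaxy .loc entry, e.g. (hypothetical values):
+        #     seq eschColi_K12 /path/to/bacteria/eschColi_K12/seq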
+
+        # print org info
+
+        for chr in org['chrs']:
+            chr = org['chrs'][chr]
+
+            fasta_file = os.path.join( org['base_dir'], "%s.fna" % chr['chromosome'] )
+            nib_out_file = os.path.join( seq_path, "%s.nib" % chr['chromosome'] )
+            # create nibs using faToNib binary
+            # TODO: when bx supports writing nib, use it here instead
+            command = "faToNib %s %s" % ( fasta_file, nib_out_file )
+            os.system( command )
+
+    loc_out.close()
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/scripts/microbes/get_builds_lengths.py b/scripts/microbes/get_builds_lengths.py
new file mode 100644
index 0000000..a32a6fc
--- /dev/null
+++ b/scripts/microbes/get_builds_lengths.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+import os
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    base_dir = os.path.join( os.getcwd(), "bacteria" )
+    try:
+        base_dir = sys.argv[1]
+    except IndexError:
+        pass
+        # print "using default base_dir:", base_dir
+
+    organisms = {}
+    for result in os.walk(base_dir):
+        this_base_dir, sub_dirs, files = result
+        for file in files:
+            if file[-5:] == ".info":
+                dict = {}
+                info_file = open(os.path.join(this_base_dir, file), 'r')
+                info = info_file.readlines()
+                info_file.close()
+                for line in info:
+                    fields = line.replace("\n", "").split("=")
+                    dict[fields[0]] = "=".join(fields[1:])
+                if 'genome project id' in dict.keys():
+                    name = dict['genome project id']
+                    if 'build' in dict.keys():
+                        name = dict['build']
+                    if name not in organisms.keys():
+                        organisms[name] = {'chrs': {}, 'base_dir': this_base_dir}
+                    for key in dict.keys():
+                        organisms[name][key] = dict[key]
+                else:
+                    if dict['organism'] not in organisms.keys():
+                        organisms[dict['organism']] = {'chrs': {}, 'base_dir': this_base_dir}
+                    organisms[dict['organism']]['chrs'][dict['chromosome']] = dict
+    for org in organisms:
+        org = organisms[org]
+        # if there is no genome project id, this must be an NCBI chromosome that maps to
+        # a UCSC organism without a matching UCSC designation
+        try:
+            build = org['genome project id']
+        except KeyError:
+            continue
+
+        if 'build' in org:
+            build = org['build']
+
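+        # one tab-separated output line per organism:
+        #     <build>\t<name>\t<chrom>=<length>,...
+        # e.g. (hypothetical values): 12345\tEscherichia coli K-12\tNC_000913=4639675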
+        chrs = []
+        for chrom in org['chrs']:
+            chrom = org['chrs'][chrom]
+            chrs.append( "%s=%s" % ( chrom['chromosome'], chrom['length'] ) )
+        print "%s\t%s\t%s" % ( build, org['name'], ",".join( chrs ) )
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/scripts/microbes/harvest_bacteria.py b/scripts/microbes/harvest_bacteria.py
new file mode 100644
index 0000000..9b0bfb8
--- /dev/null
+++ b/scripts/microbes/harvest_bacteria.py
@@ -0,0 +1,255 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+
+# Harvest Bacteria
+# Connects to NCBI's Microbial Genome Projects website and scrapes it for information.
+# Downloads and converts annotations for each Genome
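+#
+# Usage sketch (assumed; the optional argument overrides the default ./bacteria output dir):
+#     python harvest_bacteria.py [base_dir]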
+import os
+import sys
+import time
+from ftplib import FTP
+from urllib2 import urlopen
+from urllib import urlretrieve
+
+from BeautifulSoup import BeautifulSoup
+from util import get_bed_from_genbank, get_bed_from_glimmer3, get_bed_from_GeneMarkHMM, get_bed_from_GeneMark
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+# this defines the types of ftp files we are interested in, and how to process/convert them to a form for our use
+desired_ftp_files = {'GeneMark': {'ext': 'GeneMark-2.5f', 'parser': 'process_GeneMark'},
+                    'GeneMarkHMM': {'ext': 'GeneMarkHMM-2.6m', 'parser': 'process_GeneMarkHMM'},
+                    'Glimmer3': {'ext': 'Glimmer3', 'parser': 'process_Glimmer3'},
+                    'fna': {'ext': 'fna', 'parser': 'process_FASTA'},
+                    'gbk': {'ext': 'gbk', 'parser': 'process_Genbank'} }
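+# scrape_ftp() below drives its per-file-type conversion from this table, e.g.:
+#     globals()[desired_ftp_files['gbk']['parser']]( target_filename, org_num, refseq )
+# (hypothetical call; the parser name is resolved to one of the process_* functions below)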
+
+
+# number, name, chroms, kingdom, group, genbank, refseq, info_url, ftp_url
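+# e.g. one yielded tuple, with hypothetical values:
+#     ('12345', 'Escherichia coli K-12', ['NC_000913'], 'bacteria',
+#      'Gammaproteobacteria', 'U00096', 'NC_000913', '<info_url>', 'ftp://...')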
+def iter_genome_projects( url="http://www.ncbi.nlm.nih.gov/genomes/lproks.cgi?view=1", info_url_base="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=genomeprj&cmd=Retrieve&dopt=Overview&list_uids=" ):
+    for row in BeautifulSoup( urlopen( url ) ).findAll( name='tr', bgcolor=["#EEFFDD", "#E8E8DD"] ):
+        row = str( row ).replace( "\n", "" ).replace( "\r", "" )
+
+        fields = row.split( "</td>" )
+
+        org_num = fields[0].split( "list_uids=" )[-1].split( "\"" )[0]
+
+        name = fields[1].split( "\">" )[-1].split( "<" )[0]
+
+        kingdom = "archaea"
+        if "<td class=\"bacteria\" align=\"center\">B" in fields[2]:
+            kingdom = "bacteria"
+
+        group = fields[3].split( ">" )[-1]
+
+        info_url = "%s%s" % ( info_url_base, org_num )
+
+        org_genbank = fields[7].split( "\">" )[-1].split( "<" )[0].split( "." )[0]
+        org_refseq = fields[8].split( "\">" )[-1].split( "<" )[0].split( "." )[0]
+
+        # some entries do not have an ftp url, so catch that case here:
+        try:
+            ftp_url = fields[22].split( "href=\"" )[1].split( "\"" )[0]
+        except IndexError:
+            print "FAILED TO ACQUIRE FTP ADDRESS:", org_num, info_url
+            ftp_url = None
+
+        chroms = get_chroms_by_project_id( org_num )
+
+        yield org_num, name, chroms, kingdom, group, org_genbank, org_refseq, info_url, ftp_url
+
+
+def get_chroms_by_project_id( org_num, base_url="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=genomeprj&cmd=Retrieve&dopt=Overview&list_uids=" ):
+    html_count = 0
+    html = None
+    while html_count < 500 and html is None:
+        html_count += 1
+        url = "%s%s" % ( base_url, org_num )
+        try:
+            html = urlopen( url )
+        except Exception:
+            print "GENOME PROJECT FAILED:", html_count, "org:", org_num, url
+            html = None
+            time.sleep( 1 )  # Throttle Connection
+    if html is None:
+        print "GENOME PROJECT COMPLETELY FAILED TO LOAD", "org:", org_num, "%s%s" % ( base_url, org_num )
+        return None
+
+    chroms = []
+    for chr_row in BeautifulSoup( html ).findAll( "tr", { "class": "vvv" } ):
+        chr_row = str( chr_row ).replace( "\n", "" ).replace( "\r", "" )
+        fields2 = chr_row.split( "</td>" )
+        refseq = fields2[1].split( "</a>" )[0].split( ">" )[-1]
+        # genbank = fields2[2].split( "</a>" )[0].split( ">" )[-1]
+        chroms.append( refseq )
+
+    return chroms
+
+
+def get_ftp_contents( ftp_url ):
+    ftp_count = 0
+    ftp_contents = None
+    while ftp_count < 500 and ftp_contents is None:
+        ftp_count += 1
+        try:
+            ftp = FTP( ftp_url.split("/")[2] )
+            ftp.login()
+            ftp.cwd( ftp_url.split( ftp_url.split( "/" )[2] )[-1] )
+            ftp_contents = ftp.nlst()
+            ftp.close()
+        except Exception:
+            ftp_contents = None
+            time.sleep( 1 )  # Throttle Connection
+    return ftp_contents
+
+
+def scrape_ftp( ftp_contents, org_dir, org_num, refseq, ftp_url ):
+    for file_type, items in desired_ftp_files.items():
+        ext = items['ext']
+        ftp_filename = "%s.%s" % ( refseq, ext )
+        target_filename = os.path.join( org_dir, "%s.%s" % ( refseq, ext ) )
+        if ftp_filename in ftp_contents:
+            url_count = 0
+            url = "%s/%s" % ( ftp_url, ftp_filename )
+            results = None
+            while url_count < 500 and results is None:
+                url_count += 1
+                try:
+                    results = urlretrieve( url, target_filename )
+                except Exception:
+                    results = None
+                    time.sleep(1)  # Throttle Connection
+            if results is None:
+                print "URL COMPLETELY FAILED TO LOAD:", url
+                return
+
+            # do special processing for each file type:
+            if items['parser'] is not None:
+                globals()[items['parser']]( target_filename, org_num, refseq )
+        else:
+            print "FTP filetype:", file_type, "not found for", org_num, refseq
+    # FTP Files have been Loaded
+
+
+def process_FASTA( filename, org_num, refseq ):
+    fasta = [line.strip() for line in open( filename, 'rb' ).readlines()]
+    fasta_header = fasta.pop( 0 )[1:]
+    fasta_header_split = fasta_header.split( "|" )
+    chr_name = fasta_header_split.pop( -1 ).strip()
+    accessions = {fasta_header_split[0]: fasta_header_split[1], fasta_header_split[2]: fasta_header_split[3]}
+    fasta = "".join( fasta )
+
+    # Create Chrom Info File:
+    chrom_info_file = open( os.path.join( os.path.split( filename )[0], "%s.info" % refseq ), 'wb+' )
+    chrom_info_file.write( "chromosome=%s\nname=%s\nlength=%s\norganism=%s\n" % ( refseq, chr_name, len( fasta ), org_num ) )
+    try:
+        chrom_info_file.write( "gi=%s\n" % accessions['gi'] )
+    except KeyError:
+        chrom_info_file.write( "gi=None\n" )
+    try:
+        chrom_info_file.write( "gb=%s\n" % accessions['gb'] )
+    except KeyError:
+        chrom_info_file.write( "gb=None\n" )
+    chrom_info_file.write( "refseq=%s\n" % refseq )
+    chrom_info_file.close()
+
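+# the .info file written above is plain key=value text, e.g. (hypothetical values):
+#     chromosome=NC_000913
+#     name=Escherichia coli K-12, complete genome
+#     length=4639675
+#     organism=12345
+#     gi=48994873
+#     gb=U00096
+#     refseq=NC_000913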
+
+def process_Genbank( filename, org_num, refseq ):
+    # extracts 'CDS', 'tRNA', 'rRNA' features from genbank file
+    features = get_bed_from_genbank( filename, refseq, ['CDS', 'tRNA', 'rRNA'] )
+    for feature in features.keys():
+        feature_file = open( os.path.join( os.path.split( filename )[0], "%s.%s.bed" % ( refseq, feature ) ), 'wb+' )
+        feature_file.write( '\n'.join( features[feature] ) )
+        feature_file.close()
+    print "Genbank extraction finished for chrom:", refseq, "file:", filename
+
+
+def process_Glimmer3( filename, org_num, refseq ):
+    try:
+        glimmer3_bed = get_bed_from_glimmer3( filename, refseq )
+    except Exception as e:
+        print "Converting Glimmer3 to bed FAILED! For chrom:", refseq, "file:", filename, e
+        glimmer3_bed = []
+    glimmer3_bed_file = open( os.path.join( os.path.split( filename )[0], "%s.Glimmer3.bed" % refseq ), 'wb+' )
+    glimmer3_bed_file.write( '\n'.join( glimmer3_bed ) )
+    glimmer3_bed_file.close()
+
+
+def process_GeneMarkHMM( filename, org_num, refseq ):
+    try:
+        geneMarkHMM_bed = get_bed_from_GeneMarkHMM( filename, refseq )
+    except Exception as e:
+        print "Converting GeneMarkHMM to bed FAILED! For chrom:", refseq, "file:", filename, e
+        geneMarkHMM_bed = []
+    geneMarkHMM_bed_bed_file = open( os.path.join( os.path.split( filename )[0], "%s.GeneMarkHMM.bed" % refseq ), 'wb+' )
+    geneMarkHMM_bed_bed_file.write( '\n'.join( geneMarkHMM_bed ) )
+    geneMarkHMM_bed_bed_file.close()
+
+
+def process_GeneMark( filename, org_num, refseq ):
+    try:
+        geneMark_bed = get_bed_from_GeneMark( filename, refseq )
+    except Exception as e:
+        print "Converting GeneMark to bed FAILED! For chrom:", refseq, "file:", filename, e
+        geneMark_bed = []
+    geneMark_bed_bed_file = open( os.path.join( os.path.split( filename )[0], "%s.GeneMark.bed" % refseq ), 'wb+' )
+    geneMark_bed_bed_file.write( '\n'.join( geneMark_bed ) )
+    geneMark_bed_bed_file.close()
+
+
+def __main__():
+    start_time = time.time()
+    base_dir = os.path.join( os.getcwd(), "bacteria" )
+    try:
+        base_dir = sys.argv[1]
+    except IndexError:
+        print "using default base_dir:", base_dir
+
+    try:
+        os.mkdir( base_dir )
+        print "path '%s' has been created" % base_dir
+    except OSError:
+        print "path '%s' seems to already exist" % base_dir
+
+    for org_num, name, chroms, kingdom, group, org_genbank, org_refseq, info_url, ftp_url in iter_genome_projects():
+        if chroms is None:
+            continue  # No chrom information, we can't really do anything with this organism
+        # Create org directory, if exists, assume it is done and complete --> skip it
+        try:
+            org_dir = os.path.join( base_dir, org_num )
+            os.mkdir( org_dir )
+        except OSError:
+            print "Organism %s already exists on disk, skipping" % org_num
+            continue
+
+        # get ftp contents
+        ftp_contents = get_ftp_contents( ftp_url )
+        if ftp_contents is None:
+            "FTP COMPLETELY FAILED TO LOAD", "org:", org_num, "ftp:", ftp_url
+        else:
+            for refseq in chroms:
+                scrape_ftp( ftp_contents, org_dir, org_num, refseq, ftp_url )
+                # FTP Files have been Loaded
+                print "Org:", org_num, "chrom:", refseq, "[", time.time() - start_time, "seconds elapsed. ]"
+
+        # Create org info file
+        info_file = open( os.path.join( org_dir, "%s.info" % org_num ), 'wb+' )
+        info_file.write("genome project id=%s\n" % org_num )
+        info_file.write("name=%s\n" % name )
+        info_file.write("kingdom=%s\n" % kingdom )
+        info_file.write("group=%s\n" % group )
+        info_file.write("chromosomes=%s\n" % ",".join( chroms ) )
+        info_file.write("info url=%s\n" % info_url )
+        info_file.write("ftp url=%s\n" % ftp_url )
+        info_file.close()
+
+    print "Finished Harvesting", "[", time.time() - start_time, "seconds elapsed. ]"
+    print "[", ( time.time() - start_time ) / 60, "minutes. ]"
+    print "[", ( time.time() - start_time ) / 60 / 60, "hours. ]"
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/scripts/microbes/ncbi_to_ucsc.py b/scripts/microbes/ncbi_to_ucsc.py
new file mode 100644
index 0000000..6f37bb1
--- /dev/null
+++ b/scripts/microbes/ncbi_to_ucsc.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+"""
+Walk downloaded Genome Projects and Convert, in place, IDs to match the UCSC Archaea browser, where applicable.
+Uses UCSC Archaea DSN.
+"""
+import os
+import sys
+import urllib
+from shutil import move
+from xml.etree import ElementTree
+
+
+def __main__():
+    base_dir = os.path.join( os.getcwd(), "bacteria" )
+    try:
+        base_dir = sys.argv[1]
+    except IndexError:
+        print "using default base_dir:", base_dir
+
+    organisms = {}
+    for result in os.walk(base_dir):
+        this_base_dir, sub_dirs, files = result
+        for file in files:
+            if file[-5:] == ".info":
+                dict = {}
+                info_file = open(os.path.join(this_base_dir, file), 'r')
+                info = info_file.readlines()
+                info_file.close()
+                for line in info:
+                    fields = line.replace("\n", "").split("=")
+                    dict[fields[0]] = "=".join(fields[1:])
+                if 'genome project id' in dict.keys():
+                    if dict['genome project id'] not in organisms.keys():
+                        organisms[dict['genome project id']] = {'chrs': {}, 'base_dir': this_base_dir}
+                    for key in dict.keys():
+                        organisms[dict['genome project id']][key] = dict[key]
+                else:
+                    if dict['organism'] not in organisms.keys():
+                        organisms[dict['organism']] = {'chrs': {}, 'base_dir': this_base_dir}
+                    organisms[dict['organism']]['chrs'][dict['chromosome']] = dict
+
+    # get UCSC data
+
+    URL = "http://archaea.ucsc.edu/cgi-bin/das/dsn"
+
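+    # the DSN response is XML whose DSN/SOURCE elements carry the UCSC build ids,
+    # roughly of the (assumed) shape:
+    #     <DASDSN><DSN><SOURCE id="methJann1">...</SOURCE>...</DSN>...</DASDSN>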
+    try:
+        page = urllib.urlopen(URL)
+    except IOError:
+        print "#Unable to open " + URL
+        print "?\tunspecified (?)"
+        sys.exit(1)
+
+    text = page.read()
+    try:
+        tree = ElementTree.fromstring(text)
+    except Exception:
+        print "#Invalid xml passed back from " + URL
+        print "?\tunspecified (?)"
+        sys.exit(1)
+
+    builds = {}
+
+    # print "#Harvested from http://archaea.ucsc.edu/cgi-bin/das/dsn"
+    # print "?\tunspecified (?)"
+    for dsn in tree:
+        build = dsn.find("SOURCE").attrib['id']
+        try:
+            org_page = urllib.urlopen("http://archaea.ucsc.edu/cgi-bin/hgGateway?db=" + build).read().replace("\n", "").split("<table border=2 cellspacing=2 cellpadding=2>")[1].split("</table>")[0].split("</tr>")
+        except (IOError, IndexError):
+            print "NO CHROMS FOR", build
+            continue
+        org_page.pop(0)
+        if org_page[-1] == "":
+            org_page.pop(-1)
+
+        for row in org_page:
+            chr = row.split("</a>")[0].split(">")[-1]
+            refseq = row.split("</a>")[-2].split(">")[-1]
+            for org in organisms:
+                for org_chr in organisms[org]['chrs']:
+                    if organisms[org]['chrs'][org_chr]['chromosome'] == refseq:
+                        if org not in builds:
+                            builds[org] = {'chrs': {}, 'build': build}
+                        builds[org]['chrs'][refseq] = chr
+                        # print build,org,chr,refseq
+
+    print
+    ext_to_edit = ['bed', 'info', ]
+    for org in builds:
+        print org, "changed to", builds[org]['build']
+
+        # org info file
+        info_file_old = os.path.join(base_dir, org, org + ".info")
+        info_file_new = os.path.join(base_dir, org, builds[org]['build'] + ".info")
+
+        old_dir = os.path.join(base_dir, org)
+        new_dir = os.path.join(base_dir, builds[org]['build'])
+
+        # open and edit org info file
+        info_file_contents = open(info_file_old).read()
+        info_file_contents = info_file_contents + "build=" + builds[org]['build'] + "\n"
+        for chrom in builds[org]['chrs']:
+            info_file_contents = info_file_contents.replace(chrom, builds[org]['chrs'][chrom])
+            for result in os.walk(os.path.join(base_dir, org)):
+                this_base_dir, sub_dirs, files = result
+                for file in files:
+                    if file[0:len(chrom)] == chrom:
+                        # rename file
+                        old_name = os.path.join(this_base_dir, file)
+                        new_name = os.path.join(this_base_dir, builds[org]['chrs'][chrom] + file[len(chrom):])
+                        move(old_name, new_name)
+
+                        # edit the contents of the file, skipping extensions not in ext_to_edit
+                        if file.split(".")[-1] not in ext_to_edit:
+                            continue
+
+                        file_contents = open(new_name).read()
+                        file_contents = file_contents.replace(chrom, builds[org]['chrs'][chrom])
+
+                        # special case fixes...
+                        if file[-5:] == ".info":
+                            file_contents = file_contents.replace("organism=" + org, "organism=" + builds[org]['build'])
+                            file_contents = file_contents.replace("refseq=" + builds[org]['chrs'][chrom], "refseq=" + chrom)
+
+                        # write out new file
+                        file_out = open(new_name, 'w')
+                        file_out.write(file_contents)
+                        file_out.close()
+
+        # write out org info file and remove old file
+        org_info_out = open(info_file_new, 'w')
+        org_info_out.write(info_file_contents)
+        org_info_out.close()
+        os.unlink(info_file_old)
+
+        # change org directory name
+        move(old_dir, new_dir)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/scripts/microbes/util.py b/scripts/microbes/util.py
new file mode 100644
index 0000000..b4a05a9
--- /dev/null
+++ b/scripts/microbes/util.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+# genbank_to_bed
+class Region:
+    def __init__( self ):
+        self.qualifiers = {}
+        self.start = None
+        self.end = None
+        self.strand = '+'
+
+    def set_coordinates_by_location( self, location ):
+        location = location.strip().lower().replace( '..', ',' )
+        if "complement(" in location:  # if part of the sequence is on the negative strand, it all is?
+            self.strand = '-'  # default of + strand
+        for remove_text in ["join(", "order(", "complement(", ")"]:
+            location = location.replace( remove_text, "" )
+        for number in location.split( ',' ):
+            number = number.strip('\n\r\t <>,()')
+            if number:
+                if "^" in number:
+                    # a single point
+                    # check that this is correct for points, ie: 413/NC_005027.gbk:     misc_feature    6636286^6636287  ===> 6636285,6636286
+                    end = int( number.split( '^' )[0] )
+                    start = end - 1
+                else:
+                    end = int( number )
+                    start = end - 1  # match BED coordinates
+                if self.start is None or start < self.start:
+                    self.start = start
+                if self.end is None or end > self.end:
+                    self.end = end
+
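+# a quick illustration of the location parsing above (hypothetical doctest):
+#     >>> r = Region()
+#     >>> r.set_coordinates_by_location( "complement(join(10..40,60..100))" )
+#     >>> ( r.start, r.end, r.strand )
+#     (9, 100, '-')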
+
+class GenBankFeatureParser:
+    """Parses Features from Single Locus GenBank file"""
+    def __init__( self, fh, features_list=None ):
+        features_list = features_list or []
+        self.fh = fh
+        self.features = {}
+        fh.seek(0)
+        in_features = False
+        last_feature_name = None
+        base_indent = 0
+        last_attr_name = None
+        for line in fh:
+            if not in_features and line.startswith('FEATURES'):
+                in_features = True
+                continue
+            if in_features:
+                lstrip = line.lstrip()
+                if line and lstrip == line:
+                    break  # end of feature block
+                cur_indent = len( line ) - len( lstrip )
+                if last_feature_name is None:
+                    base_indent = cur_indent
+                if cur_indent == base_indent:
+                    # a new feature
+                    last_attr_name = None
+                    fields = lstrip.split( None, 1 )
+                    last_feature_name = fields[0].strip()
+                    if not features_list or last_feature_name in features_list:
+                        if last_feature_name not in self.features:
+                            self.features[last_feature_name] = []
+                        region = Region()
+                        region.set_coordinates_by_location( fields[1] )
+                        self.features[last_feature_name].append( region )
+                else:
+                    # add info to last known feature
+                    line = line.strip()
+                    if line.startswith( '/' ):
+                        fields = line[1:].split( '=', 1 )
+                        if len( fields ) == 2:
+                            last_attr_name, content = fields
+                        else:
+                            # No data
+                            last_attr_name = line[1:]
+                            content = ""
+                        content = content.strip( '"' )
+                        if last_attr_name not in self.features[last_feature_name][-1].qualifiers:
+                            self.features[last_feature_name][-1].qualifiers[last_attr_name] = []
+                        self.features[last_feature_name][-1].qualifiers[last_attr_name].append( content )
+                    elif last_attr_name is None and last_feature_name:
+                        # must still be working on location
+                        self.features[last_feature_name][-1].set_coordinates_by_location( line )
+                    else:
+                        # continuation of multi-line qualifier content; translation
+                        # sequences are joined without a space, everything else with one
+                        if last_attr_name.lower() in ['translation']:
+                            self.features[last_feature_name][-1].qualifiers[last_attr_name][-1] = "%s%s" % ( self.features[last_feature_name][-1].qualifiers[last_attr_name][-1], line.rstrip( '"' ) )
+                        else:
+                            self.features[last_feature_name][-1].qualifiers[last_attr_name][-1] = "%s %s" % ( self.features[last_feature_name][-1].qualifiers[last_attr_name][-1], line.rstrip( '"' ) )
+
+    def get_features_by_type( self, feature_type ):
+        if feature_type not in self.features:
+            return []
+        else:
+            return self.features[feature_type]
+
+
+# Parse a GenBank file and return, for each requested feature type, a list of BED-formatted lines
+def get_bed_from_genbank(gb_file, chrom, feature_list):
+    genbank_parser = GenBankFeatureParser( open( gb_file ) )
+    features = {}
+    for feature_type in feature_list:
+        features[feature_type] = []
+        for feature in genbank_parser.get_features_by_type( feature_type ):
+            name = ""
+            for name_tag in ['gene', 'locus_tag', 'db_xref']:
+                if name_tag in feature.qualifiers:
+                    if name:
+                        name = name + ";"
+                    name = name + feature.qualifiers[name_tag][0].replace(" ", "_")
+            if not name:
+                name = "unknown"
+
+            features[feature_type].append( "%s\t%s\t%s\t%s\t%s\t%s" % ( chrom, feature.start, feature.end, name, 0, feature.strand ) )  # append new bed field here
+    return features
+
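+# usage sketch for the GenBank conversion (hypothetical file and values):
+#     >>> beds = get_bed_from_genbank( "NC_000913.gbk", "NC_000913", ["CDS"] )
+#     >>> beds["CDS"][0]
+#     'NC_000913\t189\t255\tthrL;b0001\t0\t+'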
+
+# geneMark to bed
+# converts GeneMark output to bed
+# returns an array of bed regions
+def get_bed_from_GeneMark(geneMark_filename, chr):
+    orfs = open(geneMark_filename).readlines()
+    while True:
+        line = orfs.pop(0).strip()
+        if line.startswith("--------"):
+            orfs.pop(0)
+            break
+    orfs = "".join(orfs)
+    ctr = 0
+    regions = []
+    for block in orfs.split("\n\n"):
+        if block.startswith("List of Regions of interest"):
+            break
+        best_block = {'start': 0, 'end': 0, 'strand': '+', 'avg_prob': -sys.maxint, 'start_prob': -sys.maxint, 'name': 'DNE'}
+        ctr += 1
+        ctr2 = 0
+        for line in block.split("\n"):
+            ctr2 += 1
+            fields = line.split()
+            start = int(fields.pop(0)) - 1
+            end = int(fields.pop(0))
+            strand = fields.pop(0)
+            if strand == 'complement':
+                strand = "-"
+            else:
+                strand = "+"
+            frame = fields.pop(0)
+            frame = frame + " " + fields.pop(0)
+            avg_prob = float(fields.pop(0))
+            try:
+                start_prob = float(fields.pop(0))
+            except ( IndexError, ValueError ):
+                start_prob = 0
+            name = "orf_" + str(ctr) + "_" + str(ctr2)
+            if avg_prob >= best_block['avg_prob']:
+                if start_prob > best_block['start_prob']:
+                    best_block = {'start': start, 'end': end, 'strand': strand, 'avg_prob': avg_prob, 'start_prob': start_prob, 'name': name}
+        regions.append(chr + "\t" + str(best_block['start']) + "\t" + str(best_block['end']) + "\t" + best_block['name'] + "\t" + str(int(best_block['avg_prob'] * 1000)) + "\t" + best_block['strand'])
+    return regions
+
+
+# geneMarkHMM to bed
+# converts GeneMarkHMM to bed
+# returns an array of bed regions
+def get_bed_from_GeneMarkHMM(geneMarkHMM_filename, chr):
+    orfs = open(geneMarkHMM_filename).readlines()
+    while True:
+        line = orfs.pop(0).strip()
+        if line == "Predicted genes":
+            orfs.pop(0)
+            orfs.pop(0)
+            break
+    regions = []
+    for line in orfs:
+        fields = line.split()
+        name = "gene_number_" + fields.pop(0)
+        strand = fields.pop(0)
+        start = fields.pop(0)
+        if start.startswith("<"):
+            start = 1
+        start = int(start) - 1
+        end = fields.pop(0)
+        if end.startswith(">"):
+            end = end[1:]
+        end = int(end)
+        score = 0  # no scores provided
+        regions.append(chr + "\t" + str(start) + "\t" + str(end) + "\t" + name + "\t" + str(score) + "\t" + strand)
+    return regions
+
+
+# glimmer3 to bed
+# converts glimmer3 to bed, doing some linear scaling (probably not correct?) on scores
+# returns an array of bed regions
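+# worked example of the scaling below (assuming min_score = -5 and max_score = 15):
+# delta = 5, so a raw score s maps to (s + 5) * 1000 / 20, i.e. -5 -> 0 and 15 -> 1000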
+def get_bed_from_glimmer3(glimmer3_filename, chr):
+    max_score = -sys.maxint
+    min_score = sys.maxint
+    orfs = []
+    for line in open(glimmer3_filename).readlines():
+        if line.startswith(">"):
+            continue
+        fields = line.split()
+        name = fields.pop(0)
+        start = int(fields.pop(0))
+        end = int(fields.pop(0))
+        if int(fields.pop(0)) < 0:
+            strand = "-"
+            temp = start
+            start = end
+            end = temp
+        else:
+            strand = "+"
+        start = start - 1
+        score = (float(fields.pop(0)))
+        if score > max_score:
+            max_score = score
+        if score < min_score:
+            min_score = score
+        orfs.append((chr, start, end, name, score, strand))
+
+    delta = 0
+    if min_score < 0:
+        delta = min_score * -1
+    regions = []
+    for (chr, start, end, name, score, strand) in orfs:
+        # round-trip through str() because int() alone was truncating values that print as 1000.0 down to 999 (float precision)
+        my_score = int(float(str( ( (score + delta) * (1000 - 0 - (min_score + delta)) ) / ( (max_score + delta) + 0 ))))
+
+        regions.append(chr + "\t" + str(start) + "\t" + str(end) + "\t" + name + "\t" + str(my_score) + "\t" + strand)
+    return regions
diff --git a/scripts/migrate_tools/0002_tools.sh b/scripts/migrate_tools/0002_tools.sh
new file mode 100644
index 0000000..926aba9
--- /dev/null
+++ b/scripts/migrate_tools/0002_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
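+# Typically invoked from the Galaxy root during a tool migration, e.g. (assumed):
+#     sh ./scripts/migrate_tools/0002_tools.sh install_dependencies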
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0002_tools.xml "$@"
diff --git a/scripts/migrate_tools/0002_tools.xml b/scripts/migrate_tools/0002_tools.xml
new file mode 100644
index 0000000..2f1b1b8
--- /dev/null
+++ b/scripts/migrate_tools/0002_tools.xml
@@ -0,0 +1,113 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository name="emboss_datatypes" description="Datatypes for Emboss tools" changeset_revision="a89163f31369" />
+    <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5.0.0 tools" changeset_revision="7334f6d0ac17">
+        <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_5/emboss_antigenic.xml" />
+        <tool id="EMBOSS: backtranseq2" version="5.0.0" file="emboss_5/emboss_backtranseq.xml" />
+        <tool id="EMBOSS: banana3" version="5.0.0" file="emboss_5/emboss_banana.xml" />
+        <tool id="EMBOSS: biosed4" version="5.0.0" file="emboss_5/emboss_biosed.xml" />
+        <tool id="EMBOSS: btwisted5" version="5.0.0" file="emboss_5/emboss_btwisted.xml" />
+        <tool id="EMBOSS: cai_custom6" version="5.0.0" file="emboss_5/emboss_cai_custom.xml" />
+        <tool id="EMBOSS: cai6" version="5.0.0" file="emboss_5/emboss_cai.xml" />
+        <tool id="EMBOSS: chaos7" version="5.0.0" file="emboss_5/emboss_chaos.xml" />
+        <tool id="EMBOSS: charge8" version="5.0.0" file="emboss_5/emboss_charge.xml" />
+        <tool id="EMBOSS: checktrans9" version="5.0.0" file="emboss_5/emboss_checktrans.xml" />
+        <tool id="EMBOSS: chips10" version="5.0.0" file="emboss_5/emboss_chips.xml" />
+        <tool id="EMBOSS: cirdna11" version="5.0.0" file="emboss_5/emboss_cirdna.xml" />
+        <tool id="EMBOSS: codcmp12" version="5.0.0" file="emboss_5/emboss_codcmp.xml" />
+        <tool id="EMBOSS: coderet13" version="5.0.0" file="emboss_5/emboss_coderet.xml" />
+        <tool id="EMBOSS: compseq14" version="5.0.0" file="emboss_5/emboss_compseq.xml" />
+        <tool id="EMBOSS: cpgplot15" version="5.0.0" file="emboss_5/emboss_cpgplot.xml" />
+        <tool id="EMBOSS: cpgreport16" version="5.0.0" file="emboss_5/emboss_cpgreport.xml" />
+        <tool id="EMBOSS: cusp17" version="5.0.0" file="emboss_5/emboss_cusp.xml" />
+        <tool id="EMBOSS: cutseq18" version="5.0.0" file="emboss_5/emboss_cutseq.xml" />
+        <tool id="EMBOSS: dan19" version="5.0.0" file="emboss_5/emboss_dan.xml" />
+        <tool id="EMBOSS: degapseq20" version="5.0.0" file="emboss_5/emboss_degapseq.xml" />
+        <tool id="EMBOSS: descseq21" version="5.0.0" file="emboss_5/emboss_descseq.xml" />
+        <tool id="EMBOSS: diffseq22" version="5.0.0" file="emboss_5/emboss_diffseq.xml" />
+        <tool id="EMBOSS: digest23" version="5.0.0" file="emboss_5/emboss_digest.xml" />
+        <tool id="EMBOSS: dotmatcher24" version="5.0.0" file="emboss_5/emboss_dotmatcher.xml" />
+        <tool id="EMBOSS: dotpath25" version="5.0.0" file="emboss_5/emboss_dotpath.xml" />
+        <tool id="EMBOSS: dottup26" version="5.0.0" file="emboss_5/emboss_dottup.xml" />
+        <tool id="EMBOSS: dreg27" version="5.0.0" file="emboss_5/emboss_dreg.xml" />
+        <tool id="EMBOSS: einverted28" version="5.0.0" file="emboss_5/emboss_einverted.xml" />
+        <tool id="EMBOSS: epestfind29" version="5.0.0" file="emboss_5/emboss_epestfind.xml" />
+        <tool id="EMBOSS: equicktandem31" version="5.0.0" file="emboss_5/emboss_equicktandem.xml" />
+        <tool id="EMBOSS: est2genome32" version="5.0.0" file="emboss_5/emboss_est2genome.xml" />
+        <tool id="EMBOSS: etandem33" version="5.0.0" file="emboss_5/emboss_etandem.xml" />
+        <tool id="EMBOSS: extractfeat34" version="5.0.0" file="emboss_5/emboss_extractfeat.xml" />
+        <tool id="EMBOSS: extractseq35" version="5.0.0" file="emboss_5/emboss_extractseq.xml" />
+        <tool id="EMBOSS: freak36" version="5.0.0" file="emboss_5/emboss_freak.xml" />
+        <tool id="EMBOSS: fuzznuc37" version="5.0.0" file="emboss_5/emboss_fuzznuc.xml" />
+        <tool id="EMBOSS: fuzzpro38" version="5.0.0" file="emboss_5/emboss_fuzzpro.xml" />
+        <tool id="EMBOSS: fuzztran39" version="5.0.0" file="emboss_5/emboss_fuzztran.xml" />
+        <tool id="EMBOSS: garnier40" version="5.0.0" file="emboss_5/emboss_garnier.xml" />
+        <tool id="EMBOSS: geecee41" version="5.0.0" file="emboss_5/emboss_geecee.xml" />
+        <tool id="EMBOSS: getorf42" version="5.0.0" file="emboss_5/emboss_getorf.xml" />
+        <tool id="EMBOSS: helixturnhelix43" version="5.0.0" file="emboss_5/emboss_helixturnhelix.xml" />
+        <tool id="EMBOSS: hmoment44" version="5.0.0" file="emboss_5/emboss_hmoment.xml" />
+        <tool id="EMBOSS: iep45" version="5.0.0" file="emboss_5/emboss_iep.xml" />
+        <tool id="EMBOSS: infoseq46" version="5.0.0" file="emboss_5/emboss_infoseq.xml" />
+        <tool id="EMBOSS: isochore47" version="5.0.0" file="emboss_5/emboss_isochore.xml" />
+        <tool id="EMBOSS: lindna48" version="5.0.0" file="emboss_5/emboss_lindna.xml" />
+        <tool id="EMBOSS: marscan49" version="5.0.0" file="emboss_5/emboss_marscan.xml" />
+        <tool id="EMBOSS: maskfeat50" version="5.0.0" file="emboss_5/emboss_maskfeat.xml" />
+        <tool id="EMBOSS: maskseq51" version="5.0.0" file="emboss_5/emboss_maskseq.xml" />
+        <tool id="EMBOSS: matcher52" version="5.0.0" file="emboss_5/emboss_matcher.xml" />
+        <tool id="EMBOSS: megamerger53" version="5.0.0" file="emboss_5/emboss_megamerger.xml" />
+        <tool id="EMBOSS: merger54" version="5.0.0" file="emboss_5/emboss_merger.xml" />
+        <tool id="EMBOSS: msbar55" version="5.0.0" file="emboss_5/emboss_msbar.xml" />
+        <tool id="EMBOSS: needle56" version="5.0.0" file="emboss_5/emboss_needle.xml" />
+        <tool id="EMBOSS: newcpgreport57" version="5.0.0" file="emboss_5/emboss_newcpgreport.xml" />
+        <tool id="EMBOSS: newcpgseek58" version="5.0.0" file="emboss_5/emboss_newcpgseek.xml" />
+        <tool id="EMBOSS: newseq59" version="5.0.0" file="emboss_5/emboss_newseq.xml" />
+        <tool id="EMBOSS: noreturn60" version="5.0.0" file="emboss_5/emboss_noreturn.xml" />
+        <tool id="EMBOSS: notseq61" version="5.0.0" file="emboss_5/emboss_notseq.xml" />
+        <tool id="EMBOSS: nthseq62" version="5.0.0" file="emboss_5/emboss_nthseq.xml" />
+        <tool id="EMBOSS: octanol63" version="5.0.0" file="emboss_5/emboss_octanol.xml" />
+        <tool id="EMBOSS: oddcomp64" version="5.0.0" file="emboss_5/emboss_oddcomp.xml" />
+        <tool id="EMBOSS: palindrome65" version="5.0.0" file="emboss_5/emboss_palindrome.xml" />
+        <tool id="EMBOSS: pasteseq66" version="5.0.0" file="emboss_5/emboss_pasteseq.xml" />
+        <tool id="EMBOSS: patmatdb67" version="5.0.0" file="emboss_5/emboss_patmatdb.xml" />
+        <tool id="EMBOSS: pepcoil68" version="5.0.0" file="emboss_5/emboss_pepcoil.xml" />
+        <tool id="EMBOSS: pepinfo69" version="5.0.0" file="emboss_5/emboss_pepinfo.xml" />
+        <tool id="EMBOSS: pepnet70" version="5.0.0" file="emboss_5/emboss_pepnet.xml" />
+        <tool id="EMBOSS: pepstats71" version="5.0.0" file="emboss_5/emboss_pepstats.xml" />
+        <tool id="EMBOSS: pepwheel72" version="5.0.0" file="emboss_5/emboss_pepwheel.xml" />
+        <tool id="EMBOSS: pepwindow73" version="5.0.0" file="emboss_5/emboss_pepwindow.xml" />
+        <tool id="EMBOSS: pepwindowall74" version="5.0.0" file="emboss_5/emboss_pepwindowall.xml" />
+        <tool id="EMBOSS: plotcon75" version="5.0.0" file="emboss_5/emboss_plotcon.xml" />
+        <tool id="EMBOSS: plotorf76" version="5.0.0" file="emboss_5/emboss_plotorf.xml" />
+        <tool id="EMBOSS: polydot77" version="5.0.0" file="emboss_5/emboss_polydot.xml" />
+        <tool id="EMBOSS: preg78" version="5.0.0" file="emboss_5/emboss_preg.xml" />
+        <tool id="EMBOSS: prettyplot79" version="5.0.0" file="emboss_5/emboss_prettyplot.xml" />
+        <tool id="EMBOSS: prettyseq80" version="5.0.0" file="emboss_5/emboss_prettyseq.xml" />
+        <tool id="EMBOSS: primersearch81" version="5.0.0" file="emboss_5/emboss_primersearch.xml" />
+        <tool id="EMBOSS: revseq82" version="5.0.0" file="emboss_5/emboss_revseq.xml" />
+        <tool id="EMBOSS: seqmatchall83" version="5.0.0" file="emboss_5/emboss_seqmatchall.xml" />
+        <tool id="EMBOSS: seqret84" version="5.0.0" file="emboss_5/emboss_seqret.xml" />
+        <tool id="EMBOSS: showfeat85" version="5.0.0" file="emboss_5/emboss_showfeat.xml" />
+        <tool id="EMBOSS: shuffleseq87" version="5.0.0" file="emboss_5/emboss_shuffleseq.xml" />
+        <tool id="EMBOSS: sigcleave88" version="5.0.0" file="emboss_5/emboss_sigcleave.xml" />
+        <tool id="EMBOSS: sirna89" version="5.0.0" file="emboss_5/emboss_sirna.xml" />
+        <tool id="EMBOSS: sixpack90" version="5.0.0" file="emboss_5/emboss_sixpack.xml" />
+        <tool id="EMBOSS: skipseq91" version="5.0.0" file="emboss_5/emboss_skipseq.xml" />
+        <tool id="EMBOSS: splitter92" version="5.0.0" file="emboss_5/emboss_splitter.xml" />
+        <tool id="EMBOSS: supermatcher95" version="5.0.0" file="emboss_5/emboss_supermatcher.xml" />
+        <tool id="EMBOSS: syco96" version="5.0.0" file="emboss_5/emboss_syco.xml" />
+        <tool id="EMBOSS: tcode97" version="5.0.0" file="emboss_5/emboss_tcode.xml" />
+        <tool id="EMBOSS: textsearch98" version="5.0.0" file="emboss_5/emboss_textsearch.xml" />
+        <tool id="EMBOSS: tmap99" version="5.0.0" file="emboss_5/emboss_tmap.xml" />
+        <tool id="EMBOSS: tranalign100" version="5.0.0" file="emboss_5/emboss_tranalign.xml" />
+        <tool id="EMBOSS: transeq101" version="5.0.0" file="emboss_5/emboss_transeq.xml" />
+        <tool id="EMBOSS: trimest102" version="5.0.0" file="emboss_5/emboss_trimest.xml" />
+        <tool id="EMBOSS: trimseq103" version="5.0.0" file="emboss_5/emboss_trimseq.xml" />
+        <tool id="EMBOSS: twofeat104" version="5.0.0" file="emboss_5/emboss_twofeat.xml" />
+        <tool id="EMBOSS: union105" version="5.0.0" file="emboss_5/emboss_union.xml" />
+        <tool id="EMBOSS: vectorstrip106" version="5.0.0" file="emboss_5/emboss_vectorstrip.xml" />
+        <tool id="EMBOSS: water107" version="5.0.0" file="emboss_5/emboss_water.xml" />
+        <tool id="EMBOSS: wobble108" version="5.0.0" file="emboss_5/emboss_wobble.xml" />
+        <tool id="EMBOSS: wordcount109" version="5.0.0" file="emboss_5/emboss_wordcount.xml" />
+        <tool id="EMBOSS: wordmatch110" version="5.0.0" file="emboss_5/emboss_wordmatch.xml" />
+    </repository>
+</toolshed>
diff --git a/scripts/migrate_tools/0003_tools.sh b/scripts/migrate_tools/0003_tools.sh
new file mode 100644
index 0000000..dfc3bde
--- /dev/null
+++ b/scripts/migrate_tools/0003_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0003_tools.xml "$@"
diff --git a/scripts/migrate_tools/0003_tools.xml b/scripts/migrate_tools/0003_tools.xml
new file mode 100644
index 0000000..e97c6b5
--- /dev/null
+++ b/scripts/migrate_tools/0003_tools.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository name="freebayes" description="Galaxy Freebayes Bayesian genetic variant detector tool" changeset_revision="213a3d6b579a">
+        <tool id="freebayes" version="0.0.2 " file="variant_detection/freebayes.xml" />
+    </repository>
+</toolshed>
diff --git a/scripts/migrate_tools/0004_tools.sh b/scripts/migrate_tools/0004_tools.sh
new file mode 100644
index 0000000..40b7695
--- /dev/null
+++ b/scripts/migrate_tools/0004_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0004_tools.xml "$@"
diff --git a/scripts/migrate_tools/0004_tools.xml b/scripts/migrate_tools/0004_tools.xml
new file mode 100644
index 0000000..cd35e5d
--- /dev/null
+++ b/scripts/migrate_tools/0004_tools.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository name="blast_datatypes" description="Datatypes for BLAST" changeset_revision="e1c29f302301" />
+    <repository name="ncbi_blast_plus" description="Galaxy wrappers for NCBI BLAST+" changeset_revision="d375502056f1">
+        <tool id="blastxml_to_tabular" version="0.0.8" file="ncbi_blast_plus/blastxml_to_tabular.xml"/>
+        <tool id="ncbi_blastn_wrapper" version="0.0.11" file="ncbi_blast_plus/ncbi_blastn_wrapper.xml"/>
+        <tool id="ncbi_blastp_wrapper" version="0.0.11" file="ncbi_blast_plus/ncbi_blastp_wrapper.xml"/>
+        <tool id="ncbi_blastx_wrapper" version="0.0.11" file="ncbi_blast_plus/ncbi_blastx_wrapper.xml"/>
+        <tool id="ncbi_tblastn_wrapper" version="0.0.11" file="ncbi_blast_plus/ncbi_tblastn_wrapper.xml"/>
+        <tool id="ncbi_tblastx_wrapper" version="0.0.11" file="ncbi_blast_plus/ncbi_tblastx_wrapper.xml"/>
+    </repository>
+</toolshed>
diff --git a/scripts/migrate_tools/0005_tools.sh b/scripts/migrate_tools/0005_tools.sh
new file mode 100644
index 0000000..c9f1f3c
--- /dev/null
+++ b/scripts/migrate_tools/0005_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0005_tools.xml "$@"
diff --git a/scripts/migrate_tools/0005_tools.xml b/scripts/migrate_tools/0005_tools.xml
new file mode 100644
index 0000000..fd73b58
--- /dev/null
+++ b/scripts/migrate_tools/0005_tools.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository name="bwa_wrappers" description="Galaxy wrappers for the BWA short read aligner." changeset_revision="ffa8aaa14f7c">
+        <tool id="bwa_wrapper" version="1.2.3" file="sr_mapping/bwa_wrapper.xml"/>
+        <tool id="bwa_color_wrapper" version="1.0.2" file="sr_mapping/bwa_color_wrapper.xml"/>
+    </repository>
+</toolshed>
diff --git a/scripts/migrate_tools/0006_tools.sh b/scripts/migrate_tools/0006_tools.sh
new file mode 100644
index 0000000..1f000eb
--- /dev/null
+++ b/scripts/migrate_tools/0006_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0006_tools.xml "$@"
diff --git a/scripts/migrate_tools/0006_tools.xml b/scripts/migrate_tools/0006_tools.xml
new file mode 100644
index 0000000..5c8bdef
--- /dev/null
+++ b/scripts/migrate_tools/0006_tools.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository name="picard" description="Galaxy wrappers for the Picard SAM/BAM manipulation tools." changeset_revision="e0232cbac965">
+        <tool id="picard_FastqToSam" version="1.56.0" file="picard/picard_FastqToSam.xml" />
+        <tool id="picard_SamToFastq" version="1.56.1" file="picard/picard_SamToFastq.xml" />
+        <tool id="picard_BamIndexStats" version="1.56.0" file="picard/picard_BamIndexStats.xml" />
+        <tool id="PicardASMetrics" version="1.56.0" file="picard/rgPicardASMetrics.xml" />
+        <tool id="PicardGCBiasMetrics" version="1.56.0" file="picard/rgPicardGCBiasMetrics.xml" />
+        <tool id="rgEstLibComp" version="1.56.0" file="picard/rgPicardLibComplexity.xml" />
+        <tool id="PicardInsertSize" version="1.56.0" file="picard/rgPicardInsertSize.xml" />
+        <tool id="PicardHsMetrics" version="1.56.0" file="picard/rgPicardHsMetrics.xml" />
+        <tool id="picard_ARRG" version="1.56.0" file="picard/picard_AddOrReplaceReadGroups.xml" />
+        <tool id="picard_ReorderSam" version="1.56.0" file="picard/picard_ReorderSam.xml" />
+        <tool id="picard_ReplaceSamHeader" version="1.56.0" file="picard/picard_ReplaceSamHeader.xml" />
+        <tool id="rgPicFixMate" version="1.56.0" file="picard/rgPicardFixMate.xml" />
+        <tool id="rgPicardMarkDups" version="1.56.0" file="picard/rgPicardMarkDups.xml" />
+    </repository>
+</toolshed>
diff --git a/scripts/migrate_tools/0007_tools.sh b/scripts/migrate_tools/0007_tools.sh
new file mode 100644
index 0000000..a6cf948
--- /dev/null
+++ b/scripts/migrate_tools/0007_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0007_tools.xml "$@"
diff --git a/scripts/migrate_tools/0007_tools.xml b/scripts/migrate_tools/0007_tools.xml
new file mode 100644
index 0000000..db85c49
--- /dev/null
+++ b/scripts/migrate_tools/0007_tools.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository name="bowtie_wrappers" description="Galaxy wrappers for the Bowtie short read mapping tools." changeset_revision="0c7e4eadfb3c">
+        <tool id="bowtie_wrapper" version="1.1.2" file="sr_mapping/bowtie_wrapper.xml" />
+    </repository>
+    <repository name="bowtie_color_wrappers" description="Galaxy wrappers for the Bowtie short read mapping tools on color space reads" changeset_revision="fd0914e451c5">
+        <tool id="bowtie_color_wrapper" version="1.1.2" file="sr_mapping/bowtie_color_wrapper.xml" />
+    </repository>
+    <repository name="lastz" description="Galaxy wrappers for the Lastz alignment tool" changeset_revision="0801f8207d30">
+        <tool id="lastz_wrapper_2" version="1.1.2" file="sr_mapping/lastz_wrapper.xml" />
+    </repository>
+    <repository name="lastz_paired_reads" description="Galaxy wrapper for the Lastz alignment tool on paired reads" changeset_revision="96825cee5c25">
+        <tool id="lastz_paired_reads_wrapper" version="1.1.1" file="sr_mapping/lastz_paired_reads_wrapper.xml" />
+    </repository>
+</toolshed>
diff --git a/scripts/migrate_tools/0008_tools.sh b/scripts/migrate_tools/0008_tools.sh
new file mode 100644
index 0000000..50cafd1
--- /dev/null
+++ b/scripts/migrate_tools/0008_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0008_tools.xml "$@"
diff --git a/scripts/migrate_tools/0008_tools.xml b/scripts/migrate_tools/0008_tools.xml
new file mode 100644
index 0000000..2b4f71f
--- /dev/null
+++ b/scripts/migrate_tools/0008_tools.xml
@@ -0,0 +1,147 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository owner="devteam" changeset_revision="dc20f447c0e2" name="bam_to_sam" description="Galaxy wrappers for the tool BAM-to-SAM: converts BAM format to SAM format">
+        <tool id="bam_to_sam" version="1.0.3" file="samtools/bam_to_sam.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="586c1f0e1515" name="categorize_elements_satisfying_criteria" description="Galaxy wrappers for the tool Categorize Elements: satisfying criteria">
+        <tool id="categorize_elements_satisfying_criteria" version="1.0.0" file="regVariation/categorize_elements_satisfying_criteria.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="acc3dc5d26e3" name="compute_motif_frequencies_for_all_motifs" description="Galaxy wrappers for the tool Compute Motif Frequencies For All Motifs: motif by motif">
+        <tool id="compute_motif_frequencies_for_all_motifs" version="1.0.0" file="regVariation/compute_motif_frequencies_for_all_motifs.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="46325357b948" name="compute_motifs_frequency" description="Galaxy wrappers for the tool Compute Motif Frequencies: in indel flanking regions">
+        <tool id="compute_motifs_frequency" version="1.0.0" file="regVariation/compute_motifs_frequency.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="4a32700dcaa2" name="ctd_batch" description="Galaxy wrappers for the tool CTD: analysis of chemicals, diseases, or genes">
+        <tool id="ctdBatch_1" version="1.0.0" file="phenotype_association/ctd.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="9a3f7a890da6" name="cuffcompare" description="Galaxy wrappers for the Cuffcompare tool.">
+        <tool id="cuffcompare" version="0.0.5" file="ngs_rna/cuffcompare_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="0dabb2ed6eb1" name="cuffdiff" description="Galaxy wrappers for the Cuffdiff tool.">
+        <tool id="cuffdiff" version="0.0.6" file="ngs_rna/cuffdiff_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="b01956f26c36" name="cufflinks" description="Galaxy wrappers for the Cufflinks tool.">
+        <tool id="cufflinks" version="0.0.6" file="ngs_rna/cufflinks_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="fdc55fd74f78" name="cuffmerge" description="Galaxy wrappers for the Cuffmerge tool.">
+        <tool id="cuffmerge" version="0.0.5" file="ngs_rna/cuffmerge_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="f16000dc644b" name="delete_overlapping_indels" description="Galaxy wrappers for the tool Delete Overlapping Indels: from a chromosome indels file">
+        <tool id="delete_overlapping_indels" version="1.0.0" file="regVariation/delete_overlapping_indels.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="de839553d2a2" name="divide_pg_snp" description="Galaxy wrappers for the tool Separate pgSnp alleles: into columns">
+        <tool id="dividePgSnp" version="1.0.0" file="phenotype_association/dividePgSnpAlleles.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="063ecbe59faf" name="draw_stacked_barplots" description="Galaxy wrappers for the tool Draw Stacked Bar Plots: for different categories and different criteria">
+        <tool id="draw_stacked_barplots" version="1.0.0" file="regVariation/draw_stacked_barplots.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="f2ab5b44870d" name="fasta_clipping_histogram" description="Galaxy wrappers for the tool Length Distribution chart">
+        <tool id="cshl_fasta_clipping_histogram" version="1.0.0" file="fastx_toolkit/fasta_clipping_histogram.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="8f0ae92440b8" name="fasta_formatter" description="Galaxy wrappers for the tool FASTA Width: formatter">
+        <tool id="cshl_fasta_formatter" version="1.0.0" file="fastx_toolkit/fasta_formatter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="4b41e3076a50" name="fasta_nucleotide_changer" description="Galaxy wrappers for the tool RNA/DNA: converter">
+        <tool id="cshl_fasta_nucleotides_changer" version="1.0.0" file="fastx_toolkit/fasta_nucleotide_changer.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="e359ce20fbd9" name="fastq_quality_boxplot" description="Galaxy wrappers for the tool Draw quality score boxplot">
+        <tool id="cshl_fastq_quality_boxplot" version="1.0.0" file="fastx_toolkit/fastq_quality_boxplot.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="b87c74b61d81" name="fastq_quality_converter" description="Galaxy wrappers for the tool Quality format converter: (ASCII-Numeric)">
+        <tool id="cshl_fastq_quality_converter" version="1.0.0" file="fastx_toolkit/fastq_quality_converter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="b145b9b26648" name="fastq_quality_filter" description="Galaxy wrappers for the Filter by quality tool.">
+        <tool id="cshl_fastq_quality_filter" version="1.0.0" file="fastx_toolkit/fastq_quality_filter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="04614cf6ed39" name="fastq_to_fasta" description="Galaxy wrappers for the tool FASTQ to FASTA: converter">
+        <tool id="cshl_fastq_to_fasta" version="1.0.0" file="fastx_toolkit/fastq_to_fasta.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="7f9660a246c0" name="fastx_artifacts_filter" description="Galaxy wrappers for the Remove sequencing artifacts tool.">
+        <tool id="cshl_fastx_artifacts_filter" version="1.0.0" file="fastx_toolkit/fastx_artifacts_filter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="a12850d0559b" name="fastx_barcode_splitter" description="Galaxy wrappers for the Barcode Splitter tool.">
+        <tool id="cshl_fastx_barcode_splitter" version="1.0.0" file="fastx_toolkit/fastx_barcode_splitter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="2fe2d2b9e8e4" name="fastx_clipper" description="Galaxy wrappers for the tool Clip: adapter sequences">
+        <tool id="cshl_fastx_clipper" version="1.0.1" file="fastx_toolkit/fastx_clipper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="e942fd3a76a5" name="fastx_collapser" description="Galaxy wrappers for the tool Collapse: sequences">
+        <tool id="cshl_fastx_collapser" version="1.0.0" file="fastx_toolkit/fastx_collapser.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="bcc5424a4a3c" name="fastx_nucleotides_distribution" description="Galaxy wrappers for the Draw nucleotides distribution chart tool.">
+        <tool id="cshl_fastx_nucleotides_distribution" version="1.0.0" file="fastx_toolkit/fastx_nucleotides_distribution.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="45f077341b24" name="fastx_quality_statistics" description="Galaxy wrappers for the Compute quality statistics tool.">
+        <tool id="cshl_fastx_quality_statistics" version="1.0.0" file="fastx_toolkit/fastx_quality_statistics.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="d7bce63e6e09" name="fastx_renamer" description="Galaxy wrappers for the Rename sequences tool.">
+        <tool id="cshl_fastx_renamer" version="0.0.11" file="fastx_toolkit/fastx_renamer.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="c58871138668" name="fastx_reverse_complement" description="Galaxy wrappers for the Reverse-Complement tool.">
+        <tool id="cshl_fastx_reverse_complement" version="1.0.0" file="fastx_toolkit/fastx_reverse_complement.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="9cb372481a71" name="fastx_trimmer" description="Galaxy wrappers for the Trim sequences tool.">
+        <tool id="cshl_fastx_trimmer" version="1.0.0" file="fastx_toolkit/fastx_trimmer.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="64133669255f" name="hgv_fundo" description="Galaxy wrappers for the tool FunDO: human genes associated with disease terms">
+        <tool id="hgv_funDo" version="1.0.0" file="phenotype_association/funDo.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="0e80ffa5b7f7" name="hgv_hilbertvis" description="Galaxy wrappers for the tool HVIS: visualization of genomic data with the Hilbert curve">
+        <tool id="hgv_hilbertvis" version="1.0.0" file="phenotype_association/hilbertvis.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="5ad24b81dd10" name="indels_3way" description="Galaxy wrappers for the tool Fetch Indels:  from 3-way alignments">
+        <tool id="indels_3way" version="1.0.3" file="regVariation/getIndels_3way.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="4e31fad3f08e" name="microsatellite_birthdeath" description="Galaxy wrappers for the tool Identify microsatellite births and deaths:  and causal mutational mechanisms from previously identified orthologous microsatellite sets">
+        <tool id="microsatellite_birthdeath" version="1.0.0" file="regVariation/microsatellite_birthdeath.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="275433d3a395" name="multispecies_orthologous_microsats" description="Galaxy wrappers for the tool Extract orthologous microsatellites:  for multiple (>2) species alignments">
+        <tool id="multispecies_orthologous_microsats" version="1.0.0" file="regVariation/multispecies_MicrosatDataGenerator_interrupted_GALAXY.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="8f0af7251167" name="mutate_snp_codon" description="Galaxy wrappers for the tool Mutate Codons: with SNPs">
+        <tool id="mutate_snp_codon_1" version="1.0.0" file="evolution/mutate_snp_codon.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="a110f9d6ae24" name="pileup_interval" description="Galaxy wrappers for the tool Pileup-to-Interval: condenses pileup format into ranges of bases">
+        <tool id="pileup_interval" version="1.0.0" file="samtools/pileup_interval.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="ff1ba9b75337" name="pileup_parser" description="Galaxy wrappers for the tool Filter pileup: on coverage and SNPs">
+        <tool id="pileup_parser" version="1.0.2" file="samtools/pileup_parser.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="8c737b8ddc45" name="sam2interval" description="Galaxy wrappers for the tool Convert SAM: to interval">
+        <tool id="sam2interval" version="1.0.1" file="samtools/sam2interval.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="0b2424a404d9" name="sam_bitwise_flag_filter" description="Galaxy wrappers for the tool Filter SAM: on bitwise flag values">
+        <tool id="sam_bw_filter" version="1.0.0" file="samtools/sam_bitwise_flag_filter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="a7a49d31c5cf" name="sam_merge" description="Galaxy wrappers for the tool Merge BAM Files: merges BAM files together">
+        <tool id="sam_merge2" version="1.1.2" file="samtools/sam_merge.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="95612c159681" name="sam_pileup" description="Galaxy wrappers for the tool Generate pileup: from BAM dataset">
+        <tool id="sam_pileup" version="1.1.1" file="samtools/sam_pileup.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="30fdbaccb96b" name="sam_to_bam" description="Galaxy wrappers for the tool SAM-to-BAM: converts SAM format to BAM format">
+        <tool id="sam_to_bam" version="1.1.2" file="samtools/sam_to_bam.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="13c4ad597626" name="samtools_flagstat" description="Galaxy wrappers for the tool flagstat: provides simple stats on BAM files">
+        <tool id="samtools_flagstat" version="1.0.0" file="samtools/samtools_flagstat.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="44a18a94d7a9" name="samtools_mpileup" description="Galaxy wrappers for the tool MPileup: SNP and indel caller">
+        <tool id="samtools_mpileup" version="0.0.1" file="samtools/samtools_mpileup.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="7e92b2a53aab" name="samtools_rmdup" description="Galaxy wrappers for the tool rmdup: remove PCR duplicates">
+        <tool id="samtools_rmdup" version="1.0.0" file="samtools/samtools_rmdup.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="68ba55e96489" name="samtools_slice_bam" description="Galaxy wrappers for the tool Slice BAM: by provided regions">
+        <tool id="samtools_slice_bam" version="0.0.1" file="samtools/samtools_slice_bam.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="36163bff76cc" name="split_paired_reads" description="Galaxy wrappers for the Split paired end reads tool.">
+        <tool id="split_paired_reads" version="1.0.0" file="metag_tools/split_paired_reads.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="e6a85a9297b3" name="t_test_two_samples" description="Galaxy wrappers for the T Test for Two Samples tool.">
+        <tool id="t_test_two_samples" version="1.0.0" file="regVariation/t_test_two_samples.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="acf952549b53" name="xy_plot" description="Galaxy wrappers for the tool Plotting tool: for multiple series and graph types">
+        <tool id="XY_Plot_1" version="1.0.1" file="plotting/xy_plot.xml" />
+    </repository>
+</toolshed>
\ No newline at end of file
diff --git a/scripts/migrate_tools/0009_tools.sh b/scripts/migrate_tools/0009_tools.sh
new file mode 100644
index 0000000..36f162b
--- /dev/null
+++ b/scripts/migrate_tools/0009_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0009_tools.xml $@
diff --git a/scripts/migrate_tools/0009_tools.xml b/scripts/migrate_tools/0009_tools.xml
new file mode 100644
index 0000000..e965088
--- /dev/null
+++ b/scripts/migrate_tools/0009_tools.xml
@@ -0,0 +1,132 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository owner="devteam" changeset_revision="96d2e31a3938" name="bowtie2" description="Bowtie2">
+        <tool id="bowtie2" version="0.2" file="sr_mapping/bowtie2_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="a0c8dc671a23" name="ccat" description="Control-based ChIP-seq Analysis Tool">
+        <tool id="peakcalling_ccat" version="0.0.1" file="peak_calling/ccat_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="7cc64024fe92" name="clustalw" description="ClustalW multiple sequence alignment program for DNA or proteins">
+        <tool id="clustalw" version="0.1" file="rgenetics/rgClustalw.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="6708501767b6" name="dwt_cor_ava_perclass" description="Compute P-values and Correlation Coefficients for Feature Occurrences">
+        <tool id="compute_p-values_correlation_coefficients_feature_occurrences_between_two_datasets_using_discrete_wavelet_transfom" version="1.0.0" file="discreteWavelet/execute_dwt_cor_aVa_perClass.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="0f2eda4ea8dc" name="dwt_cor_avb_all" description="Compute P-values and Correlation Coefficients for Occurrences of Two Set of Features">
+        <tool id="compute_p-values_correlation_coefficients_featureA_featureB_occurrences_between_two_datasets_using_discrete_wavelet_transfom" version="1.0.0" file="discreteWavelet/execute_dwt_cor_aVb_all.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="0b89b03ad760" name="dwt_ivc_all" description="Compute P-values and Second Moments for Feature Occurrences">
+        <tool id="compute_p-values_second_moments_feature_occurrences_between_two_datasets_using_discrete_wavelet_transfom" version="1.0.0" file="discreteWavelet/execute_dwt_IvC_all.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="cb422b6f49d2" name="dwt_var_perclass" description="Compute P-values and Max Variances for Feature Occurrences">
+        <tool id="compute_p-values_max_variances_feature_occurrences_in_one_dataset_using_discrete_wavelet_transfom" version="1.0.0" file="discreteWavelet/execute_dwt_var_perClass.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="d56c5d2e1a29" name="dwt_var_perfeature" description="Wavelet variance using Discrete Wavelet Transfoms">
+        <tool id="dwt_var1" version="1.0.0" file="discreteWavelet/execute_dwt_var_perFeature.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="7b0708761d05" name="express" description="Quantify the abundances of a set of target sequences from sampled subsequences">
+        <tool id="express" version="1.1.1" file="ngs_rna/express_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="e28c965eeed4" name="fastqc" description="Read QC reports using FastQC">
+        <tool id="fastqc" version="1.0.0" file="rgenetics/rgFastQC.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="94306bdd58f7" name="fastq_combiner" description="Combine FASTA and QUAL into FASTQ.">
+        <tool id="fastq_combiner" version="1.0.1" file="fastq/fastq_combiner.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="30d9ece6c752" name="fastq_filter" description="Filter FASTQ reads by quality score and length">
+        <tool id="fastq_filter" version="1.0.0" file="fastq/fastq_filter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="1298445c852b" name="fastq_groomer" description="Convert between various FASTQ quality formats.">
+        <tool id="fastq_groomer" version="1.0.4" file="fastq/fastq_groomer.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="5d1e9e13e8db" name="fastq_manipulation" description="Manipulate FASTQ reads on various attributes.">
+        <tool id="fastq_manipulation" version="1.0.1" file="fastq/fastq_manipulation.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="5a7b5751617b" name="fastq_masker_by_quality" description="FASTQ Masker by quality score">
+        <tool id="fastq_masker_by_quality" version="1.0.0" file="fastq/fastq_masker_by_quality.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="f0949bc49926" name="fastq_paired_end_deinterlacer" description="FASTQ de-interlacer on paired end reads.">
+        <tool id="fastq_paired_end_deinterlacer" version="1.1" file="fastq/fastq_paired_end_deinterlacer.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="b89bdf6acb6c" name="fastq_paired_end_interlacer" description="FASTQ interlacer on paired end reads">
+        <tool id="fastq_paired_end_interlacer" version="1.1" file="fastq/fastq_paired_end_interlacer.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="2793d1d765b9" name="fastq_paired_end_joiner" description="FASTQ joiner on paired end reads">
+        <tool id="fastq_paired_end_joiner" version="1.0.0" file="fastq/fastq_paired_end_joiner.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="c549e99026db" name="fastq_paired_end_splitter" description="FASTQ splitter on joined paired end reads">
+        <tool id="fastq_paired_end_splitter" version="1.0.0" file="fastq/fastq_paired_end_splitter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="9b7b4e0ca9db" name="fastq_stats" description="FASTQ Summary Statistics by column">
+        <tool id="fastq_stats" version="1.0.0" file="fastq/fastq_stats.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="3571553aeb20" name="fastqtofasta" description="FASTQ to FASTA converter">
+        <tool id="fastq_to_fasta_python" version="1.0.0" file="fastq/fastq_to_fasta.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="bc9269529e88" name="fastq_to_tabular" description="FASTQ to Tabular converter">
+        <tool id="fastq_to_tabular" version="1.1.0" file="fastq/fastq_to_tabular.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="0b9feb0ed628" name="fastq_trimmer" description="FASTQ Trimmer by quality">
+        <tool id="fastq_trimmer" version="1.0.0" file="fastq/fastq_trimmer.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="1cdcaf5fc1da" name="fastq_trimmer_by_quality" description="FASTQ Quality Trimmer by sliding window">
+        <tool id="fastq_quality_trimmer" version="1.0.0" file="fastq/fastq_trimmer_by_quality.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="31154ff9f5e1" name="filter_transcripts_via_tracking" description="Filter Combined Transcripts">
+        <tool id="filter_combined_via_tracking" version="0.1" file="ngs_rna/filter_transcripts_via_tracking.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="acf51ff24c7d" name="find_diag_hits" description="Identify sequence reads corresponding to a particular taxonomic group">
+        <tool id="find_diag_hits" version="1.0.0" file="taxonomy/find_diag_hits.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="33a0e6aca936" name="freebayes_wrapper" description="Call SNPS with Freebayes">
+        <tool id="freebayes_wrapper" version="0.5.0" file="phenotype_association/freebayes.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="7b1b03c4465d" name="gi2taxonomy" description="Fetch taxonomic representation">
+        <tool id="Fetch Taxonomic Ranks" version="1.1.0" file="taxonomy/gi2taxonomy.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="2cd5ee197ec7" name="gmaj" description="GMAJ Multiple Alignment Viewer">
+        <tool id="gmaj_1" version="2.0.1" file="visualization/GMAJ.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="33e8ed5a4601" name="lca_wrapper" description="Find lowest diagnostic rank">
+        <tool id="lca1" version="1.0.1" file="taxonomy/lca.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="ae2ec275332a" name="macs" description="Model-based Analysis of ChIP-Seq">
+        <tool id="peakcalling_macs" version="1.0.1" file="peak_calling/macs_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="8cd5945559b8" name="poisson2test" description="Poisson two-sample test">
+        <tool id="poisson2test" version="1.0.0" file="taxonomy/poisson2test.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="82a8234e03f2" name="sicer" description="Statistical approach for the Identification of ChIP-Enriched Regions">
+        <tool id="peakcalling_sicer" version="0.0.1" file="peak_calling/sicer_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="9e2b9ca7f33a" name="t2ps" description="Draw phylogeny">
+        <tool id="Draw_phylogram" version="1.0.0" file="taxonomy/t2ps_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="592acb9505fc" name="t2t_report" description="Summarize taxonomy">
+        <tool id="t2t_report" version="1.0.0" file="taxonomy/t2t_report.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="b334cd1095ea" name="tabular_to_fastq" description="Tabular to FASTQ converter">
+        <tool id="tabular_to_fastq" version="1.0.0" file="fastq/tabular_to_fastq.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="1030acbecce6" name="tophat" description="Find splice junctions using RNA-seq data">
+        <tool id="tophat" version="1.5.0" file="ngs_rna/tophat_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="ffa30bedbee3" name="tophat2" description="Gapped-read mapper for RNA-seq data">
+        <tool id="tophat2" version="0.6" file="ngs_rna/tophat2_wrapper.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="b001b50f2009" name="vcf_annotate" description="Annotate a VCF file (dbSNP, hapmap)">
+        <tool id="vcf_annotate" version="1.0.0" file="vcf_tools/annotate.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="76ad0b7865b9" name="vcf_extract" description="Extract reads from a specified region">
+        <tool id="vcf_extract" version="1.0.0" file="vcf_tools/extract.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="da1a6f33b504" name="vcf_filter" description="Filter a VCF file">
+        <tool id="vcf_filter" version="1.0.0" file="vcf_tools/filter.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="9d162bde4113" name="vcf_intersect" description="Generate the intersection of two VCF files">
+        <tool id="vcf_intersect" version="1.0.0" file="vcf_tools/intersect.xml" />
+    </repository>
+    <repository owner="devteam" changeset_revision="66253fc0a69b" name="weblogo3" description="Sequence Logo generator for fasta (eg Clustal alignments)">
+        <tool id="rgweblogo3" version="0.4" file="rgenetics/rgWebLogo3.xml" />
+    </repository>
+</toolshed>
\ No newline at end of file
diff --git a/scripts/migrate_tools/0010_tools.sh b/scripts/migrate_tools/0010_tools.sh
new file mode 100644
index 0000000..fde1770
--- /dev/null
+++ b/scripts/migrate_tools/0010_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0010_tools.xml $@
diff --git a/scripts/migrate_tools/0010_tools.xml b/scripts/migrate_tools/0010_tools.xml
new file mode 100644
index 0000000..a3dd47e
--- /dev/null
+++ b/scripts/migrate_tools/0010_tools.xml
@@ -0,0 +1,141 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository owner="devteam" name="analyze_covariates" changeset_revision="738bc749f9a3" description="Analyze Covariates">
+        <tool id="gatk_analyze_covariates" version="0.0.5" file="gatk/analyze_covariates.xml" />
+    </repository>
+    <repository owner="devteam" name="basecoverage" changeset_revision="b8a9e718caa3" description="Base Coverage of all intervals">
+        <tool id="gops_basecoverage_1" version="1.0.0" file="new_operations/basecoverage.xml" />
+    </repository>
+    <repository owner="devteam" name="best_regression_subsets" changeset_revision="4f33ec73e445" description="Perform Best-subsets Regression">
+        <tool id="BestSubsetsRegression1" version="1.0.0" file="regVariation/best_regression_subsets.xml" />
+    </repository>
+    <repository owner="devteam" name="cluster" changeset_revision="520de69b107a" description="Cluster">
+        <tool id="gops_cluster_1" version="1.0.0" file="new_operations/cluster.xml" />
+    </repository>
+    <repository owner="devteam" name="complement" changeset_revision="2221cf5f59aa" description="Complement intervals of a dataset">
+        <tool id="gops_complement_1" version="1.0.0" file="new_operations/complement.xml" />
+    </repository>
+    <repository owner="devteam" name="compute_q_values" changeset_revision="63abfc350814" description="Compute q-values based on multiple simultaneous tests p-values">
+        <tool id="compute_q_values" version="1.0.1" file="regVariation/compute_q_values.xml" />
+    </repository>
+    <repository owner="devteam" name="concat" changeset_revision="855580142a12" description="Concatenate two datasets into one dataset">
+        <tool id="gops_concat_1" version="1.0.1" file="new_operations/concat.xml" />
+    </repository>
+    <repository owner="devteam" name="count_covariates" changeset_revision="14e304b70425" description="Count Covariates on BAM files">
+        <tool id="gatk_count_covariates" version="0.0.5" file="gatk/count_covariates.xml" />
+    </repository>
+    <repository owner="devteam" name="coverage" changeset_revision="16b62a897aac" description="Coverage of a set of intervals on second set of intervals">
+        <tool id="gops_coverage_1" version="1.0.0" file="new_operations/coverage.xml" />
+    </repository>
+    <repository owner="devteam" name="depth_of_coverage" changeset_revision="c3f08370fc82" description="Depth of Coverage on BAM files">
+        <tool id="gatk_depth_of_coverage" version="0.0.2" file="gatk/depth_of_coverage.xml" />
+    </repository>
+    <repository owner="devteam" name="featurecounter" changeset_revision="ac6218e2b686" description="Feature coverage">
+        <tool id="featureCoverage1" version="2.0.0" file="regVariation/featureCounter.xml" />
+    </repository>
+    <repository owner="devteam" name="flanking_features" changeset_revision="90100b587723" description="Fetch closest non-overlapping feature for every interval">
+        <tool id="flanking_features_1" version="4.0.1" file="new_operations/flanking_features.xml" />
+    </repository>
+    <repository owner="devteam" name="get_flanks" changeset_revision="4b14d0e3a837" description="Get flanks - returns flanking region/s for every gene">
+        <tool id="get_flanks1" version="1.0.0" file="new_operations/get_flanks.xml" />
+    </repository>
+    <repository owner="devteam" name="getindelrates_3way" changeset_revision="d427e5acb9ee" description="Estimate Indel Rates for 3-way alignments">
+        <tool id="indelRates_3way" version="1.0.0" file="regVariation/getIndelRates_3way.xml" />
+    </repository>
+    <repository owner="devteam" name="getindels_2way" changeset_revision="f1dc6f5fce6e" description="Fetch Indels from pairwise alignments">
+        <tool id="getIndels_2way" version="1.0.0" file="regVariation/getIndels_2way.xml" />
+    </repository>
+    <repository owner="devteam" name="indel_realigner" changeset_revision="bb0beda6cf83" description="Indel Realigner - perform local realignment">
+        <tool id="gatk_indel_realigner" version="0.0.6" file="gatk/indel_realigner.xml" />
+    </repository>
+    <repository owner="devteam" name="intersect" changeset_revision="77641d5731c8" description="Intersect the intervals of two datasets">
+        <tool id="gops_intersect_1" version="1.0.0" file="new_operations/intersect.xml" />
+    </repository>
+    <repository owner="devteam" name="join" changeset_revision="de21bdbb8d28" description="Join the intervals of two datasets side-by-side">
+        <tool id="gops_join_1" version="1.0.0" file="new_operations/join.xml" />
+    </repository>
+    <repository owner="devteam" name="linear_regression" changeset_revision="cf431604ec3e" description="Perform Linear Regression">
+        <tool id="LinearRegression1" version="1.0.1" file="regVariation/linear_regression.xml" />
+    </repository>
+    <repository owner="devteam" name="logistic_regression_vif" changeset_revision="bd196d7c1ca9" description="Perform Logistic Regression with vif">
+        <tool id="LogisticRegression" version="1.0.1" file="regVariation/logistic_regression_vif.xml" />
+    </repository>
+    <repository owner="devteam" name="maf_cpg_filter" changeset_revision="7f2a12cb047d" description="Mask CpG/non-CpG sites from MAF file">
+        <tool id="cpgFilter" version="1.0.0" file="regVariation/maf_cpg_filter.xml" />
+    </repository>
+    <repository owner="devteam" name="merge" changeset_revision="1bc76ceffa7f" description="Merge the overlapping intervals of a dataset">
+        <tool id="gops_merge_1" version="1.0.0" file="new_operations/merge.xml" />
+    </repository>
+    <repository owner="devteam" name="microsats_alignment_level" changeset_revision="7852385470f3" description="Extract Orthologous Microsatellites from pair-wise alignments">
+        <tool id="microsats_align1" version="1.0.0" file="regVariation/microsats_alignment_level.xml" />
+    </repository>
+    <repository owner="devteam" name="microsats_mutability" changeset_revision="4aa1ee5d8510" description="Estimate microsatellite mutability by specified attributes">
+        <tool id="microsats_mutability1" version="1.1.0" file="regVariation/microsats_mutability.xml" />
+    </repository>
+    <repository owner="devteam" name="partialr_square" changeset_revision="88ef41de020d" description="Compute partial R square">
+        <tool id="partialRsq" version="1.0.0" file="regVariation/partialR_square.xml" />
+    </repository>
+    <repository owner="devteam" name="print_reads" changeset_revision="e768f4851646" description="Print Reads from BAM files">
+        <tool id="gatk_print_reads" version="0.0.1" file="gatk/print_reads.xml" />
+    </repository>
+    <repository owner="devteam" name="quality_filter" changeset_revision="8d65bbc52dfe" description="Filter nucleotides based on quality scores">
+        <tool id="qualityFilter" version="1.0.1" file="regVariation/quality_filter.xml" />
+    </repository>
+    <repository owner="devteam" name="rcve" changeset_revision="7740956d197b" description="Compute RCVE">
+        <tool id="rcve1" version="1.0.0" file="regVariation/rcve.xml" />
+    </repository>
+    <repository owner="devteam" name="realigner_target_creator" changeset_revision="5b8eaae854da" description="Realigner Target Creator for use in local realignment">
+        <tool id="gatk_realigner_target_creator" version="0.0.4" file="gatk/realigner_target_creator.xml" />
+    </repository>
+    <repository owner="devteam" name="substitution_rates" changeset_revision="d1b35bcdaacc" description="Estimate substitution rates for non-coding regions">
+        <tool id="subRate1" version="1.0.0" file="regVariation/substitution_rates.xml" />
+    </repository>
+    <repository owner="devteam" name="substitutions" changeset_revision="c54f5d0bbb58" description="Fetch substitutions from pairwise alignments">
+        <tool id="substitutions1" version="1.0.0" file="regVariation/substitutions.xml" />
+    </repository>
+    <repository owner="devteam" name="subtract" changeset_revision="c19a2a29c561" description="Subtract the intervals of two datasets">
+        <tool id="gops_subtract_1" version="1.0.0" file="new_operations/subtract.xml" />
+    </repository>
+    <repository owner="devteam" name="subtract_query" changeset_revision="5f6ebef89722" description="Subtract Whole Dataset from another dataset">
+        <tool id="subtract_query1" version="0.1" file="new_operations/subtract_query.xml" />
+    </repository>
+    <repository owner="devteam" name="table_recalibration" changeset_revision="30e1dd77e99c" description="Table Recalibration on BAM files">
+        <tool id="gatk_table_recalibration" version="0.0.5" file="gatk/table_recalibration.xml" />
+    </repository>
+    <repository owner="devteam" name="tables_arithmetic_operations" changeset_revision="82fa5062d611" description="Arithmetic Operations on tables">
+        <tool id="tables_arithmetic_operations" version="1.0.0" file="new_operations/tables_arithmetic_operations.xml" />
+    </repository>
+    <repository owner="devteam" name="unified_genotyper" changeset_revision="66dd4d4c1743" description="Unified Genotyper SNP and indel caller">
+        <tool id="gatk_unified_genotyper" version="0.0.6" file="gatk/unified_genotyper.xml" />
+    </repository>
+    <repository owner="devteam" name="variant_annotator" changeset_revision="ae9d0a543e9b" description="Variant Annotator">
+        <tool id="gatk_variant_annotator" version="0.0.5" file="gatk/variant_annotator.xml" />
+    </repository>
+    <repository owner="devteam" name="variant_apply_recalibration" changeset_revision="350a4d0d1678" description="Apply Variant Recalibration">
+        <tool id="gatk_variant_apply_recalibration" version="0.0.4" file="gatk/variant_apply_recalibration.xml" />
+    </repository>
+    <repository owner="devteam" name="variant_combine" changeset_revision="1a6e16391727" description="Combine Variants">
+        <tool id="gatk_variant_combine" version="0.0.4" file="gatk/variant_combine.xml" />
+    </repository>
+    <repository owner="devteam" name="variant_eval" changeset_revision="fbca1c0956d2" description="Eval Variants">
+        <tool id="gatk_variant_eval" version="0.0.8" file="gatk/variant_eval.xml" />
+    </repository>
+    <repository owner="devteam" name="variant_filtration" changeset_revision="da6e2503c62d" description="Variant Filtration on VCF files">
+        <tool id="gatk_variant_filtration" version="0.0.5" file="gatk/variant_filtration.xml" />
+    </repository>
+    <repository owner="devteam" name="variant_recalibrator" changeset_revision="cb7cf57397a7" description="Variant Recalibrator">
+        <tool id="gatk_variant_recalibrator" version="0.0.4" file="gatk/variant_recalibrator.xml" />
+    </repository>
+    <repository owner="devteam" name="variant_select" changeset_revision="135e8721ffc5" description="Select Variants from VCF files">
+        <tool id="gatk_variant_select" version="0.0.2" file="gatk/variant_select.xml" />
+    </repository>
+    <repository owner="devteam" name="variants_validate" changeset_revision="7e1ecaa64370" description="Validate Variants">
+        <tool id="gatk_validate_variants" version="0.0.4" file="gatk/variants_validate.xml" />
+    </repository>
+    <repository owner="devteam" name="weightedaverage" changeset_revision="90611e86a998" description="Assign weighted-average of the values of features overlapping an interval">
+        <tool id="wtavg" version="1.0.0" file="regVariation/WeightedAverage.xml" />
+    </repository>
+    <repository owner="devteam" name="windowsplitter" changeset_revision="75abda2290cc" description="Make windows">
+        <tool id="winSplitter" version="1.0.0" file="regVariation/windowSplitter.xml" />
+    </repository>
+</toolshed>
\ No newline at end of file
diff --git a/scripts/migrate_tools/0011_tools.sh b/scripts/migrate_tools/0011_tools.sh
new file mode 100644
index 0000000..a313658
--- /dev/null
+++ b/scripts/migrate_tools/0011_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0011_tools.xml $@
diff --git a/scripts/migrate_tools/0011_tools.xml b/scripts/migrate_tools/0011_tools.xml
new file mode 100644
index 0000000..65b791e
--- /dev/null
+++ b/scripts/migrate_tools/0011_tools.xml
@@ -0,0 +1,69 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository changeset_revision="3b33da018e74" owner="devteam" name="annotation_profiler" description="Profile Annotations for a set of genomic intervals">
+        <tool file="annotation_profiler/annotation_profiler.xml" id="Annotation_Profiler_0" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="30f0948c649c" owner="devteam" name="blat_coverage_report" description="Polymorphism of the Reads">
+        <tool id="generate_coverage_report" version="1.0.0" file="metag_tools/blat_coverage_report.xml" />
+    </repository>
+    <repository changeset_revision="807e3e50845a" owner="devteam" name="blat_mapping" description="Coverage of the Reads in wiggle format">
+        <tool id="blat2wig" version="1.0.0" file="metag_tools/blat_mapping.xml" />
+    </repository>
+    <repository changeset_revision="9bc0c48a027f" owner="devteam" name="canonical_correlation_analysis" description="Canonical Correlation Analysis">
+        <tool file="multivariate_stats/cca.xml" id="cca1" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="ab28e7de2db3" owner="devteam" name="convert_solid_color2nuc" description="Convert Color Space to Nucleotides">
+        <tool id="color2nuc" version="1.0.0" file="metag_tools/convert_SOLiD_color2nuc.xm" />
+    </repository>
+    <repository changeset_revision="ece409f6573c" owner="devteam" name="fasta_compute_length" description="Compute sequence length">
+        <tool file="fasta_tools/fasta_compute_length.xml" id="fasta_compute_length" version="__VERSION__" />
+    </repository>
+    <repository changeset_revision="2126e1b833a2" owner="devteam" name="fasta_concatenate_by_species" description="Concatenate FASTA alignment by species">
+        <tool file="fasta_tools/fasta_concatenate_by_species.xml" id="fasta_concatenate0" version="0.0.0" />
+    </repository>
+    <repository changeset_revision="16679a7f554a" owner="devteam" name="fasta_filter_by_length" description="Filter sequences by length">
+        <tool file="fasta_tools/fasta_filter_by_length.xml" id="fasta_filter_by_length" version="1.1" />
+    </repository>
+    <repository changeset_revision="9d189d08f2ad" owner="devteam" name="fasta_to_tabular" description="FASTA-to-Tabular converter">
+        <tool file="fasta_tools/fasta_to_tabular.xml" id="fasta2tab" version="1.1.0" />
+    </repository>
+    <repository changeset_revision="ef23c03d7497" owner="devteam" name="fastqsolexa_to_fasta_qual" description="FASTQSOLEXA-to-FASTA-QUAL extracts sequences and quality scores from FASTQSOLEXA data">
+        <tool id="fastqsolexa_to_fasta_qual" version="1.0.0" file="metag_tools/fastqsolexa_to_fasta_qual.xml" />
+    </repository>
+    <repository changeset_revision="7a092113eb8c" owner="devteam" name="kernel_canonical_correlation_analysis" description="Kernel Canonical Correlation Analysis">
+        <tool file="multivariate_stats/kcca.xml" id="kcca1" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="e9ebd4bfbdfc" owner="devteam" name="kernel_principal_component_analysis" description="Kernel Principal Component Analysis">
+        <tool file="multivariate_stats/kpca.xml" id="kpca1" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="601abbd22cea" owner="devteam" name="mapping_to_ucsc" description="Format mapping data as UCSC custom track">
+        <tool id="mapToUCSC" version="1.0.0" file="metag_tools/mapping_to_ucsc.xml" />
+    </repository>
+    <repository changeset_revision="dc7b4acb3fa6" owner="devteam" name="megablast_wrapper" description="Megablast compare short reads against htgs, nt, and wgs databases">
+        <tool file="metag_tools/megablast_wrapper.xml" id="megablast_wrapper" version="1.2.0" />
+    </repository>
+    <repository changeset_revision="03ca082aeb2e" owner="devteam" name="megablast_xml_parser" description="Parse blast XML output">
+        <tool file="metag_tools/megablast_xml_parser.xml" id="megablast_xml_parser" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="f568051cdf2e" owner="devteam" name="principal_component_analysis" description="Principal Component Analysis">
+        <tool file="multivariate_stats/pca.xml" id="pca1" version="1.0.2" />
+    </repository>
+    <repository changeset_revision="ee49255302d8" owner="devteam" name="rmap" description="RMAP for Solexa Short Reads Alignment">
+        <tool id="rmap_wrapper" version="1.0.0" file="metag_tools/rmap_wrapper.xml" />
+    </repository>
+    <repository changeset_revision="f6e5bb5aa2f5" owner="devteam" name="rmapq" description="RMAPQ for Solexa Short Reads Alignment with Quality Scores">
+        <tool id="rmapq_wrapper" version="1.0.0" file="metag_tools/rmapq_wrapper.xml" />
+    </repository>
+    <repository changeset_revision="556ceed24699" owner="devteam" name="short_reads_figure_high_quality_length" description="Histogram of high quality score reads">
+        <tool id="hist_high_quality_score" version="1.0.0" file="metag_tools/short_reads_figure_high_quality_length.xml" />
+    </repository>
+    <repository changeset_revision="b52b9c7aabd9" owner="devteam" name="short_reads_figure_score" description="Build base quality distribution">
+        <tool file="metag_tools/short_reads_figure_score.xml" id="quality_score_distribution" version="1.0.2" />
+    </repository>
+    <repository changeset_revision="f17a1585733b" owner="devteam" name="short_reads_trim_seq" description="Select high quality segments">
+        <tool file="metag_tools/short_reads_trim_seq.xml" id="trim_reads" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="0b4e36026794" owner="devteam" name="tabular_to_fasta" description="Tabular-to-FASTA">
+        <tool file="fasta_tools/tabular_to_fasta.xml" id="tab2fasta" version="1.1.0" />
+    </repository>
+</toolshed>
\ No newline at end of file
diff --git a/scripts/migrate_tools/0012_tools.sh b/scripts/migrate_tools/0012_tools.sh
new file mode 100644
index 0000000..97fbe35
--- /dev/null
+++ b/scripts/migrate_tools/0012_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0012_tools.xml $@
diff --git a/scripts/migrate_tools/0012_tools.xml b/scripts/migrate_tools/0012_tools.xml
new file mode 100644
index 0000000..47e5b82
--- /dev/null
+++ b/scripts/migrate_tools/0012_tools.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+    <repository changeset_revision="08a01b2ce4cd" owner="devteam" name="column_maker" description="Compute an expression on every row">
+        <tool file="stats/column_maker.xml" id="Add_a_column1" version="1.1.0" />
+    </repository>
+    <repository changeset_revision="24e01abf9e34" owner="devteam" name="correlation" description="Correlation for numeric columns">
+        <tool file="stats/cor.xml" id="cor2" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="fabda887a71f" owner="devteam" name="count_gff_features" description="Count GFF Features">
+        <tool file="stats/count_gff_features.xml" id="count_gff_features" version="0.1" />
+    </repository>
+    <repository changeset_revision="a6f0d355b05f" owner="devteam" name="dna_filtering" description="Filter on ambiguities in polymorphism datasets">
+        <tool file="stats/dna_filtering.xml" id="dna_filter" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="04cdbd00dcec" owner="devteam" name="generate_pc_lda_matrix" description="Generate A Matrix for using PC and LDA">
+        <tool file="stats/generate_matrix_for_pca_lda.xml" id="generate_matrix_for_pca_and_lda1" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="6ff47de059a0" owner="devteam" name="histogram" description="Histogram of a numeric column">
+        <tool file="plotting/histogram2.xml" id="histogram_rpy" version="1.0.3" />
+    </repository>
+    <repository changeset_revision="f38763b52f33" owner="devteam" name="lda_analysis" description="Perform Linear Discriminant Analysis">
+        <tool file="stats/lda_analy.xml" id="lda_analy1" version="1.0.1" />
+    </repository>
+    <repository changeset_revision="783d91de9e6d" owner="devteam" name="mine" description="Maximal Information-based Nonparametric Exploration">
+        <tool file="stats/MINE.xml" id="maximal_information_based_nonparametric_exploration" version="0.0.1" />
+    </repository>
+    <repository changeset_revision="5ebbb889236a" owner="devteam" name="pearson_correlation" description="Pearson and apos Correlation between any two numeric columns">
+        <tool file="stats/correlation.xml" id="Pearson_and_apos_Correlation1" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="d281062566f9" owner="devteam" name="pgsnp2gd_snp" description="Convert from pgSnp to gd_snp">
+        <tool file="phenotype_association/pgSnp2gd_snp.xml" id="pgSnp2gd_snp" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="c5ab37076128" owner="devteam" name="plot_from_lda" description="Draw ROC plot on "Perform LDA" output">
+        <tool file="stats/plot_from_lda.xml" id="plot_for_lda_output1" version="1.0.1" />
+    </repository>
+    <repository changeset_revision="c12b0759203b" owner="devteam" name="scatterplot" description="Scatterplot of two numeric columns">
+        <tool file="plotting/scatterplot.xml" id="scatterplot_rpy" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="72ea0d13dd66" owner="devteam" name="snpfreq" description="snpFreq significant SNPs in case-control data">
+        <tool file="phenotype_association/snpFreq.xml" id="hgv_snpFreq" version="1.0.1" />
+    </repository>
+    <repository changeset_revision="618e56c3109b" owner="devteam" name="ucsc_custom_track" description="Build custom track for UCSC genome browser">
+        <tool file="visualization/build_ucsc_custom_track.xml" id="build_ucsc_custom_track_1" version="1.0.0" />
+    </repository>
+    <repository changeset_revision="5fca46616675" owner="devteam" name="vcf2pgsnp" description="VCF to pgSnp">
+        <tool file="phenotype_association/vcf2pgSnp.xml" id="vcf2pgSnp" version="1.0.0" />
+    </repository>
+</toolshed>
\ No newline at end of file
diff --git a/scripts/migrate_tools/migrate_tools.py b/scripts/migrate_tools/migrate_tools.py
new file mode 100644
index 0000000..0872417
--- /dev/null
+++ b/scripts/migrate_tools/migrate_tools.py
@@ -0,0 +1,29 @@
+"""
+This script will start up its own web application which includes a ToolMigrationManager (~/lib/galaxy/tool_shed/tool_migration_manager.py).
+For each tool discovered to be missing, the tool shed repository that contains it will be installed on disk and a new entry will be
+created for it in the migrated_tools_conf.xml file.  These entries will be made so that the tool panel will be displayed the same
+as it was before the tools were eliminated from the Galaxy distribution.  The ToolMigrationManager will properly handle entries in
+migrated_tools_conf.xml for tools outside tool panel sections as well as tools inside tool panel sections, depending upon the
+layout of the local tool_conf.xml file.  Entries will not be created in migrated_tools_conf.xml for tools included in the tool
+shed repository but not defined in tool_conf.xml.
+"""
+import os
+import sys
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+from tool_shed.galaxy_install.migrate.common import MigrateToolsApplication
+
+app = MigrateToolsApplication( sys.argv[ 1 ] )
+non_shed_tool_confs = app.tool_migration_manager.proprietary_tool_confs
+if len( non_shed_tool_confs ) == 1:
+    plural = ''
+    file_names = non_shed_tool_confs[ 0 ]
+else:
+    plural = 's'
+    file_names = ', '.join( non_shed_tool_confs )
+msg = "\nThe installation process is finished.  All tools associated with this migration that were defined in your file%s named\n" % plural
+msg += "%s, have been removed.  You may now start your Galaxy server.\n" % file_names
+print msg
+app.shutdown()
+sys.exit( 0 )
diff --git a/scripts/nosetests.py b/scripts/nosetests.py
new file mode 100755
index 0000000..dcf6f2a
--- /dev/null
+++ b/scripts/nosetests.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+# EASY-INSTALL-ENTRY-SCRIPT: 'nose','console_scripts','nosetests'
+# __requires__ = 'nose'
+import sys
+
+from pkg_resources import load_entry_point
+
+assert sys.version_info[:2] >= ( 2, 7 )
+
+nose_core_TestProgram = load_entry_point('nose', 'console_scripts', 'nosetests')
+nose_core_TestProgram()
diff --git a/scripts/others/incorrect_gops_jobs.py b/scripts/others/incorrect_gops_jobs.py
new file mode 100755
index 0000000..962ba88
--- /dev/null
+++ b/scripts/others/incorrect_gops_jobs.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+"""
+Fetch jobs using gops_intersect, gops_merge, gops_subtract, gops_complement, gops_coverage
+wherein the second dataset doesn't have chr, start and end in standard columns 1, 2 and 3.
+"""
+import ConfigParser
+import os
+import sys
+import tempfile
+
+import sqlalchemy as sa
+
+import galaxy.app
+import galaxy.model.mapping
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+class TestApplication( object ):
+    """Encapsulates the state of a Universe application"""
+    def __init__( self, database_connection=None, file_path=None ):
+        print >> sys.stderr, "python path is: " + ", ".join( sys.path )
+        if database_connection is None:
+            raise Exception( "CleanupDatasetsApplication requires a database_connection value" )
+        if file_path is None:
+            raise Exception( "CleanupDatasetsApplication requires a file_path value" )
+        self.database_connection = database_connection
+        self.file_path = file_path
+        # Setup the database engine and ORM
+        self.model = galaxy.model.mapping.init( self.file_path, self.database_connection, engine_options={}, create_tables=False )
+
+
+def main():
+    ini_file = sys.argv[1]
+    conf_parser = ConfigParser.ConfigParser( {'here': os.getcwd()} )
+    conf_parser.read( ini_file )
+    configuration = {}
+    for key, value in conf_parser.items( "app:main" ):
+        configuration[key] = value
+    database_connection = configuration['database_connection']
+    file_path = configuration['file_path']
+    app = TestApplication( database_connection=database_connection, file_path=file_path )
+    jobs = {}
+    try:
+        for job in app.model.Job.filter( sa.and_( app.model.Job.table.c.create_time.between( '2008-05-23', '2008-11-29' ),
+                                                  app.model.Job.table.c.state == 'ok',
+                                                  sa.or_( sa.and_( sa.or_( app.model.Job.table.c.tool_id == 'gops_intersect_1',
+                                                                           app.model.Job.table.c.tool_id == 'gops_subtract_1',
+                                                                           app.model.Job.table.c.tool_id == 'gops_coverage_1' ),
+                                                                   sa.not_( app.model.Job.table.c.command_line.like( '%-2 1,2,3%' ) ) ),
+                                                          sa.and_( sa.or_( app.model.Job.table.c.tool_id == 'gops_complement_1',
+                                                                           app.model.Job.table.c.tool_id == 'gops_merge_1' ),
+                                                                   sa.not_( app.model.Job.table.c.command_line.like( '%-1 1,2,3%' ) ) ) ) ) ).all():
+            print "# processing job id %s" % str( job.id )
+            for jtoda in job.output_datasets:
+                print "# --> processing JobToOutputDatasetAssociation id %s" % str( jtoda.id )
+                hda = app.model.HistoryDatasetAssociation.get( jtoda.dataset_id )
+                print "# ----> processing HistoryDatasetAssociation id %s" % str( hda.id )
+                if not hda.deleted:
+                    # Probably don't need this check, since the job state should suffice, but...
+                    if hda.dataset.state == 'ok':
+                        history = app.model.History.get( hda.history_id )
+                        print "# ------> processing history id %s" % str( history.id )
+                        if history.user_id:
+                            cmd_line = str( job.command_line )
+                            new_output = tempfile.NamedTemporaryFile('w')
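+                            # Re-run the recorded command line with a temporary
+                            # output file swapped in, then diff the result against
+                            # the job's original output; identical files mean the
+                            # job was unaffected. intersect/subtract/coverage take
+                            # the output path as the 5th token, complement/merge
+                            # as the 4th.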
+                            if job.tool_id in ['gops_intersect_1', 'gops_subtract_1', 'gops_coverage_1']:
+                                new_cmd_line = " ".join(map(str, cmd_line.split()[:4])) + " " + new_output.name + " " + " ".join(map(str, cmd_line.split()[5:]))
+                                job_output = cmd_line.split()[4]
+                            else:
+                                new_cmd_line = " ".join(map(str, cmd_line.split()[:3])) + " " + new_output.name + " " + " ".join(map(str, cmd_line.split()[4:]))
+                                job_output = cmd_line.split()[3]
+                            try:
+                                os.system(new_cmd_line)
+                            except:
+                                pass
+                            diff_status = os.system('diff %s %s >> /dev/null' % (new_output.name, job_output))
+                            if diff_status == 0:
+                                continue
+                            print "# --------> Outputs differ"
+                            user = app.model.User.get( history.user_id )
+                            jobs[ job.id ] = {}
+                            jobs[ job.id ][ 'hda_id' ] = hda.id
+                            jobs[ job.id ][ 'hda_name' ] = hda.name
+                            jobs[ job.id ][ 'hda_info' ] = hda.info
+                            jobs[ job.id ][ 'history_id' ] = history.id
+                            jobs[ job.id ][ 'history_name' ] = history.name
+                            jobs[ job.id ][ 'history_update_time' ] = history.update_time
+                            jobs[ job.id ][ 'user_email' ] = user.email
+    except Exception as e:
+        print "# caught exception: %s" % str( e )
+
+    print "\n\n# Number of incorrect Jobs: %d\n\n" % ( len( jobs ) )
+    print "#job_id\thda_id\thda_name\thda_info\thistory_id\thistory_name\thistory_update_time\tuser_email"
+    for jid in jobs:
+        print '%s\t%s\t"%s"\t"%s"\t%s\t"%s"\t"%s"\t%s' % \
+            ( str( jid ),
+              str( jobs[ jid ][ 'hda_id' ] ),
+              jobs[ jid ][ 'hda_name' ],
+              jobs[ jid ][ 'hda_info' ],
+              str( jobs[ jid ][ 'history_id' ] ),
+              jobs[ jid ][ 'history_name' ],
+              jobs[ jid ][ 'history_update_time' ],
+              jobs[ jid ][ 'user_email' ] )
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/others/incorrect_gops_jobs.sh b/scripts/others/incorrect_gops_jobs.sh
new file mode 100644
index 0000000..877ef3f
--- /dev/null
+++ b/scripts/others/incorrect_gops_jobs.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/others/incorrect_gops_jobs.py ./config/galaxy.ini >> ./scripts/others/incorrect_gops_jobs.log
diff --git a/scripts/others/incorrect_gops_join_jobs.py b/scripts/others/incorrect_gops_join_jobs.py
new file mode 100644
index 0000000..d612f4f
--- /dev/null
+++ b/scripts/others/incorrect_gops_join_jobs.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+"""
+Fetch gops_join jobs wherein the user-specified minimum coverage is not 1.
+"""
+import ConfigParser
+import os
+import sys
+import tempfile
+
+import sqlalchemy as sa
+
+import galaxy.app
+import galaxy.model.mapping
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+class TestApplication( object ):
+    """Encapsulates the state of a Universe application"""
+    def __init__( self, database_connection=None, file_path=None ):
+        print >> sys.stderr, "python path is: " + ", ".join( sys.path )
+        if database_connection is None:
+            raise Exception( "CleanupDatasetsApplication requires a database_connection value" )
+        if file_path is None:
+            raise Exception( "CleanupDatasetsApplication requires a file_path value" )
+        self.database_connection = database_connection
+        self.file_path = file_path
+        # Setup the database engine and ORM
+        self.model = galaxy.model.mapping.init( self.file_path, self.database_connection, engine_options={}, create_tables=False )
+
+
+def main():
+    ini_file = sys.argv[1]
+    conf_parser = ConfigParser.ConfigParser( {'here': os.getcwd()} )
+    conf_parser.read( ini_file )
+    configuration = {}
+    for key, value in conf_parser.items( "app:main" ):
+        configuration[key] = value
+    database_connection = configuration['database_connection']
+    file_path = configuration['file_path']
+    app = TestApplication( database_connection=database_connection, file_path=file_path )
+    jobs = {}
+    try:
+        for job in app.model.Job.filter( sa.and_( app.model.Job.table.c.create_time < '2008-12-16',
+                                                  app.model.Job.table.c.state == 'ok',
+                                                  app.model.Job.table.c.tool_id == 'gops_join_1',
+                                                  sa.not_( app.model.Job.table.c.command_line.like( '%-m 1 %' ) ) ) ).all():
+            print "# processing job id %s" % str( job.id )
+            for jtoda in job.output_datasets:
+                print "# --> processing JobToOutputDatasetAssociation id %s" % str( jtoda.id )
+                hda = app.model.HistoryDatasetAssociation.get( jtoda.dataset_id )
+                print "# ----> processing HistoryDatasetAssociation id %s" % str( hda.id )
+                if not hda.deleted:
+                    # Probably don't need this check, since the job state should suffice, but...
+                    if hda.dataset.state == 'ok':
+                        history = app.model.History.get( hda.history_id )
+                        print "# ------> processing history id %s" % str( history.id )
+                        if history.user_id:
+                            cmd_line = str( job.command_line )
+                            new_output = tempfile.NamedTemporaryFile('w')
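+                            # gops_join writes its output to the 5th token of the
+                            # command line; re-run with a temp file there and diff
+                            # against the recorded output to spot affected jobs.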
+                            new_cmd_line = " ".join(map(str, cmd_line.split()[:4])) + " " + new_output.name + " " + " ".join(map(str, cmd_line.split()[5:]))
+                            job_output = cmd_line.split()[4]
+                            try:
+                                os.system(new_cmd_line)
+                            except:
+                                pass
+                            diff_status = os.system('diff %s %s >> /dev/null' % (new_output.name, job_output))
+                            if diff_status == 0:
+                                continue
+                            print "# --------> Outputs differ"
+                            user = app.model.User.get( history.user_id )
+                            jobs[ job.id ] = {}
+                            jobs[ job.id ][ 'hda_id' ] = hda.id
+                            jobs[ job.id ][ 'hda_name' ] = hda.name
+                            jobs[ job.id ][ 'hda_info' ] = hda.info
+                            jobs[ job.id ][ 'history_id' ] = history.id
+                            jobs[ job.id ][ 'history_name' ] = history.name
+                            jobs[ job.id ][ 'history_update_time' ] = history.update_time
+                            jobs[ job.id ][ 'user_email' ] = user.email
+    except Exception as e:
+        print "# caught exception: %s" % str( e )
+
+    print "\n\n# Number of incorrect Jobs: %d\n\n" % ( len( jobs ) )
+    print "#job_id\thda_id\thda_name\thda_info\thistory_id\thistory_name\thistory_update_time\tuser_email"
+    for jid in jobs:
+        print '%s\t%s\t"%s"\t"%s"\t%s\t"%s"\t"%s"\t%s' % \
+            ( str( jid ),
+              str( jobs[ jid ][ 'hda_id' ] ),
+              jobs[ jid ][ 'hda_name' ],
+              jobs[ jid ][ 'hda_info' ],
+              str( jobs[ jid ][ 'history_id' ] ),
+              jobs[ jid ][ 'history_name' ],
+              jobs[ jid ][ 'history_update_time' ],
+              jobs[ jid ][ 'user_email' ] )
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/others/incorrect_gops_join_jobs.sh b/scripts/others/incorrect_gops_join_jobs.sh
new file mode 100644
index 0000000..e9a58bd
--- /dev/null
+++ b/scripts/others/incorrect_gops_join_jobs.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/others/incorrect_gops_join_jobs.py ./config/galaxy.ini >> ./scripts/others/incorrect_gops_join_jobs.log
diff --git a/scripts/paster.py b/scripts/paster.py
new file mode 100755
index 0000000..f2243ad
--- /dev/null
+++ b/scripts/paster.py
@@ -0,0 +1,26 @@
+"""
+Bootstrap the Galaxy framework.
+
+This should not be called directly!  Use the run.sh script in Galaxy's
+top-level directory instead.
+"""
+import os
+import sys
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+from galaxy.util.pastescript import serve
+
+from check_python import check_python
+
+# ensure supported version
+try:
+    check_python()
+except Exception:
+    sys.exit( 1 )
+
+if 'LOG_TEMPFILES' in os.environ:
+    from log_tempfile import TempFile
+    _log_tempfile = TempFile()
+
+serve.run()
diff --git a/scripts/patch.sh b/scripts/patch.sh
new file mode 100755
index 0000000..7a963f0
--- /dev/null
+++ b/scripts/patch.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+set -e
+
+echo "THIS SCRIPT IS ONLY SAFE TO RUN IF YOUR BRANCHES ARE ALL MERGED FORWARD"
+echo "THIS SCRIPT DISCARDS UPSTREAM BRANCH CHANGES TO RESOLVE MERGE CONFLICTS"
+echo "YOU HAVE BEEN WARNED"
+
+patchbase=../patch
+clone=galaxy
+
+declare -a releases=("14.10" "15.01" "15.03" "15.05" "15.07" "15.10" "16.01" "dev")
+declare -a patches=("safe_relpath-RELEASEU.patch" "gx_history_import-RELEASEU.patch" "gx_objectstore_relpath-RELEASEU.patch" "gx_sample_transfer-RELEASEU.patch" "ts_browse_symlink_relpath-RELEASEU.patch" "ts_upload_symlink_relpath-RELEASEU.patch")
+declare -a patchmsgs=('Add a safe_relpath util function for ensuring a path does not reference an absolute or parent directory' 'Security fixes for history imports' 'Security fixes for object store paths' 'Remove sample tracking manual external service transfer due to security concerns' 'Security fixes for tool shed repository browsing' 'Security fixes for tool shed hg push and capsule/tarball uploads')
+
+[ -d $clone ] || git clone git at github.com:natefoo/galaxy.git $clone
+cd $clone
+
+for (( i=0; i < ${#releases[@]}; i++ )); do
+    release=${releases[$i]}
+    releaseu=${release/./_}
+    branch=release_${release}
+    [ $release == "dev" ] && branch=dev
+    git checkout ${branch}
+    for (( j=0; j < ${#patches[@]}; j++ )); do
+        patchf=${patchbase}/${patches[$j]}
+        patchf=${patchf/RELEASEU/$releaseu}
+        if [ -f ${patchf} ]; then
+            echo "Applying patch $patchf"
+            patch -p1 < ${patchf}
+            git add -u
+            git commit -m "${patchmsgs[$j]}"
+        else
+            echo "WARNING: no such patch: $patchf"
+        fi
+    done
+    if [ $i -gt 0 ]; then
+        prevrel=${releases[$(( $i - 1 ))]}
+        msg="Merge branch 'release_${prevrel}' into ${branch}"
+        echo "$msg"
+        if ! git merge -m "$msg" release_${prevrel}; then
+            while read status fp; do
+                echo 'Handling merge conflicts by restoring "our" version'
+                if [ $status == "UU" ]; then
+                    echo "git checkout --ours $fp"
+                    git checkout --ours $fp
+                fi
+            done < <(git status --short)
+            git add -u
+            git commit -m "$msg"
+        fi
+    fi
+done
diff --git a/scripts/rst2html.py b/scripts/rst2html.py
new file mode 100755
index 0000000..ef6e1fc
--- /dev/null
+++ b/scripts/rst2html.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Author: David Goodger
+# Contact: goodger@users.sourceforge.net
+# Revision: $Revision: 1.2 $
+# Date: $Date: 2004/03/28 15:39:27 $
+# Copyright: This module has been placed in the public domain.
+
+"""
+A minimal front end to the Docutils Publisher, producing HTML.
+"""
+
+try:
+    import locale
+    locale.setlocale(locale.LC_ALL, '')
+except Exception:
+    pass
+
+from docutils.core import publish_cmdline, default_description
+
+
+description = ('Generates (X)HTML documents from standalone reStructuredText '
+               'sources.  ' + default_description)
+
+publish_cmdline(writer_name='html', description=description)
diff --git a/scripts/run_selenium_tests.sh b/scripts/run_selenium_tests.sh
new file mode 100644
index 0000000..c451fef
--- /dev/null
+++ b/scripts/run_selenium_tests.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+if [ ! -f selenium-server.jar ]; then
+    echo "Need to download selenium-server.jar from:"
+    echo "http://nexus.openqa.org/content/repositories/snapshots/org/seleniumhq/selenium/server/selenium-server/1.0-SNAPSHOT/selenium-server-1.0-20090319.053109-107-standalone.jar"
+fi
+
+java -jar selenium-server.jar -htmlSuite "*firefox" "http://localhost:8080" ./test/selenium/workflow/Suite.html ./selenium_results.html
diff --git a/scripts/runtime_stats.py b/scripts/runtime_stats.py
new file mode 100755
index 0000000..cb87fd6
--- /dev/null
+++ b/scripts/runtime_stats.py
@@ -0,0 +1,272 @@
+#!/usr/bin/env python
+"""
+Collect and report statistics on job run times
+
+To use metrics (which provide more accurate information), see:
+
+    https://github.com/galaxyproject/galaxy/blob/dev/config/job_metrics_conf.xml.sample
+
+If you do not have metrics enabled, use the `--source history` option to use
+the less accurate job_state_history table.
+
+Examples
+--------
+
+# Stats for Nate's runs of the Bowtie 2 tool installed from the Tool Shed (all
+# versions):
+% ./runtime_stats.py -c galaxy.ini -u nate@bx.psu.edu 'toolshed.g2.bx.psu.edu/repos/devteam/bowtie2/bowtie2/'
+
+# Stats for all runs of the Bowtie 2 tool installed from the Tool Shed (version
+# 0.4 only):
+% ./runtime_stats.py -c galaxy.ini 'toolshed.g2.bx.psu.edu/repos/devteam/bowtie2/bowtie2/0.4'
+
+# Stats for all runs of the Bowtie 2 tool installed from the Tool Shed but we
+# don't feel like figuring out or typing the long ID (matches any tool with
+# '/tophat2/' in its full ID):
+% ./runtime_stats.py -c galaxy.ini --like 'bowtie2'
+
+# Stats for all runs of Tophat 2 that took longer than 2 minutes but less than
+# 2 days:
+% ./runtime_stats.py -c galaxy.ini --like -m $((2 * 60)) -M $((2 * 24 * 60 * 60)) 'tophat2'
+"""
+from __future__ import print_function
+
+import argparse
+import re
+import sys
+
+try:
+    import configparser
+except ImportError:
+    import ConfigParser as configparser
+
+import numpy
+import psycopg2
+from sqlalchemy.engine import url
+
+
+DATA_SOURCES = ('metrics', 'history')
+METRICS_SQL = """
+    SELECT metric_value
+    FROM job_metric_numeric jmn
+    JOIN job j ON jmn.job_id = j.id
+    WHERE j.state = 'ok'
+          AND jmn.plugin = 'core'
+          AND jmn.metric_name = 'runtime_seconds'
+          {tool_clause}
+          {user_clause}
+          {time_clause}
+"""
+HISTORY_SQL = """
+    SELECT ctimes[1] - ctimes[2] AS delta
+    FROM (SELECT jsh.job_id,
+                 array_agg(jsh.create_time ORDER BY jsh.create_time DESC) AS ctimes
+          FROM job_state_history jsh
+          JOIN job j ON jsh.job_id = j.id
+          WHERE jsh.state IN ('running','ok')
+                AND j.state = 'ok'
+                {tool_clause}
+                {user_clause}
+          GROUP BY jsh.job_id) AS t_arrs
+    {time_clause}
+"""
+
+
+def parse_arguments():
+    parser = argparse.ArgumentParser(
+        description='Generate walltime statistics')
+    parser.add_argument('tool_id', help='Tool (by ID) to collect stats about')
+    parser.add_argument('--like',
+                        action='store_true',
+                        default=False,
+                        help='Use SQL `LIKE` operator to find '
+                             'a shed-installed tool using the tool\'s '
+                             '"short" id')
+    parser.add_argument('-c', '--config', help='Galaxy config file')
+    parser.add_argument('-d', '--debug',
+                        action='store_true',
+                        default=False,
+                        help='Print extra info')
+    parser.add_argument('-m', '--min',
+                        type=int,
+                        default=-1,
+                        help='Ignore runtimes less than MIN seconds')
+    parser.add_argument('-M', '--max',
+                        type=int,
+                        default=-1,
+                        help='Ignore runtimes greater than MAX seconds')
+    parser.add_argument('-u', '--user',
+                        help='Return stats for only this user (id, email, '
+                             'or username)')
+    parser.add_argument('-s', '--source',
+                        default='metrics',
+                        help='Runtime data source (SOURCES: %s)'
+                             % ', '.join(DATA_SOURCES))
+    args = parser.parse_args()
+
+    if args.like and '/' in args.tool_id:
+        print('ERROR: Do not use --like with a tool shed tool id (the tool '
+              'id should not contain `/` characters)')
+        sys.exit(2)
+
+    args.source = args.source.lower()
+    if args.source not in DATA_SOURCES:
+        print('ERROR: Data source `%s` unknown, valid sources are: %s'
+              % (args.source, ', '.join(DATA_SOURCES)))
+        sys.exit(2)
+
+    if args.config:
+        cp = configparser.ConfigParser()
+        cp.readfp(open(args.config))
+        uri = cp.get('app:main', 'database_connection')
+        names = { 'database': 'dbname', 'username': 'user' }
+        args.connect_args = url.make_url(uri).translate_connect_args(**names)
+    else:
+        args.connect_args = {}
+
+    if args.debug:
+        print('Got options:')
+        for i in vars(args).items():
+            print('%s: %s' % i)
+
+    return args
+
+
+def query(tool_id=None, user=None, like=None, source='metrics',
+          connect_args=None, debug=False, min=-1, max=-1, **kwargs):
+
+    # Build a libpq-style DSN; key=value entries must be separated by spaces.
+    connect_arg_str = ' '.join('%s=%s' % (k, v) for k, v in connect_args.items())
+
+    pc = psycopg2.connect(connect_arg_str)
+    cur = pc.cursor()
+
+    if user:
+        try:
+            user_id = int(user)
+        except ValueError:
+            if '@' not in user:
+                field = 'username'
+            else:
+                field = 'email'
+            sql = 'SELECT id FROM galaxy_user WHERE %s = %s' % (field, '%s')
+            cur.execute(sql, (user,))
+            if debug:
+                print('Executed:')
+                print(cur.query)
+            row = cur.fetchone()
+            if row:
+                user_id = row[0]
+            else:
+                print('Invalid user: %s' % user)
+                sys.exit(1)
+
+    if like:
+        query_tool_id = '%%/%s/%%' % tool_id
+    elif '/' in tool_id and not re.match(r'\d+\.\d+', tool_id.split('/')[-1]):
+        query_tool_id = '%s%%' % tool_id
+        like = True
+    else:
+        query_tool_id = tool_id
+
+    sql_args = [query_tool_id]
+
+    if like:
+        tool_clause = "AND j.tool_id LIKE %s"
+    else:
+        tool_clause = "AND j.tool_id = %s"
+
+    if user:
+        user_clause = "AND j.user_id = %s"
+        sql_args.append(user_id)
+    else:
+        user_clause = ""
+
+    if source == 'metrics':
+        if min > 0 and max > 0:
+            time_clause = """AND metric_value > %s
+          AND metric_value < %s"""
+            sql_args.append(min)
+            sql_args.append(max)
+        elif min > 0:
+            time_clause = "AND metric_value > %s"
+            sql_args.append(min)
+        elif max > 0:
+            time_clause = "AND metric_value < %s"
+            sql_args.append(max)
+        else:
+            time_clause = ""
+        sql = METRICS_SQL
+    elif source == 'history':
+        if min > 0 and max > 0:
+            time_clause = """WHERE ctimes[1] - ctimes[2] > interval %s
+          AND ctimes[1] - ctimes[2] < interval %s"""
+            sql_args.append('%s seconds' % min)
+            sql_args.append('%s seconds' % max)
+        elif min > 0:
+            time_clause = "WHERE ctimes[1] - ctimes[2] > interval %s"
+            sql_args.append('%s seconds' % min)
+        elif max > 0:
+            time_clause = "WHERE ctimes[1] - ctimes[2] < interval %s"
+            sql_args.append('%s seconds' % max)
+        else:
+            time_clause = ""
+        sql = HISTORY_SQL
+
+    sql = sql.format(tool_clause=tool_clause, user_clause=user_clause,
+                     time_clause=time_clause)
+
+    cur.execute(sql, sql_args)
+    if debug:
+        print('Executed:')
+        print(cur.query)
+    print('Query returned %d rows' % cur.rowcount)
+
+    if source == 'metrics':
+        times = numpy.array([ r[0] for r in cur if r[0] ])
+    elif source == 'history':
+        times = numpy.array([ r[0].total_seconds() for r in cur if r[0] ])
+
+    print('Collected %d times' % times.size)
+
+    if times.size == 0:
+        return
+
+    if user:
+        print('Displaying statistics for user %s' % user)
+
+    stats = (('Mean runtime', numpy.mean(times)),
+             ('Standard deviation', numpy.std(times)),
+             ('Minimum runtime', times.min()),
+             ('Maximum runtime', times.max()))
+
+    for name, seconds in stats:
+        hours, minutes = nice_times(seconds)
+        msg = name + ' is %0.0f seconds' % seconds
+        if minutes:
+            msg += ' (=%0.2f minutes)' % minutes
+        if hours:
+            msg += ' (=%0.2f hours)' % hours
+        print(msg)
+
+
+def nice_times(seconds):
+    if seconds < 60 * 60:
+        hours = None
+        if seconds < 60:
+            minutes = None
+        else:
+            minutes = seconds / 60
+    else:
+        minutes = seconds / 60
+        hours = seconds / 60 / 60
+    return hours, minutes
+
+
+def main():
+    args = parse_arguments()
+    query(**vars(args))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/set_dataset_sizes.py b/scripts/set_dataset_sizes.py
new file mode 100644
index 0000000..7b37210
--- /dev/null
+++ b/scripts/set_dataset_sizes.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+import os
+import sys
+from ConfigParser import ConfigParser
+from optparse import OptionParser
+
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+import galaxy.config
+from galaxy.model import mapping
+from galaxy.objectstore import build_object_store_from_config
+default_config = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, 'config/galaxy.ini') )
+
+parser = OptionParser()
+parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (config/galaxy.ini)', default=default_config )
+( options, args ) = parser.parse_args()
+
+
+def init():
+    options.config = os.path.abspath( options.config )
+
+    config_parser = ConfigParser( dict( here=os.getcwd(),
+                                        database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
+    config_parser.read( options.config )
+
+    config_dict = {}
+    for key, value in config_parser.items( "app:main" ):
+        config_dict[key] = value
+
+    config = galaxy.config.Configuration( **config_dict )
+
+    object_store = build_object_store_from_config( config )
+    return (mapping.init( config.file_path, config.database_connection, create_tables=False, object_store=object_store ),
+            object_store)
+
+
+if __name__ == '__main__':
+    print 'Loading Galaxy model...'
+    model, object_store = init()
+    sa_session = model.context.current
+
+    set = 0
+    dataset_count = sa_session.query( model.Dataset ).count()
+    print 'Processing %i datasets...' % dataset_count
+    percent = 0
+    print 'Completed %i%%' % percent,
+    sys.stdout.flush()
+    for i, dataset in enumerate( sa_session.query( model.Dataset ).enable_eagerloads( False ).yield_per( 1000 ) ):
+        if dataset.total_size is None:
+            dataset.set_total_size()
+            set += 1
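+            # Flush every 1000 updates so changes are written incrementally
+            # and the session stays small.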
+            if not set % 1000:
+                sa_session.flush()
+        new_percent = int( float(i) / dataset_count * 100 )
+        if new_percent != percent:
+            percent = new_percent
+            print '\rCompleted %i%%' % percent,
+            sys.stdout.flush()
+    sa_session.flush()
+    print 'Completed 100%'
+    object_store.shutdown()
diff --git a/scripts/set_user_disk_usage.py b/scripts/set_user_disk_usage.py
new file mode 100755
index 0000000..06c1b45
--- /dev/null
+++ b/scripts/set_user_disk_usage.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+import os
+import sys
+from ConfigParser import ConfigParser
+from optparse import OptionParser
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+import galaxy.config
+from galaxy.model.util import pgcalc
+from galaxy.objectstore import build_object_store_from_config
+from galaxy.util import nice_size
+
+
+default_config = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, 'config/galaxy.ini') )
+
+parser = OptionParser()
+parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (config/galaxy.ini)', default=default_config )
+parser.add_option( '-u', '--username', dest='username', help='Username of user to update', default='all' )
+parser.add_option( '-e', '--email', dest='email', help='Email address of user to update', default='all' )
+parser.add_option( '--dry-run', dest='dryrun', help='Dry run (show changes but do not save to database)', action='store_true', default=False )
+( options, args ) = parser.parse_args()
+
+
+def init():
+    options.config = os.path.abspath( options.config )
+    if options.username == 'all':
+        options.username = None
+    if options.email == 'all':
+        options.email = None
+
+    config_parser = ConfigParser( dict( here=os.getcwd(),
+                                        database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
+    config_parser.read( options.config )
+
+    config_dict = {}
+    for key, value in config_parser.items( "app:main" ):
+        config_dict[key] = value
+
+    config = galaxy.config.Configuration( **config_dict )
+    object_store = build_object_store_from_config( config )
+
+    from galaxy.model import mapping
+
+    return (mapping.init( config.file_path, config.database_connection, create_tables=False, object_store=object_store ),
+            object_store,
+            config.database_connection.split(':')[0])
+
+
+def quotacheck( sa_session, user, engine ):
+    sa_session.refresh( user )
+    current = user.get_disk_usage()
+    print user.username, '<' + user.email + '>:',
+    if engine not in ( 'postgres', 'postgresql' ):
+        new = user.calculate_disk_usage()
+        sa_session.refresh( user )
+        # usage changed while calculating, do it again
+        if user.get_disk_usage() != current:
+            print 'usage changed while calculating, trying again...'
+            return quotacheck( sa_session, user, engine )
+    else:
+        new = pgcalc( sa_session, user.id, dryrun=options.dryrun )
+    # yes, still a small race condition between here and the flush
+    print 'old usage:', nice_size( current ), 'change:',
+    if new in ( current, None ):
+        print 'none'
+    else:
+        if new > current:
+            print '+%s' % ( nice_size( new - current ) )
+        else:
+            print '-%s' % ( nice_size( current - new ) )
+        if not options.dryrun and engine not in ( 'postgres', 'postgresql' ):
+            user.set_disk_usage( new )
+            sa_session.add( user )
+            sa_session.flush()
+
+
+if __name__ == '__main__':
+    print 'Loading Galaxy model...'
+    model, object_store, engine = init()
+    sa_session = model.context.current
+
+    if not options.username and not options.email:
+        user_count = sa_session.query( model.User ).count()
+        print 'Processing %i users...' % user_count
+        for i, user in enumerate( sa_session.query( model.User ).enable_eagerloads( False ).yield_per( 1000 ) ):
+            print '%3i%%' % int( float(i) / user_count * 100 ),
+            quotacheck( sa_session, user, engine )
+        print '100% complete'
+        object_store.shutdown()
+        sys.exit( 0 )
+    elif options.username:
+        user = sa_session.query( model.User ).enable_eagerloads( False ).filter_by( username=options.username ).first()
+    elif options.email:
+        user = sa_session.query( model.User ).enable_eagerloads( False ).filter_by( email=options.email ).first()
+    if not user:
+        print 'User not found'
+        sys.exit( 1 )
+    object_store.shutdown()
+    quotacheck( sa_session, user, engine )
diff --git a/scripts/slideshow/__init__.py b/scripts/slideshow/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scripts/slideshow/build_slideshow.py b/scripts/slideshow/build_slideshow.py
new file mode 100644
index 0000000..fac0da4
--- /dev/null
+++ b/scripts/slideshow/build_slideshow.py
@@ -0,0 +1,28 @@
+import os
+import string
+import sys
+
+
+SCRIPTS_DIRECTORY = os.path.dirname(__file__)
+TEMPLATE_PATH = os.path.join(SCRIPTS_DIRECTORY, "slideshow_template.html")
+TEMPLATE = string.Template(open(TEMPLATE_PATH, "r").read())
+
+
+def main(argv=None):
+    if argv is None:
+        argv = sys.argv
+    title = argv[1]
+    markdown_source = argv[2]
+    output = os.path.splitext(markdown_source)[0] + '.html'
+    with open(markdown_source, "r") as s:
+        content = s.read()
+    html = TEMPLATE.safe_substitute(**{
+        'title': title,
+        'content': content,
+    })
+    print html
+    open(output, "w").write(html)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/slideshow/example.md b/scripts/slideshow/example.md
new file mode 100644
index 0000000..4c11f4c
--- /dev/null
+++ b/scripts/slideshow/example.md
@@ -0,0 +1,243 @@
+## Galaxy Tool Framework Changes
+
+This document describes changes to Galaxy's tooling framework over recent
+releases.
+
+---
+
+### 16.04
+
+Full [Galaxy changelog](https://docs.galaxyproject.org/en/master/releases/16.04_announce.html).
+
+---
+
+#### Tool Profile Version ([PR #1688](https://github.com/galaxyproject/galaxy/pull/1688))
+
+Tools may (and should) now declare a `profile` version (e.g.
+`<tool profile="16.04" ...>`).
+
+This allows Galaxy to fire a warning if a tool uses features too new for the
+current version and allows us to migrate away from some undesirable default
+behaviors that were required for backward compatibility.
+
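+A minimal sketch of such a declaration (the tool id, name, and version here are
+illustrative):
+
+```xml
+<tool id="my_tool" name="My Tool" version="1.0.0" profile="16.04">
+    <!-- tool contents unchanged; only the profile attribute is new -->
+</tool>
+```
+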
+---
+
+#### `set -e` by default ([d020522](https://github.com/galaxyproject/galaxy/pull/1688/commits/d020522650a9bfc86c22923a01fd5d7c07c65326))
+
+From the [IUC best practices documentation](http://galaxy-iuc-standards.readthedocs.org/en/latest/best_practices/tool_xml.html#command-tag):
+
+> _"If you need to execute more than one shell command, concatenate them with a double ampersand (`&&`), so that an error in a command will abort the execution of the following ones."_
+
+The job script generated with profile `16.04`+ tools will include a `set -e` statement, causing this behavior by default.
+
+Older-style tools can enable this behavior by setting `strict="true"` on
+the tool `command` XML block.
+
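+A sketch of opting in from an older-style tool (the command contents are
+illustrative):
+
+```xml
+<command strict="true">
+    first_step input.dat > intermediate.dat ;
+    second_step intermediate.dat > '$output'
+</command>
+```
+
+With `strict="true"` the generated job script runs under `set -e`, so a failure
+in `first_step` aborts the job instead of silently continuing.
+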
+---
+
+#### Using Exit Codes for Error Detection ([b92074e](https://github.com/galaxyproject/galaxy/commit/b92074e6ff87a19133b4d973577779c4ee6286d7))
+
+Previously the default behavior was for Galaxy to ignore exit codes and
+declare a tool in error if issued any output on standard error. This was
+a regrettable default behavior, so all tools were encouraged to declare
+special XML blocks to force the use of exit codes.
+
+For any tool that declares a profile version of `16.04` or greater, the
+default is now just to use exit codes for error detection.
+
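+For reference, the special XML block older tools had to declare looked roughly
+like this (a sketch; profile `16.04`+ tools get equivalent behavior by default):
+
+```xml
+<stdio>
+    <exit_code range="1:" level="fatal" />
+</stdio>
+```
+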
+---
+
+#### Unrobust Features Removed ([b92074e](https://github.com/galaxyproject/galaxy/commit/b92074e6ff87a19133b4d973577779c4ee6286d7))
+
+A few tool features have been removed from tools that declare a version of `16.04` or newer.
+
+- The `interpreter=` attribute on `command` blocks has been eliminated. Please use `$__tool_directory__` from within the tool instead (see the sketch after this list).
+- `format="input"` on output datasets has been eliminated, please use `format_source=` to specify an exact input to derive datatype from.
+- Disables extra output file discovery by default, tools must explicitly describe the outputs to collect with `discover_dataset` tags.
+- Tools require a `version` attribute - previously an implicit default to `1.0.0` would be used.
+- `$param_file` has been eliminated.
+
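+For example, a command block migrated off the removed `interpreter=` attribute
+might change roughly like this (the script name is illustrative):
+
+```xml
+<!-- before (no longer allowed with profile 16.04): -->
+<command interpreter="python">script.py '$input' '$output'</command>
+
+<!-- after: -->
+<command>python '$__tool_directory__/script.py' '$input' '$output'</command>
+```
+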
+---
+
+#### Clean Working Directories
+
+Previously, Galaxy would fill tool working directories with files related to
+metadata and job metric collection. Tools will no longer be executed in the same directory as these files.
+
+This applies to all tools, not just profile `16.04`+ tools.
+
+---
+
+### 16.01
+
+Full [Galaxy changelog](https://docs.galaxyproject.org/en/master/releases/16.01_announce.html).
+
+---
+
+#### Conda Dependency Resolution ([PR #1345](https://github.com/galaxyproject/galaxy/pull/1345))
+
+```xml
+<tool>
+    ...
+    <requirements>
+        <requirement type="package" version="0.11.4">FastQC</requirement>
+    </requirements>
+    ...
+</tool>
+```
+
+- Dependency resolvers tell Galaxy how to translate requirements into jobs.
+- The Conda dependency resolver forces Galaxy to create a conda environment
+  for the job with `FastQC` at version `0.11.4` installed.
+- Conda is the only dependency resolver that can be installed at runtime - great
+  for Docker images, heterogeneous clusters, and testing tools (a sketch of
+  enabling it follows below).
+- Links Galaxy to [Conda](http://conda.pydata.org/docs/) and [BioConda](https://bioconda.github.io/).
+
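+A minimal sketch of enabling the resolver, assuming the `<conda />` element
+from Galaxy's `dependency_resolvers_conf.xml` sample (the `auto_install`
+attribute is an assumption here):
+
+```xml
+<dependency_resolvers>
+    <tool_shed_packages />
+    <galaxy_packages />
+    <conda auto_install="True" />
+</dependency_resolvers>
+```
+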
+---
+
+#### ToolBox Enhancements - Labels ([PR #1012](https://github.com/galaxyproject/galaxy/pull/1012))
+
+  ![ToolBox Labels](images/gx_toolbox_labels.png)
+
+---
+
+#### ToolBox Enhancements - Monitoring ([PR #1398](https://github.com/galaxyproject/galaxy/pull/1398))
+
+- The Galaxy toolbox can be reloaded from the Admin interface.
+- Tool conf files (e.g. `tool_conf.xml`) can be monitored and automatically
+  reloaded by Galaxy.
+- Tool conf files can now be specified as YAML (or JSON).
+
+---
+
+#### Process Inputs as JSON ([PR #1405](https://github.com/galaxyproject/galaxy/pull/1405))
+
+```xml
+    <command>python "$__tool_directory/script.py" "$json_inputs"</command>
+    <configfiles>
+        <inputs name="json_inputs" />
+    </configfiles>
+```
+
+This will produce a file referenced as `$json_inputs` that contains a nested
+JSON structure corresponding to the tool's inputs. It is of limited utility for
+simple command-line tools, but complex tools with many repeats, conditionals,
+and nesting could potentially benefit from it.
+
+For instance, the [JBrowse](https://github.com/galaxyproject/tools-iuc/blob/master/tools/jbrowse/jbrowse.xml)
+tool generates a complex JSON data structure using a `configfile` inside the
+XML. This is a much more portable way to deal with that.
+
+---
+
+
+#### Collections
+
+- `data_collection` tool parameters (`param`s) can now specify multiple
+  `collection_type`s for consumption ([PR #1308](https://github.com/galaxyproject/galaxy/pull/1308));
+  see the sketch after this list.
+  - This mirrors the `format` attribute which allows a comma-separated list
+    of potential format types.
+- Multiple collections can now be supplied to a `multiple="true"` data parameter ([PR #805](https://github.com/galaxyproject/galaxy/pull/805)).
+- Output collections can specify a `type_source` attribute (again mirroring
+  `format_source`) ([PR #1153](https://github.com/galaxyproject/galaxy/pull/1153)).
+
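+A sketch of a parameter accepting either a list or a paired collection (the
+parameter name and format are illustrative):
+
+```xml
+<param name="input_collection" type="data_collection"
+       collection_type="list,paired" format="fastq" />
+```
+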
+---
+
+### 15.10
+
+Full [Galaxy changelog](https://docs.galaxyproject.org/en/master/releases/15.10_announce.html).
+
+---
+
+#### Collections
+
+- Tools may now produce explicit nested outputs ([PR #538](https://github.com/galaxyproject/galaxy/pull/538)).
+  The `discover_dataset` XML tag was enhanced to support this; see the sketch
+  after this list.
+- Allow certain `output` actions on collections.
+  [PR #544](https://github.com/galaxyproject/galaxy/pull/544).
+- Allow `discover_dataset` tags to use `format` instead of `ext`
+  when referring to datatype extensions/formats.
+- Allow `min`/`max` attributes on multiple data input parameters [PR #765](https://github.com/galaxyproject/galaxy/pull/765).
+
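+A hedged sketch of discovering nested outputs into a collection (the pattern
+and names are illustrative, using the plural `discover_datasets` spelling found
+in Galaxy's test tools):
+
+```xml
+<outputs>
+    <collection name="split_output" type="list">
+        <discover_datasets pattern="(?P&lt;designation&gt;.+)\.txt" format="txt" directory="splits" />
+    </collection>
+</outputs>
+```
+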
+---
+
+#### Whitelist Tools that Generate HTML ([PR #510](https://github.com/galaxyproject/galaxy/pull/510))
+
+Galaxy now ships a plain text file listing the tools whose output can be
+trusted when rendering HTML.
+
+---
+
+### 15.07
+
+Full [Galaxy changelog](https://docs.galaxyproject.org/en/master/releases/15.07_announce.html).
+
+---
+
+#### Parameterized XML Macros ([PR #362](https://github.com/galaxyproject/galaxy/pull/362))
+
+Macros now allow defining tokens to be consumed
+as XML attributes. For instance, the following definition
+
+```xml
+<tool>
+    <expand macro="inputs" foo="hello" />
+    <expand macro="inputs" foo="world" />
+    <expand macro="inputs" />
+    <macros>
+        <xml name="inputs" token_foo="the_default">
+            <inputs>@FOO@</inputs>
+        </xml>
+    </macros>
+</tool>
+```
+
+would expand out as
+
+```xml
+<tool>
+    <inputs>hello</inputs>
+    <inputs>world</inputs>
+    <inputs>the_default</inputs>
+</tool>
+```
+
+---
+
+#### Tool Form
+
+The workflow editor was updated to use Galaxy's newer
+frontend tool form.
+
+![New Workflow Editor](images/gx_new_workflow_editor.png)
+
+---
+
+#### Environment Variables ([PR #395](https://github.com/galaxyproject/galaxy/pull/395))
+
+Tools may now use `inputs` to define environment variables that will be
+set during tool execution. The new `environment_variables` XML block is
+used to define this.
+
+```xml
+<tool>
+    ...
+    <command>
+        echo "\$INTVAR"  >  $out_file1;
+        echo "\$FORTEST" >> $out_file1;
+    </command>
+    <environment_variables>
+        <environment_variable name="INTVAR">$inttest</environment_variable>
+        <environment_variable name="FORTEST">#for i in ['m', 'o', 'o']#$i#end for#</environment_variable>
+    </environment_variables>
+    ...
+```
+
+[Test tool](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/environment_variables.xml) demonstrating the use of the `environment_variables` tag.
+
+---
+
+#### Collections
+
+ - Explicit output collections can now be used in workflows. ([PR #311](https://github.com/galaxyproject/galaxy/pull/311))
+ - The `filter` tag has been implemented for output dataset collections ([PR #455](https://github.com/galaxyproject/galaxy/pull/455)). See the example tool [output_collection_filter.xml](https://github.com/galaxyproject/galaxy/blob/dev/test/functional/tools/output_collection_filter.xml).
\ No newline at end of file
diff --git a/scripts/slideshow/slideshow_template.html b/scripts/slideshow/slideshow_template.html
new file mode 100644
index 0000000..dd53456
--- /dev/null
+++ b/scripts/slideshow/slideshow_template.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <title>${title}</title>
+    <meta charset="utf-8">
+    <link rel="stylesheet" type="text/css" href="style.css">
+  </head>
+  <body>
+    <textarea id="source">
+${content}
+    </textarea>
+    <script src="remark-latest.min.js">
+    </script>
+    <script>
+      var slideshow = remark.create();
+    </script>
+  </body>
+</html>
diff --git a/scripts/slideshow/style.sample.css b/scripts/slideshow/style.sample.css
new file mode 100644
index 0000000..d70cf9b
--- /dev/null
+++ b/scripts/slideshow/style.sample.css
@@ -0,0 +1,25 @@
+/*
+This is a sample of a CSS file for a slideshow
+You need to put your `style.css` at the same level as your *.md slideshow file
+Example:
+MyDirectory:
+  - mySlideshow.md
+  - style.css
+  - images/
+*/
+@import url(https://fonts.googleapis.com/css?family=Yanone+Kaffeesatz);
+@import url(https://fonts.googleapis.com/css?family=Droid+Serif:400,700,400italic);
+@import url(https://fonts.googleapis.com/css?family=Ubuntu+Mono:400,700,400italic);
+
+body {
+    font-family: 'Droid Serif';
+}
+
+h1, h2, h3 {
+    font-family: 'Yanone Kaffeesatz';
+    font-weight: normal;
+}
+
+.remark-code, .remark-inline-code {
+    font-family: 'Ubuntu Mono';
+}
diff --git a/scripts/summarize_timings.py b/scripts/summarize_timings.py
new file mode 100644
index 0000000..5cf4295
--- /dev/null
+++ b/scripts/summarize_timings.py
@@ -0,0 +1,51 @@
+"""Script to parse timings out of a Galaxy log and summarize."""
+from __future__ import print_function
+
+from argparse import ArgumentParser
+import re
+
+import numpy
+
+
+DESCRIPTION = ""
+
+TIMING_LINE_PATTERN = re.compile(r"\((\d+\.\d+) ms\)")
+
+
+def main(argv=None):
+    """Entry point for script."""
+    arg_parser = ArgumentParser(description=DESCRIPTION)
+    arg_parser.add_argument("--file", default="paster.log")
+    arg_parser.add_argument("--print_lines", default=False, action="store_true")
+    arg_parser.add_argument("--pattern", default=None)
+
+    args = arg_parser.parse_args(argv)
+    print_lines = args.print_lines
+    pattern_str = args.pattern
+    filter_pattern = re.compile(pattern_str) if pattern_str is not None else None
+    times = []
+    for line in open(args.file, "r"):
+        if filter_pattern and not filter_pattern.search(line):
+            continue
+
+        match = TIMING_LINE_PATTERN.search(line)
+        if not match:
+            continue
+
+        times.append(float(match.group(1)))
+        if print_lines:
+            print(line.strip())
+
+    template = "Summary (ms) - Mean: %f, Median: %f, Max: %f, Min: %f, StdDev: %f"
+    message = template % (
+        numpy.mean(times),
+        numpy.median(times),
+        numpy.max(times),
+        numpy.min(times),
+        numpy.std(times)
+    )
+    print(message)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/sync_reports_config.py b/scripts/sync_reports_config.py
new file mode 100644
index 0000000..f1d2c19
--- /dev/null
+++ b/scripts/sync_reports_config.py
@@ -0,0 +1,62 @@
+from ConfigParser import ConfigParser
+from sys import argv
+
+REPLACE_PROPERTIES = ["file_path", "database_connection", "new_file_path"]
+MAIN_SECTION = "app:main"
+
+
+def sync():
+    # Add or replace the relevant properties from galaxy.ini
+    # into reports.ini
+    reports_config_file = "config/reports.ini"
+    if len(argv) > 1:
+        reports_config_file = argv[1]
+
+    universe_config_file = "config/galaxy.ini"
+    if len(argv) > 2:
+        universe_config_file = argv[2]
+
+    parser = ConfigParser()
+    parser.read(universe_config_file)
+
+    with open(reports_config_file, "r") as f:
+        reports_config_lines = f.readlines()
+
+    replaced_properties = set([])
+    with open(reports_config_file, "w") as f:
+        # Write all properties from reports config replacing as
+        # needed.
+        for reports_config_line in reports_config_lines:
+            (line, replaced_property) = get_synced_line(reports_config_line, parser)
+            if replaced_property:
+                replaced_properties.add(replaced_property)
+            f.write(line)
+
+        # If any properties appear in universe config and not in
+        # reports write these as well.
+        for replacement_property in REPLACE_PROPERTIES:
+            if parser.has_option(MAIN_SECTION, replacement_property) and \
+                    not (replacement_property in replaced_properties):
+                f.write(get_universe_line(replacement_property, parser))
+
+
+def get_synced_line(reports_line, universe_config):
+    # Cycle through properties to replace and perform replacement on
+    # this line if needed.
+    synced_line = reports_line
+    replaced_property = None
+    for replacement_property in REPLACE_PROPERTIES:
+        if reports_line.startswith(replacement_property) and \
+                universe_config.has_option(MAIN_SECTION, replacement_property):
+            synced_line = get_universe_line(replacement_property, universe_config)
+            replaced_property = replacement_property
+            break
+    return (synced_line, replaced_property)
+
+
+def get_universe_line(property_name, universe_config):
+    return "%s=%s\n" % (property_name, universe_config.get(MAIN_SECTION, property_name))
+
+
+if __name__ == '__main__':
+    sync()
diff --git a/scripts/taxonomy/gi2tax_test.txt b/scripts/taxonomy/gi2tax_test.txt
new file mode 100644
index 0000000..27aee90
--- /dev/null
+++ b/scripts/taxonomy/gi2tax_test.txt
@@ -0,0 +1,100 @@
+2	9913
+3	9913
+4	9646
+5	9913
+7	9913
+9	9913
+11	9913
+13	9913
+15	9915
+16	9771
+17	9771
+18	9771
+19	9771
+20	9771
+21	9771
+22	9771
+23	9771
+24	9771
+25	9771
+26	9771
+27	9770
+28	9770
+29	9913
+31	9913
+33	9913
+34	9913
+36	9913
+37	9913
+38	9913
+39	9913
+40	9913
+41	9913
+43	9913
+45	9913
+49	9913
+53	9913
+56	9913
+60	9913
+62	9913
+63	9913
+65	9913
+67	9913
+69	9913
+70	9913
+71	9913
+72	9913
+73	9913
+75	9913
+77	9913
+79	9913
+80	9913
+82	9913
+84	9913
+86	9913
+88	9913
+89	9913
+93	9913
+95	9913
+98	9913
+101	9913
+103	9913
+105	9913
+109	9913
+113	9913
+115	9913
+117	9913
+119	9913
+120	9913
+121	9913
+123	9913
+124	9913
+125	9913
+127	9913
+128	9913
+133	9913
+136	9913
+137	9913
+138	9913
+142	9913
+147	9913
+150	9913
+152	9913
+154	9913
+157	9913
+158	9913
+159	9913
+160	9913
+161	9913
+162	9913
+164	9913
+166	9913
+167	9913
+169	9913
+170	9913
+171	9913
+175	9913
+176	9913
+177	9913
+180	9913
+183	9913
diff --git a/scripts/taxonomy/names_test.txt b/scripts/taxonomy/names_test.txt
new file mode 100644
index 0000000..bcbfbf1
--- /dev/null
+++ b/scripts/taxonomy/names_test.txt
@@ -0,0 +1,100 @@
+1	all	synonym	
+1	root	scientific name	
+2	Bacteria	scientific name	
+2	Monera	in-part	
+2	Procaryotae	in-part	
+2	Prokaryota	in-part	
+2	Prokaryotae	in-part	
+2	bacteria	blast name	
+2	eubacteria	genbank common name	
+2	not Bacteria Haeckel 1894	synonym	
+2	prokaryotes	in-part	
+6	Azorhizobium	scientific name	
+6	Azorhizobium Dreyfus et al. 1988	synonym	
+6	Azotirhizobium	equivalent name	
+7	Azorhizobium caulinodans	scientific name	
+7	Azorhizobium caulinodans Dreyfus et al. 1988	synonym	
+7	Azotirhizobium caulinodans	equivalent name	
+9	Acyrthosiphon pisum symbiont P	includes	
+9	Buchnera aphidicola	scientific name	
+9	Buchnera aphidicola Munson et al. 1991	synonym	
+10	Cellvibrio Winogradsky 1929	synonym	
+10	Cellvibrio	scientific name	
+10	Cellvibrio (ex Winogradsky 1929) Blackall et al. 1986 emend. Humphry et al. 2003	synonym	
+11	'Cellvibrio gilvus'	synonym	
+11	Cellvibrio gilvus	scientific name	
+13	Dictyoglomus	scientific name	
+13	Dictyoglomus Saiki et al. 1985	synonym	
+14	Dictyoglomus thermophilum	scientific name	
+14	Dictyoglomus thermophilum Saiki et al. 1985	synonym	
+16	Methyliphilus	equivalent name	
+16	Methylophilus	scientific name	
+16	Methylophilus Jenkins et al. 1987	synonym	
+16	Methylotrophus	misspelling	
+17	Methyliphilus methylitrophus	equivalent name	
+17	Methyliphilus methylotrophus	equivalent name	
+17	Methylophilus methylitrophus	equivalent name	
+17	Methylophilus methylotrophus	scientific name	
+17	Methylophilus methylotrophus Jenkins et al. 1987	synonym	
+17	Methylotrophus methylophilus	synonym	
+18	Pelobacter	scientific name	
+18	Pelobacter Schink and Pfennig 1983	synonym	
+19	Pelobacter carbinolicus	scientific name	
+19	Pelobacter carbinolicus Schink 1984	synonym	
+20	Phenylobacterium	scientific name	
+20	Phenylobacterium Lingens et al. 1985 emend. Kanso and Patel 2004	synonym	
+20	Phenylobacterium Lingens et al. 1985 emend. Tiago et al. 2005	synonym	
+21	Phenylobacterium immobile	scientific name	
+21	Phenylobacterium immobile Lingens et al. 1985	synonym	
+22	Shewanella	scientific name	
+22	Shewanella MacDonell and Colwell 1986	synonym	
+23	Alteromonas colwelliana	synonym	
+23	Shewanella colwelliana	scientific name	
+23	Shewanella colwelliana (Weiner et al. 1988) Coyne et al. 1990	synonym	
+24	Pseudomonas putrefaciens (Derby and Hammer) Long and Hammer 1941	synonym	
+24	Alteromonas putrefaciens	synonym	
+24	Alteromonas putrefaciens (ex Derby and Hammer) Lee et al. 1981	synonym	
+24	Alteromonas putrifaciens	misspelling	
+24	Pseudomonas putrefaciens	synonym	
+24	Shewanella putrefaciens	scientific name	
+24	Shewanella putrefaciens (Lee et al. 1981) MacDonell and Colwell 1986	synonym	
+24	Shewanella putrifaciens	misspelling	
+25	Alteromonas hanedai	synonym	
+25	Alteromonas hanedai Jensen et al. 1981	synonym	
+25	Shewanella hanedai	scientific name	
+25	Shewanella hanedai (Jensen et al. 1981) MacDonell and Colwell 1986	synonym	
+27	halophilic eubacterium (NRCC 41227)	synonym	
+27	halophilic eubacterium NRCC 41227	scientific name	
+27	halophilic eubacterium NRCC41227	synonym	
+29	Myxobacteria	synonym	
+29	Myxococcales	scientific name	
+29	Myxococcales Tchan et al. 1948	synonym	
+29	The Myxobacteria	synonym	
+29	fruiting gliding bacteria	genbank common name	
+31	Myxococcaceae	scientific name	
+31	Myxococcaceae Jahn 1924	synonym	
+32	Myxococcus	scientific name	
+32	Myxococcus Thaxter 1892	synonym	
+33	Micrococcus fulvus Cohn 1875	synonym	
+33	Micrococcus fulvus	synonym	
+33	Myxococcus fulvus	scientific name	
+33	Myxococcus fulvus (Cohn 1875) Jahn 1911	synonym	
+34	Myxococcus xanthus	scientific name	
+34	Myxococcus xanthus Beebe 1941	synonym	
+34	Myxococcus xanthus retron Mx162	includes	
+34	Myxococcus xanthus retron Mx65	includes	
+35	Chondrococcus macrosporus Krzemieniewska and Krzemieniewski 1926	synonym	
+35	'Corallococcus macrosporus'	synonym	
+35	Chondrococcus macrosporus	synonym	
+35	Corallococcus macrosporus	synonym	
+35	Myxococcus macrosporus	scientific name	
+35	Myxococcus macrosporus (Krzemieniewska and Krzemieniewski 1926) Zahler and McCurdy 1974	synonym	
+35	not Myxococcus macrosporus Zukal 1897	synonym	
+36	Chondrococcus coralloides (Thaxter 1892) Jahn 1924	synonym	
+36	Chondrococcus polycystus (Kofler 1913) Krzemieniewska and Krzemieniewski 1926	synonym	
+36	Myxococcus clavatus Quehl 1906	synonym	
+36	Myxococcus digitatus Quehl 1906	synonym	
+36	Myxococcus exiguus Kofler 1913	synonym	
+36	Myxococcus polycystus Kofler 1913	synonym	
+36	Chondrococcus coralloides	synonym	
+36	Chondrococcus polycystus	synonym	
diff --git a/scripts/taxonomy/processTaxonomy.sh b/scripts/taxonomy/processTaxonomy.sh
new file mode 100755
index 0000000..db44214
--- /dev/null
+++ b/scripts/taxonomy/processTaxonomy.sh
@@ -0,0 +1,18 @@
+echo "Getting files from NCBI..."
+wget ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz
+wget ftp://ftp.ncbi.nih.gov/pub/taxonomy/gi_taxid_nucl.dmp.gz
+wget ftp://ftp.ncbi.nih.gov/pub/taxonomy/gi_taxid_prot.dmp.gz
+echo "Unzipping untarring..."
+gunzip -c taxdump.tar.gz | tar xvf -
+gunzip gi_taxid_nucl.dmp.gz
+gunzip gi_taxid_prot.dmp.gz
+cat gi_taxid_nucl.dmp gi_taxid_prot.dmp > gi_taxid_all.dmp
+echo "Sorting gi2tax files..."
+sort -n -k 1 gi_taxid_all.dmp > gi_taxid_sorted.txt
+rm gi_taxid_nucl.dmp gi_taxid_prot.dmp gi_taxid_all.dmp
+echo "Removing parenthesis from names.dmp"
+cat names.dmp | sed s/[\(\)\'\"]/_/g > names.temporary
+mv names.dmp names.dmp.orig
+mv names.temporary names.dmp 
+
+
diff --git a/scripts/tool_shed/api/add_repository_registry_entry.py b/scripts/tool_shed/api/add_repository_registry_entry.py
new file mode 100644
index 0000000..d6b30e2
--- /dev/null
+++ b/scripts/tool_shed/api/add_repository_registry_entry.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+"""
+Add appropriate entries to the Tool Shed's repository registry for a specified repository.
+
+Here is a working example of how to use this script.
+python ./add_repository_registry_entry.py -a <api key> -u <tool shed url> -n <repository name> -o <repository owner>
+"""
+
+import argparse
+
+from common import submit
+
+
+def main( options ):
+    api_key = options.api_key
+    if api_key:
+        if options.tool_shed_url and options.name and options.owner:
+            base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+            data = {}
+            data[ 'tool_shed_url' ] = base_tool_shed_url
+            data[ 'name' ] = options.name
+            data[ 'owner' ] = options.owner
+            url = '%s%s' % ( base_tool_shed_url, '/api/repositories/add_repository_registry_entry' )
+            response_dict = submit( url, data, api_key=api_key, return_formatted=False )
+            print response_dict
+        else:
+            print "Invalid tool_shed: ", base_tool_shed_url, " name: ", options.name, " or owner: ", options.owner, "."
+    else:
+        print "An API key for an admin user in the Tool Shed is required to add entries into the Tool Shed's repository registry."
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Add entries into the Tool Shed repository registry for a specified repository.' )
+    parser.add_argument( "-a", "--api_key", dest="api_key", required=True, help="API Key for user adding entries into the Tool Shed's repository registry." )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    parser.add_argument( "-n", "--name", dest='name', required=True, help="Repository name." )
+    parser.add_argument( "-o", "--owner", dest='owner', required=True, help="Repository owner." )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/tool_shed/api/common.py b/scripts/tool_shed/api/common.py
new file mode 100644
index 0000000..bf31533
--- /dev/null
+++ b/scripts/tool_shed/api/common.py
@@ -0,0 +1,292 @@
+import json
+import os
+import sys
+import urllib
+import urllib2
+
+sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, os.pardir, 'lib' ) )
+
+from galaxy import util
+from tool_shed.util import hg_util
+
+
+class HTTPRedirectWithDataHandler( urllib2.HTTPRedirectHandler ):
+
+    def __init__( self, method ):
+        '''
+        Upon first inspection, it would seem that this shouldn't be necessary, but for some reason
+        not having a constructor explicitly set the request method breaks PUT requests.
+        '''
+        self.valid_methods = [ 'GET', 'HEAD', 'POST', 'PUT', 'DELETE' ]
+        self.redirect_codes = [ '301', '302', '303', '307' ]
+        self.method = method
+
+    def redirect_request( self, request, fp, code, msg, headers, new_url ):
+        request_method = request.get_method()
+        if str( code ) in self.redirect_codes and request_method in self.valid_methods:
+            new_url = new_url.replace( ' ', '%20' )
+            request = urllib2.Request( new_url,
+                                       data=request.data,
+                                       headers=request.headers,
+                                       origin_req_host=request.get_origin_req_host(),
+                                       unverifiable=True )
+            if self.method in self.valid_methods:
+                if request.get_method() != self.method:
+                    request.get_method = lambda: self.method
+            return request
+        else:
+            return urllib2.HTTPRedirectHandler.redirect_request( self, request, fp, code, msg, headers, new_url )
+
+
+def build_request_with_data( url, data, api_key, method ):
+    """Build a request with the received method."""
+    http_redirect_with_data_handler = HTTPRedirectWithDataHandler( method=method )
+    opener = urllib2.build_opener( http_redirect_with_data_handler )
+    urllib2.install_opener( opener )
+    url = make_url( url, api_key=api_key, args=None )
+    request = urllib2.Request( url, headers={ 'Content-Type': 'application/json' }, data=json.dumps( data ) )
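+    # urllib2 infers GET/POST from the presence of data; override get_method
+    # below so PUT and DELETE requests are sent with the intended verb.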
+    request_method = request.get_method()
+    if request_method != method:
+        request.get_method = lambda: method
+    return opener, request
+
+
+def delete( api_key, url, data, return_formatted=True ):
+    """
+    Sends an API DELETE request and acts as a generic formatter for the JSON response.  The
+    'data' will become the JSON payload read by the Tool Shed.
+    """
+    try:
+        opener, request = build_request_with_data( url, data, api_key, 'DELETE' )
+        delete_request = opener.open( request )
+        response = json.loads( delete_request.read() )
+    except urllib2.HTTPError as e:
+        if return_formatted:
+            print e
+            print e.read( 1024 )
+            sys.exit( 1 )
+        else:
+            return dict( status='error', message=str( e.read( 1024 ) ) )
+    if return_formatted:
+        print 'Response'
+        print '--------'
+        print response
+    else:
+        return response
+
+
+def display( url, api_key=None, return_formatted=True ):
+    """Sends an API GET request and acts as a generic formatter for the JSON response."""
+    try:
+        r = get( url, api_key=api_key )
+    except urllib2.HTTPError as e:
+        print e
+        # Only return the first 1K of errors.
+        print e.read( 1024 )
+        sys.exit( 1 )
+    if type( r ) == unicode:
+        print 'error: %s' % r
+        return None
+    if not return_formatted:
+        return r
+    elif type( r ) == list:
+        # Response is a collection as defined in the REST style.
+        print 'Collection Members'
+        print '------------------'
+        for n, i in enumerate(r):
+            # All collection members should have a name in the response.
+            # url is optional
+            if 'url' in i:
+                print '#%d: %s' % (n + 1, i.pop( 'url' ) )
+            if 'name' in i:
+                print '  name: %s' % i.pop( 'name' )
+            for k, v in i.items():
+                print '  %s: %s' % ( k, v )
+        print ''
+        print '%d element(s) in collection' % len( r )
+    elif type( r ) == dict:
+        # Response is an element as defined in the REST style.
+        print 'Member Information'
+        print '------------------'
+        for k, v in r.items():
+            print '%s: %s' % ( k, v )
+    elif type( r ) == str:
+        print r
+    else:
+        print 'response is unknown type: %s' % type( r )
+
+
+def get( url, api_key=None ):
+    """Do the GET."""
+    url = make_url( url, api_key=api_key, args=None )
+    try:
+        return json.loads( urllib2.urlopen( url ).read() )
+    except ValueError:
+        sys.exit( "URL did not return JSON data" )
+
+
+def get_api_url( base, parts=None, params=None ):
+    """Compose and return a URL for the Tool Shed API."""
+    # Avoid a mutable default argument; this function mutates parts below.
+    if parts is None:
+        parts = []
+    if 'api' in parts and parts.index( 'api' ) != 0:
+        parts.pop( parts.index( 'api' ) )
+        parts.insert( 0, 'api' )
+    elif 'api' not in parts:
+        parts.insert( 0, 'api' )
+    url = util.build_url( base, pathspec=parts, params=params )
+    return url
+
+
+def get_latest_downloadable_changeset_revision_via_api( url, name, owner ):
+    """
+    Return the latest downloadable changeset revision for the repository defined by the received
+    name and owner.
+    """
+    error_message = ''
+    parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
+    params = dict( name=name, owner=owner )
+    api_url = get_api_url( base=url, parts=parts, params=params )
+    changeset_revisions, error_message = json_from_url( api_url )
+    if changeset_revisions is None or error_message:
+        return None, error_message
+    if len( changeset_revisions ) >= 1:
+        return changeset_revisions[ -1 ], error_message
+    return hg_util.INITIAL_CHANGELOG_HASH, error_message
+
+
+def get_repository_dict( url, repository_dict ):
+    """
+    Send a request to the Tool Shed to get additional information about the repository defined
+    by the received repository_dict.  Add the information to the repository_dict and return it.
+    """
+    error_message = ''
+    if not isinstance( repository_dict, dict ):
+        error_message = 'Invalid repository_dict received: %s' % str( repository_dict )
+        return None, error_message
+    repository_id = repository_dict.get( 'repository_id', None )
+    if repository_id is None:
+        error_message = 'Invalid repository_dict does not contain a repository_id entry: %s' % str( repository_dict )
+        return None, error_message
+    parts = [ 'api', 'repositories', repository_id ]
+    api_url = get_api_url( base=url, parts=parts )
+    extended_dict, error_message = json_from_url( api_url )
+    if extended_dict is None or error_message:
+        return None, error_message
+    name = extended_dict.get( 'name', None )
+    owner = extended_dict.get( 'owner', None )
+    if name is not None and owner is not None:
+        name = str( name )
+        owner = str( owner )
+        latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name, owner )
+        if latest_changeset_revision is None or error_message:
+            return None, error_message
+        extended_dict[ 'latest_revision' ] = str( latest_changeset_revision )
+        return extended_dict, error_message
+    else:
+        error_message = 'Invalid extended_dict does not contain name or owner entries: %s' % str( extended_dict )
+        return None, error_message
+
+
+def json_from_url( url ):
+    """Send a request to the Tool Shed via the Tool Shed API and handle the response."""
+    error_message = ''
+    url_handle = urllib.urlopen( url )
+    url_contents = url_handle.read()
+    try:
+        parsed_json = json.loads( url_contents )
+    except Exception as e:
+        error_message = str( url_contents )
+        print 'Error parsing JSON data in json_from_url(): ', str( e )
+        return None, error_message
+    return parsed_json, error_message
+
+
+def make_url( url, api_key=None, args=None ):
+    """Adds the API Key to the URL if it's not already there."""
+    if args is None:
+        args = []
+    argsep = '&'
+    if '?' not in url:
+        argsep = '?'
+    if api_key:
+        if '?key=' not in url and '&key=' not in url:
+            args.insert( 0, ( 'key', api_key ) )
+    return url + argsep + '&'.join( [ '='.join( t ) for t in args ] )
+
+
+def post( url, data, api_key=None ):
+    """Do the POST."""
+    try:
+        opener, request = build_request_with_data( url, data, api_key, 'POST' )
+        post_request = opener.open( request )
+        return json.loads( post_request.read() )
+    except urllib2.HTTPError as e:
+        return dict( status='error', message=str( e.read( 1024 ) ) )
+
+
+def put( url, data, api_key=None ):
+    """Do the PUT."""
+    try:
+        opener, request = build_request_with_data( url, data, api_key, 'PUT' )
+        put_request = opener.open( request )
+        return json.loads( put_request.read() )
+    except urllib2.HTTPError as e:
+        return dict( status='error', message=str( e.read( 1024 ) ) )
+
+
+def submit( url, data, api_key=None, return_formatted=True ):
+    """
+    Sends an API POST request and acts as a generic formatter for the JSON response.  The
+    'data' will become the JSON payload read by the Tool Shed.
+    """
+    try:
+        response = post( url, data, api_key=api_key )
+    except urllib2.HTTPError as e:
+        if return_formatted:
+            print e
+            print e.read( 1024 )
+            sys.exit( 1 )
+        else:
+            return dict( status='error', message=str( e.read( 1024 ) ) )
+    if not return_formatted:
+        return response
+    print 'Response'
+    print '--------'
+    if type( response ) == list:
+        # Currently the only implemented responses are lists of dicts, because submission creates
+        # some number of collection elements.
+        for i in response:
+            if type( i ) == dict:
+                if 'url' in i:
+                    print i.pop( 'url' )
+                else:
+                    print '----'
+                if 'name' in i:
+                    print '  name: %s' % i.pop( 'name' )
+                for k, v in i.items():
+                    print '  %s: %s' % ( k, v )
+            else:
+                print i
+    else:
+        print response
+
+
+def update( api_key, url, data, return_formatted=True ):
+    """
+    Sends an API PUT request and acts as a generic formatter for the JSON response.  The
+    'data' will become the JSON payload read by the Tool Shed.
+    """
+    try:
+        response = put( url, data, api_key=api_key )
+    except urllib2.HTTPError as e:
+        if return_formatted:
+            print e
+            print e.read( 1024 )
+            sys.exit( 1 )
+        else:
+            return dict( status='error', message=str( e.read( 1024 ) ) )
+    if return_formatted:
+        print 'Response'
+        print '--------'
+        print response
+    else:
+        return response
diff --git a/scripts/tool_shed/api/create_categories.py b/scripts/tool_shed/api/create_categories.py
new file mode 100644
index 0000000..35d82f5
--- /dev/null
+++ b/scripts/tool_shed/api/create_categories.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+"""
+This script will retrieve a list of dictionaries (one for each category) from the Tool Shed defined
+by the --from_tool_shed parameter, which should be a base Tool Shed URL.  It will retrieve the category
+name and description from each dictionary and create a new category with that name and description in
+the Tool Shed defined by the --to_tool_shed parameter (a different base Tool Shed URL).  Categories
+that already exist with a specified name in the Tool Shed in which the categories are being created
+will not be affected.
+
+This script is very useful for populating a new development Tool Shed with the set of categories that
+currently exist in either the test or main public Galaxy Tool Sheds.  This will streamline building
+new repository hierarchies in the development Tool Shed and exporting them into a capsule that can be
+imported into one of the public Tool Sheds.
+
+Here is a working example of how to use this script to retrieve the current set of categories that are
+available in the test public Tool Shed and create each of them in a local development Tool Shed.
+
+./create_categories.py -a <api key> -f http://testtoolshed.g2.bx.psu.edu -t http://localhost:9009
+"""
+
+import argparse
+
+from common import get, submit
+
+
+def main( options ):
+    api_key = options.api
+    from_tool_shed = options.from_tool_shed.rstrip( '/' )
+    to_tool_shed = options.to_tool_shed.rstrip( '/' )
+    # Get the categories from the specified Tool Shed.
+    url = '%s/api/categories' % from_tool_shed
+    category_dicts = get( url )
+    create_response_dicts = []
+    for category_dict in category_dicts:
+        name = category_dict.get( 'name', None )
+        description = category_dict.get( 'description', None )
+        if name is not None and description is not None:
+            data = dict( name=name,
+                         description=description )
+            url = '%s/api/categories' % to_tool_shed
+            try:
+                response = submit( url, data, api_key )
+            except Exception as e:
+                response = str( e )
+                print "Error attempting to create category using URL: ", url, " exception: ", str( e )
+            create_response_dict = dict( response=response )
+            create_response_dicts.append( create_response_dict )
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Retrieve a list of categories from a Tool Shed and create them in another Tool Shed.' )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key for Tool Shed in which categories will be created" )
+    parser.add_argument( "-f", "--from_tool_shed", dest="from_tool_shed", required=True, help="URL of Tool Shed from which to retrieve the categories" )
+    parser.add_argument( "-t", "--to_tool_shed", dest="to_tool_shed", required=True, help="URL of Tool Shed in which to create the categories" )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/tool_shed/api/create_users.py b/scripts/tool_shed/api/create_users.py
new file mode 100644
index 0000000..6661b37
--- /dev/null
+++ b/scripts/tool_shed/api/create_users.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+"""
+This script will retrieve a list of dictionaries (one for each user) from the Tool Shed defined
+by the --from_tool_shed parameter, which should be a base Tool Shed URL.  It will retrieve the
+username from each dictionary and create a new user with that username in the Tool Shed defined
+by the --to_tool_shed parameter (a different base Tool Shed URL).  An email and password value
+will automatically be provided for each user.  Email addresses will be <username>@test.org and
+passwords will be testuser.  Users that already exist with a specified username in the Tool Shed
+in which the users are being created will not be affected.
+
+This script is very useful for populating a new development Tool Shed with the set of users that
+currently exist in either the test or main public Galaxy Tool Sheds.  This will streamline building
+new repository hierarchies in the development Tool Shed and exporting them into a capsule that can
+be imported into one of the public Tool Sheds.
+
+Here is a working example of how to use this script to retrieve the current set of users that
+are available in the test public Tool Shed and create each of them in a local development Tool Shed.
+
+./create_users.py -a <api key> -f http://testtoolshed.g2.bx.psu.edu -t http://localhost:9009
+"""
+
+import argparse
+
+from common import get, submit
+
+
+def main( options ):
+    api_key = options.api
+    from_tool_shed = options.from_tool_shed.rstrip( '/' )
+    to_tool_shed = options.to_tool_shed.rstrip( '/' )
+    # Get the users from the specified Tool Shed.
+    url = '%s/api/users' % from_tool_shed
+    user_dicts = get( url )
+    create_response_dicts = []
+    for user_dict in user_dicts:
+        username = user_dict.get( 'username', None )
+        if username is not None:
+            email = '%s@test.org' % username
+            password = 'testuser'
+            data = dict( email=email,
+                         password=password,
+                         username=username )
+            url = '%s/api/users' % to_tool_shed
+            try:
+                response = submit( url, data, api_key )
+            except Exception as e:
+                response = str( e )
+                print "Error attempting to create user using URL: ", url, " exception: ", str( e )
+            create_response_dict = dict( response=response )
+            create_response_dicts.append( create_response_dict )
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Retrieve a list of users from a Tool Shed and create them in another Tool Shed.' )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key for Tool Shed in which users will be created" )
+    parser.add_argument( "-f", "--from_tool_shed", dest="from_tool_shed", required=True, help="URL of Tool Shed from which to retrieve the users" )
+    parser.add_argument( "-t", "--to_tool_shed", dest="to_tool_shed", required=True, help="URL of Tool Shed in which to create the users" )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/tool_shed/api/export.py b/scripts/tool_shed/api/export.py
new file mode 100644
index 0000000..0d18e2b
--- /dev/null
+++ b/scripts/tool_shed/api/export.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+"""
+Export a specified repository revision and optionally all of its defined repository
+dependencies from the tool shed into a compressed archive.
+
+Here is a working example of how to use this script to export a repository from the tool shed.
+./export.py --url http://testtoolshed.g2.bx.psu.edu --name chemicaltoolbox --owner bgruening --revision 4133dbf7ff4d --export_repository_dependencies True --download_dir /tmp
+"""
+
+import argparse
+import os
+import sys
+import tempfile
+import urllib2
+
+sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, os.pardir, 'lib' ) )
+from tool_shed.util import basic_util
+
+from common import display, submit
+
+CAPSULE_FILENAME = 'capsule'
+CAPSULE_WITH_DEPENDENCIES_FILENAME = 'capsule_with_dependencies'
+CHUNK_SIZE = 2 ** 20  # 1 MiB
+
+
+def generate_repository_archive_filename( tool_shed_url, name, owner, changeset_revision, file_type,
+                                          export_repository_dependencies, use_tmp_archive_dir=False ):
+    tool_shed = remove_protocol_from_tool_shed_url( tool_shed_url )
+    file_type_str = basic_util.get_file_type_str( changeset_revision, file_type )
+    if export_repository_dependencies:
+        repositories_archive_filename = '%s_%s_%s_%s_%s' % ( CAPSULE_WITH_DEPENDENCIES_FILENAME,
+                                                             tool_shed,
+                                                             name,
+                                                             owner,
+                                                             file_type_str )
+    else:
+        repositories_archive_filename = '%s_%s_%s_%s_%s' % ( CAPSULE_FILENAME,
+                                                             tool_shed,
+                                                             name,
+                                                             owner,
+                                                             file_type_str )
+    if use_tmp_archive_dir:
+        tmp_archive_dir = tempfile.mkdtemp( prefix="tmp-toolshed-arcdir" )
+        repositories_archive_filename = os.path.join( tmp_archive_dir, repositories_archive_filename )
+    return repositories_archive_filename
+
+
+def remove_protocol_from_tool_shed_url( tool_shed_url ):
+    protocol, base = tool_shed_url.split( '://' )
+    base = base.replace( ':', '_colon_' )
+    base = base.rstrip( '/' )
+    return base
+
+
+def string_as_bool( string ):
+    if str( string ).lower() in ( 'true', 'yes', 'on' ):
+        return True
+    else:
+        return False
+
+
+def main( options ):
+    """Collect all user data and export the repository via the Tool Shed API."""
+    base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+    repositories_url = '%s/api/repositories' % base_tool_shed_url
+    data = {}
+    data[ 'tool_shed_url' ] = base_tool_shed_url
+    data[ 'name' ] = options.name
+    data[ 'owner' ] = options.owner
+    data[ 'changeset_revision' ] = options.changeset_revision
+    data[ 'export_repository_dependencies' ] = options.export_repository_dependencies
+    repository_id = None
+    repositories = display( repositories_url, api_key=None, return_formatted=False )
+    for repository in repositories:
+        name = str( repository[ 'name' ] )
+        owner = str( repository[ 'owner' ] )
+        if name == options.name and owner == options.owner:
+            repository_id = repository[ 'id' ]
+            break
+    if repository_id:
+        # We'll currently support only gzip-compressed tar archives.
+        file_type = 'gz'
+        url = '%s%s' % ( base_tool_shed_url, '/api/repository_revisions/%s/export' % str( repository_id ) )
+        export_dict = submit( url, data, return_formatted=False )
+        error_messages = export_dict[ 'error_messages' ]
+        if error_messages:
+            print "Error attempting to export revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner, ":\n", error_messages
+        else:
+            export_repository_dependencies = string_as_bool( options.export_repository_dependencies )
+            repositories_archive_filename = \
+                generate_repository_archive_filename( base_tool_shed_url,
+                                                      options.name,
+                                                      options.owner,
+                                                      options.changeset_revision,
+                                                      file_type,
+                                                      export_repository_dependencies=export_repository_dependencies,
+                                                      use_tmp_archive_dir=False )
+            download_url = export_dict[ 'download_url' ]
+            download_dir = os.path.abspath( options.download_dir )
+            file_path = os.path.join( download_dir, repositories_archive_filename )
+            src = None
+            dst = None
+            try:
+                src = urllib2.urlopen( download_url )
+                dst = open( file_path, 'wb' )
+                while True:
+                    chunk = src.read( CHUNK_SIZE )
+                    if not chunk:
+                        break
+                    dst.write( chunk )
+            finally:
+                if src:
+                    src.close()
+                if dst:
+                    dst.close()
+            print "Successfully exported revision ", options.changeset_revision, " of repository ", options.name, " owned by ", options.owner
+            print "to location ", file_path
+    else:
+        print "No repository named '%s' owned by '%s' was found at %s." % ( options.name, options.owner, base_tool_shed_url )
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Installation of tool shed repositories via the Galaxy API.' )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    parser.add_argument( "-n", "--name", required=True, help="Repository name." )
+    parser.add_argument( "-o", "--owner", required=True, help="Repository owner." )
+    parser.add_argument( "-r", "--revision", dest="changeset_revision", required=True, help="Repository revision." )
+    parser.add_argument( "-e", "--export_repository_dependencies", dest="export_repository_dependencies", required=False, default='False', help="Export repository dependencies." )
+    parser.add_argument( "-d", "--download_dir", dest="download_dir", required=False, default='/tmp', help="Download directory." )
+    options = parser.parse_args()
+    main( options )
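
The archive filename embeds the Tool Shed host with its protocol stripped and
colons escaped. A standalone restatement of remove_protocol_from_tool_shed_url()
above, with an illustrative input:

    def remove_protocol( tool_shed_url ):
        protocol, base = tool_shed_url.split( '://' )
        base = base.replace( ':', '_colon_' )
        return base.rstrip( '/' )

    assert remove_protocol( 'http://localhost:9009/' ) == 'localhost_colon_9009'
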
diff --git a/scripts/tool_shed/api/get_filtered_repository_revisions.py b/scripts/tool_shed/api/get_filtered_repository_revisions.py
new file mode 100644
index 0000000..546b03c
--- /dev/null
+++ b/scripts/tool_shed/api/get_filtered_repository_revisions.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+"""
+Get a list of dictionaries, each of which defines a repository revision, that is filtered by a combination of one or more
+of the following.
+
+- do_not_test : true / false
+- downloadable : true / false
+- includes_tools : true / false
+- malicious : true / false
+- missing_test_components : true / false
+- skip_tool_test : true / false
+- test_install_error : true / false
+- tools_functionally_correct : true / false
+
+Results can also be restricted to the latest downloadable revision of each repository.
+
+This script is useful for analyzing the Tool Shed's install and test framework.
+
+Here is a working example of how to use this script.
+./get_filtered_repository_revisions.py --url http://testtoolshed.g2.bx.psu.edu
+"""
+
+import argparse
+import os
+import sys
+
+sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, os.pardir, 'lib' ) )
+from galaxy.util import asbool
+from tool_shed.util import hg_util
+
+from common import get_api_url, get_repository_dict, json_from_url
+
+
+def main( options ):
+    base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+    latest_revision_only = asbool( options.latest_revision_only )
+    do_not_test = str( options.do_not_test )
+    downloadable = str( options.downloadable )
+    includes_tools = str( options.includes_tools )
+    malicious = str( options.malicious )
+    missing_test_components = str( options.missing_test_components )
+    skip_tool_test = str( options.skip_tool_test )
+    test_install_error = str( options.test_install_error )
+    tools_functionally_correct = str( options.tools_functionally_correct )
+    parts = [ 'repository_revisions' ]
+    params = dict( do_not_test=do_not_test,
+                   downloadable=downloadable,
+                   includes_tools=includes_tools,
+                   malicious=malicious,
+                   missing_test_components=missing_test_components,
+                   skip_tool_test=skip_tool_test,
+                   test_install_error=test_install_error,
+                   tools_functionally_correct=tools_functionally_correct )
+    api_url = get_api_url( base=base_tool_shed_url, parts=parts, params=params )
+    baseline_repository_dicts, error_message = json_from_url( api_url )
+    if baseline_repository_dicts is None or error_message:
+        print error_message
+    else:
+        repository_dicts = []
+        for baseline_repository_dict in baseline_repository_dicts:
+            # We need to get additional details from the tool shed API to pass on to the
+            # module that will generate the install methods.
+            repository_dict, error_message = get_repository_dict( base_tool_shed_url, baseline_repository_dict )
+            if error_message:
+                print 'Error getting additional details from the API: ', error_message
+                repository_dicts.append( baseline_repository_dict )
+            else:
+                # Don't test empty repositories.
+                changeset_revision = baseline_repository_dict.get( 'changeset_revision', hg_util.INITIAL_CHANGELOG_HASH )
+                if changeset_revision != hg_util.INITIAL_CHANGELOG_HASH:
+                    # Merge the dictionary returned from /api/repository_revisions with the detailed repository_dict and
+                    # append it to the list of repository_dicts to install and test.
+                    if latest_revision_only:
+                        latest_revision = repository_dict.get( 'latest_revision', hg_util.INITIAL_CHANGELOG_HASH )
+                        if changeset_revision == latest_revision:
+                            repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) )
+                    else:
+                        repository_dicts.append( dict( repository_dict.items() + baseline_repository_dict.items() ) )
+        print '\n\n', repository_dicts
+        print '\nThe url:\n\n', api_url, '\n\nreturned ', len( repository_dicts ), ' repository dictionaries...'
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Get a filtered list of repository dictionaries.' )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    parser.add_argument( "-l", "--latest_revision_only", dest="latest_revision_only", default=True,
+                         help="Restrict results to latest downloadable revision only" )
+    parser.add_argument( "-n", "--do_not_test", help="do_not_test", default=False )
+    parser.add_argument( "-d", "--downloadable", help="downloadable", default=True )
+    parser.add_argument( "-i", "--includes_tools", help="includes_tools", default=True )
+    parser.add_argument( "-m", "--malicious", help="malicious", default=False )
+    parser.add_argument( "-c", "--missing_test_components", help="missing_test_components", default=False )
+    parser.add_argument( "-s", "--skip_tool_test", help="skip_tool_test", default=False )
+    parser.add_argument( "-e", "--test_install_error", help="test_install_error", default=False )
+    parser.add_argument( "-t", "--tools_functionally_correct", help="tools_functionally_correct", default=True )
+    options = parser.parse_args()
+    main( options )
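
The dict( a.items() + b.items() ) merge above is a Python 2 idiom: items()
returns lists, and keys appearing later in the concatenated list win on
collision. A small sketch with made-up values:

    repository_dict = dict( id='abc123', latest_revision='4133dbf7ff4d' )
    baseline_repository_dict = dict( changeset_revision='4133dbf7ff4d', downloadable=True )
    merged = dict( repository_dict.items() + baseline_repository_dict.items() )
    print merged[ 'id' ], merged[ 'changeset_revision' ]
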
diff --git a/scripts/tool_shed/api/import_capsule.py b/scripts/tool_shed/api/import_capsule.py
new file mode 100644
index 0000000..6eb9d00
--- /dev/null
+++ b/scripts/tool_shed/api/import_capsule.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+"""
+Import the contents of a repository capsule exported from a Tool Shed into another Tool Shed.  For each exported repository
+archive contained in the capsule, inspect the Tool Shed to see if that repository already exists or if the current user is
+authorized to create the repository.  If repository dependencies are included in the capsule, repositories may have various
+owners.  We will keep repositories associated with owners, so we need to restrict created repositories to those the current
+user can create.  If the current user is an admin or a member of the IUC, all repositories will be created regardless of the owner.
+Otherwise, only repositories whose associated owner is the current user will be created.
+
+Repositories are also associated with 1 or more categories in the Tool Shed from which the capsule was exported.  If any of
+these categories are not contained in the Tool Shed to which the capsule is being imported, they will NOT be created by this
+method (they'll have to be created manually, which can be done after the import).
+
+Here is a working example of how to use this script to install a repository from the test tool shed.
+./import_capsule.py -a <api key> -u http://localhost:9009 -c capsule_localhost_colon_9009_filter_test1_8923f52d5c6d.tar.gz
+"""
+import argparse
+import logging
+import sys
+
+from common import submit
+
+log = logging.getLogger(__name__)
+
+
+def main( options ):
+    api_key = options.api
+    base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+    data = {}
+    data[ 'tool_shed_url' ] = options.tool_shed_url
+    data[ 'capsule_file_name' ] = options.capsule_file_name
+    url = '%s/api/repositories/new/import_capsule' % base_tool_shed_url
+    try:
+        submit( url, data, api_key )
+    except Exception as e:
+        log.exception( str( e ) )
+        sys.exit( 1 )
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Import the contents of a repository capsule via the Tool Shed API.' )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+    parser.add_argument( "-c", "--capsule_file_name", required=True, help="Capsule file name." )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/tool_shed/api/remove_repository_registry_entry.py b/scripts/tool_shed/api/remove_repository_registry_entry.py
new file mode 100644
index 0000000..adaac64
--- /dev/null
+++ b/scripts/tool_shed/api/remove_repository_registry_entry.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+"""
+Remove appropriate entries from the Tool Shed's repository registry for a specified repository.
+
+Here is a working example of how to use this script.
+python ./remove_repository_registry_entry.py -a <api key> -u <tool shed url> -n <repository name> -o <repository owner>
+"""
+
+import argparse
+
+from common import submit
+
+
+def main( options ):
+    api_key = options.api_key
+    if api_key:
+        if options.tool_shed_url and options.name and options.owner:
+            base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+            data = {}
+            data[ 'tool_shed_url' ] = base_tool_shed_url
+            data[ 'name' ] = options.name
+            data[ 'owner' ] = options.owner
+            url = '%s%s' % ( base_tool_shed_url, '/api/repositories/remove_repository_registry_entry' )
+            response_dict = submit( url, data, api_key=api_key, return_formatted=False )
+            print response_dict
+        else:
+            print "Invalid tool_shed: ", options.tool_shed_url, " name: ", options.name, " or owner: ", options.owner, "."
+    else:
+        print "An API key for an admin user in the Tool Shed is required to remove entries from the Tool Shed's repository registry."
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Remove entries from the Tool Shed repository registry for a specified repository.' )
+    parser.add_argument( "-a", "--api_key", dest="api_key", required=True, help="API Key for user removing entries from the Tool Shed's repository registry." )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    parser.add_argument( "-n", "--name", dest='name', required=True, help="Repository name." )
+    parser.add_argument( "-o", "--owner", dest='owner', required=True, help="Repository owner." )
+    options = parser.parse_args()
+    main( options )
diff --git a/scripts/tool_shed/api/reset_metadata_on_repositories.py b/scripts/tool_shed/api/reset_metadata_on_repositories.py
new file mode 100644
index 0000000..ef7607a
--- /dev/null
+++ b/scripts/tool_shed/api/reset_metadata_on_repositories.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+"""
+Script to reset metadata on certain repositories in the Tool Shed.  If the received API key is associated
+with an admin user in the Tool Shed, setting the my_writable param value to True will restrict resetting
+metadata to only repositories that are writable by the user in addition to those repositories of type
+tool_dependency_definition.  The my_writable param is ignored if the current user is not an admin user,
+in which case this same restriction is automatic.
+
+usage: reset_metadata_on_repositories.py -a <api key> -u <tool shed url> [-m <my_writable>] [-o <one_per_request>] [-s <skip file>]
+
+Here is a working example of how to use this script to reset metadata on certain repositories in a specified Tool Shed.
+python ./reset_metadata_on_repositories.py -a 22be3b -m True -u http://localhost:9009/
+"""
+import argparse
+import logging
+import os
+import sys
+
+from common import get, submit
+
+log = logging.getLogger(__name__)
+
+
+def string_as_bool( string ):
+    if str( string ).lower() in [ 'true' ]:
+        return True
+    else:
+        return False
+
+
+def read_skip_file( skip_file ):
+    encoded_ids_to_skip = []
+    if os.path.exists( skip_file ):
+        # Contents of file must be 1 encoded repository id per line.
+        lines = open( skip_file, 'rb' ).readlines()
+        for line in lines:
+            if line.startswith( '#' ):
+                # Skip comments.
+                continue
+            encoded_ids_to_skip.append( line.rstrip( '\n' ) )
+    return encoded_ids_to_skip
+
+
+def main( options ):
+    api_key = options.api
+    base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+    my_writable = options.my_writable
+    one_per_request = options.one_per_request
+    skip_file = options.skip_file
+    if skip_file:
+        encoded_ids_to_skip = read_skip_file( skip_file )
+    else:
+        encoded_ids_to_skip = []
+    if string_as_bool( one_per_request ):
+        url = '%s/api/repositories/repository_ids_for_setting_metadata?key=%s&my_writable=%s' % ( base_tool_shed_url, api_key, str( my_writable ) )
+        repository_ids = get( url, api_key )
+        for repository_id in repository_ids:
+            if repository_id in encoded_ids_to_skip:
+                print "--------"
+                print "Skipping repository with id %s because it is in skip file %s" % ( str( repository_id ), str( skip_file ) )
+                print "--------"
+            else:
+                data = dict( repository_id=repository_id )
+                url = '%s/api/repositories/reset_metadata_on_repository' % base_tool_shed_url
+                try:
+                    submit( url, data, options.api )
+                except Exception as e:
+                    log.exception( ">>>>>>>>>>>>>>>Blew up on data: %s, exception: %s" % ( str( data ), str( e ) ) )
+                    # An nginx timeout undoubtedly occurred.
+                    sys.exit( 1 )
+    else:
+        data = dict( encoded_ids_to_skip=encoded_ids_to_skip,
+                     my_writable=my_writable )
+        url = '%s/api/repositories/reset_metadata_on_repositories' % base_tool_shed_url
+        try:
+            submit( url, data, options.api )
+        except Exception as e:
+            log.exception( str( e ) )
+            # An nginx timeout undoubtedly occurred.
+            sys.exit( 1 )
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser( description='Reset metadata on certain repositories in the Tool Shed via the Tool Shed API.' )
+    parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+    parser.add_argument( "-m", "--my_writable", dest="my_writable", required=False, default='False', help="Restrict to my writable repositories" )
+    parser.add_argument( "-o", "--one_per_request", dest="one_per_request", required=False, default='True', help="One repository per request" )
+    parser.add_argument( "-s", "--skip_file", dest="skip_file", required=False, help="Name of local file containing encoded repository ids to skip" )
+    parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+    options = parser.parse_args()
+    main( options )
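
The file passed via --skip_file must contain one encoded repository id per
line, with '#' starting a comment line; for example (ids are made up):

    # repositories that repeatedly time out during metadata resets
    9aff3ba2d433e3f1
    b0d3f0c11aee65e2
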
diff --git a/scripts/tool_shed/api/tool_shed_repository_revision_update.py b/scripts/tool_shed/api/tool_shed_repository_revision_update.py
new file mode 100755
index 0000000..29ba521
--- /dev/null
+++ b/scripts/tool_shed/api/tool_shed_repository_revision_update.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+"""
+PUT/update script to update appropriate values in a repository_metadata table record in the Tool Shed.
+
+usage: tool_shed_repository_revision_update.py key url key1=value1 key2=value2 ...
+"""
+
+import json
+import sys
+
+from common import update
+
+# This script will properly handle updating the value of one or more of the following
+# RepositoryMetadata attributes: tools_functionally_correct, do_not_test, tool_test_results.
+data = {}
+for key, value in [ kwarg.split( '=', 1 ) for kwarg in sys.argv[ 3: ] ]:
+    if key in [ 'tools_functionally_correct', 'do_not_test' ]:
+        if str( value ).lower() in [ 'true', 'yes', 'on' ]:
+            new_value = True
+        else:
+            new_value = False
+    elif key in [ 'tool_test_results' ]:
+        new_value = json.loads( value )
+    else:
+        new_value = str( value )
+    data[ key ] = new_value
+
+update( sys.argv[ 1 ], sys.argv[ 2 ], data )
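
For example, flagging a revision as functionally correct while clearing its
do_not_test flag might look like the following; the API key, Tool Shed URL,
and encoded metadata id are placeholders:

    python ./tool_shed_repository_revision_update.py <api key> \
        <tool shed url>/api/repository_revisions/<encoded metadata id> \
        tools_functionally_correct=true do_not_test=false
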
diff --git a/scripts/tool_shed/bootstrap_tool_shed/bootstrap_tool_shed.sh b/scripts/tool_shed/bootstrap_tool_shed/bootstrap_tool_shed.sh
new file mode 100755
index 0000000..4a7b612
--- /dev/null
+++ b/scripts/tool_shed/bootstrap_tool_shed/bootstrap_tool_shed.sh
@@ -0,0 +1,109 @@
+#!/bin/bash
+
+# Activate the virtualenv, if it exists.
+[ -f ./.venv/bin/activate ] && . ./.venv/bin/activate
+
+: ${TOOL_SHED_CONFIG_FILE:=config/tool_shed.ini.sample}
+
+stop_err() {
+	echo $1
+	python ./scripts/paster.py serve ${TOOL_SHED_CONFIG_FILE} --pid-file=tool_shed_bootstrap.pid --log-file=tool_shed_bootstrap.log --stop-daemon
+	exit 1
+}
+
+tool_shed=`./scripts/tool_shed/bootstrap_tool_shed/parse_run_sh_args.sh $@`
+
+if [ $? -ne 0 ] ; then
+	exit 0
+fi
+
+log_file="scripts/tool_shed/bootstrap_tool_shed/bootstrap.log"
+
+database_result=`python ./scripts/tool_shed/bootstrap_tool_shed/bootstrap_util.py --execute check_db --config_file ${TOOL_SHED_CONFIG_FILE}`
+
+if [ $? -ne 0 ] ; then
+	stop_err "Unable to bootstrap tool shed. $database_result"
+fi
+
+echo "Bootstrapping from tool shed at $tool_shed."
+echo -n "Creating database... "
+python scripts/create_db.py tool_shed
+
+if [ $? -eq 0 ] ; then
+	echo "done."
+else
+	stop_err "failed."
+fi
+
+if [ $? -eq 0 ] ; then
+	user_auth=`python ./scripts/tool_shed/bootstrap_tool_shed/bootstrap_util.py --execute admin_user_info --config_file ${TOOL_SHED_CONFIG_FILE}`
+	local_shed_url=`python ./scripts/tool_shed/bootstrap_tool_shed/bootstrap_util.py --execute get_url --config_file ${TOOL_SHED_CONFIG_FILE}`
+fi
+
+admin_user_name=`echo $user_auth | awk 'BEGIN { FS="__SEP__" } ; { print \$1 }'`
+admin_user_email=`echo $user_auth | awk 'BEGIN { FS="__SEP__" } ; { print \$2 }'`
+admin_user_password=`echo $user_auth | awk 'BEGIN { FS="__SEP__" } ; { print \$3 }'`
+
+echo -n "Creating user '$admin_user_name' with email address '$admin_user_email'..."
+
+python ./scripts/tool_shed/bootstrap_tool_shed/create_user_with_api_key.py -c ${TOOL_SHED_CONFIG_FILE} >> $log_file
+
+echo " done."
+
+sed -i"bak" "s/#admin_users = user1@example.org,user2@example.org/admin_users = $admin_user_email/" "${TOOL_SHED_CONFIG_FILE}"
+echo -n "Starting tool shed in order to populate users and categories... "
+
+if [ -f tool_shed_bootstrap.pid ] ; then
+	stop_err "A bootstrap process is already running."
+fi
+
+python ./scripts/paster.py serve ${TOOL_SHED_CONFIG_FILE} --pid-file=tool_shed_bootstrap.pid --log-file=tool_shed_bootstrap.log --daemon > /dev/null
+
+shed_pid=`cat tool_shed_bootstrap.pid`
+
+while : ; do
+	tail -n 1 tool_shed_bootstrap.log | grep -q "Removing PID file tool_shed_webapp.pid"
+	if [ $? -eq 0 ] ; then
+		echo "failed."
+		echo "More information about this failure may be found in the following log snippet from tool_shed_bootstrap.log:"
+		echo "========================================"
+		tail -n 40 tool_shed_bootstrap.log
+		echo "========================================"
+		stop_err " "
+	fi
+	tail -n 2 tool_shed_bootstrap.log | grep -q "Starting server in PID $shed_pid"
+	if [ $? -eq 0 ] ; then
+		echo "done."
+		break
+	fi
+done
+
+echo -n "Retrieving admin user's API key from $local_shed_url..."
+
+curl_response=`curl -s --user $admin_user_email:$admin_user_password $local_shed_url/api/authenticate/baseauth/`
+# Gets an empty response only on first attempt for some reason?
+sleep 1
+curl_response=`curl -s --user $admin_user_email:$admin_user_password $local_shed_url/api/authenticate/baseauth/`
+api_key=`echo $curl_response | grep api_key | awk -F\" '{print $4}'`
+
+if [[ -z $api_key && ${api_key+x} ]] ; then
+		stop_err "Error getting API key for user $admin_user_email. Response: $curl_response"
+fi
+
+echo " done."
+
+if [ $? -eq 0 ] ; then
+	echo -n "Creating users... "
+	python scripts/tool_shed/api/create_users.py -a $api_key -f $tool_shed -t $local_shed_url >> $log_file
+	echo "done."
+	echo -n "Creating categories... "
+	python scripts/tool_shed/api/create_categories.py -a $api_key -f $tool_shed -t $local_shed_url >> $log_file
+	echo "done."
+else
+	stop_err "Error getting API key from local tool shed."
+fi
+
+echo "Bootstrap complete, shutting down temporary tool shed process. A log has been saved to tool_shed_bootstrap.log"
+python ./scripts/paster.py serve ${TOOL_SHED_CONFIG_FILE} --pid-file=tool_shed_bootstrap.pid --log-file=tool_shed_bootstrap.log --stop-daemon
+
+exit 0
diff --git a/scripts/tool_shed/bootstrap_tool_shed/bootstrap_util.py b/scripts/tool_shed/bootstrap_tool_shed/bootstrap_util.py
new file mode 100755
index 0000000..fb2e9c8
--- /dev/null
+++ b/scripts/tool_shed/bootstrap_tool_shed/bootstrap_util.py
@@ -0,0 +1,130 @@
+#!/usr/bin/python
+import optparse
+import ConfigParser
+import os
+import sys
+
+sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, os.pardir, 'lib' ) )
+
+from sqlalchemy.exc import OperationalError, ProgrammingError
+
+import galaxy.webapps.tool_shed.model.mapping as tool_shed_model
+from tool_shed.util import xml_util
+
+
+def check_db( config_parser ):
+    dburi = None
+
+    if config_parser.has_option( 'app:main', 'database_connection' ):
+        dburi = config_parser.get( 'app:main', 'database_connection' )
+    elif config_parser.has_option( 'app:main', 'database_file' ):
+        db_file = config_parser.get( 'app:main', 'database_file' )
+        dburi = "sqlite:///%s?isolation_level=IMMEDIATE" % db_file
+    else:
+        sys.exit('The database configuration setting is missing from the tool_shed.ini file.  Add this setting before attempting to bootstrap.')
+
+    sa_session = None
+
+    database_exists_message = 'The database configured for this Tool Shed is not new, so bootstrapping is not allowed.  '
+    database_exists_message += 'Create a new database that has not been migrated before attempting to bootstrap.'
+
+    try:
+        model = tool_shed_model.init( config_parser.get( 'app:main', 'file_path' ), dburi, engine_options={}, create_tables=False )
+        sa_session = model.context.current
+        sys.exit(database_exists_message)
+    except ProgrammingError:
+        pass
+    except OperationalError:
+        pass
+
+    try:
+        if sa_session is not None:
+            result = sa_session.execute( 'SELECT version FROM migrate_version' ).first()
+            if result[0] >= 2:
+                sys.exit(database_exists_message)
+            else:
+                pass
+    except ProgrammingError:
+        pass
+
+    if config_parser.has_option( 'app:main', 'hgweb_config_dir' ):
+        hgweb_config_parser = ConfigParser.ConfigParser()
+        hgweb_dir = config_parser.get( 'app:main', 'hgweb_config_dir' )
+        hgweb_config_file = os.path.join( hgweb_dir, 'hgweb.config' )
+        if not os.path.exists( hgweb_config_file ):
+            sys.exit(0)
+        hgweb_config_parser.read( hgweb_config_file )
+        configured_repos = hgweb_config_parser.items( 'paths' )
+        if len( configured_repos ) >= 1:
+            message = "This Tool Shed's hgweb.config file contains entries, so bootstrapping is not allowed.  Delete"
+            message += " the current hgweb.config file along with all associated repositories in the configured "
+            message += "location before attempting to bootstrap."
+            sys.exit(message)
+        else:
+            sys.exit(0)
+    else:
+        sys.exit(0)
+
+    sys.exit(0)
+
+
+def admin_user_info( ):
+    user_info_config = os.path.abspath( os.path.join( os.getcwd(), 'scripts/tool_shed/bootstrap_tool_shed', 'user_info.xml' ) )
+    tree, error_message = xml_util.parse_xml( user_info_config )
+    username = None
+    email = None
+    password = None
+    if tree is None:
+        print "The XML file ", user_info_config, " seems to be invalid, using defaults."
+        email = 'admin@test.org'
+        password = 'testuser'
+        username = 'admin'
+    else:
+        root = tree.getroot()
+        for elem in root:
+            if elem.tag == 'email':
+                email = elem.text
+            elif elem.tag == 'password':
+                password = elem.text
+            elif elem.tag == 'username':
+                username = elem.text
+    return (username, email, password)
+
+
+def get_local_tool_shed_url( config_parser ):
+    port = '9009'
+    if config_parser.has_section( 'server:main' ):
+        if config_parser.has_option( 'server:main', 'port' ):
+            port = config_parser.get( 'server:main', 'port' )
+    host = '127.0.0.1'
+    print 'http://%s:%s' % ( host, port )
+    return 0
+
+
+def main( args ):
+    config_parser = ConfigParser.ConfigParser()
+
+    if os.path.exists( args.config ):
+        config_parser.read( args.config )
+    else:
+        return 1
+
+    if args.method == 'check_db':
+        return check_db( config_parser )
+    elif args.method == 'admin_user_info':
+        (username, email, password) = admin_user_info()
+        print '%s__SEP__%s__SEP__%s' % ( username, email, password )
+        return 0
+    elif args.method == 'get_url':
+        return get_local_tool_shed_url( config_parser )
+    else:
+        return 1
+
+
+if __name__ == '__main__':
+    # Parse options only when run as a script so that importing this module
+    # (e.g. from create_user_with_api_key.py) does not consume the importer's command line.
+    parser = optparse.OptionParser()
+    parser.add_option( '-c', '--config_file', dest='config', action='store', default='config/tool_shed.ini.sample' )
+    parser.add_option( '-e', '--execute', dest='method', action='store', default='check_db' )
+    ( args, options ) = parser.parse_args()
+    sys.exit( main( args ) )
diff --git a/scripts/tool_shed/bootstrap_tool_shed/create_user_with_api_key.py b/scripts/tool_shed/bootstrap_tool_shed/create_user_with_api_key.py
new file mode 100644
index 0000000..0cc4fe9
--- /dev/null
+++ b/scripts/tool_shed/bootstrap_tool_shed/create_user_with_api_key.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+
+import ConfigParser
+import logging
+import os
+import re
+import sys
+import optparse
+
+sys.path.insert(1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, os.pardir, 'lib' ) )
+sys.path.insert(1, os.path.join( os.path.dirname( __file__ ) ) )
+
+import galaxy.webapps.tool_shed.config as tool_shed_config
+from galaxy.web import security
+from galaxy.webapps.tool_shed.model import mapping
+from bootstrap_util import admin_user_info
+
+log = logging.getLogger( __name__ )
+
+
+VALID_PUBLICNAME_RE = re.compile( "^[a-z0-9\-]+$" )
+VALID_EMAIL_RE = re.compile( "[^@]+@[^@]+\.[^@]+" )
+
+
+class BootstrapApplication( object ):
+    """
+    Creates a basic Tool Shed application in order to discover the database connection and use SQL
+    to create a user and API key.
+    """
+
+    def __init__( self, config ):
+        self.config = config
+        if not self.config.database_connection:
+            self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % str( config.database )
+        print 'Using database connection: ', self.config.database_connection
+        # Setup the database engine and ORM
+        self.model = mapping.init( self.config.file_path,
+                                   self.config.database_connection,
+                                   engine_options={},
+                                   create_tables=False )
+        self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+        self.hgweb_config_manager = self.model.hgweb_config_manager
+        self.hgweb_config_manager.hgweb_config_dir = self.config.hgweb_config_dir
+        print 'Using hgweb.config file: ', self.hgweb_config_manager.hgweb_config
+
+    @property
+    def sa_session( self ):
+        """Returns a SQLAlchemy session."""
+        return self.model.context.current
+
+    def shutdown( self ):
+        pass
+
+
+def create_api_key( app, user ):
+    api_key = app.security.get_new_guid()
+    new_key = app.model.APIKeys()
+    new_key.user_id = user.id
+    new_key.key = api_key
+    app.sa_session.add( new_key )
+    app.sa_session.flush()
+    return api_key
+
+
+def create_user( app ):
+    (username, email, password) = admin_user_info()
+    if email and password and username:
+        invalid_message = validate( email, password, username )
+        if invalid_message:
+            print invalid_message
+        else:
+            user = app.model.User( email=email )
+            user.set_password_cleartext( password )
+            user.username = username
+            app.sa_session.add( user )
+            app.sa_session.flush()
+            app.model.security_agent.create_private_user_role( user )
+            return user
+    else:
+        print "Missing required values for email: ", email, ", password: ", password, ", username: ", username
+    return None
+
+
+def validate( email, password, username ):
+    message = validate_email( email )
+    if not message:
+        message = validate_password( password )
+    if not message:
+        message = validate_publicname( username )
+    return message
+
+
+def validate_email( email ):
+    """Validates the email format."""
+    message = ''
+    if not( VALID_EMAIL_RE.match( email ) ):
+        message = "Please enter a real email address."
+    elif len( email ) > 255:
+        message = "Email address exceeds maximum allowable length."
+    return message
+
+
+def validate_password( password ):
+    if len( password ) < 6:
+        return "Use a password of at least 6 characters"
+    return ''
+
+
+def validate_publicname( username ):
+    """Validates the public username."""
+    if len( username ) < 3:
+        return "Public name must be at least 3 characters in length"
+    if len( username ) > 255:
+        return "Public name cannot be more than 255 characters in length"
+    if not( VALID_PUBLICNAME_RE.match( username ) ):
+        return "Public name must contain only lower-case letters, numbers and '-'"
+    return ''
+
+
+if __name__ == "__main__":
+    parser = optparse.OptionParser( description='Create a user with API key.' )
+    parser.add_option( '-c', dest='config', action='store', help='.ini file to retrieve toolshed configuration from' )
+    ( args, options ) = parser.parse_args()
+    ini_file = args.config
+    config_parser = ConfigParser.ConfigParser( { 'here': os.getcwd() } )
+    print "Reading ini file: ", ini_file
+    config_parser.read( ini_file )
+    config_dict = {}
+    for key, value in config_parser.items( "app:main" ):
+        config_dict[ key ] = value
+    config = tool_shed_config.Configuration( **config_dict )
+    app = BootstrapApplication( config )
+    user = create_user( app )
+    if user is not None:
+        api_key = create_api_key( app, user )
+        print "Created new user with public username '", user.username, "'.  An API key was also created and associated with the user."
+        sys.exit(0)
+    else:
+        sys.exit("Problem creating a new user and an associated API key.")
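
The validators above return an empty string on success and a message on
failure; the public-name rule in particular admits only lower-case names.
A self-contained illustration of that regular expression:

    import re

    VALID_PUBLICNAME_RE = re.compile( "^[a-z0-9\-]+$" )
    print bool( VALID_PUBLICNAME_RE.match( 'Admin' ) )    # False: upper-case rejected
    print bool( VALID_PUBLICNAME_RE.match( 'admin-2' ) )  # True
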
diff --git a/scripts/tool_shed/bootstrap_tool_shed/parse_run_sh_args.sh b/scripts/tool_shed/bootstrap_tool_shed/parse_run_sh_args.sh
new file mode 100755
index 0000000..e92a2bd
--- /dev/null
+++ b/scripts/tool_shed/bootstrap_tool_shed/parse_run_sh_args.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+while (( $# )) ; do
+    case "$1" in
+	-bootstrap_from_tool_shed|--bootstrap_from_tool_shed)
+		bootstrap="true"
+		tool_shed=$2
+		echo $tool_shed
+		exit 0
+		break
+		;;
+	esac
+	shift 1
+done
+exit 1
\ No newline at end of file
diff --git a/scripts/tool_shed/bootstrap_tool_shed/user_info.xml.sample b/scripts/tool_shed/bootstrap_tool_shed/user_info.xml.sample
new file mode 100644
index 0000000..a733152
--- /dev/null
+++ b/scripts/tool_shed/bootstrap_tool_shed/user_info.xml.sample
@@ -0,0 +1,12 @@
+<?xml version="1.0"?>
+<!--
+If you are planning to use the export / import repository capsule process for development in your
+local Tool Shed, make sure to change the following to be the account you'll use for development.
+When you export your developed repositories for importing into another Tool Shed, this account
+must be available in that Tool Shed.
+-->
+<user>
+    <email>admin@test.org</email>
+    <password>testuser</password>
+    <username>admin</username>
+</user>
diff --git a/scripts/tool_shed/build_ts_whoosh_index.py b/scripts/tool_shed/build_ts_whoosh_index.py
new file mode 100644
index 0000000..8f40c21
--- /dev/null
+++ b/scripts/tool_shed/build_ts_whoosh_index.py
@@ -0,0 +1,222 @@
+"""
+Build indexes for searching the Tool Shed.
+Run this script from the Tool Shed folder, for example:
+
+$ python scripts/tool_shed/build_ts_whoosh_index.py -c config/tool_shed.ini
+
+Make sure you have adjusted your config to:
+ * turn on searching via toolshed_search_on
+ * specify whoosh_index_dir where the indexes will be placed
+
+Also make sure that the GALAXY_EGGS_PATH variable is properly set
+in case you are using a non-default location for Galaxy.
+"""
+import ConfigParser
+import os
+import sys
+from optparse import OptionParser
+
+from whoosh.fields import Schema, STORED, TEXT
+from whoosh.filedb.filestore import FileStorage
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+import galaxy.webapps.tool_shed.model.mapping
+from galaxy.tools.loader_directory import load_tool_elements_from_path
+from galaxy.util import directory_hash_id, pretty_print_time_interval
+from galaxy.webapps.tool_shed import config, model
+
+repo_schema = Schema(
+    id=STORED,
+    name=TEXT( stored=True ),
+    description=TEXT( stored=True ),
+    long_description=TEXT( stored=True ),
+    homepage_url=TEXT( stored=True ),
+    remote_repository_url=TEXT( stored=True ),
+    repo_owner_username=TEXT( stored=True ),
+    times_downloaded=STORED,
+    approved=STORED,
+    last_updated=STORED,
+    full_last_updated=STORED )
+
+tool_schema = Schema(
+    name=TEXT( stored=True ),
+    description=TEXT( stored=True ),
+    owner=TEXT( stored=True ),
+    id=TEXT( stored=True ),
+    help=TEXT( stored=True ),
+    version=TEXT( stored=True),
+    repo_name=TEXT( stored=True ),
+    repo_owner_username=TEXT( stored=True ),
+    repo_id=STORED )
+
+
+def build_index( sa_session, whoosh_index_dir, path_to_repositories ):
+    """
+    Build the search indexes. One for repositories and another for tools within.
+    """
+    #  Rare race condition exists here and below
+    if not os.path.exists( whoosh_index_dir ):
+        os.makedirs( whoosh_index_dir )
+    tool_index_dir = os.path.join( whoosh_index_dir, 'tools' )
+    if not os.path.exists( tool_index_dir ):
+        os.makedirs( tool_index_dir )
+
+    repo_index_storage = FileStorage( whoosh_index_dir )
+    tool_index_storage = FileStorage( tool_index_dir )
+
+    repo_index = repo_index_storage.create_index( repo_schema )
+    tool_index = tool_index_storage.create_index( tool_schema )
+
+    repo_index_writer = repo_index.writer()
+    tool_index_writer = tool_index.writer()
+
+    def to_unicode( a_basestr ):
+        if type( a_basestr ) is str:
+            return unicode( a_basestr, 'utf-8' )
+        else:
+            return a_basestr
+
+    repos_indexed = 0
+    tools_indexed = 0
+
+    for repo in get_repos( sa_session, path_to_repositories ):
+
+        repo_index_writer.add_document( id=repo.get( 'id' ),
+                                        name=to_unicode( repo.get( 'name' ) ),
+                                        description=to_unicode( repo.get( 'description' ) ),
+                                        long_description=to_unicode( repo.get( 'long_description' ) ),
+                                        homepage_url=to_unicode( repo.get( 'homepage_url' ) ),
+                                        remote_repository_url=to_unicode( repo.get( 'remote_repository_url' ) ),
+                                        repo_owner_username=to_unicode( repo.get( 'repo_owner_username' ) ),
+                                        times_downloaded=repo.get( 'times_downloaded' ),
+                                        approved=repo.get( 'approved' ),
+                                        last_updated=repo.get( 'last_updated' ),
+                                        full_last_updated=repo.get( 'full_last_updated' ) )
+        #  Tools get their own index
+        for tool in repo.get( 'tools_list' ):
+            tool_index_writer.add_document( id=to_unicode( tool.get( 'id' ) ),
+                                            name=to_unicode( tool.get( 'name' ) ),
+                                            version=to_unicode( tool.get( 'version' ) ),
+                                            description=to_unicode( tool.get( 'description' ) ),
+                                            help=to_unicode( tool.get( 'help' ) ),
+                                            repo_owner_username=to_unicode( repo.get( 'repo_owner_username' ) ),
+                                            repo_name=to_unicode( repo.get( 'name' ) ),
+                                            repo_id=repo.get( 'id' ) )
+            tools_indexed += 1
+            print tools_indexed, 'tools (', tool.get( 'id' ), ')'
+
+        repos_indexed += 1
+        print repos_indexed, 'repos (', repo.get( 'id' ), ')'
+
+    tool_index_writer.commit()
+    repo_index_writer.commit()
+
+    print "TOTAL repos indexed: ", repos_indexed
+    print "TOTAL tools indexed: ", tools_indexed
+
+
+def get_repos( sa_session, path_to_repositories ):
+    """
+    Load repos from DB and included tools from .xml configs.
+    """
+    results = []
+    for repo in sa_session.query( model.Repository ).filter_by( deleted=False ).filter_by( deprecated=False ).filter( model.Repository.type != 'tool_dependency_definition' ):
+
+        repo_id = repo.id
+        name = repo.name
+        description = repo.description
+        long_description = repo.long_description
+        homepage_url = repo.homepage_url
+        remote_repository_url = repo.remote_repository_url
+
+        times_downloaded = repo.times_downloaded
+        if not isinstance( times_downloaded, ( int, long ) ):
+            times_downloaded = 0
+
+        repo_owner_username = ''
+        if repo.user_id is not None:
+            user = sa_session.query( model.User ).filter( model.User.id == repo.user_id ).one()
+            repo_owner_username = user.username
+
+        approved = 'no'
+        for review in repo.reviews:
+            if review.approved == 'yes':
+                approved = 'yes'
+                break
+
+        #  Format the time since last update to be nicely readable.
+        last_updated = pretty_print_time_interval( repo.update_time )
+        full_last_updated = repo.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+
+        #  Parse all the tools within repo for separate index.
+        tools_list = []
+        path = os.path.join( path_to_repositories, *directory_hash_id( repo.id ) )
+        path = os.path.join( path, "repo_%d" % repo.id )
+        if os.path.exists(path):
+            tools_list.extend( load_one_dir( path ) )
+            for root, dirs, files in os.walk( path ):
+                if '.hg' in dirs:
+                    dirs.remove('.hg')
+                for dirname in dirs:
+                    tools_in_dir = load_one_dir( os.path.join( root, dirname ) )
+                    tools_list.extend( tools_in_dir )
+
+        results.append(dict( id=repo_id,
+                             name=name,
+                             description=description,
+                             long_description=long_description,
+                             homepage_url=homepage_url,
+                             remote_repository_url=remote_repository_url,
+                             repo_owner_username=repo_owner_username,
+                             times_downloaded=times_downloaded,
+                             approved=approved,
+                             last_updated=last_updated,
+                             full_last_updated=full_last_updated,
+                             tools_list=tools_list ) )
+    return results
+
+
+def load_one_dir( path ):
+    tools_in_dir = []
+    tool_elems = load_tool_elements_from_path( path )
+    if tool_elems:
+        for elem in tool_elems:
+            root = elem[1].getroot()
+            if root.tag == 'tool':
+                tool = {}
+                if root.find( 'help' ) is not None:
+                    tool.update( dict( help=root.find( 'help' ).text ) )
+                if root.find( 'description' ) is not None:
+                    tool.update( dict( description=root.find( 'description' ).text ) )
+                tool.update( dict( id=root.attrib.get( 'id' ),
+                                   name=root.attrib.get( 'name' ),
+                                   version=root.attrib.get( 'version' ) ) )
+                tools_in_dir.append( tool )
+    return tools_in_dir
+
+
+def get_sa_session_and_needed_config_settings( path_to_tool_shed_config ):
+    conf_parser = ConfigParser.ConfigParser( { 'here': os.getcwd() } )
+    conf_parser.read( path_to_tool_shed_config )
+    kwds = dict()
+    for key, value in conf_parser.items( "app:main" ):
+        kwds[ key ] = value
+    config_settings = config.Configuration( **kwds )
+    db_con = config_settings.database_connection
+    if not db_con:
+        db_con = "sqlite:///%s?isolation_level=IMMEDIATE" % config_settings.database
+    model = galaxy.webapps.tool_shed.model.mapping.init( config_settings.file_path, db_con, engine_options={}, create_tables=False )
+    return model.context.current, config_settings
+
+
+if __name__ == "__main__":
+    parser = OptionParser()
+    parser.add_option("-c", "--config", dest="path_to_tool_shed_config", default="config/tool_shed.ini", help="specify tool_shed.ini location")
+    (options, args) = parser.parse_args()
+    path_to_tool_shed_config = options.path_to_tool_shed_config
+    sa_session, config_settings = get_sa_session_and_needed_config_settings( path_to_tool_shed_config )
+    whoosh_index_dir = config_settings.get( 'whoosh_index_dir', None )
+    path_to_repositories = config_settings.get( 'file_path', 'database/community_files' )
+    build_index( sa_session, whoosh_index_dir, path_to_repositories )
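
Once built, the repository index can be searched with the standard Whoosh
API. A minimal sketch; the index directory is an assumption and should match
your whoosh_index_dir setting:

    from whoosh.filedb.filestore import FileStorage
    from whoosh.qparser import QueryParser

    whoosh_index_dir = 'database/toolshed_whoosh_indexes'  # assumption
    storage = FileStorage( whoosh_index_dir )
    index = storage.open_index()
    with index.searcher() as searcher:
        query = QueryParser( 'name', schema=index.schema ).parse( u'bwa' )
        for hit in searcher.search( query ):
            print hit[ 'name' ], hit[ 'repo_owner_username' ]
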
diff --git a/scripts/tool_shed/check_download_urls.py b/scripts/tool_shed/check_download_urls.py
new file mode 100644
index 0000000..c676675
--- /dev/null
+++ b/scripts/tool_shed/check_download_urls.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+# Script that checks toolshed tags to see if URLs are accessible.
+# Does not currently handle 'download_binary'
+import os
+import urllib2
+import xml.etree.ElementTree as ET
+from optparse import OptionParser
+
+FILENAMES = [ 'tool_dependencies.xml' ]
+ACTION_TYPES = [ 'download_by_url', 'download_file' ]
+
+
+def main():
+    parser = OptionParser()
+    parser.add_option( '-d', '--directory', dest='directory', action='store', type="string", default='.', help='Root directory' )
+
+    ( options, args ) = parser.parse_args()
+
+    for (dirpath, dirnames, filenames) in os.walk( options.directory ):
+        for filename in filenames:
+            if filename in FILENAMES:
+                path = os.path.join( dirpath, filename )
+                try:
+                    tree = ET.parse( path )
+                    root = tree.getroot()
+                    for action_type in ACTION_TYPES:
+                        for element in root.findall( ".//action[@type='%s']" % action_type ):
+                            url = element.text.strip()
+                            try:
+                                urllib2.urlopen( urllib2.Request( url ) )
+                            except Exception as e:
+                                print "Bad URL '%s' in file '%s': %s" % ( url, path, e )
+                except Exception as e:
+                    print "Unable to check XML file '%s': %s" % ( path, e )
+
+
+if __name__ == "__main__":
+    main()
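
The findall() queries above match <action> elements of the named types
anywhere in the file; a tool_dependencies.xml fragment they would check looks
roughly like this (package name and URL are made up):

    <tool_dependencies>
        <package name="samtools" version="0.1.19">
            <install version="1.0">
                <actions>
                    <action type="download_by_url">http://example.org/samtools-0.1.19.tar.bz2</action>
                </actions>
            </install>
        </package>
    </tool_dependencies>
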
diff --git a/scripts/tool_shed/check_filesystem_for_empty_tool_dependency_installation_paths.py b/scripts/tool_shed/check_filesystem_for_empty_tool_dependency_installation_paths.py
new file mode 100644
index 0000000..dbba90a
--- /dev/null
+++ b/scripts/tool_shed/check_filesystem_for_empty_tool_dependency_installation_paths.py
@@ -0,0 +1,70 @@
+import argparse
+import os
+import shutil
+import sys
+
+sys.path.insert(1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, 'lib' ) )
+
+from tool_shed.util.basic_util import INSTALLATION_LOG
+
+
+def main( args ):
+    empty_installation_paths = []
+    if not os.path.exists( args.basepath ):
+        print 'Tool dependency path %s does not exist.' % str( args.basepath )
+        return 1
+    print 'Checking path %s for empty tool dependency installation directories.' % args.basepath
+    for root, dirs, files in os.walk( args.basepath ):
+        path_parts = root.replace( args.basepath, '' ).lstrip( '/' ).split( os.sep )
+        # Skip certain special directories.
+        if '__virtualenv_src' in dirs:
+            dirs.remove( '__virtualenv_src' )
+        if 'environment_settings' in dirs:
+            dirs.remove( 'environment_settings' )
+        # Do not process the current path if it does not match the pattern
+        # <name>/<version>/<owner>/<repository>/<changeset>.
+        if len( path_parts ) != 5:
+            continue
+        # We have a tool dependency installation path.
+        no_dirs = False
+        no_files = False
+        if len( dirs ) == 0:
+            no_dirs = True
+        if len( files ) == 0 or ( len( files ) == 1 and INSTALLATION_LOG in files ):
+            no_files = True
+        if no_files and no_dirs and root not in empty_installation_paths:
+            empty_installation_paths.append( root )
+    if len( empty_installation_paths ) > 0:
+        print 'The following %d tool dependency installation directories were found to be empty or contain only the file %s.' % \
+            ( len( empty_installation_paths ), INSTALLATION_LOG )
+        if args.delete:
+            for path in empty_installation_paths:
+                if os.path.exists( path ):
+                    shutil.rmtree( path )
+                    print 'Deleted %s.' % path
+        else:
+            for empty_installation_path in empty_installation_paths:
+                print empty_installation_path
+    else:
+        print 'No empty tool dependency installation directories found.'
+    return 0
+
+
+if __name__ == '__main__':
+    description = 'Determine if there are any tool dependency installation paths that should be removed. Remove them if '
+    description += 'the --delete command line argument is provided.'
+    parser = argparse.ArgumentParser( description=description )
+    parser.add_argument( '--delete',
+                         dest='delete',
+                         required=False,
+                         action='store_true',
+                         default=False,
+                         help='Whether to delete empty folders or list them on exit.' )
+    parser.add_argument( '--basepath',
+                         dest='basepath',
+                         required=True,
+                         action='store',
+                         metavar='name',
+                         help='The base path where tool dependencies are installed.' )
+    args = parser.parse_args()
+    sys.exit( main( args ) )
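+
+# Example invocations (hypothetical base path). The first lists empty
+# installation directories; the second deletes them as well:
+#
+#     python scripts/tool_shed/check_filesystem_for_empty_tool_dependency_installation_paths.py \
+#         --basepath /srv/galaxy/tool_dependencies
+#     python scripts/tool_shed/check_filesystem_for_empty_tool_dependency_installation_paths.py \
+#         --basepath /srv/galaxy/tool_dependencies --delete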
diff --git a/scripts/tool_shed/check_s3_for_empty_tool_dependency_installation_paths.py b/scripts/tool_shed/check_s3_for_empty_tool_dependency_installation_paths.py
new file mode 100644
index 0000000..f48136d
--- /dev/null
+++ b/scripts/tool_shed/check_s3_for_empty_tool_dependency_installation_paths.py
@@ -0,0 +1,153 @@
+import argparse
+import os
+import sys
+
+sys.path.insert(1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, 'lib' ) )
+
+import boto
+
+from galaxy.util import asbool
+from tool_shed.util.basic_util import INSTALLATION_LOG
+
+
+class BucketList( object ):
+
+    def __init__( self, amazon_id, amazon_secret, bucket ):
+        # Connect to S3 using the provided Amazon access key and secret identifier.
+        self.s3 = boto.connect_s3( amazon_id, amazon_secret )
+        self.bucket_name = bucket
+        # Connect to S3 using the received bucket name.
+        self.bucket = boto.s3.bucket.Bucket( self.s3, bucket )
+        self.install_dirs = self.get_tool_dependency_install_paths()
+        self.empty_installation_paths = self.check_for_empty_tool_dependency_installation_paths()
+
+    def display_empty_installation_paths( self ):
+        for empty_installation_path in self.empty_installation_paths:
+            print empty_installation_path
+
+    def delete_empty_installation_paths( self ):
+        print 'Deleting empty installation paths.'
+        for empty_installation_path in self.empty_installation_paths:
+            # Get all keys in the S3 bucket that start with the installation path, and delete each one.
+            for path_to_delete in self.bucket.list( prefix=empty_installation_path ):
+                self.bucket.delete_key( path_to_delete.key )
+            print 'Deleted empty path %s' % str( empty_installation_path )
+
+    def get_tool_dependency_install_paths( self ):
+        found_paths = []
+        for item in self.bucket.list():
+            name = str( item.name )
+            # Skip environment_settings and __virtualenv_src, since these directories do not contain package tool dependencies.
+            if name.startswith( 'environment_settings' ) or name.startswith( '__virtualenv_src' ):
+                continue
+            paths = name.rstrip('/').split( '/' )
+            # Paths are in the format name/version/owner/repository/changeset_revision. If the changeset revision is
+            # present, we need to check the contents of that path. If not, then the tool dependency was completely
+            # uninstalled.
+            if len( paths ) >= 5:
+                td_install_dir = '/'.join( paths[ :5 ] ) + '/'
+                if td_install_dir not in found_paths:
+                    found_paths.append( td_install_dir )
+        return found_paths
+
+    def check_for_empty_tool_dependency_installation_paths( self ):
+        empty_directories = []
+        for item in self.install_dirs:
+            # Get all entries under the path for this tool dependency.
+            contents = self.bucket.list( prefix=item )
+            tool_dependency_path_contents = []
+            # Find out if there are two or fewer entries in the path. The first entry will be the installation path itself.
+            # If only one other entry exists, and the full path ends with the installation log, this is an incorrectly installed
+            # tool dependency.
+            for entry in contents:
+                tool_dependency_path_contents.append( entry )
+                # If there are more than two items in the path, we cannot safely assume that the dependency failed to
+                # install correctly.
+                if len( tool_dependency_path_contents ) > 2:
+                    break
+            # If the root directory is the only entry in the path, we have an empty tool dependency installation path.
+            if len( tool_dependency_path_contents ) == 1:
+                empty_directories.append( tool_dependency_path_contents[ 0 ] )
+            # Otherwise, if the only other entry is the installation log, we have an installation path that should be deleted.
+            # This would not be the case in a Galaxy instance, since the Galaxy admin will need to verify the contents of
+            # the installation path in order to determine which action should be taken.
+            elif len( tool_dependency_path_contents ) == 2 and \
+                    tool_dependency_path_contents[1].name.endswith( INSTALLATION_LOG ):
+                empty_directories.append( tool_dependency_path_contents[ 0 ] )
+        return [ item.name for item in empty_directories ]
+
+
+def main( args ):
+    '''
+    Amazon credentials can be provided in one of three ways:
+    1. By specifying them on the command line with the --id and --secret arguments.
+    2. By specifying a path to a file that contains the credentials in the form ACCESS_KEY:SECRET_KEY
+       using the --s3passwd argument.
+    3. By specifying the above path in the 's3passwd' environment variable.
+    Each listed option will override the ones below it, if present.
+    '''
+    if None in [ args.id, args.secret ]:
+        if args.s3passwd is None:
+            args.s3passwd = os.environ.get( 's3passwd', None )
+        if args.s3passwd is not None and os.path.exists( args.s3passwd ):
+            awsid, secret = open( args.s3passwd, 'r' ).read().rstrip( '\n' ).split( ':' )
+        else:
+            print 'Amazon ID and secret not provided, and no s3passwd file found.'
+            return 1
+    else:
+        awsid = args.id
+        secret = args.secret
+    dependency_cleaner = BucketList( awsid, secret, args.bucket )
+    if len( dependency_cleaner.empty_installation_paths ) == 0:
+        print 'No empty installation paths found, exiting.'
+        return 0
+    print 'The following %d tool dependency installation paths were found to be empty or contain only the file %s.' % \
+        ( len( dependency_cleaner.empty_installation_paths ), INSTALLATION_LOG )
+    if asbool( args.delete ):
+        dependency_cleaner.delete_empty_installation_paths()
+    else:
+        for empty_installation_path in dependency_cleaner.empty_installation_paths:
+            print empty_installation_path
+    return 0
+
+
+if __name__ == '__main__':
+    description = 'Determine if there are any tool dependency installation paths that should be removed. Remove them if '
+    description += 'the --delete command line argument is provided with a true value.'
+    parser = argparse.ArgumentParser( description=description )
+    parser.add_argument( '--delete',
+                         dest='delete',
+                         required=True,
+                         action='store',
+                         default=False,
+                         type=asbool,
+                         help='Whether to delete empty folders or list them on exit.' )
+    parser.add_argument( '--bucket',
+                         dest='bucket',
+                         required=True,
+                         action='store',
+                         metavar='name',
+                         help='The S3 bucket where tool dependencies are installed.' )
+    parser.add_argument( '--id',
+                         dest='id',
+                         required=False,
+                         action='store',
+                         default=None,
+                         metavar='ACCESS_KEY',
+                         help='The identifier for an Amazon account that has read access to the bucket.' )
+    parser.add_argument( '--secret',
+                         dest='secret',
+                         required=False,
+                         action='store',
+                         default=None,
+                         metavar='SECRET_KEY',
+                         help='The secret key for an Amazon account that has upload/delete access to the bucket.' )
+    parser.add_argument( '--s3passwd',
+                         dest='s3passwd',
+                         required=False,
+                         action='store',
+                         default=None,
+                         metavar='path/file',
+                         help='The path to a file containing Amazon access credentials, in the format KEY:SECRET.' )
+    args = parser.parse_args()
+    sys.exit( main( args ) )
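+
+# Example credentials file for --s3passwd (one ACCESS_KEY:SECRET_KEY line; the
+# keys below are Amazon's documented example values):
+#
+#     AKIAIOSFODNN7EXAMPLE:wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
+#
+# Example invocation (hypothetical bucket name), listing but not deleting:
+#
+#     python scripts/tool_shed/check_s3_for_empty_tool_dependency_installation_paths.py \
+#         --delete false --bucket my-toolshed-deps --s3passwd ~/.s3passwd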
diff --git a/scripts/tool_shed/clean_up_tool_dependency_directory.py b/scripts/tool_shed/clean_up_tool_dependency_directory.py
new file mode 100644
index 0000000..86a6d37
--- /dev/null
+++ b/scripts/tool_shed/clean_up_tool_dependency_directory.py
@@ -0,0 +1,32 @@
+import argparse
+import os
+import shutil
+import sys
+
+
+def main( args ):
+    if not os.path.exists( args.tool_dependency_dir ):
+        print 'Tool dependency base path %s does not exist, creating.' % str( args.tool_dependency_dir )
+        os.mkdir( args.tool_dependency_dir )
+        return 0
+    else:
+        for content in os.listdir( args.tool_dependency_dir ):
+            print 'Deleting %s from %s.' % ( content, args.tool_dependency_dir )
+            full_path = os.path.join( args.tool_dependency_dir, content )
+            if os.path.isdir( full_path ):
+                shutil.rmtree( full_path )
+            else:
+                os.remove( full_path )
+        return 0
+
+
+if __name__ == '__main__':
+    description = 'Clean out the configured tool dependency path, creating it if it does not exist.'
+    parser = argparse.ArgumentParser( description=description )
+    parser.add_argument( '--tool_dependency_dir',
+                         dest='tool_dependency_dir',
+                         required=True,
+                         action='store',
+                         metavar='name',
+                         help='The base path where tool dependencies will be installed.' )
+    args = parser.parse_args()
+    sys.exit( main( args ) )
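+
+# Example invocation (hypothetical path); removes everything under the given
+# directory, or creates the directory if it does not yet exist:
+#
+#     python scripts/tool_shed/clean_up_tool_dependency_directory.py \
+#         --tool_dependency_dir /srv/galaxy/tool_dependencies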
diff --git a/scripts/tool_shed/deprecate_repositories_without_metadata.py b/scripts/tool_shed/deprecate_repositories_without_metadata.py
new file mode 100644
index 0000000..5832e31
--- /dev/null
+++ b/scripts/tool_shed/deprecate_repositories_without_metadata.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+
+import ConfigParser
+import logging
+import os
+import string
+import sys
+import textwrap
+import time
+from datetime import datetime, timedelta
+from time import strftime
+from optparse import OptionParser
+
+import sqlalchemy as sa
+from sqlalchemy import and_, distinct, false, not_
+
+sys.path.insert(1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, 'lib' ) )
+
+import galaxy.webapps.tool_shed.config as tool_shed_config
+import galaxy.webapps.tool_shed.model.mapping
+from galaxy.util import send_mail as galaxy_send_mail
+from galaxy.util import build_url
+
+log = logging.getLogger()
+log.setLevel( logging.DEBUG )
+log.addHandler( logging.StreamHandler( sys.stdout ) )
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def build_citable_url( host, repository ):
+    return build_url( host, pathspec=[ 'view', repository.user.username, repository.name ] )
+
+
+def main():
+    '''
+    Script to deprecate any repositories that are older than n days, and have been empty since creation.
+    '''
+    parser = OptionParser()
+    parser.add_option( "-d", "--days", dest="days", action="store", type="int", help="number of days (14)", default=14 )
+    parser.add_option( "-i", "--info_only", action="store_true", dest="info_only", help="info about the requested action", default=False )
+    parser.add_option( "-v", "--verbose", action="store_true", dest="verbose", help="verbose mode, print the name of each repository", default=False )
+    ( options, args ) = parser.parse_args()
+    try:
+        ini_file = args[0]
+    except IndexError:
+        sys.exit( "Usage: python %s <tool shed .ini file> [options]" % sys.argv[ 0 ] )
+    config_parser = ConfigParser.ConfigParser( {'here': os.getcwd()} )
+    config_parser.read( ini_file )
+    config_dict = {}
+    for key, value in config_parser.items( "app:main" ):
+        config_dict[key] = value
+    config = tool_shed_config.Configuration( **config_dict )
+
+    app = DeprecateRepositoriesApplication( config )
+    cutoff_time = datetime.utcnow() - timedelta( days=options.days )
+    now = strftime( "%Y-%m-%d %H:%M:%S" )
+    print "\n####################################################################################"
+    print "# %s - Handling stuff older than %i days" % ( now, options.days )
+
+    if options.info_only:
+        print "# Displaying info only ( --info_only )"
+
+    deprecate_repositories( app, cutoff_time, days=options.days, info_only=options.info_only, verbose=options.verbose )
+
+
+def send_mail_to_owner( app, name, owner, email, repositories_deprecated, days=14 ):
+    '''
+    Sends an email to the owner of the provided repository.
+    '''
+    smtp_server = app.config.get( 'smtp_server', None )
+    from_address = app.config.get( 'email_from', None )
+    # Since there is no way to programmatically determine the URL for the tool shed from the .ini file, this method requires that
+    # an environment variable named TOOL_SHED_CANONICAL_URL be set, pointing to the tool shed that is being checked.
+    url = os.environ.get( 'TOOL_SHED_CANONICAL_URL', None )
+    if None in [ smtp_server, from_address ]:
+        print '# Mail not configured, not sending email to repository owner.'
+        return
+    elif url is None:
+        print '# Environment variable TOOL_SHED_CANONICAL_URL not set, not sending email to repository owner.'
+        return
+    subject = "Regarding your tool shed repositories at %s" % url
+    message_body_template = 'The tool shed automated repository checker has discovered that one or more of your repositories hosted ' + \
+        'at this tool shed url ${url} have remained empty for over ${days} days, so they have been marked as deprecated. If you have plans ' + \
+        'for these repositories, you can mark them as un-deprecated at any time.'
+    message_template = string.Template( message_body_template )
+    body = '\n'.join( textwrap.wrap( message_template.safe_substitute( days=days, url=url ), width=95 ) )
+    body += '\n\n'
+    body += 'Repositories that were deprecated:\n'
+    body += '\n'.join( [ build_citable_url( url, repository ) for repository in repositories_deprecated ] )
+    try:
+        galaxy_send_mail( from_address, email, subject, body, app.config )
+        print "# An email has been sent to %s, the owner of %s." % ( owner, ', '.join( [ repository.name for repository in repositories_deprecated ] ) )
+        return True
+    except Exception as e:
+        print "# An error occurred attempting to send email: %s" % str( e )
+        return False
+
+
+def deprecate_repositories( app, cutoff_time, days=14, info_only=False, verbose=False ):
+    # This method will get a list of repositories that were created on or before cutoff_time, but have never
+    # had any metadata records associated with them. Then it will iterate through that list and deprecate the
+    # repositories, sending an email to each repository owner.
+    start = time.time()
+    repository_ids_to_not_check = []
+    # Get a unique list of repository ids from the repository_metadata table. Any repository ID found in this table is not
+    # empty, and will not be checked.
+    metadata_records = sa.select( [ distinct( app.model.RepositoryMetadata.table.c.repository_id ) ],
+                                  from_obj=app.model.RepositoryMetadata.table ) \
+                         .execute()
+    for metadata_record in metadata_records:
+        repository_ids_to_not_check.append( metadata_record.repository_id )
+    # Get the repositories that are (a) not present in the above list, and (b) older than the specified time.
+    # This will yield a list of repositories that have been created more than n days ago, but never populated.
+    repository_query = sa.select( [ app.model.Repository.table.c.id ],
+                                  whereclause=and_( app.model.Repository.table.c.create_time < cutoff_time,
+                                                    app.model.Repository.table.c.deprecated == false(),
+                                                    app.model.Repository.table.c.deleted == false(),
+                                                    not_( app.model.Repository.table.c.id.in_( repository_ids_to_not_check ) ) ),
+                                  from_obj=[ app.model.Repository.table ] )
+    query_result = repository_query.execute()
+    repositories = []
+    repositories_by_owner = {}
+    repository_ids = [ row.id for row in query_result ]
+    # Iterate through the list of repository ids for empty repositories and deprecate them unless info_only is set.
+    for repository_id in repository_ids:
+        repository = app.sa_session.query( app.model.Repository ) \
+            .filter( app.model.Repository.table.c.id == repository_id ).one()
+        owner = repository.user
+        if info_only:
+            print '# Repository %s owned by %s would have been deprecated, but info_only was set.' % ( repository.name, repository.user.username )
+        else:
+            if verbose:
+                print '# Deprecating repository %s owned by %s.' % ( repository.name, owner.username )
+            if owner.username not in repositories_by_owner:
+                repositories_by_owner[ owner.username ] = dict( owner=owner, repositories=[] )
+            repositories_by_owner[ owner.username ][ 'repositories' ].append( repository )
+            repositories.append( repository )
+    # Send an email to each repository owner, listing the repositories that were deprecated.
+    for repository_owner in repositories_by_owner:
+        for repository in repositories_by_owner[ repository_owner ][ 'repositories' ]:
+            repository.deprecated = True
+            app.sa_session.add( repository )
+            app.sa_session.flush()
+        owner = repositories_by_owner[ repository_owner ][ 'owner' ]
+        send_mail_to_owner( app, repository.name, owner.username, owner.email, repositories_by_owner[ repository_owner ][ 'repositories' ], days )
+    stop = time.time()
+    print '# Deprecated %d repositories.' % len( repositories )
+    print "# Elapsed time: ", stop - start
+    print "####################################################################################"
+
+
+class DeprecateRepositoriesApplication( object ):
+    """Encapsulates the state of a Universe application"""
+    def __init__( self, config ):
+        if config.database_connection is False:
+            config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % config.database
+        # Setup the database engine and ORM
+        self.model = galaxy.webapps.tool_shed.model.mapping.init( config.file_path, config.database_connection, engine_options={}, create_tables=False )
+        self.config = config
+
+    @property
+    def sa_session( self ):
+        """
+        Returns a SQLAlchemy session -- currently just gets the current
+        session from the threadlocal session context, but this is provided
+        to allow migration toward a more SQLAlchemy 0.4 style of use.
+        """
+        return self.model.context.current
+
+    def shutdown( self ):
+        pass
+
+
+if __name__ == "__main__":
+    main()
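+
+# Example invocation (hypothetical URL and config path), deprecating
+# repositories that have been empty for more than 30 days and emailing owners:
+#
+#     TOOL_SHED_CANONICAL_URL=https://toolshed.example.org \
+#         python scripts/tool_shed/deprecate_repositories_without_metadata.py \
+#         config/tool_shed.ini --days 30 --verbose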
diff --git a/scripts/tool_shed/migrate_tools_to_repositories.py b/scripts/tool_shed/migrate_tools_to_repositories.py
new file mode 100644
index 0000000..f724f29
--- /dev/null
+++ b/scripts/tool_shed/migrate_tools_to_repositories.py
@@ -0,0 +1,337 @@
+#!/usr/bin/env python
+'''
+Migrate old Galaxy tool shed to next gen Galaxy tool shed.  Specifically, the tool archives stored as
+files in the old tool shed will be migrated to mercurial repositories in the next gen tool shed.  This
+script can be run any number of times as it initially eliminates any current repositories and db records
+associated with them, and then migrates the old tool shed's contents to the new tool shed.
+
+====== CRITICAL =======
+
+0. This script must be run on a repo updated to changeset:   5621:4618be57481b
+
+1. Before running this script, make sure the following config setting is set in tool_shed_wsgi.ini
+
+# Enable next-gen tool shed features
+enable_next_gen_tool_shed = True
+
+2. This script requires the Galaxy instance to use Postgres for database storage.
+
+To run this script, use "sh migrate_tools_to_repositories.sh" from this directory
+'''
+import ConfigParser
+import os
+import shutil
+import sys
+import tarfile
+import tempfile
+from time import strftime
+
+from mercurial import hg, ui
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
+
+import galaxy.webapps.tool_shed.app
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def directory_hash_id( id ):
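+    # For illustration: directory_hash_id( 123 ) returns [ '000' ] (ids 0-999
+    # all map into 000/), directory_hash_id( 123456 ) returns [ '123' ], and
+    # directory_hash_id( 1234567 ) returns [ '001', '234' ].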
+    s = str( id )
+    l = len( s )
+    # Shortcut -- ids 0-999 go under ../000/
+    if l < 4:
+        return [ "000" ]
+    # Pad with zeros until a multiple of three
+    padded = ( ( ( 3 - len( s ) ) % 3 ) * "0" ) + s
+    # Drop the last three digits -- 1000 files per directory
+    padded = padded[:-3]
+    # Break into chunks of three
+    return [ padded[i * 3:(i + 1) * 3] for i in range( len( padded ) // 3 ) ]
+
+
+def get_versions( app, item ):
+    """Get all versions of item whose state is a valid state"""
+    valid_states = [ app.model.Tool.states.NEW,
+                     app.model.Tool.states.WAITING,
+                     app.model.Tool.states.APPROVED,
+                     app.model.Tool.states.ARCHIVED ]
+    versions = [ item ]
+    this_item = item
+    while item.newer_version:
+        if item.newer_version.state in valid_states:
+            versions.append( item.newer_version )
+        item = item.newer_version
+    item = this_item
+    while item.older_version:
+        if item.older_version[ 0 ].state in valid_states:
+            versions.insert( 0, item.older_version[ 0 ] )
+        item = item.older_version[ 0 ]
+    return versions
+
+
+def get_approved_tools( app, sa_session ):
+    """Get only the latest version of each tool from the database whose state is approved"""
+    tools = []
+    for tool in sa_session.query( app.model.Tool ) \
+                          .order_by( app.model.Tool.table.c.name ):
+        if tool.state == app.model.Tool.states.APPROVED:
+            tools.append( tool )
+    return tools
+
+
+def create_repository_from_tool( app, sa_session, tool ):
+    # Make the repository name a form of the tool's tool_id by
+    # lower-casing everything and replacing any blank spaces with underscores.
+    repo_name = tool.tool_id.lower().replace( ' ', '_' )
+    print "Creating repository '%s' in database" % ( repo_name )
+    repository = app.model.Repository( name=repo_name,
+                                       description=tool.description,
+                                       user_id=tool.user_id )
+    # Flush to get the id
+    sa_session.add( repository )
+    sa_session.flush()
+    # Determine the local repository's path on disk
+    dir = os.path.join( app.config.file_path, *directory_hash_id( repository.id ) )
+    # Create directory if it does not exist
+    if not os.path.exists( dir ):
+        os.makedirs( dir )
+    # Define repository name inside hashed directory
+    repository_path = os.path.join( dir, "repo_%d" % repository.id )
+    # Create repository directory
+    if not os.path.exists( repository_path ):
+        os.makedirs( repository_path )
+    # Create the local hg repository
+    print "Creating repository '%s' on disk" % ( os.path.abspath( repository_path ) )
+    hg.repository( ui.ui(), os.path.abspath( repository_path ), create=True )
+    # Add an entry in the hgweb.config file for the new repository - this enables calls to repository.repo_path
+    add_hgweb_config_entry( repository, repository_path )
+    # Migrate tool categories
+    for tca in tool.categories:
+        category = tca.category
+        print "Associating category '%s' with repository '%s' in database" % ( category.name, repository.name )
+        rca = app.model.RepositoryCategoryAssociation( repository, category )
+        sa_session.add( rca )
+    sa_session.flush()
+    # Migrate tool ratings
+    print "Associating ratings for tool '%s' with repository '%s'" % ( tool.name, repository.name )
+    for tra in tool.ratings:
+        rra = app.model.RepositoryRatingAssociation( user=tra.user,
+                                                     rating=tra.rating,
+                                                     comment=tra.comment )
+        rra.repository = repository
+        sa_session.add( rra )
+    sa_session.flush()
+
+
+def add_hgweb_config_entry( repository, repository_path ):
+    # Add an entry in the hgweb.config file for a new repository.  This enables calls to repository.repo_path.
+    # An entry looks something like: repos/test/mira_assembler = database/community_files/000/repo_123
+    hgweb_config = "%s/hgweb.config" % os.getcwd()
+    entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path.lstrip( './' ) )
+    if os.path.exists( hgweb_config ):
+        output = open( hgweb_config, 'a' )
+    else:
+        output = open( hgweb_config, 'w' )
+        output.write( '[paths]\n' )
+    output.write( "%s\n" % entry )
+    output.close()
+
+
+def create_hgrc_file( repository ):
+    # At this point, an entry for the repository is required to be in the hgweb.config
+    # file so we can call repository.repo_path.
+    # Create a .hg/hgrc file that looks something like this:
+    # [web]
+    # allow_push = test
+    # name = convert_characters1
+    # push_ssl = False
+    # Upon repository creation, only the owner can push to it ( allow_push setting ),
+    # and since we support both http and https, we set push_ssl to False to override
+    # the default (which is True) in the mercurial api.
+    hgrc_file = os.path.abspath( os.path.join( repository.repo_path, ".hg", "hgrc" ) )
+    output = open( hgrc_file, 'w' )
+    output.write( '[web]\n' )
+    output.write( 'allow_push = %s\n' % repository.user.username )
+    output.write( 'name = %s\n' % repository.name )
+    output.write( 'push_ssl = false\n' )
+    output.flush()
+    output.close()
+
+
+def add_tool_files_to_repository( app, sa_session, tool ):
+    current_working_dir = os.getcwd()
+    # Get the repository to which the tool will be migrated
+    repo_name = tool.tool_id.lower().replace( ' ', '_' )
+    repository = get_repository_by_name( app, sa_session, repo_name )
+    repo_path = os.path.abspath( repository.repo_path )
+    # Get all valid versions of the tool
+    tool_versions = get_versions( app, tool )
+    for tool_version in tool_versions:
+        print "------------------------------"
+        print "Migrating tool '%s' version '%s' from archive to repository '%s'" % ( tool_version.tool_id, tool_version.version, repo_path )
+        # Make a temporary working directory
+        tmp_dir = tempfile.mkdtemp()
+        tmp_archive_dir = os.path.join( tmp_dir, 'tmp_archive_dir' )
+        if not os.path.exists( tmp_archive_dir ):
+            os.makedirs( tmp_archive_dir )
+        cmd = "hg clone %s" % repo_path
+        os.chdir( tmp_archive_dir )
+        os.system( cmd )
+        os.chdir( current_working_dir )
+        cloned_repo_dir = os.path.join( tmp_archive_dir, 'repo_%d' % repository.id )
+        # We want these change sets to be associated with the owner of the repository, so we'll
+        # set the HGUSER environment variable accordingly.  We do this because in the mercurial
+        # api, the default username to be used in commits is determined in this order: $HGUSER,
+        # [ui] section of hgrcs, $EMAIL and stop searching if one of these is set.
+        os.environ[ 'HGUSER' ] = repository.user.username
+        # Copy the tool archive to the tmp_archive_dir.  The src file cannot be derived from
+        # tool.file_name here because we have not loaded the Tool class in the model, so the
+        # tool.file_name defaults to /tmp/...
+        dir = os.path.join( app.config.file_path, 'tools', *directory_hash_id( tool_version.id ) )
+        src = os.path.abspath( os.path.join( dir, 'tool_%d.dat' % tool_version.id ) )
+        dst = os.path.join( tmp_archive_dir, tool_archive_file_name( tool_version, src ) )
+        shutil.copy( src, dst )
+        # Extract the archive to cloned_repo_dir
+        tarfile.open( dst ).extractall( path=cloned_repo_dir )
+        # Remove the archive
+        os.remove( dst )
+        # Change current working directory to the cloned repository
+        os.chdir( cloned_repo_dir )
+        for root, dirs, files in os.walk( cloned_repo_dir ):
+            if '.hg' in dirs:
+                # Don't visit .hg directories
+                dirs.remove( '.hg' )
+            if 'hgrc' in files:
+                # Don't include hgrc files in commit - should be impossible
+                # since we don't visit .hg dirs, but just in case...
+                files.remove( 'hgrc' )
+            for dir in dirs:
+                os.system( "hg add %s" % dir )
+            for name in files:
+                print "Adding file '%s' to cloned repository at %s" % ( name, str( os.getcwd() ) )
+                os.system( "hg add %s" % name )
+        print "Committing change set to cloned repository at %s" % str( os.getcwd() )
+        os.system( "hg commit -m 'Migrated tool version %s from old tool shed archive to new tool shed repository'" % tool_version.version )
+        print "Pushing changeset from cloned repository '%s' to repository '%s'" % ( cloned_repo_dir, repo_path )
+        cmd = "hg push %s" % repo_path
+        print "cmd is: ", cmd
+        os.system( cmd )
+        # The tool shed includes a repository source file browser, which currently depends upon
+        # copies of the hg repository file store in the repo_path for browsing.  We'll do the
+        # following to make these copies.
+        os.chdir( repo_path )
+        os.system( 'hg update' )
+        # Change the current working directory to the original
+        os.chdir( current_working_dir )
+        # Now that we have our new repository made current with all change sets,
+        # we'll create a hgrc file for it.
+        create_hgrc_file( repository )
+        # Remove tmp directory
+        shutil.rmtree( tmp_dir )
+
+
+def get_repository_by_name( app, sa_session, repo_name ):
+    """Get a repository from the database"""
+    return sa_session.query( app.model.Repository ).filter_by( name=repo_name ).one()
+
+
+def contains( containing_str, contained_str ):
+    return containing_str.lower().find( contained_str.lower() ) >= 0
+
+
+def tool_archive_extension( file_name ):
+    extension = None
+    head = open( file_name, 'rb' ).read( 4 )
+    try:
+        assert head[:3] == 'BZh'
+        assert int( head[-1] ) in range( 0, 10 )
+        extension = 'tar.bz2'
+    except AssertionError:
+        pass
+    if extension is None:
+        try:
+            assert head[:2] == '\037\213'
+            extension = 'tar.gz'
+        except AssertionError:
+            pass
+    if extension is None:
+        extension = 'tar'
+    return extension
+
+
+def tool_archive_file_name( tool, file_name ):
+    return '%s_%s.%s' % ( tool.tool_id, tool.version, tool_archive_extension( file_name ) )
+
+
+def main():
+    if len( sys.argv ) < 2:
+        print "Usage: python %s <Tool shed config file>" % sys.argv[0]
+        sys.exit( 0 )
+    now = strftime( "%Y-%m-%d %H:%M:%S" )
+    print " "
+    print "##########################################"
+    print "%s - Migrating current tool archives to new tool repositories" % now
+    # tool_shed_wsgi.ini file
+    ini_file = sys.argv[1]
+    conf_parser = ConfigParser.ConfigParser( {'here': os.getcwd()} )
+    conf_parser.read( ini_file )
+    try:
+        db_conn_str = conf_parser.get( "app:main", "database_connection" )
+    except ConfigParser.NoOptionError:
+        db_conn_str = conf_parser.get( "app:main", "database_file" )
+    print 'DB Connection: ', db_conn_str
+    # Instantiate app
+    configuration = {}
+    for key, value in conf_parser.items( "app:main" ):
+        configuration[key] = value
+    app = galaxy.webapps.tool_shed.app.UniverseApplication( global_conf=dict( __file__=ini_file ), **configuration )
+    sa_session = app.model.context
+    # Remove the hgweb.config file if it exists
+    hgweb_config = "%s/hgweb.config" % os.getcwd()
+    if os.path.exists( hgweb_config ):
+        print "Removing old file: ", hgweb_config
+        os.remove( hgweb_config )
+    repo_records = 0
+    rca_records = 0
+    rra_records = 0
+    for repo in sa_session.query( app.model.Repository ):
+        # Remove the hg repository from disk.  We have to be careful here, because old
+        # tool files exist in app.config.file_path/tools and we don't want to delete them
+        dir = os.path.join( app.config.file_path, *directory_hash_id( repo.id ) )
+        if os.path.exists( dir ):
+            print "Removing old repository file directory: ", dir
+            shutil.rmtree( dir )
+        # Delete all records from db tables:
+        # repository_category_association, repository_rating_association, repository
+        print "Deleting db records for repository: ", repo.name
+        for rca in repo.categories:
+            sa_session.delete( rca )
+            rca_records += 1
+        for rra in repo.ratings:
+            sa_session.delete( rra )
+            rra_records += 1
+        sa_session.delete( repo )
+        repo_records += 1
+    sa_session.flush()
+    print "Deleted %d rows from the repository table" % repo_records
+    print "Deleted %d rows from the repository_category_association table" % rca_records
+    print "Deleted %d rows from the repository_rating_association table" % rra_records
+    # Migrate database tool, tool category and tool rating records to new
+    # database repository, repository category and repository rating records
+    # and create the hg repository on disk for each.
+    for tool in get_approved_tools( app, sa_session ):
+        create_repository_from_tool( app, sa_session, tool )
+    # Add, commit and push all valid versions of each approved tool to the
+    # associated hg repository.
+    for tool in get_approved_tools( app, sa_session ):
+        add_tool_files_to_repository( app, sa_session, tool )
+    app.shutdown()
+    print ' '
+    print 'Migration to next gen tool shed complete...'
+    print "##########################################"
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/tool_shed/migrate_tools_to_repositories.sh b/scripts/tool_shed/migrate_tools_to_repositories.sh
new file mode 100644
index 0000000..e1108e2
--- /dev/null
+++ b/scripts/tool_shed/migrate_tools_to_repositories.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/tool_shed/migrate_tools_to_repositories.py ./community_wsgi.ini >> ./scripts/tool_shed/migrate_tools_to_repositories.log
diff --git a/scripts/tool_shed/show_tool_dependency_installation_dir_contents.py b/scripts/tool_shed/show_tool_dependency_installation_dir_contents.py
new file mode 100644
index 0000000..794738e
--- /dev/null
+++ b/scripts/tool_shed/show_tool_dependency_installation_dir_contents.py
@@ -0,0 +1,73 @@
+import argparse
+import os
+import sys
+
+sys.path.insert(1, os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, 'lib' ) )
+
+import galaxy.config as galaxy_config
+import galaxy.model
+import galaxy.model.tool_shed_install.mapping as install_mapper
+
+
+class CleanUpDependencyApplication( object ):
+    """Application that enables querying the database using the tool_shed_install model."""
+
+    def __init__( self, config ):
+        self.config = config
+        # Setup the database engine and ORM
+        self.model = install_mapper.init( self.config.database_connection, engine_options={}, create_tables=False )
+
+    @property
+    def sa_session( self ):
+        """Returns a SQLAlchemy session."""
+        return self.model.context.current
+
+    def shutdown( self ):
+        pass
+
+
+def main( args, app ):
+    if not os.path.exists( args.basepath ):
+        print 'Tool dependency base path %s does not exist.' % str( args.basepath )
+        return
+    print 'Checking tool dependency path %s' % args.basepath
+    tool_dependency_dirs = get_tool_dependency_dirs( app )
+    for tool_dependency_dir in tool_dependency_dirs:
+        path = os.path.join( args.basepath, tool_dependency_dir )
+        if os.path.exists( path ):
+            path_contents = os.listdir( path )
+            if len( path_contents ) > 0:
+                print 'Found non-empty tool dependency installation directory %s.' % path
+                print 'Directory has the following contents: \n   %s' % '\n   '.join( path_contents )
+
+
+def get_tool_dependency_dirs( app ):
+    dependency_paths = []
+    for tool_dependency in app.sa_session.query( galaxy.model.tool_shed_install.ToolDependency ).all():
+        dependency_paths.append( tool_dependency.installation_directory( app ) )
+    return dependency_paths
+
+
+if __name__ == '__main__':
+    description = 'List the contents of any non-empty tool dependency installation directory under the '
+    description += 'provided tool dependency path, using the database at the provided URI to determine '
+    description += 'which installation directories to check.'
+    parser = argparse.ArgumentParser( description=description )
+    parser.add_argument( '--basepath',
+                         dest='basepath',
+                         required=True,
+                         action='store',
+                         metavar='name',
+                         help='The base path where tool dependencies are installed.' )
+    parser.add_argument( '--dburi',
+                         dest='dburi',
+                         required=True,
+                         action='store',
+                         metavar='dburi',
+                         help='The database URI to connect to.' )
+    args = parser.parse_args()
+    database_connection = args.dburi
+    config_dict = dict( database_connection=database_connection, tool_dependency_dir=args.basepath )
+    config = galaxy_config.Configuration( **config_dict )
+    app = CleanUpDependencyApplication( config )
+    sys.exit( main( args, app ) )
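+
+# Example invocation (hypothetical path and database URI):
+#
+#     python scripts/tool_shed/show_tool_dependency_installation_dir_contents.py \
+#         --basepath /srv/galaxy/tool_dependencies \
+#         --dburi postgresql://galaxy:secret@localhost/toolshed_install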
diff --git a/scripts/tools/maf/check_loc_file.py b/scripts/tools/maf/check_loc_file.py
new file mode 100644
index 0000000..175f4fb
--- /dev/null
+++ b/scripts/tools/maf/check_loc_file.py
@@ -0,0 +1,56 @@
+# Dan Blankenberg
+# This script checks maf_index.loc file for inconsistencies between what is listed as available and what is really available.
+# Make sure that required dependencies (e.g. galaxy_root/lib) are included in your PYTHONPATH
+import bx.align.maf
+from galaxy.tools.util import maf_utilities
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    index_location_file = sys.argv[ 1 ]
+    for i, line in enumerate( open( index_location_file ) ):
+        i += 1  # report 1-based line numbers
+        try:
+            if line.startswith( '#' ):
+                continue
+            display_name, uid, indexed_for_species, species_exist, maf_files = line.rstrip().split('\t')
+            indexed_for_species = indexed_for_species.split( ',' )
+            species_exist = species_exist.split( ',' )
+            maf_files = maf_files.split( ',' )
+            species_indexed_in_maf = []
+            species_found_in_maf = []
+            for maf_file in maf_files:
+                indexed_maf = bx.align.maf.MAFIndexedAccess( maf_file, keep_open=True, parse_e_rows=False )
+                for key in indexed_maf.indexes.indexes.keys():
+                    spec = maf_utilities.src_split( key )[0]
+                    if spec not in species_indexed_in_maf:
+                        species_indexed_in_maf.append( spec )
+                while True:  # reading entire maf set will take some time
+                    block = indexed_maf.read_at_current_offset( indexed_maf.f )
+                    if block is None:
+                        break
+                    for comp in block.components:
+                        spec = maf_utilities.src_split( comp.src )[0]
+                        if spec not in species_found_in_maf:
+                            species_found_in_maf.append( spec )
+            # indexed species
+            for spec in indexed_for_species:
+                if spec not in species_indexed_in_maf:
+                    print "Line %i, %s claims to be indexed for %s, but indexes do not exist." % ( i, uid, spec )
+            for spec in species_indexed_in_maf:
+                if spec not in indexed_for_species:
+                    print "Line %i, %s is indexed for %s, but is not listed in loc file." % ( i, uid, spec )
+            # existing species
+            for spec in species_exist:
+                if spec not in species_found_in_maf:
+                    print "Line %i, %s claims to have blocks for %s, but was not found in MAF files." % ( i, uid, spec )
+            for spec in species_found_in_maf:
+                if spec not in species_exist:
+                    print "Line %i, %s contains %s, but is not listed in loc file." % ( i, uid, spec )
+        except Exception as e:
+            print "Line %i is invalid: %s" % ( i, e )
+
+
+if __name__ == "__main__":
+    __main__()
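+
+# A hypothetical maf_index.loc line in the five-column, tab-separated format
+# this script parses (display_name, uid, indexed_for_species, species_exist,
+# maf_files; <TAB> stands for a literal tab):
+#
+#     17-way multiZ (chr1)<TAB>17_WAY_MULTIZ_chr1<TAB>hg18,mm8<TAB>hg18,mm8,rn4<TAB>/depot/data/chr1.maf,/depot/data/chr2.maf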
diff --git a/scripts/tools/re_escape_output.py b/scripts/tools/re_escape_output.py
new file mode 100644
index 0000000..69508c2
--- /dev/null
+++ b/scripts/tools/re_escape_output.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+"""
+Escapes a file into a form suitable for use with tool tests using re_match or re_match_multiline (when -m/--multiline option is used)
+
+usage: re_escape_output.py [options] input_file [output_file]
+    -m: Use Multiline Matching
+"""
+import optparse
+import re
+
+
+def __main__():
+    parser = optparse.OptionParser()
+    parser.add_option( "-m", "--multiline", action="store_true", dest="multiline", default=False, help="Use Multiline Matching")
+    ( options, args ) = parser.parse_args()
+    input = open( args[0], 'rb' )
+    if len( args ) > 1:
+        output = open( args[1], 'wb' )
+    else:
+        if options.multiline:
+            suffix = 're_match_multiline'
+        else:
+            suffix = 're_match'
+        output = open( "%s.%s" % ( args[0], suffix ), 'wb' )
+    if options.multiline:
+        lines = [ re.escape( input.read() ) ]
+    else:
+        lines = [ "%s\n" % re.escape( line.rstrip( '\n\r' ) ) for line in input ]
+    output.writelines( lines )
+    output.close()
+
+
+if __name__ == "__main__":
+    __main__()
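+
+# For example (hypothetical file name), escaping a multi-line expected output:
+#
+#     python scripts/tools/re_escape_output.py -m expected_output.txt
+#
+# writes the escaped pattern to expected_output.txt.re_match_multiline.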
diff --git a/scripts/transfer.py b/scripts/transfer.py
new file mode 100644
index 0000000..3ac085b
--- /dev/null
+++ b/scripts/transfer.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python
+"""
+Downloads files to temp locations.  This script is invoked by the Transfer
+Manager (galaxy.jobs.transfer_manager) and should not normally be invoked by
+hand.
+"""
+import ConfigParser
+import json
+import logging
+import optparse
+import os
+import random
+import SocketServer
+import sys
+import tempfile
+import threading
+import time
+import urllib2
+
+try:
+    import pexpect
+except ImportError:
+    pexpect = None
+
+from daemon import DaemonContext
+from sqlalchemy import create_engine, MetaData, Table
+from sqlalchemy.orm import scoped_session, sessionmaker
+
+galaxy_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir ) )
+sys.path.insert( 1, os.path.join( galaxy_root, 'lib' ) )
+
+import galaxy.model
+from galaxy.util import bunch
+from galaxy.util.json import jsonrpc_response, validate_jsonrpc_request
+
+PEXPECT_IMPORT_MESSAGE = ('The Python pexpect package is required to use this '
+                          'feature, please install it')
+
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+log.addHandler( handler )
+
+debug = False
+slow = False
+
+
+class ArgHandler( object ):
+    """
+    Collect command line flags.
+    """
+    def __init__( self ):
+        self.parser = optparse.OptionParser()
+        self.parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (config/galaxy.ini)',
+                                default=os.path.abspath( os.path.join( galaxy_root, 'config/galaxy.ini' ) ) )
+        self.parser.add_option( '-d', '--debug', action='store_true', dest='debug', help="Debug (don't detach)" )
+        self.parser.add_option( '-s', '--slow', action='store_true', dest='slow', help="Transfer slowly (for debugging)" )
+        self.opts = None
+
+    def parse( self ):
+        self.opts, args = self.parser.parse_args()
+        if len( args ) != 1:
+            log.error( 'usage: transfer.py <transfer job id>' )
+            sys.exit( 1 )
+        try:
+            self.transfer_job_id = int( args[0] )
+        except ValueError:
+            log.error( 'The provided transfer job ID is not an integer: %s' % args[0] )
+            sys.exit( 1 )
+        if self.opts.debug:
+            global debug
+            debug = True
+            log.setLevel( logging.DEBUG )
+        if self.opts.slow:
+            global slow
+            slow = True
+
+
+class GalaxyApp( object ):
+    """
+    A shell Galaxy App to provide access to the Galaxy configuration and
+    model/database.
+    """
+    def __init__( self, config_file ):
+        self.config = ConfigParser.ConfigParser( dict( database_file='database/universe.sqlite',
+                                                       file_path='database/files',
+                                                       transfer_worker_port_range='12275-12675',
+                                                       transfer_worker_log=None ) )
+        self.config.read( config_file )
+        self.model = bunch.Bunch()
+        self.connect_database()
+
+    def connect_database( self ):
+        # Avoid loading the entire model since doing so is exceptionally slow
+        default_dburl = 'sqlite:///%s?isolation_level=IMMEDIATE' % self.config.get( 'app:main', 'database_file' )
+        try:
+            dburl = self.config.get( 'app:main', 'database_connection' )
+        except ConfigParser.NoOptionError:
+            dburl = default_dburl
+        engine = create_engine( dburl )
+        metadata = MetaData( engine )
+        self.sa_session = scoped_session( sessionmaker( bind=engine, autoflush=False, autocommit=True ) )
+        self.model.TransferJob = galaxy.model.TransferJob
+        self.model.TransferJob.table = Table( "transfer_job", metadata, autoload=True )
+
+    def get_transfer_job( self, id ):
+        return self.sa_session.query( self.model.TransferJob ).get( int( id ) )
+
+
+class ListenerServer( SocketServer.ThreadingTCPServer ):
+    """
+    The listener will accept state requests and new transfers for as long as
+    the manager is running.
+    """
+    def __init__( self, port_range, RequestHandlerClass, app, transfer_job, state_result ):
+        self.state_result = state_result
+        # Try random ports until a free one is found
+        while True:
+            random_port = random.choice( port_range )
+            try:
+                SocketServer.ThreadingTCPServer.__init__( self, ( 'localhost', random_port ), RequestHandlerClass )
+                log.info( 'Listening on port %s' % random_port )
+                break
+            except Exception as e:
+                log.warning( 'Tried binding port %s: %s' % ( random_port, str( e ) ) )
+        transfer_job.socket = random_port
+        app.sa_session.add( transfer_job )
+        app.sa_session.flush()
+
+
+class ListenerRequestHandler( SocketServer.BaseRequestHandler ):
+    """
+    Handle state or transfer requests received on the socket.
+    """
+    def handle( self ):
+        request = self.request.recv( 8192 )
+        response = {}
+        valid, request, response = validate_jsonrpc_request( request, ( 'get_state', ), () )
+        if valid:
+            self.request.send( json.dumps( jsonrpc_response( request=request, result=self.server.state_result.result ) ) )
+        else:
+            error_msg = 'Unable to serve request: %s' % response['error']['message']
+            if 'data' in response['error']:
+                error_msg += ': %s' % response['error']['data']
+            log.error( error_msg )
+            log.debug( 'Original request was: %s' % request )
+
+
+class StateResult( object ):
+    """
+    A mutable container for the 'result' portion of JSON-RPC responses to state requests.
+    """
+    def __init__( self, result=None ):
+        self.result = result
+
+
+def transfer( app, transfer_job_id ):
+    transfer_job = app.get_transfer_job( transfer_job_id )
+    if transfer_job is None:
+        log.error( 'Invalid transfer job ID: %s' % transfer_job_id )
+        return False
+    port_range = app.config.get( 'app:main', 'transfer_worker_port_range' )
+    try:
+        port_range = [ int( p ) for p in port_range.split( '-' ) ]
+    except Exception as e:
+        log.error( 'Invalid port range set in transfer_worker_port_range: %s: %s' % ( port_range, str( e ) ) )
+        return False
+    protocol = transfer_job.params[ 'protocol' ]
+    if protocol not in ( 'http', 'https', 'scp' ):
+        log.error( 'Unsupported protocol: %s' % protocol )
+        return False
+    state_result = StateResult( result=dict( state=transfer_job.states.RUNNING, info='Transfer process starting up.' ) )
+    listener_server = ListenerServer( range( port_range[0], port_range[1] + 1 ), ListenerRequestHandler, app, transfer_job, state_result )
+    # daemonize here (if desired)
+    if not debug:
+        daemon_context = DaemonContext( files_preserve=[ listener_server.fileno() ], working_directory=os.getcwd() )
+        daemon_context.open()
+        # If this fails, it'll never be detected.  Hopefully it won't fail since it succeeded once.
+        app.connect_database()  # daemon closed the database fd
+        transfer_job = app.get_transfer_job( transfer_job_id )
+    listener_thread = threading.Thread( target=listener_server.serve_forever )
+    listener_thread.setDaemon( True )
+    listener_thread.start()
+    # Store this process' pid so unhandled deaths can be handled by the restarter
+    transfer_job.pid = os.getpid()
+    app.sa_session.add( transfer_job )
+    app.sa_session.flush()
+    terminal_state = None
+    if protocol in [ 'http', 'https' ]:
+        for transfer_result_dict in http_transfer( transfer_job ):
+            state_result.result = transfer_result_dict
+            if transfer_result_dict[ 'state' ] in transfer_job.terminal_states:
+                terminal_state = transfer_result_dict
+    elif protocol in [ 'scp' ]:
+        # Transfer the file using scp
+        transfer_result_dict = scp_transfer( transfer_job )
+        # Handle the state of the transfer
+        state = transfer_result_dict[ 'state' ]
+        state_result.result = transfer_result_dict
+        if state in transfer_job.terminal_states:
+            terminal_state = transfer_result_dict
+    if terminal_state is not None:
+        transfer_job.state = terminal_state[ 'state' ]
+        for name in [ 'info', 'path' ]:
+            if name in terminal_state:
+                setattr( transfer_job, name, terminal_state[ name ] )
+    else:
+        transfer_job.state = transfer_job.states.ERROR
+        transfer_job.info = 'Unknown error encountered by transfer worker.'
+    app.sa_session.add( transfer_job )
+    app.sa_session.flush()
+    return True
+
+
+def http_transfer( transfer_job ):
+    """Plugin" for handling http(s) transfers."""
+    url = transfer_job.params['url']
+    try:
+        f = urllib2.urlopen( url )
+    except urllib2.URLError as e:
+        yield dict( state=transfer_job.states.ERROR, info='Unable to open URL: %s' % str( e ) )
+        return
+    size = f.info().getheader( 'Content-Length' )
+    if size is not None:
+        size = int( size )
+    chunksize = 1024 * 1024
+    if slow:
+        chunksize = 1024
+    read = 0
+    last = 0
+    try:
+        fh, fn = tempfile.mkstemp()
+    except Exception as e:
+        yield dict( state=transfer_job.states.ERROR, info='Unable to create temporary file for transfer: %s' % str( e ) )
+        return
+    log.debug( 'Writing %s to %s, size is %s' % ( url, fn, size or 'unknown' ) )
+    try:
+        while True:
+            chunk = f.read( chunksize )
+            if not chunk:
+                break
+            os.write( fh, chunk )
+            read += len( chunk )
+            if size is not None and read < size:
+                percent = int( float( read ) / size * 100 )
+                if percent != last:
+                    yield dict( state=transfer_job.states.PROGRESS, read=read, percent='%s' % percent )
+                    last = percent
+            elif size is None:
+                yield dict( state=transfer_job.states.PROGRESS, read=read )
+            if slow:
+                time.sleep( 1 )
+        os.close( fh )
+        yield dict( state=transfer_job.states.DONE, path=fn )
+    except Exception as e:
+        yield dict( state=transfer_job.states.ERROR, info='Error during file transfer: %s' % str( e ) )
+        return
+    return
+
+
+def scp_transfer( transfer_job ):
+    """Plugin" for handling scp transfers using pexpect"""
+    def print_ticks( d ):
+        pass
+    host = transfer_job.params[ 'host' ]
+    user_name = transfer_job.params[ 'user_name' ]
+    password = transfer_job.params[ 'password' ]
+    file_path = transfer_job.params[ 'file_path' ]
+    if pexpect is None:
+        return dict( state=transfer_job.states.ERROR, info=PEXPECT_IMPORT_MESSAGE )
+    try:
+        fh, fn = tempfile.mkstemp()
+    except Exception as e:
+        return dict( state=transfer_job.states.ERROR, info='Unable to create temporary file for transfer: %s' % str( e ) )
+    try:
+        # TODO: add the ability to determine progress of the copy here like we do in the http_transfer above.
+        cmd = "scp %s@%s:'%s' '%s'" % ( user_name,
+                                        host,
+                                        file_path.replace( ' ', '\\ ' ),
+                                        fn )
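+        # The events dict answers any output matching '.ssword:*' (i.e. a
+        # 'password:'/'Password:' prompt) with the stored password; on TIMEOUT
+        # pexpect calls the no-op print_ticks and keeps waiting for the copy.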
+        pexpect.run( cmd, events={ '.ssword:*': password + '\r\n',
+                                   pexpect.TIMEOUT: print_ticks },
+                     timeout=10 )
+        return dict( state=transfer_job.states.DONE, path=fn )
+    except Exception as e:
+        return dict( state=transfer_job.states.ERROR, info='Error during file transfer: %s' % str( e ) )
+
+
+if __name__ == '__main__':
+    arg_handler = ArgHandler()
+    arg_handler.parse()
+    app = GalaxyApp( arg_handler.opts.config )
+
+    log.debug( 'Initiating transfer...' )
+    if transfer( app, arg_handler.transfer_job_id ):
+        log.debug( 'Finished' )
+    else:
+        log.error( 'Error in transfer process...' )
+        sys.exit( 1 )
+    sys.exit( 0 )
diff --git a/scripts/update_shed_config_path.py b/scripts/update_shed_config_path.py
new file mode 100644
index 0000000..5b34f77
--- /dev/null
+++ b/scripts/update_shed_config_path.py
@@ -0,0 +1,60 @@
+import argparse
+import ConfigParser
+import os
+import sys
+
+from sqlalchemy import create_engine, MetaData
+from sqlalchemy.orm import scoped_session, sessionmaker
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+import galaxy.model.tool_shed_install.mapping as mapping
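+
+# Example invocation (the paths below are illustrative):
+#   python scripts/update_shed_config_path.py --config_file config/galaxy.ini \
+#       --from ./shed_tool_conf.xml --to /srv/galaxy/config/shed_tool_conf.xml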
+
+
+def main( opts, session, model ):
+    '''
+    Find all tool shed repositories with the bad path and update with the correct path.
+    '''
+    for row in session.query( model.ToolShedRepository ).all():
+        if 'shed_config_filename' in row.metadata:
+            if row.metadata['shed_config_filename'] == opts.bad_filename:
+                row.metadata['shed_config_filename'] = opts.good_filename
+                session.add( row )
+                session.flush()
+    return 0
+
+
+def create_database( config_file ):
+    parser = ConfigParser.SafeConfigParser()
+    parser.read( config_file )
+    # Determine which database connection to use, preferring the dedicated
+    # install database, then the main database, then a default SQLite file.
+    # (SafeConfigParser.get() raises NoOptionError rather than returning None
+    # for missing options, so guard each lookup with has_option().)
+    database_connection = None
+    if parser.has_option( 'app:main', 'install_database_connection' ):
+        database_connection = parser.get( 'app:main', 'install_database_connection' )
+    if database_connection is None and parser.has_option( 'app:main', 'database_connection' ):
+        database_connection = parser.get( 'app:main', 'database_connection' )
+    if database_connection is None and parser.has_option( 'app:main', 'database_file' ):
+        database_connection = 'sqlite:///%s' % parser.get( 'app:main', 'database_file' )
+    if database_connection is None:
+        print 'Unable to determine correct database connection.'
+        exit(1)
+
+    # Initialize the database connection and session.
+    engine = create_engine( database_connection )
+    MetaData( bind=engine )
+    install_session = scoped_session( sessionmaker( bind=engine, autoflush=False, autocommit=True ) )
+    model = mapping.init( database_connection )
+    return install_session, model
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument( '--config_file', dest='config_file', required=True, help="The path to your Galaxy configuration .ini file." )
+    parser.add_argument( '--from', dest='bad_filename', required=True, help="The old, invalid path to the shed_tool_conf.xml or migrated_tools_conf.xml file." )
+    parser.add_argument( '--to', dest='good_filename', required=True, help="The updated path to the shed_tool_conf.xml or migrated_tools_conf.xml file." )
+    parser.add_argument( '--force', dest='force', action='store_true', help="Use this flag to set the new path even if the file does not (yet) exist there." )
+    opts = parser.parse_args()
+    if not os.path.exists( opts.good_filename ) and not opts.force:
+        print 'The file %s does not exist, use the --force option to proceed.' % opts.good_filename
+        exit(1)
+    session, model = create_database( opts.config_file )
+    exit( main( opts, session, model ) )
diff --git a/scripts/validate_tools.sh b/scripts/validate_tools.sh
new file mode 100755
index 0000000..df7bfde
--- /dev/null
+++ b/scripts/validate_tools.sh
@@ -0,0 +1,35 @@
+#!/bin/sh
+
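+# Validate the given tool XML files against the Galaxy tool XSD, printing
+# TAP-style "ok"/"not ok" lines.  Example usage (the path is illustrative):
+#   ./scripts/validate_tools.sh tools/filters/catWrapper.xml
+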
+cd `dirname $0`/..
+
+./scripts/common_startup.sh
+GALAXY_VIRTUAL_ENV="${GALAXY_VIRTUAL_ENV:-.venv}"
+if [ -d "$GALAXY_VIRTUAL_ENV" ];
+then
+    printf "Activating virtualenv at %s\n" "$GALAXY_VIRTUAL_ENV"
+    . "$GALAXY_VIRTUAL_ENV/bin/activate"
+fi
+
+xsd_path="lib/galaxy/tools/xsd/galaxy.xsd"
+
+err_tmp=`mktemp`
+count=0
+exit=0
+for p in "$@"; do
+    count=$((count+1))
+    path="$p"
+    echo $path
+    PYTHONPATH=lib:$PYTHONPATH
+    export PYTHONPATH
+    result=`python -c "import galaxy.tools.loader; import xml.etree.ElementTree; xml.etree.ElementTree.dump(galaxy.tools.loader.load_tool('$path').getroot())" | xmllint --nowarning --noout --schema "$xsd_path" - 2> "$err_tmp"`
+    if [ $? -eq 0 ]
+    then
+        echo "ok $count";
+    else
+        echo "not ok $count $path";
+        cat "$err_tmp" | sed 's/^/    /'
+        exit=1
+    fi
+done
+rm "$err_tmp"
+exit $exit
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..b73b6ec
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,15 @@
+[flake8]
+# These are exceptions allowed by Galaxy style guidelines.
+# E128 continuation line under-indented for visual indent
+# E201 and E202 are whitespace after '(' and before ')'
+# E203 whitespace before ':'
+# E402 module level import not at top of file  # TODO: we would like to improve this.
+# E501 is line length
+# W503 is line break before a binary operator, which PEP 8 has since reversed.
+# D* are docstring linting errors - we mostly ignore these except D302. (Hopefully we will solve more over time.)
+ignore = E128,E201,E202,E203,E501,E402,W503,D100,D101,D102,D103,D104,D105,D200,D201,D202,D204,D205,D206,D207,D208,D209,D210,D211,D300,D301,D400,D401,D402,D403
+exclude = lib/galaxy/util/jstree.py
+# For flake8-import-order
+# https://github.com/PyCQA/flake8-import-order/blob/master/tests/test_cases/complete_smarkets.py
+import-order-style = smarkets
+application-import-names = galaxy,tool_shed
diff --git a/static/favicon.ico b/static/favicon.ico
new file mode 100644
index 0000000..cf52fdc
Binary files /dev/null and b/static/favicon.ico differ
diff --git a/static/formatHelp.html b/static/formatHelp.html
new file mode 100644
index 0000000..84749c8
--- /dev/null
+++ b/static/formatHelp.html
@@ -0,0 +1,665 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+	"http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<title>Galaxy Data Formats</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+<meta http-equiv="Content-Style-Type" content="text/css">
+<style type="text/css">
+	hr  { margin-top: 3ex; margin-bottom: 1ex; border: 1px inset }
+</style>
+</head>
+<body>
+<h2>Galaxy Data Formats</h2>
+<p>
+<br>
+
+<h3>Dataset missing?</h3>
+<p>
+If you have a dataset in your history that is not appearing in the
+drop-down selector for a tool, the most common reason is that it has
+the wrong format.  Each Galaxy dataset has an associated file format
+recorded in its metadata, and tools will only list datasets from your
+history that have a format compatible with that particular tool.  Of
+course some of these datasets might not actually contain relevant
+data, or even the correct columns needed by the tool, but filtering
+by format at least makes the list to select from a bit shorter.
+<p>
+Some of the formats are defined hierarchically, going from very
+general ones like <a href="#tab">Tabular</a> (which includes any text
+file with tab-separated columns), to more restrictive sub-formats
+like <a href="#interval">Interval</a> (where three of the columns
+must be the chromosome, start position, and end position), and on
+to even more specific ones such as <a href="#bed">BED</a> that have
+additional requirements.  So for example if a tool's required input
+format is Tabular, then all of your history items whose format is
+recorded as Tabular will be listed, along with those in all
+sub-formats that also qualify as Tabular (Interval, BED, GFF, etc.).
+<p>
+There are two usual methods for changing a dataset's format in
+Galaxy: if the file contents are already in the required format but
+the metadata is wrong (perhaps because the Auto-detect feature of the
+Upload File tool guessed it incorrectly), you can fix the metadata
+manually by clicking on the pencil icon beside that dataset in your
+history.  Or, if the file contents really are in a different format,
+Galaxy provides a number of format conversion tools (e.g. in the
+Text Manipulation and Convert Formats categories).  For instance,
+if the tool you want to run requires Tabular but your columns are
+delimited by spaces or commas, you can use the "Convert delimiters
+to TAB" tool under Text Manipulation to reformat your data.  However
+if your files are in a completely unsupported format, then you need
+to convert them yourself before uploading.
+<p>
+<hr>
+
+<h3>Format Descriptions</h3>
+<ul>
+<li><a href="#ab1">AB1</a>
+<li><a href="#axt">AXT</a>
+<li><a href="#bam">BAM</a>
+<li><a href="#bed">BED</a>
+<li><a href="#bedgraph">BedGraph</a>
+<li><a href="#binseq">Binseq.zip</a>
+<li><a href="#fasta">FASTA</a>
+<li><a href="#fastqsolexa">FastqSolexa</a>
+<li><a href="#fped">FPED</a>
+<li><a href="#gd_indivs">gd_indivs</a>
+<li><a href="#gd_ped">gd_ped</a>
+<li><a href="#gd_sap">gd_sap</a>
+<li><a href="#gd_snp">gd_snp</a>
+<li><a href="#gff">GFF</a>
+<li><a href="#gff3">GFF3</a>
+<li><a href="#gtf">GTF</a>
+<li><a href="#html">HTML</a>
+<li><a href="#interval">Interval</a>
+<li><a href="#lav">LAV</a>
+<li><a href="#lped">LPED</a>
+<li><a href="#maf">MAF</a>
+<li><a href="#mastervar">MasterVar</a>
+<li><a href="#pbed">PBED</a>
+<li><a href="#pgSnp">pgSnp</a>
+<li><a href="#psl">PSL</a>
+<li><a href="#scf">SCF</a>
+<li><a href="#sff">SFF</a>
+<li><a href="#table">Table</a>
+<li><a href="#tab">Tabular</a>
+<li><a href="#txtseqzip">Txtseq.zip</a>
+<li><a href="#vcf">VCF</a>
+<li><a href="#wig">Wiggle custom track</a>
+<li><a href="#text">Other text type</a>
+</ul>
+<p>
+
+<div><a name="ab1"></a></div>
+<hr>
+<strong>AB1</strong>
+<p>
+This is one of the ABIF family of binary sequence formats from
+Applied Biosystems Inc.
+<!-- Their PDF
+<a href="http://www.appliedbiosystems.com/support/software_community/ABIF_File_Format.pdf"
+>format specification</a> is unfortunately password-protected. -->
+Files should have a '<code>.ab1</code>' file extension.  You must
+manually select this file format when uploading the file.
+<p>
+
+<div><a name="axt"></a></div>
+<hr>
+<strong>AXT</strong>
+<p>
+Used for pairwise alignment output from BLASTZ, after post-processing.
+Each alignment block contains three lines: a summary line and two
+sequence lines.  Blocks are separated from one another by blank lines.
+The summary line contains chromosomal position and size information
+about the alignment, and consists of nine required fields.
+<a href="http://main.genome-browser.bx.psu.edu/goldenPath/help/axt.html"
+>More information</a>
+<!-- (not available on Main)
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>FASTA<br>
+Convert Formats → AXT to FASTA
+<li>LAV<br>
+Convert Formats → AXT to LAV
+</ul></dl>
+-->
+<p>
+
+<div><a name="bam"></a></div>
+<hr>
+<strong>BAM</strong>
+<p>
+A binary alignment file compressed in the BGZF format with a
+'<code>.bam</code>' file extension.
+<!-- You must manually select this file format when uploading the file. -->
+<a href="http://samtools.sourceforge.net/SAM1.pdf">SAM</a>
+is the human-readable text version of this format.
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>SAM<br>
+NGS: SAM Tools → BAM-to-SAM
+<li>Pileup<br>
+NGS: SAM Tools → Generate pileup
+<li>Interval<br>
+First convert to Pileup as above, then use
+NGS: SAM Tools → Pileup-to-Interval
+</ul></dl>
+<p>
+
+<div><a name="bed"></a></div>
+<hr>
+<strong>BED</strong>
+<p>
+<ul>
+<li> also qualifies as Tabular
+<li> also qualifies as Interval
+</ul>
+This tab-separated format describes a genomic interval, but has
+strict field specifications for use in genome browsers.  BED files
+can have from 3 to 12 columns, but the order of the columns matters,
+and only the end ones can be omitted.  Some groups of columns must
+be all present or all absent.  As in Interval format (but unlike
+GFF and its relatives), the interval endpoints use a 0-based,
+half-open numbering system.
+<a href="http://main.genome-browser.bx.psu.edu/goldenPath/help/hgTracksHelp.html#BED"
+>Field specifications</a>
+<p>
+Example:
+<pre>
+chr22 1000 5000 cloneA 960 + 1000 5000 0 2 567,488, 0,3512
+chr22 2000 6000 cloneB 900 - 2000 6000 0 2 433,399, 0,3601
+</pre>
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>GFF<br>
+Convert Formats → BED-to-GFF
+</ul></dl>
+<p>
+
+<div><a name="bedgraph"></a></div>
+<hr>
+<strong>BedGraph</strong>
+<p>
+<ul>
+<li> also qualifies as Tabular
+<li> also qualifies as Interval
+<li> also qualifies as BED
+</ul>
+<a href="http://main.genome-browser.bx.psu.edu/goldenPath/help/bedgraph.html"
+>BedGraph</a> is a BED file with the name column being a float value
+that is displayed as a wiggle score in tracks.  Unlike in Wiggle
+format, the exact value of this score can be retrieved after being
+loaded as a track.
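+<p>
+An illustrative BedGraph line (chromosome, start, end, value):
+<pre>
+chr19 49302000 49302300 -1.0
+</pre>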
+<p>
+
+<div><a name="binseq"></a></div>
+<hr>
+<strong>Binseq.zip</strong>
+<p>
+A zipped archive consisting of binary sequence files in either AB1
+or SCF format.  All files in this archive must have the same file
+extension which is one of '<code>.ab1</code>' or '<code>.scf</code>'.
+You must manually select this file format when uploading the file.
+<p>
+
+<div><a name="fasta"></a></div>
+<hr>
+<strong>FASTA</strong>
+<p>
+A sequence in 
+<a href="http://www.ncbi.nlm.nih.gov/blast/fasta.shtml">FASTA</a>
+format consists of a single-line description, followed by lines of
+sequence data.  The first character of the description line is a
+greater-than ('<code>></code>') symbol.  All lines should be
+shorter than 80 characters.
+<pre>
+>sequence1
+atgcgtttgcgtgc
+gtcggtttcgttgc
+>sequence2
+tttcgtgcgtatag
+tggcgcggtga
+</pre>
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>Tabular<br>
+Convert Formats → FASTA-to-Tabular
+</ul></dl>
+<p>
+
+<div><a name="fastqsolexa"></a></div>
+<hr>
+<strong>FastqSolexa</strong>
+<p>
+<a href="http://maq.sourceforge.net/fastq.shtml">FastqSolexa</a>
+is the Illumina (Solexa) variant of the FASTQ format, which stores
+sequences and quality scores in a single file.
+<pre>
+@seq1
+GACAGCTTGGTTTTTAGTGAGTTGTTCCTTTCTTT
++seq1
+hhhhhhhhhhhhhhhhhhhhhhhhhhPW@hhhhhh
+@seq2
+GCAATGACGGCAGCAATAAACTCAACAGGTGCTGG
++seq2
+hhhhhhhhhhhhhhYhhahhhhWhAhFhSIJGChO
+</pre>
+Or 
+<pre>
+@seq1
+GAATTGATCAGGACATAGGACAACTGTAGGCACCAT
++seq1
+40 40 40 40 35 40 40 40 25 40 40 26 40 9 33 11 40 35 17 40 40 33 40 7 9 15 3 22 15 30 11 17 9 4 9 4
+@seq2
+GAGTTCTCGTCGCCTGTAGGCACCATCAATCGTATG
++seq2
+40 15 40 17 6 36 40 40 40 25 40 9 35 33 40 14 14 18 15 17 19 28 31 4 24 18 27 14 15 18 2 8 12 8 11 9
+</pre>
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>FASTA<br>
+NGS: QC and manipulation → Generic FASTQ manipulation → FASTQ to FASTA
+<li>Tabular<br>
+NGS: QC and manipulation → Generic FASTQ manipulation → FASTQ to Tabular
+</ul></dl>
+<p>
+
+<div><a name="fped"></a></div>
+<hr>
+<strong>FPED</strong>
+<p>
+Also known as the FBAT format, for use with the
+<a href="http://biosun1.harvard.edu/~fbat/fbat.htm">FBAT</a> program.
+It consists of a pedigree file and a phenotype file.
+<p>
+
+<div><a name="gd_indivs"></a></div>
+<hr>
+<strong>gd_indivs</strong>
+<p>
+This format is a tabular file with the first column being the column number
+(1 based)
+from the gd_snp file where the individual/group starts.  The second column is
+the label from the metadata for the individual/group.  The third is an alias
+or blank.
+<p>
+
+<div><a name="gd_sap"></a></div>
+<hr>
+<strong>gd_sap</strong>
+<p>
+This is a tabular file describing single amino-acid polymorphisms (SAPs).
+You must manually select this file format when uploading the file.
+<!--
+<a href="http://www.bx.psu.edu/miller_lab/docs/formats/gd_sap_format.html"
+>Field specifications</a>
+-->
+<p>
+
+<div><a name="gd_snp"></a></div>
+<hr>
+<strong>gd_snp</strong>
+<p>
+This is a tabular file describing SNPs in individuals or populations.
+It contains the zero-based position of the SNP, but not the range
+required by BED or Interval, so it cannot be used in Genomic Operations
+without adding a column for the end position.
+You must manually select this file format when uploading the file.
+<a href="http://www.bx.psu.edu/miller_lab/docs/formats/gd_snp_format.html"
+>Field specifications</a>
+<p>
+
+<div><a name="gff"></a></div>
+<hr>
+<strong>GFF</strong>
+<p>
+<ul>
+<li> also qualifies as Tabular
+</ul>
+GFF is a tab-separated format somewhat similar to BED, but it has
+different columns and is more flexible.  There are
+<a href="http://main.genome-browser.bx.psu.edu/FAQ/FAQformat#format3"
+>nine required fields</a>.
+Note that unlike Interval and BED, GFF and its relatives (GFF3, GTF)
+use 1-based inclusive coordinates to specify genomic intervals.
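+<p>
+An illustrative GFF line (the nine tab-separated fields are seqname,
+source, feature, start, end, score, strand, frame, and group):
+<pre>
+chr22	TeleGene	enhancer	1000000	1001000	500	+	.	touch1
+</pre>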
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>BED<br>
+Convert Formats → GFF-to-BED
+</ul></dl>
+<p>
+
+<div><a name="gff3"></a></div>
+<hr>
+<strong>GFF3</strong>
+<p>
+<ul>
+<li> also qualifies as Tabular
+</ul>
+The <a href="http://www.sequenceontology.org/gff3.shtml">GFF3</a>
+format addresses the most common extensions to GFF, while attempting
+to preserve compatibility with previous formats.
+Note that unlike Interval and BED, GFF and its relatives (GFF3, GTF)
+use 1-based inclusive coordinates to specify genomic intervals.
+<p>
+
+<div><a name="gtf"></a></div>
+<hr>
+<strong>GTF</strong>
+<p>
+<ul>
+<li> also qualifies as Tabular
+</ul>
+<a href="http://main.genome-browser.bx.psu.edu/FAQ/FAQformat#format4"
+>GTF</a> is a format for describing genes and other features associated
+with DNA, RNA, and protein sequences.  It is a refinement to GFF that
+tightens the specification.
+Note that unlike Interval and BED, GFF and its relatives (GFF3, GTF)
+use 1-based inclusive coordinates to specify genomic intervals.
+<!-- (not available on Main)
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>BedGraph<br>
+Convert Formats → GTF-to-BEDGraph
+</ul></dl>
+-->
+<p>
+
+<div><a name="html"></a></div>
+<hr>
+<strong>HTML</strong>
+<p>
+This format is an HTML web page.  Click the eye icon next to the
+dataset to view it in your browser.
+<p>
+
+<div><a name="interval"></a></div>
+<hr>
+<strong>Interval</strong>
+<p>
+<ul>
+<li> also qualifies as Tabular
+</ul>
+This Galaxy format represents genomic intervals.  It is tab-separated,
+but has the added requirement that three of the columns must be the
+chromosome name, start position, and end position, where the positions
+use a 0-based, half-open numbering system (see below).  An optional
+strand column can also be specified, and an initial header row can
+be used to label the columns, which do not have to be in any special
+order.  Arbitrary additional columns can also be present.
+<p>
+Required fields:
+<ul>
+<li>CHROM - The name of the chromosome (e.g. chr3, chrY, chr2_random)
+    or contig (e.g. ctgY1).
+<li>START - The starting position of the feature in the chromosome or
+    contig.  The first base in a chromosome is numbered 0.
+<li>END - The ending position of the feature in the chromosome or
+    contig.  This base is not included in the feature.  For example,
+    the first 100 bases of a chromosome are described as START=0,
+    END=100, and span the bases numbered 0-99.
+</ul>
+Optional:
+<ul>
+<li>STRAND - Defines the strand, either '<code>+</code>' or
+'<code>-</code>'.
+<li>Header row
+</ul>
+Example:
+<pre>
+    #CHROM  START  END    STRAND  NAME  COMMENT
+    chr1    10     100    +       exon  myExon
+    chrX    1000   10050  -       gene  myGene
+</pre>
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>BED<br>
+The exact changes needed and tools to run will vary with what fields
+are in the Interval file and what type of BED you are converting to.
+In general you will likely use Text Manipulation → Compute, Cut,
+or Merge Columns.
+</ul></dl>
+<p>
+
+<div><a name="lav"></a></div>
+<hr>
+<strong>LAV</strong>
+<p>
+<a href="http://www.bx.psu.edu/miller_lab/dist/lav_format.html">LAV</a>
+is the raw pairwise alignment format that is output by BLASTZ.  The
+first line begins with <code>#:lav</code>.
+<!-- (not available on Main)
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>BED<br>
+Convert Formats → LAV to BED
+</ul></dl>
+-->
+<p>
+
+<div><a name="lped"></a></div>
+<hr>
+<strong>LPED</strong>
+<p>
+This is the linkage pedigree format, which consists of separate MAP and PED
+files.  Together these files describe SNPs; the map file contains the position
+and an identifier for the SNP, while the pedigree file has the alleles.  To
+upload this format into Galaxy, do not use Auto-detect for the file format;
+instead select <code>lped</code>.  You will then be given two sections for
+uploading files, one for the pedigree file and one for the map file.  For more
+information, see
+<a href="http://www.broadinstitute.org/science/programs/medical-and-population-genetics/haploview/input-file-formats-0"
+>linkage pedigree</a>,
+<a href="http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#map">MAP</a>,
+and/or <a href="http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped">PED</a>.
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>PBED<br>Automatic
+<li>FPED<br>Automatic
+</ul></dl>
+<p>
+
+<div><a name="maf"></a></div>
+<hr>
+<strong>MAF</strong>
+<p>
+<a href="http://main.genome-browser.bx.psu.edu/FAQ/FAQformat#format5"
+>MAF</a> is the multi-sequence alignment format that is output by TBA
+and Multiz.  The first line begins with '<code>##maf</code>'.  This
+word is followed by whitespace-separated "variable<code>=</code>value"
+pairs.  There should be no whitespace surrounding the '<code>=</code>'.
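+<p>
+An illustrative MAF fragment with one alignment block ('<code>a</code>')
+containing two sequence rows ('<code>s</code>'):
+<pre>
+##maf version=1 scoring=tba.v8
+a score=23262.0
+s hg16.chr7    27578828 38 + 158545518 AAA-GGGAATGTTAACCAAATGAATTGTCTCTTACGGTG
+s panTro1.chr6 28741140 38 + 161576975 AAA-GGGAATGTTAACCAAATGAATTGTCTCTTACGGTG
+</pre>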
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>BED<br>
+Convert Formats → MAF to BED
+<li>Interval<br>
+Convert Formats → MAF to Interval
+<li>FASTA<br>
+Convert Formats → MAF to FASTA
+</ul></dl>
+<p>
+
+<div><a name="mastervar"></a></div>
+<hr>
+<strong>MasterVar</strong>
+<p>
+MasterVar is a tab-delimited text format with specified fields, developed
+by the Complete Genomics life sciences company.
+<a href="http://media.completegenomics.com/documents/DataFileFormats_Standard_Pipeline_2.2.pdf"
+>Field specifications</a>.
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>pgSnp<br>
+Convert Formats → MasterVar to pgSnp
+<li>gd_snp<br>
+Convert Formats → MasterVar to gd_snp
+</ul></dl>
+<p>
+
+<div><a name="pbed"></a></div>
+<hr>
+<strong>PBED</strong>
+<p>
+This is the binary version of the LPED format.
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>LPED<br>Automatic
+</ul></dl>
+<p>
+
+<div><a name="pgSnp"></a></div>
+<hr>
+<strong>pgSnp</strong>
+<p>
+This is the personal genome SNP format used by UCSC.  It is a BED-like
+format with columns chosen for the specialized display in the browser
+for personal genomes.  
+<a href="http://genome.ucsc.edu/FAQ/FAQformat.html#format10"
+>Field specifications</a>.  
+Galaxy treats it the same as an interval file.
+<p>
+
+<div><a name="psl"></a></div>
+<hr>
+<strong>PSL</strong>
+<p>
+<a href="http://main.genome-browser.bx.psu.edu/FAQ/FAQformat#format2">PSL</a>
+format is used for alignments returned by
+<a href="http://genome.ucsc.edu/cgi-bin/hgBlat?command=start">BLAT</a>.
+It does not include any sequence.
+<p>
+
+<div><a name="scf"></a></div>
+<hr>
+<strong>SCF</strong>
+<p>
+This is a binary sequence format originally designed for the Staden
+sequence handling software package.  Files should have a
+'<code>.scf</code>' file extension.  You must manually select this
+file format when uploading the file.
+<a href="http://staden.sourceforge.net/manual/formats_unix_2.html"
+>More information</a>
+<p>
+
+<div><a name="sff"></a></div>
+<hr>
+<strong>SFF</strong>
+<p>
+This is a binary sequence format used by the Roche 454 GS FLX
+sequencing machine, and is documented on p. 528 of their
+<a href="http://sequence.otago.ac.nz/download/GS_FLX_Software_Manual.pdf"
+>software manual</a>.  Files should have a '<code>.sff</code>' file
+extension.
+<!-- You must manually select this file format when uploading the file. -->
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>FASTA<br>
+Convert Formats → SFF converter
+<li>FASTQ<br>
+Convert Formats → SFF converter
+</ul></dl>
+<p>
+
+<div><a name="table"></a></div>
+<hr>
+<strong>Table</strong>
+<p>
+Text data separated into columns by something other than tabs.
+<p>
+
+<div><a name="tab"></a></div>
+<hr>
+<strong>Tabular (tab-delimited)</strong>
+<p>
+One or more columns of text data separated by tabs.
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>FASTA<br>
+Convert Formats → Tabular-to-FASTA<br>
+The Tabular file must have a title and sequence column.
+<li>FASTQ<br>
+NGS: QC and manipulation → Generic FASTQ manipulation → Tabular to FASTQ
+<li>Interval<br>
+If the Tabular file has a chromosome column (or is all on one
+chromosome) and has a position column, you can create an Interval
+file (e.g. for SNPs).  If it is all on one chromosome, use
+Text Manipulation → Add column to add a CHROM column.
+If the given position is 1-based, use
+Text Manipulation → Compute with the position column minus 1 to
+get the START, and use the original given column for the END.
+If the given position is 0-based, use it as the START, and compute
+that plus 1 to get the END.  (See the sketch following this list.)
+</ul></dl>
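+<p>
+A minimal Python sketch of the coordinate arithmetic described above
+(the <code>pos</code> variable is illustrative):
+<pre>
+# 1-based source position -> 0-based, half-open interval
+start, end = pos - 1, pos
+# 0-based source position -> 0-based, half-open interval
+start, end = pos, pos + 1
+</pre>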
+<p>
+
+<div><a name="txtseqzip"></a></div>
+<hr>
+<strong>Txtseq.zip</strong>
+<p>
+A zipped archive consisting of flat text sequence files.  All files
+in this archive must have the same file extension of
+'<code>.txt</code>'.  You must manually select this file format when
+uploading the file.
+<p>
+
+<div><a name="vcf"></a></div>
+<hr>
+<strong>VCF</strong>
+<p>
+Variant Call Format (VCF) is a tab-delimited text file with specified
+fields.  It was developed by the 1000 Genomes Project.
+<a href="http://www.1000genomes.org/wiki/Analysis/Variant%20Call%20Format/vcf-variant-call-format-version-41"
+>Field specifications</a>.
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>pgSnp<br>
+Convert Formats → VCF to pgSnp
+</ul></dl>
+<p>
+
+<div><a name="wig"></a></div>
+<hr>
+<strong>Wiggle custom track</strong>
+<p>
+Wiggle tracks are typically used to display per-nucleotide scores
+in a genome browser.  The Wiggle format for custom tracks is
+line-oriented, and the wiggle data is preceded by a track definition
+line that specifies which of three different types is being used.
+<a href="http://main.genome-browser.bx.psu.edu/goldenPath/help/wiggle.html"
+>More information</a>
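+<p>
+An illustrative fixedStep track (fixedStep is one of the three types),
+with one score per line:
+<pre>
+track type=wiggle_0 name="example"
+fixedStep chrom=chr1 start=100 step=1
+0.5
+0.7
+0.9
+</pre>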
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>Interval<br>
+Get Genomic Scores → Wiggle-to-Interval
+<li>As a second step this could be converted to 3- or 4-column BED,
+by removing extra columns using
+Text Manipulation → Cut columns from a table.
+</ul></dl>
+<p>
+
+<div><a name="gd_ped"></a></div>
+<hr>
+<strong>gd_ped</strong>
+<p>
+Similar to the linkage pedigree format (lped).  
+<p>
+
+<div><a name="text"></a></div>
+<hr>
+<strong>Other text type</strong>
+<p>
+Any text file.
+<dl><dt>Can be converted to:
+<dd><ul>
+<li>Tabular<br>
+If the text has fields separated by spaces, commas, or some other
+delimiter, it can be converted to Tabular by using
+Text Manipulation → Convert delimiters to TAB.
+</ul></dl>
+<p>
+
+<!-- blank lines so internal links will jump farther to end -->
+<br><br><br><br><br><br><br><br><br><br><br><br>
+<br><br><br><br><br><br><br><br><br><br><br><br>
+</body>
+</html>
diff --git a/static/gmaj/docs/cathy.gmaj.png b/static/gmaj/docs/cathy.gmaj.png
new file mode 100644
index 0000000..7eaaa68
Binary files /dev/null and b/static/gmaj/docs/cathy.gmaj.png differ
diff --git a/static/gmaj/docs/gmaj.css b/static/gmaj/docs/gmaj.css
new file mode 100644
index 0000000..41e495c
--- /dev/null
+++ b/static/gmaj/docs/gmaj.css
@@ -0,0 +1,21 @@
+/* pre { font-family: monospace; } */    /* recommended at http://www.w3.org/MarkUp/Guide/Style, */
+                                         /* but confuses Mozilla */
+body { color: black; background-color: white; background-repeat: no-repeat }
+p.vvlarge { margin-top: 6ex; margin-bottom: 0 }
+p.vlarge { margin-top: 4ex; margin-bottom: 0 }
+p.large { margin-top: 3ex; margin-bottom: 0 }
+p { margin-top: 2ex; margin-bottom: 0 }
+p.small { margin-top: 1ex; margin-bottom: 0 }
+p.tiny { margin-top: 0.5ex; margin-bottom: 0 }
+p.hdr { margin-top: 3ex; margin-bottom: 0 }
+p.subhdr { margin-top: 2.5ex; margin-bottom: 0 }
+p.right { text-align: right; margin-right: 1ex }
+p.scrollspace { margin-top: 100em; margin-bottom: 0 }
+/* tbody { text-align: left; vertical-align: baseline } */
+ul.lessindent { padding-left: 4ex }
+img.lower { vertical-align: -3ex }
+code { padding-left: 0.5ex; padding-right: 0.5ex }
+.smallfont { font-size: smaller }
+.baseline { vertical-align: baseline }
+.notop { margin-top: 0 }
+.nobottom { margin-bottom: 0 }
diff --git a/static/gmaj/docs/gmaj_bugs.html b/static/gmaj/docs/gmaj_bugs.html
new file mode 100644
index 0000000..cbb18cb
--- /dev/null
+++ b/static/gmaj/docs/gmaj_bugs.html
@@ -0,0 +1,128 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+	"http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<title>Known Bugs in Gmaj</title>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+<meta http-equiv="Content-Style-Type" content="text/css">
+<link rel="stylesheet" type="text/css" href="gmaj.css">
+</head>
+<body>
+<p class=vvlarge>
+<h2>Known Bugs in Gmaj</h2>
+<p class=vvlarge>
+The following anomalies in Gmaj's display and behavior have been
+observed, but not yet resolved.  Some of these are intermittent,
+and some may depend on your computer platform and/or version of
+Java.
+<p>
+If you experience any of these (or similar) problems, it may help
+to switch to a different version of Java.  For example, the first
+bug listed below (phantom repeats) seems to have been introduced
+with Sun's J2SE 1.4.x series, so you may be better off with 1.3.x
+or 1.5.x.  You can also try switching to a different vendor's
+implementation, but then you are on your own, as we develop and
+test Gmaj using Sun's official API and SDK.  Currently Gmaj does
+not rely on any features beyond Java 1.3, in order to preserve
+version flexibility for our users.
+<p>
+Note that anomalies that depend on the Java version could be due
+to bugs in Java itself, but could also be bugs in Gmaj that only
+"matter" with certain implementations of Java.  In either case,
+however, with further investigation it may be possible to work
+around the problem in future releases of Gmaj.
+<p class=vlarge>
+<ul>
+<li>	<a name="phantom"><i>Phantom repeats.</i></a> 
+	After zooming in on a small region, the "repeats" panel
+	sometimes shows a spurious extra bar superimposed on the
+	legitimate features across the entire width of the window,
+	or the features may disappear entirely.
+<p>
+<li>	<a name="zoombox"><i>Missing zoom box and crosshairs.</i></a> 
+	With some datasets (large ones especially) the lines for the
+	zoom rectangle and crosshairs may not display.  It still
+	zooms OK though, if you just pretend the lines are there.
+	<b>Update:</b> This appears to have been fixed in Java 1.5.
+<p>
+<!-- [fixed 02/19/08]
+<li>	<a name="buttonscroll"><i>Scrollbar obscures block buttons.</i></a> 
+	If there is not enough room for all of the block buttons
+	corresponding to the marked position, a scrollbar will
+	appear.  However, the extra vertical space required for
+	the scrollbar may not be calculated properly, so that the
+	scrollbar partially obscures the button labels.  To read
+	the labels, drag the partition between the mark indicator
+	box and the button panel toward the left, to create more
+	space for the buttons.
+<p>
+-->
+<li>	<a name="dialogcopy"><i>Can't copy text from dialog messages in applet.</i></a> 
+	In applet mode, you may not be able to select and copy text
+	from pop-up message boxes to the system clipboard for
+	pasting elsewhere.  This is apparently due to a bug in Java;
+	hopefully Sun will fix it eventually.
+<p>
+<li>	<a name="xor"><i>Patchy rendering of mark circle and/or red segments.</i></a> 
+	When using the Large Fonts option with Mark Color Varies,
+	the red circle and plot segments may not appear as solid
+	as they should.  This is because Mark Color Varies is
+	implemented using Java's XOR drawing mode, and the thick
+	lines are built up from several thin ones, so whenever two
+	red pixels overlap they turn white again.  Fixing this will
+	likely involve a custom implementation of Mark Color Varies
+	that doesn't use XOR.  In the meantime, switching to Mark
+	Always Red when using Large Fonts should solve the problem
+	(except that it will be invisible against red underlays).
+<p>
+<!-- [fixed 07/16/07]
+<li>	<a name="buildfocus"><i>GUI rebuild breaks keyboard shortcuts.</i></a> 
+	The items on the Options menu generally require all of the
+	open windows to be rebuilt, and although the window that was
+	active may still be in front, on some systems it might not
+	actually have the keyboard focus.  So when you press a key,
+	say "<code>b</code>" for "Zoom - Back", one of the other
+	windows changes instead of the one you wanted.  Even the
+	Escape key doesn't help, because it goes to the wrong
+	window too.  Investigation of how best to prevent this is
+	underway; in the meantime you can fix it by clicking on a
+	different window and then returning to the one you want.
+<p>
+-->
+<li>	<a name="leak"><i>Memory leak when closing or rebuilding windows.</i></a> 
+	When windows are closed or rebuilt (e.g. when loading data
+	via File - Open, switching to Large Fonts, or even just
+	displaying dialog boxes), not all of the memory they were
+	using is being reclaimed, so Gmaj will gradually run out
+	and have to be restarted.  How quickly this happens will
+	depend on how much memory you have allocated to Java with
+	the <code><a href="gmaj_help.html#memory">-Xmx</a></code>
+	switch, and the size of your dataset.
+<p>
+<!--
+<li>	<a name=" "><i>Name.</i></a> 
+	Description.
+<p>
+-->
+</ul>
+<p class=vlarge>
+If you encounter any bugs not listed here, please report them to
+<img align=top alt="[image of email address]" src="cathy.gmaj.png">.
+It would be helpful if you can include the version of Gmaj you
+are using (i.e., the JarDate line from the Help - About message),
+your computer platform and version of Java, copies of the data
+files you were trying to view, and a description of exactly what
+you were doing when the problem occurred.  This will help us to
+reproduce the problem so we can track it down and fix it.
+Non-bug suggestions and feedback are also welcome.
+<p>
+Thank you for using Gmaj, and helping us to make it better.
+<p>
+
+<p class=vvlarge>
+<hr>
+<i>Cathy Riemer, June 2008</i>
+
+<p class=scrollspace>
+</body>
+</html>
diff --git a/static/gmaj/docs/gmaj_help.html b/static/gmaj/docs/gmaj_help.html
new file mode 100644
index 0000000..b4d92ba
--- /dev/null
+++ b/static/gmaj/docs/gmaj_help.html
@@ -0,0 +1,860 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+	"http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<title>Starting and Running Gmaj</title>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+<meta http-equiv="Content-Style-Type" content="text/css">
+<link rel="stylesheet" type="text/css" href="gmaj.css">
+</head>
+<body>
+<p class=vvlarge>
+<h2>Starting and Running Gmaj</h2>
+<p class=vvlarge>
+TABLE OF CONTENTS
+<p class=small>
+<ul class=notop>
+<li><a href="#intro">Introduction</a>
+<li><a href="#start">Starting Gmaj</a>
+<li><a href="#memory">Memory Allocation</a>
+<li><a href="#windows">Multi-Pip and Dotplot Windows</a>
+<li><a href="#state">The Zoom and the Mark</a>
+<li><a href="#layout">Window Layout</a>
+<li><a href="#mouse">Mouse Controls</a>
+<li><a href="#menu">Menus and Widgets</a>
+<li><a href="#copy">Copying and Printing</a>
+<li><a href="#notes">Footnotes</a>
+</ul>
+<p class=vlarge>
+
+<p class=hdr>
+<h3><a name="intro">Introduction</a></h3>
+<p>
+Gmaj can be run in two different modes: as an applet over the
+world-wide web (for viewing data delivered from a server), or as
+a stand-alone application (for viewing data stored on your own
+computer).  These modes are mostly similar but have a few minor
+differences, as noted below.
+<p>
+
+<p class=hdr>
+<h3><a name="start">Starting Gmaj</a></h3>
+<p>
+If you are using Gmaj in applet mode, it will be started for you
+when you visit the applicable web page or submit a query to the
+server.  If the Gmaj window does not appear automatically, just
+click on the labeled button to view the indicated data.  Then
+skip the rest of this section.
+<p>
+If you are using Gmaj in stand-alone mode, you need to start it
+yourself.  The Java runtime environment does not have its own
+GUI, so you generally need to run Gmaj from a command line (e.g.,
+in the Command Prompt window on Windows XP).  The basic command
+to type in looks like this:
+<pre>
+    [path1]java -jar [path2]gmaj.jar
+</pre>
+where <code>[path1]</code> is the location of your
+<code>java</code> program file (perhaps
+<code>c:\windows\system32\</code> on WinXP, or
+<code>/usr/bin/java/</code> on a Unix system), and
+<code>[path2]</code> is the location where the
+<code>gmaj.jar</code> file was installed.  Note that you can
+leave off <code>[path1]</code> if you have set up your system
+command path to include the location of the <code>java</code>
+program.  Depending on how your system is set up, it may also be
+possible to run the jar file directly by just typing its name or
+double-clicking on it.
+<p>
+Since you haven't yet specified any data to display, Gmaj will
+begin by presenting a dialog box to prompt you for the name of
+your input file (see <a href="gmaj_input.html"
+>Input Files for Gmaj</a>).  If the file is located in the
+current directory you can just type its name, otherwise you'll
+need to supply a complete path.  When you click the OK button,
+a window will appear displaying the loaded data.
+<p>
+As an alternative to using the dialog box, you can specify the
+input file (plus additional parameters) on the command line.
+As of this writing, the command syntax is:
+<pre>
+    [path1]java -jar [path2]gmaj.jar
+        [-version] [-help] [-debug] [-urlpause &lt;millisec&gt;]
+        [-initzoom &lt;refseqname&gt; &lt;start&gt; &lt;end&gt;]
+        [-bundle &lt;zipfile&gt;] [&lt;paramfile&gt;|&lt;alignfile&gt;]
+</pre>
+<p>
+This has been wrapped here for easier readability, but should be
+typed all on one line.  Arguments shown in square brackets
+<code>[]</code> are optional, while a vertical bar <code>|</code>
+indicates a choice between alternatives.  Angle brackets
+<code>&lt;&gt;</code> signify meta-syntactic variables that
+should be replaced with your names or numbers.  Don't type any
+of the brackets or the bar.
+<p>
+These parameters do the following:
+<dl>
+<dt>	<code>-version</code>:
+<dd>	Prints a message with information about Gmaj, including
+	version, author, etc.; then exits.
+<dt>	<code>-help</code>:
+<dd>	Prints a brief help message with up-to-date syntax; then
+	exits.
+<dt>	<code>-debug</code>:
+<dd>	Instructs Gmaj to print extra warning messages in
+	your terminal window if certain problems occur.  Normally
+	you won't need this, as it is mainly for development
+	purposes.
+<dt>	<code>-urlpause</code>:
+<dd>	Specifies how many milliseconds the program should pause
+	before retrieving each file from a URL, in order to avoid
+	overloading the server.
+<dt>	<code>-initzoom</code>:
+<dd>	Specifies an initial zoom setting to be applied when the
+	window opens.  You will still be able to invoke the Unzoom
+	or Set Zoom features interactively to see the entire
+	sequence range.  The <code>&lt;refseqname&gt;</code> must
+	match one of the sequence names from the alignment file(s),
+	and the endpoints must include the offset (if any) for that
+	sequence from the parameters file.  To specify the reference
+	sequence without a zoom region, use <code>-1</code> for both
+	endpoints.
+<dt>	<code>-bundle</code>:
+<dd>	Specifies the name of a <code>.zip</code> or
+	<code>.jar</code> file containing some or all of the data
+	files.  This option is mostly used with Gmaj's applet mode
+	to streamline the data download, but it is also supported
+	in stand-alone mode.  It is described in
+	<a href="gmaj_input.html">Input Files for Gmaj</a>.
+<dt>	<code>&lt;paramfile&gt;</code>:
+<dd>	This is the meta-data parameters file that lists the names
+	of all the data files, plus a few related parameters such as
+	display offsets and any intrinsic reference sequence.  For
+	more information about the contents and format of this file,
+	please see <a href="gmaj_input.html">Input Files for Gmaj</a>
+	and <code><a href="sample.gmaj">sample.gmaj</a></code>.
+<dt>	<code>&lt;alignfile&gt;</code>:
+<dd>	If you don't want to use any annotations or other
+	data-related options, you can specify a single alignment
+	file directly, instead of creating a parameters file.
+	This must be in MAF format; see <a href="gmaj_input.html"
+	>Input Files for Gmaj</a> for more details.
+</dl>
+<p>
+
+<p class=hdr>
+<h3><a name="memory">Memory Allocation</a></h3>
+<p>
+If the dataset you want to view is large, Gmaj may run out of
+memory, in which case Java will report an
+<code>OutOfMemoryError</code>.  This message will appear in the
+command window where you started Gmaj (for stand-alone mode) or
+in the Java Console window of your web browser (for applet mode).
+If the Java Console window does not appear when you run the Gmaj
+applet, open the Java Plug-in Control Panel on your computer and
+click the setting for Show Console so you can see this and other
+Java messages.
+<p>
+The <code>OutOfMemoryError</code> is not uncommon, because the
+default amount of memory that Java allocates is rather small.
+You can give it more memory using the <code>-Xmx</code> switch
+(at least with Sun's Java; this option may not be supported by
+all vendors).  For example, when using stand-alone mode the
+command line
+<pre>
+    [path1]java -Xmx1024m -jar [path2]gmaj.jar
+</pre>
+runs Gmaj with a heap memory allowance of ~1 gigabyte.  For the
+applet, you can do this by opening the Java Plug-in Control Panel
+on your computer and entering <code>-Xmx1024m</code> in the Java
+Runtime Parameters box (this will affect all applets you run via
+the Java Plug-in, not just Gmaj).
+<p>
+
+<p class=hdr>
+<h3><a name="windows">Multi-Pip and Dotplot Windows</a></h3>
+<p>
+Gmaj has two kinds of windows.  The main one displays a number
+of pips (percent identity plots) showing the pairwise alignments,
+projected from the multiple alignments, of a particular reference
+sequence against each of the other sequences.  A pip is similar
+to a dotplot, with the horizontal <code>x</code>-axis
+representing positions in the reference sequence, but the
+vertical <code>y</code>-axis represents the percentage of
+matching nucleotides in each gap-free segment of the pairwise
+alignment, instead of its position in the second sequence.  The
+window you see first when Gmaj opens is usually of this type, and
+if the alignments are reference-independent, you can open more of
+these with other sequences as the reference.
+<p>
+The second type of window focuses exclusively on a particular
+pair of sequences, and displays one pip together with its
+corresponding dotplot representation (similar to Gmaj's
+predecessor, <a href="http://globin.bx.psu.edu/dist/laj/"
+>Laj</a>).  These windows are opened upon request, by clicking
+on a button in the header for a particular pip in the multi-pip
+window.  Conceptually the dotplot windows are like children of
+their parent multi-pip window: they have the same reference
+sequence, and if you close a dotplot only that one window closes,
+but if you close the parent all of its children close too.
+<p>
+For the special case where the alignment files have only two
+sequences, the main window is redundant so Gmaj automatically
+hides it and shows the dotplot window directly.
+<p>
+
+<p class=hdr>
+<h3><a name="state">The Zoom and the Mark</a></h3>
+<p>
+Gmaj has two main elements of user state that reflect the user's
+interactive manipulation of the display.  The first is the zoom
+region, i.e. the portion of the reference and secondary sequences
+that is currently displayed; this is one-dimensional for
+multi-pip windows and two-dimensional for dotplots.  As the zoom
+is changed, the previous regions are remembered in a history
+list, so you can go back and forward through it similar to a web
+browser.  Each Gmaj window has its own separate zoom and history;
+when opening a new window the current zoom region is translated
+initially, but then they are independent.  As a convenience,
+each new window begins with several zoom regions already in the
+history: the fully unzoomed sequence length(s), as specified in
+the MAF files; the aligning portion of the applicable sequence(s);
+and an approximate translation of the previous window's current
+zoom (or in the case of the very first window, the initial zoom
+specified in the command-line or applet parameters, if any).
+The boundaries of the current region are displayed in a status
+indicator in the upper right corner of the window, below the
+menu bar.
+<p>
+The second state element is called the "mark", and it represents
+a particular selected point in a particular pairwise pip+dotplot
+and a particular MAF block.  It is typically selected by clicking
+in a pip, dotplot, or text alignment, and is drawn as a small
+<a href="#red">red</a> circle in the plots, and also as a red
+highlight in the text alignment.  Unlike the zoom regions, the
+mark is shared among several windows: there is at most one mark
+for each reference sequence, and it appears in both the multi-pip
+window for that sequence and the dotplot corresponding to
+whichever pip the mark is currently in (the other dotplots, with
+different secondary sequences, will show some indirect information
+about the mark, but not the mark itself).  Thus, moving the mark
+in a dotplot window will also move it in the parent multi-pip and
+vice-versa, but the mark for a different reference sequence is
+independent.  Information about the current mark and plot block
+is displayed in one of the status indicators below the menu bar.
+<p>
+
+<p class=hdr>
+<h3><a name="layout">Window Layout</a></h3>
+<p>
+Each Gmaj window is divided into several sections.  Across the
+top you will see a menu bar (including text boxes for setting
+display thresholds), and below that two lines containing
+status indicators with information about the position of the
+mouse pointer, the boundaries of the currently displayed zoom
+region, and the location of the mark (<a href="#red">red</a>
+circle), along with buttons for sliding the zoom region and
+selecting alternative blocks at the marked position.  Several
+of the dividers between these items are <a href="#dividers"
+>draggable</a>, so you can adjust the relative space they
+occupy.  Below these is a row of category checkboxes, which by
+default will only appear when there is more than one alignment
+file or if you have used the tagging feature.  The menus,
+threshold boxes, buttons, and checkboxes are discussed
+individually in the <a href="#menu">Menus and Widgets</a>
+section of this document.
+<p>
+<i>Ruler:</i><br>
+The first graphical panel is a horizontal ruler that displays
+tick marks corresponding to positions in the currently selected
+reference sequence.  These are intended to give you an immediate
+general feel for the location and scale of the region being
+displayed.  Precise locations can be determined via the position
+indicator, which displays the exact coordinate of the mouse
+pointer.
+<p>
+<i>Reconstruction scores:</i><br>
+For ancestral reconstruction alignments, the MAF files may
+contain scores indicating the confidence that 1) a particular
+inferred ancestral nucleotide is correct, and that 2) it was
+present at all.  The next two panels display bar graphs of these
+scores when the ancestral sequence is the reference.  The scores
+are binned according to the current zoom region and panel width,
+and the mean score for each bin is graphed on a scale of 0 - 1
+(note that the scores are transformed via simple linear scaling,
+and should not be interpreted as probabilities).  For this to
+work, the parameters file must specify which organism the scores
+apply to.  Otherwise, or if there are no scores in the file, or
+if a different sequence is currently the reference, these panels
+will not appear.  The position indicator displays the horizontal
+coordinate and vertical score position of the mouse pointer,
+along with the score for the bar at that location (if any).
+<p>
+<i>Linkbars:</i><br>
+Next is a panel that can display links to additional information
+about various regions in the current reference sequence.  Each
+annotation is represented by a color-coded bar spanning the
+region's position in the sequence.  (The bars' vertical positions
+are not meaningful; they are only placed in rows for convenience,
+to keep them from overlapping.)  Pointing to a particular bar
+will cause the position indicator to display the <code>x</code>
+coordinate of the pointer, and also the type and description of
+that bar's annotation; otherwise only the <code>x</code>
+coordinate will be shown.  In applet mode, clicking on a bar will
+open a separate browser window to visit the corresponding web
+site.  In stand-alone mode Gmaj is not working within a web
+browser, so instead it displays the URL for you to visit manually
+via copy-and-paste.  If no links file is provided for the current
+sequence, this panel will not appear.
+<p>
+<i>Sequence features:</i><br>
+The next two panels contain schematic diagrams of the known
+exons and interspersed repeats in the current reference sequence,
+respectively (if these files are provided).  Any additional
+features such as CpG islands are included with the repeats (which
+is why the label says "repeats+").  The diagram for repeats uses
+the same symbols as <a href="http://pipmaker.bx.psu.edu/pipmaker/"
+>PipMaker</a> to indicate the various repeat categories (Alu, MIR,
+etc.), but only if either the PipMaker category or the
+<a href="http://www.repeatmasker.org/">RepeatMasker</a> name and
+class/family are available.  For example, BED and GTF repeat
+files from the <a href="http://genome.ucsc.edu/cgi-bin/hgTables"
+>UCSC Table Browser</a> include the RepeatMasker name but not the
+class/family, so Gmaj cannot determine the PipMaker category and
+draws all of them as "Other".  As usual, the position indicator
+displays the <code>x</code> coordinate of the mouse pointer, and
+also identifies any features at that position.
+<p>
+<i>Plots:</i><br>
+The following panels display the alignment plots according to the
+window type: either a scrollable stack of <a href="#windows"
+>pips</a> (for the reference sequence against each of the others)
+or a single pip and its corresponding dotplot.  For pips, only
+the top half of each plot is shown, since segments matching less
+than 50% are usually not very interesting.  Plot segments from
+the primary alignment file are drawn with thin black lines, while
+those from subsequent files are drawn with thicker brown lines.
+Tagged blocks are green, and the marked block is red (or orange
+if it is also tagged), though these may <a href="#red">vary</a>
+on colored backgrounds.  When plot segments overlap, the marked
+and tagged ones are drawn "on top" so they won't be obscured,
+followed by other blocks from the primary alignment file and then
+the remaining files.  Plots that are completely empty (i.e. if
+that pair of sequences never occurs together in any of the
+alignment blocks) will be painted gray.  An additional feature of
+these panels is that colored backgrounds, or "underlays", can be
+used to highlight regions of interest (if files with this
+information are provided); dotplots can display these for both
+the reference and secondary sequences.  Vertical blue bars at the
+edges of the plots represent the boundaries of the current zoom
+region, whose endpoints are displayed in the zoom indicator.  For
+a pip, the position indicator displays the horizontal coordinate
+and vertical percentage position of the mouse pointer, along with
+a list of <a href="#numbering">block numbers</a> <a href="#cover"
+>covering</a> that location.  For a dotplot, it displays the
+horizontal and vertical coordinates in the reference and secondary
+sequences, respectively.  It will also display labels for the
+colored regions in both types of plots, if these are included in
+the underlay files.
+<p>
+<i>Text view:</i><br>
+The bottom panel displays a nucleotide-level view of a single
+selected alignment block: the one containing the mark
+(<a href="#red">red</a> circle).  Initially it is empty, since
+you haven't set the mark yet.  The top row of this display shows
+the current reference sequence, while the rows for the other
+sequences show a dot (<code>.</code>) wherever they match the
+reference sequence, and only explicitly list the nucleotides that
+don't match.  (This matching is case-insensitive to deal with
+soft masking, but non-nucleotide characters such as
+<code>X</code> or <code>N</code> never match anything, even
+themselves.)  All of the sequences will likely have had gaps
+(<code>-</code>) inserted by the alignment program.  Note that
+most of the blocks will be much too long to fit across this
+window, so a scrollbar is provided; the relative size of the
+scrollbar's slider indicates what fraction of the alignment is
+shown in the window.  Colored "highlights" (analogous to the plot
+underlays) can also be specified for each sequence; otherwise
+Gmaj will provide default highlights based on the exons files
+(if any).  Whenever the mouse pointer is in this bottom panel,
+the position indicator displays its location in the format
+<code>n(x)</code>, where <code>n</code> is the column position
+in this aligned block (starting with 0), and <code>x</code> is
+the sequence position in the individual row (i.e., in that entire
+chromosome or contig, starting with 1).  Note that <code>x</code>
+does not include the gaps, but <code>n</code> does.  Labels for
+any highlights at that position are also displayed.
+<p>
+With the exception of the text view, all of these data panels use
+the same horizontal coordinate scale (i.e., position in the
+current reference sequence), and they are always kept vertically
+aligned so they can be compared easily.  Note that in the
+multi-pip window the partition between the graphical panels and
+the text view is <a href="#dividers">draggable</a>, so you can
+adjust the relative amount of space they occupy.  Also, individual
+panels can be hidden if desired, using the Options - Show dialog
+(see <a href="#menu">Menus and Widgets</a>).
+<p>
+<!--
+<i>Dotplot:</i><br>
+The large middle panel displays a dotplot view of the alignments,
+with the reference sequence along the horizontal
+<code>x</code>-axis and the secondary sequence along the vertical
+<code>y</code>-axis.  If the second sequence contains multiple
+contigs, they will appear as separate horizontal bands across the
+plot, each with its own <code>y</code>-axis coordinate system.
+Whenever the mouse pointer is in this panel, the position
+indicator displays its location in the format <code>x,y</code>,
+where <code>x</code> is the position in the horizontal sequence
+and <code>y</code> is the position in the vertical sequence.  If
+there are multiple contigs, then the contig name will be
+displayed as well (actually only the first word is displayed, to
+prevent long names from crowding out the other information).
+<p>
+-->
+
+<p class=hdr>
+<h3><a name="mouse">Mouse Controls</a></h3>
+<p>
+As discussed in more detail <a href="#layout">above</a>,
+pointing with the mouse in the plots or other panels causes the
+position indicator below the menu bar to display information
+about that location and/or data item.
+<p>
+You can select a particular alignment block by clicking on one
+of its segments in any of the plots (pips or dotplots) with the
+left mouse button.  (You don't have to click exactly on it,
+because Gmaj will automatically jump to the nearest point if you
+miss; however proximity is measured in bases, not pixels, which
+can lead to non-intuitive results if the dotplot's zoom scale is
+highly skewed.)  The spot will be marked with a small
+<a href="#red">red</a> circle, and the entire alignment block
+containing the mark will change color from black to
+<a href="#red">red</a> in all of the plots for that reference
+sequence (each block typically spans several gap-free segments).
+Also, the corresponding text view for that block will appear in
+the bottom panel with the marked position highlighted.  Lastly,
+the mark indicator will be filled in with information about the
+marked block and position, and a row of buttons will appear next
+to it showing the <a href="#numbering">block numbers</a>
+<a href="#cover">covering</a> the marked location.  These buttons
+allow convenient selection of a different block at the same
+position in the reference sequence, from the same or a different
+alignment file (see <a href="#menu">Menus and Widgets</a>).
+Note that there is only one mark at a time for each reference
+sequence, so the previous one, if any, will be unmarked.
+<p>
+In a similar fashion, clicking the left mouse button in the
+text view will move the mark (both the highlight and the
+<a href="#red">red</a> circle) to that position.  However, gap
+positions cannot be selected in this manner because they do not
+correspond to plot segments; if you click in a gap, the nearest
+gap-free position is selected instead.  Also, if you click on a
+position in the reference sequence (which has no corresponding
+pip), the mark will move to the new column but will remain in
+the same pip as before.
+<p>
+You can "zoom in" on a particular region by dragging out a
+rectangle with the left mouse button in any of the white panels
+(ruler, annotations, pip, or dotplot).  All of these panels
+will always zoom together, to keep them lined up.  This can be
+repeated until the maximum resolution is reached; after that
+Gmaj will display an error message.  Additional zoom features
+are available via the Zoom menu and arrow buttons (see
+<a href="#menu">Menus and Widgets</a>).  Note that selecting
+a new region will cause any entries in your zoom history that
+are forward of the current point to be discarded, similar to
+a web browser.  If your rectangle is very tiny it will be
+treated as a click instead, to avoid unintended zooming.
+<p>
+Holding down the right mouse button over any of the white
+panels adds crosshairs at the mouse pointer's location, which
+is convenient for determining whether two regions really line
+up.  If you have a one-button mouse, you can achieve the same
+effect by applying the <code>Shift</code> key when initially
+pressing the mouse button.
+<p>
+Note that these controls only work in the active window (usually
+indicated in the operating system by a differently colored title
+bar).  If a window is not the active one, then your first click
+in it just activates the window; you will need to click again
+to set the mark, select a region, open a menu, etc.
+<p>
+
+<p class=hdr>
+<h3><a name="menu">Menus and Widgets</a></h3>
+<p>
+<dl>
+<dt>File - Open:
+<dd>
+Loads a new set of data files into Gmaj, replacing the currently
+displayed data.  A dialog box is presented for you to specify the
+new input file.  (See discussion under <a href="#start"
+>Starting Gmaj</a>, above.)  This menu item does not appear in
+applet mode, because the user is unlikely to know the locations
+of other data files on the server; instead the webmaster should
+set up separate access for each dataset.
+<p>
+<dt>File - Export:
+<dd>
+Opens a dialog box that allows you to save alignment blocks in
+MAF format or as FastA sequence files.  A variety of options are
+available regarding which blocks to export, whether they should be
+clipped and/or cleaned up, whether to omit certain sequences,
+etc.  Note that all-gap rows are always skipped, and so are
+blocks that have no rows left.  When exporting in MAF format,
+if the alignment has a fixed, intrinsic reference sequence and
+that row is all gaps, the entire block will be skipped.  When
+exporting FastA sequences, a separate file is created for each
+sequence name (i.e. species or contig), and there is an option
+to restore sequences that align in reverse complement to their
+original orientation (which will also swap the order of the
+endpoint coordinates in the FastA header).  By default export is
+not available in applet mode, because security restrictions make
+it very awkward to save files on the client computer.  However,
+the applet administrator can <a href="gmaj_install.html#page"
+>specify a URL</a> where the output can be sent instead (in this
+case only MAF format is supported).
+<p>
+<dt>File - Close:
+<dd>
+Closes the current window, and if it is a multi-pip window, all
+of its dotplot children are closed as well.  When no windows are
+left, Gmaj will exit.
+<p>
+<dt>File - Exit:
+<dd>
+Exits from Gmaj.  In stand-alone mode, also exits from Java.
+<p>
+<dt>Options:
+<dd>
+This menu controls some of the aesthetic aspects of Gmaj.  You
+can choose between two sizes of fonts, which will also affect
+some other viewability settings, such as the thickness of the
+plot segments, the radius and thickness of the mark circle, the
+blackness of the ruler numbers, and the height of the pips.  You
+can also choose to make the mark circle and the selected block's
+plot segments change color with the background instead of always
+being red (this makes them visible against red underlays, but is
+more complicated to explain in a figure legend and causes
+<a href="gmaj_bugs.html#xor">patchy rendering</a> when used with
+Large Fonts).  Lastly, the Show item opens a dialog where you can
+choose which panels to display or hide, and whether the underlays
+should be painted on dotplots.  The sequence selections here
+control which pips, dotplot windows, and text rows are displayed
+(except where that sequence is the reference).  They can also
+serve to omit sequences from exports if desired; in this case
+they apply even to the reference sequence, but if the alignments
+have a fixed, intrinsic reference it will be grayed out to avoid
+exporting "orphaned" blocks.  The choices on this menu affect all
+of the windows, not just the current one.
+<p>
+<dt>Reference:
+<dd>
+This menu allows you to select a different reference sequence
+(unless the parameters file indicates that the alignments have a
+fixed, intrinsic reference sequence).  A new multi-pip window
+will open, showing the same data from the perspective of the
+sequence you chose.  The mark (if any) will be copied to the new
+window as closely as possible, and the current zoom region will
+be translated to a roughly equivalent one showing the same blocks.
+Thereafter, the windows will operate independently.  The text
+alignments will be rearranged to put the reference row at the top,
+but the rows are always shown in their MAF orientation.  Thus if
+the reference row is on the '-' strand, its coordinates will
+<i>decrease</i> from left to right in the text panel.  You can
+have one multi-pip window for each sequence in the data; if you
+already have one for the newly-chosen reference sequence, it will
+just be brought to the front unchanged.
+<p>
+<dt>Zoom - Back:
+<dd>
+Moves backward in your zoom history for this window, returning
+to previous regions.  Does not affect the mark.
+<p>
+<dt>Zoom - Forward:
+<dd>
+Moves forward in your zoom history for this window.  Does not
+affect the mark.
+<p>
+<dt>Zoom - Unzoom:
+<dd>
+Sets the zoom region for this window to the widest, unzoomed
+view, i.e., the full length of this entire reference sequence
+(and also this secondary sequence, for a dotplot) as specified in
+the MAF files.  Has the same effect as entering the "valid range"
+endpoints in Set Zoom.  Does not affect the mark.
+<p>
+<dt>Zoom - Set Zoom:
+<dd>
+Presents a dialog box that allows you to enter arbitrary zoom
+endpoints (within the valid ranges for the applicable sequences).
+Any left empty will be interpreted to mean "leave unchanged".
+The new region, if different from the current one, is added to
+your zoom history for this window.  Any regions forward of the
+current point in your history are discarded (similar to a web
+browser).  Does not affect the mark.
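+<p>
+A rough sketch of this history mechanism (hypothetical Python,
+not Gmaj's own Java code):
+<pre>
+class ZoomHistory:
+    def __init__(self, initial_region):
+        self.regions = [initial_region]
+        self.pos = 0
+
+    def select(self, region):
+        if region == self.regions[self.pos]:
+            return                       # unchanged region: no new entry
+        del self.regions[self.pos + 1:]  # discard forward entries
+        self.regions.append(region)
+        self.pos += 1
+
+    def back(self):
+        self.pos = max(0, self.pos - 1)
+        return self.regions[self.pos]
+
+    def forward(self):
+        self.pos = min(len(self.regions) - 1, self.pos + 1)
+        return self.regions[self.pos]
+</pre>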
+<p>
+<dt>Tags - Tag/Untag Block:
+<dd>
+The tagging feature allows you to build an arbitrary subset of
+the alignment blocks for differential viewing or export (see
+<a href="#category">Category Checkboxes</a>).  There is only one
+tagged subset in each invocation of Gmaj, and it pertains to
+all windows.  This menu item toggles the status of the currently
+marked block (the one containing the <a href="#red">red</a>
+circle), tagging it if it's not already in the set, and removing
+the tag if it is.  
+<p>
+<dt>Tags - Clear All Tags:
+<dd>
+Empties the tagged subset by removing the tags from all blocks.
+Also hides the category checkboxes if they are no longer useful.
+<p>
+<dt>Help - About:
+<dd>
+Displays a message window with information about Gmaj, including
+version, author, etc.  Also reports the version of Java you are
+currently using.
+<p>
+<dt>Help - Manual:
+<dd>
+In applet mode, opens a new browser window to view this help
+page.  In stand-alone mode Gmaj is not working within a web
+browser, so instead it displays the URL for you to visit manually
+via copy-and-paste.
+<p>
+<dt>Help - Keys:
+<dd>
+Displays a message window listing Gmaj's keyboard shortcuts.  No
+<code>Alt</code> key is needed.  The shortcuts will not work if
+the keyboard focus is in a text area (threshold boxes, status
+indicators, text alignment, panel headers, etc., as indicated by
+a purple border or highlight); in this case press <code>Esc</code>
+first to cancel any text operation and restore the focus to the
+active window's menu bar.  <code>Esc</code> will also cancel
+dialog and message boxes.
+<p>
+<dt>Help - Sequence Summary:
+<dd>
+Displays the aligning extents for all sequences (i.e., the
+smallest range in each sequence that includes all of its aligning
+regions).  This is useful when fetching annotations from the UCSC
+Table Browser or other databases, or for identifying the relevant
+parts of already-in-hand annotation files so they can be trimmed
+down to size.
+<p>
+<dt>% Identity Box:
+<dd>
+Allows you to set a threshold for filtering the displayed
+alignments by the percent identity of the plot blocks (which are
+pairwise projections of the MAF blocks).  The percent identity
+of each plot block is computed as the length-weighted average
+percent identity of its gap-free segments, with no penalty for
+gaps.  A plot block below the threshold is not drawn or clickable
+in the plots, and the row for its secondary sequence is omitted
+in the text alignment panel; additionally it is excluded from
+the position indicator's block list and from the row of block
+buttons for the mark.  However, these plot blocks are only hidden
+and still exist otherwise (e.g., for export).  Setting the
+threshold will not move the mark, even if the marked position
+becomes hidden.  The same threshold applies across all windows,
+and keyboard shortcuts make it easy to adjust it up and down.
+Also, the percent identity of the current plot block is shown in
+the mark indicator when applicable (the current plot block is
+either the marked one, or if a dotplot has a different secondary
+sequence, the corresponding projection from the same MAF block).
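+<p>
+In other words, a plot block's percent identity works out to the
+total number of matching columns divided by the total length of
+its gap-free segments.  A sketch of the computation (hypothetical
+Python, not Gmaj's own Java code; case-insensitive matching is
+assumed here):
+<pre>
+def percent_identity(segments):
+    """segments: (ref, sec) string pairs for the gap-free pieces
+    of one plot block."""
+    columns = matches = 0
+    for ref, sec in segments:
+        columns += len(ref)
+        matches += sum(r == s for r, s in zip(ref.upper(), sec.upper()))
+    return 100.0 * matches / columns if columns else 0.0
+
+# lengths 4 and 6, with 3 and 4 matches: (3+4)/(4+6) -> 70.0
+print(percent_identity([("ACGT", "ACGA"), ("TTTTTT", "TTTTCC")]))
+</pre>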
+<p>
+<dt>Underlays Box:
+<dd>
+Allows you to set a threshold for filtering the displayed
+underlays and highlights based on the optional score values you
+have assigned in the annotation files.  The GFF, GTF, and BED
+formats already include a score field, and the PipMaker-style
+underlay format has been extended to include one as well (see
+<a href="gmaj_input.html">Input Files for Gmaj</a>).  Some of
+these formats allow floating-point score values, but they will
+be rounded off to integers for comparison with the threshold.
+Missing scores are treated as the maximum possible value, so
+they will never be filtered out; however note that <code>0</code>
+(which is sometimes used to mean "no score") will not be changed,
+since Gmaj cannot distinguish this from a score that is really
+zero.  As with the % Identity box, the same threshold applies
+across all windows, and keyboard shortcuts make it easy to adjust
+it up and down.  Also, pointing to a particular underlay or
+highlight will show that annotation's score in the position
+indicator.
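+<p>
+The filtering rule can be summarized in a few lines (hypothetical
+Python; whether the comparison is inclusive is an assumption
+here):
+<pre>
+def passes(score, threshold):
+    if score is None:            # missing score: treated as maximum,
+        return True              # so it is never filtered out
+    return round(score) >= threshold   # floats rounded to integers
+</pre>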
+<p>
+<dt>Arrow Buttons:
+<dd>
+These buttons are located to the right of the zoom indicator.
+Clicking on one of them will move the zoom region in the
+indicated direction by half of its width or height.  The new
+region is added to your zoom history like any other zoom
+selection, so the Zoom - Back command will return to where you
+were as usual.
+<p>
+<dt>Block Buttons:
+<dd>
+When a mark is set (e.g. by clicking in a pip or dotplot), a row
+of buttons will appear to the right of the mark indicator showing
+the <a href="#numbering">block numbers</a> <a href="#cover"
+>covering</a> the marked position in the pip.  (If there is not
+enough room for all of the buttons, a scrollbar will appear;
+also the partition between this panel and the mark indicator is
+<a href="#dividers">draggable</a>.)  Clicking on one of the
+buttons causes the mark to move to that block (in the same pip),
+and the segment colors, text alignment, and mark indicator will
+be updated accordingly in all applicable windows.  The new marked
+position (<a href="#red">red</a> circle) will be as close as
+possible to the same coordinate in the reference sequence, but
+it may have to move slightly to avoid gaps.  This makes it
+theoretically possible, though rare, that the resulting block
+list (and therefore the row of buttons) may change.
+<p>
+<a name="category"></a>
+<dt>Category Checkboxes:
+<dd>
+These controls are located in a separate panel below the mark
+indicator, and allow you to show or hide several groups of
+alignment blocks en masse.  There is one checkbox for each of
+the alignment files in the input, and an extra one for the tagged
+blocks (whose label shows how many blocks are tagged); the colors
+of the labels correspond to the plot segments they control.  The
+tagged blocks are considered to be withdrawn from their files
+for this purpose, so all of the categories are disjoint.
+These settings apply across all windows, and as with the
+% identity threshold, hidden blocks are omitted from the plots
+and certain lists, but still exist otherwise.
+<!--
+in Gmaj's parlance they define the term <i>visible block</i>:
+a block is "visible" if it is not hidden by these checkboxes,
+even if it is not actually showing on the screen for some other
+reason (e.g. not in zoom region, below % identity threshold,
+in bottom half of pip, etc.).
+-->
+By default this panel only appears when it is
+relevant (i.e. if there is more than one alignment file, or you
+have used the tagging feature), but you can also show or hide it
+temporarily from the Options - Show dialog.
+<p>
+<dt>Dotplot Buttons:
+<dd>
+These buttons are located to the right of each pip's sequence
+label in the multi-pip window.  Clicking on one of them will
+open a dotplot window for that pair of reference and secondary
+sequences.  The zoom region will initially be translated from the
+current one to show the same blocks, and will thereafter operate
+independently.  The mark, however, is shared between the
+multi-pip window and all of its dotplots (see <a href="#state"
+>The Zoom and the Mark</a>).  If you already have a window for
+that dotplot, it will just be brought to the front unchanged.
+<p>
+<a name="dividers"></a>
+<dt>Draggable Dividers:
+<dd>
+Several of the panel dividers can be moved by dragging them with
+the mouse to adjust the amount of space allocated to the items
+on each side.  These include the vertical bars separating the
+left and right sides of the status indicator panels, and the
+horizontal bar separating the pips from the text alignment in a
+multi-pip window.  On most platforms Java draws these dividers
+with a pattern of little bumps to suggest a grip.  In Gmaj
+they also have a sticky feature that remembers if you moved
+them manually and keeps them at that position (until they are
+rebuilt due to a font change, etc.).  In sticky mode the divider
+appears pushed in, like a button; if you want to return to the
+default floating mode (where the divider is repositioned
+automatically as the panel content changes), just click on the
+pushed-in divider to release it.
+</dl>
+<p>
+
+<p class=hdr>
+<h3><a name="copy">Copying and Printing</a></h3>
+<p>
+Gmaj supports copy/paste via the system clipboard from most of
+its text panels and dialog boxes, using mouse selection followed
+by the standard keystrokes
+(<code>Ctrl-C</code>/<code>Ctrl-V</code> on Windows and Linux,
+<code>Cmd-C</code>/<code>Cmd-V</code> on Mac).  Some labels are
+not copyable, but their values generally are.  (Exception: with
+some versions of Java, all dialog text may be uncopyable in
+applet mode due to a <a href="gmaj_bugs.html#dialogcopy">bug</a>.)
+<p>
+In Gmaj's multi-pip and dotplot windows the text alignment, panel
+headers, and status indicators are copyable.  Clicking in any of
+these components (e.g. to sweep out a selection with the mouse)
+will transfer the keyboard focus to that component, as indicated
+by purple lines around it.  This is necessary for the Copy
+keystroke to work; however it means that Gmaj's other keyboard
+shortcuts will be disabled until the focus is restored, either by
+clicking somewhere else or by pressing the <code>Esc</code> key.
+Note that the mouse selection in the text alignment is
+rectangular (unlike the usual line-wrapped stream), and all of
+these components can be scrolled if necessary by dragging the
+mouse just outside their borders.
+<p>
+Gmaj does not currently have its own print capability.  The
+recommended way to record a particular Gmaj view is to use your
+operating system's "screenshot", "print screen", or "grab"
+facility to save an image of the window to a file, then adjust
+it as needed using image-editing software.  (Be careful with
+rescaling and format conversions, as these may degrade the
+image.)
+<p>
+To prevent the position indicator from changing when you move the
+mouse, hold down the <code>Ctrl</code> key.  This is useful both
+for copying the position indicator's contents and for taking
+screenshots.
+<p>
+
+<!-- <hr align=left noshade size=1 width="20%" color=black> -->
+<p class=hdr>
+<h3><a name="notes">Footnotes</a></h3>
+<p>
+<a name="red"></a>
+[1] 
+By default the circular mark and the selected block's plot
+segments are always red (or orange), regardless of the background
+color behind them, and similarly tagged blocks are always green.
+A setting on the Options menu can make these colors vary with
+the background (so they are not invisible against like-colored
+underlays), however this causes <a href="gmaj_bugs.html#xor"
+>patchy rendering</a> when used with Large Fonts.  These special
+blocks are drawn last, so they will not be obscured by ordinary
+ones.
+<p>
+<a name="numbering"></a>
+[2] 
+Blocks in the MAF files are numbered consecutively, starting
+with 0.  MAF files are also numbered starting with 0, in the
+order they are listed in the parameters file.  If there are
+several MAF files, they are concatenated into one big list of
+blocks, and the block numbers for the second file continue where
+the first left off.  However, Gmaj also records the relative
+block numbers within each file, and displays this information
+in the mark indicator and certain error messages in the form
+<code>maf#.block#</code>.
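+<p>
+For example, converting a global block number back to this
+<code>maf#.block#</code> form is simple arithmetic (a hypothetical
+Python sketch):
+<pre>
+def relative_number(global_n, file_sizes):
+    """file_sizes: block counts of the MAF files, in listed order."""
+    for maf, size in enumerate(file_sizes):
+        if size > global_n:
+            return "%d.%d" % (maf, global_n)
+        global_n -= size
+    raise IndexError("block number out of range")
+
+print(relative_number(7, [5, 4]))   # blocks 5..8 are file 1 -> "1.2"
+</pre>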
+<p>
+<a name="cover"></a>
+[3] 
+An alignment block is considered to cover a plot position if it
+contains rows for both of the plot's sequences and the position
+falls within the endpoints of the <b>reference</b> sequence's
+row (not necessarily the row for the other sequence, as this is
+a pip-oriented computation); there are no "holes" due to gaps.
+In order to appear in the row of block buttons for the mark or in
+the position indicator's block list for pips, a block must also
+be in a visible category (according to the <a href="#category"
+>category checkboxes</a>) and meet the % identity threshold (in
+the applicable plot).
+<p>
+
+<p class=vvlarge>
+<hr>
+<i>Cathy Riemer, June 2008</i>
+
+<p class=scrollspace>
+</body>
+</html>
diff --git a/static/gmaj/docs/gmaj_input.html b/static/gmaj/docs/gmaj_input.html
new file mode 100644
index 0000000..47162c5
--- /dev/null
+++ b/static/gmaj/docs/gmaj_input.html
@@ -0,0 +1,735 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+	"http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<title>Input Files for Gmaj</title>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+<meta http-equiv="Content-Style-Type" content="text/css">
+<link rel="stylesheet" type="text/css" href="gmaj.css">
+</head>
+<body>
+<p class=vvlarge>
+<h2>Input Files for Gmaj</h2>
+<p class=vvlarge>
+TABLE OF CONTENTS
+<p class=small>
+<ul class=notop>
+<li><a href="#intro">Introduction</a>
+<li><a href="#param">Parameters File</a>
+<li><a href="#zip">Compression and Bundling</a>
+<li><a href="#coord">Coordinate Systems</a>
+<li><a href="#align">Alignments</a>
+<li><a href="#exon">Exons</a>
+<li><a href="#repeat">Repeats</a>
+<li><a href="#link">Linkbars</a>
+<li><a href="#under">Underlays</a>
+<li><a href="#high">Highlights</a>
+<li><a href="#color">Color List</a>
+<li><a href="#generic">Generic Annotation Formats</a>
+</ul>
+<p class=vlarge>
+
+<p class=hdr>
+<h3><a name="intro">Introduction</a></h3>
+<p>
+This page describes the input files supported by Gmaj, and their
+formats.  Only the <a href="#align">alignment file</a> is
+required; the others are optional.  Except where noted, all
+information applies to both the stand-alone and applet modes of
+Gmaj.
+<p>
+For annotations, Gmaj supports two broad categories of file
+formats.  The original set of formats is essentially the same as
+those used by <a href="http://pipmaker.bx.psu.edu/pipmaker/"
+>PipMaker</a> and <a href="http://globin.bx.psu.edu/dist/laj/"
+>Laj</a>, where each destination for the data (exons panel, color
+underlays, etc.) has its own file format tailored for the needs of
+that display.  These files can be cumbersome to prepare manually,
+though PipMaker's associated utilities, such as
+<a href="http://pipmaker.bx.psu.edu/piphelper/">PipHelper</a> and
+the <a href="http://pipmaker.bx.psu.edu/pipmaker/tools.html"
+>PipTools</a>, can significantly reduce the burden.
+<p>
+However, since sequence annotations are increasingly becoming
+available in standardized formats from on-line resources such as
+the <a href="http://genome.ucsc.edu/cgi-bin/hgTables">UCSC Table
+Browser</a>, Gmaj can now accept some of these formats as well.
+These are referred to here as "generic" formats because they are
+not restricted to a particular biological data type or Gmaj
+display panel.
+<p>
+The PipMaker-style formats are described below in the sections for
+each panel, while the generic ones are discussed in a separate
+section, <a href="#generic">Generic Annotation Formats</a>.
+<p class=large>
+<center>
+<table width=55%>
+<tr>
+<td valign=top align=right><img class=lower src="hand14.gif">
+<!-- Pointing hand icon is from Clip Art Warehouse,
+	at http://www.clipart.co.uk/ -->
+</td>
+<td valign=top>
+<ul class="notop nobottom lessindent">
+<li>	<b>All files must consist solely of plain text ASCII
+	characters.</b>  (For example, no Word documents.)
+	<p class=small>
+<li>	<b>All <a href="#coord">coordinates</a> for PipMaker-style
+	annotations are 1-based, closed interval.</b>  Those
+	for generic annotations may be either 1-based or 0-based
+	and closed or half-open, depending on the format.
+</ul>
+</td>
+</tr>
+</table>
+</center>
+<p>
+
+<p class=hdr>
+<h3><a name="param">Parameters File</a></h3>
+<p>
+The annotation files are optional, but because in some alignments
+any of the sequences can be viewed as the reference sequence,
+there are potentially a large number of annotation files to
+provide, too many to type their names on the command line or
+paste them into a dialog box every time you want to view the data.
+For this reason, Gmaj uses a meta-level <b>parameters file</b>
+that lists the names of all the data files, plus a few other
+data-related options.  Then when running Gmaj, you only have to
+specify that one file name.  However, if you don't want to use
+any of these annotations or options, you can specify a single
+<a href="#align">alignment file</a> directly in place of a
+parameters file.
+<p>
+A sample parameters file that you can use as a template is
+provided at <code><a href="sample.gmaj">sample.gmaj</a></code>.
+It contains detailed comments at the bottom explaining the syntax
+and meaning of the parameters.
+<p>
+
+<p class=hdr>
+<h3><a name="zip">Compression and Bundling</a></h3>
+<p>
+Gmaj supports a "bundle" option, which allows you to collect and
+compress some or all of the data files into a single file in
+<code>.zip</code> or <code>.jar</code> format (not
+<code>.tar</code>, sorry).  This is especially useful for
+streamlining the applet's data download, but is also supported in
+stand-alone mode.  A few tips:
+<ul>
+<li>	If the <a href="#param">parameters file</a> is included in
+	the bundle it must be the first file in it, since Gmaj reads
+	the bundle sequentially and needs the parameters file to
+	process the others.  In this case, there is no need to
+	mention the parameters file on the command line or in the
+	applet tags; just specify the bundle.  But if the parameters
+	file is not in the bundle, specify both.
+	<p class=small>
+<li>	Data files in the bundle should be referred to within the
+	parameters file using their plain filenames, without paths,
+	and these must be unique.  Any data files outside the bundle
+	should be referred to normally, using the rules described in
+	<code><a href="sample.gmaj">sample.gmaj</a></code>.
+	<p class=small>
+<li>	Do not use filenames containing <code>/</code>,
+	<code>\</code>, or <code>:</code> in the bundle.  Gmaj
+	needs to remove the path that may have been added to each
+	name by the zip or jar program, and since it doesn't know
+	what platform that program was run on, it treats all of
+	these characters as path separators.
+	<p class=small>
+<li>	If you are not using a parameters file (i.e., you want to
+	specify the <a href="#align">alignment file</a> directly,
+	without any annotations or other data-related options),
+	then the alignment file must be listed in place of the
+	parameters file, not as a bundle (there's nothing else
+	to bundle with it anyway).
+</ul>
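+<p>
+For example, a bundle could be built with either the Info-ZIP
+<code>zip</code> tool or Java's <code>jar</code> tool (the file
+names here are hypothetical); both store entries in the order
+given on the command line, so the parameters file is listed first:
+<pre>
+     zip demo-bundle.zip demo.gmaj demo.maf demo.exons demo.underlays
+     jar cfM demo-bundle.zip demo.gmaj demo.maf demo.exons demo.underlays
+</pre>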
+<p>
+As an alternative to bundling, data files can be compressed
+individually in <code>.zip</code>, <code>.jar</code>, or
+<code>.gz</code> format; this gains the compact size for storage
+and transfer, but still requires overhead for multiple HTTP
+connections in applet mode.  The file name must end with the
+corresponding extension for the compression format to be
+recognized.  (Such files can also be included in the bundle
+if desired; though little if any additional compression is
+typically achieved, this may be more convenient than unzipping
+a large file just to bundle it.)
+<p>
+
+<p class=hdr>
+<h3><a name="coord">Coordinate Systems</a></h3>
+<p>
+If you supply any annotations for Gmaj to display, these files
+must all use position coordinates that refer to the same original
+sequences identified in the MAF <a href="#align">alignment files</a>
+(ignoring any display offsets specified in the <a href="#param"
+>parameters file</a>).  However, even though the MAF coordinates
+are 0-based, the PipMaker-style annotation files all use a
+1-based, closed-interval coordinate system (i.e., the first
+nucleotide in the sequence is called "1", and specified ranges
+include both endpoints).  This is for consistency with PipMaker,
+so the same files can be used with both programs, and the same
+tools can be used to prepare them.  Coordinates for generic
+annotations may be either 1-based or 0-based and closed or
+half-open, depending on the format, but Gmaj always adjusts
+them as needed (including the ones in the MAF files) to convert
+everything to a 1-based, closed-interval system for display.
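+<p>
+For instance, a MAF row's 0-based start and size map to displayed
+endpoints as follows (a sketch in hypothetical Python, ignoring
+strand issues and any display offsets):
+<pre>
+def maf_to_display(start, size):
+    """0-based start + size -> 1-based, closed-interval endpoints."""
+    return start + 1, start + size
+
+print(maf_to_display(0, 100))   # first 100 bases -> (1, 100)
+</pre>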
+<p>
+
+<p class=hdr>
+<h3><a name="align">Alignments</a></h3>
+<p>
+Gmaj is designed to display multiple-sequence alignments in
+<a href="http://genome.ucsc.edu/FAQ/FAQformat">MAF</a> format.
+It is especially suited for sequence-symmetric alignments from
+programs such as <a href="http://www.bx.psu.edu/miller_lab/"
+>TBA</a>, but can also display MAF files that have a fixed
+reference sequence.  (In the latter case it is a good idea to
+set the <code>refseq</code> field in your <a href="#param"
+>parameters file</a>, to prevent displaying the alignments with
+an inappropriate reference sequence.)  It is possible to display
+several alignment files simultaneously on the same plots, e.g.
+for comparing output from different alignment programs.
+<p>
+Gmaj normally requires that each sequence name appear at most
+once in each MAF block, i.e., that the values of the "src" field
+are unique across all of the <code>s</code> lines within the
+same block.  However, there is a special exception for the case
+of pairwise self-alignments: if all of the blocks have just two
+rows, then all of the sequence names can be the same.  In this
+case Gmaj distinguishes the rows in each block by internally
+adding a <code>~</code> suffix to the second row's sequence name;
+the <code>~</code> does not show in the main display, but you may
+occasionally see it in an error message.
+<p>
+The downside of this feature is that <b>sequence names in the MAF
+files must not end with <code>~</code></b>, even for non-self
+alignments.
+<p>
+
+<p class=hdr>
+<h3><a name="exon">Exons</a></h3>
+<p>
+Each of these files lists the locations of genes, exons, and
+coding regions in a particular reference sequence.  The exons
+and UTRs are displayed as black and gray boxes in a separate
+panel above the alignment plots.
+<p>
+In the PipMaker-style exons format, the directionality of a gene
+(<code>></code>, <code><</code>, or <code>|</code>), its
+start and end positions, and name should be on one line, followed
+by an optional line beginning with a <code>+</code> character that
+indicates the first and last nucleotides of the translated region
+(including the initiation codon, <i>Met</i>, and the stop codon).
+These are followed by lines specifying the start and end positions
+of each exon, which must be listed in order of increasing address
+even if the gene is on the reverse strand (<code><</code>).  By
+default Gmaj will supply exon numbers, but you can override this
+by specifying your own name or number for individual exons.  Blank
+lines are ignored, and you can put an optional title line at the
+top.  Thus, the file might begin as follows:
+<pre>
+     My favorite genomic region
+
+     < 100 800 XYZZY
+     + 150 750
+     100 200
+     600 800
+
+     > 1000 2000 Frobozz gene
+     1000 1200 exon 1
+     1400 1500 alt. spliced exon
+     1800 2000 exon 2
+
+     ... etc.
+</pre>
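+<p>
+Since the format is line-oriented, it is easy to read
+programmatically.  A minimal parser sketch (hypothetical Python,
+not Gmaj's own code; error handling omitted):
+<pre>
+def parse_exons(lines):
+    genes, gene = [], None
+    for line in lines:
+        fields = line.split()
+        if not fields:
+            continue                       # skip blank lines
+        if fields[0] in ('>', '<', '|'):   # gene line
+            gene = {'strand': fields[0],
+                    'start': int(fields[1]), 'end': int(fields[2]),
+                    'name': ' '.join(fields[3:]),
+                    'cds': None, 'exons': []}
+            genes.append(gene)
+        elif fields[0] == '+' and gene:    # translated region
+            gene['cds'] = (int(fields[1]), int(fields[2]))
+        elif gene and fields[0].isdigit(): # exon, with optional label
+            gene['exons'].append((int(fields[0]), int(fields[1]),
+                                  ' '.join(fields[2:]) or None))
+        # anything else (e.g. the title line) is ignored
+    return genes
+</pre>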
+<p>
+
+<p class=hdr>
+<h3><a name="repeat">Repeats</a></h3>
+<p>
+Each of these files lists interspersed repeats (and possibly other
+features such as CpG islands) in a particular reference sequence.
+These are displayed in a separate panel just below the exons,
+using the same shapes and shading as PipMaker if possible.
+<p>
+In the PipMaker-style repeats format, the first line identifies
+this as a simplified repeats file (as opposed to
+<a href="http://www.repeatmasker.org/">RepeatMasker</a> output,
+which Gmaj does not yet support).  Each subsequent line specifies
+the start, end, direction, and type of an individual feature.
+<pre>
+     %:repeats
+
+     1081 1364 Right Alu
+     1365 1405 Simple
+     ... etc.
+</pre>
+The allowed PipMaker types are:
+<code>Alu</code>, <code>B1</code>, <code>B2</code>,
+<code>SINE</code>, <code>LINE1</code>, <code>LINE2</code>,
+<code>MIR</code>, <code>LTR</code>, <code>DNA</code>,
+<code>RNA</code>, <code>Simple</code>, <code>CpG60</code>,
+<code>CpG75</code>, and <code>Other</code>.  Of these, all except
+<code>Simple</code>, <code>CpG60</code>, and <code>CpG75</code>
+require a direction (<code>Right</code> or <code>Left</code>).
+<p>
+
+<p class=hdr>
+<h3><a name="link">Linkbars</a></h3>
+<p>
+Each of these files contains reference annotations, i.e.,
+noteworthy regions in a particular reference sequence, which are
+drawn in a separate panel as colored bars.  Typically each bar
+has an associated URL pointing to a web site with more information
+about the region, but this is not required.  In applet mode Gmaj
+opens a new browser window to visit the linked site when the user
+clicks on a bar; in stand-alone mode Gmaj is not running within
+a web browser, so it just displays the URL for the user to visit
+manually via copy-and-paste.
+<p>
+The PipMaker-style format first defines various types of links
+and associates a color with each of them, then specifies the type,
+position, description, and URL for each annotated region.
+<pre>
+     # linkbars for part of the mouse MHC class II region
+
+     %define type
+     %name PubMed
+     %color Blue
+
+     %define type
+     %name LocusLink
+     %color Orange
+
+     %define annotation
+     %type PubMed
+     %range 1 2000
+     %label Yang et al. 1997.  Daxx, a novel Fas-binding protein...
+     %summary Yang, X., Khosravi-Far, R., Chang, H., and Baltimore, D. (1997).
+       Daxx, a novel Fas-binding protein that activates JNK and apoptosis.
+       Cell 89(7):1067-76.
+     %url http://www.ncbi.nlm.nih.gov:80/entrez/
+     query.fcgi?cmd=Retrieve&db=PubMed&list_uids=9215629&dopt=Abstract
+
+     ... etc.
+</pre>
+Here, for example, the first stanza requests that each feature
+subsequently identified as a PubMed entry be colored blue.
+The name must be a single word, perhaps containing underline
+characters (e.g., <code>Entry_in_GenBank</code>), and the color
+must come from Gmaj's <a href="#color">color list</a>.
+<p>
+The third stanza associates a PubMed link with positions
+1-2000 in this sequence.  The label should be kept fairly
+short, as it will be displayed on Gmaj's position indicator line
+when the user points at this linkbar.  The summary is optional;
+it is used only by PipMaker and will be ignored by Gmaj.  Also,
+while PipMaker allows several summary/URL pairs within a single
+annotation, Gmaj expects each field to occur at most once.  If
+Gmaj encounters extra URLs, it will just use the first one and
+display a warning message.
+<p>
+Note that summaries and URLs (but not labels) can be broken into
+several lines for convenience; the line breaks are removed when
+the file is read, but they are not replaced with spaces.  Thus
+a continuation line for a summary typically begins with a space
+to separate it from the last word of the previous line, while
+a URL continuation does not.
+<p>
+Also note that stanzas should be separated by blank lines, and
+lines beginning with a <code>#</code> character are comments
+that will be ignored.  The linkbars can appear in the file in
+any order, and several can overlap at the same position with no
+problem, since Gmaj will display them in multiple rows if
+necessary.  In PipMaker this format is called "annotations with
+hyperlinks".
+<p>
+
+<p class=hdr>
+<h3><a name="under">Underlays</a></h3>
+<p>
+Each of these files specifies underlays (colored bands) to be
+painted on a particular pairwise pip and its corresponding
+dotplot.  The bands are specified as regions in the reference
+sequence and are normally drawn vertically; however for a dotplot,
+Gmaj will also look to see if you have specified an underlay file
+for the transposed situation where the reference and secondary
+sequences are swapped, and if so, will draw those underlays as
+horizontal bands in the secondary sequence.
+<p>
+The PipMaker-style underlay format supported by Gmaj looks like
+this:
+<pre>
+     # partial underlays for the BTK region
+
+     LightYellow Gene
+     Green Exon
+     Red Strongly_conserved
+
+     35324 72009 (BTK gene) Gene
+     49781 49849 (exon 4) Exon
+     51403 51484 Exon
+     50350 50513 (conserved 84%) Strongly_conserved 84
+     52376 52603 (Kilroy was here) Strongly_conserved 92 +
+     ... etc.
+</pre>
+The first group of lines describes the intended meaning of the
+colors, while the second group specifies the location of each band.
+Colors must come from Gmaj's <a href="#color">color list</a>, but
+the meaning of each color can be any single word chosen by you.
+The text in parentheses is an optional label which will be
+displayed on Gmaj's position indicator line when the user points
+the mouse at that band.  The parentheses must be present if the
+label is, and the label itself cannot contain any additional
+parentheses.  The number following the color category is an
+optional integer score that can be used to interactively adjust
+which underlays are displayed; see "Underlays Box" in the
+Menus and Widgets section of <a href="gmaj_help.html"
+>Starting and Running Gmaj</a> for more information.  (The
+label and score are extra features not supported by PipMaker.)
+A <code>+</code> or <code>-</code> character at the end of a
+location line will paint just the upper or lower half of the band
+on the pip (but is ignored for dotplots).  This allows you to
+differentiate between the two strands, or to plot potentially
+overlapping features like gene predictions and database matches.
+<p>
+Note that if two bands overlap, the one that was specified last
+in the file appears "on top" and obscures the earlier one (except
+for the special <code><a href="#hatch">Hatch</a></code> color).
+Thus in this example, the green exons and red strongly conserved
+regions cover up parts of the long yellow band representing the
+gene.  As in the links file, lines beginning with a <code>#</code>
+character are comments that will be ignored.
+<p>
+
+<p class=hdr>
+<h3><a name="high">Highlights</a></h3>
+<p>
+Highlight files are analogous to the <a href="#under">underlay</a>
+files, but each of these specifies colored regions for a
+particular sequence in the text view, rather than for a plot.
+If you do not specify a highlight file for a particular sequence,
+Gmaj will automatically provide default highlights based on the
+<a href="#exon">exons</a> file (if you provided one).  These will
+use one color for whole genes, overlaid with different colors to
+indicate exons on the forward vs. reverse strand.  If the exons
+file specifies a gene's translated region, then the 5´ and
+3´ UTRs will be shaded using lighter colors.  These default
+highlights make it easy to examine the putative start/stop codons
+and splice junctions, as well as providing a visual connection
+between the graphical and text views.  But if for some reason you
+do not want any text highlights, you can suppress them by
+specifying an empty highlight file.
+<p>
+The PipMaker-style format for highlights is the same as for
+underlays, except that any <code>+</code> or <code>-</code>
+indicators will be ignored, and the <code>Hatch</code> color is
+not supported for highlights.  Just as with underlays, labels
+can be included which will be shown when the user points at
+the highlight, scores can be used to limit which entries are
+displayed, and highlights that are listed later in the file will
+cover up those that appear earlier.
+<p>
+
+<p class=hdr>
+<h3><a name="color">Color List</a></h3>
+<p>
+For Gmaj's PipMaker-style annotations, the available colors are:
+<pre>
+    Black   White        Clear
+    Gray    LightGray    DarkGray
+    Red     LightRed     DarkRed
+    Green   LightGreen   DarkGreen
+    Blue    LightBlue    DarkBlue
+    Yellow  LightYellow  DarkYellow
+    Pink    LightPink    DarkPink
+    Cyan    LightCyan    DarkCyan
+    Purple  LightPurple  DarkPurple
+    Orange  LightOrange  DarkOrange
+    Brown   LightBrown   DarkBrown
+</pre>
+These names are case-sensitive (i.e., capitalization matters).
+Not all of these are supported by PipMaker.  Also, be aware that
+the appearance of the colors may vary between PipMaker and Gmaj,
+and from one printer or monitor to the next.
+<p class=subhdr>
+<a name="hatch"><b><code>Hatch</code></b></a>
+<p>
+In addition to the regular colors listed above, Gmaj supports a
+special "color" for underlays called <code>Hatch</code>, which
+is drawn as a pattern of diagonal gray lines.  Normally if two
+underlays overlap, the one that was specified last in the file
+appears "on top" and obscures the earlier one.  However,
+<code>Hatch</code> underlays have the special property that they
+are always drawn after the other colors, and since the space
+between the diagonal lines is transparent, they allow the other
+colors to show through.  Currently <code>Hatch</code> is only
+supported for underlays, not for highlights or linkbars.
+<p>
+
+<p class=hdr>
+<h3><a name="generic">Generic Annotation Formats</a></h3>
+<p>
+The standardized generic formats currently supported by Gmaj
+include
+<a href="http://www.sanger.ac.uk/Software/formats/GFF/GFF_Spec.shtml"
+>GFF</a> (v1 & v2),
+<a href="http://genome.ucsc.edu/goldenPath/help/hgTracksHelp.html#GTF"
+>GTF</a>, and various flavors of
+<a href="http://genome.ucsc.edu/goldenPath/help/hgTracksHelp.html#BED"
+>BED</a> (including the full BED12 format, a.k.a. "gene BED").
+For details on these formats, please see the specifications at
+the above links; this document will mainly discuss their use
+by Gmaj.
+<p>
+These formats are all <b>tab-separated</b>, and despite their
+differences are similar enough that Gmaj can extract comparable
+fields and treat them more or less the same.  Note that Gmaj is
+not intended as a format validator: parsing is more lenient in
+some respects than the official format specifications, and Gmaj
+will ignore fields it has no use for.  Also, interpretation of
+these open-ended formats depends partly on what type of annotation
+is expected; e.g. if Gmaj is trying to read exons from a GFF v1
+file, it will assume that the group field is the gene name.  It
+will generally show warning messages to keep the user apprised
+of any such assumptions it is making (if these become too annoying
+they can be individually suppressed in the <a href="#param"
+>parameters file</a>; see <code><a href="sample.gmaj"
+>sample.gmaj</a></code> for details).  Because one of the main
+reasons for supporting these formats is to enable the use of
+annotation files obtained from public sources, Gmaj tries not to
+balk at anomalies that are probably not the user's fault, and
+when practical will simply skip questionable items with a warning
+message.  Each type of message will generally be displayed only
+once, and not repeated for every item with the same problem.
+<p>
+<p class=subhdr>
+<a name="fileext"><b>Filename Extensions</b></a>
+<p>
+In order to distinguish generic files from PipMaker-style ones
+and handle them appropriately, Gmaj requires that files in
+generic formats have names ending with one of a recognized
+set of extensions.
+The default list is <code>.gff</code>, <code>.gtf</code>,
+<code>.bed</code>, <code>.ct</code>, and <code>.trk</code>, but
+this can be customized (see <code><a href="sample.gmaj"
+>sample.gmaj</a></code>).
+<p>
+<p class=subhdr>
+<a name="quote"><b>Quoting</b></a>
+<p>
+Some of the generic formats require text values to be enclosed
+in double quotes (<code>" "</code>).  Even when not strictly
+required it is usually a good idea to do so, especially if the
+value contains spaces.  The official specifications generally
+don't say what to do if a value contains embedded quote
+characters, but Gmaj supports a rudimentary mechanism for
+escaping them with a backslash (<code>\</code>).  However it
+does not provide for escaping the backslash: quoted values
+should not end with <code>\</code> (insert a space before the
+final quote if necessary).
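+<p>
+For example (hypothetical attribute values):
+<pre>
+     note "a \"quoted\" word inside a value"
+     note "ends with a backslash \ "
+</pre>
+The space before the final quote in the second value keeps the
+backslash from escaping it.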
+<p>
+<p class=subhdr>
+<a name="empty"><b>Empty Fields</b></a>
+<p>
+When reading the generic formats, Gmaj treats two adjacent tab
+characters as an empty field.  However, your files will be easier
+for humans to read if you do not leave fields completely empty.
+Gmaj recognizes a value of <code>.</code> (the dot character)
+to mean "unspecified" for fields such as strand, score, feature,
+and color, in some cases even when the official formats don't.
+For instance, GFF v2 explicitly calls for using <code>.</code>
+when there is no score, but Gmaj allows you to do this with the
+other generic formats as well, in order to distinguish between
+"no score" and a score that is truly zero.  For colors, in
+addition to <code>.</code> Gmaj also interprets <code>0</code>
+to mean "unspecified", in keeping with examples at UCSC.
+<p>
+<p class=subhdr>
+<a name="gencoord"><b>Coordinates</b></a>
+<p>
+The GFF and GTF formats use 1-based, closed-interval coordinates
+(i.e., sequence numbering starts with "1", and specified ranges
+include both endpoints), while BED uses a 0-based, half-open
+system (the first nucleotide of the sequence is numbered "0",
+and the ending position is not included in the region).  For all
+of these formats, positions are given relative to the beginning
+of the named sequence regardless of which strand the feature is
+on (unlike MAF), and <code>start</code> must be less than or
+equal to <code>end</code>.
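+<p>
+For example, the first 100 bases of <code>chr1</code> would be
+written with these coordinates (hypothetical lines; real fields
+are tab-separated):
+<pre>
+     GFF/GTF:   chr1  source  exon  1  100  .  +  .  gene "G1"
+     BED:       chr1  0  100  feature1
+</pre>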
+<p>
+<p class=subhdr>
+<a name="gffconv"><b>GFF Conventions</b></a>
+<p>
+BED format is relatively fixed in how its fields are used, but
+GFF and GTF are more variable and require additional conventions
+for most effective use with Gmaj.  In particular, the values of
+the "feature" field and the optional "attributes" affect how Gmaj
+will interpret and display an item.
+<p>
+Values of the feature field that are recognized for special
+treatment include:
+<p class=tiny>
+<ul class="notop nobottom">
+<li>	<code>gene</code> or values starting with <code>gene_</code>
+<li>	<code>exon</code> or values starting with <code>exon_</code>
+<li>	<code>start_codon</code>, <code>str_codon</code>,
+	<code>stop_codon</code>, <code>stp_codon</code>, or
+	<code>cds</code>
+<li>	<code>repeatmasker</code> or any of the 
+	<a href="#repeat">PipMaker repeat or CpG types</a>
+</ul>
+<p class=tiny>
+Of these, only the PipMaker types are case-sensitive.
+<p>
+For GFF v2 and GTF, the currently recognized attribute tags are:
+<p class=tiny>
+<ul class="notop nobottom">
+<li>	<code>gene</code> or <code>gene_id</code>: the name of the
+	gene, e.g. for grouping exons (<code>transcript_id</code> is
+	ignored)
+<li>	<code>name</code>: an optional name for this individual item,
+	e.g. for an exon label
+<li>	<code>sequence</code> (when feature is
+	<code>repeatmasker</code>): the name/class/family of the
+	repeat, e.g. <code>AluJb/SINE/Alu</code>
+<li>	<code>color</code>: a <a href="#gencolor">color</a>
+	specification in UCSC format, e.g. <code>0,0,255</code>
+<li>	<code>url</code> or <code>ucsc_id</code>: the URL for
+	linkbars; <code>$$</code> will be replaced with the value of
+	<code>name</code>
+</ul>
+<p class=tiny>
+These keywords are not case-sensitive, but they cannot have
+multiple values.
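+<p>
+Putting these conventions together, a hypothetical GFF v2 line
+for the Frobozz example from the <a href="#exon">Exons</a>
+section might look like this, with tabs separating the first
+nine fields:
+<pre>
+     chr1  example  exon  1800  2000  .  +  .  gene_id "Frobozz"; name "exon 2"; color "0,0,255"
+</pre>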
+<p>
+<p class=subhdr>
+<a name="custom"><b>Custom Tracks</b></a>
+<p>
+Along with the basic formats listed above, Gmaj also supports UCSC
+<a href="http://genome.ucsc.edu/goldenPath/help/hgTracksHelp.html#CustomTracks"
+>custom track</a> headers.
+<a href="http://genome.ucsc.edu/goldenPath/help/hgTracksHelp.html#TRACK"
+>Track lines</a> can specify certain settings for an entire
+track; currently <code><a href="#gencolor">color</a></code>,
+<code><a href="#gencolor">itemRgb</a></code>, <code>offset</code>,
+and <code>url</code> are supported.  They also allow several
+tracks (even in mixed formats) to be combined in a single file.
+Gmaj does not currently provide a way to use just one particular
+track from such a file (it will be treated as one big bag of
+annotations), but lines in unsupported formats such as
+<a href="http://genome.ucsc.edu/goldenPath/help/wiggle.html"
+>WIG</a> are gracefully skipped.
+<a href="http://genome.ucsc.edu/goldenPath/help/hgTracksHelp.html#lines"
+>Browser lines</a> are also skipped; Gmaj's initial zoom position
+is controlled by command-line or applet parameters rather than by
+individual annotation files.
+<p>
+<p class=subhdr>
+<a name="multiseq"><b>Multiple Sequences</b></a>
+<p>
+Generic files can also contain annotations for several sequences,
+because unlike the PipMaker-style formats, they all have a
+"seqname" or "chrom" field that Gmaj can use to select the
+appropriate lines.  Ideally Gmaj expects this field to match
+the sequence name from the <a href="#align">alignment files</a>,
+but has two ways to deal with exceptions.  If there is only one
+seqname in the annotation file, then Gmaj will go ahead and use
+it, but will display a warning (unless the mismatch can be fixed
+by prepending the organism name, or the organism name plus
+<code>chr</code>, to the annotation seqname).  But if the file
+has annotations for several sequences and some don't match the
+alignment files, you need to tell Gmaj which is which by adding
+an alias in the <a href="#param">parameters file</a> (see
+<code><a href="sample.gmaj">sample.gmaj</a></code>).
+<p>
+<p class=subhdr>
+<a name="reuse"><b>Reusing Files</b></a>
+<p>
+One of the advantages of using generic formats is that files can
+be reused in multiple panels without reformatting, e.g. as both
+exons and underlays.  Normally linkbars, underlays, and text
+highlights are simply handled as arbitrary regions of a specified
+color, since they could represent any type of biological feature.
+However, you can ask Gmaj to interpret them as exons or repeats
+by adding a type hint in the <a href="#param">parameters file</a>
+(see <code><a href="sample.gmaj">sample.gmaj</a></code>).  Note
+that currently this will also cause any <a href="#gencolor"
+>specified colors</a> in that file to be overridden with Gmaj's
+defaults.
+<p>
+Combining several biological types of annotations (e.g. exons
+and repeats) in one file is possible, but not recommended.  Gmaj
+will try to skip lines that are not appropriate for the type it
+is seeking, but it may draw more than you want.
+<p>
+<p class=subhdr>
+<a name="cds"><b>Coding Sequence</b></a>
+<p>
+Currently Gmaj has no special support for multiple transcripts.
+When inferring UTRs, all of the CDS-related items for a single
+gene name are combined, and the interval from the lowest
+coordinate to the highest is used as the CDS.  Also, some of the
+formats' rules specify whether or not the initiation and stop
+codons should be included in the CDS, but Gmaj does not make
+adjustments to compensate for that; instead it simply includes
+all of the given endpoints in the CDS.
+<!-- and leaves it up to the user to interpret the display based
+on the convention used in the files he/she provided.  [the user
+does not supply files for applets] -->
+<p>
+<p class=subhdr>
+<a name="gencolor"><b>Colors</b></a>
+<p>
+Colors can be specified for individual annotation lines via the
+<code>itemRgb</code> field (for BED) or a <code>color</code>
+attribute (for GFF v2 or GTF).  However, for <a href="#custom"
+>custom tracks</a>, these are governed by the track line's
+<code>itemRgb</code> attribute, which defaults to off per the
+UCSC specification.  Thus if you have track lines and want to
+use the per-item colors, you need to include
+<code>itemRgb=On</code> in the track attributes.
+<p>
+Track lines can also have a <code>color</code> attribute for
+the entire track, which will be used if <code>itemRgb</code> is
+off, or if an individual item does not have its own color.
+However in a rare break from the UCSC specification, Gmaj does
+not use black as the default if the track color is unspecified
+(black underlays and highlights just don't work with black plots
+and text).  Instead it uses its own default colors, which for
+genes/exons are the same as the colors for <a href="#high"
+>default highlights</a>, or light gray for other annotations.
+Note that these defaults will also override your colors when
+<a href="#reuse">type hints</a> are used.
+<p>
+All of the above-mentioned color values are specified in UCSC
+format, which consists of three comma-separated RGB values from
+0-255 (e.g. <code>0,0,255</code>).
+<p>
+<p class=subhdr>
+<a name="sort"><b>Sorting</b></a>
+<p>
+The order of the lines is not supposed to matter in these generic
+formats, but for most of the Gmaj panels it does matter:  exons
+need to be grouped by gene and ordered by position so UTRs can be
+inferred and exon numbers assigned, early underlays are covered
+up by later ones, etc.  Gmaj solves this problem by sorting the
+data before it is displayed.  Exons are sorted first by gene name
+in ascending order, and then within each gene by start position
+(ascending) and lastly in case of a tie, by end position
+(descending).  All other annotation types are sorted first by
+length in descending order, and then in case of a tie by start
+position (ascending).  This usually produces a reasonable display,
+but if you need direct control of the order, you can use the
+PipMaker-style formats instead.
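+<p>
+Expressed as (hypothetical) Python sort keys, the two orderings
+look like this:
+<pre>
+from collections import namedtuple
+Ann = namedtuple('Ann', 'gene start end')
+
+exons = [Ann('b', 5, 9), Ann('a', 5, 20), Ann('a', 5, 9)]
+exons.sort(key=lambda e: (e.gene, e.start, -e.end))
+# gene name, then start, then descending end:
+#   ('a',5,20), ('a',5,9), ('b',5,9)
+
+annots = [Ann(None, 10, 12), Ann(None, 1, 50)]
+annots.sort(key=lambda a: (a.start - a.end, a.start))
+# longest first (start - end is most negative), ties by start:
+#   (1,50), (10,12)
+</pre>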
+<p>
+
+<p class=vvlarge>
+<hr>
+<i>Cathy Riemer, June 2008</i>
+
+<p class=scrollspace>
+</body>
+</html>
diff --git a/static/gmaj/docs/gmaj_install.html b/static/gmaj/docs/gmaj_install.html
new file mode 100644
index 0000000..b015d34
--- /dev/null
+++ b/static/gmaj/docs/gmaj_install.html
@@ -0,0 +1,187 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+	"http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<title>Installing Gmaj</title>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+<meta http-equiv="Content-Style-Type" content="text/css">
+<link rel="stylesheet" type="text/css" href="gmaj.css">
+</head>
+<body>
+<p class=vvlarge>
+<h2>Installing Gmaj</h2>
+<p class=vvlarge>
+TABLE OF CONTENTS
+<p class=small>
+<ul class=notop>
+<li><a href="#intro">Introduction</a>
+<li><a href="#install">Installation</a>
+<li><a href="#data">Data Files</a>
+<li><a href="#page">Web Page</a>
+</ul>
+<p class=vlarge>
+
+<h3><a name="intro">Introduction</a></h3>
+<p>
+Gmaj can be run in two different modes: as a stand-alone
+application (for viewing local data files yourself) or as an
+applet over the world-wide web (to display your data on a server
+for viewing by others).  Both forms of the program are
+distributed together, so the initial download and unpacking
+instructions are the same.  Setting it up as an applet, however,
+requires some additional steps: ensuring that Gmaj and the data
+files you want to display are accessible to your web server, and
+building a web page to run the applet.
+<p>
+
+<h3><a name="install">Installation</a></h3>
+<p>
+Gmaj is available for download as a compressed zip archive,
+<code><a href="http://globin.bx.psu.edu/dist/gmaj/gmaj.zip"
+>gmaj.zip</a></code>.  This was created with the Java jar tool,
+but the format is compatible with PKUnzip and many other unzip
+programs.  Unzipping the archive will produce
+<code>gmaj.jar</code> (a jar file containing the program itself)
+and a <code>docs</code> subdirectory containing some
+documentation files in HTML format.  If your unzipper program
+does not preserve the directory structure and complete file
+names from the archive, you may need to move and/or rename the
+documentation files manually in order for the "Help - Manual"
+function to work.  Note that the <code>gmaj.jar</code> file does
+not need a second round of unzipping -- Java will access it
+"as is".  If you are setting up Gmaj as an applet, be sure to
+unzip the archive in a directory/folder that will be accessible
+to your web server, e.g., a new <code>gmaj</code> directory
+somewhere in the server's document space.
+<p>
+If you are running Gmaj in stand-alone mode, you will also need
+to have Java installed on your computer.  For applets, the
+server does not need Java but the end user does; you may want to
+mention this on your Gmaj web page.  In both cases <b>Java 1.3
+or higher</b> is required, and for best compatibility
+<a href="http://java.com/en/download/manual.jsp">Sun's JRE</a>
+(or JDK) is recommended.
+<p>
+For stand-alone mode, that's all there is to the installation;
+you will specify different start-up parameters for Gmaj each time
+you run it (see <a href="gmaj_help.html"
+>Starting and Running Gmaj</a>).  The remaining sections on this
+page apply only to applet setup.
+<p>
+
+<h3><a name="data">Data Files</a></h3>
+<p>
+In addition to the alignment data, Gmaj can display several
+kinds of annotations, including genes/exons, repeats, linkbars,
+color underlays, text highlights, and reconstruction scores for
+ancestral sequences, with a meta-data parameters file to tie
+them all together.  For detailed descriptions of these files and
+their format requirements, please see <a href="gmaj_input.html"
+>Input Files for Gmaj</a>.
+<p>
+When setting up Gmaj as an applet, the data files must be
+accessible to your web server.  Also, due to Java security
+restrictions, they must all be located on the same server as the
+<code>gmaj.jar</code> file, because an applet is normally only
+allowed to contact the same server it was loaded from.  We find
+it convenient to group the files for each invocation (e.g., each
+genomic region) in a separate subdirectory of the
+<code>gmaj</code> directory.  It is also possible to bundle them
+into a single zipped data file for each invocation, which eases
+both storage requirements and download time (discussed further
+in <a href="gmaj_input.html">Input Files for Gmaj</a>).
+<p>
+
+<h3><a name="page">Web Page</a></h3>
+<p>
+The last step in setting up the applet is to create a web page on
+your server that invokes it with the appropriate parameters for
+loading your data files.  The applet normally appears as a labeled
+button that opens a Gmaj window when the user clicks on it; thus
+you can have several buttons on the same page, each set up to
+display a different set of data.  The basic format of the HTML
+code looks like this:
+
+<blockquote>
+<pre>
+&lt;applet code="edu.psu.bx.gmaj.MajApplet.class"
+        archive="gmaj.jar"
+        width="200" height="30"&gt;
+&lt;param name=paramfile   value="/java/gmaj/alpha/demo.gmaj"&gt;
+&lt;param name=bundle      value="/java/gmaj/alpha/demo.zip"&gt;
+&lt;param name=buttonlabel value="Alpha-globin"&gt;
+&lt;param name=nobutton    value="false"&gt;
+&lt;param name=initzoom    value="mouse 110000 147000"&gt;
+&lt;param name=posturl     value="/cgi-bin/save-posted-file.pl"&gt;
+&lt;param name=urlpause    value="100"&gt;
+&lt;param name=debug       value="false"&gt;
+<i>Your browser is not responding to the &lt;applet&gt; tag.</i>
+&lt;/applet&gt;
+</pre>
+</blockquote>
+
+This particular fragment is based on the alpha-globin example
+from our server; naturally you need to replace the values with
+your own file URLs, button label, etc.  A few things to note:
+<ul>
+<li>	If the <code>gmaj.jar</code> file is not in the same
+	directory as your web page, you'll need to supply the path
+	to it in the <code>archive</code> attribute.
+<li>	The <code>width</code> and <code>height</code> attributes
+	are for the button, not the Gmaj windows.
+<li>	You can specify either or both of the first two
+	<code><param name=...</code> lines
+	(<code>paramfile</code> and <code>bundle</code>); the
+	others are optional.
+<li>	If the <code>nobutton</code> parameter is set to
+	<code>"true"</code>, Gmaj will proceed to open its window
+	immediately instead of displaying a start button.
+<li>	The <code>initzoom</code> parameter specifies an initial
+	zoom setting to be applied when the window opens.  The user
+	can still invoke the Unzoom or Set Zoom features
+	interactively to see the entire sequence range.  The
+	sequence name must match one of the names from the alignment
+	file(s), and the endpoints must include the offset (if any)
+	for that sequence from the parameters file.  To specify the
+	reference sequence without a zoom region, use <code>-1</code>
+	for both endpoints.
+<li>	The <code>posturl</code> parameter designates a URL on your
+	server where exported alignments should be sent.  By default
+	the Export feature is not available in applet mode, because
+	applets generally can't write to the user's local disk due
+	to security restrictions.  However, by specifying this
+	parameter you can enable the applet to send the exported data
+	to your server instead (typically a CGI script).  The output
+	is sent via an HTTP POST request using the MIME protocol for
+	web forms; currently for applets the export file format is
+	always MAF, and the filename is always
+	<code>Gmaj_output.maf</code>.
+<pre class=smallfont>
+  Content-Type: multipart/form-data; boundary=______AaB03x
+
+  --______AaB03x
+  Content-Disposition: form-data; name=file_data; filename=Gmaj_output.maf
+  Content-Type: application/octet-stream
+
+  [MAF file contents, in plain ASCII with platform-dependent line breaks]
+
+  --______AaB03x--
+</pre>
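+	(For illustration, a minimal sketch of such a receiving
+	script appears after this list.)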
+<li>	The <code>urlpause</code> parameter specifies how many
+	milliseconds the program should pause before retrieving each
+	file from a URL, in order to avoid overloading your server.
+<li>	If the <code>debug</code> parameter is set to
+	<code>"true"</code>, Gmaj will print a few extra warning
+	messages in the browser's Java console if certain problems
+	occur.  Normally you won't need this, as it is mainly for
+	development purposes.
+<li>	To create several buttons, just repeat this entire block of
+	code (with new parameter values, of course).
+</ul>
+<p>
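+For illustration only, here is a minimal sketch of a server-side
+script that could receive such a POST and save the exported file.
+It uses Python's standard <code>cgi</code> module (present in
+older Python versions, removed in very recent ones); the field
+name <code>file_data</code> and default filename
+<code>Gmaj_output.maf</code> come from the request format shown
+above, while the save directory is a made-up placeholder and a
+real script would need to validate its input:
+<blockquote>
+<pre>
+#!/usr/bin/env python
+# Illustrative sketch only: save a MAF file posted by the Gmaj
+# applet.  The upload directory is a placeholder; adjust it and
+# add input validation before deploying anything like this.
+import cgi
+import os
+
+form = cgi.FieldStorage()
+item = form["file_data"]        # field name used by Gmaj
+name = os.path.basename(item.filename or "Gmaj_output.maf")
+
+with open(os.path.join("/tmp/gmaj-exports", name), "wb") as out:
+    out.write(item.file.read())  # MAF contents, plain ASCII
+
+# Acknowledge the upload so the applet gets a normal response.
+print("Content-Type: text/plain")
+print("")
+print("Saved " + name)
+</pre>
+</blockquote>
+<p>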
+
+<p class=vvlarge>
+<hr>
+<i>Cathy Riemer, June 2008</i>
+</body>
+</html>
diff --git a/static/gmaj/docs/gmaj_news.html b/static/gmaj/docs/gmaj_news.html
new file mode 100644
index 0000000..d596467
--- /dev/null
+++ b/static/gmaj/docs/gmaj_news.html
@@ -0,0 +1,525 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+	"http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<title>What's New in Gmaj</title>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+<meta http-equiv="Content-Style-Type" content="text/css">
+<link rel="stylesheet" type="text/css" href="gmaj.css">
+</head>
+<body>
+<p class=vvlarge>
+<h2>What's New in Gmaj</h2>
+<p class=vvlarge>
+<dl>
+<dt><b>2008-Jun-30</b><br>
+<dd>	<ul>
+	<li>	Gmaj can display several MAF files simultaneously on the same plots,
+		e.g. for comparing output from different alignment programs.
+	<li>	A new Tags menu allows the user to tag particular blocks for special
+		treatment.  Tagged blocks are conceptually separated from their MAFs
+		and can be displayed, hidden, or exported as a group.
+	<li>	There are many new Export options, including MAF format and exporting
+		only the tagged blocks.  Export is now possible in applet mode, by
+		posting the blocks to an administrator-designated URL (MAF format only).
+		The Export dialog remembers previous selections if you use it repeatedly.
+	<li>	The parameters file has a new option <code>skipotherseq</code>, which
+		causes MAF sequences that aren't mentioned to be ignored (handy for
+		reducing the number of pips and saving memory).
+	<li>	Aliases can be given for each <code>seqname</code> in the parameters
+		file, e.g. if two MAF files use different names for the same sequence.
+	<li>	A new menu item Help - Sequence Summary reports the aligning extents
+		for all sequences (useful when fetching/trimming annotation files).
+	<li>	The long-standing <i>Scrollbar obscures block buttons</i> bug has been
+		fixed, with additional usability enhancements such as remembering
+		manual divider placement (click divider to release).
+	<li>	Minor fixes and improvements:
+		<ul>
+		<li>	The <code>reconseq</code> field in the parameters file has been
+			renamed to <code>reconorg</code> (because it specifies an organism
+			rather than a single sequence); the old <code>reconseq</code>
+			keyword is deprecated but still supported for the time being.
+			The <code>seqfile</code> keyword from pre-MAF days is no longer
+			supported.
+		<li>	Tiny mouse drags in plots are assumed to be accidental and treated
+			as clicks, to avoid the annoying message "Zoom region can't be
+			smaller than ... pixels".
+		<li>	Options - Mark Always Red is now the default, due to the
+			<a href="gmaj_bugs.html#xor">rendering issue with XOR</a>.
+		<li>	The text alignment omits rows below the % identity threshold.
+		<li>	The row of block buttons and the position indicator's block list
+			only include blocks that are in visible categories (according to
+			the new MAF/Tagged checkboxes) and meet the % identity threshold.
+		<li>	When self-alignments include the trivial block, it is typically
+			very slow to load and manipulate in the text panel due to its
+			huge size.  Gmaj requests confirmation before moving the mark to
+			any block that covers the entire aligning extent of the reference
+			sequence, but then remembers if you say OK and only shows the
+			busy cursor thereafter.
+		<li>	The bundle file can contain pre-compressed files.
+		<li>	Several minor bug fixes and performance enhancements.
+<!--
+		<li>	Bugfix: some settings and state values were inappropriately shared
+			among applet invocations.
+		<li>	Bugfix: opening an applet for a pairwise dataset no longer distorts
+			the initial multipip size for other applets.
+		<li>	Bugfix: the text view must not be scrolled if it's not displayed.
+		<li>	Bugfix: race condition when painting "Please wait" dialog is reduced.
+-->
+		<li>	Improved error checking and messages.
+		<li>	Tweaks in wording, punctuation, spacing, dialog title, etc.
+		</ul>
+	</ul>
+<p>
+<dt><b>2008-Jan-08</b><br>
+<dd>	<ul>
+	<li>	Special treatment for pairwise alignments:
+		<ul>
+		<li>	The dotplot is displayed immediately.
+		<li>	Self-alignments are allowed to use the same name for both sequences.
+		</ul>
+	<li>	Filtering for underlays and highlights, based on score:
+		<ul>
+		<li>	Scores are obtained from the annotation files.
+		<li>	A new text box widget (with keyboard shortcuts) allows you to set the
+			display threshold.
+		<li>	Pointing at an underlay/highlight displays its score on the location
+			status line.
+		</ul>
+	<li>	The text alignment, headers, and status lines are now selectable for copying,
+		and for the text alignment the selection is rectangular.
+	<li>	On a Mac, Gmaj uses standard key bindings (<code>Cmd-C</code>,
+		<code>Cmd-V</code>, etc.).
+	<li>	Holding down the <code>Ctrl</code> key (on any platform) prevents the
+		position indicator from changing; this is useful for copying it and for
+		screenshots.
+	<li>	Bugfixes:
+		<ul>
+		<li>	Empty fields in annotation files were not accepted due to improper
+			handling.
+		<li>	When loading a new dataset with File - Open, the old windows were not
+			closed properly.
+		<li>	Descenders of pip header characters were clipped off.
+		</ul>
+	<li>	Minor adjustments to behavior, appearance, and messages, including:
+		<ul>
+		<li>	Sequence names in the MAF cannot end with <code>~</code> (which is used
+			internally for self-alignments).
+		<li>	The first click in an inactive window just activates it.
+		</ul>
+	<li>	Updated and enhanced documentation.
+	</ul>
+<p>
+<dt><b>2007-Jul-26</b><br>
+<dd>	<ul>
+	<li>	Gmaj can now display dotplots in addition to the main multi-pip views,
+		with color underlays for both sequences.
+		<ul>
+		<li>	Dotplots are opened by clicking on buttons in the pip headers.
+		<li>	Each dotplot opens in a separate window, similar to the multi-pip
+			view except it has pip, dotplot, and text panels for two sequences
+			only.
+		<li>	Dotplot windows share the same reference sequence and mark with
+			their parent multi-pip window but have independent zoom, and close
+			automatically when the parent does.
+		<li>	There is only one mark for each reference sequence, so the circle
+			will not appear in dotplots having a different secondary sequence
+			than the mark.
+		</ul>
+	<li>	Gmaj can now read annotation data in
+		<a href="http://www.sanger.ac.uk/Software/formats/GFF/GFF_Spec.shtml"
+		>GFF</a> (v1 &amp; v2),
+		<a href="http://genome.ucsc.edu/goldenPath/help/hgTracksHelp.html#GTF"
+		>GTF</a>, and
+		<a href="http://genome.ucsc.edu/goldenPath/help/hgTracksHelp.html#BED"
+		>BED</a> formats in addition to its previous PipMaker-style formats.
+		<ul>
+		<li>	UCSC
+			<a href="http://genome.ucsc.edu/goldenPath/help/hgTracksHelp.html#CustomTracks"
+			>custom track</a> headers are supported, with multiple tracks and
+			sequence names allowed in a single file.
+		<li>	There is currently no way to use just one particular track from a
+			file, but lines in
+			<a href="http://genome.ucsc.edu/goldenPath/help/wiggle.html"
+			>WIG</a> or other unrecognized formats are skipped.
+		<li>	Files in these generic formats can be reused in multiple panels
+			(e.g. as both exons and underlays).
+		<li>	A new <code>tabext</code> field and file specification modifiers
+			(seqname alias and type hint) in the parameters file facilitate
+			interpretation of these formats.
+		<li>	Colors can be specified using UCSC-style RGB triplets (either
+			per-line or per-track), or Gmaj provides defaults.
+		<li>	Currently there is no special support for multiple transcripts;
+			when inferring UTRs, the largest bounding range of given CDS data
+			is used.
+		</ul>
+	<li>	A new control in the menu bar allows the user to limit the displayed
+		alignments by their average % identity.
+		<ul>
+		<li>	Keyboard shortcuts make it easy to move the threshold up and down.
+		<li>	The value for the current plot block is displayed with the mark
+			information when applicable.
+		</ul>
+	<li>	If the alignment file contains per-nucleotide reconstruction scores
+		for an inferred ancestral sequence, they can be displayed as a pair of
+		bar graphs, binned according to the current zoom region and panel size.
+		<ul>
+		<li>	The scores are transformed via simple linear scaling, and should
+			not be interpreted as probabilities.
+		<li>	A new <code>reconseq</code> field in the parameters file
+			identifies the organism for the scores.
+		</ul>
+	<li>	Miscellaneous:
+		<ul>
+		<li>	"Unsupported MAF version" is now a warning instead of a fatal
+			error.
+		<li>	A new <code>nowarn</code> field in the parameters file suppresses
+			certain warnings/errors using a keyword shown in the message.
+		<li>	When opening a new window, the current zoom is translated to a
+			roughly equivalent one in the new sequence(s).
+		<li>	Sequence choices in the Show dialog affect text rows and dotplots
+			in addition to pips.
+		<li>	The default highlights built from exons now include the genes
+			(light yellow).
+		<li>	The divider between the position indicator and the zoom indicator
+			is draggable.
+		<li>	Plot segments are drawn thicker in Large Fonts mode.
+		<li>	Plots that are completely empty have a gray background.
+		<li>	The Set Zoom dialog accepts a blank box to mean "leave unchanged".
+		<li>	Tabbing is easier in input dialogs: labels are skipped unless the
+			mouse is pointing at them.
+		<li>	Note that "<code>=</code>" is now required for settings in the
+			parameters file (except <code>seq N:</code> lines), and
+			lines with empty values (e.g. unused template items) are allowed
+			and skipped.
+		</ul>
+<!--
+	<li>	Bug fixes:
+		<ul>
+		<li>	Linkbar URLs are supposed to be optional; missing ones are now
+			handled gracefully.
+		<li>	Entries in the parameters file for particular-pip underlays were
+			not recognized properly if the underlay file was bundled.
+		<li>	Attempting to display a reference sequence that had no alignments
+			would cause a Java error.
+		<li>	On some platforms, rebuilding all of the Gmaj windows (e.g. when
+			Options settings were changed) would leave the keyboard focus in
+			the wrong window.
+		</ul>
+-->
+	<li>	Various minor bug fixes, and improvements in appearance, behavior,
+		messages, efficiency, and robustness.
+	<li>	Updated documentation.
+		<ul>
+		<li>	Terminology: the optional colored clickable bars located above
+			the exons panel were formerly called "annotations", "annotation
+			links", or "hyperlink annotations", but are now referred to as
+			"linkbars" so that "annotation" can be used in the general sense.
+		</ul>
+	</ul>
+<p>
+<dt><b>2006-Jun-17</b><br>
+<dd>	<ul>
+	<li>	Gmaj can now be deployed as a web-based applet, as an alternative to
+		the regular stand-alone mode.
+		 <a href="http://globin.bx.psu.edu/java/gmaj/demo.html">Demo</a>
+		<ul>
+		<li>	New parameters <code>bundle</code>, <code>initzoom</code>, and
+			<code>urlpause</code> facilitate applet use, but also work in
+			stand-alone mode.
+		<li>	Clicking on hyperlinked annotations (colored bars) visits the
+			referenced site (or in stand-alone mode, displays the url).
+		<li>	A new "Help - Manual" menu item visits a help page (or in
+			stand-alone mode, displays the file location).
+		<li>	The "File - Export" feature does not work yet for applets.
+		</ul>
+	<li>	Better conformance with the official UCSC specs for MAF format.
+	<li>	A new <code>refseq</code> field in the parameters file can be used
+		to prevent selection of an inappropriate reference sequence when the
+		alignments are not sequence-symmetric (e.g. from <i>multiz</i>).
+	<li>	Maximum zoom bounds are now the stated sequence length from the MAF
+		file (not just the extent of the blocks), but the default initial
+		view zooms to the aligned portion.
+	<li>	Search rules for relative filenames have been simplified, and Windows
+		names starting with "<code>\</code>" are now handled better.
+	<li>	The panel for hyperlinked annotations is now labeled, and it displays
+		the bars listed earliest in the file toward the top.
+	<li>	Colors for underlays and other annotations have been adjusted slightly.
+	<li>	The "Help - About" message now shows your current Java version and
+		vendor.
+	<li>	Improved error reporting.
+	<li>	Message dialogs have better line wrapping, and their contents can
+		be copied to the system clipboard (except in applets, due to a bug
+		in Java).
+	<li>	Several minor bug fixes, tweaks, and internal improvements.
+	<li>	Documentation has been reorganized.
+	</ul>
+<p>
+<dt><b>2006-Feb-08</b><br>
+<dd>	<ul>
+	<li>	New arrow buttons slide the zoom region left or right by half a
+		screenful.
+	<li>	The schematic diagrams for exons and repeats are now displayed in
+		separate, labeled panels.
+	<li>	A new Show dialog on the Options menu allows individual panels
+		to be displayed or hidden.
+	<li>	Users who have a one-button mouse can get crosshairs by using the
+		<code>Shift</code> key.
+	<li>	Minor aesthetic tweaks.
+	</ul>
+<p>
+<dt><b>2005-Nov-20</b><br>
+<dd>	<ul>
+	<li>	New export feature: Gmaj can now save the selected block (or all
+		blocks in the current zoom region) to files in FastA sequence format
+		for further processing by other programs.  Options include clipping
+		the sequences to the current zoom region, and restoring those that
+		align in reverse complement to their original orientation.
+	<li>	The zoom resolution limit has been reduced to 4 bp (or pixels),
+		instead of 10.
+	<li>	When setting the zoom by dragging out a rectangle, Gmaj will display
+		an error message if the rectangle is too small (instead of beeping).
+	</ul>
+<p>
+<dt><b>2005-Sep-30</b><br>
+<dd>	<ul>
+	<li>	Improved handling of <code>X</code>, <code>N</code>, and
+		unrecognized characters:
+		<ul>
+		<li>	Bug fix: the percent identity computation now ignores these
+			characters (in both numerator and denominator).
+		<li>	Bug fix: the text panel never shows dots for these characters.
+		<li>	One warning is issued for each unrecognized character.
+		</ul>
+	<li>	New zoom features:
+		<ul>
+		<li>	Gmaj keeps track of your zoom history for each window, so you can
+			move back and forward through your previous zoom regions.  A fresh
+			selection discards the history from the current point forward,
+			similar to a web browser.
+		<li>	The Unzoom facility does not go back; it just puts the maximum
+			range into the history as a fresh selection.
+		<li>	A new indicator box below the menu bar shows the current zoom
+			endpoints.
+		<li>	A Set Zoom dialog allows you to enter zoom endpoints explicitly.
+		<li>	When setting the zoom by dragging out a rectangle, a rectangle that
+			is too small will no longer be treated as a click; instead Gmaj
+			will beep (if possible).
+		</ul>
+	<li>	Improved display of pip and text panels:
+		<ul>
+		<li>	The divider between the pips and the text panel is now moveable, so
+			you can drag it to adjust the relative space they occupy.
+		<li>	The text panel starts with an initial message to explain why it is
+			empty.
+		<li>	After loading a file with many sequences, the pips will be scrolled
+			to start at the top.
+		<li>	If the current mark can be transferred when selecting a new
+			reference sequence, the pip panel will scroll to it (though the red
+			circle may not be visible if the percent identity at that point is
+			below 50%).
+		<li>	Vertical blue bars at the edges of the pips represent the
+			boundaries of the current zoom region.
+		</ul>
+	<li>	Improved display of block buttons:
+		<ul>
+		<li>	If there are too many block buttons to fit in their panel, a
+			scrollbar will appear so all of them can be accessed.
+		<li>	The divider between the mark indicator and the block buttons is now
+			moveable, so you can drag it to adjust the relative space they
+			occupy (this will reset when a new block is selected, since the
+			number of block buttons will likely be different).
+		</ul>
+	<li>	Miscellaneous:
+		<ul>
+		<li>	The menus have been rearranged slightly, and Settings has been
+			renamed to Options.
+		<li>	The keyboard shortcut for "Next Block" has been changed to
+			"<code>n</code>", so "<code>b</code>" can invoke "Zoom - Back".
+		<li>	The cascade of RefSequence window positions wraps around the screen
+			(in both width and height) so they are never placed completely
+			offscreen.
+		<li>	When opening a new RefSequence window, Gmaj will use the size of
+			the current window in case you have changed it.
+		<li>	Window sizing no longer requires Java 1.4 (a requirement that was
+			accidentally introduced in the 2005-May-16 release).
+		<li>	Bug fix: radio buttons for mark color now take effect immediately
+			in all windows.
+		<li>	Improved behavior of dialog boxes: pressing Enter invokes "Ok",
+			Escape invokes "Cancel", and the cursor begins in the first text
+			box.
+		</ul>
+	</ul>
+<p>
+<dt><b>2005-Sep-13</b><br>
+<dd>	<ul>
+	<li>	<i>IMPORTANT BUG FIX:</i>  Percent identities for the pip plots
+		were being computed in a case-sensitive fashion, which lowered the
+		scores when comparing soft-masked vs. unmasked sequences.  (This bug
+		did not affect the text view, just the pips.  Nor did it affect
+		<a href="http://globin.bx.psu.edu/dist/laj/">Laj</a>, which uses
+		pre-computed percent identity scores from the <code>lav</code> file.)
+		Note that hard masking and ambiguous nucleotides (<code>X</code>,
+		<code>N</code>, <code>W</code>, <code>R</code>, etc.) are still treated
+		literally.
+	</ul>
+<p>
+<dt><b>2005-Aug-26</b><br>
+<dd>	<ul>
+	<li>	Improved support for working with overlapping blocks (e.g. those due to
+		genomic duplications):
+		<ul>
+		<li>	A list of block numbers in the position indicator line shows the
+			blocks covering the mouse pointer's location.
+		<li>	A row of buttons next to the mark indicator line shows the blocks
+			covering the marked location, and allows convenient selection of
+			a different block at the same position.
+		<li>	The pip segments for the currently selected block are painted last,
+			so the red segments are not hidden by other blocks.
+		</ul>
+	<li>	It is no longer required to list every sequence in the parameters file.
+		Any sequences that appear in the alignment but are not mentioned in the
+		parameters file will be displayed in the order they are encountered
+		(filling in around any sequence numbers explicitly assigned in the
+		parameters file).
+	<li>	A new parameter <code>underlays</code> (plain, without a number) can be
+		used to specify a default underlay file for each reference sequence, so
+		it is no longer necessary to list underlays for each pip separately when
+		they are the same.  The old <code>underlays.N</code> parameters are
+		still supported as well, and override these new ones.
+	<li>	The <code>seqfile</code> parameter has been renamed to
+		<code>seqname</code>, in order to reflect MAF semantics (but
+		<code>seqfile</code> is still supported for backward compatibility).
+	<li>	Sequence label widths are now computed dynamically, based on the longest
+		name and the current font.
+	<li>	When the font size is changed, pip heights are also adjusted.
+	<li>	A new option on the Settings menu makes the mark circle and selected
+		block's pip segments always red, instead of changing with the background.
+	<li>	A new color <code>Clear</code> is provided, which is colorless and
+		transparent.  It is intended for gene introns, so the same file can be
+		used for pip underlays (white background) and text highlights (gray
+		background).
+	<li>	New colors <code>Brown</code> and <code>LightBrown</code> have also been
+		added, and <code>LightRed</code> and <code>Green</code> were tweaked.
+	<li>	When a gap in the text view is clicked, the nearest gap-free position
+		is selected (up or down, instead of always down).
+	<li>	Work-arounds for a few display problems in JRE 1.5 have been added.
+	<li>	The RefSeq menu has been renamed to "RefSequence" in order to avoid
+		confusion with the NCBI database.
+	<li>	The jar manifest and About information have been updated.
+	<li>	Support for alignment files in <code>.bb</code> format (from the old
+		<i>blockbuilder</i> program) has been discontinued.  The only alignment
+		format currently supported is MAF.
+	<li>	Due to a new mechanism for registering keyboard shortcuts, Gmaj now
+		requires <b>Java 1.3 or higher</b>.
+	<li>	This version should still be considered <b>beta quality</b>; please
+		report any bugs to
+		 <img align=top alt="image of email address" src="cathy.gmaj.png">.
+	</ul>
+<p>
+<dt><b>2005-May-16</b><br>
+<dd>	<ul>
+	<li>	Gmaj can now handle a wider variety of MAF files, including those
+		containing reverse-complement alignments.
+	<li>	The "proper threading" requirement has been eliminated.
+	<li>	Dynamic memory management for plot and underlay data allows larger files
+		to be viewed.
+	<li>	The pip section is now scrollable when there are many sequences.
+	<li>	Rows in the text alignment are sorted to match the order of pips.
+	<li>	This release should be considered <b>beta quality</b>.  In particular,
+		the numbering for reverse-complement alignments has not been thoroughly
+		tested.
+	</ul>
+<p>
+<dt><b>2004-Mar-12</b><br>
+<dd>	<ul>
+	<li>	A new Settings menu allows the user to choose between two sizes of fonts
+		and associated settings.
+	<li>	Initial window positions are cascaded according to reference sequence.
+	<li>	The usage message has been adjusted to show the full command syntax,
+		instead of our local wrapper script shortcut.
+	<li>	Empty alignment files are handled more gracefully.
+	<li>	Minor aesthetic tweaks.
+	</ul>
+<p>
+<dt><b>2004-Jan-14</b><br>
+<dd>	<ul>
+	<li>	Gmaj normally expects that the alignment data will be properly
+		<i>chained</i>, i.e., that for each sequence it is possible to order
+		the blocks so that every nucleotide from that sequence in the interval
+		<code>[1,length]</code> appears exactly once, and in the same order as
+		the original sequence.  (Note that MAF files are 0-based, so
+		<code>+1</code> is added to all MAF coordinates to convert them to
+		Gmaj's 1-based system.)
+		However, sometimes you may want to use Gmaj to display a subset of such
+		data (e.g., output from the <i>slice_maf</i> program).  Gmaj can now
+		handle input data where the chain starts after position <code>1</code>
+		and/or ends before position <code>length</code>, as long as it is still
+		contiguous.  In this case Gmaj will issue a warning message, but will
+		proceed to use the actual interval it found in your file for threading
+		the blocks, instead of the usual <code>[1,length]</code>.
+	</ul>
+<p>
+<dt><b>2003-Sep-16</b><br>
+<dd>	<ul>
+	<li>	Gmaj can now handle alignment files in the MAF format produced by
+		the <i>TBA</i> program, in addition to the <code>.bb</code> format
+		produced by <i>blockbuilder</i>.
+	<li>	When using a MAF alignment, the semantics of the parameters file
+		are a bit different, and its rules are stricter.  (See
+		<code><a href="sample.gmaj">sample.gmaj</a></code> for details.)
+	<li>	Gmaj still doesn't support reverse complement alignments, or multiple
+		contigs per sequence.
+	</ul>
+<p>
+<dt><b>2003-Jul-14</b><br>
+<dd>	<ul>
+	<li>	The "phantom repeats" phenomenon that was occurring with Java 1.4.x
+		has been greatly reduced, but not completely eliminated.  It appears
+		to be due to a clipping problem involving integer overflow, so that
+		items which should be off-screen are drawn anyway.  This (or a related
+		bug) may also manifest as the disappearance of items for certain zoom
+		windows.  These problems can also occur with items other than repeats,
+		such as underlays and even pip segments, though this is less common.
+		Manual clipping seems to solve the problem, and this release of Gmaj
+		should always display pip segments, underlays, and annotation bars
+		correctly.  Items in the "features" panel (exons, repeats, CpG islands,
+		etc.) are more difficult to clip manually, but a first approximation
+		has helped quite a bit.  It is still possible to experience these
+		problems if you zoom in very closely on a long feature.
+		<p>
+	<li>	If underlay labels are not provided but "kinds" (color meanings) are,
+		then Gmaj will use the kind as a default label, but only for the
+		underlay bands that are on top (i.e., not obscured by another band).
+	</ul>
+<p>
+<dt><b>2003-Apr-10</b><br>
+<dd>	<ul>
+	<li>	Fixed keyboard shortcuts, and failure to exit upon closing last
+		window.
+	<li>	New Help - Keys menu item lists keyboard shortcuts.
+	<li>	Tweaked the appearance of the Unzoom button.
+	<li>	Minor clarifications in documentation.
+	</ul>
+<p>
+<dt><b>2003-Apr-07</b><br>
+<dd>	<ul>
+	<li>	Fixed several bugs in the text panel, including scrolling
+		behavior and cursor display.
+	<li>	Added a comment in the <code>sample.gmaj</code> file regarding
+		the <code>#:gmaj</code> tag.
+	</ul>
+<p>
+<dt><b>2003-Mar-31</b><br>
+<dd>	<ul>
+	<li>	Preliminary beta-quality release.  Still has a few display
+		and behavior anomalies, and possibly other minor bugs.
+	</ul>
+</dl>
+<p>
+
+<p class=vvlarge>
+<hr>
+<i>Cathy Riemer, June 2008</i>
+</body>
+</html>
diff --git a/static/gmaj/docs/gmaj_readme.html b/static/gmaj/docs/gmaj_readme.html
new file mode 100644
index 0000000..8ac7166
--- /dev/null
+++ b/static/gmaj/docs/gmaj_readme.html
@@ -0,0 +1,85 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+	"http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<title>Gmaj: an Interactive Viewer for Multiple Sequence Alignments</title>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+<meta http-equiv="Content-Style-Type" content="text/css">
+<link rel="stylesheet" type="text/css" href="gmaj.css">
+</head>
+<body>
+<p class=vvlarge>
+<h2>Gmaj: an Interactive Viewer for Multiple Sequence Alignments</h2>
+<p class=vvlarge>
+Gmaj is a tool designed for viewing and manipulating Generalized
+Multiple Alignments (GMAs) produced by sequence-symmetric alignment
+programs such as <a href="http://www.bx.psu.edu/miller_lab/">TBA</a>
+(though it can also be used with MAF format alignments from other
+sources).  It can display interactive graphical and text
+representations of the alignments, diagrams showing the locations
+of exons and repeats, and other annotations -- all with the user's
+choice of reference sequence.
+<p>
+The program is written in Java in order to provide a graphical
+user interface that is portable across a variety of computer
+platforms; indeed its name stands for "Generalized Multiple
+Alignments with Java".  It requires <b>Java 1.3 or higher</b>,
+and for best compatibility
+<a href="http://java.com/en/download/manual.jsp">Sun's JRE</a>
+(or JDK) is recommended.
+<p>
+Gmaj can be run in two different modes: as a stand-alone
+application (for viewing local data files yourself) or as an
+applet over the world-wide web (to display your data on a server
+for viewing by others).  These modes are mostly similar, but
+have a few minor differences due to the underlying capabilities
+of applets vs. applications in Java.
+<p>
+<blockquote class=notop>
+<a href="http://globin.bx.psu.edu/java/gmaj/demo.html"
+>Applet demo</a>
+</blockquote>
+<p class=large>
+The current Gmaj distribution package is available for download
+as a compressed zip archive,
+<code><a href="http://globin.bx.psu.edu/dist/gmaj/gmaj.zip"
+>gmaj.zip</a></code>.
+<p>
+The following additional documentation files are included:
+<ul>
+<li>	<a href="gmaj_news.html">What's New in Gmaj</a><br>
+	A summary of the changes in each released version of Gmaj.
+	(Note important <b>bug fix</b> on 2005-Sep-13.)
+<p>
+<li>	<a href="gmaj_bugs.html">Known Bugs in Gmaj</a><br>
+	A discussion of the known bugs and anomalies we have
+	observed, but not yet resolved.  Some of these may depend
+	on your computer platform and/or version of Java.
+<p>
+<li>	<a href="gmaj_install.html">Installing Gmaj</a><br>
+	Describes how to unpack and set up Gmaj, for both
+	stand-alone and applet use.
+<p>
+<li>	<a href="gmaj_help.html">Starting and Running Gmaj</a><br>
+	Describes how to start Gmaj and use the GUI, in both
+	stand-alone and applet modes.  This is the file that Gmaj's
+	"Help - Manual" menu item points to.
+<p>
+<li>	<a href="gmaj_input.html">Input Files for Gmaj</a><br>
+	Detailed descriptions of the input files used by Gmaj, and
+	their formats.
+<p>
+<li>	<code><a href="sample.gmaj">sample.gmaj</a></code><br>
+	A sample template for the parameters file that tells Gmaj
+	which data files to load, among other things.  Includes
+	explanatory comments at the bottom.  This file is optional
+	if you don't want to display any annotations or use the
+	other features it provides.
+</ul>
+<p>
+
+<p class=vvlarge>
+<hr>
+<i>Cathy Riemer, June 2008</i>
+</body>
+</html>
diff --git a/static/gmaj/docs/hand14.gif b/static/gmaj/docs/hand14.gif
new file mode 100644
index 0000000..07fc08c
Binary files /dev/null and b/static/gmaj/docs/hand14.gif differ
diff --git a/static/gmaj/docs/sample.gmaj b/static/gmaj/docs/sample.gmaj
new file mode 100644
index 0000000..6ab321c
--- /dev/null
+++ b/static/gmaj/docs/sample.gmaj
@@ -0,0 +1,239 @@
+#:gmaj
+
+#----------------------------------------------------------------
+# This file specifies input parameters for a Gmaj dataset.
+# See below for explanatory comments.
+#----------------------------------------------------------------
+
+title = "My favorite genomic region"
+datapath = /home/cathy/mydata/favreg/
+alignfile = tba.maf mlagan.maf
+refseq = any
+reconorg = none
+tabext = .gff .gtf .bed .ct .trk
+nowarn =  maf_version  repeat_type_missing
+skipotherseq = false
+
+seq 0:
+seqname = human.chr11  hg17.chr11  human
+exons   = human.exons.bed  chr11
+repeats = human.repeats
+links   = human.links
+underlays    = human.exons.bed  chr11  exons
+#underlays.1 = human-mouse.underlays
+#underlays.2 = human-rat.underlays
+highlights   = human.highlights
+offset = 4730995
+
+seq 1:
+seqname = mouse.chr7
+exons   = mouse.exons
+repeats = mouse.repeats
+links   = mouse.links
+underlays    = mouse.underlays
+#underlays.0 = mouse-human.underlays
+#underlays.2 = mouse-rat.underlays
+highlights   = mouse.highlights
+offset = 0
+
+seq 2:
+seqname = rat.chr1
+exons   = rat.exons
+repeats = rat.repeats
+links   = rat.links
+underlays    = rat.underlays
+#underlays.0 = rat-human.underlays
+#underlays.1 = rat-mouse.underlays
+highlights   = rat.highlights
+offset = 0
+
+#----------------------------------------------------------------
+# This file specifies input parameters for Gmaj, including the
+# names of all data files.  You can omit this file and just give
+# Gmaj the name of your alignment file directly, but then you
+# don't get the opportunity to provide annotations, offsets,
+# additional alignment files, or other optional features.
+#
+# Syntax:
+#
+# Each key=value(s) pair must reside on its own single, separate
+# line.  (Note that although the '=' was formerly optional, it
+# is now required.)  Other than that, the format is fairly loose.
+# Even the order of lines is arbitrary, except that "seq N:"
+# defines the current sequence until it is superseded by a new
+# "seq N:" line.  Values containing spaces must be enclosed in
+# double quotes.  Embedded quotes in such strings can be escaped
+# with '\', but there is no way to escape the backslash: quoted
+# values should not end with '\' (insert a space before the
+# final quote if necessary).  Lines with missing values are
+# skipped.  A '#' at the beginning of a line marks a comment
+# that will be ignored, except for the identifier tag "#:gmaj"
+# at the top, which is mandatory.
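+#
+# For example (this title string is made up), a value containing
+# embedded quotes, and ending with a backslash, could be written
+# as:
+#
+#   title = "My \"favorite\" region, from drive C:\ "
+#
+# where the space before the final quote keeps the value from
+# ending with '\'.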
+#
+# Required Fields:
+#
+# At least one alignfile is required.  You do not have to provide
+# a section for every sequence (by default they will still be
+# displayed), but for each sequence you do mention, the "seq N:"
+# line and the seqname field are also required.  Everything else
+# is optional.
+#
+# File Names and Locations:
+#
+# Filenames can be relative or absolute (fully qualified paths).
+# Gmaj will look for relative names in the following locations:
+#
+#   1. the separately specified "bundle" file (if any)
+#   2. the "datapath" specified here (if any)
+#   3. the same directory as this parameters file
+#
+# If you are using Gmaj's "bundle" feature, you must refer to
+# the files located in the bundle by their plain filenames,
+# without any path.
+#
+# Title:
+#
+# This string will be used as the title for the Gmaj windows.
+# Typically it describes the alignment data, including the name
+# of the locus.  It does not control the applet button's label,
+# however, because the applet has not read this file yet;
+# instead there is a separate applet parameter for that.
+#
+# Reference Sequence:
+#
+# The refseq field identifies the reference sequence used in the
+# alignments.  The default value "any" means that the alignments
+# were generated by a sequence-symmetric program such as TBA, so
+# the user should be allowed to select the reference sequence
+# interactively.  Otherwise, the value must match the appropriate
+# sequence name from the MAF files (including the contig name, if
+# applicable).
+#
+# Reconstructed Sequence:
+#
+# If the alignment files include score rows for an ancestral
+# reconstruction, the reconorg field identifies which organism
+# these scores apply to.  The default value "none" means Gmaj
+# will ignore the scores; otherwise the value must match the
+# species prefix of the appropriate sequence names from the MAF
+# files.  Contig name extensions (e.g. ".chrX") are omitted, as
+# the scores can apply to any contig for that organism.  A score
+# can be supplied only once for each base in the ancestral
+# genome.
+#
+# Tabular File Extensions:
+# 
+# The tabext field specifies which filename extensions should
+# be treated as generic, tab-delimited formats (GFF/GTF/BED)
+# instead of the old PipMaker-style formats.  The default list
+# is ".gff .gtf .bed .ct .trk".  Note that it doesn't actually
+# matter which of these is used for a particular file, just
+# whether it is in the list.
+#
+# Warning Suppression:
+#
+# The nowarn field lists keywords for particular warning
+# messages that should not be displayed.  This is especially
+# useful for applets, when the administrator has seen the
+# warning, checked the data, and determined that everything
+# is OK and the end user does not need to see the warning.
+# The keyword for each suppressible message is displayed at
+# the bottom of the message.
+#
+# Ignoring Sequences:
+#
+# The skipotherseq field specifies whether sequences that appear
+# in the MAF files but are not mentioned here should be ignored.
+# If so, these rows are simply skipped; no adjustments are made
+# to remove all-gap columns or join adjacent blocks, and empty
+# blocks are kept to preserve the MAF files' block numbering.
+# This feature is useful for saving memory, and for reducing the
+# number of pips when some species have many aligning contigs.
+# The default value is false, so all sequences are displayed.
+#
+# Sequence Numbers and Sequence Names:
+#
+# The seqname field serves to match up the parameter entries with
+# the sequence name in each row of the MAF alignments (including
+# the contig name, if applicable).  The sequence number assigns
+# the display order, and is also used to identify the secondary
+# sequence for plot-specific underlays (see below).
+#
+# Multiple values can be given for each seqname keyword; in this
+# case the first is the primary name to be used for display, and
+# the rest are aliases for it.  This is useful when two MAF files
+# use different names for the same sequences, or simply for
+# changing the display labels.  Alias resolution is applied to
+# MAF seqnames, the refseq field, and the initzoom parameter, but
+# not to the reconorg field or annotation files.  All primary and
+# alias names must be unique (except in the special case of
+# pairwise self-alignments).
+#
+# Sequence numbers start with 0 and must turn out to be
+# consecutive, after Gmaj fills in any gaps you leave with the
+# MAF sequences you don't mention here.  Thus by default, if
+# the alignment files include ten sequences, the valid sequence
+# numbers would be 0-9, and Gmaj will assign any that you omit
+# (in the order it encounters them, which is affected by file
+# bundling).  However if you set skipotherseq = true, then you
+# must assign consecutive numbers because Gmaj will not assign
+# any.
+#
+# File Specification Modifiers:
+#
+# The generic, tabular annotation formats (GFF/GTF/BED) allow
+# entries for several sequences to be combined in one file,
+# since they can be distinguished by the "seqname" or "chrom"
+# column.  However in this case Gmaj will expect the column
+# value to match the seqname from the MAF alignments.  If it
+# does not (e.g. if the MAF files include a species prefix but
+# the annotation file omits it), you can add a sequence
+# designation after the filename to tell Gmaj what to look for
+# in the annotation file.
+#
+# Gmaj has special support for annotation data that represents
+# exons or repeats (namely adding exon numbers and inferring
+# UTRs, or finding the PipMaker repeat category).  For the exons
+# and repeats panels this is automatic, but you can also invoke
+# it explicitly for files used as linkbars, underlays, or text
+# highlights by adding a type hint of "exons" or "repeats" after
+# the filename.  This only works if the file is in a generic
+# (GFF/GTF/BED) format and contains the appropriate type of data
+# (genes/exons or repeats).
+#
+# Underlays and Highlights:
+#
+# Gmaj allows you to specify color underlays independently for
+# each plot, i.e. for each combination of reference and
+# secondary sequences.  Thus in the "seq 1:" section, the
+# "underlays.0" entry specifies the underlay file to be used
+# when sequence 1 is the reference and sequence 0 is the second
+# sequence.  Note that there is no "underlays.1" entry in the
+# "seq 1:" section, for example, since we do not usually have
+# plots aligning sequences with themselves.
+#
+# However, specifying a quadratic number of files quickly becomes
+# burdensome as the number of sequences grows.  For the common
+# case where the same underlay file is used for most or all of a
+# particular reference sequence's plots, the plain "underlays"
+# entry (without a number) provides a default for that reference
+# sequence.  This can still be overridden as needed by numbered
+# entries for special plots.
+#
+# The highlights file specifies colors for a particular row of
+# the text display, so there is only one for each sequence.  If
+# you omit it, Gmaj will build default highlights based on the
+# exons file (if you provided one).
+#
+# Offsets:
+#
+# The offset parameter is used for display purposes only.  It
+# specifies an adjustment to be added to all position labels and
+# displayed references for a particular sequence.  For example,
+# this allows positions to be labeled with respect to some larger
+# region.  However, note that all annotations must still be
+# specified relative to the sequences referred to in the MAF
+# files.
+#
+#----------------------------------------------------------------
+# Cathy Riemer, June 2008
diff --git a/static/gmaj/gmaj.jar b/static/gmaj/gmaj.jar
new file mode 100644
index 0000000..8c685ca
Binary files /dev/null and b/static/gmaj/gmaj.jar differ
diff --git a/static/images/Armitagep_manhattan.png b/static/images/Armitagep_manhattan.png
new file mode 100644
index 0000000..16244b0
Binary files /dev/null and b/static/images/Armitagep_manhattan.png differ
diff --git a/static/images/Armitagep_qqplot.png b/static/images/Armitagep_qqplot.png
new file mode 100644
index 0000000..2e4ee96
Binary files /dev/null and b/static/images/Armitagep_qqplot.png differ
diff --git a/static/images/add_icon.png b/static/images/add_icon.png
new file mode 100644
index 0000000..35ac612
Binary files /dev/null and b/static/images/add_icon.png differ
diff --git a/static/images/add_icon_dark.png b/static/images/add_icon_dark.png
new file mode 100644
index 0000000..5ca133d
Binary files /dev/null and b/static/images/add_icon_dark.png differ
diff --git a/static/images/aggregate_history1.png b/static/images/aggregate_history1.png
new file mode 100644
index 0000000..0a02ded
Binary files /dev/null and b/static/images/aggregate_history1.png differ
diff --git a/static/images/aggregate_history2.png b/static/images/aggregate_history2.png
new file mode 100644
index 0000000..3e5ba7a
Binary files /dev/null and b/static/images/aggregate_history2.png differ
diff --git a/static/images/bar_chart.png b/static/images/bar_chart.png
new file mode 100644
index 0000000..c4bb3ae
Binary files /dev/null and b/static/images/bar_chart.png differ
diff --git a/static/images/bed_warn.png b/static/images/bed_warn.png
new file mode 100644
index 0000000..2a83680
Binary files /dev/null and b/static/images/bed_warn.png differ
diff --git a/static/images/closebox.png b/static/images/closebox.png
new file mode 100644
index 0000000..4de4396
Binary files /dev/null and b/static/images/closebox.png differ
diff --git a/static/images/dat_points_table_brows_1.png b/static/images/dat_points_table_brows_1.png
new file mode 100644
index 0000000..2485e03
Binary files /dev/null and b/static/images/dat_points_table_brows_1.png differ
diff --git a/static/images/ddarrowsplit.png b/static/images/ddarrowsplit.png
new file mode 100644
index 0000000..8710fc3
Binary files /dev/null and b/static/images/ddarrowsplit.png differ
diff --git a/static/images/delete.gif b/static/images/delete.gif
new file mode 100755
index 0000000..43c6ca8
Binary files /dev/null and b/static/images/delete.gif differ
diff --git a/static/images/delete_tag_icon_gray.png b/static/images/delete_tag_icon_gray.png
new file mode 100644
index 0000000..e7c2fc3
Binary files /dev/null and b/static/images/delete_tag_icon_gray.png differ
diff --git a/static/images/delete_tag_icon_white.png b/static/images/delete_tag_icon_white.png
new file mode 100644
index 0000000..2bf6dd1
Binary files /dev/null and b/static/images/delete_tag_icon_white.png differ
diff --git a/static/images/documents-stack-faded.png b/static/images/documents-stack-faded.png
new file mode 100644
index 0000000..e933f3f
Binary files /dev/null and b/static/images/documents-stack-faded.png differ
diff --git a/static/images/documents-stack.png b/static/images/documents-stack.png
new file mode 100644
index 0000000..a397f60
Binary files /dev/null and b/static/images/documents-stack.png differ
diff --git a/static/images/dropdownarrow.png b/static/images/dropdownarrow.png
new file mode 100644
index 0000000..6620552
Binary files /dev/null and b/static/images/dropdownarrow.png differ
diff --git a/static/images/dw.gif b/static/images/dw.gif
new file mode 100644
index 0000000..4d7f26d
Binary files /dev/null and b/static/images/dw.gif differ
diff --git a/static/images/fetchTax.png b/static/images/fetchTax.png
new file mode 100644
index 0000000..00eaf66
Binary files /dev/null and b/static/images/fetchTax.png differ
diff --git a/static/images/fonts/fontawesome-webfont.eot b/static/images/fonts/fontawesome-webfont.eot
new file mode 100644
index 0000000..84677bc
Binary files /dev/null and b/static/images/fonts/fontawesome-webfont.eot differ
diff --git a/static/images/fonts/fontawesome-webfont.ttf b/static/images/fonts/fontawesome-webfont.ttf
new file mode 100644
index 0000000..96a3639
Binary files /dev/null and b/static/images/fonts/fontawesome-webfont.ttf differ
diff --git a/static/images/fonts/fontawesome-webfont.woff b/static/images/fonts/fontawesome-webfont.woff
new file mode 100644
index 0000000..628b6a5
Binary files /dev/null and b/static/images/fonts/fontawesome-webfont.woff differ
diff --git a/static/images/fonts/iconic_stroke.eot b/static/images/fonts/iconic_stroke.eot
new file mode 100644
index 0000000..ec21209
Binary files /dev/null and b/static/images/fonts/iconic_stroke.eot differ
diff --git a/static/images/fonts/iconic_stroke.otf b/static/images/fonts/iconic_stroke.otf
new file mode 100644
index 0000000..0aa24eb
Binary files /dev/null and b/static/images/fonts/iconic_stroke.otf differ
diff --git a/static/images/fonts/iconic_stroke.svg b/static/images/fonts/iconic_stroke.svg
new file mode 100644
index 0000000..130e59b
--- /dev/null
+++ b/static/images/fonts/iconic_stroke.svg
@@ -0,0 +1,492 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
+<svg>
+<metadata>
+Created by FontForge 20110222 at Wed Feb  8 00:46:49 2012
+ By PJ Onori,,,
+Icons by PJ Onori, font creation script by Yann
+</metadata>
+<defs>
+<font id="IconicStroke" horiz-adv-x="1030" >
+  <font-face 
+    font-family="Untitled1"
+    font-weight="500"
+    font-stretch="normal"
+    units-per-em="1000"
+    panose-1="2 0 6 3 0 0 0 0 0 0"
+    ascent="800"
+    descent="-200"
+    bbox="15 -200.031 1015.02 800.008"
+    underline-thickness="50"
+    underline-position="-100"
+    unicode-range="U+0023-E079"
+  />
+    <missing-glyph />
+    <glyph glyph-name="glyph0" unicode="&#xe063;" horiz-adv-x="655" 
+d="M327.5 674.75c-103.469 0 -187.562 -84.3438 -187.562 -188c0 -59.1875 27.0938 -113.625 74.2188 -149.156c70.5312 -53.3438 99.125 -112.312 108.875 -161.875h8.9375c9.8125 49.5625 38.3125 108.531 108.875 161.875c47.1875 35.5 74.1562 89.9375 74.1562 149.156
+c0 103.656 -84.0312 188 -187.5 188zM327.5 800v0c172.594 0 312.5 -140.25 312.5 -313.219c0 -102.562 -48.2188 -192.156 -124.031 -249.281c-34.7812 -26.375 -63.5312 -60.3125 -63.5312 -107.188v-79.8438h-249.812v79.8438c0 46.875 -28.75 80.8125 -63.5938 107.188
+c-75.75 57.125 -124.031 146.719 -124.031 249.281c0 172.969 139.906 313.219 312.5 313.219zM202.625 -200v125.25h249.812v-125.25h-249.812z" />
+    <glyph glyph-name="glyph1" unicode="&#xe052;" 
+d="M640 175v-125h-62.5v-187.5c0 -34.5312 -27.8438 -62.5 -62.5 -62.5c-34.4375 0 -62.5 27.9688 -62.5 62.5v187.5h-62.5v125h62.5v562.5c0 34.5312 28.0625 62.5 62.5 62.5c34.6562 0 62.5 -27.9688 62.5 -62.5v-562.5h62.5zM1015 550v-125h-62.5v-562.5
+c0 -34.5312 -27.8438 -62.5 -62.5 -62.5c-34.4375 0 -62.5 27.9688 -62.5 62.5v562.5h-62.5v125h62.5v187.5c0 34.5312 28.0625 62.5 62.5 62.5c34.6562 0 62.5 -27.9688 62.5 -62.5v-187.5h62.5zM265 425v-125h-62.5v-437.5c0 -34.5312 -27.8438 -62.5 -62.5 -62.5
+c-34.4375 0 -62.5 27.9688 -62.5 62.5v437.5h-62.5v125h62.5v312.5c0 34.5312 28.0625 62.5 62.5 62.5c34.6562 0 62.5 -27.9688 62.5 -62.5v-312.5h62.5z" />
+    <glyph glyph-name="glyph2" unicode="&#xe068;" horiz-adv-x="655" 
+d="M327.484 675c-50.0625 0 -97.1562 -19.5312 -132.562 -54.9375s-54.9375 -82.5 -54.9375 -132.562s19.5312 -97.1562 54.9375 -132.5c4.875 -4.75 69.3438 -68.8438 132.562 -169.188c63.5 100.5 128.188 164.844 133.438 170.031
+c34.5312 34.5 54.0625 81.5938 54.0625 131.656s-19.5312 97.1562 -54.9375 132.5c-35.4062 35.4688 -82.5 55 -132.562 55zM327.484 800v0c79.9688 0 159.906 -30.5312 220.938 -91.5312c122.062 -122.031 122.062 -319.906 0 -441.906
+c0 0 -220.938 -216.562 -220.938 -466.562c0 250 -220.938 466.562 -220.938 466.531c-122.062 122.031 -122.062 319.906 0 441.906c61.0312 61.0312 140.969 91.5625 220.938 91.5625zM264.984 487.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5
+s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5z" />
+    <glyph glyph-name="glyph3" unicode="&#xe01c;" 
+d="M390 50v0c0 -138.062 -111.938 -250 -250 -250c-45.4062 0 -87.875 12.0938 -124.5 33.1875l-0.5 0.375c74.7188 43.2188 125 123.781 125 216.188v0.25c0 69.0938 55.9062 125 125 125c68.9688 0 125 -55.9062 125 -125zM978.312 763.375
+c48.8438 -48.8125 48.8438 -127.938 0 -176.75l-472.594 -472.781c-22.9375 86.3125 -90.5625 153.812 -176.75 176.875l472.594 472.656c48.8125 48.8125 127.938 48.8125 176.75 0z" />
+    <glyph glyph-name="glyph4" unicode="&#xe03e;" 
+d="M577.5 175v-187.5h125l-187.5 -187.5l-187.5 187.5h125v187.5h125zM452.5 425v187.5h-125l187.5 187.5l187.5 -187.5h-125v-187.5h-125zM390 237.5h-187.5v-125l-187.5 187.5l187.5 187.5v-125h187.5v-125zM640 362.5h187.5v125l187.5 -187.5l-187.5 -187.5v125h-187.5
+v125z" />
+    <glyph glyph-name="glyph5" unicode="&#xe005;" 
+d="M941.75 726.75c97.6562 -97.6562 97.6562 -255.844 0 -353.5l-573.25 -573.25h-353.5l0.96875 354.5l572.281 572.25c48.8125 48.8438 112.781 73.25 176.75 73.25s127.938 -24.4062 176.75 -73.25zM316.75 -75l535.531 535.406l-176.75 176.75l-529.812 -529.781
+c49.0938 42.75 122.938 42.125 169.688 -4.625c48.8125 -48.8438 48.8125 -128.938 0 -177.75h1.34375z" />
+    <glyph glyph-name="glyph6" unicode="&#xe03b;" horiz-adv-x="405" 
+d="M265 175v-187.5h125l-187.5 -187.5l-187.5 187.5h125v187.5h125zM140 425v187.5h-125l187.5 187.5l187.5 -187.5h-125v-187.5h-125z" />
+    <glyph glyph-name="glyph7" unicode="&#xe06a;" 
+d="M515 675c0 0 125 125 500 125v-750c-378.906 0 -500 -125 -500 -125s-121.094 125 -500 125v750c375 0 500 -125 500 -125zM140 675v-503.406c158.875 -13.4375 256.344 -50.1875 312.5 -79.8438v503.406c-56.1562 29.7812 -153.625 66.5312 -312.5 79.8438zM890 171.594
+v503.406c-158.938 -13.3125 -256.344 -50.0625 -312.5 -79.8438v-503.406c56.1562 29.6875 153.562 66.4062 312.5 79.8438z" />
+    <glyph glyph-name="glyph8" unicode="&#xe01f;" 
+d="M15 174.961l500.031 -125.008l499.969 125.008v-125.008l-499.969 -125.008l-500.031 125.008v125.008zM15 424.977l500.031 -125.008l499.969 125.008v-125.008l-499.969 -125.008l-500.031 125.008v125.008zM15 674.992l500.031 125.008l499.969 -125.008v-125.008
+l-499.969 -125.008l-500.031 125.008v125.008z" />
+    <glyph glyph-name="glyph9" unicode="&#xe049;" horiz-adv-x="780" 
+d="M15 -200v1000h250v-1000h-250zM515 -200v1000h250v-1000h-250z" />
+    <glyph glyph-name="glyph10" unicode="&#xe020;" 
+d="M15 50l500 -125l499.938 125v-125l-499.938 -125l-500 125v125zM15 300l500 -125l499.938 125v-125l-499.938 -125l-500 125v125zM15 550l500 -125l499.938 125v-125l-499.938 -125l-500 125v125zM15 800l500 -125l499.938 125v-125l-499.938 -125l-500 125v125z" />
+    <glyph glyph-name="glyph11" unicode="&#xe045;" 
+d="M765 675c137.938 0 250 -112.062 250 -250s-112.062 -250 -250 -250h-500c-137.875 0 -250 112.062 -250 250s112.125 250 250 250c11.1562 0 22.0938 -1.71875 33.0938 -3.1875c42.9062 76.1875 123.531 128.188 216.906 128.188c91.4375 0 172 -51.5 215.562 -128.406
+c11.375 1.46875 22.7188 3.40625 34.4375 3.40625zM765 300c68.8438 0 125 56.1562 125 125s-56.1562 125 -125 125s-125 -56.1562 -125 -125h-125c0 81.4062 39.6875 153.062 100.094 198.719c-23.125 31.25 -59.625 51.2812 -100.094 51.2812
+c-68.9062 0 -125 -56.1562 -125 -125c0 -15.125 2.625 -29.7812 7.8125 -43.7188l-117.062 -43.6875c-10.25 27.3438 -15.375 56.1562 -15.625 85.6875c-64.4062 -1.9375 -125.125 -54.6875 -125.125 -123.281c0 -68.8438 56.0938 -125 125 -125h500zM577.562 -12.5v-187.5
+h-125v187.5h-125l187.562 187.5l187.375 -187.5h-124.938z" />
+    <glyph glyph-name="glyph12" unicode="&#xe029;" horiz-adv-x="967" 
+d="M469.148 193.062l-330.562 -330.562c-164.781 190.188 -164.781 470.938 0 661.125zM516.023 237.5l-403.062 403.062c111.562 91.3125 249.75 149.906 403.062 159.438v-562.5zM577.555 674.5c211.594 -32.4688 374.938 -211.656 374.938 -432.844
+c0 -243.906 -197.75 -441.656 -442.062 -441.656c-96.9375 0 -184.594 33.6875 -257.344 87.1562l324.469 324.469v462.875z" />
+    <glyph glyph-name="glyph13" unicode="&#xe051;" 
+d="M720.812 5.8125l-80.8125 -80.8125v250h250l-80.8125 -80.8125l205.812 -205.812l-88.375 -88.375zM219.594 507.156l-204.594 204.469l88.375 88.375l204.594 -204.469l82.0312 79.4688l-1.21875 -248.656l-248.781 -1.34375zM15 -111.625l205.812 205.812
+l-80.8125 80.8125h250v-250l-80.8125 80.8125l-205.812 -205.812zM890.969 425h-250.969v250.906l81.1875 -81.0938l205.188 205.188l88.625 -88.6875l-205.062 -205.188z" />
+    <glyph glyph-name="glyph14" unicode="&#xe044;" 
+d="M765 675c137.938 0 250 -112.188 250 -250s-112.062 -250 -250 -250h-187.5v-187.5h125l-187.562 -187.5l-187.438 187.5h125v187.5h-187.5c-137.875 0 -250 112.188 -250 250s112.125 250 250 250c11.1562 0 22.0938 -1.59375 33.0938 -3.0625
+c42.9062 75.9375 123.531 128.062 216.906 128.062c91.4375 0 172 -51.4062 215.562 -128.531c11.375 1.6875 22.7188 3.53125 34.4375 3.53125zM765 300c68.8438 0 125 56.0312 125 125s-56.1562 125 -125 125s-125 -56.0312 -125 -125h-125
+c0 81.4062 39.6875 153.188 100.094 198.844c-23.125 31.125 -59.625 51.1562 -100.094 51.1562c-68.9062 0 -125 -56.0312 -125 -125c0 -15 2.625 -29.7812 7.8125 -43.5938l-117.062 -43.9375c-10.25 27.3438 -15.375 56.4062 -15.625 85.8125
+c-64.4062 -1.8125 -125.125 -54.8125 -125.125 -123.281c0 -68.9688 56.0938 -125 125 -125h500z" />
+    <glyph glyph-name="glyph15" unicode="&#xe004;" horiz-adv-x="905" 
+d="M452.5 675c-172.375 0 -312.5 -140.125 -312.5 -312.5c0 -82.4062 32.0938 -160.281 90.3438 -219.344l36.25 -36.75l-0.25 -51.625l-0.125 -21.375l14.2812 9.40625l45.6562 30.25l53.2188 -13.1875c27.0938 -6.59375 51.0312 -9.875 73.125 -9.875
+c172.375 0 312.5 140.125 312.5 312.5s-140.125 312.5 -312.5 312.5zM452.5 800v0c241.562 0 437.5 -195.938 437.5 -437.5s-195.938 -437.5 -437.5 -437.5c-35.6562 0 -69.8125 5.375 -103.031 13.5625l-209.469 -138.562l1.34375 255.375
+c-78 79.0938 -126.344 187.375 -126.344 307.125c0 241.562 195.938 437.5 437.5 437.5z" />
+    <glyph glyph-name="glyph16" unicode="&#x2709;" 
+d="M517.438 441.969l-502.438 235.969v122.062h1000v-122.438zM517.688 303.781l497.312 235.469v-489.25h-1000v490z" />
+    <glyph glyph-name="glyph17" unicode="&#x2718;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM437.5 69.2812l388.875 388.906l-88.375 88.375l-300.531 -300.531l-141.656 141.719l-88.375 -88.375z" />
+    <glyph glyph-name="glyph18" unicode="&#xe066;" 
+d="M703 800l312 -312.5v-687l-247.438 206.781l-250.375 -207.281l-250.594 207.5l-251.594 -205.562v998.062h688zM890.625 51.9375v250h-375.219v375h-375.25v-625l127.688 100.594l249.375 -206.531l249.875 206.781zM640.5 426.938h250.125
+c-50.0625 50.0625 -199.594 199.469 -250.125 250v-250z" />
+    <glyph glyph-name="glyph19" unicode="&#xe05d;" 
+d="M890 675h125v-250h-125v-62.5c0 -34.4375 -28.0625 -62.5 -62.5 -62.5h-750c-34.4375 0 -62.5 28.0625 -62.5 62.5v123.656h199.594c25.75 -72.625 94.5 -125 176.031 -125v63.8438h127.062v62.5h-127.062v125h124.5v62.5h-124.5v61.1562
+c-81.5625 0 -150.281 -52.375 -176.031 -125h-199.594v126.344c0 34.375 28.0625 62.5 62.5 62.5h750c34.4375 0 62.5 -28.125 62.5 -62.5v-62.5z" />
+    <glyph glyph-name="glyph20" unicode="&#xe04a;" 
+d="M15 -200v1000h1000v-1000h-1000z" />
+    <glyph glyph-name="glyph21" unicode="&#x2191;" 
+d="M15 300.5l500.062 499.5l499.938 -499.5l-177.281 -177.25l-197.438 197.5v-520.75h-250.719v520.75c-97.875 -97.875 -197.312 -197.25 -197.312 -197.25z" />
+    <glyph glyph-name="glyph22" unicode="&#xe038;" 
+d="M390 550h-187.5v-125l-187.5 187.5l187.5 187.5v-125h187.5v-125zM640 675h187.5v125l187.5 -187.5l-187.5 -187.5v125h-187.5v125z" />
+    <glyph glyph-name="glyph23" unicode="&#xe021;" 
+d="M515 675c-206.781 0 -375 -168.219 -375 -375s168.219 -375 375 -375s375 168.219 375 375s-168.219 375 -375 375zM515 800v0c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM581.406 366.281
+c-24.0625 24.0625 -57.625 30.7188 -88.375 23.125l271.969 160.594l-161.125 -275.688c9.03125 31.6562 2.4375 67 -22.4688 91.9688zM448.844 233.719c25.0312 -25.0312 60.4062 -31.625 92.1562 -22.4688l-276 -161.25l160.656 272.344
+c-7.5625 -30.8125 -0.96875 -64.5312 23.1875 -88.625z" />
+    <glyph glyph-name="glyph24" unicode="&#xe009;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM765 237.5v125h-500v-125h500z" />
+    <glyph glyph-name="glyph25" unicode="&#xe05c;" 
+d="M1015 675v-250h-125v-62.5c0 -34.3125 -28.0625 -62.5 -62.5 -62.5h-750c-34.4375 0 -62.5 28.1875 -62.5 62.5v375c0 34.3125 28.0625 62.5 62.5 62.5h750c34.4375 0 62.5 -28.1875 62.5 -62.5v-62.5h125zM765 425v250h-625v-250h625z" />
+    <glyph glyph-name="glyph26" unicode="&#xe002;" horiz-adv-x="530" 
+d="M265 800c138.062 0 250 -111.938 250 -250s-250 -750 -250 -750s-250 611.938 -250 750s111.938 250 250 250zM265 425c69.0312 0 125 55.9688 125 125s-55.9688 125 -125 125s-125 -55.9688 -125 -125s55.9688 -125 125 -125z" />
+    <glyph glyph-name="glyph27" unicode="&#xe076;" horiz-adv-x="780" 
+d="M452.625 800c172.25 0 312.375 -140.25 312.375 -312.5v-500c0 -103.5 -84.0938 -187.5 -187.5 -187.5h-375.062c-103.344 0 -187.438 84 -187.438 187.5v437.5h625v62.5c0 103.281 -83.9688 187.5 -187.375 187.5s-187.438 -84.2188 -187.438 -187.5h-125
+c0 172.25 140.125 312.5 312.438 312.5zM577.5 -75c34.4375 0 62.5 27.9688 62.5 62.5v312.5h-500.062v-312.5c0 -34.5312 28.0625 -62.5 62.5 -62.5h375.062zM452.5 112.5c0 -34.6562 -27.9688 -62.5 -62.5 -62.5s-62.5 27.8438 -62.5 62.5
+c0 34.4375 27.9688 62.5 62.5 62.5s62.5 -28.0625 62.5 -62.5z" />
+    <glyph glyph-name="glyph28" unicode="&#xe075;" horiz-adv-x="780" 
+d="M452.625 800c172.25 0 312.375 -140.125 312.375 -312.5v-500c0 -103.281 -84 -187.5 -187.5 -187.5h-375.062c-103.219 0 -187.438 84.2188 -187.438 187.5v437.5h125.188v62.5c0 172.375 140.25 312.5 312.438 312.5zM265.188 487.5v-62.5h374.812v62.5
+c0 103.5 -84 187.5 -187.375 187.5c-103.281 0 -187.438 -84 -187.438 -187.5zM577.5 -75c34.6562 0 62.5 28.0625 62.5 62.5v312.5h-500.062v-312.5c0 -34.4375 28.0625 -62.5 62.5 -62.5h375.062zM452.625 112.5c0 -34.4375 -28.0625 -62.5 -62.5 -62.5
+c-34.6562 0 -62.5 28.0625 -62.5 62.5c0 34.6562 27.8438 62.5 62.5 62.5c34.4062 0 62.5 -27.8438 62.5 -62.5z" />
+    <glyph glyph-name="question" unicode="?" horiz-adv-x="530" 
+d="M190 -137.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM254.875 -11.7812c-43.125 0 -79.9482 15.2607 -110.469 45.7812s-45.7812 67.3438 -45.7812 110.469s15.2607 79.9482 45.7812 110.469
+l209 209c24.3955 24.3633 36.5938 53.8223 36.5938 88.375c0 34.5645 -12.208 64.0225 -36.625 88.375c-24.3701 24.3857 -53.8223 36.585 -88.3555 36.5977s-63.9775 -12.1865 -88.332 -36.5977c-24.458 -24.3936 -36.6875 -53.8525 -36.6875 -88.375h-125
+c0 69.0439 24.458 127.992 73.375 176.844c47.1914 47.2305 106.077 70.8369 176.656 70.8203s129.496 -23.6445 176.75 -70.8828c48.8125 -48.748 73.2188 -107.676 73.2188 -176.781c0 -69.0322 -24.417 -127.949 -73.25 -176.75l-208.969 -209
+c-6.07812 -6.10938 -9.11719 -13.4736 -9.11719 -22.0938s3.03906 -15.9844 9.11719 -22.0938c6.10938 -6.10938 13.4736 -9.16406 22.0938 -9.16406s15.9844 3.05469 22.0938 9.16406c6.10449 6.10449 9.15625 13.4688 9.15625 22.0938h125
+c0 -43.125 -15.2607 -79.9482 -45.7812 -110.469s-67.3438 -45.7812 -110.469 -45.7812v0z" />
+    <glyph glyph-name="glyph30" unicode="&#xe055;" 
+d="M15 737.496c0 41.6689 20.835 62.5039 62.5039 62.5039s62.5039 -20.835 62.5039 -62.5039s-20.835 -62.5039 -62.5039 -62.5039s-62.5039 20.835 -62.5039 62.5039zM15 487.48c0 41.6689 20.835 62.5039 62.5039 62.5039s62.5039 -20.835 62.5039 -62.5039
+s-20.835 -62.5039 -62.5039 -62.5039s-62.5039 20.835 -62.5039 62.5039zM15 237.465c0 41.6689 20.835 62.5039 62.5039 62.5039s62.5039 -20.835 62.5039 -62.5039s-20.835 -62.5039 -62.5039 -62.5039s-62.5039 20.835 -62.5039 62.5039zM15 -12.5508
+c0 41.6689 20.835 62.5039 62.5039 62.5039s62.5039 -20.835 62.5039 -62.5039s-20.835 -62.5039 -62.5039 -62.5039s-62.5039 20.835 -62.5039 62.5039zM265.016 674.992v125.008h749.984v-125.008h-749.984zM265.016 424.977v125.008h749.984v-125.008h-749.984z
+M265.016 174.961v125.008h749.984v-125.008h-749.984zM265.016 -75.0547v125.008h749.984v-125.008h-749.984z" />
+    <glyph glyph-name="glyph31" unicode="&#xe043;" horiz-adv-x="780" 
+d="M15 657.125v142.875h750v-142.875h-750zM265 371.406h-125l249.625 285.719l250.375 -285.719h-125v-571.406h-250v571.406z" />
+    <glyph glyph-name="glyph32" unicode="&#xe030;" horiz-adv-x="887" 
+d="M729.286 228.571h142.856c0 -236.75 -191.964 -428.571 -428.571 -428.571s-428.571 191.821 -428.571 428.571s191.964 428.571 428.571 428.571h84.8213l-42.1426 41.8574l101 101l214.714 -214.286l-214.714 -214.964l-101 101l41.5713 41.5713l-84.25 0.964844
+c-157.643 0 -285.714 -128.215 -285.714 -285.715s128.071 -285.714 285.714 -285.714s285.715 128.214 285.715 285.714z" />
+    <glyph glyph-name="glyph33" unicode="&#xe035;" 
+d="M890 362.5h125c0 -103.281 -84.0938 -187.5 -187.5 -187.5h-562.75v-125l-249.75 187.5l249.75 187.5v-125h562.75c34.4375 0 62.5 28.0625 62.5 62.5zM202.5 550c-34.4375 0 -62.5 -28.0625 -62.5 -62.5h-125c0 103.5 84.0938 187.5 187.5 187.5h562.5v125l250 -187.5
+l-250 -187.5v125h-562.5z" />
+    <glyph glyph-name="glyph34" unicode="&#xe034;" 
+d="M890 425h125v-62.5c0 -103.281 -84.0938 -187.5 -187.5 -187.5h-437.625l0.125 -125l-250 187.5l250 187.5l-0.125 -125h437.625c34.4375 0 62.5 28.0625 62.5 62.5v62.5zM140 487.5v-62.5h-125v62.5c0 103.5 84.0938 187.5 187.5 187.5h437.5v125l249.75 -187.5
+l-249.75 -187.5v125h-437.5c-34.4375 0 -62.5 -27.9688 -62.5 -62.5z" />
+    <glyph glyph-name="glyph35" unicode="&#xe033;" 
+d="M638.281 175l187.75 250l188.969 -250h-125v-62.5c0 -103.406 -84.0938 -187.5 -187.5 -187.5h-375c-103.406 0 -187.5 84.0938 -187.5 187.5v62.5h125v-62.5c0 -34.4375 28.0625 -62.5 62.5 -62.5h375c34.4375 0 62.5 28.0625 62.5 62.5v62.5h-126.719zM391.719 550
+l-187.75 -250l-188.969 250h125v62.5c0 103.406 84.0938 187.5 187.5 187.5h375c103.406 0 187.5 -84.0938 187.5 -187.5v-62.5h-125v62.5c0 34.4375 -28.0625 62.5 -62.5 62.5h-375c-34.4375 0 -62.5 -28.0625 -62.5 -62.5v-62.5h126.719z" />
+    <glyph glyph-name="glyph36" unicode="&#xe032;" 
+d="M827.5 425l187.5 -187.5h-125c0 -103.406 -84.0938 -187.5 -187.5 -187.5h-375c-103.406 0 -187.5 84.0938 -187.5 187.5v62.5h125v-62.5c0 -34.4375 28.0625 -62.5 62.5 -62.5h375c34.4375 0 62.5 28.0625 62.5 62.5h-125zM702.5 800
+c103.406 0 187.5 -84.0938 187.5 -187.5v-62.5h-125v62.5c0 34.4375 -28.0625 62.5 -62.5 62.5h-375c-34.4375 0 -62.5 -28.0625 -62.5 -62.5v-1.9375h125l-187.5 -185.562l-187.5 185.562h125v1.9375c0 103.406 84.0938 187.5 187.5 187.5h375z" />
+    <glyph glyph-name="glyph37" unicode="&#x275d;" 
+d="M1015 50h-375v375c0 206.781 168.219 375 375 375v-125c-137.938 0 -250 -112.188 -250 -250h250v-375zM390 50h-375v375c0 206.781 168.219 375 375 375v-125c-137.938 0 -250 -112.188 -250 -250h250v-375z" />
+    <glyph glyph-name="glyph38" unicode="&#x2713;" 
+d="M1015 0.0712891l-200.071 -200.071l-299.929 300.071l-300.071 -300.071l-199.929 200.071l299.929 299.929l-299.929 299.929l199.929 200.071l300.071 -300.071l299.929 300.071l200.071 -200.071l-300.214 -299.929z" />
+    <glyph glyph-name="glyph39" unicode="&#xe04d;" 
+d="M15 800l750 -500l-750 -500v1000zM765 -75v750h250v-750h-250z" />
+    <glyph glyph-name="glyph40" unicode="&#xe000;" horiz-adv-x="780" 
+d="M452.5 800l312.5 -312.438v-687.562h-750v1000h437.5zM140 -75h500v500h-250v250h-250v-750z" />
+    <glyph glyph-name="glyph41" unicode="&#xe06f;" 
+d="M765 -200v1000h250v-1000h-250zM390 -200v750h250v-750h-250zM15 -200v500h250v-500h-250z" />
+    <glyph glyph-name="glyph42" unicode="&#x2190;" 
+d="M514.5 -200l-499.5 500l499.5 500l177.25 -177.281l-197.25 -197.5h520.5v-250.719h-520.5l197.25 -197.25z" />
+    <glyph glyph-name="glyph43" unicode="&#x2193;" 
+d="M1015 299.5l-500 -499.5l-500 499.5l177.281 177.25l197.5 -197.5v520.75h250.719v-520.75l197.25 197.25z" />
+    <glyph glyph-name="glyph44" unicode="&#xe042;" horiz-adv-x="780" 
+d="M15 -200v142.812h750v-142.812h-750zM515 228.594h125l-249.625 -285.781l-250.375 285.781h125v571.406h250v-571.406z" />
+    <glyph glyph-name="glyph45" unicode="&#x2302;" 
+d="M515 800l500 -500h-125v-500h-750v500h-125zM765 -75v448.25l-250 176.75l-250 -176.75v-448.25h187.5v187.5h125v-187.5h187.5z" />
+    <glyph glyph-name="glyph46" unicode="&#xe001;" 
+d="M762.438 675h252.562v-125h-252.562h-125h-250h-125h-247.438v125h247.438v125h125v-125h250v125h125v-125zM140 50v125h750v-125h-750zM140 300v125h750v-125h-750zM765 -200h-625v125h750z" />
+    <glyph glyph-name="glyph47" unicode="&#xe012;" 
+d="M15 800h375v-375l-375 -375v750zM640 800h375v-375l-375 -375v750z" />
+    <glyph glyph-name="glyph48" unicode="&#xe04e;" 
+d="M871.688 31.6875l143.312 143.312v-375h-375l143.312 143.312l-143.312 143.312l88.375 88.375zM390 800l-143.312 -143.312l141.969 -141.844l-88.375 -88.375l-141.969 141.844l-143.312 -143.312v375h375zM390 86.625l-143.312 -143.312l143.312 -143.312h-375v375
+l143.312 -143.312l143.312 143.312zM1015 800v-375l-143.312 143.312l-141.969 -141.844l-88.375 88.375l141.969 141.844l-143.312 143.312h375z" />
+    <glyph glyph-name="glyph49" unicode="&#xe058;" 
+d="M390 305.375c0 83.333 41.667 125 125 125s125 -41.667 125 -125s-41.667 -125 -125 -125s-125 41.667 -125 125zM907.344 611.031c71.7705 -90.8154 107.656 -192.701 107.656 -305.656c0 -67.8115 -13.2002 -132.582 -39.5996 -194.311
+c-26.4004 -61.7285 -61.916 -114.908 -106.549 -159.541s-97.8125 -80.1484 -159.541 -106.549c-61.7285 -26.3994 -126.499 -39.5996 -194.311 -39.5996s-132.582 13.2002 -194.311 39.5996c-61.7285 26.4004 -114.908 61.916 -159.541 106.549
+s-80.1484 97.8125 -106.549 159.541c-26.3994 61.7285 -39.5996 126.499 -39.5996 194.311c0 111.533 35.2812 212.929 105.844 304.188c11.6758 -8.79199 23.9258 -13.1875 36.75 -13.1875c17.3281 0 32.0771 6.08496 44.2461 18.2539s18.2539 26.918 18.2539 44.2461
+c0 12.7891 -4.52051 25.1641 -13.5625 37.125c72.4873 57.4258 154.883 92.0928 247.188 104c1.41699 -16.1016 7.9541 -29.6484 19.6133 -40.6387c11.6582 -10.9912 25.5479 -16.4863 41.668 -16.4863c16.0684 0 29.8965 5.4209 41.4834 16.2617
+c11.5879 10.8418 18.1865 24.2236 19.7979 40.1445c90.6689 -11.2656 172.294 -44.8799 244.875 -100.844c-10.083 -12.2812 -15.125 -25.4688 -15.125 -39.5625c0 -17.3027 6.10547 -32.0449 18.3174 -44.2275c12.2109 -12.1816 26.9385 -18.2725 44.1826 -18.2725
+c13.9131 0 26.8506 4.88574 38.8125 14.6562zM515 55.375c69.0459 0 127.974 24.3965 176.784 73.1904s73.2158 107.73 73.2158 176.81c0 69.1689 -24.5 128.169 -73.5 177l-176.5 176.5l-176.75 -176.75c-48.833 -48.833 -73.25 -107.75 -73.25 -176.75
+c0 -69.0312 24.4092 -127.956 73.2266 -176.773s107.742 -73.2266 176.773 -73.2266z" />
+    <glyph glyph-name="glyph50" unicode="&#xe008;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM765 237.5v125h-187.5v187.5h-125v-187.5h-187.5v-125h187.5v-187.5h125v187.5h187.5z" />
+    <glyph glyph-name="glyph51" unicode="&#xe079;" 
+d="M515 675c-206.781 0 -375 -168.219 -375 -375s168.219 -375 375 -375s375 168.219 375 375s-168.219 375 -375 375zM515 800v0c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM684.438 219.438l-88.375 -88.375
+l-122.562 122.562c-13.0625 11.5938 -21.7188 27.8125 -21.7188 46.625v249.75h125.719v-223.5z" />
+    <glyph glyph-name="glyph52" unicode="&#xe060;" 
+d="M889.281 550l125.719 125l-122.562 125h122.562v-1000h-1000v1000h125.5l125.469 -125.5l-126.938 -124.5h125.25l127.656 125.5l-124.5 124.5h176.75l125.375 -125.5l-126.844 -124.5h147.594l127.688 125.5l-124.625 124.5h173.938l122.688 -125.5l-124.125 -124.5
+h123.406zM890 -73.6562v125h-750v-125h750zM890 175v125h-750v-125h750z" />
+    <glyph glyph-name="glyph53" unicode="&#xe024;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM515 675c-162.719 0 -300.156 -104.812 -352.062 -250h704.125c-51.9062 145.188 -189.344 250 -352.062 250zM515 237.5
+c34.5312 0 62.5 27.9688 62.5 62.5s-27.9688 62.5 -62.5 62.5s-62.5 -27.9688 -62.5 -62.5s27.9688 -62.5 62.5 -62.5zM140 300c0 -185.188 135.25 -338.375 312 -368.656c-2.90625 204.094 -141.219 368.656 -312 368.656zM578 -68.6562
+c176.75 30.2812 312 183.469 312 368.656c-170.781 0 -309.094 -164.562 -312 -368.656z" />
+    <glyph glyph-name="glyph54" unicode="&#x270e;" 
+d="M941.75 726.75c97.6562 -97.6562 97.6562 -255.844 0 -353.5l-573.25 -573.25h-353.5l0.96875 354.5l572.281 572.25c48.8125 48.8438 112.781 73.25 176.75 73.25c64.0938 0 128.062 -24.4062 176.75 -73.25zM316.75 -75l536.625 536.625v0l-176.75 176.75v0
+l-535.781 -535.875l-0.125 -52.5h124.281v-125h51.75z" />
+    <glyph glyph-name="glyph55" unicode="&#xe02b;" 
+d="M380.594 530.844c0 -53.2188 -43.2188 -96.4375 -96.3125 -96.4375c-53.2188 0 -96.4375 43.2188 -96.4375 96.4375c0 53.0938 43.2188 96.1875 96.4375 96.1875c53.0938 0 96.3125 -43.0938 96.3125 -96.1875zM15 800h414.781l585.219 -585.312h-414.781v-414.688
+l-585.219 585.219v414.781zM383.656 688.688c-32.9688 0 -186.156 0 -257.344 -0.03125v-257.438c24.1875 -23.9062 246.219 -246.094 362.594 -362.281v257.094h257.312c-116.344 116.312 -338.406 338.594 -362.562 362.656z" />
+    <glyph glyph-name="glyph56" unicode="&#xe067;" 
+d="M1015 550c0 -93.9375 -52.5 -174.812 -129.156 -217.531c1 -10.5625 3.1875 -20.6875 3.1875 -31.5c0 -207.156 -167.844 -375 -375 -375c-77.2812 0 -149.062 23.4375 -208.75 63.4688l-290.281 -189.438l190.281 288.562
+c-41.625 60.3125 -66.2812 133.438 -66.2812 212.406c0 207.156 167.844 375 375 375c11.7188 0 22.8438 -2.375 34.3125 -3.46875c0 -0.0625 -0.125 -0.125 -0.125 -0.1875c43 75.875 123.562 127.688 216.812 127.688c137.938 0 250 -112 250 -250zM514.031 50.9688
+c137.562 0 249.375 111.594 249.875 249.156c-22.3438 0.125 -43.7188 4.03125 -64.3438 9.65625l-98.1562 -98.1562c-48.8125 -48.8125 -127.938 -48.8125 -176.75 0s-48.8125 127.938 0 176.75l99.125 99.1875c-5.125 20.0938 -8.78125 40.7188 -8.78125 62.4375
+c0 0.5 0.125 0.96875 0.125 1.46875c0 -0.1875 -0.125 -0.375 -0.125 -0.625c-0.25 0 -0.59375 0.125 -0.96875 0.125c-137.938 0 -250 -112.125 -250 -250c0 -137.938 112.062 -250 250 -250zM765 425c68.8438 0 125 56.0938 125 125s-56.1562 125 -125 125
+s-125 -56.0938 -125 -125s56.1562 -125 125 -125z" />
+    <glyph glyph-name="glyph57" unicode="&#x26d4;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM515 675c-206.781 0 -375 -168.219 -375 -375c0 -80.8125 26.25 -155.281 69.9375 -216.562l521.75 521.5
+c-61.2812 43.6875 -135.75 70.0625 -216.688 70.0625zM515 -75c206.781 0 375 168.219 375 375c0 80.8125 -26.25 155.281 -69.9375 216.562l-521.75 -521.5c61.2812 -43.6875 135.75 -70.0625 216.688 -70.0625z" />
+    <glyph glyph-name="glyph58" unicode="&#xe011;" 
+d="M1015 50h-375v375l375 375v-750zM390 50h-375v375l375 375v-750z" />
+    <glyph glyph-name="glyph59" unicode="&#xe071;" 
+d="M265 175c-138.062 0 -250 111.938 -250 250s111.938 250 250 250v-500zM390 675l250 125v-750l-250 125v500zM1015 362.5h-250v125h250v-125z" />
+    <glyph glyph-name="glyph60" unicode="&#xe018;" 
+d="M1015 300c0 -276.125 -223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500s500 -223.875 500 -500zM390 112.5l125 -125l125 125h-62.5v500h-125v-500h-62.5z" />
+    <glyph glyph-name="glyph61" unicode="&#xe056;" 
+d="M15 737.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM265 675v125h750v-125h-750zM265 487.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5
+s-62.5 20.833 -62.5 62.5zM515 425v125h500v-125h-500zM265 -12.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM515 -75v125h500v-125h-500zM515 237.5c0.000976562 41.667 20.834 62.5 62.5 62.5
+c41.667 0 62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM765 175v125h250v-125h-250z" />
+    <glyph glyph-name="glyph62" unicode="&#xe014;" 
+d="M15 300c0 276.188 223.875 500 500.125 500c276 0 499.875 -223.812 499.875 -500c0 -276.125 -223.875 -500 -499.875 -500c-276.25 0 -500.125 223.875 -500.125 500zM765 300.969l-249.875 249.094l-249.156 -249.094h186.531v-250.969h125v250.969h187.5z" />
+    <glyph glyph-name="glyph63" unicode="&#xe06d;" 
+d="M640 675h-312.5c-103.281 0 -187.5 -84.0938 -187.5 -187.5v-312.5c0 -40.7812 19.7812 -77.1562 50.1562 -100.094c45.6562 60.7812 118.312 100.094 199.844 100.094h250c137.938 0 250 112.062 250 250s-112.062 250 -250 250zM640 800v0
+c207.156 0 375 -167.969 375 -375s-167.844 -375 -375 -375h-250c-68.9688 0 -125 -55.9062 -125 -125c-137.938 0 -250 111.938 -250 250v312.5c0 172.594 139.906 312.5 312.5 312.5h312.5z" />
+    <glyph glyph-name="glyph64" unicode="&#xe02f;" 
+d="M894.82 683.788c77.4014 -77.4951 120.117 -180.52 120.117 -290.138s-42.7158 -212.643 -120.117 -290.325l-94.2441 -93.9941l-88.3701 88.3691l94.2441 94.1816c53.8408 53.9658 83.4951 125.492 83.4951 201.769c0 76.1826 -29.6543 147.709 -83.4951 201.519
+c-107.649 107.899 -295.388 107.899 -403.037 0.125l-170.396 -170.396h156.959v-125.117h-374.977v375.227h124.992v-166.239l174.896 175.02c154.803 155.022 425.036 154.897 579.933 0zM503.074 -111.463l88.4443 88.3193l88.3643 -88.4883l-88.4453 -88.3203z" />
+    <glyph glyph-name="glyph65" unicode="&#x2602;" 
+d="M515 800c275.812 0 499.938 -224.312 499.938 -500h-125c0 25.8125 -21 46.875 -46.875 46.875c-25.625 0 -46.875 -21.0625 -46.875 -46.875h-125c0 25.8125 -20.9375 46.875 -46.8125 46.875c-25.75 0 -46.875 -21.0625 -46.875 -46.875v-312.5
+c0 -103.406 -84 -187.5 -187.5 -187.5c-103.281 0 -187.5 84.0938 -187.5 187.5h125c0 -34.4375 28.0625 -62.5 62.5 -62.5c34.5312 0 62.5 28.0625 62.5 62.5v312.5c0 25.8125 -21 46.875 -46.875 46.875c-25.75 0 -46.875 -21.0625 -46.875 -46.875h-125
+c0 25.8125 -21 46.875 -46.875 46.875c-25.75 0 -46.875 -21.0625 -46.875 -46.875h-125c0 275.688 224.375 500 500 500z" />
+    <glyph glyph-name="glyph66" unicode="&#x26a1;" 
+d="M1015 800l-437.5 -500l187.5 -125l-750 -375l437.5 375l-187.5 125z" />
+    <glyph glyph-name="glyph67" unicode="&#xe053;" 
+d="M1015 675h-1000v125h1000v-125zM640 425h-625v125h625v-125zM1015 50h-1000v125h1000v-125zM765 -200h-750v125h750v-125zM1015 -137.5c0 -34.5312 -28.0625 -62.5 -62.5 -62.5c-34.6562 0 -62.625 27.9688 -62.625 62.5s27.9688 62.5 62.625 62.5
+c34.4375 0 62.5 -27.9688 62.5 -62.5z" />
+    <glyph glyph-name="glyph68" unicode="&#xe054;" 
+d="M1015 675h-1000v125h1000v-125zM640 425h-625v125h625v-125zM1015 172.312h-1000v125h1000v-125zM15 -137.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM390 -137.5c0 -17 -6.16699 -31.667 -18.5 -44
+s-27 -18.5 -44 -18.5c-17.3223 0 -32.0898 6.09863 -44.3037 18.2959s-18.3213 26.9316 -18.3213 44.2041s6.10742 32.0068 18.3213 44.2041s26.9824 18.2959 44.3037 18.2959c17 0 31.667 -6.16699 44 -18.5s18.5 -27 18.5 -44zM515 -137.5
+c0.000976562 41.667 20.834 62.5 62.5 62.5c41.667 0 62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5z" />
+    <glyph glyph-name="glyph69" unicode="&#xe023;" horiz-adv-x="530" 
+d="M452.5 800c34.5312 0 62.5 -27.9688 62.5 -62.5v-687.5c0 -138.062 -111.938 -250 -250 -250s-250 111.938 -250 250v687.5c0 34.5312 27.9688 62.5 62.5 62.5h375zM327.5 50c34.5312 0 62.5 27.9688 62.5 62.5s-27.9688 62.5 -62.5 62.5s-62.5 -27.9688 -62.5 -62.5
+s27.9688 -62.5 62.5 -62.5zM390 300v375h-250v-375h62.7812c0.46875 34.5312 28.5625 62.5 63.2188 62.5s62.7188 -27.9688 63.2188 -62.5h60.7812z" />
+    <glyph glyph-name="glyph70" unicode="&#xe010;" horiz-adv-x="530" 
+d="M265.969 518.75c0 20.1875 10.0938 30.2812 30.2812 30.2812s30.2812 -10.0938 30.2812 -30.2812s-10.0938 -30.2812 -30.2812 -30.2812s-30.2812 10.0938 -30.2812 30.2812zM452.5 800c17 0 31.667 -6.16699 44 -18.5s18.5 -27 18.5 -44v-687.5
+c0 -69.333 -24.333 -128.333 -73 -177s-107.667 -73 -177 -73s-128.333 24.333 -177 73s-73 107.667 -73 177v687.5c0 17 6.16699 31.667 18.5 44s27 18.5 44 18.5h375zM172.219 175c8.35938 0 15.4961 2.95703 21.4102 8.87109s8.87109 13.0508 8.87109 21.4102
+s-2.95703 15.4961 -8.87109 21.4102s-13.0508 8.87109 -21.4102 8.87109s-15.4961 -2.95703 -21.4102 -8.87109s-8.87109 -13.0508 -8.87109 -21.4102s2.95703 -15.4961 8.87109 -21.4102s13.0508 -8.87109 21.4102 -8.87109zM327.5 50c17 0 31.667 6.16699 44 18.5
+s18.5 27 18.5 44s-6.16699 31.667 -18.5 44s-27 18.5 -44 18.5s-31.667 -6.16699 -44 -18.5s-18.5 -27 -18.5 -44s6.16699 -31.667 18.5 -44s27 -18.5 44 -18.5zM390 300v375h-250v-375h62.7812c0.234375 17.2881 6.50391 32.0273 18.8066 44.2158
+c12.3027 12.1895 27.1064 18.2842 44.4121 18.2842c17.3018 0 32.1016 -6.09375 44.3984 -18.2812c12.2988 -12.1875 18.5723 -26.9268 18.8203 -44.2188h60.7812z" />
+    <glyph glyph-name="glyph71" unicode="&#xe073;" 
+d="M1015 675v-250h-125v-62.5c0 -34.4375 -28.0625 -62.5 -62.5 -62.5h-750c-34.4375 0 -62.5 28.0625 -62.5 62.5v375c0 34.3125 28.0625 62.5 62.5 62.5h750c34.4375 0 62.5 -28.1875 62.5 -62.5v-62.5h125z" />
+    <glyph glyph-name="glyph72" unicode="&#x2192;" 
+d="M515.5 800l499.5 -500l-499.5 -500l-177.25 177.281l197.375 197.375h-520.625v250.844h520.625l-197.25 197.25z" />
+    <glyph glyph-name="glyph73" unicode="&#xe059;" 
+d="M1015 800v-750h-250v-250h-750v750h250v250h750zM640 -75v125h-375v250h-125v-375h500zM890 175v375h-500v-375h500z" />
+    <glyph glyph-name="glyph74" unicode="&#x2795;" 
+d="M1015 425v-250h-375v-375h-250v375h-375v250h375v375h250v-375h375z" />
+    <glyph glyph-name="glyph75" unicode="&#x2699;" 
+d="M1015 238.469l-149.438 -63.7188c-4.15625 -11.5938 -8.4375 -22.9688 -13.6875 -34.1875l61.5312 -149.156l-88.375 -88.375l-149.906 60.0625c-11.2188 -5.375 -22.9375 -10 -34.7812 -14.4062l-61.875 -148.688h-125l-63.2188 147.938
+c-12.4375 4.28125 -24.5312 8.78125 -36.375 14.4062l-147.594 -60.7812l-88.375 88.375l59.4375 148.188c-5.84375 12.0938 -10.7188 24.4375 -15.25 37.125l-147.094 61.2812v125l146.969 62.8438c4.53125 12.7188 9.28125 25.0312 15.1562 37.125l-60.5625 147.219
+l88.375 88.375l148.562 -59.6875c11.8438 5.625 23.8125 10.25 36.25 14.6562l61.7812 147.938h125l63.3438 -148.438c11.9688 -4.28125 23.5625 -8.90625 34.9062 -14.4062l148.812 61.2812l88.4062 -88.375l-60.3125 -150.156
+c5.375 -11.125 9.75 -22.4688 13.9062 -34.1875l149.406 -62.25v-125zM514.031 112.5c103.5 0 187.5 84 187.5 187.5s-84 187.5 -187.5 187.5s-187.5 -84 -187.5 -187.5s84 -187.5 187.5 -187.5z" />
+    <glyph glyph-name="glyph76" unicode="&#x26bf;" 
+d="M702.5 675c-103.406 0 -187.5 -84.0938 -187.5 -187.5c0 -9.75 1.15625 -20.5 3.65625 -33.9375l11.9688 -64.6875l-46.5 -46.5l-344.125 -344.125v-73.25h125v125h125v125h125v51.75l36.625 36.625l5.75 5.75l46.5 46.5l64.6875 -11.9688
+c13.4375 -2.4375 24.1875 -3.65625 33.9375 -3.65625c103.406 0 187.5 84.0938 187.5 187.5s-84.0938 187.5 -187.5 187.5zM702.5 800v0c172.594 0 312.5 -139.906 312.5 -312.5s-139.906 -312.5 -312.5 -312.5c-19.5312 0 -38.3438 2.3125 -56.75 5.75l-5.75 -5.75v-125
+h-125v-125h-125v-125h-375v250l380.75 380.75c-3.4375 18.4062 -5.75 37.2188 -5.75 56.75c0 172.594 139.906 312.5 312.5 312.5zM640.25 487.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5z" />
+    <glyph glyph-name="glyph77" unicode="&#xe04c;" 
+d="M1015 -200l-750 500l750 500v-1000zM15 -75v750h250v-750h-250z" />
+    <glyph glyph-name="glyph78" unicode="&#xe003;" 
+d="M515.095 675c-193.719 0 -313.344 -51.75 -355.562 -153.688c-36.25 -87.2812 -17.4688 -195.938 11.8438 -276.25c70.6875 48.8438 134.281 54.9375 281.25 54.9375h414.062c12.2188 37.3438 23.3125 83 23.3125 125c0 120.969 -28.9375 250 -374.906 250zM515.095 800
+v0c250 0 499.906 -62.5 499.906 -375c0 -125 -64.4688 -250 -64.4688 -250h-497.938c-187.375 0 -187.375 0 -312.375 -125c-124.875 125 -324.219 750 374.875 750z" />
+    <glyph glyph-name="glyph79" unicode="&#xe05a;" 
+d="M1015 675v-125h-125v-562.5c0 -103.5 -84 -187.5 -187.5 -187.5h-375c-103.5 0 -187.5 84 -187.5 187.5v562.5h-125v125h125h125c0 69.0938 55.9062 125 125 125h250c69.0938 0 125 -55.9062 125 -125h125h125zM765 -12.5v562.5h-500v-562.5
+c0 -34.4375 28.0625 -62.5 62.5 -62.5h375c34.4375 0 62.5 28.0625 62.5 62.5zM327.5 112.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM327.5 362.5c0 41.667 20.833 62.5 62.5 62.5
+s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM577.5 112.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM577.5 362.5c0 41.667 20.833 62.5 62.5 62.5
+s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5z" />
+    <glyph glyph-name="glyph80" unicode="&#xe027;" 
+d="M265 50v250l125 125l125 -125l250 250l250 -250v-250h-750zM140 -75h875l-125 -125h-750h-125v125v750l125 125v-875z" />
+    <glyph glyph-name="glyph81" unicode="&#xe007;" 
+d="M826.781 418.344c101.312 -19.5938 188.219 -90.4375 188.219 -243.344c0 -179.688 -125 -250 -249.875 -250h-125.125c-69.0938 0 -125 -55.9062 -125 -125c-138.062 0 -249.875 111.938 -249.875 250v125c0 22.4688 2.1875 43.1562 6.21875 62.5h118.656
+c3.90625 0 7.5625 -1.59375 11.2188 -2.25c-6.8125 -16.0625 -11.2188 -35.5938 -11.2188 -60.25v-125c0 -40.7812 19.7812 -77.1562 50.0625 -100.094c45.625 60.7812 118.531 100.094 199.938 100.094h125.125c124.875 0 124.875 94 124.875 125
+c0 33.4375 -0.71875 106.812 -90.8125 121.812c17.0938 37.3125 26.75 78.1875 27.5938 121.531zM515 675h-249.875c-125.125 0 -125.125 -87.6562 -125.125 -125c0 -30.9375 0 -125 125.125 -125h124.875c81.5312 0 154.312 -39.375 199.938 -100.031
+c30.2812 22.8125 50.0625 59.125 50.0625 100.031v125c0 108.75 -78.375 125 -125 125zM515 800v0c125 0 250.125 -77.4688 250.125 -250v-125c0 -138.062 -112.062 -250 -250.125 -250c0 68.9688 -55.9062 125 -125 125h-124.875c-125.125 0 -250.125 70.3125 -250.125 250
+c0 183.594 125 250 250.125 250h249.875z" />
+    <glyph glyph-name="glyph82" unicode="&#xe064;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM515.969 177.062c68.625 0 124.031 55.4375 124.031 123.906c0 68.625 -55.4062 124.031 -124.031 124.031
+c-68.375 0 -123.906 -55.4375 -123.906 -124.031c0 -68.5 55.5312 -123.906 123.906 -123.906z" />
+    <glyph glyph-name="glyph83" unicode="&#x275e;" 
+d="M15 800h375v-375c0 -206.781 -168.219 -375 -375 -375v125c137.938 0 250 112.188 250 250h-250v375zM640 800h375v-375c0 -206.781 -168.219 -375 -375 -375v125c137.938 0 250 112.188 250 250h-250v375z" />
+    <glyph glyph-name="glyph84" unicode="&#xe01b;" 
+d="M514.875 133.375h0.125c0 -184.094 -149.281 -333.375 -333.375 -333.375c-60.5312 0 -117.188 16.125 -166.125 44.1875l-0.5 0.625c99.5 57.5938 166.5 165.156 166.625 288.438l-0.125 0.125c0 92.0312 74.5938 166.625 166.625 166.625
+s166.75 -74.5938 166.75 -166.625zM978.312 763.375c48.8438 -48.8125 48.8438 -127.938 0 -176.75l-356.25 -356.312c-29.2812 82.5 -94.2188 147.469 -176.75 176.875l356.25 356.188c48.8125 48.8125 127.938 48.8125 176.75 0z" />
+    <glyph glyph-name="glyph85" unicode="&#x2601;" 
+d="M765 675c137.812 0 250 -112.125 250 -250c0 -137.938 -112.188 -250 -250 -250h-500c-137.812 0 -250 112.062 -250 250c0 137.875 112.188 250 250 250c11.0938 0 22.0938 -1.65625 33.0938 -3.125c42.9688 76.0625 123.531 128.125 216.906 128.125
+c91.4375 0 172 -51.4375 215.562 -128.469c11.375 1.5625 22.5938 3.46875 34.4375 3.46875zM765 300c68.9688 0 125 56.1562 125 125c0 68.9062 -56.0312 125 -125 125s-125 -56.0938 -125 -125h-125c0 81.4062 39.6875 153.125 99.9688 198.781
+c-22.9375 31.1875 -59.5625 51.2188 -99.9688 51.2188c-68.9688 0 -125 -56.0938 -125 -125c0 -15.0625 2.5625 -29.7812 7.8125 -43.6562l-117.062 -43.8125c-10.25 27.3438 -15.375 56.2812 -15.625 85.75c-64.4688 -1.875 -125.125 -54.75 -125.125 -123.281
+c0 -68.8438 56.0312 -125 125 -125h500z" />
+    <glyph glyph-name="glyph86" unicode="&#xe025;" 
+d="M515 800c276.125 0 500 -370.125 500 -370.125s-223.875 -379.875 -500 -379.875s-500 379.875 -500 379.875s223.875 370.125 500 370.125zM515 175c138.062 0 250 111.938 250 250s-111.938 250 -250 250s-250 -111.938 -250 -250s111.938 -250 250 -250zM390 424.5
+c0 83.333 41.667 125 125 125s125 -41.667 125 -125s-41.667 -125 -125 -125s-125 41.667 -125 125z" />
+    <glyph glyph-name="glyph87" unicode="&#xe048;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM327.5 50l500.25 250l-500.25 250v-500z" />
+    <glyph glyph-name="glyph88" unicode="&#xe041;" 
+d="M1015 174.961h-750.234v-125.008l-249.766 187.512l249.766 187.512v-125.008h750.234v-125.008zM15 549.984v125.008h749.984v125.008l250.016 -187.512l-250.016 -187.512v125.008h-749.984z" />
+    <glyph glyph-name="glyph89" unicode="&#xe006;" 
+d="M978.345 763.376c48.8428 -48.8105 48.8428 -127.934 0 -176.744l-472.642 -472.798c-10.9678 41.249 -31.499 80.5596 -63.7168 112.777c-31.7178 31.749 -70.7783 52.9678 -113.027 63.9668l472.642 472.798c48.8105 48.8115 127.934 48.8115 176.744 0z
+M353.614 -38.5049l-338.614 -161.495l161.87 338.239c48.8105 48.8115 127.934 48.8115 176.744 0c48.8115 -48.8105 48.8115 -127.934 0 -176.744z" />
+    <glyph glyph-name="glyph90" unicode="&#xe070;" 
+d="M515 175c0 83.333 41.667 125 125 125s125 -41.667 125 -125s-41.667 -125 -125 -125s-125 41.667 -125 125zM890 550c34.667 0 64.167 -12.167 88.5 -36.5s36.5 -53.833 36.5 -88.5v-625h-1000v625c0 34.667 12.167 64.167 36.5 88.5s53.833 36.5 88.5 36.5h250.125
+l106.625 206.719c4.08496 12.7148 11.5781 23.1064 22.4785 31.1768c10.9023 8.06934 23.2432 12.1045 37.0215 12.1045h171.625c13.6172 0 25.8428 -3.94238 36.6758 -11.8262c10.834 -7.88477 18.3896 -18.0781 22.668 -30.5801zM202.5 300c17 0 31.667 6.16699 44 18.5
+s18.5 27 18.5 44s-6.16699 31.667 -18.5 44s-27 18.5 -44 18.5s-31.667 -6.16699 -44 -18.5c-12.334 -12.333 -18.5 -27 -18.5 -44s6.16699 -31.667 18.5 -44s27 -18.5 44 -18.5zM640 -75c69.333 0 128.333 24.333 177 73s73 107.667 73 177s-24.333 128.333 -73 177
+s-107.667 73 -177 73s-128.333 -24.333 -177 -73s-73 -107.667 -73 -177s24.333 -128.333 73 -177s107.667 -73 177 -73z" />
+    <glyph glyph-name="glyph91" unicode="&#xe03a;" 
+d="M1015 300c0 -276.125 -223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500s500 -223.875 500 -500zM327.5 237.5h375v-62.5l125 125l-125 125v-62.5h-375v62.5l-125 -125l125 -125v62.5z" />
+    <glyph glyph-name="glyph92" unicode="&#x2935;" 
+d="M890 362.5v-62.5h125l-250 -250l-250 250h125v62.5c0 172.594 -139.906 312.5 -312.5 312.5s-312.5 -139.906 -312.5 -312.5c0 241.562 195.938 437.5 437.5 437.5s437.5 -195.938 437.5 -437.5z" />
+    <glyph glyph-name="glyph93" unicode="&#xe039;" 
+d="M327.5 612.5l62.5 -62.5h-187.5v-125l-187.5 187.5l187.5 187.5v-125h187.5zM700.562 612.5l-60.5625 62.5h187.5v125l187.5 -187.5l-187.5 -187.5v125h-187.5z" />
+    <glyph glyph-name="glyph94" unicode="&#xe026;" 
+d="M676.625 363.125l-252.625 427.719c29.5938 5.4375 59.8125 9.15625 91 9.15625c131.594 0 250.25 -51.8125 339.469 -134.875zM638.531 175l260.125 442.031c71.6562 -86.5 116.344 -196 116.344 -317.031c0 -43.4688 -7.3125 -84.8438 -17.7188 -125h-358.75z
+M530.625 487.5h-478.5c55.1562 136.062 168.875 240.406 309.5 286zM503.531 112.5h474.375c-54.6875 -134.875 -166.875 -238.531 -305.906 -284.906zM393.281 425l-260.906 -443.375c-72.2188 86.8125 -117.375 196.781 -117.375 318.375
+c0 43.4688 7.25 84.8438 17.6875 125h360.594zM356.188 238.719l253.781 -429.188c-30.75 -5.875 -62.4375 -9.53125 -94.9688 -9.53125c-130.969 0 -249.156 51.5 -338.312 133.781z" />
+    <glyph glyph-name="glyph95" unicode="&#xe031;" horiz-adv-x="887" 
+d="M729.286 228.571h142.856c0 -236.607 -191.964 -428.571 -428.571 -428.571s-428.571 191.964 -428.571 428.571s191.964 428.571 428.571 428.571h143.679v142.857l214.714 -214.286l-214.714 -215v142.036l-143.679 1.53613
+c-157.643 0 -285.714 -128.072 -285.714 -285.715s128.071 -285.714 285.714 -285.714s285.715 128.071 285.715 285.714z" />
+    <glyph glyph-name="glyph96" unicode="&#xe074;" 
+d="M369.25 183.188c9.15625 -10.25 18.6875 -19.7812 28.6875 -28.8125c-57.875 -57.875 -137.938 -137.969 -177.625 -177.75c-13.5625 13.6875 -15.375 15.375 -28.8125 28.8125c39.8125 39.6875 119.781 119.875 177.75 177.75zM644.156 800
+c204.469 0 370.844 -166.25 370.812 -370.75c0 -204.344 -166.375 -370.594 -370.844 -370.594c-48.8125 0 -95.2188 10 -137.938 27.3438l-285.875 -286l-205.312 205.438l285.75 285.781c-17.2188 42.7188 -27.2188 89.125 -27.2188 138.062
+c0 204.469 166.281 370.719 370.625 370.719zM644.156 183.531c135.625 0 245.969 110.375 245.969 245.719c0 135.5 -110.344 245.844 -245.969 245.844c-135.375 0 -245.719 -110.344 -245.719 -245.844c0 -135.375 110.344 -245.719 245.719 -245.719z" />
+    <glyph glyph-name="glyph97" unicode="&#xe06e;" horiz-adv-x="780" 
+d="M515 800c138.062 0 250 -111.938 250 -250v-500c0 -138.062 -111.938 -250 -250 -250h-250c-138.062 0 -250 111.938 -250 250v500c0 138.062 111.938 250 250 250h250zM390 -139.438c35.5938 0 64.4375 28.8125 64.4375 64.4375s-28.875 64.4375 -64.4375 64.4375
+s-64.4375 -28.8125 -64.4375 -64.4375s28.8438 -64.4375 64.4375 -64.4375zM640 50v500c0 68.8438 -56.1562 125 -125 125h-250c-68.9062 0 -125 -56.1562 -125 -125v-500h500z" />
+    <glyph glyph-name="glyph98" unicode="&#xe046;" horiz-adv-x="905" 
+d="M640 800h125v-121.094c0 -83.5 -32.4688 -161.875 -91.5625 -220.938l-353.625 -351.562c-32.0938 -32.25 -50.1875 -73.9688 -53.4688 -118.906h123.656l-187.5 -187.5l-187.5 187.5h126.469c3.40625 78.25 34.1875 151.5 90.0938 207.281l353.625 351.688
+c35.2812 35.2812 54.8125 82.375 54.8125 132.438v121.094zM763.531 -12.5h126.469l-187.5 -187.5l-187.5 187.5h123.656c-3.3125 44.9375 -21.375 86.8125 -53.5938 118.906l-70.0625 69.6875l88.625 88.1562l69.6875 -69.2188
+c55.9062 -56.0312 86.8125 -129.281 90.2188 -207.531z" />
+    <glyph glyph-name="glyph99" unicode="&#xe06b;" 
+d="M15 -200v625h1000v-625h-1000zM577.5 550v250h312.5l125 -250h-437.5zM452.5 800v-250h-437.5l125 250h312.5z" />
+    <glyph glyph-name="glyph100" unicode="&#xe00a;" horiz-adv-x="905" 
+d="M765 -200v1000h125v-1000h-125zM515 -200v750h125v-750h-125zM265 -200v500h125v-500h-125zM15 -200v250h125v-250h-125z" />
+    <glyph glyph-name="glyph101" unicode="&#x2764;" 
+d="M737.281 675c-62.9688 0 -120.469 -67.625 -120.469 -67.625l-101.812 -108.156l-101.812 108.156s-57.4688 67.625 -120.469 67.625c-76.7812 0 -140.375 -56.875 -151.125 -130.875l373.531 -428.688l373.281 428.688c-10.75 74 -74.3438 130.875 -151.125 130.875z
+M737.281 800v0c153.219 0 277.719 -124.75 277.719 -277.844v-22.9375l-499.75 -574.219l-500.25 574.219v22.9375c0 153.094 124.5 277.844 277.719 277.844c93.25 0 171.875 -49.3125 222.281 -120.125c50.4062 70.8125 129.031 120.125 222.281 120.125z" />
+    <glyph glyph-name="glyph102" unicode="&#xe072;" 
+d="M265 175c-138.062 0 -250 111.938 -250 250s111.938 250 250 250v-500zM390 675l250 125v-750l-250 125v500zM765.125 175.125v125c10.375 0 20.875 1.34375 31.375 3.90625c54.8125 13.9062 93.5 63.7188 93.5 120.969s-38.6875 106.938 -94.125 120.969
+c-9.875 2.4375 -20.375 3.78125 -30.75 3.78125v125c20.5 0 41 -2.5625 61.1562 -7.4375c111.219 -28.3125 188.719 -127.812 188.719 -242.312c0 -114.625 -77.5 -214.219 -188.344 -242.312c-20.75 -5 -41.2812 -7.5625 -61.5312 -7.5625v0z" />
+    <glyph glyph-name="glyph103" unicode="&#x2714;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM762.312 141.062l-158.938 158.938l158.938 158.938l-88.375 88.375l-158.938 -158.938l-158.938 158.938l-88.375 -88.375l158.938 -158.938
+l-158.938 -158.938l88.375 -88.375l158.938 158.938l158.938 -158.938z" />
+    <glyph glyph-name="glyph104" unicode="&#xe077;" 
+d="M996.648 781.688c24.4062 -24.4062 24.4062 -63.9688 -0.03125 -88.375l-127.562 -127.312c80.3438 -121.219 67.1562 -286.375 -39.7812 -393.188l-187.125 -187.5c-61.0312 -61.0312 -140.969 -91.5625 -220.938 -91.5625c-60.0625 0 -120.25 17.2188 -172.375 51.875
+l-127.188 -127.312c-12.2188 -12.2188 -28.1875 -18.3125 -44.1875 -18.3125s-31.9688 6.09375 -44.1562 18.3125c-24.4062 24.4062 -24.4062 63.9688 0 88.375l127.188 127.312c-80.3438 121.219 -67.1562 286.375 39.7812 393.188l187.5 187.5
+c61.0312 61.0312 140.969 91.5625 220.938 91.5625c60.0625 0 119.875 -17.2188 172 -51.875l127.188 127.312c24.4062 24.4062 64.3438 24.4062 88.75 0zM795.836 393.75c0 28.4375 -6.8125 55.5312 -18.8125 80.3125l-94.9688 -95.2188
+c-12.1875 -12.2188 -28.1875 -18.3125 -44.0625 -18.3125c-16 0 -31.9688 6.09375 -44.1875 18.3125c-24.4062 24.4062 -24.4062 63.9688 0 88.375l95.0938 95.2188c-24.875 11.8438 -51.875 18.8125 -80.1875 18.8125c-50.0625 0 -97.1562 -19.5312 -132.562 -54.9375
+l-187.5 -187.5c-35.4062 -35.4062 -54.9375 -82.5 -54.9375 -132.562c0 -28.4375 6.84375 -55.5312 18.8125 -80.3125l93.25 93.25c24.4062 24.4062 63.9688 24.4062 88.375 0s24.4062 -63.9688 0 -88.375l-93.25 -93.25c24.9062 -11.8438 52 -18.8125 80.3125 -18.8125
+c50.0625 0 97.1562 19.5312 132.562 54.9375l187.125 187.5c35.4062 35.4062 54.9375 82.5 54.9375 132.562z" />
+    <glyph glyph-name="glyph105" unicode="&#x263e;" 
+d="M199.219 514.125c-37.7188 -59.7188 -59.2188 -129.719 -59.2188 -203.062c0 -212.969 173.094 -386.062 386 -386.031c73.375 0 143.438 21.4688 203.125 59.3125c-279.906 26.375 -503.406 249.812 -529.906 529.781zM385.125 800v0
+c-39.3125 -68.0625 -63.5 -145.938 -63.4688 -230.156c0 -255.719 207.25 -463.188 463.125 -463.188c84.2188 0 162.094 24.1562 230.219 63.4688c-61.5 -213.156 -255.969 -370.125 -489 -370.125c-282.25 0 -511 228.75 -511 511.062
+c0 233.031 156.875 427.406 370.125 488.938z" />
+    <glyph glyph-name="glyph106" unicode="&#xe01e;" 
+d="M941.719 372.375l-18.4375 -18.4375l89.9688 -89.9062l-88.25 -88.125l-89.8438 89.9062l-465.656 -465.812h-231.688l-122.812 124.75v226.5l467.312 467.594l-93.0312 92.9688l88.125 88.1875l92.9375 -93.0312l18.4375 18.4375
+c48.6875 48.7188 112.656 73.125 176.438 73.125c63.8438 0 127.781 -24.4062 176.5 -73.125c97.5312 -97.4062 97.5312 -255.562 0 -353.031zM337.5 -55.2188l409.406 409.406l-176.344 176.219l-403.719 -403.844c43.4688 -1.46875 86.6562 -17.7188 119.75 -50.9062
+c36 -36.0312 52.375 -83.5 50.9062 -130.875z" />
+    <glyph glyph-name="glyph107" unicode="&#xe036;" horiz-adv-x="905" 
+d="M761.344 609.812c83 -82.75 128.656 -192.75 128.656 -309.812c0 -241.219 -196.281 -437.5 -437.5 -437.5v-62.5l-125 125l125 125v-62.5c172.375 0 312.5 140.125 312.5 312.5c0 83.625 -32.7188 162.219 -91.7812 221.312zM577.5 675l-125 -125v62.5
+c-172.375 0 -312.5 -140.125 -312.5 -312.5c0 -83.625 32.7188 -162.219 91.7812 -221.312l-88.125 -88.5c-83 82.75 -128.656 192.75 -128.656 309.812c0 241.219 196.281 437.5 437.5 437.5v62.5z" />
+    <glyph glyph-name="glyph108" unicode="&#xe02c;" 
+d="M15 -75c0 83.3545 41.6768 125.031 125.031 125.031s125.031 -41.6768 125.031 -125.031s-41.6768 -125.031 -125.031 -125.031s-125.031 41.6768 -125.031 125.031zM203.219 174.875v125.125c86.1514 0 159.783 -30.5771 220.895 -91.7305
+c61.1123 -61.1543 91.668 -134.827 91.668 -221.02h-125.062c0 51.7588 -18.3223 95.9697 -54.9678 132.632s-80.8223 54.9932 -132.532 54.9932zM202.375 425v125c76.1846 0 149.002 -14.8789 218.452 -44.6367c69.4512 -29.7578 129.301 -69.7666 179.549 -120.026
+c50.249 -50.2607 90.249 -110.123 119.998 -179.589c29.751 -69.4658 44.626 -142.298 44.626 -218.498h-125c0 59.2617 -11.5752 115.91 -34.7256 169.946c-23.1504 54.0371 -54.2705 100.604 -93.3623 139.701c-39.0908 39.0977 -85.6445 70.2227 -139.662 93.374
+c-54.0166 23.1523 -110.642 34.7285 -169.875 34.7285zM202.375 675v125c110.043 0 215.219 -21.4854 315.528 -64.4551s186.752 -100.746 259.328 -173.328c72.5762 -72.583 130.349 -159.038 173.316 -259.367s64.4521 -205.528 64.4521 -315.6h-125
+c0 93.1348 -18.1807 182.15 -54.543 267.049s-85.251 158.058 -146.665 219.479c-61.415 61.4199 -134.562 110.312 -219.439 146.677c-84.8789 36.3643 -173.871 54.5459 -266.978 54.5459z" />
+    <glyph glyph-name="glyph109" unicode="&#x2139;" horiz-adv-x="530" 
+d="M77.5 300h-62.5v125h375l0.25 -437.5c0 -34.5312 28.0625 -62.5 62.5 -62.5h62.25v-125h-500v125h62.5c34.5312 0 62.5 27.9688 62.5 62.5v250c0 34.5312 -27.9688 62.5 -62.5 62.5zM140 675c0 83.333 41.667 125 125 125s125 -41.667 125 -125s-41.667 -125 -125 -125
+s-125 41.667 -125 125z" />
+    <glyph glyph-name="glyph110" unicode="&#xe02a;" 
+d="M578.219 363.281v180.906c88.625 -22.875 157.969 -92.2812 180.906 -180.906h-180.906zM578.219 236.719h180.906c-22.9375 -88.5625 -92.2812 -158.031 -180.906 -180.844v180.844zM451.656 363.281h-180.906c22.8125 88.625 92.2812 158.031 180.906 180.906v-180.906
+zM451.656 236.719v-180.844c-88.625 22.8125 -158.094 92.2812 -180.906 180.844h180.906zM578.219 673.344v126.656c228.531 -28.875 407.844 -208.25 436.781 -436.719h-126.719c-26.875 158.562 -151.5 283.219 -310.062 310.062zM141.594 363.281h-126.594
+c28.8125 228.469 208.25 407.844 436.656 436.719v-126.656c-158.594 -26.8438 -283.219 -151.469 -310.062 -310.062zM451.656 -73.2812v-126.719c-228.406 28.8125 -407.844 208.25 -436.656 436.719h126.594c26.875 -158.625 151.5 -283.156 310.062 -310z
+M888.281 236.719h126.719c-28.9375 -228.469 -208.25 -407.906 -436.781 -436.719v126.719c158.562 26.8438 283.219 151.375 310.062 310z" />
+    <glyph glyph-name="glyph111" unicode="&#xe057;" 
+d="M1015 -75l-125 -125l-437.5 437.625l-187.5 -187.5l-250 749.875l750 -250l-187.5 -187.5z" />
+    <glyph glyph-name="glyph112" unicode="&#x2796;" 
+d="M15 550v250h1000v-250h-1000z" />
+    <glyph glyph-name="glyph113" unicode="&#xe00b;" 
+d="M389.625 424.031v125.969h249.75v-125.969h-249.75zM889.156 548.531l124.875 -125h-124.875v-623.531h-749.281c-68.9688 0 -124.875 55.9062 -124.875 125v750c0 69.0938 55.9062 125 124.875 125h749.281v-125.5h125.844zM764.281 -75h-0.03125v750h-499.5v-750
+h499.531z" />
+    <glyph glyph-name="glyph114" unicode="&#xe061;" 
+d="M952.5 300c34.5312 0 62.5 -27.9688 62.5 -62.5v-250c0 -34.5312 -27.9688 -62.5 -62.5 -62.5h-187.5v375v125c0 137.938 -112.062 250 -250 250s-250 -112.062 -250 -250v-125v-375h-187.5c-34.5312 0 -62.5 27.9688 -62.5 62.5v250c0 34.5312 27.9688 62.5 62.5 62.5
+h62.5v125c0 207.031 167.969 375 375 375s375 -167.969 375 -375v-125h62.5z" />
+    <glyph glyph-name="numbersign" unicode="#" horiz-adv-x="905" 
+d="M890 425h-172l-31.125 -250h203.125v-125h-218.75l-31.25 -250h-125.062l31.25 250h-249.938l-31.25 -250h-125l31.25 250h-156.25v125h171.875l31.1875 250h-203.062v125h218.688l31.25 250h125l-31.25 -250h249.938l31.25 250h125l-31.25 -250h156.375v-125z
+M561.812 175l31.1875 250h-249.938l-31.1875 -250h249.938z" />
+    <glyph glyph-name="glyph116" unicode="&#xe013;" 
+d="M515 -200c-276.125 0 -500 223.875 -500 499.938c0 276.188 223.875 500.062 500 500.062s500 -223.875 500 -500.062c0 -276.062 -223.875 -499.938 -500 -499.938zM514.031 550l-249.031 -250.062l249.031 -248.969v186.531h250.969v124.938h-250.969v187.562z" />
+    <glyph glyph-name="glyph117" unicode="&#xe017;" 
+d="M515 -200c-276.125 0 -500 223.875 -500 500s223.875 500 500 500s500 -223.875 500 -500s-223.875 -500 -500 -500zM327.5 425l-125 -125l125 -125v62.5h500v125h-500v62.5z" />
+    <glyph glyph-name="glyph118" unicode="&#xe050;" 
+d="M783.312 -56.6875l-143.312 -143.312v375h375l-143.312 -143.312l143.312 -143.312l-88.375 -88.375zM15 425l143.312 143.312l-141.844 141.969l88.375 88.375l141.844 -141.969l143.312 143.312v-375h-375zM15 -111.625l143.312 143.312l-143.312 143.312h375v-375
+l-143.312 143.312l-143.312 -143.312zM640 425v375l143.312 -143.312l142.094 141.969l88.375 -88.375l-142.094 -141.969l143.312 -143.312h-375z" />
+    <glyph glyph-name="glyph119" unicode="&#xe02e;" 
+d="M889.938 50c69.0938 0 125 -56.0312 125 -125s-55.9062 -125 -125 -125s-125 56.0312 -125 125c0 15.75 3.65625 30.5312 9.03125 44.4375l-189.281 135.125c-33.9062 -33.6875 -80.6875 -54.5625 -132.188 -54.5625c-103.5 0 -187.5 84 -187.5 187.5
+c0 9.75 1.46875 19.1562 2.9375 28.5625l-149.656 49.9375c-11 -9.65625 -25.0312 -16 -40.7812 -16c-34.5312 0 -62.5 27.9688 -62.5 62.5s27.9688 62.5 62.5 62.5c29.9062 0 53.7188 -21.375 59.8125 -49.4375l150.5 -50.2812
+c31.625 59.0625 93.1562 99.7188 164.688 99.7188c40.5312 0 77.625 -13.0625 108.406 -35.0312l222.844 223.031c-10.75 18.5312 -18.8125 38.9375 -18.8125 62c0 69.0938 55.9062 125 125 125s125 -55.9062 125 -125s-55.9062 -125 -125 -125
+c-22.9375 0 -43.4688 7.9375 -62 18.8125l-222.844 -223.031c21.7188 -30.5312 34.9062 -67.875 34.9062 -108.281c0 -29.4062 -7.3125 -56.75 -19.4062 -81.6562l190.5 -136c21.7188 17.9375 48.5625 30.1562 78.8438 30.1562z" />
+    <glyph glyph-name="glyph120" unicode="&#xe04f;" 
+d="M934.188 -30.8125l80.8125 80.8125v-250h-250l80.8125 80.8125l-205.812 205.812l88.375 88.375zM95.8125 630.812l-80.8125 -80.8125v250h250l-80.8125 -80.8125l205.812 -205.812l-88.375 -88.375zM845.812 719.188l-80.8125 80.8125h250v-250l-80.8125 80.8125
+l-205.812 -205.812l-88.375 88.375zM390 86.625l-205.812 -205.812l80.8125 -80.8125h-250v250l80.8125 -80.8125l205.812 205.812z" />
+    <glyph glyph-name="at" unicode="@" 
+d="M947.625 50h-182.625c-40.7812 0 -75.4375 21 -98.1562 51.2812c-42.2188 -32.4688 -94.4688 -52.625 -151.844 -52.625c-138.062 0 -250 111.938 -250 250s111.938 250 250 250h250v-373.656h101.188l1 -1.46875c14.2812 39.5625 22.8125 82.0312 22.8125 126.469
+c0 206.781 -168.219 375 -375 375s-375 -168.219 -375 -375s168.219 -375 375 -375c103.406 0 197.156 42 265 110l88.5 -88.5c-90.4375 -90.5938 -215.438 -146.5 -353.5 -146.5c-276.125 0 -500 223.875 -500 500s223.875 500 500 500s500 -223.875 500 -500
+c0 -91.3125 -24.5312 -176.406 -67.375 -250zM640 362.25v61.4062h-125c-68.8438 0 -125 -56.0312 -125 -125s56.1562 -125 125 -125s125 56.0312 125 125v63.5938z" />
+    <glyph glyph-name="glyph122" unicode="&#xe05e;" 
+d="M764.875 425c125.125 0 250.125 -66.4062 250.125 -250c0 -179.688 -125 -250 -250.125 -250h-124.875c-68.9688 0 -125 -55.9062 -125 -125c-138.062 0 -250.125 111.938 -250.125 250v125c0 49.8125 11.375 90.625 29.0625 125h-29.0625
+c-124.875 0 -249.875 70.3125 -249.875 250c0 183.594 125 250 249.875 250h250.125c125 0 249.875 -77.4062 249.875 -250v-125zM264.875 425h125.125c81.5312 0 154.062 -39.3125 199.719 -100.031c30.625 22.8125 50.2812 59.1875 50.2812 100.031v125
+c0 108.75 -78.25 125 -125 125h-250.125c-124.875 0 -124.875 -87.5312 -124.875 -125c0 -30.875 0 -125 124.875 -125zM764.875 50c125.125 0 125.125 94.0625 125.125 125c0 37.4062 0 125 -125.125 125l-34.5312 0.0625l0.125 0.0625
+c-43.3438 -74.4688 -123.062 -125.125 -215.469 -125.125c0 48.2188 -28.1875 88.875 -68.25 109.75c-28.3125 -17.8438 -56.75 -52.5625 -56.75 -109.75v-125c0 -40.7812 19.6562 -77.1562 50.0625 -99.9688c45.625 60.6562 118.281 99.9688 199.938 99.9688h124.875z
+M756.219 362.688c-0.84375 -2.875 -1.84375 -5.6875 -2.6875 -8.46875c0.84375 2.78125 1.9375 5.59375 2.6875 8.46875z" />
+    <glyph glyph-name="glyph123" unicode="&#xe03d;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM577.5 112.5v375h62.5l-125 125l-125 -125h62.5v-375h-62.5l125 -125l125 125h-62.5z" />
+    <glyph glyph-name="glyph124" unicode="&#xe03c;" horiz-adv-x="405" 
+d="M202.5 112.5l62.5 62.5v-187.5h125l-187.5 -187.5l-187.5 187.5h125v187.5zM202.5 485.562l-62.5 -60.5625v187.5h-125l187.5 187.5l187.5 -187.5h-125v-187.5z" />
+    <glyph glyph-name="glyph125" unicode="&#x2717;" 
+d="M388.156 -4.8125l-373.156 373.188l177.969 177.969l195.188 -195.188l448.875 448.844l177.969 -177.969z" />
+    <glyph glyph-name="glyph126" unicode="&#xe05f;" horiz-adv-x="780" 
+d="M390 300c-69.0938 0 -125 55.9688 -125 125v250c0 69.0312 55.9062 125 125 125s125 -55.9688 125 -125v-250c0 -69.0312 -55.9062 -125 -125 -125zM515 -75c69.0938 0 125 -55.9062 125 -125h-500c0 69.0938 55.9062 125 125 125h62.5v131.344
+c-177 29.9062 -312.5 183.281 -312.5 368.656v60.5625c0 34.5312 27.9688 62.5 62.5 62.5s62.5 -27.9688 62.5 -62.5v-60.5625c0 -137.875 112.062 -250 250 -250s250 112.125 250 250v60.5625c0 34.5312 27.9688 62.5 62.5 62.5s62.5 -27.9688 62.5 -62.5v-60.5625
+c0 -185.375 -135.5 -338.75 -312.5 -368.656v-131.344h62.5z" />
+    <glyph glyph-name="glyph127" unicode="&#xe06c;" 
+d="M1015 -200h-1000v875h125v-750h750v750h125v-875zM202.5 300v125h125v-125h-125zM452.5 299.75v125h125v-125h-125zM702.5 300v125h125v-125h-125zM202.5 50v125h125v-125h-125zM452.5 50v125h125v-125h-125zM702.5 50v125h125zM265 612.5v125
+c0 34.5312 27.9688 62.5 62.5 62.5s62.5 -27.9688 62.5 -62.5v-125c0 -34.5312 -27.9688 -62.5 -62.5 -62.5s-62.5 27.9688 -62.5 62.5zM640 612.5v125c0 34.5312 27.9688 62.5 62.5 62.5s62.5 -27.9688 62.5 -62.5v-125c0 -34.5312 -27.9688 -62.5 -62.5 -62.5
+s-62.5 27.9688 -62.5 62.5z" />
+    <glyph glyph-name="glyph128" unicode="&#xe069;" horiz-adv-x="905" 
+d="M140 800h750v-1000h-750c-69.0312 0 -125 55.9062 -125 125v750c0 69.0938 55.9688 125 125 125zM765 -75v750h-124.531v-252.188l-125.969 125.969l-125 -125v251.219h-124.5v-750h500z" />
+    <glyph glyph-name="glyph129" unicode="&#xe03f;" 
+d="M515 112.5l62.5 62.5v-187.5h125l-187.5 -187.5l-187.5 187.5h125v187.5zM515 485.562l-62.5 -60.5625v187.5h-125l187.5 187.5l187.5 -187.5h-125v-187.5zM327.5 300l62.5 -62.5h-187.5v-125l-187.5 187.5l187.5 187.5v-125h187.5zM700.562 300l-60.5625 62.5h187.5v125
+l187.5 -187.5l-187.5 -187.5v125h-187.5z" />
+    <glyph glyph-name="glyph130" unicode="&#xe040;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM702.5 175l125 125l-125 125v-62.5h-125v125h62.5l-125 125l-125 -125h62.5v-125h-125v62.5l-125 -125l125 -125v62.5h125v-125h-62.5l125 -125l125 125
+h-62.5v125h125v-62.5z" />
+    <glyph glyph-name="glyph131" unicode="&#xe022;" horiz-adv-x="530" 
+d="M265 175c44.0625 0 85.6875 9.03125 125 22.9375v-397.938l-125 125l-125 -125v397.938c39.3125 -13.9062 80.9375 -22.9375 125 -22.9375zM265 800c138.062 0 250 -111.938 250 -250s-111.938 -250 -250 -250s-250 111.938 -250 250s111.938 250 250 250zM265 425
+c69.0938 0 125 55.9688 125 125s-55.9062 125 -125 125s-125 -55.9688 -125 -125s55.9062 -125 125 -125z" />
+    <glyph glyph-name="glyph132" unicode="&#xe078;" 
+d="M1002.72 623.656c7.34375 -23.4375 12.2188 -47.8438 12.2188 -73.6562c0 -138 -111.812 -250 -250 -250c-38.3438 0 -74.4688 9.46875 -107 24.9688l-500.719 -500.562c-14.9062 -15.125 -35.7812 -24.4062 -58.8438 -24.4062c-46.1562 0 -83.375 37.3438 -83.375 83.25
+c0 23.1875 9.40625 43.9375 24.4062 59.0938l500.625 500.469c-15.625 32.6562 -25.0312 68.6875 -25.0312 107.188c0 138 111.938 250 249.938 250c25.1562 0 48.8438 -4.8125 71.7812 -11.7188l-155.031 -154.969v-166.625h164.062z" />
+    <glyph glyph-name="glyph133" unicode="&#xe047;" horiz-adv-x="780" 
+d="M15 800l750 -500l-750 -500v1000z" />
+    <glyph glyph-name="glyph134" unicode="&#x2605;" 
+d="M706.781 186.719l120.969 -386.719l-312.25 240l-313 -240l121.094 387.688l-308.594 237.312h375l125 375l125 -375h375z" />
+    <glyph glyph-name="glyph135" unicode="&#xe028;" 
+d="M444.398 193.812l-312.75 -312.5c-155.531 179.812 -155.531 445.188 0 625zM639.961 670.062c211.438 -31.4375 375 -210.156 374.969 -430.719c0 -242.812 -196.781 -439.344 -439.688 -439.344c-97.6562 0 -186.031 34.4375 -258.781 88.75l323.5 323.625v457.688z
+M514.961 299.5l-358.656 358.719c99.375 81.2188 222.438 133.344 358.656 141.781v-500.5z" />
+    <glyph glyph-name="glyph136" unicode="&#x26c6;" 
+d="M765 674.992c137.946 0 250.016 -112.132 250.016 -250.016s-112.069 -250.016 -250.016 -250.016h-499.969c-137.821 0 -250.016 112.132 -250.016 250.016s112.194 250.016 250.016 250.016c11.0947 0 22.0947 -1.65625 33.0957 -3.125
+c42.9717 76.0674 123.539 128.133 216.92 128.133c91.4434 0 172.073 -51.4404 215.514 -128.477c11.5 1.5625 22.7197 3.46875 34.4395 3.46875zM765 299.969c69.0977 0 125.008 56.0977 125.008 125.008s-55.9102 125.008 -125.008 125.008
+c-68.8477 0 -124.945 -56.0977 -124.945 -125.008h-125.008c0 81.4111 39.6904 153.135 99.9746 198.794c-22.9385 31.1895 -59.5654 51.2217 -99.9746 51.2217c-68.9727 0 -125.008 -56.0977 -125.008 -125.008c0 -15.0635 2.5625 -29.7832 7.8125 -43.6592
+l-117.069 -43.8154c-10.251 27.3457 -15.376 56.2852 -15.626 85.7559c-64.4727 -1.875 -125.133 -54.7539 -125.133 -123.289c0 -68.9102 56.0352 -125.008 125.008 -125.008h499.969zM117.554 -57.4912c-23.4395 -23.4385 -61.5352 -23.4385 -84.9746 0
+c-23.4385 23.4395 -23.4385 61.5352 0 84.9746c23.4395 23.4385 169.948 84.9736 169.948 84.9736s-61.5352 -146.509 -84.9736 -169.948zM367.694 -57.4912c-23.4395 -23.4385 -61.4102 -23.4385 -84.9746 0c-23.4385 23.4395 -23.4385 61.5352 0 84.9746
+c23.5645 23.4385 169.948 84.9736 169.948 84.9736s-61.4102 -146.509 -84.9736 -169.948zM617.585 -57.4912c-23.4395 -23.4385 -61.5352 -23.4385 -84.9746 0c-23.4385 23.4395 -23.4385 61.5352 0 84.9746c23.4395 23.4385 169.886 84.9736 169.886 84.9736
+s-61.4727 -146.509 -84.9111 -169.948z" />
+    <glyph glyph-name="glyph137" unicode="&#xe065;" 
+d="M515 674.938h500v-250.062v-125v-374.875c0 -69.0938 -56.0312 -125 -125 -125h-750c-69.0938 0 -125 55.9062 -125 125v394.406v105.469v375.125h500v-125.062zM390 675h-250v-250h750v124.938h-375h-125v125v0.0625zM890 -75v374.875v0.125h-750v-375h750z" />
+    <glyph glyph-name="glyph138" unicode="&#x2600;" 
+d="M515 425c-68.8438 0 -125 -56.0938 -125 -125s56.1562 -125 125 -125s125 56.0938 125 125s-56.1562 125 -125 125zM515 550v0c138.062 0 249.938 -111.938 249.938 -250s-111.875 -250 -249.938 -250s-250 111.938 -250 250s111.938 250 250 250zM452.5 737.5
+c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM140 612.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5zM77.5 362.562
+c34.5312 0 62.5 -27.9688 62.5 -62.5c0 -34.5938 -27.9688 -62.5 -62.5 -62.5s-62.5 27.9062 -62.5 62.5c0 34.5312 27.9688 62.5 62.5 62.5zM140 -12.5c0 41.667 20.833 62.5 62.5 62.5s62.5 -20.833 62.5 -62.5s-20.833 -62.5 -62.5 -62.5s-62.5 20.833 -62.5 62.5z
+M452.5 -137.5c0 34.6562 27.9688 62.5 62.5 62.5c34.6562 0 62.5 -27.8438 62.5 -62.5c0 -34.4375 -27.8438 -62.5 -62.5 -62.5c-34.5312 0 -62.5 28.0625 -62.5 62.5zM783.5 -56.6875c-24.4062 24.4062 -24.4062 63.9688 0 88.375
+c24.6562 24.4062 63.9688 24.4062 88.375 0s24.6562 -63.9688 0 -88.375c-24.4062 -24.4062 -63.9688 -24.4062 -88.375 0zM952.438 237.562c-34.4375 0 -62.5 27.9688 -62.5 62.4375c0 34.6562 27.8438 62.5 62.5 62.5c34.4375 0 62.5 -27.9062 62.5 -62.4375
+c0 -34.5938 -28.0625 -62.5625 -62.5 -62.5zM871.875 568.25c-24.4062 -24.3438 -63.9688 -24.3438 -88.375 0c-24.4062 24.5312 -24.4062 63.9688 0 88.4375c24.4062 24.3438 63.9688 24.4062 88.375 -0.0625c24.4062 -24.4062 24.4062 -64.0312 0 -88.375z" />
+    <glyph glyph-name="glyph139" unicode="&#xe062;" horiz-adv-x="780" 
+d="M390 300c207.031 0 375 -167.969 375 -375c0 -69.0938 -55.9062 -125 -125 -125h-500c-69.0938 0 -125 55.9062 -125 125c0 207.031 167.969 375 375 375zM202.5 612.5c0 125 62.5 187.5 187.5 187.5s187.5 -62.5 187.5 -187.5s-62.5 -187.5 -187.5 -187.5
+s-187.5 62.5 -187.5 187.5z" />
+    <glyph glyph-name="glyph140" unicode="&#xe05b;" 
+d="M1015 675v-250h-125v-62.5c0 -34.4375 -28.0625 -62.5 -62.5 -62.5h-750c-34.4375 0 -62.5 28.0625 -62.5 62.5v375c0 34.375 28.0625 62.5 62.5 62.5h750c34.4375 0 62.5 -28.125 62.5 -62.5v-62.5h125zM765 425v250h-250v-250h250z" />
+    <glyph glyph-name="glyph141" unicode="&#xe00c;" 
+d="M962.156 519.594c32.8125 -66.5312 52.8438 -140.438 52.8438 -219.594c0 -31.5 -3.65625 -62 -9.28125 -92.0625h-355.219zM672.969 323.75v448.281c104.969 -35.1562 195.312 -102.094 256.344 -192.062zM538.375 142.031h448.656
+c-35.1562 -105.094 -102.156 -195.562 -192.25 -256.469zM606.969 433l-313.219 313.219c66.9375 33.3438 141.406 53.7812 221.25 53.7812c31.5 0 62.0625 -3.71875 91.9688 -9.28125v-357.719zM423.031 164l311.344 -311.25
+c-66.5312 -32.7188 -140.281 -52.75 -219.375 -52.75c-31.5 0 -62.0625 3.65625 -91.9688 9.25v354.75zM356.969 275.844v-447.906c-104.844 35.1562 -195.125 102.062 -256.031 191.906zM24.2812 391.969v0.03125h355.531l-311.844 -311.844
+c-32.8125 66.5312 -52.9688 140.5 -52.9688 219.844c0 31.5 3.71875 62.0625 9.28125 91.9688zM233.625 713.094l255.094 -255.062h-445.75c34.9688 104.344 101.438 194.156 190.656 255.062z" />
+    <glyph glyph-name="glyph142" unicode="&#xe04b;" 
+d="M15 175l500 625l500 -625h-1000zM15 -200v250h998.656v-250h-998.656z" />
+    <glyph glyph-name="glyph143" unicode="&#xe016;" 
+d="M1015 300.062c0 -276.188 -223.875 -500.062 -500 -500.062s-500 223.875 -500 500.062c0 276.062 223.875 499.938 500 499.938s500 -223.875 500 -499.938zM265 299.094l250 -249.094l249.031 249.094h-186.531v250.906h-125v-250.906h-187.5z" />
+    <glyph glyph-name="paragraph" unicode="&#xb6;" horiz-adv-x="780" 
+d="M765 800v-125h-125v-875h-125v875h-125v-875h-125v500c-138.062 0 -250 111.938 -250 250s111.938 250 250 250h125h125h125h125z" />
+    <glyph glyph-name="glyph145" unicode="&#xe01a;" 
+d="M15 300c0 276.125 223.875 500 500 500s500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500zM640 487.5l-125 125l-125 -125h62.5v-500h125v500h62.5z" />
+    <glyph glyph-name="glyph146" unicode="&#xe015;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM515.969 50l249.031 250l-249.031 249.031v-186.531h-250.969v-125h250.969v-187.5z" />
+    <glyph glyph-name="glyph147" unicode="&#xe019;" 
+d="M515 800c276.125 0 500 -223.875 500 -500s-223.875 -500 -500 -500s-500 223.875 -500 500s223.875 500 500 500zM702.5 175l125 125l-125 125v-62.5h-500v-125h500v-62.5z" />
+    <glyph glyph-name="glyph148" unicode="&#xe02d;" 
+d="M1015 -200h-142.812c0 472.656 -384.531 857.125 -857.188 857.125v142.875c551.281 0 1000 -448.594 1000 -1000v0zM729.219 -200h-142.812c0 315.062 -256.469 571.406 -571.406 571.406v142.875c393.812 0 714.219 -320.469 714.219 -714.281v0zM443.594 -200
+h-142.938c0 157.594 -128.062 285.656 -285.656 285.656v142.938c236.312 0 428.594 -192.281 428.594 -428.594v0zM157.812 -200h-142.812v142.812c78.9688 0 142.812 -63.9688 142.812 -142.812z" />
+    <glyph glyph-name="glyph149" unicode="&#xe037;" 
+d="M161.594 -53.75c-94.5938 94.4688 -146.594 219.969 -146.594 353.75c0 275.625 224.25 500 500 500l62.5 -62.5l-62.5 -62.5c-206.781 0 -375 -168.219 -375 -375c0 -100.344 39.0625 -194.594 110 -265.375l-76.7812 -10l-11.625 -78.375v0zM515 -200l-62.5 62.25
+l62.5 62.75c206.781 0 375 168.219 375 375c0 100.094 -39.0625 194.344 -110.094 265.25l76.9062 10.125l11.4688 78.25c94.5938 -94.4688 146.719 -220.094 146.719 -353.625c0 -275.625 -224.375 -500 -500 -500v0z" />
+  </font>
+</defs></svg>
diff --git a/static/images/fonts/iconic_stroke.ttf b/static/images/fonts/iconic_stroke.ttf
new file mode 100644
index 0000000..8af00b5
Binary files /dev/null and b/static/images/fonts/iconic_stroke.ttf differ
diff --git a/static/images/fugue/application-dock-270-bw.png b/static/images/fugue/application-dock-270-bw.png
new file mode 100644
index 0000000..1d0ad1b
Binary files /dev/null and b/static/images/fugue/application-dock-270-bw.png differ
diff --git a/static/images/fugue/application-dock-270.png b/static/images/fugue/application-dock-270.png
new file mode 100755
index 0000000..f1115ad
Binary files /dev/null and b/static/images/fugue/application-dock-270.png differ
diff --git a/static/images/fugue/arrow-000-small-bw.png b/static/images/fugue/arrow-000-small-bw.png
new file mode 100644
index 0000000..1ab046d
Binary files /dev/null and b/static/images/fugue/arrow-000-small-bw.png differ
diff --git a/static/images/fugue/arrow-090.png b/static/images/fugue/arrow-090.png
new file mode 100755
index 0000000..46d2f42
Binary files /dev/null and b/static/images/fugue/arrow-090.png differ
diff --git a/static/images/fugue/arrow-circle.png b/static/images/fugue/arrow-circle.png
new file mode 100755
index 0000000..c932759
Binary files /dev/null and b/static/images/fugue/arrow-circle.png differ
diff --git a/static/images/fugue/arrow-resize-090-bw.png b/static/images/fugue/arrow-resize-090-bw.png
new file mode 100644
index 0000000..1bb4022
Binary files /dev/null and b/static/images/fugue/arrow-resize-090-bw.png differ
diff --git a/static/images/fugue/arrow-resize-090.png b/static/images/fugue/arrow-resize-090.png
new file mode 100755
index 0000000..dcfa5ab
Binary files /dev/null and b/static/images/fugue/arrow-resize-090.png differ
diff --git a/static/images/fugue/arrow-split-bw.png b/static/images/fugue/arrow-split-bw.png
new file mode 100644
index 0000000..95964d5
Binary files /dev/null and b/static/images/fugue/arrow-split-bw.png differ
diff --git a/static/images/fugue/arrow-split.png b/static/images/fugue/arrow-split.png
new file mode 100644
index 0000000..2ea906b
Binary files /dev/null and b/static/images/fugue/arrow-split.png differ
diff --git a/static/images/fugue/arrow-transition-270-bw.png b/static/images/fugue/arrow-transition-270-bw.png
new file mode 100644
index 0000000..f1994ae
Binary files /dev/null and b/static/images/fugue/arrow-transition-270-bw.png differ
diff --git a/static/images/fugue/arrow-transition-bw.png b/static/images/fugue/arrow-transition-bw.png
new file mode 100644
index 0000000..d846a36
Binary files /dev/null and b/static/images/fugue/arrow-transition-bw.png differ
diff --git a/static/images/fugue/asterisk-small-outline.png b/static/images/fugue/asterisk-small-outline.png
new file mode 100644
index 0000000..3d9fe75
Binary files /dev/null and b/static/images/fugue/asterisk-small-outline.png differ
diff --git a/static/images/fugue/asterisk-small-yellow.png b/static/images/fugue/asterisk-small-yellow.png
new file mode 100755
index 0000000..c252613
Binary files /dev/null and b/static/images/fugue/asterisk-small-yellow.png differ
diff --git a/static/images/fugue/asterisk-small.png b/static/images/fugue/asterisk-small.png
new file mode 100755
index 0000000..0e4313c
Binary files /dev/null and b/static/images/fugue/asterisk-small.png differ
diff --git a/static/images/fugue/block--plus-bw.png b/static/images/fugue/block--plus-bw.png
new file mode 100644
index 0000000..9e1926c
Binary files /dev/null and b/static/images/fugue/block--plus-bw.png differ
diff --git a/static/images/fugue/block--plus.png b/static/images/fugue/block--plus.png
new file mode 100755
index 0000000..2ab7975
Binary files /dev/null and b/static/images/fugue/block--plus.png differ
diff --git a/static/images/fugue/bookmarks-bw.png b/static/images/fugue/bookmarks-bw.png
new file mode 100644
index 0000000..65bd8ce
Binary files /dev/null and b/static/images/fugue/bookmarks-bw.png differ
diff --git a/static/images/fugue/bookmarks.png b/static/images/fugue/bookmarks.png
new file mode 100755
index 0000000..b06c712
Binary files /dev/null and b/static/images/fugue/bookmarks.png differ
diff --git a/static/images/fugue/bug.png b/static/images/fugue/bug.png
new file mode 100755
index 0000000..cb0ec4b
Binary files /dev/null and b/static/images/fugue/bug.png differ
diff --git a/static/images/fugue/chart.png b/static/images/fugue/chart.png
new file mode 100755
index 0000000..d54d64a
Binary files /dev/null and b/static/images/fugue/chart.png differ
diff --git a/static/images/fugue/chevron-expand-bw.png b/static/images/fugue/chevron-expand-bw.png
new file mode 100644
index 0000000..f11e1d6
Binary files /dev/null and b/static/images/fugue/chevron-expand-bw.png differ
diff --git a/static/images/fugue/chevron-expand.png b/static/images/fugue/chevron-expand.png
new file mode 100755
index 0000000..0416ee9
Binary files /dev/null and b/static/images/fugue/chevron-expand.png differ
diff --git a/static/images/fugue/chevron.png b/static/images/fugue/chevron.png
new file mode 100755
index 0000000..c01b7aa
Binary files /dev/null and b/static/images/fugue/chevron.png differ
diff --git a/static/images/fugue/control-270.png b/static/images/fugue/control-270.png
new file mode 100755
index 0000000..b24aa15
Binary files /dev/null and b/static/images/fugue/control-270.png differ
diff --git a/static/images/fugue/cross-button.png b/static/images/fugue/cross-button.png
new file mode 100755
index 0000000..70e8d23
Binary files /dev/null and b/static/images/fugue/cross-button.png differ
diff --git a/static/images/fugue/cross-circle-bw.png b/static/images/fugue/cross-circle-bw.png
new file mode 100644
index 0000000..715f2a5
Binary files /dev/null and b/static/images/fugue/cross-circle-bw.png differ
diff --git a/static/images/fugue/cross-circle.png b/static/images/fugue/cross-circle.png
new file mode 100755
index 0000000..d3b37af
Binary files /dev/null and b/static/images/fugue/cross-circle.png differ
diff --git a/static/images/fugue/cross-small-bw.png b/static/images/fugue/cross-small-bw.png
new file mode 100644
index 0000000..7683024
Binary files /dev/null and b/static/images/fugue/cross-small-bw.png differ
diff --git a/static/images/fugue/cross.png b/static/images/fugue/cross.png
new file mode 100755
index 0000000..d692156
Binary files /dev/null and b/static/images/fugue/cross.png differ
diff --git a/static/images/fugue/disk--arrow-bw.png b/static/images/fugue/disk--arrow-bw.png
new file mode 100644
index 0000000..b0da0aa
Binary files /dev/null and b/static/images/fugue/disk--arrow-bw.png differ
diff --git a/static/images/fugue/disk--arrow.png b/static/images/fugue/disk--arrow.png
new file mode 100755
index 0000000..37cbf99
Binary files /dev/null and b/static/images/fugue/disk--arrow.png differ
diff --git a/static/images/fugue/disk.png b/static/images/fugue/disk.png
new file mode 100755
index 0000000..9a89d64
Binary files /dev/null and b/static/images/fugue/disk.png differ
diff --git a/static/images/fugue/exclamation.png b/static/images/fugue/exclamation.png
new file mode 100755
index 0000000..21fd8f7
Binary files /dev/null and b/static/images/fugue/exclamation.png differ
diff --git a/static/images/fugue/external.png b/static/images/fugue/external.png
new file mode 100755
index 0000000..6c672bb
Binary files /dev/null and b/static/images/fugue/external.png differ
diff --git a/static/images/fugue/eye.png b/static/images/fugue/eye.png
new file mode 100755
index 0000000..c4d182a
Binary files /dev/null and b/static/images/fugue/eye.png differ
diff --git a/static/images/fugue/gear-bw.png b/static/images/fugue/gear-bw.png
new file mode 100644
index 0000000..8f37acd
Binary files /dev/null and b/static/images/fugue/gear-bw.png differ
diff --git a/static/images/fugue/gear.png b/static/images/fugue/gear.png
new file mode 100755
index 0000000..8151463
Binary files /dev/null and b/static/images/fugue/gear.png differ
diff --git a/static/images/fugue/globe-bw.png b/static/images/fugue/globe-bw.png
new file mode 100644
index 0000000..9a72e1d
Binary files /dev/null and b/static/images/fugue/globe-bw.png differ
diff --git a/static/images/fugue/globe.png b/static/images/fugue/globe.png
new file mode 100755
index 0000000..2cef329
Binary files /dev/null and b/static/images/fugue/globe.png differ
diff --git a/static/images/fugue/hammer-bw.png b/static/images/fugue/hammer-bw.png
new file mode 100644
index 0000000..2642087
Binary files /dev/null and b/static/images/fugue/hammer-bw.png differ
diff --git a/static/images/fugue/hammer.png b/static/images/fugue/hammer.png
new file mode 100755
index 0000000..970bdaa
Binary files /dev/null and b/static/images/fugue/hammer.png differ
diff --git a/static/images/fugue/information-white.png b/static/images/fugue/information-white.png
new file mode 100644
index 0000000..7005238
Binary files /dev/null and b/static/images/fugue/information-white.png differ
diff --git a/static/images/fugue/layer-transparent-bw.png b/static/images/fugue/layer-transparent-bw.png
new file mode 100644
index 0000000..5adcf45
Binary files /dev/null and b/static/images/fugue/layer-transparent-bw.png differ
diff --git a/static/images/fugue/layer-transparent.png b/static/images/fugue/layer-transparent.png
new file mode 100755
index 0000000..40cbb90
Binary files /dev/null and b/static/images/fugue/layer-transparent.png differ
diff --git a/static/images/fugue/layers-stack-bw.png b/static/images/fugue/layers-stack-bw.png
new file mode 100644
index 0000000..accfc3b
Binary files /dev/null and b/static/images/fugue/layers-stack-bw.png differ
diff --git a/static/images/fugue/layers-stack.png b/static/images/fugue/layers-stack.png
new file mode 100755
index 0000000..7267af8
Binary files /dev/null and b/static/images/fugue/layers-stack.png differ
diff --git a/static/images/fugue/magnifier-left.png b/static/images/fugue/magnifier-left.png
new file mode 100755
index 0000000..5389af4
Binary files /dev/null and b/static/images/fugue/magnifier-left.png differ
diff --git a/static/images/fugue/magnifier-zoom-out.png b/static/images/fugue/magnifier-zoom-out.png
new file mode 100755
index 0000000..4fa98f0
Binary files /dev/null and b/static/images/fugue/magnifier-zoom-out.png differ
diff --git a/static/images/fugue/magnifier-zoom.png b/static/images/fugue/magnifier-zoom.png
new file mode 100755
index 0000000..37f4ef9
Binary files /dev/null and b/static/images/fugue/magnifier-zoom.png differ
diff --git a/static/images/fugue/navigation.png b/static/images/fugue/navigation.png
new file mode 100644
index 0000000..d6c5261
Binary files /dev/null and b/static/images/fugue/navigation.png differ
diff --git a/static/images/fugue/pencil-small.png b/static/images/fugue/pencil-small.png
new file mode 100755
index 0000000..e6f2ca0
Binary files /dev/null and b/static/images/fugue/pencil-small.png differ
diff --git a/static/images/fugue/pencil.png b/static/images/fugue/pencil.png
new file mode 100755
index 0000000..57609d8
Binary files /dev/null and b/static/images/fugue/pencil.png differ
diff --git a/static/images/fugue/plus-button-bw.png b/static/images/fugue/plus-button-bw.png
new file mode 100644
index 0000000..80b2efb
Binary files /dev/null and b/static/images/fugue/plus-button-bw.png differ
diff --git a/static/images/fugue/plus-button.png b/static/images/fugue/plus-button.png
new file mode 100755
index 0000000..f6cced5
Binary files /dev/null and b/static/images/fugue/plus-button.png differ
diff --git a/static/images/fugue/plus-circle.png b/static/images/fugue/plus-circle.png
new file mode 100755
index 0000000..49746a1
Binary files /dev/null and b/static/images/fugue/plus-circle.png differ
diff --git a/static/images/fugue/sticky-note-text.png b/static/images/fugue/sticky-note-text.png
new file mode 100755
index 0000000..8c90097
Binary files /dev/null and b/static/images/fugue/sticky-note-text.png differ
diff --git a/static/images/fugue/tag--plus.png b/static/images/fugue/tag--plus.png
new file mode 100644
index 0000000..753f6c8
Binary files /dev/null and b/static/images/fugue/tag--plus.png differ
diff --git a/static/images/fugue/tag-label.png b/static/images/fugue/tag-label.png
new file mode 100755
index 0000000..7ec902f
Binary files /dev/null and b/static/images/fugue/tag-label.png differ
diff --git a/static/images/fugue/tags.png b/static/images/fugue/tags.png
new file mode 100644
index 0000000..b5f4d81
Binary files /dev/null and b/static/images/fugue/tags.png differ
diff --git a/static/images/fugue/toggle-bw.png b/static/images/fugue/toggle-bw.png
new file mode 100644
index 0000000..e646302
Binary files /dev/null and b/static/images/fugue/toggle-bw.png differ
diff --git a/static/images/fugue/toggle-expand-bw.png b/static/images/fugue/toggle-expand-bw.png
new file mode 100644
index 0000000..af78861
Binary files /dev/null and b/static/images/fugue/toggle-expand-bw.png differ
diff --git a/static/images/fugue/toggle-expand.png b/static/images/fugue/toggle-expand.png
new file mode 100755
index 0000000..874c3a2
Binary files /dev/null and b/static/images/fugue/toggle-expand.png differ
diff --git a/static/images/fugue/toggle.png b/static/images/fugue/toggle.png
new file mode 100755
index 0000000..e99f0c3
Binary files /dev/null and b/static/images/fugue/toggle.png differ
diff --git a/static/images/fugue/toolbox-bw.png b/static/images/fugue/toolbox-bw.png
new file mode 100755
index 0000000..c800f2c
Binary files /dev/null and b/static/images/fugue/toolbox-bw.png differ
diff --git a/static/images/fugue/toolbox.png b/static/images/fugue/toolbox.png
new file mode 100755
index 0000000..12f988e
Binary files /dev/null and b/static/images/fugue/toolbox.png differ
diff --git a/static/images/fugue/ui-slider-050-bw.png b/static/images/fugue/ui-slider-050-bw.png
new file mode 100644
index 0000000..5b52775
Binary files /dev/null and b/static/images/fugue/ui-slider-050-bw.png differ
diff --git a/static/images/fugue/ui-slider-050.png b/static/images/fugue/ui-slider-050.png
new file mode 100755
index 0000000..9878961
Binary files /dev/null and b/static/images/fugue/ui-slider-050.png differ
diff --git a/static/images/galaxyIcon_noText.png b/static/images/galaxyIcon_noText.png
new file mode 100644
index 0000000..1e5e609
Binary files /dev/null and b/static/images/galaxyIcon_noText.png differ
diff --git a/static/images/hatch-023858.png b/static/images/hatch-023858.png
new file mode 100644
index 0000000..e3941f5
Binary files /dev/null and b/static/images/hatch-023858.png differ
diff --git a/static/images/hatch-fade-023858.gif b/static/images/hatch-fade-023858.gif
new file mode 100644
index 0000000..88947c4
Binary files /dev/null and b/static/images/hatch-fade-023858.gif differ
diff --git a/static/images/histogram.png b/static/images/histogram.png
new file mode 100644
index 0000000..49103eb
Binary files /dev/null and b/static/images/histogram.png differ
diff --git a/static/images/histogram2.png b/static/images/histogram2.png
new file mode 100644
index 0000000..5921556
Binary files /dev/null and b/static/images/histogram2.png differ
diff --git a/static/images/history-buttons/delete_icon.png b/static/images/history-buttons/delete_icon.png
new file mode 100644
index 0000000..ce6842a
Binary files /dev/null and b/static/images/history-buttons/delete_icon.png differ
diff --git a/static/images/history-buttons/delete_icon_dark.png b/static/images/history-buttons/delete_icon_dark.png
new file mode 100644
index 0000000..54570fb
Binary files /dev/null and b/static/images/history-buttons/delete_icon_dark.png differ
diff --git a/static/images/history-buttons/delete_icon_grey.png b/static/images/history-buttons/delete_icon_grey.png
new file mode 100644
index 0000000..d7c4b80
Binary files /dev/null and b/static/images/history-buttons/delete_icon_grey.png differ
diff --git a/static/images/history-buttons/eye_icon.png b/static/images/history-buttons/eye_icon.png
new file mode 100644
index 0000000..213b920
Binary files /dev/null and b/static/images/history-buttons/eye_icon.png differ
diff --git a/static/images/history-buttons/eye_icon_dark.png b/static/images/history-buttons/eye_icon_dark.png
new file mode 100644
index 0000000..05ccb48
Binary files /dev/null and b/static/images/history-buttons/eye_icon_dark.png differ
diff --git a/static/images/history-buttons/eye_icon_grey.png b/static/images/history-buttons/eye_icon_grey.png
new file mode 100644
index 0000000..d4da0f9
Binary files /dev/null and b/static/images/history-buttons/eye_icon_grey.png differ
diff --git a/static/images/history-buttons/pencil_icon.png b/static/images/history-buttons/pencil_icon.png
new file mode 100644
index 0000000..4af0ca1
Binary files /dev/null and b/static/images/history-buttons/pencil_icon.png differ
diff --git a/static/images/history-buttons/pencil_icon_dark.png b/static/images/history-buttons/pencil_icon_dark.png
new file mode 100644
index 0000000..ffcd3db
Binary files /dev/null and b/static/images/history-buttons/pencil_icon_dark.png differ
diff --git a/static/images/history-buttons/pencil_icon_grey.png b/static/images/history-buttons/pencil_icon_grey.png
new file mode 100644
index 0000000..c5c990b
Binary files /dev/null and b/static/images/history-buttons/pencil_icon_grey.png differ
diff --git a/static/images/history-states/data_empty.png b/static/images/history-states/data_empty.png
new file mode 100644
index 0000000..04cdc33
Binary files /dev/null and b/static/images/history-states/data_empty.png differ
diff --git a/static/images/history-states/data_error.png b/static/images/history-states/data_error.png
new file mode 100644
index 0000000..04cdc33
Binary files /dev/null and b/static/images/history-states/data_error.png differ
diff --git a/static/images/history-states/data_ok.png b/static/images/history-states/data_ok.png
new file mode 100644
index 0000000..dee022c
Binary files /dev/null and b/static/images/history-states/data_ok.png differ
diff --git a/static/images/history-states/data_queued.png b/static/images/history-states/data_queued.png
new file mode 100644
index 0000000..7202be8
Binary files /dev/null and b/static/images/history-states/data_queued.png differ
diff --git a/static/images/history.gif b/static/images/history.gif
new file mode 100644
index 0000000..9a0aa37
Binary files /dev/null and b/static/images/history.gif differ
diff --git a/static/images/history_down_arrow.gif b/static/images/history_down_arrow.gif
new file mode 100644
index 0000000..ac38f13
Binary files /dev/null and b/static/images/history_down_arrow.gif differ
diff --git a/static/images/history_up_arrow.gif b/static/images/history_up_arrow.gif
new file mode 100644
index 0000000..f707256
Binary files /dev/null and b/static/images/history_up_arrow.gif differ
diff --git a/static/images/icon_error_lrg.gif b/static/images/icon_error_lrg.gif
new file mode 100644
index 0000000..fccffd1
Binary files /dev/null and b/static/images/icon_error_lrg.gif differ
diff --git a/static/images/icon_error_sml.gif b/static/images/icon_error_sml.gif
new file mode 100644
index 0000000..61132ef
Binary files /dev/null and b/static/images/icon_error_sml.gif differ
diff --git a/static/images/icon_info_lrg.gif b/static/images/icon_info_lrg.gif
new file mode 100644
index 0000000..b390fd4
Binary files /dev/null and b/static/images/icon_info_lrg.gif differ
diff --git a/static/images/icon_info_sml.gif b/static/images/icon_info_sml.gif
new file mode 100644
index 0000000..c6cb9ad
Binary files /dev/null and b/static/images/icon_info_sml.gif differ
diff --git a/static/images/icon_success_lrg.gif b/static/images/icon_success_lrg.gif
new file mode 100644
index 0000000..9af317d
Binary files /dev/null and b/static/images/icon_success_lrg.gif differ
diff --git a/static/images/icon_success_sml.gif b/static/images/icon_success_sml.gif
new file mode 100644
index 0000000..52e85a4
Binary files /dev/null and b/static/images/icon_success_sml.gif differ
diff --git a/static/images/icon_warning_lrg.gif b/static/images/icon_warning_lrg.gif
new file mode 100644
index 0000000..83359d8
Binary files /dev/null and b/static/images/icon_warning_lrg.gif differ
diff --git a/static/images/icon_warning_sml.gif b/static/images/icon_warning_sml.gif
new file mode 100644
index 0000000..873bbb5
Binary files /dev/null and b/static/images/icon_warning_sml.gif differ
diff --git a/static/images/jstree/32px.png b/static/images/jstree/32px.png
new file mode 100755
index 0000000..1532715
Binary files /dev/null and b/static/images/jstree/32px.png differ
diff --git a/static/images/jstree/40px.png b/static/images/jstree/40px.png
new file mode 100755
index 0000000..1959347
Binary files /dev/null and b/static/images/jstree/40px.png differ
diff --git a/static/images/jstree/throbber.gif b/static/images/jstree/throbber.gif
new file mode 100755
index 0000000..1b5b2fd
Binary files /dev/null and b/static/images/jstree/throbber.gif differ
diff --git a/static/images/kendall.png b/static/images/kendall.png
new file mode 100644
index 0000000..3084662
Binary files /dev/null and b/static/images/kendall.png differ
diff --git a/static/images/light_gray_grid.gif b/static/images/light_gray_grid.gif
new file mode 100644
index 0000000..3347192
Binary files /dev/null and b/static/images/light_gray_grid.gif differ
diff --git a/static/images/loading_large_white_bg.gif b/static/images/loading_large_white_bg.gif
new file mode 100644
index 0000000..3c2f7c0
Binary files /dev/null and b/static/images/loading_large_white_bg.gif differ
diff --git a/static/images/loading_small_white_bg.gif b/static/images/loading_small_white_bg.gif
new file mode 100644
index 0000000..d42f72c
Binary files /dev/null and b/static/images/loading_small_white_bg.gif differ
diff --git a/static/images/maf_icons/interval2maf.png b/static/images/maf_icons/interval2maf.png
new file mode 100644
index 0000000..55434a8
Binary files /dev/null and b/static/images/maf_icons/interval2maf.png differ
diff --git a/static/images/maf_icons/stitchMaf.png b/static/images/maf_icons/stitchMaf.png
new file mode 100644
index 0000000..e1988c2
Binary files /dev/null and b/static/images/maf_icons/stitchMaf.png differ
diff --git a/static/images/mag_glass.png b/static/images/mag_glass.png
new file mode 100644
index 0000000..61a9e33
Binary files /dev/null and b/static/images/mag_glass.png differ
diff --git a/static/images/mutation_visualization_example.png b/static/images/mutation_visualization_example.png
new file mode 100644
index 0000000..fcb4143
Binary files /dev/null and b/static/images/mutation_visualization_example.png differ
diff --git a/static/images/openid-16x16.gif b/static/images/openid-16x16.gif
new file mode 100644
index 0000000..e2d8377
Binary files /dev/null and b/static/images/openid-16x16.gif differ
diff --git a/static/images/overview_arrows.png b/static/images/overview_arrows.png
new file mode 100644
index 0000000..ddaa17d
Binary files /dev/null and b/static/images/overview_arrows.png differ
diff --git a/static/images/pearson.png b/static/images/pearson.png
new file mode 100644
index 0000000..69ed1b3
Binary files /dev/null and b/static/images/pearson.png differ
diff --git a/static/images/resizable.png b/static/images/resizable.png
new file mode 100644
index 0000000..db29514
Binary files /dev/null and b/static/images/resizable.png differ
diff --git a/static/images/rgWebLogo3_test.jpg b/static/images/rgWebLogo3_test.jpg
new file mode 100644
index 0000000..8f7e77e
Binary files /dev/null and b/static/images/rgWebLogo3_test.jpg differ
diff --git a/static/images/scatter.png b/static/images/scatter.png
new file mode 100644
index 0000000..cadaee2
Binary files /dev/null and b/static/images/scatter.png differ
diff --git a/static/images/scatterplot.png b/static/images/scatterplot.png
new file mode 100644
index 0000000..a4ba2dd
Binary files /dev/null and b/static/images/scatterplot.png differ
diff --git a/static/images/select2-spinner.gif b/static/images/select2-spinner.gif
new file mode 100755
index 0000000..5b33f7e
Binary files /dev/null and b/static/images/select2-spinner.gif differ
diff --git a/static/images/select2.png b/static/images/select2.png
new file mode 100755
index 0000000..1d804ff
Binary files /dev/null and b/static/images/select2.png differ
diff --git a/static/images/select2x2.png b/static/images/select2x2.png
new file mode 100755
index 0000000..4bdd5c9
Binary files /dev/null and b/static/images/select2x2.png differ
diff --git a/static/images/silk/add.png b/static/images/silk/add.png
new file mode 100644
index 0000000..6332fef
Binary files /dev/null and b/static/images/silk/add.png differ
diff --git a/static/images/silk/book.png b/static/images/silk/book.png
new file mode 100644
index 0000000..b0f4dd7
Binary files /dev/null and b/static/images/silk/book.png differ
diff --git a/static/images/silk/book_open.png b/static/images/silk/book_open.png
new file mode 100644
index 0000000..7d863f9
Binary files /dev/null and b/static/images/silk/book_open.png differ
diff --git a/static/images/silk/chart_curve.png b/static/images/silk/chart_curve.png
new file mode 100755
index 0000000..01e933a
Binary files /dev/null and b/static/images/silk/chart_curve.png differ
diff --git a/static/images/silk/folder.png b/static/images/silk/folder.png
new file mode 100644
index 0000000..784e8fa
Binary files /dev/null and b/static/images/silk/folder.png differ
diff --git a/static/images/silk/folder_page.png b/static/images/silk/folder_page.png
new file mode 100644
index 0000000..1ef6e11
Binary files /dev/null and b/static/images/silk/folder_page.png differ
diff --git a/static/images/silk/link.png b/static/images/silk/link.png
new file mode 100644
index 0000000..25eacb7
Binary files /dev/null and b/static/images/silk/link.png differ
diff --git a/static/images/silk/link_break.png b/static/images/silk/link_break.png
new file mode 100644
index 0000000..5235753
Binary files /dev/null and b/static/images/silk/link_break.png differ
diff --git a/static/images/silk/page_white.png b/static/images/silk/page_white.png
new file mode 100644
index 0000000..8b8b1ca
Binary files /dev/null and b/static/images/silk/page_white.png differ
diff --git a/static/images/silk/page_white_compressed.png b/static/images/silk/page_white_compressed.png
new file mode 100644
index 0000000..2b6b100
Binary files /dev/null and b/static/images/silk/page_white_compressed.png differ
diff --git a/static/images/silk/resultset_bottom.png b/static/images/silk/resultset_bottom.png
new file mode 100644
index 0000000..22848b0
Binary files /dev/null and b/static/images/silk/resultset_bottom.png differ
diff --git a/static/images/silk/resultset_next.png b/static/images/silk/resultset_next.png
new file mode 100644
index 0000000..e252606
Binary files /dev/null and b/static/images/silk/resultset_next.png differ
diff --git a/static/images/silk/resultset_previous.png b/static/images/silk/resultset_previous.png
new file mode 100644
index 0000000..910ee94
Binary files /dev/null and b/static/images/silk/resultset_previous.png differ
diff --git a/static/images/solid_qual.png b/static/images/solid_qual.png
new file mode 100644
index 0000000..777a3ce
Binary files /dev/null and b/static/images/solid_qual.png differ
diff --git a/static/images/spearman.png b/static/images/spearman.png
new file mode 100644
index 0000000..071e32f
Binary files /dev/null and b/static/images/spearman.png differ
diff --git a/static/images/square_empty.gif b/static/images/square_empty.gif
new file mode 100644
index 0000000..5b2826b
Binary files /dev/null and b/static/images/square_empty.gif differ
diff --git a/static/images/square_error.gif b/static/images/square_error.gif
new file mode 100644
index 0000000..f483247
Binary files /dev/null and b/static/images/square_error.gif differ
diff --git a/static/images/square_ok.gif b/static/images/square_ok.gif
new file mode 100644
index 0000000..32151d4
Binary files /dev/null and b/static/images/square_ok.gif differ
diff --git a/static/images/square_queued.gif b/static/images/square_queued.gif
new file mode 100644
index 0000000..a73f55c
Binary files /dev/null and b/static/images/square_queued.gif differ
diff --git a/static/images/square_running.gif b/static/images/square_running.gif
new file mode 100644
index 0000000..bd57b5a
Binary files /dev/null and b/static/images/square_running.gif differ
diff --git a/static/images/star.gif b/static/images/star.gif
new file mode 100755
index 0000000..d0948a7
Binary files /dev/null and b/static/images/star.gif differ
diff --git a/static/images/thumbtack_icon.png b/static/images/thumbtack_icon.png
new file mode 100644
index 0000000..349815e
Binary files /dev/null and b/static/images/thumbtack_icon.png differ
diff --git a/static/images/thumbtack_icon_dark.png b/static/images/thumbtack_icon_dark.png
new file mode 100644
index 0000000..4b8d7d4
Binary files /dev/null and b/static/images/thumbtack_icon_dark.png differ
diff --git a/static/images/tipsy.gif b/static/images/tipsy.gif
new file mode 100644
index 0000000..eb7718d
Binary files /dev/null and b/static/images/tipsy.gif differ
diff --git a/static/images/tool_menu_down_arrow.gif b/static/images/tool_menu_down_arrow.gif
new file mode 100644
index 0000000..5c628dd
Binary files /dev/null and b/static/images/tool_menu_down_arrow.gif differ
diff --git a/static/images/tools/lda/first_matrix_generator_example_file.png b/static/images/tools/lda/first_matrix_generator_example_file.png
new file mode 100644
index 0000000..be90d84
Binary files /dev/null and b/static/images/tools/lda/first_matrix_generator_example_file.png differ
diff --git a/static/images/tools/lda/second_matrix_generator_example_file.png b/static/images/tools/lda/second_matrix_generator_example_file.png
new file mode 100644
index 0000000..f2cd56d
Binary files /dev/null and b/static/images/tools/lda/second_matrix_generator_example_file.png differ
diff --git a/static/images/tracks/block.png b/static/images/tracks/block.png
new file mode 100644
index 0000000..73d06c2
Binary files /dev/null and b/static/images/tracks/block.png differ
diff --git a/static/images/tracks/close_btn.gif b/static/images/tracks/close_btn.gif
new file mode 100644
index 0000000..cac3adf
Binary files /dev/null and b/static/images/tracks/close_btn.gif differ
diff --git a/static/images/tracks/diag_bg.gif b/static/images/tracks/diag_bg.gif
new file mode 100644
index 0000000..0a3f9d0
Binary files /dev/null and b/static/images/tracks/diag_bg.gif differ
diff --git a/static/images/tracks/go_btn.gif b/static/images/tracks/go_btn.gif
new file mode 100644
index 0000000..c5f6338
Binary files /dev/null and b/static/images/tracks/go_btn.gif differ
diff --git a/static/images/tracks/handle-left.gif b/static/images/tracks/handle-left.gif
new file mode 100644
index 0000000..75a5da6
Binary files /dev/null and b/static/images/tracks/handle-left.gif differ
diff --git a/static/images/tracks/handle-right.gif b/static/images/tracks/handle-right.gif
new file mode 100644
index 0000000..2f294d7
Binary files /dev/null and b/static/images/tracks/handle-right.gif differ
diff --git a/static/images/tracks/pan_left.gif b/static/images/tracks/pan_left.gif
new file mode 100644
index 0000000..40db4e8
Binary files /dev/null and b/static/images/tracks/pan_left.gif differ
diff --git a/static/images/tracks/pan_right.gif b/static/images/tracks/pan_right.gif
new file mode 100644
index 0000000..aec905e
Binary files /dev/null and b/static/images/tracks/pan_right.gif differ
diff --git a/static/images/tracks/show_history.gif b/static/images/tracks/show_history.gif
new file mode 100644
index 0000000..b43d3b2
Binary files /dev/null and b/static/images/tracks/show_history.gif differ
diff --git a/static/images/tracks/zoom_in.gif b/static/images/tracks/zoom_in.gif
new file mode 100644
index 0000000..bebcd0b
Binary files /dev/null and b/static/images/tracks/zoom_in.gif differ
diff --git a/static/images/tracks/zoom_in_full.gif b/static/images/tracks/zoom_in_full.gif
new file mode 100644
index 0000000..325cc40
Binary files /dev/null and b/static/images/tracks/zoom_in_full.gif differ
diff --git a/static/images/tracks/zoom_out.gif b/static/images/tracks/zoom_out.gif
new file mode 100644
index 0000000..bf79c82
Binary files /dev/null and b/static/images/tracks/zoom_out.gif differ
diff --git a/static/images/tracks/zoom_out_full.gif b/static/images/tracks/zoom_out_full.gif
new file mode 100644
index 0000000..771c97b
Binary files /dev/null and b/static/images/tracks/zoom_out_full.gif differ
diff --git a/static/images/up.gif b/static/images/up.gif
new file mode 100644
index 0000000..4d0822d
Binary files /dev/null and b/static/images/up.gif differ
diff --git a/static/images/visualization/draggable_horizontal.png b/static/images/visualization/draggable_horizontal.png
new file mode 100644
index 0000000..597b99a
Binary files /dev/null and b/static/images/visualization/draggable_horizontal.png differ
diff --git a/static/images/visualization/draggable_vertical.png b/static/images/visualization/draggable_vertical.png
new file mode 100644
index 0000000..0fe2869
Binary files /dev/null and b/static/images/visualization/draggable_vertical.png differ
diff --git a/static/images/visualization/strand_left.png b/static/images/visualization/strand_left.png
new file mode 100644
index 0000000..f517077
Binary files /dev/null and b/static/images/visualization/strand_left.png differ
diff --git a/static/images/visualization/strand_left_inv.png b/static/images/visualization/strand_left_inv.png
new file mode 100644
index 0000000..1e3baa8
Binary files /dev/null and b/static/images/visualization/strand_left_inv.png differ
diff --git a/static/images/visualization/strand_right.png b/static/images/visualization/strand_right.png
new file mode 100644
index 0000000..54b00c5
Binary files /dev/null and b/static/images/visualization/strand_right.png differ
diff --git a/static/images/visualization/strand_right_inv.png b/static/images/visualization/strand_right_inv.png
new file mode 100644
index 0000000..583528b
Binary files /dev/null and b/static/images/visualization/strand_right_inv.png differ
diff --git a/static/images/yui/rel_interstitial_loading.gif b/static/images/yui/rel_interstitial_loading.gif
new file mode 100644
index 0000000..94d46c1
Binary files /dev/null and b/static/images/yui/rel_interstitial_loading.gif differ
diff --git a/static/incompatible-browser.html b/static/incompatible-browser.html
new file mode 100644
index 0000000..760cd99
--- /dev/null
+++ b/static/incompatible-browser.html
@@ -0,0 +1,33 @@
+<!DOCTYPE html>
+<html lang="en">
+    <head>
+        <title>Galaxy</title>
+        <style type="text/css">
+            body {
+                min-width: 500px;
+                font-family: "Lucida Grande",verdana,arial,helvetica,sans-serif;
+                font-size: 12px;
+                line-height: 1.42857;
+            }
+            .errormessagelarge {
+                background-color: #DD1B15;
+                color: white;
+                padding: 15px 15px 15px 52px;
+                margin-bottom: 17px;
+                border: 1px solid transparent;
+                border-radius: 3px;
+                min-height: 36px;
+            }
+            .errormessagelarge a {
+                color: white;
+            }
+        </style>
+    </head>
+    <body>
+        <div class="errormessagelarge">
+            <h3>This browser can't run Galaxy</h3>
+            <p>Galaxy requires a <a href="https://whatbrowser.org/">modern browser</a>
+               with <a href="https://simple.wikipedia.org/wiki/HTTP_cookie">cookies enabled</a>.</p>
+        </div>
+    </body>
+</html>
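A minimal sketch of how a client-side bootstrap might route cookie-less browsers to the fallback page above. The probe logic and the gx_probe cookie name are illustrative assumptions rather than Galaxy's actual check; only the target path is taken from the file itself:

    // Minimal sketch (assumed logic, not Galaxy's verbatim check): write a
    // test cookie, read it back, and redirect to the static fallback page
    // if cookies never stick.
    document.cookie = "gx_probe=1; path=/";
    if (document.cookie.indexOf("gx_probe=1") === -1) {
        window.location.replace("/static/incompatible-browser.html");
    }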
diff --git a/static/jqtouch/img/backButton.png b/static/jqtouch/img/backButton.png
new file mode 100644
index 0000000..e27ea8c
Binary files /dev/null and b/static/jqtouch/img/backButton.png differ
diff --git a/static/jqtouch/img/back_button.png b/static/jqtouch/img/back_button.png
new file mode 100644
index 0000000..9873901
Binary files /dev/null and b/static/jqtouch/img/back_button.png differ
diff --git a/static/jqtouch/img/back_button_clicked.png b/static/jqtouch/img/back_button_clicked.png
new file mode 100644
index 0000000..5ec4230
Binary files /dev/null and b/static/jqtouch/img/back_button_clicked.png differ
diff --git a/static/jqtouch/img/bg_row.gif b/static/jqtouch/img/bg_row.gif
new file mode 100644
index 0000000..f50ab47
Binary files /dev/null and b/static/jqtouch/img/bg_row.gif differ
diff --git a/static/jqtouch/img/bg_row_select.gif b/static/jqtouch/img/bg_row_select.gif
new file mode 100644
index 0000000..2ec37fc
Binary files /dev/null and b/static/jqtouch/img/bg_row_select.gif differ
diff --git a/static/jqtouch/img/blueButton.png b/static/jqtouch/img/blueButton.png
new file mode 100644
index 0000000..0f92dfd
Binary files /dev/null and b/static/jqtouch/img/blueButton.png differ
diff --git a/static/jqtouch/img/button.png b/static/jqtouch/img/button.png
new file mode 100644
index 0000000..52cc7e2
Binary files /dev/null and b/static/jqtouch/img/button.png differ
diff --git a/static/jqtouch/img/button_clicked.png b/static/jqtouch/img/button_clicked.png
new file mode 100644
index 0000000..25d478f
Binary files /dev/null and b/static/jqtouch/img/button_clicked.png differ
diff --git a/static/jqtouch/img/cancel.png b/static/jqtouch/img/cancel.png
new file mode 100644
index 0000000..5f6dcc8
Binary files /dev/null and b/static/jqtouch/img/cancel.png differ
diff --git a/static/jqtouch/img/chevron.png b/static/jqtouch/img/chevron.png
new file mode 100644
index 0000000..6421a16
Binary files /dev/null and b/static/jqtouch/img/chevron.png differ
diff --git a/static/jqtouch/img/chevron_select.png b/static/jqtouch/img/chevron_select.png
new file mode 100644
index 0000000..86832eb
Binary files /dev/null and b/static/jqtouch/img/chevron_select.png differ
diff --git a/static/jqtouch/img/grayButton.png b/static/jqtouch/img/grayButton.png
new file mode 100644
index 0000000..0ce6a30
Binary files /dev/null and b/static/jqtouch/img/grayButton.png differ
diff --git a/static/jqtouch/img/header.gif b/static/jqtouch/img/header.gif
new file mode 100644
index 0000000..db185db
Binary files /dev/null and b/static/jqtouch/img/header.gif differ
diff --git a/static/jqtouch/img/listGroup.png b/static/jqtouch/img/listGroup.png
new file mode 100644
index 0000000..221553a
Binary files /dev/null and b/static/jqtouch/img/listGroup.png differ
diff --git a/static/jqtouch/img/loading.gif b/static/jqtouch/img/loading.gif
new file mode 100644
index 0000000..8522ddf
Binary files /dev/null and b/static/jqtouch/img/loading.gif differ
diff --git a/static/jqtouch/img/pinstripes.png b/static/jqtouch/img/pinstripes.png
new file mode 100644
index 0000000..c997775
Binary files /dev/null and b/static/jqtouch/img/pinstripes.png differ
diff --git a/static/jqtouch/img/selection.png b/static/jqtouch/img/selection.png
new file mode 100644
index 0000000..537e3f0
Binary files /dev/null and b/static/jqtouch/img/selection.png differ
diff --git a/static/jqtouch/img/thumb.png b/static/jqtouch/img/thumb.png
new file mode 100644
index 0000000..cefa8fc
Binary files /dev/null and b/static/jqtouch/img/thumb.png differ
diff --git a/static/jqtouch/img/toggle.png b/static/jqtouch/img/toggle.png
new file mode 100644
index 0000000..3b62ebf
Binary files /dev/null and b/static/jqtouch/img/toggle.png differ
diff --git a/static/jqtouch/img/toggleOn.png b/static/jqtouch/img/toggleOn.png
new file mode 100644
index 0000000..b016814
Binary files /dev/null and b/static/jqtouch/img/toggleOn.png differ
diff --git a/static/jqtouch/img/toolButton.png b/static/jqtouch/img/toolButton.png
new file mode 100644
index 0000000..afe4d7a
Binary files /dev/null and b/static/jqtouch/img/toolButton.png differ
diff --git a/static/jqtouch/img/toolbar.gif b/static/jqtouch/img/toolbar.gif
new file mode 100644
index 0000000..e7e841c
Binary files /dev/null and b/static/jqtouch/img/toolbar.gif differ
diff --git a/static/jqtouch/img/toolbar.png b/static/jqtouch/img/toolbar.png
new file mode 100644
index 0000000..3dde94c
Binary files /dev/null and b/static/jqtouch/img/toolbar.png differ
diff --git a/static/jqtouch/img/whiteButton.png b/static/jqtouch/img/whiteButton.png
new file mode 100644
index 0000000..5514b27
Binary files /dev/null and b/static/jqtouch/img/whiteButton.png differ
diff --git a/static/jqtouch/jqtouch.css b/static/jqtouch/jqtouch.css
new file mode 100644
index 0000000..83656f3
--- /dev/null
+++ b/static/jqtouch/jqtouch.css
@@ -0,0 +1,436 @@
+body {
+    margin: 0;
+    font-family: Helvetica;
+    overflow-x: hidden;
+    -webkit-user-select: none;
+    -webkit-touch-callout: none;
+    -webkit-text-size-adjust: none;
+    background-color: #000;
+}
+
+*, a {
+    /*-webkit-tap-highlight-color: rgba(23,126,191,.5) !important;*/
+    -webkit-tap-highlight-color: rgba(0, 0, 0, 0);
+}
+
+.edgetoedge a, .panel a {
+    /*-webkit-tap-highlight-color: rgba(23,126,191,.5) !important;*/
+    -webkit-tap-highlight-color: rgba(23, 126, 191, .4);
+}
+
+body > * {
+    display: none;
+    position: absolute;
+    margin: 0;
+    padding: 0;
+    left: 0;
+    width: 100%;
+    min-height: 480px;
+    -webkit-transform: translate(0px,0px);
+    background-color: #fff;
+}
+
+body[orient="landscape"] > * {
+    min-height: 320px;
+}
+
+body > *[selected="true"], body > *[selected="selected"] {
+    display: block;
+}
+
+.edgetoedge a[selected=true], .edgetoedge a:active, .panel a[selected=true], .panel a:active {
+    background-color: rgb(170,170,238) !important;
+}
+
+a[selected="progress"] {
+   background-color: rgb(170,170,238) !important;
+   background-image: url(img/loading.gif) !important;
+   background-repeat: no-repeat !important;
+   background-position: right center !important;
+}
+
+/* @group Toolbar */
+.toolbar {
+    -webkit-box-sizing: border-box;
+    padding: 10px;
+    /* height: 60px; */
+    /* padding-top: 28px; */
+    /* background: #000 url(img/toolbar.gif) bottom left repeat-x; */
+    height: 42px;
+    background: rgb(44,49,67);
+    position: relative;
+}
+
+.toolbar > h1 {
+    position: absolute;
+    overflow: hidden;
+    left: 50%;
+    margin: 1px 0 0 -75px;
+    height: 45px;
+    font-size: 20px;
+    width: 150px;
+    font-weight: bold;
+    /*text-shadow: rgba(0, 0, 0, .75) -1px -1px 0;*/
+    text-align: center;
+    text-overflow: ellipsis;
+    white-space: nowrap;
+    color: #f6f6f6;
+}
+
+body[orient="landscape"] .toolbar > h1 {
+    margin-left: -125px;
+    width: 250px;
+}
+
+/* @end */
+
+.button {
+    position: absolute;
+    overflow: hidden;
+    bottom: 5px;
+    right: 6px;
+    margin: 0;
+    border-width: 0 5px;
+    padding: 0 3px;
+    width: auto;
+    height: 30px;
+    line-height: 30px;
+    font-family: inherit;
+    font-size: 12px;
+    font-weight: bold;
+    color: #FFFFFF;
+    text-shadow: rgba(0, 0, 0, 0.6) 0px -1px 0;
+    text-overflow: ellipsis;
+    text-decoration: none;
+    white-space: nowrap;
+    background: none;
+    -webkit-border-image: url(img/button.png) 0 5 0 5;
+}
+
+.button:active {
+    -webkit-border-image: url(img/button_clicked.png) 0 5 0 5;
+}
+
+.blueButton {
+    -webkit-border-image: url(img/blueButton.png) 0 5 0 5;
+    border-width: 0 5px;
+}
+
+.leftButton {
+    left: 6px;
+    right: auto;
+}
+
+.toolbar .back {
+    left: 6px;
+    right: auto;
+    padding: 0;
+    max-width: 55px;
+    border-width: 0 8px 0 15px;
+    -webkit-border-image: url(img/back_button.png) 0 8 0 15;
+}
+
+.toolbar .back:active {
+    -webkit-border-image: url(img/back_button_clicked.png) !important;
+}
+
+.whiteButton,
+.grayButton {
+    display: block;
+    border-width: 0 12px;
+    padding: 10px;
+    text-align: center;
+    font-size: 20px;
+    font-weight: bold;
+    text-decoration: inherit;
+    color: inherit;
+}
+
+.whiteButton {
+    -webkit-border-image: url(img/whiteButton.png) 0 12 0 12;
+    text-shadow: rgba(255, 255, 255, 0.7) 0 1px 0;
+}
+
+.grayButton {
+    -webkit-border-image: url(img/grayButton.png) 0 12 0 12;
+    color: #FFFFFF;
+}
+
+
+/* @group Edge to edge */
+
+.edgetoedge {
+    margin: 0;
+    padding: 0;
+    background-color: #fff;
+}
+
+.edgetoedge > li {
+    position: relative;
+    margin: 0;
+    border-bottom: 1px solid #E0E0E0;
+    padding: 8px 0 8px 10px;
+    font-size: 20px;
+    font-weight: bold;
+    list-style: none;
+    /* background: url(../img/bg_row.gif) 0 2px repeat-x; */
+    color: #999;
+}
+
+.edgetoedge > li.group {
+    position: relative;
+    top: -1px;
+    margin-bottom: -2px;
+    border-top: 1px solid #7d7d7d;
+    border-bottom: 1px solid #999999;
+    padding: 1px 10px;
+    background: url(img/listGroup.png) repeat-x;
+    font-size: 17px;
+    font-weight: bold;
+    text-shadow: rgba(0, 0, 0, 0.4) 0 1px 0;
+    color: #FFFFFF;
+}
+
+.edgetoedge > li.group:first-child, .inset > li.group:first-child {
+    top: 0;
+    border-top: none;
+}
+
+.edgetoedge a, .inset a {
+    display: block;
+    margin: -8px 0 -8px -10px;
+    padding: 8px 32px 8px 10px;
+    text-decoration: none;
+    color: #000;
+    /*
+    background-image:  url(img/bg_row.gif), url(img/chevron.png);
+    background-repeat: repeat-x, no-repeat;
+    background-position: 0 1px, right center;
+    */
+    background-image: url(img/chevron.png);
+    background-repeat: no-repeat;
+    background-position: right center;
+}
+/* @end */
+
+a[target="_replace"] {
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    padding-top: 25px;
+    padding-bottom: 25px;
+    font-size: 18px;
+    color: cornflowerblue;
+    background-color: #FFFFFF;
+    background-image: none;
+}
+
+body > .dialog {
+    top: 0;
+    width: 100%;
+    min-height: 417px;
+    z-index: 2;
+    background: rgba(0, 0, 0, 0.8);
+    padding: 0;
+    text-align: right;
+}
+
+.dialog > fieldset {
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    width: 100%;
+    margin: 0;
+    border: none;
+    border-top: 1px solid #6d84a2;
+    padding: 10px 6px;
+    background: url(img/toolbar.png) #7388a5 repeat-x;
+}
+
+.dialog > fieldset > h1 {
+    margin: 0 10px 0 10px;
+    padding: 0;
+    font-size: 20px;
+    font-weight: bold;
+    color: #FFFFFF;
+    text-shadow: rgba(0, 0, 0, 0.4) 0px -1px 0;
+    text-align: center;
+}
+
+.dialog > fieldset > label {
+    position: absolute;
+    margin: 16px 0 0 6px;
+    font-size: 14px;
+    color: #999999;
+}
+
+input {
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    width: 100%;
+    margin: 8px 0 0 0;
+    padding: 6px 6px 6px 44px;
+    font-size: 16px;
+    font-weight: normal;
+}
+
+
+.panel {
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    background: #c8c8c8 url(img/pinstripes.png);
+    overflow: auto;
+}
+
+.pad {
+    padding: 15px;
+}
+
+.panel fieldset {
+    position: relative;
+    margin: 0 0 20px 0;
+    padding: 0;
+    background: #FFFFFF;
+    -webkit-border-radius: 10px;
+    border: 1px solid #999999;
+    /* text-align: right; */
+    font-size: 16px;
+}
+
+.row  {
+    position: relative;
+    min-height: 42px;
+    border-bottom: 1px solid #999999;
+    -webkit-border-radius: 0;
+    /* text-align: right; */
+}
+
+fieldset .row:last-child {
+    border-bottom: none !important;
+}
+
+.row > input {
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    margin: 0;
+    border: none;
+    padding: 12px 10px 0 110px;
+    min-height: 42px;
+    background: none;
+}
+
+.row > span {
+    display: block;
+    text-align: left;
+    box-sizing: border-box;
+    -webkit-box-sizing: border-box;
+    margin: 0;
+    border: none;
+    padding: 12px 10px 10px 110px;
+    min-height: 42px;
+    background: none;
+}
+
+.row > a {
+    display: block;
+    height: 42px;
+    text-decoration: none;
+    padding: 0 0 0 14px;
+    line-height: 42px;
+    font-weight: bold;
+    color: #000;
+    background-image: url(img/chevron.png);
+    background-repeat: no-repeat;
+    background-position: right center;
+}
+
+.row > label {
+    position: absolute;
+    margin: 0 0 0 14px;
+    line-height: 42px;
+    font-weight: bold;
+}
+
+.row > .toggle {
+    position: absolute;
+    top: 6px;
+    right: 6px;
+    width: 100px;
+    height: 28px;
+}
+
+.toggle {
+    border: 1px solid #888888;
+    -webkit-border-radius: 6px;
+    background: #FFFFFF url(img/toggle.png) repeat-x;
+    font-size: 19px;
+    font-weight: bold;
+    line-height: 30px;
+}
+
+.toggle[toggled="true"] {
+    border: 1px solid #143fae;
+    background: #194fdb url(img/toggleOn.png) repeat-x;
+}
+
+.toggleOn {
+    display: none;
+    position: absolute;
+    width: 60px;
+    text-align: center;
+    left: 0;
+    top: 0;
+    color: #FFFFFF;
+    text-shadow: rgba(0, 0, 0, 0.4) 0px -1px 0;
+}
+
+.toggleOff {
+    position: absolute;
+    width: 60px;
+    text-align: center;
+    right: 0;
+    top: 0;
+    color: #666666;
+}
+
+.toggle[toggled="true"] > .toggleOn {
+    display: block;
+}
+
+.toggle[toggled="true"] > .toggleOff {
+    display: none;
+}
+
+.thumb {
+    position: absolute;
+    top: -1px;
+    left: -1px;
+    width: 40px;
+    height: 28px;    
+    border: 1px solid #888888;
+    -webkit-border-radius: 6px;
+    background: #ffffff url(img/thumb.png) repeat-x;
+}
+
+.toggle[toggled="true"] > .thumb {
+    left: auto;
+    right: -1px;
+}
+
+.panel h2 {
+    margin: 0 0 8px 14px;
+    font-size: inherit;
+    font-weight: bold;
+    color: rgba(17, 20, 23, 0.5);
+    text-shadow: rgba(255, 255, 255, 0.75) 1px 1px 0;
+}
+
+.panel p a {
+    color: #4195ca;
+}
+
+#about {
+    background: rgb(17, 20, 23);
+    color: #ddd;
+    text-shadow: rgba(255, 255, 255, 0.3) 0px -1px 0;
+    font-size: 20px;
+}
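The stylesheet above is attribute-driven: page visibility keys off selected="true" (the body > *[selected] rules) and the switch styling off toggled="true", so a script only has to flip attributes to change state. A minimal sketch of that contract using plain DOM calls; jQTouch's actual event handling differs:

    // Minimal sketch (not jQTouch's actual code): flipping the `toggled`
    // attribute is enough to activate the .toggle[toggled="true"] rules.
    var toggle = document.querySelector(".toggle");
    if (toggle) {
        toggle.addEventListener("click", function () {
            var on = toggle.getAttribute("toggled") === "true";
            toggle.setAttribute("toggled", on ? "false" : "true");
        });
    }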
diff --git a/static/june_2007_style b/static/june_2007_style
new file mode 120000
index 0000000..f350b1e
--- /dev/null
+++ b/static/june_2007_style
@@ -0,0 +1 @@
+style
\ No newline at end of file
diff --git a/static/laj/docs/java_plugin_help.html b/static/laj/docs/java_plugin_help.html
new file mode 100644
index 0000000..9165ee5
--- /dev/null
+++ b/static/laj/docs/java_plugin_help.html
@@ -0,0 +1,132 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+<title>Installing the Java Plug-in</title>
+</head>
+<body bgcolor=#FFFFFF>
+<br>
+<p>
+<h2>Installing the Java Plug-in</h2>
+<br>
+<p>
+
+The Laj and Maj applets are written for Java 1.2 (a.k.a.
+J2SE 1.2), and theoretically any web browser that fully supports
+this (or any higher version) should work.  However, for best
+compatibility we recommend that you download and install the official
+<b>Java Plug-in</b> software from Sun Microsystems.
+This plug-in generally works with a variety of web browsers,
+including Internet Explorer, Netscape, and Mozilla.
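+<p>
+For example, a page embedding the applet can warn users whose browser
+reports no Java support, instead of showing a blank applet area.  This
+is only a minimal sketch; the <tt>java-warning</tt> element id is a
+hypothetical placeholder, not part of the Laj pages:
+<pre>
+// Minimal sketch: navigator.javaEnabled() is a legacy browser API;
+// the "java-warning" element id is a hypothetical placeholder.
+var warning = document.getElementById("java-warning");
+if (warning && !navigator.javaEnabled()) {
+    warning.style.display = "block";
+}
+</pre>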
+<p>
+The main overview page for the Java Plug-in product is
+<a href="http://java.sun.com/products/plugin/">
+http://java.sun.com/products/plugin/</a>, and an assortment
+of download links is available at
+<a href="http://java.sun.com/j2se/downloads/">
+http://java.sun.com/j2se/downloads/</a>.
+Sun also provides a simplified download facility (latest version
+only), which is accessible via the "Download NOW" button at
+<a href="http://java.com/">http://java.com/</a>.
+For details about exactly which platforms are supported, see
+<a href="http://java.sun.com/j2se/1.5.0/system-configurations.html">
+http://java.sun.com/j2se/1.5.0/system-configurations.html</a>.
+<p>
+The plug-in is
+usually bundled with the Java 2 Runtime Environment (JRE) and the
+Java 2 Software Development Kit (JDK/SDK).  If you are given a choice
+between these, either will do; the JRE is smaller because it
+doesn't include developer tools, but both of them contain the
+plug-in.  Note that even though Laj does not currently use any Java
+features beyond 1.2, as of this writing it is compiled with version
+1.3.1 of Sun's SDK, so running it with the corresponding version
+(or higher) of the plug-in is recommended.  We have, however,
+observed some drawing anomalies with the 1.4.x series on some
+platforms (including "phantom repeats" in the symbol panel and
+failure to draw the zoom box and crosshairs), so you may want to
+stick with 1.3.x or 1.5.x.
+<p>
+Each download should come with installation instructions
+(read them!), but here are some additional platform-specific
+tips.
+<p>
+
+<h3>Windows</h3>
+<p>
+We recommend that you <b>delete any previously installed versions</b>
+of the plug-in, JRE, JDK, and/or
+SDK before you install the new one, to avoid getting them mixed.
+For example, in Windows 98 you can do this by opening the Start
+menu and choosing Settings - Control Panel - Add/Remove Programs,
+then selecting from the list the items you want to uninstall.
+After the new installation is complete, you may need to close
+and restart your browser to make it recognize the new plug-in,
+or even reboot your computer.
+<p>
+Note that versions of Sun's Java above 1.3.x will not work on
+Windows 95.
+<p>
+
+<h3>Macintosh</h3>
+<p>
+Currently Sun does not have a Java Plug-in for Macs, but Mac OS X
+ships with a sufficiently recent version of Java already included,
+and further updates may be available via Apple's Software Update
+mechanism (depending on your version of OS X).  We have observed
+display anomalies in some versions (e.g., the red circle has a black
+background), but the applets should still be usable.
+For Mac OS Classic, Apple has a Java runtime called MRJ 2.2.5, but
+unfortunately it only supports Java 1.1.8 (not Java 1.2) and thus
+will not work with our applets.
+<p>
+For more information about Java on the Mac, see
+<a href="http://www.apple.com/java/">
+http://www.apple.com/java/</a> and 
+<a href="http://developer.apple.com/java/">
+http://developer.apple.com/java/</a>.
+<p>
+
+<h3>Linux</h3>
+<p>
+Sun now provides versions of the Java Plug-in for Linux, in
+either RPM or self-extracting binary format.
+Be sure to read the installation notes first,
+for system requirements and detailed instructions.
+<p>
+
+<h3>Solaris</h3>
+<p>
+Note that Solaris 8 already includes Java 1.2, though you may
+want to install a newer version.  Sun provides versions of the
+Java Plug-in for Solaris on both Sparc and x86 processors.
+Be sure to read the installation notes first,
+for system requirements and detailed instructions.  In particular,
+note that you may need to apply some Solaris patches to your
+system before installing Java, and possibly obtain additional
+fonts.  You might also want to uninstall any previous versions of
+the plug-in, JRE, JDK, and/or SDK to avoid conflicts between versions.  After
+the installation, you may need to close and restart your browser
+to make it recognize the new plug-in.
+<p>
+One last bit of advice: at one point during our installation,
+we were instructed to enter the command <tt>xhost +</tt>,
+but this is a <b>bad idea</b> because it disables the X server's
+access control entirely, allowing any machine on the network to
+connect to your display.  If this instruction appears, grant access
+only to the specific host that needs it instead.
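+For example (the hostname below is a placeholder):
+<pre>
+    xhost +remote.example.edu     # allow only this host to connect
+    xhost -remote.example.edu     # revoke the permission when done
+</pre>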
+<p>
+
+<!--
+<h3>Other Platforms</h3>
+<p>
+For availability of the Java Plug-in on other computers, see
+<a href="http://java.sun.com/j2se/1.4/ports.html">
+http://java.sun.com/j2se/1.4/ports.html</a>.
+<p>
+-->
+
+<br>
+<p>
+<hr>
+<i>Cathy Riemer, December 2005</i>
+</body>
+</html>
diff --git a/static/laj/docs/laj_applet_help.html b/static/laj/docs/laj_applet_help.html
new file mode 100644
index 0000000..0ef8cbc
--- /dev/null
+++ b/static/laj/docs/laj_applet_help.html
@@ -0,0 +1,331 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2//EN">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+<title>Help Page for the Laj Applet</title>
+</head>
+<body bgcolor="#FFFFFF">
+<br>
+<p>
+<h2>Help Page for the Laj Applet</h2>
+<br>
+<p>
+TABLE OF CONTENTS
+<ul>
+<li><a href="#intro">Introduction</a>
+<li><a href="#prep">Preparing Your Browser</a>
+<li><a href="#screen">Screen Layout</a>
+<li><a href="#mouse">Mouse Controls</a>
+<li><a href="#menu">Menus and Buttons</a>
+<li><a href="#notes">Footnotes</a>
+</ul>
+<p>
+<br>
+
+<h3><a name="intro">Introduction</a></h3>
+<p>
+Laj is a tool for viewing and manipulating the output from
+pairwise alignment programs such as
+<a href="http://bio.cse.psu.edu/">blastz</a>.  It can display
+interactive dotplot, pip, and text representations of the
+alignments, a diagram showing the locations of exons and
+repeats, and annotation links to other web sites containing
+additional information about particular regions.
+<p>
+The program is written in Java in order to provide a
+graphical user interface that is portable across a variety
+of computer platforms; indeed its name stands for "Local
+Alignments with Java".  Currently it exists in two forms,
+a stand-alone application and a web-based applet, with
+slightly different capabilities.  However, this help page
+will only discuss the applet.
+<p>
+
+<h3><a name="prep">Preparing Your Browser</a></h3>
+<p>
+This applet requires at least Java 1.2, and preferably Java 1.3
+or higher.  For best compatibility, Sun's Java Plug-in
+is recommended.  Please see
+<a href="java_plugin_help.html">Installing the Java Plug-in</a>
+for more information.
+<p>
+
+<h3><a name="screen">Screen Layout</a></h3>
+<p>
+The Laj window is divided into several sections.
+Across the top you will see a menu/control bar, and below
+that two indicator lines for displaying information about the
+positions of the mouse pointer and the "mark"
+(<a href="#red">red</a> circle)
+respectively.  The controls will be discussed individually
+in the <a href="#menu">Menus and Buttons</a> section of this
+document.
+<p>
+<i>Ruler:</i><br>
+The first graphical panel is a horizontal ruler that displays
+tick marks corresponding to positions in the first aligned
+sequence.  These are intended to give you an immediate
+general feel for the location and scale of the region being
+displayed.  Precise locations can be determined via the
+position indicator, which displays the exact coordinate of
+the mouse pointer.
+<p>
+<i>Dotplot:</i><br>
+The large middle panel displays a dotplot view of the
+alignments, with the first sequence (often human) along
+the horizontal <tt>x</tt>-axis and the second sequence
+(e.g., mouse) along the vertical <tt>y</tt>-axis.  If the
+second sequence contains multiple contigs, they will appear
+as separate horizontal bands across the plot, each with its
+own <tt>y</tt>-axis coordinate system.  Whenever
+the mouse pointer is in this panel, the position indicator
+displays its location in the format "<tt>x,y</tt>", where
+<tt>x</tt> is the position in the horizontal sequence and
+<tt>y</tt> is the position in the vertical sequence.  If
+there are multiple contigs, then the first word of the
+contig name will be displayed as well.
+<p>
+<i>Annotation links:</i><br>
+Below the dotplot is a panel that provides links to
+additional information about various sequence regions.  Each
+annotation is represented by a color-coded bar spanning the
+region's position in the first sequence.  The bars'
+vertical positions are not meaningful; they are placed in
+rows only for convenience, to keep them from overlapping.
+Pointing to a particular bar will cause the position
+indicator to display the <tt>x</tt> coordinate of the
+pointer, and also the type and description of that bar's
+annotation; otherwise only the <tt>x</tt> coordinate will be
+shown.  Clicking on a bar will open a separate browser
+window to visit the corresponding web site.  If no annotation
+file was provided, this panel will not appear.
+<p>
+<i>Sequence features:</i><br>
+The next panel contains a schematic diagram of the
+known exons, repeats, and other features in the first
+sequence, if these files were provided.  Again, the position
+indicator displays the <tt>x</tt> coordinate of the mouse
+pointer, and also identifies any features at that position.
+<p>
+<i>Pip:</i><br>
+The next panel displays a pip (percent identity plot) view
+of the alignments.  This is similar to the dotplot,
+except that the vertical scale represents the percentage of
+matching nucleotides in each gap-free segment of a local
+alignment, instead of its position in the second sequence.
+Only the top half of the plot is shown, since segments
+matching less than 50% are not very interesting.  An
+additional feature of this panel is that colored backgrounds,
+or "underlays", may be used to highlight regions of interest.
+The position indicator displays the horizontal coordinate
+and vertical percentage position of the mouse pointer, and
+it can also display labels for the colored regions if these
+were provided.
+<p>
+<i>Text view:</i><br>
+The bottom panel displays a nucleotide-level view of a single
+selected local alignment.  (Initially it is blank, since you
+haven't selected anything yet.)  The top row of this display
+shows the nucleotide sequence from the first species
+(<tt>x</tt>-axis in the dotplot), while the bottom row shows
+the sequence from the second one (<tt>y</tt>-axis).  Both
+sequences will likely have had gaps inserted by the alignment
+program.  The middle row contains symbols to
+indicate how well the nucleotides match at each position;
+this matching is case-insensitive to deal with soft masking,
+but non-nucleotide characters such as <tt>X</tt> or <tt>N</tt>
+never match anything, even themselves.
+Note that most
+of the local alignments will be much too long to fit across
+this window, so a scrollbar is provided; the
+relative size of the scrollbar's slider indicates what fraction
+of the alignment is shown in the window.  Shaded "highlights"
+similar to the pip underlays may also appear.  Whenever
+the mouse pointer is in this bottom panel, the position
+indicator displays its location in the
+format "<tt>n:x,y</tt>", where <tt>n</tt> is the column position
+in the text representation of the alignment (starting with 0),
+while <tt>x</tt> and <tt>y</tt> are the sequence positions
+in the top and bottom rows, respectively (starting with 1).
+Note that <tt>x</tt> and <tt>y</tt> do not include the gaps,
+but <tt>n</tt> does.  Labels for any highlights at that
+position are also displayed.
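+<p>
+For example, in this purely illustrative fragment (the match row is
+omitted), the two gap characters count toward <tt>n</tt> but not
+toward <tt>x</tt>:
+<pre>
+    n (column):   0 1 2 3 4 5
+    top row:      A C - - G T
+    bottom row:   A C G T G T
+</pre>
+Pointing at the last column would display "5:4,6".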
+<p>
+With the exception of the text view, all of these panels
+use the same horizontal coordinate scale (i.e., position in
+the first sequence), and they are always kept vertically
+aligned so they can be compared easily.
+<p>
+
+<h3><a name="mouse">Mouse Controls</a></h3>
+<p>
+You can select a particular local alignment in the dotplot
+or pip by clicking on one of its segments with the left mouse
+button.  (Actually you don't have to click exactly on it,
+because Laj will automatically jump to the nearest point in
+the same contig if you miss.)  The spot will be marked with a
+small <a href="#red">red</a> circle
+in both the dotplot and the pip, and the entire local
+alignment containing the mark will change color from black
+to <a href="#red">red</a>
+(each local alignment typically spans several
+gap-free "segments").  Also, the corresponding text view for
+that alignment will appear in the bottom panel with the
+selected position highlighted.  This requires loading the
+sequence files, so it may take a few moments.  Lastly, the
+mark indicator line will be filled in with information about
+the marked alignment and position, including the contig name
+if the second sequence is fragmented.  Note that there is only
+one mark at a time, so the previous one, if any, will be
+unmarked.
+<p>
+In a similar fashion, clicking the left mouse button in the
+text view will move the mark (both the highlight and the
+<a href="#red">red</a> circle) to that position
+(though sometimes you have to click twice).
+However, gap positions cannot be selected in this manner
+because they do not correspond to pip segments; if
+you click in a gap, the left end of the gap is selected
+instead.
+<p>
+As mentioned earlier, clicking on an annotation bar will
+open a separate browser window to visit the corresponding
+web site, but clicking in the ruler or feature panel has no
+effect.
+<p>
+You can "zoom in" on a particular region by dragging out a
+rectangle with the left mouse button in any of the white
+panels (ruler, dotplot, annotations, features, or pip).  All
+of these panels will always zoom together, to keep them lined
+up.  This can be repeated until the maximum resolution is
+reached; after that Laj will display an error message.
+Note that selecting your zoom in a non-dotplot panel only
+zooms horizontally (the zoom rectangle is always
+full-height), so to keep the dotplot looking nice it is best
+to select your zoom there, and keep the zoom rectangle
+roughly proportional to the dimensions of the existing
+dotplot panel.
+<p>
+Holding down the right mouse button over any of the white
+panels adds crosshairs at the mouse pointer's location, which
+is convenient for determining whether two regions really line
+up.  If you have a one-button mouse, you can achieve the same
+effect by holding down the <tt>Shift</tt> key when initially
+pressing the mouse button.
+<p>
+
+<h3><a name="menu">Menus and Buttons</a></h3>
+<p>
+<dl>
+<dt>File - Save:
+<dd>
+Allows you to save the alignments that you have flagged (see below)
+by sending them back to the server in a format similar to the input.
+What the server does with them depends on the site.  For example,
+if you are running Laj via <a href="http://www.bx.psu.edu/">Galaxy</a>,
+they will be stored as a new dataset in your history for further
+analysis.  A different site might email the file to you, reflect
+it to your browser, or use it in some other way.  If the site does
+not specify a URL for sending the alignments, this menu item will not
+appear.
+<p>
+<dt>File - Exit:
+<dd>
+Closes the Laj window.
+<p>
+<dt>Tools - Flag:
+<dd>
+Flags the currently selected local alignment (the one containing the
+<a href="#red">red</a> mark circle) by changing its color.  You can
+repeat this on other alignments to build a flagged set.  The flagged
+alignments are normally green, but if one is also the currently
+selected alignment (e.g., when you have just flagged it), it can't be
+both green and red so it is orange instead.  Also, just like the
+<a href="#red">red</a> circle, all of these colors will be displayed
+differently against colored backgrounds.  
+<p>
+<dt>Tools - Unflag:
+<dd>
+Changes the local alignment containing the <a href="#red">red</a>
+circle back to its original color.
+<p>
+<dt>Tools - Unzoom:
+<dd>
+Restores all of the white panels back to the original, unzoomed view.
+<p>
+<dt>Help - About:
+<dd>
+Displays a message window with information about Laj, including
+version, author, etc.
+<p>
+<dt>Help - Manual:
+<dd>
+Displays this help page in a separate browser window.
+<p>
+<dt>Help - Keys:
+<dd>
+Displays a message window listing Laj's keyboard shortcuts.  No
+<i>Alt</i> key is needed, since Laj doesn't use the keyboard for
+much else.
+</dl>
+<p>
+
+<!-- <hr align=left noshade size=1 width="20%" color=black> -->
+<h3><a name="notes">Footnotes</a></h3>
+<p>
+<a name="red"></a>
+[1] 
+The circular mark and its local alignment are red
+when the background is white, but are displayed in different
+colors against other backgrounds to ensure good contrast.
+<p>
+
+<br>
+<p>
+<hr>
+<i>Cathy Riemer, August 2006</i>
+
+<!-- These blank lines allow the browser to scroll properly
+     to the last anchor target. -->
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+<br>
+</body>
+</html>
diff --git a/static/laj/laj.jar b/static/laj/laj.jar
new file mode 100644
index 0000000..32e1025
Binary files /dev/null and b/static/laj/laj.jar differ
diff --git a/static/maps/galaxy-app-base.js.map b/static/maps/galaxy-app-base.js.map
new file mode 100644
index 0000000..4eb9400
--- /dev/null
+++ b/static/maps/galaxy-app-base.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"galaxy-app-base.js","sources":["../src/galaxy-app-base.js"],"names":["define","userModel","metricsLogger","addLogging","localize","BASE_MVC","bootstrapped","GalaxyApp","options","self","this","_init","DEBUGGING_KEY","NAMESPACE_KEY","localDebugging","localStorage","getItem","storageErr","console","log","prototype","_","extend","Backbone","Events","logger","_processOptions","debug","_initConfig","config","_patchGalaxy","window","Galaxy","_initLogger","loggerOptions","_ [...]
\ No newline at end of file
diff --git a/static/maps/galaxy.interactive_environments.js.map b/static/maps/galaxy.interactive_environments.js.map
new file mode 100644
index 0000000..ee5a8e6
--- /dev/null
+++ b/static/maps/galaxy.interactive_environments.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"galaxy.interactive_environments.js","sources":["../src/galaxy.interactive_environments.js"],"names":["append_notebook","url","clear_main_area","$","append","remove","children","display_spinner","galaxy_root","test_ie_availability","success_callback","request_count","interval","setInterval","ajax","xhrFields","withCredentials","type","timeout","success","console","log","clearInterval","error","toastr","closeButton","timeOut","tapToDismiss"],"mappings":"AAIA,QAASA,iBAA [...]
\ No newline at end of file
diff --git a/static/maps/galaxy.js.map b/static/maps/galaxy.js.map
new file mode 100644
index 0000000..13cf261
--- /dev/null
+++ b/static/maps/galaxy.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"galaxy.js","sources":["../src/galaxy.js"],"names":["define","_","Backbone","BASE_MVC","userModel","metricsLogger","addLogging","localize","GalaxyApp","options","bootstrapped","self","this","_init","DEBUGGING_KEY","NAMESPACE_KEY","localDebugging","localStorage","getItem","storageErr","console","log","prototype","extend","Events","logger","debug","_processOptions","root","_initConfig","config","_patchGalaxy","window","Galaxy","_initLogger","loggerOptions","_initLocale" [...]
\ No newline at end of file
diff --git a/static/maps/galaxy.library.js.map b/static/maps/galaxy.library.js.map
new file mode 100644
index 0000000..67ae9bf
--- /dev/null
+++ b/static/maps/galaxy.library.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"galaxy.library.js","sources":["../src/galaxy.library.js"],"names":["define","mod_masthead","mod_utils","mod_toastr","mod_baseMVC","mod_library_model","mod_folderlist_view","mod_librarylist_view","mod_librarytoolbar_view","mod_foldertoolbar_view","mod_library_dataset_view","mod_library_library_view","mod_library_folder_view","LibraryRouter","Backbone","Router","extend","initialize","this","routesHit","history","on","bind","trackPageview","routes","page/:show_page","li [...]
\ No newline at end of file
diff --git a/static/maps/galaxy.menu.js.map b/static/maps/galaxy.menu.js.map
new file mode 100644
index 0000000..5979b28
--- /dev/null
+++ b/static/maps/galaxy.menu.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"galaxy.menu.js","sources":["../src/galaxy.menu.js"],"names":["define","Masthead","GalaxyMenu","Backbone","Model","extend","initialize","options","this","config","masthead","create","tab_analysis","GalaxyMastheadTab","id","title","content","title_attribute","append","workflow_options","Galaxy","user","disabled","tab_workflow","tab_shared","add","divider","user_requests","tab_lab","lims_doc_url","visualization_options","tab_visualization","target","get","tab_admin","ex [...]
\ No newline at end of file
diff --git a/static/maps/galaxy.pages.js.map b/static/maps/galaxy.pages.js.map
new file mode 100644
index 0000000..8ac6e3a
--- /dev/null
+++ b/static/maps/galaxy.pages.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"galaxy.pages.js","sources":["../src/galaxy.pages.js"],"names":["init_galaxy_elts","wym","$","_doc","body","each","this","click","range","createRange","selectNodeContents","selection","window","getSelection","removeAllRanges","addRange","get_item_info","dialog_type","item_singular","item_plural","item_controller","CONTROLS","item_class","item_list_action","toLowerCase","ajax_url","list_objects_url","replace","singular","plural","controller","iclass","list_ajax_url","m [...]
\ No newline at end of file
diff --git a/static/maps/i18n.js.map b/static/maps/i18n.js.map
new file mode 100644
index 0000000..ca45951
--- /dev/null
+++ b/static/maps/i18n.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"i18n.js","sources":["../src/i18n.js"],"names":["addPart","locale","master","needed","toLoad","prefix","suffix","push","addIfExists","req","fullName","require","_fileExists","toUrl","mixin","target","source","force","prop","hasOwnProperty","nlsRegExp","define","module","masterConfig","config","version","load","name","onLoad","masterName","i","part","match","exec","parts","split","value","current","navigator","language","userLanguage","toLowerCase","isBuild","length"," [...]
\ No newline at end of file
diff --git a/static/maps/layout/generic-nav-view.js.map b/static/maps/layout/generic-nav-view.js.map
new file mode 100644
index 0000000..9a0e237
--- /dev/null
+++ b/static/maps/layout/generic-nav-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"generic-nav-view.js","sources":["../../src/layout/generic-nav-view.js"],"names":["define","Modal","GenericNavView","Backbone","View","extend","initialize","this","modal","makeModalIframe","host","window","Galaxy","config","communication_server_host","port","communication_server_port","username","escape","user","attributes","persistent_communication_rooms","query_string","src","$el_chat_modal_header","$el_chat_modal_body","iframe_template","header_template","frame_hei [...]
\ No newline at end of file
diff --git a/static/maps/layout/masthead.js.map b/static/maps/layout/masthead.js.map
new file mode 100644
index 0000000..8da424b
--- /dev/null
+++ b/static/maps/layout/masthead.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"masthead.js","sources":["../../src/layout/masthead.js"],"names":["define","Utils","Menu","Scratchbook","QuotaMeter","View","Backbone","extend","initialize","options","self","this","setElement","_template","$navbarBrandLink","$","$navbarBrandImage","$navbarBrandTitle","$navbarTabs","$quoteMeter","collection","Collection","on","model","append","Tab","render","$el","empty","callback","each","m","fetch","Galaxy","frame","quotaMeter","UserQuotaMeter","user","el","window", [...]
\ No newline at end of file
diff --git a/static/maps/layout/menu.js.map b/static/maps/layout/menu.js.map
new file mode 100644
index 0000000..10d58e3
--- /dev/null
+++ b/static/maps/layout/menu.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"menu.js","sources":["../../src/layout/menu.js"],"names":["define","Tours","GenericNav","Webhooks","Collection","Backbone","extend","model","Model","defaults","visible","target","fetch","options","this","reset","extendedNavItem","GenericNavView","add","render","id","title","url","tooltip","disabled","Galaxy","user","menu","user_requests","lims_doc_url","callback","webhooks","$","document","ready","each","models","index","webhook","toJSON","activate","page","masthead", [...]
\ No newline at end of file
diff --git a/static/maps/layout/modal.js.map b/static/maps/layout/modal.js.map
new file mode 100644
index 0000000..50d2914
--- /dev/null
+++ b/static/maps/layout/modal.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"modal.js","sources":["../../src/layout/modal.js"],"names":["define","jQuery","hide_modal","modal","hide","show_modal","title","body","buttons","extra_buttons","init_fn","setContent","show","backdrop","show_message","show_in_overlay","options","width","height","scroll","$","bind","unbind","closeButton","url","Modal","this","$overlay","overlay","$dialog","dialog","$header","find","$body","$footer","$backdrop","on","proxy","extend","prototype","html","$buttons","each"," [...]
\ No newline at end of file
diff --git a/static/maps/layout/page.js.map b/static/maps/layout/page.js.map
new file mode 100644
index 0000000..c30bdc0
--- /dev/null
+++ b/static/maps/layout/page.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"page.js","sources":["../../src/layout/page.js"],"names":["define","Masthead","Panel","Modal","BaseMVC","PageLayoutView","Backbone","View","extend","LoggableMixin","_logNamespace","el","className","_panelIds","defaultOptions","message_box_visible","message_box_content","message_box_class","show_inactivity_warning","inactivity_box_content","initialize","options","this","log","_","pick","defaults","omit","config","Galaxy","modal","masthead","$el","attr","html","_templat [...]
\ No newline at end of file
diff --git a/static/maps/layout/panel.js.map b/static/maps/layout/panel.js.map
new file mode 100644
index 0000000..f6b297f
--- /dev/null
+++ b/static/maps/layout/panel.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"panel.js","sources":["../../src/layout/panel.js"],"names":["define","jQuery","_","Backbone","BASE_MVC","$","MIN_PANEL_WIDTH","MAX_PANEL_WIDTH","SidePanel","View","extend","LoggableMixin","_logNamespace","initialize","attributes","this","log","title","hidden","savedSize","hiddenByTool","$center","$el","siblings","$toggleButton","render","html","template","id","_templateHeader","_templateBody","_templateFooter","join","escape","events","mousedown .unified-panel-footer  [...]
\ No newline at end of file
diff --git a/static/maps/layout/scratchbook.js.map b/static/maps/layout/scratchbook.js.map
new file mode 100644
index 0000000..47f3424
--- /dev/null
+++ b/static/maps/layout/scratchbook.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"scratchbook.js","sources":["../../src/layout/scratchbook.js"],"names":["define","Frames","Backbone","View","extend","initialize","options","self","this","frames","visible","setElement","$el","buttonActive","collection","add","id","icon","tooltip","onclick","active","set","toggle","show_note","note_cls","hide","onbeforeunload","length","buttonLoad","show","on","note","history_cache","addDataset","dataset_id","current_dataset","Galaxy","currHistoryPanel","history_id"," [...]
\ No newline at end of file
diff --git a/static/maps/libs/backbone.js.map b/static/maps/libs/backbone.js.map
new file mode 100644
index 0000000..f1e1bea
--- /dev/null
+++ b/static/maps/libs/backbone.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"backbone.js","sources":["../../src/libs/backbone.js"],"names":["factory","root","self","global","define","amd","_","$","exports","Backbone","require","e","jQuery","Zepto","ender","previousBackbone","slice","Array","prototype","VERSION","noConflict","this","emulateHTTP","emulateJSON","addMethod","length","method","attribute","value","iteratee","context","cb","defaultVal","args","call","arguments","unshift","apply","addUnderscoreMethods","Class","methods","each","insta [...]
\ No newline at end of file
diff --git a/static/maps/libs/bbi/bigwig.js.map b/static/maps/libs/bbi/bigwig.js.map
new file mode 100644
index 0000000..08e4644
--- /dev/null
+++ b/static/maps/libs/bbi/bigwig.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"bigwig.js","sources":["../../../src/libs/bbi/bigwig.js"],"names":["define","spans","jszlib","DASFeature","DASGroup","id","this","readInt","ba","offset","read","url","start","size","Math","pow","$","ajax","type","dataType","timeout","beforeSend","xhrObj","setRequestHeader","xhrFields","responseType","bwg_readOffset","o","M1","M2","M3","M4","BigWig","BigWigView","bwg","cirTreeOffset","cirTreeLength","isSummary","makeBwg","promise","Deferred","when","then","result","res [...]
\ No newline at end of file
diff --git a/static/maps/libs/bbi/jquery-ajax-native.js.map b/static/maps/libs/bbi/jquery-ajax-native.js.map
new file mode 100644
index 0000000..945dc08
--- /dev/null
+++ b/static/maps/libs/bbi/jquery-ajax-native.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery-ajax-native.js","sources":["../../../src/libs/bbi/jquery-ajax-native.js"],"names":["root","factory","define","amd","exports","module","require","jQuery","this","$","ajaxSettings","responseFields","native","converters","support","xhrId","xhrSuccessStatus",1223,"xhrCallbacks","xhrSupported","xhr","window","ActiveXObject","on","key","cors","ajax","ajaxTransport","options","callback","crossDomain","send","headers","complete","i","id","responses","open","type","url [...]
\ No newline at end of file
diff --git a/static/maps/libs/bbi/jszlib.js.map b/static/maps/libs/bbi/jszlib.js.map
new file mode 100644
index 0000000..4294b93
--- /dev/null
+++ b/static/maps/libs/bbi/jszlib.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jszlib.js","sources":["../../../src/libs/bbi/jszlib.js"],"names":["define","ZStream","Inflate","this","was","InfBlocks","z","checkfn","w","hufts","Int32Array","MANY","window","Uint8Array","end","mode","IB_TYPE","reset","left","table","index","blens","bb","tb","codes","InfCodes","last","bitk","bitb","read","write","check","inftree","InfTree","inflate_trees_fixed","bl","bd","tl","td","fixed_bl","fixed_bd","fixed_tl","fixed_td","Z_OK","arrayCopy","src","srcOffset","dest [...]
\ No newline at end of file
diff --git a/static/maps/libs/bbi/spans.js.map b/static/maps/libs/bbi/spans.js.map
new file mode 100644
index 0000000..3acdfa5
--- /dev/null
+++ b/static/maps/libs/bbi/spans.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"spans.js","sources":["../../../src/libs/bbi/spans.js"],"names":["define","Range","min","max","this","_min","_max","_Compound","ranges","sorted","sort","_rangeOrder","merged","current","shift","forEach","range","push","_ranges","union","s0","s1","Array","insertRange","intersection","r0","r1","l0","length","l1","i0","i1","or","lapMin","Math","lapMax","coverage","s","tot","rl","ri","r","rangeOrder","a","b","prototype","contains","pos","isContiguous","_pushRanges","toStr [...]
\ No newline at end of file
diff --git a/static/maps/libs/bibtex.js.map b/static/maps/libs/bibtex.js.map
new file mode 100644
index 0000000..7118137
--- /dev/null
+++ b/static/maps/libs/bibtex.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"bibtex.js","sources":["../../src/libs/bibtex.js"],"names":["BibtexParser","arg0","accumulator","entry","entries","push","tempStorage","parser","call","parse","errors","getErrors","this","STATES_","ENTRY_OR_JUNK","OBJECT_TYPE","ENTRY_KEY","KV_KEY","EQUALS","KV_VALUE","DATA_","CALLBACK_","CHAR_","LINE_","CHAR_IN_LINE_","SKIPWS_","SKIPCOMMENT_","PARSETMP_","SKIPTILLEOL_","VALBRACES_","BRACETYPE_","BRACECOUNT_","STATE_","ERRORS_","ENTRY_TYPES_","inproceedings","proceedin [...]
\ No newline at end of file
diff --git a/static/maps/libs/bootstrap-tour.js.map b/static/maps/libs/bootstrap-tour.js.map
new file mode 100644
index 0000000..01d76ff
--- /dev/null
+++ b/static/maps/libs/bootstrap-tour.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"bootstrap-tour.js","sources":["../../src/libs/bootstrap-tour.js"],"names":["$","window","Tour","document","options","storage","localStorage","_error","this","_options","extend","name","steps","container","autoscroll","keyboard","debug","backdrop","backdropContainer","backdropPadding","redirect","orphan","duration","delay","basePath","template","afterSetState","afterGetState","afterRemoveState","onStart","onEnd","onShow","onShown","onHide","onHidden","onNext","onPrev" [...]
\ No newline at end of file
diff --git a/static/maps/libs/bootstrap.js.map b/static/maps/libs/bootstrap.js.map
new file mode 100644
index 0000000..e3e22bd
--- /dev/null
+++ b/static/maps/libs/bootstrap.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"bootstrap.js","sources":["../../src/libs/bootstrap.js"],"names":["$","transitionEnd","el","document","createElement","transEndEventNames","WebkitTransition","MozTransition","OTransition","transition","name","undefined","style","end","fn","emulateTransitionEnd","duration","called","$el","this","one","support","callback","trigger","setTimeout","window","jQuery","Tab","element","prototype","show","$this","$ul","closest","selector","attr","replace","parent","hasClass","p [...]
\ No newline at end of file
diff --git a/static/maps/libs/common-libs.js.map b/static/maps/libs/common-libs.js.map
new file mode 100644
index 0000000..3bdf4a0
--- /dev/null
+++ b/static/maps/libs/common-libs.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"common-libs.js","sources":["../../src/libs/common-libs.js"],"names":["jquery","require","window","jQuery","$","_","Backbone","Handlebars","exports"],"mappings":"AACA,GAAIA,QAASC,QAAS,SACtBC,QAAOC,OAASD,OAAOE,EAAIJ,OAC3BC,QAAS,2BAGTA,QAAS,oBACTA,QAAS,+BACTA,QAAS,wBACTA,QAAS,0BACTA,QAAS,uBAETA,QAAS,cAGT,IAAII,GAAIJ,QAAS,eACjBC,QAAOG,EAAIA,CACX,IAAIC,UAAWL,QAAS,aACxBC,QAAOI,SAAWA,QAClB,IAAIC,YAAaN,QAAS,uBAC1BC,QAAOK,WAAaA,WAGpBN,QAAS,kBACTA,QAAS,oBACTA,QAAS,6BAETO,QAAQR [...]
\ No newline at end of file
diff --git a/static/maps/libs/d3.js.map b/static/maps/libs/d3.js.map
new file mode 100644
index 0000000..c52f3ab
--- /dev/null
+++ b/static/maps/libs/d3.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"d3.js","sources":["../../src/libs/d3.js"],"names":["d3_documentElement","node","ownerDocument","document","documentElement","d3_window","defaultView","d3_ascending","a","b","d3_number","x","d3_numeric","isNaN","d3_bisector","compare","left","lo","hi","arguments","length","mid","right","d3_transposeLength","d","d3_range_integerScale","k","d3_class","ctor","properties","key","Object","defineProperty","prototype","value","enumerable","d3_Map","this","_","create","d3_map [...]
\ No newline at end of file
diff --git a/static/maps/libs/farbtastic.js.map b/static/maps/libs/farbtastic.js.map
new file mode 100644
index 0000000..76d68e4
--- /dev/null
+++ b/static/maps/libs/farbtastic.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"farbtastic.js","sources":["../../src/libs/farbtastic.js"],"names":["$","__debug","fn","farbtastic","options","this","container","_farbtastic","fb","linkTo","callback","unbind","updateValue","color","bind","value","setColor","unpack","rgb","hsl","RGBToHSL","updateDisplay","setHSL","HSLToRGB","pack","initWidget","dim","width","height","html","find","attr","css","end","browser","msie","radius","wheelWidth","square","Math","floor","mid","markerSize","solidFill","left","t [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery.complexify.js.map b/static/maps/libs/jquery.complexify.js.map
new file mode 100644
index 0000000..b9538f2
--- /dev/null
+++ b/static/maps/libs/jquery.complexify.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.complexify.js","sources":["../../src/libs/jquery.complexify.js"],"names":["$","fn","extend","complexify","options","callback","additionalComplexityForCharset","str","charset","i","length","charCodeAt","inBanlist","banMode","bannedPasswords","toLowerCase","indexOf","inArray","evaluateSecurity","password","this","val","complexity","valid","CHARSETS","Math","log","pow","strengthScaleFactor","MIN_COMPLEXITY","minimumChars","MAX_COMPLEXITY","call","defaults","windo [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery.sparklines.js.map b/static/maps/libs/jquery.sparklines.js.map
new file mode 100644
index 0000000..54f3001
--- /dev/null
+++ b/static/maps/libs/jquery.sparklines.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.sparklines.js","sources":["../../src/libs/jquery.sparklines.js"],"names":["document","Math","undefined","factory","define","amd","jQuery","fn","sparkline","$","getDefaults","createClass","SPFormat","clipval","quartile","normalizeValue","normalizeValues","remove","isNumber","all","sum","addCSS","ensureArray","formatNumber","RangeMap","MouseHandler","Tooltip","barHighlightMixin","line","bar","tristate","discrete","bullet","pie","box","defaultStyles","initStyles" [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jqtouch.js.map b/static/maps/libs/jquery/jqtouch.js.map
new file mode 100644
index 0000000..6f08288
--- /dev/null
+++ b/static/maps/libs/jquery/jqtouch.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jqtouch.js","sources":["../../../src/libs/jquery/jqtouch.js"],"names":["$","jQTouch","options","warn","message","undefined","window","console","jQTSettings","debug","addAnimation","animation","animations","push","addTapHandler","tapHandler","tapHandlers","addPageToHistory","page","history","unshift","hash","attr","id","clickHandler","e","$el","target","is","touchSelectors","join","closest","isExternalLink","preventDefault","support","touch","trigger","doNavigation"," [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery-ui.js.map b/static/maps/libs/jquery/jquery-ui.js.map
new file mode 100644
index 0000000..8f1ef6d
--- /dev/null
+++ b/static/maps/libs/jquery/jquery-ui.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery-ui.js","sources":["../../../src/libs/jquery/jquery-ui.js"],"names":["e","t","i","n","r","o","u","nodeName","toLowerCase","parentNode","name","href","s","test","disabled","expr","filters","visible","parents","andSelf","filter","css","this","length","ui","version","extend","keyCode","BACKSPACE","COMMA","DELETE","DOWN","END","ENTER","ESCAPE","HOME","LEFT","NUMPAD_ADD","NUMPAD_DECIMAL","NUMPAD_DIVIDE","NUMPAD_ENTER","NUMPAD_MULTIPLY","NUMPAD_SUBTRACT","PAGE_DOWN", [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.autocomplete.js.map b/static/maps/libs/jquery/jquery.autocomplete.js.map
new file mode 100644
index 0000000..7ee60e1
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.autocomplete.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.autocomplete.js","sources":["../../../src/libs/jquery/jquery.autocomplete.js"],"names":["$","fn","autocomplete","options","url","arguments","length","opts","extend","defaults","this","each","$this","data","Autocompleter","meta","inputClass","loadingClass","resultsClass","selectClass","queryParamName","extraParams","remoteDataType","lineSeparator","cellSeparator","minChars","maxItemsToShow","delay","useCache","maxCacheLength","matchSubset","matchCase","matchIns [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.cookie.js.map b/static/maps/libs/jquery/jquery.cookie.js.map
new file mode 100644
index 0000000..ce8bff6
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.cookie.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.cookie.js","sources":["../../../src/libs/jquery/jquery.cookie.js"],"names":["jQuery","cookie","name","value","options","cookieValue","document","cookies","split","i","length","trim","substring","decodeURIComponent","$","extend","expires","toUTCString","date","Date","setTime","getTime","path","domain","secure","encodeURIComponent","join"],"mappings":"AAuDAA,OAAOC,OAAS,SAASC,EAAMC,EAAOC,GAClC,GAAoB,mBAATD,GAyBJ,CACH,GAAIE,GAAc,IAClB,IAAIC,SAASL,QAA6B,IAAnBK,SAAS [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.dynatree.js.map b/static/maps/libs/jquery/jquery.dynatree.js.map
new file mode 100644
index 0000000..36a0f5d
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.dynatree.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.dynatree.js","sources":["../../../src/libs/jquery/jquery.dynatree.js"],"names":["_log","mode","_canLog","args","Array","prototype","slice","apply","arguments","dt","Date","tag","getHours","getMinutes","getSeconds","getMilliseconds","window","console","info","warn","log","e","number","join","logMsg","unshift","this","getDynaTreePersistData","DTNodeStatus_Error","DTNodeStatus_Loading","DTNodeStatus_Ok","$","getDtNodeFromElement","el","alert","ui","dynatree","get [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.event.drag.js.map b/static/maps/libs/jquery/jquery.event.drag.js.map
new file mode 100644
index 0000000..0dc6968
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.event.drag.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.event.drag.js","sources":["../../../src/libs/jquery/jquery.event.drag.js"],"names":["$","fn","drag","str","arg","opts","type","isFunction","indexOf","this","bind","trigger","$event","event","$special","special","defaults","which","distance","not","handle","relative","drop","click","datakey","noBubble","add","obj","data","related","each","key","undefined","remove","setup","extend","init","attachEvent","dontstart","teardown","removeData","textselect","detachEven [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.event.drop.js.map b/static/maps/libs/jquery/jquery.event.drop.js.map
new file mode 100644
index 0000000..1db4649
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.event.drop.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.event.drop.js","sources":["../../../src/libs/jquery/jquery.event.drop.js"],"names":["$","fn","drop","str","arg","opts","type","isFunction","indexOf","this","bind","trigger","multi","isNaN","delay","tolerance","mode","$event","event","$special","special","targets","datakey","noBubble","add","data","related","remove","setup","active","anyactive","winner","location","push","teardown","removeData","element","grep","target","handler","dd","$targets","filter","each" [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.event.hover.js.map b/static/maps/libs/jquery/jquery.event.hover.js.map
new file mode 100644
index 0000000..2932c51
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.event.hover.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.event.hover.js","sources":["../../../src/libs/jquery/jquery.event.hover.js"],"names":["$","hoverHandler","event","compare","data","type","dist2","dispatch","call","this","elem","add","timer","setTimeout","delay","Math","pow","pageX","pageY","clearTimeout","hovered","remove","speed","fn","_hover","hover","fn1","fn2","fn3","bind","trigger","special","setup","extend","teardown","jQuery"],"mappings":"CAAC,SAAUA,GAyCX,QAASC,GAAcC,GAsCtB,QAASC,KAAWF,EAAcG,GArClC,GAA [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.form.js.map b/static/maps/libs/jquery/jquery.form.js.map
new file mode 100644
index 0000000..466e1f0
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.form.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.form.js","sources":["../../../src/libs/jquery/jquery.form.js"],"names":["$","doAjaxSubmit","e","options","data","isDefaultPrevented","preventDefault","target","ajaxSubmit","captureSubmittingElement","$el","is","t","closest","length","form","this","clk","type","undefined","offsetX","clk_x","clk_y","offsetY","fn","offset","pageX","left","pageY","top","offsetLeft","offsetTop","setTimeout","log","debug","msg","Array","prototype","join","call","arguments","window", [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.js.map b/static/maps/libs/jquery/jquery.js.map
new file mode 100644
index 0000000..a52559b
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.js","sources":["../../../src/libs/jquery/jquery.js"],"names":["global","factory","module","exports","document","w","Error","window","this","noGlobal","isArrayLike","obj","length","type","jQuery","isWindow","winnow","elements","qualifier","not","isFunction","grep","elem","i","call","nodeType","risSimple","test","filter","inArray","sibling","cur","dir","createOptions","options","object","each","match","rnotwhite","_","flag","detach","addEventListener","removeEve [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.migrate.js.map b/static/maps/libs/jquery/jquery.migrate.js.map
new file mode 100644
index 0000000..77cdeb3
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.migrate.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.migrate.js","sources":["../../../src/libs/jquery/jquery.migrate.js"],"names":["jQuery","window","undefined","migrateWarn","msg","console","warnedAbout","migrateWarnings","push","warn","migrateMute","migrateTrace","trace","migrateWarnProp","obj","prop","value","Object","defineProperty","configurable","enumerable","get","set","newValue","err","_definePropertyBroken","migrateVersion","log","migrateReset","length","document","compatMode","attrFn","size","attr","ol [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.mousewheel.js.map b/static/maps/libs/jquery/jquery.mousewheel.js.map
new file mode 100644
index 0000000..e061ae5
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.mousewheel.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.mousewheel.js","sources":["../../../src/libs/jquery/jquery.mousewheel.js"],"names":["factory","jQuery","$","handler","event","fn","orgEvent","window","args","slice","call","arguments","delta","deltaX","deltaY","absDelta","absDeltaXY","fix","type","wheelDelta","detail","undefined","wheelDeltaY","wheelDeltaX","Math","abs","lowestDelta","max","lowestDeltaXY","unshift","dispatch","handle","apply","this","toFix","toBind","document","documentMode","fixHooks","i","le [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.rating.js.map b/static/maps/libs/jquery/jquery.rating.js.map
new file mode 100644
index 0000000..da2f8c0
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.rating.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.rating.js","sources":["../../../src/libs/jquery/jquery.rating.js"],"names":["window","jQuery","$","support","opacity","style","document","execCommand","e","fn","rating","options","this","length","arguments","args","each","apply","makeArray","slice","extend","calls","not","addClass","control","input","eid","name","replace","context","form","body","raters","data","call","count","rater","metadata","meta","stars","inputs","serial","before","attr","hasClass","readO [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jquery.wymeditor.js.map b/static/maps/libs/jquery/jquery.wymeditor.js.map
new file mode 100644
index 0000000..9cdc132
--- /dev/null
+++ b/static/maps/libs/jquery/jquery.wymeditor.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jquery.wymeditor.js","sources":["../../../src/libs/jquery/jquery.wymeditor.js"],"names":["WYMeditor","window","console","firebug","names","i","length","jQuery","extend","VERSION","INSTANCES","STRINGS","SKINS","NAME","INDEX","WYM_INDEX","BASE_PATH","CSS_PATH","WYM_PATH","SKINS_DEFAULT_PATH","SKINS_DEFAULT_CSS","SKINS_DEFAULT_JS","LANG_DEFAULT_PATH","IFRAME_BASE_PATH","IFRAME_DEFAULT","JQUERY_PATH","DIRECTION","LOGO","TOOLS","TOOLS_ITEMS","TOOL_NAME","TOOL_TITLE","TOOL [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jstorage.js.map b/static/maps/libs/jquery/jstorage.js.map
new file mode 100644
index 0000000..d1e0af7
--- /dev/null
+++ b/static/maps/libs/jquery/jstorage.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jstorage.js","sources":["../../../src/libs/jquery/jstorage.js"],"names":["_init","localStorageReallyWorks","window","localStorage","setItem","removeItem","BogusQuotaExceededErrorOnIos5","_storage_service","_backend","_observer_update","jStorage_update","E3","globalStorage","location","hostname","E4","_storage_elm","document","createElement","addBehavior","style","behavior","getElementsByTagName","appendChild","load","E","setAttribute","save","data","getAttribute","E5 [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/jstree.js.map b/static/maps/libs/jquery/jstree.js.map
new file mode 100644
index 0000000..155a407
--- /dev/null
+++ b/static/maps/libs/jquery/jstree.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"jstree.js","sources":["../../../src/libs/jquery/jstree.js"],"names":["factory","define","amd","exports","require","jQuery","$","undefined","jstree","_temp1","_temp2","instance_counter","ccp_node","ccp_mode","ccp_inst","themes_loaded","src","attr","_d","document","_node","createElement","setAttribute","className","appendChild","version","defaults","plugins","path","indexOf","replace","idregex","create","el","options","tmp","core","opt","extend","each","i","k","plugin" [...]
\ No newline at end of file
diff --git a/static/maps/libs/jquery/select2.js.map b/static/maps/libs/jquery/select2.js.map
new file mode 100644
index 0000000..b2cebe1
--- /dev/null
+++ b/static/maps/libs/jquery/select2.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"select2.js","sources":["../../../src/libs/jquery/select2.js"],"names":["$","fn","each2","extend","c","j","i","l","this","length","context","call","jQuery","undefined","reinsertElement","element","placeholder","document","createTextNode","before","remove","stripDiacritics","str","match","a","DIACRITICS","replace","indexOf","value","array","equal","measureScrollbar","$template","MEASURE_SCROLLBAR_TEMPLATE","appendTo","dim","width","clientWidth","height","clientHeight", [...]
\ No newline at end of file
diff --git a/static/maps/libs/raven.js.map b/static/maps/libs/raven.js.map
new file mode 100644
index 0000000..e31cbd1
--- /dev/null
+++ b/static/maps/libs/raven.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"raven.js","sources":["../../src/libs/raven.js"],"names":["f","exports","module","define","amd","g","window","global","self","this","Raven","e","t","n","r","s","o","u","a","require","i","Error","code","l","call","length",1,"_dereq_","stringify","obj","replacer","spaces","cycleReplacer","JSON","serializer","stack","keys","key","value","slice","indexOf","join","thisPos","splice","push","getSerialize",2,"RavenConfigError","message","name","prototype","constructor",3,"wra [...]
\ No newline at end of file
diff --git a/static/maps/libs/require.js.map b/static/maps/libs/require.js.map
new file mode 100644
index 0000000..046332d
--- /dev/null
+++ b/static/maps/libs/require.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"require.js","sources":["../../src/libs/require.js"],"names":["requirejs","require","define","global","commentReplace","match","multi","multiText","singlePrefix","isFunction","it","ostring","call","isArray","each","ary","func","i","length","eachReverse","hasProp","obj","prop","hasOwn","getOwn","eachProp","mixin","target","source","force","deepStringMixin","value","RegExp","bind","fn","apply","arguments","scripts","document","getElementsByTagName","defaultOnError","err [...]
\ No newline at end of file
diff --git a/static/maps/libs/toastr.js.map b/static/maps/libs/toastr.js.map
new file mode 100644
index 0000000..d201ec8
--- /dev/null
+++ b/static/maps/libs/toastr.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"toastr.js","sources":["../../src/libs/toastr.js"],"names":["define","$","jQuery","error","message","title","optionsOverride","notify","type","toastType","iconClass","getOptions","iconClasses","info","subscribe","callback","listener","success","warning","clear","$toastElement","options","$container","getContainer","length","hideMethod","duration","hideDuration","easing","hideEasing","complete","removeToast","children","remove","getDefaults","tapToDismiss","toastClass" [...]
\ No newline at end of file
diff --git a/static/maps/libs/underscore.js.map b/static/maps/libs/underscore.js.map
new file mode 100644
index 0000000..a8f4ac9
--- /dev/null
+++ b/static/maps/libs/underscore.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"underscore.js","sources":["../../src/libs/underscore.js"],"names":["createReduce","dir","iterator","obj","iteratee","memo","keys","index","length","currentKey","context","optimizeCb","isArrayLike","_","arguments","createPredicateIndexFinder","array","predicate","cb","getLength","createIndexFinder","predicateFind","sortedIndex","item","idx","i","Math","max","min","slice","call","isNaN","collectNonEnumProps","nonEnumIdx","nonEnumerableProps","constructor","proto","isFu [...]
\ No newline at end of file
diff --git a/static/maps/mvc/annotation.js.map b/static/maps/mvc/annotation.js.map
new file mode 100644
index 0000000..8c38c43
--- /dev/null
+++ b/static/maps/mvc/annotation.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"annotation.js","sources":["../../src/mvc/annotation.js"],"names":["define","baseMVC","_l","AnnotationEditor","Backbone","View","extend","LoggableMixin","HiddenUntilActivatedViewMixin","tagName","className","initialize","options","this","tooltipConfig","placement","listenTo","model","render","hiddenUntilActivated","$activator","view","$el","html","_template","$annotation","make_text_editable","use_textarea","on_finish","newAnnotation","text","save","annotation","silen [...]
\ No newline at end of file
diff --git a/static/maps/mvc/base-mvc.js.map b/static/maps/mvc/base-mvc.js.map
new file mode 100644
index 0000000..5d372f2
--- /dev/null
+++ b/static/maps/mvc/base-mvc.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"base-mvc.js","sources":["../../src/mvc/base-mvc.js"],"names":["define","_","Backbone","addLogging","_l","mixin","args","Array","prototype","slice","call","arguments","lastArg","pop","unshift","defaults","apply","wrapTemplate","template","jsonNamespace","templateFn","join","json","view","templateVars","buildComparator","attribute_name","options","ascending","a","b","get","LoggableMixin","logger","_logNamespace","SessionStorageModel","Model","extend","initialize","init [...]
\ No newline at end of file
diff --git a/static/maps/mvc/base/controlled-fetch-collection.js.map b/static/maps/mvc/base/controlled-fetch-collection.js.map
new file mode 100644
index 0000000..9849a26
--- /dev/null
+++ b/static/maps/mvc/base/controlled-fetch-collection.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"controlled-fetch-collection.js","sources":["../../../src/mvc/base/controlled-fetch-collection.js"],"names":["define","_","Backbone","BASE_MVC","ControlledFetchCollection","Collection","extend","initialize","models","options","prototype","call","this","setOrder","order","silent","_setUpListeners","on","changed-order","sort","fetch","_buildFetchOptions","clone","self","traditional","data","_buildFetchData","filters","_buildFetchFilters","isEmpty","_fetchFiltersToAjaxDa [...]
\ No newline at end of file
diff --git a/static/maps/mvc/citation/citation-model.js.map b/static/maps/mvc/citation/citation-model.js.map
new file mode 100644
index 0000000..b2cd9fb
--- /dev/null
+++ b/static/maps/mvc/citation/citation-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"citation-model.js","sources":["../../../src/mvc/citation/citation-model.js"],"names":["define","parseBibtex","baseMVC","window","BibtexParser","logNamespace","Citation","Backbone","Model","extend","LoggableMixin","_logNamespace","defaults","content","initialize","parsed","this","attributes","err","errors","length","reduce","all","current","log","_fields","entry","_","first","entries","rawFields","Fields","key","value","lowerKey","toLowerCase","entryType","EntryType", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/citation/citation-view.js.map b/static/maps/mvc/citation/citation-view.js.map
new file mode 100644
index 0000000..8454efa
--- /dev/null
+++ b/static/maps/mvc/citation/citation-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"citation-view.js","sources":["../../../src/mvc/citation/citation-view.js"],"names":["define","baseMVC","citationModel","_l","CitationView","Backbone","View","extend","tagName","className","render","this","$el","append","formattedReference","model","entryType","fields","ref","authorsAndYear","_asSentence","author","year","title","pages","address","volume","number","journal","booktitle","howpublished","note","institution","type","_formatBookInfo","doiUrl","doi","url"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/collection-li-edit.js.map b/static/maps/mvc/collection/collection-li-edit.js.map
new file mode 100644
index 0000000..b059cf8
--- /dev/null
+++ b/static/maps/mvc/collection/collection-li-edit.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"collection-li-edit.js","sources":["../../../src/mvc/collection/collection-li-edit.js"],"names":["define","DC_LI","DATASET_LI_EDIT","DCListItemView","DCListItemEdit","extend","initialize","attributes","prototype","call","this","toString","modelString","model","DCEListItemView","DCEListItemEdit","DatasetDCEListItemEdit","DatasetListItemEdit","_fetchModelDetails","view","inReadyState","hasDetails","fetch","silent","jQuery","when","_renderDeleteButton","templates","_","t [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/collection-li.js.map b/static/maps/mvc/collection/collection-li.js.map
new file mode 100644
index 0000000..7fa1eb2
--- /dev/null
+++ b/static/maps/mvc/collection/collection-li.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"collection-li.js","sources":["../../../src/mvc/collection/collection-li.js"],"names":["define","LIST_ITEM","DATASET_LI","BASE_MVC","_l","FoldoutListItemView","ListItemView","DCListItemView","extend","className","prototype","id","this","model","get","join","initialize","attributes","linkTarget","hasUser","call","_setUpListeners","listenTo","_","has","changed","render","$","replaceWith","_renderSubtitle","templates","subtitle","toJSON","_getFoldoutPanelOptions","option [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/collection-model.js.map b/static/maps/mvc/collection/collection-model.js.map
new file mode 100644
index 0000000..bb9f1ce
--- /dev/null
+++ b/static/maps/mvc/collection/collection-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"collection-model.js","sources":["../../../src/mvc/collection/collection-model.js"],"names":["define","DATASET_MODEL","BASE_MVC","DatasetCollectionElementMixin","defaults","model_class","element_identifier","element_index","element_type","_mergeObject","attributes","_","extend","object","element_id","id","constructor","this","idAttribute","Backbone","Model","apply","arguments","parse","response","DatasetCollectionElement","LoggableMixin","_logNamespace","DCECollection [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/collection-view-edit.js.map b/static/maps/mvc/collection/collection-view-edit.js.map
new file mode 100644
index 0000000..57cf6b4
--- /dev/null
+++ b/static/maps/mvc/collection/collection-view-edit.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"collection-view-edit.js","sources":["../../../src/mvc/collection/collection-view-edit.js"],"names":["define","DC_VIEW","DC_MODEL","DC_EDIT","BASE_MVC","_l","_super","CollectionView","CollectionViewEdit","extend","DatasetDCEViewClass","DatasetDCEListItemEdit","NestedDCDCEViewClass","NestedDCDCEListItemEdit","initialize","attributes","prototype","call","this","_setUpBehaviors","$where","$el","model","Galaxy","user","isAnonymous","panel","nameSelector","find","attr","to [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/collection-view.js.map b/static/maps/mvc/collection/collection-view.js.map
new file mode 100644
index 0000000..d4bd26d
--- /dev/null
+++ b/static/maps/mvc/collection/collection-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"collection-view.js","sources":["../../../src/mvc/collection/collection-view.js"],"names":["define","LIST_VIEW","DC_MODEL","DC_LI","BASE_MVC","_l","logNamespace","_super","ModelListPanel","CollectionView","extend","_logNamespace","className","prototype","DatasetDCEViewClass","DatasetDCEListItemView","NestedDCDCEViewClass","NestedDCDCEListItemView","modelCollectionKey","initialize","attributes","call","this","linkTarget","hasUser","panelStack","parentName","foldoutStyl [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/list-collection-creator.js.map b/static/maps/mvc/collection/list-collection-creator.js.map
new file mode 100644
index 0000000..e89da4f
--- /dev/null
+++ b/static/maps/mvc/collection/list-collection-creator.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"list-collection-creator.js","sources":["../../../src/mvc/collection/list-collection-creator.js"],"names":["define","HDCA","STATES","BASE_MVC","UI_MODAL","naturalSort","_l","createListCollection","contents","elements","toJSON","promise","listCollectionCreatorModal","creationFn","name","map","element","id","src","history_content_type","createHDCA","logNamespace","DatasetCollectionElementView","Backbone","View","extend","LoggableMixin","_logNamespace","tagName","classNa [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/list-of-pairs-collection-creator.js.map b/static/maps/mvc/collection/list-of-pairs-collection-creator.js.map
new file mode 100644
index 0000000..085c028
--- /dev/null
+++ b/static/maps/mvc/collection/list-of-pairs-collection-creator.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"list-of-pairs-collection-creator.js","sources":["../../../src/mvc/collection/list-of-pairs-collection-creator.js"],"names":["define","levenshteinDistance","naturalSort","LIST_COLLECTION_CREATOR","baseMVC","_l","autoPairFnBuilder","options","getRegExps","_regexps","length","RegExp","this","filters","createPair","params","a","listA","splice","indexA","b","listB","indexB","aInBIndex","indexOf","bInAIndex","_pair","silent","preprocessMatch","regexps","call","_","extend", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/pair-collection-creator.js.map b/static/maps/mvc/collection/pair-collection-creator.js.map
new file mode 100644
index 0000000..e42cae1
--- /dev/null
+++ b/static/maps/mvc/collection/pair-collection-creator.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"pair-collection-creator.js","sources":["../../../src/mvc/collection/pair-collection-creator.js"],"names":["define","LIST_CREATOR","HDCA","BASE_MVC","_l","createPairCollection","contents","elements","toJSON","promise","pairCollectionCreatorModal","creationFn","name","src","id","createHDCA","logNamespace","PairedDatasetCollectionElementView","Backbone","View","extend","LoggableMixin","_logNamespace","tagName","className","initialize","attributes","this","element","iden [...]
\ No newline at end of file
diff --git a/static/maps/mvc/collection/paired-collection-creator.js.map b/static/maps/mvc/collection/paired-collection-creator.js.map
new file mode 100644
index 0000000..c540f8f
--- /dev/null
+++ b/static/maps/mvc/collection/paired-collection-creator.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"paired-collection-creator.js","sources":["../../../src/mvc/collection/paired-collection-creator.js"],"names":["define","levenshteinDistance","naturalSort","baseMVC","_l","autoPairFnBuilder","options","getRegExps","_regexps","length","RegExp","this","filters","createPair","params","debug","listA","indexA","name","listB","indexB","_pair","splice","silent","preprocessMatch","regexps","call","_","extend","matchTo","replace","possible","bestMatch","score","index","paired" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/dataset/data.js.map b/static/maps/mvc/dataset/data.js.map
new file mode 100644
index 0000000..f266f02
--- /dev/null
+++ b/static/maps/mvc/dataset/data.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"data.js","sources":["../../../src/mvc/dataset/data.js"],"names":["define","Modal","Frames","mod_icon_btn","DatasetMetadata","Backbone","Model","extend","Dataset","defaults","id","type","name","hda_ldda","metadata","initialize","this","get","_set_metadata","on","_","each","keys","attributes","k","indexOf","new_key","split","set","silent","get_metadata","attribute","urlRoot","Galaxy","root","TabularDataset","prototype","chunk_url","first_data_chunk","offset","at_eof"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/dataset/dataset-choice.js.map b/static/maps/mvc/dataset/dataset-choice.js.map
new file mode 100644
index 0000000..5280650
--- /dev/null
+++ b/static/maps/mvc/dataset/dataset-choice.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"dataset-choice.js","sources":["../../../src/mvc/dataset/dataset-choice.js"],"names":["define","DATASET","DATASET_LIST","MODAL","BASE_MVC","_l","_filterDatasetJSON","datasetJSON","where","datasetsOnly","matches","obj","toMatch","key","hasOwnProperty","filter","json","console","debug","deleted","visible","undefined","collection_type","logNamespace","DatasetChoiceModal","options","resolveWithSelected","promise","resolve","list","getSelectedModels","map","model","toJSON" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/dataset/dataset-li-edit.js.map b/static/maps/mvc/dataset/dataset-li-edit.js.map
new file mode 100644
index 0000000..e6fed7a
--- /dev/null
+++ b/static/maps/mvc/dataset/dataset-li-edit.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"dataset-li-edit.js","sources":["../../../src/mvc/dataset/dataset-li-edit.js"],"names":["define","STATES","DATASET_LI","TAGS","ANNOTATIONS","faIconButton","BASE_MVC","_l","_super","DatasetListItemView","DatasetListItemEdit","extend","initialize","attributes","prototype","call","this","hasUser","purgeAllowed","tagsEditorShown","annotationEditorShown","_renderPrimaryActions","actions","model","get","NOT_VIEWABLE","concat","_renderEditButton","_renderDeleteButton","DISCA [...]
\ No newline at end of file
diff --git a/static/maps/mvc/dataset/dataset-li.js.map b/static/maps/mvc/dataset/dataset-li.js.map
new file mode 100644
index 0000000..41ddd5f
--- /dev/null
+++ b/static/maps/mvc/dataset/dataset-li.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"dataset-li.js","sources":["../../../src/mvc/dataset/dataset-li.js"],"names":["define","LIST_ITEM","STATES","faIconButton","BASE_MVC","_l","logNamespace","_super","ListItemView","DatasetListItemView","extend","_logNamespace","className","prototype","id","this","model","get","join","initialize","attributes","logger","log","call","linkTarget","_setUpListeners","self","listenTo","change","changedAttributes","state","inReadyState","expanded","hasDetails","fetch","silent", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/dataset/dataset-list.js.map b/static/maps/mvc/dataset/dataset-list.js.map
new file mode 100644
index 0000000..7ed3395
--- /dev/null
+++ b/static/maps/mvc/dataset/dataset-list.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"dataset-list.js","sources":["../../../src/mvc/dataset/dataset-list.js"],"names":["define","LIST_VIEW","DATASET_LI","BASE_MVC","_l","logNamespace","_super","ListPanel","DatasetList","extend","_logNamespace","viewClass","DatasetListItemView","className","prototype","noneFoundMsg","initialize","attributes","call","this","toString","collection"],"mappings":"AAAAA,QACI,qBACA,yBACA,eACA,sBACD,SAAUC,EAAWC,EAAYC,EAAUC,GAC9C,YAEA,IAAIC,GAAe,UAKfC,EAASL,EAAUM,UAGnBC,EAAcF,EAAO [...]
\ No newline at end of file
diff --git a/static/maps/mvc/dataset/dataset-model.js.map b/static/maps/mvc/dataset/dataset-model.js.map
new file mode 100644
index 0000000..d41baf8
--- /dev/null
+++ b/static/maps/mvc/dataset/dataset-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"dataset-model.js","sources":["../../../src/mvc/dataset/dataset-model.js"],"names":["define","STATES","BASE_MVC","_l","logNamespace","searchableMixin","SearchableModelMixin","DatasetAssociation","Backbone","Model","extend","LoggableMixin","mixin","_logNamespace","defaults","state","NEW","deleted","purged","name","accessible","data_type","file_ext","file_size","meta_files","misc_blurb","misc_info","tags","initialize","attributes","options","this","debug","get","set","N [...]
\ No newline at end of file
diff --git a/static/maps/mvc/dataset/states.js.map b/static/maps/mvc/dataset/states.js.map
new file mode 100644
index 0000000..b18435b
--- /dev/null
+++ b/static/maps/mvc/dataset/states.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"states.js","sources":["../../../src/mvc/dataset/states.js"],"names":["define","STATES","UPLOAD","QUEUED","RUNNING","SETTING_METADATA","NEW","EMPTY","OK","PAUSED","FAILED_METADATA","NOT_VIEWABLE","DISCARDED","ERROR","READY_STATES","NOT_READY_STATES"],"mappings":"AAAAA,UACG,WAEH,YAKA,IAAIC,IAGAC,OAAsB,SAEtBC,OAAsB,SAEtBC,QAAsB,UAEtBC,iBAAsB,mBAItBC,IAAsB,MAEtBC,MAAsB,QAEtBC,GAAsB,KAGtBC,OAAsB,SAEtBC,gBAAsB,kBAGtBC,aAAsB,eAEtBC,UAAsB,YAEtBC,MAAsB,QAuBtB,OApBJZ,GAAOa,cAC [...]
\ No newline at end of file
diff --git a/static/maps/mvc/form/form-data.js.map b/static/maps/mvc/form/form-data.js.map
new file mode 100644
index 0000000..7809f2d
--- /dev/null
+++ b/static/maps/mvc/form/form-data.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"form-data.js","sources":["../../../src/mvc/form/form-data.js"],"names":["define","Manager","Backbone","Model","extend","initialize","app","this","checksum","sum","self","section","$el","find","each","id","$","attr","field","field_list","JSON","stringify","value","collapsed","create","add","flat_id","input_id","input_value","flat_dict","result_dict","element_list","convert","identifier","head","index","node","input","name","type","section_label","block_indices","block [...]
\ No newline at end of file
diff --git a/static/maps/mvc/form/form-input.js.map b/static/maps/mvc/form/form-input.js.map
new file mode 100644
index 0000000..fcde13d
--- /dev/null
+++ b/static/maps/mvc/form/form-input.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"form-input.js","sources":["../../../src/mvc/form/form-input.js"],"names":["define","Backbone","View","extend","initialize","app","options","this","app_options","field","model","Model","text_enable","text_disable","cls_enable","cls_disable","set","setElement","_template","$field","$","$info","$preview","$collapsible","$collapsible_text","$collapsible_icon","$title","$title_text","$error_text","$error","$backdrop","prepend","$el","collapsible_value","get","collapsed"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/form/form-parameters.js.map b/static/maps/mvc/form/form-parameters.js.map
new file mode 100644
index 0000000..992a854
--- /dev/null
+++ b/static/maps/mvc/form/form-parameters.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"form-parameters.js","sources":["../../../src/mvc/form/form-parameters.js"],"names":["define","Utils","Ui","SelectContent","SelectLibrary","SelectFtp","ColorPicker","Backbone","Model","extend","types","text","select","data_column","genomebuild","data","data_collection","integer","float","boolean","drill_down","color","hidden","hidden_data","baseurl","library_data","ftpfile","create","input_def","fieldClass","this","type","field","call","options","_fieldSelect","_field [...]
\ No newline at end of file
diff --git a/static/maps/mvc/form/form-repeat.js.map b/static/maps/mvc/form/form-repeat.js.map
new file mode 100644
index 0000000..a6c7ecc
--- /dev/null
+++ b/static/maps/mvc/form/form-repeat.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"form-repeat.js","sources":["../../../src/mvc/form/form-repeat.js"],"names":["define","Utils","Portlet","Ui","View","Backbone","extend","initialize","options","this","list","merge","title","empty_text","max","min","button_new","ButtonIcon","icon","tooltip","floating","cls","onclick","onnew","setElement","$","append","$list","$el","size","_","add","id","Galaxy","emit","debug","button_delete","ondel","portlet","operations","addClass","hide","fadeIn","disable","_refresh" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/form/form-section.js.map b/static/maps/mvc/form/form-section.js.map
new file mode 100644
index 0000000..ee0f82b
--- /dev/null
+++ b/static/maps/mvc/form/form-section.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"form-section.js","sources":["../../../src/mvc/form/form-section.js"],"names":["define","Utils","Ui","Portlet","Repeat","InputElement","Parameters","View","Backbone","extend","initialize","app","options","this","inputs","parameters","setElement","$","render","self","$el","empty","_","each","input","add","input_def","jQuery","id","uid","input_list","type","_addConditional","_addRepeat","_addSection","_addRow","test_param","sustain_conditionals","disabled","field","mode [...]
\ No newline at end of file
diff --git a/static/maps/mvc/form/form-view.js.map b/static/maps/mvc/form/form-view.js.map
new file mode 100644
index 0000000..09d7f87
--- /dev/null
+++ b/static/maps/mvc/form/form-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"form-view.js","sources":["../../../src/mvc/form/form-view.js"],"names":["define","Utils","Portlet","Ui","FormSection","FormData","Backbone","View","extend","initialize","options","this","merge","initial_errors","cls","icon","always_refresh","setElement","render","update","new_model","self","data","matchModel","node","input_id","input","input_list","_","isEqual","field","field_list","new_options","indexOf","type","i","opt","length","push","label","value","trigger","Ga [...]
\ No newline at end of file
diff --git a/static/maps/mvc/grid/grid-model.js.map b/static/maps/mvc/grid/grid-model.js.map
new file mode 100644
index 0000000..74efba0
--- /dev/null
+++ b/static/maps/mvc/grid/grid-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"grid-model.js","sources":["../../../src/mvc/grid/grid-model.js"],"names":["define","Backbone","Model","extend","defaults","url_base","async","async_ops","categorical_filters","filters","sort_key","show_item_checkboxes","advanced_search","cur_page","num_pages","operation","undefined","item_ids","can_async_op","op","_","indexOf","this","attributes","add_filter","key","value","append","new_val","cur_val","values","push","remove_filter","condition","condition_index","get [...]
\ No newline at end of file
diff --git a/static/maps/mvc/grid/grid-template.js.map b/static/maps/mvc/grid/grid-template.js.map
new file mode 100644
index 0000000..642528d
--- /dev/null
+++ b/static/maps/mvc/grid/grid-template.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"grid-template.js","sources":["../../../src/mvc/grid/grid-template.js"],"names":["define","Utils","grid","options","tmpl","embedded","this","grid_header","grid_table","info_text","title","global_actions","show_popup","length","i","action","label_cls","inbound","url_args","label","insert","grid_filters","header","show_item_checkboxes","items","columns","column","visible","key","href","extra","body","num_rows_rendered","items_length","item","encoded_id","encode_id","cur [...]
\ No newline at end of file
diff --git a/static/maps/mvc/grid/grid-view.js.map b/static/maps/mvc/grid/grid-view.js.map
new file mode 100644
index 0000000..f56e2c7
--- /dev/null
+++ b/static/maps/mvc/grid/grid-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"grid-view.js","sources":["../../../src/mvc/grid/grid-view.js"],"names":["jQuery","ajaxSettings","traditional","define","GridModel","Templates","PopupMenu","Backbone","View","extend","grid","initialize","grid_config","this","setElement","use_panels","$","css","padding","overflow","init_grid","handle_refresh","refresh_frames","inArray","top","Galaxy","currHistoryPanel","loadCurrentHistory","options","attributes","url","get","replace","set","$el","html","find","header", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/groups/group-detail-view.js.map b/static/maps/mvc/groups/group-detail-view.js.map
new file mode 100644
index 0000000..ef653f0
--- /dev/null
+++ b/static/maps/mvc/groups/group-detail-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-detail-view.js","sources":["../../../src/mvc/groups/group-detail-view.js"],"names":["define","mod_toastr","mod_group_model","GroupDetailView","Backbone","View","extend","el","options","app","initialize","this","_","window","globalTS","groups","collection","model","get","group_id","render","fetchGroup","that","Group","id","fetch","success","console","log","error","response","responseJSON","err_msg","template","templateRow","$el","html","group","$","tooltip","joi [...]
\ No newline at end of file
diff --git a/static/maps/mvc/groups/group-groupdetail-view.js.map b/static/maps/mvc/groups/group-groupdetail-view.js.map
new file mode 100644
index 0000000..4977e80
--- /dev/null
+++ b/static/maps/mvc/groups/group-groupdetail-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-groupdetail-view.js","sources":["../../../src/mvc/groups/group-groupdetail-view.js"],"names":["define","GroupDetailView","Backbone","View","extend","events","initialize","group","this","render","tmpl","templateRow","setElement","$el","show","_","template","join"],"mappings":"AAAAA,UACA,WAGA,GAAIC,GAAkBC,SAASC,KAAKC,QAChCC,UAGAC,WAAa,SAAUC,GACnBC,KAAKC,OAAQF,IAGjBE,OAAQ,SAAUF,GACd,GAAIG,GAAOF,KAAKG,aAGhB,OAFAH,MAAKI,WAAWF,GAAQH,MAAMA,KAC9BC,KAAKK,IAAIC,OACFN,MAG [...]
\ No newline at end of file
diff --git a/static/maps/mvc/groups/group-grouprow-view.js.map b/static/maps/mvc/groups/group-grouprow-view.js.map
new file mode 100644
index 0000000..29804f6
--- /dev/null
+++ b/static/maps/mvc/groups/group-grouprow-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-grouprow-view.js","sources":["../../../src/mvc/groups/group-grouprow-view.js"],"names":["define","GroupRowView","Backbone","View","extend","events","initialize","group","this","render","tmpl","templateRow","setElement","$el","show","_","template","join"],"mappings":"AAAAA,UACA,WAGA,GAAIC,GAAeC,SAASC,KAAKC,QAC7BC,UAGAC,WAAa,SAAUC,GACnBC,KAAKC,OAAOF,IAGhBE,OAAQ,SAAUF,GACd,GAAIG,GAAOF,KAAKG,aAIhB,OAFAH,MAAKI,WAAWF,GAAQH,MAAMA,KAC9BC,KAAKK,IAAIC,OACFN,MAaXG,YAAa,WA [...]
\ No newline at end of file
diff --git a/static/maps/mvc/groups/group-list-view.js.map b/static/maps/mvc/groups/group-list-view.js.map
new file mode 100644
index 0000000..3fce756
--- /dev/null
+++ b/static/maps/mvc/groups/group-list-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-list-view.js","sources":["../../../src/mvc/groups/group-list-view.js"],"names":["define","mod_toastr","mod_group_model","mod_group_listrow_view","GroupListView","Backbone","View","extend","el","defaults","initialize","options","this","_","that","window","globalTS","groups","collection","Groups","fetch","success","model","console","log","render","error","response","responseJSON","err_msg","$","hide","template","templateGroupsList","$el","html","length","order"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/groups/group-listrow-view.js.map b/static/maps/mvc/groups/group-listrow-view.js.map
new file mode 100644
index 0000000..a3a5007
--- /dev/null
+++ b/static/maps/mvc/groups/group-listrow-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-listrow-view.js","sources":["../../../src/mvc/groups/group-listrow-view.js"],"names":["define","GroupListRowView","Backbone","View","extend","events","initialize","options","this","render","group","tmpl","templateRow","setElement","$el","show","_","template","join"],"mappings":"AAAAA,UACA,WAGA,GAAIC,GAAmBC,SAASC,KAAKC,QACjCC,UAEAC,WAAa,SAAUC,GACnBC,KAAKC,OAAQF,EAAQG,QAGzBD,OAAQ,SAAUC,GACd,GAAIC,GAAOH,KAAKI,aAGhB,OAFAJ,MAAKK,WAAWF,GAAQD,MAAMA,KAC9BF,KAAKM,IAAIC, [...]
\ No newline at end of file
diff --git a/static/maps/mvc/groups/group-model.js.map b/static/maps/mvc/groups/group-model.js.map
new file mode 100644
index 0000000..7cf1622
--- /dev/null
+++ b/static/maps/mvc/groups/group-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-model.js","sources":["../../../src/mvc/groups/group-model.js"],"names":["define","Group","Backbone","Model","extend","urlRoot","Groups","Collection","url","model"],"mappings":"AAAAA,UAAW,WAKP,GAAIC,GAAQC,SAASC,MAAMC,QACzBC,QAAS,gBAGPC,EAASJ,SAASK,WAAWH,QAC/BI,IAAK,cAELC,MAAOR,GAIb,QACIA,MAAOA,EACPK,OAAQA"}
\ No newline at end of file
diff --git a/static/maps/mvc/groups/group.model.js.map b/static/maps/mvc/groups/group.model.js.map
new file mode 100644
index 0000000..8b65022
--- /dev/null
+++ b/static/maps/mvc/groups/group.model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group.model.js","sources":["../../../src/mvc/groups/group.model.js"],"names":["define","Group","Backbone","Model","extend","urlRoot","Groups","Collection","url","model"],"mappings":"AAAAA,UAAW,WAKP,GAAIC,GAAQC,SAASC,MAAMC,QACzBC,QAAS,iBAGPC,EAASJ,SAASK,WAAWH,QAC/BI,IAAK,cAELC,MAAOR,GAIb,QACIA,MAAOA,EACPK,OAAQA"}
\ No newline at end of file
diff --git a/static/maps/mvc/history/copy-dialog.js.map b/static/maps/mvc/history/copy-dialog.js.map
new file mode 100644
index 0000000..911aa35
--- /dev/null
+++ b/static/maps/mvc/history/copy-dialog.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"copy-dialog.js","sources":["../../../src/mvc/history/copy-dialog.js"],"names":["define","MODAL","ERROR_MODAL","_l","CopyDialog","defaultName","_","template","title","submitLabel","errorMessage","progressive","activeLabel","allLabel","anonWarning","_template","join","_showAjaxIndicator","indicator","this","modal","$","empty","append","css","margin-top","dialog","history","options","checkNameAndCopy","name","val","show","copyAllDatasets","prop","copy","done","response" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/hda-li-edit.js.map b/static/maps/mvc/history/hda-li-edit.js.map
new file mode 100644
index 0000000..678d371
--- /dev/null
+++ b/static/maps/mvc/history/hda-li-edit.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"hda-li-edit.js","sources":["../../../src/mvc/history/hda-li-edit.js"],"names":["define","DATASET_LI_EDIT","HDA_LI","BASE_MVC","_l","_super","DatasetListItemEdit","HDAListItemEdit","extend","className","prototype","_fetchModelDetails","view","this","model","inReadyState","hasDetails","fetch","silent","has","jQuery","when","data","keys","join","events","_","clone","click .unhide-link","unhide","toString","modelString","templates","warnings","hidden","wrapTemplate","tit [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/hda-li.js.map b/static/maps/mvc/history/hda-li.js.map
new file mode 100644
index 0000000..ab9a378
--- /dev/null
+++ b/static/maps/mvc/history/hda-li.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"hda-li.js","sources":["../../../src/mvc/history/hda-li.js"],"names":["define","DATASET_LI","BASE_MVC","_l","_super","DatasetListItemView","HDAListItemView","extend","className","prototype","initialize","attributes","options","call","this","toString","modelString","model","templates","titleBarTemplate","wrapTemplate","warnings","_","hidden","titleBar"],"mappings":"AAAAA,QACI,yBACA,eACA,sBACD,SAAUC,EAAYC,EAAUC,GAEnC,YAGA,IAAIC,GAASH,EAAWI,oBAMpBC,EAAkBF,EAAOG,QAGzBC,UA [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/hda-model.js.map b/static/maps/mvc/history/hda-model.js.map
new file mode 100644
index 0000000..1f3c6f5
--- /dev/null
+++ b/static/maps/mvc/history/hda-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"hda-model.js","sources":["../../../src/mvc/history/hda-model.js"],"names":["define","DATASET","HISTORY_CONTENT","BASE_MVC","_super","DatasetAssociation","hcontentMixin","HistoryContentMixin","HistoryDatasetAssociation","extend","mixin","defaults","_","prototype","history_content_type","model_class"],"mappings":"AAAAA,QACI,4BACA,oCACA,eACA,sBACD,SAAUC,EAASC,EAAiBC,GACvC,YAGA,IAAIC,GAASH,EAAQI,mBACjBC,EAAgBJ,EAAgBK,oBAGhCC,EAA4BJ,EAAOK,OAAQN,EAASO,MAAOJ,GAI3DK,SAAWC,EA [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/hdca-li-edit.js.map b/static/maps/mvc/history/hdca-li-edit.js.map
new file mode 100644
index 0000000..b83fa6e
--- /dev/null
+++ b/static/maps/mvc/history/hdca-li-edit.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"hdca-li-edit.js","sources":["../../../src/mvc/history/hdca-li-edit.js"],"names":["define","HDCA_LI","DC_VIEW_EDIT","faIconButton","_l","_super","HDCAListItemView","HDCAListItemEdit","extend","_getFoldoutPanelClass","this","model","get","ListCollectionViewEdit","PairCollectionViewEdit","ListOfPairsCollectionViewEdit","ListOfListsCollectionViewEdit","TypeError","_renderPrimaryActions","log","prototype","call","concat","_renderDeleteButton","self","deleted","title","cla [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/hdca-li.js.map b/static/maps/mvc/history/hdca-li.js.map
new file mode 100644
index 0000000..b95b69c
--- /dev/null
+++ b/static/maps/mvc/history/hdca-li.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"hdca-li.js","sources":["../../../src/mvc/history/hdca-li.js"],"names":["define","STATES","DC_LI","DC_VIEW","BASE_MVC","_l","_super","DCListItemView","HDCAListItemView","extend","className","prototype","_setUpListeners","call","this","listenTo","model","change:populated change:visible","render","_getFoldoutPanelClass","get","ListCollectionView","PairCollectionView","ListOfPairsCollectionView","ListOfListsCollectionView","TypeError","_swapNewRender","$newRender","state [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/hdca-model.js.map b/static/maps/mvc/history/hdca-model.js.map
new file mode 100644
index 0000000..389160b
--- /dev/null
+++ b/static/maps/mvc/history/hdca-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"hdca-model.js","sources":["../../../src/mvc/history/hdca-model.js"],"names":["define","DC_MODEL","HISTORY_CONTENT","buildHDCASave","_super","attributes","options","this","isNew","url","urlRoot","get","type","call","hcontentMixin","HistoryContentMixin","ListDC","ListDatasetCollection","PairDC","PairDatasetCollection","ListPairedDC","ListPairedDatasetCollection","ListOfListsDC","ListOfListsDatasetCollection","HistoryListDatasetCollection","extend","defaults","_","clone [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-content-model.js.map b/static/maps/mvc/history/history-content-model.js.map
new file mode 100644
index 0000000..7a4f2ef
--- /dev/null
+++ b/static/maps/mvc/history/history-content-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-content-model.js","sources":["../../../src/mvc/history/history-content-model.js"],"names":["define","HistoryContentMixin","defaults","history_id","history_content_type","hid","visible","idAttribute","hidden","this","get","isVisible","includeDeleted","includeHidden","urlRoot","Galaxy","root","url","hide","options","save","jQuery","when","unhide","toString","join"],"mappings":"AAAAA,QACI,qBACA,eACA,sBACD,WACH,YAKA,IAAIC,IAGAC,UAEIC,WAAsB,KAEtBC,qBAAsB,KAEtBC,IA [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-contents.js.map b/static/maps/mvc/history/history-contents.js.map
new file mode 100644
index 0000000..2d2c193
--- /dev/null
+++ b/static/maps/mvc/history/history-contents.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-contents.js","sources":["../../../src/mvc/history/history-contents.js"],"names":["define","CONTROLLED_FETCH_COLLECTION","HDA_MODEL","HDCA_MODEL","HISTORY_PREFS","BASE_MVC","AJAX_QUEUE","_super","PaginatedCollection","HistoryContents","extend","LoggableMixin","_logNamespace","model","attrs","options","history_content_type","HistoryDatasetAssociation","collection_type","HistoryListDatasetCollection","HistoryPairDatasetCollection","HistoryListPairedDatasetCollec [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-model.js.map b/static/maps/mvc/history/history-model.js.map
new file mode 100644
index 0000000..072590a
--- /dev/null
+++ b/static/maps/mvc/history/history-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-model.js","sources":["../../../src/mvc/history/history-model.js"],"names":["define","HISTORY_CONTENTS","HISTORY_PREFS","CONTROLLED_FETCH_COLLECTION","UTILS","BASE_MVC","_l","History","Backbone","Model","extend","LoggableMixin","mixin","SearchableModelMixin","_logNamespace","UPDATE_DELAY","defaults","model_class","id","name","state","deleted","contents_active","contents_states","urlRoot","Galaxy","root","contentsClass","HistoryContents","searchAttributes","sea [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-preferences.js.map b/static/maps/mvc/history/history-preferences.js.map
new file mode 100644
index 0000000..b3c358f
--- /dev/null
+++ b/static/maps/mvc/history/history-preferences.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-preferences.js","sources":["../../../src/mvc/history/history-preferences.js"],"names":["define","BASE_MVC","HistoryPrefs","SessionStorageModel","extend","defaults","expandedIds","show_deleted","show_hidden","addExpanded","model","current","this","get","id","save","removeExpanded","isExpanded","contentId","_","result","allExpanded","values","clearExpanded","set","includeDeleted","val","isUndefined","includeHidden","toString","storageKeyPrefix","historyStorageK [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-structure-view.js.map b/static/maps/mvc/history/history-structure-view.js.map
new file mode 100644
index 0000000..197abae
--- /dev/null
+++ b/static/maps/mvc/history/history-structure-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-structure-view.js","sources":["../../../src/mvc/history/history-structure-view.js"],"names":["define","JobDAG","JOB","JOB_LI","DATASET_LI","BASE_MVC","logNamespace","window","HistoryStructureComponent","Backbone","View","extend","LoggableMixin","_logNamespace","className","_INITIAL_ZOOM_LEVEL","_MIN_ZOOM_LEVEL","_LINK_ID_SEP","_VERTEX_NAME_DATA_KEY","JobItemClass","JobListItemView","ContentItemClass","DatasetListItemView","initialize","attributes","this","log [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-view-annotated.js.map b/static/maps/mvc/history/history-view-annotated.js.map
new file mode 100644
index 0000000..de37bf3
--- /dev/null
+++ b/static/maps/mvc/history/history-view-annotated.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-view-annotated.js","sources":["../../../src/mvc/history/history-view-annotated.js"],"names":["define","HISTORY_VIEW","HDA_LI","HDCA_LI","BASE_MVC","_l","_super","HistoryView","AnnotatedHistoryView","extend","className","prototype","_buildNewRender","$newRender","call","this","renderHistoryAnnotation","annotation","model","get","find","text","renderItems","$whereTo","$el","$controls","remove","$","append","appendTo","self","views","_renderItemView$el","view"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-view-edit-current.js.map b/static/maps/mvc/history/history-view-edit-current.js.map
new file mode 100644
index 0000000..96fc804
--- /dev/null
+++ b/static/maps/mvc/history/history-view-edit-current.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-view-edit-current.js","sources":["../../../src/mvc/history/history-view-edit-current.js"],"names":["define","HISTORY_MODEL","HISTORY_VIEW_EDIT","BASE_MVC","_l","HistoryViewPrefs","SessionStorageModel","extend","defaults","tagsEditorShown","annotationEditorShown","scrollPosition","toString","JSON","stringify","this","toJSON","storageKey","_super","HistoryViewEdit","CurrentHistoryView","className","prototype","HDCAViewClass","foldoutStyle","emptyMsg","join","in [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-view-edit.js.map b/static/maps/mvc/history/history-view-edit.js.map
new file mode 100644
index 0000000..ef40208
--- /dev/null
+++ b/static/maps/mvc/history/history-view-edit.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-view-edit.js","sources":["../../../src/mvc/history/history-view-edit.js"],"names":["define","HISTORY_VIEW","HISTORY_CONTENTS","STATES","HDA_MODEL","HDA_LI_EDIT","HDCA_LI_EDIT","TAGS","ANNOTATIONS","LIST_COLLECTION_CREATOR","PAIR_COLLECTION_CREATOR","LIST_OF_PAIRS_COLLECTION_CREATOR","faIconButton","PopupMenu","BASE_MVC","_l","_super","HistoryView","HistoryViewEdit","extend","HDAViewClass","HDAListItemEdit","HDCAViewClass","HDCAListItemEdit","initialize","attr [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/history-view.js.map b/static/maps/mvc/history/history-view.js.map
new file mode 100644
index 0000000..f0b0030
--- /dev/null
+++ b/static/maps/mvc/history/history-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"history-view.js","sources":["../../../src/mvc/history/history-view.js"],"names":["define","LIST_VIEW","HISTORY_MODEL","HISTORY_CONTENTS","HISTORY_PREFS","HDA_LI","HDCA_LI","USER","ERROR_MODAL","faIconButton","BASE_MVC","_l","_super","ModelListPanel","HistoryView","extend","_logNamespace","HDAViewClass","HDAListItemView","HDCAViewClass","HDCAListItemView","collectionClass","HistoryContents","modelCollectionKey","tagName","className","prototype","emptyMsg","noneFoundMs [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/job-dag.js.map b/static/maps/mvc/history/job-dag.js.map
new file mode 100644
index 0000000..3106f30
--- /dev/null
+++ b/static/maps/mvc/history/job-dag.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"job-dag.js","sources":["../../../src/mvc/history/job-dag.js"],"names":["define","GRAPH","addLogging","_super","Graph","JobDAG","options","self","this","filters","_jobsData","_historyContentsMap","_toolMap","_outputIdToJobMap","noInputJobs","noOutputJobs","filteredSetMetadata","filteredErroredJobs","dataKeys","call","_","pick","omit","prototype","constructor","init","defaults","excludeSetMetadata","_initFilters","push","jobData","job","tool_id","id","excludeErroredJob [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/multi-panel.js.map b/static/maps/mvc/history/multi-panel.js.map
new file mode 100644
index 0000000..a8b703d
--- /dev/null
+++ b/static/maps/mvc/history/multi-panel.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"multi-panel.js","sources":["../../../src/mvc/history/multi-panel.js"],"names":["define","HISTORY_MODEL","HISTORY_VIEW_EDIT","historyCopyDialog","ERROR_MODAL","baseMVC","ajaxQueue","logNamespace","HistoryViewColumn","Backbone","View","extend","LoggableMixin","_logNamespace","tagName","className","id","this","model","get","initialize","options","purgeAllowed","_","isUndefined","panel","createPanel","setUpListeners","panelOptions","HistoryViewEdit","defaults","dragItems [...]
\ No newline at end of file
diff --git a/static/maps/mvc/history/options-menu.js.map b/static/maps/mvc/history/options-menu.js.map
new file mode 100644
index 0000000..37b8b79
--- /dev/null
+++ b/static/maps/mvc/history/options-menu.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"options-menu.js","sources":["../../../src/mvc/history/options-menu.js"],"names":["define","PopupMenu","historyCopyDialog","BASE_MVC","_l","Webhooks","buildMenu","isAnon","purgeAllowed","urlRoot","_","clone","menu","filter","menuOption","anon","purge","href","target","confirm","func","galaxy_main","location","html","header","Galaxy","currHistoryPanel","createNewHistory","model","done","loadCurrentHistory","window","id","collapseAll","filtered","contents","hidden","aja [...]
\ No newline at end of file
diff --git a/static/maps/mvc/job/job-li.js.map b/static/maps/mvc/job/job-li.js.map
new file mode 100644
index 0000000..53e9c6e
--- /dev/null
+++ b/static/maps/mvc/job/job-li.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"job-li.js","sources":["../../../src/mvc/job/job-li.js"],"names":["define","LIST_ITEM","DATASET_LIST","BASE_MVC","_l","_super","FoldoutListItemView","JobListItemView","extend","className","prototype","id","this","model","get","join","foldoutPanelClass","DatasetList","initialize","attributes","logger","log","call","tool","jobData","linkTarget","_swapNewRender","$newRender","has","$el","addClass","_getFoldoutPanelOptions","options","_","collection","outputCollection","s [...]
\ No newline at end of file
diff --git a/static/maps/mvc/job/job-model.js.map b/static/maps/mvc/job/job-model.js.map
new file mode 100644
index 0000000..bed0d7b
--- /dev/null
+++ b/static/maps/mvc/job/job-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"job-model.js","sources":["../../../src/mvc/job/job-model.js"],"names":["define","HISTORY_CONTENTS","STATES","AJAX_QUEUE","BASE_MVC","logNamespace","searchableMixin","SearchableModelMixin","Job","Backbone","Model","extend","LoggableMixin","mixin","_logNamespace","defaults","model_class","tool_id","exit_code","inputs","outputs","params","create_time","update_time","state","NEW","parse","response","this","parseParams","newParams","_","each","value","key","JSON","initial [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-dataset-view.js.map b/static/maps/mvc/library/library-dataset-view.js.map
new file mode 100644
index 0000000..cecbc1c
--- /dev/null
+++ b/static/maps/mvc/library/library-dataset-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-dataset-view.js","sources":["../../../src/mvc/library/library-dataset-view.js"],"names":["define","mod_toastr","mod_library_model","mod_utils","mod_select","LibraryDatasetView","Backbone","View","extend","el","model","options","events","click .toolbtn_modify_dataset","click .toolbtn_cancel_modifications","click .toolbtn-download-dataset","click .toolbtn-import-dataset","click .toolbtn-share-dataset","click .btn-copy-link-to-clipboard","click .btn-make-private [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-folder-view.js.map b/static/maps/mvc/library/library-folder-view.js.map
new file mode 100644
index 0000000..6d44856
--- /dev/null
+++ b/static/maps/mvc/library/library-folder-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-folder-view.js","sources":["../../../src/mvc/library/library-folder-view.js"],"names":["define","mod_toastr","mod_library_model","mod_select","FolderView","Backbone","View","extend","el","model","options","events","click .toolbtn_save_permissions","initialize","this","_","id","fetchFolder","FolderAsModel","that","fetch","success","show_permissions","showPermissions","render","error","response","responseJSON","err_msg","onclick","Galaxy","libraries","library_r [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-folderlist-view.js.map b/static/maps/mvc/library/library-folderlist-view.js.map
new file mode 100644
index 0000000..a3ff814
--- /dev/null
+++ b/static/maps/mvc/library/library-folderlist-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-folderlist-view.js","sources":["../../../src/mvc/library/library-folderlist-view.js"],"names":["define","mod_masthead","mod_utils","mod_toastr","mod_library_model","mod_library_folderrow_view","FolderListView","Backbone","View","extend","el","progress","progressStep","folderContainer","sort","events","click #select-all-checkboxes","click .dataset_row","click .folder_row","click .sort-folder-link","collection","defaults","include_deleted","page_count","show_pa [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-folderrow-view.js.map b/static/maps/mvc/library/library-folderrow-view.js.map
new file mode 100644
index 0000000..9952524
--- /dev/null
+++ b/static/maps/mvc/library/library-folderrow-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-folderrow-view.js","sources":["../../../src/mvc/library/library-folderrow-view.js"],"names":["define","mod_toastr","mod_library_model","mod_library_dataset_view","FolderRowView","Backbone","View","extend","events","click .undelete_dataset_btn","click .undelete_folder_btn","click .edit_folder_btn","click .cancel_folder_btn","click .save_folder_btn","defaults","type","visibility_config","edit_folder_btn","save_folder_btn","cancel_folder_btn","permission_folder_ [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-foldertoolbar-view.js.map b/static/maps/mvc/library/library-foldertoolbar-view.js.map
new file mode 100644
index 0000000..3467916
--- /dev/null
+++ b/static/maps/mvc/library/library-foldertoolbar-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-foldertoolbar-view.js","sources":["../../../src/mvc/library/library-foldertoolbar-view.js"],"names":["define","mod_masthead","mod_utils","mod_toastr","mod_library_model","mod_select","FolderToolbarView","Backbone","View","extend","el","events","click #toolbtn_create_folder","click #toolbtn_bulk_import","click #include_deleted_datasets_chk","click #toolbtn_bulk_delete","click .toolbtn-show-locinfo","click .page_size_prompt","defaults","can_add_library_item","c [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-library-view.js.map b/static/maps/mvc/library/library-library-view.js.map
new file mode 100644
index 0000000..7932f03
--- /dev/null
+++ b/static/maps/mvc/library/library-library-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-library-view.js","sources":["../../../src/mvc/library/library-library-view.js"],"names":["define","mod_toastr","mod_library_model","mod_select","LibraryView","Backbone","View","extend","el","model","options","events","click .toolbtn_save_permissions","initialize","this","_","id","fetchLibrary","Library","that","fetch","success","show_permissions","showPermissions","render","error","response","responseJSON","err_msg","onclick","Galaxy","libraries","library_rou [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-librarylist-view.js.map b/static/maps/mvc/library/library-librarylist-view.js.map
new file mode 100644
index 0000000..a3d5f33
--- /dev/null
+++ b/static/maps/mvc/library/library-librarylist-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-librarylist-view.js","sources":["../../../src/mvc/library/library-librarylist-view.js"],"names":["define","mod_masthead","mod_baseMVC","mod_utils","mod_toastr","mod_library_model","mod_library_libraryrow_view","_","LibraryListView","Backbone","View","extend","el","events","click .sort-libraries-link","defaults","page_count","show_page","all_fetched","initialize","options","this","that","modal","collection","Libraries","url","urlRoot","fetch","success","render [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-libraryrow-view.js.map b/static/maps/mvc/library/library-libraryrow-view.js.map
new file mode 100644
index 0000000..11319dc
--- /dev/null
+++ b/static/maps/mvc/library/library-libraryrow-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-libraryrow-view.js","sources":["../../../src/mvc/library/library-libraryrow-view.js"],"names":["define","mod_masthead","mod_utils","mod_toastr","LibraryRowView","Backbone","View","extend","events","click .edit_library_btn","click .cancel_library_btn","click .save_library_btn","click .delete_library_btn","click .undelete_library_btn","edit_mode","element_visibility_config","upload_library_btn","edit_library_btn","permission_library_btn","save_library_btn","can [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-librarytoolbar-view.js.map b/static/maps/mvc/library/library-librarytoolbar-view.js.map
new file mode 100644
index 0000000..630aaca
--- /dev/null
+++ b/static/maps/mvc/library/library-librarytoolbar-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-librarytoolbar-view.js","sources":["../../../src/mvc/library/library-librarytoolbar-view.js"],"names":["define","mod_toastr","mod_library_model","LibraryToolbarView","Backbone","View","extend","el","defaults","search_term","events","click #create_new_library_btn","click #include_deleted_chk","click #lib_page_size_prompt","keyup .library-search-input","initialize","options","this","_","render","toolbar_template","templateToolBar","is_admin","is_anonym","Galaxy [...]
\ No newline at end of file
diff --git a/static/maps/mvc/library/library-model.js.map b/static/maps/mvc/library/library-model.js.map
new file mode 100644
index 0000000..1c7f215
--- /dev/null
+++ b/static/maps/mvc/library/library-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"library-model.js","sources":["../../../src/mvc/library/library-model.js"],"names":["define","Library","Backbone","Model","extend","urlRoot","Galaxy","root","isVisible","show_deleted","this","get","Libraries","Collection","model","sort_key","sort_order","initialize","options","search","search_term","lowercase_term","toLowerCase","filter","data","lowercase_name","indexOf","getVisible","filters","filteredLibraries","item","sortByNameAsc","comparator","libraryA","library [...]
\ No newline at end of file
diff --git a/static/maps/mvc/list/list-item.js.map b/static/maps/mvc/list/list-item.js.map
new file mode 100644
index 0000000..6a4e6a1
--- /dev/null
+++ b/static/maps/mvc/list/list-item.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"list-item.js","sources":["../../../src/mvc/list/list-item.js"],"names":["define","BASE_MVC","logNamespace","ExpandableView","Backbone","View","extend","LoggableMixin","_logNamespace","initialize","attributes","this","expanded","log","fxSpeed","undefined","render","speed","$newRender","_buildNewRender","_setUpBehaviors","_queueNewRender","$","templates","el","model","toJSON","$details","replaceWith","_renderDetails","show","view","_swapNewRender","trigger","queue","ne [...]
\ No newline at end of file
diff --git a/static/maps/mvc/list/list-panel.js.map b/static/maps/mvc/list/list-panel.js.map
new file mode 100644
index 0000000..12af867
--- /dev/null
+++ b/static/maps/mvc/list/list-panel.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"list-panel.js","sources":["../../../src/mvc/list/list-panel.js"],"names":["define","LIST_ITEM","LoadingIndicator","BASE_MVC","_l","logNamespace","ListPanel","Backbone","View","extend","LoggableMixin","_logNamespace","viewClass","ListItemView","collectionClass","Collection","tagName","className","fxSpeed","emptyMsg","noneFoundMsg","searchPlaceholder","initialize","attributes","logger","this","log","_","has","filters","searchFor","indicator","$el","selecting","undefine [...]
\ No newline at end of file
diff --git a/static/maps/mvc/list/list-view.js.map b/static/maps/mvc/list/list-view.js.map
new file mode 100644
index 0000000..b434a2a
--- /dev/null
+++ b/static/maps/mvc/list/list-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"list-view.js","sources":["../../../src/mvc/list/list-view.js"],"names":["define","LIST_ITEM","LoadingIndicator","BASE_MVC","_l","logNamespace","ListPanel","Backbone","View","extend","LoggableMixin","_logNamespace","viewClass","ListItemView","collectionClass","Collection","tagName","className","fxSpeed","emptyMsg","noneFoundMsg","searchPlaceholder","initialize","attributes","logger","this","log","_","has","filters","searchFor","selecting","undefined","selected","lastS [...]
\ No newline at end of file
diff --git a/static/maps/mvc/tag.js.map b/static/maps/mvc/tag.js.map
new file mode 100644
index 0000000..fc80e50
--- /dev/null
+++ b/static/maps/mvc/tag.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tag.js","sources":["../../src/mvc/tag.js"],"names":["define","baseMVC","_l","TagsEditor","Backbone","View","extend","LoggableMixin","HiddenUntilActivatedViewMixin","tagName","className","initialize","options","this","listenTo","model","render","hiddenUntilActivated","$activator","view","$el","html","_template","$input","select2","placeholder","width","tags","_getTagsUsed","_setUpBehaviors","tagsToCSV","join","tagsArray","get","_","isArray","isEmpty","map","tag","esca [...]
\ No newline at end of file
diff --git a/static/maps/mvc/tool/tool-form-base.js.map b/static/maps/mvc/tool/tool-form-base.js.map
new file mode 100644
index 0000000..9f58820
--- /dev/null
+++ b/static/maps/mvc/tool/tool-form-base.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool-form-base.js","sources":["../../../src/mvc/tool/tool-form-base.js"],"names":["define","Utils","Deferred","Ui","FormBase","CitationModel","CitationView","extend","initialize","options","self","this","prototype","call","deferred","inputs","_buildForm","execute","process","_buildModel","listen_to_history","parent","Galaxy","currHistoryPanel","listenTo","collection","refresh","$el","on","remove","reset","_updateModel","hide","emit","debug","merge","icon","title","na [...]
\ No newline at end of file
diff --git a/static/maps/mvc/tool/tool-form-composite.js.map b/static/maps/mvc/tool/tool-form-composite.js.map
new file mode 100644
index 0000000..5f382f8
--- /dev/null
+++ b/static/maps/mvc/tool/tool-form-composite.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool-form-composite.js","sources":["../../../src/mvc/tool/tool-form-composite.js"],"names":["define","Utils","Deferred","Ui","Form","FormData","ToolFormBase","Modal","Webhooks","View","Backbone","extend","initialize","options","self","this","modal","parent","Galaxy","model","Model","deferred","setElement","$","addClass","append","$message","$header","$steps","$el","_configure","render","_refresh","on","scroll","window","resize","margin","_","reduce","children","memo" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/tool/tool-form-workflow.js.map b/static/maps/mvc/tool/tool-form-workflow.js.map
new file mode 100644
index 0000000..24232d4
--- /dev/null
+++ b/static/maps/mvc/tool/tool-form-workflow.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool-form-workflow.js","sources":["../../../src/mvc/tool/tool-form-workflow.js"],"names":["define","Utils","ToolFormBase","View","Backbone","extend","initialize","options","self","this","workflow","node","setElement","post_job_actions","deepeach","inputs","input","type","indexOf","info","name","textify","extensions","value","__class__","collapsible_value","is_workflow","length","test_param","undefined","_makeSections","form","merge","text_enable","text_disable","narr [...]
\ No newline at end of file
diff --git a/static/maps/mvc/tool/tool-form.js.map b/static/maps/mvc/tool/tool-form.js.map
new file mode 100644
index 0000000..a5f22b5
--- /dev/null
+++ b/static/maps/mvc/tool/tool-form.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool-form.js","sources":["../../../src/mvc/tool/tool-form.js"],"names":["define","Utils","Ui","Modal","ToolFormBase","Webhooks","View","Backbone","extend","initialize","options","self","this","modal","parent","Galaxy","form","merge","listen_to_history","always_refresh","customize","buttons","execute","execute_btn","Button","icon","tooltip","name","version","title","cls","floating","onclick","wait","portlet","disable","submit","unwait","enable","job_id","job_remap","i [...]
\ No newline at end of file
diff --git a/static/maps/mvc/tool/tool-template.js.map b/static/maps/mvc/tool/tool-template.js.map
new file mode 100644
index 0000000..8657b59
--- /dev/null
+++ b/static/maps/mvc/tool/tool-template.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool-template.js","sources":["../../../src/mvc/tool/tool-template.js"],"names":["define","error","response","$","append","text","addClass","css","color","height","JSON","stringify","undefined"],"mappings":"AAAAA,UAAW,WACX,OACIC,MAAO,SAAUC,GACb,MAAQC,GAAG,SAAUC,OAAQD,EAAG,QACPE,KAAM,sGACVD,OAAQD,EAAG,eAAgBG,SAAU,eAAgBC,KAAOC,MAAO,QAASC,OAAU,qBAClFJ,KAAMK,KAAKC,UAAWT,EAAUU,OAAW"}
\ No newline at end of file
diff --git a/static/maps/mvc/tool/tool-webhooks.js.map b/static/maps/mvc/tool/tool-webhooks.js.map
new file mode 100644
index 0000000..db8b097
--- /dev/null
+++ b/static/maps/mvc/tool/tool-webhooks.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool-webhooks.js","sources":["../../../src/mvc/tool/tool-webhooks.js"],"names":["define","galaxyRoot","Galaxy","root","WebhookModel","Backbone","Model","extend","urlRoot","defaults","name","type","styles","script","WebhookView","View","el","initialize","me","this","model","fetch","success","render","webhook","toJSON","$el","html","$","text","appendTo"],"mappings":"AAGAA,UAAW,WACP,GAAIC,GAA8B,mBAAVC,QAAwBA,OAAOC,KAAO,IAE1DC,EAAeC,SAASC,MAAMC,QAC9BC,QAAUP,EAAa,wBACvBQ, [...]
\ No newline at end of file
diff --git a/static/maps/mvc/tool/tools.js.map b/static/maps/mvc/tool/tools.js.map
new file mode 100644
index 0000000..39ea8d8
--- /dev/null
+++ b/static/maps/mvc/tool/tools.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tools.js","sources":["../../../src/mvc/tool/tools.js"],"names":["define","_","util","data","ToolForm","VisibilityMixin","hidden","show","this","set","hide","toggle","get","is_visible","attributes","ToolParameter","Backbone","Model","extend","defaults","name","label","type","value","html","num_samples","initialize","unescape","copy","toJSON","set_value","ToolParameterCollection","Collection","model","DataToolParameter","IntegerToolParameter","parseInt","get_samples"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/tours.js.map b/static/maps/mvc/tours.js.map
new file mode 100644
index 0000000..bfafd51
--- /dev/null
+++ b/static/maps/mvc/tours.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tours.js","sources":["../../src/mvc/tours.js"],"names":["define","gxy_root","Galaxy","root","tour_opts","storage","window","sessionStorage","onEnd","removeItem","delay","orphan","hooked_tour_from_data","data","_","each","steps","step","preclick","onShow","$","click","postclick","onHide","textinsert","onShown","element","val","trigger","TourItem","Backbone","Model","extend","urlRoot","Tours","Collection","url","model","giveTour","tour_id","getJSON","tourdata","setItem [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/error-modal.js.map b/static/maps/mvc/ui/error-modal.js.map
new file mode 100644
index 0000000..f59f61c
--- /dev/null
+++ b/static/maps/mvc/ui/error-modal.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"error-modal.js","sources":["../../../src/mvc/ui/error-modal.js"],"names":["define","_l","_errorModal","message","title","details","Galaxy","modal","show","body","closing_events","buttons","Ok","hide","$el","addClass","$","add","remove","appendTo","append","text","DETAILS_MSG","JSON","stringify","click","toggle","errorModal","window","alert","console","log","offlineErrorModal","badGatewayErrorModal","CONTACT_MSG","ajaxErrorModal","model","xhr","options","DEFAULT_AJAX_ [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/icon-button.js.map b/static/maps/mvc/ui/icon-button.js.map
new file mode 100644
index 0000000..6351e69
--- /dev/null
+++ b/static/maps/mvc/ui/icon-button.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"icon-button.js","sources":["../../../src/mvc/ui/icon-button.js"],"names":["define","IconButton","Backbone","Model","extend","defaults","title","icon_class","on_click","menu_options","is_menu_button","id","href","target","enabled","visible","tooltip_config","IconButtonView","View","initialize","this","model","attributes","placement","bind","render","$el","tooltip","new_elem","template","toJSON","get","replaceWith","setElement","events","click","event","_","isFunction" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/popup-menu.js.map b/static/maps/mvc/ui/popup-menu.js.map
new file mode 100644
index 0000000..e43e299
--- /dev/null
+++ b/static/maps/mvc/ui/popup-menu.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"popup-menu.js","sources":["../../../src/mvc/ui/popup-menu.js"],"names":["define","PopupMenu","Backbone","View","extend","initialize","$button","options","this","length","$","data","menu","click","event","remove","_renderAndShow","clickEvent","render","$el","appendTo","css","_getShownPosition","show","_setUpCloseBehavior","addClass","hide","position","html","template","attr","find","each","i","option","func","children","call","preventDefault","id","_templateOptions"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-buttons.js.map b/static/maps/mvc/ui/ui-buttons.js.map
new file mode 100644
index 0000000..e759b1b
--- /dev/null
+++ b/static/maps/mvc/ui/ui-buttons.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-buttons.js","sources":["../../../src/mvc/ui/ui-buttons.js"],"names":["define","Utils","ButtonDefault","Backbone","View","extend","initialize","options","this","model","Model","id","uid","title","floating","icon","cls","wait","wait_text","wait_cls","disabled","percentage","set","setElement","$","attr","append","$icon","$title","$progress","$progress_bar","listenTo","render","self","attributes","$el","removeClass","addClass","css","off","on","hide","onclick","toolti [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-color-picker.js.map b/static/maps/mvc/ui/ui-color-picker.js.map
new file mode 100644
index 0000000..2941a87
--- /dev/null
+++ b/static/maps/mvc/ui/ui-color-picker.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-color-picker.js","sources":["../../../src/mvc/ui/ui-color-picker.js"],"names":["define","Utils","Backbone","View","extend","colors","standard","base","theme","initialize","options","this","merge","setElement","_template","$panel","$","$view","$value","$header","_build","visible","value","$boxes","self","on","css","trigger","fadeIn","fadeOut","new_val","undefined","empty","_getValue","html","_templateCheck","onchange","hex","x","parseInt","toString","slice","rgb"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-drilldown.js.map b/static/maps/mvc/ui/ui-drilldown.js.map
new file mode 100644
index 0000000..6e4b2d8
--- /dev/null
+++ b/static/maps/mvc/ui/ui-drilldown.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-drilldown.js","sources":["../../../src/mvc/ui/ui-drilldown.js"],"names":["define","Utils","Options","View","BaseIcons","extend","initialize","options","type","display","multiple","prototype","call","this","_setValue","new_value","undefined","header_index","self","values","$","isArray","_","each","v","list","element","_setState","header_id","is_expanded","$button","$subgroup","data","show","removeClass","addClass","hide","_templateOptions","attach","$el","find","on [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-frames.js.map b/static/maps/mvc/ui/ui-frames.js.map
new file mode 100644
index 0000000..4bdde40
--- /dev/null
+++ b/static/maps/mvc/ui/ui-frames.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-frames.js","sources":["../../../src/mvc/ui/ui-frames.js"],"names":["define","FrameView","Backbone","View","extend","initialize","options","this","model","Model","setElement","$","addClass","$el","append","tooltip","title","placement","$header","$title","$content","render","listenTo","self","attributes","html","find","remove","_","each","menu","option","$option","icon","isFunction","disabled","attr","on","onclick","url","indexOf","content","defaultOptions","frame", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-list.js.map b/static/maps/mvc/ui/ui-list.js.map
new file mode 100644
index 0000000..c493d75
--- /dev/null
+++ b/static/maps/mvc/ui/ui-list.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-list.js","sources":["../../../src/mvc/ui/ui-list.js"],"names":["define","Utils","Portlet","Ui","View","Backbone","extend","initialize","options","self","this","name","multiple","message","Message","portlet","cls","select","Select","optional","button","ButtonIcon","icon","floating","tooltip","onclick","add","id","value","text","setElement","_template","$","append","$el","val","undefined","empty","isArray","i","v","v_id","v_name","type","_refresh","lst","each","push [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-misc.js.map b/static/maps/mvc/ui/ui-misc.js.map
new file mode 100644
index 0000000..e7195dc
--- /dev/null
+++ b/static/maps/mvc/ui/ui-misc.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-misc.js","sources":["../../../src/mvc/ui/ui-misc.js"],"names":["define","Utils","Select","Slider","Options","Drilldown","Buttons","Modal","Label","Backbone","View","extend","tagName","initialize","options","this","model","Model","setElement","$","listenTo","render","title","new_title","set","value","get","$el","removeClass","addClass","html","Message","message","status","cls","persistent","fade","update","timeout","window","clearTimeout","self","setTimeout","fadeO [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-modal.js.map b/static/maps/mvc/ui/ui-modal.js.map
new file mode 100644
index 0000000..bc40aac
--- /dev/null
+++ b/static/maps/mvc/ui/ui-modal.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-modal.js","sources":["../../../src/mvc/ui/ui-modal.js"],"names":["define","View","Backbone","extend","className","optionsDefault","container","title","cls","body","backdrop","height","width","closing_events","closing_callback","title_separator","buttonList","initialize","options","this","_","defaults","$","prepend","el","render","show","visible","$el","fadeIn","self","document","on","e","keyCode","hide","$backdrop","canceled","fadeOut","off","html","_template","$h [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-options.js.map b/static/maps/mvc/ui/ui-options.js.map
new file mode 100644
index 0000000..e1b644a
--- /dev/null
+++ b/static/maps/mvc/ui/ui-options.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-options.js","sources":["../../../src/mvc/ui/ui-options.js"],"names":["define","Utils","Buttons","Base","Backbone","View","extend","initialize","options","self","this","model","Model","visible","data","id","uid","error_text","wait_text","multiple","optional","onchange","set","listenTo","_changeValue","_changeWait","_changeData","_changeVisible","on","get","value","render","$el","empty","removeClass","addClass","append","$message","$","$menu","$options","_template", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-popover.js.map b/static/maps/mvc/ui/ui-popover.js.map
new file mode 100644
index 0000000..e2e3a44
--- /dev/null
+++ b/static/maps/mvc/ui/ui-popover.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-popover.js","sources":["../../../src/mvc/ui/ui-popover.js"],"names":["define","Utils","View","Backbone","extend","optionsDefault","with_close","title","placement","container","body","initialize","options","this","setElement","_template","uid","_","defaults","parent","append","el","$title","$","$close","$body","self","on","e","visible","is","target","has","length","hide","render","html","$el","removeClass","addClass","css","_get_placement","show","off","newTitle"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-portlet.js.map b/static/maps/mvc/ui/ui-portlet.js.map
new file mode 100644
index 0000000..f0d451e
--- /dev/null
+++ b/static/maps/mvc/ui/ui-portlet.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-portlet.js","sources":["../../../src/mvc/ui/ui-portlet.js"],"names":["define","Utils","Ui","View","Backbone","extend","visible","initialize","options","self","this","model","Model","id","uid","cls","title","icon","buttons","body","scrollable","nopadding","operations","collapsible","collapsible_button","collapsed","set","setElement","_template","$body","$","$title_text","$title_icon","$header","$content","$backdrop","$buttons","$operations","get","append","ButtonIc [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-select-content.js.map b/static/maps/mvc/ui/ui-select-content.js.map
new file mode 100644
index 0000000..889b7c4
--- /dev/null
+++ b/static/maps/mvc/ui/ui-select-content.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-select-content.js","sources":["../../../src/mvc/ui/ui-select-content.js"],"names":["define","Utils","Ui","Select","Batch","DISABLED","ENABLED","LINKED","Configurations","data","src","icon","tooltip","multiple","batch","data_multiple","data_collection","workflow_data","workflow_data_multiple","workflow_data_collection","module_data","module_data_collection","View","Backbone","extend","initialize","options","self","this","model","Model","src_labels","hda","hdca","pa [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-select-default.js.map b/static/maps/mvc/ui/ui-select-default.js.map
new file mode 100644
index 0000000..8bf51e7
--- /dev/null
+++ b/static/maps/mvc/ui/ui-select-default.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-select-default.js","sources":["../../../src/mvc/ui/ui-select-default.js"],"names":["define","Utils","Buttons","View","Backbone","extend","initialize","options","self","this","data","data2","model","Model","id","uid","cls","error_text","empty_text","visible","wait","multiple","searchable","optional","disabled","onchange","value","selectall","pagesize","set","on","get","listenTo","_changeData","_changeDisabled","_changeWait","_changeVisible","_changeValue","render", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-select-ftp.js.map b/static/maps/mvc/ui/ui-select-ftp.js.map
new file mode 100644
index 0000000..19b9129
--- /dev/null
+++ b/static/maps/mvc/ui/ui-select-ftp.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-select-ftp.js","sources":["../../../src/mvc/ui/ui-select-ftp.js"],"names":["define","Utils","List","View","Backbone","extend","initialize","options","self","this","ftpfile_list","name","optional","multiple","onchange","value","setElement","$el","get","url","Galaxy","root","success","response","data","i","push","label","update","val"],"mappings":"AACAA,QAAQ,cAAe,kBACf,SAASC,EAAOC,GAKxB,GAAIC,GAAOC,SAASD,KAAKE,QAErBC,WAAa,SAASC,GAElB,GAAIC,GAAOC,IAGXA,MAAKC,aAAe,GAA [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-select-library.js.map b/static/maps/mvc/ui/ui-select-library.js.map
new file mode 100644
index 0000000..bc5ea2f
--- /dev/null
+++ b/static/maps/mvc/ui/ui-select-library.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-select-library.js","sources":["../../../src/mvc/ui/ui-select-library.js"],"names":["define","Utils","Ui","Table","List","Libraries","Backbone","Collection","extend","url","Galaxy","root","LibraryDatasets","initialize","self","this","config","Model","library_id","on","fetch","reset","get","View","options","libraries","datasets","library_select","Select","onchange","value","set","dataset_list","name","optional","multiple","trigger","data","each","model","push","id", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-select.js.map b/static/maps/mvc/ui/ui-select.js.map
new file mode 100644
index 0000000..7d887ae
--- /dev/null
+++ b/static/maps/mvc/ui/ui-select.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-select.js","sources":["../../../src/mvc/ui/ui-select.js"],"names":["define","Utils","View","Backbone","extend","optionsDefault","css","placeholder","data","value","multiple","minimumInputLength","initialData","initialize","options","this","merge","setElement","_template","container","console","log","append","$el","select_data","_refresh","_setValue","self","onchange","on","new_value","before","_getValue","undefined","after","text","select2","disabled","enable","di [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-slider.js.map b/static/maps/mvc/ui/ui-slider.js.map
new file mode 100644
index 0000000..2c1fea4
--- /dev/null
+++ b/static/maps/mvc/ui/ui-slider.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-slider.js","sources":["../../../src/mvc/ui/ui-slider.js"],"names":["define","Utils","View","Backbone","extend","initialize","options","self","this","merge","id","uid","min","max","step","precise","split","setElement","_template","useslider","$slider","$","slider","on","event","ui","value","css","$text","undefined","pressed","val","e","which","onchange","v","is_workflow","preventDefault","indexOf","_isParameter","new_val","isNaN","Math","String","substring"],"mappi [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-table.js.map b/static/maps/mvc/ui/ui-table.js.map
new file mode 100644
index 0000000..8d29475
--- /dev/null
+++ b/static/maps/mvc/ui/ui-table.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-table.js","sources":["../../../src/mvc/ui/ui-table.js"],"names":["define","Utils","View","Backbone","extend","row","row_count","optionsDefault","content","onchange","ondblclick","onconfirm","cls","cls_tr","events","click","dblclick","initialize","options","this","merge","$el","$","_template","$thead","find","$tbody","$tmessage","setElement","_row","addHeader","wrapper","append","appendHeader","add","width","align","css","id","fade","_commit","prepend","get","del", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-tabs.js.map b/static/maps/mvc/ui/ui-tabs.js.map
new file mode 100644
index 0000000..595ec93
--- /dev/null
+++ b/static/maps/mvc/ui/ui-tabs.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-tabs.js","sources":["../../../src/mvc/ui/ui-tabs.js"],"names":["define","View","Backbone","extend","initialize","options","this","collection","Collection","model","Model","onchange","visible","set","setElement","$","_template","$nav","$content","$el","on","hide","render","listenTo","_add","_remove","_change","_reset","id","get","length","first","children","removeClass","addClass","size","current","show","add","del","remove","delAll","reset","showTab","hideTab","ta [...]
\ No newline at end of file
diff --git a/static/maps/mvc/ui/ui-thumbnails.js.map b/static/maps/mvc/ui/ui-thumbnails.js.map
new file mode 100644
index 0000000..d6d4562
--- /dev/null
+++ b/static/maps/mvc/ui/ui-thumbnails.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ui-thumbnails.js","sources":["../../../src/mvc/ui/ui-thumbnails.js"],"names":["define","Utils","Ui","Tabs","View","Backbone","extend","events","click .ui-thumbnails-item","dblclick .ui-thumbnails-item","initialize","options","this","model","Model","collection","Collection","get","tabs","setElement","$el","addClass","render","listenTo","first","delAll","_renderDefault","_renderList","self","title_length","$","each","indexOf","title","append","_templateThumbnailItem"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/composite/composite-row.js.map b/static/maps/mvc/upload/composite/composite-row.js.map
new file mode 100644
index 0000000..6a02ca1
--- /dev/null
+++ b/static/maps/mvc/upload/composite/composite-row.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"composite-row.js","sources":["../../../../src/mvc/upload/composite/composite-row.js"],"names":["define","Utils","UploadSettings","UploadFtp","Popover","Ui","Backbone","View","extend","status_classes","init","ready","running","success","error","initialize","app","options","self","this","model","setElement","_template","$source","$","$settings","$status","$text","$text_content","$info_text","$info_progress","$file_name","$file_desc","$file_size","$progress_bar","$perce [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/composite/composite-view.js.map b/static/maps/mvc/upload/composite/composite-view.js.map
new file mode 100644
index 0000000..50ed379
--- /dev/null
+++ b/static/maps/mvc/upload/composite/composite-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"composite-view.js","sources":["../../../../src/mvc/upload/composite/composite-view.js"],"names":["define","Utils","UploadModel","UploadRow","Popover","Select","Ui","Backbone","View","extend","collection","Collection","initialize","app","self","this","options","list_extensions","list_genomes","ftp_upload_site","currentFtp","setElement","_template","btnStart","Button","title","onclick","_eventStart","btnClose","modal","hide","_","each","button","$","prepend","$el","sel [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/default/default-row.js.map b/static/maps/mvc/upload/default/default-row.js.map
new file mode 100644
index 0000000..272a372
--- /dev/null
+++ b/static/maps/mvc/upload/default/default-row.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"default-row.js","sources":["../../../../src/mvc/upload/default/default-row.js"],"names":["define","Utils","UploadModel","UploadSettings","Popover","Select","Backbone","View","extend","status_classes","init","queued","running","success","error","initialize","app","options","self","this","model","setElement","_template","$mode","$","$title","$text","$size","$info_text","$info_progress","$text_content","$settings","$symbol","$progress_bar","$percentage","settings","titl [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/default/default-view.js.map b/static/maps/mvc/upload/default/default-view.js.map
new file mode 100644
index 0000000..03149f3
--- /dev/null
+++ b/static/maps/mvc/upload/default/default-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"default-view.js","sources":["../../../../src/mvc/upload/default/default-view.js"],"names":["define","Utils","UploadModel","UploadRow","UploadFtp","Popover","Select","Ui","Backbone","View","extend","upload_size","collection","Collection","counter","announce","success","error","running","reset","this","initialize","app","self","options","list_extensions","list_genomes","ui_button","ftp_upload_site","currentFtp","setElement","_template","btnLocal","Button","id","title", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/upload-button.js.map b/static/maps/mvc/upload/upload-button.js.map
new file mode 100644
index 0000000..cdf7152
--- /dev/null
+++ b/static/maps/mvc/upload/upload-button.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"upload-button.js","sources":["../../../src/mvc/upload/upload-button.js"],"names":["define","View","Backbone","extend","initialize","options","self","this","model","Model","icon","tooltip","label","percentage","status","onunload","onclick","set","setElement","_template","$progress","$","listenTo","render","window","on","get","attributes","$el","off","e","title","placement","removeClass","addClass","css","width"],"mappings":"AACAA,UAAY,WACR,GAAIC,GAAOC,SAASD,KAAKE,QACr [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/upload-ftp.js.map b/static/maps/mvc/upload/upload-ftp.js.map
new file mode 100644
index 0000000..819a814
--- /dev/null
+++ b/static/maps/mvc/upload/upload-ftp.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"upload-ftp.js","sources":["../../../src/mvc/upload/upload-ftp.js"],"names":["define","Utils","Backbone","View","extend","initialize","options","self","this","merge","class_add","class_remove","class_partial","collection","onchange","onadd","onremove","setElement","_template","rows","get","url","Galaxy","root","success","ftp_files","_fill","error","length","$","html","_templateTable","size","index","push","_add","bytesToString","show","$select_all","addClass","on","ad [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/upload-model.js.map b/static/maps/mvc/upload/upload-model.js.map
new file mode 100644
index 0000000..441d139
--- /dev/null
+++ b/static/maps/mvc/upload/upload-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"upload-model.js","sources":["../../../src/mvc/upload/upload-model.js"],"names":["define","Model","Backbone","extend","defaults","extension","genome","url_paste","status","info","file_name","file_mode","file_size","file_type","file_path","file_data","percentage","space_to_tab","to_posix_lines","enabled","reset","attr","this","clear","set","Collection","model"],"mappings":"AAAAA,UAAY,WACR,GAAIC,GAAQC,SAASD,MAAME,QACvBC,UACIC,UAAkB,OAClBC,OAAkB,IAClBC,UAAkB,GAClBC,OAAkB [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/upload-row.js.map b/static/maps/mvc/upload/upload-row.js.map
new file mode 100644
index 0000000..4923eec
--- /dev/null
+++ b/static/maps/mvc/upload/upload-row.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"upload-row.js","sources":["../../../src/mvc/upload/upload-row.js"],"names":["define","Utils","UploadModel","UploadSettings","Popover","Select","Backbone","View","extend","options","padding","status_classes","init","queued","running","success","error","settings","select_genome","select_extension","initialize","app","this","self","model","Model","setElement","_template","it","$el","title","container","find","placement","default_genome","value","css","onchange","genome" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/upload-settings.js.map b/static/maps/mvc/upload/upload-settings.js.map
new file mode 100644
index 0000000..c769a7e
--- /dev/null
+++ b/static/maps/mvc/upload/upload-settings.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"upload-settings.js","sources":["../../../src/mvc/upload/upload-settings.js"],"names":["define","Backbone","View","extend","options","class_check","class_uncheck","parameters","id","title","initialize","this","model","setElement","$","addClass","$el","append","$cover","$table","listenTo","render","trigger","self","empty","_","each","parameter","$checkbox","get","on","set"],"mappings":"AACAA,QAAU,eAAiB,WACvB,MAAOC,UAASC,KAAKC,QACjBC,SACIC,YAAkB,oBAClBC,cAAkB,cAClBC,aAC [...]
\ No newline at end of file
diff --git a/static/maps/mvc/upload/upload-view.js.map b/static/maps/mvc/upload/upload-view.js.map
new file mode 100644
index 0000000..d7cf96a
--- /dev/null
+++ b/static/maps/mvc/upload/upload-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"upload-view.js","sources":["../../../src/mvc/upload/upload-view.js"],"names":["define","Utils","Modal","Tabs","UploadButton","UploadViewDefault","UploadViewComposite","Backbone","View","extend","options","nginx_upload_path","ftp_upload_site","default_genome","default_extension","height","width","auto","id","text","description","list_extensions","list_genomes","initialize","self","this","merge","ui_button","onclick","e","preventDefault","show","onunload","percentage", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/user/change-password.js.map b/static/maps/mvc/user/change-password.js.map
new file mode 100644
index 0000000..95659ff
--- /dev/null
+++ b/static/maps/mvc/user/change-password.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"change-password.js","sources":["../../../src/mvc/user/change-password.js"],"names":["define","Manage","ChangePassword","Backbone","View","extend","initialize","data","this","render","renderMessage","msg","status","template","self","$","css","remove","ManageUserInformation","prototype","hideErrorDoneMessage","append","on","e","savePassword","show","url","Galaxy","root","current","val","password","confirm","messageBar","change_password_button","token","getJSON","respon [...]
\ No newline at end of file
diff --git a/static/maps/mvc/user/extra-information.js.map b/static/maps/mvc/user/extra-information.js.map
new file mode 100644
index 0000000..86b3662
--- /dev/null
+++ b/static/maps/mvc/user/extra-information.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"extra-information.js","sources":["../../../src/mvc/user/extra-information.js"],"names":["define","Manage","ExtraInformation","Backbone","View","extend","initialize","data","this","render","data_plugin","template","self","item_object","model","plugins","plugin_name","is_plugin_empty","$","hide","Object","keys","length","JSON","parse","undefined","item","input_val","i","input_object","name","label","type","append","on","e","saveExtraInformation","remove","show","url"," [...]
\ No newline at end of file
diff --git a/static/maps/mvc/user/manage-user-information.js.map b/static/maps/mvc/user/manage-user-information.js.map
new file mode 100644
index 0000000..94cdd48
--- /dev/null
+++ b/static/maps/mvc/user/manage-user-information.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"manage-user-information.js","sources":["../../../src/mvc/user/manage-user-information.js"],"names":["define","ManageUserInformation","Backbone","View","extend","original_email","original_username","user_id","address_id","initialize","data","this","initializeSection","render","validateString","test_string","type","mail_re","username_re","test","renderError","message","$el_errormessage","$","hide","length","html","show","prepend","renderDone","$el_donemessage","saveUse [...]
\ No newline at end of file
diff --git a/static/maps/mvc/user/user-model.js.map b/static/maps/mvc/user/user-model.js.map
new file mode 100644
index 0000000..845d3ed
--- /dev/null
+++ b/static/maps/mvc/user/user-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"user-model.js","sources":["../../../src/mvc/user/user-model.js"],"names":["define","_","Backbone","baseMVC","_l","logNamespace","User","Model","extend","LoggableMixin","_logNamespace","urlRoot","Galaxy","root","defaults","id","username","email","total_disk_usage","nice_total_disk_usage","quota_percent","is_admin","initialize","data","this","log","on","model","resp","changes","isAnonymous","get","isAdmin","loadFromApi","idOrCurrent","options","CURRENT_ID_STR","userFn" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/user/user-quotameter.js.map b/static/maps/mvc/user/user-quotameter.js.map
new file mode 100644
index 0000000..17b3889
--- /dev/null
+++ b/static/maps/mvc/user/user-quotameter.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"user-quotameter.js","sources":["../../../src/mvc/user/user-quotameter.js"],"names":["define","baseMVC","_l","logNamespace","UserQuotaMeter","Backbone","View","extend","LoggableMixin","_logNamespace","options","warnAtPercent","errorAtPercent","initialize","this","log","_","listenTo","model","render","update","loadFromApi","get","isOverQuota","_render_quota","modelJson","toJSON","percent","quota_percent","$meter","$","_templateQuotaMeter","$bar","find","attr","css","tr [...]
\ No newline at end of file
diff --git a/static/maps/mvc/visualization/visualization-model.js.map b/static/maps/mvc/visualization/visualization-model.js.map
new file mode 100644
index 0000000..0e61f11
--- /dev/null
+++ b/static/maps/mvc/visualization/visualization-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"visualization-model.js","sources":["../../../src/mvc/visualization/visualization-model.js"],"names":["Visualization","Backbone","Model","extend","defaults","config","urlRoot","apiUrl","Galaxy","root","initialize","data","_","isObject","this","_setUpListeners","set","key","val","oldConfig","get","clone","prototype","call","toString","idAndTitle","VisualizationCollection","Collection","model","url","models","options","collection","map","existing","id","merged","toJSON" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/webhooks.js.map b/static/maps/mvc/webhooks.js.map
new file mode 100644
index 0000000..9f51bb7
--- /dev/null
+++ b/static/maps/mvc/webhooks.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"webhooks.js","sources":["../../src/mvc/webhooks.js"],"names":["define","WebhookModel","Backbone","Model","extend","defaults","activate","Webhooks","Collection","model","WebhookView","View","el","initialize","options","me","this","urlRoot","fetch","success","render","webhook","toJSON","$el","html","name","styles","$","type","text","appendTo","script","add","webhooks","url","Galaxy","root","async","callback"],"mappings":"AAGAA,UAAW,WAEP,GAAIC,GAAeC,SAASC,MAAMC,QAC9BC,U [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-canvas.js.map b/static/maps/mvc/workflow/workflow-canvas.js.map
new file mode 100644
index 0000000..c0ae36d
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-canvas.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-canvas.js","sources":["../../../src/mvc/workflow/workflow-canvas.js"],"names":["define","CanvasManager","app","canvas_viewport","overview","this","cv","cc","find","oc","ov","init_drag","ScrollPanel","panel","$","extend","prototype","self","move","x","y","Math","min","width","max","height","css","left","top","background-position-x","background-position-y","update_viewport_overlay","each","scroll_panel","x_adjust","y_adjust","bind","o","offset","p","position", [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-connector.js.map b/static/maps/mvc/workflow/workflow-connector.js.map
new file mode 100644
index 0000000..3271d91
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-connector.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-connector.js","sources":["../../../src/mvc/workflow/workflow-connector.js"],"names":["define","Connector","handle1","handle2","this","canvas","dragging","inner_color","outer_color","connect","$","extend","prototype","t1","t2","destroy","disconnect","remove","destroyIfInvalid","attachable","redraw","canvas_container","document","createElement","append","style","zIndex","relativeLeft","e","offset","left","relativeTop","top","start_x","element","start_y","end_x [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-manager.js.map b/static/maps/mvc/workflow/workflow-manager.js.map
new file mode 100644
index 0000000..cf2bdee
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-manager.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-manager.js","sources":["../../../src/mvc/workflow/workflow-manager.js"],"names":["define","Connector","Toastr","Workflow","app","canvas_container","this","id_counter","nodes","name","has_changes","active_form_has_changes","nodeLabels","workflowOutputLabels","$","extend","prototype","canLabelNodeWith","label","registerNodeLabel","unregisterNodeLabel","updateNodeLabel","fromLabel","toLabel","warning","attemptUpdateNodeLabel","node","setLabel","canLabelOutputWi [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-node.js.map b/static/maps/mvc/workflow/workflow-node.js.map
new file mode 100644
index 0000000..b893976
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-node.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-node.js","sources":["../../../src/mvc/workflow/workflow-node.js"],"names":["define","NodeView","Node","Backbone","Model","extend","initialize","app","attr","this","element","input_terminals","output_terminals","tool_errors","workflow_outputs","getWorkflowOutput","outputName","_","findWhere","output_name","isWorkflowOutput","undefined","removeWorkflowOutput","splice","addWorkflowOutput","label","output","push","labelWorkflowOutput","changed","oldLabel","workf [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-terminals.js.map b/static/maps/mvc/workflow/workflow-terminals.js.map
new file mode 100644
index 0000000..ec9bada
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-terminals.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-terminals.js","sources":["../../../src/mvc/workflow/workflow-terminals.js"],"names":["define","Globals","CollectionTypeDescription","collectionType","this","isCollection","rank","split","length","$","extend","prototype","append","otherCollectionTypeDescription","NULL_COLLECTION_TYPE_DESCRIPTION","ANY_COLLECTION_TYPE_DESCRIPTION","otherCollectionType","canMatch","canMapOver","requiredSuffix","_endsWith","effectiveMapOver","effectiveCollectionType","substring" [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-view-data.js.map b/static/maps/mvc/workflow/workflow-view-data.js.map
new file mode 100644
index 0000000..adfb2b8
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-view-data.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-view-data.js","sources":["../../../src/mvc/workflow/workflow-view-data.js"],"names":["define","Globals","DataInputView","Backbone","View","extend","className","initialize","options","this","input","nodeView","terminalElement","$el","attr","name","html","label","skipResize","css","position","left","top","display","$","append","el","updateMaxWidth","outerWidth","remove","DataOutputView","output","node","isInput","extensions","indexOf","join","calloutView","typ [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-view-node.js.map b/static/maps/mvc/workflow/workflow-view-node.js.map
new file mode 100644
index 0000000..e3520e4
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-view-node.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-view-node.js","sources":["../../../src/mvc/workflow/workflow-view-node.js"],"names":["define","_","TerminalViews","DataViews","Backbone","View","extend","initialize","options","this","node","output_width","Math","max","$el","width","tool_body","find","remove","newInputsDiv","appendTo","terminalViews","outputViews","render","renderToolLabel","renderToolErrors","css","min","$","text","label","name","tool_errors","addClass","removeClass","updateMaxWidth","newWi [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-view-terminals.js.map b/static/maps/mvc/workflow/workflow-view-terminals.js.map
new file mode 100644
index 0000000..c9643de
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-view-terminals.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-view-terminals.js","sources":["../../../src/mvc/workflow/workflow-view-terminals.js"],"names":["define","Globals","Terminals","Connector","TerminalMappingView","Backbone","View","extend","tagName","className","initialize","mapText","this","$el","tooltip","delay","title","model","bind","_","render","mapOver","isCollection","show","hide","InputTerminalMappingView","events","click","mouseenter","mouseleave","onMouseEnter","terminal","connected","css","onMouseLe [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow-view.js.map b/static/maps/mvc/workflow/workflow-view.js.map
new file mode 100644
index 0000000..d1f2f51
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow-view.js","sources":["../../../src/mvc/workflow/workflow-view.js"],"names":["define","Utils","Globals","Workflow","WorkflowCanvas","Node","ToolForm","Ui","async_save_text","Toastr","reset_tool_search","initValue","tool_menu_frame","$","contents","length","document","this","removeClass","find","hide","show","each","attr","hasClass","search_input","val","NODE_ICONS","tool","data_input","data_collection_input","subworkflow","pause","add_node_icon","$to_el","node [...]
\ No newline at end of file
diff --git a/static/maps/mvc/workflow/workflow.js.map b/static/maps/mvc/workflow/workflow.js.map
new file mode 100644
index 0000000..25e27b4
--- /dev/null
+++ b/static/maps/mvc/workflow/workflow.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"workflow.js","sources":["../../../src/mvc/workflow/workflow.js"],"names":["define","Utils","Globals","Workflow","WorkflowCanvas","Node","ToolsForm","Backbone","View","extend","initialize","options","layout_editor","self","workflow","layout","fit_canvas_to_nodes","scroll_to_nodes","canvas_manager","draw_overview","edit_workflow_attributes","clear_active_node","$","hide","show","show_overview","jStorage","set","css","hide_overview","app","this","urls","active_ajax_call [...]
\ No newline at end of file
diff --git a/static/maps/nls/ja/locale.js.map b/static/maps/nls/ja/locale.js.map
new file mode 100644
index 0000000..6a79687
--- /dev/null
+++ b/static/maps/nls/ja/locale.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"locale.js","sources":["../../../src/nls/ja/locale.js"],"names":["define","This history is empty","No matching datasets found","Search datasets","You are currently viewing a deleted history!","You are over your disk quota","All","None","For all selected","Click to rename history","Operations on multiple datasets","Permanently delete datasets","This will permanently remove the data in your datasets. Are you sure?","Dataset","This history is empty. Click 'Get Data' on t [...]
\ No newline at end of file
diff --git a/static/maps/nls/locale.js.map b/static/maps/nls/locale.js.map
new file mode 100644
index 0000000..cded8ae
--- /dev/null
+++ b/static/maps/nls/locale.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"locale.js","sources":["../../src/nls/locale.js"],"names":["define","root","This history is empty","No matching datasets found","An error occurred while getting updates from the server","Please contact a Galaxy administrator if the problem persists","Search datasets","You are currently viewing a deleted history!","You are over your disk quota","Tool execution is on hold until your disk usage drops below your allocated quota","All","None","For all selected","Edit histo [...]
\ No newline at end of file
diff --git a/static/maps/nls/zh/locale.js.map b/static/maps/nls/zh/locale.js.map
new file mode 100644
index 0000000..e0a07e7
--- /dev/null
+++ b/static/maps/nls/zh/locale.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"locale.js","sources":["../../../src/nls/zh/locale.js"],"names":["define","This history is empty","No matching datasets found","Search datasets","You are currently viewing a deleted history!","You are over your disk quota","All","None","For all selected","Click to rename history","Operations on multiple datasets","Permanently delete datasets","This will permanently remove the data in your datasets. Are you sure?","Dataset","This history is empty. Click 'Get Data' on t [...]
\ No newline at end of file
diff --git a/static/maps/onload.js.map b/static/maps/onload.js.map
new file mode 100644
index 0000000..9723faf
--- /dev/null
+++ b/static/maps/onload.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"onload.js","sources":["../src/onload.js"],"names":["replace_big_select_inputs","min_length","max_length","select_elts","refresh_select2","element","select_elt","$","options","placeholder","closeOnSelect","is","dropdownAutoWidth","containerCssClass","select2","jQuery","fn","undefined","each","this","not","num_options","find","length","hasClass","init_refresh_on_change","off","change","select_field","select_val","val","ref_on_change_vals","attr","split","last_selected_ [...]
\ No newline at end of file
diff --git a/static/maps/polyfills.js.map b/static/maps/polyfills.js.map
new file mode 100644
index 0000000..87dbd52
--- /dev/null
+++ b/static/maps/polyfills.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"polyfills.js","sources":["../src/polyfills.js"],"names":["window","console","log","debug","info","warn","error","assert","Object","assign","_","extend","lastTime","vendors","x","length","requestAnimationFrame","cancelRequestAnimationFrame","callback","currTime","Date","getTime","timeToCall","Math","max","id","setTimeout","cancelAnimationFrame","clearTimeout","features","name","compatible","CanvasRenderingContext2D","sessionStorage","err","incompatibilities","filter", [...]
\ No newline at end of file
diff --git a/static/maps/reports_webapp/run_stats.js.map b/static/maps/reports_webapp/run_stats.js.map
new file mode 100644
index 0000000..3704f7f
--- /dev/null
+++ b/static/maps/reports_webapp/run_stats.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"run_stats.js","sources":["../../src/reports_webapp/run_stats.js"],"names":["days_in_month","month","year","Date","getDate","date_by_subtracting_days","date","days","getFullYear","getMonth","getHours","getMinutes","getSeconds","getMilliseconds","date_by_subtracting_hours","hours","get_utc_time_hours","getUTCFullYear","getUTCMonth","getUTCDate","getUTCHours","refresh","window","location","reload","create_chart","inp_data","name","time","title","click","classes","d3","s [...]
\ No newline at end of file
diff --git a/static/maps/templates/compiled/panel_section.js.map b/static/maps/templates/compiled/panel_section.js.map
new file mode 100644
index 0000000..d4406f1
--- /dev/null
+++ b/static/maps/templates/compiled/panel_section.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"panel_section.js","sources":["../../../src/templates/compiled/panel_section.js"],"names":["define","Handlebars","template","compiler","main","depth0","helpers","partials","data","helper","alias1","helperMissing","alias2","alias3","this","escapeExpression","id","call","name","hash","useData"],"mappings":"AAAAA,QAAQ,2BAA4B,SAASC,GAE7C,MAAOA,GAAWC,UAAUC,UAAY,EAAE,mBAAmBC,KAAO,SAASC,EAAOC,EAAQC,EAASC,GACjG,GAAIC,GAAQC,EAAOJ,EAAQK,cAAeC,EAAO,WAAYC,EAAOC,KAAKC,gBAE3E,OAAO, [...]
\ No newline at end of file
diff --git a/static/maps/templates/compiled/tool_form.js.map b/static/maps/templates/compiled/tool_form.js.map
new file mode 100644
index 0000000..2859ce1
--- /dev/null
+++ b/static/maps/templates/compiled/tool_form.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool_form.js","sources":["../../../src/templates/compiled/tool_form.js"],"names":["define","Handlebars","template","1","depth0","helpers","partials","data","stack1","helper","alias1","helperMissing","alias2","alias3","this","escapeExpression","name","call","hash","label","html","help","compiler","main","version","each","inputs","fn","program","inverse","noop","useData"],"mappings":"AAAAA,QAAQ,2BAA4B,SAASC,GAE7C,MAAOA,GAAWC,UAAUC,EAAI,SAASC,EAAOC,EAAQC,EAASC,GAC7D,GAA [...]
\ No newline at end of file
diff --git a/static/maps/templates/compiled/tool_link.js.map b/static/maps/templates/compiled/tool_link.js.map
new file mode 100644
index 0000000..a850777
--- /dev/null
+++ b/static/maps/templates/compiled/tool_link.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool_link.js","sources":["../../../src/templates/compiled/tool_link.js"],"names":["define","Handlebars","template","1","depth0","alias1","this","lambda","alias2","escapeExpression","compiler","main","helpers","partials","data","stack1","helper","helperMissing","alias3","each","call","labels","name","hash","fn","program","inverse","noop","id","link","target","min_width","description","useData"],"mappings":"AAAAA,QAAQ,2BAA4B,SAASC,GAE7C,MAAOA,GAAWC,UAAUC,EAAI,SAASC,GAC [...]
\ No newline at end of file
diff --git a/static/maps/templates/compiled/tool_search.js.map b/static/maps/templates/compiled/tool_search.js.map
new file mode 100644
index 0000000..ded9adc
--- /dev/null
+++ b/static/maps/templates/compiled/tool_search.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tool_search.js","sources":["../../../src/templates/compiled/tool_search.js"],"names":["define","Handlebars","template","compiler","main","depth0","helpers","partials","data","helper","alias1","helperMissing","alias2","alias3","this","escapeExpression","search_hint_string","call","name","hash","spinner_url","useData"],"mappings":"AAAAA,QAAQ,2BAA4B,SAASC,GAE7C,MAAOA,GAAWC,UAAUC,UAAY,EAAE,mBAAmBC,KAAO,SAASC,EAAOC,EAAQC,EAASC,GACjG,GAAIC,GAAQC,EAAOJ,EAAQK,cAAeC,EAAO,WAAY [...]
\ No newline at end of file
diff --git a/static/maps/toolshed.groups.js.map b/static/maps/toolshed.groups.js.map
new file mode 100644
index 0000000..2ef66e1
--- /dev/null
+++ b/static/maps/toolshed.groups.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"toolshed.groups.js","sources":["../src/toolshed.groups.js"],"names":["define","mod_group_list","mod_group_detail","ToolshedRouter","Backbone","Router","extend","routes",":group_id","ToolshedGroups","View","groupListView","groupDetailView","collection","initialize","window","globalTS","groups","this","ts_router","on","GroupListView","group_id","GroupDetailView","history","start","pushState"],"mappings":"AAIAA,QACQ,6BACA,+BACA,0BAEJ,SACIC,EACAC,GAMR,GAAIC,GAAiBC,SAASC, [...]
\ No newline at end of file
diff --git a/static/maps/ui/autocom_tagging.js.map b/static/maps/ui/autocom_tagging.js.map
new file mode 100644
index 0000000..1a8ae02
--- /dev/null
+++ b/static/maps/ui/autocom_tagging.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"autocom_tagging.js","sources":["../../src/ui/autocom_tagging.js"],"names":["factory","define","amd","jQuery","init_tag_click_function","tag_elt","click_func","$","find","each","this","click","tag_str","text","tag_name_and_value","split","fn","autocomplete_tagging","options","init_delete_tag_image","delete_img","mouseenter","attr","settings","delete_tag_img_rollover","mouseleave","delete_tag_img","tag_button","parent","tag_name_elt","eq","tag_name","tag_value","prev_b [...]
\ No newline at end of file
diff --git a/static/maps/ui/editable-text.js.map b/static/maps/ui/editable-text.js.map
new file mode 100644
index 0000000..fba8651
--- /dev/null
+++ b/static/maps/ui/editable-text.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"editable-text.js","sources":["../../src/ui/editable-text.js"],"names":["factory","define","amd","jQuery","$","fn","make_text_editable","config_dict","num_cols","num_rows","use_textarea","on_finish","help_text","container","this","addClass","click","e","children","length","removeClass","input_elt","button_elt","set_text","new_text","find","remove","text","html","cur_text","attr","rows","cols","trim","keyup","keyCode","val","value","size","blur","trigger","stopPropagat [...]
\ No newline at end of file
diff --git a/static/maps/ui/fa-icon-button.js.map b/static/maps/ui/fa-icon-button.js.map
new file mode 100644
index 0000000..9255f0c
--- /dev/null
+++ b/static/maps/ui/fa-icon-button.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"fa-icon-button.js","sources":["../../src/ui/fa-icon-button.js"],"names":["root","factory","define","amd","faIconButton","this","options","tooltipConfig","placement","classes","concat","disabled","push","html","join","title","target","href","faIcon","$button","$","tooltip","_","isFunction","onclick","click"],"mappings":"CAAC,SAAUA,EAAMC,GACS,kBAAXC,SAAyBA,OAAOC,IACvCD,UAAWD,GAEXD,EAAKI,aAAeH,KAG1BI,KAAM,WAaJ,GAAID,GAAe,SAAUE,GACzBA,EAAUA,MACVA,EAAQC,cAAgBD,EAAQC,gBAAm [...]
\ No newline at end of file
diff --git a/static/maps/ui/filter-control.js.map b/static/maps/ui/filter-control.js.map
new file mode 100644
index 0000000..79fac21
--- /dev/null
+++ b/static/maps/ui/filter-control.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"filter-control.js","sources":["../../src/ui/filter-control.js"],"names":["factory","define","amd","jQuery","$","dropDownSelect","options","selected","selectThis","$this","this","$select","parents","newSelection","text","find","trigger","_","isEmpty","join","length","addClass","attr","append","map","option","click","FilterControl","element","init","prototype","DATA_KEY","filters","$element","extend","defaults","currFilter","render","empty","_renderKeySelect","_renderO [...]
\ No newline at end of file
diff --git a/static/maps/ui/hoverhighlight.js.map b/static/maps/ui/hoverhighlight.js.map
new file mode 100644
index 0000000..a30be39
--- /dev/null
+++ b/static/maps/ui/hoverhighlight.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"hoverhighlight.js","sources":["../../src/ui/hoverhighlight.js"],"names":["factory","define","amd","jQuery","fn","extend","hoverhighlight","scope","color","this","length","$","each","$this","targetSelector","data","mouseover","css","background","mouseout"],"mappings":"CAAC,SAAUA,GACe,kBAAXC,SAAyBA,OAAOC,IACvCD,UAAWD,GAGXA,EAAQG,SAGd,WAGEA,OAAOC,GAAGC,QACNC,eAAiB,SAA0BC,EAAOC,GAE9C,MADAD,GAAQA,GAAS,OACZE,KAAKC,QAEVC,EAAGF,MAAOG,KAAM,WACZ,GAAIC,GAAQF,EAAGF,MACXK,EAAiBD, [...]
\ No newline at end of file
diff --git a/static/maps/ui/loading-indicator.js.map b/static/maps/ui/loading-indicator.js.map
new file mode 100644
index 0000000..3e97700
--- /dev/null
+++ b/static/maps/ui/loading-indicator.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"loading-indicator.js","sources":["../../src/ui/loading-indicator.js"],"names":["root","factory","define","amd","LoadingIndicator","this","$where","options","render","html","join","$indicator","$","hide","css","position","$text","children","cover","z-index","top","bottom","left","right","opacity","background-color","text-align","margin-top","margin","color","font-style","self","jQuery","extend","show","msg","speed","callback","parent","find","remove","insertBefore","m [...]
\ No newline at end of file
diff --git a/static/maps/ui/mode-button.js.map b/static/maps/ui/mode-button.js.map
new file mode 100644
index 0000000..3620cbc
--- /dev/null
+++ b/static/maps/ui/mode-button.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"mode-button.js","sources":["../../src/ui/mode-button.js"],"names":["factory","define","amd","jQuery","ModeButton","element","options","this","currModeIndex","_init","prototype","DATA_KEY","defaults","switchModesOnClick","$element","$","extend","modes","Error","modeButton","click","callModeFn","_incModeIndex","html","reset","length","_getModeIndex","modeKey","i","mode","_setModeByIndex","index","newMode","currentMode","current","getMode","hasMode","err","setMode","ini [...]
\ No newline at end of file
diff --git a/static/maps/ui/pagination.js.map b/static/maps/ui/pagination.js.map
new file mode 100644
index 0000000..694a722
--- /dev/null
+++ b/static/maps/ui/pagination.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"pagination.js","sources":["../../src/ui/pagination.js"],"names":["factory","define","amd","jQuery","$","Pagination","element","options","this","numPages","currPage","init","_make$Li","contents","join","prototype","DATA_KEY","defaults","startingPage","perPage","totalDataSize","currDataSize","$element","extend","Math","ceil","data","_render","_renderPages","_scrollToActivePage","_renderPrevNext","pagination","$prev","$next","$paginationContainer","addClass","click","pr [...]
\ No newline at end of file
diff --git a/static/maps/ui/peek-column-selector.js.map b/static/maps/ui/peek-column-selector.js.map
new file mode 100644
index 0000000..a193718
--- /dev/null
+++ b/static/maps/ui/peek-column-selector.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"peek-column-selector.js","sources":["../../src/ui/peek-column-selector.js"],"names":["factory","define","amd","jQuery","$","validateControl","control","disabled","type","Error","JSON","stringify","multiselect","selected","label","id","indexOf","buildButton","addClass","BUTTON_CLASS","text","buildControlCell","columnIndex","$td","html","attr","COLUMN_INDEX_DATA_KEY","DISABLED_CLASS","setSelectedText","$cell","$button","children","hasClass","SELECTED_CLASS","undefined" [...]
\ No newline at end of file
diff --git a/static/maps/ui/popupmenu.js.map b/static/maps/ui/popupmenu.js.map
new file mode 100644
index 0000000..6625008
--- /dev/null
+++ b/static/maps/ui/popupmenu.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"popupmenu.js","sources":["../../src/ui/popupmenu.js"],"names":["define","jQuery","make_popupmenu","button_element","initial_options","element_menu_exists","data","bind","e","$","remove","setTimeout","menu_element","attr","options","_","size","appendTo","each","k","v","action","append","url","html","click","addClass","wrapper","x","pageX","width","Math","min","document","scrollLeft","window","max","css","top","pageY","left","close_popup","el","unbind","frame_id","fram [...]
\ No newline at end of file
diff --git a/static/maps/ui/scroll-panel.js.map b/static/maps/ui/scroll-panel.js.map
new file mode 100644
index 0000000..36fd8ea
--- /dev/null
+++ b/static/maps/ui/scroll-panel.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"scroll-panel.js","sources":["../../src/ui/scroll-panel.js"],"names":["$","ui","plugin","add","drag","e","instance","this","data","clearTimeout","timeout","o","options","element","panel","panel_pos","position","panel_w","width","panel_h","height","viewport","parent","viewport_w","viewport_h","element_w","element_h","moved","close_dist","nudge","p_min_x","p_min_y","p_max_x","p_max_y","min_vis_x","left","max_vis_x","min_vis_y","top","max_vis_y","mouse_x","offset","click [...]
\ No newline at end of file
diff --git a/static/maps/ui/search-input.js.map b/static/maps/ui/search-input.js.map
new file mode 100644
index 0000000..cc5ce99
--- /dev/null
+++ b/static/maps/ui/search-input.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"search-input.js","sources":["../../src/ui/search-input.js"],"names":["factory","define","amd","jQuery","searchInput","parentNode","options","clearSearchInput","$input","$","this","parent","children","val","trigger","blur","onclear","search","event","searchTerms","onfirstsearch","firstSearch","onsearch","inputTemplate","name","placeholder","classes","join","focus","select","keyup","preventDefault","stopPropagation","which","KEYCODE_ESC","escWillClear","call","KEYCODE_ [...]
\ No newline at end of file
diff --git a/static/maps/utils/add-logging.js.map b/static/maps/utils/add-logging.js.map
new file mode 100644
index 0000000..9854cc7
--- /dev/null
+++ b/static/maps/utils/add-logging.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"add-logging.js","sources":["../../src/utils/add-logging.js"],"names":["define","addLogging","obj","namespace","addTo","undefined","prototype","_logNamespace","LOGGING_FNS","forEach","logFn","this","logger","emit","arguments","apply"],"mappings":"AAAAA,UACG,WAOH,QAASC,GAAYC,EAAKC,GACtB,GAAIC,GAA4BC,SAAlBH,EAAII,UAA4BJ,EAAc,UAAE,CA2B9D,OA1BkBG,UAAdF,IACAC,EAAMG,cAAgBJ,GAQ1BK,EAAYC,QAAS,SAAUC,GAC3BN,EAAOM,GAAU,WACb,MAAKC,MAAKC,OAGND,KAAKC,OAAOC,KACLF,KAAKC,OAAOC,KAAMH,E [...]
\ No newline at end of file
diff --git a/static/maps/utils/ajax-queue.js.map b/static/maps/utils/ajax-queue.js.map
new file mode 100644
index 0000000..86a814f
--- /dev/null
+++ b/static/maps/utils/ajax-queue.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ajax-queue.js","sources":["../../src/utils/ajax-queue.js"],"names":["define","AjaxQueue","initialFunctions","self","this","deferred","jQuery","Deferred","queue","responses","numToProcess","running","init","start","NamedAjaxQueue","names","call","prototype","forEach","fn","add","index","length","push","fnIndex","xhr","done","response","notify","curr","total","always","shift","stop","causeFail","msg","reject","resolve","fail","progress","create","constructor","obj","ha [...]
\ No newline at end of file
diff --git a/static/maps/utils/async-save-text.js.map b/static/maps/utils/async-save-text.js.map
new file mode 100644
index 0000000..59ee2d3
--- /dev/null
+++ b/static/maps/utils/async-save-text.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"async-save-text.js","sources":["../../src/utils/async-save-text.js"],"names":["define","jQuery","async_save_text","click_to_edit_elt","text_elt_id","save_url","text_parm_name","num_cols","use_textarea","num_rows","on_start","on_finish","undefined","$","click","length","t","text_elt","old_text","text","attr","rows","cols","trim","value","size","blur","this","remove","show","keyup","e","keyCode","trigger","ajax_data","val","ajax","url","data","error","alert","success", [...]
\ No newline at end of file
diff --git a/static/maps/utils/config.js.map b/static/maps/utils/config.js.map
new file mode 100644
index 0000000..6b87ce6
--- /dev/null
+++ b/static/maps/utils/config.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"config.js","sources":["../../src/utils/config.js"],"names":["define","_","util_mod","ConfigSetting","Backbone","Model","extend","initialize","options","key","this","get","set","defaults","find","known_settings_defaults","s","undefined","set_value","get_random_color","value","type","parseFloat","parseInt","label","default_value","mode","hidden","ConfigSettingCollection","Collection","model","to_key_value_dict","rval","each","setting","get_value","set_default_value","f [...]
\ No newline at end of file
diff --git a/static/maps/utils/deferred.js.map b/static/maps/utils/deferred.js.map
new file mode 100644
index 0000000..31ff459
--- /dev/null
+++ b/static/maps/utils/deferred.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"deferred.js","sources":["../../src/utils/deferred.js"],"names":["define","Utils","Backbone","Model","extend","initialize","this","active","last","execute","callback","self","id","uid","has_deferred","length","process","$","Deferred","promise","always","Galaxy","emit","debug","state","charAt","toUpperCase","slice","when","resolve","reject","reset","i","ready","isEmptyObject"],"mappings":"AAGAA,QAAQ,eAAgB,SAAUC,GAClC,MAAOC,UAASC,MAAMC,QAClBC,WAAY,WACRC,KAAKC,UACLD,KAAK [...]
\ No newline at end of file
diff --git a/static/maps/utils/graph.js.map b/static/maps/utils/graph.js.map
new file mode 100644
index 0000000..6ea9140
--- /dev/null
+++ b/static/maps/utils/graph.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"graph.js","sources":["../../src/utils/graph.js"],"names":["define","matches","d","d2","k","hasOwnProperty","iterate","obj","propsOrFn","fn","undefined","props","returned","index","key","value","push","call","Edge","source","target","data","self","this","Vertex","name","edges","degree","Graph","directed","options","init","read","randGraph","numVerts","numEdges","randRange","range","Math","floor","random","nodes","links","i","prototype","toString","toJSON","json","each [...]
\ No newline at end of file
diff --git a/static/maps/utils/levenshtein.js.map b/static/maps/utils/levenshtein.js.map
new file mode 100644
index 0000000..0536e2d
--- /dev/null
+++ b/static/maps/utils/levenshtein.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"levenshtein.js","sources":["../../src/utils/levenshtein.js"],"names":["define","levenshteinDistance","a","b","length","i","matrix","j","charAt","Math","min"],"mappings":"AAAAA,UACG,WAqBH,QAASC,GAAoBC,EAAGC,GAC9B,GAAgB,IAAbD,EAAEE,OAAe,MAAOD,GAAEC,MAC7B,IAAgB,IAAbD,EAAEC,OAAe,MAAOF,GAAEE,MAE7B,IAGIC,GAHAC,IAIJ,KAAID,EAAI,EAAGA,GAAKF,EAAEC,OAAQC,IACxBC,EAAOD,IAAMA,EAIf,IAAIE,EACJ,KAAIA,EAAI,EAAGA,GAAKL,EAAEE,OAAQG,IACxBD,EAAO,GAAGC,GAAKA,CAIjB,KAAIF,EAAI,EAAGA,GAAKF,EA [...]
\ No newline at end of file
diff --git a/static/maps/utils/localization.js.map b/static/maps/utils/localization.js.map
new file mode 100644
index 0000000..1691668
--- /dev/null
+++ b/static/maps/utils/localization.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"localization.js","sources":["../../src/utils/localization.js"],"names":["define","localeStrings","hasOwnProperty","locale","navigator","language","userLanguage","toLowerCase","split","__root","localize","strToLocalize","cacheNonLocalized"],"mappings":"AAAAA,QACI,mBACD,SAAUC,GAOb,GAAIA,EAAcC,eAAgB,UAAY,CAE1C,GAAIC,GACqB,mBAAdC,WAA4B,UAC3BA,UAAUC,UAAYD,UAAUE,cAAgB,UAAWC,aAEvEN,GAAgBA,EAAe,KAAOE,IAAYF,EAAe,KAAOE,EAAOK,MAAM,KAAK,KAAOP,EAAcQ,OAanH,GAAIC,GAAW,SAAUC,GAcrB,M [...]
\ No newline at end of file
diff --git a/static/maps/utils/metrics-logger.js.map b/static/maps/utils/metrics-logger.js.map
new file mode 100644
index 0000000..81083d6
--- /dev/null
+++ b/static/maps/utils/metrics-logger.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"metrics-logger.js","sources":["../../src/utils/metrics-logger.js"],"names":["define","MetricsLogger","options","self","this","userId","window","bootstrapped","user","id","consoleLogger","_init","LoggingCache","ALL","LOG","DEBUG","INFO","WARN","ERROR","METRIC","NONE","defaultOptions","logLevel","consoleLevel","defaultNamespace","consoleNamespaceWhitelist","clientPrefix","maxCacheSize","postSize","addTime","cacheKeyPrefix","postUrl","delayPostInMs","getPingData","undef [...]
\ No newline at end of file
diff --git a/static/maps/utils/natural-sort.js.map b/static/maps/utils/natural-sort.js.map
new file mode 100644
index 0000000..774caef
--- /dev/null
+++ b/static/maps/utils/natural-sort.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"natural-sort.js","sources":["../../src/utils/natural-sort.js"],"names":["define","naturalSort","a","b","re","x","toString","toLowerCase","y","nC","String","fromCharCode","xN","replace","split","yN","xD","Date","getTime","yD","oFxNcL","oFyNcL","cLoc","numS","Math","max","length","parseFloat"],"mappings":"AAAAA,UAAW,WAEX,QAASC,GAAYC,EAAGC,GAEpB,GAAIC,GAAK,gBACLC,EAAIH,EAAEI,WAAWC,eAAiB,GAClCC,EAAIL,EAAEG,WAAWC,eAAiB,GAClCE,EAAKC,OAAOC,aAAa,GACzBC,EAAKP,EAAEQ,QAAST,EAAI [...]
\ No newline at end of file
diff --git a/static/maps/utils/query-string-parsing.js.map b/static/maps/utils/query-string-parsing.js.map
new file mode 100644
index 0000000..a2e26fe
--- /dev/null
+++ b/static/maps/utils/query-string-parsing.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"query-string-parsing.js","sources":["../../src/utils/query-string-parsing.js"],"names":["define","get","key","queryString","window","location","search","substr","keyRegex","RegExp","matches","match","length","splice","undefined","parse","parsed","split","forEach","pairString","pair","decodeURI"],"mappings":"AAAAA,UAAW,WAEX,QAASC,GAAKC,EAAKC,GACfA,EAAcA,GAAeC,OAAOC,SAASC,OAAOC,OAAQ,EAC5D,IAAIC,GAAW,GAAIC,QAAQP,EAAM,cAC7BQ,EAAUP,EAAYQ,MAAOH,EACjC,OAAKE,IAAYA,EAAQE,QAGz [...]
\ No newline at end of file
diff --git a/static/maps/utils/uploadbox.js.map b/static/maps/utils/uploadbox.js.map
new file mode 100644
index 0000000..11cd934
--- /dev/null
+++ b/static/maps/utils/uploadbox.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"uploadbox.js","sources":["../../src/utils/uploadbox.js"],"names":["$","jQuery","event","props","push","uploadpost","config","cnf","extend","data","success","error","progress","url","maxfilesize","error_filesize","error_default","error_server","error_login","error_message","form","FormData","key","payload","append","sizes","files","d","name","file","size","xhr","XMLHttpRequest","open","setRequestHeader","onreadystatechange","readyState","DONE","response","responseText [...]
\ No newline at end of file
diff --git a/static/maps/utils/utils.js.map b/static/maps/utils/utils.js.map
new file mode 100644
index 0000000..6c66cb8
--- /dev/null
+++ b/static/maps/utils/utils.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"utils.js","sources":["../../src/utils/utils.js"],"names":["define","iframe","src","deepeach","dict","callback","i","d","_","isObject","new_dict","linkify","inputText","replacedText","replacePattern1","replacePattern2","replacePattern3","replace","clone","obj","JSON","parse","stringify","isJSON","text","test","sanitize","content","$","html","isEmpty","value","Array","length","undefined","indexOf","textify","lst","isArray","toString","pos","lastIndexOf","substr","get", [...]
\ No newline at end of file
diff --git a/static/maps/viz/bbi-data-manager.js.map b/static/maps/viz/bbi-data-manager.js.map
new file mode 100644
index 0000000..5e85c9f
--- /dev/null
+++ b/static/maps/viz/bbi-data-manager.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"bbi-data-manager.js","sources":["../../src/viz/bbi-data-manager.js"],"names":["define","visualization","bigwig","BBIDataManager","GenomeDataManager","extend","load_data","region","deferred","$","Deferred","this","set_data","url","Galaxy","root","get","id","self","when","makeBwg","then","bb","readWigData","data","result","prev","max","Number","MIN_VALUE","forEach","d","min","push","score","entry","dataset_type","resolve"],"mappings":"AAAAA,QAAS,oBAAqB,mBACtB,SAASC,EAA [...]
\ No newline at end of file
diff --git a/static/maps/viz/circster.js.map b/static/maps/viz/circster.js.map
new file mode 100644
index 0000000..abadf45
--- /dev/null
+++ b/static/maps/viz/circster.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"circster.js","sources":["../../src/viz/circster.js"],"names":["require","mod_utils","cssLoadFile","define","_","d3","visualization","config","mod_icon_btn","SVGUtils","Backbone","Model","extend","is_visible","svg_elt","eltBRect","getBoundingClientRect","svgBRect","$","right","left","bottom","top","UsesTicks","drawTicks","parent_elt","data","dataHandler","textTransform","horizontal","ticks","append","selectAll","enter","attr","d","angle","Math","PI","radius","tick_coo [...]
\ No newline at end of file
diff --git a/static/maps/viz/phyloviz.js.map b/static/maps/viz/phyloviz.js.map
new file mode 100644
index 0000000..5852358
--- /dev/null
+++ b/static/maps/viz/phyloviz.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"phyloviz.js","sources":["../../src/viz/phyloviz.js"],"names":["define","d3","visualization_mod","data_mod","mod_icon_btn","PhyloTreeLayout","layout","node","maxDepth","vertSeparation","parent","children","sumChildVertSeparation","dist","defaultDist","y0","depthSeparation","maxTextWidth","forEach","child","x0","length","leafIndex","x","y","self","this","hierarchy","sort","value","height","layoutMode","leafHeight","inputLeafHeight","mode","layoutAngle","angle","isNaN", [...]
\ No newline at end of file
diff --git a/static/maps/viz/sweepster.js.map b/static/maps/viz/sweepster.js.map
new file mode 100644
index 0000000..4b07e08
--- /dev/null
+++ b/static/maps/viz/sweepster.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"sweepster.js","sources":["../../src/viz/sweepster.js"],"names":["define","_","d3","util","visualization","tracks","tools","data","config","mod_icon_btn","ToolInputsSettings","Backbone","Model","extend","defaults","inputs","values","ToolParameterTree","tool","tree_data","initialize","options","self","this","get","each","input","on","set_tree_data","add_param","remove_param","input_config","find","name","set","param","get_tree_params","length","index","params_samples", [...]
\ No newline at end of file
diff --git a/static/maps/viz/trackster.js.map b/static/maps/viz/trackster.js.map
new file mode 100644
index 0000000..1dc4c55
--- /dev/null
+++ b/static/maps/viz/trackster.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"trackster.js","sources":["../../src/viz/trackster.js"],"names":["ui","view","browser_router","require","mod_utils","cssLoadFile","define","_","tracks","visualization","mod_icon_btn","query_string","Base","this","initialize","apply","arguments","extend","Backbone","Model","TracksterUI","baseURL","save_viz","Galaxy","modal","show","title","body","bookmarks","$","each","push","position","children","text","annotation","overview_track_name","overview_drawable","config","g [...]
\ No newline at end of file
diff --git a/static/maps/viz/trackster/filters.js.map b/static/maps/viz/trackster/filters.js.map
new file mode 100644
index 0000000..d0c3f8e
--- /dev/null
+++ b/static/maps/viz/trackster/filters.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"filters.js","sources":["../../../src/viz/trackster/filters.js"],"names":["define","_","extend","Filter","obj_dict","this","manager","name","index","tool_id","tool_exp_name","prototype","to_dict","create_action_icon","title","css_class","on_click_fn","$","attr","addClass","tooltip","click","NumberFilter","call","low","Number","MAX_VALUE","high","min","max","container","slider","slider_label","edit_slider_values","span","cur_value","text","parseFloat","input_size","toS [...]
\ No newline at end of file
diff --git a/static/maps/viz/trackster/painters.js.map b/static/maps/viz/trackster/painters.js.map
new file mode 100644
index 0000000..4e7175e
--- /dev/null
+++ b/static/maps/viz/trackster/painters.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"painters.js","sources":["../../../src/viz/trackster/painters.js"],"names":["define","_","BEFORE","CONTAINS","OVERLAP_START","OVERLAP_END","CONTAINED_BY","AFTER","compute_overlap","first_region","second_region","overlap","first_start","first_end","second_start","second_end","is_overlap","dashedLine","ctx","x1","y1","x2","y2","dashLen","undefined","q","dX","dY","dashes","Math","floor","sqrt","dashX","dashY","fillRect","drawDownwardEquilateralTriangle","down_vertex_x"," [...]
\ No newline at end of file
diff --git a/static/maps/viz/trackster/slotting.js.map b/static/maps/viz/trackster/slotting.js.map
new file mode 100644
index 0000000..75bff6a
--- /dev/null
+++ b/static/maps/viz/trackster/slotting.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"slotting.js","sources":["../../../src/viz/trackster/slotting.js"],"names":["define","_","extend","LABEL_SPACING","PACK_SPACING","SlottedInfo","slot","feature","this","FeatureSlotter","w_scale","mode","max_rows","measureText","slots","start_end_dct","include_label","prototype","_get_draw_coords","text_align","draw_start","Math","floor","draw_end","ceil","f_name","undefined","text_len","width","_find_slot","draw_coords","slot_num","has_overlap","k","k_len","length","s_ [...]
\ No newline at end of file
diff --git a/static/maps/viz/trackster/tracks.js.map b/static/maps/viz/trackster/tracks.js.map
new file mode 100644
index 0000000..b06ee84
--- /dev/null
+++ b/static/maps/viz/trackster/tracks.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"tracks.js","sources":["../../../src/viz/trackster/tracks.js"],"names":["define","_","visualization","viz_views","util","slotting","painters","filters_mod","data","tools_mod","config_mod","bbi","round","num","places","val","Math","pow","supportsByteRanges","url","promise","$","Deferred","ajax","type","beforeSend","xhr","setRequestHeader","success","result","status","resolve","extend","html_elt_js_obj_dict","is_container","element","obj","attr","moveable","handle_class [...]
\ No newline at end of file
diff --git a/static/maps/viz/trackster/util.js.map b/static/maps/viz/trackster/util.js.map
new file mode 100644
index 0000000..3595148
--- /dev/null
+++ b/static/maps/viz/trackster/util.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"util.js","sources":["../../../src/viz/trackster/util.js"],"names":["define","commatize","number","rgx","test","replace","is_deferred","d","ServerStateDeferred","Backbone","Model","extend","defaults","ajax_settings","interval","success_fn","go","deferred","$","Deferred","self","this","get","_go","ajax","success","result","resolve","setTimeout","get_random_color","colors","i","length","parseInt","slice","new_color","nr","ng","nb","other_color","or","og","ob","n_brightn [...]
\ No newline at end of file
diff --git a/static/maps/viz/visualization.js.map b/static/maps/viz/visualization.js.map
new file mode 100644
index 0000000..c117318
--- /dev/null
+++ b/static/maps/viz/visualization.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"visualization.js","sources":["../../src/viz/visualization.js"],"names":["define","_","data_mod","util_mod","config_mod","CustomToJSON","toJSON","self","this","json","each","constructor","to_json_keys","k","val","get","to_json_mappers","select_datasets","dataset_url","add_track_async_url","filters","success_fn","$","ajax","url","data","error","alert","success","table_html","Galaxy","modal","show","title","body","buttons","Cancel","hide","Add","requests","data_type","h [...]
\ No newline at end of file
diff --git a/static/maps/viz/viz_views.js.map b/static/maps/viz/viz_views.js.map
new file mode 100644
index 0000000..9461d09
--- /dev/null
+++ b/static/maps/viz/viz_views.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"viz_views.js","sources":["../../src/viz/viz_views.js"],"names":["define","_","TrackHeaderView","Backbone","View","extend","className","initialize","this","model","config","get","on","update_name","render","$el","append","$","addClass","drag_handle_class","text","get_value","action_icons","render_action_icons","dblclick","e","stopPropagation","find","self","icons_div","hide","appendTo","each","action_icons_def","icon_dict","add_action_icon","name","title","css_class", [...]
\ No newline at end of file
diff --git a/static/patmat/findcluster.png b/static/patmat/findcluster.png
new file mode 100644
index 0000000..f6b9a49
Binary files /dev/null and b/static/patmat/findcluster.png differ
diff --git a/static/robots.txt b/static/robots.txt
new file mode 100644
index 0000000..fc145db
--- /dev/null
+++ b/static/robots.txt
@@ -0,0 +1,3 @@
+User-agent: *
+Disallow: /display?
+Disallow: /display_as?
\ No newline at end of file
diff --git a/static/scripts/bundled/analysis.bundled.js b/static/scripts/bundled/analysis.bundled.js
new file mode 100644
index 0000000..5760cbb
--- /dev/null
+++ b/static/scripts/bundled/analysis.bundled.js
@@ -0,0 +1,13 @@
+webpackJsonp([3,1],[function(e,t,i){(function(e,t){var n=i(1),s=n,o=i(58).GalaxyApp,a=i(57),r=i(10),l=i(91),c=i(90),d=i(60),h=i(18),u=i(46);window.app=function(i,n){window.Galaxy=new o(i,n),Galaxy.debug("analysis app");var p=i.config,g=new l({el:"#left",userIsAnonymous:Galaxy.user.isAnonymous(),toolbox:p.toolbox,toolbox_in_panel:p.toolbox_in_panel,stored_workflow_menu_entries:p.stored_workflow_menu_entries,nginx_upload_path:p.nginx_upload_path,ftp_upload_site:p.ftp_upload_site,default_ge [...]
+}return!0},_templateSuccess:function(e){if(e.jobs&&e.jobs.length>0){var t=e.jobs.length,i=1==t?"1 job has":t+" jobs have",n=a("<div/>").addClass("donemessagelarge").append(a("<p/>").text(i+" been successfully added to the queue - resulting in the following datasets:"));return r.each(e.outputs,function(e){n.append(a("<p/>").addClass("messagerow").append(a("<b/>").text(e.hid+": "+e.name)))}),n.append(a("<p/>").append("<b/>").text("You can check the status of queued jobs and view the result [...]
+this}"undefined"!=typeof e&&e.exports&&(e.exports=i)},function(e,t,i){var n,s;(function(o){n=[i(6),i(17),i(5)],s=function(e,t,i){var n=o.View.extend({tagName:"div",className:"citations",render:function(){return this.$el.append("<p>"+this.formattedReference()+"</p>"),this},formattedReference:function(){var e=this.model,t=e.entryType(),i=e.fields(),n="",s=this._asSentence((i.author?i.author:"")+(i.year?" ("+i.year+")":""))+" ",o=i.title||"",a=i.pages?"pp. "+i.pages:"",r=i.address;if("artic [...]
+this.log(this+".initialize:",e),d.prototype.initialize.call(this,e),this.linkTarget=e.linkTarget||"_blank"},_setUpListeners:function(){d.prototype._setUpListeners.call(this);var e=this;return e.listenTo(e.model,{change:function(t,i){e.model.changedAttributes().state&&e.model.inReadyState()&&e.expanded&&!e.model.hasDetails()?e.model.fetch({silent:!0}).done(function(){e.render()}):e.render()}})},_fetchModelDetails:function(){var e=this;return e.model.inReadyState()&&!e.model.hasDetails()?e [...]
+},ajaxQueue:function(e,t,i){return i=i||this.models,new l.AjaxQueue(i.slice().reverse().map(function(i,n){var s=o.isString(e)?i[e]:e;return function(){return s.apply(i,t)}})).deferred},progressivelyFetchDetails:function(e){function i(t){t=t||0;var a=o.extend(o.clone(e),{view:"summary",keys:c,limit:r,offset:t,reset:0===t,remove:!1});o.defer(function(){s.fetch.call(s,a).fail(n.reject).done(function(e){n.notify(e,r,t),e.length!==r?(s.allFetched=!0,n.resolve(e,r,t)):i(t+r)})})}e=e||{};var n= [...]
+multiple:!1,batch:n.LINKED}],data_multiple:[{src:"hda",icon:"fa-files-o",tooltip:"Multiple datasets",multiple:!0,batch:n.DISABLED},{src:"hdca",icon:"fa-folder-o",tooltip:"Dataset collection",multiple:!1,batch:n.DISABLED}],data_collection:[{src:"hdca",icon:"fa-folder-o",tooltip:"Dataset collection",multiple:!1,batch:n.DISABLED}],workflow_data:[{src:"hda",icon:"fa-file-o",tooltip:"Single dataset",multiple:!1,batch:n.DISABLED}],workflow_data_multiple:[{src:"hda",icon:"fa-files-o",tooltip:"M [...]
+this.on("change:state",function(e,t){this.log(this+" has changed state:",e,t),this.inReadyState()&&this.trigger("state:ready",e,t,this.previous("state"))}),this.on("change:id change:file_ext",function(e){this._generateUrls()})},toJSON:function(){var e=o.Model.prototype.toJSON.call(this);return a.extend(e,{urls:this.urls})},isDeletedOrPurged:function(){return this.get("deleted")||this.get("purged")},inReadyState:function(){var t=a.contains(e.READY_STATES,this.get("state"));return this.isD [...]
+return['<li><a class="popupmenu-option" href="',t,'"',i,">",n,e.html,"</a></li>"].join("")}).join(""):"<li>(no options)</li>"},_getShownPosition:function(e){var t=this.$el.width(),i=e.pageX-t/2;return i=Math.min(i,o(document).scrollLeft()+o(window).width()-t-5),i=Math.max(i,o(document).scrollLeft()+5),{top:e.pageY,left:i}},_setUpCloseBehavior:function(){function e(e){if(o(document).off("click.close_popup"),window&&window.parent!==window)try{o(window.parent.document).off("click.close_popu [...]
+},_renderUnpairedEmpty:function(){var e=l('<div class="empty-message"></div>').text("("+s("no remaining unpaired datasets")+")");return this.$(".unpaired-columns .paired-column .column-datasets").empty().prepend(e),e},_renderUnpairedNotShown:function(){var e=l('<div class="empty-message"></div>').text("("+s("no datasets were found matching the current filters")+")");return this.$(".unpaired-columns .paired-column .column-datasets").empty().prepend(e),e},_adjUnpairedOnScrollbar:function() [...]
+l.prototype.initialize.call(this,e),this.panelStack=[],this.currentContentId=e.currentContentId||null},_setUpListeners:function(){l.prototype._setUpListeners.call(this);var e=this;this.on("new-model",function(){e.preferences.set("scrollPosition",0)})},loadCurrentHistory:function(){return this.loadHistory(null,{url:Galaxy.root+"history/current_history_json"})},switchToHistory:function(e,t){return Galaxy.user.isAnonymous()?(this.trigger("error",n("You must be logged in to switch histories" [...]
+this.$content.empty()},_change:function(e){this.$("#tab-"+e.id)[e.get("hidden")?"hide":"show"]()},_template:function(){return a("<div/>").addClass("ui-tabs tabbable tabs-left").append(a("<ul/>").addClass("tab-navigation nav nav-tabs")).append(a("<div/>").addClass("tab-content"))},_template_tab:function(e){var t=a("<li/>").addClass("tab-element").attr("id","tab-"+e.id).append(a("<a/>").attr("id","tab-title-link-"+e.id)),i=t.find("a");return e.icon&&i.append(a("<i/>").addClass("tab-icon fa [...]
+(function(i){n=[],s=function(){function e(e){var t=this;return t.deferred=i.Deferred(),t.queue=[],t.responses=[],t.numToProcess=0,t.running=!1,t.init(e||[]),t.start(),t}function t(t){var i=this;return i.names={},e.call(this,t),i}return e.prototype.init=function(e){var t=this;e.forEach(function(e){t.add(e)})},e.prototype.add=function(e){var t=this,i=this.queue.length;return this.numToProcess+=1,this.queue.push(function(){var n=i,s=e();s.done(function(e){t.deferred.notify({curr:n,total:t.n [...]
+//# sourceMappingURL=analysis.bundled.js.map
\ No newline at end of file
diff --git a/static/scripts/bundled/analysis.bundled.js.map b/static/scripts/bundled/analysis.bundled.js.map
new file mode 100644
index 0000000..f7b9003
--- /dev/null
+++ b/static/scripts/bundled/analysis.bundled.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///analysis.bundled.js","webpack:///./galaxy/scripts/apps/analysis.js","webpack:///./galaxy/scripts/mvc/ui/ui-misc.js?fd98","webpack:///./galaxy/scripts/mvc/ui/ui-portlet.js?0568","webpack:///./galaxy/scripts/mvc/dataset/data.js?f841*","webpack:///./galaxy/scripts/mvc/dataset/states.js","webpack:///./galaxy/scripts/mvc/ui/ui-buttons.js?4ced","webpack:///./galaxy/scripts/mvc/ui/icon-button.js?40a6*","webpack:///./galaxy/scripts/ui/editable-text.js?53d0","w [...]
\ No newline at end of file
diff --git a/static/scripts/bundled/libs.bundled.js b/static/scripts/bundled/libs.bundled.js
new file mode 100644
index 0000000..b46ba3d
--- /dev/null
+++ b/static/scripts/bundled/libs.bundled.js
@@ -0,0 +1,44 @@
+!function(t){function e(i){if(n[i])return n[i].exports;var o=n[i]={exports:{},id:i,loaded:!1};return t[i].call(o.exports,o,o.exports,e),o.loaded=!0,o.exports}var i=window.webpackJsonp;window.webpackJsonp=function(r,s){for(var a,l,h=0,c=[];h<r.length;h++)l=r[h],o[l]&&c.push.apply(c,o[l]),o[l]=0;for(a in s)t[a]=s[a];for(i&&i(r,s);c.length;)c.shift().call(null,e);return s[0]?(n[0]=0,e(0)):void 0};var n={},o={2:0};return e.e=function(t,i){if(0===o[t])return i.call(null,e);if(void 0!==o[t])o[ [...]
+	 * jQuery JavaScript Library v1.12.4
+	 * http://jquery.com/
+	 *
+	 * Includes Sizzle.js
+	 * http://sizzlejs.com/
+	 *
+	 * Copyright jQuery Foundation and other contributors
+	 * Released under the MIT license
+	 * http://jquery.org/license
+	 *
+	 * Date: 2016-05-20T17:17Z
+	 */
+!function(e,i){"object"==typeof t&&"object"==typeof t.exports?t.exports=e.document?i(e,!0):function(t){if(!t.document)throw new Error("jQuery requires a window with a document");return i(t)}:i(e)}("undefined"!=typeof window?window:this,function(i,r){function s(t){var e=!!t&&"length"in t&&t.length,i=vt.type(t);return"function"===i||vt.isWindow(t)?!1:"array"===i||0===e||"number"==typeof e&&e>0&&e-1 in t}function a(t,e,i){if(vt.isFunction(e))return vt.grep(t,function(t,n){return!!e.call(t,n [...]
+	 * Sizzle CSS Selector Engine v2.2.1
+	 * http://sizzlejs.com/
+	 *
+	 * Copyright jQuery Foundation and other contributors
+	 * Released under the MIT license
+	 * http://jquery.org/license
+	 *
+	 * Date: 2015-10-17
+	 */
+function(t){function e(t,e,i,n){var o,r,s,a,l,h,u,p,f=e&&e.ownerDocument,g=e?e.nodeType:9;if(i=i||[],"string"!=typeof t||!t||1!==g&&9!==g&&11!==g)return i;if(!n&&((e?e.ownerDocument||e:F)!==D&&M(e),e=e||D,R)){if(11!==g&&(h=vt.exec(t)))if(o=h[1]){if(9===g){if(!(s=e.getElementById(o)))return i;if(s.id===o)return i.push(s),i}else if(f&&(s=f.getElementById(o))&&I(e,s)&&s.id===o)return i.push(s),i}else{if(h[2])return K.apply(i,e.getElementsByTagName(t)),i;if((o=h[3])&&_.getElementsByClassName [...]
+var Kt=/^(?:input|select|textarea)$/i,Zt=/^key/,te=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,ee=/^(?:focusinfocus|focusoutblur)$/,ie=/^([^.]*)(?:\.(.+)|)/;vt.event={global:{},add:function(t,e,i,n,o){var r,s,a,l,h,c,u,d,p,f,g,m=vt._data(t);if(m){for(i.handler&&(l=i,i=l.handler,o=l.selector),i.guid||(i.guid=vt.guid++),(s=m.events)||(s=m.events={}),(c=m.handle)||(c=m.handle=function(t){return"undefined"==typeof vt||t&&vt.event.triggered===t.type?void 0:vt.event.dispatch.apply(c.elem, [...]
+var e,n,o,r;if("string"===i)for(n=0,o=vt(this),r=t.match(Pt)||[];e=r[n++];)o.hasClass(e)?o.removeClass(e):o.addClass(e);else void 0!==t&&"boolean"!==i||(e=X(this),e&&vt._data(this,"__className__",e),vt.attr(this,"class",e||t===!1?"":vt._data(this,"__className__")||""))})},hasClass:function(t){var e,i,n=0;for(e=" "+t+" ";i=this[n++];)if(1===i.nodeType&&(" "+X(i)+" ").replace(ze," ").indexOf(e)>-1)return!0;return!1}}),vt.each("blur focus focusin focusout load resize scroll unload click dbl [...]
+};case 4:return function(t,o,r){return i[e](this[n],l(t,this),o,r)};default:return function(){var t=r.call(arguments);return t.unshift(this[n]),i[e].apply(i,t)}}},a=function(t,e,n){i.each(e,function(e,o){i[o]&&(t.prototype[o]=s(e,o,n))})},l=function(t,e){return i.isFunction(t)?t:i.isObject(t)&&!e._isModel(t)?h(t):i.isString(t)?function(e){return e.get(t)}:t},h=function(t){var e=i.matches(t);return function(t){return e(t.attributes)}},c=e.Events={},u=/\s+/,d=function(t,e,n,o,r){var s,a=0; [...]
+this.$el.fadeIn("fast"),this.options.closing_events)){var e=this;s(document).on("keyup.ui-modal",function(t){27==t.keyCode&&e.hide(!0)}),this.$backdrop.on("click",function(){e.hide(!0)})}},hide:function(t){this.visible=!1,this.$el.fadeOut("fast"),this.options.closing_callback&&this.options.closing_callback(t),s(document).off("keyup.ui-modal"),this.$backdrop.off("click")},render:function(){var t=this;if(this.$el.html(this._template()),this.$header=this.$(".modal-header"),this.$dialog=this [...]
+this._callOnPromiseDone(e,this.showStep,0)),this},i.prototype.next=function(){var t;return t=this.hideStep(this._current),this._callOnPromiseDone(t,this._showNextStep)},i.prototype.prev=function(){var t;return t=this.hideStep(this._current),this._callOnPromiseDone(t,this._showPrevStep)},i.prototype.goTo=function(t){var e;return e=this.hideStep(this._current),this._callOnPromiseDone(e,this.showStep,t)},i.prototype.end=function(){var i,o;return i=function(i){return function(o){return t(n). [...]
+	* http://jqueryui.com
+	* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, jquery.ui.position.js, jquery.ui.autocomplete.js, jquery.ui.button.js, jquery.ui.menu.js, jquery.ui.slider.js
+	* Copyright (c) 2012 jQuery Foundation and other contributors Licensed MIT */
+!function(t,e){function i(e,i){var o,r,s,a=e.nodeName.toLowerCase();return"area"===a?(o=e.parentNode,r=o.name,e.href&&r&&"map"===o.nodeName.toLowerCase()?(s=t("img[usemap=#"+r+"]")[0],!!s&&n(s)):!1):(/input|select|textarea|button|object/.test(a)?!e.disabled:"a"===a?e.href||i:i)&&n(e)}function n(e){return t.expr.filters.visible(e)&&!t(e).parents().andSelf().filter(function(){return"hidden"===t.css(this,"visibility")}).length}var o=0,r=/^ui-id-\d+$/;t.ui=t.ui||{},t.ui.version||(t.extend(t. [...]
+},refresh:function(){var e=this.element.is(":disabled")||this.element.hasClass("ui-button-disabled");e!==this.options.disabled&&this._setOption("disabled",e),"radio"===this.type?c(this.element[0]).each(function(){t(this).is(":checked")?t(this).button("widget").addClass("ui-state-active").attr("aria-pressed","true"):t(this).button("widget").removeClass("ui-state-active").attr("aria-pressed","false")}):"checkbox"===this.type&&(this.element.is(":checked")?this.buttonElement.addClass("ui-sta [...]
+t(e.target)[n](i).each(b,arguments)})}else e.success&&y.push(e.success);if(e.success=function(t,i,n){for(var o=e.context||this,r=0,s=y.length;s>r;r++)y[r].apply(o,[t,i,n||u,u])},e.error){var w=e.error;e.error=function(t,i,n){var o=e.context||this;w.apply(o,[t,i,n,u])}}if(e.complete){var _=e.complete;e.complete=function(t,i){var n=e.context||this;_.apply(n,[t,i,u])}}var x=t("input[type=file]:enabled",this).filter(function(){return""!==t(this).val()}),C=x.length>0,S="multipart/form-data",k [...]
+i!==e&&(this.propertyObserver&&(delete this.propertyObserver,this.propertyObserver=null),this.propertyObserver=new i(function(e){t.each(e,o._sync)}),this.propertyObserver.observe(n.get(0),{attributes:!0,subtree:!1}))},triggerSelect:function(e){var i=t.Event("select2-selecting",{val:this.id(e),object:e,choice:e});return this.opts.element.trigger(i),!i.isDefaultPrevented()},triggerChange:function(e){e=e||{},e=t.extend({},e,{type:"change",val:this.val()}),this.opts.element.data("select2-cha [...]
+if(i=e.data("select2-data")){var s=t.Event("select2-removing");if(s.val=this.id(i),s.choice=i,this.opts.element.trigger(s),s.isDefaultPrevented())return!1;for(;(n=o(this.id(i),r))>=0;)r.splice(n,1),this.setVal(r),this.select&&this.postprocessResults();return e.remove(),this.opts.element.trigger({type:"select2-removed",val:this.id(i),choice:i}),this.triggerChange({removed:i}),!0}},postprocessResults:function(t,e,i){var n=this.getVal(),r=this.results.find(".select2-result"),s=this.results. [...]
+top:0,left:0,display:"block"}).addClass(n),this.options.container?i.appendTo(this.options.container):i.insertAfter(this.$element);var s=this.getPosition(),a=i[0].offsetWidth,l=i[0].offsetHeight;if(r){var h=this.$element.parent(),c=n,u=document.documentElement.scrollTop||document.body.scrollTop,d="body"==this.options.container?window.innerWidth:h.outerWidth(),p="body"==this.options.container?window.innerHeight:h.outerHeight(),f="body"==this.options.container?0:h.offset().left;n="bottom"== [...]
+(d||t.isArray(P))&&(A=!0,d&&(P=i[v]=c(P.split(":"))),P=u(P,null),p=a.min.apply(a,P),f=a.max.apply(a,P),q>p&&(q=p),f>W&&(W=f));this.stacked=A,this.regionShapes={},this.barWidth=L,this.barSpacing=H,this.totalBarWidth=L+H,this.width=r=i.length*L+(i.length-1)*H,this.initTarget(),F&&(T=I===l?-(1/0):I,E=$===l?1/0:$),m=[],g=A?[]:m;var z=[],B=[];for(v=0,b=i.length;b>v;v++)if(A)for(N=i[v],i[v]=D=[],z[v]=0,g[v]=B[v]=0,O=0,M=N.length;M>O;O++)P=D[O]=F?o(N[O],T,E):N[O],null!==P&&(P>0&&(z[v]+=P),0>q&& [...]
+	 * jQuery Migrate - v1.4.0 - 2016-02-26
+	 * Copyright jQuery Foundation and other contributors
+	 */
+!function(t,e,i){function n(i){var n=e.console;r[i]||(r[i]=!0,t.migrateWarnings.push(i),n&&n.warn&&!t.migrateMute&&(n.warn("JQMIGRATE: "+i),t.migrateTrace&&n.trace&&n.trace()))}function o(e,i,o,r){if(Object.defineProperty)try{return void Object.defineProperty(e,i,{configurable:!0,enumerable:!0,get:function(){return n(r),o},set:function(t){n(r),o=t}})}catch(s){}t._definePropertyBroken=!0,e[i]=o}t.migrateVersion="1.4.0";var r={};t.migrateWarnings=[],e.console&&e.console.log&&e.console.log( [...]
+//# sourceMappingURL=libs.bundled.js.map
\ No newline at end of file
diff --git a/static/scripts/bundled/libs.bundled.js.map b/static/scripts/bundled/libs.bundled.js.map
new file mode 100644
index 0000000..7b20ec6
--- /dev/null
+++ b/static/scripts/bundled/libs.bundled.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///libs.bundled.js","webpack:///webpack/bootstrap eebee742ba75704a3d11","webpack:///./galaxy/scripts/libs/jquery/jquery.js","webpack:///./galaxy/scripts/libs/underscore.js","webpack:///./galaxy/scripts/libs/backbone.js","webpack:///./galaxy/scripts/utils/utils.js","webpack:///./galaxy/scripts/utils/localization.js","webpack:///./galaxy/scripts/mvc/base-mvc.js","webpack:///./galaxy/scripts/mvc/ui/ui-modal.js","webpack:///./galaxy/scripts/layout/panel.js"," [...]
\ No newline at end of file
diff --git a/static/scripts/bundled/login.bundled.js b/static/scripts/bundled/login.bundled.js
new file mode 100644
index 0000000..873da93
--- /dev/null
+++ b/static/scripts/bundled/login.bundled.js
@@ -0,0 +1,2 @@
+webpackJsonp([4],[function(e,r,n){(function(e){var r=n(1),a=r,i=n(58).GalaxyApp,o=n(10),t=n(5),l=n(60);window.app=function(n,c){window.Galaxy=new i(n,c),Galaxy.debug("login app");var d=encodeURI(n.redirect);if(!n.show_welcome_with_login){var w=r.param({use_panels:"True",redirect:d});return void(window.location.href=Galaxy.root+"user/login?"+w)}var p=new l.PageLayoutView(e.extend(n,{el:"body",center:new o.CenterPanel({el:"#center"}),right:new o.RightPanel({title:t("Login required"),el:"#r [...]
+//# sourceMappingURL=login.bundled.js.map
\ No newline at end of file
diff --git a/static/scripts/bundled/login.bundled.js.map b/static/scripts/bundled/login.bundled.js.map
new file mode 100644
index 0000000..0405eb1
--- /dev/null
+++ b/static/scripts/bundled/login.bundled.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///login.bundled.js","webpack:///./galaxy/scripts/apps/login.js"],"names":["webpackJsonp","module","exports","__webpack_require__","_","jQuery","$","GalaxyApp","PANEL","_l","PAGE","window","app","options","bootstrapped","Galaxy","debug","redirect","encodeURI","show_welcome_with_login","params","param","use_panels","location","href","root","loginPage","PageLayoutView","extend","el","center","CenterPanel","right","RightPanel","title","loginUrl","render","pr [...]
\ No newline at end of file
diff --git a/static/scripts/galaxy.interactive_environments.js b/static/scripts/galaxy.interactive_environments.js
new file mode 100644
index 0000000..c859c44
--- /dev/null
+++ b/static/scripts/galaxy.interactive_environments.js
@@ -0,0 +1,2 @@
+function append_notebook(a){clear_main_area(),$("#main").append('<iframe frameBorder="0" seamless="seamless" style="width: 100%; height: 100%; overflow:hidden;" scrolling="no" src="'+a+'"></iframe>')}function clear_main_area(){$("#spinner").remove(),$("#main").children().remove()}function display_spinner(){$("#main").append('<img id="spinner" src="'+galaxy_root+'static/style/largespinner.gif" style="position:absolute;margin:auto;top:0;left:0;right:0;bottom:0;">')}function test_ie_availab [...]
+//# sourceMappingURL=../maps/galaxy.interactive_environments.js.map
\ No newline at end of file
diff --git a/static/scripts/galaxy.js b/static/scripts/galaxy.js
new file mode 100644
index 0000000..5abda48
--- /dev/null
+++ b/static/scripts/galaxy.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","libs/backbone","mvc/base-mvc","mvc/user/user-model","utils/metrics-logger","utils/add-logging","utils/localization"],function(a,b,c,d,e,f,g){function h(a,b){var c=this;return c._init(a||{},b||{})}f(h,"GalaxyApp");var i="galaxy:debug",j=i+":namespaces",k=!1;try{k="true"==localStorage.getItem(i)}catch(l){console.log(g("localStorage not available for debug flag retrieval"))}return h.prototype._init=function(c,d){var e=this;return a.extend(e,b.Events),k&&(e.logger= [...]
+//# sourceMappingURL=../maps/galaxy.js.map
\ No newline at end of file
diff --git a/static/scripts/galaxy.library.js b/static/scripts/galaxy.library.js
new file mode 100644
index 0000000..6319799
--- /dev/null
+++ b/static/scripts/galaxy.library.js
@@ -0,0 +1,2 @@
+define(["layout/masthead","utils/utils","libs/toastr","mvc/base-mvc","mvc/library/library-model","mvc/library/library-folderlist-view","mvc/library/library-librarylist-view","mvc/library/library-librarytoolbar-view","mvc/library/library-foldertoolbar-view","mvc/library/library-dataset-view","mvc/library/library-library-view","mvc/library/library-folder-view"],function(a,b,c,d,e,f,g,h,i,j,k,l){var m=Backbone.Router.extend({initialize:function(){this.routesHit=0,Backbone.history.on("route" [...]
+//# sourceMappingURL=../maps/galaxy.library.js.map
\ No newline at end of file
diff --git a/static/scripts/galaxy.menu.js b/static/scripts/galaxy.menu.js
new file mode 100644
index 0000000..d09950b
--- /dev/null
+++ b/static/scripts/galaxy.menu.js
@@ -0,0 +1,2 @@
+define(["galaxy.masthead"],function(a){var b=Backbone.Model.extend({initialize:function(a){this.options=a.config,this.masthead=a.masthead,this.create()},create:function(){var b=new a.GalaxyMastheadTab({id:"analysis",title:"Analyze Data",content:"",title_attribute:"Analysis home view"});this.masthead.append(b);var c={id:"workflow",title:"Workflow",content:"workflow",title_attribute:"Chain tools into workflows"};Galaxy.user.id||(c.disabled=!0);var d=new a.GalaxyMastheadTab(c);this.masthead [...]
+//# sourceMappingURL=../maps/galaxy.menu.js.map
\ No newline at end of file
diff --git a/static/scripts/galaxy.pages.js b/static/scripts/galaxy.pages.js
new file mode 100644
index 0000000..86d53e1
--- /dev/null
+++ b/static/scripts/galaxy.pages.js
@@ -0,0 +1,2 @@
+function init_galaxy_elts(a){$(".annotation",a._doc.body).each(function(){$(this).click(function(){var b=a._doc.createRange();b.selectNodeContents(this);var c=window.getSelection();c.removeAllRanges(),c.addRange(b)})})}function get_item_info(a){var b,c,d;switch(a){case CONTROLS.ITEM_HISTORY:b="History",c="Histories",d="history",item_class="History";break;case CONTROLS.ITEM_DATASET:b="Dataset",c="Datasets",d="dataset",item_class="HistoryDatasetAssociation";break;case CONTROLS.ITEM_WORKFLO [...]
+//# sourceMappingURL=../maps/galaxy.pages.js.map
\ No newline at end of file
diff --git a/static/scripts/i18n.js b/static/scripts/i18n.js
new file mode 100644
index 0000000..defe0d2
--- /dev/null
+++ b/static/scripts/i18n.js
@@ -0,0 +1,2 @@
+!function(){"use strict";function a(a,b,c,d,e,f){b[a]&&(c.push(a),(b[a]===!0||1===b[a])&&d.push(e+a+"/"+f))}function b(a,b,c,d,e){var f=d+b+"/"+e;require._fileExists(a.toUrl(f+".js"))&&c.push(f)}function c(a,b,d){var e;for(e in b)!b.hasOwnProperty(e)||a.hasOwnProperty(e)&&!d?"object"==typeof b[e]&&(!a[e]&&b[e]&&(a[e]={}),c(a[e],b[e],d)):a[e]=b[e]}var d=/(^.*(^|\/)nls(\/|$))([^\/]*)\/?([^\/]*)/;define(["module"],function(e){var f=e.config?e.config():{};return{version:"2.0.4",load:function [...]
+//# sourceMappingURL=../maps/i18n.js.map
\ No newline at end of file
diff --git a/static/scripts/layout/generic-nav-view.js b/static/scripts/layout/generic-nav-view.js
new file mode 100644
index 0000000..b137bb5
--- /dev/null
+++ b/static/scripts/layout/generic-nav-view.js
@@ -0,0 +1,2 @@
+define(["mvc/ui/ui-modal"],function(a){var b=Backbone.View.extend({initialize:function(){this.modal=null},makeModalIframe:function(){var c=window.Galaxy.config.communication_server_host,d=window.Galaxy.config.communication_server_port,e=escape(window.Galaxy.user.attributes.username),f=escape(window.Galaxy.config.persistent_communication_rooms),g="?username="+e+"&persistent_communication_rooms="+f,h=c+":"+d+g,i=null,j=null,k='<iframe class="f-iframe fade in communication-iframe" src="'+h+ [...]
+//# sourceMappingURL=../../maps/layout/generic-nav-view.js.map
\ No newline at end of file
diff --git a/static/scripts/layout/masthead.js b/static/scripts/layout/masthead.js
new file mode 100644
index 0000000..625f8a2
--- /dev/null
+++ b/static/scripts/layout/masthead.js
@@ -0,0 +1,2 @@
+define(["utils/utils","layout/menu","layout/scratchbook","mvc/user/user-quotameter"],function(a,b,c,d){var e=Backbone.View.extend({initialize:function(a){var e=this;this.options=a,this.setElement(this._template()),this.$navbarBrandLink=this.$(".navbar-brand-link"),this.$navbarBrandImage=this.$(".navbar-brand-image"),this.$navbarBrandTitle=this.$(".navbar-brand-title"),this.$navbarTabs=this.$(".navbar-tabs"),this.$quoteMeter=this.$(".quota-meter-container"),this.collection=new b.Collectio [...]
+//# sourceMappingURL=../../maps/layout/masthead.js.map
\ No newline at end of file
diff --git a/static/scripts/layout/menu.js b/static/scripts/layout/menu.js
new file mode 100644
index 0000000..d4a726d
--- /dev/null
+++ b/static/scripts/layout/menu.js
@@ -0,0 +1,2 @@
+define(["mvc/tours","layout/generic-nav-view","mvc/webhooks"],function(a,b,c){var d=Backbone.Collection.extend({model:Backbone.Model.extend({defaults:{visible:!0,target:"_parent"}}),fetch:function(d){d=d||{},this.reset();var e=new b.GenericNavView;this.add(e.render()),this.add({id:"analysis",title:"Analyze Data",url:"",tooltip:"Analysis home view"}),this.add({id:"workflow",title:"Workflow",url:"workflow",tooltip:"Chain tools into workflows",disabled:!Galaxy.user.id}),this.add({id:"shared [...]
+//# sourceMappingURL=../../maps/layout/menu.js.map
\ No newline at end of file
diff --git a/static/scripts/layout/modal.js b/static/scripts/layout/modal.js
new file mode 100644
index 0000000..75449cf
--- /dev/null
+++ b/static/scripts/layout/modal.js
@@ -0,0 +1,2 @@
+define(["jquery"],function(a){"use strict";function b(){h.hide()}function c(a,b,c,d,e){h.setContent({title:a,body:b,buttons:c,extra_buttons:d}),h.show({backdrop:!0},e)}function d(a,b,c,d,e){h.setContent({title:a,body:b,buttons:c,extra_buttons:d}),h.show({backdrop:!1},e)}function e(a){var c=a.width||"600",d=a.height||"400",e=a.scroll||"auto";f("#overlay-background").bind("click.overlay",function(){b(),f("#overlay-background").unbind("click.overlay")}),h.setContent({closeButton:!0,title:"& [...]
+//# sourceMappingURL=../../maps/layout/modal.js.map
\ No newline at end of file
diff --git a/static/scripts/layout/page.js b/static/scripts/layout/page.js
new file mode 100644
index 0000000..6eab1f5
--- /dev/null
+++ b/static/scripts/layout/page.js
@@ -0,0 +1,2 @@
+define(["layout/masthead","layout/panel","mvc/ui/ui-modal","mvc/base-mvc"],function(a,b,c,d){var e=Backbone.View.extend(d.LoggableMixin).extend({_logNamespace:"layout",el:"body",className:"full-content",_panelIds:["left","center","right"],defaultOptions:{message_box_visible:!1,message_box_content:"",message_box_class:"info",show_inactivity_warning:!1,inactivity_box_content:""},initialize:function(b){this.log(this+".initialize:",b),_.extend(this,_.pick(b,this._panelIds)),this.options=_.de [...]
+//# sourceMappingURL=../../maps/layout/page.js.map
\ No newline at end of file
diff --git a/static/scripts/layout/panel.js b/static/scripts/layout/panel.js
new file mode 100644
index 0000000..ab31a0b
--- /dev/null
+++ b/static/scripts/layout/panel.js
@@ -0,0 +1,2 @@
+define(["jquery","libs/underscore","libs/backbone","mvc/base-mvc"],function(a,b,c,d){"use strict";var e=a,f=160,g=800,h=c.View.extend(d.LoggableMixin).extend({_logNamespace:"layout",initialize:function(a){this.log(this+".initialize:",a),this.title=a.title||this.title||"",this.hidden=!1,this.savedSize=null,this.hiddenByTool=!1},$center:function(){return this.$el.siblings("#center")},$toggleButton:function(){return this.$(".unified-panel-footer > .panel-collapse")},render:function(){this.l [...]
+//# sourceMappingURL=../../maps/layout/panel.js.map
\ No newline at end of file
diff --git a/static/scripts/layout/scratchbook.js b/static/scripts/layout/scratchbook.js
new file mode 100644
index 0000000..4db47e1
--- /dev/null
+++ b/static/scripts/layout/scratchbook.js
@@ -0,0 +1,2 @@
+define(["mvc/ui/ui-frames"],function(a){return Backbone.View.extend({initialize:function(b){var c=this;b=b||{},this.frames=new a.View({visible:!1}),this.setElement(this.frames.$el),this.buttonActive=b.collection.add({id:"enable-scratchbook",icon:"fa-th",tooltip:"Enable/Disable Scratchbook",onclick:function(){c.active=!c.active,c.buttonActive.set({toggle:c.active,show_note:c.active,note_cls:c.active&&"fa fa-check"}),!c.active&&c.frames.hide()},onbeforeunload:function(){return c.frames.len [...]
+//# sourceMappingURL=../../maps/layout/scratchbook.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/backbone.js b/static/scripts/libs/backbone.js
new file mode 100644
index 0000000..600e90a
--- /dev/null
+++ b/static/scripts/libs/backbone.js
@@ -0,0 +1,2 @@
+!function(a){var b="object"==typeof self&&self.self===self&&self||"object"==typeof global&&global.global===global&&global;if("function"==typeof define&&define.amd)define(["underscore","jquery","exports"],function(c,d,e){b.Backbone=a(b,e,c,d)});else if("undefined"!=typeof exports){var c,d=require("underscore");try{c=require("jquery")}catch(e){}a(b,exports,d,c)}else b.Backbone=a(b,{},b._,b.jQuery||b.Zepto||b.ender||b.$)}(function(a,b,c,d){var e=a.Backbone,f=Array.prototype.slice;b.VERSION= [...]
+//# sourceMappingURL=../../maps/libs/backbone.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/bbi/bigwig.js b/static/scripts/libs/bbi/bigwig.js
new file mode 100644
index 0000000..f1bd397
--- /dev/null
+++ b/static/scripts/libs/bbi/bigwig.js
@@ -0,0 +1,2 @@
+define(["libs/bbi/spans","libs/bbi/jszlib","libs/bbi/jquery-ajax-native"],function(a,b){"use strict";function c(){}function d(a){a&&(this.id=a)}function e(a,b){return a[b+3]<<24|a[b+2]<<16|a[b+1]<<8|a[b]}function f(a,b,c){Math.pow(10,6);return $.ajax({type:"GET",dataType:"native",url:a,timeout:5e3,beforeSend:function(a){a.setRequestHeader("Range","bytes="+b+"-"+(b+(c-1)))},xhrFields:{responseType:"arraybuffer"}})}function g(a,b){var c=a[b]+a[b+1]*x+a[b+2]*y+a[b+3]*z+a[b+4]*A;return c}fun [...]
+//# sourceMappingURL=../../../maps/libs/bbi/bigwig.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/bbi/jquery-ajax-native.js b/static/scripts/libs/bbi/jquery-ajax-native.js
new file mode 100644
index 0000000..18f54b8
--- /dev/null
+++ b/static/scripts/libs/bbi/jquery-ajax-native.js
@@ -0,0 +1,2 @@
+!function(a,b){"function"==typeof define&&define.amd?define(["jquery"],b):"object"==typeof exports?module.exports=b(require("jquery")):b(a.jQuery)}(this,function(a){var b=a.ajaxSettings;b.responseFields.native="responseNative",b.converters["* native"]=!0;var c={},d=0,e={0:200,1223:204},f={},g=jQuery.ajaxSettings.xhr();window.ActiveXObject&&a(window).on("unload",function(){for(var a in f)f[a]()}),c.cors=!!g&&"withCredentials"in g,c.ajax=g=!!g,a.ajaxTransport("native",function(a){var b;ret [...]
+//# sourceMappingURL=../../../maps/libs/bbi/jquery-ajax-native.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/bbi/jszlib.js b/static/scripts/libs/bbi/jszlib.js
new file mode 100644
index 0000000..c208fb5
--- /dev/null
+++ b/static/scripts/libs/bbi/jszlib.js
@@ -0,0 +1,2 @@
+define([],function(){function a(){}function b(){this.was=[0]}function c(a,b,c){this.hufts=new Int32Array(3*m),this.window=new Uint8Array(c),this.end=c,this.checkfn=b,this.mode=O,this.reset(a,null),this.left=0,this.table=0,this.index=0,this.blens=null,this.bb=new Int32Array(1),this.tb=new Int32Array(1),this.codes=new d,this.last=0,this.bitk=0,this.bitb=0,this.read=0,this.write=0,this.check=0,this.inftree=new e}function d(){}function e(){}function f(a,b,c,d){return a[0]=Y,b[0]=Z,c[0]=$,d[0 [...]
+//# sourceMappingURL=../../../maps/libs/bbi/jszlib.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/bbi/spans.js b/static/scripts/libs/bbi/spans.js
new file mode 100644
index 0000000..0fcb902
--- /dev/null
+++ b/static/scripts/libs/bbi/spans.js
@@ -0,0 +1,2 @@
+define([],function(){"use strict";function a(a,b){if("number"!=typeof a||"number"!=typeof b)throw"Bad range "+a+","+b;this._min=a,this._max=b}function b(a){var b=a.sort(g),c=[],d=b.shift();b.forEach(function(a){a._min<=d._max?a._max>d._max&&(d._max=a._max):(c.push(d),d=a)}),c.push(d),this._ranges=c}function c(a,c){return a instanceof b||(a instanceof Array||(a=[a]),a=new b(a)),c&&a.insertRange(c),a}function d(c,d){for(var e=c.ranges(),f=d.ranges(),g=e.length,h=f.length,i=0,j=0,k=[];g>i&& [...]
+//# sourceMappingURL=../../../maps/libs/bbi/spans.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/bibtex.js b/static/scripts/libs/bibtex.js
new file mode 100644
index 0000000..4552187
--- /dev/null
+++ b/static/scripts/libs/bibtex.js
@@ -0,0 +1,3 @@
+function BibtexParser(a){function b(a){d.push(a)}if("string"==typeof a){var c={},d=[],e=BibtexParser.call(c,b);return e.parse(a),{entries:d,errors:e.getErrors()}}if("function"!=typeof a)throw"Invalid parser construction.";return this.STATES_={ENTRY_OR_JUNK:0,OBJECT_TYPE:1,ENTRY_KEY:2,KV_KEY:3,EQUALS:4,KV_VALUE:5},this.DATA_={},this.CALLBACK_=a,this.CHAR_=0,this.LINE_=1,this.CHAR_IN_LINE_=0,this.SKIPWS_=!0,this.SKIPCOMMENT_=!0,this.PARSETMP_={},this.SKIPTILLEOL_=!1,this.VALBRACES_=null,th [...]
+this}"undefined"!=typeof module&&module.exports&&(module.exports=BibtexParser);
+//# sourceMappingURL=../../maps/libs/bibtex.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/bootstrap-tour.js b/static/scripts/libs/bootstrap-tour.js
new file mode 100644
index 0000000..1720103
--- /dev/null
+++ b/static/scripts/libs/bootstrap-tour.js
@@ -0,0 +1,2 @@
+!function(a,b){var c,d;return d=b.document,c=function(){function c(c){var d;try{d=b.localStorage}catch(e){d=!1}this._options=a.extend({name:"tour",steps:[],container:"body",autoscroll:!0,keyboard:!0,storage:d,debug:!1,backdrop:!1,backdropContainer:"body",backdropPadding:0,redirect:!0,orphan:!1,duration:!1,delay:!1,basePath:"",template:'<div class="popover" role="tooltip"> <div class="arrow"></div> <h3 class="popover-title"></h3> <div class="popover-content"></div> <div class="popover-nav [...]
+//# sourceMappingURL=../../maps/libs/bootstrap-tour.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/bootstrap.js b/static/scripts/libs/bootstrap.js
new file mode 100644
index 0000000..c41494d
--- /dev/null
+++ b/static/scripts/libs/bootstrap.js
@@ -0,0 +1,2 @@
++function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]}}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one(a.support.transition.end,function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a [...]
+//# sourceMappingURL=../../maps/libs/bootstrap.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/common-libs.js b/static/scripts/libs/common-libs.js
new file mode 100644
index 0000000..ea2b5f3
--- /dev/null
+++ b/static/scripts/libs/common-libs.js
@@ -0,0 +1,2 @@
+var jquery=require("jquery");window.jQuery=window.$=jquery,require("./jquery/jquery.migrate"),require("./jquery/select2"),require("./jquery/jquery.event.hover"),require("./jquery/jquery.form"),require("./jquery/jquery.rating"),require("./jquery.sparklines"),require("./bootstrap");var _=require("./underscore");window._=_;var Backbone=require("./backbone");window.Backbone=Backbone;var Handlebars=require("./handlebars.runtime");window.Handlebars=Handlebars,require("../galaxy.base"),require( [...]
+//# sourceMappingURL=../../maps/libs/common-libs.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/d3.js b/static/scripts/libs/d3.js
new file mode 100755
index 0000000..ffd3cd3
--- /dev/null
+++ b/static/scripts/libs/d3.js
@@ -0,0 +1,7 @@
+!function(){function a(a){return a&&(a.ownerDocument||a.document||a).documentElement}function b(a){return a&&(a.ownerDocument&&a.ownerDocument.defaultView||a.document&&a||a.defaultView)}function c(a,b){return b>a?-1:a>b?1:a>=b?0:0/0}function d(a){return null===a?0/0:+a}function e(a){return!isNaN(a)}function f(a){return{left:function(b,c,d,e){for(arguments.length<3&&(d=0),arguments.length<4&&(e=b.length);e>d;){var f=d+e>>>1;a(b[f],c)<0?d=f+1:e=f}return d},right:function(b,c,d,e){for(argum [...]
+d(),y.point=j,y.lineEnd=k}function j(a,b){f(l=a,m=b),n=t,o=u,p=v,q=w,r=x,y.point=f}function k(){e(t,u,s,v,w,x,n,o,l,p,q,r,h,b),y.lineEnd=g,g()}var l,m,n,o,p,q,r,s,t,u,v,w,x,y={point:c,lineStart:d,lineEnd:g,polygonStart:function(){b.polygonStart(),y.lineStart=i},polygonEnd:function(){b.polygonEnd(),y.lineStart=d}};return y}function e(b,c,d,h,i,j,k,l,m,n,o,p,q,r){var s=k-b,t=l-c,u=s*s+t*t;if(u>4*f&&q--){var v=h+n,w=i+o,x=j+p,y=Math.sqrt(v*v+w*w+x*x),z=Math.asin(x/=y),A=tg(tg(x)-1)<Kg||tg(d [...]
+return n}:Aa(d),x=e===f?function(){return o}:Aa(f);++s<t;)g.call(this,m=b[s],s)?(q.push([n=+u.call(this,m,s),o=+v.call(this,m,s)]),r.push([+w.call(this,m,s),+x.call(this,m,s)])):q.length&&(i(),q=[],r=[]);return q.length&&i(),p.length?p.join(""):null}var c=Cc,d=Cc,e=0,f=Dc,g=Db,h=uf,i=h.key,j=h,k="L",l=.7;return b.x=function(a){return arguments.length?(c=d=a,b):d},b.x0=function(a){return arguments.length?(c=a,b):c},b.x1=function(a){return arguments.length?(d=a,b):d},b.y=function(a){return [...]
+shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});hg.format=th.numberFormat,hg.geo={},jb.prototype={s:0,t:0,add:function(a){kb(a,this.t,uh),kb(uh.s,this.s,this),this.s?this.t+=uh.t:this.s=uh.t},reset:function(){this.s=this.t=0},valueOf:function(){return this.s}};var uh=new jb;hg.g [...]
+
+if(b==c.dx){for((d||k>c.dy)&&(k=c.dy);++f<g;)e=a[f],e.x=h,e.y=j,e.dy=k,h+=e.dx=Math.min(c.x+c.dx-h,k?i(e.area/k):0);e.z=!0,e.dx+=c.x+c.dx-h,c.y+=k,c.dy-=k}else{for((d||k>c.dx)&&(k=c.dx);++f<g;)e=a[f],e.x=h,e.y=j,e.dx=k,j+=e.dy=Math.min(c.y+c.dy-j,k?i(e.area/k):0);e.z=!1,e.dy+=c.y+c.dy-j,c.x+=k,c.dx-=k}}function f(d){var e=g||h(d),f=e[0];return f.x=f.y=0,f.value?(f.dx=j[0],f.dy=j[1]):f.dx=f.dy=0,g&&h.revalue(f),a([f],f.dx*f.dy/f.value),(g?c:b)(f),m&&(g=e),e}var g,h=hg.layout.hierarchy(),i [...]
+//# sourceMappingURL=../../maps/libs/d3.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/farbtastic.js b/static/scripts/libs/farbtastic.js
new file mode 100644
index 0000000..624ccff
--- /dev/null
+++ b/static/scripts/libs/farbtastic.js
@@ -0,0 +1,2 @@
+!function(a){var b=!1;a.fn.farbtastic=function(b){return a.farbtastic(this,b),this},a.farbtastic=function(b,c){var b=a(b)[0];return b.farbtastic||(b.farbtastic=new a._farbtastic(b,c))},a._farbtastic=function(c,d){var e=this;e.linkTo=function(b){return"object"==typeof e.callback&&a(e.callback).unbind("keyup",e.updateValue),e.color=null,"function"==typeof b?e.callback=b:("object"==typeof b||"string"==typeof b)&&(e.callback=a(b),e.callback.bind("keyup",e.updateValue),e.callback[0].value&&e. [...]
+//# sourceMappingURL=../../maps/libs/farbtastic.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery.complexify.js b/static/scripts/libs/jquery.complexify.js
new file mode 100644
index 0000000..48674a8
--- /dev/null
+++ b/static/scripts/libs/jquery.complexify.js
@@ -0,0 +1,2 @@
+!function(a){a.fn.extend({complexify:function(b,c){function d(a,b){for(var c=a.length-1;c>=0;c--)if(b[0]<=a.charCodeAt(c)&&a.charCodeAt(c)<=b[1])return b[1]-b[0]+1;return 0}function e(c){if("strict"===b.banMode){for(var d=0;d<b.bannedPasswords.length;d++)if(-1!==c.toLowerCase().indexOf(b.bannedPasswords[d].toLowerCase()))return!0;return!1}return a.inArray(c,b.bannedPasswords)>-1?!0:!1}function f(){var f=a(this).val(),j=0,k=!1;if(e(f))j=1;else for(var l=i.length-1;l>=0;l--)j+=d(f,i[l]);j= [...]
+//# sourceMappingURL=../../maps/libs/jquery.complexify.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery.sparklines.js b/static/scripts/libs/jquery.sparklines.js
new file mode 100644
index 0000000..7254f9a
--- /dev/null
+++ b/static/scripts/libs/jquery.sparklines.js
@@ -0,0 +1,3 @@
+!function(a,b,c){!function(a){"function"==typeof define&&define.amd?define(["jquery"],a):jQuery&&!jQuery.fn.sparkline&&a(jQuery)}(function(d){"use strict";var e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C,D,E,F,G,H,I,J,K={},L=0;e=function(){return{common:{type:"line",lineColor:"#00f",fillColor:"#cdf",defaultPixelsPerValue:3,width:"auto",height:"auto",composite:!1,tagValuesAttribute:"values",tagOptionsPrefix:"spark",enableTagOptions:!1,enableHighlight:!0,highlightLighten:1.4,tooltipSk [...]
+for(i&&e.drawCircle(h,h,b.floor(h-i/2),g.get("borderColor"),c,i).append(),d=f.length;d--;)f[d]&&(a=this.renderSlice(d).append(),this.valueShapes[d]=a.id,this.shapes[a.id]=d);e.render()}}}),d.fn.sparkline.box=C=f(d.fn.sparkline._base,{type:"box",init:function(a,b,c,e,f){C._super.init.call(this,a,b,c,e,f),this.values=d.map(b,Number),this.width="auto"===c.get("width")?"4.0em":e,this.initTarget(),this.values.length||(this.disabled=1)},getRegion:function(){return 1},getCurrentRegionFields:fun [...]
+//# sourceMappingURL=../../maps/libs/jquery.sparklines.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jqtouch.js b/static/scripts/libs/jquery/jqtouch.js
new file mode 100644
index 0000000..7d24f64
--- /dev/null
+++ b/static/scripts/libs/jquery/jqtouch.js
@@ -0,0 +1,2 @@
+!function(){$.jQTouch=function(a){function b(a){void 0!==window.console&&F.debug===!0&&console.warn(a)}function c(a){"string"==typeof a.selector&&"string"==typeof a.name&&O.push(a)}function d(a){"string"==typeof a.name&&"function"==typeof a.isSupported&&"function"==typeof a.fn&&N.push(a)}function e(a,b){D.unshift({page:a,animation:b,hash:"#"+a.attr("id"),id:a.attr("id")})}function f(a){var c=$(a.target);c.is(I.join(", "))||(c=$(a.target).closest(I.join(", "))),c&&c.attr("href")&&!c.isExt [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jqtouch.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery-ui.js b/static/scripts/libs/jquery/jquery-ui.js
new file mode 100755
index 0000000..40c88d7
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery-ui.js
@@ -0,0 +1,4 @@
+!function(a,b){function c(b,c){var e,f,g,h=b.nodeName.toLowerCase();return"area"===h?(e=b.parentNode,f=e.name,b.href&&f&&"map"===e.nodeName.toLowerCase()?(g=a("img[usemap=#"+f+"]")[0],!!g&&d(g)):!1):(/input|select|textarea|button|object/.test(h)?!b.disabled:"a"===h?b.href||c:c)&&d(b)}function d(b){return a.expr.filters.visible(b)&&!a(b).parents().andSelf().filter(function(){return"hidden"===a.css(this,"visibility")}).length}var e=0,f=/^ui-id-\d+$/;a.ui=a.ui||{},a.ui.version||(a.extend(a. [...]
+
+},refresh:function(){var b=this.element.is(":disabled")||this.element.hasClass("ui-button-disabled");b!==this.options.disabled&&this._setOption("disabled",b),"radio"===this.type?j(this.element[0]).each(function(){a(this).is(":checked")?a(this).button("widget").addClass("ui-state-active").attr("aria-pressed","true"):a(this).button("widget").removeClass("ui-state-active").attr("aria-pressed","false")}):"checkbox"===this.type&&(this.element.is(":checked")?this.buttonElement.addClass("ui-sta [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery-ui.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.autocomplete.js b/static/scripts/libs/jquery/jquery.autocomplete.js
new file mode 100644
index 0000000..7dc5f68
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.autocomplete.js
@@ -0,0 +1,2 @@
+!function(a){"use strict";a.fn.autocomplete=function(b){var c;arguments.length>1?(c=b,b=arguments[1],b.url=c):"string"==typeof b&&(c=b,b={url:c});var d=a.extend({},a.fn.autocomplete.defaults,b);return this.each(function(){var b=a(this);b.data("autocompleter",new a.Autocompleter(b,a.meta?a.extend({},d,b.data()):d))})},a.fn.autocomplete.defaults={inputClass:"acInput",loadingClass:"acLoading",resultsClass:"acResults",selectClass:"acSelect",queryParamName:"q",extraParams:{},remoteDataType:!1 [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.autocomplete.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.cookie.js b/static/scripts/libs/jquery/jquery.cookie.js
new file mode 100644
index 0000000..00baf8b
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.cookie.js
@@ -0,0 +1,2 @@
+jQuery.cookie=function(a,b,c){if("undefined"==typeof b){var d=null;if(document.cookie&&""!=document.cookie)for(var e=document.cookie.split(";"),f=0;f<e.length;f++){var g=jQuery.trim(e[f]);if(g.substring(0,a.length+1)==a+"="){d=decodeURIComponent(g.substring(a.length+1));break}}return d}c=c||{},null===b&&(b="",c=$.extend({},c),c.expires=-1);var h="";if(c.expires&&("number"==typeof c.expires||c.expires.toUTCString)){var i;"number"==typeof c.expires?(i=new Date,i.setTime(i.getTime()+24*c.ex [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.cookie.js.map
\ No newline at end of file
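The minified source above is the single-function jQuery cookie plugin: called with only a name it reads document.cookie; called with a value it writes the cookie; and a null value forces expires = -1, deleting it. A minimal usage sketch, assuming jQuery plus this file are loaded (the cookie name and value are illustrative):

    // Write a cookie that expires in 7 days.
    jQuery.cookie("galaxy_pref", "collapsed", { expires: 7, path: "/" });

    // Read it back; returns null when the cookie is absent.
    var pref = jQuery.cookie("galaxy_pref");

    // A null value sets expires to -1, i.e. removes the cookie.
    jQuery.cookie("galaxy_pref", null);
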
diff --git a/static/scripts/libs/jquery/jquery.dynatree.js b/static/scripts/libs/jquery/jquery.dynatree.js
new file mode 100644
index 0000000..6af152a
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.dynatree.js
@@ -0,0 +1,3 @@
+function _log(a){if(_canLog){var b=Array.prototype.slice.apply(arguments,[1]),c=new Date,d=c.getHours()+":"+c.getMinutes()+":"+c.getSeconds()+"."+c.getMilliseconds();b[0]=d+" - "+b[0];try{switch(a){case"info":window.console.info.apply(window.console,b);break;case"warn":window.console.warn.apply(window.console,b);break;default:window.console.log.apply(window.console,b)}}catch(e){window.console?-2146827850===e.number&&window.console.log(b.join(", ")):_canLog=!1}}}function logMsg(){Array.pr [...]
+for(var b=this.getSelectedNodes(a),c=this.$tree.attr("name")||this.$tree.attr("id"),d=[],e=0,f=b.length;f>e;e++)d.push({name:c,value:b[e].data.key});return d},getPersistData:function(){return this.persistence.toDict()},logDebug:function(){this.options.debugLevel>=2&&(Array.prototype.unshift.apply(arguments,["debug"]),_log.apply(this,arguments))},logInfo:function(){this.options.debugLevel>=1&&(Array.prototype.unshift.apply(arguments,["info"]),_log.apply(this,arguments))},logWarning:functi [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.dynatree.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.event.drag.js b/static/scripts/libs/jquery/jquery.event.drag.js
new file mode 100644
index 0000000..47aaef3
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.event.drag.js
@@ -0,0 +1,2 @@
+!function(a){a.fn.drag=function(b,c,d){var e="string"==typeof b?b:"",f=a.isFunction(b)?b:a.isFunction(c)?c:null;return 0!==e.indexOf("drag")&&(e="drag"+e),d=(b==f?c:d)||{},f?this.bind(e,d,f):this.trigger(e)};var b=a.event,c=b.special,d=c.drag={defaults:{which:1,distance:0,not:":input",handle:null,relative:!1,drop:!0,click:!1},datakey:"dragdata",noBubble:!0,add:function(b){var c=a.data(this,d.datakey),e=b.data||{};c.related+=1,a.each(d.defaults,function(a){void 0!==e[a]&&(c[a]=e[a])})},re [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.event.drag.js.map
\ No newline at end of file
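The head of this file shows the plugin's entry point: $.fn.drag(type, fn, opts) prefixes the event name with "drag" (so "start" becomes "dragstart") and either binds the handler or, without one, triggers the event. A hedged sketch; the element id is illustrative, and the second handler argument (the plugin's drag-data object, stored under the "dragdata" key above) is assumed from the plugin's conventions:

    // .drag("start", fn) binds "dragstart"; .drag(fn) binds plain "drag".
    $("#panel")
        .drag("start", function (ev, dd) {
            console.log("drag began at", ev.pageX, ev.pageY);
        })
        .drag(function (ev, dd) {
            // movement handling would go here
        });

    // With no handler, the same call triggers the event instead.
    $("#panel").drag("start");
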
diff --git a/static/scripts/libs/jquery/jquery.event.drop.js b/static/scripts/libs/jquery/jquery.event.drop.js
new file mode 100644
index 0000000..97a4174
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.event.drop.js
@@ -0,0 +1,2 @@
+!function(a){a.fn.drop=function(b,c,d){var e="string"==typeof b?b:"",f=a.isFunction(b)?b:a.isFunction(c)?c:null;return 0!==e.indexOf("drop")&&(e="drop"+e),d=(b==f?c:d)||{},f?this.bind(e,d,f):this.trigger(e)},a.drop=function(b){b=b||{},d.multi=b.multi===!0?1/0:b.multi===!1?1:isNaN(b.multi)?d.multi:b.multi,d.delay=b.delay||d.delay,d.tolerance=a.isFunction(b.tolerance)?b.tolerance:null===b.tolerance?null:d.tolerance,d.mode=b.mode||d.mode||"intersect"};var b=a.event,c=b.special,d=a.event.spe [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.event.drop.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.event.hover.js b/static/scripts/libs/jquery/jquery.event.hover.js
new file mode 100644
index 0000000..75c3232
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.event.hover.js
@@ -0,0 +1,2 @@
+!function(a){function b(c){function d(){b(e)}var e=c.data||c;switch(c.type){case"mouseenter":e.dist2=0,e.event=c,c.type="hoverstart",a.event.dispatch.call(this,c)!==!1&&(e.elem=this,a.event.add(this,"mousemove",b,e),e.timer=setTimeout(d,e.delay));break;case"mousemove":e.dist2+=Math.pow(c.pageX-e.event.pageX,2)+Math.pow(c.pageY-e.event.pageY,2),e.event=c;break;case"mouseleave":clearTimeout(e.timer),e.hovered?(c.type="hoverend",a.event.dispatch.call(this,c),e.hovered--):a.event.remove(e.el [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.event.hover.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.form.js b/static/scripts/libs/jquery/jquery.form.js
new file mode 100644
index 0000000..36311de
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.form.js
@@ -0,0 +1,2 @@
+!function(a){"use strict";function b(b){var c=b.data;b.isDefaultPrevented()||(b.preventDefault(),a(b.target).ajaxSubmit(c))}function c(b){var c=b.target,d=a(c);if(!d.is("[type=submit],[type=image]")){var e=d.closest("[type=submit]");if(0===e.length)return;c=e[0]}var f=this;if(f.clk=c,"image"==c.type)if(void 0!==b.offsetX)f.clk_x=b.offsetX,f.clk_y=b.offsetY;else if("function"==typeof a.fn.offset){var g=d.offset();f.clk_x=b.pageX-g.left,f.clk_y=b.pageY-g.top}else f.clk_x=b.pageX-c.offsetLe [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.form.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.js b/static/scripts/libs/jquery/jquery.js
new file mode 100644
index 0000000..ba0026d
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.js
@@ -0,0 +1,5 @@
+!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){function c(a){var b=!!a&&"length"in a&&a.length,c=na.type(a);return"function"===c||na.isWindow(a)?!1:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}function d(a,b,c){if(na.isFunction(b))return na.grep(a,function(a,d){ret [...]
+t=n&&j[2],m=n&&q.childNodes[n];m=++n&&m&&m[p]||(t=n=0)||o.pop();)if(1===m.nodeType&&++t&&m===b){k[a]=[P,n,t];break}}else if(s&&(m=b,l=m[N]||(m[N]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===P&&j[1],t=n),t===!1)for(;(m=++n&&m&&m[p]||(t=n=0)||o.pop())&&((h?m.nodeName.toLowerCase()!==r:1!==m.nodeType)||!++t||(s&&(l=m[N]||(m[N]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),k[a]=[P,t]),m!==b)););return t-=e,t===d||t%d===0&&t/d>=0}}},PSEUDO:function(a,c){var e,f=w.pseudos[a]||w.setFi [...]
+na.inArray(this,a)<0&&(na.cleanData(o(this)),c&&c.replaceChild(b,this))},a)}}),na.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){na.fn[a]=function(a){for(var c,d=0,e=[],f=na(a),g=f.length-1;g>=d;d++)c=d===g?this:this.clone(!0),na(f[d])[b](c),ga.apply(e,c.get());return this.pushStack(e)}});var ib,jb={HTML:"block",BODY:"block"},kb=/^margin/,lb=new RegExp("^("+Ja+")(?!px)[a-z%]+$","i"),mb=function(a,b,c,d){var e, [...]
+left:b.left-c.left-na.css(d,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent;a&&!na.nodeName(a,"html")&&"static"===na.css(a,"position");)a=a.offsetParent;return a||nb})}}),na.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,b){var c=/Y/.test(b);na.fn[a]=function(d){return Na(this,function(a,d,e){var f=ba(a);return void 0===e?f?b in f?f[b]:f.document.documentElement[d]:a[d]:void(f?f.scrollTo(c?na(f).scrollLeft():e,c?e:na( [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.migrate.js b/static/scripts/libs/jquery/jquery.migrate.js
new file mode 100644
index 0000000..7898e6e
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.migrate.js
@@ -0,0 +1,2 @@
+!function(a,b,c){function d(c){var d=b.console;f[c]||(f[c]=!0,a.migrateWarnings.push(c),d&&d.warn&&!a.migrateMute&&(d.warn("JQMIGRATE: "+c),a.migrateTrace&&d.trace&&d.trace()))}function e(b,c,e,f){if(Object.defineProperty)try{return void Object.defineProperty(b,c,{configurable:!0,enumerable:!0,get:function(){return d(f),e},set:function(a){d(f),e=a}})}catch(g){}a._definePropertyBroken=!0,b[c]=e}a.migrateVersion="1.4.0";var f={};a.migrateWarnings=[],b.console&&b.console.log&&b.console.log( [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.migrate.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.mousewheel.js b/static/scripts/libs/jquery/jquery.mousewheel.js
new file mode 100644
index 0000000..5bf2d81
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.mousewheel.js
@@ -0,0 +1,2 @@
+!function(a){a(jQuery)}(function(a){function b(b){var e,f=b||window.event,g=[].slice.call(arguments,1),h=0,i=0,j=0,k=0,l=0;return b=a.event.fix(f),b.type="mousewheel",f.wheelDelta&&(h=f.wheelDelta),f.detail&&(h=-1*f.detail),f.deltaY&&(j=-1*f.deltaY,h=j),f.deltaX&&(i=f.deltaX,h=-1*i),void 0!==f.wheelDeltaY&&(j=f.wheelDeltaY),void 0!==f.wheelDeltaX&&(i=-1*f.wheelDeltaX),k=Math.abs(h),(!c||c>k)&&(c=k),l=Math.max(Math.abs(j),Math.abs(i)),(!d||d>l)&&(d=l),e=h>0?"floor":"ceil",h=Math[e](h/c),i [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.mousewheel.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.rating.js b/static/scripts/libs/jquery/jquery.rating.js
new file mode 100644
index 0000000..659cd23
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.rating.js
@@ -0,0 +1,2 @@
+window.jQuery&&function(a){if(!a.support.opacity&&!a.support.style)try{document.execCommand("BackgroundImageCache",!1,!0)}catch(b){}a.fn.rating=function(b){if(0==this.length)return this;if("string"==typeof arguments[0]){if(this.length>1){var c=arguments;return this.each(function(){a.fn.rating.apply(a(this),c)})}return a.fn.rating[arguments[0]].apply(this,a.makeArray(arguments).slice(1)||[]),this}var b=a.extend({},a.fn.rating.options,b||{});return a.fn.rating.calls++,this.not(".star-ratin [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.rating.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jquery.wymeditor.js b/static/scripts/libs/jquery/jquery.wymeditor.js
new file mode 100644
index 0000000..00d3382
--- /dev/null
+++ b/static/scripts/libs/jquery/jquery.wymeditor.js
@@ -0,0 +1,4 @@
+if(!WYMeditor)var WYMeditor={};!function(){if(window.console&&console.firebug)WYMeditor.console=window.console;else{var a=["log","debug","info","warn","error","assert","dir","dirxml","group","groupEnd","time","timeEnd","count","trace","profile","profileEnd"];WYMeditor.console={};for(var b=0;b<a.length;++b)WYMeditor.console[a[b]]=function(){}}}(),jQuery.extend(WYMeditor,{VERSION:"0.5-rc1",INSTANCES:[],STRINGS:[],SKINS:[],NAME:"name",INDEX:"{Wym_Index}",WYM_INDEX:"wym_index",BASE_PATH:"{Wy [...]
+required:["alt","src"]},input:{attributes:{0:"accept",1:"alt",checked:/^(checked)$/,disabled:/^(disabled)$/,maxlength:/^(\d)+$/,2:"name",readonly:/^(readonly)$/,size:/^(\d)+$/,3:"src",type:/^(button|checkbox|file|hidden|image|password|radio|reset|submit|text)$/,4:"value"},inside:"form"},ins:{attributes:{0:"cite",datetime:/^([0-9]){8}/}},24:"kbd",label:{attributes:["for"],inside:"form"},25:"legend",26:"li",link:{attributes:{0:"charset",1:"href",2:"hreflang",media:/^(all|braille|print|proj [...]
+this._doc.designMode="on";try{this._doc=iframe.contentWindow.document}catch(e){}},WYMeditor.WymClassExplorer.prototype._exec=function(a,b){switch(a){case WYMeditor.INDENT:case WYMeditor.OUTDENT:var c=this.findUp(this.container(),WYMeditor.LI);if(c){var d=c.parentNode.parentNode;(c.parentNode.childNodes.length>1||d.tagName.toLowerCase()==WYMeditor.OL||d.tagName.toLowerCase()==WYMeditor.UL)&&this._doc.execCommand(a)}break;default:b?this._doc.execCommand(a,!1,b):this._doc.execCommand(a)}thi [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jquery.wymeditor.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/jquery/jstorage.js b/static/scripts/libs/jquery/jstorage.js
new file mode 100644
index 0000000..0abdd83
--- /dev/null
+++ b/static/scripts/libs/jquery/jstorage.js
@@ -0,0 +1,2 @@
+!function(){function a(){var a=!1;if("localStorage"in window)try{window.localStorage.setItem("_tmptest","tmpval"),a=!0,window.localStorage.removeItem("_tmptest")}catch(b){}if(a)try{window.localStorage&&(v=window.localStorage,y="localStorage",B=v.jStorage_update)}catch(e){}else if("globalStorage"in window)try{window.globalStorage&&(v="localhost"==window.location.hostname?window.globalStorage["localhost.localdomain"]:window.globalStorage[window.location.hostname],y="globalStorage",B=v.jSto [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jstorage.js.map
\ No newline at end of file
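jStorage, bundled above, wraps localStorage (with a globalStorage fallback, per the feature detection visible in the source) behind a small key-value API. A usage sketch; the key and values are illustrative:

    // Persist any JSON-serializable value.
    $.jStorage.set("recent_tools", ["cat1", "sort1"]);

    // Read with a default for first-time visitors.
    var recent = $.jStorage.get("recent_tools", []);

    // Remove the key when it is no longer needed.
    $.jStorage.deleteKey("recent_tools");
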
diff --git a/static/scripts/libs/jquery/jstree.js b/static/scripts/libs/jquery/jstree.js
new file mode 100755
index 0000000..bbe55b4
--- /dev/null
+++ b/static/scripts/libs/jquery/jstree.js
@@ -0,0 +1,6 @@
+!function(a){"use strict";"function"==typeof define&&define.amd?define(["jquery"],a):a("object"==typeof exports?require("jquery"):jQuery)}(function(a,b){"use strict";if(!a.jstree){var c,d,e=0,f=!1,g=!1,h=!1,i=[],j=a("script:last").attr("src"),k=document,l=k.createElement("LI");l.setAttribute("role","treeitem"),c=k.createElement("I"),c.className="jstree-icon jstree-ocl",l.appendChild(c),c=k.createElement("A"),c.className="jstree-anchor",c.setAttribute("href","#"),d=k.createElement("I"),d. [...]
+
+return this.trigger("close_all",{node:b})}f=this.is_open(b)?f.find(".jstree-open").addBack():f.find(".jstree-open"),a(f.get().reverse()).each(function(){g.close_node(this,c||0)}),this.trigger("close_all",{node:b})},is_disabled:function(a){return a=this.get_node(a),a&&a.state&&a.state.disabled},enable_node:function(b){var c,d;if(a.isArray(b)){for(b=b.slice(),c=0,d=b.length;d>c;c++)this.enable_node(b[c]);return!0}return b=this.get_node(b),b&&"#"!==b.id?(b.state.disabled=!1,this.get_node(b, [...]
+g=this.get_node(g.parent)}},this)).on("move_node.jstree",a.proxy(function(b,c){var d,e,f,g,h,i=c.is_multi,j=c.old_parent,k=this.get_node(c.parent),l=this._model.data,m=this.settings.checkbox.tie_selection;if(!i)for(d=this.get_node(j);d&&"#"!==d.id;){for(e=0,f=0,g=d.children.length;g>f;f++)e+=l[d.children[f]].state[m?"selected":"checked"];if(e!==g)break;d.state[m?"selected":"checked"]=!0,this._data[m?"core":"checkbox"].selected.push(d.id),h=this.get_node(d,!0),h&&h.length&&h.children(".js [...]
+this._data.search.opn=[],this._data.search.dom=a()},this._search_open=function(b){var c=this;a.each(b.concat([]),function(d,e){if("#"===e)return!0;try{e=a("#"+e.replace(a.jstree.idregex,"\\$&"),c.element)}catch(f){}e&&e.length&&c.is_closed(e)&&(c._data.search.opn.push(e[0].id),c.open_node(e,function(){c._search_open(b)},0))})}},function(a){a.vakata.search=function(a,b,c){c=c||{},c.fuzzy!==!1&&(c.fuzzy=!0),a=c.caseSensitive?a:a.toLowerCase();var d,e,f,g,h=c.location||0,i=c.distance||100,j [...]
+//# sourceMappingURL=../../../maps/libs/jquery/jstree.js.map
\ No newline at end of file
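The jstree bundle above includes the core tree widget plus the checkbox and search plugins (both visible in the minified source). A minimal sketch of instantiating it, with an illustrative container id and node data:

    $("#folder-tree").jstree({
        core: { data: [{ text: "root", children: [{ text: "child" }] }] },
        plugins: ["checkbox", "search"]
    });

    // Selection changes arrive on the namespaced event.
    $("#folder-tree").on("changed.jstree", function (e, data) {
        console.log(data.selected);
    });
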
diff --git a/static/scripts/libs/jquery/select2.js b/static/scripts/libs/jquery/select2.js
new file mode 100755
index 0000000..1159cf0
--- /dev/null
+++ b/static/scripts/libs/jquery/select2.js
@@ -0,0 +1,4 @@
+!function(a){"undefined"==typeof a.fn.each2&&a.extend(a.fn,{each2:function(b){for(var c=a([0]),d=-1,e=this.length;++d<e&&(c.context=c[0]=this[d])&&b.call(c[0],d,c)!==!1;);return this}})}(jQuery),function(a,b){"use strict";function c(b){var c=a(document.createTextNode(""));b.before(c),c.before(b),c.remove()}function d(a){function b(a){return O[a]||a}return a.replace(/[^\u0000-\u007E]/g,b)}function e(a,b){for(var c=0,d=b.length;d>c;c+=1)if(g(a,b[c]))return c;return-1}function f(){var b=a(N [...]
+
+k.empty(),m.opts.populateResults.call(this,k,f.results,{term:j.val(),page:this.resultsPage,context:null}),f.more===!0&&y(l.formatLoadMore,"formatLoadMore")&&(k.append("<li class='select2-more-results'>"+l.escapeMarkup(z(l.formatLoadMore,l.element,this.resultsPage))+"</li>"),window.setTimeout(function(){m.loadMoreIfNeeded()},10)),this.postprocessResults(f,c),d(),this.opts.element.trigger({type:"select2-loaded",items:f})}})})}},cancel:function(){this.close()},blur:function(){this.opts.sele [...]
+//# sourceMappingURL=../../../maps/libs/jquery/select2.js.map
\ No newline at end of file
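select2 upgrades a plain <select> into a searchable dropdown; the source above shows it firing namespaced events such as "select2-loaded" with an items payload. A sketch with an illustrative element id and placeholder:

    $("#genome-select").select2({ placeholder: "Select a build", allowClear: true });

    // "select2-loaded" fires after results render (see the trigger above).
    $("#genome-select").on("select2-loaded", function (e) {
        console.log(e.items);
    });
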
diff --git a/static/scripts/libs/raven.js b/static/scripts/libs/raven.js
new file mode 100644
index 0000000..8c6dd4d
--- /dev/null
+++ b/static/scripts/libs/raven.js
@@ -0,0 +1,2 @@
+!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.Raven=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.cod [...]
+//# sourceMappingURL=../../maps/libs/raven.js.map
\ No newline at end of file
diff --git a/static/scripts/libs/require.js b/static/scripts/libs/require.js
new file mode 100644
index 0000000..6cbae86
--- /dev/null
+++ b/static/scripts/libs/require.js
@@ -0,0 +1,2 @@
+var requirejs,require,define;!function(global){function commentReplace(a,b,c,d){return d||""}function isFunction(a){return"[object Function]"===ostring.call(a)}function isArray(a){return"[object Array]"===ostring.call(a)}function each(a,b){if(a){var c;for(c=0;c<a.length&&(!a[c]||!b(a[c],c,a));c+=1);}}function eachReverse(a,b){if(a){var c;for(c=a.length-1;c>-1&&(!a[c]||!b(a[c],c,a));c-=1);}}function hasProp(a,b){return hasOwn.call(a,b)}function getOwn(a,b){return hasProp(a,b)&&a[b]}functi [...]
+//# sourceMappingURL=../../maps/libs/require.js.map
\ No newline at end of file
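RequireJS is what resolves the define([...], function(...){...}) wrappers used by virtually every Galaxy module in this diff. A minimal sketch of the same AMD pattern (the module id is illustrative):

    // Define a module exactly as the bundled Galaxy modules do.
    define("my/greeting", [], function () {
        "use strict";
        return { hello: function (name) { return "Hello, " + name; } };
    });

    // Load it asynchronously; the callback runs once dependencies resolve.
    require(["my/greeting"], function (greeting) {
        console.log(greeting.hello("Galaxy"));
    });
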
diff --git a/static/scripts/libs/toastr.js b/static/scripts/libs/toastr.js
new file mode 100644
index 0000000..7935e88
--- /dev/null
+++ b/static/scripts/libs/toastr.js
@@ -0,0 +1,2 @@
+!function(a){a([],function(){var a=jQuery;return function(){function b(a,b,c){return j({type:r.error,iconClass:l().iconClasses.error,message:a,optionsOverride:c,title:b})}function c(a,b,c){return j({type:r.info,iconClass:l().iconClasses.info,message:a,optionsOverride:c,title:b})}function d(a){o=a}function e(a,b,c){return j({type:r.success,iconClass:l().iconClasses.success,message:a,optionsOverride:c,title:b})}function f(a,b,c){return j({type:r.warning,iconClass:l().iconClasses.warning,me [...]
+//# sourceMappingURL=../../maps/libs/toastr.js.map
\ No newline at end of file
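The toastr helpers visible above (error, info, success, warning) all share the (message, title, optionsOverride) signature. A short sketch with illustrative text:

    toastr.success("History imported", "Done");
    toastr.error("Upload failed", "Error", { timeOut: 5000 });
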
diff --git a/static/scripts/libs/underscore.js b/static/scripts/libs/underscore.js
new file mode 100644
index 0000000..09e6a70
--- /dev/null
+++ b/static/scripts/libs/underscore.js
@@ -0,0 +1,2 @@
+(function(){function a(a){function b(b,c,d,e,f,g){for(;f>=0&&g>f;f+=a){var h=e?e[f]:f;d=c(d,b[h],h,b)}return d}return function(c,d,e,f){d=t(d,f,4);var g=!A(c)&&s.keys(c),h=(g||c).length,i=a>0?0:h-1;return arguments.length<3&&(e=c[g?g[i]:i],i+=a),b(c,d,e,g,i,h)}}function b(a){return function(b,c,d){c=u(c,d);for(var e=z(b),f=a>0?0:e-1;f>=0&&e>f;f+=a)if(c(b[f],f,b))return f;return-1}}function c(a,b,c){return function(d,e,f){var g=0,h=z(d);if("number"==typeof f)a>0?g=f>=0?f:Math.max(f+h,g):h [...]
+//# sourceMappingURL=../../maps/libs/underscore.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/annotation.js b/static/scripts/mvc/annotation.js
new file mode 100644
index 0000000..c702ff0
--- /dev/null
+++ b/static/scripts/mvc/annotation.js
@@ -0,0 +1,2 @@
+define(["mvc/base-mvc","utils/localization","ui/editable-text"],function(a,b){var c=Backbone.View.extend(a.LoggableMixin).extend(a.HiddenUntilActivatedViewMixin).extend({tagName:"div",className:"annotation-display",initialize:function(a){a=a||{},this.tooltipConfig=a.tooltipConfig||{placement:"bottom"},this.listenTo(this.model,"change:annotation",function(){this.render()}),this.hiddenUntilActivated(a.$activator,a)},render:function(){var a=this;return this.$el.html(this._template()),this.$ [...]
+//# sourceMappingURL=../../maps/mvc/annotation.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/base-mvc.js b/static/scripts/mvc/base-mvc.js
new file mode 100644
index 0000000..01786a7
--- /dev/null
+++ b/static/scripts/mvc/base-mvc.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","libs/backbone","utils/add-logging","utils/localization"],function(a,b,c,d){"use strict";function e(){var b=Array.prototype.slice.call(arguments,0),c=b.pop();return b.unshift(c),a.defaults.apply(a,b)}function f(b,c){c=c||"model";var e=a.template(b.join(""));return function(a,b){var f={view:b||{},_l:d};return f[c]=a||{},e(f)}}function g(a,b){b=b||{};var c=b.ascending?1:-1;return function(b,d){return b=b.get(a),d=d.get(a),(d>b?-1:b>d?1:0)*c}}var h={logger:null,_lo [...]
+//# sourceMappingURL=../../maps/mvc/base-mvc.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/base/controlled-fetch-collection.js b/static/scripts/mvc/base/controlled-fetch-collection.js
new file mode 100644
index 0000000..354d3f1
--- /dev/null
+++ b/static/scripts/mvc/base/controlled-fetch-collection.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","libs/backbone","mvc/base-mvc"],function(a,b,c){"use strict";var d=b.Collection.extend({initialize:function(a,c){b.Collection.prototype.initialize.call(this,a,c),this.setOrder(c.order||this.order,{silent:!0})},_setUpListeners:function(){return this.on({"changed-order":this.sort})},fetch:function(a){return a=this._buildFetchOptions(a),b.Collection.prototype.fetch.call(this,a)},_buildFetchOptions:function(b){b=a.clone(b)||{};var c=this;b.traditional=!0,b.data=b.da [...]
+//# sourceMappingURL=../../../maps/mvc/base/controlled-fetch-collection.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/citation/citation-model.js b/static/scripts/mvc/citation/citation-model.js
new file mode 100644
index 0000000..592801e
--- /dev/null
+++ b/static/scripts/mvc/citation/citation-model.js
@@ -0,0 +1,2 @@
+define(["libs/bibtex","mvc/base-mvc","utils/localization"],function(a,b){a=a||window.BibtexParser;var c="citation",d=Backbone.Model.extend(b.LoggableMixin).extend({_logNamespace:c,defaults:{content:""},initialize:function(){var b;try{b=a(this.attributes.content)}catch(c){return}if(b.errors.length){var d=b.errors.reduce(function(a,b){return a+"; "+b});this.log("Error parsing bibtex: "+d)}if(this._fields={},this.entry=_.first(b.entries),this.entry){var e=this.entry.Fields;for(var f in e){v [...]
+//# sourceMappingURL=../../../maps/mvc/citation/citation-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/citation/citation-view.js b/static/scripts/mvc/citation/citation-view.js
new file mode 100644
index 0000000..6bd43d7
--- /dev/null
+++ b/static/scripts/mvc/citation/citation-view.js
@@ -0,0 +1,2 @@
+define(["mvc/base-mvc","mvc/citation/citation-model","utils/localization"],function(a,b,c){var d=Backbone.View.extend({tagName:"div",className:"citations",render:function(){return this.$el.append("<p>"+this.formattedReference()+"</p>"),this},formattedReference:function(){var a=this.model,b=a.entryType(),c=a.fields(),d="",e=this._asSentence((c.author?c.author:"")+(c.year?" ("+c.year+")":""))+" ",f=c.title||"",g=c.pages?"pp. "+c.pages:"",h=c.address;if("article"==b){var i=(c.volume?c.volum [...]
+//# sourceMappingURL=../../../maps/mvc/citation/citation-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/collection/collection-li-edit.js b/static/scripts/mvc/collection/collection-li-edit.js
new file mode 100644
index 0000000..dc47398
--- /dev/null
+++ b/static/scripts/mvc/collection/collection-li-edit.js
@@ -0,0 +1,2 @@
+define(["mvc/collection/collection-li","mvc/dataset/dataset-li-edit","mvc/base-mvc","utils/localization"],function(a,b){"use strict";var c=a.DCListItemView,d=c.extend({initialize:function(a){c.prototype.initialize.call(this,a)},toString:function(){var a=this.model?this.model+"":"(no model)";return"DCListItemEdit("+a+")"}}),e=a.DCEListItemView,f=e.extend({initialize:function(a){e.prototype.initialize.call(this,a)},toString:function(){var a=this.model?this.model+"":"(no model)";return"DCEL [...]
+//# sourceMappingURL=../../../maps/mvc/collection/collection-li-edit.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/collection/collection-li.js b/static/scripts/mvc/collection/collection-li.js
new file mode 100644
index 0000000..5b76437
--- /dev/null
+++ b/static/scripts/mvc/collection/collection-li.js
@@ -0,0 +1,2 @@
+define(["mvc/list/list-item","mvc/dataset/dataset-li","mvc/base-mvc","utils/localization"],function(a,b,c,d){"use strict";var e=a.FoldoutListItemView,f=a.ListItemView,g=e.extend({className:e.prototype.className+" dataset-collection",id:function(){return["dataset_collection",this.model.get("id")].join("-")},initialize:function(a){this.linkTarget=a.linkTarget||"_blank",this.hasUser=a.hasUser,e.prototype.initialize.call(this,a)},_setUpListeners:function(){e.prototype._setUpListeners.call(th [...]
+//# sourceMappingURL=../../../maps/mvc/collection/collection-li.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/collection/collection-model.js b/static/scripts/mvc/collection/collection-model.js
new file mode 100644
index 0000000..2eac87e
--- /dev/null
+++ b/static/scripts/mvc/collection/collection-model.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/dataset-model","mvc/base-mvc","utils/localization"],function(a,b){"use strict";var c={defaults:{model_class:"DatasetCollectionElement",element_identifier:null,element_index:null,element_type:null},_mergeObject:function(a){return _.extend(a,a.object,{element_id:a.id}),delete a.object,a},constructor:function(a){a=this._mergeObject(a),this.idAttribute="element_id",Backbone.Model.apply(this,arguments)},parse:function(a){var b=a;return b=this._mergeObject(b)}},d=Backbone. [...]
+//# sourceMappingURL=../../../maps/mvc/collection/collection-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/collection/collection-view-edit.js b/static/scripts/mvc/collection/collection-view-edit.js
new file mode 100644
index 0000000..1614c93
--- /dev/null
+++ b/static/scripts/mvc/collection/collection-view-edit.js
@@ -0,0 +1,2 @@
+define(["mvc/collection/collection-view","mvc/collection/collection-model","mvc/collection/collection-li-edit","mvc/base-mvc","utils/localization","ui/editable-text"],function(a,b,c,d,e){"use strict";var f=a.CollectionView,g=f.extend({DatasetDCEViewClass:c.DatasetDCEListItemEdit,NestedDCDCEViewClass:c.NestedDCDCEListItemEdit,initialize:function(a){f.prototype.initialize.call(this,a)},_setUpBehaviors:function(a){if(a=a||this.$el,f.prototype._setUpBehaviors.call(this,a),this.model&&Galaxy. [...]
+//# sourceMappingURL=../../../maps/mvc/collection/collection-view-edit.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/collection/collection-view.js b/static/scripts/mvc/collection/collection-view.js
new file mode 100644
index 0000000..4bdc54b
--- /dev/null
+++ b/static/scripts/mvc/collection/collection-view.js
@@ -0,0 +1,2 @@
+define(["mvc/list/list-view","mvc/collection/collection-model","mvc/collection/collection-li","mvc/base-mvc","utils/localization"],function(a,b,c,d,e){"use strict";var f="collections",g=a.ModelListPanel,h=g.extend({_logNamespace:f,className:g.prototype.className+" dataset-collection-panel",DatasetDCEViewClass:c.DatasetDCEListItemView,NestedDCDCEViewClass:c.NestedDCDCEListItemView,modelCollectionKey:"elements",initialize:function(a){g.prototype.initialize.call(this,a),this.linkTarget=a.li [...]
+//# sourceMappingURL=../../../maps/mvc/collection/collection-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/collection/list-collection-creator.js b/static/scripts/mvc/collection/list-collection-creator.js
new file mode 100644
index 0000000..53abb11
--- /dev/null
+++ b/static/scripts/mvc/collection/list-collection-creator.js
@@ -0,0 +1,2 @@
+define(["mvc/history/hdca-model","mvc/dataset/states","mvc/base-mvc","mvc/ui/ui-modal","utils/natural-sort","utils/localization","ui/hoverhighlight"],function(a,b,c,d,e,f){"use strict";function g(a){var b=a.toJSON(),c=l(b,{creationFn:function(b,c){return b=b.map(function(a){return{id:a.id,name:a.name,src:"dataset"===a.history_content_type?"hda":"hdca"}}),a.createHDCA(b,"list",c)}});return c}var h="collections",i=Backbone.View.extend(c.LoggableMixin).extend({_logNamespace:h,tagName:"li",c [...]
+//# sourceMappingURL=../../../maps/mvc/collection/list-collection-creator.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/collection/list-of-pairs-collection-creator.js b/static/scripts/mvc/collection/list-of-pairs-collection-creator.js
new file mode 100644
index 0000000..6a9cb95
--- /dev/null
+++ b/static/scripts/mvc/collection/list-of-pairs-collection-creator.js
@@ -0,0 +1,2 @@
+define(["utils/levenshtein","utils/natural-sort","mvc/collection/list-collection-creator","mvc/base-mvc","utils/localization","ui/hoverhighlight"],function(a,b,c,d,e){"use strict";function f(a){function b(){return c.length||(c=[new RegExp(this.filters[0]),new RegExp(this.filters[1])]),c}a=a||{},a.createPair=a.createPair||function(a){a=a||{};var b=a.listA.splice(a.indexA,1)[0],c=a.listB.splice(a.indexB,1)[0],d=a.listB.indexOf(b),e=a.listA.indexOf(c);return-1!==d&&a.listB.splice(d,1),-1!== [...]
+//# sourceMappingURL=../../../maps/mvc/collection/list-of-pairs-collection-creator.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/collection/pair-collection-creator.js b/static/scripts/mvc/collection/pair-collection-creator.js
new file mode 100644
index 0000000..4b5651a
--- /dev/null
+++ b/static/scripts/mvc/collection/pair-collection-creator.js
@@ -0,0 +1,2 @@
+define(["mvc/collection/list-collection-creator","mvc/history/hdca-model","mvc/base-mvc","utils/localization"],function(a,b,c,d){"use strict";function e(a){var b=a.toJSON(),c=j(b,{creationFn:function(b,c){return b=[{name:"forward",src:"hda",id:b[0].id},{name:"reverse",src:"hda",id:b[1].id}],a.createHDCA(b,"paired",c)}});return c}var f="collections",g=Backbone.View.extend(c.LoggableMixin).extend({_logNamespace:f,tagName:"li",className:"collection-element",initialize:function(a){this.eleme [...]
+//# sourceMappingURL=../../../maps/mvc/collection/pair-collection-creator.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/dataset/data.js b/static/scripts/mvc/dataset/data.js
new file mode 100644
index 0000000..58528c8
--- /dev/null
+++ b/static/scripts/mvc/dataset/data.js
@@ -0,0 +1,2 @@
+define(["mvc/ui/ui-modal","mvc/ui/ui-frames","mvc/ui/icon-button"],function(a,b,c){var d=Backbone.Model.extend({}),e=Backbone.Model.extend({defaults:{id:"",type:"",name:"",hda_ldda:"hda",metadata:null},initialize:function(){this.get("metadata")||this._set_metadata(),this.on("change",this._set_metadata,this)},_set_metadata:function(){var a=new d;_.each(_.keys(this.attributes),function(b){if(0===b.indexOf("metadata_")){var c=b.split("metadata_")[1];a.set(c,this.attributes[b]),delete this.a [...]
+//# sourceMappingURL=../../../maps/mvc/dataset/data.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/dataset/dataset-choice.js b/static/scripts/mvc/dataset/dataset-choice.js
new file mode 100644
index 0000000..8f4793e
--- /dev/null
+++ b/static/scripts/mvc/dataset/dataset-choice.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/dataset-model","mvc/dataset/dataset-list","mvc/ui/ui-modal","mvc/base-mvc","utils/localization"],function(a,b,c,d,e){"use strict";function f(a,b,c){function d(a,b){for(var c in b)if(b.hasOwnProperty(c)&&a[c]!==b[c])return!1;return!0}return a.filter(function(a){return console.debug(a),!a.deleted&&a.visible&&(!c||void 0===a.collection_type)&&d(a,b)})}var g="dataset",h=function(d,g){function h(){l.resolve(j.getSelectedModels().map(function(a){return a.toJSON()}))}g=_.de [...]
+//# sourceMappingURL=../../../maps/mvc/dataset/dataset-choice.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/dataset/dataset-li-edit.js b/static/scripts/mvc/dataset/dataset-li-edit.js
new file mode 100644
index 0000000..b54c343
--- /dev/null
+++ b/static/scripts/mvc/dataset/dataset-li-edit.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/states","mvc/dataset/dataset-li","mvc/tag","mvc/annotation","ui/fa-icon-button","mvc/base-mvc","utils/localization"],function(a,b,c,d,e,f,g){"use strict";var h=b.DatasetListItemView,i=h.extend({initialize:function(a){h.prototype.initialize.call(this,a),this.hasUser=a.hasUser,this.purgeAllowed=a.purgeAllowed||!1,this.tagsEditorShown=a.tagsEditorShown||!1,this.annotationEditorShown=a.annotationEditorShown||!1},_renderPrimaryActions:function(){var b=h.prototype._renderP [...]
+//# sourceMappingURL=../../../maps/mvc/dataset/dataset-li-edit.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/dataset/dataset-li.js b/static/scripts/mvc/dataset/dataset-li.js
new file mode 100644
index 0000000..5e9e387
--- /dev/null
+++ b/static/scripts/mvc/dataset/dataset-li.js
@@ -0,0 +1,2 @@
+define(["mvc/list/list-item","mvc/dataset/states","ui/fa-icon-button","mvc/base-mvc","utils/localization"],function(a,b,c,d,e){"use strict";var f="dataset",g=a.ListItemView,h=g.extend({_logNamespace:f,className:g.prototype.className+" dataset",id:function(){return["dataset",this.model.get("id")].join("-")},initialize:function(a){a.logger&&(this.logger=this.model.logger=a.logger),this.log(this+".initialize:",a),g.prototype.initialize.call(this,a),this.linkTarget=a.linkTarget||"_blank"},_s [...]
+//# sourceMappingURL=../../../maps/mvc/dataset/dataset-li.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/dataset/dataset-list.js b/static/scripts/mvc/dataset/dataset-list.js
new file mode 100644
index 0000000..1cc1778
--- /dev/null
+++ b/static/scripts/mvc/dataset/dataset-list.js
@@ -0,0 +1,2 @@
+define(["mvc/list/list-view","mvc/dataset/dataset-li","mvc/base-mvc","utils/localization"],function(a,b,c,d){"use strict";var e="dataset",f=a.ListPanel,g=f.extend({_logNamespace:e,viewClass:b.DatasetListItemView,className:f.prototype.className+" dataset-list",noneFoundMsg:d("No matching datasets found"),initialize:function(a){f.prototype.initialize.call(this,a)},toString:function(){return"DatasetList("+this.collection+")"}});return{DatasetList:g}});
+//# sourceMappingURL=../../../maps/mvc/dataset/dataset-list.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/dataset/dataset-model.js b/static/scripts/mvc/dataset/dataset-model.js
new file mode 100644
index 0000000..941696a
--- /dev/null
+++ b/static/scripts/mvc/dataset/dataset-model.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/states","mvc/base-mvc","utils/localization"],function(a,b,c){"use strict";var d="dataset",e=b.SearchableModelMixin,f=Backbone.Model.extend(b.LoggableMixin).extend(b.mixin(e,{_logNamespace:d,defaults:{state:a.NEW,deleted:!1,purged:!1,name:"(unnamed dataset)",accessible:!0,data_type:"",file_ext:"",file_size:0,meta_files:[],misc_blurb:"",misc_info:"",tags:[]},initialize:function(b,c){this.debug(this+"(Dataset).initialize",b,c),this.get("accessible")||this.set("state",a. [...]
+//# sourceMappingURL=../../../maps/mvc/dataset/dataset-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/dataset/states.js b/static/scripts/mvc/dataset/states.js
new file mode 100644
index 0000000..f61db7a
--- /dev/null
+++ b/static/scripts/mvc/dataset/states.js
@@ -0,0 +1,2 @@
+define([],function(){"use strict";var a={UPLOAD:"upload",QUEUED:"queued",RUNNING:"running",SETTING_METADATA:"setting_metadata",NEW:"new",EMPTY:"empty",OK:"ok",PAUSED:"paused",FAILED_METADATA:"failed_metadata",NOT_VIEWABLE:"noPermission",DISCARDED:"discarded",ERROR:"error"};return a.READY_STATES=[a.OK,a.EMPTY,a.PAUSED,a.FAILED_METADATA,a.NOT_VIEWABLE,a.DISCARDED,a.ERROR],a.NOT_READY_STATES=[a.UPLOAD,a.QUEUED,a.RUNNING,a.SETTING_METADATA,a.NEW],a});
+//# sourceMappingURL=../../../maps/mvc/dataset/states.js.map
\ No newline at end of file
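Unlike most files in this diff, the states module above is fully visible: it maps dataset state names to strings and derives READY_STATES / NOT_READY_STATES from them. A sketch of consuming it from another AMD module, mirroring how the dataset model below keys off a state attribute:

    define(["mvc/dataset/states"], function (STATES) {
        "use strict";
        // True once a dataset has left the upload/queued/running pipeline.
        function isReady(dataset) {
            return STATES.READY_STATES.indexOf(dataset.state) !== -1;
        }
        return { isReady: isReady };
    });
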
diff --git a/static/scripts/mvc/form/form-data.js b/static/scripts/mvc/form/form-data.js
new file mode 100644
index 0000000..8f04594
--- /dev/null
+++ b/static/scripts/mvc/form/form-data.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(){var a=Backbone.Model.extend({initialize:function(a){this.app=a},checksum:function(){var a="",b=this;return this.app.section.$el.find(".section-row").each(function(){var c=$(this).attr("id"),d=b.app.field_list[c];d&&(a+=c+":"+JSON.stringify(d.value&&d.value())+":"+d.collapsed+";")}),a},create:function(){function a(a,b,c){d.flat_dict[a]=b,f[a]=c,d.app.element_list[b]&&d.app.element_list[b].$el.attr("tour_id",a)}function c(e,f){for(var g in f){var h=f[g];if [...]
+//# sourceMappingURL=../../../maps/mvc/form/form-data.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/form/form-input.js b/static/scripts/mvc/form/form-input.js
new file mode 100644
index 0000000..857f0a9
--- /dev/null
+++ b/static/scripts/mvc/form/form-input.js
@@ -0,0 +1,2 @@
+define([],function(){return Backbone.View.extend({initialize:function(a,b){this.app=a,this.app_options=a.options||{},this.field=b&&b.field||new Backbone.View,this.model=b&&b.model||new Backbone.Model({text_enable:this.app_options.text_enable||"Enable",text_disable:this.app_options.text_disable||"Disable",cls_enable:this.app_options.cls_enable||"fa fa-caret-square-o-down",cls_disable:this.app_options.cls_disable||"fa fa-caret-square-o-up"}).set(b),this.setElement(this._template()),this.$f [...]
+//# sourceMappingURL=../../../maps/mvc/form/form-input.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/form/form-parameters.js b/static/scripts/mvc/form/form-parameters.js
new file mode 100644
index 0000000..837f674
--- /dev/null
+++ b/static/scripts/mvc/form/form-parameters.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-misc","mvc/ui/ui-select-content","mvc/ui/ui-select-library","mvc/ui/ui-select-ftp","mvc/ui/ui-color-picker"],function(a,b,c,d,e,f){return Backbone.Model.extend({types:{text:"_fieldText",select:"_fieldSelect",data_column:"_fieldSelect",genomebuild:"_fieldSelect",data:"_fieldData",data_collection:"_fieldData",integer:"_fieldSlider","float":"_fieldSlider","boolean":"_fieldBoolean",drill_down:"_fieldDrilldown",color:"_fieldColor",hidden:"_fieldHidden",hidden_ [...]
+//# sourceMappingURL=../../../maps/mvc/form/form-parameters.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/form/form-repeat.js b/static/scripts/mvc/form/form-repeat.js
new file mode 100644
index 0000000..4d5ab27
--- /dev/null
+++ b/static/scripts/mvc/form/form-repeat.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-portlet","mvc/ui/ui-misc"],function(a,b,c){var d=Backbone.View.extend({initialize:function(b){this.list={},this.options=a.merge(b,{title:"Repeat",empty_text:"Not available.",max:null,min:null}),this.button_new=new c.ButtonIcon({icon:"fa-plus",title:"Insert "+this.options.title,tooltip:"Add new "+this.options.title+" block",floating:"clear",cls:"ui-button-icon form-repeat-add",onclick:function(){b.onnew&&b.onnew()}}),this.setElement($("<div/>").append(this [...]
+//# sourceMappingURL=../../../maps/mvc/form/form-repeat.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/form/form-section.js b/static/scripts/mvc/form/form-section.js
new file mode 100644
index 0000000..d9c796d
--- /dev/null
+++ b/static/scripts/mvc/form/form-section.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-misc","mvc/ui/ui-portlet","mvc/form/form-repeat","mvc/form/form-input","mvc/form/form-parameters"],function(a,b,c,d,e,f){var g=Backbone.View.extend({initialize:function(a,b){this.app=a,this.inputs=b.inputs,this.parameters=new f,this.setElement($("<div/>")),this.render()},render:function(){var a=this;this.$el.empty(),_.each(this.inputs,function(b){a.add(b)})},add:function(b){var c=jQuery.extend(!0,{},b);switch(c.id=b.id=a.uid(),this.app.input_list[c.id]=c, [...]
+//# sourceMappingURL=../../../maps/mvc/form/form-section.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/form/form-view.js b/static/scripts/mvc/form/form-view.js
new file mode 100644
index 0000000..c6050fd
--- /dev/null
+++ b/static/scripts/mvc/form/form-view.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-portlet","mvc/ui/ui-misc","mvc/form/form-section","mvc/form/form-data"],function(a,b,c,d,e){return Backbone.View.extend({initialize:function(b){this.options=a.merge(b,{initial_errors:!1,cls:"ui-portlet-limited",icon:null,always_refresh:!0}),this.setElement("<div/>"),this.render()},update:function(a){var b=this;this.data.matchModel(a,function(a,c){var d=b.input_list[c];if(d&&d.options&&!_.isEqual(d.options,a.options)){d.options=a.options;var e=b.field_list [...]
+//# sourceMappingURL=../../../maps/mvc/form/form-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/grid/grid-model.js b/static/scripts/mvc/grid/grid-model.js
new file mode 100644
index 0000000..6cdce12
--- /dev/null
+++ b/static/scripts/mvc/grid/grid-model.js
@@ -0,0 +1,2 @@
+define([],function(){return Backbone.Model.extend({defaults:{url_base:"",async:!1,async_ops:[],categorical_filters:[],filters:{},sort_key:null,show_item_checkboxes:!1,advanced_search:!1,cur_page:1,num_pages:1,operation:void 0,item_ids:void 0},can_async_op:function(a){return-1!==_.indexOf(this.attributes.async_ops,a)},add_filter:function(a,b,c){if(c){var d,e=this.attributes.filters[a];if(null===e||void 0===e)d=b;else if("string"==typeof e)if("All"==e)d=b;else{var f=[];f[0]=e,f[1]=b,d=f}el [...]
+//# sourceMappingURL=../../../maps/mvc/grid/grid-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/grid/grid-template.js b/static/scripts/mvc/grid/grid-template.js
new file mode 100644
index 0000000..ec652ec
--- /dev/null
+++ b/static/scripts/mvc/grid/grid-template.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){return{grid:function(a){var b="";return b=a.embedded?this.grid_header(a)+this.grid_table(a):'<div class="loading-elt-overlay"></div><table><tr><td width="75%">'+this.grid_header(a)+'</td><td></td><td></td></tr><tr><td width="100%" id="grid-message" valign="top"></td><td></td><td></td></tr></table>'+this.grid_table(a),a.info_text&&(b+='<br><div class="toolParamHelp" style="clear: both;">'+a.info_text+"</div>"),b},grid_table:function(){return'<form method [...]
+//# sourceMappingURL=../../../maps/mvc/grid/grid-template.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/grid/grid-view.js b/static/scripts/mvc/grid/grid-view.js
new file mode 100644
index 0000000..1195923
--- /dev/null
+++ b/static/scripts/mvc/grid/grid-view.js
@@ -0,0 +1,2 @@
+jQuery.ajaxSettings.traditional=!0,define(["mvc/grid/grid-model","mvc/grid/grid-template","mvc/ui/popup-menu"],function(a,b,c){return Backbone.View.extend({grid:null,initialize:function(a){this.setElement("#grid-container"),a.use_panels&&$("#center").css({padding:"10px",overflow:"auto"}),this.init_grid(a)},handle_refresh:function(a){a&&$.inArray("history",a)>-1&&top.Galaxy&&top.Galaxy.currHistoryPanel&&top.Galaxy.currHistoryPanel.loadCurrentHistory()},init_grid:function(c){this.grid=new  [...]
+//# sourceMappingURL=../../../maps/mvc/grid/grid-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/copy-dialog.js b/static/scripts/mvc/history/copy-dialog.js
new file mode 100644
index 0000000..9aa73f7
--- /dev/null
+++ b/static/scripts/mvc/history/copy-dialog.js
@@ -0,0 +1,2 @@
+define(["mvc/ui/ui-modal","mvc/ui/error-modal","utils/localization"],function(a,b,c){"use strict";var d={defaultName:_.template("Copy of '<%- name %>'"),title:_.template(c("Copying history")+' "<%- name %>"'),submitLabel:c("Copy"),errorMessage:c("History could not be copied."),progressive:c("Copying history"),activeLabel:c("Copy only the active, non-deleted datasets"),allLabel:c("Copy all datasets including deleted ones"),anonWarning:c("As an anonymous user, unless you login or register, [...]
+//# sourceMappingURL=../../../maps/mvc/history/copy-dialog.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/hda-li-edit.js b/static/scripts/mvc/history/hda-li-edit.js
new file mode 100644
index 0000000..0cc4c0c
--- /dev/null
+++ b/static/scripts/mvc/history/hda-li-edit.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/dataset-li-edit","mvc/history/hda-li","mvc/base-mvc","utils/localization"],function(a,b,c,d){"use strict";var e=a.DatasetListItemEdit,f=e.extend({className:e.prototype.className+" history-content",_fetchModelDetails:function(){var a=this;return a.model.inReadyState()&&!a.model.hasDetails()?a.model.fetch({silent:!0}):a.model.has("rerunnable")?jQuery.when():a.model.fetch({silent:!0,data:{keys:["rerunnable","creating_job"].join(",")}})},events:_.extend(_.clone(e.prototy [...]
+//# sourceMappingURL=../../../maps/mvc/history/hda-li-edit.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/hda-li.js b/static/scripts/mvc/history/hda-li.js
new file mode 100644
index 0000000..20383ab
--- /dev/null
+++ b/static/scripts/mvc/history/hda-li.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/dataset-li","mvc/base-mvc","utils/localization"],function(a,b,c){"use strict";var d=a.DatasetListItemView,e=d.extend({className:d.prototype.className+" history-content",initialize:function(a,b){d.prototype.initialize.call(this,a,b)},toString:function(){var a=this.model?this.model+"":"(no model)";return"HDAListItemView("+a+")"}});return e.prototype.templates=function(){var a=b.wrapTemplate(['<div class="title-bar clear" tabindex="0">','<span class="state-icon"></span> [...]
+//# sourceMappingURL=../../../maps/mvc/history/hda-li.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/hda-model.js b/static/scripts/mvc/history/hda-model.js
new file mode 100644
index 0000000..279b67d
--- /dev/null
+++ b/static/scripts/mvc/history/hda-model.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/dataset-model","mvc/history/history-content-model","mvc/base-mvc","utils/localization"],function(a,b,c){"use strict";var d=a.DatasetAssociation,e=b.HistoryContentMixin,f=d.extend(c.mixin(e,{defaults:_.extend({},d.prototype.defaults,e.defaults,{history_content_type:"dataset",model_class:"HistoryDatasetAssociation"})}));return{HistoryDatasetAssociation:f}});
+//# sourceMappingURL=../../../maps/mvc/history/hda-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/hdca-li-edit.js b/static/scripts/mvc/history/hdca-li-edit.js
new file mode 100644
index 0000000..dc7c60e
--- /dev/null
+++ b/static/scripts/mvc/history/hdca-li-edit.js
@@ -0,0 +1,2 @@
+define(["mvc/history/hdca-li","mvc/collection/collection-view-edit","ui/fa-icon-button","utils/localization"],function(a,b,c,d){"use strict";var e=a.HDCAListItemView,f=e.extend({_getFoldoutPanelClass:function(){switch(this.model.get("collection_type")){case"list":return b.ListCollectionViewEdit;case"paired":return b.PairCollectionViewEdit;case"list:paired":return b.ListOfPairsCollectionViewEdit;case"list:list":return b.ListOfListsCollectionViewEdit}throw new TypeError("Uknown collection_ [...]
+//# sourceMappingURL=../../../maps/mvc/history/hdca-li-edit.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/hdca-li.js b/static/scripts/mvc/history/hdca-li.js
new file mode 100644
index 0000000..2023c4e
--- /dev/null
+++ b/static/scripts/mvc/history/hdca-li.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/states","mvc/collection/collection-li","mvc/collection/collection-view","mvc/base-mvc","utils/localization"],function(a,b,c,d,e){"use strict";var f=b.DCListItemView,g=f.extend({className:f.prototype.className+" history-content",_setUpListeners:function(){f.prototype._setUpListeners.call(this),this.listenTo(this.model,{"change:populated change:visible":function(){this.render()}})},_getFoldoutPanelClass:function(){switch(this.model.get("collection_type")){case"list":re [...]
+//# sourceMappingURL=../../../maps/mvc/history/hdca-li.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/hdca-model.js b/static/scripts/mvc/history/hdca-model.js
new file mode 100644
index 0000000..1ba0577
--- /dev/null
+++ b/static/scripts/mvc/history/hdca-model.js
@@ -0,0 +1,2 @@
+define(["mvc/collection/collection-model","mvc/history/history-content-model","utils/localization"],function(a,b){"use strict";function c(a){return function(b,c){return this.isNew()&&(c=c||{},c.url=this.urlRoot+this.get("history_id")+"/contents",b=b||{},b.type="dataset_collection"),a.call(this,b,c)}}var d=b.HistoryContentMixin,e=a.ListDatasetCollection,f=a.PairDatasetCollection,g=a.ListPairedDatasetCollection,h=a.ListOfListsDatasetCollection,i=e.extend(d).extend({defaults:_.extend(_.clon [...]
+//# sourceMappingURL=../../../maps/mvc/history/hdca-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-content-model.js b/static/scripts/mvc/history/history-content-model.js
new file mode 100644
index 0000000..9271c0f
--- /dev/null
+++ b/static/scripts/mvc/history/history-content-model.js
@@ -0,0 +1,2 @@
+define(["mvc/dataset/states","mvc/base-mvc","utils/localization"],function(){"use strict";var a={defaults:{history_id:null,history_content_type:null,hid:null,visible:!0},idAttribute:"type_id",hidden:function(){return!this.get("visible")},isVisible:function(a,b){var c=!0;return a||!this.get("deleted")&&!this.get("purged")||(c=!1),b||this.get("visible")||(c=!1),c},urlRoot:Galaxy.root+"api/histories/",url:function(){var a=this.urlRoot+this.get("history_id")+"/contents/"+this.get("history_co [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-content-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-contents.js b/static/scripts/mvc/history/history-contents.js
new file mode 100644
index 0000000..a4fcc11
--- /dev/null
+++ b/static/scripts/mvc/history/history-contents.js
@@ -0,0 +1,2 @@
+define(["mvc/base/controlled-fetch-collection","mvc/history/hda-model","mvc/history/hdca-model","mvc/history/history-preferences","mvc/base-mvc","utils/ajax-queue"],function(a,b,c,d,e,f){"use strict";var g=a.PaginatedCollection,h=g.extend(e.LoggableMixin).extend({_logNamespace:"history",model:function(a,d){if("dataset"===a.history_content_type)return new b.HistoryDatasetAssociation(a,d);if("dataset_collection"===a.history_content_type){switch(a.collection_type){case"list":return new c.Hi [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-contents.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-model.js b/static/scripts/mvc/history/history-model.js
new file mode 100644
index 0000000..b4d58a6
--- /dev/null
+++ b/static/scripts/mvc/history/history-model.js
@@ -0,0 +1,2 @@
+define(["mvc/history/history-contents","mvc/history/history-preferences","mvc/base/controlled-fetch-collection","utils/utils","mvc/base-mvc","utils/localization"],function(a,b,c,d,e,f){"use strict";var g=Backbone.Model.extend(e.LoggableMixin).extend(e.mixin(e.SearchableModelMixin,{_logNamespace:"history",UPDATE_DELAY:4e3,defaults:{model_class:"History",id:null,name:"Unnamed History",state:"new",deleted:!1,contents_active:{},contents_states:{}},urlRoot:Galaxy.root+"api/histories",contents [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-preferences.js b/static/scripts/mvc/history/history-preferences.js
new file mode 100644
index 0000000..914a3fe
--- /dev/null
+++ b/static/scripts/mvc/history/history-preferences.js
@@ -0,0 +1,2 @@
+define(["mvc/base-mvc"],function(a){"use strict";var b=a.SessionStorageModel.extend({defaults:{expandedIds:{},show_deleted:!1,show_hidden:!1},addExpanded:function(a){var b=this.get("expandedIds");b[a.id]=a.get("id"),this.save("expandedIds",b)},removeExpanded:function(a){var b=this.get("expandedIds");delete b[a.id],this.save("expandedIds",b)},isExpanded:function(a){return _.result(this.get("expandedIds"),a,!1)},allExpanded:function(){return _.values(this.get("expandedIds"))},clearExpanded [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-preferences.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-structure-view.js b/static/scripts/mvc/history/history-structure-view.js
new file mode 100644
index 0000000..53fcb04
--- /dev/null
+++ b/static/scripts/mvc/history/history-structure-view.js
@@ -0,0 +1,2 @@
+define(["mvc/history/job-dag","mvc/job/job-model","mvc/job/job-li","mvc/dataset/dataset-li","mvc/base-mvc","utils/localization","libs/d3"],function(a,b,c,d,e){"use strict";var f="history";window.JobDAG=a;var g=Backbone.View.extend(e.LoggableMixin).extend({_logNamespace:f,className:"history-structure-component",_INITIAL_ZOOM_LEVEL:1,_MIN_ZOOM_LEVEL:.25,_LINK_ID_SEP:"-to-",_VERTEX_NAME_DATA_KEY:"vertex-name",JobItemClass:c.JobListItemView,ContentItemClass:d.DatasetListItemView,initialize:f [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-structure-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-view-annotated.js b/static/scripts/mvc/history/history-view-annotated.js
new file mode 100644
index 0000000..fd87324
--- /dev/null
+++ b/static/scripts/mvc/history/history-view-annotated.js
@@ -0,0 +1,2 @@
+define(["mvc/history/history-view","mvc/history/hda-li","mvc/history/hdca-li","mvc/base-mvc","utils/localization"],function(a,b,c,d,e){"use strict";var f=a.HistoryView,g=f.extend({className:f.prototype.className+" annotated-history-panel",_buildNewRender:function(){var a=f.prototype._buildNewRender.call(this);return this.renderHistoryAnnotation(a),a},renderHistoryAnnotation:function(a){var b=this.model.get("annotation");b&&a.find("> .controls .subtitle").text(b)},renderItems:function(a){ [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-view-annotated.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-view-edit-current.js b/static/scripts/mvc/history/history-view-edit-current.js
new file mode 100644
index 0000000..57a39a0
--- /dev/null
+++ b/static/scripts/mvc/history/history-view-edit-current.js
@@ -0,0 +1,2 @@
+define(["mvc/history/history-model","mvc/history/history-view-edit","mvc/base-mvc","utils/localization"],function(a,b,c,d){"use strict";var e=c.SessionStorageModel.extend({defaults:{tagsEditorShown:!1,annotationEditorShown:!1,scrollPosition:0},toString:function(){return"HistoryViewPrefs("+JSON.stringify(this.toJSON())+")"}});e.storageKey=function(){return"history-panel"};var f=b.HistoryViewEdit,g=f.extend({className:f.prototype.className+" current-history-panel",HDCAViewClass:f.prototype [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-view-edit-current.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-view-edit.js b/static/scripts/mvc/history/history-view-edit.js
new file mode 100644
index 0000000..0500db7
--- /dev/null
+++ b/static/scripts/mvc/history/history-view-edit.js
@@ -0,0 +1,2 @@
+define(["mvc/history/history-view","mvc/history/history-contents","mvc/dataset/states","mvc/history/hda-model","mvc/history/hda-li-edit","mvc/history/hdca-li-edit","mvc/tag","mvc/annotation","mvc/collection/list-collection-creator","mvc/collection/pair-collection-creator","mvc/collection/list-of-pairs-collection-creator","ui/fa-icon-button","mvc/ui/popup-menu","mvc/base-mvc","utils/localization","ui/editable-text"],function(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o){"use strict";var p=a.HistoryView, [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-view-edit.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/history-view.js b/static/scripts/mvc/history/history-view.js
new file mode 100644
index 0000000..08e5872
--- /dev/null
+++ b/static/scripts/mvc/history/history-view.js
@@ -0,0 +1,2 @@
+define(["mvc/list/list-view","mvc/history/history-model","mvc/history/history-contents","mvc/history/history-preferences","mvc/history/hda-li","mvc/history/hdca-li","mvc/user/user-model","mvc/ui/error-modal","ui/fa-icon-button","mvc/base-mvc","utils/localization","ui/search-input"],function(a,b,c,d,e,f,g,h,i,j,k){"use strict";var l=a.ModelListPanel,m=l.extend({_logNamespace:"history",HDAViewClass:e.HDAListItemView,HDCAViewClass:f.HDCAListItemView,collectionClass:c.HistoryContents,modelCo [...]
+//# sourceMappingURL=../../../maps/mvc/history/history-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/job-dag.js b/static/scripts/mvc/history/job-dag.js
new file mode 100644
index 0000000..079f6d1
--- /dev/null
+++ b/static/scripts/mvc/history/job-dag.js
@@ -0,0 +1,2 @@
+define(["utils/graph","utils/add-logging"],function(a,b){"use strict";var c=a.Graph,d=function(a){a=a||{};var b=this;b.filters=[],b._jobsData=[],b._historyContentsMap={},b._toolMap={},b._outputIdToJobMap={},b.noInputJobs=[],b.noOutputJobs=[],b.filteredSetMetadata=[],b.filteredErroredJobs=[],b.dataKeys=["jobs","historyContents","tools"],c.call(b,!0,_.pick(a,b.dataKeys),_.omit(a,b.dataKeys))};return d.prototype=new a.Graph,d.prototype.constructor=d,b(d),d.prototype.init=function(a){a=a||{} [...]
+//# sourceMappingURL=../../../maps/mvc/history/job-dag.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/multi-panel.js b/static/scripts/mvc/history/multi-panel.js
new file mode 100644
index 0000000..5e5895c
--- /dev/null
+++ b/static/scripts/mvc/history/multi-panel.js
@@ -0,0 +1,2 @@
+define(["mvc/history/history-model","mvc/history/history-view-edit","mvc/history/copy-dialog","mvc/ui/error-modal","mvc/base-mvc","utils/ajax-queue","ui/mode-button","ui/search-input"],function(a,b,c,d,e,f){"use strict";var g="history",h=Backbone.View.extend(e.LoggableMixin).extend({_logNamespace:g,tagName:"div",className:"history-column flex-column flex-row-container",id:function(){return this.model?"history-column-"+this.model.get("id"):""},initialize:function(a){a=a||{},this.purgeAllo [...]
+//# sourceMappingURL=../../../maps/mvc/history/multi-panel.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/history/options-menu.js b/static/scripts/mvc/history/options-menu.js
new file mode 100644
index 0000000..41a97af
--- /dev/null
+++ b/static/scripts/mvc/history/options-menu.js
@@ -0,0 +1,2 @@
+define(["mvc/ui/popup-menu","mvc/history/copy-dialog","mvc/base-mvc","utils/localization","mvc/webhooks"],function(a,b,c,d,e){"use strict";function f(a,b,c){return _.clone(g).filter(function(d){return a&&!d.anon?!1:!b&&d.purge?!1:(d.href&&(d.href=c+d.href,d.target="galaxy_main"),d.confirm&&(d.func=function(){confirm(d.confirm)&&(galaxy_main.location=d.href)}),!0)})}var g=[{html:d("History Lists"),header:!0},{html:d("Saved Histories"),href:"history/list"},{html:d("Histories Shared with Me [...]
+//# sourceMappingURL=../../../maps/mvc/history/options-menu.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/job/job-li.js b/static/scripts/mvc/job/job-li.js
new file mode 100644
index 0000000..e9657d1
--- /dev/null
+++ b/static/scripts/mvc/job/job-li.js
@@ -0,0 +1,2 @@
+define(["mvc/list/list-item","mvc/dataset/dataset-list","mvc/base-mvc","utils/localization"],function(a,b,c,d){var e=a.FoldoutListItemView,f=e.extend({className:e.prototype.className+" job",id:function(){return["job",this.model.get("id")].join("-")},foldoutPanelClass:b.DatasetList,initialize:function(a){a.logger&&(this.logger=this.model.logger=a.logger),this.log(this+".initialize:",a),e.prototype.initialize.call(this,a),this.tool=a.tool||{},this.jobData=a.jobData||{},this.linkTarget=a.li [...]
+//# sourceMappingURL=../../../maps/mvc/job/job-li.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/job/job-model.js b/static/scripts/mvc/job/job-model.js
new file mode 100644
index 0000000..a6b29a3
--- /dev/null
+++ b/static/scripts/mvc/job/job-model.js
@@ -0,0 +1,2 @@
+define(["mvc/history/history-contents","mvc/dataset/states","utils/ajax-queue","mvc/base-mvc","utils/localization"],function(a,b,c,d){var e="jobs",f=d.SearchableModelMixin,g=Backbone.Model.extend(d.LoggableMixin).extend(d.mixin(f,{_logNamespace:e,defaults:{model_class:"Job",tool_id:null,exit_code:null,inputs:{},outputs:{},params:{},create_time:null,update_time:null,state:b.NEW},parse:function(a){return a.params=this.parseParams(a.params),a},parseParams:function(a){var b={};return _.each( [...]
+//# sourceMappingURL=../../../maps/mvc/job/job-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-dataset-view.js b/static/scripts/mvc/library/library-dataset-view.js
new file mode 100644
index 0000000..b0299c9
--- /dev/null
+++ b/static/scripts/mvc/library/library-dataset-view.js
@@ -0,0 +1,2 @@
+define(["libs/toastr","mvc/library/library-model","utils/utils","mvc/ui/ui-select"],function(a,b,c,d){var e=Backbone.View.extend({el:"#center",model:null,options:{},events:{"click .toolbtn_modify_dataset":"enableModification","click .toolbtn_cancel_modifications":"render","click .toolbtn-download-dataset":"downloadDataset","click .toolbtn-import-dataset":"importIntoHistory","click .toolbtn-share-dataset":"shareDataset","click .btn-copy-link-to-clipboard":"copyToClipboard","click .btn-mak [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-dataset-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-folder-view.js b/static/scripts/mvc/library/library-folder-view.js
new file mode 100644
index 0000000..e6dc509
--- /dev/null
+++ b/static/scripts/mvc/library/library-folder-view.js
@@ -0,0 +1,2 @@
+define(["libs/toastr","mvc/library/library-model","mvc/ui/ui-select"],function(a,b,c){var d=Backbone.View.extend({el:"#center",model:null,options:{},events:{"click .toolbtn_save_permissions":"savePermissions"},initialize:function(a){this.options=_.extend(this.options,a),this.options.id&&this.fetchFolder()},fetchFolder:function(c){this.options=_.extend(this.options,c),this.model=new b.FolderAsModel({id:this.options.id});var d=this;this.model.fetch({success:function(){d.options.show_permis [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-folder-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-folderlist-view.js b/static/scripts/mvc/library/library-folderlist-view.js
new file mode 100644
index 0000000..01c732c
--- /dev/null
+++ b/static/scripts/mvc/library/library-folderlist-view.js
@@ -0,0 +1,2 @@
+define(["layout/masthead","utils/utils","libs/toastr","mvc/library/library-model","mvc/library/library-folderrow-view","mvc/library/library-dataset-view"],function(a,b,c,d,e){var f=Backbone.View.extend({el:"#folder_items_element",progress:0,progressStep:1,folderContainer:null,sort:"asc",events:{"click #select-all-checkboxes":"selectAll","click .dataset_row":"selectClickedRow","click .folder_row":"selectClickedRow","click .sort-folder-link":"sortColumnClicked"},collection:null,defaults:{i [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-folderlist-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-folderrow-view.js b/static/scripts/mvc/library/library-folderrow-view.js
new file mode 100644
index 0000000..fb69759
--- /dev/null
+++ b/static/scripts/mvc/library/library-folderrow-view.js
@@ -0,0 +1,2 @@
+define(["libs/toastr","mvc/library/library-model","mvc/library/library-dataset-view"],function(a,b,c){var d=Backbone.View.extend({events:{"click .undelete_dataset_btn":"undeleteDataset","click .undelete_folder_btn":"undeleteFolder","click .edit_folder_btn":"startModifications","click .cancel_folder_btn":"cancelModifications","click .save_folder_btn":"saveModifications"},defaults:{type:null,visibility_config:{edit_folder_btn:!0,save_folder_btn:!1,cancel_folder_btn:!1,permission_folder_btn [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-folderrow-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-foldertoolbar-view.js b/static/scripts/mvc/library/library-foldertoolbar-view.js
new file mode 100644
index 0000000..7c3d283
--- /dev/null
+++ b/static/scripts/mvc/library/library-foldertoolbar-view.js
@@ -0,0 +1,3 @@
+define(["layout/masthead","utils/utils","libs/toastr","mvc/library/library-model","mvc/ui/ui-select"],function(a,b,c,d,e){var f=Backbone.View.extend({el:"#center",events:{"click #toolbtn_create_folder":"createFolderFromModal","click #toolbtn_bulk_import":"modalBulkImport","click #include_deleted_datasets_chk":"checkIncludeDeleted","click #toolbtn_bulk_delete":"deleteSelectedItems","click .toolbtn-show-locinfo":"showLocInfo","click .page_size_prompt":"showPageSizePrompt"},defaults:{can_ad [...]
+return _.template(["<strong>Choose the datasets to import:</strong>","<ul>","<% _.each(history_contents, function(history_item) { %>",'<li data-id="<%= _.escape(history_item.get("id")) %>">','<input style="margin: 0;" type="checkbox"> <%= _.escape(history_item.get("hid")) %>: <%= _.escape(history_item.get("name")) %>',"</li>","<% }); %>","</ul>"].join(""))},templatePaginator:function(){return _.template(['<ul class="pagination pagination-sm">',"<% if ( ( show_page - 1 ) > 0 ) { %>","<% i [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-foldertoolbar-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-library-view.js b/static/scripts/mvc/library/library-library-view.js
new file mode 100644
index 0000000..d1cd742
--- /dev/null
+++ b/static/scripts/mvc/library/library-library-view.js
@@ -0,0 +1,2 @@
+define(["libs/toastr","mvc/library/library-model","mvc/ui/ui-select"],function(a,b,c){var d=Backbone.View.extend({el:"#center",model:null,options:{},events:{"click .toolbtn_save_permissions":"savePermissions"},initialize:function(a){this.options=_.extend(this.options,a),this.options.id&&this.fetchLibrary()},fetchLibrary:function(c){this.options=_.extend(this.options,c),this.model=new b.Library({id:this.options.id});var d=this;this.model.fetch({success:function(){d.options.show_permission [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-library-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-librarylist-view.js b/static/scripts/mvc/library/library-librarylist-view.js
new file mode 100644
index 0000000..aebdd93
--- /dev/null
+++ b/static/scripts/mvc/library/library-librarylist-view.js
@@ -0,0 +1,2 @@
+define(["layout/masthead","mvc/base-mvc","utils/utils","libs/toastr","mvc/library/library-model","mvc/library/library-libraryrow-view","libs/underscore"],function(a,b,c,d,e,f,g){var h=Backbone.View.extend({el:"#libraries_element",events:{"click .sort-libraries-link":"sort_clicked"},defaults:{page_count:null,show_page:null,all_fetched:!1},initialize:function(a){this.options=g.defaults(this.options||{},a,this.defaults);var b=this;this.modal=null,this.collection=new e.Libraries,this.collect [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-librarylist-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-libraryrow-view.js b/static/scripts/mvc/library/library-libraryrow-view.js
new file mode 100644
index 0000000..70019c2
--- /dev/null
+++ b/static/scripts/mvc/library/library-libraryrow-view.js
@@ -0,0 +1,2 @@
+define(["layout/masthead","utils/utils","libs/toastr"],function(a,b,c){var d=Backbone.View.extend({events:{"click .edit_library_btn":"edit_button_clicked","click .cancel_library_btn":"cancel_library_modification","click .save_library_btn":"save_library_modification","click .delete_library_btn":"delete_library","click .undelete_library_btn":"undelete_library"},edit_mode:!1,element_visibility_config:{upload_library_btn:!1,edit_library_btn:!1,permission_library_btn:!1,save_library_btn:!1,ca [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-libraryrow-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-librarytoolbar-view.js b/static/scripts/mvc/library/library-librarytoolbar-view.js
new file mode 100644
index 0000000..64920d5
--- /dev/null
+++ b/static/scripts/mvc/library/library-librarytoolbar-view.js
@@ -0,0 +1,2 @@
+define(["libs/toastr","mvc/library/library-model"],function(a,b){var c=Backbone.View.extend({el:"#center",defaults:{search_term:""},events:{"click #create_new_library_btn":"createLibraryFromModal","click #include_deleted_chk":"includeDeletedChecked","click #lib_page_size_prompt":"showPageSizePrompt","keyup .library-search-input":"searchLibraries"},initialize:function(a){this.options=_.defaults(this.options||{},a,this.defaults),this.render()},render:function(){var a=this.templateToolBar() [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-librarytoolbar-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/library/library-model.js b/static/scripts/mvc/library/library-model.js
new file mode 100644
index 0000000..cc187f3
--- /dev/null
+++ b/static/scripts/mvc/library/library-model.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.Model.extend({urlRoot:Galaxy.root+"api/libraries/",isVisible:function(a){var b=!0;return!a&&this.get("deleted")&&(b=!1),b}}),b=Backbone.Collection.extend({urlRoot:Galaxy.root+"api/libraries",model:a,sort_key:"name",sort_order:null,initialize:function(a){a=a||{}},search:function(a){if(""==a)return this;var b=a.toLowerCase();return this.filter(function(a){return lowercase_name=a.get("name").toLowerCase(),-1!==lowercase_name.indexOf(b)})},getVisible:funct [...]
+//# sourceMappingURL=../../../maps/mvc/library/library-model.js.map
\ No newline at end of file
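
Two quirks are visible in the library-model.js hunk: `search` assigns `lowercase_name` without `var`, leaking a global, and it returns the collection itself for an empty term but a plain array otherwise. A sketch with the global leak fixed (the module's full export, including the FolderAsModel used by the folder views, lies past the truncation):

    define([], function(){
        var Library = Backbone.Model.extend({
            urlRoot : Galaxy.root + 'api/libraries/',
            /** shown unless deleted (or deleted items are explicitly included) */
            isVisible : function( show_deleted ){
                var is_visible = true;
                if( !show_deleted && this.get( 'deleted' ) ){
                    is_visible = false;
                }
                return is_visible;
            }
        });

        var Libraries = Backbone.Collection.extend({
            urlRoot : Galaxy.root + 'api/libraries',
            model : Library,
            sort_key : 'name',
            sort_order : null,
            initialize : function( options ){
                options = options || {};
            },
            /** case-insensitive substring match on library name */
            search : function( search_term ){
                if( search_term == '' ){ return this; }
                var lowered_term = search_term.toLowerCase();
                return this.filter( function( library ){
                    // declared locally here; the minified build leaks a global
                    var lowercase_name = library.get( 'name' ).toLowerCase();
                    return lowercase_name.indexOf( lowered_term ) !== -1;
                });
            }
        });

        return { Library : Library, Libraries : Libraries };  // partial; export shape assumed
    });
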
diff --git a/static/scripts/mvc/list/list-item.js b/static/scripts/mvc/list/list-item.js
new file mode 100644
index 0000000..19d3ff8
--- /dev/null
+++ b/static/scripts/mvc/list/list-item.js
@@ -0,0 +1,2 @@
+define(["mvc/base-mvc","utils/localization"],function(a){"use strict";var b="list",c=Backbone.View.extend(a.LoggableMixin).extend({_logNamespace:b,initialize:function(a){this.expanded=a.expanded||!1,this.log("	 expanded:",this.expanded),this.fxSpeed=void 0!==a.fxSpeed?a.fxSpeed:this.fxSpeed},fxSpeed:"fast",render:function(a){var b=this._buildNewRender();return this._setUpBehaviors(b),this._queueNewRender(b,a),this},_buildNewRender:function(){var a=$(this.templates.el(this.model.toJSON(), [...]
+//# sourceMappingURL=../../../maps/mvc/list/list-item.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/list/list-view.js b/static/scripts/mvc/list/list-view.js
new file mode 100644
index 0000000..be5edc1
--- /dev/null
+++ b/static/scripts/mvc/list/list-view.js
@@ -0,0 +1,2 @@
+define(["mvc/list/list-item","ui/loading-indicator","mvc/base-mvc","utils/localization","ui/search-input"],function(a,b,c,d){"use strict";var e="list",f=Backbone.View.extend(c.LoggableMixin).extend({_logNamespace:e,viewClass:a.ListItemView,collectionClass:Backbone.Collection,tagName:"div",className:"list-panel",fxSpeed:"fast",emptyMsg:d("This list is empty"),noneFoundMsg:d("No matching items found"),searchPlaceholder:d("search"),initialize:function(a){a=a||{},a.logger&&(this.logger=a.log [...]
+//# sourceMappingURL=../../../maps/mvc/list/list-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/tag.js b/static/scripts/mvc/tag.js
new file mode 100644
index 0000000..7f6afd6
--- /dev/null
+++ b/static/scripts/mvc/tag.js
@@ -0,0 +1,2 @@
+define(["mvc/base-mvc","utils/localization"],function(a,b){var c=Backbone.View.extend(a.LoggableMixin).extend(a.HiddenUntilActivatedViewMixin).extend({tagName:"div",className:"tags-display",initialize:function(a){this.listenTo(this.model,"change:tags",function(){this.render()}),this.hiddenUntilActivated(a.$activator,a)},render:function(){var a=this;return this.$el.html(this._template()),this.$input().select2({placeholder:"Add tags",width:"100%",tags:function(){return a._getTagsUsed()}}), [...]
+//# sourceMappingURL=../../maps/mvc/tag.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/tool/tool-form-base.js b/static/scripts/mvc/tool/tool-form-base.js
new file mode 100644
index 0000000..85d6d2c
--- /dev/null
+++ b/static/scripts/mvc/tool/tool-form-base.js
@@ -0,0 +1,2 @@
+define(["utils/utils","utils/deferred","mvc/ui/ui-misc","mvc/form/form-view","mvc/citation/citation-model","mvc/citation/citation-view"],function(a,b,c,d,e,f){return d.extend({initialize:function(a){var c=this;d.prototype.initialize.call(this,a),this.deferred=new b,a.inputs?this._buildForm(a):this.deferred.execute(function(b){c._buildModel(b,a,!0)}),a.listen_to_history&&parent.Galaxy&&parent.Galaxy.currHistoryPanel&&this.listenTo(parent.Galaxy.currHistoryPanel.collection,"change",functio [...]
+//# sourceMappingURL=../../../maps/mvc/tool/tool-form-base.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/tool/tool-form-composite.js b/static/scripts/mvc/tool/tool-form-composite.js
new file mode 100644
index 0000000..b32c636
--- /dev/null
+++ b/static/scripts/mvc/tool/tool-form-composite.js
@@ -0,0 +1,2 @@
+define(["utils/utils","utils/deferred","mvc/ui/ui-misc","mvc/form/form-view","mvc/form/form-data","mvc/tool/tool-form-base","mvc/ui/ui-modal","mvc/webhooks"],function(a,b,c,d,e,f,g,h){var i=Backbone.View.extend({initialize:function(a){var c=this;this.modal=parent.Galaxy.modal||new g.View,this.model=a&&a.model||new Backbone.Model(a),this.deferred=new b,this.setElement($("<div/>").addClass("ui-form-composite").append(this.$message=$("<div/>")).append(this.$header=$("<div/>")).append(this.$ [...]
+//# sourceMappingURL=../../../maps/mvc/tool/tool-form-composite.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/tool/tool-form-workflow.js b/static/scripts/mvc/tool/tool-form-workflow.js
new file mode 100644
index 0000000..39d103d
--- /dev/null
+++ b/static/scripts/mvc/tool/tool-form-workflow.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/tool/tool-form-base"],function(a,b){var c=Backbone.View.extend({initialize:function(c){var d=this;this.workflow=c.workflow,this.node=c.node,this.setElement("<div/>"),this.node?(this.post_job_actions=this.node.post_job_actions||{},a.deepeach(c.inputs,function(b){b.type&&(-1!=["data","data_collection"].indexOf(b.type)?(b.type="hidden",b.info="Data input '"+b.name+"' ("+a.textify(b.extensions)+")",b.value={__class__:"RuntimeValue"}):(b.collapsible_value={__class__ [...]
+//# sourceMappingURL=../../../maps/mvc/tool/tool-form-workflow.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/tool/tool-form.js b/static/scripts/mvc/tool/tool-form.js
new file mode 100644
index 0000000..1fe5490
--- /dev/null
+++ b/static/scripts/mvc/tool/tool-form.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-misc","mvc/ui/ui-modal","mvc/tool/tool-form-base","mvc/webhooks"],function(a,b,c,d,e){var f=Backbone.View.extend({initialize:function(e){var f=this;this.modal=parent.Galaxy.modal||new c.View,this.form=new d(a.merge({listen_to_history:!0,always_refresh:!1,customize:function(a){a.buttons={execute:execute_btn=new b.Button({icon:"fa-check",tooltip:"Execute: "+a.name+" ("+a.version+")",title:"Execute",cls:"ui-button btn btn-primary",floating:"clear",onclick:fu [...]
+//# sourceMappingURL=../../../maps/mvc/tool/tool-form.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/tool/tool-template.js b/static/scripts/mvc/tool/tool-template.js
new file mode 100644
index 0000000..ab70fc3
--- /dev/null
+++ b/static/scripts/mvc/tool/tool-template.js
@@ -0,0 +1,2 @@
+define([],function(){return{error:function(a){return $("<div>").append($("<p/>").text("The server could not complete the request. Please contact the Galaxy Team if this error persists.")).append($("<textarea/>").addClass("ui-textarea").css({color:"black",height:"300px !important"}).text(JSON.stringify(a,void 0,4)))}}});
+//# sourceMappingURL=../../../maps/mvc/tool/tool-template.js.map
\ No newline at end of file
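
The tool-template.js hunk is one of the few added whole rather than truncated, so it de-minifies exactly; only the argument name is invented. One note: jQuery's .css() does not honor `!important`, so the height hint is applied as a plain inline style.

    define([], function(){
        return {
            // modal body used when a tool request fails server-side; the raw
            // response is pretty-printed so it can be pasted into bug reports
            error : function( response ){
                return $( '<div>' )
                    .append( $( '<p/>' ).text(
                        'The server could not complete the request. ' +
                        'Please contact the Galaxy Team if this error persists.' ) )
                    .append( $( '<textarea/>' )
                        .addClass( 'ui-textarea' )
                        .css({ color: 'black', height: '300px !important' })
                        .text( JSON.stringify( response, undefined, 4 ) ) );
            }
        };
    });
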
diff --git a/static/scripts/mvc/tool/tool-webhooks.js b/static/scripts/mvc/tool/tool-webhooks.js
new file mode 100644
index 0000000..1008dd1
--- /dev/null
+++ b/static/scripts/mvc/tool/tool-webhooks.js
@@ -0,0 +1,2 @@
+define([],function(){var a="undefined"!=typeof Galaxy?Galaxy.root:"/",b=Backbone.Model.extend({urlRoot:a+"api/webhooks/toolview",defaults:{name:"",type:"",styles:"",script:""}}),c=Backbone.View.extend({el:"#webhook-toolview",initialize:function(){var a=this;this.model=new b,this.model.fetch({success:function(){a.render()}})},render:function(){var a=this.model.toJSON();return this.$el.html('<div id="'+a.name+'"></div>'),a.styles&&$("<style/>",{type:"text/css"}).text(a.styles).appendTo("he [...]
+//# sourceMappingURL=../../../maps/mvc/tool/tool-webhooks.js.map
\ No newline at end of file
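
tool-webhooks.js and the later mvc/webhooks.js hunk share one pattern: fetch a webhook descriptor from the API, mount a placeholder div, then inject the descriptor's styles and script into the document head. A sketch of the visible portion (the script handling mirrors the webhooks.js fragment, and its mime type is an assumption since that hunk truncates mid-literal):

    define([], function(){
        var galaxyRoot = typeof Galaxy != 'undefined' ? Galaxy.root : '/';

        var WebhookModel = Backbone.Model.extend({
            urlRoot : galaxyRoot + 'api/webhooks/toolview',
            defaults : {
                name   : '',
                type   : '',
                styles : '',  // CSS injected into <head>
                script : ''   // JS injected into <head>
            }
        });

        var WebhookView = Backbone.View.extend({
            el : '#webhook-toolview',
            initialize : function(){
                var self = this;
                this.model = new WebhookModel();
                this.model.fetch({ success : function(){ self.render(); } });
            },
            render : function(){
                var webhook = this.model.toJSON();
                this.$el.html( '<div id="' + webhook.name + '"></div>' );
                if( webhook.styles ){
                    $( '<style/>', { type: 'text/css' } )
                        .text( webhook.styles ).appendTo( 'head' );
                }
                if( webhook.script ){
                    // mime type assumed; the webhooks.js hunk cuts off here
                    $( '<script/>', { type: 'text/javascript' } )
                        .text( webhook.script ).appendTo( 'head' );
                }
                return this;
            }
        });

        return { WebhookView : WebhookView };  // export shape assumed
    });
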
diff --git a/static/scripts/mvc/tool/tools.js b/static/scripts/mvc/tool/tools.js
new file mode 100644
index 0000000..4c845d7
--- /dev/null
+++ b/static/scripts/mvc/tool/tools.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","viz/trackster/util","mvc/dataset/data","mvc/tool/tool-form"],function(a,b,c,d){"use strict";var e={hidden:!1,show:function(){this.set("hidden",!1)},hide:function(){this.set("hidden",!0)},toggle:function(){this.set("hidden",!this.get("hidden"))},is_visible:function(){return!this.attributes.hidden}},f=Backbone.Model.extend({defaults:{name:null,label:null,type:null,value:null,html:null,num_samples:5},initialize:function(){this.attributes.html=unescape(this.attribu [...]
+//# sourceMappingURL=../../../maps/mvc/tool/tools.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/tours.js b/static/scripts/mvc/tours.js
new file mode 100644
index 0000000..3bc447f
--- /dev/null
+++ b/static/scripts/mvc/tours.js
@@ -0,0 +1,2 @@
+define(["libs/bootstrap-tour"],function(){var a="undefined"==typeof Galaxy?"/":Galaxy.root,b={storage:window.sessionStorage,onEnd:function(){sessionStorage.removeItem("activeGalaxyTour")},delay:150,orphan:!0},c=function(a){return _.each(a.steps,function(a){a.preclick&&(a.onShow=function(){_.each(a.preclick,function(a){$(a).click()})}),a.postclick&&(a.onHide=function(){_.each(a.postclick,function(a){$(a).click()})}),a.textinsert&&(a.onShown=function(){$(a.element).val(a.textinsert).trigge [...]
+//# sourceMappingURL=../../maps/mvc/tours.js.map
\ No newline at end of file
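
mvc/tours.js adapts declarative tour steps to the bootstrap-tour API: `preclick`/`postclick` selector lists become onShow/onHide click fan-outs, and `textinsert` fills the step's target element. A sketch of the visible logic (names illustrative; the event name in trigger(...) is cut off in the hunk and assumed here):

    define([ 'libs/bootstrap-tour' ], function(){
        // used further down, past the truncation
        var galaxyRoot = typeof Galaxy == 'undefined' ? '/' : Galaxy.root;

        // shared options: persist progress in sessionStorage and drop the
        // active-tour marker when a tour ends
        var tourOptions = {
            storage : window.sessionStorage,
            onEnd   : function(){ sessionStorage.removeItem( 'activeGalaxyTour' ); },
            delay   : 150,
            orphan  : true
        };

        // translate declarative step keys into bootstrap-tour callbacks
        var hookTourSteps = function( tourData ){
            _.each( tourData.steps, function( step ){
                if( step.preclick ){
                    step.onShow = function(){
                        _.each( step.preclick, function( selector ){ $( selector ).click(); });
                    };
                }
                if( step.postclick ){
                    step.onHide = function(){
                        _.each( step.postclick, function( selector ){ $( selector ).click(); });
                    };
                }
                if( step.textinsert ){
                    step.onShown = function(){
                        // event name assumed; the hunk truncates at .trigge[...]
                        $( step.element ).val( step.textinsert ).trigger( 'change' );
                    };
                }
            });
            return tourData;  // return value assumed
        };

        return { hookTourSteps : hookTourSteps };  // export shape assumed
    });
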
diff --git a/static/scripts/mvc/ui/error-modal.js b/static/scripts/mvc/ui/error-modal.js
new file mode 100644
index 0000000..f85bd44
--- /dev/null
+++ b/static/scripts/mvc/ui/error-modal.js
@@ -0,0 +1,2 @@
+define(["utils/localization"],function(a){"use strict";function b(b,c,d){return Galaxy.modal.show({title:c,body:b,closing_events:!0,buttons:{Ok:function(){Galaxy.modal.hide()}}}),Galaxy.modal.$el.addClass("error-modal"),d&&(Galaxy.modal.$(".error-details").add(Galaxy.modal.$('button:contains("Details")')).remove(),$("<div/>").addClass("error-details").hide().appendTo(Galaxy.modal.$(".modal-content")).append([$("<p/>").text(j),$("<pre/>").text(JSON.stringify(d,null,"  "))]),$('<button id= [...]
+//# sourceMappingURL=../../../maps/mvc/ui/error-modal.js.map
\ No newline at end of file
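
In the error-modal.js hunk, the details paragraph reads $("<p/>").text(j), where `j` is a module-scope variable hoisted from past the truncation, most likely a localized caption for the details block. A sketch under that assumption:

    define([ 'utils/localization' ], function( _l ){
        'use strict';

        // stands in for the hoisted module-scope string minified as `j`
        var detailsCaption = _l( 'Details' );  // assumed text

        /** show a Galaxy modal for an error, optionally with expandable details */
        function errorModal( message, title, details ){
            Galaxy.modal.show({
                title          : title,
                body           : message,
                closing_events : true,
                buttons        : { Ok : function(){ Galaxy.modal.hide(); } }
            });
            Galaxy.modal.$el.addClass( 'error-modal' );
            if( details ){
                // drop any previous details block and its toggle button
                Galaxy.modal.$( '.error-details' )
                    .add( Galaxy.modal.$( 'button:contains("Details")' ) )
                    .remove();
                $( '<div/>' ).addClass( 'error-details' ).hide()
                    .appendTo( Galaxy.modal.$( '.modal-content' ) )
                    .append([
                        $( '<p/>' ).text( detailsCaption ),
                        $( '<pre/>' ).text( JSON.stringify( details, null, '  ' ) )
                    ]);
                // a '<button id=...' toggle is appended next, past the truncation
            }
        }
        return errorModal;  // export shape assumed
    });
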
diff --git a/static/scripts/mvc/ui/icon-button.js b/static/scripts/mvc/ui/icon-button.js
new file mode 100644
index 0000000..c2f1efa
--- /dev/null
+++ b/static/scripts/mvc/ui/icon-button.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.Model.extend({defaults:{title:"",icon_class:"",on_click:null,menu_options:null,is_menu_button:!0,id:null,href:null,target:null,enabled:!0,visible:!0,tooltip_config:{}}}),b=Backbone.View.extend({initialize:function(){this.model.attributes.tooltip_config={placement:"bottom"},this.model.bind("change",this.render,this)},render:function(){this.$el.tooltip("hide");var a=this.template(this.model.toJSON());return a.tooltip(this.model.get("tooltip_config")),thi [...]
+//# sourceMappingURL=../../../maps/mvc/ui/icon-button.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/popup-menu.js b/static/scripts/mvc/ui/popup-menu.js
new file mode 100644
index 0000000..dcd0272
--- /dev/null
+++ b/static/scripts/mvc/ui/popup-menu.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.View.extend({initialize:function(a,b){this.$button=a,this.$button.length||(this.$button=$("<div/>")),this.options=b||[],this.$button.data("popupmenu",this);var c=this;this.$button.click(function(a){return $(".popmenu-wrapper").remove(),c._renderAndShow(a),!1})},_renderAndShow:function(a){this.render(),this.$el.appendTo("body").css(this._getShownPosition(a)).show(),this._setUpCloseBehavior()},render:function(){if(this.$el.addClass("popmenu-wrapper").hid [...]
+//# sourceMappingURL=../../../maps/mvc/ui/popup-menu.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-buttons.js b/static/scripts/mvc/ui/ui-buttons.js
new file mode 100644
index 0000000..0f4616b
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-buttons.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){var b=Backbone.View.extend({initialize:function(b){this.model=b&&b.model||new Backbone.Model({id:a.uid(),title:"",floating:"right",icon:"",cls:"btn btn-default",wait:!1,wait_text:"Sending...",wait_cls:"btn btn-info",disabled:!1,percentage:-1}).set(b),this.setElement($("<button/>").attr("type","button").append(this.$icon=$("<i/>")).append(this.$title=$("<span/>")).append(this.$progress=$("<div/>").append(this.$progress_bar=$("<div/>")))),this.listenTo(th [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-buttons.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-color-picker.js b/static/scripts/mvc/ui/ui-color-picker.js
new file mode 100644
index 0000000..1547dba
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-color-picker.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){return Backbone.View.extend({colors:{standard:["c00000","ff0000","ffc000","ffff00","92d050","00b050","00b0f0","0070c0","002060","7030a0"],base:["ffffff","000000","eeece1","1f497d","4f81bd","c0504d","9bbb59","8064a2","4bacc6","f79646"],theme:[["f2f2f2","7f7f7f","ddd9c3","c6d9f0","dbe5f1","f2dcdb","ebf1dd","e5e0ec","dbeef3","fdeada"],["d8d8d8","595959","c4bd97","8db3e2","b8cce4","e5b9b7","d7e3bc","ccc1d9","b7dde8","fbd5b5"],["bfbfbf","3f3f3f","938953","54 [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-color-picker.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-drilldown.js b/static/scripts/mvc/ui/ui-drilldown.js
new file mode 100644
index 0000000..e72702e
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-drilldown.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-options"],function(a,b){var c=b.BaseIcons.extend({initialize:function(a){a.type=a.display||"checkbox",a.multiple="checkbox"==a.type,b.BaseIcons.prototype.initialize.call(this,a)},_setValue:function(a){if(b.BaseIcons.prototype._setValue.call(this,a),void 0!==a&&null!==a&&this.header_index){var c=this,d=$.isArray(a)?a:[a];_.each(d,function(a){var b=c.header_index[a];_.each(b,function(a){c._setState(a,!0)})})}},_setState:function(a,b){var c=this.$(".button-" [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-drilldown.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-frames.js b/static/scripts/mvc/ui/ui-frames.js
new file mode 100644
index 0000000..333a9db
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-frames.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.View.extend({initialize:function(a){this.model=a&&a.model||new Backbone.Model(a),this.setElement($("<div/>").addClass("corner frame")),this.$el.append($("<div/>").addClass("f-header corner").append($("<div/>").addClass("f-title")).append($("<div/>").addClass("f-icon f-close fa fa-close").tooltip({title:"Close",placement:"bottom"}))).append($("<div/>").addClass("f-content")).append($("<div/>").addClass("f-resize f-icon corner fa fa-expand").tooltip({tit [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-frames.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-list.js b/static/scripts/mvc/ui/ui-list.js
new file mode 100644
index 0000000..b8ae51b
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-list.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-portlet","mvc/ui/ui-misc"],function(a,b,c){var d=Backbone.View.extend({initialize:function(a){var d=this;this.options=a,this.name=a.name||"element",this.multiple=a.multiple||!1,this.message=new c.Message,this.portlet=new b.View({cls:"ui-portlet-section"}),this.select=new c.Select.View({optional:a.optional}),this.button=new c.ButtonIcon({icon:"fa fa-sign-in",floating:"left",tooltip:"Insert new "+this.name,onclick:function(){d.add({id:d.select.value(),name: [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-list.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-misc.js b/static/scripts/mvc/ui/ui-misc.js
new file mode 100644
index 0000000..a355c76
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-misc.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-select-default","mvc/ui/ui-slider","mvc/ui/ui-options","mvc/ui/ui-drilldown","mvc/ui/ui-buttons","mvc/ui/ui-modal"],function(a,b,c,d,e,f,g){var h=Backbone.View.extend({tagName:"label",initialize:function(a){this.model=a&&a.model||new Backbone.Model(a),this.tagName=a.tagName||this.tagName,this.setElement($("<"+this.tagName+"/>")),this.listenTo(this.model,"change",this.render,this),this.render()},title:function(a){this.model.set("title",a)},value:function() [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-misc.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-modal.js b/static/scripts/mvc/ui/ui-modal.js
new file mode 100644
index 0000000..78001ed
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-modal.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.View.extend({className:"ui-modal",optionsDefault:{container:"body",title:"ui-modal",cls:"ui-modal",body:"",backdrop:!0,height:null,width:null,closing_events:!1,closing_callback:null,title_separator:!0},buttonList:{},initialize:function(a){this.options=_.defaults(a||{},this.optionsDefault),$(this.options.container).prepend(this.el),a&&this.render()},show:function(a){if(a&&(this.options=_.defaults(a,this.optionsDefault),this.render()),!this.visible&&(thi [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-modal.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-options.js b/static/scripts/mvc/ui/ui-options.js
new file mode 100644
index 0000000..e3392d8
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-options.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-buttons"],function(a,b){var c=Backbone.View.extend({initialize:function(b){var c=this;this.model=b&&b.model||new Backbone.Model({visible:!0,data:[],id:a.uid(),error_text:"No options available.",wait_text:"Please wait...",multiple:!1,optional:!1,onchange:function(){}}).set(b),this.listenTo(this.model,"change:value",this._changeValue,this),this.listenTo(this.model,"change:wait",this._changeWait,this),this.listenTo(this.model,"change:data",this._changeData,t [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-options.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-popover.js b/static/scripts/mvc/ui/ui-popover.js
new file mode 100644
index 0000000..2623216
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-popover.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){var b=Backbone.View.extend({optionsDefault:{with_close:!0,title:null,placement:"top",container:"body",body:null},initialize:function(b){this.setElement(this._template()),this.uid=a.uid(),this.options=_.defaults(b||{},this.optionsDefault),this.options.container.parent().append(this.el),this.$title=this.$(".popover-title-label"),this.$close=this.$(".popover-close"),this.$body=this.$(".popover-content"),this.options.body&&this.append(this.options.body);var [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-popover.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-portlet.js b/static/scripts/mvc/ui/ui-portlet.js
new file mode 100644
index 0000000..1fb2e76
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-portlet.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-misc"],function(a,b){var c=Backbone.View.extend({visible:!1,initialize:function(c){var d=this;this.model=c&&c.model||new Backbone.Model({id:a.uid(),cls:"ui-portlet",title:"",icon:"",buttons:null,body:null,scrollable:!0,nopadding:!1,operations:null,collapsible:!1,collapsible_button:!1,collapsed:!1}).set(c),this.setElement(this._template()),this.$body=this.$(".portlet-body"),this.$title_text=this.$(".portlet-title-text"),this.$title_icon=this.$(".portlet-ti [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-portlet.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-select-content.js b/static/scripts/mvc/ui/ui-select-content.js
new file mode 100644
index 0000000..5393abf
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-select-content.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-misc","mvc/ui/ui-select-default"],function(a,b,c){var d={DISABLED:"disabled",ENABLED:"enabled",LINKED:"linked"},e={data:[{src:"hda",icon:"fa-file-o",tooltip:"Single dataset",multiple:!1,batch:d.DISABLED},{src:"hda",icon:"fa-files-o",tooltip:"Multiple datasets",multiple:!0,batch:d.LINKED},{src:"hdca",icon:"fa-folder-o",tooltip:"Dataset collection",multiple:!1,batch:d.LINKED}],data_multiple:[{src:"hda",icon:"fa-files-o",tooltip:"Multiple datasets",multiple: [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-select-content.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-select-default.js b/static/scripts/mvc/ui/ui-select-default.js
new file mode 100644
index 0000000..a90adcc
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-select-default.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-buttons"],function(a,b){var c=Backbone.View.extend({initialize:function(b){var c=this;this.data=[],this.data2=[],this.model=b&&b.model||new Backbone.Model({id:a.uid(),cls:"ui-select",error_text:"No options available",empty_text:"Nothing selected",visible:!0,wait:!1,multiple:!1,searchable:!0,optional:!1,disabled:!1,onchange:function(){},value:null,selectall:!0,pagesize:20}).set(b),this.on("change",function(){c.model.get("onchange")&&c.model.get("onchange") [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-select-default.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-select-ftp.js b/static/scripts/mvc/ui/ui-select-ftp.js
new file mode 100644
index 0000000..4c4126b
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-select-ftp.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-list"],function(a,b){var c=Backbone.View.extend({initialize:function(c){var d=this;this.ftpfile_list=new b.View({name:"file",optional:c.optional,multiple:c.multiple,onchange:function(){c.onchange&&c.onchange(d.value())}}),this.setElement(this.ftpfile_list.$el),a.get({url:Galaxy.root+"api/remote_files",success:function(a){var b=[];for(var c in a)b.push({value:a[c].path,label:a[c].path});d.ftpfile_list.update(b)}})},value:function(a){return this.ftpfile_lis [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-select-ftp.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-select-library.js b/static/scripts/mvc/ui/ui-select-library.js
new file mode 100644
index 0000000..648fd2c
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-select-library.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-misc","mvc/ui/ui-table","mvc/ui/ui-list"],function(a,b,c,d){var e=Backbone.Collection.extend({url:Galaxy.root+"api/libraries?deleted=false"}),f=Backbone.Collection.extend({initialize:function(){var a=this;this.config=new Backbone.Model({library_id:null}),this.config.on("change",function(){a.fetch({reset:!0})})},url:function(){return Galaxy.root+"api/libraries/"+this.config.get("library_id")+"/contents"}}),g=Backbone.View.extend({initialize:function(a){var [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-select-library.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-select.js b/static/scripts/mvc/ui/ui-select.js
new file mode 100644
index 0000000..912d504
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-select.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){var b=Backbone.View.extend({optionsDefault:{css:"",placeholder:"No data available",data:[],value:null,multiple:!1,minimumInputLength:0,initialData:""},initialize:function(b){if(this.options=a.merge(b,this.optionsDefault),this.setElement(this._template(this.options)),!this.options.container)return void console.log("ui-select::initialize() : container not specified.");if(this.options.container.append(this.$el),this.select_data=this.options.data,this._refr [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-select.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-slider.js b/static/scripts/mvc/ui/ui-slider.js
new file mode 100644
index 0000000..fbd3dd6
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-slider.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){var b=Backbone.View.extend({initialize:function(b){var c=this;this.options=a.merge(b,{id:a.uid(),min:null,max:null,step:null,precise:!1,split:1e4}),this.setElement(this._template(this.options)),this.useslider=null!==this.options.max&&null!==this.options.min&&this.options.max>this.options.min,null===this.options.step&&(this.options.step=1,this.options.precise&&this.useslider&&(this.options.step=(this.options.max-this.options.min)/this.options.split)),thi [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-slider.js.map
\ No newline at end of file
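
ui-slider.js derives its step size from the range; a sketch of the visible initialization (element/template wiring lies past the truncation). With min=0, max=1, and precise=true, the default split of 1e4 yields step = 1/10000.

    define([ 'utils/utils' ], function( Utils ){
        return Backbone.View.extend({
            initialize : function( options ){
                this.options = Utils.merge( options, {
                    id      : Utils.uid(),
                    min     : null,
                    max     : null,
                    step    : null,
                    precise : false,
                    split   : 10000   // written as 1e4 in the minified build
                });
                // (the minified code calls this.setElement(this._template(this.options)) here)
                // only a bounded, non-empty range gets a real slider widget
                this.useslider = this.options.max !== null &&
                                 this.options.min !== null &&
                                 this.options.max > this.options.min;
                if( this.options.step === null ){
                    this.options.step = 1;
                    // a "precise" slider divides the range into `split` increments
                    if( this.options.precise && this.useslider ){
                        this.options.step =
                            ( this.options.max - this.options.min ) / this.options.split;
                    }
                }
                // remaining setup continues past the truncated portion
            }
        });
    });
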
diff --git a/static/scripts/mvc/ui/ui-table.js b/static/scripts/mvc/ui/ui-table.js
new file mode 100644
index 0000000..4541ed7
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-table.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){var b=Backbone.View.extend({row:null,row_count:0,optionsDefault:{content:"No content available.",onchange:null,ondblclick:null,onconfirm:null,cls:"ui-table",cls_tr:""},events:{click:"_onclick",dblclick:"_ondblclick"},initialize:function(b){this.options=a.merge(b,this.optionsDefault);var c=$(this._template(this.options));this.$thead=c.find("thead"),this.$tbody=c.find("tbody"),this.$tmessage=c.find("tmessage"),this.setElement(c),this.row=this._row()},addH [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-table.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-tabs.js b/static/scripts/mvc/ui/ui-tabs.js
new file mode 100644
index 0000000..9a0e91b
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-tabs.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(){var a=Backbone.View.extend({initialize:function(a){this.collection=new Backbone.Collection,this.model=a&&a.model||new Backbone.Model({onchange:null,visible:!0}).set(a),this.setElement($(this._template())),this.$nav=this.$(".tab-navigation"),this.$content=this.$(".tab-content"),this.$el.on("click",function(){$(".tooltip").hide()}),this.render(),this.listenTo(this.model,"change",this.render,this),this.listenTo(this.collection,"add",this._add,this),this.lis [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-tabs.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/ui/ui-thumbnails.js b/static/scripts/mvc/ui/ui-thumbnails.js
new file mode 100644
index 0000000..94d7eaa
--- /dev/null
+++ b/static/scripts/mvc/ui/ui-thumbnails.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-misc","mvc/ui/ui-tabs"],function(a,b,c){var d=Backbone.View.extend({events:{"click .ui-thumbnails-item":"_onclick","dblclick .ui-thumbnails-item":"_ondblclick"},initialize:function(a){this.model=a.model||new Backbone.Model(a),this.collection=new Backbone.Collection(this.model.get("collection")),this.tabs=new c.View({}),this.setElement(this.tabs.$el.addClass("ui-thumbnails")),this.render(),this.listenTo(this.model,"change",this.render,this),this.listenTo(t [...]
+//# sourceMappingURL=../../../maps/mvc/ui/ui-thumbnails.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/upload/composite/composite-row.js b/static/scripts/mvc/upload/composite/composite-row.js
new file mode 100644
index 0000000..9f62700
--- /dev/null
+++ b/static/scripts/mvc/upload/composite/composite-row.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/upload/upload-settings","mvc/upload/upload-ftp","mvc/ui/ui-popover","mvc/ui/ui-misc","mvc/ui/ui-select","utils/uploadbox"],function(a,b,c,d,e){return Backbone.View.extend({status_classes:{init:"upload-mode fa fa-exclamation text-primary",ready:"upload-mode fa fa-check text-success",running:"upload-mode fa fa-spinner fa-spin",success:"upload-mode fa fa-check",error:"upload-mode fa fa-exclamation-triangle"},initialize:function(a,b){var c=this;this.app=a,this.mode [...]
+//# sourceMappingURL=../../../../maps/mvc/upload/composite/composite-row.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/upload/composite/composite-view.js b/static/scripts/mvc/upload/composite/composite-view.js
new file mode 100644
index 0000000..2e6188c
--- /dev/null
+++ b/static/scripts/mvc/upload/composite/composite-view.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/upload/upload-model","mvc/upload/composite/composite-row","mvc/ui/ui-popover","mvc/ui/ui-select","mvc/ui/ui-misc"],function(a,b,c,d,e,f){return Backbone.View.extend({collection:new b.Collection,initialize:function(a){var b=this;this.app=a,this.options=a.options,this.list_extensions=a.list_extensions,this.list_genomes=a.list_genomes,this.ftp_upload_site=a.currentFtp(),this.setElement(this._template()),this.btnStart=new f.Button({title:"Start",onclick:function(){ [...]
+//# sourceMappingURL=../../../../maps/mvc/upload/composite/composite-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/upload/default/default-row.js b/static/scripts/mvc/upload/default/default-row.js
new file mode 100644
index 0000000..3211049
--- /dev/null
+++ b/static/scripts/mvc/upload/default/default-row.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/upload/upload-model","mvc/upload/upload-settings","mvc/ui/ui-popover","mvc/ui/ui-select"],function(a,b,c,d,e){return Backbone.View.extend({status_classes:{init:"upload-icon-button fa fa-trash-o",queued:"upload-icon fa fa-spinner fa-spin",running:"upload-icon fa fa-spinner fa-spin",success:"upload-icon-button fa fa-check",error:"upload-icon-button fa fa-exclamation-triangle"},initialize:function(a,b){var c=this;this.app=a,this.model=b.model,this.setElement(this. [...]
+//# sourceMappingURL=../../../../maps/mvc/upload/default/default-row.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/upload/default/default-view.js b/static/scripts/mvc/upload/default/default-view.js
new file mode 100644
index 0000000..75de233
--- /dev/null
+++ b/static/scripts/mvc/upload/default/default-view.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/upload/upload-model","mvc/upload/default/default-row","mvc/upload/upload-ftp","mvc/ui/ui-popover","mvc/ui/ui-select","mvc/ui/ui-misc","utils/uploadbox"],function(a,b,c,d,e,f,g){return Backbone.View.extend({upload_size:0,collection:new b.Collection,counter:{announce:0,success:0,error:0,running:0,reset:function(){this.announce=this.success=this.error=this.running=0}},initialize:function(a){var b=this;this.app=a,this.options=a.options,this.list_extensions=a.list_e [...]
+//# sourceMappingURL=../../../../maps/mvc/upload/default/default-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/upload/upload-button.js b/static/scripts/mvc/upload/upload-button.js
new file mode 100644
index 0000000..f3110ea
--- /dev/null
+++ b/static/scripts/mvc/upload/upload-button.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.View.extend({initialize:function(a){var b=this;this.model=a&&a.model||new Backbone.Model({icon:"fa-upload",tooltip:"Download from URL or upload files from disk",label:"Load Data",percentage:0,status:"",onunload:function(){},onclick:function(){}}).set(a),this.setElement(this._template()),this.$progress=this.$(".progress-bar"),this.listenTo(this.model,"change",this.render,this),this.render(),$(window).on("beforeunload",function(){return b.model.get("onun [...]
+//# sourceMappingURL=../../../maps/mvc/upload/upload-button.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/upload/upload-ftp.js b/static/scripts/mvc/upload/upload-ftp.js
new file mode 100644
index 0000000..98223ec
--- /dev/null
+++ b/static/scripts/mvc/upload/upload-ftp.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){return Backbone.View.extend({initialize:function(b){var c=this;this.options=a.merge(b,{class_add:"upload-icon-button fa fa-square-o",class_remove:"upload-icon-button fa fa-check-square-o",class_partial:"upload-icon-button fa fa-minus-square-o",collection:null,onchange:function(){},onadd:function(){},onremove:function(){}}),this.collection=this.options.collection,this.setElement(this._template()),this.rows=[],a.get({url:Galaxy.root+"api/remote_files",suc [...]
+//# sourceMappingURL=../../../maps/mvc/upload/upload-ftp.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/upload/upload-model.js b/static/scripts/mvc/upload/upload-model.js
new file mode 100644
index 0000000..4a605da
--- /dev/null
+++ b/static/scripts/mvc/upload/upload-model.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.Model.extend({defaults:{extension:"auto",genome:"?",url_paste:"",status:"init",info:null,file_name:"",file_mode:"",file_size:0,file_type:null,file_path:"",file_data:null,percentage:0,space_to_tab:!1,to_posix_lines:!0,enabled:!0},reset:function(a){this.clear().set(this.defaults).set(a)}}),b=Backbone.Collection.extend({model:a});return{Model:a,Collection:b}});
+//# sourceMappingURL=../../../maps/mvc/upload/upload-model.js.map
\ No newline at end of file
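
upload-model.js is also added whole; de-minified with descriptive names, it is just a model/collection pair where `reset` restores the literal defaults and then applies any overrides:

    define([], function(){
        // state of a single pending upload
        var UploadModel = Backbone.Model.extend({
            defaults : {
                extension      : 'auto',
                genome         : '?',
                url_paste      : '',
                status         : 'init',
                info           : null,
                file_name      : '',
                file_mode      : '',
                file_size      : 0,
                file_type      : null,
                file_path      : '',
                file_data      : null,
                percentage     : 0,
                space_to_tab   : false,
                to_posix_lines : true,
                enabled        : true
            },
            // clear everything, restore defaults, then apply overrides
            reset : function( attr ){
                this.clear().set( this.defaults ).set( attr );
            }
        });
        var UploadCollection = Backbone.Collection.extend({ model : UploadModel });
        return { Model : UploadModel, Collection : UploadCollection };
    });
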
diff --git a/static/scripts/mvc/upload/upload-settings.js b/static/scripts/mvc/upload/upload-settings.js
new file mode 100644
index 0000000..621b61d
--- /dev/null
+++ b/static/scripts/mvc/upload/upload-settings.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(){return Backbone.View.extend({options:{class_check:"fa-check-square-o",class_uncheck:"fa-square-o",parameters:[{id:"space_to_tab",title:"Convert spaces to tabs"},{id:"to_posix_lines",title:"Use POSIX standard"}]},initialize:function(a){this.model=a.model,this.setElement($("<div/>").addClass("upload-settings")),this.$el.append($("<div/>").addClass("upload-settings-cover")),this.$el.append($("<table/>").addClass("upload-settings-table ui-table-striped").app [...]
+//# sourceMappingURL=../../../maps/mvc/upload/upload-settings.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/upload/upload-view.js b/static/scripts/mvc/upload/upload-view.js
new file mode 100644
index 0000000..6de4f91
--- /dev/null
+++ b/static/scripts/mvc/upload/upload-view.js
@@ -0,0 +1,2 @@
+define(["utils/utils","mvc/ui/ui-modal","mvc/ui/ui-tabs","mvc/upload/upload-button","mvc/upload/default/default-view","mvc/upload/composite/composite-view"],function(a,b,c,d,e,f){return Backbone.View.extend({options:{nginx_upload_path:"",ftp_upload_site:"n/a",default_genome:"?",default_extension:"auto",height:500,width:900,auto:{id:"auto",text:"Auto-detect",description:"This system will try to detect the file type automatically. If your file is not detected properly as one of the known f [...]
+//# sourceMappingURL=../../../maps/mvc/upload/upload-view.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/user/change-password.js b/static/scripts/mvc/user/change-password.js
new file mode 100644
index 0000000..37df16c
--- /dev/null
+++ b/static/scripts/mvc/user/change-password.js
@@ -0,0 +1,2 @@
+define(["mvc/user/manage-user-information"],function(a){var b=Backbone.View.extend({initialize:function(a){this.render(a)},renderMessage:function(a,b){return'<div class="'+(""===b?"done":b)+"message"+("error"===b?" validate":"")+'">'+a+"</div>"},render:function(b){var c="",d=this;$(".user-pref").css("display","none"),$(".change-password-section").remove(),a.ManageUserInformation.prototype.hideErrorDoneMessage(),b.status&&(c=d.renderMessage(b.message,b.status)),c+='<div class="change-pass [...]
+//# sourceMappingURL=../../../maps/mvc/user/change-password.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/user/extra-information.js b/static/scripts/mvc/user/extra-information.js
new file mode 100644
index 0000000..6700819
--- /dev/null
+++ b/static/scripts/mvc/user/extra-information.js
@@ -0,0 +1,2 @@
+define(["mvc/user/manage-user-information"],function(a){var b=Backbone.View.extend({initialize:function(a){this.render(a)},render:function(a){var b="",c=this,d=null,e=null,f={},g={},h="",i=!1;if($(".user-pref").hide(),$(".donemessage").hide(),$(".errormessage").hide(),f=a.config,g=0===Object.keys(a.plugins).length?{}:JSON.parse(a.plugins),b+='<div class="extra-information-section"> <h2>Extra Information</h2>',b+='<ul class="manage-table-actions"> <li><a class="action-button back-user-pre [...]
+//# sourceMappingURL=../../../maps/mvc/user/extra-information.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/user/manage-user-information.js b/static/scripts/mvc/user/manage-user-information.js
new file mode 100644
index 0000000..5f9bc7e
--- /dev/null
+++ b/static/scripts/mvc/user/manage-user-information.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.View.extend({original_email:"",original_username:"",user_id:"",address_id:"",initialize:function(a){this.initializeSection(a)},initializeSection:function(a){this.render(this,a),this.user_id=a.user_id,this.original_email=a.email,this.original_username=a.username},validateString:function(a,b){var c=/^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/,d= [...]
+//# sourceMappingURL=../../../maps/mvc/user/manage-user-information.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/user/user-model.js b/static/scripts/mvc/user/user-model.js
new file mode 100644
index 0000000..247e672
--- /dev/null
+++ b/static/scripts/mvc/user/user-model.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","libs/backbone","mvc/base-mvc","utils/localization"],function(a,b,c,d){"use strict";var e="user",f=b.Model.extend(c.LoggableMixin).extend({_logNamespace:e,urlRoot:function(){return Galaxy.root+"api/users"},defaults:{id:null,username:"("+d("anonymous user")+")",email:"",total_disk_usage:0,nice_total_disk_usage:"",quota_percent:null,is_admin:!1},initialize:function(a){this.log("User.initialize:",a),this.on("loaded",function(a,b){this.log(this+" has loaded:",a,b)}) [...]
+//# sourceMappingURL=../../../maps/mvc/user/user-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/user/user-quotameter.js b/static/scripts/mvc/user/user-quotameter.js
new file mode 100644
index 0000000..aec065a
--- /dev/null
+++ b/static/scripts/mvc/user/user-quotameter.js
@@ -0,0 +1,2 @@
+define(["mvc/base-mvc","utils/localization"],function(a,b){"use strict";var c="user",d=Backbone.View.extend(a.LoggableMixin).extend({_logNamespace:c,options:{warnAtPercent:85,errorAtPercent:100},initialize:function(a){this.log(this+".initialize:",a),_.extend(this.options,a),this.listenTo(this.model,"change:quota_percent change:total_disk_usage",this.render)},update:function(a){return this.log(this+" updating user data...",a),this.model.loadFromApi(this.model.get("id"),a),this},isOverQuot [...]
+//# sourceMappingURL=../../../maps/mvc/user/user-quotameter.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/visualization/visualization-model.js b/static/scripts/mvc/visualization/visualization-model.js
new file mode 100644
index 0000000..f532dd8
--- /dev/null
+++ b/static/scripts/mvc/visualization/visualization-model.js
@@ -0,0 +1,2 @@
+var Visualization=Backbone.Model.extend({defaults:{config:{}},urlRoot:function(){var a="api/visualizations";return Galaxy.root+a},initialize:function(a){_.isObject(a.config)&&_.isObject(this.defaults.config)&&_.defaults(a.config,this.defaults.config),this._setUpListeners()},_setUpListeners:function(){},set:function(a,b){if("config"===a){var c=this.get("config");_.isObject(c)&&(b=_.extend(_.clone(c),b))}return Backbone.Model.prototype.set.call(this,a,b),this},toString:function(){var a=thi [...]
+//# sourceMappingURL=../../../maps/mvc/visualization/visualization-model.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/webhooks.js b/static/scripts/mvc/webhooks.js
new file mode 100644
index 0000000..d8c612e
--- /dev/null
+++ b/static/scripts/mvc/webhooks.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.Model.extend({defaults:{activate:!1}}),b=Backbone.Collection.extend({model:a}),c=Backbone.View.extend({el:"#webhook-view",initialize:function(b){var c=this;this.model=new a,this.model.urlRoot=b.urlRoot,this.model.fetch({success:function(){c.render()}})},render:function(){var a=this.model.toJSON();return this.$el.html('<div id="'+a.name+'"></div>'),a.styles&&$("<style/>",{type:"text/css"}).text(a.styles).appendTo("head"),a.script&&$("<script/>",{type:"t [...]
+//# sourceMappingURL=../../maps/mvc/webhooks.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/workflow/workflow-canvas.js b/static/scripts/mvc/workflow/workflow-canvas.js
new file mode 100644
index 0000000..f2dcdb4
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-canvas.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a,b,c){this.app=a,this.cv=b,this.cc=this.cv.find("#canvas-container"),this.overview=c,this.oc=c.find("#overview-canvas"),this.ov=c.find("#overview-viewport"),this.init_drag()}function b(a){this.panel=a}return $.extend(a.prototype,{init_drag:function(){var a=this,c=function(b,c){b=Math.min(b,a.cv.width()/2),b=Math.max(b,-a.cc.width()+a.cv.width()/2),c=Math.min(c,a.cv.height()/2),c=Math.max(c,-a.cc.height()+a.cv.height()/2),a.cc.css({left:b,top:c}),a.cv.css( [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-canvas.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/workflow/workflow-connector.js b/static/scripts/mvc/workflow/workflow-connector.js
new file mode 100644
index 0000000..6c894e7
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-connector.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a,b){this.canvas=null,this.dragging=!1,this.inner_color="#FFFFFF",this.outer_color="#D8B365",a&&b&&this.connect(a,b)}return $.extend(a.prototype,{connect:function(a,b){this.handle1=a,this.handle1&&this.handle1.connect(this),this.handle2=b,this.handle2&&this.handle2.connect(this)},destroy:function(){this.handle1&&this.handle1.disconnect(this),this.handle2&&this.handle2.disconnect(this),$(this.canvas).remove()},destroyIfInvalid:function(){this.handle1&&this. [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-connector.js.map
\ No newline at end of file
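
workflow-connector.js models the drawn edge between two terminal "handles"; connecting registers the connector with each handle so either side can later disconnect it. A sketch of the visible methods (the canvas drawing logic comes after the truncation):

    define([], function(){
        // visual connection between two workflow terminals
        function Connector( handle1, handle2 ){
            this.canvas = null;
            this.dragging = false;
            this.inner_color = '#FFFFFF';
            this.outer_color = '#D8B365';
            if( handle1 && handle2 ){
                this.connect( handle1, handle2 );
            }
        }
        $.extend( Connector.prototype, {
            // attach to both endpoints and register with each
            connect : function( t1, t2 ){
                this.handle1 = t1;
                if( this.handle1 ){ this.handle1.connect( this ); }
                this.handle2 = t2;
                if( this.handle2 ){ this.handle2.connect( this ); }
            },
            // unregister from both endpoints and remove the drawn canvas
            destroy : function(){
                if( this.handle1 ){ this.handle1.disconnect( this ); }
                if( this.handle2 ){ this.handle2.disconnect( this ); }
                $( this.canvas ).remove();
            }
        });
        return Connector;  // export shape assumed
    });
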
diff --git a/static/scripts/mvc/workflow/workflow-manager.js b/static/scripts/mvc/workflow/workflow-manager.js
new file mode 100644
index 0000000..d24d523
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-manager.js
@@ -0,0 +1,2 @@
+define(["mvc/workflow/workflow-connector","libs/toastr"],function(a,b){function c(a,b){this.app=a,this.canvas_container=b,this.id_counter=0,this.nodes={},this.name=null,this.has_changes=!1,this.active_form_has_changes=!1,this.nodeLabels={},this.workflowOutputLabels={}}return $.extend(c.prototype,{canLabelNodeWith:function(a){return a?!(a in this.nodeLabels):!0},registerNodeLabel:function(a){a&&(this.nodeLabels[a]=!0)},unregisterNodeLabel:function(a){a&&delete this.nodeLabels[a]},updateNo [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-manager.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/workflow/workflow-node.js b/static/scripts/mvc/workflow/workflow-node.js
new file mode 100644
index 0000000..cb57d15
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-node.js
@@ -0,0 +1,2 @@
+define(["mvc/workflow/workflow-view-node"],function(a){var b=Backbone.Model.extend({initialize:function(a,b){this.app=a,this.element=b.element,this.input_terminals={},this.output_terminals={},this.tool_errors={},this.workflow_outputs=[]},getWorkflowOutput:function(a){return _.findWhere(this.workflow_outputs,{output_name:a})},isWorkflowOutput:function(a){return void 0!=this.getWorkflowOutput(a)},removeWorkflowOutput:function(a){for(;this.isWorkflowOutput(a);)this.workflow_outputs.splice(t [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-node.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/workflow/workflow-terminals.js b/static/scripts/mvc/workflow/workflow-terminals.js
new file mode 100644
index 0000000..60d4e84
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-terminals.js
@@ -0,0 +1,2 @@
+define(["mvc/workflow/workflow-globals"],function(a){function b(a){this.collectionType=a,this.isCollection=!0,this.rank=a.split(":").length}$.extend(b.prototype,{append:function(a){return a===NULL_COLLECTION_TYPE_DESCRIPTION?this:a===ANY_COLLECTION_TYPE_DESCRIPTION?otherCollectionType:new b(this.collectionType+":"+a.collectionType)},canMatch:function(a){return a===NULL_COLLECTION_TYPE_DESCRIPTION?!1:a===ANY_COLLECTION_TYPE_DESCRIPTION?!0:a.collectionType==this.collectionType},canMapOver: [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-terminals.js.map
\ No newline at end of file
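
In workflow-terminals.js, collection types such as "list:paired" are ranked by nesting depth. Note that the minified `append` returns a free variable `otherCollectionType` in its ANY branch, which the minifier could not rename because it is undeclared; it looks like an upstream typo for the other description, and the sketch returns that instead. The sentinel descriptions are defined later in the real module and are stubbed here so the sketch stands alone:

    define([ 'mvc/workflow/workflow-globals' ], function( Globals ){
        // stubs for sentinels defined past the truncated portion of the hunk
        var NULL_COLLECTION_TYPE_DESCRIPTION = { isCollection : false };
        var ANY_COLLECTION_TYPE_DESCRIPTION  = { isCollection : true };

        // e.g. "list", "paired", "list:paired"; rank is the nesting depth
        function CollectionTypeDescription( collectionType ){
            this.collectionType = collectionType;
            this.isCollection = true;
            this.rank = collectionType.split( ':' ).length;
        }
        $.extend( CollectionTypeDescription.prototype, {
            append : function( other ){
                if( other === NULL_COLLECTION_TYPE_DESCRIPTION ){ return this; }
                if( other === ANY_COLLECTION_TYPE_DESCRIPTION ){
                    // the minified build returns the undeclared
                    // `otherCollectionType` here (a ReferenceError if hit);
                    // returning `other` looks intended
                    return other;
                }
                return new CollectionTypeDescription(
                    this.collectionType + ':' + other.collectionType );
            },
            canMatch : function( other ){
                if( other === NULL_COLLECTION_TYPE_DESCRIPTION ){ return false; }
                if( other === ANY_COLLECTION_TYPE_DESCRIPTION ){ return true; }
                return other.collectionType == this.collectionType;
            }
        });
        return { CollectionTypeDescription : CollectionTypeDescription };  // partial
    });
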
diff --git a/static/scripts/mvc/workflow/workflow-view-data.js b/static/scripts/mvc/workflow/workflow-view-data.js
new file mode 100644
index 0000000..7625031
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-view-data.js
@@ -0,0 +1,2 @@
+define(["mvc/workflow/workflow-globals"],function(a){var b=Backbone.View.extend({className:"form-row dataRow input-data-row",initialize:function(a){this.input=a.input,this.nodeView=a.nodeView,this.terminalElement=a.terminalElement,this.$el.attr("name",this.input.name).html(this.input.label),a.skipResize||(this.$el.css({position:"absolute",left:-1e3,top:-1e3,display:"none"}),$("body").append(this.el),this.nodeView.updateMaxWidth(this.$el.outerWidth()),this.$el.css({position:"",left:"",top [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-view-data.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/workflow/workflow-view-node.js b/static/scripts/mvc/workflow/workflow-view-node.js
new file mode 100644
index 0000000..060a6ff
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-view-node.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","mvc/workflow/workflow-view-terminals","mvc/workflow/workflow-view-data"],function(a,b,c){return Backbone.View.extend({initialize:function(a){this.node=a.node,this.output_width=Math.max(150,this.$el.width()),this.tool_body=this.$el.find(".toolFormBody"),this.tool_body.find("div").remove(),this.newInputsDiv().appendTo(this.tool_body),this.terminalViews={},this.outputViews={}},render:function(){this.renderToolLabel(),this.renderToolErrors(),this.$el.css("width",Ma [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-view-node.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/workflow/workflow-view-terminals.js b/static/scripts/mvc/workflow/workflow-view-terminals.js
new file mode 100644
index 0000000..3628713
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-view-terminals.js
@@ -0,0 +1,2 @@
+define(["mvc/workflow/workflow-globals","mvc/workflow/workflow-terminals","mvc/workflow/workflow-connector"],function(a,b,c){var d=Backbone.View.extend({tagName:"div",className:"fa-icon-button fa fa-folder-o",initialize:function(){var a="Run tool in parallel over collection";this.$el.tooltip({delay:500,title:a}),this.model.bind("change",_.bind(this.render,this))},render:function(){this.model.mapOver.isCollection?this.$el.show():this.$el.hide()}}),e=d.extend({events:{click:"onClick",mouse [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-view-terminals.js.map
\ No newline at end of file
diff --git a/static/scripts/mvc/workflow/workflow-view.js b/static/scripts/mvc/workflow/workflow-view.js
new file mode 100644
index 0000000..fc9659c
--- /dev/null
+++ b/static/scripts/mvc/workflow/workflow-view.js
@@ -0,0 +1,2 @@
+define("mvc/workflow/workflow-globals",{}),define(["utils/utils","mvc/workflow/workflow-globals","mvc/workflow/workflow-manager","mvc/workflow/workflow-canvas","mvc/workflow/workflow-node","mvc/tool/tool-form-workflow","mvc/ui/ui-misc","utils/async-save-text","libs/toastr","ui/editable-text"],function(a,b,c,d,e,f,g,h,i){function j(a){var b=$("#galaxy_tools").contents();if(0===b.length&&(b=$(document)),$(this).removeClass("search_active"),b.find(".toolTitle").removeClass("search_match"),b [...]
+//# sourceMappingURL=../../../maps/mvc/workflow/workflow-view.js.map
\ No newline at end of file
diff --git a/static/scripts/nls/ja/locale.js b/static/scripts/nls/ja/locale.js
new file mode 100644
index 0000000..3863bed
--- /dev/null
+++ b/static/scripts/nls/ja/locale.js
@@ -0,0 +1,2 @@
+define({"This history is empty":"ヒストリーは空です","No matching datasets found":"一致するデータセットが見つかりませんでした","Search datasets":"データセットを検索する","You are currently viewing a deleted history!":"消去したヒストリーをみています。","You are over your disk quota":"あなたはディスククォータを超えている",All:"一式",None:"なし","For all selected":"各項目を","Click to rename history":"ヒストリーの名前を変更するにはクリック","Operations on multiple datasets":"複数のデータセットに対する操作","Permanently delete datasets":"永久にデータセットを削除","This will permanently remove the data in your datasets [...]
+//# sourceMappingURL=../../../maps/nls/ja/locale.js.map
\ No newline at end of file
diff --git a/static/scripts/nls/locale.js b/static/scripts/nls/locale.js
new file mode 100644
index 0000000..16b9006
--- /dev/null
+++ b/static/scripts/nls/locale.js
@@ -0,0 +1,2 @@
+define({root:{"This history is empty":!1,"No matching datasets found":!1,"An error occurred while getting updates from the server":!1,"Please contact a Galaxy administrator if the problem persists":!1,"Search datasets":!1,"You are currently viewing a deleted history!":!1,"You are over your disk quota":!1,"Tool execution is on hold until your disk usage drops below your allocated quota":!1,All:!1,None:!1,"For all selected":!1,"Edit history tags":!1,"Edit history Annotation":!1,"Click to r [...]
+//# sourceMappingURL=../../maps/nls/locale.js.map
\ No newline at end of file
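For readers unfamiliar with the format: this is a require.js i18n bundle, and `!1` is minified `false`, meaning the root bundle ships no translation for that key (the lookup helper in utils/localization.js further down falls back to the key itself). A hand-written bundle of the same shape would look like the sketch below; the per-language flags are illustrative, as the real ones sit in the truncated part of the line:

    define({
        root: {
            "This history is empty": false,  // false: no root translation, key used as-is
            "Search datasets": false
        },
        // availability flags for the bundles in nls/ja/ and nls/zh/ (assumed):
        "ja": true,
        "zh": true
    });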
diff --git a/static/scripts/nls/zh/locale.js b/static/scripts/nls/zh/locale.js
new file mode 100644
index 0000000..fda4024
--- /dev/null
+++ b/static/scripts/nls/zh/locale.js
@@ -0,0 +1,2 @@
+define({"This history is empty":"历史已空","No matching datasets found":"未找到匹配的数据集","Search datasets":"搜索数据集","You are currently viewing a deleted history!":"正在查看已删除的历史","You are over your disk quota":"您已超过磁盘配额",All:"皆",None:"一个也没有","For all selected":"为每个选定","Click to rename history":"单击要重命名的历史","Operations on multiple datasets":"编辑多个数据集","Permanently delete datasets":"永久删除数据集","This will permanently remove the data in your datasets. Are you sure?":"这将永久在你的数据集删除数据。你确定?",Dataset:"数据集","This  [...]
+//# sourceMappingURL=../../../maps/nls/zh/locale.js.map
\ No newline at end of file
diff --git a/static/scripts/onload.js b/static/scripts/onload.js
new file mode 100644
index 0000000..073cfce
--- /dev/null
+++ b/static/scripts/onload.js
@@ -0,0 +1,2 @@
+function replace_big_select_inputs(a,b,c){function d(a){var b=$(a),c={placeholder:"Click to select",closeOnSelect:!b.is("[MULTIPLE]"),dropdownAutoWidth:!0,containerCssClass:"select2-minwidth"};return a.select2(c)}jQuery.fn.select2&&(void 0===a&&(a=20),void 0===b&&(b=3e3),c=c||$("select"),c.each(function(){var c=$(this).not("[multiple]"),e=c.find("option").length;a>e||e>b||c.hasClass("no-autocomplete")||d(c)}))}function init_refresh_on_change(){$("select[refresh_on_change='true']").off("c [...]
+//# sourceMappingURL=../maps/onload.js.map
\ No newline at end of file
diff --git a/static/scripts/packed b/static/scripts/packed
new file mode 120000
index 0000000..6a04314
--- /dev/null
+++ b/static/scripts/packed
@@ -0,0 +1 @@
+./
\ No newline at end of file
diff --git a/static/scripts/polyfills.js b/static/scripts/polyfills.js
new file mode 100644
index 0000000..9ca35e1
--- /dev/null
+++ b/static/scripts/polyfills.js
@@ -0,0 +1,2 @@
+!function(){"use strict";window.console=window.console||{log:function(){},debug:function(){},info:function(){},warn:function(){},error:function(){},assert:function(){}},Object.assign=Object.assign||_.extend;for(var a=0,b=["ms","moz","webkit","o"],c=0;c<b.length&&!window.requestAnimationFrame;++c)window.requestAnimationFrame=window[b[c]+"RequestAnimationFrame"],window.cancelRequestAnimationFrame=window[b[c]+"CancelRequestAnimationFrame"];window.requestAnimationFrame||(window.requestAnimat [...]
+//# sourceMappingURL=../maps/polyfills.js.map
\ No newline at end of file
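The visible part of this hunk stubs window.console, aliases Object.assign to Underscore's _.extend, and probes vendor prefixes for requestAnimationFrame; the final timer fallback is cut off by the archive's line truncation. De-minified, the prefix probe reads as follows, with the setTimeout fallback shown being the conventional pattern, assumed rather than recovered from the truncated text:

    // Try ms/moz/webkit/o prefixes until a native implementation turns up.
    var prefixes = ["ms", "moz", "webkit", "o"];
    for (var i = 0; i < prefixes.length && !window.requestAnimationFrame; i++) {
        window.requestAnimationFrame = window[prefixes[i] + "RequestAnimationFrame"];
        window.cancelRequestAnimationFrame = window[prefixes[i] + "CancelRequestAnimationFrame"];
    }
    // Conventional ~60fps fallback (assumed; not visible above):
    if (!window.requestAnimationFrame) {
        window.requestAnimationFrame = function (callback) {
            return window.setTimeout(function () { callback(Date.now()); }, 1000 / 60);
        };
    }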
diff --git a/static/scripts/reports_webapp/run_stats.js b/static/scripts/reports_webapp/run_stats.js
new file mode 100644
index 0000000..1363ca2
--- /dev/null
+++ b/static/scripts/reports_webapp/run_stats.js
@@ -0,0 +1,2 @@
+function days_in_month(a,b){return new Date(b,a,0).getDate()}function date_by_subtracting_days(a,b){return new Date(a.getFullYear(),a.getMonth(),a.getDate()-b,a.getHours(),a.getMinutes(),a.getSeconds(),a.getMilliseconds())}function date_by_subtracting_hours(a,b){return new Date(a.getFullYear(),a.getMonth(),a.getDate(),a.getHours()-b,a.getMinutes(),a.getSeconds(),a.getMilliseconds())}function get_utc_time_hours(){var a=new Date;return new Date(a.getUTCFullYear(),a.getUTCMonth(),a.getUTCDa [...]
+//# sourceMappingURL=../../maps/reports_webapp/run_stats.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/autocom_tagging.js b/static/scripts/ui/autocom_tagging.js
new file mode 100644
index 0000000..2735676
--- /dev/null
+++ b/static/scripts/ui/autocom_tagging.js
@@ -0,0 +1,2 @@
+!function(a){"function"==typeof define&&define.amd?define(["jquery"],a):a(jQuery)}(function(a){"use_strict";function b(a,b){c(a).find(".tag-name").each(function(){c(this).click(function(){var a=c(this).text(),d=a.split(":");return b(d[0],d[1]),!0})})}var c=a;return a.fn.autocomplete_tagging=function(d){function e(a){c(a).mouseenter(function(){c(this).attr("src",h.delete_tag_img_rollover)}),c(a).mouseleave(function(){c(this).attr("src",h.delete_tag_img)}),c(a).click(function(){var b=c(thi [...]
+//# sourceMappingURL=../../maps/ui/autocom_tagging.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/editable-text.js b/static/scripts/ui/editable-text.js
new file mode 100644
index 0000000..ae116af
--- /dev/null
+++ b/static/scripts/ui/editable-text.js
@@ -0,0 +1,2 @@
+!function(a){"function"==typeof define&&define.amd?define(["jquery"],a):a(jQuery)}(function(a){"use_strict";var b=a;b.fn.make_text_editable=function(a){var c="num_cols"in a?a.num_cols:30,d="num_rows"in a?a.num_rows:4,e="use_textarea"in a?a.use_textarea:!1,f="on_finish"in a?a.on_finish:null,g="help_text"in a?a.help_text:null,h=b(this);return h.addClass("editable-text").click(function(g){if(!(b(this).children(":input").length>0)){h.removeClass("editable-text");var i,j,k=function(a){h.find( [...]
+//# sourceMappingURL=../../maps/ui/editable-text.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/fa-icon-button.js b/static/scripts/ui/fa-icon-button.js
new file mode 100644
index 0000000..f1cc815
--- /dev/null
+++ b/static/scripts/ui/fa-icon-button.js
@@ -0,0 +1,2 @@
+!function(a,b){"function"==typeof define&&define.amd?define([],b):a.faIconButton=b()}(this,function(){var a=function(a){a=a||{},a.tooltipConfig=a.tooltipConfig||{placement:"bottom"},a.classes=["icon-btn"].concat(a.classes||[]),a.disabled&&a.classes.push("disabled");var b=['<a class="',a.classes.join(" "),'"',a.title?' title="'+a.title+'"':"",!a.disabled&&a.target?' target="'+a.target+'"':"",' href="',!a.disabled&&a.href?a.href:"javascript:void(0);",'">','<span class="fa ',a.faIcon,'"></s [...]
+//# sourceMappingURL=../../maps/ui/fa-icon-button.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/filter-control.js b/static/scripts/ui/filter-control.js
new file mode 100644
index 0000000..eed1c27
--- /dev/null
+++ b/static/scripts/ui/filter-control.js
@@ -0,0 +1,2 @@
+!function(a){"function"==typeof define&&define.amd?define(["jquery"],a):a(jQuery)}(function(a){function b(b,c){function d(){var b=a(this),c=b.parents(".dropdown-select"),d=b.text();c.find(".dropdown-select-selected").text(d),c.trigger("change.dropdown-select",d)}c=c||(_.isEmpty(b)?"":b[0]);var e=a(['<div class="dropdown-select btn-group">','<button type="button" class="btn btn-default">','<span class="dropdown-select-selected">'+c+"</span>","</button>","</div>"].join("\n"));return b&&b.l [...]
+//# sourceMappingURL=../../maps/ui/filter-control.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/hoverhighlight.js b/static/scripts/ui/hoverhighlight.js
new file mode 100644
index 0000000..cf8c58b
--- /dev/null
+++ b/static/scripts/ui/hoverhighlight.js
@@ -0,0 +1,2 @@
+!function(a){"function"==typeof define&&define.amd?define([],a):a(jQuery)}(function(){jQuery.fn.extend({hoverhighlight:function(a,b){return a=a||"body",this.length?($(this).each(function(){var c=$(this),d=c.data("target");d&&c.mouseover(function(){$(d,a).css({background:b})}).mouseout(function(){$(d).css({background:""})})}),this):this}})});
+//# sourceMappingURL=../../maps/ui/hoverhighlight.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/loading-indicator.js b/static/scripts/ui/loading-indicator.js
new file mode 100644
index 0000000..6e2c7d4
--- /dev/null
+++ b/static/scripts/ui/loading-indicator.js
@@ -0,0 +1,2 @@
+!function(a,b){"function"==typeof define&&define.amd?define([],b):a.LoadingIndicator=b()}(this,function(){function a(a,b){function c(){var c=['<div class="loading-indicator">','<div class="loading-indicator-text">','<span class="fa fa-spinner fa-spin fa-lg"></span>','<span class="loading-indicator-message">loading...</span>',"</div>","</div>"].join("\n"),d=$(c).hide().css(b.css||{position:"fixed"}),e=d.children(".loading-indicator-text");return b.cover?(d.css({"z-index":2,top:a.css("top" [...]
+//# sourceMappingURL=../../maps/ui/loading-indicator.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/mode-button.js b/static/scripts/ui/mode-button.js
new file mode 100644
index 0000000..7b92be4
--- /dev/null
+++ b/static/scripts/ui/mode-button.js
@@ -0,0 +1,2 @@
+!function(a){"function"==typeof define&&define.amd?define([],a):a(jQuery)}(function(){function a(a,b){return this.currModeIndex=0,this._init(a,b)}a.prototype.DATA_KEY="mode-button",a.prototype.defaults={switchModesOnClick:!0},a.prototype._init=function(a,b){if(b=b||{},this.$element=$(a),this.options=$.extend(!0,{},this.defaults,b),!b.modes)throw new Error('ModeButton requires a "modes" array');var c=this;return this.$element.click(function(){c.callModeFn(),c.options.switchModesOnClick&&c [...]
+//# sourceMappingURL=../../maps/ui/mode-button.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/pagination.js b/static/scripts/ui/pagination.js
new file mode 100644
index 0000000..30985ad
--- /dev/null
+++ b/static/scripts/ui/pagination.js
@@ -0,0 +1,2 @@
+!function(a){"function"==typeof define&&define.amd?define(["jquery"],a):a(jQuery)}(function(a){function b(a,b){return this.numPages=null,this.currPage=0,this.init(a,b)}function c(b){return a(['<li><a href="javascript:void(0);">',b,"</a></li>"].join(""))}b.prototype.DATA_KEY="pagination",b.prototype.defaults={startingPage:0,perPage:20,totalDataSize:null,currDataSize:null},b.prototype.init=function(a,c){return c=c||{},this.$element=a,this.options=jQuery.extend(!0,{},this.defaults,c),this.c [...]
+//# sourceMappingURL=../../maps/ui/pagination.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/peek-column-selector.js b/static/scripts/ui/peek-column-selector.js
new file mode 100644
index 0000000..331c9b1
--- /dev/null
+++ b/static/scripts/ui/peek-column-selector.js
@@ -0,0 +1,2 @@
+!function(a){"function"==typeof define&&define.amd?define(["jquery"],a):a(jQuery)}(function(a){function b(a){if(a.disabled&&"array"!==jQuery.type(a.disabled))throw new Error('"disabled" must be defined as an array of indeces: '+JSON.stringify(a));if(a.multiselect&&a.selected&&"array"!==jQuery.type(a.selected))throw new Error('Mulitselect rows need an array for "selected": '+JSON.stringify(a));if(!a.label||!a.id)throw new Error("Peek controls need a label and id for each control row: "+JS [...]
+//# sourceMappingURL=../../maps/ui/peek-column-selector.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/popupmenu.js b/static/scripts/ui/popupmenu.js
new file mode 100644
index 0000000..dffe058
--- /dev/null
+++ b/static/scripts/ui/popupmenu.js
@@ -0,0 +1,2 @@
+define(["jquery"],function(a){"use_strict";function b(a,b){var c=a.data("menu_options");a.data("menu_options",b),c||a.bind("click.show_popup",function(b){return d(".popmenu-wrapper").remove(),setTimeout(function(){var c=d("<ul class='dropdown-menu' id='"+a.attr("id")+"-menu'></ul>"),e=a.data("menu_options");_.size(e)<=0&&d("<li>No Options.</li>").appendTo(c),d.each(e,function(a,b){if(b){var e=b.action||b;c.append(d("<li></li>").append(d("<a>").attr("href",b.url).html(a).click(e)))}else c [...]
+//# sourceMappingURL=../../maps/ui/popupmenu.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/scroll-panel.js b/static/scripts/ui/scroll-panel.js
new file mode 100644
index 0000000..c79827d
--- /dev/null
+++ b/static/scripts/ui/scroll-panel.js
@@ -0,0 +1,2 @@
+$.ui.plugin.add("draggable","scrollPanel",{drag:function(a,b){var c=$(this).data("draggable");clearTimeout(c.timeout);var d=b.options,e=c.element,f=d.panel,g=f.position(),h=f.width(),i=f.height();if(viewport=f.parent(),viewport_w=viewport.width(),viewport_h=viewport.height(),element_w=e.width(),element_h=e.height(),moved=!1,close_dist=5,nudge=23,p_min_x=-(h-viewport_w),p_min_y=-(i-viewport_h),p_max_x=0,p_max_y=0,min_vis_x=-g.left,max_vis_x=min_vis_x+viewport_w,min_vis_y=-g.top,max_vis_y= [...]
+//# sourceMappingURL=../../maps/ui/scroll-panel.js.map
\ No newline at end of file
diff --git a/static/scripts/ui/search-input.js b/static/scripts/ui/search-input.js
new file mode 100644
index 0000000..aca25b7
--- /dev/null
+++ b/static/scripts/ui/search-input.js
@@ -0,0 +1,2 @@
+!function(a){"function"==typeof define&&define.amd?define([],a):a(jQuery)}(function(){function a(a,c){function d(){var a=$(this).parent().children("input");a.val("").trigger("searchInput.clear").blur(),c.onclear()}function e(a,b){return b?($(this).trigger("search.search",b),void("function"==typeof c.onfirstsearch&&n?(n=!1,c.onfirstsearch(b)):c.onsearch(b))):d()}function f(){return['<input type="text" name="',c.name,'" placeholder="',c.placeholder,'" ','class="search-query ',c.classes,'"  [...]
+//# sourceMappingURL=../../maps/ui/search-input.js.map
\ No newline at end of file
diff --git a/static/scripts/utils/add-logging.js b/static/scripts/utils/add-logging.js
new file mode 100644
index 0000000..3de1f25
--- /dev/null
+++ b/static/scripts/utils/add-logging.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a,c){var d=void 0!==a.prototype?a.prototype:a;return void 0!==c&&(d._logNamespace=c),b.forEach(function(a){d[a]=function(){return this.logger?this.logger.emit?this.logger.emit(a,this._logNamespace,arguments):this.logger[a]?this.logger[a].apply(this.logger,arguments):void 0:void 0}}),a}var b=["log","debug","info","warn","error","metric"];return a});
+//# sourceMappingURL=../../maps/utils/add-logging.js.map
\ No newline at end of file
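This module survives the truncation intact and is worth unpacking: addLogging(obj, namespace) installs log/debug/info/warn/error/metric methods on an object (or its prototype) that do nothing unless a this.logger is attached, preferring a structured logger.emit(level, namespace, args) sink over a plain console-like logger[level] call. A readable equivalent:

    function addLogging(obj, namespace) {
        var target = obj.prototype !== undefined ? obj.prototype : obj;
        if (namespace !== undefined) { target._logNamespace = namespace; }
        ["log", "debug", "info", "warn", "error", "metric"].forEach(function (name) {
            target[name] = function () {
                if (!this.logger) { return undefined; }  // no logger attached: silent no-op
                if (this.logger.emit) {                  // structured sink
                    return this.logger.emit(name, this._logNamespace, arguments);
                }
                return this.logger[name]                 // console-like sink
                    ? this.logger[name].apply(this.logger, arguments)
                    : undefined;
            };
        });
        return obj;
    }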
diff --git a/static/scripts/utils/ajax-queue.js b/static/scripts/utils/ajax-queue.js
new file mode 100644
index 0000000..294317f
--- /dev/null
+++ b/static/scripts/utils/ajax-queue.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a){var b=this;return b.deferred=jQuery.Deferred(),b.queue=[],b.responses=[],b.numToProcess=0,b.running=!1,b.init(a||[]),b.start(),b}function b(b){var c=this;return c.names={},a.call(this,b),c}return a.prototype.init=function(a){var b=this;a.forEach(function(a){b.add(a)})},a.prototype.add=function(a){var b=this,c=this.queue.length;return this.numToProcess+=1,this.queue.push(function(){var d=c,e=a();e.done(function(a){b.deferred.notify({curr:d,total:b.numToP [...]
+//# sourceMappingURL=../../maps/utils/ajax-queue.js.map
\ No newline at end of file
diff --git a/static/scripts/utils/async-save-text.js b/static/scripts/utils/async-save-text.js
new file mode 100644
index 0000000..5ac2eba
--- /dev/null
+++ b/static/scripts/utils/async-save-text.js
@@ -0,0 +1,2 @@
+define(["jquery"],function(a){"use_strict";function b(a,b,d,e,f,g,h,i,j){void 0===f&&(f=30),void 0===h&&(h=4),c("#"+a).click(function(){if(!(c("#renaming-active").length>0)){var a,k=c("#"+b),l=k.text();a=g?c("<textarea></textarea>").attr({rows:h,cols:f}).text(c.trim(l)):c("<input type='text'></input>").attr({value:c.trim(l),size:f}),a.attr("id","renaming-active"),a.blur(function(){c(this).remove(),k.show(),j&&j(a)}),a.keyup(function(f){if(27===f.keyCode)c(this).trigger("blur");else if(13 [...]
+//# sourceMappingURL=../../maps/utils/async-save-text.js.map
\ No newline at end of file
diff --git a/static/scripts/utils/config.js b/static/scripts/utils/config.js
new file mode 100644
index 0000000..f678085
--- /dev/null
+++ b/static/scripts/utils/config.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","viz/trackster/util","utils/config"],function(a,b){var c=Backbone.Model.extend({initialize:function(d){var e=this.get("key");this.set("id",e);var f=a.find(c.known_settings_defaults,function(a){return a.key===e});f&&this.set(a.extend({},f,d)),void 0===this.get("value")&&void 0!==this.get("default_value")&&(this.set_value(this.get("default_value")),this.get("value")||"color"!==this.get("type")||this.set("value",b.get_random_color()))},set_value:function(a,b){var c [...]
+//# sourceMappingURL=../../maps/utils/config.js.map
\ No newline at end of file
diff --git a/static/scripts/utils/deferred.js b/static/scripts/utils/deferred.js
new file mode 100644
index 0000000..86e2695
--- /dev/null
+++ b/static/scripts/utils/deferred.js
@@ -0,0 +1,2 @@
+define(["utils/utils"],function(a){return Backbone.Model.extend({initialize:function(){this.active={},this.last=null},execute:function(b){var c=this,d=a.uid(),e=b.length>0;this.active[d]=!0;var f=$.Deferred();f.promise().always(function(){delete c.active[d],e&&Galaxy.emit.debug("deferred::execute()",this.state().charAt(0).toUpperCase()+this.state().slice(1)+" "+d)}),$.when(this.last).always(function(){c.active[d]?(e&&Galaxy.emit.debug("deferred::execute()","Running "+d),b(f),!e&&f.resolv [...]
+//# sourceMappingURL=../../maps/utils/deferred.js.map
\ No newline at end of file
diff --git a/static/scripts/utils/graph.js b/static/scripts/utils/graph.js
new file mode 100644
index 0000000..5d5a883
--- /dev/null
+++ b/static/scripts/utils/graph.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a,b){for(var c in b)if(b.hasOwnProperty(c)&&(!a.hasOwnProperty(c)||a[c]!==b[c]))return!1;return!0}function b(b,c){var d="function"==typeof c?c:void 0,e="object"==typeof c?c:void 0,f=[],g=0;for(var h in b)if(b.hasOwnProperty(h)){var i=b[h];d?f.push(d.call(i,i,h,g)):e?"object"==typeof i&&a(i,e)&&f.push(i):f.push(i),g+=1}return f}function c(a,b,c){var d=this;return d.source=void 0!==a?a:null,d.target=void 0!==b?b:null,d.data=c||null,d}function d(a,b){var c=th [...]
+//# sourceMappingURL=../../maps/utils/graph.js.map
\ No newline at end of file
diff --git a/static/scripts/utils/levenshtein.js b/static/scripts/utils/levenshtein.js
new file mode 100644
index 0000000..1f208d3
--- /dev/null
+++ b/static/scripts/utils/levenshtein.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a,b){if(0===a.length)return b.length;if(0===b.length)return a.length;var c,d=[];for(c=0;c<=b.length;c++)d[c]=[c];var e;for(e=0;e<=a.length;e++)d[0][e]=e;for(c=1;c<=b.length;c++)for(e=1;e<=a.length;e++)d[c][e]=b.charAt(c-1)===a.charAt(e-1)?d[c-1][e-1]:Math.min(d[c-1][e-1]+1,Math.min(d[c][e-1]+1,d[c-1][e]+1));return d[b.length][a.length]}return a});
+//# sourceMappingURL=../../maps/utils/levenshtein.js.map
\ No newline at end of file
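The hunk above is the textbook dynamic-programming edit distance: d[i][j] holds the distance between the first i characters of b and the first j characters of a, built from deletions, insertions, and substitutions. A readable equivalent of the same function:

    function levenshtein(a, b) {
        if (a.length === 0) { return b.length; }
        if (b.length === 0) { return a.length; }
        var d = [];
        for (var i = 0; i <= b.length; i++) { d[i] = [i]; }   // column 0: i deletions
        for (var j = 0; j <= a.length; j++) { d[0][j] = j; }  // row 0: j insertions
        for (i = 1; i <= b.length; i++) {
            for (j = 1; j <= a.length; j++) {
                d[i][j] = b.charAt(i - 1) === a.charAt(j - 1)
                    ? d[i - 1][j - 1]                         // match: carry diagonal
                    : Math.min(d[i - 1][j - 1] + 1,           // substitution
                          Math.min(d[i][j - 1] + 1,           // insertion
                                   d[i - 1][j] + 1));         // deletion
            }
        }
        return d[b.length][a.length];
    }
    // e.g. levenshtein("kitten", "sitting") === 3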
diff --git a/static/scripts/utils/localization.js b/static/scripts/utils/localization.js
new file mode 100644
index 0000000..2a3bb39
--- /dev/null
+++ b/static/scripts/utils/localization.js
@@ -0,0 +1,2 @@
+define(["i18n!nls/locale"],function(a){if(a.hasOwnProperty("__root")){var b="undefined"==typeof navigator?"__root":(navigator.language||navigator.userLanguage||"__root").toLowerCase();a=a["__"+b]||a["__"+b.split("-")[0]]||a.__root}var c=function(b){return a[b]||b};return c.cacheNonLocalized=!1,c});
+//# sourceMappingURL=../../maps/utils/localization.js.map
\ No newline at end of file
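Also fully visible, and the consumer of the nls bundles above: the module resolves a language table from navigator.language, trying the full tag first ("ja-jp"), then the primary subtag ("ja"), then the root table, and returns a lookup function that falls back to the untranslated key. De-minified:

    define(["i18n!nls/locale"], function (strings) {
        // If the raw bundle came back, pick a locale table by "__"-prefixed key.
        if (strings.hasOwnProperty("__root")) {
            var lang = typeof navigator === "undefined"
                ? "__root"
                : (navigator.language || navigator.userLanguage || "__root").toLowerCase();
            strings = strings["__" + lang]             // e.g. "__ja-jp"
                || strings["__" + lang.split("-")[0]]  // e.g. "__ja"
                || strings.__root;
        }
        var localize = function (key) {
            return strings[key] || key;                // untranslated keys pass through
        };
        localize.cacheNonLocalized = false;
        return localize;
    });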
diff --git a/static/scripts/utils/metrics-logger.js b/static/scripts/utils/metrics-logger.js
new file mode 100644
index 0000000..61f210d
--- /dev/null
+++ b/static/scripts/utils/metrics-logger.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a){a=a||{};var b=this;return b.userId=window.bootstrapped&&window.bootstrapped.user?window.bootstrapped.user.id:null,b.userId=b.userId||a.userId||null,b.consoleLogger=a.consoleLogger||null,b._init(a),b}function b(a){var b=this;return b._init(a||{})}return a.ALL=0,a.LOG=0,a.DEBUG=10,a.INFO=20,a.WARN=30,a.ERROR=40,a.METRIC=50,a.NONE=100,a.defaultOptions={logLevel:a.NONE,consoleLevel:a.NONE,defaultNamespace:"Galaxy",consoleNamespaceWhitelist:null,clientPrefix [...]
+//# sourceMappingURL=../../maps/utils/metrics-logger.js.map
\ No newline at end of file
diff --git a/static/scripts/utils/natural-sort.js b/static/scripts/utils/natural-sort.js
new file mode 100644
index 0000000..b8dcb62
--- /dev/null
+++ b/static/scripts/utils/natural-sort.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a,b){var c=/(-?[0-9\.]+)/g,d=a.toString().toLowerCase()||"",e=b.toString().toLowerCase()||"",f=String.fromCharCode(0),g=d.replace(c,f+"$1"+f).split(f),h=e.replace(c,f+"$1"+f).split(f),i=new Date(d).getTime(),j=i?new Date(e).getTime():null;if(j){if(j>i)return-1;if(i>j)return 1}for(var k,l,m=0,n=Math.max(g.length,h.length);n>m;m++){if(k=parseFloat(g[m])||g[m],l=parseFloat(h[m])||h[m],l>k)return-1;if(k>l)return 1}return 0}return a});
+//# sourceMappingURL=../../maps/utils/natural-sort.js.map
\ No newline at end of file
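The comparator above lowercases both values, splits them on numeric runs, and compares chunk-by-chunk with numeric chunks compared as numbers (trying a date comparison first when both values parse as dates), which is what makes "chr2" sort before "chr10". Usage, assuming the module is loaded under the illustrative name naturalSort:

    ["chr10", "chr2", "chr1"].sort(naturalSort);  // -> ["chr1", "chr2", "chr10"]
    ["chr10", "chr2", "chr1"].sort();             // -> ["chr1", "chr10", "chr2"] (lexicographic)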
diff --git a/static/scripts/utils/query-string-parsing.js b/static/scripts/utils/query-string-parsing.js
new file mode 100644
index 0000000..11267de
--- /dev/null
+++ b/static/scripts/utils/query-string-parsing.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a,b){b=b||window.location.search.substr(1);var c=new RegExp(a+"=([^&#$]+)"),d=b.match(c);return d&&d.length?(d=d.splice(1),1===d.length?d[0]:d):void 0}function b(a){if(!a)return{};var b={},c=a.split("&");return c.forEach(function(a){var c=a.split("=");b[c[0]]=decodeURI(c[1])}),b}return{get:a,parse:b}});
+//# sourceMappingURL=../../maps/utils/query-string-parsing.js.map
\ No newline at end of file
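Both helpers here are small: get(name, query) returns the first value for a parameter (query defaults to window.location.search) or undefined, and parse(query) splits a query string into a plain object with decodeURI applied to values. A usage sketch, with qs as an illustrative name for the loaded module:

    qs.get("id", "id=abc123&deleted=false");  // -> "abc123"
    qs.parse("id=abc123&deleted=false");      // -> { id: "abc123", deleted: "false" }
    qs.get("missing", "id=abc123");           // -> undefined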
diff --git a/static/scripts/utils/uploadbox.js b/static/scripts/utils/uploadbox.js
new file mode 100755
index 0000000..ced4615
--- /dev/null
+++ b/static/scripts/utils/uploadbox.js
@@ -0,0 +1,2 @@
+!function(a){jQuery.event.props.push("dataTransfer"),a.uploadpost=function(b){var c=a.extend({},{data:{},success:function(){},error:function(){},progress:function(){},url:null,maxfilesize:2048,error_filesize:"File exceeds 2GB. Please use an FTP client.",error_default:"Please make sure the file is available.",error_server:"Upload request failed.",error_login:"Uploads require you to log in."},b),d=c.data;if(d.error_message)return void c.error(d.error_message);var e=new FormData;for(var f in [...]
+//# sourceMappingURL=../../maps/utils/uploadbox.js.map
\ No newline at end of file
diff --git a/static/scripts/utils/utils.js b/static/scripts/utils/utils.js
new file mode 100644
index 0000000..d2f4907
--- /dev/null
+++ b/static/scripts/utils/utils.js
@@ -0,0 +1,2 @@
+define([],function(){function a(a){return'<iframe src="'+a+'" frameborder="0" style="width: 100%; height: 100%;"/>'}function b(a,c){for(var d in a){var e=a[d];if(_.isObject(e)){var f=c(e);f&&(a[d]=f),b(e,c)}}}function c(a){var b,c,d,e;return c=/(\b(https?|ftp):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/gim,b=a.replace(c,'<a href="$1" target="_blank">$1</a>'),d=/(^|[^\/])(www\.[\S]+(\b|$))/gim,b=b.replace(d,'$1<a href="http://$2" target="_blank">$2</a>'),e=/(([a-zA-Z0-9\-\_\.])+ [...]
+//# sourceMappingURL=../../maps/utils/utils.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/bbi-data-manager.js b/static/scripts/viz/bbi-data-manager.js
new file mode 100644
index 0000000..a73670b
--- /dev/null
+++ b/static/scripts/viz/bbi-data-manager.js
@@ -0,0 +1,2 @@
+define(["viz/visualization","libs/bbi/bigwig"],function(a,b){var c=a.GenomeDataManager.extend({load_data:function(a){var c=$.Deferred();this.set_data(a,c);{var d=Galaxy.root+"datasets/"+this.get("dataset").id+"/display",e=this;new $.Deferred}return $.when(b.makeBwg(d)).then(function(b){$.when(b.readWigData(a.get("chrom"),a.get("start"),a.get("end"))).then(function(b){var d=[],f={max:Number.MIN_VALUE};b.forEach(function(a){f.max!==a.min-1&&(d.push([f.max+1,0]),d.push([a.min-2,0])),d.push( [...]
+//# sourceMappingURL=../../maps/viz/bbi-data-manager.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/circster.js b/static/scripts/viz/circster.js
new file mode 100644
index 0000000..cb3e227
--- /dev/null
+++ b/static/scripts/viz/circster.js
@@ -0,0 +1,2 @@
+require(["utils/utils"],function(a){a.cssLoadFile("static/style/circster.css")}),define(["libs/underscore","libs/d3","viz/visualization","utils/config","mvc/ui/icon-button","libs/farbtastic"],function(a,b,c,d,e){var f=Backbone.Model.extend({is_visible:function(a){var b=a.getBoundingClientRect(),c=$("svg")[0].getBoundingClientRect();return b.right<0||b.left>c.right||b.bottom<0||b.top>c.bottom?!1:!0}}),g={drawTicks:function(a,b,c,d,e){var f=a.append("g").selectAll("g").data(b).enter().appe [...]
+//# sourceMappingURL=../../maps/viz/circster.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/phyloviz.js b/static/scripts/viz/phyloviz.js
new file mode 100644
index 0000000..7b68167
--- /dev/null
+++ b/static/scripts/viz/phyloviz.js
@@ -0,0 +1,2 @@
+define(["libs/d3","viz/visualization","mvc/dataset/data","mvc/ui/icon-button"],function(a,b,c,d){function e(){function b(a,c,d,e){var f=a.children,g=0,l=a.dist||j;return l=l>1?1:l,a.dist=l,a.y0=null!==e?e.y0+l*h:k,f?(f.forEach(function(e){e.parent=a,g+=b(e,c,d,a)}),a.x0=g/f.length):(a.x0=i*d,i+=1),a.x=a.x0,a.y=a.y0,a.x0}var c=this,d=a.layout.hierarchy().sort(null).value(null),e=360,f="Linear",g=18,h=200,i=0,j=.5,k=50;return c.leafHeight=function(a){return"undefined"==typeof a?g:(g=a,c)}, [...]
+//# sourceMappingURL=../../maps/viz/phyloviz.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/sweepster.js b/static/scripts/viz/sweepster.js
new file mode 100644
index 0000000..f35cd8c
--- /dev/null
+++ b/static/scripts/viz/sweepster.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","libs/d3","viz/trackster/util","viz/visualization","viz/trackster/tracks","mvc/tool/tools","mvc/dataset/data","utils/config","mvc/ui/icon-button"],function(a,b,c,d,e,f,g,h,i){var j=Backbone.Model.extend({defaults:{inputs:null,values:null}}),k=Backbone.Model.extend({defaults:{tool:null,tree_data:null},initialize:function(b){var c=this;this.get("tool").get("inputs").each(function(a){a.on("change:min change:max change:num_samples",function(a){a.get("in_ptree")&&c.s [...]
+//# sourceMappingURL=../../maps/viz/sweepster.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/trackster.js b/static/scripts/viz/trackster.js
new file mode 100644
index 0000000..41dab66
--- /dev/null
+++ b/static/scripts/viz/trackster.js
@@ -0,0 +1,2 @@
+var ui=null,view=null,browser_router=null;require(["utils/utils","libs/jquery/jquery.event.drag","libs/jquery/jquery.event.hover","libs/jquery/jquery.mousewheel","libs/jquery/jquery-ui","libs/jquery/select2","libs/farbtastic","libs/jquery/jquery.form","libs/jquery/jquery.rating","ui/editable-text"],function(a){a.cssLoadFile("static/style/jquery.rating.css"),a.cssLoadFile("static/style/autocomplete_tagging.css"),a.cssLoadFile("static/style/jquery-ui/smoothness/jquery-ui.css"),a.cssLoadFil [...]
+//# sourceMappingURL=../../maps/viz/trackster.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/trackster/filters.js b/static/scripts/viz/trackster/filters.js
new file mode 100644
index 0000000..244de9b
--- /dev/null
+++ b/static/scripts/viz/trackster/filters.js
@@ -0,0 +1,2 @@
+define(["libs/underscore"],function(a){var b=a.extend,c=function(a){this.manager=null,this.name=a.name,this.index=a.index,this.tool_id=a.tool_id,this.tool_exp_name=a.tool_exp_name};b(c.prototype,{to_dict:function(){return{name:this.name,index:this.index,tool_id:this.tool_id,tool_exp_name:this.tool_exp_name}}});var d=function(a,b,c){return $("<a/>").attr("href","javascript:void(0);").attr("title",a).addClass("icon-button").addClass(b).tooltip().click(c)},e=function(a){c.call(this,a),this. [...]
+//# sourceMappingURL=../../../maps/viz/trackster/filters.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/trackster/painters.js b/static/scripts/viz/trackster/painters.js
new file mode 100644
index 0000000..6b3316e
--- /dev/null
+++ b/static/scripts/viz/trackster/painters.js
@@ -0,0 +1,2 @@
+define(["libs/underscore"],function(a){var b=1001,c=1002,d=1003,e=1004,f=1005,g=1006,h=function(a,h){var i,j=a[0],k=a[1],l=h[0],m=h[1];return i=l>j?l>=k?b:m>=k?d:c:j>m?g:m>=k?f:e},i=function(a,c){var d=h(a,c);return d!==b&&d!==g},j=function(a,b,c,d,e,f){void 0===f&&(f=4);var g,h=d-b,i=e-c,j=Math.floor(Math.sqrt(h*h+i*i)/f),k=h/j,l=i/j;for(g=0;j>g;g++,b+=k,c+=l)g%2===0&&a.fillRect(b,c,f,1)},k=function(a,b,c,d){var e=b-d/2,f=b+d/2,g=c-Math.sqrt(3*d/2);a.beginPath(),a.moveTo(e,g),a.lineTo(f [...]
+//# sourceMappingURL=../../../maps/viz/trackster/painters.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/trackster/slotting.js b/static/scripts/viz/trackster/slotting.js
new file mode 100644
index 0000000..384c679
--- /dev/null
+++ b/static/scripts/viz/trackster/slotting.js
@@ -0,0 +1,2 @@
+define(["libs/underscore"],function(a){var b=a.extend,c=2,d=5,e=function(a,b){this.slot=a,this.feature=b},f=function(a,b,c,d){this.slots={},this.start_end_dct={},this.w_scale=a,this.mode=b,this.include_label="Pack"===b,this.max_rows=c,this.measureText=d};return b(f.prototype,{_get_draw_coords:function(a){var b,e=Math.floor(a[1]*this.w_scale),f=Math.ceil(a[2]*this.w_scale),g=a[3];if(void 0!==g&&this.include_label){var h=this.measureText(g).width+(c+d);e-h>=0?(e-=h,b="left"):(f+=h,b="right [...]
+//# sourceMappingURL=../../../maps/viz/trackster/slotting.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/trackster/tracks.js b/static/scripts/viz/trackster/tracks.js
new file mode 100644
index 0000000..fbf8610
--- /dev/null
+++ b/static/scripts/viz/trackster/tracks.js
@@ -0,0 +1,4 @@
+define(["libs/underscore","viz/visualization","viz/viz_views","viz/trackster/util","viz/trackster/slotting","viz/trackster/painters","viz/trackster/filters","mvc/dataset/data","mvc/tool/tools","utils/config","viz/bbi-data-manager","ui/editable-text"],function(a,b,c,d,e,f,g,h,i,j,k){function l(a,b){b||(b=0);var c=Math.pow(10,b);return Math.round(a*c)/c}function m(a){var b=$.Deferred();return $.ajax({type:"HEAD",url:a,beforeSend:function(a){a.setRequestHeader("Range","bytes=0-10")},success [...]
+
+},show_message:function(a){return this.tiles_div.remove(),$("<span/>").addClass("message").html(a).appendTo(this.content_div)},init:function(a){var b=this;if(b.enabled=!1,b.tile_cache.clear(),b.data_manager.clear(),b.content_div.children().remove(),b.container_div.removeClass("nodata error pending"),b.tiles_div=$("<div/>").addClass("tiles").appendTo(b.content_div),b.dataset.id){var c=$.Deferred(),d={hda_ldda:b.dataset.get("hda_ldda"),data_type:this.dataset_check_type,chrom:b.view.chrom,r [...]
+//# sourceMappingURL=../../../maps/viz/trackster/tracks.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/trackster/util.js b/static/scripts/viz/trackster/util.js
new file mode 100644
index 0000000..c9ec447
--- /dev/null
+++ b/static/scripts/viz/trackster/util.js
@@ -0,0 +1,2 @@
+define(function(){function a(a){a+="";for(var b=/(\d+)(\d{3})/;b.test(a);)a=a.replace(b,"$1,$2");return a}var b=function(a){return"promise"in a},c=Backbone.Model.extend({defaults:{ajax_settings:{},interval:1e3,success_fn:function(){return!0}},go:function(){var a=$.Deferred(),b=this,c=b.get("ajax_settings"),d=b.get("success_fn"),e=b.get("interval"),f=function(){$.ajax(c).success(function(b){d(b)?a.resolve(b):setTimeout(f,e)})};return f(),a}}),d=function(a){a||(a="#ffffff"),"string"==typeo [...]
+//# sourceMappingURL=../../../maps/viz/trackster/util.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/visualization.js b/static/scripts/viz/visualization.js
new file mode 100644
index 0000000..2c530f9
--- /dev/null
+++ b/static/scripts/viz/visualization.js
@@ -0,0 +1,2 @@
+define(["libs/underscore","mvc/dataset/data","viz/trackster/util","utils/config"],function(a,b,c,d){var e={toJSON:function(){var b=this,c={};return a.each(b.constructor.to_json_keys,function(a){var d=b.get(a);a in b.constructor.to_json_mappers&&(d=b.constructor.to_json_mappers[a](d,b)),c[a]=d}),c}},f=function(a,b,c,d){$.ajax({url:a,data:c,error:function(){alert("Grid failed")},success:function(a){Galaxy.modal.show({title:"Select datasets for new tracks",body:a,buttons:{Cancel:function(){ [...]
+//# sourceMappingURL=../../maps/viz/visualization.js.map
\ No newline at end of file
diff --git a/static/scripts/viz/viz_views.js b/static/scripts/viz/viz_views.js
new file mode 100644
index 0000000..1bd21a6
--- /dev/null
+++ b/static/scripts/viz/viz_views.js
@@ -0,0 +1,2 @@
+define(["libs/underscore"],function(a){var b=Backbone.View.extend({className:"track-header",initialize:function(){this.model.config.get("name").on("change:value",this.update_name,this),this.render()},render:function(){this.$el.append($("<div/>").addClass(this.model.drag_handle_class)),this.$el.append($("<div/>").addClass("track-name").text(this.model.config.get_value("name"))),this.action_icons={},this.render_action_icons(),this.$el.dblclick(function(a){a.stopPropagation()}),this.$el.app [...]
+//# sourceMappingURL=../../maps/viz/viz_views.js.map
\ No newline at end of file
diff --git a/static/src b/static/src
new file mode 120000
index 0000000..e3ba799
--- /dev/null
+++ b/static/src
@@ -0,0 +1 @@
+../client/galaxy/scripts
\ No newline at end of file
diff --git a/static/style/base.css b/static/style/base.css
new file mode 120000
index 0000000..7486b4f
--- /dev/null
+++ b/static/style/base.css
@@ -0,0 +1 @@
+./blue/base.css
\ No newline at end of file
diff --git a/static/style/blue/autocomplete_tagging.css b/static/style/blue/autocomplete_tagging.css
new file mode 100644
index 0000000..c1b3c2a
--- /dev/null
+++ b/static/style/blue/autocomplete_tagging.css
@@ -0,0 +1 @@
+.ac_results{padding:0;border:1px solid black;background-color:white;overflow:hidden;z-index:99999}.ac_results ul{width:100%;list-style-position:outside;list-style:none;padding:0;margin:0}.ac_results li{padding:2px 5px;cursor:default;display:block;font-size:12px;line-height:16px;overflow:hidden}.ac_loading{background:white url('indicator.gif') right center no-repeat}.ac_odd{background-color:#fff;margin-left:.3em}.ac_even{margin-left:.3em}.ac_over{background-color:#0A246A;color:white}.ac_h [...]
\ No newline at end of file
diff --git a/static/style/blue/base.css b/static/style/blue/base.css
new file mode 100644
index 0000000..052aeb3
--- /dev/null
+++ b/static/style/blue/base.css
@@ -0,0 +1,4 @@
+/*! normalize.css v2.1.0 | MIT License | git.io/normalize */article,aside,details,figcaption,figure,footer,header,hgroup,main,nav,section,summary{display:block}audio,canvas,video{display:inline-block}audio:not([controls]){display:none;height:0}[hidden]{display:none}html{font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:focus{outline:thin dotted}a:active,a:hover{outline:0}h1{font-size:2em;margin:.67em 0}abbr[title]{border-bottom:1px dotted}b,st [...]
+ *  Font Awesome 4.2.0 by @davegandy - http://fontawesome.io - @fontawesome
+ *  License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)
+ */@font-face{font-family:'FontAwesome';src:url('../images/fonts/fontawesome-webfont.eot?v=4.2.0');src:url('../images/fonts/fontawesome-webfont.eot?#iefix&v=4.2.0') format('embedded-opentype'),url('../images/fonts/fontawesome-webfont.woff?v=4.2.0') format('woff'),url('../images/fonts/fontawesome-webfont.ttf?v=4.2.0') format('truetype'),url('../images/fonts/fontawesome-webfont.svg?v=4.2.0#fontawesomeregular') format('svg');font-weight:normal;font-style:normal}.fa{display:inline-block;font [...]
\ No newline at end of file
diff --git a/static/style/blue/base_bg.png b/static/style/blue/base_bg.png
new file mode 100644
index 0000000..f88fb8e
Binary files /dev/null and b/static/style/blue/base_bg.png differ
diff --git a/static/style/blue/bootstrap-tour.css b/static/style/blue/bootstrap-tour.css
new file mode 100644
index 0000000..b730758
--- /dev/null
+++ b/static/style/blue/bootstrap-tour.css
@@ -0,0 +1,73 @@
+/* ========================================================================
+ * bootstrap-tour - v0.10.1
+ * http://bootstraptour.com
+ * ========================================================================
+ * Copyright 2012-2013 Ulrich Sossou
+ *
+ * ========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ========================================================================
+ */
+
+.tour-backdrop {
+  position: fixed;
+  top: 0;
+  right: 0;
+  bottom: 0;
+  left: 0;
+  z-index: 1100;
+  background-color: #000;
+  opacity: 0.8;
+  filter: alpha(opacity=80);
+}
+.tour-step-backdrop {
+  position: relative;
+  z-index: 1101;
+  background: inherit;
+}
+.tour-step-backdrop > td {
+  position: relative;
+  z-index: 1101;
+}
+.tour-step-background {
+  position: absolute !important;
+  z-index: 1100;
+  background: inherit;
+  border-radius: 6px;
+}
+.popover[class*="tour-"] {
+  z-index: 1100;
+}
+.popover[class*="tour-"] .popover-navigation {
+  padding: 9px 14px;
+}
+.popover[class*="tour-"] .popover-navigation *[data-role="end"] {
+  float: right;
+}
+.popover[class*="tour-"] .popover-navigation *[data-role="prev"],
+.popover[class*="tour-"] .popover-navigation *[data-role="next"],
+.popover[class*="tour-"] .popover-navigation *[data-role="end"] {
+  cursor: pointer;
+}
+.popover[class*="tour-"] .popover-navigation *[data-role="prev"].disabled,
+.popover[class*="tour-"] .popover-navigation *[data-role="next"].disabled,
+.popover[class*="tour-"] .popover-navigation *[data-role="end"].disabled {
+  cursor: default;
+}
+.popover[class*="tour-"].orphan {
+  position: fixed;
+  margin-top: 0;
+}
+.popover[class*="tour-"].orphan .arrow {
+  display: none;
+}
diff --git a/static/style/blue/button_bar_bg_light.png b/static/style/blue/button_bar_bg_light.png
new file mode 100644
index 0000000..720b10a
Binary files /dev/null and b/static/style/blue/button_bar_bg_light.png differ
diff --git a/static/style/blue/circster.css b/static/style/blue/circster.css
new file mode 100644
index 0000000..8df29bf
--- /dev/null
+++ b/static/style/blue/circster.css
@@ -0,0 +1 @@
+.chrom-label{font-size:80%}.tick{font-size:80%}
\ No newline at end of file
diff --git a/static/style/blue/data_empty.png b/static/style/blue/data_empty.png
new file mode 100644
index 0000000..04cdc33
Binary files /dev/null and b/static/style/blue/data_empty.png differ
diff --git a/static/style/blue/data_error.png b/static/style/blue/data_error.png
new file mode 100644
index 0000000..04cdc33
Binary files /dev/null and b/static/style/blue/data_error.png differ
diff --git a/static/style/blue/data_ok.png b/static/style/blue/data_ok.png
new file mode 100644
index 0000000..dee022c
Binary files /dev/null and b/static/style/blue/data_ok.png differ
diff --git a/static/style/blue/data_queued.png b/static/style/blue/data_queued.png
new file mode 100644
index 0000000..7202be8
Binary files /dev/null and b/static/style/blue/data_queued.png differ
diff --git a/static/style/blue/data_running.gif b/static/style/blue/data_running.gif
new file mode 100644
index 0000000..ba19ad7
Binary files /dev/null and b/static/style/blue/data_running.gif differ
diff --git a/static/style/blue/data_upload.gif b/static/style/blue/data_upload.gif
new file mode 100644
index 0000000..706c4c9
Binary files /dev/null and b/static/style/blue/data_upload.gif differ
diff --git a/static/style/blue/done_message_icon.png b/static/style/blue/done_message_icon.png
new file mode 100644
index 0000000..3363d04
Binary files /dev/null and b/static/style/blue/done_message_icon.png differ
diff --git a/static/style/blue/dynatree_skin/icons-rtl.gif b/static/style/blue/dynatree_skin/icons-rtl.gif
new file mode 100755
index 0000000..a29b5fb
Binary files /dev/null and b/static/style/blue/dynatree_skin/icons-rtl.gif differ
diff --git a/static/style/blue/dynatree_skin/icons.gif b/static/style/blue/dynatree_skin/icons.gif
new file mode 100755
index 0000000..a58eb93
Binary files /dev/null and b/static/style/blue/dynatree_skin/icons.gif differ
diff --git a/static/style/blue/dynatree_skin/loading.gif b/static/style/blue/dynatree_skin/loading.gif
new file mode 100755
index 0000000..251df05
Binary files /dev/null and b/static/style/blue/dynatree_skin/loading.gif differ
diff --git a/static/style/blue/dynatree_skin/ui.dynatree.css b/static/style/blue/dynatree_skin/ui.dynatree.css
new file mode 100755
index 0000000..79e64d7
--- /dev/null
+++ b/static/style/blue/dynatree_skin/ui.dynatree.css
@@ -0,0 +1,442 @@
+/*******************************************************************************
+ * Tree container
+ */
+ul.dynatree-container
+{
+	font-family: tahoma, arial, helvetica;
+	font-size: 10pt; /* font size should not be too big */
+	white-space: nowrap;
+	padding: 3px;
+	margin: 0; /* issue 201 */
+	background-color: white;
+	border: 1px dotted gray;
+	overflow: auto;
+/*	height: 100%; /* issue 263, 470 */
+	min-height: 0%;
+}
+
+ul.dynatree-container ul
+{
+	padding: 0 0 0 16px;
+	margin: 0;
+}
+
+ul.dynatree-container li
+{
+	list-style-image: none;
+	list-style-position: outside;
+	list-style-type: none;
+	-moz-background-clip:border;
+	-moz-background-inline-policy: continuous;
+	-moz-background-origin: padding;
+	background-attachment: scroll;
+	background-color: transparent;
+	background-repeat: repeat-y;
+	background-image: url("vline.gif");
+	background-position: 0 0;
+	/*
+	background-image: url("icons_96x256.gif");
+	background-position: -80px -64px;
+	*/
+	margin: 0;
+	padding: 1px 0 0 0;
+}
+/* Suppress lines for last child node */
+ul.dynatree-container li.dynatree-lastsib
+{
+	background-image: none;
+}
+/* Suppress lines if level is fixed expanded (option minExpandLevel) */
+ul.dynatree-no-connector > li
+{
+	background-image: none;
+}
+
+/* Style, when control is disabled */
+.ui-dynatree-disabled ul.dynatree-container
+{
+	opacity: 0.5;
+/*	filter: alpha(opacity=50); /* Yields a css warning */
+	background-color: silver;
+}
+
+/*******************************************************************************
+ * Common icon definitions
+ */
+span.dynatree-empty,
+span.dynatree-vline,
+span.dynatree-connector,
+span.dynatree-expander,
+span.dynatree-icon,
+span.dynatree-checkbox,
+span.dynatree-radio,
+span.dynatree-drag-helper-img,
+#dynatree-drop-marker
+{
+	width: 16px;
+	height: 16px;
+/*	display: -moz-inline-box; /* @ FF 1+2 removed for issue 221 */
+/*	-moz-box-align: start; /* issue 221 */
+	display: inline-block; /* Required to make a span sizeable */
+	vertical-align: top;
+	background-repeat: no-repeat;
+	background-position: left;
+	background-image: url("icons.gif");
+	background-position: 0 0;
+}
+
+/** Used by 'icon' node option: */
+ul.dynatree-container img
+{
+	width: 16px;
+	height: 16px;
+	margin-left: 3px;
+	vertical-align: top;
+	border-style: none;
+}
+
+
+/*******************************************************************************
+ * Lines and connectors
+ */
+
+span.dynatree-connector
+{
+	background-position: -16px -64px;
+}
+
+/*******************************************************************************
+ * Expander icon
+ * Note: IE6 doesn't correctly evaluate multiple class names,
+ *		 so we create combined class names that can be used in the CSS.
+ *
+ * Prefix: dynatree-exp-
+ * 1st character: 'e': expanded, 'c': collapsed
+ * 2nd character (optional): 'd': lazy (Delayed)
+ * 3rd character (optional): 'l': Last sibling
+ */
+
+span.dynatree-expander
+{
+	background-position: 0px -80px;
+	cursor: pointer;
+}
+.dynatree-exp-cl span.dynatree-expander /* Collapsed, not delayed, last sibling */
+{
+	background-position: 0px -96px;
+}
+.dynatree-exp-cd span.dynatree-expander /* Collapsed, delayed, not last sibling */
+{
+	background-position: -64px -80px;
+}
+.dynatree-exp-cdl span.dynatree-expander /* Collapsed, delayed, last sibling */
+{
+	background-position: -64px -96px;
+}
+.dynatree-exp-e span.dynatree-expander,  /* Expanded, not delayed, not last sibling */
+.dynatree-exp-ed span.dynatree-expander  /* Expanded, delayed, not last sibling */
+{
+	background-position: -32px -80px;
+}
+.dynatree-exp-el span.dynatree-expander,  /* Expanded, not delayed, last sibling */
+.dynatree-exp-edl span.dynatree-expander  /* Expanded, delayed, last sibling */
+{
+	background-position: -32px -96px;
+}
+.dynatree-loading span.dynatree-expander  /* 'Loading' status overrides all others */
+{
+	background-position: 0 0;
+	background-image: url("loading.gif");
+}
+
+
+/*******************************************************************************
+ * Checkbox icon
+ */
+span.dynatree-checkbox
+{
+	margin-left: 3px;
+	background-position: 0px -32px;
+}
+span.dynatree-checkbox:hover
+{
+	background-position: -16px -32px;
+}
+
+.dynatree-partsel span.dynatree-checkbox
+{
+	background-position: -64px -32px;
+}
+.dynatree-partsel span.dynatree-checkbox:hover
+{
+	background-position: -80px -32px;
+}
+
+.dynatree-selected span.dynatree-checkbox
+{
+	background-position: -32px -32px;
+}
+.dynatree-selected span.dynatree-checkbox:hover
+{
+	background-position: -48px -32px;
+}
+
+/*******************************************************************************
+ * Radiobutton icon
+ * This is a customization, that may be activated by overriding the 'checkbox'
+ * class name as 'dynatree-radio' in the tree options.
+ */
+span.dynatree-radio
+{
+	margin-left: 3px;
+	background-position: 0px -48px;
+}
+span.dynatree-radio:hover
+{
+	background-position: -16px -48px;
+}
+
+.dynatree-partsel span.dynatree-radio
+{
+	background-position: -64px -48px;
+}
+.dynatree-partsel span.dynatree-radio:hover
+{
+	background-position: -80px -48px;
+}
+
+.dynatree-selected span.dynatree-radio
+{
+	background-position: -32px -48px;
+}
+.dynatree-selected span.dynatree-radio:hover
+{
+	background-position: -48px -48px;
+}
+
+/*******************************************************************************
+ * Node type icon
+ * Note: IE6 doesn't correctly evaluate multiple class names,
+ *		 so we create combined class names that can be used in the CSS.
+ *
+ * Prefix: dynatree-ico-
+ * 1st character: 'e': expanded, 'c': collapsed
+ * 2nd character (optional): 'f': folder
+ */
+
+span.dynatree-icon /* Default icon */
+{
+	margin-left: 3px;
+	background-position: 0px 0px;
+}
+
+.dynatree-ico-cf span.dynatree-icon  /* Collapsed Folder */
+{
+	background-position: 0px -16px;
+}
+
+.dynatree-ico-ef span.dynatree-icon  /* Expanded Folder */
+{
+	background-position: -64px -16px;
+}
+
+/* Status node icons */
+
+.dynatree-statusnode-wait span.dynatree-icon
+{
+	background-image: url("loading.gif");
+}
+
+.dynatree-statusnode-error span.dynatree-icon
+{
+	background-position: 0px -112px;
+/*	background-image: url("ltError.gif");*/
+}
+
+/*******************************************************************************
+ * Node titles
+ */
+
+/* @Chrome: otherwise hit area of node titles is broken (issue 133)
+   Removed again for issue 165; (133 couldn't be reproduced) */
+span.dynatree-node
+{
+/*	display: -moz-inline-box; /* issue 133, 165, 172, 192. removed for issue 221*/
+/*	-moz-box-align: start; /* issue 221 */
+	display: inline-block; /* issue 373 Required to make a span sizeable */
+	vertical-align: top;
+}
+
+
+/* Remove blue color and underline from title links */
+ul.dynatree-container a
+/*, ul.dynatree-container a:visited*/
+{
+	color: black; /* inherit doesn't work on IE */
+	text-decoration: none;
+	vertical-align: top;
+	margin: 0px;
+	margin-left: 3px;
+/*	outline: 0; /* @ Firefox, prevent dotted border after click */
+}
+
+ul.dynatree-container a:hover
+{
+/*	text-decoration: underline; */
+	background-color: #F2F7FD; /* light blue */
+	border-color: #B8D6FB; /* darker light blue */
+}
+
+span.dynatree-node a
+{
+	font-size: 10pt; /* required for IE, quirks mode */
+	display: inline-block; /* Better alignment, when title contains <br> */
+/*	vertical-align: top;*/
+	padding-left: 3px;
+	padding-right: 3px; /* Otherwise italic font will be outside bounds */
+	/*	line-height: 16px; /* should be the same as img height, in case 16 px */
+}
+span.dynatree-folder a
+{
+	font-weight: bold;
+}
+
+ul.dynatree-container a:focus,
+span.dynatree-focused a:link  /* @IE */
+{
+	background-color: #EFEBDE; /* gray */
+}
+
+span.dynatree-has-children a
+{
+}
+
+span.dynatree-expanded a
+{
+}
+
+span.dynatree-selected a
+{
+	color: green;
+	font-style: italic;
+}
+
+span.dynatree-active a
+{
+	background-color: #3169C6 !important;
+	color: white !important; /* @ IE6 */
+}
+
+/*******************************************************************************
+ * Drag'n'drop support
+ */
+
+/*** Helper object ************************************************************/
+div.dynatree-drag-helper
+{
+}
+div.dynatree-drag-helper a
+{
+	border: 1px solid gray;
+	background-color: white;
+	padding-left: 5px;
+	padding-right: 5px;
+	opacity: 0.8;
+}
+span.dynatree-drag-helper-img
+{
+	/*
+	position: relative;
+	left: -16px;
+	*/
+}
+div.dynatree-drag-helper /*.dynatree-drop-accept*/
+{
+
+/*    border-color: green;
+	background-color: red;*/
+}
+div.dynatree-drop-accept span.dynatree-drag-helper-img
+{
+	background-position: -32px -112px;
+}
+div.dynatree-drag-helper.dynatree-drop-reject
+{
+	border-color: red;
+}
+div.dynatree-drop-reject span.dynatree-drag-helper-img
+{
+	background-position: -16px -112px;
+}
+
+/*** Drop marker icon *********************************************************/
+
+#dynatree-drop-marker
+{
+	width: 24px;
+	position: absolute;
+	background-position: 0 -128px;
+	margin: 0;
+/*	border: 1px solid red; */
+}
+#dynatree-drop-marker.dynatree-drop-after,
+#dynatree-drop-marker.dynatree-drop-before
+{
+	width:64px;
+	background-position: 0 -144px;
+}
+#dynatree-drop-marker.dynatree-drop-copy
+{
+	background-position: -64px -128px;
+}
+#dynatree-drop-marker.dynatree-drop-move
+{
+	background-position: -64px -128px;
+}
+
+/*** Source node while dragging ***********************************************/
+
+span.dynatree-drag-source
+{
+	/* border: 1px dotted gray; */
+	background-color: #e0e0e0;
+}
+span.dynatree-drag-source a
+{
+	color: gray;
+}
+
+/*** Target node while dragging cursor is over it *****************************/
+
+span.dynatree-drop-target
+{
+	/*border: 1px solid gray;*/
+}
+span.dynatree-drop-target a
+{
+}
+span.dynatree-drop-target.dynatree-drop-accept a
+{
+	/*border: 1px solid green;*/
+	background-color: #3169C6 !important;
+	color: white !important; /* @ IE6 */
+	text-decoration: none;
+}
+span.dynatree-drop-target.dynatree-drop-reject
+{
+	/*border: 1px solid red;*/
+}
+span.dynatree-drop-target.dynatree-drop-after a
+{
+}
+
+
+/*******************************************************************************
+ * Custom node classes (sample)
+ */
+
+span.custom1 a
+{
+	background-color: maroon;
+	color: yellow;
+}
diff --git a/static/style/blue/dynatree_skin/vline-rtl.gif b/static/style/blue/dynatree_skin/vline-rtl.gif
new file mode 100755
index 0000000..0400cb3
Binary files /dev/null and b/static/style/blue/dynatree_skin/vline-rtl.gif differ
diff --git a/static/style/blue/dynatree_skin/vline.gif b/static/style/blue/dynatree_skin/vline.gif
new file mode 100755
index 0000000..1b00ae5
Binary files /dev/null and b/static/style/blue/dynatree_skin/vline.gif differ
diff --git a/static/style/blue/embed_item.css b/static/style/blue/embed_item.css
new file mode 100644
index 0000000..4c8dbc0
--- /dev/null
+++ b/static/style/blue/embed_item.css
@@ -0,0 +1 @@
+.embedded-item{margin:0 auto;width:90%;-moz-border-radius:.5em;-webkit-border-radius:.5em;border-radius:.5em}.embedded-item .expanded-content{display:none;background-color:white}.embedded-item .item-content{max-height:45em;overflow:auto}.embedded-item>.title{vertical-align:top;text-align:center;font-weight:bold;padding-bottom:5px}.embedded-item.placeholder .content{padding:.5em .5em;font-style:italic;text-align:center}.embedded-item p{background:inherit;margin-top:0;margin-bottom:0}.embe [...]
\ No newline at end of file
diff --git a/static/style/blue/error_bg.png b/static/style/blue/error_bg.png
new file mode 100644
index 0000000..242cdba
Binary files /dev/null and b/static/style/blue/error_bg.png differ
diff --git a/static/style/blue/error_large.png b/static/style/blue/error_large.png
new file mode 100644
index 0000000..2cb2e8f
Binary files /dev/null and b/static/style/blue/error_large.png differ
diff --git a/static/style/blue/error_message_icon.png b/static/style/blue/error_message_icon.png
new file mode 100644
index 0000000..80be3f3
Binary files /dev/null and b/static/style/blue/error_message_icon.png differ
diff --git a/static/style/blue/error_small.png b/static/style/blue/error_small.png
new file mode 100644
index 0000000..04cdc33
Binary files /dev/null and b/static/style/blue/error_small.png differ
diff --git a/static/style/blue/footer_title_bg.png b/static/style/blue/footer_title_bg.png
new file mode 100644
index 0000000..e9e445d
Binary files /dev/null and b/static/style/blue/footer_title_bg.png differ
diff --git a/static/style/blue/form_body_bg.png b/static/style/blue/form_body_bg.png
new file mode 100644
index 0000000..52ad46a
Binary files /dev/null and b/static/style/blue/form_body_bg.png differ
diff --git a/static/style/blue/form_title_bg.png b/static/style/blue/form_title_bg.png
new file mode 100644
index 0000000..dd7599a
Binary files /dev/null and b/static/style/blue/form_title_bg.png differ
diff --git a/static/style/blue/fugue.png b/static/style/blue/fugue.png
new file mode 100644
index 0000000..b09c858
Binary files /dev/null and b/static/style/blue/fugue.png differ
diff --git a/static/style/blue/gray_bg.png b/static/style/blue/gray_bg.png
new file mode 100644
index 0000000..5ce5311
Binary files /dev/null and b/static/style/blue/gray_bg.png differ
diff --git a/static/style/blue/hgrad.png b/static/style/blue/hgrad.png
new file mode 100644
index 0000000..3f6cce3
Binary files /dev/null and b/static/style/blue/hgrad.png differ
diff --git a/static/style/blue/hgrad_over.png b/static/style/blue/hgrad_over.png
new file mode 100644
index 0000000..bf94175
Binary files /dev/null and b/static/style/blue/hgrad_over.png differ
diff --git a/static/style/blue/history-buttons.png b/static/style/blue/history-buttons.png
new file mode 100644
index 0000000..e761954
Binary files /dev/null and b/static/style/blue/history-buttons.png differ
diff --git a/static/style/blue/history-states.png b/static/style/blue/history-states.png
new file mode 100644
index 0000000..b893f01
Binary files /dev/null and b/static/style/blue/history-states.png differ
diff --git a/static/style/blue/history.css b/static/style/blue/history.css
new file mode 100644
index 0000000..e69de29
diff --git a/static/style/blue/info_large.png b/static/style/blue/info_large.png
new file mode 100644
index 0000000..8ac030b
Binary files /dev/null and b/static/style/blue/info_large.png differ
diff --git a/static/style/blue/info_message_icon.png b/static/style/blue/info_message_icon.png
new file mode 100644
index 0000000..b2c7823
Binary files /dev/null and b/static/style/blue/info_message_icon.png differ
diff --git a/static/style/blue/info_small.png b/static/style/blue/info_small.png
new file mode 100644
index 0000000..81522c4
Binary files /dev/null and b/static/style/blue/info_small.png differ
diff --git a/static/style/blue/iphone.css b/static/style/blue/iphone.css
new file mode 100644
index 0000000..cff317a
--- /dev/null
+++ b/static/style/blue/iphone.css
@@ -0,0 +1 @@
+a{text-decoration:underline}label{font-weight:normal}body{margin:0;font-family:Helvetica;background:#FFFFFF;color:#000000;overflow-x:hidden;-webkit-user-select:none;-webkit-text-size-adjust:none}body>*:not(.toolbar){display:none;position:absolute;margin:0;padding:0;left:0;width:100%;min-height:372px}body[orient="landscape"]>*:not(.toolbar){min-height:268px}body>*[selected="true"]{display:block}a[selected],a:active{background-color:#194fdb !important;background-image:url(../iui/listArrowS [...]
\ No newline at end of file
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-bg_flat_0_aaaaaa_40x100.png b/static/style/blue/jquery-ui/smoothness/images/ui-bg_flat_0_aaaaaa_40x100.png
new file mode 100755
index 0000000..5b5dab2
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-bg_flat_0_aaaaaa_40x100.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-bg_flat_75_ffffff_40x100.png b/static/style/blue/jquery-ui/smoothness/images/ui-bg_flat_75_ffffff_40x100.png
new file mode 100755
index 0000000..ac8b229
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-bg_flat_75_ffffff_40x100.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_55_fbf9ee_1x400.png b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_55_fbf9ee_1x400.png
new file mode 100755
index 0000000..b39a6fb
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_55_fbf9ee_1x400.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_65_ffffff_1x400.png b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_65_ffffff_1x400.png
new file mode 100755
index 0000000..42ccba2
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_65_ffffff_1x400.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_75_dadada_1x400.png b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_75_dadada_1x400.png
new file mode 100755
index 0000000..5a46b47
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_75_dadada_1x400.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_75_e6e6e6_1x400.png b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_75_e6e6e6_1x400.png
new file mode 100755
index 0000000..86c2baa
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_75_e6e6e6_1x400.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_95_fef1ec_1x400.png b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_95_fef1ec_1x400.png
new file mode 100755
index 0000000..4443fdc
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-bg_glass_95_fef1ec_1x400.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-bg_highlight-soft_75_cccccc_1x100.png b/static/style/blue/jquery-ui/smoothness/images/ui-bg_highlight-soft_75_cccccc_1x100.png
new file mode 100755
index 0000000..7c9fa6c
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-bg_highlight-soft_75_cccccc_1x100.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-icons_222222_256x240.png b/static/style/blue/jquery-ui/smoothness/images/ui-icons_222222_256x240.png
new file mode 100755
index 0000000..b273ff1
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-icons_222222_256x240.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-icons_2e83ff_256x240.png b/static/style/blue/jquery-ui/smoothness/images/ui-icons_2e83ff_256x240.png
new file mode 100755
index 0000000..84defe6
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-icons_2e83ff_256x240.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-icons_454545_256x240.png b/static/style/blue/jquery-ui/smoothness/images/ui-icons_454545_256x240.png
new file mode 100755
index 0000000..59bd45b
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-icons_454545_256x240.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-icons_888888_256x240.png b/static/style/blue/jquery-ui/smoothness/images/ui-icons_888888_256x240.png
new file mode 100755
index 0000000..6d02426
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-icons_888888_256x240.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/images/ui-icons_cd0a0a_256x240.png b/static/style/blue/jquery-ui/smoothness/images/ui-icons_cd0a0a_256x240.png
new file mode 100755
index 0000000..2ab019b
Binary files /dev/null and b/static/style/blue/jquery-ui/smoothness/images/ui-icons_cd0a0a_256x240.png differ
diff --git a/static/style/blue/jquery-ui/smoothness/jquery-ui.css b/static/style/blue/jquery-ui/smoothness/jquery-ui.css
new file mode 100644
index 0000000..e7bceb3
--- /dev/null
+++ b/static/style/blue/jquery-ui/smoothness/jquery-ui.css
@@ -0,0 +1,10 @@
+/*! jQuery UI - v1.9.1 - 2012-10-29
+* http://jqueryui.com
+* Includes: jquery.ui.core.css, jquery.ui.autocomplete.css, jquery.ui.button.css, jquery.ui.menu.css, jquery.ui.slider.css
+* To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Verdana%2CArial%2Csans-serif&fwDefault=normal&fsDefault=1.1em&cornerRadius=4px&bgColorHeader=cccccc&bgTextureHeader=03_highlight_soft.png&bgImgOpacityHeader=75&borderColorHeader=aaaaaa&fcHeader=222222&iconColorHeader=222222&bgColorContent=ffffff&bgTextureContent=01_flat.png&bgImgOpacityContent=75&borderColorContent=aaaaaa&fcContent=222222&iconColorContent=222222&bgColorDefault=e6e6e6&bgTextureDefault=02_gla [...]
+* Copyright (c) 2012 jQuery Foundation and other contributors Licensed MIT */.ui-helper-hidden{display:none}.ui-helper-hidden-accessible{position:absolute!important;clip:rect(1px);clip:rect(1px,1px,1px,1px)}.ui-helper-reset{margin:0;padding:0;border:0;outline:0;line-height:1.3;text-decoration:none;font-size:100%;list-style:none}.ui-helper-clearfix:before,.ui-helper-clearfix:after{content:"";display:table}.ui-helper-clearfix:after{clear:both}.ui-helper-clearfix{zoom:1}.ui-helper-zfix{widt [...]
+
+/* For jQueryUI combobox */
+.ui-combobox{position: relative; display: inline-block;}
+.ui-combobox-toggle {position: absolute; top: 0; bottom: 0; margin-left: -1px; padding: 0;}
+.ui-combobox-input {margin: 0; padding: 0.2em;}
\ No newline at end of file
diff --git a/static/style/blue/jquery.rating.css b/static/style/blue/jquery.rating.css
new file mode 100644
index 0000000..aaf43e8
--- /dev/null
+++ b/static/style/blue/jquery.rating.css
@@ -0,0 +1,12 @@
+/* jQuery.Rating Plugin CSS - http://www.fyneworks.com/jquery/star-rating/ */
+div.rating-cancel,div.star-rating{float:left;width:17px;height:15px;text-indent:-999em;cursor:pointer;display:block;background:transparent;overflow:hidden}
+div.rating-cancel,div.rating-cancel a{background:url(../images/delete.gif) no-repeat 0 -16px}
+div.star-rating,div.star-rating a{background:url(../images/star.gif) no-repeat 0 0px}
+div.rating-cancel a,div.star-rating a{display:block;width:16px;height:100%;background-position:0 0px;border:0}
+div.star-rating-on a{background-position:0 -16px!important}
+div.star-rating-hover a{background-position:0 -32px}
+/* Read Only CSS */
+div.star-rating-readonly a{cursor:default !important}
+/* Partial Star CSS */
+div.star-rating{background:transparent!important;overflow:hidden!important}
+/* END jQuery.Rating Plugin CSS */
\ No newline at end of file
diff --git a/static/style/blue/largespinner.gif b/static/style/blue/largespinner.gif
new file mode 100644
index 0000000..3288d10
Binary files /dev/null and b/static/style/blue/largespinner.gif differ
diff --git a/static/style/blue/layout_callout_top.png b/static/style/blue/layout_callout_top.png
new file mode 100644
index 0000000..2fcca6c
Binary files /dev/null and b/static/style/blue/layout_callout_top.png differ
diff --git a/static/style/blue/library.css b/static/style/blue/library.css
new file mode 100644
index 0000000..cdb1c4f
--- /dev/null
+++ b/static/style/blue/library.css
@@ -0,0 +1 @@
+a{text-decoration:underline}label{font-weight:normal}.library_style_container{width:95%;margin:auto;margin-top:2em;overflow:auto !important}.library_style_container .fa{font-size:12px}.library_style_container .fa-globe{font-size:initial;margin-left:.6em}.library_style_container .libraryRow{background-color:#ebd9b2}.library_style_container .datasetHighlighted{background-color:#f9f9f9}.library_style_container .libraryItemDeleted-True{font-style:italic}.library_style_container div.libraryIt [...]
\ No newline at end of file
diff --git a/static/style/blue/masthead.css b/static/style/blue/masthead.css
new file mode 100644
index 0000000..34bef5c
--- /dev/null
+++ b/static/style/blue/masthead.css
@@ -0,0 +1 @@
+a{text-decoration:underline}label{font-weight:normal}body{background:#2c3143;color:#999;padding:0;border:0;margin:3px;margin-right:5px;margin-left:5px;overflow:hidden}body #everything{position:absolute;top:0;left:0;width:100%;height:100%}body #messagebox{display:none}body #inactivebox{display:none}body #dd-helper{display:none}div.pageTitle{font-size:175%;font-weight:bold}div.pageTitle a:link,div.pageTitle a:visited,div.pageTitle a:active,div.pageTitle a:hover{text-decoration:none}a:link, [...]
\ No newline at end of file
diff --git a/static/style/blue/masthead_bg.png b/static/style/blue/masthead_bg.png
new file mode 100644
index 0000000..783c913
Binary files /dev/null and b/static/style/blue/masthead_bg.png differ
diff --git a/static/style/blue/menu_bg.png b/static/style/blue/menu_bg.png
new file mode 100644
index 0000000..10cf281
Binary files /dev/null and b/static/style/blue/menu_bg.png differ
diff --git a/static/style/blue/ok_bg.png b/static/style/blue/ok_bg.png
new file mode 100644
index 0000000..5378356
Binary files /dev/null and b/static/style/blue/ok_bg.png differ
diff --git a/static/style/blue/ok_large.png b/static/style/blue/ok_large.png
new file mode 100644
index 0000000..e68759a
Binary files /dev/null and b/static/style/blue/ok_large.png differ
diff --git a/static/style/blue/ok_small.png b/static/style/blue/ok_small.png
new file mode 100644
index 0000000..dee022c
Binary files /dev/null and b/static/style/blue/ok_small.png differ
diff --git a/static/style/blue/panel_header_bg.png b/static/style/blue/panel_header_bg.png
new file mode 100644
index 0000000..fd5aa05
Binary files /dev/null and b/static/style/blue/panel_header_bg.png differ
diff --git a/static/style/blue/popupmenu_callout_top.png b/static/style/blue/popupmenu_callout_top.png
new file mode 100644
index 0000000..2ded49b
Binary files /dev/null and b/static/style/blue/popupmenu_callout_top.png differ
diff --git a/static/style/blue/question-balloon.png b/static/style/blue/question-balloon.png
new file mode 100755
index 0000000..f78c11b
Binary files /dev/null and b/static/style/blue/question-balloon.png differ
diff --git a/static/style/blue/question-octagon-frame.png b/static/style/blue/question-octagon-frame.png
new file mode 100755
index 0000000..cff30ad
Binary files /dev/null and b/static/style/blue/question-octagon-frame.png differ
diff --git a/static/style/blue/reports.css b/static/style/blue/reports.css
new file mode 100644
index 0000000..2f6d17f
--- /dev/null
+++ b/static/style/blue/reports.css
@@ -0,0 +1 @@
+.reports-panel-container{position:absolute;top:30px;bottom:10px;overflow:scroll;width:100%;padding:10px}#reports_home{position:relative;top:-7px;float:right}#spark_time_select{display:inline-block}#spark_select{height:23px;width:30px;padding:0;border-radius:2px}.jqstooltip{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}.dir_arrow{visibility:hidden}.chart{cursor:zoom-in;position:relative;z-index:2}.chart rect{fill:steelblue;shape-rendering:preserveAspect [...]
\ No newline at end of file
diff --git a/static/style/blue/sprite-fugue.png b/static/style/blue/sprite-fugue.png
new file mode 100644
index 0000000..c3a670a
Binary files /dev/null and b/static/style/blue/sprite-fugue.png differ
diff --git a/static/style/blue/sprite-history-buttons.png b/static/style/blue/sprite-history-buttons.png
new file mode 100644
index 0000000..729156e
Binary files /dev/null and b/static/style/blue/sprite-history-buttons.png differ
diff --git a/static/style/blue/sprite-history-states.png b/static/style/blue/sprite-history-states.png
new file mode 100644
index 0000000..36eff80
Binary files /dev/null and b/static/style/blue/sprite-history-states.png differ
diff --git a/static/style/blue/tiny_arrow_left.png b/static/style/blue/tiny_arrow_left.png
new file mode 100644
index 0000000..d4d6579
Binary files /dev/null and b/static/style/blue/tiny_arrow_left.png differ
diff --git a/static/style/blue/tiny_arrow_right.png b/static/style/blue/tiny_arrow_right.png
new file mode 100644
index 0000000..487a055
Binary files /dev/null and b/static/style/blue/tiny_arrow_right.png differ
diff --git a/static/style/blue/trackster.css b/static/style/blue/trackster.css
new file mode 100644
index 0000000..0508d94
--- /dev/null
+++ b/static/style/blue/trackster.css
@@ -0,0 +1 @@
+.viewport-container{overflow-x:hidden;overflow-y:auto;background:white}.trackster-nav-container{width:100%;text-align:center}.trackster-nav-container.stand-alone{height:0}.trackster-nav{padding:0 0;color:#333;font-weight:bold;background:#cccccc;display:inline-block;top:-2em;background:transparent;border:none}.trackster-nav.stand-alone{position:relative}.chrom-nav{width:15em}.content{font:10px verdana;position:relative}.nav-controls{text-align:center;padding:1px 0}.nav-controls input{marg [...]
\ No newline at end of file
diff --git a/static/style/blue/wait_large.png b/static/style/blue/wait_large.png
new file mode 100644
index 0000000..480d6d9
Binary files /dev/null and b/static/style/blue/wait_large.png differ
diff --git a/static/style/blue/wait_small.png b/static/style/blue/wait_small.png
new file mode 100644
index 0000000..7202be8
Binary files /dev/null and b/static/style/blue/wait_small.png differ
diff --git a/static/style/blue/warn_bg.png b/static/style/blue/warn_bg.png
new file mode 100644
index 0000000..98b4cc8
Binary files /dev/null and b/static/style/blue/warn_bg.png differ
diff --git a/static/style/blue/warn_large.png b/static/style/blue/warn_large.png
new file mode 100644
index 0000000..c4c4af2
Binary files /dev/null and b/static/style/blue/warn_large.png differ
diff --git a/static/style/blue/warn_message_icon.png b/static/style/blue/warn_message_icon.png
new file mode 100644
index 0000000..cdfd42d
Binary files /dev/null and b/static/style/blue/warn_message_icon.png differ
diff --git a/static/style/blue/warn_small.png b/static/style/blue/warn_small.png
new file mode 100644
index 0000000..34c0c93
Binary files /dev/null and b/static/style/blue/warn_small.png differ
diff --git a/static/style/blue/workflow_circle_drag.png b/static/style/blue/workflow_circle_drag.png
new file mode 100644
index 0000000..39ddd45
Binary files /dev/null and b/static/style/blue/workflow_circle_drag.png differ
diff --git a/static/style/blue/workflow_circle_green.png b/static/style/blue/workflow_circle_green.png
new file mode 100644
index 0000000..14ed14c
Binary files /dev/null and b/static/style/blue/workflow_circle_green.png differ
diff --git a/static/style/blue/workflow_circle_open.png b/static/style/blue/workflow_circle_open.png
new file mode 100644
index 0000000..a3bdec1
Binary files /dev/null and b/static/style/blue/workflow_circle_open.png differ
diff --git a/static/style/shared_images/data_running.gif b/static/style/shared_images/data_running.gif
new file mode 100644
index 0000000..ba19ad7
Binary files /dev/null and b/static/style/shared_images/data_running.gif differ
diff --git a/static/style/shared_images/error_large.png b/static/style/shared_images/error_large.png
new file mode 100644
index 0000000..2cb2e8f
Binary files /dev/null and b/static/style/shared_images/error_large.png differ
diff --git a/static/style/shared_images/error_small.png b/static/style/shared_images/error_small.png
new file mode 100644
index 0000000..04cdc33
Binary files /dev/null and b/static/style/shared_images/error_small.png differ
diff --git a/static/style/shared_images/info_large.png b/static/style/shared_images/info_large.png
new file mode 100644
index 0000000..8ac030b
Binary files /dev/null and b/static/style/shared_images/info_large.png differ
diff --git a/static/style/shared_images/info_small.png b/static/style/shared_images/info_small.png
new file mode 100644
index 0000000..81522c4
Binary files /dev/null and b/static/style/shared_images/info_small.png differ
diff --git a/static/style/shared_images/ok_large.png b/static/style/shared_images/ok_large.png
new file mode 100644
index 0000000..e68759a
Binary files /dev/null and b/static/style/shared_images/ok_large.png differ
diff --git a/static/style/shared_images/ok_small.png b/static/style/shared_images/ok_small.png
new file mode 100644
index 0000000..dee022c
Binary files /dev/null and b/static/style/shared_images/ok_small.png differ
diff --git a/static/style/shared_images/wait_large.png b/static/style/shared_images/wait_large.png
new file mode 100644
index 0000000..480d6d9
Binary files /dev/null and b/static/style/shared_images/wait_large.png differ
diff --git a/static/style/shared_images/wait_small.png b/static/style/shared_images/wait_small.png
new file mode 100644
index 0000000..7202be8
Binary files /dev/null and b/static/style/shared_images/wait_small.png differ
diff --git a/static/style/shared_images/warn_large.png b/static/style/shared_images/warn_large.png
new file mode 100644
index 0000000..c4c4af2
Binary files /dev/null and b/static/style/shared_images/warn_large.png differ
diff --git a/static/style/shared_images/warn_small.png b/static/style/shared_images/warn_small.png
new file mode 100644
index 0000000..34c0c93
Binary files /dev/null and b/static/style/shared_images/warn_small.png differ
diff --git a/static/toolshed/maps/mvc/groups/group-detail-view.js.map b/static/toolshed/maps/mvc/groups/group-detail-view.js.map
new file mode 100644
index 0000000..36fd453
--- /dev/null
+++ b/static/toolshed/maps/mvc/groups/group-detail-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-detail-view.js","sources":["../../../src/mvc/groups/group-detail-view.js"],"names":["define","mod_toastr","mod_group_model","GroupDetailView","Backbone","View","extend","el","options","app","initialize","this","_","window","globalTS","groups","collection","model","get","group_id","render","fetchGroup","that","Group","id","fetch","success","console","log","error","response","responseJSON","err_msg","template","templateRow","$el","html","group","$","tooltip","css [...]
\ No newline at end of file
diff --git a/static/toolshed/maps/mvc/groups/group-list-view.js.map b/static/toolshed/maps/mvc/groups/group-list-view.js.map
new file mode 100644
index 0000000..d124bbb
--- /dev/null
+++ b/static/toolshed/maps/mvc/groups/group-list-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-list-view.js","sources":["../../../src/mvc/groups/group-list-view.js"],"names":["define","mod_toastr","mod_group_model","mod_group_row","GroupListView","Backbone","View","extend","el","defaults","initialize","options","this","_","that","window","globalTS","groups","collection","Groups","fetch","success","model","console","log","render","error","response","responseJSON","err_msg","$","hide","template","templateGroupsList","$el","html","length","models","renderRo [...]
\ No newline at end of file
diff --git a/static/toolshed/maps/mvc/groups/group-listrow-view.js.map b/static/toolshed/maps/mvc/groups/group-listrow-view.js.map
new file mode 100644
index 0000000..f7bc728
--- /dev/null
+++ b/static/toolshed/maps/mvc/groups/group-listrow-view.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-listrow-view.js","sources":["../../../src/mvc/groups/group-listrow-view.js"],"names":["define","GroupListRowView","Backbone","View","extend","events","initialize","options","this","render","group","tmpl","templateRow","setElement","$el","show","_","template","join"],"mappings":"AAAAA,UACA,WAGA,GAAIC,GAAmBC,SAASC,KAAKC,QACjCC,UAEAC,WAAa,SAAUC,GACnBC,KAAKC,OAAQF,EAAQG,QAGzBD,OAAQ,SAAUC,GACd,GAAIC,GAAOH,KAAKI,aAGhB,OAFAJ,MAAKK,WAAWF,GAAQD,MAAMA,KAC9BF,KAAKM,IAAIC, [...]
\ No newline at end of file
diff --git a/static/toolshed/maps/mvc/groups/group-model.js.map b/static/toolshed/maps/mvc/groups/group-model.js.map
new file mode 100644
index 0000000..7cf1622
--- /dev/null
+++ b/static/toolshed/maps/mvc/groups/group-model.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"group-model.js","sources":["../../../src/mvc/groups/group-model.js"],"names":["define","Group","Backbone","Model","extend","urlRoot","Groups","Collection","url","model"],"mappings":"AAAAA,UAAW,WAKP,GAAIC,GAAQC,SAASC,MAAMC,QACzBC,QAAS,gBAGPC,EAASJ,SAASK,WAAWH,QAC/BI,IAAK,cAELC,MAAOR,GAIb,QACIA,MAAOA,EACPK,OAAQA"}
\ No newline at end of file
diff --git a/static/toolshed/maps/test-file.js.map b/static/toolshed/maps/test-file.js.map
new file mode 100644
index 0000000..60e635d
--- /dev/null
+++ b/static/toolshed/maps/test-file.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"test-file.js","sources":["../src/test-file.js"],"names":["define"],"mappings":"AAAAA,UAAY,WACX,MAAO"}
\ No newline at end of file
diff --git a/static/toolshed/maps/toolshed.groups.js.map b/static/toolshed/maps/toolshed.groups.js.map
new file mode 100644
index 0000000..654c967
--- /dev/null
+++ b/static/toolshed/maps/toolshed.groups.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"toolshed.groups.js","sources":["../src/toolshed.groups.js"],"names":["define","mod_group_list","mod_group_detail","ToolshedRouter","Backbone","Router","extend","routes",":group_id","ToolshedGroups","View","groupListView","groupDetailView","collection","initialize","window","globalTS","groups","this","ts_router","on","GroupListView","group_id","GroupDetailView","history","start","pushState"],"mappings":"AAIAA,QACQ,8CACA,gDACA,2CAEJ,SACIC,EACAC,GAMR,GAAIC,GAAiBC,SAASC, [...]
\ No newline at end of file
diff --git a/static/toolshed/scripts/mvc/groups/group-detail-view.js b/static/toolshed/scripts/mvc/groups/group-detail-view.js
new file mode 100644
index 0000000..3165f3f
--- /dev/null
+++ b/static/toolshed/scripts/mvc/groups/group-detail-view.js
@@ -0,0 +1,2 @@
+define(["libs/toastr","toolshed/scripts/mvc/groups/group-model"],function(a,b){var c=Backbone.View.extend({el:"#groups_element",options:{},app:null,initialize:function(a){this.options=_.extend(this.options,a),this.app=window.globalTS.groups,null!==this.app.collection?(this.model=this.app.collection.get(this.options.group_id),this.render()):this.fetchGroup()},fetchGroup:function(c){var d=this;this.options=_.extend(this.options,c),this.model=new b.Group({id:this.options.group_id}),this.mod [...]
+//# sourceMappingURL=../../../maps/mvc/groups/group-detail-view.js.map
\ No newline at end of file
diff --git a/static/toolshed/scripts/mvc/groups/group-list-view.js b/static/toolshed/scripts/mvc/groups/group-list-view.js
new file mode 100644
index 0000000..0531102
--- /dev/null
+++ b/static/toolshed/scripts/mvc/groups/group-list-view.js
@@ -0,0 +1,2 @@
+define(["libs/toastr","toolshed/scripts/mvc/groups/group-model","toolshed/scripts/mvc/groups/group-listrow-view"],function(a,b,c){var d=Backbone.View.extend({el:"#groups_element",defaults:{},initialize:function(c){this.options=_.defaults(this.options||{},this.defaults,c);var d=this;window.globalTS.groups.collection=new b.Groups,window.globalTS.groups.collection.fetch({success:function(a){console.log("received data: "),console.log(a),d.render()},error:function(b,c){a.error("undefined"!=ty [...]
+//# sourceMappingURL=../../../maps/mvc/groups/group-list-view.js.map
\ No newline at end of file
diff --git a/static/toolshed/scripts/mvc/groups/group-listrow-view.js b/static/toolshed/scripts/mvc/groups/group-listrow-view.js
new file mode 100644
index 0000000..2642c3d
--- /dev/null
+++ b/static/toolshed/scripts/mvc/groups/group-listrow-view.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.View.extend({events:{},initialize:function(a){this.render(a.group)},render:function(a){var b=this.templateRow();return this.setElement(b({group:a})),this.$el.show(),this},templateRow:function(){return _.template(['<tr class="" data-id="<%- group.get("id") %>">','<td><a href="groups#/<%= group.get("id") %>"><%= group.get("name") %></a></td>','<td><%= group.get("total_members") %></td>','<td><%= group.get("total_repos") %></td>',"</tr>"].join(""))}});ret [...]
+//# sourceMappingURL=../../../maps/mvc/groups/group-listrow-view.js.map
\ No newline at end of file
diff --git a/static/toolshed/scripts/mvc/groups/group-model.js b/static/toolshed/scripts/mvc/groups/group-model.js
new file mode 100644
index 0000000..ab732aa
--- /dev/null
+++ b/static/toolshed/scripts/mvc/groups/group-model.js
@@ -0,0 +1,2 @@
+define([],function(){var a=Backbone.Model.extend({urlRoot:"/api/groups"}),b=Backbone.Collection.extend({url:"/api/groups",model:a});return{Group:a,Groups:b}});
+//# sourceMappingURL=../../../maps/mvc/groups/group-model.js.map
\ No newline at end of file
diff --git a/static/toolshed/scripts/toolshed.groups.js b/static/toolshed/scripts/toolshed.groups.js
new file mode 100644
index 0000000..87de050
--- /dev/null
+++ b/static/toolshed/scripts/toolshed.groups.js
@@ -0,0 +1,2 @@
+define(["toolshed/scripts/mvc/groups/group-list-view","toolshed/scripts/mvc/groups/group-detail-view","toolshed/scripts/mvc/groups/group-model"],function(a,b){var c=Backbone.Router.extend({routes:{"":"groups",":group_id":"group_page"}}),d=Backbone.View.extend({groupListView:null,groupDetailView:null,collection:null,initialize:function(){window.globalTS.groups=this,this.ts_router=new c,this.ts_router.on("route:groups",function(){window.globalTS.groups.groupListView=new a.GroupListView}),t [...]
+//# sourceMappingURL=../maps/toolshed.groups.js.map
\ No newline at end of file
diff --git a/static/toolshed/src b/static/toolshed/src
new file mode 120000
index 0000000..c92cf98
--- /dev/null
+++ b/static/toolshed/src
@@ -0,0 +1 @@
+../../client/toolshed/scripts/
\ No newline at end of file
diff --git a/static/user_disabled.html b/static/user_disabled.html
new file mode 100644
index 0000000..024af41
--- /dev/null
+++ b/static/user_disabled.html
@@ -0,0 +1,28 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html lang="en">
+    <head>
+        <title>Galaxy</title>
+        <style type="text/css">
+        body {
+            min-width: 500px;
+            text-align: center;
+        }
+        .errormessage {
+            font: 75% verdana, "Bitstream Vera Sans", geneva, arial, helvetica, helve, sans-serif;
+            padding: 10px;
+            margin: 100px auto;
+            min-height: 32px;
+            max-width: 500px;
+            border: 1px solid #AA6666;
+            background-color: #FFCCCC;
+            text-align: left;
+        }
+        </style>
+    </head>
+    <body>
+        <div class="errormessage">
+            <h4>Account Disabled</h4>
+            <p>Your account is no longer valid; contact your Galaxy administrator to reactivate your account.</p>
+        </div>
+    </body>
+</html>
diff --git a/static/welcome.html.sample b/static/welcome.html.sample
new file mode 100644
index 0000000..7fc590a
--- /dev/null
+++ b/static/welcome.html.sample
@@ -0,0 +1,42 @@
+<!DOCTYPE html>
+<html lang="en">
+    <head>
+        <meta charset="utf-8">
+        <link rel="stylesheet" href="style/base.css" type="text/css" />
+    </head>
+    <body style="margin: 0">
+        <div class="jumbotron">
+            <div class="container">
+                <h2>Hello, <strong>Galaxy</strong> is running!</h2>
+                To customize this page, edit <code>static/welcome.html</code>
+                <br>
+                <a target="_blank" href="https://wiki.galaxyproject.org/Admin/Config" class="btn btn-primary btn-lg">Configuring Galaxy »</a>
+                <a target="_blank" href="https://wiki.galaxyproject.org/Admin/Tools/AddToolFromToolShedTutorial" class="btn btn-primary btn-lg">Installing Tools »</a>
+            </div>
+            <br>
+            <div class="container">
+                <p>Take an interactive tour:
+                    <a target="_parent" href="../tours/core.galaxy_ui" class="btn btn-default btn-xs">Galaxy UI</a>
+                    <a target="_parent" href="../tours/core.history" class="btn btn-default btn-xs">History</a>
+                    <a target="_parent" href="../tours/core.scratchbook" class="btn btn-default btn-xs">Scratchbook</a>
+                </p>
+            </div>
+        </div>
+        <div class="container">
+            <p class="lead">
+                <a target="_blank" class="reference" href="http://galaxyproject.org/">
+                Galaxy</a> is an open platform for supporting data-intensive
+                research. Galaxy is developed by <a target="_blank" class="reference" href="http://wiki.galaxyproject.org/GalaxyTeam">The Galaxy Team</a>
+                with the support of <a target="_blank" class="reference" href="https://github.com/galaxyproject/galaxy/blob/dev/CONTRIBUTORS.md">many contributors</a>.
+            </p>
+            <footer>
+                The <a target="_blank" class="reference" href="http://galaxyproject.org/">Galaxy Project</a>
+                is supported in part by <a target="_blank" class="reference" href="http://www.genome.gov">NHGRI</a>,
+                <a target="_blank" class="reference" href="http://www.nsf.gov">NSF</a>,
+                <a target="_blank" class="reference" href="http://www.huck.psu.edu">The Huck Institutes of the Life Sciences</a>,
+                <a target="_blank" class="reference" href="http://www.ics.psu.edu">The Institute for CyberScience at Penn State</a>,
+                and <a target="_blank" class="reference" href="http://www.jhu.edu/">Johns Hopkins University</a>.
+            </footer>
+        </div>
+    </body>
+</html>
diff --git a/static/wymeditor/iframe/default/lbl-blockquote.png b/static/wymeditor/iframe/default/lbl-blockquote.png
new file mode 100644
index 0000000..65ea205
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-blockquote.png differ
diff --git a/static/wymeditor/iframe/default/lbl-h1.png b/static/wymeditor/iframe/default/lbl-h1.png
new file mode 100644
index 0000000..dea1da3
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-h1.png differ
diff --git a/static/wymeditor/iframe/default/lbl-h2.png b/static/wymeditor/iframe/default/lbl-h2.png
new file mode 100644
index 0000000..f4b4274
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-h2.png differ
diff --git a/static/wymeditor/iframe/default/lbl-h3.png b/static/wymeditor/iframe/default/lbl-h3.png
new file mode 100644
index 0000000..bd99f76
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-h3.png differ
diff --git a/static/wymeditor/iframe/default/lbl-h4.png b/static/wymeditor/iframe/default/lbl-h4.png
new file mode 100644
index 0000000..e06ab3d
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-h4.png differ
diff --git a/static/wymeditor/iframe/default/lbl-h5.png b/static/wymeditor/iframe/default/lbl-h5.png
new file mode 100644
index 0000000..360fc60
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-h5.png differ
diff --git a/static/wymeditor/iframe/default/lbl-h6.png b/static/wymeditor/iframe/default/lbl-h6.png
new file mode 100644
index 0000000..73e644a
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-h6.png differ
diff --git a/static/wymeditor/iframe/default/lbl-p.png b/static/wymeditor/iframe/default/lbl-p.png
new file mode 100644
index 0000000..7f2bf34
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-p.png differ
diff --git a/static/wymeditor/iframe/default/lbl-pre.png b/static/wymeditor/iframe/default/lbl-pre.png
new file mode 100644
index 0000000..e026e47
Binary files /dev/null and b/static/wymeditor/iframe/default/lbl-pre.png differ
diff --git a/static/wymeditor/iframe/default/wymiframe.css b/static/wymeditor/iframe/default/wymiframe.css
new file mode 100644
index 0000000..769520c
--- /dev/null
+++ b/static/wymeditor/iframe/default/wymiframe.css
@@ -0,0 +1,90 @@
+/*
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ * 
+ * File Name:
+ *        wymeditor.css
+ *        Main editor css file.
+ *        See the documentation for more info.
+ * 
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+ *        Daniel Reszka (d.reszka a-t wymeditor dotorg)
+*/
+
+/* VISUAL FEEDBACK */
+
+/* basic */
+  body  { background: #e1e8f1;}
+ 
+/* make HTML blocs visible */
+  p,
+  h1,
+  h2,
+  h3,
+  h4,
+  h5,
+  h6,
+  ul,
+  ol,
+  table,
+  blockquote,
+  pre           { background: #FFFFFF no-repeat 2px 2px;
+                  padding:8px 5px 5px;
+                  margin:10px; }
+  td            { background: #F0F4F8; }
+  th            { background: #ffffcc; }
+  ul,
+  ol            { border-left:20px solid #B9C4D0; padding:0px 5px; }
+  caption       { background: #E4E4B0; padding: 5px; font-weight: bold; }
+  table         { font-size: 12px; width: 500px; }
+  td            { width: 25%; }
+  blockquote    { margin-left: 30px; }
+  pre           { background-color:transparent; border: 1px solid white; }
+
+/* Gecko min height fix */
+  p             { min-height: 1em; } /* min-height is needed under Firefox, because empty paragraphs would otherwise collapse */
+  *+html p      { min-height: auto; } /* but we have to remove it under IE7 because it triggers the 'haslayout' mode */
+  td            { height: 1.6em; }
+ 
+/* labels */
+  p         { background-image: url(lbl-p.png); }
+  h1        { background-image: url(lbl-h1.png); }
+  h2        { background-image: url(lbl-h2.png); }
+  h3        { background-image: url(lbl-h3.png); }
+  h4        { background-image: url(lbl-h4.png); }
+  h5        { background-image: url(lbl-h5.png); }
+  h6        { background-image: url(lbl-h6.png); }
+  blockquote{ background-image: url(lbl-blockquote.png); }
+  pre       { background-image: url(lbl-pre.png); }
+
+/* specific HTML elements */
+  caption   { text-align: left; }
+  img       { margin-right: 5px;
+              border-style: solid;
+              border-color: gray;
+              border-width: 0; }
+  a img     { border-width: 1px; border-color: blue; }
+  acronym   { border: 1px solid gray; }
+  span      { background-color: #eef; }
+
+/* visual feedback for invalid nesting of elements */
+  h1 h1, h1 h2, h1 h3, h1 h4, h1 h5, h1 h6, h1 p, h1 pre, h1 address,
+  h2 h1, h2 h2, h2 h3, h2 h4, h2 h5, h2 h6, h2 p, h2 pre, h2 address,
+  h3 h1, h3 h2, h3 h3, h3 h4, h3 h5, h3 h6, h3 p, h3 pre, h3 address,
+  h4 h1, h4 h2, h4 h3, h4 h4, h4 h5, h4 h6, h4 p, h4 pre, h4 address,
+  h5 h1, h5 h2, h5 h3, h5 h4, h5 h5, h5 h6, h5 p, h5 pre, h5 address,
+  h6 h1, h6 h2, h6 h3, h6 h4, h6 h5, h6 h6, h6 p, h6 pre, h6 address,
+  p h1, p h2, p h3, p h4, p h5, p h6, p pre, p address,
+  pre h1, pre h2, pre h3, pre h4, pre h5, pre h6, pre p, pre pre, pre address,
+  address h1, address h2, address h3, address h4, address h5, address h6,
+  address p, address pre, address address
+  { background-color: #ff9999 !important;
+    border: 1px solid red !important;
+    font-size: 12px !important;
+    font-weight: normal; }
diff --git a/static/wymeditor/iframe/default/wymiframe.html b/static/wymeditor/iframe/default/wymiframe.html
new file mode 100644
index 0000000..91a690c
--- /dev/null
+++ b/static/wymeditor/iframe/default/wymiframe.html
@@ -0,0 +1,26 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ *
+ * File Name:
+ *        wymiframe.html
+ *        Iframe used by designMode.
+ *        See the documentation for more info.
+ *
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+-->
+<html>
+<head>
+<title>WYMeditor iframe</title>
+<meta http-equiv="X-UA-Compatible" content="IE=EmulateIE7" />
+<link rel="stylesheet" type="text/css" media="screen" href="wymiframe.css" />
+</head>
+<body class="wym_iframe"></body>
+</html>
diff --git a/static/wymeditor/iframe/galaxy/lbl-blockquote.png b/static/wymeditor/iframe/galaxy/lbl-blockquote.png
new file mode 100644
index 0000000..65ea205
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-blockquote.png differ
diff --git a/static/wymeditor/iframe/galaxy/lbl-h1.png b/static/wymeditor/iframe/galaxy/lbl-h1.png
new file mode 100644
index 0000000..dea1da3
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-h1.png differ
diff --git a/static/wymeditor/iframe/galaxy/lbl-h2.png b/static/wymeditor/iframe/galaxy/lbl-h2.png
new file mode 100644
index 0000000..f4b4274
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-h2.png differ
diff --git a/static/wymeditor/iframe/galaxy/lbl-h3.png b/static/wymeditor/iframe/galaxy/lbl-h3.png
new file mode 100644
index 0000000..bd99f76
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-h3.png differ
diff --git a/static/wymeditor/iframe/galaxy/lbl-h4.png b/static/wymeditor/iframe/galaxy/lbl-h4.png
new file mode 100644
index 0000000..e06ab3d
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-h4.png differ
diff --git a/static/wymeditor/iframe/galaxy/lbl-h5.png b/static/wymeditor/iframe/galaxy/lbl-h5.png
new file mode 100644
index 0000000..360fc60
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-h5.png differ
diff --git a/static/wymeditor/iframe/galaxy/lbl-h6.png b/static/wymeditor/iframe/galaxy/lbl-h6.png
new file mode 100644
index 0000000..73e644a
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-h6.png differ
diff --git a/static/wymeditor/iframe/galaxy/lbl-p.png b/static/wymeditor/iframe/galaxy/lbl-p.png
new file mode 100644
index 0000000..7f2bf34
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-p.png differ
diff --git a/static/wymeditor/iframe/galaxy/lbl-pre.png b/static/wymeditor/iframe/galaxy/lbl-pre.png
new file mode 100644
index 0000000..e026e47
Binary files /dev/null and b/static/wymeditor/iframe/galaxy/lbl-pre.png differ
diff --git a/static/wymeditor/iframe/galaxy/wymiframe.css b/static/wymeditor/iframe/galaxy/wymiframe.css
new file mode 100644
index 0000000..165a716
--- /dev/null
+++ b/static/wymeditor/iframe/galaxy/wymiframe.css
@@ -0,0 +1,95 @@
+/*
+ * JG: edited to remove '!important' from rules so that embedded Galaxy items can have unique background colors.
+ */
+
+/*
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ * 
+ * File Name:
+ *        wymeditor.css
+ *        Main editor css file.
+ *        See the documentation for more info.
+ * 
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+ *        Daniel Reszka (d.reszka a-t wymeditor dotorg)
+*/
+
+/* VISUAL FEEDBACK */
+
+/* basic */
+ 
+/* make HTML blocs visible */
+  p,
+  h1,
+  h2,
+  h3,
+  h4,
+  h5,
+  h6,
+  ul,
+  ol,
+  table,
+  blockquote,
+  pre           { background: #FFFFFF no-repeat 2px 2px; /* JG removed: !important */
+                  padding:8px 5px 5px; /* JG removed: !important */
+                  margin:10px} /* JG removed: !important */
+  td            { background: #F0F4F8; }
+  th            { background: #ffffcc; }
+  ul,
+  ol            { border-left:20px solid #B9C4D0; padding:0px 5px; }
+  caption       { background: #E4E4B0; padding: 5px; font-weight: bold; }
+  table         { font-size: 12px; min-width: 500px; border: none !important; border-collapse: separate !important }
+  td            { width: 25%; }
+  blockquote    { margin-left: 30px; }
+  pre           { background-color:transparent; border: 1px solid white; }
+
+/* Gecko min height fix */
+  p             { min-height: 1em; } /* min-height is needed under Firefox, because empty paragraphs would otherwise collapse */
+  *+html p      { min-height: auto; } /* but we have to remove it under IE7 because it triggers the 'haslayout' mode */
+  td            { height: 1.6em; }
+ 
+/* labels */
+/* JG commented out:
+  p         { background-image: url(lbl-p.png); }
+  h1        { background-image: url(lbl-h1.png); }
+  h2        { background-image: url(lbl-h2.png); }
+  h3        { background-image: url(lbl-h3.png); }
+  h4        { background-image: url(lbl-h4.png); }
+  h5        { background-image: url(lbl-h5.png); }
+  h6        { background-image: url(lbl-h6.png); }
+  blockquote{ background-image: url(lbl-blockquote.png); }
+  pre       { background-image: url(lbl-pre.png); }
+*/
+
+/* specific HTML elements */
+  caption   { text-align: left; }
+  img       { margin-right: 5px;
+              border-style: solid;
+              border-color: gray;
+              border-width: 0; }
+  a img     { border-width: 1px; border-color: blue; }
+  acronym   { border: 1px solid gray; }
+  span      { background-color: #eef; }
+
+/* visual feedback for invalid nesting of elements */
+  h1 h1, h1 h2, h1 h3, h1 h4, h1 h5, h1 h6, h1 p, h1 pre, h1 address,
+  h2 h1, h2 h2, h2 h3, h2 h4, h2 h5, h2 h6, h2 p, h2 pre, h2 address,
+  h3 h1, h3 h2, h3 h3, h3 h4, h3 h5, h3 h6, h3 p, h3 pre, h3 address,
+  h4 h1, h4 h2, h4 h3, h4 h4, h4 h5, h4 h6, h4 p, h4 pre, h4 address,
+  h5 h1, h5 h2, h5 h3, h5 h4, h5 h5, h5 h6, h5 p, h5 pre, h5 address,
+  h6 h1, h6 h2, h6 h3, h6 h4, h6 h5, h6 h6, h6 p, h6 pre, h6 address,
+  p h1, p h2, p h3, p h4, p h5, p h6, p pre, p address,
+  pre h1, pre h2, pre h3, pre h4, pre h5, pre h6, pre p, pre pre, pre address,
+  address h1, address h2, address h3, address h4, address h5, address h6,
+  address p, address pre, address address
+  { background-color: #ff9999 !important;
+    border: 1px solid red !important;
+    font-size: 12px !important;
+    font-weight: normal; }
diff --git a/static/wymeditor/iframe/galaxy/wymiframe.html b/static/wymeditor/iframe/galaxy/wymiframe.html
new file mode 100644
index 0000000..932e7a4
--- /dev/null
+++ b/static/wymeditor/iframe/galaxy/wymiframe.html
@@ -0,0 +1,27 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ *
+ * File Name:
+ *        wymiframe.html
+ *        Iframe used by designMode.
+ *        See the documentation for more info.
+ *
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+-->
+<html>
+<head>
+<title>WYMeditor iframe</title>
+<meta http-equiv="X-UA-Compatible" content="IE=EmulateIE7" />
+<link rel="stylesheet" type="text/css" media="screen" href="../../../style/base.css" />
+<link rel="stylesheet" type="text/css" media="screen" href="wymiframe.css" />
+</head>
+<body class="wym_iframe text-content"></body>
+</html>
diff --git a/static/wymeditor/lang/bg.js b/static/wymeditor/lang/bg.js
new file mode 100644
index 0000000..576bca5
--- /dev/null
+++ b/static/wymeditor/lang/bg.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['bg'] = {
+    Strong:           'Получер',
+    Emphasis:         'Курсив',
+    Superscript:      'Горен индекс',
+    Subscript:        'Долен индекс',
+    Ordered_List:     'Подреден списък',
+    Unordered_List:   'Неподреден списък',
+    Indent:           'Блок навътре',
+    Outdent:          'Блок навън',
+    Undo:             'Стъпка назад',
+    Redo:             'Стъпка напред',
+    Link:             'Създай хипервръзка',
+    Unlink:           'Премахни хипервръзката',
+    Image:            'Изображение',
+    Table:            'Таблица',
+    HTML:             'HTML',
+    Paragraph:        'Абзац',
+    Heading_1:        'Заглавие 1',
+    Heading_2:        'Заглавие 2',
+    Heading_3:        'Заглавие 3',
+    Heading_4:        'Заглавие 4',
+    Heading_5:        'Заглавие 5',
+    Heading_6:        'Заглавие 6',
+    Preformatted:     'Преформатиран',
+    Blockquote:       'Цитат',
+    Table_Header:     'Заглавие на таблицата',
+    URL:              'URL',
+    Title:            'Заглавие',
+    Alternative_Text: 'Алтернативен текст',
+    Caption:          'Етикет',
+    Summary:          'Общо',
+    Number_Of_Rows:   'Брой редове',
+    Number_Of_Cols:   'Брой колони',
+    Submit:           'Изпрати',
+    Cancel:           'Отмени',
+    Choose:           'Затвори',
+    Preview:          'Предварителен преглед',
+    Paste_From_Word:  'Вмъкни от MS WORD',
+    Tools:            'Инструменти',
+    Containers:       'Контейнери',
+    Classes:          'Класове',
+    Status:           'Статус',
+    Source_Code:      'Източник, код'
+};
+
diff --git a/static/wymeditor/lang/ca.js b/static/wymeditor/lang/ca.js
new file mode 100644
index 0000000..c342406
--- /dev/null
+++ b/static/wymeditor/lang/ca.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['ca'] = {
+    Strong:           'Ressaltar',
+    Emphasis:         'Emfatitzar',
+    Superscript:      'Superindex', 
+    Subscript:        'Subindex',
+    Ordered_List:     'Llistat ordenat',
+    Unordered_List:   'Llistat sense ordenar',
+    Indent:           'Indentat',
+    Outdent:          'Sense indentar',
+    Undo:             'Desfer',
+    Redo:             'Refer',
+    Link:             'Enllaçar',
+    Unlink:           'Eliminar enllaç',
+    Image:            'Imatge',
+    Table:            'Taula',
+    HTML:             'HTML',
+    Paragraph:        'Paràgraf',
+    Heading_1:        'Capçalera 1',
+    Heading_2:        'Capçalera 2',
+    Heading_3:        'Capçalera 3',
+    Heading_4:        'Capçalera 4',
+    Heading_5:        'Capçalera 5',
+    Heading_6:        'Capçalera 6',
+    Preformatted:     'Pre-formatejat',
+    Blockquote:       'Cita',
+    Table_Header:     'Capçalera de la taula',
+    URL:              'URL',
+    Title:            'Títol',
+    Alternative_Text: 'Text alternatiu',
+    Caption:          'Llegenda',
+    Summary:          'Summary',
+    Number_Of_Rows:   'Nombre de files',
+    Number_Of_Cols:   'Nombre de columnes',
+    Submit:           'Enviar',
+    Cancel:           'Cancel·lar',
+    Choose:           'Triar',
+    Preview:          'Vista prèvia',
+    Paste_From_Word:  'Pegar des de Word',
+    Tools:            'Eines',
+    Containers:       'Contenidors',
+    Classes:          'Classes',
+    Status:           'Estat',
+    Source_Code:      'Codi font'
+};
+
diff --git a/static/wymeditor/lang/cs.js b/static/wymeditor/lang/cs.js
new file mode 100644
index 0000000..3939d71
--- /dev/null
+++ b/static/wymeditor/lang/cs.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['cs'] = {
+    Strong:           'Tučné',
+    Emphasis:         'Kurzíva',
+    Superscript:      'Horní index',
+    Subscript:        'Dolní index',
+    Ordered_List:     'Číslovaný seznam',
+    Unordered_List:   'Nečíslovaný seznam',
+    Indent:           'Zvětšit odsazení',
+    Outdent:          'Zmenšit odsazení',
+    Undo:             'Zpět',
+    Redo:             'Znovu',
+    Link:             'Vytvořit odkaz',
+    Unlink:           'Zrušit odkaz',
+    Image:            'Obrázek',
+    Table:            'Tabulka',
+    HTML:             'HTML',
+    Paragraph:        'Odstavec',
+    Heading_1:        'Nadpis 1. úrovně',
+    Heading_2:        'Nadpis 2. úrovně',
+    Heading_3:        'Nadpis 3. úrovně',
+    Heading_4:        'Nadpis 4. úrovně',
+    Heading_5:        'Nadpis 5. úrovně',
+    Heading_6:        'Nadpis 6. úrovně',
+    Preformatted:     'Předformátovaný text',
+    Blockquote:       'Citace',
+    Table_Header:     'Hlavičková buňka tabulky',
+    URL:              'Adresa',
+    Title:            'Text po najetí myší',
+    Alternative_Text: 'Text pro případ nezobrazení obrázku',
+    Caption:          'Titulek tabulky',
+    Summary:          'Shrnutí obsahu',
+    Number_Of_Rows:   'Počet řádek',
+    Number_Of_Cols:   'Počet sloupců',
+    Submit:           'Vytvořit',
+    Cancel:           'Zrušit',
+    Choose:           'Vybrat',
+    Preview:          'Náhled',
+    Paste_From_Word:  'Vložit z Wordu',
+    Tools:            'Nástroje',
+    Containers:       'Typy obsahu',
+    Classes:          'Třídy',
+    Status:           'Stav',
+    Source_Code:      'Zdrojový kód'
+};
+
diff --git a/static/wymeditor/lang/de.js b/static/wymeditor/lang/de.js
new file mode 100644
index 0000000..a1e01e1
--- /dev/null
+++ b/static/wymeditor/lang/de.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['de'] = {
+    Strong:           'Fett',
+    Emphasis:         'Kursiv',
+    Superscript:      'Text hochstellen',
+    Subscript:        'Text tiefstellen',
+    Ordered_List:     'Geordnete Liste einfügen',
+    Unordered_List:   'Ungeordnete Liste einfügen',
+    Indent:           'Einzug erhöhen',
+    Outdent:          'Einzug vermindern',
+    Undo:             'Befehle rückgängig machen',
+    Redo:             'Befehle wiederherstellen',
+    Link:             'Hyperlink einfügen',
+    Unlink:           'Hyperlink entfernen',
+    Image:            'Bild einfügen',
+    Table:            'Tabelle einfügen',
+    HTML:             'HTML anzeigen/verstecken',
+    Paragraph:        'Absatz',
+    Heading_1:        'Überschrift 1',
+    Heading_2:        'Überschrift 2',
+    Heading_3:        'Überschrift 3',
+    Heading_4:        'Überschrift 4',
+    Heading_5:        'Überschrift 5',
+    Heading_6:        'Überschrift 6',
+    Preformatted:     'Vorformatiert',
+    Blockquote:       'Zitat',
+    Table_Header:     'Tabellenüberschrift',
+    URL:              'URL',
+    Title:            'Titel',
+    Alternative_Text: 'Alternativer Text',
+    Caption:          'Tabellenüberschrift',
+    Summary:          'Summary',
+    Number_Of_Rows:   'Anzahl Zeilen',
+    Number_Of_Cols:   'Anzahl Spalten',
+    Submit:           'Absenden',
+    Cancel:           'Abbrechen',
+    Choose:           'Auswählen',
+    Preview:          'Vorschau',
+    Paste_From_Word:  'Aus Word einfügen',
+    Tools:            'Werkzeuge',
+    Containers:       'Inhaltstyp',
+    Classes:          'Klassen',
+    Status:           'Status',
+    Source_Code:      'Quellcode'
+};
+
diff --git a/static/wymeditor/lang/en.js b/static/wymeditor/lang/en.js
new file mode 100644
index 0000000..86452ab
--- /dev/null
+++ b/static/wymeditor/lang/en.js
@@ -0,0 +1,52 @@
+WYMeditor.STRINGS['en'] = {
+    Strong:           'Strong',
+    Emphasis:         'Emphasis',
+    Superscript:      'Superscript',
+    Subscript:        'Subscript',
+    Ordered_List:     'Ordered List',
+    Unordered_List:   'Unordered List',
+    Indent:           'Indent',
+    Outdent:          'Outdent',
+    Undo:             'Undo',
+    Redo:             'Redo',
+    Link:             'Link',
+    Unlink:           'Unlink',
+    Image:            'Image',
+    Table:            'Table',
+    HTML:             'HTML',
+    Paragraph:        'Paragraph',
+    Heading_1:        'Heading 1',
+    Heading_2:        'Heading 2',
+    Heading_3:        'Heading 3',
+    Heading_4:        'Heading 4',
+    Heading_5:        'Heading 5',
+    Heading_6:        'Heading 6',
+    Preformatted:     'Preformatted',
+    Blockquote:       'Blockquote',
+    Table_Header:     'Table Header',
+    URL:              'URL',
+    Title:            'Title',
+    Alternative_Text: 'Alternative text',
+    Caption:          'Caption',
+    Summary:          'Summary',
+    Number_Of_Rows:   'Number of rows',
+    Number_Of_Cols:   'Number of cols',
+    Submit:           'Submit',
+    Cancel:           'Cancel',
+    Choose:           'Choose',
+    Preview:          'Preview',
+    Paste_From_Word:  'Paste from Word',
+    Tools:            'Tools',
+    Containers:       'Containers',
+    Classes:          'Classes',
+    Status:           'Status',
+    Source_Code:      'Source code',
+    
+    // Galaxy replacements.
+    Galaxy_History_Link:     'Insert Link to History',
+    Galaxy_Dataset_Link:     'Insert Link to Dataset',
+    Galaxy_Workflow_Link:    'Insert Link to Workflow',
+    Galaxy_Page_Link:        'Insert Link to Page',
+    Annotate_Galaxy_History: 'Annotate History'
+};
+
diff --git a/static/wymeditor/lang/es.js b/static/wymeditor/lang/es.js
new file mode 100644
index 0000000..cdb03c1
--- /dev/null
+++ b/static/wymeditor/lang/es.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['es'] = {
+    Strong:           'Resaltar',
+    Emphasis:         'Enfatizar',
+    Superscript:      'Superindice',
+    Subscript:        'Subindice',
+    Ordered_List:     'Lista ordenada',
+    Unordered_List:   'Lista sin ordenar',
+    Indent:           'Indentado',
+    Outdent:          'Sin indentar',
+    Undo:             'Deshacer',
+    Redo:             'Rehacer',
+    Link:             'Enlazar',
+    Unlink:           'Eliminar enlace',
+    Image:            'Imagen',
+    Table:            'Tabla',
+    HTML:             'HTML',
+    Paragraph:        'Párrafo',
+    Heading_1:        'Cabecera 1',
+    Heading_2:        'Cabecera 2',
+    Heading_3:        'Cabecera 3',
+    Heading_4:        'Cabecera 4',
+    Heading_5:        'Cabecera 5',
+    Heading_6:        'Cabecera 6',
+    Preformatted:     'Preformateado',
+    Blockquote:       'Cita',
+    Table_Header:     'Cabecera de la tabla',
+    URL:              'URL',
+    Title:            'Título',
+    Alternative_Text: 'Texto alternativo',
+    Caption:          'Leyenda',
+    Summary:          'Summary',
+    Number_Of_Rows:   'Número de filas',
+    Number_Of_Cols:   'Número de columnas',
+    Submit:           'Enviar',
+    Cancel:           'Cancelar',
+    Choose:           'Seleccionar',
+    Preview:          'Vista previa',
+    Paste_From_Word:  'Pegar desde Word',
+    Tools:            'Herramientas',
+    Containers:       'Contenedores',
+    Classes:          'Clases',
+    Status:           'Estado',
+    Source_Code:      'Código fuente'
+};
+
diff --git a/static/wymeditor/lang/fa.js b/static/wymeditor/lang/fa.js
new file mode 100644
index 0000000..9d70fcb
--- /dev/null
+++ b/static/wymeditor/lang/fa.js
@@ -0,0 +1,46 @@
+//Translation To Persian: Ghassem Tofighi (http://ght.ir)
+WYMeditor.STRINGS['fa'] = {
+    Strong:           'پررنگ',//Strong
+    Emphasis:         'ایتالیک',//Emphasis
+    Superscript:      'بالانويس‌ ',//Superscript
+    Subscript:        'زيرنويس‌',//Subscript
+    Ordered_List:     'لیست مرتب',//Ordered List
+    Unordered_List:   'لیست نامرتب',//Unordered List
+    Indent:           'افزودن دندانه',//Indent
+    Outdent:          'کاهش دندانه',//Outdent
+    Undo:             'واگردانی',//Undo
+    Redo:             'تکرار',//Redo
+    Link:             'ساختن پیوند',//Link
+    Unlink:           'برداشتن پیوند',//Unlink
+    Image:            'تصویر',//Image
+    Table:            'جدول',//Table
+    HTML:             'HTML',//HTML
+    Paragraph:        'پاراگراف',//Paragraph
+    Heading_1:        'سرتیتر ۱',//Heading 1
+    Heading_2:        'سرتیتر ۲',//Heading 2
+    Heading_3:        'سرتیتر ۳',//Heading 3
+    Heading_4:        'سرتیتر ۴',//Heading 4
+    Heading_5:        'سرتیتر ۵',//Heading 5
+    Heading_6:        'سرتیتر ۶',//Heading 6
+    Preformatted:     'قالب آماده',//Preformatted
+    Blockquote:       'نقل قول',//Blockquote
+    Table_Header:     'سرجدول',//Table Header
+    URL:              'آدرس اینترنتی',//URL
+    Title:            'عنوان',//Title
+    Alternative_Text: 'متن جایگزین',//Alternative text
+    Caption:          'عنوان',//Caption
+    Summary:          'خلاصه',//Summary
+    Number_Of_Rows:   'تعداد سطرها',//Number of rows
+    Number_Of_Cols:   'تعداد ستون‌ها',//Number of cols
+    Submit:           'فرستادن',//Submit
+    Cancel:           'لغو',//Cancel
+    Choose:           'انتخاب',//Choose
+    Preview:          'پیش‌نمایش',//Preview
+    Paste_From_Word:  'انتقال از ورد',//Paste from Word
+    Tools:            'ابزار',//Tools
+    Containers:       '‌قالب‌ها',//Containers
+    Classes:          'کلاس‌ها',//Classes
+    Status:           'وضعیت',//Status
+    Source_Code:      'کد مبدأ'//Source code
+};
+
diff --git a/static/wymeditor/lang/fi.js b/static/wymeditor/lang/fi.js
new file mode 100644
index 0000000..fe1eab4
--- /dev/null
+++ b/static/wymeditor/lang/fi.js
@@ -0,0 +1,44 @@
+WYMeditor.STRINGS['fi'] = {
+    Strong:           'Lihavoitu',
+    Emphasis:         'Korostus',
+    Superscript:      'Yläindeksi',
+    Subscript:        'Alaindeksi',
+    Ordered_List:     'Numeroitu lista',
+    Unordered_List:   'Luettelomerkit',
+    Indent:           'Suurenna sisennystä',
+    Outdent:          'Pienennä sisennystä',
+    Undo:             'Kumoa',
+    Redo:             'Toista',
+    Link:             'Linkitä',
+    Unlink:           'Poista linkitys',
+    Image:            'Kuva',
+    Table:            'Taulukko',
+    HTML:             'HTML',
+    Paragraph:        'Kappale',
+    Heading_1:        'Otsikko 1',
+    Heading_2:        'Otsikko 2',
+    Heading_3:        'Otsikko 3',
+    Heading_4:        'Otsikko 4',
+    Heading_5:        'Otsikko 5',
+    Heading_6:        'Otsikko 6',
+    Preformatted:     'Esimuotoilu',
+    Blockquote:       'Sitaatti',
+    Table_Header:     'Taulukon otsikko',
+    URL:              'URL',
+    Title:            'Otsikko',
+    Alternative_Text: 'Vaihtoehtoinen teksti',
+    Caption:          'Kuvateksti',
+    Summary:          'Yhteenveto',
+    Number_Of_Rows:   'Rivien määrä',
+    Number_Of_Cols:   'Palstojen määrä',
+    Submit:           'Lähetä',
+    Cancel:           'Peruuta',
+    Choose:           'Valitse',
+    Preview:          'Esikatsele',
+    Paste_From_Word:  'Tuo Wordista',
+    Tools:            'Työkalut',
+    Containers:       'Muotoilut',
+    Classes:          'Luokat',
+    Status:           'Tila',
+    Source_Code:      'Lähdekoodi'
+};
diff --git a/static/wymeditor/lang/fr.js b/static/wymeditor/lang/fr.js
new file mode 100644
index 0000000..9b6deb9
--- /dev/null
+++ b/static/wymeditor/lang/fr.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['fr'] = {
+    Strong:           'Mise en évidence',
+    Emphasis:         'Emphase',
+    Superscript:      'Exposant',
+    Subscript:        'Indice',
+    Ordered_List:     'Liste Ordonnée',
+    Unordered_List:   'Liste Non-Ordonnée',
+    Indent:           'Imbriqué',
+    Outdent:          'Non-imbriqué',
+    Undo:             'Annuler',
+    Redo:             'Rétablir',
+    Link:             'Lien',
+    Unlink:           'Supprimer le Lien',
+    Image:            'Image',
+    Table:            'Tableau',
+    HTML:             'HTML',
+    Paragraph:        'Paragraphe',
+    Heading_1:        'Titre 1',
+    Heading_2:        'Titre 2',
+    Heading_3:        'Titre 3',
+    Heading_4:        'Titre 4',
+    Heading_5:        'Titre 5',
+    Heading_6:        'Titre 6',
+    Preformatted:     'Préformaté',
+    Blockquote:       'Citation',
+    Table_Header:     'Cellule de titre',
+    URL:              'URL',
+    Title:            'Titre',
+    Alternative_Text: 'Texte alternatif',
+    Caption:          'Légende',
+    Summary:          'Résumé',
+    Number_Of_Rows:   'Nombre de lignes',
+    Number_Of_Cols:   'Nombre de colonnes',
+    Submit:           'Envoyer',
+    Cancel:           'Annuler',
+    Choose:           'Choisir',
+    Preview:          'Prévisualisation',
+    Paste_From_Word:  'Coller depuis Word',
+    Tools:            'Outils',
+    Containers:       'Type de texte',
+    Classes:          'Type de contenu',
+    Status:           'Infos',
+    Source_Code:      'Code source'
+};
+
diff --git a/static/wymeditor/lang/he.js b/static/wymeditor/lang/he.js
new file mode 100644
index 0000000..97c9675
--- /dev/null
+++ b/static/wymeditor/lang/he.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['he'] = {
+    Strong:           'חזק',
+    Emphasis:         'מובלט',
+    Superscript:      'כתב עילי',
+    Subscript:        'כתב תחתי',
+    Ordered_List:     'רשימה ממוספרת',
+    Unordered_List:   'רשימה לא ממוספרת',
+    Indent:           'הזחה פנימה',
+    Outdent:          'הזחה החוצה',
+    Undo:             'בטל פעולה',
+    Redo:             'בצע מחדש פעולה',
+    Link:             'קישור',
+    Unlink:           'בטל קישור',
+    Image:            'תמונה',
+    Table:            'טבלה',
+    HTML:             'קוד HTML',
+    Paragraph:        'פסקה',
+    Heading_1:        'כותרת 1 ; תג <h1>',
+    Heading_2:        'כותרת 2 ; תג <h2>',
+    Heading_3:        'כותרת 3 ; תג <h3>',
+    Heading_4:        'כותרת 4 ; תג <h4>',
+    Heading_5:        'כותרת 5 ; תג <h5>',
+    Heading_6:        'כותרת 6 ; תג <h6>',
+    Preformatted:     'משמר רווחים',
+    Blockquote:       'ציטוט',
+    Table_Header:     'כותרת טבלה',
+    URL:              'קישור (URL)',
+    Title:            'כותרת',
+    Alternative_Text: 'טקסט חלופי',
+    Caption:          'כותרת',
+    Summary:          'סיכום',
+    Number_Of_Rows:   'מספר שורות',
+    Number_Of_Cols:   'מספר טורים',
+    Submit:           'שלח',
+    Cancel:           'בטל',
+    Choose:           'בחר',
+    Preview:          'תצוגה מקדימה',
+    Paste_From_Word:  'העתק מ-Word',
+    Tools:            'כלים',
+    Containers:       'מיכלים',
+    Classes:          'מחלקות',
+    Status:           'מצב',
+    Source_Code:      'קוד מקור'
+};
+
diff --git a/static/wymeditor/lang/hr.js b/static/wymeditor/lang/hr.js
new file mode 100644
index 0000000..193e31a
--- /dev/null
+++ b/static/wymeditor/lang/hr.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['hr'] = {
+    Strong:           'Podebljano',
+    Emphasis:         'Naglašeno',
+    Superscript:      'Iznad',
+    Subscript:        'Ispod',
+    Ordered_List:     'Pobrojana lista',
+    Unordered_List:   'Nepobrojana lista',
+    Indent:           'Uvuci',
+    Outdent:          'Izvuci',
+    Undo:             'Poništi promjenu',
+    Redo:             'Ponovno promjeni',
+    Link:             'Hiperveza',
+    Unlink:           'Ukloni hipervezu',
+    Image:            'Slika',
+    Table:            'Tablica',
+    HTML:             'HTML',
+    Paragraph:        'Paragraf',
+    Heading_1:        'Naslov 1',
+    Heading_2:        'Naslov 2',
+    Heading_3:        'Naslov 3',
+    Heading_4:        'Naslov 4',
+    Heading_5:        'Naslov 5',
+    Heading_6:        'Naslov 6',
+    Preformatted:     'Unaprijed formatirano',
+    Blockquote:       'Citat',
+    Table_Header:     'Zaglavlje tablice',
+    URL:              'URL',
+    Title:            'Naslov',
+    Alternative_Text: 'Alternativni tekst',
+    Caption:          'Zaglavlje',
+    Summary:          'Sažetak',
+    Number_Of_Rows:   'Broj redova',
+    Number_Of_Cols:   'Broj kolona',
+    Submit:           'Snimi',
+    Cancel:           'Odustani',
+    Choose:           'Izaberi',
+    Preview:          'Pregled',
+    Paste_From_Word:  'Zalijepi iz Word-a',
+    Tools:            'Alati',
+    Containers:       'Kontejneri',
+    Classes:          'Klase',
+    Status:           'Status',
+    Source_Code:      'Izvorni kod'
+};
+
diff --git a/static/wymeditor/lang/hu.js b/static/wymeditor/lang/hu.js
new file mode 100644
index 0000000..a8cdbc6
--- /dev/null
+++ b/static/wymeditor/lang/hu.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['hu'] = {
+    Strong:           'Félkövér',
+    Emphasis:         'Kiemelt',
+    Superscript:      'Felső index',
+    Subscript:        'Alsó index',
+    Ordered_List:     'Rendezett lista',
+    Unordered_List:   'Rendezetlen lista',
+    Indent:           'Bekezdés',
+    Outdent:          'Bekezdés törlése',
+    Undo:             'Visszavon',
+    Redo:             'Visszaállít',
+    Link:             'Link',
+    Unlink:           'Link törlése',
+    Image:            'Kép',
+    Table:            'Tábla',
+    HTML:             'HTML',
+    Paragraph:        'Bekezdés',
+    Heading_1:        'Címsor 1',
+    Heading_2:        'Címsor 2',
+    Heading_3:        'Címsor 3',
+    Heading_4:        'Címsor 4',
+    Heading_5:        'Címsor 5',
+    Heading_6:        'Címsor 6',
+    Preformatted:     'Előformázott',
+    Blockquote:       'Idézet',
+    Table_Header:     'Tábla Fejléc',
+    URL:              'Webcím',
+    Title:            'Megnevezés',
+    Alternative_Text: 'Alternatív szöveg',
+    Caption:          'Fejléc',
+    Summary:          'Összefoglaló',
+    Number_Of_Rows:   'Sorok száma',
+    Number_Of_Cols:   'Oszlopok száma',
+    Submit:           'Elküld',
+    Cancel:           'Mégsem',
+    Choose:           'Választ',
+    Preview:          'Előnézet',
+    Paste_From_Word:  'Beillesztés Wordből',
+    Tools:            'Eszközök',
+    Containers:       'Tartalmak',
+    Classes:          'Osztályok',
+    Status:           'Állapot',
+    Source_Code:      'Forráskód'
+};
+
diff --git a/static/wymeditor/lang/it.js b/static/wymeditor/lang/it.js
new file mode 100644
index 0000000..ca632a9
--- /dev/null
+++ b/static/wymeditor/lang/it.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['it'] = {
+    Strong:           'Grassetto',
+    Emphasis:         'Corsivo',
+    Superscript:      'Apice',
+    Subscript:        'Pedice',
+    Ordered_List:     'Lista Ordinata',
+    Unordered_List:   'Lista Puntata',
+    Indent:           'Indenta',
+    Outdent:          'Riduci rientro',
+    Undo:             'Indietro',
+    Redo:             'Avanti',
+    Link:             'Inserisci Link',
+    Unlink:           'Togli Link',
+    Image:            'Inserisci Immagine',
+    Table:            'Inserisci Tabella',
+    HTML:             'HTML',
+    Paragraph:        'Paragrafo',
+    Heading_1:        'Intestazione 1',
+    Heading_2:        'Intestazione 2',
+    Heading_3:        'Intestazione 3',
+    Heading_4:        'Intestazione 4',
+    Heading_5:        'Intestazione 5',
+    Heading_6:        'Intestazione 6',
+    Preformatted:     'Preformattato',
+    Blockquote:       'Citazione',
+    Table_Header:     'Header Tabella',
+    URL:              'Indirizzo',
+    Title:            'Titolo',
+    Alternative_Text: 'Testo Alternativo',
+    Caption:          'Didascalia',
+    Summary:          'Sommario',
+    Number_Of_Rows:   'Numero di Righe',
+    Number_Of_Cols:   'Numero di Colonne',
+    Submit:           'Invia',
+    Cancel:           'Annulla',
+    Choose:           'Scegli',
+    Preview:          'Anteprima',
+    Paste_From_Word:  'Incolla',
+    Tools:            'Strumenti',
+    Containers:       'Contenitori',
+    Classes:          'Classi',
+    Status:           'Stato',
+    Source_Code:      'Codice Sorgente'
+};
+
diff --git a/static/wymeditor/lang/nb.js b/static/wymeditor/lang/nb.js
new file mode 100644
index 0000000..7573b78
--- /dev/null
+++ b/static/wymeditor/lang/nb.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['nb'] = {
+    Strong:           'Fet',
+    Emphasis:         'Uthevet',
+    Superscript:      'Opphøyet',
+    Subscript:        'Nedsenket',
+    Ordered_List:     'Nummerert liste',
+    Unordered_List:   'Punktliste',
+    Indent:           'Rykk inn',
+    Outdent:          'Rykk ut',
+    Undo:             'Angre',
+    Redo:             'Gjenta',
+    Link:             'Lenke',
+    Unlink:           'Ta bort lenken',
+    Image:            'Bilde',
+    Table:            'Tabell',
+    HTML:             'HTML',
+    Paragraph:        'Avsnitt',
+    Heading_1:        'Overskrift 1',
+    Heading_2:        'Overskrift 2',
+    Heading_3:        'Overskrift 3',
+    Heading_4:        'Overskrift 4',
+    Heading_5:        'Overskrift 5',
+    Heading_6:        'Overskrift 6',
+    Preformatted:     'Preformatert',
+    Blockquote:       'Sitat',
+    Table_Header:     'Tabelloverskrift',
+    URL:              'URL',
+    Title:            'Tittel',
+    Alternative_Text: 'Alternativ tekst',
+    Caption:          'Overskrift',
+    Summary:          'Sammendrag',
+    Number_Of_Rows:   'Antall rader',
+    Number_Of_Cols:   'Antall kolonner',
+    Submit:           'Ok',
+    Cancel:           'Avbryt',
+    Choose:           'Velg',
+    Preview:          'Forhåndsvis',
+    Paste_From_Word:  'Lim inn fra Word',
+    Tools:            'Verktøy',
+    Containers:       'Formatering',
+    Classes:          'Klasser',
+    Status:           'Status',
+    Source_Code:      'Kildekode'
+};
+
diff --git a/static/wymeditor/lang/nl.js b/static/wymeditor/lang/nl.js
new file mode 100644
index 0000000..cdfa21c
--- /dev/null
+++ b/static/wymeditor/lang/nl.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['nl'] = {
+    Strong:           'Sterk benadrukken',
+    Emphasis:         'Benadrukken',
+    Superscript:      'Bovenschrift',
+    Subscript:        'Onderschrift',
+    Ordered_List:     'Geordende lijst',
+    Unordered_List:   'Ongeordende lijst',
+    Indent:           'Inspringen',
+    Outdent:          'Terugspringen',
+    Undo:             'Ongedaan maken',
+    Redo:             'Opnieuw uitvoeren',
+    Link:             'Linken',
+    Unlink:           'Ontlinken',
+    Image:            'Afbeelding',
+    Table:            'Tabel',
+    HTML:             'HTML',
+    Paragraph:        'Paragraaf',
+    Heading_1:        'Kop 1',
+    Heading_2:        'Kop 2',
+    Heading_3:        'Kop 3',
+    Heading_4:        'Kop 4',
+    Heading_5:        'Kop 5',
+    Heading_6:        'Kop 6',
+    Preformatted:     'Voorgeformatteerd',
+    Blockquote:       'Citaat',
+    Table_Header:     'Tabel-kop',
+    URL:              'URL',
+    Title:            'Titel',
+    Alternative_Text: 'Alternatieve tekst',
+    Caption:          'Bijschrift',
+    Summary:          'Samenvatting',
+    Number_Of_Rows:   'Aantal rijen',
+    Number_Of_Cols:   'Aantal kolommen',
+    Submit:           'Versturen',
+    Cancel:           'Annuleren',
+    Choose:           'Kiezen',
+    Preview:          'Voorbeeld bekijken',
+    Paste_From_Word:  'Plakken uit Word',
+    Tools:            'Hulpmiddelen',
+    Containers:       'Teksttypes',
+    Classes:          'Klassen',
+    Status:           'Status',
+    Source_Code:      'Broncode'
+};
+
diff --git a/static/wymeditor/lang/nn.js b/static/wymeditor/lang/nn.js
new file mode 100644
index 0000000..51cec2b
--- /dev/null
+++ b/static/wymeditor/lang/nn.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['nn'] = {
+    Strong:           'Feit',
+    Emphasis:         'Utheva',
+    Superscript:      'Opphøgd',
+    Subscript:        'Nedsenka',
+    Ordered_List:     'Nummerert liste',
+    Unordered_List:   'Punktliste',
+    Indent:           'Rykk inn',
+    Outdent:          'Rykk ut',
+    Undo:             'Angre',
+    Redo:             'Gjentaka',
+    Link:             'Lenkje',
+    Unlink:           'Ta bort lenkja',
+    Image:            'Bilete',
+    Table:            'Tabell',
+    HTML:             'HTML',
+    Paragraph:        'Avsnitt',
+    Heading_1:        'Overskrift 1',
+    Heading_2:        'Overskrift 2',
+    Heading_3:        'Overskrift 3',
+    Heading_4:        'Overskrift 4',
+    Heading_5:        'Overskrift 5',
+    Heading_6:        'Overskrift 6',
+    Preformatted:     'Preformatert',
+    Blockquote:       'Sitat',
+    Table_Header:     'Tabelloverskrift',
+    URL:              'URL',
+    Title:            'Tittel',
+    Alternative_Text: 'Alternativ tekst',
+    Caption:          'Overskrift',
+    Summary:          'Samandrag',
+    Number_Of_Rows:   'Tal på rader',
+    Number_Of_Cols:   'Tal på kolonnar',
+    Submit:           'Ok',
+    Cancel:           'Avbryt',
+    Choose:           'Vel',
+    Preview:          'Førehandsvis',
+    Paste_From_Word:  'Lim inn frå Word',
+    Tools:            'Verkty',
+    Containers:       'Formatering',
+    Classes:          'Klassar',
+    Status:           'Status',
+    Source_Code:      'Kjeldekode'
+};
+
diff --git a/static/wymeditor/lang/pl.js b/static/wymeditor/lang/pl.js
new file mode 100644
index 0000000..d6c0471
--- /dev/null
+++ b/static/wymeditor/lang/pl.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['pl'] = {
+    Strong:           'Nacisk',
+    Emphasis:         'Emfaza',
+    Superscript:      'Indeks górny',
+    Subscript:        'Indeks dolny',
+    Ordered_List:     'Lista numerowana',
+    Unordered_List:   'Lista wypunktowana',
+    Indent:           'Zwiększ wcięcie',
+    Outdent:          'Zmniejsz wcięcie',
+    Undo:             'Cofnij',
+    Redo:             'Ponów',
+    Link:             'Wstaw link',
+    Unlink:           'Usuń link',
+    Image:            'Obraz',
+    Table:            'Tabela',
+    HTML:             'Źródło HTML',
+    Paragraph:        'Akapit',
+    Heading_1:        'Nagłówek 1',
+    Heading_2:        'Nagłówek 2',
+    Heading_3:        'Nagłówek 3',
+    Heading_4:        'Nagłówek 4',
+    Heading_5:        'Nagłówek 5',
+    Heading_6:        'Nagłówek 6',
+    Preformatted:     'Preformatowany',
+    Blockquote:       'Cytat blokowy',
+    Table_Header:     'Nagłówek tabeli',
+    URL:              'URL',
+    Title:            'Tytuł',
+    Alternative_Text: 'Tekst alternatywny',
+    Caption:          'Tytuł tabeli',
+    Summary:          'Podsumowanie',
+    Number_Of_Rows:   'Liczba wierszy',
+    Number_Of_Cols:   'Liczba kolumn',
+    Submit:           'Wyślij',
+    Cancel:           'Anuluj',
+    Choose:           'Wybierz',
+    Preview:          'Podgląd',
+    Paste_From_Word:  'Wklej z Worda',
+    Tools:            'Narzędzia',
+    Containers:       'Format',
+    Classes:          'Styl',
+    Status:           'Status',
+    Source_Code:      'Kod źródłowy'
+};
+
diff --git a/static/wymeditor/lang/pt-br.js b/static/wymeditor/lang/pt-br.js
new file mode 100644
index 0000000..2ec18fe
--- /dev/null
+++ b/static/wymeditor/lang/pt-br.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['pt-br'] = {
+    Strong:           'Ressaltar',
+    Emphasis:         'Enfatizar',
+    Superscript:      'Sobrescrito',
+    Subscript:        'Subscrito',
+    Ordered_List:     'Lista ordenada',
+    Unordered_List:   'Lista desordenada',
+    Indent:           'Indentado',
+    Outdent:          'Desindentar',
+    Undo:             'Desfazer',
+    Redo:             'Refazer',
+    Link:             'Link',
+    Unlink:           'Remover Link',
+    Image:            'Imagem',
+    Table:            'Tabela',
+    HTML:             'HTML',
+    Paragraph:        'Parágrafo',
+    Heading_1:        'Título 1',
+    Heading_2:        'Título 2',
+    Heading_3:        'Título 3',
+    Heading_4:        'Título 4',
+    Heading_5:        'Título 5',
+    Heading_6:        'Título 6',
+    Preformatted:     'Pré-formatado',
+    Blockquote:       'Citação',
+    Table_Header:     'Título de tabela',
+    URL:              'URL',
+    Title:            'Título',
+    Alternative_Text: 'Texto alternativo',
+    Caption:          'Legenda',
+    Summary:          'Resumo',
+    Number_Of_Rows:   'Número de linhas',
+    Number_Of_Cols:   'Número de colunas',
+    Submit:           'Enviar',
+    Cancel:           'Cancelar',
+    Choose:           'Selecionar',
+    Preview:          'Previsualizar',
+    Paste_From_Word:  'Colar do Word',
+    Tools:            'Ferramentas',
+    Containers:       'Contêineres',
+    Classes:          'Classes',
+    Status:           'Estado',
+    Source_Code:      'Código fonte'
+};
+
diff --git a/static/wymeditor/lang/pt.js b/static/wymeditor/lang/pt.js
new file mode 100644
index 0000000..a3d1a17
--- /dev/null
+++ b/static/wymeditor/lang/pt.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['pt'] = {
+    Strong:           'Negrito',
+    Emphasis:         'Itálico',
+    Superscript:      'Sobrescrito',
+    Subscript:        'Subscrito',
+    Ordered_List:     'Lista Numerada',
+    Unordered_List:   'Lista Marcada',
+    Indent:           'Aumentar Indentação',
+    Outdent:          'Diminuir Indentação',
+    Undo:             'Desfazer',
+    Redo:             'Restaurar',
+    Link:             'Link',
+    Unlink:           'Tirar link',
+    Image:            'Imagem',
+    Table:            'Tabela',
+    HTML:             'HTML',
+    Paragraph:        'Parágrafo',
+    Heading_1:        'Título 1',
+    Heading_2:        'Título 2',
+    Heading_3:        'Título 3',
+    Heading_4:        'Título 4',
+    Heading_5:        'Título 5',
+    Heading_6:        'Título 6',
+    Preformatted:     'Pré-formatado',
+    Blockquote:       'Citação',
+    Table_Header:     'Cabeçalho Tabela',
+    URL:              'URL',
+    Title:            'Título',
+    Alternative_Text: 'Texto Alternativo',
+    Caption:          'Título Tabela',
+    Summary:          'Resumo',
+    Number_Of_Rows:   'Número de Linhas',
+    Number_Of_Cols:   'Número de Colunas',
+    Submit:           'Enviar',
+    Cancel:           'Cancelar',
+    Choose:           'Escolha',
+    Preview:          'Prever',
+    Paste_From_Word:  'Colar do Word',
+    Tools:            'Ferramentas',
+    Containers:       'Containers',
+    Classes:          'Classes',
+    Status:           'Status',
+    Source_Code:      'Código Fonte'
+};
+
diff --git a/static/wymeditor/lang/ru.js b/static/wymeditor/lang/ru.js
new file mode 100644
index 0000000..7895f8d
--- /dev/null
+++ b/static/wymeditor/lang/ru.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['ru'] = {
+    Strong:           'Жирный',
+    Emphasis:         'Наклонный',
+    Superscript:      'Надстрочный',
+    Subscript:        'Подстрочный',
+    Ordered_List:     'Нумерованный список',
+    Unordered_List:   'Ненумерованный список',
+    Indent:           'Увеличить отступ',
+    Outdent:          'Уменьшить отступ',
+    Undo:             'Отменить',
+    Redo:             'Повторить',
+    Link:             'Ссылка',
+    Unlink:           'Удалить ссылку',
+    Image:            'Изображение',
+    Table:            'Таблица',
+    HTML:             'Править HTML',
+    Paragraph:        'Параграф',
+    Heading_1:        'Заголовок 1',
+    Heading_2:        'Заголовок 2',
+    Heading_3:        'Заголовок 3',
+    Heading_4:        'Заголовок 4',
+    Heading_5:        'Заголовок 5',
+    Heading_6:        'Заголовок 6',
+    Preformatted:     'Преформатированный',
+    Blockquote:       'Цитата',
+    Table_Header:     'Заголовок таблицы',
+    URL:              'URL',
+    Title:            'Заголовок',
+    Alternative_Text: 'Альтернативный текст',
+    Caption:          'Надпись',
+    Summary:          'Сводка',
+    Number_Of_Rows:   'Кол-во строк',
+    Number_Of_Cols:   'Кол-во столбцов',
+    Submit:           'Отправить',
+    Cancel:           'Отмена',
+    Choose:           'Выбор',
+    Preview:          'Просмотр',
+    Paste_From_Word:  'Вставить из Word',
+    Tools:            'Инструменты',
+    Containers:       'Контейнеры',
+    Classes:          'Классы',
+    Status:           'Статус',
+    Source_Code:      'Исходный код'
+};
+
diff --git a/static/wymeditor/lang/sv.js b/static/wymeditor/lang/sv.js
new file mode 100644
index 0000000..bc5485c
--- /dev/null
+++ b/static/wymeditor/lang/sv.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['sv'] = {
+    Strong:           'Viktigt',
+    Emphasis:         'Betoning',
+    Superscript:      'Upphöjt',
+    Subscript:        'Nedsänkt',
+    Ordered_List:     'Nummerlista',
+    Unordered_List:   'Punktlista',
+    Indent:           'Indrag',
+    Outdent:          'Utdrag',
+    Undo:             'Ångra',
+    Redo:             'Gör om',
+    Link:             'Länk',
+    Unlink:           'Ta bort länk',
+    Image:            'Bild',
+    Table:            'Tabell',
+    HTML:             'HTML',
+    Paragraph:        'Paragraf',
+    Heading_1:        'Rubrik 1',
+    Heading_2:        'Rubrik 2',
+    Heading_3:        'Rubrik 3',
+    Heading_4:        'Rubrik 4',
+    Heading_5:        'Rubrik 5',
+    Heading_6:        'Rubrik 6',
+    Preformatted:     'Förformaterad',
+    Blockquote:       'Blockcitat',
+    Table_Header:     'Tabellrubrik',
+    URL:              'URL',
+    Title:            'Titel',
+    Alternative_Text: 'Alternativ text',
+    Caption:          'Överskrift',
+    Summary:          'Sammanfattning',
+    Number_Of_Rows:   'Antal rader',
+    Number_Of_Cols:   'Antal kolumner',
+    Submit:           'Skicka',
+    Cancel:           'Avbryt',
+    Choose:           'Välj',
+    Preview:          'Förhandsgranska',
+    Paste_From_Word:  'Klistra in från Word',
+    Tools:            'Verktyg',
+    Containers:       'Formatering',
+    Classes:          'Klasser',
+    Status:           'Status',
+    Source_Code:      'Källkod'
+};
+
diff --git a/static/wymeditor/lang/tr.js b/static/wymeditor/lang/tr.js
new file mode 100644
index 0000000..d26f0ff
--- /dev/null
+++ b/static/wymeditor/lang/tr.js
@@ -0,0 +1,45 @@
+WYMeditor.STRINGS['tr'] = {
+    Strong:           'Kalın',
+    Emphasis:         'Vurgu',
+    Superscript:      'Üst simge',
+    Subscript:        'Alt simge',
+    Ordered_List:     'Sıralı Liste',
+    Unordered_List:   'Sırasız Liste',
+    Indent:           'İçerlek',
+    Outdent:          'Çıkıntılı',
+    Undo:             'Geri Al',
+    Redo:             'Yinele',
+    Link:             'Bağlantı',
+    Unlink:           'Bağlantıyı Kaldır',
+    Image:            'İmaj',
+    Table:            'Tablo',
+    HTML:             'HTML',
+    Paragraph:        'Paragraf',
+    Heading_1:        'Başlık 1',
+    Heading_2:        'Başlık 2',
+    Heading_3:        'Başlık 3',
+    Heading_4:        'Başlık 4',
+    Heading_5:        'Başlık 5',
+    Heading_6:        'Başlık 6',
+    Preformatted:     'Önceden Formatlı',
+    Blockquote:       'Alıntı',
+    Table_Header:     'Tablo Başlığı',
+    URL:              'URL',
+    Title:            'Başlık',
+    Alternative_Text: 'Alternatif Metin',
+    Caption:          'Etiket',
+    Summary:          'Özet',
+    Number_Of_Rows:   'Satır sayısı',
+    Number_Of_Cols:   'Sütun sayısı',
+    Submit:           'Gönder',
+    Cancel:           'İptal',
+    Choose:           'Seç',
+    Preview:          'Önizleme',
+    Paste_From_Word:  'Word\'den yapıştır',
+    Tools:            'Araçlar',
+    Containers:       'Kapsayıcılar',
+    Classes:          'Sınıflar',
+    Status:           'Durum',
+    Source_Code:      'Kaynak Kodu'
+};
+
diff --git a/static/wymeditor/lang/zh_cn.js b/static/wymeditor/lang/zh_cn.js
new file mode 100644
index 0000000..72f5aaf
--- /dev/null
+++ b/static/wymeditor/lang/zh_cn.js
@@ -0,0 +1,47 @@
+WYMeditor.STRINGS['zh_cn'] = {
+    Strong: '加粗',
+    Emphasis: '斜体',
+    Superscript: '上标',
+    Subscript: '下标',
+    Ordered_List: '有序列表',
+    Unordered_List: '无序列表',
+    Indent: '增加缩进',
+    Outdent: '减少缩进',
+    Undo: '撤消',
+    Redo: '重做',
+    Link: '链接',
+    Unlink: '取消链接',
+    Image: '图片',
+    Table: '表格',
+    HTML: 'HTML源代码',
+    Paragraph: '段落',
+    Heading_1: '标题 1',
+    Heading_2: '标题 2',
+    Heading_3: '标题 3',
+    Heading_4: '标题 4',
+    Heading_5: '标题 5',
+    Heading_6: '标题 6',
+    Preformatted: '原始文本',
+    Blockquote: '引语',
+    Table_Header: '表头',
+    URL: '地址',
+    Title: '提示文字',
+    Alternative_Text: '失效文字',
+    Caption: '标题',
+    Summary: '摘要',
+    Number_Of_Rows: '行数',
+    Number_Of_Cols: '列数',
+    Submit: '提交',
+    Cancel: '放弃',
+    Choose: '选择',
+    Preview: '预览',
+    Paste_From_Word: '从Word粘贴纯文本',
+    Tools: '工具',
+    Containers: '容器',
+    Classes: '预定义样式',
+    Status: '状态',
+    Source_Code: '源代码',
+    Attachment: '附件',
+    NewParagraph: '新段落'
+};
+
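Each of the language packs above follows the same shape: one script per locale that registers a flat table of UI labels under WYMeditor.STRINGS, keyed by the locale code. A minimal sketch of how a page opts into one of them, assuming WYMeditor's standard jQuery-plugin initialization ('lang' is the plugin's locale option; the selector is hypothetical):

    // lang/fr.js must be loaded after the WYMeditor plugin so that
    // WYMeditor.STRINGS['fr'] exists before the editor initializes.
    jQuery('textarea.page-editor').wymeditor({
        lang: 'fr'  // UI labels are looked up in WYMeditor.STRINGS['fr']
    });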
diff --git a/static/wymeditor/skins/default/icons.png b/static/wymeditor/skins/default/icons.png
new file mode 100644
index 0000000..c6eb463
Binary files /dev/null and b/static/wymeditor/skins/default/icons.png differ
diff --git a/static/wymeditor/skins/default/skin.css b/static/wymeditor/skins/default/skin.css
new file mode 100644
index 0000000..eb4680f
--- /dev/null
+++ b/static/wymeditor/skins/default/skin.css
@@ -0,0 +1,133 @@
+/*
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *      http://www.wymeditor.org/
+ *
+ * File Name:
+ *      skin.css
+ *      main stylesheet for the default WYMeditor skin
+ *      See the documentation for more info.
+ *
+ * File Authors:
+ *      Daniel Reszka (d.reszka a-t wymeditor dotorg)
+*/
+
+/*TRYING TO RESET STYLES THAT MAY INTERFERE WITH WYMEDITOR*/
+        .wym_skin_default p, .wym_skin_default h2, .wym_skin_default h3,
+        .wym_skin_default ul, .wym_skin_default li { background: transparent url(); margin: 0; padding: 0; border-width:0; list-style: none; }
+
+
+/*HIDDEN BY DEFAULT*/
+        .wym_skin_default .wym_area_left          { display: none; }
+        .wym_skin_default .wym_area_right         { display: block; }
+
+
+/*TYPO*/    
+        .wym_skin_default                { font-size: 62.5%; font-family: Verdana, Arial, sans-serif; }
+        .wym_skin_default h2             { font-size: 110%; /* = 11px */}
+        .wym_skin_default h3             { font-size: 100%; /* = 10px */}
+        .wym_skin_default li             { font-size: 100%; /* = 10px */}
+
+
+/*WYM_BOX*/
+        .wym_skin_default                { border: 1px solid gray; background: #f2f2f2; padding: 5px}
+
+    /*auto-clear the wym_box*/
+        .wym_skin_default:after 	        { content: "."; display: block; height: 0; clear: both; visibility: hidden; }
+ * html .wym_skin_default                { height: 1%;}
+
+
+/*WYM_HTML*/
+        .wym_skin_default .wym_html               { width: 98%;}
+        .wym_skin_default .wym_html textarea      { width: 100%; height: 200px; border: 1px solid gray; background: white;  }
+
+
+/*WYM_IFRAME*/
+        .wym_skin_default .wym_iframe             { width: 98%;}
+        .wym_skin_default .wym_iframe iframe      { width: 100%; height: 200px; border: 1px solid gray; background: white }
+
+
+/*AREAS*/
+        .wym_skin_default .wym_area_left          { width: 150px; float: left;}
+        .wym_skin_default .wym_area_right         { width: 150px; float: right;}
+        .wym_skin_default .wym_area_bottom        { height: 1%; clear: both;}
+ * html .wym_skin_default .wym_area_main          { height: 1%;}
+ * html .wym_skin_default .wym_area_top           { height: 1%;}
+ *+html .wym_skin_default .wym_area_top           { height: 1%;}
+
+/*SECTIONS SYSTEM*/
+
+    /*common defaults for all sections*/
+        .wym_skin_default .wym_section            { margin-bottom: 5px; }
+        .wym_skin_default .wym_section h2,
+        .wym_skin_default .wym_section h3         { padding: 1px 3px; margin: 0; }
+        .wym_skin_default .wym_section a          { padding: 0 3px; display: block; text-decoration: none; color: black; }
+        .wym_skin_default .wym_section a:hover    { background-color: yellow; }
+      /*hide section titles by default*/
+        .wym_skin_default .wym_section h2         { display: none; }
+      /*disable any margin-collapse*/
+        .wym_skin_default .wym_section            { padding-top: 1px; padding-bottom: 1px; }    
+      /*auto-clear sections*/
+        .wym_skin_default .wym_section ul:after   { content: "."; display: block; height: 0; clear: both; visibility: hidden; }
+ * html .wym_skin_default .wym_section ul         { height: 1%;}
+
+    /*option: add this class to a section to make it render as a panel*/
+        .wym_skin_default .wym_panel              { }
+        .wym_skin_default .wym_panel h2           { display: block; }
+
+    /*option: add this class to a section to make it render as a dropdown menu*/
+        .wym_skin_default .wym_dropdown h2        { display: block; }
+        .wym_skin_default .wym_dropdown ul        { display: none; position: absolute; background: white; }
+        .wym_skin_default .wym_dropdown:hover ul,
+        .wym_skin_default .wym_dropdown.hover ul  { display: block; }
+
+    /*option: add this class to a section to make its elements render buttons (icons are only available for the wym_tools section for now)*/
+        .wym_skin_default .wym_buttons li         { float:left;}
+        .wym_skin_default .wym_buttons a          { width: 20px; height: 20px; overflow: hidden; padding: 2px }
+      /*image replacements*/
+        .wym_skin_default .wym_buttons li a                         { background: url(icons.png) no-repeat; text-indent: -9999px;} 
+        .wym_skin_default .wym_buttons li.wym_tools_strong a        { background-position: 0 -382px;}
+        .wym_skin_default .wym_buttons li.wym_tools_emphasis a      { background-position: 0 -22px;}
+        .wym_skin_default .wym_buttons li.wym_tools_superscript a   { background-position: 0 -430px;}
+        .wym_skin_default .wym_buttons li.wym_tools_subscript a     { background-position: 0 -454px;}
+        .wym_skin_default .wym_buttons li.wym_tools_ordered_list a  { background-position: 0 -48px;}
+        .wym_skin_default .wym_buttons li.wym_tools_unordered_list a{ background-position: 0 -72px;}
+        .wym_skin_default .wym_buttons li.wym_tools_indent a        { background-position: 0 -574px;}
+        .wym_skin_default .wym_buttons li.wym_tools_outdent a       { background-position: 0 -598px;}
+        .wym_skin_default .wym_buttons li.wym_tools_undo a          { background-position: 0 -502px;}
+        .wym_skin_default .wym_buttons li.wym_tools_redo a          { background-position: 0 -526px;}
+        .wym_skin_default .wym_buttons li.wym_tools_link a          { background-position: 0 -96px;}
+        .wym_skin_default .wym_buttons li.wym_tools_unlink a        { background-position: 0 -168px;}
+        .wym_skin_default .wym_buttons li.wym_tools_image a         { background-position: 0 -121px;}
+        .wym_skin_default .wym_buttons li.wym_tools_table a         { background-position: 0 -144px;}
+        .wym_skin_default .wym_buttons li.wym_tools_paste a         { background-position: 0 -552px;}
+        .wym_skin_default .wym_buttons li.wym_tools_html a          { background-position: 0 -193px;}
+        .wym_skin_default .wym_buttons li.wym_tools_preview a       { background-position: 0 -408px;}
+
+/*DECORATION*/
+        .wym_skin_default .wym_section h2             { background: #ddd; border: solid gray; border-width: 0 0 1px;}
+        .wym_skin_default .wym_section h2 span        { color: gray;}
+        .wym_skin_default .wym_panel                  { padding: 0; border: solid gray; border-width: 1px; background: white;}    
+        .wym_skin_default .wym_panel ul               { margin: 2px 0 5px; }        
+        .wym_skin_default .wym_dropdown               { padding: 0; border: solid gray; border-width: 1px 1px 0 1px; }
+        .wym_skin_default .wym_dropdown ul            { border: solid gray; border-width: 0 1px 1px 1px; margin-left: -1px; padding: 5px 10px 5px 3px;}
+        
+/*DIALOGS*/
+        .wym_dialog div.row         { margin-bottom: 5px;}
+        .wym_dialog div.row input   { margin-right: 5px;}
+        .wym_dialog div.row label   { float: left; width: 150px; display: block; text-align: right; margin-right: 10px; }
+        .wym_dialog div.row-indent  { padding-left: 160px; }
+        /*autoclearing*/        
+        .wym_dialog div.row:after            { content: "."; display: block; height: 0; clear: both; visibility: hidden; }
+        .wym_dialog div.row                  { display: inline-block; }
+            /* Hides from IE-mac \*/
+            * html .wym_dialog div.row       { height: 1%; }
+            .wym_dialog div.row              { display: block; }
+            /* End hide from IE-mac */                
+            
+/*WYMEDITOR_LINK*/
+        a.wym_wymeditor_link        { text-indent: -9999px; float: right; display: block; width: 50px; height: 15px; background: url(../wymeditor_icon.png);  overflow: hidden; text-decoration: none;  }
diff --git a/static/wymeditor/skins/default/skin.js b/static/wymeditor/skins/default/skin.js
new file mode 100644
index 0000000..3d204e0
--- /dev/null
+++ b/static/wymeditor/skins/default/skin.js
@@ -0,0 +1,40 @@
+WYMeditor.SKINS['default'] = {
+
+    init: function(wym) {
+
+        //render following sections as panels
+        jQuery(wym._box).find(wym._options.classesSelector)
+          .addClass("wym_panel");
+
+        //render following sections as buttons
+        jQuery(wym._box).find(wym._options.toolsSelector)
+          .addClass("wym_buttons");
+
+        //render following sections as dropdown menus
+        jQuery(wym._box).find(wym._options.containersSelector)
+          .addClass("wym_dropdown")
+          .find(WYMeditor.H2)
+          .append("<span> ></span>");
+
+        // auto add some margin to the main area sides if left area
+        // or right area are not empty (if they contain sections)
+        jQuery(wym._box).find("div.wym_area_right ul")
+          .parents("div.wym_area_right").show()
+          .parents(wym._options.boxSelector)
+          .find("div.wym_area_main")
+          .css({"margin-right": "155px"});
+
+        jQuery(wym._box).find("div.wym_area_left ul")
+          .parents("div.wym_area_left").show()
+          .parents(wym._options.boxSelector)
+          .find("div.wym_area_main")
+          .css({"margin-left": "155px"});
+
+        //make hover work under IE < 7
+        jQuery(wym._box).find(".wym_section").hover(function(){
+          jQuery(this).addClass("hover");
+        },function(){
+          jQuery(this).removeClass("hover");
+        });
+    }
+};
diff --git a/static/wymeditor/skins/galaxy/icons.png b/static/wymeditor/skins/galaxy/icons.png
new file mode 100644
index 0000000..8c8aed9
Binary files /dev/null and b/static/wymeditor/skins/galaxy/icons.png differ
diff --git a/static/wymeditor/skins/galaxy/skin.css b/static/wymeditor/skins/galaxy/skin.css
new file mode 100644
index 0000000..c28f032
--- /dev/null
+++ b/static/wymeditor/skins/galaxy/skin.css
@@ -0,0 +1,137 @@
+/*
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *      http://www.wymeditor.org/
+ *
+ * File Name:
+ *      skin.css
+ *      main stylesheet for the WYMeditor skin
+ *      See the documentation for more info.
+ *
+ * File Authors:
+ *      Daniel Reszka (d.reszka a-t wymeditor dotorg)
+ *      Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+*/
+
+
+/*TRYING TO RESET STYLES THAT MAY INTERFERE WITH WYMEDITOR*/
+        .wym_skin_galaxy p, .wym_skin_galaxy h2, .wym_skin_galaxy h3,
+        .wym_skin_galaxy ul, .wym_skin_galaxy li { background: transparent url(); margin: 0; padding: 0; border-width:0; list-style: none; }
+
+
+/*HIDDEN BY DEFAULT*/
+        .wym_skin_galaxy .wym_area_left          { display: none; }
+        .wym_skin_galaxy .wym_area_right         { display: none; }
+
+
+/*TYPO*/    
+        .wym_skin_galaxy                { font-size: 10px; font-family: Verdana, Arial, sans-serif; }
+        .wym_skin_galaxy h2             { font-size: 110%; /* = 11px */}
+        .wym_skin_galaxy h3             { font-size: 100%; /* = 10px */}
+        .wym_skin_galaxy li             { font-size: 100%; /* = 10px */}
+
+
+/*WYM_BOX*/
+        .wym_skin_galaxy                { border: none; padding: 5px}
+
+    /*auto-clear the wym_box*/
+        .wym_skin_galaxy:after 	 { content: "."; display: block; height: 0; clear: both; visibility: hidden; }
+ * html .wym_skin_galaxy                { height: 1%;}
+
+
+/*WYM_HTML*/
+        .wym_skin_galaxy .wym_html               { width: 98%;}
+        .wym_skin_galaxy .wym_html textarea      { font-size: 120%; width: 100%; height: 200px; border: 1px solid gray; background: white; }
+
+
+/*WYM_IFRAME*/
+        .wym_skin_galaxy .wym_iframe             { width: 100%; height: 100%; }
+        .wym_skin_galaxy .wym_iframe iframe      { width: 100%; height: 100%; border: 1px solid gray; background: white }
+
+
+/*AREAS*/
+        .wym_skin_galaxy .wym_area_left          { width: 100px; float: left;}
+        .wym_skin_galaxy .wym_area_right         { width: 150px; float: right;}
+        .wym_skin_galaxy .wym_area_bottom        { height: 1%; clear: both;}
+ * html .wym_skin_galaxy .wym_area_main          { height: 1%;}
+ * html .wym_skin_galaxy .wym_area_top           { height: 1%;}
+ *+html .wym_skin_galaxy .wym_area_top           { height: 1%;}
+
+/*SECTIONS SYSTEM*/
+
+    /*common defaults for all sections*/
+        .wym_skin_galaxy .wym_section            { margin-bottom: 5px; }
+        .wym_skin_galaxy .wym_section h2,
+        .wym_skin_galaxy .wym_section h3         { padding: 1px 3px; margin: 0; }
+        .wym_skin_galaxy .wym_section a          { padding: 0 3px; display: block; text-decoration: none; color: black; }
+        .wym_skin_galaxy .wym_section a:hover    { background-color: yellow; }
+      /*hide section titles by default*/
+        .wym_skin_galaxy .wym_section h2         { display: none; }
+      /*disable any margin-collapse*/
+        .wym_skin_galaxy .wym_section            { padding-top: 1px; padding-bottom: 1px; }    
+      /*auto-clear sections*/
+        .wym_skin_galaxy .wym_section ul:after   { content: "."; display: block; height: 0; clear: both; visibility: hidden; }
+ * html .wym_skin_galaxy .wym_section ul         { height: 1%;}
+
+    /*option: add this class to a section to make it render as a panel*/
+        .wym_skin_galaxy .wym_panel              { }
+        .wym_skin_galaxy .wym_panel h2           { display: block; }
+
+    /*option: add this class to a section to make it render as a dropdown menu*/
+        .wym_skin_galaxy .wym_dropdown h2        { display: block; }
+        .wym_skin_galaxy .wym_dropdown ul        { display: none; position: absolute; background: white; }
+        .wym_skin_galaxy .wym_dropdown:hover ul,
+        .wym_skin_galaxy .wym_dropdown.hover ul  { display: block; }
+
+    /*option: add this class to a section to make its elements render buttons (icons are only available for the wym_tools section for now)*/
+        .wym_skin_galaxy .wym_buttons li         { float:left;}
+        .wym_skin_galaxy .wym_buttons a          { width: 20px; height: 20px; overflow: hidden; padding: 2px }
+      /*image replacements*/
+        .wym_skin_galaxy .wym_buttons li a                         { background: url(icons.png) no-repeat; text-indent: -9999px;} 
+        .wym_skin_galaxy .wym_buttons li.wym_tools_strong a        { background-position: 0 -382px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_emphasis a      { background-position: 0 -22px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_superscript a   { background-position: 0 -430px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_subscript a     { background-position: 0 -454px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_ordered_list a  { background-position: 0 -48px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_unordered_list a{ background-position: 0 -72px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_indent a        { background-position: 0 -574px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_outdent a       { background-position: 0 -598px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_undo a          { background-position: 0 -502px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_redo a          { background-position: 0 -526px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_link a          { background-position: 0 -96px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_unlink a        { background-position: 0 -168px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_image a         { background-position: 0 -121px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_table a         { background-position: 0 -144px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_paste a         { background-position: 0 -552px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_html a          { background-position: 0 -193px;}
+        .wym_skin_galaxy .wym_buttons li.wym_tools_preview a       { background-position: 0 -408px;}
+        .wym_skin_galaxy .wym_buttons li.galaxy_tools_insert_history_link a   { background-position: 0 -646px;}
+        .wym_skin_galaxy .wym_buttons li.galaxy_tools_annotate_history a   { background-position: 0 -622px;}
+
+/*DECORATION*/
+        .wym_skin_galaxy .wym_section h2             { background: #f0f0f0; border: solid gray; border-width: 0 0 1px;}
+        .wym_skin_galaxy .wym_section h2 span        { color: gray;}
+        .wym_skin_galaxy .wym_panel                  { padding: 0; border: solid gray; border-width: 1px; background: white;}    
+        .wym_skin_galaxy .wym_panel ul               { margin: 2px 0 5px; }        
+        .wym_skin_galaxy .wym_dropdown               { padding: 0; border: solid gray; border-width: 1px 1px 0 1px; }
+        .wym_skin_galaxy .wym_dropdown ul            { border: solid gray; border-width: 0 1px 1px 1px; margin-left: -1px; padding: 5px 10px 5px 3px;}
+        
+/*DIALOGS*/
+        .wym_dialog div.row         { margin-bottom: 5px;}
+        .wym_dialog div.row input   { margin-right: 5px;}
+        .wym_dialog div.row label   { float: left; width: 150px; display: block; text-align: right; margin-right: 10px; }
+        .wym_dialog div.row-indent  { padding-left: 160px; }
+        /*autoclearing*/        
+        .wym_dialog div.row:after            { content: "."; display: block; height: 0; clear: both; visibility: hidden; }
+        .wym_dialog div.row                  { display: inline-block; }
+            /* Hides from IE-mac \*/
+            * html .wym_dialog div.row       { height: 1%; }
+            .wym_dialog div.row              { display: block; }
+            /* End hide from IE-mac */                
+            
+/*WYMEDITOR_LINK*/
+        a.wym_wymeditor_link        { text-indent: -9999px; float: right; display: block; width: 50px; height: 15px; background: url(../wymeditor_icon.png);  overflow: hidden; text-decoration: none;  }
diff --git a/static/wymeditor/skins/galaxy/skin.js b/static/wymeditor/skins/galaxy/skin.js
new file mode 100644
index 0000000..dfabc91
--- /dev/null
+++ b/static/wymeditor/skins/galaxy/skin.js
@@ -0,0 +1,35 @@
+WYMeditor.SKINS['galaxy'] = {
+
+    init: function(wym) {
+    
+        //move the containers panel to the top area
+        jQuery(wym._options.containersSelector + ', '
+          + wym._options.classesSelector, wym._box)
+          .appendTo( jQuery("div.wym_area_top", wym._box) )
+          .addClass("wym_dropdown")
+          .css({"margin-right": "10px", "width": "120px", "float": "left"});
+
+        //render following sections as buttons
+        jQuery(wym._options.toolsSelector, wym._box)
+          .addClass("wym_buttons")
+          .css({"margin-right": "10px", "float": "left"});
+
+        //make hover work under IE < 7
+        jQuery(".wym_section", wym._box).hover(function(){
+          jQuery(this).addClass("hover");
+        },function(){
+          jQuery(this).removeClass("hover");
+        });
+
+        var postInit = wym._options.postInit;
+        wym._options.postInit = function(wym) {
+
+            if(postInit) postInit.call(wym, wym);
+            var rule = {
+                name: 'body',
+                css: 'background-color: #f0f0f0;'
+            };
+            wym.addCssRule( wym._doc.styleSheets[0], rule);
+        };
+    }
+};
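The galaxy skin registers under WYMeditor.SKINS just as the default skin does; its init hook runs once per editor instance, moves the containers and classes panels into the top area, and appends a post-init CSS rule to the editing iframe. A sketch of activating it together with a language pack, assuming the plugin's 'skin' and 'lang' initialization options (the selector is hypothetical):

    // skins/galaxy/skin.js and skin.css are resolved relative to the
    // configured WYMeditor path, so they only need to be on disk.
    jQuery('#page_content').wymeditor({
        skin: 'galaxy',  // runs WYMeditor.SKINS['galaxy'].init(wym)
        lang: 'en'
    });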
diff --git a/templates/admin/dataset_security/group/grid.mako b/templates/admin/dataset_security/group/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/dataset_security/group/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/dataset_security/group/group.mako b/templates/admin/dataset_security/group/group.mako
new file mode 100644
index 0000000..d8f013b
--- /dev/null
+++ b/templates/admin/dataset_security/group/group.mako
@@ -0,0 +1,83 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="render_select( name, options )">
+    <select name="${name|h}" id="${name|h}" style="min-width: 250px; height: 150px;" multiple>
+        %for option in options:
+            <option value="${option[0]|h}">${option[1]|h}</option>
+        %endfor
+    </select>
+</%def>
+
+<script type="text/javascript">
+$(document).ready(function() {
+    $('#roles_add_button').click(function() {
+        return !$('#out_roles option:selected').remove().appendTo('#in_roles');
+    });
+    $('#roles_remove_button').click(function() {
+        return !$('#in_roles option:selected').remove().appendTo('#out_roles');
+    });
+    $('#users_add_button').click(function() {
+        return !$('#out_users option:selected').remove().appendTo('#in_users');
+    });
+    $('#users_remove_button').click(function() {
+        return !$('#in_users option:selected').remove().appendTo('#out_users');
+    });
+    $('form#associate_group_role_user').submit(function() {
+        $('#in_roles option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+        $('#in_users option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+    });
+});
+</script>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Group '${group.name|h}'</div>
+    <div class="toolFormBody">
+        <form name="associate_group_role_user" id="associate_group_role_user" action="${h.url_for(controller='admin', action='manage_users_and_roles_for_group', id=trans.security.encode_id( group.id ) )}" method="post" >
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Roles associated with '${group.name|h}'</label>
+                    ${render_select( "in_roles", in_roles )}<br/>
+                    <input type="submit" id="roles_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Roles not associated with '${group.name|h}'</label>
+                    ${render_select( "out_roles", out_roles )}<br/>
+                    <input type="submit" id="roles_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Users associated with '${group.name|h}'</label>
+                    ${render_select( "in_users", in_users )}<br/>
+                    <input type="submit" id="users_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Users not associated with '${group.name|h}'</label>
+                    ${render_select( "out_users", out_users )}<br/>
+                    <input type="submit" id="users_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="group_roles_users_edit_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
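The script block in this template (and in the group and role templates that follow) is the dual-listbox idiom: the arrow buttons move option elements between the "in" and "out" selects, and the submit handler re-selects everything remaining in the "in" lists, because browsers only serialize selected options into the POST. Condensed, with hypothetical ids:

    // Clicking the arrow moves the highlighted options across; the !
    // turns the (truthy) jQuery object into false, so the button does
    // not submit the form itself.
    $('#add_button').click(function() {
        return !$('#out_list option:selected').remove().appendTo('#in_list');
    });
    // On submit, mark every transferred option selected so the server
    // receives the full membership of the 'in' list.
    $('form#membership').submit(function() {
        $('#in_list option').attr('selected', 'selected');
    });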
diff --git a/templates/admin/dataset_security/group/group_create.mako b/templates/admin/dataset_security/group/group_create.mako
new file mode 100644
index 0000000..307611b
--- /dev/null
+++ b/templates/admin/dataset_security/group/group_create.mako
@@ -0,0 +1,100 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="render_select( name, options )">
+    <select name="${name|h}" id="${name|h}" style="min-width: 250px; height: 150px;" multiple>
+        %for option in options:
+            <option value="${option[0]|h}">${option[1]|h}</option>
+        %endfor
+    </select>
+</%def>
+
+<script type="text/javascript">
+$(document).ready(function() {
+    $('#roles_add_button').click(function() {
+        return !$('#out_roles option:selected').remove().appendTo('#in_roles');
+    });
+    $('#roles_remove_button').click(function() {
+        return !$('#in_roles option:selected').remove().appendTo('#out_roles');
+    });
+    $('#users_add_button').click(function() {
+        return !$('#out_users option:selected').remove().appendTo('#in_users');
+    });
+    $('#users_remove_button').click(function() {
+        return !$('#in_users option:selected').remove().appendTo('#out_users');
+    });
+    $('form#associate_group_role_user').submit(function() {
+        $('#in_roles option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+        $('#in_users option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+    });
+    //Temporary removal of select2 for inputs -- refactor this later.
+    $('select').select2("destroy");
+});
+</script>
+
+<%
+    from galaxy.web.form_builder import CheckboxField
+    create_role_for_group_checkbox = CheckboxField( 'create_role_for_group' )
+%>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create Group</div>
+    <div class="toolFormBody">
+        <form name="associate_group_role_user" id="associate_group_role_user" action="${h.url_for(controller='admin', action='create_group' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <input name="name" type="text" value="${name|h}" size="40"/>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Roles associated with new group</label>
+                    ${render_select( "in_roles", in_roles )}<br/>
+                    <input type="submit" id="roles_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Roles not associated with new group</label>
+                    ${render_select( "out_roles", out_roles )}<br/>
+                    <input type="submit" id="roles_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Users associated with new group</label>
+                    ${render_select( "in_users", in_users )}<br/>
+                    <input type="submit" id="users_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Users not associated with new group</label>
+                    ${render_select( "out_users", out_users )}<br/>
+                    <input type="submit" id="users_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                %if create_role_for_group_checked:
+                    <% create_role_for_group_checkbox.checked = True %>
+                %endif
+                ${create_role_for_group_checkbox.get_html()} Create a new role of the same name for this group
+            </div>
+            <div class="form-row">
+                <input type="submit" name="create_group_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/dataset_security/group/group_rename.mako b/templates/admin/dataset_security/group/group_rename.mako
new file mode 100644
index 0000000..e434a67
--- /dev/null
+++ b/templates/admin/dataset_security/group/group_rename.mako
@@ -0,0 +1,36 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Change group name</div>
+    <div class="toolFormBody">
+        <form name="library" action="${h.url_for( controller='admin', action='rename_group' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="${group.name|h}" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="rename" value="submitted"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="id" value="${ trans.security.encode_id( group.id )}"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="rename_group_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/dataset_security/role/grid.mako b/templates/admin/dataset_security/role/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/dataset_security/role/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/dataset_security/role/role.mako b/templates/admin/dataset_security/role/role.mako
new file mode 100644
index 0000000..c1e2cf0
--- /dev/null
+++ b/templates/admin/dataset_security/role/role.mako
@@ -0,0 +1,117 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="render_select( name, options )">
+    <select name="${name|h}" id="${name|h}" style="min-width: 250px; height: 150px;" multiple>
+        %for option in options:
+            <option value="${option[0]|h}">${option[1]|h}</option>
+        %endfor
+    </select>
+</%def>
+
+<script type="text/javascript">
+$().ready(function() {  
+    $('#users_add_button').click(function() {
+        return !$('#out_users option:selected').remove().appendTo('#in_users');
+    });
+    $('#users_remove_button').click(function() {
+        return !$('#in_users option:selected').remove().appendTo('#out_users');
+    });
+    $('#groups_add_button').click(function() {
+        return !$('#out_groups option:selected').remove().appendTo('#in_groups');
+    });
+    $('#groups_remove_button').click(function() {
+        return !$('#in_groups option:selected').remove().appendTo('#out_groups');
+    });
+    $('form#associate_role_user_group').submit(function() {
+        $('#in_users option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+        $('#in_groups option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+    });
+});
+</script>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Role '${role.name|h}'</div>
+    <div class="toolFormBody">
+        <form name="associate_role_user_group" id="associate_role_user_group" action="${h.url_for(controller='admin', action='manage_users_and_groups_for_role', id=trans.security.encode_id( role.id ) )}" method="post" >
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Users associated with '${role.name|h}'</label>
+                    ${render_select( "in_users", in_users )}<br/>
+                    <input type="submit" id="users_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Users not associated with '${role.name|h}'</label>
+                    ${render_select( "out_users", out_users )}<br/>
+                    <input type="submit" id="users_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Groups associated with '${role.name|h}'</label>
+                    ${render_select( "in_groups", in_groups )}<br/>
+                    <input type="submit" id="groups_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Groups not associated with '${role.name|h}'</label>
+                    ${render_select( "out_groups", out_groups )}<br/>
+                    <input type="submit" id="groups_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="role_members_edit_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
+<br clear="left"/>
+<br/>
+%if library_dataset_actions:
+    <h3>Data library datasets associated with role '${role.name|h}'</h3>
+    <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+        <tr>
+            <td>
+                <ul>
+                    %for ctr, library in enumerate( library_dataset_actions.keys() ):
+                        <li>
+                            <img src="${h.url_for( '/static/images/silk/book_open.png' )}" class="rowIcon"/>
+                            ${library.name|h}
+                            <ul>
+                                %for folder_path, permissions in library_dataset_actions[ library ].items():
+                                    <li>
+                                        <img src="${h.url_for( '/static/images/silk/folder_page.png' )}" class="rowIcon"/>
+                                        ${folder_path|h}
+                                        <ul>
+                                            %for permission in permissions:
+                                                <li>${permission|h}</li>
+                                            %endfor
+                                        </ul>
+                                    </li>
+                                %endfor
+                            </ul>
+                        </li>
+                    %endfor
+                </ul>
+            </td>
+        </tr>
+    </table>
+%endif
diff --git a/templates/admin/dataset_security/role/role_create.mako b/templates/admin/dataset_security/role/role_create.mako
new file mode 100644
index 0000000..e0e67ee
--- /dev/null
+++ b/templates/admin/dataset_security/role/role_create.mako
@@ -0,0 +1,104 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="render_select( name, options )">
+    <select name="${name|h}" id="${name|h}" style="min-width: 250px; height: 150px;" multiple>
+        %for option in options:
+            <option value="${option[0]|h}">${option[1]|h}</option>
+        %endfor
+    </select>
+</%def>
+
+<script type="text/javascript">
+    $().ready(function() {  
+        $('#groups_add_button').click(function() {
+            return !$('#out_groups option:selected').remove().appendTo('#in_groups');
+        });
+        $('#groups_remove_button').click(function() {
+            return !$('#in_groups option:selected').remove().appendTo('#out_groups');
+        });
+        $('#users_add_button').click(function() {
+            return !$('#out_users option:selected').remove().appendTo('#in_users');
+        });
+        $('#users_remove_button').click(function() {
+            return !$('#in_users option:selected').remove().appendTo('#out_users');
+        });
+        $('form#associate_role_group_user').submit(function() {
+            $('#in_groups option').each(function(i) {
+                $(this).attr("selected", "selected");
+            });
+            $('#in_users option').each(function(i) {
+                $(this).attr("selected", "selected");
+            });
+        });
+        //Temporary removal of select2 for inputs -- refactor this later.
+        $('select').select2("destroy");
+    });
+</script>
+
+<%
+    from galaxy.web.form_builder import CheckboxField
+    create_group_for_role_checkbox = CheckboxField( 'create_group_for_role' )
+%>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create Role</div>
+    <div class="toolFormBody">
+        <form name="associate_role_group_user" id="associate_role_group_user" action="${h.url_for(controller='admin', action='create_role' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <input name="name" type="text" value="${name|h}" size="40"/>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <input name="description" type="text" value="${description|h}" size="40"/>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Groups associated with new role</label>
+                    ${render_select( "in_groups", in_groups )}<br/>
+                    <input type="submit" id="groups_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Groups not associated with new role</label>
+                    ${render_select( "out_groups", out_groups )}<br/>
+                    <input type="submit" id="groups_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Users associated with new role</label>
+                    ${render_select( "in_users", in_users )}<br/>
+                    <input type="submit" id="users_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Users not associated with new role</label>
+                    ${render_select( "out_users", out_users )}<br/>
+                    <input type="submit" id="users_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                %if create_group_for_role_checked:
+                    <% create_group_for_role_checkbox.checked = True %>
+                %endif
+                ${create_group_for_role_checkbox.get_html()} Create a new group of the same name for this role
+            </div>
+            <div class="form-row">
+                <input type="submit" name="create_role_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/dataset_security/role/role_rename.mako b/templates/admin/dataset_security/role/role_rename.mako
new file mode 100644
index 0000000..d8a7dae
--- /dev/null
+++ b/templates/admin/dataset_security/role/role_rename.mako
@@ -0,0 +1,43 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Change role name and description</div>
+    <div class="toolFormBody">
+        <form name="library" action="${h.url_for( controller='admin', action='rename_role' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="${role.name|h}" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input name="description" type="text" value="${role.description|h}" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="rename" value="submitted"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="id" value="${trans.security.encode_id( role.id )}"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="rename_role_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/external_service/common.mako b/templates/admin/external_service/common.mako
new file mode 100644
index 0000000..22cd879
--- /dev/null
+++ b/templates/admin/external_service/common.mako
@@ -0,0 +1,47 @@
+<%def name="render_external_service( external_service )">
+    <div class="toolForm">
+        <div class="toolFormTitle">External service</div>
+        <div class="form-row">
+            <label>Name:</label>
+            ${external_service.name}
+            ##<a href="${h.url_for( controller='external_service', action='view_external_service', id=trans.security.encode_id( external_service.id ) )}">${external_service.name}</a>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Description:</label>
+            ${external_service.description}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Version:</label>
+            ${external_service.version}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>External service type:</label>
+            %if external_service.external_service_type_id in trans.app.external_service_types.all_external_service_types:
+                ${trans.app.external_service_types.all_external_service_types[ external_service.external_service_type_id ].name}
+            %else:
+                ${'Error loading external_service type: %s' % external_service.external_service_type_id}
+            %endif
+            <div style="clear: both"></div>
+        </div>
+        %if external_service.external_service_type_id != 'none':
+            %for field_index, field in enumerate( external_service.form_definition.fields ):
+                <% 
+                    field_value = external_service.form_values.content.get( field['name'], '' )
+                    if field[ 'type' ] == 'PasswordField':
+                        field_value = '*' * len( field_value )
+                %>
+                <div class="form-row">
+                    <label>${field[ 'label' ]}:</label>
+                    ${field_value}
+                </div>
+            %endfor
+        %else:
+            <div class="form-row">
+                External service information is not set; click the <b>Edit external service</b> button to set it.
+            </div>
+        %endif
+    </div>
+</%def>
diff --git a/templates/admin/external_service/create_external_service.mako b/templates/admin/external_service/create_external_service.mako
new file mode 100644
index 0000000..b39e480
--- /dev/null
+++ b/templates/admin/external_service/create_external_service.mako
@@ -0,0 +1,28 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/external_service/common.mako" import="*" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<form name="create_external_service" action="${h.url_for( controller='external_service', action='create_external_service' )}" method="post">
+    <div class="toolForm">
+        <div class="toolFormTitle">New external service</div>
+        %if widgets:
+            %for i, field in enumerate( widgets ):
+                <div class="form-row">
+                    <label>${field['label']|h}:</label>
+                    ${field['widget'].get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${field['helptext']|h}
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+            %endfor
+        %endif
+    </div>
+    <div class="form-row">
+        <input type="submit" name="create_external_service_button" value="Save"/>
+    </div>
+</form>
diff --git a/templates/admin/external_service/edit_external_service.mako b/templates/admin/external_service/edit_external_service.mako
new file mode 100644
index 0000000..5245e3a
--- /dev/null
+++ b/templates/admin/external_service/edit_external_service.mako
@@ -0,0 +1,40 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button menubutton" id="external_service-${external_service.id}-popup">External service actions</a></li>
+    <div popupmenu="external_service-${external_service.id}-popup">
+        %if not external_service.deleted:
+            <li><a class="action-button" href="${h.url_for( controller='external_service', action='view_external_service', id=trans.security.encode_id( external_service.id ) )}">Browse external service</a></li>
+            <li><a class="action-button" href="${h.url_for( controller='external_service', action='edit_external_service_form_definition', id=trans.security.encode_id( external_service.id ) )}">Edit form definition</a></li>
+            <li><a class="action-button" href="${h.url_for( controller='external_service', action='delete_external_service', id=trans.security.encode_id( external_service.id ) )}">Delete external service</a></li>
+        %endif
+        %if external_service.deleted:
+            <li><a class="action-button" href="${h.url_for( controller='external_service', action='undelete_external_service', id=trans.security.encode_id( external_service.id ) )}">Undelete external service</a></li>
+        %endif
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<form name="edit_external_service" action="${h.url_for( controller='external_service', action='edit_external_service', id=trans.security.encode_id( external_service.id ) )}" method="post" >
+    <div class="toolForm">
+        <div class="toolFormTitle">Edit external service</div>
+        %for i, field in enumerate( widgets ):
+            <div class="form-row">
+                <label>${field['label']|h}:</label>
+                ${field['widget'].get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    ${field['helptext']|h}
+                </div>
+                <div style="clear: both"></div>
+            </div>
+        %endfor  
+    </div>
+    <div class="form-row">
+        <input type="submit" name="edit_external_service_button" value="Save"/>
+    </div>
+</form>
diff --git a/templates/admin/external_service/grid.mako b/templates/admin/external_service/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/external_service/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/external_service/reload_external_service_types.mako b/templates/admin/external_service/reload_external_service_types.mako
new file mode 100644
index 0000000..f966645
--- /dev/null
+++ b/templates/admin/external_service/reload_external_service_types.mako
@@ -0,0 +1,22 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<br/><br/>
+
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+<form name="reload_external_service_types" action="${h.url_for( controller='external_service', action='reload_external_service_types' )}" method="post">
+    <div class="toolForm">
+        <div class="toolFormTitle">Reload external service types</div>
+        <div class="form-row">
+            <label>Select external service type to reload:</label>
+            ${external_service_type_select_field.get_html()}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <input type="submit" name="reload_external_service_type_button" value="Reload"/>
+        </div>
+    </div>
+</form>
diff --git a/templates/admin/external_service/view_external_service.mako b/templates/admin/external_service/view_external_service.mako
new file mode 100644
index 0000000..2321017
--- /dev/null
+++ b/templates/admin/external_service/view_external_service.mako
@@ -0,0 +1,23 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/external_service/common.mako" import="*" />
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button menubutton" id="external_service-${external_service.id}-popup">External service actions</a></li>
+    <div popupmenu="external_service-${external_service.id}-popup">
+        %if not external_service.deleted:
+            <li><a class="action-button" href="${h.url_for( controller='external_service', action='edit_external_service', id=trans.security.encode_id( external_service.id ) )}">Edit external service</a></li>
+            <li><a class="action-button" href="${h.url_for( controller='external_service', action='delete_external_service', id=trans.security.encode_id( external_service.id ) )}">Delete external service</a></li>
+        %endif
+        %if external_service.deleted:
+            <li><a class="action-button" href="${h.url_for( controller='external_service', action='undelete_external_service', id=trans.security.encode_id( external_service.id ) )}">Undelete external service</a></li>
+        %endif
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+${render_external_service( external_service )}
diff --git a/templates/admin/forms/create_form.mako b/templates/admin/forms/create_form.mako
new file mode 100644
index 0000000..8d55b64
--- /dev/null
+++ b/templates/admin/forms/create_form.mako
@@ -0,0 +1,21 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+<form name="create_form_definition" action="${h.url_for( controller='forms', action='create_form_definition' )}" enctype="multipart/form-data" method="post" >
+    <div class="toolForm">
+        <div class="toolFormTitle">Create a new form definition</div>
+        %for label, input in inputs:
+            <div class="form-row">
+                <label>${label | h}</label>
+                ${input.get_html()}
+                <div style="clear: both"></div>
+            </div>
+        %endfor
+        <div class="form-row">
+            <input type="submit" name="create_form_button" value="Add fields"/>
+        </div>
+    </div>
+</form>
diff --git a/templates/admin/forms/edit_form_definition.mako b/templates/admin/forms/edit_form_definition.mako
new file mode 100644
index 0000000..d4ea953
--- /dev/null
+++ b/templates/admin/forms/edit_form_definition.mako
@@ -0,0 +1,153 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<script type="text/javascript">
+$(document).ready(function(){
+    // Hide all of the elements with class msg_body.
+    $(".msg_body").hide();
+    // Toggle the msg_body component that follows a clicked msg_head header.
+    $(".msg_head").click(function(){
+        $(this).next(".msg_body").slideToggle(450);
+    });
+});
+</script>
+<style type="text/css">
+.msg_head {
+    padding: 0px 0px;
+    cursor: pointer;
+}
+
+</style>
+
+<%def name="render_selectbox_options( index, field_attr )">
+    %if field_attr[0] == 'Type':
+        %if field_attr[1].get_selected( return_label=True ) == 'SelectField':
+            <% options = field_attr[3] %>
+            <div class="repeat-group-item">
+                <div class="form-row">
+                    <label> Options</label>
+                    %for i, option in enumerate(options):
+                        <div class="form-row">
+                            <b> ${i+1}</b>
+                            ${option[1].get_html()}                          
+                            <input type="submit" name="removeoption_${index}_${i}" value="Remove"/>
+                        </div>
+                    %endfor
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="addoption_${index}" value="Add"/>
+            </div>
+        %endif
+    %endif
+</%def>
+
+<%def name="render_field( index, field, saved )">
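+    ## Saved fields render with class msg_body, which the script above hides
+    ## and toggles; new fields use msg_body2 and so stay expanded for editing.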
+    %if saved:        
+        <h4 class="msg_head"> 
+            <div class="form-row">${index+1}. ${field[0][1].value} (${field[2][1].get_selected( return_value=True )})</div>
+        </h4>
+        <div class="msg_body">
+    %else:
+        <h4 class="msg_head"> 
+            <div class="form-row">${index+1}. ${field[0][1].value}</div>
+        </h4>
+        <div class="msg_body2">
+    %endif
+        <div class="repeat-group-item">
+            %for field_attr in field:
+                <div class="form-row">
+                    <label>${field_attr[0]}</label>
+                    ${field_attr[1].get_html()}
+                    ${render_selectbox_options( index, field_attr )}
+                    %if len(field_attr) == 3:
+                        <div class="toolParamHelp" style="clear: both;">
+                           ${field_attr[2]}
+                        </div>
+                    %endif
+                </div>
+            %endfor
+            <div class="form-row">
+                <input type="submit" name="remove_button" value="Remove field ${index+1}"/>
+            </div>
+        </div>
+    </div>
+</%def>
+
+<%def name="render_layout( index, widget )">
+    <div class="repeat-group-item">
+        <div class="form-row">
+            <b> ${index+1}</b>
+            ${widget.get_html()}
+            <input type="submit" name="remove_layout_grid_button" value="Remove grid ${index+1}"/>
+        </div>
+    </div>
+</%def>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" href="${h.url_for( controller='forms', action='view_latest_form_definition', id=trans.security.encode_id( form_definition.current.id ) )}">View</a></li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<form id="edit_form_definition" name="edit_form_definition" action="${h.url_for( controller='forms', action='edit_form_definition', id=trans.security.encode_id( form_definition.current.id ) )}" method="post" >
+    <div class="toolForm">
+       <div class="toolFormTitle">Edit form definition "${form_definition.name | h}" (${form_definition.type | h})</div>
+        %if response_redirect:
+            <input type="hidden" name="response_redirect" value="${response_redirect}" size="40" />
+        %endif
+        %for label, input in form_details:
+            <div class="form-row">
+                %if label != 'Type':
+                    <label>${label | h}</label>
+                %endif
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    ${input.get_html()}
+                </div>
+                <div style="clear: both"></div>
+            </div>
+        %endfor
+    </div>
+    %if current_form_type == trans.app.model.FormDefinition.types.SAMPLE:
+        <p/>
+        <div class="toolForm">
+            <div class="toolFormTitle">Form Layout</div>
+            <div class="form-row">
+                <label>Layout grid names</label>
+            </div>
+            %for index, lg in enumerate( layout_grids ):
+                ${render_layout( index, lg )}
+            %endfor
+            <div class="form-row">
+                <input type="submit" name="add_layout_grid_button" value="Add layout grid"/>
+            </div>
+        </div>
+    %endif
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Form definition fields</div>
+        %for ctr, field in enumerate(field_details):
+            %if ctr < len( form_definition.fields ):
+                ${render_field( ctr, field, True )}
+            %else:
+                ${render_field( ctr, field, False )}
+            %endif
+        %endfor
+        <div class="form-row">
+            <input type="submit" name="add_field_button" value="Add field"/>
+        </div>
+        <div class="form-row">
+            <div style="float: left; width: 250px; margin-right: 10px;">
+                <input type="hidden" name="refresh" value="true" size="40"/>
+            </div>
+            <div style="clear: both"></div>
+        </div>
+    </div>
+    <div class="form-row">
+        <input type="submit" name="save_changes_button" value="Save"/>
+    </div>
+</form>
diff --git a/templates/admin/forms/grid.mako b/templates/admin/forms/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/forms/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/forms/view_form_definition.mako b/templates/admin/forms/view_form_definition.mako
new file mode 100644
index 0000000..275fd72
--- /dev/null
+++ b/templates/admin/forms/view_form_definition.mako
@@ -0,0 +1,85 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<%def name="render_grid( grid_index, grid_name, fields_dict )">
+    %if grid_name:
+        <div class="form-row">
+            <label>${grid_name}</label>
+        </div>
+    %endif
+    <div style="clear: both"></div>
+    <table class="grid">
+        <thead>
+            <tr>
+                %for index, field in fields_dict.items():
+                    <th>${field[ 'label' ]}</th>
+                %endfor
+            </tr>
+        </thead>
+        <tbody>
+            <tr>
+                %for index, field in fields_dict.items():
+                    <td>
+                        ${field[ 'type' ]}: ${form_definition.field_as_html( field )}<br/>
+                        <div class="toolParamHelp" style="clear: both;">
+                            <i>${field[ 'helptext' ]}</i> - (${field[ 'required' ]})
+                        </div>
+                        %if field[ 'type' ] == 'SelectField':
+                            <div class="toolParamHelp" style="clear: both;">
+                                <label>Options:</label>
+                                %for option in field[ 'selectlist' ]:
+                                    ${option}
+                                %endfor
+                            </div>
+                        %endif
+                    </td>
+                %endfor
+            </tr>
+        </tbody>
+    </table>
+</%def>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" href="${h.url_for( controller='forms', action='edit_form_definition', id=trans.security.encode_id( form_definition.current.id ) )}">Edit</a></li>
+</ul>
+
+
+<div class="toolForm">
+    <div class="toolFormTitle">Form definition "${form_definition.name | h}" (${form_definition.type | h})</div>
+    %if form_definition.type == trans.app.model.FormDefinition.types.SAMPLE:
+        %if form_definition.layout:
+            %for grid_index, grid_name in enumerate( form_definition.layout ):
+                ${render_grid( grid_index, grid_name, form_definition.grid_fields( grid_index ) )}
+            %endfor
+        %else:
+            ${render_grid( 0, '', form_definition.grid_fields( None ) )}
+        %endif
+    %else:
+        %for index, field in enumerate( form_definition.fields ):
+            <div class="form-row">
+                <label>${field[ 'label' ]}</label>
+                ${field[ 'type' ]}: ${form_definition.field_as_html( field )}
+                <div class="toolParamHelp" style="clear: both;">
+                    <i>${field[ 'helptext' ]}</i> - (${field[ 'required' ]})
+                </div>
+                %if field[ 'type' ] == 'SelectField':
+                    <div class="toolParamHelp" style="clear: both;">
+                        <label>Options:</label>
+                        %for option in field[ 'selectlist' ]:
+                            ${option}
+                        %endfor
+                    </div>
+                %endif
+            </div>
+            <div style="clear: both"></div>
+        %endfor
+    %endif
+</div>
diff --git a/templates/admin/impersonate.mako b/templates/admin/impersonate.mako
new file mode 100644
index 0000000..915b931
--- /dev/null
+++ b/templates/admin/impersonate.mako
@@ -0,0 +1,58 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if emails:
+    <div class="toolForm">
+        <div class="toolFormTitle">Impersonate another user</div>
+        <div class="toolFormBody">
+        <form name="impersonate" id="impersonate" action="${h.url_for( controller='admin', action='impersonate' )}" method="post" >
+            <div class="form-row">
+                <label>
+                    User to impersonate:
+                </label>
+                <input type="hidden" id="email_select" name="email"/>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="impersonate_button" value="Impersonate"/>
+            </div>
+        </form>
+        </div>
+    </div>
+    <script type="text/javascript">
+    /*  This should be ripped out and made generic at some point for the
+     *  various API bindings available, and once the API can filter list
+     *  queries (term, below) */
+    $("#email_select").select2({
+        placeholder: "Select a user",
+        width: "33%",
+        ajax: {
+            url: "${h.url_for(controller="/api/users", action="index")}",
+            dataType: 'json',
+            quietMillis: 250,
+            matcher: function(term, text) { return text.toUpperCase().indexOf(term.toUpperCase())>=0; },
+            data: function (term) {
+                return {
+                    f_email: term
+                };
+            },
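+            // Map the /api/users response into select2's {id, text} items,
+            // using the email address as the value that is submitted.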
+            results: function (data) {
+              var results = [];
+              $.each(data, function(index, item){
+                    results.push({
+                      id: item.email,
+                      text: item.username + " : " + item.email
+                    });
+              });
+              return {
+                  results: results
+              };
+            }
+        }
+    });
+    </script>
+%endif
diff --git a/templates/admin/jobs.mako b/templates/admin/jobs.mako
new file mode 100644
index 0000000..212bb6b
--- /dev/null
+++ b/templates/admin/jobs.mako
@@ -0,0 +1,209 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        function toggle_all(source) {
+            // sets all checkboxes in source's parent form to match source element.
+            $.each($(source).closest("form").find(":checkbox"), function(i, v){
+                v.checked = source.checked;
+            });
+        }
+    </script>
+</%def>
+
+<%def name="title()">Jobs</%def>
+
+<h2>Jobs</h2>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<p>
+    Unfinished and recently finished jobs are displayed on this page.  The
+    'cutoff' input box does two things: it limits the display of unfinished
+    jobs to those whose job state has not been updated within the cutoff, and
+    it limits the recently finished jobs list to jobs that finished within the
+    cutoff.
+</p>
+<p>
+    If any jobs are displayed, you may choose to stop them.  Your stop message
+    will be displayed to the user as: "This job was stopped by an
+    administrator: <b>&lt;YOUR MESSAGE&gt;</b>.  For more information or help,
+    report this error".
+</p>
+
+%if jobs:
+<form name="jobs" action="${h.url_for(controller='admin', action='jobs')}" method="POST">
+    <h4>
+        Unfinished Jobs: These jobs are unfinished and have had their state updated within the previous ${cutoff} seconds.
+    </h4>
+    <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+        <tr class="header">
+            <td><input type="checkbox" onClick="toggle_all(this)"/></td>
+            <td>Job ID</td>
+            <td>User</td>
+            <td>Last Update</td>
+            <td>Tool</td>
+            <td>State</td>
+            <td>Inputs</td>
+            <td>Command Line</td>
+            <td>Job Runner</td>
+            <td>PID/Cluster ID</td>
+        </tr>
+        %for job in jobs:
+            <tr>
+                <td>
+                    <input type="checkbox" name="stop" value="${job.id}"/>
+                </td>
+                <td>${job.id}</td>
+                %if job.history and job.history.user:
+                    <td>${job.history.user.email|h}</td>
+                %else:
+                    <td>anonymous</td>
+                %endif
+                <td>${last_updated[job.id]} ago</td>
+                <td>${job.tool_id|h}</td>
+                <td>${job.state}</td>
+                <%
+                    try:
+                        inputs = ", ".join( [ '%s %s' % ( da.dataset.id, da.dataset.state ) for da in job.input_datasets ] )
+                    except Exception:
+                        inputs = 'Unable to determine inputs'
+                %>
+                <td>${inputs}</td>
+                <td>${job.command_line|h}</td>
+                <td>${job.job_runner_name|h}</td>
+                <td>${job.job_runner_external_id|h}</td>
+            </tr>
+        %endfor
+    </table>
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">
+            Stop Jobs
+        </div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <label>
+                    Stop message:
+                </label>
+                <div class="form-row-input">
+                    <input type="text" name="stop_msg" size="40"/>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    to be displayed to the user
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" class="primary-button" name="submit" value="Submit">
+            </div>
+        </div>
+    </div>
+    <p/>
+</form>
+%else:
+    <div class="infomessage">There are no unfinished jobs to show with the current cutoff time.</div>
+    <p/>
+%endif
+
+%if recent_jobs:
+    <h4>
+        Recent Jobs: These jobs have completed within the previous ${cutoff} seconds.
+    </h4>
+    <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+        <tr class="header">
+            <td>Job ID</td>
+            <td>User</td>
+            <td>Finished</td>
+            <td>Tool</td>
+            <td>State</td>
+            <td>Inputs</td>
+            <td>Command Line</td>
+            <td>Job Runner</td>
+            <td>PID/Cluster ID</td>
+        </tr>
+        %for job in recent_jobs:
+            <tr>
+                <td><a href="${h.url_for( controller="admin", action="job_info" )}?jobid=${job.id}">${job.id}</a></td>
+                %if job.history and job.history.user:
+                    <td>${job.history.user.email|h}</td>
+                %else:
+                    <td>anonymous</td>
+                %endif
+                <td>${finished[job.id]} ago</td>
+                <td>${job.tool_id|h}</td>
+                <td>${job.state}</td>
+                <%
+                    try:
+                        inputs = ", ".join( [ '%s %s' % ( da.dataset.id, da.dataset.state ) for da in job.input_datasets ] )
+                    except Exception:
+                        inputs = 'Unable to determine inputs'
+                %>
+                <td>${inputs}</td>
+                <td>${job.command_line|h}</td>
+                <td>${job.job_runner_name|h}</td>
+                <td>${job.job_runner_external_id|h}</td>
+            </tr>
+        %endfor
+    </table>
+    <p/>
+%else:
+    <div class="infomessage">There are no recently finished jobs to show with the current cutoff time.</div>
+    <p/>
+%endif
+
+<form name="jobs" action="${h.url_for(controller='admin', action='jobs')}" method="POST">
+    <div class="toolForm">
+        <div class="toolFormTitle">
+            Update Jobs
+        </div>
+        <div class="toolFormBody">
+
+            <div class="form-row">
+                <label>
+                    Cutoff:
+                </label>
+                <div class="form-row-input">
+                    <input type="text" name="cutoff" size="4" value="${cutoff}"/>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    In seconds
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" class="primary-button" name="submit" value="Refresh">
+            </div>
+        </div>
+    </div>
+</form>
+
+<form name="jobs" action="${h.url_for(controller='admin', action='jobs')}" method="POST">
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">
+            Administrative Job Lock
+        </div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <input type="hidden" name="ajl_submit" value="True"/>
+    %if job_lock:
+                <p>Job dispatching is currently <strong>locked</strong>.</p>
+                <label>
+                    <input type='checkbox' name='job_lock' checked='checked' />
+                    Prevent jobs from dispatching.
+                </label>
+    %else:
+                <p>Job dispatching is currently <strong>unlocked</strong>.</p>
+                <label>
+                    <input type='checkbox' name='job_lock' />
+                    Prevent jobs from dispatching.
+                </label>
+    %endif
+            </div>
+            <div class="form-row">
+                <input type="submit" class="primary-button" name="submit" value="Update">
+            </div>
+        </div>
+    </div>
+</form>
diff --git a/templates/admin/library/grid.mako b/templates/admin/library/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/library/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/library/new_library.mako b/templates/admin/library/new_library.mako
new file mode 100644
index 0000000..683fdad
--- /dev/null
+++ b/templates/admin/library/new_library.mako
@@ -0,0 +1,44 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create a new data library</div>
+    <div class="toolFormBody">
+        <form name="library" action="${h.url_for( controller='library_admin', action='create_library' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="New data library" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="description" value="" size="40"/>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    Displayed when browsing all libraries
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Synopsis:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="synopsis" value="" size="40"/>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    Displayed when browsing this library
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="create_library_button" value="Create"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/package_tool.mako b/templates/admin/package_tool.mako
new file mode 100644
index 0000000..6d4818e
--- /dev/null
+++ b/templates/admin/package_tool.mako
@@ -0,0 +1,56 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%
+   from galaxy.tools import Tool
+   from galaxy.tools.toolbox import ToolSection
+%>
+
+<script type="text/javascript">
+$().ready(function() {
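+    // Focus the Download button when a tool is already selected; otherwise
+    // focus the tool chooser itself.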
+%if tool_id:
+    var focus_el = $("input[name=package_tool_button]");
+%else:
+    var focus_el = $("select[name=tool_id]");
+%endif
+    focus_el.focus();
+});
+</script>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Download Tarball For ToolShed</div>
+    <div class="toolFormBody">
+    <form name="package_tool" id="package_tool" action="${h.url_for( controller='admin', action='package_tool' )}" method="post" >
+        <div class="form-row">
+            <label>
+                Tool to bundle:
+            </label>
+            <select name="tool_id">
+                %for val in toolbox.tool_panel_contents( trans ):
+                    %if isinstance( val, Tool ):
+                        <option value="${val.id|h}">${val.name|h}</option>
+                    %elif isinstance( val, ToolSection ):
+                        <optgroup label="${val.name|h}">
+                        <% section = val %>
+                        %for section_key, section_val in section.elems.items():
+                            %if isinstance( section_val, Tool ):
+                                <% selected_str = "" %>
+                                %if section_val.id == tool_id:
+                                     <% selected_str = " selected=\"selected\"" %>
+                                %endif
+                                <option value="${section_val.id|h}"${selected_str}>${section_val.name|h}</option>
+                            %endif
+                        %endfor
+                        </optgroup>
+                    %endif
+                %endfor
+            </select>
+        </div>
+        <div class="form-row">
+            <input type="submit" name="package_tool_button" value="Download"/>
+        </div>
+    </form>
+    </div>
+</div>
diff --git a/templates/admin/quota/grid.mako b/templates/admin/quota/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/quota/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/quota/quota.mako b/templates/admin/quota/quota.mako
new file mode 100644
index 0000000..8fd9cb5
--- /dev/null
+++ b/templates/admin/quota/quota.mako
@@ -0,0 +1,83 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="render_select( name, options )">
+    <select name="${name|h}" id="${name|h}" style="min-width: 250px; height: 150px;" multiple>
+        %for option in options:
+            <option value="${option[0]|h}">${option[1]|h}</option>
+        %endfor
+    </select>
+</%def>
+
+<script type="text/javascript">
+$().ready(function() {  
+    $('#users_add_button').click(function() {
+        return !$('#out_users option:selected').remove().appendTo('#in_users');
+    });
+    $('#users_remove_button').click(function() {
+        return !$('#in_users option:selected').remove().appendTo('#out_users');
+    });
+    $('#groups_add_button').click(function() {
+        return !$('#out_groups option:selected').remove().appendTo('#in_groups');
+    });
+    $('#groups_remove_button').click(function() {
+        return !$('#in_groups option:selected').remove().appendTo('#out_groups');
+    });
+    $('form#associate_quota_user_group').submit(function() {
+        $('#in_users option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+        $('#in_groups option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+    });
+});
+</script>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Quota '${name|h}'</div>
+    <div class="toolFormBody">
+        <form name="associate_quota_user_group" id="associate_quota_user_group" action="${h.url_for(controller='admin', action='manage_users_and_groups_for_quota', id=id )}" method="post" >
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Users associated with '${name|h}'</label>
+                    ${render_select( "in_users", in_users )}<br/>
+                    <input type="submit" id="users_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Users not associated with '${name|h}'</label>
+                    ${render_select( "out_users", out_users )}<br/>
+                    <input type="submit" id="users_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Groups associated with '${name|h}'</label>
+                    ${render_select( "in_groups", in_groups )}<br/>
+                    <input type="submit" id="groups_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Groups not associated with '${name|h}'</label>
+                    ${render_select( "out_groups", out_groups )}<br/>
+                    <input type="submit" id="groups_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="quota_members_edit_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/quota/quota_create.mako b/templates/admin/quota/quota_create.mako
new file mode 100644
index 0000000..c581744
--- /dev/null
+++ b/templates/admin/quota/quota_create.mako
@@ -0,0 +1,125 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="render_select( name, options )">
+    <select name="${name|h}" id="${name|h}" style="min-width: 250px; height: 150px;" multiple>
+        %for option in options:
+            <option value="${option[0]|h}">${option[1]|h}</option>
+        %endfor
+    </select>
+</%def>
+
+<script type="text/javascript">
+    $().ready(function() {  
+        $('#groups_add_button').click(function() {
+            return !$('#out_groups option:selected').remove().appendTo('#in_groups');
+        });
+        $('#groups_remove_button').click(function() {
+            return !$('#in_groups option:selected').remove().appendTo('#out_groups');
+        });
+        $('#users_add_button').click(function() {
+            return !$('#out_users option:selected').remove().appendTo('#in_users');
+        });
+        $('#users_remove_button').click(function() {
+            return !$('#in_users option:selected').remove().appendTo('#out_users');
+        });
+        $('form#associate_quota_group_user').submit(function() {
+            $('#in_groups option').each(function(i) {
+                $(this).attr("selected", "selected");
+            });
+            $('#in_users option').each(function(i) {
+                $(this).attr("selected", "selected");
+            });
+        });
+        //Temporary removal of select2 for inputs -- refactor this later.
+        $('select').select2("destroy");
+    });
+</script>
+
+<%
+    from galaxy.web.form_builder import SelectField
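+    # Build the operation (=, +, -) and default-quota selectors inline,
+    # preselecting the values the controller passed back in.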
+    operation_selectfield = SelectField( 'operation' )
+    for op in ( '=', '+', '-' ):
+        selected = op == operation
+        operation_selectfield.add_option( op, op, selected )
+    default_selectfield = SelectField( 'default' )
+    selected = 'no' == default
+    default_selectfield.add_option( 'No', 'no', selected )
+    for typ in trans.app.model.DefaultQuotaAssociation.types.__dict__.values():
+        selected = typ == default
+        default_selectfield.add_option( 'Yes, ' + typ, typ, selected )
+%>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create quota</div>
+    <div class="toolFormBody">
+        <form name="associate_quota_group_user" id="associate_quota_group_user" action="${h.url_for(controller='admin', action='create_quota' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <input name="name" type="text" value="${name|h}" size="40"/>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <input name="description" type="text" value="${description|h}" size="40"/>
+            </div>
+            <div class="form-row">
+                <label>Amount:</label>
+                <input name="amount" type="text" value="${amount|h}" size="40"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    Examples: "10000MB", "99 gb", "0.2T", "unlimited"
+                </div>
+            </div>
+            <div class="form-row">
+                <label>Assign, increase by amount, or decrease by amount?</label>
+                ${operation_selectfield.get_html()}
+            </div>
+            <div class="form-row">
+                <label>Is this quota a default for a class of users (if yes, what type)?</label>
+                ${default_selectfield.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    Warning: Any user or group associations selected below will be ignored if this quota is used as a default.
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Users associated with new quota</label>
+                    ${render_select( "in_users", in_users )}<br/>
+                    <input type="submit" id="users_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Users not associated with new quota</label>
+                    ${render_select( "out_users", out_users )}<br/>
+                    <input type="submit" id="users_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Groups associated with new quota</label>
+                    ${render_select( "in_groups", in_groups )}<br/>
+                    <input type="submit" id="groups_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Groups not associated with new quota</label>
+                    ${render_select( "out_groups", out_groups )}<br/>
+                    <input type="submit" id="groups_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="create_quota_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/quota/quota_edit.mako b/templates/admin/quota/quota_edit.mako
new file mode 100644
index 0000000..c437fea
--- /dev/null
+++ b/templates/admin/quota/quota_edit.mako
@@ -0,0 +1,46 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%
+    from galaxy.web.form_builder import SelectField
+    operation_selectfield = SelectField( 'operation' )
+    for op in ( '=', '+', '-' ):
+        selected = op == operation
+        operation_selectfield.add_option( op, op, selected )
+%>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Change quota amount</div>
+    <div class="toolFormBody">
+        <form name="library" action="${h.url_for( controller='admin', action='edit_quota' )}" method="post" >
+            <input name="id" type="hidden" value="${id}"/>
+            <div class="form-row">
+                <label>Amount</label>
+                <input  name="amount" type="textfield" value="${display_amount|h}" size=40"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    Examples: "10000MB", "99 gb", "0.2T", "unlimited"
+                </div>
+            </div>
+            <div class="form-row">
+                <label>Assign, increase by amount, or decrease by amount?</label>
+                ${operation_selectfield.get_html()}
+            </div>
+            <div class="form-row">
+                <input type="submit" name="edit_quota_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/quota/quota_rename.mako b/templates/admin/quota/quota_rename.mako
new file mode 100644
index 0000000..476518b
--- /dev/null
+++ b/templates/admin/quota/quota_rename.mako
@@ -0,0 +1,52 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Change quota name and description</div>
+    <div class="toolFormBody">
+        <form name="library" action="${h.url_for( controller='admin', action='rename_quota' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="${name|h}" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input  name="description" type="textfield" value="${description|h}" size=40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="rename" value="submitted"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="id" value="${id}"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="rename_quota_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/quota/quota_set_default.mako b/templates/admin/quota/quota_set_default.mako
new file mode 100644
index 0000000..ed64e0a
--- /dev/null
+++ b/templates/admin/quota/quota_set_default.mako
@@ -0,0 +1,44 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%
+    from galaxy.web.form_builder import SelectField
+    default_selectfield = SelectField( 'default' )
+    selected = 'no' == default
+    default_selectfield.add_option( 'No', 'no', selected )
+    for typ in trans.app.model.DefaultQuotaAssociation.types.__dict__.values():
+        selected = typ == default
+        default_selectfield.add_option( 'Yes, ' + typ, typ, selected )
+%>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Set quota default</div>
+    <div class="toolFormBody">
+        <form name="set_quota_default" id="set_quota_default" action="${h.url_for(controller='admin', action='set_quota_default' )}" method="post" >
+            <input name="id" type="hidden" value="${id}"/>
+            <div class="form-row">
+                <label>Is this quota a default for a class of users (if yes, what type)?</label>
+                ${default_selectfield.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    Warning: Any users or groups associated with this quota will be disassociated.
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="set_default_quota_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/reload_tool.mako b/templates/admin/reload_tool.mako
new file mode 100644
index 0000000..76b48a7
--- /dev/null
+++ b/templates/admin/reload_tool.mako
@@ -0,0 +1,80 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%
+   from galaxy.tools import Tool
+   from galaxy.tools.toolbox import ToolSection
+%>
+
+<script type="text/javascript">
+$().ready(function() {
+%if tool_id:
+    var focus_el = $("input[name=reload_tool_button]");
+%else:
+    var focus_el = $("select[name=tool_id]");
+%endif
+    focus_el.focus();
+});
+$().ready(function() {
+    $("#reload_toolbox").click(function(){
+        $.ajax({
+            url: "${h.url_for(controller="/api/configuration", action="toolbox")}",
+            type: 'PUT'
+        });
+    });
+});
+</script>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Reload Tool</div>
+    <div class="toolFormBody">
+    <form name="reload_tool" id="reload_tool" action="${h.url_for( controller='admin', action='reload_tool' )}" method="post" >
+        <div class="form-row">
+            <label>
+                Tool to reload:
+            </label>
+            <select name="tool_id">
+                %for val in toolbox.tool_panel_contents( trans ):
+                    %if isinstance( val, Tool ):
+                        <option value="${val.id|h}">${val.name|h}</option>
+                    %elif isinstance( val, ToolSection ):
+                        <optgroup label="${val.name|h}">
+                        <% section = val %>
+                        %for section_key, section_val in section.elems.items():
+                            %if isinstance( section_val, Tool ):
+                                <% selected_str = "" %>
+                                %if section_val.id == tool_id:
+                                     <% selected_str = " selected=\"selected\"" %>
+                                %endif
+                                <option value="${section_val.id|h}"${selected_str}>${section_val.name|h}</option>
+                            %endif
+                        %endfor
+                    %endif
+                %endfor
+            </select>
+        </div>
+        <div class="form-row">
+            <input type="submit" name="reload_tool_button" value="Reload"/>
+        </div>
+    </form>
+    </div>
+</div>
+<p>
+<div class="toolForm">
+    <div class="toolFormTitle">Reload Toolbox</div>
+    <div class="toolFormBody">
+    <form name="reload_toolbox_form" id="reload_toolbox_form" action="" method="" >
+        <div class="form-row">
+        Clicking <a href="#" id="reload_toolbox">here</a> will reload
+        the Galaxy toolbox. This will cause newly discovered tools
+        to be added, tools now missing from tool confs to be removed,
+        and items in the panel to be reordered. Individual tools, even
+        ones with modified tool descriptions, will not be reloaded.
+        </div>
+    </form>
+    </div>
+</div>
diff --git a/templates/admin/request_type/common.mako b/templates/admin/request_type/common.mako
new file mode 100644
index 0000000..afb2883
--- /dev/null
+++ b/templates/admin/request_type/common.mako
@@ -0,0 +1,33 @@
+<%def name="render_state( element_count, state_name, state_desc )">
+    <div class="repeat-group-item">
+        <div class="form-row">
+            <label>${1+element_count}. State name:</label>
+            <input type="text" name="state_name_${element_count}" value="${state_name | h}" size="40"/>
+            ## Do not show remove button for the first state
+            %if element_count > 0:
+                <input type="submit" name="remove_state_button" value="Remove state ${1+element_count}"/>
+            %endif
+        </div>
+        <div class="form-row">
+            <label>Description:</label>
+            <input type="text" name="state_desc_${element_count}" value="${state_desc | h}" size="40"/>
+            <div class="toolParamHelp" style="clear: both;">
+                optional
+            </div>
+        </div>
+        <div style="clear: both"></div>
+   </div>
+</%def>
+
+<%def name="render_external_services( element_count, external_service_select_field )">
+    <div class="repeat-group-item">
+        <div class="form-row">
+            <label>${1+element_count}. External service:</label>
+            ${external_service_select_field.get_html()}
+        </div>
+        <div class="form-row">
+            <input type="submit" name="remove_external_service_button" value="Remove external service ${1+element_count}"/>
+        </div>
+        <div style="clear: both"></div>
+   </div>
+</%def>
diff --git a/templates/admin/request_type/create_request_type.mako b/templates/admin/request_type/create_request_type.mako
new file mode 100644
index 0000000..cd0bfb1
--- /dev/null
+++ b/templates/admin/request_type/create_request_type.mako
@@ -0,0 +1,65 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/request_type/common.mako" import="*" />
+
+%if not rt_info_widgets:
+    <br/><br/>
+    <ul class="manage-table-actions">
+        <li><a class="action-button" href="${h.url_for( controller='forms', action='create_form_definition' )}">Create new form</a></li>
+    </ul>
+    <br/<br/>
+    Creating a new request type requires two form definitions, a <b>Sequencing Request Form</b>,
+    and a <b>Sequencing Sample Form</b>, which must be created first.  Click the <b>Create new form</b>
+    button to create them.
+%endif
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if rt_info_widgets:
+    <form name="create_request_type" action="${h.url_for( controller='request_type', action='create_request_type' )}" method="post">
+        <div class="toolForm">
+            <div class="toolFormTitle">Create a new request type</div>
+            %for rt_info in rt_info_widgets:
+                <div class="form-row">
+                    <label>${rt_info['label'] | h}</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        ${rt_info['widget'].get_html()}
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+            %endfor
+        </div>
+        <p/>
+        <div class="toolForm">
+            <div class="toolFormTitle">Sample states defined for this request type</div>
+            <div class="form-row">
+                A request type requires at least one possible sample state so that it can be used to create a sequencing request.
+            </div>
+            %for index, info in enumerate( rt_states_widgets ):
+                ${render_state( index, info[0], info[1] )}
+            %endfor
+            <div class="form-row">
+                <input type="submit" name="add_state_button" value="Add state"/>
+            </div>
+        </div>
+        <p/>
+        <div class="toolForm">
+            <div class="toolFormTitle">External services</div>
+            <div class="form-row">
+                An external service can be a sequencer or any web-accessible application.  A request type can be associated with
+                multiple external services.
+            </div>
+            %for index, external_service_select_field in enumerate( external_service_select_fields_list ):
+                ${render_external_services( index, external_service_select_field )}
+            %endfor
+            <div class="form-row">
+                <input type="submit" name="add_external_service_button" value="Add external service"/>
+            </div>
+        </div>
+        <div class="form-row">
+            <input type="submit" name="create_request_type_button" value="Save"/>
+        </div>
+    </form>
+%endif
diff --git a/templates/admin/request_type/edit_request_type.mako b/templates/admin/request_type/edit_request_type.mako
new file mode 100644
index 0000000..e344457
--- /dev/null
+++ b/templates/admin/request_type/edit_request_type.mako
@@ -0,0 +1,100 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/request_type/common.mako" import="*" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+
+<% form_type = trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE %>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" id="request_type-${request_type.id}-popup" class="menubutton">Request type actions</a></li>
+    <div popupmenu="request_type-${request_type.id}-popup">
+        <li><a class="action-button" href="${h.url_for( controller='request_type', action='view_request_type', id=trans.security.encode_id( request_type.id ) )}">Browse request type</a></li>
+        %if not request_type.deleted:
+            %if not request_type.run_details:
+                <a class="action-button" href="${h.url_for( controller='request_type', action='add_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Use run details template</a>
+            %elif request_type.run_details:
+                <a class="action-button" href="${h.url_for( controller='request_type', action='edit_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Edit run details template</a>
+                <a class="action-button" href="${h.url_for( controller='request_type', action='delete_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Unuse run details template</a>
+            %endif
+            <li><a class="action-button" href="${h.url_for( controller='request_type', action='request_type_permissions', id=trans.security.encode_id( request_type.id ) )}">Edit permissions</a></li>
+            <li><a class="action-button" href="${h.url_for( controller='request_type', action='delete_request_type', id=trans.security.encode_id( request_type.id ) )}">Delete request type</a></li>
+        %endif
+        %if request_type.deleted:
+            <li><a class="action-button" href="${h.url_for( controller='request_type', action='undelete_request_type', id=trans.security.encode_id( request_type.id ) )}">Undelete request type</a></li>
+        %endif
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<form name="edit_request_type" action="${h.url_for( controller='request_type', action='edit_request_type', id=trans.security.encode_id( request_type.id ) )}" method="post" >
+    <div class="toolForm">
+        <div class="toolFormTitle">"Edit ${request_type.name | h}" request type</div>
+        <div class="form-row">
+            <label>Name:</label>
+            <input type="text" name="name" value="${request_type.name | }" size="40"/>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Description:</label>
+            <input type="text" name="desc" value="${request_type.desc | h}" size="40"/>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Sequencing request form definition:</label>
+            <a href="${h.url_for( controller='request_type', action='view_form_definition', id=trans.security.encode_id( request_type.request_form_id ) )}">${request_type.request_form.name | h}</a>
+            ## Hidden field needed by the __save_request_type() method
+            <input type="hidden" name="request_form_id" value="${trans.security.encode_id( request_type.request_form_id )}" size="40"/>
+        </div>       
+        <div class="form-row">
+            <label>Sample form definition:</label>
+            <a href="${h.url_for( controller='request_type', action='view_form_definition', id=trans.security.encode_id( request_type.sample_form_id ) )}">${request_type.sample_form.name | h}</a>
+            ## Hidden field needed by the __save_request_type() method
+            <input type="hidden" name="sample_form_id" value="${trans.security.encode_id( request_type.sample_form_id )}" size="40"/>
+        </div>
+    </div>
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Sample states defined for this request type</div>
+        %for element_count, state in enumerate( request_type.states ):
+            <div class="repeat-group-item">
+                <div class="form-row">
+                    <label>${1+element_count}. State name:</label>
+                    <input type="text" name="state_name_${trans.security.encode_id( state.id )}" value="${state.name | h}" size="40"/>
+                </div>
+                <div class="form-row">
+                    <label>Description:</label>
+                    <input type="text" name="state_desc_${trans.security.encode_id( state.id )}" value="${state.desc | h}" size="40"/>
+                    <div class="toolParamHelp" style="clear: both;">
+                        optional
+                    </div>
+                </div>
+                <div style="clear: both"></div>
+           </div>
+        %endfor
+    </div>
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">External services</div>
+        <div class="form-row">
+            This information is needed only if you will transfer datasets from the sequencer or another external service to a target Galaxy data library.
+            A request type can be associated with multiple external services.  Click the <b>Add external service</b> button below to add one to this request type.
+        </div>
+        %for index, external_service_select_field in enumerate( external_service_select_fields_list ):
+            ${render_external_services( index, external_service_select_field )}
+        %endfor
+        <div class="form-row">
+            <input type="submit" name="add_external_service_button" value="Add external service"/>
+        </div>
+    </div>
+    <div class="form-row">
+        <input type="submit" name="edit_request_type_button" value="Save"/>
+    </div>
+</form>
+
+%if widgets:
+    ${render_template_fields( cntrller='requests_admin', item_type='request_type', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, request_type_id=trans.security.encode_id( request_type.id ) )}
+%endif
diff --git a/templates/admin/request_type/grid.mako b/templates/admin/request_type/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/request_type/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/request_type/request_type_permissions.mako b/templates/admin/request_type/request_type_permissions.mako
new file mode 100644
index 0000000..8aa486e
--- /dev/null
+++ b/templates/admin/request_type/request_type_permissions.mako
@@ -0,0 +1,97 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<script type="text/javascript">
+    $( document ).ready( function () {
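+        // Negating the (always truthy) jQuery result returns false, cancelling
+        // the buttons' default submit so a click only moves options between lists.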
+        $( '.role_add_button' ).click( function() {
+            var action = this.id.substring( 0, this.id.lastIndexOf( '_add_button' ) )
+            var in_select = '#' + action + '_in_select';
+            var out_select = '#' + action + '_out_select';
+            return !$( out_select + ' option:selected' ).remove().appendTo( in_select );
+        });
+        $( '.role_remove_button' ).click( function() {
+            var action = this.id.substring( 0, this.id.lastIndexOf( '_remove_button' ) )
+            var in_select = '#' + action + '_in_select';
+            var out_select = '#' + action + '_out_select';
+            return !$( in_select + ' option:selected' ).remove().appendTo( out_select );
+        });
+        $( 'form#request_type_permissions' ).submit( function() {
+            $( '.in_select option' ).each(function( i ) {
+                $( this ).attr( "selected", "selected" );
+            });
+        });
+    });
+</script>
+
+<% form_type = trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE %>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" id="request_type-${request_type.id}-popup" class="menubutton">Request type actions</a></li>
+    <div popupmenu="request_type-${request_type.id}-popup">
+        <li><a class="action-button" href="${h.url_for( controller='request_type', action='view_request_type', id=trans.security.encode_id( request_type.id ) )}">Browse request type</a></li>
+        <li><a class="action-button" href="${h.url_for( controller='request_type', action='view_editable_request_type', id=trans.security.encode_id( request_type.id ) )}">Edit request type</a></li>
+        %if not request_type.deleted:
+            <li><a class="action-button" href="${h.url_for( controller='request_type', action='delete_request_type', id=trans.security.encode_id( request_type.id ) )}">Delete request type</a></li>
+            %if not request_type.run_details:
+                <a class="action-button" href="${h.url_for( controller='request_type', action='add_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Use run details template</a>
+            %elif request_type.run_details:
+                <a class="action-button" href="${h.url_for( controller='request_type', action='edit_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Edit run details template</a>
+                <a class="action-button" href="${h.url_for( controller='request_type', action='delete_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Unuse run details template</a>
+            %endif
+        %endif
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Manage access permissions on request type "${request_type.name | h}"</div>
+    <div class="toolFormBody">
+        <form name="request_type_permissions" id="request_type_permissions" action="${h.url_for( controller='request_type', action='request_type_permissions', id=trans.security.encode_id( request_type.id ) )}" method="post">
+            <div class="form-row">
+                <%
+                    current_actions = request_type.actions
+                    action = trans.app.security_agent.permitted_actions.REQUEST_TYPE_ACCESS
+                    all_roles = roles
+                    action_key = 'REQUEST_TYPE_ACCESS'
+                    
+                    # The deprecated `sets` module is unavailable in Python 3; use the builtin set.
+                    in_roles = set()
+                    for a in current_actions:
+                        if a.action == action.action:
+                            in_roles.add( a.role )
+                    out_roles = [ role for role in all_roles if role not in in_roles ]
+                %>
+                ${action.description | h}<br/><br/>
+                <div style="width: 100%; white-space: nowrap;">
+                    <div style="float: left; width: 50%;">
+                        Roles associated:<br/>
+                        <select name="${action_key}_in" id="${action_key}_in_select" class="in_select" style="max-width: 98%; width: 98%; height: 150px; font-size: 100%;" multiple>
+                            %for role in in_roles:
+                                <option value="${role.id}">${role.name | h}</option>
+                            %endfor
+                        </select> <br/>
+                        <div style="width: 98%; text-align: right"><input type="submit" id="${action_key}_remove_button" class="role_remove_button" value=">>"/></div>
+                    </div>
+                    <div style="width: 50%;">
+                        Roles not associated:<br/>
+                        <select name="${action_key}_out" id="${action_key}_out_select" style="max-width: 98%; width: 98%; height: 150px; font-size: 100%;" multiple>
+                            %for role in out_roles:
+                                <option value="${role.id}">${role.name | h}</option>
+                            %endfor
+                        </select> <br/>
+                        <input type="submit" id="${action_key}_add_button" class="role_add_button" value="<<"/>
+                    </div>
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="update_roles_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/request_type/view_request_type.mako b/templates/admin/request_type/view_request_type.mako
new file mode 100644
index 0000000..cc65909
--- /dev/null
+++ b/templates/admin/request_type/view_request_type.mako
@@ -0,0 +1,83 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+
+<% form_type = trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE %>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" id="request_type-${request_type.id}-popup" class="menubutton">Request type actions</a></li>
+    <div popupmenu="request_type-${request_type.id}-popup">
+        %if not request_type.deleted:
+            <li><a class="action-button" href="${h.url_for( controller='request_type', action='view_editable_request_type', id=trans.security.encode_id( request_type.id ) )}">Edit request type</a></li>
+            <li><a class="action-button" href="${h.url_for( controller='request_type', action='request_type_permissions', id=trans.security.encode_id( request_type.id ) )}">Edit permissions</a></li>
+            <li><a class="action-button" href="${h.url_for( controller='request_type', action='delete_request_type', id=trans.security.encode_id( request_type.id ) )}">Delete request type</a></li>
+            %if not request_type.run_details:
+                <a class="action-button" href="${h.url_for( controller='request_type', action='add_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Use run details template</a>
+            %elif request_type.run_details:
+                <a class="action-button" href="${h.url_for( controller='request_type', action='edit_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Edit run details template</a>
+                <a class="action-button" href="${h.url_for( controller='request_type', action='delete_template', cntrller='requests_admin', item_type='request_type', form_type=form_type, request_type_id=trans.security.encode_id( request_type.id ) )}">Unuse run details template</a>
+            %endif
+        %endif
+        %if request_type.deleted:
+            <li><a class="action-button" href="${h.url_for( controller='request_type', action='undelete_request_type', id=trans.security.encode_id( request_type.id ) )}">Undelete request type</a></li>
+        %endif
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">"${request_type.name | h}" request type</div>
+    <div class="form-row">
+        <label>Name:</label>
+        ${request_type.name | h}
+        <div style="clear: both"></div>
+    </div>
+    <div class="form-row">
+        <label>Description:</label>
+        ${request_type.desc | h}
+        <div style="clear: both"></div>
+    </div>
+    <div class="form-row">
+        <label>Sequencing request form definition:</label>
+        <a href="${h.url_for( controller='request_type', action='view_form_definition', id=trans.security.encode_id( request_type.request_form_id ) )}">${request_type.request_form.name | h}</a>
+    </div>       
+    <div class="form-row">
+        <label>Sample form definition:</label>
+        <a href="${h.url_for( controller='request_type', action='view_form_definition', id=trans.security.encode_id( request_type.sample_form_id ) )}">${request_type.sample_form.name | h}</a>
+    </div>
+</div>
+<p/>
+<div class="toolForm">
+    <div class="toolFormTitle">Sample states defined for this request type</div>
+    %for state in request_type.states:
+        <div class="form-row">
+            <label>${state.name | h}</label>
+            ${state.desc | h}
+        </div>
+        <div style="clear: both"></div>
+    %endfor
+</div>
+<p/>
+<div class="toolForm">
+    <div class="toolFormTitle">External services</div>
+    %if request_type.external_services:
+        %for index, external_service in enumerate( request_type.external_services ):
+            <div class="form-row">
+                <label><a href="${h.url_for( controller='external_service', action='view_external_service', id=trans.security.encode_id( external_service.id ) )}">${external_service.name | h}</a></label> 
+                ${external_service.get_external_service_type( trans ).name | h}
+            </div>
+        %endfor
+    %else:
+        <div class="form-row">
+            External service login information is not set.  Select the <b>Edit request type</b> option in the <b>Request type actions</b> menu.
+        </div>
+    %endif
+</div>
+
+%if widgets:
+    ${render_template_fields( cntrller='requests_admin', item_type='request_type', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, request_type_id=trans.security.encode_id( request_type.id ), editable=False )}
+%endif
diff --git a/templates/admin/requests/grid.mako b/templates/admin/requests/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/requests/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/requests/reject.mako b/templates/admin/requests/reject.mako
new file mode 100644
index 0000000..d742b9f
--- /dev/null
+++ b/templates/admin/requests/reject.mako
@@ -0,0 +1,37 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request_history', cntrller=cntrller, id=trans.security.encode_id(request.id) )}">View history</a>
+    </li>
+    <li>
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id(request.id) )}">Browse this request</a>
+    </li>
+</ul>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Reject sequencing request "${request.name | h}"</div>
+        <form name="event" action="${h.url_for( controller='requests_admin', action='reject_request', id=trans.security.encode_id( request.id ) )}" method="post" >
+            <div class="form-row">
+                Rejecting this request will move the request state to <b>Rejected</b>.
+            </div>
+            <div class="form-row">
+                <label>Comments</label>
+                <textarea name="comment" rows="5" cols="40"></textarea>
+                <div class="toolParamHelp" style="clear: both;">
+                    Required
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="reject_button" value="Reject"/>
+                <input type="submit" name="cancel_reject_button" value="Cancel"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/requests/rename_datasets.mako b/templates/admin/requests/rename_datasets.mako
new file mode 100644
index 0000000..c9d6174
--- /dev/null
+++ b/templates/admin/requests/rename_datasets.mako
@@ -0,0 +1,60 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% from galaxy.webapps.galaxy.controllers.requests_admin import build_rename_datasets_for_sample_select_field %>
+
+<h3>Rename datasets for Sample "${sample.name | h}"</h3>
+
+<ul class="manage-table-actions">
+    <li><a class="action-button" href="${h.url_for( controller='requests_admin', action='manage_datasets', sample_id=trans.security.encode_id( sample.id ) )}">Browse datasets</a></li>
+    <li><a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller='requests_admin', id=trans.security.encode_id( sample.request.id ) )}">Browse this request</a></li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <form name="rename_datasets" id="rename_datasets" action="${h.url_for( controller='requests_admin', action='rename_datasets', id_list=id_list, sample_id=trans.security.encode_id( sample.id ) )}" method="post" >
+
+        <table class="grid">
+            <thead>
+                <tr>
+                    <th>Prepend directory name</th>
+                    <th>Name</th>
+                    <th>Path on external service</th>
+                </tr>
+            <thead>
+            <tbody>
+                %for id in id_list:
+                   <% sample_dataset = trans.sa_session.query( trans.model.SampleDataset ).get( trans.security.decode_id( id ) ) %>
+                   %if sample_dataset.status == trans.app.model.SampleDataset.transfer_status.NOT_STARTED:
+                        <tr>
+                            <td>
+                                <% rename_datasets_for_sample_select_field = build_rename_datasets_for_sample_select_field( trans, sample_dataset ) %>
+                                ${rename_datasets_for_sample_select_field.get_html()}
+                            </td>
+                            <td>
+                                <input type="text" name="new_name_${trans.security.encode_id( sample_dataset.id ) }" value="${sample_dataset.name | h}" size="100"/>
+                            </td>
+                            <td>${sample_dataset.file_path}</td>
+                        </tr>
+                    %endif
+                %endfor
+            </tbody>
+        </table>
+        <br/>
+        <div class="form-row">
+            <div class="toolParamHelp" style="clear: both;">
+                A dataset name should contain only alphanumeric characters and underscores (_).
+                Any other character will be replaced by an underscore (_).
+            </div>
+        </div>
+        <div class="form-row">
+            <input type="submit" name="rename_datasets_button" value="Save"/>
+            <input type="submit" name="cancel_rename_datasets_button" value="Close"/>
+        </div>
+    </form>
+
+</div>
diff --git a/templates/admin/requests/sample_datasets_grid.mako b/templates/admin/requests/sample_datasets_grid.mako
new file mode 100644
index 0000000..7a0f35b
--- /dev/null
+++ b/templates/admin/requests/sample_datasets_grid.mako
@@ -0,0 +1,9 @@
+<%inherit file="/grid_base.mako"/>
+<%namespace file="/requests/common/common.mako" import="common_javascripts" />
+<%namespace file="/requests/common/common.mako" import="transfer_status_updater" />
+
+<%def name="load()">
+    ${parent.load()}
+    ${common_javascripts()}
+    ${transfer_status_updater()}
+</%def>
diff --git a/templates/admin/requests/view_sample_dataset.mako b/templates/admin/requests/view_sample_dataset.mako
new file mode 100644
index 0000000..6b16824
--- /dev/null
+++ b/templates/admin/requests/view_sample_dataset.mako
@@ -0,0 +1,79 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<br/><br/>
+
+<%
+    sample = sample_dataset.sample
+    is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+    can_manage_datasets = is_admin and sample.untransferred_dataset_files
+%>
+
+<ul class="manage-table-actions">
+    %if can_manage_datasets:
+        <li><a class="action-button" href="${h.url_for( controller='requests_admin', action='manage_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">Manage sample datasets</a></li>
+    %endif
+    <li><a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( sample.request.id ) )}">Browse this request</a></li>
+</ul>
+
+<div class="toolForm">
+    <div class="toolFormTitle">"${sample.name | h}" Dataset</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Name:</label>
+            <div style="float: left; width: 250px; margin-right: 10px;">
+                ${sample_dataset.name | h}
+            </div>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>External service:</label>
+            <div style="float: left; width: 250px; margin-right: 10px;">
+                ${sample_dataset.external_service.name | h} (${sample_dataset.external_service.get_external_service_type( trans ).name | h})
+            </div>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>File path:</label>
+            <div style="float: left; width: 250px; margin-right: 10px;">
+                ${sample_dataset.file_path}
+            </div>
+            <div class="toolParamHelp" style="clear: both;">
+                This file is contained in a sub-directory of the data directory configured for the external service.
+            </div>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Size:</label>
+            <div style="float: left; width: 250px; margin-right: 10px;">
+                ${sample_dataset.size}
+            </div>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Date this dataset was selected for this sample:</label>
+            <div style="float: left; width: 250px; margin-right: 10px;">
+                ${sample_dataset.create_time}
+            </div>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Transfer status:</label>
+            <div style="float: left; width: 250px; margin-right: 10px;">
+                ${sample_dataset.status}
+            </div>
+            <div style="clear: both"></div>
+        </div>
+        %if sample_dataset.status == trans.app.model.SampleDataset.transfer_status.ERROR:
+            <div class="form-row">
+                 <label>Transfer error:</label>
+                ${sample_dataset.error_msg}
+            </div>
+            <div style="clear: both"></div>
+        %endif
+    </div>
+</div>
diff --git a/templates/admin/review_tool_migration_stages.mako b/templates/admin/review_tool_migration_stages.mako
new file mode 100644
index 0000000..9fe048f
--- /dev/null
+++ b/templates/admin/review_tool_migration_stages.mako
@@ -0,0 +1,126 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+<%
+from markupsafe import escape
+%>
+<div class="toolForm">
+    <div class="toolFormTitle">Tool migrations that can be performed on this Galaxy instance</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <p>
+                The list of tool migration stages below, displayed most recent to oldest, provides information about the repositories in the
+                main Galaxy tool shed that will be cloned at each stage if you run the shell command for that stage.  This enables you to execute
+                the migration process for any stage at any time.
+            </p>
+            <p>
+                Keep in mind that any tool from a repository that you want displayed in the Galaxy tool panel after the repository is
+                installed must be defined in the <b>tool_conf.xml</b> (or equivalent) config file before the migration process for a
+                stage is executed.  Executing a migration process multiple times will have no effect unless the repositories associated with that stage have been
+                uninstalled prior to the execution of the migration process.
+            </p>
+            <p>
+                When you initiate a migration process, the associated repositories will be cloned from the Galaxy tool shed at
+                <a href="http://toolshed.g2.bx.psu.edu" target="_blank">http://toolshed.g2.bx.psu.edu</a>.  The location in which the tool repositories
+                will be installed is the value of the 'tool_path' attribute in the <toolbox> tag of the file named ./migrated_tool_conf.xml
+                (i.e., <b><toolbox tool_path="../shed_tools"></b>).  The default location setting is <b>'../shed_tools'</b>, which may be problematic
+                for some cluster environments, so change it before you execute the installation process if necessary.  The configured location
+                must be outside of the Galaxy installation directory or it must be in a sub-directory protected by a properly configured <b>.hgignore</b>
+                file if the directory is within the Galaxy installation directory hierarchy.  This is because tool shed repositories will be installed using
+                mercurial's clone feature, which creates .hg directories and associated mercurial repository files.  Not having <b>.hgignore</b> properly
+                configured could result in undesired behavior when modifying or updating your local Galaxy instance or the tool shed repositories if they are
+                in directories that pose conflicts.  See mercurial's .hgignore documentation at
+                <a href="http://mercurial.selenic.com/wiki/.hgignore" target="_blank">http://mercurial.selenic.com/wiki/.hgignore</a> for details.
+            </p>
+        </div>
+        <table class="grid">
+            <% from tool_shed.util.basic_util import to_html_string %>
+            %for stage in migration_stages_dict.keys():
+                <%
+                    migration_command = 'sh ./scripts/migrate_tools/%04d_tools.sh' % stage
+                    install_dependencies = '%s install_dependencies' % migration_command
+                    migration_tup = migration_stages_dict[ stage ]
+                    migration_info, repo_name_dependency_tups = migration_tup
+                    repository_names = []
+                    for repo_name_dependency_tup in repo_name_dependency_tups:
+                        repository_name, tool_dependencies = repo_name_dependency_tup
+                        if repository_name not in repository_names:
+                            repository_names.append( repository_name )
+                    if repository_names:
+                        repository_names.sort()
+                        repository_names = ', '.join( repository_names )
+                %>
+                <tr><td bgcolor="#D8D8D8"><b>Tool migration stage ${stage} - repositories: ${repository_names|h}</b></td></tr>
+                <tr>
+                    <td bgcolor="#FFFFCC">
+                        <div class="form-row">
+                            <p>${to_html_string(migration_info)} <b>Run commands from the Galaxy installation directory!</b></p>
+                            <p>
+                                %if tool_dependencies:
+                                    This migration stage includes tools that have tool dependencies that can be automatically installed.  To install them, run:<br/>
+                                    <b>${install_dependencies|h}</b><br/><br/>
+                                    To skip tool dependency installation run:<br/>
+                                    <b>${migration_command|h}</b>
+                                %else:
+                                    <b>${migration_command|h}</b>
+                                %endif
+                            </p>
+                        </div>
+                    </td>
+                </tr>
+                %for repo_name_dependency_tup in repo_name_dependency_tups:
+                    <% repository_name, tool_dependencies = repo_name_dependency_tup %>
+                    <tr>
+                        <td bgcolor="#DADFEF">
+                            <div class="form-row">
+                                <b>Repository:</b> ${repository_name|h}
+                            </div>
+                        </td>
+                    </tr>
+                    %if tool_dependencies:
+                        <tr>
+                            <td>
+                                <div class="form-row">
+                                    <b>Tool dependencies</b>
+                                </div>
+                            </td>
+                        </tr>
+                        %for tool_dependencies_tup in tool_dependencies:
+                            <%
+                                tool_dependency_name = escape( tool_dependencies_tup[0] )
+                                tool_dependency_version = escape( tool_dependencies_tup[1] )
+                                tool_dependency_type = escape( tool_dependencies_tup[2] )
+                                installation_requirements = escape( tool_dependencies_tup[3] ).replace( '\n', '<br/>' )
+                            %>
+                            <tr>
+                                <td>
+                                    <div class="form-row">
+                                        <b>Name:</b> ${tool_dependency_name} <b>Version:</b> ${tool_dependency_version} <b>Type:</b> ${tool_dependency_type}
+                                    </div>
+                                    <div class="form-row">
+                                        <b>Requirements and installation information:</b><br/>
+                                        ${installation_requirements}
+                                    </div>
+                                </td>
+                            </tr>
+                        %endfor
+                    %else:
+                        <tr>
+                            <td>
+                                <div class="form-row">
+                                    No tool dependencies have been defined for this repository.
+                                </div>
+                            </td>
+                        </tr>
+                    %endif
+                %endfor
+            %endfor
+        </table>
+    </div>
+</div>
diff --git a/templates/admin/tool_errors.mako b/templates/admin/tool_errors.mako
new file mode 100644
index 0000000..7843f00
--- /dev/null
+++ b/templates/admin/tool_errors.mako
@@ -0,0 +1,27 @@
+<%inherit file="/base.mako"/>
+
+<%def name="title()">Tool Errors</%def>
+
+<h2>Tool Errors</h2>
+
+<p>
+Internal Tool Error log
+</p>
+
+<table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+<tr class="header">
+<td>Time</td>
+<td>Phase</td>
+<td>File</td>
+<td>Error</td>
+</tr>
+%for error in tool_errors:
+<tr>
+    <td>${error['time']}</td>
+    <td>${error['phase']}</td>
+    <td>${error['file']}</td>
+    <td>${error['error']}</td>
+</tr>
+%endfor
+</table>
+
diff --git a/templates/admin/tool_shed_repository/browse_category.mako b/templates/admin/tool_shed_repository/browse_category.mako
new file mode 100644
index 0000000..39a6288
--- /dev/null
+++ b/templates/admin/tool_shed_repository/browse_category.mako
@@ -0,0 +1,81 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<script type="text/javascript">
+var repositories = ${repositories};
+var preview_repo_url = '${h.url_for(controller='admin_toolshed', action='preview_repository', tool_shed_url=tool_shed_url)}';
+$(function() {
+    require(["libs/jquery/jquery-ui"], function() {
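+        // Wire up jQuery UI autocomplete; choosing a suggestion navigates to that repository's preview page.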
+        $( "#repository_search" ).autocomplete({
+            source: repositories,
+            select: function(event, ui) {
+                window.location.href = preview_repo_url + '&tsr_id=' + ui.item.value;
+            },
+            focus: function( event, ui ) {
+                event.preventDefault();
+                $(this).val(ui.item.label);
+            }
+        });
+    });
+});
+</script>
+<style type="text/css">
+.ui-autocomplete {
+    width: 30%;
+    border: 1px solid #000;
+    background: #fff;
+}
+.ui-state-focus {
+    background: #bbf;
+}
+</style>
+<div id="standard-search" style="height: 2em; margin: 1em;">
+    <span class="ui-widget" >
+        <input class="search-box-input" id="repository_search" name="search" placeholder="Search repositories by name or id" size="60" type="text" />
+    </span>
+</div>
+
+<div style="clear: both; margin-top: 1em;">
+    <h2>Repositories in ${category['name']}</h2>
+    <table class="grid">
+        <thead id="grid-table-header">
+            <tr>
+                <th style="width: 10%;">Owner</th>
+                <th style="width: 15%;">Name</th>
+                <th>Synopsis</th>
+                <th style="width: 10%;">Type</th>
+                <th style="width: 5%;">Certified</th>
+            </tr>
+        </thead>
+    %for repository in category['repositories']:
+        <tr>
+            <td>${repository['owner']}</td>
+            <td>
+                <a href="${h.url_for( controller='admin_toolshed', action='preview_repository', tool_shed_url=tool_shed_url, tsr_id=repository['id'] )}">${repository['name']}</a>
+            </td>
+            <td>${repository['description']}</td>
+            <td>${repository['type']}</td>
+            %if 'tools_functionally_correct' in repository['metadata'] and repository['metadata']['tools_functionally_correct']:
+                <td>Yes</td>
+            %else:
+                <td>No</td>
+            %endif
+        </tr>
+    %endfor
+    </table>
+<div>
diff --git a/templates/admin/tool_shed_repository/browse_repository.mako b/templates/admin/tool_shed_repository/browse_repository.mako
new file mode 100644
index 0000000..4b225a6
--- /dev/null
+++ b/templates/admin/tool_shed_repository/browse_repository.mako
@@ -0,0 +1,36 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "dynatree_skin/ui.dynatree" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery-ui", "libs/jquery/jquery.dynatree" )}
+    ${browse_files(repository.name, repository.repo_files_directory(trans.app))}
+</%def>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Browse ${repository.name|h} revision ${repository.changeset_revision} files</div>
+    <div class="toolFormBody">
+        <div class="form-row" >
+            <label>Contents:</label>
+            <div id="tree" >
+                Loading...
+            </div>
+        </div>
+        <div class="form-row">
+            <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>
+        </div>
+    </div>
+</div>
diff --git a/templates/admin/tool_shed_repository/browse_tool_dependency.mako b/templates/admin/tool_shed_repository/browse_tool_dependency.mako
new file mode 100644
index 0000000..6555d7a
--- /dev/null
+++ b/templates/admin/tool_shed_repository/browse_tool_dependency.mako
@@ -0,0 +1,66 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "dynatree_skin/ui.dynatree" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery-ui", "libs/jquery/jquery.dynatree" )}
+    ${browse_files(tool_dependency.name, tool_dependency.installation_directory( trans.app ))}
+</%def>
+
+<% tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ] %>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Browse tool dependency ${tool_dependency.name|h} installation directory</div>
+    <div class="toolFormBody">
+        <div class="form-row" >
+            <label>Tool shed repository:</label>
+            ${repository.name|h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row" >
+            <label>Tool shed repository changeset revision:</label>
+            ${repository.changeset_revision|h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row" >
+            <label>Tool dependency status:</label>
+            ${tool_dependency.status|h}
+            <div style="clear: both"></div>
+        </div>
+        %if tool_dependency.in_error_state:
+            <div class="form-row" >
+                <label>Tool dependency installation error:</label>
+                ${tool_dependency.error_message|h}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        <div class="form-row" >
+            <label>Tool dependency installation directory:</label>
+            ${tool_dependency.installation_directory( trans.app )|h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row" >
+            <label>Contents:</label>
+            <div id="tree" >
+                Loading...
+            </div>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>
+        </div>
+    </div>
+</div>
diff --git a/templates/admin/tool_shed_repository/browse_toolsheds.mako b/templates/admin/tool_shed_repository/browse_toolsheds.mako
new file mode 100644
index 0000000..036c1a9
--- /dev/null
+++ b/templates/admin/tool_shed_repository/browse_toolsheds.mako
@@ -0,0 +1,918 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "dynatree_skin/ui.dynatree" )}
+<style type="text/css">
+div.expandLink {
+    float: left;
+    padding-left: 2px;
+    background-color: #d8d8d8;
+    width: 100%;
+}
+div.changeset {
+    padding: 5px 10px 5px 10px;
+}
+div.container {
+    max-width: 100%;
+    width: 100%;
+}
+.container-table {
+    padding-top: 1em;
+}
+ul.jstree-container-ul {
+    margin-top: 1em;
+}
+</style>
+</%def>
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+<script type="text/javascript">
+<%
+import json
+sheds = []
+for name, url in trans.app.tool_shed_registry.tool_sheds.items():
+    sheds.append(dict(name=name, url=url))
+tool_sheds = json.dumps(sheds)
+%>
+/*
+ *
+ * Define some global variables, data, and templates
+ *
+ */
+var has_repo_dependencies = false;
+var valid_tool_dependencies = Array();
+var valid_tools = Array();
+var repository_data = Object();
+var tool_sheds = JSON.parse('${tool_sheds}');
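+// NOTE: in the _.template() strings below, "<\%" keeps Mako from treating "<%"
+// as one of its own template tags; JavaScript then reduces the unrecognized
+// escape "\%" to "%", so Underscore receives its usual "<%" delimiters at runtime.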
+repository_queue_tab = _.template([
+    '<li class="nav-tab" role="presentation" id="repository_installation_queue">',
+        '<a href="#repository_queue" data-toggle="tab">Repository Installation Queue</a>',
+    '</li>',
+].join(''))
+repository_queue_template = _.template([
+    '<div class="tab-pane" id="repository_queue">',
+        '<table id="queued_repositories" class="grid" border="0" cellpadding="2" cellspacing="2" width="100%">',
+            '<thead id="grid-table-header">',
+                '<tr>',
+                    '<th class="datasetRow"><input class="btn btn-primary" type="submit" id="install_all" name="install_all" value="Install all" /></th>',
+                    '<th class="datasetRow"><input class="btn btn-primary" type="submit" id="clear_queue" name="clear_queue" value="Clear queue" /></th>',
+                    '<th class="datasetRow">ToolShed</th>',
+                    '<th class="datasetRow">Name</th>',
+                    '<th class="datasetRow">Owner</th>',
+                    '<th class="datasetRow">Revision</th>',
+                '</tr>',
+            '</thead>',
+            '<tbody>',
+                '<\% _.each(repositories, function(repository) { \%>',
+                    '<tr id="queued_repository_<\%= repository.id \%>">',
+                        '<td class="datasetRow">',
+                            '<input class="btn btn-primary install_one" data-repokey="<\%= repository.queue_key \%>" type="submit" id="install_repository_<\%= repository.id \%>" name="install_repository" value="Install now" />',
+                        '</td>',
+                        '<td class="datasetRow">',
+                            '<input class="btn btn-primary remove_one" data-repokey="<\%= repository.queue_key \%>" type="submit" id="unqueue_repository_<\%= repository.id \%>" name="unqueue_repository" value="Remove from queue" />',
+                        '</td>',
+                        '<td class="datasetRow"><\%= repository.tool_shed_url \%></td>',
+                        '<td class="datasetRow"><\%= repository.name \%></td>',
+                        '<td class="datasetRow"><\%= repository.owner \%></td>',
+                        '<td class="datasetRow"><\%= repository.changeset \%></td>',
+                    '</tr>',
+                '<\% }); \%>',
+            '</tbody>',
+        '</table>',
+    '</div>',
+].join(''));
+repository_details_template = _.template([
+    '<div class="tab-pane" id="repository_details">',
+        '<h2 style="font-weight: normal;">Repository information for <strong><\%= repository.name \%></strong> from <strong><\%= repository.owner \%></strong></h2>',
+        '<form id="repository_installation" name="install_repository" method="post" action="${h.url_for(controller='/api/tool_shed_repositories', action='install', async=True)}">',
+            '<input type="hidden" id="repositories" name="<\%= current_metadata.repository.id \%>" value="ID" />',
+            '<input type="hidden" id="tool_shed_url" name="tool_shed_url" value="<\%= tool_shed_url \%>" />',
+            '<div class="toolForm">',
+                '<div class="toolFormTitle">Changeset</div>',
+                '<div class="toolFormBody changeset">',
+                    '<select id="changeset" name="changeset">',
+                        '<\% _.each(Object.keys(repository.metadata), function(changeset) { \%>',
+                            '<\% if (changeset == current_changeset) { var selected = "selected "; } else { var selected = ""; } \%>',
+                            '<option <\%= selected \%>data-changeset="<\%= changeset \%>" value="<\%= changeset.split(":")[1] \%>"><\%= changeset \%></option>',
+                        '<\% }); \%>',
+                    '</select>',
+                    '<input class="btn btn-primary" data-tsrid="<\%= current_metadata.repository.id \%>" type="submit" id="install_repository" name="install_repository" value="Install this revision now" />',
+                    '<input class="btn btn-primary" type="button" id="queue_install" name="queue_install" value="Install this revision later" />',
+                    '<div class="toolParamHelp" style="clear: both;">Please select a revision and review the settings below before installing.</div>',
+                '</div>',
+                '<div class="toolParamHelp" style="clear: both;">',
+                'Please select a revision and review the settings below before installing.',
+                '</div>',
+            '</div>',
+            '<\%= shed_tool_conf \%>',
+            '<\%= tool_panel_section \%>',
+            '<div class="toolFormTitle">Contents of this repository at revision <strong id="current_changeset"><\%= current_changeset \%></strong></div>',
+            '<div class="toolFormBody">',
+                '<\% if (current_metadata.has_repository_dependencies) { \%>',
+                    '<p id="install_repository_dependencies_checkbox">',
+                        '<input type="checkbox" checked id="install_repository_dependencies" />',
+                        '<label for="install_repository_dependencies">Install repository dependencies</label>',
+                    '</p>',
+                    '<\% current_metadata.repository_dependency_template = repository_dependency_template; \%>',
+                    '<div class="tables container-table" id="repository_dependencies">',
+                        '<div class="expandLink">',
+                            '<a class="toggle_folder" data_target="repository_dependencies_table">',
+                                'Repository dependencies – <em>installation of these additional repositories is required</em>',
+                            '</a>',
+                        '</div>',
+                        '<\%= repository_dependencies_template(current_metadata) \%>',
+                    '</div>',
+                '<\% } \%>',
+                '<\% if (current_metadata.includes_tool_dependencies) { \%>',
+                    '<p id="install_resolver_dependencies_checkbox">',
+                        '<input type="checkbox" checked id="install_resolver_dependencies" />',
+                        '<label for="install_resolver_dependencies">Install resolver dependencies</label>',
+                    '</p>',
+                    '<p id="install_tool_dependencies_checkbox">',
+                        '<input type="checkbox" checked id="install_tool_dependencies" />',
+                        '<label for="install_tool_dependencies">Install tool dependencies</label>',
+                    '</p>',
+                    '<div class="tables container-table" id="tool_dependencies">',
+                        '<div class="expandLink">',
+                            '<a class="toggle_folder" data_target="tool_dependencies_table">',
+                                'Tool dependencies – <em>repository tools require handling of these dependencies</em>',
+                            '</a>',
+                        '</div>',
+                        '<table class="tables container-table" id="tool_dependencies_table" border="0" cellpadding="2" cellspacing="2" width="100%">',
+                            '<thead>',
+                                '<tr style="display: table-row;" class="datasetRow" parent="0" id="libraryItem-rt-f9cad7b01a472135">',
+                                    '<th style="padding-left: 40px;">Name</th>',
+                                    '<th>Version</th>',
+                                    '<th>Type</th>',
+                                '</tr>',
+                            '</thead>',
+                            '<tbody id="tool_deps">',
+                                '<\% _.each(tool_dependencies[current_changeset], function(dependency) { \%>',
+                                    '<tr class="datasetRow tool_dependency_row" style="display: table-row;">',
+                                        '<td style="padding-left: 40px;">',
+                                        '<\%= dependency.name \%></td>',
+                                        '<td><\%= dependency.version \%></td>',
+                                        '<td><\%= dependency.type \%></td>',
+                                    '</tr>',
+                                '<\% }); \%>',
+                            '</tbody>',
+                        '</table>',
+                    '</div>',
+                '<\% } \%>',
+                '<\% if (current_metadata.includes_tools_for_display_in_tool_panel) { \%>',
+                    '<div class="tables container-table" id="tools_toggle">',
+                        '<div class="expandLink">',
+                            '<a class="toggle_folder" data_target="valid_tools">',
+                                'Valid tools – <em>click the name to preview the tool and use the pop-up menu to inspect all metadata</em>',
+                            '</a>',
+                        '</div>',
+                        '<table class="tables container-table" id="valid_tools" border="0" cellpadding="2" cellspacing="2" width="100%">',
+                            '<thead>',
+                                '<tr style="display: table-row;" class="datasetRow" parent="0" id="libraryItem-rt-f9cad7b01a472135">',
+                                    '<th style="padding-left: 40px;">Name</th>',
+                                    '<th>Description</th>',
+                                    '<th>Version</th>',
+                                    '<th>Tool Panel Section</th>',
+                                '</tr>',
+                            '</thead>',
+                            '<tbody id="tools_in_repo">',
+                                    '<\% _.each(tools[current_changeset], function(tool) { \%>',
+                                        '<tr id="libraryItem" class="tool_row" style="display: table-row;" style="width: 15%">',
+                                            '<td style="padding-left: 40px;">',
+                                                '<div id="tool" class="menubutton split popup" style="float: left;">',
+                                                    '<a class="view-info"><\%= tool.name \%></a>',
+                                                '</div>',
+                                            '</td>',
+                                            '<td><\%= tool.description \%></td>',
+                                            '<td style="width: 15%"><\%= tool.version \%></td>',
+                                            '<td style="width: 35%">',
+                                                '<div class="tool_tps_switcher tps_switcher_<\%= tool.clean \%>" id="per_tool_tps_container_<\%= tool.clean \%>">',
+                                                    '<span id="tps_button_<\%= tool.clean \%>" >',
+                                                        '<input class="btn btn-primary show_tool_tps_selector" id="select_tps_button_<\%= tool.clean \%>" data-toolguid="<\%= tool.guid \%>" data-toolname="<\%= tool.clean \%>" type="button" value="Specify panel section" />',
+                                                    '</span>',
+                                                '</div>',
+                                            '</td>',
+                                        '</tr>',
+                                    '<\% }); \%>',
+                            '</tbody>',
+                        '</table>',
+                    '</div>',
+                '<\% } \%>',
+            '</div>',
+        '</form>',
+    '</div>',
+].join(''));
+shed_tool_conf = _.template([
+    '<div class="toolFormTitle">Shed tool configuration file:</div>',
+    '<div class="toolFormBody">',
+    '<div class="form-row">',
+        '<\%= stc_html \%>',
+        '<div class="toolParamHelp" style="clear: both;">Select the file whose <b>tool_path</b> setting you want used for installing repositories.</div>',
+    '</div>',
+].join(''));
+var tool_dependency_template = _.template([
+    '<\% if (has_repository_dependencies) { \%>',
+        '<\% _.each(repository_dependencies, function(dependency) { \%>',
+            '<\% if (dependency.includes_tool_dependencies) { \%>',
+                '<\% dependency.tool_dependency_template = tool_dependency_template \%>',
+                '<\%= tool_dependency_template(dependency) \%>',
+            '<\% } \%>',
+        '<\% }); \%>',
+    '<\% } \%>',
+].join(''));
+repository_dependencies_template = _.template([
+    '<div class="tables container-table" id="repository_dependencies_table">',
+        '<span class="repository_dependency_row"><p>Repository installation requires the following:</p></span>',
+        '<ul id="repository_deps">',
+            '<\% if (has_repository_dependencies) { \%>',
+                '<\% _.each(repository_dependencies, function(dependency) { \%>',
+                    '<\% dependency.repository_dependency_template = repository_dependency_template; \%>',
+                    '<\%= repository_dependency_template(dependency) \%>',
+                '<\% }); \%>',
+            '<\% } \%>',
+        '</ul>',
+    '</div>'].join(''));
+repository_dependency_template = _.template([
+    '<li id="metadata_<\%= id \%>" class="datasetRow repository_dependency_row" style="display: table-row;">',
+        'Repository <b><\%= repository.name \%></b> revision <b><\%= changeset_revision \%></b> owned by <b><\%= repository.owner \%></b>',
+    '</li>',
+    '<\% if (has_repository_dependencies) { \%>',
+        '<\% _.each(repository_dependencies, function(dependency) { \%>',
+            '<\% dependency.repository_dependency_template = repository_dependency_template; \%>',
+            '<li id="repository_<\%= id \%>_deps">',
+                '<\%= repository_dependency_template(dependency) \%>',
+            '</li>',
+        '<\% }); \%>',
+    '<\% } \%>'
+].join(''));
+categories_in_shed = _.template([
+    '<div class="tab-pane" id="list_categories">',
+        '<div style="clear: both; margin-top: 1em;">',
+            '<h2>Repositories by Category</h2>',
+            '<table class="grid">',
+                '<thead id="grid-table-header">',
+                    '<tr>',
+                        '<th>Name</th>',
+                        '<th>Description</th>',
+                        '<th>Repositories</th>',
+                    '</tr>',
+                '</thead>',
+                '<\% _.each(categories, function(category) { \%>',
+                    '<tr>',
+                        '<td>',
+                            '<button class="category-selector" data-categoryid="<\%= category.id \%>"><\%= category.name \%></button>',
+                        '</td>',
+                        '<td><\%= category.description \%></td>',
+                        '<td><\%= category.repositories \%></td>',
+                    '</tr>',
+                '<\% }); \%>',
+            '</table>',
+        '</div>',
+    '</div>',
+].join(''));
+repositories_in_category = _.template([
+    '<div class="tab-pane" id="list_repositories">',
+        '<div id="standard-search" style="height: 2em; margin: 1em;">',
+            '<span class="ui-widget" >',
+                '<input class="search-box-input" id="repository_search" name="search" placeholder="Search repositories by name or id" size="60" type="text" />',
+            '</span>',
+        '</div>',
+        '<div style="clear: both; margin-top: 1em;">',
+            '<h2>Repositories in <\%= name \%></h2>',
+            '<table class="grid">',
+                '<thead id="grid-table-header">',
+                    '<tr>',
+                        '<th style="width: 10%;">Owner</th>',
+                        '<th style="width: 15%;">Name</th>',
+                        '<th>Synopsis</th>',
+                        '<th style="width: 10%;">Type</th>',
+                        '<th style="width: 5%;">Certified</th>',
+                    '</tr>',
+                '</thead>',
+                '<\% _.each(repositories, function(repository) { \%>',
+                    '<tr>',
+                        '<td><\%= repository.owner \%></td>',
+                        '<td>',
+                            '<button class="repository-selector" data-tsrid="<\%= repository.id \%>"><\%= repository.name \%></button>',
+                        '</td>',
+                        '<td><\%= repository.description \%></td>',
+                        '<td><\%= repository.type \%></td>',
+                        '<td><\%= repository.metadata.tools_functionally_correct \%></td>',
+                    '</tr>',
+                '<\% }); \%>',
+            '</table>',
+        '</div>',
+    '</div>',
+].join(''));
+tool_sheds_template = _.template([
+    '<div class="tab-pane" id="list_toolsheds">',
+        '<div class="toolFormTitle">Accessible Galaxy tool sheds</div>',
+        '<div class="toolFormBody">',
+            '<div class="form-row">',
+                '<table class="grid">',
+                    '<\% _.each(tool_sheds, function(shed) { \%>',
+                        '<tr class="libraryTitle">',
+                            '<td>',
+                                '<div style="float: left; margin-left: 1px;" class="menubutton split">',
+                                    '<a class="view-info shed-selector" data-shedurl="<\%= shed.url \%>" href="#"><\%= shed.name \%></a>',
+                                '</div>',
+                            '</td>',
+                        '</tr>',
+                    '<\% }); \%>',
+                '</table>',
+            '</div>',
+            '<div style="clear: both"></div>',
+        '</div>',
+    '</div>',
+].join(''));
+tps_selection_template = _.template([
+    '<div class="tab-pane" id="select_tps">',
+        '<select name="<\%= name \%>" id="<\%= id \%>">',
+            '<\% _.each(sections, function(section) { \%>',
+                '<option value="<\%= section.id \%>"><\%= section.name \%></option>',
+            '<\% }); \%>',
+        '</select>',
+        '<input class="btn btn-primary" type="button" id="create_new" value="Create new" />',
+        '<div class="toolParamHelp" style="clear: both;">',
+            'Select an existing tool panel section to contain the installed tools (optional).',
+        '</div>',
+    '</div>'].join(''));
+tps_creation_template = _.template([
+    '<div class="form-row" id="new_tps">',
+        '<input id="new_tool_panel_section" name="new_tool_panel_section" type="textfield" value="" size="40"/>',
+        '<input class="btn btn-primary" type="button" id="select_existing" value="Select existing" />',
+        '<div class="toolParamHelp" style="clear: both;">',
+            'Add a new tool panel section to contain the installed tools (optional).',
+        '</div>',
+    '</div>'].join(''));
+var tps_picker_template = _.template([
+    '<span id="tps_button_<\%= tool.clean \%>" >',
+        '<input class="btn btn-primary show_tool_tps_selector" id="select_tps_button_<\%= tool.clean \%>" data-toolguid="<\%= tool.guid \%>" type="button" value="Specify panel section" />',
+    '</span>',
+].join(''));
+var select_tps_template = _.template([
+    '<div id="select_tps_<\%= tool.clean \%>" class="tps_creator">',
+        '<select style="width: 30em;" data-toolguid="<\%= tool.guid \%>" class="tool_panel_section_picker" name="tool_panel_section_id" id="tool_panel_section_select_<\%= tool.clean \%>">',
+        '</select>',
+        '<input id="per_tool_create_<\%= tool.clean \%>" class="btn btn-primary" data-toolguid="<\%= tool.guid \%>" value="Create new" id="create_new_<\%= tool.clean \%>" type="button">',
+        '<input id="cancel_<\%= tool.clean \%>" class="btn btn-primary" data-toolguid="<\%= tool.guid \%>" value="Cancel" type="button">',
+        '<div style="clear: both;" class="toolParamHelp"></div>',
+    '</div>',
+].join(''));
+var create_tps_template = _.template([
+    '<div id="new_tps_<\%= tool.clean \%>" class="form-row">',
+        '<input data-toolguid="<\%= tool.guid \%>" class="tool_panel_section_picker" size="40" name="new_tool_panel_section" id="new_tool_panel_section_<\%= tool.clean \%>" type="text">',
+        '<input id="per_tool_select_<\%= tool.clean \%>" class="btn btn-primary" data-toolguid="<\%= tool.guid \%>" value="Select existing" id="select_existing_<\%= tool.clean \%>" type="button">',
+        '<input id="cancel_<\%= tool.clean \%>" class="btn btn-primary" data-toolguid="<\%= tool.guid \%>" value="Cancel" type="button">',
+    '</div>',
+].join(''));
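+// The three per-tool templates above cycle into one another: tps_picker_template
+// renders the "Specify panel section" button, select_tps_template swaps in a
+// <select> of existing sections, and create_tps_template swaps in a free-text
+// input for a new section; see show_panel_button(), show_panel_selector(),
+// show_tool_create() and show_tool_select() further down.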
+
+function array_contains_dict(array, dict) {
+    for (var i in array) {
+        var needle = array[i];
+        var found = true;
+        for (var key in dict) {
+            if (needle[key] !== dict[key]) {
+                found = false;
+            }
+        }
+        if (found) { return true; }
+    }
+    return false;
+}
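+// Illustrative use: the comparison only checks the dict's own keys, so extra
+// keys on an array element do not prevent a match (hypothetical values):
+//   array_contains_dict([{name: 'samtools', version: '1.2', type: 'package'}],
+//                       {name: 'samtools', version: '1.2'});  // => true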
+function bind_category_events() {
+    $('.repository-selector').click(function() {
+        $('#repository_details').replaceWith('<div class="tab-pane" id="repository_details"><p><img src="/static/images/jstree/throbber.gif" alt="Loading repository..." /></p></div>');
+        var tsr_id = $(this).attr('data-tsrid');
+        var shed_url = $('#tab_contents').attr('data-shedurl');
+        var api_url = '${h.url_for(controller='/api/tool_shed_repositories', action="shed_repository")}';
+        var params = {"tool_shed_url": shed_url, "tsr_id": tsr_id};
+        $.get(api_url, params, function(data) {
+            var changesets = Object.keys(data.repository.metadata);
+            data.tool_shed_url = shed_url;
+            data.current_changeset = changesets[changesets.length - 1];
+            data.current_metadata = data.repository.metadata[data.current_changeset];
+            data.repository_dependencies_template = repository_dependencies_template;
+            data.repository_dependency_template = repository_dependency_template;
+            data.tps_selection_template = tps_selection_template;
+            repository_data = data;
+            repository_data.shed_tool_conf = shed_tool_conf({'stc_html': repository_data.shed_conf});
+            repository_data.tool_panel_section = '';
+            if (repository_data.repository.metadata[data.current_changeset].includes_tools_for_display_in_tool_panel) {
+                repository_data.tool_panel_section = tps_selection_template(repository_data.panel_section_dict)
+            }
+            $('#repository_details').replaceWith(repository_details_template(data));
+            $('#repo_info_tab').click();
+            require(["libs/jquery/jstree"], function() {
+                $('#repository_deps').jstree();
+            });
+            $('#repository_installation').ready(function() {
+                bind_repository_events();
+            });
+        });
+        $('#repository_contents').click();
+    });
+}
+function bind_repository_events() {
+    var current_changeset = get_current_changeset();
+    var current_metadata = repository_data.repository.metadata[current_changeset];
+    $('.show_tool_tps_selector').click(function() {
+        var changeset = get_current_changeset();
+        var tool_guid = $(this).attr('data-toolguid');
+        show_panel_selector(tool_guid, changeset);
+    });
+    $('#changeset').change(changeset_metadata);
+    $('.toggle_folder').click(function() {
+        toggle_folder($(this));
+    });
+    $('#queue_install').click(function() {
+        var changeset = get_current_changeset();
+        var repository_metadata = repository_data.current_metadata;
+        repository_metadata.install_tool_dependencies = $("#install_tool_dependencies").val();
+        repository_metadata.install_repository_dependencies = $("#install_repository_dependencies").val();
+        repository_metadata.install_resolver_dependencies = $("#install_resolver_dependencies").val();
+        repository_metadata.tool_panel_section = JSON.stringify(select_tps({}));
+        repository_metadata.shed_tool_conf = $("select[name='shed_tool_conf']").find('option:selected').val()
+        var queue_key = get_queue_key(repository_metadata, changeset);
+        var queued_repos = new Object();
+        if (localStorage.repositories) {
+            queued_repos = get_repository_queue();
+        }
+        if (!queued_repos.hasOwnProperty(queue_key)) {
+            queued_repos[queue_key] = repository_metadata;
+        }
+        save_repository_queue(queued_repos);
+        check_if_installed(repository_metadata.repository.name, repository_metadata.repository.owner, current_changeset.split(':')[1]);
+        check_queue();
+    });
+    $('#create_new').click(show_global_tps_create);
+    $('#install_repository').click(function() {
+        var form = $('#repository_installation');
+        var params = {};
+        params.repositories = JSON.stringify([[$('#install_repository').attr('data-tsrid'), $('#changeset').find("option:selected").val()]]);
+        params.tool_shed_repository_ids = JSON.stringify([$('#install_repository').attr('data-tsrid')]);
+        params.tool_shed_url = $('#tab_contents').attr('data-shedurl');
+        params.install_tool_dependencies = $("#install_tool_dependencies").val();
+        params.install_repository_dependencies = $("#install_repository_dependencies").val();
+        params.install_resolver_dependencies = $("#install_resolver_dependencies").val();
+        params.tool_panel_section = JSON.stringify(select_tps(params));
+        params.shed_tool_conf = $("select[name='shed_tool_conf']").find('option:selected').val()
+        params.changeset = $('#changeset').find("option:selected").val();
+        url = $('#repository_installation').attr('action');
+        prepare_installation(params, url);
+    });
+    check_if_installed(current_metadata.repository.name, current_metadata.repository.owner, current_changeset.split(':')[1]);
+}
+function bind_shed_events() {
+    $('.category-selector').click(function() {
+        var tabsw = $('#category_list_tab');
+        $('#list_repositories').replaceWith('<div id="list_repositories" class="tab-pane"><img src="/static/images/jstree/throbber.gif" alt="Loading repositories..." /></div>');
+        var category_id = $(this).attr('data-categoryid');
+        var shed_url = $('#tab_contents').attr('data-shedurl');
+        var api_url = '${h.url_for(controller='/api/tool_shed_repositories', action="shed_category")}';
+        var params = {'tool_shed_url': shed_url, 'category_id': category_id};
+        $.get(api_url, params, function(data) {
+            $('#list_repositories').replaceWith(repositories_in_category(data));
+            $('#repo_list_tab').click();
+            bind_category_events();
+        });
+    });
+}
+function changeset_metadata() {
+    var changeset = get_current_changeset();
+    repository_data.current_changeset = changeset;
+    repository_data.current_metadata = repository_data.repository.metadata[changeset];
+    repository_information = repository_data.repository;
+    $('#repository_details').replaceWith(repository_details_template(repository_data));
+    check_if_installed(repository_information.name, repository_information.owner, changeset.split(':')[1]);
+    bind_repository_events();
+}
+function check_if_installed(name, owner, changeset) {
+    var params = {name: name, owner: owner};
+    $.get('${h.url_for(controller='/api/tool_shed_repositories')}', params, function(data) {
+        var already_installed = false;
+        for (var index = 0; index < data.length; index++) {
+            var repository = data[index];
+            var installed = !repository.deleted && !repository.uninstalled;
+            var changeset_match = repository.changeset_revision == changeset ||
+                                  repository.installed_changeset_revision == changeset;
+            if (repository.name == name && repository.owner == owner && installed && changeset_match) {
+                already_installed = true;
+            }
+        }
+        if (already_installed) {
+            $('#install_repository').prop('disabled', true);
+            $('#install_repository').val('This revision is already installed');
+            $('#queue_install').prop('disabled', true);
+            $('#queue_install').val('This revision is already installed');
+        }
+        else {
+            $('#install_repository').prop('disabled', false);
+            $('#install_repository').val('Install this revision');
+            // The queue state can only be evaluated once we know the revision
+            // is not already installed, i.e. inside this AJAX callback.
+            if (repository_in_queue()) {
+                $('#queue_install').prop('disabled', true);
+                $('#queue_install').val('This revision has been queued');
+            }
+            else {
+                $('#queue_install').prop('disabled', false);
+                $('#queue_install').val('Install this revision later');
+            }
+        }
+    });
+}
+function check_queue() {
+    if (localStorage.hasOwnProperty('repositories')) {
+        var repository_queue = JSON.parse(localStorage.repositories);
+        var queue_keys = Object.keys(repository_queue);
+        var queue = Array();
+        for (var i = 0; i < queue_keys.length; i++) {
+            var queue_key = queue_keys[i];
+            var repository_metadata = repository_queue[queue_key];
+            var repository = repository_metadata.repository;
+            var key_parts = queue_key.split('|');
+            var tool_shed_url = key_parts[0];
+            repository.queue_key = queue_key;
+            repository.changeset = repository_metadata.changeset_revision;
+            repository.tool_shed_url = tool_shed_url;
+            queue.push(repository);
+        }
+        $('#repository_queue').replaceWith(repository_queue_template({'repositories': queue}));
+        $('.install_one').click(function() {
+            var repository_metadata = get_repository_from_queue($(this).attr('data-repokey'));
+            install_from_queue(repository_metadata, $(this).attr('data-repokey'));
+        });
+        $('.remove_one').click(function(){
+            var queue_key = $(this).attr('data-repokey');
+            var repository_metadata = get_repository_from_queue(queue_key);
+            var repository_id = repository_metadata.repository.id;
+            var selector = "#queued_repository_" + repository_id;
+            $(selector).remove();
+            remove_from_queue(undefined, undefined, queue_key);
+        });
+        $('#clear_queue').click(function() {
+            localStorage.removeItem('repositories');
+        });
+        $('#install_all').click(process_queue);
+    }
+}
+function find_tool_by_guid(tool_guid, changeset) {
+    var tools = repository_data.tools[changeset];
+    for (var index = 0; index < tools.length; index++) {
+        var tool = tools[index];
+        if (tool.guid === tool_guid) {
+            return tool;
+        }
+    }
+}
+function get_current_changeset() {
+    return $('#changeset').find("option:selected").text();
+}
+function get_queue_key(repository_metadata, changeset, shed_url = undefined) {
+    if (shed_url === undefined) {
+        shed_url = $("#tab_contents").attr('data-shedurl');
+    }
+    return shed_url + '|' + repository_metadata.id + '|' + changeset;
+}
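+// A queue key therefore has the form "<tool shed url>|<metadata id>|<changeset>",
+// e.g. (hypothetical values): "https://toolshed.example.org|f2db41e1fa331b3e|5:deadbeef".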
+function get_queued_repositories(current_ids = null, processed_ids = null, metadata = null) {
+    if (processed_ids === null) {
+        processed_ids = Array();
+    }
+    if (current_ids === null) {
+        current_ids = Array();
+    }
+    if (metadata === null) {
+        var changeset = get_current_changeset();
+        $("#current_changeset").text(changeset);
+        metadata = repository_information.metadata[changeset];
+    }
+    if (processed_ids.indexOf(metadata.repository.id) === -1) {
+        var repo_tuple = Array(metadata.repository.id, metadata.changeset_revision)
+        processed_ids.push(metadata.repository.id)
+        current_ids.push(repo_tuple)
+    }
+    if (metadata.has_repository_dependencies) {
+        for (var item in metadata.repository_dependencies) {
+            var dependency = metadata.repository_dependencies[item];
+            var repository = dependency.repository;
+            if (processed_ids.indexOf(repository.id) === -1) {
+                repo_tuple = Array(repository.id, dependency.changeset_revision)
+                current_ids.push(repo_tuple);
+                processed_ids.push(repository.id)
+            }
+            if (dependency.has_repository_dependencies) {
+                current_ids = get_queued_repositories(current_ids, processed_ids, dependency);
+            }
+        }
+    }
+    return current_ids;
+}
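+// Returns an array of [repository id, changeset revision] pairs, walking the
+// dependency tree depth-first and de-duplicating via processed_ids, e.g.
+// (hypothetical ids): [["abc123", "5:deadbeef"], ["def456", "2:cafebabe"]].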
+function get_repository_queue() {
+    return JSON.parse(localStorage.repositories);
+}
+function get_repository_from_queue(queue_key) {
+    repository_queue = get_repository_queue();
+    if (repository_queue.hasOwnProperty(queue_key)) {
+        return repository_queue[queue_key];
+    }
+    return undefined;
+}
+function install_repository(data) {
+    var params = data;
+    $.post("${h.url_for( controller='admin_toolshed', action='manage_repositories' )}", params, function(data) {
+        console.log( "Initializing repository installation succeeded" );
+        window.location.assign('${h.url_for(controller='admin_toolshed', action='monitor_repository_installation')}');
+    })
+}
+function install_from_queue(repository_metadata, queue_key) {
+    var params = Object();
+    params.install_tool_dependencies = repository_metadata.install_tool_dependencies;
+    params.install_repository_dependencies = repository_metadata.install_repository_dependencies;
+    params.install_resolver_dependencies = repository_metadata.install_resolver_dependencies;
+    params.tool_panel_section = repository_metadata.tool_panel_section;
+    params.shed_tool_conf = repository_metadata.shed_tool_conf;
+    params.repositories = JSON.stringify([[repository_metadata.repository.id, repository_metadata.changeset_revision]]);
+    params.tool_shed_repository_ids = JSON.stringify([repository_metadata.repository.id]);
+    params.tool_shed_url = queue_key.split('|')[0];
+    params.changeset = repository_metadata.changeset_revision;
+    var url = '${h.url_for(controller='/api/tool_shed_repositories', action='install', async=True)}';
+    $('#queued_repository_' + repository_metadata.repository.id).remove();
+    remove_from_queue(undefined, undefined, queue_key);
+    prepare_installation(params, url);
+}
+function prepare_installation(params, api_url) {
+    $.post(api_url, params, function(data) {
+        var iri_parameters = JSON.parse(data);
+        install_repository(iri_parameters);
+    });
+}
+function process_dependencies(metadata, selector) {
+    has_repo_dependencies = false;
+    if (metadata.has_repository_dependencies) {
+        has_repo_dependencies = true;
+        for (var item in metadata.repository_dependencies) {
+            var dependency = metadata.repository_dependencies[item];
+            if (dependency.has_repository_dependencies) {
+                has_repo_dependencies = true;
+            }
+            var repository = dependency.repository;
+            if (repository !== null) {
+                var template_values = {dependency_id: dependency.id,
+                                   name: repository.name,
+                                   revision: dependency.changeset_revision,
+                                   owner: repository.owner,
+                                   prior: ''};
+                if (dependency.prior_installation_required) {
+                    template_values.prior = ' (<b>Prior installation required</b>)';
+                }
+                var dependency_html = repository_dependency_template(template_values);
+                if (selector === undefined) {
+                    $("#repository_deps").append(dependency_html);
+                }
+                else {
+                    $(selector).append('<ul>' + dependency_html + '</ul>');
+                }
+                if (dependency.has_repository_dependencies) {
+                    process_dependencies(dependency, '#metadata_' + dependency.id);
+                }
+            }
+        }
+    }
+    if (metadata.includes_tool_dependencies) {
+        for (var item in metadata.tool_dependencies) {
+            var dependency = metadata.tool_dependencies[item];
+            if (item === 'set_environment') {
+                // set_environment entries are a list; record each one.
+                for (var i = 0; i < dependency.length; i++) {
+                    var tool_dependency = {name: dependency[i].name, version: 'N/A', type: dependency[i].type};
+                    if (!array_contains_dict(valid_tool_dependencies, tool_dependency)) {
+                        valid_tool_dependencies.push(tool_dependency);
+                    }
+                }
+            }
+            else {
+                var tool_dependency = {name: dependency.name, version: dependency.version, type: dependency.type};
+                if (!array_contains_dict(valid_tool_dependencies, tool_dependency)) {
+                    valid_tool_dependencies.push(tool_dependency);
+                }
+            }
+        }
+    }
+    if (metadata.includes_tools_for_display_in_tool_panel) {
+        $('#tools_toggle').show();
+        for (var i = 0; i < metadata.tools.length; i++) {
+            var tool = metadata.tools[i];
+            var valid_tool = {clean_name: tool.clean, name: tool.name, version: tool.version, description: tool.description, guid: tool.guid};
+            if (!array_contains_dict(valid_tools, valid_tool) && tool.add_to_tool_panel) {
+                valid_tools.push(valid_tool);
+            }
+        }
+    }
+    else {
+        $('#tools_toggle').hide();
+    }
+}
+function process_queue() {
+    if (!localStorage.repositories) {
+        return;
+    }
+    var toolsheds = Array();
+    var queue = Object();
+    var queued_repositories = get_repository_queue();
+    var queue_keys = Object.keys(queued_repositories);
+    for (var i = 0; i < queue_keys.length; i++) {
+        queue_key = queue_keys[i];
+        toolshed = queue_key.split('|')[0];
+        if (toolsheds.indexOf(toolshed) === -1) {
+            toolsheds.push(toolshed);
+            queue[toolshed] = Array();
+        }
+        repository_metadata = queued_repositories[queue_key]
+        repository_metadata.queue_key = queue_key
+        queue[toolshed].push(repository_metadata);
+    }
+    for (i = 0; i < toolsheds.length; i++) {
+        for (var j = 0; j < queue[toolsheds[i]].length; j++) {
+            repository = queue[toolsheds[i]][j];
+            install_from_queue(repository, repository.queue_key);
+        }
+    }
+}
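+// After grouping, queue has the shape { "<tool shed url>": [repository_metadata, ...] }
+// so queued repositories are installed shed by shed (illustrative shape only).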
+function remove_from_queue(repository_metadata, changeset, queue_key=undefined) {
+    if (!localStorage.repositories) {
+        return;
+    }
+    if (queue_key === undefined) {
+        queue_key = get_queue_key(repository_metadata, changeset);
+    }
+    repository_queue = get_repository_queue();
+    if (repository_queue.hasOwnProperty(queue_key)) {
+        delete repository_queue[queue_key];
+        save_repository_queue(repository_queue);
+    }
+}
+function repository_in_queue() {
+    if (!localStorage.repositories) {
+        return false;
+    }
+    var changeset = get_current_changeset();
+    var repository_metadata = repository_data.current_metadata;
+    var queue_key = get_queue_key(repository_metadata, changeset);
+    var queued_repos = get_repository_queue();
+    return queued_repos.hasOwnProperty(queue_key);
+}
+function save_repository_queue(repository_queue) {
+    localStorage.repositories = JSON.stringify(repository_queue);
+}
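+// The queue survives page reloads because it is persisted as JSON in
+// localStorage under the single "repositories" key; a sketch of the round trip:
+//   save_repository_queue({"shed|id|5:deadbeef": repository_metadata});
+//   get_repository_queue();  // => the same object, parsed back from JSON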
+function select_tps(params) {
+    var tool_panel_section = {};
+    if ($('#tool_panel_section_select').length) {
+        params.tool_panel_section_id = $('#tool_panel_section_select').find("option:selected").val();
+    }
+    else {
+        params.new_tool_panel_section = $("#new_tool_panel_section").val();
+    }
+    $('.tool_panel_section_picker').each(function() {
+        var element_name = $(this).attr('name');
+        var tool_guid = $(this).attr('data-toolguid');
+        if (element_name === 'tool_panel_section_id') {
+            tool_panel_section[tool_guid] = { tool_panel_section: $(this).find("option:selected").val(), action: 'append' };
+        }
+        else {
+            tool_panel_section[tool_guid] = { tool_panel_section: $(this).val(), action: 'create' };
+        }
+    });
+    return tool_panel_section;
+}
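+// The mapping returned by select_tps() is keyed by tool GUID, e.g.
+// (hypothetical GUID): { "example.org/repos/owner/repo/tool/1.0":
+//                        { tool_panel_section: "section_id", action: "append" } }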
+function show_global_tps_select() {
+    $('#tool_panel_section').empty();
+    $('#tool_panel_section').append(tps_selection_template(repository_data.panel_section_dict));
+    $('#create_new').click(show_global_tps_create);
+}
+function show_global_tps_create() {
+    $('#tool_panel_section').empty();
+    $('#tool_panel_section').append(tps_creation_template(repository_data.panel_section_dict));
+    $('#select_existing').click(show_global_tps_select);
+}
+function show_panel_button(tool_guid, changeset) {
+    var tool = find_tool_by_guid(tool_guid, changeset);
+    var selector = '#per_tool_tps_container_' + tool.clean;
+    $(selector).empty();
+    $(selector).append(tps_picker_template({tool: tool}));
+    $('#select_tps_button_' + tool.clean).click(function() {
+        var changeset = get_current_changeset();
+        var tool_guid = $(this).attr('data-toolguid');
+        show_panel_selector(tool_guid, changeset);
+    });
+}
+function show_panel_selector(tool_guid, changeset) {
+    var tool = find_tool_by_guid(tool_guid, changeset);
+    var selector = '#per_tool_tps_container_' + tool.clean;
+    $(selector).empty();
+    $(selector).append(select_tps_template({tool: tool}));
+    $('#per_tool_create_' + tool.clean).click(function() {
+        show_tool_create(tool_guid, changeset);
+    });
+    $('#cancel_' + tool.clean).click(function() {
+        show_panel_button(tool_guid, changeset);
+    });
+}
+function show_tool_create(tool_guid, changeset) {
+    var tool = find_tool_by_guid(tool_guid, changeset);
+    var selector = '#per_tool_tps_container_' + tool.clean;
+    $(selector).empty();
+    $(selector).append(create_tps_template({tool: tool}));
+    $('#per_tool_select_' + tool.clean).click(function() {
+        var changeset = get_current_changeset();
+        var tool_guid = $(this).attr('data-toolguid');
+        show_tool_select(tool_guid, changeset);
+    });
+    $('#cancel_' + tool.clean).click(function() {
+        var changeset = get_current_changeset();
+        var tool_guid = $(this).attr('data-toolguid');
+        show_panel_button(tool_guid, changeset);
+    });
+}
+function show_tool_select(tool_guid, changeset) {
+    var tool = find_tool_by_guid(tool_guid, changeset);
+    var selector = '#per_tool_tps_container_' + tool.clean;
+    $(selector).empty();
+    $(selector).append(select_tps_template({tool: tool}));
+    $('#per_tool_create_' + tool.clean).click(function() {
+        var changeset = get_current_changeset();
+        var tool_guid = $(this).attr('data-toolguid');
+        show_tool_create(tool_guid, changeset);
+    });
+    $('#cancel_' + tool.clean).click(function() {
+        var changeset = get_current_changeset();
+        var tool_guid = $(this).attr('data-toolguid');
+        show_panel_button(tool_guid, changeset);
+    });
+}
+function toggle_folder(folder) {
+    var target_selector = '#' + folder.attr('data_target');
+    $(target_selector).toggle();
+}
+function tool_panel_section() {
+    var tps_selection = $('#tool_panel_section_select').find("option:selected").text();
+    if (tps_selection === 'Create New') {
+        $("#new_tool_panel_section").prop('disabled', false);
+        $("#new_tps").show();
+    }
+    else {
+        $("#new_tool_panel_section").prop('disabled', true);
+        $("#new_tps").hide();
+    }
+}
+$(document).ready(function() {
+    $('#list_toolsheds').replaceWith(tool_sheds_template({tool_sheds: tool_sheds}));
+    $('#shed_list_tab').click();
+    check_queue();
+    $('.shed-selector').click(function() {
+        $('#list_categories').replaceWith('<div id="list_categories" class="tab-pane"><img src="/static/images/jstree/throbber.gif" alt="Loading categories..." /></div>');
+        var shed_url = $(this).attr('data-shedurl');
+        $('#tab_contents').attr('data-shedurl', shed_url);
+        var api_url = '${h.url_for(controller='/api/tool_shed_repositories', action="shed_categories")}';
+        $.get(api_url, { tool_shed_url: shed_url }, function(data) {
+            $('#list_categories').replaceWith(categories_in_shed(data));
+            $('#category_list_tab').click();
+            bind_shed_events();
+        });
+    });
+});
+</script>
+<div class="container" role="navigation">
+    <ul class="nav nav-tabs" id="browse_toolsheds">
+        <li class="nav-tab tab_toolsheds" role="presentation" id="toolshed_list">
+            <a id="shed_list_tab" href="#list_toolsheds" data-toggle="tab">Toolsheds</a>
+        </li>
+        <li class="nav-tab tab_categories" role="presentation" id="category_list">
+            <a id="category_list_tab" href="#list_categories" data-toggle="tab">Categories</a>
+        </li>
+        <li class="nav-tab tab_repositories" role="presentation" id="repository_list">
+            <a id="repo_list_tab" href="#list_repositories" data-toggle="tab">Repositories</a>
+        </li>
+        <li class="nav-tab tab_repository_details" role="presentation" id="repository_contents">
+            <a id="repo_info_tab" href="#repository_details" data-toggle="tab">Repository</a>
+        </li>
+        <li class="nav-tab" role="presentation" id="repository_installation_queue">
+            <a href="#repository_queue" data-toggle="tab">Repository Installation Queue</a>
+        </li>
+    </ul>
+    <div id="tab_contents" class="tab-content clearfix">
+        <div class="tab-pane active" id="list_toolsheds">Loading...</div>
+        <div class="tab-pane" id="list_categories">Select a toolshed in the previous tab to see a list of its categories.</div>
+        <div class="tab-pane" id="list_repositories">Select a category in the previous tab to see a list of its repositories.</div>
+        <div class="tab-pane" id="repository_details">Select a repository in the previous tab.</div>
+        <div class="tab-pane" id="repository_queue">There are no repositories queued for installation.</div>
+    </div>
+</div>
diff --git a/templates/admin/tool_shed_repository/common.mako b/templates/admin/tool_shed_repository/common.mako
new file mode 100644
index 0000000..a3f18db
--- /dev/null
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -0,0 +1,394 @@
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+
+<%def name="browse_files(title_text, directory_path)">
+    <script type="text/javascript">
+        $(function(){
+            // --- Initialize sample trees
+            $("#tree").dynatree({
+                title: "${title_text|h}",
+                minExpandLevel: 1,
+                persist: false,
+                checkbox: true,
+                selectMode: 3,
+                onPostInit: function(isReloading, isError) {
+                    // Re-fire onActivate, so the text is updated
+                    this.reactivate();
+                },
+                fx: { height: "toggle", duration: 200 },
+                // initAjax is hard to fake, so we pass the children as object array:
+                initAjax: {url: "${h.url_for( controller='admin_toolshed', action='open_folder' )}",
+                           dataType: "json",
+                           data: { folder_path: "${directory_path|h}",
+                                   repository_id: "${trans.security.encode_id( repository.id )}" },
+                },
+                onLazyRead: function(dtnode){
+                    dtnode.appendAjax({
+                        url: "${h.url_for( controller='admin_toolshed', action='open_folder' )}",
+                        dataType: "json",
+                        data: { folder_path: dtnode.data.key,
+                                repository_id: "${trans.security.encode_id( repository.id )}" },
+                    });
+                },
+                onSelect: function(select, dtnode) {
+                    // Display list of selected nodes
+                    var selNodes = dtnode.tree.getSelectedNodes();
+                    // convert to title/key array
+                    var selKeys = $.map(selNodes, function(node) {
+                        return node.data.key;
+                    });
+                },
+                onActivate: function(dtnode) {
+                    var cell = $("#file_contents");
+                    var selected_value;
+                    if (dtnode.data.key == 'root') {
+                        selected_value = "${directory_path|h}/";
+                    } else {
+                        selected_value = dtnode.data.key;
+                    }
+                    if (selected_value.charAt(selected_value.length-1) != '/') {
+                        // Make ajax call
+                        $.ajax( {
+                            type: "POST",
+                            url: "${h.url_for( controller='admin_toolshed', action='get_file_contents' )}",
+                            dataType: "json",
+                            data: { file_path: selected_value, repository_id: "${trans.security.encode_id( repository.id )}" },
+                            success : function( data ) {
+                                cell.html( '<label>'+data+'</label>' )
+                            }
+                        });
+                    } else {
+                        cell.html( '' );
+                    }
+                },
+            });
+        });
+    </script>
+</%def>
+
+<%def name="render_dependencies_section( install_resolver_dependencies_check_box, repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict, revision_label=None, export=False, requirements_status=None )">
+    <style type="text/css">
+        #dependency_table{ table-layout:fixed;
+                           width:100%;
+                           overflow-wrap:normal;
+                           overflow:hidden;
+                           border:0px;
+                           word-break:keep-all;
+                           word-wrap:break-word;
+                           line-break:strict; }
+    </style>
+    <script type="text/javascript">
+         $(function(){
+             $(".detail-section").hide();
+             var hidden = true;
+             $(".toggle-detail-section").click(function(e){
+                 e.preventDefault();
+                 hidden = !hidden;
+                 if (hidden === true){
+                     $(".toggle-detail-section").text('Display Details');
+                 } else{
+                     $(".toggle-detail-section").text('Hide Details');
+                 }
+                 $(".detail-section").toggle();
+             })
+         });
+     </script>
+    <%
+        from markupsafe import escape
+        class RowCounter( object ):
+            def __init__( self ):
+                self.count = 0
+            def increment( self ):
+                self.count += 1
+            def __str__( self ):
+                return str( self.count )
+
+        repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
+        missing_repository_dependencies_root_folder = containers_dict.get( 'missing_repository_dependencies', None )
+        tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
+        missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
+        env_settings_header_row_displayed = False
+        package_header_row_displayed = False
+        if revision_label:
+            revision_label_str = ' revision <b>%s</b> of ' % escape( str( revision_label ) )
+        else:
+            revision_label_str = ' '
+    %>
+    <div class="form-row">
+        <p>
+            By default Galaxy will install all needed dependencies for${revision_label_str}the repository. See the
+            <a target="_blank" href="https://docs.galaxyproject.org/en/master/admin/dependency_resolvers.html">
+                dependency resolver documentation
+            </a>.
+        </p>
+        <p>
+            You can control how dependencies are installed (this is an advanced option; if in doubt, use the default).
+            <button class="toggle-detail-section">
+                Display Details
+            </button>
+        </p>
+     </div>
+   %if export:
+    <div class="form-row">
+        <div class="toolParamHelp" style="clear: both;">
+            <p>
+                The following additional repositories are required by${revision_label_str}the <b>${repository.name|h}</b> repository and they can be exported as well.
+            </p>
+        </div>
+    </div>
+    %endif
+    <div style="clear: both"></div>
+    <div class="detail-section">
+    %if repository_dependencies_root_folder or missing_repository_dependencies_root_folder:
+        %if repository_dependencies_check_box:
+            <div class="form-row">
+                %if export:
+                    <label>Export repository dependencies?</label>
+                %else:
+                    <label>Handle repository dependencies?</label>
+                %endif
+                ${repository_dependencies_check_box.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    %if export:
+                        Select to export the following additional repositories that are required by this repository.
+                    %else:
+                        Select to automatically install these additional Tool Shed repositories required by this repository.
+                    %endif
+                </div>
+            </div>
+            <div style="clear: both"></div>
+        %endif
+        %if repository_dependencies_root_folder:
+            <div class="form-row">
+                <p/>
+                <% row_counter = RowCounter() %>
+                <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table">
+                    ${render_folder( repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+                </table>
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        %if missing_repository_dependencies_root_folder:
+            <div class="form-row">
+                <p/>
+                <% row_counter = RowCounter() %>
+                <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table">
+                    ${render_folder( missing_repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+                </table>
+                <div style="clear: both"></div>
+            </div>
+        %endif
+    %endif
+    %if tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
+        %if install_tool_dependencies_check_box is not None:
+            <div class="form-row">
+                <label>When available, install Tool Shed managed tool dependencies?</label>
+                <% disabled = trans.app.config.tool_dependency_dir is None %>
+                ${install_tool_dependencies_check_box.get_html( disabled=disabled )}
+                <div class="toolParamHelp" style="clear: both;">
+                    %if disabled:
+                        Set the tool_dependency_dir configuration value in your Galaxy config to automatically handle tool dependencies.
+                    %else:
+                        Select to automatically handle tool dependencies via Tool Shed.
+                    %endif
+                </div>
+            <div style="clear: both"></div>
+        %endif
+        %if tool_dependencies_root_folder:
+            <div class="form-row">
+                <p/>
+                <% row_counter = RowCounter() %>
+                <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
+                    ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+                </table>
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        %if missing_tool_dependencies_root_folder:
+            <div class="form-row">
+                <p/>
+                <% row_counter = RowCounter() %>
+                <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="dependency_table">
+                    ${render_folder( missing_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+                </table>
+                <div style="clear: both"></div>
+            </div>
+        %endif
+    %endif
+    </div>
+    <div style="clear: both"></div>
+    %if requirements_status and install_resolver_dependencies_check_box:
+    <div class="form-row">
+        <label>When available, install <a href="https://docs.galaxyproject.org/en/master/admin/conda_faq.html" target="_blank">Conda</a> managed tool dependencies?</label>
+        ${install_resolver_dependencies_check_box.get_html()}
+        <div class="toolParamHelp" style="clear: both;">
+            Select to automatically install tool dependencies via Conda.
+        </div>
+    </div>
+    %endif
+    </div>
+</%def>
+
+<%def name="render_readme_section( containers_dict )">
+    <%
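+        # Same helper as in render_dependencies_section(); redefined here because code
+        # inside a Mako <% %> block is local to its enclosing def.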
+        class RowCounter( object ):
+            def __init__( self ):
+                self.count = 0
+            def increment( self ):
+                self.count += 1
+            def __str__( self ):
+                return str( self.count )
+
+        readme_files_root_folder = containers_dict.get( 'readme_files', None )
+    %>
+    %if readme_files_root_folder:
+        <p/>
+        <div class="form-row">
+            <% row_counter = RowCounter() %>
+            <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table">
+                ${render_folder( readme_files_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+            </table>
+        </div>
+    %endif
+</%def>
+
+<%def name="dependency_status_updater()">
+    <script type="text/javascript">
+        // Tool dependency status updater - used to update the installation status on the Tool Dependencies Grid.
+        // Looks for changes in tool dependency installation status using an async request. Keeps calling itself
+        // (via setTimeout) until dependency installation status is neither 'Installing' nor 'Building'.
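+        // The initial dependency_status_list is rendered server-side by tool_dependency_installation_updater() below.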
+        var tool_dependency_status_updater = function( dependency_status_list ) {
+            // See if there are any items left to track
+            var empty = true;
+            for ( var item in dependency_status_list ) {
+                //alert( "item" + item.toSource() );
+                //alert( "dependency_status_list[item] " + dependency_status_list[item].toSource() );
+                //alert( "dependency_status_list[item]['status']" + dependency_status_list[item]['status'] );
+                if ( dependency_status_list[item]['status'] != 'Installed' ) {
+                    empty = false;
+                    break;
+                }
+            }
+            if ( ! empty ) {
+                setTimeout( function() { tool_dependency_status_updater_callback( dependency_status_list ) }, 3000 );
+            }
+        };
+        var tool_dependency_status_updater_callback = function( dependency_status_list ) {
+            var ids = [];
+            var status_list = [];
+            $.each( dependency_status_list, function( index, dependency_status ) {
+                ids.push( dependency_status[ 'id' ] );
+                status_list.push( dependency_status[ 'status' ] );
+            });
+            // Make ajax call
+            $.ajax( {
+                type: "POST",
+                url: "${h.url_for( controller='admin_toolshed', action='tool_dependency_status_updates' )}",
+                dataType: "json",
+                data: { ids: ids.join( "," ), status_list: status_list.join( "," ) },
+                success : function( data ) {
+                    $.each( data, function( index, val ) {
+                        // Replace HTML
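+                        // The grid cell's DOM id is "ToolDependencyStatus-<encoded id>".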
+                        var cell1 = $( "#ToolDependencyStatus-" + val[ 'id' ] );
+                        cell1.html( val[ 'html_status' ] );
+                        dependency_status_list[ index ] = val;
+                    });
+                    tool_dependency_status_updater( dependency_status_list );
+                }
+            });
+        };
+    </script>
+</%def>
+
+<%def name="repository_installation_status_updater()">
+    <script type="text/javascript">
+        // Tool shed repository status updater - used to update the installation status on the Repository Installation Grid.
+        // Looks for changes in repository installation status using an async request. Keeps calling itself (via setTimeout) until
+        // repository installation status is not one of: 'New', 'Cloning', 'Setting tool versions', 'Installing tool dependencies',
+        // 'Loading proprietary datatypes'.
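+        // The initial repository_status_list is rendered server-side by repository_installation_updater() below.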
+        var tool_shed_repository_status_updater = function( repository_status_list ) {
+            // See if there are any items left to track
+            //alert( "repository_status_list start " + repository_status_list.toSource() );
+            var empty = true;
+            for ( var item in repository_status_list ) {
+                //alert( "item" + item.toSource() );
+                //alert( "repository_status_list[item] " + repository_status_list[item].toSource() );
+                //alert( "repository_status_list[item]['status']" + repository_status_list[item]['status'] );
+                if (repository_status_list[item]['status'] != 'Installed'){
+                    empty = false;
+                    break;
+                }
+            }
+            if ( ! empty ) {
+                setTimeout( function() { tool_shed_repository_status_updater_callback( repository_status_list ) }, 3000 );
+            }
+        };
+        var tool_shed_repository_status_updater_callback = function( repository_status_list ) {
+            var ids = [];
+            var status_list = [];
+            $.each( repository_status_list, function( index, repository_status ) {
+                ids.push( repository_status[ 'id' ] );
+                status_list.push( repository_status[ 'status' ] );
+            });
+            // Make ajax call
+            $.ajax( {
+                type: "POST",
+                url: "${h.url_for( controller='admin_toolshed', action='repository_installation_status_updates' )}",
+                dataType: "json",
+                data: { ids: ids.join( "," ), status_list: status_list.join( "," ) },
+                success : function( data ) {
+                    $.each( data, function( index, val ) {
+                        // Replace HTML
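+                        // The grid cell's DOM id is "RepositoryStatus-<encoded id>" (also built by repository_installation_status.mako).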
+                        var cell1 = $( "#RepositoryStatus-" + val[ 'id' ] );
+                        cell1.html( val[ 'html_status' ] );
+                        repository_status_list[ index ] = val;
+                    });
+                    tool_shed_repository_status_updater( repository_status_list );
+                }
+            });
+        };
+    </script>
+</%def>
+
+<%def name="tool_dependency_installation_updater()">
+    <%
+        can_update = False
+        if query.count():
+            # Use the first tool dependency to reach its owning tool shed repository.
+            tool_dependency = query[0]
+            tool_shed_repository = tool_dependency.tool_shed_repository
+            can_update = tool_shed_repository.tool_dependencies_being_installed or tool_shed_repository.missing_tool_dependencies
+    %>
+    %if can_update:
+        <script type="text/javascript">
+            // Tool dependency installation status updater
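+            // The Mako join below renders the initial list of {"id", "status"} objects for every tool dependency in the query.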
+            tool_dependency_status_updater( [${ ",".join( [ '{"id" : "%s", "status" : "%s"}' % ( trans.security.encode_id( td.id ), td.status ) for td in query ] ) } ] );
+        </script>
+    %endif
+</%def>
+
+<%def name="repository_installation_updater()">
+    <%
+        can_update = False
+        if query.count():
+            for tool_shed_repository in query:
+                if tool_shed_repository.status not in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED,
+                                                        trans.install_model.ToolShedRepository.installation_status.ERROR,
+                                                        trans.install_model.ToolShedRepository.installation_status.DEACTIVATED,
+                                                        trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+                    can_update = True
+                    break
+    %>
+    %if can_update:
+        <script type="text/javascript">
+            // Tool shed repository installation status updater
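+            // The Mako join below renders the initial list of {"id", "status"} objects for every repository in the query.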
+            tool_shed_repository_status_updater( [${ ",".join( [ '{"id" : "%s", "status" : "%s"}' % ( trans.security.encode_id( tsr.id ), tsr.status ) for tsr in query ] ) } ] );
+        </script>
+    %endif
+</%def>
diff --git a/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
new file mode 100644
index 0000000..8b1b3f7
--- /dev/null
+++ b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
@@ -0,0 +1,207 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<%
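+# The caller may supply a single repository or a list of repositories; normalize to a list.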
+repository = context.get( 'repository', None )
+if isinstance( repository, list ):
+    repositories = repository
+else:
+    repositories = [ repository ]
+%>
+
+%if len( repositories ) == 1:
+    ${render_galaxy_repository_actions( repositories[0] )}
+%endif
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+<form name="deactivate_or_uninstall_repository" id="deactivate_or_uninstall_repository" action="${ h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository' ) }" method="post" >
+%for repository in repositories:
+    <input type="hidden" name="id" value="${ trans.security.encode_id( repository.id ) | h }" />
+    <div class="toolFormTitle">${repository.name|h}</div>
+    <div class="toolFormBody">
+            <div class="form-row">
+                <label>Description:</label>
+                ${repository.description|h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Revision:</label>
+                ${repository.changeset_revision|h}
+            </div>
+            <div class="form-row">
+                <label>Tool shed:</label>
+                ${repository.tool_shed|h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Owner:</label>
+                ${repository.owner|h}
+            </div>
+            <div class="form-row">
+                <label>Deleted:</label>
+                ${repository.deleted|h}
+            </div>
+            <div class="form-row">
+                <%
+                    can_deactivate_repository = repository.can_deactivate
+                    can_uninstall_repository = repository.can_uninstall
+                %>
+                %if can_deactivate_repository:
+                    <table width="100%" border="0" cellpadding="0" cellspacing="0">
+                        <tr>
+                            <td bgcolor="#D8D8D8">
+                                <label>Deactivating this repository will result in the following:</label>
+                            </td>
+                        </tr>
+                    </table>
+                    <div class="toolParamHelp" style="clear: both;">
+                            * The repository and all of its contents will remain on disk and can still be used by dependent items.
+                    </div>
+                    %if repository.includes_tools_for_display_in_tool_panel:
+                        <div class="toolParamHelp" style="clear: both;">
+                            * The repository's tools will not be loaded into the tool panel.
+                        </div>
+                    %endif
+                    %if repository.includes_tool_dependencies:
+                        <div class="toolParamHelp" style="clear: both;">
+                            * The repository's installed tool dependencies will remain on disk.
+                        </div>
+                    %endif
+                    %if repository.includes_datatypes:
+                        <div class="toolParamHelp" style="clear: both;">
+                            * The repository's datatypes, datatype converters and display applications will be eliminated from the datatypes registry.
+                        </div>
+                    %endif
+                    <div class="toolParamHelp" style="clear: both;">
+                        * The repository record's deleted column in the tool_shed_repository database table will be set to True.
+                    </div>
+                    <br/>
+                %endif
+                %if can_uninstall_repository:
+                    <table width="100%" border="0" cellpadding="0" cellspacing="0">
+                        <tr>
+                            <td bgcolor="#D8D8D8">
+                                <label>Uninstalling this repository will result in the following:</label>
+                            </td>
+                        </tr>
+                    </table>
+                    <div class="toolParamHelp" style="clear: both;">
+                        * The repository and all of its contents will be removed from disk and can no longer be used by dependent items.
+                    </div>
+                    %if repository.includes_tools_for_display_in_tool_panel:
+                        <div class="toolParamHelp" style="clear: both;">
+                            * The repository's tool tag sets will be removed from the tool config file in which they are defined.
+                        </div>
+                    %endif
+                    %if repository.includes_tool_dependencies:
+                        <div class="toolParamHelp" style="clear: both;">
+                            * The repository's installed tool dependencies will be removed from disk and can no longer be used by dependent items.
+                        </div>
+                        <div class="toolParamHelp" style="clear: both;">
+                            * Each associated tool dependency record's status column in the tool_dependency database table will be set to 'Uninstalled'.
+                        </div>
+                    %endif
+                    %if repository.includes_datatypes:
+                        <div class="toolParamHelp" style="clear: both;">
+                            * The repository's datatypes, datatype converters and display applications will be eliminated from the datatypes registry.
+                        </div>
+                    %endif
+                    <div class="toolParamHelp" style="clear: both;">
+                        * The repository record's deleted column in the tool_shed_repository database table will be set to True.
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        * The repository record's uninstalled column in the tool_shed_repository database table will be set to True.
+                    </div>
+                    <div style="clear: both"></div>
+                    <br/>
+                    <%                        
+                        irm = trans.app.installed_repository_manager
+                        repository_tup = irm.get_repository_tuple_for_installed_repository_manager( repository )
+
+                        # Get installed repositories that depend on this repository.
+                        installed_dependent_repositories = irm.installed_dependent_repositories_of_installed_repositories.get( repository_tup, [] )
+
+                        # Get this repository's installed tool dependencies.
+                        installed_tool_dependencies = irm.installed_tool_dependencies_of_installed_repositories.get( repository_tup, [] )
+
+                        # Get installed runtime dependent tool dependencies of this repository's installed tool dependencies.
+                        installed_runtime_dependent_tool_dependencies = []
+                        for itd_tup in installed_tool_dependencies:
+                            installed_dependent_td_tups = \
+                                irm.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.get( itd_tup, [] )
+                            if installed_dependent_td_tups:
+                                installed_runtime_dependent_tool_dependencies.extend( installed_dependent_td_tups )
+                    %>
+                    %if installed_dependent_repositories or installed_runtime_dependent_tool_dependencies:
+                        <table width="100%" border="0" cellpadding="0" cellspacing="0">
+                            <tr>
+                                <td bgcolor="#D8D8D8">
+                                    <label>Uninstalling this repository will affect the following dependent items:</label>
+                                </td>
+                            </tr>
+                        </table>
+                        %if installed_dependent_repositories:
+                            <label>Dependent repositories:</label>
+                            <ul>
+                            %for installed_dependent_repository_tup in installed_dependent_repositories:
+                                <%
+                                    tool_shed, name, owner, installed_changeset_revision = installed_dependent_repository_tup
+                                %>
+                                <li>Revision <b>${ installed_changeset_revision | h}</b> of repository <b>${name | h}</b> owned by <b>${owner | h}</b></li>
+                            %endfor
+                            </ul>
+                        %endif
+                        %if installed_runtime_dependent_tool_dependencies:
+                            <label>Runtime dependent tool dependencies of this repository's tool dependencies:</label>
+                            <ul>
+                                %for td_tup in installed_runtime_dependent_tool_dependencies:
+                                    <%
+                                        tool_shed_repository_id, name, version, type = td_tup
+                                        containing_repository = irm.get_containing_repository_for_tool_dependency( td_tup )
+                                        repository_name = containing_repository.name
+                                        changeset_revision = containing_repository.changeset_revision
+                                        owner = containing_repository.owner
+                                    %>
+                                    <li>
+                                        Version <b>${version | h}</b> of ${type | h} <b>${name | h}</b> contained in revision 
+                                        <b>${changeset_revision | h}</b> of repository <b>${repository_name | h}</b> owned by <b>${owner | h}</b>
+                                    </li>
+                                %endfor
+                            </ul>
+                        %endif
+                        <br/>
+                    %endif
+                %endif
+            </div>
+        </div>
+%endfor
+            <div class="form-row">
+                <%
+                    can_deactivate_repository = any( r.can_deactivate for r in repositories )
+                    can_uninstall_repository = any( r.can_uninstall for r in repositories )
+                %>
+                %if can_deactivate_repository and can_uninstall_repository:
+                    <% deactivate_uninstall_button_text = "Deactivate or Uninstall" %>
+                    ${remove_from_disk_check_box.get_html()}
+                    <label for="remove_from_disk" style="display: inline;font-weight:normal;">Check to uninstall or leave blank to deactivate</label>
+                    <br/><br/>
+                %elif can_deactivate_repository:
+                    <% deactivate_uninstall_button_text = "Deactivate" %>
+                %else:
+                    <% deactivate_uninstall_button_text = "Uninstall" %>
+                    ## Hack to mimic a checked check box: Galaxy's CheckboxField reads a doubly
+                    ## posted name (checkbox plus hidden field) as checked.
+                    <input type="hidden" name="remove_from_disk" value="true"/><input type="hidden" name="remove_from_disk" value="true"/>
+                %endif
+                <input type="submit" name="deactivate_or_uninstall_repository_button" value="${deactivate_uninstall_button_text|h}"/>
+            </div>
+        </form>
+</div>
diff --git a/templates/admin/tool_shed_repository/grid.mako b/templates/admin/tool_shed_repository/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/tool_shed_repository/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/tool_shed_repository/initiate_repository_installation.mako b/templates/admin/tool_shed_repository/initiate_repository_installation.mako
new file mode 100644
index 0000000..0ddac1e
--- /dev/null
+++ b/templates/admin/tool_shed_repository/initiate_repository_installation.mako
@@ -0,0 +1,71 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${repository_installation_status_updater()}
+   ${repository_installation_updater()}
+   ${self.repository_installation_javascripts()}
+</%def>
+
+<%def name="repository_installation_javascripts()">
+    <script type="text/javascript">
+        $(document).ready(function( ){
+            initiate_repository_installation( "${initiate_repository_installation_ids}", "${encoded_kwd}", "${reinstalling}" );
+        });
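+        // POST to the manage_repositories controller with operation "install" to start the
+        // installation server-side; progress is then reported by the status updaters wired up above.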
+        var initiate_repository_installation = function ( iri_ids, encoded_kwd, reinstalling ) {
+            // Make ajax call
+            $.ajax( {
+                type: "POST",
+                url: "${h.url_for( controller='admin_toolshed', action='manage_repositories' )}",
+                dataType: "html",
+                data: { operation: "install", tool_shed_repository_ids: iri_ids, encoded_kwd: encoded_kwd, reinstalling: reinstalling },
+                success : function ( data ) {
+                    console.log( "Initializing repository installation succeeded" );
+                }
+            });
+        };
+    </script>
+</%def>
+
+%if tool_shed_repositories:
+    <div class="toolForm">
+        <div class="toolFormTitle">Monitor installing tool shed repositories</div>
+        <div class="toolFormBody">
+            <table class="grid">
+                <tr>
+                    <td>Name</td>
+                    <td>Description</td>
+                    <td>Owner</td>
+                    <td>Revision</td>
+                    <td>Status</td>
+                </tr>
+                %for tool_shed_repository in tool_shed_repositories:
+                    <%
+                        encoded_repository_id = trans.security.encode_id( tool_shed_repository.id )
+                        ids_of_tool_dependencies_missing_or_being_installed = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.tool_dependencies_missing_or_being_installed ]
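+                        # While tool dependencies are still installing, link the name to the tool dependencies grid instead of the repository view.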
+                        link_to_manage_tool_dependencies = tool_shed_repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ]
+                    %>
+                    <tr>
+                        <td>
+                            %if link_to_manage_tool_dependencies:
+                                <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', tool_dependency_ids=ids_of_tool_dependencies_missing_or_being_installed )}">
+                                    ${tool_shed_repository.name|h}
+                                </a>
+                            %else:
+                                <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=encoded_repository_id )}">
+                                    ${tool_shed_repository.name|h}
+                                </a>
+                            %endif
+                        </td>
+                        <td>${tool_shed_repository.description|h}</td>
+                        <td>${tool_shed_repository.owner|h}</td>
+                        <td>${tool_shed_repository.changeset_revision|h}</td>
+                        <td><div id="RepositoryStatus-${encoded_repository_id}">${tool_shed_repository.status|h}</div></td>
+                    </tr>
+                %endfor
+            </table>
+            <br clear="left"/>
+        </div>
+    </div>
+%endif
diff --git a/templates/admin/tool_shed_repository/install_tool_dependencies_with_update.mako b/templates/admin/tool_shed_repository/install_tool_dependencies_with_update.mako
new file mode 100644
index 0000000..7c1955f
--- /dev/null
+++ b/templates/admin/tool_shed_repository/install_tool_dependencies_with_update.mako
@@ -0,0 +1,93 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+<%namespace file="/message.mako" import="render_msg" />
+
+<% import os %>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+    <p>
+        The updates to the <b>${repository.name}</b> repository require the following packages.  Click the <b>Install</b> button to install them.
+        Installing some packages may take a while, but you can continue to use Galaxy during installation.
+    </p>
+</div>
+
+<div class="toolForm">
+    <div class="toolFormBody">
+        <form name="install_tool_dependencies_with_update" id="install_tool_dependencies_with_update" action="${h.url_for( controller='admin_toolshed', action='install_tool_dependencies_with_update' )}" method="post" >
+            <input type="hidden" name="updating_repository_id" value="${updating_repository_id|h}"/>
+            <input type="hidden" name="updating_to_ctx_rev" value="${updating_to_ctx_rev|h}"/>
+            <input type="hidden" name="updating_to_changeset_revision" value="${updating_to_changeset_revision|h}"/>
+            <input type="hidden" name="encoded_updated_metadata" value="${encoded_updated_metadata|h}"/>
+            <input type="hidden" name="encoded_relative_install_dir" value="${encoded_relative_install_dir|h}"/>
+            <input type="hidden" name="encoded_tool_dependencies_dict" value="${encoded_tool_dependencies_dict|h}"/>
+            %if tool_dependencies_dict:
+                %if install_tool_dependencies_check_box is not None:
+                    <div class="form-row">
+                        <label>Handle tool dependencies?</label>
+                        <% disabled = trans.app.config.tool_dependency_dir is None %>
+                        ${install_tool_dependencies_check_box.get_html( disabled=disabled )}
+                        <div class="toolParamHelp" style="clear: both;">
+                            %if disabled:
+                                Set the tool_dependency_dir configuration value in your Galaxy config to automatically handle tool dependencies.
+                            %else:
+                                Un-check to skip automatic handling of these tool dependencies.
+                            %endif
+                        </div>
+                    </div>
+                    <div style="clear: both"></div>
+                %endif
+                <div class="form-row">
+                    <table class="grid">
+                        <tr><td colspan="4" bgcolor="#D8D8D8"><b>New tool dependencies included in update</b></td></tr>
+                        <tr>
+                            <th>Name</th>
+                            <th>Version</th>
+                            <th>Install directory</th>
+                        </tr>
+                        %for key, requirements_dict in tool_dependencies_dict.items():
+                            <%
+                                readme_text = None
+                                if key == 'set_environment':
+                                    key_name = ', '.join( [ environment_variable[ 'name' ] for environment_variable in requirements_dict ] )
+                                    key_version = ''
+                                    install_dir = ''
+                                else:
+                                    key_items = key.split( '/' )
+                                    key_name = key_items[ 0 ]
+                                    key_version = key_items[ 1 ]
+                                    readme_text = requirements_dict.get( 'readme', None )
+                                    install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+                                                                key_name,
+                                                                key_version,
+                                                                repository.owner,
+                                                                repository.name,
+                                                                repository.installed_changeset_revision )
+                            %>
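+                            ## Only list dependencies that are not already installed (no existing install directory).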
+                            %if not os.path.exists( install_dir ):
+                                <tr>
+                                    <td>${key_name|h}</td>
+                                    <td>${key_version|h}</td>
+                                    <td>${install_dir|h}</td>
+                                </tr>
+                                %if readme_text:
+                                    <tr><td colspan="4" bgcolor="#FFFFCC">${key_name|h} ${key_version|h} requirements and installation information</td></tr>
+                                    <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
+                                %endif
+                            %endif
+                        %endfor
+                    </table>
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            <div class="form-row">
+                <input type="submit" name="install_tool_dependencies_with_update_button" value="Install"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/tool_shed_repository/manage_repository.mako b/templates/admin/tool_shed_repository/manage_repository.mako
new file mode 100644
index 0000000..9640a87
--- /dev/null
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -0,0 +1,82 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+    ${container_javascripts()}
+</%def>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Installed tool shed repository '${repository.name|h}'</div>
+    <div class="toolFormBody">
+        <form name="edit_repository" id="edit_repository" action="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+            <div class="form-row">
+                <label>Tool shed:</label>
+                ${repository.tool_shed|h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Name:</label>
+                ${repository.name|h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                %if in_error_state:
+                    ${description|h}
+                %else:
+                    <input name="description" type="textfield" value="${description|h}" size="80"/>
+                %endif
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Revision:</label>
+                ${repository.changeset_revision|h}
+            </div>
+            <div class="form-row">
+                <label>Owner:</label>
+                ${repository.owner|h}
+            </div>
+            %if in_error_state:
+                <div class="form-row">
+                    <label>Repository installation error:</label>
+                    ${repository.error_message|h}
+                </div>
+            %else:
+                <div class="form-row">
+                    <label>Location:</label>
+                    ${repo_files_dir|h}
+                </div>
+            %endif
+            <div class="form-row">
+                <label>Deleted:</label>
+                ${repository.deleted|h}
+            </div>
+            %if not in_error_state:
+                <div class="form-row">
+                    <input type="submit" name="edit_repository_button" value="Save"/>
+                </div>
+            %endif
+        </form>
+    </div>
+</div>
+<p/>
+${render_resolver_dependencies(requirements_status)}
+%if not in_error_state:
+    ${render_repository_items( repository.metadata, containers_dict, can_set_metadata=False, render_repository_actions_for='galaxy' )}
+%endif
diff --git a/templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako b/templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako
new file mode 100644
index 0000000..babb9ce
--- /dev/null
+++ b/templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako
@@ -0,0 +1,107 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="common_misc_javascripts" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${common_misc_javascripts()}
+</%def>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Tool shed repository '${repository.name|h}' tool dependencies</div>
+        <%
+            can_install = False
+            can_uninstall = False
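+            # Both flags are flipped below while iterating this repository's tool dependencies.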
+        %>
+        <br/><br/>
+        <table class="grid">
+            <tr><th bgcolor="#D8D8D8">Name</th><th bgcolor="#D8D8D8">Version</th><th bgcolor="#D8D8D8">Type</th><th bgcolor="#D8D8D8">Status</th><th bgcolor="#D8D8D8">Error</th></tr>
+            %for tool_dependency in repository.tool_dependencies:
+                <%
+                    if tool_dependency.error_message:
+                        from tool_shed.util.basic_util import to_html_string
+                        error_message = to_html_string( tool_dependency.error_message )
+                    else:
+                        error_message = ''
+                    if not can_install:
+                        if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                                       trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                            can_install = True
+                    if not can_uninstall:
+                        if tool_dependency.status not in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                                           trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                            can_uninstall = True
+                %>
+                <tr>
+                    <td>
+                        %if tool_dependency.status not in [ trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                            <a target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='manage_repository_tool_dependencies', operation='browse', tool_dependency_ids=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+                                ${tool_dependency.name|h}
+                            </a>
+                        %else:
+                            ${tool_dependency.name|h}
+                        %endif
+                    </td>
+                    <td>${tool_dependency.version|h}</td>
+                    <td>${tool_dependency.type|h}</td>
+                    <td>${tool_dependency.status|h}</td>
+                    <td>${error_message}</td>
+                </tr>
+            %endfor
+        </table>
+        %if can_install:
+            <br/>
+            <form name="install_tool_dependencies" id="install_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', operation='install', repository_id=trans.security.encode_id( repository.id ) )}" method="post" >
+                <div class="form-row">
+                    Check each tool dependency that you want to install and click <b>Install</b>.
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <input type="checkbox" id="checkAllUninstalled" name="select_all_uninstalled_tool_dependencies_checkbox" value="true" onclick="checkAllUninstalledToolDependencyIdFields(1);"/><input type="hidden" name="select_all_uninstalled_tool_dependencies_checkbox" value="true"/><b>Select/unselect all tool dependencies</b>
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    ${uninstalled_tool_dependencies_select_field.get_html()}
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <input type="submit" name="install_button" value="Install"/></td>
+                </div>
+            </form>
+            <br/>
+        %endif
+        %if can_uninstall:
+            <br/>
+            <form name="uninstall_tool_dependencies" id="uninstall_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='manage_repository_tool_dependencies', operation='uninstall', repository_id=trans.security.encode_id( repository.id ) )}" method="post" >
+                <div class="form-row">
+                    Check each tool dependency that you want to uninstall and click <b>Uninstall</b>.
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <input type="checkbox" id="checkAllInstalled" name="select_all_installed_tool_dependencies_checkbox" value="true" onclick="checkAllInstalledToolDependencyIdFields(1);"/><input type="hidden" name="select_all_installed_tool_dependencies_checkbox" value="true"/><b>Select/unselect all tool dependencies</b>
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    ${installed_tool_dependencies_select_field.get_html()}
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <input type="submit" name="uninstall_button" value="Uninstall"/></td>
+                </div>
+            </form>
+            <br/>
+        %endif
+    </div>
+</div>
diff --git a/templates/admin/tool_shed_repository/purge_repository_confirmation.mako b/templates/admin/tool_shed_repository/purge_repository_confirmation.mako
new file mode 100644
index 0000000..fd53127
--- /dev/null
+++ b/templates/admin/tool_shed_repository/purge_repository_confirmation.mako
@@ -0,0 +1,74 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+    <p>
+        Purging the repository named <b>${repository.name|h}</b> will result in deletion of all records for the
+        following associated items from the database.  Click the <b>Purge</b> button to purge this repository
+        and its associated items.
+    </p>
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Purge tool shed repository <b>${repository.name|h}</b></div>
+        <form name="purge_repository" id="purge_repository" action="${h.url_for( controller='admin_toolshed', action='purge_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+            <%
+                tool_versions = 0
+                tool_dependencies = 0
+                required_repositories = 0
+                orphan_repository_repository_dependency_association_records = 0
+                orphan_repository_dependency_records = 0
+                # Count this repository's tool version lineage chain links that will be purged.
+                for tool_version in repository.tool_versions:
+                    for tool_version_association in tool_version.parent_tool_association:
+                        tool_versions += 1
+                    for tool_version_association in tool_version.child_tool_association:
+                        tool_versions += 1
+                    tool_versions += 1
+                # Count this repository's associated tool dependencies that will be purged.
+                for tool_dependency in repository.tool_dependencies:
+                    tool_dependencies += 1
+                # Count this repository's associated required repositories that will be purged.
+                for rrda in repository.required_repositories:
+                    required_repositories += 1
+                # Count any "orphan" repository_dependency records associated with the repository but not with any
+                # repository_repository_dependency_association records that will be purged.
+                for orphan_repository_dependency in \
+                    trans.sa_session.query( trans.app.install_model.RepositoryDependency ) \
+                                    .filter( trans.app.install_model.RepositoryDependency.table.c.tool_shed_repository_id == repository.id ):
+                    for orphan_rrda in \
+                        trans.sa_session.query( trans.app.install_model.RepositoryRepositoryDependencyAssociation ) \
+                                        .filter( trans.app.install_model.RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == orphan_repository_dependency.id ):
+                        orphan_repository_repository_dependency_association_records += 1
+                    orphan_repository_dependency_records += 1
+            %>
+            <table class="grid">
+                <tr><td>Tool version records</td><td>${tool_versions|h}</td></tr>
+                <tr><td>Tool dependency records</td><td>${tool_dependencies|h}</td></tr>
+                <tr><td>Repository dependency records</td><td>${required_repositories|h}</td></tr>
+                <tr><td>Orphan repository_repository_dependency_association records</td><td>${orphan_repository_repository_dependency_association_records|h}</td></tr>
+                <tr><td>Orphan repository_dependency records</td><td>${orphan_repository_dependency_records|h}</td></tr>
+            </table>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" name="purge_repository_button" value="Purge"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/tool_shed_repository/repair_repository.mako b/templates/admin/tool_shed_repository/repair_repository.mako
new file mode 100644
index 0000000..db5c490
--- /dev/null
+++ b/templates/admin/tool_shed_repository/repair_repository.mako
@@ -0,0 +1,80 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+    <p>
+        The following repositories will be inspected and repaired in the order listed to ensure each repository and all of its tool dependencies are
+        correctly installed.
+    </p>
+    <p>
+        Existing system processes associated with repositories or tool dependencies that are currently being installed will not be automatically
+        terminated.  If possible, make sure no installation processes are still running for repositories whose status is or includes <b>cloning</b>,
+        <b>setting tool versions</b>, <b>installing repository dependencies</b>, or <b>installing tool dependencies</b> before clicking the <b>Repair</b>
+        button.
+    </p>
+    <p>
+        All repositories that do not display an <b>Installed</b> status will be removed from disk and reinstalled.
+    </p>
+    <p>
+        Click <b>Repair</b> to inspect and repair these repositories.
+    </p>
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Repair tool shed repository <b>${repository.name|h}</b></div>
+        <form name="repair_repository" id="repair_repository" action="${h.url_for( controller='admin_toolshed', action='repair_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+            <input type="hidden" name="repair_dict" value="${encoded_repair_dict|h}"/>
+            <%
+                from tool_shed.util.repository_util import get_tool_shed_repository_status_label
+                ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] ) 
+            %>
+            <table class="grid">
+                <tr>
+                    <th bgcolor="#D8D8D8">Name</th>
+                    <th bgcolor="#D8D8D8">Owner</th>
+                    <th bgcolor="#D8D8D8">Changeset revision</th>
+                    <th bgcolor="#D8D8D8">Status</th>
+                </tr>
+                %for repo_info_dict in ordered_repo_info_dicts:
+                    <%
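+                        # Each repo_info_dict holds a single name -> repo_info_tuple entry; grab it and stop.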
+                        for name, repo_info_tuple in repo_info_dict.items():
+                            description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+                            break
+                        status_label = get_tool_shed_repository_status_label( trans.app,
+                                                                              tool_shed_repository=None,
+                                                                              name=name,
+                                                                              owner=repository_owner,
+                                                                              changeset_revision=changeset_revision,
+                                                                              repository_clone_url=repository_clone_url )
+                    %>
+                    <tr>
+                        <td>${name | h}</td>
+                        <td>${repository_owner | h}</td>
+                        <td>${changeset_revision | h}</td>
+                        <td>${status_label}</td>
+                    </tr>
+                %endfor
+            </table>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" name="repair_repository_button" value="Repair"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/tool_shed_repository/repository_actions_menu.mako b/templates/admin/tool_shed_repository/repository_actions_menu.mako
new file mode 100644
index 0000000..c3a43e8
--- /dev/null
+++ b/templates/admin/tool_shed_repository/repository_actions_menu.mako
@@ -0,0 +1,47 @@
+<%inherit file="/base.mako"/>
+
+<%def name="render_galaxy_repository_actions( repository=None )">
+    <%
+        from tool_shed.util.encoding_util import tool_shed_encode
+        in_error_state = repository.in_error_state
+        tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ]
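+        # Administrative actions below are only offered once the repository is in a settled state.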
+        if repository.status in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED,
+                                  trans.install_model.ToolShedRepository.installation_status.ERROR,
+                                  trans.install_model.ToolShedRepository.installation_status.INSTALLED ]:
+            can_administer = True
+        else:
+            can_administer = False
+    %>
+    <br/><br/>
+    <ul class="manage-table-actions">
+        <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+        <div popupmenu="repository-${repository.id}-popup">
+            %if workflow_name:
+                <li><a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='import_workflow', workflow_name=tool_shed_encode( workflow_name ), repository_id=trans.security.encode_id( repository.id ) )}">Import workflow to Galaxy</a></li>
+            %endif
+            %if repository.can_reinstall_or_activate:
+                <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repositories', operation='activate or reinstall', id=trans.security.encode_id( repository.id ) )}">Activate or reinstall repository</a>
+            %endif
+            %if in_error_state:
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='reset_to_install', id=trans.security.encode_id( repository.id ), reset_repository=True )}">Reset to install</a>
+            %elif repository.can_install:
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ), operation='install' )}">Install</a>
+            %elif can_administer:
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a>
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a>
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get repository updates</a>
+                %if repository.can_reset_metadata:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='reset_repository_metadata', id=trans.security.encode_id( repository.id ) )}">Reset repository metadata</a>
+                %endif
+                %if repository.includes_tools:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
+                %endif
+                %if tool_dependency_ids:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='manage_repository_tool_dependencies', tool_dependency_ids=tool_dependency_ids, repository_id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+                %endif
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
+            %endif
+            <a class="action-button" target="galaxy_main" href="${h.url_for( controller='admin_toolshed', action='repair_repository', id=trans.security.encode_id( repository.id ) )}">Repair repository</a>
+        </div>
+    </ul>
+</%def>
diff --git a/templates/admin/tool_shed_repository/repository_installation_grid.mako b/templates/admin/tool_shed_repository/repository_installation_grid.mako
new file mode 100644
index 0000000..12e82a6
--- /dev/null
+++ b/templates/admin/tool_shed_repository/repository_installation_grid.mako
@@ -0,0 +1,8 @@
+<%inherit file="/grid_base.mako"/>
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${repository_installation_status_updater()}
+   ${repository_installation_updater()}
+</%def>
diff --git a/templates/admin/tool_shed_repository/repository_installation_status.mako b/templates/admin/tool_shed_repository/repository_installation_status.mako
new file mode 100644
index 0000000..fccc8d6
--- /dev/null
+++ b/templates/admin/tool_shed_repository/repository_installation_status.mako
@@ -0,0 +1,30 @@
+<%def name="render_repository_status( repository )">
+    <%
+        from markupsafe import escape
+        if repository.status in [ trans.install_model.ToolShedRepository.installation_status.CLONING,
+                                  trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+                                  trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+                                  trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+            bgcolor = trans.install_model.ToolShedRepository.states.INSTALLING
+        elif repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
+                                    trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+            bgcolor = trans.install_model.ToolShedRepository.states.UNINSTALLED
+        elif repository.status in [ trans.install_model.ToolShedRepository.installation_status.ERROR ]:
+            bgcolor = trans.install_model.ToolShedRepository.states.ERROR
+        elif repository.status in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+            bgcolor = trans.install_model.ToolShedRepository.states.WARNING
+        elif repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED ]:
+            if repository.missing_tool_dependencies or repository.missing_repository_dependencies:
+                bgcolor = trans.install_model.ToolShedRepository.states.WARNING
+            else:
+                bgcolor = trans.install_model.ToolShedRepository.states.OK
+        else:
+            bgcolor = trans.install_model.ToolShedRepository.states.ERROR
+        rval = '<div class="count-box state-color-%s" id="RepositoryStatus-%s">' % ( bgcolor, trans.security.encode_id( repository.id ) )
+        rval += '%s</div>' % escape( repository.status )
+        return rval
+    %>
+    ${rval}
+</%def>
+
+${render_repository_status( repository )}
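
The `<% %>` block above is an if/elif chain from installation status to a CSS state color, with unknown statuses falling through to the error color. The same mapping, condensed into a plain function; the status strings here are illustrative stand-ins for the `installation_status` constants.

    INSTALLING_STATUSES = frozenset([
        'Cloning', 'Setting tool versions',
        'Installing tool dependencies', 'Loading proprietary datatypes',
    ])
    UNINSTALLED_STATUSES = frozenset(['New', 'Uninstalled'])

    def repository_state_color(status, missing_dependencies=False):
        # Mirrors the template: in-flight statuses are "installing", a clean
        # install is "ok", an install with missing dependencies is "warning",
        # and anything unrecognized (including 'Error') is "error".
        if status in INSTALLING_STATUSES:
            return 'installing'
        if status in UNINSTALLED_STATUSES:
            return 'uninstalled'
        if status == 'Deactivated':
            return 'warning'
        if status == 'Installed':
            return 'warning' if missing_dependencies else 'ok'
        return 'error'

    assert repository_state_color('Installed') == 'ok'
    assert repository_state_color('Installed', missing_dependencies=True) == 'warning'
    assert repository_state_color('Bogus') == 'error'
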
diff --git a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
new file mode 100644
index 0000000..281762f
--- /dev/null
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -0,0 +1,101 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="render_dependencies_section" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="render_readme_section" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+    ${container_javascripts()}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormBody">
+        <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='reinstall_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+            <div class="form-row">
+                <input type="hidden" name="repo_info_dict" value="${encoded_repo_info_dict}" />
+            </div>
+            <div style="clear: both"></div>
+            <% readme_files_dict = containers_dict.get( 'readme_files', None ) %>
+            %if readme_files_dict:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">Repository README files - may contain important installation or license information</th>
+                    </table>
+                </div>
+                ${render_readme_section( containers_dict )}
+                <div style="clear: both"></div>
+            %endif
+            %if has_repository_dependencies or includes_tool_dependencies:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">Confirm dependency installation</th>
+                    </table>
+                </div>
+                ${render_dependencies_section( install_resolver_dependencies_check_box, install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict, revision_label=None, export=False )}
+            %endif
+            %if shed_tool_conf_select_field:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">Choose the configuration file whose tool_path setting will be used for installing repositories</th>
+                    </table>
+                </div>
+                <%
+                    if len( shed_tool_conf_select_field.options ) == 1:
+                        select_help = "Your Galaxy instance is configured with 1 shed-related tool configuration file, so repositories will be "
+                        select_help += "installed using its <b>tool_path</b> setting."
+                    else:
+                        select_help = "Your Galaxy instance is configured with %d shed-related tool configuration files, " % len( shed_tool_conf_select_field.options )
+                        select_help += "so select the file whose <b>tool_path</b> setting you want used for installing repositories."
+                %>
+                <div class="form-row">
+                    <label>Shed tool configuration file:</label>
+                    ${shed_tool_conf_select_field.get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${select_help|h}
+                    </div>
+                </div>
+                <div style="clear: both"></div>
+            %else:
+                <input type="hidden" name="shed_tool_conf" value="${shed_tool_conf|h}"/>
+            %endif
+            %if includes_tools_for_display_in_tool_panel:
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    ${no_changes_check_box.get_html()}
+                    <label style="display: inline;">No changes</label>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Uncheck and select a different tool panel section to load the tools into a different section in the tool panel.
+                    </div>
+                </div>
+                <div class="form-row">
+                    <label>Add new tool panel section:</label>
+                    <input name="new_tool_panel_section_label" type="textfield" value="" size="40"/>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Add a new tool panel section to contain the installed tools (optional).
+                    </div>
+                </div>
+                <div class="form-row">
+                    <label>Select existing tool panel section:</label>
+                    ${tool_panel_section_select_field.get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        Choose an existing section in your tool panel to contain the installed tools (optional).  
+                    </div>
+                </div>
+            %endif
+            <div class="form-row">
+                <input type="submit" name="select_tool_panel_section_button" value="Install"/>
+            </div>
+        </form>
+    </div>
+</div>
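
The inline `select_help` construction above recurs almost verbatim in the two select_*_tool_panel templates later in this patch. A small sketch of the same pluralization logic as a reusable helper; the function name is hypothetical, while the wording is taken from the templates.

    def shed_tool_conf_help(num_options):
        # One config file: explain that its tool_path will be used.
        # Several: prompt the admin to pick one.
        if num_options == 1:
            return ('Your Galaxy instance is configured with 1 shed-related tool '
                    'configuration file, so repositories will be installed using '
                    'its tool_path setting.')
        return ('Your Galaxy instance is configured with %d shed-related tool '
                'configuration files, so select the file whose tool_path setting '
                'you want used for installing repositories.' % num_options)

    assert 'its tool_path' in shed_tool_conf_help(1)
    assert '3 shed-related' in shed_tool_conf_help(3)
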
diff --git a/templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako b/templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako
new file mode 100644
index 0000000..800d2c2
--- /dev/null
+++ b/templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako
@@ -0,0 +1,40 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="common_misc_javascripts" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${common_misc_javascripts()}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+    Resetting metadata may take a while, so wait until this page redirects after clicking the <b>Reset metadata on selected repositories</b> button, as 
+    doing anything else will not be helpful.
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Reset all metadata on each selected tool shed repository</div>
+        <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_installed_repositories' )}" method="post" >
+            <div class="form-row">
+                Check each repository for which you want to reset metadata.  Repository names are followed by owners in parentheses.
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="checkbox" id="checkAll" name="select_all_repositories_checkbox" value="true" onclick="checkAllRepositoryIdFields(1);"/><input type="hidden" name="select_all_repositories_checkbox" value="true"/><b>Select/unselect all repositories</b>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                ${repositories_select_field.get_html()}
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" name="reset_metadata_on_selected_repositories_button" value="Reset metadata on selected repositories"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako b/templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako
new file mode 100644
index 0000000..efec7ae
--- /dev/null
+++ b/templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako
@@ -0,0 +1,128 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="render_dependencies_section" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="render_readme_section" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+    ${container_javascripts()}
+</%def>
+
+<%
+    # Handle the case where an uninstalled repository encountered errors during the process of being reinstalled.  In
+    # this case, the repository metadata is an empty dictionary, but one or both of has_repository_dependencies
+    # and includes_tool_dependencies may be True.  If either of these are True but we have no metadata, we cannot install
+    # repository dependencies on this pass.
+    if has_repository_dependencies:
+        repository_dependencies = containers_dict[ 'repository_dependencies' ]
+        missing_repository_dependencies = containers_dict[ 'missing_repository_dependencies' ]
+        if repository_dependencies or missing_repository_dependencies:
+            can_display_repository_dependencies = True
+        else:
+            can_display_repository_dependencies = False
+    else:
+        can_display_repository_dependencies = False
+    if includes_tool_dependencies:
+        tool_dependencies = containers_dict[ 'tool_dependencies' ]
+        missing_tool_dependencies = containers_dict[ 'missing_tool_dependencies' ]
+        if tool_dependencies or missing_tool_dependencies:
+            can_display_tool_dependencies = True
+        else:
+            can_display_tool_dependencies = False
+    else:
+        can_display_tool_dependencies = False
+    can_display_resolver_installation = install_resolver_dependencies_check_box is not None
+
+%>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+    <p>
+        The Galaxy development team does not maintain the contents of many Galaxy Tool Shed repositories.  Some
+        repository tools may include code that behaves maliciously, so be aware of what you are installing.
+    </p>
+    <p>
+        If you discover a repository that causes problems after installation, contact <a href="https://wiki.galaxyproject.org/Support" target="_blank">Galaxy support</a>
+        with all relevant details so that appropriate action can be taken.
+    </p>
+    <p>
+        <a href="https://wiki.galaxyproject.org/ToolShedRepositoryFeatures#Contact_repository_owner" target="_blank">Contact the repository owner</a> for 
+        general questions or concerns.
+    </p>
+</div>
+<div class="toolForm">
+    <div class="toolFormBody">
+        <form name="select_shed_tool_panel_config" id="select_shed_tool_panel_config" action="${h.url_for( controller='admin_toolshed', action='prepare_for_install' )}" method="post" >
+            <div class="form-row">
+                <input type="hidden" name="encoded_repo_info_dicts" value="${encoded_repo_info_dicts}" />
+                <input type="hidden" name="updating" value="${updating}" />
+                <input type="hidden" name="updating_repository_id" value="${updating_repository_id}" />
+                <input type="hidden" name="updating_to_ctx_rev" value="${updating_to_ctx_rev}" />
+                <input type="hidden" name="updating_to_changeset_revision" value="${updating_to_changeset_revision}" />
+                <input type="hidden" name="encoded_updated_metadata" value="${encoded_updated_metadata}" />
+                <input type="hidden" name="includes_tools" value="${includes_tools}" />
+                <input type="hidden" name="includes_tool_dependencies" value="${includes_tool_dependencies}" />
+                <input type="hidden" name="includes_tools_for_display_in_tool_panel" value="${includes_tools_for_display_in_tool_panel}" />
+                <input type="hidden" name="tool_shed_url" value="${tool_shed_url|h}" />
+            </div>
+            <div style="clear: both"></div>
+            <% readme_files_dict = containers_dict.get( 'readme_files', None ) %>
+            %if readme_files_dict:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">Repository README file - may contain important installation or license information</th>
+                    </table>
+                </div>
+                ${render_readme_section( containers_dict )}
+                <div style="clear: both"></div>
+            %endif
+            %if can_display_repository_dependencies or can_display_tool_dependencies or can_display_resolver_installation:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">Confirm dependency installation</th>
+                    </table>
+                </div>
+                ${render_dependencies_section( install_resolver_dependencies_check_box, install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict, revision_label=None, export=False )}
+                <div style="clear: both"></div>
+            %endif
+            <div class="form-row">
+                <table class="colored" width="100%">
+                    <th bgcolor="#EBD9B2">Choose the configuration file whose tool_path setting will be used for installing repositories</th>
+                </table>
+            </div>
+            %if shed_tool_conf_select_field:
+                <%
+                    if len( shed_tool_conf_select_field.options ) == 1:
+                        select_help = "Your Galaxy instance is configured with 1 shed-related tool configuration file, so repositories will be "
+                        select_help += "installed using its <b>tool_path</b> setting."
+                    else:
+                        select_help = "Your Galaxy instance is configured with %d shed-related tool configuration files, " % len( shed_tool_conf_select_field.options )
+                        select_help += "so select the file whose <b>tool_path</b> setting you want used for installing repositories."
+                %>
+                <div class="form-row">
+                    <label>Shed tool configuration file:</label>
+                    ${shed_tool_conf_select_field.get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${select_help|h}
+                    </div>
+                </div>
+                <div style="clear: both"></div>
+            %else:
+                <input type="hidden" name="shed_tool_conf" value="${shed_tool_conf|h}"/>
+            %endif
+            <div class="form-row">
+                <input type="submit" name="select_shed_tool_panel_config_button" value="Install"/>
+            </div>
+        </form>
+    </div>
+</div>
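
The module-level comment in this template explains why the `has_*`/`includes_*` hints cannot be trusted on their own: a failed reinstall can leave empty metadata while the hints stay True. The flag computation, condensed; the names follow the template, and only the function wrapper is illustrative.

    def dependency_display_flags(containers_dict, has_repository_dependencies,
                                 includes_tool_dependencies, resolver_check_box):
        # When a failed reinstall leaves repository metadata empty, the
        # has_*/includes_* hints may still be True, so the containers are
        # consulted as well before showing either dependencies section.
        can_display_repo_deps = bool(
            has_repository_dependencies and (
                containers_dict.get('repository_dependencies') or
                containers_dict.get('missing_repository_dependencies')))
        can_display_tool_deps = bool(
            includes_tool_dependencies and (
                containers_dict.get('tool_dependencies') or
                containers_dict.get('missing_tool_dependencies')))
        can_display_resolver = resolver_check_box is not None
        return can_display_repo_deps, can_display_tool_deps, can_display_resolver

    # A failed reinstall: empty containers but True hints -> nothing to display.
    assert dependency_display_flags({}, True, True, None) == (False, False, False)
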
diff --git a/templates/admin/tool_shed_repository/select_tool_panel_section.mako b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
new file mode 100644
index 0000000..642e4cd
--- /dev/null
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -0,0 +1,182 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="render_dependencies_section" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="render_readme_section" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+    ${container_javascripts()}
+</%def>
+
+<%
+    # Handle the case where an uninstalled repository encountered errors during the process of being reinstalled.  In
+    # this case, the repository metadata is an empty dictionary, but one or both of has_repository_dependencies
+    # and includes_tool_dependencies may be True.  If either of these are True but we have no metadata, we cannot install
+    # repository dependencies on this pass.
+    if has_repository_dependencies:
+        repository_dependencies = containers_dict[ 'repository_dependencies' ]
+        missing_repository_dependencies = containers_dict[ 'missing_repository_dependencies' ]
+        if repository_dependencies or missing_repository_dependencies:
+            can_display_repository_dependencies = True
+        else:
+            can_display_repository_dependencies = False
+    else:
+        can_display_repository_dependencies = False
+    if includes_tool_dependencies:
+        tool_dependencies = containers_dict[ 'tool_dependencies' ]
+        missing_tool_dependencies = containers_dict[ 'missing_tool_dependencies' ]
+        if tool_dependencies or missing_tool_dependencies:
+            can_display_tool_dependencies = True
+        else:
+            can_display_tool_dependencies = False
+    else:
+        can_display_tool_dependencies = False
+    can_display_resolver_installation = install_resolver_dependencies_check_box is not None
+%>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+    <p>
+        The Galaxy development team does not maintain the contents of many Galaxy Tool Shed repositories.  Some
+        repository tools may include code that behaves maliciously, so be aware of what you are installing.
+    </p>
+    <p>
+        If you discover a repository that causes problems after installation, contact <a href="https://wiki.galaxyproject.org/Support" target="_blank">Galaxy support</a>
+        with all relevant details so that appropriate action can be taken.
+    </p>
+    <p>
+        <a href="https://wiki.galaxyproject.org/ToolShedRepositoryFeatures#Contact_repository_owner" target="_blank">Contact the repository owner</a> for
+        general questions or concerns.
+    </p>
+</div>
+<div class="toolForm">
+    <div class="toolFormBody">
+        <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='prepare_for_install' )}" method="post" >
+            <div class="form-row">
+                <input type="hidden" name="includes_tools" value="${includes_tools}" />
+                <input type="hidden" name="includes_tool_dependencies" value="${includes_tool_dependencies}" />
+                <input type="hidden" name="requirements_status" value="${requirements_status}" />
+                <input type="hidden" name="includes_tools_for_display_in_tool_panel" value="${includes_tools_for_display_in_tool_panel}" />
+                <input type="hidden" name="tool_shed_url" value="${tool_shed_url}" />
+                <input type="hidden" name="encoded_repo_info_dicts" value="${encoded_repo_info_dicts}" />
+                <input type="hidden" name="updating" value="${updating}" />
+                <input type="hidden" name="updating_repository_id" value="${updating_repository_id}" />
+                <input type="hidden" name="updating_to_ctx_rev" value="${updating_to_ctx_rev}" />
+                <input type="hidden" name="updating_to_changeset_revision" value="${updating_to_changeset_revision}" />
+                <input type="hidden" name="encoded_updated_metadata" value="${encoded_updated_metadata}" />
+            </div>
+            <div style="clear: both"></div>
+            <% readme_files_dict = containers_dict.get( 'readme_files', None ) %>
+            %if readme_files_dict:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">Repository README file - may contain important installation or license information</th>
+                    </table>
+                </div>
+                ${render_readme_section( containers_dict )}
+                <div style="clear: both"></div>
+            %endif
+            <%
+                if ( requirements_status and install_resolver_dependencies_check_box ) or includes_tool_dependencies:
+                    display_dependency_confirmation = True
+                else:
+                    display_dependency_confirmation = False
+            %>
+            %if requirements_status:
+                %if not install_resolver_dependencies_check_box and not includes_tool_dependencies:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <thead>
+                            <th>
+                                <img src="${h.url_for('/static')}/images/icon_error_sml.gif" title='Cannot install dependencies'/>
+                                This repository requires dependencies that cannot be installed through the Tool Shed
+                            </th>
+                        </thead>
+                    </table>
+                </div>
+                <div class="form-row">
+                     <p>This repository defines tool requirements that cannot be installed through the Tool Shed.</p>
+                     <p>Please activate Conda dependency resolution, activate Docker dependency resolution, set up Environment Modules,
+or manually satisfy the dependencies listed below.</p>
+                     <p>For details see <a target="_blank" href="https://docs.galaxyproject.org/en/latest/admin/dependency_resolvers.html">the dependency resolver documentation</a>.</p>
+                </div>
+                %endif
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">The following tool dependencies are required by the current repository</th>
+                    </table>
+                </div>
+                <div class="form-row">
+                    ${render_tool_dependency_resolver( requirements_status, prepare_for_install=True )}
+                </div>
+                <div style="clear: both"></div>
+            %endif
+            %if can_display_repository_dependencies or display_dependency_confirmation:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">Confirm dependency installation</th>
+                    </table>
+                </div>
+                ${render_dependencies_section( install_resolver_dependencies_check_box, install_repository_dependencies_check_box, install_tool_dependencies_check_box, containers_dict, revision_label=None, export=False, requirements_status=requirements_status )}
+                <div style="clear: both"></div>
+            %endif
+            %if shed_tool_conf_select_field:
+                <div class="form-row">
+                    <table class="colored" width="100%">
+                        <th bgcolor="#EBD9B2">Choose the tool panel section to contain the installed tools (optional)</th>
+                    </table>
+                </div>
+                <div class="detail-section">
+                <%
+                    if len( shed_tool_conf_select_field.options ) == 1:
+                        select_help = "Your Galaxy instance is configured with 1 shed-related tool configuration file, so repositories will be "
+                        select_help += "installed using its <b>tool_path</b> setting."
+                    else:
+                        select_help = "Your Galaxy instance is configured with %d shed-related tool configuration files, " % len( shed_tool_conf_select_field.options )
+                        select_help += "so select the file whose <b>tool_path</b> setting you want used for installing repositories."
+                %>
+                <div class="form-row">
+                    <label>Shed tool configuration file:</label>
+                    ${shed_tool_conf_select_field.get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${select_help|h}
+                    </div>
+                </div>
+                <div style="clear: both"></div>
+                </div>
+            %else:
+                <input type="hidden" name="shed_tool_conf" value="${shed_tool_conf|h}"/>
+            %endif
+            <div class="form-row">
+                <label>Add new tool panel section:</label>
+                <input name="new_tool_panel_section_label" type="textfield" value="${new_tool_panel_section_label|h}" size="40"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    Add a new tool panel section to contain the installed tools (optional).
+                </div>
+            </div>
+            <div class="form-row">
+                <label>Select existing tool panel section:</label>
+                ${tool_panel_section_select_field.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    Choose an existing section in your tool panel to contain the installed tools (optional).
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="select_tool_panel_section_button" value="Install"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    Clicking <b>Install</b> without selecting a tool panel section will load the installed tools into the tool panel outside of any sections.
+                </div>
+            </div>
+        </form>
+    </div>
+</div>
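
Note the grouping in this template's `display_dependency_confirmation` condition: Python's `and` binds tighter than `or`, so `(requirements_status and install_resolver_dependencies_check_box) or includes_tool_dependencies` is the intended reading, which the parentheses make explicit. A short demonstration with illustrative stand-in values:

    # `and` binds tighter than `or`, so  a and b or c  ==  (a and b) or c.
    requirements_status = {}       # falsy: no requirements reported
    resolver_check_box = object()  # truthy stand-in for the check box widget
    includes_tool_dependencies = True

    implicit = requirements_status and resolver_check_box or includes_tool_dependencies
    explicit = (requirements_status and resolver_check_box) or includes_tool_dependencies
    assert implicit is explicit is True
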
diff --git a/templates/admin/tool_shed_repository/tool_dependencies_grid.mako b/templates/admin/tool_shed_repository/tool_dependencies_grid.mako
new file mode 100644
index 0000000..7dfdeff
--- /dev/null
+++ b/templates/admin/tool_shed_repository/tool_dependencies_grid.mako
@@ -0,0 +1,8 @@
+<%inherit file="/grid_base.mako"/>
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${dependency_status_updater()}
+   ${tool_dependency_installation_updater()}
+</%def>
diff --git a/templates/admin/tool_shed_repository/tool_dependency_installation_status.mako b/templates/admin/tool_shed_repository/tool_dependency_installation_status.mako
new file mode 100644
index 0000000..2cd9deb
--- /dev/null
+++ b/templates/admin/tool_shed_repository/tool_dependency_installation_status.mako
@@ -0,0 +1,21 @@
+<%def name="render_tool_dependency_status( tool_dependency )">
+    <%
+        if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.INSTALLING ]:
+            bgcolor = trans.install_model.ToolDependency.states.INSTALLING
+        elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+                                         trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+            bgcolor = trans.install_model.ToolDependency.states.UNINSTALLED
+        elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.ERROR ]:
+            bgcolor = trans.install_model.ToolDependency.states.ERROR
+        elif tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
+            bgcolor = trans.install_model.ToolDependency.states.OK
+        rval = '<div class="count-box state-color-%s" id="ToolDependencyStatus-%s">%s</div>' % \
+            ( bgcolor, trans.security.encode_id( tool_dependency.id ), tool_dependency.status )
+        return rval
+    %>    
+    ${rval}
+</%def>
+
+${render_tool_dependency_status( tool_dependency )}
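
Like the repository status template earlier in this patch, this one routes the status string through `markupsafe.escape` before interpolating it, since error statuses can carry arbitrary text. The pattern in isolation, using the stdlib escaper as a stand-in for markupsafe:

    from xml.sax.saxutils import escape  # stdlib stand-in for markupsafe.escape

    def status_badge(element_id, state_color, status_text):
        # Escape the status before interpolating: error statuses may embed
        # angle brackets or other markup-significant characters.
        return ('<div class="count-box state-color-%s" id="%s">%s</div>'
                % (state_color, element_id, escape(status_text)))

    print(status_badge('ToolDependencyStatus-abc123', 'error',
                       'Error: invalid <requirement> tag'))
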
diff --git a/templates/admin/tool_shed_repository/uninstall_tool_dependencies.mako b/templates/admin/tool_shed_repository/uninstall_tool_dependencies.mako
new file mode 100644
index 0000000..3311b7f
--- /dev/null
+++ b/templates/admin/tool_shed_repository/uninstall_tool_dependencies.mako
@@ -0,0 +1,64 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<% import os %>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Uninstall tool dependencies</div>
+    <div class="toolFormBody">
+        <form name="uninstall_tool_dependenceies" id="uninstall_tool_dependenceies" action="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependencies' )}" method="post" >       
+            <div class="form-row">
+                <table class="grid">
+                    <tr>
+                        <th>Name</th>
+                        <th>Version</th>
+                        <th>Type</th>
+                        <th>Install directory</th>
+                    </tr>
+                    %for tool_dependency in tool_dependencies:
+                        <input type="hidden" name="tool_dependency_ids" value="${trans.security.encode_id( tool_dependency.id )}"/>
+                        <%
+                            install_dir = None
+                            if tool_dependency.type == 'package':
+                                install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+                                                            tool_dependency.name,
+                                                            tool_dependency.version,
+                                                            tool_dependency.tool_shed_repository.owner,
+                                                            tool_dependency.tool_shed_repository.name,
+                                                            tool_dependency.tool_shed_repository.installed_changeset_revision )
+                            elif tool_dependency.type == 'set_environment':
+                                install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+                                                            'environment_settings',
+                                                            tool_dependency.name,
+                                                            tool_dependency.tool_shed_repository.owner,
+                                                            tool_dependency.tool_shed_repository.name,
+                                                            tool_dependency.tool_shed_repository.installed_changeset_revision )
+                            if install_dir is None or not os.path.exists( install_dir ):
+                                install_dir = "This dependency's installation directory does not exist; click Uninstall to reset it for installation."
+                        %>
+                        <tr>
+                            <td>${tool_dependency.name|h}</td>
+                            <td>${tool_dependency.version|h}</td>
+                            <td>${tool_dependency.type|h}</td>
+                            <td>${install_dir|h}</td>
+                        </tr>
+                    %endfor
+                </table>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="uninstall_tool_dependencies_button" value="Uninstall"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    Click to uninstall the tool dependencies listed above.
+                </div>
+            </div>
+        </form>
+    </div>
+</div>
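
The `<% %>` block above derives each dependency's install directory from its type, initializing `install_dir` first so an unrecognized type cannot raise a NameError mid-render. The same layout logic as a standalone function; the namedtuples and sample values are illustrative.

    import os
    from collections import namedtuple

    Repo = namedtuple('Repo', 'owner name installed_changeset_revision')
    Dep = namedtuple('Dep', 'name version type tool_shed_repository')

    def tool_dependency_install_dir(base_dir, dep):
        # 'package' dependencies live under name/version/owner/repo/revision;
        # 'set_environment' ones live under an environment_settings prefix.
        repo = dep.tool_shed_repository
        if dep.type == 'package':
            return os.path.join(base_dir, dep.name, dep.version, repo.owner,
                                repo.name, repo.installed_changeset_revision)
        if dep.type == 'set_environment':
            return os.path.join(base_dir, 'environment_settings', dep.name,
                                repo.owner, repo.name,
                                repo.installed_changeset_revision)
        return None  # mirror the template's guard for unexpected types

    dep = Dep('samtools', '0.1.19', 'package',
              Repo('devteam', 'package_samtools_0_1_19', 'abc123'))
    print(tool_dependency_install_dir('/opt/galaxy/deps', dep))
    # -> /opt/galaxy/deps/samtools/0.1.19/devteam/package_samtools_0_1_19/abc123
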
diff --git a/templates/admin/tool_shed_repository/view_tool_metadata.mako b/templates/admin/tool_shed_repository/view_tool_metadata.mako
new file mode 100644
index 0000000..8849b36
--- /dev/null
+++ b/templates/admin/tool_shed_repository/view_tool_metadata.mako
@@ -0,0 +1,211 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if tool_metadata:
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">${tool_metadata[ 'name' ]|h} tool metadata</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <table width="100%">
+                    <tr bgcolor="#D8D8D8" width="100%"><td><b>Miscellaneous</td></tr>
+                </table>
+            </div>
+            <div class="form-row">
+                <label>Name:</label>
+                ${tool_metadata[ 'name' ]|h}
+                <div style="clear: both"></div>
+            </div>
+            %if 'description' in tool_metadata:
+                <div class="form-row">
+                    <label>Description:</label>
+                    ${tool_metadata[ 'description' ]|h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'id' in tool_metadata:
+                <div class="form-row">
+                    <label>Id:</label>
+                    ${tool_metadata[ 'id' ]|h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'guid' in tool_metadata:
+                <div class="form-row">
+                    <label>Guid:</label>
+                    ${tool_metadata[ 'guid' ]|h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'version' in tool_metadata:
+                <div class="form-row">
+                    <label>Version:</label>
+                    ${tool_metadata[ 'version' ]|h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'version_string_cmd' in tool_metadata:
+                <div class="form-row">
+                    <label>Version command string:</label>
+                    ${tool_metadata[ 'version_string_cmd' ]|h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            <div class="form-row">
+                <table width="100%">
+                    <tr bgcolor="#D8D8D8" width="100%"><td><b>Version lineage of this tool (guids ordered most recent to oldest)</td></tr>
+                </table>
+            </div>
+            <div class="form-row">
+                %if tool_lineage:
+                    <table class="grid">
+                        %for guid in tool_lineage:
+                            <tr>
+                                <td>
+                                    %if guid == tool_metadata[ 'guid' ]:
+                                        ${guid|h} <b>(this tool)</b>
+                                    %else:
+                                        ${guid|h}
+                                    %endif
+                                </td>
+                            </tr>
+                        %endfor
+                    </table>
+                %else:
+                    No tool versions are defined for this tool, so it is critical that you <b>Set tool versions</b> from the <b>Manage repository</b> page.
+                %endif
+            </div>
+            <div class="form-row">
+                <table width="100%">
+                    <tr bgcolor="#D8D8D8" width="100%"><td><b>Requirements (dependencies defined in the <requirements> tag set)</td></tr>
+                </table>
+            </div>
+            <%
+                if 'requirements' in tool_metadata:
+                    requirements = tool_metadata[ 'requirements' ]
+                else:
+                    requirements = None
+            %>
+            %if requirements:
+                <div class="form-row">
+                    <label>Requirements:</label>
+                    <table class="grid">
+                        <tr>
+                            <td><b>name</b></td>
+                            <td><b>version</b></td>
+                            <td><b>type</b></td>
+                        </tr>
+                        %for requirement_dict in requirements:
+                            <%
+                                requirement_name = requirement_dict[ 'name' ] or 'not provided'
+                                requirement_version = requirement_dict[ 'version' ] or 'not provided'
+                                requirement_type = requirement_dict[ 'type' ] or 'not provided'
+                            %>
+                            <tr>
+                                <td>${requirement_name|h}</td>
+                                <td>${requirement_version|h}</td>
+                                <td>${requirement_type|h}</td>
+                            </tr>
+                        %endfor
+                    </table>
+                    <div style="clear: both"></div>
+                </div>
+            %else:
+                <div class="form-row">
+                    No requirements defined
+                </div>
+            %endif
+            %if tool:
+                <div class="form-row">
+                    <table width="100%">
+                        <tr bgcolor="#D8D8D8" width="100%"><td><b>Additional information about this tool</td></tr>
+                    </table>
+                </div>
+                <div class="form-row">
+                    <label>Command:</label>
+                    <pre>${tool.command|h}</pre>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Interpreter:</label>
+                    ${tool.interpreter|h}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Is multi-byte:</label>
+                    ${tool.is_multi_byte|h}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Forces a history refresh:</label>
+                    ${tool.force_history_refresh|h}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Parallelism:</label>
+                    ${tool.parallelism|h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            <div class="form-row">
+                <table width="100%">
+                    <tr bgcolor="#D8D8D8" width="100%"><td><b>Functional tests</td></tr>
+                </table>
+            </div>
+            <%
+                if 'tests' in tool_metadata:
+                    tests = tool_metadata[ 'tests' ]
+                else:
+                    tests = None
+            %>
+            %if tests:
+                <div class="form-row">
+                    <table class="grid">
+                        <tr>
+                            <td><b>name</b></td>
+                            <td><b>inputs</b></td>
+                            <td><b>outputs</b></td>
+                            <td><b>required files</b></td>
+                        </tr>
+                        %for test_dict in tests:
+                            <%
+                                inputs = test_dict[ 'inputs' ]
+                                outputs = test_dict[ 'outputs' ]
+                                required_files = test_dict[ 'required_files' ]
+                            %>
+                            <tr>
+                                <td>${test_dict[ 'name' ]|h}</td>
+                                <td>
+                                    %for input in inputs:
+                                        <b>${input[0]|h}:</b> ${input[1]|h}<br/>
+                                    %endfor
+                                </td>
+                                <td>
+                                    %for output in outputs:
+                                        <b>${output[0]|h}:</b> ${output[1]|h}<br/>
+                                    %endfor
+                                </td>
+                                <td>
+                                    %for required_file in required_files:
+                                        ${required_file|h}<br/>
+                                    %endfor
+                                </td>
+                            </tr>
+                        %endfor
+                    </table>
+                </div>
+            %else:
+                <div class="form-row">
+                    No functional tests defined
+                </div>
+            %endif
+        </div>
+    </div>
+%endif
diff --git a/templates/admin/tool_shed_repository/view_workflow.mako b/templates/admin/tool_shed_repository/view_workflow.mako
new file mode 100644
index 0000000..4d1bde5
--- /dev/null
+++ b/templates/admin/tool_shed_repository/view_workflow.mako
@@ -0,0 +1,37 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+
+<% from tool_shed.util.encoding_util import tool_shed_encode %>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="render_workflow( workflow_name, repository_id )">
+    <% center_url = h.url_for( controller='admin_toolshed', action='generate_workflow_image', workflow_name=tool_shed_encode( workflow_name ), repository_id=repository_id ) %>
+    <iframe name="workflow_image" id="workflow_image" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url|h}"> </iframe>
+</%def>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolFormTitle">${workflow_name | h}</div>
+<div class="form-row">
+    <div class="toolParamHelp" style="clear: both;">
+        (this page displays SVG graphics)
+    </div>
+</div>
+<br clear="left"/>
+
+${render_workflow( workflow_name, repository_id )}
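
This template selects its parent at render time through the `inherit(context)` function declared in the `<%!` block, which is why it carries only the single dynamic `<%inherit>` tag. A self-contained sketch of Mako's dynamic-inheritance pattern; the template names and bodies here are illustrative.

    from mako.lookup import TemplateLookup

    lookup = TemplateLookup()
    # Illustrative parents standing in for base.mako / base_panels.mako.
    lookup.put_string('/plain.mako', 'PLAIN: ${self.body()}')
    lookup.put_string('/panels.mako', 'PANELS: ${self.body()}')
    lookup.put_string('/child.mako', """
    <%!
        def inherit(context):
            # Same idea as view_workflow.mako: pick the parent per request.
            if context.get('use_panels'):
                return '/panels.mako'
            return '/plain.mako'
    %>
    <%inherit file="${inherit(context)}"/>
    workflow body
    """)

    child = lookup.get_template('/child.mako')
    print(child.render(use_panels=True))   # PANELS: ... workflow body
    print(child.render(use_panels=False))  # PLAIN: ... workflow body
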
diff --git a/templates/admin/tool_version/grid.mako b/templates/admin/tool_version/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/tool_version/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/user/grid.mako b/templates/admin/user/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/admin/user/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/admin/user/reset_password.mako b/templates/admin/user/reset_password.mako
new file mode 100644
index 0000000..0ac54d9
--- /dev/null
+++ b/templates/admin/user/reset_password.mako
@@ -0,0 +1,37 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Reset password for users</div>
+    <div class="toolFormBody">
+        <form name="form" action="${h.url_for( controller='admin', action='reset_user_password' )}" method="post" >
+            <input type="hidden" name="id" value="${id}" size="40">
+            %for user in users:
+                <div class="form-row">
+                    <label>Email:</label>
+                    ${user.email|h}
+                    <div style="clear: both"></div>
+                </div>
+            %endfor
+            <div class="form-row">
+                <label>Password:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="password" name="password" value="${password}" size="40">
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Confirm password:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="password" name="confirm" value="${confirm}" size="40">
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <input type="submit" name="reset_user_password_button" value="Reset">
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/user/user.mako b/templates/admin/user/user.mako
new file mode 100644
index 0000000..41e7793
--- /dev/null
+++ b/templates/admin/user/user.mako
@@ -0,0 +1,83 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="render_select( name, options )">
+    <select name="${name|h}" id="${name|h}" style="min-width: 250px; height: 150px;" multiple>
+        %for option in options:
+            <option value="${option[0]|h}">${option[1]|h}</option>
+        %endfor
+    </select>
+</%def>
+
+<script type="text/javascript">
+$().ready(function() {  
+    $('#roles_add_button').click(function() {
+        return !$('#out_roles option:selected').remove().appendTo('#in_roles');
+    });
+    $('#roles_remove_button').click(function() {
+        return !$('#in_roles option:selected').remove().appendTo('#out_roles');
+    });
+    $('#groups_add_button').click(function() {
+        return !$('#out_groups option:selected').remove().appendTo('#in_groups');
+    });
+    $('#groups_remove_button').click(function() {
+        return !$('#in_groups option:selected').remove().appendTo('#out_groups');
+    });
+    $('form#associate_user_role_group').submit(function() {
+        $('#in_roles option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+        $('#in_groups option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+    });
+});
+</script>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">User '${user.email|h}'</div>
+    <div class="toolFormBody">
+        <form name="associate_user_role_group" id="associate_user_role_group" action="${h.url_for(controller='admin', action='manage_roles_and_groups_for_user', id=trans.security.encode_id( user.id ) )}" method="post" >
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Roles associated with '${user.email|h}'</label>
+                    ${render_select( "in_roles", in_roles )}<br/>
+                    <input type="submit" id="roles_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Roles not associated with '${user.email|h}'</label>
+                    ${render_select( "out_roles", out_roles )}<br/>
+                    <input type="submit" id="roles_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Groups associated with '${user.email|h}'</label>
+                    ${render_select( "in_groups", in_groups )}<br/>
+                    <input type="submit" id="groups_remove_button" value=">>"/>
+                </div>
+                <div>
+                    <label>Groups not associated with '${user.email|h}'</label>
+                    ${render_select( "out_groups", out_groups )}<br/>
+                    <input type="submit" id="groups_add_button" value="<<"/>
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="user_roles_groups_edit_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/admin/view_data_tables_registry.mako b/templates/admin/view_data_tables_registry.mako
new file mode 100644
index 0000000..faf75f5
--- /dev/null
+++ b/templates/admin/view_data_tables_registry.mako
@@ -0,0 +1,50 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<%
+    ctr = 0
+    sorted_data_tables = sorted( trans.app.tool_data_tables.get_tables().items() )
+%>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Current data table registry contains ${len( sorted_data_tables )} data tables</div>
+    <div class="toolFormBody">
+        <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+            <tr>
+                <th bgcolor="#D8D8D8">Name</th>
+                <th bgcolor="#D8D8D8">Filename</th>
+                <th bgcolor="#D8D8D8">Tool data path</th>
+                <th bgcolor="#D8D8D8">Errors</th>
+            </tr>
+            %for data_table_elem_name, data_table in sorted_data_tables:
+                %if ctr % 2 == 1:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                    <td><a href="${ h.url_for( controller='data_manager', action='manage_data_table', table_name=data_table.name ) }">${data_table.name|h}</a></td>
+                    %for i, ( filename, file_dict ) in enumerate( data_table.filenames.items() ):
+                        %if i > 0:
+                            <tr><td></td>
+                        %endif
+                        <td>${ filename | h }</td>
+                        <td>${ file_dict.get( 'tool_data_path' ) | h }</td>
+                        <td>
+                            %if not file_dict.get( 'found' ):
+                                file missing
+                            %endif
+                            %for error in file_dict.get( 'errors', [] ):
+                                ${ error | h } <br/>
+                            %endfor
+                        </td>
+                        </tr>
+                    %endfor
+                <% ctr += 1 %>
+            %endfor
+        </table>
+    </div>
+</div>
diff --git a/templates/admin/view_datatypes_registry.mako b/templates/admin/view_datatypes_registry.mako
new file mode 100644
index 0000000..a83e545
--- /dev/null
+++ b/templates/admin/view_datatypes_registry.mako
@@ -0,0 +1,57 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<%
+    import galaxy.util
+    from galaxy.web.base.controller import sort_by_attr, Datatype
+    ctr = 0
+    datatypes = []
+    for elem in trans.app.datatypes_registry.datatype_elems:
+        # Build a list of objects that can be sorted.
+        extension = elem.get( 'extension', None )
+        dtype = elem.get( 'type', None )
+        type_extension = elem.get( 'type_extension', None )
+        mimetype = elem.get( 'mimetype', None )
+        display_in_upload = galaxy.util.string_as_bool( elem.get( 'display_in_upload', False ) )
+        datatypes.append( Datatype( extension, dtype, type_extension, mimetype, display_in_upload ) )
+    sorted_datatypes = sort_by_attr( datatypes, 'extension' )
+%>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Current data types registry contains ${len( sorted_datatypes )} data types</div>
+    <div class="toolFormBody">
+        <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+            <tr>
+                <th bgcolor="#D8D8D8">Extension</th>
+                <th bgcolor="#D8D8D8">Type</th>
+                <th bgcolor="#D8D8D8">Mimetype</th>
+                <th bgcolor="#D8D8D8">Display in upload</th>
+            </tr>
+            %for datatype in sorted_datatypes:
+                %if ctr % 2 == 1:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                    <td>${datatype.extension|h}</td>
+                    <td>${datatype.dtype|h}</td>
+                    <td>
+                        %if datatype.mimetype:
+                            ${datatype.mimetype|h}
+                        %endif
+                    </td>
+                    <td>
+                        %if datatype.display_in_upload:
+                            ${datatype.display_in_upload|h}
+                        %endif
+                    </td>
+                </tr>
+                <% ctr += 1 %>
+            %endfor
+        </table>
+    </div>
+</div>
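
Both registry views build a list of row objects and order them with the `sort_by_attr` helper imported from the controller base. In plain Python the same sort is `sorted()` with `operator.attrgetter`; a sketch with illustrative sample rows:

    from collections import namedtuple
    from operator import attrgetter

    Datatype = namedtuple(
        'Datatype', 'extension dtype type_extension mimetype display_in_upload')

    datatypes = [
        Datatype('vcf', 'galaxy.datatypes.tabular:Vcf', None, 'text/plain', True),
        Datatype('bam', 'galaxy.datatypes.binary:Bam', None, None, True),
    ]
    # Equivalent to sort_by_attr( datatypes, 'extension' ).
    sorted_datatypes = sorted(datatypes, key=attrgetter('extension'))
    assert [d.extension for d in sorted_datatypes] == ['bam', 'vcf']
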
diff --git a/templates/base.mako b/templates/base.mako
new file mode 100644
index 0000000..22af35c
--- /dev/null
+++ b/templates/base.mako
@@ -0,0 +1,109 @@
+<%namespace name="galaxy_client" file="/galaxy_client_app.mako" />
+<% self.js_app = None %>
+
+<% _=n_ %>
+<!DOCTYPE HTML>
+<html>
+    <!--base.mako-->
+    ${self.init()}
+    <head>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        ## For mobile browsers, don't scale up
+        <meta name = "viewport" content = "maximum-scale=1.0">
+        ## Force IE to standards mode, and prefer Google Chrome Frame if the user has already installed it
+        <meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1">
+
+        <title>${self.title()}</title>
+        ## relative href for site root
+        <link rel="index" href="${ h.url_for( '/' ) }"/>
+        ${self.metas()}
+        ${self.stylesheets()}
+        ${self.javascripts()}
+        ${self.javascript_app()}
+    </head>
+    <body class="inbound">
+        ${next.body()}
+    </body>
+</html>
+
+## Default title
+<%def name="title()"></%def>
+
+## Default init
+<%def name="init()"></%def>
+
+## Default stylesheets
+<%def name="stylesheets()">
+    ${h.css('base')}
+    ${h.css('bootstrap-tour')}
+</%def>
+
+## Default javascripts
+<%def name="javascripts()">
+    ## Send errors to Sentry server if configured
+    %if app.config.sentry_dsn:
+        ${h.js( "libs/raven" )}
+        <script>
+            Raven.config('${app.config.sentry_dsn_public}').install();
+            %if trans.user:
+                Raven.setUser( { email: "${trans.user.email|h}" } );
+            %endif
+        </script>
+    %endif
+
+    ${h.js(
+        ## TODO: remove when all libs are required directly in modules
+        'bundled/libs.bundled',
+        'libs/require',
+        "libs/bootstrap-tour",
+    )}
+
+    <script type="text/javascript">
+        ## global configuration object
+        ## TODO: remove
+        window.Galaxy = window.Galaxy || {};
+        window.Galaxy.root = '${h.url_for( "/" )}';
+        window.Galaxy.config = {};
+
+        // configure require
+        // because we use both script tags and require, both must share the same jQuery instance for plugin retention
+        // source http://www.manuel-strehl.de/dev/load_jquery_before_requirejs.en.html
+        define( 'jquery', [], function(){ return jQuery; })
+        // TODO: use one system
+
+        // shims and paths
+        require.config({
+            baseUrl: "${h.url_for('/static/scripts') }",
+            shim: {
+                "libs/underscore": {
+                    exports: "_"
+                },
+                "libs/backbone": {
+                    deps: [ 'jquery', 'libs/underscore' ],
+                    exports: "Backbone"
+                }
+            },
+            // cache busting using the time the server was restarted
+            urlArgs: 'v=${app.server_starttime}'
+        });
+    </script>
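+
+    ## A minimal sketch of loading a module against the RequireJS config above
+    ## (the module name here is hypothetical):
+    ##   <script type="text/javascript">
+    ##       require( [ 'mvc/some-module' ], function( someModule ){
+    ##           // resolved relative to baseUrl, with the shims defined above
+    ##       });
+    ##   </script>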
+
+    %if form_input_auto_focus is not UNDEFINED and form_input_auto_focus:
+        <script type="text/javascript">
+            $(document).ready( function() {
+                // Auto-focus the first visible, enabled input on the form
+                if ( $("*:focus").html() == null ) {
+                    $(":input:not([type=hidden]):visible:enabled:first").focus();
+                }
+            });
+        </script>
+    %endif
+
+</%def>
+
+<%def name="javascript_app()">
+    ${ galaxy_client.load( app=self.js_app ) }
+</%def>
+
+## Additional metas can be defined by templates inheriting from this one.
+<%def name="metas()"></%def>
diff --git a/templates/base/base_panels.mako b/templates/base/base_panels.mako
new file mode 100644
index 0000000..0beaa5d
--- /dev/null
+++ b/templates/base/base_panels.mako
@@ -0,0 +1,246 @@
+<!DOCTYPE HTML>
+<%namespace name="galaxy_client" file="/galaxy_client_app.mako" />
+
+<%
+    self.has_left_panel = hasattr( self, 'left_panel' )
+    self.has_right_panel = hasattr( self, 'right_panel' )
+    self.message_box_visible = app.config.message_box_visible
+    self.show_inactivity_warning = False
+    if trans.webapp.name == 'galaxy' and trans.user:
+        self.show_inactivity_warning = ( ( trans.user.active is False ) and ( app.config.user_activation_on ) )
+    self.overlay_visible=False
+    self.active_view=None
+    self.body_class=""
+    self.require_javascript=False
+%>
+
+<%def name="init()">
+    ## Override
+</%def>
+
+## Default stylesheets
+<%def name="stylesheets()">
+    ${h.css(
+        'base',
+        'jquery.rating',
+        'bootstrap-tour'
+    )}
+    <style type="text/css">
+    #center {
+        %if not self.has_left_panel:
+            left: 0 !important;
+        %endif
+        %if not self.has_right_panel:
+            right: 0 !important;
+        %endif
+    }
+    </style>
+</%def>
+
+## Default javascripts
+<%def name="javascripts()">
+    ## Send errors to Sentry server if configured
+    %if app.config.sentry_dsn:
+        ${h.js( "libs/raven" )}
+        <script>
+            Raven.config('${app.config.sentry_dsn_public}').install();
+            %if trans.user:
+                Raven.setUser( { email: "${trans.user.email | h}" } );
+            %endif
+        </script>
+    %endif
+
+    ${h.js(
+        ## TODO: remove when all libs are required directly in modules
+        'bundled/libs.bundled',
+        'libs/require',
+    )}
+
+    <script type="text/javascript">
+        // configure require
+        // because we use both script tags and require, both must share the same jQuery instance for plugin retention
+        // source http://www.manuel-strehl.de/dev/load_jquery_before_requirejs.en.html
+        window.Galaxy = window.Galaxy || {};
+        window.Galaxy.root = '${h.url_for( "/" )}';
+        define( 'jquery', [], function(){ return jQuery; })
+        // TODO: use one system
+
+        // shims and paths
+        require.config({
+            baseUrl: "${h.url_for('/static/scripts') }",
+            shim: {
+                "libs/underscore": {
+                    exports: "_"
+                },
+                "libs/backbone": {
+                    deps: [ 'jquery', 'libs/underscore' ],
+                    exports: "Backbone"
+                }
+            },
+            // cache busting using the time the server was restarted
+            urlArgs: 'v=${app.server_starttime}',
+        });
+    </script>
+
+</%def>
+
+<%def name="javascript_app()">
+    ## load the Galaxy global js var
+    ${ galaxy_client.load() }
+</%def>
+
+## Default late-load javascripts
+<%def name="late_javascripts()">
+    ## Scripts can be loaded later since they progressively add features to
+    ## the panels, but do not change layout
+    <script type="text/javascript">
+
+    %if self.has_left_panel:
+        var lp = new panels.LeftPanel({ el: '#left' });
+        force_left_panel = function( x ) { lp.force_panel( x ) };
+    %endif
+
+    %if self.has_right_panel:
+        var rp = new panels.RightPanel({ el: '#right' });
+        window.handle_minwidth_hint = function( x ) { rp.handle_minwidth_hint( x ) };
+        force_right_panel = function( x ) { rp.force_panel( x ) };
+    %endif
+
+    %if t.webapp.name == 'galaxy' and app.config.ga_code:
+          (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+          (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+          m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+          })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+          ga('create', '${app.config.ga_code}', 'auto');
+          ga('send', 'pageview');
+    %endif
+
+    </script>
+</%def>
+
+## Masthead
+<%def name="masthead()">
+    ## Override
+</%def>
+
+<%def name="overlay( title='', content='', visible=False )">
+    <%def name="title()"></%def>
+    <%def name="content()"></%def>
+
+    <%
+    if visible:
+        display = "style='display: block;'"
+        overlay_class = "in"
+    else:
+        display = "style='display: none;'"
+        overlay_class = ""
+    %>
+
+    <div id="top-modal" class="modal fade ${overlay_class}" ${display}>
+        <div id="top-modal-backdrop" class="modal-backdrop fade ${overlay_class}" style="z-index: -1"></div>
+        <div id="top-modal-dialog" class="modal-dialog">
+            <div class="modal-content">
+                <div class="modal-header">
+                    <button type='button' class='close' style="display: none;">×</button>
+                    <h4 class='title'>${title}</h4>
+                </div>
+                <div class="modal-body">${content}</div>
+                <div class="modal-footer">
+                    <div class="buttons" style="float: right;"></div>
+                    <div class="extra_buttons" style=""></div>
+                    <div style="clear: both;"></div>
+                </div>
+            </div>
+        </div>
+    </div>
+</%def>
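+
+## A minimal usage sketch (the title/content values are hypothetical); the
+## document body below invokes it with only the 'visible' flag:
+##   ${self.overlay( title='Please wait', content='Loading...', visible=True )}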
+
+## Document
+<html>
+    <!--base_panels.mako-->
+    ${self.init()}
+    <head>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        ## For mobile browsers, don't scale up
+        <meta name="viewport" content="maximum-scale=1.0">
+        ## Force IE to standards mode, and prefer Google Chrome Frame if the user has already installed it
+        <meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1">
+
+        %if app.config.brand:
+            <title>${self.title()} / ${app.config.brand}</title>
+        %else:
+            <title>${self.title()}</title>
+        %endif
+        ## relative href for site root
+        <link rel="index" href="${ h.url_for( '/' ) }"/>
+        ${self.stylesheets()}
+        ${self.javascripts()}
+        ${self.javascript_app()}
+    </head>
+
+    <%
+    body_class = self.body_class
+    if self.message_box_visible:
+        body_class += " has-message-box"
+    if self.show_inactivity_warning:
+        body_class += " has-inactivity-box"
+    %>
+
+    <body scroll="no" class="full-content ${body_class}">
+        %if self.require_javascript:
+            <noscript>
+                <div class="overlay overlay-background">
+                    <div class="modal dialog-box" border="0">
+                        <div class="modal-header"><h3 class="title">JavaScript Required</h3></div>
+                        <div class="modal-body">The Galaxy analysis interface requires a browser with JavaScript enabled. <br> Please enable JavaScript and refresh this page.</div>
+                    </div>
+                </div>
+            </noscript>
+        %endif
+        <div id="everything" style="position: absolute; top: 0; left: 0; width: 100%; height: 100%;">
+            ## Background displays first
+            <div id="background"></div>
+            ## Layer iframes over backgrounds
+            <div id="masthead" class="navbar navbar-fixed-top navbar-inverse">
+                ${self.masthead()}
+            </div>
+            %if self.message_box_visible:
+                <div id="messagebox" class="panel-${app.config.message_box_class}-message" style="display:block">
+                    ${app.config.message_box_content}
+                </div>
+            %endif
+            %if self.show_inactivity_warning:
+                <div id="inactivebox" class="panel-warning-message">
+                    ${app.config.inactivity_box_content} <a href="${h.url_for( controller='user', action='resend_verification' )}">Resend verification.</a>
+                </div>
+            %endif
+            ${self.overlay(visible=self.overlay_visible)}
+            %if self.has_left_panel:
+                <div id="left">
+                    ${self.left_panel()}
+                    <div class="unified-panel-footer">
+                        <div class="panel-collapse"></div>
+                        <div class="drag"></div>
+                    </div>
+                </div><!--end left-->
+            %endif
+            <div id="center" class="inbound">
+                ${self.center_panel()}
+            </div><!--end center-->
+            %if self.has_right_panel:
+                <div id="right">
+                    ${self.right_panel()}
+                    <div class="unified-panel-footer">
+                        <div class="panel-collapse right"></div>
+                        <div class="drag"></div>
+                    </div>
+                </div><!--end right-->
+            %endif
+        </div><!--end everything-->
+        <div id='dd-helper' style="display: none;"></div>
+        ## Allow other body level elements
+        ## Scripts can be loaded later since they progressively add features to
+        ## the panels, but do not change layout
+        ${self.late_javascripts()}
+    </body>
+</html>
diff --git a/templates/common/select_template.mako b/templates/common/select_template.mako
new file mode 100644
index 0000000..7044f5a
--- /dev/null
+++ b/templates/common/select_template.mako
@@ -0,0 +1,76 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%
+    in_library = form_type == trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+    in_sample_tracking = form_type == trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE
+    if in_library:
+        # When rendering for a library folder or dataset, inheritance is set by the
+        # user; when rendering for a RequestType, the template is always available
+        # to samples.
+        from galaxy.web.form_builder import CheckboxField
+        inheritable_check_box = CheckboxField( 'inheritable' )
+%>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    %if in_library:
+        <li><a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse the data library</span></a></li>
+    %elif in_sample_tracking:
+        <li><a class="action-button" href="${h.url_for( controller='request_type', action='view_request_type', id=request_type_id )}"><span>Browse the configuration</span></a></li>
+    %endif
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Select a template for the ${item_desc} '${util.unicodify( item_name )}'</div>
+    <div class="toolFormBody">
+        %if form_type == trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE:
+            <form id="select_template" name="select_template" action="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type=item_type, form_type=trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, use_panels=use_panels, show_deleted=show_deleted )}" method="post" >
+        %elif form_type == trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE:
+            <form id="select_template" name="select_template" action="${h.url_for( controller='request_type', action='add_template', cntrller=cntrller, item_type=item_type, form_type=trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE, request_type_id=request_type_id, sample_id=sample_id )}" method="post">
+        %endif
+            <div class="form-row">
+                <label>Template:</label>
+                ${form_id_select_field.get_html()}
+            </div>
+            % if form_type == trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE and item_type in [ 'library', 'folder' ]:
+                <div class="form-row">
+                    %if inheritable_checked:
+                        <% inheritable_check_box.checked = True %>
+                    %endif
+                    ${inheritable_check_box.get_html()}
+                    <label for="inheritable" style="display:inline;">Inherit template to contained folders and datasets?</label>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Check this box if you want this template to be used by other folders and datasets contained within this ${item_desc}.
+                    </div>
+                </div>
+            %endif
+            <div class="form-row">
+                <input type="submit" name="add_template_button" value="Use this template"/>
+            </div>
+        </form>
+    </div>
+</div>
+<p/>
+%if form_id_select_field.get_selected( return_label=True, return_value=True ) != ('Select one', 'none'):
+    <div class="toolForm">
+        <div class="toolFormTitle">Layout of selected template</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                %for i, field in enumerate( widgets ):
+                    <div class="form-row">
+                        <label>${field[ 'label' ]}</label>
+                        ${field[ 'widget' ].get_html( disabled=True )}
+                        <div class="toolParamHelp" style="clear: both;">
+                            ${field[ 'helptext' ]}
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                %endfor 
+            </div>
+        </div>
+    </div>
+%endif
diff --git a/templates/common/template_common.mako b/templates/common/template_common.mako
new file mode 100644
index 0000000..1bd4c00
--- /dev/null
+++ b/templates/common/template_common.mako
@@ -0,0 +1,200 @@
+<%def name="render_template_field( field, render_as_hidden=False )">
+    <%
+        from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField, HistoryField
+
+        widget = field[ 'widget' ]
+        has_contents = False
+        label = field[ 'label' ]
+        value = ''
+        if isinstance( widget, TextArea ) and widget.value:
+            has_contents = True
+            if render_as_hidden:
+                value = widget.value
+            else:
+                value = '<pre>%s</pre>' % widget.value
+        elif isinstance( widget, TextField ) and widget.value:
+            has_contents = True
+            value = widget.value
+        elif isinstance( widget, SelectField ) and widget.options:
+            for option_label, option_value, selected in widget.options:
+                if selected:
+                    has_contents = True
+                    value = option_value
+        elif isinstance( widget, CheckboxField ) and widget.checked:
+            has_contents = True
+            if render_as_hidden:
+                value = 'true'
+            else:
+                value = 'checked'
+        elif isinstance( widget, WorkflowField ) and str( widget.value ).lower() not in [ 'none' ]:
+            has_contents = True
+            if render_as_hidden:
+                value = widget.value
+            else:
+                workflow_user = widget.user
+                if workflow_user:
+                    for workflow in workflow_user.stored_workflows:
+                        if not workflow.deleted and str( widget.value ) == str( workflow.id ):
+                            value = workflow.name
+                            break
+                else:
+                    # If we didn't find the selected workflow option above, we'll just print the value
+                    value = widget.value
+        elif isinstance( widget, WorkflowMappingField ) and str( widget.value ).lower() not in [ 'none' ]:
+            has_contents = True
+            if render_as_hidden:
+                value = widget.value
+            else:
+                workflow_user = widget.user
+                if workflow_user:
+                    for workflow in workflow_user.stored_workflows:
+                        if not workflow.deleted and str( widget.value ) == str( workflow.id ):
+                            value = workflow.name
+                            break
+                else:
+                    # If we didn't find the selected workflow option above, we'll just print the value
+                    value = widget.value
+        elif isinstance( widget, HistoryField ) and str( widget.value ).lower() not in [ 'none' ]:
+            has_contents = True
+            if render_as_hidden:
+                value = widget.value
+            else:
+                history_user = widget.user
+                if history_user:
+                    for history in history_user.histories:
+                        if not history.deleted and str( widget.value ) == str( history.id ):
+                            value = util.unicodify( history.name )
+                            break
+                else:
+                    # If we didn't find the selected history option above, we'll just print the value
+                    value = widget.value
+        elif isinstance( widget, AddressField ) and str( widget.value ).lower() not in [ 'none' ]:
+            has_contents = True
+            if render_as_hidden:
+                value = widget.value
+            else:
+                address = trans.sa_session.query( trans.model.UserAddress ).get( int( widget.value ) )
+                label = address.desc
+                value = address.get_html()
+    %>
+    %if has_contents:
+        % if render_as_hidden:
+            <input type="hidden" name="${widget.name}" value="${value}"/>
+        %else:
+            <div class="form-row">
+                <label>${label}</label>
+                ${value}
+                <div class="toolParamHelp" style="clear: both;">
+                    ${field[ 'helptext' ]}
+                </div>
+                <div style="clear: both"></div>
+            </div>
+        %endif
+    %endif
+</%def>
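+
+## A minimal usage sketch: 'field' is a dict with 'widget', 'label' and
+## 'helptext' keys, as assembled by the templates that import this file:
+##   ${render_template_field( field, render_as_hidden=True )}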
+
+<%def name="render_template_fields( cntrller, item_type, widgets, widget_fields_have_contents, request_type_id=None, sample_id=None, library_id=None, folder_id=None, ldda_id=None, info_association=None, inherited=False, editable=True )">
+    <%  
+        in_library = False
+        in_sample_tracking = False
+
+        if item_type == 'library':
+            item = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) )
+        elif item_type == 'folder':
+            item = trans.sa_session.query( trans.app.model.LibraryFolder ).get( trans.security.decode_id( folder_id ) )
+        elif item_type == 'ldda':
+            item = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( trans.security.decode_id( ldda_id ) )
+        elif item_type == 'request_type':
+            item = trans.sa_session.query( trans.app.model.RequestType ).get( trans.security.decode_id( request_type_id ) )
+        elif item_type == 'sample':
+            item = trans.sa_session.query( trans.app.model.Sample ).get( trans.security.decode_id( sample_id ) )
+
+        if cntrller in [ 'library', 'library_admin' ]:
+            in_library = True
+            template_section_title = 'Other information'
+            form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+            if trans.user_is_admin() and cntrller == 'library_admin':
+                can_modify = True
+            elif cntrller == 'library':
+                can_modify = trans.app.security_agent.can_modify_library_item( trans.get_current_user_roles(), item )
+            else:
+                can_modify = False
+        elif cntrller in [ 'requests_admin', 'requests', 'request_type' ]:
+            in_sample_tracking = True
+            template_section_title = 'Run details'
+            form_type = trans.model.FormDefinition.types.RUN_DETAILS_TEMPLATE
+    %>
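+    ## 'can_modify' gates the editable form below: admins in the library_admin
+    ## controller can always modify; otherwise the security agent checks the
+    ## current user's roles against the item.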
+    %if ( in_sample_tracking and editable ) or ( in_library and editable and can_modify ):
+        <p/>
+        <div class="toolForm">
+            <div class="toolFormTitle">
+                <div class="menubutton popup" id="item-${item.id}-popup">${template_section_title}</div>
+                <div popupmenu="item-${item.id}-popup">
+                    %if in_library and info_association and inherited and can_modify:
+                        ## "inherited" will be true only if the info_association is not associated with the current item,
+                        ## which means that the currently displayed template has not yet been saved for the current item.
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type=item_type, form_type=form_type, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, show_deleted=show_deleted )}">Select a different template</a>
+                    %elif in_library and info_association and not inherited and can_modify:
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type=item_type, form_type=form_type, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, show_deleted=show_deleted )}">Edit template</a>
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type=item_type, form_type=form_type, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, show_deleted=show_deleted )}">Unuse template</a>
+                        %if item_type not in [ 'ldda', 'library_dataset' ]:
+                            %if info_association.inheritable:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='manage_template_inheritance', cntrller=cntrller, item_type=item_type, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, show_deleted=show_deleted )}">Dis-inherit template</a>
+                            %else:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='manage_template_inheritance', cntrller=cntrller, item_type=item_type, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, show_deleted=show_deleted )}">Inherit template</a>
+                            %endif
+                        %endif
+                    %elif in_sample_tracking:
+                        <a class="action-button" href="${h.url_for( controller='request_type', action='add_template', cntrller=cntrller, item_type=item_type, form_type=form_type, request_type_id=request_type_id )}">Select a different template</a>
+                        <a class="action-button" href="${h.url_for( controller='request_type', action='edit_template', cntrller=cntrller, item_type=item_type, form_type=form_type, request_type_id=request_type_id )}">Edit template</a>
+                        <a class="action-button" href="${h.url_for( controller='request_type', action='delete_template', cntrller=cntrller, item_type=item_type, form_type=form_type, request_type_id=request_type_id )}">Unuse template</a>
+                    %endif
+                </div>
+            </div>
+            <div class="toolFormBody">
+                %if in_library and inherited:
+                    <div class="form-row">
+                        <font color="red">
+                            <b>
+                                This is an inherited template and is not required to be used with this ${item_type}.  You can 
+                                <a href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type=item_type, form_type=form_type, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, show_deleted=show_deleted )}"><font color="red">Select a different template</font></a>
+                                or fill in the desired fields and save this one.  This template will not be associated with this ${item_type} until you click the Save button.
+                            </b>
+                        </font>
+                    </div>
+                %endif
+                %if in_library:
+                    <form name="edit_info" id="edit_info" action="${h.url_for( controller='library_common', action='edit_template_info', cntrller=cntrller, item_type=item_type, form_type=form_type, library_id=library_id, folder_id=folder_id, ldda_id=ldda_id, show_deleted=show_deleted )}" method="post">
+                %elif in_sample_tracking:
+                    <form name="edit_info" id="edit_info" action="${h.url_for( controller='request_type', action='edit_template_info', cntrller=cntrller, item_type=item_type, form_type=form_type, request_type_id=request_type_id, sample_id=sample_id )}" method="post">
+                %endif
+                    %for i, field in enumerate( widgets ):
+                        <div class="form-row">
+                            <label>${field[ 'label' ]}</label>
+                            ${field[ 'widget' ].get_html()}
+                            <div class="toolParamHelp" style="clear: both;">
+                                ${field[ 'helptext' ]}
+                            </div>
+                            <div style="clear: both"></div>
+                        </div>
+                    %endfor 
+                    <div class="form-row">
+                        <input type="submit" name="edit_info_button" value="Save"/>
+                    </div>
+                </form>
+            </div>
+        </div>
+        <p/>
+    %elif widget_fields_have_contents:
+        <p/>
+        <div class="toolForm">
+            <div class="toolFormTitle">Other information about ${ util.unicodify( item.name )}</div>
+            <div class="toolFormBody">
+                %for i, field in enumerate( widgets ):
+                    ${render_template_field( field )}
+                %endfor
+            </div>
+        </div>
+        <p/>
+    %endif
+</%def>
diff --git a/templates/display_base.mako b/templates/display_base.mako
new file mode 100644
index 0000000..d80d31d
--- /dev/null
+++ b/templates/display_base.mako
@@ -0,0 +1,295 @@
+<%!
+    def inherit( context ):
+        if context.get('no_panels'):
+            return '/base.mako'
+        else:
+            return '/webapps/galaxy/base_panels.mako'
+
+    from galaxy.model import History, StoredWorkflow, Page
+    from galaxy.web.framework.helpers import iff
+%>
+<%inherit file="${inherit( context )}"/>
+<%namespace file="/tagging_common.mako" import="render_individual_tagging_element, render_community_tagging_element, community_tag_js" />
+<%namespace file="/display_common.mako" import="*" />
+
+##
+## Functions used by base.mako and base_panels.mako to display content.
+##
+
+<%def name="title()">
+    Galaxy | ${iff( item.published, "Published ", iff( item.importable , "Accessible ", iff( item.users_shared_with, "Shared ", "Private " ) ) ) + get_class_display_name( item.__class__ )} | ${get_item_name( item ) | h}
+</%def>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=True
+    self.message_box_visible=False
+    self.active_view="shared"
+    self.overlay_visible=False
+%>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js(
+        "libs/jquery/jstorage",
+        "libs/jquery/jquery.event.drag",
+        "libs/jquery/jquery.mousewheel",
+        "libs/farbtastic",
+        "libs/jquery/jquery.autocomplete",
+    )}
+    ${community_tag_js( get_controller_name( item ) )}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css(
+        "autocomplete_tagging",
+        "embed_item",
+        "jquery.rating",
+        "library",
+        "jquery-ui/smoothness/jquery-ui"
+    )}
+
+    <style type="text/css">
+        .page-body {
+            padding: 10px;
+            ## float: left;
+            ## width: 65%;
+        }
+        .page-meta {
+            float: right;
+            width: 27%;
+            padding: 0.5em;
+            margin: 0.25em;
+            vertical-align: text-top;
+            border: 2px solid #DDDDDD;
+            border-top: 4px solid #DDDDDD;
+        }
+
+        ## Make sure that workflow steps do not get too long.
+        .toolForm {
+            max-width: 500px;
+        }
+
+        ## Space out tool forms in workflows.
+        div.toolForm{
+            margin-top: 10px;
+            margin-bottom: 10px;
+        }
+
+    </style>
+</%def>
+
+<%def name="render_item_links( item )">
+    ## Override.
+</%def>
+
+<%def name="render_item_header( item )">
+    <h3>Galaxy ${get_class_display_name( item.__class__ )} '${get_item_name( item )| h}'</h3>
+    %if hasattr( item, "annotation") and item.annotation is not None:
+        <div class="annotation">Annotation: ${item.annotation}</div>
+    %endif
+    <hr/>
+</%def>
+
+<%def name="render_item( item, item_data=None )">
+    ## Override.
+</%def>
+
+## For base.mako
+<%def name="body()">
+    ${self.render_content()}
+</%def>
+
+## For base_panels.mako
+<%def name="center_panel()">
+    ${self.render_content()}
+</%def>
+
+
+##
+## Render page content. Pages that inherit this page should override render_item_links() and render_item()
+##
+<%def name="render_content()">
+
+    ## Get URL to other published items owned by user that owns this item.
+    <%
+        ##TODO: is there a better way to create this URL? Can't use 'f-username' as a key b/c it's not a valid identifier.
+        controller_name = get_controller_name( item )
+        item_plural = get_item_plural( item )
+        href_to_all_items = h.url_for( controller='/' + controller_name, action='list_published')
+        href_to_user_items = h.url_for( controller='/' + controller_name, action='list_published', xxx=item.user.username)
+        href_to_user_items = href_to_user_items.replace( 'xxx', 'f-username')
+    %>
+
+    <div class="unified-panel-header" unselectable="on" style="overflow: hidden">
+        <div class="unified-panel-header-inner">
+            <div style="float: right">
+                ${self.render_item_links( item )}
+            </div>
+            %if item.published:
+                    <a href="${href_to_all_items}">Published ${item_plural}</a> |
+                    <a href="${href_to_user_items}">${item.user.username}</a>
+            %elif item.importable:
+                Accessible ${get_class_display_name( item.__class__ )}
+            %elif item.users_shared_with:
+                Shared ${get_class_display_name( item.__class__ )}
+            %else:
+                Private ${get_class_display_name( item.__class__ )}
+            %endif
+            | ${get_item_name( item ) | h}
+        </div>
+    </div>
+
+    <div class="unified-panel-body">
+        <div style="overflow: auto; height: 100%;">
+            <div class="page-body">
+                <div>
+                    ${self.render_item_header( item )}
+                </div>
+
+                ${self.render_item( item, item_data )}
+            </div>
+
+
+        </div>
+    </div>
+</%def>
+
+<%def name="right_panel()">
+
+    <%
+        ## FIXME: duplicated from above for now
+        controller_name = get_controller_name( item )
+        item_plural = get_item_plural( item )
+        href_to_all_items = h.url_for( controller='/' + controller_name, action='list_published')
+        href_to_user_items = h.url_for( controller='/' + controller_name, action='list_published', xxx=item.user.username)
+        href_to_user_items = href_to_user_items.replace( 'xxx', 'f-username')
+    %>
+
+    <div class="unified-panel-header" unselectable="on">
+        <div class="unified-panel-header-inner">
+            About this ${get_class_display_name( item.__class__ )}
+        </div>
+    </div>
+
+    <div class="unified-panel-body">
+        <div style="overflow: auto; height: 100%;">
+            <div style="padding: 10px;">
+
+                <div style="float: right;"><img src="https://secure.gravatar.com/avatar/${h.md5(item.user.email)}?d=identicon"></div>
+
+                <h4>Author</h4>
+
+                <p>${item.user.username | h}</p>
+
+                ## Related items.
+                <h4>Related ${item_plural}</h4>
+                <p>
+                    <a href="${href_to_all_items}">All published ${item_plural.lower()}</a><br>
+                    <a href="${href_to_user_items}">Published ${item_plural.lower()} by ${item.user.username | h}</a>
+
+                ## Rating.
+                <h4>Rating</h4>
+
+                <%
+                    label = "ratings"
+                    if num_ratings == 1:
+                        label = "rating"
+                %>
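+                ## The five disabled inputs below bucket the average rating to
+                ## the nearest star: (0, 1.5] -> 1 star, (1.5, 2.5] -> 2, and
+                ## so on, with > 4.5 -> 5.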
+                <div style="padding-bottom: 0.75em; float: left">
+                    Community<br>
+                    <span style="font-size:80%">
+                        (<span id="num_ratings">${num_ratings}</span> ${label},
+                         <span id="ave_rating">${"%.1f" % ave_item_rating}</span> average)
+                    </span>
+                </div>
+                <div style="float: right">
+                    <input name="star1" type="radio" class="community_rating_star star" disabled="disabled" value="1"
+                    %if ave_item_rating > 0 and ave_item_rating <= 1.5:
+                        checked="checked"
+                    %endif
+
+                    />
+                    <input name="star1" type="radio" class="community_rating_star star" disabled="disabled" value="2"
+                    %if ave_item_rating > 1.5 and ave_item_rating <= 2.5:
+                        checked="checked"
+                    %endif
+                    />
+                    <input name="star1" type="radio" class="community_rating_star star" disabled="disabled" value="3"
+                    %if ave_item_rating > 2.5 and ave_item_rating <= 3.5:
+                        checked="checked"
+                    %endif
+                    />
+                    <input name="star1" type="radio" class="community_rating_star star" disabled="disabled" value="4"
+                    %if ave_item_rating > 3.5 and ave_item_rating <= 4.5:
+                        checked="checked"
+                    %endif
+                    />
+                    <input name="star1" type="radio" class="community_rating_star star" disabled="disabled" value="5"
+                    %if ave_item_rating > 4.5:
+                        checked="checked"
+                    %endif
+                    />
+                </div>
+                <div style="clear: both;"></div>
+                %if trans.get_user():
+                    <div style="float: left">
+                        Yours<br><span id="rating_feedback" style="font-size:80%; display: none">(thanks!)</span>
+                    </div>
+                    <div style="float: right">
+                        <input name="star2" type="radio" class="user_rating_star" value="1"
+                        %if user_item_rating == 1:
+                            checked="checked"
+                        %endif
+                        />
+                        <input name="star2" type="radio" class="user_rating_star" value="2"
+                        %if user_item_rating == 2:
+                            checked="checked"
+                        %endif
+                        />
+                        <input name="star2" type="radio" class="user_rating_star" value="3"
+                        %if user_item_rating == 3:
+                            checked="checked"
+                        %endif
+                        />
+                        <input name="star2" type="radio" class="user_rating_star" value="4"
+                        %if user_item_rating == 4:
+                            checked="checked"
+                        %endif
+                        />
+                        <input name="star2" type="radio" class="user_rating_star" value="5"
+                        %if user_item_rating == 5:
+                            checked="checked"
+                        %endif
+                        />
+                    </div>
+                %endif
+                <div style="clear: both;"></div>
+
+                ## Tags.
+                <h4>Tags</h4>
+                <p>
+                ## Community tags.
+                <div>
+                    Community:
+                    ${render_community_tagging_element( tagged_item=item, tag_click_fn='community_tag_click', use_toggle_link=False )}
+                    %if len( item.tags ) == 0:
+                        none
+                    %endif
+                </div>
+                ## Individual tags.
+                %if trans.get_user():
+                    <p>
+                    <div>
+                        Yours:
+                        ${render_individual_tagging_element( user=trans.get_user(), tagged_item=item, elt_context='view.mako', use_toggle_link=False, tag_click_fn='community_tag_click' )}
+                    </div>
+                %endif
+            </div>
+        </div>
+    </div>
+
+</%def>
diff --git a/templates/display_common.mako b/templates/display_common.mako
new file mode 100644
index 0000000..d78633d
--- /dev/null
+++ b/templates/display_common.mako
@@ -0,0 +1,154 @@
+##
+## Utilities for sharing items and displaying shared items.
+## HACK: these should probably go in the web helper object.
+##
+## TODO: FIXME Cannot import model here, because grids are
+## used across webapps, and each webapp has its own model.
+
+<%! from galaxy import model %>
+
+<%def name="get_item_name( item )">
+    <% 
+        # Start with exceptions, end with default.
+        if type( item ) is model.Page:
+            item_name = item.title
+        elif type( item ) is model.Visualization:
+            item_name = item.title
+        elif hasattr( item, 'get_display_name'):
+            item_name = item.get_display_name()
+        else:
+            item_name = item.name
+        
+        # Encode in unicode.
+        if type( item_name ) is str:
+            item_name = unicode( item_name, 'utf-8' )
+        return item_name    
+    %>
+</%def>
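+
+## Typically rendered with the |h filter, e.g.: ${get_item_name( item ) | h}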
+
+## Get plural display name for a class.
+<%def name="get_class_plural_display_name( a_class )">
+<%
+    # Start with exceptions, end with default.
+    if a_class is model.History:
+        return "Histories"
+    elif a_class is model.FormDefinitionCurrent:
+        return "Forms"
+    else:
+        return get_class_display_name( a_class ) + "s"
+%>
+</%def>
+
+## Get display name for a class.
+<%def name="get_class_display_name( a_class )">
+<%
+    ## Start with exceptions, end with default.
+    if a_class is model.StoredWorkflow:
+        return "Workflow"
+    elif a_class is model.HistoryDatasetAssociation:
+        return "Dataset"
+    else:
+        return a_class.__name__
+%>
+</%def>
+
+## Get plural term for item.
+<%def name="get_item_plural( item )">
+    <% return get_class_plural( item.__class__ ) %>
+</%def>
+
+## Get plural term for class.
+<%def name="get_class_plural( a_class )">
+<%
+    if a_class == model.History:
+        class_plural = "Histories"
+    elif a_class == model.StoredWorkflow:
+        class_plural = "Workflows"
+    elif a_class == model.Page:
+        class_plural = "Pages"
+    elif a_class == model.Library:
+        class_plural = "Libraries"
+    elif a_class == model.HistoryDatasetAssociation:
+        class_plural = "Datasets"
+    elif a_class == model.SampleDataset:
+        class_plural = "Sample Datasets"
+    elif a_class == model.FormDefinitionCurrent:
+        class_plural = "Forms"
+    elif a_class == model.RequestType:
+        class_plural = "request types"
+    elif a_class == model.UserOpenID:
+        class_plural = "OpenIDs"
+    else:
+        class_plural = "items"
+    return class_plural
+%>
+</%def>
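+
+## For example, get_class_plural( model.History ) returns "Histories"; classes
+## without a special case fall through to "items".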
+
+## Returns the controller name for an item based on its class.
+<%def name="get_controller_name( item )">
+    <%
+        if isinstance( item, model.History ):
+            return "history"
+        elif isinstance( item, model.StoredWorkflow ):
+            return "workflow"
+        elif isinstance( item, model.HistoryDatasetAssociation ):
+            return "dataset"
+        elif isinstance( item, model.Page ):
+            return "page"
+        elif isinstance( item, model.Visualization ):
+            return "visualization"
+    %>
+</%def>
+
+## Returns item user/owner.
+<%def name="get_item_user( item )">
+    <%
+        # Exceptions first, default last.
+        if isinstance( item, model.HistoryDatasetAssociation ):
+            return item.history.user
+        else:
+            return item.user
+    %>
+</%def>
+
+## Returns item slug.
+<%def name="get_item_slug( item )">
+    <%
+        # Exceptions first, default last.
+        if isinstance( item, model.HistoryDatasetAssociation ):
+            return trans.security.encode_id( item.id )
+        else:
+            return item.slug
+    %>
+</%def>
+
+## Return a link to view a history.
+<%def name="get_history_link( history, qualify=False )">
+    %if history.slug and history.user.username:
+        <% return h.url_for( controller='/history', action='display_by_username_and_slug', username=history.user.username, slug=history.slug, qualified=qualify ) %>
+    %else:
+        <% return h.url_for( controller='/history', action='view', id=trans.security.encode_id( history.id ), qualified=qualify, use_panels=context.get('use_panels', True) ) %>
+    %endif
+</%def>
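+
+## Prefers the username/slug URL when both are available, falling back to the
+## encoded-id 'view' URL otherwise.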
+
+## Render message.
+<%def name="render_message( message, status )">
+    %if message:
+        <p>
+            <div class="${status}message transient-message">${util.restore_text( message )}</div>
+            <div style="clear: both"></div>
+        </p>
+    %endif
+</%def>
+
diff --git a/templates/embed_base.mako b/templates/embed_base.mako
new file mode 100644
index 0000000..1e7c74b
--- /dev/null
+++ b/templates/embed_base.mako
@@ -0,0 +1,63 @@
+##
+## Base file for generating HTML for embedded objects.
+##
+## parameters: item, item_data
+##
+<%namespace file="/display_common.mako" import="*" />
+
+## HTML structure.
+<div class='embedded-item display ${get_class_display_name( item.__class__ ).lower()}'>
+    <div class='title'>
+        ${self.render_title( item )}
+    </div>
+    <div class='summary-content'>
+        ${self.render_summary_content( item, item_data )}
+    </div>
+    <div class='expanded-content'>
+        <div class='item-content'></div>
+    </div>
+</div>
+
+## Render item links.
+<%def name="render_item_links( item )">
+    <%
+        item_display_name = get_class_display_name( item.__class__ ).lower()
+        item_controller = "/%s" % get_controller_name( item )
+        item_user = get_item_user( item )
+        item_slug = get_item_slug( item )
+        display_href = h.url_for( controller=item_controller, action='display_by_username_and_slug', username=item_user.username, slug=item_slug )
+    %>
+    
+    ## Links for importing and viewing an item.
+    <a href="${h.url_for( controller=item_controller, action='imp', id=trans.security.encode_id( item.id ) )}" title="Import ${item_display_name}" class="icon-button import"></a>
+    <a class="icon-button go-to-full-screen" href="${display_href}" title="Go to ${item_display_name}"></a>
+</%def>
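+
+## render_title() below reuses these links for the icon block on the right.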
+
+<%def name="render_title( item )">
+    <%
+        item_display_name = get_class_display_name( item.__class__ ).lower()
+        item_controller = "/%s" % get_controller_name( item )
+        item_user = get_item_user( item )
+        item_slug = get_item_slug( item )
+        display_href = h.url_for( controller=item_controller, action='display_by_username_and_slug', username=item_user.username, slug=item_slug )
+    %>
+    <div style="float: left">
+        <a class="display_in_embed icon-button toggle-expand" item_id="${trans.security.encode_id( item.id )}" item_class="${item.__class__.__name__}" href="${display_href}"
+            title="Show ${item_display_name} content"></a>
+        <a class="toggle icon-button" href="${display_href}" title="Hide ${item_display_name} content"></a>
+    </div>
+    <div style="float: right;">
+        ${self.render_item_links( item )}
+    </div>
+    <h4><a class="toggle-embed" href="${display_href}" title="Show or hide ${item_display_name} content">Galaxy ${get_class_display_name( item.__class__ )} | ${get_item_name( item ) | h}</a></h4>
+    %if hasattr( item, "annotation") and item.annotation:
+        <div class="annotation">${item.annotation | h}</div>
+    %endif
+    
+    ## Use a hidden var to store the ajax URL for getting an item's content.
+    <input type="hidden" name="ajax-item-content-url" value="${h.url_for( controller=item_controller, action='get_item_content_async', id=trans.security.encode_id( item.id ) )}"/>
+</%def>
+
+## Methods to override to render summary content.
+<%def name="render_summary_content( item, item_data )">
+</%def>
diff --git a/templates/export_base.mako b/templates/export_base.mako
new file mode 100644
index 0000000..69da012
--- /dev/null
+++ b/templates/export_base.mako
@@ -0,0 +1,122 @@
+##
+## Base template for exporting an item. Template expects the following parameters:
+## (a) item - item to be exported.
+##
+<%!
+    def inherit(context):
+        if context.get('use_panels', False) == True:
+            if context.get('webapp'):
+                app_name = context.get('webapp')
+            elif context.get('app'):
+                app_name = context.get('app').name
+            else:
+                app_name = 'galaxy'
+            return '/webapps/%s/base_panels.mako' % app_name
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%namespace file="./display_common.mako" import="*" />
+<%namespace file="/message.mako" import="render_msg" />
+
+##
+## Page methods.
+##
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.overlay_visible=False
+    self.message_box_class=""
+    self.active_view=""
+    self.body_class=""
+    
+    # Get class name strings.
+    self.item_class_name = get_class_display_name( item.__class__ ) 
+    self.item_class_name_lc = self.item_class_name.lower()
+    self.item_class_plural_name = get_class_plural_display_name( item.__class__ )
+    self.item_class_plural_name_lc = self.item_class_plural_name.lower()
+    self.controller = get_controller_name(item)
+%>
+</%def>
+
+<%def name="title()">
+    Export ${get_class_display_name( item.__class__ )} '${get_item_name( item ) | h}'
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        ## Put some whitespace before each section header.
+        h3 {
+            margin-top: 1.5em;
+        }
+        input.action-button {
+            margin-left: 0;
+        }
+        ## If page is displayed in panels, pad from edges for readability.
+        %if context.get('use_panels'):
+        div#center {
+            padding: 10px;
+        }
+        %endif
+        .display-url {
+            margin: 0.5em 0em 0.5em 0.5em;
+            font-weight: bold;
+        }
+    </style>
+</%def>
+
+<%def name="center_panel()">
+    ${self.body()}
+</%def>
+
+<%def name="render_url_for_importing(item)">
+    <h3>URL for Importing to Another Galaxy</h3>
+    
+    %if item.importable:
+        Use this URL to import the ${get_class_display_name( item.__class__ ).lower()} directly into another Galaxy server: 
+        <div class="display-url">
+            ${h.url_for(controller=self.controller, action='display_by_username_and_slug', username=item.user.username,
+                        slug=item.slug, format='json', qualified=True )}
+        </div>
+        (Copy this URL into the box titled 'Workflow URL' in the Import Workflow page.)
+    %else:
+        <a href="${h.url_for(controller=self.controller, action='sharing', id=trans.security.encode_id( item.id ) )}">This ${get_class_display_name( item.__class__ ).lower()} must be accessible before it can be imported into another Galaxy.</a>
+    %endif
+</%def>
+
+<%def name="render_download_to_file(item)">
+    <h3>Download to File</h3>
+    
+    <a href="${h.url_for( controller=self.controller, action='display_by_username_and_slug', username=item.user.username,
+                          slug=item.slug, format='json-download' )}">
+        Download ${get_class_display_name( item.__class__ ).lower()} to file so that it can be saved or imported into another Galaxy server.</a>
+</%def>
+
+<%def name="render_more(item)">
+    ## Override.
+</%def>
+
+<%def name="render_footer()">
+    <p><br><br>
+    <a href="${h.url_for(controller=self.controller, action="list" )}">Back to ${self.item_class_plural_name} List</a>
+</%def>
+
+<%def name="body()">
+    <%
+        item_name = get_item_name(item)
+    %>
+    <h2>Download or Export ${self.item_class_name} '${item_name | h}'</h2>
+    
+    ${self.render_download_to_file(item)}
+    
+    ${self.render_url_for_importing(item)}
+    
+    ${self.render_more(item)}
+    
+    ${self.render_footer()}
+</%def>
diff --git a/templates/form.mako b/templates/form.mako
new file mode 100644
index 0000000..be0eab3
--- /dev/null
+++ b/templates/form.mako
@@ -0,0 +1,112 @@
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            if context.get('webapp'):
+                app_name = context.get('webapp')
+            elif context.get('app'):
+                app_name = context.get('app').name
+            else:
+                app_name = 'galaxy'
+            return '/webapps/%s/base_panels.mako' % app_name
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+<% _=n_ %>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view=active_view
+    self.message_box_visible=False
+%>
+</%def>
+
+
+<%def name="title()">${form.title | h}</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.autocomplete")}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css("autocomplete_tagging")}
+</%def>
+
+<%def name="center_panel()">
+    ${render_form( )}
+</%def>
+
+<%def name="body()">
+    ${render_form( )}
+</%def>
+
+<%def name="render_form()">
+    %if header:
+        ${header}
+    %endif
+    
+    <div class="form" style="margin: 1em">
+        <div class="form-title">${util.unicodify( form.title ) | h }</div>
+        <div class="form-body">
+        <%
+            has_file_input = False
+            for input in form.inputs:
+                if input.type == 'file':
+                    has_file_input = True
+                    break
+        %>
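+        ## 'has_file_input' switches the form to multipart/form-data encoding
+        ## below so that file uploads are transmitted correctly.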
+        <form name="${form.name | h }" action="${form.action}" method="post" 
+        %if has_file_input:
+             enctype="multipart/form-data"
+        %endif
+        >
+            %for input in form.inputs:
+                <%
+                cls = "form-row"
+                if input.error:
+                    cls += " form-row-error"
+                %>
+                <div class="${cls}">
+                %if input.use_label:
+                  <label>
+                      ${_(input.label) | h }:
+                  </label>
+                %endif
+                <div class="form-row-input">
+                    %if input.type == 'textarea':
+                        <textarea name="${input.name | h }">${input.value | h }</textarea>
+                    %elif input.type == 'select':
+                        <select name="${input.name | h}">
+                            %for (name, value) in input.options:
+                                <option value="${value | h }">${name | h }</option>
+                            %endfor
+                        </select>
+                    %else:
+                        <input type="${input.type}" name="${input.name | h }" value="${input.value | h }">
+                    %endif      
+                </div>
+                %if input.error:
+                    <div class="form-row-error-message">${input.error | h }</div>
+                %endif
+                %if input.help:
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${input.help | h}
+                    </div>
+                %endif
+                <div style="clear: both"></div>
+        </div>
+        %endfor
+        <div class="form-row"><input type="submit" value="${form.submit_text}"></div>
+        </form>
+        </div>
+    </div>
+</%def>
diff --git a/templates/galaxy_client_app.mako b/templates/galaxy_client_app.mako
new file mode 100644
index 0000000..c95b2e4
--- /dev/null
+++ b/templates/galaxy_client_app.mako
@@ -0,0 +1,115 @@
+<%def name="render_json( dictionary )">
+${ h.dumps( dictionary, indent=( 2 if trans.debug else 0 ) ) }
+</%def>
+
+## ============================================================================
+<%def name="bootstrap( **kwargs )">
+    ## 1) Bootstrap all kwargs to JSON, assigning to:
+    ##      global 'bootstrapped' var
+    ##      named require module 'bootstrapped-data'
+    <script type="text/javascript">
+        //TODO: global...
+        %for key in kwargs:
+            ( window.bootstrapped = window.bootstrapped || {} )[ '${key}' ] = (
+                ${ render_json( kwargs[ key ] ) }
+            );
+        %endfor
+        define( 'bootstrapped-data', function(){
+            return window.bootstrapped;
+        });
+    </script>
+</%def>
+
+<%def name="load( app=None, **kwargs )">
+    ## 1) bootstrap kwargs (as above), 2) build Galaxy global var, 3) load 'app' by AMD (optional)
+    ${ self.bootstrap( **kwargs ) }
+    <script type="text/javascript">
+        require([ 'require', 'galaxy' ], function( require, galaxy ){
+            //TODO: global...
+            window.Galaxy = new galaxy.GalaxyApp({
+                root    : '${h.url_for( "/" )}',
+                config  : ${ render_json( get_config_dict() )},
+                user    : ${ render_json( get_user_dict() )},
+            }, window.bootstrapped );
+
+            %if app:
+                require([ '${app}' ]);
+            %endif
+        });
+    </script>
+</%def>
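+## Illustrative usage (hypothetical caller; 'apps/analysis' and
+## 'histories_json' stand in for a real app module and real page data):
+##
+##   <%namespace name="galaxy_client" file="/galaxy_client_app.mako" />
+##   ${ galaxy_client.load( app='apps/analysis', histories=histories_json ) }
+##
+## Every kwarg is serialized into window.bootstrapped and exposed through the
+## 'bootstrapped-data' require module; 'app' names an AMD module to require()
+## once window.Galaxy has been constructed.
+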
+
+
+## ----------------------------------------------------------------------------
+<%def name="get_config_dict()">
+    ## Return a dictionary of galaxy.ini settings
+    <%
+        config_dict = {}
+        try:
+            controller = trans.webapp.api_controllers.get( 'configuration', None )
+            if controller:
+                config_dict = controller.get_config_dict( trans, trans.user_is_admin() )
+        except Exception, exc:
+            pass
+        return config_dict
+    %>
+</%def>
+
+<%def name="get_config_json()">
+    ## Convenience def to write the config dict as JSON
+${ h.dumps( get_config_dict() )}
+</%def>
+
+
+## ----------------------------------------------------------------------------
+<%def name="get_user_dict()">
+    ## Return a dictionary of user or anonymous user data including:
+    ##  email, id, disk space used, quota percent, and tags used
+    <%
+        from markupsafe import escape
+        user_dict = {}
+        try:
+            if trans.user:
+                user_dict = trans.user.to_dict( view='element',
+                    value_mapper={ 'id': trans.security.encode_id, 'total_disk_usage': float, 'email': escape, 'username': escape } )
+                user_dict[ 'quota_percent' ] = trans.app.quota_agent.get_percent( trans=trans )
+                user_dict[ 'is_admin' ] = trans.user_is_admin()
+
+                # tags used
+                users_api_controller = trans.webapp.api_controllers[ 'users' ]
+                tags_used = []
+                for tag in users_api_controller.get_user_tags_used( trans, user=trans.user ):
+                    tag = escape( tag )
+                    if tag:
+                        tags_used.append( tag )
+                user_dict[ 'tags_used' ] = tags_used
+
+                return user_dict
+
+            usage = 0
+            percent = None
+            try:
+                usage = trans.app.quota_agent.get_usage( trans, history=trans.history )
+                percent = trans.app.quota_agent.get_percent( trans=trans, usage=usage )
+            except AssertionError, assertion:
+                # no history for quota_agent.get_usage assertion
+                pass
+            return {
+                'total_disk_usage'      : int( usage ),
+                'nice_total_disk_usage' : util.nice_size( usage ),
+                'quota_percent'         : percent
+            }
+
+        except Exception, exc:
+            pass
+            #TODO: no logging available?
+            #log.exception( exc )
+
+        return user_dict
+    %>
+</%def>
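+
+## For anonymous users the def above yields only usage data, e.g.
+## (illustrative values):
+##   { 'total_disk_usage': 0, 'nice_total_disk_usage': '0 bytes', 'quota_percent': None }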
+
+<%def name="get_user_json()">
+    ## Convenience def to write the user dict as JSON
+${ h.dumps( get_user_dict() )}
+</%def>
diff --git a/templates/grid_base.mako b/templates/grid_base.mako
new file mode 100644
index 0000000..805e984
--- /dev/null
+++ b/templates/grid_base.mako
@@ -0,0 +1,264 @@
+<%!
+    from galaxy.web.framework.helpers.grids import TextColumn
+
+    def inherit(context):
+        kwargs = context.get( 'kwargs', {} )
+        if kwargs.get( 'embedded', False ):
+            # No inheritance - using only embeddable content (self.body)
+            return None
+        if context.get('use_panels'):
+            if context.get('webapp'):
+                app_name = context.get('webapp')
+            elif context.get('app'):
+                app_name = context.get('app').name
+            else:
+                app_name = 'galaxy'
+            return '/webapps/%s/base_panels.mako' % app_name
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+<%namespace file="/display_common.mako" import="get_class_plural" />
+
+##
+## Override methods from base.mako and base_panels.mako
+##
+
+<%def name="init( embedded=False, insert=None )">
+<%
+    self.has_left_panel         = False
+    self.has_right_panel        = False
+    self.message_box_visible    = False
+    self.overlay_visible        = False
+    self.active_view            = 'user'
+%>
+</%def>
+
+## render title
+<%def name="title()">${grid.title}</%def>
+
+## render in center panel
+<%def name="center_panel()">
+    ${self.load()}
+</%def>
+
+## render in body
+<%def name="body()">
+    ${self.load()}
+</%def>
+
+## creates grid
+<%def name="load( embedded=False, insert=None )">
+    <!-- grid_base.mako -->
+    ## imports
+    ${h.css(
+        "autocomplete_tagging",
+        "jquery.rating"
+    )}
+    ${h.js(
+        "libs/jquery/jquery.autocomplete",
+    )}
+
+    ## grid container
+    <div id="grid-container"></div>
+
+    ## load javascript
+    <script type="text/javascript">
+        var gridView = null;
+        function add_tag_to_grid_filter( tag_name, tag_value ){
+            // Put tag name and value together.
+            var tag = tag_name + ( tag_value !== undefined && tag_value !== "" ? ":" + tag_value : "" );
+            var advanced_search = $( '#advanced-search').is(":visible" );
+            if( !advanced_search ){
+                $('#standard-search').slideToggle('fast');
+                $('#advanced-search').slideToggle('fast');
+            }
+            gridView.add_filter_condition( "tags", tag );
+        };
+
+        // load grid viewer
+        require(['mvc/grid/grid-view'], function(GridView) {
+            $(function() {
+                gridView = new GridView( ${ h.dumps( self.get_grid_config( embedded=embedded, insert=insert ) ) } );
+            });
+        });
+    </script>
+</%def>
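+
+## Illustrative usage (hypothetical markup): a tag rendered in a grid cell can
+## feed the filter above, e.g.
+##   <a href="javascript:void(0)" onclick="add_tag_to_grid_filter('group', 'rna')">group:rna</a>
+## which reveals the advanced-search panel if needed and adds a 'tags'
+## filter condition of "group:rna".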
+
+<%def name="get_grid_config( embedded=False, insert=None )">
+## Generates the grid configuration dictionary consumed by the client-side grid view
+<%
+    item_class = grid.model_class.__name__
+    self.grid_config = {
+        'title'                         : grid.title,
+        'url_base'                      : trans.request.path_url,
+        'async'                         : grid.use_async,
+        'async_ops'                     : [],
+        'categorical_filters'           : {},
+        'filters'                       : cur_filter_dict,
+        'sort_key'                      : sort_key,
+        'show_item_checkboxes'          : context.get('show_item_checkboxes', False),
+        'cur_page_num'                  : cur_page_num,
+        'num_pages'                     : num_pages,
+        'num_page_links'                : num_page_links,
+        'history_tag_autocomplete_url'  : url( controller='tag', action='tag_autocomplete_data', item_class=item_class ),
+        ## 'history_name_autocomplete_url' : url( controller='history', action='name_autocomplete_data' ),
+        'status'                        : status,
+        'message'                       : util.restore_text(message),
+        'global_actions'                : [],
+        'operations'                    : [],
+        'items'                         : [],
+        'columns'                       : [],
+        'get_class_plural'              : get_class_plural( grid.model_class ).lower(),
+        'use_paging'                    : grid.use_paging,
+        'legend'                        : grid.legend,
+        'current_item_id'               : False,
+        'use_panels'                    : context.get('use_panels'),
+        'use_hide_message'              : grid.use_hide_message,
+        'insert'                        : insert,
+        'default_filter_dict'           : default_filter_dict,
+        'advanced_search'               : advanced_search,
+        'refresh_frames'                : [],
+        'embedded'                      : embedded,
+        'info_text'                     : grid.info_text,
+        'url'                           : url(dict())
+    }
+
+    ## add refresh frames
+    if refresh_frames:
+        self.grid_config['refresh_frames'] = refresh_frames
+
+    ## add current item if exists
+    if current_item:
+        self.grid_config['current_item_id'] = current_item.id
+
+    ## column
+    for column in grid.columns:
+
+        ## add column sort links
+        href = None
+        extra = ''
+        if column.sortable:
+            if sort_key.endswith(column.key):
+                if not sort_key.startswith("-"):
+                    href = url( sort=( "-" + column.key ) )
+                    extra = "↓"
+                else:
+                    href = url( sort=( column.key ) )
+                    extra = "↑"
+            else:
+                href = url( sort=column.key )
+
+        ## add to configuration
+        self.grid_config['columns'].append({
+            'key'               : column.key,
+            'visible'           : column.visible,
+            'nowrap'            : column.nowrap,
+            'attach_popup'      : column.attach_popup,
+            'label_id_prefix'   : column.label_id_prefix,
+            'sortable'          : column.sortable,
+            'label'             : column.label,
+            'filterable'        : column.filterable,
+            'is_text'           : isinstance(column, TextColumn),
+            'href'              : href,
+            'extra'             : extra
+        })
+
+    ## operations
+    for operation in grid.operations:
+        self.grid_config['operations'].append({
+            'allow_multiple'        : operation.allow_multiple,
+            'allow_popup'           : operation.allow_popup,
+            'target'                : operation.target,
+            'label'                 : operation.label,
+            'confirm'               : operation.confirm,
+            'inbound'               : operation.inbound,
+            'global_operation'      : False
+        })
+        if operation.allow_multiple:
+            self.grid_config['show_item_checkboxes'] = True
+
+        if operation.global_operation:
+            self.grid_config['global_operation'] = url( ** (operation.global_operation()) )
+
+    ## global actions
+    for action in grid.global_actions:
+        self.grid_config['global_actions'].append({
+            'url_args'  : url(**action.url_args),
+            'label'     : action.label,
+            'inbound'   : action.inbound
+        })
+
+    ## Operations that are async (AJAX) compatible.
+    for operation in [op for op in grid.operations if op.async_compatible]:
+        self.grid_config['async_ops'].append(operation.label.lower())
+
+    ## Filter values for categorical filters.
+    for column in grid.columns:
+        if column.filterable is not None and not isinstance( column, TextColumn ):
+            self.grid_config['categorical_filters'][column.key] = dict([ (filter.label, filter.args) for filter in column.get_accepted_filters() ])
+
+    # items
+    for i, item in enumerate( query ):
+        item_dict = {
+            'id'                    : item.id,
+            'encode_id'             : trans.security.encode_id(item.id),
+            'link'                  : [],
+            'operation_config'      : {},
+            'column_config'         : {}
+        }
+
+        ## data columns
+        for column in grid.columns:
+            if column.visible:
+                ## get link
+                link = column.get_link(trans, grid, item)
+                if link:
+                    link = url(**link)
+                else:
+                    link = None
+
+                ## inbound
+                inbound = column.inbound
+
+                ## get value
+                value = column.get_value( trans, grid, item )
+
+                # Handle non-ascii chars.
+                if isinstance(value, str):
+                    value = unicode(value, 'utf-8')
+                    value = value.replace('/', '//')
+
+                ## Item dictionary
+                item_dict['column_config'][column.label] = {
+                    'link'      : link,
+                    'value'     : value,
+                    'inbound'   : inbound
+                }
+        ## add operation details to item
+        for operation in grid.operations:
+            item_dict['operation_config'][operation.label] = {
+                'allowed'   : operation.allowed(item),
+                'url_args'  : url( **operation.get_url_args( item ) )
+            }
+
+        ## add item to list
+        self.grid_config['items'].append(item_dict)
+
+    return self.grid_config
+%>
+</%def>
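+
+## The dictionary assembled above is serialized with h.dumps() in load() and
+## passed as the single constructor argument to the mvc/grid/grid-view module.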
+
diff --git a/templates/grid_base_async.mako b/templates/grid_base_async.mako
new file mode 100644
index 0000000..62f5b52
--- /dev/null
+++ b/templates/grid_base_async.mako
@@ -0,0 +1,4 @@
+<%namespace name="grid_base" file="./grid_base.mako" import="*" />
+
+${init()}
+${h.dumps( grid_base.get_grid_config() )}
diff --git a/templates/ind_share_base.mako b/templates/ind_share_base.mako
new file mode 100644
index 0000000..85bbc31
--- /dev/null
+++ b/templates/ind_share_base.mako
@@ -0,0 +1,195 @@
+##
+## Base template for sharing an item with an individual user. Template expects the following parameters:
+## (a) item - item to be shared.
+##
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            if context.get('webapp'):
+                app_name = context.get('webapp')
+            elif context.get('app'):
+                app_name = context.get('app').name
+            else:
+                app_name = 'galaxy'
+            return '/webapps/%s/base_panels.mako' % app_name
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%namespace file="./display_common.mako" import="*" />
+
+##
+## Page methods.
+##
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.overlay_visible=False
+    self.message_box_class=""
+    self.active_view=""
+    self.body_class=""
+%>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        ## If page is displayed in panels, pad from edges for readability.
+        %if context.get('use_panels'):
+        div#center
+        {
+            padding: 10px;
+        }
+        %endif
+    </style>
+</%def>
+
+    
+<%def name="center_panel()">
+    ${self.body()}
+</%def>
+
+<%def name="body()">
+    %if message:
+    <%
+    if messagetype is UNDEFINED:
+        mt = "done"
+    else:
+        mt = messagetype
+    %>
+    <p />
+    <div class="${mt}message">
+        ${message}
+    </div>
+    <p />
+    %endif
+    
+    <%
+        #
+        # Setup and variables needed for page.
+        #
+    
+        # Get class name strings.
+        item_class_name = get_class_display_name( item.__class__ ) 
+        item_class_name_lc = item_class_name.lower()
+        item_class_plural_name = get_class_plural_display_name( item.__class__ )
+        item_class_plural_name_lc = item_class_plural_name.lower()
+        item_controller = get_controller_name(item)
+        
+        # Get item name.
+        item_name = get_item_name(item)
+    %>
+    
+    <div class="toolForm">
+        <div class="toolFormTitle">Share ${item_class_name} '${item_name | h}' with Another User</div>
+            <div class="toolFormBody">
+                <form action="${h.url_for(controller=item_controller, action='share', id=trans.security.encode_id( item.id ) )}" method="POST">
+                    <div class="form-row">
+                        <label>
+                            Email address of user to share with
+                        </label>
+                        <div style="float: left; width: 100%;  margin-right: 10px;">
+                            <input type="hidden" id="email_select" name="email" >
+                            </input>
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                    <div class="form-row">
+                        <input type="submit" value="Share"></input>
+                    </div>
+                    <div class="form-row">
+                        <a href="${h.url_for(controller=item_controller, action="sharing", id=trans.security.encode_id( item.id ) )}">Back to ${item_class_name}'s Sharing Home</a>
+                    </div>
+                </form>
+            </div>
+        </div>
+    </div>
+
+    <script type="text/javascript">
+    /*  This should be ripped out and made generic at some point for the
+     *  various API bindings available, and once the API can filter list
+     *  queries (term, below) */
+
+    var user_id = "${trans.security.encode_id(trans.user.id)}";
+
+    function item_to_label(item){
+        var text = "";
+        if(typeof(item.username) === "string" && typeof(item.email) === "string"){
+            text = item.username + " <" + item.email + ">";
+        }else if(typeof(item.username) === "string"){
+            text = item.username;
+        }else{
+            text = item.email;
+        }
+        return text;
+        //return "id:" + item.id + "|e:" + item.email + "|u:" + item.username;
+    }
+
+    $("#email_select").select2({
+        placeholder: "Select a user",
+        width: "33%",
+        multiple: false,
+        initSelection: function(element, callback) {
+            var data = [];
+            callback(data);
+        },
+        // Required for initSelection
+        id: function(object) {
+            return object.id;
+        },
+        ajax: {
+            url: "${h.url_for(controller="/api/users", action="index")}",
+            data: function (term) {
+                return {
+                    f_any: term,
+                };
+            },
+            dataType: 'json',
+            quietMillis: 250,
+            results: function (data) {
+                var results = [];
+                // For every user returned by the API call,
+                $.each(data, function(index, item){
+                    // If they aren't the requesting user, add to the
+                    // list that will populate the select
+                    if(item.id != user_id){
+                        // Because we "share-by-email", we can ONLY add a
+                        // result if we can see the email. Hopefully someday
+                        // someone will allow sharing by Galaxy user ID (or
+                        // something else "opaque" and people will be able to
+                        // share-by-username.)
+                        if(item.email !== undefined){
+                            results.push({
+                              id: item.email,
+                              name: item.username,
+                              text: item_to_label(item),
+                            });
+                        }
+                    }
+                });
+                return {
+                    results: results
+                };
+            }
+        },
+        createSearchChoice: function(term, data) {
+            // Check for a user with a matching email.
+            var matches = _.filter(data, function(user){
+                return user.text.indexOf(term) > -1;
+            });
+            // If there aren't any users with matching labels, then display a
+            // "default" entry with whatever text they're entering; its id is
+            // set to term since the id is what gets submitted as the email.
+            if(matches.length == 0){
+                return {id: term, text: term};
+            }
+            // Otherwise return nothing: no extra "create" entry is offered.
+        }
+    });
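+
+    /* The results callback above assumes the /api/users index returns a JSON
+     * list of user objects shaped roughly like (illustrative values):
+     *   [ { "id": "f2db41e1fa331b3e", "email": "user@example.org", "username": "user" }, ... ]
+     * Entries whose email is not visible to the requesting user are skipped,
+     * since sharing here is by email address. */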
+    </script>
+</%def>
diff --git a/templates/js-app.mako b/templates/js-app.mako
new file mode 100644
index 0000000..62f440e
--- /dev/null
+++ b/templates/js-app.mako
@@ -0,0 +1,114 @@
+
+<!DOCTYPE HTML>
+<html>
+    <!--js-app.mako-->
+    <head>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        ## For mobile browsers, don't scale up
+        <meta name="viewport" content="maximum-scale=1.0">
+        ## Force IE to standards mode, and prefer Google Chrome Frame if the user has already installed it
+        <meta http-equiv="X-UA-Compatible" content="IE=Edge,chrome=1">
+
+        <title>Galaxy</title>
+        ## relative href for site root
+        <link rel="index" href="${ h.url_for( '/' ) }"/>
+        ## TODO: use loaders to move everything but the essentials below the fold
+        ${ h.css(
+            'jquery.rating',
+            'jquery-ui/smoothness/jquery-ui',
+            ## base needs to come after jquery-ui because of ui-button, ui- etc. name collision
+            'base',
+            'bootstrap-tour',
+        )}
+        ${ page_setup() }
+    </head>
+
+    <body scroll="no" class="full-content">
+        <div id="everything" style="position: absolute; top: 0; left: 0; width: 100%; height: 100%;">
+            ## TODO: needed?
+            <div id="background"></div>
+
+            %if masthead:
+            <div id="masthead" class="navbar navbar-fixed-top navbar-inverse"></div>
+            ## a div below the masthead to show server messages set in galaxy.ini
+            <div id="messagebox" style="display: none;"></div>
+            ## a message displayed when the user has been inactive and needs to reactivate their account
+            <div id="inactivebox" class="panel-warning-message" style="display: none;"></div>
+            %endif
+
+        </div><!--end everything-->
+        <div id='dd-helper' style="display: none;"></div>
+        ${ js_disabled_warning() }
+
+        ## js libraries and bundled js app
+        ${ h.js(
+            'bundled/libs.bundled',
+            'bundled/' + js_app_name + '.bundled'
+        )}
+        <script type="text/javascript">
+            ${js_app_entry_fn}(
+                ${ h.dumps( options ) },
+                ${ h.dumps( bootstrapped ) }
+            );
+        </script>
+    </body>
+</html>
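+
+## The controller is expected to supply: js_app_name (which bundle above to
+## load), js_app_entry_fn (the global entry function that bundle defines), and
+## the two dicts serialized above -- options (which must carry 'root', used by
+## page_setup() below) and the bootstrapped page data.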
+
+## ============================================================================
+<%def name="page_setup()">
+    ## Send js errors to Sentry server if configured
+    %if app.config.sentry_dsn:
+    ${h.js( "libs/raven" )}
+    <script>
+        Raven.config('${app.config.sentry_dsn_public}').install();
+        %if trans.user:
+            Raven.setUser( { email: "${trans.user.email|h}" } );
+        %endif
+    </script>
+    %endif
+
+    <script type="text/javascript">
+        // this is needed *before* the app code is loaded - many MVC access Galaxy.root for their url
+        // TODO: change this by using a common Backbone.Model base class and url fn
+        window.Galaxy = { root: '${ options[ "root" ] }' };
+    </script>
+
+    %if form_input_auto_focus is not UNDEFINED and form_input_auto_focus:
+    <script type="text/javascript">
+        $(document).ready( function() {
+            // Auto Focus on first item on form
+            if ( $("*:focus").html() == null ) {
+                $(":input:not([type=hidden]):visible:enabled:first").focus();
+            }
+        });
+    </script>
+    %endif
+
+    ## google analytics
+    %if app.config.ga_code:
+    <script type="text/javascript">
+        (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+        (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+        m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+        })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+        ga('create', '${app.config.ga_code}', 'auto');
+        ga('send', 'pageview');
+    </script>
+    %endif
+
+</%def>
+
+## ============================================================================
+<%def name="js_disabled_warning()">
+    <noscript>
+        <div class="overlay overlay-background noscript-overlay">
+            <div>
+                <h3 class="title">Javascript Required for Galaxy</h3>
+                <div>
+                    The Galaxy analysis interface requires a browser with Javascript enabled.<br>
+                    Please enable Javascript and refresh this page.
+                </div>
+            </div>
+        </div>
+    </noscript>
+</%def>
diff --git a/templates/message.mako b/templates/message.mako
new file mode 100644
index 0000000..a47b02c
--- /dev/null
+++ b/templates/message.mako
@@ -0,0 +1,62 @@
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            if context.get('webapp'):
+                app_name = context.get('webapp')
+            elif context.get('app'):
+                app_name = context.get('app').name
+            else:
+                app_name = 'galaxy'
+            return '/webapps/%s/base_panels.mako' % app_name
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%namespace file="/refresh_frames.mako" import="handle_refresh_frames" />
+
+<% _=n_ %>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view=active_view
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${handle_refresh_frames()}
+    <script type="text/javascript">
+        if ( parent.handle_minwidth_hint )
+        {
+            parent.handle_minwidth_hint( -1 );
+        }
+    </script>
+</%def>
+
+##
+## Override methods from base.mako and base_panels.mako
+##
+
+<%def name="center_panel()">
+    ${render_large_message( message, status )}
+</%def>
+
+<%def name="body()">
+    ${render_large_message( message, status )}
+</%def>
+
+## Render large message.
+<%def name="render_large_message( message, status )">
+    <div class="${status}messagelarge" style="margin: 1em">${_(message)}</div>
+</%def>
+
+## Render a message
+<%def name="render_msg( msg, status='done' )">
+    <div class="${status}message">${_(msg)}</div>
+    <br/>
+</%def>
+
diff --git a/templates/no_access.mako b/templates/no_access.mako
new file mode 100644
index 0000000..031cafb
--- /dev/null
+++ b/templates/no_access.mako
@@ -0,0 +1,15 @@
+<%inherit file="/base.mako"/>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        if ( parent.force_left_panel ) {
+            parent.force_left_panel( 'hide' );
+        }
+        if ( parent.force_right_panel ) {
+            parent.force_right_panel( 'hide' );
+        }
+    </script>
+</%def>
+
+<div class="errormessage">${message}</div>
diff --git a/templates/page_base.mako b/templates/page_base.mako
new file mode 100644
index 0000000..ca46548
--- /dev/null
+++ b/templates/page_base.mako
@@ -0,0 +1,199 @@
+<%doc>
+    This file defines methods for displaying information about pagination
+</%doc>
+
+<%def name="get_page_url( sort_id, order, *args, **kwargs )">
+        <a href="${h.url_for( controller=args[0], action=args[1], sort_id=sort_id, order=order, **kwargs )}">${kwargs.get("page")}</a>
+</%def>
+
+<%!
+    def get_raw_url(sort_id, order, *args, **kwargs):
+        return h.url_for( controller=args[0], action=args[1], sort_id=sort_id, order=order, **kwargs )
+%>
+
+<%def name="get_pages( sort_id, order, page_specs, *args, **kwargs )">
+    ## Creates the page buttons
+    ${get_page_script()}
+
+    <div id="page_selector">
+        <div id="back_button">&#x219e;</div>
+        %for x in range(-2,3):
+            <% 
+               page = int(page_specs.page) + x
+               pages_found = int(page_specs.pages_found)
+            %>
+            %if page > 0:
+                %if x == 0:
+                    <div id="curr_button">${page}</div>
+                %elif page < page_specs.page + pages_found:
+                    <%
+                       entries = page_specs.entries
+                       offset = page_specs.entries * (page - 1)
+                    %>
+                    %if x == -2 and page > 1:
+                        <div class="miss_pages">...</div>
+                    %endif
+                    <div class="page_button">${get_page_url( sort_id, order, *args, page=page, offset=offset, entries=entries, **kwargs )}</div>
+                    %if x == 2 and pages_found == 4:
+                        <div class="miss_pages">...</div>
+                    %endif
+                %endif
+            %endif
+        %endfor
+        <div id="next_button">&#x21a0;</div>
+    </div>
+</%def>
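+
+## Worked example (illustrative): with page_specs.page == 5, enough pages
+## found, and entries == 25, the loop above renders links for pages 3 and 4,
+## the current page 5, then 6 and 7, passing offset = entries * (page - 1),
+## i.e. offsets 50, 75, 125 and 150 for the four links.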
+
+<%def name="get_entry_selector(controller, action, entries, sort_id, order)">
+    <div id="entry_form" >
+        <form method="post" controller=${controller} action=${action}>
+            <input type="hidden" value=${sort_id} name="sort_id">
+            <input type="hidden" value=${order} name="order">
+            %try:
+                %if spark_limit:
+                    <input type="hidden" value=${spark_limit} name="spark_limit">
+                %endif
+            %except NameError:
+            %endtry
+            Max items:
+            <input id="entries_edit"
+                   type="text"
+                   name="entries"
+                   value="${entries}">
+            </input>
+            <button id="entry_submit">Go</button>
+        </form>
+    </div>
+</%def>
+
+<%def name="get_page_script()">
+    <script>
+        $(document).ready( function(e) {
+            var drop_down = false;
+            
+            //Close the dropdown in the event of focusout() from entries edit
+            $("#entries_edit").focusout( function(e) {
+                var speed = 50;
+                
+                $("#entry_submit").css("cursor", "default");
+                $("#entry_submit").fadeTo(speed, 0.0);
+                $(".st4").animate({top: "-=18px"}, {duration: speed, queue: false, complete: function() {
+                    $(".st3").animate({top: "-=18px"}, {duration: speed, queue: false, complete: function() {
+                        $(".st2").animate({top: "-=18px"}, {duration: speed, queue: false, complete: function() {
+                            $(".st1").animate({top: "-=18px"}, {duration: speed, queue: false, complete: function() {
+                                $(".st1").remove()
+                            }});
+                        }});
+                    }});
+                }});
+                
+                drop_down = false;
+            });
+            
+            //Make sure the elements stay correctly positioned
+            $("#formHeader").css("margin-left", $(".colored").css("margin-left"));
+            $("#formHeader").css("width", $(".colored").css("width"));
+            $(window).resize( function(e) {
+                $("#formHeader").css("margin-left", $(".colored").css("margin-left"));
+                $("#formHeader").css("width", $(".colored").css("width"));
+                
+                //Remove drop down for entry amount selection
+                $(".st1").remove();
+                $("#entry_submit").css("cursor", "default");
+                $("#entry_submit").css("opacity", "0.0");
+                $("#entry_submit").blur();
+                
+                drop_down = false;
+            });
+            
+            //If there are pages to go back to, go back
+            if( $("#curr_button").html() == 1) {
+                $("#back_button").css( "cursor", "default" );
+                $("#back_button").css( "color", "grey" );
+            }
+            $("#back_button").click( function(e) {
+                if( $("#curr_button").html() != 1) {
+                    window.open( $(".page_button:first").children().attr("href"), "_self" );
+                }
+            });
+            
+            //If there is a next page, go to the next page
+            if( ${int(page_specs.pages_found)} == 1 ) {
+                $("#next_button").css( "cursor", "default" );
+                $("#next_button").css( "color", "grey" );
+            }
+            $("#next_button").click( function(e) {
+                if( ${int(page_specs.pages_found)} > 1 ) {
+                    window.open( $(".page_button:last").children().attr("href"), "_self" );
+                }
+            });
+            
+            //Select amount of entries per page
+            $("#entry_form").on( "mousedown", ".st1", function(e) {
+                e.preventDefault();
+                $("#entries_edit").val( $(this).html() );
+            });
+            
+            $("#entry_form").on("mouseenter", ".st1", function(e) {
+                    $(this).css({
+                        "border-color": "black",
+                        "background-color": "#ebd9b2",
+                    })
+            });
+            
+            $("#entry_form").on("mouseleave", ".st1", function(e) {
+                $(this).css({
+                    "border-color": "grey",
+                    "background-color": "white",
+                })
+            });
+            
+            $("#entries_edit").click( function(e) {
+                if(!drop_down) {
+                    //Initialize items
+                    $("#entries_edit").parent().append("<div class=\"st1\"\">10</div>");
+                    $("#entries_edit").parent().append("<div class=\"st1 st2\">25</div>");
+                    $("#entries_edit").parent().append("<div class=\"st1 st2 st3\">50</div>");
+                    $("#entries_edit").parent().append("<div class=\"st1 st2 st3 st4\">100</div>");
+                    
+                    $("#entry_submit").css("cursor", "pointer");
+                    
+                    var top_pos = $("#entries_edit").offset().top;
+                    var left_pos = $("#entries_edit").offset().left;
+                    $(".st1").css({
+                        "cursor": "pointer",
+                        "position": "absolute",
+                        "text-align": "center",
+                        "border": "1px solid grey",
+                        "background-color": "white",
+                        "margin-left": "3px",
+                        "top": top_pos,
+                        "left": left_pos,
+                        "width": "30px",
+                        "z-index": "4",
+                    });
+                    $(".st1").css({
+                        "top": $("#entries_edit").offset().top,
+                    });
+                    $(".st2").css({"z-index": "3"})
+                    $(".st3").css({"z-index": "2"})
+                    $(".st4").css({
+                        "z-index": "1",
+                        "border-bottom-left-radius": "3px",
+                        "border-bottom-right-radius": "3px",
+                    });
+                
+                    //Animate items
+                    var speed = 50;
+                    $("#entry_submit").fadeTo(speed, 1.0);
+                    $(".st1").animate({top: "+=18px"}, speed);
+                    $(".st2").animate({top: "+=18px"}, speed);
+                    $(".st3").animate({top: "+=18px"}, speed);
+                    $(".st4").animate({top: "+=18px"}, speed);
+                }
+                
+                drop_down = true;
+            });
+        });
+    </script>
+</%def>
diff --git a/templates/panels.mako b/templates/panels.mako
new file mode 100644
index 0000000..29d1e02
--- /dev/null
+++ b/templates/panels.mako
@@ -0,0 +1,18 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view=active_view
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="center_panel()">
+    <div style="overflow: auto; height: 100%;">
+        <div style="padding: 10px">
+            ${grid}
+        </div>
+    </div>
+</%def>
diff --git a/templates/refresh_frames.mako b/templates/refresh_frames.mako
new file mode 100644
index 0000000..7ec18e8
--- /dev/null
+++ b/templates/refresh_frames.mako
@@ -0,0 +1,61 @@
+## Include JavaScript code to refresh Galaxy application frames as needed.
+<%def name="handle_refresh_frames()">
+    ## If no refresh frames, print nothing.
+    <%
+        if not refresh_frames: return ''
+    %>
+
+    ## Write JavaScript to refresh specified frames.
+    <script type="text/javascript">
+        function user_changed( user_email, is_admin ) {
+            if ( user_email ) {
+                $(".loggedin-only").show();
+                $(".loggedout-only").hide();
+                $("#user-email").text( user_email );
+                if ( is_admin ) {
+                    $(".admin-only").show();
+                }
+            } else {
+                $(".loggedin-only").hide();
+                $(".loggedout-only").show();
+                $(".admin-only").hide();
+            }
+        }
+
+    %if 'everything' in refresh_frames:
+        parent.location.href="${h.url_for( controller='root' )}";
+    %endif
+    %if 'masthead' in refresh_frames:
+        ## if ( parent.frames && parent.frames.galaxy_masthead ) {
+        ##     parent.frames.galaxy_masthead.location.href="${h.url_for( controller='root', action='masthead')}";
+        ## }
+        ## else if ( parent.parent && parent.parent.frames && parent.parent.frames.galaxy_masthead ) {
+        ##     parent.parent.frames.galaxy_masthead.location.href="${h.url_for( controller='root', action='masthead')}";
+        ## }
+
+        ## Refresh masthead == user changes (backward compatibility)
+        if ( parent.user_changed ) {
+            %if trans.user:
+                parent.user_changed( "${trans.user.email | h }", ${int( app.config.is_admin_user( trans.user ) )} );
+            %else:
+                parent.user_changed( null, false );
+            %endif
+        }
+    %endif
+    %if 'history' in refresh_frames:
+        if( top.Galaxy && top.Galaxy.currHistoryPanel ){
+            top.Galaxy.currHistoryPanel.loadCurrentHistory();
+        }
+    %endif
+    %if 'tools' in refresh_frames:
+        if ( parent.frames && Galaxy.toolPanel ) {
+            // FIXME: refreshing the tool menu does not work with new JS-based approach,
+            // but refreshing the tool menu is not used right now, either.
+
+            if ( parent.force_left_panel ) {
+                parent.force_left_panel( 'show' );
+            }
+        }
+    %endif
+    </script>
+</%def>
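+
+## Illustrative usage: templates import this def and call it from their
+## javascripts() block, e.g. (as message.mako does)
+##   <%namespace file="/refresh_frames.mako" import="handle_refresh_frames" />
+##   ${handle_refresh_frames()}
+## with the controller supplying refresh_frames, e.g. ['masthead', 'history'].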
diff --git a/templates/rss.mako b/templates/rss.mako
new file mode 100644
index 0000000..d516855
--- /dev/null
+++ b/templates/rss.mako
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0">
+    <channel>
+        <title>${title}</title>
+        <link>${link}</link>
+        <pubDate>${pubdate}</pubDate>
+        <description>${description}</description>
+        <language>en-US</language>
+        <ttl>60</ttl>
+        <docs>http://cyber.law.harvard.edu/rss/rss.html</docs>
+        %for item in items:
+            <item>
+                <pubDate>${item['pubdate']}</pubDate>
+                <title>${item['title']}</title>
+                <link>${item['link']}</link>
+                <guid>${item['guid']}</guid>
+                <description>
+                    ${item['description']}
+                </description>
+            </item>
+        %endfor
+    </channel>
+</rss>
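+## Expected context (from the references above): channel-level title, link,
+## pubdate and description strings, plus items as a list of dicts, e.g.
+## (illustrative values):
+##   items = [ { 'pubdate': 'Sun, 15 Jan 2017 12:00:00 GMT', 'title': 'New page',
+##               'link': 'http://example.org/p/new-page', 'guid': 'http://example.org/p/new-page',
+##               'description': '...' } ]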
diff --git a/templates/search/index.mako b/templates/search/index.mako
new file mode 100644
index 0000000..a921c95
--- /dev/null
+++ b/templates/search/index.mako
@@ -0,0 +1,133 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+    <%
+        self.has_left_panel=False
+        self.has_right_panel=False
+        self.message_box_visible=False
+        self.active_view="shared"
+        self.overlay_visible=False
+    %>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+    div.historyItem {
+        margin: 0px -5px;
+        padding: 8px 10px;
+        border-top: solid #999 1px;
+        border-right: none;
+        word-wrap: break-word;
+        background: #EEE;
+    }
+    div.historyItem .state-icon {
+        display: inline-block;
+        vertical-align: middle;
+        width: 16px;
+        height: 16px;
+        background-position: 0 1px;
+        background-repeat: no-repeat;
+    }
+    div.historyItem .historyItemTitle {
+        font-weight: bold;
+        line-height: 16px;
+    }
+
+    .searchResult {
+        border-style:dashed;
+        border-width:1px;
+        margin: 5px;
+    }
+    </style>
+</%def>
+
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js(
+        "libs/jquery/jquery",
+    )}
+    <script type="text/javascript">
+
+        function search_format_output(doc) {
+            var div_class = "historyItem";
+            var a = $("<div class='" + div_class + "'>")
+            a.append($("<div>").append(doc['model_class']));
+            b = a.append( $("<div class='historyItemTitle'><a href='/file/" + doc['id'] + "'>" + doc['name'] + "</a></div>") );
+            if ('misc_blurb' in doc) {
+                b.append( $("<div>").append(doc["misc_blurb"]) );
+            }
+            if ('peek' in doc) {
+                b.append( $("<pre class='peek'>").append( doc["peek"]) );
+            }
+            return a;
+        }
+
+        function doSearch(query) {
+            if (query.length > 1) {
+                var url = "/api/search";
+                $.ajax({
+                    type : 'POST',
+                    url: url,
+                    data: JSON.stringify({"query" : query }),
+                    contentType : 'application/json',
+                    dataType : 'json',
+                    success : function(data) {
+                        var p = $("#output");
+                        p.empty();
+                        // Render each result with the helper above and append it.
+                        _.each(data.results, function(doc) {
+                            p.append( search_format_output(doc) );
+                        });
+                    }
+                });
+            }
+        };
+
+
+        var queryURL = function (query) {
+            var url = "/api/search" + encodeURIComponent(query);
+            url = url + "&field=" + $("#searchFields").val();
+            if ($("#fileType").val() != "All") {
+                url = url + "&type=" +  $("#fileType").val()
+            }
+            return url;
+        }
+
+        $(document).ready( function() {
+            $("#search_button").click(function() {
+                doSearch($("#search_text").val());
+            });
+            $('#search_text').keyup(function(e){
+                if(e.keyCode == 13) {
+                    doSearch($("#search_text").val());
+                }
+            });
+            doSearch($("#search_text").val());
+        });
+    </script>
+
+</%def>
+
+
+<%def name="center_panel()">
+
+    <div id="search_box" style="margin: 20px;">
+        <input type="text" id="search_text" size="90"/>
+    </div>
+    <div style="margin: 20px;">
+        <input type="button" id="search_button" value="Search"/>
+    </div>
+    <div id="output"></div>
+
+</%def>
diff --git a/templates/sharing_base.mako b/templates/sharing_base.mako
new file mode 100644
index 0000000..4e61e0a
--- /dev/null
+++ b/templates/sharing_base.mako
@@ -0,0 +1,243 @@
+##
+## Base template for sharing or publishing an item. Template expects the following parameters:
+## (a) item - item to be shared.
+##
+<%!
+    def inherit(context):
+        if context.get('use_panels', False) == True:
+            if context.get('webapp'):
+                app_name = context.get('webapp')
+            elif context.get('app'):
+                app_name = context.get('app').name
+            else:
+                app_name = 'galaxy'
+            return '/webapps/%s/base_panels.mako' % app_name
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%namespace file="/display_common.mako" import="*" />
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/slug_editing_js.mako" import="*" />
+
+##
+## Page methods.
+##
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.overlay_visible=False
+    self.message_box_class=""
+    self.active_view=""
+    self.body_class=""
+%>
+</%def>
+
+<%def name="title()">
+    Sharing and Publishing ${get_class_display_name( item.__class__ )} '${get_item_name( item ) | h}'
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${slug_editing_js(item)}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        ## Put some whitespace before each section header.
+        h3 {
+            margin-top: 2em;
+        }
+        input.action-button {
+            margin-left: 0;
+        }
+        ## If page is displayed in panels, pad from edges for readability.
+        %if context.get('use_panels'):
+            div#center {
+                padding: 10px;
+            }
+        %endif
+    </style>
+</%def>
+
+<%def name="center_panel()">
+    ${self.body()}
+</%def>
+
+<%def name="body()">
+    ## Set use_panels var for use in page's URLs.
+    <% use_panels = context.get('use_panels', False)  %>
+    <% controller_name = get_controller_name( item ) %>
+
+    ## Render message.
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+
+    <%
+        #
+        # Setup and variables needed for page.
+        #
+
+        # Get class name strings.
+        item_class_name = get_class_display_name( item.__class__ )
+        item_class_name_lc = item_class_name.lower()
+        item_class_plural_name = get_class_plural_display_name( item.__class__ )
+        item_class_plural_name_lc = item_class_plural_name.lower()
+
+        # Get item name.
+        item_name = get_item_name(item)
+    %>
+
+    <h2>Share or Publish ${item_class_name} '${item_name | h}'</h2>
+
+    ## Require that user have a public username before sharing or publishing an item.
+    %if not trans.get_user().username:
+        <p>To make a ${item_class_name_lc} accessible via link or publish it, you must create a public username:</p>
+
+        <form action="${h.url_for( controller=controller_name, action='set_public_username', id=trans.security.encode_id( item.id ) )}"
+                method="POST">
+            <div class="form-row">
+                <label>Public Username:</label>
+                <div class="form-row-input">
+                    <input type="text" name="username" size="40"/>
+                </div>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input class="action-button" type="submit" name="Set Username" value="Set Username"/>
+            </div>
+        </form>
+    %else:
+        ## User has a public username, so show sharing and publishing options.
+        <h3>Make ${item_class_name} Accessible via Link and Publish It</h3>
+
+            <div>
+                %if item.importable:
+                    <%
+                        item_status = "accessible via link"
+                        if item.published:
+                            item_status = item_status + " and published"
+                    %>
+                    This ${item_class_name_lc} is currently <strong>${item_status}</strong>.
+                    <div>
+                        <p>Anyone can view and import this ${item_class_name_lc} by visiting the following URL:
+
+                        <blockquote>
+                            <%
+                                url = h.url_for( controller=controller_name, action='display_by_username_and_slug', username=trans.get_user().username, slug=item.slug, qualified=True )
+                                url_parts = url.split("/")
+                            %>
+                            <a id="item-url" href="${url}" target="_top">${url}</a>
+                            <span id="item-url-text" style="display: none">
+                                ${"/".join( url_parts[:-1] )}/<span id='item-identifier'>${url_parts[-1]}</span>
+                            </span>
+
+                            <a href="#" id="edit-identifier"><img src="${h.url_for('/static/images/fugue/pencil.png')}"/></a>
+                        </blockquote>
+
+                        %if item.published:
+                            This ${item_class_name_lc} is publicly listed and searchable in Galaxy's <a href='${h.url_for( controller=controller_name, action='list_published' )}' target="_top">Published ${item_class_plural_name}</a> section.
+                        %endif
+                    </div>
+
+                    <p>You can:
+                    <div>
+                    <form action="${h.url_for( controller=controller_name, action='sharing', id=trans.security.encode_id( item.id ) )}" method="POST">
+                        %if not item.published:
+                            ## Item is importable but not published. User can disable importable or publish.
+                            <input class="action-button" type="submit" name="disable_link_access" value="Disable Access to ${item_class_name} Link">
+                            <div class="toolParamHelp">Disables ${item_class_name_lc}'s link so that it is not accessible.</div>
+                            <br />
+                            <input class="action-button" type="submit" name="publish" value="Publish ${item_class_name}" method="POST">
+                            <div class="toolParamHelp">Publishes the ${item_class_name_lc} to Galaxy's <a href='${h.url_for( controller=controller_name, action='list_published' )}' target="_top">Published ${item_class_plural_name}</a> section, where it is publicly listed and searchable.</div>
+
+                        <br />
+                        %else: ## item.published == True
+                            ## Item is importable and published. User can unpublish or disable import and unpublish.
+                            <input class="action-button" type="submit" name="unpublish" value="Unpublish ${item_class_name}">
+                            <div class="toolParamHelp">Removes this ${item_class_name_lc} from Galaxy's <a href='${h.url_for(controller=controller_name, action='list_published' )}' target="_top">Published ${item_class_plural_name}</a> section so that it is not publicly listed or searchable.</div>
+                            <br />
+                            <input class="action-button" type="submit" name="disable_link_access_and_unpublish" value="Disable Access to ${item_class_name} via Link and Unpublish">
+                            <div class="toolParamHelp">Disables this ${item_class_name_lc}'s link so that it is not accessible and removes ${item_class_name_lc} from Galaxy's <a href='${h.url_for(controller=controller_name, action='list_published' )}' target='_top'>Published ${item_class_plural_name}</a> section so that it is not publicly listed or searchable.</div>
+                        %endif
+                    </form>
+                    </div>
+
+                %else:
+
+                    <p>This ${item_class_name_lc} is currently restricted so that only you and the users listed below can access it. You can:</p>
+
+                    <form action="${h.url_for(controller=controller_name, action='sharing', id=trans.security.encode_id(item.id) )}" method="POST">
+                        <input class="action-button" type="submit" name="make_accessible_via_link" value="Make ${item_class_name} Accessible via Link">
+                        <div class="toolParamHelp">Generates a web link that you can share with other people so that they can view and import the ${item_class_name_lc}.</div>
+
+                        <br />
+                        <input class="action-button" type="submit" name="make_accessible_and_publish" value="Make ${item_class_name} Accessible and Publish" method="POST">
+                        <div class="toolParamHelp">Makes the ${item_class_name_lc} accessible via link (see above) and publishes the ${item_class_name_lc} to Galaxy's <a href='${h.url_for(controller=controller_name, action='list_published' )}' target='_top'>Published ${item_class_plural_name}</a> section, where it is publicly listed and searchable.</div>
+                    </form>
+
+                %endif
+
+        ##
+        ## Sharing with Galaxy users.
+        ##
+        <h3>Share ${item_class_name} with Individual Users</h3>
+
+            <div>
+                %if item.users_shared_with:
+
+                    <p>
+                        The following users will see this ${item_class_name_lc} in their ${item_class_name_lc} list and will be
+                        able to view, import, and run it.
+                    </p>
+
+                    <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+                        <tr class="header">
+                            <th>Email</th>
+                            <th></th>
+                        </tr>
+                        %for i, association in enumerate( item.users_shared_with ):
+                            <% user = association.user %>
+                            <tr>
+                                <td>
+                                    <div class="menubutton popup" id="user-${i}-popup">${user.email}</div>
+                                </td>
+                                <td>
+                                    <div popupmenu="user-${i}-popup">
+                                    <a class="action-button" href="${h.url_for(controller=controller_name, action='sharing', id=trans.security.encode_id( item.id ), unshare_user=trans.security.encode_id( user.id ), use_panels=use_panels )}">Unshare</a>
+                                    </div>
+                                </td>
+                            </tr>
+                        %endfor
+                    </table>
+
+                    <p>
+                    <a class="action-button"
+                       href="${h.url_for(controller=controller_name, action='share', id=trans.security.encode_id(item.id), use_panels=use_panels )}">
+                        <span>Share with another user</span>
+                    </a>
+
+                %else:
+
+                    <p>You have not shared this ${item_class_name_lc} with any users.</p>
+
+                    <a class="action-button"
+                       href="${h.url_for(controller=controller_name, action='share', id=trans.security.encode_id(item.id), use_panels=use_panels )}">
+                        <span>Share with a user</span>
+                    </a>
+                    <br />
+
+                %endif
+            </div>
+        </div>
+    %endif
+
+    <br /><br />
+    <a href="${h.url_for(controller=controller_name, action="list" )}">Back to ${item_class_plural_name} List</a>
+</%def>
diff --git a/templates/show_params.mako b/templates/show_params.mako
new file mode 100644
index 0000000..76b5f2d
--- /dev/null
+++ b/templates/show_params.mako
@@ -0,0 +1,251 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<% from galaxy.util import listify, nice_size, unicodify %>
+
+<style>
+    .inherit {
+        border: 1px solid #bbb;
+        padding: 15px;
+        text-align: center;
+        background-color: #eee;
+    }
+</style>
+
+<%def name="inputs_recursive( input_params, param_values, depth=1, upgrade_messages=None )">
+    <%
+        if upgrade_messages is None:
+            upgrade_messages = {}
+    %>
+    %for input_index, input in enumerate( input_params.itervalues() ):
+        %if input.name in param_values:
+            %if input.type == "repeat":
+                %for i in range( len(param_values[input.name]) ):
+                    ${ inputs_recursive(input.inputs, param_values[input.name][i], depth=depth+1) }
+                %endfor
+            %elif input.type == "section":
+                <tr>
+                    ##<!-- Get the value of the current Section parameter -->
+                    ${inputs_recursive_indent( text=input.name, depth=depth )}
+                    <td></td>
+                </tr>
+                ${ inputs_recursive( input.inputs, param_values[input.name], depth=depth+1, upgrade_messages=upgrade_messages.get( input.name ) ) }
+            %elif input.type == "conditional":
+                <%
+                try:
+                    current_case = param_values[input.name]['__current_case__']
+                    is_valid = True
+                except ( KeyError, TypeError, IndexError ):
+                    current_case = None
+                    is_valid = False
+                %>
+                %if is_valid:
+                    <tr>
+                        ${ inputs_recursive_indent( text=input.test_param.label, depth=depth )}
+                        ##<!-- Get the value of the current Conditional parameter -->
+                        <td>${input.cases[current_case].value | h}</td>
+                        <td></td>
+                    </tr>
+                    ${ inputs_recursive( input.cases[current_case].inputs, param_values[input.name], depth=depth+1, upgrade_messages=upgrade_messages.get( input.name ) ) }
+                %else:
+                    <tr>
+                        ${ inputs_recursive_indent( text=input.name, depth=depth )}
+                        <td><em>The previously used value is no longer valid</em></td>
+                        <td></td>
+                    </tr>
+                %endif
+            %elif input.type == "upload_dataset":
+                    <tr>
+                        ${inputs_recursive_indent( text=input.group_title( param_values ), depth=depth )}
+                        <td>${ len( param_values[input.name] ) } uploaded datasets</td>
+                        <td></td>
+                    </tr>
+            ## files used for inputs
+            %elif input.type == "data":
+                    <tr>
+                        ${inputs_recursive_indent( text=input.label, depth=depth )}
+                        <td>
+                        %for i, element in enumerate(listify(param_values[input.name])):
+                            %if i > 0:
+                            ,
+                            %endif
+                            %if element.history_content_type == "dataset":
+                                <%
+                                    hda = element
+                                    encoded_id = trans.security.encode_id( hda.id )
+                                    show_params_url = h.url_for( controller='dataset', action='show_params', dataset_id=encoded_id )
+                                %>
+                                <a class="input-dataset-show-params" data-hda-id="${encoded_id}"
+                                       href="${show_params_url}">${hda.hid}: ${hda.name | h}</a>
+
+                            %else:
+                                ${element.hid}: ${element.name | h}
+                            %endif
+                        %endfor
+                        </td>
+                        <td></td>
+                    </tr>
+             %elif input.visible:
+                <%
+                if  hasattr( input, "label" ) and input.label:
+                    label = input.label
+                else:
+                    #value for label not required, fallback to input name (same as tool panel)
+                    label = input.name
+                %>
+                <tr>
+                    ${inputs_recursive_indent( text=label, depth=depth )}
+                    <td>${input.value_to_display_text( param_values[input.name], trans.app ) | h}</td>
+                    <td>${ upgrade_messages.get( input.name, '' ) | h }</td>
+                </tr>
+            %endif
+        %else:
+            ## Parameter does not have a stored value.
+            <tr>
+                <%
+                    # Get parameter label.
+                    if input.type == "conditional":
+                        label = input.test_param.label
+                    elif input.type == "repeat":
+                        label = input.label()
+                    else:
+                        label = input.label or input.name
+                %>
+                ${inputs_recursive_indent( text=label, depth=depth )}
+                <td><em>not used (parameter was added after this job was run)</em></td>
+                <td></td>
+            </tr>
+        %endif
+
+    %endfor
+</%def>
+
+## Helper def that indents the label <td> according to the recursion depth.
+<%def name="inputs_recursive_indent( text, depth )">
+    <td style="padding-left: ${ ( depth - 1 ) * 10 }px">
+        ${text | h}
+    </td>
+</%def>
+
+<table class="tabletip">
+    <thead>
+        <tr><th colspan="2" style="font-size: 120%;">
+            % if tool:
+                Tool: ${tool.name | h}
+            % else:
+                Unknown Tool
+            % endif
+        </th></tr>
+    </thead>
+    <tbody>
+        <%
+        encoded_hda_id = trans.security.encode_id( hda.id )
+        encoded_history_id = trans.security.encode_id( hda.history_id )
+        %>
+        <tr><td>Number:</td><td>${hda.hid | h}</td></tr>
+        <tr><td>Name:</td><td>${hda.name | h}</td></tr>
+        <tr><td>Created:</td><td>${unicodify(hda.create_time.strftime(trans.app.config.pretty_datetime_format))}</td></tr>
+        ##      <tr><td>Copied from another history?</td><td>${hda.source_library_dataset}</td></tr>
+        <tr><td>Filesize:</td><td>${nice_size(hda.dataset.file_size)}</td></tr>
+        <tr><td>Dbkey:</td><td>${hda.dbkey | h}</td></tr>
+        <tr><td>Format:</td><td>${hda.ext | h}</td></tr>
+        %if job:
+            <tr><td>Galaxy Tool ID:</td><td>${ job.tool_id | h }</td></tr>
+            <tr><td>Galaxy Tool Version:</td><td>${ job.tool_version | h }</td></tr>
+        %endif
+        <tr><td>Tool Version:</td><td>${hda.tool_version | h}</td></tr>
+        <tr><td>Tool Standard Output:</td><td><a href="${h.url_for( controller='dataset', action='stdout', dataset_id=encoded_hda_id )}">stdout</a></td></tr>
+        <tr><td>Tool Standard Error:</td><td><a href="${h.url_for( controller='dataset', action='stderr', dataset_id=encoded_hda_id )}">stderr</a></td></tr>
+        %if job:
+            <tr><td>Tool Exit Code:</td><td>${ job.exit_code | h }</td></tr>
+        %endif
+        <tr><td>History Content API ID:</td><td>${encoded_hda_id}</td></tr>
+        %if job:
+            <tr><td>Job API ID:</td><td>${trans.security.encode_id( job.id )}</td></tr>
+        %endif
+        <tr><td>History API ID:</td><td>${encoded_history_id}</td></tr>
+        %if hda.dataset.uuid:
+        <tr><td>UUID:</td><td>${hda.dataset.uuid}</td></tr>
+        %endif
+        %if trans.user_is_admin() or trans.app.config.expose_dataset_path:
+            <tr><td>Full Path:</td><td>${hda.file_name | h}</td></tr>
+        %endif
+        %if job and job.command_line and trans.user_is_admin():
+            <tr><td>Job Command-Line:</td><td>${ job.command_line | h }</td></tr>
+        %endif
+        %if job and trans.user_is_admin():
+            <% job_metrics = trans.app.job_metrics %>
+            %for metric in job.metrics:
+                <% metric_title, metric_value = job_metrics.format( metric.plugin, metric.metric_name, metric.metric_value ) %>
+                <tr><td>${ metric_title | h }</td><td>${ metric_value | h }</td></tr>
+            %endfor
+        %endif
+</table>
+<br />
+
+<table class="tabletip">
+    <thead>
+        <tr>
+            <th>Input Parameter</th>
+            <th>Value</th>
+            <th>Note for rerun</th>
+        </tr>
+    </thead>
+    <tbody>
+        % if params_objects and tool:
+            ${ inputs_recursive( tool.inputs, params_objects, depth=1, upgrade_messages=upgrade_messages ) }
+        %elif params_objects is None:
+            <tr><td colspan="3">Unable to load parameters.</td></tr>
+        % else:
+            <tr><td colspan="3">No parameters.</td></tr>
+        % endif
+    </tbody>
+</table>
+%if has_parameter_errors:
+    <br />
+    ${ render_msg( 'One or more of your original parameters may no longer be valid or displayed properly.', status='warning' ) }
+%endif
+
+%if job and job.dependencies:
+    <br>
+    <table class="tabletip">
+        <thead>
+        <tr>
+            <th>Dependency</th>
+            <th>Dependency Type</th>
+            <th>Version</th>
+        </tr>
+        </thead>
+        <tbody>
+
+            %for dependency in job.dependencies:
+                <tr><td>${ dependency['name'] | h }</td>
+                    <td>${ dependency['dependency_type'] | h }</td>
+                    <td>${ dependency['version'] | h }</td>
+                </tr>
+            %endfor
+
+        </tbody>
+    </table>
+    <br />
+%endif
+
+<script type="text/javascript">
+$(function(){
+    $( '.input-dataset-show-params' ).on( 'click', function( ev ){
+        ## Reach into the containing frame for the Galaxy object that holds the history panel.
+        if( window.parent.Galaxy && window.parent.Galaxy.currHistoryPanel ){
+            window.parent.Galaxy.currHistoryPanel.scrollToId( 'dataset-' + $( this ).data( 'hda-id' ) );
+        }
+    })
+});
+</script>
+
+    <h3>Inheritance Chain</h3>
+    <div class="inherit" style="background-color: #fff; font-weight:bold;">${hda.name | h}</div>
+
+    % for dep in inherit_chain:
+        <div style="font-size: 36px; text-align: center; position: relative; top: 3px">↑</div>
+        <div class="inherit">
+            '${dep[0].name | h}' in ${dep[1]}<br/>
+        </div>
+    % endfor
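
The inputs_recursive def above emits one table row per tool parameter, recursing into repeats, sections, and conditionals, and printing a "not used" note when a parameter has no stored value. A minimal Python sketch of the same traversal, with hypothetical dict-based nodes standing in for Galaxy's real tool-input classes:

    # Illustrative sketch only: dicts stand in for Galaxy's input objects.
    def walk_inputs(inputs, values, depth=1, rows=None):
        rows = [] if rows is None else rows
        for inp in inputs:
            name = inp["name"]
            if name not in values:
                rows.append((depth, inp.get("label", name), "not used"))
            elif inp["type"] == "repeat":
                for block in values[name]:        # one sub-tree per repeat block
                    walk_inputs(inp["inputs"], block, depth + 1, rows)
            elif inp["type"] == "section":
                rows.append((depth, name, ""))
                walk_inputs(inp["inputs"], values[name], depth + 1, rows)
            else:                                 # plain parameter row
                rows.append((depth, inp.get("label", name), values[name]))
        return rows
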
diff --git a/templates/slug_editing_js.mako b/templates/slug_editing_js.mako
new file mode 100644
index 0000000..e7bb1e7
--- /dev/null
+++ b/templates/slug_editing_js.mako
@@ -0,0 +1,36 @@
+<%namespace file="/display_common.mako" import="get_controller_name" />
+
+<%def name="slug_editing_js(item)">
+    <script type="text/javascript">
+    $(document).ready( function() {
+        //
+        // Set up slug-editing functionality.
+        //
+        var on_start = function( text_elt ) {
+            // Replace URL with URL text.
+            $('#item-url').hide();
+            $('#item-url-text').show();
+
+            // Allow only lowercase alphanumeric and '-' characters in slug.
+            text_elt.keyup(function(){
+                text_elt.val( $(this).val().replace(/\s+/g,'-').replace(/[^a-zA-Z0-9\-]/g,'').toLowerCase() )
+            });
+        };
+
+        var on_finish = function( text_elt ) {
+            // Replace URL text with URL.
+            $('#item-url-text').hide();
+            $('#item-url').show();
+
+            // Set URL to new value.
+            var new_url = $('#item-url-text').text();
+            var item_url_obj = $('#item-url');
+            item_url_obj.attr( "href", new_url );
+            item_url_obj.text( new_url );
+        };
+
+        <% controller_name = get_controller_name( item ) %>
+        async_save_text("edit-identifier", "item-identifier", "${h.url_for( controller=controller_name, action='set_slug_async', id=trans.security.encode_id( item.id ) )}", "new_slug", null, false, 0, on_start, on_finish);
+    });
+    </script>
+</%def>
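
The keyup handler above normalizes the slug as the user types: runs of whitespace become '-', anything outside [a-zA-Z0-9-] is dropped, and the result is lower-cased. The same rule as a small Python sketch:

    import re

    def sanitize_slug(text):
        # Whitespace runs -> '-', drop other disallowed characters, lower-case.
        text = re.sub(r"\s+", "-", text)
        text = re.sub(r"[^a-zA-Z0-9\-]", "", text)
        return text.lower()

    assert sanitize_slug("My Cool Page!") == "my-cool-page"
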
diff --git a/templates/sorting_base.mako b/templates/sorting_base.mako
new file mode 100644
index 0000000..bd7dba6
--- /dev/null
+++ b/templates/sorting_base.mako
@@ -0,0 +1,27 @@
+<%def name="get_sort_url( sort_id, order, test_id, *args, **kwargs )">
+    <%
+        if sort_id == test_id:
+            if order == "asc":
+                tool_order = "desc"
+            elif order == "desc":
+                tool_order = "asc"
+            else:
+                tool_order = "default"
+        else:
+            tool_order = "default"
+    %>
+        
+    %if len(kwargs.keys()) > 0:
+        <a href="${h.url_for( controller=args[0], action=args[1], sort_id=test_id, order=tool_order, **kwargs )}">${" ".join(args[2:])}</a>
+    %else:
+        <a href="${h.url_for( controller=args[0], action=args[1], sort_id=test_id, order=tool_order )}">${" ".join(args[2:])}</a>
+    %endif
+</%def>
+
+<%def name="get_css()">
+    <style>
+    .${sort_id} {
+        visibility: visible
+    }
+    </style>
+</%def>
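
get_sort_url only flips the sort order for the column that is already being sorted on; clicking any other column resets it to "default". The toggle as a Python sketch:

    def next_order(sort_id, order, test_id):
        # Flip asc/desc on the active column; any other column resets.
        if sort_id != test_id:
            return "default"
        return {"asc": "desc", "desc": "asc"}.get(order, "default")
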
diff --git a/templates/spark_base.mako b/templates/spark_base.mako
new file mode 100644
index 0000000..6f39138
--- /dev/null
+++ b/templates/spark_base.mako
@@ -0,0 +1,68 @@
+<%def name="make_spark_settings( controller, action, limit, sort_id, order, time_period, **kwargs )">
+    <div id="spark_time_select">
+        <form method="post" controller=${controller} action=${action}>
+            <input type="hidden" value=${sort_id} name="sort_id">
+            <input type="hidden" value=${order} name="order">
+            %try:
+                <input type="hidden" value=${kwargs['entries']} name="entries">
+            %except KeyError:
+            %endtry
+
+            %try:
+                <input type="hidden" value=${kwargs['page']} name="page">
+            %except KeyError:
+            %endtry
+
+            %try:
+                <input type="hidden" value=${kwargs['offset']} name="offset">
+            %except KeyError:
+            %endtry
+
+            ${limit}
+            <select name="spark_time">
+                %if time_period == "days":
+                    <option value="days" selected="selected">Days</option>
+                %else:
+                    <option value="days">Days</option>
+                %endif
+                
+                %if time_period == "weeks":
+                    <option value="weeks" selected="selected">Weeks</option>
+                %else:
+                    <option value="weeks">Weeks</option>
+                %endif
+                
+                %if time_period == "months":
+                    <option value="months" selected="selected">Months</option>
+                %else:
+                    <option value="months">Months</option>
+                %endif
+                
+                %if time_period == "years":
+                    <option value="years" selected="selected">Years</option>
+                %else:
+                    <option value="years">Years</option>
+                %endif
+            </select>
+            <button id="spark_select">Go</button>
+        </form>
+    </div>
+</%def>
+
+<%def name="make_sparkline(id, data, sparktype, length)">
+    <%
+        color = '<span style="color: {{color}}">●</span>'
+        if sparktype == "bar":
+            tooltip = color + '{{value}} ' + length + '</span>'
+        else:
+            tooltip = color + '{{prefix}}{{y}} ' + length + '{{suffix}}</span>'
+    %>
+    <script>
+        $(document).ready(function(e) {
+            $("#${id}").sparkline(${data}, {
+                type: '${sparktype}',
+                tooltipFormat: '${tooltip}'
+            }).css("width", "1%");
+        })
+    </script>
+</%def>
\ No newline at end of file
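
make_spark_settings re-emits optional paging state (entries, page, offset) as hidden form fields, using %try/%except KeyError so that values which were not passed are simply omitted. The same pattern as a Python sketch, using a hypothetical hidden_fields helper:

    def hidden_fields(kwargs, names=("entries", "page", "offset")):
        # One hidden input per optional paging value that was supplied.
        fields = []
        for name in names:
            try:
                fields.append('<input type="hidden" value="%s" name="%s">' % (kwargs[name], name))
            except KeyError:
                pass  # value not passed; omit the field entirely
        return fields
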
diff --git a/templates/tagging_common.mako b/templates/tagging_common.mako
new file mode 100644
index 0000000..ece9d40
--- /dev/null
+++ b/templates/tagging_common.mako
@@ -0,0 +1,307 @@
+<%!
+    from cgi import escape
+    from galaxy.web.framework.helpers import iff
+    from random import random
+    from sys import maxint
+    from math import floor
+    from galaxy.model import Tag, ItemTagAssociation
+%>
+
+## Render a tagging element if there is a tagged_item.
+%if tagged_item is not None:
+    %if tag_type == "individual":
+        ${render_individual_tagging_element( user=user, tagged_item=tagged_item, elt_context=elt_context, in_form=in_form, input_size=input_size, tag_click_fn=tag_click_fn, use_toggle_link=use_toggle_link )}
+    %elif tag_type == "community":
+        ${render_community_tagging_element(tagged_item=tagged_item, elt_context=elt_context, tag_click_fn=tag_click_fn)}
+    %endif
+%endif
+
+## Render HTML for a list of tags.
+<%def name="render_tagging_element_html(elt_id=None, tags=None, editable=True, use_toggle_link=True, input_size='15', in_form=False, tag_type='individual', render_add_tag_button=True)">
+    ## Useful attributes.
+    <%
+        num_tags = len( tags )
+    %>
+    <div class="tag-element"
+        %if elt_id:
+            id="${elt_id}"
+        %endif
+        ## Do not display element if there are no tags and it is not editable.
+        %if num_tags == 0 and not editable:
+            style="display: none"
+        %endif
+    >
+        %if use_toggle_link:
+            <a class="toggle-link" href="#">${num_tags} Tag${iff( num_tags == 1, "", "s")}</a>
+        %endif
+        <div class="tag-area
+            %if tag_type == 'individual':
+                individual-tag-area
+            %endif
+        ">
+
+            ## Build buttons for current tags.
+            %for tag in tags:
+                <%
+                    ## Handle both Tag and ItemTagAssociation objects.
+                    if isinstance( tag, Tag ):
+                        tag_name = tag.name
+                        tag_value = None
+                    elif isinstance( tag, ItemTagAssociation ):
+                        tag_name = tag.user_tname
+                        tag_value = tag.user_value
+
+                    ## Convert tag name, value to unicode.
+                    if isinstance( tag_name, str ):
+                        tag_name = unicode( escape( tag_name ), 'utf-8' )
+                        if tag_value:
+                            tag_value = unicode( escape( tag_value ), 'utf-8' )
+                    if tag_value:
+                        tag_str = tag_name + ":" + tag_value
+                    else:
+                        tag_str = tag_name
+                %>
+                <span class="tag-button">
+                    <span class="tag-name">${tag_str | h}</span>
+                    %if editable:
+                        <img class="delete-tag-img" src="${h.url_for('/static/images/delete_tag_icon_gray.png')}"/>
+                    %endif
+                </span>
+            %endfor
+
+            ## Add tag input field. If the element is in a form, the tag input is a textarea; otherwise it is a text input.
+            %if editable:
+                %if in_form:
+                    <textarea class="tag-input" rows='1' cols='${input_size}'></textarea>
+                %else:
+                    <input class="tag-input" type='text' size='${input_size}'/>
+                %endif
+                ## Add "add tag" button.
+                %if render_add_tag_button:
+                    <img src='${h.url_for('/static/images/fugue/tag--plus.png')}' class="add-tag-button" title="Add tags"/>
+                %endif
+            %endif
+        </div>
+    </div>
+</%def>
+
+## Render tool tagging elements
+<%def name="render_tool_tagging_elements()">
+    <%
+        elt_id = int ( floor ( random()*maxint ) )
+        tags = trans.app.tag_handler.get_tool_tags()
+    %>
+    ${self.render_tagging_element_html(elt_id=elt_id, \
+                                        tags=tags, \
+                                        editable=False, \
+                                        use_toggle_link=False )}
+    <script type="text/javascript">
+        init_tag_click_function($('#${elt_id}'), tool_tag_click);
+    </script>
+</%def>
+
+## Render community tagging element.
+<%def name="render_community_tagging_element(tagged_item=None, elt_context=None, use_toggle_link=False, tag_click_fn='default_tag_click_fn')">
+    ## Build HTML.
+    <%
+        elt_id = int ( floor ( random()*maxint ) )
+        community_tags = trans.app.tag_handler.get_community_tags( item=tagged_item, limit=5 )
+    %>
+    ${self.render_tagging_element_html(elt_id=elt_id, \
+                                        tags=community_tags, \
+                                        use_toggle_link=use_toggle_link, \
+                                        editable=False, tag_type="community")}
+
+    ## Set up tag click function.
+    <script type="text/javascript">
+        init_tag_click_function($('#${elt_id}'), ${tag_click_fn});
+    </script>
+</%def>
+
+
+## Render individual tagging element.
+<%def name="render_individual_tagging_element(user=None, tagged_item=None, elt_context=None, use_toggle_link=True, in_form=False, input_size='15', tag_click_fn='default_tag_click_fn', get_toggle_link_text_fn='default_get_toggle_link_text_fn', editable=True, render_add_tag_button=True)">
+    ## Useful attributes.
+    <%
+        # Useful ids.
+        tagged_item_id = str( trans.security.encode_id ( tagged_item.id ) )
+        elt_id = int ( floor ( random()*maxint ) )
+
+        # Get list of user's item tags. TODO: implement owner_tags for all taggable objects and use here.
+        item_tags = [ tag for tag in tagged_item.tags if ( tag.user == user ) ]
+    %>
+
+    ## Build HTML.
+    ${self.render_tagging_element_html(elt_id=elt_id, tags=item_tags, editable=editable, use_toggle_link=use_toggle_link, input_size=input_size, in_form=in_form, render_add_tag_button=render_add_tag_button)}
+
+    ## Build script that augments tags using progressive javascript.
+    <script type="text/javascript">
+        //
+        // Set up autocomplete tagger.
+        //
+
+        //
+        // Default function to get the text displayed on the toggle link.
+        //
+        var default_get_toggle_link_text_fn = function(tags)
+        {
+            var text = "";
+            var num_tags = _.size(tags);
+            if (num_tags != 0)
+              {
+                text = num_tags + (num_tags != 1 ? " Tags" : " Tag");
+                /*
+                // Show first N tags; hide the rest.
+                var max_to_show = 1;
+
+                // Build tag string.
+                var tag_strs = new Array();
+                var count = 0;
+                for (tag_name in tags)
+                  {
+                    tag_value = tags[tag_name];
+                    tag_strs[tag_strs.length] = build_tag_str(tag_name, tag_value);
+                    if (++count == max_to_show)
+                      break;
+                  }
+                tag_str = tag_strs.join(", ");
+
+                // Finalize text.
+                var num_tags_hiding = num_tags - max_to_show;
+                text = "Tags: " + tag_str +
+                  (num_tags_hiding != 0 ? " and " + num_tags_hiding + " more" : "");
+                */
+              }
+            else
+              {
+                // No tags.
+                text = "Add tags";
+              }
+            return text;
+        };
+
+        // Default function to handle a tag click.
+        var default_tag_click_fn = function(tag_name, tag_value) { };
+
+        <%
+            ## Build dict of tag name, values.
+            tag_names_and_values = dict()
+            for tag in item_tags:
+                tag_name = escape( tag.user_tname )
+                tag_value = ""
+                if tag.value is not None:
+                    tag_value = escape( tag.user_value )
+
+                ## Tag names and values may be string or unicode object.
+                if isinstance( tag_name, str ):
+                    tag_names_and_values[unicode(tag_name, 'utf-8')] = unicode(tag_value, 'utf-8')
+                else: ## isinstance( tag_name, unicode )
+                    tag_names_and_values[tag_name] = tag_value
+        %>
+        var options =
+        {
+            tags : ${h.dumps(tag_names_and_values)},
+            editable : ${iff( editable, 'true', 'false' )},
+            get_toggle_link_text_fn: ${get_toggle_link_text_fn},
+            tag_click_fn: ${tag_click_fn},
+            ## Use forward slash in controller to suppress route memory.
+            ajax_autocomplete_tag_url: "${h.url_for( controller='/tag', action='tag_autocomplete_data', item_id=tagged_item_id, item_class=tagged_item.__class__.__name__ )}",
+            ajax_add_tag_url: "${h.url_for( controller='/tag', action='add_tag_async', item_id=tagged_item_id, item_class=tagged_item.__class__.__name__, context=elt_context )}",
+            ajax_delete_tag_url: "${h.url_for( controller='/tag', action='remove_tag_async', item_id=tagged_item_id, item_class=tagged_item.__class__.__name__, context=elt_context )}",
+            delete_tag_img: "${h.url_for('/static/images/delete_tag_icon_gray.png')}",
+            delete_tag_img_rollover: "${h.url_for('/static/images/delete_tag_icon_white.png')}",
+            use_toggle_link: ${iff( use_toggle_link, 'true', 'false' )}
+         };
+
+        $('#${elt_id}').autocomplete_tagging(options);
+    </script>
+
+    ## Use style to hide/display the tag area.
+    <style>
+    .tag-area {
+        display: ${iff( use_toggle_link, "none", "block" )};
+    }
+    </style>
+
+    <noscript>
+    <style>
+    .tag-area {
+        display: block;
+    }
+    </style>
+    </noscript>
+</%def>
+
+
+<%def name="community_tag_js( controller_name )">
+## Set up community tag and rating handling - used at page start-up.
+## controller_name: the model controller for the item being tagged - generally obtained with get_controller_name( item )
+<script type="text/javascript">
+    // Handle click on community tag.
+    function community_tag_click(tag_name, tag_value) {
+        var href = '${h.url_for ( controller='/' + controller_name , action='list_published')}';
+        href = href + "?f-tags=" + tag_name;
+        if (tag_value != undefined && tag_value != "") {
+            href = href + ":" + tag_value;
+        }
+        self.location = href;
+    }
+
+    // Map item rating to number of stars to show.
+    function map_rating_to_num_stars(rating) {
+        if (rating <= 0)
+            return 0;
+        else if (rating > 0 && rating <= 1.5)
+            return 1;
+        else if (rating > 1.5 && rating <= 2.5)
+            return 2;
+        else if (rating > 2.5 && rating <= 3.5)
+            return 3;
+        else if (rating > 3.5 && rating <= 4.5)
+            return 4;
+        else if (rating > 4.5)
+            return 5;
+    }
+
+    // Init. on document load.
+    $(function() {
+        // Set links to Galaxy screencasts to open in overlay.
+        $(this).find("a[href^='http://screencast.g2.bx.psu.edu/']").each( function() {
+            $(this).click( function() {
+                var href = $(this).attr('href');
+                show_in_overlay(
+                    {
+                        url: href,
+                        width: 640,
+                        height: 480,
+                        scroll: 'no'
+                    }
+                );
+                return false;
+            });
+        });
+
+        // Init user item rating.
+        $('.user_rating_star').rating({
+            callback: function(rating, link) {
+                $.ajax({
+                    type: "GET",
+                    url: "${h.url_for ( controller='/' + controller_name , action='rate_async' )}",
+                    data: { id : "${trans.security.encode_id( item.id )}", rating : rating },
+                    dataType: 'json',
+                    error: function() { alert( "Rating submission failed" ); },
+                    success: function( community_data ) {
+                        $('#rating_feedback').show();
+                        $('#num_ratings').text(Math.round(community_data[1]*10)/10);
+                        $('#ave_rating').text(community_data[0]);
+                        $('.community_rating_star').rating('readOnly', false);
+                        $('.community_rating_star').rating('select', map_rating_to_num_stars(community_data[0])-1);
+                        $('.community_rating_star').rating('readOnly', true);
+                    }
+                });
+            },
+            required: true // Hide cancel button.
+        });
+    });
+</script>
+</%def>
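
render_individual_tagging_element serializes the user's tags into a {name: value} dict that is handed to the autocomplete widget as JSON, with valueless tags mapped to the empty string. A sketch with plain (name, value) tuples standing in for ItemTagAssociation objects:

    import json

    def tags_to_options(item_tags):
        # item_tags: iterable of (name, value) pairs; value may be None.
        return json.dumps({name: (value if value is not None else "")
                           for name, value in item_tags})

    print(tags_to_options([("group", "rnaseq"), ("draft", None)]))
    # {"group": "rnaseq", "draft": ""}
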
diff --git a/templates/tool_shed_rating.mako b/templates/tool_shed_rating.mako
new file mode 100644
index 0000000..55def07
--- /dev/null
+++ b/templates/tool_shed_rating.mako
@@ -0,0 +1,33 @@
+<%
+    label = "ratings"
+    if num_ratings == 1:
+        label = "rating"
+%>
+<div>
+    <input name="star1-${item_id}" type="radio" class="community_rating_star star" disabled="disabled" value="1"
+    %if ave_item_rating > 0 and ave_item_rating <= 1.5:
+        checked="checked"
+    %endif
+    
+    />
+    <input name="star1-${item_id}" type="radio" class="community_rating_star star" disabled="disabled" value="2"
+    %if ave_item_rating > 1.5 and ave_item_rating <= 2.5:
+        checked="checked"
+    %endif
+    />
+    <input name="star1-${item_id}" type="radio" class="community_rating_star star" disabled="disabled" value="3"
+    %if ave_item_rating > 2.5 and ave_item_rating <= 3.5:
+        checked="checked"
+    %endif
+    />
+    <input name="star1-${item_id}" type="radio" class="community_rating_star star" disabled="disabled" value="4"
+    %if ave_item_rating > 3.5 and ave_item_rating <= 4.5:
+        checked="checked"
+    %endif
+    />
+    <input name="star1-${item_id}" type="radio" class="community_rating_star star" disabled="disabled" value="5"
+    %if ave_item_rating > 4.5:
+        checked="checked"
+    %endif
+    />
+</div>
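
The star that ends up checked is chosen by bucketing the average rating at the 1.5/2.5/3.5/4.5 boundaries, matching map_rating_to_num_stars in tagging_common.mako. The bucketing as a Python sketch:

    def stars_for(rating):
        # Map an average rating to 0-5 stars at the .5 boundaries.
        for stars, upper in enumerate((0, 1.5, 2.5, 3.5, 4.5)):
            if rating <= upper:
                return stars
        return 5
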
diff --git a/templates/user/change_password.mako b/templates/user/change_password.mako
new file mode 100644
index 0000000..ee806e7
--- /dev/null
+++ b/templates/user/change_password.mako
@@ -0,0 +1,60 @@
+<%inherit file="/base.mako"/>
+
+%if display_top:
+<script type="text/javascript">
+    if(window.top.location != window.location)
+    {
+        window.top.location.href = window.location.href;
+    }
+</script>
+%endif
+
+
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<script>
+$(function() {
+  $("[name='password']").complexify({'minimumChars':6}, function(valid, complexity){
+    var progressBar = $('.progress-bar');
+    var color = valid ? 'lightgreen' : 'red';
+
+    progressBar.css('background-color', color);
+    progressBar.css({'width': complexity + '%'});
+  });
+});
+</script>
+
+<div class="toolForm">
+    <form name="change_password" id="change_password" action="${h.url_for( controller='user', action='change_password' )}" method="post" >
+        <input type="hidden" name="display_top" value="${display_top}"/>
+        <div class="toolFormTitle">Change Password</div>
+        %if token:
+            <input type="hidden" name="token" value="${token|h}"/>
+        %else:
+            <div class="form-row">
+                <label>Current password:</label>
+                <input type="password" name="current" value="" size="40"/>
+            </div>
+        %endif
+        <div class="form-row">
+            <label>New password:</label>
+            <input type="password" name="password" value="" size="40"/>
+        </div>
+        <div class="progress">
+            <div id="complexity-bar" class="progress-bar" role="progressbar">
+                Strength
+            </div>
+        </div>
+        <div class="form-row">
+            <label>Confirm:</label>
+            <input type="password" name="confirm" value="" size="40"/>
+        </div>
+        <div class="form-row">
+            <input type="submit" name="change_password_button" value="Save"/>
+        </div>
+    </form>
+</div>
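
The form binds jquery.complexify to the password field with a 6-character minimum and colors the progress bar by validity. A rough Python stand-in for that client-side check (complexify's real score also weights character-set variety, so this is only an approximation):

    def password_meter(password, minimum_chars=6):
        # Approximate stand-in for the complexify callback's inputs.
        valid = len(password) >= minimum_chars
        complexity = min(100, len(password) * 10)   # crude percentage
        color = "lightgreen" if valid else "red"
        return valid, complexity, color
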
diff --git a/templates/user/communication_settings.mako b/templates/user/communication_settings.mako
new file mode 100644
index 0000000..9c572a3
--- /dev/null
+++ b/templates/user/communication_settings.mako
@@ -0,0 +1,19 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <form name="change_communication" id="change_communication" action="${h.url_for( controller='user', action='change_communication', cntrller=cntrller )}" method="post" >
+        <div class="toolFormTitle">Change your communication settings</div>
+        <div class="form-row">
+            <label>Activate real-time communication with other Galaxy users.</label>
+            <input type="checkbox" name="enable_communication_server" ${activated} />
+        </div>
+        <div class="form-row">
+            <input type="submit" name="change_communication_button" value="Save"/>
+        </div>
+    </form>
+</div>
diff --git a/templates/user/dbkeys.mako b/templates/user/dbkeys.mako
new file mode 100644
index 0000000..e331a07
--- /dev/null
+++ b/templates/user/dbkeys.mako
@@ -0,0 +1,249 @@
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            return '/webapps/galaxy/base_panels.mako'
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.active_view="user"
+    self.overlay_visible=False
+%>
+</%def>
+
+<%def name="title()">Custom Database Builds</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style type="text/css">
+        #custom_dbkeys * {
+            min-width: 100px;
+            vertical-align: text-top;
+        }
+        pre {
+            padding: 0;
+            margin: 0;
+        }
+        ## If page is displayed in panels, pad from edges for readability.
+        %if context.get('use_panels'):
+        div#center {
+            padding: 10px;
+        }
+        %endif
+        div.def_tab {
+            float: left;
+            padding: 0.2em 0.5em;
+            background-color: white;
+        }
+        div.def_tab.active {
+            background-color: #CCF;
+            border: solid 1px #66A;
+        }
+    </style>
+</%def>
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   
+    <script type="text/javascript">
+
+    $(function() {
+        $(".db_hide").each(function() {
+            var pre = $(this);
+            pre.hide();
+            pre.siblings("span").wrap( "<a href='javascript:void(0);'></a>" ).click( function() {
+                pre.toggle();
+            });     
+        });
+        $("#installed_builds").hide();
+        $("#show_installed_builds").click(function() {
+            $("#installed_builds").show();
+        });
+        
+        // Set up behavior for build definition tab controls.
+        $("div.def_tab > a").each(function() {
+            $(this).click(function() {
+                var tab_id = $(this).attr("id");
+
+                // Hide all build inputs, help.
+                $("div.build_definition").children(":input").hide();
+                $(".infomessagesmall > div").hide();
+                
+                // Show input item, help corresponding to tab id.
+                $("#" + tab_id + "_input").show();
+                $("." + tab_id + "_help").show();
+                
+                // Update tabs.
+                $("div.def_tab").removeClass("active");
+                $(this).parent().addClass("active");
+            });
+        });
+        
+        ## If there are fasta HDAs available, show fasta tab; otherwise show len file tab.
+        // Set starting tab.
+        % if fasta_hdas.first():
+            $("#fasta").click();
+        % else:
+            $("#len_file").click();
+        % endif
+        
+        // Before submit, remove inputs not associated with the active tab.
+        $("#submit").click(function() {
+            var id = $(this).parents("form").find(".active > a").attr("id");
+            $("div.build_definition").children(":input").each(function() {
+                if ( $(this).attr("id") !== (id + "_input")  ) {
+                    $(this).remove();
+                }
+            });
+        });
+    });
+
+    </script>
+</%def>
+
+<%def name="center_panel()">
+    ${self.body()}
+</%def>
+
+<%def name="body()">
+    % if message:
+        <div class="errormessagelarge">${message}</div>
+    % elif lines_skipped > 0:
+        <div class="warningmessagelarge">Skipped ${lines_skipped} lines that could not be parsed. (Line was either blank or not 2-column, with 2nd column being an integer)</div>
+    % endif
+
+    <h3>Current Custom Builds:</h3>
+
+    % if dbkeys:
+        <table id="custom_dbkeys" class="colored" cellspacing="0" cellpadding="0">
+            <tr class="header">
+                <th>Name</th>
+                <th>Key</th>
+                <th>Number of chroms/contigs</th>
+                <th></th>
+            </tr>
+        % for key, dct in dbkeys.iteritems():
+            <tr>
+                <td>${dct['name'] | h}</td>
+                <td>${key | h}</td>
+                <td>
+    ##                <span>${len(dct["chroms"])} entries</span>
+    ##                <pre id="pre_${key}" class="db_hide">
+    ##                    <table cellspacing="0" cellpadding="0">
+    ##                        <tr><th>Chrom</th><th>Length</th></tr>
+    ##                        % for chrom, chrom_len in dct["chroms"].iteritems():
+    ##                            <tr><td>${chrom | h}</td><td>${chrom_len | h}</td></tr>
+    ##                        % endfor
+    ##                    </table>
+    ##                </pre>
+                    % if 'count' in dct:
+                        ${dct['count']}
+                    % else:
+                        Processing
+                    % endif
+                </td>
+                <td><form action="dbkeys" method="post"><input type="hidden" name="key" value="${key | h}" /><input type="submit" name="delete" value="Delete" /></form></td>
+            </tr>
+        % endfor
+        </table>
+    % else:
+        <p>You currently have no custom builds.</p>
+    % endif
+    
+    <p>
+        <a id="show_installed_builds" href="javascript:void(0);">Show loaded, system-installed builds</a>
+        <blockquote id="installed_builds">${installed_len_files}</blockquote>
+    </p>
+    
+    <hr />
+    <h3>Add a Custom Build</h3>
+    <form action="dbkeys" method="post" enctype="multipart/form-data">
+        ## Include hidden param for panels:
+        %if use_panels:
+            <input type="hidden" name="use_panels" value="True">
+        %endif
+        ## Custom build via fasta in history.
+        <div class="toolForm" style="float: left;">
+            <div class="toolFormTitle">New Build</div>
+            <div class="toolFormBody">
+                <div class="form-row">
+                    <label for="name">Name (eg: Hamster):</label>
+                    <input type="text" id="name" name="name" />
+                </div>
+                <div class="form-row">
+                    <label for="key">Key (eg: hamster_v1):</label>
+                    <input type="text" id="key" name="key" />
+                </div>
+                <div class="form-row build_definition">
+                    <label>Definition:</label>
+                    <div class="def_tab">
+                        <a id="fasta" href="javascript:void(0)">FASTA</a>
+                    </div>
+                    <div class="def_tab">
+                        <a id="len_file" href="javascript:void(0)">Len File</a>
+                    </div>
+                    <div class="def_tab">
+                        <a id="len_entry" href="javascript:void(0)">Len Entry</a>
+                    </div>
+                    <div style="clear: both; padding-bottom: 0.5em"></div>
+                    <select id="fasta_input" name="dataset_id">
+                    %for dataset in fasta_hdas:
+                        <option value="${trans.security.encode_id( dataset.id )}">${dataset.hid | h}: ${dataset.name | h}</option>
+                    %endfor
+                    </select>
+                    <input type="file" id="len_file_input" name="len_file" /></input>
+                    <textarea id="len_entry_input" name="len_text" cols="30" rows="8"></textarea>
+                </div>            
+                <div class="form-row"><input id="submit" type="submit" name="add" value="Submit"/></div>
+            </div>
+        </div>
+    </form>
+    <div class="infomessagesmall" style="float: left; margin-left: 10px; width: 40%;">
+        <div class="fasta_help">
+            <h3>FASTA format</h3>
+            <p>
+                This is a multi-fasta file from your current history that provides the genome 
+                sequences for each chromosome/contig in your build.
+            </p>
+            
+            <p>
+                Here is a snippet from an example multi-fasta file:
+                <pre>
+    >chr1
+    ATTATATATAAGACCACAGAGAGAATATTTTGCCCGG...
+    >chr2
+    GGCGGCCGCGGCGATATAGAACTACTCATTATATATA...
+    ...
+                </pre>
+            </p>
+        </div>
+        <div class="len_file_help len_entry_help">
+            <h3>Length Format</h3>
+            <p>
+                The length format is two-column, separated by whitespace, of the form:
+                <pre>chrom/contig   length of chrom/contig</pre>
+            </p>
+            <p>
+                For example, the first few entries of <em>mm9.len</em> are as follows:
+                <pre>
+    chr1    197195432
+    chr2    181748087
+    chr3    159599783
+    chr4    155630120
+    chr5    152537259
+                </pre>
+            </p>
+        
+            <p>Trackster uses this information to populate the select box for chrom/contig, and
+            to set the maximum basepair of the track browser. You may either upload a .len file
+            of this format (Len File option), or directly enter the information into the box 
+            (Len Entry option).</p>
+        </div>
+    </div>
+</%def>
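
The "Skipped ${lines_skipped} lines" warning at the top of this template reflects the two-column len format described in the help text: blank lines and lines whose second column is not an integer are dropped. A sketch of that parse (the server-side implementation may differ in detail):

    def parse_len_file(text):
        # Returns ({chrom: length}, lines_skipped) for 2-column len data.
        chroms, skipped = {}, 0
        for line in text.splitlines():
            fields = line.split()
            if len(fields) == 2 and fields[1].isdigit():
                chroms[fields[0]] = int(fields[1])
            else:
                skipped += 1
        return chroms, skipped
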
diff --git a/templates/user/edit_address.mako b/templates/user/edit_address.mako
new file mode 100644
index 0000000..6f27d5e
--- /dev/null
+++ b/templates/user/edit_address.mako
@@ -0,0 +1,96 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+<br/>
+<br/>
+<h3>Edit address</h3>
+
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button"  href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, id=trans.security.encode_id( user.id) )}">Manage user information</a>
+    </li>
+</ul>
+<div class="toolForm">
+    <div class="toolFormTitle">Edit address</div>
+    <div class="toolFormBody">
+        <form name="login_info" id="login_info" action="${h.url_for( controller='user', action='edit_address', cntrller=cntrller, address_id=trans.security.encode_id( address_obj.id ), id=trans.security.encode_id( user.id ) )}" method="post" >
+            <div class="form-row">
+                <label>Short Description:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="short_desc" value="${address_obj.desc | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="${address_obj.name | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Institution:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="institution" value="${address_obj.institution | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Address:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="address" value="${address_obj.address | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>City:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="city" value="${address_obj.city | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>State/Province/Region:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="state" value="${address_obj.state | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Postal Code:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="postal_code" value="${address_obj.postal_code | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Country:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="country" value="${address_obj.country | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Phone:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="phone" value="${address_obj.phone | h}" size="40">
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="edit_address_button" value="Save changes">
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/user/index.mako b/templates/user/index.mako
new file mode 100644
index 0000000..7acad13
--- /dev/null
+++ b/templates/user/index.mako
@@ -0,0 +1,47 @@
+<%inherit file="/base.mako"/>
+
+%if trans.user:
+    <h2>${_('User preferences')}</h2>
+    <p>You are currently logged in as ${trans.user.email|h}.</p>
+    <ul>
+        %if t.webapp.name == 'galaxy':
+            %if not trans.app.config.use_remote_user:
+                <li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller )}">${_('Manage your information')}</a> (email, address, etc.)</li>
+                <li><a href="${h.url_for( controller='user', action='change_password' )}">${_('Change your password')}</a></li>
+            %endif
+            %if trans.app.config.enable_communication_server:
+                <li><a href="${h.url_for( controller='user', action='change_communication', cntrller=cntrller )}">${_('Change your communication settings')}</a></li>
+            %endif
+            <li><a href="${h.url_for( controller='user', action='set_default_permissions', cntrller=cntrller )}">${_('Change default permissions')}</a> for new histories</li>
+            <li><a href="${h.url_for( controller='user', action='api_keys', cntrller=cntrller )}">${_('Manage your API keys')}</a></li>
+            <li><a href="${h.url_for( controller='user', action='toolbox_filters', cntrller=cntrller )}">${_('Manage your ToolBox filters')}</a></li>
+            %if trans.app.config.enable_openid and not trans.app.config.use_remote_user:
+                <li><a href="${h.url_for( controller='user', action='openid_manage', cntrller=cntrller )}">${_('Manage OpenIDs')}</a> linked to your account</li>
+            %endif
+            <li><a href="${h.url_for( controller='user', action='logout', logout_all=True )}" target="_top">${_('Logout')}</a> ${_('of all user sessions')}</li>
+        %else:
+            <li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller )}">${_('Manage your information')}</a></li>
+            <li><a href="${h.url_for( controller='user', action='change_password' )}">${_('Change your password')}</a></li>
+            <li><a href="${h.url_for( controller='user', action='api_keys', cntrller=cntrller )}">${_('Manage your API keys')}</a></li>
+            <li><a href="${h.url_for( controller='repository', action='manage_email_alerts', cntrller=cntrller )}">${_('Manage your email alerts')}</a></li>
+            <li><a href="${h.url_for( controller='user', action='logout', logout_all=True )}" target="_top">${_('Logout')}</a> ${_('of all user sessions')}</li>
+        %endif
+    </ul>
+    %if t.webapp.name == 'galaxy':
+        <p>
+            You are using <strong>${trans.user.get_disk_usage( nice_size=True )}</strong> of disk space in this Galaxy instance.
+            %if trans.app.config.enable_quotas:
+                Your disk quota is: <strong>${trans.app.quota_agent.get_quota( trans.user, nice_size=True )}</strong>.
+            %endif
+            Is your usage more than expected?  See the <a href="https://wiki.galaxyproject.org/Learn/ManagingDatasets" target="_blank">documentation</a> for tips on how to find all of the data in your account.
+        </p>
+    %endif
+%else:
+    %if not message:
+        <p>${n_('You are currently not logged in.')}</p>
+    %endif
+    <ul>
+        <li><a href="${h.url_for( controller='user', action='login' )}">${_('Login')}</li>
+        <li><a href="${h.url_for( controller='user', action='create', cntrller='user' )}">${_('Register')}</a></li>
+    </ul>
+%endif
diff --git a/templates/user/info.mako b/templates/user/info.mako
new file mode 100644
index 0000000..c7a17eb
--- /dev/null
+++ b/templates/user/info.mako
@@ -0,0 +1,130 @@
+<%inherit file="/base.mako"/>
+
+<% is_admin = cntrller == 'admin' and trans.user_is_admin() %>
+
+<%def name="render_user_info()">
+
+    <script type="text/javascript">
+        $(document).ready(function() {
+
+            function validateString(test_string, type) {
+                var mail_re = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
+                var username_re = /^[a-z0-9._\-]{3,255}$/;
+                if (type === 'email') {
+                    return mail_re.test(test_string);
+                } else if (type === 'username'){
+                    return username_re.test(test_string);
+                }
+            }
+
+            function renderError(message) {
+                $(".donemessage").hide();
+                if ($(".errormessage").length === 1) {
+                    $(".errormessage").html(message);
+                } else {
+                    var div = document.createElement( "div" );
+                    div.className = "errormessage";
+                    div.innerHTML = message;
+                    document.body.insertBefore( div, document.body.firstChild );
+                }
+            }
+
+            function renderDone(message) {
+                $(".errormessage").hide();
+                if ($(".donemessage").length === 1) {
+                    $(".donemessage").html(message);
+                } else {
+                    var div = document.createElement( "div" );
+                    div.className = "donemessage";
+                    div.innerHTML = message;
+                    document.body.insertBefore( div, document.body.firstChild );
+                }
+            }
+
+            original_email = $( '#email_input' ).val();
+            original_username = $( '#name_input' ).val();
+
+            $( '#login_info' ).bind( 'submit', function( e ) {
+                var error_text_email = 'The format of the email address is not correct.';
+                var error_text_email_long = 'Email address cannot be more than 255 characters in length.';
+                var error_text_username_characters = "Public name must contain only lowercase letters, numbers, '.', '_' and '-'. It also must be between 3 and 255 characters in length.";
+                var email = $( '#email_input' ).val();
+                var name = $( '#name_input' ).val();
+                var validForm = true;
+                var nothing_changed = ( original_email === email && original_username === name );
+                // we need this value to detect submitting at backend
+                var hidden_input = '<input type="hidden" id="login_info_button" name="login_info_button" value="Submit"/>';
+                $( '#send' ).attr( 'disabled', 'disabled' );
+                $( "#email_input" ).before( hidden_input );
+                if ( original_email !== email ){
+                    if ( email.length > 255 ){ renderError( error_text_email_long ); validForm = false; }
+                    else if ( !validateString( email, "email" ) ){ renderError( error_text_email ); validForm = false; }
+                }
+                if ( original_username !== name ){
+                    if ( name && !( validateString( name,"username" ) ) ){ renderError( error_text_username_characters ); validForm = false; }
+                }
+                if ( nothing_changed ){
+                    renderDone( "Nothing has changed." );
+                }
+                if ( !validForm  || nothing_changed ) {
+                    e.preventDefault();
+                    // reactivate the button if the form wasn't submitted
+                    $( '#send' ).removeAttr( 'disabled' );
+                    }
+                });
+        });
+
+    </script>
+
+
+    <h2>Manage User Information</h2>
+    %if not is_admin:
+        <ul class="manage-table-actions">
+            <li>
+                <a class="action-button"  href="${h.url_for( controller='user', action='index', cntrller=cntrller )}">User preferences</a>
+            </li>
+        </ul>
+    %endif
+    <div class="toolForm">
+        <form name="login_info" id="login_info" action="${h.url_for( controller='user', action='edit_info', cntrller=cntrller, user_id=trans.security.encode_id( user.id ) )}" method="post" >
+            <div class="toolFormTitle">Login Information</div>
+            <div class="form-row">
+                <label>Email address:</label>
+                <input type="text" id ="email_input" name="email" value="${email | h}" size="40"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    If you change your email address, an activation link will be sent to the new address; you must visit it to reactivate your account.
+                </div>
+            </div>
+            <div class="form-row">
+                <label>Public name:</label>
+                %if t.webapp.name == 'tool_shed':
+                    %if user.active_repositories:
+                        <input type="hidden" id="name_input" name="username" value="${username | h}"/>
+                        ${username | h}
+                        <div class="toolParamHelp" style="clear: both;">
+                            You cannot change your public name after you have created a repository in this Tool Shed.
+                        </div>
+                    %else:
+                        <input type="text" id="name_input" name="username" size="40" value="${username | h}"/>
+                        <div class="toolParamHelp" style="clear: both;">
+                            Your public name provides a means of identifying you publicly within this Tool Shed. Public
+                            names must be at least three characters in length and contain only lower-case letters, numbers,
+                            dots, underscores, and dashes ('.', '_', '-').  You cannot change your public name after you have created a repository
+                            in this Tool Shed.
+                        </div>
+                    %endif
+                %else:
+                    <input type="text" id="name_input" name="username" size="40" value="${username | h}"/>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Your public name is an identifier that will be used to generate addresses for information
+                        you share publicly. Public names must be at least three characters in length and contain only lower-case
+                        letters, numbers, dots, underscores, and dashes ('.', '_', '-').
+                    </div>
+                %endif
+            </div>
+            <div class="form-row">
+                <input type="submit" id="send" name="login_info_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</%def>
diff --git a/templates/user/login.mako b/templates/user/login.mako
new file mode 100644
index 0000000..c68980b
--- /dev/null
+++ b/templates/user/login.mako
@@ -0,0 +1,124 @@
+<%!
+# This is a hack; we should restructure templates to avoid this.
+def inherit(context):
+    if context.get('trans').webapp.name == 'galaxy' and context.get( 'use_panels', True ):
+        return '/webapps/galaxy/base_panels.mako'
+    elif context.get('trans').webapp.name == 'tool_shed' and context.get( 'use_panels', True ):
+        return '/webapps/tool_shed/base_panels.mako'
+    else:
+        return '/base.mako'
+%>
+
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view=active_view
+    self.message_box_visible=False
+%>
+</%def>
+
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="center_panel()">
+    ${body()}
+</%def>
+
+<%def name="body()">
+
+    %if redirect_url:
+        <script type="text/javascript">  
+            top.location.href = '${redirect_url | h}';
+        </script>
+    %endif
+
+    %if context.get('use_panels'):
+        <div style="margin: 1em;">
+    %else:
+        <div>
+    %endif
+
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+
+    %if not trans.user:
+
+        ${render_login_form()}
+
+        %if trans.app.config.enable_openid:
+            <br/>
+            ${render_openid_form( redirect, False, openid_providers )}
+        %endif
+
+        %if trans.app.config.get( 'terms_url', None ) is not None:
+            <br/>
+            <p>
+                <a href="${trans.app.config.get('terms_url', None)}">Terms and Conditions for use of this service</a>
+            </p>
+        %endif
+
+    %endif
+
+    </div>
+
+</%def>
+
+<%def name="render_login_form( form_action=None )">
+
+    <%
+        if form_action is None:
+            form_action = h.url_for( controller='user', action='login', use_panels=use_panels )
+    %>
+
+    %if header:
+        ${header}
+    %endif
+    <div class="toolForm">
+        <div class="toolFormTitle">Login</div>
+        <form name="login" id="login" action="${form_action}" method="post" >
+            <div class="form-row">
+                <label>Username / Email Address:</label>
+                <input type="text" name="login" value="${login or ''| h}" size="40"/>
+                <input type="hidden" name="redirect" value="${redirect | h}" size="40"/>
+            </div>
+            <div class="form-row">
+                <label>Password:</label>
+                <input type="password" name="password" value="" size="40"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    <a href="${h.url_for( controller='user', action='reset_password', use_panels=use_panels )}">Forgot password? Reset here</a>
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="login_button" value="Login"/>
+            </div>
+        </form>
+    </div>
+
+</%def>
+
+<%def name="render_openid_form( redirect, auto_associate, openid_providers )">
+    <div class="toolForm">
+        <div class="toolFormTitle">OpenID Login</div>
+        <form name="openid" id="openid" action="${h.url_for( controller='user', action='openid_auth' )}" method="post" target="_parent" >
+            <div class="form-row">
+                <label>OpenID URL:</label>
+                <input type="text" name="openid_url" size="60" style="background-image:url('${h.url_for( '/static/images/openid-16x16.gif' )}' ); background-repeat: no-repeat; padding-right: 20px; background-position: 99% 50%;"/>
+                <input type="hidden" name="redirect" value="${redirect | h}" size="40"/>
+            </div>
+            <div class="form-row">
+                Or, authenticate with your <select name="openid_provider">
+                %for provider in openid_providers:
+                    <option value="${provider.id}">${provider.name}</option>
+                %endfor
+                </select> account.
+            </div>
+            <div class="form-row">
+                <input type="submit" name="login_button" value="Login"/>
+            </div>
+        </form>
+    </div>
+
+</%def>
diff --git a/templates/user/logout.mako b/templates/user/logout.mako
new file mode 100644
index 0000000..fe0a037
--- /dev/null
+++ b/templates/user/logout.mako
@@ -0,0 +1,53 @@
+<%!
+# This is a hack; we should restructure templates to avoid this.
+def inherit(context):
+    if context.get('trans').webapp.name == 'galaxy':
+        return '/webapps/galaxy/base_panels.mako'
+    elif context.get('trans').webapp.name == 'tool_shed':
+        return '/webapps/tool_shed/base_panels.mako'
+    else:
+        return '/base.mako'
+%>
+
+<%inherit file="${inherit(context)}"/>
+
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.active_view="user"
+    self.overlay_visible=False
+%>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        div#center {
+            padding: 10px;
+        }
+    </style>
+</%def>
+
+<%def name="title()">Galaxy :: Logout</%def>
+
+<%def name="center_panel()">
+    ${self.body()}
+</%def>
+
+<%def name="body()">
+    <script type="text/javascript">
+        $(function(){
+            // HACK: this should happen before we reach this page, i.e. before the session is logged out.
+            if( top.Galaxy && top.Galaxy.user ){
+                top.Galaxy.user.clearSessionStorage();
+            }
+        });
+    </script>
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+</%def>
diff --git a/templates/user/new_address.mako b/templates/user/new_address.mako
new file mode 100644
index 0000000..c80f613
--- /dev/null
+++ b/templates/user/new_address.mako
@@ -0,0 +1,97 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+<br/>
+<br/>
+<h3>Add new address</h3>
+
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button"  href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, id=trans.security.encode_id( user.id) )}">
+        <span>Manage User Information</span></a>
+    </li>
+</ul>
+<div class="toolForm">
+    <div class="toolFormTitle">Add new address</div>
+    <div class="toolFormBody">
+        <form name="login_info" id="login_info" action="${h.url_for( controller='user', action='new_address', cntrller=cntrller, id=trans.security.encode_id( user.id ) )}" method="post" >
+            <div class="form-row">
+                <label>Short Description:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="short_desc" value="${short_desc | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="${name | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Institution:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="institution" value="${institution | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Address:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="address" value="${address | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>City:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="city" value="${city | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>State/Province/Region:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="state" value="${state | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Postal Code:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="postal_code" value="${postal_code | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Country:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="country" value="${country | h}" size="40">
+                </div>
+                <div class="toolParamHelp" style="clear: both;">Required</div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Phone:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="phone" value="${phone | h}" size="40">
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="new_address_button" value="Save">
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/user/openid_associate.mako b/templates/user/openid_associate.mako
new file mode 100644
index 0000000..2991a94
--- /dev/null
+++ b/templates/user/openid_associate.mako
@@ -0,0 +1,73 @@
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            return '/webapps/galaxy/base_panels.mako'
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view=active_view
+    self.message_box_visible=False
+%>
+</%def>
+
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="login.mako" import="render_login_form" />
+<%namespace file="register.mako" import="render_registration_form" />
+
+<%def name="center_panel()">
+    ${body()}
+</%def>
+
+<%def name="body()">
+
+    <div style="overflow: auto; height: 100%">
+    %if context.get('use_panels'):
+        <div class="page-container" style="padding: 10px;">
+    %else:
+        <div class="page-container">
+    %endif
+
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+
+    <h2>OpenID Account Association</h2>
+    <div>
+        OpenIDs must be associated with a Galaxy account before they can be used for authentication.  This only needs to be done once per OpenID.  You may associate your OpenID with an existing Galaxy account, or create a new one.
+    </div>
+    <br/>
+
+    %if len( openids ) > 1:
+        <div>
+            The following OpenIDs will be associated with the account chosen or created below.
+            <ul>
+            %for openid in openids:
+                <li>${openid.openid | h}</li>
+            %endfor
+            </ul>
+        </div>
+    %else:
+        <div>
+            The OpenID <strong>${openids[0].openid | h}</strong> will be associated with the account chosen or created.
+        </div>
+    %endif
+    <br/>
+
+    <% form_action = h.url_for( controller='user', action='openid_associate', cntrller=cntrller, use_panels=use_panels ) %>
+
+    ${render_login_form( form_action=form_action )}
+
+    <br/>
+
+    ${render_registration_form( form_action=form_action )}
+
+    </div>
+    </div>
+
+</%def>
diff --git a/templates/user/openid_manage.mako b/templates/user/openid_manage.mako
new file mode 100644
index 0000000..dfd0d21
--- /dev/null
+++ b/templates/user/openid_manage.mako
@@ -0,0 +1,11 @@
+## Template generates a grid that enables user to select items.
+<%inherit file="../grid_base.mako" />
+
+<%namespace file="login.mako" import="render_openid_form" />
+
+<%def name="load()">
+    <h2>Associate more OpenIDs</h2>
+    ${render_openid_form( kwargs['redirect'], True, kwargs['openid_providers'] )}
+    <br/><br/>
+    ${parent.load()}
+</%def>
diff --git a/templates/user/permissions.mako b/templates/user/permissions.mako
new file mode 100644
index 0000000..fd060fb
--- /dev/null
+++ b/templates/user/permissions.mako
@@ -0,0 +1,19 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+
+<%def name="title()">Change Default Permissions on New Histories</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button"  href="${h.url_for( controller='user', cntrller=cntrller, action='index')}">User preferences</a>
+    </li>
+</ul>
+%if trans.user:
+    ${render_permission_form( trans.user, trans.user.email, h.url_for( controller='user', action='set_default_permissions', cntrller=cntrller ), trans.user.all_roles() )}
+%endif
diff --git a/templates/user/register.mako b/templates/user/register.mako
new file mode 100644
index 0000000..7de41b2
--- /dev/null
+++ b/templates/user/register.mako
@@ -0,0 +1,215 @@
+<%!
+# This is a hack; we should restructure templates to avoid this.
+def inherit(context):
+    if context.get('trans').webapp.name == 'galaxy' and context.get( 'use_panels', True ):
+        return '/webapps/galaxy/base_panels.mako'
+    else:
+        return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="user"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="center_panel()">
+    ${body()}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="body()">
+    <div style="${ 'margin: 1em;' if context.get( 'use_panels', True ) else '' }">
+
+        %if redirect_url:
+            <script type="text/javascript">
+                top.location.href = '${redirect_url | h}';
+            </script>
+        %elif message:
+            ${render_msg( message, status )}
+        %endif
+
+        ## An admin user may be creating a new user account, in which case we want to display the registration form.
+        ## But if the current user is not an admin user, then don't display the registration form.
+        %if ( cntrller=='admin' and trans.user_is_admin() ) or not trans.user:
+            ${render_registration_form()}
+
+            %if trans.app.config.get( 'terms_url', None ) is not None:
+                <br/>
+                <p>
+                    <a href="${trans.app.config.get('terms_url', None)}">Terms and Conditions for use of this service</a>
+                </p>
+            %endif
+        %endif
+
+    </div>
+</%def>
+
+<%def name="render_registration_form( form_action=None )">
+
+    <%
+        if form_action is None:
+            form_action = h.url_for( controller='user', action='create', cntrller=cntrller )
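+        # CheckboxField is Galaxy's form_builder helper; it renders the
+        # mailing-list subscription checkbox used further down in this form.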
+        from galaxy.web.form_builder import CheckboxField
+        subscribe_check_box = CheckboxField( 'subscribe' )
+    %>
+
+    <script type="text/javascript">
+        $(document).ready(function() {
+
+            function validateString(test_string, type) {
+                var mail_re = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
+                //var mail_re_RFC822 = /^([^\x00-\x20\x22\x28\x29\x2c\x2e\x3a-\x3c\x3e\x40\x5b-\x5d\x7f-\xff]+|\x22([^\x0d\x22\x5c\x80-\xff]|\x5c[\x00-\x7f])*\x22)(\x2e([^\x00-\x20\x22\x28\x29\x2c\x2e\x3a-\x3c\x3e\x40\x5b-\x5d\x7f-\xff]+|\x22([^\x0d\x22\x5c\x80-\xff]|\x5c[\x00-\x7f])*\x22))*\x40([^\x00-\x20\x22\x28\x29\x2c\x2e\x3a-\x3c\x3e\x40\x5b-\x5d\x7f-\xff]+|\x5b([^\x0d\x5b-\x5d\x80-\xff]|\x5c[\x00-\x7f])*\x5d)(\x2e([^\x00-\x20\x22\x28\x29\x2c\x2e\x3a-\x3c\x3e\x40\x5b-\x5d\x7f-\xff] [...]
+                var username_re = /^[a-z0-9._\-]{3,255}$/;
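+                // username_re mirrors the help text shown beside the field:
+                // 3-255 characters of lowercase letters, digits, '.', '_' and '-'.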
+                if (type === 'email') {
+                    return mail_re.test(test_string);
+                } else if (type === 'username'){
+                    return username_re.test(test_string);
+                }
+            }
+
+            function renderError(message) {
+                if (!$(".errormessage").size()) {
+                    $('<div/>').addClass('errormessage').insertBefore('#registrationForm');
+                }
+                $(".errormessage").html(message);
+            }
+
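+            // The jquery.complexify plugin reports a 0-100 strength score used
+            // as the meter width; minimumChars matches the 6-character check
+            // in the submit handler below.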
+            $("[name='password']").complexify({'minimumChars':6}, function(valid, complexity){
+                var progressBar = $('.progress-bar');
+                var color = valid ? 'lightgreen' : 'red';
+
+                progressBar.css('background-color', color);
+                progressBar.css({'width': complexity + '%'});
+            });
+
+            $('#registration').bind('submit', function(e) {
+                $('#send').attr('disabled', 'disabled');
+
+                // we need this value to detect submitting at backend
+                var hidden_input = '<input type="hidden" id="create_user_button" name="create_user_button" value="Submit"/>';
+                $("#email_input").before(hidden_input);
+
+                var error_text_email = 'The format of the email address is not correct.';
+                var error_text_email_long = 'Email address cannot be more than 255 characters in length.';
+                var error_text_username_characters = "Public name must contain only lowercase letters, numbers, '.', '_' and '-'. It also must be between 3 and 255 characters in length.";
+                var error_text_password_short = 'Use a password of at least 6 characters';
+                var error_text_password_match = "Passwords don't match";
+
+                var validForm = true;
+
+                var email = $('#email_input').val();
+                var name = $('#name_input').val();
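+                // These checks form an else-if chain, so only the first
+                // failing rule is reported per submit attempt.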
+                if (email.length > 255){ renderError(error_text_email_long); validForm = false;}
+                else if (!validateString(email,"email")){ renderError(error_text_email); validForm = false;}
+                else if (!($('#password_input').val() === $('#password_check_input').val())){ renderError(error_text_password_match); validForm = false;}
+                else if ($('#password_input').val().length < 6 ){ renderError(error_text_password_short); validForm = false;}
+                else if (name && !(validateString(name,"username"))){ renderError(error_text_username_characters); validForm = false;}
+
+                if (!validForm) {
+                    e.preventDefault();
+                    // reactivate the button if the form wasn't submitted
+                    $('#send').removeAttr('disabled');
+                }
+            });
+        });
+    </script>
+
+    <div id="registrationForm" class="toolForm">
+        <form name="registration" id="registration" action="${form_action}" method="post" >
+            <div class="toolFormTitle">Create account</div>
+            <div class="form-row">
+                <label>Email address:</label>
+                <input id="email_input" type="text" name="email" value="${email | h}" size="40"/>
+                <input type="hidden" name="redirect" value="${redirect | h}" size="40"/>
+            </div>
+            <div class="form-row">
+                <label>Password:</label>
+                <input id="password_input" type="password" name="password" value="" size="40"/>
+            </div>
+            <div class="progress">
+                <div id="complexity-bar" class="progress-bar" role="progressbar">
+                    Strength
+                </div>
+            </div>
+            <div class="form-row">
+                <label>Confirm password:</label>
+                <input id="password_check_input" type="password" name="confirm" value="" size="40"/>
+            </div>
+            <div class="form-row">
+                <label>Public name:</label>
+                <input id="name_input" type="text" name="username" size="40" value="${username |h}"/>
+                %if t.webapp.name == 'galaxy':
+                    <div class="toolParamHelp" style="clear: both;">
+                        Your public name is an identifier that will be used to generate addresses for information
+                        you share publicly. Public names must be at least three characters in length and contain only
+                        lower-case letters, numbers, dots, underscores, and dashes ('.', '_', '-').
+                    </div>
+                %else:
+                    <div class="toolParamHelp" style="clear: both;">
+                        Your public name provides a means of identifying you publicly within this Tool Shed. Public
+                        names must be at least three characters in length and contain only lower-case letters, numbers,
+                        dots, underscores, and dashes ('.', '_', '-'). You cannot change your public name after you have
+                        created a repository in this Tool Shed.
+                    </div>
+                %endif
+            </div>
+            %if trans.app.config.smtp_server and trans.app.config.mailing_join_addr:
+                <div class="form-row">
+                    <label>Subscribe to mailing list:</label>
+                    %if subscribe_checked:
+                        <% subscribe_check_box.checked = True %>
+                    %endif
+                    ${subscribe_check_box.get_html()}
+                    <p>See <a href="http://galaxyproject.org/wiki/Mailing%20Lists" target="_blank">
+                    all Galaxy project mailing lists</a>.</p>
+                </div>
+            %endif
+            %if user_type_fd_id_select_field and len( user_type_fd_id_select_field.options ) >= 1:
+                <div class="form-row">
+                    <label>User type</label>
+                    ${user_type_fd_id_select_field.get_html()}
+                </div>
+            %endif
+            %if user_type_form_definition:
+                %for field in widgets:
+                    <div class="form-row">
+                        <label>${field['label']}</label>
+                        ${field['widget'].get_html()}
+                        <div class="toolParamHelp" style="clear: both;">
+                            ${field['helptext']}
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                %endfor
+                %if not user_type_fd_id_select_field:
+                    <input type="hidden" name="user_type_fd_id" value="${trans.security.encode_id( user_type_form_definition.id )}"/>
+                %endif
+            %endif
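+            ## Honeypot for bots ("bears"): the field is assumed to be hidden
+            ## by CSS, so any submitted value flags the registration as automated.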
+            <div id="for_bears">
+            If you see this, please leave following field blank.
+            <input type="text" name="bear_field" size="1" value=""/>
+            </div>
+            <div class="form-row">
+                <input type="submit" id="send" name="create_user_button" value="Submit"/>
+            </div>
+        </form>
+        %if registration_warning_message:
+        <div class="alert alert-danger" style="margin: 30px 12px 12px 12px;">
+            ${registration_warning_message}
+        </div>
+        %endif
+    </div>
+
+</%def>
diff --git a/templates/user/reset_password.mako b/templates/user/reset_password.mako
new file mode 100644
index 0000000..806d4c8
--- /dev/null
+++ b/templates/user/reset_password.mako
@@ -0,0 +1,20 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Reset Password</div>
+    <form name="reset_password" id="reset_password" action="${h.url_for( controller='user', action='reset_password' )}" method="post" >
+        <div class="form-row">
+            <label>Email:</label>
+            <input type="text" name="email" value="" size="40"/>
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <input type="submit" name="reset_password_button" value="Submit"/>
+        </div>
+    </form>
+</div>
diff --git a/templates/user/toolbox_filters.mako b/templates/user/toolbox_filters.mako
new file mode 100644
index 0000000..cd0d99e
--- /dev/null
+++ b/templates/user/toolbox_filters.mako
@@ -0,0 +1,91 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+<br/>
+<br/>
+
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button"  href="${h.url_for( controller='user', action='index', cntrller=cntrller )}">User preferences</a>
+    </li>
+</ul>
+
+%if tool_filters or section_filters or label_filters:
+    <div class="toolForm">
+        <form name="toolbox_filter" id="toolbox_filter" action="${h.url_for( controller='user', action='edit_toolbox_filters', cntrller=cntrller )}" method="post" >
+            % if tool_filters:
+                <div class="toolFormTitle">Edit ToolBox filters :: Tools</div>
+                <div class="toolFormBody">
+                    % for filter in tool_filters:
+                        <div class="form-row">
+                            <div style="float: left; width: 40px; margin-right: 10px;">
+                                % if filter['checked']:
+                                    <input type="checkbox" name="t_${filter['filterpath']}" checked="checked">
+                                % else:
+                                    <input type="checkbox" name="t_${filter['filterpath']}">
+                                % endif
+                            </div>
+                            <div style="float: left; margin-right: 10px;">
+                                ${filter['short_desc']}
+                                <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+                            </div>
+                            <div style="clear: both"></div>
+                        </div>
+                    % endfor
+                </div>
+            % endif
+
+            % if section_filters:
+                <div class="toolFormTitle">Edit ToolBox filters :: Sections</div>
+                <div class="toolFormBody">
+                    % for filter in section_filters:
+                        <div class="form-row">
+                            <div style="float: left; width: 40px; margin-right: 10px;">
+                                % if filter['checked']:
+                                    <input type="checkbox" name="s_${filter['filterpath']}" checked="checked">
+                                % else:
+                                    <input type="checkbox" name="s_${filter['filterpath']}">
+                                % endif
+                            </div>
+                            <div style="float: left; margin-right: 10px;">
+                                ${filter['short_desc']}
+                                <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+                            </div>
+                            <div style="clear: both"></div>
+                        </div>
+                    % endfor
+                </div>
+            % endif
+
+            % if label_filters:
+                <div class="toolFormTitle">Edit ToolBox filters :: Labels</div>
+                <div class="toolFormBody">
+                    % for filter in label_filters:
+                        <div class="form-row">
+                            <div style="float: left; width: 40px; margin-right: 10px;">
+                                % if filter['checked']:
+                                    <input type="checkbox" name="l_${filter['filterpath']}" checked="checked">
+                                % else:
+                                    <input type="checkbox" name="l_${filter['filterpath']}">
+                                % endif
+                            </div>
+                            <div style="float: left; margin-right: 10px;">
+                                ${filter['short_desc']}
+                                <div class="toolParamHelp" style="clear: both;">${filter['desc']}</div>
+                            </div>
+                            <div style="clear: both"></div>
+                        </div>
+                    % endfor
+                </div>
+            % endif
+            <div class="form-row">
+                <input type="submit" name="edit_toolbox_filter_button" value="Save changes">
+            </div>
+        </form>
+    </div>
+%else:
+    ${render_msg( 'No filters available. Contact your system administrator or check your configuration file.', 'info' )}
+%endif
diff --git a/templates/user/username.mako b/templates/user/username.mako
new file mode 100644
index 0000000..adb922f
--- /dev/null
+++ b/templates/user/username.mako
@@ -0,0 +1,27 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<% is_admin = cntrller == 'admin' and trans.user_is_admin() %>
+
+<h2>Manage Public Name</h2>
+<div class="toolForm">
+    <form name="username" id="username" action="${h.url_for( controller='user', action='edit_username', cntrller=cntrller, user_id=trans.security.encode_id( user.id ) )}" method="post" >
+        <div class="toolFormTitle">Login Information</div>
+        <div class="form-row">
+            <label>Public name:</label>
+            <input type="text" name="username" size="40" value="${username}"/>
+            <div class="toolParamHelp" style="clear: both;">
+                Your public name is an identifier that will be used to generate addresses for information
+                you share publicly. Public names must be at least three characters in length and contain only lower-case
+                letters, numbers, dots, underscores, and dashes ('.', '_', '-').
+            </div>
+        </div>
+        <div class="form-row">
+            <input type="submit" name="change_username_button" value="Save"/>
+        </div>
+    </form>
+</div>
diff --git a/templates/webapps/galaxy/admin/center.mako b/templates/webapps/galaxy/admin/center.mako
new file mode 100644
index 0000000..bc95b26
--- /dev/null
+++ b/templates/webapps/galaxy/admin/center.mako
@@ -0,0 +1,110 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="title()">Galaxy Administration</%def>
+
+<h2>Administration</h2>
+Please visit <a href="https://wiki.galaxyproject.org/Admin" target="_blank">the Galaxy administration hub</a> to learn how to keep your Galaxy server in good shape.
+%if message:
+    ${render_msg( message, status )}
+%else:
+        <h4>Server</h4>
+            <ul>
+                <li>
+                    <strong>Data types registry</strong> - See all datatypes available in this Galaxy.
+                </li>
+                <li>
+                    <strong>Data tables registry</strong> - See all data tables available in this Galaxy.
+                </li>
+                <li>
+                    <strong>Display applications</strong> - See all display applications configured in this Galaxy.
+                </li>
+                <li>
+                    <strong>Manage jobs</strong> - Display all jobs that are currently not finished (i.e., their state is new, waiting, queued, or running).  Administrators are able to cleanly stop long-running jobs. 
+                </li>
+            </ul>
+
+        <h4>Tools and Tool Shed</h4>
+            <ul>
+            %if trans.app.tool_shed_registry and trans.app.tool_shed_registry.tool_sheds:
+                <li>
+                    <strong>Search Tool Shed</strong> - Search and install new tools and other Galaxy utilities from the Tool Shed. See <a href="https://wiki.galaxyproject.org/Admin/Tools/AddToolFromToolShedTutorial" target="_blank">the tutorial</a>.
+                </li>
+            %endif
+            %if tool_shed_repository_ids:
+                <li>
+                    <strong>Monitor installing repositories</strong> - View the status of tools that are currently being installed.
+                </li>
+            %endif
+            %if is_repo_installed:
+                <li>
+                    <strong>Manage installed tools</strong> - View and administer installed tools and utilities on this Galaxy.
+                </li>
+                <li>
+                    <strong>Reset metadata</strong> - Select the repositories for which you want to reset metadata.
+                </li>
+            %endif
+                <li>
+                    <strong>Download local tool</strong> - Download a tarball with a tool from this Galaxy.
+                </li>
+                <li>
+                    <strong>Reload a tool's configuration</strong> - Allows a new version of a tool to be loaded while the server is running.
+                </li>
+                <li>
+                    <strong>Tool lineage</strong> - A view of the version lineages for all installed tools. Useful for debugging.
+                </li>
+                <li>
+                    <strong>Review tool migration stages</strong> - See the list of migration stages that moved sets of tools from the distribution to the Tool Shed.
+                </li>
+            </ul>
+
+        <h4>User Management</h4>
+            <ul>
+                <li>
+                    <strong>Users</strong> - A view of all users, along with the groups and non-private roles associated with each user.
+                </li>
+                <li>
+                    <strong>Groups</strong> - A view of all groups along with the members of the group and the roles associated with each group.
+                </li>
+                <li>
+                    <strong>Roles</strong> - A view of all non-private roles along with the role type, and the users and groups that are associated with the role.
+                    Also includes a view of the data library datasets that are associated with the role and the permissions applied to each dataset.
+                </li>
+                <li>
+                    <strong>API keys</strong> - A view of all generated API keys, with an option to regenerate them.
+                </li>
+            %if trans.app.config.allow_user_impersonation:
+                <li>
+                    <strong>Impersonate a user</strong> - Allows an administrator to view Galaxy as another user in order to help troubleshoot issues.
+                </li>
+            %endif
+            </ul>
+
+        <h4>Data</h4>
+            <ul>
+            %if trans.app.config.enable_quotas:
+                <li>
+                    <strong>Quotas</strong> - Manage user space quotas. See <a href="https://wiki.galaxyproject.org/Admin/DiskQuotas" target="_blank">wiki</a> for details.
+                </li>
+            %endif
+                <li>
+                    <strong>Data libraries</strong> - Data libraries enable authorized Galaxy users to share datasets with other groups or users. Only administrators can create data libraries. See <a href="https://wiki.galaxyproject.org/DataLibraries" target="_blank">wiki</a> for details and <a href="https://wiki.galaxyproject.org/Admin/DataLibraries/LibrarySecurity" target="_blank">this page</a> for security description.
+                </li>
+                <li>
+                    <strong>Local data</strong> - Manage the reference (and other) data that is stored within Tool Data Tables. See <a href="https://wiki.galaxyproject.org/Admin/Tools/DataManagers" target="_blank">wiki</a> for details.
+                </li>
+            </ul>
+        <h4>Form definitions</h4>
+            <ul>
+                <li>
+                    <strong>Form definitions</strong> - Manage local form definitions.
+                </li>
+            </ul>
+
+        <h4>Sample tracking</h4>
+            <ul>
+                <li>
+                    Please see the <a href="https://wiki.galaxyproject.org/Admin/DataLibraries/LibrarySampleTracking" target="_blank">sample tracking tutorial</a>.
+                </li>
+            </ul>
+%endif
diff --git a/templates/webapps/galaxy/admin/index.mako b/templates/webapps/galaxy/admin/index.mako
new file mode 100644
index 0000000..0f8bc97
--- /dev/null
+++ b/templates/webapps/galaxy/admin/index.mako
@@ -0,0 +1,132 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+## Default title
+<%def name="title()">Galaxy Administration</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ## Include "base.css" for styling tool menu and forms (details)
+    ${h.css( "base", "autocomplete_tagging" )}
+
+    ## But make sure styles for the layout take precedence
+    ${parent.stylesheets()}
+
+    <style type="text/css">
+        body { margin: 0; padding: 0; overflow: hidden; }
+        #left {
+            background: #C1C9E5 url(${h.url_for('/static/style/menu_bg.png')}) top repeat-x;
+        }
+
+        .unified-panel-body {
+            overflow: auto;
+        }
+        .toolMenu {
+            margin: 8px 0 0 10px;
+        }
+    </style>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="init()">
+    <%
+        self.has_left_panel=True
+        self.has_right_panel=False
+        self.active_view="admin"
+    %>
+</%def>
+
+<%def name="left_panel()">
+    <div class="unified-panel-header" unselectable="on">
+        <div class='unified-panel-header-inner'>Administration</div>
+    </div>
+    <div class="unified-panel-body">
+        <div class="toolMenu">
+            <div class="toolSectionList">
+                <div class="toolSectionTitle">Server</div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='view_datatypes_registry' )}" target="galaxy_main">Data types registry</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='view_tool_data_tables' )}" target="galaxy_main">Data tables registry</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='display_applications' )}" target="galaxy_main">Display applications</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='jobs' )}" target="galaxy_main">Manage jobs</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">Tools and Tool Shed</div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                    %if trans.app.tool_shed_registry and trans.app.tool_shed_registry.tool_sheds:
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='browse_tool_sheds' )}" target="galaxy_main">Search Tool Shed</a></div>
+                        %if trans.app.config.enable_beta_ts_api_install:
+                            <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='browse_toolsheds' )}" target="galaxy_main">Search Tool Shed (Beta)</a></div>
+                        %endif
+                    %endif
+                    %if installing_repository_ids:
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='monitor_repository_installation', tool_shed_repository_ids=installing_repository_ids )}" target="galaxy_main">Monitor installing repositories</a></div>
+                    %endif
+                    %if is_repo_installed:
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='browse_repositories' )}" target="galaxy_main">Manage installed tools</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_installed_repositories' )}" target="galaxy_main">Reset metadata</a></div>
+                    %endif
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='package_tool' )}" target="galaxy_main">Download local tool</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='tool_versions' )}" target="galaxy_main">Tool lineage</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='reload_tool' )}" target="galaxy_main">Reload a tool's configuration</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='review_tool_migration_stages' )}" target="galaxy_main">Review tool migration stages</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='tool_errors' )}" target="galaxy_main">View Tool Error Logs</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='sanitize_whitelist' )}" target="galaxy_main">Manage Display Whitelist</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">User Management</div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='users' )}" target="galaxy_main">Users</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='groups' )}" target="galaxy_main">Groups</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='admin', action='roles' )}" target="galaxy_main">Roles</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='userskeys', action='all_users' )}" target="galaxy_main">API keys</a></div>
+                        %if trans.app.config.allow_user_impersonation:
+                            <div class="toolTitle"><a href="${h.url_for( controller='admin', action='impersonate' )}" target="galaxy_main">Impersonate a user</a></div>
+                        %endif
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">Data</div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        %if trans.app.config.enable_quotas:
+                            <div class="toolTitle"><a href="${h.url_for( controller='admin', action='quotas' )}" target="galaxy_main">Quotas</a></div>
+                        %endif
+                        <div class="toolTitle"><a href="${h.url_for( controller='library_admin', action='browse_libraries' )}" target="galaxy_main">Data libraries</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='data_manager' )}" target="galaxy_main">Local data</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">Form Definitions</div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a href="${h.url_for( controller='forms', action='browse_form_definitions' )}" target="galaxy_main">Form definitions</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">Sample Tracking</div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a href="${h.url_for( controller='external_service', action='browse_external_services' )}" target="galaxy_main">Sequencers and external services</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='request_type', action='browse_request_types' )}" target="galaxy_main">Request types</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='requests_admin', action='browse_requests' )}" target="galaxy_main">Sequencing requests</a></div>
+                        <div class="toolTitle"><a href="${h.url_for( controller='requests_common', action='find_samples', cntrller='requests_admin' )}" target="galaxy_main">Find samples</a></div>
+                    </div>
+                </div>
+            </div>
+        </div>
+    </div>
+</%def>
+
+<%def name="center_panel()">
+    <% center_url = h.url_for( controller='admin', action='center', message=message, status=status ) %>
+    <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"> </iframe>
+</%def>
diff --git a/templates/webapps/galaxy/admin/sanitize_whitelist.mako b/templates/webapps/galaxy/admin/sanitize_whitelist.mako
new file mode 100644
index 0000000..56e58b3
--- /dev/null
+++ b/templates/webapps/galaxy/admin/sanitize_whitelist.mako
@@ -0,0 +1,59 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if not sanitize_all:
+    <div><p>You currently have <strong>sanitize_all_html</strong> set to False
+    in your galaxy configuration file.  This prevents Galaxy from sanitizing
+    tool outputs, which is an important security feature.  For improved
+    security, we recommend you re-enable blanket sanitization (set
+    <strong>sanitize_all_html</strong> to True) and manage trusted tools via
+    this whitelist instead.</p></div>
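+    ## For reference: this corresponds to "sanitize_all_html = True" in
+    ## galaxy.ini (assuming the default [app:main] section).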
+%else:
+    <div><p>This interface will allow you to mark particular tools as 'trusted'
+    after which Galaxy will no longer attempt to sanitize any HTML contents of
+    datasets created by these tools upon display.  Please be aware of the
+    potential security implications of doing this -- bypassing sanitization
+    using this whitelist disables Galaxy's security feature (for the indicated
+    tools) that prevents Galaxy from displaying potentially malicious
+    JavaScript.<br/>
+    Note that datasets originating from an archive import are still sanitized
+    even when their creating tool is whitelisted since it isn't possible to
+    validate the information supplied in the archive.</p></div>
+    <form name="sanitize_whitelist" action="${h.url_for( controller='admin', action='sanitize_whitelist' )}">
+    <div class="toolForm">
+        <div class="toolFormTitle">Tool Sanitization Whitelist</div>
+        <div class="toolFormBody">
+            <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+                <tr>
+                    <th bgcolor="#D8D8D8">Whitelist</th>
+                    <th bgcolor="#D8D8D8">Name</th>
+                    <th bgcolor="#D8D8D8">ID</th>
+                </tr>
+                <% ctr = 0 %>
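+                ## ctr tracks row parity so every other row gets the shaded
+                ## "odd_row" class.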
+                %for tool in tools.values():
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>
+                            %if tool.id in trans.app.config.sanitize_whitelist:
+                                <input type="checkbox" name="tools_to_whitelist" value="${tool.id}" checked="checked"/>
+                            %else:
+                                <input type="checkbox" name="tools_to_whitelist" value="${tool.id}"/>
+                            %endif
+                        </td>
+                        <td>${ tool.name | h }</td>
+                        <td>${ tool.id | h }</td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            </table>
+        </div>
+    </div>
+    <input type="submit" name="submit_whitelist" value="Submit new whitelist"/>
+    </form>
+%endif
diff --git a/templates/webapps/galaxy/admin/tool_sheds.mako b/templates/webapps/galaxy/admin/tool_sheds.mako
new file mode 100644
index 0000000..54fe80b
--- /dev/null
+++ b/templates/webapps/galaxy/admin/tool_sheds.mako
@@ -0,0 +1,41 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="title()">Configured Galaxy tool sheds</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Accessible Galaxy tool sheds</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <table class="grid">
+                <% shed_id = 0 %>
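+                ## shed_id keys each row's popup menu so the DOM ids generated
+                ## below stay unique per tool shed.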
+                %for name, url in trans.app.tool_shed_registry.tool_sheds.items():
+                    <tr class="libraryTitle">
+                        <td>
+                            <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dataset-${shed_id}-popup">
+                                <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url )}">${name|h}</a>
+                            </div>
+                            <div popupmenu="dataset-${shed_id}-popup">
+                                <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url )}">Browse valid repositories</a>
+                                <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url )}">Search for valid tools</a>
+                                <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='find_workflows_in_tool_shed', tool_shed_url=url )}">Search for workflows</a>
+                            </div>
+                        </td>
+                    </tr>
+                    <% shed_id += 1 %>
+                %endfor
+            </table>
+        </div>
+        <div style="clear: both"></div>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/admin/toolsheds.mako b/templates/webapps/galaxy/admin/toolsheds.mako
new file mode 100644
index 0000000..64c0c2c
--- /dev/null
+++ b/templates/webapps/galaxy/admin/toolsheds.mako
@@ -0,0 +1,41 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="title()">Configured Galaxy tool sheds</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Accessible Galaxy tool sheds</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <table class="grid">
+                <% shed_id = 0 %>
+                %for name, url in trans.app.tool_shed_registry.tool_sheds.items():
+                    <tr class="libraryTitle">
+                        <td>
+                            <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dataset-${shed_id}-popup">
+                                <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_toolshed', tool_shed_url=url )}">${name|h}</a>
+                            </div>
+                            <div popupmenu="dataset-${shed_id}-popup">
+                                <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_toolshed', tool_shed_url=url )}">Browse valid repositories</a>
+                                <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url )}">Search for valid tools</a>
+                                <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='find_workflows_in_tool_shed', tool_shed_url=url )}">Search for workflows</a>
+                            </div>
+                        </td>
+                    </tr>
+                    <% shed_id += 1 %>
+                %endfor
+            </table>
+        </div>
+        <div style="clear: both"></div>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/admin/view_display_applications.mako b/templates/webapps/galaxy/admin/view_display_applications.mako
new file mode 100644
index 0000000..e9cf6cf
--- /dev/null
+++ b/templates/webapps/galaxy/admin/view_display_applications.mako
@@ -0,0 +1,48 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">There are currently ${len( display_applications )} <a class="icon-btn" href="${ h.url_for( controller='admin', action='reload_display_application' ) }" title="Reload all display applications" data-placement="bottom">
+                        <span class="fa fa-refresh"></span>
+                    </a> display applications loaded.</div>
+    <div class="toolFormBody">
+        <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+            <tr>
+                <th bgcolor="#D8D8D8">Reload</th>
+                <th bgcolor="#D8D8D8">Name</th>
+                <th bgcolor="#D8D8D8">ID</th>
+                <th bgcolor="#D8D8D8">Version</th>
+                <th bgcolor="#D8D8D8">Links</th>
+                <th bgcolor="#D8D8D8">Filename</th>
+            </tr>
+            <% ctr = 0 %>
+            %for display_app in display_applications.values():
+                %if ctr % 2 == 1:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                    <td>
+                        <a class="icon-btn" href="${ h.url_for( controller='admin', action='reload_display_application', id=display_app.id ) }" title="Reload ${ display_app.name | h } display application" data-placement="bottom">
+                            <span class="fa fa-refresh"></span>
+                        </a>
+                    </td>
+                    <td>${ display_app.name | h }</td>
+                    <td>${ display_app.id | h }</td>
+                    <td>${ display_app.version | h }</td>
+                    <td><ul>
+                        %for link in display_app.links.values():
+                            <li>${  link.name | h }</li>
+                        %endfor
+                    </ul></td>
+                    <td>${ display_app._filename | h }</td>
+                </tr>
+                <% ctr += 1 %>
+            %endfor
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/base_panels.mako b/templates/webapps/galaxy/base_panels.mako
new file mode 100644
index 0000000..735ee10
--- /dev/null
+++ b/templates/webapps/galaxy/base_panels.mako
@@ -0,0 +1,21 @@
+<%inherit file="/base/base_panels.mako"/>
+<!-- webapps/galaxy/base_panels.mako -->
+<%namespace name="mod_masthead" file="/webapps/galaxy/galaxy.masthead.mako"/>
+
+## Default title
+<%def name="title()">Galaxy</%def>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+</%def>
+
+<%def name="late_javascripts()">
+${parent.late_javascripts()}
+</%def>
+
+## Masthead
+<%def name="masthead()">
+    <%
+        mod_masthead.load(self.active_view);
+    %>
+</%def>
diff --git a/templates/webapps/galaxy/biostar/post_redirect.mako b/templates/webapps/galaxy/biostar/post_redirect.mako
new file mode 100644
index 0000000..d2c4015
--- /dev/null
+++ b/templates/webapps/galaxy/biostar/post_redirect.mako
@@ -0,0 +1,26 @@
+<%inherit file="/base.mako"/>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script>
+        $( document ).ready( function(){
+            $("div#postRedirect").hide();
+            $("form#postRedirectForm").submit();
+        });
+    </script>
+</%def>
+
+<%def name="title()">Post Biostar Question</%def>
+
+<div class="infomessagelarge">
+    <p>You are now being forwarded to Biostar.</p>
+    <div id="postRedirect">
+        <p>If you are not automatically forwarded, click the button below:</p>
+        <form id="postRedirectForm" action="${post_url}" method="post" >
+            %for input_name, input_value in form_inputs.items():
+                <input type="hidden" name="${input_name | h}" value="${input_value | h}">
+            %endfor
+                <input type="submit" name="GalaxySubmitPostRedirectForm" id='GalaxySubmitPostRedirectForm' value="Click Here">
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/data_manager/index.mako b/templates/webapps/galaxy/data_manager/index.mako
new file mode 100644
index 0000000..edf4be2
--- /dev/null
+++ b/templates/webapps/galaxy/data_manager/index.mako
@@ -0,0 +1,70 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="title()">Data Manager</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<h2>Data Manager</h2>
+
+%if view_only:
+    <p>Not implemented</p>
+%elif not data_managers.data_managers:
+    ${ render_msg( 'You do not currently have any Data Managers installed. You can install some from a <a href="%s">ToolShed</a>.' % ( h.url_for( controller="admin_toolshed", action="browse_tool_sheds" ) ), "warning" ) }
+%else:
+    <p>Choose a data management option below. You may install additional Data Managers from a <a href="${ h.url_for( controller='admin_toolshed', action='browse_tool_sheds' ) }">ToolShed</a>.</p>
+    <ul>
+        <li><h3>Run Data Manager Tools</h3>
+            <div style="margin-left:1em">
+            <ul>
+            %for data_manager_id, data_manager in sorted( data_managers.data_managers.iteritems(), key=lambda x:x[1].name ):
+                <li>
+                    <a href="${ h.url_for( controller='root', tool_id=data_manager.tool.id ) }" target="_blank"><strong>${ data_manager.name | h }</strong></a> - ${ data_manager.description | h }
+                </li>
+                <p/>
+            %endfor
+            </ul>
+            </div>
+        </li>
+        <p/>
+        <li><h3>View Data Manager Jobs</h3>
+            <div style="margin-left:1em">
+            <ul>
+                %for data_manager_id, data_manager in sorted( data_managers.data_managers.iteritems(), key=lambda x:x[1].name ):
+                    <li>
+                        <a href="${ h.url_for( controller='data_manager', action='manage_data_manager', id=data_manager_id)}" target="galaxy_main"><strong>${ data_manager.name | h }</strong></a> - ${ data_manager.description | h }</a>
+                    </li>
+                    <p/>
+                %endfor
+            </ul>
+            </div>
+        </li>
+        <p/>
+        <p/>
+        <li><h3>View Tool Data Table Entries</h3>
+            <div style="margin-left:1em">
+            <ul>
+                <% managed_table_names = data_managers.managed_data_tables.keys() %>
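+                ## Tables that are managed by a Data Manager are shown in bold with an exchange icon below.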
+                %for table_name in sorted( tool_data_tables.get_tables().keys() ):
+                    <li>
+                        <a href="${h.url_for( controller='data_manager', action='manage_data_table', table_name=table_name)}" target="galaxy_main">
+                            %if table_name in managed_table_names:
+                                <strong>${ table_name | h }</strong></a> <span class="fa fa-exchange"></span>
+                            %else:
+                                ${ table_name | h }</a>
+                            %endif
+                    </li>
+                    <p/>
+                %endfor
+            </ul>
+            </div>
+        </li>
+        <p/>
+    </ul>
+    <p/>
+    <br/>
+%endif
+
+${render_msg( 'For more information about Data Managers, please visit the <a href="https://wiki.galaxyproject.org/Admin/Tools/DataManagers" target="_blank">wiki</a>.', "info" ) }
diff --git a/templates/webapps/galaxy/data_manager/manage_data_manager.mako b/templates/webapps/galaxy/data_manager/manage_data_manager.mako
new file mode 100644
index 0000000..6b2eb43
--- /dev/null
+++ b/templates/webapps/galaxy/data_manager/manage_data_manager.mako
@@ -0,0 +1,56 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="title()">Data Manager: ${ data_manager.name | h } - ${ data_manager.description | h }</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<h2>Data Manager: ${ data_manager.name | h } - ${ data_manager.description | h }</h2>
+
+%if view_only:
+    <p>Not implemented</p>
+%else:
+    <p>Access managed data by job</p>
+    
+%if jobs:
+<div>
+    <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+        <tr class="header">
+            <td>Actions</td>
+            <td>Job ID</td>
+            <td>User</td>
+            <td>Last Update</td>
+            <td>State</td>
+            <td>Command Line</td>
+            <td>Job Runner</td>
+            <td>PID/Cluster ID</td>
+        </tr>
+        %for job in jobs:
+            <tr>
+                <td>
+                    <div class="icon-btn-group">
+                        <a class="icon-btn" href="${ h.url_for( controller="data_manager", action="view_job", id=trans.security.encode_id( job.id ) ) }" title="View info"><span class="fa fa-info-circle"></span></a><a class="icon-btn" href="${ h.url_for( controller="tool_runner", action="rerun", job_id=trans.security.encode_id( job.id ) ) }" title="Rerun"><span class="fa fa-refresh"></span></a>
+                    </div>
+                </td>
+                <td>${ job.id | h }</td>
+                %if job.history and job.history.user:
+                    <td>${job.history.user.email | h}</td>
+                %else:
+                    <td>anonymous</td>
+                %endif
+                <td>${job.update_time | h}</td>
+                <td>${job.state | h}</td>
+                <td>${job.command_line | h}</td>
+                <td>${job.job_runner_name | h}</td>
+                <td>${job.job_runner_external_id | h}</td>
+            </tr>
+        %endfor
+    </table>
+    <p/>
+</div>
+%else:
+    <div class="infomessage">There are no jobs for this data manager.</div>
+%endif
+
+%endif
diff --git a/templates/webapps/galaxy/data_manager/manage_data_table.mako b/templates/webapps/galaxy/data_manager/manage_data_table.mako
new file mode 100644
index 0000000..d6f642a
--- /dev/null
+++ b/templates/webapps/galaxy/data_manager/manage_data_table.mako
@@ -0,0 +1,45 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="title()">Tool Data Table: ${ data_table.name | h }</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if view_only:
+    <p>Not implemented</p>
+%else:
+    <div>
+        <% column_name_list = data_table.get_column_name_list() %>
+        <table class="tabletip">
+            <thead>
+                <tr><th colspan="${ len( column_name_list ) | h}" style="font-size: 120%;">
+                    Tool Data Table: ${ data_table.name | h }
+                    <a class="icon-btn" href="${ h.url_for( controller="data_manager", action="reload_tool_data_tables", table_name=data_table.name ) }" title="Reload ${data_table.name | h} tool data table" data-placement="bottom">
+                        <span class="fa fa-refresh"></span>
+                    </a>
+                </th></tr>
+                <tr>
+
+                %for name in column_name_list:
+                    <th>${name | h}</th>
+                %endfor
+                </tr>
+            </thead>
+            <tbody>
+                %for table_row in data_table.data:
+                <tr>
+                %for field in table_row:
+                    <td>${field | h}</td>
+                %endfor
+                </tr>
+                %endfor
+            </tbody>
+        </table>
+    </div>
+    %if not data_table.data:
+        ${render_msg( "There are currently no entries in this Tool Data Table.", "warning" ) }
+    %endif
+
+%endif
diff --git a/templates/webapps/galaxy/data_manager/view_job.mako b/templates/webapps/galaxy/data_manager/view_job.mako
new file mode 100644
index 0000000..d1a5a6d
--- /dev/null
+++ b/templates/webapps/galaxy/data_manager/view_job.mako
@@ -0,0 +1,63 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<% from galaxy.util import nice_size, unicodify %>
+
+<%def name="title()">Data Manager: ${ data_manager.name | h } - ${ data_manager.description | h }</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if error_messages:
+    %for error_message in error_messages:
+        ${ render_msg( error_message, 'error' ) }
+    %endfor
+%endif
+
+%if view_only:
+    <p>Not implemented</p>
+%else:
+%for i, hda in enumerate( hdas ):
+<table class="tabletip">
+    <thead>
+        <tr><th colspan="2" style="font-size: 120%;">
+            Data Manager: <a href="${ h.url_for( controller='root', tool_id=data_manager.tool.id ) }" target="_blank">${ data_manager.name | h }</a> - ${ data_manager.description | h } <a class="icon-btn" href="${ h.url_for( controller="tool_runner", action="rerun", job_id=trans.security.encode_id( job.id ) ) }" title="Rerun" data-placement="bottom"><span class="fa fa-refresh"></span></a>
+        </th></tr>
+    </thead>
+    <tbody>
+        <tr><td>Name:</td><td>${hda.name | h}</td></tr>
+        <tr><td>Created:</td><td>${unicodify(hda.create_time.strftime(trans.app.config.pretty_datetime_format)) | h}</td></tr>
+        <tr><td>Filesize:</td><td>${nice_size(hda.dataset.file_size) | h}</td></tr>
+        <tr><td>Tool Exit Code:</td><td>${job.exit_code | h}</td></tr>
+        <tr><td>Full Path:</td><td>${hda.file_name | h}</td></tr>
+        <tr><td>View complete info:</td><td><a href="${h.url_for( controller='dataset', action='show_params', dataset_id=trans.security.encode_id( hda.id ))}">${ hda.id | h }</a></td></tr>
+    </tbody>
+</table>
+<br />
+
+<% json_tables = data_manager_output[i]%>
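+## data_manager_output[i] is assumed to hold ( table_name, json_table ) pairs
+## describing the tool data table entries produced for this output dataset.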
+%for table_name, json_table in json_tables: 
+<table class="tabletip">
+    <thead>
+        <tr><th colspan="2" style="font-size: 120%;">
+            Data Table: <a href="${h.url_for( controller='data_manager', action='manage_data_table', table_name=table_name)}">${ table_name | h }</a>
+        </th></tr>
+    </thead>
+    <% len_json_table = len( json_table ) %>
+    <tbody>
+        %for j, table_row in enumerate( json_table ):
+        %if len_json_table > 1:
+        <tr><td><strong>Entry #${j | h}</strong></td><td> </td></tr>
+        %endif
+        %for name, value in table_row.iteritems():
+        <tr><td>${name | h}:</td><td>${value | h}</td></tr>
+        %endfor
+        %endfor
+    </tbody>
+</table>
+<br />
+%endfor
+
+%endfor
+
+%endif
diff --git a/templates/webapps/galaxy/dataset/copy_view.mako b/templates/webapps/galaxy/dataset/copy_view.mako
new file mode 100644
index 0000000..356f645
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/copy_view.mako
@@ -0,0 +1,159 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/refresh_frames.mako" import="handle_refresh_frames" />
+
+<%def name="title()">Copy History Items</%def>
+
+<%def name="javascripts()">
+
+    ${parent.javascripts()}
+
+    ${handle_refresh_frames()}
+    
+    <script type="text/javascript">
+        $(function() {
+            $("#select-multiple").click(function() {
+                $("#single-dest-select").val("");
+                $("#single-destination").hide();
+                $("#multiple-destination").show();
+            });
+        });
+        $(function() {
+            $("#source-content-all").click(function() {
+                $("input[name='source_content_ids']").each(function() {
+                    this.checked = true;
+                });
+            });
+        });
+        $(function() {
+            $("#source-content-none").click(function() {
+                $("input[name='source_content_ids']").each(function() {
+                    this.checked = false;
+                });
+            });
+        });
+    </script>
+    
+</%def>
+
+%if error_msg:
+    <div>
+        <div class="errormessage">${error_msg}</div>
+        <div style="clear: both"></div>
+    </div>
+%endif
+%if done_msg:
+    <div>
+        <div class="donemessage">${done_msg}</div>
+        <div style="clear: both"></div>
+    </div>
+%endif
+<div>
+    <div class="infomessage">Copy any number of history items from one history to another.</div>
+    <div style="clear: both"></div>
+</div>
+<div>
+    <form method="post">
+        <div class="toolForm" style="float: left; width: 45%; padding: 0px;">
+            <div class="toolFormTitle">Source History:<br />
+                <select id="source_history" name="source_history" refresh_on_change="true" style="font-weight: normal;">
+                    %for i, hist in enumerate(target_histories):
+                        <%
+                            selected = ""
+                            current_history_text = ""
+                            if hist == source_history:
+                                selected = "selected='selected'"
+                            if hist == current_history:
+                                current_history_text = " (current history)"
+                            
+                        %>
+                        <option value="${trans.security.encode_id(hist.id)}" ${selected}>
+                            ${i + 1}: ${h.truncate(util.unicodify( hist.name ), 30) | h}${current_history_text}
+                        </option>
+                    %endfor
+                </select>
+            </div>
+            <div class="toolFormBody">
+                <% has_source_contents = False %>
+                %for data in source_contents:
+                    %if not has_source_contents:
+                        <div class="form-row">
+                            <div class="btn-group">
+                                <span class="select-all btn btn-default" name="source-content-all" id="source-content-all">All</span>
+                                <span class="deselect-all btn btn-default" name="source-content-none" id="source-content-none">None</span>
+                            </div>
+                        </div>
+                    %endif
+                    <%
+                        has_source_contents = True
+                        checked = ""
+                        encoded_id = trans.security.encode_id(data.id)
+                        input_id = "%s|%s" % ( data.history_content_type, encoded_id )
+                        if input_id in source_content_ids:
+                            checked = " checked='checked'"
+                    %>
+                    <div class="form-row">
+                        <input type="checkbox" name="source_content_ids" id="${input_id}" value="${input_id}"${checked}/>
+                        <label for="${input_id}" style="display: inline;font-weight:normal;"> ${data.hid}: ${h.to_unicode(data.name) | h}</label>
+                    </div>
+                %endfor
+                %if not has_source_contents:
+                    <div class="form-row">This history has no datasets.</div>
+                %endif
+            </div>
+        </div>
+        <div style="float: left; padding-left: 10px; font-size: 36px;">→</div>
+        <div class="toolForm" style="float: right; width: 45%; padding: 0px;">
+            <div class="toolFormTitle">Destination History:</div>
+            <div class="toolFormBody">
+                <div class="form-row" id="single-destination">
+                    <select id="single-dest-select" name="target_history_id">
+                        %for i, hist in enumerate(target_histories):
+                            <%
+                                encoded_id = trans.security.encode_id(hist.id)
+                                source_history_text = ""
+                                selected = ""
+                                if hist == source_history:
+                                    source_history_text = " (source history)"
+                                if encoded_id == target_history_id:
+                                    selected = " selected='selected'"
+                            %>
+                            <option value="${encoded_id}"${selected}>${i + 1}: ${h.truncate( util.unicodify( hist.name ), 30) | h}${source_history_text}</option>
+                        %endfor
+                    </select><br /><br />
+                    <a style="margin-left: 10px;" href="javascript:void(0);" id="select-multiple">Choose multiple histories</a>
+                </div>
+                <div id="multiple-destination" style="display: none;">
+                    %for i, hist in enumerate( target_histories ):
+                        <%
+                            cur_history_text = ""
+                            encoded_id = trans.security.encode_id(hist.id)
+                            if hist == source_history:
+                                cur_history_text = " <strong>(source history)</strong>"
+                        %>
+                        <div class="form-row">
+                            <input type="checkbox" name="target_history_ids" id="hist_${encoded_id}" value="${encoded_id}"/>
+                            <label for="hist_${encoded_id}" style="display: inline; font-weight:normal;">${i + 1}: ${ util.unicodify( hist.name ) | h }${cur_history_text}</label>
+                        </div>
+                    %endfor
+                </div>
+                %if trans.get_user():
+                    <%
+                        checked = ""
+                        if "create_new_history" in target_history_ids:
+                            checked = " checked='checked'"
+                    %>
+                    <hr />
+                    <div style="text-align: center; color: #888;">— OR —</div>
+                    <div class="form-row">
+                        <label for="new_history_name" style="display: inline; font-weight:normal;">New history named:</label>
+                        <input id="new_history_name" type="text" name="new_history_name" />
+                    </div>
+                %endif
+            </div>
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row" style="text-align: center;">
+            <input type="submit" class="primary-button" name="do_copy" value="Copy History Items"/>
+        </div>
+    </form>
+</div>
diff --git a/templates/webapps/galaxy/dataset/display.mako b/templates/webapps/galaxy/dataset/display.mako
new file mode 100644
index 0000000..4feef6d
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/display.mako
@@ -0,0 +1,154 @@
+## Because HDAs do not have many of the properties that other sharable items have, we need to override most of the default code for display.
+<%inherit file="/display_base.mako"/>
+<%namespace file="/display_common.mako" import="*" />
+<%namespace file="/tagging_common.mako" import="render_individual_tagging_element, render_community_tagging_element" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+
+    ## If data is chunkable, use JavaScript for display.
+    %if item.datatype.CHUNKABLE:
+
+    <script type="text/javascript">
+        require(['mvc/dataset/data'], function(data) {
+            //
+            // Use tabular data display progressively by deleting data from page body
+            // and then showing dataset view.
+            //
+            $('.page-body').children().remove();
+
+            data.createTabularDatasetChunkedView({
+                // TODO: encode id.
+                dataset_config:
+                    _.extend( ${h.dumps( item.to_dict() )}, {
+                        chunk_url: "${h.url_for( controller='/dataset', action='display',
+                                         dataset_id=trans.security.encode_id( item.id ))}",
+                        first_data_chunk: ${first_chunk}
+                    }),
+                parent_elt: $('.page-body')
+            });
+        });
+    </script>
+
+    %endif
+</%def>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=True
+    self.message_box_visible=False
+    self.active_view="user"
+    self.overlay_visible=False
+%>
+</%def>
+
+<%def name="title()">
+    Galaxy | ${get_class_display_name( item.__class__ )} | ${get_item_name( item ) | h}
+</%def>
+
+<%def name="render_item_links( data )">
+    ## Provide links to save data and import dataset.
+    <a href="${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}" class="icon-button disk" title="Save dataset"></a>
+    <a href="${h.url_for( controller='/dataset', action='imp', dataset_id=trans.security.encode_id( data.id ) )}"
+       class="icon-button import" title="Import dataset"></a>
+</%def>
+
+## Renders dataset content. This def is used both to render data in a standalone page and to provide content for embedded datasets.
+<%def name="render_item( data, data_to_render )">
+    ${ render_deleted_data_message( data ) }
+    %if data_to_render:
+        %if truncated:
+            <div class="warningmessagelarge">
+                 This dataset is large and only the first megabyte is shown below. |
+                 <a href="${h.url_for( controller='dataset', action='display_by_username_and_slug', username=data.history.user.username, slug=trans.security.encode_id( data.id ), preview=False )}">Show all</a>
+            </div>
+        %endif
+        ## TODO: why is the default font size so small?
+        <pre style="font-size: 135%">${ data_to_render | h }</pre>
+    %else:
+        <p align='center'>Cannot show dataset content</p>
+    %endif
+</%def>
+
+<%def name="render_deleted_data_message( data )">
+    %if data.deleted:
+        <div class="errormessagelarge" id="deleted-data-message">
+            You are viewing a deleted dataset.
+            %if data.history and data.history.user == trans.get_user():
+                <br />
+                <a href="#" onclick="$.ajax( {type: 'GET', cache: false, url: '${h.url_for( controller='dataset', action='undelete_async', dataset_id=trans.security.encode_id( data.id ) )}', dataType: 'text', contentType: 'text/html', success: function( data, textStatus, jqXHR ){ if (data == 'OK' ){ $( '#deleted-data-message' ).slideUp( 'slow' ) } else { alert( 'Undelete failed.' ) } }, error: function( data, textStatus, jqXHR ){ alert( 'Undelete failed.' ); } } );">Undelete</a>
+            %endif
+        </div>
+    %endif
+</%def>
+
+<%def name="center_panel()">
+    <div class="unified-panel-header" unselectable="on">
+        <div class="unified-panel-header-inner">
+                ${get_class_display_name( item.__class__ )}
+            | ${get_item_name( item ) | h}
+        </div>
+    </div>
+
+    <div class="unified-panel-body">
+        <div style="overflow: auto; height: 100%;">
+            <div class="page-body">
+                <div style="float: right">
+                    ${self.render_item_links( item )}
+                </div>
+                <div>
+                    ${self.render_item_header( item )}
+                </div>
+
+                ${self.render_item( item, item_data )}
+            </div>
+        </div>
+    </div>
+</%def>
+
+<%def name="right_panel()">
+    <div class="unified-panel-header" unselectable="on">
+        <div class="unified-panel-header-inner">
+            About this ${get_class_display_name( item.__class__ )}
+        </div>
+    </div>
+
+    <div class="unified-panel-body">
+        <div style="overflow: auto; height: 100%;">
+            <div style="padding: 10px;">
+                <h4>Author</h4>
+
+                <p>${item.history.user.username | h}</p>
+
+                <div><img src="https://secure.gravatar.com/avatar/${h.md5(item.history.user.email)}?d=identicon&s=150"></div>
+
+                ## Page meta.
+
+                ## No links for datasets right now.
+
+                ## Tags.
+                <h4>Tags</h4>
+                ## Community tags.
+                <div>
+                    Community:
+                    ${render_community_tagging_element( tagged_item=item, tag_click_fn='community_tag_click', use_toggle_link=False )}
+                    %if len ( item.tags ) == 0:
+                        none
+                    %endif
+                </div>
+                ## Individual tags.
+                <div>
+                    Yours:
+                    ${render_individual_tagging_element( user=trans.get_user(), tagged_item=item, elt_context='view.mako', use_toggle_link=False, tag_click_fn='community_tag_click' )}
+                </div>
+            </div>
+        </div>
+    </div>
+
+</%def>
diff --git a/templates/webapps/galaxy/dataset/display_application/display.mako b/templates/webapps/galaxy/dataset/display_application/display.mako
new file mode 100644
index 0000000..273ad68
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/display_application/display.mako
@@ -0,0 +1,27 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<% import datetime %>
+<%def name="title()">Display Application: ${display_link.link.display_application.name}  ${display_link.link.name}</%def>
+%for message, status in msg:
+    ${render_msg( message, status )}
+%endfor
+
+%if preparable_steps:
+    <div>
+        <h2>Preparation Status</h2>
+        <table class="colored">
+            <tr><th>Step Name</th><th>Ready</th><th>Dataset Status</th></tr>
+            %for step_dict in preparable_steps:
+                <tr><td>${ step_dict.get( "name" ) | h }</td><td>${ step_dict.get( "ready" ) | h }</td><td>${ step_dict.get( "value" ).state if step_dict.get( "value" ) else 'unknown' | h }</td></tr>
+            %endfor
+        </table>
+    </div>
+%endif
+
+
+%if refresh:
+<%def name="metas()"><meta http-equiv="refresh" content="3" /></%def>
+<br /><br /><p>
+This page will <a href="${trans.request.url}">refresh</a> after 3 seconds, and was last refreshed on ${ datetime.datetime.strftime( datetime.datetime.now(), "%Y-%m-%dT%H:%M:%S.Z" ) | h }.
+</p>
+%endif
diff --git a/templates/webapps/galaxy/dataset/edit_attributes.mako b/templates/webapps/galaxy/dataset/edit_attributes.mako
new file mode 100644
index 0000000..2e9fcd1
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/edit_attributes.mako
@@ -0,0 +1,206 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/refresh_frames.mako" import="handle_refresh_frames" />
+
+<%def name="title()">${_('Edit Dataset Attributes')}</%def>
+
+<%def name="stylesheets()">
+    ${h.css( "base", "autocomplete_tagging" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${handle_refresh_frames()}
+    ${h.js(
+        "libs/jquery/jquery.autocomplete",
+    )}
+</%def>
+
+<%def name="datatype( dataset, datatypes )">
+    <select name="datatype">
+        %for ext in datatypes:
+            %if dataset.ext == ext:
+                <option value="${ext}" selected="yes">${_(ext)}</option>
+            %else:
+                <option value="${ext}">${_(ext)}</option>
+            %endif
+        %endfor
+    </select>
+</%def>
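+## Usage (see the "Change data type" form below): ${datatype( data, datatypes )} renders a
+## <select name="datatype"> with the dataset's current extension pre-selected.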
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+
+<ul class="nav nav-tabs">
+    <li class="active"><a href="#attributes" data-toggle="tab">Attributes</a></li>
+    <li><a href="#convert" data-toggle="tab">Convert Format</a></li>
+    <li><a href="#datatype" data-toggle="tab">Datatype</a></li>
+    <li><a href="#permissions" data-toggle="tab">Permissions</a></li>
+</ul>
+
+<div class="tab-content">
+
+<div class="tab-pane active toolForm" id="attributes">
+    <div class="toolFormTitle">${_('Edit Attributes')}</div>
+    <div class="toolFormBody">
+        <form name="edit_attributes" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post">
+            <div class="form-row">
+                <label>
+                    Name:
+                </label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="${data.get_display_name() | h}" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>
+                    Info:
+                </label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <% info = data.info if data.info else '' %>
+                    <textarea name="info" cols="40" rows="2">${ util.unicodify( info ) | h}</textarea>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            %if trans.get_user() is not None:
+                <div class="form-row">
+                    <label>
+                        Annotation / Notes:
+                    </label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <% annotation = data_annotation if data_annotation else '' %>
+                        <textarea name="annotation" cols="40" rows="2">${annotation | h}</textarea>
+                    </div>
+                    <div style="clear: both"></div>
+                    <div class="toolParamHelp">Add an annotation or notes to a dataset; annotations are available when a history is viewed.</div>
+                </div>
+            %endif
+            %for name, spec in data.metadata.spec.items():
+                %if spec.visible:
+                    <div class="form-row">
+                        <label>
+                            ${spec.desc}:
+                        </label>
+                        <div style="float: left; width: 250px; margin-right: 10px;">
+                            ${data.metadata.get_html_by_name( name, trans=trans )}
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                %endif
+            %endfor
+            <div class="form-row">
+                <input type="submit" name="save" value="${_('Save')}"/>
+            </div>
+        </form>
+        <form name="auto_detect" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post">
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="submit" name="detect" value="${_('Auto-detect')}"/>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    This will inspect the dataset and attempt to correct the above column values if they are not accurate.
+                </div>
+            </div>
+        </form>
+        %if data.missing_meta():
+            <div class="form-row">
+                <div class="errormessagesmall">${_('Required metadata values are missing. Some of these values may not be editable by the user. Selecting "Auto-detect" will attempt to fix these values.')}</div>
+            </div>
+        %endif
+    </div>
+</div>
+
+<div class="tab-pane toolForm" id="convert">
+    <div class="toolFormTitle">${_('Convert to new format')}</div>
+    <div class="toolFormBody">
+        <% converters = data.get_converter_types() %>
+        %if len( converters ) > 0:
+            <form name="convert_data" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post">
+                <div class="form-row">
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <select name="target_type">
+                            %for key, value in converters.items():
+                                <option value="${key}">${value.name}</option>
+                            %endfor
+                        </select>
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        This will create a new dataset with the contents of this dataset converted to a new format.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="convert_data" value="${_('Convert')}"/>
+                </div>
+            </form>
+        %else:
+            No conversions available
+        %endif
+    </div>
+</div>
+
+<div class="tab-pane toolForm" id="datatype">
+    <div class="toolFormTitle">${_('Change data type')}</div>
+    <div class="toolFormBody">
+        %if data.datatype.allow_datatype_change:
+            <form name="change_datatype" action="${h.url_for( controller='dataset', action='edit', dataset_id=dataset_id )}" method="post">
+                <div class="form-row">
+                    <label>
+                        ${_('New Type')}:
+                    </label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        ${datatype( data, datatypes )}
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${_('This will change the datatype of the existing dataset but <i>not</i> modify its contents. Use this if Galaxy has incorrectly guessed the type of your dataset.')}
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="change" value="${_('Save')}"/>
+                </div>
+            </form>
+        %else:
+            <div class="form-row">
+                <div class="warningmessagesmall">${_('Changing the datatype of this dataset is not allowed.')}</div>
+            </div>
+        %endif
+    </div>
+</div>
+<p />
+
+<div class="tab-pane" id="permissions">
+%if trans.app.security_agent.can_manage_dataset( current_user_roles, data.dataset ):
+    <%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+    ${render_permission_form( data.dataset, data.get_display_name(), h.url_for( controller='dataset', action='edit', dataset_id=dataset_id ), all_roles )}
+%elif trans.user:
+    <div class="toolForm">
+        <div class="toolFormTitle">View Permissions</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                %if data.dataset.actions:
+                    <ul>
+                        %for action, roles in trans.app.security_agent.get_permissions( data.dataset ).items():
+                            %if roles:
+                                <li>${action.description}</li>
+                                <ul>
+                                    %for role in roles:
+                                        <li>${role.name}</li>
+                                    %endfor
+                                </ul>
+                            %endif
+                        %endfor
+                    </ul>
+                %else:
+                    <p>This dataset is accessible by everyone (it is public).</p>
+                %endif
+            </div>
+        </div>
+    </div>
+%else:
+    Permissions not available (not logged in)
+%endif
+</div>
diff --git a/templates/webapps/galaxy/dataset/embed.mako b/templates/webapps/galaxy/dataset/embed.mako
new file mode 100644
index 0000000..b31c04e
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/embed.mako
@@ -0,0 +1,22 @@
+<%inherit file="/embed_base.mako"/>
+
+<%def name="render_item_links( dataset )">
+    <a href="${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( dataset.id ), to_ext=dataset.ext )}"
+       title="Save dataset" class="icon-button disk"></a>
+    ## Links for importing and viewing an item.
+    <a href="${h.url_for( controller='/dataset', action='imp', dataset_id=trans.security.encode_id( item.id ) )}"
+       title="Import dataset" class="icon-button import"></a>
+    <a href="${h.url_for( controller='/dataset', action='display_by_username_and_slug', username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) )}"
+       title="Go to dataset" class="icon-button go-to-full-screen"></a>
+    
+</%def>
+
+<%def name="render_summary_content( dataset, data )">
+##    <ul>
+##        <li>Format : ${dataset.extension}
+##        <pre>${dataset.peek}</pre>
+##    </ul>
+</%def>
diff --git a/templates/webapps/galaxy/dataset/errors.mako b/templates/webapps/galaxy/dataset/errors.mako
new file mode 100644
index 0000000..7e7f989
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/errors.mako
@@ -0,0 +1,114 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+    <head>
+        <title>Dataset generation errors</title>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        <link href="/static/style/base.css" rel="stylesheet" type="text/css" />
+        <style>
+            pre
+            {
+                background: white;
+                color: black;
+                border: dotted black 1px;
+                overflow: auto;
+                padding: 10px;
+            }
+        </style>
+
+        <script type="text/javascript">
+            function sendReport( button, form, target, doConfirm )
+            {
+                var doIt = true;
+                if ( doConfirm==true )
+                {
+                    doIt = confirm( 'You are about to submit to a public forum. Do you want to continue?' );
+                }
+                if ( doIt==true )
+                {
+                    form.setAttribute( 'target', target );
+                    for( i=0; i<form.elements.length; i++ )
+                    {
+                        if ( form.elements[i].type == 'submit' )
+                        {
+                            form.elements[i].disabled = true;
+                        }
+
+                    }
+                    var hiddenInput = document.createElement('input');
+                    hiddenInput.type = 'hidden';
+                    hiddenInput.name = button.name;
+                    hiddenInput.value = button.value;
+                    form.appendChild( hiddenInput );
+                    form.submit();
+                    return false;
+                }
+                return false;
+            }
+        </script>
+    </head>
+
+    <body>
+        <h2>Dataset generation errors</h2>
+        <p><b>Dataset ${hda.hid}: ${hda.display_name() | h}</b></p>
+        <% job = hda.creating_job %>
+        %if job:
+
+            %if job.traceback:
+                The Galaxy framework encountered the following error while attempting to run the tool:
+                <pre>${ util.unicodify( job.traceback ) | h}</pre>
+            %endif
+            %if job.stderr or job.info:
+                Tool execution generated the following error message:
+                %if job.stderr:
+                    <pre>${ util.unicodify( job.stderr ) | h}</pre>
+                %elif job.info:
+                    <pre>${ util.unicodify( job.info ) | h}</pre>
+                %endif
+            %else:
+                Tool execution did not generate any error messages.
+            %endif
+            %if job.stdout:
+                The tool produced the following additional output:
+                <pre>${ util.unicodify( job.stdout ) | h}</pre>
+            %endif
+        %else:
+            The tool did not create any additional job / error info.
+        %endif
+        <%
+            if trans.user:
+                user_email = trans.user.email
+            else:
+                user_email = ''
+        %>
+        <h2>Report this error to the local Galaxy administrators</h2>
+        <p>
+            The local Galaxy administrators regularly review errors that occur on the server.
+            However, if you provide additional information (such as what you were trying
+            to do when the error occurred) and a contact e-mail address, we will be better
+            able to investigate your problem and get back to you.
+        </p>
+        <div class="toolForm">
+            <div class="toolFormTitle">Error Report</div>
+            <div class="toolFormBody">
+                <form name="report_error" action="${h.url_for(controller='dataset', action='report_error')}" method="post" >
+                    <input type="hidden" name="id" value="${trans.security.encode_id( hda.id)}" />
+                    <div class="form-row">
+                        <label>Your email</label>
+                        <input type="text" name="email" size="40" value="${user_email|h}" />
+                    </div>
+                    <div class="form-row">
+                        <label>Message</label>
+                        <textarea name="message" rows="10" cols="60"></textarea>
+                    </div>
+                    <div class="form-row">
+                        <input type="submit" name="submit_error_report" value="Report" onclick="return sendReport( this, this.form, '_self' );"/>
+                        %if trans.app.config.biostar_url and trans.app.config.biostar_enable_bug_reports:
+                            <input type="submit" name="submit_error_report" value="Post on Biostar" onclick="return sendReport( this, this.form, '_blank', true );"/>
+                        %endif
+                    </div>
+                </form>
+            </div>
+        </div>
+    </body>
+</html>
diff --git a/templates/webapps/galaxy/dataset/grid.mako b/templates/webapps/galaxy/dataset/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/webapps/galaxy/dataset/item_content.mako b/templates/webapps/galaxy/dataset/item_content.mako
new file mode 100644
index 0000000..bd6dcd4
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/item_content.mako
@@ -0,0 +1,3 @@
+<%namespace file="/dataset/display.mako" import="*" />
+
+${render_item( item, item_data )}
diff --git a/templates/webapps/galaxy/dataset/large_file.mako b/templates/webapps/galaxy/dataset/large_file.mako
new file mode 100644
index 0000000..68148d3
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/large_file.mako
@@ -0,0 +1,14 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/dataset/display.mako" import="render_deleted_data_message" />
+
+${ render_deleted_data_message( data ) }
+
+<div class="warningmessagelarge">
+    This dataset is large and only the first megabyte is shown below.<br />
+    <a href="${h.url_for( controller='dataset', action='display', dataset_id=trans.security.encode_id( data.id ), filename='' )}">Show all</a> |
+    <a href="${h.url_for( controller='dataset', action='display', dataset_id=trans.security.encode_id( data.id ), to_ext=data.ext )}">Save</a>
+</div>
+
+<pre>
+${ util.unicodify( truncated_data ) | h }
+</pre>
diff --git a/templates/webapps/galaxy/dataset/security_common.mako b/templates/webapps/galaxy/dataset/security_common.mako
new file mode 100644
index 0000000..8cd0700
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/security_common.mako
@@ -0,0 +1,132 @@
+<%def name="render_select( current_actions, action_key, action, roles )">
+    <%
+        in_roles = set()
+        for a in current_actions:
+            if a.action == action.action:
+                in_roles.add( a.role )
+        out_roles = filter( lambda x: x not in in_roles, roles )
+    %>
+    <p>
+        <b>${action.action}:</b> ${action.description}
+        %if action == trans.app.security_agent.permitted_actions.DATASET_ACCESS:
+            <br/>
+            NOTE: Users must have every role associated with this dataset in order to access it
+        %endif
+    </p>
+    <div style="width: 100%; white-space: nowrap;">
+        <div style="float: left; width: 50%;">
+            Roles associated:<br />
+            <select name="${action_key}_in" id="${action_key}_in_select" class="in_select" style="max-width: 98%; width: 98%; height: 150px; font-size: 100%;" multiple>
+                %for role in in_roles:
+                    <option value="${role.id}">${role.name}</option>
+                %endfor
+            </select> <br />
+            <div style="width: 98%; text-align: right"><input type="submit" id="${action_key}_remove_button" class="role_remove_button" value=">>"/></div>
+        </div>
+        <div style="width: 50%;">
+            Roles not associated:<br />
+            <select name="${action_key}_out" id="${action_key}_out_select" style="max-width: 98%; width: 98%; height: 150px; font-size: 100%;" multiple>
+                %for role in out_roles:
+                    <option value="${role.id}">${role.name}</option>
+                %endfor
+            </select> <br />
+            <input type="submit" id="${action_key}_add_button" class="role_add_button" value="<<"/>
+        </div>
+    </div>
+</%def>
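+## Note: the buttons above carry ids like "<action_key>_add_button"; the click handlers
+## installed by render_permission_form() below bind to the role_add_button /
+## role_remove_button classes and parse these ids to find the matching selects.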
+
+## Any permission ( e.g., 'DATASET_ACCESS' ) included in the do_not_render param will not be rendered on the page.
+<%def name="render_permission_form( obj, obj_name, form_url, roles, do_not_render=[], all_roles=[] )">
+    <%
+        if isinstance( obj, trans.app.model.User ):
+            current_actions = obj.default_permissions
+            permitted_actions = trans.app.model.Dataset.permitted_actions.items()
+            obj_str = 'user %s' % obj_name
+            obj_type = 'dataset'
+        elif isinstance( obj, trans.app.model.History ):
+            current_actions = obj.default_permissions
+            permitted_actions = trans.app.model.Dataset.permitted_actions.items()
+            obj_str = 'history %s' % obj_name
+            obj_type = 'dataset'
+        elif isinstance( obj, trans.app.model.Dataset ):
+            current_actions = obj.actions
+            permitted_actions = trans.app.model.Dataset.permitted_actions.items()
+            obj_str = obj_name
+            obj_type = 'dataset'
+        elif isinstance( obj, trans.app.model.LibraryDatasetDatasetAssociation ):
+            current_actions = obj.actions + obj.dataset.actions
+            permitted_actions = trans.app.model.Dataset.permitted_actions.items() + trans.app.model.Library.permitted_actions.items()
+            obj_str = obj_name
+            obj_type = 'dataset'
+        elif isinstance( obj, trans.app.model.Library ):
+            current_actions = obj.actions
+            permitted_actions = trans.app.model.Library.permitted_actions.items()
+            obj_str = 'library %s' % obj_name
+            obj_type = 'library'
+        elif isinstance( obj, trans.app.model.LibraryDataset ):
+            current_actions = obj.actions
+            permitted_actions = trans.app.model.Library.permitted_actions.items()
+            obj_str = 'library dataset %s' % obj_name
+            obj_type = 'library'
+        elif isinstance( obj, trans.app.model.LibraryFolder ):
+            current_actions = obj.actions
+            permitted_actions = trans.app.model.Library.permitted_actions.items()
+            obj_str = 'library folder %s' % obj_name
+            obj_type = 'library'
+        else:
+            current_actions = []
+            permitted_actions = {}.items()
+            obj_str = 'unknown object %s' % obj_name
+            obj_type = ''
+    %>
+    <script type="text/javascript">
+        $( document ).ready( function () {
+            $( '.role_add_button' ).click( function() {
+                var action = this.id.substring( 0, this.id.lastIndexOf( '_add_button' ) )
+                var in_select = '#' + action + '_in_select';
+                var out_select = '#' + action + '_out_select';
+                return !$( out_select + ' option:selected' ).remove().appendTo( in_select );
+            });
+            $( '.role_remove_button' ).click( function() {
+                var action = this.id.substring( 0, this.id.lastIndexOf( '_remove_button' ) )
+                var in_select = '#' + action + '_in_select';
+                var out_select = '#' + action + '_out_select';
+                return !$( in_select + ' option:selected' ).remove().appendTo( out_select );
+            });
+            $( 'form#edit_role_associations' ).submit( function() {
+                $( '.in_select option' ).each(function( i ) {
+                    $( this ).attr( "selected", "selected" );
+                });
+            });
+            // Temporary removal of select2 for all permissions forms
+            $('#edit_role_associations select').select2("destroy");
+        });
+    </script>
+    <div class="toolForm">
+        <div class="toolFormTitle">Manage ${obj_type} permissions on ${obj_str | h}</div>
+        <div class="toolFormBody">
+            <form name="edit_role_associations" id="edit_role_associations" action="${form_url}" method="post">
+                <div class="form-row"></div>
+                %for k, v in permitted_actions:
+                    %if k not in do_not_render:
+                        <div class="form-row">
+                            ## LIBRARY_ACCESS is a special case because we need to render all roles instead of
+                            ## roles derived from the roles associated with LIBRARY_ACCESS.
+                            <% render_all_roles = k == 'LIBRARY_ACCESS' %>
+                            %if render_all_roles:
+                                ${render_select( current_actions, k, v, all_roles )}
+                            %else:
+                                ${render_select( current_actions, k, v, roles )}
+                            %endif
+                        </div>
+                    %endif
+                %endfor
+                <div class="form-row">
+                    <input type="submit" name="update_roles_button" value="Save"/>
+                </div>
+            </form>
+        </div>
+    </div>
+    <p/>
+</%def>
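+## Example call (as in edit_attributes.mako); pass e.g. do_not_render=[ 'DATASET_ACCESS' ]
+## to suppress the access selector:
+##   ${render_permission_form( data.dataset, data.get_display_name(),
+##       h.url_for( controller='dataset', action='edit', dataset_id=dataset_id ), all_roles )}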
diff --git a/templates/webapps/galaxy/dataset/tabular_chunked.mako b/templates/webapps/galaxy/dataset/tabular_chunked.mako
new file mode 100644
index 0000000..abc70ec
--- /dev/null
+++ b/templates/webapps/galaxy/dataset/tabular_chunked.mako
@@ -0,0 +1,25 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/dataset/display.mako" import="render_deleted_data_message" />
+
+<%def name="title()">Dataset Display</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+
+    <script type="text/javascript">
+        require([ 'mvc/dataset/data' ], function( data ) {
+            data.createTabularDatasetChunkedView({
+                dataset_config : _.extend( ${ h.dumps( trans.security.encode_dict_ids( dataset.to_dict() ) )}, {
+                        first_data_chunk: ${ chunk }
+                    }),
+                parent_elt : $( 'body' )
+            });
+        });
+    </script>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+</%def>
+
+${ render_deleted_data_message( dataset ) }
diff --git a/templates/webapps/galaxy/external_services/generic_jquery_grid.mako b/templates/webapps/galaxy/external_services/generic_jquery_grid.mako
new file mode 100644
index 0000000..8be21f9
--- /dev/null
+++ b/templates/webapps/galaxy/external_services/generic_jquery_grid.mako
@@ -0,0 +1,59 @@
+<%inherit file="/base.mako"/>
+<%namespace file="json_common.mako" import="display_item" />
+
+<%def name="title()">${param_dict['service_instance'].name}: ${action.label}</%def>
+
+<%def name="display_json_grid_result( headers, rows )">
+    %for row in rows:
+        %for name in headers:
+            <div class="form-row">
+                <label>${name}</label>
+                ${display_item( row.get( name ) )}
+                <div style="clear: both"></div>
+            </div>
+        %endfor
+    %endfor
+</%def>
+
+<%
+    # HACK: we need a better way of displaying jqGrid results here; paging should be optionally available.
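+    # The paged result shape is assumed to be:
+    #   { 'Records': ..., 'Total': ..., 'Page': ..., 'Rows': [ { column: value, ... }, ... ] }
+    # while an unpaged result is simply the list of row dictionaries.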
+    if 'Rows' in result: #paged
+        records = result['Records']
+        total = result['Total']
+        rows = result['Rows']
+        page = result['Page']
+    else:
+        rows = result
+        records = None
+        total = None
+        page = None
+    headers = rows[0].keys() if rows else []
+%>
+
+<div class="toolForm">
+    <div class="toolFormTitle">${action.label} of ${param_dict['service_instance'].name} (${param_dict['service'].name}) on ${param_dict['item'].name}</div>
+    <div class="toolFormBody">
+        %if records:
+            <div class="form-row">
+                <label>Records</label>
+                ${records}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        %if total:
+            <div class="form-row">
+                <label>Total</label>
+                ${total}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        %if page:
+            <div class="form-row">
+                <label>Page</label>
+                ${page}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        ${display_json_grid_result( headers, rows )}
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/external_services/generic_json.mako b/templates/webapps/galaxy/external_services/generic_json.mako
new file mode 100644
index 0000000..1b9da4d
--- /dev/null
+++ b/templates/webapps/galaxy/external_services/generic_json.mako
@@ -0,0 +1,11 @@
+<%inherit file="/base.mako"/>
+<%namespace file="json_common.mako" import="display_item" />
+
+<%def name="title()">${action.label} of ${param_dict['service_instance'].name} (${param_dict['service'].name}) on ${param_dict['item'].name}</%def>
+
+<div class="toolForm">
+    <div class="toolFormTitle">${action.label} of ${param_dict['service_instance'].name} (${param_dict['service'].name}) on ${param_dict['item'].name}</i></div>
+    <div class="toolFormBody">
+        ${display_item( result )}
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/external_services/json_common.mako b/templates/webapps/galaxy/external_services/json_common.mako
new file mode 100644
index 0000000..2340152
--- /dev/null
+++ b/templates/webapps/galaxy/external_services/json_common.mako
@@ -0,0 +1,29 @@
+
+<%def name="display_dict( result_dict )">
+    %for key, value in result_dict.items():
+        <div class="form-row">
+            <label>${key}</label>
+            ${display_item( value )}
+            <div style="clear: both"></div>
+        </div>
+    %endfor
+</%def>
+
+<%def name="display_list( items )">
+    <ul>
+        %for item in items:
+            <li>${display_item( item ) }</li>
+        %endfor
+    </ul>
+</%def>
+
+<%def name="display_item( item )">
+    %if isinstance( item, ( list, tuple ) ):
+        ${display_list( item )}
+    %elif isinstance( item, dict ):
+        ${display_dict( item )}
+    %else:
+        ${item}
+    %endif
+</%def>
diff --git a/templates/webapps/galaxy/galaxy.masthead.mako b/templates/webapps/galaxy/galaxy.masthead.mako
new file mode 100644
index 0000000..3367418
--- /dev/null
+++ b/templates/webapps/galaxy/galaxy.masthead.mako
@@ -0,0 +1,82 @@
+<%namespace file="/galaxy_client_app.mako" import="get_user_dict" />
+
+## masthead head generator
+<%def name="load(active_view = None)">
+    <%
+        from markupsafe import escape
+        ## get configuration
+        masthead_config = {
+            ## inject configuration
+            'brand'                     : app.config.get("brand", ""),
+            'nginx_upload_path'         : app.config.get("nginx_upload_path", h.url_for(controller='api', action='tools')),
+            'use_remote_user'           : app.config.use_remote_user,
+            'remote_user_logout_href'   : app.config.remote_user_logout_href,
+            'enable_cloud_launch'       : app.config.get_bool('enable_cloud_launch', False),
+            'lims_doc_url'              : app.config.get("lims_doc_url", "https://usegalaxy.org/u/rkchak/p/sts"),
+            'biostar_url'               : app.config.biostar_url,
+            'biostar_url_redirect'      : h.url_for( controller='biostar', action='biostar_redirect', qualified=True ),
+            'support_url'               : app.config.get("support_url", "https://wiki.galaxyproject.org/Support"),
+            'search_url'                : app.config.get("search_url", "http://galaxyproject.org/search/usegalaxy/"),
+            'mailing_lists'             : app.config.get("mailing_lists", "https://wiki.galaxyproject.org/MailingLists"),
+            'screencasts_url'           : app.config.get("screencasts_url", "https://vimeo.com/galaxyproject"),
+            'wiki_url'                  : app.config.get("wiki_url", "https://wiki.galaxyproject.org/"),
+            'citation_url'              : app.config.get("citation_url", "https://wiki.galaxyproject.org/CitingGalaxy"),
+            'terms_url'                 : app.config.get("terms_url", ""),
+            'allow_user_creation'       : app.config.allow_user_creation,
+            'logo_url'                  : h.url_for(app.config.get( 'logo_url', '/')),
+            'logo_src'                  : h.url_for( app.config.get( 'logo_src', '../../../static/images/galaxyIcon_noText.png' ) ),
+            'is_admin_user'             : trans.user_is_admin(),
+            'active_view'               : active_view,
+            'ftp_upload_dir'            : app.config.get("ftp_upload_dir",  None),
+            'ftp_upload_site'           : app.config.get("ftp_upload_site",  None),
+            'datatypes_disable_auto'    : app.config.get_bool("datatypes_disable_auto",  False),
+            'user_requests'             : bool( trans.user and ( trans.user.requests or app.security_agent.get_accessible_request_types( trans, trans.user ) ) ),
+            'user_json'                 : get_user_dict()
+        }
+    %>
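+    ## masthead_config above is serialized with h.dumps below and handed to the Masthead view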
+
+    ## load the frame manager
+    <script type="text/javascript">
+        if( !window.Galaxy ){
+            Galaxy = {};
+        }
+
+        // if we're in an iframe, create styles that hide masthead/messagebox, and reset top for panels
+        // note: don't use a link to avoid roundtrip request
+        // note: we can't select here because the page (incl. messagebox, center, etc.) isn't fully rendered
+        // TODO: remove these when we no longer navigate with iframes
+        var in_iframe = window !== window.top;
+        if( in_iframe ){
+            var styleElement = document.createElement( 'style' );
+            document.head.appendChild( styleElement );
+            [
+                '#masthead, #messagebox { display: none; }',
+                '#center, #right, #left { top: 0 !important; }',
+             ].forEach( function( rule ){
+                styleElement.sheet.insertRule( rule, 0 );
+            });
+        }
+        // TODO: ?? move above to base_panels.mako?
+
+        ## load galaxy js-modules
+        require([
+            'layout/masthead',
+            'mvc/ui/ui-modal',
+            'mvc/user/user-model'
+        ], function( Masthead, Modal, user ){
+            if( !Galaxy.user ) {
+                // this doesn't need to wait for the page being readied
+                Galaxy.user = new user.User(${ h.dumps( masthead_config[ 'user_json' ], indent=2 ) });
+            }
+
+            $(function() {
+                if (!Galaxy.masthead) {
+                    Galaxy.masthead = new Masthead.View(${ h.dumps( masthead_config ) });
+                    Galaxy.modal = new Modal.View();
+                    $('#masthead').replaceWith( Galaxy.masthead.render().$el );
+                }
+            });
+        });
+    </script>
+</%def>
\ No newline at end of file
diff --git a/templates/webapps/galaxy/galaxy.panels.mako b/templates/webapps/galaxy/galaxy.panels.mako
new file mode 100644
index 0000000..eaef84c
--- /dev/null
+++ b/templates/webapps/galaxy/galaxy.panels.mako
@@ -0,0 +1,238 @@
+<%namespace name="masthead" file="/webapps/galaxy/galaxy.masthead.mako"/>
+<%namespace name="galaxy_client" file="/galaxy_client_app.mako" />
+
+<!DOCTYPE html>
+
+## inject parameters parsed by controller config dictionary
+<%
+    ## set defaults
+    self.galaxy_config = {
+        ## template options
+        'title'         : 'Galaxy - Data Intensive Biology for Everyone',
+        'master'        : True,
+        'left_panel'    : False,
+        'right_panel'   : False,
+        'message_box'   : False,
+
+        ## root
+        'root'          : h.url_for("/"),
+
+        ## inject app specific configuration
+        'app'           : config['app']
+    }
+
+    ## update configuration
+    self.galaxy_config.update(config)
+%>
+
+<%def name="stylesheets()">
+    ## load default style
+    ${h.css("base")}
+
+    ## modify default style
+    <style type="text/css">
+    #center {
+        %if not self.galaxy_config['left_panel']:
+            left: 0 !important;
+        %endif
+        %if not self.galaxy_config['right_panel']:
+            right: 0 !important;
+        %endif
+    }
+    </style>
+
+    <style type="text/css">
+        %if self.galaxy_config['message_box']:
+            #left, #left-border, #center, #right-border, #right {
+                top: 64px;
+            }
+        %endif
+    </style>
+
+</%def>
+
+<%def name="javascripts()">
+    ## Send errors to Sentry server if configured
+    %if app.config.sentry_dsn:
+        ${h.js( "libs/raven" )}
+        <script>
+            Raven.config('${app.config.sentry_dsn_public}').install();
+            %if trans.user:
+                Raven.setUser( { email: "${trans.user.email | h}" } );
+            %endif
+        </script>
+    %endif
+
+    ## load jscript libraries
+    ${h.js(
+        ## TODO: remove when all libs are required directly in modules
+        'bundled/libs.bundled',
+        'libs/jquery/jquery-ui',
+        'libs/d3',
+        'libs/require',
+    )}
+
+    <script type="text/javascript">
+        // configure require
+        // due to our using both script tags and require, we need to access the same jq in both for plugin retention
+        define( 'jquery', [], function(){ return jQuery; })
+        require.config({
+            baseUrl: "${h.url_for('/static/scripts')}",
+            // cache buster based on templated server (re)start time
+            urlArgs: 'v=${app.server_starttime}',
+            shim: {
+                "libs/underscore": { exports: "_" },
+                "libs/backbone": {
+                    deps: [ 'jquery', 'libs/underscore' ],
+                    exports: "Backbone"
+                },
+                "libs/d3": { exports: "d3" },
+            },
+        });
+
+        // console protection
+        // TODO: Only needed for IE <9 which I believe we dropped
+        window.console = window.console || {
+            log     : function(){},
+            debug   : function(){},
+            info    : function(){},
+            warn    : function(){},
+            error   : function(){},
+            assert  : function(){}
+        };
+
+        // extra configuration global
+        var galaxy_config = ${ h.dumps( self.galaxy_config ) };
+    </script>
+
+</%def>
+
+<%def name="javascript_app()">
+    <script type="text/javascript">
+        // load any app configured
+        define( 'app', function(){
+            var jscript = galaxy_config.app.jscript;
+            if( jscript ){
+                require([ jscript ], function( js_lib ){
+                    $( function(){
+                        // load galaxy module application
+                        var module = new js_lib.GalaxyApp();
+                    });
+                });
+            } else {
+                console.error("'galaxy_config.app.jscript' missing.");
+            }
+        });
+    </script>
+
+    ## load the Galaxy global js var and run 'app' from above
+    ${ galaxy_client.load( app='app' ) }
+</%def>
+
+## default late-load javascripts
+<%def name="late_javascripts()">
+    ## Scripts can be loaded later since they progressively add features to
+    ## the panels, but do not change layout
+    <script type="text/javascript">
+        ## configure left panel
+        %if self.galaxy_config['left_panel']:
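+            // note: assumes the 'panels' module has been exposed globally by the bundled libs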
+            var lp = new panels.LeftPanel({ el: '#left' });
+            force_left_panel = function( x ) { lp.force_panel( x ) };
+        %endif
+
+        ## configure right panel
+        %if self.galaxy_config['right_panel']:
+            var rp = new panels.RightPanel({ el: '#right' });
+            window.handle_minwidth_hint = function( x ) { rp.handle_minwidth_hint( x ) };
+            force_right_panel = function( x ) { rp.force_panel( x ) };
+        %endif
+    </script>
+</%def>
+
+## document
+<html>
+    <head>
+        <meta charset="UTF-8">
+        ## for mobile browsers, don't scale up
+        <meta name = "viewport" content = "maximum-scale=1.0">
+        ## force IE to standards mode, and prefer Google Chrome Frame if the user has already installed it
+        <meta http-equiv="x-ua-compatible" content="ie=edge,chrome=1">
+
+        <title>
+        %if self.galaxy_config['title']:
+            ${self.galaxy_config['title']}
+        %endif
+        </title>
+
+        ${self.stylesheets()}
+        ${self.javascripts()}
+        ${self.javascript_app()}
+    </head>
+
+    <body scroll="no" class="full-content">
+        <div id="everything" style="position: absolute; top: 0; left: 0; width: 100%; height: 100%;">
+            ## background displays first
+            <div id="background"></div>
+
+            ## master header
+            %if self.galaxy_config['master']:
+                <div id="masthead" class="navbar navbar-fixed-top navbar-inverse"></div>
+                ${masthead.load()}
+            %endif
+
+            ## message box
+            %if self.galaxy_config['message_box']:
+                <div id="messagebox" class="panel-message"></div>
+            %endif
+            ## left panel
+            %if self.galaxy_config['left_panel']:
+                <div id="left">
+                    <div class="unified-panel-header" unselectable="on">
+                        <div class="unified-panel-header-inner">
+                            <div class="unified-panel-icons" style="float: right"></div>
+                            <div class="unified-panel-title"></div>
+                        </div>
+                    </div>
+                    <div class="unified-panel-body" style="overflow: auto;"></div>
+                    <div class="unified-panel-footer">
+                        <div class="panel-collapse right"></span></div>
+                        <div class="drag"></div>
+                    </div>
+                </div>
+            %endif
+
+            ## center panel
+            <div id="center">
+                <div class="unified-panel-header" unselectable="on">
+                    <div class="unified-panel-header-inner">
+                        <div class="unified-panel-title" style="float:left;"></div>
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="unified-panel-body"></div>
+            </div>
+
+            ## right panel
+            %if self.galaxy_config['right_panel']:
+                <div id="right">
+                    <div class="unified-panel-header" unselectable="on">
+                        <div class="unified-panel-header-inner">
+                            <div class="unified-panel-icons" style="float: right"></div>
+                            <div class="unified-panel-title"></div>
+                        </div>
+                    </div>
+                    <div class="unified-panel-body" style="overflow: auto;"></div>
+                    <div class="unified-panel-footer">
+                        <div class="panel-collapse right"></span></div>
+                        <div class="drag"></div>
+                    </div>
+                </div>
+            %endif
+        </div>
+        <div id='dd-helper' style="display: none;"></div>
+        ## Scripts can be loaded later since they progressively add features to
+        ## the panels, but do not change layout
+        ${self.late_javascripts()}
+    </body>
+</html>
diff --git a/templates/webapps/galaxy/history/as_xml.mako b/templates/webapps/galaxy/history/as_xml.mako
new file mode 100644
index 0000000..b174615
--- /dev/null
+++ b/templates/webapps/galaxy/history/as_xml.mako
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<history id="${trans.security.encode_id( history.id )}" num="${len(history.datasets)}" name="${history.name}" create="${history.create_time}" update="${history.update_time}">
+    %if show_deleted:
+        %for data in history.activatable_datasets:
+            <data id="${data.id}" hid="${data.hid}" name="${data.name}" state="${data.state}" dbkey="${data.dbkey}">
+                ${_(data.blurb)}
+            </data>
+        %endfor
+    %else:
+        %for data in history.active_datasets:
+            <data id="${data.id}" hid="${data.hid}" name="${data.name}" state="${data.state}" dbkey="${data.dbkey}">
+                ${_(data.blurb)}
+            </data>
+        %endfor
+    %endif
+</history>
diff --git a/templates/webapps/galaxy/history/citations.mako b/templates/webapps/galaxy/history/citations.mako
new file mode 100644
index 0000000..a8e3681
--- /dev/null
+++ b/templates/webapps/galaxy/history/citations.mako
@@ -0,0 +1,36 @@
+<%inherit file="/base.mako"/>
+<%def name="title()">${ history.name } | ${ _( 'Citations' ) }</%def>
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style type="text/css">
+    </style>
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="javascripts()">
+    ${h.js( "libs/bibtex" )}
+<%
+    self.js_app = 'display-citations'
+    history_id = trans.security.encode_id( history.id )
+%>
+    ${parent.javascripts()}
+
+    ## load edit views for each of the hdas
+    <script type="text/javascript">
+        define( 'display-citations', function(){
+            require(["mvc/citation/citation-model", "mvc/citation/citation-view"
+            ], function( citationModel, citationView ){
+                $(function() {
+                    var citations = new citationModel.HistoryCitationCollection();
+                    citations.history_id = "${history_id}";
+                    var citation_list_view = new citationView.CitationListView({ collection: citations } );
+                    citation_list_view.render();
+                    citations.fetch();
+                } );
+            } );
+        });
+    </script>
+</%def>
+<div id="citations">
+</div>
diff --git a/templates/webapps/galaxy/history/display.mako b/templates/webapps/galaxy/history/display.mako
new file mode 100644
index 0000000..cafbbd8
--- /dev/null
+++ b/templates/webapps/galaxy/history/display.mako
@@ -0,0 +1,105 @@
+<%inherit file="/display_base.mako"/>
+
+## Set vars so that there's no need to change the code below.
+<%
+    history = published_item
+    datasets = published_item_data
+%>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style type="text/css">
+
+    </style>
+</%def>
+
+<%def name="render_item_links( history )">
+<%
+    encoded_history_id = history_dict[ 'id' ]
+    switch_url = h.url_for( controller='history', action='switch_to_history', hist_id=encoded_history_id )
+%>
+    ## Needed to override the initial width so that the link is floated left appropriately.
+    %if not user_is_owner:
+    <a class="history-copy-link" title="${_('Make a copy of this history and switch to it')}"
+       href="javascript:void(0)" style="width: 100%" >
+        ${_('Import history')}
+    </a>
+    %else:
+    <a href="${switch_url}" style="width: 100%" title="${_('Make this history your current history')}">
+        ${_('Switch to this history')}
+    </a>
+    %endif
+</%def>
+
+<%def name="render_item_header( item )">
+</%def>
+
+<%def name="render_item( history, datasets )">
+<div id="history-${ history_dict[ 'id' ] }" class="history-panel"></div>
+<script type="text/javascript">
+    var historyJSON  = ${h.dumps( history_dict )};
+
+    $( '.page-body' )
+        .css( 'height', '100%' )
+        .addClass( 'flex-vertical-container' );
+
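+    // require.config() returns the configured require function, so it can be called directly with the dependency list below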
+    require.config({
+        baseUrl : "${h.url_for( '/static/scripts' )}",
+        urlArgs: 'v=${app.server_starttime}'
+    })([
+        'mvc/history/history-view-annotated',
+        'mvc/history/copy-dialog',
+    ], function( panelMod, historyCopyDialog ){
+        // the history module is already in the dependency chain from the panel, so we can pull it into scope here.
+        var HISTORY = require( 'mvc/history/history-model' );
+        var HISTORY_CONTENTS = require( 'mvc/history/history-contents' );
+
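+        // extend the contents collection so summary fetches always include annotation and tags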
+        var HistoryContentsWithAnnotations = HISTORY_CONTENTS.HistoryContents.extend({
+            _buildFetchData : function( options ){
+                options = options || {};
+                if( !options.keys && !options.view ){
+                    options.view = 'summary';
+                    options.keys = 'annotation,tags';
+                }
+                return HISTORY_CONTENTS.HistoryContents.prototype._buildFetchData.call( this, options );
+            }
+        });
+        var HistoryWithAnnotations = HISTORY.History.extend({
+            contentsClass : HistoryContentsWithAnnotations
+        });
+
+        var historyModel = new HistoryWithAnnotations( historyJSON, null, {
+            order           : 'hid-asc',
+        });
+
+        $( '.history-copy-link' ).click( function( ev ){
+            historyCopyDialog( historyModel, { useImport: true, allowAll: false })
+                .done( function(){
+                    var mainWindow = ( window && ( window !== window.parent ) )? window.top : window;
+                    mainWindow.location.href = Galaxy.root;
+                });
+        });
+
+        window.historyView = new panelMod.AnnotatedHistoryView({
+            el              : $( "#history-" + historyJSON.id ),
+            className       : panelMod.AnnotatedHistoryView.prototype.className + ' wide',
+            model           : historyModel,
+            show_deleted    : false,
+            show_hidden     : false,
+        });
+        historyView.trigger( 'loading' );
+        historyModel.fetchContents({ silent: true })
+            .fail( function(){ alert( 'Galaxy history failed to load' ); })
+            .done( function(){
+                historyView.trigger( 'loading-done' );
+                historyView.render();
+            });
+    });
+</script>
+</%def>
diff --git a/templates/webapps/galaxy/history/display_structured.mako b/templates/webapps/galaxy/history/display_structured.mako
new file mode 100644
index 0000000..a963bc4
--- /dev/null
+++ b/templates/webapps/galaxy/history/display_structured.mako
@@ -0,0 +1,317 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/show_params.mako" name="show_params" />
+
+## ----------------------------------------------------------------------------
+<%def name="title()">${ history.name } | ${ _( 'Structure' ) }</%def>
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style type="text/css">
+        body {
+            padding: 5px;
+        }
+        body > .workflow, body > .toolForm, body > .copied-from {
+            /*width           : 80%;*/
+            margin-bottom   : 8px;
+            /*margin-left     : auto;*/
+            /*margin-right    : auto;*/
+        }
+        .bold {
+            font-weight: bold;
+        }
+        .light {
+            font-weight: lighter;
+            color: grey;
+        }
+        .right-aligned {
+            text-align: right;
+        }
+
+        .clickable {
+            cursor: pointer;
+        }
+
+        .workflow {
+            border: solid gray 1px;
+        }
+        .workflow > .header {
+            background: lightgray;
+            padding: 5px 10px;
+        }
+        .workflow > .light {
+            color: gray;
+        }
+        .workflow > .body {
+            border-top: solid gray 1px;
+        }
+        .workflow > .body > .toolForm {
+            border: 0px;
+        }
+
+        div.toolForm {
+            border-width        : 1px;
+            border-radius       : 0px;
+        }
+        .toolForm > .header {
+            background-color: #EBD9B2;
+            padding: 5px 10px;
+        }
+        .workflow div.toolForm:not(:first-child) .header {
+            border-top: 1px solid #D6B161;
+        }
+        div.toolFormTitle {
+            padding: 0px 0px 4px 0px;
+            margin: 0px 0px 4px 0px;
+            border: 0px;
+            background-color: transparent;
+            border-bottom: 1px solid #D6B161;
+        }
+        /* down from EBD9B2 --> 90743A */
+        .toolFormTitle > .light {
+            color: #90743A;
+        }
+        .toolForm em {
+            color: #90743A;
+        }
+
+        .job-inputs {
+            margin: 0px 6px 0px 6px;
+            text-align: left;
+        }
+        .job-inputs td:nth-child(1) {
+            text-align: right;
+            font-weight: lighter;
+            color: #90743A;
+        }
+        .job-inputs td:nth-child(1):after {
+            content : ':'
+        }
+        .job-inputs td:nth-child(2) {
+            padding-left: 4px;
+        }
+        .job-inputs em {
+        }
+
+        .job-inputs-show {
+            float: right;
+        }
+
+        .copied-from {
+            border: 1px solid lightgrey;
+            border-width: 1px 1px 0px 1px;
+        }
+        .copied-from .header {
+            border-bottom: 1px solid lightgrey;
+            padding: 5px;
+        }
+        .copied-from .header .bold, .copied-from .header a {
+            color: #888;
+        }
+
+        .dataset.hda {
+            min-height  : 37px;
+            border-width: 0px 0px 1px 0px;
+        }
+        .toolFormBody > .dataset.hda:last-child {
+            border-bottom-width: 0px;
+        }
+        .dataset.hda:first-child {
+            border-top: 1px solid #D6B161;
+        }
+        .dataset.hda .dataset-title-bar {
+            padding-top: 8px;
+            padding-left: 10px;
+        }
+
+    </style>
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="javascripts()">
+<%
+    from galaxy.managers import hdas
+    hda_serializer = hdas.HDASerializer( trans.app )
+    self.js_app = 'display-structured'
+
+    hda_dicts = []
+    id_hda_dict_map = {}
+    for hda in history.active_datasets:
+        hda_dict = hda_serializer.serialize_to_view( hda, user=trans.user, trans=trans, view='detailed' )
+        id_hda_dict_map[ hda_dict[ 'id' ] ] = hda_dict
+        hda_dicts.append( hda_dict )
+%>
+    ${parent.javascripts()}
+
+    ## load edit views for each of the hdas
+    <script type="text/javascript">
+        define( 'display-structured', function(){
+            require([ 'mvc/history/hda-li-edit', 'mvc/history/hda-model' ], function( hdaEdit, hdaModel ){
+                var hdaJSON = ${ h.dumps( hda_dicts, indent=( 2 if trans.debug else 0 ) ) };
+
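+                // one edit view per serialized HDA, each mounted on its #hda-<id> stub rendered below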
+                window.hdas = hdaJSON.map( function( hda ){
+                    return new hdaEdit.HDAListItemEdit({
+                        model           : new hdaModel.HistoryDatasetAssociation( hda ),
+                        el              : $( '#hda-' + hda.id ),
+                        linkTarget      : '_self',
+                        purgeAllowed    : Galaxy.config.allow_user_dataset_purge,
+                        logger          : Galaxy.logger
+                    }).render( 0 );
+                });
+            });
+        });
+
+        $(function(){
+            $( ".workflow, .tool" ).each( function(){
+                var body = $( this ).children( ".body" );
+                $( this ).children( ".header" ).click( function(){
+                    body.toggle();
+                }).addClass( "clickable" );
+            });
+            //$( ".job-inputs-show" ).click( function( ev ){
+            //    ev.stopPropagation();
+            //    $( this ).parent().siblings( '.job-inputs' ).toggle();
+            //});
+        });
+    </script>
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="render_item( entity, children )">
+<%
+entity_name = entity.__class__.__name__
+if entity_name == "HistoryDatasetAssociation":
+    render_item_hda( entity, children )
+elif entity_name == "Job":
+    render_item_job( entity, children )
+elif entity_name == "WorkflowInvocation":
+    render_item_wf( entity, children )
+%>
+</%def>
+
+## ---------------------------------------------------------------------------- hda
+<%def name="render_item_hda( hda, children )">
+## render hdas as id'd stubs for the js above to fill in later
+
+    %if hda.copied_from_history_dataset_association:
+    ${ render_hda_copied_from_history( hda, children ) }
+
+    %elif hda.copied_from_library_dataset_dataset_association:
+    ${ render_hda_copied_from_library( hda, children ) }
+
+    %else:
+    <% id = trans.security.encode_id( hda.id ) %>
+    <div id="hda-${id}" class="dataset hda state-${hda.state}"></div>
+    %endif
+</%def>
+
+<%def name="render_hda_copied_from_history( hda, children )">
+## wrap an hda in info about the history from where it was copied
+    <% id = trans.security.encode_id( hda.id ) %>
+    <% history_id = trans.security.encode_id( hda.copied_from_history_dataset_association.history_id ) %>
+    <div class="copied-from copied-from-history">
+        <div class="header">
+            <div class="copied-from-dataset">
+                <span class="light">${ _( 'Copied from history dataset' ) + ':' }</span>
+                <span class="bold">${ hda.copied_from_history_dataset_association.name }</span>
+            </div>
+            <div class="copied-from-source">
+                <span class="light">${ _( 'History' ) + ':' }</span>
+                <span class="bold">
+                    <a href="${ h.url_for( controller='history', action='view', id=history_id ) }">
+                        ${ hda.copied_from_history_dataset_association.history.name }
+                    </a>
+                </span>
+            </div>
+        </div>
+        <div id="hda-${id}" class="dataset hda state-${hda.state}"></div>
+    </div>
+</%def>
+
+<%def name="render_hda_copied_from_library( hda, children )">
+## wrap an hda in info about the library from where it was copied
+    <% id = trans.security.encode_id( hda.id ) %>
+    <%
+        folder = hda.copied_from_library_dataset_dataset_association.library_dataset.folder
+        folder_id = 'F' + trans.security.encode_id( folder.id )
+    %>
+    <div class="copied-from copied-from-library">
+        <div class="header">
+            <div class="copied-from-dataset">
+                <span class="light">${ _( 'Copied from library dataset' ) + ':' }</span>
+                <span class="bold">${ hda.copied_from_library_dataset_dataset_association.name }</span>
+            </div>
+            <div class="copied-from-source">
+                <span class="light">${ _( 'Library' ) + ':' }</span>
+                <span class="bold">
+                    <a href="${ h.url_for( controller='library', action='list' ) + '#folders/F' + folder_id }">
+                        ${ folder.name }
+                    </a>
+                </span>
+            </div>
+        </div>
+        <div id="hda-${id}" class="dataset hda state-${hda.state}"></div>
+    </div>
+</%def>
+
+## ---------------------------------------------------------------------------- job (and output hdas)
+<%def name="render_item_job( job, children  )">
+## render a job (as a toolForm) and its children (hdas)
+    <div class="tool toolForm">
+        <%
+            tool = trans.app.toolbox.get_tool( job.tool_id )
+            if tool:
+                tool_name = tool.name
+                tool_desc = tool.description
+            else:
+                tool_name = "Unknown tool with id '%s'" % job.tool_id
+                tool_desc = ''
+
+            params_object = None
+            try:
+                params_object = job.get_param_values( trans.app, ignore_errors=True )
+            except Exception:
+                pass
+        %>
+        <div class="header">
+            <div class="toolFormTitle">
+                <span class="bold">${tool_name}</span>
+                <span class="light">- ${tool_desc}</span>
+                ##<a class="job-inputs-show" href="javascript:void(0)">${ _( "parameters" )}</a>
+            </div>
+            %if tool and params_object:
+            <table class="job-inputs">
+                ${ show_params.inputs_recursive( tool.inputs, params_object, depth=1 ) }
+            </table>
+            %else:
+                <em>(${ _( 'No parameter data available' ) })</em>
+            %endif
+        </div>
+        <div class="body toolFormBody">
+        %for e, c in reversed( children ):
+            ${render_item( e, c )}
+        %endfor
+        </div>
+    </div>
+</%def>
+
+## ---------------------------------------------------------------------------- workflow (w/ jobs, hdas)
+<%def name="render_item_wf( wf, children )">
+## render a workflow and its children (jobs/toolForms)
+    <div class="workflow">
+        <div class="header">
+            <span class="bold">${wf.workflow.name}</span>
+            <span class="light">- Workflow</span>
+        </div>
+        <div class="body">
+        %for e, c in reversed( children ):
+            ${render_item( e, c )}
+        %endfor
+        </div>
+    </div>
+</%def>
+
+## ---------------------------------------------------------------------------- body
+## render all items from a dictionary prov. by history/display_structured
+%for entity, children in items:
+    ${render_item( entity, children )}
+%endfor
diff --git a/templates/webapps/galaxy/history/embed.mako b/templates/webapps/galaxy/history/embed.mako
new file mode 100644
index 0000000..d2ad6c7
--- /dev/null
+++ b/templates/webapps/galaxy/history/embed.mako
@@ -0,0 +1,137 @@
+<%namespace file="/display_common.mako" import="*" />
+
+## Some duplication with embed_base here, needed a way to override the main embedded-item html for histories
+<%
+    encoded_history_id = trans.security.encode_id( item.id )
+    display_href = h.url_for( controller='history', action='display_by_username_and_slug',
+        username=item.user.username, slug=item.slug )
+%>
+<div id="history-${encoded_history_id}" class='embedded-item display history'>
+    <div class='title'>
+        <div style="float: left">
+            <a class="expand-content-btn icon-button toggle-expand" href="${display_href}"
+               title="Show or hide history contents"></a>
+        </div>
+        <div style="float: right;">
+            <a title="Import history" class="icon-button import" href="javascript:void(0)"></a>
+            <a title="View history" class="icon-button go-to-full-screen" href="${display_href}"></a>
+        </div>
+        <h4>
+            <a class="toggle-embed" href="${display_href}" title="Show or hide history contents">
+                Galaxy History | ${get_item_name( item ) | h}
+            </a>
+        </h4>
+        %if hasattr( item, "annotation") and item.annotation:
+        <div class="annotation">${ item.annotation | h }</div>
+        %endif
+    </div>
+    <div class='summary-content'>
+        ## currently, no summary content for history
+    </div>
+    <div class='expanded-content'>
+        <div class='item-content'>
+            <div class='history-panel'></div>
+        </div>
+    </div>
+</div>
+
+<script type="text/javascript">
+// Embedding the same history more than once will confuse DOM ids.
+//  In order to handle this, find this script and cache the previous node (the div above).
+//  (Since we need thisScript to be locally scoped or it will get overwritten, enclose in self-calling function)
+(function(){
+    var scripts = document.getElementsByTagName( 'script' ),
+        // this is executed immediately, so the last script will be this script
+        thisScript = scripts[ scripts.length - 1 ];
+
+    require.config({
+        baseUrl: "${h.url_for( '/static/scripts' )}",
+        urlArgs: 'v=${app.server_starttime}'
+    });
+    require([
+        'mvc/history/history-view-annotated',
+        'mvc/history/copy-dialog'
+    ], function( viewMod, historyCopyDialog ){
+        var $embeddedHistory = $( thisScript ).prev();
+        $embeddedHistory.find( '.item-content' ).addClass( 'flex-vertical-container' );
+
+        $(function(){
+            var HISTORY = require( 'mvc/history/history-model' );
+            var HISTORY_CONTENTS = require( 'mvc/history/history-contents' );
+
+            var HistoryContentsWithAnnotations = HISTORY_CONTENTS.HistoryContents.extend({
+                _buildFetchData : function( options ){
+                    options = options || {};
+                    if( !options.keys && !options.view ){
+                        options.view = 'summary';
+                        options.keys = 'annotation,tags';
+                    }
+                    return HISTORY_CONTENTS.HistoryContents.prototype._buildFetchData.call( this, options );
+                }
+            });
+            var HistoryWithAnnotations = HISTORY.History.extend({
+                contentsClass : HistoryContentsWithAnnotations
+            });
+
+            var historyJSON = ${h.dumps( history_dict )};
+            var historyModel = new HistoryWithAnnotations( historyJSON, null, {
+                order           : 'hid-asc',
+            });
+
+            var historyView = new viewMod.AnnotatedHistoryView({
+                el          : $embeddedHistory.find( ".history-panel" ),
+                className   : viewMod.AnnotatedHistoryView.prototype.className + ' wide',
+                model       : historyModel
+            });
+
+            historyView.trigger( 'loading' );
+            historyModel.fetchContents({ silent: true })
+                .fail( function(){ alert( 'Galaxy history failed to load' ); })
+                .done( function(){
+                    historyView.trigger( 'loading-done' );
+                    historyView.render();
+                });
+
+            function toggleExpanded( ev ){
+                ev.preventDefault();
+                $embeddedHistory.find( '.expand-content-btn' ).toggleClass( 'toggle-expand' ).toggleClass( 'toggle' );
+                $embeddedHistory.find( ".summary-content" ).slideToggle( "fast" );
+                $embeddedHistory.find( ".annotation" ).slideToggle( "fast" );
+                $embeddedHistory.find( ".expanded-content" ).slideToggle( "fast" );
+            }
+
+            $embeddedHistory.find( '.expand-content-btn' ).click( toggleExpanded );
+            $embeddedHistory.find( '.toggle-embed' ).click( toggleExpanded );
+
+            function showConfirmationModal( name ){
+                var body = [
+                        '<div class="donemessagelarge">',
+                            _l( 'History imported' ), ': ', _.escape( historyModel.get( 'name' ) ),
+                        '</div>'
+                    ].join('');
+                Galaxy.modal.show({
+                    title : _l( 'Success!' ),
+                    body : $( body ),
+                    buttons : {
+                        'Return to the published page' : function(){
+                            Galaxy.modal.hide();
+                        },
+                        'Start using the history' : function(){
+                            window.location = Galaxy.root;
+                        },
+                    }
+                });
+                Galaxy.modal.$( '.modal-header' ).hide();
+                Galaxy.modal.$( '.modal-body' ).css( 'padding-bottom', 0 );
+                Galaxy.modal.$( '.modal-footer' ).css({ border : 0, 'padding-top': 0 });
+            }
+
+            $embeddedHistory.find( '.import' ).click( function( ev ){
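+                // autoClose: false keeps the dialog available so the confirmation modal below can take over on success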
+                var dialogOptions = { useImport: true, allowAll: false, autoClose: false };
+                historyCopyDialog( historyModel, dialogOptions ).done( showConfirmationModal );
+            });
+        });
+    });
+})();
+</script>
diff --git a/templates/webapps/galaxy/history/grid.mako b/templates/webapps/galaxy/history/grid.mako
new file mode 100644
index 0000000..256d921
--- /dev/null
+++ b/templates/webapps/galaxy/history/grid.mako
@@ -0,0 +1,41 @@
+<%inherit file="../grid_base.mako"/>
+
+<%namespace file="grid_js.mako" import="copy_dialog_hack" />
+<%def name="load( embedded=False, insert=None )">
+    <!-- saved history grid.mako -->
+    ${parent.load( embedded=embedded, insert=insert )}
+
+    ## define the module required below
+    ${copy_dialog_hack()}
+    <script type="text/javascript">
+        require([ 'copy-dialog-hack' ], function( copyDialogHack ){
+            function findHistoryId( menuButton ){
+                var $link = $( menuButton ).children( '.menubutton-label' );
+                // TODO: ohdearlord. stahp.
+                return ( $link.attr( 'href' ).match( /id=(\w+)/ ) || [] )[1];
+            }
+
+            // wait for page ready and set it all up, do it again when the grid refreshes
+            $(function(){
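+                // 'gridView' is expected as a global set up by the inherited grid_base.mako javascript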
+                if( !gridView ){
+                    console.warn( 'no grid' );
+                    return;
+                }
+
+                function replaceCopyFunction(){
+                    gridView.$( '.popup.menubutton' ).each( function( i ){
+                        copyDialogHack.call( this, i, findHistoryId );
+                    });
+                }
+                replaceCopyFunction();
+
+                var originalInitGrid = gridView.init_grid;
+                gridView.init_grid = function __patched_init_grid( json ){
+                    originalInitGrid.call( gridView, json );
+                    replaceCopyFunction();
+                };
+            });
+        });
+    </script>
+</%def>
diff --git a/templates/webapps/galaxy/history/grid_js.mako b/templates/webapps/galaxy/history/grid_js.mako
new file mode 100644
index 0000000..166048e
--- /dev/null
+++ b/templates/webapps/galaxy/history/grid_js.mako
@@ -0,0 +1,65 @@
+<%def name="copy_dialog_hack()">
+    ## TODO: remove!!!!! when grids are iterated over again
+    <script type="text/javascript">
+        // define a module that provides:
+        // a hack to override the url-based history copy function in the popupmenus
+        // and replace it with a dialog that uses the API instead
+        define( 'copy-dialog-hack', [
+            'mvc/history/history-model',
+            'mvc/history/copy-dialog'
+        ], function( mHistory, historyCopyDialog ){
+
+            // callbacks
+            function tellTheUserItFailed(){
+                // history failed to copy, put the relevant techy crap in the console and alert the user
+                console.error( arguments );
+                alert( "${_('History could not be fetched. Please contact an administrator')}" );
+            }
+            function refreshEverything(){
+                // history was copied
+                // if we're in a frame, check for the parent Galaxy and try to refresh the history
+                if( window.parent && window.parent.Galaxy && window.parent.Galaxy.currHistoryPanel ){
+                    window.parent.Galaxy.currHistoryPanel.loadCurrentHistory();
+                }
+                // in any case reload the save history panel
+                window.location.reload( true );
+            }
+
+            // get the id from the dom somehow (the two doms (saved/shared) are different...)
+            function findHistoryId( $menuButton ){
+                var title = '${grid.title}';
+                if( title === 'Saved Histories' ){
+                    var $link = $menuButton.children( '.menubutton-label' );
+                    // TODO: ohdearlord. stahp.
+                    return ( $link.attr( 'href' ).match( /id=(\w+)/ ) || [] )[1];
+                }
+                // Histories shared with you
+                var $label = $menuButton.children( 'label' );
+                return $label.attr( 'id' );
+            }
+
+            // for each popupmenu, (this == a popup activator button), remove the link and add a click function
+            // that fetches the history and shows a copy dialog for it
+            // pass in a fn for extracting the id from the dom and an (optional) object with dialog options
+            function copyDialogHack( i, historyIdFindFn, dialogOptions ){
+                dialogOptions = dialogOptions || {};
+                var $this = $( this ),
+                    historyId = historyIdFindFn( this ),
+                    menuOptions = $this.data( 'popupmenu' ).options,
+                    copyOption  = menuOptions.filter( function( o ){ return o.html === 'Copy' })[0];
+
+                copyOption.href = 'javascript:void(0)';
+                copyOption.func = function copyOptionClicked( ev ){
+                    ev.preventDefault();
+                    var history = new mHistory.History({ id : historyId });
+                    history.fetch()
+                        .fail( tellTheUserItFailed )
+                        .done( function(){
+                            historyCopyDialog( history, dialogOptions ).done( refreshEverything );
+                        });
+                };
+            }
+            return copyDialogHack;
+        });
+    </script>
+</%def>
diff --git a/templates/webapps/galaxy/history/item_content.mako b/templates/webapps/galaxy/history/item_content.mako
new file mode 100644
index 0000000..a5e786e
--- /dev/null
+++ b/templates/webapps/galaxy/history/item_content.mako
@@ -0,0 +1,3 @@
+<%namespace file="/history/display.mako" import="*" />
+
+${render_item( item, item_data )}
diff --git a/templates/webapps/galaxy/history/list_as_xml.mako b/templates/webapps/galaxy/history/list_as_xml.mako
new file mode 100644
index 0000000..4b39dee
--- /dev/null
+++ b/templates/webapps/galaxy/history/list_as_xml.mako
@@ -0,0 +1,7 @@
+<?xml version="1.0"?>
+<history_ids>
+  %for i, history in enumerate( t.user.histories ):
+    <data id="${trans.security.encode_id( history.id )}" hid="${i+1}" num="${len(history.datasets)}" name="${history.name}" create="${history.create_time}" update="${history.update_time}" >
+    </data>
+  %endfor
+</history_ids>
diff --git a/templates/webapps/galaxy/history/list_published.mako b/templates/webapps/galaxy/history/list_published.mako
new file mode 100644
index 0000000..b56ac73
--- /dev/null
+++ b/templates/webapps/galaxy/history/list_published.mako
@@ -0,0 +1,32 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="shared"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">
+    Galaxy | Published Histories
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        .grid td {
+            min-width: 100px;
+        }
+    </style>
+</%def>
+
+<%def name="center_panel()">
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            <!-- embedded grid -->
+            ${h.to_unicode( embedded_grid )}
+        </div>
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/history/permissions.mako b/templates/webapps/galaxy/history/permissions.mako
new file mode 100644
index 0000000..71d1099
--- /dev/null
+++ b/templates/webapps/galaxy/history/permissions.mako
@@ -0,0 +1,9 @@
+<%inherit file="/base.mako"/>
+<%def name="title()">Change Default Permissions on New Datasets in This History</%def>
+<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+
+%if trans.user:
+    <% history = trans.get_history() %>
+    ${render_permission_form( history, history.name,
+        h.url_for( controller='root', action='history_set_default_permissions' ), trans.user.all_roles() )}
+%endif
diff --git a/templates/webapps/galaxy/history/rename.mako b/templates/webapps/galaxy/history/rename.mako
new file mode 100644
index 0000000..f619192
--- /dev/null
+++ b/templates/webapps/galaxy/history/rename.mako
@@ -0,0 +1,38 @@
+<%inherit file="/base.mako"/>
+<%def name="title()">${_('Rename History')}</%def>
+
+<div class="toolForm">
+  <div class="toolFormTitle">${_('Rename')}</div>
+    <div class="toolFormBody">
+        <form action="${h.url_for( controller='history', action='rename' )}" method="post" >
+            <div class="form-row">
+            <table>
+                <thead>
+                    <tr>
+                        <th>${_('Current Name')}</th>
+                        <th>${_('New Name')}</th>
+                    </tr>
+                </thead>
+                <tbody>
+                %for history in histories:
+                    <tr>
+                        <td>
+                            <input type="hidden" name="id" value="${trans.security.encode_id( history.id )}">
+                            ${history.get_display_name() | h}
+                        </td>
+                        <td>
+                            <input type="text" name="name" value="${history.get_display_name() | h}" size="40">
+                        </td>
+                    </tr>
+                %endfor
+                    <tr>
+                        <td colspan="2">
+                            <input type="submit" name="history_rename_btn" value="${_('Rename Histories')}">
+                        </td>
+                    </tr>
+                </tbody>
+            </table>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/history/share.mako b/templates/webapps/galaxy/history/share.mako
new file mode 100644
index 0000000..5c05adb
--- /dev/null
+++ b/templates/webapps/galaxy/history/share.mako
@@ -0,0 +1,284 @@
+<% _=n_ %>
+<%inherit file="/base.mako"/>
+<%def name="title()">Share histories</%def>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Share ${len( histories)} histories</div>
+    <div class="toolFormBody">
+        %if not can_change and not cannot_change and not no_change_needed:
+            ## We are sharing histories that contain only public datasets
+            <form name='share' id='share' action="${h.url_for( controller='history', action='share' )}" method="post" >
+                <div class="form-title-row"><b>Histories to be shared:</b></div>
+                <div class="form-row" style="padding-left: 2em;">
+                    <table width="100%">
+                        <thead>
+                            <tr>
+                                <th>${_('History Name')}</th>
+                                <th>${_('Number of Datasets')}</th>
+                            </tr>
+                        </thead>
+                        <tbody>
+                            %for history in histories:
+                                <tr>
+                                    <td>
+                                        <input type="hidden" name="id" value="${trans.security.encode_id( history.id )}">
+                                        ${ util.unicodify( history.name ) | h }
+                                    </td>
+                                    <td>
+                                        %if len( history.datasets ) < 1:
+                                            <div class="warningmark">${_('This history contains no data.')}</div>
+                                        %else:
+                                            ${len(history.datasets)}
+                                        %endif
+                                    </td>
+                                </tr>
+                            %endfor
+                        </tbody>
+                    </table>
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <% existing_emails = ','.join([ d.user.email for d in history.users_shared_with ]) %>
+                    <label>Galaxy user emails with which to share histories</label>
+                    <input type="hidden" id="email_select" name="email" value="${ existing_emails }" style="float: left; width: 250px; margin-right: 10px;">
+                    <div class="toolParamHelp" style="clear: both;">
+                        Enter a Galaxy user email address or a comma-separated list of addresses if sharing with multiple users
+                    </div>
+                </div>
+                %if send_to_err:
+                    <div style="clear: both"></div>
+                    <div class="form-row">
+                        <div class="errormessage">${send_to_err}</div>
+                    </div>
+                %endif
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <input type="submit" name="share_button" value="Submit">
+                </div>
+            </form>
+            <script type="text/javascript">
+            // stolen from templates/admin/impersonate.mako
+            /*  This should be ripped out and made generic at some point for the
+             *  various API bindings available, and once the API can filter list
+             *  queries (term, below) */
+
+            var user_id = "${trans.security.encode_id(trans.user.id)}";
+            var history_id = "${trans.security.encode_id( history.id )}";
+
+            function item_to_label(item){
+                var text = "";
+                if(typeof(item.username) === "string" && typeof(item.email) === "string"){
+                    text = item.username + " <" + item.email + ">";
+                }else if(typeof(item.username) === "string"){
+                    text = item.username;
+                }else{
+                    text = item.email;
+                }
+                return text;
+                //return "id:" + item.id + "|e:" + item.email + "|u:" + item.username;
+            }
+
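+            // select2 on the hidden input: initSelection preloads the users already shared with; ajax queries /api/users as the user types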
+            $("#email_select").select2({
+                placeholder: "Select a user",
+                multiple: true,
+                initSelection: function(element, callback) {
+                    var data = [
+                    // Must be here to loop across the users that this has been shared with.
+                    %for i, association in enumerate( history.users_shared_with ):
+                        <% shared_with = association.user %>
+                        {
+                            email: "${ shared_with.email }",
+                            id: "${trans.security.encode_id(shared_with.id)}",
+                            text: item_to_label({"email": "${ shared_with.email }", "username": "${ shared_with.username }" })
+                        }, 
+                    %endfor
+                    ];
+                    callback(data);
+                },
+                tokenSeparators: [',', ' '],
+                // Required for initSelection
+                id: function(object) {
+                    return object.id;
+                },
+                ajax: {
+                    url: "${h.url_for(controller="/api/users", action="index")}",
+                    data: function (term) {
+                        return {
+                            f_any: term,
+                        };
+                    },
+                    dataType: 'json',
+                    quietMillis: 250,
+                    results: function (data) {
+                        var results = [];
+                        // For every user returned by the API call,
+                        $.each(data, function(index, item){
+                            // If they aren't the requesting user, add to the
+                            // list that will populate the select
+                            if(item.id != "${trans.security.encode_id(trans.user.id)}"){
+                                if(item.email !== undefined){
+                                    results.push({
+                                      id: item.id,
+                                      name: item.username,
+                                      text: item_to_label(item),
+                                    });
+                                }
+                            }
+                        });
+                        return {
+                            results: results
+                        };
+                    }
+                },
+                createSearchChoice: function(term, data) {
+                    // Check for a user with a matching email.
+                    var matches = _.filter(data, function(user){
+                        return user.text.indexOf(term) > -1;
+                    });
+                    // If there aren't any users with matching labels, then
+                    // display a "default" entry with whatever text they're entering.
+                    // id is set to term as that is the value that will be submitted.
+                    if(matches.length === 0){
+                        return {id: term, text: term};
+                    }else{
+                        // no extra entry needed
+                    }
+                }
+            });
+            </script>
+        %else:
+            ## We are sharing restricted histories
+            %if no_change_needed or can_change:
+                <form name='share_restricted' id='share_restricted' action="${h.url_for( controller='history', action='share_restricted' )}" method="post">
+                    %if send_to_err:
+                        <div style="clear: both"></div>
+                        <div class="form-row">
+                            <div class="errormessage">${send_to_err}</div>
+                        </div>
+                    %endif
+                    ## Needed for rebuilding dicts
+                    <input type="hidden" name="email" value="${email}" size="40">
+                    %for history in histories:
+                        <input type="hidden" name="id" value="${trans.security.encode_id( history.id )}">
+                    %endfor
+                    %if no_change_needed:
+                        ## no_change_needed looks like: {historyX : [hda, hda], historyY : [hda] }
+                        <div style="clear: both"></div>
+                        <div class="form-row">
+                            <div class="donemessage">
+                                The following datasets can be shared with ${email} with no changes
+                            </div>
+                        </div>
+                        %for history, hdas in no_change_needed.items():
+                            <div class="form-row">
+                                <label>History</label>
+                                ${util.unicodify( history.name )}
+                            </div>
+                            <div style="clear: both"></div>
+                            <div class="form-row">
+                                <label>Datasets</label>
+                            </div>
+                            %for hda in hdas:
+                                <div class="form-row">
+                                    ${util.unicodify( hda.name )}
+                                    %if hda.deleted:
+                                        (deleted)
+                                    %endif
+                                </div>
+                            %endfor
+                        %endfor
+                    %endif
+                    %if can_change:
+                        ## can_change looks like: {historyX : [hda, hda], historyY : [hda] }
+                        <div style="clear: both"></div>
+                        <div class="form-row">
+                            <div class="warningmessage">
+                                The following datasets can be shared with ${email} by updating their permissions
+                            </div>
+                        </div>
+                        %for history, hdas in can_change.items():
+                            <div class="form-row">
+                                <label>History</label>
+                                ${util.unicodify( history.name )}
+                            </div>
+                            <div style="clear: both"></div>
+                            <div class="form-row">
+                                <label>Datasets</label>
+                            </div>
+                            %for hda in hdas:
+                                <div class="form-row">
+                                    ${util.unicodify( hda.name )}
+                                    %if hda.deleted:
+                                        (deleted)
+                                    %endif
+                                </div>
+                            %endfor
+                        %endfor
+                    %endif
+                    %if cannot_change:
+                        ## cannot_change looks like: {historyX : [hda, hda], historyY : [hda] }
+                        <div style="clear: both"></div>
+                        <div class="form-row">
+                            <div class="errormessage">
+                                The following datasets cannot be shared with ${email} because you are not authorized to 
+                                change the permissions on them
+                            </div>
+                        </div>
+                        %for history, hdas in cannot_change.items():
+                            <div class="form-row">
+                                <label>History</label>
+                                ${util.unicodify( history.name )}
+                            </div>
+                            <div style="clear: both"></div>
+                            <div class="form-row">
+                                <label>Datasets</label>
+                            </div>
+                            %for hda in hdas:
+                                <div class="form-row">
+                                    ${util.unicodify( hda.name )}
+                                    %if hda.deleted:
+                                        (deleted)
+                                    %endif
+                                </div>
+                            %endfor
+                        %endfor
+                    %endif
+                    <div class="toolFormTitle"></div>
+                    <div class="form-row">
+                        <label>How would you like to proceed?</label>
+                    </div>
+                    %if can_change:
+                        <div class="form-row">
+                            <input type="radio" name="action" value="public"> Make datasets public so anyone can access them 
+                            %if cannot_change:
+                                (where possible)
+                            %endif
+                        </div>
+                        <div class="form-row">
+                            %if no_change_needed:
+                                <input type="radio" name="action" value="private"> Make datasets private to me and the user(s) with whom I am sharing
+                            %else:
+                                <input type="radio" name="action" value="private" checked> Make datasets private to me and the user(s) with whom I am sharing
+                            %endif
+                            %if cannot_change:
+                                (where possible)
+                            %endif
+                        </div>
+                    %endif
+                    %if no_change_needed:
+                        <div class="form-row">
+                            <input type="radio" name="action" value="share_anyway" checked> Share anyway
+                            %if can_change:
+                                (don't change any permissions)
+                            %endif
+                        </div>
+                    %endif
+                    <div class="form-row">
+                        <input type="submit" name="share_restricted_button" value="Go"><br/>
+                    </div>
+                </form>
+            %endif
+        %endif
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/history/shared_grid.mako b/templates/webapps/galaxy/history/shared_grid.mako
new file mode 100644
index 0000000..fc628be
--- /dev/null
+++ b/templates/webapps/galaxy/history/shared_grid.mako
@@ -0,0 +1,99 @@
+<%inherit file="../grid_base.mako"/>
+
+<%namespace file="grid_js.mako" import="copy_dialog_hack" />
+<%def name="load( embedded=False, insert=None )">
+    <!-- shared history grid.mako -->
+    ${parent.load( embedded=embedded, insert=insert )}
+
+    ## define the module required below
+    ${copy_dialog_hack()}
+    <script type="text/javascript">
+        require([
+            'copy-dialog-hack',
+            'mvc/history/history-model',
+            'utils/ajax-queue'
+        ], function( copyDialogHack, HISTORY, AJAX_QUEUE ){
+
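+            // builds the default name for a copied history, e.g. "Copy of <name> shared by <owner> (active items only)"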
+            function sharedNameFn( name, owner ){
+                return [ _l( 'Copy of' ), name, _l( 'shared by' ), owner, _l( '(active items only)' ) ].join( ' ' );
+            }
+
+            // ---------------------------------------------------------------- insert a popupmenu multiple copy
+            function findHistoryId( menuButton ){
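+                // the encoded history id is stored as the id attribute of the menubutton's label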
+                return $( menuButton ).children( 'label' ).attr( 'id' );
+            }
+
+            // wait for page ready and set it all up
+            $(function(){
+                var $buttons = $( '.popup.menubutton' ).each( function( i ){
+                    // ugh. up to the row, then the text of the last cell
+                    var sharedBy = $( this ).parent().parent().children('td:last-child').text(),
+                        dialogOptions = {
+                            allowAll : false,
+                            nameFn : function( o ){
+                                return sharedNameFn( "'" + _.escape( o.name ) + "'", sharedBy );
+                            }
+                        };
+
+                    copyDialogHack.call( this, i, findHistoryId, dialogOptions );
+                });
+            });
+
+            // ---------------------------------------------------------------- insert a new 'for n histories' copy
+            var setAsCurrent = false,
+                allDatasets = false;
+            // a function that accepts a list of history data, attempts to copy each history via the API, and updates the user
+            function ajaxCopyMultipleHistories( historyData ){
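+                // nothing selected: return an already-resolved promise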
+                if( !historyData.length ){ return jQuery.when(); }
+
+                var queue = new AJAX_QUEUE.AjaxQueue( historyData.map( function( data ){
+                    // create a series of functions that make ajax calls from the data scraped from the grid table
+                    return function(){
+                        var history = new HISTORY.History({ id: data.id });
+                        return history.copy( setAsCurrent, sharedNameFn( data.name, data.owner ), allDatasets );
+                    }
+                }));
+                // quick and dirty progress reporting
+                queue.progress( function( call ){
+                    // show a timed message (like what existed before) that each has been copied
+                    var $msg = $( '<div class="donemessage"/>' )
+                            .text( '"' + call.response.name + '" copied' )
+                            .css( 'margin-bottom', '4px' );
+                    $( '#grid-message' ).append( $msg );
+                    _.delay( function(){ $msg.remove() }, 5000 );
+                });
+                return queue;
+            }
+
+            // wait for page ready and set it all up
+            $(function(){
+                // remove any previous handlers and actual hrefs
+                var $copyMultipleBtn = $( '.operation-button[value="Copy"]' )
+                    .off( 'click' ).attr( 'href', 'javascript:void(0)' );
+
+                // insert a function that gathers all ids, names, and users and sends them to the ajax queue
+                $copyMultipleBtn.click( function( ev ){
+                    ev.preventDefault();
+                    var ids = $( '#grid-table td:first-child input:checked' ).toArray().map( function( input ){
+                        var $tr = $( input ).parent().parent();
+                        return {
+                            id      : input.getAttribute( 'id' ),
+                            owner   : $tr.children( 'td:last-child' ).text(),
+                            name    : $tr.find( 'td:nth-child(2) .popup.menubutton label' ).text(),
+                        };
+                    });
+                    ajaxCopyMultipleHistories( ids )
+                        .fail( function(){
+                            console.error( 'multiple copy failed:', arguments );
+                            $( '#grid-message' ).html([
+                                '<div class="donemessage">',
+                                    _l( 'An error occurred during a copy. Please contact a Galaxy administrator.' ),
+                                '</div>'
+                            ].join( '' ));
+                        });
+                });
+            });
+
+        });
+    </script>
+</%def>
diff --git a/templates/webapps/galaxy/history/structure.mako b/templates/webapps/galaxy/history/structure.mako
new file mode 100644
index 0000000..442f718
--- /dev/null
+++ b/templates/webapps/galaxy/history/structure.mako
@@ -0,0 +1,93 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/galaxy_client_app.mako" name="galaxy_client" />
+
+<%def name="title()">
+    ${ ' | '.join([ history[ 'name' ], _( 'Structure' ) ]) }
+</%def>
+
+## -----------------------------------------------------------------------------
+<%def name="stylesheets()">
+${ parent.stylesheets() }
+
+<style>
+.history-structure {
+}
+
+.history-structure-component {
+    /*position: relative;*/
+    border: 1px solid lightgrey;
+    padding: 8px;
+    margin-bottom: 8px;
+    overflow: auto;
+}
+.history-structure-component.vertical {
+    margin-right: 8px;
+    float:left
+}
+
+.history-structure-component .graph {
+    /*background: rgba( 96, 96, 32, 0.5 );*/
+    transform-origin: top left;
+    position: relative;
+}
+
+.history-structure-component .graph svg {
+    /*background: rgba( 32, 96, 96, 0.5 );*/
+    position: relative;
+}
+
+.history-structure-component .graph > .list-item {
+    width: 300px;
+    max-height: 300px;
+    /* since .graph is position: relative, this top and left will be relative to it */
+    position: absolute;
+    overflow-y: auto;
+    overflow-x: hidden;
+    z-index: 1;
+}
+.history-structure-component .graph > .list-item.highlighted {
+    width: 306px;
+    border: 4px solid black;
+    margin-top: -3px;
+    margin-left: -3px;
+}
+
+.history-structure-component .graph svg .connection {
+    stroke-width: 4px;
+    stroke: lightgrey;
+    stroke-opacity: .6;
+    fill: none;
+}
+.history-structure-component .graph svg .connection.highlighted {
+    stroke: black;
+    stroke-opacity: 1.0;
+}
+</style>
+</%def>
+
+<%def name="javascript_app()">
+<script type="text/javascript">
+define( 'app', function(){
+    require([
+        'mvc/job/job-model',
+        'mvc/history/history-model',
+        'mvc/history/history-structure-view'
+    ], function( JOB, HISTORY, StructureView ){
+
+        var historyModel = new HISTORY.History( bootstrapped.history, bootstrapped.contents );
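+        // expose for debugging in the console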
+        window.historymodel = historyModel;
+        window.jobs = bootstrapped.jobs;
+        window.tools = bootstrapped.tools;
+
+        var structure = new StructureView({
+            model   : historyModel,
+            jobs    : bootstrapped.jobs,
+            tools   : bootstrapped.tools
+        });
+        window.structure = structure;
+        structure.render().$el.appendTo( 'body' );
+    });
+});
+</script>
+${ galaxy_client.load( app='app', historyId=historyId, history=history, contents=contents, jobs=jobs, tools=tools ) }
+</%def>
diff --git a/templates/webapps/galaxy/history/view.mako b/templates/webapps/galaxy/history/view.mako
new file mode 100644
index 0000000..26d68a9
--- /dev/null
+++ b/templates/webapps/galaxy/history/view.mako
@@ -0,0 +1,212 @@
+<%namespace file="/galaxy_client_app.mako" import="get_user_json" />
+
+## ----------------------------------------------------------------------------
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            return '/webapps/galaxy/base_panels.mako'
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+%>
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="body()">
+    ${center_panel()}
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="title()">
+    ${history[ 'name' ]}
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="stylesheets()">
+${parent.stylesheets()}
+<style>
+%if not use_panels:
+    body, html {
+        margin: 0px;
+        padding: 0px;
+    }
+%endif
+#history-view-controls {
+    flex: 0 0 44px;
+    background-color: white;
+    border-bottom: 1px solid #DDD;
+    width: 100%;
+    padding: 8px;
+}
+.history-panel > .controls .title {
+    font-size: 120%;
+}
+.history-panel > .controls .title input {
+    font-size: 100%;
+}
+a.btn {
+    text-decoration: none;
+}
+</style>
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="center_panel()">
+<%
+    structure_url = h.url_for( controller='history', action='display_structured', id=history[ 'id' ] )
+
+    switch_to_url = h.url_for( controller='history', action='switch_to_history', hist_id=history[ 'id' ] )
+
+    show_deleted = context.get( 'show_deleted', None )
+    show_hidden  = context.get( 'show_hidden',  None )
+
+    user_is_owner_json = 'true' if user_is_owner else 'false'
+    show_deleted_json  = h.dumps( show_deleted )
+    show_hidden_json   = h.dumps( show_hidden )
+%>
+
+<div id="history-view-controls" class="clear">
+    <div class="pull-left">
+        %if not history[ 'purged' ]:
+            %if not user_is_owner:
+                <button id="import" class="btn btn-default">${ _( 'Import and start using history' ) }</button>
+            %elif not history_is_current:
+                <button id="switch" class="btn btn-default">${ _( 'Switch to this history' ) }</button>
+            %endif
+            <a id="structure" href="${ structure_url }" class="btn btn-default">${ _( 'Show structure' ) }</a>
+        %endif
+    </div>
+    <div class="pull-right">
+        <button id="toggle-deleted" class="btn btn-default">
+            ${ _( 'Include deleted' ) }
+        </button>
+        <button id="toggle-hidden" class="btn btn-default">
+            ${ _( 'Include hidden' ) }
+        </button>
+    </div>
+</div>
+
+<div id="history-${ history[ 'id' ] }" class="history-panel unified-panel-body" style="overflow: auto;"></div>
+
+<script type="text/javascript">
+
+    // use_panels affects where center_panel() is rendered:
+    //  w/o it renders to the body, w/ it renders to #center - we need to adjust a few things for scrolling to work
+    var hasMasthead  = ${ 'true' if use_panels else 'false' },
+        userIsOwner  = ${ 'true' if user_is_owner else 'false' },
+        isCurrent    = ${ 'true' if history_is_current else 'false' },
+        historyJSON  = ${ h.dumps( history ) },
+        viewToUse   = ( userIsOwner )?
+//TODO: change class names
+            ({ location: 'mvc/history/history-view-edit',  className: 'HistoryViewEdit' }):
+            ({ location: 'mvc/history/history-view',       className: 'HistoryView' });
+
+    require.config({
+        baseUrl : "${h.url_for( '/static/scripts' )}",
+        paths   : {
+            'jquery' : 'libs/jquery/jquery'
+        },
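+        // append the server start time so browsers re-fetch scripts after a server restart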
+        urlArgs: 'v=${app.server_starttime}'
+    })([
+        'mvc/user/user-model',
+        viewToUse.location,
+        'mvc/history/copy-dialog',
+        'utils/localization',
+        'ui/mode-button'
+    ], function( user, viewMod, historyCopyDialog, _l ){
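+        // note: 'ui/mode-button' is loaded only for its jQuery plugin side effect, so it takes no callback argument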
+        +(function setUpBehaviors(){
+            $( '#toggle-deleted' ).modeButton({
+                initialMode : "${ 'showing_deleted' if show_deleted else 'not_showing_deleted' }",
+                modes: [
+                    { mode: 'showing_deleted',      html: _l( 'Exclude deleted' ) },
+                    { mode: 'not_showing_deleted',  html: _l( 'Include deleted' ) }
+                ]
+            });
+
+            $( '#toggle-hidden' ).modeButton({
+                initialMode : "${ 'showing_hidden' if show_hidden else 'not_showing_hidden' }",
+                modes: [
+                    { mode: 'showing_hidden',     html: _l( 'Exclude hidden' ) },
+                    { mode: 'not_showing_hidden', html: _l( 'Include hidden' ) }
+                ]
+            });
+
+            $( '#switch' ).click( function( ev ){
+                //##HACK:ity hack hack
+                //##TODO: remove when out of iframe
+                var hview = Galaxy.currHistoryPanel
+                         || ( top.Galaxy && top.Galaxy.currHistoryPanel )
+                         || null;
+                if( hview ){
+                    hview.switchToHistory( "${ history[ 'id' ] }" );
+                } else {
+                    window.location = "${ switch_to_url }";
+                }
+            });
+
+        })();
+
+        $(function(){
+            if( hasMasthead ){
+                $( '#center' ).addClass( 'flex-vertical-container' );
+            }
+
+            var viewClass = viewMod[ viewToUse.className ],
+                // history module is already in the dependency chain from the view. We can re-scope it here.
+                HISTORY = require( 'mvc/history/history-model' ),
+                historyModel = new HISTORY.History( historyJSON );
+
+            // attach the copy dialog to the import button now that we have a history
+            $( '#import' ).click( function( ev ){
+                historyCopyDialog( historyModel, {
+                    useImport   : true,
+                    // use default datasets option to match the toggle-deleted button
+                    allDatasets : $( '#toggle-deleted' ).modeButton( 'getMode' ).mode === 'showing_deleted',
+                }).done( function(){
+                    if( window === window.parent ){
+                        window.location = Galaxy.root;
+                    } else if( Galaxy.currHistoryPanel ){
+                        Galaxy.currHistoryPanel.loadCurrentHistory();
+                    }
+                });
+            });
+
+            window.historyView = new viewClass({
+                el              : $( "#history-" + historyJSON.id ),
+                className       : viewClass.prototype.className + ' wide',
+                $scrollContainer: hasMasthead? function(){ return this.$el.parent(); } : undefined,
+                model           : historyModel,
+                show_deleted    : ${show_deleted_json},
+                show_hidden     : ${show_hidden_json},
+                purgeAllowed    : Galaxy.config.allow_user_dataset_purge,
+            });
+            historyView.trigger( 'loading' );
+            historyModel.fetchContents({ silent: true })
+                .fail( function(){ alert( 'Galaxy history failed to load' ); })
+                .done( function(){
+                    historyView.trigger( 'loading-done' );
+                    historyView.render();
+                });
+
+            $( '#toggle-deleted' ).on( 'click', function(){
+                historyView.toggleShowDeleted();
+            });
+            $( '#toggle-hidden' ).on( 'click', function(){
+                historyView.toggleShowHidden();
+            });
+        });
+    });
+</script>
+
+</%def>
diff --git a/templates/webapps/galaxy/history/view_multiple.mako b/templates/webapps/galaxy/history/view_multiple.mako
new file mode 100644
index 0000000..ffc7cdb
--- /dev/null
+++ b/templates/webapps/galaxy/history/view_multiple.mako
@@ -0,0 +1,59 @@
+
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+<%namespace file="/galaxy_client_app.mako" name="galaxy_client"/>
+
+<%def name="title()">
+    ${_( 'Histories' )}
+</%def>
+
+## ----------------------------------------------------------------------------
+<%def name="stylesheets()">
+    ${ parent.stylesheets() }
+    <style type="text/css">
+    /* reset */
+    html, body {
+        margin: 0px;
+        padding: 0px;
+    }
+    </style>
+</%def>
+
+
+## ----------------------------------------------------------------------------
+<%def name="center_panel()"></%def>
+
+<%def name="javascript_app()">
+<script type="text/javascript">
+define( 'app', function(){
+    require([
+        'mvc/history/history-model',
+        'mvc/history/multi-panel'
+    ], function( HISTORY_MODEL, MULTI_HISTORY ){
+        $(function(){
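+            // paginated collection: limitOnFirstFetch/limitPerFetch control how many histories load per request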
+            histories = new HISTORY_MODEL.HistoryCollection( [], {
+                includeDeleted      : bootstrapped.includingDeleted,
+                order               : bootstrapped.order,
+                limitOnFirstFetch   : bootstrapped.limit,
+                limitPerFetch       : bootstrapped.limit,
+                // lastFetched         : bootstrapped.limit,
+                currentHistoryId    : bootstrapped.current_history_id,
+            });
+
+            multipanel = new MULTI_HISTORY.MultiPanelColumns({
+                el                          : $( '#center' ).get(0),
+                histories                   : histories,
+            });
+            histories.fetchFirst({ silent: true })
+                .done( function(){
+                    multipanel.createColumns();
+                    multipanel.render( 0 );
+                });
+        });
+    });
+});
+</script>
+${ galaxy_client.load( app='app', current_history_id=current_history_id,
+    includingDeleted=include_deleted_histories, order=order, limit=limit ) }
+##${ galaxy_client.load( app='app', histories=histories,
+##    includingDeleted=include_deleted_histories, order=order, limit=limit ) }
+</%def>
diff --git a/templates/webapps/galaxy/library/common/browse_library.mako b/templates/webapps/galaxy/library/common/browse_library.mako
new file mode 100644
index 0000000..a966838
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/browse_library.mako
@@ -0,0 +1,615 @@
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/library/common/library_item_info.mako" import="render_library_item_info" />
+<%namespace file="/library/common/common.mako" import="render_actions_on_multiple_items" />
+<%namespace file="/library/common/common.mako" import="render_compression_types_help" />
+<%namespace file="/library/common/common.mako" import="common_javascripts" />
+
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            return '/webapps/galaxy/base_panels.mako'
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.active_view="user"
+    self.overlay_visible=False
+    self.has_accessible_datasets = False
+%>
+</%def>
+
+##
+## Override methods from base.mako and base_panels.mako
+##
+<%def name="center_panel()">
+   <div style="overflow: auto; height: 100%;">
+       <div class="page-container" style="padding: 10px;">
+           ${render_content()}
+       </div>
+   </div>
+</%def>
+
+## Render the grid's basic elements. Each of these elements can be subclassed.
+<%def name="body()">
+    ${render_content()}
+</%def>
+
+<%def name="title()">Browse data library</%def>
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jstorage")}
+    ${common_javascripts()}
+    ${self.grid_javascripts()}
+</%def>
+
+<%def name="grid_javascripts()">
+    <script type="text/javascript">
+        var init_libraries = function() {
+            var storage_id = "library-expand-state-${trans.security.encode_id(library.id)}";
+            
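+            // restore the saved expand/collapse state of each folder row (persisted via jStorage)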
+            var restore_folder_state = function() {
+                var state = $.jStorage.get(storage_id);
+                if (state) {
+                    for (var id in state) {
+                        if (state[id] === true) {
+                            var row = $("#" + id),
+                                index = row.parent().children().index(row);
+                            row.addClass("expanded").show();
+                            row.siblings().filter("tr[parent='" + index + "']").show();
+                        }
+                    }
+                }
+            };
+            
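+            // persist each folder row's expanded state, keyed by its row id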
+            var save_folder_state = function() {
+                var state = {};
+                $("tr.folderRow").each( function() {
+                    var folder = $(this);
+                    state[folder.attr("id")] = folder.hasClass("expanded");
+                });
+                $.jStorage.set(storage_id, state);
+            };
+            
+            $("#library-grid").each(function() {
+                var child_of_parent_cache = {};
+                // Recursively fill in children and descendents of each row
+                var process_row = function(q, parents) {
+                    // Find my index
+                    var parent = q.parent(),
+                        this_level = child_of_parent_cache[parent] || (child_of_parent_cache[parent] = parent.children());
+                        
+                    var index = this_level.index(q);
+                    // Find my immediate children
+                    var children = $(par_child_dict[index]);
+                    // Recursively handle them
+                    var descendents = children;
+                    children.each( function() {
+                        var child_descendents = process_row( $(this), parents.add(q) );
+                        descendents = descendents.add(child_descendents);
+                    });
+                    // Set up expand / hide link
+                    var expand_fn = function() {
+                        if ( q.hasClass("expanded") ) {
+                            descendents.hide();
+                            descendents.removeClass("expanded");
+                            q.removeClass("expanded");
+                        } else {
+                            children.show();
+                            q.addClass("expanded");
+                        }
+                        save_folder_state();
+                    };
+                    $("." + q.attr("id") + "-click").click(expand_fn);
+                    // Check/uncheck boxes in subfolders.
+                    q.children("td").children("input[type=checkbox]").click( function() {
+                        if ( $(this).is(":checked") ) {
+                            descendents.find("input[type=checkbox]").attr("checked", true);
+                        } else {
+                            descendents.find("input[type=checkbox]").attr("checked", false);
+                            // If you uncheck a lower level checkbox, uncheck the boxes above it
+                            // (since deselecting a child means the parent is not fully selected any more).
+                            parents.children("td").children("input[type=checkbox]").attr("checked", false);
+                        }
+                    });
+                    // return descendents for use by parent
+                    return descendents;
+                };
+                
+                // Initialize dict[parent_id] = rows_which_have_that_parent_id_as_parent_attr
+                var par_child_dict = {},
+                    no_parent = [];
+                
+                $(this).find("tbody tr").each( function() {
+                    if ( $(this).attr("parent")) {
+                        var parent = $(this).attr("parent");
+                        if (par_child_dict[parent] !== undefined) {
+                            par_child_dict[parent].push(this);
+                        } else {
+                            par_child_dict[parent] = [this];
+                        }
+                    } else {
+                        no_parent.push(this);
+                    }                        
+                });
+                
+                $(no_parent).each( function() {
+                    var descendents = process_row( $(this), $([]) );
+                    descendents.hide();
+                });
+            });
+            
+            restore_folder_state();
+        };
+        $(function() {
+            init_libraries();
+        });
+        
+        // Looks for changes in dataset state using an async request. Keeps
+        // calling itself (via setTimeout) until all datasets are in a terminal
+        // state.
+        var updater = function ( tracked_datasets ) {
+            // Check if there are any items left to track
+            var empty = true;
+            for ( var i in tracked_datasets ) {
+                empty = false;
+                break;
+            }
+            if ( ! empty ) {
+                setTimeout( function() { updater_callback( tracked_datasets ) }, 3000 );
+            }
+        };
+        var updater_callback = function ( tracked_datasets ) {
+            // Build request data
+            var ids = [];
+            var states = [];
+            $.each( tracked_datasets, function ( id, state ) {
+                ids.push( id );
+                states.push( state );
+            });
+            // Make ajax call
+            $.ajax( {
+                type: "POST",
+                url: "${h.url_for( controller='library_common', action='library_item_updates' )}",
+                dataType: "json",
+                data: { ids: ids.join( "," ), states: states.join( "," ) },
+                success : function ( data ) {
+                    $.each( data, function( id, val ) {
+                        // Replace HTML
+                        var cell = $("#libraryItem-" + id).find("#libraryItemInfo");
+                        cell.html( val.html );
+                        // If new state was terminal, stop tracking
+                        if (( val.state == "ok") || ( val.state == "error") || ( val.state == "empty") || ( val.state == "deleted" ) || ( val.state == "discarded" )) {
+                            delete tracked_datasets[ parseInt(id) ];
+                        } else {
+                            tracked_datasets[ parseInt(id) ] = val.state;
+                        }
+                    });
+                    updater( tracked_datasets ); 
+                },
+                error: function() {
+                    // Just retry, like the old method, should try to be smarter
+                    updater( tracked_datasets );
+                }
+            });
+        };
+    </script>
+</%def>
+
+<%def name="render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, parent, row_counter, tracked_datasets, show_deleted=False, simple=False )">
+    <%
+        ## The received ldda must always be a LibraryDatasetDatasetAssociation object.  The object id passed to methods
+        ## from the drop down menu should be the ldda id to prevent id collision ( which could happen when displaying
+        ## children, which are always lddas ).  We also need to make sure we're displaying the latest version of this
+        ## library_dataset, so we display the attributes from the ldda.
+        
+        from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
+        
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        
+        if ldda == library_dataset.library_dataset_dataset_association:
+            current_version = True
+            if is_admin:
+                can_modify = can_manage = True
+            elif cntrller in [ 'library', 'requests' ]:
+                can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, library_dataset )
+                can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, library_dataset )
+            else:
+                can_modify = can_manage = False
+        else:
+            current_version = False
+        if current_version and ldda.state not in ( 'ok', 'error', 'empty', 'deleted', 'discarded' ):
+            tracked_datasets[ldda.id] = ldda.state
+        info_association, inherited = ldda.get_info_association( restrict=True )
+        form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+    %>
+    %if current_version and ( not ldda.library_dataset.deleted or show_deleted ):
+        <tr class="datasetRow"
+            %if parent is not None:
+                parent="${parent | h}"
+            %endif
+            id="libraryItem-${ trans.security.encode_id( ldda.id ) | h}">
+            <td style="padding-left: ${pad+20}px;">
+                <input style="float: left;" type="checkbox" name="ldda_ids" id="${trans.security.encode_id( ldda.id ) | h}" value="${trans.security.encode_id( ldda.id ) | h}"
+                %if selected:
+                    checked="checked"
+                %endif
+                />
+                %if simple:
+                    <label for="${trans.security.encode_id( ldda.id ) | h}">${ util.unicodify( ldda.name ) | h}</label>
+                %else:
+                    <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dataset-${ trans.security.encode_id( ldda.id ) | h}-popup">
+                        <a class="view-info" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">
+                            %if ldda.library_dataset.deleted:
+                                <div class="libraryItem-error">${util.unicodify( ldda.name ) | h}</div>
+                            %else:
+                                ${util.unicodify( ldda.name ) | h}
+                            %endif     
+                        </a>
+                    </div>
+                    %if not library.deleted:
+                        <div popupmenu="dataset-${ trans.security.encode_id( ldda.id ) | h}-popup">
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type='ldda', item_id=trans.security.encode_id( ldda.id ), source_library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Move this dataset</a>
+                            %else:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">View information</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify and not info_association:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify and info_association:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_manage:
+                                %if not trans.app.security_agent.dataset_is_public( ldda.dataset ):
+                                    <a class="action-button" href="${h.url_for( controller='library_common', action='make_library_item_public', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_type='ldda', id=trans.security.encode_id( ldda.dataset.id ), use_panels=use_panels, show_deleted=show_deleted )}">Make public</a>
+                                %endif
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_permissions', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into selected histories</a>
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
+                            %endif
+                            %if can_modify:
+                                %if not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.deleted:
+                                    <a class="action-button" confirm="Click OK to delete dataset '${util.unicodify( ldda.name ) | h}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Delete this dataset</a>
+                                %elif not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.purged and ldda.library_dataset.deleted:
+                                    <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Undelete this dataset</a>
+                                %endif
+                            %endif
+                        </div>
+                    %endif
+                %endif
+            </td>
+            % if not simple:
+                <td id="libraryItemInfo">${render_library_item_info( ldda )}</td>
+                <td>${ldda.extension | h}</td>
+            % endif
+            <td>${util.unicodify(ldda.create_time.strftime( trans.app.config.pretty_datetime_format )) | h}</td>
+            <td>${ldda.get_size( nice_size=True ) | h}</td>
+        </tr>
+        <%
+            my_row = row_counter.count
+            row_counter.increment()
+        %>
+    %endif
+</%def>
+
+<%def name="render_folder( cntrller, folder, folder_pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=False, parent=None, row_counter=None, root_folder=False, simple=False )">
+    <%
+        from galaxy.webapps.galaxy.controllers.library_common import active_folders, active_folders_and_library_datasets, activatable_folders_and_library_datasets, branch_deleted
+        
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        has_accessible_library_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, folder, trans.user, current_user_roles, search_downward=False )
+        
+        if root_folder:
+            pad = folder_pad
+            expander = h.url_for("/static/images/silk/resultset_bottom.png")
+            folder_img = h.url_for("/static/images/silk/folder_page.png")
+        else:
+            pad = folder_pad + 20
+            expander = h.url_for("/static/images/silk/resultset_next.png")
+            folder_img = h.url_for("/static/images/silk/folder.png")
+        if created_ldda_ids:
+            created_ldda_ids = util.listify( created_ldda_ids )
+        if str( folder.id ) in hidden_folder_ids:
+            return ""
+        my_row = None
+        if is_admin:
+            can_add = can_modify = can_manage = True
+        elif cntrller in [ 'library' ]:
+            can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, folder )
+            if not can_access:
+                can_show, folder_ids = \
+                    trans.app.security_agent.show_library_item( trans.user,
+                                                                current_user_roles,
+                                                                folder,
+                                                                [ trans.app.security_agent.permitted_actions.LIBRARY_ADD,
+                                                                  trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
+                                                                  trans.app.security_agent.permitted_actions.LIBRARY_MANAGE ] )
+                if not can_show:
+                    return ""
+            can_add = trans.app.security_agent.can_add_library_item( current_user_roles, folder )
+            can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, folder )
+            can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, folder )
+        else:
+            can_add = can_modify = can_manage = False
+            
+        form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        info_association, inherited = folder.get_info_association( restrict=True )
+    %>
+    %if not root_folder and ( not folder.deleted or show_deleted ):
+        <% encoded_id = trans.security.encode_id( folder.id ) %>
+        <tr id="folder-${encoded_id | h}" class="folderRow libraryOrFolderRow"
+            %if parent is not None:
+                parent="${parent | h}"
+                style="display: none;"
+            %endif
+            >
+            <td style="padding-left: ${folder_pad | h}px;">
+                <input type="checkbox" class="folderCheckbox"/>
+                <span class="expandLink folder-${encoded_id | h}-click">
+                    <div style="float: left; margin-left: 2px;" class="menubutton split popup" id="folder_img-${ 'F' + trans.security.encode_id( folder.id ) }-popup">
+                        <a class="folder-${encoded_id | h}-click" href="javascript:void(0);">
+                            <span class="rowIcon"></span>
+                            %if folder.deleted:
+                                <div class="libraryItem-error">${folder.name | h}</div>
+                            %else:
+                                ${folder.name | h}
+                            %endif
+                        </a>
+                    </div>
+                </span>
+                %if not library.deleted:
+                    <div popupmenu="folder_img-${ 'F' + trans.security.encode_id( folder.id ) }-popup">
+                        %if not branch_deleted( folder ) and can_add:
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add datasets</a>
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add sub-folder</a>
+                        %endif
+                        %if not branch_deleted( folder ):
+                            %if has_accessible_library_datasets:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Select datasets for import into selected histories</a>
+                            %endif
+                            %if can_modify:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type='folder', item_id=trans.security.encode_id( folder.id ), source_library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Move this folder</a>
+                            %else:
+                                <a class="action-button" class="view-info" href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">View information</a>
+                            %endif
+                        %endif
+                        %if not branch_deleted( folder ) and can_modify and not info_association:
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='folder', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
+                        %endif
+                        %if not branch_deleted( folder ) and can_modify and info_association:
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='folder', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='folder', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
+                        %endif
+                        %if not branch_deleted( folder ) and can_manage:
+                           %if not trans.app.security_agent.folder_is_public( folder ):
+                               <a class="action-button" href="${h.url_for( controller='library_common', action='make_library_item_public', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_type='folder', id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Make public</a>
+                           %endif
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='folder_permissions', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+                        %endif
+                        %if can_modify:
+                            %if not library.deleted and not folder.deleted:
+                                <a class="action-button" confirm="Click OK to delete the folder '${folder.name | h}.'" href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( folder.id ), item_type='folder', show_deleted=show_deleted )}">Delete this folder</a>
+                            %elif not library.deleted and folder.deleted and not folder.purged:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( folder.id ), item_type='folder', show_deleted=show_deleted )}">Undelete this folder</a>
+                            %endif
+                        %endif
+                    </div>
+                %endif
+            </td>
+            <td>
+            %if folder.description:
+                ${folder.description | h}
+            %endif
+            </td>
+            <td colspan="3"></td>
+        </tr>
+        <%
+            my_row = row_counter.count
+            row_counter.increment()
+        %>
+    %endif
+    <%
+        if show_deleted:
+            sub_folders, library_datasets = activatable_folders_and_library_datasets( trans, folder )
+        else:
+            sub_folders, library_datasets = active_folders_and_library_datasets( trans, folder )
+    %>
+    %if is_admin:
+        %for sub_folder in sub_folders:
+            ${render_folder( cntrller, sub_folder, pad, created_ldda_ids, library, [], tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False )}
+        %endfor 
+        %for library_dataset in library_datasets:
+            <%
+                ldda = library_dataset.library_dataset_dataset_association
+                if ldda:
+                    # There should always be an ldda, but some users running their own instances have reported that
+                    # some of their LibraryDatasets have no associated lddas
+                    selected = created_ldda_ids and str( ldda.id ) in created_ldda_ids
+            %>
+            %if ldda:
+                ${render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, my_row, row_counter, tracked_datasets, show_deleted=show_deleted )}
+            %endif
+        %endfor
+    %else:
+        %for sub_folder in sub_folders:
+            ${render_folder( cntrller, sub_folder, pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False, simple=simple )}
+        %endfor
+        %for library_dataset in library_datasets:
+            <%
+                ldda = library_dataset.library_dataset_dataset_association
+                if ldda:
+                    # There should always be an ldda, but some users running their own instances have reported that
+                    # some of their LibraryDatasets have no associated lddas
+                    can_access = trans.app.security_agent.can_access_dataset( current_user_roles, ldda.dataset )
+                    selected = created_ldda_ids and str( ldda.id ) in created_ldda_ids
+                else:
+                    can_access = False
+            %>
+            %if can_access:
+                ${render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, my_row, row_counter, tracked_datasets, show_deleted=show_deleted, simple=simple )}
+            %endif
+        %endfor
+    %endif
+</%def>
+
+<%def name="render_content(simple=False)">
+    <%
+        from galaxy import util
+        from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
+        from time import strftime
+        
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        
+        if is_admin:
+            can_add = can_modify = can_manage = True
+        elif cntrller in [ 'library', 'requests' ]:
+            can_add = trans.app.security_agent.can_add_library_item( current_user_roles, library )
+            can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, library )
+            can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, library )
+        else:
+            can_add = can_modify = can_manage = False
+            
+        info_association, inherited = library.get_info_association()
+        form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        
+        self.has_accessible_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, library.root_folder, trans.user, current_user_roles )
+        root_folder_has_accessible_library_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, library.root_folder, trans.user, current_user_roles, search_downward=False )
+        has_accessible_folders = is_admin or trans.app.security_agent.has_accessible_folders( trans, library.root_folder, trans.user, current_user_roles )
+        
+        tracked_datasets = {}
+        
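+        # RowCounter tracks how many table rows have been rendered so far; each
+        # folder/dataset row records this index so that child rows can reference
+        # their parent row via the "parent" attribute.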
+        class RowCounter( object ):
+            def __init__( self ):
+                self.count = 0
+            def increment( self ):
+                self.count += 1
+            def __str__( self ):
+                return str( self.count )
+    %>
+    
+    <h2>Data Library “${library.name | h}”</h2>
+    
+     <ul class="manage-table-actions">
+         %if not library.deleted and ( is_admin or can_add ):
+             <li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add datasets</a></li>
+             <li><a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( library.root_folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add folder</a></li>
+         %endif
+         %if ( ( not library.deleted ) and ( can_modify or can_manage ) ) or ( can_modify and not library.purged ) or ( library.purged ):
+             <li><a class="action-button" id="library-${ trans.security.encode_id( library.id ) }-popup" class="menubutton">Library Actions</a></li>
+             <div popupmenu="library-${ trans.security.encode_id( library.id ) }-popup">
+                 %if not library.deleted:
+                     %if can_modify:
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='library_info', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+                         <a class="action-button" confirm="Click OK to delete the library named '${library.name | h}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library' )}">Delete this data library</a>
+                         %if show_deleted:
+                             <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=False )}">Hide deleted items</a>
+                         %else:
+                             <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=True )}">Show deleted items</a>
+                         %endif
+                     %endif
+                     %if can_modify and not library.info_association:
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='library', form_type=form_type, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
+                     %endif
+                     %if can_modify and info_association:
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='library', form_type=form_type, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='library', form_type=form_type, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
+                     %endif
+                     %if can_manage:
+                         %if not trans.app.security_agent.library_is_public( library, contents=True ):
+                             <a class="action-button" href="${h.url_for( controller='library_common', action='make_library_item_public', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_type='library', id=trans.security.encode_id( library.id ), contents=True, use_panels=use_panels, show_deleted=show_deleted )}">Make public</a>
+                         %endif
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='library_permissions', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+                     %endif
+                     %if root_folder_has_accessible_library_datasets:
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Select datasets for import into selected histories</a>
+                     %endif
+                 %elif can_modify and not library.purged:
+                     <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library', use_panels=use_panels )}">Undelete this data library</a>
+                 %elif library.purged:
+                     <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">This data library has been purged</a>
+                 %endif
+             </div>
+         %endif
+    </ul>
+    
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+
+    %if library.synopsis not in [ '', 'None', None ]:
+        <div class="libraryItemBody">
+            ${library.synopsis | h}
+        </div>
+    %endif
+    
+    %if self.has_accessible_datasets:
+        <form name="act_on_multiple_datasets" action="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}" onSubmit="javascript:return checkForm();" method="post">
+    %endif
+    %if has_accessible_folders:
+        <table cellspacing="0" cellpadding="0" border="0" width="100%" class="grid" id="library-grid">
+            <thead>
+                <tr class="libraryTitle">
+                    <th>
+                        %if self.has_accessible_datasets:
+                            <input type="checkbox" id="checkAll" name=select_all_datasets_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_datasets_checkbox value="true"/>
+                        %endif
+                        Name
+                    </th>
+                    % if not simple:
+                        <th>Message</th>
+                        <th>Data type</th>
+                    % endif
+                    <th>Date uploaded</th>
+                    <th>File size</th>
+                </tr>
+            </thead>
+            <% row_counter = RowCounter() %>
+            %if cntrller in [ 'library', 'requests' ]:
+                ${self.render_folder( 'library', library.root_folder, 0, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True, simple=simple )}
+                %if not library.deleted and self.has_accessible_datasets and not simple:
+                    ${render_actions_on_multiple_items()}
+                %endif
+            %elif ( trans.user_is_admin() and cntrller in [ 'library_admin', 'requests_admin' ] ):
+                ${self.render_folder( 'library_admin', library.root_folder, 0, created_ldda_ids, library, [], tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True )}
+                %if not library.deleted and not show_deleted and self.has_accessible_datasets:
+                    ${render_actions_on_multiple_items()}
+                %endif
+            %endif
+        </table>
+    %endif
+    %if self.has_accessible_datasets:
+        </form>
+    %endif
+     
+    %if tracked_datasets:
+        <script type="text/javascript">
+            // Updater
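+            // The server interpolates tracked_datasets into a JS object literal,
+            // e.g. updater({"42" : "queued"}) (illustrative ids/states).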
+            updater({${ ",".join( [ '"%s" : "%s"' % ( k, v ) for k, v in tracked_datasets.iteritems() ] ) }});
+        </script>
+        <!-- running: do not change this comment, used by TwillTestCase.library_wait -->
+    %endif
+    
+    %if self.has_accessible_datasets and not simple:
+        ${render_compression_types_help( comptypes )}
+    %endif
+    %if not has_accessible_folders:
+        The data library '${library.name | h}' does not contain any datasets that you can access.
+    %endif
+</%def>
diff --git a/templates/webapps/galaxy/library/common/browse_library_opt.mako b/templates/webapps/galaxy/library/common/browse_library_opt.mako
new file mode 100644
index 0000000..553a674
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/browse_library_opt.mako
@@ -0,0 +1,621 @@
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/library/common/library_item_info.mako" import="render_library_item_info" />
+<%namespace file="/library/common/common.mako" import="render_actions_on_multiple_items" />
+<%namespace file="/library/common/common.mako" import="render_compression_types_help" />
+<%namespace file="/library/common/common.mako" import="common_javascripts" />
+
+<%!
+    def inherit(context):
+        if context.get('use_panels'):
+            return '/webapps/galaxy/base_panels.mako'
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.active_view="user"
+    self.overlay_visible=False
+    self.has_accessible_datasets = False
+%>
+</%def>
+
+##
+## Override methods from base.mako and base_panels.mako
+##
+<%def name="center_panel()">
+   <div style="overflow: auto; height: 100%;">
+       <div class="page-container" style="padding: 10px;">
+           ${render_content()}
+       </div>
+   </div>
+</%def>
+
+## Render the grid's basic elements. Each of these elements can be subclassed.
+<%def name="body()">
+    ${render_content()}
+</%def>
+
+<%def name="title()">Browse data library</%def>
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jstorage")}
+    ${common_javascripts()}
+    ${self.grid_javascripts()}
+</%def>
+
+<%def name="grid_javascripts()">
+    <script type="text/javascript">
+        var init_libraries = function() {
+            var storage_id = "library-expand-state-${trans.security.encode_id(library.id)}";
+            
+            var restore_folder_state = function() {
+                var state = $.jStorage.get(storage_id);
+                if (state) {
+                    for (var id in state) {
+                        if (state[id] === true) {
+                            var row = $("#" + id),
+                                index = row.parent().children().index(row);
+                            row.addClass("expanded").show();
+                            row.siblings().filter("tr[parent='" + index + "']").show();
+                        }
+                    }
+                }
+            };
+            
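+            // Persist which folder rows are expanded; the saved object maps row
+            // ids to booleans, e.g. { "folder-abc123": true } (illustrative id).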
+            var save_folder_state = function() {
+                var state = {};
+                $("tr.folderRow").each( function() {
+                    var folder = $(this);
+                    state[folder.attr("id")] = folder.hasClass("expanded");
+                });
+                $.jStorage.set(storage_id, state);
+            };
+            
+            $("#library-grid").each(function() {
+                var child_of_parent_cache = {};
+                // Recursively fill in children and descendents of each row
+                var process_row = function(q, parents) {
+                    // Find my index
+                    var parent = q.parent(),
+                        this_level = child_of_parent_cache[parent] || (child_of_parent_cache[parent] = parent.children());
+                        
+                    var index = this_level.index(q);
+                    // Find my immediate children
+                    var children = $(par_child_dict[index]);
+                    // Recursively handle them
+                    var descendents = children;
+                    children.each( function() {
+                        var child_descendents = process_row( $(this), parents.add(q) );
+                        descendents = descendents.add(child_descendents);
+                    });
+                    // Set up expand / hide link
+                    var expand_fn = function() {
+                        if ( q.hasClass("expanded") ) {
+                            descendents.hide();
+                            descendents.removeClass("expanded");
+                            q.removeClass("expanded");
+                        } else {
+                            children.show();
+                            q.addClass("expanded");
+                        }
+                        save_folder_state();
+                    };
+                    $("." + q.attr("id") + "-click").click(expand_fn);
+                    // Check/uncheck boxes in subfolders.
+                    q.children("td").children("input[type=checkbox]").click( function() {
+                        if ( $(this).is(":checked") ) {
+                            descendents.find("input[type=checkbox]").attr("checked", true);
+                        } else {
+                            descendents.find("input[type=checkbox]").attr("checked", false);
+                            // If you uncheck a lower level checkbox, uncheck the boxes above it
+                            // (since deselecting a child means the parent is not fully selected any more).
+                            parents.children("td").children("input[type=checkbox]").attr("checked", false);
+                        }
+                    });
+                    // return descendents for use by parent
+                    return descendents;
+                };
+                
+                // Initialize dict[parent_id] = rows_which_have_that_parent_id_as_parent_attr
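+                // e.g. par_child_dict["2"] holds every row whose parent attribute
+                // is "2" (illustrative index).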
+                var par_child_dict = {},
+                    no_parent = [];
+                
+                $(this).find("tbody tr").each( function() {
+                    if ( $(this).attr("parent")) {
+                        var parent = $(this).attr("parent");
+                        if (par_child_dict[parent] !== undefined) {
+                            par_child_dict[parent].push(this);
+                        } else {
+                            par_child_dict[parent] = [this];
+                        }
+                    } else {
+                        no_parent.push(this);
+                    }                        
+                });
+                
+                $(no_parent).each( function() {
+                    var descendents = process_row( $(this), $([]) );
+                    descendents.hide();
+                });
+            });
+            
+            restore_folder_state();
+        };
+        $(function() {
+            init_libraries();
+        });
+        
+        // Looks for changes in dataset state using an async request. Keeps
+        // calling itself (via setTimeout) until all datasets are in a terminal
+        // state.
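+        // tracked_datasets maps dataset ids to their last known state, e.g.
+        // { "42": "queued" } (illustrative values).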
+        var updater = function ( tracked_datasets ) {
+            // Check if there are any items left to track
+            var empty = true;
+            for ( var i in tracked_datasets ) {
+                empty = false;
+                break;
+            }
+            if ( ! empty ) {
+                setTimeout( function() { updater_callback( tracked_datasets ) }, 3000 );
+            }
+        };
+        var updater_callback = function ( tracked_datasets ) {
+            // Build request data
+            var ids = [];
+            var states = [];
+            $.each( tracked_datasets, function ( id, state ) {
+                ids.push( id );
+                states.push( state );
+            });
+            // Make ajax call
+            $.ajax( {
+                type: "POST",
+                url: "${h.url_for( controller='library_common', action='library_item_updates' )}",
+                dataType: "json",
+                data: { ids: ids.join( "," ), states: states.join( "," ) },
+                success : function ( data ) {
+                    $.each( data, function( id, val ) {
+                        // Replace HTML
+                        var cell = $("#libraryItem-" + id).find("#libraryItemInfo");
+                        cell.html( val.html );
+                        // If new state was terminal, stop tracking
+                        if (( val.state == "ok") || ( val.state == "error") || ( val.state == "empty") || ( val.state == "deleted" ) || ( val.state == "discarded" )) {
+                            delete tracked_datasets[ parseInt(id) ];
+                        } else {
+                            tracked_datasets[ parseInt(id) ] = val.state;
+                        }
+                    });
+                    updater( tracked_datasets ); 
+                },
+                error: function() {
+                    // Just retry, like the old method, should try to be smarter
+                    updater( tracked_datasets );
+                }
+            });
+        };
+    </script>
+</%def>
+
+<%def name="render_dataset( cntrller, ldda, library_dataset, can_modify, can_manage, selected, library, folder, pad, parent, row_counter, tracked_datasets, show_deleted=False, simple=False )">
+    <%
+        ## The received ldda must always be a LibraryDatasetDatasetAssociation object.  The object id passed to methods
+        ## from the drop down menu should be the ldda id to prevent id collision ( which could happen when displaying
+        ## children, which are always lddas ).  We also need to make sure we're displaying the latest version of this
+        ## library_dataset, so we display the attributes from the ldda.
+        
+        from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
+
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        current_version = ( ldda == library_dataset.library_dataset_dataset_association )
+        if current_version and ldda.state not in ( 'ok', 'error', 'empty', 'deleted', 'discarded' ):
+            tracked_datasets[ldda.id] = ldda.state
+        # SM: This causes a query to be emitted, but it quickly goes down a
+        # rabbit hole of many possible inheritable cases. It may not be 
+        # possible to easily eliminate the extra query from this call.
+        info_association, inherited = ldda.get_info_association( restrict=True )
+        form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+    %>
+    %if current_version and ( not ldda.library_dataset.deleted or show_deleted ):
+        <tr class="datasetRow"
+            %if parent is not None:
+                parent="${parent | h}"
+            %endif
+            id="libraryItem-${ trans.security.encode_id( ldda.id ) | h }">
+            <td style="padding-left: ${pad+20}px;">
+                <input style="float: left;" type="checkbox" name="ldda_ids" id="${trans.security.encode_id( ldda.id ) | h}" value="${ trans.security.encode_id( ldda.id ) | h }"
+                %if selected:
+                    checked="checked"
+                %endif
+                />
+                %if simple:
+                    <label for="${trans.security.encode_id( ldda.id ) | h}">${ util.unicodify( ldda.name ) | h}</label>
+                %else:
+                    <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dataset-${ trans.security.encode_id( ldda.id ) | h }-popup">
+                        <a class="view-info" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">
+                            %if ldda.library_dataset.deleted:
+                                <div class="libraryItem-error">${util.unicodify( ldda.name ) | h}</div>
+                            %else:
+                                ${util.unicodify( ldda.name ) | h}
+                            %endif     
+                        </a>
+                    </div>
+                    %if not library.deleted:
+                        <div popupmenu="dataset-${ trans.security.encode_id( ldda.id ) | h }-popup">
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type='ldda', item_id=trans.security.encode_id( ldda.id ), source_library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Move this dataset</a>
+                            %else:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">View information</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify and not info_association:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify and info_association:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_manage:
+                                %if not trans.app.security_agent.dataset_is_public( ldda.dataset ):
+                                    <a class="action-button" href="${h.url_for( controller='library_common', action='make_library_item_public', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_type='ldda', id=trans.security.encode_id( ldda.dataset.id ), use_panels=use_panels, show_deleted=show_deleted )}">Make public</a>
+                                %endif
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_permissions', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
+                            %endif
+                            %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data():
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into selected histories</a>
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
+                            %endif
+                            %if can_modify:
+                                %if not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.deleted:
+                                    <a class="action-button" confirm="Click OK to delete dataset '${util.unicodify( ldda.name ) | h}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Delete this dataset</a>
+                                %elif not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.purged and ldda.library_dataset.deleted:
+                                    <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Undelete this dataset</a>
+                                %endif
+                            %endif
+                        </div>
+                    %endif
+                %endif
+            </td>
+            % if not simple:
+                <td id="libraryItemInfo">${render_library_item_info( ldda )}</td>
+                <td>${ldda.extension | h}</td>
+            % endif
+            <td>${ldda.create_time.strftime( "%Y-%m-%d" ) | h}</td>
+            <td>${ldda.get_size( nice_size=True ) | h}</td>
+        </tr>
+        <%
+            my_row = row_counter.count
+            row_counter.increment()
+        %>
+    %endif
+</%def>
+
+<%def name="format_delta( tdelta )">
+    <%
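+        # e.g. a delta of 1 second and 250000 microseconds renders as "1.250000"
+        # (illustrative value).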
+        return "%d.%.6d" % ( tdelta.seconds, tdelta.microseconds )
+    %>
+</%def>
+
+<%def name="render_folder( cntrller, folder, folder_pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=False, parent=None, row_counter=None, root_folder=False, simple=False )">
+    <%
+        from galaxy.webapps.galaxy.controllers.library_common import active_folders, active_folders_and_library_datasets, activatable_folders_and_library_datasets, map_library_datasets_to_lddas, branch_deleted, datasets_for_lddas 
+
+        
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        has_accessible_library_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, folder, trans.user, current_user_roles, search_downward=False )
+        
+        if root_folder:
+            pad = folder_pad
+            expander = h.url_for("/static/images/silk/resultset_bottom.png")
+            folder_img = h.url_for("/static/images/silk/folder_page.png")
+        else:
+            pad = folder_pad + 20
+            expander = h.url_for("/static/images/silk/resultset_next.png")
+            folder_img = h.url_for("/static/images/silk/folder.png")
+        # SM: If this is a comma-delimited list of LDDAs, then split them up
+        # into a list. For anything else, turn created_ldda_ids into a single
+        # item list.
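+        # e.g. a value of "3,8" becomes [ '3', '8' ], while a single id '3'
+        # becomes [ '3' ] (illustrative values).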
+        if created_ldda_ids:
+            created_ldda_ids = util.listify( created_ldda_ids )
+        if str( folder.id ) in hidden_folder_ids:
+            return ""
+        my_row = None
+        if is_admin:
+            can_add = can_modify = can_manage = True
+        elif cntrller in [ 'library' ]:
+            can_access, folder_ids = trans.app.security_agent.check_folder_contents( trans.user, current_user_roles, folder )
+            if not can_access:
+                can_show, folder_ids = \
+                    trans.app.security_agent.show_library_item( trans.user,
+                                                                current_user_roles,
+                                                                folder,
+                                                                [ trans.app.security_agent.permitted_actions.LIBRARY_ADD,
+                                                                  trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
+                                                                  trans.app.security_agent.permitted_actions.LIBRARY_MANAGE ] )
+                if not can_show:
+                    return ""
+            can_add = trans.app.security_agent.can_add_library_item( current_user_roles, folder )
+            can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, folder )
+            can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, folder )
+        else:
+            can_add = can_modify = can_manage = False
+            
+        form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        info_association, inherited = folder.get_info_association( restrict=True )
+    %>
+    %if not root_folder and ( not folder.deleted or show_deleted ):
+        <% encoded_id = trans.security.encode_id( folder.id ) %>
+        <tr id="folder-${encoded_id | h}" class="folderRow libraryOrFolderRow"
+            %if parent is not None:
+                parent="${parent | h}"
+                style="display: none;"
+            %endif
+            >
+            <td style="padding-left: ${folder_pad | h}px;">
+                <input type="checkbox" class="folderCheckbox"/>
+                <span class="expandLink folder-${encoded_id | h}-click">
+                    <div style="float: left; margin-left: 2px;" class="menubutton split popup" id="folder_img-${ 'F' + trans.security.encode_id( folder.id ) }-popup">
+                        <a class="folder-${encoded_id | h}-click" href="javascript:void(0);">
+                            <span class="rowIcon"></span>
+                            %if folder.deleted:
+                                <div class="libraryItem-error">${folder.name | h}</div>
+                            %else:
+                                ${folder.name | h}
+                            %endif
+                        </a>
+                    </div>
+                </span>
+                %if not library.deleted:
+                    <div popupmenu="folder_img-${ 'F' + trans.security.encode_id( folder.id ) }-popup">
+                        %if not branch_deleted( folder ) and can_add:
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add datasets</a>
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add sub-folder</a>
+                        %endif
+                        %if not branch_deleted( folder ):
+                            %if has_accessible_library_datasets:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Select datasets for import into selected histories</a>
+                            %endif
+                            %if can_modify:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type='folder', item_id=trans.security.encode_id( folder.id ), source_library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Move this folder</a>
+                            %else:
+                                <a class="action-button" class="view-info" href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">View information</a>
+                            %endif
+                        %endif
+                        %if not branch_deleted( folder ) and can_modify and not info_association:
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='folder', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
+                        %endif
+                        %if not branch_deleted( folder ) and can_modify and info_association:
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='folder', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='folder', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
+                        %endif
+                        %if not branch_deleted( folder ) and can_manage:
+                           %if not trans.app.security_agent.folder_is_public( folder ):
+                               <a class="action-button" href="${h.url_for( controller='library_common', action='make_library_item_public', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_type='folder', id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Make public</a>
+                           %endif
+                            <a class="action-button" href="${h.url_for( controller='library_common', action='folder_permissions', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+                        %endif
+                        %if can_modify:
+                            %if not library.deleted and not folder.deleted:
+                                <a class="action-button" confirm="Click OK to delete the folder '${folder.name | h}.'" href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( folder.id ), item_type='folder', show_deleted=show_deleted )}">Delete this folder</a>
+                            %elif not library.deleted and folder.deleted and not folder.purged:
+                                <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( folder.id ), item_type='folder', show_deleted=show_deleted )}">Undelete this folder</a>
+                            %endif
+                        %endif
+                    </div>
+                %endif
+            </td>
+            <td>
+            %if folder.description:
+                ${folder.description | h}
+            %endif
+            </td>
+            <td colspan="3"></td>
+        </tr>
+        <%
+            my_row = row_counter.count
+            row_counter.increment()
+        %>
+    %endif
+    <%
+        # TODO: If show_deleted is set to True, then nothing is displayed. Why? This wasn't the case
+        # in the past.
+        if show_deleted:
+            sub_folders, library_datasets = activatable_folders_and_library_datasets( trans, folder )
+        else:
+            sub_folders, library_datasets = active_folders_and_library_datasets( trans, folder )
+        # Render all the subfolders: 
+        # TODO: Check permissions first. 
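+        # Note: calling the render_folder/render_dataset defs from inside this
+        # code block writes their output directly into the template buffer, so
+        # no return value needs to be captured here.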
+        for sub_folder in sub_folders:
+            render_folder( cntrller, sub_folder, pad, created_ldda_ids, library, [], tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False )
+
+        # Map LibraryDatasets to LDDAs, then map LDDAs to Datasets.
+        # Then determine which Datasets are accessible and which are not.
+        # For every LibraryDataset, if there's an LDDA for it and it's 
+        # accessible then display it.
+        if library_datasets:
+            lib_dataset_ldda_map = map_library_datasets_to_lddas( trans, library_datasets )
+            dataset_list = datasets_for_lddas( trans, lib_dataset_ldda_map.values() )
+            can_access_datasets = trans.app.security_agent.dataset_permission_map_for_access( trans, current_user_roles, dataset_list )
+            can_modify_datasets = trans.app.security_agent.item_permission_map_for_modify( trans, current_user_roles, dataset_list )
+            can_manage_datasets = trans.app.security_agent.item_permission_map_for_manage( trans, current_user_roles, dataset_list )
+            for library_dataset in library_datasets:
+                ldda = lib_dataset_ldda_map[ library_dataset.id ]
+                if ldda: 
+                    # SMTODO: Fix awkward modify/manage permission checks.
+                    can_access = is_admin or can_access_datasets[ ldda.dataset_id ]
+                    can_modify = is_admin or ( cntrller in ['library', 'requests'] and can_modify_datasets[ ldda.dataset_id ])
+                    can_manage = is_admin or ( cntrller in ['library', 'requests'] and can_manage_datasets[ ldda.dataset_id ])
+                    selected = created_ldda_ids and str( ldda.id ) in created_ldda_ids
+                    if can_access:
+                        render_dataset( cntrller, ldda, library_dataset, can_modify, can_manage, selected, library, folder, pad, my_row, row_counter, tracked_datasets, show_deleted=show_deleted )
+    %>
+</%def>
+
+<%def name="render_content(simple=False)">
+    <%
+        from galaxy import util
+        from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
+        from time import strftime
+        import logging
+        log = logging.getLogger( __name__ )
+        
+        is_admin = trans.user_is_admin() and cntrller == 'library_admin'
+        
+        if is_admin:
+            can_add = can_modify = can_manage = True
+        elif cntrller in [ 'library', 'requests' ]:
+            can_add = trans.app.security_agent.can_add_library_item( current_user_roles, library )
+            can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, library )
+            can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, library )
+        else:
+            can_add = can_modify = can_manage = False
+            
+        info_association, inherited = library.get_info_association()
+        form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+        
+        # SM: These are mostly display-specific; ignore them for now. 
+        # The has_accessible_folders flag determines whether anything can be shown, so use it.
+        self.has_accessible_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, library.root_folder, trans.user, current_user_roles )
+        root_folder_has_accessible_library_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, library.root_folder, trans.user, current_user_roles, search_downward=False )
+        has_accessible_folders = is_admin or trans.app.security_agent.has_accessible_folders( trans, library.root_folder, trans.user, current_user_roles )
+        
+        tracked_datasets = {}
+        
+        class RowCounter( object ):
+            def __init__( self ):
+                self.count = 0
+            def increment( self ):
+                self.count += 1
+            def __str__( self ):
+                return str( self.count )
+    %>
+    
+    <h2>Data Library “${library.name | h}”</h2>
+    
+     <ul class="manage-table-actions">
+         %if not library.deleted and ( is_admin or can_add ):
+             <li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add datasets</a></li>
+             <li><a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( library.root_folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add folder</a></li>
+         %endif
+         %if ( ( not library.deleted ) and ( can_modify or can_manage ) ) or ( can_modify and not library.purged ) or ( library.purged ):
+             <li><a class="action-button" id="library-${ trans.security.encode_id( library.id ) | h}-popup" class="menubutton">Library Actions</a></li>
+             <div popupmenu="library-${ trans.security.encode_id( library.id ) | h}-popup">
+                 %if not library.deleted:
+                     %if can_modify:
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='library_info', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+                         <a class="action-button" confirm="Click OK to delete the library named '${library.name | h}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library' )}">Delete this data library</a>
+                         %if show_deleted:
+                             <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=False )}">Hide deleted items</a>
+                         %else:
+                             <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=True )}">Show deleted items</a>
+                         %endif
+                     %endif
+                     %if can_modify and not library.info_association:
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='library', form_type=form_type, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
+                     %endif
+                     %if can_modify and info_association:
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='library', form_type=form_type, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='library', form_type=form_type, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
+                     %endif
+                     %if can_manage:
+                         %if not trans.app.security_agent.library_is_public( library, contents=True ):
+                             <a class="action-button" href="${h.url_for( controller='library_common', action='make_library_item_public', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_type='library', id=trans.security.encode_id( library.id ), contents=True, use_panels=use_panels, show_deleted=show_deleted )}">Make public</a>
+                         %endif
+                         <a class="action-button" href="${h.url_for( controller='library_common', action='library_permissions', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+                     %endif
+                     %if root_folder_has_accessible_library_datasets:
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Select datasets for import into selected histories</a>
+                     %endif
+                 %elif can_modify and not library.purged:
+                     <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library', use_panels=use_panels )}">Undelete this data library</a>
+                 %elif library.purged:
+                     <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">This data library has been purged</a>
+                 %endif
+             </div>
+         %endif
+    </ul>
+    
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+
+    %if library.synopsis not in [ '', 'None', None ]:
+        <div class="libraryItemBody">
+            ${library.synopsis | h}
+        </div>
+    %endif
+    
+    %if self.has_accessible_datasets:
+        <form name="act_on_multiple_datasets" action="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}" onSubmit="javascript:return checkForm();" method="post">
+    %endif
+    %if has_accessible_folders:
+        <table cellspacing="0" cellpadding="0" border="0" width="100%" class="grid" id="library-grid">
+            <thead>
+                <tr class="libraryTitle">
+                    <th>
+                        %if self.has_accessible_datasets:
+                            <input type="checkbox" id="checkAll" name=select_all_datasets_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_datasets_checkbox value="true"/>
+                        %endif
+                        Name
+                    </th>
+                    % if not simple:
+                        <th>Message</th>
+                        <th>Data type</th>
+                    % endif
+                    <th>Date uploaded</th>
+                    <th>File size</th>
+                </tr>
+            </thead>
+            <% row_counter = RowCounter() %>
+            ## SM: Here is where we render the libraries based on admin/non-admin privileges:
+            %if cntrller in [ 'library', 'requests' ]:
+                ${self.render_folder( 'library', library.root_folder, 0, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True, simple=simple )}
+                ## SM: TODO: Clarify why these conditions gate the multi-item actions.
+                %if not library.deleted and self.has_accessible_datasets and not simple:
+                    ${render_actions_on_multiple_items()}
+                %endif
+            %elif ( trans.user_is_admin() and cntrller in [ 'library_admin', 'requests_admin' ] ):
+                ${self.render_folder( 'library_admin', library.root_folder, 0, created_ldda_ids, library, [], tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True )}
+                ## SM: TODO: Clarify why these conditions gate the multi-item actions.
+                %if not library.deleted and not show_deleted and self.has_accessible_datasets:
+                    ${render_actions_on_multiple_items()}
+                %endif
+            %endif
+        </table>
+    %endif
+    %if self.has_accessible_datasets:
+        </form>
+    %endif
+     
+    %if tracked_datasets:
+        <script type="text/javascript">
+            // Updater
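+            // Renders a JS object literal from tracked_datasets, e.g.
+            // updater({"42" : "queued"}) (illustrative ids/states).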
+            updater({${ ",".join( [ '"%s" : "%s"' % ( k, v ) for k, v in tracked_datasets.iteritems() ] ) }});
+        </script>
+        <!-- running: do not change this comment, used by TwillTestCase.library_wait -->
+    %endif
+    
+    %if self.has_accessible_datasets and not simple:
+        ${render_compression_types_help( comptypes )}
+    %endif
+    %if not has_accessible_folders:
+        The data library '${library.name | h}' does not contain any datasets that you can access.
+    %endif
+</%def>
diff --git a/templates/webapps/galaxy/library/common/common.mako b/templates/webapps/galaxy/library/common/common.mako
new file mode 100644
index 0000000..28b9664
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/common.mako
@@ -0,0 +1,491 @@
+<%namespace file="/common/template_common.mako" import="render_template_field" />
+
+<%def name="common_javascripts()">
+    <script type="text/javascript">
+        function checkAllFields()
+        {
+            // Set every ldda_ids checkbox to match the state of the "select all" box.
+            var chkAll = document.getElementById('checkAll');
+            var checks = document.getElementsByTagName('input');
+            for ( var i = 0; i < checks.length; i++ )
+            {
+                if ( checks[i].name.indexOf( 'ldda_ids' ) != -1 )
+                {
+                    checks[i].checked = chkAll.checked;
+                }
+            }
+        }
+
+        function checkForm() {
+            // Returning false from the form's onSubmit handler cancels submission.
+            if ( $("select#action_on_datasets_select option:selected").text() == "delete" ) {
+                return confirm( "Click OK to delete these datasets?" );
+            }
+            return true;
+        }
+    </script>
+</%def>
+
+<%def name="render_compression_types_help( comptypes )">
+    <div class="libraryItemBody">
+        <p class="infomark">
+            TIP: You can download individual library datasets by selecting "Download this dataset" from the context menu (triangle) next to each dataset's name.
+        </p>
+    </div>
+    %if len( comptypes ) > 1:
+        <div class="libraryItemBody">
+            <p class="infomark">
+                TIP: Several compression options are available for downloading multiple library datasets simultaneously:
+            </p>
+            <ul style="padding-left: 1em; list-style-type: disc;">
+                %if 'gz' in comptypes:
+                    <li>gzip: Recommended for fast network connections
+                        %if trans.app.config.upstream_gzip:
+                            NOTE: The file you receive will be an uncompressed .tar file - this is because the upstream proxy compresses it in transit and your browser decompresses it on the fly.
+                        %endif
+                    </li>
+                %endif
+                %if 'bz2' in comptypes:
+                    <li>bzip2: Recommended for slower network connections (smaller size but takes longer to compress)</li>
+                %endif
+                %if 'zip' in comptypes:
+                    <li>zip: Not recommended but is provided as an option for those who cannot open the above formats</li>
+                %endif
+            </ul>
+        </div>
+    %endif
+</%def>
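+## Illustrative call (hypothetical values): comptypes is a sequence of archive
+## type keys supported by the server, e.g.
+##   ${render_compression_types_help( comptypes=[ 'gz', 'bz2', 'zip' ] )}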
+
+<%def name="render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, space_to_tab, link_data_only, widgets, roles_select_list, history, show_deleted )">
+    <%
+        import os
+        from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField, HistoryField
+    %>
+    %if upload_option in [ 'upload_file', 'upload_directory', 'upload_paths' ]:
+        <div class="toolForm" id="upload_library_dataset_tool_form">
+            <%
+                if upload_option == 'upload_directory':
+                    tool_form_title = 'Upload a directory of files'
+                elif upload_option == 'upload_paths':
+                    tool_form_title = 'Upload files from filesystem paths'
+                else:
+                    tool_form_title = 'Upload files'
+            %>
+            <div class="toolFormTitle">${tool_form_title | h}</div>
+            <div class="toolFormBody">
+                <form name="upload_library_dataset" id="upload_library_dataset" action="${action}" enctype="multipart/form-data" method="post">
+                    <input type="hidden" name="tool_id" value="upload1"/>
+                    <input type="hidden" name="tool_state" value="None"/>
+                    <input type="hidden" name="cntrller" value="${cntrller | h}"/>
+                    <input type="hidden" name="library_id" value="${library_id | h}"/>
+                    <input type="hidden" name="folder_id" value="${folder_id | h}"/>
+                    <input type="hidden" name="show_deleted" value="${show_deleted | h}"/>
+                    %if replace_dataset not in [ None, 'None' ]:
+                        <input type="hidden" name="replace_id" value="${trans.security.encode_id( replace_dataset.id ) | h}"/>
+                        <div class="form-row">
+                            You are currently selecting a new file to replace '<a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=library_id, folder_id=folder_id, id=trans.security.encode_id( replace_dataset.library_dataset_dataset_association.id ) )}">${util.unicodify( replace_dataset.name ) | h}</a>'.
+                            <div style="clear: both"></div>
+                        </div>
+                    %endif
+                    <div class="form-row">
+                        <label>Upload option:</label>
+                        <div class="form-row-input">
+                            ${upload_option_select_list.get_html()}
+                        </div>
+                        <div class="toolParamHelp" style="clear: both;">
+                            Choose upload option (file, directory, filesystem paths, current history).
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                    <div class="form-row">
+                        <label>File Format:</label>
+                        <div class="form-row-input">
+                            <select name="file_type">
+                                <option value="auto" selected>Auto-detect</option>
+                                %for file_format in file_formats:
+                                    <option value="${file_format | h}">${file_format | h}</option>
+                                %endfor
+                            </select>
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                    %if upload_option == 'upload_file':
+                        <div class="form-row">
+                            <input type="hidden" name="async_datasets" value="None"/>
+                            <div style="clear: both"></div>
+                        </div>
+                        <div class="form-row">
+                            <label>File:</label>
+                            <div class="form-row-input">
+                                <input type="file" name="files_0|file_data" galaxy-ajax-upload="true"/>
+                            </div>
+                            <div style="clear: both"></div>
+                        </div>
+                        <div class="form-row">
+                            <label>URL/Text:</label>
+                            <div class="form-row-input">
+                                <textarea name="files_0|url_paste" rows="5" cols="35"></textarea>
+                            </div>
+                            <div class="toolParamHelp" style="clear: both;">
+                                Specify a list of URLs (one per line) or paste the contents of a file.
+                            </div>
+                            <div style="clear: both"></div>
+                        </div>
+                    %elif upload_option == 'upload_directory':
+                        <%
+                            if ( trans.user_is_admin() and cntrller == 'library_admin' ):
+                                import_dir = trans.app.config.library_import_dir
+                            else:
+                                # Directories of files from the Data Libraries view are restricted to a
+                                # sub-directory named the same as the current user's email address
+                                # contained within the configured setting for user_library_import_dir
+                                import_dir = os.path.join( trans.app.config.user_library_import_dir, trans.user.email )
+                        %>
+                        <div class="form-row">
+                            <%
+                                # See if there are any contained sub-directories; if not, the only
+                                # option in the server_dir select list will be the import directory itself.
+                                contains_directories = False
+                                for entry in os.listdir( import_dir ):
+                                    if os.path.isdir( os.path.join( import_dir, entry ) ):
+                                        contains_directories = True
+                                        break
+                            %>
+                            <label>Server Directory</label>
+                            <div class="form-row-input">
+                                <select name="server_dir">
+                                    %if contains_directories:
+                                        <option>None</option>
+                                        %for entry in os.listdir( import_dir ):
+                                            ## Do not include entries that are not directories
+                                            %if os.path.isdir( os.path.join( import_dir, entry ) ):
+                                                <option>${entry | h}</option>
+                                            %endif
+                                        %endfor
+                                    %else:
+                                        %if ( trans.user_is_admin() and cntrller == 'library_admin' ):
+                                            <option>${import_dir | h}</option>
+                                        %else:
+                                            <option>${trans.user.email | h}</option>
+                                        %endif
+                                    %endif
+                                </select>
+                            </div>
+                            <div class="toolParamHelp" style="clear: both;">
+                                %if contains_directories:
+                                    Upload all files in a sub-directory of <strong>${import_dir | h}</strong> on the Galaxy server.
+                                %else:
+                                    Upload all files in <strong>${import_dir | h}</strong> on the Galaxy server.
+                                %endif
+                            </div>
+                            <div style="clear: both"></div>
+                        </div>
+                    %elif upload_option == 'upload_paths':
+                        <div class="form-row">
+                            <label>Paths to upload</label>
+                            <div class="form-row-input">
+                                <textarea name="filesystem_paths" rows="10" cols="35"></textarea>
+                            </div>
+                            <div class="toolParamHelp" style="clear: both;">
+                                Upload all files pasted in the box.  The (recursive) contents of any pasted directories will be added as well.
+                            </div>
+                        </div>
+                        <div class="form-row">
+                            <label>Preserve directory structure?</label>
+                            <div class="form-row-input">
+                                <input type="checkbox" name="preserve_dirs" value="Yes" checked="true" />Yes
+                            </div>
+                            <div class="toolParamHelp" style="clear: both;">
+                                If checked (default), library sub-folders will be used to preserve any subdirectories on the filesystem.
+                                If unchecked, any files in subdirectories on the filesystem will be placed directly in the library folder.
+                            </div>
+                        </div>
+                    %endif
+                    %if upload_option in ( 'upload_directory', 'upload_paths' ):
+                        <div class="form-row">
+                            <label>Copy data into Galaxy?</label>
+                            <div class="form-row-input">
+                                <select name="link_data_only">
+                                    %if not link_data_only or link_data_only == 'copy_files':
+                                        <option value="copy_files" selected>Copy files into Galaxy</option>
+                                        <option value="link_to_files">Link to files without copying into Galaxy</option>
+                                    %else:
+                                        <option value="copy_files">Copy files into Galaxy</option>
+                                        <option value="link_to_files" selected>Link to files without copying into Galaxy</option>
+                                    %endif
+                                </select>
+                            </div>
+                            <div class="toolParamHelp" style="clear: both;">
+                                Normally data uploaded with this tool is copied into Galaxy's configured "file_path"
+                                location, where Galaxy has direct control over the data files.  This may not be desired
+                                (especially for large NGS datasets), so choosing "Link to files without copying into
+                                Galaxy" forces Galaxy to always read the data from its original path.
+                                %if upload_option == 'upload_directory':
+                                    Any symlinks encountered in the uploaded directory will be dereferenced once.  That is,
+                                    Galaxy will point directly to the file that is linked, but no symlinks further down the
+                                    chain will be dereferenced.
+                                %endif
+                            </div>
+                        </div>
+                    %endif
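+                    ## A rough sketch of the one-level symlink dereference described in the
+                    ## help text above (illustrative only, not the upload tool's actual code):
+                    ##   target = os.readlink( path ) if os.path.islink( path ) else path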
+                    <div class="form-row">
+                        <label>
+                            Convert spaces to tabs:
+                        </label>
+                        <div class="form-row-input">
+                            <%
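+                                # The parameter name differs between the upload tool's grouped
+                                # inputs and the directory/path uploads, so the checkbox HTML
+                                # is assembled here.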
+                                if space_to_tab == 'true':
+                                    checked = ' checked'
+                                else:
+                                    checked = ''
+                                if upload_option == 'upload_file':
+                                    name = 'files_0|space_to_tab'
+                                else:
+                                    name = 'space_to_tab'
+                                space2tab = '<input type="checkbox" name="%s" value="true"%s/>Yes' % ( name, checked )
+                            %>
+                            ${space2tab}
+                        </div>
+                        <div class="toolParamHelp" style="clear: both;">
+                            Use this option if you are entering intervals by hand.
+                        </div>
+                    </div>
+                    <div style="clear: both"></div>
+                    <!-- Convert from universal line endings to POSIX line endings. -->
+                    <div class="form-row">
+                        <label>
+                            Convert universal line endings to POSIX line endings:
+                        </label>
+                        <div class="form-row-input">
+                            <%
+                                if to_posix_lines == 'true':
+                                    checked = ' checked'
+                                else:
+                                    checked = ''
+                                if upload_option == 'upload_file':
+                                    name = 'files_0|to_posix_lines'
+                                else:
+                                    name = 'to_posix_lines'
+                                uni2posix = '<input type="checkbox" name="%s" value="true"%s/>Yes' % ( name, checked )
+                            %>
+                            ${uni2posix}
+                        </div>
+                        <div class="toolParamHelp" style="clear: both;">
+                            Use this option if you need your datasets to be converted from universal to POSIX line endings.
+                        </div>
+                    </div>
+                    <div class="form-row">
+                        <label>Genome:</label>
+                        <div class="form-row-input">
+                            <select name="dbkey" last_selected_value="?">
+                                <%
+                                    # move unspecified to the first option and set as default if not last_used_build
+                                    #TODO: remove when we decide on a common dbkey selector widget
+                                    unspecified = ('unspecified (?)', '?')
+                                    if unspecified in dbkeys:
+                                        dbkeys.remove( unspecified )
+                                        dbkeys.insert( 0, unspecified )
+                                    default_selected = last_used_build or '?'
+                                %>
+                                %for dbkey in dbkeys:
+                                    %if dbkey[1] == default_selected:
+                                        <option value="${dbkey[1] | h}" selected>${dbkey[0] | h}</option>
+                                    %else:
+                                        <option value="${dbkey[1] | h}">${dbkey[0] | h}</option>
+                                    %endif
+                                %endfor
+                            </select>
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                    <div class="form-row">
+                        <label>Message:</label>
+                        <div class="form-row-input">
+                            %if ldda_message:
+                                <textarea name="ldda_message" rows="3" cols="35">${ldda_message | h}</textarea>
+                            %else:
+                                <textarea name="ldda_message" rows="3" cols="35"></textarea>
+                            %endif
+                        </div>
+                        <div class="toolParamHelp" style="clear: both;">
+                            This information will be displayed in the "Message" column for this dataset in the data library browser.
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                    %if roles_select_list:
+                        <div class="form-row">
+                            <label>Restrict dataset access to specific roles:</label>
+                            <div class="form-row-input">
+                                ${roles_select_list.get_html()}
+                            </div>
+                            <div class="toolParamHelp" style="clear: both;">
+                                Multi-select list - hold the appropriate key while clicking to select multiple roles.  More restrictions can be applied after the upload is complete.  Selecting no roles makes a dataset public.
+                            </div>
+                        </div>
+                        <div style="clear: both"></div>
+                    %endif
+                    %if widgets:
+                        %for i, field in enumerate( widgets ):
+                            <div class="form-row">
+                                <label>${field[ 'label' ] | h}</label>
+                                <div class="form-row-input">
+                                    ${field[ 'widget' ].get_html()}
+                                </div>
+                                <div class="toolParamHelp" style="clear: both;">
+                                    %if field[ 'helptext' ]:
+                                        ${field[ 'helptext' ] | h}<br/>
+                                    %endif
+                                    *Inherited template field
+                                </div>
+                                <div style="clear: both"></div>
+                            </div>
+                        %endfor 
+                    %endif
+                    <div class="form-row">
+                        <input type="submit" class="primary-button" name="runtool_btn" value="Upload to library"/>
+                    </div>
+                </form>
+            </div>
+        </div>
+    %elif upload_option == 'import_from_history':
+        <div class="toolForm">
+            <div class="toolFormTitle">Active datasets in your current history (${ util.unicodify( history.name ) | h})</div>
+            <div class="toolFormBody">
+                %if history and history.active_datasets:
+                    <form name="add_history_datasets_to_library" action="${h.url_for( controller='library_common', action='add_history_datasets_to_library', cntrller=cntrller, library_id=library_id )}" enctype="multipart/form-data" method="post">
+                        <input type="hidden" name="folder_id" value="${folder_id | h}"/>
+                        <input type="hidden" name="show_deleted" value="${show_deleted | h}"/>
+                        <input type="hidden" name="upload_option" value="import_from_history"/>
+                        <input type="hidden" name="ldda_message" value="${ldda_message | h}"/>
+                        <%
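+                            # Carry the roles chosen on the upload form through to the
+                            # import as a comma-separated list of the selected role ids.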
+                            role_ids_selected = ''
+                            if roles_select_list:
+                                selected = roles_select_list.get_selected( return_value=True, multi=True )
+                                if selected:
+                                    role_ids_selected = ','.join( selected )
+                        %>
+                        <input type="hidden" name="roles" value="${role_ids_selected | h}"/>
+                        %if replace_dataset not in [ None, 'None' ]:
+                            <input type="hidden" name="replace_id" value="${trans.security.encode_id( replace_dataset.id ) | h}"/>
+                            <div class="form-row">
+                                You are currently selecting a new file to replace '<a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=library_id, folder_id=folder_id, id=trans.security.encode_id( replace_dataset.library_dataset_dataset_association.id ) )}">${ util.unicodify( replace_dataset.name ) | h}</a>'.
+                                <div style="clear: both"></div>
+                            </div>
+                        %endif
+                        %for hda in history.visible_datasets:
+                            <% encoded_id = trans.security.encode_id( hda.id ) %>
+                            <div class="form-row">
+                                <input name="hda_ids" id="hist_${encoded_id | h}" value="${encoded_id | h}" type="checkbox"/>
+                                <label for="hist_${encoded_id | h}" style="display: inline;font-weight:normal;">${hda.hid | h}: ${ util.unicodify( hda.name ) | h}</label>
+                            </div>
+                        %endfor
+                        %if widgets:
+                            <input type="hidden" name="template_id" value="${template_id | h}"/>
+                            %for i, field in enumerate( widgets ):
+                                <div class="form-row">
+                                    <label>${field[ 'label' ] | h}</label>
+                                    <div class="form-row-input">
+                                        ${field[ 'widget' ].get_html()}
+                                    </div>
+                                    <div class="toolParamHelp" style="clear: both;">
+                                        %if field[ 'helptext' ]:
+                                            ${field[ 'helptext' ] | h}<br/>
+                                        %endif
+                                        *Inherited template field
+                                    </div>
+                                    <div style="clear: both"></div>
+                                </div>
+                            %endfor 
+                        %endif
+                        <div class="form-row">
+                            <input type="submit" name="add_history_datasets_to_library_button" value="Import to library"/>
+                        </div>
+                    </form>
+                %else:
+                    <p>Your current history is empty.</p>
+                %endif
+            </div>
+        </div>
+    %endif
+</%def>
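+## Illustrative call with hypothetical argument values (the real callers pass
+## data assembled by the library_common controller):
+##   ${render_upload_form( cntrller='library', upload_option='upload_file', action=action,
+##                         library_id=library_id, folder_id=folder_id, replace_dataset=None,
+##                         file_formats=file_formats, dbkeys=dbkeys, space_to_tab='false',
+##                         link_data_only='copy_files', widgets=[], roles_select_list=None,
+##                         history=history, show_deleted=False )}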
+
+<%def name="render_actions_on_multiple_items( actions_to_exclude=[] )">
+    <%
+        is_admin = trans.user_is_admin() and cntrller=='library_admin'
+        can_delete = 'delete' not in actions_to_exclude
+        can_download = 'download' not in actions_to_exclude
+        can_import_to_histories = 'import_to_histories' not in actions_to_exclude
+        can_manage_permissions = 'manage_permissions' not in actions_to_exclude
+        can_move = 'move' not in actions_to_exclude
+    %>
+    <tfoot>
+        <tr>
+            <td colspan="5" style="padding-left: 42px;">
+                For selected datasets:
+                <select name="do_action" id="action_on_selected_items">
+                    %if can_import_to_histories:
+                        %if default_action == 'import_to_current_history':
+                            <option value="import_to_current_history" selected>Import to current history</option>
+                        %else:
+                            <option value="import_to_current_history">Import to current history</option>
+                        %endif
+                        <option value="import_to_histories">Import to histories</option>
+                    %endif
+                    %if can_manage_permissions:
+                        %if not is_admin and default_action == 'manage_permissions':
+                            <option value="manage_permissions" selected>Edit permissions</option>
+                        %else:
+                            <option value="manage_permissions">Edit permissions</option>
+                        %endif
+                    %endif
+                    %if can_move:
+                        <option value="move">Move</option>
+                    %endif
+                    %if can_delete:
+                        <option value="delete">Delete</option>
+                    %endif
+                    %if can_download:
+                        %if 'gz' in comptypes:
+                            <option value="tgz"
+                            %if default_action == 'download':
+                                selected
+                            %endif>
+                            >Download as a .tar.gz file</option>
+                        %endif
+                        %if 'bz2' in comptypes:
+                            <option value="tbz">Download as a .tar.bz2 file</option>
+                        %endif
+                        %if 'zip' in comptypes:
+                            <option value="zip">Download as a .zip file</option>
+                        %endif
+                        %if 'ngxzip' in comptypes:
+                            ## We can safely have two default selected items since ngxzip, if present, will always be the only available type.
+                            <option value="ngxzip"
+                            %if default_action == 'download':
+                                selected
+                            %endif>
+                            >Download as a .zip file</option>
+                        %endif
+                    %endif
+                </select>
+                <input type="submit" class="primary-button" name="action_on_datasets_button" id="action_on_datasets_button" value="Go"/>
+            </td>
+        </tr>
+    </tfoot>
+</%def>
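+## Illustrative call (hypothetical): callers can exclude actions that make no
+## sense in their context, e.g.
+##   ${render_actions_on_multiple_items( actions_to_exclude=[ 'manage_permissions', 'move' ] )}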
diff --git a/templates/webapps/galaxy/library/common/folder_info.mako b/templates/webapps/galaxy/library/common/folder_info.mako
new file mode 100644
index 0000000..e28d683
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/folder_info.mako
@@ -0,0 +1,61 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%
+    from cgi import escape
+    folder_name = escape( str( folder.name ), quote=True )
+    folder_description = escape( str( folder.description ), quote=True )
+%>
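+## cgi.escape with quote=True also converts double quotes to &quot;, keeping
+## these values safe inside the value="..." attributes below.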
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Edit folder name and description</div>
+    <div class="toolFormBody">
+        %if ( trans.user_is_admin() and cntrller == 'library_admin' ) or trans.app.security_agent.can_modify_library_item( current_user_roles, folder ):
+            <form name="folder" action="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=library_id, use_panels=use_panels, show_deleted=show_deleted )}" method="post" >
+                <div class="form-row">
+                    <label>Name:</label>
+                    <input type="text" name="name" value="${folder_name}" size="40"/>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Description:</label>
+                    <input type="text" name="description" value="${folder_description}" size="40"/>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="rename_folder_button" value="Save"/>
+                </div>
+            </form>
+        %else:
+            <div class="form-row">
+                <label>Name:</label>
+                ${folder_name}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                ${folder_description}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+    </div>
+</div>
+%if widgets:
+    ${render_template_fields( cntrller=cntrller, item_type='folder', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, library_id=library_id, folder_id=trans.security.encode_id( folder.id ), info_association=info_association, inherited=inherited )}
+%endif
diff --git a/templates/webapps/galaxy/library/common/folder_permissions.mako b/templates/webapps/galaxy/library/common/folder_permissions.mako
new file mode 100644
index 0000000..eeddf72
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/folder_permissions.mako
@@ -0,0 +1,19 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if ( trans.user_is_admin() and cntrller in [ 'library_admin', 'requests_admin' ] ) or trans.app.security_agent.can_manage_library_item( current_user_roles, folder ):
+    ## LIBRARY_ACCESS is a special permission that is set only at the library level.
+    ${render_permission_form( folder, folder.name, h.url_for( controller='library_common', action='folder_permissions', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=library_id, show_deleted=show_deleted ), roles, do_not_render=[ 'LIBRARY_ACCESS' ] )}
+%endif
diff --git a/templates/webapps/galaxy/library/common/import_datasets_to_histories.mako b/templates/webapps/galaxy/library/common/import_datasets_to_histories.mako
new file mode 100644
index 0000000..d538c99
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/import_datasets_to_histories.mako
@@ -0,0 +1,104 @@
+<%namespace file="/message.mako" import="render_msg" />
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="javascripts" />
+<%def name="title()">Import library datasets to histories</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function() {
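+            // Switching to "Choose multiple histories" clears the single-history
+            // select and reveals the checkbox list instead.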
+            $("#select-multiple").click(function() {
+                $("#single-dest-select").val("");
+                $("#single-destination").hide();
+                $("#multiple-destination").show();
+            });
+        });
+    </script>    
+</%def>
+
+%if message:
+    ${render_msg( util.unicodify( message ), status )}
+%endif
+
+<b>Import library datasets into histories</b>
+<br/><br/>
+<form action="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, use_panels=use_panels, show_deleted=show_deleted )}" method="post">
+    <div class="toolForm" style="float: left; width: 45%; padding: 0px;">
+        <div class="toolFormBody">
+            %if source_lddas:
+                %for source_ldda in source_lddas:
+                    <%
+                        checked = ""
+                        encoded_id = trans.security.encode_id( source_ldda.id )
+                        if source_ldda.id in ldda_ids:
+                            checked = " checked='checked'"
+                    %>
+                    <div class="form-row">
+                        <input type="checkbox" name="ldda_ids" id="dataset_${encoded_id | h}" value="${encoded_id | h}" ${checked}/>
+                        <label for="dataset_${encoded_id | h}" style="display: inline;font-weight:normal;">${util.unicodify( source_ldda.name ) | h}</label>
+                    </div>
+                %endfor
+            %else:
+                <div class="form-row">This folder has no accessible library datasets.</div>
+            %endif
+        </div>
+    </div>
+    <div style="float: left; padding-left: 10px; font-size: 36px;">→</div>
+    <div class="toolForm" style="float: right; width: 45%; padding: 0px;">
+        <div class="toolFormTitle">Destination Histories:</div>
+        <div class="toolFormBody">
+            <div class="form-row" id="single-destination">
+                <select id="single-dest-select" name="target_history_id">
+                    %for i, target_history in enumerate( target_histories ):
+                        <%
+                            encoded_id = trans.security.encode_id( target_history.id )
+                            if encoded_id == target_history_id:
+                                selected_text = " selected='selected'"
+                            else:
+                                selected_text = ""
+                            if target_history == current_history:
+                                current_history_text = " (current history)"
+                            else:
+                                current_history_text = ""
+                        %>
+                        <option value="${encoded_id | h}"${selected_text}>${i + 1}: ${h.truncate( util.unicodify( target_history.name ), 30 ) | h}${current_history_text | h}</option>
+                    %endfor
+                </select>
+                <br/><br/>
+                <a style="margin-left: 10px;" href="javascript:void(0);" id="select-multiple">Choose multiple histories</a>
+            </div>
+            <div id="multiple-destination" style="display: none;">
+                %for i, target_history in enumerate( target_histories ):
+                    <%
+                        encoded_id = trans.security.encode_id( target_history.id )
+                        if target_history == current_history:
+                            current_history_text = " (current history)"
+                        else:
+                            current_history_text = ""
+                    %>
+                    <div class="form-row">
+                        <input type="checkbox" name="target_history_ids" id="target_history_${encoded_id | h}" value="${encoded_id | h}"/>
+                        <label for="target_history_${encoded_id | h}" style="display: inline; font-weight:normal;">${i + 1}: ${util.unicodify( target_history.name ) | h}${current_history_text | h}</label>
+                    </div>
+                %endfor
+            </div>
+            %if trans.get_user():
+                <hr />
+                <div style="text-align: center; color: #888;">— OR —</div>
+                <div class="form-row">
+                    <label for="new_history_name" style="display: inline; font-weight:normal;">New history named:</label>
+                    <input id="new_history_name" type="text" name="new_history_name" />
+                </div>
+            %endif
+        </div>
+    </div>
+    <div style="clear: both"></div>
+    <div class="form-row" style="text-align: center;">
+        <input type="submit" class="primary-button" name="import_datasets_to_histories_button" value="Import library datasets"/>
+    </div>
+</form>
diff --git a/templates/webapps/galaxy/library/common/ldda_edit_info.mako b/templates/webapps/galaxy/library/common/ldda_edit_info.mako
new file mode 100644
index 0000000..eae198d
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/ldda_edit_info.mako
@@ -0,0 +1,181 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+<% from galaxy import util %>
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${h.js("libs/jquery/jquery.autocomplete")}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "autocomplete_tagging" )}
+</%def>
+
+%if ldda == ldda.library_dataset.library_dataset_dataset_association:
+    <b><i>This is the latest version of this library dataset</i></b>
+%else:
+    <font color="red"><b><i>This is an expired version of this library dataset</i></b></font>
+%endif
+<p/>
+
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<%def name="datatype( ldda, file_formats )">
+    <select name="datatype">
+        %for ext in file_formats:
+            %if ldda.ext == ext:
+                <option value="${ext | h}" selected="yes">${ext | h}</option>
+            %else:
+                <option value="${ext | h}">${ext | h}</option>
+            %endif
+        %endfor
+    </select>
+</%def>
+
+%if ( trans.user_is_admin() and cntrller=='library_admin' ) or trans.app.security_agent.can_modify_library_item( current_user_roles, ldda.library_dataset ):
+    <div class="toolForm">
+        <div class="toolFormTitle">Edit attributes of ${util.unicodify( ldda.name ) | h}</div>
+        <div class="toolFormBody">
+            <form name="edit_attributes" action="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=library_id, folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), use_panels=use_panels, show_deleted=show_deleted, )}" method="post">
+                <input type="hidden" name="id" value="${trans.security.encode_id( ldda.id ) | h}"/>
+                <div class="form-row">
+                    <label>Name:</label>
+                    <input type="text" name="name" value="${util.unicodify( ldda.name ) | h}" size="40"/>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Info:</label>
+                    <input type="text" name="info" value="${util.unicodify( ldda.info ) | h}" size="40"/>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Message:</label>
+                    %if ldda.message:
+                        <textarea name="message" rows="3" cols="35">${ldda.message | h}</textarea>
+                    %else:
+                        <textarea name="message" rows="3" cols="35"></textarea>
+                    %endif
+                    <div class="toolParamHelp" style="clear: both;">
+                        This information will be displayed in the library browser.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                %for name, spec in ldda.metadata.spec.items():
+                    %if spec.visible:
+                        <div class="form-row">
+                            <label>${spec.desc | h}:</label>
+                            ${ldda.metadata.get_html_by_name( name, trans=trans )}
+                            <div style="clear: both"></div>
+                        </div>
+                    %endif
+                %endfor
+                <div class="form-row">
+                    <input type="submit" name="save" value="Save"/>
+                </div>
+            </form>
+            <form name="auto_detect" action="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=library_id, folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), use_panels=use_panels, show_deleted=show_deleted, )}" method="post">
+                <div class="form-row">
+                    <input type="hidden" name="id" value="${trans.security.encode_id( ldda.id ) | h}"/>
+                    <input type="submit" name="detect" value="Auto-detect"/>
+                    <div class="toolParamHelp" style="clear: both;">
+                        This will inspect the dataset and attempt to correct the above column values if they are not accurate.
+                    </div>
+                </div>
+            </form>
+        </div>
+    </div>
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Change data type</div>
+        <div class="toolFormBody">
+            %if ldda.datatype.allow_datatype_change:
+                <form name="change_datatype" action="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=library_id, folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), use_panels=use_panels, show_deleted=show_deleted, )}" method="post">
+                    <div class="form-row">
+                        <input type="hidden" name="id" value="${trans.security.encode_id( ldda.id ) | h}"/>
+                        <label>New Type:</label>
+                        ${datatype( ldda, file_formats )}
+                        <div class="toolParamHelp" style="clear: both;">
+                            This will change the datatype of the existing dataset
+                            but <i>not</i> modify its contents. Use this if Galaxy
+                            has incorrectly guessed the type of your dataset.
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                    <div class="form-row">
+                        <input type="submit" name="change" value="Save"/>
+                    </div>
+                </form>
+            %else:
+                <div class="form-row">
+                    <div class="warningmessagesmall">${_('Changing the datatype of this dataset is not allowed.')}</div>
+                </div>
+            %endif
+        </div>
+    </div>
+     <div class="toolForm">
+        <div class="toolFormTitle">Change Extended Metadata</div>
+        <div class="toolFormBody">
+                <form name="change_datatype" action="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=library_id, folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), use_panels=use_panels, show_deleted=show_deleted, )}" method="post">
+                <div class="form-row">
+                <label>Extended Metadata:</label>
+                </div>
+                <input type="hidden" name="id" value="${trans.security.encode_id( ldda.id ) | h}"/>
+                <div class="form-row">
+                %if ldda.extended_metadata:
+                    <textarea name="extended_metadata" rows="15" cols="35">${util.pretty_print_json(ldda.extended_metadata.data) | h}</textarea>
+                %else:
+                    <textarea name="extended_metadata" rows="15" cols="35"></textarea>
+                %endif
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <input type="submit" name="change_extended_metadata" value="Save"/>
+                </div>
+            </form>
+        </div>
+    </div>
+    <p/>
+%else:
+    <div class="toolForm">
+        <div class="toolFormTitle">View information about ${util.unicodify( ldda.name ) | h}</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <label>Name:</label>
+                ${util.unicodify( ldda.name ) | h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Info:</label>
+                ${util.unicodify( ldda.info ) | h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Data Format:</label>
+                ${ldda.ext | h}
+                <div style="clear: both"></div>
+            </div>
+            %for name, spec in ldda.metadata.spec.items():
+                %if spec.visible:
+                    <div class="form-row">
+                        <label>${spec.desc | h}:</label>
+                        ${ldda.metadata.get( name ) | h}
+                        <div style="clear: both"></div>
+                    </div>
+                %endif
+            %endfor
+        </div>
+    </div>
+%endif
+%if widgets:
+    ${render_template_fields( cntrller=cntrller, item_type='ldda', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, library_id=library_id, folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), ldda_id=trans.security.encode_id( ldda.id ), info_association=info_association, inherited=inherited )}
+%endif
diff --git a/templates/webapps/galaxy/library/common/ldda_info.mako b/templates/webapps/galaxy/library/common/ldda_info.mako
new file mode 100644
index 0000000..87a515e
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/ldda_info.mako
@@ -0,0 +1,311 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+<%
+    from galaxy import util
+    from galaxy.webapps.galaxy.controllers.library_common import branch_deleted, get_containing_library_from_library_dataset
+    from galaxy.web.framework.helpers import time_ago
+
+    if ldda == ldda.library_dataset.library_dataset_dataset_association:
+        current_version = True
+    else:
+        current_version = False
+    if ldda.user:
+        uploaded_by = ldda.user.email
+    else:
+        uploaded_by = 'anonymous'
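+    # Admins in the 'library_admin' controller context get full modify/manage
+    # rights; otherwise rights are resolved per item from the user's current roles.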
+    if trans.user_is_admin() and cntrller == 'library_admin':
+        can_modify = can_manage = True
+    elif cntrller in [ 'library', 'requests' ]:
+        can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, ldda.library_dataset )
+        can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, ldda.library_dataset )
+    else:
+        can_modify = can_manage = False
+    form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
+%>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+    .ldda-peek {
+        padding-top: 2px;
+    }
+    </style>
+</%def>
+
+%if current_version:
+    <b><i>This is the latest version of this library dataset</i></b>
+%else:
+    <font color="red"><b><i>This is an expired version of this library dataset</i></b></font>
+%endif
+<p/>
+
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">
+        Information about <div class="menubutton popup" id="dataset-${ trans.security.encode_id( ldda.id ) | h}-popup">${util.unicodify( ldda.name ) | h}</div>
+        %if not library.deleted and not branch_deleted( ldda.library_dataset.folder ) and not ldda.library_dataset.deleted:
+            <div popupmenu="dataset-${ trans.security.encode_id( ldda.id ) | h}-popup">
+                %if can_modify:
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+                    %if not info_association:
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
+                    %else:
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
+                    %endif
+                %endif
+                %if can_manage:
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_permissions', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+                %endif
+                %if current_version and can_modify:
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), replace_id=trans.security.encode_id( ldda.library_dataset.id ) )}">Upload a new version of this dataset</a>
+                %endif
+                %if ldda.has_data():
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into selected histories</a>
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Download this dataset</a>
+                %endif
+                %if show_associated_hdas_and_lddas:
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), id=trans.security.encode_id( ldda.id ), show_associated_hdas_and_lddas=False, use_panels=use_panels, show_deleted=show_deleted )}">Hide items using this dataset's disk file</a>
+                %else:
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), id=trans.security.encode_id( ldda.id ), show_associated_hdas_and_lddas=True, use_panels=use_panels, show_deleted=show_deleted )}">Show items using this dataset's disk file</a>
+                %endif
+            </div>
+        %endif
+    </div>
+    <div class="toolFormBody">
+        %if ldda.message:
+            <div class="form-row">
+                <label>Message:</label>
+                <pre>${ldda.message | h}</pre>
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        <div class="form-row">
+            <label>Uploaded by:</label>
+            ${uploaded_by | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Date uploaded:</label>
+            ${util.unicodify(ldda.create_time.strftime( trans.app.config.pretty_datetime_format )) | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>File size:</label>
+            ${ldda.get_size( nice_size=True ) | h}
+            <div style="clear: both"></div>
+        </div>
+        %if ldda.dataset.uuid:
+        <div class="form-row">
+            <label>UUID:</label>
+            ${ldda.dataset.uuid | h}
+            <div style="clear: both"></div>
+        </div>
+        %endif
+        %if ldda.tool_version:
+            <div class="form-row">
+                <label>Tool version:</label>
+                ${ldda.tool_version | h}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        <div class="form-row">
+            <label>Data type:</label>
+            ${ldda.ext | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Build:</label>
+            ${ldda.dbkey | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Miscellaneous information:</label>
+            ${util.unicodify( ldda.info ) | h}
+            <div style="clear: both"></div>
+        </div>
+        %if ldda.creating_job_associations:
+            <% job = ldda.creating_job_associations[0].job %>
+            %if job.stdout and job.stdout.strip() != '':
+                <div class="form-row">
+                    <label>Job Standard Output</label>
+                    <pre>${job.stdout | h}</pre>
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if job.stderr and job.stderr.strip() != '':
+                <div class="form-row">
+                    <label>Job Standard Error</label>
+                    <pre>${job.stderr | h}</pre>
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+        %endif
+        <div class="form-row">
+            <div>${ldda.blurb | h}</div>
+        </div>
+        ## We want to display all metadata items here, whether marked visible or not, since they are all pretty useful.
+        %for name, spec in ldda.metadata.spec.items():
+            <div class="form-row">
+                <label>${spec.desc.replace( ' (click box & select)', '' ) | h}:</label>
+                <%
+                    metadata_val = ldda.metadata.get( name )
+                    if isinstance( metadata_val, trans.model.MetadataFile ):
+                        metadata_val = metadata_val.file_name
+                    elif isinstance( metadata_val, list ):
+                        # Make sure list items are strings
+                        metadata_val = [ str( item ) for item in metadata_val ]
+                        metadata_val = ', '.join( metadata_val )
+                %>
+                ${metadata_val | h}
+                <div style="clear: both"></div>
+            </div>
+        %endfor
+        %if ldda.peek != "no peek":
+            <div class="form-row">
+                <div id="info${ trans.security.encode_id( ldda.id ) | h}" class="ldda-peek">
+                    <label>Peek:</label>
+                    <div><pre id="peek${ trans.security.encode_id( ldda.id ) | h}" class="peek">${util.unicodify( ldda.display_peek() )}</pre></div>
+                </div>
+            </div>
+        %endif
+        %if ldda.extended_metadata:
+            <div class="form-row">
+                <label>Extended Metadata:</label>
+                <pre>${util.pretty_print_json(ldda.extended_metadata.data) | h}</pre>
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        %if trans.user_is_admin() and cntrller == 'library_admin':
+            <div class="form-row">
+                <label>Disk file:</label>
+                ${ldda.file_name | h}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+    </div>
+</div>
+%if widgets:
+    ${render_template_fields( cntrller=cntrller, item_type='ldda', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, library_id=library_id, folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), ldda_id=trans.security.encode_id( ldda.id ), info_association=info_association, inherited=inherited, editable=False )}
+%endif
+%if show_associated_hdas_and_lddas:
+    %if associated_hdas:
+        <p/>
+        <b>Active (undeleted) history items that use this library dataset's disk file</b>
+        <div class="toolForm">
+            <table class="grid">
+                <thead>
+                    <tr>
+                        <th>History</th>
+                        <th>History Item</th>
+                        <th>Last Updated</th>
+                        <th>User</th>
+                    </tr>
+                </thead>
+                %for hda in associated_hdas:
+                    <tr>
+                        <td>
+                            %if hda.history:
+                                <a target="_blank" href="${h.url_for( controller='history', action='view', id=trans.security.encode_id( hda.history_id ) )}">${hda.history.get_display_name() | h}</a>
+                            %else:
+                                no history
+                            %endif
+                        </td>
+                        <td>${hda.get_display_name() | h}</td>
+                        <td>${time_ago( hda.update_time ) | h}</td>
+                        <td>
+                            %if hda.history and hda.history.user:
+                                ${hda.history.user.email | h}
+                            %else:
+                                anonymous
+                            %endif
+                        </td>
+                    </tr>
+                %endfor
+            </table>
+        </div>
+        <p/>
+    %endif
+    %if associated_lddas:
+        <p/>
+        <b>Other active (undeleted) library datasets that use this library dataset's disk file</b>
+        <div class="toolForm">
+            <table class="grid">
+                <thead>
+                    <tr>
+                        <th>Library</th>
+                        <th>Library Folder</th>
+                        <th>Library Dataset</th>
+                        <th>Last Updated</th>
+                        <th>User</th>
+                    </tr>
+                </thead>
+                %for copied_ldda in associated_lddas:
+                    <% containing_library = get_containing_library_from_library_dataset( trans, copied_ldda.library_dataset ) %>
+                    <tr>
+                        <td>
+                            <%
+                                if containing_library:
+                                    library_display_name = containing_library.get_display_name()
+                                else:
+                                    library_display_name = 'no library'
+                            %>
+                            %if containing_library:
+                                <a href="${h.url_for( controller='library_common', action='browse_library', id=trans.security.encode_id( containing_library.id ), cntrller=cntrller, use_panels=use_panels )}">${library_display_name | h}</a>
+                            %else:
+                                ${library_display_name | h}
+                            %endif
+                        </td>
+                        <td>
+                            <%
+                                library_dataset = copied_ldda.library_dataset
+                                folder = library_dataset.folder
+                                folder_display_name = folder.get_display_name()
+                                if folder_display_name == library_display_name:
+                                    folder_display_name = 'library root'
+                            %>
+                            ${folder_display_name | h}
+                        </td>
+                        <td>${copied_ldda.get_display_name() | h}</td>
+                        <td>${time_ago( copied_ldda.update_time ) | h}</td>
+                        <td>
+                            %if copied_ldda.user:
+                                ${copied_ldda.user.email | h}
+                            %else:
+                                anonymous
+                            %endif
+                        </td>
+                    </tr>
+                %endfor
+            </table>
+        </div>
+        <p/>
+    %endif
+%endif
+%if current_version:
+    <% expired_lddas = list( ldda.library_dataset.expired_datasets ) %>
+    %if expired_lddas:
+        <br/>
+        <div class="toolFormTitle">Expired versions of ${util.unicodify( ldda.name ) | h}</div>
+        %for expired_ldda in expired_lddas:
+            <div class="form-row">
+                <a href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( expired_ldda.library_dataset.folder.id ), id=trans.security.encode_id( expired_ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">${util.unicodify( expired_ldda.name ) | h}</a>
+            </div>
+        %endfor
+    %endif
+%endif
diff --git a/templates/webapps/galaxy/library/common/ldda_permissions.mako b/templates/webapps/galaxy/library/common/ldda_permissions.mako
new file mode 100644
index 0000000..c1b36f2
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/ldda_permissions.mako
@@ -0,0 +1,70 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+    .ldda-title {
+        font-weight: bold;
+        line-height: 16px;
+    }
+    </style>
+</%def>
+
+<%
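+    # Build the heading that describes the current selection.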
+    if len( lddas ) > 1:
+        name_str = '%d selected datasets' % len( lddas )
+    else:
+        ldda = lddas[0]
+        name_str = util.unicodify( ldda.name )
+%>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a></li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if len( lddas ) > 1:
+    <div class="toolFormTitle">Manage the following selected datasets</div>
+    <p/>
+    <table cellspacing="0" cellpadding="5" border="0" width="100%" class="libraryTitle">
+        %for ldd_assoc in lddas:
+            <tr>
+                <td>
+                    <div class="rowTitle">
+                        <span class="ldda-title"><b>${ldd_assoc.name | h}</b></span>
+                        <a id="ldda-${ldd_assoc.id | h}-popup" class="popup-arrow" style="display: none;">▼</a>
+                    </div>
+                    <div popupmenu="ldda-${ldd_assoc.id | h}-popup">
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='library_dataset_info', id=trans.security.encode_id( ldd_assoc.library_dataset_id ), library_id=library_id )}">Manage this dataset's versions</a>
+                    </div>
+                </td>
+                <td>
+                    %if ldd_assoc == ldd_assoc.library_dataset.library_dataset_dataset_association:
+                        <i>This is the latest version of this library dataset</i>
+                    %else:
+                        <font color="red"><i>This is an expired version of this library dataset</i></font>
+                    %endif
+                </td>
+            </tr>
+        %endfor
+    </table>
+    <p/>
+%else:
+    %if ldda == ldda.library_dataset.library_dataset_dataset_association:
+        <b><i>This is the latest version of this library dataset</i></b>
+    %else:
+        <font color="red"><b><i>This is an expired version of this library dataset</i></b></font>
+    %endif
+    <p/>
+%endif
+
+<% ldda_ids = ",".join( [ trans.security.encode_id( d.id ) for d in lddas ] ) %>
+## LIBRARY_ACCESS is a special permission that is set only at the library level,
+## and DATASET_MANAGE_PERMISSIONS is inherited to the dataset from the ldda.
+${render_permission_form( lddas[0], name_str, h.url_for( controller='library_common', action='ldda_permissions', cntrller=cntrller, library_id=library_id, folder_id=trans.security.encode_id( lddas[0].library_dataset.folder.id ), id=ldda_ids, show_deleted=show_deleted ), roles, do_not_render=[ 'LIBRARY_ACCESS', 'DATASET_MANAGE_PERMISSIONS' ] )}
diff --git a/templates/webapps/galaxy/library/common/library_dataset_info.mako b/templates/webapps/galaxy/library/common/library_dataset_info.mako
new file mode 100644
index 0000000..25a7ed6
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/library_dataset_info.mako
@@ -0,0 +1,70 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+%if library_dataset == library_dataset.library_dataset_dataset_association.library_dataset:
+    <b><i>This is the latest version of this library dataset</i></b>
+%else:
+    <font color="red"><b><i>This is an expired version of this library dataset</i></b></font>
+%endif
+<p/>
+
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if ( trans.user_is_admin() and cntrller == 'library_admin' ) or trans.app.security_agent.can_modify_library_item( current_user_roles, library_dataset ):
+    <div class="toolForm">
+        <div class="toolFormTitle">Edit attributes of ${util.unicodify( library_dataset.name ) | h}</div>
+        <div class="toolFormBody">
+            <form name="edit_attributes" action="${h.url_for( controller='library_common', action='library_dataset_info', id=trans.security.encode_id( library_dataset.id ), library_id=library_id, show_deleted=show_deleted )}" method="post">
+                <div class="form-row">
+                    <label>Name:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <input type="text" name="name" value="${util.unicodify( library_dataset.name ) | h}" size="40"/>
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Info:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <input type="text" name="info" value="${util.unicodify( library_dataset.info ) | h}" size="40"/>
+                    </div>
+                    <div style="clear: both"></div>
+                </div> 
+                <div class="form-row">
+                    <input type="submit" name="edit_attributes_button" value="Save"/>
+                </div>
+            </form>
+        </div>
+    </div>
+%else:
+    <div class="toolForm">
+        <div class="toolFormTitle">View information about ${util.unicodify( library_dataset.name ) | h}</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <b>Name:</b> ${util.unicodify( library_dataset.name ) | h}
+                <div style="clear: both"></div>
+                <b>Info:</b> ${util.unicodify( library_dataset.info ) | h}
+                <div style="clear: both"></div>
+                <b>Dataset Versions:</b>
+                <div style="clear: both"></div>
+            </div>
+            <div style="clear: both"></div>
+        </div>
+    </div>
+%endif
+
+%if widgets:
+    ${render_template_fields( cntrller, item_type='library_dataset', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, library_id=library_id, info_association=None, inherited=False, editable=False )}
+%endif
diff --git a/templates/webapps/galaxy/library/common/library_dataset_permissions.mako b/templates/webapps/galaxy/library/common/library_dataset_permissions.mako
new file mode 100644
index 0000000..b0928df
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/library_dataset_permissions.mako
@@ -0,0 +1,25 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/dataset/security_common.mako" import="render_permission_form" />>
+
+%if library_dataset == library_dataset.library_dataset_dataset_association.library_dataset:
+    <b><i>This is the latest version of this library dataset</i></b>
+%else:
+    <font color="red"><b><i>This is an expired version of this library dataset</i></b></font>
+%endif
+<p/>
+
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if ( trans.user_is_admin() and cntrller == 'library_admin' ) or trans.app.security_agent.can_manage_library_item( current_user_roles, library_dataset ):
+    ## LIBRARY_ACCESS is a special permission that is set only at the library level.
+    ${render_permission_form( library_dataset, library_dataset.name, h.url_for( controller='library_common', action='library_dataset_permissions', cntrller=cntrller, id=trans.security.encode_id( library_dataset.id ), library_id=library_id, show_deleted=show_deleted ), roles, do_not_render=[ 'LIBRARY_ACCESS' ] )}
+%endif
diff --git a/templates/webapps/galaxy/library/common/library_dataset_search_results.mako b/templates/webapps/galaxy/library/common/library_dataset_search_results.mako
new file mode 100644
index 0000000..69df886
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/library_dataset_search_results.mako
@@ -0,0 +1,137 @@
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/library/common/browse_library.mako" import="render_dataset" />
+<%namespace file="/library/common/common.mako" import="render_actions_on_multiple_items" />
+<%namespace file="/library/common/common.mako" import="render_compression_types_help" />
+<%namespace file="/library/common/common.mako" import="common_javascripts" />
+
+<%!
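+    # Render within the panel layout when use_panels is set; otherwise use the plain base template.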
+    def inherit(context):
+        if context.get('use_panels'):
+            return '/webapps/galaxy/base_panels.mako'
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.active_view="user"
+    self.overlay_visible=False
+%>
+</%def>
+
+##
+## Override methods from base.mako and base_panels.mako
+##
+<%def name="center_panel()">
+   <div style="overflow: auto; height: 100%;">
+       <div class="page-container" style="padding: 10px;">
+           ${render_content()}
+       </div>
+   </div>
+</%def>
+
+## Render the grid's basic elements. Each of these elements can be subclassed.
+<%def name="body()">
+    ${render_content()}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jstorage")}
+    ${common_javascripts()}
+</%def>
+
+<%def name="render_searched_components()">
+    <ul style="padding-left: 1em; list-style-type: disc;">
+        <li>name</li>
+        <li>info</li>
+        <li>dbkey (genome build)</li>
+        <li>message</li>
+        %if trans.app.config.enable_lucene_library_search:
+            <li>disk file content</li>
+        %endif
+    </ul>
+    <br/>
+</%def>
+
+<%def name="render_content()">
+    <%
+        from galaxy import util
+        from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
+        from time import strftime
+ 
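+        # Mutable counter shared with render_dataset() to number the grid rows.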
+        class RowCounter( object ):
+            def __init__( self ):
+                self.count = 0
+            def increment( self ):
+                self.count += 1
+            def __str__( self ):
+                return str( self.count )
+    %>
+ 
+    <br/><br/>
+    <ul class="manage-table-actions">
+        <li>
+            <a class="action-button" href="${h.url_for( controller=cntrller, action='browse_libraries' )}" target="galaxy_main">Browse data libraries</a></div>
+        </li>
+    </ul>
+
+    <h2>Results for search on “${search_term | h}”</h2>
+ 
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+ 
+    %if lddas:
+        <p>The string "${search_term | h}" was found in at least one of the following information components of the displayed library datasets.</p>
+        ${render_searched_components()}
+        <form name="act_on_multiple_datasets" action="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, use_panels=use_panels, show_deleted=show_deleted )}" onSubmit="javascript:return checkForm();" method="post">
+            <input type="hidden" name="search_term" value="${search_term | h}"/>
+            <table cellspacing="0" cellpadding="0" border="0" width="100%" class="grid" id="library-grid">
+                <thead>
+                    <tr class="libraryTitle">
+                        <th>
+                            <input type="checkbox" id="checkAll" name=select_all_datasets_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_datasets_checkbox value="true"/>
+                            Name
+                        </th>
+                        <th>Message</th>
+                        <th>Uploaded By</th>
+                        <th>Date</th>
+                        <th>File Size</th>
+                    </tr>
+                </thead>
+                <%
+                    tracked_datasets = {}
+                    row_counter = RowCounter()
+                    my_row = row_counter.count
+                %>
+                %for ldda in lddas:
+                    <%
+                        library_dataset = ldda.library_dataset
+                        folder = library_dataset.folder
+                        library = folder.parent_library
+                    %>
+                    ${render_dataset( cntrller, ldda, library_dataset, False, library, folder, 0, my_row, row_counter, tracked_datasets, show_deleted=False )}
+                    <%
+                        my_row = row_counter.count
+                        row_counter.increment()
+                    %>
+                %endfor
+                ${render_actions_on_multiple_items( actions_to_exclude=[ 'manage_permissions' ] )}
+            </table>
+        </form>
+        ${render_compression_types_help( comptypes )}
+    %elif status != 'error':
+        <p>The string "${search_term | h}" was not found in any of the following information components for any library datasets that you can access.</p>
+        ${render_searched_components()}
+    %endif
+</%def>
diff --git a/templates/webapps/galaxy/library/common/library_info.mako b/templates/webapps/galaxy/library/common/library_info.mako
new file mode 100644
index 0000000..2da7790
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/library_info.mako
@@ -0,0 +1,116 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%
+    from galaxy import util
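+    # Library admins get full rights; other controllers defer to the security agent per action.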
+    if trans.user_is_admin() and cntrller == 'library_admin':
+        can_add = can_modify = can_manage = True
+    elif cntrller in [ 'library', 'requests' ]:
+        can_add = trans.app.security_agent.can_add_library_item( current_user_roles, library )
+        can_modify = trans.app.security_agent.can_modify_library_item( current_user_roles, library )
+        can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, library )
+    else:
+        can_add = can_modify = can_manage = False
+    library_name = util.unicodify( library.name )
+    library_description = util.unicodify(library.description)
+    library_synopsis = util.unicodify(library.synopsis)
+%>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">
+        <div class="menubutton split popup" id="library-${trans.security.encode_id( library.id ) | h }-popup">
+            <a href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">${library.name[:50] | h}</a>
+        </div>
+        %if can_add or can_modify or can_manage:
+            <div popupmenu="library-${ trans.security.encode_id( library.id ) | h }-popup">
+                %if not library.deleted:
+                    %if can_add and not library.info_association:
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='library', form_type=trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
+                    %endif
+                    %if can_manage:
+                        <a class="action-button" href="${h.url_for( controller='library_common', action='library_permissions', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+                    %endif
+                    %if can_modify:
+                        <a class="action-button" confirm="Click OK to delete the library named '${library.name | h}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library' )}">Delete this data library</a>
+                    %endif
+                %elif can_modify and not library.purged:
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library' )}">Undelete this data library</a>
+                %elif library.purged:
+                    <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">This data library has been purged</a>
+                %endif
+            </div>
+        %endif
+    </div>
+    <div class="toolFormBody">
+        %if not library.deleted and can_modify:
+            <form name="library" action="${h.url_for( controller='library_common', action='library_info', id=trans.security.encode_id( library.id ), cntrller=cntrller, use_panels=use_panels, show_deleted=show_deleted )}" method="post" >
+                <div class="form-row">
+                    <label>Name:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <input type="text" name="name" value="${library_name | h}" size="40"/>
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Description:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <input type="text" name="description" value="${library_description | h}" size="40"/>
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Displayed when browsing all libraries
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Synopsis:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <input type="text" name="synopsis" value="${library_synopsis | h}" size="40"/>
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Displayed when browsing this library
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="library_info_button" value="Save"/>
+                </div>
+            </form>
+        %else:
+            <div class="form-row">
+                <label>Name:</label>
+                ${library_name | h}
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <label>Description:</label>
+                ${library_description | h}
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <label>Synopsis:</label>
+                ${library_synopsis | h}
+            </div>
+            <div style="clear: both"></div>
+        %endif
+    </div>
+</div>
+
+%if widgets:
+    ${render_template_fields( cntrller=cntrller, item_type='library', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, library_id=trans.security.encode_id( library.id ), info_association=info_association, inherited=inherited, editable=not( library.deleted ) )}
+%endif
diff --git a/templates/webapps/galaxy/library/common/library_item_info.mako b/templates/webapps/galaxy/library/common/library_item_info.mako
new file mode 100644
index 0000000..1c2a25f
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/library_item_info.mako
@@ -0,0 +1,15 @@
+<%def name="render_library_item_info( ldda )">
+                            %if ldda.state == 'error':
+                                <div class="libraryItem-${ldda.state | h}">Job error <i>(click name for more info)</i></div>
+                            %elif ldda.state == 'queued':
+                                <div class="libraryItem-${ldda.state | h}">This job is queued</div>
+                            %elif ldda.state == 'running':
+                                <div class="libraryItem-${ldda.state | h}">This job is running</div>
+                            %elif ldda.state == 'upload':
+                                <div class="libraryItem-${ldda.state | h}">This dataset is uploading</div>
+                            %else:
+                                ${ldda.message | h}
+                            %endif
+</%def>
+
+${render_library_item_info( ldda )}
diff --git a/templates/webapps/galaxy/library/common/library_permissions.mako b/templates/webapps/galaxy/library/common/library_permissions.mako
new file mode 100644
index 0000000..9f18fd2
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/library_permissions.mako
@@ -0,0 +1,18 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if ( trans.user_is_admin() and cntrller == 'library_admin' ) or trans.app.security_agent.can_manage_library_item( current_user_roles, library ):
+    ${render_permission_form( library, library.name, h.url_for( controller='library_common', action='library_permissions', cntrller=cntrller, id=trans.security.encode_id( library.id ), show_deleted=show_deleted ), roles, do_not_render=[], all_roles=all_roles )}
+%endif
diff --git a/templates/webapps/galaxy/library/common/move_library_item.mako b/templates/webapps/galaxy/library/common/move_library_item.mako
new file mode 100644
index 0000000..cfb746d
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/move_library_item.mako
@@ -0,0 +1,106 @@
+<%namespace file="/message.mako" import="render_msg" />
+<%inherit file="/base.mako"/>
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${h.js("libs/jquery/jquery.autocomplete")}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "autocomplete_tagging" )}
+</%def>
+
+<%
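+    # Encode the source and target library ids (when set) so they round-trip through the form.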
+    if source_library:
+        source_library_id = trans.security.encode_id( source_library.id )
+    else:
+        source_library_id = ''
+    if target_library:
+        target_library_id = trans.security.encode_id( target_library.id )
+    else:
+        target_library_id = ''
+%>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<b>Move data library items</b>
+<br/><br/>
+<form name="move_library_item" action="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type=item_type, make_target_current=make_target_current, use_panels=use_panels, show_deleted=show_deleted )}" method="post">
+    <div class="toolForm" style="float: left; width: 45%; padding: 0px;">
+        <div class="toolFormBody">
+            <input type="hidden" name="source_library_id" value="${source_library_id | h}"/>
+            %if target_library:
+                <input type="hidden" name="target_library_id" value="${target_library_id | h}"/>
+            %endif
+            %if item_type == 'ldda':
+                %for move_ldda in move_lddas:
+                    <%
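+                        # Pre-check datasets that were already selected for this move.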
+                        checked = ""
+                        encoded_id = trans.security.encode_id( move_ldda.id )
+                        if move_ldda.id in move_ldda_ids:
+                            checked = " checked='checked'"
+                    %>
+                    <div class="form-row">
+                        <input type="checkbox" name="item_id" id="dataset_${encoded_id | h}" value="${encoded_id | h}" ${checked}/>
+                        <label for="dataset_${encoded_id | h}" style="display: inline;font-weight:normal;">${util.unicodify( move_ldda.name ) | h}</label>
+                    </div>
+                %endfor
+            %elif item_type == 'folder':
+                <div class="form-row">
+                    <% encoded_id = trans.security.encode_id( move_folder.id ) %>
+                    <input type="checkbox" name="item_id" id="folder_${encoded_id | h}" value="${encoded_id | h}" checked='checked'/>
+                    <label for="folder_${encoded_id | h}" style="display: inline;font-weight:normal;">${move_folder.name | h}</label>
+                </div>
+            %endif
+        </div>
+    </div>
+    <div style="float: left; padding-left: 10px; font-size: 36px;">→</div>
+    <div class="toolForm" style="float: right; width: 45%; padding: 0px;">
+        %if target_library:
+            <div class="toolFormTitle">Select folder within data library: ${h.truncate( target_library.name, 30 ) | h}</div>
+        %else:
+            <div class="toolFormTitle">Select a data library</div>
+        %endif
+        <div class="toolFormBody">
+            %if target_library:
+                <div class="form-row">
+                    %if len( target_folder_id_select_field.options ) >= 1:
+                        ${target_folder_id_select_field.get_html()}
+                    %else:
+                        %if source_library and source_library.id == target_library.id:
+                            You are not authorized to move items within the source data library
+                        %else:
+                            You are not authorized to move items into the selected data library
+                        %endif
+                    %endif
+                    %if source_library:
+                        <br/><br/>
+                        %if target_library.id == source_library.id:
+                            <a style="margin-left: 10px;" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type=item_type, item_id=item_id, source_library_id=source_library_id, make_target_current=False, use_panels=use_panels, show_deleted=show_deleted )}">Choose another data library</a>
+                        %else:
+                            <a style="margin-left: 10px;" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type=item_type, item_id=item_id, source_library_id=source_library_id, make_target_current=True, use_panels=use_panels, show_deleted=show_deleted )}">Choose source data library</a>
+                        %endif
+                    %elif not target_library_folders:
+                        <br/><br/>
+                        <a style="margin-left: 10px;" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type=item_type, item_id=item_id, source_library_id=source_library_id, make_target_current=False, use_panels=use_panels, show_deleted=show_deleted )}">Choose another data library</a>
+                    %endif
+                </div>
+            %else:
+                <div class="form-row">
+                    %if len( target_library_id_select_field.options ) > 1:
+                        ${target_library_id_select_field.get_html()}
+                    %else:
+                        You are not authorized to move items to any data libraries
+                    %endif
+                </div>
+            %endif
+        </div>
+    </div>
+    <div style="clear: both"></div>
+    <div class="form-row" align="center">
+        <input type="submit" class="primary-button" name="move_library_item_button" value="Move"/>
+    </div>
+</form>
diff --git a/templates/webapps/galaxy/library/common/new_folder.mako b/templates/webapps/galaxy/library/common/new_folder.mako
new file mode 100644
index 0000000..e17b83e
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/new_folder.mako
@@ -0,0 +1,38 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, use_panels=use_panels, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create a new folder</div>
+    <div class="toolFormBody">
+        <form name="folder" action="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( folder.id ), library_id=library_id, show_deleted=show_deleted )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="New Folder" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="description" value="" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="new_folder_button" value="Create"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/library/common/upload.mako b/templates/webapps/galaxy/library/common/upload.mako
new file mode 100644
index 0000000..08857c3
--- /dev/null
+++ b/templates/webapps/galaxy/library/common/upload.mako
@@ -0,0 +1,36 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/library/common/common.mako" import="render_upload_form" />
+
+<% import os, os.path %>
+
+<%
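+    # Encode the id of the dataset being replaced; 'None' means this is a fresh upload.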
+    if replace_dataset not in [ None, 'None' ]:
+        replace_id = trans.security.encode_id( replace_dataset.id )
+    else:
+        replace_id = 'None'
+%>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.autocomplete")}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "autocomplete_tagging" )}
+</%def>
+
+<b>Upload files to a data library</b>
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=library_id, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+${render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, space_to_tab, link_data_only, widgets, roles_select_list, history, show_deleted )}
diff --git a/templates/webapps/galaxy/library/grid.mako b/templates/webapps/galaxy/library/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/webapps/galaxy/library/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/webapps/galaxy/library/index.mako b/templates/webapps/galaxy/library/index.mako
new file mode 100644
index 0000000..5482845
--- /dev/null
+++ b/templates/webapps/galaxy/library/index.mako
@@ -0,0 +1,15 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="shared"
+%>
+</%def>
+
+<%def name="center_panel()">
+
+    <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller="library", action="browse_libraries", default_action=default_action )}"> </iframe>
+
+</%def>
diff --git a/templates/webapps/galaxy/mobile/dataset/detail.mako b/templates/webapps/galaxy/mobile/dataset/detail.mako
new file mode 100644
index 0000000..02fed87
--- /dev/null
+++ b/templates/webapps/galaxy/mobile/dataset/detail.mako
@@ -0,0 +1,29 @@
+<div id="dataset_${dataset.id}" title="Dataset" class="panel">
+    <div class="toolbar">
+        <h1>${dataset.display_name()}</h1>
+        <a class="back button" href="#">Back</a>
+    </div>
+
+    <div class="pad">
+        <fieldset>
+            <div class="row">
+                <label>State</label> <span>${dataset.state}</span>
+            </div>
+            <div class="row">
+                <label>Content</label> <span>${dataset.blurb}</span>
+            </div>
+            <div class="row">
+                <label>Format</label> <span>${dataset.ext}</span>
+            </div>
+            <div class="row">
+                <label>Info</label> <span>${dataset.display_info()}</span>
+            </div>
+            %if dataset.state == "ok":
+                <div class="row">
+                    <a href="${h.url_for(controller='mobile', action='dataset_peek', id=dataset.id )}">Peek</a>
+                </div>
+            %endif
+        </fieldset>
+    </div>
+ 
+</div>
diff --git a/templates/webapps/galaxy/mobile/dataset/peek.mako b/templates/webapps/galaxy/mobile/dataset/peek.mako
new file mode 100644
index 0000000..c21e270
--- /dev/null
+++ b/templates/webapps/galaxy/mobile/dataset/peek.mako
@@ -0,0 +1,12 @@
+<div id="dataset_peek_${dataset.id}" title="Dataset Peek" class="panel">
+    <div class="toolbar">
+        <h1>${dataset.display_name()}</h1>
+        <a class="back button" href="#">Back</a>
+    </div>
+
+    <div class="pad" style="overflow: auto;">
+        <pre>
+            ${dataset.display_peek()}
+        </pre>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/mobile/form.mako b/templates/webapps/galaxy/mobile/form.mako
new file mode 100644
index 0000000..35069d9
--- /dev/null
+++ b/templates/webapps/galaxy/mobile/form.mako
@@ -0,0 +1,46 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+         "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+  <title>${form.title}</title>
+  <meta name="viewport" content="width=devicewidth; initial-scale=1.0; maximum-scale=1.0; user-scalable=0;"/>
+  <link rel="apple-touch-icon" href="${h.url_for('/static/iui/iui-logo-touch-icon.png')}" />
+  <meta name="apple-touch-fullscreen" content="YES" />
+  <style type="text/css" media="screen">@import "${h.url_for('/static/style/iphone.css')}";</style>
+  ## <script type="application/x-javascript" src="${h.url_for('/static/iui/iui.js')}"></script>
+</head>
+
+<body>
+    <div class="toolbar">
+        <h1 id="pageTitle">${form.title}</h1>
+        <a id="backButton" class="button" href="#"></a>
+    </div>
+    
+    <form title="${form.title}" class="panel" selected="true" name="${form.name}" action="${form.action}" method="post" >
+        <fieldset>
+        %for input in form.inputs:
+            <%
+            cls = "row"
+            if input.error:
+                cls += " form-row-error"
+            %>
+            <div class="${cls}">
+              %if input.use_label:
+              <label>
+                  ${input.label}:
+              </label>
+              %endif
+              <input type="${input.type}" name="${input.name}" value="${input.value}" size="40">
+              %if input.error:
+                  <div class="error">Error: ${input.error}</div>
+              %endif
+            </div>
+            
+        %endfor
+        </fieldset>
+      <input class="whiteButton" type="submit" value="${form.submit_text}">
+
+  </form>
+</body>
+</html>
diff --git a/templates/webapps/galaxy/mobile/history/detail.mako b/templates/webapps/galaxy/mobile/history/detail.mako
new file mode 100644
index 0000000..2be4f0f
--- /dev/null
+++ b/templates/webapps/galaxy/mobile/history/detail.mako
@@ -0,0 +1,80 @@
+<div id="history_detail_${history.id}">
+    <div class="toolbar">
+        <h1>${history.name}</h1>
+        <a class="back button" href="#">Back</a>
+    </div>
+    <ul class="edgetoedge">
+            
+        %for data in history.active_datasets:
+            %if data.visible:
+
+                <%
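+                    # Treat a missing or blank state as 'queued' for display purposes.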
+                    if data.state in ['no state','',None]:
+                        data_state = "queued"
+                    else:
+                        data_state = str( data.state )
+                %>
+
+                <li id="historyItemContainer-${data.id}">
+                
+                    <div style="float: left; padding-right: 8px;">
+                        <div style='display: none;' id="progress-${data.id}">
+                            <img src="${h.url_for('/static/style/data_running.gif')}" border="0">
+                        </div>
+                        %if data_state == 'running':
+                            <div><img src="${h.url_for('/static/style/data_running.gif')}" border="0"></div>
+                        %elif data_state == 'upload':
+                            <div><img src="${h.url_for('/static/style/data_upload.gif')}" border="0"></div>
+                        %else:
+                            <div><img src="${h.url_for( "/static/style/data_%s.png" % data_state )}" border="0"></div>
+                        %endif
+                    </div>    
+           
+                    <a href="${h.url_for(controller='mobile', action="dataset_detail", id=data.id )}">
+              
+                        <div>${data.hid}: ${data.display_name()}</div>
+        
+                        <div class="secondary">
+                            ## Body for history items, extra info and actions, data "peek"
+                            <% current_user_roles = trans.get_current_user_roles() %>
+                            %if not trans.user_is_admin() and not trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
+                                <div>You do not have permission to view this dataset.</div>
+                            %elif data_state == "queued":
+                                <div>Job is waiting to run</div>
+                            %elif data_state == "running":
+                                <div>Job is currently running</div>
+                            %elif data_state == "error":
+                                <div>
+                                    An error occurred running this job.
+                                </div>
+                            %elif data_state == "discarded":
+                                <div>
+                                    The job creating this dataset was cancelled before completion.
+                                </div>
+                            %elif data_state == 'setting_metadata':
+                                <div>Metadata is being auto-detected.</div>
+                            %elif data_state == "empty":
+                                <div>No data: <i>${data.display_info()}</i></div>
+                            %elif data_state in [ "ok", "failed_metadata" ]:
+                                <div>
+                                    %if data_state == "failed_metadata":
+                                        Warning: setting metadata failed,
+                                    %endif
+                                    ${data.blurb},
+                                    format: <span class="${data.ext}">${data.ext}</span>, 
+                                    database: <span class="${data.dbkey}">${data.dbkey}</span>
+                                </div>
+                            %else:
+                                <div>Error: unknown dataset state "${data_state}".</div>
+                            %endif               
+                        </div>
+        
+                    </a>
+               
+                </li>
+            %endif 
+
+        %endfor
+              
+    </ul>
+</div>
diff --git a/templates/webapps/galaxy/mobile/history/list.mako b/templates/webapps/galaxy/mobile/history/list.mako
new file mode 100644
index 0000000..ba9a590
--- /dev/null
+++ b/templates/webapps/galaxy/mobile/history/list.mako
@@ -0,0 +1,55 @@
+<div id="history_list">
+    <div class="toolbar">
+        <h1>Histories</h1>
+        <a class="back button" href="#">Back</a>
+    </div>
+    
+    %if trans.user is None:
+    <ul class="edgetoedge">
+      <li><i style="color: gray"> No histories available (not logged in) </i></li>
+    </ul>
+    
+    %else:
+    <ul class="edgetoedge">
+            
+            %for i, history in enumerate( trans.user.histories ):
+    
+                %if not ( history.purged or history.deleted ):
+    
+    
+                        <li>
+                            
+                            <a href="${h.url_for(controller='mobile', action="history_detail", id=history.id )}">
+                                ${history.name | h}
+                            
+                            <div class="secondary">${h.date.distance_of_time_in_words( history.update_time, h.date.datetime.utcnow() )} ago</div>
+              
+                            <div class="counts">
+                            <%
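+                                # Tally dataset states to build the colored summary line for this history.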
+                                total_ok = sum( 1 for d in history.active_datasets if d.state == 'ok' )
+                                total_running = sum( 1 for d in history.active_datasets if d.state == 'running' )
+                                total_queued = sum( 1 for d in history.active_datasets if d.state == 'queued' )
+                                total_error = sum( 1 for d in history.active_datasets if d.state in ( 'error', 'fail' ) )
+                                parts = []
+                                if total_ok:
+                                    parts.append( "<span style='color: #66AA66'>" + str(total_ok) + " finished</span>" )
+                                if total_queued:
+                                    parts.append( "<span style='color: #888888'>" + str(total_queued) + " queued</span>" )
+                                if total_running:
+                                    parts.append( "<span style='color: #AAAA66'>" + str(total_running) + " running</span>" )
+                                if total_error:
+                                    parts.append( "<span style='color: #AA6666'>" + str(total_error) + " failed</span>" )
+                                
+                            %>
+                            ${", ".join( parts )}        
+                            </div>
+                            
+                            </a>
+              
+                        </li>
+                %endif
+        %endfor
+              
+    </ul>
+    %endif
+</div>
diff --git a/templates/webapps/galaxy/mobile/index.mako b/templates/webapps/galaxy/mobile/index.mako
new file mode 100644
index 0000000..98ed491
--- /dev/null
+++ b/templates/webapps/galaxy/mobile/index.mako
@@ -0,0 +1,52 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+         "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+    <title>Galaxy</title>
+    
+    <style>
+    li > a > div {
+        white-space: nowrap;
+        overflow: hidden;
+        text-overflow: ellipsis;
+    }
+    div.secondary {
+        font-size: 13px;
+        color: gray;
+    }
+    div.counts {
+        padding-top: 1px;
+        font-size: 13px;
+        color: gray;
+    }
+    .logo {
+        background: center left no-repeat url(${h.url_for('/static/images/galaxyIcon_noText.png')});
+        padding-left: 30px;
+    }
+    </style>
+    
+    <style type="text/css" media="screen">@import "${h.url_for('/static/jqtouch/jqtouch.css')}";</style>
+    <script type="text/javascript" src="${h.url_for('/static/scripts/libs/jquery/jquery.js')}"></script>
+    <script type="text/javascript" src="${h.url_for('/static/scripts/libs/jquery/jqtouch.js')}"></script>
+    <script type="text/javascript" charset="utf-8">
+        $(document).jQTouch( {
+            icon: "${h.url_for('/static/images/galaxyIcon_noText.png')}",
+            slideInSelector: 'ul li a, .row a, a.async'
+        });
+    </script>
+</head>
+<body>
+    
+    
+    <div id="home" selected="true">
+        <div class="toolbar">
+            <h1><span class="logo">Galaxy</span></h1>
+            <a class="button async" href="${h.url_for(controller='mobile', action='settings' )}">Settings</a>
+        </div>
+        <ul class="edgetoedge">
+            <li><a href="${h.url_for(controller='mobile', action='history_list' )}">Histories</a></li>
+        </ul>
+    </div>
+
+</body>
+</html>
diff --git a/templates/webapps/galaxy/mobile/manage_library.mako b/templates/webapps/galaxy/mobile/manage_library.mako
new file mode 100644
index 0000000..8553b94
--- /dev/null
+++ b/templates/webapps/galaxy/mobile/manage_library.mako
@@ -0,0 +1,73 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/dataset/security_common.mako" import="render_permission_form" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+
+<% current_user_roles = trans.get_current_user_roles() %>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if trans.app.security_agent.can_modify_library_item( current_user_roles, library ):
+    <div class="toolForm">
+        <div class="toolFormTitle">Change library name and description</div>
+        <div class="toolFormBody">
+            <form name="library" action="${h.url_for( controller='library_common', action='library_info', id=trans.security.encode_id( library.id ), cntrller='mobile' )}" method="post" >
+                <div class="form-row">
+                    <label>Name:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <input type="text" name="name" value="${library.name}" size="40"/>
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Description:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <input type="text" name="description" value="${library.description}" size="40"/>
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Synopsis:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        <input type="text" name="synopsis" value="${library.synopsis}" size="40"/>
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <input type="submit" name="library_info_button" value="Save"/>
+            </form>
+        </div>
+    </div>
+    <p/>
+%else:
+    <div class="toolForm">
+        <div class="toolFormTitle">View information about ${library.name}</div>
+        <div class="form-row">
+            <label>Name:</label>
+            ${library.name}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Description:</label>
+            ${library.description}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Synopsis:</label>
+            ${library.synopsis}
+        </div>
+        <div style="clear: both"></div>
+        <div class="toolForm">
+            ${render_template_fields( cntrller='mobile', item_type='library', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, library_id=trans.security.encode_id( library.id ), info_association=info_association, inherited=inherited )}
+        </div>
+    </div>
+%endif
+%if trans.app.security_agent.can_manage_library_item( current_user_roles, library ):
+    <% roles = trans.app.security_agent.get_legitimate_roles( trans, library, 'mobile' ) %>
+    ${render_permission_form( library, library.name, h.url_for( controller='library_common', cntrller='mobile', action='library_permissions', id=trans.security.encode_id( library.id ) ), roles )}
+%endif
+
+%if widgets:
+    ${render_template_fields( cntrller='mobile', item_type='library', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, library_id=trans.security.encode_id( library.id ), info_association=info_association, inherited=inherited )}
+%endif
diff --git a/templates/webapps/galaxy/mobile/settings.mako b/templates/webapps/galaxy/mobile/settings.mako
new file mode 100644
index 0000000..f012a85
--- /dev/null
+++ b/templates/webapps/galaxy/mobile/settings.mako
@@ -0,0 +1,34 @@
+<form id="settings" class="panel" action="${h.url_for(controller='mobile', action='settings' )}" method="post">
+    
+    <div class="toolbar">
+        <h1>Settings</h1>
+        <a class="back button leftButton" href="#">Cancel</a>
+        <input class="button blueButton" type="submit" href="#" value="Save">
+    </div>
+    
+    <div class="pad">
+        
+
+            <h2>User information</h2>
+            <fieldset>
+                <%
+                    if t.user:
+                        email = t.user.email
+                    else:
+                        email = ""
+                %>
+                
+                <div class="row">
+                    <label>Email</label>
+                    <input type="text" name="email" value="${email}">
+                </div>
+                <div class="row">
+                    <label>Password</label>
+                    <input type="password" name="password" value ="">
+                </div>
+            </fieldset>
+            %if message:
+                <div>${message}</div>
+            %endif
+    </div>
+</form>
diff --git a/templates/webapps/galaxy/page/create.mako b/templates/webapps/galaxy/page/create.mako
new file mode 100644
index 0000000..0f38c4d
--- /dev/null
+++ b/templates/webapps/galaxy/page/create.mako
@@ -0,0 +1,14 @@
+<%inherit file="/form.mako"/>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+<script type="text/javascript">
+$(function(){
+    var page_name = $("input[name=page_title]");
+    var page_slug = $("input[name=page_slug]");
+    page_name.keyup(function(){
+        page_slug.val( $(this).val().replace(/\s+/g,'-').replace(/[^a-zA-Z0-9\-]/g,'').toLowerCase() );
+    });
+});
+</script>
+</%def>
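
The keyup handler above derives a URL slug from the page title as the user
types. The same transform in isolation (a minimal sketch; the function name
and sample input are hypothetical, the regexes mirror create.mako):

    // Collapse whitespace to '-', drop anything outside [a-zA-Z0-9-],
    // then lowercase.
    function titleToSlug(title) {
        return title.replace(/\s+/g, '-')
                    .replace(/[^a-zA-Z0-9\-]/g, '')
                    .toLowerCase();
    }
    // titleToSlug("My First Page!") === "my-first-page"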
diff --git a/templates/webapps/galaxy/page/display.mako b/templates/webapps/galaxy/page/display.mako
new file mode 100644
index 0000000..12ebbca
--- /dev/null
+++ b/templates/webapps/galaxy/page/display.mako
@@ -0,0 +1,158 @@
+<%inherit file="/display_base.mako"/>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+    
+        $(function() {
+            // Setup embedded content:
+            //  (a) toggles for showing/hiding embedded content;
+            //  (b) lazy AJAX loading of item content on first expand.
+            $('.embedded-item').each( function() {
+                var container = $(this);
+                if( container.hasClass( 'history' ) ){ return; }
+                // note: we can't do the same override for visualizations
+                // because builtins (like trackster) need the handlers/ajax below to work.
+                // instead (for registry visualizations) we'll clear the handlers below
+                // and add new ones (in embed_in_frame.mako).
+            
+                // Show embedded item.
+                var show_embedded_item = function() {
+                    var ajax_url = container.find("input[type=hidden]").val();
+                    // Only get item content if it's not already there.
+                    var item_content = $.trim(container.find(".item-content").text());
+                    if (!item_content) {
+                        $.ajax({
+                            type: "GET",
+                            url: ajax_url,
+                            error: function() { alert("Getting item content failed."); },
+                            success: function( item_content ) {
+                                container.find(".summary-content").hide("fast");
+                                container.find(".item-content").html(item_content);
+                                container.find(".expanded-content").show("fast");
+                                container.find(".toggle-expand").hide();
+                                container.find(".toggle").show();
+
+                                make_popup_menus();
+                            }
+                        });
+                    } else {
+                        container.find(".summary-content").hide("fast");
+                        container.find(".expanded-content").show("fast");
+                        container.find(".toggle-expand").hide();
+                        container.find(".toggle").show();
+                    }
+                };
+            
+                // Hide embedded item.
+                var hide_embedded_item = function() {
+                    container.find(".expanded-content").hide("fast");
+                    container.find(".summary-content").show("fast");
+                    container.find(".toggle").hide();
+                    container.find(".toggle-expand").show();
+                };
+            
+                // Setup toggle expand.
+                var toggle_expand = $(this).find('.toggle-expand');
+                toggle_expand.click( function() {
+                    show_embedded_item();
+                    return false;
+                });
+            
+                // Setup toggle contract.
+                var toggle_contract = $(this).find('.toggle');
+                toggle_contract.click( function() {
+                    hide_embedded_item();
+                    return false;
+                });
+            
+                // Setup toggle embed.
+                var toggle_embed = $(this).find('.toggle-embed');
+                toggle_embed.click( function() {
+                    if (container.find(".expanded-content").is(":visible")) {
+                        hide_embedded_item();
+                    } else {
+                        show_embedded_item();
+                    }
+                    return false;
+                });
+            });
+        });
+    
+    </script>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "base", "autocomplete_tagging" )}
+    <style type="text/css">
+        .toggle { display: none; }
+        .embedded-item .title {
+            padding-top: 1px;
+        }
+        .embedded-item h4 {
+            margin: 0px;
+        }
+        ## Format tables in pages so that they look like they do in the page editor.
+        .page-body > table {
+            padding: 8px 5px 5px;
+            min-width: 500px; 
+            border: none;
+            margin-top: 1em;
+            margin-bottom: 1em;
+        }
+        .page-body caption { 
+            text-align: left;
+            background: #E4E4B0; 
+            padding: 5px; 
+            font-weight: bold; 
+        }
+        .page-body > table td {
+            width: 25%;
+            padding: 0.2em 0.8em;
+        }
+        ## HACKs to get Trackster navigation controls to display.
+        .embedded-item .trackster-nav-container {
+            height: inherit;
+        }
+        .embedded-item .trackster-nav {
+            position: inherit;
+        }
+
+        /* ---------------------------- histories */
+        .embedded-item.history .toggle {
+            display: inline;
+        }
+        /** wraps around the history */
+        .embedded-item.history .item-content {
+            background-color: white;
+            padding: 8px;
+            border-radius: 0px 0px 4px 4px;
+        }
+        .embedded-item.history .history-panel .datasets-list {
+            margin-bottom: 8px;
+        }
+        .embedded-item.history .history-panel .errormessage {
+            margin-top: 8px;
+        }
+        .annotated-history-panel .history-controls {
+            margin: 0px 0px 16px 0px;
+        }
+
+        /* ---------------------------- visualizations */
+        .embedded-item.visualization .item-content {
+            max-height: none;
+        }
+    </style>
+</%def>
+
+<%def name="render_item_header( item )">
+    ## No header for pages.
+</%def>
+
+<%def name="render_item_links( page )">
+</%def>
+
+<%def name="render_item( page, page_data=None )">
+    ${page_data}
+</%def>
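
The embedded-item script in the javascripts() def above lazy-loads each
item's body: the first expand issues a GET to the URL stored in the item's
hidden input, caches the response in .item-content, and subsequent toggles
only flip visibility. A condensed sketch of that fetch-once-then-toggle
pattern (jQuery assumed; class names taken from the template, function name
hypothetical):

    function expandLazily($item) {
        // Fetch the body only on first expand; afterwards just reveal the cache.
        if (!$.trim($item.find('.item-content').text())) {
            $.get($item.find('input[type=hidden]').val(), function (html) {
                $item.find('.item-content').html(html);
                $item.find('.expanded-content').show('fast');
            });
        } else {
            $item.find('.expanded-content').show('fast');
        }
    }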
diff --git a/templates/webapps/galaxy/page/editor.mako b/templates/webapps/galaxy/page/editor.mako
new file mode 100644
index 0000000..e3f7cda
--- /dev/null
+++ b/templates/webapps/galaxy/page/editor.mako
@@ -0,0 +1,64 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="user"
+    self.overlay_visible=False
+%>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        // Define variables needed by galaxy.pages script.
+        var page_id = "${trans.security.encode_id(page.id)}",
+            page_list_url = '${h.url_for( controller='page', action='list' )}',
+            list_objects_url = "${h.url_for(controller='page', action='LIST_ACTION' )}",
+            set_accessible_url = "${h.url_for( controller='ITEM_CONTROLLER', action='set_accessible_async' )}",
+            get_name_and_link_url = "${h.url_for( controller='ITEM_CONTROLLER', action='get_name_and_link_async' )}?id=",
+            list_histories_for_selection_url = "${h.url_for(controller='page', action='list_histories_for_selection' )}",
+            editor_base_path = "${h.url_for('/static/wymeditor')}/",
+            iframe_base_path = "${h.url_for('/static/wymeditor/iframe/galaxy')}/",
+            save_url = "${h.url_for(controller='page', action='save' )}";
+    </script>
+    ${h.js(
+        "libs/jquery/jquery.form",
+        "libs/jquery/jstorage",
+        "libs/jquery/jquery.wymeditor",
+        "libs/jquery/jquery.autocomplete",
+        "galaxy.pages"
+    )}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "base", "autocomplete_tagging", "embed_item" )}
+    <style type='text/css'>
+        .galaxy-page-editor-button
+        {
+            position: relative;
+            float: left;
+            padding: 0.2em;
+        }
+    </style>
+</%def>
+
+<%def name="center_panel()">
+
+    <div class="unified-panel-header" unselectable="on">
+        <div class="unified-panel-header-inner" style="float: right">
+            <a id="save-button" class="panel-header-button">Save</a>
+            <a id="close-button" class="panel-header-button">Close</a>
+        </div>
+        <div class="unified-panel-header-inner">
            Page Editor <span style="font-weight: normal">| Title: ${page.title | h}</span>
+        </div>
+    </div>
+
+    <div class="unified-panel-body">
+        <textarea name="page_content">${util.unicodify( page.latest_revision.content )}</textarea>
+    </div>
+
+</%def>
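
editor.mako passes configuration to the galaxy.pages script through page
globals; list_objects_url and the two ITEM_CONTROLLER URLs deliberately keep
the literal LIST_ACTION and ITEM_CONTROLLER placeholders, presumably so the
script can derive one concrete URL per item type at runtime. A sketch of
that substitution (function name hypothetical):

    // Derive a concrete endpoint from the templated URL, e.g. for histories.
    function urlForController(templateUrl, controller) {
        return templateUrl.replace('ITEM_CONTROLLER', controller);
    }
    // urlForController(set_accessible_url, 'history')
    //   -> ".../history/set_accessible_async"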
diff --git a/templates/webapps/galaxy/page/index.mako b/templates/webapps/galaxy/page/index.mako
new file mode 100644
index 0000000..450b74d
--- /dev/null
+++ b/templates/webapps/galaxy/page/index.mako
@@ -0,0 +1,53 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="shared"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="center_panel()">
+
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            <!-- embedded grid -->
+            ${h.to_unicode( embedded_grid )}
+
+            <br><br>
+            <h2>Pages shared with you by others</h2>
+
+            %if shared_by_others:
+                <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+                    <tr class="header">
+                        <th>Title</th>
+                        <th>Owner</th>
+                        <th></th>
+                    </tr>
+                    %for i, association in enumerate( shared_by_others ):
+                        <% page = association.page %>
+                        <tr>
+                            <td>
+                                <a class="menubutton" id="shared-${i}-popup" href="${h.url_for(controller='page', action='display_by_username_and_slug', username=page.user.username, slug=page.slug)}">${page.title | h}</a>
+                            </td>
+                            <td>${page.user.username}</td>
+                            <td>
+                                <div popupmenu="shared-${i}-popup">
+                                    <a class="action-button" href="${h.url_for(controller='page', action='display_by_username_and_slug', username=page.user.username, slug=page.slug)}" target="_top">View</a>
+                                </div>
+                            </td>
+                        </tr>    
+                    %endfor
+                </table>
+            %else:
+
+                No pages have been shared with you.
+
+            %endif
+
+        </div>
+    </div>
+
+</%def>
diff --git a/templates/webapps/galaxy/page/list_published.mako b/templates/webapps/galaxy/page/list_published.mako
new file mode 100644
index 0000000..d016a9e
--- /dev/null
+++ b/templates/webapps/galaxy/page/list_published.mako
@@ -0,0 +1,33 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="shared"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">
+    Galaxy | Published Pages
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        .grid td {
+            min-width: 100px;
+        }
+    </style>
+</%def>
+
+<%def name="center_panel()">
+
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            <!-- embedded grid -->
+            ${h.to_unicode( embedded_grid )}
+        </div>
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/page/select_items_grid.mako b/templates/webapps/galaxy/page/select_items_grid.mako
new file mode 100644
index 0000000..0cc15ca
--- /dev/null
+++ b/templates/webapps/galaxy/page/select_items_grid.mako
@@ -0,0 +1,3 @@
+## Template generates a grid that enables the user to select items.
+<%namespace file="../grid_base.mako" import="*" />
+${load(True)}
diff --git a/templates/webapps/galaxy/page/wymiframe.mako b/templates/webapps/galaxy/page/wymiframe.mako
new file mode 100644
index 0000000..5211ec3
--- /dev/null
+++ b/templates/webapps/galaxy/page/wymiframe.mako
@@ -0,0 +1,27 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+ * WYMeditor : what you see is What You Mean web-based editor
+ * Copyright (c) 2005 - 2009 Jean-Francois Hovinne, http://www.wymeditor.org/
+ * Dual licensed under the MIT (MIT-license.txt)
+ * and GPL (GPL-license.txt) licenses.
+ *
+ * For further information visit:
+ *        http://www.wymeditor.org/
+ *
+ * File Name:
+ *        wymiframe.html
+ *        Iframe used by designMode.
+ *        See the documentation for more info.
+ *
+ * File Authors:
+ *        Jean-Francois Hovinne (jf.hovinne a-t wymeditor dotorg)
+-->
+<html>
+    <head>
+        <title>WYMeditor iframe</title>
+        <meta http-equiv="X-UA-Compatible" content="IE=EmulateIE7" />
+        <link rel="stylesheet" type="text/css" media="screen" href="/static/wymeditor/iframe/galaxy/wymiframe.css" />
+        ${h.css("base", "autocomplete_tagging", "embed_item")}
+    </head>
+    <body class="wym_iframe text-content"></body>
+</html>
diff --git a/templates/webapps/galaxy/requests/common/add_samples.mako b/templates/webapps/galaxy/requests/common/add_samples.mako
new file mode 100644
index 0000000..e795841
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/add_samples.mako
@@ -0,0 +1,128 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/requests/common/common.mako" import="common_javascripts" />
+<%namespace file="/requests/common/common.mako" import="render_samples_grid" />
+<%namespace file="/requests/common/common.mako" import="render_request_type_sample_form_grids" />
+<%namespace file="/requests/common/common.mako" import="render_samples_messages" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${common_javascripts()}
+</%def>
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+
+    is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+    is_complete = request.is_complete
+    is_submitted = request.is_submitted
+    is_unsubmitted = request.is_unsubmitted
+    if is_admin:
+        can_add_samples = not is_complete
+    else:
+        can_add_samples = is_unsubmitted
+    can_delete_samples = request.samples and not is_complete
+    can_edit_samples = request.samples and ( is_admin or not is_complete )
+    can_edit_request = ( is_admin and not request.is_complete ) or request.is_unsubmitted
+    can_reject = is_admin and is_submitted
+    can_submit = request.samples and is_unsubmitted
+%>
+
+<br/><br/>
+
+<ul class="manage-table-actions">
+    %if can_edit_samples:
+        <li><a class="action-button" href="${h.url_for( controller='requests_common', action='edit_samples', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Edit samples</a></li>
+    %endif
+    %if can_submit:
+        <li><a class="action-button" confirm="More samples cannot be added to this request after it is submitted. Click OK to submit." href="${h.url_for( controller='requests_common', action='submit_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Submit request</a></li>
+    %endif
+    <li><a class="action-button" id="request-${request.id}-popup" class="menubutton">Request Actions</a></li>
+    <div popupmenu="request-${request.id}-popup">
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Browse this request</a>
+        %if can_edit_request:
+            <a class="action-button" href="${h.url_for( controller='requests_common', action='edit_basic_request_info', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Edit this request</a>
+        %endif
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request_history', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">View history</a>
+        %if can_reject:
+            <a class="action-button" href="${h.url_for( controller='requests_admin', action='reject_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Reject this request</a>
+        %endif
+    </div>
+</ul>
+
+${render_samples_messages(request, is_admin, is_submitted, message, status)}
+
+<div class="toolFormBody">
+    <form id="add_samples" name="add_samples" action="${h.url_for( controller='requests_common', action='add_samples', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}" method="post">
+        %if displayable_sample_widgets:
+            <%
+                grid_header = '<h3>Add samples to sequencing request "%s"</h3>' % request.name
+            %>
+            ${render_samples_grid( cntrller, request, displayable_sample_widgets, action='edit_samples', adding_new_samples=True, encoded_selected_sample_ids=[], render_buttons=False, grid_header=grid_header )}
+            <div class="toolParamHelp" style="clear: both;">
+                For each sample, select the data library and folder in which you would like the run datasets deposited.
+                To automatically run a workflow on run datasets, select a history first and then the desired workflow.
+            </div>
+            ## Render the other grids
+            <% trans.sa_session.refresh( request.type.sample_form ) %>
+            %for grid_index, grid_name in enumerate( request.type.sample_form.layout ):
+                ${render_request_type_sample_form_grids( grid_index, grid_name, request.type.sample_form.grid_fields( grid_index ), displayable_sample_widgets=displayable_sample_widgets, show_saved_samples_read_only=True )}
+            %endfor
+        %else:
+            <label>There are no samples.</label>
+        %endif  
+        %if can_add_samples:
+            ## The user is adding a new sample
+            %if displayable_sample_widgets:
+                <p/>
+                <div class="form-row">
+                    <label> Copy <input type="text" name="num_sample_to_copy" value="1" size="3"/> samples from sample ${sample_copy_select_field.get_html()}</label>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Select the sample from which the new sample should be copied or leave selection as <b>None</b> to add a new "generic" sample.
+                    </div>
+                </div>
+            %endif
+            <p/>
+            <div class="form-row">
+                <input type="hidden" name="twill" value=""/>
+                %if ( request.samples or displayable_sample_widgets ) and len( displayable_sample_widgets ) > len( request.samples ):
+                    <input type="submit" name="add_sample_button" value="Add sample" />
+                    <input type="submit" name="save_samples_button" value="Save"/>
+                    <input type="submit" name="cancel_changes_button" value="Cancel"/>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Click the <b>Add sample</b> button for each new sample and click the <b>Save</b> button when you have finished adding samples.
+                    </div>
+                %else:
+                    <input type="submit" name="add_sample_button" value="Add sample"/>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Click the <b>Add sample</b> button for each new sample.
+                    </div>
+                %endif
+            </div>
+        %endif
+    </form>
+</div>
+%if is_unsubmitted:
+    <p/>
+    <h4><img src="/static/images/silk/resultset_next.png" alt="Hide" onclick="showContent(this);" style="cursor:pointer;"/>Import samples from csv file</h4>
+    <div style="display:none;">
+        <div class="toolFormBody">
+            <form id="import" name="import" action="${h.url_for( controller='requests_common', action='add_samples', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}" enctype="multipart/form-data" method="post" >
+                <div class="form-row">
+                    <input type="file" name="file_data" />
+                    <input type="submit" name="import_samples_button" value="Import samples"/>
+                    <div class="toolParamHelp" style="clear: both;">
+                        The CSV file must be in the following format.<br/>
+                        The [:FieldValue] part is optional; if included, the named form field will contain the value after the ':'.<br/>
+                        SampleName,DataLibraryName,FolderName,HistoryName,WorkflowName,Field1Name:Field1Value,Field2Name:Field2Value...
+                    </div>
+                </div>
+            </form>
+        </div>
+    </div>
+%endif
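
Given the format string above, one data row of the import file might look
like this (all values hypothetical; the last two columns use the optional
FieldName:FieldValue form):

    sample_A01,SequencingLib,Run42,QC history,FastQC workflow,Concentration:5,Primer:M13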
diff --git a/templates/webapps/galaxy/requests/common/common.mako b/templates/webapps/galaxy/requests/common/common.mako
new file mode 100644
index 0000000..2880800
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/common.mako
@@ -0,0 +1,727 @@
+<%namespace file="/requests/common/sample_state.mako" import="render_sample_state" />
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+   ${self.common_javascripts()}
+</%def>
+
+<%def name="common_javascripts()">
+    <script type="text/javascript">
+        function showContent(vThis)
+        {
+            // http://www.javascriptjunkie.com
+            var vParent = vThis.parentNode;
+            var vSibling = vParent.nextSibling;
+            while (vSibling.nodeType == 3) {
+                // Skip whitespace-only text nodes (Mozilla/Firefox insert them between elements)
+                vSibling = vSibling.nextSibling;
+            }
+            if (vSibling.style.display == "none")
+            {
+                vThis.src = "/static/images/silk/resultset_bottom.png";
+                vThis.alt = "Hide";
+                vSibling.style.display = "block";
+            } else {
+                vSibling.style.display = "none";
+                vThis.src = "/static/images/silk/resultset_next.png";
+                vThis.alt = "Show";
+            }
+        }
+        $(document).ready(function(){
+            // hide all of the elements with class msg_body
+            $(".msg_body").hide();
+            // toggle the msg_body element that follows a clicked msg_head
+            $(".msg_head").click(function(){
+                $(this).next(".msg_body").slideToggle(0);
+            });
+        });
+
+        function checkAllFields()
+        {
+            var chkAll = document.getElementById('checkAll');
+            var checks = document.getElementsByTagName('input');
+            var boxLength = checks.length;
+            // Mirror the "check all" box onto every per-sample checkbox.
+            for ( var i = 0; i < boxLength; i++ )
+            {
+                if ( checks[i].name.indexOf( 'select_sample_' ) != -1 )
+                {
+                    checks[i].checked = chkAll.checked;
+                }
+            }
+        }
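+
+        // A terser jQuery equivalent of the loop above (a sketch; assumes the
+        // per-sample checkboxes keep the select_sample_ name prefix):
+        //     $("input[name^='select_sample_']").prop('checked', chkAll.checked);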
+
+        // Sample State Updater
+        // 
+        // Looks for changes in sample states using an async request. Keeps
+        // calling itself (via setTimeout) until all samples are in a terminal
+        // state.
+        var sample_state_updater = function ( sample_states ) {
+            // Check if there are any items left to track
+            var empty = true;
+            for ( var i in sample_states ) {
+                empty = false;
+                break;
+            }
+            if ( ! empty ) {
+                setTimeout( function() { sample_state_updater_callback( sample_states ) }, 3000 );
+            }
+        };
+        var sample_state_updater_callback = function ( sample_states ) {
+            // Build request data
+            var ids = [];
+            var states = [];
+            $.each( sample_states, function ( id, state ) {
+                ids.push( id );
+                states.push( state );
+            });
+            // Make ajax call
+            $.ajax( {
+                type: "POST",
+                url: "${h.url_for( controller='requests_common', action='sample_state_updates' )}",
+                dataType: "json",
+                data: { ids: ids.join( "," ), states: states.join( "," ) },
+                success : function ( data ) {
+                    $.each( data, function( id, val ) {
+                        // Replace sample state HTML
+                        var cell1 = $("#sampleState-" + id);
+                        cell1.html( val.html_state );
+                        sample_states[ parseInt( id ) ] = val.state;
+                    });
+                    sample_state_updater( sample_states ); 
+                },
+                error: function() {
+                    // Just retry, as the old method did; this could be smarter
+                    sample_state_updater( sample_states );
+                }
+            });
+        };
+        
+        
+        // Sample Datasets Updater
+        // 
+        // Looks for changes in the number of sample datasets using an async request. Keeps
+        // calling itself (via setTimeout) until all samples are in a terminal
+        // state.
+        var sample_datasets_updater = function ( sample_datasets ) {
+            // Check if there are any items left to track
+            var empty = true;
+            for ( var i in sample_datasets ) {
+                empty = false;
+                break;
+            }
+            if ( ! empty ) {
+                setTimeout( function() { sample_datasets_updater_callback( sample_datasets ) }, 3000 );
+            }
+        };
+        var sample_datasets_updater_callback = function ( sample_datasets ) {
+            // Build request data
+            var ids = [];
+            var datasets = [];
+            $.each( sample_datasets, function ( id, num_of_datasets ) {
+                ids.push( id );
+                datasets.push( num_of_datasets );
+            });
+            // Make ajax call
+            $.ajax( {
+                type: "POST",
+                url: "${h.url_for( controller='requests_common', action='sample_datasets_updates' )}",
+                dataType: "json",
+                data: { ids: ids.join( "," ), datasets: datasets.join( "," ) },
+                success : function ( data ) {
+                    $.each( data, function( id, val ) {
+                        // Replace sample datasets HTML
+                        var cell2 = $("#sampleDatasets-" + id);
+                        cell2.html( val.html_datasets );
+                        sample_datasets[ parseInt( id ) ] = val.datasets;
+
+                    });
+                    sample_datasets_updater( sample_datasets ); 
+                },
+                error: function() {
+                    // Just retry, as the old method did; this could be smarter
+                    sample_datasets_updater( sample_datasets ); 
+                }
+            });
+        };
+        
+        // Sample Dataset Transfer Status Updater
+        //
+        // Updates the transfer status on the Manage Datasets page for a sample
+        // of a sequencing request.
+        // Looks for changes in sample dataset transfer status using an async request. Keeps
+        // calling itself (via setTimeout) until every transfer_status is complete.
+        var dataset_transfer_status_updater = function ( dataset_transfer_status_list ) {
+            // Check if there are any items left to track
+            var empty = true;
+            for ( var i in dataset_transfer_status_list ) {
+                empty = false;
+                break;
+            }
+            if ( ! empty ) {
+                setTimeout( function() { dataset_transfer_status_updater_callback( dataset_transfer_status_list ) }, 3000 );
+            }
+        };
+        var dataset_transfer_status_updater_callback = function ( dataset_transfer_status_list ) {
+            // Build request data
+            var ids = [];
+            var transfer_status_list = [];
+            $.each( dataset_transfer_status_list, function ( id, dataset_transfer_status ) {
+                ids.push( id );
+                transfer_status_list.push( dataset_transfer_status );
+            });
+            // Make ajax call
+            $.ajax( {
+                type: "POST",
+                url: "${h.url_for( controller='requests_common', action='dataset_transfer_status_updates' )}",
+                dataType: "json",
+                data: { ids: ids.join( "," ), transfer_status_list: transfer_status_list.join( "," ) },
+                success : function ( data ) {
+                    $.each( data, function( id, val ) {
+                        // Replace HTML
+                        var cell1 = $("#datasetTransferStatus-" + id);
+                        cell1.html( val.html_status );
+                        dataset_transfer_status_list[ id ] = val.status;
+                    });
+                    dataset_transfer_status_updater( dataset_transfer_status_list ); 
+                },
+                error: function() {
+                    // Just retry, as the old method did; this could be smarter
+                    dataset_transfer_status_updater( dataset_transfer_status_list ); 
+                }
+            });
+        };
+    </script>
+</%def>
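+
+## The three *_updater functions above share one shape: seed a map of
+## id -> last-seen value, POST it to the server, patch the DOM for anything
+## that changed, and re-arm via setTimeout until nothing is left to track.
+## A single generic poller could cover all three; a minimal sketch, assuming
+## the same jQuery conventions and 3-second interval as above (nothing here
+## is wired in):
+##
+##     var makeUpdater = function( url, applyUpdate ) {
+##         var poll = function( tracked ) {
+##             if ( $.isEmptyObject( tracked ) ) { return; }  // all terminal
+##             setTimeout( function() {
+##                 $.post( url,
+##                         { ids: Object.keys( tracked ).join( "," ),
+##                           states: $.map( tracked, function( v ) { return v; } ).join( "," ) },
+##                         function( data ) {
+##                             $.each( data, function( id, val ) {
+##                                 applyUpdate( id, val );  // replace the cell's HTML
+##                                 tracked[ id ] = val.state;
+##                             });
+##                             poll( tracked );
+##                         }, "json" );
+##             }, 3000 );
+##         };
+##         return poll;
+##     };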
+
+<%def name="transfer_status_updater()">
+    <% 
+        can_update = False
+        if query.count():
+            # Get the first sample dataset to get to the parent sample
+            sample_dataset = query[0]
+            sample = sample_dataset.sample
+            is_complete = sample.request.is_complete
+            is_submitted = sample.request.is_submitted
+            can_update = ( is_complete or is_submitted ) and sample.untransferred_dataset_files
+    %>
+    %if can_update:
+        <script type="text/javascript">
+            // Sample dataset transfer status updater
+            dataset_transfer_status_updater( {${ ",".join( [ '"%s" : "%s"' % ( trans.security.encode_id( sd.id ), sd.status ) for sd in query ] ) }});
+        </script>
+    %endif
+</%def>
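+
+## With two tracked datasets, the call rendered by the def above would look
+## roughly like this (ids and statuses hypothetical):
+##     dataset_transfer_status_updater( {"f2db41e1fa331b3e" : "Not started", "1cd8e2f6b131e891" : "In progress"} );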
+
+<%def name="render_editable_sample_row( cntrller, request, sample, sample_widget_index, sample_widget, encoded_selected_sample_ids, adding_new_samples=False )">
+    <%
+        trans.sa_session.refresh( request )
+        is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+        is_rejected = request.is_rejected
+        is_complete = request.is_complete
+        is_submitted = request.is_submitted
+        is_unsubmitted = request.is_unsubmitted
+        if sample:
+            can_delete_samples = not adding_new_samples and request.samples and ( ( is_admin and not is_complete ) or is_unsubmitted )
+            display_checkboxes = not adding_new_samples and ( is_complete or is_rejected or is_submitted )
+            display_datasets = request.samples and ( is_complete or is_submitted )
+        else:
+            can_delete_samples = False
+            display_checkboxes = False
+            display_datasets = False
+        display_bar_code = request.samples and ( is_complete or is_rejected or is_submitted )
+    %>
+    <%
+        if display_checkboxes and trans.security.encode_id( sample.id ) in encoded_selected_sample_ids:
+            checked_str = "checked"
+        else:
+            checked_str = ""
+    %>
+    %if display_checkboxes:
+        <td valign="top"><input type="checkbox" name=select_sample_${sample.id} id="sample_checkbox" value="true" ${checked_str}/><input type="hidden" name=select_sample_${sample.id} id="sample_checkbox" value="true"/></td>
+    %endif
+    <td valign="top">
+        <input type="text" name="sample_${sample_widget_index}_name" value="${sample_widget['name'] | h}" size="10"/>
+        <div class="toolParamHelp" style="clear: both;">
+            <i>(required)</i>
+        </div>
+    </td>
+    %if display_bar_code:
+        <td valign="top">
+            %if is_admin and is_submitted:
+                <input type="text" name="sample_${sample_widget_index}_bar_code" value="${sample_widget['bar_code'] | h}" size="10"/>
+            %else:
+                ${sample_widget['bar_code'] | h}
+                <input type="hidden" name="sample_${sample_widget_index}_bar_code" value="${sample_widget['bar_code'] | h}"/>
+            %endif
+        </td>
+    %endif
+    %if sample:
+        %if is_unsubmitted:
+            <td>Unsubmitted</td>
+        %else:
+            <td valign="top"><a href="${h.url_for( controller='requests_common', action='view_sample_history', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${sample.state.name}</a></td>
+        %endif
+    %else:
+        <td></td>
+    %endif
+    <td valign="top">${sample_widget['library_select_field'].get_html()}</td>
+    <td valign="top">${sample_widget['folder_select_field'].get_html()}</td>
+    <td valign="top">${sample_widget['history_select_field'].get_html()}</td>
+    <td valign="top">
+    ${sample_widget['workflow_select_field'][0].get_html()}
+    %if len(sample_widget['workflow_select_field']) > 1:
+        <br/>
+        ${'<br/>'.join(["%s:<br/>%s" % (w_l, w_i.get_html()) for w_l, w_i in sample_widget['workflow_select_field'][1:]])}
+    %endif
+    </td>
+    %if display_datasets:
+        <td valign="top">
+            ## An admin can select the datasets to transfer, while a non-admin can only view what has been selected
+            %if is_admin:
+                ## This link will direct the admin to a page allowing them to manage datasets.
+                <a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_admin', action='manage_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
+            %elif sample.datasets:
+                <%
+                    # Get an external_service from one of the sample datasets.  This assumes all sample datasets are associated with
+                    # the same external service - hopefully this is a good assumption.
+                    external_service = sample.datasets[0].external_service
+                %>
+                ## Since this is a regular user, only display a link if there is at least 1
+                ## selected dataset for the sample.
+                <a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, external_service_id=trans.security.encode_id( external_service.id ), sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
+            %else:
+                ## Since this is a regular user, do not display a link if there are no datasets.
+                <a id="sampleDatasets-${sample.id}">${len( sample.datasets )}</a>
+            %endif
+        </td>
+    %endif
+    %if can_delete_samples:
+        ## Delete button
+        <td valign="top"><a class="action-button" confirm="This sample is not recoverable after deletion. Click Ok to delete." href="${h.url_for( controller='requests_common', action='delete_sample', cntrller=cntrller, request_id=trans.security.encode_id( request.id ), sample_id=sample_widget_index )}"><img src="${h.url_for('/static/images/history-buttons/delete_icon.png')}" style="cursor:pointer;"/></a></td>
+    %endif
+</%def>
+
+<%def name="render_samples_grid( cntrller, request, displayable_sample_widgets, action, adding_new_samples=False, encoded_selected_sample_ids=[], render_buttons=False, grid_header='<h3>Samples</h3>' )">
+    ## Displays the "Samples" grid
+<%
+    trans.sa_session.refresh( request )
+    is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+    is_complete = request.is_complete
+    is_rejected = request.is_rejected
+    is_submitted = request.is_submitted
+    is_unsubmitted = request.is_unsubmitted
+    if is_admin:
+        can_add_samples = not is_complete
+    else:
+        can_add_samples = is_unsubmitted
+    can_delete_samples = not adding_new_samples and request.samples and ( ( is_admin and not is_complete ) or is_unsubmitted )
+    can_edit_samples = request.samples and ( is_admin or not is_complete )
+    can_transfer_datasets = is_admin and request.samples and not request.is_rejected
+    display_checkboxes = not adding_new_samples and ( is_complete or is_rejected or is_submitted )
+    display_bar_code = request.samples and ( is_complete or is_rejected or is_submitted )
+    display_datasets = request.samples and ( is_complete or is_submitted )
+%>
+    ${grid_header}
+    %if render_buttons and ( can_add_samples or can_edit_samples ):
+        <ul class="manage-table-actions">
+            %if can_add_samples:
+                <li><a class="action-button" href="${h.url_for( controller='requests_common', action='add_sample', cntrller=cntrller, request_id=trans.security.encode_id( request.id ), add_sample_button='Add sample' )}">Add sample</a></li>
+            %endif
+            %if can_edit_samples:
+                <li><a class="action-button" href="${h.url_for( controller='requests_common', action='edit_samples', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Edit samples</a></li>
+            %endif
+        </ul>
+    %endif
+    <table class="grid">
+        <thead>
+            <tr>
+                %if display_checkboxes:
+                    <th><input type="checkbox" id="checkAll" name=select_all_samples_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_samples_checkbox value="true"/></th>
+                %endif
+                <th>Name</th>
+                %if display_bar_code:
+                    <th>Barcode</th>
+                %endif
+                <th>State</th>
+                <th>Data Library</th>
+                <th>Folder</th>
+                <th>History</th>
+                <th>Workflow</th>
+                %if display_datasets:
+                    <th>Run Datasets</th>
+                %endif
+                <th>
+                    %if can_delete_samples:
+                        Delete
+                    %endif
+                </th>
+            </tr>
+        </thead>
+        <tbody>
+            <% trans.sa_session.refresh( request ) %>
+            ## Each displayable_sample_widget is a dictionary whose keys include:
+            ## id, name, bar_code, library, folder, field_values, library_select_field, folder_select_field
+            ## A displayable_sample_widget will have an id == None if the widget's associated sample has not
+            ## yet been saved (i.e., the user clicked the "Add sample" button but has not yet clicked the
+            ## "Save" button).
+            %for sample_widget_index, sample_widget in enumerate( displayable_sample_widgets ):
+                <%
+                    sample_widget_name = sample_widget[ 'name' ]
+                    sample_widget_bar_code = sample_widget[ 'bar_code' ]
+                    sample_widget_library = sample_widget[ 'library' ]
+                    sample_widget_history = sample_widget[ 'history' ]
+                    sample_widget_workflow = sample_widget[ 'workflow' ]
+                    if sample_widget_library:
+                        if cntrller == 'requests':
+                            library_cntrller = 'library'
+                        elif is_admin:
+                            library_cntrller = 'library_admin'
+                        else:
+                            library_cntrller = None
+                    sample_widget_folder = sample_widget[ 'folder' ]
+                    try:
+                        sample = request.samples[ sample_widget_index ]
+                    except IndexError:
+                        sample = None
+                %>
+                %if not adding_new_samples:
+                    <tr>${render_editable_sample_row( cntrller, request, sample, sample_widget_index, sample_widget, encoded_selected_sample_ids, adding_new_samples=False )}</tr>
+                %elif sample:
+                    <tr>
+                        <td>
+                            %if sample.state and can_transfer_datasets:
+                                ## A sample will have a state only after the request has been submitted.
+                                <%
+                                    encoded_id = trans.security.encode_id( sample.id )
+                                    transferred_dataset_files = sample.transferred_dataset_files
+                                    if not transferred_dataset_files:
+                                        transferred_dataset_files = []
+                                %>
+                                <div style="float: left; margin-left: 2px;" class="menubutton split popup" id="sample-${sample.id}-popup">
+                                    <a class="view-info" href="${h.url_for( controller='requests_common', action='view_sample', cntrller=cntrller, id=trans.security.encode_id( sample.id ) )}">${sample.name | h}</a>
+                                </div>
+                                <div popupmenu="sample-${sample.id}-popup">
+                                    %if sample.datasets and len( sample.datasets ) > len( transferred_dataset_files ) and sample.library and sample.folder:
+                                        <li><a class="action-button" href="${h.url_for( controller='requests_admin', action='manage_datasets', sample_id=trans.security.encode_id( sample.id ) )}">Manage selected datasets</a></li>
+                                    %elif sample.datasets and len( sample.datasets ) == len( transferred_dataset_files ):
+                                        <%
+                                            # Get an external_service from one of the sample datasets.  This assumes all sample datasets are associated with
+                                            # the same external service - hopefully this is a good assumption.
+                                            external_service = sample.datasets[0].external_service
+                                        %>
+                                        <li><a class="action-button" href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, external_service_id=trans.security.encode_id( external_service.id ), sample_id=trans.security.encode_id( sample.id ), transfer_status=trans.model.SampleDataset.transfer_status.COMPLETE )}">View transferred datasets</a></li>
+                                    %endif
+                                </div>
+                            %else:
+                                ${sample_widget_name | h}
+                            %endif
+                        </td>
+                        %if display_bar_code:
+                            <td>${sample_widget_bar_code | h}</td>
+                        %endif
+                        %if is_unsubmitted:
+                            <td>Unsubmitted</td>
+                        %else:
+                            <td><a id="sampleState-${sample.id}" href="${h.url_for( controller='requests_common', action='view_sample_history', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${render_sample_state( sample )}</a></td>
+                        %endif
+                        %if sample_widget_library and library_cntrller is not None:
+                            <td><a href="${h.url_for( controller='library_common', action='browse_library', cntrller=library_cntrller, id=trans.security.encode_id( sample_widget_library.id ) )}">${sample_widget_library.name | h}</a></td>                                  
+                        %else:
+                            <td></td>
+                        %endif
+                        %if sample_widget_folder:
+                            <td>${sample_widget_folder.name | h}</td>
+                        %else:
+                            <td></td>
+                        %endif
+                        %if sample_widget_history:
+                            %if trans.user == sample_widget_history.user:
+                                <td>
+                                    <a target='_parent' href="${h.url_for( controller='history', action='list', operation="Switch", id=trans.security.encode_id(sample_widget_history.id), use_panels=False )}">
+                                    ${sample_widget_history.name | h}
+                                    </a>
+                                </td>
+                            %else:
+                                <td>${sample_widget_history.name | h}</td>
+                            %endif
+                        %else:
+                            <td></td>
+                        %endif
+                        %if sample_widget_workflow:
+                            %if trans.user == sample_widget_workflow.stored_workflow.user:
+                                <td>
+                                    <a target='_parent' href="${h.url_for( controller='workflow', action='editor', id=trans.security.encode_id(sample_widget_workflow.stored_workflow.id) )}">
+                                    ${sample_widget_workflow.name | h}
+                                    </a>
+                                </td>
+                            %else:
+                                <td>${sample_widget_workflow.name | h}</td>
+                            %endif
+                        %else:
+                            <td></td>
+                        %endif
+                        %if is_submitted or is_complete:
+                            <td>
+                                ## An admin can select the datasets to transfer, while a non-admin can only view what has been selected
+                                %if sample.library and is_admin:
+                                    ## This link will direct the admin to a page allowing them to manage datasets.
+                                    <a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_admin', action='manage_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
+                                %elif sample.library and sample.datasets:
+                                    <%
+                                        # Get an external_service from one of the sample datasets.  This assumes all sample datasets are associated with
+                                        # the same external service - hopefully this is a good assumption.
+                                        external_service = sample.datasets[0].external_service
+                                    %>
+                                    ## Since this is a regular user, only display a link if there is at least 1
+                                    ## selected dataset for the sample.
+                                    <a id="sampleDatasets-${sample.id}" href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a>
+                                %else:
+                                    ## Since this is a regular user, do not display a link if there are no datasets.
+                                    <a id="sampleDatasets-${sample.id}">${len( sample.datasets )}</a>
+                                %endif
+                            </td>
+                        %endif
+                    </tr>
+                %else:
+                    ## The Add sample button was clicked for this sample_widget
+                    <tr>${render_editable_sample_row( cntrller, request, None, sample_widget_index, sample_widget, encoded_selected_sample_ids, adding_new_samples=True )}</tr>
+                %endif
+            %endfor
+        </tbody>
+    </table>
+</%def>
+
+<%def name="render_sample_form( index, sample_name, sample_values, fields_dict, display_only )">
+    <tr>
+        <td>${sample_name | h}</td>
+        %for field_index, field in fields_dict.items():
+            <% 
+                field_type = field[ 'type' ]
+                field_name = field[ 'name' ]
+                field_value = sample_values[ field_name ]
+            %>
+            <td>
+                %if display_only:
+                    %if field_value:
+                        %if field_type == 'WorkflowField':
+                            %if str( field_value ) != 'none':
+                                <% workflow = trans.sa_session.query( trans.app.model.StoredWorkflow ).get( int( field_value ) ) %>
+                                <a href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id( workflow.id ) )}">${workflow.name | h}</a>
+                            %endif
+                        %else:
+                            ${field_value | h}
+                        %endif
+                    %else:
+                        <i>None</i>
+                    %endif
+                %else:
+                    %if field_type == 'TextField':
+                        <input type="text" name="sample_${index}_field_${field_index}" value="${field_value | h}" size="7"/>
+                    %elif field_type == 'SelectField':
+                        <select name="sample_${index}_field_${field_index}" last_selected_value="2">
+                            %for option_index, option in enumerate(field[ 'selectlist' ]):
+                                %if option == field_value:
+                                    <option value="${option}" selected>${option}</option>
+                                %else:
+                                    <option value="${option}">${option}</option>
+                                %endif
+                            %endfor
+                        </select>
+                    %elif field_type == 'WorkflowField':
+                        <select name="sample_${index}_field_${field_index}">
+                            %if str( field_value ) == 'none':
+                                <option value="none" selected>Select one</option>
+                            %else:
+                                <option value="none">Select one</option>
+                            %endif
+                            %for option_index, option in enumerate(request.user.stored_workflows):
+                                %if not option.deleted:
+                                    %if str( option.id ) == str( field_value ):
+                                        <option value="${option.id}" selected>${option.name}</option>
+                                    %else:
+                                        <option value="${option.id}">${option.name}</option>
+                                    %endif
+                                %endif
+                            %endfor
+                        </select>
+                    %elif field_type == 'WorkflowMappingField':
+                        ##DBTODO Make this useful, use form_builder approach to displaying this stuff.
+                        <select name="sample_${index}_field_${field_index}">
+                            %if str( field_value ) == 'none':
+                                <option value="none" selected>Select one</option>
+                            %else:
+                                <option value="none">Select one</option>
+                            %endif
+                            %for option_index, option in enumerate(request.user.stored_workflows):
+                                %if not option.deleted:
+                                    %if str( option.id ) == str( field_value ):
+                                        <option value="${option.id}" selected>${option.name}</option>
+                                    %else:
+                                        <option value="${option.id}">${option.name}</option>
+                                    %endif
+                                %endif
+                            %endfor
+                        </select>
+                    %elif field_type == 'HistoryField':
+                        <select name="sample_${index}_field_${field_index}">
+                            %if str( field_value ) == 'none':
+                                <option value="none" selected>Select one</option>
+                            %else:
+                                <option value="none">Select one</option>
+                            %endif
+                            %for option_index, option in enumerate(request.user.histories):
+                                %if not option.deleted:
+                                    %if str( option.id ) == str( field_value ):
+                                        <option value="${option.id}" selected>${option.name}</option>
+                                    %else:
+                                        <option value="${option.id}">${option.name}</option>
+                                    %endif
+                                %endif
+                            %endfor
+                        </select>
+                    %elif field_type == 'CheckboxField':
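+                        ## The checkbox is paired with a hidden input of the same name so the parameter
+                        ## is always posted; presumably the server reads the doubled value as checked and
+                        ## a single "Yes" as unchecked (an assumption about CheckboxField, not verified here).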
+                        %if field_value is True:
+                            <input type="checkbox" name="sample_${index}_field_${field_index}" value="Yes" checked="checked"/><input type="hidden" name="sample_${index}_field_${field_index}" value="Yes"/>
+                        %else:
+                            <input type="checkbox" name="sample_${index}_field_${field_index}" value="Yes"/><input type="hidden" name="sample_${index}_field_${field_index}" value="Yes"/>
+                        %endif
+                    %endif
+                    <div class="toolParamHelp" style="clear: both;">
+                        <i>${ '(' + field['required'] + ')' }</i>
+                    </div>
+                %endif
+            </td>
+        %endfor
+    </tr> 
+</%def>
+
+<%def name="render_request_type_sample_form_grids( grid_index, grid_name, fields_dict, displayable_sample_widgets, show_saved_samples_read_only )">
+    <%
+        if not grid_name:
+            grid_name = "Sample form layout %s" % str( grid_index )
+    %>
+    <h4><img src="/static/images/silk/resultset_next.png" alt="Hide" onclick="showContent(this);" style="cursor:pointer;"/> ${grid_name}</h4>
+    <div style="display:none;">
+        <table class="grid">
+            <thead>
+                <tr>
+                    <th>Name</th>
+                    %for index, field in fields_dict.items():
+                        <th>
+                            <a class="display" title="${field['helptext']}" >${field['label']}</a>
+                        </th>
+                    %endfor
+                    <th></th>
+                </tr>
+            </thead>
+            <tbody>
+                <% trans.sa_session.refresh( request ) %>
+                %for sample_index, sample in enumerate( displayable_sample_widgets ):
+                    <%
+                        # Saved samples are shown read-only; samples added beyond those already saved remain editable.
+                        display_only = show_saved_samples_read_only and sample_index < len( request.samples )
+                    %>
+                    ${render_sample_form( sample_index, sample['name'], sample['field_values'], fields_dict, display_only )}    
+                %endfor
+            </tbody>
+        </table>
+    </div>
+</%def>
+
+<%def name="render_sample_datasets( cntrller, sample, sample_datasets, title )">
+    ## The list of sample_datasets may not be the same as sample.datasets because it may be
+    ## filtered by a transfer_status value.  The value of title changes based on this filter.
+    %if sample_datasets:
+        <%
+            trans.sa_session.refresh( sample )
+            is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+            is_complete = sample.request.is_complete
+            is_submitted = sample.request.is_submitted
+            can_transfer_datasets = is_admin and sample.untransferred_dataset_files
+        %>
+        ## The transfer status should update only when the request has been submitted or complete
+        ## and when the sample has in-progress datasets.
+        %if ( is_complete or is_submitted ) and sample.inprogress_dataset_files: 
+            <script type="text/javascript">
+                // Sample dataset transfer status updater
+                dataset_transfer_status_updater( {${ ",".join( [ '"%s" : "%s"' % ( trans.security.encode_id( sd.id ), sd.status ) for sd in sample_datasets ] ) }});
+            </script>
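+            ## For illustration only: with two sample datasets the call above would render roughly as
+            ##     dataset_transfer_status_updater( {"f2db41e1fa331b3e" : "Not started", "f597429621d6eb2b" : "Complete"} );
+            ## (hypothetical encoded ids and status strings).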
+        %endif
+        <h3>${title}</h3>
+        <table class="grid">
+            <thead>
+                <tr>
+                    <th>Name</th>
+                    <th>Size</th>
+                    <th>Data library</th>
+                    <th>Folder</th>
+                    <th>Transfer status</th>
+                </tr>
+            </thead>
+            <tbody>
+                %for dataset in sample_datasets:
+                    <% encoded_id = trans.security.encode_id( dataset.id ) %>
+                    <tr>
+                        <td>
+                            %if is_admin:
+                                <span class="expandLink dataset-${dataset}-click"><span class="rowIcon"></span>
+                                    <div style="float: left; margin-left: 2px;" class="menubutton split popup" id="dataset-${ trans.security.encode_id( dataset.id ) }-popup">
+                                        <a class="dataset-${encoded_id}-click" href="${h.url_for( controller='requests_admin', action='manage_datasets', operation='view', id=trans.security.encode_id( dataset.id ) )}">${dataset.name | h}</a>
+                                    </div>
+                                </span>
+                                <div popupmenu="dataset-${ trans.security.encode_id( dataset.id ) }-popup">
+                                    %if can_transfer_datasets and dataset in sample.untransferred_dataset_files:
+                                        <li><a class="action-button" href="${h.url_for( controller='requests_admin', action='initiate_data_transfer', sample_id=trans.security.encode_id( sample.id ), sample_dataset_id=trans.security.encode_id( dataset.id ) )}">Transfer</a></li>
+                                    %endif
+                                </div>
+                            %else:
+                                ${dataset.name | h}
+                            %endif
+                        </td>
+                        <td>${dataset.size}</td>
+                        <td><a href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( sample.library.id ) )}">${dataset.sample.library.name | h}</a></td>
+                        <td>${dataset.sample.folder.name | h}</td>
+                        <td id="datasetTransferStatus-${encoded_id}">${dataset.status}</td>
+                    </tr>
+                %endfor
+            </tbody>
+        </table>
+    %else:
+        No datasets for this sample.
+    %endif
+</%def>
+
+<%def name="render_samples_messages( request, is_admin=False, is_submitted=False, message=None, status=None)">
+    %if request.is_rejected:
+        <div class='errormessage'>
+            ${request.last_comment | h}
+        </div><br/>
+    %endif
+    %if is_admin and is_submitted and request.samples_without_library_destinations:
+        <div class='infomessage'>
+            Select a target data library and folder for a sample before selecting its datasets to transfer from the external service.
+        </div><br/>
+    %endif
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+</%def>
diff --git a/templates/webapps/galaxy/requests/common/create_request.mako b/templates/webapps/galaxy/requests/common/create_request.mako
new file mode 100644
index 0000000..d7265e3
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/create_request.mako
@@ -0,0 +1,60 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${h.js("libs/jquery/jquery.autocomplete")}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "autocomplete_tagging" )}
+</%def>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button"  href="${h.url_for( controller=cntrller, action='browse_requests' )}">Browse requests</a></li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create a new sequencing request</div>
+    %if len( request_type_select_field.options ) < 1:
+        There are no request types available for ${trans.user.email | h} to create sequencing requests.
+    %else:
+        <div class="toolFormBody">
+            <form name="create_request" id="create_request" action="${h.url_for( controller='requests_common', action='create_request', cntrller=cntrller )}" method="post" >
+                <div class="form-row">
+                    <label>Select a request type configuration:</label>
+                    ## The request_type_select_field is a SelectField named request_type_id
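+                    ## Illustratively (not the exact markup), get_html() renders something like
+                    ##     <select name="request_type_id"><option value="none">Select one</option>...</select>
+                    ## with one option per available request type.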
+                    ${request_type_select_field.get_html()}
+                    %if cntrller != 'requests_admin':
+                        <div class="toolParamHelp" style="clear: both;">
+                            Contact the lab manager if you are not sure about the request type configuration.
+                        </div>
+                    %endif
+                </div>
+                %if request_type_select_field_selected != 'none':
+                    ## If a request_type has been selected, display the associated form using received widgets.
+                    %for i, field in enumerate( widgets ):
+                        <div class="form-row">
+                            <label>${field['label']}</label>
+                            ${field['widget'].get_html()}
+                            <div class="toolParamHelp" style="clear: both;">
+                                ${field['helptext']}
+                            </div>
+                            <div style="clear: both"></div>
+                        </div>
+                    %endfor
+                    <div class="form-row">
+                        <input type="submit" name="create_request_button" value="Save"/>
+                        <input type="submit" name="add_sample_button" value="Add samples"/>
+                    </div>
+                %endif
+            </form>
+        </div>
+    %endif
+</div>
diff --git a/templates/webapps/galaxy/requests/common/edit_basic_request_info.mako b/templates/webapps/galaxy/requests/common/edit_basic_request_info.mako
new file mode 100644
index 0000000..592044e
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/edit_basic_request_info.mako
@@ -0,0 +1,102 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%
+    is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+    is_complete = request.is_complete
+    is_submitted = request.is_submitted
+    is_unsubmitted = request.is_unsubmitted
+    can_add_samples = is_unsubmitted
+    can_reject = is_admin and is_submitted
+    can_submit_request = request.samples and is_unsubmitted
+%>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" id="request-${request.id}-popup" class="menubutton">Request Actions</a></li>
+    <div popupmenu="request-${request.id}-popup">
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Browse this request</a>
+        %if can_submit_request:
+            <a class="action-button" confirm="More samples cannot be added to this request once it is submitted. Click OK to submit." href="${h.url_for( controller='requests_common', action='submit_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Submit this request</a>
+        %endif
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request_history', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">View history</a>
+        %if can_reject:
+            <a class="action-button" href="${h.url_for( controller='requests_admin', action='reject_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Reject this request</a>
+        %endif
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Edit sequencing request "${request.name | h}"</div>
+    <div class="toolFormBody">
+        <form name="edit_basic_request_info" id="edit_basic_request_info" action="${h.url_for( controller='requests_common', action='edit_basic_request_info', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}" method="post" >
+            %for i, field in enumerate( widgets ):
+                <div class="form-row">
+                    <label>${field['label']}</label>
+                    ${field['widget'].get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${field['helptext']}
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+            %endfor                    
+            <div class="form-row">
+                <input type="submit" name="edit_basic_request_info_button" value="Save"/> 
+            </div>
+        </form>
+    </div>
+</div>
+<p/>
+<div class="toolForm">
+    <div class="toolFormTitle">Email notification settings</div>
+    <div class="toolFormBody">
+        <form name="edit_email_settings" id="edit_email_settings" action="${h.url_for( controller='requests_common', action='edit_email_settings', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}" method="post" >
+            <% 
+                email_address = ''
+                emails = ''
+                additional_email_addresses = []
+                if request.notification:
+                    for e in request.notification[ 'email' ]:
+                        if e == request.user.email:
+                            email_address = 'checked'
+                        else:
+                            additional_email_addresses.append( e )
+                if additional_email_addresses:
+                    emails = '\r\n'.join( additional_email_addresses )
+            %>
+            <div class="form-row">
+                <label>Send to:</label>
+                <input type="checkbox" name="email_address" value="true" ${email_address}>${request.user.email | h} (sequencing request owner)<input type="hidden" name="email_address" value="true">
+            </div>
+            <div class="form-row">
+                <label>Additional email addresses:</label>
+                <textarea name="additional_email_addresses" rows="3" cols="40">${emails | h}</textarea>
+                <div class="toolParamHelp" style="clear: both;">
+                    Enter one email address per line
+                </div>
+            </div>
+            <div class="form-row">
+                <label>Select sample states to send email notification:</label>
+                %for sample_state in request.type.states:
+                    <%  
+                        email_state = ''
+                        if request.notification and sample_state.id in request.notification[ 'sample_states' ]:
+                            email_state = 'checked'
+                     %>
+                    <input type="checkbox" name=sample_state_${sample_state.id} value="true" ${email_state} >${sample_state.name}<input type="hidden" name=sample_state_${sample_state.id} value="true">
+                    <br/>
+                %endfor
+                <div class="toolParamHelp" style="clear: both;">
+                    Email notification will be sent when all the samples of this sequencing request are in the selected states.
+                </div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="edit_email_settings_button" value="Save"/> 
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/requests/common/edit_samples.mako b/templates/webapps/galaxy/requests/common/edit_samples.mako
new file mode 100644
index 0000000..12c5c16
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/edit_samples.mako
@@ -0,0 +1,145 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/requests/common/common.mako" import="common_javascripts" />
+<%namespace file="/requests/common/common.mako" import="render_samples_grid" />
+<%namespace file="/requests/common/common.mako" import="render_request_type_sample_form_grids" />
+<%namespace file="/requests/common/common.mako" import="render_samples_messages" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${common_javascripts()}
+   ${local_javascripts()}
+</%def>
+
+<%def name="local_javascripts()">
+    <script type="text/javascript">
+        // This function stops the form from being submitted when the return key is pressed.
+        // This is needed in this form because the barcode scanner (when in keyboard emulation mode)
+        // may send a return key appended to the scanned barcode string.
+        function stopRKey(evt) {
+          var evt = (evt) ? evt : ((event) ? event : null);
+          var node = (evt.target) ? evt.target : ((evt.srcElement) ? evt.srcElement : null);
+          if ((evt.keyCode == 13) && (node.type=="text"))  {return false;}
+        }
+        document.onkeypress = stopRKey;
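+        // A modern equivalent (sketch only, not wired up here) would be:
+        //     document.addEventListener('keypress', function(evt) {
+        //         if (evt.key === 'Enter' && evt.target.type === 'text') { evt.preventDefault(); }
+        //     });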
+    </script>
+</%def>
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+
+    is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+    is_complete = request.is_complete
+    is_submitted = request.is_submitted
+    is_unsubmitted = request.is_unsubmitted
+    if is_admin:
+        can_add_samples = not is_complete
+    else:
+        can_add_samples = is_unsubmitted
+    can_delete_samples = request.samples and not is_complete
+    can_edit_request = ( is_admin and not is_complete ) or is_unsubmitted
+    can_reject = is_admin and is_submitted
+    can_submit = request.samples and is_unsubmitted
+%>
+
+<br/><br/>
+
+<ul class="manage-table-actions">
+    %if can_add_samples:
+        <li><a class="action-button" href="${h.url_for( controller='requests_common', action='add_sample', cntrller=cntrller, request_id=trans.security.encode_id( request.id ), add_sample_button='Add sample' )}">Add sample</a></li>
+    %endif
+    %if can_submit:
+        <li><a class="action-button" confirm="More samples cannot be added to this request after it is submitted. Click OK to submit." href="${h.url_for( controller='requests_common', action='submit_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Submit request</a></li>
+    %endif
+    <li><a class="action-button" id="request-${request.id}-popup" class="menubutton">Request Actions</a></li>
+    <div popupmenu="request-${request.id}-popup">
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Browse this request</a>
+        %if can_edit_request:
+            <a class="action-button" href="${h.url_for( controller='requests_common', action='edit_basic_request_info', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Edit this request</a>
+        %endif
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request_history', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">View history</a>
+        %if can_reject:
+            <a class="action-button" href="${h.url_for( controller='requests_admin', action='reject_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Reject this request</a>
+        %endif
+    </div>
+</ul>
+
+${render_samples_messages(request, is_admin, is_submitted, message, status)}
+
+<div class="toolFormBody">
+    <form id="edit_samples" name="edit_samples" action="${h.url_for( controller='requests_common', action='edit_samples', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}" method="post">
+        %if displayable_sample_widgets:
+            <%
+                grid_header = '<h3>Edit Current Samples of Sequencing Request "%s"</h3>' % request.name
+            %>
+            ${render_samples_grid( cntrller, request, displayable_sample_widgets, action='edit_samples', encoded_selected_sample_ids=encoded_selected_sample_ids, render_buttons=False, grid_header=grid_header )}
+            %if len( sample_operation_select_field.options ) >= 1 and not is_unsubmitted:
+                <div class="form-row" style="background-color:#FAFAFA;">
+                    For selected samples: 
+                    ${sample_operation_select_field.get_html()}
+                </div>
+                <% sample_operation_selected_value = sample_operation_select_field.get_selected( return_value=True ) %>
+                %if ( is_admin or not is_complete ) and sample_operation_selected_value != 'none' and encoded_selected_sample_ids:
+                    <div class="form-row" style="background-color:#FAFAFA;">
+                        %if sample_operation_selected_value == trans.model.Sample.bulk_operations.CHANGE_STATE:
+                            ## sample_operation_selected_value == 'Change state'
+                            <div class="form-row">
+                                <label>Change current state</label>
+                                ${sample_state_id_select_field.get_html()}
+                                <label>Comments</label>
+                                <input type="text" name="sample_event_comment" value=""/>
+                                <div class="toolParamHelp" style="clear: both;">
+                                    Optional
+                                </div>
+                            </div>
+                        %elif sample_operation_selected_value == trans.app.model.Sample.bulk_operations.SELECT_LIBRARY:
+                            <% libraries_selected_value = libraries_select_field.get_selected( return_value=True ) %>
+                            <div class="form-row">
+                                <label>Select data library:</label>
+                                ${libraries_select_field.get_html()}
+                            </div>
+                            %if libraries_selected_value != 'none':
+                                <div class="form-row">
+                                    <label>Select folder:</label>
+                                    ${folders_select_field.get_html()}
+                                </div>
+                            %endif
+                        %endif
+                    </div>
+                %endif
+            %endif
+            <div class="toolParamHelp" style="clear: both;">
+                For each sample, select the data library and folder in which you would like the run datasets deposited.
+                To automatically run a workflow on run datasets, select a history first and then the desired workflow.
+            </div>
+            ## Render the other grids
+            <% trans.sa_session.refresh( request.type.sample_form ) %>
+            %for grid_index, grid_name in enumerate( request.type.sample_form.layout ):
+                ${render_request_type_sample_form_grids( grid_index, grid_name, request.type.sample_form.grid_fields( grid_index ), displayable_sample_widgets=displayable_sample_widgets, show_saved_samples_read_only=False )}
+            %endfor
+        %else:
+            <label>There are no samples.</label>
+        %endif  
+        <p/>
+        <div class="form-row">
+            ## hidden element to make twill work.
+            ## Greg will fix this
+            <input type="hidden" name="twill" value=""/>
+            <input type="submit" name="save_samples_button" value="Save"/>
+            <input type="submit" name="cancel_changes_button" value="Cancel"/>
+            <div class="toolParamHelp" style="clear: both;">
+            Click the <b>Save</b> button when you have finished editing the samples
+        </div>
+        %if request.samples and request.is_submitted:
+            <script type="text/javascript">
+                // Updater
+                sample_state_updater( {${ ",".join( [ '"%s" : "%s"' % ( s.id, s.state.name ) for s in request.samples ] ) }});
+            </script>
+        %endif
+    </form>
+</div>
diff --git a/templates/webapps/galaxy/requests/common/find_samples.mako b/templates/webapps/galaxy/requests/common/find_samples.mako
new file mode 100644
index 0000000..fd6ff61
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/find_samples.mako
@@ -0,0 +1,100 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${h.js("libs/jquery/jquery.autocomplete")}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "autocomplete_tagging" )}
+</%def>
+
+<% is_admin = cntrller == 'requests_admin' and trans.user_is_admin() %>
+
+<br/>
+<br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button"  href="${h.url_for( controller=cntrller, action='browse_requests' )}">Browse requests</a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Find samples</div>
+    <div class="toolFormBody">
+        <form name="find_request" id="find_request" action="${h.url_for( controller='requests_common', action='find_samples', cntrller=cntrller )}" method="post" >
+            <div class="form-row">
+                <label>Find samples using:</label>
+                ${search_type.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    Select a sample attribute for searching.  To search <br/>
+                    for a sample with a dataset name, select the dataset <br/>
+                    option above. This will return all the samples that <br/>
+                    are associated with a dataset with that name. <br/>
+                </div>
+            </div>
+            <div class="form-row">
+                <label>Show only sequencing requests in state:</label>
+                ${request_states.get_html()}
+            </div>
+            <div class="form-row">
+                ${search_box.get_html()}
+                <input type="submit" name="find_samples_button" value="Find"/>
+                <div class="toolParamHelp" style="clear: both;">
+                   <p>
+                   Wildcard search (%) can be used as a placeholder for any sequence of characters or words.<br/>
+                   For example, to search for samples starting with 'mysample', use 'mysample%' as the search string.
+                   </p>
+                   <p>
+                   When the 'form value' search type is selected, enter the search string in 'field label=value' format.
+                   <br/>For example, when searching for all samples whose 'Volume' field is 1.3mL, the search string
+                   should be 'Volume=1.3mL' (without quotes).
+                   </p>
+                </div>
+            </div>
+            %if results:
+                <div class="form-row">
+                    <label><i>${results}</i></label>
+                    %if samples:
+                        <div class="toolParamHelp" style="clear: both;">
+                           The search results are sorted by the date the samples were created.
+                        </div>
+                    %endif
+                </div>
+            %endif
+            <div class="form-row">
+                %if samples:
+                    %for sample in samples:
+                        <div class="form-row">
+                            Sample: <b>${sample.name | h}</b> | Barcode: ${sample.bar_code | h}<br/>
+                            %if sample.request.is_new or not sample.state:
+                                State: Unsubmitted<br/>
+                            %else:
+                                State: ${sample.state.name}<br/>
+                            %endif
+                            <%
+                                # Get an external_service from one of the sample datasets.  This assumes all sample datasets are associated with
+                                # the same external service - hopefully this is a good assumption.
+                                external_service = sample.datasets[0].external_service
+                            %>
+                            Datasets: <a href="${h.url_for( controller='requests_common', action='view_sample_datasets', cntrller=cntrller, external_service_id=trans.security.encode_id( external_service.id ), sample_id=trans.security.encode_id( sample.id ) )}">${len( sample.datasets )}</a><br/>
+                            %if is_admin:
+                               <i>User: ${sample.request.user.email | h}</i>
+                            %endif
+                            <div class="toolParamHelp" style="clear: both;">
+                                <a href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( sample.request.id ) )}">Sequencing request: ${sample.request.name | h} | Type: ${sample.request.type.name} | State: ${sample.request.state}</a>
+                            </div>
+                        </div>
+                        <br/>
+                    %endfor
+                %endif
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/requests/common/index.mako b/templates/webapps/galaxy/requests/common/index.mako
new file mode 100644
index 0000000..f80997e
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/index.mako
@@ -0,0 +1,16 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="requests"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="center_panel()">
+
+    <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller="requests", action="list" )}"> </iframe>
+
+</%def>
diff --git a/templates/webapps/galaxy/requests/common/sample_dataset_transfer_status.mako b/templates/webapps/galaxy/requests/common/sample_dataset_transfer_status.mako
new file mode 100644
index 0000000..dbd0cac
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/sample_dataset_transfer_status.mako
@@ -0,0 +1,5 @@
+<%def name="render_sample_dataset_transfer_status( sample_dataset )">
+    ${sample_dataset.status}
+</%def>
+
+${render_sample_dataset_transfer_status( sample_dataset )}
diff --git a/templates/webapps/galaxy/requests/common/sample_datasets.mako b/templates/webapps/galaxy/requests/common/sample_datasets.mako
new file mode 100644
index 0000000..b98dec4
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/sample_datasets.mako
@@ -0,0 +1,5 @@
+<%def name="render_sample_datasets( sample )">
+    ${len( sample.datasets )}
+</%def>
+
+${render_sample_datasets( sample )}
diff --git a/templates/webapps/galaxy/requests/common/sample_state.mako b/templates/webapps/galaxy/requests/common/sample_state.mako
new file mode 100644
index 0000000..001956a
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/sample_state.mako
@@ -0,0 +1,5 @@
+<%def name="render_sample_state( sample )">
+    ${sample.state.name}
+</%def>
+
+${render_sample_state( sample )}
diff --git a/templates/webapps/galaxy/requests/common/view_request.mako b/templates/webapps/galaxy/requests/common/view_request.mako
new file mode 100644
index 0000000..98fdc64
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/view_request.mako
@@ -0,0 +1,174 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/requests/common/common.mako" import="common_javascripts" />
+<%namespace file="/requests/common/common.mako" import="render_samples_grid" />
+<%namespace file="/requests/common/common.mako" import="render_request_type_sample_form_grids" />
+<%namespace file="/requests/common/common.mako" import="render_samples_messages" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${common_javascripts()}
+</%def>
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+
+    is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+    is_complete = request.is_complete
+    is_submitted = request.is_submitted
+    is_unsubmitted = request.is_unsubmitted
+    can_edit_request = ( is_admin and not is_complete ) or is_unsubmitted
+    can_delete_samples = request.samples and not is_complete
+    can_edit_samples = request.samples and ( is_admin or not is_complete )
+    can_reject = is_admin and is_submitted
+    can_submit = request.samples and is_unsubmitted
+    can_undelete = request.deleted
+    if is_admin:
+        can_add_samples = not is_complete
+    else:
+        can_add_samples = is_unsubmitted
+%>
+
+<br/><br/>
+
+<ul class="manage-table-actions">
+    %if can_submit:
+        <li><a class="action-button" confirm="More samples cannot be added to this request after it is submitted. Click OK to submit." href="${h.url_for( controller='requests_common', action='submit_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Submit request</a></li>
+    %endif
+    <li><a class="action-button" id="request-${request.id}-popup" class="menubutton">Request Actions</a></li>
+    <div popupmenu="request-${request.id}-popup">
+        %if can_undelete:
+            <a class="action-button" href="${h.url_for( controller='requests_common', action='undelete_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Undelete this request</a>
+        %endif
+        %if can_edit_request:
+            <a class="action-button" href="${h.url_for( controller='requests_common', action='edit_basic_request_info', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Edit this request</a>
+        %endif
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request_history', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">View history</a>
+        %if can_reject:
+            <a class="action-button" href="${h.url_for( controller='requests_admin', action='reject_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Reject this request</a>
+        %endif
+    </div>
+</ul>
+
+${render_samples_messages(request, is_admin, is_submitted, message, status)}
+
+<div class="toolForm">
+    <div class="toolFormTitle">Sequencing request "${request.name | h}"</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Current state:</label>
+            <a href="${h.url_for( controller='requests_common', action='view_request_history', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">${request.state}</a>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Description:</label>
+            ${request.desc | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>User:</label>
+            ${request.user.email | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Request type:</label>
+            %if is_admin:
+                <a href="${h.url_for( controller='request_type', action='view_request_type', cntrller=cntrller, id=trans.security.encode_id( request.type.id ) )}">${request.type.name}</a>
+            %else:
+                ${request.type.name}
+            %endif
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <h4><img src="/static/images/silk/resultset_next.png" alt="Show" onclick="showContent(this);" style="cursor:pointer;"/> More</h4>
+            <div style="display:none;">
+                %for index, rd in enumerate( request_widgets ):
+                    <%
+                        field_label = rd[ 'label' ]
+                        field_value = rd[ 'value' ]
+                    %>
+                    <div class="form-row">
+                        <label>${field_label}:</label>                   
+                        ${field_value | h}     
+                    </div>
+                    <div style="clear: both"></div>
+                %endfor
+                <div class="form-row">
+                    <label>Date created:</label>
+                    ${request.create_time}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Last updated:</label>
+                    ${time_ago( request.update_time )}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Email recipients:</label>
+                    <%
+                        if request.notification:
+                            emails = ', '.join( request.notification[ 'email' ] )
+                        else:
+                            emails = ''
+                    %>
+                    ${emails | h}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Send email when state changes to:</label>
+                    <%
+                        if request.notification:
+                            states = []
+                            for ss in request.type.states:
+                                if ss.id in request.notification[ 'sample_states' ]:
+                                    states.append( ss.name )
+                            states = ', '.join( states )
+                        else:
+                            states = ''
+                    %>
+                    ${states}
+                    <div style="clear: both"></div>
+                </div>
+                ## Sample state updater
+                %if request.samples and request.is_submitted and request.samples_with_bar_code:
+                    <script type="text/javascript">
+                        // Updater
+                        sample_state_updater( {${ ",".join( [ '"%s" : "%s"' % ( s.id, s.state.name ) for s in request.samples ] ) }});
+                    </script>
+                %endif
+                ## Number of sample datasets updater
+                %if request.samples and request.is_submitted:
+                    <script type="text/javascript">
+                        // Updater
+                        sample_datasets_updater( {${ ",".join( [ '"%s" : "%s"' % ( s.id, len(s.datasets) ) for s in request.samples ] ) }});
+                    </script>
+                %endif
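+                ## Both updaters above presumably poll the server and rewrite the page elements
+                ## keyed by sample id; the dicts seed them with the values rendered at page load.
+                ## (An assumption about the updater helpers' behavior, inferred from their arguments.)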
+            </div>
+        </div>
+    </div>
+</div>
+<p/>
+%if displayable_sample_widgets:
+    <%
+        grid_header = '<h3>Samples</h3>'
+        render_buttons = can_edit_samples
+    %>
+    ${render_samples_grid( cntrller, request, displayable_sample_widgets=displayable_sample_widgets, action='view_request', adding_new_samples=True, encoded_selected_sample_ids=[], render_buttons=render_buttons, grid_header=grid_header )}
+    ## Render the other grids
+    <% trans.sa_session.refresh( request.type.sample_form ) %>
+    %for grid_index, grid_name in enumerate( request.type.sample_form.layout ):
+        ${render_request_type_sample_form_grids( grid_index, grid_name, request.type.sample_form.grid_fields( grid_index ), displayable_sample_widgets=displayable_sample_widgets, show_saved_samples_read_only=True )}
+    %endfor
+%else:
+    There are no samples.
+    %if can_add_samples:
+        <ul class="manage-table-actions">
+            <li><a class="action-button" href="${h.url_for( controller='requests_common', action='add_sample', cntrller=cntrller, request_id=trans.security.encode_id( request.id ), add_sample_button='Add sample' )}">Add sample</a></li>
+        </ul>
+    %endif
+%endif
diff --git a/templates/webapps/galaxy/requests/common/view_request_history.mako b/templates/webapps/galaxy/requests/common/view_request_history.mako
new file mode 100644
index 0000000..913914d
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/view_request_history.mako
@@ -0,0 +1,59 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+
+    is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+    is_complete = request.is_complete
+    is_submitted = request.is_submitted
+    is_unsubmitted = request.is_unsubmitted
+    can_add_samples = is_unsubmitted
+    can_edit_request = ( is_admin and not is_complete ) or is_unsubmitted
+    can_reject = is_admin and is_submitted
+    can_submit_request = request.samples and is_unsubmitted
+%>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" id="request-${request.id}-popup" class="menubutton">Request Actions</a></li>
+    <div popupmenu="request-${request.id}-popup">
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Browse this request</a>
+        %if can_edit_request:
+            <a class="action-button" href="${h.url_for( controller='requests_common', action='edit_basic_request_info', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Edit this request</a>
+        %endif
+        %if can_submit_request:
+            <a class="action-button" confirm="More samples cannot be added to this request once it is submitted. Click OK to submit." href="${h.url_for( controller='requests_common', action='submit_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Submit this request</a>
+        %endif
+        %if can_reject:
+            <a class="action-button" href="${h.url_for( controller='requests_admin', action='reject_request', cntrller=cntrller, id=trans.security.encode_id( request.id ) )}">Reject this request</a>
+        %endif
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<h3>History of sequencing request "${request.name | h}"</h3>
+
+<div class="toolForm">
+    <table class="grid">
+        <thead>
+            <tr>
+                <th>State</th>
+                <th>Last Updated</th>
+                <th>Comments</th>
+            </tr>
+        </thead>
+        <tbody>
+            %for event in request.events:    
+                <tr>
+                    <td><b>${event.state}</b></td>
+                    <td>${time_ago( event.update_time )}</td>
+                    <td>${event.comment | h}</td>
+                </tr>             
+            %endfor
+        </tbody>
+    </table>
+</div>
diff --git a/templates/webapps/galaxy/requests/common/view_sample.mako b/templates/webapps/galaxy/requests/common/view_sample.mako
new file mode 100644
index 0000000..8d4eede
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/view_sample.mako
@@ -0,0 +1,122 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/common/template_common.mako" import="render_template_fields" />
+
+<%def name="render_external_service_actions( external_service )">
+%if external_service:
+    <p>
+        <div class="toolForm">
+            <div class="toolFormTitle">Available External Service Actions for ${sample.name | h} at ${external_service.name | h}</div>
+            <div class="toolFormBody">
+                    <div class="toolMenu">
+                        %for item in external_service.actions:
+                            ${ render_external_service_action_group( item ) }
+                        %endfor
+                    </div>
+            </div>
+        </div>
+    </p>
+%endif
+</%def>
+
+<%def name="render_external_service_action_group( external_service_group )">
+    %if external_service_group.has_action():
+        %if external_service_group.label:
+            <div class="form-row">
+                <div class="toolSectionList">
+                    <div class="toolSectionTitle">
+                        <span>${external_service_group.label | h}</span>
+                    </div>
+                    <div class="toolSectionBody">
+                        <div class="toolSectionBg">
+        %endif
+                        %for item in external_service_group:
+                            %if isinstance( item, list ):
+                                ${ render_external_service_action_group( item ) }
+                            %else:
+                                ${ render_external_service_action( item ) }
+                            %endif
+                        %endfor
+        %if external_service_group.label:
+                        </div>
+                    </div>
+                </div>
+            </div>
+        %endif
+    %endif
+</%def>
+
+<%def name="render_external_service_action( external_service_action )">
+    <%
+        if hasattr( external_service_action.action, 'target' ):
+            target = external_service_action.action.target
+        else:
+            target = 'galaxy_main'
+    %>
+    <div class="toolTitle">
+        <a href="${external_service_action.get_action_access_link( trans )}" target="${target}">${external_service_action.label | h}</a>
+    </div>
+</%def>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" id="sample-${sample.id}-popup" class="menubutton">Sample Actions</a></li>
+    <div popupmenu="sample-${sample.id}-popup">
+        <a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( sample.request.id ) )}">Browse this request</a>
+        %if sample.runs:
+            <a class="action-button" href="${h.url_for( controller='requests_common', action='edit_template', cntrller=cntrller, item_type='sample', form_type=trans.app.model.FormDefinition.types.RUN_DETAILS_TEMPLATE, sample_id=trans.security.encode_id( sample.id ) )}">Edit template</a>
+            <a class="action-button" href="${h.url_for( controller='requests_common', action='delete_template', cntrller=cntrller, item_type='sample', form_type=trans.app.model.FormDefinition.types.RUN_DETAILS_TEMPLATE, sample_id=trans.security.encode_id( sample.id ) )}">Unuse template</a>
+        %endif
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Sample "${sample.name | h}"</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Name:</label>
+            ${sample.name | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Description:</label>
+            ${sample.desc | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Barcode:</label>
+            ${sample.bar_code | h}
+            <div style="clear: both"></div>
+        </div>
+        %if sample.library:
+            <div class="form-row">
+                <label>Library:</label>
+                ${sample.library.name | h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Folder:</label>
+                ${sample.folder.name | h}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        <div class="form-row">
+            <label>Request:</label>
+            ${sample.request.name | h}
+            <div style="clear: both"></div>
+        </div>
+    </div>
+</div>
+
+%if widgets:
+    ${render_template_fields( cntrller=cntrller, item_type='sample', widgets=widgets, widget_fields_have_contents=widget_fields_have_contents, sample_id=trans.security.encode_id( sample.id ), editable=False )}
+%endif
+%if external_services:
+    %for external_service in external_services:
+        ${ render_external_service_actions( external_service ) }
+    %endfor
+%endif
diff --git a/templates/webapps/galaxy/requests/common/view_sample_datasets.mako b/templates/webapps/galaxy/requests/common/view_sample_datasets.mako
new file mode 100644
index 0000000..9f4210f
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/view_sample_datasets.mako
@@ -0,0 +1,45 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/requests/common/common.mako" import="render_sample_datasets" />
+<%namespace file="/requests/common/common.mako" import="common_javascripts" />
+
+<%def name="javascripts()">
+   ${parent.javascripts()}
+   ${common_javascripts()}
+</%def>
+
+<%
+    is_admin = cntrller == 'requests_admin' and trans.user_is_admin()
+    is_complete = sample.request.is_complete
+    is_submitted = sample.request.is_submitted
+    can_transfer_datasets = is_admin and sample.untransferred_dataset_files and sample.library and sample.folder
+%>
+
+<br/><br/>
+
+<ul class="manage-table-actions">
+    %if can_transfer_datasets:
+        <li><a class="action-button" href="${h.url_for( controller='requests_admin', action='manage_datasets', cntrller=cntrller, sample_id=trans.security.encode_id( sample.id ) )}">Manage selected datasets</a></li>
+    %endif
+    <li><a class="action-button" id="sample-${sample.id}-popup" class="menubutton">Dataset Actions</a></li>
+    <div popupmenu="sample-${sample.id}-popup">
+        <li><a class="action-button" href="${h.url_for( controller='library_common', action='browse_library', cntrller=cntrller, id=trans.security.encode_id( sample.library.id ) )}">View target Data Library</a></li>
+        <li><a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( sample.request.id ) )}">Browse this request</a></li>
+    </div>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if sample and sample_datasets:
+    ## The list of sample_datasets may not be the same as sample.datasets because it may be
+    ## filtered by a transfer_status value.  The value of title changes based on this filter.
+    ${render_sample_datasets( cntrller, sample, sample_datasets, title )}
+%else:
+    %if transfer_status:
+        No datasets with status "${transfer_status}" belong to this sample.
+    %else:
+        No datasets have been selected for this sample.
+    %endif
+%endif
diff --git a/templates/webapps/galaxy/requests/common/view_sample_history.mako b/templates/webapps/galaxy/requests/common/view_sample_history.mako
new file mode 100644
index 0000000..902fcff
--- /dev/null
+++ b/templates/webapps/galaxy/requests/common/view_sample_history.mako
@@ -0,0 +1,38 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% from galaxy.web.framework.helpers import time_ago %>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" href="${h.url_for( controller='requests_common', action='view_request', cntrller=cntrller, id=trans.security.encode_id( sample.request.id ) )}">Browse this request</a></li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<h3>History of sample "${sample.name | h}"</h3>
+
+<div class="toolForm">
+    <table class="grid">
+        <thead>
+            <tr>
+                <th>State</th>
+                <th>Description</th>
+                <th>Last Updated</th>
+                <th>Comments</th>
+            </tr>
+        </thead>
+        <tbody>
+            %for event in sample.events:    
+                <tr>
+                    <td><b>${event.state.name | h}</b></td>
+                    <td>${event.state.desc | h}</td>
+                    <td>${time_ago( event.update_time )}</td>
+                    <td>${event.comment | h}</td>
+                </tr>             
+            %endfor
+        </tbody>
+    </table>
+</div>
diff --git a/templates/webapps/galaxy/requests/find_samples_index.mako b/templates/webapps/galaxy/requests/find_samples_index.mako
new file mode 100644
index 0000000..34065ce
--- /dev/null
+++ b/templates/webapps/galaxy/requests/find_samples_index.mako
@@ -0,0 +1,14 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+    <%
+        self.has_left_panel=False
+        self.has_right_panel=False
+        self.active_view="requests"
+        self.message_box_visible=False
+    %>
+</%def>
+
+<%def name="center_panel()">
+    <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller='requests_common', action='find_samples', cntrller='requests')}"> </iframe>
+</%def>
diff --git a/templates/webapps/galaxy/requests/grid.mako b/templates/webapps/galaxy/requests/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/webapps/galaxy/requests/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/webapps/galaxy/requests/index.mako b/templates/webapps/galaxy/requests/index.mako
new file mode 100644
index 0000000..1c43803
--- /dev/null
+++ b/templates/webapps/galaxy/requests/index.mako
@@ -0,0 +1,14 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="requests"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="center_panel()">
+    <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller="requests", action="browse_requests" )}"> </iframe>
+</%def>
diff --git a/templates/webapps/galaxy/root/redirect.mako b/templates/webapps/galaxy/root/redirect.mako
new file mode 100644
index 0000000..cf288fd
--- /dev/null
+++ b/templates/webapps/galaxy/root/redirect.mako
@@ -0,0 +1,5 @@
+<%inherit file="/base.mako"/>
+
+<script type="text/javascript">  
+    top.location.href = '${redirect_url}';
+</script>
diff --git a/templates/webapps/galaxy/root/tool_runner.mako b/templates/webapps/galaxy/root/tool_runner.mako
new file mode 100644
index 0000000..f6b77e1
--- /dev/null
+++ b/templates/webapps/galaxy/root/tool_runner.mako
@@ -0,0 +1,42 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+    <head>
+        <title>Galaxy</title>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        <link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
+        <script type="text/javascript">
+            setTimeout( function() { top.location.href = '${h.url_for( "/" )}'; }, 1000 );
+        </script>
+    </head>
+    <body>
+        <div class="donemessagelarge">
+            %if num_jobs > 1:
+              <% jobs_str = "%d jobs have" % num_jobs %>
+            %else:
+              <% jobs_str = "A job has" %>
+            %endif
+            %if len(out_data) == 1:
+              <% datasets_str = "dataset" %>
+            %else:
+              <% datasets_str = "datasets" %>
+            %endif
+            <p>
+                ${jobs_str} been successfully added to the queue, resulting in the following ${datasets_str}:
+            </p>
+            %for _, data in out_data:
+                <div style="padding: 10px"><b> ${data.hid}: ${data.name | h}</b></div>
+            %endfor
+            <p>You can check the status of queued jobs and view the resulting data by refreshing the <b>History</b> pane. When the job has been run, the status will change from 'running' to 'finished' if it completed successfully, or to 'error' if problems were encountered. You are now being redirected back to <a href="${h.url_for( '/' )}">Galaxy</a>.</p>
+        </div>
+        %if job_errors:
+            <div class="errormessagelarge">
+                There were errors setting up ${len(job_errors)} submitted job(s):
+                <ul>
+                    %for job_error in job_errors:
+                        <li><b>${job_error | h}</b></li>
+                    %endfor
+                </ul>
+            </div>
+        %endif
+    </body>
+</html>
diff --git a/templates/webapps/galaxy/tracks/add_to_viz.mako b/templates/webapps/galaxy/tracks/add_to_viz.mako
new file mode 100644
index 0000000..d838e79
--- /dev/null
+++ b/templates/webapps/galaxy/tracks/add_to_viz.mako
@@ -0,0 +1,3 @@
+## Template generates a grid that enables user to add tracks
+<%namespace file="../grid_base.mako" import="*" />
+${load(True)}
diff --git a/templates/webapps/galaxy/tracks/add_tracks.mako b/templates/webapps/galaxy/tracks/add_tracks.mako
new file mode 100644
index 0000000..d838e79
--- /dev/null
+++ b/templates/webapps/galaxy/tracks/add_tracks.mako
@@ -0,0 +1,3 @@
+## Template generates a grid that enables user to add tracks
+<%namespace file="../grid_base.mako" import="*" />
+${load(True)}
diff --git a/templates/webapps/galaxy/tracks/history_datasets_select_grid.mako b/templates/webapps/galaxy/tracks/history_datasets_select_grid.mako
new file mode 100644
index 0000000..dc53826
--- /dev/null
+++ b/templates/webapps/galaxy/tracks/history_datasets_select_grid.mako
@@ -0,0 +1,5 @@
+<%inherit file="/tracks/history_select_grid.mako"/>
+
+<%def name="title()">
+    <h2>History '${grid.get_current_item( trans, **kwargs ).name | h}'</h2>
+</%def>
diff --git a/templates/webapps/galaxy/tracks/history_select_grid.mako b/templates/webapps/galaxy/tracks/history_select_grid.mako
new file mode 100644
index 0000000..6cc4793
--- /dev/null
+++ b/templates/webapps/galaxy/tracks/history_select_grid.mako
@@ -0,0 +1,84 @@
+##
+## TODO: what is needed is a general template for an 'embedded grid' that
+## can be easily subclassed. Importing methods the way this template does
+## makes it hard to subclass templates cleanly.
+##
+<%namespace name="grid_base" file="../grid_base.mako" import="*" />
+
+<%def name="select_header()">
+    <script type="text/javascript">
+        // Load all grid URLs into modal-body element so that
+        // grid + links stays embedded.
+        $(document).ready(function() {
+            $(".addtracktab").click(function() {
+                var modal_body = $(this).closest('.inbound');
+                if (modal_body.length !== 0) {
+                    modal_body.load($(this).attr("href"));
+                    return false;
+                }
+            });
+        });
+    </script>
+    <style>
+        .dialog-box .body {
+            overflow-x: hidden;
+        }
+        .addtracktab {
+            margin: 0px 5px;
+            padding: 5px;
+            display: block;
+            width: 35%;
+            text-align: center;
+            float: left;
+            background-color: #ccc;
+            border: 1px solid #ccc;
+            border-bottom: 0px;
+            -webkit-border-top-left-radius: 10px;
+            -webkit-border-top-right-radius: 10px;
+            -moz-border-radius-topleft: 10px;
+            -moz-border-radius-topright: 10px;
+            border-top-left-radius: 10px;
+            border-top-right-radius: 10px;
+        }
+        .activetab {
+            border: 1px solid #aaa;
+            border-bottom: 0px;
+            background-color: white;
+            margin-bottom: -2px;
+        }
+        .divider {
+            clear: both;
+            border-top: 1px solid #aaa;
+            margin-bottom: 5px;
+        }
+    
+    </style>
+
+    <% histories_active = data_libraries_active = "" %>
+    %if getattr(grid, "datasets_param", None):
+        %if grid.datasets_param == "f-history":
+            <% histories_active = " activetab" %>
+        %else:
+            <% data_libraries_active = " activetab" %>
+        %endif
+    %endif
+    ## Add filter parameters manually because they include a hyphen and hence
+    ## cannot be passed as keyword arguments.
+    <% 
+        dbkey = '?'
+        if cur_filter_dict:
+            dbkey = cur_filter_dict.get( 'dbkey', '?' ) 
+    %>
+    <a class="addtracktab${histories_active}" href="${h.url_for(controller='visualization', action='list_histories')}?f-dbkey=${dbkey}">Histories</a>
+    <a class="addtracktab${data_libraries_active}" href="${h.url_for(controller='visualization', action='list_libraries' )}">Data Libraries</a>
+    <div class="divider"></div>
+</%def>
+
+## Need to define title so that it can be overridden by child templates.
+<%def name="title()"></%def>
+
+<div class='inbound'>
+    ${select_header()}
+    ${self.title()}
+    ${grid_base.load(True)}
+</div>
diff --git a/templates/webapps/galaxy/tracks/library_datasets_select_grid.mako b/templates/webapps/galaxy/tracks/library_datasets_select_grid.mako
new file mode 100644
index 0000000..a5663be
--- /dev/null
+++ b/templates/webapps/galaxy/tracks/library_datasets_select_grid.mako
@@ -0,0 +1,13 @@
+<%namespace file="/tracks/history_select_grid.mako" import="select_header" />
+<%namespace file='/library/common/browse_library.mako' import="render_content, grid_javascripts" />
+
+<%def name="title()">
+    <h2>Library '${grid.get_current_item( trans, **kwargs ).name | h}'</h2>
+</%def>
+
+${select_header()}
+${grid_javascripts()}
+${render_content(simple=True)}
+<script type="text/javascript">
+    make_popup_menus();
+</script>
diff --git a/templates/webapps/galaxy/user/api_keys.mako b/templates/webapps/galaxy/user/api_keys.mako
new file mode 100644
index 0000000..1bcac10
--- /dev/null
+++ b/templates/webapps/galaxy/user/api_keys.mako
@@ -0,0 +1,45 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button"  href="${h.url_for( controller='user', action='index', cntrller=cntrller )}">User preferences</a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Web API Key</div>
+    <div class="toolFormBody">
+        <form name="user_api_keys" id="user_api_keys" action="${h.url_for( controller='user', action='api_keys', cntrller=cntrller )}" method="post" >
+            <div class="form-row">
+                <label>Current API key:</label>
+                %if user.api_keys:
+                    ${user.api_keys[0].key}
+                %else:
+                    none set
+                %endif
+            </div>
+            <div class="form-row">
+                <input type="submit" name="new_api_key_button" value="Generate a new key now"/>
+                %if user.api_keys:
+                    (invalidates old key)
+                %endif
+                <div class="toolParamHelp" style="clear: both;">
+                    <%
+                        if trans.webapp.name == 'galaxy':
+                            webapp_str = 'Galaxy'
+                        else:
+                            webapp_str = 'the Tool Shed'
+                    %>
+                    An API key will allow you to access ${webapp_str} via its web API.  Please note that <strong>this key acts as an alternate means 
+                    to access your account and should be treated with the same care as your login password</strong>.
+                </div>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/galaxy/user/list_users.mako b/templates/webapps/galaxy/user/list_users.mako
new file mode 100644
index 0000000..e5638b9
--- /dev/null
+++ b/templates/webapps/galaxy/user/list_users.mako
@@ -0,0 +1,38 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+
+%if users:
+    <div class="toolForm">
+            <div class="toolFormTitle">Users informations</div>
+            <table class="grid">
+                <thead><tr><th>UID</th><th>Email</th><th>API key</th><th></th></tr></thead>
+                <tbody>
+                %for user in users:
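+                     ## One row per user; the embedded form regenerates that user's API key.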
+                     <tr>
+                        <td>${user['uid']}</td>
+                        <td>${user['email']}</td>
+                        <td>${user['key']}</td>
+                        <td>
+                          <form action="${h.url_for( controller='userskeys', action='admin_api_keys', cntrller=cntrller )}" method="POST">
+                          <input type="hidden" name="uid" value=${user['uid']} />
+                          <input type="submit" name="new_api_key_button" value="Generate a new key now" />
+                          </form>
+                        </td>
+                     </tr>
+                %endfor
+                </tbody>
+                </table>
+            <div style="clear: both"></div>
+    </div>
+%else:
+    <div>No information available</div>
+%endif
+
+
+<p/>
+
diff --git a/templates/webapps/galaxy/user/manage_info.mako b/templates/webapps/galaxy/user/manage_info.mako
new file mode 100644
index 0000000..8b1a747
--- /dev/null
+++ b/templates/webapps/galaxy/user/manage_info.mako
@@ -0,0 +1,95 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/user/info.mako" import="render_user_info" />
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+${render_user_info()}
+
+%if user.values or user_info_forms:
+    <p></p>
+    <div class="toolForm">
+        <form name="user_info" id="user_info" action="${h.url_for( controller='user', action='edit_info', cntrller=cntrller, user_id=trans.security.encode_id( user.id ) )}" method="post" >
+            <div class="toolFormTitle">User information</div>
+            %if user_type_fd_id_select_field and len( user_type_fd_id_select_field.options ) >= 1:
+                <div class="form-row">
+                    <label>User type:</label>
+                    ${user_type_fd_id_select_field.get_html()}
+                </div>
+            %else:
+                <input type="hidden" name="user_type_fd_id" value="${trans.security.encode_id( user_type_fd_id )}"/>
+            %endif
+            %for field in widgets:
+                <div class="form-row">
+                    <label>${field['label']}:</label>
+                    ${field['widget'].get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${field['helptext']}
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+            %endfor
+            <div class="form-row">
+                <input type="submit" name="edit_user_info_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+    <p></p>
+%endif
+
+<p/>
+
+<div class="toolForm">
+    <form name="user_addresses" id="user_addresses" action="${h.url_for( controller='user', action='new_address', cntrller=cntrller, id=trans.security.encode_id( user.id ) )}" method="post" >
+        <div class="toolFormTitle">User Addresses</div>
+        <div class="toolFormBody">
+            %if user.addresses:
+                <div class="form-row">
+                <div class="grid-header">
+                    %for i, filter in enumerate( ['Active', 'Deleted', 'All'] ):
+                        %if i > 0:
+                            <span>|</span>
+                        %endif
+                        %if show_filter == filter:
+                            <span class="filter"><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, show_filter=filter, id=trans.security.encode_id( user.id ) )}"><b>${filter}</b></a></span>
+                        %else:
+                            <span class="filter"><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, show_filter=filter, id=trans.security.encode_id( user.id ) )}">${filter}</a></span>
+                        %endif
+                    %endfor
+                </div>
+                </div>
+                <table class="grid">
+                    <tbody>
+                        %for index, address in enumerate(addresses):
+                            <tr class="libraryRow libraryOrFolderRow" id="libraryRow">
+                                <td>
+                                    <div class="form-row">
+                                        <label>${address.desc | h}:</label>
+                                        ${address.get_html()}
+                                    </div>
+                                    <div class="form-row">
+                                        <ul class="manage-table-actions">
+                                            <li>
+                                                %if not address.deleted:
+                                                    <a class="action-button"  href="${h.url_for( controller='user', action='edit_address', cntrller=cntrller, address_id=trans.security.encode_id( address.id ), id=trans.security.encode_id( user.id ) )}">Edit</a>
+                                                    <a class="action-button"  href="${h.url_for( controller='user', action='delete_address', cntrller=cntrller, address_id=trans.security.encode_id( address.id ), id=trans.security.encode_id( user.id ) )}">Delete</a>
+                                                %else:
+                                                    <a class="action-button"  href="${h.url_for( controller='user', action='undelete_address', cntrller=cntrller, address_id=trans.security.encode_id( address.id ), id=trans.security.encode_id( user.id ) )}">Undelete</a>
+                                                %endif
+                                            </li>
+                                        </ul>
+                                    </div>
+                                </td>
+                             </tr>
+                        %endfor
+                    </tbody>
+                </table>
+            %endif
+            <div class="form-row">
+                <input type="submit" value="Add a new address">
+            </div>
+        </div>
+    </form>
+</div>
diff --git a/templates/webapps/galaxy/visualization/create.mako b/templates/webapps/galaxy/visualization/create.mako
new file mode 100644
index 0000000..ed30098
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/create.mako
@@ -0,0 +1,14 @@
+<%inherit file="/form.mako"/>
+
+<%def name="javascripts()">
+${parent.javascripts()}
+<script type="text/javascript">
+$(function(){
+    var visualization_name = $("input[name=visualization_title]");
+    var visualization_slug = $("input[name=visualization_slug]");
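+    // Derive a URL-friendly slug from the title: whitespace becomes hyphens,
+    // other non-alphanumeric characters are stripped, and the result is lowercased.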
+    visualization_name.keyup(function(){
+        visualization_slug.val( $(this).val().replace(/\s+/g,'-').replace(/[^a-zA-Z0-9\-]/g,'').toLowerCase() )
+    });    
+})
+</script>
+</%def>
diff --git a/templates/webapps/galaxy/visualization/display.mako b/templates/webapps/galaxy/visualization/display.mako
new file mode 100644
index 0000000..d10052e
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/display.mako
@@ -0,0 +1,109 @@
+<%inherit file="/display_base.mako"/>
+
+<%def name="javascripts()">
+    <% config = item_data %>
+    ${parent.javascripts()}
+
+    <script type='text/javascript'>
+        $(function() {
+            // HACK: add bookmarks container and header.
+            $('#right > .unified-panel-body > div').append(
+                $('<div/>').attr('id', 'bookmarks-container')
+                .append( $('<h4/>').text('Bookmarks') )
+            );
+        });
+    </script>
+
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css(
+        "trackster"
+    )}
+
+    ## Style changes needed for display.
+    <style type="text/css">
+        .page-body {
+            padding: 0px;
+        }
+        #bookmarks-container {
+            padding-left: 10px;
+        }
+        .bookmark {
+            margin: 0em;
+        }
+    </style>
+</%def>
+
+<%def name="render_item_header( item )">
+    ## Don't need to show header
+</%def>
+
+<%def name="render_item_links( visualization )">
+    <a
+        href="${h.url_for( controller='/visualization', action='imp', id=trans.security.encode_id( visualization.id ) )}"
+        class="icon-button import"
+        ## Needed to override the initial width so that the link is floated left appropriately.
+        style="width: 100%"
+        title="Import visualization">Import visualization</a>
+</%def>
+
+<%def name="render_item( visualization, config )">
+    <div id="${trans.security.encode_id( visualization.id )}" class="unified-panel-body" style="overflow:none;top:0px;"></div>
+
+    <script type="text/javascript">
+        require.config({
+            baseUrl: "${h.url_for('/static/scripts') }",
+            shim: {
+                "libs/underscore": { exports: "_" },
+                "libs/backbone": { exports: "Backbone" },
+            },
+            urlArgs: 'v=${app.server_starttime}'
+        });
+        require( ["viz/trackster"], function(trackster) {
+
+            // FIXME: deliberate global required for now due to requireJS integration.
+            view = null;
+
+            var ui = new (trackster.TracksterUI)( "${h.url_for('/')}" ),
+                container_element = $("#${trans.security.encode_id( visualization.id )}");
+
+            $(function() {
+                var is_embedded = (container_element.parents(".item-content").length > 0);
+
+                // HTML setup.
+                if (is_embedded) {
+                    container_element.css( { "position": "relative" } );
+                } else { // Viewing just one shared viz
+                    $("#right-border").click(function() { view.resize_window(); });
+                }
+
+                // Create visualization.
+                var callback;
+                %if 'viewport' in config:
+                    callback = function() { view.change_chrom( '${config['viewport']['chrom']}', ${config['viewport']['start']}, ${config['viewport']['end']} ); };
+                %endif
+                view = ui.create_visualization( {
+                                                container: container_element,
+                                                name: "${config.get('title') | h}",
+                                                vis_id: "${config.get('vis_id')}",
+                                                dbkey: "${config.get('dbkey')}"
+                                             },
+                                             ${ h.dumps( config.get( 'viewport', dict() ) ) },
+                                             ${ h.dumps( config['tracks'] ) },
+                                             ${ h.dumps( config.get('bookmarks') ) }
+                                             );
+
+                // Set up keyboard navigation.
+                ui.init_keyboard_nav(view);
+
+                // HACK: set viewport height because it cannot be set automatically. Currently, max height for embedded
+                // elts is 25em, so use 20em.
+                view.viewport_container.height("20em");
+            });
+
+        });
+
+    </script>
+</%def>
diff --git a/templates/webapps/galaxy/visualization/display_in_frame.mako b/templates/webapps/galaxy/visualization/display_in_frame.mako
new file mode 100644
index 0000000..1400e9d
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/display_in_frame.mako
@@ -0,0 +1,58 @@
+<%inherit file="/display_base.mako"/>
+
+<%def name="javascripts()">
+    <% config = item_data %>
+    ${parent.javascripts()}
+
+    <script type='text/javascript'>
+        $(function() {
+            // HACK: add bookmarks container and header.
+            $('#right > .unified-panel-body > div').append(
+                $('<div/>').attr('id', 'bookmarks-container')
+                .append( $('<h4/>').text('Bookmarks') )
+            );
+        });
+    </script>
+
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css(
+        "trackster"
+    )}
+
+    ## Style changes needed for display.
+    <style type="text/css">
+        .page-body {
+            padding: 0px;
+        }
+        #bookmarks-container {
+            padding-left: 10px;
+        }
+        .bookmark {
+            margin: 0em;
+        }
+    </style>
+</%def>
+
+<%def name="render_item_header( item )">
+    ## Don't need to show header
+</%def>
+
+<%def name="render_item_links( visualization )">
+    <a
+        href="${h.url_for( controller='/visualization', action='imp', id=trans.security.encode_id( visualization.id ) )}"
+        class="icon-button import"
+        ## Needed to override the initial width so that the link is floated left appropriately.
+        style="width: 100%"
+        title="Import visualization">Import visualization</a>
+</%def>
+
+<%def name="render_item( visualization, config )">
+    <div id="${trans.security.encode_id( visualization.id )}" class="unified-panel-body" style="overflow:none;top:0px;">
+        <iframe frameborder="0" width="100%" height="100%" sandbox="allow-forms allow-same-origin allow-scripts"
+                src="/visualization/saved?id=${encoded_visualization_id}&embedded=True">
+        </iframe>
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/visualization/embed.mako b/templates/webapps/galaxy/visualization/embed.mako
new file mode 100644
index 0000000..242d6df
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/embed.mako
@@ -0,0 +1,4 @@
+<%inherit file="/embed_base.mako"/>
+
+<%def name="render_summary_content( workflow, steps )">
+</%def>
diff --git a/templates/webapps/galaxy/visualization/embed_in_frame.mako b/templates/webapps/galaxy/visualization/embed_in_frame.mako
new file mode 100644
index 0000000..b239bc1
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/embed_in_frame.mako
@@ -0,0 +1,80 @@
+<%namespace file="/display_common.mako" import="*" />
+
+<%
+    import_href = h.url_for( controller='visualization', action='imp', id=encoded_visualization_id )
+    display_href = h.url_for( controller='visualization', action='display_by_username_and_slug',
+        username=item.user.username, slug=item.slug )
+%>
+<div id="visualization-${encoded_visualization_id}" class='embedded-item display visualization'>
+    <div class='title'>
+        <div style="float: left">
+            <a class="expand-content-btn icon-button toggle-expand" href="${display_href}"
+               title="Show or hide visualization"></a>
+        </div>
+        <div style="float: right;">
+            <a title="Import" class="icon-button import" href="${import_href}"></a>
+            <a title="View" class="icon-button go-to-full-screen" href="${display_href}"></a>
+        </div>
+        <h4>
+            <a class="toggle-embed" href="${display_href}" title="Show or hide visualization">
+                Galaxy Visualization | ${get_item_name( item )}
+            </a>
+        </h4>
+        %if hasattr( item, "annotation") and item.annotation:
+        <div class="annotation">${item.annotation}</div>
+        %endif
+    </div>
+    ##<div class='summary-content'>
+        ## currently, no summary content for visualization
+        ## could do the title or caption, whatever...
+    ##</div>
+    <div class='expanded-content'>
+        <div class='item-content'>
+        </div>
+    </div>
+</div>
+
+<script type="text/javascript">
+// Embedding the same visualization more than once will confuse DOM ids.
+//  In order to handle this, find this script and cache the previous node (the div above).
+//  (Since we need thisScript to be locally scoped or it will get overwritten, enclose in self-calling function)
+(function(){
+    var scripts = document.getElementsByTagName( 'script' ),
+        // this is executed immediately, so the last script will be this script
+        thisScript = scripts[ scripts.length - 1 ],
+        $embeddedObj = $( thisScript ).prev();
+
+    /** check for an existing iframe for this visualization, adding one to the item-content if needed */
+    function addVisualizationIFrame(){
+        var $embeddedObj = $( thisScript ).prev(),
+            $itemContent = $embeddedObj.find( '.expanded-content .item-content' ),
+            $iframe = $itemContent.find( 'iframe' );
+        if( $iframe.size() ){ return $iframe; }
+        return $itemContent.html([
+                '<iframe frameborder="0" width="100%" height="100%" ',
+                        'sandbox="allow-forms allow-same-origin allow-scripts" ',
+                        'src="/visualization/saved?id=${encoded_visualization_id}&embedded=True">',
+                '</iframe>'
+            ].join('')).find( 'iframe' );
+    }
+
+    /** 4 elements change when expanding - toggle them all, add the iframe and prevent the url change */
+    function toggleExpanded( ev ){
+        var $embeddedObj = $( thisScript ).prev();
+        $embeddedObj.find( '.expand-content-btn' ).toggleClass( 'toggle-expand' ).toggleClass( 'toggle' ).show();
+        $embeddedObj.find( ".summary-content" ).slideToggle( "fast" );
+        $embeddedObj.find( ".annotation" ).slideToggle( "fast" );
+        $embeddedObj.find( ".expanded-content" ).slideToggle( "fast" );
+        addVisualizationIFrame();
+        ev.preventDefault();
+    }
+
+    // add expansion to +/- btn and title
+    $(function(){
+        var $embeddedObj = $( thisScript ).prev();
+        // clear the handlers (w/ off) created in page/display/mako for visualizations
+        $embeddedObj.find( '.expand-content-btn' ).off().click( toggleExpanded );
+        $embeddedObj.find( '.toggle-embed' ).off().click( toggleExpanded );
+    });
+})();
+</script>
diff --git a/templates/webapps/galaxy/visualization/gie.mako b/templates/webapps/galaxy/visualization/gie.mako
new file mode 100644
index 0000000..4696810
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/gie.mako
@@ -0,0 +1,157 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="visualization"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="center_panel()">
+
+    ${h.js('libs/jquery/select2')}
+
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            <div class="col-md-8 col-md-offset 2 col-xs-12">
+                <div class="row">
+                    <h1>Galaxy Interactive Environment Launcher</h1>
+                    <p>
+                        Galaxy Interactive Environments (GIEs) launch embedded,
+                        dockerized versions of popular data analysis suites such as
+                        Jupyter and RStudio, right from within Galaxy. They let you
+                        interact dynamically with your data directly on the server,
+                        with no more uploading and downloading between platforms
+                        just to get your work done.
+                        <br />
+                        <a href="https://docs.galaxyproject.org/en/master/admin/interactive_environments.html">Admin Docs</a>
+                    </p>
+                    <form id='launcher' action="NONE" method="GET">
+
+                        <table class="table table-striped">
+                            <tr>
+                                <td>GIE: </td>
+                                <td>
+                                    <span id="image_name" style="width: 400px" />
+                                </td>
+                            </tr>
+                            <tr>
+                                <td>Image: </td>
+                                <td>
+                                    <span id="image_tag" style="width:400px" />
+                                    <input id="image_tag_hidden" type="hidden" name="image_tag" value="NONE" />
+                                    <p id="image_desc">
+                                    </p>
+                                </td>
+                            </tr>
+                            <tr>
+                                <td>Datasets: </td>
+                                <td>
+                                    <span id="additional_datasets" style="width:400px"></span>
+                                    <input id="dataset_id" name="dataset_id" type="hidden">
+                                    <input id="additional_dataset_ids" name="additional_dataset_ids" type="hidden">
+                                </td>
+                            </tr>
+                        </table>
+                        <input type="submit" class="button" value="Launch" disabled="">
+                    </form>
+                </div>
+            </div>
+
+
+        </div>
+    </div>
+
+<script type="text/javascript">
+$(document).ready(function(){
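+    // Rendered server-side by Mako: map each GIE name to its available image versions.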
+    var gie_image_map = {
+        % for image_name in gie_image_map.keys():
+            "${image_name}": [
+                % for image in gie_image_map[image_name]:
+                {
+                    id: "${image['image']}",
+                    text: "${image['image']}",
+                    extra: "${image['description'].replace('\n', ' ')}",
+                },
+                % endfor
+            ],
+        % endfor
+    }
+
+    var images = [
+        % for image_name in gie_image_map.keys():
+        {
+            id: "${image_name}",
+            text: "${image_name}"
+        },
+        % endfor
+    ]
+
+    var datasets = [
+        % for hda in history.visible_datasets[::-1]:
+            { id: "${ trans.security.encode_id(hda.id) }", text: "${ hda.hid } : ${ hda.name }" },
+        % endfor
+    ]
+
+    $("#additional_datasets").select2({
+        multiple: true,
+        data: datasets
+    }).on('change', function(e){
+        ds_ids = $("#additional_datasets").val().split(',');
+        if(ds_ids.length < 1 || ds_ids[0] === ""){
+            $('input[type="submit"]').attr('disabled', '');
+        } else {
+            $("#dataset_id").val(ds_ids[0])
+
+            // In a perfect world the controller would just support a single
+            // parameter passing a list of dataset IDs.
+            //
+            // We aren't in that world (yet).
+            if(ds_ids.length > 1){
+                $("#additional_dataset_ids").val(ds_ids.slice(1).join(","))
+            }else{
+                $("#additional_dataset_ids").val("")
+            }
+            $('input[type="submit"]').removeAttr('disabled');
+        }
+    })
+
+    function formatter(v){
+        if(!v.id) return v.text;
+        return "<b>" + v.id + "</b><p>" + v.extra + "</p>"
+    }
+
+    $('#image_name').select2({
+        placeholder: "Select Image",
+        data: images
+    }).on('change', function(e){
+        // Get the versions for this image name
+        image_versions = gie_image_map[e.val]
+        // Update the action
+        $("#launcher").attr("action", "../plugins/interactive_environments/" + e.val + "/show")
+        // Update the hidden input
+        $("#image_name_hidden").val(e.val)
+        // Set disabled if they switch image family without updating image.
+        $('input[type="submit"]').attr('disabled', '');
+
+        // Create our select2 appropriately
+        image_tags = $("#image_tag").select2({
+            placeholder: "Image Version",
+            formatResult: formatter,
+            formatSelection: formatter,
+            escapeMarkup: function(m) { return m; },
+            data: image_versions
+        }).on('change', function(e2){
+            // Inner actions, update the hidden input
+            $("#image_tag_hidden").val(e2.val)
+            // Enable the button
+            if($("additional_datasets").length > 0){
+                $('input[type="submit"]').removeAttr('disabled');
+            }
+        })
+    })
+})
+</script>
+</%def>
diff --git a/templates/webapps/galaxy/visualization/item_content.mako b/templates/webapps/galaxy/visualization/item_content.mako
new file mode 100644
index 0000000..947d34d
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/item_content.mako
@@ -0,0 +1,3 @@
+<%namespace file="/visualization/display.mako" import="*" />
+
+${render_item( item, item_data )}
diff --git a/templates/webapps/galaxy/visualization/list.mako b/templates/webapps/galaxy/visualization/list.mako
new file mode 100644
index 0000000..e82a038
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/list.mako
@@ -0,0 +1,67 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="visualization"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="center_panel()">
+
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            %if message:
+                <%
+                    try:
+                        status
+                    except NameError:
+                        status = "done"
+                %>
+                <p />
+                <div class="${status}message">
+                    ${h.to_unicode( message )}
+                </div>
+            %endif
+
+            <!-- embedded grid -->
+            ${h.to_unicode( embedded_grid )}
+
+            <br><br>
+            <h2>Visualizations shared with you by others</h2>
+
+            %if shared_by_others:
+                <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+                    <tr class="header">
+                        <th>Title</th>
+                        <th>Owner</th>
+                        <th></th>
+                    </tr>
+                    %for i, association in enumerate( shared_by_others ):
+                        <% visualization = association.visualization %>
+                        <tr>
+                            <td>
+                                <a class="menubutton" id="shared-${i}-popup" href="${h.url_for( controller='visualization', action='display_by_username_and_slug', username=visualization.user.username, slug=visualization.slug)}">${visualization.title}</a>
+                            </td>
+                            <td>${visualization.user.username}</td>
+                            <td>
+                                <div popupmenu="shared-${i}-popup">
+                                    <a class="action-button" href="${h.url_for( controller='visualization', action='display_by_username_and_slug', username=visualization.user.username, slug=visualization.slug)}" target="_top">View</a>
+                                    <a class="action-button" href="${h.url_for( controller='visualization', action='copy', id=trans.security.encode_id(visualization.id) )}">Copy</a>
+                                </div>
+                            </td>
+                        </tr>
+                    %endfor
+                </table>
+            %else:
+
+                No visualizations have been shared with you.
+
+            %endif
+
+        </div>
+    </div>
+
+</%def>
diff --git a/templates/webapps/galaxy/visualization/list_published.mako b/templates/webapps/galaxy/visualization/list_published.mako
new file mode 100644
index 0000000..ae088f5
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/list_published.mako
@@ -0,0 +1,35 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="shared"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">
+    Galaxy | Published Visualizations
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        .grid td {
+            min-width: 100px;
+        }
+    </style>
+</%def>
+
+<%def name="center_panel()">
+
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            <!-- embedded grid -->
+            ${h.to_unicode( embedded_grid )}
+        </div>
+    </div>
+
+
+</%def>
diff --git a/templates/webapps/galaxy/visualization/phyloviz.mako b/templates/webapps/galaxy/visualization/phyloviz.mako
new file mode 100644
index 0000000..fd447e4
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/phyloviz.mako
@@ -0,0 +1,318 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+##
+<%def name="init()">
+    <%
+        self.has_left_panel=False
+        self.has_right_panel=False
+        self.active_view="visualization"
+        self.message_box_visible=False
+    %>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+
+        .node circle {
+            cursor: pointer;
+            fill: #fff;
+            stroke: steelblue;
+            stroke-width: 1.5px;
+        }
+
+        .node.searchHighlight circle {
+            stroke-width: 3px;
+            stroke: #7adc26;
+        }
+
+        .node.selectedHighlight circle {
+            stroke-width: 3px;
+            stroke: #dc143c;
+        }
+
+        path.link {
+            fill: none;
+            stroke: #B5BBFF;
+            stroke-width: 4.0px;
+        }
+
+
+        div #phyloVizNavContainer{
+            text-align: center;
+            width: 100%;
+            height: 0px;
+        }
+
+        div #phyloVizNav{
+            font-weight: bold;
+            display: inline-block;
+            background: transparent;
+            top: -2em;
+            position: relative;
+        }
+
+        div .navControl{
+            float: left;
+        }
+
+        div#FloatingMenu {
+            left: 0;
+            top: 15%;
+            width:20%;
+            z-index:100;
+            padding: 5px;
+
+        }
+
+        div#SettingsMenu {
+            width: 25%;
+            top: 350px;
+
+        }
+
+        div#nodeSelectionView {
+            width: 25%;
+            top:70px;
+        }
+
+        .Panel {
+            right: 0%;
+            z-index: 101;
+            position: fixed;
+
+        ##          Borrowed from galaxy modal_dialogues
+            background-color: white;
+            border: 1px solid #999;
+            border: 1px solid rgba(0, 0, 0, 0.3);
+            -webkit-border-radius: 6px;
+            -moz-border-radius: 6px;
+            border-radius: 6px;
+            -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+            -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+            box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3);
+            -webkit-background-clip: padding-box;
+            -moz-background-clip: padding-box;
+            background-clip: padding-box;
+        }
+
+        span.PhylovizCloseBtn{
+            cursor: pointer;
+            float : right;
+        }
+
+        #PhyloViz{
+            width: 100%;
+            height: 95%;
+        }
+
+        h2.PhyloVizMenuTitle{
+            color: white;
+        }
+
+        ##        Settings Menu
+        .SettingMenuRows{
+            margin: 2px 0 2px 0;
+        }
+
+
+        ##        Helper Styles
+        .PhyloVizFloatLeft{
+            float: left;
+        }
+        .icon-button.zoom-in,.icon-button.zoom-out{display:inline-block;height:16px;width:16px;margin-bottom:-3px;cursor:pointer;}
+        .icon-button.zoom-out{background:transparent url(../images/fugue/magnifier-zoom-out.png) center center no-repeat;}
+        .icon-button.zoom-in{margin-left:10px;background:transparent url(../images/fugue/magnifier-zoom.png) center center no-repeat;}
+
+    </style>
+</%def>
+
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/require" )}
+
+    <script type="text/javascript">
+
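+        // Configure RequireJS as in the other visualization templates: shim the
+        // non-AMD libraries and append the server start time as a cache buster.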
+        require.config({
+            baseUrl: "${h.url_for('/static/scripts')}",
+            shim: {
+                "libs/underscore": { exports: "_" },
+                "libs/d3": { exports: "d3" }
+            },
+            urlArgs: 'v=${app.server_starttime}'
+        });
+
+        require(["viz/phyloviz"], function(phyloviz_mod) {
+
+            function initPhyloViz(data, config) {
+                var phyloviz;
+
+                // -- Initialization code |-->
+                phyloviz = new phyloviz_mod.PhylovizView({
+                    data    : data,
+                    layout  : "Linear",
+                    config  :  config
+                });
+
+                // -- Render viz. --
+                phyloviz.render();
+
+            }
+
+            $(function firstVizLoad(){       // called when the viz is loaded for the first time
+                var config = ${ h.dumps( config )};
+                var data = ${h.dumps(data['data'])};
+                initPhyloViz(data, config);
+            });
+        });
+
+    </script>
+</%def>
+
+
+
+<%def name="center_panel()">
+    <div class="unified-panel-header" unselectable="on">
+        <div class="unified-panel-header-inner">
+            <div style="float:left;" id="title"></div>
+            <div style="float:right;" id="panelHeaderRightBtns"></div>
+        </div>
+        <div style="clear: both"></div>
+    </div>
+
+    <div id="phyloVizNavContainer">
+        <div id="phyloVizNav">
+            %if config["ext"] == "nex" and not config["saved_visualization"]:
+                <div id = "phylovizNexInfo" class="navControl">
+                <p>Select a tree to view:   
+                <select id="phylovizNexSelector">
+                    % for tree, index in data["trees"]:
+                        <option value="${index | h}">${tree | h}</option>
+                    % endfor
+                </select>
+                </p>
+                </div>
+            %endif
+            <div id="phyloVizNavBtns" class="navControl">
+            </div>
+            <div class="navControl">
+                <p> | Alt+click to select nodes</p>
+            </div>
+        </div>
+    </div>
+
+    ##  Node Selection Menu
+    <div id="nodeSelectionView" class="Panel">
+        <div class="modal-header">
+            <h3 class="PhyloVizMenuTitle">Search / Edit Nodes :
+                <span class="PhylovizCloseBtn" id="nodeSelCloseBtn"> X </span>
+            </h3>
+        </div>
+
+        <div class="modal-body">
+
+            <div class="SettingMenuRows">
+                Search for nodes with:
+                <select id="phyloVizSearchCondition" style="width: 55%">
+                    <option value="name-containing">Name (containing)</option>
+                    <option value="annotation-containing">Annotation (containing)</option>
+                    <option value="dist-greaterEqual">Distance (>=)</option>
+                    <option value="dist-lesserEqual">Distance (<=)</option>
+                </select>
+                <input  type="text" id="phyloVizSearchTerm" value="None" size="15" displayLabel="Distance">
+
+                <div class="SettingMenuRows" style="text-align: center;">
+                    <button id="phyloVizSearchBtn" > Search! </button>
+                </div>
+            </div>
+
+            <br/>
+
+            <div class="SettingMenuRows">
+                Name: <input type="text" id="phyloVizSelectedNodeName" value="None" size="15" disabled="disabled" >
+            </div>
+            <div class="SettingMenuRows">
+                Dist: <input type="text" id="phyloVizSelectedNodeDist" value="None" size="15" disabled="disabled" displayLabel="Distance">
+            </div>
+            <div class="SettingMenuRows">
+                Annotation:
+                <textarea id="phyloVizSelectedNodeAnnotation" disabled="disabled" ></textarea>
+            </div>
+            <div class="SettingMenuRows">
+                Edit: <input type="checkbox" id="phylovizEditNodesCheck" value="You can put custom annotations here and it will be saved">
+                <button id="phylovizNodeSaveChanges" style="display: none;"> Save edits</button>
+                <button id="phylovizNodeCancelChanges" style="display: none;"> Cancel</button>
+            </div>
+        </div>
+    </div>
+
+    ##  Settings Menus
+    <div id="SettingsMenu" class="Panel">
+        <div class="modal-header">
+            <h3 class="PhyloVizMenuTitle">Phyloviz Settings:
+                <span class="PhylovizCloseBtn" id="settingsCloseBtn"> X </span>
+            </h3>
+        </div>
+        <div class="modal-body">
+            <div class="SettingMenuRows">
+                Phylogenetic Spacing (px per unit): <input id="phyloVizTreeSeparation" type="text" value="250" size="10" displayLabel="Phylogenetic Separation"> (50-2500)
+            </div>
+            <div class="SettingMenuRows">
+                Vertical Spacing (px): <input type="text" id="phyloVizTreeLeafHeight" value="18" size="10" displayLabel="Vertical Spacing"> (5-30)
+            </div>
+            <div class="SettingMenuRows">
+                Font Size (px): <input type="text" id="phyloVizTreeFontSize" value="12" size="4" displayLabel="Font Size"> (5-20)
+            </div>
+
+        </div>
+        <div class="modal-footer">
+            <button id="phylovizResetSettingsBtn" class="PhyloVizFloatLeft" > Reset </button>
+            <button id="phylovizApplySettingsBtn" class="PhyloVizFloatRight" > Apply </button>
+        </div>
+    </div>
+
+    <div class="Panel" id="FloatingMenu" style="display: None;">
+
+        <h2>PhyloViz (<a onclick="displayHelp()" href="javascript:void(0);">?</a>)</h2>
+        <div style="display: none;">
+            <h2>Summary of Interactions and Functions:</h2>
+            <div class="hint">1. Expansion of Nodes: click or option-click to expand or collapse</div>
+            <div class="hint">2. Zooming and translation: mousewheel, buttons, click and drag, double click. Reset</div>
+            <div class="hint">3. Tooltip: Displays "Name and Size" on mouseOver on nodes</div>
+            <div class="hint">4. Minimap: Currently displays an exact but scaled down replicate of the tree, orange bounding box is correct for linear only<br/>
+                Can be switched on or off</div>
+            <div class="hint">5. Changing Layouts: Able to change between circular and linear layouts.</div>
+        </div>
+
+        <h5>Scaling & Rotation:</h5>
+        <button id="phylovizZoomInBtn" class="" > + </button>
+        <button id="phylovizZoomOutBtn" class="" > - </button>
+
+        <h5>Translation:</h5>
+        <button id="phylovizTranslateUpBtn" > Up </button>
+        <button id="phylovizTranslateDownBtn" > Down </button>
+        <br/>
+        <button id="phylovizTranslateLeftBtn" > Left </button>
+        <button id="phylovizTranslateRightBtn" > Right </button>
+
+        <h5>Others:</h5>
+        <button id="phylovizResetBtn" > Reset Zoom/Translate </button>
+        <button id="phylovizSaveBtn" > Save vizualization </button>
+        <button id="phylovizOpenSettingsBtn" > Settings </button>
+    </div>
+
+    <div id="PhyloViz" >
+    </div>
+
+</%def>
+
+
diff --git a/templates/webapps/galaxy/visualization/sweepster.mako b/templates/webapps/galaxy/visualization/sweepster.mako
new file mode 100644
index 0000000..7adaef9
--- /dev/null
+++ b/templates/webapps/galaxy/visualization/sweepster.mako
@@ -0,0 +1,152 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=True
+    self.has_right_panel=True
+    self.active_view="visualization"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        .link {
+            fill: none;
+            stroke: #ccc;
+            stroke-width: 1.5px;
+        }
+        .node {
+            font: 10px sans-serif;
+        }
+        .node circle {
+            fill: #fff;
+            stroke: steelblue;
+            stroke-width: 1.5px;
+            cursor: pointer;
+        }
+        .node:hover {
+            fill: #f00;
+        }
+        .node:hover circle {
+            fill: #ccc;
+            stroke: #f00;
+        }
+        table.tracks {
+            border-collapse: separate;
+            border-spacing: 5px;
+        }
+        .tile {
+            border: solid 1px #DDD;
+            margin: 2px;
+            border-radius: 10px;
+            margin: 3px;
+        }
+        .label {
+            position: fixed;
+            font: 10px sans-serif;
+            font-weight: bold;
+            background-color: #DDD;
+            border-radius: 5px;
+            padding: 1px;
+        }
+        th,td {
+            text-align: center;
+        }
+        td.settings {
+            vertical-align: top;
+        }
+        .icon-button.track-settings {
+            float: none;
+        }
+        .track-info {
+            text-align: left;
+            font: 10px sans-serif;
+            position: fixed;
+            background-color: #CCC;
+            border: solid 1px #AAA;
+            border-radius: 2px;
+            padding: 2px;
+        }
+        .btn-primary, .btn-primary:hover {
+            color: #EEE;
+            background-color: #DDD;
+            background-image: none;
+            border-radius: 12px;
+        }
+        #left {
+            width: 300px;
+        }
+        #center {
+            left: 300px;
+            right: 600px;
+            overflow: auto;
+        }
+        #right {
+            width: 600px;
+        }
+        .tiles {
+            overflow: auto;
+            position: absolute;
+            top: 30px;
+            bottom: 25px;
+            left: 0;
+            right: 0;
+        }
+        .help {
+            border-radius: 15px;
+            border: solid 1px #CCC;
+            padding: 0px 2px;
+            margin: 10px;
+        }
+    </style>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+
+    ${h.js( "libs/jquery/jquery-ui" )}
+
+    <script type="text/javascript">
+        // require.config({
+        //     baseUrl: "${h.url_for('/static/scripts') }",
+        //     shim: {
+        //         "libs/underscore": { exports: "_" },
+        //         "libs/backbone": {
+        //             deps: [ 'jquery', 'libs/underscore' ],
+        //             exports: "Backbone"
+        //         },
+        //         "libs/d3": { exports: "d3" },
+        //     },
+        //     // cache buster based on templated server (re)start time
+        //     urlArgs: 'v=${app.server_starttime}'
+        // });
+
+        require(["viz/sweepster"], function(sweepster) {
+
+            var viz;
+            $(function() {
+                // -- Viz set up. --
+                var viz = new sweepster.SweepsterVisualization(
+                    ${ h.dumps( config )}
+                );
+                var viz_view = new sweepster.SweepsterVisualizationView({ model: viz });
+                viz_view.render();
+            });
+        });
+    </script>
+</%def>
+
+<%def name="center_panel()">
+</%def>
+
+<%def name="left_panel()">
+</%def>
+
+<%def name="right_panel()">
+    <div class="unified-panel-header" unselectable="on">
+        <div class="unified-panel-header-inner">
+        </div>
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/workflow/build_from_current_history.mako b/templates/webapps/galaxy/workflow/build_from_current_history.mako
new file mode 100644
index 0000000..89c1f00
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/build_from_current_history.mako
@@ -0,0 +1,163 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% _=n_ %>
+
+<%def name="title()">Extract workflow from history</%def>
+
+<%def name="stylesheets()">
+    ${h.css( 'history', 'base' )}
+    <style type="text/css">
+    div.toolForm{
+        margin-top: 10px;
+        margin-bottom: 10px;
+    }
+    .list-item.dataset.history-content {
+        padding: 8px 10px;
+    }
+    .list-item.dataset.history-content .title-bar {
+        cursor: auto;
+    }
+    input[type="checkbox"].as-input {
+        margin-left: 8px;
+    }
+    th {
+        border-bottom: solid black 1px;
+    }
+    </style>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+    $(function() {
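+        // Reveal the "Check all"/"Uncheck all" buttons (hidden without JS) and wire them up.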
+        $("#checkall").click( function() {
+            $("input[type=checkbox]").attr( 'checked', true );
+            return false;
+        }).show();
+        $("#uncheckall").click( function() {
+            $("input[type=checkbox]").attr( 'checked', false );
+            return false;
+        }).show();
+    });
+    </script>
+</%def>
+
+<%def name="history_item( data, creator_disabled=False )">
+    %if data.state in [ "no state", "", None ]:
+        <% data_state = "queued" %>
+    %else:
+        <% data_state = data.state %>
+    %endif
+    <% encoded_id = trans.app.security.encode_id( data.id ) %>
+    <table cellpadding="0" cellspacing="0" border="0" width="100%">
+        <tr>
+            <td>
+                <div class="list-item dataset history-content state-${ data.state }" id="dataset-${ encoded_id }">
+                    <div class="title-bar clear">
+                        <div class="title">
+                            <span class="hid">${data.hid}</span>
+                            <span class="name">${data.display_name()}</span>
+                        </div>
+                    </div>
+                    %if creator_disabled:
+                        <input type="checkbox" id="as-input-${ encoded_id }" class="as-input"
+                               name="${data.history_content_type}_ids" value="${data.hid}" checked="true" />
+                        <label for="as-input-${ encoded_id }" >${_('Treat as input dataset')}</label>
+                        <input type="text" id="as-named-input-${ encoded_id }" class="as-named-input"
+                               name="${data.history_content_type}_names" value="${data.display_name() | h}" />
+                    %endif
+                </div>
+            </td>
+        </tr>
+    </table>
+</%def>
+
+<p>The following list contains each tool that was run to create the
+datasets in your current history. Please select those that you wish
+to include in the workflow.</p>
+
+<p>Tools which cannot be run interactively and thus cannot be incorporated
+into a workflow will be shown in gray.</p>
+
+%for warning in warnings:
+    <div class="warningmark">${warning}</div>
+%endfor
+
+<form method="post" action="${h.url_for(controller='workflow', action='build_from_current_history')}">
+<div class='form-row'>
+    <label>${_('Workflow name')}</label>
+    <input name="workflow_name" type="text" value="Workflow constructed from history '${ util.unicodify( history.name )}'" size="60"/>
+</div>
+<p>
+    <input type="submit" value="${_('Create Workflow')}" />
+    <button id="checkall" style="display: none;">Check all</button>
+    <button id="uncheckall" style="display: none;">Uncheck all</button>
+</p>
+
+<table border="0" cellspacing="0">
+
+    <tr>
+        <th style="width: 47.5%">${_('Tool')}</th>
+        <th style="width: 5%"></th>
+        <th style="width: 47.5%">${_('History items created')}</th>
+    </tr>
+
+%for job, datasets in jobs.iteritems():
+
+    <%
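+    # Decide whether this job can be included in the workflow: fake jobs and
+    # tools that are not workflow-compatible are rendered disabled (greyed out).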
+    cls = "toolForm"
+    tool_name = "Unknown"
+    if hasattr( job, 'is_fake' ) and job.is_fake:
+        cls += " toolFormDisabled"
+        disabled = True
+        tool_name = getattr( job, 'name', tool_name )
+    else:
+        tool = app.toolbox.get_tool( job.tool_id )
+        if tool:
+            tool_name = tool.name
+        if tool is None or not( tool.is_workflow_compatible ):
+            cls += " toolFormDisabled"
+            disabled = True
+        else:
+            disabled = False
+        if tool and tool.version != job.tool_version:
+            tool_version_warning = 'Dataset was created with tool version "%s", but workflow extraction will use version "%s".' % ( job.tool_version, tool.version )
+        else:
+            tool_version_warning = ''
+    if disabled:
+        disabled_why = getattr( job, 'disabled_why', "This tool cannot be used in workflows" )
+    %>
+
+    <tr>
+        <td>
+            <div class="${cls}">
+
+                <div class="toolFormTitle">${tool_name}</div>
+                <div class="toolFormBody">
+                    %if disabled:
+                        <div style="font-style: italic; color: gray">${disabled_why}</div>
+                    %else:
+                        <div><input type="checkbox" name="job_ids" value="${job.id}" checked="true" />Include "${tool_name}" in workflow</div>
+                        %if tool_version_warning:
+                            ${ render_msg( tool_version_warning, status="warning" ) }
+                        %endif
+                    %endif
+                </div>
+            </div>
+        </td>
+        <td style="text-align: center;">
+            &#x25B6;
+        </td>
+        <td>
+            %for _, data in datasets:
+                <div>${history_item( data, disabled )}</div>
+            %endfor
+        </td>
+    </tr>
+
+%endfor
+
+</table>
+
+</form>
diff --git a/templates/webapps/galaxy/workflow/configure_menu.mako b/templates/webapps/galaxy/workflow/configure_menu.mako
new file mode 100644
index 0000000..715e47a
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/configure_menu.mako
@@ -0,0 +1,99 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+<%page expression_filter="h"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="workflow"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">Configure workflow menu</%def>
+
+<%def name="center_panel()">
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+%if message:
+<%
+    try:
+        messagetype
+    except NameError:
+        messagetype = "done"
+%>
+<p />
+<div class="${messagetype}message">
+    ${message}
+</div>
+%endif
+
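+## Checkbox table of the user's own and shared workflows; the checked ids are
+## posted back to the configure_menu action to rebuild the workflow menu.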
+<form action="${h.url_for(controller='workflow', action='configure_menu')}" method="POST">
+
+<table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+    <tr class="header">
+        <th>Name</th>
+        <th>Owner</th>
+        <th># of Steps</th>
+        ## <th>Last Updated</th>
+        <th>Show in menu</th>
+    </tr>
+        
+%if workflows:
+
+        %for i, workflow in enumerate( workflows ):
+            <tr>
+                <td>
+                    ${util.unicodify( workflow.name )}
+                </td>
+                <td>You</td>
+                <td>${len(workflow.latest_workflow.steps)}</td>
+                <td>
+                    <input type="checkbox" name="workflow_ids" value="${workflow.id}"
+                    %if workflow.id in ids_in_menu:
+                        checked
+                    %endif
+                    />
+                </td>
+            </tr>    
+        %endfor
+
+%endif
+
+%if shared_by_others:
+
+        %for i, association in enumerate( shared_by_others ):
+            <% workflow = association.stored_workflow %>
+            <tr>
+                <td>
+                    ${util.unicodify( workflow.name )}
+                </td>
+                <td>${workflow.user.email}</td>
+                <td>${len(workflow.latest_workflow.steps)}</td>
+                <td>
+                    <input type="checkbox" name="workflow_ids" value="${workflow.id}"
+                    %if workflow.id in ids_in_menu:
+                        checked
+                    %endif
+                    />
+                </td>
+            </tr>    
+        %endfor
+
+%endif
+
+%if not workflows and not shared_by_others:
+        <tr>
+            <td colspan="4">You do not have any accessible workflows.</td>
+        </tr>
+%endif
+
+</table>
+
+<p />
+<input type="Submit" value="Save" />
+
+</form>
+        </div>
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/workflow/display.mako b/templates/webapps/galaxy/workflow/display.mako
new file mode 100644
index 0000000..7d3726e
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/display.mako
@@ -0,0 +1,132 @@
+<%inherit file="/display_base.mako"/>
+<%namespace file="/display_common.mako" import="render_message" />
+
+<%!
+    from galaxy.tools.parameters.basic import DataCollectionToolParameter, DataToolParameter, RuntimeValue
+    from galaxy.web import form_builder
+    import cgi
+%>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+</%def>
+
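+## Recursively render a read-only view of a tool's inputs, descending into
+## repeat, conditional, and section groups; leaf parameters are handled by
+## row_for_param() below.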
+<%def name="do_inputs( inputs, values, prefix, step, other_values=None )">
+  %for input_index, input in enumerate( inputs.itervalues() ):
+    %if input.type == "repeat":
+      <div class="repeat-group">
+          <div class="form-title-row"><b>${input.title_plural}</b></div>
+          <% repeat_values = values[input.name] %>
+          %for i in range( len( repeat_values ) ):
+            <div class="repeat-group-item">
+                <% index = repeat_values[i]['__index__'] %>
+                <div class="form-title-row"><b>${input.title} ${i + 1}</b></div>
+                ${do_inputs( input.inputs, repeat_values[ i ], prefix + input.name + "_" + str(index) + "|", step, other_values )}
+            </div> 
+          %endfor
+      </div>
+    %elif input.type == "conditional":
+      <% group_values = values[input.name] %>
+      <% current_case = group_values['__current_case__'] %>
+      <% new_prefix = prefix + input.name + "|" %>
+      ${row_for_param( input.test_param, group_values[ input.test_param.name ], other_values, prefix, step )}
+      ${do_inputs( input.cases[ current_case ].inputs, group_values, new_prefix, step, other_values )}
+    %elif input.type == "section":
+      <% new_prefix = prefix + input.name + "|" %>
+      <% group_values = values[input.name] %>
+      <div class="form-title-row"><b>${input.title}:</b></div>
+      <div class="repeat-group">
+        <div class="repeat-group-item">
+          ${do_inputs( input.inputs, group_values, new_prefix, step, other_values )}
+        </div>
+      </div>
+    %else:
+      ${row_for_param( input, values[ input.name ], other_values, prefix, step )}
+    %endif
+  %endfor
+</%def>
+
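+## Render one parameter row: data parameters connected to an upstream step show
+## the producing output(s), unconnected ones read "select at runtime".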
+<%def name="row_for_param( param, value, other_values, prefix, step )">
+    <% cls = "form-row" %>
+    <div class="${cls}">
+        <label>${param.get_label() | h}</label>
+        <div>
+            %if isinstance( param, DataToolParameter ) or isinstance( param, DataCollectionToolParameter ):
+                %if ( prefix + param.name ) in step.input_connections_by_name:
+                    <%
+                        conns = step.input_connections_by_name[ prefix + param.name ]
+                        if not isinstance(conns, list):
+                            conns = [conns]
+                        vals = ["Output dataset '%s' from step %d" % (conn.output_name, int(conn.output_step.order_index)+1) for conn in conns]
+                    %>
+                    ${",".join(vals)}
+                %else:
+                    <i>select at runtime</i>
+                %endif
+            %else:
+                ${cgi.escape( param.value_to_display_text( value, app ) or 'Unavailable.' )}
+            %endif
+        </div>
+        %if hasattr( step, 'upgrade_messages' ) and step.upgrade_messages and param.name in step.upgrade_messages:
+            ${render_message( step.upgrade_messages[param.name], "info" )}
+        %endif
+    </div>
+</%def>
+
+<%def name="render_item_links( workflow )">
+    %if workflow.importable:
+    <a
+        href="${h.url_for( controller='/workflow', action='imp', id=trans.security.encode_id(workflow.id) )}"
+        class="icon-button import"
+        title="Import workflow"></a>
+    <a
+        href="${h.url_for( controller='/workflow', action='export_to_file', id=trans.security.encode_id(workflow.id) )}"
+        class="icon-button disk"
+        title="Save workflow"></a>
+    %endif
+</%def>
+
+<%def name="render_item( workflow, steps )">
+    <%
+        # HACK: Rendering workflow steps requires that trans have a history; however, if this is
+        # the user's first visit to Galaxy, they won't have one yet and an error will occur.
+        # To prevent this, make sure the user has a history.
+        trans.get_history( most_recent=True, create=True )
+    %>
+    <table class="annotated-item">
+        <tr><th>Step</th><th class="annotation">Annotation</th></tr>
+        %for i, step in enumerate( steps ):
+            <tr><td>
+            %if step.type == 'tool' or step.type is None:
+              <% 
+                tool = trans.app.toolbox.get_tool( step.tool_id )
+              %>
+              <div class="toolForm">
+                %if tool:
+                  <div class="toolFormTitle">Step ${int(step.order_index)+1}: ${tool.name | h}</div>
+                  <div class="toolFormBody">
+                    ${do_inputs( tool.inputs, step.state.inputs, "", step )}
+                  </div>
+                %else:
+                  <div class="toolFormTitle">Step ${int(step.order_index)+1}: Unknown Tool with id '${step.tool_id | h}'</div>
+                %endif
+              </div>
+            %else:
+            ## TODO: always input dataset?
+            <% module = step.module %>
+              <div class="toolForm">
+                  <div class="toolFormTitle">Step ${int(step.order_index)+1}: ${module.name | h}</div>
+                  <div class="toolFormBody">
+                    ${do_inputs( module.get_runtime_inputs(), step.state.inputs, "", step )}
+                  </div>
+              </div>
+            %endif
+            </td>
+            <td class="annotation">
+                %if hasattr( step, "annotation") and step.annotation is not None:
+                    ${step.annotation}
+                %endif                
+            </td>
+            </tr>
+        %endfor
+    </table>
+</%def>
diff --git a/templates/webapps/galaxy/workflow/editor.mako b/templates/webapps/galaxy/workflow/editor.mako
new file mode 100644
index 0000000..341444a
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/editor.mako
@@ -0,0 +1,442 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
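+## Build the configuration handed to the client-side editor: the encoded
+## workflow id, the controller URLs it needs, and the user's other workflows.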
+<%def name="init()">
+<%
+    self.active_view="workflow"
+    self.overlay_visible=True
+    self.editor_config = {
+        'id'      : trans.security.encode_id( stored.id ),
+        'urls'    : {
+            'tool_search'         : h.url_for( '/api/tools' ),
+            'get_datatypes'       : h.url_for( '/api/datatypes/mapping' ),
+            'load_workflow'       : h.url_for( controller='workflow', action='load_workflow' ),
+            'run_workflow'        : h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id(stored.id)),
+            'rename_async'        : h.url_for( controller='workflow', action='rename_async', id=trans.security.encode_id(stored.id) ),
+            'annotate_async'      : h.url_for( controller='workflow', action='annotate_async', id=trans.security.encode_id(stored.id) ),
+            'get_new_module_info' : h.url_for(controller='workflow', action='get_new_module_info' ),
+            'workflow_index'      : h.url_for( controller='workflow', action='index' ),
+            'save_workflow'       : h.url_for(controller='workflow', action='save_workflow' ),
+            'workflow_save_as'    : h.url_for(controller='workflow', action='save_workflow_as') 
+        },
+        'workflows' : [{
+            'id'                  : trans.security.encode_id( workflow.id ),
+            'latest_id'           : trans.security.encode_id( workflow.latest_workflow.id ),
+            'step_count'          : len( workflow.latest_workflow.steps ),
+            'name'                : h.to_unicode( workflow.name )
+        } for workflow in workflows ]
+    }
+%>
+</%def>
+
+<%def name="javascripts()">
+
+    ${parent.javascripts()}
+
+    ${h.js(
+        "libs/jquery/jquery.event.drag",
+        "libs/jquery/jquery.event.drop",
+        "libs/jquery/jquery.event.hover",
+        "libs/jquery/jquery.form",
+        "libs/jquery/jstorage",
+        "libs/jquery/jquery.autocomplete",
+    )}
+
+    <script type='text/javascript'>
+        workflow_view = null;
+        $( function() {
+            require(['mvc/workflow/workflow-view'], function(Workflow){
+                workflow_view = new Workflow(${h.dumps(self.editor_config)});
+            });
+        });
+    </script>
+</%def>
+
+<%def name="stylesheets()">
+
+    ## Include "base.css" for styling tool menu and forms (details)
+    ${h.css( "base", "autocomplete_tagging", "jquery-ui/smoothness/jquery-ui" )}
+
+    ## But make sure styles for the layout take precedence
+    ${parent.stylesheets()}
+
+    <style type="text/css">
+    body { margin: 0; padding: 0; overflow: hidden; }
+
+    div.toolTitleDisabled {
+        padding-top: 5px;
+        padding-bottom: 5px;
+        margin-left: 16px;
+        margin-right: 10px;
+        display: list-item;
+        list-style: square outside;
+        font-style: italic;
+        color: gray;
+    }
+    div.toolTitleNoSectionDisabled {
+      padding-bottom: 0px;
+      font-style: italic;
+      color: gray;
+    }
+    div.toolFormRow {
+        position: relative;
+    }
+
+    .right-content {
+        margin: 3px;
+    }
+
+    canvas { position: absolute; z-index: 10; }
+    canvas.dragging { position: absolute; z-index: 1000; }
+    .input-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_open.png')}); position: absolute; top: 50%; margin-top: -6px; left: -6px; z-index: 1500; }
+    .output-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_open.png')}); position: absolute; top: 50%; margin-top: -6px; right: -6px; z-index: 1500; }
+    .drag-terminal { width: 12px; height: 12px; background: url(${h.url_for('/static/style/workflow_circle_drag.png')}); position: absolute; z-index: 1500; }
+    .input-terminal-active { background: url(${h.url_for('/static/style/workflow_circle_green.png')}); }
+    ## .input-terminal-hover { background: yellow; border: solid black 1px; }
+    .unselectable { -moz-user-select: none; -khtml-user-select: none; user-select: none; }
+    img { border: 0; }
+
+    div.buttons img {
+    width: 16px; height: 16px;
+    cursor: pointer;
+    }
+
+    ## Extra styles for the representation of a tool on the canvas (looks like
+    ## a tiny tool form)
+    div.toolFormInCanvas {
+        z-index: 100;
+        position: absolute;
+        ## min-width: 130px;
+        margin: 6px;
+    }
+
+    div.toolForm-active {
+        z-index: 1001;
+        border: solid #8080FF 4px;
+        margin: 3px;
+    }
+
+    div.toolFormTitle {
+        cursor: move;
+        min-height: 16px;
+    }
+
+    div.titleRow {
+        font-weight: bold;
+        border-bottom: dotted gray 1px;
+        margin-bottom: 0.5em;
+        padding-bottom: 0.25em;
+    }
+    div.form-row {
+      position: relative;
+    }
+
+    div.tool-node-error div.toolFormTitle {
+        background: #FFCCCC;
+        border-color: #AA6666;
+    }
+    div.tool-node-error {
+        border-color: #AA6666;
+    }
+
+    #canvas-area {
+        position: absolute;
+        top: 0; left: 305px; bottom: 0; right: 0;
+        border: solid red 1px;
+        overflow: hidden;
+    }
+
+    .form-row {
+    }
+
+    div.toolFormInCanvas div.toolFormBody {
+        padding: 0;
+    }
+    .form-row-clear {
+        clear: both;
+    }
+
+    div.rule {
+        height: 0;
+        border: none;
+        border-bottom: dotted black 1px;
+        margin: 0 5px;
+    }
+
+    .callout {
+        position: absolute;
+        z-index: 10000;
+    }
+
+    .pjaForm {
+        margin-bottom:10px;
+    }
+
+    .pjaForm .toolFormBody{
+        padding:10px;
+    }
+
+    .pjaForm .toolParamHelp{
+        padding:5px;
+    }
+
+    .panel-header-button-group {
+        margin-right: 5px;
+        padding-right: 5px;
+        border-right: solid gray 1px;
+    }
+
+    </style>
+</%def>
+
+## Render a tool in the tool panel
+<%def name="render_tool( tool, section )">
+    %if not tool.hidden:
+        %if tool.is_workflow_compatible:
+            %if section:
+                <div class="toolTitle">
+            %else:
+                <div class="toolTitleNoSection">
+            %endif
+                %if "[[" in tool.description and "]]" in tool.description:
+                    ## Turn the [[...]] marker in the description into the add-to-canvas link.
+                    ${tool.description.replace( '[[', '<a id="link-%s" href="#" onclick="workflow_view.add_node_for_tool( \'%s\', \'%s\' )">' % ( tool.id, tool.id, tool.name ) ).replace( "]]", "</a>" )}
+                %elif tool.name:
+                    <a id="link-${tool.id}" href="#" onclick="workflow_view.add_node_for_tool( '${tool.id}', '${tool.name}' )">${tool.name}</a> ${tool.description}
+                %else:
+                    <a id="link-${tool.id}" href="#" onclick="workflow_view.add_node_for_tool( '${tool.id}', '${tool.name}' )">${tool.description}</a>
+                %endif
+            </div>
+        %else:
+            %if section:
+                <div class="toolTitleDisabled">
+            %else:
+                <div class="toolTitleNoSectionDisabled">
+            %endif
+                %if "[[" in tool.description and "]]" in tool.description:
+                    ${tool.description.replace( '[[', '' ).replace( "]]", "" )}
+                %elif tool.name:
+                    ${tool.name} ${tool.description}
+                %else:
+                    ${tool.description}
+                %endif
+            </div>
+        %endif
+    %endif
+</%def>
+
+## Render a label in the tool panel
+<%def name="render_label( label )">
+    <div class="toolPanelLabel" id="title_${label.id}">
+        <span>${label.text}</span>
+    </div>
+</%def>
+
+<%def name="overlay(visible=False)">
+    ${parent.overlay( "Loading workflow editor...",
+                      "<div class='progress progress-striped progress-info active'><div class='progress-bar' style='width: 100%;'></div></div>", self.overlay_visible )}
+</%def>
+
+
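+## Render one section of workflow modules in the tool panel; clicking an entry
+## adds a node for that module to the canvas.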
+<%def name="render_module_section(module_section)">
+    <div class="toolSectionTitle" id="title___workflow__${module_section['name']}__">
+        <span>${module_section["title"]}</span>
+    </div>
+    <div id="__workflow__${module_section['name']}__" class="toolSectionBody">
+        <div class="toolSectionBg">
+            %for module in module_section["modules"]:
+                <div class="toolTitle">
+                    <a href="#" onclick="workflow_view.add_node_for_module( '${module['name']}', '${module['title']}' )">
+                        ${module['description']}
+                    </a>
+                </div>
+            %endfor
+        </div>
+    </div>
+</%def>
+
+<%def name="left_panel()">
+    <%
+       from galaxy.tools import Tool
+       from galaxy.tools.toolbox import ToolSection, ToolSectionLabel
+    %>
+
+    <div class="unified-panel-header" unselectable="on">
+        <div class='unified-panel-header-inner'>
+            ${n_('Tools')}
+        </div>
+    </div>
+
+    <div class="unified-panel-body" style="overflow: auto;">
+        <div class="toolMenuContainer">
+            <div class="toolMenu">
+                <%
+                    from galaxy.workflow.modules import load_module_sections
+                    module_sections = load_module_sections( trans )
+                %>
+                %if trans.app.config.message_box_visible:
+                    <div id="tool-search" style="top: 95px;">
+                %else:
+                    <div id="tool-search">
+                %endif
+                    <input type="text" name="query" placeholder="search tools" id="tool-search-query" class="search-query parent-width" />
+                    <img src="${h.url_for('/static/images/loading_small_white_bg.gif')}" id="search-spinner" class="search-spinner" />
+                </div>
+
+                <div class="toolSectionWrapper">
+                    ${render_module_section(module_sections['inputs'])}
+                </div>
+
+                <div class="toolSectionList">
+                    %for val in app.toolbox.tool_panel_contents( trans ):
+                        <div class="toolSectionWrapper">
+                        %if isinstance( val, Tool ):
+                            ${render_tool( val, False )}
+                        %elif isinstance( val, ToolSection ) and val.elems:
+                        <% section = val %>
+                            <div class="toolSectionTitle" id="title_${section.id}">
+                                <span>${section.name}</span>
+                            </div>
+                            <div id="${section.id}" class="toolSectionBody">
+                                <div class="toolSectionBg">
+                                    %for section_key, section_val in section.elems.items():
+                                        %if isinstance( section_val, Tool ):
+                                            ${render_tool( section_val, True )}
+                                        %elif isinstance( section_val, ToolSectionLabel ):
+                                            ${render_label( section_val )}
+                                        %endif
+                                    %endfor
+                                </div>
+                            </div>
+                        %elif isinstance( val, ToolSectionLabel ):
+                            ${render_label( val )}
+                        %endif
+                        </div>
+                    %endfor
+                    ## Data Manager Tools
+                    %if trans.user_is_admin() and trans.app.data_managers.data_managers:
+                       <div> </div>
+                       <div class="toolSectionWrapper">
+                           <div class="toolSectionTitle" id="title___DATA_MANAGER_TOOLS__">
+                               <span>Data Manager Tools</span>
+                           </div>
+                           <div id="__DATA_MANAGER_TOOLS__" class="toolSectionBody">
+                               <div class="toolSectionBg">
+                                   %for data_manager_id, data_manager_val in trans.app.data_managers.data_managers.items():
+                                       ${ render_tool( data_manager_val.tool, True ) }
+                                   %endfor
+                               </div>
+                           </div>
+                       </div>
+                    %endif
+                    ## End Data Manager Tools
+                </div>
+                <div> </div>
+                %for section_name, module_section in module_sections.items():
+                    %if section_name != "inputs":
+                        ${render_module_section(module_section)}
+                    %endif
+                %endfor
+
+                ## Feedback when search returns no results.
+                <div id="search-no-results" style="display: none; padding-top: 5px">
+                    <em><strong>Search did not match any tools.</strong></em>
+                </div>
+
+            </div>
+        </div>
+    </div>
+</%def>
+
+<%def name="center_panel()">
+
+    <div class="unified-panel-header" unselectable="on">
+        <div class="unified-panel-header-inner" style="float: right">
+            <a id="workflow-options-button" class="panel-header-button" href="#"><span class="fa fa-cog"></span></a>
+        </div>
+        <div class="unified-panel-header-inner">
+            Workflow Canvas | ${h.to_unicode( stored.name ) | h}
+        </div>
+    </div>
+    <div class="unified-panel-body">
+        <div id="canvas-viewport" style="width: 100%; height: 100%; position: absolute; overflow: hidden; background: #EEEEEE; background: white url(${h.url_for('/static/images/light_gray_grid.gif')}) repeat;">
+            <div id="canvas-container" style="position: absolute; width: 100%; height: 100%;"></div>
+        </div>
+        <div id="overview-border" style="position: absolute; width: 150px; height: 150px; right: 20000px; bottom: 0px; border-top: solid gray 1px; border-left: solid grey 1px; padding: 7px 0 0 7px; background: #EEEEEE no-repeat url(${h.url_for('/static/images/resizable.png')}); z-index: 20000; overflow: hidden; max-width: 300px; max-height: 300px; min-width: 50px; min-height: 50px">
+            <div style="position: relative; overflow: hidden; width: 100%; height: 100%; border-top: solid gray 1px; border-left: solid grey 1px;">
+                <div id="overview" style="position: absolute;">
+                    <canvas width="0" height="0" style="background: white; width: 100%; height: 100%;" id="overview-canvas"></canvas>
+                    <div id="overview-viewport" style="position: absolute; width: 0px; height: 0px; border: solid blue 1px; z-index: 10;"></div>
+                </div>
+            </div>
+        </div>
+        <div id='workflow-parameters-box' style="display:none; position: absolute; /*width: 150px; height: 150px;*/ right: 0px; top: 0px; border-bottom: solid gray 1px; border-left: solid grey 1px; padding: 7px; background: #EEEEEE; z-index: 20000; overflow: hidden; max-width: 300px; max-height: 300px; /*min-width: 50px; min-height: 50px*/">
+            <div style="margin-bottom:5px;"><b>Workflow Parameters</b></div>
+            <div id="workflow-parameters-container">
+            </div>
+        </div>
+        <div id="close-viewport" style="border-left: 1px solid #999; border-top: 1px solid #999; background: #ddd url(${h.url_for('/static/images/overview_arrows.png')}) 12px 0px; position: absolute; right: 0px; bottom: 0px; width: 12px; height: 12px; z-index: 25000;"></div>
+    </div>
+
+</%def>
+
+<%def name="right_panel()">
+    <div class="unified-panel-header" unselectable="on">
+        <div class="unified-panel-header-inner">
+            Details
+        </div>
+    </div>
+    <div class="unified-panel-body workflow-right" style="overflow: auto;">
+        ## Div for elements to modify workflow attributes.
+        <div id="edit-attributes" class="metadataForm right-content">
+            <div class="metadataFormTitle">Edit Workflow Attributes</div>
+            <div class="metadataFormBody">
+            ## Workflow name.
+            <div id="workflow-name-area" class="form-row">
+                <label>Name:</label>
+                <span id="workflow-name" class="editable-text" title="Click to rename workflow">${h.to_unicode( stored.name ) | h}</span>
+            </div>
+            ## Workflow tags.
+            <%namespace file="/tagging_common.mako" import="render_individual_tagging_element" />
+            <div class="form-row">
+                <label>
+                    Tags:
+                </label>
+                    <div style="float: left; width: 225px; margin-right: 10px; border-style: inset; border-width: 1px; margin-left: 2px">
+                        <style>
+                            .tag-area {
+                                border: none;
+                            }
+                        </style>
+                        ${render_individual_tagging_element(user=trans.get_user(), tagged_item=stored, elt_context="edit_attributes.mako", use_toggle_link=False, input_size="20")}
+                    </div>
+                    <div class="toolParamHelp">Apply tags to make it easy to search for and find items with the same tag.</div>
+                </div>
+                ## Workflow annotation.
+                ## Annotation elt.
+                <div id="workflow-annotation-area" class="form-row">
+                    <label>Annotation / Notes:</label>
+                    <div id="workflow-annotation" class="editable-text" title="Click to edit annotation">
+                    %if annotation:
+                        ${h.to_unicode( annotation ) | h}
+                    %else:
+                        <em>Describe or add notes to workflow</em>
+                    %endif
+                    </div>
+                    <div class="toolParamHelp">Add an annotation or notes to a workflow; annotations are available when a workflow is viewed.</div>
+                </div>
+            </div>
+        </div>
+
+        ## Div where tool details are loaded and modified.
+        <div id="right-content" class="right-content"></div>
+
+        ## Workflow output tagging
+        <div style="display:none;" id="workflow-output-area" class="metadataForm right-content">
+            <div class="metadataFormTitle">Edit Workflow Outputs</div>
+            <div class="metadataFormBody"><div class="form-row">
+                <div class="toolParamHelp">Tag step outputs to indicate the final dataset(s) to be generated by running this workflow.</div>
+                <div id="output-fill-area"></div>
+            </div></div>
+        </div>
+
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/workflow/editor_generic_form.mako b/templates/webapps/galaxy/workflow/editor_generic_form.mako
new file mode 100644
index 0000000..32ad460
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/editor_generic_form.mako
@@ -0,0 +1,71 @@
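+## Generic form for non-tool workflow modules: posts the module type, label and
+## field values back to the workflow editor's editor_form_post action.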
+<form name="${form.name}" action="${h.url_for( controller='workflow', action='editor_form_post' )}" method="post">
+    <%
+    from xml.sax.saxutils import escape
+    label = module.label
+    if label is not None:
+        title = label
+    else:
+        title = form.title
+    %>
+    <div class="ui-portlet-narrow">
+        <div class="portlet-header">
+          <div class="portlet-title">
+            <span class="portlet-title-text"><b>${title}</b></span>
+          </div>
+        </div>
+        <div class="portlet-content">
+          <div class="content">
+            <div class="ui-margin-top"></div>
+            <div>
+            <input type="hidden" name="type" value="${module.type}" />
+            <input type="hidden" name="label" value="${escape(label or '')}" />
+            <table class="ui-table-plain">
+              <thead></thead>
+              <tbody>
+            %if form.inputs:
+              %for input in form.inputs:
+                  <%
+                  cls = "section-row"
+                  if input.error:
+                      cls += " form-row-error"
+                  extra_attributes = ""
+                  for key, value in getattr( input, "extra_attributes", {} ).iteritems():
+                      extra_attributes += " %s=\"%s\"" % ( key, value )
+                  type_attribute = ""
+                  if input.type:
+                    type_attribute = "type=\"input.type\""
+                  %>
+                  <tr class="${cls}"><td><div class="ui-table-form-element">
+                    <div class="ui-table-form-title">
+                        ${input.label}:
+                    </div>
+                    <div class="ui-table-form-field" style="display: block;">
+                        <input ${type_attribute} name="${input.name | h}" value="${input.value | h}" size="30" ${extra_attributes}>
+                        %if hasattr( input, "body_html" ):
+                              ${input.body_html()}
+                            </input>
+                        %endif
+                      %if input.error:
+                      <div style="float: left; color: red; font-weight: bold; padding-top: 1px; padding-bottom: 3px;">
+                          <div style="width: 300px;"><img style="vertical-align: middle;" src="${h.url_for('/static/style/error_small.png')}"> <span style="vertical-align: middle;">${input.error}</span></div>
+                      </div>
+                      %endif
+  
+                      %if input.help:
+                      <div class="ui-table-form-info">
+                          ${input.help}
+                      </div>
+                      %endif
+                    </div><!-- ui-table-form-field -->
+  
+                  </div></td></tr>
+              %endfor
+            %else:
+              <div class="form-row"><i>No options</i></div>
+            %endif
+              </tbody>
+            </table>
+          </div> <!-- content -->
+        </div><!-- portlet-content -->
+    </div>
+</form>
diff --git a/templates/webapps/galaxy/workflow/editor_tool_form.mako b/templates/webapps/galaxy/workflow/editor_tool_form.mako
new file mode 100644
index 0000000..eaaf955
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/editor_tool_form.mako
@@ -0,0 +1,15 @@
+<%
+    ## TEMPORARY: create tool dictionary in mako while both tool forms are in use.
+    ## This avoids making two separate requests since the classic form requires the mako anyway.
+    from galaxy.tools.parameters import params_to_incoming
+    incoming = {}
+    params_to_incoming( incoming, tool.inputs, module.state.inputs, trans.app )
+    self.form_config = tool.to_json(trans, incoming, workflow_building_mode=True)
+    self.form_config.update({
+        'id'                : tool.id,
+        'job_id'            : trans.security.encode_id( job.id ) if job else None,
+        'history_id'        : trans.security.encode_id( trans.history.id ),
+        'container'         : '#right-content'
+    })
+%>
+${ h.dumps(self.form_config) }
\ No newline at end of file
diff --git a/templates/webapps/galaxy/workflow/embed.mako b/templates/webapps/galaxy/workflow/embed.mako
new file mode 100644
index 0000000..2297b99
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/embed.mako
@@ -0,0 +1,24 @@
+<%inherit file="/embed_base.mako"/>
+<%!
+    from galaxy.web.framework.helpers import iff
+%>
+
+<%def name="render_item_links( workflow )">
+	<a href="${h.url_for( controller='workflow', action='display_by_username_and_slug', username=workflow.user.username,
+                          slug=item.slug, format='json-download' )}" title="Save Workflow" class="icon-button disk"></a>
+	## FIXME: find and set appropriate icon for linking to workflow.
+	<!--
+	<a href="${h.url_for( controller='workflow', action='display_by_username_and_slug', username=workflow.user.username,
+                          slug=item.slug, format='json' )}" title="Workflow Link for Import" class="icon-button TODO ">
+	-->
+	${parent.render_item_links( workflow )}
+</%def>
+
+<%def name="render_summary_content( workflow, steps )">
+
+##    <ul>
+##        <% num_steps = len ( steps ) %>
+##        <li>${num_steps} step${iff( num_steps != 1, "s", "" )}
+##        <li>Operations: ...
+##    </ul>
+</%def>
diff --git a/templates/webapps/galaxy/workflow/import.mako b/templates/webapps/galaxy/workflow/import.mako
new file mode 100644
index 0000000..411f1ff
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/import.mako
@@ -0,0 +1,69 @@
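+## Import form: accepts a workflow URL or an uploaded workflow file, and links
+## to myExperiment for browsing published Galaxy workflows.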
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "workflow" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="workflow"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">Import Galaxy workflow</%def>
+
+<%def name="center_panel()">
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+    <div class="toolForm"> 
+        <div class="toolFormTitle">Import Galaxy workflow</div>
+        <div class="toolFormBody">
+            <form name="import_workflow" id="import_workflow" action="${h.url_for( controller='workflow', action='import_workflow' )}" enctype="multipart/form-data" method="POST">
+                <div class="form-row">
+                    <label>Galaxy workflow URL:</label> 
+                    <input type="text" name="url" value="${url | h}" size="40">
+                    <div class="toolParamHelp" style="clear: both;">
+                        If the workflow is accessible via a URL, enter the URL above and click <b>Import</b>.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Galaxy workflow file:</label>
+                    <div class="form-row-input">
+                        <input type="file" name="file_data"/>
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        If the workflow is in a file on your computer, choose it and then click <b>Import</b>.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" class="primary-button" name="import_button" value="Import">
+                </div>
+            </form>
+            <hr/>
+            <div class="form-row">
+                <label>Import a Galaxy workflow from myExperiment:</label>
+                <div class="form-row-input">
+                    <a href="${h.url_for( myexperiment_target_url )}">
+                        Visit myExperiment
+                    </a>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    Click the link above to visit myExperiment and browse for Galaxy workflows.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+        </div>
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/workflow/item_content.mako b/templates/webapps/galaxy/workflow/item_content.mako
new file mode 100644
index 0000000..1414e24
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/item_content.mako
@@ -0,0 +1,3 @@
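+## Thin wrapper so embedded item content can reuse the workflow display renderer.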
+<%namespace file="/workflow/display.mako" import="*" />
+
+${render_item( item, item_data )}
diff --git a/templates/webapps/galaxy/workflow/list.mako b/templates/webapps/galaxy/workflow/list.mako
new file mode 100644
index 0000000..35c373e
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/list.mako
@@ -0,0 +1,122 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="workflow"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">Workflow home</%def>
+
+<%def name="center_panel()">
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            %if message:
+                <%
+                    try:
+                        status
+                    except NameError:
+                        status = "done"
+                %>
+                <p />
+                <div class="${status}message">
+                    ${h.to_unicode( message )}
+                </div>
+            %endif
+
+            <h2>Your workflows</h2>
+
+            <ul class="manage-table-actions">
+                <li>
+                    <a class="action-button" href="${h.url_for( controller='workflow', action='create' )}">
+                        <img src="${h.url_for('/static/images/silk/add.png')}" />
+                        <span>Create new workflow</span>
+                    </a>
+                </li>
+                <li>
+                    <a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow' )}">
+                        <img src="${h.url_for('/static/images/fugue/arrow-090.png')}" />
+                        <span>Upload or import workflow</span>
+                    </a>
+                </li>
+            </ul>
+
+            %if workflows:
+                <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" style="width:100%;">
+                    <tr class="header">
+                        <th>Name</th>
+                        <th># of Steps</th>
+                        ## <th>Last Updated</th>
+                        <th></th>
+                    </tr>
+                    %for i, workflow in enumerate( workflows ):
+                        <tr>
+                            <td>
+                                <div class="menubutton" style="float: left;" id="wf-${i}-popup">
+                                ${h.to_unicode( workflow.name ) | h}
+                                </div>
+                            </td>
+                            <td>${len(workflow.latest_workflow.steps)}</td>
+                            ## <td>${str(workflow.update_time)[:19]}</td>
+                            <td>
+                                <div popupmenu="wf-${i}-popup">
+                                <a class="action-button" href="${h.url_for( controller='workflow', action='editor', id=trans.security.encode_id( workflow.id ) )}" target="_parent">Edit</a>
+                                <a class="action-button" href="${h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id( workflow.id ) )}" target="_parent">Run</a>
+                                <a class="action-button" href="${h.url_for( controller='workflow', action='sharing', id=trans.security.encode_id( workflow.id ) )}">Share or Download</a>
+                                <a class="action-button" href="${h.url_for( controller='workflow', action='copy', id=trans.security.encode_id( workflow.id ) )}">Copy</a>
+                                <a class="action-button" href="${h.url_for( controller='workflow', action='rename', id=trans.security.encode_id( workflow.id ) )}">Rename</a>
+                                <a class="action-button" href="${h.url_for( controller='workflow', action='display_by_id', id=trans.security.encode_id( workflow.id ) )}" target="_top">View</a>
+                                <a class="action-button" confirm="Are you sure you want to delete workflow '${h.to_unicode( workflow.name ) | h}'?" href="${h.url_for( controller='workflow', action='delete', id=trans.security.encode_id( workflow.id ) )}">Delete</a>
+                                </div>
+                            </td>
+                        </tr>
+                    %endfor
+                </table>
+            %else:
+                You have no workflows.
+            %endif
+
+            <h2>Workflows shared with you by others</h2>
+
+            %if shared_by_others:
+                <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+                    <tr class="header">
+                        <th>Name</th>
+                        <th>Owner</th>
+                        <th># of Steps</th>
+                        <th></th>
+                    </tr>
+                    %for i, association in enumerate( shared_by_others ):
+                        <% workflow = association.stored_workflow %>
+                        <tr>
+                            <td>
+                                <a class="menubutton" id="shared-${i}-popup" href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id(workflow.id) )}">${h.to_unicode( workflow.name ) | h}</a>
+                            </td>
+                            <td>${workflow.user.email}</td>
+                            <td>${len(workflow.latest_workflow.steps)}</td>
+                            <td>
+                                <div popupmenu="shared-${i}-popup">
+                                    <a class="action-button" href="${h.url_for( controller='workflow', action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug )}" target="_top">View</a>
+                                    <a class="action-button" href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id( workflow.id ) )}">Run</a>
+                                    <a class="action-button" href="${h.url_for( controller='workflow', action='copy', id=trans.security.encode_id( workflow.id ) )}">Copy</a>
+                                    <a class="action-button" confirm="Are you sure you want to remove the shared workflow '${h.to_unicode( workflow.name ) | h}'?" href="${h.url_for( controller='workflow', action='sharing', unshare_me=True, id=trans.security.encode_id( workflow.id ))}">Remove</a>
+                                </div>
+                            </td>
+                        </tr>
+                    %endfor
+                </table>
+            %else:
+                No workflows have been shared with you.
+            %endif
+
+            <h2>Other options</h2>
+
+            <a class="action-button" href="${h.url_for( controller='workflow', action='configure_menu' )}">
+                <span>Configure your workflow menu</span>
+            </a>
+        </div>
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/workflow/list_for_run.mako b/templates/webapps/galaxy/workflow/list_for_run.mako
new file mode 100644
index 0000000..ced2c24
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/list_for_run.mako
@@ -0,0 +1,66 @@
+<%inherit file="/base.mako"/>
+
+<%def name="title()">Workflow home</%def>
+
+<h2>Your workflows</h2>
+
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='workflow', action='index' )}" target="_parent">
+            <span>Switch to workflow management view</span>
+        </a>
+    </li>
+</ul>
+  
+%if workflows:
+    <table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+        <tr class="header">
+            <th>Name</th>
+            <th># of Steps</th>
+            ## <th>Last Updated</th>
+            <th></th>
+        </tr>
+        %for i, workflow in enumerate( workflows ):
+            <tr>
+                <td>
+                    <a href="${h.url_for(controller='workflow', action='run', id=trans.security.encode_id(workflow.id) )}">${h.to_unicode( workflow.name ) | h}</a>
+                    <a id="wf-${i}-popup" class="popup-arrow" style="display: none;">▼</a>
+                </td>
+                <td>${len(workflow.latest_workflow.steps)}</td>
+                ## <td>${str(workflow.update_time)[:19]}</td>
+            </tr>    
+        %endfor
+    </table>
+%else:
+
+    You have no workflows.
+
+%endif
+
+<h2>Workflows shared with you by others</h2>
+
+%if shared_by_others:
+    <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+        <tr class="header">
+            <th>Name</th>
+            <th>Owner</th>
+            <th># of Steps</th>
+            <th></th>
+        </tr>
+        %for i, association in enumerate( shared_by_others ):
+            <% workflow = association.stored_workflow %>
+            <tr>
+                <td>
+                    <a href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id(workflow.id) )}">${workflow.name | h}</a>
+                    <a id="shared-${i}-popup" class="popup-arrow" style="display: none;">▼</a>
+                </td>
+                <td>${workflow.user.email | h}</td>
+                <td>${len(workflow.latest_workflow.steps)}</td>
+            </tr>    
+        %endfor
+    </table>
+%else:
+
+    No workflows have been shared with you.
+
+%endif
diff --git a/templates/webapps/galaxy/workflow/list_published.mako b/templates/webapps/galaxy/workflow/list_published.mako
new file mode 100644
index 0000000..ff44b3b
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/list_published.mako
@@ -0,0 +1,32 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.active_view="shared"
+    self.message_box_visible=False
+%>
+</%def>
+
+<%def name="title()">
+    Galaxy | Published Workflows
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        .grid td {
+            min-width: 100px;
+        }
+    </style>
+</%def>
+
+<%def name="center_panel()">
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            <!-- embedded grid -->
+            ${h.to_unicode( embedded_grid )}
+        </div>
+    </div>
+</%def>
diff --git a/templates/webapps/galaxy/workflow/myexp_export.mako b/templates/webapps/galaxy/workflow/myexp_export.mako
new file mode 100644
index 0000000..d708299
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/myexp_export.mako
@@ -0,0 +1,21 @@
+##
+## Generate the XML for a myExperiment 'upload workflow' request.
+##
+<%!
+    from xml.sax.saxutils import escape
+    import textwrap, base64 
+%>
+
+## Generate request.
+<?xml version="1.0"?>
+<workflow>
+  <title>${workflow_name | h}</title>
+  <description>${workflow_description}</description>
+  <type>Galaxy</type>
+  <content encoding="base64" type="binary">
+      ${textwrap.fill( base64.b64encode( workflow_content ), 64 )}
+  </content>
+  <svg encoding="base64">
+      ${textwrap.fill( base64.b64encode( workflow_svg ), 64 )}
+  </svg>
+</workflow>
diff --git a/templates/webapps/galaxy/workflow/run.mako b/templates/webapps/galaxy/workflow/run.mako
new file mode 100644
index 0000000..2878269
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -0,0 +1,10 @@
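+## Minimal page that boots the composite tool form for running a workflow from
+## the serialized workflow_dict.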
+<%inherit file="/base.mako"/>
+${h.js("libs/bibtex", "libs/jquery/jquery-ui")}
+${h.css('jquery-ui/smoothness/jquery-ui')}
+<script>
+    require(['mvc/tool/tool-form-composite'], function( ToolForm ) {
+        $(function() {
+            var form = new ToolForm.View(${ h.dumps( workflow_dict ) });
+        });
+    });
+</script>
\ No newline at end of file
diff --git a/templates/webapps/galaxy/workflow/sharing.mako b/templates/webapps/galaxy/workflow/sharing.mako
new file mode 100644
index 0000000..d88bfa7
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/sharing.mako
@@ -0,0 +1,320 @@
+##
+## Template for "Share or Download"
+##
+
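+## Inherit from the panel layout when use_panels is set, otherwise from the
+## bare base template.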
+<%!
+    def inherit(context):
+        if context.get('use_panels', False):
+            if context.get('webapp'):
+                app_name = context.get('webapp')
+            elif context.get('app'):
+                app_name = context.get('app').name
+            else:
+                app_name = 'galaxy'
+            return '/webapps/%s/base_panels.mako' % app_name
+        else:
+            return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%namespace file="/display_common.mako" import="*" />
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/slug_editing_js.mako" import="*" />
+
+
+##
+## Page methods.
+##
+
+<%def name="init()">
+<%
+    self.has_left_panel=False
+    self.has_right_panel=False
+    self.message_box_visible=False
+    self.overlay_visible=False
+    self.message_box_class=""
+    self.active_view=""
+    self.body_class=""
+
+    # Get class name strings.
+    self.item_class_name = get_class_display_name( item.__class__ )
+    self.item_class_name_lc = self.item_class_name.lower()
+    self.item_class_plural_name = get_class_plural_display_name( item.__class__ )
+    self.item_class_plural_name_lc = self.item_class_plural_name.lower()
+    self.controller = get_controller_name(item)
+    self.active_view="workflow"
+%>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${slug_editing_js(item)}
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    <style>
+        ## Put some whitespace before each section header.
+        h3 {
+            margin-top: 1em;
+        }
+        input.action-button {
+            margin-left: 0;
+        }
+        ## If page is displayed in panels, pad from edges for readability.
+        %if context.get('use_panels'):
+        div#center {
+            padding: 10px;
+        }
+        %endif
+        .display-url {
+            margin: 0.5em 0em 0.5em 0.5em;
+            font-weight: bold;
+        }
+        .sharing-section{
+            margin-top: 1em;
+        }
+    </style>
+</%def>
+
+<%def name="center_panel()">
+    ${self.body()}
+</%def>
+
+
+## Share and Publish section
+
+<%def name="render_sharing(item)">
+    ## Set use_panels var for use in page's URLs.
+    <% use_panels = context.get('use_panels', False)  %>
+    <% controller_name = get_controller_name( item ) %>
+    ## Render message.
+    %if message:
+        ${render_msg( message, status )}
+    %endif
+    <%
+        #
+        # Setup and variables needed for page.
+        #
+        # Get class name strings.
+        item_class_name = get_class_display_name( item.__class__ )
+        item_class_name_lc = item_class_name.lower()
+        item_class_plural_name = get_class_plural_display_name( item.__class__ )
+        item_class_plural_name_lc = item_class_plural_name.lower()
+
+        # Get item name.
+        item_name = get_item_name(item)
+    %>
+    ## Require that user have a public username before sharing or publishing an item.
+    %if trans.get_user().username is None or trans.get_user().username == "":
+        <p>To make a ${item_class_name_lc} accessible via link or publish it, you must create a public username:</p>
+
+        <form action="${h.url_for( controller=controller_name, action='set_public_username', id=trans.security.encode_id( item.id ) )}"
+                method="POST">
+            <div class="form-row">
+                <label>Public Username:</label>
+                <div class="form-row-input">
+                    <input type="text" name="username" size="40"/>
+                </div>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input class="action-button" type="submit" name="Set Username" value="Set Username"/>
+            </div>
+        </form>
+    %else:
+        ## User has a public username, so private sharing and publishing options.
+        <h3>Share</h3>
+        <div>
+            %if item.importable:
+                <%
+                    item_status = "accessible via link"
+                    if item.published:
+                        item_status = item_status + " and published"
+                %>
+                This ${item_class_name_lc} is currently <strong>${item_status}</strong>.
+                <div>
+                    Anyone can view and import this ${item_class_name_lc} by visiting the following URL:
+                    <blockquote>
+                        <%
+                            url = h.url_for( controller=controller_name, action='display_by_username_and_slug', username=trans.get_user().username, slug=item.slug, qualified=True )
+                            url_parts = url.split("/")
+                        %>
+                        <a id="item-url" href="${url}" target="_top">${url}</a>
+                        <span id="item-url-text" style="display: none">
+                            ${"/".join( url_parts[:-1] )}/<span id='item-identifier'>${url_parts[-1]}</span>
+                        </span>
+
+                        <a href="#" id="edit-identifier"><img src="${h.url_for('/static/images/fugue/pencil.png')}"/></a>
+                    </blockquote>
+
+                    %if item.published:
+                        This ${item_class_name_lc} is publicly listed and searchable in Galaxy's <a href='${h.url_for( controller=controller_name, action='list_published' )}' target="_top">Published ${item_class_plural_name}</a> section.
+                    %endif
+                </div>
+                <div>
+                    <form action="${h.url_for( controller=controller_name, action='sharing', id=trans.security.encode_id( item.id ) )}" method="POST">
+                        %if not item.published:
+                            ## Item is importable but not published. User can disable importable or publish.
+                            <input class="action-button" type="submit" name="disable_link_access" value="Disable Access to ${item_class_name} Link">
+                            <div class="toolParamHelp">Disables ${item_class_name_lc}'s link so that it is not accessible.</div>
+                            <br />
+                            <input class="action-button" type="submit" name="publish" value="Publish ${item_class_name}" method="POST">
+                            <div class="toolParamHelp">Publishes the ${item_class_name_lc} to Galaxy's <a href='${h.url_for( controller=controller_name, action='list_published' )}' target="_top">Published ${item_class_plural_name}</a> section, where it is publicly listed and searchable.</div>
+                        <br />
+                        %else: ## item.published == True
+                            ## Item is importable and published. User can unpublish or disable import and unpublish.
+                            <input class="action-button" type="submit" name="unpublish" value="Unpublish ${item_class_name}">
+                            <div class="toolParamHelp">Removes this ${item_class_name_lc} from Galaxy's <a href='${h.url_for(controller=controller_name, action='list_published' )}' target="_top">Published ${item_class_plural_name}</a> section so that it is not publicly listed or searchable.</div>
+                            <br />
+                            <input class="action-button" type="submit" name="disable_link_access_and_unpublish" value="Disable Access to ${item_class_name} via Link and Unpublish">
+                            <div class="toolParamHelp">Disables this ${item_class_name_lc}'s link so that it is not accessible and removes ${item_class_name_lc} from Galaxy's <a href='${h.url_for(controller=controller_name, action='list_published' )}' target='_top'>Published ${item_class_plural_name}</a> section so that it is not publicly listed or searchable.</div>
+                        %endif
+                    </form>
+                </div>
+            %else:
+                <p>This ${item_class_name_lc} is currently restricted so that only you and the users listed below can access it.</p>
+                <form action="${h.url_for(controller=controller_name, action='sharing', id=trans.security.encode_id(item.id) )}" method="POST">
+                    <input class="action-button" type="submit" name="make_accessible_via_link" value="Make ${item_class_name} Accessible via Link">
+                    <div class="toolParamHelp">Generates a web link that you can share with other people so that they can view and import the ${item_class_name_lc}.</div>
+
+                    <br />
+                    <input class="action-button" type="submit" name="make_accessible_and_publish" value="Make ${item_class_name} Accessible and Publish" method="POST">
+                    <div class="toolParamHelp">
+                        Makes the ${item_class_name_lc} accessible via link (see above) and publishes the ${item_class_name_lc} to Galaxy's <a href='${h.url_for(controller=controller_name, action='list_published' )}' target='_top'>Published ${item_class_plural_name}</a> section, where it is publicly listed and searchable.
+                    </div>
+                </form>
+            %endif
+            ##
+            ## Sharing with Galaxy users.
+            ##
+            <div class="sharing-section">
+                <div>
+                    %if item.users_shared_with:
+                        <p>
+                            The following users will see this ${item_class_name_lc} in their ${item_class_name_lc} list and will be
+                            able to view, import, and run it.
+                        </p>
+
+                        <table class="colored" border="0" cellspacing="0" cellpadding="0" width="100%">
+                            <tr class="header">
+                                <th>Email</th>
+                                <th></th>
+                            </tr>
+                            %for i, association in enumerate( item.users_shared_with ):
+                                <% user = association.user %>
+                                <tr>
+                                    <td>
+                                        <div class="menubutton popup" id="user-${i}-popup">${user.email}</div>
+                                    </td>
+                                    <td>
+                                        <div popupmenu="user-${i}-popup">
+                                        <a class="action-button" href="${h.url_for(controller=controller_name, action='sharing', id=trans.security.encode_id( item.id ), unshare_user=trans.security.encode_id( user.id ), use_panels=use_panels )}">Unshare</a>
+                                        </div>
+                                    </td>
+                                </tr>
+                            %endfor
+                        </table>
+
+                        <a class="action-button"
+                           href="${h.url_for(controller=controller_name, action='share', id=trans.security.encode_id(item.id), use_panels=use_panels )}">
+                            <span>Share with another user</span>
+                        </a>
+                    %else:
+                        <p>You have not shared this ${item_class_name_lc} with any users yet.</p>
+                        <a class="action-button" href="${h.url_for(controller=controller_name, action='share', id=trans.security.encode_id(item.id), use_panels=use_panels )}">
+                            <span>Share with a user</span>
+                        </a>
+                    %endif
+                </div>
+            </div>
+        </div>
+    %endif
+</%def>
+
+
+## Download and Export section
+<%def name="render_download_to_file(item)">
+    <hr/>
+    <h3>Export</h3>
+    <div class="sharing-section">
+        <a class="action-button" href="${h.url_for( controller=self.controller, action='display_by_username_and_slug', username=item.user.username, slug=item.slug, format='json-download' )}">
+        Download
+        </a>
+    ${get_class_display_name( item.__class__ ).lower()} as a file so that it can be saved or imported into another Galaxy server.
+    </div>
+</%def>
+
+
+<%def name="render_url_for_importing(item)">
+    <div class="sharing-section">
+    %if item.importable:
+        Use this URL to import the ${get_class_display_name( item.__class__ ).lower()} directly into another Galaxy server:
+        <div class="display-url">
+            ${h.url_for(controller=self.controller, action='display_by_username_and_slug', username=item.user.username,
+                        slug=item.slug, format='json', qualified=True )}
+        </div>
+        (Copy this URL into the box titled 'Workflow URL' in the Import Workflow page.)
+    %else:
+        This ${get_class_display_name( item.__class__ ).lower()} must be accessible. Please use the "Make Workflow Accessible and Publish" option above to obtain a URL for importing it into another Galaxy.
+    %endif
+    </div>
+</%def>
+
+<%def name="render_header()">
+    <a href="${h.url_for(controller=self.controller, action="list" )}">Go back to ${self.item_class_plural_name} List</a>
+</%def>
+
+
+<%def name="render_export_to_myexp(item)">
+    ##
+    ## Renders form for exporting workflow to myExperiment.
+    ##
+    <div class="sharing-section">
+        <span>Export to the <a href="http://www.myexperiment.org/" target="_blank">www.myexperiment.org</a> site.</span>
+        <form action="${h.url_for(controller='workflow', action='export_to_myexp', id=trans.security.encode_id( item.id ) )}"
+                method="POST">
+            <div class="form-row">
+                <label>myExperiment username:</label>
+                <input type="text" name="myexp_username" value="" size="25" placeholder="username" autocomplete="off"/>
+            </div>
+            <div class="form-row">
+                <label>myExperiment password:</label>
+                <input type="password" name="myexp_password" value="" size="25" placeholder="password" autocomplete="off"/>
+            </div>
+            <div class="form-row">
+                <input type="submit" value="Export to myExperiment"/>
+            </div>
+        </form>
+    </div>
+</%def>
+
+
+<%def name="render_more(item)">
+    ## Add link to render as SVG image.
+    <div class="sharing-section">
+        <a class="action-button" href="${h.url_for(controller='workflow', action='gen_image', id=trans.security.encode_id( item.id ) )}">
+            Create image
+        </a>
+        of ${get_class_display_name( item.__class__ ).lower()} in SVG format
+    </div>
+    ## Add form to export to myExperiment.
+    <div class="sharing-section">
+        ${self.render_export_to_myexp(item)}
+    </div>
+</%def>
+
+
+<%def name="body()">
+    <div style="overflow: auto; height: 100%;">
+        <div class="page-container" style="padding: 10px;">
+            ${self.render_header()}
+            <h2>${get_class_display_name( item.__class__ )} '${get_item_name( item ) | h}'</h2>
+            <hr/>
+            ${self.render_sharing(item)}
+            ${self.render_download_to_file(item)}
+            ${self.render_url_for_importing(item)}
+            ${self.render_more(item)}
+        </div>
+    </div>
+</%def>
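
The item-URL widget in render_sharing above works by splitting the qualified URL on "/" so that only the trailing slug is editable in place. A minimal plain-Python sketch of that split-and-rejoin step (the URL shown is a made-up example):

    # Sketch of the slug-splitting logic used by the item-URL widget above.
    url = "https://example.org/u/someuser/p/my-page-slug"  # hypothetical URL

    url_parts = url.split("/")
    prefix = "/".join(url_parts[:-1])  # everything up to the editable slug
    slug = url_parts[-1]               # the identifier the pencil icon edits

    assert prefix + "/" + slug == url
    print(prefix, slug)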
diff --git a/templates/webapps/reports/base_panels.mako b/templates/webapps/reports/base_panels.mako
new file mode 100644
index 0000000..085f690
--- /dev/null
+++ b/templates/webapps/reports/base_panels.mako
@@ -0,0 +1,44 @@
+<%inherit file="/base/base_panels.mako"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "reports" )}
+</%def>
+
+
+## Default title
+<%def name="title()">Galaxy Reports</%def>
+
+## Masthead
+<%def name="masthead()">
+    ## Tab area, fills entire width
+    <div style="position: absolute; top: 0; left: 0; width: 100%; text-align: center">
+        <table class="tab-group" border="0" cellspacing="0" style="margin: auto;">
+            <tr>
+                <%def name="tab( id, display, href, target='_parent', visible=True, extra_class='' )">
+                    <%
+                    cls = "tab"
+                    if extra_class:
+                        cls += " " + extra_class
+                    if self.active_view == id:
+                        cls += " active"
+                    style = ""
+                    if not visible:
+                        style = "display: none;"
+                    %>
+                    <td class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></td>
+                </%def>
+            </tr>
+        </table>
+    </div>
+    ## Logo, layered over tabs to be clickable
+    <div class="navbar-brand" style="position: absolute; top: 0; left: 0;">
+        <a href="${h.url_for( app.config.get( 'logo_url', '/' ) )}">
+        <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;">
+        Galaxy Reports
+        %if app.config.brand:
+            <span class='brand'>/ ${app.config.brand}</span>
+        %endif
+        </a>
+    </div>
+</%def>
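
The tab() def above derives each tab's CSS class and inline style from keyword flags. The same assembly in plain Python, assuming active_view holds the id of the view being rendered:

    # Sketch of the class/style assembly performed by the tab() def above.
    def tab_attrs(tab_id, active_view, visible=True, extra_class=""):
        cls = "tab"
        if extra_class:
            cls += " " + extra_class
        if active_view == tab_id:  # highlight the tab for the current view
            cls += " active"
        style = "" if visible else "display: none;"
        return cls, style

    print(tab_attrs("reports", "reports"))  # -> ('tab active', '')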
diff --git a/templates/webapps/reports/dataset_info.mako b/templates/webapps/reports/dataset_info.mako
new file mode 100644
index 0000000..09507d9
--- /dev/null
+++ b/templates/webapps/reports/dataset_info.mako
@@ -0,0 +1,125 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+    from galaxy.webapps.galaxy.controllers.library_common import get_containing_library_from_library_dataset
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="toolForm">
+    <h3 align="center">Dataset Information</h3>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Date uploaded:</label>
+            ${time_ago( dataset.create_time )}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Last updated:</label>
+            ${time_ago( dataset.update_time )}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>File size:</label>
+            ${dataset.get_size( nice_size=True )}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>State:</label>
+            ${dataset.state}
+            <div style="clear: both"></div>
+        </div>
+    </div>
+</div>
+%if associated_hdas:
+    <p/>
+    <b>Active (undeleted) history items that use this library dataset's disk file</b>
+    <div class="toolForm">
+        <table class="grid">
+            <thead>
+                <tr>
+                    <th>History</th>
+                    <th>History Item</th>
+                    <th>Last Updated</th>
+                    <th>User</th>
+                </tr>
+            </thead>
+            %for hda in associated_hdas:
+                <tr>
+                    <td>
+                        %if hda.history:
+                            ${hda.history.get_display_name()}
+                        %else:
+                            no history
+                        %endif
+                    </td>
+                    <td>${hda.get_display_name()}</td>
+                    <td>${time_ago( hda.update_time )}</td>
+                    <td>
+                        %if hda.history and hda.history.user:
+                            ${hda.history.user.email}
+                        %else:
+                            anonymous
+                        %endif
+                    </td>
+                </tr>
+            %endfor
+        </table>
+    </div>
+    <p/>
+%endif
+%if associated_lddas:
+    <p/>
+    <b>Other active (undeleted) library datasets that use this library dataset's disk file</b>
+    <div class="toolForm">
+        <table class="grid">
+            <thead>
+                <tr>
+                    <th>Library</th>
+                    <th>Folder</th>
+                    <th>Library Dataset</th>
+                    <th>Last Updated</th>
+                    <th>Uploaded By</th>
+                </tr>
+            </thead>
+            %for ldda in associated_lddas:
+                <% containing_library = get_containing_library_from_library_dataset( trans, ldda.library_dataset ) %>
+                <tr>
+                    <td>
+                        <%
+                            if containing_library:
+                                library_display_name = containing_library.get_display_name()
+                            else:
+                                library_display_name = 'no library'
+                        %>
+                        ${library_display_name}
+                    </td>
+                    <td>
+                        <%
+                            library_dataset = ldda.library_dataset
+                            folder = library_dataset.folder
+                            folder_display_name = folder.get_display_name()
+                            if folder_display_name == library_display_name:
+                                folder_display_name = 'library root'
+                        %>
+                        ${folder_display_name}
+                    </td>
+                    <td>${ldda.get_display_name()}</td>
+                    <td>${time_ago( ldda.update_time )}</td>
+                    <td>
+                        %if ldda.user:
+                            ${ldda.user.email}
+                        %else:
+                            anonymous
+                        %endif
+                    </td>
+                </tr>
+            %endfor
+        </table>
+    </div>
+    <p/>
+%endif
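
The library and folder columns above fall back to "no library" when the containing library cannot be resolved, and to "library root" when the folder shares the library's display name. A self-contained sketch of that fallback chain (_Named is a stand-in for the Galaxy model objects):

    # Sketch of the display-name fallbacks used in the library-dataset table above.
    class _Named(object):
        def __init__(self, name):
            self.name = name
        def get_display_name(self):
            return self.name

    def display_names(containing_library, folder):
        library_name = (containing_library.get_display_name()
                        if containing_library else "no library")
        folder_name = folder.get_display_name()
        # a top-level folder shares its library's name, so show it as the root
        if folder_name == library_name:
            folder_name = "library root"
        return library_name, folder_name

    print(display_names(_Named("Lib A"), _Named("Lib A")))  # ('Lib A', 'library root')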
diff --git a/templates/webapps/reports/grid.mako b/templates/webapps/reports/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/webapps/reports/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/webapps/reports/history_and_dataset_per_user.mako b/templates/webapps/reports/history_and_dataset_per_user.mako
new file mode 100644
index 0000000..56de43b
--- /dev/null
+++ b/templates/webapps/reports/history_and_dataset_per_user.mako
@@ -0,0 +1,76 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+<div class="reportBody">
+    <h3 align="center">Histories and Datasets per User</h3>
+    <h4 align="center">Listed in
+    %if descending == 1:
+        descending
+    %else:
+        ascending
+    %endif
+    order by
+    %if sorting == 0:
+        Users
+    %elif sorting == 1:
+        number of History
+    %elif sorting == 2:
+        number of Dataset
+    %else:
+        History Space
+    %endif
+    </h4>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        <tr>
+            <td>
+                <form method="post" controller="users" action="history_and_dataset_per_user">
+                    <p>
+                        Top <input type="textfield" value="${user_cutoff}" size="3" name="user_cutoff"> shown (0 = all).
+                        </br>
+                        Sort:
+                        <select value="${sorting}" size="4" name="sorting">
+                            <option value="User"> by username </option>
+                            <option value="size"> by history space </option>
+                            <option value="HSort"> by number of history </option>
+                            <option value="DSort"> by number of dataset </option>
+                        </select>
+                        <select value="${descending}" size="3" name="descending">
+                            <option value="desc"> descending </option>
+                            <option value="asc"> ascending </option>
+                        </select>
+                        </br>
+                        <button name="action" value="commit">Sort my Data!</button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        %if data:
+            <tr class="header">
+                <td>User</td>
+                <td>Number of Histories</td>
+                <td>Number of Datasets</td>
+            </tr>
+            <% odd = False%>
+            %for user in data:
+                %if odd:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                <td><a href="history_and_dataset_type?user_selection=${user}">${user}</a></td>
+                <td>${data[user][0]}</td>
+                <td>${data[user][1]}</td>
+                </tr>
+                <% odd = not odd %>
+            %endfor
+        %endif
+    </table>
+</div>
+</div>
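
The table rows above alternate between the "tr" and "odd_row" classes by toggling a boolean each iteration. The same zebra-striping loop in plain Python (the user names are illustrative):

    # Sketch of the alternating row-class pattern used in the report tables.
    rows = ["alice", "bob", "carol"]  # hypothetical users
    odd = False
    for user in rows:
        css_class = "odd_row" if odd else "tr"
        print('<tr class="%s"><td>%s</td></tr>' % (css_class, user))
        odd = not odd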
diff --git a/templates/webapps/reports/history_and_dataset_type.mako b/templates/webapps/reports/history_and_dataset_type.mako
new file mode 100644
index 0000000..e0cf8e6
--- /dev/null
+++ b/templates/webapps/reports/history_and_dataset_type.mako
@@ -0,0 +1,75 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+<div class="reportBody">
+    <h3 align="center">States of Datasets per History</h3>
+    <h4 align="center">Listed in
+    %if descending == 1:
+        descending
+    %else:
+        ascending
+    %endif
+    order by History name
+    </h4>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        <tr>
+            <td>
+                <form method="post" controller="users" action="history_and_dataset_type">
+                    <p>
+                        Top <input type="textfield" value="${user_cutoff}" size="3" name="user_cutoff"> shown (0 = all).
+                        </br>
+                        Sort:
+                        <select value="${descending}" size="3" name="descending">
+                            <option value="desc"> descending </option>
+                            <option value="asc"> ascending </option>
+                        </select>
+                        </br>
+                        <button name="action" value="commit">Sort my Data!</button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        %if data:
+            <tr class="header">
+                <td>History name</td>
+                <td>Datasets in state 'ok'</td>
+                <td>Datasets in state 'upload'</td>
+                <td>Datasets paused</td>
+                <td>Datasets queued</td>
+                <td>Datasets in error</td>
+                <td>Datasets discarded</td>
+
+                <!-- <td>Datasets in other states</td> uncomment to also show other states -->
+
+            </tr>
+            <% odd = False%>
+            %for name in data:
+                %if odd:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                <td>${name}</td>
+                <td>${data[name][0]}</td>
+                <td>${data[name][1]}</td>
+                <td>${data[name][2]}</td>
+                <td>${data[name][3]}</td>
+                <td>${data[name][4]}</td>
+                <td>${data[name][5]}</td>
+
+                <!-- <td>${data[name][6]}</td> uncomment to also show other states -->
+
+                </tr>
+                <% odd = not odd %>
+            %endfor
+        %endif
+    </table>
+</div>
+</div>
diff --git a/templates/webapps/reports/history_per_user.mako b/templates/webapps/reports/history_per_user.mako
new file mode 100644
index 0000000..4c82e82
--- /dev/null
+++ b/templates/webapps/reports/history_per_user.mako
@@ -0,0 +1,68 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+<div class="reportBody">
+    <h3 align="center">Histories per User</h3>
+    <h4 align="center">Listed in
+    %if descending == 1:
+        descending
+    %else:
+        ascending
+    %endif
+    order by
+    %if sorting == 0:
+        Users
+    %else:
+        Number
+    %endif
+    </h4>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        <tr>
+            <td>
+                <form method="post" controller="users" action="history_per_user">
+                    <p>
+                        Top <input type="textfield" value="${user_cutoff}" size="3" name="user_cutoff"> shown (0 = all).
+                        </br>
+                        Sort:
+                        <select value="${sorting}" size="3" name="sorting">
+                            <option value="User"> by username </option>
+                            <option value="Number"> by number </option>
+                        </select>
+                        <select value="${descending}" size="3" name="descending">
+                            <option value="desc"> descending </option>
+                            <option value="asc"> ascending </option>
+                        </select>
+                        </br>
+                        <button name="action" value="commit">Go</button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        %if histories:
+            <tr class="header">
+                <td>User</td>
+                <td>Number of Histories</td>
+            </tr>
+            <% ctr = 0 %>
+            %for history in histories:
+                %if ctr % 2 == 1:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                    <td>${history[0]}</td>
+                    <td>${history[1]}</td>
+                </tr>
+                <% ctr += 1 %>
+            %endfor
+        %endif
+    </table>
+</div>
+</div>
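
The "Top N shown (0 = all)" field maps to a simple slice of the sorted rows, presumably on the controller side. A sketch of that cutoff convention (the history counts are made up):

    # Sketch: apply the user_cutoff convention from the form above (0 means all).
    def apply_cutoff(rows, user_cutoff):
        return rows if user_cutoff == 0 else rows[:user_cutoff]

    histories = [("alice", 12), ("bob", 7), ("carol", 3)]  # hypothetical data
    print(apply_cutoff(histories, 2))  # [('alice', 12), ('bob', 7)]
    print(apply_cutoff(histories, 0))  # all three rows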
diff --git a/templates/webapps/reports/index.mako b/templates/webapps/reports/index.mako
new file mode 100644
index 0000000..cefcdbd
--- /dev/null
+++ b/templates/webapps/reports/index.mako
@@ -0,0 +1,139 @@
+<%inherit file="/webapps/reports/base_panels.mako"/>
+
+<%def name="init()">
+    <%
+        self.has_left_panel=True
+        self.has_right_panel=False
+        self.active_view="reports"
+    %>
+</%def>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ## Include "base.css" for styling tool menu and forms (details)
+    ${h.css( "base", "autocomplete_tagging" )}
+
+    ## But make sure styles for the layout take precedence
+    ${parent.stylesheets()}
+
+    <style type="text/css">
+        body { margin: 0; padding: 0; overflow: hidden; }
+        #left {
+            background: #C1C9E5 url("${h.url_for('/static/style/menu_bg.png')}") top repeat-x;
+        }
+    </style>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="left_panel()">
+    <%
+        from datetime import datetime
+        from time import mktime, strftime, localtime
+    %>
+    <div class="unified-panel-header" unselectable="on">
+        <div class='unified-panel-header-inner'><span>Reports</span>
+            <a target="galaxy_main" href="${h.url_for( controller='home', action='run_stats' )}">
+                <button id="reports_home" data-toggle="tooltip" data-placement="top" title="Dashboard" class="btn btn-default primary-button" type="button"><span class="fa fa-home"></span></button>
+            </a>
+        </div>
+    </div>
+    <div class="page-container reports-panel-container">
+        <div class="toolMenu">
+            <div class="toolSectionList">
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    <span>Jobs</span>
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='specified_date_handler', specified_date=datetime.utcnow().strftime( "%Y-%m-%d" ), sort_id='default', order='default' )}">Today's jobs</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='specified_month_all', sort_id='default', order='default' )}">Jobs per day this month</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='specified_month_in_error', sort_id='default', order='default' )}">Jobs in error per day this month</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='specified_date_handler', operation='unfinished', sort_id='default', order='default' )}">All unfinished jobs</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='per_month_all', sort_id='default', order='default' )}">Jobs per month</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='per_month_in_error', sort_id='default', order='default' )}">Jobs in error per month</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='per_user', sort_id='default', order='default' )}">Jobs per user</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='per_tool', sort_id='default', order='default' )}">Jobs per tool</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='jobs', action='errors_per_tool', sort_id='default', order='default', spark_time='')}">Errors per tool</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    <span>Histories</span>
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='history', action='history_and_dataset_per_user' )}">Histories and Datasets per User</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='history', action='history_and_dataset_type' )}">States of Datasets per History</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    <span>Tools</span>
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='tools', action='tools_and_job_state' )}">States of Jobs per Tool</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='tools', action='tool_execution_time' )}">Execution Time per Tool</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    <span>Workflows</span>
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='workflows', action='per_workflow', sort_id='default', order='default' )}">Runs per Workflows</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='workflows', action='per_month_all', sort_id='default', order='default' )}">Workflows per month</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='workflows', action='per_user', sort_id='default', order='default' )}">Workflows per user</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    <span>Users</span>
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='users', action='registered_users' )}">Registered users</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='users', action='last_access_date', sort_id='default', order='default' )}">Date of last login</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='users', action='user_disk_usage', sort_id='default', order='default' )}">User disk usage</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='users', action='history_per_user', sort_id='default', order='default' )}">Number of History per user</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    <span>System</span>
+                </div>
+                  <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='system', action='index' )}">Disk space maintenance</a></div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    <span>Sample Tracking</span>
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='sample_tracking', action='per_month_all' )}">Sequencing requests per month</a></div>
+                        <div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='sample_tracking', action='per_user' )}">Sequencing requests per user</a></div>
+                    </div>
+                </div>
+            </div>
+        </div>
+    </div>
+</%def>
+
+<%def name="center_panel()">
+    <% center_url = h.url_for( controller='home', action='run_stats' ) %>
+    <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"> </iframe>
+</%def>
diff --git a/templates/webapps/reports/job_info.mako b/templates/webapps/reports/job_info.mako
new file mode 100644
index 0000000..9b844e9
--- /dev/null
+++ b/templates/webapps/reports/job_info.mako
@@ -0,0 +1,95 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<% import datetime %>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">Job Information</h3>
+        <table align="center" class="colored">
+            <tr class="header">
+                <td>State</td>
+                <td>Job Id</td>
+                <td>Create Time</td>
+                <td>Time To Finish</td>
+                <td>Session Id</td>
+            </tr>
+            <tr>
+                <td><div class="count-box state-color-${job.state}">${job.state}</div></td>
+                <td>${job.id}</td>
+                <td>${job.create_time}</td>
+                <td>
+                    <% execute_time = job.update_time - job.create_time %>
+                    ${datetime.timedelta( days=execute_time.days, seconds=execute_time.seconds )}
+                </td>
+                <td>${job.session_id}</td>
+            </tr>
+                <tr class="header">
+                <td colspan="2">Tool</td>
+                <td>User</td>
+                <td>Runner</td>
+                <td>Runner Id</td>
+            </tr>
+            <tr>
+                <td colspan="2">${job.tool_id}</td>
+                <td>
+                    %if job.user and job.user.email:
+                        ${job.user.email}
+                    %else:
+                        anonymous
+                    %endif
+                </td>
+                <td>${job.job_runner_name}</td>
+                <td>${job.job_runner_external_id}</td>
+            </tr>
+            <tr class="header">
+                <td colspan="5">Remote Host</td>
+            </tr>
+            <tr>
+                <td colspan="5">
+                    %if job.galaxy_session and job.galaxy_session.remote_host:
+                        ${job.galaxy_session.remote_host}
+                    %else:
+                        no remote host
+                    %endif
+                </td>
+            </tr>
+            <tr class="header">
+                <td colspan="5">Command Line</td>
+            </tr>
+            <tr>
+                <td colspan="5">${job.command_line}</td>
+            </tr>
+            <tr class="header">
+                <td colspan="5">Stdout</td>
+            </tr>
+            <tr>
+                <td colspan="5"><pre>${job.stdout}</pre></td>
+            </tr>
+            <tr class="header">
+                <td colspan="5">Stderr</td>
+            </tr>
+            <tr>
+                <td colspan="5"><pre>${job.stderr}</pre></td>
+            </tr>
+            <tr class="header">
+                <td colspan="5">Stack Trace</td>
+            </tr>
+            <tr>
+                <td colspan="5"><pre>${job.traceback}</pre></td>
+            </tr>
+            <tr class="header">
+                <td colspan="5">Info</td>
+            </tr>
+            <tr>
+                <td colspan="5">${job.info}</td>
+            </tr>
+            <tr><td colspan="5"> </td></tr>
+            <tr><td colspan="5"> </td></tr>
+        </table>
+    </div>
+</div>
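
The "Time To Finish" cell above subtracts create_time from update_time and re-wraps the result in a timedelta to drop sub-second noise; because timedelta.seconds excludes whole days, the days component has to be carried over explicitly. The same computation in plain Python (the timestamps are hypothetical):

    import datetime

    # Sketch: format a job's time-to-finish as in job_info.mako, keeping
    # whole days but dropping microseconds.
    create_time = datetime.datetime(2016, 10, 1, 8, 0, 0)
    update_time = datetime.datetime(2016, 10, 2, 9, 30, 5, 123)

    execute_time = update_time - create_time
    shown = datetime.timedelta(days=execute_time.days,
                               seconds=execute_time.seconds)
    print(shown)  # 1 day, 1:30:05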
diff --git a/templates/webapps/reports/jobs_errors_per_tool.mako b/templates/webapps/reports/jobs_errors_per_tool.mako
new file mode 100644
index 0000000..7f4b822
--- /dev/null
+++ b/templates/webapps/reports/jobs_errors_per_tool.mako
@@ -0,0 +1,115 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline, make_spark_settings" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+<%!
+    import re
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+${get_css()}
+
+<!--jobs_errors_per_tool.mako-->
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'jobs', 'errors_per_tool', spark_time=time_period)}
+                </td>
+                <td>
+                    <h4 align="center">Jobs In Error Per Tool</h4>
+                    <h5 align="center">
+                        <p>
+                            Click Tool ID to view details.
+                            Click error number to view job details.
+                        </p>
+
+                        Graph goes from present to past for
+                        ${make_spark_settings("jobs", "errors_per_tool", spark_limit, sort_id, order, time_period, page=page, offset=offset, entries=entries)}
+                    </h5>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("jobs", "errors_per_tool", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr>
+                    <td colspan="2">
+                        There are no jobs in the error state.
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'tool_id', 'jobs', 'errors_per_tool', 'Tool ID', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow tool_id'>${arrow}</span>
+                    </td>
+                    %if is_user_jobs_only:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'errors_per_tool', 'User Jobs in Error', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %else:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'errors_per_tool', 'User and Monitor Jobs in Error', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %endif
+                    <td></td>
+                </tr>
+                <%
+                   ctr = 0
+                   entries = 1
+                %>
+                %for job in jobs:
+                    <% key = re.sub(r'\W+', '', job[1]) %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>
+                            <a href="${h.url_for( controller='jobs', action='tool_per_month', tool_id=job[1], sort_id='default', order='default')}">
+                                ${job[1]}
+                            </a>
+                        </td>
+                        <td>
+                            <a href="${h.url_for( controller='jobs', action='specified_date_handler', operation='specified_tool_in_error', tool_id=job[1])}">
+                                ${job[0]}
+                            </a>
+                        </td>
+                        %try:
+                            ${make_sparkline(key, trends[key], "bar", "/ " + time_period[:-1])}
+                        %except KeyError:
+                        %endtry
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End jobs_errors_per_tool.mako-->
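
Each sparkline cell needs a DOM-safe element id, so the tool id is stripped of non-word characters with re.sub(r'\W+', '', ...) before being used as a key. That sanitization step in isolation (the tool id is an example):

    import re

    # Sketch: derive the DOM-safe sparkline key used in the report tables above.
    def spark_key(tool_id):
        # drop every character that is not [A-Za-z0-9_]
        return re.sub(r'\W+', '', tool_id)

    print(spark_key("toolshed.g2.bx.psu.edu/repos/devteam/bwa/0.7.12"))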
diff --git a/templates/webapps/reports/jobs_per_month_all.mako b/templates/webapps/reports/jobs_per_month_all.mako
new file mode 100644
index 0000000..33165df
--- /dev/null
+++ b/templates/webapps/reports/jobs_per_month_all.mako
@@ -0,0 +1,97 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+${get_css()}
+
+<!--jobs_per_month_all.mako-->
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'jobs', 'per_month_all')}
+                </td>
+                <td>
+                    <h4 align="center">Jobs Per Month</h4>
+                    <h5 align="center">
+                        Click Month to view details.
+                        Graph goes from the 1st to the last of the month.
+                    </h5>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("jobs", "per_month_all", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr><td colspan="4">There are no jobs.</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="third_width">
+                        ${get_sort_url(sort_id, order, 'date', 'jobs', 'per_month_all', 'Month', page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    %if is_user_jobs_only:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'per_month_all', 'User Jobs', page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %else:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'per_month_all', 'User and Monitor Jobs', page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %endif
+                    <td></td>
+                </tr>
+                <%
+                    ctr = 0
+                    entries = 1
+                %>
+                %for job in jobs:
+                    <% key = str(job[2]) + str(job[3]) %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>
+                            <a href="${h.url_for( controller='jobs', action='specified_month_all', specified_date=job[0]+'-01', sort_id='default', order='default' )}">
+                                ${job[2]} ${job[3]}
+                            </a>
+                        </td>
+                        <td>${job[1]}</td>
+                        ${make_sparkline(key, trends[key], "bar", "/ day")}
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End jobs_per_month_all.mako-->
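
The monthly loops above paginate by counting rendered rows and breaking once the page is full, rather than slicing the result set beforehand. An equivalent plain-Python rendering loop (the job tuples are placeholders):

    # Sketch of the manual pagination break used in the monthly report loops.
    jobs = [("2016-10", 40, "October", 2016), ("2016-09", 35, "September", 2016)]
    page_entries = 1  # hypothetical page size from page_specs.entries

    entries = 1
    for job in jobs:
        if entries > page_entries:
            break  # stop once the page is full, like <%break%> above
        print("%s %s -> %s jobs" % (job[2], job[3], job[1]))
        entries += 1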
diff --git a/templates/webapps/reports/jobs_per_month_in_error.mako b/templates/webapps/reports/jobs_per_month_in_error.mako
new file mode 100644
index 0000000..d805733
--- /dev/null
+++ b/templates/webapps/reports/jobs_per_month_in_error.mako
@@ -0,0 +1,93 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+${get_css()}
+
+<!--jobs_per_month_in_error.mako-->
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'jobs', 'per_month_in_error')}
+                </td>
+                <td>
+                    <h4 align="center">Jobs In Error Per Month</h4>
+                    <h5 align="center">Click Month to view details.</h5>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("jobs", "per_month_in_error", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr>
+                    <td colspan="2">
+                        There are no jobs in the error state.
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="third_width">
+                        ${get_sort_url(sort_id, order, 'date', 'jobs', 'per_month_in_error', 'Month', page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    %if is_user_jobs_only:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'per_month_in_error', 'User Jobs', page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %else:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'per_month_in_error', 'User and Monitor Jobs', page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %endif
+                    <td></td>
+                </tr>
+                <%
+                   ctr = 0
+                   entries = 1
+                %>
+                %for job in jobs:
+                    <% key = str(job[2]) + str(job[3]) %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td><a href="${h.url_for( controller='jobs', action='specified_month_in_error', specified_date=job[0]+'-01', sort_id='default', order='default' )}">${job[2]} ${job[3]}</a></td>
+                        <td>${job[1]}</td>
+                        ${make_sparkline(key, trends[key], "bar", "/ day")}
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End jobs_per_month_in_error.mako-->
diff --git a/templates/webapps/reports/jobs_per_tool.mako b/templates/webapps/reports/jobs_per_tool.mako
new file mode 100644
index 0000000..723013f
--- /dev/null
+++ b/templates/webapps/reports/jobs_per_tool.mako
@@ -0,0 +1,103 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline, make_spark_settings" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+<%!
+    import re
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+${get_css()}
+
+<!--jobs_per_tool.mako-->
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'jobs', 'per_tool', spark_time=time_period)}
+                </td>
+                <td>
+                    <h4 align="center">Jobs Per Tool</h4>
+                    <h5 align="center">
+                        Click Tool ID to view details.
+                        Graph goes from present to past
+                        ${make_spark_settings("jobs", "per_tool", spark_limit, sort_id, order, time_period, page=page, offset=offset, entries=entries)}
+                    </h5>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("jobs", "per_tool", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr><td colspan="2">There are no jobs.</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'tool_id', 'jobs', 'per_tool', 'Tool ID', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow tool_id'>${arrow}</span>
+                    </td>
+                    %if is_user_jobs_only:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'per_tool', 'User Jobs', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %else:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'per_tool', 'User and Monitor Jobs', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %endif
+                    <td></td>
+                </tr>
+                <%
+                   ctr = 0
+                   entries = 1
+                %>
+                %for job in jobs:
+                    <% key = re.sub(r'\W+', '', job[0]) %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>
+                            <a href="${h.url_for( controller='jobs', action='tool_per_month', tool_id=job[0], sort_id='default', order='default' )}">
+                                ${job[0]}
+                            </a>
+                        </td>
+                        <td>${job[1]}</td>
+                        %try:
+                            ${make_sparkline(key, trends[key], "bar", "/ " + time_period[:-1])}
+                        %except KeyError:
+                        %endtry
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End jobs_per_tool.mako-->
diff --git a/templates/webapps/reports/jobs_per_user.mako b/templates/webapps/reports/jobs_per_user.mako
new file mode 100644
index 0000000..c928f79
--- /dev/null
+++ b/templates/webapps/reports/jobs_per_user.mako
@@ -0,0 +1,97 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline, make_spark_settings" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+
+<%!
+    import re
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+<!--jobs_per_user.mako-->
+## Debug timing output left over from profiling; keep it commented out.
+## ${q1time}, ${q2time}, ${ttime}
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                  ${get_pages(sort_id, order, page_specs, 'jobs', 'per_user', spark_time=time_period)}
+                </td>
+                <td>
+                    <h4 align="center">Jobs Per User</h4>
+                    <h5 align="center">
+                        Click User to view details.
+                        Graph goes from present to past
+                        ${make_spark_settings("jobs", "per_user", spark_limit, sort_id, order, time_period, page=page, offset=offset, entries=entries)}
+                    </h5>
+                </td>
+                <td align="right">
+                  ${get_entry_selector("jobs", "per_user", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr><td colspan="2">There are no jobs.</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'user_email', 'jobs', 'per_user', 'User', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow user_email'>${arrow}</span>
+                    </td>
+                    <td class="third_width">
+                        ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'per_user', 'Total Jobs', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow total_jobs'>${arrow}</span>
+                    </td>
+                    <td></td>
+                </tr>
+                <%
+                   ctr = 0
+                   entries = 1
+                %>
+                %for job in jobs:
+                    <% key = re.sub(r'\W+', '', job[0]) %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>
+                            <a href="${h.url_for( controller='jobs', action='user_per_month', email=job[0], sort_id='default', order='default' )}">
+                                ${job[0]}
+                            </a>
+                        </td>
+                        <td>${job[1]}</td>
+                        %try:
+                            ${make_sparkline(key, trends[key], "bar", "/ " + time_period[:-1])}
+                        %except KeyError:
+                        %endtry
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End jobs_per_user.mako-->
diff --git a/templates/webapps/reports/jobs_specified_month_all.mako b/templates/webapps/reports/jobs_specified_month_all.mako
new file mode 100644
index 0000000..b9c5edc
--- /dev/null
+++ b/templates/webapps/reports/jobs_specified_month_all.mako
@@ -0,0 +1,106 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+<!--jobs_specified_month_all.mako-->
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'jobs', 'specified_month_all')}
+                </td>
+                <td>
+                    <h4 align="center">
+                        Jobs for ${month_label} ${year_label}
+                    </h4>
+                    <h5 align="center">
+                        Click job count to see the day's details.
+                    </h5>
+                    <h5 align="center">
+                        Graph goes from the beginning to the end of the day.
+                    </h5>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("jobs", "specified_month_all", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr>
+                    <td colspan="5">
+                        There are no jobs for ${month_label} ${year_label}
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="quarter_width">Day</td>
+                    <td class="quarter_width">
+                        ${get_sort_url(sort_id, order, 'date', 'jobs', 'specified_month_all', 'Date', page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    %if is_user_jobs_only:
+                        <td class="quarter_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'specified_month_all', 'User Jobs', page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %else:
+                        <td class="quarter_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'specified_month_all', 'User and Monitor Jobs', page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %endif
+                    <td></td>
+                </tr>
+                <%
+                   ctr = 0
+                   entries = 1
+                %>
+                %for job in jobs:
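+                    ## job[1] is the day of the month; it doubles as the DOM id for that day's sparkline cell.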
+                    <% key = job[1] %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>${job[0]}</td>
+                        <td>${month_label} ${job[1]}, ${year_label}</td>
+                        <td>
+                            <a href="${h.url_for( controller='jobs', action='specified_date_handler', specified_date=job[3], webapp='reports', sort_id='default', order='default' )}">
+                                ${job[2]}
+                            </a>
+                        </td>
+                        ${make_sparkline(key, trends[key], "bar", "/ hour")}
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End jobs_specified_month_all.mako-->
diff --git a/templates/webapps/reports/jobs_specified_month_in_error.mako b/templates/webapps/reports/jobs_specified_month_in_error.mako
new file mode 100644
index 0000000..e24464c
--- /dev/null
+++ b/templates/webapps/reports/jobs_specified_month_in_error.mako
@@ -0,0 +1,104 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+${get_css()}
+
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'jobs', 'specified_month_in_error')}
+                </td>
+                <td>
+                    <h4 align="center">
+                        Jobs in Error for ${month_label} ${year_label}
+                    </h4>
+                    <h5 align="center">
+                        Click job count to see the day's details
+                    </h5>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("jobs", "specified_month_in_error", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr>
+                    <td colspan="3">
+                        There are no jobs in the error state
+                        for ${month_label} ${year_label}
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="quarter_width">Day</td>
+                    <td class="quarter_width">
+                        ${get_sort_url(sort_id, order, 'date', 'jobs', 'specified_month_in_error', 'Date', page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    %if is_user_jobs_only:
+                        <td class="quarter_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'specified_month_in_error', 'User Jobs in Error', page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %else:
+                        <td class="quarter_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'specified_month_in_error', 'User and Monitor Jobs in Error', page=page, offset=offset, entries=entries)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %endif
+                    <td></td>
+                </tr>
+                <%
+                    ctr = 0
+                    entries = 1
+                %>
+                %for job in jobs:
+                    <% key = job[1] %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>${job[0]}</td>
+                        <td>
+                            ${month_label} ${job[1]}, ${year_label}
+                        </td>
+                        <td>
+                            <a href="${h.url_for( controller='jobs', action='specified_date_handler', operation='specified_date_in_error', specified_date=job[3],sort_id='default', order='default' )}">
+                                ${job[2]}
+                            </a>
+                        </td>
+                        ${make_sparkline(key, trends[key], "bar", "/ hour")}
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                        ctr += 1
+                        entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/reports/jobs_tool_per_month.mako b/templates/webapps/reports/jobs_tool_per_month.mako
new file mode 100644
index 0000000..e14dfca
--- /dev/null
+++ b/templates/webapps/reports/jobs_tool_per_month.mako
@@ -0,0 +1,66 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+
+
+<!--jobs_tool_per_month.mako-->
+<div class="report">
+    <div class="reportBody">
+        <h4 align="center">Jobs per month for tool "${tool_id}"</h4>
+        <h5 align="center">
+            <p>Click Jobs to view details.</p>
+            <p>Graph goes from the first of the month to the last</p>
+        </h5>
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr>
+                    <td colspan="2">
+                        There are no jobs for tool "${tool_id}"
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="third_width">
+                        ${get_sort_url(sort_id, order, 'date', 'jobs', 'tool_per_month', 'Month', tool_id=tool_id)}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    %if is_user_jobs_only:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'tool_per_month', 'User Jobs', tool_id=tool_id)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %else:
+                        <td class="third_width">
+                            ${get_sort_url(sort_id, order, 'total_jobs', 'jobs', 'tool_per_month', 'User and Monitor Jobs', tool_id=tool_id)}
+                            <span class='dir_arrow total_jobs'>${arrow}</span>
+                        </td>
+                    %endif
+                    <td></td>
+                </tr>
+                <% ctr = 0 %>
+                %for job in jobs:
+                    <% key = job[2] + job[3] %>
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>${job[2]} ${job[3]}</td>
+                        <td><a href="${h.url_for( controller='jobs', action='specified_date_handler', operation='tool_for_month', tool_id=tool_id, specified_date=job[0] )}">${job[1]}</a></td>
+                        ${make_sparkline(key, trends[key], "bar", "/ day")}
+                        <td id="${key}"></td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End jobs_tool_per_month.mako-->
diff --git a/templates/webapps/reports/jobs_user_per_month.mako b/templates/webapps/reports/jobs_user_per_month.mako
new file mode 100644
index 0000000..5e32179
--- /dev/null
+++ b/templates/webapps/reports/jobs_user_per_month.mako
@@ -0,0 +1,70 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+
+<%
+   from galaxy import util
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+
+<%
+   _email = util.restore_text( email )
+%>
+
+<!--jobs_user_per_month.mako-->
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">Jobs per month for user "${_email}"</h3>
+        <h4 align="center">
+            <p>Click Total Jobs to see the user's jobs for that month</p>
+            <p>Graph goes from the first of the month to the last</p>
+        </h4>
+        <table align="center" width="60%" class="colored">
+            %if len( jobs ) == 0:
+                <tr>
+                    <td colspan="2">
+                        There are no jobs for user "${ _email }"
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="third_width">
+                        ${get_sort_url(sort_id, order, 'date', 'jobs', 'user_per_month', 'Month', email=email)}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    <td class="third_width">
+                        ${get_sort_url( sort_id, order, 'total_jobs', 'jobs', 'user_per_month', 'Total Jobs', email=email)}
+                        <span class='dir_arrow total_jobs'>${arrow}</span>
+                    </td>
+                    <td></td>
+                </tr>
+                <% ctr = 0 %>
+                %for job in jobs:
+                    <% key = job[2] + job[3] %>
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>${job[2]} ${job[3]}</td>
+                        <td>
+                            <a href="${h.url_for( controller='jobs', action='specified_date_handler', operation='user_for_month', email=email, specified_date=job[0], sort_id='default', order='default')}">
+                                ${job[1]}
+                            </a>
+                        </td>
+                        ${make_sparkline(key, trends[key], "bar", "/ day")}
+                        <td id="${key}"></td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End jobs_user_per_month.mako-->
diff --git a/templates/webapps/reports/registered_users.mako b/templates/webapps/reports/registered_users.mako
new file mode 100644
index 0000000..e70e057
--- /dev/null
+++ b/templates/webapps/reports/registered_users.mako
@@ -0,0 +1,34 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">Registered Users</h3>
+        <h4 align="center">
+            Click Number of Registered Users to see
+            the number of user registrations per month
+        </h4>
+        <table align="center" class="colored">
+            %if num_users == 0:
+                <tr><td>There are no registered users</td></tr>
+            %else:
+                <tr class="header">
+                    <td align="center">
+                        Number of Registered Users
+                    </td>
+                </tr>
+                <tr class="tr">
+                    <td align="center">
+                        <a href="${h.url_for( controller='users', action='registered_users_per_month', sort_id='default', order='default' )}">
+                            ${num_users}
+                        </a>
+                    </td>
+                </tr>
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/reports/registered_users_per_month.mako b/templates/webapps/reports/registered_users_per_month.mako
new file mode 100644
index 0000000..0e212dd
--- /dev/null
+++ b/templates/webapps/reports/registered_users_per_month.mako
@@ -0,0 +1,51 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">User Registrations Per Month</h3>
+        <h4 align="center">
+            Click Month to view the number of user registrations
+            for each day of that month
+        </h4>
+        <table align="center" width="30%" class="colored">
+            %if len( users ) == 0:
+                <tr><td colspan="2">There are no registered users</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'date', 'users', 'registered_users_per_month', 'Month')}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'num_users', 'users', 'registered_users_per_month', 'Number of Registrations')}
+                        <span class='dir_arrow num_users'>${arrow}</span>
+                    </td>
+                </tr>
+                <% ctr = 0 %>
+                %for user in users:
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>
+                            <a href="${h.url_for( controller='users', action='specified_month', specified_date=user[0]+'-01' )}">
+                                ${user[2]} ${user[3]}
+                            </a>
+                        </td>
+                        <td>${user[1]}</td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/reports/registered_users_specified_date.mako b/templates/webapps/reports/registered_users_specified_date.mako
new file mode 100644
index 0000000..71f3e2a
--- /dev/null
+++ b/templates/webapps/reports/registered_users_specified_date.mako
@@ -0,0 +1,42 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">
+            User Registrations for ${day_label},
+             ${month_label} ${day_of_month},
+             ${year_label}
+        </h3>
+        <table align="center" width="30%" class="colored">
+            %if len( users ) == 0:
+                <tr>
+                    <td colspan="2">
+                        There are no user registrations for ${day_label},
+                         ${month_label} ${day_of_month},
+                         ${year_label}
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td>Email</td>
+                </tr>
+                <% ctr = 0 %>
+                %for user in users:
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>${user}</td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/reports/registered_users_specified_month.mako b/templates/webapps/reports/registered_users_specified_month.mako
new file mode 100644
index 0000000..9899bf9
--- /dev/null
+++ b/templates/webapps/reports/registered_users_specified_month.mako
@@ -0,0 +1,50 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">
+            User Registrations for ${month_label} ${year_label}
+        </h3>
+        <h4 align="center">
+            Click Day to see user registrations for that day
+        </h4>
+        <table align="center" width="60%" class="colored">
+            %if len( users ) == 0:
+                <tr>
+                    <td colspan="2">
+                        There are no user registrations for
+                        ${month_label} ${year_label}
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">Day</td>
+                    <td class="half_width">New Registrations</td>
+                </tr>
+                <% ctr = 0 %>
+                %for user in users:
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>
+                            <a href="${h.url_for( controller='users', action='specified_date', specified_date=user[0] )}">
+                                ${user[3]},
+                                 ${month_label} ${user[1]},
+                                 ${year_label}
+                            </a>
+                        </td>
+                        <td>${user[2]}</td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/reports/requests_per_month_all.mako b/templates/webapps/reports/requests_per_month_all.mako
new file mode 100644
index 0000000..191b570
--- /dev/null
+++ b/templates/webapps/reports/requests_per_month_all.mako
@@ -0,0 +1,38 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">Sequencing Requests Per Month</h3>
+        <table align="center" width="60%" class="colored">
+            %if len( requests ) == 0:
+                <tr><td colspan="4">There are no requests</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">Month</td>
+                    <td class="half_width">Total</td>
+                </tr>
+                <% ctr = 0 %>
+                %for request in requests:
+                    <%
+                        month = request[0]
+                        total = request[1]
+                    %>
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>${month}</td>
+                        <td>${total}</td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/reports/requests_per_user.mako b/templates/webapps/reports/requests_per_user.mako
new file mode 100644
index 0000000..d025cf8
--- /dev/null
+++ b/templates/webapps/reports/requests_per_user.mako
@@ -0,0 +1,43 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">Sequencing Requests Per User</h3>
+        <table align="center" width="60%" class="colored">
+            %if len( requests ) == 0:
+                <tr><td colspan="2">There are no requests</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">User</td>
+                    <td class="half_width">Total Requests</td>
+                </tr>
+                <% ctr = 0 %>
+                %for request in requests:
+                    <%
+                        from galaxy import util
+                        email = request[0]
+                        total = request[1]
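+                        # Look up the User row for this email so the link below can use its encoded id.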
+                        user = trans.sa_session.query( trans.model.User ) \
+                                               .filter_by( email=email ) \
+                                               .one()
+                    %>
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td><a href="${h.url_for( controller='sample_tracking', action='user_per_month', id=trans.security.encode_id( user.id ), email=util.sanitize_text( user.email ) )}">${email}</a></td>
+                        <td>${total}</td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/reports/requests_user_per_month.mako b/templates/webapps/reports/requests_user_per_month.mako
new file mode 100644
index 0000000..a1e5c2f
--- /dev/null
+++ b/templates/webapps/reports/requests_user_per_month.mako
@@ -0,0 +1,59 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+
+<%
+    from galaxy import util
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">
+            Sequencing requests per month for user
+            "${util.restore_text( email )}"
+        </h3>
+        <table align="center" width="60%" class="colored">
+            %if len( requests ) == 0:
+                <tr>
+                    <td colspan="2">
+                        There are no requests for user
+                        "${util.restore_text( email )}"
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'date', 'users', 'requests_users_per_month', 'Month')}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    <td class="half_width">Total
+                        ${get_sort_url(sort_id, order, 'num_users', 'users', 'requests_users_per_month', 'Month')}
+                        <span class='dir_arrow num_users'>${arrow}</span>
+                    </td>
+                </tr>
+                <% ctr = 0 %>
+                %for request in requests:
+                    <%
+                        month = request[0]
+                        total = request[1]
+                    %>
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>${month}</td>
+                        <td>${total}</td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/reports/run_stats.mako b/templates/webapps/reports/run_stats.mako
new file mode 100644
index 0000000..2df21c6
--- /dev/null
+++ b/templates/webapps/reports/run_stats.mako
@@ -0,0 +1,53 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+${h.js("libs/d3")}
+${parent.javascripts()}
+
+<script type="text/javascript" src="${h.url_for('/static/scripts/reports_webapp/run_stats.js')}"></script>
+<script type="text/javascript">
+$(document).ready( function(e) {
+    create_chart(${jf_hr_data}, "jf_hr_chart", "hours", "Jobs Finished per Hour");
+    create_chart(${jf_dy_data}, "jf_dy_chart", "days", "Jobs Finished per Day");
+    create_chart(${jc_hr_data}, "jc_hr_chart", "hours", "Jobs Created per Hour");
+    create_chart(${jc_dy_data}, "jc_dy_chart", "days", "Jobs Created per Day");
+    create_histogram(${et_hr_data}, "et_hr_chart", "Job Run Times (past day)");
+    create_histogram(${et_dy_data}, "et_dy_chart", "Job Run Times (past 30 days)");
+});
+</script>
+</%def>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<!--run_stats.mako-->
+<div class="report">
+    <div class="charts">
+        <div class="trim" id="tr_hr"></div>
+        <div class="hr_container" >
+            <svg class="chart hr" id="jf_hr_chart"></svg>
+        </div>
+        <div class="hr_container">
+            <svg class="chart hr" id="jc_hr_chart"></svg>
+        </div>
+        <div class="hr_container">
+            <svg class="chart hr" id="et_hr_chart"></svg>
+        </div>
+    </div>
+    <div class="charts">
+        <div class="trim" id="tr_dy"></div>
+        <div class="dy_container">
+            <svg class="chart dy" id="jf_dy_chart"></svg>
+        </div>
+        <div class="dy_container">
+            <svg class="chart dy" id="jc_dy_chart"></svg>
+        </div>
+        <div class="dy_container">
+            <svg class="chart dy" id="et_dy_chart"></svg>
+        </div>
+    </div>
+</div>
+<!--End run_stats.mako-->
diff --git a/templates/webapps/reports/system.mako b/templates/webapps/reports/system.mako
new file mode 100644
index 0000000..f1c5087
--- /dev/null
+++ b/templates/webapps/reports/system.mako
@@ -0,0 +1,118 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+    <h3 align="center">Old Histories and Datasets</h3>
+    <table id="systemForm" class="border">
+        <tr>
+            <td>
+                <form method="post" action="system">
+                    <p>
+                        <button name="action" value="userless_histories">
+                            Number of Histories
+                        </button>
+                        that are not associated with a user and were last
+                        updated more than
+                        <input type="textfield"
+                               value="${userless_histories_days}"
+                               size="3"
+                               name="userless_histories_days">
+                        days ago.
+                    </p>
+                    <p>
+                        <button name="action" value="deleted_histories">
+                            Number of Histories
+                        </button>
+                        that were deleted more than
+                        <input type="text"
+                               value="${deleted_histories_days}"
+                               size="3"
+                               name="deleted_histories_days">
+                        days ago but have not yet been purged.
+                    </p>
+                    <p>
+                        <button name="action" value="deleted_datasets">
+                            Number of Datasets
+                        </button>
+                        that were deleted more than
+                        <input type="text"
+                               value="${deleted_datasets_days}"
+                               size="3"
+                               name="deleted_datasets_days">
+                        days ago but have not yet been purged.
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <br clear="left" />
+    <h3 align="center">Current Disk Space Where Datasets are Stored</h3>
+    <table align="center" width="90%" class="colored">
+        <tr>
+            <td colspan="5">
+                <b>Disk Usage for ${file_path}</b>
+            </td>
+        </tr>
+        <tr class="header">
+            <td>File System</td>
+            <td>Disk Size</td>
+            <td>Used</td>
+            <td>Available</td>
+            <td>Percent Used</td>
+        </tr>
+        <tr class="tr">
+            <td>${disk_usage[0]}</td>
+            <td>${disk_usage[1]}</td>
+            <td>${disk_usage[2]}</td>
+            <td>${disk_usage[3]}</td>
+            <td>${disk_usage[4]}</td>
+        </tr>
+    </table>
+    <br clear="left" />
+    %if datasets.count() > 0:
+        <h3 align="center">
+            ${datasets.count()} largest unpurged data files over
+            ${file_size_str}
+        </h3>
+        <table align="center" width="90%" class="colored">
+            <tr class="header">
+                <td>File</td>
+                <td>Last Updated</td>
+                <td>Deleted</td>
+                <td>File Size</td>
+            </tr>
+            <% ctr = 0 %>
+            %for dataset in datasets:
+                %if ctr % 2 == 1:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                    <td>
+                        <% dataset_label = 'dataset_%d.dat' % dataset.id %>
+                        <a href="${h.url_for( controller='system', action='dataset_info', id=trans.security.encode_id( dataset.id ) )}">
+                            ${dataset_label}
+                        </a>
+                    </td>
+                    <td>${time_ago( dataset.update_time )}</td>
+                    <td>${dataset.deleted}</td>
+                    <td>${nice_size( dataset.file_size, True )}</td>
+                </tr>
+                <% ctr += 1 %>
+            %endfor
+        </table>
+        <br clear="left" />
+    %else:
+        <h3 align="center">
+            There are no unpurged data files larger than ${file_size_str}
+        </h3>
+    %endif
+</div>
diff --git a/templates/webapps/reports/tool_error_messages.mako b/templates/webapps/reports/tool_error_messages.mako
new file mode 100644
index 0000000..1d94e27
--- /dev/null
+++ b/templates/webapps/reports/tool_error_messages.mako
@@ -0,0 +1,88 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="toolForm">
+    <h3 align="center">${tool_name} in error's stderr message</h3>
+    <h4 align="center">Listed in 
+    %if descending == 1:
+        descending
+    %else:
+        ascending
+    %endif
+    order by number of error of each type
+    </h4>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        <tr>
+            <td>
+                <form method="post" controller="users" action="tool_error_messages?tool=${tool_name}">
+                    <p>
+                        Top <input type="textfield" value="${user_cutoff}" size="3" name="user_cutoff"> shown (0 = all).
+                        </br>
+                        Sort:
+                        <select value="${descending}" size="2" name="descending">
+                            <option value="desc"> descending </option>
+                            <option value="asc"> ascending </option>
+                        </select>
+                        by:
+                        <select value="${sort_by}" size="2" name="sort_by">
+                            <option value="time"> date of the error </option>
+                            <option value="nb"> number of identical error </option>
+                        </select>
+                        </br>
+                        <button name="action" value="commit">Sort my Data!</button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5"  style="border-collapse:collapse;">
+        %if data:
+            <tr class="header">
+                <td>Tool error</td>
+                <td>Number of errors like this</td>
+                <td>Date of this error</td>
+            </tr>
+            <% odd = False%>
+            %for error in data:
+                %if odd:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                %if len(error.split('</br>')) < 7:
+                    <td>${error}</td>
+                %else:
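+                    ## Long error texts are clipped by the .content rules below; ticking the hidden checkbox expands the full message.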
+                    <style>
+                        .content{
+                            height:100px;
+                            width:600px;
+                            overflow:hidden;
+                            text-overflow:ellipsis;}
+                        input[type='checkbox'] { visibility: hidden; position: absolute; }
+                        input[type='checkbox']:checked + .content { height: auto; width: auto;}
+                    </style>
+                    <td>
+                        <label>
+                            <input type="checkbox" />
+                            <div class="content">
+                                <span class="hidden">
+                                    ${error.replace ('</br>'.join (error.split ('</br>')[2:-3]), '...') + \
+                                    '</br>' + '-' * 25 + ' Extended error ' + '-' * 25 + '</br>' + \
+                                    error + '</br>' + '-' * 66}
+                                </span>
+                            </div>
+                        </label>
+                    </td>
+                %endif
+                <td>${data[error][0]}</td>
+                <td>${str(data[error][1])[:11]}</td>
+                <% odd = not odd %>
+            %endfor
+        %endif
+    </table>
+</div>
diff --git a/templates/webapps/reports/tool_execution_time.mako b/templates/webapps/reports/tool_execution_time.mako
new file mode 100644
index 0000000..07e034d
--- /dev/null
+++ b/templates/webapps/reports/tool_execution_time.mako
@@ -0,0 +1,79 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+
+<div class="report">
+<div class="reportBody">
+    <h3 align="center">Execution Time per Tool</h3>
+    <h4 align="center">Listed in
+    %if descending == 1:
+        descending
+    %else:
+        ascending
+    %endif
+    order by
+    %if sort_by == 0:
+        Tool
+    %elif sort_by == 1:
+        average time
+    %elif sort_by == 2:
+        min time
+    %elif sort_by == 3:
+        max time
+    %endif
+    </h4>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        <tr>
+            <td>
+                <form method="post" controller="users" action="tool_execution_time">
+                    <p>
+                        Top <input type="textfield" value="${user_cutoff}" size="3" name="user_cutoff"> shown (0 = all).
+                        </br>
+                        Sort:
+                        <select value="${sort_by}" size="4" name="sort_by">
+                            <option value="tool"> by Tool </option>
+                            <option value="avg"> average time </option>
+                            <option value="min"> min time </option>
+                            <option value="max"> max time </option>
+                        </select>
+                        <select value="${descending}" size="3" name="descending">
+                            <option value="desc"> descending </option>
+                            <option value="asc"> ascending </option>
+                        </select>
+                        <input type="checkbox" name="color" value="True">
+                            Highlight long execution times<br/>
+                        <button name="action" value="commit">Sort my Data!</button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        %if data:
+            <tr class="header">
+                <td>Tool</td>
+                <td>Average time of execution</td>
+                <td>Last job's time of execution</td>
+                <td>Min time of execution</td>
+                <td>Max time of execution</td>
+            </tr>
+            <% odd = False%>
+            %for tool in data:
+                %if odd:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                <td>
+                    <a href="tool_execution_time_per_month?tool=${tool}">${tool}</a>
+                </td>
+                <td>${data[tool]["avg"]}</td>
+                <td>${data[tool]["last"]}</td>
+                <td>${data[tool]["min"]}</td>
+                <td>${data[tool]["max"]}</td>
+                <% odd = not odd %>
+            %endfor
+        %endif
+    </table>
+</div>
+</div>
diff --git a/templates/webapps/reports/tool_execution_time_per_month.mako b/templates/webapps/reports/tool_execution_time_per_month.mako
new file mode 100644
index 0000000..0273f2e
--- /dev/null
+++ b/templates/webapps/reports/tool_execution_time_per_month.mako
@@ -0,0 +1,77 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+
+<div class="report">
+<div class="reportBody">
+    <h3 align="center">Execution Time for ${tool} per Month</h3>
+    <h4 align="center">Listed in
+    %if descending == 1:
+        descending
+    %else:
+        ascending
+    %endif
+    order by
+    %if sort_by == 0:
+        Month
+    %elif sort_by == 1:
+        min time
+    %elif sort_by == 2:
+        max time
+    %elif sort_by == 3:
+        average time
+    %endif
+    </h4>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        <tr>
+            <td>
+                <form method="post" controller="users" action="tool_execution_time_per_month?tool=${tool}">
+                    <p>
+                        Top <input type="textfield" value="${user_cutoff}" size="3" name="user_cutoff"> shown (0 = all).
+                        </br>
+                        Sort:
+                        <select value="${sort_by}" size="4" name="sort_by">
+                            <option value="month"> by Month </option>
+                            <option value="min"> min time </option>
+                            <option value="max"> max time </option>
+                            <option value="avg"> average time </option>
+                        </select>
+                        <select value="${descending}" size="3" name="descending">
+                            <option value="desc"> descending </option>
+                            <option value="asc"> ascending </option>
+                        </select>
+                        <input type="checkbox" name="color" value="True">
+                            Highlight long execution times<br/>
+                        <button name="action" value="commit">Sort my Data!</button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        %if data:
+            <tr class="header">
+                <td>Month</td>
+                <td>Min time of execution</td>
+                <td>Max time of execution</td>
+                <td>Average time of execution</td>
+            </tr>
+            <% odd = False%>
+            %for month in data:
+                %if odd:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                <td>
+                ${month}
+                </td>
+                <td>${data[month][2]}</td>
+                <td>${data[month][0]}</td>
+                <td>${data[month][1]}</td>
+                <% odd = not odd %>
+            %endfor
+        %endif
+    </table>
+</div>
+</div>
diff --git a/templates/webapps/reports/tools_and_job_state.mako b/templates/webapps/reports/tools_and_job_state.mako
new file mode 100644
index 0000000..b093f62
--- /dev/null
+++ b/templates/webapps/reports/tools_and_job_state.mako
@@ -0,0 +1,84 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+<div class="reportBody">
+    <h3 align="center">States of Jobs per Tool</h3>
+    <h4 align="center">Listed in
+    %if descending == 1:
+        descending
+    %else:
+        ascending
+    %endif
+    order by
+    %if sorting == 0:
+        Tool
+    %elif sorting == 1:
+        state Ok
+    %elif sorting == 2:
+        state error
+    %endif
+    </h4>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        <tr>
+            <td>
+                <form method="post" controller="users" action="tools_and_job_state">
+                    <p>
+                        Top <input type="textfield" value="${user_cutoff}" size="3" name="user_cutoff"> shown (0 = all).
+                        </br>
+                        Sort:
+                        <select value="${sorting}" size="3" name="sorting">
+                            <option value="tool"> by Tool </option>
+                            <option value="ok"> state ok </option>
+                            <option value="error"> state error </option>
+                        </select>
+                        <select value="${descending}" size="3" name="descending">
+                            <option value="desc"> descending </option>
+                            <option value="asc"> ascending </option>
+                        </select>
+                        <br/>
+                        <button name="action" value="commit">Sort my Data!</button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        %if data:
+            <tr class="header">
+                <td>Tool</td>
+                <td>Jobs ok</td>
+                <td>Jobs in error</td>
+            </tr>
+            <% odd = False%>
+            %for tool in data:
+                %if odd:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                <td>
+                %if data[tool][0] + data[tool][1] != "--":
+                    <a href="tools_and_job_state_per_month?tool=${tool}">${tool}</a>
+                %else:
+                    ${tool}
+                %endif
+                </td>
+                <td>${data[tool][0]}</td>
+                <td>
+                %if data[tool][1] != '-':
+                    <a href="tool_error_messages?tool=${tool}">${data[tool][1]}</a>
+                %else:
+                    -
+                %endif
+                </td>
+                <% odd = not odd %>
+            %endfor
+        %endif
+    </table>
+</div>
+</div>
diff --git a/templates/webapps/reports/tools_and_job_state_per_month.mako b/templates/webapps/reports/tools_and_job_state_per_month.mako
new file mode 100644
index 0000000..5e358a9
--- /dev/null
+++ b/templates/webapps/reports/tools_and_job_state_per_month.mako
@@ -0,0 +1,46 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<div class="report">
+<div class="reportBody">
+    <h3 align="center">States of Jobs for ${tool}</h3>
+    <h4 align="center">Listed in descending by month</h4>
+    <table align="center" width="70%" class="colored" cellpadding="5" cellspacing="5">
+        %if data:
+            <tr class="header">
+                <td>Month</td>
+                <td>Ok</td>
+                <td>Error</td>
+            </tr>
+            <% odd = False%>
+            %for month in data:
+                %if odd:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                <td>${month}</td>
+                <td>
+                %if data[month][0] == 0:
+                    -
+                %else:
+                    ${data[month][0]}
+                %endif
+                </td>
+                <td>
+                %if data[month][1] == 0:
+                    -
+                %else:
+                    ${data[month][1]}
+                %endif
+                </td>
+                <% odd = not odd %>
+            %endfor
+        %endif
+    </table>
+</div>
+</div>
diff --git a/templates/webapps/reports/users_last_access_date.mako b/templates/webapps/reports/users_last_access_date.mako
new file mode 100644
index 0000000..d4ef0a9
--- /dev/null
+++ b/templates/webapps/reports/users_last_access_date.mako
@@ -0,0 +1,78 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+
+<div class="report">
+    <h3 align="center">Date of Last Galaxy Login</h3>
+    <h4 align="center">
+        Listed in descending order by access date (oldest date first)
+    </h4>
+    <table class="lastAccessForm colored" >
+        <tr>
+            <td>
+                <form method="post"
+                      controller="users"
+                      action="last_access_date">
+                    <p>
+                        %if users:
+                            ${len( users ) }
+                        %else:
+                            0
+                        %endif
+                         users have not logged in to Galaxy for
+                        <input type="textfield"
+                               value="${days_not_logged_in}"
+                               size="3"
+                               name="days_not_logged_in">
+                        days.
+                        <input type="hidden" value=${sort_id} name="sort_id">
+                        <input type="hidden" value=${order} name="order">
+                         
+                        <button name="action" value="days_not_logged_in">
+                            Go
+                        </button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table class="lastAccessForm colored">
+        %if users:
+            <tr class="header">
+                <td class="half_width">
+                    ${get_sort_url(sort_id, order, 'zero', 'users', 'last_access_date', 'Email', days_not_logged_in=days_not_logged_in)}
+                    <span class='dir_arrow zero'>${arrow}</span>
+                </td>
+                <td class="half_width">
+                    ${get_sort_url(sort_id, order, 'one', 'users', 'last_access_date', 'Date of Last Login', days_not_logged_in=days_not_logged_in)}
+                    <span class='dir_arrow one'>${arrow}</span>
+                </td>
+            </tr>
+            <% ctr = 0 %>
+            %for user in users:
+                %if ctr % 2 == 1:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                    <td>${user[0]}</td>
+                    <td>${user[1]}</td>
+                </tr>
+                <% ctr += 1 %>
+            %endfor
+        %else:
+            <tr>
+                <td>
+                    All users have logged in to Galaxy within the past
+                    ${days_not_logged_in} days
+                </td>
+            </tr>
+        %endif
+    </table>
+</div>
diff --git a/templates/webapps/reports/users_user_disk_usage.mako b/templates/webapps/reports/users_user_disk_usage.mako
new file mode 100644
index 0000000..37dbd8c
--- /dev/null
+++ b/templates/webapps/reports/users_user_disk_usage.mako
@@ -0,0 +1,62 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+
+<!--users_user_disk_usage.mako-->
+<div class="report">
+    <h3 align="center">Per-user disk usage</h3>
+    <h4 align="center">Listed in descending order by usage size</h4>
+    <table class="colored diskUsageForm">
+        <tr>
+            <td>
+                <form method="post"
+                      controller="users"
+                      action="user_disk_usage">
+                    <p>
+                        Top <input type="textfield"
+                                   value="${user_cutoff}"
+                                   size="3"
+                                   name="user_cutoff">
+                        shown (0 = all). 
+                        <button name="action" value="user_cutoff">
+                            Go
+                        </button>
+                    </p>
+                </form>
+            </td>
+        </tr>
+    </table>
+    <table class="colored diskUsageForm">
+        %if users:
+            <tr class="header">
+                <td class="half_width">
+                    ${get_sort_url(sort_id, order, 'email', 'users', 'user_disk_usage', 'Email')}
+                    <span class='dir_arrow email'>${arrow}</span>
+                </td>
+                <td class="half_width">
+                    ${get_sort_url(sort_id, order, 'disk_usage', 'users', 'user_disk_usage', 'Disk Usage')}
+                    <span class='dir_arrow disk_usage'>${arrow}</span>
+                </td>
+            </tr>
+            <% ctr = 0 %>
+            %for user in users:
+                %if ctr % 2 == 1:
+                    <tr class="odd_row">
+                %else:
+                    <tr class="tr">
+                %endif
+                    <td>${user.email}</td>
+                    <td>${user.get_disk_usage( nice_size=True )}</td>
+                </tr>
+                <% ctr += 1 %>
+            %endfor
+        %endif
+    </table>
+</div>
+<!--End users_user_disk_usage.mako-->
diff --git a/templates/webapps/reports/workflows_per_month_all.mako b/templates/webapps/reports/workflows_per_month_all.mako
new file mode 100644
index 0000000..11aa976
--- /dev/null
+++ b/templates/webapps/reports/workflows_per_month_all.mako
@@ -0,0 +1,89 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/spark_base.mako" import="make_sparkline, make_spark_settings" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+${get_css()}
+
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'workflows', 'per_month_all')}
+                </td>
+                <td>
+                    <h3 align="center">Workflows Per Month</h3>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("workflows", "per_month_all", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+
+        <table align="center" width="60%" class="colored">
+            %if len( workflows ) == 0:
+                <tr><td colspan="4">There are no workflows</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'date', 'workflows', 'per_month_all', 'Month', page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'total_workflows', 'workflows', 'per_month_all', 'Total', page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow total_workflows'>${arrow}</span>
+                    </td>
+                    <td></td>
+                </tr>
+                <%
+                    ctr = 0
+                    entries = 1
+                %>
+                %for workflow in workflows:
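+                    ## workflow[2] and workflow[3] are joined to form the DOM id used by the sparkline cell below.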
+                    <% key = str(workflow[2]) + str(workflow[3]) %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    <%
+                        month = workflow[0]
+                        total = workflow[1]
+                    %>
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>${month}</td>
+                        <td>${total}</td>
+                        %try:
+                            ${make_sparkline(key, trends[key], "bar", "/ day")}
+                        %except KeyError:
+                        %endtry
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
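
The striping and paging logic in workflows_per_month_all.mako recurs throughout these report templates: `ctr` drives the odd/even row class, while `entries` counts rendered rows and breaks out of the loop once the page limit is reached. A minimal Python sketch of the same control flow, with `rows` and `page_limit` as illustrative stand-ins for the template's `workflows` and `page_specs.entries`:

    # Stripe rows by parity and stop at the page's entry limit.
    rows = [("2016-09", 12), ("2016-10", 30), ("2016-11", 7)]
    page_limit = 2  # stand-in for page_specs.entries

    html_rows = []
    for ctr, (month, total) in enumerate(rows):
        if ctr + 1 > page_limit:  # mirrors "%if entries > page_specs.entries: <%break%>"
            break
        css = "odd_row" if ctr % 2 == 1 else "tr"
        html_rows.append('<tr class="%s"><td>%s</td><td>%d</td></tr>' % (css, month, total))
    print("\n".join(html_rows))
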
diff --git a/templates/webapps/reports/workflows_per_user.mako b/templates/webapps/reports/workflows_per_user.mako
new file mode 100644
index 0000000..2f35c46
--- /dev/null
+++ b/templates/webapps/reports/workflows_per_user.mako
@@ -0,0 +1,103 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline, make_spark_settings" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+
+<%!
+    import re
+    from galaxy import util
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+${get_css()}
+
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'workflows', 'per_user', spark_time=time_period)}
+                </td>
+                <td>
+                    <h3 align="center">Workflows Per User</h3>
+                    <h5 align="center">
+                        Graph goes from present to past
+                        ${make_spark_settings("jobs", "per_user", spark_limit, sort_id, order, time_period, page=page, offset=offset, entries=entries)}
+                    </h5>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("workflows", "per_user", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+
+        <table align="center" width="60%" class="colored">
+            %if len( workflows ) == 0:
+                <tr><td colspan="2">There are no workflows</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="half_width">
+                        ${get_sort_url(sort_id, order, 'user_email', 'workflows', 'per_user', 'User', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow user_email'>${arrow}</span>
+                    </td>
+                    <td class="third_width">
+                        ${get_sort_url(sort_id, order, 'total_workflows', 'workflows', 'per_user', 'Total Workflows', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow total_workflows'>${arrow}</span>
+                    </td>
+                    <td></td>
+                </tr>
+                <%
+                   ctr = 0
+                   entries = 1
+                %>
+                %for workflow in workflows:
+                    <%
+                        email = workflow[0]
+                        total = workflow[1]
+                        user = trans.sa_session.query( trans.model.User ) \
+                                               .filter_by( email=email ) \
+                                               .one()
+                        key = re.sub(r'\W+', '', workflow[0])
+                    %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>
+                            <a href="${h.url_for( controller='workflows', action='user_per_month', id=trans.security.encode_id( user.id ), email=util.sanitize_text( user.email ), sort_id='default', order='default')}">
+                                ${email}
+                            </a>
+                        </td>
+                        <td>${total}</td>
+                        %try:
+                            ${make_sparkline(key, trends[key], "bar", "/ " + time_period[:-1])}
+                        %except KeyError:
+                        %endtry
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
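
Two small conventions in workflows_per_user.mako are worth spelling out: the sparkline element id is the user email with every non-word character stripped, and the per-unit label drops the trailing character of `time_period` to singularize it. A sketch of both, assuming `time_period` is a plural such as "days" or "weeks":

    import re

    def spark_key(email):
        # Mirrors re.sub(r'\W+', '', workflow[0]): strip non-word characters
        # so the email can serve as an HTML element id.
        return re.sub(r'\W+', '', email)

    def spark_label(time_period):
        # Mirrors "/ " + time_period[:-1]: naive singularization by dropping
        # the final "s" (assumes plural period names).
        return "/ " + time_period[:-1]

    print(spark_key("alice@example.org"))  # -> aliceexampleorg
    print(spark_label("days"))             # -> / day
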
diff --git a/templates/webapps/reports/workflows_per_workflow.mako b/templates/webapps/reports/workflows_per_workflow.mako
new file mode 100644
index 0000000..0426ae6
--- /dev/null
+++ b/templates/webapps/reports/workflows_per_workflow.mako
@@ -0,0 +1,97 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline, make_spark_settings" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+<%namespace file="/page_base.mako" import="get_pages, get_entry_selector" />
+<%!
+    import re
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+<%
+    page = page_specs.page
+    offset = page_specs.offset
+    entries = page_specs.entries
+%>
+
+${get_css()}
+
+<!--workflows_per_workflow.mako-->
+<div class="report">
+    <div class="reportBody">
+        <table id="formHeader">
+            <tr>
+                <td>
+                    ${get_pages(sort_id, order, page_specs, 'workflows', 'per_workflow', spark_time=time_period)}
+                </td>
+                <td>
+                    <h4 align="center">Runs per Workflow</h4>
+                    <h5 align="center">
+                        Graph goes from present to past
+                        ${make_spark_settings('workflows', 'per_workflow', spark_limit, sort_id, order, time_period, page=page, offset=offset, entries=entries)}
+                    </h5>
+                </td>
+                <td align="right">
+                    ${get_entry_selector("workflows", "per_workflow", page_specs.entries, sort_id, order)}
+                </td>
+            </tr>
+        </table>
+
+        <table align="center" width="60%" class="colored">
+            %if len( runs ) == 0:
+                <tr><td colspan="2">There are no runs.</td></tr>
+            %else:
+                <tr class="header">
+                    <td class="quarter_width">
+                        ${get_sort_url(sort_id, order, 'workflow_id', 'workflows', 'per_workflow', 'Workflow ID', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow workflow_id'>${arrow}</span>
+                    </td>
+                    <td class="quarter_width">
+                        ${get_sort_url(sort_id, order, 'workflow_name', 'workflows', 'per_workflow', 'Workflow Name', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow workflow_name'>${arrow}</span>
+                    </td>
+                    <td class="quarter_width">
+                        ${get_sort_url(sort_id, order, 'total_runs', 'workflows', 'per_workflow', 'Workflow Runs', spark_time=time_period, page=page, offset=offset, entries=entries)}
+                        <span class='dir_arrow total_runs'>${arrow}</span>
+                    </td>
+                    <td></td>
+                </tr>
+                <%
+                   ctr = 0
+                   entries = 1
+                %>
+                %for run in runs:
+                    <% key = re.sub(r'\W+', '', str(run[2])) %>
+
+                    %if entries > page_specs.entries:
+                        <%break%>
+                    %endif
+
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+
+                        <td>${run[2]}</td>
+                        <td>${run[0]}</td>
+                        <td>${run[1]}</td>
+                        %try:
+                            ${make_sparkline(key, trends[key], "bar", "/ " + time_period[:-1])}
+                        %except KeyError:
+                        %endtry
+                        <td id="${key}"></td>
+                    </tr>
+                    <%
+                       ctr += 1
+                       entries += 1
+                    %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
+<!--End workflows_per_workflow.mako-->
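
workflows_per_workflow.mako, like the other sparkline templates, tolerates runs with no trend data: the `make_sparkline` call is wrapped in Mako's `%try`/`%except KeyError`, so a missing key simply yields an empty cell. The same guard in plain Python, with a hypothetical `trends` dict:

    # KeyError-tolerant sparkline lookup: a run without a trend series
    # renders an empty cell instead of raising.
    trends = {"wf1": [3, 0, 5], "wf2": [1, 1]}

    def sparkline_cell(key):
        try:
            series = trends[key]
        except KeyError:
            return "<td></td>"
        return '<td id="%s"><!-- sparkline over %r --></td>' % (key, series)

    print(sparkline_cell("wf1"))
    print(sparkline_cell("missing"))
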
diff --git a/templates/webapps/reports/workflows_user_per_month.mako b/templates/webapps/reports/workflows_user_per_month.mako
new file mode 100644
index 0000000..a35ae30
--- /dev/null
+++ b/templates/webapps/reports/workflows_user_per_month.mako
@@ -0,0 +1,67 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/spark_base.mako" import="make_sparkline" />
+<%namespace file="/sorting_base.mako" import="get_sort_url, get_css" />
+
+<%
+    from galaxy import util
+%>
+
+%if message:
+    ${render_msg( message, 'done' )}
+%endif
+
+${get_css()}
+
+<%
+   _email = util.restore_text( email )
+%>
+
+<div class="report">
+    <div class="reportBody">
+        <h3 align="center">Workflows per month for user "${_email}"</h3>
+        <h4 align="center">
+            <p>Graph goes from first of the month to the last</p>
+        </h4>
+        <table align="center" width="60%" class="colored">
+            %if len( workflows ) == 0:
+                <tr>
+                    <td colspan="2">
+                        There are no workflows for user "${_email}"
+                    </td>
+                </tr>
+            %else:
+                <tr class="header">
+                    <td class="third_width">
+                        ${get_sort_url(sort_id, order, 'date', 'workflows', 'user_per_month', 'Month', email=util.sanitize_text( email ))}
+                        <span class='dir_arrow date'>${arrow}</span>
+                    </td>
+                    <td class="third_width">
+                        ${get_sort_url(sort_id, order, 'total_workflows', 'workflows', 'user_per_month', 'Total', email=util.sanitize_text( email ))}
+                        <span class='dir_arrow total_workflows'>${arrow}</span>
+                    </td>
+                    <td></td>
+                </tr>
+                <% ctr = 0 %>
+                %for workflow in workflows:
+                    <%
+                        key = str(workflow[2]) + str(workflow[3])
+                        month = workflow[0]
+                        total = workflow[1]
+                    %>
+                    %if ctr % 2 == 1:
+                        <tr class="odd_row">
+                    %else:
+                        <tr class="tr">
+                    %endif
+                        <td>${month}</td>
+                        <td>${total}</td>
+                        ${make_sparkline(key, trends[key], "bar", "/ day")}
+                        <td id="${key}"></td>
+                    </tr>
+                    <% ctr += 1 %>
+                %endfor
+            %endif
+        </table>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/admin/center.mako b/templates/webapps/tool_shed/admin/center.mako
new file mode 100644
index 0000000..038281f
--- /dev/null
+++ b/templates/webapps/tool_shed/admin/center.mako
@@ -0,0 +1,50 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+
+<%def name="title()">Galaxy Administration</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<h2>Administration</h2>
+
+<p>The menu on the left provides the following features:</p>
+<ul>
+    <li>
+        <strong>Categories</strong>
+        <p/>
+        <ul>
+            <li>
+                <strong>Manage categories</strong>
+            </li>
+            <p/>
+        </ul>
+    </li>
+    <li>
+        <strong>Security</strong>
+        <p/>
+        <ul>
+            <li>
+                <strong>Manage users</strong> - provides a view of the registered users and all groups and non-private roles associated 
+                with each user.  
+            </li>
+            <p/>
+            <li>
+                <strong>Manage groups</strong> - provides a view of all groups along with the members of the group and the roles associated with
+                each group (both private and non-private roles).  The group names include a link to a page that allows you to manage the users and 
+                roles that are associated with the group.
+            </li>
+            <p/>
+            <li>
+                <strong>Manage roles</strong> - provides a view of all non-private roles along with the role type, and the users and groups that
+                are associated with the role.  The role names include a link to a page that allows you to manage the users and groups that are associated 
+                with the role.  The page also includes a view of the data library datasets that are associated with the role and the permissions applied 
+                to each dataset.
+            </li>
+        </ul>
+    </li>
+    <p/>
+</ul>
+<br/>
diff --git a/templates/webapps/tool_shed/admin/index.mako b/templates/webapps/tool_shed/admin/index.mako
new file mode 100644
index 0000000..6b8a1e4
--- /dev/null
+++ b/templates/webapps/tool_shed/admin/index.mako
@@ -0,0 +1,118 @@
+<%inherit file="/webapps/tool_shed/base_panels.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="stylesheets()">
+    ## Include "base.css" for styling tool menu and forms (details)
+    ${h.css( "base", "autocomplete_tagging" )}
+
+    ## But make sure styles for the layout take precedence
+    ${parent.stylesheets()}
+
+    <style type="text/css">
+        body { margin: 0; padding: 0; overflow: hidden; }
+        #left {
+            background: #C1C9E5 url(${h.url_for('/static/style/menu_bg.png')}) top repeat-x;
+        }
+        .unified-panel-body {
+            overflow: auto;
+        }
+        .toolMenu {
+            margin-left: 10px;
+        }
+    </style>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="init()">
+    <%
+        self.has_left_panel=True
+        self.has_right_panel=False
+        self.active_view="tools"
+    %>
+    %if trans.app.config.require_login and not trans.user:
+        <script type="text/javascript">
+            if ( window != top ) {
+                top.location.href = location.href;
+            }
+        </script>
+    %endif
+</%def>
+
+<%def name="left_panel()">
+    <% can_review_repositories = trans.app.security_agent.user_can_review_repositories( trans.user ) %>
+    <div class="unified-panel-header" unselectable="on">
+        <div class='unified-panel-header-inner'>Administration</div>
+    </div>
+    <div class="unified-panel-body">
+        <div class="toolMenu">
+            <div class="toolSectionList">
+                <div class="toolSectionTitle">
+                    Repositories
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_categories' )}">Browse by category</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repositories' )}">Browse all repositories</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories_in_tool_shed' )}">Reset selected metadata</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repository_metadata' )}">Browse metadata</a>
+                        </div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    Categories
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='admin', action='manage_categories' )}">Manage categories</a>
+                        </div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    Security
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolSectionBg">
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='admin', action='users' )}">Manage users</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='admin', action='groups' )}">Manage groups</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='admin', action='roles' )}">Manage roles</a>
+                        </div>
+                    </div>
+                </div>
+                <div class="toolSectionPad"></div>
+                <div class="toolSectionTitle">
+                    Statistics
+                </div>
+                <div class="toolSectionBody">
+                    <div class="toolTitle">
+                        <a target="galaxy_main" href="${h.url_for( controller='admin', action='regenerate_statistics' )}">View shed statistics</a>
+                    </div>
+                </div>
+            </div>
+        </div>    
+    </div>
+</%def>
+
+<%def name="center_panel()">
+    <%
+        center_url = h.url_for(controller='admin', action='center' )
+    %>
+    <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"> </iframe>
+</%def>
diff --git a/templates/webapps/tool_shed/admin/statistics.mako b/templates/webapps/tool_shed/admin/statistics.mako
new file mode 100644
index 0000000..d0aa9f1
--- /dev/null
+++ b/templates/webapps/tool_shed/admin/statistics.mako
@@ -0,0 +1,64 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Tool shed statistics generated on ${trans.app.shed_counter.generation_time}</div>
+        <form name="regenerate_statistics" id="regenerate_statistics" action="${h.url_for( controller='admin', action='regenerate_statistics' )}" method="post" >
+            <div class="form-row">
+                <table class="grid">
+                    <tr>
+                        <th>Item</th>
+                        <th>Count</th>
+                    </tr>
+                    <tr>
+                        <td>Total repositories</td>
+                        <td>${trans.app.shed_counter.repositories | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Unique owners</td>
+                        <td>${trans.app.shed_counter.unique_owners | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Deprecated repositories</td>
+                        <td>${trans.app.shed_counter.deprecated_repositories | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Deleted repositories</td>
+                        <td>${trans.app.shed_counter.deleted_repositories | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Valid tools</td>
+                        <td>${trans.app.shed_counter.unique_valid_tools | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Valid versions of tools</td>
+                        <td>${trans.app.shed_counter.valid_versions_of_tools | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Invalid versions of tools</td>
+                        <td>${trans.app.shed_counter.invalid_versions_of_tools | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Exported Galaxy workflows</td>
+                        <td>${trans.app.shed_counter.workflows | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Custom datatypes</td>
+                        <td>${trans.app.shed_counter.custom_datatypes | h}</td>
+                    </tr>
+                    <tr>
+                        <td>Total clones</td>
+                        <td>${trans.app.shed_counter.total_clones | h}</td>
+                    </tr>
+                </table>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="regenerate_statistics_button" value="Regenerate statistics"/>
+            </div>
+        </form>
+    </div>
+</div>
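
statistics.mako renders one table row per attribute of `trans.app.shed_counter`, each value HTML-escaped with the `| h` filter. A sketch of the same table body, with `ShedCounter` as a hypothetical stand-in for the real counter object:

    import cgi  # Python 2-era escaping, matching this release

    class ShedCounter(object):  # hypothetical stand-in
        repositories = 1200
        unique_owners = 340

    counter = ShedCounter()
    rows = [("Total repositories", "repositories"),
            ("Unique owners", "unique_owners")]
    for label, attr in rows:
        value = cgi.escape(str(getattr(counter, attr)))  # mirrors the "| h" filter
        print("<tr><td>%s</td><td>%s</td></tr>" % (label, value))
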
diff --git a/templates/webapps/tool_shed/base_panels.mako b/templates/webapps/tool_shed/base_panels.mako
new file mode 100644
index 0000000..8bfcde8
--- /dev/null
+++ b/templates/webapps/tool_shed/base_panels.mako
@@ -0,0 +1,178 @@
+<%inherit file="/base/base_panels.mako"/>
+
+## Default title
+<%def name="title()">Galaxy Tool Shed</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(document).ready( function() {
+
+            // Masthead dropdown menus
+            var $dropdowns = $("#masthead ul.nav > li.dropdown > .dropdown-menu");
+            $("body").on( "click.nav_popups", function( e ) {
+                $dropdowns.hide();
+                $("#dd-helper").hide();
+                // If the target is in the menu, treat normally
+                if ( $(e.target).closest( "#masthead ul.nav > li.dropdown > .dropdown-menu" ).length ) {
+                    return;
+                }
+                // Otherwise, was the click in a tab
+                var $clicked = $(e.target).closest( "#masthead ul.nav > li.dropdown" );
+                if ( $clicked.length ) {
+                    $("#dd-helper").show();
+                    $clicked.children( ".dropdown-menu" ).show();
+                    e.preventDefault();
+                }
+            });
+        });
+    </script>
+</%def>
+
+## Masthead
+<%def name="masthead()">
+
+    %if app.config.ga_code:
+        <script>
+          (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+          (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+          m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+          })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+          ga('create', '${app.config.ga_code}', 'auto');
+          ga('send', 'pageview');
+        </script>
+    %endif
+
+    ## start main tag
+    <div id="masthead" class="navbar navbar-fixed-top navbar-inverse">
+
+    ## Tab area, fills entire width
+    <div style="position: relative; right: -50%; float: left;">
+        <div style="display: block; position: relative; right: 50%;">
+
+            <ul class="nav navbar-nav" border="0" cellspacing="0">
+    
+                <%def name="tab( id, display, href, target='_parent', visible=True, extra_class='', menu_options=None )">
+                    <%
+                    cls = ""
+                    a_cls = ""
+                    extra = ""
+                    if extra_class:
+                        cls += " " + extra_class
+                    if self.active_view == id:
+                        cls += " active"
+                    if menu_options:
+                        cls += " dropdown"
+                        a_cls += " dropdown-toggle"
+                        extra = "<b class='caret'></b>"
+                    style = ""
+                    if not visible:
+                        style = "display: none;"
+                    %>
+                    <li class="${cls}" style="${style}">
+                        %if href:
+                            <a class="${a_cls}"  target="${target}" href="${href}">${display}${extra}</a>
+                        %else:
+                            <a class="${a_cls}" >${display}${extra}</a>
+                        %endif
+                        %if menu_options:
+                            <ul class="dropdown-menu">
+                                %for menu_item in menu_options:
+                                    %if not menu_item:
+                                        <li class="divider"></li>
+                                    %else:
+                                        <li>
+                                        %if len ( menu_item ) == 1:
+                                            ${menu_item[0]}
+                                        %elif len ( menu_item ) == 2:
+                                            <% name, link = menu_item %>
+                                            <a href="${link}">${name | h}</a>
+                                        %else:
+                                            <% name, link, target = menu_item %>
+                                            <a target="${target}" href="${link}">${name | h}</a>
+                                        %endif
+                                        </li>
+                                    %endif
+                                %endfor
+                            </ul>
+                        %endif
+                    </li>
+                </%def>
+
+                ## Repositories tab.
+                ${tab( "repositories", "Repositories", h.url_for( controller='/repository', action='index' ) )}
+                
+                ## Groups tab.
+                ${tab( "groups", "Groups", h.url_for( controller='/groups', action='index' ) )}
+
+                ## Admin tab.
+                ${tab( "admin", "Admin", h.url_for( controller='/admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )}
+
+                ## Help tab.
+                <%
+                    menu_options = []
+                    qa_url = app.config.get( "qa_url", None )
+                    if qa_url:
+                        menu_options = [ [_('Galaxy Q&A'), qa_url, "_blank" ] ]
+                    menu_options.extend( [
+                        [_('Tool Shed Wiki'), app.config.get( "wiki_url", "https://wiki.galaxyproject.org/ToolShed" ), "_blank" ],
+                        [_('Support'), app.config.get( "support_url", "https://wiki.galaxyproject.org/Support" ), "_blank" ],
+                        [_('Search'), app.config.get( "search_url", "http://galaxyproject.org/search/usegalaxy/" ), "_blank" ],
+                        [_('Mailing Lists'), app.config.get( "mailing_lists_url", "https://wiki.galaxyproject.org/MailingLists" ), "_blank" ],
+                        [_('Videos'), app.config.get( "screencasts_url", "https://vimeo.com/galaxyproject" ), "_blank" ],
+                        [_('Wiki'), app.config.get( "wiki_url", "http://galaxyproject.org/" ), "_blank" ],
+                        [_('How to Cite Galaxy'), app.config.get( "citation_url", "https://wiki.galaxyproject.org/CitingGalaxy" ), "_blank" ]
+                    ] )
+                    tab( "help", _("Help"), None, menu_options=menu_options )
+                %>
+
+                ## User tabs.
+                <%
+                    from markupsafe import escape 
+                    # Menu for user who is not logged in.
+                    menu_options = [ [ _("Login"), h.url_for( controller='/user', action='login' ), "galaxy_main" ] ]
+                    if app.config.allow_user_creation:
+                        menu_options.append( [ _("Register"), h.url_for( controller='/user', action='create', cntrller='user' ), "galaxy_main" ] ) 
+                    extra_class = "loggedout-only"
+                    visible = ( trans.user is None )
+                    tab( "user", _("User"), None, visible=visible, menu_options=menu_options )
+                    # Menu for user who is logged in.
+                    if trans.user:
+                        email = escape( trans.user.email )
+                    else:
+                        email = ""
+                    menu_options = [ [ '<a>Logged in as <span id="user-email">%s</span></a>' %  email ] ]
+                    if app.config.use_remote_user:
+                        if app.config.remote_user_logout_href:
+                            menu_options.append( [ _('Logout'), app.config.remote_user_logout_href, "_top" ] )
+                    else:
+                        menu_options.append( [ _('Preferences'), h.url_for( controller='/user', action='index', cntrller='user' ), "galaxy_main" ] )
+                        menu_options.append( [ _('API Keys'), h.url_for( controller='/user', action='api_keys', cntrller='user' ), "galaxy_main" ] )
+                        logout_url = h.url_for( controller='/user', action='logout' )
+                        menu_options.append( [ 'Logout', logout_url, "_top" ] )
+                        menu_options.append( None )
+                    if app.config.use_remote_user:
+                        menu_options.append( [ _('Public Name'), h.url_for( controller='/user', action='edit_username', cntrller='user' ), "galaxy_main" ] )
+            
+                    extra_class = "loggedin-only"
+                    visible = ( trans.user is not None )
+                    tab( "user", "User", None, visible=visible, menu_options=menu_options )
+                %>
+            </ul>
+        </div>
+    </div>
+    
+    ## Logo, layered over tabs to be clickable
+    <div class="navbar-brand">
+        <a href="${h.url_for( app.config.get( 'logo_url', '/' ) )}">
+        <img style="margin-left: 0.35em;" border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}">
+        Galaxy Tool Shed
+        %if app.config.brand:
+            <span>/ ${app.config.brand}</span>
+        %endif
+        </a>
+    </div>
+    
+    ## end main tag
+    </div>
+</%def>
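
The `tab()` def in base_panels.mako dispatches on the shape of each `menu_options` item: a falsy item becomes a divider, a 1-item entry is emitted as raw HTML, a 2-item entry is a name/link pair, and a 3-item entry adds a link target. A sketch of that dispatch in plain Python, using items shaped like the lists built above:

    # Dispatch on menu item shape: falsy -> divider, 1 -> raw HTML,
    # 2 -> (name, link), 3 -> (name, link, target).
    def render_menu_item(menu_item):
        if not menu_item:
            return '<li class="divider"></li>'
        if len(menu_item) == 1:
            return "<li>%s</li>" % menu_item[0]
        if len(menu_item) == 2:
            name, link = menu_item
            return '<li><a href="%s">%s</a></li>' % (link, name)
        name, link, target = menu_item
        return '<li><a target="%s" href="%s">%s</a></li>' % (target, link, name)

    print(render_menu_item(["Support", "https://wiki.galaxyproject.org/Support", "_blank"]))
    print(render_menu_item(None))
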
diff --git a/templates/webapps/tool_shed/category/create_category.mako b/templates/webapps/tool_shed/category/create_category.mako
new file mode 100644
index 0000000..4ea1303
--- /dev/null
+++ b/templates/webapps/tool_shed/category/create_category.mako
@@ -0,0 +1,34 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create Category</div>
+    <div class="toolFormBody">
+        <form name="create_category_form" id="create_category_form" action="${h.url_for(controller='admin', action='create_category' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <input  name="name" type="textfield" value="${name | h}" size=40"/>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <input  name="description" type="textfield" value="${description | h}" size=40"/>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="create_category_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/category/edit_category.mako b/templates/webapps/tool_shed/category/edit_category.mako
new file mode 100644
index 0000000..896aa0e
--- /dev/null
+++ b/templates/webapps/tool_shed/category/edit_category.mako
@@ -0,0 +1,43 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Change category name and description</div>
+    <div class="toolFormBody">
+        <form name="edit_category" action="${h.url_for( controller='admin', action='edit_category' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="text" name="name" value="${category.name | h}" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input  name="description" type="textfield" value="${category.description | h}" size=40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="rename" value="submitted"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="id" value="${trans.security.encode_id( category.id )}"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="edit_category_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/category/grid.mako b/templates/webapps/tool_shed/category/grid.mako
new file mode 100644
index 0000000..e56a03b
--- /dev/null
+++ b/templates/webapps/tool_shed/category/grid.mako
@@ -0,0 +1,13 @@
+<%inherit file="/base.mako"/>
+<%namespace name="grid_base" file="/grid_base.mako" import="*" />
+<%namespace name="grid_common" file="../common/grid_common.mako" import="*" />
+
+<%def name="insert()">
+    <%
+        from tool_shed.grids.repository_grids import RepositoryGrid
+        repo_grid = RepositoryGrid()
+        grid_common.render_grid_filters(repo_grid)
+    %>
+</%def>
+
+${grid_base.load(False, capture(self.insert))}
diff --git a/templates/webapps/tool_shed/category/valid_grid.mako b/templates/webapps/tool_shed/category/valid_grid.mako
new file mode 100644
index 0000000..2872e12
--- /dev/null
+++ b/templates/webapps/tool_shed/category/valid_grid.mako
@@ -0,0 +1,13 @@
+<%inherit file="/base.mako"/>
+<%namespace name="grid_base" file="/grid_base.mako" import="*" />
+<%namespace name="grid_common" file="../common/grid_common.mako" import="*" />
+
+<%def name="insert()">
+<%
+    from tool_shed.grids.repository_grids import ValidRepositoryGrid
+    repo_grid = ValidRepositoryGrid()
+    grid_common.render_grid_filters(repo_grid)
+%>
+</%def>
+
+${grid_base.load(False, capture(self.insert))}
diff --git a/templates/webapps/tool_shed/common/common.mako b/templates/webapps/tool_shed/common/common.mako
new file mode 100644
index 0000000..cd2c640
--- /dev/null
+++ b/templates/webapps/tool_shed/common/common.mako
@@ -0,0 +1,135 @@
+<%def name="common_misc_javascripts()">
+    <script type="text/javascript">
+        function checkAllFields( chkAll, name )
+        {
+            var checks = document.getElementsByTagName( 'input' );
+            var boxLength = checks.length;
+            var allChecked = false;
+            var totalChecked = 0;
+            if ( chkAll.checked == true )
+            {
+                for ( i=0; i < boxLength; i++ )
+                {
+                    if ( checks[ i ].name.indexOf( name ) != -1 )
+                    {
+                       checks[ i ].checked = true;
+                    }
+                }
+            }
+            else
+            {
+                for ( i=0; i < boxLength; i++ )
+                {
+                    if ( checks[ i ].name.indexOf( name ) != -1 )
+                    {
+                       checks[ i ].checked = false;
+                    }
+                }
+            }
+        }
+
+        function checkAllRepositoryIdFields()
+        {
+            var chkAll = document.getElementById( 'checkAll' );
+            var name = 'repository_ids';
+            checkAllFields( chkAll, name );
+        }
+
+        function checkAllInstalledToolDependencyIdFields()
+        {
+            var chkAll = document.getElementById( 'checkAllInstalled' );
+            var name = 'inst_td_ids';
+            checkAllFields( chkAll, name );
+        }
+
+        function checkAllUninstalledToolDependencyIdFields()
+        {
+            var chkAll = document.getElementById( 'checkAllUninstalled' );
+            var name = 'uninstalled_tool_dependency_ids';
+            checkAllFields( chkAll, name );
+        }
+    </script>
+</%def>
+
+<%def name="render_deprecated_repository_dependencies_message( deprecated_repository_dependency_tups )">
+    <div class="warningmessage">
+        <%
+            from tool_shed.util.common_util import parse_repository_dependency_tuple
+            msg = '<ul>'
+            for deprecated_repository_dependency_tup in deprecated_repository_dependency_tups:
+                toolshed, name, owner, changeset_revision, pir, oicct = \
+                parse_repository_dependency_tuple( deprecated_repository_dependency_tup )
+                msg += '<li>Revision <b>%s</b> of repository <b>%s</b> owned by <b>%s</b></li>' % \
+                    ( changeset_revision, name, owner )
+            msg += '</ul>'
+        %>
+        This repository depends upon the following deprecated repositories:<br/>
+        ${msg}
+    </div>
+</%def>
+
+<%def name="render_star_rating( name, rating, disabled=False )">
+    <%
+        if disabled:
+            disabled_str = ' disabled="disabled"'
+        else:
+            disabled_str = ''
+        html = ''
+        for index in range( 1, 6 ):
+            html += '<input name="%s" type="radio" class="star" value="%s" %s' % ( str( name ), str( index ), disabled_str )
+            if rating > ( index - 0.5 ) and rating < ( index + 0.5 ):
+                html += ' checked="checked"'
+            html += '/>'
+    %>
+    ${html}
+</%def>
+
+<%def name="render_long_description( description_text )">
+    <style type="text/css">
+        #description_table{ table-layout:fixed;
+                            width:100%;
+                            overflow-wrap:normal;
+                            overflow:hidden;
+                            border:0px; 
+                            word-break:keep-all;
+                            word-wrap:break-word;
+                            line-break:strict; }
+    </style>
+    <div class="form-row">
+        <label>Detailed description:</label>
+        <table id="description_table">
+            <tr><td>${description_text}</td></tr>
+        </table>
+        <div style="clear: both"></div>
+    </div>
+</%def>
+
+<%def name="render_multiple_heads_message( heads )">
+    <div class="warningmessage">
+        <%
+            from tool_shed.util.hg_util import get_revision_label_from_ctx
+            heads_str = ''
+            for ctx in heads:
+                heads_str += '%s<br/>' % get_revision_label_from_ctx( ctx, include_date=True )
+        %>
+        Contact the administrator of this Tool Shed as soon as possible and let them know that
+        this repository has the following multiple heads, which must be merged:<br/>
+        ${heads_str}
+    </div>
+</%def>
+
+<%def name="render_review_comment( comment_text )">
+    <style type="text/css">
+        #reviews_table{ table-layout:fixed;
+                        width:100%;
+                        overflow-wrap:normal;
+                        overflow:hidden;
+                        border:0px; 
+                        word-break:keep-all;
+                        word-wrap:break-word;
+                        line-break:strict; }
+    </style>
+    <table id="reviews_table">
+        <tr><td>${comment_text}</td></tr>
+    </table>
+</%def>
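
`render_star_rating` pre-checks the radio whose index lies strictly within half a star of the (possibly fractional) average rating; a rating exactly halfway between two stars checks neither. A sketch of that selection rule:

    # Radio `index` is checked when the rating falls strictly within
    # half a star of it, matching the template's open-interval test.
    def checked_star(rating):
        for index in range(1, 6):
            if (index - 0.5) < rating < (index + 0.5):
                return index
        return None  # e.g. rating == 2.5 sits exactly between stars

    print(checked_star(3.2))  # -> 3
    print(checked_star(2.5))  # -> None
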
diff --git a/templates/webapps/tool_shed/common/grid_common.mako b/templates/webapps/tool_shed/common/grid_common.mako
new file mode 100644
index 0000000..627e19a
--- /dev/null
+++ b/templates/webapps/tool_shed/common/grid_common.mako
@@ -0,0 +1,187 @@
+<%!
+    from galaxy.web.framework.helpers.grids import TextColumn, StateColumn, GridColumnFilter
+    from galaxy.web.framework.helpers import iff
+%>
+
+## Render a filter UI for a grid column. Filter is rendered as a table row.
+<%def name="render_grid_column_filter( grid, column )">
+    <tr>
+        <%
+            column_label = column.label
+            if column.filterable == "advanced":
+                column_label = column_label.lower()
+        %>
+        %if column.filterable == "advanced":
+            <td align="left" style="padding-left: 10px">${column_label}:</td>
+        %endif
+        <td style="padding: 0;">
+            %if isinstance(column, TextColumn):
+                <form class="text-filter-form" column_key="${column.key}" action="${url(dict())}" method="get" >
+                    ## Carry forward filtering criteria with hidden inputs.
+                    %for temp_column in grid.columns:
+                        %if temp_column.key in cur_filter_dict:
+                            <% value = cur_filter_dict[ temp_column.key ] %>
+                            %if value != "All":
+                                <%
+                                    if isinstance( temp_column, TextColumn ):
+                                        value = h.dumps( value )
+                                %>
+                                <input type="hidden" id="${temp_column.key}" name="f-${temp_column.key}" value='${value}'/>
+                            %endif
+                        %endif
+                    %endfor
+                    ## Print current filtering criteria and links to delete.
+                    <span id="${column.key}-filtering-criteria">
+                        %if column.key in cur_filter_dict:
+                            <% column_filter = cur_filter_dict[column.key] %>
+                            %if isinstance( column_filter, basestring ):
+                                %if column_filter != "All":
+                                    <span class='text-filter-val'>
+                                        ${cur_filter_dict[column.key]}
+                                        <% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+                                        <a href="${url(filter_all.get_url_args())}"><span class="delete-search-icon" /></a>
+                                    </span>
+                                %endif
+                            %elif isinstance( column_filter, list ):
+                                %for i, filter in enumerate( column_filter ):
+                                    %if i > 0:
+                                        ,
+                                    %endif
+                                    <span class='text-filter-val'>${filter}
+                                        <%
+                                            new_filter = list( column_filter )
+                                            del new_filter[ i ]
+                                            new_column_filter = GridColumnFilter( "", { column.key : h.dumps( new_filter ) } )
+                                        %>
+                                        <a href="${url(new_column_filter.get_url_args())}"><span class="delete-search-icon" /></a>
+                                    </span>
+                                %endfor
+                            %endif
+                        %endif
+                    </span>
+                    ## Print input field for column.
+                    <span class="search-box">
+                        <% 
+                            # Set value, size of search input field. Minimum size is 20 characters.
+                            value = iff( column.filterable == "standard", column.label.lower(), "") 
+                            size = len( value )
+                            if size < 20:
+                                size = 20
+                            # +4 to account for search icon/button.
+                            size = size + 4
+                        %>
+                        <input class="search-box-input" id="input-${column.key}-filter" name="f-${column.key}" type="text" value="${value}" size="${size}"/>
+                        <button class="submit-image" type="submit" title='Search'><span style="display: none;"></button>
+                    </span>
+                </form>
+            %else:
+                <span id="${column.key}-filtering-criteria">
+                    %for i, filter in enumerate( column.get_accepted_filters() ):
+                        <% 
+                            # HACK: we know that each filter will have only a single argument, so get that single argument.
+                            for key, arg in filter.args.items():
+                                filter_key = key
+                                filter_arg = arg
+                        %>
+                        %if i > 0:
+                            |
+                        %endif
+                        %if column.key in cur_filter_dict and column.key in filter.args and cur_filter_dict[column.key] == filter.args[column.key]:
+                            <span class="categorical-filter ${column.key}-filter current-filter">${filter.label}</span>
+                        %else:
+                            <span class="categorical-filter ${column.key}-filter">
+                                <a href="${url(filter.get_url_args())}" filter_key="${filter_key}" filter_val="${filter_arg}">${filter.label}</a>
+                            </span>
+                        %endif
+                    %endfor
+                </span>
+            %endif
+        </td>
+    </tr>
+</%def>
+
+## Print grid search/filtering UI.
+<%def name="render_grid_filters( grid, render_advanced_search=True )">
+    <%
+        # Show advanced search if flag set or if there are filters for advanced search fields.
+        advanced_search_display = "none"
+        if 'advanced-search' in kwargs and kwargs['advanced-search'] in ['True', 'true']:
+            advanced_search_display = "block"
+
+        for column in grid.columns:
+            if column.filterable == "advanced":
+                ## Show div if current filter has value that is different from the default filter.
+                if column.key in cur_filter_dict and column.key in default_filter_dict and \
+                    cur_filter_dict[column.key] != default_filter_dict[column.key]:
+                        advanced_search_display = "block"
+
+        # do not show standard search if showing adv.
+        standard_search_display = "block"
+        if advanced_search_display == "block":
+            standard_search_display = "none"
+    %>
+    ## Standard search.
+    <div id="standard-search" style="display: ${standard_search_display};">
+        <table>
+            <tr><td style="padding: 0;">
+                <table>
+                %for column in grid.columns:
+                    %if column.filterable == "standard":
+                       ${render_grid_column_filter( grid, column )}
+                    %endif
+                %endfor
+                </table>
+            </td></tr>
+            <tr><td>
+                ## Clear the standard search.
+                ##|
+                ##<% filter_all = GridColumnFilter( "", { column.key : "All" } ) %>
+                ##<a href="${url(filter_all.get_url_args())}">Clear All</a>
+                
+                ## Only show advanced search if there are filterable columns.
+                <%
+                    show_advanced_search_link = False
+                    if render_advanced_search:
+                        for column in grid.columns:
+                            if column.filterable == "advanced":
+                                show_advanced_search_link = True
+                                break
+                %>
+                %if show_advanced_search_link:
+                    <% args = { "advanced-search" : True } %>
+                    <a href="${url(args)}" class="advanced-search-toggle">Advanced Search</a>
+                %endif
+            </td></tr>
+        </table>
+    </div>
+    
+    ## Advanced search.
+    <div id="advanced-search" style="display: ${advanced_search_display}; margin-top: 5px; border: 1px solid #ccc;">
+        <table>
+            <tr><td style="text-align: left" colspan="100">
+                <% args = { "advanced-search" : False } %>
+                <a href="${url(args)}" class="advanced-search-toggle">Close Advanced Search</a>
+                ## Link to clear all filters.
+                ##|
+                ##<%
+                ##    no_filter = GridColumnFilter("Clear All", default_filter_dict)
+                ##%>
+                ##<a href="${url(no_filter.get_url_args())}">${no_filter.label}</a>
+            </td></tr>
+            %for column in grid.columns:            
+                %if column.filterable == "advanced":
+                    ## Show div if current filter has value that is different from the default filter.
+                    %if column.key in cur_filter_dict and column.key in default_filter_dict and \
+                        cur_filter_dict[column.key] != default_filter_dict[column.key]:
+                        <script type="text/javascript">
+                            $('#advanced-search').css("display", "block");
+                        </script>
+                    %endif
+            
+                    ${render_grid_column_filter( grid, column )}
+                %endif
+            %endfor
+        </table>
+    </div>
+</%def>
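
`render_grid_filters` makes a single visibility decision: the advanced panel is shown when the request carries `advanced-search=True`, or when any advanced-filterable column's current filter differs from its default, and the standard search box is hidden whenever the advanced panel wins. A condensed sketch of that decision, with illustrative inputs:

    # Decide which search panel to display, mirroring render_grid_filters().
    def panel_displays(kwargs, cur_filter, default_filter, advanced_keys):
        advanced = kwargs.get("advanced-search") in ("True", "true")
        for key in advanced_keys:
            if (key in cur_filter and key in default_filter
                    and cur_filter[key] != default_filter[key]):
                advanced = True
        standard_display = "none" if advanced else "block"
        advanced_display = "block" if advanced else "none"
        return standard_display, advanced_display

    print(panel_displays({}, {"name": "fastq"}, {"name": "All"}, ["name"]))
    # -> ('none', 'block')
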
diff --git a/templates/webapps/tool_shed/common/repository_actions_menu.mako b/templates/webapps/tool_shed/common/repository_actions_menu.mako
new file mode 100644
index 0000000..2e97b1f
--- /dev/null
+++ b/templates/webapps/tool_shed/common/repository_actions_menu.mako
@@ -0,0 +1,210 @@
+<%inherit file="/base.mako"/>
+
+<%def name="render_tool_shed_repository_actions( repository, metadata=None, changeset_revision=None )">
+    <%
+        from tool_shed.util.review_util import can_browse_repository_reviews, changeset_revision_reviewed_by_user, get_review_by_repository_id_changeset_revision_user_id
+        from tool_shed.util.metadata_util import is_malicious
+
+        if repository.metadata_revisions:
+            has_metadata = True
+        else:
+            has_metadata = False
+
+        is_admin = trans.user_is_admin()
+
+        if is_admin or trans.app.security_agent.user_can_administer_repository( trans.user, repository ):
+            can_administer = True
+        else:
+            can_administer = False
+
+        if repository.deprecated:
+            is_deprecated = True
+        else:
+            is_deprecated = False
+
+        if repository.is_new( trans.app ):
+            is_new = True
+        else:
+            is_new = False
+
+        if is_malicious( trans.app, trans.security.encode_id( repository.id ), repository.tip( trans.app ) ):
+            changeset_is_malicious = True
+        else:
+            changeset_is_malicious = False
+
+        can_browse_contents = not is_new
+
+        if can_browse_repository_reviews( trans.app, trans.user, repository ):
+            can_browse_reviews = True
+        else:
+            can_browse_reviews = False
+
+        if trans.user and trans.user != repository.user:
+            can_contact_owner = True
+        else:
+            can_contact_owner = False
+        
+        if not is_new and trans.user and ( is_admin or repository.user == trans.user ) and not is_deprecated:
+            can_deprecate = True
+        else:
+            can_deprecate = False
+
+        if not is_deprecated and trans.app.security_agent.can_push( trans.app, trans.user, repository ):
+            can_push = True
+        else:
+            can_push = False
+
+        if not is_deprecated and not is_new and not changeset_is_malicious:
+            can_download = True
+        else:
+            can_download = False
+
+        if ( can_administer or can_push ) and not repository.deleted and not repository.deprecated and not is_new:
+            can_reset_all_metadata = True
+        else:
+            can_reset_all_metadata = False
+
+        if can_push and not is_deprecated:
+            can_upload = True
+        else:
+            can_upload = False
+
+        if not is_new and not is_deprecated and trans.user and repository.user != trans.user:
+            can_rate = True
+        else:
+            can_rate = False
+        
+        if metadata is not None and changeset_revision is not None:
+            if has_metadata and not is_deprecated and trans.app.security_agent.user_can_review_repositories( trans.user ):
+                can_review_repository = True
+            else:
+                can_review_repository = False
+            if changeset_revision_reviewed_by_user( trans.user, repository, changeset_revision ):
+                reviewed_by_user = True
+            else:
+                reviewed_by_user = False
+        else:
+            can_review_repository = False
+            reviewed_by_user = False
+
+        if reviewed_by_user:
+            review = get_review_by_repository_id_changeset_revision_user_id( app=trans.app,
+                                                                             repository_id=trans.security.encode_id( repository.id ),
+                                                                             changeset_revision=changeset_revision,
+                                                                             user_id=trans.security.encode_id( trans.user.id ) )
+            review_id = trans.security.encode_id( review.id )
+        else:
+            review_id = None
+
+        if not is_new and not is_deprecated:
+            can_set_metadata = True
+        else:
+            can_set_metadata = False
+
+        if changeset_revision is not None:
+            if changeset_revision == repository.tip( trans.app ):
+                changeset_revision_is_repository_tip = True
+            else:
+                changeset_revision_is_repository_tip = False
+        else:
+            changeset_revision_is_repository_tip = False
+
+        if trans.user and ( is_admin or repository.user == trans.user ) and is_deprecated:
+            can_undeprecate = True
+        else:
+            can_undeprecate = False
+
+        can_view_change_log = not is_new
+
+        if can_push:
+            browse_label = 'Browse or delete repository tip files'
+        else:
+            browse_label = 'Browse repository tip files'
+    %>
+
+    <br/><br/>
+    <ul class="manage-table-actions">
+        %if is_new:
+            %if can_upload:
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}">Upload files to repository</a>
+            %endif
+            %if can_undeprecate:
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='deprecate', id=trans.security.encode_id( repository.id ), mark_deprecated=False )}">Mark repository as not deprecated</a>
+            %endif
+        %else:
+            <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+            <div popupmenu="repository-${repository.id}-popup">
+                %if can_review_repository:
+                    %if reviewed_by_user:
+                        <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository_review', action='edit_review', id=review_id )}">Manage my review of this revision</a>
+                    %else:
+                        <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository_review', action='create_review', id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">Add a review to this revision</a>
+                    %endif
+                %endif
+                %if can_browse_reviews:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository_review', action='manage_repository_reviews', id=trans.app.security.encode_id( repository.id ) )}">Browse reviews of this repository</a>
+                %endif
+                %if can_upload:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}">Upload files to repository</a>
+                %endif
+                %if can_administer:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='manage_repository', id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ) )}">Manage repository</a>
+                %else:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='view_repository', id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ) )}">View repository</a>
+                %endif
+                %if can_view_change_log:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">View change log</a>
+                %endif
+                %if can_browse_contents:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${browse_label | h}</a>
+                %endif
+                %if can_rate:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='rate_repository', id=trans.app.security.encode_id( repository.id ) )}">Rate repository</a>
+                %endif
+                %if can_contact_owner:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='contact_owner', id=trans.security.encode_id( repository.id ) )}">Contact repository owner</a>
+                %endif
+                %if can_reset_all_metadata:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='reset_all_metadata', id=trans.security.encode_id( repository.id ) )}">Reset all repository metadata</a>
+                %endif
+                %if can_deprecate:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='deprecate', id=trans.security.encode_id( repository.id ), mark_deprecated=True )}" confirm="Click Ok to deprecate this repository.">Mark repository as deprecated</a>
+                %endif
+                %if can_undeprecate:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='deprecate', id=trans.security.encode_id( repository.id ), mark_deprecated=False )}">Mark repository as not deprecated</a>
+                %endif
+                %if can_administer:
+                    <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='manage_repository_admins', id=trans.security.encode_id( repository.id ) )}">Manage repository administrators</a>
+                %endif
+                %if can_download:
+                    %if metadata is not None and changeset_revision is not None:
+                        <a class="action-button" href="${h.url_for( controller='repository', action='export', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">Export this revision</a>
+                    %endif
+                    <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ), file_type='gz' )}">Download as a .tar.gz file</a>
+                    <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ), file_type='bz2' )}">Download as a .tar.bz2 file</a>
+                    <a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=repository.tip( trans.app ), file_type='zip' )}">Download as a zip file</a>
+                %endif
+            </div>
+        %endif
+    </ul>
+</%def>
+
+<%def name="render_galaxy_repository_actions( repository=None )">
+    <br/><br/>
+    <ul class="manage-table-actions">
+        %if repository:
+            <li><a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='install_repositories_by_revision', repository_ids=trans.security.encode_id( repository.id ), changeset_revisions=changeset_revision )}">Install to Galaxy</a></li>
+            <li><a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='preview_tools_in_changeset', repository_id=trans.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">Browse repository</a></li>
+            <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Tool Shed Actions</a></li>
+            <div popupmenu="repository-${repository.id}-popup">
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='browse_valid_categories' )}">Browse valid repositories</a>
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='find_tools' )}">Search for valid tools</a>
+                <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='find_workflows' )}">Search for workflows</a>
+            </div>
+        %else:
+            <li><a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='browse_valid_categories' )}">Browse valid repositories</a></li>
+            <a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='find_tools' )}">Search for valid tools</a>
+            <li><a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='find_workflows' )}">Search for workflows</a></li>
+        %endif
+    </ul>
+</%def>
diff --git a/templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako b/templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako
new file mode 100644
index 0000000..cee55b7
--- /dev/null
+++ b/templates/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako
@@ -0,0 +1,93 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="common_misc_javascripts" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${common_misc_javascripts()}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<style type="text/css">
+.state-ok {
+    background: #aff1af;
+}
+.state-error {
+    background: #f9c7c5;
+}
+.state-queued {
+    background: #eee;
+}
+.state-running {
+    background: #ffc;
+}
+</style>
+<div class="warningmessage">
+    Resetting metadata may take a while because this process clones each changeset in each selected repository's change log to a temporary location on disk.
+    After you click the <b>Reset metadata on selected repositories</b> button, wait for this page to redirect; interacting with the page before then will not help.
+    Watch the Tool Shed paster log to follow progress if necessary.
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Reset all metadata on each selected repository</div>
+        <%
+            if trans.user_is_admin():
+                controller = 'admin'
+                action = 'reset_metadata_on_selected_repositories_in_tool_shed'
+            else:
+                controller = 'repository'
+                action = 'reset_metadata_on_my_writable_repositories_in_tool_shed'
+        %>
+        <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller=controller, action=action )}" method="post" >
+            <div class="form-row">
+                Check each repository for which you want to reset metadata.  Repository names are followed by owners in parentheses.
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="checkbox" id="checkAll" name="select_all_repositories_checkbox" value="true" onclick="checkAllRepositoryIdFields(1);"/><input type="hidden" name="select_all_repositories_checkbox" value="true"/><b>Select/unselect all repositories</b>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                ${repositories_select_field.get_html()}
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" id="reset_metadata_button" name="reset_metadata_on_selected_repositories_button" value="Reset metadata on selected repositories"/>
+            </div>
+        </form>
+    </div>
+</div>
+<script type="text/javascript">
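+    // Intercept form submission: each checked repository id is POSTed to the
+    // Tool Shed API one at a time, and the row's CSS class (state-queued,
+    // state-running, state-ok, state-error, styled above) tracks its progress.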
+    $('#reset_metadata_on_selected_repositories').submit(function(e) {
+        e.preventDefault();
+        var repository_ids = [];
+        $('input:checked').each(function() {
+            if ($(this).attr('id') != 'checkAll') {
+                repository_ids.push($(this).attr('value'));
+                $(this).parent().attr('class', 'state-queued');
+            }
+        });
+        for (var i = 0; i < repository_ids.length; i++) {
+            // Wrap each request in a closure so the .always() callback updates
+            // the row it belongs to rather than the last row in the loop.
+            (function(repository_id) {
+                var repo_div = $("[value=" + repository_id + "]").parent();
+                repo_div.attr('class', 'state-running');
+                $.ajax({
+                    type: 'POST',
+                    url: '${h.url_for('/api/repositories/reset_metadata_on_repository')}',
+                    data: { repository_id: repository_id },
+                    dataType: "json"
+                }).always(function (data) {
+                    repo_div.attr('class', 'state-' + data['status']);
+                    if (data['status'] == 'error') {
+                        repo_div.attr('title', data['repository_status'][0]);
+                    }
+                });
+            })(repository_ids[i]);
+        }
+    });
+</script>
\ No newline at end of file
diff --git a/templates/webapps/tool_shed/group/index.mako b/templates/webapps/tool_shed/group/index.mako
new file mode 100644
index 0000000..314ff24
--- /dev/null
+++ b/templates/webapps/tool_shed/group/index.mako
@@ -0,0 +1,55 @@
+<%inherit file="/webapps/tool_shed/base_panels.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="stylesheets()">
+    ## Include "base.css" for styling tool menu and forms (details)
+    ${h.css( "base", "autocomplete_tagging" )}
+
+    ## But make sure styles for the layout take precedence
+    ${parent.stylesheets()}
+
+    <style type="text/css">
+        body { margin: 0; padding: 0; overflow: hidden; }
+        #left {
+            background: #C1C9E5 url(${h.url_for('/static/style/menu_bg.png')}) top repeat-x;
+        }
+        .unified-panel-body {
+            overflow: auto;
+        }
+        .toolMenu {
+            margin-left: 10px;
+        }
+    </style>
+</%def>
+
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="init()">
+    %if trans.app.config.require_login and not trans.user:
+        <script type="text/javascript">
+            if ( window != top ) {
+                top.location.href = location.href;
+            }
+        </script>
+    %endif
+</%def>
+
+<%def name="center_panel()">
+    <script type="text/javascript">
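+        // Load the AMD module named by the server-side config ("jscript") and
+        // mount the ToolshedGroups application into the #groups_element div.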
+        window.globalTS = {};
+        $( function(){
+            require.config({
+                paths: {
+                    'toolshed': '../toolshed'
+                }
+            });
+            require([ '${config.get( "app" ).get( "jscript" )}' ], function( groups ){
+                window.app = new groups.ToolshedGroups();
+            });
+        });
+    </script>
+    <div id="groups_element" style="width: 95%; margin:auto; margin-top:2em; "></div>
+</%def>
diff --git a/templates/webapps/tool_shed/index.mako b/templates/webapps/tool_shed/index.mako
new file mode 100644
index 0000000..0689e78
--- /dev/null
+++ b/templates/webapps/tool_shed/index.mako
@@ -0,0 +1,241 @@
+<%inherit file="/webapps/tool_shed/base_panels.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="stylesheets()">
+    ## Include "base.css" for styling tool menu and forms (details)
+    ${h.css( "base", "autocomplete_tagging" )}
+
+    ## But make sure styles for the layout take precedence
+    ${parent.stylesheets()}
+
+    <style type="text/css">
+        body { margin: 0; padding: 0; overflow: hidden; }
+        #left {
+            background: #C1C9E5 url(${h.url_for('/static/style/menu_bg.png')}) top repeat-x;
+        }
+        .unified-panel-body {
+            overflow: auto;
+        }
+        .toolMenu {
+            margin-left: 10px;
+        }
+    </style>
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<%def name="init()">
+    <%
+        self.has_left_panel=True
+        self.has_right_panel=False
+        self.active_view="tools"
+    %>
+    %if trans.app.config.require_login and not trans.user:
+        <script type="text/javascript">
+            if ( window != top ) {
+                top.location.href = location.href;
+            }
+        </script>
+    %endif
+</%def>
+
+<%def name="left_panel()">
+    <% can_review_repositories = trans.app.security_agent.user_can_review_repositories( trans.user ) %>
+    <div class="unified-panel-header" unselectable="on">
+        <div class='unified-panel-header-inner'>${trans.app.shed_counter.unique_valid_tools | h} valid tools on ${util.unicodify( trans.app.shed_counter.generation_time ) | h}</div>
+    </div>
+    <div class="unified-panel-body">
+        <div class="toolMenu">
+            <div class="toolSectionList">
+                %if user_id or repository_id:
+                    ## The incoming route was a sharable URL and may have included a changeset_revision, although we don't check for it.
+                    <div class="toolSectionPad"></div>
+                    <div class="toolSectionTitle">
+                        All Repositories
+                    </div>
+                    <div class="toolTitle">
+                        <a href="${h.url_for( controller='repository', action='index' )}">Browse by category</a>
+                    </div>
+                %else:
+                    %if repository_metadata:
+                        <div class="toolSectionPad"></div>
+                        <div class="toolSectionTitle">
+                            Search
+                        </div>
+                        <div class="toolSectionBody">
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='find_tools' )}">Search for valid tools</a>
+                            </div>
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='find_workflows' )}">Search for workflows</a>
+                            </div>
+                        </div>
+                        <div class="toolSectionPad"></div>
+                        <div class="toolSectionTitle">
+                            Valid Galaxy Utilities
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_tools' )}">Tools</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_datatypes' )}">Custom datatypes</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repository_dependencies' )}">Repository dependency definitions</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_tool_dependencies' )}">Tool dependency definitions</a>
+                        </div>
+                    %endif
+                    <div class="toolSectionPad"></div>
+                    <div class="toolSectionTitle">
+                        All Repositories
+                    </div>
+                    <div class="toolTitle">
+                        <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_categories' )}">Browse by category</a>
+                    </div>
+                    %if trans.user:
+                        %if trans.user.active_repositories or can_administer_repositories:
+                            <div class="toolSectionPad"></div>
+                            <div class="toolSectionTitle">
+                                Repositories I Can Change
+                            </div>
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_i_own' )}">Repositories I own</a>
+                            </div>
+                            %if can_administer_repositories:
+                                <div class="toolTitle">
+                                    <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_i_can_administer' )}">Repositories I can administer</a>
+                                </div>
+                            %endif
+                            %if has_reviewed_repositories:
+                                <div class="toolTitle">
+                                    <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories', operation='reviewed_repositories_i_own' )}">Reviewed repositories I own</a>
+                                </div>
+                            %endif
+                            %if has_deprecated_repositories:
+                                <div class="toolTitle">
+                                    <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_deprecated_repositories_i_own' )}">Deprecated repositories I own</a>
+                                </div>
+                            %endif
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories' )}">My writable repositories</a>
+                            </div>
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='reset_metadata_on_my_writable_repositories_in_tool_shed' )}">Reset metadata on my repositories</a>
+                            </div>
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_missing_tool_test_components' )}">Latest revision: missing tool tests</a>
+                            </div>
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_my_writable_repositories_with_invalid_tools' )}">Latest revision: invalid tools</a>
+                            </div>
+                        %endif
+                        <div class="toolSectionPad"></div>
+                        <div class="toolSectionTitle">
+                            Available Actions
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='repository', action='create_repository' )}">Create new repository</a>
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='repository', action='upload_capsule' )}">Import repository capsule</a>
+                        </div>
+                        %if trans.app.config.enable_galaxy_flavor_docker_image:
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='create_galaxy_docker_image' )}">Create Galaxy Docker Image</a>
+                            </div>
+                        %endif
+                        %if can_review_repositories:
+                            <div class="toolSectionPad"></div>
+                            <div class="toolSectionTitle">
+                                Reviewing Repositories
+                            </div>
+                            <div class="toolSectionBody">
+                                <div class="toolSectionBg">
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository_review', action='manage_repositories_ready_for_review' )}">Repositories ready for review</a>
+                                    </div>
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository_review', action='manage_repositories_without_reviews' )}">All repositories with no reviews</a>
+                                    </div>
+                                    %if trans.user.repository_reviews:
+                                        <div class="toolTitle">
+                                            <a target="galaxy_main" href="${h.url_for( controller='repository_review', action='manage_repositories_reviewed_by_me' )}">Repositories reviewed by me</a>
+                                        </div>
+                                    %endif
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository_review', action='manage_repositories_with_reviews' )}">All reviewed repositories</a>
+                                    </div>
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository_review', action='manage_components' )}">Manage review components</a>
+                                    </div>
+                                </div>
+                            </div>
+                            <div class="toolSectionPad"></div>
+                            <div class="toolSectionTitle">
+                                Reviewing Repositories With Tools
+                            </div>
+                            <div class="toolSectionBody">
+                                <div class="toolSectionBg">
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_missing_tool_test_components' )}">Latest revision: missing tool tests</a>
+                                    </div>
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_install_errors' )}">Latest revision: installation errors</a>
+                                    </div>
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_failing_tool_tests' )}">Latest revision: failing tool tests</a>
+                                    </div>
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_skip_tool_test_checked' )}">Latest revision: skip tool tests</a>
+                                    </div>
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_no_failing_tool_tests' )}">Latest revision: all tool tests pass</a>
+                                    </div>
+                                    <div class="toolTitle">
+                                        <a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories_with_invalid_tools' )}">Latest revision: invalid tools</a>
+                                    </div>
+                                </div>
+                            </div>
+                        %endif
+                    %else:
+                        <div class="toolSectionPad"></div>
+                        <div class="toolSectionTitle">
+                            Available Actions
+                        </div>
+                        <div class="toolTitle">
+                            <a target="galaxy_main" href="${h.url_for( controller='/user', action='login' )}">Login to create a repository</a>
+                        </div>
+                        %if trans.app.config.enable_galaxy_flavor_docker_image:
+                            <div class="toolTitle">
+                                <a target="galaxy_main" href="${h.url_for( controller='repository', action='create_galaxy_docker_image' )}">Create Galaxy Docker Image</a>
+                            </div>
+                        %endif
+                    %endif
+                %endif
+            </div>
+        </div>
+    </div>
+</%def>
+
+<%def name="center_panel()">
+    <%
+        if trans.app.config.require_login and not trans.user:
+            center_url = h.url_for( controller='user', action='login', message=message, status=status )
+        elif repository_id and changeset_revision:
+            # The incoming route was a sharable URL: /view/{owner}/{name}/{changeset_revision}.
+            center_url = h.url_for( controller='repository', action='view_repository', id=repository_id, changeset_revision=changeset_revision, message=message, status=status )
+        elif repository_id:
+            # The incoming route was a sharable URL: /view/{owner}/{name}.
+            center_url = h.url_for( controller='repository', action='view_repository', id=repository_id, message=message, status=status )
+        elif user_id:
+            # The incoming route was a sharable URL: /view/{owner}.
+            center_url = h.url_for( controller='repository', action='browse_repositories', operation="repositories_by_user", user_id=user_id, message=message, status=status )
+        else:
+            center_url = h.url_for( controller='repository', action='browse_categories', message=message, status=status )
+    %>
+    <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"></iframe>
+</%def>
diff --git a/templates/webapps/tool_shed/repository/browse_repository.mako b/templates/webapps/tool_shed/repository/browse_repository.mako
new file mode 100644
index 0000000..32409fc
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/browse_repository.mako
@@ -0,0 +1,100 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "jquery.rating", "dynatree_skin/ui.dynatree" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating", "libs/jquery/jquery-ui", "libs/jquery/jquery.cookie", "libs/jquery/jquery.dynatree" )}
+    ${common_javascripts(repository)}
+</%def>
+
+<%
+    is_new = repository.is_new( trans.app )
+    can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )
+    can_download = not is_new and ( not is_malicious or can_push )
+    can_browse_contents = not is_new
+%>
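+## Downloading is allowed for any non-new repository unless the tip has been
+## flagged malicious, in which case only users with push access may download.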
+
+${render_tool_shed_repository_actions( repository, metadata=metadata, changeset_revision=changeset_revision )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if can_browse_contents:
+    <div class="toolForm">
+        <div class="toolFormTitle">Repository '${repository.name | h}' revision ${repository.tip( trans.app ) | h} (repository tip)</div>
+        %if can_download:
+            <div class="form-row">
+                <label>Clone this repository:</label>
+                ${render_clone_str( repository )}
+            </div>
+        %endif
+        <form name="repository_type">
+            ${render_repository_type_select_field( repository_type_select_field, render_help=False )}
+        </form>
+        %if can_push:
+            <form name="select_files_to_delete" id="select_files_to_delete" action="${h.url_for( controller='repository', action='select_files_to_delete', id=trans.security.encode_id( repository.id ))}" method="post" >
+                <div class="form-row" >
+                    <label>Contents:</label>
+                    <div id="tree" >
+                        Loading...
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Click on a file to display its contents below.  You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.
+                    </div>
+                    <input id="selected_files_to_delete" name="selected_files_to_delete" type="hidden" value=""/>
+                </div>
+                <div class="form-row">
+                    <label>Message:</label>
+                    <div class="form-row-input">
+                        %if commit_message:
+                            <textarea name="commit_message" rows="3" cols="35">${commit_message | h}</textarea>
+                        %else:
+                            <textarea name="commit_message" rows="3" cols="35"></textarea>
+                        %endif
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        This is the commit message for the mercurial change set that will be created if you delete selected files.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="select_files_to_delete_button" value="Delete selected files"/>
+                </div>
+                <div class="form-row">
+                    <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>
+                </div>
+            </form>
+        %else:
+            <div class="toolFormBody">
+                <div class="form-row" >
+                    <label>Contents:</label>
+                    <div id="tree" >
+                        Loading...
+                    </div>
+                </div>
+                <div class="form-row">
+                    <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>
+                </div>
+            </div>
+        %endif
+    </div>
+    <p/>
+%endif
diff --git a/templates/webapps/tool_shed/repository/common.mako b/templates/webapps/tool_shed/repository/common.mako
new file mode 100644
index 0000000..e688908
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/common.mako
@@ -0,0 +1,1265 @@
+<%def name="common_javascripts(repository)">
+    <script type="text/javascript">
+        $(function(){
+
+            // --- Initialize the repository contents tree
+            $("#tree").dynatree({
+                title: "${repository.name}",
+                minExpandLevel: 1,
+                persist: false,
+                checkbox: true,
+                selectMode: 3,
+                onPostInit: function(isReloading, isError) {
+                    // Re-fire onActivate, so the text is updated
+                    this.reactivate();
+                },
+                fx: { height: "toggle", duration: 200 },
+                // Fetch the root folder's children from the server when the tree initializes:
+                initAjax: {url: "${h.url_for( controller='repository', action='open_folder' )}",
+                           dataType: "json",
+                           data: { folder_path: "${repository.repo_path( trans.app )}", repository_id: "${trans.security.encode_id( repository.id )}"  },
+                },
+                onLazyRead: function(dtnode){
+                    dtnode.appendAjax({
+                        url: "${h.url_for( controller='repository', action='open_folder' )}",
+                        dataType: "json",
+                        data: { folder_path: dtnode.data.key, repository_id: "${trans.security.encode_id( repository.id )}"  },
+                    });
+                },
+                onSelect: function(select, dtnode) {
+                    // Display list of selected nodes
+                    var selNodes = dtnode.tree.getSelectedNodes();
+                    // convert to title/key array
+                    var selKeys = $.map(selNodes, function(node) {
+                        return node.data.key;
+                    });
+                    if (document.forms["select_files_to_delete"]) {
+                        // The following is used only ~/templates/webapps/tool_shed/repository/browse_repository.mako.
+                        document.select_files_to_delete.selected_files_to_delete.value = selKeys.join(",");
+                    }
+                    // The following is used only in ~/templates/webapps/tool_shed/repository/upload.mako.
+                    if (document.forms["upload_form"]) {
+                        document.upload_form.upload_point.value = selKeys.slice(-1);
+                    }
+                },
+                onActivate: function(dtnode) {
+                    var cell = $("#file_contents");
+                    var selected_value;
+                    if (dtnode.data.key == 'root') {
+                        selected_value = "${repository.repo_path( trans.app )}/";
+                    } else {
+                        selected_value = dtnode.data.key;
+                    }
+                    if (selected_value.charAt(selected_value.length-1) != '/') {
+                        // A file (not a folder) was activated; fetch and display its contents.
+                        $.ajax( {
+                            type: "POST",
+                            url: "${h.url_for( controller='repository', action='get_file_contents' )}",
+                            dataType: "json",
+                            data: { file_path: selected_value, repository_id: "${trans.security.encode_id( repository.id )}" },
+                            success : function ( data ) {
+                                cell.html( '<label>'+data+'</label>' );
+                            }
+                        });
+                    } else {
+                        cell.html( '' );
+                    }
+                },
+            });
+        });
+    </script>
+</%def>
+
+<%def name="container_javascripts()">
+    <script type="text/javascript">
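+        // Folder rows render hidden; init_dependencies() wires up the
+        // expand/collapse links and persists each folder's open state in
+        // $.jStorage so the tree reopens where the user left it.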
+        var init_dependencies = function() {
+            var storage_id = "library-expand-state-${trans.security.encode_id(10000)}";
+            var restore_folder_state = function() {
+                var state = $.jStorage.get(storage_id);
+                if (state) {
+                    for (var id in state) {
+                        if (state[id] === true) {
+                            var row = $("#" + id),
+                                index = row.parent().children().index(row);
+                            row.addClass("expanded").show();
+                            row.siblings().filter("tr[parent='" + index + "']").show();
+                        }
+                    }
+                }
+            };
+            var save_folder_state = function() {
+                var state = {};
+                $("tr.folderRow").each( function() {
+                    var folder = $(this);
+                    state[folder.attr("id")] = folder.hasClass("expanded");
+                });
+                $.jStorage.set(storage_id, state);
+            };
+            $(".container-table").each(function() {
+                var child_of_parent_cache = {};
+                // Recursively fill in children and descendants of each row
+                var process_row = function(q, parents) {
+                    // Find my index
+                    var parent = q.parent(),
+                        this_level = child_of_parent_cache[parent] || (child_of_parent_cache[parent] = parent.children());
+                    var index = this_level.index(q);
+                    // Find my immediate children
+                    var children = $(par_child_dict[index]);
+                    // Recursively handle them
+                    var descendants = children;
+                    children.each( function() {
+                        var child_descendants = process_row( $(this), parents.add(q) );
+                        descendants = descendants.add(child_descendants);
+                    });
+                    // Set up expand / hide link
+                    var expand_fn = function() {
+                        if ( q.hasClass("expanded") ) {
+                            descendants.hide();
+                            descendants.removeClass("expanded");
+                            q.removeClass("expanded");
+                        } else {
+                            children.show();
+                            q.addClass("expanded");
+                        }
+                        save_folder_state();
+                    };
+                    $("." + q.attr("id") + "-click").click(expand_fn);
+                    // return descendants for use by parent
+                    return descendants;
+                };
+                // Initialize dict[parent_id] = rows_which_have_that_parent_id_as_parent_attr
+                var par_child_dict = {},
+                    no_parent = [];
+                $(this).find("tbody tr").each( function() {
+                    if ( $(this).attr("parent")) {
+                        var parent = $(this).attr("parent");
+                        if (par_child_dict[parent] !== undefined) {
+                            par_child_dict[parent].push(this);
+                        } else {
+                            par_child_dict[parent] = [this];
+                        }
+                    } else {
+                        no_parent.push(this);
+                    }
+                });
+                $(no_parent).each( function() {
+                    var descendants = process_row( $(this), $([]) );
+                    descendants.hide();
+                });
+            });
+            restore_folder_state();
+        };
+
+        var init_clipboard = function() {
+                %if hasattr( repository, 'clone_url' ):
+                    $('#clone_clipboard').on('click', function( event ) {
+                        event.preventDefault();
+                        window.prompt("Copy to clipboard: Ctrl+C, Enter", "hg clone ${ repository.clone_url }");
+                    });
+                %endif
+                %if hasattr( repository, 'share_url' ):
+                    $('#share_clipboard').on('click', function( event ) {
+                        event.preventDefault();
+                        window.prompt("Copy to clipboard: Ctrl+C, Enter", "${ repository.share_url }");
+                    });
+                %endif
+        };
+
+        $(function() {
+            init_dependencies();
+            init_clipboard();
+        });
+    </script>
+</%def>
+
+<%def name="render_repository_type_select_field( repository_type_select_field, render_help=True )">
+    <div class="form-row">
+        <label>Repository type:</label>
+        <%
+            from tool_shed.repository_types import util
+            options = repository_type_select_field.options
+            repository_types = []
+            for option_tup in options:
+                repository_types.append( option_tup[ 1 ] )
+            render_as_text = len( options ) == 1
+            if render_as_text:
+                repository_type = options[ 0 ][ 0 ]
+        %>
+        %if render_as_text:
+            ${repository_type | h}
+            %if render_help:
+                <div class="toolParamHelp" style="clear: both;">
+                    This repository's type cannot be changed because its contents are valid only for its current type, or because the repository has been cloned.
+                </div>
+            %endif
+        %else:
+            ${repository_type_select_field.get_html()}
+            %if render_help:
+                <div class="toolParamHelp" style="clear: both;">
+                    Select the repository type based on the following criteria.
+                    <ul>
+                        %if util.UNRESTRICTED in repository_types:
+                            <li><b>Unrestricted</b> - contents can be any set of valid Galaxy utilities or files
+                        %endif
+                        %if util.REPOSITORY_SUITE_DEFINITION in repository_types:
+                            <li><b>Repository suite definition</b> - contents will always be restricted to one file named repository_dependencies.xml
+                        %endif
+                        %if util.TOOL_DEPENDENCY_DEFINITION in repository_types:
+                            <li><b>Tool dependency definition</b> - contents will always be restricted to one file named tool_dependencies.xml
+                        %endif
+                    </ul>
+                </div>
+            %endif
+        %endif
+        <div style="clear: both"></div>
+    </div>
+</%def>
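+## For example (hypothetical), a repository holding only a tool_dependencies.xml
+## file should be given the "Tool dependency definition" type above so the Tool
+## Shed can restrict its future contents to that single file.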
+
+<%def name="render_sharable_str( repository, changeset_revision=None )">
+    <%
+        from tool_shed.util.repository_util import generate_sharable_link_for_repository_in_tool_shed
+        sharable_link = generate_sharable_link_for_repository_in_tool_shed( repository, changeset_revision=changeset_revision )
+    %>
+    <a href="${ sharable_link }" target="_blank">${ sharable_link }</a>
+</%def>
+
+<%def name="render_clone_str( repository )"><%
+        from tool_shed.util.common_util import generate_clone_url_for_repository_in_tool_shed
+        clone_str = generate_clone_url_for_repository_in_tool_shed( trans.user, repository )
+    %>hg clone ${ clone_str }</%def>
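+## Note: render_clone_str keeps its def tag, python block, and output on one
+## line so no stray whitespace ends up inside the rendered "hg clone" command.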
+
+<%def name="render_folder( folder, folder_pad, parent=None, row_counter=None, is_root_folder=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( folder.id )
+
+        if is_root_folder:
+            pad = folder_pad
+            expander = h.url_for("/static/images/silk/resultset_bottom.png")
+            folder_img = h.url_for("/static/images/silk/folder_page.png")
+        else:
+            pad = folder_pad + 20
+            expander = h.url_for("/static/images/silk/resultset_next.png")
+            folder_img = h.url_for("/static/images/silk/folder.png")
+        my_row = None
+    %>
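+    ## Each non-root row records its own index via row_counter and passes it to
+    ## its children as the "parent" attribute; container_javascripts() reads
+    ## that attribute to build the client-side expand/collapse tree.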
+    %if not is_root_folder:
+        <%
+            if parent is None:
+                bg_str = 'bgcolor="#D8D8D8"'
+            else:
+                bg_str = ''
+        %>
+        <tr id="folder-${encoded_id}" ${bg_str} class="folderRow libraryOrFolderRow"
+            %if parent is not None:
+                parent="${parent}"
+                style="display: none;"
+            %endif
+            >
+            <%
+                col_span_str = ''
+                folder_label = str( folder.label )
+                if folder.datatypes:
+                    col_span_str = 'colspan="4"'
+                elif folder.label == 'Missing tool dependencies':
+                    if folder.description:
+                        folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+                    else:
+                        folder_label = "%s<i> - repository tools require handling of these missing dependencies</i>" % folder_label
+                    col_span_str = 'colspan="5"'
+                elif folder.label in [ 'Installed repository dependencies', 'Repository dependencies', 'Missing repository dependencies' ]:
+                    if folder.description:
+                        folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+                    elif folder.label not in [ 'Installed repository dependencies' ] and folder.parent.label not in [ 'Installation errors' ]:
+                        folder_label = "%s<i> - installation of these additional repositories is required</i>" % folder_label
+                    if trans.webapp.name == 'galaxy':
+                        col_span_str = 'colspan="4"'
+                elif folder.label == 'Invalid repository dependencies':
+                    folder_label = "%s<i> - click the repository dependency to see why it is invalid</i>" % folder_label
+                elif folder.label == 'Invalid tool dependencies':
+                    folder_label = "%s<i> - click the tool dependency to see why it is invalid</i>" % folder_label
+                elif folder.label == 'Valid tools':
+                    col_span_str = 'colspan="3"'
+                    if folder.description:
+                        folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+                    else:
+                        folder_label = "%s<i> - click the name to preview the tool and use the pop-up menu to inspect all metadata</i>" % folder_label
+                elif folder.invalid_tools:
+                    if trans.webapp.name == 'tool_shed':
+                        folder_label = "%s<i> - click the tool config file name to see why the tool is invalid</i>" % folder_label
+                elif folder.tool_dependencies:
+                    if folder.description:
+                        folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+                    else:
+                        folder_label = "%s<i> - repository tools require handling of these dependencies</i>" % folder_label
+                    col_span_str = 'colspan="4"'
+                elif folder.workflows:
+                    if folder.description:
+                        folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+                    else:
+                        folder_label = "%s<i> - click the name to view an SVG image of the workflow</i>" % folder_label
+                    col_span_str = 'colspan="4"'
+                elif folder.valid_data_managers:
+                    if folder.description:
+                        folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+                    col_span_str = 'colspan="3"'
+                elif folder.invalid_data_managers:
+                    if folder.description:
+                        folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+                    col_span_str = 'colspan="2"'
+            %>
+            <td ${col_span_str} style="padding-left: ${folder_pad}px;">
+                <span class="expandLink folder-${encoded_id}-click">
+                    <div style="float: left; margin-left: 2px;" class="expandLink folder-${encoded_id}-click">
+                        <a class="folder-${encoded_id}-click" href="javascript:void(0);">
+                            ${folder_label}
+                        </a>
+                    </div>
+                </span>
+            </td>
+        </tr>
+        <%
+            my_row = row_counter.count
+            row_counter.increment()
+        %>
+    %endif
+    %for sub_folder in folder.folders:
+        ${render_folder( sub_folder, pad, parent=my_row, row_counter=row_counter, is_root_folder=False, render_repository_actions_for=render_repository_actions_for )}
+    %endfor
+    %for readme in folder.readme_files:
+        ${render_readme( readme, pad, my_row, row_counter, render_repository_actions_for=render_repository_actions_for )}
+    %endfor
+    %for invalid_repository_dependency in folder.invalid_repository_dependencies:
+        ${render_invalid_repository_dependency( invalid_repository_dependency, pad, my_row, row_counter, render_repository_actions_for=render_repository_actions_for )}
+    %endfor
+    %for index, repository_dependency in enumerate( folder.repository_dependencies ):
+        <% row_is_header = index == 0 %>
+        ${render_repository_dependency( repository_dependency, pad, my_row, row_counter, row_is_header, render_repository_actions_for=render_repository_actions_for )}
+    %endfor
+    %for invalid_tool_dependency in folder.invalid_tool_dependencies:
+        ${render_invalid_tool_dependency( invalid_tool_dependency, pad, my_row, row_counter, render_repository_actions_for=render_repository_actions_for )}
+    %endfor
+    %for index, tool_dependency in enumerate( folder.tool_dependencies ):
+        <% row_is_header = index == 0 %>
+        ${render_tool_dependency( tool_dependency, pad, my_row, row_counter, row_is_header, render_repository_actions_for=render_repository_actions_for )}
+    %endfor
+    %if folder.valid_tools:
+        %for index, tool in enumerate( folder.valid_tools ):
+            <% row_is_header = index == 0 %>
+            ${render_tool( tool, pad, my_row, row_counter, row_is_header, render_repository_actions_for=render_repository_actions_for )}
+        %endfor
+    %endif
+    %for invalid_tool in folder.invalid_tools:
+        ${render_invalid_tool( invalid_tool, pad, my_row, row_counter, render_repository_actions_for=render_repository_actions_for )}
+    %endfor
+    %if folder.workflows:
+        %for index, workflow in enumerate( folder.workflows ):
+            <% row_is_header = index == 0 %>
+            ${render_workflow( workflow, pad, my_row, row_counter, row_is_header, render_repository_actions_for=render_repository_actions_for )}
+        %endfor
+    %endif
+    %if folder.datatypes:
+        %for index, datatype in enumerate( folder.datatypes ):
+            <% row_is_header = index == 0 %>
+            ${render_datatype( datatype, pad, my_row, row_counter, row_is_header, render_repository_actions_for=render_repository_actions_for )}
+        %endfor
+    %endif
+    %if folder.valid_data_managers:
+        %for index, data_manager in enumerate( folder.valid_data_managers ):
+            <% row_is_header = index == 0 %>
+            ${render_valid_data_manager( data_manager, pad, my_row, row_counter, row_is_header, render_repository_actions_for=render_repository_actions_for )}
+        %endfor
+    %endif
+    %if folder.invalid_data_managers:
+        %for index, data_manager in enumerate( folder.invalid_data_managers ):
+            <% row_is_header = index == 0 %>
+            ${render_invalid_data_manager( data_manager, pad, my_row, row_counter, row_is_header, render_repository_actions_for=render_repository_actions_for )}
+        %endfor
+    %endif
+    %if folder.missing_test_components:
+        %for missing_test_component in folder.missing_test_components:
+            ${render_missing_test_component( missing_test_component, pad, my_row, row_counter, render_repository_actions_for=render_repository_actions_for )}
+        %endfor
+    %endif
+</%def>
+
+<%def name="render_datatype( datatype, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( datatype.id )
+        if row_is_header:
+            cell_type = 'th'
+        else:
+            cell_type = 'td'
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rd-${encoded_id}">
+        <${cell_type} style="padding-left: ${pad+20}px;">${datatype.extension | h}</${cell_type}>
+        <${cell_type}>${datatype.type | h}</${cell_type}>
+        <${cell_type}>${datatype.mimetype | h}</${cell_type}>
+        <${cell_type}>${datatype.subclass | h}</${cell_type}>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_failed_test( failed_test, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        from tool_shed.util.basic_util import to_html_string
+        encoded_id = trans.security.encode_id( failed_test.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rft-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="test_environment">
+                <tr><td bgcolor="#FFFFCC"><b>Tool id:</b> ${failed_test.tool_id | h}</td></tr>
+                <tr><td><b>Tool version:</b> ${failed_test.tool_version | h}</td></tr>
+                <tr><td><b>Test:</b> ${failed_test.test_id | h}</td></tr>
+                <tr><td><b>Stderr:</b> <br/>${ to_html_string( failed_test.stderr ) }</td></tr>
+                <tr><td><b>Traceback:</b> <br/>${ to_html_string( failed_test.traceback ) }</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_invalid_data_manager( data_manager, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( data_manager.id )
+        if row_is_header:
+            cell_type = 'th'
+        else:
+            cell_type = 'td'
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-ridm-${encoded_id}">
+        <${cell_type} style="padding-left: ${pad+20}px;">${data_manager.index | h}</${cell_type}>
+        <${cell_type}>${data_manager.error | h}</${cell_type}>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_invalid_repository_dependency( invalid_repository_dependency, pad, parent, row_counter, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( invalid_repository_dependency.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rird-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            ${ invalid_repository_dependency.error | h }
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_invalid_tool( invalid_tool, pad, parent, row_counter, valid=True, render_repository_actions_for='tool_shed' )">
+    <% encoded_id = trans.security.encode_id( invalid_tool.id ) %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rit-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            %if trans.webapp.name == 'tool_shed' and invalid_tool.repository_id and invalid_tool.tool_config and invalid_tool.changeset_revision:
+                <a class="view-info" href="${h.url_for( controller='repository', action='load_invalid_tool', repository_id=trans.security.encode_id( invalid_tool.repository_id ), tool_config=invalid_tool.tool_config, changeset_revision=invalid_tool.changeset_revision, render_repository_actions_for=render_repository_actions_for )}">
+                    ${invalid_tool.tool_config | h}
+                </a>
+            %else:
+                ${invalid_tool.tool_config | h}
+            %endif
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_invalid_tool_dependency( invalid_tool_dependency, pad, parent, row_counter, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( invalid_tool_dependency.id )
+    %>
+    <style type="text/css">
+        #invalid_td_table{ table-layout:fixed;
+                           width:100%;
+                           overflow-wrap:normal;
+                           overflow:hidden;
+                           border:0px;
+                           word-break:keep-all;
+                           word-wrap:break-word;
+                           line-break:strict; }
+    </style>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-ritd-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="invalid_td_table">
+                <tr><td>${ invalid_tool_dependency.error | h }</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_missing_test_component( missing_test_component, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( missing_test_component.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rmtc-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="test_environment">
+                <tr><td bgcolor="#FFFFCC"><b>Tool id:</b> ${missing_test_component.tool_id | h}</td></tr>
+                <tr><td><b>Tool version:</b> ${missing_test_component.tool_version | h}</td></tr>
+                <tr><td><b>Tool guid:</b> ${missing_test_component.tool_guid | h}</td></tr>
+                <tr><td><b>Missing components:</b> <br/>${missing_test_component.missing_components | h}</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_readme( readme, pad, parent, row_counter, render_repository_actions_for='tool_shed' )">
+    <% encoded_id = trans.security.encode_id( readme.id ) %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rr-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="readme_files">
+                <tr><td>${ readme.text }</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_repository_dependency( repository_dependency, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        from galaxy.util import asbool
+        from tool_shed.util.repository_util import get_repository_by_name_and_owner
+        encoded_id = trans.security.encode_id( repository_dependency.id )
+        if trans.webapp.name == 'galaxy':
+            if repository_dependency.tool_shed_repository_id:
+                encoded_required_repository_id = trans.security.encode_id( repository_dependency.tool_shed_repository_id )
+            else:
+                encoded_required_repository_id = None
+            if repository_dependency.installation_status:
+                installation_status = str( repository_dependency.installation_status )
+            else:
+                installation_status = None
+        repository_name = str( repository_dependency.repository_name )
+        repository_owner = str( repository_dependency.repository_owner )
+        changeset_revision = str( repository_dependency.changeset_revision )
+        if asbool( str( repository_dependency.prior_installation_required ) ):
+            prior_installation_required_str = " <i>(prior install required)</i>"
+        else:
+            prior_installation_required_str = ""
+        if trans.webapp.name == 'galaxy':
+            if row_is_header:
+                cell_type = 'th'
+            else:
+                cell_type = 'td'
+            rd = None
+        else:
+            # We're in the tool shed.
+            cell_type = 'td'
+            rd = get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rrd-${encoded_id}">
+        %if trans.webapp.name == 'galaxy':
+            <${cell_type} style="padding-left: ${pad+20}px;">
+                %if row_is_header:
+                    ${repository_name | h}
+                %elif encoded_required_repository_id:
+                    <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=encoded_required_repository_id )}">${repository_name | h}</a>
+                %else:
+                    ${repository_name | h}
+                %endif
+            </${cell_type}>
+            <${cell_type}>
+                ${changeset_revision | h}
+            </${cell_type}>
+            <${cell_type}>
+                ${repository_owner | h}
+            </${cell_type}>
+            <${cell_type}>
+                ${installation_status | h}
+            </${cell_type}>
+        %else:
+            <td style="padding-left: ${pad+20}px;">
+                %if render_repository_actions_for == 'tool_shed' and rd:
+                    <a class="view-info" href="${h.url_for( controller='repository', action='view_or_manage_repository', id=trans.security.encode_id( rd.id ), changeset_revision=changeset_revision )}">Repository <b>${repository_name | h}</b> revision <b>${changeset_revision | h}</b> owned by <b>${repository_owner | h}</b></a>${prior_installation_required_str}
+                %else:
+                    Repository <b>${repository_name | h}</b> revision <b>${changeset_revision | h}</b> owned by <b>${repository_owner | h}</b>${prior_installation_required_str}
+                %endif
+            </td>
+        %endif
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_table_wrap_style( table_id )">
+    <style type="text/css">
+        table.${table_id}{ table-layout:fixed;
+                           width:100%;
+                           overflow-wrap:normal;
+                           overflow:hidden;
+                           border:0px;
+                           word-break:keep-all;
+                           word-wrap:break-word;
+                           line-break:strict; }
+        ul{ list-style-type: disc;
+            padding-left: 20px; }
+    </style>
+</%def>
+
+<%def name="render_tool_dependency_installation_error( installation_error, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        from galaxy.util import unicodify
+        encoded_id = trans.security.encode_id( installation_error.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rtdie-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="test_environment">
+                <tr bgcolor="#FFFFCC">
+                    <th>Type</th><th>Name</th><th>Version</th>
+                </tr>
+                <tr>
+                    <td>${installation_error.type | h}</td>
+                    <td>${installation_error.name | h}</td>
+                    <td>${installation_error.version | h}</td>
+                </tr>
+                <tr><th>Error</th></tr>
+                <tr><td colspan="3">${unicodify( installation_error.error_message ) | h}</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_tool_dependency_successful_installation( successful_installation, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( successful_installation.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rtdsi-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="test_environment">
+                <tr bgcolor="#FFFFCC">
+                    <th>Type</th><th>Name</th><th>Version</th>
+                </tr>
+                <tr>
+                    <td>${successful_installation.type | h}</td>
+                    <td>${successful_installation.name | h}</td>
+                    <td>${successful_installation.version | h}</td>
+                </tr>
+                <tr><th>Installation directory</th></tr>
+                <tr><td colspan="3">${successful_installation.installation_directory | h}</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_repository_installation_error( installation_error, pad, parent, row_counter, row_is_header=False, is_current_repository=False, render_repository_actions_for='tool_shed' )">
+    <%
+        from galaxy.util import unicodify
+        encoded_id = trans.security.encode_id( installation_error.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rrie-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="test_environment">
+                %if not is_current_repository:
+                    <tr bgcolor="#FFFFCC">
+                        <th>Tool shed</th><th>Name</th><th>Owner</th><th>Changeset revision</th>
+                    </tr>
+                    <tr>
+                        <td>${installation_error.tool_shed | h}</td>
+                        <td>${installation_error.name | h}</td>
+                        <td>${installation_error.owner | h}</td>
+                        <td>${installation_error.changeset_revision | h}</td>
+                    </tr>
+                %endif
+                <tr><th>Error</th></tr>
+                <tr><td colspan="4">${unicodify( installation_error.error_message ) | h}</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_repository_successful_installation( successful_installation, pad, parent, row_counter, row_is_header=False, is_current_repository=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( successful_installation.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rrsi-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="test_environment">
+                %if not is_current_repository:
+                    <tr bgcolor="#FFFFCC">
+                        <th>Tool shed</th><th>Name</th><th>Owner</th><th>Changeset revision</th>
+                    </tr>
+                    <tr>
+                        <td>${successful_installation.tool_shed | h}</td>
+                        <td>${successful_installation.name | h}</td>
+                        <td>${successful_installation.owner | h}</td>
+                        <td>${successful_installation.changeset_revision | h}</td>
+                    </tr>
+                %endif
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_not_tested( not_tested, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( not_tested.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rnt-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="test_environment">
+                <tr><td>${not_tested.reason | h}</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_passed_test( passed_test, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( passed_test.id )
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rpt-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table id="test_environment">
+                <tr><td bgcolor="#FFFFCC"><b>Tool id:</b> ${passed_test.tool_id | h}</td></tr>
+                <tr><td><b>Tool version:</b> ${passed_test.tool_version | h}</td></tr>
+                <tr><td><b>Test:</b> ${passed_test.test_id | h}</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_tool( tool, pad, parent, row_counter, row_is_header, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( tool.id )
+        if row_is_header:
+            cell_type = 'th'
+        else:
+            cell_type = 'td'
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rt-${encoded_id}">
+        %if row_is_header:
+            <th style="padding-left: ${pad+20}px;">${tool.name | h}</th>
+        %else:
+            <td style="padding-left: ${pad+20}px;">
+                %if tool.repository_id:
+                    %if trans.webapp.name == 'tool_shed':
+                        <div style="float:left;" class="menubutton split popup" id="tool-${encoded_id}-popup">
+                            <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( tool.repository_id ), tool_config=tool.tool_config, changeset_revision=tool.changeset_revision, render_repository_actions_for=render_repository_actions_for )}">${tool.name | h}</a>
+                        </div>
+                        <div popupmenu="tool-${encoded_id}-popup">
+                            <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id, render_repository_actions_for=render_repository_actions_for )}">View tool metadata</a>
+                        </div>
+                    %elif trans.webapp.name == 'galaxy':
+                        %if tool.repository_installation_status == trans.install_model.ToolShedRepository.installation_status.INSTALLED:
+                            <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">${tool.name | h}</a>
+                        %else:
+                            ${tool.name | h}
+                        %endif
+                    %else:
+                        ${tool.name | h}
+                    %endif
+                %else:
+                    ${tool.name | h}
+                %endif
+            </td>
+        %endif
+        <${cell_type}>${tool.description | h}</${cell_type}>
+        <${cell_type}>${tool.version | h}</${cell_type}>
+        ##<${cell_type}>${tool.requirements | h}</${cell_type}>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_tool_dependency( tool_dependency, pad, parent, row_counter, row_is_header, render_repository_actions_for='tool_shed' )">
+    <%
+        from galaxy.util import string_as_bool
+        encoded_id = trans.security.encode_id( tool_dependency.id )
+        is_missing = tool_dependency.installation_status not in [ 'Installed' ]
+        if row_is_header:
+            cell_type = 'th'
+        else:
+            cell_type = 'td'
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rtd-${encoded_id}">
+        <${cell_type} style="padding-left: ${pad+20}px;">
+            %if row_is_header:
+                ${tool_dependency.name | h}
+            %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id:
+                %if tool_dependency.repository_id and tool_dependency.installation_status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
+                    <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.tool_dependency_id ) )}">
+                        ${tool_dependency.name | h}
+                    </a>
+                %elif tool_dependency.installation_status not in [ trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
+                    <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository_tool_dependencies', tool_dependency_ids=trans.security.encode_id( tool_dependency.tool_dependency_id ) )}">
+                        ${tool_dependency.name | h}
+                    </a>
+                %else:
+                    ${tool_dependency.name | h}
+                %endif
+            %else:
+                ${tool_dependency.name | h}
+            %endif
+        </${cell_type}>
+        <${cell_type}>
+            <%
+                if tool_dependency.version:
+                    version_str = tool_dependency.version
+                else:
+                    version_str = ''
+            %>
+            ${version_str | h}
+        </${cell_type}>
+        <${cell_type}>${tool_dependency.type | h}</${cell_type}>
+        <${cell_type}>
+            %if trans.webapp.name == 'galaxy':
+                ${tool_dependency.installation_status | h}
+            %endif
+        </${cell_type}>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_test_environment( test_environment, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <% encoded_id = trans.security.encode_id( test_environment.id ) %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rte-${encoded_id}">
+        <td style="padding-left: ${pad+20}px;">
+            <table class="grid" id="test_environment">
+                <tr><td><b>Time tested:</b> ${test_environment.time_tested | h}</td></tr>
+                <tr><td><b>System:</b> ${test_environment.system | h}</td></tr>
+                <tr><td><b>Architecture:</b> ${test_environment.architecture | h}</td></tr>
+                <tr><td><b>Python version:</b> ${test_environment.python_version | h}</td></tr>
+                <tr><td><b>Galaxy revision:</b> ${test_environment.galaxy_revision | h}</td></tr>
+                <tr><td><b>Galaxy database version:</b> ${test_environment.galaxy_database_version | h}</td></tr>
+                <tr><td><b>Tool shed revision:</b> ${test_environment.tool_shed_revision | h}</td></tr>
+                <tr><td><b>Tool shed database version:</b> ${test_environment.tool_shed_database_version | h}</td></tr>
+                <tr><td><b>Tool shed mercurial version:</b> ${test_environment.tool_shed_mercurial_version | h}</td></tr>
+            </table>
+        </td>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_valid_data_manager( data_manager, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        encoded_id = trans.security.encode_id( data_manager.id )
+        if row_is_header:
+            cell_type = 'th'
+        else:
+            cell_type = 'td'
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rvdm-${encoded_id}">
+        <${cell_type} style="padding-left: ${pad+20}px;">${data_manager.name | h}</${cell_type}>
+        <${cell_type}>${data_manager.version | h}</${cell_type}>
+        <${cell_type}>${data_manager.data_tables | h}</${cell_type}>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_workflow( workflow, pad, parent, row_counter, row_is_header=False, render_repository_actions_for='tool_shed' )">
+    <%
+        from tool_shed.util.encoding_util import tool_shed_encode
+        encoded_id = trans.security.encode_id( workflow.id )
+        encoded_workflow_name = tool_shed_encode( workflow.workflow_name )
+        if trans.webapp.name == 'tool_shed':
+            encoded_repository_metadata_id = trans.security.encode_id( workflow.repository_metadata_id )
+            encoded_repository_id = None
+        else:
+            encoded_repository_metadata_id = None
+            encoded_repository_id = trans.security.encode_id( workflow.repository_id )
+        if row_is_header:
+            cell_type = 'th'
+        else:
+            cell_type = 'td'
+    %>
+    <tr class="datasetRow"
+        %if parent is not None:
+            parent="${parent}"
+        %endif
+        id="libraryItem-rw-${encoded_id}">
+        <${cell_type} style="padding-left: ${pad+20}px;">
+            %if row_is_header:
+                ${workflow.workflow_name | h}
+            %elif trans.webapp.name == 'tool_shed' and encoded_repository_metadata_id:
+                <a href="${h.url_for( controller='repository', action='view_workflow', workflow_name=encoded_workflow_name, repository_metadata_id=encoded_repository_metadata_id, render_repository_actions_for=render_repository_actions_for )}">${workflow.workflow_name | h}</a>
+            %elif trans.webapp.name == 'galaxy' and encoded_repository_id:
+                <a href="${h.url_for( controller='admin_toolshed', action='view_workflow', workflow_name=encoded_workflow_name, repository_id=encoded_repository_id )}">${workflow.workflow_name | h}</a>
+            %else:
+                ${workflow.workflow_name | h}
+            %endif
+        </${cell_type}>
+        <${cell_type}>${workflow.steps | h}</${cell_type}>
+        <${cell_type}>${workflow.format_version | h}</${cell_type}>
+        <${cell_type}>${workflow.annotation | h}</${cell_type}>
+    </tr>
+    <%
+        my_row = row_counter.count
+        row_counter.increment()
+    %>
+</%def>
+
+<%def name="render_tool_dependency_resolver( requirements_status, prepare_for_install=False )">
+    <tr class="datasetRow">
+        <td style="padding-left: 20 px;">
+            <table class="grid" id="module_resolver_environment">
+                <thead>
+                    <tr>
+                        <th>Dependency</th>
+                        <th>Version</th>
+                        %if not prepare_for_install:
+                            <th>Resolver</th>
+                            <th>Exact version</th>
+                        %endif
+                        <th>Current installation status</th>
+                    </tr>
+                </thead>
+                <tbody>
+                    %for dependency in requirements_status:
+                        <tr>
+                            <td>${dependency['name'] | h}</td>
+                            <td>${dependency['version'] | h}</td>
+                            %if not prepare_for_install:
+                                %if dependency['dependency_type']:
+                                    <td>${dependency['dependency_type'].title() | h}</td>
+                                %else:
+                                    <td>${dependency['dependency_type'] | h}</td>
+                                %endif
+                                <td>${dependency['exact'] | h}</td>
+                            %endif
+                        %if dependency['dependency_type'] is None:
+                            <td>
+                               <img src="${h.url_for('/static')}/images/icon_error_sml.gif" title='Dependency not resolved'/>
+                               %if prepare_for_install:
+                                   Not Installed
+                               %endif
+                            </td>
+                        %elif not dependency['exact']:
+                            <td>
+                                <img src="${h.url_for('/static')}/images/icon_warning_sml.gif" title='Dependency resolved, but version ${dependency['version'] | h} not found'/>
+                            </td>
+                        %else:
+                            <td>
+                                <img src="${h.url_for('/static')}/june_2007_style/blue/ok_small.png"/>
+                                %if prepare_for_install:
+                                    Installed through ${dependency['dependency_type'].title() | h}
+                                %endif
+                            </td>
+                        %endif
+                        </tr>
+                    %endfor
+                </tbody>
+            </table>
+        </td>
+    </tr>
+</%def>
+
+<%def name="render_resolver_dependencies( requirements_status )">
+    %if requirements_status:
+        <div class="toolForm">
+            <div class="toolFormTitle">Dependency Resolver Details</div>
+            <div class="toolFormBody">
+                <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="module_resolvers">
+                    ${render_tool_dependency_resolver( requirements_status )}
+                </table>
+            </div>
+        </div>
+    %endif
+</%def>
+
+<%def name="render_repository_items( metadata, containers_dict, can_set_metadata=False, render_repository_actions_for='tool_shed' )">
+    <%
+        from tool_shed.util.encoding_util import tool_shed_encode
+
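+        # containers_dict maps section names (e.g. 'datatypes', 'valid_tools') to root folder objects; missing or empty sections are skipped below.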
+        has_datatypes = metadata and 'datatypes' in metadata
+        has_readme_files = metadata and 'readme_files' in metadata
+        has_workflows = metadata and 'workflows' in metadata
+
+        datatypes_root_folder = containers_dict.get( 'datatypes', None )
+        invalid_data_managers_root_folder = containers_dict.get( 'invalid_data_managers', None )
+        invalid_repository_dependencies_root_folder = containers_dict.get( 'invalid_repository_dependencies', None )
+        invalid_tool_dependencies_root_folder = containers_dict.get( 'invalid_tool_dependencies', None )
+        invalid_tools_root_folder = containers_dict.get( 'invalid_tools', None )
+        missing_repository_dependencies_root_folder = containers_dict.get( 'missing_repository_dependencies', None )
+        missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
+        readme_files_root_folder = containers_dict.get( 'readme_files', None )
+        repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
+        test_environment_root_folder = containers_dict.get( 'test_environment', None )
+        tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
+        tool_test_results_root_folder = containers_dict.get( 'tool_test_results', None )
+        valid_data_managers_root_folder = containers_dict.get( 'valid_data_managers', None )
+        valid_tools_root_folder = containers_dict.get( 'valid_tools', None )
+        workflows_root_folder = containers_dict.get( 'workflows', None )
+
+        has_contents = datatypes_root_folder or invalid_tools_root_folder or valid_tools_root_folder or workflows_root_folder
+        has_dependencies = \
+            invalid_repository_dependencies_root_folder or \
+            invalid_tool_dependencies_root_folder or \
+            missing_repository_dependencies_root_folder or \
+            repository_dependencies_root_folder or \
+            tool_dependencies_root_folder or \
+            missing_tool_dependencies_root_folder
+
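+        # Minimal row counter shared by the render_* defs above: each rendered row reads .count and then calls increment().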
+        class RowCounter( object ):
+            def __init__( self ):
+                self.count = 0
+            def increment( self ):
+                self.count += 1
+            def __str__( self ):
+                return str( self.count )
+    %>
+    %if readme_files_root_folder:
+        ${render_table_wrap_style( "readme_files" )}
+        <p/>
+        <div class="toolForm">
+            <div class="toolFormTitle">Repository README files - may contain important installation or license information</div>
+            <div class="toolFormBody">
+                <p/>
+                <% row_counter = RowCounter() %>
+                <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="readme_files">
+                    ${render_folder( readme_files_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                </table>
+            </div>
+        </div>
+    %endif
+    %if has_dependencies:
+        <div class="toolForm">
+            <div class="toolFormTitle">Dependencies of this repository</div>
+            <div class="toolFormBody">
+                %if invalid_repository_dependencies_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="invalid_repository_dependencies">
+                        ${render_folder( invalid_repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if missing_repository_dependencies_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="missing_repository_dependencies">
+                        ${render_folder( missing_repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if repository_dependencies_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="repository_dependencies">
+                        ${render_folder( repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if invalid_tool_dependencies_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="invalid_tool_dependencies">
+                        ${render_folder( invalid_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if tool_dependencies_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="tool_dependencies">
+                        ${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if missing_tool_dependencies_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="missing_tool_dependencies">
+                        ${render_folder( missing_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+            </div>
+        </div>
+    %endif
+    %if has_contents:
+        <p/>
+        <div class="toolForm">
+            <div class="toolFormTitle">Contents of this repository</div>
+            <div class="toolFormBody">
+                %if valid_tools_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="valid_tools">
+                        ${render_folder( valid_tools_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if invalid_tools_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="invalid_tools">
+                        ${render_folder( invalid_tools_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if valid_data_managers_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="valid_data_managers">
+                        ${render_folder( valid_data_managers_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if invalid_data_managers_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="invalid_data_managers">
+                        ${render_folder( invalid_data_managers_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if workflows_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="workflows">
+                        ${render_folder( workflows_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+                %if datatypes_root_folder:
+                    <p/>
+                    <% row_counter = RowCounter() %>
+                    <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="datatypes">
+                        ${render_folder( datatypes_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                    </table>
+                %endif
+            </div>
+        </div>
+    %endif
+    %if tool_test_results_root_folder and trans.app.config.display_legacy_test_results:
+        ${render_table_wrap_style( "test_environment" )}
+        <p/>
+        <div class="toolForm">
+            <div class="toolFormTitle">Automated tool test results</div>
+            <div class="toolFormBody">
+                <p/>
+                <% row_counter = RowCounter() %>
+                <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="test_environment">
+                    ${render_folder( tool_test_results_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True, render_repository_actions_for=render_repository_actions_for )}
+                </table>
+            </div>
+        </div>
+    %endif
+</%def>
diff --git a/templates/webapps/tool_shed/repository/contact_owner.mako b/templates/webapps/tool_shed/repository/contact_owner.mako
new file mode 100644
index 0000000..3b2d5b8
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/contact_owner.mako
@@ -0,0 +1,38 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+${render_tool_shed_repository_actions( repository, metadata=metadata )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Contact the owner of the repository named '${repository.name | h}'</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            This feature is intended to streamline communication between
+            Galaxy tool developers and the members of the Galaxy community who use their tools.
+            Please don't send messages unnecessarily.
+        </div>
+        <form name="send_to_owner" id="send_to_owner" action="${h.url_for( controller='repository', action='send_to_owner', id=trans.security.encode_id( repository.id ) )}" method="post" >
+            <div class="form-row">
+                <label>Message:</label>
+                <textarea name="message" rows="10" cols="40"></textarea>
+            </div>
+            <div class="form-row">
+                <input type="submit" value="Send to owner"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository/create_repository.mako b/templates/webapps/tool_shed/repository/create_repository.mako
new file mode 100644
index 0000000..89e4d44
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/create_repository.mako
@@ -0,0 +1,83 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="render_repository_type_select_field" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create Repository</div>
+    <div class="toolFormBody">
+        <form name="create_repository_form" id="create_repository_form" action="${h.url_for( controller='repository', action='create_repository' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <input  name="name" type="textfield" value="${name | h}" size="40"/>
+                <div style="clear: both"></div>
+            </div>
+            ${render_repository_type_select_field( repository_type_select_field, render_help=True )}
+
+            <div class="form-row">
+                <label>Remote repository URL:</label>
+                <input name="remote_repository_url" placeholder="optional" type="textfield" value="${remote_repository_url | h}" size="80"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    A link to your public development repository.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Homepage URL:</label>
+                <input name="homepage_url" placeholder="optional" type="textfield" value="${homepage_url | h}" size="80"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    A link to the tool's homepage.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+
+            <div class="form-row">
+                <label>Synopsis:</label>
+                <input  name="description" type="textfield" value="${description | h}" size="80"/>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Detailed description:</label>
+                %if long_description:
+                    <pre><textarea name="long_description" rows="3" cols="80">${long_description | h}</textarea></pre>
+                %else:
+                    <textarea name="long_description" rows="3" cols="80"></textarea>
+                %endif
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Categories</label>
+                <div class="form-row">
+                    <select name="category_id" multiple>
+                        %for category in categories:
+                            %if category.id in selected_categories:
+                                <option value="${trans.security.encode_id( category.id )}" selected>${category.name | h}</option>
+                            %else:
+                                <option value="${trans.security.encode_id( category.id )}">${category.name | h}</option>
+                            %endif
+                        %endfor
+                    </select>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    Multi-select list - hold the appropriate key while clicking to select multiple categories.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="create_repository_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository/docker_image_repositories.mako b/templates/webapps/tool_shed/repository/docker_image_repositories.mako
new file mode 100644
index 0000000..8606da6
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/docker_image_repositories.mako
@@ -0,0 +1,65 @@
+<%namespace file="/message.mako" import="render_msg" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormBody">
+        <div class="form-row">
+            <div class="warningmessage">
+                Click the <b>Create Docker Image</b> button below to create a Docker Image that will install the following repositories.
+            </div>
+            <div style="clear: both"></div>
+        </div>
+    </div>
+</div>
+<div class="toolForm">
+    <div class="toolFormTitle">Repositories for inclusion in Docker Image</div>
+        <form id="docker_image_form" name="docker_image_form" action="${h.url_for( controller='repository', action='create_galaxy_docker_image' )}" enctype="multipart/form-data" method="post">
+            <div class="form-row">
+                <input type="hidden" name="id" value="${id}" />
+            </div>
+            <div class="form-row">
+                <table class="grid">
+                    <tr>
+                        <th bgcolor="#D8D8D8">Name</th>
+                        <th bgcolor="#D8D8D8">Owner</th>
+                        <th bgcolor="#D8D8D8">Type</th>
+                    </tr>
+                    %for repository_tup in repository_tups:
+                        <% name, owner, type = repository_tup %>
+                        <tr>
+                            <td>${ name | h }</td>
+                            <td>${ owner | h }</td>
+                            <td>${ type | h }</td>
+                        </tr>
+                    %endfor
+                </table>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" class="primary-button" name="create_docker_image_button" value="Create Docker Image">
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository/export_repository.mako b/templates/webapps/tool_shed/repository/export_repository.mako
new file mode 100644
index 0000000..0a60ea2
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/export_repository.mako
@@ -0,0 +1,47 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${container_javascripts()}
+</%def>
+
+${render_tool_shed_repository_actions( repository, metadata=metadata, changeset_revision=changeset_revision )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Repository '${repository.name | h}'</div>
+    <div class="toolFormBody">
+        <form name="export_repository" id="export_repository" action="${h.url_for( controller='repository', action='export', repository_id=trans.security.encode_id( repository.id ), changeset_revision=changeset_revision )}" method="post" >
+            %if containers_dict is not None and export_repository_dependencies_check_box is not None:
+                ${render_dependencies_section( None, export_repository_dependencies_check_box, None, containers_dict, revision_label=revision_label, export=True )}
+                <div style="clear: both"></div>
+            %else:
+                No repository dependencies are defined for revision <b>${revision_label}</b> of this repository, so click <b>Export</b> to export the selected revision.
+            %endif
+            <div class="form-row">
+                <input type="submit" name="export_repository_button" value="Export"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository/find_tools.mako b/templates/webapps/tool_shed/repository/find_tools.mako
new file mode 100644
index 0000000..7bcacfc
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/find_tools.mako
@@ -0,0 +1,61 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_galaxy_repository_actions" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+%if trans.webapp.name == 'galaxy':
+    ${render_galaxy_repository_actions( repository=None )}
+%endif
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Search repositories for valid tools</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            Valid tools are those that load properly in Galaxy.  Enter any combination of the following tool attributes to find repositories that
+            contain valid tools matching the search criteria.<br/><br/>
+            Comma-separated strings may be entered in each field to expand the search criteria.  If comma-separated strings are entered in more
+            than one field, each field must contain the same number of strings.
+        </div>
+        <div style="clear: both"></div>
+        <form name="find_tools" id="find_tools" action="${h.url_for( controller='repository', action='find_tools' )}" method="post" >
+            <div class="form-row">
+                <label>Tool id:</label>
+                <input name="tool_id" type="textfield" value="${tool_id | h}" size="40"/>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <label>Tool name:</label>
+                <input name="tool_name" type="textfield" value="${tool_name | h}" size="40"/>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <label>Tool version:</label>
+                <input name="tool_version" type="textfield" value="${tool_version | h}" size="40"/>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <label>Exact matches only:</label>
+                ${exact_matches_check_box.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    Check the box to match text exactly (text case doesn't matter as all strings are forced to lower case).
+                </div>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" value="Search repositories"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository/find_workflows.mako b/templates/webapps/tool_shed/repository/find_workflows.mako
new file mode 100644
index 0000000..f4f3691
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/find_workflows.mako
@@ -0,0 +1,50 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_galaxy_repository_actions" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+%if trans.webapp.name == 'galaxy':
+    ${render_galaxy_repository_actions( repository=None )}
+%endif
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Search repositories for workflows</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            Enter a workflow name to find repositories that contain workflows matching the search criteria, or leave blank to find all repositories that contain a workflow.<br/><br/>
+            Comma-separated strings may be entered to expand the search criteria.
+        </div>
+        <div style="clear: both"></div>
+        <form name="find_workflows" id="find_workflows" action="${h.url_for( controller='repository', action='find_workflows' )}" method="post" >
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <label>Workflow name:</label>
+                <input name="workflow_name" type="textfield" value="${workflow_name | h}" size="40"/>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <label>Exact matches only:</label>
+                ${exact_matches_check_box.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    Check the box to match text exactly (text case doesn't matter as all strings are forced to lower case).
+                </div>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" name="find_workflows_button" value="Search repositories"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository/grid.mako b/templates/webapps/tool_shed/repository/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/webapps/tool_shed/repository/import_capsule.mako b/templates/webapps/tool_shed/repository/import_capsule.mako
new file mode 100644
index 0000000..a57ae06
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/import_capsule.mako
@@ -0,0 +1,146 @@
+<%namespace file="/message.mako" import="render_msg" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormBody">
+        <div class="form-row">
+            <div class="warningmessage">
+                Importing may take a while, depending upon the contents of the capsule.
+                Wait until this page refreshes after clicking the <b>Import</b> button below.
+            </div>
+            <div style="clear: both"></div>
+        </div>
+    </div>
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Repository capsule information</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Date and time exported:</label>
+            ${export_info_dict.get( 'export_time', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Exported from Tool Shed:</label>
+            ${export_info_dict.get( 'tool_shed', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Repository name:</label>
+            ${export_info_dict.get( 'repository_name', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Repository owner:</label>
+            ${export_info_dict.get( 'repository_owner', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Changeset revision:</label>
+            ${export_info_dict.get( 'changeset_revision', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Repository dependencies included in capsule?</label>
+            ${export_info_dict.get( 'export_repository_dependencies', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+    </div>
+</div>
+<div class="toolForm">
+    <div class="toolFormBody">
+        <div class="form-row">
+            <div class="warningmessage">
+                <p>
+                    Exported archives for each of the following repositories are included in the capsule.
+                </p>
+                <p>
+                    The <b>Status</b> column will display an entry starting with the word <b>Exists</b> for those repositories
+                    that already exist in this Tool Shed.  These repositories will not be created; the existing repositories
+                    will be used instead.  Existing repositories that are deprecated or deleted must be manually adjusted as
+                    appropriate.
+                </p>
+                <p>
+                    If you are not an admin user in this Tool Shed and you are not a member of the <b>Intergalactic Utilities
+                    Commission</b> defined for this Tool Shed, you will be able to import only those repository archives that
+                    you own.  The <b>Status</b> column for any repository archive that you are not authorized to import will
+                    display the entry <b>Not authorized to import</b>.  If necessary, contact someone who is authorized to
+                    import these repository archives in this Tool Shed.
+                <p>
+                    Repositories that do not yet exist in this Tool Shed (and whose archives you are authorized to import) will
+                    be created in the order defined by the following list.
+                </p>
+            </div>
+            <div style="clear: both"></div>
+        </div>
+    </div>
+</div>
+<div class="toolForm">
+    <div class="toolFormTitle">Import capsule</div>
+        <form id="import_form" name="import_form" action="${h.url_for( controller='repository', action='import_capsule' )}" enctype="multipart/form-data" method="post">
+            <div class="form-row">
+                <input type="hidden" name="encoded_file_path" value="${encoded_file_path}" />
+            </div>
+            <div class="form-row">
+                <table class="grid">
+                    <tr>
+                        <th bgcolor="#D8D8D8">Name</th>
+                        <th bgcolor="#D8D8D8">Owner</th>
+                        <th bgcolor="#D8D8D8">Changeset Revision</th>
+                        <th bgcolor="#D8D8D8">Type</th>
+                        <th bgcolor="#D8D8D8">Status</th>
+                    </tr>
+                    %for repository_status_info_dict in repository_status_info_dicts:
+                        <tr>
+                            <td>${ repository_status_info_dict[ 'name' ] | h }</td>
+                            <td>${ repository_status_info_dict[ 'owner' ] | h }</td>
+                            <td>${ repository_status_info_dict[ 'changeset_revision' ] | h }</td>
+                            <td>
+                                <%
+                                    # Get the label for the repository type.
+                                    type = repository_status_info_dict[ 'type' ]
+                                    type_class = trans.app.repository_types_registry.get_class_by_label( type )
+                                    type_label = type_class.label
+                                %>
+                                ${ type_label | h }
+                            </td>
+                            <td>
+                                %if repository_status_info_dict[ 'status' ] is None:
+                                    &nbsp;
+                                %else:
+                                    ${ repository_status_info_dict[ 'status' ] | h }
+                                %endif
+                            </td>
+                        </tr>
+                    %endfor
+                </table>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" class="primary-button" name="import_capsule_button" value="Import">
+            </div>
+        </form>
+    </div>
+</div>
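
A hedged sketch of the status triage this page describes: each archive in the capsule is either already present (reused, not recreated), not importable by the current user, or queued for creation in the capsule's list order. The function, helper names, and sample fields are hypothetical; only the dict keys mirror the template's repository_status_info_dicts.

    # Hypothetical sketch of the capsule status triage described above;
    # this is not the Tool Shed's actual importer.
    def triage_archives(archives, current_user, existing_repos, is_admin=False):
        """Partition capsule archives, preserving the capsule's creation order."""
        to_create = []
        for archive in archives:  # each: {'name', 'owner', 'changeset_revision', ...}
            key = (archive['name'], archive['owner'])
            if key in existing_repos:
                archive['status'] = 'Exists'            # reuse, do not recreate
            elif not is_admin and archive['owner'] != current_user:
                archive['status'] = 'Not authorized to import'
            else:
                archive['status'] = None                # will be created, in order
                to_create.append(archive)
        return to_create
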
diff --git a/templates/webapps/tool_shed/repository/import_capsule_results.mako b/templates/webapps/tool_shed/repository/import_capsule_results.mako
new file mode 100644
index 0000000..5f48195
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/import_capsule_results.mako
@@ -0,0 +1,82 @@
+<%namespace file="/message.mako" import="render_msg" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li><a class="action-button" target="galaxy_main" href="${h.url_for( controller='repository', action='browse_categories' )}">Browse repositories</a></li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Repository capsule information</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Date and time exported:</label>
+            ${export_info_dict.get( 'export_time', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Exported from Tool Shed:</label>
+            ${export_info_dict.get( 'tool_shed', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Repository name:</label>
+            ${export_info_dict.get( 'repository_name', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Repository owner:</label>
+            ${export_info_dict.get( 'repository_owner', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Changeset revision:</label>
+            ${export_info_dict.get( 'changeset_revision', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+        <div class="form-row">
+            <label>Repository dependencies included in capsule?</label>
+            ${export_info_dict.get( 'export_repository_dependencies', 'unknown' ) | h}
+        </div>
+        <div style="clear: both"></div>
+    </div>
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Results of attempt to import ${len( import_results_tups )} repositories contained in the capsule</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <table class="grid">
+                %for import_results_tup in import_results_tups:
+                    <%
+                        ok, name_owner_tup, results_message = import_results_tup
+                        name, owner = name_owner_tup
+                    %>
+                    <tr><td>Archive of repository <b>${name}</b> owned by <b>${owner}</b><br/>${results_message}</td></tr>
+                %endfor
+            </table>
+            <div style="clear: both"></div>
+        </div>
+    </div>
+</div>
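
The template above unpacks each entry of import_results_tups as (ok, (name, owner), results_message). A small sketch of consuming that same structure outside Mako; the sample data is invented.

    # The (ok, (name, owner), results_message) shape comes straight from the
    # template above; the sample entries are invented.
    import_results_tups = [
        (True, ('fastqc', 'devteam'), 'Repository created.'),
        (False, ('bwa', 'devteam'), 'Not authorized to import.'),
    ]
    for ok, (name, owner), results_message in import_results_tups:
        marker = 'ok  ' if ok else 'FAIL'
        print('%s %s/%s: %s' % (marker, owner, name, results_message))
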
diff --git a/templates/webapps/tool_shed/repository/manage_repository.mako b/templates/webapps/tool_shed/repository/manage_repository.mako
new file mode 100644
index 0000000..8922843
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/manage_repository.mako
@@ -0,0 +1,411 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+    from tool_shed.util.basic_util import to_html_string
+    from tool_shed.util.metadata_util import is_malicious
+    from tool_shed.repository_types.util import TOOL_DEPENDENCY_DEFINITION
+
+    if repository.metadata_revisions:
+        has_metadata = True
+    else:
+        has_metadata = False
+
+    is_admin = trans.user_is_admin()
+    is_new = repository.is_new( trans.app )
+
+    if repository.deprecated:
+        is_deprecated = True
+    else:
+        is_deprecated = False
+
+    if is_malicious( trans.app, trans.security.encode_id( repository.id ), repository.tip( trans.app ) ):
+        changeset_is_malicious = True
+    else:
+        changeset_is_malicious = False
+
+    if not is_deprecated and trans.app.security_agent.can_push( trans.app, trans.user, repository ):
+        can_push = True
+    else:
+        can_push = False
+
+    if not is_deprecated and not is_new and ( not changeset_is_malicious or can_push ):
+        can_download = True
+    else:
+        can_download = False
+
+    if has_metadata and not is_deprecated and trans.app.security_agent.user_can_review_repositories( trans.user ):
+        can_review_repository = True
+    else:
+        can_review_repository = False
+
+    if not is_new and not is_deprecated:
+        can_set_metadata = True
+    else:
+        can_set_metadata = False
+
+    if changeset_revision == repository.tip( trans.app ):
+        changeset_revision_is_repository_tip = True
+    else:
+        changeset_revision_is_repository_tip = False
+
+    if metadata and can_set_metadata and is_admin and changeset_revision_is_repository_tip:
+        can_set_malicious = True
+    else:
+        can_set_malicious = False
+
+    can_view_change_log = not is_new
+
+    if repository_metadata and repository_metadata.includes_tools:
+        includes_tools = True
+    else:
+        includes_tools = False
+
+    if changeset_revision_is_repository_tip:
+        tip_str = 'repository tip'
+        sharable_link_label = 'Sharable link to this repository:'
+        sharable_link_changeset_revision = None
+    else:
+        tip_str = ''
+        sharable_link_label = 'Sharable link to this repository revision:'
+        sharable_link_changeset_revision = changeset_revision
+
+    if heads:
+        multiple_heads = len( heads ) > 1
+    else:
+        multiple_heads = False
+
+    if repository_metadata is None:
+        revision_installable = False
+    else:
+        if repository_metadata.downloadable is None:
+            revision_installable = 'unknown'
+        else:
+            revision_installable = repository_metadata.downloadable
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css('base','library','jquery.rating')}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+    ${container_javascripts()}
+</%def>
+
+${render_tool_shed_repository_actions( repository, metadata=metadata, changeset_revision=changeset_revision )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if repository.deprecated:
+    <div class="warningmessage">
+        This repository has been marked as deprecated, so some tool shed features may be restricted.
+    </div>
+%endif
+%if multiple_heads:
+    ${render_multiple_heads_message( heads )}
+%endif
+%if deprecated_repository_dependency_tups:
+    ${render_deprecated_repository_dependencies_message( deprecated_repository_dependency_tups )}
+%endif
+
+%if len( changeset_revision_select_field.options ) > 1:
+    <div class="toolForm">
+        <div class="toolFormTitle">Repository revision</div>
+        <div class="toolFormBody">
+            <form name="change_revision" id="change_revision" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+                <div class="form-row">
+                    ${changeset_revision_select_field.get_html()} <i>${tip_str}</i>
+                    <div class="toolParamHelp" style="clear: both;">
+                        %if can_review_repository:
+                            Select a revision to inspect for adding or managing a review or for download or installation.
+                        %else:
+                            Select a revision to inspect for download or installation.
+                        %endif
+                    </div>
+                </div>
+            </form>
+        </div>
+    </div>
+    <p/>
+%endif
+<div class="toolForm">
+    <div class="toolFormTitle">Repository <b>${repository.name | h}</b></div>
+    <div class="toolFormBody">
+        <form name="edit_repository" id="edit_repository" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+            <div class="form-row">
+                <b>Name:</b>
+                %if repository.times_downloaded > 0:
+                    <a title="Browse the contents of this repository" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${repository.name}</a>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Repository names cannot be changed if the repository has been cloned.
+                    </div>
+                %else:
+                    <input name="repo_name" type="textfield" value="${repository.name | h}" size="40"/>
+                %endif
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <b>Owner:</b>
+                <a title="See all repositories owned by ${ repository.user.username | h }" href="${h.url_for( controller='repository', action='browse_repositories_by_user', user_id=trans.app.security.encode_id( repository.user.id ) )}">${ repository.user.username | h }</a>
+            </div>
+            <div class="form-row">
+                <b>Synopsis:</b>
+                <input name="description" type="textfield" value="${description | h}" size="80"/>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <b>Detailed description:</b>
+                %if long_description:
+                    <pre><textarea name="long_description" rows="3" cols="80">${long_description | h}</textarea></pre>
+                %else:
+                    <textarea name="long_description" rows="3" cols="80"></textarea>
+                %endif
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <b>Content homepage:</b>
+                %if repository.homepage_url:
+                    <input name="homepage_url" type="text" value="${homepage_url | h}" size="80"/>
+                %else:
+                    <input name="homepage_url" type="text" value="" size="80"/>
+                %endif
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <b>Development repository:</b>
+                %if repository.remote_repository_url:
+                    <input name="remote_repository_url" type="text" value="${remote_repository_url | h}" size="80"/>
+                %else:
+                    <input name="remote_repository_url" type="text" value="" size="80"/>
+                %endif
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <b>${sharable_link_label}</b>
+                <a href="${ repository.share_url }" target="_blank">${ repository.share_url }</a>
+                <button title="to clipboard" class="btn btn-default btn-xs" id="share_clipboard"><span class="fa fa-clipboard"></span></button>
+            </div>
+            %if can_download or can_push:
+                <div class="form-row">
+                    <b>Clone this repository:</b>
+                    <code>hg clone <a title="Show in mercurial browser" href="${ repository.clone_url }">${ repository.clone_url }</a></code>
+                    <button title="to clipboard" class="btn btn-default btn-xs" id="clone_clipboard"><span class="fa fa-clipboard"></span></button>
+                </div>
+            %endif
+            ${render_repository_type_select_field( repository_type_select_field, render_help=True )}
+            <div class="form-row">
+                <b>Revision:</b>
+                %if can_view_change_log:
+                    <a title="See the revision history" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${revision_label}</a>
+                %else:
+                    ${revision_label}
+                %endif
+            </div>
+            <div class="form-row">
+                <b>This revision can be installed:</b>
+                ${revision_installable}
+            </div>
+            <div class="form-row">
+                <b>Times cloned / installed:</b>
+                ${repository.times_downloaded | h}
+            </div>
+            %if is_admin:
+                <div class="form-row">
+                    <b>Location:</b>
+                    ${repository.repo_path( trans.app ) | h}
+                </div>
+                <div class="form-row">
+                    <b>Deleted:</b>
+                    ${repository.deleted | h}
+                </div>
+            %endif
+            <div class="form-row">
+                <input type="submit" name="edit_repository_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
+${render_repository_items( metadata, containers_dict, can_set_metadata=True, render_repository_actions_for='tool_shed' )}
+<p/>
+<div class="toolForm">
+    <div class="toolFormTitle">Manage categories</div>
+    <div class="toolFormBody">
+        <form name="categories" id="categories" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+            <div class="form-row">
+                <label>Categories</label>
+                <select name="category_id" multiple>
+                    %for category in categories:
+                        %if category.id in selected_categories:
+                            <option value="${trans.security.encode_id( category.id )}" selected>${category.name | h}</option>
+                        %else:
+                            <option value="${trans.security.encode_id( category.id )}">${category.name | h}</option>
+                        %endif
+                    %endfor
+                </select>
+                <div class="toolParamHelp" style="clear: both;">
+                    Multi-select list - hold the appropriate key while clicking to select multiple categories.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="manage_categories_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
+%if trans.app.config.smtp_server:
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Notification on update</div>
+        <div class="toolFormBody">
+            <form name="receive_email_alerts" id="receive_email_alerts" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+                <div class="form-row">
+                    <label>Receive email alerts:</label>
+                    ${alerts_check_box.get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        Check the box and click <b>Save</b> to receive email alerts when updates to this repository occur.
+                    </div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="receive_email_alerts_button" value="Save"/>
+                </div>
+            </form>
+        </div>
+    </div>
+%endif
+<p/>
+<div class="toolForm">
+    <div class="toolFormTitle">Grant authority to make changes</div>
+    <div class="toolFormBody">
+        <table class="grid">
+            <tr>
+                <td>${repository.user.username | h}</td>
+                <td>owner</td>
+                <td> </td>
+            </tr>
+            %for username in current_allow_push_list:
+                %if username != repository.user.username:
+                    <tr>
+                        <td>${username | h}</td>
+                        <td>write</td>
+                        <td><a class="action-button" href="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ), user_access_button='Remove', remove_auth=username )}">remove</a>
+                    </tr>
+                %endif
+            %endfor
+        </table>
+        <br clear="left"/>
+        <form name="user_access" id="user_access" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+            <div class="form-row">
+                <label>Username:</label>
+                ${allow_push_select_field.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    Multi-select usernames to grant permission to make changes to this repository.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="user_access_button" value="Grant access"/>
+            </div>
+        </form>
+    </div>
+</div>
+%if repository.ratings:
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Rating</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <label>Times Rated:</label>
+                ${num_ratings | h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Average Rating:</label>
+                ${render_star_rating( 'avg_rating', avg_rating, disabled=True )}
+                <div style="clear: both"></div>
+            </div>
+        </div>
+    </div>
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormBody">
+            %if display_reviews:
+                <div class="form-row">
+                    <a href="${h.url_for( controller='repository', action='view_repository', id=trans.security.encode_id( repository.id ), display_reviews=False )}"><label>Hide Reviews</label></a>
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <table class="grid">
+                        <thead>
+                            <tr>
+                                <th>Rating</th>
+                                <th>Comments</th>
+                                <th>Reviewed</th>
+                                <th>User</th>
+                            </tr>
+                        </thead>
+                        <% count = 0 %>
+                        %for review in repository.ratings:
+                            <%
+                                count += 1
+                                name = 'rating%d' % count
+                            %>
+                            <tr>
+                                <td>${render_star_rating( name, review.rating, disabled=True )}</td>
+                                <td>${render_review_comment( to_html_string( review.comment ) )}</td>
+                                <td>${time_ago( review.update_time )}</td>
+                                <td>${review.user.username | h}</td>
+                            </tr>
+                        %endfor
+                    </table>
+                </div>
+                <div style="clear: both"></div>
+            %else:
+                <div class="form-row">
+                    <a href="${h.url_for( controller='repository', action='view_repository', id=trans.security.encode_id( repository.id ), display_reviews=True )}"><label>Display Reviews</label></a>
+                </div>
+                <div style="clear: both"></div>
+            %endif
+        </div>
+    </div>
+%endif
+<p/>
+%if can_set_malicious:
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Malicious repository tip</div>
+        <div class="toolFormBody">
+            <form name="malicious" id="malicious" action="${h.url_for( controller='repository', action='set_malicious', id=trans.security.encode_id( repository.id ), ctx_str=changeset_revision )}" method="post">
+                <div class="form-row">
+                    <label>Define repository tip as malicious:</label>
+                    ${malicious_check_box.get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        Check the box and click <b>Save</b> to define this repository's tip as malicious, preventing it from being downloaded.
+                    </div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="malicious_button" value="Save"/>
+                </div>
+            </form>
+        </div>
+    </div>
+%endif
+<p/>
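
The long <% %> block at the top of manage_repository.mako derives its permission flags through if/else ladders. The same derivations can be expressed as boolean expressions; the function below is a readability sketch using only values the template already has, and it is runnable only inside a Galaxy environment since it reuses the same trans/repository objects and tool_shed helpers.

    # Condensed, function-form sketch of the flag derivations in the <% %>
    # block above; same inputs and results, expressed as boolean expressions.
    from tool_shed.util.metadata_util import is_malicious

    def derive_flags(trans, repository, metadata, changeset_revision, heads,
                     is_admin, is_new):
        is_deprecated = bool(repository.deprecated)
        malicious = is_malicious(trans.app,
                                 trans.security.encode_id(repository.id),
                                 repository.tip(trans.app))
        can_push = (not is_deprecated and
                    trans.app.security_agent.can_push(trans.app, trans.user, repository))
        return {
            'can_push': can_push,
            'can_download': (not is_deprecated and not is_new and
                             (not malicious or can_push)),
            'can_review_repository': (bool(repository.metadata_revisions) and
                                      not is_deprecated and
                                      trans.app.security_agent.user_can_review_repositories(trans.user)),
            'can_set_metadata': not is_new and not is_deprecated,
            'can_set_malicious': (bool(metadata) and not is_new and not is_deprecated and
                                  is_admin and
                                  changeset_revision == repository.tip(trans.app)),
            'multiple_heads': bool(heads) and len(heads) > 1,
        }
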
diff --git a/templates/webapps/tool_shed/repository/preview_tools_in_changeset.mako b/templates/webapps/tool_shed/repository/preview_tools_in_changeset.mako
new file mode 100644
index 0000000..8bddeaf
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/preview_tools_in_changeset.mako
@@ -0,0 +1,60 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_galaxy_repository_actions" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+    ${container_javascripts()}
+</%def>
+
+${render_galaxy_repository_actions( repository=repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Repository '${repository.name | h}'</div>
+    <div class="toolFormBody">
+        %if len( changeset_revision_select_field.options ) > 1:
+            <form name="change_revision" id="change_revision" action="${h.url_for( controller='repository', action='preview_tools_in_changeset', repository_id=trans.security.encode_id( repository.id ) )}" method="post" >
+                <div class="form-row">
+                    <%
+                        if changeset_revision == repository.tip( trans.app ):
+                            tip_str = 'repository tip'
+                        else:
+                            tip_str = ''
+                    %>
+                    ${changeset_revision_select_field.get_html()} <i>${tip_str | h}</i>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Select a revision to inspect and download versions of Galaxy utilities from this repository.
+                    </div>
+                </div>
+            </form>
+        %else:
+            <div class="form-row">
+                <label>Revision:</label>
+                ${revision_label}
+            </div>
+        %endif
+    </div>
+</div>
+<p/>
+${render_repository_items( metadata, containers_dict, can_set_metadata=False, render_repository_actions_for='galaxy' )}
diff --git a/templates/webapps/tool_shed/repository/rate_repository.mako b/templates/webapps/tool_shed/repository/rate_repository.mako
new file mode 100644
index 0000000..22506cc
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/rate_repository.mako
@@ -0,0 +1,154 @@
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+    is_new = repository.is_new( trans.app )
+    can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )
+    can_download = not is_new and ( not is_malicious or can_push )
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css('base','jquery.rating')}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating" )}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+${render_tool_shed_repository_actions( repository, metadata=None, changeset_revision=None )}
+
+%if repository.user != trans.user:
+    <div class="toolForm">
+        <div class="toolFormTitle">${repository.name | h}</div>
+        %if can_download:
+            <div class="form-row">
+                <label>Clone this repository:</label>
+                ${render_clone_str( repository )}
+            </div>
+        %endif
+        <div class="toolFormBody">
+            <div class="form-row">
+                <label>Type:</label>
+                ${repository.type | h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                ${repository.description | h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Revision:</label>
+                ${revision_label}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Owner:</label>
+                ${repository.user.username | h}
+                <div style="clear: both"></div>
+            </div>
+        </div>
+    </div>
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Repository '${repository.name | h}'</div>
+        <div class="toolFormBody">
+            <form id="rate_repository" name="rate_repository" action="${h.url_for( controller='repository', action='rate_repository', id=trans.security.encode_id( repository.id ) )}" method="post">
+                <div class="form-row">
+                    <label>Times Rated:</label>
+                    ${num_ratings | h}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Average Rating:</label>
+                    ${render_star_rating( 'avg_rating', avg_rating, disabled=True )}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Your Rating:</label>
+                    <%
+                        if rra and rra.rating:
+                            rating = rra.rating
+                        else:
+                            rating = 0
+                    %>
+                    ${render_star_rating( 'rating', rating )}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Review:</label>
+                    %if rra and rra.comment:
+                        <div class="form-row-input">
+                            <pre><textarea name="comment" rows="5" cols="80">${rra.comment | h}</textarea></pre>
+                        </div>
+                    %else:
+                        <div class="form-row-input">
+                            <textarea name="comment" rows="5" cols="80"></textarea>
+                        </div>
+                    %endif
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="rate_button" id="rate_button" value="Submit" />
+                </div>
+            </form>
+        </div>
+    </div>
+    <p/>
+    %if repository.ratings and ( len( repository.ratings ) > 1 or repository.ratings[0] != rra ):
+        <div class="toolForm">
+            <div class="toolFormBody">
+                %if display_reviews:
+                    <div class="form-row">
+                        <a href="${h.url_for( controller='repository', action='rate_repository', id=trans.security.encode_id( repository.id ), display_reviews=False )}"><label>Hide Reviews</label></a>
+                    </div>
+                    <table class="grid">
+                        <thead>
+                            <tr>
+                                <th>Rating</th>
+                                <th>Comments</th>
+                                <th>Reviewed</th>
+                                <th>User</th>
+                            </tr>
+                        </thead>
+                        <% count = 0 %>
+                        %for review in repository.ratings:
+                            <%
+                                count += 1
+                                name = 'rating%d' % count
+                            %>
+                            <tr>
+                                <td>${render_star_rating( name, review.rating, disabled=True )}</td>
+                                <td><pre>${review.comment | h}</pre></td>
+                                <td>${time_ago( review.update_time )}</td>
+                                <td>${review.user.username | h}</td>
+                            </tr>
+                        %endfor
+                    </table>
+                %else:
+                    <div class="form-row">
+                        <a href="${h.url_for( controller='repository', action='rate_repository', id=trans.security.encode_id( repository.id ), display_reviews=True )}"><label>Display Reviews</label></a>
+                    </div>
+                %endif
+            </div>
+        </div>
+    %endif
+%endif
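
The rating page displays num_ratings and avg_rating computed server-side from the repository's rating rows. A trivial sketch of that aggregation; the Rating class here is an invented stand-in for the ORM objects whose .rating attribute the template reads.

    # Stand-in aggregation for the num_ratings / avg_rating values shown
    # above; Rating is an invented placeholder for the ORM rating rows.
    class Rating(object):
        def __init__(self, rating):
            self.rating = rating

    ratings = [Rating(5), Rating(4), Rating(3)]
    num_ratings = len(ratings)
    avg_rating = (float(sum(r.rating for r in ratings)) / num_ratings
                  if num_ratings else 0.0)
    print(num_ratings, round(avg_rating, 1))  # 3 4.0
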
diff --git a/templates/webapps/tool_shed/repository/tool_form.mako b/templates/webapps/tool_shed/repository/tool_form.mako
new file mode 100644
index 0000000..ba75c0c
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/tool_form.mako
@@ -0,0 +1,154 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="*" />
+
+<%
+    from galaxy.util.expressions import ExpressionContext
+    from galaxy import util
+    from galaxy.tools.parameters.basic import DataToolParameter, ColumnListParameter, GenomeBuildParameter, SelectToolParameter
+    from galaxy.web.form_builder import SelectField, TextField
+%>
+
+<html>
+    <head>
+        <title>Galaxy tool preview</title>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        ${h.css( "base" )}
+    </head>
+    <body>
+        <%def name="do_inputs( inputs, tool_state, prefix, other_values=None )">
+            <% other_values = ExpressionContext( tool_state, other_values ) %>
+            %for input_index, input in enumerate( inputs.itervalues() ):
+                %if not input.visible:
+                    <% pass %>
+                %elif input.type in ["repeat", "section"]:
+                    <div class="repeat-group">
+                        <div class="form-title-row">
+                            <b>${input.title_plural}</b>
+                        </div>
+                        <div class="repeat-group-item">
+                            <div class="form-title-row">
+                                <b>${input.title} 0</b>
+                            </div>
+                        </div>
+                    </div>
+                %elif input.type == "conditional":
+                    %if tool_state.items():
+                        <%
+                            try:
+                                group_state = tool_state[ input.name ][ 0 ]
+                            except Exception:
+                                group_state = tool_state[ input.name ]
+                            current_case = group_state[ '__current_case__' ]
+                            group_prefix = prefix + input.name + "|"
+                        %>
+                        %if input.value_ref_in_group:
+                            ${row_for_param( group_prefix, input.test_param, group_state, other_values )}
+                        %endif
+                        ${do_inputs( input.cases[current_case].inputs, group_state, group_prefix, other_values )}
+                    %endif
+                %elif input.type == "upload_dataset":
+                    %if input.get_datatype( trans, other_values ).composite_type is None:
+                        ## Have non-composite upload appear as before
+                        ${do_inputs( input.inputs, 'files', prefix + input.name + "_" + str( 0 ) + "|", other_values )}
+                    %else:
+                        <div class="repeat-group">
+                            <div class="form-title-row">
+                                <b>${input.group_title( other_values )}</b>
+                            </div>
+                            <div class="repeat-group-item">
+                            <div class="form-title-row">
+                                <b>File Contents for ${input.title_by_index( trans, 0, other_values )}</b>
+                            </div>
+                        </div>
+                    %endif
+                %else:
+                    ${row_for_param( prefix, input, tool_state, other_values )}
+                %endif
+            %endfor
+        </%def>
+
+        <%def name="row_for_param( prefix, param, parent_state, other_values )">
+            <%
+                # Disable refresh_on_change for select lists displayed in the tool shed. 
+                param.refresh_on_change = False
+                label = param.get_label()
+                if isinstance( param, DataToolParameter ) or isinstance( param, ColumnListParameter ) or isinstance( param, GenomeBuildParameter ):
+                    field = SelectField( param.name )
+                    field.add_option( param.name, param.name )
+                    field_html = field.get_html()
+                elif isinstance( param, SelectToolParameter ) and hasattr( param, 'data_ref' ):
+                    field = SelectField( param.name, display=param.display, multiple=param.multiple )
+                    field.add_option( param.data_ref, param.data_ref )
+                    field_html = field.get_html( prefix )
+                elif isinstance( param, SelectToolParameter ) and param.is_dynamic:
+                    field = SelectField( param.name, display=param.display, multiple=param.multiple )
+                    dynamic_options = param.options
+                    if dynamic_options is not None:
+                        if dynamic_options.index_file:
+                            option_label = "Dynamically generated from entries in file %s" % str( dynamic_options.index_file )
+                            field.add_option( option_label, "none" )
+                        elif dynamic_options.missing_index_file:
+                            option_label = "Dynamically generated from entries in missing file %s" % str( dynamic_options.missing_index_file )
+                            field.add_option( option_label, "none" )
+                    else:
+                        field.add_option( "Dynamically generated from old-style Dynamic Options.", "none" )
+                    field_html = field.get_html( prefix )
+                else:
+                    field = TextField( param.name, value="Parameter type: %s" % param.type )
+                    field_html = field.get_html( prefix, disabled=True )
+            %>
+            <div class="form-row">
+                %if label:
+                    <label for="${param.name}">${label}:</label>
+                %endif
+                <div class="form-row-input">${field_html}</div>
+                %if param.help:
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${param.help}
+                    </div>
+                %endif
+                <div style="clear: both"></div>     
+            </div>
+        </%def>
+
+        %if render_repository_actions_for == 'galaxy':
+            ${render_galaxy_repository_actions( repository=repository )}
+        %else:
+            ${render_tool_shed_repository_actions( repository, metadata=None, changeset_revision=None )}
+        %endif
+
+        %if message:
+            ${render_msg( message, status )}
+        %endif
+
+        %if tool:
+            <div class="toolForm" id="${tool.id | h}">
+                <div class="toolFormTitle">${tool.name | h} (version ${tool.version | h})</div>
+                <div class="toolFormBody">
+                    <form id="tool_form" name="tool_form" action="" method="get">
+                        <input type="hidden" name="tool_state" value="${util.object_to_string( tool_state.encode( tool, app ) )}">
+                        ${do_inputs( tool.inputs_by_page[ tool_state.page ], tool_state.inputs, "" )}
+                    </form>
+                </div>
+            </div>
+            %if tool.help:
+                <div class="toolHelp">
+                    <div class="toolHelpBody">
+                        <%
+                            tool_help = tool.help
+                            # Help is a Mako template, so render it using the current static path.
+                            tool_help = tool_help.render( static_path=h.url_for( '/static' ) )
+                            # Convert to unicode to display non-ascii characters.
+                            if not isinstance( tool_help, unicode ):
+                                tool_help = unicode( tool_help, 'utf-8')
+                        %>
+                        ${tool_help}
+                    </div>
+                </div>
+            %endif
+        %else:
+            Tool not properly loaded.
+        %endif
+    </body>
+</html>
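
tool_form.mako previews parameters with placeholder widgets from galaxy.web.form_builder rather than live tool state. A minimal sketch of the same widget calls the template makes, outside Mako; it requires the Galaxy codebase on the Python path, and the parameter names here are invented examples.

    # Same form_builder calls the template above makes.  Requires a Galaxy
    # checkout on sys.path; 'reference_genome' and 'threshold' are invented.
    from galaxy.web.form_builder import SelectField, TextField

    select = SelectField('reference_genome')
    select.add_option('Dynamically generated from old-style Dynamic Options.', 'none')
    print(select.get_html())  # <select name="reference_genome">...</select>

    text = TextField('threshold', value='Parameter type: integer')
    print(text.get_html(disabled=True))  # rendered read-only, as in the preview
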
diff --git a/templates/webapps/tool_shed/repository/upload.mako b/templates/webapps/tool_shed/repository/upload.mako
new file mode 100644
index 0000000..804c6b4
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/upload.mako
@@ -0,0 +1,161 @@
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    is_new = repository.is_new( trans.app )
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "libs/jquery/jquery.rating", "dynatree_skin/ui.dynatree" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery-ui", "libs/jquery/jquery.cookie", "libs/jquery/jquery.dynatree" )}
+    ${common_javascripts(repository)}
+    <script type="text/javascript">
+    $( function() {
+        $( "select[refresh_on_change='true']").change( function() {
+            $( "#upload_form" ).submit();
+        });
+    });
+    </script>
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+${render_tool_shed_repository_actions( repository=repository )}
+
+<div class="toolForm">
+    <div class="toolFormBody">
+        <div class="form-row">
+            <div class="warningmessage">
+                Upload a single file or tarball.  Uploading may take a while, depending upon the size of the file.
+                Wait until a message is displayed in your browser after clicking the <b>Upload</b> button below.
+            </div>
+            <div style="clear: both"></div>
+        </div>
+    </div>
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Repository '${repository.name | h}'</div>
+    <div class="toolFormBody">
+        <form id="upload_form" name="upload_form" action="${h.url_for( controller='upload', action='upload', repository_id=trans.security.encode_id( repository.id ) )}" enctype="multipart/form-data" method="post">
+            <div class="form-row">
+                <label>File:</label>
+                <div class="form-row-input">
+                    <input type="file" name="file_data"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>URL:</label>
+                <div class="form-row-input">
+                    <input name="url" type="text" value="${url | h}" size="40"/>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                     Enter a URL from which to upload your files.  In addition to http and ftp URLs, URLs that point to Mercurial
+                     repositories (URLs that start with hg:// or hgs://) are allowed.  This mechanism results in the tip revision of an
+                     external Mercurial repository being added to the Tool Shed repository as a single new changeset.  The revision
+                     history of the originating external Mercurial repository is not uploaded to the Tool Shed repository.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <%
+                    if uncompress_file:
+                        yes_selected = 'selected'
+                        no_selected = ''
+                    else:
+                        yes_selected = ''
+                        no_selected = 'selected'
+                %>
+                <label>Uncompress files?</label>
+                <div class="form-row-input">
+                    <select name="uncompress_file">
+                        <option value="true" ${yes_selected}>Yes
+                        <option value="false" ${no_selected}>No
+                    </select>
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    Supported compression types are gz and bz2.  If <b>Yes</b> is selected, the uploaded file will be uncompressed.  However,
+                    if the uploaded file is an archive that contains compressed files, the contained files will not be uncompressed.  For
+                    example, if the uploaded compressed file is some_file.tar.gz, some_file.tar will be uncompressed and extracted, but if
+                    some_file.tar contains some_contained_file.gz, the contained file will not be uncompressed.
+                </div>
+            </div>
+            %if not is_new:
+                <div class="form-row">
+                    <%
+                        if remove_repo_files_not_in_tar:
+                            yes_selected = 'selected'
+                            no_selected = ''
+                        else:
+                            yes_selected = ''
+                            no_selected = 'selected'
+                    %>
+                    <label>Remove files in the repository (relative to the root or selected upload point) that are not in the uploaded archive?</label>
+                    <div class="form-row-input">
+                        <select name="remove_repo_files_not_in_tar">
+                            <option value="true" ${yes_selected}>Yes
+                            <option value="false" ${no_selected}>No
+                        </select>
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        This selection pertains only to uploaded tar archives, not to single file uploads.  If <b>Yes</b> is selected, files
+                        that exist in the repository (relative to the root or selected upload point) but that are not in the uploaded archive
+                        will be removed from the repository.  Otherwise, all existing repository files will remain and the uploaded archive
+                        files will be added to the repository.
+                    </div>
+                </div>
+            %endif
+            <div class="form-row">
+                <label>Changeset commit message:</label>
+                <div class="form-row-input">
+                    %if commit_message:
+                        <pre><textarea name="commit_message" rows="3" cols="35">${commit_message | h}</textarea></pre>
+                    %else:
+                        <textarea name="commit_message" rows="3" cols="35"></textarea>
+                    %endif
+                </div>
+                <div class="toolParamHelp" style="clear: both;">
+                    This is the commit message for the Mercurial changeset that will be created by this upload.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            %if not repository.is_new( trans.app ):
+                <div class="form-row" >
+                    <label>Contents:</label>
+                    <div id="tree" >
+                        Loading...
+                    </div>
+                    <input type="hidden" id="upload_point" name="upload_point" value=""/>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Select a location within the repository to upload your files by clicking the checkbox next to the location.  The
+                        selected location is considered the upload point.  If no location is selected, the upload point will be the
+                        repository root.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            <div class="form-row">
+                <input type="submit" class="primary-button" name="upload_button" value="Upload">
+            </div>
+        </form>
+    </div>
+</div>
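
The "Uncompress files?" help above states that an uploaded some_file.tar.gz is uncompressed and extracted, while a some_contained_file.gz inside it stays compressed. A standard-library demonstration of exactly that behavior:

    # Demonstrates the behavior described in the upload help: tarfile
    # transparently uncompresses the outer .tar.gz, but a .gz member inside
    # the archive is extracted as-is, still compressed.
    import gzip, io, os, tarfile, tempfile

    workdir = tempfile.mkdtemp()
    inner = io.BytesIO()
    with gzip.GzipFile(fileobj=inner, mode='wb') as gz:
        gz.write(b'payload')

    archive_path = os.path.join(workdir, 'some_file.tar.gz')
    with tarfile.open(archive_path, 'w:gz') as tar:
        info = tarfile.TarInfo('some_contained_file.gz')
        info.size = len(inner.getvalue())
        tar.addfile(info, io.BytesIO(inner.getvalue()))

    with tarfile.open(archive_path, 'r:gz') as tar:  # outer gz handled here
        tar.extractall(workdir)

    with open(os.path.join(workdir, 'some_contained_file.gz'), 'rb') as f:
        print(f.read()[:2] == b'\x1f\x8b')  # True: member is still gzipped
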
diff --git a/templates/webapps/tool_shed/repository/upload_capsule.mako b/templates/webapps/tool_shed/repository/upload_capsule.mako
new file mode 100644
index 0000000..36b92a5
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/upload_capsule.mako
@@ -0,0 +1,60 @@
+<%namespace file="/message.mako" import="render_msg" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormBody">
+        <div class="form-row">
+            <div class="warningmessage">
+                Upload a single exported capsule file.  Uploading may take a while, depending upon the size of the file.
+                Wait until the contents of the file are displayed in your browser after clicking the <b>Upload</b> button below.
+            </div>
+            <div style="clear: both"></div>
+        </div>
+    </div>
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Upload a repository capsule</div>
+    <div class="toolFormBody">
+        <form id="upload_capsule" name="upload_capsule" action="${h.url_for( controller='repository', action='upload_capsule' )}" enctype="multipart/form-data" method="post">
+            <div class="form-row">
+                <label>File:</label>
+                <div class="form-row-input">
+                    <input type="file" name="file_data"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>URL:</label>
+                <div class="form-row-input">
+                    <input name="url" type="text" value="${url | h}" size="40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" class="primary-button" name="upload_capsule_button" value="Upload">
+            </div>
+        </form>
+    </div>
+</div>
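
The capsule form posts file_data (and an optional url) to the repository controller's upload_capsule action as multipart/form-data. A hedged sketch of driving it with the requests library; the base URL and capsule filename are invented, and a real Tool Shed will typically also require an authenticated session.

    # Hedged sketch of submitting the form above with requests; the base URL
    # and capsule path are invented, and authentication is omitted.
    import requests

    TOOL_SHED = 'http://localhost:9009'  # invented base URL
    with open('capsule_fastqc_devteam.tar.gz', 'rb') as capsule:
        response = requests.post(
            TOOL_SHED + '/repository/upload_capsule',
            files={'file_data': capsule},            # matches the form's file field
            data={'url': '', 'upload_capsule_button': 'Upload'},
        )
    print(response.status_code)
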
diff --git a/templates/webapps/tool_shed/repository/view_changelog.mako b/templates/webapps/tool_shed/repository/view_changelog.mako
new file mode 100644
index 0000000..f40de93
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/view_changelog.mako
@@ -0,0 +1,117 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="render_clone_str" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    is_new = repository.is_new( trans.app )
+    can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )
+    can_download = not is_new and ( not is_malicious or can_push )
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating" )}
+</%def>
+
+${render_tool_shed_repository_actions( repository=repository, metadata=metadata )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if can_download:
+    <div class="toolForm">
+        <div class="toolFormTitle">Repository '${repository.name | h}'</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <label>Clone this repository:</label>
+                ${render_clone_str( repository )}
+            </div>
+        </div>
+    </div>
+    <p/>
+%endif
+<div class="toolForm">
+    <%
+        if can_download:
+            title_str = 'Changesets'
+        else:
+            title_str = '%s changesets' % repository.name
+    %>
+    <div class="toolFormTitle">${title_str | h}</div>
+    <% test_date = None %>
+    <div class="toolFormBody">
+        <table class="grid">
+            %for changeset in changesets:
+                <%
+                    ctx_str = str( changeset[ 'ctx' ] )
+                    ctx_parent = str( changeset[ 'parent' ] )
+                    ctx_parent_rev = changeset[ 'parent' ].rev()
+                    changeset_str = "%s:%s" % ( changeset[ 'rev' ], ctx_str )
+                    if ctx_parent_rev < 0:
+                        ctx_parent_str = 'None'
+                    else:
+                        ctx_parent_str = "%s:%s" % ( ctx_parent_rev, ctx_parent )
+                    if changeset[ 'has_metadata' ]:
+                        has_metadata_str = '<table border="0"><tr><td bgcolor="#D8D8D8">Repository metadata is associated with this change set.</td></tr></table>'
+                    else:
+                        has_metadata_str = ''
+                    display_date = changeset[ 'display_date' ]
+                %>
+                %if test_date != display_date:
+                    <tr><td colspan="2" bgcolor="#D8D8D8">${display_date}</td></tr>
+                    <% test_date = display_date %>
+                %endif
+                <tr>
+                    <td>
+                        %if has_metadata_str:
+                            <div class="form-row">
+                                ${has_metadata_str}
+                            </div>
+                        %endif
+                        <div class="form-row">
+                            <label>Description:</label>
+                            <a href="${h.url_for( controller='repository', action='view_changeset', id=trans.security.encode_id( repository.id ), ctx_str=ctx_str )}">${ util.unicodify( changeset[ 'description' ] ) | h}</a>
+                        </div>
+                        <div class="form-row">
+                            <label>Commit:</label>
+                            <a href="${h.url_for( controller='repository', action='view_changeset', id=trans.security.encode_id( repository.id ), ctx_str=ctx_str )}">${changeset_str | h}</a>
+                        </div>
+                        <div class="form-row">
+                            <label>Parent:</label>
+                            %if ctx_parent_str == 'None':
+                                ${ctx_parent_str}
+                            %else:
+                                <a href="${h.url_for( controller='repository', action='view_changeset', id=trans.security.encode_id( repository.id ), ctx_str=ctx_parent )}">${ctx_parent_str | h}</a>
+                            %endif
+                        </div>
+                        <div class="form-row">
+                            <label>Committed by:</label>
+                            ${changeset[ 'user' ].split()[0] | h}
+                        </div>
+                        <div class="form-row">
+                            <label>Pushed:</label>
+                            ${changeset[ 'display_date' ]}
+                        </div>
+                    </td>
+                </tr>
+            %endfor
+        </table>
+    </div>
+</div>
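
The parent-label logic above relies on a Mercurial convention: a root changeset's parent is the null revision, whose rev() is -1. A self-contained sketch of the same rule:

    # Mirrors the ctx_parent_str logic in view_changelog.mako: a negative
    # parent revision marks the null revision, rendered as 'None'.
    def parent_label(parent_rev, parent_hash):
        if parent_rev < 0:
            return 'None'
        return '%s:%s' % (parent_rev, parent_hash)

    assert parent_label(-1, '000000000000') == 'None'
    assert parent_label(3, 'f7d0bfeb2c07') == '3:f7d0bfeb2c07'
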
diff --git a/templates/webapps/tool_shed/repository/view_changeset.mako b/templates/webapps/tool_shed/repository/view_changeset.mako
new file mode 100644
index 0000000..3ead995
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/view_changeset.mako
@@ -0,0 +1,172 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="render_clone_str" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    is_new = repository.is_new( trans.app )
+    can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )
+    can_download = not is_new and ( not is_malicious or can_push )
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating" )}
+</%def>
+
+${render_tool_shed_repository_actions( repository=repository, metadata=metadata )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if can_download:
+    <div class="toolForm">
+        <div class="toolFormTitle">Repository '${repository.name | h}'</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <label>Clone this repository:</label>
+                ${render_clone_str( repository )}
+            </div>
+        </div>
+    </div>
+    <p/>
+%endif
+<div class="toolForm">
+    <%
+        from tool_shed.util.hg_util import get_readable_ctx_date
+        changeset_revision_date = get_readable_ctx_date( ctx )
+        if can_download:
+            title_str = 'Changeset <b>%s:%s</b> <i>(%s)</i>' % ( ctx.rev(), ctx, changeset_revision_date )
+        else:
+            title_str = '%s changeset <b>%s:%s</b> <i>(%s)</i>' % ( repository.name, ctx.rev(), ctx, changeset_revision_date )
+    %>
+    <div class="toolFormTitle">
+        ${title_str}
+    </div>
+    <div class="toolFormBody">
+        <table class="grid">
+            %if prev or next:
+                <tr>
+                    <td>
+                        %if prev:
+                            <a class="action-button" href="${h.url_for( controller='repository', action='view_changeset', id=trans.security.encode_id( repository.id ), ctx_str=ctx_parent )}">Previous changeset ${prev}</a>
+                        %endif
+                        %if next:
+                            <a class="action-button" href="${h.url_for( controller='repository', action='view_changeset', id=trans.security.encode_id( repository.id ), ctx_str=ctx_child )}">Next changeset ${next}</a>
+                        %endif
+                    </td>
+                </tr>
+            %endif
+            <tr>
+                <td class="preserve-text-breaks">
+                    <b>Commit message:</b>
+                    <br/>${ util.unicodify( ctx.description() ) | h}<br/>
+                </td>
+            </tr>
+            %if modified:
+                <tr>
+                    <td>
+                        <b>modified:</b>
+                        %for item in modified:
+                            <br/><a href="#${ util.unicodify( item ) }">${ util.unicodify( item ) | h}</a>
+                        %endfor
+                    </td>
+                </tr>
+            %endif
+            %if added:
+                <tr>
+                    <td>
+                        <b>added:</b>
+                        %for item in added:
+                            <br/><a href="#${ util.unicodify( item ) }">${ util.unicodify( item ) | h}</a>
+                        %endfor
+                    </td>
+                </tr>
+            %endif
+            %if removed:
+                <tr>
+                    <td>
+                        <b>removed:</b>
+                        %for item in removed:
+                            <br/><a href="#${ util.unicodify( item ) }">${ util.unicodify( item ) | h}</a>
+                        %endfor
+                    </td>
+                </tr>
+            %endif
+            %if deleted:
+                <tr>
+                    <td>
+                        <b>deleted:</b>
+                        %for item in deleted:
+                            <br/><a href="#${ util.unicodify( item ) }">${ util.unicodify( item ) | h}</a>
+                        %endfor
+                    </td>
+                </tr>
+            %endif
+            %if unknown:
+                <tr>
+                    <td>
+                        <b>unknown:</b>
+                        %for item in unknown:
+                            <br/><a href="#${ util.unicodify( item ) }">${ util.unicodify( item ) | h}</a>
+                        %endfor
+                    </td>
+                </tr>
+            %endif
+            %if ignored:
+                <tr>
+                    <td>
+                        <b>ignored:</b>
+                        %for item in ignored:
+                            <br/><a href="#${ util.unicodify( item ) }">${ util.unicodify( item ) | h}</a>
+                        %endfor
+                    </td>
+                </tr>
+            %endif
+            %if clean:
+                <tr>
+                    <td>
+                        <b>clean:</b>
+                        %for item in clean:
+                            <br/><a href="#${ util.unicodify( item ) }">${ util.unicodify( item ) | h}</a>
+                        %endfor
+                    </td>
+                </tr>
+            %endif
+            %for diff in diffs:
+                <%
+                    # Scan at most the first 10 lines of the diff to determine the anchor.
+                    anchor_str = ''
+                    for line in diff.split( '\n' )[ :10 ]:
+                        for anchor in anchors:
+                            if line.find( anchor ) >= 0:
+                                anchor_str = '<a name="%s">%s</a>' % ( anchor, anchor )
+                                break
+                %>
+                <tr><td bgcolor="#E0E0E0">${ util.unicodify( anchor_str ) }</td></tr>
+                <tr><td>${ util.unicodify( diff ) }</td></tr>
+            %endfor
+        </table>
+    </div>
+</div>
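
The can_download flag computed at the top of this template (and of view_changelog.mako) packs the tool shed's download policy into one expression. A sketch with the cases spelled out:

    # Download policy shared by these templates: new repositories are never
    # downloadable, and repositories flagged malicious are downloadable only
    # by users who can push to them.
    def can_download(is_new, is_malicious, can_push):
        return not is_new and (not is_malicious or can_push)

    assert can_download(False, False, False)      # ordinary repository
    assert can_download(False, True, True)        # flagged, but user can push
    assert not can_download(False, True, False)   # flagged, user cannot push
    assert not can_download(True, False, True)    # brand-new repository
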
diff --git a/templates/webapps/tool_shed/repository/view_repository.mako b/templates/webapps/tool_shed/repository/view_repository.mako
new file mode 100644
index 0000000..496c877
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/view_repository.mako
@@ -0,0 +1,277 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="*" />
+
+<%
+    from galaxy.web.framework.helpers import time_ago
+    from tool_shed.util.basic_util import to_html_string
+
+    is_new = repository.is_new( trans.app )
+    is_deprecated = repository.deprecated
+
+    can_browse_contents = trans.webapp.name == 'tool_shed' and not is_new
+    can_push = not is_deprecated and trans.app.security_agent.can_push( trans.app, trans.user, repository )
+    can_download = not is_deprecated and not is_new and ( not is_malicious or can_push )
+    can_view_change_log = trans.webapp.name == 'tool_shed' and not is_new
+    changeset_revision_is_repository_tip = changeset_revision == repository.tip( trans.app )
+
+    if changeset_revision_is_repository_tip:
+        tip_str = 'repository tip'
+        sharable_link_label = 'Link to this repository:'
+        sharable_link_changeset_revision = None
+    else:
+        tip_str = ''
+        sharable_link_label = 'Link to this repository revision:'
+        sharable_link_changeset_revision = changeset_revision
+
+    if heads:
+        multiple_heads = len( heads ) > 1
+    else:
+        multiple_heads = False
+
+    if repository_metadata is None:
+        revision_installable = False
+    else:
+        if repository_metadata.downloadable is None:
+            revision_installable = 'unknown'
+        else:
+            revision_installable = repository_metadata.downloadable
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${parent.stylesheets()}
+    ${h.css('base','library','jquery.rating')}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+    ${container_javascripts()}
+</%def>
+
+%if trans.webapp.name == 'tool_shed':
+    ${render_tool_shed_repository_actions( repository=repository, metadata=metadata, changeset_revision=changeset_revision )}
+%else:
+    ${render_galaxy_repository_actions( repository=repository )}
+%endif
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+%if repository.deprecated:
+    <div class="warningmessage">
+        This repository has been marked as deprecated, so some tool shed features may be restricted.
+    </div>
+%endif
+%if multiple_heads:
+    ${render_multiple_heads_message( heads )}
+%endif
+%if deprecated_repository_dependency_tups:
+    ${render_deprecated_repository_dependencies_message( deprecated_repository_dependency_tups )}
+%endif
+
+%if len( changeset_revision_select_field.options ) > 1:
+    <div class="toolForm">
+        <div class="toolFormTitle">Repository revision</div>
+        <div class="toolFormBody">
+            <form name="change_revision" id="change_revision" action="${h.url_for( controller='repository', action='view_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+                <div class="form-row">
+                    ${changeset_revision_select_field.get_html()} <i>${tip_str}</i>
+                    <div class="toolParamHelp" style="clear: both;">
+                        Select a revision to inspect and download versions of Galaxy utilities from this repository.
+                    </div>
+                </div>
+            </form>
+        </div>
+    </div>
+    <p/>
+%endif
+<div class="toolForm">
+    <div class="toolFormTitle">Repository <b>${repository.name | h}</b></div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <b>Name:</b>\
+            %if can_browse_contents:
+                <a title="Browse the contents of this repository" href="${h.url_for( controller='repository', action='browse_repository', id=trans.app.security.encode_id( repository.id ) )}">${repository.name}</a>
+            %else:
+                ${repository.name | h}
+            %endif
+        </div>
+        <div class="form-row">
+            <b>Owner:</b>
+            <a title="See all repositories owned by ${ repository.user.username | h }" href="${h.url_for( controller='repository', action='browse_repositories_by_user', user_id=trans.app.security.encode_id( repository.user.id ) )}">${ repository.user.username | h }</a>
+        </div>
+        <div class="form-row">
+            <b>Synopsis:</b>
+            ${repository.description | h}
+        </div>
+        %if repository.long_description:
+            <div class="form-row">
+                ${render_long_description( to_html_string( repository.long_description ) )}
+            </div>
+        %endif
+        %if repository.homepage_url:
+        <div class="form-row">
+            <b>Content homepage:</b>
+            <a href="${repository.homepage_url | h}" target="_blank">${repository.homepage_url | h}</a>
+        </div>
+        %endif
+        %if repository.remote_repository_url:
+        <div class="form-row">
+            <b>Development repository:</b>
+            <a href="${repository.remote_repository_url | h}" target="_blank">${repository.remote_repository_url | h}</a>
+        </div>
+        %endif
+        <div class="form-row">
+            <b>${sharable_link_label}</b>
+            <a href="${ repository.share_url }" target="_blank">${ repository.share_url }</a>
+            <button title="to clipboard" class="btn btn-default btn-xs" id="share_clipboard"><span class="fa fa-clipboard"></span></button>
+        </div>
+        %if can_download or can_push:
+            <div class="form-row">
+                <b>Clone this repository:</b>
+                <code>hg clone <a title="Show in Mercurial browser" href="${ repository.clone_url }">${ repository.clone_url }</a></code>
+                <button title="Copy to clipboard" class="btn btn-default btn-xs" id="clone_clipboard"><span class="fa fa-clipboard"></span></button>
+            </div>
+        %endif
+        <div class="form-row">
+            <b>Type:</b>
+            ${repository.type | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <b>Revision:</b>
+            %if can_view_change_log:
+                <a title="See the revision history" href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${revision_label}</a>
+            %else:
+                ${revision_label}
+            %endif
+        </div>
+        <div class="form-row">
+            <b>This revision can be installed:</b>
+            ${revision_installable}
+        </div>
+        <div class="form-row">
+            <b>Times cloned / installed:</b>
+            ${repository.times_downloaded}
+        </div>
+        %if trans.user_is_admin():
+            <div class="form-row">
+                <b>Location:</b>
+                ${repository.repo_path( trans.app ) | h}
+            </div>
+            <div class="form-row">
+                <b>Deleted:</b>
+                ${repository.deleted}
+            </div>
+        %endif
+    </div>
+</div>
+${render_repository_items( metadata, containers_dict, can_set_metadata=False, render_repository_actions_for='tool_shed' )}
+%if repository.categories:
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Categories</div>
+        <div class="toolFormBody">
+            %for rca in repository.categories:
+                <div class="form-row">
+                    <a href="${h.url_for( controller='repository', action='browse_repositories_in_category', id=trans.security.encode_id( rca.category.id ) )}">${rca.category.name | h}</a> - ${rca.category.description | h}
+                </div>
+            %endfor
+            <div style="clear: both"></div>
+        </div>
+    </div>
+%endif
+%if trans.webapp.name == 'tool_shed' and trans.user and trans.app.config.smtp_server:
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Notification on update</div>
+        <div class="toolFormBody">
+            <form name="receive_email_alerts" id="receive_email_alerts" action="${h.url_for( controller='repository', action='view_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+                <div class="form-row">
+                    <label>Receive email alerts:</label>
+                    ${alerts_check_box.get_html()}
+                    <div class="toolParamHelp" style="clear: both;">
+                        Check the box and click <b>Save</b> to receive email alerts when updates to this repository occur.
+                    </div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="receive_email_alerts_button" value="Save"/>
+                </div>
+            </form>
+        </div>
+    </div>
+%endif
+%if repository.ratings:
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">Rating</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <label>Times Rated:</label>
+                ${num_ratings}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Average Rating:</label>
+                ${render_star_rating( 'avg_rating', avg_rating, disabled=True )}
+                <div style="clear: both"></div>
+            </div>
+        </div>
+    </div>
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormBody">
+            %if display_reviews:
+                <div class="form-row">
+                    <a href="${h.url_for( controller='repository', action='view_repository', id=trans.security.encode_id( repository.id ), display_reviews=False )}"><label>Hide Reviews</label></a>
+                </div>
+                <div style="clear: both"></div>
+                <div class="form-row">
+                    <table class="grid">
+                        <thead>
+                            <tr>
+                                <th>Rating</th>
+                                <th>Comments</th>
+                                <th>Reviewed</th>
+                                <th>User</th>
+                            </tr>
+                        </thead>
+                        <% count = 0 %>
+                        %for review in repository.ratings:
+                            <%
+                                count += 1
+                                name = 'rating%d' % count
+                            %>
+                            <tr>
+                                <td>${render_star_rating( name, review.rating, disabled=True )}</td>
+                                <td>${render_review_comment( to_html_string( review.comment ) )}</td>
+                                <td>${time_ago( review.update_time )}</td>
+                                <td>${review.user.username}</td>
+                            </tr>
+                        %endfor
+                    </table>
+                </div>
+                <div style="clear: both"></div>
+            %else:
+                <div class="form-row">
+                    <a href="${h.url_for( controller='repository', action='view_repository', id=trans.security.encode_id( repository.id ), display_reviews=True )}"><label>Display Reviews</label></a>
+                </div>
+                <div style="clear: both"></div>
+            %endif
+        </div>
+    </div>
+%endif
+<p/>
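
The revision_installable value rendered under "This revision can be installed" is deliberately tri-state rather than boolean. A sketch of the computation from the top of this template:

    # Tri-state installability, as computed in view_repository.mako:
    # no metadata -> False; metadata with downloadable unset -> 'unknown';
    # otherwise the downloadable flag itself.
    def revision_installable(repository_metadata):
        if repository_metadata is None:
            return False
        if repository_metadata.downloadable is None:
            return 'unknown'
        return repository_metadata.downloadable
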
diff --git a/templates/webapps/tool_shed/repository/view_tool_metadata.mako b/templates/webapps/tool_shed/repository/view_tool_metadata.mako
new file mode 100644
index 0000000..a84993a
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/view_tool_metadata.mako
@@ -0,0 +1,285 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="*" />
+
+<%
+    is_new = repository.is_new( trans.app )
+
+    can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )
+    can_download = not is_new and ( not is_malicious or can_push )
+    can_view_change_log = trans.webapp.name == 'tool_shed' and not is_new
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+%if render_repository_actions_for == 'tool_shed':
+    ${render_tool_shed_repository_actions( repository=repository, metadata=metadata, changeset_revision=changeset_revision )}
+%else:
+    ${render_galaxy_repository_actions( repository=repository )}
+%endif
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Repository revision</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Revision:</label>
+            %if can_view_change_log:
+                <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${revision_label}</a>
+            %else:
+                ${revision_label}
+            %endif
+        </div>
+    </div>
+</div>
+<p/>
+%if can_download:
+    <div class="toolForm">
+        <div class="toolFormTitle">Repository '${repository.name | h}'</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <label>Clone this repository:</label>
+                ${render_clone_str( repository )}
+            </div>
+        </div>
+    </div>
+%else:
+    <b>Repository name:</b><br/>
+    ${repository.name | h}
+%endif
+%if tool_metadata_dict:
+    <p/>
+    <div class="toolForm">
+        <div class="toolFormTitle">${tool_metadata_dict[ 'name' ]} tool metadata</div>
+        <div class="toolFormBody">
+            <div class="form-row">
+                <table width="100%">
+                    <tr bgcolor="#D8D8D8" width="100%"><td><b>Miscellaneous</b></td></tr>
+                </table>
+            </div>
+            <div class="form-row">
+                <label>Name:</label>
+                <a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_metadata_dict[ 'tool_config' ], changeset_revision=changeset_revision )}">${tool_metadata_dict[ 'name' ]}</a>
+                <div style="clear: both"></div>
+            </div>
+            %if 'description' in tool_metadata_dict:
+                <div class="form-row">
+                    <label>Description:</label>
+                    ${tool_metadata_dict[ 'description' ] | h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'id' in tool_metadata_dict:
+                <div class="form-row">
+                    <label>Id:</label>
+                    ${tool_metadata_dict[ 'id' ] | h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'guid' in tool_metadata_dict:
+                <div class="form-row">
+                    <label>Guid:</label>
+                    ${tool_metadata_dict[ 'guid' ] | h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'version' in tool_metadata_dict:
+                <div class="form-row">
+                    <label>Version:</label>
+                    ${tool_metadata_dict[ 'version' ] | h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'version_string_cmd' in tool_metadata_dict:
+                <div class="form-row">
+                    <label>Version command string:</label>
+                    ${tool_metadata_dict[ 'version_string_cmd' ] | h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            %if 'add_to_tool_panel' in tool_metadata_dict:
+                <div class="form-row">
+                    <label>Display in tool panel:</label>
+                    ${tool_metadata_dict[ 'add_to_tool_panel' ] | h}
+                    <div style="clear: both"></div>
+                </div>
+            %endif
+            <div class="form-row">
+                <table width="100%">
+                    <tr bgcolor="#D8D8D8" width="100%"><td><b>Version lineage of this tool (GUIDs ordered most recent to oldest)</b></td></tr>
+                </table>
+            </div>
+            <div class="form-row">
+                %if tool_lineage:
+                    <table class="grid">
+                        %for guid in tool_lineage:
+                            <tr>
+                                <td>
+                                    %if guid == tool_metadata_dict[ 'guid' ]:
+                                        ${guid | h} <b>(this tool)</b>
+                                    %else:
+                                        ${guid | h}
+                                    %endif
+                                </td>
+                            </tr>
+                        %endfor
+                    </table>
+                %else:
+                    No tool versions are defined for this tool, so it is critical that you <b>Reset all repository metadata</b> from the
+                    <b>Manage repository</b> page.
+                %endif
+            </div>
+            <div class="form-row">
+                <table width="100%">
+                    <tr bgcolor="#D8D8D8" width="100%"><td><b>Requirements (dependencies defined in the &lt;requirements&gt; tag set)</b></td></tr>
+                </table>
+            </div>
+            <%
+                if 'requirements' in tool_metadata_dict:
+                    requirements = tool_metadata_dict[ 'requirements' ]
+                else:
+                    requirements = None
+            %>
+            %if requirements:
+                <div class="form-row">
+                    <label>Requirements:</label>
+                    <table class="grid">
+                        <tr>
+                            <td><b>name</b></td>
+                            <td><b>version</b></td>
+                            <td><b>type</b></td>
+                        </tr>
+                        %for requirement_dict in requirements:
+                            <%
+                                requirement_name = requirement_dict[ 'name' ] or 'not provided'
+                                requirement_version = requirement_dict[ 'version' ] or 'not provided'
+                                requirement_type = requirement_dict[ 'type' ] or 'not provided'
+                            %>
+                            <tr>
+                                <td>${requirement_name | h}</td>
+                                <td>${requirement_version | h}</td>
+                                <td>${requirement_type | h}</td>
+                            </tr>
+                        %endfor
+                    </table>
+                    <div style="clear: both"></div>
+                </div>
+            %else:
+                <div class="form-row">
+                    No requirements defined
+                </div>
+            %endif
+            %if tool:
+                <div class="form-row">
+                    <table width="100%">
+                        <tr bgcolor="#D8D8D8" width="100%"><td><b>Additional information about this tool</b></td></tr>
+                    </table>
+                </div>
+                <div class="form-row">
+                    <label>Command:</label>
+                    <pre>${tool.command | h}</pre>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Interpreter:</label>
+                    ${tool.interpreter | h}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Is multi-byte:</label>
+                    ${tool.is_multi_byte | h}
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <label>Forces a history refresh:</label>
+                    ${tool.force_history_refresh | h}
+                    <div style="clear: both"></div>
+                </div>
+                <% parallelism_info = tool.parallelism %>
+                %if parallelism_info:
+                    <div class="form-row">
+                        <table width="100%">
+                            <tr bgcolor="#D8D8D8" width="100%"><td><b>Parallelism</b></td></tr>
+                        </table>
+                    </div>
+                    <div class="form-row">
+                        <label>Method:</label>
+                        ${parallelism_info.method | h}
+                        <div style="clear: both"></div>
+                    </div>
+                    %for key, val in parallelism_info.attributes.items():
+                        <div class="form-row">
+                            <label>${key}:</label>
+                            ${val | h}
+                            <div style="clear: both"></div>
+                        </div>
+                    %endfor
+                %endif
+            %endif
+            <div class="form-row">
+                <table width="100%">
+                    <tr bgcolor="#D8D8D8" width="100%"><td><b>Functional tests</b></td></tr>
+                </table>
+            </div>
+            <%
+                if 'tests' in tool_metadata_dict:
+                    tests = tool_metadata_dict[ 'tests' ]
+                else:
+                    tests = None
+            %>
+            %if tests:
+                <div class="form-row">
+                    <table class="grid">
+                        <tr>
+                            <td><b>name</b></td>
+                            <td><b>inputs</b></td>
+                            <td><b>outputs</b></td>
+                            <td><b>required files</b></td>
+                        </tr>
+                        %for test_dict in tests:
+                            <%
+                                inputs = test_dict[ 'inputs' ]
+                                outputs = test_dict[ 'outputs' ]
+                                required_files = test_dict[ 'required_files' ]
+                            %>
+                            <tr>
+                                <td>${test_dict[ 'name' ]}</td>
+                                <td>
+                                    %for input in inputs:
+                                        <b>${input[0]}:</b> ${input[1] | h}<br/>
+                                    %endfor
+                                </td>
+                                <td>
+                                    %for output in outputs:
+                                        <b>${output[0]}:</b> ${output[1] | h}<br/>
+                                    %endfor
+                                </td>
+                                <td>
+                                    %for required_file in required_files:
+                                        ${required_file | h}<br/>
+                                    %endfor
+                                </td>
+                            </tr>
+                        %endfor
+                    </table>
+                </div>
+            %else:
+                <div class="form-row">
+                    No functional tests defined
+                </div>
+            %endif
+        </div>
+    </div>
+%endif
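
The requirements table above substitutes 'not provided' for any empty name, version, or type. A minimal sketch of that defaulting:

    # Mirrors the per-requirement defaulting in view_tool_metadata.mako;
    # dict.get is used here so missing keys behave like empty values.
    def requirement_row(requirement_dict):
        return tuple(requirement_dict.get(key) or 'not provided'
                     for key in ('name', 'version', 'type'))

    assert requirement_row({'name': 'samtools', 'version': None, 'type': 'package'}) == \
        ('samtools', 'not provided', 'package')
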
diff --git a/templates/webapps/tool_shed/repository/view_workflow.mako b/templates/webapps/tool_shed/repository/view_workflow.mako
new file mode 100644
index 0000000..05a6f44
--- /dev/null
+++ b/templates/webapps/tool_shed/repository/view_workflow.mako
@@ -0,0 +1,43 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="*" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+
+<%inherit file="${inherit(context)}"/>
+
+<% from tool_shed.util.encoding_util import tool_shed_encode %>
+
+<%def name="render_workflow( workflow_name, repository_metadata_id )">
+    <% center_url = h.url_for( controller='repository', action='generate_workflow_image', workflow_name=tool_shed_encode( workflow_name ), repository_metadata_id=repository_metadata_id ) %>
+    <iframe name="workflow_image" id="workflow_image" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"> </iframe>
+</%def>
+
+%if render_repository_actions_for == 'tool_shed':
+    ${render_tool_shed_repository_actions( repository=repository )}
+%else:
+    ${render_galaxy_repository_actions( repository=repository )}
+%endif
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolFormTitle">${workflow_name | h}</div>
+<div class="form-row">
+    <b>Boxes are red when tools are not available in this repository</b>
+    <div class="toolParamHelp" style="clear: both;">
+        (this page displays SVG graphics)
+    </div>
+</div>
+<br clear="left"/>
+
+${render_workflow( workflow_name, repository_metadata_id )}
diff --git a/templates/webapps/tool_shed/repository_review/browse_review.mako b/templates/webapps/tool_shed/repository_review/browse_review.mako
new file mode 100644
index 0000000..136622a
--- /dev/null
+++ b/templates/webapps/tool_shed/repository_review/browse_review.mako
@@ -0,0 +1,119 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    from galaxy.web.form_builder import CheckboxField
+    from tool_shed.util.container_util import STRSEP
+    from tool_shed.util.basic_util import to_html_string
+%>
+
+<%def name="stylesheets()">
+    ${h.css('base','jquery.rating')}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating" )}
+</%def>
+
+${render_tool_shed_repository_actions( repository=repository, changeset_revision=review.changeset_revision )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Review of repository '${repository.name | h}'</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Reviewer:</label>
+            ${review.user.username}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Repository revision:</label>
+            <a class="action-button" href="${h.url_for( controller='repository_review', action='view_or_manage_repository', id=trans.security.encode_id( repository.id ), changeset_revision=review.changeset_revision )}">${changeset_revision_label}</a>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Repository owner:</label>
+            ${repository.user.username | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Repository synopsis:</label>
+            ${repository.description | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            %if review.component_reviews:
+                <table class="grid">
+                    %for component_review in review.component_reviews:
+                        <%
+                            can_browse = trans.app.security_agent.user_can_browse_component_review( trans.app, repository, component_review, trans.user )
+                            component = component_review.component
+                            if can_browse:
+                                # Initialize Private check box.
+                                private_check_box_name = '%s%sprivate' % ( component.name, STRSEP )
+                                private_check_box = CheckboxField( name=private_check_box_name, checked=component_review.private )
+                                
+                                # Initialize star rating.
+                                rating_name = '%s%srating' % ( component.name, STRSEP )
+                        %>
+                        <tr>
+                            <td bgcolor="#D8D8D8"><b>${component.name | h}</b></td>
+                            <td bgcolor="#D8D8D8">${component.description | h}</td>
+                        </tr>
+                        <tr>
+                            <td colspan="2">
+                            %if can_browse:
+                                    <table class="grid">
+                                        <tr>
+                                            <td>
+                                                <label>Private:</label>
+                                                ${private_check_box.get_html( disabled=True )}
+                                                <div class="toolParamHelp" style="clear: both;">
+                                                    A private review can be accessed only by the owner of the repository and authorized repository reviewers.
+                                                </div>
+                                                <div style="clear: both"></div>
+                                            </td>
+                                        </tr>
+                                        %if component_review.comment:
+                                            <tr>
+                                                <td>
+                                                    <div style="overflow-wrap:normal;overflow:hidden;word-break:keep-all;word-wrap:break-word;line-break:strict;">
+                                                        ${ to_html_string( component_review.comment ) }
+                                                    </div>
+                                                </td>
+                                            </tr>
+                                        %endif
+                                        <tr>
+                                            <td>
+                                                <label>Approved:</label>
+                                                ${component_review.approved | h}
+                                                <div style="clear: both"></div>
+                                            </td>
+                                        </tr>
+                                        <tr>
+                                            <td>
+                                                <label>Rating:</label>
+                                                ${render_star_rating( rating_name, component_review.rating, disabled=True )}
+                                                <div style="clear: both"></div>
+                                            </td>
+                                        </tr>
+                                    </table>
+                                %else:
+                                    You are not authorized to access the review of this component since it has been marked private.
+                                %endif
+                            </td>
+                        </tr>
+                    %endfor
+                </table>
+            %else:
+                This review has not yet been started.
+            %endif
+        </div>
+    </div>
+</div>
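
The per-component form fields above are named by joining the component name and a suffix with STRSEP from tool_shed.util.container_util, which lets the controller split submitted field names back apart. A sketch of the convention; the separator value used here is an illustrative assumption, not the real constant:

    STRSEP = '__STRSEP__'  # assumed placeholder; the real value lives in container_util

    def field_name(component_name, suffix):
        # e.g. 'Documentation__STRSEP__private', 'Documentation__STRSEP__rating'
        return '%s%s%s' % (component_name, STRSEP, suffix)

    component, suffix = field_name('Documentation', 'rating').split(STRSEP)
    assert (component, suffix) == ('Documentation', 'rating')
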
diff --git a/templates/webapps/tool_shed/repository_review/create_component.mako b/templates/webapps/tool_shed/repository_review/create_component.mako
new file mode 100644
index 0000000..2639989
--- /dev/null
+++ b/templates/webapps/tool_shed/repository_review/create_component.mako
@@ -0,0 +1,34 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        });
+    </script>
+</%def>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Create Component</div>
+    <div class="toolFormBody">
+        <form name="create_component" id="create_component" action="${h.url_for( controller='repository_review', action='create_component' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <input name="name" type="textfield" value="${name | h}" size=40"/>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <input name="description" type="textfield" value="${description | h}" size=40"/>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="create_component_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository_review/edit_component.mako b/templates/webapps/tool_shed/repository_review/edit_component.mako
new file mode 100644
index 0000000..483f402
--- /dev/null
+++ b/templates/webapps/tool_shed/repository_review/edit_component.mako
@@ -0,0 +1,37 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Change component description</div>
+    <div class="toolFormBody">
+        <form name="edit_component" action="${h.url_for( controller='repository_review', action='edit_component' )}" method="post" >
+            <div class="form-row">
+                <label>Name:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    ${component.name | h}
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Description:</label>
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input  name="description" type="textfield" value="${component.description | h}" size=40"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; width: 250px; margin-right: 10px;">
+                    <input type="hidden" name="id" value="${trans.security.encode_id( component.id )}"/>
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="edit_component_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository_review/edit_review.mako b/templates/webapps/tool_shed/repository_review/edit_review.mako
new file mode 100644
index 0000000..68b1e06
--- /dev/null
+++ b/templates/webapps/tool_shed/repository_review/edit_review.mako
@@ -0,0 +1,163 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    from galaxy.web.form_builder import CheckboxField
+    from tool_shed.grids.util import build_approved_select_field
+    from tool_shed.util.container_util import STRSEP
+%>
+
+<%def name="stylesheets()">
+    ${h.css('base','jquery.rating')}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating" )}
+</%def>
+
+${render_tool_shed_repository_actions( repository=repository, changeset_revision=review.changeset_revision )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">My review of repository '${repository.name | h}'</div>
+    <div class="toolFormBody">
+        <form name="edit_review" action="${h.url_for( controller='repository_review', action='edit_review', id=trans.security.encode_id( review.id ) )}" method="post" >
+            <div class="form-row">
+                <label>Repository revision:</label>
+                <a class="action-button" href="${h.url_for( controller='repository_review', action='view_or_manage_repository', id=trans.security.encode_id( repository.id ), changeset_revision=review.changeset_revision )}">${changeset_revision_label}</a>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Repository owner:</label>
+                ${repository.user.username | h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Repository synopsis:</label>
+                ${repository.description | h}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Approve this repository revision?</label>
+                ${revision_approved_select_field.get_html()}
+                <div class="toolParamHelp" style="clear: both;">
+                    Individual components below may be approved without approving the repository revision.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <input type="submit" name="revision_approved_button" value="Save"/>
+                <div class="toolParamHelp" style="clear: both;">
+                    All changes made on this page will be saved when any <b>Save</b> button is clicked.
+                </div>
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <table class="grid">
+                    %for component_name, component_review_dict in components_dict.items():
+                        <%
+                            component = component_review_dict[ 'component' ]
+                            encoded_component_id = trans.security.encode_id( component.id )
+                            
+                            component_review = component_review_dict[ 'component_review' ]
+                            if component_review:
+                                comment = component_review.comment or ''
+                                rating = component_review.rating
+                                approved_select_field_selected_value = component_review.approved
+                                private = component_review.private
+                            else:
+                                comment = ''
+                                rating = 0
+                                approved_select_field_selected_value = None
+                                private = False
+                            
+                            # Initialize Approved select field.
+                            approved_select_field_name = '%s%sapproved' % ( component_name, STRSEP )
+                            approved_select_field = build_approved_select_field( trans, name=approved_select_field_name, selected_value=approved_select_field_selected_value, for_component=True )
+                            
+                            # Initialize Private check box.
+                            private_check_box_name = '%s%sprivate' % ( component_name, STRSEP )
+                            private_check_box = CheckboxField( name=private_check_box_name, checked=private )
+                            
+                            # Initialize star rating.
+                            rating_name = '%s%srating' % ( component_name, STRSEP )
+                            
+                            # Initialize comment text area.
+                            comment_name = '%s%scomment' % ( component_name, STRSEP )
+                            
+                            # Initialize the component id form field name.
+                            component_id_name = '%s%scomponent_id' % ( component_name, STRSEP )
+                            
+                            # Initialize the Save button.
+                            review_button_name = '%s%sreview_button' % ( component_name, STRSEP )
+                        %>
+                        <tr>
+                            <td bgcolor="#D8D8D8"><b>${component.name | h}</b></td>
+                            <td bgcolor="#D8D8D8">${component.description | h}</td>
+                        </tr>
+                        <tr>
+                            <td colspan="2">
+                                <table class="grid">
+                                    <tr>
+                                        <td>
+                                            <label>Mark private:</label>
+                                            ${private_check_box.get_html()}
+                                            <div class="toolParamHelp" style="clear: both;">
+                                                A private review can be accessed only by the owner of the repository and authorized repository reviewers.
+                                            </div>
+                                            <div style="clear: both"></div>
+                                        </td>
+                                    </tr>
+                                    <tr>
+                                        <td>
+                                            <label>Comments:</label>
+                                            %if component_review:
+                                                <pre><textarea name="${comment_name}" rows="3" cols="80">${comment | h}</textarea></pre>
+                                            %else:
+                                                <textarea name="${comment_name}" rows="3" cols="80"></textarea>
+                                            %endif
+                                            <div style="clear: both"></div>
+                                        </td>
+                                    </tr>
+                                    <tr>
+                                        <td>
+                                            <label>Approved:</label>
+                                            ${approved_select_field.get_html()}
+                                            <div style="clear: both"></div>
+                                        </td>
+                                    </tr>
+                                    <tr>
+                                        <td>
+                                            <label>Rating:</label>
+                                            ${render_star_rating( rating_name, rating )}
+                                            <div style="clear: both"></div>
+                                            <div class="toolParamHelp" style="clear: both;">
+                                                Rate this component only - the average of all component ratings defines the value of the repository rating.
+                                            </div>
+                                        </td>
+                                    </tr>
+                                    <tr>
+                                        <td>
+                                            <input type="hidden" name="${component_id_name}" value="${encoded_component_id}"/>
+                                            <input type="submit" name="${review_button_name}" value="Save"/>
+                                            <div style="clear: both"></div>
+                                            <div class="toolParamHelp" style="clear: both;">
+                                                All changes made on this page will be saved when any <b>Save</b> button is clicked.
+                                            </div>
+                                        </td>
+                                    </tr>
+                                </table>
+                            </td>
+                        </tr>
+                    %endfor
+                </table>
+            </div>
+        </form>
+    </div>
+</div>
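
The help text beside the star widget above states the rating rule: the repository rating is the average of its component ratings. A sketch of that rule; treating unrated (zero) components as excluded is an assumption:

    # Repository rating as the mean of component ratings, per the help text
    # in edit_review.mako; skipping zero-rated components is an assumption.
    def repository_rating(component_ratings):
        rated = [r for r in component_ratings if r]
        return sum(rated) / float(len(rated)) if rated else 0

    assert repository_rating([4, 5, 3]) == 4.0
    assert repository_rating([0, 5]) == 5.0
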
diff --git a/templates/webapps/tool_shed/repository_review/grid.mako b/templates/webapps/tool_shed/repository_review/grid.mako
new file mode 100644
index 0000000..6a41b68
--- /dev/null
+++ b/templates/webapps/tool_shed/repository_review/grid.mako
@@ -0,0 +1 @@
+<%inherit file="/grid_base.mako"/>
diff --git a/templates/webapps/tool_shed/repository_review/reviews_of_changeset_revision.mako b/templates/webapps/tool_shed/repository_review/reviews_of_changeset_revision.mako
new file mode 100644
index 0000000..000dec4
--- /dev/null
+++ b/templates/webapps/tool_shed/repository_review/reviews_of_changeset_revision.mako
@@ -0,0 +1,113 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    from tool_shed.grids.util import build_approved_select_field
+    from tool_shed.util.container_util import STRSEP
+
+    if installable:
+        installable_str = 'yes'
+    else:
+        installable_str = 'no'
+    can_review_repositories = trans.app.security_agent.user_can_review_repositories( trans.user )
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
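+## The <%! %> block above defines a module-level inherit() helper, and the dynamic
+## <%inherit> tag on the previous line uses it to pick the parent template at render
+## time: the Tool Shed panel layout when use_panels is set, plain /base.mako otherwise.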
+
+<%def name="stylesheets()">
+    ${h.css('base','jquery.rating')}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating" )}
+    ${common_javascripts(repository)}
+</%def>
+
+${render_tool_shed_repository_actions( repository=repository, changeset_revision=changeset_revision )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Revision reviews of repository '${repository.name | h}'</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Revision:</label>
+            <a class="action-button" href="${h.url_for( controller='repository_review', action='view_or_manage_repository', id=trans.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">${changeset_revision_label}</a>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <label>Revision is installable:</label>
+            ${installable_str | h}
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            %if reviews:
+                <table class="grid">
+                    <tr>
+                        <th>Reviewer</th>
+                        <th>Repository rating</th>
+                        <th>Approved</th>
+                        <th></th>
+                    </tr>
+                    %for review in reviews:
+                        <%
+                            encoded_review_id = trans.security.encode_id( review.id )
+                            approved_select_field_name = '%s%sapproved' % ( encoded_review_id, STRSEP )
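+                            # The field name is the encoded review id joined to
+                            # 'approved' by STRSEP, so the posted parameter can be
+                            # split back into review id and field on the server side.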
+                            approved_select_field_selected_value = review.approved
+                            approved_select_field = build_approved_select_field( trans, name=approved_select_field_name, selected_value=approved_select_field_selected_value, for_component=False )
+                            if review.approved not in [ None, 'None', 'none' ]:
+                                approved_str = review.approved
+                            else:
+                                approved_str = ''
+                            repository_rating_name = '%srepository_rating' % encoded_review_id
+                        %>
+                        <tr>
+                            <td>
+                                <div style="float:left;" class="menubutton split popup" id="${encoded_review_id}-popup">
+                                    <a class="view-info" href="${h.url_for( controller='repository_review', action='repository_reviews_by_user', id=trans.security.encode_id( review.user.id ) )}">${review.user.username | h}</a>
+                                </div>
+                                <div popupmenu="${encoded_review_id}-popup">
+                                    %if review.user == trans.user:
+                                        <a class="action-button" href="${h.url_for( controller='repository_review', action='edit_review', id=encoded_review_id )}">Edit my review</a>
+                                    %else:
+                                        <a class="action-button" href="${h.url_for( controller='repository_review', action='browse_review', id=encoded_review_id )}">Browse this review</a>
+                                    %endif
+                                </div>
+                            </td>
+                            <td>${render_star_rating( repository_rating_name, review.rating, disabled=True )}</td>
+                            %if review.user == trans.user:
+                                <form name="approve_repository_review" action="${h.url_for( controller='repository_review', action='approve_repository_review', id=encoded_review_id ) }" method="post" >
+                                    <td>${approved_select_field.get_html()}</td>
+                                    <td><input type="submit" name="approve_repository_review_button" value="Save"/></td>
+                                </form>
+                            %else:
+                                <td>${approved_str | h}</td>
+                                <td></td>
+                            %endif
+                        </tr>
+                    %endfor
+                </table>
+            %else:
+                <label>This repository revision has not yet been reviewed:</label>
+                %if can_review_repositories:
+                    <a class="action-button" href="${h.url_for( controller='repository_review', action='create_review', id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">Add a review to this revision</a>
+                    <div style="clear: both"></div>
+                %endif
+            %endif
+        </div>
+        <div style="clear: both"></div>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository_review/reviews_of_repository.mako b/templates/webapps/tool_shed/repository_review/reviews_of_repository.mako
new file mode 100644
index 0000000..993e8bc
--- /dev/null
+++ b/templates/webapps/tool_shed/repository_review/reviews_of_repository.mako
@@ -0,0 +1,88 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%
+    if mine:
+        title = "My reviews of repository '%s'" % repository.name
+    else:
+        title = "All reviews of repository '%s'" % repository.name
+%>
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating" )}
+    ${common_javascripts(repository)}
+</%def>
+
+${render_tool_shed_repository_actions( repository=repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">${title | h}</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <table class="grid">
+                <tr>
+                    <th>Revision</th>
+                    <th>Reviewers</th>
+                    <th>Installable</th>
+                </tr>
+                %for changeset_revision, revision_dict in reviews_dict.items():
+                    <%
+                        changeset_revision_label = revision_dict[ 'changeset_revision_label' ]
+                        repository_reviews = revision_dict[ 'repository_reviews' ]
+                        repository_metadata_reviews = revision_dict[ 'repository_metadata_reviews' ]
+                        reviewers_str = ''
+                        if repository_reviews:
+                            for repository_review in repository_reviews:
+                                reviewers_str += '<a class="view-info" href="'
+                                if repository_review.user == trans.user:
+                                    reviewers_str += 'edit_review'
+                                else:
+                                    reviewers_str += 'browse_review'
+                                reviewers_str += '?id=%s">%s</a>' % ( trans.security.encode_id( repository_review.id ), repository_review.user.username )
+                                reviewers_str += ' | '
+                            reviewers_str = reviewers_str.rstrip( '| ' )
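+                            # rstrip( '| ' ) strips trailing '|' and ' ' as a character
+                            # set, removing the dangling ' | ' separator left by the loop.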
+                        if revision_dict[ 'installable' ]:
+                            installable_str = 'yes'
+                        else:
+                            installable_str = ''
+                        can_add_review = revision_dict[ 'can_add_review' ]
+                    %>
+                    <tr>
+                        <td>
+                            <div style="float:left;" class="menubutton split popup" id="${changeset_revision}-popup">
+                                <a class="view-info" href="${h.url_for( controller='repository_review', action='view_or_manage_repository', id=trans.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">${changeset_revision_label}</a>
+                            </div>
+                            <div popupmenu="${changeset_revision}-popup">
+                                %if repository_reviews:
+                                    <a class="action-button" href="${h.url_for( controller='repository_review', action='manage_repository_reviews_of_revision', id=trans.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">Browse reviews of this revision</a>
+                                %elif can_add_review:
+                                    <a class="action-button" href="${h.url_for( controller='repository_review', action='create_review', id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">Add a review to this revision</a>
+                                %endif
+                            </div>
+                        </td>
+                        <td>${reviewers_str}</td>
+                        <td>${installable_str | h}</td>
+                    </tr>
+                %endfor
+            </table>
+        </div>
+        <div style="clear: both"></div>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/repository_review/select_previous_review.mako b/templates/webapps/tool_shed/repository_review/select_previous_review.mako
new file mode 100644
index 0000000..8ccb9d1
--- /dev/null
+++ b/templates/webapps/tool_shed/repository_review/select_previous_review.mako
@@ -0,0 +1,90 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%!
+   def inherit(context):
+       if context.get('use_panels'):
+           return '/webapps/tool_shed/base_panels.mako'
+       else:
+           return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="stylesheets()">
+    ${h.css('base','jquery.rating')}
+</%def>
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    ${h.js( "libs/jquery/jquery.rating" )}
+    ${common_javascripts(repository)}
+</%def>
+
+${render_tool_shed_repository_actions( repository=repository )}
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+    You have elected to create a new review for revision <b>${changeset_revision_label}</b> of this repository.  Since previous revisions have been reviewed,
+    you can select a previous review to copy to your new review, or click the <b>Create a review without copying</b> button.
+</div>
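+## Each previous review listed below links to the create_review action with a
+## previous_review_id, so the new review starts as a copy of it; the button at the
+## bottom of the page passes create_without_copying=True to start from scratch instead.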
+
+<div class="toolForm">
+    <div class="toolFormTitle">Select previous revision review of repository '${repository.name | h}'</div>
+    <div class="toolFormBody">
+        <div class="form-row">
+            <label>Revision for new review:</label>
+            <a class="action-button" href="${h.url_for( controller='repository_review', action='view_or_manage_repository', id=trans.security.encode_id( repository.id ), changeset_revision=changeset_revision )}">${changeset_revision_label}</a>
+            <div style="clear: both"></div>
+        </div>
+        <div class="form-row">
+            <table class="grid">
+                <tr>
+                    <td bgcolor="#D8D8D8" colspan="4"><b>Previous revision reviews of repository '${repository.name | h}' that can be copied to your new review</b></td>
+                </tr>
+                <tr>
+                    <th>Reviewer</th>
+                    <th>Revision reviewed</th>
+                    <th>Repository rating</th>
+                    <th>Approved</th>
+                </tr>
+                %for previous_changeset_revision, previous_changeset_revision_dict in previous_reviews_dict.items():
+                    <%
+                        previous_changeset_revision_label = previous_changeset_revision_dict[ 'changeset_revision_label' ]
+                        previous_reviews = previous_changeset_revision_dict[ 'reviews' ]
+                    %>
+                    %for review in previous_reviews:
+                        <%
+                            encoded_review_id = trans.security.encode_id( review.id )
+                            if review.approved not in [ None, 'None', 'none' ]:
+                                approved_str = review.approved
+                            else:
+                                approved_str = ''
+                            repository_rating_name = '%srepository_rating' % encoded_review_id
+                        %>
+                        <tr>
+                            <td>
+                                <div style="float:left;" class="menubutton split popup" id="${encoded_review_id}-popup">
+                                    <a class="view-info" href="${h.url_for( controller='repository_review', action='browse_review', id=encoded_review_id )}">${review.user.username | h}</a>
+                                </div>
+                                <div popupmenu="${encoded_review_id}-popup">
+                                    <a class="action-button" href="${h.url_for( controller='repository_review', action='create_review', id=trans.security.encode_id( repository.id ), changeset_revision=changeset_revision, previous_review_id=encoded_review_id )}">Copy this review</a>
+                                </div>
+                            </td>
+                            <td>${previous_changeset_revision_label}</td>
+                            <td>${render_star_rating( repository_rating_name, review.rating, disabled=True )}</td>
+                            <td>${approved_str | h}</td>
+                        </tr>
+                    %endfor
+                %endfor
+            </table>
+        </div>
+        <div style="clear: both"></div>
+        <a class="action-button" href="${h.url_for( controller='repository_review', action='create_review', id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, create_without_copying=True )}">Create a review without copying</a>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/role/role.mako b/templates/webapps/tool_shed/role/role.mako
new file mode 100644
index 0000000..a6d5b40
--- /dev/null
+++ b/templates/webapps/tool_shed/role/role.mako
@@ -0,0 +1,136 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" />
+
+<%def name="javascripts()">
+    ${parent.javascripts()}
+    <script type="text/javascript">
+        $(function(){
+            $("input:text:first").focus();
+        })
+    </script>
+</%def>
+
+<%def name="render_select( name, options )">
+    <select name="${name}" id="${name}" style="min-width: 250px; height: 150px;" multiple>
+        %for option in options:
+            <option value="${option[0]}">${option[1]}</option>
+        %endfor
+    </select>
+</%def>
+
+<script type="text/javascript">
+$().ready(function() {
+    $('#repositories_add_button').click(function() {
+        return !$('#out_repositories option:selected').remove().appendTo('#in_repositories');
+    });
+    $('#repositories_remove_button').click(function() {
+        return !$('#in_repositories option:selected').remove().appendTo('#out_repositories');
+    });
+    $('#users_add_button').click(function() {
+        return !$('#out_users option:selected').remove().appendTo('#in_users');
+    });
+    $('#users_remove_button').click(function() {
+        return !$('#in_users option:selected').remove().appendTo('#out_users');
+    });
+    $('#groups_add_button').click(function() {
+        return !$('#out_groups option:selected').remove().appendTo('#in_groups');
+    });
+    $('#groups_remove_button').click(function() {
+        return !$('#in_groups option:selected').remove().appendTo('#out_groups');
+    });
+    $('form#manage_role_associations').submit(function() {
+        $('#in_repositories option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+        $('#in_users option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+        $('#in_groups option').each(function(i) {
+            $(this).attr("selected", "selected");
+        });
+    });
+});
+</script>
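+## Browsers only submit <option> elements that are selected, so the submit handler
+## above marks every option left in the in_users and in_groups lists (and in the
+## in_repositories list, when one is rendered) as selected before the form posts.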
+
+<%
+    if trans.user_is_admin() and in_admin_controller:
+        render_for_admin = True
+    else:
+        render_for_admin = False
+%>
+
+%if not render_for_admin:
+    ${render_tool_shed_repository_actions( repository, metadata=metadata, changeset_revision=changeset_revision )}
+%endif
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+    <b>${role.name}</b> is the administrator role for the repository <b>${repository.name}</b> owned by 
+    <b>${repository.user.username}</b>.  ${role.description}
+</div>
+
+<div class="toolForm">
+    <div class="toolFormTitle">Manage users and groups associated with role <b>${role.name}</b></div>
+    <div class="toolFormBody">
+        %if not render_for_admin:
+            <div class="form-row">
+                <label>Repository name:</label>
+                ${repository.name}
+                <div style="clear: both"></div>
+            </div>
+            <div class="form-row">
+                <label>Repository owner:</label>
+                ${repository.user.username}
+                <div style="clear: both"></div>
+            </div>
+        %endif
+        <%
+            if render_for_admin:
+                controller_module = 'admin'
+                controller_method = 'manage_role_associations'
+                id_param = trans.security.encode_id( role.id )
+            else:
+                controller_module = 'repository'
+                controller_method = 'manage_repository_admins'
+                id_param = trans.security.encode_id( repository.id )
+        %>
+        <form name="manage_role_associations" id="manage_role_associations" action="${h.url_for( controller=controller_module, action=controller_method, id=id_param )}" method="post" >
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Users associated with '${role.name}'</label>
+                    ${render_select( "in_users", in_users )}<br/>
+                    <input type="submit" id="users_remove_button" value=">>"/>
+                    <div style="clear: both"></div>
+                </div>
+                <div>
+                    <label>Users not associated with '${role.name}'</label>
+                    ${render_select( "out_users", out_users )}<br/>
+                    <input type="submit" id="users_add_button" value="<<"/>
+                    <div style="clear: both"></div>
+                </div>
+            </div>
+            <div class="form-row">
+                <div style="float: left; margin-right: 10px;">
+                    <label>Groups associated with '${role.name}'</label>
+                    ${render_select( "in_groups", in_groups )}<br/>
+                    <input type="submit" id="groups_remove_button" value=">>"/>
+                    <div style="clear: both"></div>
+                </div>
+                <div>
+                    <label>Groups not associated with '${role.name}'</label>
+                    ${render_select( "out_groups", out_groups )}<br/>
+                    <input type="submit" id="groups_add_button" value="<<"/>
+                    <div style="clear: both"></div>
+                </div>
+            </div>
+            <div style="clear: both"></div>
+            <div class="form-row">
+                <input type="submit" name="manage_role_associations_button" value="Save"/>
+            </div>
+        </form>
+    </div>
+</div>
diff --git a/templates/webapps/tool_shed/user/manage_email_alerts.mako b/templates/webapps/tool_shed/user/manage_email_alerts.mako
new file mode 100644
index 0000000..ac75e62
--- /dev/null
+++ b/templates/webapps/tool_shed/user/manage_email_alerts.mako
@@ -0,0 +1,54 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+<br/><br/>
+<ul class="manage-table-actions">
+    <li>
+        <a class="action-button" href="${h.url_for( controller='repository', action='multi_select_email_alerts' )}">Manage repository alerts</a>
+    </li>
+    <li>
+        <a class="action-button" href="${h.url_for( controller='user', action='index', cntrller='repository' )}">User preferences</a>
+    </li>
+</ul>
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+<div class="toolForm">
+    <div class="toolFormTitle">Email alerts for new repositories</div>
+    <form name="new_repo_alert" id="new_repo_alert" action="${h.url_for( controller='repository', action='manage_email_alerts' )}" method="post" >
+        <div class="form-row">
+            <label>New repository alert:</label>
+            ${new_repo_alert_check_box.get_html()}
+            <div class="toolParamHelp" style="clear: both;">
+                Check the box and click <b>Save</b> to receive email when the first changeset is created for a new repository.
+            </div>
+        </div>
+        <div class="form-row">
+            <input type="submit" name="new_repo_alert_button" value="Save"/>
+        </div>
+    </form>
+</div>
+<p/>
+%if email_alert_repositories:
+    <div class="toolForm">
+        <div class="toolFormTitle">You are registered to receive email alerts for changes to the following repositories</div>
+        <div class="form-row">
+            <table class="grid">
+                <tr>
+                    <th>Name</th>
+                    <th>Description</th>
+                </tr>
+                %for repository in email_alert_repositories:
+                    <tr>
+                        <td>${repository.name | h}</td>
+                        <td>${repository.description | h}</td>
+                    </tr>
+                %endfor
+            </table>
+        </div>
+    </div>
+    <p/>
+%endif
+
diff --git a/templates/webapps/tool_shed/user/manage_info.mako b/templates/webapps/tool_shed/user/manage_info.mako
new file mode 100644
index 0000000..a124287
--- /dev/null
+++ b/templates/webapps/tool_shed/user/manage_info.mako
@@ -0,0 +1,9 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/user/info.mako" import="render_user_info" />
+<%namespace file="/message.mako" import="render_msg" />
+
+%if message:
+    ${render_msg( message, status )}
+%endif
+
+${render_user_info()}
diff --git a/test-data/1.RData b/test-data/1.RData
new file mode 100644
index 0000000..8ce4600
Binary files /dev/null and b/test-data/1.RData differ
diff --git a/test-data/1.axt b/test-data/1.axt
new file mode 100644
index 0000000..046d579
--- /dev/null
+++ b/test-data/1.axt
@@ -0,0 +1,85 @@
+##matrix=axtChain 16 91,-114,-31,-123,-114,100,-125,-31,-31,-125,100,-114,-123,-31,-114,91
+##matrix=axtChain 16 91,-114,-31,-123,-114,100,-125,-31,-31,-125,100,-114,-123,-31,-114,91
+0 chrX 3022176 3023241 chrX 132722808 132723933 - 67671
+tcttcatgtgacttgcttgtgccttttaggacctgctcaagcctgatcacatatgtacaacttccagttgataatggattgttgctgtgcacaagctactttttagcttcattggttagataatactcagctcatgagatgcaggtcagattacatggcaatttggtagctcattgtcaaatgtttagtttctactgagacctaatatcaggccaagaactgtcactcaaatagagaatcgctatctgcaaaaatcatttgtccaaagtacctagggtttc----atctgtaatacatgtgtaggggcctgctaaatgctccaagcagcatccccatctgccactgtcacctc---atgtgcc--------------------------tagtcatagaaaaacatgcacagcagcctggacctgttaaagaaacttctcctatttctgtacccacacaaagctagcttctcttcaagtcacttggtatatg [...]
+tcttcatgtgacttgcttgtgccttttaggacctgctcaggcctgatgacacacatataacttccagttgataatgcattactgctgtgcacaagctatcttttggcttcattggctagataatactcagctcattaggtgcagttcagatcacatggtaatttggtggctaattgtcaaatgtttattttctactgagccccaatatcagtccaagaactgtcactcaaatagaggattgttgtctgcaaaa-tcatttgtccaaagtccttcgtgtttctttcctctgtaatttatacataggggccttctaaatgctc-aagcagcatccctatctgccactgtcaccaccacaagtgccctccagtctgctggatcataaaatcctagtcatagaacaacatgcacagcagcctggacctgttgaagaactttctccaattcctgtactcacacgaagctagccactccctaagtcacttggtatatg [...]
+
+1 chrX 3025132 3025662 chrX 132731310 132731841 - 23012
+ggagaaagggaatgggggatga-gagtgggactgggaggagaggagggaggagaggctgtaatcaggatgtaaagtgaataagcaaataaGTTAAGGAAAACAACAACAAAAAGAAACTGGATTAATTTCAATTTATTCAAGAACATTGGTTATTTCCATTAAGTACTGTAGTATTATGTATATGAATATAAAAATCCTTTCATCAAAGCAAGGAAGAGAGCACAAAGCTGCCCTTAAGTCATTGAAGACCTGGAGCACCTATGGGCCTCCCAAGAGGTTCTCACAAAACCTACAACCTCCATCCCAAGAGCCTGTGTCTGACAGAGCTGGTATGAGAAAAGGAGACAATTCTTCCC-----CAGG------------TTTTAGCCAGACTTTGAGTACACAATGGGATAGTGAGGAGCC----------------------CAGTGAAAGCAA------------TTAGTGATTTGGCCTGTGACCCTCTA [...]
+ggaggaggaggaggaggaataacaagaagaagaagaagaagaagaagaagaagaag--------aagaagaagaagaagaagaagaagaag--aagaagaagaagaagaagaagaaACAGGATTGATTCAAATTTATTCAGGAATACTAGTTATTTCGAATAAGCCCTATAGAATTATGTACATGAATCAAAAAATCCTTTCATCAAAGCTAGAAAGAGAGCACAAAACTGACCATAAGTCATTGAATACCTGGAG-----------------------------------------CTCCATCCCAAGAGCCTATGTCTGGCAGAGCTGGTGTGAGAAAAAGAGGCAATCCTTCCCTGCTGCAGGAATAATCAAGCATGTTGGCTGGACCTTGAGTACACATCAGGATAGTGAGGGGCCTGGCAAAAAGCACAGACTATGGCACTGAAAGAAATTAATAAGGGACTTAGTGATTTGGCTTATGACCCTCCA [...]
+
+2 chrX 3030008 3030565 chrX 132731842 132732393 - 30932
+CATTGTTTTTGACAAATCAGTTTCCTTTAGACTACACCTTCAAGCCACTTAAGGTTGCATTTACAACAAAAATTTATAATCTAAATATCAACAGTAATGGCAATATTTATCTTGATAATCTCAGATCACAGTGTCTCCTGCTTTAACTA-TTTCTAATGTTCATTTATCCATTTGTTCACTGCTATGTGATCCAAACCCTGATGATCTCCTAGTGCCAGAGACTGCACAGATCT--AAAACAGATAGTGATAAATACAACAGAATATCTTGGGAATGGACTCAGAAGTATGCCATGTGATGTTACCTTAAAGTCAGAATAACTTGCATTTTATAGCTGGAATAAACTTCAAATTCCTATTCCTTTTTTTGATCTCATGTTTTTCATTTT--TTTCTTTTTATTTCCCTCCTTTCATTCACATGTTTATCTAAG-AGACTTAAATTCTTTCAGCTTTAGACAATAACTACTTTTAGAATCTGCAAAGTAGTTA [...]
+CATTCTTTTTGACAATTCATTTTCTTACACACTACCCCTTCAAACCACCTATGAAA------ACAATAAAAATTTATCATCTAAATATCAACAGTAATGGCAGCATTTGTCTTGATATTCTCAGATCACAGTGTCTCTGGCTTTAACTAGTTTCTAAATTTCTTCTATCCATTTGTTCACTGCTATCTGATCCAAACCCAGATGACATCCTAGTGCGAGAGCCTGCATGGATCTCTGAAACAGACAGAGGTAAGTACAACAGAATATCTCAGGAATAGACTCAGAAGTATGCCATGTGACGCTGCCTTAAAGTCAGAATAATTTGCA--TTACAGCTGGAATGAACTTTAAACTGCTGCACC-TTTTTTGACCTCatttttttaattttaaattttGTTTATCTCCCCCCATTCATTCATGTGCTTACCTGAGAAGACTTAAGTTATTTAAGCCTTGGATAATAACTGATTTTACAAACTATAAAGTAGTTA [...]
+
+3 chrX 3030576 3032510 chrX 132743487 132745311 - 109736
+ttgtttgagacagtgtttctctgtgtagccacatctgttgtggaactcaatttgcaaatcaggctggcctcaaactcacagagatctacctgcctctgcctcttgagtgctagtattaaaggcatatgcaaccactgcctggctaaaataatatttttaaaataaCATGGTTACATGCCAAATTGACAAGGGGTGGATA------AGCCTATACtgatagataatttcacagaatctagaaacaccctggggaaaaatgtttgcacatgcctggg----gccagggagat--gttaatttctttggttatcgaaggagggagacctaatctaattgcctaa-tgaatgaaaagaagtaagtgaagtacctgagctcatctcctgttggttcctaaatgcaaaagcaatgagaccagctagcctcctgtttcttccatttctttcgccaatatcttctctgctatgatggattgtctttcttaaattgattgc [...]
+ttgtttgagacaggatttctctgtgtagtcgcatctgttctggaactcattctgtagaccaggttggcctcgaactcacagagatctggctgcctctgcctcccaagtgctaagattaaaggcttgtgccatcattgcctggctAAAATAATATTTTTAAAACAACATGTTTACATGTCAAGTTGACAAGAGGTGGACTtgtgatagcctatacttatagctaattttacagcatctagaaacaccctgaggacaaatgtttgggtgtgcaggggcagggcgagggagtgcagttaatagctttggttagcaaaggtgggagacctagtttaactgcctgactgaatgaaaataagcaagtgcggtatcagagctcatctcctgttgcttcctaaatgcagaagcattgggaccagctagcctcctatttcttccatttctgttgccc-catcttccctgctatgatggaataacttccttaaactgattgc [...]
+
+4 chrX 3032511 3033698 chrX 132745428 132746659 - 78115
+TTGTACAAAGAATGTAATTTACTGTGTATCTAAATTTTAATGACATACCAATATGACGGCAGGAAATGGATAGTTTAGAGACTTTGGAGGTTAGTTATTAATATAGATGAGTAAATAAGCATGAACCAATGTCAGTAGTGGTGATAAACAGAACGAAGACAATTGTAGATAATTTGGTCACTGAAA-GGGAAAATTACTTGTTATTAAGGGTGGACATAAATTAGAATGGGGGCAACATCAGACCTGATACAGAGGAGGACAAGACCAGGCCAGAAAAGTTAGCAAACTAATAACCATTGAAAATCAGAATCCTTGGGTTTTAAGTGGCCCACTTTATAACATACCCATTTAATTA---GAATCTCCAACAGGTCTTTCTGGTCTTATCTAGATATTTTGTCAGCAGAC-ATCCTTAGGAGACCAGTTTTGCTGTGGCTTTGTTGTGCTTGCTTCCATGACAACTAAAGACACAAATAGAATGTATTGTTTC [...]
+TTATACAAAGAATGCAATTTGCTGTGTATCTTAATTTTAATGAAGTAGCAATGTGAAGACAGTAAATGGATTGTT-AGAGATTTTGGAGTTTAGT----AATGTAGATGAGTAATTAAACATTAACTAGCATCAGTAGTAGTGACAAGCAGAAGGAAGACAACGGTCAGTAACTTGGTCATTGAAACGGTAAAATTACTTGTTATTAAAGGTGGGCATGAATTAGAATGGGGGCAACACCAGATCTGATA---AGGAGGACAAGACAAGGCCAGAAAAATTAGCAAACTAAAAACCATTGAAAATCAGAATCCCAGGGTTTTAAATGGCCCACTTTATAACATACCCATTTAATTATTAGAATCTCCAACAGGTCGTTCTGATCTTATCTAGGTACTCTGTCAGCAGACTACCCTAAGGAGATCAGTTTTGCTGCTGCTTAGTCGTGCCTGTTTCCATGACAACTAAAGACACAAATAGAACGTAC-ATTTC [...]
+
+5 chrX 3033699 3034238 chrX 132747054 132747589 - 33753
+aaaaataaCAGGTCCCCAATGACAACATTTTACGTTGTCACGAGTTTATTAAATCTCAATTATGGACATTGGCAAAGAATTATTAATTAAACAAGAAGCCTGCTTGAGAGCAAAGCACCTTTTTACTTGTATTCATCA--GGGTTTTT---TTTTTCTTTTTGTTATATAGTTCAGT---------TCATGTTTTGGTTCGTGCAGACATCATTGCAGTTAGCACCTAGAAATATTCCAACCTCTCAAACTATCTCTTCCTACCCACATTATTAAAAATCttcttgatccctggcaataatgatttgtgctccatctgtataaattcattatctatataatgtattgtaaaggaattatagacattttgaaattggatttttttcactctctgtaatgcccctgaaacttattcaattattgtttcttctttattattgctgactagtgttgcatttatgaattaacaaagttgttgaagatatttaggttg [...]
+AAAAATGGCAGTTCTTCAATGAGAACAGTTTATATCATCACAAGTTTATTGAAGCTCAATTATGGACATTAGCataatattattaattaaattaCAAGCCTGCTTAAGAGCAAAGCACCTTTTTACTTGTACTTATCACTGGGCTTTTTCGTTGTTGTTGTTGTTATATAGCTCTGTTCAATTGTATCATGTTTTGGTTCAGGCAGACATCATTGCAGTCAACACATAGATAG-----------TCAAACTATCTCTTACTACCCACATTATTAAAAATCttcttgatccctggcaataacaatttgtactcc-tctgtata----cgttatctagacaatgctatgtaaagaaattacagacattttgaaattggatttttttcactcagtgtaatactcctggaatttattcaactgttgtttcttctttgttattgctgactaatgttgcatttatgaattatcagagttgtt-aagatatttaggttg [...]
+
+6 chrX 3035181 3035230 chrX 132747590 132747639 - 3318
+gtgcataagttttcctacttctagaataaatgcctaaAGGATATGGCAAC
+gagcatacattttcctacttctagagtgaatgcGTGAAGGATGTGACAAC
+
+7 chrX 3035231 3035841 chr9 84391632 84392261 - 18947
+ACTTGtttgttttgtttattagatattttctttagttacatttcaaatgttattccctttccaagtttcctctccaaaaacactctattccttctccctttcccctgctcaagaa------------cccacccactcccacttc-ctagaactgtctgtcccctgtaccggggcttagaacaac-acaggaccaagggcctcttttcccatcgatgaccaactaggtcatcctctgctgcatatgcatgagtaggccc---------tctctgtattttctttgattggttgtttagtcccagggagcttaggggtgg-tggttcattcatattgttgttcctcctaggggaatgcaaatcctttcagcttcttgggaaatttctttagctccttcaatggggatcctgtgctccttctaatggatgactgtgagcatccacttctgtattagtcaggcactggcagagcctctcaggagacagctatatc [...]
+ATTATTTttctttctttattcattattttatttatttacattttaaatgttgcccctcttcctggtcccccctcaaagagttctttaccccttct-ccttccccttgcttctgagagagtgatccccaccacccactcctacttcacccccactagcattccccttccctgggacatcaagtttctactggattaagtgcatcctttcctactgagattagaaaaagcagccctctgctacatatgtactgg--ggcccacagactagcccatgtttgctctttggttggtgggttagcctctgggagctccgggaggtccagttagttgctactgttgttcttcctatggggttgcaatccccgtcagctccttcagtccatcccctaactcttccataggagtctctgacctcagtccgaaggctggctgtaaataactgcatctgtctcagtcatctgctggtagagcctctcagaggacagcaatgct [...]
+
+8 chrX 3036436 3036947 chrX 132751812 132752320 - 32759
+tagaaatacctaaacgtctataggaattcactgACAGAAAGGAAAGGTGATGCTCAAAGTCTCAGGGCAGTGCTACAATGCATAGGCAAGCAATACCATTTAAATGGAGAGATTTAAAAAAAATCATTTCTTCACCCATAAACCAGAACCAATTATTCAATATGGTCAAGTACCATGTTTTATTAGAGTTATTTCTGAATTAGATCATTGAACAGGATTCTAACCTAAATAACAAATTT-TATCTAAAACATTTTCTAGACTAAAAAGCATGCTTTGAGCCCTCCTTAAATAAAAAGGCCCATAGTGTATGAAACTAATCAACCATAGATCCTATATTTACTCCACCAATCCTCAGGACAGTTTATACTCTTtgtcctagttgggattttatttatgtgaacagacatcatgaccatggcaagtattataaaggacaacatttaattggggccggcttacaggttcagaggttcaggccattatcatTG--- [...]
+TAAAAATGCCTAAATTTCTATAGGGATTCACTAACAGCAAGGAAAGGGGATGCTCAAAGGCTCAAAGCAGTGCTATGATGCATAGGCAAGCAATATCATTTAAATAGA-----TTTTAAAAAATCATTTCTTCAACCATAAACCAGAACCAATTATATAATATAGCCAAGAAAGATGGTTTA---GAATTATTTCTGAATTAGATCATTGAATTGGATTCTAACCTAGATAACAAAAAAATAGCTAAAACTCTTTCTAAACAAAAAAGCAAGCTGTGAGCCCTCCTTAATTAAAAAGGGTCATAGTGTATGAAACTGATCAACCAGAGACCCTGTATTAACTCCACCAATCCTCAGAACAGTTTATGGTCCTTCtcttagttatagttttattgctatgaatagacatcatgaccatggcaagtcttataaagcacaacatttaattggtgaaggcttatgggttcagaggtccagtccattatcatTGTAT [...]
+
+9 chrX 3040802 3041403 chr5 51053187 51053738 - 12815
+ttttggttaaatggtctggg-ctgtgtattccttgcatctttttgggtgtatttacacttcttttcatgcagaatttttctttctagtttcttgtgtagtactgaattgctggatagatattacttaaatataataatatttgttttttaaatcaggaaatggtcttatttctccactgattataattgatagttt-gctaggtgttgtggtcctggctggcatt---ttttggtctttcataatg----tgtagaccatttgcccaggccctttgagctttcaaagtctccattaaaatcaggtt------ttattataatgggcctgactttatatataacttggtctttttcccttgcagatttcatatcatttctttgttctgtacatttaagtgttttcattattatgtgtcataggaaagggtgtgtgtgaacttttctagtcctgtctggggtgtgttctatatgcctcttgctccttgatattt [...]
+ttttagtaaactgttcagggattacttgtttcttgtgtcctcttgggtagagctaa--------ccagaatgaagttgtccttctagtgtcttctgtagagctgattc--------------atttaaat-tccttacatttgttttt---attatgaaatgttttcctttctccctcagttaagattgctggttttgctggccatggtagtccgggctggcattatgtcattgttgttcataacaaacttgtaaaatgtctgcc-aggccctctg-gcttttaaaatctcgattgcaagtccattggaaagctctgctaatgaccctgtctttatatgtgacttcatttttaccc-----------------atatccttgttctgtacatgtacgtgcc---------------------aaggagagttt-----cttttctggtcctgtctgtctggtgtcctgtatacctcatgtacttcgata--- [...]
+
+10 chrX 3042537 3043219 chr5 51053739 51054217 - 9161
+TCCAGATAGTCTGATGTTTTGTGTCTAGATTTTTTCCATTTACTTATTTATtttatttacttattcactttatatcccaatcacagctgcccctactcacagtctcttcctcacaatctttccctcaccccctccccttgtcctctaagaagggggaccccctgggttccaacccacactgTTGTTCCTAGATCtttaagatctaatatttccaatgtcagagatatccattccttccacgttgttccccagaccttaaatt----ctttcacttcattaaatcagttgattaggcttttctctgaggtctttgtttgacttcctgagtttttatt-ttcaagtcttatttcagtttgggtta-atttagtcattctacttctttgtcatattgtattttcatag-ttgaattgtttacatttcctcagtcaactatttttgtcttcagagttctcagtgaaacatttatt-catatcct-tttaaggtcct [...]
+tcc------ttgaatggtttttgcctagagttt--------------------------------aaatttatatt--------------------taacaagctctt--------------------------------------------------------------------------------tggatgattga---------------------------tccatttcttctactttgtcttcaggacctgatattttctcttccacttcattcaat-agttgacgaggctttcttctgagcctgttgtgtgacttcctgctttttttcaattcaggttttacttcaggttgacttgtctttagccagtctatccctttgttaacttctattttcttatcttgaattgtattcattatttcattcatttgtttatgttttcttgatcttcattcaggcatttttttcacagcttctttaaggtc-- [...]
+
+11 chrX 3043248 3043585 chrX 132759268 132759608 - 22408
+TGTATCTATTTTTGCCTTTTGGGTAGGCATTTAGTTCTTTGGTTGCTATTACTCCTATCTGACAAGTTGTCAACCATCAGAATGATACTTGGTTCTCATTCTTTGAATCACTCGGGATTTTAATATTCGGATTCCAACTTGTACTGGATAGGATTTTCTGGTTGCAACTCTGTGTTCAAACCATAAGCTGTCCCAGATCAATTCAGTGGGTTGTATAGGTAATAGGCTAGCAGTTGAGGAGTGGAGACTGACTCTGGACGTCTTCTAGGTAGGGTTTAGGATCTGGCTCTTAGGGTCAGG---CAAGGCAGAATTATTGTACATTAGTGGCAGTTAGTGAT
+TATAACTATTTTTGCCTTTTGGGTAGGCATTTGGTTCTTGGGTTGCTATTACTCTTACCTGACAAGTTGTGAACCATCAGAATGATACTTGGTTCTCAGTCTTTGAGTCACCCTGGATCTTAATATTCCAATTCTAACTTGTATTGGATCTAATTTTCTGATTGCAACTCTGGGTCCACACCATAAACTGTCCCAGATCAATGTAGTGGGTGGTTTAGGTGATAGGCTAGCAATTGGAGAATAGCAACTGACTCTGGAGGTCTATTAGCTAGGATATGATATCTGACTCCTGGGAAGAGGGGCAGAAGCAGAATTAGTGTACATTAGTGGTAGTTAGTGAT
+
+12 chrX 3044072 3044955 chrX 132759936 132760824 - 57490
+GTGATGAATCCCAAGGGAATGGATATGGGCTTCAAGGAGGGGTTGAGGTGGGCAGGCACGGAGTAGG----TTTAAGGGGAATTTAAAGAGACTAGAATCAAGAAGATGAAGTAAGGCACAACCAGCCTGTCAACATTCATTGTCCTATGCATATCTGTCTGTAAGACCTAGGATTTGAAGACTTTAAAGAATGAAGCAGAACACAATCTAATACTGTAGACAACTTTATTTCAGTTTCAGTGTTTTTATTCAAAAATTTAATAAAGTAAGCAATGATTCAAGGAAGGTTGTTTGAGTTCAGAAAAAC----ATAAAAGCCAGCAAGCACATGCTAAATATTAACCGAGCAGCTCTTTCAGAGAACTTTCTTAAGACAGACCAATTTAAACACAAATGCCTAGCACCTATTATA---GACTATATCTGAGAAAGCTTGAAAGCAAGGCAGAAGGCCATAACCAGATTCAGGGTACACTGAAAAAAACACTCA [...]
+GTGATGAATCCCAAGGGAATGGATATGGACTTCAAGGAGGGGTTGAGGTGGGCAGACACGGGGGGGGGGGATCTAAGGGGAATTTAGAGAGACGAGAATCAAAAAGAGAAAATAAGGCACAGGCAGCCTGTCAACTTTTGTTTTCCTATGCAGATCTGTCTATAAGACCTGGGATTTGAAGACATTAAAGGATAAGGCAGAATACAGTCTAATGCTGTAGACAACTTTATTTCAGTTTCAGTGCTTTTATACACAACTGTAA-AACGAAAGCAATGATCCAAGCAAGATTGTTTGAATTCAGAAAAGCATAAATAAAAGCCAGCAAGCACATGTTAAATATCAACAGAACAACTCTTTCCAAAAAC--TCTTAGGACAGACCAATTTAAACACAATTGCCTGGCACCTATTATACACTACTACATCTAGGAAAGTCTGAAAG-AAGGCCAAAGGCCATAACCATTTTCAGGGTACTCTGAAG-----ACTCT [...]
+
+13 chrX 3044956 3044983 chrX 132767880 132767907 - 2133
+TGACATACTTCCTCCATCTAGGCCACAC
+tgacacacttcctccctcaaggccacac
+
+14 chrX 3045356 3046390 chrX 132767908 132769035 - 54561
+CTActccaacaaggccatatctcataaccctaccaaagattctactccctgatgactaagctttcaaatatatgagcctgtgagggacattcttatttaaaccactacaGCTAGAAAAAT-TACAAATGTTTTTATTTGCATTTCCCTGATGGATACAGATTGTTTTAAtaatttcctttttgctcctgtgataaaacactgaccaaaagcagcttggatgaggaaagggtt-----gtttagtttacagattgcagccta--------------------------------------------------------------------------------------------tctttgaggtaagctaaggcaaaaactaaagTTAATTTTT---AAAATATAGTAATTTCATGTAATGATTCATTGATTCAACAAAT----------TACTGGCTATGTTATAGGTcagtagttatcaacctttctaaca [...]
+ctactccagcaagtcc-----tcataatcctatcaaagattctactccctggtgactatgcatttaaatatatgagtccatgagggacattcttatttaaaccaccccaGCTAGAAAAAAACATAAATATTTTTATTTGCATTTCTCTGATGGATGCAGATtgttttaataatttcctttctgctcctgtgaaaaaacactgaccaaaagcagcttggatgaagaaagtatttttttatttagtttacagattaaagcctatctttttttggggggggggactgaacccagggccttgcgcttcctaggcaagcgctctaccactgagctaaatccccaacccctaaagcctatctttgagataagccaaggcaaaaactcaagTTATTCTTTTTGAAAATACAGTAATTTCACATCATCattcattgattcaacacatatttatggaatattggctatgATATAGATcagtggttcttaaccttcctaatg [...]
+
+15 chrX 3046548 3047589 chrX 132769039 132770071 - 59414
+CTTTGAGCAAGTGATGTTTAAGTCAAGACAGGAAAGACATCAACTTGGAAAGGTTGTGAAAAGCATTATAAGTAAGAAATACTTTTACTGCAAAAGTTTTCAAATTCAAAACAAACTTGTTACCTTTGAGAATGTACAGGCAACATTAGTGTGGCTGTCATAAACACTCAAAAAAAAAAAGTAATAACCCAGAGCAGTACTTTA--------------------------------------------AACCTTGCATAGTGGTACATAATTAAAATCCTAGCTCTGGAGAGACTAGGGCAGGAGGGTCATGACTACATAATGGTACACTCTCAAAAAGAAGAAAAACAACCCTCAAAGTAAGAAGCTCTTATAATATG-TTCCCAATTGTTTCTTTGTGAAAATGTAATGCCATTAACATGTTCTGTATCATTTTAGTACATTGTATCTGTATATGTGTTT---Gttaatttataaaatattaaattttat [...]
+TTTTGAACAAGTGATGTTTAATTCAAGACAGGAAAGACATCAACTTTGAAAAGTTATGAAAAGCTCT----GTAAGAAATAATTTTAGTGCAAAAGCTTTCAAACCCAAAACAAACTTGTTACCTTGAAGAATGCACAGAGACCATTAGTATGGCTGTCATACATACTCAAAAGAAAGAA--AATCTCCCAGAGCAGCACTTTACTACTTTTCATCATTGTGTTTTCCCGACTAAAAATACATTTTTTAAACTGGCATAGTGGTACACAATTGAAATCCTAGCTCCTGAGACTCTAGGGCAGGAGGGTCATGGCTACATAATGGTACACTCTCAAAAAGAAAGAA---AACTCTTAAAGTAAGAAGCTTCTAAAATATGTTTTCCAATTGTTTCTTCACGAAAATTTAATATCA-----GTGTTCTGTATCTTTTTAATACA-TGTATCTGCATATGTGTTTATAGTTAATTTATAAAATAGTAAACTTTAT [...]
+
+16 chrX 3047590 3047634 chrX 14222368 14222417 + 1675
+CTGTTAGGG-----ACCTGGGTATAGCCCCAAGTGTTCAGAATGTGAGAT
+CTGGAAGGAACTACACCCAAGTACAGCCTTGAGTGTCCAGAACATAGGAT
+
+17 chrX 3047635 3047719 chrX 14222571 14222648 + 3429
+TAGTTAAACCTGGATTGGTCTCAGCAAGACTATAAGATAAAGGAACCTTACACCTTAACACTGTTTTGCCCAGTTACAAAGGTac
+TAGTTTAACCTGCAGTGGCCTCAGCAAGAATACAAGATGGAGGA-------ATTTCTGCATTGCCTTGCCTGCTGACAAAGGGAC
+
+18 chrX 3049058 3049300 chrX 132770073 132770321 - 14281
+ctcctgtaccatgcctgcctggatgctgccatgctcctgcctagatgatagtagactgaacttctgaacctctaagccagcccaaattaaatgttgtccttataaaagatgccttggtcatagtgt----tctcatcagtaaaaccctaa----GACAGAAGTTGGTACCAGATACGGGGGTTATTTGTTTCTTGAGGAATCATGAACTCTGAATACTGTG--------CCCAGGGGCATTCTGAAGATCTCATTAGAA
+ctcctgcaccatgcctgcctggatactcccatactcccaacttgatgataatggactgaacctttgaacctgtaagccagccccaattaaatgttgtccttataagagttgccttggtcatggtgtctgttcacagcagtaaaatcctaactaagacaGGAC----------ATATGGGTTTTATCTGCTTCTTGGGGAATCATGAACTCTGAACATTGTGCTAATATACCTAGGGCCAttctgaagatcatattagaa
+
+19 chrX 3049301 3049850 chr15 879492 880081 - 31160
+GGTTTTGTTAGATATTCTTTCAGATTAAAACATTCCATCCTAGAAGGAGGAAGAGGGTTTATAAAACTCAAGGACTATCACAATTTAA--GACTAATGAAACTCTGGGAA-TTCACAAAAGTCACATGGCCATCCCCAATGTTCTACAAGTAGTGAACAATTGCAGGCAGGAGTG-AGGGGTTTCCTTCAGTGGGGATTCCTGAAAGTTGGTCAGAGAGCTCTAGGGACGTAGTTTTCATTAATCATTATCAACACTGAGTAAGGCTTTCTGGTAATGGAGCTGACATTGAAtcatctgtgttcctttcagtgacctcaataaactcattggttcactaagctaaaatcaggtgCAATGATGTTGGCTTTGTTTCACTC--------------------------------------------AACGTCCTACTTGGAGTAAACATATACTTGTTCATATATTTTCAGGAAAATTTACACAACAGAAGGTAG [...]
+GTTTTTCTTAGGTGTTCCTTCAGATTAAAATATTCCATG-TGGAAGGAGGAAAAAGGTTTATAAATCTCAAGGACTATCTCAATTTAAAAGACTCATGAAACTCTGAAAACTTAACAAAAATCACATAGCCTCCCCCAATATTACATAAGTAGTGAACAATTGTGGGCAGGAGTGAAGGGGTTTCCTTCAGTGTGGGTTTTtgcaagttgggcagagagttctagggaatcaattttcataaatcagcatcagcattggggcaagctttctgttaatggagctgatg-tgagtcatccgtgttcctttcaatgacctcaacaaactcattggttctccaagctaaaatcaggtgctatggtgtagtctttgtttcactctgattaattaattaattaattaattaattaatcaagtaattgttaatgtcctacttg--gtaaacatacgtttatctaactattttcaggaaaattcacacaacaGAAGGTCG [...]
+
+20 chrX 3055000 3055956 chrX 27929767 27930764 + 46154
+CATCTGTCTAAGATGt---aattatatattttattaataa---ttatattaGCCTAGTTGTTAAACAAG----TTAAGTGTTTCTAATgtgatcctatttaaatataataag-------------cttaactgcataaatgccaaaaatatctccttctctcactcaaggtaaaggcaaaattcacctgtagagtttgtaacctgcagagctttgagctttctgactccactatgacttctagaaaatcacct--cacagttctctgcctccctccatcagttccagccatgcctctatcccagctgcttctcctcatgcatgcaaggtcttctctgacaactgagcctcagcaactgtcatcactttagctggaatgcatatttctcTTGACTGCTGCTGGAGGCCACAGTCTACCTGCATGT-----------------------------------------TTTCATTCCACACCCAAAGGTTCCTTG [...]
+CACCTACTTACAATGTGGAATTTATATATTTGATTTTCATGCTCCAAATT--CCAAGATTTTTGAGAAGATTTTCAGGAGTTGCTGcagtgtgcctgttaaaatatgacaaaaactatgagatgcttcaactttaaaagcactaa---catgtccccctctcactcaaggtaaaagcaaaattcctctgtagaagttttaacctgcagggctttgggctctctgacacccctgtgacttctgagaaatcacttttcacaattttctaccacccttcatcatatccagccatgccttcatcccagctgcttctcctcatgcatgccaggtcttctctggc---------ccagcaattgtcatcacataagctggaatgcacattccccTTGACTGCTGTTGGAtctctctctctctctctctctctctctctctctctctctctctctctctctctctctcCTTGTTCCATTCTAAACCTAGAGCTTCCTTG [...]
diff --git a/test-data/1.bam b/test-data/1.bam
new file mode 100644
index 0000000..95c65de
Binary files /dev/null and b/test-data/1.bam differ
diff --git a/test-data/1.bed b/test-data/1.bed
new file mode 100644
index 0000000..eb4c30e
--- /dev/null
+++ b/test-data/1.bed
@@ -0,0 +1,65 @@
+chr1	147962192	147962580	CCDS989.1_cds_0_0_chr1_147962193_r	0	-
+chr1	147984545	147984630	CCDS990.1_cds_0_0_chr1_147984546_f	0	+
+chr1	148078400	148078582	CCDS993.1_cds_0_0_chr1_148078401_r	0	-
+chr1	148185136	148185276	CCDS996.1_cds_0_0_chr1_148185137_f	0	+
+chr10	55251623	55253124	CCDS7248.1_cds_0_0_chr10_55251624_r	0	-
+chr11	116124407	116124501	CCDS8374.1_cds_0_0_chr11_116124408_r	0	-
+chr11	116206508	116206563	CCDS8377.1_cds_0_0_chr11_116206509_f	0	+
+chr11	116211733	116212337	CCDS8378.1_cds_0_0_chr11_116211734_r	0	-
+chr11	1812377	1812407	CCDS7726.1_cds_0_0_chr11_1812378_f	0	+
+chr12	38440094	38440321	CCDS8736.1_cds_0_0_chr12_38440095_r	0	-
+chr13	112381694	112381953	CCDS9526.1_cds_0_0_chr13_112381695_f	0	+
+chr14	98710240	98712285	CCDS9949.1_cds_0_0_chr14_98710241_r	0	-
+chr15	41486872	41487060	CCDS10096.1_cds_0_0_chr15_41486873_r	0	-
+chr15	41673708	41673857	CCDS10097.1_cds_0_0_chr15_41673709_f	0	+
+chr15	41679161	41679250	CCDS10098.1_cds_0_0_chr15_41679162_r	0	-
+chr15	41826029	41826196	CCDS10101.1_cds_0_0_chr15_41826030_f	0	+
+chr16	142908	143003	CCDS10397.1_cds_0_0_chr16_142909_f	0	+
+chr16	179963	180135	CCDS10401.1_cds_0_0_chr16_179964_r	0	-
+chr16	244413	244681	CCDS10402.1_cds_0_0_chr16_244414_f	0	+
+chr16	259268	259383	CCDS10403.1_cds_0_0_chr16_259269_r	0	-
+chr18	23786114	23786321	CCDS11891.1_cds_0_0_chr18_23786115_r	0	-
+chr18	59406881	59407046	CCDS11985.1_cds_0_0_chr18_59406882_f	0	+
+chr18	59455932	59456337	CCDS11986.1_cds_0_0_chr18_59455933_r	0	-
+chr18	59600586	59600754	CCDS11988.1_cds_0_0_chr18_59600587_f	0	+
+chr19	59068595	59069564	CCDS12866.1_cds_0_0_chr19_59068596_f	0	+
+chr19	59236026	59236146	CCDS12872.1_cds_0_0_chr19_59236027_r	0	-
+chr19	59297998	59298008	CCDS12877.1_cds_0_0_chr19_59297999_f	0	+
+chr19	59302168	59302288	CCDS12878.1_cds_0_0_chr19_59302169_r	0	-
+chr2	118288583	118288668	CCDS2120.1_cds_0_0_chr2_118288584_f	0	+
+chr2	118394148	118394202	CCDS2121.1_cds_0_0_chr2_118394149_r	0	-
+chr2	220190202	220190242	CCDS2441.1_cds_0_0_chr2_220190203_f	0	+
+chr2	220229609	220230869	CCDS2443.1_cds_0_0_chr2_220229610_r	0	-
+chr20	33330413	33330423	CCDS13249.1_cds_0_0_chr20_33330414_r	0	-
+chr20	33513606	33513792	CCDS13255.1_cds_0_0_chr20_33513607_f	0	+
+chr20	33579500	33579527	CCDS13256.1_cds_0_0_chr20_33579501_r	0	-
+chr20	33593260	33593348	CCDS13257.1_cds_0_0_chr20_33593261_f	0	+
+chr21	32707032	32707192	CCDS13614.1_cds_0_0_chr21_32707033_f	0	+
+chr21	32869641	32870022	CCDS13615.1_cds_0_0_chr21_32869642_r	0	-
+chr21	33321040	33322012	CCDS13620.1_cds_0_0_chr21_33321041_f	0	+
+chr21	33744994	33745040	CCDS13625.1_cds_0_0_chr21_33744995_r	0	-
+chr22	30120223	30120265	CCDS13897.1_cds_0_0_chr22_30120224_f	0	+
+chr22	30160419	30160661	CCDS13898.1_cds_0_0_chr22_30160420_r	0	-
+chr22	30665273	30665360	CCDS13901.1_cds_0_0_chr22_30665274_f	0	+
+chr22	30939054	30939266	CCDS13903.1_cds_0_0_chr22_30939055_r	0	-
+chr5	131424298	131424460	CCDS4149.1_cds_0_0_chr5_131424299_f	0	+
+chr5	131556601	131556672	CCDS4151.1_cds_0_0_chr5_131556602_r	0	-
+chr5	131621326	131621419	CCDS4152.1_cds_0_0_chr5_131621327_f	0	+
+chr5	131847541	131847666	CCDS4155.1_cds_0_0_chr5_131847542_r	0	-
+chr6	108299600	108299744	CCDS5061.1_cds_0_0_chr6_108299601_r	0	-
+chr6	108594662	108594687	CCDS5063.1_cds_0_0_chr6_108594663_f	0	+
+chr6	108640045	108640151	CCDS5064.1_cds_0_0_chr6_108640046_r	0	-
+chr6	108722976	108723115	CCDS5067.1_cds_0_0_chr6_108722977_f	0	+
+chr7	113660517	113660685	CCDS5760.1_cds_0_0_chr7_113660518_f	0	+
+chr7	116512159	116512389	CCDS5771.1_cds_0_0_chr7_116512160_r	0	-
+chr7	116714099	116714152	CCDS5773.1_cds_0_0_chr7_116714100_f	0	+
+chr7	116945541	116945787	CCDS5774.1_cds_0_0_chr7_116945542_r	0	-
+chr8	118881131	118881317	CCDS6324.1_cds_0_0_chr8_118881132_r	0	-
+chr9	128764156	128764189	CCDS6914.1_cds_0_0_chr9_128764157_f	0	+
+chr9	128787519	128789136	CCDS6915.1_cds_0_0_chr9_128787520_r	0	-
+chr9	128882427	128882523	CCDS6917.1_cds_0_0_chr9_128882428_f	0	+
+chr9	128937229	128937445	CCDS6919.1_cds_0_0_chr9_128937230_r	0	-
+chrX	122745047	122745924	CCDS14606.1_cds_0_0_chrX_122745048_f	0	+
+chrX	152648964	152649196	CCDS14733.1_cds_0_0_chrX_152648965_r	0	-
+chrX	152691446	152691471	CCDS14735.1_cds_0_0_chrX_152691447_f	0	+
+chrX	152694029	152694263	CCDS14736.1_cds_0_0_chrX_152694030_r	0	-
diff --git a/test-data/1.bed.spaces b/test-data/1.bed.spaces
new file mode 100644
index 0000000..164b75b
--- /dev/null
+++ b/test-data/1.bed.spaces
@@ -0,0 +1,65 @@
+chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
+chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
+chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
+chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
+chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
+chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
+chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
+chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
+chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
+chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
+chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
+chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
+chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
+chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
+chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
+chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
+chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
+chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
+chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
+chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
+chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
+chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
+chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
+chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
+chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
+chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
+chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
+chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
+chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
+chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
+chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
+chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
+chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
+chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
+chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
+chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
+chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
+chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
+chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
+chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
+chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
+chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
+chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
+chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
+chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
+chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
+chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
+chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
+chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
+chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
+chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
+chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
+chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
+chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
+chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
+chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
+chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
+chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
+chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
+chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
+chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
+chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
+chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
+chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
+chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
diff --git a/test-data/1.bedgraph b/test-data/1.bedgraph
new file mode 100644
index 0000000..30a4bfc
--- /dev/null
+++ b/test-data/1.bedgraph
@@ -0,0 +1,30 @@
+chr1	0	14361	0
+chr1	14361	14829	1
+chr1	14829	14969	0
+chr1	14969	15038	1
+chr1	15038	15795	0
+chr1	15795	15947	1
+chr1	15947	16606	0
+chr1	16606	16765	1
+chr1	16765	16857	0
+chr1	16857	17055	1
+chr1	17055	17232	0
+chr1	17232	17368	1
+chr1	17368	17605	0
+chr1	17605	17742	1
+chr1	17742	17914	0
+chr1	17914	18061	1
+chr1	18061	18267	0
+chr1	18267	18366	1
+chr1	18366	24737	0
+chr1	24737	24891	1
+chr1	24891	29320	0
+chr1	29320	29370	1
+chr1	29370	34610	0
+chr1	34610	35174	2
+chr1	35174	35276	0
+chr1	35276	35481	2
+chr1	35481	35720	0
+chr1	35720	36081	2
+chr1	36081	69090	0
+chr1	69090	69093	3
diff --git a/test-data/1.bigbed b/test-data/1.bigbed
new file mode 100644
index 0000000..2b72916
Binary files /dev/null and b/test-data/1.bigbed differ
diff --git a/test-data/1.bigwig b/test-data/1.bigwig
new file mode 100644
index 0000000..35fbd99
Binary files /dev/null and b/test-data/1.bigwig differ
diff --git a/test-data/1.customtrack b/test-data/1.customtrack
new file mode 100644
index 0000000..a6f704c
--- /dev/null
+++ b/test-data/1.customtrack
@@ -0,0 +1,3 @@
+track name="User Track" description="User Supplied Track (from Galaxy)" color=0,0,0 visibility=1
+chr7	127475281	127491632	NM_000230	0	+	127486022	127488767	0	3	29,172,3225,	0,10713,13126,
+chr7	127486011	127488900	D49487	0	+	127486022	127488767	0	2	155,490,	0,2399,
diff --git a/test-data/1.fasta b/test-data/1.fasta
new file mode 100644
index 0000000..c9ecbb6
--- /dev/null
+++ b/test-data/1.fasta
@@ -0,0 +1,2 @@
+>hg17
+gtttgccatcttttgctgctctagggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTAAAACTTCCC-------------------------------AAATACT-GCCACTGATGTCCTG-----ATGGAGGTA-------TGAA-------------------AACATCCACTAAAATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGGGAGATATTTGGggaaatt---ttgtatagactagctttca [...]
diff --git a/test-data/1.fastq b/test-data/1.fastq
new file mode 100644
index 0000000..01ddd7b
--- /dev/null
+++ b/test-data/1.fastq
@@ -0,0 +1,8 @@
+@HANNIBAL_1_FC302VTAAXX:2:1:228:167
+GAATTGATCAGGACATAGGACAACTGTAGGCACCAT
++HANNIBAL_1_FC302VTAAXX:2:1:228:167
+40 40 40 40 35 40 40 40 25 40 40 26 40 9 33 11 40 35 17 40 40 33 40 7 9 15 3 22 15 30 11 17 9 4 9 4
+@HANNIBAL_1_FC302VTAAXX:2:1:156:340
+GAGTTCTCGTCGCCTGTAGGCACCATCAATCGTATG
++HANNIBAL_1_FC302VTAAXX:2:1:156:340
+40 15 40 17 6 36 40 40 40 25 40 9 35 33 40 14 14 18 15 17 19 28 31 4 24 18 27 14 15 18 2 8 12 8 11 9
\ No newline at end of file
diff --git a/test-data/1.fastqsanger b/test-data/1.fastqsanger
new file mode 100644
index 0000000..3a1f790
--- /dev/null
+++ b/test-data/1.fastqsanger
@@ -0,0 +1,8 @@
+@1831_573_1004/1
+AATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
++
+><C&&9952+C>5<.?<79,=42<292:<(9/-7
+@1831_573_1050/1
+TTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
++
+;@@17?@=>7??@A8?==@4A?A4)&+.'&+'1,
\ No newline at end of file
diff --git a/test-data/1.fastqsolexa b/test-data/1.fastqsolexa
new file mode 100644
index 0000000..01ddd7b
--- /dev/null
+++ b/test-data/1.fastqsolexa
@@ -0,0 +1,8 @@
+@HANNIBAL_1_FC302VTAAXX:2:1:228:167
+GAATTGATCAGGACATAGGACAACTGTAGGCACCAT
++HANNIBAL_1_FC302VTAAXX:2:1:228:167
+40 40 40 40 35 40 40 40 25 40 40 26 40 9 33 11 40 35 17 40 40 33 40 7 9 15 3 22 15 30 11 17 9 4 9 4
+@HANNIBAL_1_FC302VTAAXX:2:1:156:340
+GAGTTCTCGTCGCCTGTAGGCACCATCAATCGTATG
++HANNIBAL_1_FC302VTAAXX:2:1:156:340
+40 15 40 17 6 36 40 40 40 25 40 9 35 33 40 14 14 18 15 17 19 28 31 4 24 18 27 14 15 18 2 8 12 8 11 9
\ No newline at end of file
diff --git a/test-data/1.interval b/test-data/1.interval
new file mode 100644
index 0000000..3e6c671
--- /dev/null
+++ b/test-data/1.interval
@@ -0,0 +1,5 @@
+chr1	4348187	4348589	3.70	4.90	2.55	0.24	0.46
+chr1	4488177	4488442	4.03	5.77	1.92	-0.67	0.81
+chr1	4774091	4774440	8.07	8.33	7.82	0.85	-0.40
+chr1	4800122	4800409	6.40	7.35	5.44	1.19	-0.42
+chr1	4878925	4879277	2.18	0.28	4.93	-0.96	1.24
diff --git a/test-data/1.lav b/test-data/1.lav
new file mode 100644
index 0000000..da87099
--- /dev/null
+++ b/test-data/1.lav
@@ -0,0 +1,178 @@
+#:lav
+d {
+  "blastz out.seq1m seq2data Y=3400 H=0 W=8 B=2 K=3000 C=0 m=83886080 P=0
+     A    C    G    T
+    91 -114  -31 -123
+  -114  100 -125  -31
+   -31 -125  100 -114
+  -123  -31 -114   91
+  O = 400, E = 30, K = 3000, L = 3000, M = 50"
+}
+#:lav
+s {
+  "out.seq1m" 1 1680 0 1
+  "seq2data" 1 1680 0 1
+}
+h {
+   ">seqmask seq1data out.repeats"
+   ">seq2"
+}
+a {
+  s 161304
+  b 1 1
+  e 1680 1680
+  l 1 1 1680 1680 100
+}
+a {
+  s 15173
+  b 186 251
+  e 450 505
+  l 186 251 244 309 92
+  l 256 310 280 334 84
+  l 281 336 307 362 78
+  l 308 374 329 395 91
+  l 341 396 367 422 81
+  l 369 423 419 473 94
+  l 422 474 425 477 75
+  l 426 481 450 505 96
+}
+a {
+  s 12134
+  b 191 343
+  e 361 503
+  l 191 343 215 367 80
+  l 216 369 244 397 86
+  l 256 398 361 503 95
+}
+a {
+  s 4422
+  b 192 431
+  e 276 505
+  l 192 431 212 451 100
+  l 223 452 244 473 95
+  l 247 474 254 481 63
+  l 255 484 276 505 91
+}
+a {
+  s 6872
+  b 192 311
+  e 330 450
+  l 192 311 215 334 83
+  l 216 336 275 395 73
+  l 286 396 307 417 91
+  l 308 428 330 450 96
+}
+a {
+  s 14437
+  b 192 224
+  e 472 505
+  l 192 224 223 255 84
+  l 224 257 255 288 84
+  l 257 289 277 309 90
+  l 288 310 308 330 90
+  l 309 342 362 395 85
+  l 374 396 395 417 91
+  l 396 428 417 449 95
+  l 428 450 449 471 95
+  l 450 483 472 505 91
+}
+a {
+  s 7306
+  b 192 399
+  e 308 505
+  l 192 399 244 451 94
+  l 256 452 277 473 91
+  l 280 474 283 477 100
+  l 284 481 308 505 92
+}
+a {
+  s 11180
+  b 193 290
+  e 396 505
+  l 193 290 212 309 100
+  l 223 310 280 367 76
+  l 281 369 332 420 88
+  l 333 422 335 424 100
+  l 336 429 341 434 83
+  l 342 451 396 505 87
+}
+a {
+  s 14437
+  b 224 192
+  e 505 472
+  l 224 192 255 223 84
+  l 257 224 288 255 84
+  l 289 257 309 277 90
+  l 310 288 330 308 90
+  l 342 309 395 362 85
+  l 396 374 417 395 91
+  l 428 396 449 417 95
+  l 450 428 471 449 95
+  l 483 450 505 472 91
+}
+a {
+  s 15173
+  b 251 186
+  e 505 450
+  l 251 186 309 244 92
+  l 310 256 334 280 84
+  l 336 281 362 307 78
+  l 374 308 395 329 91
+  l 396 341 422 367 81
+  l 423 369 473 419 94
+  l 474 422 477 425 75
+  l 481 426 505 450 96
+}
+a {
+  s 11180
+  b 290 193
+  e 505 396
+  l 290 193 309 212 100
+  l 310 223 367 280 76
+  l 369 281 420 332 88
+  l 422 333 424 335 100
+  l 429 336 434 341 83
+  l 451 342 505 396 87
+}
+a {
+  s 6872
+  b 311 192
+  e 450 330
+  l 311 192 334 215 83
+  l 336 216 395 275 73
+  l 396 286 417 307 91
+  l 428 308 450 330 96
+}
+a {
+  s 12134
+  b 343 191
+  e 503 361
+  l 343 191 367 215 80
+  l 369 216 397 244 86
+  l 398 256 503 361 95
+}
+a {
+  s 7306
+  b 399 192
+  e 505 308
+  l 399 192 451 244 94
+  l 452 256 473 277 91
+  l 474 280 477 283 100
+  l 481 284 505 308 92
+}
+a {
+  s 4422
+  b 431 192
+  e 505 276
+  l 431 192 451 212 100
+  l 452 223 473 244 95
+  l 474 247 481 254 63
+  l 484 255 505 276 91
+}
+x {
+  n 0
+}
+m {
+  n 0
+}
+#:eof
diff --git a/test-data/1.pileup b/test-data/1.pileup
new file mode 100644
index 0000000..df6fdf7
--- /dev/null
+++ b/test-data/1.pileup
@@ -0,0 +1,1000 @@
+chrM	42	C	1	^:.	I
+chrM	43	C	2	.^:.	II
+chrM	44	T	2	..	II
+chrM	45	A	3	..^:.	III
+chrM	46	G	4	...^:.	IIII
+chrM	47	A	5	....^:,	IIIII
+chrM	48	T	5	....,	IIIII
+chrM	49	G	5	....,	IIIII
+chrM	50	A	5	....,	IIIII
+chrM	51	G	5	....,	IIIII
+chrM	52	T	5	....,	IIIII
+chrM	53	A	5	....,	IIIII
+chrM	54	T	5	....,	IIIII
+chrM	55	T	5	....,	IIIII
+chrM	56	C	5	....,	IIIII
+chrM	57	T	5	....,	IIIII
+chrM	58	T	5	....,	IIIII
+chrM	59	A	5	....,	IIIII
+chrM	60	C	5	....,	IIIII
+chrM	61	T	5	....,	IIIII
+chrM	62	C	5	....,	IIIII
+chrM	63	C	5	....,	IIIII
+chrM	64	A	5	....,	IIIII
+chrM	65	T	5	....,	IIIII
+chrM	66	A	5	....,	IIIII
+chrM	67	A	5	....,	IIIII
+chrM	68	A	5	....,	IIICI
+chrM	69	C	5	....,	IIIII
+chrM	70	A	5	....,	IIIII
+chrM	71	C	5	....,	IIIII
+chrM	72	A	5	....,	IAIII
+chrM	73	T	5	....,	IIIII
+chrM	74	A	5	....,	IIIII
+chrM	75	G	5	....,	%IIII
+chrM	76	G	5	T...,	*IIII
+chrM	77	C	5	.$...,	GIIII
+chrM	78	T	4	.$..,	IIII
+chrM	79	T	3	..,	III
+chrM	80	G	3	.$.,	I1I
+chrM	81	G	2	.$,	II
+chrM	82	T	1	,$	I
+chrM	83	C	2	^:.^:.	II
+chrM	84	C	2	..	II
+chrM	85	T	2	..	II
+chrM	86	A	2	..	II
+chrM	87	G	2	..	II
+chrM	88	C	2	..	II
+chrM	89	C	2	..	II
+chrM	90	T	2	..	II
+chrM	91	T	2	..	II
+chrM	92	T	2	..	II
+chrM	93	T	2	..	II
+chrM	94	T	2	..	II
+chrM	95	A	2	..	II
+chrM	96	T	2	..	II
+chrM	97	T	2	..	II
+chrM	98	A	2	..	II
+chrM	99	G	2	..	II
+chrM	100	T	2	..	II
+chrM	101	T	2	..	II
+chrM	102	A	2	..	II
+chrM	103	T	2	..	II
+chrM	104	T	2	..	II
+chrM	105	A	2	..	IE
+chrM	106	A	2	..	II
+chrM	107	T	2	..	II
+chrM	108	A	2	..	II
+chrM	109	G	2	..	II
+chrM	110	A	2	..	II
+chrM	111	A	2	..	I2
+chrM	112	T	2	..	II
+chrM	113	T	2	..	II
+chrM	114	A	2	..	H7
+chrM	115	C	2	..	II
+chrM	116	A	2	..	II
+chrM	117	C	2	..	II
+chrM	118	A	2	.$.$	F8
+chrM	156	T	1	^:,	&
+chrM	157	C	1	,	I
+chrM	158	A	2	g^:g	I>
+chrM	159	C	2	,,	/F
+chrM	160	G	2	,,	II
+chrM	161	T	2	,,	I>
+chrM	162	C	2	,,	.-
+chrM	163	T	2	,,	I8
+chrM	164	C	2	,,	4F
+chrM	165	T	2	,,	II
+chrM	166	A	2	,,	II
+chrM	167	C	2	,,	;6
+chrM	168	G	2	,,	II
+chrM	169	A	2	,,	II
+chrM	170	T	2	,,	II
+chrM	171	T	2	,,	IB
+chrM	172	A	2	,,	II
+chrM	173	A	2	,,	II
+chrM	174	A	2	,,	II
+chrM	175	A	2	,,	II
+chrM	176	G	2	,,	II
+chrM	177	G	2	,,	II
+chrM	178	A	2	,,	II
+chrM	179	G	2	,,	II
+chrM	180	C	2	,,	II
+chrM	181	A	3	,,^:,	III
+chrM	182	G	3	,,,	III
+chrM	183	G	3	,,,	III
+chrM	184	T	3	,,,	III
+chrM	185	A	3	,,,	III
+chrM	186	T	3	,,,	III
+chrM	187	C	3	,,,	III
+chrM	188	A	3	,,,	III
+chrM	189	A	3	,,,	III
+chrM	190	G	3	,,,	III
+chrM	191	C	3	,$,,	III
+chrM	192	A	2	,,	II
+chrM	193	C	2	,$,	II
+chrM	194	A	1	,	I
+chrM	195	C	1	,	I
+chrM	196	T	1	,	F
+chrM	197	A	1	,	I
+chrM	198	G	1	,	I
+chrM	199	A	1	,	I
+chrM	200	A	1	,	I
+chrM	201	A	2	,^:.	I-
+chrM	202	G	2	,.	II
+chrM	203	T	2	,.	II
+chrM	204	A	2	,.	II
+chrM	205	G	2	,.	I6
+chrM	206	C	2	,.	II
+chrM	207	T	2	,.	I,
+chrM	208	C	2	,.	II
+chrM	209	A	2	,.	II
+chrM	210	T	2	,.	II
+chrM	211	A	2	,.	II
+chrM	212	A	2	,.	IE
+chrM	213	C	2	,.	IC
+chrM	214	A	2	,.	I;
+chrM	215	C	2	,.	II
+chrM	216	C	2	,$.	II
+chrM	217	T	1	.	I
+chrM	218	T	1	.	I
+chrM	219	G	1	T	I
+chrM	220	C	1	.	I
+chrM	221	T	1	.	I
+chrM	222	C	1	.	E
+chrM	223	A	1	.	4
+chrM	224	G	1	T	9
+chrM	225	C	1	.	:
+chrM	226	C	1	.	?
+chrM	227	A	1	.	.
+chrM	228	C	1	.	C
+chrM	229	A	3	.^:,^:,	%II
+chrM	230	C	3	.,,	8II
+chrM	231	C	3	.,,	9II
+chrM	232	C	3	.,,	III
+chrM	233	C	3	.,,	AI'
+chrM	234	C	3	.,,	DI+
+chrM	235	A	3	.,,	&I@
+chrM	236	C	3	.$,a	+I$
+chrM	237	G	2	,,	II
+chrM	238	G	2	,,	II
+chrM	239	G	2	,,	II
+chrM	240	A	2	,,	II
+chrM	241	C	2	,,	I+
+chrM	242	A	2	,,	II
+chrM	243	C	2	,,	II
+chrM	244	A	3	,,^:,	II;
+chrM	245	G	3	,,,	II*
+chrM	246	C	3	,,n	II"
+chrM	247	A	3	,,,	III
+chrM	248	G	4	,,,^:.	IIII
+chrM	249	T	4	,,,.	II)I
+chrM	250	G	4	,,,.	II2I
+chrM	251	A	4	,,,.	II(I
+chrM	252	T	4	,,,.	II*I
+chrM	253	A	4	,,,.	II7I
+chrM	254	A	4	,,,.	II1I
+chrM	255	A	4	,,,.	II?I
+chrM	256	A	4	,,,.	II9I
+chrM	257	A	4	,,,.	II?I
+chrM	258	T	4	,,,.	II$I
+chrM	259	T	5	,,,.^:.	II&II
+chrM	260	A	5	,,,..	II,II
+chrM	261	A	5	,,,..	II/II
+chrM	262	G	5	,,,..	II5II
+chrM	263	C	5	,,a..	II%II
+chrM	264	T	5	,$,$,..	II%II
+chrM	265	A	3	,..	)II
+chrM	266	T	3	,..	+II
+chrM	267	G	4	,..^:.	*III
+chrM	268	A	4	,...	EIII
+chrM	269	A	4	,...	=III
+chrM	270	C	4	a...	;III
+chrM	271	G	4	,...	;III
+chrM	272	A	4	,...	8@II
+chrM	273	A	4	,...	1III
+chrM	274	A	4	,...	ICII
+chrM	275	G	4	,...	IIII
+chrM	276	T	4	,...	IIII
+chrM	277	T	4	,...	IIII
+chrM	278	C	4	,...	IIII
+chrM	279	G	4	,$...	IIII
+chrM	280	A	3	...	III
+chrM	281	C	3	...	GII
+chrM	282	T	3	...	III
+chrM	283	A	3	.$..	IFI
+chrM	284	A	3	..^:,	IAI
+chrM	285	G	3	..,	;II
+chrM	286	T	4	..,^:.	IIII
+chrM	287	C	4	..,.	II4I
+chrM	288	A	4	..,.	@III
+chrM	289	T	4	..,.	IIII
+chrM	290	A	4	..,.	@:II
+chrM	291	T	4	..,.	IIAI
+chrM	292	T	4	..,.	IIII
+chrM	293	A	4	..,.	8;II
+chrM	294	A	4	.$.,.	I<II
+chrM	295	A	3	.,.	4II
+chrM	296	T	3	.,.	III
+chrM	297	A	3	.,.	BII
+chrM	298	A	3	.,.	CII
+chrM	299	G	3	.,.	III
+chrM	300	G	3	.,.	;II
+chrM	301	G	3	.,.	III
+chrM	302	T	3	.$,.	III
+chrM	303	T	2	,.	II
+chrM	304	G	2	,.	II
+chrM	305	G	2	,.	II
+chrM	306	T	2	,.	II
+chrM	307	A	2	,.	II
+chrM	308	A	2	,.	II
+chrM	309	A	2	,.	II
+chrM	310	T	2	,.	II
+chrM	311	T	2	,.	II
+chrM	312	T	3	,.^:.	III
+chrM	313	C	3	,..	III
+chrM	314	G	3	,..	III
+chrM	315	T	3	,..	III
+chrM	316	G	3	,..	III
+chrM	317	C	3	,..	III
+chrM	318	C	3	,..	III
+chrM	319	A	3	,$..	III
+chrM	320	G	2	..	II
+chrM	321	C	2	.$.	II
+chrM	322	C	1	.	I
+chrM	323	A	1	.	I
+chrM	324	C	1	.	I
+chrM	325	C	1	.	I
+chrM	326	G	1	.	I
+chrM	327	C	1	.	I
+chrM	328	G	1	.	I
+chrM	329	G	1	.	I
+chrM	330	T	1	.	I
+chrM	331	C	1	.	I
+chrM	332	A	1	.	I
+chrM	333	T	1	.	I
+chrM	334	A	1	.	I
+chrM	335	C	1	.	I
+chrM	336	G	1	.	I
+chrM	337	A	1	.	I
+chrM	338	T	1	.	I
+chrM	339	T	1	.	I
+chrM	340	A	1	.	I
+chrM	341	A	1	.	D
+chrM	342	C	1	.	I
+chrM	343	C	1	.	I
+chrM	344	C	1	.	I
+chrM	345	A	1	.	I
+chrM	346	A	1	.	;
+chrM	347	A	1	.$	7
+chrM	360	C	1	^:.	I
+chrM	361	G	1	.	I
+chrM	362	G	1	.	I
+chrM	363	C	1	.	I
+chrM	364	G	1	.	I
+chrM	365	T	1	.	I
+chrM	366	A	1	.	I
+chrM	367	A	1	.	I
+chrM	368	A	1	.	I
+chrM	369	G	1	.	I
+chrM	370	C	1	.	I
+chrM	371	G	1	.	I
+chrM	372	T	1	.	I
+chrM	373	G	1	.	I
+chrM	374	T	1	.	I
+chrM	375	C	1	.	I
+chrM	376	A	1	.	I
+chrM	377	A	1	.	I
+chrM	378	A	1	.	I
+chrM	379	G	1	.	I
+chrM	380	A	1	.	I
+chrM	381	C	1	T	I
+chrM	382	T	1	.	I
+chrM	383	A	1	.	I
+chrM	384	A	1	.	I
+chrM	385	T	1	.	I
+chrM	386	A	1	G	I
+chrM	387	C	1	.	I
+chrM	388	C	1	.	I
+chrM	389	A	2	.^:.	II
+chrM	390	A	3	..^:.	III
+chrM	391	A	3	...	>II
+chrM	392	A	3	...	III
+chrM	393	T	3	...	III
+chrM	394	A	3	...	III
+chrM	395	A	3	.$..	III
+chrM	396	A	2	..	II
+chrM	397	G	2	..	II
+chrM	398	T	2	..	EI
+chrM	399	T	2	..	II
+chrM	400	A	3	..^:.	III
+chrM	401	A	3	...	III
+chrM	402	A	3	...	III
+chrM	403	A	3	...	III
+chrM	404	C	3	...	III
+chrM	405	C	3	...	III
+chrM	406	C	3	...	EII
+chrM	407	A	3	...	III
+chrM	408	G	3	...	III
+chrM	409	T	3	...	0II
+chrM	410	T	3	...	III
+chrM	411	A	4	...^:,	IIII
+chrM	412	A	4	...,	FIII
+chrM	413	G	4	...,	IIIH
+chrM	414	C	4	...a	III2
+chrM	415	C	4	TTTt	III7
+chrM	416	G	5	...,^:,	II?7:
+chrM	417	T	5	...,,	;IIE@
+chrM	418	A	5	...,,	IIIII
+chrM	419	A	5	...,,	IIIII
+chrM	420	A	5	...,,	FIIII
+chrM	421	A	5	...,,	IIIII
+chrM	422	A	5	...,,	>IIII
+chrM	423	G	6	...,,^:,	HII/I,
+chrM	424	C	6	.$..a,,	;II-I:
+chrM	425	T	5	.$.,,,	IIIIF
+chrM	426	A	5	.,,,^:,	III@I
+chrM	427	C	5	.,,,,	III$I
+chrM	428	A	5	.,,,,	IIIII
+chrM	429	A	5	.,,,,	IIII.
+chrM	430	C	5	.,,,a	I%I5'
+chrM	431	C	5	.,,,,	I(I5<
+chrM	432	A	5	.,,,,	IIIII
+chrM	433	A	5	.,,,,	0IIII
+chrM	434	A	5	.,,,,	=IIII
+chrM	435	G	5	.$,,,,	EIIII
+chrM	436	T	4	,,,,	III5
+chrM	437	A	4	,,,,	IIII
+chrM	438	A	4	,,,,	IIII
+chrM	439	A	4	,,,,	IIII
+chrM	440	A	4	,,,,	IIIF
+chrM	441	T	6	,,,,^:.^:.	III;II
+chrM	442	A	6	,,,,..	IIIIII
+chrM	443	G	6	,,,,..	IIIIII
+chrM	444	A	6	,,,,..	IIIIII
+chrM	445	C	6	,,,,..	IIIIII
+chrM	446	T	6	,$,,,..	IIIIII
+chrM	447	A	5	,,,..	IIIII
+chrM	448	C	5	,,,..	IIIII
+chrM	449	G	6	,,,..^:,	IIIII6
+chrM	450	A	6	,,,..,	IIIIII
+chrM	451	A	6	,$,,..,	IIIIII
+chrM	452	A	5	,,..,	IIIII
+chrM	453	G	5	,,..,	IIIII
+chrM	454	T	6	,,..,^:,	IIIIII
+chrM	455	G	6	,,..,,	IIIIII
+chrM	456	A	6	,,..,,	IIIIII
+chrM	457	C	6	,,..,,	IIIII=
+chrM	458	T	6	,$,..,,	IIIIII
+chrM	459	T	5	,..,,	IIIII
+chrM	460	T	5	,..,,	IIIII
+chrM	461	A	5	,$..,,	IIIII
+chrM	462	A	4	..,,	IIII
+chrM	463	T	4	..,,	IIIC
+chrM	464	A	4	..,,	IIII
+chrM	465	C	4	..,,	IIII
+chrM	466	C	4	..,,	IIII
+chrM	467	T	4	..,,	II>?
+chrM	468	C	4	..,,	IIII
+chrM	469	T	4	..,,	IIIG
+chrM	470	G	4	..,,	%III
+chrM	471	A	4	..,,	4;II
+chrM	472	C	4	..,,	II3I
+chrM	473	T	4	..,,	IIII
+chrM	474	A	4	..,,	;III
+chrM	475	C	4	..,,	IIII
+chrM	476	A	4	.$.$,,	32II
+chrM	477	C	2	,,	II
+chrM	478	G	2	,,	II
+chrM	479	A	2	,,	II
+chrM	480	T	3	,,^:.	III
+chrM	481	A	3	,,.	III
+chrM	482	G	3	,,.	III
+chrM	483	C	4	,,.^:,	IIIE
+chrM	484	T	4	,$,.,	III9
+chrM	485	A	3	,.,	III
+chrM	486	A	3	,.,	III
+chrM	487	G	3	,.,	III
+chrM	488	A	3	,.,	III
+chrM	489	C	3	,$.,	III
+chrM	490	C	2	.,	II
+chrM	491	C	2	.,	II
+chrM	492	A	2	.,	II
+chrM	493	A	2	.,	II
+chrM	494	A	2	.,	II
+chrM	495	C	2	.,	II
+chrM	496	T	2	.,	I2
+chrM	497	G	2	.,	II
+chrM	498	G	2	.,	II
+chrM	499	G	2	.,	II
+chrM	500	A	2	.,	II
+chrM	501	T	2	.,	II
+chrM	502	T	2	.,	II
+chrM	503	A	2	.,	II
+chrM	504	G	2	.,	II
+chrM	505	A	2	.,	GI
+chrM	506	T	2	.,	II
+chrM	507	A	2	.,	II
+chrM	508	C	2	.,	II
+chrM	509	C	2	.,	II
+chrM	510	C	3	.,^:.	III
+chrM	511	C	3	.,.	III
+chrM	512	A	3	.,.	6II
+chrM	513	C	3	.,.	III
+chrM	514	T	3	.,.	III
+chrM	515	A	3	.$,.	III
+chrM	516	T	2	,.	II
+chrM	517	G	2	,.	II
+chrM	518	C	3	,$.^:.	III
+chrM	519	T	2	..	II
+chrM	520	T	3	..^:,	III
+chrM	521	A	3	..,	III
+chrM	522	G	3	..,	II,
+chrM	523	C	3	..,	III
+chrM	524	C	3	..,	II7
+chrM	525	C	3	..,	III
+chrM	526	T	3	..,	II?
+chrM	527	A	3	..,	III
+chrM	528	A	3	..,	III
+chrM	529	A	3	..,	FII
+chrM	530	C	3	..,	II+
+chrM	531	T	3	..,	III
+chrM	532	A	3	..,	:II
+chrM	533	A	3	..,	DGI
+chrM	534	A	3	..,	III
+chrM	535	A	3	..,	?CI
+chrM	536	T	3	..,	III
+chrM	537	A	3	..,	9II
+chrM	538	G	3	..,	III
+chrM	539	C	3	..,	III
+chrM	540	T	3	..,	III
+chrM	541	T	3	..,	III
+chrM	542	A	4	..,^:.	IIII
+chrM	543	C	4	..,.	IIII
+chrM	544	C	4	..,.	IIII
+chrM	545	A	4	N$.,.	"III
+chrM	546	C	3	.,.	III
+chrM	547	A	3	.,.	DII
+chrM	548	A	3	.,.	EII
+chrM	549	C	3	.,.	III
+chrM	550	A	3	.,.	III
+chrM	551	A	3	.,.	6II
+chrM	552	A	3	.,.	GII
+chrM	553	G	3	.$,.	?II
+chrM	554	C	2	,.	I<
+chrM	555	T	2	,$.	II
+chrM	556	A	2	.^:.	II
+chrM	557	T	2	..	II
+chrM	558	T	2	..	II
+chrM	559	C	2	..	II
+chrM	560	G	2	..	II
+chrM	561	C	2	..	II
+chrM	562	C	2	..	II
+chrM	563	A	2	..	CI
+chrM	564	G	2	..	II
+chrM	565	A	2	..	GI
+chrM	566	G	2	..	II
+chrM	567	T	2	..	/I
+chrM	568	A	2	..	DI
+chrM	569	C	2	..	II
+chrM	570	T	2	..	II
+chrM	571	A	2	..	II
+chrM	572	C	2	..	FI
+chrM	573	T	2	..	9I
+chrM	574	A	2	..	II
+chrM	575	G	2	..	II
+chrM	576	C	2	..	II
+chrM	577	A	3	.$.^:.	III
+chrM	578	A	2	..	II
+chrM	579	C	2	..	II
+chrM	580	A	3	..^:,	III
+chrM	581	G	3	..,	III
+chrM	582	C	3	..,	II?
+chrM	583	C	3	..a	II-
+chrM	584	T	3	..,	IIH
+chrM	585	A	3	..,	III
+chrM	586	A	3	..,	III
+chrM	587	A	4	..,^:,	IIII
+chrM	588	A	4	..,,	IIII
+chrM	589	C	4	..,,	IIII
+chrM	590	T	4	..,,	II.5
+chrM	591	C	4	.$.,,	II<I
+chrM	592	A	3	.,,	III
+chrM	593	A	3	.,,	III
+chrM	594	A	3	.,,	III
+chrM	595	G	3	.,,	III
+chrM	596	G	3	.,,	III
+chrM	597	A	3	.,,	:EI
+chrM	598	C	3	.,,	III
+chrM	599	T	4	.,,^:.	II@I
+chrM	600	T	4	.,,.	IIII
+chrM	601	G	4	.,,.	IIII
+chrM	602	G	4	.,,.	IIII
+chrM	603	C	4	.,,.	IIII
+chrM	604	G	4	.,,.	IIII
+chrM	605	G	4	.,,.	IIII
+chrM	606	T	4	.,,.	5III
+chrM	607	G	4	.,,.	IIII
+chrM	608	C	4	.,,.	*III
+chrM	609	T	5	.,,.^:,	?IIII
+chrM	610	T	5	.,,.,	IIII8
+chrM	611	T	5	.,,.,	IIIII
+chrM	612	A	5	T$,,.,	3IIII
+chrM	613	C	4	,,.,	III)
+chrM	614	A	4	,,.,	IIII
+chrM	615	T	4	,$,.,	IIII
+chrM	616	C	3	,.,	III
+chrM	617	C	3	,.,	III
+chrM	618	C	3	,.,	III
+chrM	619	T	3	,.,	IF9
+chrM	620	C	3	,.,	II7
+chrM	621	T	3	,.,	III
+chrM	622	A	3	,$.,	I7I
+chrM	623	G	2	.,	II
+chrM	624	A	2	.,	9I
+chrM	625	G	2	.,	II
+chrM	626	G	2	.,	II
+chrM	627	A	2	.,	2I
+chrM	628	G	2	.,	II
+chrM	629	C	2	.,	II
+chrM	630	C	2	.,	II
+chrM	631	T	2	.,	II
+chrM	632	G	2	.,	II
+chrM	633	T	2	.,	II
+chrM	634	T	2	.$,	II
+chrM	635	C	1	,	I
+chrM	636	C	1	,	I
+chrM	637	A	1	,	I
+chrM	638	T	1	,	I
+chrM	639	A	1	,	I
+chrM	640	A	1	,	I
+chrM	641	T	1	,	I
+chrM	642	C	1	,	I
+chrM	643	G	1	,	I
+chrM	644	A	1	,$	I
+chrM	646	A	1	^:.	I
+chrM	647	A	1	.	I
+chrM	648	A	1	.	I
+chrM	649	C	2	.^:,	II
+chrM	650	C	2	.,	II
+chrM	651	C	2	.,	II
+chrM	652	C	3	.,^:,	II)
+chrM	653	G	3	.,,	II2
+chrM	654	A	3	.,,	III
+chrM	655	T	4	.,,^:.	II*I
+chrM	656	A	4	.,,.	IIII
+chrM	657	A	4	.,,.	IIII
+chrM	658	A	4	.,,.	IIII
+chrM	659	C	4	.,,.	IIII
+chrM	660	C	4	.,,.	IIII
+chrM	661	C	4	.,,.	IIII
+chrM	662	C	4	.,,.	IIFI
+chrM	663	A	4	.,,.	=IDI
+chrM	664	C	4	.,,.	II6I
+chrM	665	C	5	.,,.^:,	IIIII
+chrM	666	A	5	.,,.,	BIIII
+chrM	667	T	5	.,,.,	II5I+
+chrM	668	C	5	.,,.,	IIIII
+chrM	669	C	5	.,,.,	IIIII
+chrM	670	C	5	.,a.,	II.II
+chrM	671	T	5	.,,.,	IIIIA
+chrM	672	T	6	.,,.,^:.	FI9I.I
+chrM	673	G	6	.,,.,.	IIIIII
+chrM	674	C	6	.,,.,.	IIIIII
+chrM	675	T	6	.,,.,.	IIIIII
+chrM	676	A	6	.,,.,.	,IIEII
+chrM	677	A	7	.,,.,.^:.	3II;III
+chrM	678	T	7	.,,.,..	IIIIDII
+chrM	679	T	7	.,,.,..	CIIIIII
+chrM	680	C	7	.,,.,..	IIIIIII
+chrM	681	A	7	.$,,.,..	9IIIIII
+chrM	682	G	6	,,.,..	II$III
+chrM	683	C	6	,,.,..	IIIIII
+chrM	684	C	6	,$,.,..	IIIIII
+chrM	685	T	5	,.,..	IIIII
+chrM	686	A	5	,.,..	IIIII
+chrM	687	T	5	,$.,..	IIIII
+chrM	688	A	4	.,..	IIII
+chrM	689	T	4	.,..	IIII
+chrM	690	A	4	.$,..	IIII
+chrM	691	C	3	,..	III
+chrM	692	C	3	,..	III
+chrM	693	G	4	,..^:,	IIII
+chrM	694	C	4	,..,	IIII
+chrM	695	C	4	,..,	IIII
+chrM	696	A	4	,..,	IIII
+chrM	697	T	4	,..,	IIII
+chrM	698	C	4	,..,	IIII
+chrM	699	T	4	,..,	IIII
+chrM	700	T	4	,$..,	IIII
+chrM	701	C	3	..,	III
+chrM	702	A	3	..,	III
+chrM	703	G	3	..,	2II
+chrM	704	C	3	..,	III
+chrM	705	A	3	N.,	"II
+chrM	706	A	3	..,	I5I
+chrM	707	A	3	.$.,	GII
+chrM	708	C	2	.,	II
+chrM	709	C	2	.,	II
+chrM	710	C	2	.,	II
+chrM	711	T	2	.,	II
+chrM	712	A	2	.$,	II
+chrM	713	A	1	,	I
+chrM	714	A	1	,	I
+chrM	715	C	1	,	I
+chrM	716	A	1	,	I
+chrM	717	A	1	,	I
+chrM	718	G	1	,	I
+chrM	719	G	2	,^:,	I&
+chrM	720	T	2	,g	I$
+chrM	721	A	2	,,	I3
+chrM	722	C	2	,,	I$
+chrM	723	C	2	,t	I&
+chrM	724	G	2	,,	I)
+chrM	725	A	2	,,	II
+chrM	726	A	3	,,^:.	III
+chrM	727	G	3	,,.	III
+chrM	728	T	3	,$,.	III
+chrM	729	A	2	,.	II
+chrM	730	A	2	n.	"I
+chrM	731	G	2	,.	II
+chrM	732	C	3	a.^:.	$II
+chrM	733	A	3	,..	III
+chrM	734	C	3	,..	%II
+chrM	735	A	3	,..	III
+chrM	736	A	3	,..	*II
+chrM	737	A	3	,..	III
+chrM	738	T	3	,..	III
+chrM	739	A	3	,..	III
+chrM	740	T	3	,..	III
+chrM	741	C	3	,..	III
+chrM	742	C	3	,..	III
+chrM	743	A	3	,..	III
+chrM	744	A	3	,..	III
+chrM	745	C	3	,..	III
+chrM	746	A	3	,..	IIE
+chrM	747	T	3	,..	III
+chrM	748	A	3	,..	III
+chrM	749	A	3	,..	III
+chrM	750	A	3	,..	III
+chrM	751	A	3	,..	IAI
+chrM	752	A	3	,..	III
+chrM	753	C	3	,..	III
+chrM	754	G	3	,$..	III
+chrM	755	T	2	..	II
+chrM	756	T	3	..^:,	III
+chrM	757	A	3	..,	III
+chrM	758	G	3	..,	III
+chrM	759	G	3	..,	III
+chrM	760	T	3	..,	=II
+chrM	761	C	3	.$.,	IID
+chrM	762	A	2	.,	=I
+chrM	763	A	2	.,	EI
+chrM	764	G	2	.,	II
+chrM	765	G	2	.,	II
+chrM	766	T	2	.,	:I
+chrM	767	G	3	.$,^:,	III
+chrM	768	T	2	,,	I>
+chrM	769	A	3	,,^:,	III
+chrM	770	G	3	,,,	III
+chrM	771	C	3	,,,	IHI
+chrM	772	C	3	,a,	I)/
+chrM	773	C	3	,,,	I7I
+chrM	774	A	4	,,,^:.	II:I
+chrM	775	T	4	,,,.	IH.I
+chrM	776	G	4	,,,.	IIII
+chrM	777	G	4	,,,.	IIII
+chrM	778	G	4	,,,.	IIII
+chrM	779	A	4	,,,.	IIAI
+chrM	780	T	4	,,,.	IIGI
+chrM	781	G	4	,,,.	IIII
+chrM	782	G	4	,,,.	IIII
+chrM	783	A	4	,,,.	IIII
+chrM	784	G	4	,,,.	IIII
+chrM	785	A	4	,,,.	IIII
+chrM	786	G	4	,,,.	IIII
+chrM	787	A	4	,,,.	IIII
+chrM	788	A	4	,,,.	IIII
+chrM	789	A	4	,,,.	IIII
+chrM	790	T	5	,,,.^:.	IIIII
+chrM	791	G	5	,$,,..	IIIII
+chrM	792	G	4	,,..	IIII
+chrM	793	G	4	,,..	IIII
+chrM	794	C	4	,,..	IIII
+chrM	795	T	4	,,..	IIII
+chrM	796	A	4	,,..	IIII
+chrM	797	C	4	,,..	IIII
+chrM	798	A	4	,,..	IIII
+chrM	799	T	4	,,..	IIII
+chrM	800	T	4	,,..	IIII
+chrM	801	T	4	,,..	IIII
+chrM	802	T	4	,$,..	IIII
+chrM	803	C	3	,..	III
+chrM	804	T	3	,$..	III
+chrM	805	A	2	..	II
+chrM	806	C	3	..^:.	III
+chrM	807	C	3	...	III
+chrM	808	C	3	...	III
+chrM	809	T	3	.$..	III
+chrM	810	A	3	..^:,	III
+chrM	811	A	3	..,	III
+chrM	812	G	3	..,	II7
+chrM	813	A	3	..,	III
+chrM	814	A	3	..,	III
+chrM	815	C	3	..,	III
+chrM	816	A	3	..,	III
+chrM	817	A	3	..,	III
+chrM	818	G	3	..,	III
+chrM	819	A	3	..,	III
+chrM	820	A	3	..,	&II
+chrM	821	C	3	..,	III
+chrM	822	T	3	..,	III
+chrM	823	T	3	..n	II"
+chrM	824	T	3	..,	III
+chrM	825	A	3	.$.,	III
+chrM	826	A	3	.,^:.	III
+chrM	827	C	3	.,.	III
+chrM	828	C	3	.,.	III
+chrM	829	C	4	.,.^:,	IIII
+chrM	830	G	4	.,.,	IIII
+chrM	831	G	4	.,.,	IIII
+chrM	832	A	4	.,.,	IIII
+chrM	833	C	4	.,.,	IIII
+chrM	834	G	4	.,.,	IIII
+chrM	835	A	4	.,.,	IIII
+chrM	836	A	4	.,.,	8III
+chrM	837	A	4	.,.,	IIII
+chrM	838	G	4	.,.,	IIII
+chrM	839	T	5	.,.,^:,	4:IIG
+chrM	840	C	5	.,.,,	IIIII
+chrM	841	T	5	.$,.,,	IIIII
+chrM	842	C	4	,.,,	IIII
+chrM	843	C	4	,.,,	IIII
+chrM	844	A	4	,.,,	IIII
+chrM	845	T	4	,$.,,	IIII
+chrM	846	G	3	.,,	@II
+chrM	847	A	3	.,,	III
+chrM	848	A	3	.,,	III
+chrM	849	A	3	.,,	III
+chrM	850	C	3	.,,	III
+chrM	851	T	3	.,,	III
+chrM	852	G	3	.,,	III
+chrM	853	G	3	.,,	III
+chrM	854	A	3	.,,	EII
+chrM	855	G	3	.,,	DII
+chrM	856	A	3	.,,	III
+chrM	857	C	3	.,,	III
+chrM	858	T	4	.,,^:,	IIIA
+chrM	859	A	4	.,,,	@III
+chrM	860	A	4	.,,,	IIII
+chrM	861	A	4	.$,,,	EIII
+chrM	862	G	3	,,,	III
+chrM	863	G	3	,,,	III
+chrM	864	A	3	,$,,	III
+chrM	865	G	2	,,	II
+chrM	866	G	2	,,	II
+chrM	867	A	2	,,	II
+chrM	868	T	2	,,	II
+chrM	869	T	2	,,	II
+chrM	870	T	2	,,	II
+chrM	871	A	2	,,	II
+chrM	872	G	2	,,	II
+chrM	873	C	2	,,	II
+chrM	874	A	2	,$,	II
+chrM	875	G	1	,	I
+chrM	876	T	1	,	I
+chrM	877	A	1	,	I
+chrM	878	A	1	,	I
+chrM	879	A	1	,	I
+chrM	880	T	1	,	I
+chrM	881	T	1	,	I
+chrM	882	A	1	,	I
+chrM	883	A	1	,	I
+chrM	884	G	1	,	I
+chrM	885	A	1	,	I
+chrM	886	A	1	,	I
+chrM	887	T	1	,	(
+chrM	888	A	1	,	I
+chrM	889	G	1	,	I
+chrM	890	A	1	,	I
+chrM	891	G	1	,	I
+chrM	892	A	1	,	I
+chrM	893	G	1	,$	I
+chrM	898	A	1	^:,	I
+chrM	899	T	2	,^:.	/I
+chrM	900	T	2	,.	7I
+chrM	901	G	2	,.	CI
+chrM	902	A	2	,.	II
+chrM	903	A	2	,.	II
+chrM	904	T	2	,.	II
+chrM	905	C	3	,.^:,	III
+chrM	906	A	3	,.,	III
+chrM	907	G	3	,.,	III
+chrM	908	G	3	,.,	III
+chrM	909	C	3	,.,	III
+chrM	910	C	3	,.,	III
+chrM	911	A	4	,.,^:,	IIII
+chrM	912	T	4	,.,,	IIEG
+chrM	913	G	4	,.,,	III:
+chrM	914	A	4	,.,,	IIII
+chrM	915	A	4	,.,,	IIII
+chrM	916	G	4	,.,,	III5
+chrM	917	C	4	,.,,	III5
+chrM	918	G	4	,.,,	IIII
+chrM	919	C	4	,.,,	III<
+chrM	920	G	4	,.,,	IIII
+chrM	921	C	4	,.,,	IIII
+chrM	922	A	4	,.,,	IIII
+chrM	923	C	4	,.,,	III8
+chrM	924	A	4	,.,,	IFII
+chrM	925	C	4	,.,,	IIII
+chrM	926	A	4	,.,,	IIII
+chrM	927	C	4	,.,,	IIII
+chrM	928	C	5	,.,,^:,	IIII:
+chrM	929	G	5	,.,,,	IIIE:
+chrM	930	C	5	,.,,,	IIIII
+chrM	931	C	5	,.,,,	IIIIF
+chrM	932	C	5	,.,,,	IIIIC
+chrM	933	G	5	,$.,,,	I?II:
+chrM	934	T	4	.$,,,	4II>
+chrM	935	C	3	,,,	III
+chrM	936	A	3	,,,	III
+chrM	937	C	3	,,,	II1
+chrM	938	C	3	,,,	III
+chrM	939	C	3	,,,	III
+chrM	940	T	3	,$,,	III
+chrM	941	C	2	,,	II
+chrM	942	C	2	,,	I'
+chrM	943	T	2	,,	II
+chrM	944	T	2	,,	II
+chrM	945	A	2	,,	II
+chrM	946	A	2	,$,	II
+chrM	947	A	1	,	I
+chrM	948	T	1	,	I
+chrM	949	A	1	,	I
+chrM	950	T	1	,	I
+chrM	951	C	1	,	I
+chrM	952	A	1	,	I
+chrM	953	C	1	,	I
+chrM	954	A	1	,	I
+chrM	955	A	2	,^:.	II
+chrM	956	A	2	,.	II
+chrM	957	T	2	,.	II
+chrM	958	C	3	,.^:.	III
+chrM	959	A	3	,..	III
+chrM	960	T	3	,..	III
+chrM	961	A	3	,..	III
+chrM	962	A	3	,..	III
+chrM	963	C	4	,$..^:.	IIII
+chrM	964	A	3	...	I(;
+chrM	965	T	3	...	III
+chrM	966	A	4	...^:.	IIII
+chrM	967	A	4	....	IIII
+chrM	968	C	4	....	IEII
+chrM	969	A	4	....	IIII
+chrM	970	T	4	....	IIII
+chrM	971	A	4	....	IIII
+chrM	972	A	4	....	IIII
+chrM	973	A	4	....	II0I
+chrM	974	A	5	....^:.	IIIII
+chrM	975	C	5	.....	IIIII
+chrM	976	C	5	.....	IIIII
+chrM	977	G	5	.....	IIIII
+chrM	978	T	5	.....	IIIII
+chrM	979	G	5	.....	I0III
+chrM	980	A	5	.....	IIII4
+chrM	981	C	5	.....	IIIII
+chrM	982	C	5	.....	IIIII
+chrM	983	C	5	.....	IIIII
+chrM	984	A	5	.....	-IGII
+chrM	985	A	5	.....	4GIII
+chrM	986	A	5	.....	BDGII
+chrM	987	C	5	.....	IDIII
+chrM	988	A	5	.....	@<III
+chrM	989	T	5	.....	IIIII
+chrM	990	A	5	.$....	I@III
+chrM	991	T	4	....	IICI
+chrM	992	G	4	T...	)III
+chrM	993	A	4	.$...	?ICI
+chrM	994	A	3	...	III
+chrM	995	A	3	...	III
+chrM	996	G	3	...	III
+chrM	997	G	3	...	III
+chrM	998	A	3	.$..	4II
+chrM	999	G	3	..^:.	III
+chrM	1000	A	3	...	III
+chrM	1001	C	3	.$..	III
+chrM	1002	A	2	..	II
+chrM	1003	A	2	..	II
+chrM	1004	G	2	..	II
+chrM	1005	T	2	..	II
+chrM	1006	C	2	..	II
+chrM	1007	G	2	..	CI
+chrM	1008	T	2	..	II
+chrM	1009	A	2	.$.	II
+chrM	1010	A	1	.	I
+chrM	1011	C	1	.	I
+chrM	1012	A	1	.	I
+chrM	1013	A	1	.	I
+chrM	1014	G	1	.	I
+chrM	1015	G	1	.	I
+chrM	1016	T	1	.	I
+chrM	1017	A	1	.	I
+chrM	1018	A	1	.	I
+chrM	1019	G	1	.	I
+chrM	1020	T	1	.	I
+chrM	1021	A	1	.	I
+chrM	1022	T	1	.	I
+chrM	1023	A	1	.	I
+chrM	1024	C	1	.	I
+chrM	1025	C	1	.	I
+chrM	1026	G	1	.	I
+chrM	1027	G	1	.	I
+chrM	1028	A	1	.	I
+chrM	1029	A	1	.	I
+chrM	1030	G	1	.	I
+chrM	1031	G	2	.^:.	II
+chrM	1032	T	2	..	EI
+chrM	1033	G	2	..	II
+chrM	1034	T	2	.$.	II
+chrM	1035	A	2	.^:.	II
+chrM	1036	C	2	..	II
+chrM	1037	T	2	..	II
+chrM	1038	T	2	..	II
+chrM	1039	G	2	..	II
+chrM	1040	G	2	..	II
+chrM	1041	A	2	..	II
+chrM	1042	T	2	..	II
+chrM	1043	A	3	..^:,	III
+chrM	1044	A	3	..,	III
+chrM	1045	C	3	..,	II7
+chrM	1046	C	3	..,	II-
+chrM	1047	A	3	..,	III
+chrM	1048	A	3	..,	III
+chrM	1049	A	3	..,	III
+chrM	1050	G	3	..,	III
+chrM	1051	T	3	..,	III
+chrM	1052	G	3	..,	III
+chrM	1053	T	3	..,	GII
+chrM	1054	A	3	..,	III
+chrM	1055	G	3	..,	:II
+chrM	1056	C	3	..a	HI%
+chrM	1057	T	3	..,	III
+chrM	1058	T	3	..,	III
+chrM	1059	A	3	..,	BII
+chrM	1060	A	3	..,	GII
+chrM	1061	A	3	..,	HII
+chrM	1062	C	3	..,	III
+chrM	1063	A	3	..,	DII
+chrM	1064	A	3	..,	BII
+chrM	1065	A	3	..,	1II
+chrM	1066	G	3	.$.,	&II
+chrM	1067	C	2	.,	II
+chrM	1068	A	2	.,	II
+chrM	1069	T	2	.,	II
+chrM	1070	C	2	.$,	II
+chrM	1071	C	1	,	I
+chrM	1072	A	1	,	I
+chrM	1073	G	1	,	I
+chrM	1074	C	1	,	I
+chrM	1075	T	1	,	I
+chrM	1076	T	1	,	I
+chrM	1077	A	1	,	I
+chrM	1078	C	1	,$	I
+chrM	1090	T	1	^:,	I
+chrM	1091	T	1	,	I
+chrM	1092	C	1	n	"
+chrM	1093	A	1	,	I
+chrM	1094	C	1	,	I
+chrM	1095	T	1	,	7
+chrM	1096	C	1	a	B
+chrM	1097	A	1	,	I
+chrM	1098	A	1	,	I
+chrM	1099	A	1	,	I
+chrM	1100	A	1	,	I
+chrM	1101	T	1	,	I
+chrM	1102	G	2	,^:,	(I
+chrM	1103	A	2	,,	II
+chrM	1104	A	2	,,	I+
+chrM	1105	C	2	,,	.I
+chrM	1106	A	2	,,	8I
diff --git a/test-data/1.sam b/test-data/1.sam
new file mode 100644
index 0000000..5538353
--- /dev/null
+++ b/test-data/1.sam
@@ -0,0 +1,29 @@
+@QNAME	FLAG	RNAME	POS	MAPQ	CIGAR	MRNM	MPOS	ISIZE	SEQ	QUAL	OPT
+1378_11_329	69	*	0	0	*	*	0	0	AGACCGGGCGGGGTGGCGTTCGGT	%##+'#######%###$#$##$(#
+1378_11_329	133	*	0	0	*	*	0	0	GTTCGTGGCCGGTGGGTGTTTGGG	###$$#$#$&#####$'$#$###$
+1378_17_1788	69	*	0	0	*	*	0	0	TGCCGTGTCTTGCTAACGCCGATT	#'#$$#$###%%##$$$$######
+1378_17_1788	133	*	0	0	*	*	0	0	TGGGTGGATGTGTTGTCGTTCATG	#$#$###$#$#######$#$####
+1378_25_2035	69	*	0	0	*	*	0	0	CTGCGTGTTGGTGTCTACTGGGGT	#%#'##$#$##&%#%$$$%#%#'#
+1378_25_2035	133	*	0	0	*	*	0	0	GTGCGTCGGGGAGGGTGCTGTCGG	######%#$%#$$###($###&&%
+1378_28_770	89	chr11.nib:1-134452384	72131356	37	17M1I5M	=	72131356	0	CACACTGTGACAGACAGCGCAGC	00/02!!0//1200210!!44/1	XT:A:U	CM:i:2	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_28_770	181	chr11.nib:1-134452384	72131356	0	24M	=	72131356	0	TTGGTGCGCGCGGTTGAGGGTTGG	$$(#%%#$%#%####$%%##$###
+1378_33_1945	113	chr2.nib:1-242951149	181247988	0	23M	chr12.nib:1-132349534	41710908	0	GAGAGAGAGAGAGAGAGAGAGAG	PQRVUMNXYRPUXYXWXSOSZ]M	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:163148	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_33_1945	177	chr12.nib:1-132349534	41710908	0	23M	chr2.nib:1-242951149	181247988	0	AGAGAGAGAGAGAGAGAGAGAGA	SQQWZYURVYWX]]YXTSY]]ZM	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:163148	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_34_789	69	*	0	0	*	*	0	0	ATGGTGGCTGACGCGTTTGACTGT	#$##%#$##$&$#%##$##$###$
+1378_34_789	133	*	0	0	*	*	0	0	GGGCTTGCGTTAGTGAGAGGTTGT	###%$%$%%###$####$###$#&
+1378_35_263	115	chr16.nib:1-88827254	19671878	0	23M	=	19671877	-1	AGAGAGAGAGAGAGAGAGAGTCT	77543:<55#"4!&=964518A>	XT:A:R	CM:i:2	SM:i:0	AM:i:0	X0:i:4	X1:i:137	XM:i:2	XO:i:0	XG:i:0	MD:Z:23
+1378_35_263	179	chr16.nib:1-88827254	19671877	0	23M	=	19671878	1	GAGAGAGAGAGAGAGAGAGAGTC	LE7402DD34FL:27AKE>;432	XT:A:R	CM:i:0	SM:i:0	AM:i:0	X0:i:265	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
+1378_43_186	69	*	0	0	*	*	0	0	ATACTAGTTGGGACGCGTTGTGCT	#$(4%$########$#$###$$$#
+1378_43_186	133	*	0	0	*	*	0	0	GCTAGGGTTTGGGTTTGCGGTGGG	$%#$########%##%#$###'#'
+1378_51_1671	117	chr2.nib:1-242951149	190342418	0	24M	=	190342418	0	CTGGCGTTCTCGGCGTGGATGGGT	#####$$##$#%#%%###%$#$##
+1378_51_1671	153	chr2.nib:1-242951149	190342418	37	16M1I6M	=	190342418	0	TCTAACTTAGCCTCATAATAGCT	/<<!"0///////00/!!0121/	XT:A:U	CM:i:2	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_56_324	117	chr2.nib:1-242951149	80324999	0	24M	=	80324999	0	TCCAGTCGCGTTGTTAGGTTCGGA	#$#$$$#####%##%%###**#+/
+1378_56_324	153	chr2.nib:1-242951149	80324999	37	8M1I14M	=	80324999	0	TTTAGCCCGAAATGCCTAGAGCA	4;6//11!"11100110////00	XT:A:U	CM:i:2	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:1	XO:i:1	XG:i:1	MD:Z:22
+1378_56_773	69	*	0	0	*	*	0	0	TGTCGTGAGGTCACTTATCCCCAT	&%#%##%%#####&#$%##$%##$
+1378_56_773	133	*	0	0	*	*	0	0	TCTGGTCGGTTTCGGGGAGTGGAA	##%%#&$###$#$##%$####%%$
+1378_62_2027	69	*	0	0	*	*	0	0	CTTCCACGATCTGCTCGCTGTGGT	(#&&$##$$#$%#%$$$#$###'#
+1378_62_2027	133	*	0	0	*	*	0	0	GTTGGCCTGGCCTGCCGTGCTGCG	*##),/%##$)#%##1$#'%.#&#
+1378_62_2029	69	*	0	0	*	*	0	0	TCTGGGCTGTCTTCGGGTCGGTGT	$%$$####$##$$#)##%%#$###
+1378_62_2029	133	*	0	0	*	*	0	0	GGCGGTGTGTGGTGCGGCTGTGCG	/$$$=(####%####)$$%$-&%#
+1378_67_1795	81	chr16.nib:1-88827254	26739130	0	23M	chrY.nib:1-57772954	57401793	0	TGGCATTCCTGTAGGCAGAGAGG	AZWWZS]!"QNXZ]VQ]]]/2]]	XT:A:R	CM:i:2	SM:i:0	AM:i:0	X0:i:3	X1:i:0	XM:i:2	XO:i:0	XG:i:0	MD:Z:23
+1378_67_1795	161	chrY.nib:1-57772954	57401793	37	23M	chr16.nib:1-88827254	26739130	0	GATCACCCAGGTGATGTAACTCC	]WV]]]]WW]]]]]]]]]]PU]]	XT:A:U	CM:i:0	SM:i:37	AM:i:0	X0:i:1	X1:i:0	XM:i:0	XO:i:0	XG:i:0	MD:Z:23
\ No newline at end of file
diff --git a/test-data/1.scf b/test-data/1.scf
new file mode 100755
index 0000000..b1ce43a
Binary files /dev/null and b/test-data/1.scf differ
diff --git a/test-data/1.sff b/test-data/1.sff
new file mode 100644
index 0000000..978d88d
Binary files /dev/null and b/test-data/1.sff differ
diff --git a/test-data/1.tabular b/test-data/1.tabular
new file mode 100644
index 0000000..71b7e41
--- /dev/null
+++ b/test-data/1.tabular
@@ -0,0 +1,6 @@
+chr22	1000	NM_17
+chr22	2000	NM_18
+chr10	2200	NM_10
+chr10	hap	test
+chr10	1200	NM_11
+chr22	1600	NM_19
\ No newline at end of file
diff --git a/test-data/1.txt b/test-data/1.txt
new file mode 100644
index 0000000..9e8397b
--- /dev/null
+++ b/test-data/1.txt
@@ -0,0 +1,10 @@
+chr1    4225    19670
+chr10   6       8
+chr1    24417   24420
+chr6_hla_hap2   0       150
+chr2    1       5
+chr10   2       10
+chr1    30      55
+chrY    1       20
+chr1    1225979 42287290
+chr10   7       8
diff --git a/test-data/1.wig b/test-data/1.wig
new file mode 100644
index 0000000..95e3253
--- /dev/null
+++ b/test-data/1.wig
@@ -0,0 +1,3 @@
+track type=wiggle_0 name="tb_knownGene" description="table browser query on knownGene" visibility=3 url=
+chr7	127475281	127491632	NM_000230	0	+	127486022	127488767	0	3	29,172,3225,	0,10713,13126,
+chr7	127486011	127488900	D49487	0	+	127486022	127488767	0	2	155,490,	0,2399,
diff --git a/test-data/2.bed b/test-data/2.bed
new file mode 100644
index 0000000..4bc0578
--- /dev/null
+++ b/test-data/2.bed
@@ -0,0 +1,68 @@
+chr1	147962192	147962580	NM_005997_cds_0_0_chr1_147962193_r	0	-
+chr1	147984545	147984630	BC007833_cds_0_0_chr1_147984546_f	0	+
+chr1	148078400	148078582	AJ011123_cds_0_0_chr1_148078401_r	0	-
+chr1	148185136	148185276	NM_002796_cds_0_0_chr1_148185137_f	0	+
+chr10	55251623	55253124	AY029205_cds_0_0_chr10_55251624_r	0	-
+chr11	116124407	116124501	AK057832_cds_0_0_chr11_116124408_r	0	-
+chr11	116206508	116206563	NM_000040_cds_1_0_chr11_116206509_f	0	+
+chr11	116211733	116212337	BC005380_cds_0_0_chr11_116211734_r	0	-
+chr11	130745911	130745993	AY358331_cds_0_0_chr11_130745912_f	0	+
+chr12	38440094	38440321	NM_052885_cds_0_0_chr12_38440095_r	0	-
+chr12	38905200	38905351	AY792511_cds_0_0_chr12_38905201_f	0	+
+chr13	112381694	112381953	NM_207440_cds_1_0_chr13_112381695_f	0	+
+chr13	29680676	29680875	NM_032116_cds_0_0_chr13_29680677_r	0	-
+chr14	98521864	98521922	U88895_cds_0_0_chr14_98521865_f	0	+
+chr14	98710240	98712285	NM_022898_cds_0_0_chr14_98710241_r	0	-
+chr15	41486872	41487060	BX537418_cds_0_0_chr15_41486873_r	0	-
+chr15	41673708	41673857	AK223365_cds_0_0_chr15_41673709_f	0	+
+chr15	41679161	41679250	NM_153700_cds_0_0_chr15_41679162_r	0	-
+chr15	41773540	41773689	AK223365_cds_0_0_chr15_41773541_f	0	+
+chr16	142908	143003	NM_005332_cds_0_0_chr16_142909_f	0	+
+chr16	179197	179339	BC065198_cds_0_0_chr16_179198_r	0	-
+chr16	244413	244681	AK057165_cds_2_0_chr16_244414_f	0	+
+chr16	259268	259383	AB016929_cds_0_0_chr16_259269_r	0	-
+chr18	23786114	23786321	NM_001792_cds_0_0_chr18_23786115_r	0	-
+chr18	59406881	59407046	NM_012397_cds_1_0_chr18_59406882_f	0	+
+chr18	59455932	59456337	AB046400_cds_0_0_chr18_59455933_r	0	-
+chr18	59528407	59528575	AY792326_cds_0_0_chr18_59528408_f	0	+
+chr19	59068595	59069564	BC013995_cds_1_0_chr19_59068596_f	0	+
+chr19	59236026	59236146	NM_198481_cds_0_0_chr19_59236027_r	0	-
+chr19	59297998	59298008	NM_004542_cds_0_0_chr19_59297999_f	0	+
+chr19	59318205	59318718	AK128544_cds_3_0_chr19_59318206_r	0	-
+chr2	118288583	118288668	NM_006773_cds_0_0_chr2_118288584_f	0	+
+chr2	118390395	118390500	BC005078_cds_0_0_chr2_118390396_r	0	-
+chr2	220108689	220109267	AY125465_cds_0_0_chr2_220108690_f	0	+
+chr2	220229609	220230869	NM_024536_cds_0_0_chr2_220229610_r	0	-
+chr20	33330413	33330423	NM_181466_cds_0_0_chr20_33330414_r	0	-
+chr20	33485370	33486123	BC085019_cds_1_0_chr20_33485371_f	0	+
+chr20	33488491	33489122	NM_000557_cds_1_0_chr20_33488492_r	0	-
+chr20	33513606	33513792	AF022655_cds_1_0_chr20_33513607_f	0	+
+chr21	32687402	32687588	NM_032910_cds_0_0_chr21_32687403_f	0	+
+chr21	32869641	32870022	NM_018277_cds_3_0_chr21_32869642_r	0	-
+chr21	33321040	33322012	NM_005806_cds_1_0_chr21_33321041_f	0	+
+chr21	33728358	33728724	AK129657_cds_0_0_chr21_33728359_r	0	-
+chr22	30120223	30120265	NM_004147_cds_0_0_chr22_30120224_f	0	+
+chr22	30160419	30160661	BC032941_cds_0_0_chr22_30160420_r	0	-
+chr22	30228824	30228916	NM_001007467_cds_1_0_chr22_30228825_f	0	+
+chr22	30340151	30340376	CR456540_cds_0_0_chr22_30340152_r	0	-
+chr5	131311206	131311254	AF099740_cds_11_0_chr5_131311207_r	0	-
+chr5	131424298	131424460	NM_000588_cds_0_0_chr5_131424299_f	0	+
+chr5	131556601	131556672	BC035813_cds_0_0_chr5_131556602_r	0	-
+chr5	131621326	131621419	BC003096_cds_0_0_chr5_131621327_f	0	+
+chr6	108299600	108299744	NM_007214_cds_0_0_chr6_108299601_r	0	-
+chr6	108594662	108594687	NM_003269_cds_0_0_chr6_108594663_f	0	+
+chr6	108640045	108640151	NM_003795_cds_0_0_chr6_108640046_r	0	-
+chr6	108722976	108723115	NM_145315_cds_0_0_chr6_108722977_f	0	+
+chr7	113660517	113660685	AF467257_cds_1_0_chr7_113660518_f	0	+
+chr7	116512159	116512389	NM_003391_cds_0_0_chr7_116512160_r	0	-
+chr7	116714099	116714152	NM_000492_cds_0_0_chr7_116714100_f	0	+
+chr7	116945541	116945787	AF377960_cds_0_0_chr7_116945542_r	0	-
+chr8	118881131	118881317	NM_000127_cds_0_0_chr8_118881132_r	0	-
+chr9	128764156	128764189	BC051300_cds_0_0_chr9_128764157_f	0	+
+chr9	128787519	128789136	NM_014908_cds_0_0_chr9_128787520_r	0	-
+chr9	128789552	128789584	NM_015354_cds_0_0_chr9_128789553_f	0	+
+chr9	128850516	128850624	AB058751_cds_0_0_chr9_128850517_r	0	-
+chrX	122745047	122745924	NM_001167_cds_1_0_chrX_122745048_f	0	+
+chrX	152648964	152649196	NM_000425_cds_0_0_chrX_152648965_r	0	-
+chrX	152691446	152691471	AF101728_cds_0_0_chrX_152691447_f	0	+
+chrX	152694029	152694263	BC052303_cds_0_0_chrX_152694030_r	0	-
diff --git a/test-data/2.fasta b/test-data/2.fasta
new file mode 100644
index 0000000..3bfe7d3
--- /dev/null
+++ b/test-data/2.fasta
@@ -0,0 +1,11 @@
+>Sequence 561 BP; 135 A; 106 C; 98 G; 222 T; 0 other;
+gttcgatgcc taaaatacct tcttttgtcc ctacacagac cacagttttc ctaatggctt
+tacaccgact agaaattctt gtgcaagcac taattgaaag cggttggcct agagtgttac
+cggtttgtat agctgagcgc gtctcttgcc ctgatcaaag gttcattttc tctactttgg
+aagacgttgt ggaagaatac aacaagtacg agtctctccc ccctggtttg ctgattactg
+gatacagttg taataccctt cgcaacaccg cgtaactatc tatatgaatt attttccctt
+tattatatgt agtaggttcg tctttaatct tcctttagca agtcttttac tgttttcgac
+ctcaatgttc atgttcttag gttgttttgg ataatatgcg gtcagtttaa tcttcgttgt
+ttcttcttaa aatatttatt catggtttaa tttttggttt gtacttgttc aggggccagt
+tcattattta ctctgtttgt atacagcagt tcttttattt ttagtatgat tttaatttaa
+aacaattcta atggtcaaaa a
\ No newline at end of file
diff --git a/test-data/2.tabular b/test-data/2.tabular
new file mode 100644
index 0000000..c1e73a2
--- /dev/null
+++ b/test-data/2.tabular
@@ -0,0 +1,10 @@
+1	68	4.1
+2	71	4.6
+3	62	3.8
+4	75	4.4
+5	58	3.2
+6	60	3.1
+7	67	3.8
+8	68	4.1
+9	71	4.3
+10	69	3.7
diff --git a/test-data/2gen.fastq b/test-data/2gen.fastq
new file mode 100644
index 0000000..abb5f2f
--- /dev/null
+++ b/test-data/2gen.fastq
@@ -0,0 +1,8 @@
+@seq1  
+GACAGCTTGGTTTTTAGTGAGTTGTTCCTTTCTTT  
++seq1  
+hhhhhhhhhhhhhhhhhhhhhhhhhhPW@hhhhhh  
+@seq2  
+GCAATGACGGCAGCAATAAACTCAACAGGTGCTGG  
++seq2  
+hhhhhhhhhhhhhhYhhahhhhWhAhFhSIJGChO
\ No newline at end of file
diff --git a/test-data/3.bam b/test-data/3.bam
new file mode 100644
index 0000000..3ac0213
Binary files /dev/null and b/test-data/3.bam differ
diff --git a/test-data/3.bed b/test-data/3.bed
new file mode 100644
index 0000000..124167d
--- /dev/null
+++ b/test-data/3.bed
@@ -0,0 +1,25 @@
+chr1	147962006	147975713	NM_005997	0	-	147962192	147975670	0	6	574,145,177,115,153,160,	0,1543,7859,9048,9340,13547,
+chr1	147984101	148035079	BC007833	0	+	147984545	148033414	0	14	529,32,81,131,118,153,300,206,84,49,85,130,46,1668,	0,25695,28767,33118,33695,33998,35644,38005,39629,40577,41402,43885,48367,49310,
+chr1	148077485	148111797	NM_002651	0	-	148078400	148111728	0	12	1097,121,133,266,124,105,110,228,228,45,937,77,	0,2081,2472,6871,9907,10257,11604,14199,15637,18274,23636,34235,
+chr1	148185113	148187485	NM_002796	0	+	148185136	148187378	0	7	163,207,147,82,117,89,120,	0,416,877,1199,1674,1977,2252,
+chr2	118288484	118306183	NM_006773	0	+	118288583	118304530	0	14	184,285,144,136,101,200,115,140,162,153,114,57,178,1796,	0,2765,4970,6482,6971,7183,7468,9890,10261,10768,11590,14270,14610,15903,
+chr2	118389378	118390700	BC005078	0	-	118390395	118390500	0	1	1322,	0,
+chr2	220108603	220116964	NM_001927	0	+	220108689	220116217	0	9	664,61,96,162,126,221,44,83,789,	0,1718,1874,2118,2451,2963,5400,7286,7572,
+chr2	220229182	220233943	NM_024536	0	-	220229609	220233765	0	4	1687,180,574,492,	0,1990,2660,4269,
+chr5	131170738	131357870	AF099740	0	-	131311206	131357817	0	31	112,124,120,81,65,40,120,129,61,88,94,79,72,102,144,117,89,73,96,135,135,78,74,52,33,179,100,102,65,115,248,	0,11593,44117,47607,104668,109739,114675,126366,135488,137518,138009,140437,152389,153373,155388,159269,160793,162981,164403,165577,166119,167611,169501,178260,179675,180901,181658,182260,182953,183706,186884,
+chr5	131424245	131426795	NM_000588	0	+	131424298	131426383	0	5	215,42,90,42,535,	0,313,1658,1872,2015,
+chr5	131556201	131590458	NM_004199	0	-	131556601	131582218	0	15	471,97,69,66,54,100,71,177,194,240,138,152,97,100,170,	0,2316,2802,5596,6269,11138,11472,15098,16528,17674,21306,24587,25142,25935,34087,
+chr5	131621285	131637046	NM_003687	0	+	131621326	131635821	0	7	134,152,82,179,164,118,1430,	0,4915,8770,13221,13609,14097,14331,
+chr6	108298214	108386086	NM_007214	0	-	108299600	108385906	0	21	1530,105,99,102,159,174,60,83,148,155,93,133,95,109,51,59,62,113,115,100,304,	0,2490,6246,10831,12670,23164,23520,27331,31052,32526,34311,36130,36365,38609,41028,42398,43048,51479,54500,59097,87568,
+chr6	108593954	108616704	NM_003269	0	+	108594662	108615360	0	9	733,146,88,236,147,97,150,106,1507,	0,5400,8778,10445,12037,14265,14749,15488,21243,
+chr6	108639410	108689143	NM_152827	0	-	108640045	108688818	0	3	741,125,487,	0,2984,49246,
+chr6	108722790	108950942	NM_145315	0	+	108722976	108950321	0	13	325,224,52,102,131,100,59,83,71,101,141,114,750,	0,28931,52094,60760,61796,71339,107102,152319,181970,182297,215317,224802,227402,
+chr7	113320332	113924911	AK131266	0	+	113862563	113893433	0	20	285,91,178,90,58,75,138,51,201,178,214,105,88,84,77,102,122,70,164,1124,	0,201692,340175,448290,451999,484480,542213,543265,543478,545201,556083,558358,565876,567599,573029,573245,575738,577123,577946,603455,
+chr7	116511232	116557294	NM_003391	0	-	116512159	116556994	0	5	1157,265,278,227,383,	0,20384,37843,43339,45679,
+chr7	116713967	116902666	NM_000492	0	+	116714099	116901113	0	27	185,111,109,216,90,164,126,247,93,183,192,95,87,724,129,38,251,80,151,228,101,249,156,90,173,106,1754,	0,24290,29071,50936,54313,55285,56585,60137,62053,68678,79501,107776,110390,111971,114967,122863,123569,126711,130556,131618,134650,147559,162475,172879,184725,185496,186945,
+chr7	116944658	117107512	AF377960	0	-	116945541	116979926	0	23	1129,102,133,64,186,206,179,188,153,100,87,80,96,276,118,255,151,100,204,1654,225,108,173,	0,7364,8850,10413,13893,14398,17435,24259,24615,35177,35359,45901,47221,49781,56405,66857,69787,72208,73597,80474,100111,150555,162681,
+chr8	118880786	119193239	NM_000127	0	-	118881131	119192466	0	11	531,172,161,90,96,119,133,120,108,94,1735,	0,5355,7850,13505,19068,20309,23098,30863,36077,37741,310718,
+chr9	128763240	128783870	NM_174933	0	+	128764156	128783586	0	12	261,118,74,159,76,48,56,63,129,117,127,370,	0,522,875,5630,12374,12603,15040,15175,18961,19191,20037,20260,
+chr9	128787362	128789566	NM_014908	0	-	128787519	128789136	0	1	2204,	0,
+chr9	128789530	128848928	NM_015354	0	+	128789552	128848511	0	44	54,55,74,85,81,45,93,120,212,115,201,90,66,120,127,153,127,88,77,115,121,67,129,140,107,207,170,70,68,196,78,86,146,182,201,93,159,138,75,228,132,74,130,594,	0,1491,5075,8652,9254,10312,11104,11317,20808,21702,23060,25462,31564,32908,33566,34851,35204,35595,35776,37202,38860,39111,39891,40349,42422,45499,45827,46675,47158,47621,50453,50840,51474,51926,53831,54186,55119,55619,57449,57605,57947,58352,58541,58804,
+chr9	128849867	128870133	NM_020145	0	-	128850516	128869987	0	11	757,241,101,90,24,63,93,134,129,142,209,	0,1071,1736,2085,2635,4201,6376,6736,13056,14247,20057,
diff --git a/test-data/3.maf b/test-data/3.maf
new file mode 100644
index 0000000..5cdc400
--- /dev/null
+++ b/test-data/3.maf
@@ -0,0 +1,55 @@
+##maf version=1
+a score=60426.0
+s hg17.chr7    127471195 331 + 158628139 gtttgccatcttttgctgctctagggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTAAAACTTCCC-------------------------------AAATACT-GCCACTGATGTCCTG-----ATGGAGGTA-------TGAA-------------------AACATCCACTAA
+s panTro1.chr6 129885076 331 + 161576975 gtttgccatcttttgctgctcttgggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTGAAACTTCCC-------------------------------AAATACT-GCCACTGATGTCCTG-----ATGGAGGTA-------TGAA-------------------AACATCCACTAA
+s mm5.chr6      28904571 357 + 149721531 CTCCACTCTCGTTTGCTGTT----------------CTGTCACCATGGAAACAAA-CGAGGGTGGTCCAGTTACTATCTTGACTGCAGCTGGCAGTCAGTT-GCCACT-----CAGGAATAAGGCTATGCCATT-GATCCACTGAACCGTGATCTGGAAACCTGGCTGTTGTTT-------CAAGCCTTGGGGCCAGTTTGCGGTGTTACTCATGA--CTCTAAGATCGTGTGCTTG----CTGCAGGAAGAGACAGCAAGGGGGTTACATTTAAAAAGCCCCCAGTTTAGCTATAGGCAGGCCAACAGGTGTAAAAATACTCACTAGTAATGGGCTGAACTCATGGAGGTAGCATTAGTGAGACACTGTAACTGTTTTTTTAAAAATCACTAA
+s rn3.chr4      56178191 282 + 187371129 CTTCACTCTCATTTGCTGTT----------------CTGTCACTATGGAGACAAACACAGGCTAGCCCAGTTACTATCTTGATCACAGCAGCT-GTCAGCTAGCTGCCACTCACAGGAATAAGGCCATACCATT-GATCCACTGAACCTTGATCTAGGAATTTGGC----------------------TGGGGCCAGTTTGCGGTGTCACTCATGA--CTCTAAGATTGTGTGTTTG----CTCCAGGAAGAGACGGCAAGAGGATTACCTTTAAAAGGTTC---------------------------------GGAGTCTAGCTGTAGACAGCCCA-----ATG--GGTA-------TAAC-------------------AATACTCACTAA
+
+a score=8157.0
+s hg17.chr7    127471526 58 + 158628139 AATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+s panTro1.chr6 129885407 58 + 161576975 AATTTGTGGTTTATTCGTTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+s mm5.chr6      28904928 54 + 149721531 AA----CGTTTCATTGATTGCTCATCATTTAAAAAAAGAAATTCCTCAGTGGAAGAGG
+
+a score=43613.0
+s hg17.chr7     127471584 104 + 158628139 GAGATATTTGGggaaatt---ttgtatagactagctttcacgatgttagggaattattattgtgtgataatggtcttgcagttaca-cagaaattcttccttattttt
+s panTro1.chr6  129885465 104 + 161576975 GAGACATTTGGggaaatt---ttgtatagactagctttcacgatgttagggagttattattgtgtgataatggtcttgcagttaca-cagaaattcttccttattttt
+s mm5.chr6       28909125 102 + 149721531 GAGACATTTGGGGGAAATACGATGTATA------CTTTCATGATCTTAAAGAATTGCTATTGTTTGATAGTGATATTATGGTTAAATTAAAAAAAAACCCTTACTTTT
+s rn3.chr4       56182196  99 + 187371129 GAGATATTTGGGGGAAATATGATGTGCA------CTTCCATGATCTTAAAGAATTGCTACTGTTTGATAGTGATCTTATGGTTAAA-TAAAAAAAAT--CTTAGTTGT
+s canFam1.chr14  52864846 108 -  63549963 GAGATATTTGGGGGAATTTGAATGTAGTGTTGCTCTTTTGTGATGCTAAGAAATTATAATTGTCTGATGATAGTCTCGTGGTTATGGGGGAAATGCTTCCTTATTTTT
+
+a score=63217.0
+s hg17.chr7     127471688 183 + 158628139 tgggaa---gcaccaaag----tagggat---aaaatgtcatgatgtgtgcaatacactttaaaatgtttttg-----ccaaaataatt----------------aatgaagc--aaatatggaaa-ataataattattaaatctaggtgatgggtatattgtagttcactatagtattgcacacttttctgtatgtttaaatttttcatttaaaaa
+s panTro1.chr6  129885569 183 + 161576975 tgggaa---acaccaaag----tagggat---aaaatgtcatgatgtgtgcaatacgctttaaaatatttttg-----ccaaaataatt----------------aatgaagc--aaatatggaaa-ataataattattaaatctaggtgatgggtatattgtagttcactatagtattgcacacttttctgtatgtttaaaattttcatttaaaaa
+s mm5.chr6       28909227 176 + 149721531 CATAAA---ATACTCAAATATTTAGGGGTATGACAATGTCATAGTGTCTGCAATTTGCTTTAAAATAATTTTG-------AAAAC---------------------CTGAGGCTAAACTATGGGAA-ATAGCAATTATTACATATGGGTAATAAGTA-----TAACTCACTATATTATTTTTCACTAT----TGTGTTTGAAATTTTCATTTTAAAA
+s rn3.chr4       56182295 194 + 187371129 TAGAAA---ATACTCAAATATTTAGGGGCGTGACAATGTCACAGTGTCTGCAATTTGCTTTAAAG-ATTTTTAAATATTTAAAAAAGTTTTAATAATTTTGAAAAACTGAAGCTACACTATGGGAA-GTGGTAATTGTTACATATGGGTAATAAGTA-----TAATTCGTTATATTATT-------------TTTCTTAGAATTTTTCATTTGAAAA
+s canFam1.chr14  52865149 163 -  63549963 tagagacacaaactgaagtatttaaggat---gaaatgtcatgatgtttgcaattggctttaaaatattttag-----ccaaaa-agta----------------aatgaagc--aaatatgggaagacaataatcattaaatctaggtgaTGCATA---------------------------CTTTTCCATATGTTTGAAATTTTCATTTAAAAA
+
+a score=8913.0
+s hg17.chr7     127471872 40 + 158628139 ctttgagc-----tagacaccaggctatgagctaggagcatagca
+s panTro1.chr6  129885753 40 + 161576975 ctttgagc-----tagacaccaggctatgagctaggagcatagca
+s mm5.chr6       28910662 44 + 149721531 CTTTGTGCTTAGGTAGGCACTGAGCTCTTAAATTGGA-TACAGCG
+s canFam1.chr14  52865538 40 -  63549963 CATCAtgc-----tagatcctggactatgagctgggtatatagca
+
+a score=91878.0
+s hg17.chr7     127471912 335 + 158628139 atgaccaa----------------------------------------------------------------------------------------------atagactcctaccaa--------------------------------------------------ctc-aaagaatgcacattctCTGGGAAACATGTTTCCATTAGGAAGCCTCGAATGCAATGTGACTGTGGTCTCCAGGACCTG-TGTGATCCTGGCTTTTCCTGTTCCCTCCG---CATCATCACTGCAGGTGTGTTTTCCCAAGTTTTAAACATTTA------CCTTCCCAGTGGCCTTGCGTCTAGAGGAATCCCTGTATAGTGGT-ACATGAATATAACACATAACAAA-AATCATCTCTATGGTGTGTGTTGTTCCTGGGGTTCAattcagcaaattttc [...]
+s panTro1.chr6  129885793 335 + 161576975 atgaccaa----------------------------------------------------------------------------------------------atagactcctaccaa--------------------------------------------------ctc-aaagaatgcacattctCTGGGAAACATGTTTCCATTAGGAAGCCTCGAATGCAATGTGACTGTGGTCTCCAGGACATG-TGTGATCCTGGCTTTTCCTGTTCCCTCTG---CATCATCACTGCAGGTGTATTTTCCCAAGTTTTAAACATTTA------CCTTCCCAGTGGCCTTGCGTCTAGAGGAATCCCTGTATAGTGGT-ACATGAATATAACACATAACAAA-AATCATCTCTATGGTGTGTGTTGTTCCTGGGGTTCAattcagcaaattttt [...]
+s mm5.chr6       28910706 350 + 149721531 ATGAGCAA----------------------------------------------------------------------------------------------AAAAACAACAACAAAAAACAAACAAACAAAAAAACCAAAAACCAAAAACCAAAAAAACCTATAGCCTC-ACAGGGTGGGTTGTCTTTGAGGAACATGCATCCGCTAGAAAGTCCCAAGTACACTATGACAGTTG--CCCAGGCCCCGCCTTAAACCTGGTTTTCCTGGTTTCTTTCA---CATCATTACCACGAATATATTTCCTCAAGTTTAAAGAAAGTACCCCCTCCTTTCCAGT-GCCTCAAATCTAGAAGAATATTCATAGTGAAGT-GCA------------------------CAGCCGGGTGGTGCATGGTAAT-CTGGAAGTCACCTCTGCAAATCTTT [...]
+s rn3.chr4       56183448 420 + 187371129 ATGACCAATATACACTGTTTACATGTATAGCATTGTGAATGGAGACATAAAAAGATAATCTAGCTTTGTGCTAGGTAGGTGCTGAGCTCTTAACAGTGCTGGGCAGAAACCTATAAC--------------------------------------------------CTC-ACAGGGTGGGTTGTCTTTGAGGAGCGTGCTAACCCTAGGAAGTCTCAAATACAATGTGATGGTTGCCCCCAGGCACCACCTTGAACCTGGTCTTCCTGGTTTCTTTCA---CACCATTACCACAAATACATTTTCTCAGGTTTAAAAGAAGTC-CCACTCCTTTCCAGT-GCCCTAGATCTAGAAGCACATTCATAATGATGT-ACACTAACCCGA---------------CAGCTGTGTGGTATATGGTATC-CCGGAAGTCACCTCAGCAAACCTTT [...]
+s canFam1.chr14  52865578 336 -  63549963 gtgaacaa---------------------------------------------------------------------------------------------aacagagccctgcagt--------------------------------------------------cttgatggagcacacaACCTTTGGGGAACATGTTTCCATAAGAAAGTCTCCAATGTGATCTGATGGT-GCCGCCAGGACCTA-TGTCAGCCTACCGTTCCATGTCCCCTCCACACCATCATCACTGCAGGTGTGTTTTCCCACATTTTAAATATCTG-------CTTCCCGGTGGCCTTGAGTCTAGAGGAGTCCCCCCACTATGGTGGCACTAATACTGAAGGTCAGAAATAATCAGTTCTGTGGTGCATGTTGCCCCTGAGGTTCTGTTCGGGAAACTTCT [...]
+
+a score=82819.0
+s hg17.chr7     127472258 423 + 158628139 gatggccca-atccctgtcctct---taaaacctaagggaggagaTGGAAAGGGG-CACCCAACCCAGACTGAGAGACAGGAATTAGCTGCAAGGGGAACTAGGAAAAGCTTCTTTA---AGGATGGAGAGGCCCTAGTGGAAT-GGGGAGATTCTTCCGGGAGAAGCGATGGATGCACAGTTGGGCATCCCCACAGACGGACTGGAAAGAAAAAAGGCCTGGAGGAATCAATGTG-------CAATGTATGTGTGTTCCCTGGTTcaagggctgg-gaactttctcta-aagggccaggtagaaaacattttaggctttctaagccaag--gcaaaat-tgaggatattacatgggtacttatacaacaagaataaacaatt---tacacaattttttgttgacagaattcaaaactttat----agacacagaaatgcaaatttcctgt
+s panTro1.chr6  129886139 423 + 161576975 gatggccca-atccctgtcctct---taaaacctaagggaggagaTGGAAAGGGG-CACCCAACCCAGACTGAGAGACAGGAATTAGCTGCAAGGGGAACTAGGAAAAGCTTCTTTA---AGGATGGAGAGACCCTAGTGGAAT-GGGGAGATTCTTCCGGGAGAAGCGATGGATGCGCAGTTGGGCATCCCCACAGACGGACTGGAAAGAAAAAAGGCCTGGAGGAATCAATGTG-------CAATGTATGTGTGTTCCCTGGTTcaagggctgg-gaactttctcta-aagggccaggtagaaaacattttaggctttctaagccaag--gcaaaat-tgaggatattacatgggtacttatacaacaagaataaacaatt---tacacaattttttgttgacagaattcaaaactttat----agacacagaaatgtaaatttcctgt
+s mm5.chr6       28911067 334 + 149721531 AATGGCAGAGGGCTCTGTTCTCT----------------------------------GTCCA---TAGACAAGGAAGC---------CTGCCAGTGG-----------------------TGGAGAGCAGGGCCTATGCAAAACAGGGGGGGTTCTGTGGGCAGAAGGGATGGACCTGACTCCAGGAACGCCCATA-AGAGACGGGCAGGACTGGGAGCCAGGAGCAGCCAGCGCA--------GGTGTGTGTAGGTTTCATAGTTT---------------------------GCAGGTTAGCAAACAATTCTCGCTTGGCAGGCCAAGA-GGGAAAT-AAAGGTTACCCCGTGGGTACTTAT---ACAAGAGAAAACAAG-------ACAATTTTTGGTTGACCAAATTCAGAACTTTATTTGAGGATGCTAAAGTTTTAATTTCTTAA
+s rn3.chr4       56183879 340 + 187371129 AATGGCAGAGGCCCCTGTTCTCT----------------------------------GTCCA---TAGTCAAAGAAGC---------CTCTCAG----------------------A---TGGAGAGCAGGGCCTATGCAAAAG-AGGGGGCTTCTGTAGGCAGAAGGGATGGACTAGCCTCCGGACATAGCCATAGAGAGGCTGGCAGGACTGAGACCCAGGAGAAGCCAGCGCAGGTGTGCGGGCGTGTGTATATTTCATAGTTT---------------------------GCAGGTTGGCAAACAATTCCTGCTTTGCAGGCCAAGA-GGAAACT-GAAGGTGACCCCGTGAGTGCTTAC---ACAAGAGAAAACAAG-------ACAATTTTTGGTTGACCAAATTCAGAACTTTATTTGAGGATGCTAAAGTTTAAATTTCTTTT
+s canFam1.chr14  52866108 405 -  63549963 GGAGACTTG-ATGCCTGCCTTCCACTTACAGCCTGTGGGCAGAGGTGGGAAGAGGTCACGCAAGCCAGTTGGAATGAGGGGAGTTGGCTGGAAAGGTGACCAGGACAAGCTACTTCAACCAGGAAGAAGAGACCCC---------GG--------TGCTTGGAGAAGGCCTGATTGAGCAGTCCTGCATGCCCGCCCAC-GACTGGCAGGAATAAAGACCCAGAAGAGCTAACGTG-------CAATGTA------TTTTCTAGTTCCAgggttggcaaactttctctctaagggtgggatgataaacattttaggcttttcagaccaagaggcgacatcagagggtat---gtaggt---------acaagagggaaaagttgcccccggaattttttg--gataaaattcaaaactttacttagggatgccaaaatgtaaacttcatat
+
+a score=15522.0
+s hg17.chr7     127473809 97 + 158628139 tttttttttcaaacttttacaaatgtagaaaccattcttagcttttgggcattaccaaacccggcagtgg-caggctcggttcaccaacgtcatttgc
+s panTro1.chr6  129887686 97 + 161576975 tttttttttcaaacttttacaaatgtagaaaccattcttagcttttgggcattaccaaacccggcagtgg-AAGGCTCGGTTCACCAACATCATTTGC
+s canFam1.chr14  52866548 97 -  63549963 tatatttttcaactctttaaaaatgcaaaaaccattcctagc-tcggggccatggtggtcctggtggtggtcggatttggctcacccacataacttgc
+
+a score=28088.0
+s hg17.chr7    127473906 331 + 158628139 agttccccgCTTTATGTTATGGgttttgttttgttttgtttttttt-attgagacagagtttcactcttgttgcccaggctgtagtgcaatggtctgatcttggctcactgcaacctccacttcccaggttcaagccattctcctgcctcagcctctcaagtagctgggattacagacactcaccaccacacctggctaattttgtatttttagtagagatgaggtttcaccatgttggccaggctggtctcgaaatcctgacctcaggtgatccacccaccttggcctcccaaagtgctgggattacaggcttgagctaccacgcctggct
+s panTro1.chr6 129887783 318 + 161576975 AGTTCCCCGCTTTATATTATGGGttttttttttttttttttttttttattgagacagagtttcactcttgttgcccaggctgtagtg--------------tggctcactgcaacctccacttcccaggttcaagccattctcctgcctcagcctctcaagtagctgggattacagacactcaccaccacacctggctaattttgtatttttagtagagatgaggtttcatcatgttggccaggctggtctcgaaatcctgacctcaggtgatccacccaccttggcctcccaaagtgctgggattacaggcttgagctaccacgcctggct
+
diff --git a/test-data/3unsorted.bam b/test-data/3unsorted.bam
new file mode 100644
index 0000000..e86c83a
Binary files /dev/null and b/test-data/3unsorted.bam differ
diff --git a/test-data/4.bed b/test-data/4.bed
new file mode 100644
index 0000000..6f32a4f
--- /dev/null
+++ b/test-data/4.bed
@@ -0,0 +1 @@
+chr22	30128507	31828507	uc003bnx.1_cds_2_0_chr22_29227_f	0	+
diff --git a/test-data/4.bed.bz2 b/test-data/4.bed.bz2
new file mode 100644
index 0000000..1be1c9e
Binary files /dev/null and b/test-data/4.bed.bz2 differ
diff --git a/test-data/4.bed.gz b/test-data/4.bed.gz
new file mode 100644
index 0000000..4493283
Binary files /dev/null and b/test-data/4.bed.gz differ
diff --git a/test-data/4.bed.zip b/test-data/4.bed.zip
new file mode 100644
index 0000000..754e031
Binary files /dev/null and b/test-data/4.bed.zip differ
diff --git a/test-data/454Score.pdf b/test-data/454Score.pdf
new file mode 100644
index 0000000..1199be2
--- /dev/null
+++ b/test-data/454Score.pdf
@@ -0,0 +1,545 @@
+%PDF-1.1
+%���ρ�\r
+1 0 obj
+<<
+/CreationDate (D:20080403110358)
+/ModDate (D:20080403110358)
+/Title (R Graphics Output)
+/Producer (R 2.6.2)
+/Creator (R)
+>>
+endobj
+2 0 obj
+<<
+/Type /Catalog
+/Pages 3 0 R
+>>
+endobj
+5 0 obj
+<<
+/Type /Font
+/Subtype /Type1
+/Name /F1
+/BaseFont /ZapfDingbats
+>>
+endobj
+6 0 obj
+<<
+/Type /Page
+/Parent 3 0 R
+/Contents 7 0 R
+/Resources 4 0 R
+>>
+endobj
+7 0 obj
+<<
+/Length 8 0 R
+>>
+stream
+q
+Q q 59.04 73.44 342.72 299.52 re W n
+0.000 0.000 0.000 RG
+2.25 w
+[] 0 d
+1 J
+1 j
+10.00 M
+73.40 149.79 m 86.76 149.79 l S
+0.75 w
+[ 3.00 5.00] 0 d
+80.08 100.85 m 80.08 149.79 l S
+80.08 296.61 m 80.08 263.98 l S
+0.75 w
+[] 0 d
+76.74 100.85 m 83.42 100.85 l S
+76.74 296.61 m 83.42 296.61 l S
+73.40 149.79 m
+86.76 149.79 l
+86.76 263.98 l
+73.40 263.98 l
+73.40 149.79 l
+S
+2.25 w
+[] 0 d
+90.11 280.30 m 103.47 280.30 l S
+0.75 w
+[ 3.00 5.00] 0 d
+[PDF body elided: remaining drawing operators, font and encoding objects, xref table, and trailer of an R-generated plot. Recoverable text: title "boxplot of quality scores"; x-axis label "position within read (% of total length)"; y-axis ticks 20, 25, 30; x-axis ticks 0-95; fonts Helvetica and Helvetica-Bold; 432x432 MediaBox.]
diff --git a/test-data/454Score.png b/test-data/454Score.png
new file mode 100644
index 0000000..fbe32fd
Binary files /dev/null and b/test-data/454Score.png differ
diff --git a/test-data/5.bed b/test-data/5.bed
new file mode 100644
index 0000000..646dca7
--- /dev/null
+++ b/test-data/5.bed
@@ -0,0 +1,134 @@
+chr7	115444712	115444739	CCDS5763.1_cds_0_0_chr7_115444713_f	0	+
+chr7	115468538	115468624	CCDS5763.1_cds_1_0_chr7_115468539_f	0	+
+chr7	115483024	115483277	CCDS5763.1_cds_2_0_chr7_115483025_f	0	+
+chr7	115484165	115484501	CCDS5763.1_cds_3_0_chr7_115484166_f	0	+
+chr7	115485764	115485980	CCDS5763.1_cds_4_0_chr7_115485765_f	0	+
+chr7	115486322	115486481	CCDS5763.1_cds_5_0_chr7_115486323_f	0	+
+chr7	115491298	115491487	CCDS5763.1_cds_6_0_chr7_115491299_f	0	+
+chr7	115468538	115468624	CCDS5764.1_cds_0_0_chr7_115468539_f	0	+
+chr7	115483024	115483277	CCDS5764.1_cds_1_0_chr7_115483025_f	0	+
+chr7	115484165	115484501	CCDS5764.1_cds_2_0_chr7_115484166_f	0	+
+chr7	115485764	115485980	CCDS5764.1_cds_3_0_chr7_115485765_f	0	+
+chr7	115486322	115486481	CCDS5764.1_cds_4_0_chr7_115486323_f	0	+
+chr7	115491298	115491487	CCDS5764.1_cds_5_0_chr7_115491299_f	0	+
+chr7	115733786	115733936	CCDS5766.1_cds_0_0_chr7_115733787_f	0	+
+chr7	115734264	115734452	CCDS5766.1_cds_1_0_chr7_115734265_f	0	+
+chr7	115739975	115740126	CCDS5766.1_cds_2_0_chr7_115739976_f	0	+
+chr7	115733786	115733936	CCDS5765.1_cds_0_0_chr7_115733787_f	0	+
+chr7	115739975	115740164	CCDS5765.1_cds_1_0_chr7_115739976_f	0	+
+chr7	115759067	115759097	CCDS5767.1_cds_0_0_chr7_115759068_f	0	+
+chr7	115760529	115760694	CCDS5767.1_cds_1_0_chr7_115760530_f	0	+
+chr7	115792950	115793292	CCDS5767.1_cds_2_0_chr7_115792951_f	0	+
+chr7	116096616	116096655	CCDS5768.1_cds_0_0_chr7_116096617_f	0	+
+chr7	116122131	116122195	CCDS5768.1_cds_1_0_chr7_116122132_f	0	+
+chr7	116126998	116127050	CCDS5768.1_cds_2_0_chr7_116126999_f	0	+
+chr7	116132776	116132840	CCDS5768.1_cds_3_0_chr7_116132777_f	0	+
+chr7	116138181	116138388	CCDS5768.1_cds_4_0_chr7_116138182_f	0	+
+chr7	116140267	116140347	CCDS5768.1_cds_5_0_chr7_116140268_f	0	+
+chr7	116144237	116144316	CCDS5768.1_cds_6_0_chr7_116144238_f	0	+
+chr7	116146073	116146145	CCDS5768.1_cds_7_0_chr7_116146074_f	0	+
+chr7	116150064	116150127	CCDS5768.1_cds_8_0_chr7_116150065_f	0	+
+chr7	116151731	116151872	CCDS5768.1_cds_9_0_chr7_116151732_f	0	+
+chr7	116187545	116187696	CCDS5770.1_cds_0_0_chr7_116187546_f	0	+
+chr7	116333766	116333849	CCDS5770.1_cds_1_0_chr7_116333767_f	0	+
+chr7	116353565	116353725	CCDS5770.1_cds_2_0_chr7_116353566_f	0	+
+chr7	116363797	116363852	CCDS5770.1_cds_3_0_chr7_116363798_f	0	+
+chr7	116364495	116364611	CCDS5770.1_cds_4_0_chr7_116364496_f	0	+
+chr7	116365889	116365965	CCDS5770.1_cds_5_0_chr7_116365890_f	0	+
+chr7	116368128	116368197	CCDS5770.1_cds_6_0_chr7_116368129_f	0	+
+chr7	116370085	116370240	CCDS5770.1_cds_7_0_chr7_116370086_f	0	+
+chr7	116372439	116372537	CCDS5770.1_cds_8_0_chr7_116372440_f	0	+
+chr7	116404866	116404981	CCDS5770.1_cds_9_0_chr7_116404867_f	0	+
+chr7	116423325	116423398	CCDS5770.1_cds_10_0_chr7_116423326_f	0	+
+chr7	116424838	116424941	CCDS5770.1_cds_11_0_chr7_116424839_f	0	+
+chr7	116443791	116443942	CCDS5770.1_cds_12_0_chr7_116443792_f	0	+
+chr7	116453088	116453181	CCDS5770.1_cds_13_0_chr7_116453089_f	0	+
+chr7	116455927	116456067	CCDS5770.1_cds_14_0_chr7_116455928_f	0	+
+chr7	116456865	116456985	CCDS5770.1_cds_15_0_chr7_116456866_f	0	+
+chr7	116187545	116187696	CCDS5769.1_cds_0_0_chr7_116187546_f	0	+
+chr7	116333766	116333849	CCDS5769.1_cds_1_0_chr7_116333767_f	0	+
+chr7	116353565	116353725	CCDS5769.1_cds_2_0_chr7_116353566_f	0	+
+chr7	116363797	116363852	CCDS5769.1_cds_3_0_chr7_116363798_f	0	+
+chr7	116364495	116364611	CCDS5769.1_cds_4_0_chr7_116364496_f	0	+
+chr7	116365889	116365965	CCDS5769.1_cds_5_0_chr7_116365890_f	0	+
+chr7	116370085	116370240	CCDS5769.1_cds_6_0_chr7_116370086_f	0	+
+chr7	116372439	116372537	CCDS5769.1_cds_7_0_chr7_116372440_f	0	+
+chr7	116404866	116404981	CCDS5769.1_cds_8_0_chr7_116404867_f	0	+
+chr7	116423325	116423398	CCDS5769.1_cds_9_0_chr7_116423326_f	0	+
+chr7	116424838	116424941	CCDS5769.1_cds_10_0_chr7_116424839_f	0	+
+chr7	116443791	116443942	CCDS5769.1_cds_11_0_chr7_116443792_f	0	+
+chr7	116453088	116453181	CCDS5769.1_cds_12_0_chr7_116453089_f	0	+
+chr7	116455927	116456067	CCDS5769.1_cds_13_0_chr7_116455928_f	0	+
+chr7	116463766	116463862	CCDS5769.1_cds_14_0_chr7_116463767_f	0	+
+chr7	116512159	116512389	CCDS5771.1_cds_0_0_chr7_116512160_r	0	-
+chr7	116531616	116531881	CCDS5771.1_cds_1_0_chr7_116531617_r	0	-
+chr7	116549075	116549353	CCDS5771.1_cds_2_0_chr7_116549076_r	0	-
+chr7	116554571	116554798	CCDS5771.1_cds_3_0_chr7_116554572_r	0	-
+chr7	116556911	116556994	CCDS5771.1_cds_4_0_chr7_116556912_r	0	-
+chr7	116597600	116597753	CCDS5772.1_cds_0_0_chr7_116597601_r	0	-
+chr7	116601356	116601470	CCDS5772.1_cds_1_0_chr7_116601357_r	0	-
+chr7	116602616	116602722	CCDS5772.1_cds_2_0_chr7_116602617_r	0	-
+chr7	116613942	116614052	CCDS5772.1_cds_3_0_chr7_116613943_r	0	-
+chr7	116615015	116615072	CCDS5772.1_cds_4_0_chr7_116615016_r	0	-
+chr7	116616073	116616149	CCDS5772.1_cds_5_0_chr7_116616074_r	0	-
+chr7	116616990	116617115	CCDS5772.1_cds_6_0_chr7_116616991_r	0	-
+chr7	116618730	116618865	CCDS5772.1_cds_7_0_chr7_116618731_r	0	-
+chr7	116619702	116619814	CCDS5772.1_cds_8_0_chr7_116619703_r	0	-
+chr7	116654167	116654279	CCDS5772.1_cds_9_0_chr7_116654168_r	0	-
+chr7	116656241	116656364	CCDS5772.1_cds_10_0_chr7_116656242_r	0	-
+chr7	116660840	116660940	CCDS5772.1_cds_11_0_chr7_116660841_r	0	-
+chr7	116661360	116661465	CCDS5772.1_cds_12_0_chr7_116661361_r	0	-
+chr7	116714099	116714152	CCDS5773.1_cds_0_0_chr7_116714100_f	0	+
+chr7	116738257	116738368	CCDS5773.1_cds_1_0_chr7_116738258_f	0	+
+chr7	116743038	116743147	CCDS5773.1_cds_2_0_chr7_116743039_f	0	+
+chr7	116764903	116765119	CCDS5773.1_cds_3_0_chr7_116764904_f	0	+
+chr7	116768280	116768370	CCDS5773.1_cds_4_0_chr7_116768281_f	0	+
+chr7	116769252	116769416	CCDS5773.1_cds_5_0_chr7_116769253_f	0	+
+chr7	116770552	116770678	CCDS5773.1_cds_6_0_chr7_116770553_f	0	+
+chr7	116774104	116774351	CCDS5773.1_cds_7_0_chr7_116774105_f	0	+
+chr7	116776020	116776113	CCDS5773.1_cds_8_0_chr7_116776021_f	0	+
+chr7	116782645	116782828	CCDS5773.1_cds_9_0_chr7_116782646_f	0	+
+chr7	116793468	116793660	CCDS5773.1_cds_10_0_chr7_116793469_f	0	+
+chr7	116821743	116821838	CCDS5773.1_cds_11_0_chr7_116821744_f	0	+
+chr7	116824357	116824444	CCDS5773.1_cds_12_0_chr7_116824358_f	0	+
+chr7	116825938	116826662	CCDS5773.1_cds_13_0_chr7_116825939_f	0	+
+chr7	116828934	116829063	CCDS5773.1_cds_14_0_chr7_116828935_f	0	+
+chr7	116836830	116836868	CCDS5773.1_cds_15_0_chr7_116836831_f	0	+
+chr7	116837536	116837787	CCDS5773.1_cds_16_0_chr7_116837537_f	0	+
+chr7	116840678	116840758	CCDS5773.1_cds_17_0_chr7_116840679_f	0	+
+chr7	116844523	116844674	CCDS5773.1_cds_18_0_chr7_116844524_f	0	+
+chr7	116845585	116845813	CCDS5773.1_cds_19_0_chr7_116845586_f	0	+
+chr7	116848617	116848718	CCDS5773.1_cds_20_0_chr7_116848618_f	0	+
+chr7	116861526	116861775	CCDS5773.1_cds_21_0_chr7_116861527_f	0	+
+chr7	116876442	116876598	CCDS5773.1_cds_22_0_chr7_116876443_f	0	+
+chr7	116886846	116886936	CCDS5773.1_cds_23_0_chr7_116886847_f	0	+
+chr7	116898692	116898865	CCDS5773.1_cds_24_0_chr7_116898693_f	0	+
+chr7	116899463	116899569	CCDS5773.1_cds_25_0_chr7_116899464_f	0	+
+chr7	116900912	116901113	CCDS5773.1_cds_26_0_chr7_116900913_f	0	+
+chr7	116945541	116945787	CCDS5774.1_cds_0_0_chr7_116945542_r	0	-
+chr7	116952022	116952124	CCDS5774.1_cds_1_0_chr7_116952023_r	0	-
+chr7	116953508	116953641	CCDS5774.1_cds_2_0_chr7_116953509_r	0	-
+chr7	116955071	116955135	CCDS5774.1_cds_3_0_chr7_116955072_r	0	-
+chr7	116958551	116958737	CCDS5774.1_cds_4_0_chr7_116958552_r	0	-
+chr7	116959056	116959262	CCDS5774.1_cds_5_0_chr7_116959057_r	0	-
+chr7	116962093	116962272	CCDS5774.1_cds_6_0_chr7_116962094_r	0	-
+chr7	116968917	116969105	CCDS5774.1_cds_7_0_chr7_116968918_r	0	-
+chr7	116969273	116969426	CCDS5774.1_cds_8_0_chr7_116969274_r	0	-
+chr7	116979835	116979935	CCDS5774.1_cds_9_0_chr7_116979836_r	0	-
+chr7	116980017	116980104	CCDS5774.1_cds_10_0_chr7_116980018_r	0	-
+chr7	116990559	116990639	CCDS5774.1_cds_11_0_chr7_116990560_r	0	-
+chr7	116991879	116991975	CCDS5774.1_cds_12_0_chr7_116991880_r	0	-
+chr7	116994439	116994715	CCDS5774.1_cds_13_0_chr7_116994440_r	0	-
+chr7	117001063	117001181	CCDS5774.1_cds_14_0_chr7_117001064_r	0	-
+chr7	117011515	117011770	CCDS5774.1_cds_15_0_chr7_117011516_r	0	-
+chr7	117014445	117014596	CCDS5774.1_cds_16_0_chr7_117014446_r	0	-
+chr7	117016866	117016966	CCDS5774.1_cds_17_0_chr7_117016867_r	0	-
+chr7	117018255	117018459	CCDS5774.1_cds_18_0_chr7_117018256_r	0	-
+chr7	117025132	117026786	CCDS5774.1_cds_19_0_chr7_117025133_r	0	-
+chr7	117044769	117044994	CCDS5774.1_cds_20_0_chr7_117044770_r	0	-
+chr7	117095213	117095321	CCDS5774.1_cds_21_0_chr7_117095214_r	0	-
+chr7	117107339	117107420	CCDS5774.1_cds_22_0_chr7_117107340_r	0	-
+chr5	131424298	131424460	CCDS4149.1_cds_0_0_chr5_131424299_f	0	+
+chr5	131424558	131424600	CCDS4149.1_cds_1_0_chr5_131424559_f	0	+
+chr5	131425903	131425993	CCDS4149.1_cds_2_0_chr5_131425904_f	0	+
+chr5	131426117	131426159	CCDS4149.1_cds_3_0_chr5_131426118_f	0	+
diff --git a/test-data/5.gff b/test-data/5.gff
new file mode 100644
index 0000000..e2a489a
--- /dev/null
+++ b/test-data/5.gff
@@ -0,0 +1,25 @@
+##gff-version 2
+##Date: Thu Mar 23 11:21:17 2006
+##bed2gff.pl $Rev: 601 $
+##Input file: ./database/files/61c6c604e0ef50b280e2fd9f1aa7da61.dat
+
+chr1	bed2gff	CCDS1000.1_cds_0_0_chr1_148325916_f	148325916	148325975	.	+	.	score "0";
+chr2	bed2gff	CCDS2120.1_cds_0_0_chr2_118288584_f	118288584	118288668	.	+	.	score "0";
+chr5	bed2gff	CCDS3972.1_cds_0_0_chr5_56241230_f	56241230	56241327	.	+	.	score "0";
+chr6	bed2gff	CCDS4855.1_cds_0_0_chr6_41411593_f	41411593	41411644	.	+	.	score "0";
+chr7	bed2gff	CCDS5401.1_cds_0_0_chr7_26907299_r	26907299	26907654	.	+	.	score "0";
+chr8	bed2gff	CCDS6324.1_cds_0_0_chr8_118881132_r	118881132	118881317	.	+	.	score "0";
+chr9	bed2gff	CCDS6914.1_cds_0_0_chr9_128764157_f	128764157	128764189	.	+	.	score "0";
+chr10	bed2gff	CCDS7248.1_cds_0_0_chr10_55251624_r	55251624	55253124	.	+	.	score "0";
+chr11	bed2gff	CCDS7725.1_cds_0_0_chr11_1731309_r	1731309	1731476	.	+	.	score "0";
+chr12	bed2gff	CCDS8736.1_cds_0_0_chr12_38440095_r	38440095	38440321	.	+	.	score "0";
+chr13	bed2gff	CCDS9526.1_cds_0_0_chr13_112381695_f	112381695	112381953	.	+	.	score "0";
+chr14	bed2gff	CCDS9949.1_cds_0_0_chr14_98710241_r	98710241	98712285	.	+	.	score "0";
+chr15	bed2gff	CCDS10096.1_cds_0_0_chr15_41486873_r	41486873	41487060	.	+	.	score "0";
+chr16	bed2gff	CCDS10395.1_cds_0_0_chr16_37430_r	37430	37557	.	+	.	score "0";
+chr18	bed2gff	CCDS11891.1_cds_0_0_chr18_23786115_r	23786115	23786321	.	+	.	score "0";
+chr19	bed2gff	CCDS12866.1_cds_0_0_chr19_59068596_f	59068596	59069564	.	+	.	score "0";
+chr20	bed2gff	CCDS13249.1_cds_0_0_chr20_33330414_r	33330414	33330423	.	+	.	score "0";
+chr21	bed2gff	CCDS13614.1_cds_0_0_chr21_32707033_f	32707033	32707192	.	+	.	score "0";
+chr22	bed2gff	CCDS13897.1_cds_0_0_chr22_30120224_f	30120224	30120265	.	+	.	score "0";
+chrX	bed2gff	CCDS14606.1_cds_0_0_chrX_122745048_f	122745048	122745924	.	+	.	score "0";
diff --git a/test-data/5.gff3 b/test-data/5.gff3
new file mode 100644
index 0000000..c6263c0
--- /dev/null
+++ b/test-data/5.gff3
@@ -0,0 +1,150 @@
+##gff-version 3
+##date Tue Jun 26 10:48:17 2007
+##sequence-region ctgA 1 50000
+##source gbrowse GFFToGalaxyDumper plugin
+##NOTE: All features dumped.
+ctgA	example	my_feature	22132	24633	.	+	.	ID=My_feature:f15
+ctgA	example	my_feature	46990	48410	.	-	.	ID=My_feature:f11
+ctgA	example	my_feature	44705	47713	.	-	.	ID=My_feature:f01
+ctgA	example	my_feature	36649	40440	.	-	.	ID=My_feature:f03
+ctgA	example	my_feature	23072	23185	.	+	.	ID=My_feature:f14
+ctgA	example	my_feature	37242	38653	.	+	.	ID=My_feature:f04
+ctgA	example	motif	37497	40559	.	-	.	ID=Motif:m15;Note=7-transmembrane
+ctgA	example	my_feature	36034	38167	.	+	.	ID=My_feature:f09
+ctgA	example	motif	28332	30033	.	-	.	ID=Motif:m02;Note=HOX
+ctgA	example	my_feature	4715	5968	.	-	.	ID=My_feature:f05
+ctgA	example	motif	48253	48366	.	+	.	ID=Motif:m01;Note=WD40
+ctgA	example	BAC	1000	20000	.	.	.	ID=BAC:b101.2;Note=Fingerprinted+BAC+with+end+reads
+ctgA	example	right_end_read	19500	20000	.	-	.	Parent=BAC:b101.2
+ctgA	example	left_end_read	1000	1500	.	+	.	Parent=BAC:b101.2
+ctgA	example	motif	13801	14007	.	-	.	ID=Motif:m05;Note=helix+loop+helix
+ctgA	example	coding	1050	9000	.	+	.	ID=mRNA:EDEN.1;Gene=EDEN
+ctgA	example	CDS	1201	1500	.	+	0	Parent=mRNA:EDEN.1
+ctgA	example	CDS	3000	3902	.	+	0	Parent=mRNA:EDEN.1
+ctgA	example	CDS	5000	5500	.	+	0	Parent=mRNA:EDEN.1
+ctgA	example	CDS	7000	7608	.	+	0	Parent=mRNA:EDEN.1
+ctgA	example	processed_transcript	1050	9000	.	+	.	ID=mRNA:EDEN.1
+ctgA	example	5'-UTR	1050	1200	.	+	.	Parent=mRNA:EDEN.1
+ctgA	example	3'-UTR	7609	9000	.	+	.	Parent=mRNA:EDEN.1
+ctgA	est	match	5410	7503	.	-	.	ID=EST:agt830.3;Target=agt830.3+1+595
+ctgA	est	HSP	7000	7503	.	-	.	Parent=EST:agt830.3;Target=agt830.3+1+504
+ctgA	est	HSP	5410	5500	.	-	.	Parent=EST:agt830.3;Target=agt830.3+505+595
+ctgA	example	motif	46012	48851	.	+	.	ID=Motif:m09;Note=kinase
+ctgA	example	match	6885	8999	.	-	.	ID=Match:seg03
+ctgA	example	HSP	8306	8999	.	-	.	Parent=Match:seg03
+ctgA	example	HSP	8055	8080	.	-	.	Parent=Match:seg03
+ctgA	example	HSP	7410	7737	.	-	.	Parent=Match:seg03
+ctgA	example	HSP	6885	7241	.	-	.	Parent=Match:seg03
+ctgA	example	my_feature	13280	16394	.	+	.	ID=My_feature:f08
+ctgA	example	match	29771	32937	.	+	.	ID=Match:seg10
+ctgA	example	HSP	29771	29942	.	+	.	Parent=Match:seg10
+ctgA	example	HSP	30042	30340	.	+	.	Parent=Match:seg10
+ctgA	example	HSP	30810	31307	.	+	.	Parent=Match:seg10
+ctgA	example	HSP	31761	31984	.	+	.	Parent=Match:seg10
+ctgA	example	HSP	32374	32937	.	+	.	Parent=Match:seg10
+ctgA	example	match	36616	37227	.	-	.	ID=Match:seg09
+ctgA	example	HSP	37208	37227	.	-	.	Parent=Match:seg09
+ctgA	example	HSP	36616	37057	.	-	.	Parent=Match:seg09
+ctgA	example	motif	11911	15561	.	+	.	ID=Motif:m11;Note=kinase
+ctgA	est	match	1050	3202	.	+	.	ID=EST:agt830.5;Target=agt830.5+1+654
+ctgA	est	HSP	1050	1500	.	+	.	Parent=EST:agt830.5;Target=agt830.5+1+451
+ctgA	est	HSP	3000	3202	.	+	.	Parent=EST:agt830.5;Target=agt830.5+452+654
+ctgA	example	motif	15396	16159	.	+	.	ID=Motif:m03;Note=zinc+finger
+ctgA	est	match	1150	7200	.	+	.	ID=EST:agt767.5;Target=agt767.5+1+1153
+ctgA	est	HSP	1150	1500	.	+	.	Parent=EST:agt767.5;Target=agt767.5+1+351
+ctgA	est	HSP	5000	5500	.	+	.	Parent=EST:agt767.5;Target=agt767.5+352+852
+ctgA	est	HSP	7000	7200	.	+	.	Parent=EST:agt767.5;Target=agt767.5+853+1153
+ctgA	est	match	1050	7300	.	+	.	ID=EST:agt221.5;Target=agt221.5+1+1253
+ctgA	est	HSP	1050	1500	.	+	.	Parent=EST:agt221.5;Target=agt221.5+1+451
+ctgA	est	HSP	5000	5500	.	+	.	Parent=EST:agt221.5;Target=agt221.5+452+952
+ctgA	est	HSP	7000	7300	.	+	.	Parent=EST:agt221.5;Target=agt221.5+953+1253
+ctgA	example	my_feature	19157	22915	.	-	.	ID=My_feature:f13
+ctgA	est	match	8000	9000	.	-	.	ID=EST:agt767.3;Target=agt767.3+1+1001
+ctgA	est	HSP	8000	9000	.	-	.	Parent=EST:agt767.3;Target=agt767.3+1+1001
+ctgA	example	motif	28342	28447	.	-	.	ID=Motif:m10;Note=DEAD+box
+ctgA	example	motif	17667	17690	.	+	.	ID=Motif:m13;Note=DEAD+box
+ctgA	example	trace	44401	45925	.	+	.	ID=name:trace;trace=volvox_trace.scf
+ctgA	example	match	26122	34466	.	+	.	ID=Match:seg02
+ctgA	example	HSP	26122	26126	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	26497	26869	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	27201	27325	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	27372	27433	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	27565	27565	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	27813	28091	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	28093	28201	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	28329	28377	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	28829	29194	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	29517	29702	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	29713	30061	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	30329	30774	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	30808	31306	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	31516	31729	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	31753	32154	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	32595	32696	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	32892	32901	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	33127	33388	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	33439	33443	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	33759	34209	.	+	.	Parent=Match:seg02
+ctgA	example	HSP	34401	34466	.	+	.	Parent=Match:seg02
+ctgA	example	contig	1	50000	.	.	.	ID=Contig:ctgA
+ctgA	example	match	41137	47829	.	-	.	ID=Match:seg14
+ctgA	example	HSP	47449	47829	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	46816	46992	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	46092	46318	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	45790	46022	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	45231	45488	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	44763	45030	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	44065	44556	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	43395	43811	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	42890	43270	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	42057	42474	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	41754	41948	.	-	.	Parent=Match:seg14
+ctgA	example	HSP	41137	41318	.	-	.	Parent=Match:seg14
+ctgA	example	match	12531	15870	.	+	.	ID=Match:seg12
+ctgA	example	HSP	12531	12895	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	13122	13449	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	13452	13745	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	13908	13965	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	13998	14488	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	14564	14899	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	15185	15276	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	15639	15736	.	+	.	Parent=Match:seg12
+ctgA	example	HSP	15745	15870	.	+	.	Parent=Match:seg12
+ctgA	est	match	7500	8000	.	-	.	ID=EST:agt221.3;Target=agt221.3+1+501
+ctgA	est	HSP	7500	8000	.	-	.	Parent=EST:agt221.3;Target=agt221.3+1+501
+ctgA	example	coding	1300	9000	.	+	.	ID=mRNA:EDEN.3;Gene=EDEN
+ctgA	example	CDS	3301	3902	.	+	0	Parent=mRNA:EDEN.3
+ctgA	example	CDS	5000	5500	.	+	1	Parent=mRNA:EDEN.3
+ctgA	example	CDS	7000	7600	.	+	1	Parent=mRNA:EDEN.3
+ctgA	example	processed_transcript	1300	9000	.	+	.	ID=mRNA:EDEN.3
+ctgA	example	5'-UTR	1300	1500	.	+	.	Parent=mRNA:EDEN.3
+ctgA	example	5'-UTR	3000	3300	.	+	.	Parent=mRNA:EDEN.3
+ctgA	example	3'-UTR	7601	9000	.	+	.	Parent=mRNA:EDEN.3
+ctgA	example	match	26503	35904	.	-	.	ID=Match:seg05
+ctgA	example	HSP	35642	35904	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	35333	35507	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	34605	34983	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	34244	34313	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	33438	33868	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	33053	33325	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	32208	32680	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	32010	32057	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	31421	31817	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	31232	31236	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	30465	30798	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	30108	30216	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	29513	29647	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	28777	29058	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	28225	28316	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	27887	28076	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	27448	27860	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	27172	27185	.	-	.	Parent=Match:seg05
+ctgA	example	HSP	26503	26799	.	-	.	Parent=Match:seg05
+ctgA	example	match	49406	50000	.	+	.	ID=Match:seg13
+ctgA	example	HSP	49406	49476	.	+	.	Parent=Match:seg13
+ctgA	example	HSP	49762	50000	.	+	.	Parent=Match:seg13
+ctgA	example	gene	1050	9000	.	+	.	ID=Gene:EDEN;Note=protein+kinase
+ctgA	example	motif	33325	35791	.	+	.	ID=Motif:m04;Note=Ig-like
+ctgA	example	match	31785	32359	.	+	.	ID=Match:seg01
+ctgA	example	HSP	31785	31939	.	+	.	Parent=Match:seg01
+ctgA	example	HSP	32329	32359	.	+	.	Parent=Match:seg01
diff --git a/test-data/6.bed b/test-data/6.bed
new file mode 100644
index 0000000..38236d4
--- /dev/null
+++ b/test-data/6.bed
@@ -0,0 +1,10 @@
+chr7	115444712	115444739	CCDS5763.1_cds_0_0_chr7_115444713_f	0	+
+chr7	115468538	115468624	CCDS5763.1_cds_1_0_chr7_115468539_f	0	+
+chr7	115483024	115483277	CCDS5763.1_cds_2_0_chr7_115483025_f	0	+
+chr7	115484165	115484501	CCDS5763.1_cds_3_0_chr7_115484166_f	0	+
+chr7	115485764	115485980	CCDS5763.1_cds_4_0_chr7_115485765_f	0	+
+chr7	115486322	115486481	CCDS5763.1_cds_5_0_chr7_115486323_f	0	+
+chr7	115491298	115491487	CCDS5763.1_cds_6_0_chr7_115491299_f	0	+
+chr7	115468538	115468624	CCDS5764.1_cds_0_0_chr7_115468539_f	0	+
+chr7	115483024	115483277	CCDS5764.1_cds_1_0_chr7_115483025_f	0	+
+chr7	115484165	115484501	CCDS5764.1_cds_2_0_chr7_115484166_f	0	+
diff --git a/test-data/7.bed b/test-data/7.bed
new file mode 100644
index 0000000..5a071f4
--- /dev/null
+++ b/test-data/7.bed
@@ -0,0 +1,29 @@
+chr1	147962006	147975713	NM_005997	0	-	147962192	147975670	0	6	574,145,177,115,153,160,	0,1543,7859,9048,9340,13547,
+chr1	147984101	148035079	BC007833	0	+	147984545	148033414	0	14	529,32,81,131,118,153,300,206,84,49,85,130,46,1668,	0,25695,28767,33118,33695,33998,35644,38005,39629,40577,41402,43885,48367,49310,
+chr1	148077485	148111797	NM_002651	0	-	148078400	148111728	0	12	1097,121,133,266,124,105,110,228,228,45,937,77,	0,2081,2472,6871,9907,10257,11604,14199,15637,18274,23636,34235,
+chr1	148185113	148187485	NM_002796	0	+	148185136	148187378	0	7	163,207,147,82,117,89,120,	0,416,877,1199,1674,1977,2252,
+chr2	118288484	118306183	NM_006773	0	+	118288583	118304530	0	14	184,285,144,136,101,200,115,140,162,153,114,57,178,1796,	0,2765,4970,6482,6971,7183,7468,9890,10261,10768,11590,14270,14610,15903,
+chr2	118389378	118390700	BC005078	0	-	118390395	118390500	0	1	1322,	0,
+chr2	220108603	220116964	NM_001927	0	+	220108689	220116217	0	9	664,61,96,162,126,221,44,83,789,	0,1718,1874,2118,2451,2963,5400,7286,7572,
+chr2	220229182	220233943	NM_024536	0	-	220229609	220233765	0	4	1687,180,574,492,	0,1990,2660,4269,
+chr5	131170738	131357870	AF099740	0	-	131311206	131357817	0	31	112,124,120,81,65,40,120,129,61,88,94,79,72,102,144,117,89,73,96,135,135,78,74,52,33,179,100,102,65,115,248,	0,11593,44117,47607,104668,109739,114675,126366,135488,137518,138009,140437,152389,153373,155388,159269,160793,162981,164403,165577,166119,167611,169501,178260,179675,180901,181658,182260,182953,183706,186884,
+chr5	131424245	131426795	NM_000588	0	+	131424298	131426383	0	5	215,42,90,42,535,	0,313,1658,1872,2015,
+chr5	131556201	131590458	NM_004199	0	-	131556601	131582218	0	15	471,97,69,66,54,100,71,177,194,240,138,152,97,100,170,	0,2316,2802,5596,6269,11138,11472,15098,16528,17674,21306,24587,25142,25935,34087,
+chr5	131621285	131637046	NM_003687	0	+	131621326	131635821	0	7	134,152,82,179,164,118,1430,	0,4915,8770,13221,13609,14097,14331,
+chr6	108298214	108386086	NM_007214	0	-	108299600	108385906	0	21	1530,105,99,102,159,174,60,83,148,155,93,133,95,109,51,59,62,113,115,100,304,	0,2490,6246,10831,12670,23164,23520,27331,31052,32526,34311,36130,36365,38609,41028,42398,43048,51479,54500,59097,87568,
+chr6	108593954	108616704	NM_003269	0	+	108594662	108615360	0	9	733,146,88,236,147,97,150,106,1507,	0,5400,8778,10445,12037,14265,14749,15488,21243,
+chr6	108639410	108689143	NM_152827	0	-	108640045	108688818	0	3	741,125,487,	0,2984,49246,
+chr6	108722790	108950942	NM_145315	0	+	108722976	108950321	0	13	325,224,52,102,131,100,59,83,71,101,141,114,750,	0,28931,52094,60760,61796,71339,107102,152319,181970,182297,215317,224802,227402,
+chr7	113320332	113924911	AK131266	0	+	113862563	113893433	0	20	285,91,178,90,58,75,138,51,201,178,214,105,88,84,77,102,122,70,164,1124,	0,201692,340175,448290,451999,484480,542213,543265,543478,545201,556083,558358,565876,567599,573029,573245,575738,577123,577946,603455,
+chr7	116511232	116557294	NM_003391	0	-	116512159	116556994	0	5	1157,265,278,227,383,	0,20384,37843,43339,45679,
+chr7	116713967	116902666	NM_000492	0	+	116714099	116901113	0	27	185,111,109,216,90,164,126,247,93,183,192,95,87,724,129,38,251,80,151,228,101,249,156,90,173,106,1754,	0,24290,29071,50936,54313,55285,56585,60137,62053,68678,79501,107776,110390,111971,114967,122863,123569,126711,130556,131618,134650,147559,162475,172879,184725,185496,186945,
+chr7	116944658	117107512	AF377960	0	-	116945541	116979926	0	23	1129,102,133,64,186,206,179,188,153,100,87,80,96,276,118,255,151,100,204,1654,225,108,173,	0,7364,8850,10413,13893,14398,17435,24259,24615,35177,35359,45901,47221,49781,56405,66857,69787,72208,73597,80474,100111,150555,162681,
+chr8	118880786	119193239	NM_000127	0	-	118881131	119192466	0	11	531,172,161,90,96,119,133,120,108,94,1735,	0,5355,7850,13505,19068,20309,23098,30863,36077,37741,310718,
+chr9	128763240	128783870	NM_174933	0	+	128764156	128783586	0	12	261,118,74,159,76,48,56,63,129,117,127,370,	0,522,875,5630,12374,12603,15040,15175,18961,19191,20037,20260,
+chr9	128787362	128789566	NM_014908	0	-	128787519	128789136	0	1	2204,	0,
+chr9	128789530	128848928	NM_015354	0	+	128789552	128848511	0	44	54,55,74,85,81,45,93,120,212,115,201,90,66,120,127,153,127,88,77,115,121,67,129,140,107,207,170,70,68,196,78,86,146,182,201,93,159,138,75,228,132,74,130,594,	0,1491,5075,8652,9254,10312,11104,11317,20808,21702,23060,25462,31564,32908,33566,34851,35204,35595,35776,37202,38860,39111,39891,40349,42422,45499,45827,46675,47158,47621,50453,50840,51474,51926,53831,54186,55119,55619,57449,57605,57947,58352,58541,58804,
+chr9	128849867	128870133	NM_020145	0	-	128850516	128869987	0	11	757,241,101,90,24,63,93,134,129,142,209,	0,1071,1736,2085,2635,4201,6376,6736,13056,14247,20057,
+chrX	122719582	122773357	NM_001167	0	+	122745047	122766566	0	7	96,909,100,79,43,201,6985,	0,25433,28421,31040,32533,40295,46790,
+chrX	152648233	152662158	NM_000425	0	-	152648964	152662138	0	28	963,12,73,135,156,120,174,123,202,116,223,71,198,111,125,157,167,112,144,132,185,112,171,123,203,106,11,100,	0,1436,1545,1951,2390,2653,2889,3156,3367,3772,4717,5122,5424,5868,6066,6370,6629,6909,7588,7871,8124,8456,8858,9125,10220,10660,11296,13825,
+chrX	152691216	152693487	NM_000054	0	+	152691446	152693029	0	3	255,885,664,	0,616,1607,
+chrX	152693677	152712545	NM_001666	0	-	152694029	152712503	0	22	586,100,93,184,74,234,106,135,78,61,103,28,85,192,102,222,129,183,63,163,205,109,	0,1693,2066,2364,2635,2794,3129,3323,3545,3752,5323,5647,5841,6032,6401,11455,11778,13249,13719,13987,14227,18759,
diff --git a/test-data/8.bed b/test-data/8.bed
new file mode 100644
index 0000000..b9a0097
--- /dev/null
+++ b/test-data/8.bed
@@ -0,0 +1 @@
+chrX	151007096	151290393	BC028629	0	-	151007267	151203610	0	10	507,212,153,144,83,221,68,122,166,208,	0,21673,29576,39944,56706,87721,116611,177524,196374,283089,
\ No newline at end of file
diff --git a/test-data/9.bed b/test-data/9.bed
new file mode 100644
index 0000000..1d67053
--- /dev/null
+++ b/test-data/9.bed
@@ -0,0 +1 @@
+chr28	346187	388197	BC114771	0	+	346187	388197	0	9	144,81,115,63,155,96,134,105,112,	0,24095,26190,31006,32131,33534,36994,41793,41898,
\ No newline at end of file
diff --git a/test-data/GRCm38mm10_chr5_34761740-34912521.fa b/test-data/GRCm38mm10_chr5_34761740-34912521.fa
new file mode 100644
index 0000000..dc32096
--- /dev/null
+++ b/test-data/GRCm38mm10_chr5_34761740-34912521.fa
@@ -0,0 +1,5421 @@
+>mm10_knownGene_uc008xda.1 range=chr5:34761740-34814709 5'pad=0 3'pad=0 strand=+ repeatMasking=none
+GCACTCGCCGCGAGGGTTGCCGGGACGGGCCCAAGATGGCTGAGCGCCTT
+GGTTCCGCTTCTGCCTGCCGCGCAGAGCCCCATTCATTGCCTTGCTGCTA
+AGTGGCGCCGCGTAGTGCCAGTAGGCTCCAAGTCTTCAGGGTCTGTCCCA
+TCGGGCAGGAAGCCGTCATGGCAACCCTGGAAAAGCTGATGAAGGCTTTC
+GAGTCGCTCAAGTCGTTTCAGCAGCAACAGCAGCAGCAGCCACCGCCGCA
+GGCGCCGCCGCCACCGCCGCCGCCGCCTCCGCCTCAACCCCCTCAGCCGC
+CGCCTCAGGGGCAGCCGCCGCCGCCACCACCGCCGCTGCCAGGTCCGGCA
+GAGGAACCGCTGCACCGACCGTGAGTCCGGGCGCCGCAGCTCCCGCCCGG
+GCCCCGCGCCCCTGGCCTGCGTGCTGGGCATGGCCAACACTGTTCCCTGT
+CCAGAGGGTCGCGGTACCTCCCTGAGGCCAGGCTTTCCCGGCCCGGGCCC
+TCGTCTTGCGGGGTCTCTGGCCTCCCTCAGAGGAGACAGAGCCGGGTCAG
+GCCAGCCAGGGACTCGCTGAGGGGCGTCACGACTCCAGTGCCTTCGCCGT
+TCCCAGTTTGCGAAGTTAGGGAACGAACTTGTTTCTCTCTTCTGGAGAAA
+CTGGGGCGGTGGCGCACATGACTGTTGTGAAGAGAACTTGGAGAGGCAGA
+GATCTCTAGGGTTACCTCCTCATCAGGCCTAAGAGCTGGGAGTGCAGGAC
+AGCGTGAGAGATGTGCGGGTAGTGGATGACATAATGCTTTTAGGAGGTCT
+CGGCGGGAGTGCTGAGGGCGGGGGAGTGTGAACGCATCCAATGGGATATT
+CTTTTTCCAAGTGACACTTGAAGCAGCCTGTGACTCGAGGCACTTCGTAC
+TCTCCTGGCGTTTCATTTAGTTTGTGGTGTAGTGTAGTTAAACCAGGTTT
+TAAGCATAGCCAGAGAGGTGTGCTTCTGTGTGTCTGCAGGCAGTTGGATG
+AGTTGTATTTGTCAAGTACATGGTGAGTTACTTAGGTGTGATTATTAATA
+AAAAACTATATGTGTGCATATATATGAAAGAGTCGACTTATACTTAACTG
+CCTATCGATTTTTTGTTCTATATAAAACGGATACATTGGTGGTGCTCAGT
+TTTCACCGGGGAATGAATTTTACTAGTGTTGCAGACAGGCTTGTTTTAGA
+ACATAGGCCACTCTGACTCTGACTTTGTGCCAGTAAAAGTTCCTGTTTAG
+TTCTTTGCTGACATCTTATAGATCTTTGGAAGCTAGCTGCTTGTGACTGG
+AGAGAATATTGAAACAGAAGAGAGACCATGAGTCACAGTGCTCTAAGAGA
+AAAGAGACGCTCAAAACATTTCCTGGAAATCCATGCTGAGTGTTGAGCCC
+TGTGCTCTCTTGCAGCTCAGTCCTTTCTCTCAACTCTGGGCATTTTATTT
+CTAATCTGGATTTGTATAATTAATAAGGAGAACTTTTGGGAACAACCTAC
+TAAAGAATGTCATCATTAAAACTCACTTAGAAAATAAGTGTTCTGGTGAT
+ATCATTGAGCTATGTTCCCAGTCCTGAGAGTTTGTTTTTTTTTTTTTTTT
+TAAATAAAGATTTGGGGAGAAAAGGTGGCTTACTTGATAGAACAAAATAT
+AGGAATAAAATTTCCTTCTATAAGGTGAAAAGTGTGAATAGAAAACTTCT
+TATCCTCTAGATAAGTAGTTTCTTTTTGCTTTTGAGAGTCTCACTATGTA
+ACTCTTGACCTGAACTCAGAGAGATCCATCCTCCTGCCTCTGCCTCCTCT
+CTCTGGGATTAAAGGCATGTGGCACCATGCTGGGCTGTCCAAGTATGCCA
+CAGACCCTCTAGGTCCCTGGTCTTCGAGGAACGGGATTTCTTAGGCAGAT
+GGGTAAGGAGTCGGATGAAAATGACAATCAGCCACACACAAGAGAGGTGT
+TGAATCTGAATGTAATGTTCTGGTTGAGCTTCAGACTTATATAACAACGA
+ATTATCAGAGGATACAAATCACAAAAAGACAAGATACACTGAAATTCACC
+AGTTACAGCAGAAAGGAATTTGCAGGGACTAATTAAATGTTTACATTAGG
+GATAACAAGCCCTGCCTAGGATCAGCCTAATGCCAGGCAAGAATTTCACA
+CTTTAAGGTTAAAAGCATCAGGGGGTTGTTAACTCTTGACAGGCCTTAAG
+AGTAATGTGCTATCACTGAGCTCTAAATTCTTAGGTCTAGTAAAACTTAT
+CCTGTCTGGAGAGTTCCCCCTTATCAGGGTAGTATATCAACTTATACTTG
+ACATGGAATGAAGCCTGTAGTAAAACATTTCTATCTCAGTGAGACTTTTA
+GTCTCTATCTGTAAACAGCTGAGTAAAATGGCAAGTGCTTAATTGTTTAC
+TGAATGGGTTAAGCTCCTTGCTGCTATCTGGAATCTAAGAACACTGGGGA
+AAGGCTTTAGCTATGTTAGAATACAATATTAAAAGGCATTTACTATAAGG
+TGATGCTTAATAGAGTGCACGTGAATCTATACACTAGATTAATGTGGTGG
+AAATTTGAATATAATGGGTTAGGGAAAGAGATGCCATAACTCTGGGAGGA
+AAATTTCCCTGGACTCTTATCCTCGTGAAACAGCTTCCAGGCTTTTCGCC
+TGACAAACCGATCCAAACTGGAGAGTTGGCTTTCGCCAGAATATCCAGGA
+GGAGAGTCCTAGAAATTCATTTCTCATGAGCAGCTTTTTGGCATTTTTGC
+CTCACAAGCTGACTCCACCAGAGTACCCTGACACAAGTATTGTCTAGTTA
+TTTTGATTATTACCATGACTCTGCCTCTGGGTGAGAGGAATTGTGGAAGT
+TTACATATTCCCCATATCTTCTATAAACCTCTGTGTGTGTGTGTGTGTGT
+GTGTGTGTGTGTGTGTGTGTGTGTATGAGGGAGAGAGAGGGAGAGAGAGA
+GAGGGAGGGAGGAAGAGAGAGAGAGAGATTGTTCTGTGCCTGCTTCGAAC
+ACAAATTAGTTTGCAAAAGTAATTCATTAACATGATACAGTCCCAAAGAT
+AAAAATGGTTAAATAATGAAAACATCTCCCTCCCCATTTTCCTAACTTTG
+TACCCAGGAGCAAGCTCTGTTACACTTCATTTGTCCTTCCAGATAAAATT
+TGGGCATATGTTAGGACAGAATTTTAAATTATTTACAAACAAAAGTATTT
+TGGAACAAAAGCTTTTAAAAGCTTTTATTTTAATAAAATAACTTGTTACT
+ACACTGTATATAACTAACTAACATTTTCCAAAATTAGCTCCATTAGCATC
+TATCTCATATTTCTATGTACTTTGCTGTTGAAAAACCAAGTGTTCATTAA
+TAATAAGTAACAAACTCACTGCTTGGAAGCTTTGATTTTTGGCATTTTGT
+CCACTTGACTCAGTTAAAAGTCCTTTTTTTCGAAATGAGAACAGCCAAAA
+CAGTTTTAGAATGAGTCTGTTCTGCTTTTGTGACTCTCATTGTGTTCTGT
+AGAACCAGTGTCACAGCCATATGTGGGCCTCTGTTGAAGTAGCTGAGAAC
+TTGTTCTCTGCTCTGCTAGCTGCTGTCGATCTGATAGGCCTTGAACAGTT
+GACATTCACCCTTAATAGTCCTCATTAGTCTTCCTGAGCATAGTCATTCA
+TTTATCAATATTTGCTGATCATCTCCTATGTGCCTAGCATTGTTCTAGTT
+GCAGGTTTTAGCAGGGAACAAAGTCATGTCTCATGAAGCTAAAATTCTTG
+GGAGAGACATAGACAGTAAGCAGAATAGTTTGTTCATAGTGAGTGATGAG
+GCGCATGCAGTAAAGTAGGGAAGGGGATTAGGAAATGCCAGGGCTTGACA
+TGTTTTAGACAGGGTGTTTAAATAATATCTGCTTAGTTGAAGGCTTATTT
+TTGAATAAATATCTGAAGAGTCAGAAATCTACCAGGAGGGTGTATGGAAG
+AGGAGTATTCCTGCAAGGGGAAGTTGTCAAAGGCTTTCCTGTGTGGGGAT
+TAGTGATGTCATGTTTTTGCTGGATGAAATGAGTGACGGTAAGAGTTGTA
+GTGGGGGTGAAGCAAAGGGTTGAGGGAGGCTTCATTGGTGTTATCAGTTA
+CTGCATTTATCTCCAAATAGAGAACGTAGCAATGAAAGCTACAGAGAACG
+GGAAAGGTGAGGATTTATTCTAAGACAAAATAAGTGTAGGAAGTTTAACA
+ATTAGATCAGGAGCACAGACTCCAAGTCTAAGTCTTCATTCTTGCACATT
+TTTTAAAAATTTTGTTATGTGTTCTGGATACTATTTCCTTATGAGATATA
+AGTTTAAAAGCCTTTCTGTGGATTGCCTTGCCTTTGTTGTTGTTGTTTGT
+TTTGTTTTGTTTTGTTGAGACAGGTCTCTCTATGTAGCCTTGACTGTCCT
+GAAAATCACTCTGTAGACCAGGCTGGCCTGGAACTCAGAGATCTGCCTGC
+TTCTGCTTTTCAAGTGCTGATATTAAATGTATGTGCCACCACTGCCAGGC
+TAAGATTGTTCTTTCAATTTCTTTTTTTGTTTTCTTTTGAGATCAAAGTT
+TGCTATGTACTTTTGGCTGGCCTGGTATATTGTGTAGTCTAAGTTGGCTT
+CAAATCTTCATGGCACAGATTCCCAAGTACTGGGACCATAGGTATGGCCC
+ATCACAGTGGGGGGTTGGGGGGGCCAGTACATCTATCTCTTGAATGGTGT
+GGAGTGTATATATGTGTTAGGGGTTTGCAGAGGCCAAAAGAATATTGAGT
+GTCTTCCTCTATTGCTCGCCACTTCTCTGAATAAACCTAAAGTTACCAAT
+GGATTTCTAGTAAGCTGACTGACCAGCAAATATTGGGGATCTGTCTGTCC
+CTGTTCACCATAGTGAGGTTACAGACGTGAATAACCACACCCAGTTTTAA
+CGCTGAATGCTGAAGAGTTAAACTCAGGATTTAGACTTGCCTCACTTTCT
+TATGTTCTTCAAAGCATAGACATTTTAAGTTTTGATGAAGTTTAATTTTG
+TCTTTAGCTACGGTATGTTAGATACTTAATAAATCACTCTTTTATCTGAA
+ATCACAAAGATTTATTTACTCCCTCTTTTTCTAAGAGTTGTTTGTAAGCC
+TGACTCATTTTGAATTTGTGGTTAAGGTAGATGTCTGACTCTTTTCTTTT
+GCACGTGTAATTTAGCATTTGGCTAAAGAGAATGTTTTTTTCTCATTGAA
+CTGTATTGACATCTTTGTTGAAAATTATTACCTGTCCATATGTAAATGTT
+TCTACTTCCATTTTTGCCATTGATTGTGTGTCTGTTCTATGCCAGTACTA
+TACAGTCTTGATTACTGGTTTATATCATGATCTCAAATCATAAAGTATGC
+CCCCCAACATTGTCTTTTTCAATGTTGCTTTAGTTATTCTGGGTCCCTTG
+TGATTCTATCCATCAATATTGGTGAACTTCATGACCAGATAGTAAACGGT
+TAAGGCTCTGTGGCTATAATGCCATCACAGCCAGTCAGCTCTGCCACTGT
+GGCATGAAAGCAGCCATAGAAAATATGTAAGAGAATGAGTACATAGAATT
+GGAATTTCTTAAAATTTTCAAGTCATGAAACATTCTTTTATTATTTTTTT
+TTAAAGAAAATGTTTTATTTATTCTTTGACAGTTTCATGCATGTATACAA
+TGTATCTGAATCTCATGCAGGGCCCCTACTATCTCCCTCCCATCTACACC
+TCAGCATGTCCTTCTCCCACAGGGTCCCTACTCTCTCCCTCCCATCTATA
+CCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCTCCCATCTA
+CACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCTTCCCATC
+TACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCTCCCA
+TCTACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCTTCC
+CATCTACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCT
+CCCATCTATACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCC
+TTCCCATCTATACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCT
+CCCTCCCATCTATACCTCAGCATGTCCTTCTCCCACCTCCATGCCTTTTT
+TTAATGACCCACTGAATCCAGTTAGTGTTGCTTGCTGGAATGGAATGTTG
+ACTGGTTTTTGTTTGTTTGTTTGTTTTTGTTTGTTTGTTTGTTTTCCCTG
+AGGCAGGCTTTCTCTGTGTAGCCCTGGCTGTCCTGGAACTACTAGCTCTG
+TAGATCAGGCTGGCCTCAAACACACAGAGATGAGTGCTTCTGCCTCCCAG
+TGTAGGGACTAAAGGTTTATCTCCCACCCAGTTCTTGTGTGGTAACCATA
+ACTGCAGTGAGTTTATTATTACAGCAGCCATGCCATGTCTGGAAGGCAGA
+ATTTCATTGAGCTCTATAAAACCTGGCTCATAAATTCTCTCTACCCCTTC
+CCTTCCCTGAACCTGGGTGTGGGGGTCAATTTAGATGTCACATTTAGGCT
+GACATTTATCAGTCACTTATTCTCAGTATACTATGAGTTTTTGAGTTGCT
+GCCCACTGCAGAAAGAGGTTTCTTTGGCCAAGGCCTGACAGTAGCACTAG
+TCTGTAGGTATAAATGTAAATATTTACTCTTGTTTGTTTATTTGGTTGGT
+TTGTTTTTTGAGACTGGGTTTTTCTGTAGCCTGGCCATCCTGGAACTCAC
+TTTGTAGACCAGGCTGGTCTTAAACTCAGGTTCACTTGCCTCTGCCTCCC
+AAATGCTGGAATTAAAAGCATGCACCACCACATCGGGCTCCCAAACATAA
+ATATGTAGAAGGTAGTCTAACAACATGTCTACTTAGCAAAACAGCAGTAG
+TAGGGTCCTGTTTTAGGACCTGTAACCTCCTTCCCCTGAGTCATGGGCTT
+TTGACTAGGTGTGTACCTTTGGCGCACACTTTAAGTCTAGTCAGAAAACT
+GTGGGTTATCTTTGTATTCTTTACATCACTGTTGCACTAGTGGTCACATT
+TCGCCTTTGTCTATATTATAGCATTCAGGGTCCAGTGCTTAGTAAGACCA
+TTGATGTCTTTTCTCCTCCAGTGGCCTGCAAAACACCTGGCATTATAAAA
+CCTAACCAGCCAAGAGGAAGTTTTCAGATCAGTTCTATCTTGATTTCTCT
+ATGTCTTCTATGCAACCAAAGTGTGTTGTATCTTCATCAATAGGGTTTTA
+CTATTATATAGTTACATTGGGCAACCAAGAGTGATAGAAATAACCTGTGT
+TGTTTGGATAGGAAGGTGTTTCTGGGACCTCCATGACTAATAACTTGTAA
+GAGGTATCTCATGCTTAGCACATGTTTTCTGAGGATACATTGTCATGTAC
+ATACCTATGTTGAAACTCCTTTAAAAAACACTTATACTTTTAAATTAGCT
+TTCAAAATAGTTTCTATAAGTTTTTTTAAAAAAGATTATATATATATATA
+CACACACATATATGTATACACACACACACACACACACACACACACACACA
+CACACACATATATTGTAGCTGTCTTCAGACACATCAGAAGAAGGCATCAG
+ATTCCATTACAGATGGTTGTGAGCCACCATGTGGTTGCTGGGAATTGAAC
+TCAAGACCTCTGGAAGAGCAATCAGTGCTCTTAACAATTGAGCCATCTCT
+CCAGCTCTATAAGTTTTTTTTCACACATCATTTACATTCTGTAAGTAATG
+AATAATCACTACACAAAACAACAATTGTTCTCCTGAACATTAATTCTGGA
+GAATCTAAAATTAACAGTCTCATAAAACCTGTATGCAAATGATTAACAGA
+TCTAGGGGTAATAGCCTCAAGCTGGAATCAGTTTAGAAGTCGTCAATAGG
+TAGTTAAGCTACCTAACAACAAGAAGGAATTCAGCTGCTGATACCTGCAG
+CAGCTCAGGTAAATAGTGGAGATTATAGGCCATTGAGCAAGCTGATTCCT
+AATGCCTACTCATTATATGATTTTACTTATGTATCTTTTTTTCCTGTAAT
+GTTAGATCTTTGGTTATTTTGTTTTCCCCTTGTGGCAAAATAACATCACA
+TAAAACATAACCATTTTGAGTATACAATCTATGATTGTAAAAACACAGTG
+TTCCATTGTGACCACCAGCCACCACTGTTACTCTTCCTTTTTAACATTGC
+TTTTAAAAGTATATTACAAAAAAAGTATATTACAATTTTCACTTGACATT
+GTAATTGTACATGTCTATGAATAGGGTCATTTTTTTAAAATTATACATTC
+TAGTATCTTCTGTTGCATTCAGTTAACTTAAGAGCAGGAAAGACTGGTAT
+AGAAGTCCTTTTCTCTCTCTCTTTTTTTTCTTTTTCGAGACAGGGTTTCT
+CTGTGTAGCCCTGGCTGTCCTGGAACTCACTTTGTAAACCAGGTTGGCCT
+CGAACTCAGAAATCCGCCTGCCTCTGCCTCCCGAGTGCTGGGATTAAAGG
+TGTGCGCCACCACACCCGGCGAAGTCCTTTTCTATGATAGAGAGTATATC
+GTGGGCAAATCCTAGGCCTTGGCTCTTTAGTCAACCAGCATTTGTATGAT
+TAAATAAAACATTGGTGTGTGTTTGTGTGTTTGCACACTGGGTACAGCCT
+TTCCTTTATTAGCCCTGGGTGTGATTTTCTTCTCTGCTGATAGATCCTTT
+CTAAGCTGATCGCTTCATACTTAGGGTGGGGATAGTTGTGAGGACTGAGG
+AGGTGATGTGCGGTCCTGTCCCTTTCTCATTTTGCTAGTGTGACTGATAT
+GTTAGTTCTTTGCATGTGTCTCCTACTCTGGAAGGAGCTGGATGGGAATT
+GTTTGTTTTTTAGTCACTAAATCTAGACTATCAGGTTCATGGCAAGTTCT
+CAGGAAGTACTTATTACATGTATAGAGTTATAATCTGAACTTGATTAGAC
+ATATGGCACTTTTCATACTCCTACTTTTGTTTTTCAAGTTATTTTTTTCT
+ACTTACCAGTTTCATGTTTTAAAAACTTGTTTCTTTTTTTAAATTTTTTT
+AATTAGGTATTTTCCTCATTTACATTTCCAATGCTAGCCCAAAAATCCCC
+CATACCCTCCACCCCACTCCCCTACCCACCCACTCCCACTTCTTGGCCCT
+GGCATTCCCCTGTACTGGGGCATATAAAGTTTGCAAGTCCAATGGGCCTC
+TCTTTCCAGTGATGGCTGACTAGGCCATCTTCTGATACATAAGCAACTAG
+AGACACGAGCTCCAGGGGGTACTGGTTAGTTCATATCGTTGTTCCACCTA
+TAGGGTTGCAGATCCCTTTAGCTCCTTGGATACTTTCTCTAGCCTCCTCC
+ATTGGGGGCCCTGTGATCCATCCAATAGCTGACTGTGAGCATTCACTTCT
+ATGTTTGCTAGGCCCCGGCATAGTCTCACAAGAGACAGCTATATCAGGGT
+CCTTTCAGCAAAATCTTGCTAGTGTATGCAATGGTGTCTGTATTTGGTGG
+CTGATTATGGGATGGAACCCCTAGATATGGTAGTCTCTAGATGGTCCATC
+CTTTTGTCTCAGCTCCAAACTTTGTCTCTGTAACTCCTTCCATGGGTGTT
+TTGTTCCCAATTCTAAGAAGGGGCAAAGTGTCCACAATTTGGTCTTTGTT
+CTTCTTGAGTTTCATGTGCTTTGTATCTTGTATCTTGGGTATTCTAAGTT
+TCTGGGCTAATATCCACTTATCAGTGAGTACTTGTTTCTTTTAATTAAAA
+AACAAAACAAAACAAACAAAAAACTGTGTGTAGGCCGGGCGTGGTGCCCC
+TCATACTTAATCCCAGCACTCAGGATCTCTGTGAGTTTGAGGCCAGTCTG
+GTCTACATGGTGAGTTATAGGACATCTAGGGCAACATAGTCAGACCCTGT
+AGTCAAAAAGAACCAAAACTGAACCAATACAAAACTTTGTGAGCTAGTAA
+AATAGTGAAGTGCTTGCTAACATTCTGAGTTTGATCTCTGGGACCCATGT
+GGTAGAAAGAGAGGACCAGTTTCCACAAGTTGTCTTTTGATCTCCATGTG
+AGTGCCAAAGCACACATACATGTATTAAAATGTGCATGTGTAATTGTAAA
+GTTTCGTTACCACTGACAGTCAGAAACGAGCTCTGGGGCCTCAGGGTTCT
+CTTCCTTCAGTGCTGTGGGAAGTGCACCTTTAAACAATCTTATTTGGGTT
+TCTTTTGGAGACAGGAGTTATGACAGAGATTAAACTTGGCTTTGAGCAAA
+TTTCATACACATTTAACCACTGAATTTAACCACAGTGCCTATTCAGTTTT
+GAATATCACATGACTAGAATCGTAGAGCTCCTCCTATTCATAACTCACAC
+TGTGTCAAAGCTCCTTTTCTTCAGTGTTGTTGGTCACCCATTTAGTTTGT
+CTGTTTTGTATGAATATGCCCTGTTTTTCTCATCTTATTAGTGCATGCTT
+GGGTTATTAAACTTTGACAAATGCTGCTGTGAGGATATCTCATATGCTGG
+CCTGTCTTTTCCCTCACAAAATGGTGCTTTTTAATGAGCAGTTCCCATTT
+TGGTGACTTCTAATTTGTCATACTTTTCCATTATGGTAAGTGCTTTTATT
+TCTTTTTTGATGAATTTTTCATCAACTTGAGATCATGAGACGTTTCCTGA
+TACTGTTGCATAAATGGCATATTGATTTGCCCAATGAAGTTGACTTGTTT
+TTGTGTGTGGTGTAAAATAGACTTCTTGTTTGTTCTTCTAATGTGGGTAC
+TTCTTTTTTCACCAGTTTCCTCCATGTTCTGTGTGTATTTCCTCCCTTTT
+ATTGTTATTAAGTTTATATTATTAAAAGAATTCACATTAAGTAGGTTTTT
+TTTTAAAAAAAAAACTTTTTATTAATTCTTTGTGAGTTTTATATCATGTA
+CCCCACTCATCTCCCTGACCCGTTTCCCCCCTCTGCCCTTGCATCCCCCC
+CTTCAAAAGAAAAGAAAAACCACACAAACAGAAAAACCAATAATGTATAG
+AAAACATCTCATAGTAGAAACTGTATCATGTCACAGTGTGTGCCACTGTA
+TACCCCTCTGTCTGCACATCTTGACATGCAGATGATCATTGCAATGAGTC
+ACTGATCTGATTCAAGGTCTCTGACTTATGTCACACCATTAATATTGGAT
+CTTCTCCAGGACTCCTCTTGGTTTATTCAGTTGTTACTCTGTGTCATCAA
+GTTCCTGCAGCTTTGGATCAGCAGGACCGGCTCTTTTATGTACTCCAACG
+GTTCACAGATGATGTAGATGTTGGGGTGCGCCAACTCAAAGCCCTGGATC
+TGGGCCTGGGTGGTAGTTGTGCTGGTCAGCCTGCTGGCTCTCCTGCATCT
+GCATCACCAGGGCTGTTCTCCAGCACTGCTAGGCCACTCGATGCTATCAT
+TTGTAAGAAGCAGGGTCATGAGGAGGGAGGACACCTCCCTGCCCCAAAAC
+ACACACACACCACCCAATGGCAGATGAGTGACCAGTCCAGCTCTCCCTCT
+ATCTCACCCTTGAGGCTAGGTCACCTGTGCACCTGCCACCAGGGCCAGCT
+CTACTCTGCTGCCCAGTTAAGATTCAGGACCTACTCTCCTGAGTACTGCT
+GCTGGTGAGAGGTGTGCCAGCTCTCTAGAGTGCCAAAGCCAGTTCTGTAC
+AGATACATGGCTGCACAGACCAGGGACATCCCCATGGTTTCTAGTGATAA
+TGTGAGTCACGACATCAACATCAATCCCTGCCACTGCATGGCCACAGATC
+CAGACATGGTCCTCAGTAGCAGCAGGAGTTGGTACTTCACCATGGCTTCA
+AGTGGCAGGGCTAGCTACTCACAATAGGCTCTTCCTCTTCCCCCTCATGT
+CTCAGTTCCTTCTCTCTTCATACTGCGCAGGCTGTTCTGCTTCTCTTTCT
+CTTCCTTCTGTCCACCACATACTTGCACATTGCAGAGACTCCTGCTGCAG
+GCAAGCCATGATGCTGGTATGCCTCTGGGTGATCTCCTCTGCTTGTGCTG
+TTTGGCATGGTGGCATGCAGACCTCTAGGTGTCTACAGCTACCCATGTGA
+CATGGCAGCAGTGTCCTCCCCCACCCCTCTCTGCAGTGTGGCAGGCAGGT
+CTTCTGGACTTTTTTCCCTGCCAGTGCCCTGTGTCATGGCAGTGGGATGG
+CAGTGGGTGGGTCTCTCTCTCTCTCTTTTTTTTAAACACAGGGTTTCTCT
+TTGTAGCCCTGGCTGTCCTGGAACTCCCTCTGTAGAGCAGGCTGGCCTAA
+AACTCACAGAGATCTGCCTGCCTCTGCCTCCCAAGTGCTGGGATTAAAGG
+TATGTGCCACCACCACAGGCATGGCAGGTTCTTTTGGCATAATTATTTCT
+CACTTTGTTCTGGGGTTGTTTAGGACAGAGTTTTGTGTTGTAGCTCATGC
+TGGCCTAGAGCATGCTGTGGTTCTCCTATCTCTACTCCCTGCATTCTAGG
+AGTGAAGATGTTCACCATGTTTGGTTCTAGTTATCTTAAAATAAGGACCA
+TCTCTTGCTAATAATACTTTTCATTGTACAGTACATTATGCCCCGGTCTG
+TCTTAGTATTTGAAGAACCTTTGTCCTTCATACTGATTCTTATGTCTTTC
+CTAGATGGCATCTTCTGTCTCCCACATAGACAATATTGTCACATCGTTGT
+CCTTAGACAATATTGTCATATTGTTGTCCTTTGCTATTGGAAGATTGAAG
+GGTTTTGTTGCTTAAAAGACCATTTTGATAGTCTAAGTGTTGCTGCATGA
+TTTTGTGTGTGTGAATATGTGTGTTTAAGTGTAGTTTTTCAGTATTATCA
+CTTCTTGAGAAAGTATATCCTATAGTTCCAAATCAGATGTTGATTTAACC
+TTTTAAAAAAATCTTTCTGGTGCTGGAGAGATGGTTCAGTGTTTAAGAAC
+ACTAACTTGCTCTTCTAGAGGTCCTGAGTGTGATTCCCAGGAACCACGTG
+GTGGCTCACAACCATCTGTAATGGGATCTGATGCCCTCTTCTGGTGTGCC
+TGAAGACAGCTACAGTGTACTCATATACATTTAAAAAAAAAAATCTTTCT
+ACTCAGGTGGTGGTGGCAGCACATACCTTTAACCCCAGCACTTGGGGAGC
+AGAGGCAGCTAGATCTCTGTGAGTTTGAGGCTAGCCTGGTCTACAGCGTT
+TCAGGACAGCCAGGACTACCTGTCTGTCTTGAAACAAGACAAAACAAAAA
+CGCCAAACCTTTCTGAAAGTGGTCTAAGTGTTCAGCAACACTGTCATGTA
+AGGATAGGACTTGACAAAAATCAGAGAGCAACTGTTGAAGAATCAGAAGC
+TATGCATTCATGCTCCAGGCTGCTGTGCTTCTTATAGGCAGGACAGCTTC
+CAGACTTCAGTCTTGCCCCCAGTATGGAGTGTACGTTACATGTGTTCGTG
+GAGGGGGAGGAGGAAGAGGAGGGGAGAGGAGGAAGAGGAAGAAGAGGAAT
+GTTTTAAAATTCCTTGAGCGGTTCAGTCTACCCTCTTCTCTTAATGTGGA
+ATCACATTGTTAAGATTTTTATTTTTAATTAGGTGTCTATATATGTATCT
+GTATGGAAGTCTATATGCACATGAGTGCATGTACCCTGAAGAGGGTATTG
+GATCCTATGTTTCTGGAGTTAAAGGTGGTTGTGAGCCACCTGATATAGGT
+CCTGGGTAACTGAACTTGGAGTTTTTTTGGATTAGCAATAAGCACTCAGA
+ACCATTGAGCCATCTCTCCAAGCCCTGTAATCACATATTTAAAAGAACAA
+TGAGTGTAATTCTAAAGTTAAGAATTTAGATATGGGGGCTGGAGAGATGG
+TTCAGTGATTAAGAACACTGACTGTTCTTCCAAAGGTCCTGAGTTCAATT
+CTCAGCAACCACATGGTGGCTCACAACCATCTGTAATGGGATCTGACACC
+CTCTTCTGGTGTGTCTGAAGACACCTACAGTGTACTCATATAAATAAAAT
+AAATAAATCTTTTAAAAAATGAATTTAATTATGAAGGCCAAATTTATATT
+TTTAGAAGTAGTTCTTAATTTGTTACAGTGTGCTCTAGAGCCTGGATTTT
+ACATGCCCCCACAATTGTGAGCCTGTGTAGATTGCTTTGTTGCACTTAAA
+ATAGTTTGGCTGAAGCTTGTTTTCATTTTGAAGATGTAGTTTCAAGTGGT
+TGAAGAGCCAAGGTTGTTTTTACCCTATTGATACAGTTCCTACCCTGAGC
+TATTTTATTTTTCATAAAAAACAAATCAGTCTGACTTATCTCTAAAAATC
+CCATCTAATTTCATAAGGAATGAAATAGCTACAGATGTTTATTTATTTAT
+TTTATATCAGATTTTCTTATTGGACAGAATGAGGTAGAAAAAAATGTTAT
+TTCAGGCTGGGACCTAGGCTAGGTGTGGTGGTGAGGGCTGCAGTTCTAGC
+ACTAAGAGCCAGAGATGAGGCAGGAGGATCTTGACTTCTTGAGAGGGCTA
+GACTGGGTTCCAGGCCAGCCAGGAGTGAGAGAAGAAGAGGGGTGGGTGCT
+TTACAGAACTCAGCTGGAAGATGTATGCCAAACACCTGCAGCTTTATCAT
+TTCTATTCTGTCTCTCCTCTTTTTAAGCTAAAGTTTAAAGGGCTAGAGTC
+CCCTGCAGGTTGGAGAATCTAAGGAATGAAGTTGAAAGGTAGCCTGAGGT
+CAAATTGATTTGTTGTTTTGAGACAAAGTCTTGCACTGTATATTCAAGGC
+TCTTGTCAAATTCCTGATTCTCCTGGTTCCGTCTCTTGAATGCTGCCAAG
+TTATAATATTGGGATCGTGTTCTAATTGGCTGAGAAGTCTGTATTAGAAG
+TTCTAGCTTCTGACCTGCAGAGTATAGCAGAAGGATTTTCATTTTCTGAT
+ATTTTTGGTTAGTGTCATCTCTGTTCTGAGAGTGCATTCTGACTCTCATA
+CTTTAAATAAGAGTACTTGGTATGCTAAGAGGAAATGCTTGTTATAAGAC
+TGTAAAACTATCTTTTATTCTCCTGGAGTAATTGTCTCCAAGGCTTACTG
+CCTCTGTCCATTAACCTAGACTTAGTACCCAAAGGTGCTAGCCTCCATAC
+AATCTAATTTATGCCGAGACTATTTTCAACTTCTGAAACTTATTGCTCCA
+TAAGCTCACCCTTTCTTGTTCTTTCTGATCTCTGGCTGCTGATTCAATTC
+AGTTAGCTGTTCTGGCTCAGACTCCTCTCCAAGCTGACTGATTGAATCTG
+GTTTCTCTCTCTTGGCTTCTCCTGCATTGTTCTGCTTGGCCTTACACTAA
+CTTTGACAATCTGTTCTAATTTTCTGGCTCCTTCTTATTCTCTGGCTTGT
+TCTAGCTTCACCTGTGTCTAGTTTGTCCTCTCTCTATAACCTGTCTCTCT
+ATCACGGTCCAGGGAAAACTGCCTCCTTCCTCTCTCTGCCCTCCTCTGCA
+AGTAGCTTTTTTTCCCCCTTTTTCTTCTGGTGAGAGTTGGGCAGATCCTA
+TTCTAGCAAATCTTTCTCTAATTCATCACTTTGTCTGCTATTCAATTAGA
+CTTCTATAAACTACTTTTACCCTCATTGATTGAGATTAAAGGGTGTGTTT
+GTATTCCAGCCAGAAGTGGCTTAGGTGTATGCTAAGGGCTTAGCCACACC
+ACAACGAGAAATAAGTTTTGTTGTTGTTGTTGGTTTTGTTTTTTGTTTTT
+TGTTTTTTTGTCAGTAAATAACACAATCTTAGAGTTCATTGTGTGATCAA
+ATATCCTGCAACATAAGGTCTGGATGTTCTGGCCTGAATTTTAAATCTGG
+CACCATGAGAGATAGATTCTGATAGAAGAGTTGTGCTGCTCTTAGAATGT
+ACAGGGCCAGAGAACAGATGCATGATGGATATAAGAAAAGAGGAACAATA
+TCATTATTGTAAGAGCAAGTAGATGGCTTGCTTTTCACACAAAGCAGGCA
+CTTAATAACTATTGTTTGAATTTTAAGTCAAACTAGCAACTATTGGGAAC
+TAGCAAAATTTTATGATATTAGGAAGGGTCAAATTTTTCCTGAAAAGGGT
+TTAGTTTGTTGTAAATAGTTTGGGATGAGGTAAAAGAGAAAACTTGAGAT
+TTGTCTTTTCTTTGGTTGTCTGTGATGGTTTATTGTCCCGTTTTTGACAG
+TGACCTCTTAGTGATGTGAATCTGTGAACAAGTGATCTTTGCACGTGTAT
+GTTTGTATGTGTGTGTGCTCATGTGAGTGTACCTGCTGTGGGCCTGTGGA
+AGTTAGAGGACAACTTTGGAGAGTTGCATTGTTTGTATTTGTCAGGGTTC
+TCTAAAGGAGCTGAACTGAAAAGATGTATATGTGTGTGTGTATGTCTTAG
+TTATTGTTCTTTAGTGATGAAACACTATAACCAAGGCAACTTAAACAGAA
+GCATTTAATTGGGGCTTGCTTACAGTTTCAGAGGCTTAGTTCTTATCATC
+ATGGCAGGATTGTAGAGTCAGGCAGCTATGGTGCTGAGAAGTAGCTGAGA
+ACTCACATCTGACTAGCAGTTTGCAGGCAGGGAGAGAAAGAGAGAGAGAG
+AGGCAGAGAGAGAGAGAGACAGAGAGAGAGAGAGAGAGTCAGAGACAGAC
+AGAGACAGAGACAGACAGAGACACAGAGAGAGAGACAAACATAGAAAGAG
+ATACAGACAGACAGGTCCATTAACTAGAACCAAACATTTAAGCATGAGTC
+CATGGGGCCATTCTCGTTCAAACTACTATATATGGTATATAAAATGTGTA
+TATATGTATATATACATATATATATATATATACACACACACACACATGTA
+CATTTTTAAAAGGGAATTTTTTATGTTGCTTTATAAGTTGTGGTCTGGGT
+AGTCCAACAGTGACTTTCCTCTGACAGAAAGGCCAAGAATCCAACAGTTG
+TTCAAATTGAATGTCTCGGCAGTCCCAGTCTGTTGCTGGAGTCCTGGAAG
+ATTCCTAGAGAGGTGTGGGGTCTTTAGTCTGTGTTGGAGTTCTGAAAAAG
+TAGGTTCCAATACCAGTGGAGGAATCCCTCAGCAACAGGATAAATTAGTT
+CTCAACATTTGGGGGGTCCAACGGCCCTTTCACACGGGTCACCTAAGACC
+ATTGAAAAACACAGATATTTATGTGGTGATTCATAACAGTAGCAAAAATT
+ACAGTTATGAAATACTGTACGATGAGAATAATGTTATGGTTGGGGTTCAT
+TATGACTTGAGGAAGCTTATTAAAGGATCTCAGCATTAGGATGGTTCGTA
+ACCACTGTGATAGATGGATCAGCTGTGGAGAGTGAGGGCACGAGGGCAAG
+CAGCAAAGTCTTCCTTCCATGTCCATTTATGTGAGCTGCCACCAGAAGGT
+GACCTAGATTTAGGGTGGGTCTTCCCACCTTCAGTAATCCAATCAAGAAA
+GTCCCTCACAGATATGCCCAGTGCTTGGGTTTTAGTTGATTCCAGATGAT
+GTCAGTTAAGATTAGCTGTCTCCTTGGTCCTTTCCTTTCTGCTGTTTTCA
+ACCCCTCCCTCCTGGCTTTGTCCCTCTCCACTCCCCCATCTAGTCAAGCA
+ATTTTCTTACCAGACTAAAGAATTTGAGTTCAGGACATTTTAGATGAAGC
+TTTATTATTTCTACCTTTCTTTGATCTCTTGATTTTAGATACATAAGTGT
+AAAGTTAAAGATCATTGTCCTTTATCTTCAGAGTTGCCGAAGATTCACCC
+AAGAAACTAGTACTGATAATTTCTGTAATGTATTCTCCCATATTGGGAAT
+TTACTTCATTTGATGAGCTCTTGTGGAACTCTGTATAGTATGGGTACTGA
+GTGCTAGTAATATAGATAAGATATGGTCCCTTCACCTTCACTTCTGGGGA
+TACAATCTAAGGCACACTGTCTGAAGAAGTGCCAGCAGGAGCCAGAGAAG
+GGCCCTTGGTAAATAAGAGCTTCCCCCAAGGTGCCCAGTGCCTCAAAAAG
+AATTATGAATTTTCATTTCATTGTTAAGATTGGTATTGTCTATAAGTCAT
+AGTTTGATAAGCTTGACTAAGTCGAAGGTGGCAAGCCATAAGAGGTGGTA
+TCATATTCTTTGTTGCCACCTTTATCCTATGGGGTTATCTCATGAAGGAG
+GGAGAGCAAGAATGAATACCAGAGAAAACTTTTTCCAACTCATTTGTATT
+ACTTCAAAGATGTAAGATAAATGCTCCATAGGCCTACCTAGTTTATCAGA
+CATGTAGTTCGTGTGAAACTGTTGGGTTTTTTTTTTTTTAATCTTTTTTT
+AACTTGTGGATGTGTGTGGTGTTTGTATGGAGGCAGACATGTGTGTATGG
+TGTGTTTTACAGTATGGAGGCCTCTAAGATTGATGTCGGGAATCTCCCTT
+ATCTTTTTGTACTTTGTTCTTCGAGGCAGGATCTCTTAGTCAAACTCAGG
+CTTTGATAGCCAGCTAGCTCAAGGACTCAAGTCCCATCTCTCCTTTCAAT
+GCTGGATTCACAAGTGGCTGCTGCATTGTCACTTTGCATTTAAATGGGTT
+TTGGGGATCTGGATTTGGCCCCTTTGTTTGTACAGCAAATGTTAACACTA
+AGTCATCTTTTCAGGTCTGATTTTTTTTCCCCTTAAATTCTTTATTAATA
+GCCACTTCCTCCATGATTCTACTAGGTGATACAACTCCATCAGTAATTTT
+TAACTACAGAAAAGTTGGCATGTTGACATACTGCTTCTGTGGACACATGC
+CTCATAAACTTGTCAAAGCCCGAGGGGACGTTGGGAATCTCTTTATGACA
+ATTCCCGAGGGCAGTAGTCTTTCCTCTTGAAAGTGGATGAGGCCTAGACC
+TGTTTATTTGGGCAGCCATATACCACTGATCATGGTGGACCTGTCAAAAG
+TACCTATTTACCATGCTGTCTGAGTTTAGCTGAGTCTTGTCAGTTACAAT
+TGGGAAAGTTGGCGAGAGCAAAGGGACTTGGTTAGTTTGGCTTTGGATCC
+GAGGTGACTGAAAATCTCAGATAATGAATGTTTCCATAATAAATGTAATT
+AGGTCACTGAGTTCAGTGTTGTCAGCCTTCTTTTTTTGTTTACACTTTTT
+GCTTTATTATTTAATGAATGTATTAGTGCTGTGCTGCATGTACACCAGCA
+TGCCAGAAAAGGGCAACAGATGCCCTTTTTGATGGTCAGAGCCACCATGT
+GGTTGCTGGCAATTGAAGGGAATTGAACTCAGAACCTCTGGAAGAGCAGC
+TGGTGTGCTTAATTGCTGAGCCATCTCATCACCATCAGGTAGCCTTTTTG
+TCACTACTTGGTACTGGTGTAGCATGACCCCTTTAAGACGTGTAATTTCA
+GTTCATTTAAAACCTGGGACAGATTTTTCCTGTGCAAATCAAAGGATAAT
+GGGTTGGTGCTTTAGTTTGCTTCCTAAAATGTTTAATGGGGTTAGTGTTC
+TGGAAGCATCCTAATGGCTATTTTAAGGTAGGTAAAAACCTAAGCTGTTG
+CAACAGAGGCTTATTACAAGTGATTTAGAGAAGAAAGCATTTTATTCTCA
+TAGTATTTTCTTTACAGTGTGAGGAAATGTTCTTTTTGGTATACAATTCT
+GAGATTTAACTAATGCAATCCCTGTGACTACCATAATCAGGATTCATAAA
+TTTACTATTATTATATGGTGTGCATGTACATGTCAAGTGTTTGAGTGTGT
+GTTTGCATGTCCCATGTGCAAAGGAATGCGGGTGGAGGTCAGAGGACAAC
+ATTAGAGTTAAGTTTTTCTATTGTGAGCTCCAGGGATTGTACTGGCTAGT
+TTTGTGTCAACTTGACACAGCTGGAGTTATCACAGAGAAAGGAGCTTCAA
+TTGAGGAAATGCCTCCACGAGATCCAACTGTAAGGCATTTTCTCAATTAG
+TGATCAAGGGGGAAAGGCCCCTTGTGGGTGGGACCATCTCTGGGCTGGTA
+GTCTTGGTTCTATAAGAGAGCAGGCTGAGCAAGCCAGGTGAGGCAAGCCA
+GTAAAGAACATCCCTCCATGGCCTCTGCATCAGCTCCTGCTTCCTGACCT
+GCTTGAGTTCCAGTCCTGACTTCCTTTGGTGATGAACAGCAGTATGGAAG
+TGTAAGCCAAATAAACCCTTTCCTCCCCAACTTGCTTCTTGGTCGTGATG
+TTTGTGCAGGAATAGAAACCCTCACTAAGGGATCACATGCAGATCCTCAG
+GCTTGTGTGGCGAGTCATTTTACCATCTGAGCTGTCTTGCTTCCATAATC
+AGGAGTTAGGTTGGTTCTGTCGCTACCCGAAGCTCCCTATGCTGCTTTGC
+TGTCATCTCCTCACCCTGACATTACTGGTCATATTTTAATTCTTACAGTT
+TTGATTTGTTCTAAAAATAGAATGGCCCTGTGTTGGTTGTCGGAGTCTTA
+CTAAGTTCTTCCTCAAGTGTAGTGCATTGTGATCTGTCCTTGTTTCCTGT
+TAGTAGTTACTTCCTTTTCTTAGATACGTTTCCATTGTTTGGCTATTGCT
+TTTGTTTATCTTTCAGCAGGTGGATGAGGAGCGCTTGCTAGATTTCCATT
+CTAGCATTGCTGCAGTGAGCTTCTTATGAGGATGGGAGGTGTGGGAGCCT
+GTGCTTCTGAAGATGATAAAGGGACCCACGATGCTGCCGTGCTGCTCTGC
+ACTCTTCTTATTGTCTTTATTTATATAATTGACACTCGAGTGTTGGGCAT
+GTTTGTTCTGATTGGAGGAAGATAGCTTGGACATTCAGATACAATAGGAA
+TTCTGTATATCACTTGCATTCCCAATACATTTATGGGAGGAAGTTATGTG
+CTTGTGTTAGGCAAATTTTGGTGGGTGACCAGCAGTTTACGTGACCGGTC
+AGAAAAAGTCTCTTTCTGGGAAGACATAAGCTACTTTTTTTTCACATGTC
+TGACTTTTCTGAGTGTCGTGTGAGGAAGCTCTTAGTAGACCTCATCTGTC
+GTCATCCCTTCCTCATGCTGCCCTCTTCCCAGCATGGAGTTTATGATTCA
+CTAGTAGTAGCCAAAACGTACTTAGGAATGAATGAAATATAGAAACAAAC
+GAAGTAGTCACCTCAGTGGATGCTCATTTCTTTTCCTCTTGTTTTTTTGT
+AGAAAGAAGGAACTCTCAGCCACCAAGAAAGACCGTGTGAATCATTGTCT
+AACAATATGTGAAAACATTGTGGCACAGTCTCTCAGGTAATTGGCTTTTT
+AAAAAAAAGATTTATATATTTATGTATATGAGTATTCTGCTTGCATGCAT
+GCCTCCATGACAGAAGAGGGCATCAGATCCCTTTATAGATGGGTTGCTGG
+GTAGCCAGTGCTGAGCCATCTCCCCAGCCTCTTCTTTTCTTTTTGTTTTT
+GGTTATTTTTGTTGTTGTTTTTCTTTTTTTGTTTAAAAGATCTCTCTGGT
+AATTACTGAGTTGGGTGGTGGTGGATATACCTGTAACCTGGCATTCAGGA
+GGCAGAGGCAGGCAGATCTTGGTGAGTTCAGGGATAGCCTGGACTACAGA
+GCCAGTACCAGGACCTACACAAAGAAACCTTTAACTCATAAAAAACAGAA
+AACAAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGA
+AGAAGAAGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGA
+AGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGAAGAAGAAGGAAGAAGAA
+GGAAGAAGGAAGAAGGAAGAAGGAAGAAGGAAGAAGAAGAAGAAGAAGAA
+CAACAACAACAAACTGGTTGCTGGGCTGTGGTAGTGCATTTCTTTAATCC
+CAGCACAACAACAACAACAACAACAAACTGGTTGCTGGGCTGTGGTAGTG
+CATTTCTTTAATCCCAGCACTTGGGAGGCAGAAACAGGTGACTCTCAAGG
+CCAACCTGGTCTACAGAGTGAGTTCCAGGATGGCTAGAGCTTACACAGAG
+AAACTCTGTCTTGAAACTCCACCCCACCCCAAACTATATCTAAATACTCT
+GTGTTTTCACTATTAATGCATTACCATGTTCTTTGTACCCCTAGCTATCT
+CTTAAAAGTTCATTTAGGCTAAGCGTATTTTGGTACATGCTTGCAATCCC
+AGTATTTGGCAGGCTGAGACAGGAGGATCTTGAATTTGGTATTAGCCTGG
+GCAACATAGCAAAACCCTGCATCAAGAAAAATCCATTTAAAATCAGGACG
+TTTTACCACATTTGTAGTTGTGCTATAAGGGTATCTGGGTTCTCTTATAG
+GAAATGTTTTCTTCTTGTCATCTTATATATGAGAATTTTAGTCATATGAT
+AATTGAATGGCATGTTAGTAATTTAATTTGTATTCTTTTAAGGTTTATTT
+ATTTTTAAATAAATGAATGAGTGTTTTGTCCCTATTTTATATTTGTTCCC
+TGTATGTGCCTGGCACCCACAGAGACCATAAGAAGGTACTGGAGTTCCTG
+GAACTGGAATTAAAAGATGGTCATAAGTTGCTGTGTGGATGCTGGGAAAC
+AAACTTAGGTCCTCTCTGCAAGAATAGCAAGTGCTCTTATCTACTGAGCC
+TCCTACTTTCTGTTTGTTTGTCTGAGACAAGGTCTCATTTAGCCCAGGAT
+GGCTTCAAACTCACTGTATAGCAAAAGATGACTTTGAGTTCCTGCTCCTC
+TTCTTCTGCCTCCTGAATGCTTAGACTATAGACCTGATCTCTTAGAGTTT
+CTATTGCTGTGATGAAATACCATGATCAAAAGCATCTTTCACTTATACAT
+GTGTGTAACAGTCTATCACTGTAGAAATCAGGGCAGGATCTCATACAGAC
+TGTGGAGGGGTACTGCTTGCTGGCTTATTCTTCATGGCTTGATTGGCCTG
+CTTTATAGAAGCCAGGAGCATGAGCCCAGGGGTGTTCCCATCCACAATGA
+GCCTGGCCCTTCCCCACCAATCATTATTTAAGAAAATGCACTATAGACTC
+TTCTGCCTAAAGTCCCATTTTATGGAGGCATTTTCTCAATTGAAGTTTCT
+TCAAAGACGAGTTTAGCTTATGTCACCTTGGTCAGAAAACTAGCTAGGAC
+ACTTGGCTTTAGAATATAGCTTCAATGTCAAAATTCCCCATAATCAAAAC
+TGAAAGTAAATTCAACTTGGGGATTGTATTGGCAATTTATATAAATTAAA
+GGCTTAATAGTCTGTAGGGTGAACCATTACAGAGCAGAGTATTATCCTAA
+TAAGAAAATACATAACACACACACACACACAATACACATATTTCAAGAAA
+GAAATTTGACTTGGCTACTACTCAGGCAGAGGCAGGAGGTTCATGACTTA
+GAGACTTTTTATAGAGCCCTCTTTCAGGCTTTATAAAAGCATGACTTCTT
+CTCAAACAAAAAGGAAAGAAGGAAGGAATCAGGGTGGGCTCAATGTTTAT
+TTCGTGTTTTCTATATTAGAAGTTCATCTTTATTAGTTATTATGGACTAA
+ATGTGTATCTCCCCAACCCGTCTGTATGTTGAAGATCTAATCCCCCATTC
+TGAAACTGAAACCAACTATGCATTTTAGTAGGTTAATTCAGTGTTTTCTT
+GAAGTTAATCTCACAACCCACATAGTTATACAGCTTTGTCTCATGGGCTG
+TTCTTCATGTAGTAGTCACAGGCTTGTGGGAGGCCACTGAGTGACTAATC
+TGAGATTTGAATGACCTTTCTCCTTAGGTTGAGGAGTTTGTGGTGTGCTA
+GCGTGGCTTGCAGAACTCAAGAAAATACTAACATTTTCTGGCTTGCTACA
+AAGCTATAGCTCAGTAGCAGCCAGATTGTAGAGACTCAGAGGGTAAGGTA
+TGGGAGGGAAGCATGGCCCCTCTGTGCTCTCTTCTTCCAGTCTTCCATGT
+GTCTCAGCATGGACTTCTGTTACCAGAACCAGGAGCTTGATATGTGTTGT
+ACTTTAAGCATCACCATTCACATTGGCAACAAAAATCATCAGAATTTTTT
+TCAGAGGGAAGACACTTGGGGCCATAATAGAGACAGATTAAAGTGGTGTC
+TTAAAAATAAAATCTGACCACAATAAAAATCAATCAGAAATGCAACAAAG
+TATAGGAAGTTACAATTTGTAACAAGAACAGTTAGTCAATGTTGGCCCAG
+AAATTAAACTAGGTTAGCATACACAAGCACTAAAACAGTATAAGAAACAT
+TTAAACAGTTTAACTTTTTTTTTTTTTAATGTCTAAGAAGCAAAAGGTTG
+AGGGTATGGGAAGAAGAAACAAGACAGGAAAAAGGCTGAGCCTGAACTAA
+AATAAGGAGACAGCCTACTTCTTATAGGAAATGCCTGCCAAAATCCAGGG
+AAGTGCCATCTGTAAGTCATTCTATCCTCAGTAAAAATCTCTTGTAAAAT
+GAAAAGGTGAAATAAAGAGGTTCTCAGACATACCTCTGAGGAGTACCCTC
+TGTCTGACAAGCAGTGAGTTTAAGACATTTTAAAGGAAGTGTTTCAAGCA
+GATAGAAAATTATACCAGATAGAATCTGGATCTGTCCAAAGGATTGCTAG
+CAGATGGATAAACATAGGAGATGTCTGTCTGTCTGTCTCTGTCCCTCTCT
+TGATCCCTCTCCCCCTCCCCTCTCTCAGACAAGATTATGTACTGATGTAG
+AACTAGTGATCCTCTTGCCTGTGCTGCTTGGGTGCTGGGATTACAGAAAT
+GTGTCTCATGCATGCAGCAGCAACCTGTTGCATGACAGACAACATAGGAA
+CAAATGTTTGCTCAACTCATAGGGAAGCATTAACAAACTAAAGTATAGAT
+GCCTCTAAAATCCAATTTGGTGAACCAGTGGATGCATAAGGGTTACTTAC
+AGTGGTATGGGTGAGTGGTTCCTTATAGGATCATGGATGACTCAAAAGGC
+ATCACCAAAATCCCACCCTGGCATGGGACACAGCTCACTAAAGCTAGAAC
+CCTGGAACTCTCTGCGCAACTTAGACTTCAGCAGTTCAGGAATCCCCCTC
+CCCTCAGCAGTCCTTACTACTTATATAACCCAGGAGTCTTAGTCAGGGTT
+TATATTGTTGTGATAAAACACTGACTAAAAGCAACTTGGGGAGGGAAGGG
+TTTATCTCAGCCTATCCGTCTACATCACAGTCCACTGAGGGAAAGGAACT
+GATACAGAGATAATAGAGGAGTGCTGGGGGGATGGATGGCTCAGCAGGTA
+AGAGCACTGACTGCTCTTCTGAAGGTCCTGAGTTCATAGCAACCACATGG
+TAGCTCACAACCATCTGTAATGAGATCTGATGCCCTCTTCTGTGGTGTCT
+GAAGACACCTACAGTGTACTTAAATAAATAAATTAAAAAAAAAAAAAAAG
+AGTAGCTCTGCTTACTGGCTTCCTCTCATGGTTTGCTCTCCTGCTTCTTT
+TTTATATGCCAATTGTGCAGTTCCTTTTTTTTTTATATTGGATATTTTCT
+TTATTTACATTTCAAATGTTATCCCCTTTCCCGGTCCCCCCCCCCAGAAC
+CCCCCTATCCCATCCTCCCTTCTCCTGCTTCTATGAGGGTGTTCCTCCAC
+CCACCCAGCCACCCACCAACTCCCACTTCCCTGCCCTTGATTCCCCTATA
+CTGGGGCATCTATCGAGCCTTCATAGGACCAAGGACCTCTCCTCCCATTA
+ATGCCTGACAAGGCCATCCTCTGCTACATAAGCAGCTGAAGCCATGTGTA
+CTCCTTTGTTAATGGCTTAGTCCCTGGGAGCTCTGGGGGTCTGGTTGGTT
+GATATTGTTGTTCTTCCCATGGGGTTGCAAACCCCTTCAACTCCTTCAGT
+CCTTTCTCCAACTCCTCTATTGGGGACCCCATGCTCAGTCCAATGGTTGG
+CTGCGAGTATCTGCCTCTGTATTTGTAATGCTCTGGCAGGGCCTCTCAGG
+AGACAGCCATATCAGGTTCCTTTCAACATGCACTTCTTGGCATCTACAAT
+AGTCTCTGGGTTTGATAACTGTATATGGGATGAATCCCCAAGTGGGACAG
+TCTCTGGATGGCCTTTCATTCAGTCTCTGCTCTATACTATCTCCATATTT
+GTTCCTGTGAGTATTTTGTTCTCCTAAGGAGGACTGAAGTACCCACACTT
+AGGTCTTTCTTCTTCTTGAGCTTCATGTGGTCTGTGAATTGTGGTCTGTA
+TCTTGGGTATTTGGAGCTTTTGGGCTAATATCCACTTATAGGTGAGTGTA
+TACCATTTGTGTTCTTTTATGATTGGGTTACCACACTCAGGATGATATTT
+TCTAGTTCCATTCATTTGCCTAAGAATTTCATAAATTCATCATTTTTAAT
+GACTGATAGTACTCCATTGTGTAAGTGTACCACATTTTCTGTATCCATTC
+CTCTGTTGAAGGACATCTAGTTTCTTTCCAGCTCCTGGCTATTATAAATA
+AGGCTGCTATGAACATAGTGGAACATATGTCCTTATTATATGTTGGAATG
+TCTTCTGGGTATATGCCCAGGAGTGGTATAGCTGGGTCCTCAGGTAGTAC
+TATGTCCAGTTTTCTGAGGAACCGCCAAACTGACTTCCAGAGTGGTTGTA
+CAAGTTTGCAATCCCACCAGCAATGGAGGAGTGTTTCCCTTTCTCCACAT
+CCTCACCAGCATCTGCTGTCACCTGAGTTTTTTATCTTAGCCATTCTGAC
+TGGTGTGAGGTGGAGTCTCAGGGTTGTTTTGATTTGCATTTCTCTGATGA
+CTAAGGGTATTGAACATTTCTTTAGGTGCTTCTCAGCCATTTGATATTCC
+TCGGTTGTGAATTCTTTGTTTAGCTCTGTACCCCATTTTTAATAGGGTTA
+TTTGGTTCTCTTGAGTTCTTTGTATATATTGGATACTAGCCCTCTATTGG
+ATGTAGGGTTGGTAAAGATCTTTTCCCAATCTATTGGTTGCTGTTTTTGT
+TGTTGTTGGGTTTTTTTTGTTTGTTTTTGTTTTTGTTTAAGGTCACATTT
+TTAAATTCTTGATCTTAGAGCATAAGCCATTGGTGTTCTGTTCAGGAAAT
+TTGGGGACAGAGAGTCATTGTGAATCTGATAAACTTCAGGGACTTCCTAA
+GATTTCTGGGCTGTTTCCTTCCTTGAGTCTCTTTGTTTCTTTGTTTGTTT
+TTTGAAATGGAGTTTCTTTAGTCCAGCCAGTCCTTAAATGTACTATTTAG
+ATCAGGTTGGCCTTGAACTCACAGAGATTTCCTACTTCTTTCTCCTGAGT
+ACTGGGATTAAAGGCATGTGCCACCTTGCTCAGCCACTTTCTGAGTCATA
+ACAAGCATTCCTTCAGAAGTCCCTGACTCTGAAAAAGCTTGCTATACAGC
+ACAATCTTTTCTTTTTCTTTTTTTCTTTATTTTATGTTGATTGGTTAGTT
+TTGTGTGTGTTATGTCAATGGGTATGCATTGACATAGTGCTTGTGTGTAA
+GTCAGAAGCCAACTTTCAGGTGATGGTTCTCTTTTTATGATGTGTGTTCT
+GAATATTGAAATCAGGTTATCAGGCAGGCCTGCTGTTATTTGCTGACTAC
+ATGTAGAAAGTATATTTTAGATATGTCTGTTGAGTAATTAGCCTTACTTC
+TTTCTTATTATATTGATGCTACTGGAAGGAATCCTGAGCCCATGCTTAGA
+GCTGTCATTAATTTCTGTTTTGGCACTGCTAAGACTGATGTCTGAGGGAA
+AGCCTCTAGTGTCAGACTTTCCGCCTTTCATCCTAGTTTATTACTGTATT
+CCTTTCCAGTAAGGTCTATGGCATTGCTGAGTGACATGTCTTTTTGCTCT
+GTATGTGTTGAGAATTATTGATCCCTTTTCTTTCTCTTAGGTTTATGGAT
+GTCTTTCAATGCCTTTTGAGCCAGAGTCTCATTGTGTAGATCTAGCTTGC
+CTAGAACTCATAAAGATCCCCCTGCCTCTGCCTCCCTGGTGCTGGGATTA
+AAATCACATGCTAGAATTTTTTAATTAGAGGTAAAAGAGAAGGTGACTGT
+ATAGATCATAGTCCTTTAGATTAAGATCCTAGTACAACAGTCTGCTTTTG
+ATGTTTTAGAAATGGCATGCCCCATTCTTAAGTCACCAGAATCACTTTCA
+AATATGCCTTGTGTTACAGGTTTGAGATACTTTAGAGTTTTCCACTCAAG
+TGGCCCACCAGCCTCCTCTTCTAATGGGCGGGCTTAGCTTGTTGGATCAT
+GACATACCAAACAGGTTTTCTCAACTAAACTTACTAAAATACTTTACAAA
+TAGTACCCAGATTGTGATACCAGTATTCGGTTTCTTGCAACAGAAAAGCC
+TGTAAGCCTGTGGAATCTAGGGAAATGTAGTTGGACCTTTATGCACATTG
+AAAGTAAGATTGAAAAGAAATAAAGGAGATGTATTGACTTGCTGGGTTTT
+CAAGGCTTCAAGGATGCTATCTAAAAGTAAATCTACCTTTTTACAAAGCA
+TATGATTACCTGAGATTTGAGAATCCTAGACAGTATCTTCTAAAGATGTT
+TCAATGAAATCTTAAAAAGAAAAGGGGATCAGTACCAATATGCTGCTCAC
+CACAGTCCACCTTTACGCCGATATTCTTAATTTTTATGAAATGTTCTTTC
+TTGTCCTAAAATTCCATTTAGATTGCTCTTTGCATTTTGTCATCAGTTCT
+CCTAGATCTGTCATGACGATGACAACCTCTCTCTCTCTCTCTCTCTCTCT
+CTCTCTCTCTCTCTCTTTTTTACTTAGAAATTCTCCAGAATTTCAGAAAC
+TCTTGGGCATCGCTATGGAACTGTTTCTGCTGTGCAGTGACGATGCGGAG
+TCAGATGTCAGAATGGTGGCTGATGAGTGCCTCAACAAAGTCATCAAAGT
+AAGCGCCCCATAATGATGATAATGGTGATGCGTGCTCCTGTAATTGTCAT
+GCCTTAAGAGACAAAGCTCCAGATACCTACATTTTTTCCATTTTGGGCAT
+GTGGCTTGGAGGACCTGGGGTATTTTCCCATAAACAGCTAAATGTGTTCC
+TGCGGAACTTTTTTTTTTTTGCACCCTTATAGTTCATAGGTCCATTCTGA
+AGGGCATCTGTGTGACCACTGGCTGCCTATTTCTTAAGAGGAATGTCTCT
+ATGGGCTGCCACTTTTGTTTGGGCATTGCTTGGAGAACCTCAGAGCCCTG
+GGGGCACAAAGGTGGGTGTGGGGGGAATGAAGTCTTGCATGCCTTCCTTC
+ATTTCTTTTCCTGCCCCTCTTAGTGGAGGTCAGACAATACTGCTTGTTGG
+TGAAGATGTAGCTTCTCAGGTTAGGTTAATTGGAGGTAGCATGACCTGAA
+TGGGGAGAGAGGAGAGTACAGACAGGAGAATGGAGCCAGGAAGCAATAGC
+CACTCAGGTGCAGTGCCAGAAGAGGTGAAAAGGCTGGGCACCTGTCTCAC
+TATGTCATTGCTTCCTAGTACTGAGTGCAGACCACTGCAACTCCAGTCTG
+TTTCTTTGTTCAGTTTTCTGCTATGAAAGAAGATTGAAGTCCTCCTTCCC
+CGCCCTCATCCTAATGCAGGTGGGCATTAGGAAGGGCAGAGAGAACAATG
+TGACTCTAAATTGACCATATCCTTGTTACTTCAAGTGCAAGCAGCTTTTC
+CTCTCAAACCTCTCTTCTGCCGTGGCTGATGCTGGTCAGGTTGGCTTGGT
+GCATGTTTGAGGCCACAGACCACTGTTGTAGCTTGCAGCTTTATGTTCTG
+GATCTTTGGCTGCCTATCTCCACTTTCTTCCTGTTTCTTTGTGGTTTCTA
+GTCAATGTCAGGAATTCCAGCTTTTACAGCACAGCCCTGAGTCAGGCCAA
+TCTTCATTCTTTCCCTTCTGCTTATAGCTGCTTTCATGTTTCTGCCTTTT
+CAACTGCTGCAATGTTCAGGATAAGCCAGTAAGGTGGACCAGGATCCAGC
+TATTCACTATGATAGAGGAAAGGCGAGGCAAGATGGGAGAATGGGATGGC
+TTCAGGCCAAGTGTAGGGAATGGTTCACTTTCTAGTCTGGGACTTTTCTT
+TTCTTGAAAGAATTAGTGTATGTACTATAAAGACCAATTTCTAGCCCCGA
+AAATGGGCAGATATTACTTGTCTTTTATGTTCAAAATACATTGAGTTCTG
+ATAGCCAGGATGTGTATTCCTGCTTTCAAGTCTTGGGTGGTAAAGTGAAT
+GTGTCCAGAACTCACCCATGGTATAAAATGCAGAGCAGAAAGAAGGTAGT
+AATTTTTTTTTTTCCCAAGAAGATAGAAGTGGTATTCTGGAGTCAGGAAA
+GACCCTCCCATTTTTACCTACTACCTGGAGGTTTGTCTCAGAGAGGAGAG
+CAGGCCCTTTTTAGGCTGTGAAAAATGGTGTGTCTTGAGGGTCAGTTTAA
+GTATTTTGTGTCTTGGTAGAAAATGAAGGCCTCTTGCAGATCACTGGCTT
+TGTGTGGAAGCCAGTTTTGATAGGGAATGAAAAGAAGGACCCCAGCTGAG
+GGGAAGGCATGAGCTAGGATGGTGTCAGAGTGTGTTTTGTCAGCTATGTG
+TGGAGGCAGGTTGGGAGTTGGGGGTAAAGGAAGCTATAATGAGCTCTATT
+ATCACCCCATGAAGGACTCATGGAAGAGCCATGCTCTACCCTTAACAGAG
+TTGAGCTTTTGTTTTCTCTACTACAAAATAAAGTAGATTTTGGTTCATAA
+ACATTTATATAGCTCAAACAGTATTTGATGTACTCTTAAATTTCATTTGA
+AAGTAGTCTTTTAATGTTTGTCAGCATGGTTTGCTTCTTTTGTTCAAACT
+ACAGCAGCAGAATCAGGGTCTGAAATTTCCTGGTGGGGCCAGAGAGATTG
+GTTAAGAAGAGAAGTTACTGCTTTTGCAGAAGACCTGACTTCAGTTCCCA
+GTACCATAAGATGTCGTATAACCACTTGTAACCCAAATTTCAGGGGGTCC
+TGTGCCTTCTTCTGACCTCTGTATGTTCCTGTACATATACATACACTCAG
+GCACACATAAAATGAATTTTTCAAAAAAAGTTGGTAGAATGTTATACTTC
+TTCTGAATCAATTTTCCAAAGTGTGCCTCTGTTTTACCTTTGAAACTCAT
+GGCCCAGTGAATGATCCCTGTGTCCTAGTTTCATCTCTTTGGCTATGATA
+AATATCCTGGCAAAAAACTGAAGGGAGAAAGAACTTGATTTAGAACTTCA
+GGTAACAGTCCATCATTCTAGTGAACTCAAAGCAACTAGAACGCAAAGCA
+GTTGGTCACAGTCGCAGTCAAGGGCAGAAAGGAAAAGAATGTGTGTGTGC
+TTGTTGCTCAGCTGTCTTCCTCTACTTTAATCTGTCCTAGGTCTAAAACT
+TAGGCAGCGGTGTTACTCATTTTCAGCATTGGTCATCTCACATCAATTAA
+GGCAATCAAAATAGTCCCTCACAAATATGGCCACAGGACAACCTGATCTA
+GACAGGATCTTAATGAGACTATTCCCAGGTAATCTAGGTTATGTCAGGTT
+GACACAGCTAACCATCTCATCTCTCCTTGGTCTCTAGATATCAGGTTTGC
+ATGTGCCTGAAAAGGAGAGCTGGGTCACTTCCTCACATTTTTTTGACATG
+CCATTTTAGAAGAGAAAGTTCTTAGGGACAAGAATAGGGTGATTTTCTCT
+AATGTGGACTTTATGCTATTAGCCTTGATTGTGGCTCGAAGTCAGATACA
+TGAACCTTATGTTTTAGTTAGAGTTTTATTGCTGTGAATAGGCACCATGA
+GTACAGCAACTCTTGTAAGGAAAACACTTAGTTGGGGCTGGGTCACAGTT
+CACAGGTTTAGTTCATCATCATGGTGGGAAGCATGGCAGCATGCAGGCAG
+GCATGGTGCTAGAGAGGTAGCATAGAATTCTATATCTGCACTGGCAATAG
+GAAGAAAAGACTGCCATTGGCCCTGTCTTGAGCATCTGAAACCTCAAAGC
+CCACCCCCAGTGACACACTTCCTCCAACAAGGCCCCACCTACTAATAGTG
+CCATTCCCTATGGGCCTGTGGAGCCATTTTCAGTCAAACCTTCATACCTT
+ATTTCTGAGTTAGCCCCAAAGTGATCAGGAAAGGTCAAGAAATGAGCTCT
+GAGGACTGTAAGACTTCCTAGCTGACACTGAGACCTGGGTGGGTCAGGGA
+GTTGGGGCTGTGAGCATCCTCATCATTTTGCAAGTTTAGAACTTCAAAAA
+AAGATGATTCTGTTGTTACATGTTCCCTGTCAGAGAGAAGGGCTCAGATC
+CTACAAGTACAATCCTCATATCTTGCTTACTTAGAGACTTTCACCTGAGC
+TGTCCAGGTGAGCCAGTGAAGACTGTGTGCTTACTCTGAAAGCTTTAGGG
+TTAATTTTAATTTGATATTATATAGTATTTGATTACTATAAATGTTATCT
+TCTGTTTTTATCCTGGTAAAGACCTGTATTTAGAATATTCTGTAATTTTT
+ATATGTGTTTACTTTTTTCTTAATATATAGCTTCTTATTTAGTTGCTGTT
+AATGTTACTTTTCCTTAAAATTTGAACTTTTGGTTACCATTTAGCTTTAT
+GGGTAGAATTTAGATCTATAGGTAGAATGTATCTTCTATAACAAGTCTCT
+TCTATTTCTTTGCAGGCTTTGATGGATTCTAATCTTCCAAGGCTACAGTT
+AGAACTCTATAAGGAAATTAAAAAGGTGGGTGTTTGCTCTGCATTATTGA
+GAAGATGATACTGTTTTACTGTTGAGTACCCTATGAGATTTCTAACTTGC
+AAGTTATTAAATAACACTGTTAGGAAGAAGTGCCATTTGGTGAAGCAGAG
+TTTAGTTTTCTTTAAAAACGTACTCCTCATTTTCATTAATTGAAATAGAA
+ATTTATAGCACCACCTTAAATTTTTTAAAGATTTTTTTTTGTTTTATTAT
+ATGTGTATGAGTTGCCTGTGTGTACATCTCTGCACCGTGTATGTGCAGTG
+CCTTTTGAGGTCCTCTGCAAGAGCAGCAAGTGCTCTTAACCCCTGAGCTG
+TAACTCCTAGCAACCAAGCAACCAACCAACAACTTACTTCTCTTCTCTCT
+TCTCTTCTCTTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTC
+TCTCTCTCTCTCTCTCTAGCATGACCCCAGCCAGTTCTGAGGTTGAAGGC
+AGGTTTCCCCCCCCATCTACTTTTATAATATTTTAGTTACATGCAAGTAA
+TAAGGTTAAGCAGTACAATAGACACAAACAGTCAAGGAACAAGCTAGGCA
+ATAAAGAAAGTCTCTTGATCACTCCCGTGATGACTGTTTCTAAGGGCTTA
+TCTGGATGACCAAAATATCTGGGCCTACTTCCCTGTCTTAGCTCAAAGTC
+ATATTCATGCCTGAAGCCTGTTCTAGCCTAAAATTACATTCCTGCCTGAG
+CTTACTTCCTTGTCATGGCCTAGTGTCAGATTTCTGCTAAGTGGTCCCAA
+AAAGCTCTCCACATCTCCCCCTTTTTTATTTCATAAACAAGACTGCACCT
+GTCTTAGGTGGTTCTAACAAGAATGCCTTCCTTACATGTCGTGGAATATC
+TATTATCAAAGTCGTGCATTTCTGTCTTAGGTTGGTAAGGCTCTGTGCAG
+AACTTACCCGTCAGCGTCAATGGCTGCCGGCCTATTAAATTAATAATTCT
+GTCTGGGGGTTCATTTTTAGTTTCAAACCATGTATTTTGGCCACCAACAT
+GTTGATGCTATTAAAGGCAAGTTTTATTACAGTGGGCAGGAATAAAAATA
+TTCCCAGGACAAAAAGGGCTATCATGATCAAGCTATATGTGCCATTCTTG
+AAGCTTGACCAAGATGGAAATACTGACATATATTCATGAATAATTTTATC
+GACAATACCTGCAGCGTCAGAGCTCAGCAGAGTACCATTCTTTAAATTCA
+TAATCTCAATATGCAAAATTAAAACATCCAGAGAGGTGTTAGAATTATGC
+CAAATACTCTTCAAGTGTCTTTCAATTTTCCCAATTATATTGACTCTCAT
+TGTAAATTTTAGAAGTAACACAAATCCCTTGGTATTTCGCATAACACTTG
+AGATGGCTTCTTACTCTTAAACTCTAAACCTCCTCTCATAATTTGAATAT
+TATCAATAAAGAGCATAAACCATTGTTTCGGACACCTATCTAAATCAATC
+TTTCTTTCGTTCTTCCTTTTCTTTTTTTAAATCTGTATTTTTATAATTGA
+AGCCCTCTGGATTTTAAAAATCTTTGAAGTAATTACCATCTATTTCACAC
+TGTTAATTTTGACTTTCTCTGATTAAATTAACAGAGAAAGAGAATGTTTA
+ACTCTCCAAGAGAGGATGCAAGCAGCTCCAATATCATAGACCTAGCTGTA
+GAAAGTGCATGTGACTCTTAAATGTAACACATCACTTGTTAACTCCACTT
+AGAATGGTGCTCCTCGAAGTTTGCGTGCTGCCCTGTGGAGGTTTGCTGAG
+CTGGCTCACCTGGTTCGACCTCAGAAGTGCAGGTAAGTTGTACCTCTGTA
+TTATTTTTAAGATTTGTTTGGTAAATAGCTAGTCCTGCCTGTCTTTTTTG
+TTCCAGTGCATATGTCTACACCTTGAGACATCATTCTTGTCCACTCTGTG
+TTGCCTAGCTATGTCCTGTCTGGTTGCTGTCAGCATTTTGTTCTTATATT
+TCTTTCCAAAGACCCATCTCTATTAGGAATACTCTTATGTCCTCTATTAA
+TGTGCTTCTTTCTTGTCCCATCATTCCCCAAGAGACTTGTGGGATGTATT
+TCCAGAGTAACATAGTCTCACCATCTTATTCTGTGCTTCATTTCCAACAC
+GAATGTAGTGAGCACTCAATGTGTTGGTTAAGGAATACTTACTGGATGAA
+TGACAACTGTCCCTGATCCCATCAGCAGCAGAGTTAGCAATTATTGAAAA
+ATAATCATTTGTATAGTACTTGGGTTGGAGAAGTAGATGAACTGTGCATA
+AAATTTGAACTTGTTGATTGTCTTTGACTCCTATGGTGTTAGGTATTAAA
+TCCAGGAGCTCAGGCATGCTAGGCAATTGCTTTACTGCTGAGTCTCATTG
+TCAGCCCACACATCTGGCTTTGTGGCTTGAGTAACAAAACAGGATTGTAG
+TTATACCGTTCTTTCCTTTTTCTCCTCAGTTGGTAAAATGTACTGCGTAT
+GTCCCAAAGACTGTATCCGTGAGAAACATAGTAGAGTGCTCTAAATCTTC
+ACACTAACAAGGAACAATGATGTGTTCAATTTAGGTTAAGTTTCAGTAAG
+AATTTTATGTGGCCAGAAAGATGTAACCACGAAGGAAGTTTCTTTCTATA
+GAACTTTTACTTTTGCTCTGCTGATATGTTTATTTTTGTGTTGGCTTTGG
+TGCTAAGGCTAGGCATGCCTAAGTGTGCGCTTCCACACTTCCACATTTCC
+AGCTCAAGTGCCTTACTTTGAAAATTGTCATATTATTGAGGTAATACATC
+CCAGGTTTACAACACAGCAGACAAAATAGCTGTTCAAAAGTATGACTGTC
+CTGTCACAGGCCCTTCCTTAAACTCTCTTAATTATTCCCTAGTGTTGTTT
+AGATAAAATCTAGACTTCCCACTCAAGGCAAGAGATAATTTAGCCTTTGC
+TGGCCCTTTCCTATCTCATCACTAAAGAAATACTATATTGATTCATTGAT
+CCAATTTAGTTCTTCAAATAGGTTTTATTTCCTCAAGATAGCTTGATGTC
+TCAGAAAATAATCACCTCCTTCTGCAAGCATTCTAGCCTCCTTATCTTCT
+AAGTTGTAACTGATTTCTGCCTAACAAAGATGGCTGTAGTATCTGTAGGC
+CTAATTGCTATATAAGAGGGCCTTGGCCCCACTATAGCAGACACCCTACT
+CCATCCTCGTTTTTCTGTCTAAGGTAAGGAATTGTTGTTTGAAGTCCCCA
+TTTAATTTGACCACTTTTTTACCCCTAGAGATTGTGCAGCTCTCCTGGAG
+AGCATAAAAGAAACTTGTGTTGATTTCTAATTAAAAGAAGCTGGGTTACC
+AAACAAAAGTCCCATAATGTCTGGCATGCCAAATTAATGGGTTTATTTGG
+CTTATATACAAAAGCACGGACAACTTATAAGTAGCTATGCCTTTGAAGCT
+TCACCTCCAGTTAGTTTGCCTTTTACATAACTCAGTCCTTCTAAGGTTAT
+GTATCCCTGTAGTACAAAGGAATGATGATTGGAATCTTGGGGTCTTATGA
+CATTTTCTTTTTCTCCCCAAAAGGGAGTATTATCAGACTATCCCATAGAC
+CTAGGCACCTGCTCTGTAGTTCTCCTTGCTTTGTTGTCTGCTTGTGGCTC
+CAAGGTCTCCATATAAGTCACCACAGCTACTCTGCTTCATGGTAGGGATG
+GTCATGTCAAGTGTACAGGAAACAGCTAGCCCACAGTAGCTTTTAACATG
+ACTTTTTCAGTAAAATGATGTCCACATTTTATTTTGTTTTTCAAAAGTTA
+CTGTATAAGGGTACATACCAGTACCTGTATATAGAAAGGTAAGAGGAGTG
+TCATTCCTGGGCATGGTTCCTCAGGAGCTGTCCAGCTTGTTTGATAAGAT
+AAAGTCTTCACTGCCCTAGGATCTGCTGATGCGACTTGGCTGGCTGCCCC
+TTAACCTCAGAGAATCACTGGTTTCTGACTTCCTAGTGCAGGGATTACAA
+ATGAGCACTACTATGTCTGGGTTTTTATGTGGATGATAGGGATCGAACTC
+ATATCCTGTGCTTGGTGCATAAGCTATCTCCTTTGTTCCCTCCAGTTTGT
+GTTTTTAAAGTAGTAGTTTTTAATCTATTGTTTCCTATTTAAATTATATT
+TTAATAATTGCTATAATTTGTGACAAATTTTTTAAATATTTATCTTTAAT
+ATTTTTTTACAGTCCAGTCTTTATCCCTCTCCTGATCTGCCCTCCCAAAG
+TTTTTCAAACCCAAAAAAAGAAGAAAAAGAAATTGGAGAGATCGTACATT
+AGTAACTTAACAGAATACCTGAAAGCCCTAGAACAGAAAGAAGCAAACGT
+GTCCAAAAGGAGTAGACAGGAGGAAATAGTCAAACTCAGGGCCAAAATCA
+ACCAAAGAGAAACAAAGAAACTGATAAAAGAATCAACAAAACCAAAAGCT
+GGATCTTTGAAAATCAACAAGATAGATAAGCCCCTAGCCAAACTAAGGGG
+CACAGAGACAGTATCCAAACTAACAAAATCAGAACTGAAAAGGGAGACAT
+AACAACAGAACCTGAGGAAATCATCAGGTCCTACTACAAAAGCCCAACAA
+AACTCAACAAAACTGGAAAAATCTAGATGAAATGGTTGATTTTCTAGACA
+TATACCATGTCCTAAAGTTAAATCAAGATCATGTAAACTATCTAAACAGT
+CCTATATCCCCTAAAGAAATAGAAGACGTCATTAAAAACCTTCCAACCAA
+AAAGAAGCCCAGGGCCAGATGGCTTTAGTGCAGAATTCTACCAGACCTTC
+AAAGAGGAGCTAATACCAACACTCAGCAAATAATAATTTAAATCCAACTT
+TTTAAATTACATTTTATTTGTCTGTGTGTCTTTATATGTGTACCATGCCT
+TTGGGTGAGGATAACTTATAGTTAATTCTTCCTTTCCTCTATGTGGGTCC
+CAGAGATCAAACTTGGGTCTTCAGGCTTCTCCTTCTTTACCCACAAAGCC
+ATCTTGCTTGTCCCTACACCCAGCTTCTTAATATTCTTTGTAACTCATGG
+GAGAGATGACAGACAATTGAACTTCATCAGCATTTATGCCTCCTGTACTT
+GTAGTTGAGCATTGTGGTCTCCATTGAGGACTAATTCACCTATAAAACTA
+GGTTTTTTCCTGACAGGGAACCATGAGCTTGTTGTTTCTTAACAGAGGAG
+ACCTGAAGAATGATGAGTATTCCTCTTGCACATACAGGCCTTACCTGGTG
+AATCTTCTTCCATGCCTGACCCGAACAAGCAAAAGACCGGAGGAATCAGT
+TCAGGAGACCTTGGCTGCAGCTGTTCCTAAAATTATGGCTTCTTTTGGCA
+ATTTCGCAAATGACAATGAAATTAAGGTATGGCTGTTGCCTCTTGGCATG
+AGTCTTGTGTGGCTTTGGGGAGAAAGTCATTTGAGATTGCTTCTGGTGTC
+CTTTTGGCTTCACTGAGAGACATCTCAAGAACTTCTTTTTACTTCTGCTT
+TCCTTTCATGGGGTAAGTTGTCAAGGGAAATAGCTTATAGATGCAAATTC
+AAAGGCATTTCCCCAGAGTGGATTTAGGTATACTGGGTTGGCCACTTGAG
+CCAGCTAAGGAAAAGAGACTTCATAGGAAAGAGTGAAGAAGAGTTAATGG
+GCCTTGTGGGTGTGGGCGCCCTAAAGCCACCAGGACTCGAGTTTGGTTCA
+TAGTGCCCAGAAAGCAACTTATTACATAATTTGTGGGTTGCAAGATTCTT
+GGCTTTGATTTTATCTTTTTGAAAAAGTATTTTTTTTTTAATTTATTTAT
+TTATTATATCGTTACGGATGGTTGTGAGCCACCATGTGGTTGCTGGGATT
+TGAACTCCAGACCTTCGGAAGAGCAGTCGGGTGCTCTTACCCACTGAGCC
+ATCTCACCAGCCCCAGTTTTTATTTTTAAAGTATTTATTTTATATGTTTG
+GGTGTTTTGTCAATGTACTGTATACATGCCTACTGTTCTCAGAAGCCAGA
+AAAGTGTTGGATATCCTAGAACTAAAGTTATAGATGATTGTGCGCCACCA
+CATGGGTGCTGCAAACTGAATCTGGATCCTCTGAAAGAGTAACTAGTTCT
+CTTAAGCCCTGAGCCCACTCTCCAGCTTCTACCTTTTCTCATTGTTTATC
+TGTGTAAGTGCGTGTGCGTGTGTGTATGTCTGTCTGTCTGTGTGTCTGTC
+TGTATGAGCCTGTGTGTGAATGGAGGCTAGAAGAAGGTGCTAGGTGTCCG
+TCTTTATCACTCTCTGCCTGTTCTTTTTGAGGTTGAGTTTCCCTGAACCT
+GAGGCTTACTTTTTTTTTTTTTTTAAATTGGACATTTTATTTGTTTACAT
+TTCAAATGTTATCCCCTTTCCCAGTTTCCCTTCTGCAAACCCCCTATCTT
+ATCACCACCCTCACCCTGCTTCTATGAGGGTGCTTATCCACCCACCCACC
+CTCCCACTCACTCCTGCCTCACTGCCCTAGCATTCCTCTACACTGGGGTA
+TCAAACCTTTATAGGACCAAAGGCCTCCCCTCCTATTGATGTCAGATAAG
+GCCCCTTTAGCTCCTTCAGTCCTTCTCCTATCTGCTCCATTGGGGTCCCT
+GTGCTCAGTCTGATGGTTGGCTGTGAGCCTCTGCATCTGTATTGGTCAGG
+ATCTGGCGATACAGGAGATAGCTGTATCAGGCTCCGGTCAGCAAGCACTT
+CTTGACATCAGCAGTAGTGTCTCTGGGTTTGGTGTCTGCATGTGGGATGG
+ATCCCCAGGTGGGGCAGTCTCTAGATGGCCTTTCCTTCAGTCTCTGTTCC
+ACTTTTTGTCCCTGTATTTCCTTTAGACAGGAGCAATTCTTGGTTAATAT
+TTTGGAGATGGGTGGGTGGCTCAATCCCTCAACCAGGGGGCCATGCCTAA
+CCTCTGAATATGGTCTCAACAGGTTCTCTTTCTCCTTTGTGGGGTATTTC
+AGCTAATGCCATCCCTGTGGGGTCCTGGGAGGCTCTTGCTTTCCTGGCAT
+CTGCTGCTGCTGTCAGTGTTATTCCTCCCTCCTTGGAGGGGTGGAAGCTC
+CTGATGGTGCAGAAATGAGTACTGCAATACTGTCAAGAGTCTCTGTGATA
+ACTGCTGTCAGAGCCAGGGGACAGGTGTATACACACACACACACACACAC
+ACAGTGGTTGGTTCTGGATCTTTCCATGATATAGATGCCATTTGAGTAAG
+GTAATACTTTCCTTTTTTTTTTTTTTTTTTTTTTTTTTTGTATGTATCTG
+TAGCTGTACAGATGGTTGTGAGCTTCATGTGGTTGTTGGGAATTGAATTT
+TAGGACTTCTGCTTGCTCTGGTTGGCTGTACTTGCTCCGGTCAACCCTGC
+TTGCTCAGGCCCAAAGATGTATTTATTATTATTAAAAAAGTACACTGTAG
+CTGTCTTCAGATGCACCAGAGGCAGACATCAGATCTCATTATGGGTGGCT
+GTGAGCCACCATGTGGTTGCTGGGATTTGAACTCAGGACCTTTGGAAGAG
+CAGTCAGTGCTCTTATCCACTGAGCTATCTCTCCGCCCTCCCCCCCATAC
+TTACTAACTACTTCCTTCATGAACCTGTGACATTTAAGAGATCTAGTCAT
+TCTTCTGCCCATGTATCATTGCTGTGCTCTAGAAACAAATAGTGCCACCC
+TGTCCTACTTATCTTGGTTCTGTGTCAGAGGCAAACAATAATGCTTGCTT
+CCCTGGGTTTAGATTTTTAAATTTTACATTTGTTTTTTAACTGTAGAAGA
+GGTGAATTTGGCTCTAACACTTGTTTCTTTTTACAATAGTCCTGTATATA
+TTGAATATGTACTTTATTATGCCCTTATCAATAGTGATGGCTAATCGTAT
+ATGATTTTGAACACCTTTTTGTTTTCTAAACCTAATAATTATTGGTTGTT
+TCTGAAGTCTCAAACAGAAGTGCCATTTCTTACTCGTTAGCTTGCTCAAT
+AGATGGCCTATCTCCTTTGGCAGTTATCGTCCCACATCCTGCTTAATATG
+GCCAGTGATTCTTGAGTTTGTAAATTCTGTCATCCTGGAGACTCCTTTAC
+TGCTCTCCTCTTTCTGGCTGCTGTCTTCTGTCTAGGTTTGCTTCCCAAAG
+GGTTGTGCAGGAAGCAGTTGGGATTTGACATCCCTAAAAATTCCTTTGGT
+ATGCAGATCACTTTTTCCTCAGGAGAATTCAATCTTTGTTTTGTAGTACA
+GAACTGGAAGATCCTGTCCACACCAAGAGGCAGGATCCCCAAGGAGTTAG
+GTGTGTGGTGTAAAGAGGGACGCCTGTAAGGAGGCTGCAGCGGACAGAGT
+GTCTGGAGAGATGGGGTTCTAGGTCTTATGTAGTAGTGGAGTCTCCACAG
+AATGGACCCACAGTAAGAGGACACTTGCACACAGGCTGCCATAGTACTGG
+GGGTGATATCCTGAGAGATGGAACTAGGATGAAAATTGCCAGAGTCTCAC
+CCTGGTATGGACTGGGGGGGGGGGGGGTGTCGTCCTCACATAAAGTGTTT
+CTCGGTAAACAGGCTGACATGAAGGAATGGGGAAGGGCTAGAAGGTAGGG
+CTGAGAGGGTCCACAGGAAAGAGTAACTCTGAGTTTCCCTTTTTACCATT
+CTTACGTGTGTGTTTATTTCTATTAGCACTTTTATTGGTCTGAATATCAT
+TTGTGGGGTGTGTGGGATGTGTGTGTGTGCACGTGCGCACGCGCTATATC
+ATCATCAGCTAGCCATACAAGATATACAGAGACATACTTACAATCAGTTG
+CAGCCACCAATGAATGTATCACCAGTGGCCCTGAAACTGACAATGGCAGT
+TCTACTATGGAATGTGCTATGAAGTCAACAGTTAGTTAGGCTAAGTGTGG
+AAATAGTGAGTGAATGAGATAAAGGAGGGACATAGGCAAATGAAACAAAG
+ACAAGAGAGACAAGGTTAAATAAATGAAAGGAGAAGGTGGAGGGGTCAGG
+AAGAACATTATGGGCTTATTCTGGAGATTACTAGGAATATCTTCTCTGTG
+ATTTCTTAGAAAGTGGTATGTGGTATGCTGTCATGCCTATAAAGTTGCAG
+GCTTCCACTCACAGAGGCACCAGTCTAGGGAGGATGTTTTAGTACAGCAG
+CACTTCTGCAGAAAAGTCTTAGGCCAGATTATCACTGTATTTGTCTAGTG
+CTTTTTCCTTATTATTGTCAAGTTTTTTAAAAACTTTATATAGTGTTCTT
+ACTACCTCTGTTCTGGTATACCACGAGTAGCCTATAGACACTGAGACTGA
+CACAGTGAACAAGTTCCTGATGAATGTGTGTGTGTGTATTTCTTATTCAT
+GTGGGTTCTGAGGATGGAACTTAGGCCATCAACCTTGGCTACAAATTCCT
+TTATCATTTGACCTAGCACACAGCCTTCCTAATGCGATTAATAAGGAAGT
+AAATATACTGCTAATAAATACTTGGTGACTATTGAAAATTTGGTATTTTT
+TGCTCCATCTATGAAAAATGTGTCATCTTGTCACAGTTTTTGTCCTATAA
+GTTTTAGAATTCTGTGAAATGTGTAATAAGCTCCATGGGAGCTTCAGTTT
+TCATCATCTTGGTTTTGTTTGTTCTCAGGTTCTGTTGAAAGCTTTCATAG
+CAAATCTGAAGTCAAGCTCTCCCACCGTGCGGCGGACAGCAGCCGGCTCA
+GCCGTGAGCATCTGCCAACATTCTAGGAGGACACAGTACTTCTACAACTG
+GCTCCTTAATGTCCTCCTAGGTAAGAGAGAAAGGGCCTGCTGGCCCAGTC
+TTAGCATCTGCTCAATCTTCTAAACTACACTGACCCTTGCCATCATGATT
+AGACCATTTGCAGCTGCTGACTGCTAAATGTGAAGTGTGTAGGGGATGTT
+GCAAGCCCATAAATGGTCTCGGAGACTTTTCAGCTGCGGCTGTGTCTCTA
+GGACACCCAGCTGGTACACAACCTCATCCACCTTCCTGTCCTTCTGTATC
+AGAGGCCTGAGGCTATGCTTCAGCACGCTGTGGGTACTCTAGGGAAACTG
+ACATTCCCCTACCCCCTCTCTCCTGTCAAAATCAACATGAACAAGTCTTG
+CTGGAATGAGCATATGGACATTTGATACAACTCTCTGAATTCCACATGGA
+CATTTGATACAACTCTCTGAATTCCACATACAGTTCCACTCCCTATAAGG
+TCCTGCCAAGCTAAGGATATATTTTATGCTGCAAGGCTGCTTTTGATCTG
+AGTGTGCACAGCCTGTGTTTCTCAGTCTCGCTTATGTCACCTTTCCCTTT
+TACTGCTAGGTTTACAACAGGGCACTCCTGTAGGTCTCCCTTTTTCACCA
+GCATGTACTGTGGGTCTCTGAGCAGTGGACTGGCTGTTGAGCCCCTTTGG
+TTGTCTTTGCAGGTCTGCTGGTTCCCATGGAAGAAGAGCACTCCACTCTC
+CTGATCCTCGGTGTGTTGCTCACATTGAGGTGTCTAGTGCCCTTGCTCCA
+GCAGCAGGTCAAGGACACAAGTCTAAAAGGCAGCTTTGGGGTGACACGGA
+AAGAAATGGAAGTCTCTCCTTCTACAGAGCAGCTTGTCCAGGTAAGGGTG
+AATAGTGATAAGTTCATGTGGGACATGAAAGAAGTAGCATCTTTCCGCAA
+GTGCTGGGACAGAGGAAGTAGCTGGGAGATGGTGTGTTCCTTTTGCTGCT
+GAGGAGTCAGGAGATGTGTGTCCACAGATCAGGTATGAGTTGTTTGCTTA
+AAACAGGGAGCACACATGTTTTCCACAAAGGGCCAGAGTGTATGTGTGTT
+AAGCTTTGCATTCCAACTATCTTCACTGAACTCCCCCAGTGATGTAGTTT
+GAGCCACAAATAGCCCATAAATGTGGCCATATTCCAGTGAGACTTCATCC
+ACAGAAGCAGGCACACAATGAAGGTACAGTTTTGTATGCCTGTGACCCCA
+GTACTTGGGGTGTCAAGAGAAAAGTACAAGTTCTGCAAATGCCTGGCCTA
+TGTAGGAACCCCAAGCTCTCTGTAGCTGTACGATGAGACTAGATCTCAAA
+AAAGCCAAAATGGGGAGTAGAAAGCCAGATGTGGCTGTGGCTAGCAGTTT
+GCCAGTCAGGATTTAGGGGCATGCATATGCATACAGGTTGCGTGAGAAGA
+GCTAAAGCTAAGCCTTAAGGCAGCTTCCTGGGAGGCTTTCGCTCTTCCTT
+TTTTATTCTACACCAACCTTTAAAAAATAAAATGCATGGTTTTGGTTTTT
+TTTATTGTACATTGGTGTTTGGCCTGCACATATATCTTTTTGAGGGAGTT
+GGATCTACTGGAAGTTGATTTACAGACTGTTGTAAACTGCCATGTGGGTG
+CTGGGTTCCTTTGCAAAAGCAGCTCTTAACTGCTGAGCCATCTCTCTAGC
+CTGCATTTGTTTATTTTTTGCTTTTATCTTACCAACTAATGCTAGGGTTG
+GCAAACTTTGCAAAGTAAAAATATAGAGTGCCTGTGAAGCATTGATTTTA
+TATAGTGATTGTATAAGAATGGTTAAAATTGTCCAGGATATAATTATTTA
+TATTGCAAAATTATGTGTTATCTGAAATCAAGGTTTAAACTTGTGGGCTT
+TTTTCCCCTGGTAAATTTAAAGAAAAAACTAACAAACTCATTCTTTCTAT
+AGTATGGTATAGTATTAAAAACACCAAAAAATTTTGACTGCCATCCTTAA
+CATGTGTGGCTATTTTCCCCCTGGCATTCAGAGCTGTGTTTCTGATGATC
+GGATGTCCCCACTTGCTTCCATAGCAGTGTCCATTGGGATTATTGTCTTT
+TCTGTTCATCAGTTTTGGGAAATGAAGATCCTGAGTTTGCTTACTGGTGT
+TCTAGAGGAAGTGCTCTATGTATTTCCAAGGAGTTACTATAAATGAAAAT
+TAAAAACCATAGGAATTCAGAAAATAGCACAGACAATAATAACCCTACCT
+ATGGAAGTAATAGGTCTTTACAGGGAAAAACTAAGGCACAATTTTGTTGA
+CAAAGACCAGTGAAAACAAAAGTGAAATCTGGGATGCTTATGTATTTATT
+AATTTTGTTTTTGTTTTTAAGCATTTTAAGATTTATTTATTTTCTGTGTG
+TGCTCTGTCTGCATGTACACCTCTGTGCCAGAAGAGGGCATCAGATCCCA
+TTGTAGATGGTTGTGAGCCACCATGTGGTTGCTGGAAATTGAACTCAGAA
+CCTCTGGAAGCACCAGGGCAGCCTGGATTGCAGAATGAGACCTTGTCTCA
+ACAAAAGAACTAAAAACTTCCAATTCACTAAACCAGCAAATGCATTCTTT
+TCTATGACCTGATTAGCGCTTAGCTGATGATGGATGTTGTCTTTGTTGGC
+AGCTGGGGCTGAGTGACCCATCTCCTTCACCCCCCTGTCATTCCAGCACC
+TGCTTTCTCTTAACCGCTGAGCCATCTCTCCCGCCCTGGGATGCATTTTA
+AACATGATGTAAGACCTGTGTTTCTGCTCCTAGGTTTATGAACTGACTTT
+GCATCATACTCAGCACCAAGACCACAATGTGGTGACAGGGGCACTGGAGC
+TCCTGCAGCAGCTCTTCCGTACCCCTCCACCTGAACTCCTGCAAGCACTG
+ACCACACCAGGAGGGCTTGGGCAGCTCACTCTGGTTCAAGAAGAGGCCCG
+GGGCCGAGGCCGCAGCGGGAGCATCGTGGAGCTTTTAGGTGTGTTCTCAG
+CAAGGTCTTCTAACCATTGTGCATGGAGGCATGTTTCCTTCTGTTGCTTT
+ATGGGGCTGTACTGCGCTGAGCTACCCATGCCGAAATTCCTTGCCCAAGC
+TTACAATGTAGGCGTCTTGCTGCTTTTGCAAATAAATCTACAGTTTAGAA
+AGCTAGATGACACAATGAGGCCACACCTTTAAAGCTTGGTCTCCTGCCTT
+TCTGGCTTGTCACCTCCATTTTGGATGCAGTGAAATAGAAATATTAGGCA
+GTTTCCAGGACTCTCATGTTTGATTGTCAGGGATGAATAGATTTTTATGT
+CTTTTTTTGGGAATTTAGTGTTCTTTTTCTACTTGGATCCTGACTTTAGA
+GAACCCTTTCTATTCCTCATCCTTGAAGATACCTCTTTAACCTGGTCTCG
+TCTTTTTGATGCTCAAAGAGTTTGATCCATAGACTAGGCATTGGCAGCCT
+GACCTGTCTGACATGAGCTAGTCTTAGATGGTGGGACAGATAGGAATCTG
+GGCTTGCCAGCCTTTAGAAGTGACCTGGCATTTAGCAGGCTGTGACAAAT
+TCTGCTGACCCTGACTTATCATGGCTTGCCACAGTATATACATTGAGGAG
+CCATATTTATTATAGCTACACATTAGAGACAGTCTGCCTGGGAAATACTA
+TTGTGACCTTGTGCGCTTAAAAATTTGCCTGGACTATGAGCAGAAACTGT
+TTTACTGCTGTCCTTGTTAAAGAATTTTTATTTTTGTGGAAAGTATGTCA
+TACACCCTGGTAACTGTTTCCAATGAAAGCTTATGTCTGGCCTATGCTTG
+TCCAATAATGTGAGATCTTACAGTTTTAATTTGGCTTTTAAAGAGCAGTT
+TATATGAGCTTTTTTGACATTCTAGTCATATCTTTAAAACTGTGTATTTG
+AACATGAGTGTAATTTTCACCTTTAAAGTGTGACACTGTGGTGTTTAAAC
+ATGTCCTATGGAAATATGTCCACATTGTCTGTTTTAGGATTTGAGTTAAG
+CTTTTTGAGGATTTTTGAATTTCTTGCAGATTTTAGCAGCTTGTAATCTT
+ACTTTCTTGTTACTTTCTATGATTTACAGCTGGAGGGGGTTCCTCGTGCA
+GCCCTGTCCTCTCAAGAAAGCAGAAAGGTGATTATCTCAAAATCTGAGTC
+TTGTGTTGAGTTGAACTGCTGTTTCTGTGTTTGCATAATGCACTAGATTC
+TGCTTATATTTCCTCTCAGGAGATGAAGTGTATGGATATTGCTGGAATCT
+GACATTTTCTGCTGTTTAAAAATTGTTTATATCACATTATGTCTAATGTT
+CGAGGTCAAAGGTCAGCAAACTCTATAAGGGACCAGAGAACAAATATTTT
+AAACATGCAGGCTATAGACTCTTTTTTTGTTTTGTTTTTTGTTTTTTTGG
+ATTTTTGAGACAGGGTTTCTCTGTGTAGCCCTGGCCGTCCTGGAACTCAC
+TCTGTAGACCAGGCTGGCATCGAACTCAGAAATCCACCTGCCTCTGCCTC
+CCAAGTGCTGGGATTAAAGGCGTGCTCCACCACCACCCAGCCATGCTATA
+GACTCTTGCAGCTGTTTCCCTGGCTTGTGACTGCTGAAGAACAATGGTAT
+AGAAACTGCTGTGTCTAGCTGTTTTCCACTAAAATCTTAAAGATGCAGCT
+GGGCTCAGCATCACTTTAGAGAGTGCACCACTATACTTTAGAAAAGTAAC
+TTCTGTTTTTTGCTTGTTTCTTTTTAAACATTTATTTATTTGTTTTATTT
+ATGTGAGTACACTGTCGCTGTCTTCGGACACCAGAAGAGGGCATGCGATT
+CCCATTACAGATGGTTGTGATTCCCATGTGGTTGCTGGGAATTGAACTCA
+GGACCTCTGGAAGAGCAGCCAGTGCTCTCTCCAACCTAACTTCTCTTCTT
+GATGAATTTTATTATTAGGCATATTAAATAGTTTATTTTGTTTGGTTCTT
+CATTTTGTTCATGCCTAGTTATCAGTTTTCAGACATATTTAACTTCTTGC
+ATATGTGTTTTCTGACCTATTTTTATTCCAGAGTTTCTGATATGACCTGA
+TAGTTTTATATACTTGGTCACTTTGCAGCAGCTGAAATTTCATTTTATAT
+TATGAATTTCTGTGGAAAAGAATTGCTGTCATCTTTATTTTTAAAATCTT
+AAAAGATGAGTTTGTTTTCTTTGTCATAATTTGAGCATTTAAAACTTAAA
+GCTCAGTATTATTTGCTATGTTAAGTGAGGGTTTGTGTGTTTTTCCTTTT
+TTTAAGTTTTGAGACGGGGCCTTACTGTGTAGTGTGAGCTGGCCTTGAAC
+TCAGTATCTCTTCCTGTACTTCCCACTTGCTAGGATGACAGTACACCTCA
+CTTTTGAGTAAGTTCTTCCCAGGAAAAACATTGTAGTTGCCATTGAATTA
+AGAGAACATTTACTTGAAGAATTTGGGAGCTAGATGTTACCCGGAGGCTG
+AGACAGAAGTCTTTTGGGCAACTGGGAAGACCTTGCCTCTTAAAGAGAAA
+GCCGAATGTTTGATCCATTGCTGTAAGAAATACTGATTTTAGAAAGCATT
+GCCAATGTTTAAAGGAGAGTAGAATTCTAAGAAATATTACTCTCTATTCT
+TGATCTAGGAGAGATCACTGGGCACTTGGTAAAATCACTTTGATAATTTA
+CTCCACAGTCACTTTGTCCAGAGATGGGGACAAAGGTGATGTTATTGAGA
+TAAGTTCTCATCTTTACTATTTCCTGAATCTCCTGATGATTTTTTATTTA
+GACTGGTGATTTTAAAACTTTTTTTGTTATAATGAAGATTCTGTTTTTTT
+CAAGTGTTGTGTTGGCTAGTTTTTGTCAACTTGATACAAGGTAGAGTCAT
+TTTGGAAGTAGGAACCTTAGTTGAAAAAAATGTCCTCATTAGAAAGGTCT
+GTGGACAAACCTGTTATGCATTTTCTTGATTGATGATATAGAAAGGCCCG
+GCTTAAACTCTGGGCACTGCCACCCTTGGGCTGTTGGTTCTGGGTTCTAT
+AAGAAAGCCAACTGAGCAAGCCAGTAAGCAGCATTCTCCAAGGTCTCTGC
+TTCAGTTCCTGCCTCCAGGTCCCTTCCCCAATTCCCCTCAGTTTTTTCCC
+AAGTTACTTTTGGTCTTGGAGTTTTATCACAGCTATAGAAGCCCTAACTA
+AGACAAGTGTAATCTGCCAGATATAAGATAGATAAAAACAGAGTTGTGGA
+GCACATAGACCTCAGCAAAGGTCAAAGGGGGCCAGGAGTCTCTTCTCTGT
+TGGCAGCCCCTCTCCTTTTTCCATCTTCATGGGATCTGCCCTGGGGGAAC
+TTTTCCCAGATCATAATCAGCCACTGACTTGGAGAGTAGAAACTGCTTCA
+TTAAAATTCAAATTCACTGTTCTTGAGTTTTATTTGATTATTTTAAACCA
+CATGTTTTGTTAATAAAAGGTTCTGTTTGTATTTATGTCTAGTTGCTGTG
+TTGATTTTTGCATAGATTTGTGTTCTCTTTGCTAATTAGCTTGTGCCTTT
+AATGTTATATCATGTAATTTCATGGAAAGTATCACAGCTCTTATTACTTG
+AAGAACAGTAACATGAGAAGCTAACAGCTAGATAGTATCTGGTTTAGTTT
+TCCCGTGTATGAGAATATACCTGAAGTGAATAACTTCAAGGGAAAGATTT
+ACTTTGGCCACTGTTTCAGAGGTCTTGGTCTGTCACAGCAGGGCAGATTT
+GTTCGAGCAGCAACCAGAAGGCAGGTTGTTATACCTGTGTTGGTGTGCTT
+CCTCTAAGTTTTTTATTTCAACATGAGAACAATGCTACCCACACCAAGCA
+CCAGTCTTCCCTTTTAGTTAACCCTCTCTGAAAATTCCCTAAGTATATCT
+TCTTTCAATCAAGCTGACAAGTTCAAAGTGTAACTTGATGTCAATTAATG
+TTTATATATAATGTAACTGTAAAGATATTAAATCTGATTTTTCTTCCTAA
+TATAACTATATAAGCTATAAGGTATATTTCTAAAATTCTACTAGGAAATA
+TTTTGTCTTTTCAGATTTTTAGCTACTGTGTAGACTAAAAAGATAATAAA
+ATGAAAGTGACTTATTTATATGTTGGAGTTTGACATACAACTTCGTATTT
+GCCATGGATATTCCATTAGAACATGGATATCCCAAGGCCTGACTGATAGA
+ATTGGACCTTTTCAGTCATAAGCTACTCATTCATTTATTAACTGGTAGTA
+AATTATTTAACTACAACAGTAATCTAAATCAATAAAAAGTTATTATGTGG
+TATAGTTCAATAGTAATTACTTCTGCCTCTTAATTGGTTTTACAGTATTC
+TAAAAGTTACTCTTTTATCCATCCTTTAACATTGTAGTAATATTTAATTT
+ATGATGGATAAATTGTACTATGGTAAATTAAATATGCCTGTGATTTTCAA
+TTCAGAATATATGCTTATTTGATTTTTGTCTTTTGAGATAGAGCCATTAT
+ATAGCTTTGGTTTGGACTAAAACTCACTGTGTAGATGGGGCTGACCTCAC
+GCTCATAGAAACTGGCATGCTTTCTATCTCCTGAGTGTTGGGAGTAAAAG
+TGTGCACCATCATGCCTGACTATTTATTCAAGCTAAAAAAACCCAACATT
+ATTTTTAGCAAAACTAAAAAGGAATATTGCTGTATTATTTACTAGGCAAA
+GTGCTCTTAGGAGAGGAAGAAGCCTTGGAAGATGACTCGGAGTCCAGGTC
+AGATGTCAGCAGCTCAGCCTTTGCAGGTACTCGGTGGCAGCCATGAGCTG
+CCAGTGTCAGCCTCTAGTTATTATCCGCCATCTCGTGCTCCTTTCAGCAC
+CTCAGCCTGCACACAGCATTGCGAGCAGCTTTTATAATTCAGCTGCTTTT
+ATAATGTTCACTCTAAATGTGTTTGGCTATGTGCTTTTCTTGTTTTAGGC
+TATTCAAATATTGATTTATTATCCTTGAGCATATCCTTTTGGAGTGGATG
+ATAGATGGAGTTGTCTCCTGAATTAAATGGTCTATGATCAGGACAGTGGG
+TTGAAGAACTGTCTGGGTAATTTAACTTGAAAGGATATATTTTTGCTCAC
+AAGTGGTTACATAAGATTCTTTGTGTTTTCTATAAAACACAGGTATTATT
+TTAAGACACTAATAAATAGATAAAATGCAAACAGCTTTAGTTATGTTTGC
+TGTTAGGTAAATAAAGATAAGACAAAGATCCTTGGGGAGGAAACCTGAAT
+AATGTCAATGGATTTTCCCTGCTTACAAGATAAACAAGTCACAGGACAGA
+AATTTTGGGCTGCCTAGACTAGTGTAAAACTCAAGTGCCTCTGACCCAGT
+TTCCTTCATCACAAGCCACTGCTCATACCTTGTTCATGGTTTTGAAGTGA
+TTTTGTTTTTATATTTAATGTTTTGTTTTATAAGACAAACCATCCTGTCA
+GCATTCTGAAAGCTCGCTTTTATTGGTATAATCTCAGTGTTCCCAACCAT
+CAGAACCTTTCATTCCCTGCAATGTAAATCAACCCCCCCCTTTTTTTTGC
+AAGTTCTTCCAAACGTCTAAGGATGAATAAATGTTACATACTGGATTTTA
+CTATTATAGAATGGCACTGAAGTGACTTTGATCTCACATAGTGTTTGTAA
+GAGGGAATTTCAAAATTAAACTAGGAAAAGATGGAGTGTGTTTATCCTAG
+AGGAGGTTAGGATTGGAGTGGAGATGAACAACACGACTTGGAAGAAAGCA
+AGCTGATCCTAAAGGGTACTGCGCTCACTAGTGTTCTGTTGTTGCCTATG
+TAGAGTTTCTGGAAGTCTGCTTGTCCCTGCCCCAGCTGCTTGTCCCTGCC
+CCAGCTGCTTCTCAGCAACACACATTCTATGTGTGGCTTTAGAGATGCAG
+TAAGAGCTTCAGCTTGAAAATATTCACAGCCATGAAGAATTCACTTGTTC
+ACCCAGCTGAACTGTGCTCCTTGACTTTTTCTTCACTATGCTCCAAGCTG
+TTTAATAGTTAGAACATTCAATACAGTGAACTTTTTGTCATTTTGCACAG
+TTGGATTCCTTAGCTACAGTTTCCTCTGGCCATTTGACAACTGAGTTTCT
+CTGTGTCTCTAGCCTCTGTGAAGAGTGAGATTGGTGGAGAGCTCGCTGCT
+TCTTCAGGTGTTTCCACTCCTGGTTCTGTTGGTCACGACATCATCACTGA
+GCAGCCTAGATCCCAGCACACACTTCAAGCAGACTCTGTGGATTTGTCCG
+GCTGTGACCTGACCAGTGCTGCTACTGATGGGGATGAGGAGGACATCTTG
+AGCCACAGCTCCAGCCAGTTCAGTGCTGTCCCATCCGACCCTGCCATGGA
+CCTGAATGATGGGACCCAGGCCTCCTCACCCATCAGTGACAGTTCTCAGA
+CCACCACTGAAGGACCTGATTCAGCTGTGACTCCTTCGGACAGTTCTGAA
+ATTGTGAGTGGGCAGAGGGTGCCCTGGTTCTTTTGTCTTCTGAGCTTATT
+CTTGGATGCCCACACTTGGACCCTCCTGCTCATTTTTTCTGTGTTACTAC
+ACATAATAGTAAGAGGCCCCCAGCTCAGATGGTTAACAGAGAGCCTTGTT
+GGATGTCTTCACTGTAGAAATTGCCTAGTATCATTTGTATTGAGCCATGG
+AGATTAAAGTGAGGTTACTTATATGCACCTTGTACACATGATATATTTTT
+AATACCTGATTAGGCCTGTTTAAATAACTACTTTCAATTTTTCAAGGAGC
+TTGTTATTGAAAGTATCTGTGGTCTTAATGTGGGTGGTGATATTAGTACT
+CTGTATTATTTTTAGCACTTTTTGACCTCTCAATGTACTTATACCACATT
+CCATTTTAAAGTAGGATGTGCATATTTCTATCCCTGTGATGTCTGAGTTC
+ATAGACAGGAATCACCTTAAAGATTATATAATCAGAAAGTTTGGTGCAAG
+TGTGTGCTGAATTGTGGGGTATTTTTTGTTTGTTTGTTTGTGTGTTTTGT
+TTTTTTAAACTTGCTTGTCACTTTGTTTTTTTGTTTTATATTTCTGAAAC
+AGGGTCCTAGCCCAGGCTGACCTTAAATTTGAGATCTGCCTGCTTAAGCT
+TTGGACTCTTGTGAATGTGGGCATGAACCACACTTGGCCTGCATTCTAAA
+TAGTCATTTTCTTCTCCTCTTCCTCTTTCTCTTCTTAGTGTAGTAAAATA
+GCAAAATTATCTCATAGATCATTCCTACAGTTAAGTGGTATATTAATCAC
+CACCATACACCTCCATTAAGTCTTCATCTTCTGAAACCTGACCTTCTGTA
+AAGGCTGTGCCCTCCTGGAAGCCAGTGGTCTGTTTTTTATAGTACAAGTT
+TAAGGACTGTAGGTCCTTCATGCAGTATGTTTATCATGAAGTATTATCCT
+TCTATGGCTGACTTACTTAACATAATGCCTCCATGTAGCATGTGTGAGAA
+TTTTCATTTTTTAAGGGATGATTAATATTCCATTGCATGGATAGAACTAC
+ATTTTGATTATTGTCTCATCTGTTAGAAAACATGTGGGTTACTCTCACAT
+CTTGACAATTATGGATAATGTCACAATTATGAATAATAGGTCTACTAAGT
+ATTTCAAAGACTCTGTTTTCAATTCTTTTGGCTATACACCTAAAAGTAGA
+ATAGTTTCTACATCCAGCTTTAAGTAATGTAATTAAATGCTTAGCTACTA
+TAGAATATGCATATATCTTGATATATATGTACTATAGAATATAATAGTCT
+ATATAGAATATACATATATTACATAATATATAACCTATATATATTCTATA
+TAGAGCCTATACATAGGTCTTTTTGAGACAGGGTTTCTTTGTATAATAGC
+CCTGAGTGTCCTCTACCTACTTTGTAGACCAGGCTGGTTGAACTCAAAGA
+GATCCACCTGCATCTCCCTCCCAAATACTGGGATTAAAGGGGTGAGCTAT
+CACACCCAGCCTAGAATATT
+>mm10_knownGene_uc008xdb.1 range=chr5:34761740-34820754 5'pad=0 3'pad=0 strand=+ repeatMasking=none
+GCACTCGCCGCGAGGGTTGCCGGGACGGGCCCAAGATGGCTGAGCGCCTT
+GGTTCCGCTTCTGCCTGCCGCGCAGAGCCCCATTCATTGCCTTGCTGCTA
+AGTGGCGCCGCGTAGTGCCAGTAGGCTCCAAGTCTTCAGGGTCTGTCCCA
+TCGGGCAGGAAGCCGTCATGGCAACCCTGGAAAAGCTGATGAAGGCTTTC
+GAGTCGCTCAAGTCGTTTCAGCAGCAACAGCAGCAGCAGCCACCGCCGCA
+GGCGCCGCCGCCACCGCCGCCGCCGCCTCCGCCTCAACCCCCTCAGCCGC
+CGCCTCAGGGGCAGCCGCCGCCGCCACCACCGCCGCTGCCAGGTCCGGCA
+GAGGAACCGCTGCACCGACCGTGAGTCCGGGCGCCGCAGCTCCCGCCCGG
+GCCCCGCGCCCCTGGCCTGCGTGCTGGGCATGGCCAACACTGTTCCCTGT
+CCAGAGGGTCGCGGTACCTCCCTGAGGCCAGGCTTTCCCGGCCCGGGCCC
+TCGTCTTGCGGGGTCTCTGGCCTCCCTCAGAGGAGACAGAGCCGGGTCAG
+GCCAGCCAGGGACTCGCTGAGGGGCGTCACGACTCCAGTGCCTTCGCCGT
+TCCCAGTTTGCGAAGTTAGGGAACGAACTTGTTTCTCTCTTCTGGAGAAA
+CTGGGGCGGTGGCGCACATGACTGTTGTGAAGAGAACTTGGAGAGGCAGA
+GATCTCTAGGGTTACCTCCTCATCAGGCCTAAGAGCTGGGAGTGCAGGAC
+AGCGTGAGAGATGTGCGGGTAGTGGATGACATAATGCTTTTAGGAGGTCT
+CGGCGGGAGTGCTGAGGGCGGGGGAGTGTGAACGCATCCAATGGGATATT
+CTTTTTCCAAGTGACACTTGAAGCAGCCTGTGACTCGAGGCACTTCGTAC
+TCTCCTGGCGTTTCATTTAGTTTGTGGTGTAGTGTAGTTAAACCAGGTTT
+TAAGCATAGCCAGAGAGGTGTGCTTCTGTGTGTCTGCAGGCAGTTGGATG
+AGTTGTATTTGTCAAGTACATGGTGAGTTACTTAGGTGTGATTATTAATA
+AAAAACTATATGTGTGCATATATATGAAAGAGTCGACTTATACTTAACTG
+CCTATCGATTTTTTGTTCTATATAAAACGGATACATTGGTGGTGCTCAGT
+TTTCACCGGGGAATGAATTTTACTAGTGTTGCAGACAGGCTTGTTTTAGA
+ACATAGGCCACTCTGACTCTGACTTTGTGCCAGTAAAAGTTCCTGTTTAG
+TTCTTTGCTGACATCTTATAGATCTTTGGAAGCTAGCTGCTTGTGACTGG
+AGAGAATATTGAAACAGAAGAGAGACCATGAGTCACAGTGCTCTAAGAGA
+AAAGAGACGCTCAAAACATTTCCTGGAAATCCATGCTGAGTGTTGAGCCC
+TGTGCTCTCTTGCAGCTCAGTCCTTTCTCTCAACTCTGGGCATTTTATTT
+CTAATCTGGATTTGTATAATTAATAAGGAGAACTTTTGGGAACAACCTAC
+TAAAGAATGTCATCATTAAAACTCACTTAGAAAATAAGTGTTCTGGTGAT
+ATCATTGAGCTATGTTCCCAGTCCTGAGAGTTTGTTTTTTTTTTTTTTTT
+TAAATAAAGATTTGGGGAGAAAAGGTGGCTTACTTGATAGAACAAAATAT
+AGGAATAAAATTTCCTTCTATAAGGTGAAAAGTGTGAATAGAAAACTTCT
+TATCCTCTAGATAAGTAGTTTCTTTTTGCTTTTGAGAGTCTCACTATGTA
+ACTCTTGACCTGAACTCAGAGAGATCCATCCTCCTGCCTCTGCCTCCTCT
+CTCTGGGATTAAAGGCATGTGGCACCATGCTGGGCTGTCCAAGTATGCCA
+CAGACCCTCTAGGTCCCTGGTCTTCGAGGAACGGGATTTCTTAGGCAGAT
+GGGTAAGGAGTCGGATGAAAATGACAATCAGCCACACACAAGAGAGGTGT
+TGAATCTGAATGTAATGTTCTGGTTGAGCTTCAGACTTATATAACAACGA
+ATTATCAGAGGATACAAATCACAAAAAGACAAGATACACTGAAATTCACC
+AGTTACAGCAGAAAGGAATTTGCAGGGACTAATTAAATGTTTACATTAGG
+GATAACAAGCCCTGCCTAGGATCAGCCTAATGCCAGGCAAGAATTTCACA
+CTTTAAGGTTAAAAGCATCAGGGGGTTGTTAACTCTTGACAGGCCTTAAG
+AGTAATGTGCTATCACTGAGCTCTAAATTCTTAGGTCTAGTAAAACTTAT
+CCTGTCTGGAGAGTTCCCCCTTATCAGGGTAGTATATCAACTTATACTTG
+ACATGGAATGAAGCCTGTAGTAAAACATTTCTATCTCAGTGAGACTTTTA
+GTCTCTATCTGTAAACAGCTGAGTAAAATGGCAAGTGCTTAATTGTTTAC
+TGAATGGGTTAAGCTCCTTGCTGCTATCTGGAATCTAAGAACACTGGGGA
+AAGGCTTTAGCTATGTTAGAATACAATATTAAAAGGCATTTACTATAAGG
+TGATGCTTAATAGAGTGCACGTGAATCTATACACTAGATTAATGTGGTGG
+AAATTTGAATATAATGGGTTAGGGAAAGAGATGCCATAACTCTGGGAGGA
+AAATTTCCCTGGACTCTTATCCTCGTGAAACAGCTTCCAGGCTTTTCGCC
+TGACAAACCGATCCAAACTGGAGAGTTGGCTTTCGCCAGAATATCCAGGA
+GGAGAGTCCTAGAAATTCATTTCTCATGAGCAGCTTTTTGGCATTTTTGC
+CTCACAAGCTGACTCCACCAGAGTACCCTGACACAAGTATTGTCTAGTTA
+TTTTGATTATTACCATGACTCTGCCTCTGGGTGAGAGGAATTGTGGAAGT
+TTACATATTCCCCATATCTTCTATAAACCTCTGTGTGTGTGTGTGTGTGT
+GTGTGTGTGTGTGTGTGTGTGTGTATGAGGGAGAGAGAGGGAGAGAGAGA
+GAGGGAGGGAGGAAGAGAGAGAGAGAGATTGTTCTGTGCCTGCTTCGAAC
+ACAAATTAGTTTGCAAAAGTAATTCATTAACATGATACAGTCCCAAAGAT
+AAAAATGGTTAAATAATGAAAACATCTCCCTCCCCATTTTCCTAACTTTG
+TACCCAGGAGCAAGCTCTGTTACACTTCATTTGTCCTTCCAGATAAAATT
+TGGGCATATGTTAGGACAGAATTTTAAATTATTTACAAACAAAAGTATTT
+TGGAACAAAAGCTTTTAAAAGCTTTTATTTTAATAAAATAACTTGTTACT
+ACACTGTATATAACTAACTAACATTTTCCAAAATTAGCTCCATTAGCATC
+TATCTCATATTTCTATGTACTTTGCTGTTGAAAAACCAAGTGTTCATTAA
+TAATAAGTAACAAACTCACTGCTTGGAAGCTTTGATTTTTGGCATTTTGT
+CCACTTGACTCAGTTAAAAGTCCTTTTTTTCGAAATGAGAACAGCCAAAA
+CAGTTTTAGAATGAGTCTGTTCTGCTTTTGTGACTCTCATTGTGTTCTGT
+AGAACCAGTGTCACAGCCATATGTGGGCCTCTGTTGAAGTAGCTGAGAAC
+TTGTTCTCTGCTCTGCTAGCTGCTGTCGATCTGATAGGCCTTGAACAGTT
+GACATTCACCCTTAATAGTCCTCATTAGTCTTCCTGAGCATAGTCATTCA
+TTTATCAATATTTGCTGATCATCTCCTATGTGCCTAGCATTGTTCTAGTT
+GCAGGTTTTAGCAGGGAACAAAGTCATGTCTCATGAAGCTAAAATTCTTG
+GGAGAGACATAGACAGTAAGCAGAATAGTTTGTTCATAGTGAGTGATGAG
+GCGCATGCAGTAAAGTAGGGAAGGGGATTAGGAAATGCCAGGGCTTGACA
+TGTTTTAGACAGGGTGTTTAAATAATATCTGCTTAGTTGAAGGCTTATTT
+TTGAATAAATATCTGAAGAGTCAGAAATCTACCAGGAGGGTGTATGGAAG
+AGGAGTATTCCTGCAAGGGGAAGTTGTCAAAGGCTTTCCTGTGTGGGGAT
+TAGTGATGTCATGTTTTTGCTGGATGAAATGAGTGACGGTAAGAGTTGTA
+GTGGGGGTGAAGCAAAGGGTTGAGGGAGGCTTCATTGGTGTTATCAGTTA
+CTGCATTTATCTCCAAATAGAGAACGTAGCAATGAAAGCTACAGAGAACG
+GGAAAGGTGAGGATTTATTCTAAGACAAAATAAGTGTAGGAAGTTTAACA
+ATTAGATCAGGAGCACAGACTCCAAGTCTAAGTCTTCATTCTTGCACATT
+TTTTAAAAATTTTGTTATGTGTTCTGGATACTATTTCCTTATGAGATATA
+AGTTTAAAAGCCTTTCTGTGGATTGCCTTGCCTTTGTTGTTGTTGTTTGT
+TTTGTTTTGTTTTGTTGAGACAGGTCTCTCTATGTAGCCTTGACTGTCCT
+GAAAATCACTCTGTAGACCAGGCTGGCCTGGAACTCAGAGATCTGCCTGC
+TTCTGCTTTTCAAGTGCTGATATTAAATGTATGTGCCACCACTGCCAGGC
+TAAGATTGTTCTTTCAATTTCTTTTTTTGTTTTCTTTTGAGATCAAAGTT
+TGCTATGTACTTTTGGCTGGCCTGGTATATTGTGTAGTCTAAGTTGGCTT
+CAAATCTTCATGGCACAGATTCCCAAGTACTGGGACCATAGGTATGGCCC
+ATCACAGTGGGGGGTTGGGGGGGCCAGTACATCTATCTCTTGAATGGTGT
+GGAGTGTATATATGTGTTAGGGGTTTGCAGAGGCCAAAAGAATATTGAGT
+GTCTTCCTCTATTGCTCGCCACTTCTCTGAATAAACCTAAAGTTACCAAT
+GGATTTCTAGTAAGCTGACTGACCAGCAAATATTGGGGATCTGTCTGTCC
+CTGTTCACCATAGTGAGGTTACAGACGTGAATAACCACACCCAGTTTTAA
+CGCTGAATGCTGAAGAGTTAAACTCAGGATTTAGACTTGCCTCACTTTCT
+TATGTTCTTCAAAGCATAGACATTTTAAGTTTTGATGAAGTTTAATTTTG
+TCTTTAGCTACGGTATGTTAGATACTTAATAAATCACTCTTTTATCTGAA
+ATCACAAAGATTTATTTACTCCCTCTTTTTCTAAGAGTTGTTTGTAAGCC
+TGACTCATTTTGAATTTGTGGTTAAGGTAGATGTCTGACTCTTTTCTTTT
+GCACGTGTAATTTAGCATTTGGCTAAAGAGAATGTTTTTTTCTCATTGAA
+CTGTATTGACATCTTTGTTGAAAATTATTACCTGTCCATATGTAAATGTT
+TCTACTTCCATTTTTGCCATTGATTGTGTGTCTGTTCTATGCCAGTACTA
+TACAGTCTTGATTACTGGTTTATATCATGATCTCAAATCATAAAGTATGC
+CCCCCAACATTGTCTTTTTCAATGTTGCTTTAGTTATTCTGGGTCCCTTG
+TGATTCTATCCATCAATATTGGTGAACTTCATGACCAGATAGTAAACGGT
+TAAGGCTCTGTGGCTATAATGCCATCACAGCCAGTCAGCTCTGCCACTGT
+GGCATGAAAGCAGCCATAGAAAATATGTAAGAGAATGAGTACATAGAATT
+GGAATTTCTTAAAATTTTCAAGTCATGAAACATTCTTTTATTATTTTTTT
+TTAAAGAAAATGTTTTATTTATTCTTTGACAGTTTCATGCATGTATACAA
+TGTATCTGAATCTCATGCAGGGCCCCTACTATCTCCCTCCCATCTACACC
+TCAGCATGTCCTTCTCCCACAGGGTCCCTACTCTCTCCCTCCCATCTATA
+CCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCTCCCATCTA
+CACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCTTCCCATC
+TACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCTCCCA
+TCTACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCTTCC
+CATCTACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCT
+CCCATCTATACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCC
+TTCCCATCTATACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCT
+CCCTCCCATCTATACCTCAGCATGTCCTTCTCCCACCTCCATGCCTTTTT
+TTAATGACCCACTGAATCCAGTTAGTGTTGCTTGCTGGAATGGAATGTTG
+ACTGGTTTTTGTTTGTTTGTTTGTTTTTGTTTGTTTGTTTGTTTTCCCTG
+AGGCAGGCTTTCTCTGTGTAGCCCTGGCTGTCCTGGAACTACTAGCTCTG
+TAGATCAGGCTGGCCTCAAACACACAGAGATGAGTGCTTCTGCCTCCCAG
+TGTAGGGACTAAAGGTTTATCTCCCACCCAGTTCTTGTGTGGTAACCATA
+ACTGCAGTGAGTTTATTATTACAGCAGCCATGCCATGTCTGGAAGGCAGA
+ATTTCATTGAGCTCTATAAAACCTGGCTCATAAATTCTCTCTACCCCTTC
+CCTTCCCTGAACCTGGGTGTGGGGGTCAATTTAGATGTCACATTTAGGCT
+GACATTTATCAGTCACTTATTCTCAGTATACTATGAGTTTTTGAGTTGCT
+GCCCACTGCAGAAAGAGGTTTCTTTGGCCAAGGCCTGACAGTAGCACTAG
+TCTGTAGGTATAAATGTAAATATTTACTCTTGTTTGTTTATTTGGTTGGT
+TTGTTTTTTGAGACTGGGTTTTTCTGTAGCCTGGCCATCCTGGAACTCAC
+TTTGTAGACCAGGCTGGTCTTAAACTCAGGTTCACTTGCCTCTGCCTCCC
+AAATGCTGGAATTAAAAGCATGCACCACCACATCGGGCTCCCAAACATAA
+ATATGTAGAAGGTAGTCTAACAACATGTCTACTTAGCAAAACAGCAGTAG
+TAGGGTCCTGTTTTAGGACCTGTAACCTCCTTCCCCTGAGTCATGGGCTT
+TTGACTAGGTGTGTACCTTTGGCGCACACTTTAAGTCTAGTCAGAAAACT
+GTGGGTTATCTTTGTATTCTTTACATCACTGTTGCACTAGTGGTCACATT
+TCGCCTTTGTCTATATTATAGCATTCAGGGTCCAGTGCTTAGTAAGACCA
+TTGATGTCTTTTCTCCTCCAGTGGCCTGCAAAACACCTGGCATTATAAAA
+CCTAACCAGCCAAGAGGAAGTTTTCAGATCAGTTCTATCTTGATTTCTCT
+ATGTCTTCTATGCAACCAAAGTGTGTTGTATCTTCATCAATAGGGTTTTA
+CTATTATATAGTTACATTGGGCAACCAAGAGTGATAGAAATAACCTGTGT
+TGTTTGGATAGGAAGGTGTTTCTGGGACCTCCATGACTAATAACTTGTAA
+GAGGTATCTCATGCTTAGCACATGTTTTCTGAGGATACATTGTCATGTAC
+ATACCTATGTTGAAACTCCTTTAAAAAACACTTATACTTTTAAATTAGCT
+TTCAAAATAGTTTCTATAAGTTTTTTTAAAAAAGATTATATATATATATA
+CACACACATATATGTATACACACACACACACACACACACACACACACACA
+CACACACATATATTGTAGCTGTCTTCAGACACATCAGAAGAAGGCATCAG
+ATTCCATTACAGATGGTTGTGAGCCACCATGTGGTTGCTGGGAATTGAAC
+TCAAGACCTCTGGAAGAGCAATCAGTGCTCTTAACAATTGAGCCATCTCT
+CCAGCTCTATAAGTTTTTTTTCACACATCATTTACATTCTGTAAGTAATG
+AATAATCACTACACAAAACAACAATTGTTCTCCTGAACATTAATTCTGGA
+GAATCTAAAATTAACAGTCTCATAAAACCTGTATGCAAATGATTAACAGA
+TCTAGGGGTAATAGCCTCAAGCTGGAATCAGTTTAGAAGTCGTCAATAGG
+TAGTTAAGCTACCTAACAACAAGAAGGAATTCAGCTGCTGATACCTGCAG
+CAGCTCAGGTAAATAGTGGAGATTATAGGCCATTGAGCAAGCTGATTCCT
+AATGCCTACTCATTATATGATTTTACTTATGTATCTTTTTTTCCTGTAAT
+GTTAGATCTTTGGTTATTTTGTTTTCCCCTTGTGGCAAAATAACATCACA
+TAAAACATAACCATTTTGAGTATACAATCTATGATTGTAAAAACACAGTG
+TTCCATTGTGACCACCAGCCACCACTGTTACTCTTCCTTTTTAACATTGC
+TTTTAAAAGTATATTACAAAAAAAGTATATTACAATTTTCACTTGACATT
+GTAATTGTACATGTCTATGAATAGGGTCATTTTTTTAAAATTATACATTC
+TAGTATCTTCTGTTGCATTCAGTTAACTTAAGAGCAGGAAAGACTGGTAT
+AGAAGTCCTTTTCTCTCTCTCTTTTTTTTCTTTTTCGAGACAGGGTTTCT
+CTGTGTAGCCCTGGCTGTCCTGGAACTCACTTTGTAAACCAGGTTGGCCT
+CGAACTCAGAAATCCGCCTGCCTCTGCCTCCCGAGTGCTGGGATTAAAGG
+TGTGCGCCACCACACCCGGCGAAGTCCTTTTCTATGATAGAGAGTATATC
+GTGGGCAAATCCTAGGCCTTGGCTCTTTAGTCAACCAGCATTTGTATGAT
+TAAATAAAACATTGGTGTGTGTTTGTGTGTTTGCACACTGGGTACAGCCT
+TTCCTTTATTAGCCCTGGGTGTGATTTTCTTCTCTGCTGATAGATCCTTT
+CTAAGCTGATCGCTTCATACTTAGGGTGGGGATAGTTGTGAGGACTGAGG
+AGGTGATGTGCGGTCCTGTCCCTTTCTCATTTTGCTAGTGTGACTGATAT
+GTTAGTTCTTTGCATGTGTCTCCTACTCTGGAAGGAGCTGGATGGGAATT
+GTTTGTTTTTTAGTCACTAAATCTAGACTATCAGGTTCATGGCAAGTTCT
+CAGGAAGTACTTATTACATGTATAGAGTTATAATCTGAACTTGATTAGAC
+ATATGGCACTTTTCATACTCCTACTTTTGTTTTTCAAGTTATTTTTTTCT
+ACTTACCAGTTTCATGTTTTAAAAACTTGTTTCTTTTTTTAAATTTTTTT
+AATTAGGTATTTTCCTCATTTACATTTCCAATGCTAGCCCAAAAATCCCC
+CATACCCTCCACCCCACTCCCCTACCCACCCACTCCCACTTCTTGGCCCT
+GGCATTCCCCTGTACTGGGGCATATAAAGTTTGCAAGTCCAATGGGCCTC
+TCTTTCCAGTGATGGCTGACTAGGCCATCTTCTGATACATAAGCAACTAG
+AGACACGAGCTCCAGGGGGTACTGGTTAGTTCATATCGTTGTTCCACCTA
+TAGGGTTGCAGATCCCTTTAGCTCCTTGGATACTTTCTCTAGCCTCCTCC
+ATTGGGGGCCCTGTGATCCATCCAATAGCTGACTGTGAGCATTCACTTCT
+ATGTTTGCTAGGCCCCGGCATAGTCTCACAAGAGACAGCTATATCAGGGT
+CCTTTCAGCAAAATCTTGCTAGTGTATGCAATGGTGTCTGTATTTGGTGG
+CTGATTATGGGATGGAACCCCTAGATATGGTAGTCTCTAGATGGTCCATC
+CTTTTGTCTCAGCTCCAAACTTTGTCTCTGTAACTCCTTCCATGGGTGTT
+TTGTTCCCAATTCTAAGAAGGGGCAAAGTGTCCACAATTTGGTCTTTGTT
+CTTCTTGAGTTTCATGTGCTTTGTATCTTGTATCTTGGGTATTCTAAGTT
+TCTGGGCTAATATCCACTTATCAGTGAGTACTTGTTTCTTTTAATTAAAA
+AACAAAACAAAACAAACAAAAAACTGTGTGTAGGCCGGGCGTGGTGCCCC
+TCATACTTAATCCCAGCACTCAGGATCTCTGTGAGTTTGAGGCCAGTCTG
+GTCTACATGGTGAGTTATAGGACATCTAGGGCAACATAGTCAGACCCTGT
+AGTCAAAAAGAACCAAAACTGAACCAATACAAAACTTTGTGAGCTAGTAA
+AATAGTGAAGTGCTTGCTAACATTCTGAGTTTGATCTCTGGGACCCATGT
+GGTAGAAAGAGAGGACCAGTTTCCACAAGTTGTCTTTTGATCTCCATGTG
+AGTGCCAAAGCACACATACATGTATTAAAATGTGCATGTGTAATTGTAAA
+GTTTCGTTACCACTGACAGTCAGAAACGAGCTCTGGGGCCTCAGGGTTCT
+CTTCCTTCAGTGCTGTGGGAAGTGCACCTTTAAACAATCTTATTTGGGTT
+TCTTTTGGAGACAGGAGTTATGACAGAGATTAAACTTGGCTTTGAGCAAA
+TTTCATACACATTTAACCACTGAATTTAACCACAGTGCCTATTCAGTTTT
+GAATATCACATGACTAGAATCGTAGAGCTCCTCCTATTCATAACTCACAC
+TGTGTCAAAGCTCCTTTTCTTCAGTGTTGTTGGTCACCCATTTAGTTTGT
+CTGTTTTGTATGAATATGCCCTGTTTTTCTCATCTTATTAGTGCATGCTT
+GGGTTATTAAACTTTGACAAATGCTGCTGTGAGGATATCTCATATGCTGG
+CCTGTCTTTTCCCTCACAAAATGGTGCTTTTTAATGAGCAGTTCCCATTT
+TGGTGACTTCTAATTTGTCATACTTTTCCATTATGGTAAGTGCTTTTATT
+TCTTTTTTGATGAATTTTTCATCAACTTGAGATCATGAGACGTTTCCTGA
+TACTGTTGCATAAATGGCATATTGATTTGCCCAATGAAGTTGACTTGTTT
+TTGTGTGTGGTGTAAAATAGACTTCTTGTTTGTTCTTCTAATGTGGGTAC
+TTCTTTTTTCACCAGTTTCCTCCATGTTCTGTGTGTATTTCCTCCCTTTT
+ATTGTTATTAAGTTTATATTATTAAAAGAATTCACATTAAGTAGGTTTTT
+TTTTAAAAAAAAAACTTTTTATTAATTCTTTGTGAGTTTTATATCATGTA
+CCCCACTCATCTCCCTGACCCGTTTCCCCCCTCTGCCCTTGCATCCCCCC
+CTTCAAAAGAAAAGAAAAACCACACAAACAGAAAAACCAATAATGTATAG
+AAAACATCTCATAGTAGAAACTGTATCATGTCACAGTGTGTGCCACTGTA
+TACCCCTCTGTCTGCACATCTTGACATGCAGATGATCATTGCAATGAGTC
+ACTGATCTGATTCAAGGTCTCTGACTTATGTCACACCATTAATATTGGAT
+CTTCTCCAGGACTCCTCTTGGTTTATTCAGTTGTTACTCTGTGTCATCAA
+GTTCCTGCAGCTTTGGATCAGCAGGACCGGCTCTTTTATGTACTCCAACG
+GTTCACAGATGATGTAGATGTTGGGGTGCGCCAACTCAAAGCCCTGGATC
+TGGGCCTGGGTGGTAGTTGTGCTGGTCAGCCTGCTGGCTCTCCTGCATCT
+GCATCACCAGGGCTGTTCTCCAGCACTGCTAGGCCACTCGATGCTATCAT
+TTGTAAGAAGCAGGGTCATGAGGAGGGAGGACACCTCCCTGCCCCAAAAC
+ACACACACACCACCCAATGGCAGATGAGTGACCAGTCCAGCTCTCCCTCT
+ATCTCACCCTTGAGGCTAGGTCACCTGTGCACCTGCCACCAGGGCCAGCT
+CTACTCTGCTGCCCAGTTAAGATTCAGGACCTACTCTCCTGAGTACTGCT
+GCTGGTGAGAGGTGTGCCAGCTCTCTAGAGTGCCAAAGCCAGTTCTGTAC
+AGATACATGGCTGCACAGACCAGGGACATCCCCATGGTTTCTAGTGATAA
+TGTGAGTCACGACATCAACATCAATCCCTGCCACTGCATGGCCACAGATC
+CAGACATGGTCCTCAGTAGCAGCAGGAGTTGGTACTTCACCATGGCTTCA
+AGTGGCAGGGCTAGCTACTCACAATAGGCTCTTCCTCTTCCCCCTCATGT
+CTCAGTTCCTTCTCTCTTCATACTGCGCAGGCTGTTCTGCTTCTCTTTCT
+CTTCCTTCTGTCCACCACATACTTGCACATTGCAGAGACTCCTGCTGCAG
+GCAAGCCATGATGCTGGTATGCCTCTGGGTGATCTCCTCTGCTTGTGCTG
+TTTGGCATGGTGGCATGCAGACCTCTAGGTGTCTACAGCTACCCATGTGA
+CATGGCAGCAGTGTCCTCCCCCACCCCTCTCTGCAGTGTGGCAGGCAGGT
+CTTCTGGACTTTTTTCCCTGCCAGTGCCCTGTGTCATGGCAGTGGGATGG
+CAGTGGGTGGGTCTCTCTCTCTCTCTTTTTTTTAAACACAGGGTTTCTCT
+TTGTAGCCCTGGCTGTCCTGGAACTCCCTCTGTAGAGCAGGCTGGCCTAA
+AACTCACAGAGATCTGCCTGCCTCTGCCTCCCAAGTGCTGGGATTAAAGG
+TATGTGCCACCACCACAGGCATGGCAGGTTCTTTTGGCATAATTATTTCT
+CACTTTGTTCTGGGGTTGTTTAGGACAGAGTTTTGTGTTGTAGCTCATGC
+TGGCCTAGAGCATGCTGTGGTTCTCCTATCTCTACTCCCTGCATTCTAGG
+AGTGAAGATGTTCACCATGTTTGGTTCTAGTTATCTTAAAATAAGGACCA
+TCTCTTGCTAATAATACTTTTCATTGTACAGTACATTATGCCCCGGTCTG
+TCTTAGTATTTGAAGAACCTTTGTCCTTCATACTGATTCTTATGTCTTTC
+CTAGATGGCATCTTCTGTCTCCCACATAGACAATATTGTCACATCGTTGT
+CCTTAGACAATATTGTCATATTGTTGTCCTTTGCTATTGGAAGATTGAAG
+GGTTTTGTTGCTTAAAAGACCATTTTGATAGTCTAAGTGTTGCTGCATGA
+TTTTGTGTGTGTGAATATGTGTGTTTAAGTGTAGTTTTTCAGTATTATCA
+CTTCTTGAGAAAGTATATCCTATAGTTCCAAATCAGATGTTGATTTAACC
+TTTTAAAAAAATCTTTCTGGTGCTGGAGAGATGGTTCAGTGTTTAAGAAC
+ACTAACTTGCTCTTCTAGAGGTCCTGAGTGTGATTCCCAGGAACCACGTG
+GTGGCTCACAACCATCTGTAATGGGATCTGATGCCCTCTTCTGGTGTGCC
+TGAAGACAGCTACAGTGTACTCATATACATTTAAAAAAAAAAATCTTTCT
+ACTCAGGTGGTGGTGGCAGCACATACCTTTAACCCCAGCACTTGGGGAGC
+AGAGGCAGCTAGATCTCTGTGAGTTTGAGGCTAGCCTGGTCTACAGCGTT
+TCAGGACAGCCAGGACTACCTGTCTGTCTTGAAACAAGACAAAACAAAAA
+CGCCAAACCTTTCTGAAAGTGGTCTAAGTGTTCAGCAACACTGTCATGTA
+AGGATAGGACTTGACAAAAATCAGAGAGCAACTGTTGAAGAATCAGAAGC
+TATGCATTCATGCTCCAGGCTGCTGTGCTTCTTATAGGCAGGACAGCTTC
+CAGACTTCAGTCTTGCCCCCAGTATGGAGTGTACGTTACATGTGTTCGTG
+GAGGGGGAGGAGGAAGAGGAGGGGAGAGGAGGAAGAGGAAGAAGAGGAAT
+GTTTTAAAATTCCTTGAGCGGTTCAGTCTACCCTCTTCTCTTAATGTGGA
+ATCACATTGTTAAGATTTTTATTTTTAATTAGGTGTCTATATATGTATCT
+GTATGGAAGTCTATATGCACATGAGTGCATGTACCCTGAAGAGGGTATTG
+GATCCTATGTTTCTGGAGTTAAAGGTGGTTGTGAGCCACCTGATATAGGT
+CCTGGGTAACTGAACTTGGAGTTTTTTTGGATTAGCAATAAGCACTCAGA
+ACCATTGAGCCATCTCTCCAAGCCCTGTAATCACATATTTAAAAGAACAA
+TGAGTGTAATTCTAAAGTTAAGAATTTAGATATGGGGGCTGGAGAGATGG
+TTCAGTGATTAAGAACACTGACTGTTCTTCCAAAGGTCCTGAGTTCAATT
+CTCAGCAACCACATGGTGGCTCACAACCATCTGTAATGGGATCTGACACC
+CTCTTCTGGTGTGTCTGAAGACACCTACAGTGTACTCATATAAATAAAAT
+AAATAAATCTTTTAAAAAATGAATTTAATTATGAAGGCCAAATTTATATT
+TTTAGAAGTAGTTCTTAATTTGTTACAGTGTGCTCTAGAGCCTGGATTTT
+ACATGCCCCCACAATTGTGAGCCTGTGTAGATTGCTTTGTTGCACTTAAA
+ATAGTTTGGCTGAAGCTTGTTTTCATTTTGAAGATGTAGTTTCAAGTGGT
+TGAAGAGCCAAGGTTGTTTTTACCCTATTGATACAGTTCCTACCCTGAGC
+TATTTTATTTTTCATAAAAAACAAATCAGTCTGACTTATCTCTAAAAATC
+CCATCTAATTTCATAAGGAATGAAATAGCTACAGATGTTTATTTATTTAT
+TTTATATCAGATTTTCTTATTGGACAGAATGAGGTAGAAAAAAATGTTAT
+TTCAGGCTGGGACCTAGGCTAGGTGTGGTGGTGAGGGCTGCAGTTCTAGC
+ACTAAGAGCCAGAGATGAGGCAGGAGGATCTTGACTTCTTGAGAGGGCTA
+GACTGGGTTCCAGGCCAGCCAGGAGTGAGAGAAGAAGAGGGGTGGGTGCT
+TTACAGAACTCAGCTGGAAGATGTATGCCAAACACCTGCAGCTTTATCAT
+TTCTATTCTGTCTCTCCTCTTTTTAAGCTAAAGTTTAAAGGGCTAGAGTC
+CCCTGCAGGTTGGAGAATCTAAGGAATGAAGTTGAAAGGTAGCCTGAGGT
+CAAATTGATTTGTTGTTTTGAGACAAAGTCTTGCACTGTATATTCAAGGC
+TCTTGTCAAATTCCTGATTCTCCTGGTTCCGTCTCTTGAATGCTGCCAAG
+TTATAATATTGGGATCGTGTTCTAATTGGCTGAGAAGTCTGTATTAGAAG
+TTCTAGCTTCTGACCTGCAGAGTATAGCAGAAGGATTTTCATTTTCTGAT
+ATTTTTGGTTAGTGTCATCTCTGTTCTGAGAGTGCATTCTGACTCTCATA
+CTTTAAATAAGAGTACTTGGTATGCTAAGAGGAAATGCTTGTTATAAGAC
+TGTAAAACTATCTTTTATTCTCCTGGAGTAATTGTCTCCAAGGCTTACTG
+CCTCTGTCCATTAACCTAGACTTAGTACCCAAAGGTGCTAGCCTCCATAC
+AATCTAATTTATGCCGAGACTATTTTCAACTTCTGAAACTTATTGCTCCA
+TAAGCTCACCCTTTCTTGTTCTTTCTGATCTCTGGCTGCTGATTCAATTC
+AGTTAGCTGTTCTGGCTCAGACTCCTCTCCAAGCTGACTGATTGAATCTG
+GTTTCTCTCTCTTGGCTTCTCCTGCATTGTTCTGCTTGGCCTTACACTAA
+CTTTGACAATCTGTTCTAATTTTCTGGCTCCTTCTTATTCTCTGGCTTGT
+TCTAGCTTCACCTGTGTCTAGTTTGTCCTCTCTCTATAACCTGTCTCTCT
+ATCACGGTCCAGGGAAAACTGCCTCCTTCCTCTCTCTGCCCTCCTCTGCA
+AGTAGCTTTTTTTCCCCCTTTTTCTTCTGGTGAGAGTTGGGCAGATCCTA
+TTCTAGCAAATCTTTCTCTAATTCATCACTTTGTCTGCTATTCAATTAGA
+CTTCTATAAACTACTTTTACCCTCATTGATTGAGATTAAAGGGTGTGTTT
+GTATTCCAGCCAGAAGTGGCTTAGGTGTATGCTAAGGGCTTAGCCACACC
+ACAACGAGAAATAAGTTTTGTTGTTGTTGTTGGTTTTGTTTTTTGTTTTT
+TGTTTTTTTGTCAGTAAATAACACAATCTTAGAGTTCATTGTGTGATCAA
+ATATCCTGCAACATAAGGTCTGGATGTTCTGGCCTGAATTTTAAATCTGG
+CACCATGAGAGATAGATTCTGATAGAAGAGTTGTGCTGCTCTTAGAATGT
+ACAGGGCCAGAGAACAGATGCATGATGGATATAAGAAAAGAGGAACAATA
+TCATTATTGTAAGAGCAAGTAGATGGCTTGCTTTTCACACAAAGCAGGCA
+CTTAATAACTATTGTTTGAATTTTAAGTCAAACTAGCAACTATTGGGAAC
+TAGCAAAATTTTATGATATTAGGAAGGGTCAAATTTTTCCTGAAAAGGGT
+TTAGTTTGTTGTAAATAGTTTGGGATGAGGTAAAAGAGAAAACTTGAGAT
+TTGTCTTTTCTTTGGTTGTCTGTGATGGTTTATTGTCCCGTTTTTGACAG
+TGACCTCTTAGTGATGTGAATCTGTGAACAAGTGATCTTTGCACGTGTAT
+GTTTGTATGTGTGTGTGCTCATGTGAGTGTACCTGCTGTGGGCCTGTGGA
+AGTTAGAGGACAACTTTGGAGAGTTGCATTGTTTGTATTTGTCAGGGTTC
+TCTAAAGGAGCTGAACTGAAAAGATGTATATGTGTGTGTGTATGTCTTAG
+TTATTGTTCTTTAGTGATGAAACACTATAACCAAGGCAACTTAAACAGAA
+GCATTTAATTGGGGCTTGCTTACAGTTTCAGAGGCTTAGTTCTTATCATC
+ATGGCAGGATTGTAGAGTCAGGCAGCTATGGTGCTGAGAAGTAGCTGAGA
+ACTCACATCTGACTAGCAGTTTGCAGGCAGGGAGAGAAAGAGAGAGAGAG
+AGGCAGAGAGAGAGAGAGACAGAGAGAGAGAGAGAGAGTCAGAGACAGAC
+AGAGACAGAGACAGACAGAGACACAGAGAGAGAGACAAACATAGAAAGAG
+ATACAGACAGACAGGTCCATTAACTAGAACCAAACATTTAAGCATGAGTC
+CATGGGGCCATTCTCGTTCAAACTACTATATATGGTATATAAAATGTGTA
+TATATGTATATATACATATATATATATATATACACACACACACACATGTA
+CATTTTTAAAAGGGAATTTTTTATGTTGCTTTATAAGTTGTGGTCTGGGT
+AGTCCAACAGTGACTTTCCTCTGACAGAAAGGCCAAGAATCCAACAGTTG
+TTCAAATTGAATGTCTCGGCAGTCCCAGTCTGTTGCTGGAGTCCTGGAAG
+ATTCCTAGAGAGGTGTGGGGTCTTTAGTCTGTGTTGGAGTTCTGAAAAAG
+TAGGTTCCAATACCAGTGGAGGAATCCCTCAGCAACAGGATAAATTAGTT
+CTCAACATTTGGGGGGTCCAACGGCCCTTTCACACGGGTCACCTAAGACC
+ATTGAAAAACACAGATATTTATGTGGTGATTCATAACAGTAGCAAAAATT
+ACAGTTATGAAATACTGTACGATGAGAATAATGTTATGGTTGGGGTTCAT
+TATGACTTGAGGAAGCTTATTAAAGGATCTCAGCATTAGGATGGTTCGTA
+ACCACTGTGATAGATGGATCAGCTGTGGAGAGTGAGGGCACGAGGGCAAG
+CAGCAAAGTCTTCCTTCCATGTCCATTTATGTGAGCTGCCACCAGAAGGT
+GACCTAGATTTAGGGTGGGTCTTCCCACCTTCAGTAATCCAATCAAGAAA
+GTCCCTCACAGATATGCCCAGTGCTTGGGTTTTAGTTGATTCCAGATGAT
+GTCAGTTAAGATTAGCTGTCTCCTTGGTCCTTTCCTTTCTGCTGTTTTCA
+ACCCCTCCCTCCTGGCTTTGTCCCTCTCCACTCCCCCATCTAGTCAAGCA
+ATTTTCTTACCAGACTAAAGAATTTGAGTTCAGGACATTTTAGATGAAGC
+TTTATTATTTCTACCTTTCTTTGATCTCTTGATTTTAGATACATAAGTGT
+AAAGTTAAAGATCATTGTCCTTTATCTTCAGAGTTGCCGAAGATTCACCC
+AAGAAACTAGTACTGATAATTTCTGTAATGTATTCTCCCATATTGGGAAT
+TTACTTCATTTGATGAGCTCTTGTGGAACTCTGTATAGTATGGGTACTGA
+GTGCTAGTAATATAGATAAGATATGGTCCCTTCACCTTCACTTCTGGGGA
+TACAATCTAAGGCACACTGTCTGAAGAAGTGCCAGCAGGAGCCAGAGAAG
+GGCCCTTGGTAAATAAGAGCTTCCCCCAAGGTGCCCAGTGCCTCAAAAAG
+AATTATGAATTTTCATTTCATTGTTAAGATTGGTATTGTCTATAAGTCAT
+AGTTTGATAAGCTTGACTAAGTCGAAGGTGGCAAGCCATAAGAGGTGGTA
+TCATATTCTTTGTTGCCACCTTTATCCTATGGGGTTATCTCATGAAGGAG
+GGAGAGCAAGAATGAATACCAGAGAAAACTTTTTCCAACTCATTTGTATT
+ACTTCAAAGATGTAAGATAAATGCTCCATAGGCCTACCTAGTTTATCAGA
+CATGTAGTTCGTGTGAAACTGTTGGGTTTTTTTTTTTTTAATCTTTTTTT
+AACTTGTGGATGTGTGTGGTGTTTGTATGGAGGCAGACATGTGTGTATGG
+TGTGTTTTACAGTATGGAGGCCTCTAAGATTGATGTCGGGAATCTCCCTT
+ATCTTTTTGTACTTTGTTCTTCGAGGCAGGATCTCTTAGTCAAACTCAGG
+CTTTGATAGCCAGCTAGCTCAAGGACTCAAGTCCCATCTCTCCTTTCAAT
+GCTGGATTCACAAGTGGCTGCTGCATTGTCACTTTGCATTTAAATGGGTT
+TTGGGGATCTGGATTTGGCCCCTTTGTTTGTACAGCAAATGTTAACACTA
+AGTCATCTTTTCAGGTCTGATTTTTTTTCCCCTTAAATTCTTTATTAATA
+GCCACTTCCTCCATGATTCTACTAGGTGATACAACTCCATCAGTAATTTT
+TAACTACAGAAAAGTTGGCATGTTGACATACTGCTTCTGTGGACACATGC
+CTCATAAACTTGTCAAAGCCCGAGGGGACGTTGGGAATCTCTTTATGACA
+ATTCCCGAGGGCAGTAGTCTTTCCTCTTGAAAGTGGATGAGGCCTAGACC
+TGTTTATTTGGGCAGCCATATACCACTGATCATGGTGGACCTGTCAAAAG
+TACCTATTTACCATGCTGTCTGAGTTTAGCTGAGTCTTGTCAGTTACAAT
+TGGGAAAGTTGGCGAGAGCAAAGGGACTTGGTTAGTTTGGCTTTGGATCC
+GAGGTGACTGAAAATCTCAGATAATGAATGTTTCCATAATAAATGTAATT
+AGGTCACTGAGTTCAGTGTTGTCAGCCTTCTTTTTTTGTTTACACTTTTT
+GCTTTATTATTTAATGAATGTATTAGTGCTGTGCTGCATGTACACCAGCA
+TGCCAGAAAAGGGCAACAGATGCCCTTTTTGATGGTCAGAGCCACCATGT
+GGTTGCTGGCAATTGAAGGGAATTGAACTCAGAACCTCTGGAAGAGCAGC
+TGGTGTGCTTAATTGCTGAGCCATCTCATCACCATCAGGTAGCCTTTTTG
+TCACTACTTGGTACTGGTGTAGCATGACCCCTTTAAGACGTGTAATTTCA
+GTTCATTTAAAACCTGGGACAGATTTTTCCTGTGCAAATCAAAGGATAAT
+GGGTTGGTGCTTTAGTTTGCTTCCTAAAATGTTTAATGGGGTTAGTGTTC
+TGGAAGCATCCTAATGGCTATTTTAAGGTAGGTAAAAACCTAAGCTGTTG
+CAACAGAGGCTTATTACAAGTGATTTAGAGAAGAAAGCATTTTATTCTCA
+TAGTATTTTCTTTACAGTGTGAGGAAATGTTCTTTTTGGTATACAATTCT
+GAGATTTAACTAATGCAATCCCTGTGACTACCATAATCAGGATTCATAAA
+TTTACTATTATTATATGGTGTGCATGTACATGTCAAGTGTTTGAGTGTGT
+GTTTGCATGTCCCATGTGCAAAGGAATGCGGGTGGAGGTCAGAGGACAAC
+ATTAGAGTTAAGTTTTTCTATTGTGAGCTCCAGGGATTGTACTGGCTAGT
+TTTGTGTCAACTTGACACAGCTGGAGTTATCACAGAGAAAGGAGCTTCAA
+TTGAGGAAATGCCTCCACGAGATCCAACTGTAAGGCATTTTCTCAATTAG
+TGATCAAGGGGGAAAGGCCCCTTGTGGGTGGGACCATCTCTGGGCTGGTA
+GTCTTGGTTCTATAAGAGAGCAGGCTGAGCAAGCCAGGTGAGGCAAGCCA
+GTAAAGAACATCCCTCCATGGCCTCTGCATCAGCTCCTGCTTCCTGACCT
+GCTTGAGTTCCAGTCCTGACTTCCTTTGGTGATGAACAGCAGTATGGAAG
+TGTAAGCCAAATAAACCCTTTCCTCCCCAACTTGCTTCTTGGTCGTGATG
+TTTGTGCAGGAATAGAAACCCTCACTAAGGGATCACATGCAGATCCTCAG
+GCTTGTGTGGCGAGTCATTTTACCATCTGAGCTGTCTTGCTTCCATAATC
+AGGAGTTAGGTTGGTTCTGTCGCTACCCGAAGCTCCCTATGCTGCTTTGC
+TGTCATCTCCTCACCCTGACATTACTGGTCATATTTTAATTCTTACAGTT
+TTGATTTGTTCTAAAAATAGAATGGCCCTGTGTTGGTTGTCGGAGTCTTA
+CTAAGTTCTTCCTCAAGTGTAGTGCATTGTGATCTGTCCTTGTTTCCTGT
+TAGTAGTTACTTCCTTTTCTTAGATACGTTTCCATTGTTTGGCTATTGCT
+TTTGTTTATCTTTCAGCAGGTGGATGAGGAGCGCTTGCTAGATTTCCATT
+CTAGCATTGCTGCAGTGAGCTTCTTATGAGGATGGGAGGTGTGGGAGCCT
+GTGCTTCTGAAGATGATAAAGGGACCCACGATGCTGCCGTGCTGCTCTGC
+ACTCTTCTTATTGTCTTTATTTATATAATTGACACTCGAGTGTTGGGCAT
+GTTTGTTCTGATTGGAGGAAGATAGCTTGGACATTCAGATACAATAGGAA
+TTCTGTATATCACTTGCATTCCCAATACATTTATGGGAGGAAGTTATGTG
+CTTGTGTTAGGCAAATTTTGGTGGGTGACCAGCAGTTTACGTGACCGGTC
+AGAAAAAGTCTCTTTCTGGGAAGACATAAGCTACTTTTTTTTCACATGTC
+TGACTTTTCTGAGTGTCGTGTGAGGAAGCTCTTAGTAGACCTCATCTGTC
+GTCATCCCTTCCTCATGCTGCCCTCTTCCCAGCATGGAGTTTATGATTCA
+CTAGTAGTAGCCAAAACGTACTTAGGAATGAATGAAATATAGAAACAAAC
+GAAGTAGTCACCTCAGTGGATGCTCATTTCTTTTCCTCTTGTTTTTTTGT
+AGAAAGAAGGAACTCTCAGCCACCAAGAAAGACCGTGTGAATCATTGTCT
+AACAATATGTGAAAACATTGTGGCACAGTCTCTCAGGTAATTGGCTTTTT
+AAAAAAAAGATTTATATATTTATGTATATGAGTATTCTGCTTGCATGCAT
+GCCTCCATGACAGAAGAGGGCATCAGATCCCTTTATAGATGGGTTGCTGG
+GTAGCCAGTGCTGAGCCATCTCCCCAGCCTCTTCTTTTCTTTTTGTTTTT
+GGTTATTTTTGTTGTTGTTTTTCTTTTTTTGTTTAAAAGATCTCTCTGGT
+AATTACTGAGTTGGGTGGTGGTGGATATACCTGTAACCTGGCATTCAGGA
+GGCAGAGGCAGGCAGATCTTGGTGAGTTCAGGGATAGCCTGGACTACAGA
+GCCAGTACCAGGACCTACACAAAGAAACCTTTAACTCATAAAAAACAGAA
+AACAAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGA
+AGAAGAAGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGA
+AGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGAAGAAGAAGGAAGAAGAA
+GGAAGAAGGAAGAAGGAAGAAGGAAGAAGGAAGAAGAAGAAGAAGAAGAA
+CAACAACAACAAACTGGTTGCTGGGCTGTGGTAGTGCATTTCTTTAATCC
+CAGCACAACAACAACAACAACAACAAACTGGTTGCTGGGCTGTGGTAGTG
+CATTTCTTTAATCCCAGCACTTGGGAGGCAGAAACAGGTGACTCTCAAGG
+CCAACCTGGTCTACAGAGTGAGTTCCAGGATGGCTAGAGCTTACACAGAG
+AAACTCTGTCTTGAAACTCCACCCCACCCCAAACTATATCTAAATACTCT
+GTGTTTTCACTATTAATGCATTACCATGTTCTTTGTACCCCTAGCTATCT
+CTTAAAAGTTCATTTAGGCTAAGCGTATTTTGGTACATGCTTGCAATCCC
+AGTATTTGGCAGGCTGAGACAGGAGGATCTTGAATTTGGTATTAGCCTGG
+GCAACATAGCAAAACCCTGCATCAAGAAAAATCCATTTAAAATCAGGACG
+TTTTACCACATTTGTAGTTGTGCTATAAGGGTATCTGGGTTCTCTTATAG
+GAAATGTTTTCTTCTTGTCATCTTATATATGAGAATTTTAGTCATATGAT
+AATTGAATGGCATGTTAGTAATTTAATTTGTATTCTTTTAAGGTTTATTT
+ATTTTTAAATAAATGAATGAGTGTTTTGTCCCTATTTTATATTTGTTCCC
+TGTATGTGCCTGGCACCCACAGAGACCATAAGAAGGTACTGGAGTTCCTG
+GAACTGGAATTAAAAGATGGTCATAAGTTGCTGTGTGGATGCTGGGAAAC
+AAACTTAGGTCCTCTCTGCAAGAATAGCAAGTGCTCTTATCTACTGAGCC
+TCCTACTTTCTGTTTGTTTGTCTGAGACAAGGTCTCATTTAGCCCAGGAT
+GGCTTCAAACTCACTGTATAGCAAAAGATGACTTTGAGTTCCTGCTCCTC
+TTCTTCTGCCTCCTGAATGCTTAGACTATAGACCTGATCTCTTAGAGTTT
+CTATTGCTGTGATGAAATACCATGATCAAAAGCATCTTTCACTTATACAT
+GTGTGTAACAGTCTATCACTGTAGAAATCAGGGCAGGATCTCATACAGAC
+TGTGGAGGGGTACTGCTTGCTGGCTTATTCTTCATGGCTTGATTGGCCTG
+CTTTATAGAAGCCAGGAGCATGAGCCCAGGGGTGTTCCCATCCACAATGA
+GCCTGGCCCTTCCCCACCAATCATTATTTAAGAAAATGCACTATAGACTC
+TTCTGCCTAAAGTCCCATTTTATGGAGGCATTTTCTCAATTGAAGTTTCT
+TCAAAGACGAGTTTAGCTTATGTCACCTTGGTCAGAAAACTAGCTAGGAC
+ACTTGGCTTTAGAATATAGCTTCAATGTCAAAATTCCCCATAATCAAAAC
+TGAAAGTAAATTCAACTTGGGGATTGTATTGGCAATTTATATAAATTAAA
+GGCTTAATAGTCTGTAGGGTGAACCATTACAGAGCAGAGTATTATCCTAA
+TAAGAAAATACATAACACACACACACACACAATACACATATTTCAAGAAA
+GAAATTTGACTTGGCTACTACTCAGGCAGAGGCAGGAGGTTCATGACTTA
+GAGACTTTTTATAGAGCCCTCTTTCAGGCTTTATAAAAGCATGACTTCTT
+CTCAAACAAAAAGGAAAGAAGGAAGGAATCAGGGTGGGCTCAATGTTTAT
+TTCGTGTTTTCTATATTAGAAGTTCATCTTTATTAGTTATTATGGACTAA
+ATGTGTATCTCCCCAACCCGTCTGTATGTTGAAGATCTAATCCCCCATTC
+TGAAACTGAAACCAACTATGCATTTTAGTAGGTTAATTCAGTGTTTTCTT
+GAAGTTAATCTCACAACCCACATAGTTATACAGCTTTGTCTCATGGGCTG
+TTCTTCATGTAGTAGTCACAGGCTTGTGGGAGGCCACTGAGTGACTAATC
+TGAGATTTGAATGACCTTTCTCCTTAGGTTGAGGAGTTTGTGGTGTGCTA
+GCGTGGCTTGCAGAACTCAAGAAAATACTAACATTTTCTGGCTTGCTACA
+AAGCTATAGCTCAGTAGCAGCCAGATTGTAGAGACTCAGAGGGTAAGGTA
+TGGGAGGGAAGCATGGCCCCTCTGTGCTCTCTTCTTCCAGTCTTCCATGT
+GTCTCAGCATGGACTTCTGTTACCAGAACCAGGAGCTTGATATGTGTTGT
+ACTTTAAGCATCACCATTCACATTGGCAACAAAAATCATCAGAATTTTTT
+TCAGAGGGAAGACACTTGGGGCCATAATAGAGACAGATTAAAGTGGTGTC
+TTAAAAATAAAATCTGACCACAATAAAAATCAATCAGAAATGCAACAAAG
+TATAGGAAGTTACAATTTGTAACAAGAACAGTTAGTCAATGTTGGCCCAG
+AAATTAAACTAGGTTAGCATACACAAGCACTAAAACAGTATAAGAAACAT
+TTAAACAGTTTAACTTTTTTTTTTTTTAATGTCTAAGAAGCAAAAGGTTG
+AGGGTATGGGAAGAAGAAACAAGACAGGAAAAAGGCTGAGCCTGAACTAA
+AATAAGGAGACAGCCTACTTCTTATAGGAAATGCCTGCCAAAATCCAGGG
+AAGTGCCATCTGTAAGTCATTCTATCCTCAGTAAAAATCTCTTGTAAAAT
+GAAAAGGTGAAATAAAGAGGTTCTCAGACATACCTCTGAGGAGTACCCTC
+TGTCTGACAAGCAGTGAGTTTAAGACATTTTAAAGGAAGTGTTTCAAGCA
+GATAGAAAATTATACCAGATAGAATCTGGATCTGTCCAAAGGATTGCTAG
+CAGATGGATAAACATAGGAGATGTCTGTCTGTCTGTCTCTGTCCCTCTCT
+TGATCCCTCTCCCCCTCCCCTCTCTCAGACAAGATTATGTACTGATGTAG
+AACTAGTGATCCTCTTGCCTGTGCTGCTTGGGTGCTGGGATTACAGAAAT
+GTGTCTCATGCATGCAGCAGCAACCTGTTGCATGACAGACAACATAGGAA
+CAAATGTTTGCTCAACTCATAGGGAAGCATTAACAAACTAAAGTATAGAT
+GCCTCTAAAATCCAATTTGGTGAACCAGTGGATGCATAAGGGTTACTTAC
+AGTGGTATGGGTGAGTGGTTCCTTATAGGATCATGGATGACTCAAAAGGC
+ATCACCAAAATCCCACCCTGGCATGGGACACAGCTCACTAAAGCTAGAAC
+CCTGGAACTCTCTGCGCAACTTAGACTTCAGCAGTTCAGGAATCCCCCTC
+CCCTCAGCAGTCCTTACTACTTATATAACCCAGGAGTCTTAGTCAGGGTT
+TATATTGTTGTGATAAAACACTGACTAAAAGCAACTTGGGGAGGGAAGGG
+TTTATCTCAGCCTATCCGTCTACATCACAGTCCACTGAGGGAAAGGAACT
+GATACAGAGATAATAGAGGAGTGCTGGGGGGATGGATGGCTCAGCAGGTA
+AGAGCACTGACTGCTCTTCTGAAGGTCCTGAGTTCATAGCAACCACATGG
+TAGCTCACAACCATCTGTAATGAGATCTGATGCCCTCTTCTGTGGTGTCT
+GAAGACACCTACAGTGTACTTAAATAAATAAATTAAAAAAAAAAAAAAAG
+AGTAGCTCTGCTTACTGGCTTCCTCTCATGGTTTGCTCTCCTGCTTCTTT
+TTTATATGCCAATTGTGCAGTTCCTTTTTTTTTTATATTGGATATTTTCT
+TTATTTACATTTCAAATGTTATCCCCTTTCCCGGTCCCCCCCCCCAGAAC
+CCCCCTATCCCATCCTCCCTTCTCCTGCTTCTATGAGGGTGTTCCTCCAC
+CCACCCAGCCACCCACCAACTCCCACTTCCCTGCCCTTGATTCCCCTATA
+CTGGGGCATCTATCGAGCCTTCATAGGACCAAGGACCTCTCCTCCCATTA
+ATGCCTGACAAGGCCATCCTCTGCTACATAAGCAGCTGAAGCCATGTGTA
+CTCCTTTGTTAATGGCTTAGTCCCTGGGAGCTCTGGGGGTCTGGTTGGTT
+GATATTGTTGTTCTTCCCATGGGGTTGCAAACCCCTTCAACTCCTTCAGT
+CCTTTCTCCAACTCCTCTATTGGGGACCCCATGCTCAGTCCAATGGTTGG
+CTGCGAGTATCTGCCTCTGTATTTGTAATGCTCTGGCAGGGCCTCTCAGG
+AGACAGCCATATCAGGTTCCTTTCAACATGCACTTCTTGGCATCTACAAT
+AGTCTCTGGGTTTGATAACTGTATATGGGATGAATCCCCAAGTGGGACAG
+TCTCTGGATGGCCTTTCATTCAGTCTCTGCTCTATACTATCTCCATATTT
+GTTCCTGTGAGTATTTTGTTCTCCTAAGGAGGACTGAAGTACCCACACTT
+AGGTCTTTCTTCTTCTTGAGCTTCATGTGGTCTGTGAATTGTGGTCTGTA
+TCTTGGGTATTTGGAGCTTTTGGGCTAATATCCACTTATAGGTGAGTGTA
+TACCATTTGTGTTCTTTTATGATTGGGTTACCACACTCAGGATGATATTT
+TCTAGTTCCATTCATTTGCCTAAGAATTTCATAAATTCATCATTTTTAAT
+GACTGATAGTACTCCATTGTGTAAGTGTACCACATTTTCTGTATCCATTC
+CTCTGTTGAAGGACATCTAGTTTCTTTCCAGCTCCTGGCTATTATAAATA
+AGGCTGCTATGAACATAGTGGAACATATGTCCTTATTATATGTTGGAATG
+TCTTCTGGGTATATGCCCAGGAGTGGTATAGCTGGGTCCTCAGGTAGTAC
+TATGTCCAGTTTTCTGAGGAACCGCCAAACTGACTTCCAGAGTGGTTGTA
+CAAGTTTGCAATCCCACCAGCAATGGAGGAGTGTTTCCCTTTCTCCACAT
+CCTCACCAGCATCTGCTGTCACCTGAGTTTTTTATCTTAGCCATTCTGAC
+TGGTGTGAGGTGGAGTCTCAGGGTTGTTTTGATTTGCATTTCTCTGATGA
+CTAAGGGTATTGAACATTTCTTTAGGTGCTTCTCAGCCATTTGATATTCC
+TCGGTTGTGAATTCTTTGTTTAGCTCTGTACCCCATTTTTAATAGGGTTA
+TTTGGTTCTCTTGAGTTCTTTGTATATATTGGATACTAGCCCTCTATTGG
+ATGTAGGGTTGGTAAAGATCTTTTCCCAATCTATTGGTTGCTGTTTTTGT
+TGTTGTTGGGTTTTTTTTGTTTGTTTTTGTTTTTGTTTAAGGTCACATTT
+TTAAATTCTTGATCTTAGAGCATAAGCCATTGGTGTTCTGTTCAGGAAAT
+TTGGGGACAGAGAGTCATTGTGAATCTGATAAACTTCAGGGACTTCCTAA
+GATTTCTGGGCTGTTTCCTTCCTTGAGTCTCTTTGTTTCTTTGTTTGTTT
+TTTGAAATGGAGTTTCTTTAGTCCAGCCAGTCCTTAAATGTACTATTTAG
+ATCAGGTTGGCCTTGAACTCACAGAGATTTCCTACTTCTTTCTCCTGAGT
+ACTGGGATTAAAGGCATGTGCCACCTTGCTCAGCCACTTTCTGAGTCATA
+ACAAGCATTCCTTCAGAAGTCCCTGACTCTGAAAAAGCTTGCTATACAGC
+ACAATCTTTTCTTTTTCTTTTTTTCTTTATTTTATGTTGATTGGTTAGTT
+TTGTGTGTGTTATGTCAATGGGTATGCATTGACATAGTGCTTGTGTGTAA
+GTCAGAAGCCAACTTTCAGGTGATGGTTCTCTTTTTATGATGTGTGTTCT
+GAATATTGAAATCAGGTTATCAGGCAGGCCTGCTGTTATTTGCTGACTAC
+ATGTAGAAAGTATATTTTAGATATGTCTGTTGAGTAATTAGCCTTACTTC
+TTTCTTATTATATTGATGCTACTGGAAGGAATCCTGAGCCCATGCTTAGA
+GCTGTCATTAATTTCTGTTTTGGCACTGCTAAGACTGATGTCTGAGGGAA
+AGCCTCTAGTGTCAGACTTTCCGCCTTTCATCCTAGTTTATTACTGTATT
+CCTTTCCAGTAAGGTCTATGGCATTGCTGAGTGACATGTCTTTTTGCTCT
+GTATGTGTTGAGAATTATTGATCCCTTTTCTTTCTCTTAGGTTTATGGAT
+GTCTTTCAATGCCTTTTGAGCCAGAGTCTCATTGTGTAGATCTAGCTTGC
+CTAGAACTCATAAAGATCCCCCTGCCTCTGCCTCCCTGGTGCTGGGATTA
+AAATCACATGCTAGAATTTTTTAATTAGAGGTAAAAGAGAAGGTGACTGT
+ATAGATCATAGTCCTTTAGATTAAGATCCTAGTACAACAGTCTGCTTTTG
+ATGTTTTAGAAATGGCATGCCCCATTCTTAAGTCACCAGAATCACTTTCA
+AATATGCCTTGTGTTACAGGTTTGAGATACTTTAGAGTTTTCCACTCAAG
+TGGCCCACCAGCCTCCTCTTCTAATGGGCGGGCTTAGCTTGTTGGATCAT
+GACATACCAAACAGGTTTTCTCAACTAAACTTACTAAAATACTTTACAAA
+TAGTACCCAGATTGTGATACCAGTATTCGGTTTCTTGCAACAGAAAAGCC
+TGTAAGCCTGTGGAATCTAGGGAAATGTAGTTGGACCTTTATGCACATTG
+AAAGTAAGATTGAAAAGAAATAAAGGAGATGTATTGACTTGCTGGGTTTT
+CAAGGCTTCAAGGATGCTATCTAAAAGTAAATCTACCTTTTTACAAAGCA
+TATGATTACCTGAGATTTGAGAATCCTAGACAGTATCTTCTAAAGATGTT
+TCAATGAAATCTTAAAAAGAAAAGGGGATCAGTACCAATATGCTGCTCAC
+CACAGTCCACCTTTACGCCGATATTCTTAATTTTTATGAAATGTTCTTTC
+TTGTCCTAAAATTCCATTTAGATTGCTCTTTGCATTTTGTCATCAGTTCT
+CCTAGATCTGTCATGACGATGACAACCTCTCTCTCTCTCTCTCTCTCTCT
+CTCTCTCTCTCTCTCTTTTTTACTTAGAAATTCTCCAGAATTTCAGAAAC
+TCTTGGGCATCGCTATGGAACTGTTTCTGCTGTGCAGTGACGATGCGGAG
+TCAGATGTCAGAATGGTGGCTGATGAGTGCCTCAACAAAGTCATCAAAGT
+AAGCGCCCCATAATGATGATAATGGTGATGCGTGCTCCTGTAATTGTCAT
+GCCTTAAGAGACAAAGCTCCAGATACCTACATTTTTTCCATTTTGGGCAT
+GTGGCTTGGAGGACCTGGGGTATTTTCCCATAAACAGCTAAATGTGTTCC
+TGCGGAACTTTTTTTTTTTTGCACCCTTATAGTTCATAGGTCCATTCTGA
+AGGGCATCTGTGTGACCACTGGCTGCCTATTTCTTAAGAGGAATGTCTCT
+ATGGGCTGCCACTTTTGTTTGGGCATTGCTTGGAGAACCTCAGAGCCCTG
+GGGGCACAAAGGTGGGTGTGGGGGGAATGAAGTCTTGCATGCCTTCCTTC
+ATTTCTTTTCCTGCCCCTCTTAGTGGAGGTCAGACAATACTGCTTGTTGG
+TGAAGATGTAGCTTCTCAGGTTAGGTTAATTGGAGGTAGCATGACCTGAA
+TGGGGAGAGAGGAGAGTACAGACAGGAGAATGGAGCCAGGAAGCAATAGC
+CACTCAGGTGCAGTGCCAGAAGAGGTGAAAAGGCTGGGCACCTGTCTCAC
+TATGTCATTGCTTCCTAGTACTGAGTGCAGACCACTGCAACTCCAGTCTG
+TTTCTTTGTTCAGTTTTCTGCTATGAAAGAAGATTGAAGTCCTCCTTCCC
+CGCCCTCATCCTAATGCAGGTGGGCATTAGGAAGGGCAGAGAGAACAATG
+TGACTCTAAATTGACCATATCCTTGTTACTTCAAGTGCAAGCAGCTTTTC
+CTCTCAAACCTCTCTTCTGCCGTGGCTGATGCTGGTCAGGTTGGCTTGGT
+GCATGTTTGAGGCCACAGACCACTGTTGTAGCTTGCAGCTTTATGTTCTG
+GATCTTTGGCTGCCTATCTCCACTTTCTTCCTGTTTCTTTGTGGTTTCTA
+GTCAATGTCAGGAATTCCAGCTTTTACAGCACAGCCCTGAGTCAGGCCAA
+TCTTCATTCTTTCCCTTCTGCTTATAGCTGCTTTCATGTTTCTGCCTTTT
+CAACTGCTGCAATGTTCAGGATAAGCCAGTAAGGTGGACCAGGATCCAGC
+TATTCACTATGATAGAGGAAAGGCGAGGCAAGATGGGAGAATGGGATGGC
+TTCAGGCCAAGTGTAGGGAATGGTTCACTTTCTAGTCTGGGACTTTTCTT
+TTCTTGAAAGAATTAGTGTATGTACTATAAAGACCAATTTCTAGCCCCGA
+AAATGGGCAGATATTACTTGTCTTTTATGTTCAAAATACATTGAGTTCTG
+ATAGCCAGGATGTGTATTCCTGCTTTCAAGTCTTGGGTGGTAAAGTGAAT
+GTGTCCAGAACTCACCCATGGTATAAAATGCAGAGCAGAAAGAAGGTAGT
+AATTTTTTTTTTTCCCAAGAAGATAGAAGTGGTATTCTGGAGTCAGGAAA
+GACCCTCCCATTTTTACCTACTACCTGGAGGTTTGTCTCAGAGAGGAGAG
+CAGGCCCTTTTTAGGCTGTGAAAAATGGTGTGTCTTGAGGGTCAGTTTAA
+GTATTTTGTGTCTTGGTAGAAAATGAAGGCCTCTTGCAGATCACTGGCTT
+TGTGTGGAAGCCAGTTTTGATAGGGAATGAAAAGAAGGACCCCAGCTGAG
+GGGAAGGCATGAGCTAGGATGGTGTCAGAGTGTGTTTTGTCAGCTATGTG
+TGGAGGCAGGTTGGGAGTTGGGGGTAAAGGAAGCTATAATGAGCTCTATT
+ATCACCCCATGAAGGACTCATGGAAGAGCCATGCTCTACCCTTAACAGAG
+TTGAGCTTTTGTTTTCTCTACTACAAAATAAAGTAGATTTTGGTTCATAA
+ACATTTATATAGCTCAAACAGTATTTGATGTACTCTTAAATTTCATTTGA
+AAGTAGTCTTTTAATGTTTGTCAGCATGGTTTGCTTCTTTTGTTCAAACT
+ACAGCAGCAGAATCAGGGTCTGAAATTTCCTGGTGGGGCCAGAGAGATTG
+GTTAAGAAGAGAAGTTACTGCTTTTGCAGAAGACCTGACTTCAGTTCCCA
+GTACCATAAGATGTCGTATAACCACTTGTAACCCAAATTTCAGGGGGTCC
+TGTGCCTTCTTCTGACCTCTGTATGTTCCTGTACATATACATACACTCAG
+GCACACATAAAATGAATTTTTCAAAAAAAGTTGGTAGAATGTTATACTTC
+TTCTGAATCAATTTTCCAAAGTGTGCCTCTGTTTTACCTTTGAAACTCAT
+GGCCCAGTGAATGATCCCTGTGTCCTAGTTTCATCTCTTTGGCTATGATA
+AATATCCTGGCAAAAAACTGAAGGGAGAAAGAACTTGATTTAGAACTTCA
+GGTAACAGTCCATCATTCTAGTGAACTCAAAGCAACTAGAACGCAAAGCA
+GTTGGTCACAGTCGCAGTCAAGGGCAGAAAGGAAAAGAATGTGTGTGTGC
+TTGTTGCTCAGCTGTCTTCCTCTACTTTAATCTGTCCTAGGTCTAAAACT
+TAGGCAGCGGTGTTACTCATTTTCAGCATTGGTCATCTCACATCAATTAA
+GGCAATCAAAATAGTCCCTCACAAATATGGCCACAGGACAACCTGATCTA
+GACAGGATCTTAATGAGACTATTCCCAGGTAATCTAGGTTATGTCAGGTT
+GACACAGCTAACCATCTCATCTCTCCTTGGTCTCTAGATATCAGGTTTGC
+ATGTGCCTGAAAAGGAGAGCTGGGTCACTTCCTCACATTTTTTTGACATG
+CCATTTTAGAAGAGAAAGTTCTTAGGGACAAGAATAGGGTGATTTTCTCT
+AATGTGGACTTTATGCTATTAGCCTTGATTGTGGCTCGAAGTCAGATACA
+TGAACCTTATGTTTTAGTTAGAGTTTTATTGCTGTGAATAGGCACCATGA
+GTACAGCAACTCTTGTAAGGAAAACACTTAGTTGGGGCTGGGTCACAGTT
+CACAGGTTTAGTTCATCATCATGGTGGGAAGCATGGCAGCATGCAGGCAG
+GCATGGTGCTAGAGAGGTAGCATAGAATTCTATATCTGCACTGGCAATAG
+GAAGAAAAGACTGCCATTGGCCCTGTCTTGAGCATCTGAAACCTCAAAGC
+CCACCCCCAGTGACACACTTCCTCCAACAAGGCCCCACCTACTAATAGTG
+CCATTCCCTATGGGCCTGTGGAGCCATTTTCAGTCAAACCTTCATACCTT
+ATTTCTGAGTTAGCCCCAAAGTGATCAGGAAAGGTCAAGAAATGAGCTCT
+GAGGACTGTAAGACTTCCTAGCTGACACTGAGACCTGGGTGGGTCAGGGA
+GTTGGGGCTGTGAGCATCCTCATCATTTTGCAAGTTTAGAACTTCAAAAA
+AAGATGATTCTGTTGTTACATGTTCCCTGTCAGAGAGAAGGGCTCAGATC
+CTACAAGTACAATCCTCATATCTTGCTTACTTAGAGACTTTCACCTGAGC
+TGTCCAGGTGAGCCAGTGAAGACTGTGTGCTTACTCTGAAAGCTTTAGGG
+TTAATTTTAATTTGATATTATATAGTATTTGATTACTATAAATGTTATCT
+TCTGTTTTTATCCTGGTAAAGACCTGTATTTAGAATATTCTGTAATTTTT
+ATATGTGTTTACTTTTTTCTTAATATATAGCTTCTTATTTAGTTGCTGTT
+AATGTTACTTTTCCTTAAAATTTGAACTTTTGGTTACCATTTAGCTTTAT
+GGGTAGAATTTAGATCTATAGGTAGAATGTATCTTCTATAACAAGTCTCT
+TCTATTTCTTTGCAGGCTTTGATGGATTCTAATCTTCCAAGGCTACAGTT
+AGAACTCTATAAGGAAATTAAAAAGGTGGGTGTTTGCTCTGCATTATTGA
+GAAGATGATACTGTTTTACTGTTGAGTACCCTATGAGATTTCTAACTTGC
+AAGTTATTAAATAACACTGTTAGGAAGAAGTGCCATTTGGTGAAGCAGAG
+TTTAGTTTTCTTTAAAAACGTACTCCTCATTTTCATTAATTGAAATAGAA
+ATTTATAGCACCACCTTAAATTTTTTAAAGATTTTTTTTTGTTTTATTAT
+ATGTGTATGAGTTGCCTGTGTGTACATCTCTGCACCGTGTATGTGCAGTG
+CCTTTTGAGGTCCTCTGCAAGAGCAGCAAGTGCTCTTAACCCCTGAGCTG
+TAACTCCTAGCAACCAAGCAACCAACCAACAACTTACTTCTCTTCTCTCT
+TCTCTTCTCTTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTC
+TCTCTCTCTCTCTCTCTAGCATGACCCCAGCCAGTTCTGAGGTTGAAGGC
+AGGTTTCCCCCCCCATCTACTTTTATAATATTTTAGTTACATGCAAGTAA
+TAAGGTTAAGCAGTACAATAGACACAAACAGTCAAGGAACAAGCTAGGCA
+ATAAAGAAAGTCTCTTGATCACTCCCGTGATGACTGTTTCTAAGGGCTTA
+TCTGGATGACCAAAATATCTGGGCCTACTTCCCTGTCTTAGCTCAAAGTC
+ATATTCATGCCTGAAGCCTGTTCTAGCCTAAAATTACATTCCTGCCTGAG
+CTTACTTCCTTGTCATGGCCTAGTGTCAGATTTCTGCTAAGTGGTCCCAA
+AAAGCTCTCCACATCTCCCCCTTTTTTATTTCATAAACAAGACTGCACCT
+GTCTTAGGTGGTTCTAACAAGAATGCCTTCCTTACATGTCGTGGAATATC
+TATTATCAAAGTCGTGCATTTCTGTCTTAGGTTGGTAAGGCTCTGTGCAG
+AACTTACCCGTCAGCGTCAATGGCTGCCGGCCTATTAAATTAATAATTCT
+GTCTGGGGGTTCATTTTTAGTTTCAAACCATGTATTTTGGCCACCAACAT
+GTTGATGCTATTAAAGGCAAGTTTTATTACAGTGGGCAGGAATAAAAATA
+TTCCCAGGACAAAAAGGGCTATCATGATCAAGCTATATGTGCCATTCTTG
+AAGCTTGACCAAGATGGAAATACTGACATATATTCATGAATAATTTTATC
+GACAATACCTGCAGCGTCAGAGCTCAGCAGAGTACCATTCTTTAAATTCA
+TAATCTCAATATGCAAAATTAAAACATCCAGAGAGGTGTTAGAATTATGC
+CAAATACTCTTCAAGTGTCTTTCAATTTTCCCAATTATATTGACTCTCAT
+TGTAAATTTTAGAAGTAACACAAATCCCTTGGTATTTCGCATAACACTTG
+AGATGGCTTCTTACTCTTAAACTCTAAACCTCCTCTCATAATTTGAATAT
+TATCAATAAAGAGCATAAACCATTGTTTCGGACACCTATCTAAATCAATC
+TTTCTTTCGTTCTTCCTTTTCTTTTTTTAAATCTGTATTTTTATAATTGA
+AGCCCTCTGGATTTTAAAAATCTTTGAAGTAATTACCATCTATTTCACAC
+TGTTAATTTTGACTTTCTCTGATTAAATTAACAGAGAAAGAGAATGTTTA
+ACTCTCCAAGAGAGGATGCAAGCAGCTCCAATATCATAGACCTAGCTGTA
+GAAAGTGCATGTGACTCTTAAATGTAACACATCACTTGTTAACTCCACTT
+AGAATGGTGCTCCTCGAAGTTTGCGTGCTGCCCTGTGGAGGTTTGCTGAG
+CTGGCTCACCTGGTTCGACCTCAGAAGTGCAGGTAAGTTGTACCTCTGTA
+TTATTTTTAAGATTTGTTTGGTAAATAGCTAGTCCTGCCTGTCTTTTTTG
+TTCCAGTGCATATGTCTACACCTTGAGACATCATTCTTGTCCACTCTGTG
+TTGCCTAGCTATGTCCTGTCTGGTTGCTGTCAGCATTTTGTTCTTATATT
+TCTTTCCAAAGACCCATCTCTATTAGGAATACTCTTATGTCCTCTATTAA
+TGTGCTTCTTTCTTGTCCCATCATTCCCCAAGAGACTTGTGGGATGTATT
+TCCAGAGTAACATAGTCTCACCATCTTATTCTGTGCTTCATTTCCAACAC
+GAATGTAGTGAGCACTCAATGTGTTGGTTAAGGAATACTTACTGGATGAA
+TGACAACTGTCCCTGATCCCATCAGCAGCAGAGTTAGCAATTATTGAAAA
+ATAATCATTTGTATAGTACTTGGGTTGGAGAAGTAGATGAACTGTGCATA
+AAATTTGAACTTGTTGATTGTCTTTGACTCCTATGGTGTTAGGTATTAAA
+TCCAGGAGCTCAGGCATGCTAGGCAATTGCTTTACTGCTGAGTCTCATTG
+TCAGCCCACACATCTGGCTTTGTGGCTTGAGTAACAAAACAGGATTGTAG
+TTATACCGTTCTTTCCTTTTTCTCCTCAGTTGGTAAAATGTACTGCGTAT
+GTCCCAAAGACTGTATCCGTGAGAAACATAGTAGAGTGCTCTAAATCTTC
+ACACTAACAAGGAACAATGATGTGTTCAATTTAGGTTAAGTTTCAGTAAG
+AATTTTATGTGGCCAGAAAGATGTAACCACGAAGGAAGTTTCTTTCTATA
+GAACTTTTACTTTTGCTCTGCTGATATGTTTATTTTTGTGTTGGCTTTGG
+TGCTAAGGCTAGGCATGCCTAAGTGTGCGCTTCCACACTTCCACATTTCC
+AGCTCAAGTGCCTTACTTTGAAAATTGTCATATTATTGAGGTAATACATC
+CCAGGTTTACAACACAGCAGACAAAATAGCTGTTCAAAAGTATGACTGTC
+CTGTCACAGGCCCTTCCTTAAACTCTCTTAATTATTCCCTAGTGTTGTTT
+AGATAAAATCTAGACTTCCCACTCAAGGCAAGAGATAATTTAGCCTTTGC
+TGGCCCTTTCCTATCTCATCACTAAAGAAATACTATATTGATTCATTGAT
+CCAATTTAGTTCTTCAAATAGGTTTTATTTCCTCAAGATAGCTTGATGTC
+TCAGAAAATAATCACCTCCTTCTGCAAGCATTCTAGCCTCCTTATCTTCT
+AAGTTGTAACTGATTTCTGCCTAACAAAGATGGCTGTAGTATCTGTAGGC
+CTAATTGCTATATAAGAGGGCCTTGGCCCCACTATAGCAGACACCCTACT
+CCATCCTCGTTTTTCTGTCTAAGGTAAGGAATTGTTGTTTGAAGTCCCCA
+TTTAATTTGACCACTTTTTTACCCCTAGAGATTGTGCAGCTCTCCTGGAG
+AGCATAAAAGAAACTTGTGTTGATTTCTAATTAAAAGAAGCTGGGTTACC
+AAACAAAAGTCCCATAATGTCTGGCATGCCAAATTAATGGGTTTATTTGG
+CTTATATACAAAAGCACGGACAACTTATAAGTAGCTATGCCTTTGAAGCT
+TCACCTCCAGTTAGTTTGCCTTTTACATAACTCAGTCCTTCTAAGGTTAT
+GTATCCCTGTAGTACAAAGGAATGATGATTGGAATCTTGGGGTCTTATGA
+CATTTTCTTTTTCTCCCCAAAAGGGAGTATTATCAGACTATCCCATAGAC
+CTAGGCACCTGCTCTGTAGTTCTCCTTGCTTTGTTGTCTGCTTGTGGCTC
+CAAGGTCTCCATATAAGTCACCACAGCTACTCTGCTTCATGGTAGGGATG
+GTCATGTCAAGTGTACAGGAAACAGCTAGCCCACAGTAGCTTTTAACATG
+ACTTTTTCAGTAAAATGATGTCCACATTTTATTTTGTTTTTCAAAAGTTA
+CTGTATAAGGGTACATACCAGTACCTGTATATAGAAAGGTAAGAGGAGTG
+TCATTCCTGGGCATGGTTCCTCAGGAGCTGTCCAGCTTGTTTGATAAGAT
+AAAGTCTTCACTGCCCTAGGATCTGCTGATGCGACTTGGCTGGCTGCCCC
+TTAACCTCAGAGAATCACTGGTTTCTGACTTCCTAGTGCAGGGATTACAA
+ATGAGCACTACTATGTCTGGGTTTTTATGTGGATGATAGGGATCGAACTC
+ATATCCTGTGCTTGGTGCATAAGCTATCTCCTTTGTTCCCTCCAGTTTGT
+GTTTTTAAAGTAGTAGTTTTTAATCTATTGTTTCCTATTTAAATTATATT
+TTAATAATTGCTATAATTTGTGACAAATTTTTTAAATATTTATCTTTAAT
+ATTTTTTTACAGTCCAGTCTTTATCCCTCTCCTGATCTGCCCTCCCAAAG
+TTTTTCAAACCCAAAAAAAGAAGAAAAAGAAATTGGAGAGATCGTACATT
+AGTAACTTAACAGAATACCTGAAAGCCCTAGAACAGAAAGAAGCAAACGT
+GTCCAAAAGGAGTAGACAGGAGGAAATAGTCAAACTCAGGGCCAAAATCA
+ACCAAAGAGAAACAAAGAAACTGATAAAAGAATCAACAAAACCAAAAGCT
+GGATCTTTGAAAATCAACAAGATAGATAAGCCCCTAGCCAAACTAAGGGG
+CACAGAGACAGTATCCAAACTAACAAAATCAGAACTGAAAAGGGAGACAT
+AACAACAGAACCTGAGGAAATCATCAGGTCCTACTACAAAAGCCCAACAA
+AACTCAACAAAACTGGAAAAATCTAGATGAAATGGTTGATTTTCTAGACA
+TATACCATGTCCTAAAGTTAAATCAAGATCATGTAAACTATCTAAACAGT
+CCTATATCCCCTAAAGAAATAGAAGACGTCATTAAAAACCTTCCAACCAA
+AAAGAAGCCCAGGGCCAGATGGCTTTAGTGCAGAATTCTACCAGACCTTC
+AAAGAGGAGCTAATACCAACACTCAGCAAATAATAATTTAAATCCAACTT
+TTTAAATTACATTTTATTTGTCTGTGTGTCTTTATATGTGTACCATGCCT
+TTGGGTGAGGATAACTTATAGTTAATTCTTCCTTTCCTCTATGTGGGTCC
+CAGAGATCAAACTTGGGTCTTCAGGCTTCTCCTTCTTTACCCACAAAGCC
+ATCTTGCTTGTCCCTACACCCAGCTTCTTAATATTCTTTGTAACTCATGG
+GAGAGATGACAGACAATTGAACTTCATCAGCATTTATGCCTCCTGTACTT
+GTAGTTGAGCATTGTGGTCTCCATTGAGGACTAATTCACCTATAAAACTA
+GGTTTTTTCCTGACAGGGAACCATGAGCTTGTTGTTTCTTAACAGAGGAG
+ACCTGAAGAATGATGAGTATTCCTCTTGCACATACAGGCCTTACCTGGTG
+AATCTTCTTCCATGCCTGACCCGAACAAGCAAAAGACCGGAGGAATCAGT
+TCAGGAGACCTTGGCTGCAGCTGTTCCTAAAATTATGGCTTCTTTTGGCA
+ATTTCGCAAATGACAATGAAATTAAGGTATGGCTGTTGCCTCTTGGCATG
+AGTCTTGTGTGGCTTTGGGGAGAAAGTCATTTGAGATTGCTTCTGGTGTC
+CTTTTGGCTTCACTGAGAGACATCTCAAGAACTTCTTTTTACTTCTGCTT
+TCCTTTCATGGGGTAAGTTGTCAAGGGAAATAGCTTATAGATGCAAATTC
+AAAGGCATTTCCCCAGAGTGGATTTAGGTATACTGGGTTGGCCACTTGAG
+CCAGCTAAGGAAAAGAGACTTCATAGGAAAGAGTGAAGAAGAGTTAATGG
+GCCTTGTGGGTGTGGGCGCCCTAAAGCCACCAGGACTCGAGTTTGGTTCA
+TAGTGCCCAGAAAGCAACTTATTACATAATTTGTGGGTTGCAAGATTCTT
+GGCTTTGATTTTATCTTTTTGAAAAAGTATTTTTTTTTTAATTTATTTAT
+TTATTATATCGTTACGGATGGTTGTGAGCCACCATGTGGTTGCTGGGATT
+TGAACTCCAGACCTTCGGAAGAGCAGTCGGGTGCTCTTACCCACTGAGCC
+ATCTCACCAGCCCCAGTTTTTATTTTTAAAGTATTTATTTTATATGTTTG
+GGTGTTTTGTCAATGTACTGTATACATGCCTACTGTTCTCAGAAGCCAGA
+AAAGTGTTGGATATCCTAGAACTAAAGTTATAGATGATTGTGCGCCACCA
+CATGGGTGCTGCAAACTGAATCTGGATCCTCTGAAAGAGTAACTAGTTCT
+CTTAAGCCCTGAGCCCACTCTCCAGCTTCTACCTTTTCTCATTGTTTATC
+TGTGTAAGTGCGTGTGCGTGTGTGTATGTCTGTCTGTCTGTGTGTCTGTC
+TGTATGAGCCTGTGTGTGAATGGAGGCTAGAAGAAGGTGCTAGGTGTCCG
+TCTTTATCACTCTCTGCCTGTTCTTTTTGAGGTTGAGTTTCCCTGAACCT
+GAGGCTTACTTTTTTTTTTTTTTTAAATTGGACATTTTATTTGTTTACAT
+TTCAAATGTTATCCCCTTTCCCAGTTTCCCTTCTGCAAACCCCCTATCTT
+ATCACCACCCTCACCCTGCTTCTATGAGGGTGCTTATCCACCCACCCACC
+CTCCCACTCACTCCTGCCTCACTGCCCTAGCATTCCTCTACACTGGGGTA
+TCAAACCTTTATAGGACCAAAGGCCTCCCCTCCTATTGATGTCAGATAAG
+GCCCCTTTAGCTCCTTCAGTCCTTCTCCTATCTGCTCCATTGGGGTCCCT
+GTGCTCAGTCTGATGGTTGGCTGTGAGCCTCTGCATCTGTATTGGTCAGG
+ATCTGGCGATACAGGAGATAGCTGTATCAGGCTCCGGTCAGCAAGCACTT
+CTTGACATCAGCAGTAGTGTCTCTGGGTTTGGTGTCTGCATGTGGGATGG
+ATCCCCAGGTGGGGCAGTCTCTAGATGGCCTTTCCTTCAGTCTCTGTTCC
+ACTTTTTGTCCCTGTATTTCCTTTAGACAGGAGCAATTCTTGGTTAATAT
+TTTGGAGATGGGTGGGTGGCTCAATCCCTCAACCAGGGGGCCATGCCTAA
+CCTCTGAATATGGTCTCAACAGGTTCTCTTTCTCCTTTGTGGGGTATTTC
+AGCTAATGCCATCCCTGTGGGGTCCTGGGAGGCTCTTGCTTTCCTGGCAT
+CTGCTGCTGCTGTCAGTGTTATTCCTCCCTCCTTGGAGGGGTGGAAGCTC
+CTGATGGTGCAGAAATGAGTACTGCAATACTGTCAAGAGTCTCTGTGATA
+ACTGCTGTCAGAGCCAGGGGACAGGTGTATACACACACACACACACACAC
+ACAGTGGTTGGTTCTGGATCTTTCCATGATATAGATGCCATTTGAGTAAG
+GTAATACTTTCCTTTTTTTTTTTTTTTTTTTTTTTTTTTGTATGTATCTG
+TAGCTGTACAGATGGTTGTGAGCTTCATGTGGTTGTTGGGAATTGAATTT
+TAGGACTTCTGCTTGCTCTGGTTGGCTGTACTTGCTCCGGTCAACCCTGC
+TTGCTCAGGCCCAAAGATGTATTTATTATTATTAAAAAAGTACACTGTAG
+CTGTCTTCAGATGCACCAGAGGCAGACATCAGATCTCATTATGGGTGGCT
+GTGAGCCACCATGTGGTTGCTGGGATTTGAACTCAGGACCTTTGGAAGAG
+CAGTCAGTGCTCTTATCCACTGAGCTATCTCTCCGCCCTCCCCCCCATAC
+TTACTAACTACTTCCTTCATGAACCTGTGACATTTAAGAGATCTAGTCAT
+TCTTCTGCCCATGTATCATTGCTGTGCTCTAGAAACAAATAGTGCCACCC
+TGTCCTACTTATCTTGGTTCTGTGTCAGAGGCAAACAATAATGCTTGCTT
+CCCTGGGTTTAGATTTTTAAATTTTACATTTGTTTTTTAACTGTAGAAGA
+GGTGAATTTGGCTCTAACACTTGTTTCTTTTTACAATAGTCCTGTATATA
+TTGAATATGTACTTTATTATGCCCTTATCAATAGTGATGGCTAATCGTAT
+ATGATTTTGAACACCTTTTTGTTTTCTAAACCTAATAATTATTGGTTGTT
+TCTGAAGTCTCAAACAGAAGTGCCATTTCTTACTCGTTAGCTTGCTCAAT
+AGATGGCCTATCTCCTTTGGCAGTTATCGTCCCACATCCTGCTTAATATG
+GCCAGTGATTCTTGAGTTTGTAAATTCTGTCATCCTGGAGACTCCTTTAC
+TGCTCTCCTCTTTCTGGCTGCTGTCTTCTGTCTAGGTTTGCTTCCCAAAG
+GGTTGTGCAGGAAGCAGTTGGGATTTGACATCCCTAAAAATTCCTTTGGT
+ATGCAGATCACTTTTTCCTCAGGAGAATTCAATCTTTGTTTTGTAGTACA
+GAACTGGAAGATCCTGTCCACACCAAGAGGCAGGATCCCCAAGGAGTTAG
+GTGTGTGGTGTAAAGAGGGACGCCTGTAAGGAGGCTGCAGCGGACAGAGT
+GTCTGGAGAGATGGGGTTCTAGGTCTTATGTAGTAGTGGAGTCTCCACAG
+AATGGACCCACAGTAAGAGGACACTTGCACACAGGCTGCCATAGTACTGG
+GGGTGATATCCTGAGAGATGGAACTAGGATGAAAATTGCCAGAGTCTCAC
+CCTGGTATGGACTGGGGGGGGGGGGGGTGTCGTCCTCACATAAAGTGTTT
+CTCGGTAAACAGGCTGACATGAAGGAATGGGGAAGGGCTAGAAGGTAGGG
+CTGAGAGGGTCCACAGGAAAGAGTAACTCTGAGTTTCCCTTTTTACCATT
+CTTACGTGTGTGTTTATTTCTATTAGCACTTTTATTGGTCTGAATATCAT
+TTGTGGGGTGTGTGGGATGTGTGTGTGTGCACGTGCGCACGCGCTATATC
+ATCATCAGCTAGCCATACAAGATATACAGAGACATACTTACAATCAGTTG
+CAGCCACCAATGAATGTATCACCAGTGGCCCTGAAACTGACAATGGCAGT
+TCTACTATGGAATGTGCTATGAAGTCAACAGTTAGTTAGGCTAAGTGTGG
+AAATAGTGAGTGAATGAGATAAAGGAGGGACATAGGCAAATGAAACAAAG
+ACAAGAGAGACAAGGTTAAATAAATGAAAGGAGAAGGTGGAGGGGTCAGG
+AAGAACATTATGGGCTTATTCTGGAGATTACTAGGAATATCTTCTCTGTG
+ATTTCTTAGAAAGTGGTATGTGGTATGCTGTCATGCCTATAAAGTTGCAG
+GCTTCCACTCACAGAGGCACCAGTCTAGGGAGGATGTTTTAGTACAGCAG
+CACTTCTGCAGAAAAGTCTTAGGCCAGATTATCACTGTATTTGTCTAGTG
+CTTTTTCCTTATTATTGTCAAGTTTTTTAAAAACTTTATATAGTGTTCTT
+ACTACCTCTGTTCTGGTATACCACGAGTAGCCTATAGACACTGAGACTGA
+CACAGTGAACAAGTTCCTGATGAATGTGTGTGTGTGTATTTCTTATTCAT
+GTGGGTTCTGAGGATGGAACTTAGGCCATCAACCTTGGCTACAAATTCCT
+TTATCATTTGACCTAGCACACAGCCTTCCTAATGCGATTAATAAGGAAGT
+AAATATACTGCTAATAAATACTTGGTGACTATTGAAAATTTGGTATTTTT
+TGCTCCATCTATGAAAAATGTGTCATCTTGTCACAGTTTTTGTCCTATAA
+GTTTTAGAATTCTGTGAAATGTGTAATAAGCTCCATGGGAGCTTCAGTTT
+TCATCATCTTGGTTTTGTTTGTTCTCAGGTTCTGTTGAAAGCTTTCATAG
+CAAATCTGAAGTCAAGCTCTCCCACCGTGCGGCGGACAGCAGCCGGCTCA
+GCCGTGAGCATCTGCCAACATTCTAGGAGGACACAGTACTTCTACAACTG
+GCTCCTTAATGTCCTCCTAGGTAAGAGAGAAAGGGCCTGCTGGCCCAGTC
+TTAGCATCTGCTCAATCTTCTAAACTACACTGACCCTTGCCATCATGATT
+AGACCATTTGCAGCTGCTGACTGCTAAATGTGAAGTGTGTAGGGGATGTT
+GCAAGCCCATAAATGGTCTCGGAGACTTTTCAGCTGCGGCTGTGTCTCTA
+GGACACCCAGCTGGTACACAACCTCATCCACCTTCCTGTCCTTCTGTATC
+AGAGGCCTGAGGCTATGCTTCAGCACGCTGTGGGTACTCTAGGGAAACTG
+ACATTCCCCTACCCCCTCTCTCCTGTCAAAATCAACATGAACAAGTCTTG
+CTGGAATGAGCATATGGACATTTGATACAACTCTCTGAATTCCACATGGA
+CATTTGATACAACTCTCTGAATTCCACATACAGTTCCACTCCCTATAAGG
+TCCTGCCAAGCTAAGGATATATTTTATGCTGCAAGGCTGCTTTTGATCTG
+AGTGTGCACAGCCTGTGTTTCTCAGTCTCGCTTATGTCACCTTTCCCTTT
+TACTGCTAGGTTTACAACAGGGCACTCCTGTAGGTCTCCCTTTTTCACCA
+GCATGTACTGTGGGTCTCTGAGCAGTGGACTGGCTGTTGAGCCCCTTTGG
+TTGTCTTTGCAGGTCTGCTGGTTCCCATGGAAGAAGAGCACTCCACTCTC
+CTGATCCTCGGTGTGTTGCTCACATTGAGGTGTCTAGTGCCCTTGCTCCA
+GCAGCAGGTCAAGGACACAAGTCTAAAAGGCAGCTTTGGGGTGACACGGA
+AAGAAATGGAAGTCTCTCCTTCTACAGAGCAGCTTGTCCAGGTAAGGGTG
+AATAGTGATAAGTTCATGTGGGACATGAAAGAAGTAGCATCTTTCCGCAA
+GTGCTGGGACAGAGGAAGTAGCTGGGAGATGGTGTGTTCCTTTTGCTGCT
+GAGGAGTCAGGAGATGTGTGTCCACAGATCAGGTATGAGTTGTTTGCTTA
+AAACAGGGAGCACACATGTTTTCCACAAAGGGCCAGAGTGTATGTGTGTT
+AAGCTTTGCATTCCAACTATCTTCACTGAACTCCCCCAGTGATGTAGTTT
+GAGCCACAAATAGCCCATAAATGTGGCCATATTCCAGTGAGACTTCATCC
+ACAGAAGCAGGCACACAATGAAGGTACAGTTTTGTATGCCTGTGACCCCA
+GTACTTGGGGTGTCAAGAGAAAAGTACAAGTTCTGCAAATGCCTGGCCTA
+TGTAGGAACCCCAAGCTCTCTGTAGCTGTACGATGAGACTAGATCTCAAA
+AAAGCCAAAATGGGGAGTAGAAAGCCAGATGTGGCTGTGGCTAGCAGTTT
+GCCAGTCAGGATTTAGGGGCATGCATATGCATACAGGTTGCGTGAGAAGA
+GCTAAAGCTAAGCCTTAAGGCAGCTTCCTGGGAGGCTTTCGCTCTTCCTT
+TTTTATTCTACACCAACCTTTAAAAAATAAAATGCATGGTTTTGGTTTTT
+TTTATTGTACATTGGTGTTTGGCCTGCACATATATCTTTTTGAGGGAGTT
+GGATCTACTGGAAGTTGATTTACAGACTGTTGTAAACTGCCATGTGGGTG
+CTGGGTTCCTTTGCAAAAGCAGCTCTTAACTGCTGAGCCATCTCTCTAGC
+CTGCATTTGTTTATTTTTTGCTTTTATCTTACCAACTAATGCTAGGGTTG
+GCAAACTTTGCAAAGTAAAAATATAGAGTGCCTGTGAAGCATTGATTTTA
+TATAGTGATTGTATAAGAATGGTTAAAATTGTCCAGGATATAATTATTTA
+TATTGCAAAATTATGTGTTATCTGAAATCAAGGTTTAAACTTGTGGGCTT
+TTTTCCCCTGGTAAATTTAAAGAAAAAACTAACAAACTCATTCTTTCTAT
+AGTATGGTATAGTATTAAAAACACCAAAAAATTTTGACTGCCATCCTTAA
+CATGTGTGGCTATTTTCCCCCTGGCATTCAGAGCTGTGTTTCTGATGATC
+GGATGTCCCCACTTGCTTCCATAGCAGTGTCCATTGGGATTATTGTCTTT
+TCTGTTCATCAGTTTTGGGAAATGAAGATCCTGAGTTTGCTTACTGGTGT
+TCTAGAGGAAGTGCTCTATGTATTTCCAAGGAGTTACTATAAATGAAAAT
+TAAAAACCATAGGAATTCAGAAAATAGCACAGACAATAATAACCCTACCT
+ATGGAAGTAATAGGTCTTTACAGGGAAAAACTAAGGCACAATTTTGTTGA
+CAAAGACCAGTGAAAACAAAAGTGAAATCTGGGATGCTTATGTATTTATT
+AATTTTGTTTTTGTTTTTAAGCATTTTAAGATTTATTTATTTTCTGTGTG
+TGCTCTGTCTGCATGTACACCTCTGTGCCAGAAGAGGGCATCAGATCCCA
+TTGTAGATGGTTGTGAGCCACCATGTGGTTGCTGGAAATTGAACTCAGAA
+CCTCTGGAAGCACCAGGGCAGCCTGGATTGCAGAATGAGACCTTGTCTCA
+ACAAAAGAACTAAAAACTTCCAATTCACTAAACCAGCAAATGCATTCTTT
+TCTATGACCTGATTAGCGCTTAGCTGATGATGGATGTTGTCTTTGTTGGC
+AGCTGGGGCTGAGTGACCCATCTCCTTCACCCCCCTGTCATTCCAGCACC
+TGCTTTCTCTTAACCGCTGAGCCATCTCTCCCGCCCTGGGATGCATTTTA
+AACATGATGTAAGACCTGTGTTTCTGCTCCTAGGTTTATGAACTGACTTT
+GCATCATACTCAGCACCAAGACCACAATGTGGTGACAGGGGCACTGGAGC
+TCCTGCAGCAGCTCTTCCGTACCCCTCCACCTGAACTCCTGCAAGCACTG
+ACCACACCAGGAGGGCTTGGGCAGCTCACTCTGGTTCAAGAAGAGGCCCG
+GGGCCGAGGCCGCAGCGGGAGCATCGTGGAGCTTTTAGGTGTGTTCTCAG
+CAAGGTCTTCTAACCATTGTGCATGGAGGCATGTTTCCTTCTGTTGCTTT
+ATGGGGCTGTACTGCGCTGAGCTACCCATGCCGAAATTCCTTGCCCAAGC
+TTACAATGTAGGCGTCTTGCTGCTTTTGCAAATAAATCTACAGTTTAGAA
+AGCTAGATGACACAATGAGGCCACACCTTTAAAGCTTGGTCTCCTGCCTT
+TCTGGCTTGTCACCTCCATTTTGGATGCAGTGAAATAGAAATATTAGGCA
+GTTTCCAGGACTCTCATGTTTGATTGTCAGGGATGAATAGATTTTTATGT
+CTTTTTTTGGGAATTTAGTGTTCTTTTTCTACTTGGATCCTGACTTTAGA
+GAACCCTTTCTATTCCTCATCCTTGAAGATACCTCTTTAACCTGGTCTCG
+TCTTTTTGATGCTCAAAGAGTTTGATCCATAGACTAGGCATTGGCAGCCT
+GACCTGTCTGACATGAGCTAGTCTTAGATGGTGGGACAGATAGGAATCTG
+GGCTTGCCAGCCTTTAGAAGTGACCTGGCATTTAGCAGGCTGTGACAAAT
+TCTGCTGACCCTGACTTATCATGGCTTGCCACAGTATATACATTGAGGAG
+CCATATTTATTATAGCTACACATTAGAGACAGTCTGCCTGGGAAATACTA
+TTGTGACCTTGTGCGCTTAAAAATTTGCCTGGACTATGAGCAGAAACTGT
+TTTACTGCTGTCCTTGTTAAAGAATTTTTATTTTTGTGGAAAGTATGTCA
+TACACCCTGGTAACTGTTTCCAATGAAAGCTTATGTCTGGCCTATGCTTG
+TCCAATAATGTGAGATCTTACAGTTTTAATTTGGCTTTTAAAGAGCAGTT
+TATATGAGCTTTTTTGACATTCTAGTCATATCTTTAAAACTGTGTATTTG
+AACATGAGTGTAATTTTCACCTTTAAAGTGTGACACTGTGGTGTTTAAAC
+ATGTCCTATGGAAATATGTCCACATTGTCTGTTTTAGGATTTGAGTTAAG
+CTTTTTGAGGATTTTTGAATTTCTTGCAGATTTTAGCAGCTTGTAATCTT
+ACTTTCTTGTTACTTTCTATGATTTACAGCTGGAGGGGGTTCCTCGTGCA
+GCCCTGTCCTCTCAAGAAAGCAGAAAGGTGATTATCTCAAAATCTGAGTC
+TTGTGTTGAGTTGAACTGCTGTTTCTGTGTTTGCATAATGCACTAGATTC
+TGCTTATATTTCCTCTCAGGAGATGAAGTGTATGGATATTGCTGGAATCT
+GACATTTTCTGCTGTTTAAAAATTGTTTATATCACATTATGTCTAATGTT
+CGAGGTCAAAGGTCAGCAAACTCTATAAGGGACCAGAGAACAAATATTTT
+AAACATGCAGGCTATAGACTCTTTTTTTGTTTTGTTTTTTGTTTTTTTGG
+ATTTTTGAGACAGGGTTTCTCTGTGTAGCCCTGGCCGTCCTGGAACTCAC
+TCTGTAGACCAGGCTGGCATCGAACTCAGAAATCCACCTGCCTCTGCCTC
+CCAAGTGCTGGGATTAAAGGCGTGCTCCACCACCACCCAGCCATGCTATA
+GACTCTTGCAGCTGTTTCCCTGGCTTGTGACTGCTGAAGAACAATGGTAT
+AGAAACTGCTGTGTCTAGCTGTTTTCCACTAAAATCTTAAAGATGCAGCT
+GGGCTCAGCATCACTTTAGAGAGTGCACCACTATACTTTAGAAAAGTAAC
+TTCTGTTTTTTGCTTGTTTCTTTTTAAACATTTATTTATTTGTTTTATTT
+ATGTGAGTACACTGTCGCTGTCTTCGGACACCAGAAGAGGGCATGCGATT
+CCCATTACAGATGGTTGTGATTCCCATGTGGTTGCTGGGAATTGAACTCA
+GGACCTCTGGAAGAGCAGCCAGTGCTCTCTCCAACCTAACTTCTCTTCTT
+GATGAATTTTATTATTAGGCATATTAAATAGTTTATTTTGTTTGGTTCTT
+CATTTTGTTCATGCCTAGTTATCAGTTTTCAGACATATTTAACTTCTTGC
+ATATGTGTTTTCTGACCTATTTTTATTCCAGAGTTTCTGATATGACCTGA
+TAGTTTTATATACTTGGTCACTTTGCAGCAGCTGAAATTTCATTTTATAT
+TATGAATTTCTGTGGAAAAGAATTGCTGTCATCTTTATTTTTAAAATCTT
+AAAAGATGAGTTTGTTTTCTTTGTCATAATTTGAGCATTTAAAACTTAAA
+GCTCAGTATTATTTGCTATGTTAAGTGAGGGTTTGTGTGTTTTTCCTTTT
+TTTAAGTTTTGAGACGGGGCCTTACTGTGTAGTGTGAGCTGGCCTTGAAC
+TCAGTATCTCTTCCTGTACTTCCCACTTGCTAGGATGACAGTACACCTCA
+CTTTTGAGTAAGTTCTTCCCAGGAAAAACATTGTAGTTGCCATTGAATTA
+AGAGAACATTTACTTGAAGAATTTGGGAGCTAGATGTTACCCGGAGGCTG
+AGACAGAAGTCTTTTGGGCAACTGGGAAGACCTTGCCTCTTAAAGAGAAA
+GCCGAATGTTTGATCCATTGCTGTAAGAAATACTGATTTTAGAAAGCATT
+GCCAATGTTTAAAGGAGAGTAGAATTCTAAGAAATATTACTCTCTATTCT
+TGATCTAGGAGAGATCACTGGGCACTTGGTAAAATCACTTTGATAATTTA
+CTCCACAGTCACTTTGTCCAGAGATGGGGACAAAGGTGATGTTATTGAGA
+TAAGTTCTCATCTTTACTATTTCCTGAATCTCCTGATGATTTTTTATTTA
+GACTGGTGATTTTAAAACTTTTTTTGTTATAATGAAGATTCTGTTTTTTT
+CAAGTGTTGTGTTGGCTAGTTTTTGTCAACTTGATACAAGGTAGAGTCAT
+TTTGGAAGTAGGAACCTTAGTTGAAAAAAATGTCCTCATTAGAAAGGTCT
+GTGGACAAACCTGTTATGCATTTTCTTGATTGATGATATAGAAAGGCCCG
+GCTTAAACTCTGGGCACTGCCACCCTTGGGCTGTTGGTTCTGGGTTCTAT
+AAGAAAGCCAACTGAGCAAGCCAGTAAGCAGCATTCTCCAAGGTCTCTGC
+TTCAGTTCCTGCCTCCAGGTCCCTTCCCCAATTCCCCTCAGTTTTTTCCC
+AAGTTACTTTTGGTCTTGGAGTTTTATCACAGCTATAGAAGCCCTAACTA
+AGACAAGTGTAATCTGCCAGATATAAGATAGATAAAAACAGAGTTGTGGA
+GCACATAGACCTCAGCAAAGGTCAAAGGGGGCCAGGAGTCTCTTCTCTGT
+TGGCAGCCCCTCTCCTTTTTCCATCTTCATGGGATCTGCCCTGGGGGAAC
+TTTTCCCAGATCATAATCAGCCACTGACTTGGAGAGTAGAAACTGCTTCA
+TTAAAATTCAAATTCACTGTTCTTGAGTTTTATTTGATTATTTTAAACCA
+CATGTTTTGTTAATAAAAGGTTCTGTTTGTATTTATGTCTAGTTGCTGTG
+TTGATTTTTGCATAGATTTGTGTTCTCTTTGCTAATTAGCTTGTGCCTTT
+AATGTTATATCATGTAATTTCATGGAAAGTATCACAGCTCTTATTACTTG
+AAGAACAGTAACATGAGAAGCTAACAGCTAGATAGTATCTGGTTTAGTTT
+TCCCGTGTATGAGAATATACCTGAAGTGAATAACTTCAAGGGAAAGATTT
+ACTTTGGCCACTGTTTCAGAGGTCTTGGTCTGTCACAGCAGGGCAGATTT
+GTTCGAGCAGCAACCAGAAGGCAGGTTGTTATACCTGTGTTGGTGTGCTT
+CCTCTAAGTTTTTTATTTCAACATGAGAACAATGCTACCCACACCAAGCA
+CCAGTCTTCCCTTTTAGTTAACCCTCTCTGAAAATTCCCTAAGTATATCT
+TCTTTCAATCAAGCTGACAAGTTCAAAGTGTAACTTGATGTCAATTAATG
+TTTATATATAATGTAACTGTAAAGATATTAAATCTGATTTTTCTTCCTAA
+TATAACTATATAAGCTATAAGGTATATTTCTAAAATTCTACTAGGAAATA
+TTTTGTCTTTTCAGATTTTTAGCTACTGTGTAGACTAAAAAGATAATAAA
+ATGAAAGTGACTTATTTATATGTTGGAGTTTGACATACAACTTCGTATTT
+GCCATGGATATTCCATTAGAACATGGATATCCCAAGGCCTGACTGATAGA
+ATTGGACCTTTTCAGTCATAAGCTACTCATTCATTTATTAACTGGTAGTA
+AATTATTTAACTACAACAGTAATCTAAATCAATAAAAAGTTATTATGTGG
+TATAGTTCAATAGTAATTACTTCTGCCTCTTAATTGGTTTTACAGTATTC
+TAAAAGTTACTCTTTTATCCATCCTTTAACATTGTAGTAATATTTAATTT
+ATGATGGATAAATTGTACTATGGTAAATTAAATATGCCTGTGATTTTCAA
+TTCAGAATATATGCTTATTTGATTTTTGTCTTTTGAGATAGAGCCATTAT
+ATAGCTTTGGTTTGGACTAAAACTCACTGTGTAGATGGGGCTGACCTCAC
+GCTCATAGAAACTGGCATGCTTTCTATCTCCTGAGTGTTGGGAGTAAAAG
+TGTGCACCATCATGCCTGACTATTTATTCAAGCTAAAAAAACCCAACATT
+ATTTTTAGCAAAACTAAAAAGGAATATTGCTGTATTATTTACTAGGCAAA
+GTGCTCTTAGGAGAGGAAGAAGCCTTGGAAGATGACTCGGAGTCCAGGTC
+AGATGTCAGCAGCTCAGCCTTTGCAGGTACTCGGTGGCAGCCATGAGCTG
+CCAGTGTCAGCCTCTAGTTATTATCCGCCATCTCGTGCTCCTTTCAGCAC
+CTCAGCCTGCACACAGCATTGCGAGCAGCTTTTATAATTCAGCTGCTTTT
+ATAATGTTCACTCTAAATGTGTTTGGCTATGTGCTTTTCTTGTTTTAGGC
+TATTCAAATATTGATTTATTATCCTTGAGCATATCCTTTTGGAGTGGATG
+ATAGATGGAGTTGTCTCCTGAATTAAATGGTCTATGATCAGGACAGTGGG
+TTGAAGAACTGTCTGGGTAATTTAACTTGAAAGGATATATTTTTGCTCAC
+AAGTGGTTACATAAGATTCTTTGTGTTTTCTATAAAACACAGGTATTATT
+TTAAGACACTAATAAATAGATAAAATGCAAACAGCTTTAGTTATGTTTGC
+TGTTAGGTAAATAAAGATAAGACAAAGATCCTTGGGGAGGAAACCTGAAT
+AATGTCAATGGATTTTCCCTGCTTACAAGATAAACAAGTCACAGGACAGA
+AATTTTGGGCTGCCTAGACTAGTGTAAAACTCAAGTGCCTCTGACCCAGT
+TTCCTTCATCACAAGCCACTGCTCATACCTTGTTCATGGTTTTGAAGTGA
+TTTTGTTTTTATATTTAATGTTTTGTTTTATAAGACAAACCATCCTGTCA
+GCATTCTGAAAGCTCGCTTTTATTGGTATAATCTCAGTGTTCCCAACCAT
+CAGAACCTTTCATTCCCTGCAATGTAAATCAACCCCCCCCTTTTTTTTGC
+AAGTTCTTCCAAACGTCTAAGGATGAATAAATGTTACATACTGGATTTTA
+CTATTATAGAATGGCACTGAAGTGACTTTGATCTCACATAGTGTTTGTAA
+GAGGGAATTTCAAAATTAAACTAGGAAAAGATGGAGTGTGTTTATCCTAG
+AGGAGGTTAGGATTGGAGTGGAGATGAACAACACGACTTGGAAGAAAGCA
+AGCTGATCCTAAAGGGTACTGCGCTCACTAGTGTTCTGTTGTTGCCTATG
+TAGAGTTTCTGGAAGTCTGCTTGTCCCTGCCCCAGCTGCTTGTCCCTGCC
+CCAGCTGCTTCTCAGCAACACACATTCTATGTGTGGCTTTAGAGATGCAG
+TAAGAGCTTCAGCTTGAAAATATTCACAGCCATGAAGAATTCACTTGTTC
+ACCCAGCTGAACTGTGCTCCTTGACTTTTTCTTCACTATGCTCCAAGCTG
+TTTAATAGTTAGAACATTCAATACAGTGAACTTTTTGTCATTTTGCACAG
+TTGGATTCCTTAGCTACAGTTTCCTCTGGCCATTTGACAACTGAGTTTCT
+CTGTGTCTCTAGCCTCTGTGAAGAGTGAGATTGGTGGAGAGCTCGCTGCT
+TCTTCAGGTGTTTCCACTCCTGGTTCTGTTGGTCACGACATCATCACTGA
+GCAGCCTAGATCCCAGCACACACTTCAAGCAGACTCTGTGGATTTGTCCG
+GCTGTGACCTGACCAGTGCTGCTACTGATGGGGATGAGGAGGACATCTTG
+AGCCACAGCTCCAGCCAGTTCAGTGCTGTCCCATCCGACCCTGCCATGGA
+CCTGAATGATGGGACCCAGGCCTCCTCACCCATCAGTGACAGTTCTCAGA
+CCACCACTGAAGGACCTGATTCAGCTGTGACTCCTTCGGACAGTTCTGAA
+ATTGTGAGTGGGCAGAGGGTGCCCTGGTTCTTTTGTCTTCTGAGCTTATT
+CTTGGATGCCCACACTTGGACCCTCCTGCTCATTTTTTCTGTGTTACTAC
+ACATAATAGTAAGAGGCCCCCAGCTCAGATGGTTAACAGAGAGCCTTGTT
+GGATGTCTTCACTGTAGAAATTGCCTAGTATCATTTGTATTGAGCCATGG
+AGATTAAAGTGAGGTTACTTATATGCACCTTGTACACATGATATATTTTT
+AATACCTGATTAGGCCTGTTTAAATAACTACTTTCAATTTTTCAAGGAGC
+TTGTTATTGAAAGTATCTGTGGTCTTAATGTGGGTGGTGATATTAGTACT
+CTGTATTATTTTTAGCACTTTTTGACCTCTCAATGTACTTATACCACATT
+CCATTTTAAAGTAGGATGTGCATATTTCTATCCCTGTGATGTCTGAGTTC
+ATAGACAGGAATCACCTTAAAGATTATATAATCAGAAAGTTTGGTGCAAG
+TGTGTGCTGAATTGTGGGGTATTTTTTGTTTGTTTGTTTGTGTGTTTTGT
+TTTTTTAAACTTGCTTGTCACTTTGTTTTTTTGTTTTATATTTCTGAAAC
+AGGGTCCTAGCCCAGGCTGACCTTAAATTTGAGATCTGCCTGCTTAAGCT
+TTGGACTCTTGTGAATGTGGGCATGAACCACACTTGGCCTGCATTCTAAA
+TAGTCATTTTCTTCTCCTCTTCCTCTTTCTCTTCTTAGTGTAGTAAAATA
+GCAAAATTATCTCATAGATCATTCCTACAGTTAAGTGGTATATTAATCAC
+CACCATACACCTCCATTAAGTCTTCATCTTCTGAAACCTGACCTTCTGTA
+AAGGCTGTGCCCTCCTGGAAGCCAGTGGTCTGTTTTTTATAGTACAAGTT
+TAAGGACTGTAGGTCCTTCATGCAGTATGTTTATCATGAAGTATTATCCT
+TCTATGGCTGACTTACTTAACATAATGCCTCCATGTAGCATGTGTGAGAA
+TTTTCATTTTTTAAGGGATGATTAATATTCCATTGCATGGATAGAACTAC
+ATTTTGATTATTGTCTCATCTGTTAGAAAACATGTGGGTTACTCTCACAT
+CTTGACAATTATGGATAATGTCACAATTATGAATAATAGGTCTACTAAGT
+ATTTCAAAGACTCTGTTTTCAATTCTTTTGGCTATACACCTAAAAGTAGA
+ATAGTTTCTACATCCAGCTTTAAGTAATGTAATTAAATGCTTAGCTACTA
+TAGAATATGCATATATCTTGATATATATGTACTATAGAATATAATAGTCT
+ATATAGAATATACATATATTACATAATATATAACCTATATATATTCTATA
+TAGAGCCTATACATAGGTCTTTTTGAGACAGGGTTTCTTTGTATAATAGC
+CCTGAGTGTCCTCTACCTACTTTGTAGACCAGGCTGGTTGAACTCAAAGA
+GATCCACCTGCATCTCCCTCCCAAATACTGGGATTAAAGGGGTGAGCTAT
+CACACCCAGCCTAGAATATTAAAAAAAAAAAAAAAAAAAAAAAAAGCTGG
+TCTTTGTGACCACACACTTTTGTAATCCCAGCTCTGGGAAGGTAGAAACA
+CCCAAATGGGTGAGCTCCAGGTTCACTGAGAGACCTTGACTCAAAAATAC
+CATAAAGAACAACTGAGGAAAACACCTGACATTGACCTCTCACCTCCACA
+CCCATGCTTACAAATATGCATATACCCACTGTTTGCTTGTGTTACCACTC
+CACCTCAAACCTCCTCCCTCCACCCCCCATACACATTCCACATACACAGT
+CTTAGTTCGGATTTTATTGTTGTGAAGAGACACCATGACAAAGCCAACTC
+TTTTTGTTTGTTTGTTTGTTTGTTTGTTGTTGTTTTGTTTTGTTTTTGTT
+TTTCGAGGCAAGGTTTCTCTGTATAGCCCTGGCTGTCCTGGAACTCACTT
+TGTAGACCAGGCTGGCCTCGAACTCAGAAATCCGCCTGCCTCTGCCTCCC
+AAGTGCTGGGATTAAAGGCGTGCCTGGCTACAACAAAGGCAACTCTTAAG
+AAGGAAACATAGGGCTGGAGAGATGGCTCAGCAGTTAAGAGCACTGACTG
+CTCTTCCAGAGATCCTGAGTTCAAATCCCAGCAACCACATGGTTGCTCAC
+AACCATCTGTAATGAGATCTGACACCCTCTTCTTGGGTGTCTGAAGACAG
+CTACAGTGTTCTTTAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGGAAACA
+TAATTGGGGCTGGCTTACAGTTTCAGAGGTTCAGTCTATTATCATCATGG
+CAGGAAGCAAGGCACCATGCAGAGCAAGTGGTGTCTCTTAAGTTCTGGGC
+TACCTTGATCTACAATTGAGTTCCAGGACACCCATTGGTACACAGATAAA
+CCCTGTGCCTGTGTCAAACCAAACCAAACCAAACCAACCAAACAAAAGAG
+GTTGTTAGGTCACATACACGTTAAAGATGTCCTAAGAGGTTTTAACTATA
+GGCTGCAGTTCTATCTTTGAGTTCTCACTGGGTTTACTTTGCTGTTCTTT
+CCAGCAACTTTTTTACCACAGAATCTGTCTGGGCATGGGAAGTATATATA
+AATTTAATGCAGATAACCTATTGTTAGACTTATCTGGAAGCCTTGTCTTT
+TTTTCTAACTTATTGTACAGTTTGTTCAGGAAGACAAGGTTTTAAAAATA
+TTAACTCATTGAGAATTGCATGCTTGTATTTTGAGGTTTACCCCTCATCT
+TTCTGACTAACTACTCCCAGATCTACTTTCCACTTCTCTTTCCAACCTTA
+TGTCTTTTTTTTTCAATTTTTTTATTAGCTATTTTCTTCATTTACATTTC
+AAATACTATCCCGAAAGTCCCCTATACCCTCCCCCCACCCAGCTCCCCTA
+CCCACCCACTCCCACTTCTTGACCCTGGCGTTCCCCTGTACTGGGGCATA
+TAACGTTTGTAAGACCAAGTGGCCTCTCTTCCCAATGATGGCCGACTAGG
+CCATCTTCTGCTACATATGCAGCTAGAGACACGAGCTCTGGGGTACTGAC
+TAGTTCATATTGTTGTTCCACCTATAGGGTTGCAGACCCTTTCAGCTTCT
+TGGGTACTTTCTCTAACTCCTCCATTGGGAGCCTTGTGTTCCATCTTATA
+GCTGACTGTGAGCATCCACTTCTGTATTTGCCAGGTACTGGCATAGCCTC
+ACAAGAGACAGCCATATCTGGGTCCTTTCAGCAAAATCTTGCTGGCGTAT
+GCAATAGTATCTGAGTTTGGTGGCTGATTATGGAATGGATCCCTGGGTGG
+GGTAGTCTCTGGATGGTCCATTCTTTTGTCTTAGCTTCAAACTTTGTCTC
+TGTACTTTTGTAATATCAAAAATGTTTACAAACAGAAATTTCTTTACGTT
+TTCTAGAGCTATAAAAGGTTGGTATGACCTTCTCCTGGGGGAGACAAACA
+AATATCTGATTACCACAGATAGGATACCAGTGACAGACCAAAGTAATGAT
+TCCACCTAAGTCTAGTTTGACAAGCCAGTTAGTTTATTTAACACTACTTC
+AAAGGAACACAAGCCACGGCTACCCACCAGGGCATGCGCAACTTATAAAC
+ATCTATACCATTGAAGAGTATGTTTATCCCAGCGATCATTAACCACTTAT
+ATATCCTTAGGAAGGAGCAGGGTTCCACAAGCCTATCCCCAGAATGTTAC
+TTCCTATCTAGTGCAGGCCTTGTCCAGGTGGCATCCCACAGCAAAGCTTT
+TCTTGCCATAGTAGTGAAGAGCTCTTGCTTGTTGCTTTTAACACATGCAT
+TTACCTGTGGCCACTGACTAGGTAATTGCCCTTTGCATTCTGTATGTGTT
+ACTGATGCAACATGGTCTTTGCATTCTGCGTGCTCAGCTCTGTTGGTGGC
+TTTTCCTTCATGTTGAAGGGCTTTCCCCTGACAGTCCCCCTTTATCTGTA
+CAGGTGTTAGATGGTGCCGATAGCCAGTATTTAGGCATGCAGATAGGACA
+GCCACAGGAGGACGATGAGGAGGGAGCTGCAGGTGTTCTTTCTGGTGAAG
+TCTCAGATGTTTTCAGAAACTCTTCTCTGGGTAAGCTCTTATATGATGGA
+AATGTTTTTAGCCTTAGACATCTTTATCTTTTCTTGTTTGTTTGTTTGTT
+TTTGTTTGTCCAGACGGAGTTTCTCTTTGCAGCATTAGCTGTCCTGGAAC
+TTACTCTATAGACCAAGCTGGTCTCAAACTTGGAAGATCTTTATGCCTCT
+GTCATCCAAATGCTGGGATTAAAGGCATGAACCACCACTAGCCAGCGAGA
+CTTTTTTATCTTTATTTCAAAAAGAAACCTTTTGGTATCATTATTTTTTA
+AATTGAAAATGGCATTAATTTTCATTTCAATTCAAAATGAAAATGGCAAT
+TTAGGTATAATCAGACTGATTTAAATTGGTACTTGTATATTATCTCTATA
+TAAATATATACATATTTTGATTGGACTTGTCACTTATTTATGATTTCTAT
+TTTTAAAGCCCTTCAACAGGCACACTTGTTGGAAAGAATGGGCCATAGCA
+GGCAGCCTTCCGACAGCAGTATAGATAAGTATGTAACAAGAGATGAGGTT
+GCTGAAGCCAGTGATCCAGAAAGCAAGGTGAGCTTCATAGGAAGGAACAG
+CTTGTGTGTGAGGGGTTGGAATTGTTCTGGCTTTTGCCAATTCCATTTGT
+TCCTAGCCCATCTCTGGCTTATTTCTTTCCCCTAGAAACACTGGACACTC
+CCAGGCCTTGTCTGTTTATGCCTCACCAGGGATACCCAAACTCTTAACAG
+TTGCATTAGTTCTGCCTCCAGAGACTCCTCCCTACACAGATGCACCCTGT
+GCTGAGCTCCACCCTGCCTCATTTGAGGCTTGTGCAGAGCTTTGCAGCAG
+TTCTTGTTTTTGCTCACTCAACTGATTAAAACACCTCTCTCTTTCTCTGT
+TTGCTTTACAAGCATATTACATATAAATTTACAGTTAACTTAAATTCCTT
+AAGGTCAGGAATATGTTTTGTCATTTAATTGTATCATCAAAATTACTTCC
+TTTGAGAGTGCTCTAGGTTCTTTTATATCCTTTCGACCTTTTTTTTTTTC
+AGACAGGGTCTCACTATATAGACCTGGCTGGACTAGAACTCATAGTTTTC
+AGTAGACAATTGTGCCTTGCCAATGTGAACAGCCTTTCCTGAATGCTATG
+TTTTAGCACCTTCCTGTCTAGTGAACCTTCTCCATACTAAGCTTGTCTCT
+TGCTGCATCCTACGGCCCTGTGTTTTAGGGTTCCAGTTACGTTCTGTTTG
+AGATCAGCTATGGGGGTGGCCAAGCATAGGCATCTCTGTGTCTAGCACCC
+TGATGTGGATTACTTTCGTGACTGAACTAGTGAATCAAATGTTTACTTCT
+CTGTTCTAGCCTTGCCGAATCAAAGGTGACATAGGACAGCCTAATGATGA
+TGATTCTGCTCCTCTGGTACATTGTGTCCGTCTTTTATCTGCTTCCTTTT
+TGTTAACTGGTGAAAAGAAAGGTAAGCATAGCAGAGTAGGTACAGAGTTG
+AGGGGACACTTACAGGTTCAGGAGTCAGTTTGTTGGTCTGTTGGTGTCTG
+GTTATTGGGGTCGTTTACTTTCCATTTCTGCTGTCAGAGGGAGGGAATGA
+GAGGGTGAGTTTTGTTCCTTGGAAAAGGCTAAAGGGGCCTGGGTGGTTCC
+TTTGCAGCACTGGTTCCAGACAGAGACGTGAGAGTCAGTGTGAAGGCCCT
+GGCCCTCAGCTGCATTGGTGCGGCTGTGGCCCTTCATCCAGAGTCGTTCT
+TCAGCAGACTGTACAAAGTACCTCTTAATACCACGGAAAGTACTGGTATG
+TTACAATTCACTTTTTTTCACCAGCTAATTTGTACTTAAGCTATCTCACA
+GTCTTGCCTTCTTTTGTCTTAGAGTAGTGTTTCTAGGTAGCTTATATGTT
+TCAGCTGTGTTAGAACTCTCCAGGTGTGCATATGGTCATGTTCTTAGTCC
+CATGACTCCCCTATGTGTAGTTACATACAAATGACTACATAAGTCATAAA
+GTAGAAATCTGGAAATGTGAAAGTTATTTATACACATACTTATTTCTTCT
+TAAATCATAAGCTGCATTGTTAAATTGCTCTGGGCCACATCCATATTAAA
+TTGCTCTGGGCCACATCCATGTTCAGGCTAAGCCTGGTCAGGACAGCACC
+GTGACACTGGGTCTGCTTTTGGACATAGTTGTTCTGGAGTAGAAAGTATC
+TGCCACCTTCTTTCTTTCTTTGAGTGATAAGGCAAATCTGTGTTATTGAG
+TTTAAAGATAACAAATATATAATAGATTGATTGTTTCTGATCTTTATTTT
+CAAAAGAAATGTCAGAAGCTTGATTTTATTTTTTAATTTTTTTTTTTTTA
+TTTTTGGCTTTTCGAGACAGGGTTTCTCTGTGTAGCCCTGGCTGTCCTGG
+AACTCACTTTGTAGACCAGGCTGGCCTTGAACTCAGAAATCCACCTGCCT
+CTGCCTCCCGAGTGCTGGGATTAAAGGCGTGCGCCACCACGCCTGGCTTT
+TTTTTTAATCTGACAACCTGGAATCACAATTTAGAAATCCTGACGTATTG
+AGATAATTTCAACGTGAGAACTCTAGAAACTAAATCCCAAACATCTTTTA
+CTACTTAGGAAATTATAGTCAGGTCCTTTGCAAATGTTCCCTTAAGCTTG
+CTTCATTTGTATATAAATTTGATGATGGAAGAAGGTACAGCTGGGCCATT
+TATGTCCGCAGAGGAACAGTATGTTTCTGACATCTTGAACTACATCGATC
+ATGGAGACCCACAGGTCCGAGGAGCTACTGCCATTCTCTGTGGGACCCTT
+GTCTACTCCATCCTCAGTAGGTCCCGTCTCCGTGTTGGTGACTGGCTGGG
+CAACATCAGAACCCTGACAGGTAACGGGACAGTTTGCTCTGGTGTCTTTT
+CTGGATACTCTGCCCATGTTCATGTTTCTATAGAGATATTTCTTGCTCAT
+TTTTCTGGTTAGGAAATACATTTTCTCTGGTGGACTGCATTCCTTTACTG
+CAGAAAACGTTGAAGGATGAATCTTCTGTTACTTGCAAGTTGGCTTGTAC
+AGCTGTGAGGGTGAGTACAATGCTTTACATAAACTGTTCCTTGCCTTAGT
+GAGCTTACCATTGATACAGTTAAATTTGGAGCTTAATAGGTCACATTTCC
+GTAAGTTGTAAACAGTTCTTTTCCGAAATTTACCACTCAGCCTTTGAAAA
+AACGTTGCCATCATATTAAAATTCATTAAAACTTTTAATTCTTGGACTCC
+TTATTTGAAACGTTCTTTTCTCTAAAGATAGTGTTTAGAAATATACCTTT
+GCTATTTTGAAATATAAAGTTTGTTGAATAATTACAATTACTGTTTTAAG
+ATACTAGAATGTTGAGCTGCAATGAAATTATGGGTGTTATTTAACTGGGC
+CTTTACTAAAAGAGCCTTGATTCCTCAAGTGACAGTAAGGTGAAACATTT
+CCTATTAGCTGCATCATAAGTCACAATTGGGCATTCAGTAGCAGAAAATT
+TAACTAGAGAAAATC
+>mm10_knownGene_uc008xdc.2 range=chr5:34761740-34912521 5'pad=0 3'pad=0 strand=+ repeatMasking=none
+GCACTCGCCGCGAGGGTTGCCGGGACGGGCCCAAGATGGCTGAGCGCCTT
+GGTTCCGCTTCTGCCTGCCGCGCAGAGCCCCATTCATTGCCTTGCTGCTA
+AGTGGCGCCGCGTAGTGCCAGTAGGCTCCAAGTCTTCAGGGTCTGTCCCA
+TCGGGCAGGAAGCCGTCATGGCAACCCTGGAAAAGCTGATGAAGGCTTTC
+GAGTCGCTCAAGTCGTTTCAGCAGCAACAGCAGCAGCAGCCACCGCCGCA
+GGCGCCGCCGCCACCGCCGCCGCCGCCTCCGCCTCAACCCCCTCAGCCGC
+CGCCTCAGGGGCAGCCGCCGCCGCCACCACCGCCGCTGCCAGGTCCGGCA
+GAGGAACCGCTGCACCGACCGTGAGTCCGGGCGCCGCAGCTCCCGCCCGG
+GCCCCGCGCCCCTGGCCTGCGTGCTGGGCATGGCCAACACTGTTCCCTGT
+CCAGAGGGTCGCGGTACCTCCCTGAGGCCAGGCTTTCCCGGCCCGGGCCC
+TCGTCTTGCGGGGTCTCTGGCCTCCCTCAGAGGAGACAGAGCCGGGTCAG
+GCCAGCCAGGGACTCGCTGAGGGGCGTCACGACTCCAGTGCCTTCGCCGT
+TCCCAGTTTGCGAAGTTAGGGAACGAACTTGTTTCTCTCTTCTGGAGAAA
+CTGGGGCGGTGGCGCACATGACTGTTGTGAAGAGAACTTGGAGAGGCAGA
+GATCTCTAGGGTTACCTCCTCATCAGGCCTAAGAGCTGGGAGTGCAGGAC
+AGCGTGAGAGATGTGCGGGTAGTGGATGACATAATGCTTTTAGGAGGTCT
+CGGCGGGAGTGCTGAGGGCGGGGGAGTGTGAACGCATCCAATGGGATATT
+CTTTTTCCAAGTGACACTTGAAGCAGCCTGTGACTCGAGGCACTTCGTAC
+TCTCCTGGCGTTTCATTTAGTTTGTGGTGTAGTGTAGTTAAACCAGGTTT
+TAAGCATAGCCAGAGAGGTGTGCTTCTGTGTGTCTGCAGGCAGTTGGATG
+AGTTGTATTTGTCAAGTACATGGTGAGTTACTTAGGTGTGATTATTAATA
+AAAAACTATATGTGTGCATATATATGAAAGAGTCGACTTATACTTAACTG
+CCTATCGATTTTTTGTTCTATATAAAACGGATACATTGGTGGTGCTCAGT
+TTTCACCGGGGAATGAATTTTACTAGTGTTGCAGACAGGCTTGTTTTAGA
+ACATAGGCCACTCTGACTCTGACTTTGTGCCAGTAAAAGTTCCTGTTTAG
+TTCTTTGCTGACATCTTATAGATCTTTGGAAGCTAGCTGCTTGTGACTGG
+AGAGAATATTGAAACAGAAGAGAGACCATGAGTCACAGTGCTCTAAGAGA
+AAAGAGACGCTCAAAACATTTCCTGGAAATCCATGCTGAGTGTTGAGCCC
+TGTGCTCTCTTGCAGCTCAGTCCTTTCTCTCAACTCTGGGCATTTTATTT
+CTAATCTGGATTTGTATAATTAATAAGGAGAACTTTTGGGAACAACCTAC
+TAAAGAATGTCATCATTAAAACTCACTTAGAAAATAAGTGTTCTGGTGAT
+ATCATTGAGCTATGTTCCCAGTCCTGAGAGTTTGTTTTTTTTTTTTTTTT
+TAAATAAAGATTTGGGGAGAAAAGGTGGCTTACTTGATAGAACAAAATAT
+AGGAATAAAATTTCCTTCTATAAGGTGAAAAGTGTGAATAGAAAACTTCT
+TATCCTCTAGATAAGTAGTTTCTTTTTGCTTTTGAGAGTCTCACTATGTA
+ACTCTTGACCTGAACTCAGAGAGATCCATCCTCCTGCCTCTGCCTCCTCT
+CTCTGGGATTAAAGGCATGTGGCACCATGCTGGGCTGTCCAAGTATGCCA
+CAGACCCTCTAGGTCCCTGGTCTTCGAGGAACGGGATTTCTTAGGCAGAT
+GGGTAAGGAGTCGGATGAAAATGACAATCAGCCACACACAAGAGAGGTGT
+TGAATCTGAATGTAATGTTCTGGTTGAGCTTCAGACTTATATAACAACGA
+ATTATCAGAGGATACAAATCACAAAAAGACAAGATACACTGAAATTCACC
+AGTTACAGCAGAAAGGAATTTGCAGGGACTAATTAAATGTTTACATTAGG
+GATAACAAGCCCTGCCTAGGATCAGCCTAATGCCAGGCAAGAATTTCACA
+CTTTAAGGTTAAAAGCATCAGGGGGTTGTTAACTCTTGACAGGCCTTAAG
+AGTAATGTGCTATCACTGAGCTCTAAATTCTTAGGTCTAGTAAAACTTAT
+CCTGTCTGGAGAGTTCCCCCTTATCAGGGTAGTATATCAACTTATACTTG
+ACATGGAATGAAGCCTGTAGTAAAACATTTCTATCTCAGTGAGACTTTTA
+GTCTCTATCTGTAAACAGCTGAGTAAAATGGCAAGTGCTTAATTGTTTAC
+TGAATGGGTTAAGCTCCTTGCTGCTATCTGGAATCTAAGAACACTGGGGA
+AAGGCTTTAGCTATGTTAGAATACAATATTAAAAGGCATTTACTATAAGG
+TGATGCTTAATAGAGTGCACGTGAATCTATACACTAGATTAATGTGGTGG
+AAATTTGAATATAATGGGTTAGGGAAAGAGATGCCATAACTCTGGGAGGA
+AAATTTCCCTGGACTCTTATCCTCGTGAAACAGCTTCCAGGCTTTTCGCC
+TGACAAACCGATCCAAACTGGAGAGTTGGCTTTCGCCAGAATATCCAGGA
+GGAGAGTCCTAGAAATTCATTTCTCATGAGCAGCTTTTTGGCATTTTTGC
+CTCACAAGCTGACTCCACCAGAGTACCCTGACACAAGTATTGTCTAGTTA
+TTTTGATTATTACCATGACTCTGCCTCTGGGTGAGAGGAATTGTGGAAGT
+TTACATATTCCCCATATCTTCTATAAACCTCTGTGTGTGTGTGTGTGTGT
+GTGTGTGTGTGTGTGTGTGTGTGTATGAGGGAGAGAGAGGGAGAGAGAGA
+GAGGGAGGGAGGAAGAGAGAGAGAGAGATTGTTCTGTGCCTGCTTCGAAC
+ACAAATTAGTTTGCAAAAGTAATTCATTAACATGATACAGTCCCAAAGAT
+AAAAATGGTTAAATAATGAAAACATCTCCCTCCCCATTTTCCTAACTTTG
+TACCCAGGAGCAAGCTCTGTTACACTTCATTTGTCCTTCCAGATAAAATT
+TGGGCATATGTTAGGACAGAATTTTAAATTATTTACAAACAAAAGTATTT
+TGGAACAAAAGCTTTTAAAAGCTTTTATTTTAATAAAATAACTTGTTACT
+ACACTGTATATAACTAACTAACATTTTCCAAAATTAGCTCCATTAGCATC
+TATCTCATATTTCTATGTACTTTGCTGTTGAAAAACCAAGTGTTCATTAA
+TAATAAGTAACAAACTCACTGCTTGGAAGCTTTGATTTTTGGCATTTTGT
+CCACTTGACTCAGTTAAAAGTCCTTTTTTTCGAAATGAGAACAGCCAAAA
+CAGTTTTAGAATGAGTCTGTTCTGCTTTTGTGACTCTCATTGTGTTCTGT
+AGAACCAGTGTCACAGCCATATGTGGGCCTCTGTTGAAGTAGCTGAGAAC
+TTGTTCTCTGCTCTGCTAGCTGCTGTCGATCTGATAGGCCTTGAACAGTT
+GACATTCACCCTTAATAGTCCTCATTAGTCTTCCTGAGCATAGTCATTCA
+TTTATCAATATTTGCTGATCATCTCCTATGTGCCTAGCATTGTTCTAGTT
+GCAGGTTTTAGCAGGGAACAAAGTCATGTCTCATGAAGCTAAAATTCTTG
+GGAGAGACATAGACAGTAAGCAGAATAGTTTGTTCATAGTGAGTGATGAG
+GCGCATGCAGTAAAGTAGGGAAGGGGATTAGGAAATGCCAGGGCTTGACA
+TGTTTTAGACAGGGTGTTTAAATAATATCTGCTTAGTTGAAGGCTTATTT
+TTGAATAAATATCTGAAGAGTCAGAAATCTACCAGGAGGGTGTATGGAAG
+AGGAGTATTCCTGCAAGGGGAAGTTGTCAAAGGCTTTCCTGTGTGGGGAT
+TAGTGATGTCATGTTTTTGCTGGATGAAATGAGTGACGGTAAGAGTTGTA
+GTGGGGGTGAAGCAAAGGGTTGAGGGAGGCTTCATTGGTGTTATCAGTTA
+CTGCATTTATCTCCAAATAGAGAACGTAGCAATGAAAGCTACAGAGAACG
+GGAAAGGTGAGGATTTATTCTAAGACAAAATAAGTGTAGGAAGTTTAACA
+ATTAGATCAGGAGCACAGACTCCAAGTCTAAGTCTTCATTCTTGCACATT
+TTTTAAAAATTTTGTTATGTGTTCTGGATACTATTTCCTTATGAGATATA
+AGTTTAAAAGCCTTTCTGTGGATTGCCTTGCCTTTGTTGTTGTTGTTTGT
+TTTGTTTTGTTTTGTTGAGACAGGTCTCTCTATGTAGCCTTGACTGTCCT
+GAAAATCACTCTGTAGACCAGGCTGGCCTGGAACTCAGAGATCTGCCTGC
+TTCTGCTTTTCAAGTGCTGATATTAAATGTATGTGCCACCACTGCCAGGC
+TAAGATTGTTCTTTCAATTTCTTTTTTTGTTTTCTTTTGAGATCAAAGTT
+TGCTATGTACTTTTGGCTGGCCTGGTATATTGTGTAGTCTAAGTTGGCTT
+CAAATCTTCATGGCACAGATTCCCAAGTACTGGGACCATAGGTATGGCCC
+ATCACAGTGGGGGGTTGGGGGGGCCAGTACATCTATCTCTTGAATGGTGT
+GGAGTGTATATATGTGTTAGGGGTTTGCAGAGGCCAAAAGAATATTGAGT
+GTCTTCCTCTATTGCTCGCCACTTCTCTGAATAAACCTAAAGTTACCAAT
+GGATTTCTAGTAAGCTGACTGACCAGCAAATATTGGGGATCTGTCTGTCC
+CTGTTCACCATAGTGAGGTTACAGACGTGAATAACCACACCCAGTTTTAA
+CGCTGAATGCTGAAGAGTTAAACTCAGGATTTAGACTTGCCTCACTTTCT
+TATGTTCTTCAAAGCATAGACATTTTAAGTTTTGATGAAGTTTAATTTTG
+TCTTTAGCTACGGTATGTTAGATACTTAATAAATCACTCTTTTATCTGAA
+ATCACAAAGATTTATTTACTCCCTCTTTTTCTAAGAGTTGTTTGTAAGCC
+TGACTCATTTTGAATTTGTGGTTAAGGTAGATGTCTGACTCTTTTCTTTT
+GCACGTGTAATTTAGCATTTGGCTAAAGAGAATGTTTTTTTCTCATTGAA
+CTGTATTGACATCTTTGTTGAAAATTATTACCTGTCCATATGTAAATGTT
+TCTACTTCCATTTTTGCCATTGATTGTGTGTCTGTTCTATGCCAGTACTA
+TACAGTCTTGATTACTGGTTTATATCATGATCTCAAATCATAAAGTATGC
+CCCCCAACATTGTCTTTTTCAATGTTGCTTTAGTTATTCTGGGTCCCTTG
+TGATTCTATCCATCAATATTGGTGAACTTCATGACCAGATAGTAAACGGT
+TAAGGCTCTGTGGCTATAATGCCATCACAGCCAGTCAGCTCTGCCACTGT
+GGCATGAAAGCAGCCATAGAAAATATGTAAGAGAATGAGTACATAGAATT
+GGAATTTCTTAAAATTTTCAAGTCATGAAACATTCTTTTATTATTTTTTT
+TTAAAGAAAATGTTTTATTTATTCTTTGACAGTTTCATGCATGTATACAA
+TGTATCTGAATCTCATGCAGGGCCCCTACTATCTCCCTCCCATCTACACC
+TCAGCATGTCCTTCTCCCACAGGGTCCCTACTCTCTCCCTCCCATCTATA
+CCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCTCCCATCTA
+CACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCTTCCCATC
+TACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCTCCCA
+TCTACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCTTCC
+CATCTACACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCCCT
+CCCATCTATACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCTCC
+TTCCCATCTATACCTCAGCATGTCCTTCTCCCACAGGGCCCCTACTATCT
+CCCTCCCATCTATACCTCAGCATGTCCTTCTCCCACCTCCATGCCTTTTT
+TTAATGACCCACTGAATCCAGTTAGTGTTGCTTGCTGGAATGGAATGTTG
+ACTGGTTTTTGTTTGTTTGTTTGTTTTTGTTTGTTTGTTTGTTTTCCCTG
+AGGCAGGCTTTCTCTGTGTAGCCCTGGCTGTCCTGGAACTACTAGCTCTG
+TAGATCAGGCTGGCCTCAAACACACAGAGATGAGTGCTTCTGCCTCCCAG
+TGTAGGGACTAAAGGTTTATCTCCCACCCAGTTCTTGTGTGGTAACCATA
+ACTGCAGTGAGTTTATTATTACAGCAGCCATGCCATGTCTGGAAGGCAGA
+ATTTCATTGAGCTCTATAAAACCTGGCTCATAAATTCTCTCTACCCCTTC
+CCTTCCCTGAACCTGGGTGTGGGGGTCAATTTAGATGTCACATTTAGGCT
+GACATTTATCAGTCACTTATTCTCAGTATACTATGAGTTTTTGAGTTGCT
+GCCCACTGCAGAAAGAGGTTTCTTTGGCCAAGGCCTGACAGTAGCACTAG
+TCTGTAGGTATAAATGTAAATATTTACTCTTGTTTGTTTATTTGGTTGGT
+TTGTTTTTTGAGACTGGGTTTTTCTGTAGCCTGGCCATCCTGGAACTCAC
+TTTGTAGACCAGGCTGGTCTTAAACTCAGGTTCACTTGCCTCTGCCTCCC
+AAATGCTGGAATTAAAAGCATGCACCACCACATCGGGCTCCCAAACATAA
+ATATGTAGAAGGTAGTCTAACAACATGTCTACTTAGCAAAACAGCAGTAG
+TAGGGTCCTGTTTTAGGACCTGTAACCTCCTTCCCCTGAGTCATGGGCTT
+TTGACTAGGTGTGTACCTTTGGCGCACACTTTAAGTCTAGTCAGAAAACT
+GTGGGTTATCTTTGTATTCTTTACATCACTGTTGCACTAGTGGTCACATT
+TCGCCTTTGTCTATATTATAGCATTCAGGGTCCAGTGCTTAGTAAGACCA
+TTGATGTCTTTTCTCCTCCAGTGGCCTGCAAAACACCTGGCATTATAAAA
+CCTAACCAGCCAAGAGGAAGTTTTCAGATCAGTTCTATCTTGATTTCTCT
+ATGTCTTCTATGCAACCAAAGTGTGTTGTATCTTCATCAATAGGGTTTTA
+CTATTATATAGTTACATTGGGCAACCAAGAGTGATAGAAATAACCTGTGT
+TGTTTGGATAGGAAGGTGTTTCTGGGACCTCCATGACTAATAACTTGTAA
+GAGGTATCTCATGCTTAGCACATGTTTTCTGAGGATACATTGTCATGTAC
+ATACCTATGTTGAAACTCCTTTAAAAAACACTTATACTTTTAAATTAGCT
+TTCAAAATAGTTTCTATAAGTTTTTTTAAAAAAGATTATATATATATATA
+CACACACATATATGTATACACACACACACACACACACACACACACACACA
+CACACACATATATTGTAGCTGTCTTCAGACACATCAGAAGAAGGCATCAG
+ATTCCATTACAGATGGTTGTGAGCCACCATGTGGTTGCTGGGAATTGAAC
+TCAAGACCTCTGGAAGAGCAATCAGTGCTCTTAACAATTGAGCCATCTCT
+CCAGCTCTATAAGTTTTTTTTCACACATCATTTACATTCTGTAAGTAATG
+AATAATCACTACACAAAACAACAATTGTTCTCCTGAACATTAATTCTGGA
+GAATCTAAAATTAACAGTCTCATAAAACCTGTATGCAAATGATTAACAGA
+TCTAGGGGTAATAGCCTCAAGCTGGAATCAGTTTAGAAGTCGTCAATAGG
+TAGTTAAGCTACCTAACAACAAGAAGGAATTCAGCTGCTGATACCTGCAG
+CAGCTCAGGTAAATAGTGGAGATTATAGGCCATTGAGCAAGCTGATTCCT
+AATGCCTACTCATTATATGATTTTACTTATGTATCTTTTTTTCCTGTAAT
+GTTAGATCTTTGGTTATTTTGTTTTCCCCTTGTGGCAAAATAACATCACA
+TAAAACATAACCATTTTGAGTATACAATCTATGATTGTAAAAACACAGTG
+TTCCATTGTGACCACCAGCCACCACTGTTACTCTTCCTTTTTAACATTGC
+TTTTAAAAGTATATTACAAAAAAAGTATATTACAATTTTCACTTGACATT
+GTAATTGTACATGTCTATGAATAGGGTCATTTTTTTAAAATTATACATTC
+TAGTATCTTCTGTTGCATTCAGTTAACTTAAGAGCAGGAAAGACTGGTAT
+AGAAGTCCTTTTCTCTCTCTCTTTTTTTTCTTTTTCGAGACAGGGTTTCT
+CTGTGTAGCCCTGGCTGTCCTGGAACTCACTTTGTAAACCAGGTTGGCCT
+CGAACTCAGAAATCCGCCTGCCTCTGCCTCCCGAGTGCTGGGATTAAAGG
+TGTGCGCCACCACACCCGGCGAAGTCCTTTTCTATGATAGAGAGTATATC
+GTGGGCAAATCCTAGGCCTTGGCTCTTTAGTCAACCAGCATTTGTATGAT
+TAAATAAAACATTGGTGTGTGTTTGTGTGTTTGCACACTGGGTACAGCCT
+TTCCTTTATTAGCCCTGGGTGTGATTTTCTTCTCTGCTGATAGATCCTTT
+CTAAGCTGATCGCTTCATACTTAGGGTGGGGATAGTTGTGAGGACTGAGG
+AGGTGATGTGCGGTCCTGTCCCTTTCTCATTTTGCTAGTGTGACTGATAT
+GTTAGTTCTTTGCATGTGTCTCCTACTCTGGAAGGAGCTGGATGGGAATT
+GTTTGTTTTTTAGTCACTAAATCTAGACTATCAGGTTCATGGCAAGTTCT
+CAGGAAGTACTTATTACATGTATAGAGTTATAATCTGAACTTGATTAGAC
+ATATGGCACTTTTCATACTCCTACTTTTGTTTTTCAAGTTATTTTTTTCT
+ACTTACCAGTTTCATGTTTTAAAAACTTGTTTCTTTTTTTAAATTTTTTT
+AATTAGGTATTTTCCTCATTTACATTTCCAATGCTAGCCCAAAAATCCCC
+CATACCCTCCACCCCACTCCCCTACCCACCCACTCCCACTTCTTGGCCCT
+GGCATTCCCCTGTACTGGGGCATATAAAGTTTGCAAGTCCAATGGGCCTC
+TCTTTCCAGTGATGGCTGACTAGGCCATCTTCTGATACATAAGCAACTAG
+AGACACGAGCTCCAGGGGGTACTGGTTAGTTCATATCGTTGTTCCACCTA
+TAGGGTTGCAGATCCCTTTAGCTCCTTGGATACTTTCTCTAGCCTCCTCC
+ATTGGGGGCCCTGTGATCCATCCAATAGCTGACTGTGAGCATTCACTTCT
+ATGTTTGCTAGGCCCCGGCATAGTCTCACAAGAGACAGCTATATCAGGGT
+CCTTTCAGCAAAATCTTGCTAGTGTATGCAATGGTGTCTGTATTTGGTGG
+CTGATTATGGGATGGAACCCCTAGATATGGTAGTCTCTAGATGGTCCATC
+CTTTTGTCTCAGCTCCAAACTTTGTCTCTGTAACTCCTTCCATGGGTGTT
+TTGTTCCCAATTCTAAGAAGGGGCAAAGTGTCCACAATTTGGTCTTTGTT
+CTTCTTGAGTTTCATGTGCTTTGTATCTTGTATCTTGGGTATTCTAAGTT
+TCTGGGCTAATATCCACTTATCAGTGAGTACTTGTTTCTTTTAATTAAAA
+AACAAAACAAAACAAACAAAAAACTGTGTGTAGGCCGGGCGTGGTGCCCC
+TCATACTTAATCCCAGCACTCAGGATCTCTGTGAGTTTGAGGCCAGTCTG
+GTCTACATGGTGAGTTATAGGACATCTAGGGCAACATAGTCAGACCCTGT
+AGTCAAAAAGAACCAAAACTGAACCAATACAAAACTTTGTGAGCTAGTAA
+AATAGTGAAGTGCTTGCTAACATTCTGAGTTTGATCTCTGGGACCCATGT
+GGTAGAAAGAGAGGACCAGTTTCCACAAGTTGTCTTTTGATCTCCATGTG
+AGTGCCAAAGCACACATACATGTATTAAAATGTGCATGTGTAATTGTAAA
+GTTTCGTTACCACTGACAGTCAGAAACGAGCTCTGGGGCCTCAGGGTTCT
+CTTCCTTCAGTGCTGTGGGAAGTGCACCTTTAAACAATCTTATTTGGGTT
+TCTTTTGGAGACAGGAGTTATGACAGAGATTAAACTTGGCTTTGAGCAAA
+TTTCATACACATTTAACCACTGAATTTAACCACAGTGCCTATTCAGTTTT
+GAATATCACATGACTAGAATCGTAGAGCTCCTCCTATTCATAACTCACAC
+TGTGTCAAAGCTCCTTTTCTTCAGTGTTGTTGGTCACCCATTTAGTTTGT
+CTGTTTTGTATGAATATGCCCTGTTTTTCTCATCTTATTAGTGCATGCTT
+GGGTTATTAAACTTTGACAAATGCTGCTGTGAGGATATCTCATATGCTGG
+CCTGTCTTTTCCCTCACAAAATGGTGCTTTTTAATGAGCAGTTCCCATTT
+TGGTGACTTCTAATTTGTCATACTTTTCCATTATGGTAAGTGCTTTTATT
+TCTTTTTTGATGAATTTTTCATCAACTTGAGATCATGAGACGTTTCCTGA
+TACTGTTGCATAAATGGCATATTGATTTGCCCAATGAAGTTGACTTGTTT
+TTGTGTGTGGTGTAAAATAGACTTCTTGTTTGTTCTTCTAATGTGGGTAC
+TTCTTTTTTCACCAGTTTCCTCCATGTTCTGTGTGTATTTCCTCCCTTTT
+ATTGTTATTAAGTTTATATTATTAAAAGAATTCACATTAAGTAGGTTTTT
+TTTTAAAAAAAAAACTTTTTATTAATTCTTTGTGAGTTTTATATCATGTA
+CCCCACTCATCTCCCTGACCCGTTTCCCCCCTCTGCCCTTGCATCCCCCC
+CTTCAAAAGAAAAGAAAAACCACACAAACAGAAAAACCAATAATGTATAG
+AAAACATCTCATAGTAGAAACTGTATCATGTCACAGTGTGTGCCACTGTA
+TACCCCTCTGTCTGCACATCTTGACATGCAGATGATCATTGCAATGAGTC
+ACTGATCTGATTCAAGGTCTCTGACTTATGTCACACCATTAATATTGGAT
+CTTCTCCAGGACTCCTCTTGGTTTATTCAGTTGTTACTCTGTGTCATCAA
+GTTCCTGCAGCTTTGGATCAGCAGGACCGGCTCTTTTATGTACTCCAACG
+GTTCACAGATGATGTAGATGTTGGGGTGCGCCAACTCAAAGCCCTGGATC
+TGGGCCTGGGTGGTAGTTGTGCTGGTCAGCCTGCTGGCTCTCCTGCATCT
+GCATCACCAGGGCTGTTCTCCAGCACTGCTAGGCCACTCGATGCTATCAT
+TTGTAAGAAGCAGGGTCATGAGGAGGGAGGACACCTCCCTGCCCCAAAAC
+ACACACACACCACCCAATGGCAGATGAGTGACCAGTCCAGCTCTCCCTCT
+ATCTCACCCTTGAGGCTAGGTCACCTGTGCACCTGCCACCAGGGCCAGCT
+CTACTCTGCTGCCCAGTTAAGATTCAGGACCTACTCTCCTGAGTACTGCT
+GCTGGTGAGAGGTGTGCCAGCTCTCTAGAGTGCCAAAGCCAGTTCTGTAC
+AGATACATGGCTGCACAGACCAGGGACATCCCCATGGTTTCTAGTGATAA
+TGTGAGTCACGACATCAACATCAATCCCTGCCACTGCATGGCCACAGATC
+CAGACATGGTCCTCAGTAGCAGCAGGAGTTGGTACTTCACCATGGCTTCA
+AGTGGCAGGGCTAGCTACTCACAATAGGCTCTTCCTCTTCCCCCTCATGT
+CTCAGTTCCTTCTCTCTTCATACTGCGCAGGCTGTTCTGCTTCTCTTTCT
+CTTCCTTCTGTCCACCACATACTTGCACATTGCAGAGACTCCTGCTGCAG
+GCAAGCCATGATGCTGGTATGCCTCTGGGTGATCTCCTCTGCTTGTGCTG
+TTTGGCATGGTGGCATGCAGACCTCTAGGTGTCTACAGCTACCCATGTGA
+CATGGCAGCAGTGTCCTCCCCCACCCCTCTCTGCAGTGTGGCAGGCAGGT
+CTTCTGGACTTTTTTCCCTGCCAGTGCCCTGTGTCATGGCAGTGGGATGG
+CAGTGGGTGGGTCTCTCTCTCTCTCTTTTTTTTAAACACAGGGTTTCTCT
+TTGTAGCCCTGGCTGTCCTGGAACTCCCTCTGTAGAGCAGGCTGGCCTAA
+AACTCACAGAGATCTGCCTGCCTCTGCCTCCCAAGTGCTGGGATTAAAGG
+TATGTGCCACCACCACAGGCATGGCAGGTTCTTTTGGCATAATTATTTCT
+CACTTTGTTCTGGGGTTGTTTAGGACAGAGTTTTGTGTTGTAGCTCATGC
+TGGCCTAGAGCATGCTGTGGTTCTCCTATCTCTACTCCCTGCATTCTAGG
+AGTGAAGATGTTCACCATGTTTGGTTCTAGTTATCTTAAAATAAGGACCA
+TCTCTTGCTAATAATACTTTTCATTGTACAGTACATTATGCCCCGGTCTG
+TCTTAGTATTTGAAGAACCTTTGTCCTTCATACTGATTCTTATGTCTTTC
+CTAGATGGCATCTTCTGTCTCCCACATAGACAATATTGTCACATCGTTGT
+CCTTAGACAATATTGTCATATTGTTGTCCTTTGCTATTGGAAGATTGAAG
+GGTTTTGTTGCTTAAAAGACCATTTTGATAGTCTAAGTGTTGCTGCATGA
+TTTTGTGTGTGTGAATATGTGTGTTTAAGTGTAGTTTTTCAGTATTATCA
+CTTCTTGAGAAAGTATATCCTATAGTTCCAAATCAGATGTTGATTTAACC
+TTTTAAAAAAATCTTTCTGGTGCTGGAGAGATGGTTCAGTGTTTAAGAAC
+ACTAACTTGCTCTTCTAGAGGTCCTGAGTGTGATTCCCAGGAACCACGTG
+GTGGCTCACAACCATCTGTAATGGGATCTGATGCCCTCTTCTGGTGTGCC
+TGAAGACAGCTACAGTGTACTCATATACATTTAAAAAAAAAAATCTTTCT
+ACTCAGGTGGTGGTGGCAGCACATACCTTTAACCCCAGCACTTGGGGAGC
+AGAGGCAGCTAGATCTCTGTGAGTTTGAGGCTAGCCTGGTCTACAGCGTT
+TCAGGACAGCCAGGACTACCTGTCTGTCTTGAAACAAGACAAAACAAAAA
+CGCCAAACCTTTCTGAAAGTGGTCTAAGTGTTCAGCAACACTGTCATGTA
+AGGATAGGACTTGACAAAAATCAGAGAGCAACTGTTGAAGAATCAGAAGC
+TATGCATTCATGCTCCAGGCTGCTGTGCTTCTTATAGGCAGGACAGCTTC
+CAGACTTCAGTCTTGCCCCCAGTATGGAGTGTACGTTACATGTGTTCGTG
+GAGGGGGAGGAGGAAGAGGAGGGGAGAGGAGGAAGAGGAAGAAGAGGAAT
+GTTTTAAAATTCCTTGAGCGGTTCAGTCTACCCTCTTCTCTTAATGTGGA
+ATCACATTGTTAAGATTTTTATTTTTAATTAGGTGTCTATATATGTATCT
+GTATGGAAGTCTATATGCACATGAGTGCATGTACCCTGAAGAGGGTATTG
+GATCCTATGTTTCTGGAGTTAAAGGTGGTTGTGAGCCACCTGATATAGGT
+CCTGGGTAACTGAACTTGGAGTTTTTTTGGATTAGCAATAAGCACTCAGA
+ACCATTGAGCCATCTCTCCAAGCCCTGTAATCACATATTTAAAAGAACAA
+TGAGTGTAATTCTAAAGTTAAGAATTTAGATATGGGGGCTGGAGAGATGG
+TTCAGTGATTAAGAACACTGACTGTTCTTCCAAAGGTCCTGAGTTCAATT
+CTCAGCAACCACATGGTGGCTCACAACCATCTGTAATGGGATCTGACACC
+CTCTTCTGGTGTGTCTGAAGACACCTACAGTGTACTCATATAAATAAAAT
+AAATAAATCTTTTAAAAAATGAATTTAATTATGAAGGCCAAATTTATATT
+TTTAGAAGTAGTTCTTAATTTGTTACAGTGTGCTCTAGAGCCTGGATTTT
+ACATGCCCCCACAATTGTGAGCCTGTGTAGATTGCTTTGTTGCACTTAAA
+ATAGTTTGGCTGAAGCTTGTTTTCATTTTGAAGATGTAGTTTCAAGTGGT
+TGAAGAGCCAAGGTTGTTTTTACCCTATTGATACAGTTCCTACCCTGAGC
+TATTTTATTTTTCATAAAAAACAAATCAGTCTGACTTATCTCTAAAAATC
+CCATCTAATTTCATAAGGAATGAAATAGCTACAGATGTTTATTTATTTAT
+TTTATATCAGATTTTCTTATTGGACAGAATGAGGTAGAAAAAAATGTTAT
+TTCAGGCTGGGACCTAGGCTAGGTGTGGTGGTGAGGGCTGCAGTTCTAGC
+ACTAAGAGCCAGAGATGAGGCAGGAGGATCTTGACTTCTTGAGAGGGCTA
+GACTGGGTTCCAGGCCAGCCAGGAGTGAGAGAAGAAGAGGGGTGGGTGCT
+TTACAGAACTCAGCTGGAAGATGTATGCCAAACACCTGCAGCTTTATCAT
+TTCTATTCTGTCTCTCCTCTTTTTAAGCTAAAGTTTAAAGGGCTAGAGTC
+CCCTGCAGGTTGGAGAATCTAAGGAATGAAGTTGAAAGGTAGCCTGAGGT
+CAAATTGATTTGTTGTTTTGAGACAAAGTCTTGCACTGTATATTCAAGGC
+TCTTGTCAAATTCCTGATTCTCCTGGTTCCGTCTCTTGAATGCTGCCAAG
+TTATAATATTGGGATCGTGTTCTAATTGGCTGAGAAGTCTGTATTAGAAG
+TTCTAGCTTCTGACCTGCAGAGTATAGCAGAAGGATTTTCATTTTCTGAT
+ATTTTTGGTTAGTGTCATCTCTGTTCTGAGAGTGCATTCTGACTCTCATA
+CTTTAAATAAGAGTACTTGGTATGCTAAGAGGAAATGCTTGTTATAAGAC
+TGTAAAACTATCTTTTATTCTCCTGGAGTAATTGTCTCCAAGGCTTACTG
+CCTCTGTCCATTAACCTAGACTTAGTACCCAAAGGTGCTAGCCTCCATAC
+AATCTAATTTATGCCGAGACTATTTTCAACTTCTGAAACTTATTGCTCCA
+TAAGCTCACCCTTTCTTGTTCTTTCTGATCTCTGGCTGCTGATTCAATTC
+AGTTAGCTGTTCTGGCTCAGACTCCTCTCCAAGCTGACTGATTGAATCTG
+GTTTCTCTCTCTTGGCTTCTCCTGCATTGTTCTGCTTGGCCTTACACTAA
+CTTTGACAATCTGTTCTAATTTTCTGGCTCCTTCTTATTCTCTGGCTTGT
+TCTAGCTTCACCTGTGTCTAGTTTGTCCTCTCTCTATAACCTGTCTCTCT
+ATCACGGTCCAGGGAAAACTGCCTCCTTCCTCTCTCTGCCCTCCTCTGCA
+AGTAGCTTTTTTTCCCCCTTTTTCTTCTGGTGAGAGTTGGGCAGATCCTA
+TTCTAGCAAATCTTTCTCTAATTCATCACTTTGTCTGCTATTCAATTAGA
+CTTCTATAAACTACTTTTACCCTCATTGATTGAGATTAAAGGGTGTGTTT
+GTATTCCAGCCAGAAGTGGCTTAGGTGTATGCTAAGGGCTTAGCCACACC
+ACAACGAGAAATAAGTTTTGTTGTTGTTGTTGGTTTTGTTTTTTGTTTTT
+TGTTTTTTTGTCAGTAAATAACACAATCTTAGAGTTCATTGTGTGATCAA
+ATATCCTGCAACATAAGGTCTGGATGTTCTGGCCTGAATTTTAAATCTGG
+CACCATGAGAGATAGATTCTGATAGAAGAGTTGTGCTGCTCTTAGAATGT
+ACAGGGCCAGAGAACAGATGCATGATGGATATAAGAAAAGAGGAACAATA
+TCATTATTGTAAGAGCAAGTAGATGGCTTGCTTTTCACACAAAGCAGGCA
+CTTAATAACTATTGTTTGAATTTTAAGTCAAACTAGCAACTATTGGGAAC
+TAGCAAAATTTTATGATATTAGGAAGGGTCAAATTTTTCCTGAAAAGGGT
+TTAGTTTGTTGTAAATAGTTTGGGATGAGGTAAAAGAGAAAACTTGAGAT
+TTGTCTTTTCTTTGGTTGTCTGTGATGGTTTATTGTCCCGTTTTTGACAG
+TGACCTCTTAGTGATGTGAATCTGTGAACAAGTGATCTTTGCACGTGTAT
+GTTTGTATGTGTGTGTGCTCATGTGAGTGTACCTGCTGTGGGCCTGTGGA
+AGTTAGAGGACAACTTTGGAGAGTTGCATTGTTTGTATTTGTCAGGGTTC
+TCTAAAGGAGCTGAACTGAAAAGATGTATATGTGTGTGTGTATGTCTTAG
+TTATTGTTCTTTAGTGATGAAACACTATAACCAAGGCAACTTAAACAGAA
+GCATTTAATTGGGGCTTGCTTACAGTTTCAGAGGCTTAGTTCTTATCATC
+ATGGCAGGATTGTAGAGTCAGGCAGCTATGGTGCTGAGAAGTAGCTGAGA
+ACTCACATCTGACTAGCAGTTTGCAGGCAGGGAGAGAAAGAGAGAGAGAG
+AGGCAGAGAGAGAGAGAGACAGAGAGAGAGAGAGAGAGTCAGAGACAGAC
+AGAGACAGAGACAGACAGAGACACAGAGAGAGAGACAAACATAGAAAGAG
+ATACAGACAGACAGGTCCATTAACTAGAACCAAACATTTAAGCATGAGTC
+CATGGGGCCATTCTCGTTCAAACTACTATATATGGTATATAAAATGTGTA
+TATATGTATATATACATATATATATATATATACACACACACACACATGTA
+CATTTTTAAAAGGGAATTTTTTATGTTGCTTTATAAGTTGTGGTCTGGGT
+AGTCCAACAGTGACTTTCCTCTGACAGAAAGGCCAAGAATCCAACAGTTG
+TTCAAATTGAATGTCTCGGCAGTCCCAGTCTGTTGCTGGAGTCCTGGAAG
+ATTCCTAGAGAGGTGTGGGGTCTTTAGTCTGTGTTGGAGTTCTGAAAAAG
+TAGGTTCCAATACCAGTGGAGGAATCCCTCAGCAACAGGATAAATTAGTT
+CTCAACATTTGGGGGGTCCAACGGCCCTTTCACACGGGTCACCTAAGACC
+ATTGAAAAACACAGATATTTATGTGGTGATTCATAACAGTAGCAAAAATT
+ACAGTTATGAAATACTGTACGATGAGAATAATGTTATGGTTGGGGTTCAT
+TATGACTTGAGGAAGCTTATTAAAGGATCTCAGCATTAGGATGGTTCGTA
+ACCACTGTGATAGATGGATCAGCTGTGGAGAGTGAGGGCACGAGGGCAAG
+CAGCAAAGTCTTCCTTCCATGTCCATTTATGTGAGCTGCCACCAGAAGGT
+GACCTAGATTTAGGGTGGGTCTTCCCACCTTCAGTAATCCAATCAAGAAA
+GTCCCTCACAGATATGCCCAGTGCTTGGGTTTTAGTTGATTCCAGATGAT
+GTCAGTTAAGATTAGCTGTCTCCTTGGTCCTTTCCTTTCTGCTGTTTTCA
+ACCCCTCCCTCCTGGCTTTGTCCCTCTCCACTCCCCCATCTAGTCAAGCA
+ATTTTCTTACCAGACTAAAGAATTTGAGTTCAGGACATTTTAGATGAAGC
+TTTATTATTTCTACCTTTCTTTGATCTCTTGATTTTAGATACATAAGTGT
+AAAGTTAAAGATCATTGTCCTTTATCTTCAGAGTTGCCGAAGATTCACCC
+AAGAAACTAGTACTGATAATTTCTGTAATGTATTCTCCCATATTGGGAAT
+TTACTTCATTTGATGAGCTCTTGTGGAACTCTGTATAGTATGGGTACTGA
+GTGCTAGTAATATAGATAAGATATGGTCCCTTCACCTTCACTTCTGGGGA
+TACAATCTAAGGCACACTGTCTGAAGAAGTGCCAGCAGGAGCCAGAGAAG
+GGCCCTTGGTAAATAAGAGCTTCCCCCAAGGTGCCCAGTGCCTCAAAAAG
+AATTATGAATTTTCATTTCATTGTTAAGATTGGTATTGTCTATAAGTCAT
+AGTTTGATAAGCTTGACTAAGTCGAAGGTGGCAAGCCATAAGAGGTGGTA
+TCATATTCTTTGTTGCCACCTTTATCCTATGGGGTTATCTCATGAAGGAG
+GGAGAGCAAGAATGAATACCAGAGAAAACTTTTTCCAACTCATTTGTATT
+ACTTCAAAGATGTAAGATAAATGCTCCATAGGCCTACCTAGTTTATCAGA
+CATGTAGTTCGTGTGAAACTGTTGGGTTTTTTTTTTTTTAATCTTTTTTT
+AACTTGTGGATGTGTGTGGTGTTTGTATGGAGGCAGACATGTGTGTATGG
+TGTGTTTTACAGTATGGAGGCCTCTAAGATTGATGTCGGGAATCTCCCTT
+ATCTTTTTGTACTTTGTTCTTCGAGGCAGGATCTCTTAGTCAAACTCAGG
+CTTTGATAGCCAGCTAGCTCAAGGACTCAAGTCCCATCTCTCCTTTCAAT
+GCTGGATTCACAAGTGGCTGCTGCATTGTCACTTTGCATTTAAATGGGTT
+TTGGGGATCTGGATTTGGCCCCTTTGTTTGTACAGCAAATGTTAACACTA
+AGTCATCTTTTCAGGTCTGATTTTTTTTCCCCTTAAATTCTTTATTAATA
+GCCACTTCCTCCATGATTCTACTAGGTGATACAACTCCATCAGTAATTTT
+TAACTACAGAAAAGTTGGCATGTTGACATACTGCTTCTGTGGACACATGC
+CTCATAAACTTGTCAAAGCCCGAGGGGACGTTGGGAATCTCTTTATGACA
+ATTCCCGAGGGCAGTAGTCTTTCCTCTTGAAAGTGGATGAGGCCTAGACC
+TGTTTATTTGGGCAGCCATATACCACTGATCATGGTGGACCTGTCAAAAG
+TACCTATTTACCATGCTGTCTGAGTTTAGCTGAGTCTTGTCAGTTACAAT
+TGGGAAAGTTGGCGAGAGCAAAGGGACTTGGTTAGTTTGGCTTTGGATCC
+GAGGTGACTGAAAATCTCAGATAATGAATGTTTCCATAATAAATGTAATT
+AGGTCACTGAGTTCAGTGTTGTCAGCCTTCTTTTTTTGTTTACACTTTTT
+GCTTTATTATTTAATGAATGTATTAGTGCTGTGCTGCATGTACACCAGCA
+TGCCAGAAAAGGGCAACAGATGCCCTTTTTGATGGTCAGAGCCACCATGT
+GGTTGCTGGCAATTGAAGGGAATTGAACTCAGAACCTCTGGAAGAGCAGC
+TGGTGTGCTTAATTGCTGAGCCATCTCATCACCATCAGGTAGCCTTTTTG
+TCACTACTTGGTACTGGTGTAGCATGACCCCTTTAAGACGTGTAATTTCA
+GTTCATTTAAAACCTGGGACAGATTTTTCCTGTGCAAATCAAAGGATAAT
+GGGTTGGTGCTTTAGTTTGCTTCCTAAAATGTTTAATGGGGTTAGTGTTC
+TGGAAGCATCCTAATGGCTATTTTAAGGTAGGTAAAAACCTAAGCTGTTG
+CAACAGAGGCTTATTACAAGTGATTTAGAGAAGAAAGCATTTTATTCTCA
+TAGTATTTTCTTTACAGTGTGAGGAAATGTTCTTTTTGGTATACAATTCT
+GAGATTTAACTAATGCAATCCCTGTGACTACCATAATCAGGATTCATAAA
+TTTACTATTATTATATGGTGTGCATGTACATGTCAAGTGTTTGAGTGTGT
+GTTTGCATGTCCCATGTGCAAAGGAATGCGGGTGGAGGTCAGAGGACAAC
+ATTAGAGTTAAGTTTTTCTATTGTGAGCTCCAGGGATTGTACTGGCTAGT
+TTTGTGTCAACTTGACACAGCTGGAGTTATCACAGAGAAAGGAGCTTCAA
+TTGAGGAAATGCCTCCACGAGATCCAACTGTAAGGCATTTTCTCAATTAG
+TGATCAAGGGGGAAAGGCCCCTTGTGGGTGGGACCATCTCTGGGCTGGTA
+GTCTTGGTTCTATAAGAGAGCAGGCTGAGCAAGCCAGGTGAGGCAAGCCA
+GTAAAGAACATCCCTCCATGGCCTCTGCATCAGCTCCTGCTTCCTGACCT
+GCTTGAGTTCCAGTCCTGACTTCCTTTGGTGATGAACAGCAGTATGGAAG
+TGTAAGCCAAATAAACCCTTTCCTCCCCAACTTGCTTCTTGGTCGTGATG
+TTTGTGCAGGAATAGAAACCCTCACTAAGGGATCACATGCAGATCCTCAG
+GCTTGTGTGGCGAGTCATTTTACCATCTGAGCTGTCTTGCTTCCATAATC
+AGGAGTTAGGTTGGTTCTGTCGCTACCCGAAGCTCCCTATGCTGCTTTGC
+TGTCATCTCCTCACCCTGACATTACTGGTCATATTTTAATTCTTACAGTT
+TTGATTTGTTCTAAAAATAGAATGGCCCTGTGTTGGTTGTCGGAGTCTTA
+CTAAGTTCTTCCTCAAGTGTAGTGCATTGTGATCTGTCCTTGTTTCCTGT
+TAGTAGTTACTTCCTTTTCTTAGATACGTTTCCATTGTTTGGCTATTGCT
+TTTGTTTATCTTTCAGCAGGTGGATGAGGAGCGCTTGCTAGATTTCCATT
+CTAGCATTGCTGCAGTGAGCTTCTTATGAGGATGGGAGGTGTGGGAGCCT
+GTGCTTCTGAAGATGATAAAGGGACCCACGATGCTGCCGTGCTGCTCTGC
+ACTCTTCTTATTGTCTTTATTTATATAATTGACACTCGAGTGTTGGGCAT
+GTTTGTTCTGATTGGAGGAAGATAGCTTGGACATTCAGATACAATAGGAA
+TTCTGTATATCACTTGCATTCCCAATACATTTATGGGAGGAAGTTATGTG
+CTTGTGTTAGGCAAATTTTGGTGGGTGACCAGCAGTTTACGTGACCGGTC
+AGAAAAAGTCTCTTTCTGGGAAGACATAAGCTACTTTTTTTTCACATGTC
+TGACTTTTCTGAGTGTCGTGTGAGGAAGCTCTTAGTAGACCTCATCTGTC
+GTCATCCCTTCCTCATGCTGCCCTCTTCCCAGCATGGAGTTTATGATTCA
+CTAGTAGTAGCCAAAACGTACTTAGGAATGAATGAAATATAGAAACAAAC
+GAAGTAGTCACCTCAGTGGATGCTCATTTCTTTTCCTCTTGTTTTTTTGT
+AGAAAGAAGGAACTCTCAGCCACCAAGAAAGACCGTGTGAATCATTGTCT
+AACAATATGTGAAAACATTGTGGCACAGTCTCTCAGGTAATTGGCTTTTT
+AAAAAAAAGATTTATATATTTATGTATATGAGTATTCTGCTTGCATGCAT
+GCCTCCATGACAGAAGAGGGCATCAGATCCCTTTATAGATGGGTTGCTGG
+GTAGCCAGTGCTGAGCCATCTCCCCAGCCTCTTCTTTTCTTTTTGTTTTT
+GGTTATTTTTGTTGTTGTTTTTCTTTTTTTGTTTAAAAGATCTCTCTGGT
+AATTACTGAGTTGGGTGGTGGTGGATATACCTGTAACCTGGCATTCAGGA
+GGCAGAGGCAGGCAGATCTTGGTGAGTTCAGGGATAGCCTGGACTACAGA
+GCCAGTACCAGGACCTACACAAAGAAACCTTTAACTCATAAAAAACAGAA
+AACAAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGAAGA
+AGAAGAAGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGA
+AGAAGGAAGGAAGAAGGAAGGAAGAAGGAAGGAAGAAGAAGGAAGAAGAA
+GGAAGAAGGAAGAAGGAAGAAGGAAGAAGGAAGAAGAAGAAGAAGAAGAA
+CAACAACAACAAACTGGTTGCTGGGCTGTGGTAGTGCATTTCTTTAATCC
+CAGCACAACAACAACAACAACAACAAACTGGTTGCTGGGCTGTGGTAGTG
+CATTTCTTTAATCCCAGCACTTGGGAGGCAGAAACAGGTGACTCTCAAGG
+CCAACCTGGTCTACAGAGTGAGTTCCAGGATGGCTAGAGCTTACACAGAG
+AAACTCTGTCTTGAAACTCCACCCCACCCCAAACTATATCTAAATACTCT
+GTGTTTTCACTATTAATGCATTACCATGTTCTTTGTACCCCTAGCTATCT
+CTTAAAAGTTCATTTAGGCTAAGCGTATTTTGGTACATGCTTGCAATCCC
+AGTATTTGGCAGGCTGAGACAGGAGGATCTTGAATTTGGTATTAGCCTGG
+GCAACATAGCAAAACCCTGCATCAAGAAAAATCCATTTAAAATCAGGACG
+TTTTACCACATTTGTAGTTGTGCTATAAGGGTATCTGGGTTCTCTTATAG
+GAAATGTTTTCTTCTTGTCATCTTATATATGAGAATTTTAGTCATATGAT
+AATTGAATGGCATGTTAGTAATTTAATTTGTATTCTTTTAAGGTTTATTT
+ATTTTTAAATAAATGAATGAGTGTTTTGTCCCTATTTTATATTTGTTCCC
+TGTATGTGCCTGGCACCCACAGAGACCATAAGAAGGTACTGGAGTTCCTG
+GAACTGGAATTAAAAGATGGTCATAAGTTGCTGTGTGGATGCTGGGAAAC
+AAACTTAGGTCCTCTCTGCAAGAATAGCAAGTGCTCTTATCTACTGAGCC
+TCCTACTTTCTGTTTGTTTGTCTGAGACAAGGTCTCATTTAGCCCAGGAT
+GGCTTCAAACTCACTGTATAGCAAAAGATGACTTTGAGTTCCTGCTCCTC
+TTCTTCTGCCTCCTGAATGCTTAGACTATAGACCTGATCTCTTAGAGTTT
+CTATTGCTGTGATGAAATACCATGATCAAAAGCATCTTTCACTTATACAT
+GTGTGTAACAGTCTATCACTGTAGAAATCAGGGCAGGATCTCATACAGAC
+TGTGGAGGGGTACTGCTTGCTGGCTTATTCTTCATGGCTTGATTGGCCTG
+CTTTATAGAAGCCAGGAGCATGAGCCCAGGGGTGTTCCCATCCACAATGA
+GCCTGGCCCTTCCCCACCAATCATTATTTAAGAAAATGCACTATAGACTC
+TTCTGCCTAAAGTCCCATTTTATGGAGGCATTTTCTCAATTGAAGTTTCT
+TCAAAGACGAGTTTAGCTTATGTCACCTTGGTCAGAAAACTAGCTAGGAC
+ACTTGGCTTTAGAATATAGCTTCAATGTCAAAATTCCCCATAATCAAAAC
+TGAAAGTAAATTCAACTTGGGGATTGTATTGGCAATTTATATAAATTAAA
+GGCTTAATAGTCTGTAGGGTGAACCATTACAGAGCAGAGTATTATCCTAA
+TAAGAAAATACATAACACACACACACACACAATACACATATTTCAAGAAA
+GAAATTTGACTTGGCTACTACTCAGGCAGAGGCAGGAGGTTCATGACTTA
+GAGACTTTTTATAGAGCCCTCTTTCAGGCTTTATAAAAGCATGACTTCTT
+CTCAAACAAAAAGGAAAGAAGGAAGGAATCAGGGTGGGCTCAATGTTTAT
+TTCGTGTTTTCTATATTAGAAGTTCATCTTTATTAGTTATTATGGACTAA
+ATGTGTATCTCCCCAACCCGTCTGTATGTTGAAGATCTAATCCCCCATTC
+TGAAACTGAAACCAACTATGCATTTTAGTAGGTTAATTCAGTGTTTTCTT
+GAAGTTAATCTCACAACCCACATAGTTATACAGCTTTGTCTCATGGGCTG
+TTCTTCATGTAGTAGTCACAGGCTTGTGGGAGGCCACTGAGTGACTAATC
+TGAGATTTGAATGACCTTTCTCCTTAGGTTGAGGAGTTTGTGGTGTGCTA
+GCGTGGCTTGCAGAACTCAAGAAAATACTAACATTTTCTGGCTTGCTACA
+AAGCTATAGCTCAGTAGCAGCCAGATTGTAGAGACTCAGAGGGTAAGGTA
+TGGGAGGGAAGCATGGCCCCTCTGTGCTCTCTTCTTCCAGTCTTCCATGT
+GTCTCAGCATGGACTTCTGTTACCAGAACCAGGAGCTTGATATGTGTTGT
+ACTTTAAGCATCACCATTCACATTGGCAACAAAAATCATCAGAATTTTTT
+TCAGAGGGAAGACACTTGGGGCCATAATAGAGACAGATTAAAGTGGTGTC
+TTAAAAATAAAATCTGACCACAATAAAAATCAATCAGAAATGCAACAAAG
+TATAGGAAGTTACAATTTGTAACAAGAACAGTTAGTCAATGTTGGCCCAG
+AAATTAAACTAGGTTAGCATACACAAGCACTAAAACAGTATAAGAAACAT
+TTAAACAGTTTAACTTTTTTTTTTTTTAATGTCTAAGAAGCAAAAGGTTG
+AGGGTATGGGAAGAAGAAACAAGACAGGAAAAAGGCTGAGCCTGAACTAA
+AATAAGGAGACAGCCTACTTCTTATAGGAAATGCCTGCCAAAATCCAGGG
+AAGTGCCATCTGTAAGTCATTCTATCCTCAGTAAAAATCTCTTGTAAAAT
+GAAAAGGTGAAATAAAGAGGTTCTCAGACATACCTCTGAGGAGTACCCTC
+TGTCTGACAAGCAGTGAGTTTAAGACATTTTAAAGGAAGTGTTTCAAGCA
+GATAGAAAATTATACCAGATAGAATCTGGATCTGTCCAAAGGATTGCTAG
+CAGATGGATAAACATAGGAGATGTCTGTCTGTCTGTCTCTGTCCCTCTCT
+TGATCCCTCTCCCCCTCCCCTCTCTCAGACAAGATTATGTACTGATGTAG
+AACTAGTGATCCTCTTGCCTGTGCTGCTTGGGTGCTGGGATTACAGAAAT
+GTGTCTCATGCATGCAGCAGCAACCTGTTGCATGACAGACAACATAGGAA
+CAAATGTTTGCTCAACTCATAGGGAAGCATTAACAAACTAAAGTATAGAT
+GCCTCTAAAATCCAATTTGGTGAACCAGTGGATGCATAAGGGTTACTTAC
+AGTGGTATGGGTGAGTGGTTCCTTATAGGATCATGGATGACTCAAAAGGC
+ATCACCAAAATCCCACCCTGGCATGGGACACAGCTCACTAAAGCTAGAAC
+CCTGGAACTCTCTGCGCAACTTAGACTTCAGCAGTTCAGGAATCCCCCTC
+CCCTCAGCAGTCCTTACTACTTATATAACCCAGGAGTCTTAGTCAGGGTT
+TATATTGTTGTGATAAAACACTGACTAAAAGCAACTTGGGGAGGGAAGGG
+TTTATCTCAGCCTATCCGTCTACATCACAGTCCACTGAGGGAAAGGAACT
+GATACAGAGATAATAGAGGAGTGCTGGGGGGATGGATGGCTCAGCAGGTA
+AGAGCACTGACTGCTCTTCTGAAGGTCCTGAGTTCATAGCAACCACATGG
+TAGCTCACAACCATCTGTAATGAGATCTGATGCCCTCTTCTGTGGTGTCT
+GAAGACACCTACAGTGTACTTAAATAAATAAATTAAAAAAAAAAAAAAAG
+AGTAGCTCTGCTTACTGGCTTCCTCTCATGGTTTGCTCTCCTGCTTCTTT
+TTTATATGCCAATTGTGCAGTTCCTTTTTTTTTTATATTGGATATTTTCT
+TTATTTACATTTCAAATGTTATCCCCTTTCCCGGTCCCCCCCCCCAGAAC
+CCCCCTATCCCATCCTCCCTTCTCCTGCTTCTATGAGGGTGTTCCTCCAC
+CCACCCAGCCACCCACCAACTCCCACTTCCCTGCCCTTGATTCCCCTATA
+CTGGGGCATCTATCGAGCCTTCATAGGACCAAGGACCTCTCCTCCCATTA
+ATGCCTGACAAGGCCATCCTCTGCTACATAAGCAGCTGAAGCCATGTGTA
+CTCCTTTGTTAATGGCTTAGTCCCTGGGAGCTCTGGGGGTCTGGTTGGTT
+GATATTGTTGTTCTTCCCATGGGGTTGCAAACCCCTTCAACTCCTTCAGT
+CCTTTCTCCAACTCCTCTATTGGGGACCCCATGCTCAGTCCAATGGTTGG
+CTGCGAGTATCTGCCTCTGTATTTGTAATGCTCTGGCAGGGCCTCTCAGG
+AGACAGCCATATCAGGTTCCTTTCAACATGCACTTCTTGGCATCTACAAT
+AGTCTCTGGGTTTGATAACTGTATATGGGATGAATCCCCAAGTGGGACAG
+TCTCTGGATGGCCTTTCATTCAGTCTCTGCTCTATACTATCTCCATATTT
+GTTCCTGTGAGTATTTTGTTCTCCTAAGGAGGACTGAAGTACCCACACTT
+AGGTCTTTCTTCTTCTTGAGCTTCATGTGGTCTGTGAATTGTGGTCTGTA
+TCTTGGGTATTTGGAGCTTTTGGGCTAATATCCACTTATAGGTGAGTGTA
+TACCATTTGTGTTCTTTTATGATTGGGTTACCACACTCAGGATGATATTT
+TCTAGTTCCATTCATTTGCCTAAGAATTTCATAAATTCATCATTTTTAAT
+GACTGATAGTACTCCATTGTGTAAGTGTACCACATTTTCTGTATCCATTC
+CTCTGTTGAAGGACATCTAGTTTCTTTCCAGCTCCTGGCTATTATAAATA
+AGGCTGCTATGAACATAGTGGAACATATGTCCTTATTATATGTTGGAATG
+TCTTCTGGGTATATGCCCAGGAGTGGTATAGCTGGGTCCTCAGGTAGTAC
+TATGTCCAGTTTTCTGAGGAACCGCCAAACTGACTTCCAGAGTGGTTGTA
+CAAGTTTGCAATCCCACCAGCAATGGAGGAGTGTTTCCCTTTCTCCACAT
+CCTCACCAGCATCTGCTGTCACCTGAGTTTTTTATCTTAGCCATTCTGAC
+TGGTGTGAGGTGGAGTCTCAGGGTTGTTTTGATTTGCATTTCTCTGATGA
+CTAAGGGTATTGAACATTTCTTTAGGTGCTTCTCAGCCATTTGATATTCC
+TCGGTTGTGAATTCTTTGTTTAGCTCTGTACCCCATTTTTAATAGGGTTA
+TTTGGTTCTCTTGAGTTCTTTGTATATATTGGATACTAGCCCTCTATTGG
+ATGTAGGGTTGGTAAAGATCTTTTCCCAATCTATTGGTTGCTGTTTTTGT
+TGTTGTTGGGTTTTTTTTGTTTGTTTTTGTTTTTGTTTAAGGTCACATTT
+TTAAATTCTTGATCTTAGAGCATAAGCCATTGGTGTTCTGTTCAGGAAAT
+TTGGGGACAGAGAGTCATTGTGAATCTGATAAACTTCAGGGACTTCCTAA
+GATTTCTGGGCTGTTTCCTTCCTTGAGTCTCTTTGTTTCTTTGTTTGTTT
+TTTGAAATGGAGTTTCTTTAGTCCAGCCAGTCCTTAAATGTACTATTTAG
+ATCAGGTTGGCCTTGAACTCACAGAGATTTCCTACTTCTTTCTCCTGAGT
+ACTGGGATTAAAGGCATGTGCCACCTTGCTCAGCCACTTTCTGAGTCATA
+ACAAGCATTCCTTCAGAAGTCCCTGACTCTGAAAAAGCTTGCTATACAGC
+ACAATCTTTTCTTTTTCTTTTTTTCTTTATTTTATGTTGATTGGTTAGTT
+TTGTGTGTGTTATGTCAATGGGTATGCATTGACATAGTGCTTGTGTGTAA
+GTCAGAAGCCAACTTTCAGGTGATGGTTCTCTTTTTATGATGTGTGTTCT
+GAATATTGAAATCAGGTTATCAGGCAGGCCTGCTGTTATTTGCTGACTAC
+ATGTAGAAAGTATATTTTAGATATGTCTGTTGAGTAATTAGCCTTACTTC
+TTTCTTATTATATTGATGCTACTGGAAGGAATCCTGAGCCCATGCTTAGA
+GCTGTCATTAATTTCTGTTTTGGCACTGCTAAGACTGATGTCTGAGGGAA
+AGCCTCTAGTGTCAGACTTTCCGCCTTTCATCCTAGTTTATTACTGTATT
+CCTTTCCAGTAAGGTCTATGGCATTGCTGAGTGACATGTCTTTTTGCTCT
+GTATGTGTTGAGAATTATTGATCCCTTTTCTTTCTCTTAGGTTTATGGAT
+GTCTTTCAATGCCTTTTGAGCCAGAGTCTCATTGTGTAGATCTAGCTTGC
+CTAGAACTCATAAAGATCCCCCTGCCTCTGCCTCCCTGGTGCTGGGATTA
+AAATCACATGCTAGAATTTTTTAATTAGAGGTAAAAGAGAAGGTGACTGT
+ATAGATCATAGTCCTTTAGATTAAGATCCTAGTACAACAGTCTGCTTTTG
+ATGTTTTAGAAATGGCATGCCCCATTCTTAAGTCACCAGAATCACTTTCA
+AATATGCCTTGTGTTACAGGTTTGAGATACTTTAGAGTTTTCCACTCAAG
+TGGCCCACCAGCCTCCTCTTCTAATGGGCGGGCTTAGCTTGTTGGATCAT
+GACATACCAAACAGGTTTTCTCAACTAAACTTACTAAAATACTTTACAAA
+TAGTACCCAGATTGTGATACCAGTATTCGGTTTCTTGCAACAGAAAAGCC
+TGTAAGCCTGTGGAATCTAGGGAAATGTAGTTGGACCTTTATGCACATTG
+AAAGTAAGATTGAAAAGAAATAAAGGAGATGTATTGACTTGCTGGGTTTT
+CAAGGCTTCAAGGATGCTATCTAAAAGTAAATCTACCTTTTTACAAAGCA
+TATGATTACCTGAGATTTGAGAATCCTAGACAGTATCTTCTAAAGATGTT
+TCAATGAAATCTTAAAAAGAAAAGGGGATCAGTACCAATATGCTGCTCAC
+CACAGTCCACCTTTACGCCGATATTCTTAATTTTTATGAAATGTTCTTTC
+TTGTCCTAAAATTCCATTTAGATTGCTCTTTGCATTTTGTCATCAGTTCT
+CCTAGATCTGTCATGACGATGACAACCTCTCTCTCTCTCTCTCTCTCTCT
+CTCTCTCTCTCTCTCTTTTTTACTTAGAAATTCTCCAGAATTTCAGAAAC
+TCTTGGGCATCGCTATGGAACTGTTTCTGCTGTGCAGTGACGATGCGGAG
+TCAGATGTCAGAATGGTGGCTGATGAGTGCCTCAACAAAGTCATCAAAGT
+AAGCGCCCCATAATGATGATAATGGTGATGCGTGCTCCTGTAATTGTCAT
+GCCTTAAGAGACAAAGCTCCAGATACCTACATTTTTTCCATTTTGGGCAT
+GTGGCTTGGAGGACCTGGGGTATTTTCCCATAAACAGCTAAATGTGTTCC
+TGCGGAACTTTTTTTTTTTTGCACCCTTATAGTTCATAGGTCCATTCTGA
+AGGGCATCTGTGTGACCACTGGCTGCCTATTTCTTAAGAGGAATGTCTCT
+ATGGGCTGCCACTTTTGTTTGGGCATTGCTTGGAGAACCTCAGAGCCCTG
+GGGGCACAAAGGTGGGTGTGGGGGGAATGAAGTCTTGCATGCCTTCCTTC
+ATTTCTTTTCCTGCCCCTCTTAGTGGAGGTCAGACAATACTGCTTGTTGG
+TGAAGATGTAGCTTCTCAGGTTAGGTTAATTGGAGGTAGCATGACCTGAA
+TGGGGAGAGAGGAGAGTACAGACAGGAGAATGGAGCCAGGAAGCAATAGC
+CACTCAGGTGCAGTGCCAGAAGAGGTGAAAAGGCTGGGCACCTGTCTCAC
+TATGTCATTGCTTCCTAGTACTGAGTGCAGACCACTGCAACTCCAGTCTG
+TTTCTTTGTTCAGTTTTCTGCTATGAAAGAAGATTGAAGTCCTCCTTCCC
+CGCCCTCATCCTAATGCAGGTGGGCATTAGGAAGGGCAGAGAGAACAATG
+TGACTCTAAATTGACCATATCCTTGTTACTTCAAGTGCAAGCAGCTTTTC
+CTCTCAAACCTCTCTTCTGCCGTGGCTGATGCTGGTCAGGTTGGCTTGGT
+GCATGTTTGAGGCCACAGACCACTGTTGTAGCTTGCAGCTTTATGTTCTG
+GATCTTTGGCTGCCTATCTCCACTTTCTTCCTGTTTCTTTGTGGTTTCTA
+GTCAATGTCAGGAATTCCAGCTTTTACAGCACAGCCCTGAGTCAGGCCAA
+TCTTCATTCTTTCCCTTCTGCTTATAGCTGCTTTCATGTTTCTGCCTTTT
+CAACTGCTGCAATGTTCAGGATAAGCCAGTAAGGTGGACCAGGATCCAGC
+TATTCACTATGATAGAGGAAAGGCGAGGCAAGATGGGAGAATGGGATGGC
+TTCAGGCCAAGTGTAGGGAATGGTTCACTTTCTAGTCTGGGACTTTTCTT
+TTCTTGAAAGAATTAGTGTATGTACTATAAAGACCAATTTCTAGCCCCGA
+AAATGGGCAGATATTACTTGTCTTTTATGTTCAAAATACATTGAGTTCTG
+ATAGCCAGGATGTGTATTCCTGCTTTCAAGTCTTGGGTGGTAAAGTGAAT
+GTGTCCAGAACTCACCCATGGTATAAAATGCAGAGCAGAAAGAAGGTAGT
+AATTTTTTTTTTTCCCAAGAAGATAGAAGTGGTATTCTGGAGTCAGGAAA
+GACCCTCCCATTTTTACCTACTACCTGGAGGTTTGTCTCAGAGAGGAGAG
+CAGGCCCTTTTTAGGCTGTGAAAAATGGTGTGTCTTGAGGGTCAGTTTAA
+GTATTTTGTGTCTTGGTAGAAAATGAAGGCCTCTTGCAGATCACTGGCTT
+TGTGTGGAAGCCAGTTTTGATAGGGAATGAAAAGAAGGACCCCAGCTGAG
+GGGAAGGCATGAGCTAGGATGGTGTCAGAGTGTGTTTTGTCAGCTATGTG
+TGGAGGCAGGTTGGGAGTTGGGGGTAAAGGAAGCTATAATGAGCTCTATT
+ATCACCCCATGAAGGACTCATGGAAGAGCCATGCTCTACCCTTAACAGAG
+TTGAGCTTTTGTTTTCTCTACTACAAAATAAAGTAGATTTTGGTTCATAA
+ACATTTATATAGCTCAAACAGTATTTGATGTACTCTTAAATTTCATTTGA
+AAGTAGTCTTTTAATGTTTGTCAGCATGGTTTGCTTCTTTTGTTCAAACT
+ACAGCAGCAGAATCAGGGTCTGAAATTTCCTGGTGGGGCCAGAGAGATTG
+GTTAAGAAGAGAAGTTACTGCTTTTGCAGAAGACCTGACTTCAGTTCCCA
+GTACCATAAGATGTCGTATAACCACTTGTAACCCAAATTTCAGGGGGTCC
+TGTGCCTTCTTCTGACCTCTGTATGTTCCTGTACATATACATACACTCAG
+GCACACATAAAATGAATTTTTCAAAAAAAGTTGGTAGAATGTTATACTTC
+TTCTGAATCAATTTTCCAAAGTGTGCCTCTGTTTTACCTTTGAAACTCAT
+GGCCCAGTGAATGATCCCTGTGTCCTAGTTTCATCTCTTTGGCTATGATA
+AATATCCTGGCAAAAAACTGAAGGGAGAAAGAACTTGATTTAGAACTTCA
+GGTAACAGTCCATCATTCTAGTGAACTCAAAGCAACTAGAACGCAAAGCA
+GTTGGTCACAGTCGCAGTCAAGGGCAGAAAGGAAAAGAATGTGTGTGTGC
+TTGTTGCTCAGCTGTCTTCCTCTACTTTAATCTGTCCTAGGTCTAAAACT
+TAGGCAGCGGTGTTACTCATTTTCAGCATTGGTCATCTCACATCAATTAA
+GGCAATCAAAATAGTCCCTCACAAATATGGCCACAGGACAACCTGATCTA
+GACAGGATCTTAATGAGACTATTCCCAGGTAATCTAGGTTATGTCAGGTT
+GACACAGCTAACCATCTCATCTCTCCTTGGTCTCTAGATATCAGGTTTGC
+ATGTGCCTGAAAAGGAGAGCTGGGTCACTTCCTCACATTTTTTTGACATG
+CCATTTTAGAAGAGAAAGTTCTTAGGGACAAGAATAGGGTGATTTTCTCT
+AATGTGGACTTTATGCTATTAGCCTTGATTGTGGCTCGAAGTCAGATACA
+TGAACCTTATGTTTTAGTTAGAGTTTTATTGCTGTGAATAGGCACCATGA
+GTACAGCAACTCTTGTAAGGAAAACACTTAGTTGGGGCTGGGTCACAGTT
+CACAGGTTTAGTTCATCATCATGGTGGGAAGCATGGCAGCATGCAGGCAG
+GCATGGTGCTAGAGAGGTAGCATAGAATTCTATATCTGCACTGGCAATAG
+GAAGAAAAGACTGCCATTGGCCCTGTCTTGAGCATCTGAAACCTCAAAGC
+CCACCCCCAGTGACACACTTCCTCCAACAAGGCCCCACCTACTAATAGTG
+CCATTCCCTATGGGCCTGTGGAGCCATTTTCAGTCAAACCTTCATACCTT
+ATTTCTGAGTTAGCCCCAAAGTGATCAGGAAAGGTCAAGAAATGAGCTCT
+GAGGACTGTAAGACTTCCTAGCTGACACTGAGACCTGGGTGGGTCAGGGA
+GTTGGGGCTGTGAGCATCCTCATCATTTTGCAAGTTTAGAACTTCAAAAA
+AAGATGATTCTGTTGTTACATGTTCCCTGTCAGAGAGAAGGGCTCAGATC
+CTACAAGTACAATCCTCATATCTTGCTTACTTAGAGACTTTCACCTGAGC
+TGTCCAGGTGAGCCAGTGAAGACTGTGTGCTTACTCTGAAAGCTTTAGGG
+TTAATTTTAATTTGATATTATATAGTATTTGATTACTATAAATGTTATCT
+TCTGTTTTTATCCTGGTAAAGACCTGTATTTAGAATATTCTGTAATTTTT
+ATATGTGTTTACTTTTTTCTTAATATATAGCTTCTTATTTAGTTGCTGTT
+AATGTTACTTTTCCTTAAAATTTGAACTTTTGGTTACCATTTAGCTTTAT
+GGGTAGAATTTAGATCTATAGGTAGAATGTATCTTCTATAACAAGTCTCT
+TCTATTTCTTTGCAGGCTTTGATGGATTCTAATCTTCCAAGGCTACAGTT
+AGAACTCTATAAGGAAATTAAAAAGGTGGGTGTTTGCTCTGCATTATTGA
+GAAGATGATACTGTTTTACTGTTGAGTACCCTATGAGATTTCTAACTTGC
+AAGTTATTAAATAACACTGTTAGGAAGAAGTGCCATTTGGTGAAGCAGAG
+TTTAGTTTTCTTTAAAAACGTACTCCTCATTTTCATTAATTGAAATAGAA
+ATTTATAGCACCACCTTAAATTTTTTAAAGATTTTTTTTTGTTTTATTAT
+ATGTGTATGAGTTGCCTGTGTGTACATCTCTGCACCGTGTATGTGCAGTG
+CCTTTTGAGGTCCTCTGCAAGAGCAGCAAGTGCTCTTAACCCCTGAGCTG
+TAACTCCTAGCAACCAAGCAACCAACCAACAACTTACTTCTCTTCTCTCT
+TCTCTTCTCTTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTCTC
+TCTCTCTCTCTCTCTCTAGCATGACCCCAGCCAGTTCTGAGGTTGAAGGC
+AGGTTTCCCCCCCCATCTACTTTTATAATATTTTAGTTACATGCAAGTAA
+TAAGGTTAAGCAGTACAATAGACACAAACAGTCAAGGAACAAGCTAGGCA
+ATAAAGAAAGTCTCTTGATCACTCCCGTGATGACTGTTTCTAAGGGCTTA
+TCTGGATGACCAAAATATCTGGGCCTACTTCCCTGTCTTAGCTCAAAGTC
+ATATTCATGCCTGAAGCCTGTTCTAGCCTAAAATTACATTCCTGCCTGAG
+CTTACTTCCTTGTCATGGCCTAGTGTCAGATTTCTGCTAAGTGGTCCCAA
+AAAGCTCTCCACATCTCCCCCTTTTTTATTTCATAAACAAGACTGCACCT
+GTCTTAGGTGGTTCTAACAAGAATGCCTTCCTTACATGTCGTGGAATATC
+TATTATCAAAGTCGTGCATTTCTGTCTTAGGTTGGTAAGGCTCTGTGCAG
+AACTTACCCGTCAGCGTCAATGGCTGCCGGCCTATTAAATTAATAATTCT
+GTCTGGGGGTTCATTTTTAGTTTCAAACCATGTATTTTGGCCACCAACAT
+GTTGATGCTATTAAAGGCAAGTTTTATTACAGTGGGCAGGAATAAAAATA
+TTCCCAGGACAAAAAGGGCTATCATGATCAAGCTATATGTGCCATTCTTG
+AAGCTTGACCAAGATGGAAATACTGACATATATTCATGAATAATTTTATC
+GACAATACCTGCAGCGTCAGAGCTCAGCAGAGTACCATTCTTTAAATTCA
+TAATCTCAATATGCAAAATTAAAACATCCAGAGAGGTGTTAGAATTATGC
+CAAATACTCTTCAAGTGTCTTTCAATTTTCCCAATTATATTGACTCTCAT
+TGTAAATTTTAGAAGTAACACAAATCCCTTGGTATTTCGCATAACACTTG
+AGATGGCTTCTTACTCTTAAACTCTAAACCTCCTCTCATAATTTGAATAT
+TATCAATAAAGAGCATAAACCATTGTTTCGGACACCTATCTAAATCAATC
+TTTCTTTCGTTCTTCCTTTTCTTTTTTTAAATCTGTATTTTTATAATTGA
+AGCCCTCTGGATTTTAAAAATCTTTGAAGTAATTACCATCTATTTCACAC
+TGTTAATTTTGACTTTCTCTGATTAAATTAACAGAGAAAGAGAATGTTTA
+ACTCTCCAAGAGAGGATGCAAGCAGCTCCAATATCATAGACCTAGCTGTA
+GAAAGTGCATGTGACTCTTAAATGTAACACATCACTTGTTAACTCCACTT
+AGAATGGTGCTCCTCGAAGTTTGCGTGCTGCCCTGTGGAGGTTTGCTGAG
+CTGGCTCACCTGGTTCGACCTCAGAAGTGCAGGTAAGTTGTACCTCTGTA
+TTATTTTTAAGATTTGTTTGGTAAATAGCTAGTCCTGCCTGTCTTTTTTG
+TTCCAGTGCATATGTCTACACCTTGAGACATCATTCTTGTCCACTCTGTG
+TTGCCTAGCTATGTCCTGTCTGGTTGCTGTCAGCATTTTGTTCTTATATT
+TCTTTCCAAAGACCCATCTCTATTAGGAATACTCTTATGTCCTCTATTAA
+TGTGCTTCTTTCTTGTCCCATCATTCCCCAAGAGACTTGTGGGATGTATT
+TCCAGAGTAACATAGTCTCACCATCTTATTCTGTGCTTCATTTCCAACAC
+GAATGTAGTGAGCACTCAATGTGTTGGTTAAGGAATACTTACTGGATGAA
+TGACAACTGTCCCTGATCCCATCAGCAGCAGAGTTAGCAATTATTGAAAA
+ATAATCATTTGTATAGTACTTGGGTTGGAGAAGTAGATGAACTGTGCATA
+AAATTTGAACTTGTTGATTGTCTTTGACTCCTATGGTGTTAGGTATTAAA
+TCCAGGAGCTCAGGCATGCTAGGCAATTGCTTTACTGCTGAGTCTCATTG
+TCAGCCCACACATCTGGCTTTGTGGCTTGAGTAACAAAACAGGATTGTAG
+TTATACCGTTCTTTCCTTTTTCTCCTCAGTTGGTAAAATGTACTGCGTAT
+GTCCCAAAGACTGTATCCGTGAGAAACATAGTAGAGTGCTCTAAATCTTC
+ACACTAACAAGGAACAATGATGTGTTCAATTTAGGTTAAGTTTCAGTAAG
+AATTTTATGTGGCCAGAAAGATGTAACCACGAAGGAAGTTTCTTTCTATA
+GAACTTTTACTTTTGCTCTGCTGATATGTTTATTTTTGTGTTGGCTTTGG
+TGCTAAGGCTAGGCATGCCTAAGTGTGCGCTTCCACACTTCCACATTTCC
+AGCTCAAGTGCCTTACTTTGAAAATTGTCATATTATTGAGGTAATACATC
+CCAGGTTTACAACACAGCAGACAAAATAGCTGTTCAAAAGTATGACTGTC
+CTGTCACAGGCCCTTCCTTAAACTCTCTTAATTATTCCCTAGTGTTGTTT
+AGATAAAATCTAGACTTCCCACTCAAGGCAAGAGATAATTTAGCCTTTGC
+TGGCCCTTTCCTATCTCATCACTAAAGAAATACTATATTGATTCATTGAT
+CCAATTTAGTTCTTCAAATAGGTTTTATTTCCTCAAGATAGCTTGATGTC
+TCAGAAAATAATCACCTCCTTCTGCAAGCATTCTAGCCTCCTTATCTTCT
+AAGTTGTAACTGATTTCTGCCTAACAAAGATGGCTGTAGTATCTGTAGGC
+CTAATTGCTATATAAGAGGGCCTTGGCCCCACTATAGCAGACACCCTACT
+CCATCCTCGTTTTTCTGTCTAAGGTAAGGAATTGTTGTTTGAAGTCCCCA
+TTTAATTTGACCACTTTTTTACCCCTAGAGATTGTGCAGCTCTCCTGGAG
+AGCATAAAAGAAACTTGTGTTGATTTCTAATTAAAAGAAGCTGGGTTACC
+AAACAAAAGTCCCATAATGTCTGGCATGCCAAATTAATGGGTTTATTTGG
+CTTATATACAAAAGCACGGACAACTTATAAGTAGCTATGCCTTTGAAGCT
+TCACCTCCAGTTAGTTTGCCTTTTACATAACTCAGTCCTTCTAAGGTTAT
+GTATCCCTGTAGTACAAAGGAATGATGATTGGAATCTTGGGGTCTTATGA
+CATTTTCTTTTTCTCCCCAAAAGGGAGTATTATCAGACTATCCCATAGAC
+CTAGGCACCTGCTCTGTAGTTCTCCTTGCTTTGTTGTCTGCTTGTGGCTC
+CAAGGTCTCCATATAAGTCACCACAGCTACTCTGCTTCATGGTAGGGATG
+GTCATGTCAAGTGTACAGGAAACAGCTAGCCCACAGTAGCTTTTAACATG
+ACTTTTTCAGTAAAATGATGTCCACATTTTATTTTGTTTTTCAAAAGTTA
+CTGTATAAGGGTACATACCAGTACCTGTATATAGAAAGGTAAGAGGAGTG
+TCATTCCTGGGCATGGTTCCTCAGGAGCTGTCCAGCTTGTTTGATAAGAT
+AAAGTCTTCACTGCCCTAGGATCTGCTGATGCGACTTGGCTGGCTGCCCC
+TTAACCTCAGAGAATCACTGGTTTCTGACTTCCTAGTGCAGGGATTACAA
+ATGAGCACTACTATGTCTGGGTTTTTATGTGGATGATAGGGATCGAACTC
+ATATCCTGTGCTTGGTGCATAAGCTATCTCCTTTGTTCCCTCCAGTTTGT
+GTTTTTAAAGTAGTAGTTTTTAATCTATTGTTTCCTATTTAAATTATATT
+TTAATAATTGCTATAATTTGTGACAAATTTTTTAAATATTTATCTTTAAT
+ATTTTTTTACAGTCCAGTCTTTATCCCTCTCCTGATCTGCCCTCCCAAAG
+TTTTTCAAACCCAAAAAAAGAAGAAAAAGAAATTGGAGAGATCGTACATT
+AGTAACTTAACAGAATACCTGAAAGCCCTAGAACAGAAAGAAGCAAACGT
+GTCCAAAAGGAGTAGACAGGAGGAAATAGTCAAACTCAGGGCCAAAATCA
+ACCAAAGAGAAACAAAGAAACTGATAAAAGAATCAACAAAACCAAAAGCT
+GGATCTTTGAAAATCAACAAGATAGATAAGCCCCTAGCCAAACTAAGGGG
+CACAGAGACAGTATCCAAACTAACAAAATCAGAACTGAAAAGGGAGACAT
+AACAACAGAACCTGAGGAAATCATCAGGTCCTACTACAAAAGCCCAACAA
+AACTCAACAAAACTGGAAAAATCTAGATGAAATGGTTGATTTTCTAGACA
+TATACCATGTCCTAAAGTTAAATCAAGATCATGTAAACTATCTAAACAGT
+CCTATATCCCCTAAAGAAATAGAAGACGTCATTAAAAACCTTCCAACCAA
+AAAGAAGCCCAGGGCCAGATGGCTTTAGTGCAGAATTCTACCAGACCTTC
+AAAGAGGAGCTAATACCAACACTCAGCAAATAATAATTTAAATCCAACTT
+TTTAAATTACATTTTATTTGTCTGTGTGTCTTTATATGTGTACCATGCCT
+TTGGGTGAGGATAACTTATAGTTAATTCTTCCTTTCCTCTATGTGGGTCC
+CAGAGATCAAACTTGGGTCTTCAGGCTTCTCCTTCTTTACCCACAAAGCC
+ATCTTGCTTGTCCCTACACCCAGCTTCTTAATATTCTTTGTAACTCATGG
+GAGAGATGACAGACAATTGAACTTCATCAGCATTTATGCCTCCTGTACTT
+GTAGTTGAGCATTGTGGTCTCCATTGAGGACTAATTCACCTATAAAACTA
+GGTTTTTTCCTGACAGGGAACCATGAGCTTGTTGTTTCTTAACAGAGGAG
+ACCTGAAGAATGATGAGTATTCCTCTTGCACATACAGGCCTTACCTGGTG
+AATCTTCTTCCATGCCTGACCCGAACAAGCAAAAGACCGGAGGAATCAGT
+TCAGGAGACCTTGGCTGCAGCTGTTCCTAAAATTATGGCTTCTTTTGGCA
+ATTTCGCAAATGACAATGAAATTAAGGTATGGCTGTTGCCTCTTGGCATG
+AGTCTTGTGTGGCTTTGGGGAGAAAGTCATTTGAGATTGCTTCTGGTGTC
+CTTTTGGCTTCACTGAGAGACATCTCAAGAACTTCTTTTTACTTCTGCTT
+TCCTTTCATGGGGTAAGTTGTCAAGGGAAATAGCTTATAGATGCAAATTC
+AAAGGCATTTCCCCAGAGTGGATTTAGGTATACTGGGTTGGCCACTTGAG
+CCAGCTAAGGAAAAGAGACTTCATAGGAAAGAGTGAAGAAGAGTTAATGG
+GCCTTGTGGGTGTGGGCGCCCTAAAGCCACCAGGACTCGAGTTTGGTTCA
+TAGTGCCCAGAAAGCAACTTATTACATAATTTGTGGGTTGCAAGATTCTT
+GGCTTTGATTTTATCTTTTTGAAAAAGTATTTTTTTTTTAATTTATTTAT
+TTATTATATCGTTACGGATGGTTGTGAGCCACCATGTGGTTGCTGGGATT
+TGAACTCCAGACCTTCGGAAGAGCAGTCGGGTGCTCTTACCCACTGAGCC
+ATCTCACCAGCCCCAGTTTTTATTTTTAAAGTATTTATTTTATATGTTTG
+GGTGTTTTGTCAATGTACTGTATACATGCCTACTGTTCTCAGAAGCCAGA
+AAAGTGTTGGATATCCTAGAACTAAAGTTATAGATGATTGTGCGCCACCA
+CATGGGTGCTGCAAACTGAATCTGGATCCTCTGAAAGAGTAACTAGTTCT
+CTTAAGCCCTGAGCCCACTCTCCAGCTTCTACCTTTTCTCATTGTTTATC
+TGTGTAAGTGCGTGTGCGTGTGTGTATGTCTGTCTGTCTGTGTGTCTGTC
+TGTATGAGCCTGTGTGTGAATGGAGGCTAGAAGAAGGTGCTAGGTGTCCG
+TCTTTATCACTCTCTGCCTGTTCTTTTTGAGGTTGAGTTTCCCTGAACCT
+GAGGCTTACTTTTTTTTTTTTTTTAAATTGGACATTTTATTTGTTTACAT
+TTCAAATGTTATCCCCTTTCCCAGTTTCCCTTCTGCAAACCCCCTATCTT
+ATCACCACCCTCACCCTGCTTCTATGAGGGTGCTTATCCACCCACCCACC
+CTCCCACTCACTCCTGCCTCACTGCCCTAGCATTCCTCTACACTGGGGTA
+TCAAACCTTTATAGGACCAAAGGCCTCCCCTCCTATTGATGTCAGATAAG
+GCCCCTTTAGCTCCTTCAGTCCTTCTCCTATCTGCTCCATTGGGGTCCCT
+GTGCTCAGTCTGATGGTTGGCTGTGAGCCTCTGCATCTGTATTGGTCAGG
+ATCTGGCGATACAGGAGATAGCTGTATCAGGCTCCGGTCAGCAAGCACTT
+CTTGACATCAGCAGTAGTGTCTCTGGGTTTGGTGTCTGCATGTGGGATGG
+ATCCCCAGGTGGGGCAGTCTCTAGATGGCCTTTCCTTCAGTCTCTGTTCC
+ACTTTTTGTCCCTGTATTTCCTTTAGACAGGAGCAATTCTTGGTTAATAT
+TTTGGAGATGGGTGGGTGGCTCAATCCCTCAACCAGGGGGCCATGCCTAA
+CCTCTGAATATGGTCTCAACAGGTTCTCTTTCTCCTTTGTGGGGTATTTC
+AGCTAATGCCATCCCTGTGGGGTCCTGGGAGGCTCTTGCTTTCCTGGCAT
+CTGCTGCTGCTGTCAGTGTTATTCCTCCCTCCTTGGAGGGGTGGAAGCTC
+CTGATGGTGCAGAAATGAGTACTGCAATACTGTCAAGAGTCTCTGTGATA
+ACTGCTGTCAGAGCCAGGGGACAGGTGTATACACACACACACACACACAC
+ACAGTGGTTGGTTCTGGATCTTTCCATGATATAGATGCCATTTGAGTAAG
+GTAATACTTTCCTTTTTTTTTTTTTTTTTTTTTTTTTTTGTATGTATCTG
+TAGCTGTACAGATGGTTGTGAGCTTCATGTGGTTGTTGGGAATTGAATTT
+TAGGACTTCTGCTTGCTCTGGTTGGCTGTACTTGCTCCGGTCAACCCTGC
+TTGCTCAGGCCCAAAGATGTATTTATTATTATTAAAAAAGTACACTGTAG
+CTGTCTTCAGATGCACCAGAGGCAGACATCAGATCTCATTATGGGTGGCT
+GTGAGCCACCATGTGGTTGCTGGGATTTGAACTCAGGACCTTTGGAAGAG
+CAGTCAGTGCTCTTATCCACTGAGCTATCTCTCCGCCCTCCCCCCCATAC
+TTACTAACTACTTCCTTCATGAACCTGTGACATTTAAGAGATCTAGTCAT
+TCTTCTGCCCATGTATCATTGCTGTGCTCTAGAAACAAATAGTGCCACCC
+TGTCCTACTTATCTTGGTTCTGTGTCAGAGGCAAACAATAATGCTTGCTT
+CCCTGGGTTTAGATTTTTAAATTTTACATTTGTTTTTTAACTGTAGAAGA
+GGTGAATTTGGCTCTAACACTTGTTTCTTTTTACAATAGTCCTGTATATA
+TTGAATATGTACTTTATTATGCCCTTATCAATAGTGATGGCTAATCGTAT
+ATGATTTTGAACACCTTTTTGTTTTCTAAACCTAATAATTATTGGTTGTT
+TCTGAAGTCTCAAACAGAAGTGCCATTTCTTACTCGTTAGCTTGCTCAAT
+AGATGGCCTATCTCCTTTGGCAGTTATCGTCCCACATCCTGCTTAATATG
+GCCAGTGATTCTTGAGTTTGTAAATTCTGTCATCCTGGAGACTCCTTTAC
+TGCTCTCCTCTTTCTGGCTGCTGTCTTCTGTCTAGGTTTGCTTCCCAAAG
+GGTTGTGCAGGAAGCAGTTGGGATTTGACATCCCTAAAAATTCCTTTGGT
+ATGCAGATCACTTTTTCCTCAGGAGAATTCAATCTTTGTTTTGTAGTACA
+GAACTGGAAGATCCTGTCCACACCAAGAGGCAGGATCCCCAAGGAGTTAG
+GTGTGTGGTGTAAAGAGGGACGCCTGTAAGGAGGCTGCAGCGGACAGAGT
+GTCTGGAGAGATGGGGTTCTAGGTCTTATGTAGTAGTGGAGTCTCCACAG
+AATGGACCCACAGTAAGAGGACACTTGCACACAGGCTGCCATAGTACTGG
+GGGTGATATCCTGAGAGATGGAACTAGGATGAAAATTGCCAGAGTCTCAC
+CCTGGTATGGACTGGGGGGGGGGGGGGTGTCGTCCTCACATAAAGTGTTT
+CTCGGTAAACAGGCTGACATGAAGGAATGGGGAAGGGCTAGAAGGTAGGG
+CTGAGAGGGTCCACAGGAAAGAGTAACTCTGAGTTTCCCTTTTTACCATT
+CTTACGTGTGTGTTTATTTCTATTAGCACTTTTATTGGTCTGAATATCAT
+TTGTGGGGTGTGTGGGATGTGTGTGTGTGCACGTGCGCACGCGCTATATC
+ATCATCAGCTAGCCATACAAGATATACAGAGACATACTTACAATCAGTTG
+CAGCCACCAATGAATGTATCACCAGTGGCCCTGAAACTGACAATGGCAGT
+TCTACTATGGAATGTGCTATGAAGTCAACAGTTAGTTAGGCTAAGTGTGG
+AAATAGTGAGTGAATGAGATAAAGGAGGGACATAGGCAAATGAAACAAAG
+ACAAGAGAGACAAGGTTAAATAAATGAAAGGAGAAGGTGGAGGGGTCAGG
+AAGAACATTATGGGCTTATTCTGGAGATTACTAGGAATATCTTCTCTGTG
+ATTTCTTAGAAAGTGGTATGTGGTATGCTGTCATGCCTATAAAGTTGCAG
+GCTTCCACTCACAGAGGCACCAGTCTAGGGAGGATGTTTTAGTACAGCAG
+CACTTCTGCAGAAAAGTCTTAGGCCAGATTATCACTGTATTTGTCTAGTG
+CTTTTTCCTTATTATTGTCAAGTTTTTTAAAAACTTTATATAGTGTTCTT
+ACTACCTCTGTTCTGGTATACCACGAGTAGCCTATAGACACTGAGACTGA
+CACAGTGAACAAGTTCCTGATGAATGTGTGTGTGTGTATTTCTTATTCAT
+GTGGGTTCTGAGGATGGAACTTAGGCCATCAACCTTGGCTACAAATTCCT
+TTATCATTTGACCTAGCACACAGCCTTCCTAATGCGATTAATAAGGAAGT
+AAATATACTGCTAATAAATACTTGGTGACTATTGAAAATTTGGTATTTTT
+TGCTCCATCTATGAAAAATGTGTCATCTTGTCACAGTTTTTGTCCTATAA
+GTTTTAGAATTCTGTGAAATGTGTAATAAGCTCCATGGGAGCTTCAGTTT
+TCATCATCTTGGTTTTGTTTGTTCTCAGGTTCTGTTGAAAGCTTTCATAG
+CAAATCTGAAGTCAAGCTCTCCCACCGTGCGGCGGACAGCAGCCGGCTCA
+GCCGTGAGCATCTGCCAACATTCTAGGAGGACACAGTACTTCTACAACTG
+GCTCCTTAATGTCCTCCTAGGTAAGAGAGAAAGGGCCTGCTGGCCCAGTC
+TTAGCATCTGCTCAATCTTCTAAACTACACTGACCCTTGCCATCATGATT
+AGACCATTTGCAGCTGCTGACTGCTAAATGTGAAGTGTGTAGGGGATGTT
+GCAAGCCCATAAATGGTCTCGGAGACTTTTCAGCTGCGGCTGTGTCTCTA
+GGACACCCAGCTGGTACACAACCTCATCCACCTTCCTGTCCTTCTGTATC
+AGAGGCCTGAGGCTATGCTTCAGCACGCTGTGGGTACTCTAGGGAAACTG
+ACATTCCCCTACCCCCTCTCTCCTGTCAAAATCAACATGAACAAGTCTTG
+CTGGAATGAGCATATGGACATTTGATACAACTCTCTGAATTCCACATGGA
+CATTTGATACAACTCTCTGAATTCCACATACAGTTCCACTCCCTATAAGG
+TCCTGCCAAGCTAAGGATATATTTTATGCTGCAAGGCTGCTTTTGATCTG
+AGTGTGCACAGCCTGTGTTTCTCAGTCTCGCTTATGTCACCTTTCCCTTT
+TACTGCTAGGTTTACAACAGGGCACTCCTGTAGGTCTCCCTTTTTCACCA
+GCATGTACTGTGGGTCTCTGAGCAGTGGACTGGCTGTTGAGCCCCTTTGG
+TTGTCTTTGCAGGTCTGCTGGTTCCCATGGAAGAAGAGCACTCCACTCTC
+CTGATCCTCGGTGTGTTGCTCACATTGAGGTGTCTAGTGCCCTTGCTCCA
+GCAGCAGGTCAAGGACACAAGTCTAAAAGGCAGCTTTGGGGTGACACGGA
+AAGAAATGGAAGTCTCTCCTTCTACAGAGCAGCTTGTCCAGGTAAGGGTG
+AATAGTGATAAGTTCATGTGGGACATGAAAGAAGTAGCATCTTTCCGCAA
+GTGCTGGGACAGAGGAAGTAGCTGGGAGATGGTGTGTTCCTTTTGCTGCT
+GAGGAGTCAGGAGATGTGTGTCCACAGATCAGGTATGAGTTGTTTGCTTA
+AAACAGGGAGCACACATGTTTTCCACAAAGGGCCAGAGTGTATGTGTGTT
+AAGCTTTGCATTCCAACTATCTTCACTGAACTCCCCCAGTGATGTAGTTT
+GAGCCACAAATAGCCCATAAATGTGGCCATATTCCAGTGAGACTTCATCC
+ACAGAAGCAGGCACACAATGAAGGTACAGTTTTGTATGCCTGTGACCCCA
+GTACTTGGGGTGTCAAGAGAAAAGTACAAGTTCTGCAAATGCCTGGCCTA
+TGTAGGAACCCCAAGCTCTCTGTAGCTGTACGATGAGACTAGATCTCAAA
+AAAGCCAAAATGGGGAGTAGAAAGCCAGATGTGGCTGTGGCTAGCAGTTT
+GCCAGTCAGGATTTAGGGGCATGCATATGCATACAGGTTGCGTGAGAAGA
+GCTAAAGCTAAGCCTTAAGGCAGCTTCCTGGGAGGCTTTCGCTCTTCCTT
+TTTTATTCTACACCAACCTTTAAAAAATAAAATGCATGGTTTTGGTTTTT
+TTTATTGTACATTGGTGTTTGGCCTGCACATATATCTTTTTGAGGGAGTT
+GGATCTACTGGAAGTTGATTTACAGACTGTTGTAAACTGCCATGTGGGTG
+CTGGGTTCCTTTGCAAAAGCAGCTCTTAACTGCTGAGCCATCTCTCTAGC
+CTGCATTTGTTTATTTTTTGCTTTTATCTTACCAACTAATGCTAGGGTTG
+GCAAACTTTGCAAAGTAAAAATATAGAGTGCCTGTGAAGCATTGATTTTA
+TATAGTGATTGTATAAGAATGGTTAAAATTGTCCAGGATATAATTATTTA
+TATTGCAAAATTATGTGTTATCTGAAATCAAGGTTTAAACTTGTGGGCTT
+TTTTCCCCTGGTAAATTTAAAGAAAAAACTAACAAACTCATTCTTTCTAT
+AGTATGGTATAGTATTAAAAACACCAAAAAATTTTGACTGCCATCCTTAA
+CATGTGTGGCTATTTTCCCCCTGGCATTCAGAGCTGTGTTTCTGATGATC
+GGATGTCCCCACTTGCTTCCATAGCAGTGTCCATTGGGATTATTGTCTTT
+TCTGTTCATCAGTTTTGGGAAATGAAGATCCTGAGTTTGCTTACTGGTGT
+TCTAGAGGAAGTGCTCTATGTATTTCCAAGGAGTTACTATAAATGAAAAT
+TAAAAACCATAGGAATTCAGAAAATAGCACAGACAATAATAACCCTACCT
+ATGGAAGTAATAGGTCTTTACAGGGAAAAACTAAGGCACAATTTTGTTGA
+CAAAGACCAGTGAAAACAAAAGTGAAATCTGGGATGCTTATGTATTTATT
+AATTTTGTTTTTGTTTTTAAGCATTTTAAGATTTATTTATTTTCTGTGTG
+TGCTCTGTCTGCATGTACACCTCTGTGCCAGAAGAGGGCATCAGATCCCA
+TTGTAGATGGTTGTGAGCCACCATGTGGTTGCTGGAAATTGAACTCAGAA
+CCTCTGGAAGCACCAGGGCAGCCTGGATTGCAGAATGAGACCTTGTCTCA
+ACAAAAGAACTAAAAACTTCCAATTCACTAAACCAGCAAATGCATTCTTT
+TCTATGACCTGATTAGCGCTTAGCTGATGATGGATGTTGTCTTTGTTGGC
+AGCTGGGGCTGAGTGACCCATCTCCTTCACCCCCCTGTCATTCCAGCACC
+TGCTTTCTCTTAACCGCTGAGCCATCTCTCCCGCCCTGGGATGCATTTTA
+AACATGATGTAAGACCTGTGTTTCTGCTCCTAGGTTTATGAACTGACTTT
+GCATCATACTCAGCACCAAGACCACAATGTGGTGACAGGGGCACTGGAGC
+TCCTGCAGCAGCTCTTCCGTACCCCTCCACCTGAACTCCTGCAAGCACTG
+ACCACACCAGGAGGGCTTGGGCAGCTCACTCTGGTTCAAGAAGAGGCCCG
+GGGCCGAGGCCGCAGCGGGAGCATCGTGGAGCTTTTAGGTGTGTTCTCAG
+CAAGGTCTTCTAACCATTGTGCATGGAGGCATGTTTCCTTCTGTTGCTTT
+ATGGGGCTGTACTGCGCTGAGCTACCCATGCCGAAATTCCTTGCCCAAGC
+TTACAATGTAGGCGTCTTGCTGCTTTTGCAAATAAATCTACAGTTTAGAA
+AGCTAGATGACACAATGAGGCCACACCTTTAAAGCTTGGTCTCCTGCCTT
+TCTGGCTTGTCACCTCCATTTTGGATGCAGTGAAATAGAAATATTAGGCA
+GTTTCCAGGACTCTCATGTTTGATTGTCAGGGATGAATAGATTTTTATGT
+CTTTTTTTGGGAATTTAGTGTTCTTTTTCTACTTGGATCCTGACTTTAGA
+GAACCCTTTCTATTCCTCATCCTTGAAGATACCTCTTTAACCTGGTCTCG
+TCTTTTTGATGCTCAAAGAGTTTGATCCATAGACTAGGCATTGGCAGCCT
+GACCTGTCTGACATGAGCTAGTCTTAGATGGTGGGACAGATAGGAATCTG
+GGCTTGCCAGCCTTTAGAAGTGACCTGGCATTTAGCAGGCTGTGACAAAT
+TCTGCTGACCCTGACTTATCATGGCTTGCCACAGTATATACATTGAGGAG
+CCATATTTATTATAGCTACACATTAGAGACAGTCTGCCTGGGAAATACTA
+TTGTGACCTTGTGCGCTTAAAAATTTGCCTGGACTATGAGCAGAAACTGT
+TTTACTGCTGTCCTTGTTAAAGAATTTTTATTTTTGTGGAAAGTATGTCA
+TACACCCTGGTAACTGTTTCCAATGAAAGCTTATGTCTGGCCTATGCTTG
+TCCAATAATGTGAGATCTTACAGTTTTAATTTGGCTTTTAAAGAGCAGTT
+TATATGAGCTTTTTTGACATTCTAGTCATATCTTTAAAACTGTGTATTTG
+AACATGAGTGTAATTTTCACCTTTAAAGTGTGACACTGTGGTGTTTAAAC
+ATGTCCTATGGAAATATGTCCACATTGTCTGTTTTAGGATTTGAGTTAAG
+CTTTTTGAGGATTTTTGAATTTCTTGCAGATTTTAGCAGCTTGTAATCTT
+ACTTTCTTGTTACTTTCTATGATTTACAGCTGGAGGGGGTTCCTCGTGCA
+GCCCTGTCCTCTCAAGAAAGCAGAAAGGTGATTATCTCAAAATCTGAGTC
+TTGTGTTGAGTTGAACTGCTGTTTCTGTGTTTGCATAATGCACTAGATTC
+TGCTTATATTTCCTCTCAGGAGATGAAGTGTATGGATATTGCTGGAATCT
+GACATTTTCTGCTGTTTAAAAATTGTTTATATCACATTATGTCTAATGTT
+CGAGGTCAAAGGTCAGCAAACTCTATAAGGGACCAGAGAACAAATATTTT
+AAACATGCAGGCTATAGACTCTTTTTTTGTTTTGTTTTTTGTTTTTTTGG
+ATTTTTGAGACAGGGTTTCTCTGTGTAGCCCTGGCCGTCCTGGAACTCAC
+TCTGTAGACCAGGCTGGCATCGAACTCAGAAATCCACCTGCCTCTGCCTC
+CCAAGTGCTGGGATTAAAGGCGTGCTCCACCACCACCCAGCCATGCTATA
+GACTCTTGCAGCTGTTTCCCTGGCTTGTGACTGCTGAAGAACAATGGTAT
+AGAAACTGCTGTGTCTAGCTGTTTTCCACTAAAATCTTAAAGATGCAGCT
+GGGCTCAGCATCACTTTAGAGAGTGCACCACTATACTTTAGAAAAGTAAC
+TTCTGTTTTTTGCTTGTTTCTTTTTAAACATTTATTTATTTGTTTTATTT
+ATGTGAGTACACTGTCGCTGTCTTCGGACACCAGAAGAGGGCATGCGATT
+CCCATTACAGATGGTTGTGATTCCCATGTGGTTGCTGGGAATTGAACTCA
+GGACCTCTGGAAGAGCAGCCAGTGCTCTCTCCAACCTAACTTCTCTTCTT
+GATGAATTTTATTATTAGGCATATTAAATAGTTTATTTTGTTTGGTTCTT
+CATTTTGTTCATGCCTAGTTATCAGTTTTCAGACATATTTAACTTCTTGC
+ATATGTGTTTTCTGACCTATTTTTATTCCAGAGTTTCTGATATGACCTGA
+TAGTTTTATATACTTGGTCACTTTGCAGCAGCTGAAATTTCATTTTATAT
+TATGAATTTCTGTGGAAAAGAATTGCTGTCATCTTTATTTTTAAAATCTT
+AAAAGATGAGTTTGTTTTCTTTGTCATAATTTGAGCATTTAAAACTTAAA
+GCTCAGTATTATTTGCTATGTTAAGTGAGGGTTTGTGTGTTTTTCCTTTT
+TTTAAGTTTTGAGACGGGGCCTTACTGTGTAGTGTGAGCTGGCCTTGAAC
+TCAGTATCTCTTCCTGTACTTCCCACTTGCTAGGATGACAGTACACCTCA
+CTTTTGAGTAAGTTCTTCCCAGGAAAAACATTGTAGTTGCCATTGAATTA
+AGAGAACATTTACTTGAAGAATTTGGGAGCTAGATGTTACCCGGAGGCTG
+AGACAGAAGTCTTTTGGGCAACTGGGAAGACCTTGCCTCTTAAAGAGAAA
+GCCGAATGTTTGATCCATTGCTGTAAGAAATACTGATTTTAGAAAGCATT
+GCCAATGTTTAAAGGAGAGTAGAATTCTAAGAAATATTACTCTCTATTCT
+TGATCTAGGAGAGATCACTGGGCACTTGGTAAAATCACTTTGATAATTTA
+CTCCACAGTCACTTTGTCCAGAGATGGGGACAAAGGTGATGTTATTGAGA
+TAAGTTCTCATCTTTACTATTTCCTGAATCTCCTGATGATTTTTTATTTA
+GACTGGTGATTTTAAAACTTTTTTTGTTATAATGAAGATTCTGTTTTTTT
+CAAGTGTTGTGTTGGCTAGTTTTTGTCAACTTGATACAAGGTAGAGTCAT
+TTTGGAAGTAGGAACCTTAGTTGAAAAAAATGTCCTCATTAGAAAGGTCT
+GTGGACAAACCTGTTATGCATTTTCTTGATTGATGATATAGAAAGGCCCG
+GCTTAAACTCTGGGCACTGCCACCCTTGGGCTGTTGGTTCTGGGTTCTAT
+AAGAAAGCCAACTGAGCAAGCCAGTAAGCAGCATTCTCCAAGGTCTCTGC
+TTCAGTTCCTGCCTCCAGGTCCCTTCCCCAATTCCCCTCAGTTTTTTCCC
+AAGTTACTTTTGGTCTTGGAGTTTTATCACAGCTATAGAAGCCCTAACTA
+AGACAAGTGTAATCTGCCAGATATAAGATAGATAAAAACAGAGTTGTGGA
+GCACATAGACCTCAGCAAAGGTCAAAGGGGGCCAGGAGTCTCTTCTCTGT
+TGGCAGCCCCTCTCCTTTTTCCATCTTCATGGGATCTGCCCTGGGGGAAC
+TTTTCCCAGATCATAATCAGCCACTGACTTGGAGAGTAGAAACTGCTTCA
+TTAAAATTCAAATTCACTGTTCTTGAGTTTTATTTGATTATTTTAAACCA
+CATGTTTTGTTAATAAAAGGTTCTGTTTGTATTTATGTCTAGTTGCTGTG
+TTGATTTTTGCATAGATTTGTGTTCTCTTTGCTAATTAGCTTGTGCCTTT
+AATGTTATATCATGTAATTTCATGGAAAGTATCACAGCTCTTATTACTTG
+AAGAACAGTAACATGAGAAGCTAACAGCTAGATAGTATCTGGTTTAGTTT
+TCCCGTGTATGAGAATATACCTGAAGTGAATAACTTCAAGGGAAAGATTT
+ACTTTGGCCACTGTTTCAGAGGTCTTGGTCTGTCACAGCAGGGCAGATTT
+GTTCGAGCAGCAACCAGAAGGCAGGTTGTTATACCTGTGTTGGTGTGCTT
+CCTCTAAGTTTTTTATTTCAACATGAGAACAATGCTACCCACACCAAGCA
+CCAGTCTTCCCTTTTAGTTAACCCTCTCTGAAAATTCCCTAAGTATATCT
+TCTTTCAATCAAGCTGACAAGTTCAAAGTGTAACTTGATGTCAATTAATG
+TTTATATATAATGTAACTGTAAAGATATTAAATCTGATTTTTCTTCCTAA
+TATAACTATATAAGCTATAAGGTATATTTCTAAAATTCTACTAGGAAATA
+TTTTGTCTTTTCAGATTTTTAGCTACTGTGTAGACTAAAAAGATAATAAA
+ATGAAAGTGACTTATTTATATGTTGGAGTTTGACATACAACTTCGTATTT
+GCCATGGATATTCCATTAGAACATGGATATCCCAAGGCCTGACTGATAGA
+ATTGGACCTTTTCAGTCATAAGCTACTCATTCATTTATTAACTGGTAGTA
+AATTATTTAACTACAACAGTAATCTAAATCAATAAAAAGTTATTATGTGG
+TATAGTTCAATAGTAATTACTTCTGCCTCTTAATTGGTTTTACAGTATTC
+TAAAAGTTACTCTTTTATCCATCCTTTAACATTGTAGTAATATTTAATTT
+ATGATGGATAAATTGTACTATGGTAAATTAAATATGCCTGTGATTTTCAA
+TTCAGAATATATGCTTATTTGATTTTTGTCTTTTGAGATAGAGCCATTAT
+ATAGCTTTGGTTTGGACTAAAACTCACTGTGTAGATGGGGCTGACCTCAC
+GCTCATAGAAACTGGCATGCTTTCTATCTCCTGAGTGTTGGGAGTAAAAG
+TGTGCACCATCATGCCTGACTATTTATTCAAGCTAAAAAAACCCAACATT
+ATTTTTAGCAAAACTAAAAAGGAATATTGCTGTATTATTTACTAGGCAAA
+GTGCTCTTAGGAGAGGAAGAAGCCTTGGAAGATGACTCGGAGTCCAGGTC
+AGATGTCAGCAGCTCAGCCTTTGCAGGTACTCGGTGGCAGCCATGAGCTG
+CCAGTGTCAGCCTCTAGTTATTATCCGCCATCTCGTGCTCCTTTCAGCAC
+CTCAGCCTGCACACAGCATTGCGAGCAGCTTTTATAATTCAGCTGCTTTT
+ATAATGTTCACTCTAAATGTGTTTGGCTATGTGCTTTTCTTGTTTTAGGC
+TATTCAAATATTGATTTATTATCCTTGAGCATATCCTTTTGGAGTGGATG
+ATAGATGGAGTTGTCTCCTGAATTAAATGGTCTATGATCAGGACAGTGGG
+TTGAAGAACTGTCTGGGTAATTTAACTTGAAAGGATATATTTTTGCTCAC
+AAGTGGTTACATAAGATTCTTTGTGTTTTCTATAAAACACAGGTATTATT
+TTAAGACACTAATAAATAGATAAAATGCAAACAGCTTTAGTTATGTTTGC
+TGTTAGGTAAATAAAGATAAGACAAAGATCCTTGGGGAGGAAACCTGAAT
+AATGTCAATGGATTTTCCCTGCTTACAAGATAAACAAGTCACAGGACAGA
+AATTTTGGGCTGCCTAGACTAGTGTAAAACTCAAGTGCCTCTGACCCAGT
+TTCCTTCATCACAAGCCACTGCTCATACCTTGTTCATGGTTTTGAAGTGA
+TTTTGTTTTTATATTTAATGTTTTGTTTTATAAGACAAACCATCCTGTCA
+GCATTCTGAAAGCTCGCTTTTATTGGTATAATCTCAGTGTTCCCAACCAT
+CAGAACCTTTCATTCCCTGCAATGTAAATCAACCCCCCCCTTTTTTTTGC
+AAGTTCTTCCAAACGTCTAAGGATGAATAAATGTTACATACTGGATTTTA
+CTATTATAGAATGGCACTGAAGTGACTTTGATCTCACATAGTGTTTGTAA
+GAGGGAATTTCAAAATTAAACTAGGAAAAGATGGAGTGTGTTTATCCTAG
+AGGAGGTTAGGATTGGAGTGGAGATGAACAACACGACTTGGAAGAAAGCA
+AGCTGATCCTAAAGGGTACTGCGCTCACTAGTGTTCTGTTGTTGCCTATG
+TAGAGTTTCTGGAAGTCTGCTTGTCCCTGCCCCAGCTGCTTGTCCCTGCC
+CCAGCTGCTTCTCAGCAACACACATTCTATGTGTGGCTTTAGAGATGCAG
+TAAGAGCTTCAGCTTGAAAATATTCACAGCCATGAAGAATTCACTTGTTC
+ACCCAGCTGAACTGTGCTCCTTGACTTTTTCTTCACTATGCTCCAAGCTG
+TTTAATAGTTAGAACATTCAATACAGTGAACTTTTTGTCATTTTGCACAG
+TTGGATTCCTTAGCTACAGTTTCCTCTGGCCATTTGACAACTGAGTTTCT
+CTGTGTCTCTAGCCTCTGTGAAGAGTGAGATTGGTGGAGAGCTCGCTGCT
+TCTTCAGGTGTTTCCACTCCTGGTTCTGTTGGTCACGACATCATCACTGA
+GCAGCCTAGATCCCAGCACACACTTCAAGCAGACTCTGTGGATTTGTCCG
+GCTGTGACCTGACCAGTGCTGCTACTGATGGGGATGAGGAGGACATCTTG
+AGCCACAGCTCCAGCCAGTTCAGTGCTGTCCCATCCGACCCTGCCATGGA
+CCTGAATGATGGGACCCAGGCCTCCTCACCCATCAGTGACAGTTCTCAGA
+CCACCACTGAAGGACCTGATTCAGCTGTGACTCCTTCGGACAGTTCTGAA
+ATTGTGAGTGGGCAGAGGGTGCCCTGGTTCTTTTGTCTTCTGAGCTTATT
+CTTGGATGCCCACACTTGGACCCTCCTGCTCATTTTTTCTGTGTTACTAC
+ACATAATAGTAAGAGGCCCCCAGCTCAGATGGTTAACAGAGAGCCTTGTT
+GGATGTCTTCACTGTAGAAATTGCCTAGTATCATTTGTATTGAGCCATGG
+AGATTAAAGTGAGGTTACTTATATGCACCTTGTACACATGATATATTTTT
+AATACCTGATTAGGCCTGTTTAAATAACTACTTTCAATTTTTCAAGGAGC
+TTGTTATTGAAAGTATCTGTGGTCTTAATGTGGGTGGTGATATTAGTACT
+CTGTATTATTTTTAGCACTTTTTGACCTCTCAATGTACTTATACCACATT
+CCATTTTAAAGTAGGATGTGCATATTTCTATCCCTGTGATGTCTGAGTTC
+ATAGACAGGAATCACCTTAAAGATTATATAATCAGAAAGTTTGGTGCAAG
+TGTGTGCTGAATTGTGGGGTATTTTTTGTTTGTTTGTTTGTGTGTTTTGT
+TTTTTTAAACTTGCTTGTCACTTTGTTTTTTTGTTTTATATTTCTGAAAC
+AGGGTCCTAGCCCAGGCTGACCTTAAATTTGAGATCTGCCTGCTTAAGCT
+TTGGACTCTTGTGAATGTGGGCATGAACCACACTTGGCCTGCATTCTAAA
+TAGTCATTTTCTTCTCCTCTTCCTCTTTCTCTTCTTAGTGTAGTAAAATA
+GCAAAATTATCTCATAGATCATTCCTACAGTTAAGTGGTATATTAATCAC
+CACCATACACCTCCATTAAGTCTTCATCTTCTGAAACCTGACCTTCTGTA
+AAGGCTGTGCCCTCCTGGAAGCCAGTGGTCTGTTTTTTATAGTACAAGTT
+TAAGGACTGTAGGTCCTTCATGCAGTATGTTTATCATGAAGTATTATCCT
+TCTATGGCTGACTTACTTAACATAATGCCTCCATGTAGCATGTGTGAGAA
+TTTTCATTTTTTAAGGGATGATTAATATTCCATTGCATGGATAGAACTAC
+ATTTTGATTATTGTCTCATCTGTTAGAAAACATGTGGGTTACTCTCACAT
+CTTGACAATTATGGATAATGTCACAATTATGAATAATAGGTCTACTAAGT
+ATTTCAAAGACTCTGTTTTCAATTCTTTTGGCTATACACCTAAAAGTAGA
+ATAGTTTCTACATCCAGCTTTAAGTAATGTAATTAAATGCTTAGCTACTA
+TAGAATATGCATATATCTTGATATATATGTACTATAGAATATAATAGTCT
+ATATAGAATATACATATATTACATAATATATAACCTATATATATTCTATA
+TAGAGCCTATACATAGGTCTTTTTGAGACAGGGTTTCTTTGTATAATAGC
+CCTGAGTGTCCTCTACCTACTTTGTAGACCAGGCTGGTTGAACTCAAAGA
+GATCCACCTGCATCTCCCTCCCAAATACTGGGATTAAAGGGGTGAGCTAT
+CACACCCAGCCTAGAATATTAAAAAAAAAAAAAAAAAAAAAAAAAGCTGG
+TCTTTGTGACCACACACTTTTGTAATCCCAGCTCTGGGAAGGTAGAAACA
+CCCAAATGGGTGAGCTCCAGGTTCACTGAGAGACCTTGACTCAAAAATAC
+CATAAAGAACAACTGAGGAAAACACCTGACATTGACCTCTCACCTCCACA
+CCCATGCTTACAAATATGCATATACCCACTGTTTGCTTGTGTTACCACTC
+CACCTCAAACCTCCTCCCTCCACCCCCCATACACATTCCACATACACAGT
+CTTAGTTCGGATTTTATTGTTGTGAAGAGACACCATGACAAAGCCAACTC
+TTTTTGTTTGTTTGTTTGTTTGTTTGTTGTTGTTTTGTTTTGTTTTTGTT
+TTTCGAGGCAAGGTTTCTCTGTATAGCCCTGGCTGTCCTGGAACTCACTT
+TGTAGACCAGGCTGGCCTCGAACTCAGAAATCCGCCTGCCTCTGCCTCCC
+AAGTGCTGGGATTAAAGGCGTGCCTGGCTACAACAAAGGCAACTCTTAAG
+AAGGAAACATAGGGCTGGAGAGATGGCTCAGCAGTTAAGAGCACTGACTG
+CTCTTCCAGAGATCCTGAGTTCAAATCCCAGCAACCACATGGTTGCTCAC
+AACCATCTGTAATGAGATCTGACACCCTCTTCTTGGGTGTCTGAAGACAG
+CTACAGTGTTCTTTAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGGAAACA
+TAATTGGGGCTGGCTTACAGTTTCAGAGGTTCAGTCTATTATCATCATGG
+CAGGAAGCAAGGCACCATGCAGAGCAAGTGGTGTCTCTTAAGTTCTGGGC
+TACCTTGATCTACAATTGAGTTCCAGGACACCCATTGGTACACAGATAAA
+CCCTGTGCCTGTGTCAAACCAAACCAAACCAAACCAACCAAACAAAAGAG
+GTTGTTAGGTCACATACACGTTAAAGATGTCCTAAGAGGTTTTAACTATA
+GGCTGCAGTTCTATCTTTGAGTTCTCACTGGGTTTACTTTGCTGTTCTTT
+CCAGCAACTTTTTTACCACAGAATCTGTCTGGGCATGGGAAGTATATATA
+AATTTAATGCAGATAACCTATTGTTAGACTTATCTGGAAGCCTTGTCTTT
+TTTTCTAACTTATTGTACAGTTTGTTCAGGAAGACAAGGTTTTAAAAATA
+TTAACTCATTGAGAATTGCATGCTTGTATTTTGAGGTTTACCCCTCATCT
+TTCTGACTAACTACTCCCAGATCTACTTTCCACTTCTCTTTCCAACCTTA
+TGTCTTTTTTTTTCAATTTTTTTATTAGCTATTTTCTTCATTTACATTTC
+AAATACTATCCCGAAAGTCCCCTATACCCTCCCCCCACCCAGCTCCCCTA
+CCCACCCACTCCCACTTCTTGACCCTGGCGTTCCCCTGTACTGGGGCATA
+TAACGTTTGTAAGACCAAGTGGCCTCTCTTCCCAATGATGGCCGACTAGG
+CCATCTTCTGCTACATATGCAGCTAGAGACACGAGCTCTGGGGTACTGAC
+TAGTTCATATTGTTGTTCCACCTATAGGGTTGCAGACCCTTTCAGCTTCT
+TGGGTACTTTCTCTAACTCCTCCATTGGGAGCCTTGTGTTCCATCTTATA
+GCTGACTGTGAGCATCCACTTCTGTATTTGCCAGGTACTGGCATAGCCTC
+ACAAGAGACAGCCATATCTGGGTCCTTTCAGCAAAATCTTGCTGGCGTAT
+GCAATAGTATCTGAGTTTGGTGGCTGATTATGGAATGGATCCCTGGGTGG
+GGTAGTCTCTGGATGGTCCATTCTTTTGTCTTAGCTTCAAACTTTGTCTC
+TGTACTTTTGTAATATCAAAAATGTTTACAAACAGAAATTTCTTTACGTT
+TTCTAGAGCTATAAAAGGTTGGTATGACCTTCTCCTGGGGGAGACAAACA
+AATATCTGATTACCACAGATAGGATACCAGTGACAGACCAAAGTAATGAT
+TCCACCTAAGTCTAGTTTGACAAGCCAGTTAGTTTATTTAACACTACTTC
+AAAGGAACACAAGCCACGGCTACCCACCAGGGCATGCGCAACTTATAAAC
+ATCTATACCATTGAAGAGTATGTTTATCCCAGCGATCATTAACCACTTAT
+ATATCCTTAGGAAGGAGCAGGGTTCCACAAGCCTATCCCCAGAATGTTAC
+TTCCTATCTAGTGCAGGCCTTGTCCAGGTGGCATCCCACAGCAAAGCTTT
+TCTTGCCATAGTAGTGAAGAGCTCTTGCTTGTTGCTTTTAACACATGCAT
+TTACCTGTGGCCACTGACTAGGTAATTGCCCTTTGCATTCTGTATGTGTT
+ACTGATGCAACATGGTCTTTGCATTCTGCGTGCTCAGCTCTGTTGGTGGC
+TTTTCCTTCATGTTGAAGGGCTTTCCCCTGACAGTCCCCCTTTATCTGTA
+CAGGTGTTAGATGGTGCCGATAGCCAGTATTTAGGCATGCAGATAGGACA
+GCCACAGGAGGACGATGAGGAGGGAGCTGCAGGTGTTCTTTCTGGTGAAG
+TCTCAGATGTTTTCAGAAACTCTTCTCTGGGTAAGCTCTTATATGATGGA
+AATGTTTTTAGCCTTAGACATCTTTATCTTTTCTTGTTTGTTTGTTTGTT
+TTTGTTTGTCCAGACGGAGTTTCTCTTTGCAGCATTAGCTGTCCTGGAAC
+TTACTCTATAGACCAAGCTGGTCTCAAACTTGGAAGATCTTTATGCCTCT
+GTCATCCAAATGCTGGGATTAAAGGCATGAACCACCACTAGCCAGCGAGA
+CTTTTTTATCTTTATTTCAAAAAGAAACCTTTTGGTATCATTATTTTTTA
+AATTGAAAATGGCATTAATTTTCATTTCAATTCAAAATGAAAATGGCAAT
+TTAGGTATAATCAGACTGATTTAAATTGGTACTTGTATATTATCTCTATA
+TAAATATATACATATTTTGATTGGACTTGTCACTTATTTATGATTTCTAT
+TTTTAAAGCCCTTCAACAGGCACACTTGTTGGAAAGAATGGGCCATAGCA
+GGCAGCCTTCCGACAGCAGTATAGATAAGTATGTAACAAGAGATGAGGTT
+GCTGAAGCCAGTGATCCAGAAAGCAAGGTGAGCTTCATAGGAAGGAACAG
+CTTGTGTGTGAGGGGTTGGAATTGTTCTGGCTTTTGCCAATTCCATTTGT
+TCCTAGCCCATCTCTGGCTTATTTCTTTCCCCTAGAAACACTGGACACTC
+CCAGGCCTTGTCTGTTTATGCCTCACCAGGGATACCCAAACTCTTAACAG
+TTGCATTAGTTCTGCCTCCAGAGACTCCTCCCTACACAGATGCACCCTGT
+GCTGAGCTCCACCCTGCCTCATTTGAGGCTTGTGCAGAGCTTTGCAGCAG
+TTCTTGTTTTTGCTCACTCAACTGATTAAAACACCTCTCTCTTTCTCTGT
+TTGCTTTACAAGCATATTACATATAAATTTACAGTTAACTTAAATTCCTT
+AAGGTCAGGAATATGTTTTGTCATTTAATTGTATCATCAAAATTACTTCC
+TTTGAGAGTGCTCTAGGTTCTTTTATATCCTTTCGACCTTTTTTTTTTTC
+AGACAGGGTCTCACTATATAGACCTGGCTGGACTAGAACTCATAGTTTTC
+AGTAGACAATTGTGCCTTGCCAATGTGAACAGCCTTTCCTGAATGCTATG
+TTTTAGCACCTTCCTGTCTAGTGAACCTTCTCCATACTAAGCTTGTCTCT
+TGCTGCATCCTACGGCCCTGTGTTTTAGGGTTCCAGTTACGTTCTGTTTG
+AGATCAGCTATGGGGGTGGCCAAGCATAGGCATCTCTGTGTCTAGCACCC
+TGATGTGGATTACTTTCGTGACTGAACTAGTGAATCAAATGTTTACTTCT
+CTGTTCTAGCCTTGCCGAATCAAAGGTGACATAGGACAGCCTAATGATGA
+TGATTCTGCTCCTCTGGTACATTGTGTCCGTCTTTTATCTGCTTCCTTTT
+TGTTAACTGGTGAAAAGAAAGGTAAGCATAGCAGAGTAGGTACAGAGTTG
+AGGGGACACTTACAGGTTCAGGAGTCAGTTTGTTGGTCTGTTGGTGTCTG
+GTTATTGGGGTCGTTTACTTTCCATTTCTGCTGTCAGAGGGAGGGAATGA
+GAGGGTGAGTTTTGTTCCTTGGAAAAGGCTAAAGGGGCCTGGGTGGTTCC
+TTTGCAGCACTGGTTCCAGACAGAGACGTGAGAGTCAGTGTGAAGGCCCT
+GGCCCTCAGCTGCATTGGTGCGGCTGTGGCCCTTCATCCAGAGTCGTTCT
+TCAGCAGACTGTACAAAGTACCTCTTAATACCACGGAAAGTACTGGTATG
+TTACAATTCACTTTTTTTCACCAGCTAATTTGTACTTAAGCTATCTCACA
+GTCTTGCCTTCTTTTGTCTTAGAGTAGTGTTTCTAGGTAGCTTATATGTT
+TCAGCTGTGTTAGAACTCTCCAGGTGTGCATATGGTCATGTTCTTAGTCC
+CATGACTCCCCTATGTGTAGTTACATACAAATGACTACATAAGTCATAAA
+GTAGAAATCTGGAAATGTGAAAGTTATTTATACACATACTTATTTCTTCT
+TAAATCATAAGCTGCATTGTTAAATTGCTCTGGGCCACATCCATATTAAA
+TTGCTCTGGGCCACATCCATGTTCAGGCTAAGCCTGGTCAGGACAGCACC
+GTGACACTGGGTCTGCTTTTGGACATAGTTGTTCTGGAGTAGAAAGTATC
+TGCCACCTTCTTTCTTTCTTTGAGTGATAAGGCAAATCTGTGTTATTGAG
+TTTAAAGATAACAAATATATAATAGATTGATTGTTTCTGATCTTTATTTT
+CAAAAGAAATGTCAGAAGCTTGATTTTATTTTTTAATTTTTTTTTTTTTA
+TTTTTGGCTTTTCGAGACAGGGTTTCTCTGTGTAGCCCTGGCTGTCCTGG
+AACTCACTTTGTAGACCAGGCTGGCCTTGAACTCAGAAATCCACCTGCCT
+CTGCCTCCCGAGTGCTGGGATTAAAGGCGTGCGCCACCACGCCTGGCTTT
+TTTTTTAATCTGACAACCTGGAATCACAATTTAGAAATCCTGACGTATTG
+AGATAATTTCAACGTGAGAACTCTAGAAACTAAATCCCAAACATCTTTTA
+CTACTTAGGAAATTATAGTCAGGTCCTTTGCAAATGTTCCCTTAAGCTTG
+CTTCATTTGTATATAAATTTGATGATGGAAGAAGGTACAGCTGGGCCATT
+TATGTCCGCAGAGGAACAGTATGTTTCTGACATCTTGAACTACATCGATC
+ATGGAGACCCACAGGTCCGAGGAGCTACTGCCATTCTCTGTGGGACCCTT
+GTCTACTCCATCCTCAGTAGGTCCCGTCTCCGTGTTGGTGACTGGCTGGG
+CAACATCAGAACCCTGACAGGTAACGGGACAGTTTGCTCTGGTGTCTTTT
+CTGGATACTCTGCCCATGTTCATGTTTCTATAGAGATATTTCTTGCTCAT
+TTTTCTGGTTAGGAAATACATTTTCTCTGGTGGACTGCATTCCTTTACTG
+CAGAAAACGTTGAAGGATGAATCTTCTGTTACTTGCAAGTTGGCTTGTAC
+AGCTGTGAGGGTGAGTACAATGCTTTACATAAACTGTTCCTTGCCTTAGT
+GAGCTTACCATTGATACAGTTAAATTTGGAGCTTAATAGGTCACATTTCC
+GTAAGTTGTAAACAGTTCTTTTCCGAAATTTACCACTCAGCCTTTGAAAA
+AACGTTGCCATCATATTAAAATTCATTAAAACTTTTAATTCTTGGACTCC
+TTATTTGAAACGTTCTTTTCTCTAAAGATAGTGTTTAGAAATATACCTTT
+GCTATTTTGAAATATAAAGTTTGTTGAATAATTACAATTACTGTTTTAAG
+ATACTAGAATGTTGAGCTGCAATGAAATTATGGGTGTTATTTAACTGGGC
+CTTTACTAAAAGAGCCTTGATTCCTCAAGTGACAGTAAGGTGAAACATTT
+CCTATTAGCTGCATCATAAGTCACAATTGGGCATTCAGTAGCAGAAAATT
+TAACTAGAGAAAATCAAAACAAAAACAGTGATTAAGTCTCAGGAAGGGAC
+TATCATTCTTTTTAGAAATGTAATGGCCTCAAAGTAGTGTTTTTCTAGAT
+CTAATTTTTTAAAAAGATTTTATTTTTTTAATGGTGTATATGTGTGTGTG
+TGTGTGTGTGTGTGTCAGAGTATGTCAGAGTGTGAGTTTCTATCTGTGAG
+GGTAATAGCAACAGATGCCAGAAGAGGGCACTGGATCCCCTAAAACTGGA
+ATTCTAGGTGACTATGAGCCACCTGATGTGGATGCTGGGAACCAAACTCG
+GGTCCTTCAGAAGAGCAGTAAGTAGGCACTTTTGACCAGTGAGCCATCTT
+TCCAGCCCCCAGCACCATTGGGTTTTTTTGCTTTTTTTTTTTTTTTTTTT
+TTTTAAGTTAAGTTTTAGTTAGTTGTGTCTTTTGAGCAATGAAGGCATGC
+TGATAGCACAGGTGCTATGCTGATAGATATAAGTGTGTTATCCTTGTATA
+GGATAACTACAGGATAATTAATGCCTTTAAGCTCTGAGGCTGAAGGTCCT
+ATAGCAATATAAGATCCACCTTGATTCCTTCCTTGTCCATCAAGAAAGTT
+GAGTCACATCTAAGATACTCTTTGATATGGGTCTCTTCTCCTTATGCTGG
+ACCTGAGACTTCTTTTCACATGTGGCAGGACTATGTTGTGTCATCTTCCT
+CTAAACCAGTGGTTAGTGTTCCTGAGATTGAGGCTCAAGAGTCAAGGCAA
+GTAATCAGAGGCAGAAAGAAACAAAATATAATGGGCACATTTACTTTTAA
+ACTCAAGCATAATAAGATAAAGATGTATCTTGAGTACTTCTGGGAACCTG
+TATTGCTTCTTGTTGCTGCTTAAAGATAGACTAGAACAAACAGGTGCATG
+CATAAGAGTGCTGTTCAAAGACGCGGTGCGGTGTCTGACCTGATGCCTTC
+TGTGGTGGGATGGGCTTTCAGCACTGTGTCCTGAGTCTTTGCAGCAGCAG
+CTACAGTGACTTGGGATTACAACTGCTTATTGATATGCTGCCTCTGAAGA
+ACAGCTCCTACTGGCTGGTGAGGACCGAACTGCTGGACACTCTGGCAGAG
+ATTGACTTCAGGTAAGGGAGCCAAGTTACAATTCAGAAGTTCAAATTAAA
+AATTGAAAGTCCTGAGGTCTCTGCAGTTGGCATGGCTGTCATGTGTACTG
+TCTGTTCAGCTCATCTCCAGTTTAGTTAGAGAACATGTGATAGTCACAGT
+ACTTTTTATTGAACTCTGAACTTGGAGATTTTGCTATTTTAAATGAGATA
+AGTTTTTCTGGTTGTCCTGTTTTTCTAGATGGTAGGAGTAAAAAAAAAAA
+TCAGTTATATTATTTTTAATTTTGTCCAAACCTGTCTGTCTGTATTAGGC
+ATATGGGTTTGTGCACATGAGTGCAAGTGCCTGAGAAGACCAGAGGCGTC
+ATAGCCCCTGGGGCAAAGTAAGATGGTTTAAGCCTCTTAACATGTCTGCC
+GAGACTACACTCCAGGTCCCTGGAAGACCTGAACGCATTTTTAAATGCTG
+AGCTAGCTTTCTAGCCCCACTGAGTTGTTTGTTTTGAGAAACCTTGTTTC
+AGACTTTTAAAATAGGCTATCAACTCTGCTTGGTTTTTTTTTGTTGTTGT
+TGTTTTTTTTTTTAATGTACCTTTATTCTGATTTAAGGGAGATAGCCGAA
+TAGTATTTTGTTGCAGTTTAAAAAAATAACTTGAGGACTGGGGAGATGGG
+TGGTGGCCACACAAGCAGAGCTTGAGTTTGATAGCAGCACCTACGTGAAA
+GCAGGTGTGGTGGTAACCATAGTGCTGGAGAAGTGGAGGCAGCTGGCTCC
+ATGGGCCTTGCTGGTCAGCCACTTTGCCTAGCATAGTAGGGAGAGTCCCT
+GTCTCAAAGAAAAAGGTGGCTTTTCGCCAGCATGGTGCTGAAAGTGAAGT
+AGAGAACTCTTGCCCCTCCCAAAACTGAAGTCAAAGTGAAGACAGGGAAA
+GTTTAAAAGGCAGTGTGGAAGGTGCCCTAACATCAGCCACCTTCCCCTTG
+GGCAGTCCACATACCCTCATCAGAACACTCCTAGGAGAAATGAAGCTTGA
+CAGTGCCACCATAATCTAGTCTGTCCCCTAACCAGTCAGTCATGAAGAAG
+CTAGAAGACAAGTATGTTGATTGTGGTTGCCAAGACCAACAAGCACAGGA
+TCAAACAGGCTATAAACAGCTCTGGGACACTGATGTGGTCTGATGGAGAG
+AAGACAGCATGGTTGACTACTTCCTGATTATGATGCTTTGGGTGCTGCAA
+GTAAAATTGAGATTATCTAAACTGAGTCCAGCTGGAAAATTCTAAATACA
+CATTTTTTTAACCATTAAATGCCCCCACTCCAAAACAAAAACAAAAAAAT
+TCCAAAATTTAGCATATTAGATCTTTCATTTCCTTAGAGCAAGTTTTAGT
+TAGCAGTTCTTTTAACAGTCCCTTTTAGAAGGGCAATGTTCCTTTTTTTA
+TTCACTTTTTTTGTGTGTGTGTGGTTATTGTTGTTTGCTTGATTTTGTTT
+TTGTTTTGTTTTTCCTCTATGTCTGTGTTTGTACCATATGCATGTAGTGC
+CCATGGAATCTAGAAAAAGGAAGTTGCAAGCCTTGGAACTGGAGTTATAG
+AGTTGTAAGCTTCTGTAGCTTCTGGGAATTGAACCCTGCTCCTCTGGAAG
+AGCAGCCTATGCTCTTAACCATGGAGTCATCTCTCCAGCCTGTTTGATTG
+ATTGTTAAAGCCACTGTAGACCTTGTGGGTTATTGTGTCTCATCTTCAGA
+CGATCTCTGAAAGAAGGATTATTCTGTTGTTGCAATTAGGACTGTGGAAC
+AGGGCACACAGCTGGTCACTTGGGTGGGAGTTTCAGTGTGCTGTCCTCTC
+TGTATTTAAGCTCACTCATGGAACACTTACTCATGAAGTAGTAGGTTTGT
+ATTTTGATGAAAAACGGTTTATCCAGTCTTACTTGTTTAGTCAAGAATTT
+GTAGAAGACAAATTGCCTCAGCTGCCTCTGAGAGACTGTTTTCATGTTGA
+GCTGAGAGCTACAGGCCAGCACTGTGTCTGCTAAGTGAATGACATCTCTG
+TTGAATGTGCTCTTTTGTTAGGCTCGTGAGTTTTTTGGAGGCAAAAGCAG
+AAAGTTTACACCGAGGGGCTCATCATTATACAGGGGTAAGCAGTTCATTT
+TGTGAGACTGTGGGCCCCTATCTTCTGGAAACATTCTGAGCAGGGTCTCC
+CTGGTGGTATGAATCCTGCTGAAAGCCTGTGTTGTGCTATATCTTCAACA
+CTTTATGTCATGAAACTTGTCAGTATGTGGCATGGATTTGAAATAAGTCA
+CCCTGAGTTCTTCCTGACTAGCTTTCCAAAGTGCCTTCCTTAACTAACTA
+GATATAGCAGCCCATGACTTTAATTCCAGCACTTGGGAGAGAGAGGCAGG
+CAGATCTTGGTGAGTTCAGCACTAGCCAACCTTATCTAATTAGTGAGTTT
+CAAATAGCCAAAGCCACATAATGAGACTGTATTTCAAAAAATCCAAATTC
+CCCCAAATAAAAAACCAAGAAAAACAACATTATAGAAATTAAAATATTTT
+TATTTTCTGTCTCTAAAGTTTCTAAAACTACAAGAACGAGTACTCAATAA
+TGTGGTCATTTATTTGCTTGGAGATGAAGACCCCAGGGTTCGACATGTTG
+CTGCAACATCATTAACAAGGTGTTTTATCAGTATTTATTTCTTTACTCTT
+TGGTTGAAATATATAGTAAGAGAATGGGAGAGGCAAAAGGAATCCCATTT
+AATTATTTTAAAAACTTACTAAAGTGATAATTTGAAAAAACAAATCACTA
+TCCTTTTATTATAAAACTAATGTGCTTTCATTGTCAAATTTGTAGTTTAG
+AGATCAGCACTAAGAATGAAAGAGAGTTCTGCTTCTTTCTACCTCCTGGA
+TGTAATTGCTGTGCACAGCTCTGATGGCTGTCATGGGCATAGGTTGGGTT
+AGCATCTGGGAAAAGGTGTAGTGACGGGTGAATTTCACATACCTCCTGAG
+TAGATCAGTTTTCCTGTCACGGCAGGCTTGTCCCAAAGCTGTTTTACAAG
+TGTGACCAAGGACAAGCTGATCCAGTTGTGGCTGTAGCGAGGGATCAGAG
+CAGTGTCTACCTGAAGCTCCTCATGCATGAGACCCAGCCACCATCACACT
+TTTCTGTCAGCACCATCACCAGGTACGCTGCCCCCAGCACCTTGCTTTGT
+TCATTAACAGGATATTTATCTGAGACACCATGGTTTGCCACAGCCCCCTG
+TGACAGTTTAAGTCCCTTCTCAGTCACATAACGGTGATGTCCTCAGTCTT
+GCAATGAGTTCTTTAAGGTCTTCATCTGATAATTGTATTTTTGCTTGTGA
+ATGTCACATGCCATCTGAAAACCAGTCCATGGAAAAGTTTCACTCTTTTA
+GTGAATTTAGCTAATGAGTGGGTGGGCTGATGCCAAGGGTCAGACGGCTG
+TGTGTAGTCTTGATTCTCAGGCCTATATTGCTAGCACATGGGCAGGGTCT
+GTTGGCCACGTAACCTCTTCTACCTGGCCAGTAGAATGGCAGAGCAAAAG
+ATCACAGTGAATGTACTGGGTTCGTTGGAAATCTAGTTTGTAAGGAAGAT
+TGCAGTAAGAGTAGTAAAACATGCCCTTGGGACACTTGTCTTTTAAATTC
+TTAAGACTGGATATACAGGATACCTTTCTTTTTTTGATGATTTGAAACAA
+CAAAAACACAACTCAACCAAAAGACTTCCAAGCTCACCTGGTAATTTGTT
+TTTTGGTTTTTTTTTTCCCTGAATTGGGAACTCAACTCATCAAATTTTAA
+AGTCATTACTCCTAAATGACTCTCATCTTTTCTCATGGAAAAAACAACTT
+TAAAAAAAAAAAAAATCAACCACCAGCAGCACACGGTAATGAGTGTGTAG
+GGGCATCTAGTGAAGGAGAAGTCAGGCCTTTTTTTTTTTTTTAATGATGA
+AAAACTTATTCCAGTCTTTTTGGAAAGACCTTTTGCATGCTAAGCAAGCG
+CTCTGCTGCTGAGCCAGACCCAGCCTCATTCCCAGCCTCTGGTTTACTCT
+AAAGTCAATGCTGCTTTCCTTTCTTCTGGACCTTAATCATACACGTTTAA
+ATGTCATTTGATCATGAAAAATATATGCTTTTAGAGCCTCTGCATTTTTC
+TTACTAGAATCTATAGAGGCTATAGCTTACTGCCAAGTATAACAGATGTC
+ACCATGGAAAACAATCTCTCAAGAGTTGTTGCCGCAGTTTCTCATGAACT
+CATTACGTCAACAACACGGGCACTCACAGTAAGTTTTCTCGCTTGGGCTT
+AAGATAATTGCCTTGCACGTCTTTTACAATGGGAAGGACCTGTGTGAGGC
+CTCTCTCTCTTGAGGTGCTTTATAAAAAAGTGTAGTTCTTTCAACAGATT
+CCAAAGTAGCACATACTCATTGTAATATTATCCAATCTAAATTGTAAATT
+TATCCAACCTAGAAAGTCATCAAATAAAAGTTCTAAAATGGGCTGGATGA
+TACTGGCTCGTGCCTTTAATCTTAGCACTTGGGAGGCAGACGCAGGTAGA
+AAAAAACCTAATATATAAGAATCCAATAATGGTTATATTCCAATTACAAT
+AAAATGCTAAATAACATATAGTAATATGTTAGACACGTGTTGATGTGTTG
+CTTTGGAGAAACTTAATGATATAGAGACATGTAACCTAAGTTTTAATGTT
+GAATGGCAGAGATAGAATAAAAATGTCTTGGAAATGTAGTTTGATTCTAG
+GTTAAAAAAGTAATATTCATATGTGTACAGACAGTCTTTACAAACTCAGA
+GAAAGGACAAGCAAAAAATTGCTAGGATGATTTTTAGGGTTGTGACTAAC
+TTTCATTTTCTTACATACAGTTTTCAAGTATTCATAAATTTTCTAAAATA
+ATCACATGTAACTTTTAAAATCAGAAATAGAATATTATGCAGAATTATTG
+TGTAATGGTTCCTCAAAAGACCTTTATTTGTACAATGGAAGAAATACTTG
+GACATTGTTGAGAGTCACACAGTTGAGATTTCAGCTGTCTTCTAAATGCC
+TTTCAAATTTAAATTTGTTCAAGATAGGAGATTCATTTCAATTGTGATCT
+GCACATGTCAAAGCATGGGTCTGATCTGGAGGTTTATTTTAATCATTTTG
+AGCTGAGGTCTTGAGGGAGGAAGAGAGAACCTTCTAGGGAGACTTTGAGT
+CTCCCTGGGGAACTAGAGTTCCTAGCAGAAGCTGCTAGCTGCCTATCTGT
+CAGCTGTAACAGTGTTTTACTGACAGTTGTGTCCAGCTCTTTTGTTTAAA
+GTATGCAGTGTCTAGTGGTAGGATGACTACCCTGTAATTGCTTACTGGTC
+TCAGTTTTCATTTCATTTGTTGTGTGTGTGTGTGTGTGTGTGTATGTGTG
+TAGGTTTAATTCAGTAAGCCTTTGGTGGTGTTTGGGTATGATTTTTCCAT
+TCCTTTCTTCTGCAGTTTGGATGCTGTGAAGCCTTGTGTCTTCTCTCAGC
+AGCCTTTCCAGTTTGCACTTGGAGTTTAGGATGGCACTGTGGGTATGTAC
+ATTCCTCAGTGTACAAGTCAGCTACCCACCCAGAGCAAGCACAGCTGCCG
+GGTCTCTCTTTTCTACTCCAACTTCATTCCCCTTTCTCCCTTTCCATCCC
+CTCCTTTTCCTTTTATGTTGGATCTCTTTGCCTCTTTTTTTTTTTTTTTT
+TTTTTTAATATATGCTGGAATCCTTGTTTCTGCCGTGTGAGATGTGTGAA
+ATTGGTGAGAGAACTGTTGGGGATGCTGAAGCAGTTGTGACTCTTATGAA
+TGTCAGGTGCTTGAAAGCATCTTTAGGGGTCTGAGGGTATCATCTGTGGA
+CTCATTAAGGCATTTTCATATACAGGTAAAAACTTTCTCATTAGTTTGAT
+TCTTTCTAAAATTACTAGGTAATTTTAAAGTCAGTTAGCATCCAGAGTTG
+TAATGAGCTATAAGAGTGTGCTCTATTATGTTCCCTTACAAAATAATTTC
+TCCAGTAGATGGCCTACTGCTGTCTGGGTATAATTTAGTTTTCTCAGCAC
+TAATAAGCAAGCCTTCATTTGTGCTGATGACCGAGAAACTTTGGGGATTC
+CAATCATATCTGAAAGGAAATCTAAAGATTTCTTCTGAGTGTAAAATATA
+TGGCCAACTAATGGTCTCATGCTTTGTCATCTAGAATGATTGTTCTCTTC
+TTTGTGAGAATCATGACTCAACAGTTACAGGTTCCAGTCATAGGTCCTGC
+ACTTTTGAGCAACTTTGACATTGGAGTCTGCTAAGTCAGAAAGGGAAGAA
+AATTTGAACTTCATCAGAATAATCATTCTAGCTCTCTCAATGAACTCTAT
+TGTGTGATTTCATTTCCCAAGTGACTTTTGTGAAGTCGTGATTTACAGGA
+AGAGTGTTAGTTGATTAGCAGTTTTAAAATGATAATGGCTCACACTTGTA
+ACCTCAGTACTTGGTAAGCAGAGGTAGGAGGTTTGACACATTTTGCAGCA
+CAGCACTGGCTACATAGTGTGTTCCAGGCCATGGGGACTACATAAATGAG
+AGTGAGACTTAAGTAACCCAAACCACATCAACAAATAAGGTAAAAAGGTA
+AAATGGGTGGTTCTCGGGGGCATTAAAGAGCAAGACTGTATGCTTGGAAC
+TTGATTTTTGACTTTGGAAATGTCTGCTTTCAGAGTGCCCCCACTGAGTG
+CCTCTGATGAGTCCAGGAAGAGCTGCACTGTTGGGATGGCCTCCATGATT
+CTCACCTTGCTTTCATCAGCTTGGTTCCCACTGGATCTCTCAGCCCATCA
+GGATGCCTTGATTTTGGCTGGAAACTTGCTAGCAGGTACTGACAGAGATA
+GACTATTAGACTGAGTTCTGATTTGCTGCTAAGGGGTACTCAAGTTACAA
+GAATACAATTGTTATTTTGGGAGACAGCTGGTTTCCTTAATACAGATTTG
+AGATATTGTCTGTTTATGTGAATGTGTATGAAGGGAGTAGTTATGTGCTG
+AGTATTTTTAGGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTAGGTTACAA
+GATGTTTAATTCAGTGAGCCTTTGGTGGTGGTGTTTGGATGTGATTTTAT
+TTTCTTCCATTGCACTGTGGATAAAACTGTTTTGAGTTTTGTTTTCTGGG
+TGCTGTGTGCCAGGCAGTGGCTTGGTCTATAGCAGGTAGGGTTGTATTCT
+TCAAGGGGTTATAGAGTAGCAGGAGATGCAGACAGACAAGCACAGGAGCA
+GCAGAGCAGATGTACACTTCTTATTTGGTCAGATTTATAGGTTAATAGAA
+AAGGATACAGCAGGTGAGGTATTGCATATGTAAGGATTTGGAGAACTAAA
+AAGTTATTTAATATTGAATATAAGTTACTGTGGATATCACCTGAGGACAG
+GAGTAAGAATGGAGAATACAGCCAGCCAGGTCTGAAACCAGTGAATCTAC
+TTGGGGATAGGGTACCAGGTCTTAGTGAGGAATATTGACTTGTTCTGTTT
+GTCATTGATATGTTTTATGGAGATCTCCTGACTTGAGTTTGGGCTGGAAT
+TTGTAGGGAGGATCAGGTGTTTATGGGAAAAGACTGAGCCAAGAGTGTAC
+TTTAGGTGTAGAGAAAGGGGATAGAAAGCATAATGGAGGCAGCATCCCCA
+GGCCTGATCTCCAGCTGCATCTCGATTGTGAGGATCACACCCATAGCTGG
+AGGAGAGTGGTGACTTTGTGGAAGATTGGGTTTTGGGGGCCAAATAGCAC
+TTTTATGTAAGTTTGAGACATCTGTTTAGATGTAGAGGTTCATCCATATG
+TTAGCACAGATCAGTGAGAAACTTAGGCCACAACCTCTTCCCACCTAACA
+CATTGCGGGTAGAGTAATGTGTGTGCCTATGTCTCAGCGAGTGCCCCCAA
+GTCTCTGAGAAGTTCATGGACCTCTGAAGAAGAAGCCAACTCAGCAGCCA
+CCAGACAGGAGGAAATCTGGCCTGCTCTGGGGGATCGGACTCTAGTGCCC
+TTGGTGGAGCAGCTTTTCTCCCACCTGCTGAAGGTGATCAATATCTGTGC
+TCATGTCTTGGACGATGTGACTCCTGGACCAGCAATCAAGGTAACTGTTC
+CTTGGGGGCAGCCATTATCATCTGCCTAATGACTGGACTTCCTGAACATC
+ATGCTAGTTGTCTGTTTCTTCTCTATTTCTTGCTATAAGTAAAGCTTATT
+CAGACACTGTATTTAACATAAAGATGTAGTTATTTTGTTTTGTTTTGTTT
+TGATGTCAGCCATGAAGCTCAGAAGTGATAATTTCCCTTTAATCCCCAGG
+TTCCTTCACTTGCTTTCTCTTCCCCATCACAGCAAAGTCTTTGAAGAATC
+ATCCTAGTCACAAAGTCCCTCCATCTGCTCAGGCATGGACTTGTGTGTTG
+AGGTCTTTGGCCCTTTCTGGCAGGAAGGACCCTGTTCCCCTTCTTGAGCC
+ATCCATTCTTCCACTCCAGTCTGTCTGGCTTCTGCTGCCCCCTTTCAGAG
+TACTTCCTGCTGCGGCAGTCATGTCAGTTGTATGGATTCTTTGACAGCCT
+TCCTATCCTCTGGGCTACTTTTTACCCCAAGTGATATGGCTTTCTGTGTT
+TTTTTTCCAACATCCTCTACTGTTGGGTTCAGTTTTTCTTGTCCACTGTT
+AAAGGTGCTGTCCTGTGGATCATCTACTTTCTTTTTATTCTTGACTTTCT
+GCTCTAGCGGGAATTTTCCATGTCCATACCAGCCACAACAGTAGTCACTG
+ACCACATGAGGCACTTGTGTTTTTGAAATGTGGCTGTGTGACTTAAACTG
+AACTGACTAAATAGACTATTGAATCAGGCTGCCGATGTGCCTGCATCCTT
+TGGGGGCTGGCTTCAACTCTGTGGAACCTTAGTCCTGTGTGCCTGCTATA
+TTGTAGATGCTCTGAGGCTGTCTCACATGTGTCAGAAGCCTCAGCCAGAG
+CCTGGCCTTCTTATCTTCTTCCCTCCTCTGAACCTGCTTCTCTTCTCACA
+CTTTGTACTTTATCAGGATAGCAACGGGAGCCACCATAGTTTTATTTCAG
+CTCACTATTCTTTATTTTTCCTACTAAATACTTTGTTTAAAATTTATATA
+AAGTGAGGTGGTAGTGGTACATGCCTTTAATTCTAGCACTTGGGAGGTAG
+AAGTAGATCTCTGAGTTTGAGGCCAGCCTGGTCTACAGAGTGAGTTCCAG
+GACATCCAAGGCTACACAGAGAAACCCTGTCTCAGAAAAAAACAAAACAG
+CTTAAGTATACCTACATATATGTGCACAAACATCCCTTTGGTGTTAGTGT
+ACATGTTCATTGATTCATACCTTCTCTTTTCTGGATTCTGGTATAAACTG
+GTGTATCTGCACTGGCCTATTTTTGGAAATCATATTATTGAAGTAATTCA
+TATTCCATAACATTCAAGTATACAATTTAATTTTGTTTGAGTCTAGGCCA
+ACTGACCTGAAAGTTATGTATCCCAGACTGCCCTCTGACACATGGTGATT
+CTCTCAGTTCAACCTCCCAAATACTGGCATTGCAGTGTGAGTCACCACAT
+GGTGGTGGCGCTTGTTATTTTTGTTATTGTGTTGTTGTTGTTTTAAATTA
+TACCACAGAGTTATATGGACATGACTACAGTCAACCCTGTAACTGTAACT
+GACTCCAAAAACCTTCCATGCCCCTGTCTCTGGATAGCCATTACTCTGCT
+TTTTGTCTGTGGGTGTGCTTCTCCCGGACTATTTTGCTGATGGCCTCATA
+CACTTGGTGGATTTGTCACAGGTTCTTTATACCAAAAATGTCTTCAGTTT
+GATAGTTCCTCACCTGCCCCCTGCAGCCTTCCTGCCTCCTTCCATGCACT
+GCCTGACCACAGCCATTCATCCTGATATGCTGCTTGTCTTGTCTGCTTGA
+TGAGGATCAGAGGTGCTTAGTGACTCACGCAGTCTGGGCTACCATCATTA
+CAGAGCCTCAGTGGCTACTTATTCTCCCAGTTCTAAAAGCTGAGTATTCT
+GACCCCTGGTAAGTCTGGCCTTCTCATGGAGAATGGGGAATGGGAACTGC
+TGGAGTCTCTTTTCATAGGAGCAATGATCCTATTTATGAGGGTCCTGCCC
+CCATGACCTCATTCTGAAGCTCACTCCCTTCTGTTGCATGCTCTTGATTT
+CAGCAGATGAGTAGGGCAGAGGCTGAGGCCAGAAAGGGAGTGAGGAGCAC
+AGCAAAGACCTGATGTGCCTGCCTTTTACCATGTTGTGTCTATCCTGTGC
+CACCTTTCTTCTTTGTGTGTAGGAGCCTGATTGTCCCGTGTATCCTTGGG
+GTTGCATATTGGAGAGCATGGGTCACATCTGGGCTGTTGACTTGTTGGGT
+TTTAGCTATCACATTGAGCAAGTGGCTTAATTCAGAACATTGTTTCCTCA
+CTTAAAAATGAAAATGTAAATGTTTCACTGTGCTTACTCATGTGATGTTC
+ACATAAAATGTCCTGATTGCCTTCTCTTAGAGGGGACATGGCCGGGGTGC
+CAGTGACTTTAGGTCCTTTTAAATCCCTGGAGGCCCTCCCTTCTCCAACC
+CATTCTCAGTGCTGGTATGAGGAAGTTCAGAGCCATGCCTGCCATGTGCA
+GTTATGTGGGTGAAAGTAGAATTAATTATCTCTAGTATCCCAGCTCTTTA
+GTAGGAGCTCAAAGGCCATGAGTAGCCTGAGCTAGTACTTTCCAATATGT
+GGTTGAACTGTATATTAGTATTTTTAGGGTTTGGCTCCTGTATTAATTTT
+ATCCCATTATATATTTTTTTATTTTAGGCAGCCTTGCCTTCTCTAACAAA
+CCCCCCTTCTCTAAGTCCTATTCGACGGAAAGGGAAGGAGAAAGAACCTG
+GAGAACAAGCTTCTACTCCAATGAGTCCCAAGAAAGTTGGTGAGGCCAGT
+GCAGGTAGGAAGGCGTTTCTGGAGGGCAGAGGGCTGCCTGTGGTGCAGGA
+GTGTAAGCACCGTAAGTAGGAAAGCTGGTCCTCTGCAGTACAGTCAGGGA
+GGAGTTCTCTGGGTGCTTCAGAAGTGTCTGTGGTAAACTGTCACTTTACA
+GAGTCAAGCCATAGAAAGGAGCCTGCTTTTCTGTGCTTACTCTCCCCTTC
+TTGTAAGGAAGAACTCCGCATTATTATTAAAGAGGCAAAGAAGATGTGTA
+GGAATAGATGTGGATGGTGATAGTCAAAAACAAACACAACAAATGATGAT
+GGTATGGGAAGCGTTCTGATAAGGCAAATGAGGAATCAGTTTTGGGAGAG
+GTAACAAGACAGTGGAAGCCAGAGATGATGTATCTTTAATCTAACCCTCA
+GAAGGCAGAGTGGATTGAGAGTTTGAAGCCACTTTGGTTTATGAAGTGAG
+AACTTACCTAAAACAAAGAGGACAGTGACAGAAAGACACAGTGGAGATAG
+TCATCTGATTTGGAGGAATCTAGAAATCACACAGATGTTTGGAGGGGCTT
+TTGTATTTGGGGAGATATCAGGTCAAAGGGCCAGAGTTTTGTCATAGTCG
+AAGTAACAGAGTCCCATCTATGCCCCTTTGGGATGAGAAGCAGAGTGATG
+CCTCTGTCTTGTGGCTACTTTCTTTCTGTTCATGTTCATCTCACAGCAGG
+CTTTGTAGACAAGTACCCAAGGAGTTGAAGGGATCTGCAATCCTATAGGT
+GGAACAACAATATGAACTAACCAATACCCCCCCAGAGCTCGTGTCTCTAG
+CTGCATATGAGTCAGAAAATGGCCTAGTCGGCCATCAGTGAAAAGAGAGG
+CCCATTAGTCGTGCAAACTTTATATGCCCCAGTACAGGGGAACAACAGAG
+CCAAGAAGTGGGTGGGTGGCGGAGTGGGTGGGGGAGCGTGTGGGGGACTT
+TTGGGATAGCATTGGAAATGTAAATGAAATAAATACCAATAAAAGAAAAG
+AAAAAAAAAAGCTACGGACTTGAAAATGTAACTGATGACCCCTCAAACCA
+AGCCCTCCAAGTGAGAAAGCCTTAGAAATTTAGATAGAGATGAAGAGCAG
+GAGACTGCTCTGCTTGTCACTTGGCTCTTTAGTCTCAGTGTAGTAATGTA
+TACACCCTTTACAGTTTTCAAAGCATGTCCACAGCACTTAATGAATATCC
+CAGAACTCGTTTGTCTCAAGGTCAGCCCCATGTACCTCAGGAGCCTTACT
+TAGACCAAGGGGACCTGTGTTTGGCTTCAGCCCTTCGGTCAGCAGAGGAC
+AGCTGAGGCCACAGATGACTCCATCAGCTGATGCCCTTCTGCCCACCATT
+CCTTTCCATTCTTTTACTCTCAGGCTCTTAGGATTTTACTTTATTCCTTA
+CTACCTCCCAGAGCACATACTTCAGAAGTCAGGGGAAAGACTTGAATGTT
+TCTGCTAGACCATTCTGAAAAGTTATCATTGCAGTAATTGCTGTTAAACT
+TAGGAAGTTTTTTCCTTAGATTTAAAAGTTTAATTTTTGGGTTTTGGTGT
+TTTTGTTTGTTTTTTTCTAAGCCTCTCGACAATCAGACACCTCAGGACCT
+GTCACAGCAAGTAAATCATCCTCACTGGGGAGTTTCTACCATCTCCCCTC
+CTACCTCAAACTGCATGATGTCCTGAAAGCCACTCACGCCAACTATAAGG
+TACTGCTCCTTGCTTATTTCTGAACATGTATTCCAGCGGGATGCATGTCC
+ATGTACCCATGTTCATGCTTACATAGAAAGAAGCCCTGTAACATAGGATT
+TAAATTTTGCTTCTCATTTGTCTTCTAGGTTTATGGAAGCTTTACAAATA
+TTTAGGAATTTTGTATGAGAACAAAGATCTGTGGTCTCTGGGTGTTGTTT
+GTTTATTGTCTCCCCAAGAATACTTGGTTTAGAAAGATATGCCTCAAGGA
+CTCCTGGGGCTCAGTACAGAGTCCTGTTTATAGGTGAGGCTCATTGCCAT
+GTCCTGAAGACACACAGCCAAGCCAAGCAGGTTTCATGTGCCTTCTCCCT
+CTATCAGCCTTAAAAGATAGTGCAACTGCCCAACTCAAACCAAAATTGTA
+GCTTCCGGAGAGAAAAGCAGATGCTCACTATAGACCTATAACCTTGGTGT
+TCTTGTCAGCTAGGGTTTTCTGTCAGTGTAGGGACTCTTTTGCCAGATGG
+TCCTGATTCAGCTTTGTATTGGCTATTGGATGATTGTGTCTGGGTTTACA
+TTCTTTTTTTTTCCCCATGATTATCCCAGCATTAAAGAATGATTTGTAGA
+TTTACCCTTATTGATTTGAAATCCTATTTTTATCAAATGTTAAGATCTCA
+TGATTGTATAGTTTCATTTATGTTTTTGTTACATCAGTTGTTCACTCATT
+TAATAGCACATTTTAATTTTTTTATTTAATTAATGAATTATTTTTGAGAC
+TGGGGCCCAAGCTTGACTAGAACTGGCCTCAAACATCAGTGTCTCAAACT
+CACAGTGACCCACCTGCCTCTGCCTCCCAAGTGCTGGGATTAAAGGCATG
+AGACACTATGCCCTGTAGAAGCACATTTTAAAATTATTGTGGTTTCCTGA
+TATAATCCCCATTTTTCTTTAGAATTTTCTGGATATTACCTATTTTTACA
+TGTGAAAATTTGAATAGACTGTCTAATTCTAAAAACCATGGAAATTCTTT
+CCCTTTAGGATCGTGTCTGCTGTGTGTCTTCCCTCAGACAGAGCTAATCT
+AAGGGAATGGTAGACTGAGATGGATGTTTTGATCACAGGATACCTTTCTG
+TTTGTTAAAGCTCCTTTCCTTACCTCAGAAATCTGAGAGTTAAAAGAATT
+TTTAAAAGAACTTAATAGATTATTCACATAGCCCTGGTATTTCTTACTGT
+TTTCCAGGTGTGCAGCTGCTTCATTGAGAACTAGTGATAATGTTCAGAAC
+TTCTGTTCATGAACTCTGTCAGCCCATGTAGAGGAGGGCACATGTGGTGC
+TTGCTGTTTGTGCTTTCGTCCTCTGTCATTAGGAGCCCTTTCCCATGCTT
+CCCACCTTGATCCTCTTTTTGTACTGAAACCTCAAGCATTTGCTTTTAGT
+TTTGGCCTCCCCAATATCTCAGATTTCTGTTCCTTTGCTCTCTGGCCCTG
+CTTCTGTAACTAACTCTTCTGTAACTTTTAAGTGTTTTGAGTAGCTGTTC
+TGCCGTAGTGTGTCTTCTTACCTTGTCTTTCCTTCACTCTTAACTCTGAG
+TCCCTTTCACTACCCACTCCTCTCCTGCTTCCTCTTGTTCTATTTCTCCT
+CTTCCCCTTCTATTTCGCCTTCCTTGTTGTCTGTGTCTTAGTTAGGGTTT
+TACTGCAGGCACCATGACCAAGGCAACTCTTACAAAGGAAAGCATTTAAT
+TGGGACTGGCTTACAGGGTCAGAAGTTCAGTCCATATCATCAAGGCATAA
+GCATGGCAGCATCTAGGCAGATGTAGGGCTAGAGGAACTGAGAGTTGTAC
+ATCTTCATCCAAAGGCAGACAGGAGAAGACTGGCTTCCAGTCAGTTAGGA
+CAAGGGTCTCAATGCCCACCCCTACAGTGATCCACTTTCTCCAACAAGGC
+CATACCTCCTAATAGTGCCACTCCCTGGGCCAAGCATATTCAAACCACCA
+CAATTGGGAAAGAAGCATGATGGGAGTGAAGCCATTCCTCACTAGGCCTT
+TGATCCCACATATAACATCACTGCATTGTTCTGGGCTGTAGGATCTCAGG
+TCTCTCTCACTGCTGGTGTCCCTTCCTGTCCCTGACATACTGAGGCTGAT
+TCCAGTGCACCCCAGACCTAGACCTGTTAGATTATATAAAAGCAGTGTAG
+AGAGACAGAGGATGGAAAGAAATAGGCAGAACCATGTTCATGGCAGAGGA
+GTATTTTGTCACCTACAAGTGATGCCTAAAATGTCTGTAGAAGATAGGAT
+GCTGCCCCTTATAGGCCAGAAATAACTCTCATAGTCTGCAAGGGAAGCTG
+GGAAGTATAGTCTTTAAGTGGCAAACCATCATTCCTAACTCTTAAGATGA
+TATAAAAAATTCAGAGCAGGGGAGTTATGTATAGCTGTTTAATGCTGAGG
+AGGGTTTTTGCTGGAAGCAGTAGTTTTGGGTCTAATGAAGAGGGGTTCTC
+TGGTCATGTATCAGGGAGAGTCTTAACTTGATTAGCTGAGGTGGATCGGG
+CTGTTGTCTTATATTTCTGAAGAGACTTGGAAAATAATTCTCAGAGGGCA
+GTAAGCAGATGACATGGTTACAGAGTAGAAGTAAAAGATTTCCAGGAAGG
+TTGAGGAGTAGGGAGAAGCAGAGGTAGAGTGGACTGGTAAGAGACAGTGA
+GGATAGAATGGATTCAGGGTGGCAGTGACTGCTTCCATTAGCATAAGTAG
+CAGATAACTAGAGTAGCAAAGGAAGGAATAAGAAACAATAAAAAGACTCT
+GATTAAAGAGTTGTACCCAGCCGGCCATGGTGGCGCATGCCTAAATCCCA
+GCACTTGGGAGGCAGAGGCAGGAGGATTTCTGAGTTCAAGGCCAGTTTGG
+TCTACAAAGTGAGTTCCAGGACAGCCAGGGCTACACAGAGAAACCCTGTC
+TTGAAAAAAGAAAGAAAAAAAAAAAAAGAGAAAGAGTTGTACCTGGCCTA
+AAGTGAAAGGGACGATGTTGCTTTTAGTCAGCTTGCCTCAGTATCTAACA
+GATCTTTTTATAGATATATCAGGAGCAAATGTAGCTGTTGTAGGATGAGG
+GCTATTGGGAAAGCTTACGTCAAATATTTAGTTTAGGTGTAAGGTAAAGT
+TCAGTAAGCAAAACATTTAAAAGACAGGTCAAAACATAAATATAAGTATA
+ATAAGGAGAAAAAGAGTGATCTAGGGGAGGAAGCACGGTGGGAGGGAACC
+AGAAGCCCACACGTAGGAATAGGCTTGTAGTGGACGCTTTTTCAAAGAAG
+TTAAGATTTACTGGCCTGGAAACAGCACGTTCATAGAAGAGAACACAAAT
+CTCATCCCAGTGTTAGACCTAGTCCTTTTAAATTTTGAAGTACTGCAGCA
+GTCAAGGAGTCAAGCTGGCCCTAAAGGATATTTAAGCTGATCAGAGCTGA
+TGAGACAGAGGGGTTAATAGATGCAAATGTCTAGAGTGGTAAGAAGAAAG
+ATGAGTTAGATGGCCGGTGCCCATTAGTTCCCTCTCAGAGGTGACCCAGA
+TTGCTTTTATGGGCTTAAGGGGGTGGATCTTTATAACTACCTCAAGGCTG
+ACCTCTGAGGGGGCAGCAGTTAGAGTGTCTCAGATGAGGGTCCATCCAAG
+GGGCCACAATAAAACCTAACAGATGACAAAGATGTAGGGCATAAGTCAGA
+AGGTAATGACTGTGCAACCTTGTGAGGAAATTAAGAGACAAAAGTTAAAT
+GAATAAAACTTTGCCATCAAGATTAACATGAATCAAAATAAGGTTAAATT
+ATAAATTTAACATAAGGTTAAATGCAATCATACAACTGGCTGGACCATGT
+ACCAGAGCTATGTAGTAGGATAAAGTAACTTTTGTTAGCCGTTGGCTTAT
+ATCAGACTCCTGAATGAGGAGAGAGCACTCAGGACCTTTGGAAAAGATGC
+CAGTTAAAGTCAGGGTCCAAGGGAAGCATCCGAACAGAGCGTCTCCGTGA
+TAGTCGCTGCTCCCAAGTACGAGGAATTTTGTATGTTGTCTCAAATATTA
+ATAATAAGGCTATTATTTATTGTTGCTTAGTTCCTTGAGTTTGTAAGCTT
+TCTATTGTTTTGTTGTTGATATCCATCTTTAATCCATGGTAGTTTGATGG
+GATACAAGAAGTTATTTCATTTTTCTTGTATATGTGGAAGACTTGCTTTG
+TATCCAAGTACGTGTCCATTTAGAGAACGGGCTTGAGAAGAAGGTGTGTT
+CTTTCGTGTTTGGATGAAATGTTCTGTAAGCATCTGTTAGGTCCACTTGA
+TTTCTAATATCTTAGCTCCAACATTTCTCTGTTTAGTTTTTGTCTGGATG
+TCCTGTCCGTTTGTGAGAGTGGGGTATTGAAGTTTTCCCTATAAGTGTGT
+GAGGGTCAATGTGTGATTTAAGTTTCAGTAATGTTCCTTTCACAAAACTG
+AGTGCCCTTATGTTTGGGGCATATGTTGGACATAGTCCTGAGTTCAATTC
+CCAGCAACCACATGGTGGCTCACAACCATCTGTAATGGGATGCCCTCTTC
+TAGTGTGTCTGAAGACAGCTATAGTGTACTCACATACATAAAATAAATAA
+ATAATTCTTTTAAAAAAAAACTGAAATGTCATCTTGGTGATTTTTTTTCT
+TTGATGAGTATGTAGTGTACTTCCCATCTCTGTGATTAGTTTTGGTTTGA
+AGTCTATCTTGTTAAATAGTAAAATGGCTATACCAGATCATTTACATATA
+GATCATTTATAGATTGCTTCATAGATCCATTTATTTGGAAATTTGTTTCC
+ATTATTGGGGAATTGAGACCATTGAGAGATATCAATAACCCTTGATTGTT
+GATTCATGCATGTTATTTCTTGTTATTATGGTGGTGGTGGTGGTGGTGGT
+GGTGGTGGTGGTGGTGTGCATCCATTTTGTTTTTGCTGGTGTGAGATTGT
+TTATTTCCTGTGTTTTCATGGGTGTAGTTAATGTCCTTGGATTTTTCTTC
+TAGCATCTTCTTCAGTGCTGGATTTGTAGATTCATACTCCTTAAATTTGG
+TTTTATCGGAGAACTTCTTATTTTTTCCATCTATGGTAATTGAAACGTTT
+GCTGGGTATGGTAGTCTGGGCTGGCATCTGTGGTCTCTTAGAGACTGCAG
+CACTTTTGTTCAGGCTCTTCTGGGTTTTAGGGTTTCCATTGAGAAGTCGA
+GTATAATTCTGATAGGTCGGCTTTTGTAGGTTACTTGACCTTTTTCCCTT
+GCAGCTTTTAATATTCTTTCTTTGTTCTGTATGTCTAGTGTTTTGATTAT
+TATGTGGCCAGAGGACTTTCTTTTCTTTCTTTTCAGGACCAATTTATTTA
+GTGATTTGTATGCTGCTTGTACCTTTATAGGCATCTCTTTCTTGAGGTTA
+GAATTTTTTTTTCTTCTATGATTTTGTTGAAAATATTTTCTAGGCCTTGG
+AGCTGGGTTTCTTCTCCCTCCTCTATTCTTACTATCCTTAAATTTGGTCT
+TTTCATAGCATCCCAGATTTCCTGGATGATTTATATCAGGAAATTTTTAA
+ACTTAACATTTTCTTTGACTGATGTACCATTTCTTCTGACATCTTCAATG
+TCTGAGCTTCTCTCTTCCATTTCTCATATTCTATTGGTGAACCTTGCCAT
+GTAGTTTCTCTTTGAGTTCCTAAATTTTTCATTTCTAGAATTCCCTGGTT
+TTTTTTTTTTTTTTTTTTTTTGCTTCTATTTCCATTTTCAGGTCCTGAAT
+AGTTTTATTCATTTCCTTCAACTGTTTTTTTTATTGTTATTGTTTTCCAT
+TTTCTTGACTTTCTTTTAAGATATTGTTTTCATTTTTTCCAATTGTTTGT
+GGTTTTCTGGCATTATTTAAGGGACTTACTTGTTTTCTCTTTAAGGATCT
+CTGTCAACTTCATGTAGTTGGTTTTAAGATCTTTTTCTTGAGCTTCAGCT
+GTGTTGGAATATTCAGGGCCTATGGTGGTAGGACAGGTGAGCTCTAGTGG
+AGATTTATTGTTCTGGCTGTTATTGATTGTGTTTCTAACACAGGCTTCTA
+GGTGTCTGGGTTTGGTGTGATTATAGGTCTAGGTGCTGACTTCTGTGTTT
+GTCTTTGTTGGTTGGGTGCTTTGTTGCTTGTTTCTCTGGTGTGTTCAGCT
+GGTGTGTTCCCAGAGTATGCCTGATGTTGTTGGAAGCTGGGATGTAGTGA
+AGAGTAGCAGAAGGAGGTCAGGAGGTGATGGTCCATGGGATGCATGCCCT
+ATGGCAGCAGTGGGGAAGGAGGACTGCAGCAGTGCTAGGGAGGAGACGGA
+GGTTTGTGGCACCCCACCTGGTTTTCTGACAAGCATGACCTAGGTGAGCA
+GGAATGTTGCCCAAGTTAGGGGCTGGGATTCAACAATGAATTGAGGAAGG
+GAAGCCAGGAGAAGATGGTCTATAGGAGCCATGGATAGGGGCAAGAAAGA
+CTGCAGCTGGTGTTGGCTGCAGTGCTTCAGAGGAGACTGAAGAGTTGGCT
+CTAGGTGAACAGAGAGTTCTAAGGAATTTCTTGAACCTGGGTGTGTCCTG
+CCAGTGTGTTATAAATTTAATTCATTTTACTTAAGCATTCTCTTTTCATT
+ATTAATTTTATATACTTGATTTTGAATATTGTTCTACATTAATTTAAACA
+CTGTAAGTTTACTTTAACCTTGTTTTGAGTTACCTGTTTCAGGCTTAATT
+TTGACAATATATACAGAATATTAGGTGATAGAAGATAGAAGTAACTCAAG
+CTCGGTTGTGATGGCACACATCTTTAATCCCAGCACTTAGGGGGCAGAAG
+TAGGCAGATCTGAGTTCATACAGCCTGGTCTACAGTGTGAGTTCCAGAAT
+AGTTAGGGCTTCATAGAGAAACCCTGTCTATAAAAAATAAAATGTAACAA
+AACAAAAGTTGGTTTTTCCCTCCAACTTTATATTCCTGTAAATGAATTAA
+TTTTTACTTAGCATGACTACAAACATAGTTCTGAGCACATTGAGTAATTT
+GTTCATTTATAAGATAACCGAGCATTAACTTGTATATCTTAGTTAATTAA
+AATAATTTACAATATATAAGATTAGGGTTTTATAATTTTATATCTGATAT
+GTTTAAACTTACATGTAAAAAATTACACATACACACATATATTTTTTAAT
+ATCAAGTAAACTTTAAACATAAATACAGTCCAGAGAGACTGGCCTCTTAT
+AGTGTACTTTTATATCAGCTTTTTATATGATTTAGTTTCTCCAAATAGCT
+AAAGCTTAACAAAGATAGCAAAAATATCACAGGTTTTTTTGGATGACCCA
+GTTTTAAGACAAACCATCTTGGAAGCCTGGTGTTGCCTTGTTAGTCCAAA
+AAAAATAGGATACAGTGGTAAGCAGAGGGATATGACATACTTAGCTGAGG
+TCACATGTCATACTTTAACCCTGATGAAGTCACCAACCAAAATGTTGGGA
+AGGTGAGCCCTTCTGCATTTGCCTTCTGCATTTCTAAGCTAGAGTTGAAT
+CCAATTTACATACATGGTATGCTATAGCACATTAAGGTTAGCTGAACATA
+GACTTTTACCTATTAACCCTTTTTTGTTACAAATTTTAAGTTAACTTTTG
+TTTGGAATTTTAAACCATACTTAATGAACTTATAAATCCTGAGATGCAGA
+ACTTTACACAGTGCTCTTATAAAGCCTGAGATGAAAGAAGGTCTATACTT
+TAGTAAAGTTTTAGAGCTCAGATTTCCCATTGGCACCATATGTTAAATTG
+TTAAAGGAATTGTAGAGAGATGGATGGATGGATGGATGGATGGAAGGGAG
+GGAGGGAGGAAGGAAGAAAGGAAAGAAGAAAGGAAGGGAAGGGAAGGAAG
+GAAAGGAGGGAGGGAAGCTTGGTTTAACCTTAGGTGACCACAGTAAGGGG
+AATATTTTTGTCACCCACATGTGGGGATGGCCAAAATGCCTAGAGAAGAT
+GGGATACCGCCCCATACTAGGGCATAAGTGACTTCCATAGTCCTCAAGGG
+TAGCTGGAAAGTGTAGTCATTCAACAGGAAACTATCAAGACCCATTCTCA
+TTTTCTGTTCAGTATCTTTTCCCTGCATTTGTCCAGTATTCTCTTCATCA
+TAGGAGGTTAGGTTCAAGTTCAGTGGACAGATTCTCTTGTAAAGTTTTTT
+GAACTGGTTCTTTGCATGATCAGCCCCTCCCCTGCCATCAGAATCTGTAG
+AAATAGGGGAATTGTTATAGACTTCAGTTAAGCCAAAGCTTTTAGAATCT
+TCATCATCCTAAGATTAATTTGACTACATCTAGAATTGACAGTGACCACC
+TCCCACCCCCACCCCACCCCCGGGATGGTGAGAGTCTAGGTCAGCATGAA
+GAAGCACCTCCCCGCAGCAGACGGCATTTGTGTCTTTGTTGTAGATAACC
+AGACTTCGGTGGTGCCAGTAACCGTGTGCTCTCTCCTTCCACCTTGCCAA
+GGTCACCTTAGATCTTCAGAACAGCACTGAAAAGTTTGGGGGGTTCCTGC
+GCTCTGCCTTGGACGTCCTTTCTCAGATTCTAGAGCTGGCGACACTGCAG
+GACATTGGAAAGGTTTGTGTGTGGTCTCTTTTCCTTGAACCTGGGTCAGA
+GTACCTCAGATGATACCTAGTCACATGGTGTAGGCAGGGGAGACTGCATC
+CTATTTGTGTCCTAGTACTACAGGATGCCAGGGCACCTGTGTTAGGTCTG
+TTACCAGTGTGTCAGGTCTTATTGCCACAGTTTTTCATTCAGTCTAGAAC
+ATGTTGAAAATTTGCTTACAGAATCCTTCTTCTCTCCCTTGAGCTTTTAA
+ATGGAAAGAGACAAAACCAGATTAACAAAGGTAACTGACCTACTCCTTCC
+ATAGCCCAGAAAGCAGATCTAAGCTCATTCATGTTCTGTGGTTCTGAGTA
+GAATAAATCTTCCTCCCAGCCCATATGCTCACACTTAATCCTGCCATTGC
+TAACAATTTTGCTTGCACTCCACTAGAACATCTCGTTTCTAATATACTCC
+ACAGTGAGTTAGACAGCCATCTTCAACTTACATCTTCAAGTGAAATATAG
+TCCAGGGCCTGCCAACCACTGATGTCAAAATCCATTCATGCTAACGCTCC
+TTATATGAAATGGTATCAAATTTTGAAAGCCTACCCACATCTGCTTATAA
+GACTGAGTCATCTTGAAATGATTTACAGCCTCAAGCATTTGTAACAGTTC
+CGTGTAGTTGTTATTCTGTATTGTTTAGGGAATAATGGTAAGGAAAGACA
+CCTGCACAAATCCAGTACAGATTGAATCCTGCTTTTCTTCCTCATTTGTT
+TGTTTGTTTGACCCATACTATCAGGCAACACACTATTTTACATTTTCGTC
+CCTTCATTGCATGCAGCAGTGCCTGAACTATTAACATTCATCAAACATTT
+GGTCAGTAAACAGATACTCTTTTCAAGAATCAGTTTGAAAGTTGGGTATA
+GTGTTCACATCTGGTAAGCTAGGTATAATGGCACATACCCCTCCGAGGCT
+GAACTGGGGGAGCTGTGGTAGGAGCTAAAAGACAATCCATATCAAAAGCA
+AATGAACAGGGAAAATTGATGTGAGTGTCTCATAGACCTTAGAGCCGTGC
+TCCATGTATGATTAGTCTGTGTCATCACATGACATTGATAAAATGCTTTC
+CTTTTCCCACTGTCCAGATCTTGTTATTCCATACTAATTTTTATACATAA
+TTGAATGTATTATGTGCGAGTTTTGCTTAGATTATGTATAATACACATTT
+CCCCACATATCTCCTTACTGTTCTGTATTGTTTTCCTATGACTTCTCCCA
+AGACTTCATACTATTTTTGAGTGATTTTACTATGATTTAGCTTGATGTGG
+TTTTCCTTTCAGTTGTGTTTCTTGTACTCAGGTTTGTTGCGTTTCTTGGT
+TTTGGGCAAGTATAGTTTTCTACAGTTTAGAAAGGTTGGGGCTACTTCCT
+TATATCTCTCCAACCTTTTCCCCTCACTCTATCTCTTAATTACCTTCTAA
+AAACCCATTCTTGGTTACTTAAAATTGCTCTTAGTTTACTGTTGCTCTTT
+TTAAGGATTTGGGTTTGATTATTCTAGATTCATTTTTACCATTTTTAATG
+TGTTTGTGTGTATGTATGTGCGTGTGAGTGGAGGTGTTGGCAGAATTCAG
+AAGAGGGCACCAGAGCTGCTGAAGTTTTAGTTGTTGGGAGTTGTGAGCCA
+ATTAATGTAGGTGCTGGGAACCCAACTCAGGTTCTCTGCAAGAAAGCATC
+GGCTCTTAACTGTCTTTCCAGCCCCTCCTTTTAGGTCTGTTTGTTTTTAA
+TCACTATCTACTTTTCCTTTTGTAGTTTCTGTTACAGTCTTTAGGTTCTC
+TAAGTTTTCCTTTCCCCACATCTGCTCTACTATAAAATCCCATCTGTTAT
+ATCTTTTATCATTAACATTATAGCCTTTCTCACTAGAAGTTTGATTATTT
+CTTTTCAAATCTCCTATCTTTATTTAACTTAAATTATTTGTTTATTATTT
+ATTTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGT
+ATACACACGTGTTTGTTACGTGGGCACCTGTCCACATGGAGGCCAGAAAA
+GTGTTAGAGTCCTCCACTACAACTCTCAACCTATTTCTTTAAGGCAGAGT
+CTTTCCCTAAATTAGACACTCCCATTTTTTCTGCTGTTCTGGAAGCCAGC
+AGGTTCAGCCTTGTGTGAGTGCCTACTGCTCCTTCAAGTTCTTCTGATGA
+TTGCTTCTCAGGCCTTGGTTTTGTTAGCTCCCTTAGTATCTTTGAAGTAC
+TCAAGGAACATCCTCCCCCAATCTCCAGGGGTCTCTGAATGACTTGTTTT
+CTTGGTGCCATATCATTTGAGCTGCCATTTTCTTGGGGTCTTTGGATTCA
+CTGTGCCCTCACAGCTTGCCTTCTGTCCCCTTGTTGTGCCACAGCCTGGG
+AATGCTCTCAGATCCTTATCTAGGGCCATCCTAGGACTTTGTCTGTATTT
+TATTTTCAGGATCACTGATATCTATGGCCCGATGTTCAGTGTTTGAAAAC
+TATGATATCAAAGCTGGAATCTTTAAAATACATTTTTTGATCATTTCAGG
+CAGGAGAGGGTAAATGCAGAGATGGAAGTCTCTCAATATTACATTGTATA
+GTGATGCCTGTGGTAGTGTCCTGATTCCCTGTAACCCTGGAGCTCCTGGA
+AGGACAGGAATTACATTCTTTATCTGTATTTGCAGCACCTGGTATCATTT
+CTACAGCACACAAAGTGTTCTATACAGACTGTGGTGAGGCTCATTGGCAG
+TGGCTCTCACAGTGTTCTGTTCAGCCGATCATATTTAGGACTCTACAAGT
+ACAGGGCTTTTGACTCCTTTGCACACTCCATAATGCCTGCCCTGAAAACA
+GGTATTAGATATAGATACTTGATTATTTACTGAATGATTGGGTGACAGAT
+TGGTTGTGTAAATGTACAAGAAAGCAGTTTTTACACAATTGTAGATACAG
+GAGTTCTTGTGATTGTTAAGATGATTAATTGAAAGCATTACTCAGAATTT
+CTCTGACATTTATATAGTTTATATGGTCACTGTCTATTTCTGAGTGGCAA
+CAATAATGGGTCACCTATGAGTCACTGATGTTGTCCTCTTCCTTTCACAG
+TGTGTTGAAGAGGTCCTTGGATACCTGAAATCCTGCTTTAGTCGAGAACC
+AATGATGGCAACTGTCTGTGTGCAGCAGGTGTGTGTCATTTCCTATCTTT
+TGGGATTTGAGCTAACCCCTTTGGGCCAAGTGACAAAGCCTAGTTCCTTT
+TTGGAAGCTGGTGCTGTGGTGTGGTATGAGGAAGAGTTTGGTGCCCCTAG
+CTATCACTCCCGCGTGCTCTGTGATTGGGACTGTGGGGGGAGAGGGCATC
+AAGAAGCAGCTCTGGCCCTCACTGCTTCGCCTTTGCCACCGCACCCCAGT
+GGGCTTGTTTCTCTTAGAACCACAGAGAATCTCAGGCACAGGGGACCACA
+TGGTGCGATGGATCTTCTGGGATGCTAACTCCTAGGGATTCACAACAAAG
+CCTTCCATCATACGTTCCTGCTGGATTTGCTACTGGGAAAGGATTGTCCC
+GGCGGATGTTTACTTTTGCTTTTGTTTTTTTCCCTTTTATTTTTTCATTA
+GTGGTACTAATGGTTGAACCTGGCTTATGCATTCCAGGAAAGCACTCTAT
+CACTGAGACTTTTAAATTTTTGAAATATGATCTCTGTAGCAGTTCCCAGT
+TAATGCTACTCAGTCGATCTTGGTGGCTGTGGTGGTGCTCCTCCTTGCTA
+CCCACCCAATATTTTGGGTTCCTGAATGAGAGACACATACATGCAGCCTT
+TATATTTTTATATGTCTTAAACACCTCAAGAACTGAACCACTTCCTAACC
+ACCATGTGGCTAACCCACCCTTTGATATCCCCGAGTTATTGCTTACTACA
+TCTATATTTTATCTTTGCTGCCCTGGACCCAGATGTGTAGTCCTCTTGGA
+CCACAATCCCCTGATTCCTACATGGTGGCTATGTTCTCTGTCAGGCATGG
+CATCTTGGTTCTTCCTCTCCCAACATAGTGGATCTCTCTTTCCTTTCTCT
+CCCTGTCCCCAGCCCTGGAATCCTAAAAGTCCCACCTCTGTATGCCCTGC
+CCAGCCATTGGCTCTCAGCATCTTTATTGACCAGCCAGAACCAACTGTGG
+GGAGGGTCTCTTGGTGTCTTATGTGTGAGGACACTGCAAACAGGTTTTTA
+ACATGATTAGCATACAAGCATGCATTAGACCAAACCCACAACATTTCCCC
+CTTTTTGTCCATTAAAAAGGTCTTTTCTCTCAGATATATATTGAACATAA
+TTATAACAGTTATGTAAAATATAAGGTATGATATATATTAGTGTCCAGTC
+ATTCAATTTTGTCAGTTTAAATAAATTATTCTATCATCTATCGTAACCTA
+AGTTGCCCAGGCTTTCCTGGAACTTGCAGATTGTCTATTTCCATCCTGAC
+CACTGTCTATCCTAACCGCTGTCCACTAGCTGGCATTGCAGACAGGCCTG
+TGCTGCTCATCTACATTAAATTTATATTAAGTGAAGTACTGATGTTTCCA
+TTCTTTCATTCTAGCTATTGAAGACTCTCTTTGGGACAAACTTAGCCTCA
+CAGTTTGATGGCTTATCTTCCAACCCCAGCAAGTCTCAGTGCCGAGCTCA
+GCGCCTTGGCTCTTCAAGTGTGAGGCCCGGCTTATATCACTACTGCTTCA
+TGGCACCATACACGCACTTCACACAGGCCTTGGCTGACGCAAGCCTGAGG
+AACATGGTGCAGGCGGAGCAGGAGCGTGATGCCTCGGGGTAATATTTATG
+GTGCAAGTTTGTCACTGATGATGCAGACACGAGTTATGTGCAGCCCTTCC
+TTGAAAGTCATTGGCTGAGTTATGGTGTAGTCAGCTATCCAGAGGTGACA
+GGCTAGAGAGGAGGAACCCAAGCTGCCTGTGAAGCATAGGTTCTAAGTTC
+CGAATCTTGCCCTAAACTAGAAGTGGCACACTCACTGTCCTCACCTGTTG
+AGAGTGTTTGGTACTTGCAGTGTTTATAGTAAGGTGGCTTGTGTCAGTGG
+TGCTTCTCTATGCTGTGACTCAGAGGACCACCTTCAAAATGGCACACAGT
+GGTGCTCGTGTTTAGAAGTATTGAAGGGCATAAAATAGTCTGGGGTGTGT
+TGTTTTTTTATTTCTATATGAATTTATGTTAATCTTTTTCAGTAGCTGAA
+ATTTGGAAGTCCTTCCGTCCCTTTCTCCTTCCCTTCAGGGCTTTATATGG
+TAGGCGTGTACTTTACCACTGAGCTACATTATAGCCCTAGAAGGTGTTTT
+GTAAACTTCTTTACCATAATTATCAAATACTTGAGATGTTACTCAGTGTA
+GTAATGTTAGCAGCATTCTTACTTTCCTTTTTTGGATGACCAGACAATAT
+TGGAATCAAGGAAAATGCTCCTTTTCTTGGATTTATCTTGGTAGTGTGCT
+TTTATGTGTGTGCCATAATACCTGCTTTGCTGTCTTTTCAAACATTTTAT
+GAGATGGTTCTCTCTCTACATTGCCTAGGCTGATGTTGAACTTGTGACCT
+CCATCAGTCTTCCCCCAAGAAAACCGAATGATATCACCTATCTTTGAGTT
+GATTTTCACCTCTCTCAAGCCCTCTTAGAGAGTGTGCTGGGTAGTGAGTT
+TTCTGTAGCATTGCACACTCAATTGAATCCTCTTGTCTCTAGCACATGCT
+ACTCCTAAACCCAATGGCCTTTCTAACTCTCATTTTGAAATGATCTGATT
+TTTTTGAACATGAAGTTGAATTGATGTATGGCTGAGTAGTACAGGGGAGA
+TGATTAAAGATATTTGTTTTCTGCTTTGGGCCATTTGGCAGGTGGTTTGA
+TGTACTCCAGAAAGTGTCTGCCCAATTGAAGACGAACCTAACAAGCGTCA
+CAAAGAACCGTGCAGATAAGGTGAATGGCACTGCAGCTAGAGATGACATG
+CGGATATCACTGGGGTGGAAACAGAGCTCAGACTTTTCTAGATTAGTTGC
+CAGAAGATTCTAATTGCAACTGTGGTTTCTTTCACTTTTTCCTATAGAAT
+GCTATTCATAATCACATTAGGTTATTTGAGCCTCTTGTTATAAAAGCATT
+GAAGCAGTACACCACGACAACATCTGTACAATTGCAGAAGCAGGTTTTGG
+ATTTGCTGGCACAGCTGGTTCAGCTACGGGTCAATTACTGTCTACTGGAT
+TCAGACCAGGTTTGTCTCTCGGCCTTGTAGTCACTATACTTTCTCCTAAC
+TGAATACAAATTACCCTGAAAAGACACCACCCAAGACTGGCCCGTACTCC
+AGTGGGGTTAGGCTTTAGAAATTTCCACAAGTTTTCTACATCTGTATACC
+CACTCATAACTTTATAATAAGCTGTCTTTAACTTGTAAGATAGAATTTTA
+GATTTATTCTGGTGGGCCAGGTCATTTATGCAAAATTCAGATTTCTGTAA
+AACAGGTGATCATCAGCCAAACAGTGGTGGCGCACGCCTTTATTCCCAGA
+ACTCGGGAGGCAGAGGCAGGCAGATCTCTGAGTTCAAGCCTAGCCTGGTC
+TACAGAATGAGTTCCAGGATACCCAGGGCTACACAGAGAAATCCTGTCTC
+AATAAACTAAAAATAAATAAAACAGAAACAAGTGATGGCTTTGCTCAGAG
+AAGCTTCTTTCTCAGAGGACAGTGGTCACATAAGTAAGTGGTCGTTGAGT
+ACTCAACCCTAAATGGGACATCAGGCCTCCTCTCCCCTCAGGGAGCACTG
+CTGAAGAGGGGGATGGAAAAAAGAGAGAGCTAGAGGGTAGGGAGGAATGC
+TGTGAAGTGTGAAATGCCAGAGCTGTCATAGTCACGAACTCACAGCAGTT
+GTGGTCACCTGCACAAGATCAAACCAGTCAACCTCCCAGCATGACTGATG
+TTGGGGCTCAGCTGATGGCTGTTTGGGGAGGAATGTCATTTTTCTTTGCA
+GGGGAGTAGCACATAGGCAGCACTAATTCAACTCAATGGATTATAAGAAA
+AATAACATTAAAACATCAAGATATCATTTACAGACTATACTTTCAAGCAC
+TGATGTAATCAACAGGCCCAATCACTTTTACACCATTCTGGGGGCTGTGT
+AACTTGGATCAGAACCCCTCTTCTCAAACTCTCCTTGCCTTCCTCTTGCA
+GTTCCTGTGGTCATAAAGTTCCAATAGTCCTGCTTGTAATACAAATTCTA
+TTCCTAGGGAGATCTTCTATCCATGCCTTCACTGAGAAGAAATTATCTTT
+AGGTTGTTGAAAATTGACCATCATTTTTGTGCTCTGATTGTGGCCTACCC
+TTTCTGAGTGAGTTACTAGGATGAGCCAGTTGGGGAGACTCCTTGGCCCT
+CTTAGGCTCATGGTCATAGTGCAAAATGTATTTAGTTCAACTTCAAAAGC
+ATTTATAGTCTTGCAGTCGGAAAGTACAAAGTCCTGTTTGAGATTTCTCA
+ATTGTAATACCCTTGGATTTGTAAAATCAAAAATTACATATTTCTACTAT
+ACTATGATGTAGAATATACATTCCCATTCCGAAATGGAGGAATGGGGATA
+TAGTGAGGAAATACTGGACCAAAGTAAGATTGAAACCTAGCAGGGCGAAC
+TGTAAGACCTGAAGTTAGCCTGTAAGCCCCACCTACCCAAGGACTAGGTA
+ACTTCCCGGAATACTGAGAGTTGTAGTATTACAAAAAAAACAACGCCACA
+TTGGGGCGAGCGGGGGGTAGGGGGGGTGAGGGGGGGTGAGGGGGGGAGGA
+ACAGATGGCCTTTGAGTTTGTCCTTTTGTGAGATAGAGTCCTACCACGTA
+GCCCAGGCTGGCCTCAAACTTCCATCTGGCTGCCTTTTGCCCAAGTCCTG
+AGAACTTGCCTGAGGCTATAATTAAAAAATAGACAAATTTCTTTGATAAA
+GTGAATTTCAAAGACAGTATAACATTGAGTCTGTGGATTGGTTATTACAG
+ATACCTTATGCAGGTCTACAGTGAAAAAGAGCAATTAGGATAGAAAGAGA
+TAATAGAGTTTGGAGAGAAAAAGAGCACTGGGAAGTTTTGCGTTCCACCT
+CACTCATGCACGAGCTAGGGTAGTTGGCATAGGTAAGTGCTGTTAACCAG
+AGGCTTCCAGGCCTACATCTACCTGTCTGCCTTGACCACTGTGTTCCCAA
+AGGCATGACTGAAGGCACTCTGCACTTAGCTGACTTCACAGTAGCTTTGA
+AAATGTGGTTATAAACAAGCTAATCTTCTGTGTAGGGGCTTGGGAACAGT
+ACACAGATGGGTTGAAGAAATGACCCCCGGTTTTTGTGTAAGCCTGTGAC
+TGACGGCAGCTTTTCTCTCCCTGCATTAGGTGTTCATCGGGTTTGTGCTG
+AAGCAGTTTGAGTACATTGAAGTGGGCCAGTTCAGGTACTGATGCTTAGT
+TACTTGAGTTGTTGTCCTTGTTATGTACACATGTGCAGCCCTAGGTCCTT
+ACAGTGACAGGTGCTTTCCACTTTCACACATCTGACCACTTGCAGAAAGC
+CTTTCTCAAGACTTGCTTAGAAACAGGTCTAATCTAGCCATTAGTGTGTT
+TTCATTTCACATTCATAGTATGTGATGATGGTAACATGTCACTAAGTGCC
+ATCTTACCCAGTGGGTCCCTGTTCCCTAACTTTAGAGGAAGCACTTTTAG
+GCCGTGGTTTAGTAAAGGGAGTGATTGTGGTCTTTAGTTGCATTATCTGT
+AGTTTTTCTTGTTGGACATTAAAGTTTTAATAATCTTGCAGTATGTATAA
+CAGATTTGCAATTTTTGAAAATTACAGTTCTTAGCAAGTAGTTTGAAAGG
+CGCAACAATGGAGAGTTTTGTGAGTTTCCCTGAGATCGAGACTCAGCTCT
+TGTTCTGATTATGTGACACCTTGACTCTGTAGTTTTAACCAGGGAACATT
+TTCTCCTCGGTGTCTGAATTTTTTCAGGAGAAGAAAGGAAAGTGAAGAGC
+TAGAAAGGTCTTACCTGCTAGCATCCAGTGCTTAAGGTGCAGCTTCACAC
+CGTAACCATCAGGCTCTCCATGTGGCAGAGGCAGGAAGACCAAGGGTACG
+GGTGGGACTAAATGAGGTCTCAGAGTCTGCTTTAATTTAGGACTTTCTCC
+TGGGCTTGACTTGTAACTTTTTTTTTTGTATATGTAGATTACACATCTTA
+TTTTTTAAAAAAGAAATGTCTAAATGGGTGTTTTTGTTTCTAGGGAATCA
+GAGGCAATTATTCCAAATATATTTTTCTTCCTGGTATTACTGTCTTATGA
+GCGCTACCATTCAAAACAGATCATTGGAATTCCTAAAATCATCCAGCTGT
+GTGATGGCATCATGGCCAGTGGAAGGAAGGCCGTTACACATGGTAATGTG
+TGCATCTCTGCTTGTTGTCCTCGGTCATCCACTTTGACTGCAAGTGCTGT
+GTGTATGTGTATGAGGGCTCGTCATGCGTGCACATGGGTTAGAGATGTTT
+AAGAAGAAAAAAGGAACACATTGCTTACTCATAAAATTTGTAAACTACTT
+GTAGAAACCATAAAAAATAGTGACATCATTAAGGTATAGATAGATTCTGA
+TATGTGCCTCTATTTATGAAGAAAAATTTTTATTTTACCATCTTTATTTG
+CTTTGGTCTTTTTTTTAGAGAGGGCATAGGCTCAGATCTGGGCCCTCACA
+TGTGCTAAGCAGTTTACCTCCAATCACACCCTAAACTCACAACATTTTAC
+ATCTAGCTTGTCACAGTCAAATTTTAACTTTTGTGTTATGTTTTGTCTTT
+TGTTTGTTGGTTGGTTTGTTGTTTGTTTTTCATTTGTAAAAACTTGTTTG
+GAGTCTGATAGCTGTCTCTCATGAGTCATTTCCACCTGTGGTCTCACTTG
+CTCCCTCTTCGCGCCTATAGCTATACCTGCTCTGCAGCCCATTGTCCATG
+ACCTCTTTGTGTTACGAGGAACAAATAAAGCTGATGCAGGGAAAGAGCTT
+GAGACACAGAAGGAGGTGGTGGTCTCCATGCTGTTACGACTCATCCAGTA
+CCATCAGGTAAGAGGAAAGCACAGGGATACCCACATCACAGCATGGAGAG
+ACACGCCATAGCCGGGTCCTGTGTGTGGTGAGCAGTCTCGGGAACACTGG
+GCTCTCTGGCGCGAGGGAGAAAGTGCACTAGGCTAGGAGCTGGCACAGCC
+CAGAAGGCTAGCAGACATATGGCTGACCACTAGAGTTTCAGTCGAGATGC
+TAATGTCACAGGGGCTGTTTATTCTCAAATGTAGCTCAAGGATCAAGTAA
+TTGACAGTGTTAATCTAAACCATTAACATTTTAAAAATAGTAAAACCGTC
+AAATCTGAAATAAGTAGATAGCATATTTACCTCTATCTTAACGTAGTCAT
+GGTTTTTGTTTTGGTTTGGTTTGTTGTTGTTATTGTTGTTGTTGTTTTGA
+GACAGGATTTCTCTGTATAATAGAGTCCTAGCTGTCCTAGGCTTGCCTTC
+TAGACCAGGCTAACCTCAAACTCACAGAGATTTGCCTCCCGAGTGCTGAG
+ACTGAAACCATACACCAACGTGCCTGGCCAGCCATGTTTTTTATTTGTGC
+TCATATAGTCTTGTGTCTCTTAAGCATGAGCATGGACTTTACATGCTTCA
+ATCAGCCCATGTGTAGAAATAGTTTTTAACATTTTTGTCCTTTTAAAACT
+TGTACTTTTGATTATTCCTCCCTTATTTTGTTAGCTCCTTTCAGCAGCCT
+GTTTCTGAAACTTCATAAAACCACAAGCTTTTCTTTTTCTTTTTCTTTTT
+CTTTTTTTTTTTTGAGACAGGGTTAGCCTTTTTCAAAGAAACAGTACTTT
+CTTTGTTTCATTTATGTTTTGATCAATCTTTATAATAGTTAATTAAATAT
+TTAATGCTTAGTTAAGCCAAGGCATAAAGTAGTTTGAGAGTAAGCACATG
+CCTTTGCTAAGCACAGTGGAGCTAGTAGTGCCTTGGGAATAGTATACGCC
+TTGTGCATGCTCATCAGGGTCTGCCCCGTGGAGATCCTCGAGGGCCCAGG
+AAATAAGGGCTTGTGTGCATAGGCAGTGTACATGGACCTTGAAGCAAGGC
+AGCTTTGGGGATAAGGTAGTTGGGCTAGGCTTAACGTTGTATATTGTCTC
+TGAGGTGGTGTGCATTTTTTGGTTTTGGTTTGGGTTTTTTGAGACGAAAA
+GAACAGCTTAGTTTTTGTGGACTTCCTGATCTATTGTAGCACAGAAGTGA
+ATACTTGACCATGCTCCTCTAAAATGCTATCTACAAAATGGGCAGTGGGC
+CAGATGTCTCTGTTACCAGCCTTTGCCAGGTCACAGGTTTATCTATTTAG
+AATGACCAAAATGACCACCAGTAAACTCACACTCAGCTTTGTATGACTGT
+TGGCAAGTGAGGGCCACTCAACAATAAAATGCCTTTGCAAAGAGCACTGA
+AGTGGATTTAAAGTACATAGGAATATTAGATTGTTCCACAGTATTTAGTC
+AAGGGAGTGTTTGGGCTCTCCTACAGATTAATGTAAGTTTGATTTTCACT
+TGATCCCTTCTTTATGAAATGTCGAAATACTCATCATGAGTCTTCCGGAG
+CCCAGCACAGCGCACTCATCCCCTTGGAGGTTACTGCATCTCTTCTATGG
+CTTTTTGTTCCCTTCACATTCACATTTCACAGAGTGAAGATTGTGTGCTT
+TCCTCGGTCTTCCCTGGGGATTATATCTCTTTGAACCTTAGAGTTACCAG
+CTAAGCTGGGACCGGTGTGGAACATGGGGAAGTTGAGGGCCCCTGGGCCA
+GGAAGCATTCTGATCCTACCCAAACATTCTTGCAGGTGCTGGAGATGTTC
+ATCCTTGTCCTGCAGCAGTGCCACAAGGAGAATGAGGACAAGTGGAAACG
+GCTCTCTCGGCAGGTCGCAGACATCATCCTGCCCATGTTGGCCAAGCAGC
+AGGTTTGTCTTCATTGCCCTGGTTTGCCATTATGTAGTGTGATTTATTTC
+AGAGTGGACTGAGATGCTTATGAAGGTTGGTTGTGATCATTATATCCTGA
+CAGGTTCTTGCTCTGAAAGTTGGTCTTGGTGCTGGTTGTGGGAAAAGTGG
+CTTCTTTTATTTCCATTACCCTTTGAGGTTGAATTGTCTTCTCACTTTCC
+AAGTGAGGGAACACAGAGTCAGAGAGGTTATTGAGGTTTTCCATCCTCAC
+AGAGCTGGAGGGTGATAGGTCTATAGTTGCTTGGCCCAGTGTTGACTTGA
+AAACTAAGTCCTTTCTTATTTTATTACCCTGACTCCTGTATTCTGGTAAG
+TCAGGTATTTTATTTGATCCTTAGCTTTCTTTTCTTTTTTTTTTTATTGG
+ATATTTTCTTTATATACATTTCAAATGCTGTCCCAAAAGTTCCCTATACC
+CTCCCTCCGCTCTGCTCCCCTACCCACCCACTCCCACTCATTGGCCCTGG
+CGTTCCCCTGTACTGAGGCATATAAAGTTTGCAAGACCAAGGGACCTCTC
+TTCCCAGTGATGGCCGATTAGGCCGTCTTCTGCTACATATGCAGCTAGAG
+ACACAAGCTCTGGGGGTACTGGTTAGTTCATATTGTTGTTCCACCTATAG
+GGTTGCAGACCCCTTCAGCTCCTTGGGTGCTTTCTCTAGCTTCTCCATTG
+GGGGCCCTGTGTTCCATCTTATAGATGACTGTGAGCCTCCACTTCTGTAT
+TTGCCAGGCACTGGCATAACCTCATACAAGACAGCTATATCAGGGTCCCT
+TCAGCAAAGTCTTGCTGGCATATGCAATAGTGTCTGCGTTTGGTGGCTGA
+TTATGGGATGGATCCCCGGGTGGGGTATTCTCTGGATAGTCCATCCTTTC
+GTCTTAGCTGCAAACTTTGTCTTTATAACTCCTTTCATAAGTATTTTGTT
+CCCTAGTCTAAAGAGGAATGAAGTATCCACACATTGGTCATCTCTCTTCT
+TGATTTTCTTGTGTTTTGCAAATCGTATCTTGGGTGTCCTATGTTTCTGG
+GTTAATATCCACTTATCAGTGATTGATTATCAAATGACTTCCTTTGTGAT
+TGGGTTACCTCACTCAGGATGATATCTTCCAGATACATCCATTTGTCCAG
+GAATTTCATAAATCCATTGTTTTTAATAGCTGAGTAGTACTCCATTGTGT
+AAATATACCACATTTTCTGTATCCATTCTTCTGTTGAGAGACATCTGGGT
+TCTTTCCAGCTTCTGGCTATTATAAATAAGGCTGCTATGAACATAGTGGA
+GCAGGTGTTCTTATTACCAGTTGGAACTTCTTCTGGGTATATGCCCATGA
+GAGGTATTGCGGGATCCTCCGATAGTACTATGTCCAATTTTCTGAGGAAC
+CTCCAGACTGGTTGTACAAGCTTGTAATCCCACCAGCAGTGGAGGAGTGT
+TCCTCTTTCTCCACATCCTCGCCAGCATCTGCTATCACCTGCATTTTTGA
+TCTTAGCCATTCTGACTGGTGTGAGGTGGAATCTCAGGATTGTTTTAATT
+TGCATTTCCCTGATGATTAAGGATGTTGAACATTTTTTCAGGTGCTTCTC
+AGCCATTCAGGTTTCCTCAGGTAAGAATTCTTTGTTTAGCTCTGAACCCC
+ATTTTTAATGGGGTTATTTGAATTTCTGGAGTCCACCTTCTTGAGCTATT
+TGTATATATTGGATATTAGTCCCCTATCAGATTTAAGATTGGTAAAAATT
+CTTTCCCAATCTGTTGGTGGCCTTTTTGTCTTATTGACAGTATCTTTTGC
+CTTACAGAAGCTTTGTAATTTTATGGGGTCCCATTTGTCAATGCTCTATC
+TTACAGCACAAGCCATTGCTGTTCTGTTTAGGAATTTTTCCCCTCTGCCC
+ATATCTTCGAGGCTTTTCTCTACTTTCTCCTCTATTAATTTCAGTGTCTC
+TGGTCTTATGTAGAGGTCTTTGATTCACTTAGACTTGAGCTTTGTACAAG
+GAAATAAGAATGGATCAATTCTCCGATGCTCATGGATTGGCAGGATCAAC
+ATTGTAAAAATGGCTATCTTGCCAAAAGCAATCTACAGATTCAATGCAAT
+CCCCATCAAAATTCCAACTCAATTCTTCAACGAATTGGAAAGGGCAATCT
+GCAAATTCATCTGGAATAACAAAAAACCTAGGATAGCAAAAACTCTTCTC
+AAGGATAAAAGAACCTCTGGTGGAATCACCATGCCTGACCTAAAGCTGTA
+CTACAGAGCAATTGTGATAAAAAGCTGCATGGTACTGGTATAGTGACAGA
+CAAGTAGACCAATGGAATAGAATTGAAGACCCAGAAATGTACCCACTCAC
+CTATGGTCACTTGATCTTTGACAAGGGAGCTAAAACCATCCAGTGGAAAA
+AGCTGGCACAACTGGTAGTTATCATGTAGAAGAATGCGAATTGATCCATT
+CCTATCTCCTTGTACTAAGGTCAAATCTAAGTTGATTAAGGAACTCCACA
+TAAAACCAGAGACACTGAAACTTATAGAGGAGAAAGTGGGGAAAAGCCTC
+GAAGATATGGGCACAGGGGAAAAATTCCTGAATAGAACAGCAGTGGCTTG
+TGCAGTAAGATCGAGAATCGACAAATGGGGCCCCATAAAATTGCAAAGCT
+TCTGTAAGGCAAAAGACACCGTCAATAAGACAAAAAGGCCACCAACAGAT
+TGGGAAAGGATCTTTACCTATCCTAAATCAGATAGGGAACTAATATCCAA
+TATATATAAAGAACTCAAGAAGGTGGACTCCAGAAAATCAAATAACCCCA
+TTAAAAATGGGGCTCAGAGCTAAACAAAGAATTCTCACCTGAGGAATACC
+GAATGGCTGAGAAGCACCTGAAAAAATGTTCAACATCTTTAATCATCAGG
+GAAATGCAAATTAAAACAATCCTGAGATTCCACCTCACACCAGTCAAAAT
+GGCTAAGATCAAAAATTCAGGTGACAGCAGATGCTGGCAAGGATGTGGAG
+AAGGAGGAATACTCCTCCATTGTTGGTGGGATTGCAAGCTTGTACAACCA
+CTCTGGAAGTCAGTCTGGAGGTTCCTCAGAAAATTGGACATAGTACTACT
+AGAGGATCCAGCAATACCTCTCCTGGGCATATATCCAGAAGATGTTCCAA
+CCGGTAAGAAGGATACATGCTCTACTATGTTCATAGCAGCCTTATTTATA
+CTAGCCAGAAGCTGGAAAGAACCCAGATGCCCCTCAACAGAGGAATGGAT
+ACAGAAAATGTGGTACATTTACACAATGGAGTACTACTCAGCTATTAAAA
+AGAATAAATTTATGAAATTCCTAGGCAAATGGATGGACCTGGAGGGCATC
+ATCCTGAGTGAGGTAACTCAATCACAAAAGAACTCAAATGATATGTACTC
+ACTGATAAGTGGATATTAGCCCAGAAACTTAGAATACCCAAGATATAAGA
+TAAAATTTGCAAAACACATGAAGCTTGGGAAGAACGAAGACCAAGGTGTG
+GATACTTTGCCCCATCTTGGAATTGGGAGCAAGGCACCTATAGAAGGAGC
+TACAGAGACAGAGTTTGGAGCTGAGACAAAAGGATGGACCATCTAGAGGC
+TGCCATACCCGGGGATCCATCCCATAATCAGCCTCCAAACGCTGACACCA
+TTGCATACACTAGCGAGATTTTGCTGAAAGGACCCTGATATAGCTGTCTC
+TTGTGAGACTATGCCGGAGCCTAGCAAACACTGAAGTGGATGCTCACAAT
+CAGCTATTGGGTGGATCACAGGGCCCCCAATGGAGGAGCTGGAGGAAGTA
+CCCAGGGAGCTGGGGAGATCTGCAACCCTATAGGTGGAGCAACAATATGA
+ACTAACCAGTGCACCACCACCACCACCACCACCACCACCACCACCACCAC
+CACCACCACCACCCCCAGAGCTCGTGTCTCTAGCTGCATATGTAAGAAGA
+TGGCCTGGCCATCAGTGGAAGAGAGGCCCATTGGTCCTGCAGACTTTATA
+TGCCTCAGTACAGGGGAACGCCAAGGCCAAGAAGTGGGTGTGGGAGGGTG
+TGGGGGACTTGTGGGATAGAATTGGAAATAAAATACCCAATAATAAAAAA
+AAGTGTAAAAAAAAAGAAAAAAAAAGAGTGGATCAATTTGCATTCTTCTA
+CATGATAACTGCCAGTTGTGCCAGCACCATTTCTTGAAAATGCTGTCTTT
+TTTCCACTGGATGGTTTTAGCTCCCTTGTCAAAGATCAAGTGACCATAGG
+GGTGAGGATTCATTTCTGGGTCTTCAATTCTATTCCATTGATCTACCCAT
+CTGTCACTGTACCAGTGTACTATGCAGTTTTTATCACAATTGCTCTGTAG
+TACAGCTTAATGTCAGACATGGTGATTCCACTAGAGGTTCTTTTATTGTT
+GAGAATAGTTTTTGCTGTCCTAGGCTTTTTATTTTTCCAGATGAATTTGC
+AAATTGCCCTTTCTATCTCAGTGAAGAATTGAGTTGGAATTTTGATGGGG
+ATTGCATTGAATCTGTAGATTGCTTTCGGAAGGTTAGCCATTTTTACTAT
+ATTAATCCTGCCAATCCATGAGCATGGGAGATCTTTCCATCTTCTGAGAT
+CTTCAATTTCTTTCTTCAGAGGCTTGAAGTTATTATCATACAGATCTTTC
+ACTTCCTTAGGTAGAGTCACTCCAAGGTATTTTATATTATTTGTGACTAT
+TGTGAAGGGTGTTTCCCTAATTTCTTTCTTTTTCCGTTTATCATTTGTGT
+AGAAAAAGGCCATTGATTTATTTGAGTTAATTTTATATCCAGCTACTTCA
+CTGAAGCTGTTTATCAGGTTTAGGAGTTCTCTGGTGGAATTTTTGGGGTC
+AGTTATATATACTATCATATTATCTGCAAATAGTGATATTTTGACTTCTT
+CCTTTCCAATTTGTATCCCCTTGATGTCCTTTTGTTGTTGAATTGCTCTG
+GCTAGAACTTCAAGCACTATATTGAATAGGTAGGGAGAAAGTGGACATCC
+TTATCTAGTCCCTGATTTTAGTGGGATTGCTTCAAGTTTCTTTCCATTTA
+GTTTGATGTTGGCTACTGGTCTGCTGTAGATTGCTTTTATTATGTTTAGG
+TATGGGCCTTGAATTCCTGATCTTTCCAAAACTTTTATCATGAATGGGTG
+TTAGATTTTGTCAAAATCTTTTTCAGCATCTAACGAGATGATCATGTGGT
+TTTTGTCTTTGAGTTTGTTTATATAGTGGATTACGCTGATGGGTTTCCAT
+ATATTAAACCATCCCTGCATCCCTGGGATGAAGCCTGCTTGGTCAGGATG
+GATGATTGTTTTGATGTGTTCTTGGATTCGGTTTGCGAGGATTTTATTGA
+GTATTTTTGCATCGATATTCATAAGGGAAATTGGTCTGAAGTTCTCTTTC
+TTTGTTGAGTCTTTGTGTGGTTTAGGTATCAGAGTAATTGTGACTTCATA
+GAATGAAGAATAGGGTAGAGTACTTTCTGTTTCTATTTTGTGGCATAATT
+TGAGATTAGTTGGAATTAGGTCTTCTTTGAAGATCTGATAGAACTCTGCA
+CTAAACCCATCTGGTCCTAGGCTTTTTTTGGTTGGGAGACTATTGATGAC
+TGCTTCTATTTCTTTAGGGGAAATGGGAATGTTTAGATTGTTAATCTGAT
+CCTGATTTAACTTTGGTATCTGGTATATGTCTAGGAAGTTGTCCATTTCA
+TCCAGGTTTTCTAGTTTTGTTGAGTATAGCCTTTTGTAGTAGGATCTGAT
+GATGTTTTGGATTTCCACAGGTTCTGTTGTTATATCTCCTTTTTCATTTT
+TGATTTTATTAATTAGGATACTGTCCCTGTACCCTCTAGTTAGTCTGGCT
+AAGGGTTTATCTATCTTGTTGATTTTCTCAAAGAACCAGCTCCTGATTTG
+GTTGATTCTTTGAATAGTTCTTTTTGTTTCCACTTGGTAGATTTCAGCCC
+TGAGTTTGATTATTTCCTGCCATCTACTCCTCTTGGATGAATTTGCTTCC
+TTTAGTTCTAGAGCTTCTGGGTGTGCTGTCAGGCTGCTAGTGTATGCTCT
+CTCTAGTTCCTTTTTGGAGGCACTCAGGGCTATGAGTTTTCCTCTTAGAT
+CTGCCTTCATTGTGTCCCATAAGTTTGGGTATGTTGTGGCTTCATTTTCA
+TTAAACTCTAAAAAGTCTCTAATCTATCTCTTTATTTCATCCTTGACAAG
+GAATCATTGAATAAAGTATTGTTCAGTTTCTACGGGAATGTTGTATCTGT
+TGAGGCCTGTTTTGTGACCAATTATATGGTCAGTTTTGGAGGAGGTACGA
+TGTGGCACTGAGAAGAAGGTATATCCTTTTGTTTTAGGATAAAATGTTCT
+GTAGATATTAATTAAATCCATTTGTTTCATAACTTCTGTTAGTGTCCATG
+TGTCTCTGTTTAGTTTCTGTTTCCAAGATCTGTCCATTGGTGAGAGTGGG
+GTGTTGAAGTCTCCCACTATTATTGTGTGAGGTGCAATGTGTGCTTTGAG
+CTTTGCTAAAGTTTCTTTAATGAATGTGGCTGCCCTTGTAGAGTTCCTCT
+TGGTAGATTTTACCTTTGTTGAGTATGAAGTGCCCCTCCTTGTCTTTTTT
+GGTAACTTTGGTTTGGAAGTCAATTTTATTCGATATTAGAATGGCTACTC
+CAACTTGTTTCTTCGGACCATTTGCTTGGAAATTGTTTTCCAGCCTTTCA
+CTCTGAGGTAGTGTCTGTCTTTTTCCCTGAGGTAGGTTTCCTGTAAGCAA
+CACAATGTTGGGTCCTGTTTGTGTAGCCAGTCTGTTAGTCTATGTCTTTT
+TATTAGGGGATTGAGTCCATTGATATTAAGAGAAATTAAAGAAAAGTAAT
+TGTTGCTTCCTATTATTTTTGTTGTTAGAGTTGGGATTCTGTTCTTGCGG
+CTGTCTTCTTTTAGGTTTGCTGAAGGATTACTTTCTTGCTTTTCCTAGCG
+TATAGTTTCCATCCTTGTATTGGTGTTTTCCCTTTATTATCCTTTGAAGG
+GCTGGATTCATGGAAAGATATTGTGTGAATTTGGTTTTATCATGGAATAC
+TTTGGTTTCGCCATCTATGGTAATTGAGAGTTTGGCTGGGTATAGTAGCT
+TGGGCTGGCATTTGTGTTCTCTTAGGGTCTGCATAACATCTGTCCAGGAT
+CTTCTGGCTTTAATAGTCTCTGGTGAGAAGTATGTTATAATTTTAATAGG
+CCTGCCTTTATATGTTACTTGACCCTTTTTCCTTAATGCTTTTAATATTC
+TATCTTTATTTAGTGCATTTGTTGTTCTGATTATTATGTGTCGGGAGGAA
+TTTCTTTTCTGGTCCAGTCTATTTGGAGTTCTGTAGGCTTCTTTTATGTT
+CATGGGCATGTCTTTCTTTAGGTTTGGGAAGTTTTCTTCTATAATTTTGT
+TGAAGATATTTGCTGGCCCTTTAAGTTGAAAATCTTCATTCTCATCTACT
+CCTATTATCCGTAGGTTTGGTCTTCTCATTGTGTCCTGGATTTCCTGGAT
+GTTTTGAGTTAGGATCTTTTTGCATTTTGCATTTTCTTTGATTGTTGTGC
+CTATGTTCTCTATGGAATCTTCTGCACCTGAGATTCTCTCTTCCATCTCT
+TGTATTCTGTTGCTGATGCTCGCATATATGGTTCCAGATTTCTTTCCTAG
+GGTTTCTATCTCCAGCGTTGCCTCACTTTGGGTTTTCTTTATTGTGTCTA
+CTTCCCTTTTTATGTCTTGGATGGTTTTATTCAATTCCATCACCTGTTTG
+GTCGTGTTTTCCTGAAATTCTTTAAGGGATTTTTGTGTTTCCTCTTTAAG
+GTCTTCTACCTGTTTAGCAGTGTTCTCCTGTATTTCTTTAAGTGAGTTAT
+TAAAGTCCTTCTTGATGTCCTCTATCAGCATCATGAGATATGATTTTAAA
+CCCGAGTCTTGCTTATCGGGTGTGTTGGGGTATCCAGGACTGGCTGAGGT
+GGGAGTGCTGGGTTCCGATGATGGTAAGTGGCCTTGGTTTCTGTTAGTAA
+GATTCTTATGTTTGCCTTTTGCCATCTGGTAATCTCTAGAGTTAGTTGTT
+ATAGTTGTCTCTGGTTGGAGCTTGTTCCTCTTGTGATTCTATTAGCCTCT
+ATCAGCAGACCTGGGAGTCTAGCTCTCTCCTGAGTCTCAGTGGTCAGAGT
+ACTCTCTGTAGGCAAGCTCTTCTCTTGCAGGGAAGGTGCACCAATATCTG
+GCGTTTGGACTTGCCTCCTGGCTGAAGATGAAGGCCCGATAGAGGGCCTG
+TCTCAGAAGCTGTGTAGCTTCTGTAGTCCACACTCTCACCTGCGCAGACT
+AGTCTCTGAGGGAACCAGGACGAAAGATGGCTTCCCCAGGTGCTCCAGCA
+GAGCCCTTCCAGGCGGGGTGGATACCTCTCCTCTGTCGGGGAAGGTGCCC
+AGATGTCTGGATCCCGAAATGGGGTCTGTCCCAGAAGCTGTGTCGATCCT
+TAGCTTTCTTTATGTTTATTTGTTTTTTCATAGCACTTACAAAGTAACCT
+AGCAATAATTTATGACTTAGTCAGAATTTCTGTAATCAGATGACCCTTAC
+TAGTTAATTAAGAAAATGCAGTAGTCCGGCTGTGCCATATCTTGTGGAAA
+CAAAGAGCCCTGGCTCCTGGACTGTGTGGTCCTCAGAACACAGGAACCTG
+TGTGACTCTCACCAAAGCACCATTACAACAGAAGATAGAACACAGAGCCT
+GTCTATACACAGTCCTACCAGGAAGTCTGAGCAGCTGCCAGATGGATCTC
+TGGCTATGCTTTGAATGAAACTCCTAACAGATGACAAGGCACTACTTTGA
+AATAGGTTCTGTAGAAAGCACAAGGGAGCCATGTTTCTCGGCATCTCATA
+AACTCTTTTAGGCATACTCTATTGGTAGTTGCTGTCTGCATATGCTAGGC
+AGAATTGCAGGCACTGATAGGAAAAATTTCACTGAAGAAAATGTATTGAA
+AAGCAGTGTTCTCCCACCTAGATAACAATGAAATAAATACACTATTAAAG
+TATATACAATATCAGAGGATAGTAGTTTTGATGGCAGAGGAGGAGGGTAG
+TGTAGGGTAGAGAATGCATTTGTTGTTCTAACAGTGACAAAATGAGAAAG
+CCTTTTTTAGGAGGTGACATTGAGTAAACATGATTCTAGCAGTTACCTGC
+AAGAGAGTGTTTCAGCAGAGCAGAGAGAAAAGCAAGTCCCTGGGATTGCA
+GCATGACATTTGGATACAAGCTAGTCCCAGGAAAAGGAAGGATGGGAGGG
+TAGACATAGAGAAATAGTGTCCTGAAACTTTGCTAAGGTTACTGCTGAGA
+GTAAAATGCAGTGGATACACTGCTGAAAGTAAAATGCTTTTAACTTTTAA
+CTGCGAGGATGATGGTGATTGTGTGCCCCAGAGGCGCAGTGATGACGTGT
+GTGAAGCTGCTATTCCCATCGTATTACAGACATATCCATGATGCTTAATT
+CCACAGATGCATATTGACTCTCATGAAGCCCTTGGAGTGTTAAATACCTT
+GTTTGAGATTTTGGCTCCTTCCTCCCTACGTCCTGTGGACATGCTTTTGC
+GGAGTATGTTCATCACTCCAAGCACAATGGTGAGTGTCACCATATGTTTG
+GGATGCACTGGAGGAAGATCCCAGCCCAGGTCTGCCACAGCTGGTGAGGG
+AGGACTTGAGGGTTTGCGTTTTACCAAATCTCCAACCGAGTCTGATCTGC
+TTCTCTGGGACCATACTTGGAACCAGTGCTCTAGGGACTTTTCTGTTGAA
+TGATTTTCTTCCATGGGATTGACAAGCAGGGAAAATTTAATCTTTCCTTA
+GCTTTAGCATATGCATTTTCTTTTTCTTTTTCTTTTCTTTTCTTTTTTCT
+TTTTAGGCATCTGTAAGCACTGTGCAGCTGTGGATATCTGGAATCCTCGC
+CATTCTGAGGGTTCTCATTTCCCAGTCAACCGAGGACATTGTTCTTTGTC
+GTATTCAGGAGCTCTCCTTCTCTCCACACTTGCTCTCCTGTCCAGTGATT
+AACAGGTTAAGGGGTGGAGGCGGTAATGTAACACTAGGAGAATGCAGCGA
+AGGGAAACAAAAGAGTTTGCCAGAAGATACATTCTCAAGGTATGTTTTCT
+GTCTGAACCTGTAAACTGACCATGTCTTTTAGCTGATCCTATGGTGTCGC
+AGCAGCAGTAGCCAACAATAATCACTGTTCTTTAGTGAGGCTGATATCAC
+TCACCTAGCTTTTTTTTTTTTAAGAAAAAAATGAAAAGATTTGTTTCTTT
+TTATTGATTGTGTATGAGTGTTTTGCCTATGAGTAAGTAAGTATACTTAG
+GGTGTGTCTCATACCCATGGAGGCCAAAAGAAGGCATTGAATCTCTGGAA
+TTGCAGTTTCAGACAGTTGAAGTGTCATTCTCTGAACCCAGGTTCTCTGC
+CAGATCAGCAAGTTCTTTTGAACCCTGATTTTTCTTCTTACCTTCCTCAC
+CTTTTTTTTCATGTCTTTCCTAAGGTCTGTCCTTGTGACAGACTTGACAA
+GAGTGGCTCACCATGATTAAGGAGGGGTGGCCTTCCCTGCATGCAGCCCA
+GTAAGGGGTGAATTGCCTGTAGCATGGTCAAAGCCTAGGGTAAGCCACTG
+AGAAGCAGCCCTGTACTTATTTTCACCATGTGCTACTTGAAGGACCTTTC
+GTTGCCAAGGTCAATAGCATCTACCAGATACCTGCAGGATCAGCTCACAT
+GCTACACACACACACACACACACACACACCCGGTGTGCTAGTGCCTCAAG
+GGCAGTGACAGGGGAGACTGCTTATCAACGCTCTGCACAGAGAAGTATCT
+GTGACTCCTAGAGTTTTCACTAAGTTAGAGCTGTGCTGCAGATGTGCTCC
+TTAGGACCTCTCATTTGCAGGGGTAAGCACCTGTAGTCTCTGATAAACAC
+ACGATGCCTTTGTGTTTGCAGGTTCAACTGGCAGCCCATTGCTTTCCCTC
+CGTTAGGTTGAAGGAGTGGCTAAAACTGATGTTACACTAAGTTTCTTTTT
+TTATTTCTTTTTTTTTTAATTGAGTATTTATTTCATTTACATTTCCAATG
+CTATCCCAAAAGTCCCATACACACTCCCCCACCCTCCCACTCCCACTTCT
+TGGCCCTGGCGTTCCCCTGTACTGAGGCATATAAAGTTTGCACGACCAAT
+GGGCCTCTCTTTCCACTGATGGCCGACTAGGCCATCTTCTGATTCATATG
+CAGCGAGAGACACGAGCTCCGGTGGGGTGGGGGGTATTGGTTTGTTCATA
+TTGTTGTTCCACCTATAGGATTGCAGATCCCTTCAGCTCCTTGGGTACTT
+TCTCTCGCTCCTCCATTGGGGCCCTGTATTCCATCCGATAGCTGACTGTG
+AGCATCCACTTCTGTGTTTGCCAGGCCCTGGAATAGTCTCACAAGAGACA
+GCCATATCTGGGTCCTTTCAGCAAAATCTTGCTAGTGTAAGCAATGGTGT
+CAGCGTTTGGAAGCTTACACTAAGTTTCTATGGCGAATTACCGTCTTATA
+TTTATGTAACACTCCTACAGCTGATAAAGCTTAAAAAAATACATTGTCAA
+TTTGCCTTGATGGGCCTTTGAAAGTGGTATGATTACCATTTCACATTTAA
+AGAAGGTAATAGAGAGAAGGAGAGACTCAGTCCACAGTGATGGTCTGAAA
+CATTTCACTTGGTTTAGTGTACTGTCTTTGTGTTTTCTTACTTTACAAAA
+ACCAATGGTCTTTATTTCACACTGTGCTTAGCACAGCTCACTGATCATGG
+CCTAATCTGCTGTTATGTGCTCCCTTTCTTGGAAGGTTTCTTTTACAGCT
+GGTTGGTATTCTTCTAGAAGACATCGTTACAAAACAGCTCAAAGTGGACA
+TGAGTGAACAGCAGCATACGTTCTACTGCCAAGAGCTAGGCACACTGCTC
+ATGTGTCTGATCCACATATTCAAATCTGGTAAGTGGATCCGATTAGACTT
+CATAATACTTTGTGTTCCCTGTCTCCAGCAGTGCCCGCTTCTCATAGAAA
+CTGTTCTCCATGTTCCTTGTGCCATGTGAGAAAATGTTGATAGTAGGAGT
+GAATCTGATACCATGTGGTCCAGCAATTACATTGATAGGCAGATACCTGA
+CAGGATGAGGAGCAGAGTGCTTGGAAGATAGCTGTATACTAATCTTGATA
+GTAGCATTCATTACATAGCTGAAAGAAGCAACTCTGACATCCAATGCTCA
+GCTGGTTGATAAGCCAAATATGGCATATACATAAGTTATAGAGTCAAATG
+TGTCTATGTGGGATATTTTTCAGCCTGAAAAGGTAAAATAGTTCTGACAT
+AAGAAACAACATGGGGTGTTGGAGACATAGCTCCTCAGTTAAGAGTATTT
+GCTGTTCTTGCAGAAGACCCTCCTTCACTTCCCAGTGAAGAGGCTCACCA
+TTACTGTAACTCCAGTTCCAGGGATCTGATGCGGTCTTTTAGACTCTACC
+AGCACCAGACATACACAAGCTGCATATCCATACATGCAGGCAAAACACTC
+ATATACATAACATTTAAAAGCCAACATAGGTGACCCTTGAAGACATTGTG
+CTAAGCAAAATAAGCCAGGCCCAAAAAGACGATGGATGAGATACTCAGAG
+TACTCCAAATCTAGAGACAAAAAACAAAATGGTAGTTGCCAGTGGCTTAG
+GAGAGGATTTAATGACTTTGGATTTTCAGTTTTACAGATGATGGTGATAT
+TGTATAACATAAGTGCATTTAATACCAGAACTGTATATTTAAAGATTATC
+AAGGTGGTAAATTTTATTTTATATGTATTTTACCACAATAAAAAATAATT
+GGAGGCCCATAGGCCTTAGTAGATAGATGTTTGTAAGCAACCCTGACAAC
+CTAAGTATGATCTCTAAGACAAACATGGCAGAAGGAGAGAATGAACTGCT
+TCCAAGTTATTCTCTGCCTCCAGGTACGTGCACCCCACCACCACCAGTAA
+ACAAATAAACCTAAAGAGTCATGCTGGGAATGCAGTGAATCCATTGGTAG
+TGATTGCCAGACACGTACAAAACCCTGGTTTTGATCTCTACCTAACCTAA
+ACGTGATGGCACCCCAGAGAATCAGAAGTTCAAACTCATCTTCAGCTGCA
+TAGAGAGTTTGAGGCAGGACGATCTACATGAAGACCTGTCTTAGAACAGA
+ATATTTTTTAAACTTGTTTGTTTAATTGGTGGGGTGTAGGTGGGAGTGTG
+ATATGGTGGCTTCATCCTGTAGTCTTCGTAGCATATTTCCACATTCCAGC
+TTTAGTTTCTATCATGGCCTTGCTCTTTGGGGGACTCATTTCATGTGGCA
+GAAGTAGGGCCTCTAGCCACCGTGGTCTGAGCCTCTGAAGTATGCTGATT
+AGCCTCCATTCACCCATGTAGGCCATATGAGCTCCTAGCTGCTCCGACCA
+ACTTGAGCAGCCTTCTTTTGAAGGGGGTGTGTTGCCTCCTGGGCAGAAAG
+GAAAAGAAACAGATGATGAGCGGATAGTTTGGGCCAGAGAAACTGTTAGA
+ACACTGGAGGCAACTTCTAGAAACCCAGCCAGAAGTGCCAGGAAATGGAC
+GATGTGGCAGAGTGTTTCTCCAGTCAGAAGGTGTGCTCTGTATTTGGAGT
+TCTATGTGCATTTTATGTACATATCTTGGAGAAAAAAGTAGTTATTGTCC
+TTAGCCTTCATCCAACAGGACTTATATTCTTACCTGTTGCTATACTGGGC
+TCTGACGTACTGGGTTGAGACAAGCAGATGTGCTTTTTGTGCCCTTGTAG
+AGCTTCACAGTCAGTGGGGAGGGTAGTACAGCAATACACAACCACTGTCC
+CAGGAAGTTCATTGGTACACATTTCTGAGGGTGCCTGGTTTATCTGGTTT
+ACCCAAAGAGGTCAGCAGAATCAACCTTTGGAGTTGGAAGAACAAACTAG
+AGAAATGGAGTTGAGAAGAGAGTAGAGAGTAAAGCAAGCTGCTCAGGTGA
+ACAGAGAGAAAGGCATTATGTAGACTGCCTCCCTTGTATCTGGCACCCTT
+GGCATAGTAGTGTGCCAGAATTTAATTTTGATAATGGGTGTTAGTTTCAA
+ACTATATTTATTCCTAATATTGATTTAGGTATTGTTTTTTGTCTAAAGAT
+TTATATACTTGTTTGTTGTGTGTGCAGGAGAGAGACAGGCAGGTCATGTG
+CATGCCAGGATGGGCAACTCCTCAGGAATCTCTTCTCTCTTTCCACCTTG
+TTTTGAGGCCGCTTCTTCTGTTTCTGTGATTGCACTGCCAAAAAGAGCTT
+TTCACCAATCCTCCTGTCTCTGCCTTCCATCTTGCCGTACAAATTAAATG
+CCCACCCCCACATCTGACATTTTCCATGGGTTTTGGGGATTAAGCTTGTA
+CAGTAAGTGTTTTTACTAACTGAGCCAACTGTCTGGCCCTTAAAGATCTT
+TTGATTTATGCTTCATTGGTCACTGTAGCAAATTGAAAGTCAAGAAGTAT
+GCCTGTTTATGAATAGCAAGTTGTTTTTAGTTTGTTGGTCACTTTAATAT
+TTTCCATTGGTTGGTTTGGATCTGATACCAGAAGAGCTTTCTCTTGCCTT
+CTTTGGATGGTAGATGTCTATGTTGTGTCCTGCTCACCTGGTGGGCTGCC
+ATGTGTGGCTGCTATATGGTTTCACTACTTACTATCCAGTTGCTCTAGCT
+TCTCTTGCTTCCTTTTTCAGGAACATCAAAGAGGAAAAAGATTCAAGGGC
+ATATTTTTACTGTGTGTTGTGTCAGTGTTGTCCAGCAAAGTGACCTTTAG
+CTGCCCAGTGACTGGGTAGCTAATGCAGATAGCATATCTTAGGTACTTTG
+TGACTTTGTTTAACTTCAGACACAATGGCTTAAATGGCTTCATGTGGTCA
+GTGGTTCCACTTTTTTTTTTTTCATCATGCATACATCCTTCTGGAAGAAC
+ATTCAATGGTTAGACATACAGGCTTTGCCCTAGTGCTGATAGATTATATA
+TAAATGTCTATTTCTTATTTAGTATGACACTAGGTGTCAGAACTACTGTT
+TGGGTATTTGCTTGGTCTTAAGATTGGGTTGAAGTAAACAGCTATGTGTT
+TATAAAGATATATATTTAGGTCTCTTGACAGTTATCCTGTTTGCTTGGGT
+AGCCCCTTGTGCTTCCGTCTGCTGGCTGTGGCTCAGTTCTGTGTTCTCCC
+AGGTGTGGCCTGAATCCTTTCCCCTGTGTTGGCTGGCTCTTTCTGGCCAC
+CTGTGTCTAGGCCTCCACAGCAGCTGTTTTGGGGCTCCTAAAGAATGTGG
+CAATTGAGCCTGGCAGTGGTGGCGCACACCTTCAATCCTAGTACTTGGGA
+GGCAGAGACAGGGGGATTTCTGAGTTGGAGGCCAGCCTGGTCTACAGAGT
+GAGTTCCAGGACAGCCAGGGCTACACAGAGAAACCCTGTCTCTATACCCT
+GTCTCGAAAAAACAAAACAAAAAGAATGTGTCAATCATCAGAGGTGCAAA
+AGTGGACTGACTTGTTAAGTAAGTTGTGCCTGTTTCTGCTTAGAAAAGTT
+AGAGTTTGGGGCTGGGGAAAACCCTAATCAGAGAAGTGCTTGCAGTGCAA
+AGTGTGAGAGCTGAGTTTGGATTGCCAGCACCTTATTCCAAGCGTTCCTT
+GGCAGCCAGTATATCTAATCAGTGAGCTCCAGCTCCAGGTTCAGTGTAGG
+AGACCTTGTGTCAAAAAATCAGATAGAGAAGTAATATTGACCCTACATCA
+GCTTTTAACTTCAGCACATATGTACCCAGGTACACACATGCATCCTCACG
+TACATAGAAGCACAAGTATGCACACACAAAGGCCACAATTTTCTAAGTGC
+CAAGTGCTATAAAAGTATGGAAAAGGACAATGAATAGTCCTTGGTCACCT
+GGTACTGGACCAGGTAGAACAGTAGAGGCATACCCCATAATCTGTTTTTC
+TTATTTTTGTTTGGTTCTAGGAATGTTCCGGAGAATCACAGCAGCTGCCA
+CTAGACTCTTCACCAGTGATGGCTGTGAAGGCAGCTTCTATACTCTAGAG
+AGCCTGAATGCACGGGTCCGATCCATGGTGCCCACGCACCCAGCCCTGGT
+ACTGCTCTGGTGTCAGATCCTACTTCTCATCAACCACACTGACCACCGGT
+GGTGGGCAGAGGTGCAGCAGACACCCAAGTAGGTGCACAGCTCCCCAGGG
+CCAGGCCCCAGCCCAGTGTTTGGCCTGAGGCAAAGCTGCTCTGAGAGCAT
+TCTCATTTTCCATTCTTTATAAAGCTTTGTAAAATTCAGGCTGCATATTA
+ATCTTTCTTTCATGGGTACTGTTTTGTAGGGAAATGTGGTCTGGCTACAG
+GCATTCAGACCAAACTGTTTGACTGTGATTTTCTTTGACAAGCGCTTTGA
+CACTGTTCCATCGTTTGGGCTATGCTTGTCAGGCTCTATCCCTCCTGCCA
+CGTCCTACGGCTCTCATTGGTTCTACAGCCAGACATGTTGCAATGTCTTA
+ACTTTGTTATGAGTAAATGTGTTCTGGGTATTCTTAGATAATGAAGTAAT
+TATTTAGCAAATTTCGAAACTGATTGGAAGTATTTTATTAATTTATTTTT
+ACTATTCAGATAGACTGTTTCTGGTTGTGGGTGGCCCTCTTTTTTTTTGC
+AAAGAGTTTGTAGTCTTAAATCTCAGTGCCCAGGTACTAACTGACTGACT
+TGCTCAGTCAGCTCTATGAGCATGTTTGGGAGCTAGAAGCTGTGAGCCCC
+GATGAGTCGGTCTTCAGTGTGCTTTCTGGACAGTTTATGAACACTTGTGG
+GGAAATTTTTCCTAAGGAAAAGTATAGGTATTGTTAGCTCTTCAGCTTGG
+TGTAGGGAGACCAGAGCCTCCCATCCAGACATGCTTTTACATCCCTGTGT
+CCGGCATTTTTTGCCCATCTGCTGTGTGCTCTTTATAATGTCATCTGCAA
+AGGAAATAGAAACACTACTTCCTGCCCCCACGTGTGATCACTTGGAGAGG
+TACCCACAACATCATTTGAAATGCTTAGAGGATCTCTTAAGCCTGTCACA
+TTAACTGATCATTATAAGAGCTAGGACAGGAAGCCAGCTACATAGCTCTC
+TGGTTCTTACATGTACATGTAAATCAGCCCTGAAACTGCTTAAAGCATTT
+CAGTCCAGGAAGCTTCGTAGGGGCTAGGAGTTTGCACATATTCTAAGCCC
+CTGCTGCTGCCAGTGCGGTTGTTGCACTGATGCTCTAGCACAGGGACAGC
+TCAGCACCACCAGTGTTCCTTCCTTTCTTCTCTTTCTTTCTTTTCTATTT
+TTTCAGTTTTTTTAAAATTTTTATAAAAATGTTTTTATTTGAAATAGAAT
+CACATCATTTCCCCCCTTCCAACTACCCTCCAAAGCCTCCTCTATACCTC
+TCTTCTCACATTGATAGCCTTTTCTCCAATTTGTTACATAAACATGTAAA
+TGGTATGTGTGTGTATGTACAGTATATATAAGTTCAATTTACTGTGTCTG
+ATTTTGTTATTTGTGTTTATATGGATTCATTGTTGGCTACTCTACATTAG
+ACAGCCAGTAAGCAGGCTCATCCCTGGAAGAGGCTAATTCTCCTTTCAAA
+AAGTTATTAGCTACTTATATTTTTTGTCTAGGGATAAGATCTATGAAAAC
+TTCCCCCATCCATATTAATATACCCATGAACACTGCCTTTATTGTAGTCT
+TATTTGTGTATCCATTTCTCTCCTAGACCGCTTCACAGCAGACTTTCTGG
+TATTCTGGCCCTTACAATCTTTCTGCTCCTCTTTCATAATGTTCCCTGAG
+CCACAGATGCAGGAACTGTGATGTAGATGTATCCACTGGGCTAGACTCCC
+CTACAGTCCATGGATCTCTAGTTTTGTCCAGTTGTGGTTTTCTATGATTG
+TCTTCATTTGCTATAAAGAGAAGTTTCTTTGATAAGGGTGGTAGCTACAA
+ATTGAAGCTCAGAGCTGTGTATAATGAAGTCTATCTTTAATCTCAGCCTT
+CAGGAGGCAGAAGCAGGTGGACCTCAGTCAATTCAAAGCCAGCCTGATCT
+ACAAACCAAGTTCCAGGCCTGCCAGTGCTACACAGAGTAAACTTGTCTCA
+AGTAAATAAACAAAAACCTGGCAGCTTTGACCTTAGTAAGGAGACTCAGT
+GCATAAAGGAACTTCCTGCTAAGCCTGAGACTTGAGTTCTAACCCTAAGA
+CCCGTATGGTAAAAAGGGAATCTGTTCCCATATGTTGTCCTCTGATATCC
+ACAGGCACATGGGTGCACAGGCACACAAATCATTTGTTTAAAGGCTTAAA
+AAACAAAAATAATTGGGGCCTGAGAACTTGCAGTAGGTTCCCAGGACAGT
+CTAGTAAACATCCAAGCCTGAGTTGTGTTGGGTAAGACACGGGAGCCTCC
+ATTAACTATGGATGAGCCAAGAAGGCAGGAGAAGGTAAGAATGGCTTTAG
+CCCACAGTAAGCCATAATTCTCAGCAACATCTGCCCACCCGGTTGGTGTA
+TAATAGTGTTGAAGATTAAAAGACATACTATGGTTTGTTCAAGAGGAAAA
+TGGATTTTGACCTTAGCTGATGGATCTTTTACCAAGATGTCTACTGGACA
+CGCTGAAACATCTGTGGTAAAAGATGAAAGTGTTATGTTAATAGAAGAAA
+ACAAGTAGGACAAGTGAATTTATTTTAGAATCCTTGGCTGTGCTCTGGAT
+CTGGAATATAAGACTGCTAGAATGATTATTATTGGGTAGTCATAAATATA
+AGGATGAGTTTTCTGTGAGTGATGGCTTTGTAGCTGTGAACATGTTTGGG
+AGACAACTGCTGCAGAGTTTATAGTGAAGTGTGCTCTGGAATCATTCATC
+CGTGTTGCAGCCAAAGAAATGCATGTGTGCAAGTATTCAGACTGCAATAA
+CGTTTCACATGAGTGTGCAGTGTGTGTAGAGTCCAAGTAGGATATTTGGT
+ATGAACCTAGTCTGCATGTGTAAAGTGTGTTCATGAATTCAAACATGACC
+AATATTAATAGTTGAATATAGGCAAAAATCAAGGGCTGTTCATTGAATTG
+TTCCTTCAAGCTTTTCTGTTTGACACTTTCAATAAACCGGGGAAGAAGGT
+GGAAAAGATAGCAATCAGAGTCAGCTCTTAGGCCTTACTATAAGGGGATT
+GGAACTATGAGAATTGGGATCATAGTTTCTTTTCATTTATATTTGTGTGT
+GTCTCTCTCTGTCTCTCTGTCTGTTTTGTCTGTCTGTGTCTGTGTTAATG
+TGCCTGAGCCCACATGTAGAGGTCAGAGGACAACTTTGAGGAGTGACTCA
+GTCTTCTTCTTCCACTGTGGAATCCAGGGATTGAACTCAGGTTGCCAGGC
+TTGTGCAGCAAGTGCTTTTACTTGTTGAGCCATCTTGCCAGTTCAAGAGG
+GGATTTCTAGTGATAGAAATAATAGTAACATTTATCCAAAGGGCTACAGA
+TTGGAGTGTACAGAAGAAGCTTCCTTGAGTTGGTGAATGTGGGATTTGGA
+CACAAACATTGGTTTTATTGTAGTAGTTATTGGTAGTTTTTCAAAGTTGA
+AATTACAGTTTCTTTCTTTCCTTCCTTCTTTATTATGTGTGTTGGTATTT
+TGTCTGTATGAGGGTGAGATCACCTGGAACTGGAGTGATAGATAGTAGTA
+GGCCACCATGTGGGTGCTTGGAATTGAATCCACTGGCTGGAAGAATAGCC
+AGTGCTCTTCAATACTAAGCCATCTCTCTAGCTCCAAAATTACCACGTCT
+CACCATACACAATATATTAAAATGAGAAACCTATCTATCACTTTCAGTTT
+TTAGTGCAGTATTCTTCAGAGAAGCCGCCCATGCTTTTCCTCATTTGCTT
+AGATCTCAAGGTCTGCACTTAGGTAACTTCTGCTCCTCAGTAGCAAGGTC
+ATAATTGAGCATTATTTGTGTGATGATTTGATTAGTAGCTGTCTCTTCTT
+TCTATGGTTGCTGTTATATTCCAGTCAGTACCATTTCACATTCAAACCAT
+AACAATGTCTCCTGCTAATTATTTAATTGCAAATTTCACCTATTGTACGT
+GAGTAGTTCTTCGCTGTACATGTGGCTATGAGTGTAAGCTACAAAGCTCC
+AGCTGTGAAAGGGCAGGGGTTCTGTATATGTTTCCTAACTGACAGAGGGT
+GCCTGGTGAAATGCCTTTTGTACCATCTGTTGTCAGATACAAACAGTACC
+ATCATTCCAGGAAGTTCCCTCTTGTCCCTTTATCACCATCCCTTCCCTTG
+TCCATAAGAACCTCCGTGAATACTCTGTGCCCAGAGCAGAAATCCATGGA
+AATGAACACTGCATCCGGTTGTTGATCAGCAACCATTGATCAGTTTAAAG
+TTTATTTCCCTGGTGGTGATCTGTGGTCTGCTCCTGCATTACTGAGTTGA
+TCCCACTACAGATACACTAGGTTATTCATCCATCTGCTACCTAGGAAGCA
+GAAATGGTTGGCAGCTTTAACAAACCTTTAGCTTGCAGCACAAGTTTTAT
+CGATAATCTATTAGTACTGAGGACTTACAGGACTGGTCAACAAATTTCAA
+GTGTGCTTTCTCTCCTGTGACTTCTGTCCCTATTGAGACTGAGCAAAAGA
+CTAGAAATATGTCAGACATGGTCTTTTAAATATTTGGCTATAACTGAAAG
+TCTCCAGGGTTATATTTAGAAAGATGCAATGAGAAAGGAGTATGAGTGGA
+GTAACCACTGCAGTAGAGAAGAAGCACTTTGAACAGTAATTCCACAACCA
+TGACTTTCTCAGTAATGTTTTTGAAAATTAGTAGGAAAGTGTGAAATGAG
+GCTTCTCCCTGGCACTTGAAGGTGTTTGTAGAACATGAGAAATATCAGAA
+CAACTGCTCCATTATCAAACATGGCCAGTGTGAATTCTGGAATGGACATG
+CTATTTGGAATTGCAGCCTTAAATAGTGGGTTTTGCCCTTCAGTCAAGCA
+CTGTAATCATCTGGCTGCTAAACTCTCTGTGTCCTTCAATGATTTCTAGG
+GACAGAAGTCTGAGGTGCATAAACACACACCACCAGTCTGATAGAGCAAT
+ATCTGGAATAACTTGTGGGTTTGAGAAGGAAAATCCATCATAGCTTTGTT
+CAATATTGTGTGTCTAAGTTTGTGGCTATCCCAGACATTCCCTCATTGCA
+TATTCATGCCTTGTAGGAGACACAGTCTGTCCTGCACGAAGTCACTTAAC
+CCCCAGAAGTCTGGCGAAGAGGAGGATTCTGGCTCGGCAGCTCAGCTGGG
+AATGTGCAATAGAGAAATAGTGCGAAGAGGGGCCCTTATTCTCTTCTGTG
+ATTATGTCGTAAGTGCCCACAAGAGCTCTTATGGTAGAGGGTGGCATAGA
+TGCTGCTTATATGCACCTGCTAGGCAACCAAATTATTCACTGTGCCACAG
+ATATATCAAAGCTGAGGAGAGGTAGCAATGTTTACTCTGGAGTTTAATTA
+GAGCAGTCTGGTGACATTTTTCCTTGTATTGGGCAGCTGTGTTTTTTGAT
+TCAAGAACTCTATCAAATCTGTGGCATTTAGAGTCTGTTTTCTTTACTAA
+GCATTGCAGACAGAGTAAGTAGAACAGCCCATGCTAGGCTGGCCTGCAAC
+CTGGTAGCAAGTTGTATTCCTACATGGGGCTTCCTTTGTTCCCATGCATG
+CAAACTCCAGGCCAGTCGCTAGGGAGGGCTACACCAGAGTGCTCACCGTG
+CTCCTGTGGGCATCTACCTCTTGACTTGTCCAAGGAGCCAATTCTCTGAT
+ATTGAGGCATTTGCTGGTGTCTCTAGGAGCTGGATGTTGCCCTTGGTTCT
+TGGCTTCCTGTGGCCTCTACCACATGTGATCAGAGGTGTAAGGTTCTATA
+AGTTTCCTTCCTTTCTCTCATTTACTTATACTCAGACACTTGTCTTCACC
+AAGGCAAAATTTGTATTTCAAGTGTGTTTTTTCTTACACTTTGTAGCCTT
+CTTGTCCACTTGAAATATATCTTTTATTATATCTTTCTAGTGTCAGAATC
+TCCATGACTCAGAACACTTAACATGGCTCATTGTGAATCACATTCAAGAT
+CTGATCAGCTTGTCTCATGAGCCTCCAGTACAAGACTTTATTAGTGCCAT
+TCATCGTAATTCTGCAGCTAGTGGTCTTTTTATCCAGGCAATTCAGTCTC
+GCTGTGAAAATCTTTCAACGGTAAGTCTTTAGCCTGCCAGTTTGCTTTCT
+CCAACTTAAAAATGGGATACTGGGATTTTGTCAGTACTAGTTATCAGTCT
+GAGGAATAATAAATTTCGTTCCTTCTCAACATTAGCCAACCACTCTGAAG
+AAAACACTTCAGTGCTTGGAAGGCATCCATCTCAGCCAGTCTGGTGCTGT
+GCTCACACTATATGTGGACAGGCTCCTGGGCACCCCCTTCCGTGCGCTGG
+CTCGCATGGTCGACACCCTGGCCTGTCGCCGGGTAGAAATGCTTTTGGCT
+GCAAATTTACAGGTACTGAAAATGGTAATTTATATCAAAACTTAGAAAGT
+CAATCAAAACATTTGGTCTATTGACCTGGTCTTGATTGGCCACTGATAAA
+GAGCATGTATGTCATATTTGTTATTTGTGTATCTGACCAACGGCTCTTTT
+TAGATATACTGTGTATAGTAATTTATCCTTTTTAAGTGGGTGTGAGGTTG
+TATATCACAAAAGCCCTGATGTGTTCTTGTCTGTGTAGAGCAGCATGGCC
+CAGTTGCCAGAGGAGGAACTAAACAGAATCCAAGAACACCTCCAGAACAG
+TGGGCTTGCACAAAGGTAAGACTGCAGCGTGGGGTCCTGGCACTTGGGCA
+ACCAGCGTATTAACACATAGATATGTTCAGGAACAAATAGGTAGACAAAG
+GAATTAGTGTACAGTGAGTTTACTACAGCAATGCCAGAGTAGAAAAGACT
+ATCTAAATATCAGATGAAATTTAGTCATGTCTCACTTTAGTAGACATGAA
+GAAGTGGCACTCAAATACCTGTTGCACAGAGAAGGGACTCCTGAACTTTG
+TGACTGTTGAAGGGATAAGAAGTATGTTTGTTACCTCTGCTACTGGACCC
+TGACTGAGTGGGAATAAAGCTAGAACCTAATTGCCAGGTTGGAGGGAGTA
+AACATGAGAGCACTACCTGGCTCTTGGCATGTGCACATTATGTTAAGTGA
+CATCTCTTCATAGCTATTCTTTTGCCTAATTGTTTGAAAGTCTTTTAGAA
+GCCTTTATTAGAAACATTTCCATCTGTAGTGTAAGTGTAGTTCCTTGACT
+ACAAGATAAATTAAGAAAAGCTTTCACCTCTTTTCCATTGCTGAAGGGGG
+AAGGGAGCATTCGAGAGGGTCTCTCATAACTTTAATCTTCAGAGGATTTT
+TCATGGTGTTTCATAATGGGACAGGGCATGGTCTGGATGATTTTTCTCAA
+TCTGCAGAGCCACATGATGCTGACATTTGATCATTTGACATATGAAGTAT
+CACATTGACTCTGTATAGTAAGAGAATAAATTATCATTGTGTTTGACAGT
+AGTTGTAGTCATAGACCAAGATAAGGAAGATTATGACTTCATAAATAATT
+TAGGCCAGATGAAATGGTTTTCTGTGAAATGACCATTTCTAATGGAATGA
+ATTGTGTCATTATTGGGGATTAAATGGAGTTTGTGCACTTGGATCTTAAA
+ATTTATCTGTTTTGCACACATTACAAATACGATGGCTAGGATTATGTAAT
+TCAGTGGTAAAGCACTTGTCTAGTGTGCACAAGTCCCTAGGTTCAATCTC
+TAATATTGCCAAGAAAAGAAAGAGTAGAATAGAATAGCTTTGTTAGCGGT
+TATATAGTTCATTGCCTAATGGAATGTTGAGCATAAATGAAACTTCTGGA
+AAATATCAGTGAGGTATAAATTTTGGTAATTTAAAGTAACTAGGGCGTAA
+ATGTGCATCATGACTTTAGAATGTTGAAGGGAAAGTCCAAAACCTGTTGC
+CCTGTCTTAAGAAGCTCCTAGTGCTCCTTGGTATTACATGTTTCTAGAAC
+TCATCTGTGCAAAAACTGAGATTTCAAACCAAAGAACAAACTACTCTGGC
+TTTTTTATTCCAGACACCAAAGGCTCTATTCACTGCTGGACAGATTCCGA
+CTCTCTACTGTGCAGGACTCACTTAGCCCCTTGCCCCCAGTCACTTCCCA
+CCCACTGGATGGGGATGGGCACACATCTCTGGAAACAGTGAGTCCAGACA
+AAGTAAGTGTCCCGAATGTCTAAGTGTGATGACCAGGAACCCTGTGGAGA
+CAATGACAGCCTCTGTCTACAATGAGGATAGTGGTGGCTGTCAGTATACA
+TGGGACCTGACACTCAGCTCAGGTCATTAGATGCCCTGCTTGGGATTAGA
+GTGCAGGATGGAGGCCAAGAGGTCCTACTGAGAACAGGAGTGCTGATGTG
+AGGCTTTTGTGGAGGACTGTGGGGGCAAGTCAGGTGGCTAGTCAGCAAGT
+CAGGAAAAGTTGGTTGTGGTCCAGGACCTATGACTGCAGACATTGTCCAG
+CACATGCTGACAAAACTTGGCCTGCCTCCCCCTCTGAACCTTCTATCTCC
+TATAAATTGATGCACCTACTGCCACCAGCTATATAATGTATTGTCATCTG
+TACCCTTTCTCATTCACACTTGAGAATTAGAAACTGTTAGGGCCTTTGCC
+TTTTAGGCAAGGAGATGAGATTTTAGAAGCCTGCAGCCCATGACAGAAAA
+CACACATTTGCCCCAGGCTCACTCTCCAGCTTTGTGGGAGGCATTTCTTT
+GGCTTTGGCTGCTGGGAAAGATGAGGGAGGCAGATACTCCAGTATAGTAT
+AGATGGTGCATCATCTAGAGTGCAGGTAGAGCAAAAATTGTGAACACTGA
+GAACTTGGCTGAGTTTGCAAGGACTGCTGGAAGGTCCACAGGTGGAAAAG
+AAGAGGGCATTCAAGCACAGAACAAGAGAGGGAAGACTAGCTGTCTAGAG
+AGTGTAAGCCCAAGATGTGTCTGATGTCTGTACAGCCAGCCGTCGGTGCT
+TCTATCACAGCCCAGAGAAGCCGAAGTGCCTACCCAGTCCCATTCAATTT
+TCTTTTCTTCTCAGGACTGGTACCTCCAGCTTGTCAGATCCCAGTGTTGG
+ACCAGATCAGATTCTGCACTGCTGGAAGGTGCAGAGCTGGTCAACCGTAT
+CCCTGCTGAAGATATGAATGACTTCATGATGAGCTCGGTAGGCAATAATC
+CGTTGAGTCCAGGAAATCCTCAGCTCTGCTTGTCAGAAAGTTAGATTTGT
+GTCTTAGTTAGGGTTTCTGTTGCTATGATATAACACTATGACCAAAAAGC
+AAGTTGGGGAAGAAAGAGCTTATTTGGCTTAACACCATCATTGAAGAAAG
+TCAGGATAGGAACTTAACAGGGCCAGAACCTGGAGTCAGGAGCTGATGCA
+GAGGCCATAGAGGAGTACTGCTTACTTGCTTGTTCTCCATAGTTGGTCAG
+CCTACTTTCTTACAGAACCCACGACCACCAACCCAGGGATGACACCACTC
+ACTATGGGCTGGGTCCTCTGCCATCTACCACTAATTAAGAAAACACCCAC
+AGGCTTTTGATGACATTTTTTTTCTTGTTTGTTTGTTTTGAGACAGGGTT
+TCTCTGTGTAGCCCTGGTTGTCCTGGAACTCACTTTGTAGATCAGGCTGG
+CCTCAAACTCAGAAATCCACCTGTCTCTGGCTCCCAAGTGCTGGGATTAA
+AGGAGTGCACCACCACCCCCTGGCTTGGTGGCATTTTCTTATCTGAGGTT
+TCCGCCTCTTAGATGACTTTAGCTTGTGCCAAGTTGATAAAACTAGCCAG
+CACAATTTGTCTCATCTTTGTGTGATTAATATATGGGAAACCTGAGTTCA
+AGCAAGGGACACCATCAAGCAAAGCACATGGGAGGCTCTAACAAAAAATG
+GCACACGAGAGCTATAATCCAGAATAGCTAGCACATGAGATGTGAATAGG
+ATGCCGTTTTAAACAAACTAGAATCCTAAAAGAAGAAAATTGTTTTAACT
+TGATCTTTCATACCTTTAAAGAAAACGGGTGGTCATAGGCTATGGTCAGT
+TGATCATTCATTTCCTGACTATAAGAAAGTATTGTGTGTTCATTGTCTTC
+TGGAAATTGATCTCTAGTAACCCCTGTATTAATTACTTTCTCGTTGCTGT
+GATAAGACACTGAGGAAAAGCAGTTGAGATGAGGAGGGTTTGTTTTGGCT
+TACAGTTCAAGGGTACAATCCTTGGCAGGGGAAGCATCACGGTAAGCTTG
+ATGTGTCTGGTCACATTGTGCCCGTGACCAGAAAGCAGAGAGAAGTGACT
+GCTAGTGCTGAGTTTGCTCTGCCCTTTGTGTTCAGTCCAGGATCCCTGCC
+CACAGTTAAAGTGGGTCTTCCTACCTCAACCTCATCAAGATAGGCACACC
+AAGAGGTTATTATCTCCTCAGCAGTTGTAGAACTGCTACGCTGACATCAG
+TATTAACCATTACACCCATCATGTAGTGAGGCACCTTGTCCCTGTAGATA
+AAGAGGCATTCTGTCATGTAGTGAGGTACCCCGTCCTCTCTAGATATAGA
+GGAATTACCTCATGTAGTCAGATGCCCTGTACTGTCTAGATACAGAGCAA
+TTCTCCTCCACTTACCCCTCGAATACCAGAAAGCATACTGAGAGCTGGTG
+CAGGCCTTGAAAGCATTCAATTCCCTTCCTTGTCTTCTTTGCCAAGCACT
+CTTAGGCCACTACCTTAGTGGGGTTCTTTGTTGCCCAGTGAAGACAAGGA
+CCTCATTGCCCCTTGATACATGCCAAATGGTTATGGGGAAGCAGGAACTG
+AGCAGGTTAATAGAAGGTGTGTGTGTTGTGGAGAGAGAGGGTTCTCACAT
+AGGAAGATATCTAAAGCACAGGACCCAGTTTGTTATATTTTCCAAGTCGT
+TAGGTGGACTATTAGCAGCTTGCAAGTTCCATCCATGACCATAGAAATGT
+TTGATTTGGGGGAACTAATGATGAAATACAGTGTTTAATATTAAAGCTTA
+TGTTCTACTTGAAAAAATTGTGACTCTCTCTAAATCCTTAAATGGCTTAA
+AATAAGTTTTTGACAAAACATAATAAAAACTGTCATATGAGGCCAGACAT
+GGTAGTGCATATCTTTAATTCCAATACTTGGGAGTCAGACACTTGAGGAT
+CTCTGTTTGTGACATGTCTGGTTGACTTAAGTTCCAGGCCAGCCAGGGCT
+ACATAGTAAGACTATCTCCAAATCAAAAAAAAAAAAAGAAAATTAAAAGT
+TTTTGGCATGTGAAATGTTGTGTGTGTGTTTTTTTAAGCAGATTTTTGTC
+TAATATAAGATGCTCTGTGTGCCTTCTCAGGCTGCAGCATTGCTTGGCAT
+CCCACTGGATTCTTAGATGGCATATTAAACTTGGTGCGCTGTCTACATCA
+ATTAAGATTTGTCATCCTAGAATTATTTCAATGAAATATAAGATCATAAA
+AATTAAAAATATTGCTCTTTCTCTCTTTCCCTCCCCCCTCTCTCCACGTG
+GCCATGGCCAGTCTCTCTCTCTTTCTACCTTCTCTCCTTTCTCCCTGACT
+TTCTACAATAAAGCTCTAAAACCATTTTAAAAAATTAAAAATATTACTTT
+AAAATTCAAATATGACAGTGACCAGAAATATTTATTAAGCATGTTAAGTG
+GAGTTGTTGATATATTTATTAATATATATAACATAGGATATACTTTTTAA
+AATAGAGAATTCAACTTAGTTTTATCTGTCTTTTAACTTTATTTGTAGTC
+TAAGATCTTTTCTAGAGAGTATTTCCCACTTTTATTATTATAAGTTACTT
+GAGACAAGCTACATCATAAGAGAAAAAGATTTATTTTGACTGATAGTTCT
+GCACATACAACATCCAAGGGCTCATCTGGTGATGACTTTACTGTCAGAGT
+CCCAGTGTGGTGCAGAAAACCTCCCATGGCAAACAATAAGGAGCTTGAGT
+GTCTCTGTTTCTAGAATATTCTCAGAAGCATTCCTTACAGTTCTTTGGTC
+TGGATTATCTCAGAAACAAATGCTTATTGCATTAACTGTGTGTGTTCCAG
+CCTGAAGGAAAGCTTACTGTCTTTGCTGTTGTTTGTCTTGCATGTAAACT
+TCTGACCCAGGAGTTCAACCTAAGCCTTTTGGCTCCCTGTTTAAGCCTTG
+GCATGAGCGAGATTGCTAATGGCCAAAAGAGTCCCCTCTTTGAAGCAGCC
+CGTGGGGTGATTCTGAACCGGGTGACCAGTGTTGTTCAGCAGCTTCCTGC
+TGTCCATCAAGTCTTCCAGCCCTTCCTGCCTATAGAGCCCACGGCCTACT
+GGAACAAGTTGAATGATCTGCTTGGTAATTAAATACAGTTCCCTTGGATG
+CTTGTCTGTCTATCTTCTCTGTCACTCTGTCTCTCTTTATGGGTGATAGG
+AATGGCAGTAGCAGAATGGACAAGCCAGAGGGACACTGAGTCACACATTG
+AACCTAGAGCTGCCAACTCTGGTAGATCAGCTGACCAAGCCTCTAGGACC
+CTCCTGTCTCAGCCCTAAGTGCTGAGGTTACAGGTGTACACCCACAGCCA
+GGTTTTACATAAGATCTTAAATTCCAAACTCAAGTCCTCATGCTTGCACA
+GGAAGCACTTATCCACCGACTCATCCTCTCAGCTCAAGTTATCTTAGTGT
+TTTAGTTATTTTATATTATGTTATAGTTGTCTGCATGTATGTCTTCACCA
+GATGCATACAGTGCCCATGGAGACAGAAGATGGCATCAAATCCCATGGGA
+CTGGAATTACAGGTGGCTGTGAACACACTATGTGGCAGCTAGAGATTTAA
+CTTAGGTCCTCTGGAAGTGCAGCTCATGCTCTCAGCTCCCGAGCTGTCTA
+TCTAGCTACAAGTTGTCACCGTTTTTAAAAGTATTACAGATTCAGCACCG
+TGCTTTTCCTCAAGCACGCATATAGTCAGGACTGTTGATCTAAAAGGCTG
+ACAAAAATAGCTGAGAAACTGCACCAAATCCTTAGCTCTAAACTTCTTTC
+TTTGTTGCTTGACCTGGACATAGAAAGTCAGGTTCTAAGCCCTTCAGGAT
+CAGTGGGTTAGACTCAGGGCAAACCATGTCCTGACTTTATGTAGCACGTA
+TGAGTGAGCATGTACAGATGTGCTTGCTCTCTTGGTCTTGGCAACCTCAA
+ATTCACATAGTTGTGTGAAGGCTTCTGAAGGGGCGGGCCTGTGCTCACAG
+TCAAAGTCACTCATGTCAGTCTCATGTTTCAGGTGATACCACATCATACC
+AGTCTCTGACCATACTTGCCCGTGCCCTGGCACAGTACCTGGTGGTGCTC
+TCCAAAGTGCCTGCTCATTTGCACCTTCCTCCTGAGAAGGAGGGGGACAC
+GGTGAAGTTTGTGGTAATGACAGTTGAGGTAAGAGCAGCTCTGAAATTAT
+GTGTCCCTGTGAGGACAGGATATGTGAGTAGCACTAAGATGAAAGTCCTT
+GAAAACCGACAGTGTGGAGTACAATAGTGCACACATTAGCCCAGCTGCCT
+TGGAGGCAGAGGCAGAATTGTGGGTTCCTGGTCTGTAAGGATGTGCCTGA
+GTATACAGCTAGACCCTATTTGAATAAACAGGAAGGCAGGGAATACCTAT
+TGGCAAAGTCTGATTCACCTGATGGTACAGAGTGCCTTTCACCCTCACCA
+CTGGGAAGCAAGGAGGTCTGTAAGACATCCTGTTATCCCTACACTATAAA
+CCTAATGTGGGTCCTAAATAAAATCTAGACAGTGTTACATTTTAAATTGG
+GCAGTGAAGCTGGACATTTCACCCAGAAACACTTGGCCCCTCAAAATGTA
+TCTATACGTGCACTATAGTTTTATTACCTTGCCATGGGCATGCTGGGAAA
+GAGCCTCACTGTGCCAGAGCTGTGCTGCCAATCCTGAACAAGGGTTGACA
+CCTTACCCTAAGAGAAGAAAGTCAGTATCCTGAGGGTGTATGGTACAAAG
+GCACCAGGTGAACCAGGCTAAGTTAGGTGGTCTTTGAGCTTGTCTTAGCC
+CAGTGAAGACAGGAAAGCAAATGTGTGTGTAAAGTATTGGGTGGCAGCTC
+CTAGTCATACTCTGCGCTGCACAGGCCATGCCATGACACTTGTTTCCTAT
+AAAAACTCTGTCCCCATTTCACACATGGGGAAAGAAGCTCAGAGAGGTTC
+GGGGACTTGCTAGAAGTCACTAGTCATAAATCATACTCCAAAACTCAGTG
+TTGTGACTGAGATACAAAACAAAACACATTCTGTTTCTTTAAAAAAAAAA
+AAAAAAGTCCAATGTTACAGGAGCCCTCAAGACCCTGGCTGGAGGCTTTG
+TATATGGCTCAGATCAAGTTTCTATGGGCACCCATGGTTCTAAAGGAAAA
+AGACACCTAGGGTAAGGTTGGGCATTCTGGCAGAGGGAAGGCTGGAACTT
+GGGGTATAGGTGGATCAGGACTGAATATTAAGAAAACTAGGAATGACAAC
+CTAGAAATGTGGGTCAGGGGCCATTCTTCTGCAAGGAGGTTGTCTGATCT
+CCTGCCCTCTCCATCTATCCATGGTCTTCCATATCTTTTATTGGCACTGC
+TTCCCTAGAGGTCCCTGAGATAGAGTCCTGGGTGAGCATTCTAACACAGT
+GCTTCCCTTTAGGCCCTGTCATGGCATTTGATCCATGAGCAGATCCCACT
+GAGTCTGGACCTCCAAGCCGGGCTAGACTGCTGCTGCCTGGCACTACAGG
+TGCCTGGCCTCTGGGGGGTGCTGTCCTCCCCAGAGTACGTGACTCATGCC
+TGCTCCCTCATCCATTGTGTGCGATTCATCCTGGAAGCCAGTAAGTTTTT
+GTCTATGAATGATTTTCTTGTCTTCACACAGCTCAACTGATAATCAGCAA
+TACATGTCAGGCTGGAATATTTTTTCTTCCTGTCTTGTATTTCCCAAGAA
+CCAAGTTAGGACTTGGGGTGGAATGGATAGATTGTAGGTGCTGCCTTCAG
+AAGGCCCATTTGTCCACCCCCAGACCTTGTCATTACACTAGAACTTAACT
+TGAATTTGCTTTTCTAGCTATTTTGTTTTAGAGTTTGGAGTTCTAGCTCT
+TAAAAAATATTTTTTAAGTTTTATCCCTTTGATCTTTAGTCTTAGTTGAC
+TATTTGATGTTCTTAGTCCTAGATGATTATAAGATTAATAAGATTACATT
+CAATAGATGACTTCTGTTAGAGTTGAAGTGTGTGCTTGCATACTGCATTG
+TAATGCTAATATGTTGTAAAATAAAAGGGATTCATTCTTTTCAAGGAACA
+GTGTCCTCAACAAGGGTCATTAGCTAAAAATTTTTAAAAATTGGACATTA
+TAGTTTACATGTTAGAGGATGTTTTGAAGTTTTATGTTTTCAAATTAAAC
+ATTATAGAGTGGTGTTTTGATCTTTCATTGTTTAAATTGTTTTCATCTGT
+GCATTGTAGTCAACTTGGAAACAAAGATCCAGGGATTAATTTTAAAAAGC
+TAGGCTTCTTAGTCAAAGTGACGCTTTTAGCAGTATTGAGTTGTGAATAG
+TCTGATAAAAACTCTCAGGGTGGAGATGGCAGACGGTGCATTTAGAAGCC
+TCAGTGCGGAAGCACAAGTCTTTGTCTGTTCATGACTAGCCAAAGCACTG
+GTGCCTGCATCTGCTGTTCTCCAGTGCTTTTCAGTTTTACAGAAACTGCT
+TCTAGAAATCTAGCCCTCAGTTGACCTGTCATCGTACGTTTCTATGAGGC
+TGTACAGGCATGAAGTACCTTAAGTACAAAGAAGAGTTAAAGTTTATGTT
+CTGCTGTAATTTCAGTTGCAGTACAACCTGGAGACCAGCTTCTCGGTCCT
+GAAAGCAGGTCACATACTCCAAGAGCTGTCAGAAAGGAGGAAGTAGACTC
+AGATATACAAAGTAAGTCTTAGGACCATTTTTTCCCCTTCTGTGTTTCTC
+TGGAGCCTTCCAATTCATTGGCAAAAGAAACTCACGAAGTGGACTCTGGG
+AAACATTGCTCTTGCTGTCCTGAGGGCTCATCTAGACTTTAAGGGGCAAG
+AGGGCTTTTTGACCATGGCTGCATATATGTTCCTGGTTTTGTAAGCCCTG
+TTTTTCTAGTGAAGATTCCCCTTTCCTTAATCAGCTGGTCTGACTCATCC
+TGCATCCTTCCTCACAACCTGGATTTGTACCTCTTCCCTGAGAAGCACTG
+CCTCACCCCCCTGTCTTAGCCTCAGCACTTAGGCTACTTTAGGATAAATA
+TCTTTTCCTTGTCTGTGCTCTCAAGGTTGCTGCTTGGTTTGCTGTAAGTG
+ACTGGCAAATATGTATTAAATTCTGAATGAGTAACTCAAAATTTTAAGGA
+ATTTGAAGTTAGTGCAGATGCTATATGATATACGGTAACCTTAGTGACTA
+TTTCAGACATCTCTTTAGTGACATTTGTTGACATGTTTGCTTTGCCACAG
+GGTCTTCCTCTGTAGCCCAGGCTGACAATGAATTCATGACTCTCACACCT
+CAACCTCTTAAGTAGTAGATGCTGTCATTACTTTGAAAAATAATAATGAA
+ATCACCATCTAGACTTAGGAACAATGTCTTTTTCTGATCTTCCTTATGAG
+TCAAGGGTAGCTGTTATCTTAGTTAACGATTTCCATTGCTGTGAAGAGAC
+ACCGTGACCAAGGCAAGTCTTATCAAGAACAACATTTAATTGGGGCTGGC
+TTACAGGTTCAGAGGTTCAGTCCATTATCATCATGGGAGTAAGCATGGCA
+GTGTGCCAGCAGACATGGTGCTGGGGAAACTGAGAGTTCTGTGTCTTGAT
+CCAACTACAACCAGGAGAGACTATCTCATGAGCAGCTAGGAGGAGGGTCT
+CAAAGCCCACCTCCACAGTGACACACTTCCTCCAATAAGGCCATACCTCC
+TAATAGTGCTACTCCTGGAACAAGCATATTGAAACCACCACAGCTCTCTT
+ACTGTGACAGTTGTGGCAAGGCCAACTGAGTTCTTGACTCCTAACTGCCA
+GGCTTCTAGACTCTTTTTGAGTTACTTGATCTCAAAGGCTCAAATACTAT
+AGCTACACCACTTCTTCCACAAGCAGGTAACATCGAACATTATTTTTCCT
+TGGGTCACCAGGAGCCTTTGTAATCACAAGTATAAAACTGTCTAGTCACA
+GCCCAATATTATGACACAGGCCTGGAAATGCAGCACTAAGGAGGCCAAAG
+CAGGAGGATCATGACTTTGAGTCTACTCTGAGCTATATAATGCCAGAAAC
+TATTTTAAAAGACAGGGCTTCCCTGTCTTTAGCTTTTCAATTCTTTCTCA
+AGATTATATAGTTGCACAGAGGCCCATGAGCAACGCCATCTTAGTACAGC
+CTGGGCTCTCACTTGGTTTCTGACTGGAGCCCTTCAGTTCCTGGAGTTCA
+CTTCACCTCATAGTCTGGTCGTTTCCTGAGCAACAACTCAAATTATTCTG
+TGCTTCTGGTATGAGGATAAGACACACATAAGGAAATCAGACCATGGAAA
+ACACTGGAATAAATGCCCACCATCTTGAGAATGGGTAGGTGGGCCCAGTG
+GCAGGAGAGGACTTAATTCAGGCAGATGAAGTTTTGCTTATCCTCTGTGA
+CTTGAGGTCAGTTAATGAAGTTCTGGTCAGAAGAAGCAACCTGCATTTTG
+CTTTAAAAAAAAAAAAAATTGGGTTTTTGTTGTTGTTGTTGTTGTTTTGT
+TTTGTTTTGTTTTGTTTTGTTTTGTTTTTTTGAGACAGGGTTTCTCTGTG
+TAGCTCTGGCTGTCCTGGAACTCACTCTGAGAACCAGACTGGCCTCGAAC
+TCAGAAATCCCCCTGCCTCTGCCTCCTGAGTGCTGGGATTAAAGGTGTGC
+GCCACCATGCCCGGTGCTTTTTAAATTTTTAAGTCACATCCATAGATTAG
+CATTTTTTTTTAAAAAAATGTTATATGTGAGGGTGTTTTGCCTGTCTGTA
+GGTCTGCACCACATGCATGCAGTGCCCAGGGAGTCCAGAACAAAGTGCTA
+GATCCCATGGGAATGGAGTTATACATTGTTATGAGCTACTATGTGAGTGC
+TTGGAATTAAGCCCAGGTCCTCTGAAAGAGCAGACAGTGCTCTTAACCAC
+TGAGCCATCTCTCCCATCTAAGCTGGTACTACTAGAAGTTAGTCTGACAC
+ACATCATTTCTTTTAGCTTGGGGCTAAATTCCTTAAGCTCAAAAAGGATC
+CTTTTTCTGTACCAGGAAGTGCCTAAATTGTTGAATCTCATAGACAAGGG
+TAACTATCTGTTTATTTAAACTTTCACCAACTAAACAAGTTGTTCTTAAA
+TTCTATGCTGTATCAAGACTCAGTTACTATGAACAGTCCCTGCCCTCAAG
+ACTCTTACAGGGCAGATGGGTGGTTTTCATGCTTTCTCACTCCACTGCTA
+GAACTCCCATATACGGCTGAAACTCAAGTTCAAAACCATTGCTGTATTCC
+TGGTAGAAATGGAAAGAATTGCAGGGTTTAGATGCATACTAAGGAAGTAA
+AACCTCAGGCCTTAAGTGAGCAGCCCAAAAATCTGAGTCAACTGGAAGGG
+CTCTTAGGCTGGGGTTCTCTATGGCCATGCAGAGGAAGGGTGACACTGTA
+TTCTTACAGACTTCTCTCTTTATCACCATTGCCTGTGTAGACCTCAGTCA
+TGTCACTTCGGCCTGCGAGATGGTGGCAGACATGGTGGAATCCCTGCAGT
+CAGTGCTGGCCTTGGGCCACAAGAGGAACAGCACCCTGCCTTCATTTCTC
+ACAGCTGTGCTGAAGAACATTGTTATCAGTCTGGCCCGACTCCCCCTAGT
+TAACAGCTATACTCGTGTGCCTCCTCTGGTAAGTTGGATCTTGCTCAATT
+TGATATGTAACCAGGCAGCAAACTTGGGATTCTCCTCTCTACCTCCCAAA
+GCTAGAATTACATGCCCAGCTTGTCACATAGTCTTCATTATTGTGACACT
+CCCTGTGATAGTCCCAGCATTTTCATATGGTTGTGACACTGTGTTGTCCC
+CCAGGCATTCTGTGTAGTAGGTATTAGTAAAAATACTGCATTTCAAAAAA
+CTGACTGAAGTACTAAACTTCAAAACTTCAAAAGTGTCACCTCTGAAGAG
+ATTCGTACAGAGCTGGGCATAGTGGTACATGCAGAGGCATGCAGATCTCT
+GAGTTCTAGGTCAGCCAGAGCTACATAGTGAGACTCTGTCTAGAGAGAGA
+GAGAAAGAAGGATTGATTCATACATTTAGGGACAACCACTATTGTGGGTC
+TTCTCTTGAAATTTTCTTCATGAATCAACTTAAAATAGGCTGGTTCTTAG
+GTTTTGTTCCACTTCACAGTCATGGAAATAGGGTTAACAACAGCTAGGCT
+GACCTCAGTCTGCTAAAATAGCACACCAGACATATTCTTTCCTACAAAAA
+TCCTCATCAAGAAAGCAAAGGTGGCCCGCAGCTGATTTGAATCATAGCGC
+AGAGCCAGACCAGGAAGCCAGATAAGAAAGGGTGATTTATTCTGCATAGA
+TAGGGCATATGCCTGCTGCTGGGCCATAGCTACAGCGTGTGTGTGCTTGC
+ATGTAAGATCCTAGAAAAGTTCACATCTAGAACATGACATTCATTGGACC
+TTAGAGTTGCTGGGGCCCAGTCTGAGTGCTGTGACCCACCTATGCTGGAA
+GTGCTTCTAGAACAGAGAGGTGTGGACATTGGGAAGAGAGCAATAGAAAG
+CCAAGAGATCATCTAACACTGCTGCATGAGGCTCAGCCCTGAGCAGGAGT
+ATTCCTTATAAAGACTGTATAAGAAGGTGGTATGGGGTCATGGAAGGCTC
+TTGTGGAATCTACCTCATAGGCTATGTGCTGTAGGTAAATCTCCATCAAG
+AGCTTTTAATCCAGGCCAGGCAGTGGTGGTGCACGCCTTTAATCCCAGCA
+CTTGAAAGACCGAGACAGGTGGATTTCTGAGTTTGAGGACAGCCAGGGCT
+ACACAGAAGAAACCCTGTCTCAAAAAAACAAAAAACTCTTAATCCGATAC
+CTTGAGTGCCCTGGGCAGAAAAGTAGCTGTAGCAAACACTGCAGAACCTC
+CCCTGGGCATGCTCCAGAGACTTCTGTGGGTGGTGTAAGAGATTTATAGA
+AGGTTGGTTGTGTTGTAGAAATCTGGGGAGGTTCCCTTAGGTCCTCGTTG
+CTTTACTGAGGCACATAGCTGAGCTGGCTAGATGGTCCTGCCACTGGAAC
+AGCGTGGGGTATACCTCAGGGCTTCCTTTGTGCATGGTGCTTATCTACAT
+ATCGAATGGCAAAACTCAGCCTCTCACAGTTGATAGAAATGAGCAGTGGG
+GTTGTCCTTGAGACTGAATTTCATTAGTGTTTGCCTCTTTTCCAACACAC
+TTGATGTTTGTGGGTAGCAGCATTTCCTACAAGGAATGTGGCTGTGTACA
+GGCAGCCTGAGGGGTGTAACAAGCAGGTGATGGGCTGGCCTTGGGGGAGT
+GGGGGGCAGGGCAGAGTGCTGGGAGTCAAGCTTGGCATTGAAGGTTCTAG
+GCACAAGGGTGGGAGCCTCTGTGAAAGGGCACAGGCTCTGGACAAATCAG
+AGTAGTAAAGGGGGGGGAGGGCAGTTGAGAGACAAGGGACACTGGACGCT
+GGGGGTCTCTTGTCCTCCTCATGCCATCTCCATCCACCTGGCACGCTTTT
+TATCTTCTCAGGTATGGAAACTCGGGTGGTCACCCAAGCCTGGAGGGGAT
+TTTGGCACAGTGTTTCCTGAGATCCCTGTAGAGTTCCTCCAGGAGAAGGA
+GATCCTCAAGGAGTTCATCTACCGCATCAACACCCTAGGTATCCCACCAC
+AGTCCTCTTCAGTCCCCATGTGCCACCTCCGAGACCTGAAGCCTCAGGGT
+AGGGCCATCGCACTCGGCAACTGAAAAGGTTCTGGGGTGGTAATGATGCA
+GTACAAATAGAATTATGGTGAAAAGTAGACCTAGGTGTAGATGGATGAGT
+ATAGTGTGGGGGTTGCATACCAGGCTTCTTTGTAACTGTCAGAGGAAGCC
+AGTTCTGTCTTCACATTGTCTATTCAAACTAAGCCATATAGGTGGGTTGG
+GGATGTGCTTCACTGTAGTGTTTGTCTAGCACGAGGAAGCCCAGGCGTCA
+GTCTCCAGAGCCGCAACAACAGACTAAGTAGTGGAAGTCATGTCCTCATG
+TCTTCCTGCTTGCCACTTTGACATTGTGTTCTCACTCGAATCATTTTTCT
+TATATGCAGAATGAGCAGCTTGGGAAAACATGGCAAGCTAGGTGTCACAG
+GGACAGAGTGATAGAATGAGGGAAGGTAGATTTGGCCAGACAGCCCTGCT
+CATCCCTTTGCTGACAGGGTAGGATCTTCAGTGCTGTGGTCCATAGAATG
+GAGACTGGGTCTATTTTTTATGTTTGCTACGGATGTGAACATGAAAAATA
+CACTTAGTGTCTCAGTAAGCAGTAGCATGAGTAGTTTTCCTGTCAGGACT
+CTTCTGCTTTCTGAAGCACAGTCTAAATTGCTGCTGCTGCTGCTGCTGCT
+TATCATTATTATTATTATACCTCCTCCTCCCTCTCTTCCTCCTCCTCCCT
+CTCCTCTTCCTCCTTCTTTGATTCCTTCTTTTTTGCCCATTTTCTGGATG
+AGGGTGTGTCTTTTGTTTTTGTTTGTTTTTTGTTTTTTGGGTTTTTTTGT
+TTTTCAAGACAGGGTTCTCTGTGTAGCCCTGGCTGTCCTGGAACTCACTC
+TGTAGACCAGGCTGGCCTTGAACTCAGAAATCCACTTTCCTCTGCCTCCC
+AAGTGCTGGGATTAAAGGCGTGCGCCACCACCACCTGCCTGAGGATGTGT
+CTTAATCACTGTTCTGTGAAGAGACACCATGACCAAGGCAACTCTTATGA
+AGGAAAACATTTAGTTGGTGGCTTACTTATAGTTTCAGAAAGTTAATTCA
+TTATCATCATGGCGAAAGCAGACAGTCATGGTGCCGGAGCAGTAGCTGAG
+AGCTTTACATCCTAATCCACAGGCAGCAGGCAGAGAGAGGGAGATAGAGA
+TAGAGAGACAGGGCCTGGTGTGGGCTTTTCAAACCTCAAGGCCCAGTCAC
+AGTGACAAACCTCCTCCAGCAAGGCCATACCTCCTAATCCTTCACAAATA
+GTATATCACCTAGTTACCAAGGATGTAAATGTATGAGCCTATGGACACCA
+ATCTGACTTGGTTTTGTGGTTATAGTCCTCATAGCCTTTCCCTTCTGGGG
+TATTTCCTAAGAGTCTGGTCACCAGACCCACTGCCTGGTCCAAGTTAGGC
+CCTAGGGATTCTTCAAGTTTTCTTATTCGGATCTCAGCAACCTACCTCTA
+CTTCATCACTGTGCGCAGTAACTCCTGAGAAGGAATCCGGCACCTCCCAT
+GAAGGACATTTTACTTCCAATCTTAGTTGCTTTTCTGTCGCTATGATAAA
+CACCATGACCAAAAGCAACTTGAGGAAGAAAAGGTTTATTTCTCTTTATA
+GCTTATAAGTATATCACTGAAGGAAGCCAGGGCAGGAACACAGGACAGGA
+ACCTAGAGGCAGAAACAGGAGCAGAGGCCATAGAGGAGCGCCACTTACCA
+GCTTGCTCACCAAGGCTTGCTCAGCCCGCTTTCTCAAAGCACTCAGGACT
+GTCACCCCAGAGGTGGTTCTACCAATATAGGCTGGGCCCTACCACATTGA
+TCACTAATTAAGAAAACTCCCTTCAAACTTACTTATAGGCCAATATTTGG
+GGGTAGCTTTTCAACTGAGAGTTCCTCTTCCTAGATGATTCTAGCTTGTG
+TCAAGTTGACATAACTAGCCAGCACATTGCCTTTGCTCCTGCCCACTTAT
+GCCTTCCCTGGGCAGTTATCAGTTGGGTCTGTCTCCTGGTGTCAGTATTC
+ACTTGAATCTGAGTTATACCCCATGTTCCAGGAGGTCCTAGCACTTGATG
+CTCTACTTTTCTGCTCCTCTTTACTGCATTCTGGCCTTACAAGGAGTGCT
+GTAGATATTTTTGGGTCTTGGTGGGAACATCAAGTGCAGCTATTTATTGA
+ACTTTCTCTGATTGGCCTCCTAGACTGCTAGAGTGGTCCTAGCCTGATAC
+CATGCTTTTTTTTTTTTTTTTTTTTTAAAGTCTGTCTCCGGGGTGGCTTT
+CACTGTTTGCATTTACTTACTTTGTGGAGGCAGGACCCACGTGAACATCT
+CATAGTGCAGCATGCACATGGAGGGCAGAGGACAGCTGACAGGAATTGGT
+TCCCTCCTCCTTCCATGTGGTCCCTGAATTGAGAATTGGACTCAGATTGT
+CAGGCATCTCGCTGGCAGTCTGTTTAATCTCACTGGTTTTATAGAGACAG
+AATTATTATCTCTATGGTTCTGTGGTCTGCTCTGAGGTCTCACTTAAGGT
+GAGACTTTTAAAATGCTTCGTGGGCAGGACTAGCCTAGGTGATTAATGCC
+TTTGTAGACAGCCTGTCACTGTCCTATACAGTGGGGGATTGAAGGGCAAT
+CTGGCCTGTGCCTCTTGGCCTTGATAGCATAACTGGTCTCCATGTGCTGC
+TATATCCTTTATGATAGATGGAGAGAATGGCAGCCCTCATGCTGTAGGAT
+TGCTCTGAAGGTGGTGTCCTAGGAGCAGCACTGATGGGATTACATCGTCA
+GTGTCTCCTCATGGGTTTGGCGAGTCAGGAAACTAATGTAATGATTGATT
+CCCAGGGTGGACCAATCGTACCCAGTTCGAAGAAACTTGGGCCACCCTCC
+TTGGTGTCCTGGTGACTCAGCCCCTGGTGATGGAACAGGAAGAGAGCCCA
+CCAGAGGTGAGACTTCCATAACTAGGGGGTGGCTAACTGGAATCCTATAG
+CTGTGAAAGCTGGTGACTCTGTGTTTTGAAACAGAAAAACAGGCTGATCA
+TGTACCAACTGATGGTGCAGAGTAAAGACAAATGCCCTGAGAGGCTTGGA
+GACTGAGGCTTCCAGTAGCTAGGTTGTGTCTTTCACTACATACATTCCAC
+TCTGATGATCAATGAACTGGTTCTTCAGTACTGTTATGTTTTACTTACTT
+ACCATGATCTGAAAGAATGTCATTGAGCTAAAACAAAAAAGACAATGAAT
+CTAGGCCATGCACATGTACTTGTTTGGTTGGGTTTTATTGCTGTTGTTGT
+TTGTTTGTTTGTTTTAACTAACTTATAGGTTTTGCTTTGTTAGCTTAATT
+GCTTTGTTAGTATTAATATGACATGAATAACCGCCATATATTTGTAAAAT
+GAAGGAGGTTCTGAATGTTAAAGTACTGGGATATAAGCCCTTGATTCTAA
+GAAATATGTAGTAAACTGTGGAAATGACAGAGAAGAAGGTAAATCTATAT
+AGATCACAGGCCATCAGTATTATCTTCAAACTCATCTGTAACTGCCAGGC
+TGCCATAGGCAGTGTTCTAAAATGATGATAGAATTTAAGAACAATTTTCT
+CCAAGGTAAATTTCTATTTGGAGTTCTATGCCAAACATTTGGATACTCCA
+AGTCTCAGGTATGTAGGTAGGTATAACCAGTGCTGAGCAAACTTGGACTT
+AAGACCCTGGCTGTGGGAAGCCATAGCTCTTAAGTGATCTGATATTCCTG
+TATGAGGCCATGTGGCCTATGGGCACTGTGTGAATTAGAGGCAGGATGAG
+TAGTTGGTATCTTCTTATCATGTTGTCAATGTGTGGACTGTTTCTCTAGG
+AAGACACAGAAAGAACCCAGATCCATGTCCTGGCTGTGCAGGCCATCACC
+TCTCTAGTGCTCAGTGCAATGACCGTGCCTGTGGCTGGCAATCCAGCTGT
+AAGCTGCTTGGAGCAACAGCCCCGGAACAAGCCACTGAAGGCTCTCGATA
+CCAGGTTTGCCTGCGTTCTTATGTGGGCCAGTGCAGAGGATGGTGAGGTA
+GCCCACTTCCCCTAGCCCTCTCCTTTGTAATAAGAATTGATGTAACAATT
+AATTCTTACTGGTTCTTTCAGTACTGTTAATTTTTTCGTCTGTGGCCTTG
+CCCTGAGATACAGATACAAAAGGGATTTTTGTCAATGGTGACAACTGAGA
+ATTTAGTCTGATATGTTATGGACTTCACTGGGTGTTCTGGCTCAGACTTG
+TGGGTCAAGAATGAGCACCTACTGAAGGGTTTGTTTTTTGTTAAACCAGC
+TGCTTCAGGCAAGTGAAAGATAATTCCTGGTGGCTTTCACTTCTCTGATG
+TTTGAAGGAAGCAAACATGTTTTTTCCTTGGTTCTGAATTTTAGATTTGG
+AAGAAAGCTGAGCATGATCAGAGGGATTGTAGAACAAGAAATCCAAGAGA
+TGGTTTCCCAGAGAGAGAATACTGCCACTCACCATTCTCACCAGGCGTGG
+GATCCTGTCCCTTCTCTGTTACCAGCTACTACAGGTACAGGAAGAAGCTA
+GAACAATAGTGTGGCTTAACAAGGAGGCTTTGTGCTGAGTGTATTGCCCC
+CATTCACAAGTCTCATGCTTCGTCTAGACCACCATGAGCATCAGGGTGTA
+TCACCTCACTTAAATGAAAAGCACTTTCTTTCCATTGTGTATGTTAATTG
+TACAAAGTTTTGTATTTCATAAGGACATTTTCATACAAGCACATAATGTA
+TATTGAATGAGCCACCCCCAAAACCTCCTTACCATTCCCTTTGCCCATCT
+TCCCTTCCCTTTGCTGTCCCCACTATGTTATTGTTGTACAAACTGCTTCC
+ATCTCATAATTACTAAATTGTTTTAGGAATTGTTTGTTGTTGTAAATTTA
+AGTACCTTGAAGACTTTATTTGGGAAAGTTTGTGTATGGAAATGCAATGT
+GTCATTTATCCCAAAACGGTTTTGGCAGCTTTGAAGTAAACAGAGCACTG
+AGACTTTAAATACTCATGGGACTTAGACCGTGAGGAGCTTCTTTCTGTGC
+AGGTAGCATGGGTGAGAAATGGCATACTTGCTCTGGAACGTCTTGAGAGA
+GGTGTGGATAGATGGCATGGCAGCTCAGAGTACAGCTGTGGGGAGGACAG
+TAGAGTACCTTGGGGGAGGGTGGGCAGTGTGAGATCCAATGGAGTTAACT
+TGAGGCAGCCATCGAAGAAGAGATAGTCTAGTTTGCCCACTGGGCCTGAC
+AGCTCAACAGGCTGCAAACGGACAGTATTCCAAGACCTACAGCCTCTGCA
+TAGCTGGAGTATCCTGCCTGTGGGGAGCAGTCTCATTGGGCAAATATCTG
+TCCAGGCAGGAGCAGGGTCTCATGCAGTGCTGTGCTGATGTTTGGCAGGT
+GCTCTTATCAGCCATGACAAGCTGCTGCTGCAGATCAACCCAGAGCGGGA
+GCCAGGCAACATGAGCTACAAGCTGGGCCAGGTAGGTCAGTTTTACCAAT
+CCACACCCTTCTATAAGGACTGTAGGCTGGAATTATAACCACTTTGAGCT
+TGAGCCACATACCACATTCAGAGAAAAGTTTTTATGTACTTTATTCTAGA
+TTTTAATATATTTTACATATAAGCATACAGATGTTTATAAAACTTGTGAG
+ACAGGAGAGAAACAAGAAGCATGAAAGGTCAGGGAGGAGAGAATGGAGTC
+AGCTGAGTGCTGTCAGGCAGATGCACTACTGAAAGTTTCCTGGCTGCTAG
+CTGAGAGGGTAGCTAAGTATCCAGCTCAGCAGGCAGCTGGCAGTTGGACA
+GTTCACTTCATTCAGAACAGTAGGGATAGCTGACAGCTACTGACTGTAGG
+GTCAGGGAGATGTGGTACCTGCTGCCTTTCTAGACAAACTCTTCTGTGCC
+TTCCTACCTTTACAGAGAAACTTCCACCCTAAGATAAGTCACAGTGATGG
+TTAGAATGTGATGTGCTTTTGATGCAGTGTTTCTGAAGATGAGGTGATGA
+GTAAGATGCCTGGGGCTCCTGGGCACATGATCATATTCTAGGTTCCATGT
+CCCTGTTGTCACATTGTCCCTTATGGCTGTGTTGAACTCATCTGTCTGTT
+TCCTTCAGTACTTGAGGCCAGCTCCAATGTCTGCAACCTGAATGTAGTAG
+GACTGTGTGGGGCCAAAGTATAGATTTAGCCCTGCCTTGCCTGGGATACC
+TGCCAGCTATTAAAAAAAACTGCCGGGCTGCGCTGCCACTCCATTTTGGG
+GAAAGAGATGGTAGAAGGGGCTGGGCATGGAGAACACTGGCACCTGGAGT
+CACAGCTCACAGCTTGTCTTCACTGAAGCTGAGATCTGAGATCATAAGGA
+AAAATGCAGGAGGATTTGTAGAGGAGCAATCAAAACGGAAGTCACTGTTC
+ATCCTGAGAATGCAAAGACCACTTCATCTGCAACTACATTTAGGAGCTTG
+GGGCTCTTGGCTGCTCCTTATACCTTCCTAAAACAAACGCATCTCCTTCT
+GCATCCTTGGAGAAATTTCTGTAACCAAGCAGCCCAGAAAATCAGGTTTT
+ATAATTGGAAAACACAAATGAGTGGGACCAGGTGTATAAAATCCATGTAT
+GGGCATTCTCTCCAGGGTGTGGTCAGCTGTCACTTAGAAGCACTTAGTGT
+CACTCTTTGGTGTTGCTTTTACTTCATACCCTCTCCAGACACTCATTCAG
+GCTTTGAACCTGAGGTCAGACATTGGAGCTGGCCTCAAGTACTGAAGGAA
+ACAGACAGATGAGTTCAACACAGCCATAAGGGACAATGTGACAACAGGGA
+CATGGAACCTAGAATATGATCATGTGCCCAGGAGCCCCAGGCATCTTACT
+CATCACCTGCCAACAGTTTAGAACATTTACCTTACTACCCAGGGGTTCCA
+CTGATAGTAACATTTGGGTCTGAGTGAATCATTACTGTGGCCATATGTGC
+TCAGACCAGGAAGACCTGATTGTACCTACCCAAGAAGAACCTGAGGCCTT
+TACCTATGTCCTTATCTCTGGTGCCTGCTTCTATGACAGGTGTCCATACA
+CTCCGTGTGGCTGGGAAATAACATCACACCCCTGAGAGAGGAGGAATGGG
+ATGAGGAAGAAGAGGAAGAAAGTGATGTCCCTGCACCAACGTCACCACCT
+GTGTCTCCAGTCAATTCCAGGTTTACTGGCTCTTTTTTTTTTTTTTTTTT
+TTTTAATAAGAAATTTGAGATTTCTTCTCAGTCACTTATTTGGGGTCCTC
+TTGAGGCTAACCTCTCATTTCTGTATGGGGAAAATATCCATGTTTCACAC
+TCTGCAGAAAACACCGTGCCGGGGTTGATATTCACTCCTGTTCGCAGTTT
+CTGCTTGAATTGTACAGCCGATGGATCCTGCCATCCAGTGCAGCCAGAAG
+GACCCCCGTCATCCTGATCAGTGAAGTGGTTCGATCTGTAAGTTTGCTTT
+CCCCTCACCCAGAGGCATCTGTACACCATACACACACACTTGAACGTGTG
+CATACACACACACACAATCATGCACAGGCACATATGCACACACGTGTACT
+AAATACAAGCGACAGACACATTACACAAACTTCACCTATATGCACCAGAT
+ACCATATACATAAATACACACATGTGCCATGCATTCACCAAATAGATACA
+CAGACACATACACACACAAATATATACAAATAAACAAGCACACACAGATA
+CGCACACATAGACACATACACACCTCATCTCTAATGTCTCAGAACCTGTA
+AAGGACTCCTGCAGGCCTCCCAGGTATGGAGGGACAGAATGTGTGAAGTT
+GGTGGCGAGACAGATATTTTTTTTTCAGATCCAGATCAGGTAAATACTCT
+GAAATGTAACAGCAGTGAGTGGTGTGCCTTCCAGAGACCCAGCGTGTCTC
+TCTCTTCCAGAGTAGTAACAAACAACTGTGTGCCTTATCCTTCTAAGCCA
+ATACTCTCCAAGAAGCACTGGTTCAGAAGAGCAGTGTCAGAGAAGGAGTA
+GTCTCATTATACTTCACTACTCCACACTTCCCGACAGTCCCAAACAAAGG
+GTCGTCAGTGACATCTTTCAATATCGTCAGGGCTGGATCGAAAGGCTCTA
+AGACACTGACTTGGTCTGAAAGTGATAGAGGGAGCTGGAGAGAGTGGTAG
+TTAAGAGGACAACAGCTGCTTTTCTGAAGGACCCGGATCTGTTCCCAGAA
+CTTACACAGGGGCTCACATTTTAGGCTGCCTTTCTAACATCCCTCCAGGT
+GTAATTCTTGTTCTGATTCTCCATGTCTCCAGCTTCTTGTAGTGTCAGAC
+TTATTCACCGAACGTACCCAGTTTGAAATGATGTATCTGACGCTGACAGA
+ACTACGGAGAGTGCACCCTTCAGAAGATGAGATCCTCATTCAGTACCTGG
+TGCCTGCCACCTGTAAGGCAGCTGCTGTCCTTGGAATGGTGAGTGAGGGT
+AGTGCAGAGGCCGCCCCCATTGAAGCTGCTTGGGACTGCACTGCTTTGGG
+ACCTTGTATTGGTCACATGTGCTACATGCATCTGCATAGTTCAGAGTCCT
+GTCCCAAGCCAAGCTCCCTGGCACATGTGGACCATGGCTCTGGTCAGCTA
+AAAGACTTCGGTGCCTTTTGGTGCTGTTCCCACAGAGACTGGGATGTGGT
+GAGCATGGCTGGAGTACTTGTCTCCCTTGGCTCACTGTATGCTGTTACAG
+TGAGCACCTCTCCAGAATACAGAATGCAGAAGGTATGGTAGCACTAGGAC
+AGCACAGAGTGAAACTGGGTCACACTTCTGCCAGTCTGTCACTTGGCAGC
+CCCACTCCATTTCTCTGGTGGGGTCTAGCTCCCACTGTACTGCTCAGCCT
+ACACAGGCTTCCCTGGTGAGCTTGTTGTCATAGTAACCTTTCACTTCTAC
+TTTGAGAAATGTAGATCTTGATCCTTAACATCATGATTTTCTTGGTCTAG
+TTGTGTTGCTGTCACAAAACACCTTAGACAGGGTAAATAAACAACAGATA
+GGAACTGCTTATAATTCTCGAGACTGGGAAGCCCCAAATCAAGGTGCTCT
+GGGACGCCTGGTGTGCACCTCCTGGCTTCTCCAGCTCATCGGCATTTCCC
+ATTATTTTGCTGCCGAGTTATCAGCATTTAGCTAGTACTACTTGGTTGTC
+TTTCATATGTACATATTTTTAGGTGCTTCTCAGAACATGGTTCAGACATA
+CACGTGAAAGTAATCGCCTATGGGTTCTGCCTCTTGTTGGAGCATATTGG
+AGGGCATCAACACTCAGGAAAGGGGACAGACCTTAGACTGTTGATATTGG
+CTCTTTTGTCCTGTCACTTACTATTAAATTATTATTGTGTTTTTAAATTA
+TGTTAAAATGTTATCATTAAATGTTGAAAATTATCATGTTTTTAATCTCT
+GTTTCCTGGGATCCAATTTTGTGACAACATAAGAAACTGTTTAGGGATTG
+GAAAGATGGCTCAGTGGTTAAGAGCACTTGGTTCTCTTACAGAATATCTG
+GGTTCATTCTTAGCACTCACATTGTCAGCTCATAACCGGCACCCTCTTCT
+GGCTTCTACAAGTACACATGCATATTTACCATGTGGACACATACATGTAT
+ACCACATATAAAAATAAGAGTAATTTTAATAAGGAAACTTCTTTGTTCAT
+AGACACGTGAGTGGCTGCTATGTGTGATGTAGTTAGGACTGCTGGCAGGA
+GTAGTGTGGAAGAGCCTGCTGGGGACCATTCCCCATGGGTCATATGTGCC
+ATGCCCTCTCTTGTTAGTAAGACAGTGCTATTATCACTGTGGCCACAATT
+ATCTGATGGCATCTTTTACATACCTGTGTAACCCTGTGCAGTCTCACAGA
+GCAGCAGTACACATAGATCATGGCACAGCATGACTCCCACCTCTCCTTAC
+TGCTCTGCTAGAGGATCTGTTGTCACTGAACCTAGGGGCTAGAACAGGTA
+TACAGCATGGGTGGCCATAGCACCATCCAGAGAACAGGAACAATGAGCAC
+TTAGTCCCCCTATGTGAGCACACTCTTCAAAGCAGGCGTCCTCTGGGTGC
+TGTCAGGACTGACCGTGTGCTTGTGGGAGGTCCATTTTCCTTCTTGGGGT
+GGCTTCAGTGGCTTAACTTTCCTTGTAACTGTGGTTTTGTGCTCAAGCCC
+AAGTTCCTCAGGCAAAATGTAAATCAAATGCATCAGGCAAATAAAGAATG
+GTCATTTTTAAGAAACGACCCCAAGGCAGAATGGCAAATGGCTCCAGTTC
+CCAGCTCTGTGTATACAAGCAGCTTGCTTGAAGGCAGTGTCTTCTGAGAG
+GCTTCCAGCCTGTTTTGTGCTGCTACAAGCTGTGCTGCATTGCTTTTCCA
+GGGGAAAGGGACGCTCACCTCAGATGCGCTACTGATGATCCAAGCCCAGC
+TTGTGAATCACTGTATTAACTGGCTTTCCTCATAGGAACTTGAGTGAAGG
+CTTATTTATTGGAGCTGAGGGACCCCAAAACAGCTGCATACTAAAAAATT
+CTACCACGGCATGGGTGATAATACTCCCAAAGCTACATAAATAGAGTCAT
+AGTCTCCTCCTTCACTTAACCTTTCCCAAGACCAGGTGCAGTGGGGGCAG
+GATGGTGTGCAGCATCTGATAAGGGAACACTAGAATCCTGTGCTTGGGAG
+AGAGAGTCTGGTAGGAATACAGGTAATCCCAGCTCCTCTGATTTCTAGAT
+GGCATAACCATGTCATGTCCAGAGGACAGTGTTTCACAGTATGTGTCAGA
+TTGTCACTTTTCTTTGCCCGTCATTAGATCTGCAGCCTTCAACAACGTAT
+CCTGATGTTATGCTTGCAGGAGTGTGGCCAGAGCCCTAGGTTGTACTGTC
+CTTAGGAAGGACTCACAGCTCAATGGGGATAGCCTGGGAAGGGGGATGAG
+AAGGAAGTAACTGGCAAAGACTGTTAACCCTCTTTAGCCTGACACCATCA
+GATACAGACTCCCCTGGAGGGCTAGATGACCAGAAGGCCTCTTACATCAT
+GAGATCAAATGAGTCCTTAGCCAGCCTTTCCTGGGGGTGGGGCAGTCAGA
+CAATGAAATGCTGCCCTTGGATTGCAGAACACAACAGCAGTCTTCAGTGC
+TGAAGGAGTCATGTTTCAAGGTGTGTACTCCCACATTTAGAAAACCTAGT
+GGAAGTGATACCATGTCAGTCAAGGTTAGCACAAATTAATGTCAGGAGCT
+TCACAACTACCAACAGAGGATCCAGACCAAGGTTTATTATAGTGAAGAGA
+CATGAGGCAAAAATCAGCTGAAGGAGAAGGTTCCTGGGTAAAGCCTAGAA
+AACCAGTGTGAATACTATTCCTGAATAGTCATACAGATCACAAATCATCC
+AGTAGGTAGACAAGGAGCAGACAACATATATGAGCTGTTGCTACAGGGGC
+CAGCAGAACTCAGAATCCAGGGTTGTTAGAAGTAGCAGGTTACAGGAACA
+CCCGCTGCCCAGCACATACCACAGCTCCAAAAGAGAAAGTAAGTGTGGTC
+ACAAATCACATTTTCTTCATAAGGTAAACCCTCTCTCAACTTGAAGTGTT
+TAGATAGGGTTGGCACTGTACACCAACATCTCCTGGATGAATTTCAATCA
+TTTTGTGGACTCTGCTATGTGCGCAACTATTGCTAGGCTCTTTCAACCTC
+AGGGAAGCAGGCTGTTGGCAATCAGCCATCAGTCTGCCTCCAACAGCTCA
+TGTCTGTGCTTGTATCCAGGACAAAACTGTGGCAGAGCCAGTCAGCCGCC
+TACTGGAGAGCACACTGAGGAGCAGCCACCTGCCCAGCCAGATCGGAGCC
+CTGCACGGCATCCTCTATGTGTTGGAGTGTGACCTCTTGGATGACACTGC
+AAAGCAGCTCATTCCAGTTGTTAGTGACTATCTGCTGTCCAACCTCAAAG
+GAATAGCCCAGTGAGTGGGGCTGGTTGGGTGGGCTACAGGCTTTGGTGTG
+GTCTTAACAAAAACAGAAAAAAGAAAAAAGAAAATAAAAGTCTCTAATTC
+GATTTTCAAAGTATATGCCAGAACAGACATGGTAACAAATGCTTATGATC
+CAGCATTTAAGAAACGAAGGCAGGCCAGGTGGTGGTGGCGCACACCTTTG
+ATCCCAGCACTCGGGAGGCAGAGGCAGGCAGATTTCTGAGTTTGAGGCCA
+GCCTGGTCTACAAAGTGAGTTCCAGTACAGCCAGCAGAGCTATACAGAAA
+AACCCTGTCTCGAAAAACCAAAAAGAAAGAAAGAAAGAAAGAAAGAAAGA
+AAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAGGAAGGAAA
+GGAAGGAAAGGAAGGAAAGGAAGGAAAGGAAGGAAAGGAAGGAAAGGAAG
+GAAGGAAGGAAGGAAGGAAGGAAGGAAGGAAGGAAGGAAGGAAGGAAGGA
+AGGAAAGGCAGGAAAATTGACATAAGCTTGAAACCAGCCAGGGCTACAGA
+TGAGATCCTGTCCCAAAAAGAGTAACAGAGTGTATGCCAGGTCCTGGTAA
+GTAGCAGACAGGAAACCCAGGGAAATAGTAGTTATGAGACACAGGACACA
+AATCTACATCAGATGGTTTCTTGGATTTTTCTAGCAAGATGCTCTCCACT
+GGCACTTAGCACACTGCTGTGTGGATAGCAGCCTTATCCCGTGCTGCCAG
+ACTCAGCCTCCTCTTCAGTTTATGATCAGCACCTTGTTTCCTGTGAGGCC
+CTTTTCCCTGCTCTGATCTCCCACCTCCCCCTAAAGACAGCTCACATGCA
+GCTGTACCTGGATATGTTGCTGGTCCTTTTGAAAATACGTTGGGTCCATC
+ACAGCTCTATCTTAGAAGCAGAAGGAGGTGTTATGGTGTGATGGAGCATA
+GCTAGGCACTCAGAGGCACGAGCTAGGGATGCAACGGGTCTTGGAGGAAA
+AGTCCAGGTGTTCTGTGACGGGATTGTAGATTGAGAGGTGGAGAGTAAAT
+TTGGAGATGGTAAAGTCTTAGGCTGACAGACAGCTACTGGGAGGAGGTGG
+CATTCTGACAGATGATCAGAAGTGCATTTTGGGGGCCCAACAGGCAGTGG
+CAAGTTGAACTGGAACAGAGATAGTGGCTTGTAGCCAGCCTTCCTTGTAG
+TCCTTGGCTGAATAGTAACCTACTATGTACAGGGTGGGCAAGCAGCCACC
+CATTGCCCCTGTGATCACACATCCTGGCCTGGAGCAGATTTGGGTAGGTT
+CTGTGAAATCATAATCTGTGCTAAGGAATGAAGAGGACACAGAGGACTGA
+AGACAGGTAGGAGGAGCTTACAAGTCCAAATCAGATAATCACATAGACCT
+TGCTCAATGCTTTGGGCAGAGGGTCCTGTACAAAACAGTGGCCAGTTCCT
+AGGAGTGGCAGTGTCATTTGAACAGATCTGAAAGGGGGTAGAAGAGATAG
+AGTGTGGCTCCTTTGTTTATGGTGCCATTGGTCTAACTCATATTCCAGCC
+AGGCCCCTGCTGCTGCCATTGGGCAAAGAGGCACCAACTGGAGATCCCAT
+GAGTTAGAGTGTTACAAGTTAGCAAATGCATGGAGCAAACAGACACCCTG
+AAGAACCCTGACTAAGAACATAAAGAATGTTCAGGGAGAGTGAAGTGGTC
+TATGGACAGGTACAGAATTGGAGATGGCATGCTAAGCTAAGGCCATGCAG
+CTCTGGAGTGTAATCCCTAGGCCATCCAGGTGAAAAGGCCTGGAGAAGCT
+GACATGACCCCAGAGCTCTGTACTCAATAGATGTGGGTGGATAATGCTTT
+AAAACTGTCCCCTCTGCAGACAGGCAGGGGCTGCTGTATGTGACTGGGTA
+TGATTATGTGTCTCCCATTCTTAGCTGCGTGAACATTCACAGCCAGCAGC
+ATGTGCTGGTAATGTGTGCCACTGCTTTCTACCTGATGGAAAACTACCCT
+CTGGATGTGGGACCAGAATTTTCAGCATCTGTGATACAGGTGAGAGGGCT
+CTATTGAACATAGGCAGGTTACCATATTAACTGTACCAGTGGGTCATTGT
+GCTTTTGGGGAAGATAAGAATAAGCCTTTCTTCTTGTCTAGATGTGTGGA
+GTAATGCTGTCTGGAAGTGAGGAGTCCACCCCCTCCATCATTTACCACTG
+TGCCCTCCGGGGTCTGGAGCGGCTCCTGCTGTCTGAGCAGCTATCTCGGC
+TAGACACAGAGTCCTTGGTCAAGCTAAGTGTGGACAGAGTGAATGTACAA
+AGCCCACACAGGGCCATGGCAGCCCTAGGCCTGATGCTCACCTGCATGTA
+CACAGGTGAGTGAGTTGTAAGGGTCATAGACTCACCCAAAGACTCAAGCC
+AGGCCTCATGGTGTGGCAGTCTTAGTGTGGCCCTAAGACATCCTGGTCAC
+CTTTCCAGAAATTTAGGGCTCAAGGCAAGAGAGTAGCTCACATGATATCA
+CCAAGTATTGATATCAGAGCTGCCTCTGGCCACTGCATCCCTGAGAGTTG
+GAAGAAAGTTGGGCTGGGTCCTGCCTTGCTAGGGACTGTAATCACCTGTT
+TTTGAAGGATCTCTGCCTATCTGGTAAAGTCCTGTTTGAGCCACTGAGTG
+CAGATTTCAAAACTCTTGGGCTTCTGTTCTGTATAGCCTACTTAGTGTTT
+TTGTTTCAGGAAAGGAAAAAGCCAGTCCAGGCAGAGCTTCTGACCCCAGC
+CCTGCTACACCTGACAGCGAGTCTGTGATTGTAGCTATGGAGCGAGTGTC
+TGTTCTCTTTGATAGGTAAGACATGCAGCAAATCTCTACCTCTAACCTCA
+GTAGTCATTGACTGCCCTAGAGCCACAGCCAGAGCAGTTCTTCTGTGTGT
+ATTTGTTCATTCTCTTGAACTCTAAAATGTATCTTTGCAGCCATTTTTCC
+AGTGCATGCATATTCACAGAGCACCACCTGCAAACCAGACTCAGAGACAA
+CTGGACCCATGCAGGTGTACAACCAAACCACTGCCCTGGTGGATCCCATA
+GGCCACTGCTGAGGAAATAGATACTCCTGGAGTATAGCCAAGTGCTGTGG
+TGGGGCTCAGGCCATATCCACAGGAAGGAGGCATGGCTAAGGGTCTGAGT
+CACTTGTGTACTTCTCAGCTGTATACCACAGTTGTGTGGCATTAGTGGAA
+TAATCATAGGTCAGCGGTCTTTAAGGATGCTGCTGAGGTAAGGGTAAAGT
+GTTGGATGTTGAGAGAAGAATCAACAGGAGCATAGATGGGTGTCCAATTT
+TGAAGGGAAGAAAAGTTGCTGATTGACAGTAGTACTCTGGGTATTAGGAC
+ATACAACAGTGAAAAGTTTTGGTACATTTTGATGTGGAGAGTCTCTTCAG
+TAATATCACCACCCCACCCTTTTGCTGCTATTCCTTTACAAGGCTGTCCC
+ACAACTGCATTATTCTCCCCAGAGTGGGAAAGATCACCTTGTGCCCAGAT
+CAATCAGGGCAGCAAAAGAACAGCCTGGATATCATCTTTGGCTTTTAAAA
+CTCATACCCTAGTTGTGGTCTGAGCCCCACTTGGAACATTCCTGTGGGTC
+AAGGACCTCTGAGCCTTAGTCCATGATGGCTCTATGGGCAAGGTTGAGAG
+GCCAAGAGCCAGGGTGAGGCTCAAATCAGCTCCTCTCATTTCAGGATCCG
+CAAGGGATTTCCCTGTGAAGCCAGGGTTGTGGCAAGGATCCTGCCTCAGT
+TCCTAGATGACTTCTTTCCACCTCAAGATGTCATGAACAAAGTCATTGGA
+GAGTTCCTGTCCAATCAGCAGCCATACCCACAGTTCATGGCCACTGTAGT
+TTACAAGGTGAGGGTGTACTTGCCTTGTGGGGTAAGGACAGAGCAGGAGG
+AGGAAGGGGGAGCCAATCCCACACTTGCCGTAGGCCTGTCATCAGGGCTA
+GACTCATCCTTTAAGATGAGTGGCAGCTGTGGCCCCAGTCCCCTCACCCC
+ACCCCACAGTCTGACCCTGTGCTCAGGGCTCTCTTGTCCCTAGGTTTTTC
+AGACTCTGCACAGTGCTGGGCAGTCATCCATGGTCCGGGACTGGGTCATG
+CTGTCCCTGTCCAACTTCACACAAAGAACTCCAGTTGCCATGGCCATGTG
+GAGCCTCTCCTGCTTCCTTGTTAGCGCATCTACCAGCCCATGGGTTTCTG
+CGATGTATCCTTCCTTCCATGGGACTTTGGCCAGGTTCCTTGTTCACTTA
+GCATCCAGTTCAGGTTTCACTGAATGTTTTCAAACCTAAACTCTAAAGAA
+CCTCACAGGTGGGTGGTGGTGGCGCACGCCTTTAATCCCAGCACTTGGGA
+GGCAGAGGCAGGAGGACTTCTGAGTTCCAGGCCAGCCTGGTCTACAGAGT
+GAGTTCCAGGACAGCCAGGGCTACACAGAGAAACCCTGTCTCAAAAAAAC
+AAGAACCCCACAGGTGATGCTTACCCTTCCCTAAAATGTTGACAGGGACA
+CGAAACAGAAGGTCTAACCATTTGCCAGCCAGGGTTTATGGCAGTTTTAC
+TGCTGAGGGAAAGGGAAGTCCAAAGGGAGGCAGGGCAGCTCAGGCCAGCC
+AGCCACTGGCCCTGCGGCTGCCCCATCATCTGGCATAATCTGTCCCTCTG
+AGGTTTTCTCAATGCTGCTTCTCATTAGCTCTCATCTTTACGCTGTGGTC
+ACCCTCCTGGGGAAAGCCGTAAGTAAAGCTGCAGTTCCCGCCCTAACAGT
+GATGCCAGGAGTTCCTCTTGGCAGCCTCCTTCTCAGTAGACCACAAGAGT
+TACTAGCAGCAAAGCTGTCTTGGTGGTGACAGTACAGCCTCACCCTAAGT
+ACTGGGAAAGCCTTGCAGGCAGGGTGCTAGCTAGCTCTGCCCTCCCTGCA
+CTGGAGCAGTTTGAGCAGGAACACCAGCCACTAGCACTGTGTGGGGAGCA
+CAGCCCAGGTAAGTGCTGTTGTGCAGAGCACTGGGAACCAGCATCCTGTC
+TGCACTGCATGACTCCCACTTCCTGGGCCTCTCTGCCTACCCACCCCTGT
+CCTCCTGGGCAGACAGCAAGCTGCAGCTGAGAAAGGATTACAGGCAGCTG
+CTGCTGTTAATGTGGTCTAGGCTGCCCTCTATTTTGGTTGCCCTCAGTCT
+TCCCTGGGCCTCTTGGTGGACTTAGGAGGGGAACCGCTTGGGGAGGCTGT
+CTTTCCACCCTTGCCATCGTTCCTCCTTAACTCTTCTACCAGCCTTCCAC
+ATGTCATCAGCAGGATGGGCAAACTGGAACAGGTGGATGTGAACCTTTTC
+TGCCTGGTTGCCACAGACTTCTACAGACACCAGATAGAGGAGGAATTCGA
+CCGCAGGGCTTTCCAGTCTGTGTTTGAGGTGGTGGCTGCACCAGGAAGTC
+CATACCACAGGCTGCTTGCTTGTTTGCAAAATGTTCACAAGGTCACCACC
+TGCTGAGTAGTGCCTGTGGGACAAAAGGCTGAAAGAAGGCAGCTGCTGGG
+GCCTGAGCCTCCAGGAGCCTGCTCCAAGCTTCTGCTGGGGCTGCCTTGGC
+CGTGCAGGCTTCCACTTGTGTCAAGTGGACAGCCAGGCAATGGCAGGAGT
+GCTTTGCAATGAGGGCTATGCAGGGAACATGCACTATGTTGGGGTTGAGC
+CTGAGTCCTGGGTCCTGGCCTCGCTGCAGCTGGTGACAGTGCTAGGTTGA
+CCAGGTGTTTGTCTTTTTCCTAGTGTTCCCCTGGCCATAGTCGCCAGGTT
+GCAGCTGCCCTGGTATGTGGATCAGAAGTCCTAGCTCTTGCCAGATGGTT
+CTGAGCCCGCCTGCTCCACTGGGCTGGAGAGCTCCCTCCCACATTTACCC
+AGTAGGCATACCTGCCACACCAGTGTCTGGACACAAAATGAATGGTGTGT
+GGGGCTGGGAACTGGGGCTGCCAGGTGTCCAGCACCATTTTCCTTTCTGT
+GTTTTCTTCTCAGGAGTTAAAATTTAATTATATCAGTAAAGAGATTAATT
+TTAATGTAACTTTTCCTATGCCCGTGTAAAGTGTGTGACTTGGCAAGGCC
+TGTGCTGCATGTGACAAAGTTTATGGAAGTGGAGGGGCCTTCTGGCCGCC
+ACTCCCTCTCCTGTAGCTACTCAGTCTAGTCGGGCAGGTCCCTCCTGTAG
+CCCTCCCAACACCCTGTGGCACTTGCACTTCATACAGCTCCCTTTTCTTA
+TGCATTCCATTAAGCCAGCACAGAGAGAGGTGTTGGTATTGACTGCCTGT
+GTGAGAATCCTGCCTGTGGCCTAACTGAGGAACTGAAAAACTGACTTCCA
+CTGTTAGAGTTATAAGAGGCTTGCCCTGTGGCAGCTGCCCTCCTCTCCCC
+TTCCCAGGCATGACTGTCAAGCTATCTCCTCCCTGGTGTTGATGCACTCT
+CCTAGTCTCTCAGCCTGGGTAGAAACAGCATCTGCTGGACCCAAAGTGGC
+TATCCCAATAACCTCATCCCTGGTTGTGGCTGACCTGCACTGTAGCCTGC
+CCACACACCAGCTGACCATTGTGGATGCTGTCTGTCCCTTTGTATCTTCT
+GCATGGTTGGGACCTGAGAAGTGCTGACCTGATTACCCCAAAGGTGTCTC
+TGAGCTATGGTTTGTTGGTTTGTCTCAGTTTCTCATAGTCAAGGGAAAGC
+TTGGTGTCCTAGCAACAGTTAAGAATGGACCCAGAGCCTCTTTTGCCCCT
+TCCCATCTTGCCTTCTGTCAGCCCAGTAGAGTACAGACCTATGCCTGTCA
+GAGCCCAGGGAGGACTCAGCTGACAAGATGAGGCACCAAAGGGAAGGTTC
+AAAATCAGGTCAGCCTCTGGCCTCAGACAGCTTCCCATGCTGGTCAGAGC
+CACCTCTTCCCAAAGCCCAAGCCCAGAGTAACCAGGTCATGTTAATGAAA
+ATGAGCTACCTTCATTTCCTGGCTTGGTTTGGGAACTCTGTTTGCTGTTT
+GACTATATGACCAAGCAGATTTTCTGCTGTTCCGCTAAGTCATATCTGTA
+TTTCTCAGCTGTAGAGTAGGGGAGTGGAATAGTTTGGAGATGTTTCTAGG
+CTACACAGGAGGAAAGAGCTTGCAGCCTGTGATTAACTAACTGTGCTTCA
+GTCCATGGATTGCTTTCTTGAGACCCTTGAATTTCCCTCTATCTTTCCAT
+CATGACAAGTAGCCTTGCTGCTGGGATGCAAGGTTCCCTACCAAACACAG
+GTTGTGGGGAGCCTCACACTTGGCCTGACTCTCCTCCTATCTGCCCTGGC
+AAAAACCACCCCAAGGCGTGGTAACAGGAACAGTGGACATGGATTAGGTC
+TTTCAAGAGGACGTTAAGGGAAGCTACTGAATTTTAATGAAAGAAATTCA
+CCAATGCCCCTTTGCTGATTTAGGGCTTCTTCTTGTCACCCTCAATTTCC
+CGCCTAGAAGTGCTCGGGGACCATGTGAAAGTTCTTACAGTGCTGCTGCC
+ACACTCTGAGGTTGGTCCAACCGCTCTGAGATGAGCATGGTGCAGGCCTG
+ATTACTCCTCATGGTAGATGTTCATAAGGAAACTCAATATAAAATCTAGA
+GCCATTCACCAGGGGATTATATCAGTGAGCTCAACCTCAAGTTTAGTTGG
+CCTCTTGTTTAGTGTGATCAGAAACAATTCTTAGTATGGGGCAAGGACAG
+CCTCTGCCACAAAGTTGTTGTCTGCTCATGGGTGCCACAACCTAGAGATG
+CACCTGGGTACAGGCAGGTATGTATTTGTGTACACACATAAACACACACA
+CAATCCTCAAAGACATATGCAAGGCCTCTAAAAATGCCTGCCTGTTTTTT
+CTGAAAGCAGACTTTTCTTGCAACTGCCACATACAGTCAGCTTTGTGAGT
+CTAGCATCTGAGAATGGGACTCAATTTTTAAAAGTCCATAGCTCATTAAA
+GTCTCACTGGAGACATTGCCCCACCTGTCTAACTGCAGGAGGGACTAAAA
+CTTTTTATCAAATTCCTCAAAAATCTAAAGATTTCCAAGCTTTATTTAAA
+AACAAAAGTTATTTTGACTATGAGGTTTTAGGGGTAGGAGGTGGGATGTT
+GTTTCTGTTTCCATGGTGGTACTGTCAGGAAAGATTTTAATAAAACCAGG
+GTAGAACTTTTGGCAATGCACTTCAGCATGTTTCTTCTCCAAAATGTGCC
+TCCCTCCCTCCCACTGATGGCCCCCTTGACATGTAGGTGACTTAGCCACT
+GCCAAGTGCCCTTTATGGTTCTCTCATTTTGTCTGCACATGTACCCTTCA
+GGAGGGAAGAACTGGAGTGGAACCACCTCCTGCCCTGTAGAATGCAGTGC
+CAGGGAAGGGACCAATCCTAACAGGTGCCTTCCCTGGCAGGAAGTACCTT
+CCCGTGAGTGAGTGAAGCAGCTCTGCTTCCGGCTCATGGGACAGGTTTTA
+TACAGCAATAGCTTGTCTCACAGCCACGTCACAAGGAGTCTTGCCTCCCA
+TTGTGGGGCTGCAGAATTGGTCTCCTTGCCACCTGTGAGCATCCTTCCCC
+ACACAGTCTCCTTCCCTCCCTCCTTCCCTCCCTCCCTCCCTCCCTCCCTC
+CGTCCCTCCCTCCCTCCCTCAGCATTGAGCACTAGGATCATGGCTGCTAC
+CAGGACAGGCATGAAGCTGTCCTCCAGGGATTGGTATGTGGGAGTCGAAG
+ACACTGAGCTGCTGATGCTGGGTGTGGGCTCAGGATATCATGGTTGGGAA
+AAGAATTGTTCCTCAGTGGGTCTGGAGCCTCCAGGAAAGAAGAACCAATG
+CTGAGCAGTGTGACAACTAAAGATGATATCAAGGTTCAGGGCCACCCTCC
+ATGTGTGCTTGTCACACTCTAGAGCCATCGAAGGAACTGCTCCCCTCAAG
+TGTCTCTGGAAACACCCTCTGCCGCAAGCTGGGTGTAAGATAATAGGTGG
+CAGAGACCTATCTGCAGAGATTTGGCTGCATTCTAGGGGGCTCCTGTCCA
+AGCCTTGCTGCTGTATGCCATGGGCTTCACTGGGAACTAGGAGGGCTGTG
+ATGGGTGTGCCCCGGAGCCCAGCCTAGACCTGGCTGTCCATTTCCAAAAG
+GAAGGACTGACATGAAATGTATATTTAAAATTTTTAAATTGCAGATATTG
+TACAGTTGAATTAAAGAAGCGATTAAACCACC
+>mm10_knownGene_uc008xdd.1 range=chr5:34877025-34884978 5'pad=0 3'pad=0 strand=+ repeatMasking=none
+CCAACCACTCTGAAGAAAACACTTCAGTGCTTGGAAGGCATCCATCTCAG
+CCAGTCTGGTGCTGTGCTCACACTATATGTGGACAGGCTCCTGGGCACCC
+CCTTCCGTGCGCTGGCTCGCATGGTCGACACCCTGGCCTGTCGCCGGGTA
+GAAATGCTTTTGGCTGCAAATTTACAGGTACTGAAAATGGTAATTTATAT
+CAAAACTTAGAAAGTCAATCAAAACATTTGGTCTATTGACCTGGTCTTGA
+TTGGCCACTGATAAAGAGCATGTATGTCATATTTGTTATTTGTGTATCTG
+ACCAACGGCTCTTTTTAGATATACTGTGTATAGTAATTTATCCTTTTTAA
+GTGGGTGTGAGGTTGTATATCACAAAAGCCCTGATGTGTTCTTGTCTGTG
+TAGAGCAGCATGGCCCAGTTGCCAGAGGAGGAACTAAACAGAATCCAAGA
+ACACCTCCAGAACAGTGGGCTTGCACAAAGGTAAGACTGCAGCGTGGGGT
+CCTGGCACTTGGGCAACCAGCGTATTAACACATAGATATGTTCAGGAACA
+AATAGGTAGACAAAGGAATTAGTGTACAGTGAGTTTACTACAGCAATGCC
+AGAGTAGAAAAGACTATCTAAATATCAGATGAAATTTAGTCATGTCTCAC
+TTTAGTAGACATGAAGAAGTGGCACTCAAATACCTGTTGCACAGAGAAGG
+GACTCCTGAACTTTGTGACTGTTGAAGGGATAAGAAGTATGTTTGTTACC
+TCTGCTACTGGACCCTGACTGAGTGGGAATAAAGCTAGAACCTAATTGCC
+AGGTTGGAGGGAGTAAACATGAGAGCACTACCTGGCTCTTGGCATGTGCA
+CATTATGTTAAGTGACATCTCTTCATAGCTATTCTTTTGCCTAATTGTTT
+GAAAGTCTTTTAGAAGCCTTTATTAGAAACATTTCCATCTGTAGTGTAAG
+TGTAGTTCCTTGACTACAAGATAAATTAAGAAAAGCTTTCACCTCTTTTC
+CATTGCTGAAGGGGGAAGGGAGCATTCGAGAGGGTCTCTCATAACTTTAA
+TCTTCAGAGGATTTTTCATGGTGTTTCATAATGGGACAGGGCATGGTCTG
+GATGATTTTTCTCAATCTGCAGAGCCACATGATGCTGACATTTGATCATT
+TGACATATGAAGTATCACATTGACTCTGTATAGTAAGAGAATAAATTATC
+ATTGTGTTTGACAGTAGTTGTAGTCATAGACCAAGATAAGGAAGATTATG
+ACTTCATAAATAATTTAGGCCAGATGAAATGGTTTTCTGTGAAATGACCA
+TTTCTAATGGAATGAATTGTGTCATTATTGGGGATTAAATGGAGTTTGTG
+CACTTGGATCTTAAAATTTATCTGTTTTGCACACATTACAAATACGATGG
+CTAGGATTATGTAATTCAGTGGTAAAGCACTTGTCTAGTGTGCACAAGTC
+CCTAGGTTCAATCTCTAATATTGCCAAGAAAAGAAAGAGTAGAATAGAAT
+AGCTTTGTTAGCGGTTATATAGTTCATTGCCTAATGGAATGTTGAGCATA
+AATGAAACTTCTGGAAAATATCAGTGAGGTATAAATTTTGGTAATTTAAA
+GTAACTAGGGCGTAAATGTGCATCATGACTTTAGAATGTTGAAGGGAAAG
+TCCAAAACCTGTTGCCCTGTCTTAAGAAGCTCCTAGTGCTCCTTGGTATT
+ACATGTTTCTAGAACTCATCTGTGCAAAAACTGAGATTTCAAACCAAAGA
+ACAAACTACTCTGGCTTTTTTATTCCAGACACCAAAGGCTCTATTCACTG
+CTGGACAGATTCCGACTCTCTACTGTGCAGGACTCACTTAGCCCCTTGCC
+CCCAGTCACTTCCCACCCACTGGATGGGGATGGGCACACATCTCTGGAAA
+CAGTGAGTCCAGACAAAGTAAGTGTCCCGAATGTCTAAGTGTGATGACCA
+GGAACCCTGTGGAGACAATGACAGCCTCTGTCTACAATGAGGATAGTGGT
+GGCTGTCAGTATACATGGGACCTGACACTCAGCTCAGGTCATTAGATGCC
+CTGCTTGGGATTAGAGTGCAGGATGGAGGCCAAGAGGTCCTACTGAGAAC
+AGGAGTGCTGATGTGAGGCTTTTGTGGAGGACTGTGGGGGCAAGTCAGGT
+GGCTAGTCAGCAAGTCAGGAAAAGTTGGTTGTGGTCCAGGACCTATGACT
+GCAGACATTGTCCAGCACATGCTGACAAAACTTGGCCTGCCTCCCCCTCT
+GAACCTTCTATCTCCTATAAATTGATGCACCTACTGCCACCAGCTATATA
+ATGTATTGTCATCTGTACCCTTTCTCATTCACACTTGAGAATTAGAAACT
+GTTAGGGCCTTTGCCTTTTAGGCAAGGAGATGAGATTTTAGAAGCCTGCA
+GCCCATGACAGAAAACACACATTTGCCCCAGGCTCACTCTCCAGCTTTGT
+GGGAGGCATTTCTTTGGCTTTGGCTGCTGGGAAAGATGAGGGAGGCAGAT
+ACTCCAGTATAGTATAGATGGTGCATCATCTAGAGTGCAGGTAGAGCAAA
+AATTGTGAACACTGAGAACTTGGCTGAGTTTGCAAGGACTGCTGGAAGGT
+CCACAGGTGGAAAAGAAGAGGGCATTCAAGCACAGAACAAGAGAGGGAAG
+ACTAGCTGTCTAGAGAGTGTAAGCCCAAGATGTGTCTGATGTCTGTACAG
+CCAGCCGTCGGTGCTTCTATCACAGCCCAGAGAAGCCGAAGTGCCTACCC
+AGTCCCATTCAATTTTCTTTTCTTCTCAGGACTGGTACCTCCAGCTTGTC
+AGATCCCAGTGTTGGACCAGATCAGATTCTGCACTGCTGGAAGGTGCAGA
+GCTGGTCAACCGTATCCCTGCTGAAGATATGAATGACTTCATGATGAGCT
+CGGTAGGCAATAATCCGTTGAGTCCAGGAAATCCTCAGCTCTGCTTGTCA
+GAAAGTTAGATTTGTGTCTTAGTTAGGGTTTCTGTTGCTATGATATAACA
+CTATGACCAAAAAGCAAGTTGGGGAAGAAAGAGCTTATTTGGCTTAACAC
+CATCATTGAAGAAAGTCAGGATAGGAACTTAACAGGGCCAGAACCTGGAG
+TCAGGAGCTGATGCAGAGGCCATAGAGGAGTACTGCTTACTTGCTTGTTC
+TCCATAGTTGGTCAGCCTACTTTCTTACAGAACCCACGACCACCAACCCA
+GGGATGACACCACTCACTATGGGCTGGGTCCTCTGCCATCTACCACTAAT
+TAAGAAAACACCCACAGGCTTTTGATGACATTTTTTTTCTTGTTTGTTTG
+TTTTGAGACAGGGTTTCTCTGTGTAGCCCTGGTTGTCCTGGAACTCACTT
+TGTAGATCAGGCTGGCCTCAAACTCAGAAATCCACCTGTCTCTGGCTCCC
+AAGTGCTGGGATTAAAGGAGTGCACCACCACCCCCTGGCTTGGTGGCATT
+TTCTTATCTGAGGTTTCCGCCTCTTAGATGACTTTAGCTTGTGCCAAGTT
+GATAAAACTAGCCAGCACAATTTGTCTCATCTTTGTGTGATTAATATATG
+GGAAACCTGAGTTCAAGCAAGGGACACCATCAAGCAAAGCACATGGGAGG
+CTCTAACAAAAAATGGCACACGAGAGCTATAATCCAGAATAGCTAGCACA
+TGAGATGTGAATAGGATGCCGTTTTAAACAAACTAGAATCCTAAAAGAAG
+AAAATTGTTTTAACTTGATCTTTCATACCTTTAAAGAAAACGGGTGGTCA
+TAGGCTATGGTCAGTTGATCATTCATTTCCTGACTATAAGAAAGTATTGT
+GTGTTCATTGTCTTCTGGAAATTGATCTCTAGTAACCCCTGTATTAATTA
+CTTTCTCGTTGCTGTGATAAGACACTGAGGAAAAGCAGTTGAGATGAGGA
+GGGTTTGTTTTGGCTTACAGTTCAAGGGTACAATCCTTGGCAGGGGAAGC
+ATCACGGTAAGCTTGATGTGTCTGGTCACATTGTGCCCGTGACCAGAAAG
+CAGAGAGAAGTGACTGCTAGTGCTGAGTTTGCTCTGCCCTTTGTGTTCAG
+TCCAGGATCCCTGCCCACAGTTAAAGTGGGTCTTCCTACCTCAACCTCAT
+CAAGATAGGCACACCAAGAGGTTATTATCTCCTCAGCAGTTGTAGAACTG
+CTACGCTGACATCAGTATTAACCATTACACCCATCATGTAGTGAGGCACC
+TTGTCCCTGTAGATAAAGAGGCATTCTGTCATGTAGTGAGGTACCCCGTC
+CTCTCTAGATATAGAGGAATTACCTCATGTAGTCAGATGCCCTGTACTGT
+CTAGATACAGAGCAATTCTCCTCCACTTACCCCTCGAATACCAGAAAGCA
+TACTGAGAGCTGGTGCAGGCCTTGAAAGCATTCAATTCCCTTCCTTGTCT
+TCTTTGCCAAGCACTCTTAGGCCACTACCTTAGTGGGGTTCTTTGTTGCC
+CAGTGAAGACAAGGACCTCATTGCCCCTTGATACATGCCAAATGGTTATG
+GGGAAGCAGGAACTGAGCAGGTTAATAGAAGGTGTGTGTGTTGTGGAGAG
+AGAGGGTTCTCACATAGGAAGATATCTAAAGCACAGGACCCAGTTTGTTA
+TATTTTCCAAGTCGTTAGGTGGACTATTAGCAGCTTGCAAGTTCCATCCA
+TGACCATAGAAATGTTTGATTTGGGGGAACTAATGATGAAATACAGTGTT
+TAATATTAAAGCTTATGTTCTACTTGAAAAAATTGTGACTCTCTCTAAAT
+CCTTAAATGGCTTAAAATAAGTTTTTGACAAAACATAATAAAAACTGTCA
+TATGAGGCCAGACATGGTAGTGCATATCTTTAATTCCAATACTTGGGAGT
+CAGACACTTGAGGATCTCTGTTTGTGACATGTCTGGTTGACTTAAGTTCC
+AGGCCAGCCAGGGCTACATAGTAAGACTATCTCCAAATCAAAAAAAAAAA
+AAGAAAATTAAAAGTTTTTGGCATGTGAAATGTTGTGTGTGTGTTTTTTT
+AAGCAGATTTTTGTCTAATATAAGATGCTCTGTGTGCCTTCTCAGGCTGC
+AGCATTGCTTGGCATCCCACTGGATTCTTAGATGGCATATTAAACTTGGT
+GCGCTGTCTACATCAATTAAGATTTGTCATCCTAGAATTATTTCAATGAA
+ATATAAGATCATAAAAATTAAAAATATTGCTCTTTCTCTCTTTCCCTCCC
+CCCTCTCTCCACGTGGCCATGGCCAGTCTCTCTCTCTTTCTACCTTCTCT
+CCTTTCTCCCTGACTTTCTACAATAAAGCTCTAAAACCATTTTAAAAAAT
+TAAAAATATTACTTTAAAATTCAAATATGACAGTGACCAGAAATATTTAT
+TAAGCATGTTAAGTGGAGTTGTTGATATATTTATTAATATATATAACATA
+GGATATACTTTTTAAAATAGAGAATTCAACTTAGTTTTATCTGTCTTTTA
+ACTTTATTTGTAGTCTAAGATCTTTTCTAGAGAGTATTTCCCACTTTTAT
+TATTATAAGTTACTTGAGACAAGCTACATCATAAGAGAAAAAGATTTATT
+TTGACTGATAGTTCTGCACATACAACATCCAAGGGCTCATCTGGTGATGA
+CTTTACTGTCAGAGTCCCAGTGTGGTGCAGAAAACCTCCCATGGCAAACA
+ATAAGGAGCTTGAGTGTCTCTGTTTCTAGAATATTCTCAGAAGCATTCCT
+TACAGTTCTTTGGTCTGGATTATCTCAGAAACAAATGCTTATTGCATTAA
+CTGTGTGTGTTCCAGCCTGAAGGAAAGCTTACTGTCTTTGCTGTTGTTTG
+TCTTGCATGTAAACTTCTGACCCAGGAGTTCAACCTAAGCCTTTTGGCTC
+CCTGTTTAAGCCTTGGCATGAGCGAGATTGCTAATGGCCAAAAGAGTCCC
+CTCTTTGAAGCAGCCCGTGGGGTGATTCTGAACCGGGTGACCAGTGTTGT
+TCAGCAGCTTCCTGCTGTCCATCAAGTCTTCCAGCCCTTCCTGCCTATAG
+AGCCCACGGCCTACTGGAACAAGTTGAATGATCTGCTTGGTAATTAAATA
+CAGTTCCCTTGGATGCTTGTCTGTCTATCTTCTCTGTCACTCTGTCTCTC
+TTTATGGGTGATAGGAATGGCAGTAGCAGAATGGACAAGCCAGAGGGACA
+CTGAGTCACACATTGAACCTAGAGCTGCCAACTCTGGTAGATCAGCTGAC
+CAAGCCTCTAGGACCCTCCTGTCTCAGCCCTAAGTGCTGAGGTTACAGGT
+GTACACCCACAGCCAGGTTTTACATAAGATCTTAAATTCCAAACTCAAGT
+CCTCATGCTTGCACAGGAAGCACTTATCCACCGACTCATCCTCTCAGCTC
+AAGTTATCTTAGTGTTTTAGTTATTTTATATTATGTTATAGTTGTCTGCA
+TGTATGTCTTCACCAGATGCATACAGTGCCCATGGAGACAGAAGATGGCA
+TCAAATCCCATGGGACTGGAATTACAGGTGGCTGTGAACACACTATGTGG
+CAGCTAGAGATTTAACTTAGGTCCTCTGGAAGTGCAGCTCATGCTCTCAG
+CTCCCGAGCTGTCTATCTAGCTACAAGTTGTCACCGTTTTTAAAAGTATT
+ACAGATTCAGCACCGTGCTTTTCCTCAAGCACGCATATAGTCAGGACTGT
+TGATCTAAAAGGCTGACAAAAATAGCTGAGAAACTGCACCAAATCCTTAG
+CTCTAAACTTCTTTCTTTGTTGCTTGACCTGGACATAGAAAGTCAGGTTC
+TAAGCCCTTCAGGATCAGTGGGTTAGACTCAGGGCAAACCATGTCCTGAC
+TTTATGTAGCACGTATGAGTGAGCATGTACAGATGTGCTTGCTCTCTTGG
+TCTTGGCAACCTCAAATTCACATAGTTGTGTGAAGGCTTCTGAAGGGGCG
+GGCCTGTGCTCACAGTCAAAGTCACTCATGTCAGTCTCATGTTTCAGGTG
+ATACCACATCATACCAGTCTCTGACCATACTTGCCCGTGCCCTGGCACAG
+TACCTGGTGGTGCTCTCCAAAGTGCCTGCTCATTTGCACCTTCCTCCTGA
+GAAGGAGGGGGACACGGTGAAGTTTGTGGTAATGACAGTTGAGGTAAGAG
+CAGCTCTGAAATTATGTGTCCCTGTGAGGACAGGATATGTGAGTAGCACT
+AAGATGAAAGTCCTTGAAAACCGACAGTGTGGAGTACAATAGTGCACACA
+TTAGCCCAGCTGCCTTGGAGGCAGAGGCAGAATTGTGGGTTCCTGGTCTG
+TAAGGATGTGCCTGAGTATACAGCTAGACCCTATTTGAATAAACAGGAAG
+GCAGGGAATACCTATTGGCAAAGTCTGATTCACCTGATGGTACAGAGTGC
+CTTTCACCCTCACCACTGGGAAGCAAGGAGGTCTGTAAGACATCCTGTTA
+TCCCTACACTATAAACCTAATGTGGGTCCTAAATAAAATCTAGACAGTGT
+TACATTTTAAATTGGGCAGTGAAGCTGGACATTTCACCCAGAAACACTTG
+GCCCCTCAAAATGTATCTATACGTGCACTATAGTTTTATTACCTTGCCAT
+GGGCATGCTGGGAAAGAGCCTCACTGTGCCAGAGCTGTGCTGCCAATCCT
+GAACAAGGGTTGACACCTTACCCTAAGAGAAGAAAGTCAGTATCCTGAGG
+GTGTATGGTACAAAGGCACCAGGTGAACCAGGCTAAGTTAGGTGGTCTTT
+GAGCTTGTCTTAGCCCAGTGAAGACAGGAAAGCAAATGTGTGTGTAAAGT
+ATTGGGTGGCAGCTCCTAGTCATACTCTGCGCTGCACAGGCCATGCCATG
+ACACTTGTTTCCTATAAAAACTCTGTCCCCATTTCACACATGGGGAAAGA
+AGCTCAGAGAGGTTCGGGGACTTGCTAGAAGTCACTAGTCATAAATCATA
+CTCCAAAACTCAGTGTTGTGACTGAGATACAAAACAAAACACATTCTGTT
+TCTT
diff --git a/test-data/a.tab b/test-data/a.tab
new file mode 100644
index 0000000..6023b4f
--- /dev/null
+++ b/test-data/a.tab
@@ -0,0 +1,15 @@
+CHR	SNP	BP	A1	TEST	NMISS	BETA	STAT	P
+1	rs1181876	3671541	T	DOMDEV	958	-1.415	-3.326	0.0009161
+1	rs10492923	5092886	C	ADD	1007	5.105	4.368	1.382e-05
+1	rs10492923	5092886	C	DOMDEV	1007	-5.612	-4.249	2.35e-05
+1	rs10492923	5092886	C	GENO_2DF	1007	NA	19.9	4.775e-05
+1	rs1801133	11778965	T	ADD	1022	1.23	3.97	7.682e-05
+1	rs1801133	11778965	T	GENO_2DF	1022	NA	16.07	0.0003233
+1	rs1361912	12663121	A	ADD	1021	12.69	4.093	4.596e-05
+1	rs1361912	12663121	A	DOMDEV	1021	-12.37	-3.945	8.533e-05
+1	rs1361912	12663121	A	GENO_2DF	1021	NA	17.05	0.0001982
+1	rs1009806	19373138	G	ADD	1021	-1.334	-3.756	0.0001826
+1	rs1009806	19373138	G	GENO_2DF	1021	NA	19.36	6.244e-05
+1	rs873654	29550948	A	DOMDEV	1012	1.526	3.6	0.0003339
+1	rs10489527	36800027	C	ADD	1016	12.67	4.114	4.211e-05
+1	rs10489527	36800027	C	DOMDEV	1016	-13.05	-4.02	6.249e-05
diff --git a/test-data/a.txt b/test-data/a.txt
new file mode 100644
index 0000000..b86cb4b
--- /dev/null
+++ b/test-data/a.txt
@@ -0,0 +1,15 @@
+ CHR        SNP         BP   A1       TEST    NMISS       BETA         STAT            P 
+   1  rs1181876    3671541    T     DOMDEV      958     -1.415       -3.326    0.0009161
+   1 rs10492923    5092886    C        ADD     1007      5.105        4.368    1.382e-05
+   1 rs10492923    5092886    C     DOMDEV     1007     -5.612       -4.249     2.35e-05
+   1 rs10492923    5092886    C   GENO_2DF     1007         NA         19.9    4.775e-05
+   1  rs1801133   11778965    T        ADD     1022       1.23         3.97    7.682e-05
+   1  rs1801133   11778965    T   GENO_2DF     1022         NA        16.07    0.0003233
+   1  rs1361912   12663121    A        ADD     1021      12.69        4.093    4.596e-05
+   1  rs1361912   12663121    A     DOMDEV     1021     -12.37       -3.945    8.533e-05
+   1  rs1361912   12663121    A   GENO_2DF     1021         NA        17.05    0.0001982
+   1  rs1009806   19373138    G        ADD     1021     -1.334       -3.756    0.0001826
+   1  rs1009806   19373138    G   GENO_2DF     1021         NA        19.36    6.244e-05
+   1   rs873654   29550948    A     DOMDEV     1012      1.526          3.6    0.0003339
+   1 rs10489527   36800027    C        ADD     1016      12.67        4.114    4.211e-05
+   1 rs10489527   36800027    C     DOMDEV     1016     -13.05        -4.02    6.249e-05
\ No newline at end of file
diff --git a/test-data/asian_chars_1.txt b/test-data/asian_chars_1.txt
new file mode 100644
index 0000000..4f1aaa5
--- /dev/null
+++ b/test-data/asian_chars_1.txt
@@ -0,0 +1 @@
+蛋白質核酸酵素:制癌性物質の化学修飾による効果の向上.
\ No newline at end of file
diff --git a/test-data/bam_from_sam.bam b/test-data/bam_from_sam.bam
new file mode 100644
index 0000000..a67e8b3
Binary files /dev/null and b/test-data/bam_from_sam.bam differ
diff --git a/test-data/bcf_index_metadata_test.bcf b/test-data/bcf_index_metadata_test.bcf
new file mode 100644
index 0000000..2445188
Binary files /dev/null and b/test-data/bcf_index_metadata_test.bcf differ
diff --git a/test-data/bcf_index_metadata_test.txt b/test-data/bcf_index_metadata_test.txt
new file mode 100644
index 0000000..b531318
--- /dev/null
+++ b/test-data/bcf_index_metadata_test.txt
@@ -0,0 +1 @@
+gzip compressed data, extra field
diff --git a/test-data/biom1_metadata_test.txt b/test-data/biom1_metadata_test.txt
new file mode 100644
index 0000000..c451480
--- /dev/null
+++ b/test-data/biom1_metadata_test.txt
@@ -0,0 +1,11 @@
+table_rows: __ob__u__sq__2__sq__, u__sq__3__sq__, u__sq__4__sq__, u__sq__5__sq__, u__sq__8__sq__, u__sq__9__sq__, u__sq__10__sq__, u__sq__11__sq__, u__sq__12__sq__, u__sq__14__sq__, u__sq__15__sq__, u__sq__16__sq__, u__sq__17__sq__, u__sq__18__sq__, u__sq__22__sq__, u__sq__23__sq__, u__sq__25__sq__, u__sq__28__sq__, u__sq__29__sq____cb__
+table_matrix_element_type: float
+table_format: Biological Observation Matrix 1.0.0
+table_generated_by: BIOM-Format 2.1.5
+table_matrix_type: sparse
+table_shape: __ob__19, 2__cb__
+table_format_url: http://biom-format.org
+table_date: 2016-05-26T16:43:45.614267
+table_type: OTU table
+table_id: None
+table_columns: __ob__u__sq__SAMPLE_1__sq__, u__sq__SAMPLE_2__sq____cb__
\ No newline at end of file
diff --git a/test-data/cat_wrapper_out1.bed b/test-data/cat_wrapper_out1.bed
new file mode 100644
index 0000000..8ebd30a
--- /dev/null
+++ b/test-data/cat_wrapper_out1.bed
@@ -0,0 +1,133 @@
+chr1	147962192	147962580	CCDS989.1_cds_0_0_chr1_147962193_r	0	-
+chr1	147984545	147984630	CCDS990.1_cds_0_0_chr1_147984546_f	0	+
+chr1	148078400	148078582	CCDS993.1_cds_0_0_chr1_148078401_r	0	-
+chr1	148185136	148185276	CCDS996.1_cds_0_0_chr1_148185137_f	0	+
+chr10	55251623	55253124	CCDS7248.1_cds_0_0_chr10_55251624_r	0	-
+chr11	116124407	116124501	CCDS8374.1_cds_0_0_chr11_116124408_r	0	-
+chr11	116206508	116206563	CCDS8377.1_cds_0_0_chr11_116206509_f	0	+
+chr11	116211733	116212337	CCDS8378.1_cds_0_0_chr11_116211734_r	0	-
+chr11	1812377	1812407	CCDS7726.1_cds_0_0_chr11_1812378_f	0	+
+chr12	38440094	38440321	CCDS8736.1_cds_0_0_chr12_38440095_r	0	-
+chr13	112381694	112381953	CCDS9526.1_cds_0_0_chr13_112381695_f	0	+
+chr14	98710240	98712285	CCDS9949.1_cds_0_0_chr14_98710241_r	0	-
+chr15	41486872	41487060	CCDS10096.1_cds_0_0_chr15_41486873_r	0	-
+chr15	41673708	41673857	CCDS10097.1_cds_0_0_chr15_41673709_f	0	+
+chr15	41679161	41679250	CCDS10098.1_cds_0_0_chr15_41679162_r	0	-
+chr15	41826029	41826196	CCDS10101.1_cds_0_0_chr15_41826030_f	0	+
+chr16	142908	143003	CCDS10397.1_cds_0_0_chr16_142909_f	0	+
+chr16	179963	180135	CCDS10401.1_cds_0_0_chr16_179964_r	0	-
+chr16	244413	244681	CCDS10402.1_cds_0_0_chr16_244414_f	0	+
+chr16	259268	259383	CCDS10403.1_cds_0_0_chr16_259269_r	0	-
+chr18	23786114	23786321	CCDS11891.1_cds_0_0_chr18_23786115_r	0	-
+chr18	59406881	59407046	CCDS11985.1_cds_0_0_chr18_59406882_f	0	+
+chr18	59455932	59456337	CCDS11986.1_cds_0_0_chr18_59455933_r	0	-
+chr18	59600586	59600754	CCDS11988.1_cds_0_0_chr18_59600587_f	0	+
+chr19	59068595	59069564	CCDS12866.1_cds_0_0_chr19_59068596_f	0	+
+chr19	59236026	59236146	CCDS12872.1_cds_0_0_chr19_59236027_r	0	-
+chr19	59297998	59298008	CCDS12877.1_cds_0_0_chr19_59297999_f	0	+
+chr19	59302168	59302288	CCDS12878.1_cds_0_0_chr19_59302169_r	0	-
+chr2	118288583	118288668	CCDS2120.1_cds_0_0_chr2_118288584_f	0	+
+chr2	118394148	118394202	CCDS2121.1_cds_0_0_chr2_118394149_r	0	-
+chr2	220190202	220190242	CCDS2441.1_cds_0_0_chr2_220190203_f	0	+
+chr2	220229609	220230869	CCDS2443.1_cds_0_0_chr2_220229610_r	0	-
+chr20	33330413	33330423	CCDS13249.1_cds_0_0_chr20_33330414_r	0	-
+chr20	33513606	33513792	CCDS13255.1_cds_0_0_chr20_33513607_f	0	+
+chr20	33579500	33579527	CCDS13256.1_cds_0_0_chr20_33579501_r	0	-
+chr20	33593260	33593348	CCDS13257.1_cds_0_0_chr20_33593261_f	0	+
+chr21	32707032	32707192	CCDS13614.1_cds_0_0_chr21_32707033_f	0	+
+chr21	32869641	32870022	CCDS13615.1_cds_0_0_chr21_32869642_r	0	-
+chr21	33321040	33322012	CCDS13620.1_cds_0_0_chr21_33321041_f	0	+
+chr21	33744994	33745040	CCDS13625.1_cds_0_0_chr21_33744995_r	0	-
+chr22	30120223	30120265	CCDS13897.1_cds_0_0_chr22_30120224_f	0	+
+chr22	30160419	30160661	CCDS13898.1_cds_0_0_chr22_30160420_r	0	-
+chr22	30665273	30665360	CCDS13901.1_cds_0_0_chr22_30665274_f	0	+
+chr22	30939054	30939266	CCDS13903.1_cds_0_0_chr22_30939055_r	0	-
+chr5	131424298	131424460	CCDS4149.1_cds_0_0_chr5_131424299_f	0	+
+chr5	131556601	131556672	CCDS4151.1_cds_0_0_chr5_131556602_r	0	-
+chr5	131621326	131621419	CCDS4152.1_cds_0_0_chr5_131621327_f	0	+
+chr5	131847541	131847666	CCDS4155.1_cds_0_0_chr5_131847542_r	0	-
+chr6	108299600	108299744	CCDS5061.1_cds_0_0_chr6_108299601_r	0	-
+chr6	108594662	108594687	CCDS5063.1_cds_0_0_chr6_108594663_f	0	+
+chr6	108640045	108640151	CCDS5064.1_cds_0_0_chr6_108640046_r	0	-
+chr6	108722976	108723115	CCDS5067.1_cds_0_0_chr6_108722977_f	0	+
+chr7	113660517	113660685	CCDS5760.1_cds_0_0_chr7_113660518_f	0	+
+chr7	116512159	116512389	CCDS5771.1_cds_0_0_chr7_116512160_r	0	-
+chr7	116714099	116714152	CCDS5773.1_cds_0_0_chr7_116714100_f	0	+
+chr7	116945541	116945787	CCDS5774.1_cds_0_0_chr7_116945542_r	0	-
+chr8	118881131	118881317	CCDS6324.1_cds_0_0_chr8_118881132_r	0	-
+chr9	128764156	128764189	CCDS6914.1_cds_0_0_chr9_128764157_f	0	+
+chr9	128787519	128789136	CCDS6915.1_cds_0_0_chr9_128787520_r	0	-
+chr9	128882427	128882523	CCDS6917.1_cds_0_0_chr9_128882428_f	0	+
+chr9	128937229	128937445	CCDS6919.1_cds_0_0_chr9_128937230_r	0	-
+chrX	122745047	122745924	CCDS14606.1_cds_0_0_chrX_122745048_f	0	+
+chrX	152648964	152649196	CCDS14733.1_cds_0_0_chrX_152648965_r	0	-
+chrX	152691446	152691471	CCDS14735.1_cds_0_0_chrX_152691447_f	0	+
+chrX	152694029	152694263	CCDS14736.1_cds_0_0_chrX_152694030_r	0	-
+chr1	147962192	147962580	NM_005997_cds_0_0_chr1_147962193_r	0	-
+chr1	147984545	147984630	BC007833_cds_0_0_chr1_147984546_f	0	+
+chr1	148078400	148078582	AJ011123_cds_0_0_chr1_148078401_r	0	-
+chr1	148185136	148185276	NM_002796_cds_0_0_chr1_148185137_f	0	+
+chr10	55251623	55253124	AY029205_cds_0_0_chr10_55251624_r	0	-
+chr11	116124407	116124501	AK057832_cds_0_0_chr11_116124408_r	0	-
+chr11	116206508	116206563	NM_000040_cds_1_0_chr11_116206509_f	0	+
+chr11	116211733	116212337	BC005380_cds_0_0_chr11_116211734_r	0	-
+chr11	130745911	130745993	AY358331_cds_0_0_chr11_130745912_f	0	+
+chr12	38440094	38440321	NM_052885_cds_0_0_chr12_38440095_r	0	-
+chr12	38905200	38905351	AY792511_cds_0_0_chr12_38905201_f	0	+
+chr13	112381694	112381953	NM_207440_cds_1_0_chr13_112381695_f	0	+
+chr13	29680676	29680875	NM_032116_cds_0_0_chr13_29680677_r	0	-
+chr14	98521864	98521922	U88895_cds_0_0_chr14_98521865_f	0	+
+chr14	98710240	98712285	NM_022898_cds_0_0_chr14_98710241_r	0	-
+chr15	41486872	41487060	BX537418_cds_0_0_chr15_41486873_r	0	-
+chr15	41673708	41673857	AK223365_cds_0_0_chr15_41673709_f	0	+
+chr15	41679161	41679250	NM_153700_cds_0_0_chr15_41679162_r	0	-
+chr15	41773540	41773689	AK223365_cds_0_0_chr15_41773541_f	0	+
+chr16	142908	143003	NM_005332_cds_0_0_chr16_142909_f	0	+
+chr16	179197	179339	BC065198_cds_0_0_chr16_179198_r	0	-
+chr16	244413	244681	AK057165_cds_2_0_chr16_244414_f	0	+
+chr16	259268	259383	AB016929_cds_0_0_chr16_259269_r	0	-
+chr18	23786114	23786321	NM_001792_cds_0_0_chr18_23786115_r	0	-
+chr18	59406881	59407046	NM_012397_cds_1_0_chr18_59406882_f	0	+
+chr18	59455932	59456337	AB046400_cds_0_0_chr18_59455933_r	0	-
+chr18	59528407	59528575	AY792326_cds_0_0_chr18_59528408_f	0	+
+chr19	59068595	59069564	BC013995_cds_1_0_chr19_59068596_f	0	+
+chr19	59236026	59236146	NM_198481_cds_0_0_chr19_59236027_r	0	-
+chr19	59297998	59298008	NM_004542_cds_0_0_chr19_59297999_f	0	+
+chr19	59318205	59318718	AK128544_cds_3_0_chr19_59318206_r	0	-
+chr2	118288583	118288668	NM_006773_cds_0_0_chr2_118288584_f	0	+
+chr2	118390395	118390500	BC005078_cds_0_0_chr2_118390396_r	0	-
+chr2	220108689	220109267	AY125465_cds_0_0_chr2_220108690_f	0	+
+chr2	220229609	220230869	NM_024536_cds_0_0_chr2_220229610_r	0	-
+chr20	33330413	33330423	NM_181466_cds_0_0_chr20_33330414_r	0	-
+chr20	33485370	33486123	BC085019_cds_1_0_chr20_33485371_f	0	+
+chr20	33488491	33489122	NM_000557_cds_1_0_chr20_33488492_r	0	-
+chr20	33513606	33513792	AF022655_cds_1_0_chr20_33513607_f	0	+
+chr21	32687402	32687588	NM_032910_cds_0_0_chr21_32687403_f	0	+
+chr21	32869641	32870022	NM_018277_cds_3_0_chr21_32869642_r	0	-
+chr21	33321040	33322012	NM_005806_cds_1_0_chr21_33321041_f	0	+
+chr21	33728358	33728724	AK129657_cds_0_0_chr21_33728359_r	0	-
+chr22	30120223	30120265	NM_004147_cds_0_0_chr22_30120224_f	0	+
+chr22	30160419	30160661	BC032941_cds_0_0_chr22_30160420_r	0	-
+chr22	30228824	30228916	NM_001007467_cds_1_0_chr22_30228825_f	0	+
+chr22	30340151	30340376	CR456540_cds_0_0_chr22_30340152_r	0	-
+chr5	131311206	131311254	AF099740_cds_11_0_chr5_131311207_r	0	-
+chr5	131424298	131424460	NM_000588_cds_0_0_chr5_131424299_f	0	+
+chr5	131556601	131556672	BC035813_cds_0_0_chr5_131556602_r	0	-
+chr5	131621326	131621419	BC003096_cds_0_0_chr5_131621327_f	0	+
+chr6	108299600	108299744	NM_007214_cds_0_0_chr6_108299601_r	0	-
+chr6	108594662	108594687	NM_003269_cds_0_0_chr6_108594663_f	0	+
+chr6	108640045	108640151	NM_003795_cds_0_0_chr6_108640046_r	0	-
+chr6	108722976	108723115	NM_145315_cds_0_0_chr6_108722977_f	0	+
+chr7	113660517	113660685	AF467257_cds_1_0_chr7_113660518_f	0	+
+chr7	116512159	116512389	NM_003391_cds_0_0_chr7_116512160_r	0	-
+chr7	116714099	116714152	NM_000492_cds_0_0_chr7_116714100_f	0	+
+chr7	116945541	116945787	AF377960_cds_0_0_chr7_116945542_r	0	-
+chr8	118881131	118881317	NM_000127_cds_0_0_chr8_118881132_r	0	-
+chr9	128764156	128764189	BC051300_cds_0_0_chr9_128764157_f	0	+
+chr9	128787519	128789136	NM_014908_cds_0_0_chr9_128787520_r	0	-
+chr9	128789552	128789584	NM_015354_cds_0_0_chr9_128789553_f	0	+
+chr9	128850516	128850624	AB058751_cds_0_0_chr9_128850517_r	0	-
+chrX	122745047	122745924	NM_001167_cds_1_0_chrX_122745048_f	0	+
+chrX	152648964	152649196	NM_000425_cds_0_0_chrX_152648965_r	0	-
+chrX	152691446	152691471	AF101728_cds_0_0_chrX_152691447_f	0	+
+chrX	152694029	152694263	BC052303_cds_0_0_chrX_152694030_r	0	-
diff --git a/test-data/composite_output_expected_log b/test-data/composite_output_expected_log
new file mode 100644
index 0000000..500be17
--- /dev/null
+++ b/test-data/composite_output_expected_log
@@ -0,0 +1,2 @@
+Fri May 28 13:27:22 2010
+ ./velveth /Users/jj/src/wf/galaxy/galaxy-central/test-data/velveth_test1 21 -shortPaired /Users/jj/src/wf/galaxy/galaxy-central/test-data/velvet_test_reads.fa
diff --git a/test-data/filter1_in3.sam b/test-data/filter1_in3.sam
new file mode 100644
index 0000000..573025c
--- /dev/null
+++ b/test-data/filter1_in3.sam
@@ -0,0 +1,100 @@
+HWI-EAS269B:8:34:797:623	145	chr1	16632182	255	27M	=	16631977	0	CCATTTCCTGTATGCTGTAAAGTACAA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:40:39:1184	145	chr1	24620331	3	27M	=	24620115	0	ATTTATGTGGTTTCGTTTACCTTCTAT	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:2	CC:Z:chrM	CP:i:9060
+HWI-EAS269B:8:58:533:1198	81	chr1	88426079	255	27M	=	88423429	0	GAAGAGGAAGAAGGTGGGGAGGAAGAG	IIIG?IIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:89:1776:1815	97	chr1	134085638	255	27M	=	134085824	0	GAATGATTCTCTGGGTGTTACTTTGCA	IIIIIIIIIIIIIIIDIII:IIII>F5	NM:i:0	NH:i:1
+HWI-EAS269B:8:74:1134:1670	161	chr1	138166886	255	27M	=	138167084	0	TTACTAGTGTCTCTCTTACCATCATAT	.IIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:1	NH:i:1
+HWI-EAS269:3:59:1321:1427	147	chr1	173149715	255	27M	=	173149555	0	AAGGGCTAGGGTGACAGGCAGGGGACG	-C<CID?IIIIIIIDIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:8:164:1678	145	chr1	178660716	255	27M	=	178660493	0	CAATTGGTGTTTTTCTTAAGAGACTCA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:1:1048:638	137	chr10	12456478	1	27M	*	0	0	TAAAANAAATAAAACAAAACAATAAAA	$'&(+")'%$(&*&#&$$#%%$$%%$$	NM:i:2	NH:i:4	CC:Z:chr16	CP:i:21639623
+HWI-EAS269:3:62:1211:798	147	chr10	28135294	255	27M	=	28135117	0	ACATGGTGTGGGGACAGAGATGTGAAG	I;IIIIIIAIIIIIIIIIIIIIIIIII	NM:i:1	NH:i:1
+HWI-EAS269:3:27:410:424	145	chr10	76950070	255	27M	=	76949871	0	GAGTCTGTGTCCAGGCCAATTCACTAT	25-8I+.6B.IIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:48:1180:1122	73	chr10	83772157	0	27M	*	0	0	GATCATCTTTTCTAAAACAATAAAGAC	/IIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:6	CC:Z:chr11	CP:i:93811140
+HWI-EAS269:3:77:654:131	99	chr10	93419810	255	27M	=	93419993	0	TTGCATCCCTAGGAACTGGAGTTATAG	IIIIIIGIHIIIED@CIIH5I3D9G6:	NM:i:0	NH:i:1
+HWI-EAS269B:8:98:895:1810	177	chr11	3371472	255	27M	=	37952212	0	CTACATAGTGGGAACCGGCGACCGCTG	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:2	NH:i:1
+HWI-EAS269B:8:58:1126:883	97	chr11	30080486	255	27M	=	30080702	0	TACTTCCACCAGGCTCCAGTTTTGTGA	IIIIIIIIIIIIIIIIIIIIIIIIII5	NM:i:0	NH:i:1
+HWI-EAS269B:8:89:1763:1446	81	chr11	59589605	255	27M	=	59589400	0	GTACCTGGCCACTGATGCAGCTTAGAA	II<IIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:6:739:1329	161	chr11	68629628	3	27M	=	68629835	0	GTTTTTGGTTGATAGTTGAGCAAACTG	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:2	CC:Z:chr4	CP:i:126661877
+HWI-EAS269:3:19:1683:1350	81	chr11	70846915	255	27M	=	70846725	0	CACGGGCAGATAAGCTGCTGAGACTAA	H50BB:IF=IIGIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:9:1691:1087	97	chr11	84640783	255	27M	=	84641010	0	ACATTATCCATCTCTCTGTCATTGTCC	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:48:1350:424	81	chr11	94150331	255	27M	=	94150113	0	TTGCAAGCCAACCCTGAGTGAAGTGTC	IIDHIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:22:1176:1405	97	chr11	98629843	255	27M	=	98630030	0	CCCTCGGAGGGCAACGGGAAAGGAGAA	IIIIIIIIIIIDIIIII?III/79E14	NM:i:0	NH:i:1
+HWI-EAS269:3:67:1311:1512	97	chr11	100488190	255	27M	=	100488374	0	CATGCCCTGAGACTTAGCAAGACTCTT	IIIIIIIIIIIIIIIEIA3,I57GBI@	NM:i:0	NH:i:1
+HWI-EAS269B:8:41:1142:1307	161	chr11	101320424	255	27M	=	101320625	0	GCCTCCCTACATAGCAAAGGAAAGAAA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:31:1101:21	97	chr11	116711287	255	27M	=	116711485	0	CATAAGCAAAAGATACACCATGTTTTA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:84:637:1203	145	chr12	35829424	255	27M	=	35829229	0	CTCCCTCAAGGATCTTGTGGGGCATCA	</'>I,I5B26$II;EB=%IIII1III	NM:i:0	NH:i:1
+HWI-EAS269:3:65:1158:1081	163	chr12	52234332	255	27M	=	52234505	0	TTTTTTTTTTAAGACAGGGAGTTTTTT	IIIIIIIIIIIC4,,*>II%-,%III>	NM:i:1	NH:i:1
+HWI-EAS269B:8:65:1325:1287	137	chr12	52234513	3	27M	*	0	0	GCAGCACCTTGTGACATTTAATTTAGT	IIIII+IIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:2	CC:Z:chr15	CP:i:98780443
+HWI-EAS269:3:38:488:1093	97	chr12	80471642	255	27M	=	80471839	0	TGGAGAGATGACTCCTTGTTTGAGACA	IIIIIIIIIIIIIIIIIIIBAE0B<7D	NM:i:0	NH:i:1
+HWI-EAS269:3:76:290:1451	99	chr12	117745566	0	27M	=	117745739	0	CAATCTGAAGATCCACAATCTTTTATA	IIIIIIIIIIIIIIFII5I9IIII+I7	NM:i:0	NH:i:5	CC:Z:chr4	CP:i:133520935
+HWI-EAS269B:8:39:353:1526	65	chr13	18122558	255	27M	=	18122761	0	ACTCTACTCAAAACCACACTAAGCCTC	@IIIIII6IIIIIDII9IIIIII<III	NM:i:0	NH:i:1
+HWI-EAS269:3:63:1260:365	99	chr13	41278147	255	27M	=	41278319	0	AAGACAAGAACTTATCCACCAATATGT	IIIIIIIIIIIIIII<IIA<CIIDII>	NM:i:0	NH:i:1
+HWI-EAS269:3:70:152:1609	161	chr13	55421426	255	27M	=	55421687	0	CGCTTGACCATTCCAGCCCAGACAAGA	IIIIIIIII8IIII3IIII-C(3-%B'	NM:i:0	NH:i:1
+HWI-EAS269:3:9:1343:1414	99	chr13	83721797	255	27M	=	83721967	0	AACTACAGCTGAGGCAGCCTCCTGCCT	IIIIIIIIIIIIIIIII;E+?7&819&	NM:i:0	NH:i:1
+HWI-EAS269:3:4:1480:1956	137	chr14	46703849	0	27M	*	0	0	GTGGAGCCCAGTGACACCATCGAGAAC	IIIIIIIIIIIII?C3IH?1@I@@=27	NM:i:0	NH:i:8	CC:Z:=	CP:i:46704077
+HWI-EAS269:3:86:616:1519	137	chr14	56244763	255	27M	*	0	0	CATGGCCTGAAGTTCCTGAGCTTTATC	IIIIIIIII3DIBI9II3)73BIG'G+	NM:i:0	NH:i:1
+HWI-EAS269:3:42:656:1216	97	chr14	100271567	255	27M	=	100271751	0	ATAAGTCTCAGTTCTTGGGCCAGATCA	IIIIIIIIICIIIIII3IA854I1C+)	NM:i:0	NH:i:1
+HWI-EAS269:3:87:95:767	73	chr16	7368880	255	27M	*	0	0	AATATCTGAAACATTCAAATGAGCAAT	F>II/4I@9H=II?IIIIIIIIIIII-	NM:i:0	NH:i:1
+HWI-EAS269B:8:50:1572:711	145	chr16	32230400	255	27M	=	32230196	0	TACAGCAACAACAAATTCAACGACACG	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:56:1436:121	147	chr16	49887529	255	27M	=	49887356	0	TTAAGGCCCAGCTCTACATAAAACACT	IIIIAHIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:53:584:1696	99	chr17	45705815	255	27M	=	45705987	0	CCTTCTTGTCCAAGATCTCCTTCATGA	IIIIIIIIIIIIBI6IIII=II:GI@<	NM:i:0	NH:i:1
+HWI-EAS269B:8:16:643:1950	177	chr18	38000738	255	27M	=	38000570	0	ATTCTGGTTCAGCCTGGGCAGCTTGGG	III@==I0<IIGIII;IIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:66:1760:1539	137	chr18	38624681	3	27M	*	0	0	TTTGGAACTTTTGAGAGGATCCCTAGC	IIIIIIIIIIIIIII1II.IIIII1II	NM:i:0	NH:i:2
+HWI-EAS269:3:2:1723:1277	163	chr19	16341705	255	27M	=	16341870	0	ATGGGCTCGTCGCAGCTCAGCGGCTGG	IIIIIIIIIIIIICI=8<&I7:F(+;2	NM:i:1	NH:i:1
+HWI-EAS269B:8:66:317:1676	97	chr19	47099316	255	27M	=	47099535	0	ATCTGGTCAATCAACACCACCAGCAGA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:10:1428:1315	81	chr2	22444224	3	27M	=	22444039	0	GGAATTGCGATAATTATAGTGGCTGAT	?6>CI>GIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:2	CC:Z:chrM	CP:i:6245
+HWI-EAS269:3:72:152:785	97	chr2	22796881	255	27M	=	22797074	0	TGAGTACTCCAGATAATCGTTACACAA	IIIIIIII6IIIIIIII0IA@?,7752	NM:i:0	NH:i:1
+HWI-EAS269:3:79:321:1095	99	chr2	62083760	255	27M	=	62083938	0	CAAAAATTGAGATATCAAAAAGCTCTT	IIIIIIIII2IGIBIIC8:4IB1H4I;	NM:i:0	NH:i:1
+HWI-EAS269:3:93:1529:881	97	chr2	129998362	255	27M	=	129998827	0	CGCATGCCAGGAGGGGGCATGCCCATT	IIIFIIIIFIIIIIFII?72I.,020*	NM:i:0	NH:i:1
+HWI-EAS269B:8:20:813:376	113	chr2	130534278	255	27M	=	130534454	0	TGTATTAGCAGGAGGTGGGGAGGCTGA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:89:1221:1082	83	chr2	151285071	0	27M	=	151284944	0	ACCAGTGCACAGGTCTCCAGGGCTTCT	-IC=B+8IIIEIIFI5IIIIIIIIIII	NM:i:0	NH:i:5	CC:Z:chr9	CP:i:31743980
+HWI-EAS269:3:66:46:336	163	chr2	156679837	255	27M	=	156680016	0	TATTTTCCTTTTGCTGTGGTTTGTGTT	IIIIIIIIIIIIIEI@E-I?GI&B%3I	NM:i:0	NH:i:1
+HWI-EAS269B:8:52:1139:1381	73	chr2	174953004	0	27M	*	0	0	AATGCTCAACTCTTAGTTTCTTATTCA	IIIIIIIIIIIIIIIIB@?IIIII?+0	NM:i:0	NH:i:19	CC:Z:=	CP:i:175018018
+HWI-EAS269:3:98:585:19	161	chr3	26005087	255	27M	=	26005271	0	ACCTAACACATGTAAACTTAAATTCAT	I:IIIIIIII<IIE==DDI):,D1--8	NM:i:0	NH:i:1
+HWI-EAS269:3:74:447:1309	83	chr3	80492396	255	27M	=	80492220	0	TCGGATGCCTCTCACCACTTTGACAAT	)H():F=85IIIIEIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:74:425:1427	161	chr3	96447603	255	27M	=	96447820	0	TGGAGTAGGAGTCTCAGGAGGAGTAGA	IIIIIIIIIIIIIIIIIIIII:I?II<	NM:i:0	NH:i:1
+HWI-EAS269:3:10:739:1474	163	chr3	124089371	255	27M	=	124089535	0	AAAAAAACAATCTTATTCCGAGCATTC	IIIIIIIIIIIIIIIIIIIIIIIGIII	NM:i:0	NH:i:1
+HWI-EAS269:3:20:357:1709	65	chr4	128895987	255	27M	=	128895822	0	CCTACCTTCTTCCCTTGGCAGCTGACT	IIII>IIIBIIA:I18):,*3&,/*'+	NM:i:0	NH:i:1
+HWI-EAS269B:8:22:623:129	113	chr4	135492867	255	27M	=	135490613	0	CCACTTTCCTGTACTGGCCAGAAAATG	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:48:824:702	65	chr4	139584010	255	27M	=	139582899	0	CTGGGCAGTGCAGCGGTACATGGAGCC	IIIIIIIIIIIIIIIIII<AIIB;I>:	NM:i:0	NH:i:1
+HWI-EAS269B:8:33:527:1211	73	chr5	37627410	255	27M	*	0	0	GGGAAGGTGTGAGTACAACAGCCAAAG	IIIIIIIIIIIIIIIIIIIIIIII@II	NM:i:0	NH:i:1
+HWI-EAS269:3:2:1518:599	163	chr5	136908928	255	27M	=	136909101	0	CTATTGCCAAAAAACTATGTTCACAAA	IIIIIIIIIIIIIIIIIIIIIIIII=I	NM:i:0	NH:i:1
+HWI-EAS269B:8:62:1493:1455	161	chr5	138715332	255	27M	=	138715534	0	CCCAAATGAAAAAATAAATATTATGAA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:69:384:1894	147	chr5	143665243	255	27M	=	143665064	0	TGTTTGCTCCAACCAACTGCTGTCGCC	&++*,2H-IGI+IFIGI?IIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:56:1358:762	113	chr6	15024174	255	7M12235N20M	=	51485453	0	TGGGTACTTTCTCTAGCTCCTCCATTG	/@)3I?IIIIIIIIIIIIIIIIIIIII	NM:i:2	XS:A:-	NH:i:1
+HWI-EAS269:3:100:228:1799	163	chr6	37756378	3	27M	=	37756558	0	GTGTGGGGCTGCGTGGCCTGGCTGGTG	DII4A+>5EI.)F634820&1(0%&&&	NM:i:2	NH:i:2
+HWI-EAS269B:8:50:188:1253	161	chr6	52296684	1	27M	=	52296901	0	ACTTTTCAGGGTTTTCAATAGTCACAC	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:1	NH:i:3	CC:Z:=	CP:i:140695441
+HWI-EAS269B:8:19:440:1687	161	chr6	52850377	255	27M	=	52850580	0	TACAAAGATGGACTTTTAAAATTCATT	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:47:1555:1520	161	chr6	67231655	255	27M	=	67233919	0	ATTCAGTTATGGACCATCATTTCCGGA	IIIIIIIIIIIIIIIIIIIIII<AII1	NM:i:0	NH:i:1
+HWI-EAS269B:8:79:1563:1318	161	chr6	89267414	255	27M	=	89267614	0	GTGCGTGTTCCAGGCAGAGCTGGAAGA	IIIIIIIIIIIIIIIIIIIIIII=;IA	NM:i:0	NH:i:1
+HWI-EAS269B:8:54:954:565	161	chr6	120882513	255	27M	=	120882716	0	ACGTCATGGCTGACCAGGACAGAGGTG	IIIIIIIIIIIIIII?IEG;II<II*8	NM:i:0	NH:i:1
+HWI-EAS269B:8:16:570:1775	145	chr7	26081268	255	27M	=	26080313	0	GAAAGAGTGACACAAATCAATAGTAAA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:13:280:189	161	chr7	29184615	255	27M	=	29184814	0	CTTTTCCAACAGCGAGAAAAATGTACA	IIIIIIIIIIIIIIIIIIIIIIIIDII	NM:i:0	NH:i:1
+HWI-EAS269B:8:19:72:273	137	chr7	52258521	255	27M	*	0	0	ATCAGAGGCACAGGGACAGGGTAAGGA	&&2IIIII;E(IIIIIIIIIIIIIIII	NM:i:1	NH:i:1
+HWI-EAS269B:8:40:235:869	161	chr7	52631974	255	27M	=	52632179	0	GGCGCTGACTCCATCAGATATCCATTC	IIIIIII7IIIIIII6IIIIIIIBIII	NM:i:1	NH:i:1
+HWI-EAS269B:8:18:304:346	129	chr7	107789557	255	27M	=	33158390	0	CACGTACTGTCACCTTGTAACATTTGG	IIIIIIIIIIIIIIIIIIIIIIIIIIF	NM:i:0	NH:i:1
+HWI-EAS269B:8:3:698:297	161	chr7	110976065	255	27M	=	110976385	0	GCAGTTATCACTTTCTTGCCATGGGCC	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:50:1269:965	145	chr7	118217605	255	27M	=	118216239	0	ACCTGTAGATCCACATGATCATGAAGA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:38:1381:1429	83	chr7	136141344	255	27M	=	136141280	0	ATCTGAAGTATCCCACATGTTGAGCTC	<III@IIIIIIIII>IIIIIIIIIIII	NM:i:1	NH:i:1
+HWI-EAS269:3:25:781:386	83	chr8	29844005	255	27M	=	29843841	0	TAAGGGAGGAAAGTGTTTCAGAGTGTA	;83<159<<III@IIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:25:979:679	163	chr8	72664237	255	27M	=	72664413	0	CTGATGGGAGCCCTGCGTGGTAAGAGG	IIIIIIIIIII=III@I(II$27I>I4	NM:i:0	NH:i:1
+HWI-EAS269B:8:23:1069:816	145	chr8	74704433	255	27M	=	74704222	0	TGTTCTCAGTTGTGGACAAGTGACAGC	I<GII at IIIIIIII;IIIIIIIIII0I	NM:i:0	NH:i:1
+HWI-EAS269B:8:34:435:1679	73	chr8	87366211	3	27M	*	0	0	AAGCCTAGGGCTTCTCCTCTACACCCC	I556I;FCAIIIFI<IIIIIIIIIIII	NM:i:0	NH:i:2
+HWI-EAS269B:8:32:1486:294	97	chr8	124121215	255	27M	=	124121411	0	TGTTACCATACGCCCTTCTGCTGAGGC	IIIIIIIIIIIIIIIIIIDIIIIGIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:32:142:1471	81	chr8	124496752	3	27M	=	124496536	0	GGATCTGCTTCATGAGTTGCCACATTG	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:1	NH:i:2
+HWI-EAS269B:8:60:82:1806	97	chr8	125945215	255	27M	=	125945423	0	AGATGCTGGCCATCCAGAGTAAGAACA	IIIIIIIIIIIIIIIIIIIHIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:16:1162:495	81	chr8	125945584	255	27M	=	125945381	0	GTTGCTCGCAGCTGGGGTGTGGGGCCA	<CIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:1:165:1889	81	chr8	126016885	255	27M	=	126016469	0	TGAGCAGGAAAACACTTTAAACCAGAT	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:66:939:1154	145	chr9	17537149	255	27M	=	17536959	0	CTCCTTCAAGTACGCCTGGGTCTTAGA	IDIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:68:1189:285	81	chr9	61249807	255	27M	=	61242460	0	AGTCAAGCTCACTTGGCGGTGAAGGAT	@.0;;A,?-F5I;7IIIIGIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:15:784:1754	163	chr9	74846937	255	27M	=	74847108	0	GCACAGCACTGAGGAAAGGATCATCTC	IIIIIIIIIIIIIIIIIIGII1CI7II	NM:i:0	NH:i:1
+HWI-EAS269B:8:26:126:1382	145	chrM	3413	255	27M	=	3190	0	GTTATTCTTTATAGCAGAGTACACTAA	IIII8IB328I9IIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:90:320:1336	73	chrM	6326	255	27M	*	0	0	ATGATCTCCAGCTATCCTATGAGCCTT	IIIIIIIIA.ICI.4'(=,C>7-*&@8	NM:i:1	NH:i:1
+HWI-EAS269B:8:23:469:1215	161	chrM	7472	3	27M	=	7676	0	ATTTCATCTGAAGACGTCCTCCACTCA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:2
+HWI-EAS269:3:73:36:783	161	chrM	7958	3	27M	=	8152	0	CCCCAACAATAATAGGATTCCCAATCG	IIIIIIIIIIIIIIIIIIIII09I>>I	NM:i:0	NH:i:2
+HWI-EAS269:3:33:1528:954	99	chrM	8012	3	27M	=	8186	0	TCCTATTCCCATCCTCAAAACGCCTAA	IIIIIIIIIIIIIIIIIIIIIIIDII:	NM:i:0	NH:i:2
+HWI-EAS269:3:2:192:1456	161	chrM	9071	3	27M	=	9255	0	ACGAAACCACATAAATCAAGCCCTACT	III;IEIIDI7III+III*?I@CH+5I	NM:i:0	NH:i:2
+HWI-EAS269:3:63:192:470	97	chrM	14787	255	27M	=	14982	0	GCAGATAAAATTCCATTTCACCCCTAC	IIIIIIIIIIIIIIFIII=B5042:4E	NM:i:0	NH:i:1
+HWI-EAS269B:8:49:528:63	83	chrM	14963	255	27M	=	14829	0	TTTCCTATTTGCATACGCCATTCTACG	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:98:856:1516	145	chrX	20257960	255	27M	=	20256746	0	TACCCGGATTTAAGATGTACCCCATTG	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:28:1174:110	145	chrX	53916348	255	27M	=	53916153	0	TGAATGTCAGCATCATTGACCCACAAA	IIIIFIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:58:663:410	147	chrX	75006902	255	27M	=	75006834	0	TCAGGTGGTTTACAGTGTTCTGACAAA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
\ No newline at end of file
diff --git a/test-data/filter1_in5.tab b/test-data/filter1_in5.tab
new file mode 100644
index 0000000..9bf6d18
--- /dev/null
+++ b/test-data/filter1_in5.tab
@@ -0,0 +1,5 @@
+tracking_id	class_code	nearest_ref_id	gene_id	gene_short_name	tss_id	locus	length	coverage	replicate 1_FPKM	replicate 1_conf_lo	replicate 1_conf_hi	replicate 1_status	replicate 2_FPKM	replicate 2_conf_lo	replicate 2_conf_hi	replicate 2_status
+CUFF.1.1	-	-	CUFF.1	-	-	chr19:305598-306225	627	-	0	0	0	OK	206.177	0	694.583	OK
+CUFF.10.1	-	-	CUFF.10	-	-	chr19:618402-618611	209	-	0	0	0	OK	767.201	0	2801.16	OK
+CUFF.100.1	-	-	CUFF.100	-	-	chr19:1625589-1652356	888	-	0	0	0	OK	172.566	0	2879.98	OK
+CUFF.100.2	-	-	CUFF.100	-	-	chr19:1625589-1652356	581	-	0	0	0	OK	1147.8	0	5922.31	OK
diff --git a/test-data/filter1_inbad.bed b/test-data/filter1_inbad.bed
new file mode 100644
index 0000000..70c7992
--- /dev/null
+++ b/test-data/filter1_inbad.bed
@@ -0,0 +1,6 @@
+# Should skip this line
+chr22	30120223	30120265	CCDS13897.1_cds_0_0_chr22_30120224_f	0	+
+chr22	foo	foo	foo	foo	foo
+chr22	30160419	30160661	CCDS13898.1_cds_0_0_chr22_30160420_r	0	-
+chr22	30665273	30665360	CCDS13901.1_cds_0_0_chr22_30665274_f	0	+
+chr22	30939054	30939266	CCDS13903.1_cds_0_0_chr22_30939055_r	0	-
diff --git a/test-data/filter1_test1.bed b/test-data/filter1_test1.bed
new file mode 100644
index 0000000..2ef3a69
--- /dev/null
+++ b/test-data/filter1_test1.bed
@@ -0,0 +1,4 @@
+chr22	30120223	30120265	CCDS13897.1_cds_0_0_chr22_30120224_f	0	+
+chr22	30160419	30160661	CCDS13898.1_cds_0_0_chr22_30160420_r	0	-
+chr22	30665273	30665360	CCDS13901.1_cds_0_0_chr22_30665274_f	0	+
+chr22	30939054	30939266	CCDS13903.1_cds_0_0_chr22_30939055_r	0	-
diff --git a/test-data/filter1_test2.bed b/test-data/filter1_test2.bed
new file mode 100644
index 0000000..715e66b
--- /dev/null
+++ b/test-data/filter1_test2.bed
@@ -0,0 +1,2 @@
+chr1	147984101	148035079	BC007833	0	+	147984545	148033414	0	14	529,32,81,131,118,153,300,206,84,49,85,130,46,1668,	0,25695,28767,33118,33695,33998,35644,38005,39629,40577,41402,43885,48367,49310,
+chr1	148185113	148187485	NM_002796	0	+	148185136	148187378	0	7	163,207,147,82,117,89,120,	0,416,877,1199,1674,1977,2252,
diff --git a/test-data/filter1_test3.sam b/test-data/filter1_test3.sam
new file mode 100644
index 0000000..2a1e8a0
--- /dev/null
+++ b/test-data/filter1_test3.sam
@@ -0,0 +1,6 @@
+HWI-EAS269B:8:34:797:623	145	chr1	16632182	255	27M	=	16631977	0	CCATTTCCTGTATGCTGTAAAGTACAA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:58:533:1198	81	chr1	88426079	255	27M	=	88423429	0	GAAGAGGAAGAAGGTGGGGAGGAAGAG	IIIG?IIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269:3:89:1776:1815	97	chr1	134085638	255	27M	=	134085824	0	GAATGATTCTCTGGGTGTTACTTTGCA	IIIIIIIIIIIIIIIDIII:IIII>F5	NM:i:0	NH:i:1
+HWI-EAS269B:8:74:1134:1670	161	chr1	138166886	255	27M	=	138167084	0	TTACTAGTGTCTCTCTTACCATCATAT	.IIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:1	NH:i:1
+HWI-EAS269:3:59:1321:1427	147	chr1	173149715	255	27M	=	173149555	0	AAGGGCTAGGGTGACAGGCAGGGGACG	-C<CID?IIIIIIIDIIIIIIIIIIII	NM:i:0	NH:i:1
+HWI-EAS269B:8:8:164:1678	145	chr1	178660716	255	27M	=	178660493	0	CAATTGGTGTTTTTCTTAAGAGACTCA	IIIIIIIIIIIIIIIIIIIIIIIIIII	NM:i:0	NH:i:1
diff --git a/test-data/filter1_test4.bed b/test-data/filter1_test4.bed
new file mode 100644
index 0000000..d4a2b36
--- /dev/null
+++ b/test-data/filter1_test4.bed
@@ -0,0 +1,5 @@
+chr22	30120223	30120265	CCDS13897.1_cds_0_0_chr22_30120224_f	0	+
+chr22	foo	foo	foo	foo	foo
+chr22	30160419	30160661	CCDS13898.1_cds_0_0_chr22_30160420_r	0	-
+chr22	30665273	30665360	CCDS13901.1_cds_0_0_chr22_30665274_f	0	+
+chr22	30939054	30939266	CCDS13903.1_cds_0_0_chr22_30939055_r	0	-
diff --git a/test-data/filter1_test5.tab b/test-data/filter1_test5.tab
new file mode 100644
index 0000000..785309a
--- /dev/null
+++ b/test-data/filter1_test5.tab
@@ -0,0 +1,4 @@
+tracking_id	class_code	nearest_ref_id	gene_id	gene_short_name	tss_id	locus	length	coverage	replicate 1_FPKM	replicate 1_conf_lo	replicate 1_conf_hi	replicate 1_status	replicate 2_FPKM	replicate 2_conf_lo	replicate 2_conf_hi	replicate 2_status
+CUFF.1.1	-	-	CUFF.1	-	-	chr19:305598-306225	627	-	0	0	0	OK	206.177	0	694.583	OK
+CUFF.100.1	-	-	CUFF.100	-	-	chr19:1625589-1652356	888	-	0	0	0	OK	172.566	0	2879.98	OK
+CUFF.100.2	-	-	CUFF.100	-	-	chr19:1625589-1652356	581	-	0	0	0	OK	1147.8	0	5922.31	OK
diff --git a/test-data/html_file.txt b/test-data/html_file.txt
new file mode 100644
index 0000000..6fa1288
--- /dev/null
+++ b/test-data/html_file.txt
@@ -0,0 +1,74 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2//EN">
+<HTML>
+<HEAD>
+
+	
+	<META HTTP-EQUIV="Content-Type" CONTENT="text/html;CHARSET=iso-8859-1">
+	<META http-equiv="Content-Script-Type" content="text/javascript">
+	<META HTTP-EQUIV="Pragma" CONTENT="no-cache">
+	<META HTTP-EQUIV="Expires" CONTENT="-1">
+	<TITLE>
+Hyperlinks to Genome Browser	</TITLE>
+	<LINK REL="STYLESHEET" HREF="/style/HGStyle.css">
+
+</HEAD>
+<BODY BGCOLOR="#FFF9D2" LINK="0000CC" VLINK="#330066" ALINK="#6600FF">
+<A NAME="TOP"></A>
+
+<TABLE BORDER=0 CELLPADDING=0 CELLSPACING=0 WIDTH="100%">
+
+<!-- +++++++++++++++++++++ HOTLINKS BAR +++++++++++++++++++ -->
+<TR><TD COLSPAN=3 HEIGHT=40 >
+<table bgcolor="#000000" cellpadding="1" cellspacing="1" width="100%%" height="27">
+<tr bgcolor="#2636D1"><td valign="middle">
+	<table BORDER=0 CELLSPACING=0 CELLPADDING=0 bgcolor="#2636D1" height="24"><TR>
+	 	<TD VALIGN="middle"><font color="#89A1DE"> 
+
+ <A HREF="/index.html?org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">
+           Home</A>    
+       <A HREF="/cgi-bin/hgGateway?org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">
+           Genomes</A>    
+       <A HREF="/cgi-bin/hgTracks?org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">
+           Genome Browser</A>    
+       <A HREF="/cgi-bin/hgBlat?command=start&org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">           Blat</A>    
+       <A HREF="/cgi-bin/hgTables?org=Bushbaby&db=otoGar1&hgsid=1118408&hgta_doMainPage=1" class="topbar">
+           Tables</A>    
+       <A HREF="/cgi-bin/hgNear?org=Bushbaby&db=otoGar1&hgsid=1118408" class="topbar">
+           Gene Sorter</A>    
+<A HREF="/cgi-bin/hgSession?org=Bushbaby&db=otoGar1&hgsid=1118408&hgS_doMainPage=1" class="topbar">Session</A>   
+       <A HREF="/FAQ/" class="topbar">
+           FAQ</A>    
+
+       <A HREF="/goldenPath/help/hgTablesHelp.html"
+       class="topbar">
+           Help</A> 
+ </font></TD>
+       </TR></TABLE>
+</TD></TR></TABLE>
+</TD></TR>	
+
+
+<!-- +++++++++++++++++++++ CONTENT TABLES +++++++++++++++++++ -->
+<TR><TD COLSPAN=3>	
+  	<!--outer table is for border purposes-->
+  	<TABLE WIDTH="100%" BGCOLOR="#888888" BORDER="0" CELLSPACING="0" CELLPADDING="1"><TR><TD>	
+    <TABLE BGCOLOR="#FFFEE8" WIDTH="100%"  BORDER="0" CELLSPACING="0" CELLPADDING="0"><TR><TD>	
+	<TABLE BGCOLOR="#D9E4F8" BACKGROUND="/images/hr.gif" WIDTH="100%"><TR><TD>
+		<FONT SIZE="4"><b> 
+Hyperlinks to Genome Browser</b></FONT></TD></TR></TABLE>
+	<TABLE BGCOLOR="#FFFEE8" WIDTH="100%" CELLPADDING=0><TR><TH HEIGHT=10></TH></TR>
+	<TR><TD WIDTH=10> </TD><TD>
+	
+
+<A HREF="http://hgwdev.cse.ucsc.edu/cgi-bin/hgTracks?db=otoGar1&position=scaffold_0.1-193456:96554-98437&gold=pack" TARGET=_blank>scaffold_0.1-193456_25 at scaffold_0.1-193456:96554-98437</A><BR>
+<A HREF="http://hgwdev.cse.ucsc.edu/cgi-bin/hgTracks?db=otoGar1&position=scaffold_0.1-193456:100227-101729&gold=pack" TARGET=_blank>scaffold_0.1-193456_26 at scaffold_0.1-193456:100227-101729</A><BR>
+<A HREF="http://hgwdev.cse.ucsc.edu/cgi-bin/hgTracks?db=otoGar1&position=scaffold_0.1-193456:101830-103994&gold=pack" TARGET=_blank>scaffold_0.1-193456_27 at scaffold_0.1-193456:101830-103994</A><BR>
+<A HREF="http://hgwdev.cse.ucsc.edu/cgi-bin/hgTracks?db=otoGar1&position=scaffold_0.1-193456:105267-107614&gold=pack" TARGET=_blank>scaffold_0.1-193456_28 at scaffold_0.1-193456:105267-107614</A><BR>
+
+	</TD><TD WIDTH=15></TD></TR></TABLE>
+	<br></TD></TR></TABLE>
+	</TD></TR></TABLE>
+	
+</TD></TR></TABLE>
+
+</BODY></HTML>
diff --git a/test-data/input_taxonomy.biom1 b/test-data/input_taxonomy.biom1
new file mode 100644
index 0000000..e1c0a53
--- /dev/null
+++ b/test-data/input_taxonomy.biom1
@@ -0,0 +1 @@
+{"id": "None","format": "Biological Observation Matrix 1.0.0","format_url": "http://biom-format.org","matrix_type": "sparse","generated_by": "BIOM-Format 2.1.5","date": "2016-05-26T16:43:45.614267","type": "OTU table","matrix_element_type": "float","shape": [19, 2],"data": [[1,0,160.0],[1,1,242.0],[6,0,1.0],[6,1,1.0],[7,0,3.0],[7,1,4.0],[12,0,13.0],[12,1,36.0],[14,0,1.0],[14,1,5.0],[15,0,1.0],[16,0,1.0],[16,1,3.0]],"rows": [{"id": "2", "metadata": {"taxonomy": ["d__Archaea"]}},{"id": "3" [...]
\ No newline at end of file
diff --git a/test-data/library/3.bed b/test-data/library/3.bed
new file mode 100644
index 0000000..124167d
--- /dev/null
+++ b/test-data/library/3.bed
@@ -0,0 +1,25 @@
+chr1	147962006	147975713	NM_005997	0	-	147962192	147975670	0	6	574,145,177,115,153,160,	0,1543,7859,9048,9340,13547,
+chr1	147984101	148035079	BC007833	0	+	147984545	148033414	0	14	529,32,81,131,118,153,300,206,84,49,85,130,46,1668,	0,25695,28767,33118,33695,33998,35644,38005,39629,40577,41402,43885,48367,49310,
+chr1	148077485	148111797	NM_002651	0	-	148078400	148111728	0	12	1097,121,133,266,124,105,110,228,228,45,937,77,	0,2081,2472,6871,9907,10257,11604,14199,15637,18274,23636,34235,
+chr1	148185113	148187485	NM_002796	0	+	148185136	148187378	0	7	163,207,147,82,117,89,120,	0,416,877,1199,1674,1977,2252,
+chr2	118288484	118306183	NM_006773	0	+	118288583	118304530	0	14	184,285,144,136,101,200,115,140,162,153,114,57,178,1796,	0,2765,4970,6482,6971,7183,7468,9890,10261,10768,11590,14270,14610,15903,
+chr2	118389378	118390700	BC005078	0	-	118390395	118390500	0	1	1322,	0,
+chr2	220108603	220116964	NM_001927	0	+	220108689	220116217	0	9	664,61,96,162,126,221,44,83,789,	0,1718,1874,2118,2451,2963,5400,7286,7572,
+chr2	220229182	220233943	NM_024536	0	-	220229609	220233765	0	4	1687,180,574,492,	0,1990,2660,4269,
+chr5	131170738	131357870	AF099740	0	-	131311206	131357817	0	31	112,124,120,81,65,40,120,129,61,88,94,79,72,102,144,117,89,73,96,135,135,78,74,52,33,179,100,102,65,115,248,	0,11593,44117,47607,104668,109739,114675,126366,135488,137518,138009,140437,152389,153373,155388,159269,160793,162981,164403,165577,166119,167611,169501,178260,179675,180901,181658,182260,182953,183706,186884,
+chr5	131424245	131426795	NM_000588	0	+	131424298	131426383	0	5	215,42,90,42,535,	0,313,1658,1872,2015,
+chr5	131556201	131590458	NM_004199	0	-	131556601	131582218	0	15	471,97,69,66,54,100,71,177,194,240,138,152,97,100,170,	0,2316,2802,5596,6269,11138,11472,15098,16528,17674,21306,24587,25142,25935,34087,
+chr5	131621285	131637046	NM_003687	0	+	131621326	131635821	0	7	134,152,82,179,164,118,1430,	0,4915,8770,13221,13609,14097,14331,
+chr6	108298214	108386086	NM_007214	0	-	108299600	108385906	0	21	1530,105,99,102,159,174,60,83,148,155,93,133,95,109,51,59,62,113,115,100,304,	0,2490,6246,10831,12670,23164,23520,27331,31052,32526,34311,36130,36365,38609,41028,42398,43048,51479,54500,59097,87568,
+chr6	108593954	108616704	NM_003269	0	+	108594662	108615360	0	9	733,146,88,236,147,97,150,106,1507,	0,5400,8778,10445,12037,14265,14749,15488,21243,
+chr6	108639410	108689143	NM_152827	0	-	108640045	108688818	0	3	741,125,487,	0,2984,49246,
+chr6	108722790	108950942	NM_145315	0	+	108722976	108950321	0	13	325,224,52,102,131,100,59,83,71,101,141,114,750,	0,28931,52094,60760,61796,71339,107102,152319,181970,182297,215317,224802,227402,
+chr7	113320332	113924911	AK131266	0	+	113862563	113893433	0	20	285,91,178,90,58,75,138,51,201,178,214,105,88,84,77,102,122,70,164,1124,	0,201692,340175,448290,451999,484480,542213,543265,543478,545201,556083,558358,565876,567599,573029,573245,575738,577123,577946,603455,
+chr7	116511232	116557294	NM_003391	0	-	116512159	116556994	0	5	1157,265,278,227,383,	0,20384,37843,43339,45679,
+chr7	116713967	116902666	NM_000492	0	+	116714099	116901113	0	27	185,111,109,216,90,164,126,247,93,183,192,95,87,724,129,38,251,80,151,228,101,249,156,90,173,106,1754,	0,24290,29071,50936,54313,55285,56585,60137,62053,68678,79501,107776,110390,111971,114967,122863,123569,126711,130556,131618,134650,147559,162475,172879,184725,185496,186945,
+chr7	116944658	117107512	AF377960	0	-	116945541	116979926	0	23	1129,102,133,64,186,206,179,188,153,100,87,80,96,276,118,255,151,100,204,1654,225,108,173,	0,7364,8850,10413,13893,14398,17435,24259,24615,35177,35359,45901,47221,49781,56405,66857,69787,72208,73597,80474,100111,150555,162681,
+chr8	118880786	119193239	NM_000127	0	-	118881131	119192466	0	11	531,172,161,90,96,119,133,120,108,94,1735,	0,5355,7850,13505,19068,20309,23098,30863,36077,37741,310718,
+chr9	128763240	128783870	NM_174933	0	+	128764156	128783586	0	12	261,118,74,159,76,48,56,63,129,117,127,370,	0,522,875,5630,12374,12603,15040,15175,18961,19191,20037,20260,
+chr9	128787362	128789566	NM_014908	0	-	128787519	128789136	0	1	2204,	0,
+chr9	128789530	128848928	NM_015354	0	+	128789552	128848511	0	44	54,55,74,85,81,45,93,120,212,115,201,90,66,120,127,153,127,88,77,115,121,67,129,140,107,207,170,70,68,196,78,86,146,182,201,93,159,138,75,228,132,74,130,594,	0,1491,5075,8652,9254,10312,11104,11317,20808,21702,23060,25462,31564,32908,33566,34851,35204,35595,35776,37202,38860,39111,39891,40349,42422,45499,45827,46675,47158,47621,50453,50840,51474,51926,53831,54186,55119,55619,57449,57605,57947,58352,58541,58804,
+chr9	128849867	128870133	NM_020145	0	-	128850516	128869987	0	11	757,241,101,90,24,63,93,134,129,142,209,	0,1071,1736,2085,2635,4201,6376,6736,13056,14247,20057,
diff --git a/test-data/library/4.bed b/test-data/library/4.bed
new file mode 100644
index 0000000..6f32a4f
--- /dev/null
+++ b/test-data/library/4.bed
@@ -0,0 +1 @@
+chr22	30128507	31828507	uc003bnx.1_cds_2_0_chr22_29227_f	0	+
diff --git a/test-data/library/5.bed b/test-data/library/5.bed
new file mode 100644
index 0000000..646dca7
--- /dev/null
+++ b/test-data/library/5.bed
@@ -0,0 +1,134 @@
+chr7	115444712	115444739	CCDS5763.1_cds_0_0_chr7_115444713_f	0	+
+chr7	115468538	115468624	CCDS5763.1_cds_1_0_chr7_115468539_f	0	+
+chr7	115483024	115483277	CCDS5763.1_cds_2_0_chr7_115483025_f	0	+
+chr7	115484165	115484501	CCDS5763.1_cds_3_0_chr7_115484166_f	0	+
+chr7	115485764	115485980	CCDS5763.1_cds_4_0_chr7_115485765_f	0	+
+chr7	115486322	115486481	CCDS5763.1_cds_5_0_chr7_115486323_f	0	+
+chr7	115491298	115491487	CCDS5763.1_cds_6_0_chr7_115491299_f	0	+
+chr7	115468538	115468624	CCDS5764.1_cds_0_0_chr7_115468539_f	0	+
+chr7	115483024	115483277	CCDS5764.1_cds_1_0_chr7_115483025_f	0	+
+chr7	115484165	115484501	CCDS5764.1_cds_2_0_chr7_115484166_f	0	+
+chr7	115485764	115485980	CCDS5764.1_cds_3_0_chr7_115485765_f	0	+
+chr7	115486322	115486481	CCDS5764.1_cds_4_0_chr7_115486323_f	0	+
+chr7	115491298	115491487	CCDS5764.1_cds_5_0_chr7_115491299_f	0	+
+chr7	115733786	115733936	CCDS5766.1_cds_0_0_chr7_115733787_f	0	+
+chr7	115734264	115734452	CCDS5766.1_cds_1_0_chr7_115734265_f	0	+
+chr7	115739975	115740126	CCDS5766.1_cds_2_0_chr7_115739976_f	0	+
+chr7	115733786	115733936	CCDS5765.1_cds_0_0_chr7_115733787_f	0	+
+chr7	115739975	115740164	CCDS5765.1_cds_1_0_chr7_115739976_f	0	+
+chr7	115759067	115759097	CCDS5767.1_cds_0_0_chr7_115759068_f	0	+
+chr7	115760529	115760694	CCDS5767.1_cds_1_0_chr7_115760530_f	0	+
+chr7	115792950	115793292	CCDS5767.1_cds_2_0_chr7_115792951_f	0	+
+chr7	116096616	116096655	CCDS5768.1_cds_0_0_chr7_116096617_f	0	+
+chr7	116122131	116122195	CCDS5768.1_cds_1_0_chr7_116122132_f	0	+
+chr7	116126998	116127050	CCDS5768.1_cds_2_0_chr7_116126999_f	0	+
+chr7	116132776	116132840	CCDS5768.1_cds_3_0_chr7_116132777_f	0	+
+chr7	116138181	116138388	CCDS5768.1_cds_4_0_chr7_116138182_f	0	+
+chr7	116140267	116140347	CCDS5768.1_cds_5_0_chr7_116140268_f	0	+
+chr7	116144237	116144316	CCDS5768.1_cds_6_0_chr7_116144238_f	0	+
+chr7	116146073	116146145	CCDS5768.1_cds_7_0_chr7_116146074_f	0	+
+chr7	116150064	116150127	CCDS5768.1_cds_8_0_chr7_116150065_f	0	+
+chr7	116151731	116151872	CCDS5768.1_cds_9_0_chr7_116151732_f	0	+
+chr7	116187545	116187696	CCDS5770.1_cds_0_0_chr7_116187546_f	0	+
+chr7	116333766	116333849	CCDS5770.1_cds_1_0_chr7_116333767_f	0	+
+chr7	116353565	116353725	CCDS5770.1_cds_2_0_chr7_116353566_f	0	+
+chr7	116363797	116363852	CCDS5770.1_cds_3_0_chr7_116363798_f	0	+
+chr7	116364495	116364611	CCDS5770.1_cds_4_0_chr7_116364496_f	0	+
+chr7	116365889	116365965	CCDS5770.1_cds_5_0_chr7_116365890_f	0	+
+chr7	116368128	116368197	CCDS5770.1_cds_6_0_chr7_116368129_f	0	+
+chr7	116370085	116370240	CCDS5770.1_cds_7_0_chr7_116370086_f	0	+
+chr7	116372439	116372537	CCDS5770.1_cds_8_0_chr7_116372440_f	0	+
+chr7	116404866	116404981	CCDS5770.1_cds_9_0_chr7_116404867_f	0	+
+chr7	116423325	116423398	CCDS5770.1_cds_10_0_chr7_116423326_f	0	+
+chr7	116424838	116424941	CCDS5770.1_cds_11_0_chr7_116424839_f	0	+
+chr7	116443791	116443942	CCDS5770.1_cds_12_0_chr7_116443792_f	0	+
+chr7	116453088	116453181	CCDS5770.1_cds_13_0_chr7_116453089_f	0	+
+chr7	116455927	116456067	CCDS5770.1_cds_14_0_chr7_116455928_f	0	+
+chr7	116456865	116456985	CCDS5770.1_cds_15_0_chr7_116456866_f	0	+
+chr7	116187545	116187696	CCDS5769.1_cds_0_0_chr7_116187546_f	0	+
+chr7	116333766	116333849	CCDS5769.1_cds_1_0_chr7_116333767_f	0	+
+chr7	116353565	116353725	CCDS5769.1_cds_2_0_chr7_116353566_f	0	+
+chr7	116363797	116363852	CCDS5769.1_cds_3_0_chr7_116363798_f	0	+
+chr7	116364495	116364611	CCDS5769.1_cds_4_0_chr7_116364496_f	0	+
+chr7	116365889	116365965	CCDS5769.1_cds_5_0_chr7_116365890_f	0	+
+chr7	116370085	116370240	CCDS5769.1_cds_6_0_chr7_116370086_f	0	+
+chr7	116372439	116372537	CCDS5769.1_cds_7_0_chr7_116372440_f	0	+
+chr7	116404866	116404981	CCDS5769.1_cds_8_0_chr7_116404867_f	0	+
+chr7	116423325	116423398	CCDS5769.1_cds_9_0_chr7_116423326_f	0	+
+chr7	116424838	116424941	CCDS5769.1_cds_10_0_chr7_116424839_f	0	+
+chr7	116443791	116443942	CCDS5769.1_cds_11_0_chr7_116443792_f	0	+
+chr7	116453088	116453181	CCDS5769.1_cds_12_0_chr7_116453089_f	0	+
+chr7	116455927	116456067	CCDS5769.1_cds_13_0_chr7_116455928_f	0	+
+chr7	116463766	116463862	CCDS5769.1_cds_14_0_chr7_116463767_f	0	+
+chr7	116512159	116512389	CCDS5771.1_cds_0_0_chr7_116512160_r	0	-
+chr7	116531616	116531881	CCDS5771.1_cds_1_0_chr7_116531617_r	0	-
+chr7	116549075	116549353	CCDS5771.1_cds_2_0_chr7_116549076_r	0	-
+chr7	116554571	116554798	CCDS5771.1_cds_3_0_chr7_116554572_r	0	-
+chr7	116556911	116556994	CCDS5771.1_cds_4_0_chr7_116556912_r	0	-
+chr7	116597600	116597753	CCDS5772.1_cds_0_0_chr7_116597601_r	0	-
+chr7	116601356	116601470	CCDS5772.1_cds_1_0_chr7_116601357_r	0	-
+chr7	116602616	116602722	CCDS5772.1_cds_2_0_chr7_116602617_r	0	-
+chr7	116613942	116614052	CCDS5772.1_cds_3_0_chr7_116613943_r	0	-
+chr7	116615015	116615072	CCDS5772.1_cds_4_0_chr7_116615016_r	0	-
+chr7	116616073	116616149	CCDS5772.1_cds_5_0_chr7_116616074_r	0	-
+chr7	116616990	116617115	CCDS5772.1_cds_6_0_chr7_116616991_r	0	-
+chr7	116618730	116618865	CCDS5772.1_cds_7_0_chr7_116618731_r	0	-
+chr7	116619702	116619814	CCDS5772.1_cds_8_0_chr7_116619703_r	0	-
+chr7	116654167	116654279	CCDS5772.1_cds_9_0_chr7_116654168_r	0	-
+chr7	116656241	116656364	CCDS5772.1_cds_10_0_chr7_116656242_r	0	-
+chr7	116660840	116660940	CCDS5772.1_cds_11_0_chr7_116660841_r	0	-
+chr7	116661360	116661465	CCDS5772.1_cds_12_0_chr7_116661361_r	0	-
+chr7	116714099	116714152	CCDS5773.1_cds_0_0_chr7_116714100_f	0	+
+chr7	116738257	116738368	CCDS5773.1_cds_1_0_chr7_116738258_f	0	+
+chr7	116743038	116743147	CCDS5773.1_cds_2_0_chr7_116743039_f	0	+
+chr7	116764903	116765119	CCDS5773.1_cds_3_0_chr7_116764904_f	0	+
+chr7	116768280	116768370	CCDS5773.1_cds_4_0_chr7_116768281_f	0	+
+chr7	116769252	116769416	CCDS5773.1_cds_5_0_chr7_116769253_f	0	+
+chr7	116770552	116770678	CCDS5773.1_cds_6_0_chr7_116770553_f	0	+
+chr7	116774104	116774351	CCDS5773.1_cds_7_0_chr7_116774105_f	0	+
+chr7	116776020	116776113	CCDS5773.1_cds_8_0_chr7_116776021_f	0	+
+chr7	116782645	116782828	CCDS5773.1_cds_9_0_chr7_116782646_f	0	+
+chr7	116793468	116793660	CCDS5773.1_cds_10_0_chr7_116793469_f	0	+
+chr7	116821743	116821838	CCDS5773.1_cds_11_0_chr7_116821744_f	0	+
+chr7	116824357	116824444	CCDS5773.1_cds_12_0_chr7_116824358_f	0	+
+chr7	116825938	116826662	CCDS5773.1_cds_13_0_chr7_116825939_f	0	+
+chr7	116828934	116829063	CCDS5773.1_cds_14_0_chr7_116828935_f	0	+
+chr7	116836830	116836868	CCDS5773.1_cds_15_0_chr7_116836831_f	0	+
+chr7	116837536	116837787	CCDS5773.1_cds_16_0_chr7_116837537_f	0	+
+chr7	116840678	116840758	CCDS5773.1_cds_17_0_chr7_116840679_f	0	+
+chr7	116844523	116844674	CCDS5773.1_cds_18_0_chr7_116844524_f	0	+
+chr7	116845585	116845813	CCDS5773.1_cds_19_0_chr7_116845586_f	0	+
+chr7	116848617	116848718	CCDS5773.1_cds_20_0_chr7_116848618_f	0	+
+chr7	116861526	116861775	CCDS5773.1_cds_21_0_chr7_116861527_f	0	+
+chr7	116876442	116876598	CCDS5773.1_cds_22_0_chr7_116876443_f	0	+
+chr7	116886846	116886936	CCDS5773.1_cds_23_0_chr7_116886847_f	0	+
+chr7	116898692	116898865	CCDS5773.1_cds_24_0_chr7_116898693_f	0	+
+chr7	116899463	116899569	CCDS5773.1_cds_25_0_chr7_116899464_f	0	+
+chr7	116900912	116901113	CCDS5773.1_cds_26_0_chr7_116900913_f	0	+
+chr7	116945541	116945787	CCDS5774.1_cds_0_0_chr7_116945542_r	0	-
+chr7	116952022	116952124	CCDS5774.1_cds_1_0_chr7_116952023_r	0	-
+chr7	116953508	116953641	CCDS5774.1_cds_2_0_chr7_116953509_r	0	-
+chr7	116955071	116955135	CCDS5774.1_cds_3_0_chr7_116955072_r	0	-
+chr7	116958551	116958737	CCDS5774.1_cds_4_0_chr7_116958552_r	0	-
+chr7	116959056	116959262	CCDS5774.1_cds_5_0_chr7_116959057_r	0	-
+chr7	116962093	116962272	CCDS5774.1_cds_6_0_chr7_116962094_r	0	-
+chr7	116968917	116969105	CCDS5774.1_cds_7_0_chr7_116968918_r	0	-
+chr7	116969273	116969426	CCDS5774.1_cds_8_0_chr7_116969274_r	0	-
+chr7	116979835	116979935	CCDS5774.1_cds_9_0_chr7_116979836_r	0	-
+chr7	116980017	116980104	CCDS5774.1_cds_10_0_chr7_116980018_r	0	-
+chr7	116990559	116990639	CCDS5774.1_cds_11_0_chr7_116990560_r	0	-
+chr7	116991879	116991975	CCDS5774.1_cds_12_0_chr7_116991880_r	0	-
+chr7	116994439	116994715	CCDS5774.1_cds_13_0_chr7_116994440_r	0	-
+chr7	117001063	117001181	CCDS5774.1_cds_14_0_chr7_117001064_r	0	-
+chr7	117011515	117011770	CCDS5774.1_cds_15_0_chr7_117011516_r	0	-
+chr7	117014445	117014596	CCDS5774.1_cds_16_0_chr7_117014446_r	0	-
+chr7	117016866	117016966	CCDS5774.1_cds_17_0_chr7_117016867_r	0	-
+chr7	117018255	117018459	CCDS5774.1_cds_18_0_chr7_117018256_r	0	-
+chr7	117025132	117026786	CCDS5774.1_cds_19_0_chr7_117025133_r	0	-
+chr7	117044769	117044994	CCDS5774.1_cds_20_0_chr7_117044770_r	0	-
+chr7	117095213	117095321	CCDS5774.1_cds_21_0_chr7_117095214_r	0	-
+chr7	117107339	117107420	CCDS5774.1_cds_22_0_chr7_117107340_r	0	-
+chr5	131424298	131424460	CCDS4149.1_cds_0_0_chr5_131424299_f	0	+
+chr5	131424558	131424600	CCDS4149.1_cds_1_0_chr5_131424559_f	0	+
+chr5	131425903	131425993	CCDS4149.1_cds_2_0_chr5_131425904_f	0	+
+chr5	131426117	131426159	CCDS4149.1_cds_3_0_chr5_131426118_f	0	+
diff --git a/test-data/neostore.zip b/test-data/neostore.zip
new file mode 100644
index 0000000..1165399
Binary files /dev/null and b/test-data/neostore.zip differ
diff --git a/test-data/phiX.fasta b/test-data/phiX.fasta
new file mode 100644
index 0000000..53df885
--- /dev/null
+++ b/test-data/phiX.fasta
@@ -0,0 +1,79 @@
+>phiX174
+GAGTTTTATCGCTTCCATGACGCAGAAGTTAACACTTTCGGATATTTCTGATGAGTCGAAAAATTATCTT
+GATAAAGCAGGAATTACTACTGCTTGTTTACGAATTAAATCGAAGTGGACTGCTGGCGGAAAATGAGAAA
+ATTCGACCTATCCTTGCGCAGCTCGAGAAGCTCTTACTTTGCGACCTTTCGCCATCAACTAACGATTCTG
+TCAAAAACTGACGCGTTGGATGAGGAGAAGTGGCTTAATATGCTTGGCACGTTCGTCAAGGACTGGTTTA
+GATATGAGTCACATTTTGTTCATGGTAGAGATTCTCTTGTTGACATTTTAAAAGAGCGTGGATTACTATC
+TGAGTCCGATGCTGTTCAACCACTAATAGGTAAGAAATCATGAGTCAAGTTACTGAACAATCCGTACGTT
+TCCAGACCGCTTTGGCCTCTATTAAGCTCATTCAGGCTTCTGCCGTTTTGGATTTAACCGAAGATGATTT
+CGATTTTCTGACGAGTAACAAAGTTTGGATTGCTACTGACCGCTCTCGTGCTCGTCGCTGCGTTGAGGCT
+TGCGTTTATGGTACGCTGGACTTTGTGGGATACCCTCGCTTTCCTGCTCCTGTTGAGTTTATTGCTGCCG
+TCATTGCTTATTATGTTCATCCCGTCAACATTCAAACGGCCTGTCTCATCATGGAAGGCGCTGAATTTAC
+GGAAAACATTATTAATGGCGTCGAGCGTCCGGTTAAAGCCGCTGAATTGTTCGCGTTTACCTTGCGTGTA
+CGCGCAGGAAACACTGACGTTCTTACTGACGCAGAAGAAAACGTGCGTCAAAAATTACGTGCAGAAGGAG
+TGATGTAATGTCTAAAGGTAAAAAACGTTCTGGCGCTCGCCCTGGTCGTCCGCAGCCGTTGCGAGGTACT
+AAAGGCAAGCGTAAAGGCGCTCGTCTTTGGTATGTAGGTGGTCAACAATTTTAATTGCAGGGGCTTCGGC
+CCCTTACTTGAGGATAAATTATGTCTAATATTCAAACTGGCGCCGAGCGTATGCCGCATGACCTTTCCCA
+TCTTGGCTTCCTTGCTGGTCAGATTGGTCGTCTTATTACCATTTCAACTACTCCGGTTATCGCTGGCGAC
+TCCTTCGAGATGGACGCCGTTGGCGCTCTCCGTCTTTCTCCATTGCGTCGTGGCCTTGCTATTGACTCTA
+CTGTAGACATTTTTACTTTTTATGTCCCTCATCGTCACGTTTATGGTGAACAGTGGATTAAGTTCATGAA
+GGATGGTGTTAATGCCACTCCTCTCCCGACTGTTAACACTACTGGTTATATTGACCATGCCGCTTTTCTT
+GGCACGATTAACCCTGATACCAATAAAATCCCTAAGCATTTGTTTCAGGGTTATTTGAATATCTATAACA
+ACTATTTTAAAGCGCCGTGGATGCCTGACCGTACCGAGGCTAACCCTAATGAGCTTAATCAAGATGATGC
+TCGTTATGGTTTCCGTTGCTGCCATCTCAAAAACATTTGGACTGCTCCGCTTCCTCCTGAGACTGAGCTT
+TCTCGCCAAATGACGACTTCTACCACATCTATTGACATTATGGGTCTGCAAGCTGCTTATGCTAATTTGC
+ATACTGACCAAGAACGTGATTACTTCATGCAGCGTTACCGTGATGTTATTTCTTCATTTGGAGGTAAAAC
+CTCTTATGACGCTGACAACCGTCCTTTACTTGTCATGCGCTCTAATCTCTGGGCATCTGGCTATGATGTT
+GATGGAACTGACCAAACGTCGTTAGGCCAGTTTTCTGGTCGTGTTCAACAGACCTATAAACATTCTGTGC
+CGCGTTTCTTTGTTCCTGAGCATGGCACTATGTTTACTCTTGCGCTTGTTCGTTTTCCGCCTACTGCGAC
+TAAAGAGATTCAGTACCTTAACGCTAAAGGTGCTTTGACTTATACCGATATTGCTGGCGACCCTGTTTTG
+TATGGCAACTTGCCGCCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCTGGTGATTCGTCTAAGAAGT
+TTAAGATTGCTGAGGGTCAGTGGTATCGTTATGCGCCTTCGTATGTTTCTCCTGCTTATCACCTTCTTGA
+AGGCTTCCCATTCATTCAGGAACCGCCTTCTGGTGATTTGCAAGAACGCGTACTTATTCGCCACCATGAT
+TATGACCAGTGTTTCCAGTCCGTTCAGTTGTTGCAGTGGAATAGTCAGGTTAAATTTAATGTGACCGTTT
+ATCGCAATCTGCCGACCACTCGCGATTCAATCATGACTTCGTGATAAAAGATTGAGTGTGAGGTTATAAC
+GCCGAAGCGGTAAAAATTTTAATTTTTGCCGCTGAGGGGTTGACCAAGCGAAGCGCGGTAGGTTTTCTGC
+TTAGGAGTTTAATCATGTTTCAGACTTTTATTTCTCGCCATAATTCAAACTTTTTTTCTGATAAGCTGGT
+TCTCACTTCTGTTACTCCAGCTTCTTCGGCACCTGTTTTACAGACACCTAAAGCTACATCGTCAACGTTA
+TATTTTGATAGTTTGACGGTTAATGCTGGTAATGGTGGTTTTCTTCATTGCATTCAGATGGATACATCTG
+TCAACGCCGCTAATCAGGTTGTTTCTGTTGGTGCTGATATTGCTTTTGATGCCGACCCTAAATTTTTTGC
+CTGTTTGGTTCGCTTTGAGTCTTCTTCGGTTCCGACTACCCTCCCGACTGCCTATGATGTTTATCCTTTG
+AATGGTCGCCATGATGGTGGTTATTATACCGTCAAGGACTGTGTGACTATTGACGTCCTTCCCCGTACGC
+CGGGCAATAATGTTTATGTTGGTTTCATGGTTTGGTCTAACTTTACCGCTACTAAATGCCGCGGATTGGT
+TTCGCTGAATCAGGTTATTAAAGAGATTATTTGTCTCCAGCCACTTAAGTGAGGTGATTTATGTTTGGTG
+CTATTGCTGGCGGTATTGCTTCTGCTCTTGCTGGTGGCGCCATGTCTAAATTGTTTGGAGGCGGTCAAAA
+AGCCGCCTCCGGTGGCATTCAAGGTGATGTGCTTGCTACCGATAACAATACTGTAGGCATGGGTGATGCT
+GGTATTAAATCTGCCATTCAAGGCTCTAATGTTCCTAACCCTGATGAGGCCGCCCCTAGTTTTGTTTCTG
+GTGCTATGGCTAAAGCTGGTAAAGGACTTCTTGAAGGTACGTTGCAGGCTGGCACTTCTGCCGTTTCTGA
+TAAGTTGCTTGATTTGGTTGGACTTGGTGGCAAGTCTGCCGCTGATAAAGGAAAGGATACTCGTGATTAT
+CTTGCTGCTGCATTTCCTGAGCTTAATGCTTGGGAGCGTGCTGGTGCTGATGCTTCCTCTGCTGGTATGG
+TTGACGCCGGATTTGAGAATCAAAAAGAGCTTACTAAAATGCAACTGGACAATCAGAAAGAGATTGCCGA
+GATGCAAAATGAGACTCAAAAAGAGATTGCTGGCATTCAGTCGGCGACTTCACGCCAGAATACGAAAGAC
+CAGGTATATGCACAAAATGAGATGCTTGCTTATCAACAGAAGGAGTCTACTGCTCGCGTTGCGTCTATTA
+TGGAAAACACCAATCTTTCCAAGCAACAGCAGGTTTCCGAGATTATGCGCCAAATGCTTACTCAAGCTCA
+AACGGCTGGTCAGTATTTTACCAATGACCAAATCAAAGAAATGACTCGCAAGGTTAGTGCTGAGGTTGAC
+TTAGTTCATCAGCAAACGCAGAATCAGCGGTATGGCTCTTCTCATATTGGCGCTACTGCAAAGGATATTT
+CTAATGTCGTCACTGATGCTGCTTCTGGTGTGGTTGATATTTTTCATGGTATTGATAAAGCTGTTGCCGA
+TACTTGGAACAATTTCTGGAAAGACGGTAAAGCTGATGGTATTGGCTCTAATTTGTCTAGGAAATAACCG
+TCAGGATTGACACCCTCCCAATTGTATGTTTTCATGCCTCCAAATCTTGGAGGCTTTTTTATGGTTCGTT
+CTTATTACCCTTCTGAATGTCACGCTGATTATTTTGACTTTGAGCGTATCGAGGCTCTTAAACCTGCTAT
+TGAGGCTTGTGGCATTTCTACTCTTTCTCAATCCCCAATGCTTGGCTTCCATAAGCAGATGGATAACCGC
+ATCAAGCTCTTGGAAGAGATTCTGTCTTTTCGTATGCAGGGCGTTGAGTTCGATAATGGTGATATGTATG
+TTGACGGCCATAAGGCTGCTTCTGACGTTCGTGATGAGTTTGTATCTGTTACTGAGAAGTTAATGGATGA
+ATTGGCACAATGCTACAATGTGCTCCCCCAACTTGATATTAATAACACTATAGACCACCGCCCCGAAGGG
+GACGAAAAATGGTTTTTAGAGAACGAGAAGACGGTTACGCAGTTTTGCCGCAAGCTGGCTGCTGAACGCC
+CTCTTAAGGATATTCGCGATGAGTATAATTACCCCAAAAAGAAAGGTATTAAGGATGAGTGTTCAAGATT
+GCTGGAGGCCTCCACTATGAAATCGCGTAGAGGCTTTACTATTCAGCGTTTGATGAATGCAATGCGACAG
+GCTCATGCTGATGGTTGGTTTATCGTTTTTGACACTCTCACGTTGGCTGACGACCGATTAGAGGCGTTTT
+ATGATAATCCCAATGCTTTGCGTGACTATTTTCGTGATATTGGTCGTATGGTTCTTGCTGCCGAGGGTCG
+CAAGGCTAATGATTCACACGCCGACTGCTATCAGTATTTTTGTGTGCCTGAGTATGGTACAGCTAATGGC
+CGTCTTCATTTCCATGCGGTGCATTTTATGCGGACACTTCCTACAGGTAGCGTTGACCCTAATTTTGGTC
+GTCGGGTACGCAATCGCCGCCAGTTAAATAGCTTGCAAAATACGTGGCCTTATGGTTACAGTATGCCCAT
+CGCAGTTCGCTACACGCAGGACGCTTTTTCACGTTCTGGTTGGTTGTGGCCTGTTGATGCTAAAGGTGAG
+CCGCTTAAAGCTACCAGTTATATGGCTGTTGGTTTCTATGTGGCTAAATACGTTAACAAAAAGTCAGATA
+TGGACCTTGCTGCTAAAGGTCTAGGAGCTAAAGAATGGAACAACTCACTAAAAACCAAGCTGTCGCTACT
+TCCCAAGAAGCTGTTCAGAATCAGAATGAGCCGCAACTTCGGGATGAAAATGCTCACAATGACAAATCTG
+TCCACGGAGTGCTTAATCCAACTTACCAAGCTGGGTTACGACGCGACGCCGTTCAACCAGATATTGAAGC
+AGAACGCAAAAAGAGAGATGAGATTGAGGCTGGGAAAAGTTACTGTAGCCGACGTTTTGGCGGCGCAACC
+TGTGACGACAAATCTGCTCAAATTTATGCGCGCTTCGATAAAAATGATTGGCGTATCCAACCTGCA
+
diff --git a/test-data/qualscores.qual454 b/test-data/qualscores.qual454
new file mode 100644
index 0000000..ed43ee6
--- /dev/null
+++ b/test-data/qualscores.qual454
@@ -0,0 +1,49 @@
+>EYKX4VC04IWAEA length=68 xy=3531_0528 region=4 run=R_2007_11_07_16_15_57_
+22 13 9 6 4 3 2 2 1 1 1 1 24 44 33 23 16 11 7 2 28 33 23 18 28 27 27 28 20 21 42 35 21 6 24 25 31 21 28 27 41 34 15 28 28 27 28 28 33 24 27 28 28 24 27 36 27 28 28 28
+28 28 36 30 8 34 25 18
+>EYKX4VC04JKOGH length=48 xy=3808_3903 region=4 run=R_2007_11_07_16_15_57_
+28 28 27 28 38 31 10 28 28 27 27 34 25 28 24 26 27 28 27 37 29 34 25 31 21 28 21 36 28 31 20 24 27 37 28 28 34 27 3 34 25 24 28 28 26 28 35 28
+>EYKX4VC04JIUVK length=84 xy=3788_0830 region=4 run=R_2007_11_07_16_15_57_
+29 20 14 11 8 6 3 1 25 27 25 28 28 27 28 27 28 28 36 28 27 28 36 29 7 28 28 28 27 27 27 35 26 35 26 27 36 28 28 28 38 32 11 28 36 28 27 26 35 25 28 38 31 11 27 28 37 28 27 27
+28 36 29 8 33 24 41 34 19 3 26 28 28 28 35 26 36 29 8 38 32 11 28 28
+>EYKX4VC04JWDRY length=78 xy=3942_1068 region=4 run=R_2007_11_07_16_15_57_
+36 24 14 5 27 20 28 27 28 32 22 28 27 43 36 23 11 27 28 28 28 32 23 36 27 28 28 26 38 32 11 34 25 27 43 36 23 11 38 31 11 37 28 28 28 27 28 30 20 28 32 22 28 36 27 37 30 9 27 28
+28 27 28 42 35 20 5 28 28 28 35 26 27 27 26 39 32 12
+>EYKX4VC04JWMUW length=55 xy=3945_0550 region=4 run=R_2007_11_07_16_15_57_
+36 24 14 4 28 17 34 25 35 25 31 20 28 28 36 27 28 28 24 27 28 28 37 28 27 27 35 25 31 21 27 39 32 12 28 36 28 28 26 27 28 27 26 28 42 35 20 6 28 27 28 28 28 28 28
+>EYKX4VC04JH4RG length=85 xy=3779_3850 region=4 run=R_2007_11_07_16_15_57_
+37 28 35 26 38 31 10 27 37 28 28 38 31 10 27 35 25 25 28 28 28 28 28 28 28 28 27 28 33 23 28 32 22 35 25 31 20 34 25 31 21 26 28 27 26 26 15 36 29 7 27 27 24 36 27 28 37 28 36 28
+27 28 28 28 37 28 28 40 34 14 37 28 28 26 28 36 28 26 28 37 28 28 28 28 27
+>EYKX4VC04JDAWO length=117 xy=3724_3814 region=4 run=R_2007_11_07_16_15_57_
+25 28 28 28 34 25 28 28 28 28 28 28 28 27 27 30 19 28 28 34 25 28 28 26 32 23 28 28 27 28 28 34 25 28 27 28 25 25 36 27 36 29 7 36 27 33 23 28 28 36 27 31 21 28 30 20 28 34 24 28
+27 34 25 28 28 28 28 28 28 27 28 27 37 30 9 28 28 27 28 27 28 28 28 27 33 23 28 28 28 17 28 31 20 28 21 26 28 33 23 26 28 27 26 28 35 26 28 28 21 28 26 28 33 23 36 27 27
+>EYKX4VC04JEY0S length=57 xy=3743_3898 region=4 run=R_2007_11_07_16_15_57_
+23 28 35 28 6 26 24 27 33 23 26 28 28 28 28 33 27 3 27 30 19 28 28 36 27 33 24 24 32 23 25 26 27 28 31 21 34 27 5 28 22 27 28 24 26 28 28 27 28 24 37 29 26 35 26 26 15
+>EYKX4VC04JKOGB length=68 xy=3808_3897 region=4 run=R_2007_11_07_16_15_57_
+28 34 27 4 27 28 24 28 28 28 28 27 27 31 20 28 27 27 26 28 35 26 37 28 28 28 28 28 41 34 19 4 35 26 27 32 22 28 25 36 28 26 28 28 25 36 29 8 28 28 28 27 24 28 41 34 16 27 21 28
+21 26 33 26 21 40 34 14
+>EYKX4VC04JOZA4 length=160 xy=3857_3886 region=4 run=R_2007_11_07_16_15_57_
+35 24 15 7 1 26 28 41 34 15 28 28 28 28 28 27 25 40 34 14 28 28 44 35 24 15 8 2 27 24 27 27 35 26 28 27 36 27 26 36 28 24 27 37 29 27 28 26 35 26 28 28 28 27 26 35 26 37 29 36
+28 28 26 36 28 28 28 37 28 28 28 28 28 28 28 28 36 28 43 36 22 10 19 37 28 27 37 28 24 27 37 28 38 32 11 28 37 29 26 25 34 25 36 27 24 25 24 36 27 23 27 28 39 32 12 39 32 12 28 37
+29 25 27 27 27 24 28 39 32 12 28 26 44 36 24 14 5 33 26 2 33 24 26 24 28 28 27 35 25 34 25 26 41 34 16 43 36 22 8 28
+>EYKX4VC04JLDPN length=75 xy=3816_3865 region=4 run=R_2007_11_07_16_15_57_
+28 28 36 27 28 28 36 27 28 27 28 28 28 27 26 36 27 28 28 27 28 28 28 28 28 28 28 28 27 28 40 33 14 28 28 28 40 33 13 24 27 28 28 28 27 23 32 22 25 18 26 21 24 36 27 26 24 25 28 26
+27 37 28 22 28 28 34 24 28 25 23 26 23 25 27
+>EYKX4VC04IEKBT length=167 xy=3329_0983 region=4 run=R_2007_11_07_16_15_57_
+26 17 12 9 7 5 4 2 1 28 44 31 21 15 11 8 5 2 34 25 44 35 24 15 7 1 35 26 37 29 28 36 27 37 28 28 27 36 27 27 39 32 12 36 27 28 37 28 28 35 26 36 27 28 28 24 27 27 28 28
+28 36 27 40 33 14 27 28 43 36 23 12 2 28 27 27 36 27 43 36 22 8 27 37 28 35 25 28 28 28 28 36 27 41 35 16 39 32 12 28 36 28 28 27 28 38 31 11 28 27 28 28 28 37 28 35 25 28 39 33
+13 28 28 28 28 28 26 26 28 28 27 27 28 27 25 39 32 12 28 28 35 25 25 27 28 28 28 26 36 27 28 28 35 25 28 28 34 25 28 27 36 28 26 28 28 28 28
+>EYKX4VC04H76LH length=104 xy=3256_2259 region=4 run=R_2007_11_07_16_15_57_
+20 12 7 5 3 2 2 1 1 1 1 1 1 28 30 19 26 28 33 23 27 37 28 28 36 28 28 37 28 28 36 28 27 42 35 21 7 27 27 30 19 31 21 28 27 33 23 28 28 28 27 35 25 28 35 26 27 36 27 28
+36 27 28 36 27 28 36 27 28 36 28 28 36 28 28 36 27 27 35 25 27 33 23 28 36 27 28 31 21 28 35 25 27 34 25 28 33 23 28 32 22 26 34 24
+>EYKX4VC04I6APD length=156 xy=3645_0543 region=4 run=R_2007_11_07_16_15_57_
+21 12 8 5 4 2 2 1 1 1 1 1 1 28 38 32 11 28 44 36 24 14 5 24 28 28 28 21 27 42 35 21 7 27 28 27 24 26 28 37 28 35 26 28 26 28 27 24 28 28 27 28 44 18 9 5 3 2 1 1
+1 1 1 1 1 1 1 1 1 35 26 41 34 17 25 24 12 28 28 27 38 32 11 28 28 23 28 27 28 27 20 39 32 12 22 26 36 27 27 27 24 36 28 28 36 27 35 26 40 33 18 1 26 34 27 28 24 22 40 34
+15 26 32 22 28 28 28 25 28 28 39 33 13 27 28 32 22 32 23 42 35 21 7 28 43 36 23 10 27 43 36 23 10 36 28 25
+>EYKX4VC04IPT6U length=143 xy=3457_2692 region=4 run=R_2007_11_07_16_15_57_
+30 20 15 11 8 5 3 1 28 19 27 34 24 44 36 23 13 3 28 35 25 28 27 28 28 28 28 27 35 25 28 36 27 27 28 28 28 28 36 27 36 28 28 42 35 21 6 28 28 27 27 44 35 24 15 8 1 27 27 28
+36 27 28 28 27 28 28 35 26 43 36 23 12 2 28 36 27 26 28 28 36 27 28 28 36 27 41 34 16 25 28 41 34 16 36 28 40 34 14 39 33 13 36 27 40 34 18 2 28 40 34 18 2 35 25 28 27 41 34 19
+4 26 27 28 27 28 28 34 25 36 27 28 28 28 36 27 37 28 28 27 28 27 28
+>EYKX4VC04JX6Y2 length=68 xy=3962_3644 region=4 run=R_2007_11_07_16_15_57_
+28 28 28 27 35 28 6 27 28 27 36 27 28 25 37 28 33 26 2 28 27 28 24 28 37 29 28 28 28 28 28 23 28 32 22 28 36 27 28 26 34 25 28 28 35 26 28 28 26 27 25 28 28 24 28 28 27 27 28 28
+28 28 33 23 28 40 34 14
\ No newline at end of file
diff --git a/test-data/qualscores.qualsolid b/test-data/qualscores.qualsolid
new file mode 100644
index 0000000..74fd0f6
--- /dev/null
+++ b/test-data/qualscores.qualsolid
@@ -0,0 +1,48 @@
+>946_21_302_F3
+2 10 2 2 4 2 21 2 2 4 4 17 5 2 5 2 11 4 2 2 2 10 7 3 15 2 2 2 3 19 3 2 6 3 2 
+>946_21_659_F3
+3 31 3 2 2 2 34 3 2 2 2 31 2 2 3 4 31 8 3 2 2 30 4 8 3 3 2 2 6 9 4 4 6 2 2 
+>946_21_1071_F3
+5 5 2 2 2 8 5 3 2 3 7 7 2 3 4 6 5 2 2 2 5 5 2 2 2 3 8 2 3 3 3 8 2 3 2 
+>946_21_1115_F3
+21 5 2 8 13 31 6 2 17 24 10 27 4 21 29 8 20 2 11 21 13 24 5 5 6 24 31 2 13 6 22 17 6 27 10 
+>946_21_1218_F3
+11 21 2 13 13 16 27 16 19 27 22 28 14 26 24 23 29 10 15 13 6 4 7 16 26 22 11 6 16 22 21 6 4 7 21 
+>946_21_1232_F3
+17 16 2 28 21 31 15 16 10 11 8 20 6 5 18 6 13 23 7 13 4 12 19 8 6 9 10 19 7 10 6 10 20 14 8 
+>946_21_1368_F3
+28 30 31 31 31 20 29 24 27 31 31 31 24 26 31 31 26 15 27 31 27 30 29 27 30 27 30 21 23 26 24 31 17 30 19 
+>946_21_1406_F3
+4 29 3 2 4 5 34 8 2 2 3 29 4 2 8 2 11 4 2 2 2 28 8 5 3 4 31 2 2 2 2 28 5 2 2 
+>946_21_1695_F3
+25 31 8 29 19 31 19 12 16 31 30 13 16 21 31 22 13 13 19 23 32 16 6 14 16 24 13 6 6 14 8 8 5 11 6 
+>946_21_1945_F3
+23 27 14 10 17 31 29 31 10 13 31 29 23 8 24 30 31 28 10 20 26 28 31 5 22 31 24 28 9 7 15 7 20 5 4 
+>946_21_2013_F3
+2 26 2 2 2 2 2 2 9 2 2 2 2 2 2 2 2 4 2 2 2 2 3 2 2 2 2 2 2 2 2 2 4 2 2 
+>946_22_108_F3
+17 14 12 28 12 17 18 28 19 13 14 6 17 5 3 6 2 7 6 12 16 7 13 9 11 8 2 5 5 6 6 7 20 11 19 
+>946_22_1241_F3
+16 8 5 20 11 6 13 5 19 13 8 9 17 31 8 17 2 6 31 21 12 11 8 12 5 8 15 8 5 5 7 5 11 6 10 
+>946_22_1296_F3
+10 8 33 31 27 31 26 5 17 11 6 23 8 24 6 8 14 14 27 11 16 8 8 29 5 21 8 5 14 8 7 16 4 5 5 
+>946_22_1356_F3
+2 31 4 3 2 6 31 4 2 2 4 2 3 8 2 2 2 3 4 8 2 2 3 3 11 2 2 2 4 2 2 2 9 2 2 
+>946_22_1520_F3
+8 11 12 21 13 15 16 25 20 21 14 23 31 23 30 18 25 23 27 30 21 30 15 14 25 22 22 21 21 22 16 23 26 13 21 
+>946_22_1532_F3
+27 30 34 26 25 29 31 31 24 29 29 33 28 22 24 31 30 30 24 22 25 31 19 9 26 29 23 22 13 28 30 31 24 7 29 
+>946_22_1582_F3
+28 19 4 5 6 25 30 4 5 8 15 19 5 9 8 29 27 4 4 2 27 17 2 2 5 30 22 2 11 6 26 16 3 2 2 
+>946_22_1598_F3
+23 5 33 6 19 8 13 4 30 17 11 13 6 16 8 8 27 5 24 8 8 26 16 8 17 2 23 25 4 23 10 16 10 7 13 
+>946_22_1834_F3
+31 28 30 31 31 34 27 29 31 28 31 29 16 31 31 25 31 11 27 21 26 29 16 18 21 19 29 13 24 24 27 24 8 24 10 
+>946_22_1939_F3
+30 29 31 31 25 31 33 24 31 30 25 31 27 33 31 27 31 26 30 19 16 27 29 31 21 25 27 9 26 27 23 15 20 27 27 
+>946_23_975_F3
+31 31 3 3 2 32 29 3 3 4 28 27 2 4 4 30 24 3 7 3 28 24 5 7 8 29 22 2 6 6 2 24 2 2 2 
+>946_23_1133_F3
+19 28 22 31 25 14 28 30 32 27 10 33 26 31 31 14 30 30 16 30 10 28 23 16 6 14 17 10 6 8 18 5 8 9 5 
+>946_23_1221_F3
+30 26 30 32 23 29 29 27 28 20 26 23 14 29 27 29 28 29 23 14 30 27 30 27 8 24 13 26 11 12 20 17 15 5 21
\ No newline at end of file
diff --git a/test-data/rgenetics.bed b/test-data/rgenetics.bed
new file mode 100644
index 0000000..e8b3867
Binary files /dev/null and b/test-data/rgenetics.bed differ
diff --git a/test-data/rgenetics.bim b/test-data/rgenetics.bim
new file mode 100644
index 0000000..3a2bf2c
--- /dev/null
+++ b/test-data/rgenetics.bim
@@ -0,0 +1,25 @@
+22	rs2283802	0	21784722	4	2
+22	rs2267000	0	21785366	4	2
+22	rs16997606	0	21794754	1	3
+22	rs4820537	0	21794810	1	3
+22	rs3788347	0	21797804	3	1
+22	rs756632	0	21799918	4	2
+22	rs4820539	0	21807970	1	3
+22	rs2283804	0	21820335	1	2
+22	rs2267006	0	21820990	3	1
+22	rs4822363	0	21821000	4	2
+22	rs5751592	0	21827674	4	2
+22	rs5759608	0	21832708	2	4
+22	rs5759612	0	21833170	3	1
+22	rs2267009	0	21860168	3	4
+22	rs2267010	0	21864366	3	1
+22	rs5759636	0	21868698	4	2
+22	rs2071436	0	21871488	4	2
+22	rs2267013	0	21875879	3	1
+22	rs6003566	0	21889806	3	1
+22	rs2256725	0	21892891	2	1
+22	rs12160770	0	21892925	1	3
+22	rs5751611	0	21896019	2	4
+22	rs762601	0	21898858	1	3
+22	rs2156921	0	21899063	3	1
+22	rs4822375	0	21905642	1	3
diff --git a/test-data/rgenetics.fam b/test-data/rgenetics.fam
new file mode 100644
index 0000000..dc70cff
--- /dev/null
+++ b/test-data/rgenetics.fam
@@ -0,0 +1,40 @@
+101 1 3 2 2 2
+101 2 0 0 2 1
+101 3 0 0 1 1
+105 1 3 2 2 2
+105 2 0 0 2 1
+105 3 0 0 1 1
+112 1 3 2 1 2
+112 2 0 0 2 1
+112 3 0 0 1 1
+117 1 3 2 2 2
+117 2 0 0 2 1
+117 3 0 0 1 1
+12 1 3 2 1 2
+12 2 0 0 2 1
+12 3 0 0 1 1
+13 1 3 2 1 2
+13 2 0 0 2 1
+13 3 0 0 1 1
+1334 1 10 11 1 2
+1334 10 0 0 1 1
+1334 11 0 0 2 1
+1334 12 0 0 1 1
+1334 13 0 0 2 1
+1334 2 12 13 2 2
+1340 1 9 10 1 2
+1340 10 0 0 2 1
+1340 11 0 0 1 1
+1340 12 0 0 2 1
+1340 2 11 12 2 2
+1340 9 0 0 1 1
+1341 1 11 12 1 1
+1341 11 0 0 1 1
+1341 12 0 0 2 1
+1341 13 0 0 1 1
+1341 14 0 0 2 1
+1341 2 13 14 2 1
+1344 1 12 13 1 1
+1344 12 0 0 1 1
+1344 13 0 0 2 1
+1345 12 0 0 1 1
diff --git a/test-data/rgenetics.map b/test-data/rgenetics.map
new file mode 100644
index 0000000..cf0e641
--- /dev/null
+++ b/test-data/rgenetics.map
@@ -0,0 +1,25 @@
+22	rs2283802	0	21784722
+22	rs2267000	0	21785366
+22	rs16997606	0	21794754
+22	rs4820537	0	21794810
+22	rs3788347	0	21797804
+22	rs756632	0	21799918
+22	rs4820539	0	21807970
+22	rs2283804	0	21820335
+22	rs2267006	0	21820990
+22	rs4822363	0	21821000
+22	rs5751592	0	21827674
+22	rs5759608	0	21832708
+22	rs5759612	0	21833170
+22	rs2267009	0	21860168
+22	rs2267010	0	21864366
+22	rs5759636	0	21868698
+22	rs2071436	0	21871488
+22	rs2267013	0	21875879
+22	rs6003566	0	21889806
+22	rs2256725	0	21892891
+22	rs12160770	0	21892925
+22	rs5751611	0	21896019
+22	rs762601	0	21898858
+22	rs2156921	0	21899063
+22	rs4822375	0	21905642
diff --git a/test-data/rgenetics.ped b/test-data/rgenetics.ped
new file mode 100644
index 0000000..1092f70
--- /dev/null
+++ b/test-data/rgenetics.ped
@@ -0,0 +1,40 @@
+101 1 3 2 2 2 2 2 4 2 1 3 3 3 3 3 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 3 1 3 1 2 1 0 0 2 2 1 3 3 1 1 3
+101 2 0 0 2 1 2 2 4 2 1 3 3 3 3 3 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 3 1 3 1 2 1 3 3 2 2 1 3 3 1 1 3
+101 3 0 0 1 1 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 1 3 2 4 3 3 1 1 3 3
+105 1 3 2 2 2 2 2 4 2 3 3 3 3 3 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 3 1 1 1 2 1 0 0 2 2 1 1 3 3 1 1
+105 2 0 0 2 1 2 2 4 4 3 3 3 3 3 1 2 2 1 3 1 2 3 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+105 3 0 0 1 1 4 2 2 2 3 3 3 3 3 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+112 1 3 2 1 2 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+112 2 0 0 2 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+112 3 0 0 1 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 2 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+117 1 3 2 2 2 2 2 4 2 3 3 3 3 3 3 4 2 1 1 2 2 1 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+117 2 0 0 2 1 2 2 4 4 1 3 3 3 3 3 2 2 1 3 2 2 1 1 2 2 4 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+117 3 0 0 1 1 2 2 4 2 3 3 3 3 3 3 4 2 1 1 2 2 1 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+12 1 3 2 1 2 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+12 2 0 0 2 1 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+12 3 0 0 1 1 2 2 4 2 1 3 1 3 3 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+13 1 3 2 1 2 4 2 4 2 1 3 3 3 3 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 4 1 1 2 2 4 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+13 2 0 0 2 1 4 2 2 2 3 3 1 3 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 0 0 0 0 3 3 1 1 3 3
+13 3 0 0 1 1 2 2 4 4 1 3 3 3 3 3 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 3 1 2 2 2 2 1 1 3 1 1 1 3 3 2 2 3 3 1 1 3 3
+1334 1 10 11 1 2 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1334 10 0 0 1 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1334 11 0 0 2 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1334 12 0 0 1 1 4 2 2 2 3 3 1 3 1 1 2 2 3 3 2 2 1 1 2 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1334 13 0 0 2 1 4 2 4 2 3 3 1 3 3 1 4 2 1 3 2 2 1 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1334 2 12 13 2 2 4 4 2 2 3 3 1 3 1 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 3 1 1 2 2 4 4 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1340 1 9 10 1 2 4 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 3 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1340 10 0 0 2 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 3 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1340 11 0 0 1 1 4 2 4 2 3 3 1 3 3 1 4 2 1 3 2 2 1 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 12 0 0 2 1 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 4 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 2 11 12 2 2 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 9 0 0 1 1 4 4 2 2 3 3 1 1 1 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 3 1 1 2 2 4 4 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 1 11 12 1 1 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 0 0 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+1341 11 0 0 1 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 4 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1341 12 0 0 2 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 4 2 4 2 3 1 1 1 2 1 3 3 2 2 1 1 3 3 1 1
+1341 13 0 0 1 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 14 0 0 2 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 2 13 14 2 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1344 1 12 13 1 1 2 2 4 4 3 3 3 3 3 3 4 4 1 1 2 2 1 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+1344 12 0 0 1 1 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1344 13 0 0 2 1 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1345 12 0 0 1 1 4 2 4 2 3 3 3 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 4 2 4 2 3 1 1 1 2 1 3 3 2 2 1 1 3 3 1 1
diff --git a/test-data/sam_with_header.sam b/test-data/sam_with_header.sam
new file mode 100644
index 0000000..33449b1
--- /dev/null
+++ b/test-data/sam_with_header.sam
@@ -0,0 +1,14 @@
+@SQ	SN:ref	LN:45
+@SQ	SN:ref2	LN:40
+r001	163	ref	7	30	8M4I4M1D3M	=	37	39	TTAGATAAAGAGGATACTG	*	XX:B:S,12561,2,20,112
+r002	0	ref	9	30	1S2I6M1P1I1P1I4M2I	*	0	0	AAAAGATAAGGGATAAA	*
+r003	0	ref	9	30	5H6M	*	0	0	AGCTAA	*
+r004	0	ref	16	30	6M14N1I5M	*	0	0	ATAGCTCTCAGC	*
+r003	16	ref	29	30	6H5M	*	0	0	TAGGC	*
+r001	83	ref	37	30	9M	=	7	-39	CAGCGCCAT	*
+x1	0	ref2	1	30	20M	*	0	0	aggttttataaaacaaataa	????????????????????
+x2	0	ref2	2	30	21M	*	0	0	ggttttataaaacaaataatt	?????????????????????
+x3	0	ref2	6	30	9M4I13M	*	0	0	ttataaaacAAATaattaagtctaca	??????????????????????????
+x4	0	ref2	10	30	25M	*	0	0	CaaaTaattaagtctacagagcaac	?????????????????????????
+x5	0	ref2	12	30	24M	*	0	0	aaTaattaagtctacagagcaact	????????????????????????
+x6	0	ref2	14	30	23M	*	0	0	Taattaagtctacagagcaacta	???????????????????????
diff --git a/test-data/shrimp_cs_test1.csfasta b/test-data/shrimp_cs_test1.csfasta
new file mode 100644
index 0000000..f3e3ef8
--- /dev/null
+++ b/test-data/shrimp_cs_test1.csfasta
@@ -0,0 +1,5000 @@
+>2_14_26_F3,-1282216.0
+T011213122200221123032111221021210131332222101
+>2_14_192_F3,-1383225.3
+T110021221100310030120022032222111321022112223
+>2_14_233_F3,-1082751.1
+T011001332311121212312022310203312201132111223
+>2_14_294_F3,-687179.1
+T213012132300000021323212232103300033102330332
+>2_14_463_F3
+T132032030200202202003211302222202230022110222
+>2_14_578_F3
+T131013032310120222321211010130110221312110222
+>2_14_956_F3,-1625621.2,-1625360.0
+T210213030022120032001012021321220011232201231
+>2_14_988_F3,1687674.3
+T221202031310031102033002302330301301010023133
+>2_14_1028_F3,754444.2
+T112230301101101120201331111302110031102111321
+>2_14_1035_F3,-1570954.1
+T003033103303232110201102100032203301023110332
+>2_14_1157_F3
+T330302102023212332202300023010210001331011220
+>2_14_1185_F3
+T310110110113210213231123311333112011100111111
+>2_14_1256_F3
+T220332330033103230031220313312210032300011103
+>2_14_1452_F3,408767.1
+T300032023211022130002011021201330102122112133
+>2_14_1467_F3,1725735.1
+T002002011223202300213312213112203002031020300
+>2_14_1501_F3,-296824.0
+T232300003200003322211030002010000100202220111
+>2_15_37_F3,-831611.2
+T332102232121210322013113212312023121222213322
+>2_15_85_F3
+T213021013012303002332212012112221222112212222
+>2_15_455_F3
+T021111033122331023012312113232232212010121222
+>2_15_474_F3,861313.0
+T301130020200230322222222220200002112323223130
+>2_15_711_F3
+T110123132023222210122220013231212001010211310
+>2_15_728_F3,1727534.3
+T332301303200212322102330330101032222303230132
+>2_15_800_F3
+T311311202021333312003202011210112202323222222
+>2_15_913_F3,234081.0
+T000133010220201231210102121030231022310100300
+>2_15_922_F3
+T030210011320000021213202010022330022200221332
+>2_15_1344_F3,1441878.0
+T332010233223132213021210102102010032012311132
+>2_15_1457_F3,1814674.3
+T332130102020103032112122301011033022232300322
+>2_15_1654_F3,-685937.3
+T021101123300222011231311001111022223111303310
+>2_15_1694_F3
+T223113213133313213321223223222222222232212222
+>2_15_1870_F3
+T313131331322312222132123233231312122232212222
+>2_16_63_F3,1668299.1
+T303131033120202201213020323120023331033121300
+>2_16_78_F3
+T000211121303022003030023222233201120002201132
+>2_16_134_F3,1731243.3
+T002111200322221100012320021022112010022102302
+>2_16_136_F3,273653.2
+T021302220232322200021321011302020332331011303
+>2_16_184_F3,906220.1
+T021113301020231031012202112232212033232211222
+>2_16_208_F3,1586951.1
+T200111311132333233203003121211321322021301323
+>2_16_352_F3,-440913.3
+T202211102330120210333212230200322113322330322
+>2_16_384_F3,471462.2
+T300110020200133020123100133230332231112212222
+>2_16_388_F3,1736527.0
+T220113223002202022200002301210332321222212222
+>2_16_511_F3,-83281.0
+T302202101020121312112301223021310022302130120
+>2_16_583_F3
+T133212213000131222021302012130103231012110221
+>2_16_603_F3,67584.0
+T201232013201122131203021002300011313200111220
+>2_16_718_F3,-1557566.0
+T001233013301002003323331332031310120010100222
+>2_16_744_F3,109209.0
+T003130133013323012321020100123320102233012313
+>2_16_793_F3
+T313103331222221002110222203130031231112011222
+>2_16_902_F3
+T202223032112020202222023022222010102232022322
+>2_16_928_F3
+T313323103333032220021303211002220222200212322
+>2_16_946_F3,1842581.3
+T233222200101013032120210120330203220000333321
+>2_16_1093_F3
+T333113130033001221231333333130120202212012202
+>2_16_1165_F3,109943.1
+T023312020003001203012211213211103301322213233
+>2_16_1316_F3,-349764.0
+T323000112000202112010232201023320330213000302
+>2_16_1408_F3
+T002000213101010101232032230000200133302000100
+>2_16_1463_F3,728745.1
+T223301022333022233202002033100111300311302022
+>2_16_1563_F3,1967863.0
+T021302001311233233021200323122211012203011112
+>2_16_1600_F3,1757811.0
+T312132320301230220131230021202103230201321111
+>2_16_1634_F3,1008981.2
+T010111123102110122103101222312312322112101231
+>2_16_1659_F3
+T122221221322213022331311010021111221211312222
+>2_17_38_F3
+T132202222121330222012033212332122321232212322
+>2_17_52_F3,1613188.1
+T000110311321101321011012131301332221132200331
+>2_17_58_F3,1175988.1
+T001302332101120201233032222022021332220231312
+>2_17_290_F3
+T211231130220332102022223332023312232112211222
+>2_17_306_F3
+T122223122011121213331231231302322222222211220
+>2_17_422_F3,-674586.3
+T333301103001033221203000003300022022330223222
+>2_17_496_F3
+T332221212110102012312022213322311221122210322
+>2_17_517_F3
+T013011130212312100010022223002200130000201121
+>2_17_558_F3
+T033102123011100300010021010103220001231020220
+>2_17_635_F3
+T310111210021031230322112011121213313202312221
+>2_17_689_F3
+T123303121221301313003110322022330301231211222
+>2_17_692_F3,-37965.0
+T122301011200101012001110322021333301021310122
+>2_17_808_F3
+T212322100121122210221021221212112221122021222
+>2_17_854_F3,-757864.3
+T022003022332200030012020213222300123302011222
+>2_17_979_F3,1169971.0
+T033201201220003220311201022101102132202132322
+>2_17_1040_F3
+T330110020003123330120221333331121010011001211
+>2_17_1050_F3,637684.1
+T102030001303031320331210233002233130200212302
+>2_17_1099_F3
+T031201322333110311303210010110203231232311123
+>2_17_1124_F3
+T013021002100321130211300103112031120210100310
+>2_17_1142_F3,-1903625.0
+T112133102102012000212110313000032012112003321
+>2_17_1250_F3
+T122322313213032310002223110332310322202013102
+>2_17_1287_F3,1360760.1
+T313033033321320130021102022113332233200012121
+>2_17_1336_F3
+T320023220232232122020222212212122120220222222
+>2_17_1393_F3,-879512.1
+T002012302000000232302100020330130020122103333
+>2_17_1416_F3
+T323320012223311113211311111121111311113111221
+>2_17_1582_F3,1804825.2
+T033103102230120301030111212110201210312302220
+>2_17_1614_F3,-1674195.0
+T100230330123132302221212021233110231311212221
+>2_18_45_F3,-472247.0
+T022132320031303331200022302201212323212232302
+>2_18_49_F3,115167.3
+T013311303331010321010213023101302003130211322
+>2_18_99_F3,1936808.2
+T210101133220100121021011110111122020311122221
+>2_18_358_F3
+T223201023120233222001102231022230332202220222
+>2_18_538_F3
+T001201120322202322322023020130232200232020222
+>2_18_542_F3,1187596.1
+T001201120322202202322023002110033002220030221
+>2_18_600_F3,813204.0
+T302301210300103310120303000121203233230030220
+>2_18_618_F3,1729062.0
+T300213212001322020123212123031012023321003222
+>2_18_697_F3,-369193.0
+T000020010310132312121111210202102321010003102
+>2_18_706_F3,-373364.0
+T031303322323202300212220100220102300202011100
+>2_18_773_F3
+T322003132013300232232312333010230212232012322
+>2_18_833_F3
+T213133313133311312232323223212322122332212222
+>2_18_881_F3
+T032011122031200120031320012000110001200012102
+>2_18_887_F3,-1294634.0
+T103101132002312233001303011210002200000010100
+>2_18_896_F3,-727155.0
+T122132230333212121223323010022000222223111232
+>2_18_970_F3
+T321231320032111320031121032313113211232022122
+>2_18_1204_F3,1579914.0
+T232032102121203122100221133122332211020211133
+>2_18_1261_F3,-236852.2
+T131320201010230133003032332000000201302210110
+>2_18_1264_F3
+T333310032013100330103032101030112301000033100
+>2_18_1283_F3
+T112123301031222131021110313030321001211012102
+>2_18_1493_F3,1887476.0
+T022000012231012021303323032021223312102011130
+>2_18_1568_F3
+T113001212022301220223220223012331212212112202
+>2_18_1788_F3
+T331113331133312322132321121232312122222212222
+>2_19_141_F3
+T320200100210322310120012023222002330322220222
+>2_19_260_F3
+T122300202121231010012300023103313202322020222
+>2_19_361_F3,-1752120.3
+T210303230330213321132213233222130321203322202
+>2_19_464_F3
+T333223002301202032112023033300222312022233222
+>2_19_595_F3
+T022103002101201033110002110002200332231113321
+>2_19_662_F3
+T332303120331221020202222133231310322212202222
+>2_19_666_F3
+T333323210332011023333300133003330032212322222
+>2_19_720_F3
+T001203302201200012301301032202210201302212220
+>2_19_752_F3
+T231313133132113332133222231232112222322231222
+>2_19_921_F3,97278.0
+T110211111331230002301300021032301231200020330
+>2_19_947_F3,-448190.1
+T223232303300322130200231021020213221000133301
+>2_19_988_F3
+T203222331221133200332333322300201202312020320
+>2_19_1064_F3
+T111123131310311331221312233322120002221011232
+>2_19_1109_F3,1203608.1
+T220020023010211003010122322021003112320213222
+>2_19_1185_F3
+T310103131213233230211121312032112021200012110
+>2_19_1191_F3,1432999.3
+T020203302032311103302010033030020010101113312
+>2_19_1255_F3
+T020202012200112213301033011102312233202300120
+>2_19_1622_F3
+T221100210013223320231233301000010100100001233
+>2_19_1656_F3
+T010122303120202101031322002012010220111111220
+>2_19_1682_F3
+T120133212021221011321112012111121211212112220
+>2_19_1868_F3
+T233013110032312322232313013012230123213012331
+>2_19_1970_F3
+T033102111031120332303122312222312222132212222
+>2_20_57_F3
+T230102102111300202313300022021212112332222222
+>2_20_74_F3,224123.1
+T000001110200032002330021212210322321112211221
+>2_20_187_F3,151509.1
+T320110032102111001002210010232302301110122322
+>2_20_239_F3,708874.2
+T023320131013312300322013130130322310332311222
+>2_20_295_F3,481300.2
+T103310223200330313220000232332331201332212222
+>2_20_349_F3
+T330332201020132103021102021212120222202202222
+>2_20_354_F3
+T231231133030133123031210211232211301232312222
+>2_20_400_F3,545612.0
+T101231310103233013121100110223021123212201312
+>2_20_416_F3
+T330013333123201210323221222222122233232322222
+>2_20_631_F3,1739499.2
+T010120100001330100012121211131310312123001230
+>2_20_714_F3
+T310213301033121310232322012120212122332212221
+>2_20_734_F3
+T320313201133210330212301233022333231302211222
+>2_20_783_F3,1646545.0
+T033201021000112200132110001110212333110120122
+>2_20_912_F3
+T330211100223201330023122323003320302300112322
+>2_20_1000_F3,-721404.3
+T312220230022112220000131213210320003210320321
+>2_20_1155_F3
+T230133100022310333002312213222330211210212212
+>2_20_1359_F3,-1328721.2
+T323021210122232200213101230103312321002211310
+>2_20_1408_F3,-1310926.3
+T223021003031211312330113112220310110100001232
+>2_20_1562_F3
+T001102031001333200031000013002131021202013322
+>2_20_1663_F3
+T000123021131323332312011001001031110311100223
+>2_20_1687_F3
+T333133233312212311322223113212222321232212222
+>2_20_1740_F3
+T213313111333313312122122332232312222222212222
+>2_21_195_F3
+T030132102111101232130320120113012220213210222
+>2_21_330_F3
+T213303130331201310312122131232112212212112222
+>2_21_474_F3
+T300012202111003122033232200211122223221232222
+>2_21_678_F3,-102240.0
+T201302221012022020002003331222022212310332200
+>2_21_687_F3
+T013123121321003000013110121212313313131231312
+>2_21_767_F3,-890350.1
+T020231103010321130312321233001133213202312122
+>2_21_955_F3
+T231033230012002331103230133022220331330312300
+>2_21_1135_F3
+T133130023322333120330100113112121121211010223
+>2_21_1325_F3,7856.1
+T022022222010030120323312032310020023220100120
+>2_21_1386_F3
+T300311322203120122023220100002330120112210222
+>2_21_1397_F3
+T220201231302203302203120230230101301001000122
+>2_21_1541_F3,656729.0
+T222211011031123211021323012211202110002100133
+>2_21_1625_F3,-1640957.2
+T021312310133022101011032200112330200310000313
+>2_22_183_F3,613617.3
+T011113302132213012130310002323010333122221222
+>2_22_428_F3,1893387.2
+T023212223322220121201203021320003332101302330
+>2_22_673_F3,759463.1
+T000003112323302010001131120131122312022210222
+>2_22_727_F3,-1290204.2
+T300120030113212221110332012001123223331222222
+>2_22_778_F3
+T131013112101323113132323313332111122210211201
+>2_22_903_F3
+T331020000011021311122022120222200202012221121
+>2_22_1032_F3,68579.0
+T023031321320100002312000320101330210033000102
+>2_22_1253_F3,1507416.2
+T123000020212101233321033330101112111311311200
+>2_22_1289_F3,353981.0
+T023031313321030101231110122012120212300113100
+>2_22_1300_F3,1798529.0
+T231212200302023211032202213110001130210020100
+>2_22_1314_F3,1907717.0
+T032320222101101202130121010122202003102113313
+>2_22_1377_F3,628045.1
+T000233133032102012032123230000202110230122220
+>2_22_1415_F3,-553050.0
+T233200220313301113210330320123010331213011220
+>2_22_1507_F3,899477.0
+T013020030321120030011313203320212330130013330
+>2_22_1512_F3
+T012102010031010031101101011010311030103101212
+>2_22_1558_F3,-49062.2
+T202103230000333203331002033002130311102003301
+>2_22_1570_F3,-1030692.0
+T111021230002010220333030201213300012020311212
+>2_22_1603_F3
+T310222101301023102123311102022220001111020322
+>2_22_1694_F3
+T213113311122333222121322322222322122232212222
+>2_22_1756_F3
+T313133211322302212111111331222212221332212222
+>2_22_1986_F3
+T002010201000210222222302110100000010030110321
+>2_23_139_F3,731256.0
+T023112320203132202123311323013210110312030202
+>2_23_214_F3,719486.0
+T202212231213223032031031011030313023131112322
+>2_23_466_F3,1867800.3
+T033323013321222032211113122200122322011330212
+>2_23_547_F3
+T302201103220011330102113122100201202212000220
+>2_23_616_F3
+T003121202112333200301010030022221000011210322
+>2_23_619_F3
+T000122002222013012232012020022221202012223222
+>2_23_695_F3
+T021102310323123222203102132031121101212312102
+>2_23_702_F3
+T130233311133202233021201021002210122212012222
+>2_23_747_F3,-916145.1
+T122202121002230020203132231322111223221133203
+>2_23_1356_F3
+T310121232102000000012122332022310111112233132
+>2_23_1365_F3
+T223000303111232022312313112112310333010101111
+>2_23_1496_F3
+T221312013003203300100013210000101111210212222
+>2_23_1517_F3,-837837.0
+T031020223302321133121320333311022110210112212
+>2_23_1523_F3
+T012110112111333113312103220121113211121121110
+>2_23_1742_F3
+T311133231333113232333323333212312322222211222
+>2_23_1745_F3
+T321112133132311313333321313222122122222212222
+>2_23_1760_F3
+T311222321021000313112110130100211001112011222
+>2_23_1783_F3
+T313312311312112212333331221222312223312212222
+>2_24_34_F3,-1926132.3
+T132210123231203201121022230332202231222212222
+>2_24_161_F3
+T330033333001332333230033310012212222202022222
+>2_24_320_F3,-901444.1
+T303011323121200012120130022113333323113011222
+>2_24_438_F3,1938595.1
+T001210122103303231212121001220110120122013201
+>2_24_544_F3,-1957478.0
+T003112200003012112310210031121010202211020230
+>2_24_930_F3
+T331313113113111112121323121222312221222212222
+>2_24_987_F3,1213761.0
+T201313202203231011001331321203122031210100333
+>2_24_1004_F3
+T032301230220201032211022201220130110212203220
+>2_24_1208_F3
+T330101233033110310203320113032330001300023122
+>2_24_1249_F3,1964903.0
+T311202030021002032002122332230021133130130111
+>2_24_1389_F3,-568554.1
+T323302013210010320203100101003320223303020110
+>2_24_1398_F3,-1148490.3
+T320301032122300002323302233232011302102000100
+>2_24_1407_F3,-1603549.3
+T001033203010011321030013013120221010230000132
+>2_24_1670_F3,-458921.0
+T201122013221323131201212332223210123221321232
+>2_24_1696_F3
+T233113111322332221331321113222122132222222222
+>2_24_1737_F3
+T311111123131313333333321333232112222332212222
+>2_24_1977_F3,-193418.2
+T030222210002313200022102303120220100302103322
+>2_25_148_F3,-67940.2
+T003102122322222102103130032222031223130001200
+>2_25_202_F3
+T130100120323110232013333012212122221222013322
+>2_25_246_F3
+T303000002223000221323321202231022223300232212
+>2_25_347_F3
+T322113122223203210021311031022120322133222222
+>2_25_365_F3
+T131022011130100201121322210010202010113022200
+>2_25_417_F3,687721.0
+T033003313210222313320002121133023101033200222
+>2_25_565_F3
+T333320113303232103221013103202333322132221302
+>2_25_634_F3
+T123300030333231110211022311022312122200211222
+>2_25_654_F3,-383408.0
+T022220012001000130233130110032202110303033120
+>2_25_951_F3,389324.1
+T220001230002130122133022312132312210201203323
+>2_25_1075_F3,-1378002.2
+T001121210303021301133221210012310110201221223
+>2_25_1128_F3,-1203223.1
+T122122020200003321301123320003013132220013122
+>2_25_1229_F3,1778931.0
+T003321101102012330131133230312021231303231222
+>2_25_1236_F3
+T331001313022330323333210321120110221320031230
+>2_25_1478_F3
+T301200101012031220002102023030210000303022332
+>2_25_1601_F3,-1846817.3
+T321122012200221020222331221021310221112200322
+>2_25_1608_F3
+T123003013020212103303311133112121122112111322
+>2_25_1748_F3
+T313112313112113212132322232221112122232312222
+>2_25_1764_F3
+T321031313133312212111122033322122222212222222
+>2_25_2029_F3
+T322133333332323321123122322322112221232212222
+>2_26_80_F3
+T020201212102210013231202232231132222112211222
+>2_26_270_F3
+T113101310032312313213021030011112222112112222
+>2_26_396_F3
+T210210213222222322210221023000012121001230222
+>2_26_429_F3
+T001103320332020122201223020322230312102013330
+>2_26_734_F3
+T000100020233020310022031212202010321102212320
+>2_26_854_F3
+T332222101323221231001310333320213322202122222
+>2_26_923_F3
+T130323321233200332021210212100111003110012220
+>2_26_1053_F3
+T330002332002100200220002023202210220000100322
+>2_26_1132_F3
+T130132000203102310131100022010110132211012110
+>2_26_1137_F3,469876.1
+T212030011313101302202111032202313020130000223
+>2_26_1219_F3,251319.3
+T323030230213103212032121100200323311300000322
+>2_26_1297_F3,793434.3
+T312013300212012022001122333003301110313232220
+>2_26_1404_F3,-1155688.3
+T133231121002003213100323013100100001012010200
+>2_26_1433_F3,-380276.1
+T022221000021320213020022022301012033110211303
+>2_26_1665_F3
+T001202001301233230012000031103000210002210302
+>2_26_1674_F3,-562960.2
+T201010230001333122301002033023131212210021230
+>2_26_1740_F3
+T213112133313233122223221313222332221232331222
+>2_26_1802_F3
+T333101111122113331132122311222312222222312222
+>2_26_2004_F3
+T331113133221321212302331323313323221132232222
+>2_27_39_F3
+T311030003123230133322321123332331010100321320
+>2_27_68_F3
+T010110320311133213311121121100211220122311222
+>2_27_124_F3,-1902089.1
+T000132123020123120222022100221302113311123110
+>2_27_144_F3,167563.2
+T102100223313213102100222312213232122003311130
+>2_27_185_F3,-298336.2
+T002121010010113300123022110013002031020113222
+>2_27_229_F3,-657419.0
+T333031230013210101010031323222233002122300222
+>2_27_401_F3,-1283735.1
+T012003012333121020121321013200101120130323212
+>2_27_432_F3,1480921.1
+T011303303332021203301122220112230212102303221
+>2_27_532_F3
+T212223320313233110223113221133112211202212220
+>2_27_592_F3,-1719375.3
+T210022000023233102021322221002033210332100220
+>2_27_624_F3
+T031200303211331130233221210000110121310010320
+>2_27_768_F3
+T111013310212112313112333231032220322310311222
+>2_27_1008_F3
+T120323301333322232032323113322320321302212322
+>2_27_1124_F3,996346.1
+T320222311223322102330021121233021122302200112
+>2_27_1261_F3
+T130310221101000111022111112032110121202211222
+>2_27_1313_F3
+T320202111121110102130102000012000030122001210
+>2_27_1418_F3,-469148.0
+T332221122101300212303311213300201322222123321
+>2_27_1503_F3
+T230003202021030212002000231301300302000112120
+>2_27_1537_F3,923182.1
+T022303322312301311231222332121301121112010120
+>2_27_1544_F3,-939082.1
+T112302333012032330121000320113120001202112300
+>2_27_1566_F3,93300.0,23173.0
+T132213301003202012313201021003202132200310112
+>2_27_1631_F3,415570.0
+T212130001222020021200301311001000121102310332
+>2_27_1775_F3
+T321113331321331222333221211232122221232212222
+>2_28_101_F3
+T001222030332100022320330311022131302132210323
+>2_28_104_F3,1196921.2
+T330022213211101300010110311022230331122301222
+>2_28_176_F3,-1396265.1
+T033110113202030332001032203120110231022132222
+>2_28_281_F3,97669.2
+T031212003001100020012102202221022222112000222
+>2_28_314_F3,1733115.0
+T301221203120021221002332032300123201032022320
+>2_28_408_F3
+T023100130202220022003100212002212200102211222
+>2_28_600_F3
+T231133131011211231101321321102222131223211322
+>2_28_657_F3
+T011200012001000010033032111002101210012003220
+>2_28_666_F3,766032.2
+T222213013033330112200203333000000103223310123
+>2_28_699_F3
+T211230232103121222123120120300122232112312203
+>2_28_736_F3
+T310032320223022210121331112110211301101232330
+>2_28_896_F3
+T333313302123220230103323233110230113301031133
+>2_28_910_F3
+T313100131223123233123330322313133312132012300
+>2_28_938_F3
+T330103312100310121023220020010010221002000222
+>2_28_1145_F3
+T332003231212032223020212331231313033012210222
+>2_28_1447_F3
+T320103321123011130012120332002331012212112220
+>2_28_1531_F3,-164632.2
+T231030210323210230002101122000210121012111022
+>2_28_1586_F3
+T000210011320020002312233122031020310102300221
+>2_28_1660_F3
+T000301000001212000020133021000300203312031322
+>2_28_1757_F3
+T323000100032100322022220222000320000232032320
+>2_29_131_F3,-355600.0
+T221211101130331022321021031010021032000102300
+>2_29_157_F3
+T002001022311220312010330213302012120002210222
+>2_29_238_F3,225666.0
+T303011322333103130103122002332202333022210100
+>2_29_252_F3
+T131010002202013131202201110002030013300122332
+>2_29_553_F3,-353824.0
+T012113030101101020313233200222023133133101202
+>2_29_690_F3,1467195.0
+T123330131002133102212332022103301120102210120
+>2_29_695_F3,1689990.3
+T322322323123111202103020231002300011020100122
+>2_29_773_F3
+T333203301333222220321320133322312322212222222
+>2_29_900_F3,-1555853.3
+T231303333003110201220303202300110002320332220
+>2_29_1016_F3,47678.1
+T010221023103311011130120310323313230122101222
+>2_29_1221_F3,-360838.0
+T120030011213012111212110102002220012001020300
+>2_29_1236_F3,1636910.2
+T010321001022203301120220221010112021120011112
+>2_29_1502_F3
+T030000002001200010000000000000000101000000120
+>2_29_1605_F3
+T320113123201110123122101111111310322112201122
+>2_29_1613_F3,1087606.0
+T201003202322010100300301103112103123311231110
+>2_29_1615_F3
+T213201231121010220021001003112311113012021300
+>2_29_1735_F3
+T313113130312223132121321333222312321232212222
+>2_30_141_F3,-688328.1
+T221221130203012022012000002003120101132003322
+>2_30_203_F3,675179.1
+T120300323102331101222100212100132232312102222
+>2_30_341_F3
+T021331232303310322203020022021322120221322221
+>2_30_518_F3
+T313033133122320213210321022201222220112213232
+>2_30_731_F3,773453.1
+T000102112233210012120330012002000100112233220
+>2_30_788_F3,1677437.1
+T310112010010010232313010211112101320110123333
+>2_30_1078_F3,701007.1
+T020212321223012020203020133031010101233112221
+>2_30_1088_F3,-287936.1
+T220101232230323330121212132230221020222202320
+>2_30_1094_F3,1313139.0
+T022300033320201032020111322112101110332101231
+>2_30_1160_F3
+T331021020002202103221200210000030020300210302
+>2_30_1201_F3,-1537174.0
+T331320012031022312231312100310201322100300231
+>2_30_1322_F3
+T011131101121110001100022311210112020122201012
+>2_30_1431_F3
+T022123323021112211003121232212313302113222310
+>2_30_1473_F3,1331235.1
+T023312133013200200132321013123230301333013113
+>2_30_1556_F3
+T302001030012012030131302130200100130012011222
+>2_30_1589_F3
+T330130121302020032322230220002000221011310013
+>2_30_1644_F3
+T010130110231021030320100123002110101101001232
+>2_30_1647_F3,1606539.3
+T200112311232023121321130210302332121103313103
+>2_30_1747_F3
+T013122133321233331333323123222332323232312222
+>2_30_1802_F3
+T001300323332232302233323211213312113332232222
+>2_30_1964_F3
+T131302230133112233001222213210132122322122222
+>2_31_153_F3
+T003302100000223320000103001332312321012311222
+>2_31_220_F3,-1721041.1
+T120011210021223311221323000203100212312322222
+>2_31_277_F3,-1652498.1
+T210212331220110213220231322211200302331120322
+>2_31_294_F3,960332.3
+T032130210012223330002102221231011222132302322
+>2_31_374_F3
+T101302310321223122310102211302110022113002300
+>2_31_435_F3,421917.0,327876.0,88595.0,17798.0
+T013232011101001202002030021221211302112321322
+>2_31_444_F3
+T120313003220211220131221201322022220112012222
+>2_31_574_F3,732748.0
+T230131320011011030002323023130213321300010103
+>2_31_585_F3
+T201123100022203200203332000011233332100020123
+>2_31_671_F3,-1906397.0
+T210103322120122022210132311202210201003220122
+>2_31_811_F3,-1172501.1
+T011022002223121221100022003133113200113211232
+>2_31_846_F3,1360859.0
+T021332112333020200110013222312200221131001220
+>2_31_892_F3
+T330213130323110132101321333221313012101312220
+>2_31_974_F3,-955042.2
+T033000120330102110210231001111232021001010220
+>2_31_1056_F3,1019201.2
+T000312013212010211121202202202212223310130323
+>2_31_1085_F3
+T001122310020221033300010232000220020210002230
+>2_31_1129_F3,-1612979.1
+T233033303100323321032222001310012101203032320
+>2_31_1173_F3,1747035.0
+T322223031123233220322011001100011322001122312
+>2_31_1363_F3
+T020221231203102122011102302002000020302200100
+>2_31_1368_F3,-1034855.3
+T310012233010332231213222032202231022212000121
+>2_31_1696_F3
+T231313211323213212322223231222212123222212222
+>2_32_250_F3,1455309.1
+T322003212302031202221012213202220110100032133
+>2_32_389_F3,1211025.3
+T200212222233220103223200001300123222232312123
+>2_32_559_F3
+T223201231313213101012023031233113221212212222
+>2_32_700_F3
+T320213232012300100220300111320021221312230222
+>2_32_740_F3
+T230213100233211330101330022300123012302013221
+>2_32_747_F3
+T022021031230221230203301122232002120022303322
+>2_32_804_F3
+T110101103021031233111321011332110321202012222
+>2_32_887_F3
+T322033212123333233211221231230323333300322220
+>2_32_898_F3,1213685.3
+T321213301021210230201100003012230011102311300
+>2_32_987_F3
+T011211033201332011000020211000010222103311100
+>2_32_1008_F3
+T121013002200121132312320112000131231212112221
+>2_32_1066_F3,132268.0
+T302212102112020031321022230122301101112111222
+>2_32_1427_F3
+T123133310202203031333111231032300202200220032
+>2_32_1549_F3,-612143.0
+T002223123201200201323103031032022100303210202
+>2_32_1554_F3,1629994.1
+T302231030110312022131322132102233130011010122
+>2_32_1593_F3,-1712418.1
+T023120020002231212000320010020032200012000100
+>2_32_1685_F3
+T333133313333211112232223321222122123232221222
+>2_32_1759_F3
+T323121111113110313321322222220322222232212222
+>2_32_1765_F3,1683529.1
+T320211301023313110031120022213201201012203220
+>2_32_1788_F3
+T013112333132312212133332112222312223222212222
+>2_32_1805_F3
+T202320323130133300220300211313320213302333322
+>2_33_246_F3
+T232103120302103322201122031222212122122212222
+>2_33_291_F3,1943586.0
+T001131010013312200031212311001210321212112222
+>2_33_588_F3,2006372.2
+T122233110321121130120330310331133332122001200
+>2_33_872_F3,1816211.2
+T202220111031223320310322011220110300122001222
+>2_33_948_F3,1512878.1
+T122002001120313201022230110013120323002210300
+>2_33_997_F3
+T330123331323020310211222032110110031213211222
+>2_33_1140_F3
+T013203221002022100200323111321311123223211110
+>2_33_1331_F3
+T130113301211303231332321223023120202213022121
+>2_33_1420_F3,1114741.0
+T112200133203311222333102112012020103210100022
+>2_33_1612_F3
+T331211303323320023311032110233300301102010300
+>2_33_1616_F3
+T220123333332303002021032022010130200012022300
+>2_33_1658_F3
+T010332201201210132001133321102313330130311322
+>2_33_1934_F3,-1808343.2
+T320023000010332000220310322033210100201330132
+>2_33_1971_F3,1785111.1
+T120231202202031032301022310021231211332113320
+>2_34_107_F3
+T321022033222010211122230012102132121202202222
+>2_34_150_F3
+T120222133233323232112023030332122233232210222
+>2_34_269_F3,1815062.0,-2000834.2
+T112233020012212110021032001100003311123221101
+>2_34_296_F3
+T032101023321300300032112221211011222312102222
+>2_34_321_F3,772647.1
+T202312103011303120201123011020122301122000210
+>2_34_865_F3,73625.0
+T311012131023313120121132331003131221100120120
+>2_34_876_F3
+T001202312001332330021120212212001211220001220
+>2_34_906_F3,-1647138.3
+T331100102003100232230133103032310210102013300
+>2_34_1127_F3,-188355.1
+T233332213202011210002131323023220220002223322
+>2_34_1143_F3,-1050655.2
+T010320210002323000200321113322111213110011100
+>2_34_1308_F3
+T300010111333221021202302323330323031310110010
+>2_34_1353_F3
+T000210210212323101322000213002200101012310222
+>2_34_1431_F3
+T231213321032330022113121123012330301321322220
+>2_34_1457_F3,-1708135.2
+T001000312230100212120211200101021000310101222
+>2_34_1468_F3,-1663177.3
+T220120232101003101003333000000300312011023301
+>2_34_1542_F3,-1288882.3
+T210000112132131311022013203011200231211322222
+>2_34_1579_F3,-1544975.0
+T012001013012300123132120321331202031103111200
+>2_34_1633_F3
+T213302020122220202232222222222222222222222222
+>2_34_1703_F3
+T313113311113313232122321211232312323132312222
+>2_34_1793_F3
+T233111111132213332122322113222112222232212222
+>2_34_2032_F3,-1063420.2
+T311212133132101312303011013301011111111111212
+>2_35_54_F3
+T230112113121111212131211112112212221112112222
+>2_35_129_F3
+T321310120132313322321121111222311111112212222
+>2_35_135_F3
+T013220323300223213210222231212312223112211222
+>2_35_183_F3,163273.0
+T300310012123011222211111022310103322301220222
+>2_35_694_F3
+T323223330223211312013321332333330122222212222
+>2_35_801_F3,-1018302.3
+T110101101021031233110021010030112311110011311
+>2_35_1282_F3,949191.0
+T002322013010233332010101212103210213032110332
+>2_35_1481_F3,-1893137.1
+T023130130202301301323001201220112102022002300
+>2_35_1557_F3,337503.2
+T330030110212101100030102312011323113312132221
+>2_35_1638_F3
+T321102102122311230201331321032320322232222222
+>2_35_1692_F3
+T233111111122232232321323113222312322322312222
+>2_35_1735_F3
+T331132111333312332321121223231312222112222222
+>2_35_1746_F3
+T032123132301213310312030210010330303122011122
+>2_35_1927_F3
+T013123111021112312133232132222312121222212222
+>2_35_1959_F3,-983136.3
+T112120132200301130211202112220110001102211222
+>2_35_1984_F3,-471911.0
+T233100320011222203202031220232002303330023310
+>2_36_60_F3
+T012113221023210033113321012300313232122321222
+>2_36_118_F3
+T331202133033121211221323131222313123212222222
+>2_36_326_F3,-702330.0
+T002000232323011021202003301322202132011200213
+>2_36_345_F3,1432143.3
+T001023033010313011012313100321102021112210222
+>2_36_487_F3,-192137.2
+T200331122210000132221313120331020110332202322
+>2_36_627_F3,1225560.2
+T300010103202022103001223330203101120202100302
+>2_36_631_F3,1968065.0
+T210202113212010211132231111210121121103023100
+>2_36_743_F3,1921770.0
+T233331000231201330002102102130002132320103221
+>2_36_885_F3,1939660.0
+T320123230121011230101021231221303303032302300
+>2_36_909_F3
+T332323113303201232201303003332210201213011222
+>2_36_1132_F3
+T130333021303223000231300010220122231302210222
+>2_36_1162_F3,-1013429.1
+T202322301010123001330202200100033011001203222
+>2_36_1275_F3,271802.3
+T223022332131001130033000123133023033002020300
+>2_36_1403_F3,-1244995.1
+T012203013000123012123311022020230221211010222
+>2_36_1572_F3
+T220212102003221310211311013020200211100013300
+>2_36_1598_F3,-514874.1
+T120102101003222233012012101133023230021321233
+>2_36_1654_F3
+T001123123003003332000003300113231231011320333
+>2_36_1668_F3
+T330313023122211202001332010001021033110102100
+>2_36_1804_F3
+T201123101232013300113103311011000103112111110
+>2_36_1807_F3,64073.2
+T331300201210223120111100110231300203312112020
+>2_37_233_F3,306970.3
+T322222230202233130101012032203322121212132222
+>2_37_309_F3
+T011201120321122202100131201233113330112211222
+>2_37_377_F3
+T301002323112321230201103011233210221102212222
+>2_37_411_F3,649341.0
+T310121330000212000232120110212000210010202113
+>2_37_534_F3,-274783.0
+T000213203233333203131321001022110223002200320
+>2_37_593_F3
+T320211120033023333033322312200331223202321322
+>2_37_748_F3,521482.2
+T312220311001213101123200322000101121012311222
+>2_37_926_F3,158423.2
+T300120000103330203102311122102200201000000222
+>2_37_942_F3,-754202.0
+T030010122011333222313322032101201113021230103
+>2_37_1073_F3,31528.0
+T311023021121021001330103211312010031122201222
+>2_37_1110_F3,-1491008.0
+T201000033323201103210011111201223100122120113
+>2_37_1169_F3
+T200313331023033130332311031130310322201222231
+>2_37_1175_F3
+T200101212121122102002012010100010001102031110
+>2_37_1309_F3
+T000132313312122320201200013302130031110310220
+>2_37_1448_F3
+T330302100121000022232220020023112300233111330
+>2_37_1537_F3,1803710.3
+T000121132103332100302211201000001120013322211
+>2_37_1595_F3
+T022120131203222023020103123032032221231312132
+>2_38_100_F3,1218347.0
+T013132133101023221233233133112103101221110330
+>2_38_217_F3,-274116.1
+T332211221321020121313122232020212132222322222
+>2_38_252_F3
+T221001302121232022022022221222222220212011221
+>2_38_321_F3
+T002101100021302122210003011222112221122210222
+>2_38_391_F3,-1215434.0
+T232231033321032220130200223203021100213321222
+>2_38_418_F3,927607.0
+T010310032011020210331013212202301213133221300
+>2_38_430_F3
+T232021102121021110101201112332111313111212220
+>2_38_545_F3
+T022300131000010032121202030122022213212030202
+>2_38_550_F3
+T232313123323011323301121322222111303102312100
+>2_38_681_F3,652716.3
+T110303212211011200222031220122130201101223200
+>2_38_812_F3
+T231213032230222022201223212200000202022033222
+>2_38_867_F3
+T312313122121203123321233223013110211102222221
+>2_38_996_F3
+T121213123112102313103330232100120232202112221
+>2_38_1013_F3,1334707.2
+T021032222000020313331103022033011020132301121
+>2_38_1213_F3,1073325.1
+T012230323022131031330210000221020300032220233
+>2_38_1221_F3,-1414976.2
+T012033002202233302121332123222222202010033323
+>2_38_1345_F3,-1868690.1
+T003102123010203033002303233013202330013121132
+>2_38_1378_F3,-46038.0
+T012021201120123011200131230133303100131001310
+>2_38_1662_F3,-72197.2
+T023311302312301111022023231300221101020011120
+>2_38_1785_F3
+T212212000012213311130102212000310222102211222
+>2_39_42_F3
+T033100113303213232312222310222212222222221222
+>2_39_193_F3,-283503.0
+T113311310121031102312030121030112310102221101
+>2_39_227_F3,-1664949.3
+T011031010322330012001210010201030311102021222
+>2_39_372_F3,-58450.3
+T113023202112033322033321330322102201212210222
+>2_39_483_F3,-511837.1
+T210003303002200101212121301302132230002121221
+>2_39_699_F3
+T132032313231121313131300212001113102313312322
+>2_39_836_F3,-355833.1
+T031331101313210220200311201321020331113223300
+>2_39_873_F3
+T203110101311303332222331013330111102201112211
+>2_39_966_F3
+T310121123323310133313330121101213023322212222
+>2_39_971_F3
+T313111300121000313011102222113011311111111312
+>2_39_1101_F3,965539.3
+T130120121233213303210101322022013230000111111
+>2_39_1245_F3
+T010103021301220100020001010110110121112210222
+>2_39_1272_F3,1085078.1
+T023301321333213303223122211200001200222011220
+>2_39_1395_F3,-774508.1
+T110122101113013312332302002002120011202312222
+>2_39_1428_F3,-1552075.0
+T000133022122111211030110211033031123202320221
+>2_39_1469_F3,1499620.1
+T123323010233210220101011010322330222021200122
+>2_39_1481_F3,-141912.1
+T020202011210122102332303230000011010103113322
+>2_39_1520_F3
+T000000012110210101022010031100010101110210321
+>2_39_1674_F3
+T311102122211331113000011111002312130111021012
+>2_39_1757_F3
+T313133113321132112323131132220132222322212222
+>2_40_34_F3,1768515.1
+T310212103303010201230031012132021311222221222
+>2_40_53_F3,-823139.2
+T210102030212131022112011012311012231333103211
+>2_40_151_F3
+T300221201011023302221222122221112211112212222
+>2_40_177_F3,35183.2
+T210312022031110221000230311011000211122321223
+>2_40_220_F3,124031.2
+T010002323202110213010222012301230311230321222
+>2_40_282_F3,1848077.1
+T202000212103002022301003200332010001021313311
+>2_40_466_F3
+T231100301112222200003332130211332132222311223
+>2_40_525_F3,-216251.3
+T012121320022121011321001312231302222132211223
+>2_40_668_F3
+T112211321012012023121222210121122331320113101
+>2_40_690_F3,419167.0
+T001032001133201212100112200032330201200032201
+>2_40_728_F3,279164.0
+T002311111022330220331130000310133301321021223
+>2_40_828_F3
+T102022010310000110123300110002110001000012220
+>2_40_1151_F3
+T310212320333022233203200212232313002232033230
+>2_40_1313_F3,181346.1
+T331212330031120312231113010130100000113202101
+>2_40_1354_F3,1336299.3
+T330000210122120201012122031001010012203202100
+>2_40_1370_F3
+T010322032023223320130101011033010121112211222
+>2_40_1552_F3
+T230003111212212112023223133123112311233021130
+>2_40_1618_F3,-1818620.3
+T003123123101303120010112021301123111032120322
+>2_40_1646_F3,-1579951.1
+T220112233221321122001221302121201230231111213
+>2_40_1652_F3
+T031122103102113231012310233113132221300223223
+>2_40_1659_F3
+T011000202011002122121032310020230221113011310
+>2_40_1741_F3
+T223103131212321232322223113222222221232312222
+>2_41_109_F3,-1620493.2
+T303111320033133002323122101223211013023100220
+>2_41_223_F3
+T010101013220111123011200113311123301122311230
+>2_41_411_F3,1718372.1,1356888.1,-1671474.1,-1471842.1,-972534.1
+T133120011133120012330113022202230001120211122
+>2_41_419_F3
+T000302231021130221221113222220213223133222322
+>2_41_596_F3,168753.2
+T320101122031012300031020312023230223032300323
+>2_41_618_F3,1922237.0
+T020212003101111230301200002312200211132032222
+>2_41_907_F3
+T330123300223223332323333133232310222213322322
+>2_41_913_F3,1345020.0
+T022113301202302312203302033210333220222230120
+>2_41_923_F3,-1219011.1
+T120220232220120032202100001323200031102101120
+>2_41_1033_F3
+T332301101332333332231333032110211122322022220
+>2_41_1105_F3,-875791.3
+T001230021101311103311102321322212003121130323
+>2_41_1190_F3
+T311323133232321213321122322222321102302212222
+>2_41_1327_F3,1859754.3
+T202022313201030230321010220200221030313123233
+>2_41_1558_F3,-386623.0
+T322131000001013022101011010100101002210002223
+>2_41_1607_F3,-1286925.0
+T300110232222103332310123110330101120013010332
+>2_41_1678_F3,-589443.1
+T000232231231302202212122321020211230200021113
+>2_41_1686_F3
+T313332311332212111112132323212222222232221222
+>2_41_1969_F3,458424.2
+T000201022231100032310012330333121121003003002
+>2_42_55_F3,-13543.2
+T002130010203321302231021113212212322323002201
+>2_42_113_F3,-101004.0
+T211113320222233012330032322000202232332132222
+>2_42_124_F3,-250693.0
+T232122202201033233032310021122013203211102312
+>2_42_198_F3,-1175811.0
+T032202312011200210032032000031012000212032122
+>2_42_274_F3,-408497.0
+T032112312212020202012003303221120003310021212
+>2_42_316_F3
+T210300020222031200203101300202023003000220322
+>2_42_339_F3,-1671137.0
+T133011323132121222300023011101101001312313223
+>2_42_628_F3,830476.1
+T303013202221330002131012300133003332230003223
+>2_42_977_F3,1809909.0
+T303322113223102121011333310020321021211013221
+>2_42_1024_F3
+T121213203033232112211211321110213102300212201
+>2_42_1344_F3,511039.2
+T023102332022020321021112223110023111012211330
+>2_42_1407_F3,1761811.1
+T232333000022212310022301010133121331111000110
+>2_42_1410_F3
+T230332133023123210023000002230111330112000222
+>2_42_1485_F3,1734125.0
+T200230013200032100032212112333213202130003301
+>2_42_1507_F3,-1919312.3
+T331032223213112202233020333110211001210212222
+>2_42_1540_F3
+T120303122200031133200003212131202202222023220
+>2_43_72_F3,79300.2
+T001312230000112111333302313013323010102312102
+>2_43_264_F3
+T332203103011012002000231312312220110220203322
+>2_43_293_F3
+T021313221332311002111221122212322122212212222
+>2_43_395_F3,-1340648.0
+T000201011000113131032301003011021121112200122
+>2_43_450_F3,-1226909.1
+T331103301200101010213132230203322131001222222
+>2_43_723_F3,429029.0
+T032130200200202003211302231202210023310232122
+>2_43_983_F3,1549207.2
+T102120222200023330121230120221123031210113201
+>2_43_1097_F3,1691339.2
+T322032322332101020001020011101011020202112130
+>2_43_1133_F3
+T010123320220210333221033100030010021102011120
+>2_43_1137_F3,-759261.3
+T302321200120202123323203300003020003322113320
+>2_43_1165_F3,-882677.2
+T112200201212002233222202132332200200101010111
+>2_43_1308_F3,-189366.0
+T113322332201211032112022301310132031222300210
+>2_43_1320_F3,1375600.0
+T200212201211232203202130132012300103202201133
+>2_43_1480_F3
+T020120320233231202000013101020031030200010300
+>2_43_1525_F3,1786638.3
+T133113210300010310301033112001112231312010330
+>2_43_1531_F3,-236379.1
+T313302211202000310212222100212132322231200132
+>2_43_1964_F3
+T330020213100231212203322023032113323221000302
+>2_44_129_F3
+T201212023232221321102001222131002232212110220
+>2_44_183_F3,-1277514.3
+T213213203130210130213211112001112221222102222
+>2_44_247_F3,-1206830.2
+T012201121123201123031103321230313101123211322
+>2_44_326_F3,1360293.2
+T223012031021002221002200322210221003230022333
+>2_44_357_F3
+T322220031123220200301323313320000100001302230
+>2_44_521_F3,-1838630.3
+T022212011213210012002313312202333010332023320
+>2_44_595_F3
+T020210102111320123020220000023100222022200131
+>2_44_645_F3,-1010603.0
+T310122301202032010101123011200031133201310133
+>2_44_652_F3,1290196.0
+T003002100021230101133230213110132221202010330
+>2_44_667_F3,1952609.0
+T202213022010130203302302220130213332122321122
+>2_44_720_F3,-52755.3
+T312212112212012133022212022220330320300023222
+>2_44_754_F3
+T032033302113202210023303122031223213200112222
+>2_44_1037_F3
+T312323213233132333013332133231121131212312211
+>2_44_1382_F3,-277061.0
+T103130220221120321203000012213221323223303203
+>2_44_1605_F3,1592475.1
+T113232011310020332301220111112101301211012231
+>2_44_1653_F3,-1318454.1
+T232122113100212232012322231331333210332013330
+>2_44_1658_F3
+T031110113031202222122322131333312121132223222
+>2_44_1971_F3
+T230200000122100002001112001023121231011112231
+>2_45_48_F3,-803866.0
+T110000221202230201112132321322020221213312211
+>2_45_229_F3,-683883.2
+T330213322120210211310322022022223333120312203
+>2_45_269_F3
+T130220211121032012213213200230102221112110221
+>2_45_633_F3,1796155.3
+T023233233233121230130002232113032203002033223
+>2_45_958_F3,-1761896.0
+T233212000120200103221010012011023221010133210
+>2_45_1005_F3
+T033103101012000233312330232213323101200011310
+>2_45_1009_F3
+T033220012112200222222120000002220020322220122
+>2_45_1053_F3,-1216028.0
+T002233023121210022111212101001222101321000322
+>2_45_1152_F3,1729006.1
+T012102100321123221200203011111310321120333200
+>2_45_1217_F3
+T010112303122110122000100022230211122102011222
+>2_45_1222_F3,598712.1
+T132010103313301010021120101102330021210012301
+>2_45_1562_F3,-250170.3
+T331010012132022120120011220112011130112002222
+>2_45_1746_F3
+T313133113031110331131323113222312132222222222
+>2_45_1781_F3
+T313112102011113222233233122220321111213112212
+>2_46_170_F3
+T110033113111021212122111111232212221112211222
+>2_46_280_F3
+T210231022302001011312231310323220232222122322
+>2_46_316_F3,-1773731.2
+T233311131330220121220311023121013033203011322
+>2_46_567_F3
+T031221112031032232311100313012332221322012220
+>2_46_748_F3,-953985.1
+T002301033022113131002112120011220110231000220
+>2_46_771_F3,-1864420.0
+T023012131010233010011001102013110031303100313
+>2_46_898_F3
+T000201021011220020010203130232330211022211120
+>2_46_927_F3,229713.2
+T211130311302003303202212310132100220330010122
+>2_46_935_F3
+T113323231212132331123303220032330202221222232
+>2_46_1269_F3
+T230011332321301330013212011130223301212012322
+>2_46_1287_F3
+T201123133023330102203322123000111312110020100
+>2_46_1331_F3
+T301201200230013013203200212100120301210210222
+>2_46_1356_F3
+T002012100130222010201130323113210312210000201
+>2_46_1454_F3,409701.3
+T220122010102223032112020330201120323113203110
+>2_46_1483_F3,-383378.2
+T300110303033020020330000202013201012233003300
+>2_46_1536_F3,181313.2
+T201301013112110223210103212330111101212010121
+>2_46_1566_F3,-1340723.3
+T220102220202210130030312002302020102000212223
+>2_46_1661_F3,1789025.0
+T003120121031202022132310102233011112131031102
+>2_47_109_F3
+T301112010222203033220002121111332112320033221
+>2_47_241_F3
+T112203313122000303313000213213132132302212323
+>2_47_321_F3,-1745451.0
+T203210103210000121230210132231000022032300221
+>2_47_329_F3,301857.1
+T331202231221222230000220132012132301330022202
+>2_47_548_F3,724928.1
+T212023032302001230033010233110210013000030102
+>2_47_657_F3,-1067251.3
+T022000022021231213321000133002121320100123100
+>2_47_675_F3,-1323026.2
+T223231102123133230310332102230100302002210223
+>2_47_834_F3
+T330032300032300310211203022310233231221322222
+>2_47_877_F3
+T132333321212330113311223123333220321232211222
+>2_47_1206_F3,-225624.0
+T222113230122322312133222320133022003223002111
+>2_47_1250_F3,1084974.1
+T311330111202231132211210133330013213203222232
+>2_47_1294_F3,-2005757.0
+T111212003322132123330000333011232333321220223
+>2_47_1303_F3
+T021311203032202220320101020302101002011013313
+>2_47_1322_F3
+T210013202201310000001030020000000000202010322
+>2_47_1430_F3,1979216.0
+T333010133323131223233231332300312033132132210
+>2_47_1501_F3,-1851931.1
+T321003013312002010210101102302310123013231221
+>2_47_1591_F3
+T322022303213011231203301233200200121302222222
+>2_47_1610_F3,750137.0
+T330203133120212101102032110001012102002020122
+>2_47_1614_F3
+T330231200332223021321333223002212210123000122
+>2_47_1647_F3,272327.1
+T202102312102210200302210111002220302203321103
+>2_47_1944_F3
+T333133111313313213132311221312332222332232222
+>2_48_69_F3
+T202112321220202103132332130020200123332102321
+>2_48_112_F3,1870738.3
+T203332102133233002021222211222311100223102222
+>2_48_126_F3
+T020112112322220221123100022222211202212112222
+>2_48_225_F3,415813.1
+T333110031020230103022003323002100010100012120
+>2_48_291_F3,348441.0
+T100200110013020003211200330101112101222210222
+>2_48_301_F3,186220.2
+T112103123112000133221103230022212132120222211
+>2_48_370_F3,1286926.1
+T033002121303000332020012002311132200220232222
+>2_48_376_F3
+T333122122110200112202102001313113301130011222
+>2_48_507_F3,-1091524.1
+T330202010200032302210101200120013023232202221
+>2_48_543_F3,-158425.2
+T000203323332213223322222302221310222200211313
+>2_48_636_F3
+T100333330312211031000332133012233313232233222
+>2_48_902_F3,1149946.1
+T002031233100212100101021231332302211213110300
+>2_48_905_F3
+T010031321131212330111321121222322131302032120
+>2_48_914_F3,-47478.3
+T323010003102310323311221121231122103222202211
+>2_48_1081_F3,-642402.0
+T022223331200303123101013233013202010201033112
+>2_48_1122_F3,-1915393.0
+T101332301311120110002111030113201001221312331
+>2_48_1146_F3,-1983646.3
+T221002132111302312000310320101111121111111202
+>2_48_1219_F3
+T232300332333112112020200003200332003230022120
+>2_48_1236_F3,-51997.0
+T223001201321123302213223231003300323033110223
+>2_48_1463_F3,-705141.3
+T331231233000031231203201011130110221202010222
+>2_48_1491_F3,1642395.0
+T033321210120000031303310012001033011321310110
+>2_48_1496_F3,1343553.1
+T201000211300103310200310131230221300200031101
+>2_48_1621_F3
+T332023101033011330002101112212112131312322100
+>2_48_1978_F3,-712551.0
+T221321311321230211002022113000202201331010321
+>2_49_82_F3
+T332230103303131021012122333203311232122222303
+>2_49_87_F3,-995256.0
+T012211231121100222002132130113001330223320200
+>2_49_122_F3,1352301.1
+T000112110312333202322101213222311322110121322
+>2_49_520_F3,-1770990.0
+T201003000001203100233132213022331010102000221
+>2_49_581_F3,-1275753.1
+T000330312023123012232120130313120212131123121
+>2_49_663_F3,476986.3
+T303010332120200322010200100323202310110120121
+>2_49_998_F3,-137481.0
+T012021102132203102302210320323001111110101201
+>2_49_1033_F3
+T331213200223112333233331021300212332311012200
+>2_49_1098_F3
+T310113133032120312001311022220210122210112230
+>2_49_1150_F3,1390112.1
+T021332202132000132000023320132123110311003200
+>2_49_1168_F3
+T011012001102221311123002123022220220202212322
+>2_49_1279_F3,882479.3
+T201022101123113303210232113021000012000111001
+>2_49_1346_F3,-1434859.0
+T112200021302020003310020323020102011012310022
+>2_49_1364_F3,-226508.0
+T303332323112323032210101103001120132231202222
+>2_49_1372_F3,212044.0
+T022211320003233232031222311002112321322103210
+>2_49_1584_F3
+T120302203223022112200122320232100321102211112
+>2_50_59_F3
+T000112220300202012301231201132213012112221222
+>2_50_66_F3
+T300103121321232332132122030000221021112210222
+>2_50_235_F3
+T000100332021112332120221112212022322122301222
+>2_50_243_F3,1782220.3
+T033221320113002000211202021110000122002012323
+>2_50_249_F3,574478.2
+T001121222103302121231302223302212132122231203
+>2_50_266_F3,-514336.3
+T011132211322100111031321021033212131102321223
+>2_50_340_F3,-866425.1
+T331010011010212313202031031101122021200110332
+>2_50_350_F3,-184317.0
+T003103311230331221031200032100032122123210121
+>2_50_501_F3,-1512450.2
+T002002220213220030102112010300132320320000220
+>2_50_588_F3
+T210203010311003111023121033002332111102013320
+>2_50_592_F3
+T210023121011303310233133231020310011202221222
+>2_50_753_F3
+T000100002021220202022022023011301300112001222
+>2_50_796_F3
+T031133223131102200021010010320131232112200230
+>2_50_962_F3,1516474.1
+T130131212002103301103330021312131101300013111
+>2_50_976_F3
+T131311100132000113312200111011110311222012231
+>2_50_985_F3,-1951744.0
+T003132033212122221212301033203012330212002220
+>2_50_1038_F3,1861073.0
+T322003202033212310113303201101021210310003100
+>2_50_1117_F3
+T103332321023001113022300011213211321202112101
+>2_50_1255_F3
+T100100000002200030001002033000020301002010320
+>2_50_1329_F3,101620.2
+T000012210230222032103002222000010101010220112
+>2_50_1333_F3,1555634.1
+T201032013210313013221121013030121313001200101
+>2_50_1423_F3
+T302202030033210311233202211332332212012122122
+>2_50_1504_F3,1110010.1
+T023121223000211020202011023021320220232233302
+>2_50_1644_F3
+T210331202201220121022110112002130031122310222
+>2_50_1656_F3,-336932.1
+T312332323003122130110233323102220020200003330
+>2_51_295_F3
+T200111111000113203210031201322212222122212222
+>2_51_463_F3,-1409099.0
+T020231212301320033012132300330312331032313313
+>2_51_510_F3,1224649.0
+T122212021011300300010320012132302311032212221
+>2_51_599_F3,-1898786.1
+T023133102311022323021012112200211002122111220
+>2_51_634_F3,1957561.2
+T000013310212211030000332103112200313112213232
+>2_51_641_F3,1464302.0,1425278.0,967463.0,-665396.3
+T232330300223133020033302131111001312303033300
+>2_51_688_F3,-1959600.0
+T010221033031103202332223232101033201332103332
+>2_51_767_F3,-57334.2
+T230311200220220210232101133001302311231101022
+>2_51_1221_F3
+T321100020211210011320102013232331001130012220
+>2_51_1259_F3
+T313001203002321023113002000010300003103010130
+>2_51_1432_F3,-370537.0
+T330230101302110221101231131310021022223011200
+>2_51_1611_F3,1708662.1
+T112200001331221301001300022212320132222200123
+>2_52_21_F3
+T212231021321001231212022130233232132012010222
+>2_52_163_F3,-1244772.2
+T223201213110123000310323311220302111232112222
+>2_52_188_F3,1435694.2
+T020103210221100220122002222201203332102221320
+>2_52_202_F3,-1169471.1
+T312312332221122200222123123202130200002010102
+>2_52_228_F3
+T210122211133221030010102223002211222112112222
+>2_52_345_F3
+T310103100133230222201213032100121221112212222
+>2_52_564_F3,1200704.1
+T211012202233301122032231231102222213212221321
+>2_52_813_F3
+T033323001213323213031132023120333223312212222
+>2_52_982_F3
+T002000300002103300322300021003010302332000330
+>2_52_1020_F3,1337277.0
+T221023010311013210220300023312301322031012301
+>2_52_1309_F3
+T200211002233130132123300223000301002002030120
+>2_52_1387_F3
+T230023100013120222021201121130210002210012222
+>2_52_1438_F3,1186749.3
+T022201300213201123120000110222122121312310222
+>2_52_1443_F3,1308727.1
+T310221022220103022231213330020022130122310230
+>2_52_1448_F3,192638.3
+T201230133322210102222010322210022022202022220
+>2_52_1469_F3,1239786.2
+T220312312033033323111200122001000212202122320
+>2_52_1510_F3,-1446750.1
+T330011030300223300220023302202230122223332130
+>2_52_1689_F3
+T332112321132333123331322202222012231332211222
+>2_52_1782_F3,86449.1
+T312121201212120210131000322132220211231211222
+>2_52_1935_F3
+T011133133012312112333321131212332222232212222
+>2_52_1996_F3
+T320122132332221333221330021130112122202222222
+>2_53_80_F3,-1728355.3
+T033120211021102301003123212222332131022121221
+>2_53_306_F3,1299669.1
+T211233021321102123233233200002121310233213222
+>2_53_500_F3
+T012202202013220000102012010332300223222020220
+>2_53_584_F3,1677101.1
+T010320000212230222301122022301100303302213111
+>2_53_637_F3,-1161818.2
+T010030320321212121003110213120033232131011200
+>2_53_672_F3,-87043.1
+T003131022131330132201132030021222201103000222
+>2_53_701_F3
+T331003320331020110002132122221110112202212222
+>2_53_955_F3
+T212010223203320213333300011321222122232112322
+>2_53_1194_F3
+T232313312131021330012111031010210002232311222
+>2_53_1306_F3,426121.1,92799.1,22002.1
+T200211032233130132123320323100300002200030322
+>2_53_1344_F3
+T022313311002010031121013013120022101012002120
+>2_53_1478_F3,-1696139.1
+T000032332303320101002000222310031101123231222
+>2_53_1535_F3
+T212101302223323310323322033233212203222332222
+>2_53_1741_F3
+T233113331121113332323321211222312321232212222
+>2_53_1776_F3
+T311121111033113212312222332222312221122212222
+>2_53_1939_F3
+T000103202012220200030310011002110112302011122
+>2_54_119_F3
+T020113122222220322022322013022222111222222222
+>2_54_149_F3
+T302313111302320101013332120023112122222212222
+>2_54_166_F3
+T321123003111222022113110013212312020111212200
+>2_54_176_F3,-77369.1
+T021311210300022202002120101230302312330121302
+>2_54_190_F3
+T300202212221132201122102132122322222332220220
+>2_54_222_F3,1865633.0
+T120201232301231121322011220102331031301001211
+>2_54_368_F3,-536150.1
+T213333201301033222310002010313103211113212211
+>2_54_786_F3,-1438761.1
+T111320332012001123022101332301102221122211201
+>2_54_874_F3,-1075048.3
+T032120331023311021203321100222022220222200212
+>2_54_1082_F3,436651.1
+T113021213101332202213322022101200010322012220
+>2_54_1139_F3,-1680375.1
+T030300002130121132233021100301230201101020120
+>2_54_1268_F3
+T323013311233003223132312032030110101202011312
+>2_54_1608_F3
+T113000103131032303111301222211321102220011212
+>2_55_60_F3,-1768721.1
+T102023100310110322201102333221131001112310221
+>2_55_65_F3
+T322111201232213322131021220022132221312212222
+>2_55_297_F3,1283855.1
+T001232202211113133213000022222200220000212222
+>2_55_399_F3,-260328.2,-260148.0
+T233202032032200220320223202013001110100112123
+>2_55_406_F3,-1164501.2
+T000232211130121133211332123013132312232000220
+>2_55_457_F3,251342.2
+T210023032330333310332310330210022000022233223
+>2_55_464_F3,1950906.1
+T213220202003311230110130133200103120133201332
+>2_55_507_F3,-722347.3
+T330231200213033000033001222010132333102030332
+>2_55_573_F3,-1554327.0
+T032201221121220302003002310210010202122222220
+>2_55_629_F3,-1972984.0
+T023112300011020320030301122203121322230032211
+>2_55_644_F3,1644509.0,1279780.0,-1054059.0,-520083.0
+T021010210100110233133321011233121001312320220
+>2_55_894_F3
+T213333223323012332001300333010320321302012232
+>2_55_932_F3,-1742418.1
+T010332033211330031301010132211012322100021232
+>2_55_1008_F3,857699.0
+T031211312000113031320311210112001030020121112
+>2_55_1035_F3,1054861.2,520885.2,-1278978.2
+T000012212222003120302211123010220132212221222
+>2_55_1053_F3,-479867.2
+T223122233000022300302222000332220132023320100
+>2_55_1094_F3
+T332213322133303313031203133003120331220012110
+>2_55_1248_F3,423260.0,329219.0,89938.0,19141.0
+T323100023323013020321323303322332001303120322
+>2_55_1336_F3
+T010003101222120002022111012010111001112000310
+>2_55_1415_F3
+T312013212030102220202202122222222222222222122
+>2_55_1504_F3,-1748707.2
+T221323122112011302222001123010012300313322112
+>2_55_1613_F3,1425202.2,967387.2
+T202333012320123311120303233002202033121210301
+>2_55_1678_F3
+T233133333133333222133211133212322222232222222
+>2_55_1685_F3
+T331132111131332122221331223212222222221222222
+>2_56_73_F3
+T011023111330031210013112301111113201111111222
+>2_56_92_F3,1482392.2
+T110030322230303331023310112002013221133011331
+>2_56_113_F3,1287677.2
+T330030122200201023020112213312003310212202223
+>2_56_204_F3
+T102223331102131132231021030203112211120011201
+>2_56_230_F3
+T310110010220220010122002122010032222211211222
+>2_56_241_F3
+T133011020210131133221301131123112221012212222
+>2_56_439_F3,-79501.1
+T333012000221103100101203133002102203311130331
+>2_56_814_F3
+T233233223010321013330003120023033123013020122
+>2_56_818_F3,350064.0
+T111230233201221021231221100103123321222111103
+>2_56_956_F3
+T012310230301120013010000102331020210112201222
+>2_56_1041_F3
+T310122332223212312222320231002222101212232322
+>2_56_1098_F3
+T310312300013121313122322033223230212223022300
+>2_56_1288_F3
+T132122122101233231011022211122102111222110222
+>2_56_1422_F3,-809514.2
+T031200101331012231123122032320220201302120102
+>2_56_1427_F3,-1290638.2
+T132210131111011033233001011100111102201113102
+>2_56_1478_F3,1834220.3
+T010032112122320011033232202300311223130031120
+>2_56_1522_F3,1785931.0
+T101132110100032332132220310130103310000210112
+>2_56_1562_F3,-215112.0
+T120013001220330001022010010312123101321010101
+>2_56_1596_F3,-221035.2
+T023010102110323003103001011210112111133011322
+>2_56_1659_F3,-1262425.0
+T000221021102101013321110023010032021323310102
+>2_56_2012_F3
+T313111111233320212311223211222312122132212222
+>2_57_80_F3
+T333120110331230031200221212022332121112211222
+>2_57_89_F3
+T310110322232300332021210032122312221112012222
+>2_57_123_F3
+T000220202101212210110322121211330213222323222
+>2_57_416_F3,1917147.1
+T011132323031332202303133021230211100133231223
+>2_57_471_F3
+T321322113123220213222113212123110112211011330
+>2_57_590_F3
+T212012203223123100331223213203302313200210221
+>2_57_682_F3
+T323203103321320323202201221022210222332132220
+>2_57_755_F3
+T210300102021110102223211233323003221132111222
+>2_57_869_F3,-141062.2
+T011012210220201310112220320000010322202221222
+>2_57_906_F3,-1816072.0
+T111231233233021211221011021100023101330013222
+>2_57_1075_F3,-1196190.0
+T032202231223233032321123221011001113120312202
+>2_57_1112_F3
+T331013321033201221023220131202123222222322220
+>2_57_1322_F3
+T230021301033101212132202023323220211223012223
+>2_57_1411_F3,423839.1,329798.1,90517.1,19720.1
+T001110012233022200302010013201201022012222202
+>2_57_1531_F3,-244582.1
+T003330102220203323310123121010210211002331110
+>2_57_1585_F3,-1543438.0
+T020311221331211001331323312032001300131002321
+>2_58_628_F3
+T202122300001030320002020032202231301200220020
+>2_58_900_F3,-63542.1
+T020213222003321132012123032330222232232102120
+>2_58_1347_F3,-1546446.1
+T001021021010102001023323031203330023101123233
+>2_58_1384_F3
+T321133330222323212103310031012320131213012132
+>2_58_1433_F3,-307488.0
+T331132321031301011301013322221232302032013322
+>2_58_1463_F3,-664751.1
+T001323203103001030332210131211323123203233320
+>2_58_1481_F3
+T202223301301000023203221023110300003002210130
+>2_58_1616_F3,1904703.2
+T333013233311031210323300002212211221111131200
+>2_58_1648_F3,-504255.1
+T210320032320313023121220231330300200103223213
+>2_58_1689_F3
+T311113111332213232122132221211332222332122222
+>2_59_210_F3
+T313101020003203112220023031212122023122022222
+>2_59_249_F3,1009689.1
+T020010221033321020123320302231022223322332333
+>2_59_282_F3
+T320221323213132130323320221222012121122221222
+>2_59_313_F3
+T112022322203123332130011211222111001122031222
+>2_59_363_F3,-1708512.2
+T003300210101311333011220210101102112122210222
+>2_59_512_F3
+T311323123223001330033321221232122221222212222
+>2_59_524_F3,-145208.0
+T220231210012032332301212032021032311212122222
+>2_59_560_F3
+T200011022331303013223213230032300221202132231
+>2_59_586_F3
+T003123120202232212033333223202121323212222222
+>2_59_662_F3
+T333000300331220112023322131022332233332211222
+>2_59_914_F3,58239.2
+T301011233332122231121211132030221222022111320
+>2_59_1130_F3,-1593287.2
+T132213321020131201020120212220101201330110200
+>2_59_1168_F3
+T230211100333010312012323123022210213202021321
+>2_59_1250_F3,1116590.1
+T222213230013102231132103123022231022202012230
+>2_59_1274_F3,-322882.0
+T000001023011031020122210223230220211330013310
+>2_59_1486_F3,1332853.0
+T323311223132010301213233122000201201210020102
+>2_59_1592_F3
+T001022213033222032221330211013200100203123102
+>2_59_1630_F3
+T332000322120202103132322333200330123310003320
+>2_59_1652_F3
+T210300020133002020321222031130301103113022223
+>2_59_1754_F3
+T333102113123311322132121322212322223222222222
+>2_59_2000_F3
+T223103111031220213311111311232332222222221222
+>2_60_159_F3
+T013200001331333113120111121203113121112211222
+>2_60_201_F3
+T303033331311133022021323331303002022112313322
+>2_60_552_F3
+T000101001020330302011122011222101221112211222
+>2_60_624_F3,-1945205.2
+T100023013010102223122122313222020133232211322
+>2_60_787_F3
+T310200121310203200311223112001232120132211222
+>2_60_811_F3,-1217370.2
+T320320120033231002201233121202130022120031102
+>2_60_931_F3
+T020311320223032133001201332021232331230231223
+>2_60_938_F3
+T102200323032031001203301003302203020013310200
+>2_60_973_F3
+T110112131012313223210122330000010112120011322
+>2_60_998_F3,1974016.1
+T030131000302310010212302311312022330202102320
+>2_60_1081_F3
+T213011311121321210303302333102232101212312222
+>2_60_1099_F3
+T302002030011302112132000011000112211123311332
+>2_60_1142_F3,1948547.0
+T312331011132310110331010122201130013031000112
+>2_60_1310_F3,-223231.0
+T303210201211130220000210001020002200121332113
+>2_60_1390_F3,1692457.0
+T303033012222133201021330101010121231012302021
+>2_60_1492_F3,298409.0
+T132123102012001202031030022122200331131111202
+>2_60_1607_F3
+T313010201121300330000110212203200330303123321
+>2_60_1625_F3,105211.0
+T101133212022212020030330212030201311020210200
+>2_60_1751_F3
+T231311313121111212113322231222112221232212222
+>2_60_1976_F3,-1069725.2
+T312120323022110321101002212330312323311130300
+>2_61_81_F3
+T020100003002232310010221031200231221112032222
+>2_61_144_F3
+T300101232001213303002333010222322223222212223
+>2_61_181_F3,-25423.0
+T030322211331301313030111322200112313211322322
+>2_61_185_F3
+T120323300021230311100200032232222222222222222
+>2_61_266_F3,-1325495.1
+T010133210302031212331321010333012220202130222
+>2_61_564_F3,-132730.0
+T000031201302312003121122110132112211232310212
+>2_61_583_F3
+T001013032201231202002330221221122210202220222
+>2_61_635_F3,1557932.2
+T122233302203102302012001022113310213202111222
+>2_61_821_F3
+T133323310031320332011312332112310111132111221
+>2_61_846_F3,584866.0
+T210302332310101220021132302221322101221023222
+>2_61_901_F3
+T020003212303302132212120032011331001200032120
+>2_61_1032_F3,-1746074.3
+T011223323000200321103302313303012301122231330
+>2_61_1102_F3,1053186.0
+T302101231213312210110330111010012220032101331
+>2_61_1360_F3,-991795.0
+T001020203130123213221012020323122320210011121
+>2_61_1427_F3,-1743185.3
+T122111101112002113303311011133010001303013110
+>2_61_1517_F3
+T322113000111320301320323300033030000300010310
+>2_61_1770_F3
+T031133313323113232333322121222112222232232222
+>2_62_122_F3
+T332233123201033321002022031233231321122322222
+>2_62_312_F3,-1711670.1
+T332233113010013321010011220232331210201031221
+>2_62_374_F3,274328.0
+T001322002022010201221230312213022201000020123
+>2_62_386_F3,-409314.1
+T221301322211232310333103030311303121011001302
+>2_62_433_F3,-1298391.0
+T032330111000130121320301300013302113002200222
+>2_62_481_F3,1454058.0
+T320013320100320220011010032013000122123030332
+>2_62_754_F3,1343482.2
+T130133003013202211203300110230100221312311122
+>2_62_783_F3,-183319.1,-183178.1
+T111303020120231202220023011303122111203010221
+>2_62_796_F3
+T331103310133121310201222213032211112301112203
+>2_62_948_F3
+T332301220031223210013321123133220312102022222
+>2_62_982_F3
+T211131100033312322023210332130321031212112222
+>2_62_1003_F3,-1628041.0
+T000012111203100200223120230102201333201313223
+>2_62_1242_F3,652798.1
+T333102031312132202330213103310033110123300111
+>2_62_1476_F3,1897562.0
+T332332111202200321320100203101222101130111130
+>2_62_1556_F3
+T333112203023232103211031113022130111312010322
+>2_62_1572_F3,-1031193.1
+T220201210120000112010001230001130132001210100
+>2_62_1588_F3,-599027.1
+T312012130213032022320122001013003123122231100
+>2_62_1763_F3
+T031323313133213311121321233222222221232232222
+>2_63_32_F3
+T202113112010112012301001021022112222132311222
+>2_63_318_F3
+T120331132012031121012110322122122001210122211
+>2_63_423_F3,1294634.1
+T221320033333201223312033102330113300220113321
+>2_63_604_F3
+T321213021002200002100102013012000323103010222
+>2_63_609_F3,1458404.1
+T301123221012003132121122320111000322113201111
+>2_63_706_F3
+T332023332231013312011322332122210222220212222
+>2_63_742_F3
+T033021312003023112000321311022030121302220222
+>2_63_829_F3
+T213102233232030331012311133100311032212032223
+>2_63_962_F3
+T332021333213021330012323323300310322202232222
+>2_63_1218_F3,-675020.0
+T020020220201032022332210301302010123310001130
+>2_63_1225_F3,-1728450.3
+T010321202011301221301010331020210301100211202
+>2_63_1482_F3,-652501.1
+T022110021202000322132003000320330320201213320
+>2_63_1518_F3
+T122123231121323311300333312003031301311013300
+>2_63_1535_F3
+T322212222002120030100221013212001020210010030
+>2_63_1596_F3
+T333323102133110332201311113310312212300012210
+>2_64_47_F3,-491134.1
+T332100322220132012011100023203011030020111102
+>2_64_206_F3,883024.3
+T010032210020122031301120311003112213122001221
+>2_64_210_F3
+T230213322133233020111012030123311232212120223
+>2_64_214_F3
+T331110231302330000321222320002302012011322222
+>2_64_250_F3
+T300120331210301123311030312202202220222212322
+>2_64_278_F3,1547675.1
+T322101331010213223010011003031011011321032322
+>2_64_322_F3,-301355.1
+T102121012301213020223103220313123021220311113
+>2_64_364_F3
+T100313212322213301001223113022132221202112232
+>2_64_371_F3
+T122331032202130333221300111333313303322113222
+>2_64_459_F3,1690806.1
+T213122112223111203330110133220013031232330200
+>2_64_541_F3
+T333001111013233313102202313000312321212001220
+>2_64_558_F3,-1731202.2
+T302233020023331020232220001230012003323102221
+>2_64_577_F3
+T332313101023110320233220123212112122222312220
+>2_64_585_F3
+T001213322201232302222032120222330113232221232
+>2_64_624_F3,1876734.1
+T213022033221103112211030022201022213233202133
+>2_64_642_F3
+T332222303321303023222030333210030222202220222
+>2_64_702_F3
+T213313311133211313321321323232112121222212222
+>2_64_924_F3,1711981.0
+T032102210203310131031031123330122000102230111
+>2_64_1098_F3
+T310102331211102212022321311221112111113312132
+>2_64_1190_F3,-685543.3
+T103013000223131011202010111130003201020322210
+>2_64_1228_F3
+T013311202211011211211122033201110211202112222
+>2_64_1250_F3
+T201123133022033000302212021230020101103011320
+>2_64_1254_F3
+T302111222010231123031022112312100121223012320
+>2_64_1381_F3,1058332.0
+T022212331001100022023311320301100202030322221
+>2_64_1543_F3
+T311002032322330200111201023312222222232022320
+>2_64_1605_F3,1824904.0
+T303210331201320333001010120202120221211212211
+>2_64_1611_F3,-790180.1
+T231101210131212221021203223312301310123121302
+>2_64_1983_F3
+T032223121212223123113303030112310121112211222
+>2_65_97_F3,-1506985.3
+T210222032330123031001222200011002203312320222
+>2_65_181_F3
+T011202101111233210032111121222111002111211322
+>2_65_347_F3,316591.0
+T212310120311033310202110311102102313232111222
+>2_65_406_F3
+T320113331312220332222022012232111221112211222
+>2_65_680_F3,-1331424.0
+T221230010103321031011103301032011101222201223
+>2_65_748_F3,-716076.0
+T002231231212110032202021120102011133020132223
+>2_65_843_F3,1778242.1
+T320302031133111010110102011210322113201023132
+>2_65_859_F3
+T331101313323120112322213132002222222332212222
+>2_65_871_F3,1761121.1
+T312011020102221020111322311311210100133223333
+>2_65_899_F3
+T130031312332033311101312212013111113312010230
+>2_65_1006_F3,-1376283.0
+T103100220111121221032122002322000001203310310
+>2_65_1012_F3,1247584.2
+T320330313210100120123221000211001001203112302
+>2_65_1085_F3
+T022101313020030221001110010000221302213000210
+>2_65_1105_F3,1602424.1
+T222000211302320010130001213031220033230022123
+>2_65_1143_F3
+T000101101111020120101012112230112211112011222
+>2_65_1168_F3,-66865.0
+T031011010112323001012321023022113233302121101
+>2_65_1186_F3,169031.3
+T230002010200102201010002330003320110000220210
+>2_65_1204_F3,1668476.0
+T232320021331232022310122223300213210111230132
+>2_65_1373_F3
+T123201231323122113011233112220122121211112322
+>2_65_1575_F3,1726268.2
+T210322021001033323012122003332030012100320102
+>2_65_1618_F3
+T322220312101111111111112111111111111112111021
+>2_65_1626_F3,-422085.2,-328044.2,-88763.2,-17966.2
+T322102101302010220233300132023200030010111313
+>2_66_127_F3,761920.2
+T213303122311133313103213030100210330110011222
+>2_66_136_F3,735802.1
+T011301122331110111202220122020311110133132211
+>2_66_185_F3,118076.1
+T020020120010101221322201122130331320202012220
+>2_66_307_F3,-1506663.2
+T330120301021020000020223031102201132010123212
+>2_66_398_F3,160319.3
+T212011333000001033201301312130103301022000221
+>2_66_498_F3
+T311012110121220112021222210320110000220221232
+>2_66_631_F3
+T212203323233220211221122223221321221212012200
+>2_66_670_F3,801845.2
+T132210300012330023130022100022120211222110321
+>2_66_885_F3
+T131033311032100132233311223301123002212312201
+>2_66_918_F3
+T032231223223210231221331211020323122302022330
+>2_66_959_F3,-975389.0
+T220120330311112103232300223320100312003031213
+>2_66_1049_F3,-394222.3
+T033102231021121120000011032032130231312112220
+>2_66_1062_F3,-310637.0
+T110002230110003220321211031323222202030033320
+>2_66_1264_F3,-480238.2
+T331201032300123300330330122102010101122223202
+>2_66_1410_F3,-292955.1
+T000001111310102012200103000031122001323010332
+>2_66_1422_F3,-1494230.3
+T231123132020231213210213202113301003111220012
+>2_66_1471_F3,-157121.2
+T300122012123223102203133023113013323212000213
+>2_66_1510_F3,590022.1
+T000031110233210110300120030202212210000000130
+>2_66_1541_F3
+T313122021322330200200220023022001100231002310
+>2_66_1556_F3,695307.3
+T010302331132132321101111011301110011112101111
+>2_66_1588_F3
+T211222102231211322222122021013101003102032120
+>2_66_1601_F3,-1232354.1
+T003311321230320131202313013000320022310230223
+>2_66_1677_F3,-194451.3
+T300103323203333031132012113200020213202212310
+>2_66_1691_F3
+T333113331121133112331231323213322222332212222
+>2_66_1975_F3
+T301122322220032120111302213002211022111023300
+>2_66_1989_F3,-1751682.1
+T330220331103220003301033003003120303033122130
+>2_67_87_F3
+T213110330120312013110131112212303132002111222
+>2_67_146_F3
+T333103122303311021323221321111212121232112222
+>2_67_159_F3
+T211100131122320002121012123221022002221122101
+>2_67_217_F3,636656.1
+T212320312001332120112220000200222012110332222
+>2_67_281_F3,1175831.2
+T303203322121021120003320122001211031122232221
+>2_67_315_F3,-115856.3
+T110001312322000131002012310333211330310012300
+>2_67_333_F3,-1395597.1
+T210100322310021320102200030010201121010312223
+>2_67_415_F3
+T030213022212300333023323030302323120230203322
+>2_67_476_F3,1732478.0
+T323022103002213233000221320032233122132130221
+>2_67_591_F3
+T233033203112221123111310333123211002212022202
+>2_67_667_F3
+T332212301102303023102002110010020221222210222
+>2_67_793_F3,-1542698.2
+T031210110102103031101223330321213230012200222
+>2_67_1029_F3
+T120210311023233222120203330002132023201122221
+>2_67_1149_F3,1651419.1
+T221233102021231230112022021230330002102031030
+>2_67_1434_F3
+T331303111232321320201212112131211012312312222
+>2_67_1458_F3,-103179.2
+T230213130000110110311212311322121111211113200
+>2_67_2011_F3
+T233132313323323232332221232222312222232112222
+>2_68_41_F3
+T333223210013310202122001123302202323012320222
+>2_68_132_F3
+T010301122331210202212223122023312122132211222
+>2_68_176_F3,-449474.0
+T200130323011221120301203312300001020011312133
+>2_68_644_F3
+T301200103122300210030130100202201222012221222
+>2_68_707_F3
+T303221011000202112331101113132113221112211222
+>2_68_800_F3
+T002201030000200020001000312210210301032010222
+>2_68_895_F3
+T211313311132313311133231323222312122232212222
+>2_68_991_F3,256049.0
+T012210111002311220100002000310021220233203333
+>2_68_1219_F3,-1789620.0
+T031231132303320012022321022002011223322211332
+>2_68_1266_F3,1075014.0
+T220220220001123302120112320133021231310222200
+>2_68_1299_F3,2001396.2
+T210121032022131000003312202200112231022022110
+>2_68_1315_F3
+T333333210223033221031300022220320113212312222
+>2_68_1375_F3,857125.2
+T303212032213112101123312020102102021012210320
+>2_68_1584_F3,1643690.2,1278961.0,-1054878.0,-520902.0
+T202130022231221000022122020313100113031300212
+>2_68_1643_F3,-424622.1,-330581.1,-91300.1,-20503.1
+T030120233231203012023322000332111232210311331
+>2_68_1661_F3
+T102100311122333102011113111222102231102011222
+>2_68_1674_F3,-890077.3
+T020330223113223201330013213033021001101302120
+>2_68_2034_F3
+T323113221013120221322222232222312221122222222
+>2_69_274_F3
+T031201322011302023103332020202131221331111222
+>2_69_325_F3
+T313211113131333120001110013102212021113112122
+>2_69_359_F3,1476860.1
+T231210013223223033232101113233212221332210203
+>2_69_374_F3,143232.2
+T000221312223333323012022011323212101322111221
+>2_69_392_F3
+T233103223232202332111202031231222221213212222
+>2_69_641_F3
+T231010103122302223032030312002222222222221222
+>2_69_740_F3
+T330313131133313331131222311232122222212222222
+>2_69_805_F3
+T112201032022232332202002312012123201020010322
+>2_69_1033_F3
+T132203302221220331231203322202132322200012112
+>2_69_1160_F3,-1842540.1
+T221230323320221001203022312222022102112120111
+>2_69_1190_F3
+T020032330320031303001032013222002221020203121
+>2_69_1485_F3,-1262538.3
+T021201132020200013103100023003031320112210200
+>2_69_1547_F3,-401723.0
+T312030030220221310300220222301312031322320120
+>2_69_1559_F3,1238195.1
+T000201210221212221302110211232100212122302222
+>2_69_1648_F3
+T122201203330022022223221011110123332033013310
+>2_70_80_F3,-1242251.2
+T002223202030122111321110231221222313212130221
+>2_70_126_F3
+T010213122221200011012310221201112221132231222
+>2_70_154_F3,151825.2
+T230222021322000120033102331002232011222222222
+>2_70_228_F3,1209595.0
+T203301011033102032323020222221320111132211211
+>2_70_263_F3,-1999969.2
+T321000210121103022201212031032302211312221230
+>2_70_418_F3,1857699.3
+T330123332313320112012310023311123021130313322
+>2_70_488_F3,1441569.0
+T330121132200033020200211313000230000001210211
+>2_70_503_F3,1090867.3
+T312220221112332312301200000203002131320001301
+>2_70_519_F3,-398948.1
+T312231012101000003012330022320112200320012103
+>2_70_601_F3,-1786053.1
+T031102001300101323121202330121233211213102211
+>2_70_774_F3
+T211101221233213230002303322210132223233122222
+>2_70_921_F3,-1925586.0
+T032321230320013223000122111332312130032100102
+>2_70_933_F3,-1417715.0
+T032302330002123120200112000200330123102033310
+>2_70_937_F3,-936417.1
+T323212021332320021013122213301020012332020223
+>2_70_983_F3
+T023301120021202332333310333201221121110211330
+>2_70_1039_F3
+T003301130230130031131210333122211222110101200
+>2_70_1113_F3
+T212233302010333203331021013201330000302030330
+>2_70_1117_F3
+T302201312033332200131021013201302102022201320
+>2_70_1243_F3,-704484.0
+T030200212311220230013100222300202131002030030
+>2_70_1298_F3
+T201021212023031311003012200202112221122212110
+>2_70_1505_F3,-610743.0
+T000110221133221002001131311010002301111121101
+>2_70_1515_F3,1651760.1
+T313020021031120113000210010110110100111113111
+>2_70_1591_F3
+T130202132031300220033333311031110112202012310
+>2_70_1606_F3,1545035.2
+T333010032210030213012012221321101221211301223
+>2_70_1659_F3,1065606.1
+T011200203332323102012113211122010033132111210
+>2_70_1665_F3,1647904.2
+T310333012232102102010322230002101210113010330
+>2_71_23_F3
+T010211002302001111022311021213111121112111222
+>2_71_355_F3
+T300201302221332223032211100223302111330123222
+>2_71_578_F3
+T101002222223002230001212010202202323212233220
+>2_71_620_F3
+T131231020301003203302112210122111331113311200
+>2_71_625_F3,628467.1
+T002100230012310032102230001302110231222210123
+>2_71_675_F3,-131484.3
+T322300021002202031100030000022300023002220322
+>2_71_872_F3
+T210223221113100330101301311011130212101312223
+>2_71_966_F3
+T220211333332032300030002100102020312200232302
+>2_71_986_F3,1363749.0
+T023201120321200333303132003102021111110001330
+>2_71_1020_F3,1538240.0
+T000001121311033020222022333311112100302113122
+>2_71_1048_F3
+T231220301102203013321013000201110130103223323
+>2_71_1173_F3
+T120012220131000110032221032333120031103011120
+>2_71_1262_F3
+T322123200331302332123230022123310023102222222
+>2_71_1340_F3
+T331211110222002230212212222322220122200112221
+>2_71_1347_F3,1119380.1
+T301220132132312022223101123211233102032022303
+>2_71_1395_F3,209767.0
+T123112020210212033002101120112230201210123300
+>2_71_1402_F3,1027193.1
+T030020131003320232331323220130101120123110221
+>2_71_1538_F3,-1276186.0
+T022332102013320202100112311201122010033013330
+>2_71_1554_F3
+T002001212331000322102110011010110002112021020
+>2_71_1973_F3,-141366.2
+T001032022022000123031013112131230122222000120
+>2_72_40_F3,-1487181.1
+T303231021010322202121001223322201322312102210
+>2_72_141_F3,14040.1
+T301021200210223031021233101012011103322002220
+>2_72_309_F3
+T030201032323101330112322221020223322022221222
+>2_72_438_F3,1512397.2
+T110020201132131012302323113331021121103012221
+>2_72_450_F3
+T311011012331223210332101101022112322123131223
+>2_72_500_F3,619754.1
+T100313321000120010211120330302131101230010232
+>2_72_524_F3,-773919.0
+T201300022202223000113203312302011210000112223
+>2_72_540_F3
+T310301210323220033100021011332012221312000322
+>2_72_558_F3,700216.0
+T001330230011333300330001012120120121013331220
+>2_72_637_F3
+T130201110031130330331313113101111212212122222
+>2_72_718_F3
+T121331111123313122332323332322332332332132222
+>2_72_1071_F3
+T320303301123321313121332121010210231303211322
+>2_72_1220_F3,1269710.0
+T020222002221321323121322033000222201332302331
+>2_72_1246_F3,-51106.2
+T330003121230302231222200102130023032002220100
+>2_72_1255_F3,-244959.3
+T102022300011010022132301020020131103212010330
+>2_72_1311_F3,1357252.3
+T110012223322122122001330022230330013222022300
+>2_72_1438_F3,-1399634.1
+T003013200120023020002020301022120032121212310
+>2_72_1570_F3
+T313123302133202333102211222012322112202012212
+>2_72_1596_F3,1075311.1
+T302132132013200121212330000032001301011100002
+>2_73_189_F3,775475.0
+T033202302110230130203221201101000030202212322
+>2_73_246_F3,-803094.2
+T213002132323301213202022132011233211330032202
+>2_73_382_F3,-233624.0
+T001222033212202230323321123131132212032311221
+>2_73_446_F3
+T010132211103302222332021202033222122121121222
+>2_73_468_F3
+T233213320013220233203220121233122233232211322
+>2_73_534_F3
+T302132321203101332012030210332232120222200122
+>2_73_631_F3,-1679603.1
+T000113323013030200023022010212000130002300332
+>2_73_707_F3
+T333111131121113112132231132222222221222212222
+>2_73_749_F3
+T332203301031110332233332232112111212213122322
+>2_73_805_F3
+T223032100311100222002320132233121201112310221
+>2_73_905_F3,1209064.2
+T221203222310002101010020100220021303132333123
+>2_73_1075_F3
+T330303223323332111221331031211332301222022312
+>2_73_1190_F3,925048.0
+T230203332321210222302222313210101220020123101
+>2_73_1318_F3
+T200031000103211001031000220012100013001010330
+>2_73_1371_F3
+T202220212202202302010210110000300211010300120
+>2_73_1387_F3,-931612.0
+T000111020112011101020321011031321131011010231
+>2_73_1480_F3,-1740573.0
+T001312002301323310012212322212231013101030311
+>2_73_1515_F3,959877.2
+T220333300330322313110220222002022110331112111
+>2_73_1739_F3
+T313112331312213312121323221222112322332212222
+>2_73_1936_F3,703131.3
+T231022201223133120321010210101130102311011332
+>2_74_55_F3,620096.1
+T123330220000033031202330033023222111202022222
+>2_74_106_F3
+T303322221331231210022222221132310322122231222
+>2_74_376_F3,1390398.0
+T000222021111331320022012013302221323111002131
+>2_74_431_F3,-1630920.0
+T330130123020213213230100101010113231132312320
+>2_74_537_F3,80654.2
+T203130022020130001120130033333232212212200222
+>2_74_542_F3,-1454693.2
+T110312102123021033102011001031022323210011122
+>2_74_648_F3
+T000203001201002332132013022313311221112210220
+>2_74_671_F3
+T002200022002220230122032022112000102212030222
+>2_74_687_F3
+T000220231220113311332220101001031122101112121
+>2_74_846_F3,677930.1
+T303202201032222013312221012012300221031032313
+>2_74_875_F3
+T332113332212320233011311033111321122302232222
+>2_74_1256_F3,1617245.3
+T312123210302013302132101222020331300100121111
+>2_74_1358_F3
+T333201310123001210213312032201320202211132221
+>2_74_1415_F3
+T303102232233233232220021210021021320101112121
+>2_74_1427_F3
+T211212112113311001311002111103110111102011222
+>2_74_1449_F3
+T231232300123212233023302033000130332211012222
+>2_74_1485_F3,-1378072.2
+T323103320111311032203033000000230001210012222
+>2_74_1623_F3,1567840.2
+T011322022001231302332130022022312213202203220
+>2_74_1638_F3,-355328.0
+T030302101020101300200111030222102103002013320
+>2_74_1647_F3,177736.3
+T032000132312321022003222202232022311003013311
+>2_74_1672_F3
+T012330112313333203110101122031220101110233200
+>2_74_1683_F3
+T213112331132113322131231232232222222222222222
+>2_75_66_F3,543321.1
+T113312002211110220110231032301333303111132232
+>2_75_299_F3,115389.0
+T212103302011012230013312032310102121022231221
+>2_75_351_F3
+T201132232202132020123111222312230122221322222
+>2_75_485_F3,-1587225.1
+T313221220122120331212210010303203001200002200
+>2_75_495_F3,-1168739.2
+T102102110101332031022000112201032121222310222
+>2_75_678_F3,-112067.1
+T231000221312023032313031010321103321110111332
+>2_75_775_F3,1578891.1
+T110121212330003222200313000201003220223122233
+>2_75_821_F3
+T310210113320023333031300033000220122302011222
+>2_75_840_F3
+T000132220123232210322011210121110111102000222
+>2_75_909_F3,-969361.0
+T123223301203022002011220020000110001111012103
+>2_75_1039_F3,1559365.0
+T312322331220230032133011003200132103222211131
+>2_75_1065_F3
+T201021212321230012102120020023010221112001022
+>2_75_1102_F3,889921.0
+T001202222310330223210131032210230201122000322
+>2_75_1110_F3,-1378189.0
+T222323030122010301122221233233113013000012300
+>2_75_1172_F3
+T122312122331323111312211232003311031213012220
+>2_75_1181_F3
+T232023312233002321323203022232320002202022322
+>2_75_1382_F3,-819535.3
+T012001202021331112002201110000301300002313310
+>2_75_1540_F3
+T321212102233021212232220011233212102202222210
+>2_75_1577_F3,-1879284.2
+T303111212110100031021111210231100301101211212
+>2_75_1942_F3
+T200022100201302103322003011022213211112111322
+>2_76_34_F3
+T310332210013113311122113201101202212102231222
+>2_76_69_F3
+T210212010331111122012222012221223020212222222
+>2_76_76_F3,-1940211.2
+T310130000020313020332102012000312132232222222
+>2_76_209_F3,1809190.3
+T302002100002032123312121121212203312232322222
+>2_76_224_F3,-1441290.1,-987409.1,-593552.1,-576166.1
+T220312213202331200120013030112013303123301222
+>2_76_273_F3,-1711247.0
+T031222300213201323111222100113322233312123221
+>2_76_364_F3,-353530.3
+T020312332211330101301030322132233312332122222
+>2_76_509_F3,1241289.1,-210777.3
+T010302100000213033222021121231312110102223222
+>2_76_653_F3,989834.2
+T212013221300033022200220211312303220233332233
+>2_76_734_F3
+T111311111333111333131321321222222232232212222
+>2_76_950_F3,1481643.0
+T102003110223301002123032200021132120101010100
+>2_76_1046_F3,-590992.2
+T132300332122220203301031012200321011300112220
+>2_76_1368_F3,1349814.0
+T202220212222202312312210120003302212210302100
+>2_76_1441_F3,-755469.0
+T011120030011120031111310100101323233102103303
+>2_76_1552_F3,-941398.2
+T210322212320001020222120031121210301201121112
+>2_76_1931_F3
+T122321101001323313332233211222112221212211222
+>2_77_249_F3,-1715757.1
+T201322331121101323231330230313223201133333222
+>2_77_290_F3,-638046.3
+T212330033100101122120000332312223322212010222
+>2_77_311_F3
+T220100130022322201132332003320102222222122222
+>2_77_360_F3,-1192611.3
+T230121202002210102121012102212003213002101123
+>2_77_406_F3,-1075271.2
+T003323333211223322210020110200101322301012222
+>2_77_424_F3,10810.1
+T110002010033223121102001031130220121111221222
+>2_77_572_F3,153122.2
+T020330200332132022231011103130012310203002132
+>2_77_688_F3,-1308613.0
+T110101223120210322132220201201230122021012231
+>2_77_768_F3,-1866130.1
+T303010212312332232133233113320111233210111310
+>2_77_934_F3,561697.1
+T333322303030333012201223000132302100003103333
+>2_77_1000_F3,-615481.1
+T021021203231130312202022020330303321202310331
+>2_77_1011_F3,1443897.0
+T033201012001310121321123021230332301023013210
+>2_77_1017_F3
+T202330113002332002022220023231332302022010223
+>2_77_1249_F3
+T210313231033023130202333222030210312202012322
+>2_77_1309_F3
+T110232332033013223021303322012310202212322122
+>2_77_1399_F3,1172071.1
+T021112200003130123020220302002300331200333122
+>2_77_1422_F3,1332957.1
+T322133320223022001112332230003130331011110300
+>2_77_1436_F3
+T022202111021121333122222312332111232102212322
+>2_77_1465_F3,1737926.0
+T113302100033233033023101332100023011311112313
+>2_77_1594_F3,-1193919.2
+T122012330130001303003323011003110031000110301
+>2_77_1951_F3,-112818.1
+T112331013031012310213203211120013120131200322
+>2_78_153_F3
+T332212022003223020321222230023332112232220222
+>2_78_259_F3
+T310230322211003220323313313312120020222221222
+>2_78_401_F3,1875667.0
+T212213210110110100232012300323031310132213322
+>2_78_539_F3
+T122310210311221023001103111303112211222212222
+>2_78_642_F3
+T123103333311303121331330310122122321222110222
+>2_78_723_F3
+T232131103331320331302321233103121212232312323
+>2_78_891_F3,1233926.1
+T332100023011102200111001230322103203332200322
+>2_78_925_F3
+T330202202013210111211030211210100300032010322
+>2_78_948_F3
+T122000211003211003130230311032110121111011220
+>2_78_1106_F3
+T220103303332033320233221233023113001202022221
+>2_78_1162_F3
+T132103131133002230223221032222232223330022322
+>2_78_1243_F3
+T330322131111001212203300333210222112203112122
+>2_78_1547_F3
+T111233230113301010011120112300111011210111321
+>2_78_1555_F3,-1904008.2
+T013121212123320032010122021103330103112311322
+>2_78_1663_F3,858541.2
+T100312101233222200220222310222300001311113312
+>2_78_1674_F3
+T022312121322311000100103001031010111122121310
+>2_79_175_F3,1934876.1
+T322233021010013010311020120202010211220122200
+>2_79_444_F3
+T130111300021210123001302113030122021111010232
+>2_79_455_F3
+T330111213223001322311303233212322221221022223
+>2_79_531_F3,-1795164.0
+T111230131231232120220232332100231221302211231
+>2_79_675_F3
+T031002021223320301212203030102010003102300110
+>2_79_840_F3
+T230132320133232130121311223121310112222212222
+>2_79_851_F3,-1501565.1
+T313220202211112121312310010021233112232002322
+>2_79_916_F3
+T131103222103211132303130013200132001232012220
+>2_79_1031_F3,-350382.1
+T202021023311000013000323301023000303102210321
+>2_79_1216_F3
+T110300222110000033010311212101111301112011201
+>2_79_1258_F3
+T330002310033231323112310033110332303103022222
+>2_79_1285_F3,691221.1
+T033012303101312031211013323232021311212111221
+>2_79_1347_F3
+T000202202302300300313303220000220112202020120
+>2_79_1418_F3,-151780.1
+T002100022101211233012210322321213222130130102
+>2_79_1444_F3,-424832.1,-330791.1,-91510.1,-20713.1
+T000001233001121013201003023302112333032111200
+>2_79_1459_F3
+T031021100211013001200323232022211123021200232
+>2_79_1561_F3,1258723.0
+T312303321323031333001010213313101000123122332
+>2_79_1577_F3
+T000100200002100101021110000100111101111211203
+>2_80_40_F3
+T010233112121032121030030230311012221100112220
+>2_80_106_F3
+T333322323102120210131303313221133323122210212
+>2_80_220_F3
+T122233032312312202033022022230122221222212222
+>2_80_684_F3,135021.1
+T313231110231033202322202023111200102221011222
+>2_80_941_F3
+T322101133002302332210222101220032300203010122
+>2_80_954_F3
+T121033132323311220231311223022320302212212322
+>2_80_1305_F3,146526.1
+T000102332010330220322321300001000300003000100
+>2_80_1411_F3
+T322131223133011313112230222211311111301120322
+>2_80_1510_F3,729956.3
+T222100210100013310231201210123120231300210112
+>2_80_1534_F3,875452.3
+T021303332212130131130101321000000002010023100
+>2_80_1565_F3,-1216991.2
+T102022123022222130012000320111231021001022022
+>2_80_1586_F3
+T111101022012330102011002313023303120203010330
+>2_80_1970_F3,1495699.0
+T222112010310211300202222022013020313001122310
+>2_81_25_F3
+T302020022033310332032122002313112322232112222
+>2_81_34_F3,-1722912.2
+T030210111121201000123321010203302233012221220
+>2_81_45_F3,-1798613.0
+T221103012103323311022220223101111022313001200
+>2_81_60_F3
+T200031210133201222210330012320103103011012220
+>2_81_110_F3,-292838.2
+T313210102121220110112033213013011213122111201
+>2_81_292_F3
+T313100110121302030122203212333112232112211222
+>2_81_312_F3
+T032132022213331230131201323222322221322310222
+>2_81_336_F3,-1696170.0
+T332221232203301011000132231220030003233230322
+>2_81_390_F3,-1894460.1
+T323120311021323022311222021222203000012211222
+>2_81_482_F3,1795789.1
+T123332110312121002101010313200002233322013301
+>2_81_510_F3,1996672.0
+T011030102033003000332101230200133231123202212
+>2_81_599_F3,1674404.0
+T302103331113301031230003322131021122100012330
+>2_81_627_F3,-659218.0
+T001211132012221222021103200102120000130012302
+>2_81_655_F3,-1672089.3
+T302311320212020130031200023233301230212200212
+>2_81_663_F3,1209827.2
+T013230302200202300330300023031100022321220122
+>2_81_689_F3
+T123000000332103322112110330211121123212332222
+>2_81_856_F3
+T332211132211012220023200221213311133223022001
+>2_81_884_F3,1852238.0
+T012302202220203332331012033300312000303011311
+>2_81_898_F3
+T212103302013033110021330013202213322322212222
+>2_81_1001_F3,-1179398.3
+T301012101032100330311211300220033331110203301
+>2_81_1055_F3
+T220113120003013033102201030200111112000122323
+>2_81_1100_F3,-1812054.1
+T120223130231320021100023302221012230121002113
+>2_81_1120_F3,791118.3
+T220213020130102330010112223200103032230122233
+>2_81_1138_F3,-1428019.1
+T120110233212231021331332210332300100203013331
+>2_81_1142_F3,-31143.1
+T323012102232200203103111102132310101300020132
+>2_81_1157_F3
+T132222221131000130222222031102110001122211110
+>2_81_1224_F3,-1678599.1
+T331130002031022000321112221232012121111020232
+>2_81_1254_F3,903221.2
+T011022003031030010022012033220102000010131202
+>2_81_1454_F3,-145453.1
+T020210202311200310202322022122013012313110211
+>2_81_1642_F3,-1723427.0
+T321213212312101002223212230130200131021101022
+>2_82_68_F3,2004808.0
+T210311212233320200121230030220203231020030210
+>2_82_237_F3,670285.0
+T213000311203321131313130130310111231001101330
+>2_82_246_F3
+T012232131110311210020101121231112121112121222
+>2_82_260_F3
+T330330102131322320012020110000320033331130332
+>2_82_332_F3,809738.1
+T021132103303300202322110201323010330033220233
+>2_82_355_F3
+T200302202300210020320003012022120222232010222
+>2_82_588_F3,310463.2
+T323201121101222011032102231321201231032010233
+>2_82_651_F3
+T123011010111012123102110013013312101212211222
+>2_82_753_F3,1930864.1
+T123301030331010331123130000330000031020202132
+>2_82_768_F3,-1268073.0
+T001121331212202332230103223032013212301013130
+>2_82_944_F3
+T321001121031022100012230121000100201202212302
+>2_82_959_F3,1465633.1
+T322333012003233212300320003330021202232001222
+>2_82_975_F3
+T012230122230101311330213313022210220111231312
+>2_82_1087_F3
+T310203103023002203313301233323300303200122312
+>2_82_1250_F3
+T012313331221000232302312031000122302200012312
+>2_82_1513_F3,222719.3
+T122303210103232220222231122223021311223202120
+>2_82_1573_F3,-1758208.0
+T102022021101011002302212120310200102211012103
+>2_82_1594_F3,-1747339.3
+T120000003232323303123120120110103133102313303
+>2_82_1652_F3,998367.1
+T212121313020001131012002301320000110031303330
+>2_82_1677_F3,1343545.0
+T003101032010002113001033102003101311302213300
+>2_82_1928_F3
+T002113313331333321332221122313233331232312222
+>2_83_264_F3,-811649.0
+T320112111220200222200311120021301133132030333
+>2_83_377_F3,1105742.1
+T312230310013222032123012020203100332201011222
+>2_83_632_F3
+T010121212133303312131322023221122232210322220
+>2_83_739_F3
+T000301000100001300010020130022000020000000120
+>2_83_822_F3
+T211031312012332233233331133221123113202222233
+>2_83_1011_F3,626403.3
+T331212320020022322013013011230120220030210330
+>2_83_1028_F3,908603.2
+T303312012111120131013311022020120233212212222
+>2_83_1032_F3
+T323122221201200030012211011220001020012002220
+>2_83_1079_F3
+T000031302031023320011222310012110003102010132
+>2_83_1311_F3,-1435055.0
+T120010200333220133102021322110002030132220100
+>2_83_1479_F3
+T202002032020000122032100010000212002220030230
+>2_83_1591_F3,-1411486.1
+T000220333212320312030303312210112000321000111
+>2_83_1607_F3,1362917.3
+T000020102200122021230012220231302033230022301
+>2_83_1937_F3
+T333122131333312313312322132232232221232212222
+>2_84_171_F3,-727143.0
+T112121223323010022000222213101232331222003201
+>2_84_185_F3
+T320302313311020310022310012202213221202212222
+>2_84_213_F3,1267722.0
+T113322312233211222002311111012202201300101301
+>2_84_454_F3
+T120231222203220120032320012212221131222312123
+>2_84_489_F3
+T332212310322223223330220303133022221022121202
+>2_84_550_F3,1862630.0
+T233231111321111102010210323121110000200123303
+>2_84_671_F3
+T210223112123112120201311121002213202220021200
+>2_84_817_F3
+T211231330020332002022322220022210213132221222
+>2_84_1054_F3,150638.3
+T002113333323333313110202010302212112123030320
+>2_84_1112_F3,1811969.1
+T020201031000131001233232130020222300001312210
+>2_84_1163_F3,-1564104.3
+T001113103302101210103220220201332310022322222
+>2_84_1288_F3,1238707.1
+T112201002232120001123032030131112211102020300
+>2_84_1346_F3,153160.0
+T320002221020112010313302120112123022232220322
+>2_84_1473_F3,-446289.1
+T202220133131210331212101212101100330023130322
+>2_84_1624_F3
+T220011221022100200201303033000320232302022330
+>2_84_1631_F3
+T113300012321210211331003311102310201332311322
+>2_85_33_F3
+T301122022311301123023312112020211111122311222
+>2_85_145_F3
+T001013301303333323213323313332330121322332222
+>2_85_197_F3,288304.1
+T221101033212212132311300110010220201310012122
+>2_85_244_F3
+T332322101310123013000201030201222221112111223
+>2_85_358_F3
+T210322202310211020300101322121222122202022222
+>2_85_682_F3
+T030003131231222312122323133010322012221312232
+>2_85_724_F3
+T213113201332113212332322213222212222322222222
+>2_85_911_F3,1525069.0
+T020331321012020210030010330320101033233012303
+>2_85_983_F3,773872.0
+T002032121100001211020321330231100032220220122
+>2_85_1129_F3,-1664965.3
+T321010222320110001103101032233001200112011100
+>2_85_1366_F3
+T331013301331331220211310033003231202110322201
+>2_85_1459_F3,501087.0
+T221200000303321022123301200111201212010210330
+>2_85_1492_F3
+T323103121300003210010302133330022301201021330
+>2_85_1521_F3,-1592160.0
+T313102103022001230021011101102021030132001120
+>2_85_1528_F3,1122680.0
+T220012301332311002233132011011000001231010332
+>2_86_29_F3,-117280.1
+T310103123301303110031102120231100301121001222
+>2_86_38_F3
+T020001223003003321331333111021123311232122322
+>2_86_262_F3,159409.3
+T103311323122022230011111311301131101313012231
+>2_86_367_F3,1196461.2
+T111121303020230021312302112011033222022211122
+>2_86_501_F3,-34037.1
+T220022322012202201331201212132123231230313332
+>2_86_692_F3,-1717995.3,-1356511.3,1671851.3,1472219.3,972911.3
+T222103222133001113212032313011001220331120222
+>2_86_978_F3
+T030030102232020310201301233002220212132121321
+>2_86_1035_F3
+T332212222213220333001300213130131102132022100
+>2_86_1141_F3,899131.3
+T020200130232203022033200011313101131111212222
+>2_86_1188_F3,426342.3,22223.3
+T030022122310323221210101122233030111200030322
+>2_86_1301_F3,776044.1
+T100320313111213023113020011223112201210011120
+>2_86_1351_F3
+T130012312202310202220113212102100221212010122
+>2_86_1374_F3,1378325.2
+T300302301111221230330310202031120313230011322
+>2_86_1389_F3,-1209682.1
+T220022013323302032212312202013230003221122002
+>2_86_1431_F3,-1714832.1
+T323023110011102220103102123222010112331022302
+>2_86_1489_F3
+T213001130330223200010002320310222330200000330
+>2_86_1505_F3
+T003103120333130022332221213003321000010212132
+>2_86_1534_F3
+T012120101131333231321303321012112100132301220
+>2_86_1545_F3
+T321100202221130032010230322203320121200002322
+>2_86_1596_F3
+T322220302232120311121110112110111211112111221
+>2_86_1644_F3
+T321113211331113322322222233223222232212312222
+>2_86_1942_F3
+T231303110132233010220103320322310221112001322
+>2_87_51_F3
+T303202331212130203130123220310023321312221222
+>2_87_98_F3,-1511829.1
+T030102132210231112121203232332231302133011222
+>2_87_309_F3,-1821757.2
+T201020232023011231202233221020002032103320203
+>2_87_598_F3,366905.2
+T130333033002221001331212103213012221322010020
+>2_87_630_F3,-705771.0
+T002010212302122132230221110100313323332230202
+>2_87_661_F3,-1213251.2
+T120002211131221213320132021320233331222122322
+>2_87_706_F3,-1521117.2
+T030211203120321321302110031301100132122320222
+>2_87_871_F3
+T310100003002311000200000020220300122222012222
+>2_87_973_F3
+T012213120132220312003211013033110102112011220
+>2_87_996_F3,1843459.1
+T201022220322000220001032212231333321011032322
+>2_87_1256_F3,-1663827.0
+T212203022033221310322332211210223003311333322
+>2_87_1308_F3
+T120021301000220201322000030021010101113210210
+>2_87_1335_F3,1962218.1
+T020010302133320111012122302132023320211011113
+>2_87_1623_F3
+T332230121320010000200103103003020031332030111
+>2_87_1627_F3
+T101312033120010220000203302002121131112013220
+>2_87_1983_F3,989270.1
+T130132220123000120121322020330012330010110101
+>2_88_34_F3
+T302202132011202213102312101221011211322123220
+>2_88_56_F3,-1863990.0
+T322310100131022302121300320112210103310333321
+>2_88_167_F3,-1783644.3
+T013321032321103112320220122002022212031320323
+>2_88_205_F3,1078225.0
+T003122020030011003003001131330023220032302222
+>2_88_253_F3
+T323021022113100013011223211222312121133210222
+>2_88_446_F3
+T001233003300002023303311322132000122312203222
+>2_88_486_F3,1889828.3
+T220010212123101300302200122213123011300302210
+>2_88_532_F3,-1520260.3
+T012003020032103031211130001300013000012000220
+>2_88_702_F3
+T021111200320320120010022020022230311302200222
+>2_88_725_F3
+T233333311133113222133322211223132222222211222
+>2_88_815_F3
+T300331203101220002202112111210201223333002300
+>2_88_820_F3,1628795.0
+T122233032310213210010321100120222113131113330
+>2_88_884_F3,1513914.2
+T121011003233232013030300210123100001230210300
+>2_88_967_F3,-1178412.1
+T210001010003221332233230112233331001200001103
+>2_88_1163_F3,-1201392.1
+T011012030321332022112011221023330201323322233
+>2_88_1200_F3,-48345.1
+T122132323011112200011033001023210022102002312
+>2_88_1411_F3
+T331213331312300302120022001200222003200012302
+>2_88_1567_F3
+T002213220310312311030310212020320121012010222
+>2_88_1994_F3
+T300220220010323300021212012100111221212012220
+>2_89_149_F3
+T033302102223312021101333322332323102221320321
+>2_89_174_F3,-1547879.1
+T133111001230321132222220111033013110033203322
+>2_89_244_F3
+T330110000022110132210112213222132123113211222
+>2_89_344_F3
+T330322122211202033121330011302312320221231322
+>2_89_424_F3
+T311032133323301132101122023113110122113012222
+>2_89_548_F3,588137.0
+T020321101121310331002320033111032311200211222
+>2_89_782_F3
+T131103122321300331312233023300223022232212320
+>2_89_831_F3
+T111032223030022120020102011001112031003110331
+>2_89_864_F3,379504.1
+T011030031022102202202230213233013302322232322
+>2_89_988_F3,-37584.0
+T212021013020323310013330120223002100113000202
+>2_89_1006_F3,-1212643.0
+T203100012101003120000313212302122330201223332
+>2_89_1154_F3,1893518.0
+T232022032133102021010103022102211130120122302
+>2_89_1313_F3
+T312232312231130120101103032221220002212222122
+>2_89_1343_F3,573677.1
+T301322120131233212201112301033230212312211132
+>2_89_1422_F3,1251852.0
+T001301033132321101030002002300330103033110312
+>2_89_1479_F3,36703.0
+T110121312320233002333120210112320131132112222
+>2_89_1496_F3,818697.3
+T210110022301301123323202221020313313100211301
+>2_89_1518_F3,-411136.1
+T033031010230202132231122233211233202202211123
+>2_89_1652_F3,-976885.2
+T030120033203331021133011223000112132002310322
+>2_89_1964_F3
+T302101121301031010020103011022012201132011322
+>2_90_50_F3,982648.1
+T000302313330220000130131020310030022213222230
+>2_90_137_F3,-1793161.1
+T122323212013123001321202213331001121323012222
+>2_90_307_F3,1827537.1
+T100122332213203101000130003323033103000101233
+>2_90_434_F3
+T211103132333111211233321323200312212322213322
+>2_90_449_F3,-66784.2
+T031302111321021003321210313022011302112021221
+>2_90_462_F3
+T123202132132200203333320211232222221312111222
+>2_90_538_F3,-1529642.1
+T021002323131102200222301020101213032320202223
+>2_90_841_F3,1920097.2
+T333010213212333112331002001002102131200033330
+>2_90_1012_F3
+T213123322121022312231310213031110022212212223
+>2_90_1125_F3,868313.0
+T310320121311000120203200013222112300120001123
+>2_90_1207_F3
+T323101320131232333002320133213210103232212332
+>2_90_1323_F3,-636697.2
+T311301121022200200002221102023310021012210222
+>2_90_1383_F3,1812999.2
+T022121211201101030210322132201033020300101121
+>2_90_1415_F3,129039.2
+T300320032330232001130302301301202203222200222
+>2_90_1454_F3,-1555343.1
+T031110100000133200102221201231231332212011231
+>2_90_1491_F3,-1238628.1
+T023103110030102132331211021021120302212212330
+>2_90_1585_F3
+T020103102023021112120212332012310132202212332
+>2_90_1589_F3
+T021223011120300131022212123122120200212012222
+>2_90_1631_F3
+T131233311331210212321231021101210201332212312
+>2_90_1981_F3,-750501.2
+T130130300100222313231112020331002222132000120
+>2_91_41_F3,1896116.2
+T100112101013313102300331101221312330200201233
+>2_91_233_F3
+T000202003333232213322031320221012222222110221
+>2_91_293_F3,215894.1
+T103102320102222101202231230212231221212212222
+>2_91_326_F3
+T010122222332211102213323211222323321232212222
+>2_91_340_F3,-954773.3
+T221322102321203012310222023001210200222201322
+>2_91_349_F3
+T332213313321033313122312013222311312212212222
+>2_91_392_F3,630649.1
+T223032101013320202310200222001031233121231212
+>2_91_417_F3
+T212230012101321023321331021201012122113301222
+>2_91_834_F3
+T110121221230211112231300032001122231223312212
+>2_91_893_F3,185556.3
+T031013010300103230302012102210030020310200202
+>2_91_975_F3
+T123003322033133232201321022110330201102011120
+>2_91_993_F3,40511.0
+T000132221012103103321010300031130221303211230
+>2_91_1063_F3,-1711825.1
+T111021101330101311101301310012310301303110311
+>2_91_1083_F3
+T301211322033030323330132232103330023002002221
+>2_91_1148_F3
+T330112122112302323322222222222222222222222222
+>2_91_1169_F3
+T001020222010020310001312002000230102312010100
+>2_91_1234_F3
+T220202310321001321300012321100310110330010122
+>2_91_1409_F3
+T001220321300313032111032310220102001200033320
+>2_91_1425_F3
+T311032221021300130030212003100001100012110020
+>2_91_1532_F3
+T000031101110201033001013110221112211112111211
+>2_91_1557_F3
+T200033311121113303201212233110201221312111222
+>2_91_1577_F3
+T232013322213231310202230021331132103230012321
+>2_91_1971_F3,2006427.0
+T030210121111302231122022012032332033030121310
+>2_92_91_F3,1118790.2
+T220020221030200211332130223232223122201302100
+>2_92_169_F3,406252.2
+T000130212300023012202212233222020201210132301
+>2_92_253_F3,213134.1
+T333122303232100013010223210122303020033230222
+>2_92_363_F3
+T313031322111130332202210111200210021112231222
+>2_92_512_F3
+T000000132030323223323000102020010220112131222
+>2_92_577_F3,1938041.3
+T002133022111220113000031233211322303331031200
+>2_92_758_F3
+T331133110132101123101333321202222132211012201
+>2_92_961_F3,818088.1
+T213013102302012300001212230000313232031330322
+>2_92_997_F3,808191.1
+T303020122301110202021122210200013202223102330
+>2_92_1066_F3
+T331200122102302333331201012032001200220000301
+>2_92_1257_F3,-177913.1
+T012212001101302132220332123310003331320321102
+>2_92_1359_F3,1315921.1
+T131030022322001203320102031133111101201211201
+>2_92_1460_F3,1937574.0,-58592.0
+T000002122133211003313021123200030022012222222
+>2_92_1499_F3,-1702510.2
+T221122211011020330112101100022013310220222300
+>2_93_202_F3,-633986.1
+T201231000130313010201330231102123133011330220
+>2_93_230_F3
+T300220022222211222222112102222212222112212222
+>2_93_518_F3,-845869.3
+T330321221312003330333312012203121120200332322
+>2_93_782_F3
+T321222122113301121323233233301213112330022332
+>2_93_868_F3
+T210001133212020220001230223323312322310112303
+>2_93_980_F3
+T311120312023102331333310233032123322201212202
+>2_93_1038_F3,1991935.0
+T022112023120120312113231223301233103230012332
+>2_93_1090_F3
+T320320223000332203330211213313203302033302322
+>2_93_1101_F3,1410274.1
+T022123201003222033102331002221311302023011330
+>2_93_1177_F3,1638586.1
+T331121033322230212201103201221300221013010221
+>2_93_1202_F3
+T331333003123333300211111233122312202202222111
+>2_93_1237_F3
+T301300120232301101323112232220310222102121120
+>2_93_1249_F3,-1705004.0
+T210010101200211110221111212331210112120211112
+>2_93_1304_F3,48738.0
+T122203011032102013211100313233001101302112222
+>2_93_1315_F3
+T210002322122220120022202121021111221212210222
+>2_93_1494_F3,-164580.3
+T000113202013012000311223112000100311300022101
+>2_93_1664_F3,278825.2
+T212322200103102010010020031303223331000000121
+>2_93_1945_F3,-1925905.1
+T020332003313222002321013032100313321302030112
+>2_94_81_F3,-84334.3
+T301321112012112230221032011213222211312123222
+>2_94_114_F3,-702904.0
+T213213011013302003210223322111033011010321332
+>2_94_151_F3,1010639.2
+T232130200011133202103320032332202100231102100
+>2_94_175_F3,195271.0
+T122230000222330232232311101222112312131220320
+>2_94_285_F3,478335.0
+T021011233320111010123120210130200132113222231
+>2_94_354_F3,95213.0
+T331233131003220210202021222200101020031012220
+>2_94_383_F3,1127002.0
+T231002022211213221222113033000002111033212220
+>2_94_433_F3,1660673.0
+T201030130103121101203321322101311013212113231
+>2_94_449_F3,-522416.1
+T321201222203231303212103330332023232000132131
+>2_94_463_F3,-1760483.3
+T111212132122000203321320212131212221122011222
+>2_94_535_F3,-381977.0
+T332130012101012001200130032102213031100131122
+>2_94_561_F3
+T331233302133201321032222122022312222222212222
+>2_94_668_F3
+T231221332222313210102233022333222020003332220
+>2_94_699_F3
+T232131331220200122031001323210022132212110222
+>2_94_772_F3,-1087747.2
+T301200332130201300212231220221220211131101231
+>2_94_842_F3
+T333220230210220211031311211223213022210212232
+>2_94_1079_F3,-808083.1
+T132010121032203323230132131000320113111000000
+>2_94_1214_F3,-546062.1
+T220022101131012122301232123000130320332332322
+>2_94_1369_F3
+T200212120221122103231202223223330120212130232
+>2_94_1395_F3
+T220023021201000131213020032100120131201010222
+>2_94_1464_F3
+T000000102101010000002012010100110101112011010
+>2_94_1471_F3,-317045.0
+T200010121323312113312202011300033010010020311
+>2_94_1624_F3
+T212321013031030002032013033102330100333121322
+>2_94_2019_F3,-1876221.1
+T111021001231220210101201011123001201233110311
+>2_95_48_F3,-1544222.3
+T123031200111310101300321011123302301032212222
+>2_95_155_F3
+T200333320101120202102323012212202200111102232
+>2_95_263_F3,-156477.2
+T300320312102130022013100101232300033300032233
+>2_95_274_F3,206103.1
+T320121100301202231100210003213002122110032221
+>2_95_388_F3
+T300023110321000001233000332232122222212322222
+>2_95_409_F3,-699078.1
+T030003013032033331222012233030222303232200222
+>2_95_419_F3,-733668.1
+T130303012000112302311120001313011122223300303
+>2_95_458_F3
+T220220021131213220033323310023202212222322222
+>2_95_474_F3,559670.2
+T213010133101230123221210313311220222232020222
+>2_95_487_F3
+T313110323102321213232300330212002222102212322
+>2_95_566_F3
+T323023222113020310131233132120310321232221322
+>2_95_592_F3,271957.1
+T001301023310013120003210032200123001011300130
+>2_95_610_F3,1867095.3
+T201000322002012200100333321332310011200113211
+>2_95_628_F3,-1282511.1
+T131321032311211011122203132020122223212111222
+>2_95_794_F3,1954581.3
+T210010332230322233023103013113200220300101322
+>2_95_950_F3,470557.1
+T111200123102212221133023012101200302021101322
+>2_95_1003_F3
+T213221332321011131303320013023112101302312202
+>2_95_1053_F3,1797179.2
+T332222332223130122221321032013320121100231322
+>2_95_1058_F3
+T332102101323300232331332232232321232201212320
+>2_95_1088_F3,1479845.0
+T310130303201322203320211213013203302131301312
+>2_95_1220_F3,550537.1
+T000010032113030020332333220330320300202102122
+>2_95_1409_F3
+T331010222102320211120103010101212220130201231
+>2_95_1928_F3
+T133112313333211311322222232222122221232211222
+>2_96_87_F3,-792069.1
+T302210211113211231032123112101121132212101223
+>2_96_249_F3
+T120322203320331212311322112321102221112212220
+>2_96_309_F3,1755955.2
+T133202201012000010212110131001130101120321211
+>2_96_465_F3
+T302103332122233232321322012031202201222012222
+>2_96_539_F3,27431.3
+T030220312120323023012201002323323021122222222
+>2_96_695_F3
+T232201123222223120212311132320130101321121210
+>2_96_816_F3,-1380359.3
+T020100331030321022223002320322110022033032320
+>2_96_865_F3
+T203321233032322222232320213102120232200322322
+>2_96_990_F3,130556.3
+T301131202200321303202000032020120201112010222
+>2_96_1067_F3
+T330201120002300232023203032002210202202012320
+>2_96_1226_F3,621484.1
+T223031203102031221023202130212102210212232221
+>2_96_1309_F3,-1029079.3
+T220102233130210121221222010033222100223302200
+>2_96_1364_F3
+T231313120331100222232113333100320102212032322
+>2_96_1497_F3,1999167.0
+T033002112010210122132013302002211311313011220
+>2_96_1522_F3,-1562979.1
+T002030100233213201220002201213000111112001112
+>2_96_1557_F3,994360.0
+T301110212312023123100202003213013010322110111
+>2_96_1616_F3
+T332013012220211122021312110232110121112211222
+>2_96_1669_F3
+T220000210121100303300013012100230001302000130
+>2_97_237_F3,-1344519.0
+T312201223322110310123212010122202211122021120
+>2_97_351_F3
+T311311302221022211212332233200213112001121222
+>2_97_377_F3
+T011123320133220103233013130222110323123110212
+>2_97_406_F3,-559054.3
+T202003001101202221331221321011230100320010122
+>2_97_547_F3
+T210121321023110333321330122223222123201012222
+>2_97_715_F3,310632.0
+T120001022010121300103101212012203221103233300
+>2_97_743_F3
+T010333312331322211233331232311320011313322322
+>2_97_809_F3
+T313313201333012313313312021011213121210112211
+>2_97_853_F3
+T132120003002300012310303032220300200232022222
+>2_97_967_F3,-1647842.1
+T021013033123022311010322113321320310333332222
+>2_97_984_F3
+T012220200010012300321321210012000231132310321
+>2_97_1016_F3,-338665.1
+T000221301012213210111301013003311111331313113
+>2_97_1041_F3,288092.1
+T102311101213210311230331112210130211111111210
+>2_97_1199_F3,-779925.1
+T302223003323202303311111000220011012011130203
+>2_97_1235_F3,1727575.1
+T203320001120220132002010031210012202001030123
+>2_97_1402_F3,427213.0
+T003210110022123001011101020030111221000110110
+>2_97_1472_F3
+T330212302123203001212202021020000120020220100
+>2_97_1517_F3,1034801.2
+T100032020001101001222201322222300223111322230
+>2_97_1633_F3,-213160.0
+T001232201031000123230322133103130200103300331
+>2_97_1643_F3
+T300033210030330000212103200021222322332030220
+>2_97_1953_F3,1256120.2
+T231113112323000023001100033332231323233121113
+>2_97_1985_F3,1084424.3
+T203121110310100222330103211100031111010330222
+>2_98_151_F3,161397.1
+T212101003312103222202010303302202232101303210
+>2_98_519_F3
+T330220300303103122120312210212321312132232222
+>2_98_619_F3
+T120033210233112230103311311002223001232012113
+>2_98_657_F3
+T331200220301030023201221020122332022101231322
+>2_98_686_F3
+T002110101200100310100220110012031011110011232
+>2_98_725_F3
+T331022333002210032302131100201202312023122320
+>2_98_792_F3
+T130322333023020200011133011232100331300212002
+>2_98_1124_F3
+T210111123133310122222211033011212113212012220
+>2_98_1186_F3,-431350.1
+T333120132332201222022022130100132032110212312
+>2_98_1421_F3,697327.1
+T321102130332123132023321010023011221122110122
+>2_98_1490_F3,536179.0
+T001103112222012330030000133022130211102200112
+>2_98_1621_F3,-728735.0
+T221010103033110011132210233200210100031121210
+>2_99_56_F3,-138306.0
+T000210302312130011123002202110202102013300223
+>2_99_178_F3,1914101.3
+T003120002200000002200112200000000320222002221
+>2_99_229_F3,-155612.0
+T323032223013011110102111013021313002201113322
+>2_99_307_F3,808973.2
+T003101202123311023322122203122011201132003222
+>2_99_363_F3,-1009745.0
+T202001200320123313322332222013120302032200322
+>2_99_500_F3,1875254.0
+T021102102321322130102202101302101011130211222
+>2_99_583_F3,438131.2
+T220032100322211121203020130000313031330312222
+>2_99_597_F3
+T313001301102211202310020311022322031022010321
+>2_99_704_F3
+T331010003003033032322300323111211222012103220
+>2_99_720_F3,-41665.2
+T000020211020201032000322130023212230332001200
+>2_99_760_F3
+T230022211033202232121121223222122223222222222
+>2_99_843_F3
+T330221333013202230301212211001320012213212232
+>2_99_1088_F3
+T310210301101302202122221013312330012101022120
+>2_99_1171_F3
+T201103133022230000102212023220200201011020110
+>2_99_1276_F3,-1265146.3
+T322011113002110002102012213010232000303013101
+>2_99_1306_F3
+T222201320032330131002301113111221232223311200
+>2_99_1351_F3
+T023033300122022221310201103021122111300013311
+>2_99_1433_F3
+T211113322110130121113313113111130111111110221
+>2_99_1552_F3
+T201331210122213222210132231113111202010020112
+>2_99_1560_F3,-1061456.2
+T221011330100123131220212023330231031012213211
+>2_99_1652_F3
+T201321012023233012001110011111111121111111222
+>2_100_61_F3,424460.1,330419.1,91138.1,20341.1
+T001233302021131232221331202112311222022311223
+>2_100_113_F3,1305315.0
+T302130032100032132323002201322032003332002213
+>2_100_171_F3,1336826.0
+T201312330033220312112221121120202211312101121
+>2_100_336_F3
+T321001213133231123231021221231112311032011323
+>2_100_449_F3,103376.1
+T003322003121002310013301032000000020310103210
+>2_100_460_F3,1941738.0
+T002001000332122221032000221313210312333322302
+>2_100_472_F3
+T330113120231212113001231112020110032323111222
+>2_100_489_F3
+T311233321121320231231320021301112232222012221
+>2_100_567_F3,1147687.0,1147087.0,1146787.0,1146487.0
+T023320221233003122101101102011021311021121101
+>2_100_602_F3,-847100.1
+T003302333023231321323023310022032210231201321
+>2_100_669_F3,256721.0
+T201011310122023333020031110100022232210221222
+>2_100_838_F3
+T331023323331231132103322131322313111210122122
+>2_100_859_F3
+T122303102113330230103212022030222332222012120
+>2_100_863_F3
+T202311121031322122332321120012221032323012202
+>2_100_866_F3,1672211.3,1472579.3,973271.3,-1717635.3,-1356151.3
+T203121022000302222010321210202220132123012202
+>2_100_981_F3,-624656.0
+T321022311210200133123320100011100331321003300
+>2_100_1004_F3
+T202210321001322030330020002203212203022001223
+>2_100_1009_F3
+T233332331013111213301211332210222103330012321
+>2_100_1019_F3,1414221.0
+T012213010023013321100121020102122033231013210
+>2_100_1044_F3,1118156.3
+T300212202233223312233231232230033330300220220
+>2_100_1237_F3
+T332302300121113203303130032100213202021011110
+>2_100_1264_F3,164694.2
+T012020032213012230212313003112103112212211022
+>2_100_1329_F3
+T300221102203000030012222212221133301100000102
+>2_100_1395_F3,6784.1
+T000011211313103212200111020310320332203000100
+>2_100_1604_F3
+T310021110201033320303213133100221322313222222
+>2_101_138_F3,1194939.1
+T103312112323021000220100120323032033013323200
+>2_101_144_F3,-1368614.2
+T010302013332022333201200001100313202122201222
+>2_101_190_F3,-385404.1
+T000201002302100233321000020203331022132233212
+>2_101_280_F3,-425476.0,-331435.0,-92154.0,-21357.0
+T332031012233231030020201331302010110303321223
+>2_101_563_F3,1520640.1
+T330221323003011213322023320113223312033200303
+>2_101_589_F3,-1447290.0
+T120311001133231101332322310231210110103112221
+>2_101_608_F3,893900.1
+T112010310313212231331021321011122231322111220
+>2_101_663_F3,-831801.0
+T133133213010230220021231022312111231213213212
+>2_101_736_F3,1899631.2
+T013103201230210022230002131002030021300200112
+>2_101_874_F3
+T221232230133013200322301223221110022220222302
+>2_101_1024_F3
+T012213300223310333123220122020212123220012213
+>2_101_1108_F3
+T030301200211202011013012032311330210302033202
+>2_101_1219_F3,1301491.1
+T102122202200221121002233300000110301132112101
+>2_101_1286_F3,248295.2
+T311132121303100001233213312122201302110103231
+>2_101_1303_F3
+T002123302001332001002203123213330223203322122
+>2_101_1628_F3,1011423.0,926231.0,-730379.0
+T031212121133000120000001230132312213232120100
+>2_101_1966_F3
+T122211223001121133102113012120010201232012300
+>2_101_2034_F3,-845302.0
+T003320211223232011022223300230122231100330122
+>2_102_93_F3,390613.0
+T001220320012033332313200231032122231112201202
+>2_102_166_F3,-408647.0
+T213333003201013313300120002013203132223323221
+>2_102_263_F3,-1657730.0
+T110321220120001022200123310020300231202122120
+>2_102_328_F3,-1652908.3
+T313203210312012010322121100102012213112101222
+>2_102_616_F3,1344650.1
+T220103101122130020001013213022100320200000330
+>2_102_905_F3,-643474.1
+T300101322110131101210101223010312031121000333
+>2_102_1031_F3,-286827.2
+T100031210123112100223013012330211210011210132
+>2_102_1276_F3,646392.0
+T322311233132332201020212023231233303303012130
+>2_102_1335_F3,-1303980.3
+T110032122021111233013023320310111332331330313
+>2_102_1467_F3,-1990746.3
+T002221213302122030312000311000010323310021103
+>2_102_1492_F3,-114649.0
+T202210030220120110330321231220310231102001322
+>2_102_1540_F3,-1373213.2
+T000002311320000022312033211230000100102331323
+>2_102_1552_F3,-1872070.0
+T212230002103212320220132212123011202310002110
+>2_102_1565_F3
+T311131333312133233123233323223232222332322222
+>2_102_1584_F3,744277.1
+T303031100331103231310123020010210200103013100
+>2_102_1660_F3
+T130212232011301110001122121030011322110111202
+>2_102_1923_F3
+T333133133322323231323222123222312223212232222
+>2_103_204_F3
+T300100112100310212201122311303121321212113222
+>2_103_343_F3
+T133222130032322320203230323222110231222312333
+>2_103_526_F3
+T300203020003223333023032120222312222102222222
+>2_103_602_F3,-164810.2
+T010102313022030330311221123011013210133012311
+>2_103_658_F3
+T231013132011301221121233122230110302102021320
+>2_103_724_F3
+T330303303232313332022231232000312212203112332
+>2_103_742_F3
+T320223112203310311101110032301130012202322120
+>2_103_750_F3,722034.0
+T320200032001000301311231100113003303302323320
+>2_103_765_F3,-818693.1
+T221002030230331001010323113210321300211031203
+>2_103_787_F3,-1352766.0
+T000203213212021123122100323121002020013232220
+>2_103_821_F3
+T330231331130301232301121033132221011322112121
+>2_103_1140_F3,1285019.3
+T323132102331300310203311012301011130113002030
+>2_103_1156_F3,1715517.2
+T220202322000031230203212320320300012000020130
+>2_103_1227_F3,291417.1
+T221313120133201321222103032331012211011121321
+>2_103_1402_F3,-302050.2
+T000212212102112000010032202332022202130021230
+>2_103_1603_F3,1913606.0
+T012031132200213321103230000310202310312122211
+>2_104_234_F3,1331946.1
+T020000023322110010123103120030220222003320312
+>2_104_388_F3
+T122312122001233301020222202221222210112212222
+>2_104_436_F3
+T020021131112220122222101010010100021332312220
+>2_104_458_F3,576595.0
+T230233313312120002301212222223302332221122100
+>2_104_561_F3,-950189.1
+T113031103030003000101210320122012321033210330
+>2_104_594_F3
+T022201321120130231323022112300322201023010222
+>2_104_597_F3
+T020101301120130231023022010000100221002000220
+>2_104_711_F3,256579.2
+T021002231322010022102202201200020301203012300
+>2_104_715_F3,358986.0
+T000130200112210021110233122000022310133103223
+>2_104_871_F3,-777200.0
+T223302202133030202202330220221103012232122300
+>2_104_977_F3
+T313103330133131311303320132121213212212212223
+>2_104_1087_F3,-792244.3
+T302332233203220101011213200023000002223023130
+>2_104_1144_F3
+T312000312001033010303033033000300200113002300
+>2_104_1238_F3,1970189.0
+T303120320013312123210011002302021003230312313
+>2_104_1293_F3,-786833.1
+T133302230002130020132230113002112233022110112
+>2_104_1302_F3,-74164.3
+T001103312101232001322202100231011013112320100
+>2_104_1320_F3,1903775.1
+T001120320300301212301212113320131003123220321
+>2_104_1475_F3,1742927.0
+T032310220110333222022310002323121201321030120
+>2_104_1639_F3,1672315.0,1472694.0,973386.0,-1717520.0,-1356036.0
+T330310212033312113022132222233020210202103222
+>2_105_99_F3,1138734.2
+T210220300101100002011200000031332322212211222
+>2_105_152_F3
+T321311011321321220032202122122213212102301221
+>2_105_179_F3,1360505.2
+T003210011200221003112202102100111201012210221
+>2_105_319_F3
+T123210233110222001012031030231323323213112222
+>2_105_468_F3
+T332103332023322311311201111103132311222222223
+>2_105_510_F3,1431857.0
+T310122323121002211102131110032133211130320132
+>2_105_587_F3,-1490209.1
+T233202201201100031233123002100300310130121232
+>2_105_915_F3,-893339.0
+T110131202232021230130100013200211230101200311
+>2_105_1060_F3,653003.3
+T111001322103311000302303110110311033212313211
+>2_105_1207_F3,-1848037.3
+T002200102322103132133123320210233121111300302
+>2_105_1271_F3,1555589.1
+T232312112221302323220203212330110111222232202
+>2_105_1429_F3,568446.0
+T202012310122220223323320022000031010300001103
+>2_106_70_F3,1658343.2
+T032023101210212301301122313210113121112312222
+>2_106_127_F3,-155349.3
+T331320230300332220023222121130332323032222222
+>2_106_157_F3,953594.1
+T021101302102333132112000310002131010120132201
+>2_106_398_F3,-1408545.2
+T030301022312100001103331130031132331011110222
+>2_106_725_F3
+T130221302221311312021331223002311221102022222
+>2_106_798_F3
+T112220133103331000221001230122101321022310122
+>2_106_840_F3,-1767659.3
+T322003212232222212020021201021220222222202220
+>2_106_928_F3
+T302300132012302230020111013312011220130200210
+>2_106_1045_F3,1631310.0
+T002110121321233211010310010102003101011200232
+>2_106_1075_F3,919375.0
+T223102223313212101232201210012112112233230113
+>2_106_1185_F3,-669882.1
+T331303331013312311012033000233133300000031230
+>2_106_1197_F3,768720.1
+T001011230203101223130012112210122012222332223
+>2_106_1250_F3
+T031003023312223011200130211303110101102233310
+>2_106_1306_F3,740902.1
+T112210210122020212012312201300101201212302220
+>2_106_1348_F3
+T331122100022021013013000301002310111100220110
+>2_106_1537_F3,-1403184.0
+T101121010302112321012311322332133000032010130
+>2_107_217_F3,1049292.2
+T023032113333221100302002000022300132202032221
+>2_107_236_F3,-95159.0
+T323311123020123320021320223323323121203032201
+>2_107_277_F3
+T021312303222002331302201323210232212222232222
+>2_107_287_F3,731681.2
+T133213203221121312003021322312122031332020220
+>2_107_301_F3
+T010123220322202013221002023132001020312312222
+>2_107_367_F3,-1107529.3
+T302103322013313012021331110010102221220233222
+>2_107_394_F3
+T022001010122003022201022312123120223232232322
+>2_107_525_F3
+T100310311123213330002322020223202202022123221
+>2_107_705_F3
+T300223300123212230223203213322233323300232322
+>2_107_895_F3
+T110333310322303312131303032122220033212232220
+>2_107_1140_F3,-1538208.1
+T033122333012013222033110131321003100031300211
+>2_107_1241_F3
+T102133202003333112301011030002031001002012320
+>2_107_1259_F3
+T330023321333201220023220033322331003223233103
+>2_107_1370_F3,-425377.0,-331336.0,-92055.0,-21258.0
+T310200311122213320312300201010130221200020102
+>2_107_1385_F3
+T010311230002112230033111101121210000112331223
+>2_107_1426_F3
+T002010332303220021111300210002032030122201120
+>2_107_1932_F3
+T333111111233110323203123211332212322322222222
+>2_107_1948_F3,-233273.1
+T230123032120110222023223302201013131030232330
+>2_108_50_F3,3497.0
+T220222133132302123130030112000132231132110222
+>2_108_495_F3,-1478016.3
+T002013212312233020023330330301010031010000330
+>2_108_584_F3,-112989.1
+T201323111012233233330130331102122123112231220
+>2_108_689_F3
+T001230320301223001230011132010132222302210222
+>2_108_745_F3,-619664.1
+T331313121103110110213121101213032310221010123
+>2_108_814_F3
+T111103311223132122013313312202221122211322222
+>2_108_834_F3
+T120003311113233033030031121033130333221121222
+>2_108_923_F3
+T000222332022302210320313211033203202100012330
+>2_108_936_F3
+T102000201101210232300322103212020100332011220
+>2_108_1027_F3,1454780.0
+T000333311233202312222222001031102013002232103
+>2_108_1050_F3
+T030130021200202023010021210120130100312000222
+>2_108_1053_F3,1727583.0
+T312022013200201003121001220200133002330303300
+>2_108_1116_F3
+T331111112323111211013233132323122333201122213
+>2_108_1176_F3,1431527.1
+T021003220220300003012223303000022000202002100
+>2_108_1256_F3
+T332001223132201021021200000032330003200223100
+>2_108_1265_F3
+T002130300012013103330322330302100331010001200
+>2_108_1273_F3,826665.3
+T332103002212021231233112223120120201202212120
+>2_108_1350_F3,-367031.0
+T001033100030032130211010313012320112200220110
+>2_108_1475_F3
+T201032002332312232130200012333111101313001113
+>2_108_1562_F3,-1108439.2
+T312031303310101233120232003201233210012131100
+>2_108_1576_F3,-369532.3
+T220213210322223220300221031012000110000122101
+>2_108_1634_F3,-563581.0
+T330010130103020002100033022310132201322213313
+>2_109_58_F3,84354.2
+T320211230103213012321303323113110032132111221
+>2_109_167_F3,-413488.2
+T122110233223210213233032021211002321333223221
+>2_109_187_F3
+T330010130001331112120133221222311221112221222
+>2_109_297_F3,-246589.1
+T002332111203201330323322003320110123021313222
+>2_109_313_F3
+T000120220332133300012121001220003322222112222
+>2_109_363_F3
+T020102020003220202222021313022312222122210222
+>2_109_391_F3
+T220233131122033002010002122021220222222122222
+>2_109_482_F3,-1709742.2
+T133231231302130332112312330221123301222313322
+>2_109_645_F3,1905723.1
+T213310130130100321000312033000023133201000220
+>2_109_739_F3,-1810702.2
+T001231330200012312312313213230302011010010122
+>2_109_762_F3
+T322233323033201312001310122122322222202012222
+>2_109_775_F3
+T310120003133200032220032010200200200121011330
+>2_109_799_F3
+T230213133101311212322113200101211212112321121
+>2_109_889_F3,-1364385.2
+T113301030001132203003312110222212000022003320
+>2_109_1080_F3,-1793376.0
+T330333323202201133321002332300032232120020212
+>2_109_1153_F3,-1955009.0
+T000220102331200231321033202103212213301023133
+>2_109_1205_F3
+T002020131122033130233232021230213121102232332
+>2_109_1226_F3,-190922.0
+T221233302012312010021323233002222013202221322
+>2_109_1417_F3
+T300101231032211233120111101011331101011010210
+>2_109_1455_F3,-1555751.2
+T120301203113300101211131013113100001210212133
+>2_109_1519_F3
+T322123210101221322003210332003320002213022212
+>2_109_1523_F3
+T233001302222000332123120011232220222210012210
+>2_109_1952_F3
+T022132201031033230121120301020221331030003330
+>2_109_2001_F3
+T301012103323020112212122020013010201203113130
+>2_109_2015_F3
+T021322221013312331122203112021020001113210110
+>2_110_68_F3
+T332211322210310031031201112220332202112322222
+>2_110_292_F3
+T033002103101213331220020023032111220032211222
+>2_110_447_F3,-1860630.1
+T311330103300332212122020130202101000020133222
+>2_110_502_F3,1463021.0
+T323102033231101230200101010000123012000002232
+>2_110_551_F3,1336591.0
+T322011020302323101220130232120113301233101100
+>2_110_598_F3
+T332313100121231221231201231023112201222222201
+>2_110_610_F3,-411847.2
+T013212022332332312101322101012313101122321332
+>2_110_653_F3
+T213231233233312131232303132100211111233212203
+>2_110_679_F3,-435649.1
+T013010132331012310121311011001000321133131331
+>2_110_845_F3,-692620.2
+T202120000130333303323321101321130322123301120
+>2_110_849_F3,1848814.0
+T010101302013223233010200201320201220213211230
+>2_110_855_F3
+T233313322123013333213112133221312212212032222
+>2_110_1043_F3,-239113.3
+T133013320300321012020130021011011310232010332
+>2_110_1245_F3
+T222313102012301303231012011301110211102012220
+>2_110_1259_F3,-165205.1
+T110023321331210220113220110332301201023133120
+>2_110_1284_F3
+T320203100222321220321333231000210202212022122
+>2_110_1469_F3
+T101333033110212130020003231210323101001013310
+>2_110_1640_F3
+T100000010031000001001000013200010210000000322
+>2_110_1865_F3,-679946.0
+T110220101000110003003033000210303113220000121
+>2_110_1993_F3,-625506.1
+T222102330121332020000212222102020002132332320
+>2_111_23_F3,1013130.2
+T112111210233313101010222222131200011032212321
+>2_111_200_F3
+T330000221312331333221121011322132301332010220
+>2_111_259_F3,-546870.2
+T200112021002122202023220032011211112213122211
+>2_111_320_F3,-1105962.3
+T120032021012223322312212111032011223330212322
+>2_111_429_F3,-1879039.2
+T223132001100030102311120013031122223133210202
+>2_111_434_F3,-1117355.2
+T110010302001221221322132220331022222222300121
+>2_111_518_F3
+T310303300232232322332000323330330223202210322
+>2_111_556_F3
+T332223103333223333331110012311112322232212222
+>2_111_627_F3,-718044.2
+T210033000113213001221020103303020030331010210
+>2_111_705_F3
+T302223310323110011312220013320233322103002300
+>2_111_870_F3,1015261.3
+T032333022232300202222321231223333003320030100
+>2_111_907_F3
+T221123303033100210312102033300112312233022322
+>2_111_954_F3,-811349.3
+T212023012202122312310311001331101310111013101
+>2_111_1020_F3,-772906.0
+T201231120012101332320221321133232201303113122
+>2_111_1107_F3,-296904.3
+T210211201211103200001201033102231033202112112
+>2_111_1191_F3,750627.0
+T013101222310332012101033103211301112102011100
+>2_111_1240_F3
+T100133102213321113021210231302122201203212322
+>2_111_1253_F3,-751719.0
+T021010221332221001302110001021213113112102130
+>2_111_1302_F3
+T123231120233222210013212033032221302212012122
+>2_111_1534_F3
+T000002120121110030203022011222110211100100131
+>2_111_1619_F3,-1839744.0
+T331321123010301233130200021201111020111212302
+>2_112_324_F3,1413248.0
+T000012023331223300322111233132303303322300223
+>2_112_702_F3,1702476.0
+T313310220001101211033020110112223122223110331
+>2_112_863_F3
+T312113333112032210002320222201212122221112221
+>2_112_900_F3
+T200002300301100010322000200010010100000023020
+>2_112_968_F3
+T203120203021321000222030002002230222132323122
+>2_112_1057_F3
+T301220332022102300200332021300100200002000320
+>2_112_1102_F3,157891.0
+T110331013110130212330000222032120001222010113
+>2_112_1220_F3
+T210120331200113001321320112201230101301210322
+>2_112_1323_F3,1623292.2
+T000033311331302000201000101020130302200311201
+>2_112_1344_F3
+T103233303303202132211212312322132221212010222
+>2_112_1350_F3
+T001212110033112131321313013001131112312222130
+>2_112_1405_F3,-102257.2
+T001321220130231203013022210120220200020030312
+>2_112_1427_F3,544581.0
+T201010113320330002221230132203020210112232122
+>2_112_1471_F3
+T131111001020122031023111313201121101310013010
+>2_112_1502_F3,1443374.1
+T132130213221002030201121110100131101010210101
+>2_112_1605_F3,-1741296.1
+T212101013020200002312231110223202222102110210
+>2_112_1650_F3,-274447.0
+T210110232211021102332221320220000213201000112
+>2_112_1931_F3
+T300000201023330213033111002000000120203023322
+>2_112_1948_F3
+T021122111131013232112122331221110111112012221
+>2_113_362_F3,-1747110.2
+T113211023003321201101223310123131202211312221
+>2_113_448_F3
+T130332012023332010021200110132121220310013222
+>2_113_479_F3
+T130323032100101000122032320221122232122222221
+>2_113_511_F3,-424119.1,-330078.1,-90797.1,-20000.1
+T111310100011132020002011203011030131001002302
+>2_113_521_F3
+T213303220023233322301110321330222131222012333
+>2_113_535_F3
+T000222010223022210031223311312230222202320300
+>2_113_611_F3
+T033102232301300022020332122201120201220021233
+>2_113_631_F3,488537.1
+T121101310310233003321130211302312311030310332
+>2_113_644_F3,-917179.1
+T330101321031301310330312300031003320102002220
+>2_113_653_F3
+T211223323223012311031321232122212222212212222
+>2_113_983_F3,921068.0
+T022311130222221102010022301001313100023102222
+>2_113_1038_F3,1758557.3
+T012030202121223112222222031321300103211010330
+>2_113_1111_F3
+T211201330211100233131230233311220333233222212
+>2_113_1137_F3,1665119.0
+T303301011010001103230310130230222120020310210
+>2_113_1182_F3
+T021130201002220333321012011212201211110101212
+>2_113_1530_F3,1673292.0
+T222212110021202011313020332213223020103031201
+>2_113_1538_F3
+T132021320103332122033002202321122121222112133
+>2_113_1668_F3,662277.3
+T010101032210210033213231111130312301013210120
+>2_114_68_F3,198448.0
+T232313022010302201000001020201323121032012222
+>2_114_97_F3
+T103332111123322303321202212022122222232212222
+>2_114_243_F3
+T210211001130201003132122230212302301112111223
+>2_114_313_F3
+T303212120102133010112202022220020000230130221
+>2_114_317_F3
+T130303002012032000031322011010110301211012222
+>2_114_516_F3
+T000303100311230030020011230031312112332210222
+>2_114_601_F3
+T222012333031011202031121213023130022130221222
+>2_114_607_F3,-358810.0
+T310331323200322200111102020122230132133322332
+>2_114_730_F3,-1085856.0
+T210120031100231331012211331021300123012300211
+>2_114_766_F3
+T320211230132000123211211322010113003211012222
+>2_114_830_F3
+T110003202033232210223200323020120122300212222
+>2_114_888_F3,-478536.0
+T113301112301320320320210001020122323121320212
+>2_114_1001_F3,1844947.2
+T333120201101320100210323202223032220101001331
+>2_114_1054_F3,224390.1
+T330001302212202023210322313302320220000310311
+>2_114_1185_F3,-1602137.0
+T002202321020113313121210032230012111021203312
+>2_114_1213_F3
+T031031300010111001110031113111113111011210112
+>2_114_1337_F3
+T302110320232012021202021031100011301212122231
+>2_114_1372_F3,1433937.3
+T133012310300122230022220230123120021212210322
+>2_114_1497_F3,1800327.2
+T212233201333121232203000130002000330000000103
+>2_114_1513_F3,-1126218.1
+T031201122211222001321133012132113221032011232
+>2_114_1572_F3,1701638.1
+T032201201031321001013221211300312133311202302
+>2_115_223_F3
+T202233021010111212011202213020132222012012223
+>2_115_286_F3
+T102000101212111323121031223213112131311111221
+>2_115_334_F3
+T220212321220221230123313130130011222212201222
+>2_115_428_F3,172302.0
+T220202120220300113223300003121102001112210220
+>2_115_447_F3,523770.0
+T212322011013302230001200120132113232210022220
+>2_115_633_F3
+T002011331312222200321130331302303013000311100
+>2_115_688_F3,1508923.1
+T232311230322101011000020132230131000212110222
+>2_115_819_F3
+T212313302212021223303112021021122012211212231
+>2_115_937_F3
+T223303310132112331003201331230311102212012330
+>2_115_942_F3,1932707.1
+T312233021102100330323021211003301120000000221
+>2_115_1005_F3,1322983.1
+T223012000003101032201213013232331320201102331
+>2_115_1295_F3,-964019.2
+T112100012121103003223003120013000330110000113
+>2_115_1364_F3,1541289.0
+T021102210003123322230020202211021121013020020
+>2_115_1434_F3
+T100311330323233101031312313110320021130210321
+>2_116_24_F3,1877494.3
+T300211201113320211230213212103010102202221212
+>2_116_310_F3,-768817.3
+T013120230203211000012232331221321023112100221
+>2_116_341_F3,-1285504.1
+T210312210203100313222022300003300213322320222
+>2_116_496_F3,1002610.3
+T321120121111220223003011212202111221212222222
+>2_116_555_F3,-449195.1
+T333321022332121012330222110221210021302012322
+>2_116_639_F3,1347791.2
+T322311211201322201203312230003013031133012320
+>2_116_779_F3
+T312203120131003331002323022202113211220112122
+>2_116_847_F3,1677557.0
+T002321130210230101202200201101023100113301211
+>2_116_858_F3
+T302232021301220313230212020100312222122111222
+>2_116_1078_F3,1441822.1
+T020211011000003320020120121330230220033302121
+>2_116_1471_F3
+T312121001310132202003000121000100001013223120
+>2_116_1535_F3,-1197447.2
+T323200222133012123123110122232223221021111231
+>2_116_1601_F3
+T223103120113213223112202231333213321133211313
+>2_116_1654_F3,-425216.2,-331175.2,-91894.2,-21097.2
+T020010112333022121232001210032200321202113303
+>2_117_200_F3,-857075.3
+T121102200001011212112012203032121121112212222
+>2_117_358_F3
+T300313102213301201032100123322222122232121222
+>2_117_422_F3,-161486.1
+T300310031022300020111321220031300000321002310
+>2_117_578_F3,1843191.2
+T201221321211310031101213000113321221321031202
+>2_117_586_F3
+T212303320302330222223302232330333301312011222
+>2_117_590_F3,1093777.1
+T212312322000120220001320032320231221302012122
+>2_117_681_F3,855802.2
+T231101221302301033332323132022022230120000130
+>2_117_693_F3,-844765.2
+T210303032130120330000322130322022200022002222
+>2_117_826_F3
+T122233301011031333303330333320323002212212122
+>2_117_855_F3
+T210332321001213333230321233000312022222131220
+>2_117_862_F3
+T222322322133230312002232022303212202220022310
+>2_117_866_F3
+T311303222033320223002320231300320202222022220
+>2_117_906_F3
+T121201322113320202031322111332101011030311200
+>2_117_914_F3,424192.1,330151.1,90870.1,20073.1
+T202133303230031131030003230001103003103202321
+>2_117_1097_F3,136212.2
+T311230313000102102100110013100212211011100111
+>2_117_1115_F3
+T120221132211012120011300111111100221202213322
+>2_117_1142_F3,1277931.2
+T222110211302223213022202322010122002212023202
+>2_117_1151_F3,1777686.0
+T022102110220023100210232101001122000022200200
+>2_117_1254_F3,1260430.0
+T223311312221203013221000322122312012133011122
+>2_117_1266_F3,-799356.1
+T103220022031020002310021101100122111211010331
+>2_117_1287_F3,-1420455.1
+T201333023120120132122220120000022211022132110
+>2_117_1410_F3,-1991323.0
+T331222030001231111000332132010213011312001120
+>2_117_1443_F3,836841.0
+T332031223031223212032111213321030002203311102
+>2_117_1488_F3,-82647.2
+T310232113320001210020030313313323130020210101
+>2_117_1608_F3,-95858.1
+T311011001313310201101203231023101223201132111
+>2_117_1615_F3
+T001010120311212010112201032012120010001010111
+>2_117_1621_F3,1420984.0
+T311030000013112021101321231301332033032033301
+>2_117_1959_F3,-64673.1
+T321333221121211202221321021201120211102113322
+>2_118_31_F3,-77639.3
+T000111222211302312330121312313122213232012222
+>2_118_66_F3,1849331.2
+T021021021310310132122021012232003123031010232
+>2_118_143_F3,1661598.1
+T210312333203320011030331001311222202222002322
+>2_118_269_F3,-1928591.3
+T220022210202111033122101022302322122222213222
+>2_118_365_F3
+T332211123321230310103120133212123101200322323
+>2_118_452_F3
+T301132021310033012320121313212212231113232222
+>2_118_510_F3,-807198.1
+T311323112033201221302300031032320033113022301
+>2_118_552_F3
+T302220103023220322221121111222111013113011220
+>2_118_570_F3
+T220200101330033202012232333000320300100000322
+>2_118_603_F3,1580012.0
+T002212330001011202101021213003100220130200321
+>2_118_659_F3
+T120223220031031333303122322001222202212022230
+>2_118_822_F3
+T233113321323201330323312322201212212320112111
+>2_118_955_F3,953903.2
+T231300301233101132011100110132113332300133333
+>2_118_1219_F3
+T333213323023112220333310122000323021331012301
+>2_118_1351_F3,1222107.0
+T000002230333331213020321230112033012311201132
+>2_118_1433_F3
+T102213320133133222211310323100120011122112121
+>2_118_1483_F3,-252175.2
+T331110011132330013031321332202000233100030230
+>2_118_1528_F3
+T212132012312222331122012120221311213212112220
+>2_118_1760_F3
+T333110311131311232331123212211332232232212222
+>2_119_93_F3
+T332232223103332331133213033122232121112211222
+>2_119_195_F3,1754283.0
+T022103210233223112111330321320330223233332222
+>2_119_230_F3
+T122301231033101001203200202030123302212332331
+>2_119_480_F3
+T130333103333001312313303131212111121212213222
+>2_119_494_F3
+T220122022210023120333100220032210321132032222
+>2_119_566_F3,-1806150.2
+T230001121000221102022313102301000013100230303
+>2_119_574_F3,809328.3
+T300322310021322133223332331203311203110231322
+>2_119_605_F3
+T022202130000010310131022013003100220100000220
+>2_119_617_F3,-55548.0
+T302311021320210331322103030103331112212210201
+>2_119_633_F3,1089669.3
+T131323323101102130031330113322103201101111101
+>2_119_714_F3
+T330333120011013330221310132203220212232112223
+>2_119_818_F3
+T210303130133200230221221222202113212201221220
+>2_119_1226_F3,1229027.0
+T000223033331311223300322330100133230012200221
+>2_119_1237_F3,-1361764.1
+T120023023232210011031010033213322030221303212
+>2_119_1270_F3,1751007.0
+T312022112032121301011103310320112300103003322
+>2_119_1316_F3
+T001023302013322230120003000120010003000023100
+>2_119_1504_F3
+T231232201231012232113312213222212122230212122
+>2_119_1642_F3
+T233312313313333313132322322212312221222212222
+>2_119_1984_F3
+T010213000103103211022220012120132111102112321
+>2_119_2009_F3
+T000000310031211211121313013002211211312113122
+>2_120_48_F3
+T202201331013131213322322222322223222012212222
+>2_120_88_F3,-1547547.1
+T032132333121202112330021003311320222122221222
+>2_120_118_F3
+T210100231101213212321122023212112222212222222
+>2_120_284_F3
+T331312201301111330112030230033220011312231132
+>2_120_381_F3,-606709.0
+T000033210300031102011320103211001131010213122
+>2_120_407_F3
+T031333213103313212001003110221302300022202223
+>2_120_669_F3,-1328955.0
+T311012020221320201231223032101010112000233303
+>2_120_687_F3
+T232012102211002111012200012110221002212112122
+>2_120_947_F3
+T323213110221330231301000131122312302233031102
+>2_120_1006_F3,-796478.2
+T221203332202313212100001010222002310120013233
+>2_120_1024_F3
+T002113030000122320113202012212000310001100231
+>2_120_1042_F3,-1951695.1
+T032223230130333211010031121120132200121002230
+>2_120_1303_F3
+T201231003320010112133313123200020201012113121
+>2_120_1426_F3
+T223103112031330232203303120132210302312011321
+>2_120_1463_F3,-554900.0
+T301022013301012131102210211013010223310220123
+>2_120_1582_F3
+T111010323222231233202301033103322023230122311
+>2_120_1618_F3
+T010132122121210210300221033132112122111231001
+>2_120_1627_F3,742891.2
+T022001312130231220230012112220222222300013111
+>2_120_1935_F3,1906191.3
+T203101012020002112122130011022110111021010112
+>2_120_1941_F3,-678078.1
+T300002230300120020220230102111323333212010320
+>2_121_27_F3,-654264.2
+T211223233112312121022031202312301121230123322
+>2_121_168_F3,-1111013.3
+T021123303303310131322021012300132201012323222
+>2_121_222_F3
+T321203131002202332013222132222312221232211222
+>2_121_362_F3,-969180.3
+T301310010202312323000311203200222002222020223
+>2_121_457_F3,803589.0
+T323213212223100111012113210201012001010130222
+>2_121_466_F3,-1379291.2
+T311033200022212321201022033201330101313032201
+>2_121_485_F3,147950.0
+T213103131133133210200331030102302302102001220
+>2_121_524_F3,-1878525.3
+T002203221303222102303101221302102211222220332
+>2_121_636_F3
+T120003013032012000033300033322101201102112220
+>2_121_745_F3,-742081.2
+T010032330020130002210130002200112132103001123
+>2_121_839_F3
+T132203330033330333211222322033220011212212220
+>2_121_957_F3,-1399048.1
+T120121102212000100313300310033013201333013330
+>2_121_1027_F3
+T112300021000123012111001012022113221122011320
+>2_121_1113_F3
+T000213210301122020300000030232331020112203302
+>2_121_1147_F3,-1592778.3
+T213220033312313010202311031113110221032121322
+>2_121_1297_F3,-787909.1
+T213030320100001131130200003300210233121120321
+>2_121_1312_F3
+T100323222012222121312322330032230001200111122
+>2_121_1381_F3
+T201012300001103311120030331111110230200022333
+>2_121_1405_F3,989779.0
+T201011003200120212203301001232303211112203323
+>2_121_1448_F3,510556.0
+T021312023102111013302210330103022213223133231
+>2_121_1458_F3,-1826119.1
+T021301121001132122310221030111100312101321100
+>2_121_1473_F3
+T012031002303110201202011211030011201111011100
+>2_121_1576_F3,-1879547.2
+T203101212003001201031003313033300311101210312
+>2_121_1660_F3,423722.2,329681.2,90400.2,19603.2
+T230230321330331123332210032000013111210231102
+>2_121_1673_F3
+T233112112111112132332221332212212222222222222
+>2_121_1914_F3,-834421.1
+T000220330200222002222211331203030320132211111
+>2_121_1921_F3
+T123220332022322023023121131222312321332310222
+>2_122_22_F3,1168174.2
+T001023300003231102121233312302323101222212222
+>2_122_52_F3
+T230101210013111211122122112212112232112112222
+>2_122_286_F3,-95501.2
+T303332102302131231102100020312220222311322222
+>2_122_395_F3,534262.2
+T311202332100130100321203131230021232230001222
+>2_122_562_F3,-1311409.2
+T201022001122031201222323321223102323032230102
+>2_122_573_F3,-1276031.2
+T301310302122310031010322332200310330101201302
+>2_122_792_F3,435359.1
+T220100311200222311311202012100002220300201320
+>2_122_807_F3
+T112201311313020113002123322132231332200222222
+>2_122_811_F3,-1885810.2
+T220131300312223011202110322000000220002213321
+>2_122_816_F3
+T222111330113210310022121322003223113222112220
+>2_122_925_F3,-1416177.3
+T010301023210220032313333231222101312133121330
+>2_122_963_F3
+T223130222021300223003221232020320232221212222
+>2_122_1115_F3
+T030000102201200000002000033030321020002310300
+>2_122_1232_F3,-760030.1
+T001120131013013212102130233011200113300110210
+>2_122_1944_F3
+T030000110313220000221110002101111201112011220
+>2_123_96_F3
+T000123232123032201120210010112111211212122222
+>2_123_249_F3
+T220223001020132202202002122010022212033221303
+>2_123_448_F3,-686352.0
+T213110002123101312122113113121131231202332123
+>2_123_492_F3,1285333.0
+T211202132120130013102202021313313233101023120
+>2_123_524_F3
+T302021221202222021230101211332220223222200302
+>2_123_568_F3,-53327.1
+T222013111210301303200100323003013312100121302
+>2_123_601_F3
+T302222223310200322300022003022031210312000320
+>2_123_893_F3
+T011201001011010110010110031200110211132211220
+>2_123_948_F3
+T320322100321332030222001231002312121212032100
+>2_123_1200_F3
+T121233330211332332022223132032220021202222222
+>2_123_1340_F3
+T003300310221110331211023210202330111311123223
+>2_123_1358_F3,-781687.0
+T002103002211323200122310300002132033211213332
+>2_123_1593_F3
+T311003010222323313212100033001130122002021222
+>2_123_1602_F3,1692655.2
+T022011212320221301030220112303310100300000310
+>2_123_1663_F3
+T330120301223103002332110011010221111112110322
+>2_123_2007_F3,-63592.2
+T001012322131201212122323023022210200312221112
+>2_124_315_F3,280982.0
+T021230231131210131023131202101003112102011322
+>2_124_503_F3
+T123021222202222222222222222222222222222222222
+>2_124_532_F3
+T113023321021122312033131311212212332212222322
+>2_124_661_F3
+T123022110011102312112330020011212222132222322
+>2_124_753_F3,1959111.2
+T303110000330020011202133113220220222003020131
+>2_124_759_F3,-1755275.0
+T212010220223200201113321320100322210322200210
+>2_124_831_F3,-460767.2
+T030102113030200103130200022111230202002000100
+>2_124_843_F3
+T213331111132333231232322223222122223232222222
+>2_124_982_F3
+T210100222010000000200002113222000100332003320
+>2_124_1076_F3,-494684.2
+T003223003022213130330033321021300022102123220
+>2_124_1170_F3
+T002310011333311310212131311013110111301012210
+>2_124_1174_F3,66557.1
+T000330002133130312101011211031202230310013023
+>2_124_1304_F3
+T201331021220310112221311323200211231310012200
+>2_124_1345_F3,465280.0
+T021003210301302322103213230120013212022002232
+>2_124_1452_F3,-276991.1
+T030033233232213010331212122233022112122232322
+>2_124_1470_F3,-1376048.1
+T212233022301101212222012000220332011110311321
+>2_124_1481_F3
+T201001201023032310131302230001320302000130300
+>2_124_1549_F3,1460640.3
+T212300101120311120000001002010021102211031110
+>2_124_1581_F3,834512.0
+T110310330222212233102010031123223121100012310
+>2_124_1626_F3,1936582.1
+T210232333121210131001101303310203200100021312
+>2_124_1923_F3,-643344.2
+T120220322022220021003132121322310321310330311
+>2_125_76_F3
+T201123133022033200202222023220222122201022222
+>2_125_93_F3,829967.0
+T002123232123022201020210010011131212212202321
+>2_125_190_F3,-1841426.0
+T210113230110112121001033201220013112130323200
+>2_125_199_F3
+T021120200100030222221200202222322222223212202
+>2_125_209_F3,-732871.0
+T012023101121002132123020301101021113110123210
+>2_125_478_F3
+T320133330101122223213321021022212123002022332
+>2_125_540_F3,1741354.0
+T322211320201011321120220221233030220010311103
+>2_125_883_F3
+T332323122232102111321232322203211322211112222
+>2_125_902_F3,352059.2
+T301330003323220322201230103210132201111022010
+>2_125_937_F3,-806223.0
+T310100312111323010130032332310321110213202202
+>2_125_942_F3
+T303021321022323313000312132010121302212032320
+>2_125_965_F3,309730.1
+T223230200001022020023103130030001231012001220
+>2_125_1095_F3
+T300301200302310000100000011010330100000000320
+>2_125_1154_F3
+T002202310022302230013030003232331100132000210
+>2_125_1163_F3,-1776464.0
+T000010330111333020222020133121100132300311120
+>2_125_1269_F3
+T212232211232122330331202022222212102302112122
+>2_125_1335_F3,-1869467.1
+T023112113020130001310021101221313302123300221
+>2_125_1485_F3
+T223103102223231332103332133203123202230132120
+>2_125_1561_F3,-1041736.0
+T220133100132122023303102023010300332302021321
+>2_125_1678_F3
+T133333331133323222321221322223222222212212222
+>2_126_161_F3
+T103133330330201013101211213322112121011212222
+>2_126_229_F3,-1076930.2
+T002230030030013111013100010131122123031200233
+>2_126_326_F3
+T333223113332011010100102110201122213210332221
+>2_126_363_F3,262006.1
+T320133230200211323023010033011202002303123323
+>2_126_445_F3,-342946.3
+T013110101211202102010110211112122010332111220
+>2_126_601_F3
+T322222113210200322111222323022131211312312220
+>2_126_772_F3,-254323.1
+T131010211332020002101200112301131111020110101
+>2_126_856_F3,-1878942.1
+T231002233303102221110303103121100323211020212
+>2_126_900_F3
+T110231003332103323202010012010130201111010122
+>2_126_945_F3,-1933617.3
+T202311310022330002010022002300123202331030130
+>2_126_1004_F3,-1970033.0
+T210120123331032023110031220002311010102101222
+>2_126_1017_F3
+T200313231221210313213120111010110011130021231
+>2_126_1021_F3,-1740473.1
+T101323210131223112130133211123002001000102333
+>2_126_1128_F3,652588.1
+T001031300230331230332330122303123323032123301
+>2_126_1222_F3
+T302223330112300000223310133011000330012200221
+>2_126_1253_F3,1154590.2
+T331000323033202210100002202203310321320111112
+>2_126_1314_F3
+T333110112310320103001030110301103111002110120
+>2_126_1410_F3
+T301031111201020221130321103321031111112101212
+>2_126_1571_F3
+T020222331221132132302221210102120121101210322
+>2_127_166_F3
+T113112132132321231203233000020122323011110322
+>2_127_225_F3
+T031231320000333111211222031222112221112211222
+>2_127_249_F3,-424861.1,-330820.1,-91539.1,-20742.1
+T332132203132300323122222321111002012332011221
+>2_127_282_F3
+T001330030100301202322021211332011122212212222
+>2_127_320_F3,-176637.1
+T300121112202331033013022032322231122001202100
+>2_127_820_F3,-1962198.1
+T220211303233223321202222232101130122230130121
+>2_127_1185_F3,-1139753.0
+T020310003210333032001301100000120301033003320
+>2_127_1203_F3,-1043287.1
+T123123001211212131322200112001112101300110210
+>2_127_1242_F3,1854006.2
+T120111330022013021301123022322201301002010211
+>2_127_1257_F3,-1440387.1
+T030020322102100000113313203210233301212103322
+>2_127_1323_F3
+T303203322020302211003231133010323002200121120
+>2_127_1354_F3,1950281.3
+T010120210132331210110110221300112101112111221
+>2_127_1360_F3,-479551.3
+T001320010223333203112310202203101020021103331
+>2_127_1423_F3
+T221131133231233313323202322012222003222212313
+>2_127_1491_F3,-1290392.3
+T123000202202000031211333313201030001230123100
+>2_127_1597_F3
+T110210110032323001113301333101202333322012323
+>2_127_1914_F3
+T300322210332201332203331332203300013330233121
+>2_128_64_F3
+T300032031013010130211121212220203301212012222
+>2_128_134_F3
+T320220200010202321121211210102121021212202222
+>2_128_169_F3,-180789.3
+T103100032203312312011313100011312213330102322
+>2_128_187_F3,1671556.0,1471924.0,972616.0,-1718290.0,-1356806.0
+T100230123111221302031021021220211030130223212
+>2_128_218_F3,1448410.3
+T032102221010222122222303202210322030122333223
+>2_128_371_F3
+T210322000120233233231320222322212122232220222
+>2_128_486_F3
+T023033332312010032130121213022123132102022222
+>2_128_514_F3
+T033032223213020112010201213010112311122211222
+>2_128_618_F3,-96666.0
+T313220103031033222000321000121202202102132102
+>2_128_670_F3,954543.0
+T101212313330031310012120220223030231220010330
+>2_128_850_F3
+T210111111232030310201300032220222103201112112
+>2_128_866_F3,1464244.0,1425220.0,967405.0,-665454.0
+T220303233302202033121212301133312011312320203
+>2_128_1025_F3
+T102011120332213312321132312211223322211112331
+>2_128_1119_F3
+T331323201333121221013211032100323203200022212
+>2_128_1311_F3
+T312211012320120131321001130201301013112111120
+>2_128_1397_F3
+T221101211013112011011111311021113321112013210
+>2_128_1594_F3,1801857.0
+T112100120332200000123301033231200310123010130
+>2_128_1614_F3,1790974.0
+T001332102000221120312102211103121320102202122
+>2_128_2019_F3,1388656.0
+T322032002132123311022102012033021201000101130
+>2_129_31_F3
+T131220011320330100223111033202232132111313233
+>2_129_128_F3,-254517.0
+T000222200333112312211001221010131123312202121
+>2_129_293_F3,1223626.2
+T103323120032320002013023113210232223101012212
+>2_129_311_F3,424031.0,329990.0,90709.0,19912.0
+T223022320313212320020020011230012210110123220
+>2_129_326_F3,1613416.2
+T121222113232312311000132210201112213110332121
+>2_129_425_F3
+T030133300211013330121332312222233331002002133
+>2_129_528_F3
+T211201130213123323222102332002113322212312222
+>2_129_921_F3
+T211130131300103020103033021011300010202000220
+>2_129_1125_F3
+T033003212131301120031330312003121222232322320
+>2_129_1151_F3
+T230212120121211222123300123002212022321111112
+>2_129_1216_F3,-188086.1
+T311133330201002321213110111102132001213301332
+>2_129_1458_F3
+T321020320210331102010021110130110011100110021
+>2_129_1488_F3,-1901368.3
+T213303110300121231010300333221230012130120321
+>2_129_1583_F3,1868395.1
+T030300223122300130321210222031130311003223320
+>2_129_1903_F3
+T211302113232212332322122332222312221332132222
+>2_129_1923_F3,-528857.1
+T232300220221311321320121101301311323111020232
+>2_130_207_F3,1819564.1
+T002213200013230301102122110223002220032101131
+>2_130_257_F3,-1938627.1
+T201102210012121213230130022101210012101211222
+>2_130_391_F3,-563678.0
+T210311013031210110333213212203223110011330333
+>2_130_545_F3,-1442661.3
+T220123020223011203302132232221302123101322223
+>2_130_582_F3,-1936688.1
+T011003023212331033231010211002031221012212222
+>2_130_588_F3,-451486.0
+T000220030200203002010032232110322101213200332
+>2_130_636_F3,-708239.0
+T001233231221200200112331302020202020013102223
+>2_130_676_F3,-1887158.3
+T331130013300323310011110303003230000220231121
+>2_130_682_F3
+T221213232132231311213323033001112222302122221
+>2_130_839_F3
+T222123222332020223301303332122122201221112222
+>2_130_876_F3
+T232122100111102210331211131210123302233112210
+>2_130_948_F3
+T320320222020332012330131010022121020330131231
+>2_130_1052_F3,-55452.0
+T121313111332112010030202033231021330010111122
+>2_130_1057_F3
+T322303103132333011202222033110212312200122103
+>2_130_1165_F3
+T133013211201310121111202211112100311111223320
+>2_130_1324_F3
+T003303321020300011203202033000100102200020020
+>2_130_1470_F3,-623112.0
+T110301220101222232323201213220133031002110102
+>2_130_1494_F3,1251987.2
+T330202011011230212201232131033033032133220210
+>2_130_1548_F3,1181829.0
+T030120002200232100032313312213010101100213212
+>2_130_1554_F3,1160522.2
+T332101010113122321202212101231130130201301030
+>2_130_1656_F3,-1143440.2
+T221121032100021010103120333101333013312120222
+>2_130_1946_F3,-869500.0
+T101212311032303102210221122020213002110010112
+>2_130_1966_F3,290801.0
+T201223122100132230120211012113102123103231131
+>2_131_59_F3,1260745.0
+T311332223312013302200133210200113300112022222
+>2_131_80_F3,-602636.2
+T030120021221102020121220111202312331212221222
+>2_131_237_F3
+T001320233002332220123020202022322122222222222
+>2_131_246_F3,-735893.0
+T213312220211020012002112301122032010011011221
+>2_131_262_F3
+T111332100110223133331202110032012321133211132
+>2_131_361_F3
+T311203131122223212132233221320310003310022222
+>2_131_403_F3,-782080.3
+T213211211010210303110212333211201111132212220
+>2_131_860_F3,1075530.0
+T201023211221103210012312300020013131230012122
+>2_131_1315_F3,881280.1
+T122103220020220211013120320232100121030122300
+>2_131_1503_F3,-348991.0
+T302301232202112132032231101022012202222013220
+>2_131_1517_F3,59594.1
+T201210013121020303213021311233210201112010221
+>2_131_1636_F3,1216747.0
+T001201323120303311000322030312210010202101122
+>2_131_1775_F3
+T331112111131133222331221213222312221232212222
+>2_131_1931_F3,75905.3
+T330103212313312101132101103032100231003113211
+>2_131_2041_F3,-151771.0
+T201213233012210322321213222130130002133001210
+>2_132_191_F3,-320777.3
+T001321023223213132220203302322022333323013223
+>2_132_227_F3
+T001333110330223202131022321022120231012322222
+>2_132_241_F3,1755210.3
+T221310312212102012023013333221302222300122312
+>2_132_267_F3,-259835.0,-259427.2
+T132011101110013302213322020202203201022022203
+>2_132_355_F3,444852.2
+T332210323000313233033033302100212330312220222
+>2_132_568_F3,36886.1
+T132332012131321100111321110132101211022113221
+>2_132_735_F3,1715092.3
+T300210220221300103301122300001302111121111231
+>2_132_828_F3
+T320313301331013223333220032001220132311122222
+>2_132_883_F3
+T212123110131000222023310332103222122212012222
+>2_132_943_F3,-1758968.2
+T232102311321011302012233112221032003132122122
+>2_132_984_F3,1423841.3
+T000131001100020202011300031033101132011310220
+>2_132_1181_F3,1489329.0
+T212233012013323012001032321111331112221031322
+>2_132_1235_F3,856634.1
+T311303130332302120033332332030011001302200122
+>2_132_1293_F3
+T202122300000033321002020031232231301200221122
+>2_132_1327_F3
+T002113301120000013013112133211122122232112120
+>2_132_1459_F3,-595341.3
+T222001120220201000010321110100030012100113101
+>2_132_1665_F3
+T310203330333312021333302213101321122110002323
+>2_132_1937_F3,1950217.0
+T211230321212103311102021012021011012022223321
+>2_133_42_F3
+T133322132330200012112323323100310130122011222
+>2_133_93_F3,-441262.0
+T303021101230101223102133213103123123132131221
+>2_133_110_F3,916152.3
+T223123331202100113202330223211233000300132222
+>2_133_196_F3
+T331221231303313211221311101322111223212211222
+>2_133_321_F3
+T312303021230221200203022212102112123002230222
+>2_133_376_F3
+T012311202223022311021102133220312202222212222
+>2_133_415_F3,-1770367.0
+T002220231010232310112021011200030210100233221
+>2_133_494_F3
+T130131302113231221112301012110211112231112210
+>2_133_508_F3,-807447.2
+T130222021320120031002031301212101221012010222
+>2_133_549_F3,-590724.0
+T220132220201021000212033100031011000011022220
+>2_133_688_F3
+T122233312223102330201122233202222212232222302
+>2_133_722_F3
+T103000020320011020220011113303103201000010320
+>2_133_741_F3,160253.1
+T010003212312230233030322331303012211122011100
+>2_133_1017_F3
+T020131200213202311120210012013131100212112121
+>2_133_1122_F3,-1868068.0
+T033023212031032023331000012323021120202322122
+>2_133_1165_F3
+T330020210100002301221202112112132210020223120
+>2_133_1198_F3
+T331202212110320012011222010211221221020111021
+>2_133_1260_F3
+T230302300332301310023310122000220102200012100
+>2_133_1341_F3
+T210001010101020122133022212320200300030000100
+>2_133_1384_F3
+T032112110033123123211201132332110222213111233
+>2_133_1616_F3,-424573.1,-330532.1,-91251.1,-20454.1
+T311023130012122133321111022112132021213003312
+>2_133_1663_F3,1733973.2
+T310213330333312021330322122131321322112002213
+>2_133_1769_F3
+T231112311323333312323221311211122222232221222
+>2_133_1988_F3,-104113.1
+T331100212203330032313201002313220030012210332
+>2_134_114_F3
+T011332200300110221202133120020221333012222222
+>2_134_143_F3,1506979.0
+T202220120112100021112020123102210333222221121
+>2_134_276_F3
+T302332000223100023130222312233023322120022322
+>2_134_311_F3,-376351.1
+T210201220221310312301020201222010221102220222
+>2_134_363_F3
+T312302101122013320031132111303130032230021222
+>2_134_712_F3,-1053177.0
+T310201321012103313200013012222333313032111332
+>2_134_808_F3
+T212113101122131113233323232113210103213021101
+>2_134_908_F3
+T301013010033232311323303132132321222212202222
+>2_134_979_F3,1171256.1
+T000223021000233010030000021030201232011320312
+>2_134_1061_F3
+T302220200322213022031220211100003222200013220
+>2_134_1378_F3
+T100121311021321002312032313313122211202010122
+>2_134_1482_F3,1014711.0
+T010211030210312120101223113032322000131101301
+>2_134_1596_F3
+T110000120220003103021200212020110221132210222
+>2_134_1670_F3,672769.1
+T133002120113321033331032113302010101012110222
+>2_134_1952_F3,1979440.2
+T100300103200020121103000212000113301133013331
+>2_135_66_F3
+T322202021300021231301112023022212222232122222
+>2_135_182_F3
+T033010110222012103022311010120111212312212222
+>2_135_282_F3,936751.0
+T300032121332313032303230330212012331100322222
+>2_135_372_F3,1328276.2
+T012313230322223313212322032320112301132311120
+>2_135_396_F3
+T312323212323223331103203211332111022232122320
+>2_135_602_F3,-1580356.1
+T300301003210023101212211211010103011113010332
+>2_135_651_F3
+T232203123022200332132220223222232122320211222
+>2_135_682_F3,-1392313.1
+T223113231310120100100222313220112103312110230
+>2_135_758_F3,-1588749.0
+T112131231010021320202020123110123011222012220
+>2_135_776_F3,865759.0
+T120003330202202113122222031121022123200000112
+>2_135_798_F3
+T210023132223202210311303122011222232212022222
+>2_135_1058_F3,-1312354.0
+T311230101010112013322121021233011222100020100
+>2_135_1195_F3,-38094.0
+T233211210100121120121121111011200221301111111
+>2_135_1466_F3
+T220202320222203101300231332100131121100221322
+>2_135_1603_F3,1725181.0
+T213020213023332100332213203322120101232020330
+>2_135_1972_F3,129535.0
+T011002111122010032212101303102001100111001310
+>2_136_100_F3,246506.1
+T233210313201023032310111010231201002010110230
+>2_136_108_F3,-1223989.2
+T213112323300132220032000010232333002002022220
+>2_136_166_F3
+T100310101323231332312000333221101111012112221
+>2_136_270_F3,-509485.2
+T322332102231103300023100021222213120313223322
+>2_136_610_F3
+T000222100022321122003122131032322323022011222
+>2_136_678_F3
+T022001022312232122211013030302021311312210203
+>2_136_845_F3
+T213221332331233210213212322110323302322012222
+>2_136_939_F3
+T131022221321333332332300133232332001222222122
+>2_136_1077_F3,846263.2
+T223030220000002300000230032000000000100000100
+>2_136_1131_F3
+T020300311000002102100311201132003100222020122
+>2_136_1135_F3
+T311302201332222023101030212231021110232011222
+>2_136_1151_F3,-786381.0
+T220221121322032200321311312212231001302111103
+>2_136_1205_F3,-233212.1
+T203300123103233002222000120201222100223332222
+>2_136_1267_F3,-40246.3
+T000103100212000012101122232322010303102211312
+>2_136_1363_F3
+T111323310033031210122311333103211232202212330
+>2_136_1368_F3,490173.3
+T022302000223322200320220331013332130122100323
+>2_136_1473_F3
+T020300302033311230133212333223333122222122300
+>2_136_1533_F3,1231296.1
+T223000113012012110231023132133200110000220031
+>2_136_1586_F3,-428863.2
+T332000203000032100132001003132100302210003332
+>2_136_1776_F3
+T331112133113313313222221322222222233212312222
+>2_136_2025_F3
+T332200122120121210211120320002111020010031110
+>2_137_71_F3,1888601.1
+T003302220100132033131301303132100320030222122
+>2_137_80_F3,-917953.2
+T330113222101113101322021020201221133301322222
+>2_137_118_F3,-912708.0
+T022012201110020120232132020102302331211122221
+>2_137_151_F3
+T101011013200212000011033120002112221102212222
+>2_137_171_F3
+T032031202202200210302331012030123122202322222
+>2_137_278_F3,-1573112.3
+T132203231033233033121032212003000331102010223
+>2_137_298_F3
+T013202220201002300013223102210310220010000222
+>2_137_407_F3,-1352982.3
+T302323012022221303320111233112302121122202222
+>2_137_513_F3,1387940.2
+T333131021000331010213110310321301110012111221
+>2_137_535_F3,1717676.0,-1472538.0,-973230.0,-1672170.2,1356192.2
+T030213023011113320112122003203100012022112222
+>2_137_616_F3,-667347.0
+T220222330122030013200012032123221022012103222
+>2_137_628_F3
+T300330203102321101103332013020332221212112222
+>2_137_729_F3,-1704372.0
+T001200133331201220220311131121332233332010220
+>2_137_817_F3
+T310331330211330112331321122022110222212122222
+>2_137_1069_F3,-1765278.0
+T222232012102000311032303012102101323020031111
+>2_137_1098_F3,498028.2
+T233122313200120101202021121033200123313123220
+>2_137_1270_F3
+T203303312012010012031101032012330103212012320
+>2_137_1415_F3,-913585.1
+T312331003010031021321022013201120031103110132
+>2_137_1491_F3,-1069987.3
+T122121022301022202223301010031132221200211332
+>2_137_1548_F3
+T112100010123220233011213211223112001002011122
+>2_137_1606_F3,301958.3
+T212322122003330202020103212302013200000303311
+>2_137_1656_F3,493299.2
+T120101020112220000012311202232320001230012222
+>2_137_1666_F3,-1085177.0
+T222103010220122033331100213000310111120002132
+>2_137_1783_F3
+T311102213023113232232223112212122222212211222
+>2_138_35_F3,-384980.0
+T021021021211213101201222010230202123022003221
+>2_138_66_F3
+T300223133003222231332233023112211222112221222
+>2_138_138_F3,1133969.2
+T202011130002012220032130131300112201132211222
+>2_138_360_F3
+T013312112321313312121220211233210011320211212
+>2_138_411_F3
+T310220221321020002232102022133001132023103220
+>2_138_436_F3
+T320200032212001303320131202222212221012212222
+>2_138_480_F3,357587.1
+T300130003202131320330013120230020200301203322
+>2_138_491_F3
+T001210120331110003122110322102111011112231332
+>2_138_569_F3,1602678.0
+T211022133010132010301231231101321233112222322
+>2_138_597_F3,1668250.1
+T310212010310303020100133202003311320130020222
+>2_138_626_F3
+T103132301102322132103332230222012221302113222
+>2_138_646_F3
+T110120000321211330103310021210211122122210222
+>2_138_766_F3,62383.0
+T031012122110212211013023031200313230033210102
+>2_138_920_F3
+T202100002100220010010201030103330210102011322
+>2_138_935_F3
+T220031300322001232223202032112110032330322100
+>2_138_948_F3
+T033321111110111202112102210131131101112011211
+>2_138_1095_F3,-373238.1
+T003322121020032302111100122301110220121121111
+>2_138_1466_F3
+T222200313302221010102031222220021120222221130
+>2_138_1769_F3
+T323113321123113121131222111222212222232222222
+>2_139_97_F3,-1330898.1
+T202221103210203230312010223032131013320123220
+>2_139_158_F3,1801682.3
+T002013222200033230221220302000122322020220222
+>2_139_193_F3,-411062.1
+T021300221132320000210130000121012233113212102
+>2_139_247_F3,83667.1
+T320313210231030322320323102022030222212222222
+>2_139_364_F3,1717849.1,-1671997.1,-973057.1,-1472365.3
+T012102113022102011131203301000012322120111222
+>2_139_965_F3,-1216865.1
+T232000220310211202303103200213201222333000200
+>2_139_1024_F3
+T132011012130013110123203111030120121312113221
+>2_139_1493_F3
+T121221032301133222303301012022132021223212132
+>2_139_1561_F3
+T322103023003233331211231010212101300022111220
+>2_139_1586_F3
+T321001201012200300120030013023120001201203120
+>2_139_1637_F3,-462668.0
+T223020023001022230110312112220200310202321311
+>2_139_1661_F3,1037315.1
+T212210132103220210302111223333131111201220212
+>2_139_1747_F3
+T211113233122333222332231222222222322222222222
+>2_140_47_F3,134817.3
+T300112201010231110131202013313113023210111122
+>2_140_182_F3
+T322021330130222200110120311123212000012211223
+>2_140_204_F3
+T002100202023233020033333033002302323302323222
+>2_140_210_F3
+T133322220000000022011312012200000111222213322
+>2_140_251_F3,-1247252.1
+T030322132202322000331300001312310312132311232
+>2_140_278_F3,-1981677.1
+T132000001120213121000232222000010321300000233
+>2_140_468_F3,743016.2
+T001122022112010222101201320133203332032120332
+>2_140_656_F3,1807373.1
+T212133130321102021132312312221001102102121222
+>2_140_875_F3
+T221203311023221323223331232102220232222212320
+>2_140_1043_F3,-1504861.3
+T330022232100012020303302100311330210002302100
+>2_140_1058_F3
+T312231131023223210122201221211210222112212233
+>2_140_1143_F3
+T131231130231332132023320312003220032222033222
+>2_140_1264_F3
+T000003332122001201030100300020000201220220322
+>2_140_1268_F3
+T203303102011000020003302322000000200302020320
+>2_140_1320_F3,-195427.0
+T003003323032022231333012320330111000011120020
+>2_140_1356_F3
+T023121102002211230002013011003010010102310122
+>2_140_1420_F3,-839749.1
+T210211032202032130033120001322210011013210322
+>2_140_1453_F3,384965.0
+T313121121201201200312320011103212202133211201
+>2_140_1478_F3,-1320582.0
+T001212103221001332312013122012331200031101101
+>2_140_1504_F3,1237582.0
+T331302011021331212122100102132012330130030300
+>2_140_1535_F3,-1715932.1
+T222233033200202333220202212323330132002220122
+>2_140_1650_F3,-677306.0
+T333013230112101323210203202133110000122112220
+>2_140_1806_F3
+T221222003221200203000320010232120223220222220
+>2_140_1811_F3,-139220.2
+T022313222102000021010332100221200021130102110
+>2_140_1813_F3
+T022323100002000021110132033020010220012102110
+>2_141_59_F3
+T301331201322112302323021120010302111111112211
+>2_141_141_F3
+T201312200233302201112322200322112322000202113
+>2_141_220_F3
+T000120331311331023023001210032022010322213332
+>2_141_391_F3,1239721.0
+T313203022003120102333320211312331110223320222
+>2_141_410_F3
+T213023322333032000012122012210203122002233320
+>2_141_430_F3,102506.1
+T310321101120023102313120021010200313200003301
+>2_141_451_F3
+T120220133212000110020330031220202211033211230
+>2_141_459_F3,1327884.2
+T330220112130022001210102001122021302101211233
+>2_141_530_F3
+T331003021212202000220022303201020111203132102
+>2_141_545_F3,-215576.3
+T010323103203302223002022001332303331020302220
+>2_141_729_F3,1803810.0
+T011120100113200102113011300103313303132010211
+>2_141_748_F3
+T112323122223222212222303033121210302200012100
+>2_141_777_F3
+T331202200212020202321321023101123132322212200
+>2_141_869_F3
+T123033213113003132230210112110120001023112222
+>2_141_976_F3
+T000100020300210113300023332000313131030201331
+>2_141_1005_F3
+T331310113311112332322322331130223312232020232
+>2_141_1009_F3
+T210220011211112030222123131102021232212130200
+>2_141_1110_F3,893659.2
+T012332032303022103010011201032102222101303300
+>2_141_1136_F3
+T030132021311313311001210311121210302132031123
+>2_141_1246_F3
+T132021230013201222321300232000230211212022010
+>2_141_1291_F3
+T330233121032211211323200333220322102222022220
+>2_141_1410_F3,-607445.0
+T033011003110002310332321320210120023013231310
+>2_141_1549_F3,-866383.0
+T132012120331102133122013001203012313100101122
+>2_141_1554_F3,1908928.1
+T211301211220213213101212011232121230211012020
+>2_141_1740_F3
+T201123033022033200233202332230322223232322222
+>2_141_2020_F3
+T322223100002320000222002011002110220202030122
+>2_142_135_F3
+T300310322222003310022312210031120102332213223
+>2_142_214_F3,424707.1,330666.1,91385.1,20588.1
+T032302212011223322222012222330302020122303200
+>2_142_337_F3,-1191166.2
+T303121300032022123111020301231111210012311212
+>2_142_386_F3
+T331022022102223333223301331030333032230323322
+>2_142_600_F3,1403023.0
+T231201031030101112020222220112010012223101202
+>2_142_676_F3
+T231313132313002133222321231120211011322312132
+>2_142_695_F3
+T003113103311021321201301131112313212233111221
+>2_142_699_F3
+T002023222210321321301312101120011320213131222
+>2_142_733_F3
+T000210022021103222233322331300223313201012112
+>2_142_790_F3
+T222112110020333223232122122230212132121021231
+>2_142_979_F3,914430.1
+T333110220011220232310111031211321101331022320
+>2_142_1031_F3
+T020010002101012111020122230000320102211000322
+>2_142_1097_F3
+T031202101120120300101132333130321032300312230
+>2_142_1114_F3
+T002301033312123201220013203030221003201220112
+>2_142_1163_F3
+T133321123201201002101022320332213231121011200
+>2_142_1434_F3,1556197.3
+T330210023301210321022231112021012311231200112
+>2_142_1443_F3
+T002231010211211330023131031103130133130020200
+>2_142_1487_F3,787594.1
+T001022200030033230330111000332010013132210222
+>2_142_1798_F3
+T233113111311333222323231223222212223332222222
+>2_142_1928_F3,1106551.0
+T320120132102100012011212102102202202222022102
+>2_143_34_F3,-532776.1,-532605.1,-532434.1
+T230203033223333313323000201013122022112323223
+>2_143_114_F3,-1137342.0
+T021302030013132210003302022100331300110233222
+>2_143_188_F3
+T003200202032101331313010230120211310110031222
+>2_143_557_F3,-1543999.1
+T331103323122201123133212031020202011112130231
+>2_143_605_F3
+T122010223013022010031122021232221122220112222
+>2_143_644_F3,383069.3
+T330010202301322332222312021321200110122323223
+>2_143_662_F3
+T312201203032200232101321112212311023310012223
+>2_143_760_F3,-706118.0
+T322002223131023020201101301102111303110000102
+>2_143_764_F3,-641755.0
+T231020132132300231032022000123113320331023230
+>2_143_899_F3
+T321323110223003320201233222322321021232222222
+>2_143_1027_F3
+T322220312202010110020211111030311111112110022
+>2_143_1139_F3
+T031032202031103131032330311111211133113033230
+>2_143_1146_F3,1618968.2
+T110001012331020133013100312223102231221013302
+>2_143_1219_F3,437850.1
+T100022023301021301013000112203220221111211200
+>2_143_1286_F3
+T122020220301023310213233333010100010201220122
+>2_143_1350_F3
+T300302022223232010321000031021110322112201030
+>2_143_1363_F3
+T320301321101010010121023023013211132212312220
+>2_143_1382_F3,361830.0
+T000032222230212312312313220012201231321020220
+>2_143_1602_F3
+T110000202113021230022113113032310011302113121
+>2_143_1629_F3
+T330321123231123230021333132002310001302212312
+>2_143_1672_F3
+T023103010032233133012211113002111112010111220
diff --git a/test-data/simple_line.txt b/test-data/simple_line.txt
new file mode 100644
index 0000000..f71ace9
--- /dev/null
+++ b/test-data/simple_line.txt
@@ -0,0 +1 @@
+This is a line of text.
diff --git a/test-data/simple_line_alternative.txt b/test-data/simple_line_alternative.txt
new file mode 100644
index 0000000..6966fea
--- /dev/null
+++ b/test-data/simple_line_alternative.txt
@@ -0,0 +1 @@
+This is a different line of text.
\ No newline at end of file
diff --git a/test-data/simple_line_x2.txt b/test-data/simple_line_x2.txt
new file mode 100644
index 0000000..0fbb0d8
--- /dev/null
+++ b/test-data/simple_line_x2.txt
@@ -0,0 +1,2 @@
+This is a line of text.
+This is a line of text.
diff --git a/test-data/simple_line_x3.txt b/test-data/simple_line_x3.txt
new file mode 100644
index 0000000..348d985
--- /dev/null
+++ b/test-data/simple_line_x3.txt
@@ -0,0 +1,3 @@
+This is a line of text.
+This is a line of text.
+This is a line of text.
diff --git a/test-data/simple_line_x5.txt b/test-data/simple_line_x5.txt
new file mode 100644
index 0000000..bdd333b
--- /dev/null
+++ b/test-data/simple_line_x5.txt
@@ -0,0 +1,5 @@
+This is a line of text.
+This is a line of text.
+This is a line of text.
+This is a line of text.
+This is a line of text.
diff --git a/test-data/simple_lines_both.txt b/test-data/simple_lines_both.txt
new file mode 100644
index 0000000..2c3f9c9
--- /dev/null
+++ b/test-data/simple_lines_both.txt
@@ -0,0 +1,2 @@
+This is a line of text.
+This is a different line of text.
\ No newline at end of file
diff --git a/test-data/simple_lines_interleaved.txt b/test-data/simple_lines_interleaved.txt
new file mode 100644
index 0000000..a1ec72a
--- /dev/null
+++ b/test-data/simple_lines_interleaved.txt
@@ -0,0 +1,4 @@
+This is a line of text.
+This is a different line of text.
+This is a line of text.
+This is a different line of text.
\ No newline at end of file
diff --git a/test-data/tinywga.bed b/test-data/tinywga.bed
new file mode 100644
index 0000000..e8b3867
--- /dev/null
+++ b/test-data/tinywga.bed
Binary files /dev/null and b/test-data/tinywga.bed differ
diff --git a/test-data/tinywga.bim b/test-data/tinywga.bim
new file mode 100644
index 0000000..3a2bf2c
--- /dev/null
+++ b/test-data/tinywga.bim
@@ -0,0 +1,25 @@
+22	rs2283802	0	21784722	4	2
+22	rs2267000	0	21785366	4	2
+22	rs16997606	0	21794754	1	3
+22	rs4820537	0	21794810	1	3
+22	rs3788347	0	21797804	3	1
+22	rs756632	0	21799918	4	2
+22	rs4820539	0	21807970	1	3
+22	rs2283804	0	21820335	1	2
+22	rs2267006	0	21820990	3	1
+22	rs4822363	0	21821000	4	2
+22	rs5751592	0	21827674	4	2
+22	rs5759608	0	21832708	2	4
+22	rs5759612	0	21833170	3	1
+22	rs2267009	0	21860168	3	4
+22	rs2267010	0	21864366	3	1
+22	rs5759636	0	21868698	4	2
+22	rs2071436	0	21871488	4	2
+22	rs2267013	0	21875879	3	1
+22	rs6003566	0	21889806	3	1
+22	rs2256725	0	21892891	2	1
+22	rs12160770	0	21892925	1	3
+22	rs5751611	0	21896019	2	4
+22	rs762601	0	21898858	1	3
+22	rs2156921	0	21899063	3	1
+22	rs4822375	0	21905642	1	3
diff --git a/test-data/tinywga.fam b/test-data/tinywga.fam
new file mode 100644
index 0000000..dc70cff
--- /dev/null
+++ b/test-data/tinywga.fam
@@ -0,0 +1,40 @@
+101 1 3 2 2 2
+101 2 0 0 2 1
+101 3 0 0 1 1
+105 1 3 2 2 2
+105 2 0 0 2 1
+105 3 0 0 1 1
+112 1 3 2 1 2
+112 2 0 0 2 1
+112 3 0 0 1 1
+117 1 3 2 2 2
+117 2 0 0 2 1
+117 3 0 0 1 1
+12 1 3 2 1 2
+12 2 0 0 2 1
+12 3 0 0 1 1
+13 1 3 2 1 2
+13 2 0 0 2 1
+13 3 0 0 1 1
+1334 1 10 11 1 2
+1334 10 0 0 1 1
+1334 11 0 0 2 1
+1334 12 0 0 1 1
+1334 13 0 0 2 1
+1334 2 12 13 2 2
+1340 1 9 10 1 2
+1340 10 0 0 2 1
+1340 11 0 0 1 1
+1340 12 0 0 2 1
+1340 2 11 12 2 2
+1340 9 0 0 1 1
+1341 1 11 12 1 1
+1341 11 0 0 1 1
+1341 12 0 0 2 1
+1341 13 0 0 1 1
+1341 14 0 0 2 1
+1341 2 13 14 2 1
+1344 1 12 13 1 1
+1344 12 0 0 1 1
+1344 13 0 0 2 1
+1345 12 0 0 1 1
diff --git a/test-data/tinywga.map b/test-data/tinywga.map
new file mode 100644
index 0000000..cf0e641
--- /dev/null
+++ b/test-data/tinywga.map
@@ -0,0 +1,25 @@
+22	rs2283802	0	21784722
+22	rs2267000	0	21785366
+22	rs16997606	0	21794754
+22	rs4820537	0	21794810
+22	rs3788347	0	21797804
+22	rs756632	0	21799918
+22	rs4820539	0	21807970
+22	rs2283804	0	21820335
+22	rs2267006	0	21820990
+22	rs4822363	0	21821000
+22	rs5751592	0	21827674
+22	rs5759608	0	21832708
+22	rs5759612	0	21833170
+22	rs2267009	0	21860168
+22	rs2267010	0	21864366
+22	rs5759636	0	21868698
+22	rs2071436	0	21871488
+22	rs2267013	0	21875879
+22	rs6003566	0	21889806
+22	rs2256725	0	21892891
+22	rs12160770	0	21892925
+22	rs5751611	0	21896019
+22	rs762601	0	21898858
+22	rs2156921	0	21899063
+22	rs4822375	0	21905642
diff --git a/test-data/tinywga.ped b/test-data/tinywga.ped
new file mode 100644
index 0000000..1092f70
--- /dev/null
+++ b/test-data/tinywga.ped
@@ -0,0 +1,40 @@
+101 1 3 2 2 2 2 2 4 2 1 3 3 3 3 3 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 3 1 3 1 2 1 0 0 2 2 1 3 3 1 1 3
+101 2 0 0 2 1 2 2 4 2 1 3 3 3 3 3 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 3 1 3 1 2 1 3 3 2 2 1 3 3 1 1 3
+101 3 0 0 1 1 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 1 3 2 4 3 3 1 1 3 3
+105 1 3 2 2 2 2 2 4 2 3 3 3 3 3 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 3 1 1 1 2 1 0 0 2 2 1 1 3 3 1 1
+105 2 0 0 2 1 2 2 4 4 3 3 3 3 3 1 2 2 1 3 1 2 3 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+105 3 0 0 1 1 4 2 2 2 3 3 3 3 3 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+112 1 3 2 1 2 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+112 2 0 0 2 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+112 3 0 0 1 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 2 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+117 1 3 2 2 2 2 2 4 2 3 3 3 3 3 3 4 2 1 1 2 2 1 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+117 2 0 0 2 1 2 2 4 4 1 3 3 3 3 3 2 2 1 3 2 2 1 1 2 2 4 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+117 3 0 0 1 1 2 2 4 2 3 3 3 3 3 3 4 2 1 1 2 2 1 1 2 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+12 1 3 2 1 2 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+12 2 0 0 2 1 2 2 4 4 1 3 3 3 3 3 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+12 3 0 0 1 1 2 2 4 2 1 3 1 3 3 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+13 1 3 2 1 2 4 2 4 2 1 3 3 3 3 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 4 1 1 2 2 4 2 1 1 3 1 1 1 3 3 2 4 3 3 1 1 3 3
+13 2 0 0 2 1 4 2 2 2 3 3 1 3 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 0 0 0 0 3 3 1 1 3 3
+13 3 0 0 1 1 2 2 4 4 1 3 3 3 3 3 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 4 4 3 1 2 2 2 2 1 1 3 1 1 1 3 3 2 2 3 3 1 1 3 3
+1334 1 10 11 1 2 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1334 10 0 0 1 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1334 11 0 0 2 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1334 12 0 0 1 1 4 2 2 2 3 3 1 3 1 1 2 2 3 3 2 2 1 1 2 2 4 2 4 4 1 1 3 4 1 1 2 2 4 2 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1334 13 0 0 2 1 4 2 4 2 3 3 1 3 3 1 4 2 1 3 2 2 1 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1334 2 12 13 2 2 4 4 2 2 3 3 1 3 1 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 3 1 1 2 2 4 4 3 1 1 1 2 1 3 3 2 4 1 3 3 1 1 3
+1340 1 9 10 1 2 4 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 3 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1340 10 0 0 2 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 3 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1340 11 0 0 1 1 4 2 4 2 3 3 1 3 3 1 4 2 1 3 2 2 1 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 12 0 0 2 1 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 4 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 2 11 12 2 2 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1340 9 0 0 1 1 4 4 2 2 3 3 1 1 1 1 2 2 3 3 2 2 1 1 2 2 2 2 4 4 1 1 3 3 1 1 2 2 4 4 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 1 11 12 1 1 2 2 4 2 3 3 1 3 3 1 2 2 1 3 1 2 3 1 4 2 4 2 2 4 3 1 4 4 1 1 0 0 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+1341 11 0 0 1 1 2 2 2 2 3 3 1 1 1 1 2 2 3 3 1 1 3 3 2 2 2 2 2 2 3 3 4 4 1 1 4 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1341 12 0 0 2 1 4 2 4 2 3 3 1 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 4 2 4 2 3 1 1 1 2 1 3 3 2 2 1 1 3 3 1 1
+1341 13 0 0 1 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 14 0 0 2 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1341 2 13 14 2 1 4 2 2 2 3 3 1 1 1 1 2 2 3 3 1 2 3 1 2 2 2 2 2 4 3 1 3 4 1 1 2 2 4 2 1 1 1 1 1 1 3 3 4 4 3 3 1 1 3 3
+1344 1 12 13 1 1 2 2 4 4 3 3 3 3 3 3 4 4 1 1 2 2 1 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 2 1 1 3 3 1 1
+1344 12 0 0 1 1 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1344 13 0 0 2 1 2 2 4 2 3 3 1 3 3 1 4 2 1 3 1 2 3 1 2 2 2 2 2 2 3 3 4 4 1 1 2 2 2 2 1 1 1 1 1 1 3 3 2 4 1 3 3 1 1 3
+1345 12 0 0 1 1 4 2 4 2 3 3 3 3 3 1 2 2 1 3 2 2 1 1 4 2 4 2 4 4 1 1 3 4 1 1 4 2 4 2 3 1 1 1 2 1 3 3 2 2 1 1 3 3 1 1
diff --git a/test-data/tinywga.ped.space_to_tab b/test-data/tinywga.ped.space_to_tab
new file mode 100644
index 0000000..e6544b4
--- /dev/null
+++ b/test-data/tinywga.ped.space_to_tab
@@ -0,0 +1,40 @@
+101	1	3	2	2	2	2	2	4	2	1	3	3	3	3	3	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	4	4	1	1	2	2	2	2	3	1	3	1	2	1	0	0	2	2	1	3	3	1	1	3
+101	2	0	0	2	1	2	2	4	2	1	3	3	3	3	3	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	4	4	1	1	2	2	2	2	3	1	3	1	2	1	3	3	2	2	1	3	3	1	1	3
+101	3	0	0	1	1	2	2	4	4	1	3	3	3	3	3	2	2	3	3	2	2	1	1	2	2	2	2	4	4	1	1	4	4	1	1	2	2	2	2	1	1	3	1	1	1	1	3	2	4	3	3	1	1	3	3
+105	1	3	2	2	2	2	2	4	2	3	3	3	3	3	1	2	2	3	3	1	1	3	3	2	2	2	2	2	2	3	3	4	4	1	1	2	2	2	2	3	1	1	1	2	1	0	0	2	2	1	1	3	3	1	1
+105	2	0	0	2	1	2	2	4	4	3	3	3	3	3	1	2	2	1	3	1	2	3	1	2	2	4	2	2	4	3	1	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+105	3	0	0	1	1	4	2	2	2	3	3	3	3	3	1	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	3	4	1	1	2	2	4	2	3	1	1	1	2	1	3	3	2	4	1	3	3	1	1	3
+112	1	3	2	1	2	4	2	2	2	3	3	1	1	1	1	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	3	4	1	1	2	2	4	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+112	2	0	0	2	1	2	2	2	2	3	3	1	1	1	1	2	2	3	3	1	1	3	3	2	2	2	2	2	2	3	3	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+112	3	0	0	1	1	4	2	4	2	3	3	1	3	3	1	2	2	1	3	2	2	1	1	2	2	4	2	4	4	1	1	3	4	1	1	2	2	4	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+117	1	3	2	2	2	2	2	4	2	3	3	3	3	3	3	4	2	1	1	2	2	1	1	2	2	4	2	2	4	3	1	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	2	2	1	1	3	3	1	1
+117	2	0	0	2	1	2	2	4	4	1	3	3	3	3	3	2	2	1	3	2	2	1	1	2	2	4	2	4	4	1	1	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+117	3	0	0	1	1	2	2	4	2	3	3	3	3	3	3	4	2	1	1	2	2	1	1	2	2	4	2	2	4	3	1	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+12	1	3	2	1	2	2	2	4	4	1	3	3	3	3	3	2	2	3	3	2	2	1	1	2	2	2	2	4	4	1	1	4	4	1	1	2	2	2	2	1	1	3	1	1	1	3	3	2	4	3	3	1	1	3	3
+12	2	0	0	2	1	2	2	4	4	1	3	3	3	3	3	2	2	3	3	2	2	1	1	2	2	2	2	4	4	1	1	4	4	1	1	2	2	2	2	1	1	3	1	1	1	3	3	2	4	3	3	1	1	3	3
+12	3	0	0	1	1	2	2	4	2	1	3	1	3	3	1	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	4	4	1	1	2	2	2	2	1	1	3	1	1	1	3	3	2	4	3	3	1	1	3	3
+13	1	3	2	1	2	4	2	4	2	1	3	3	3	3	1	2	2	3	3	2	2	1	1	2	2	2	2	4	4	1	1	3	4	1	1	2	2	4	2	1	1	3	1	1	1	3	3	2	4	3	3	1	1	3	3
+13	2	0	0	2	1	4	2	2	2	3	3	1	3	1	1	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	3	4	1	1	2	2	4	2	1	1	1	1	1	1	0	0	0	0	3	3	1	1	3	3
+13	3	0	0	1	1	2	2	4	4	1	3	3	3	3	3	2	2	1	3	1	2	3	1	2	2	2	2	2	4	3	1	4	4	3	1	2	2	2	2	1	1	3	1	1	1	3	3	2	2	3	3	1	1	3	3
+1334	1	10	11	1	2	2	2	4	2	3	3	1	3	3	1	2	2	1	3	1	2	3	1	4	2	4	2	2	4	3	1	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+1334	10	0	0	1	1	4	2	4	2	3	3	1	3	3	1	2	2	1	3	2	2	1	1	4	2	4	2	4	4	1	1	3	4	1	1	2	2	4	2	3	1	1	1	2	1	3	3	2	4	1	3	3	1	1	3
+1334	11	0	0	2	1	2	2	2	2	3	3	1	1	1	1	2	2	3	3	1	1	3	3	2	2	2	2	2	2	3	3	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+1334	12	0	0	1	1	4	2	2	2	3	3	1	3	1	1	2	2	3	3	2	2	1	1	2	2	4	2	4	4	1	1	3	4	1	1	2	2	4	2	3	1	1	1	2	1	3	3	2	4	1	3	3	1	1	3
+1334	13	0	0	2	1	4	2	4	2	3	3	1	3	3	1	4	2	1	3	2	2	1	1	2	2	2	2	2	4	3	1	3	4	1	1	2	2	4	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+1334	2	12	13	2	2	4	4	2	2	3	3	1	3	1	1	2	2	3	3	2	2	1	1	2	2	2	2	4	4	1	1	3	3	1	1	2	2	4	4	3	1	1	1	2	1	3	3	2	4	1	3	3	1	1	3
+1340	1	9	10	1	2	4	2	4	2	3	3	1	3	3	1	2	2	1	3	1	2	3	1	2	2	2	2	2	4	3	1	3	4	3	1	2	2	4	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+1340	10	0	0	2	1	4	2	4	2	3	3	1	3	3	1	2	2	1	3	1	2	3	1	2	2	2	2	2	4	3	1	3	4	3	1	2	2	4	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+1340	11	0	0	1	1	4	2	4	2	3	3	1	3	3	1	4	2	1	3	2	2	1	1	2	2	2	2	2	4	3	1	3	4	1	1	2	2	4	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+1340	12	0	0	2	1	2	2	4	2	3	3	1	3	3	1	2	2	1	3	1	2	3	1	4	2	4	2	2	4	3	1	4	4	1	1	4	2	2	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+1340	2	11	12	2	2	2	2	4	2	3	3	1	3	3	1	4	2	1	3	1	2	3	1	2	2	2	2	2	2	3	3	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+1340	9	0	0	1	1	4	4	2	2	3	3	1	1	1	1	2	2	3	3	2	2	1	1	2	2	2	2	4	4	1	1	3	3	1	1	2	2	4	4	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+1341	1	11	12	1	1	2	2	4	2	3	3	1	3	3	1	2	2	1	3	1	2	3	1	4	2	4	2	2	4	3	1	4	4	1	1	0	0	2	2	1	1	1	1	1	1	3	3	2	2	1	1	3	3	1	1
+1341	11	0	0	1	1	2	2	2	2	3	3	1	1	1	1	2	2	3	3	1	1	3	3	2	2	2	2	2	2	3	3	4	4	1	1	4	2	2	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+1341	12	0	0	2	1	4	2	4	2	3	3	1	3	3	1	2	2	1	3	2	2	1	1	4	2	4	2	4	4	1	1	3	4	1	1	4	2	4	2	3	1	1	1	2	1	3	3	2	2	1	1	3	3	1	1
+1341	13	0	0	1	1	4	2	2	2	3	3	1	1	1	1	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	3	4	1	1	2	2	4	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+1341	14	0	0	2	1	4	2	2	2	3	3	1	1	1	1	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	3	4	1	1	2	2	4	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+1341	2	13	14	2	1	4	2	2	2	3	3	1	1	1	1	2	2	3	3	1	2	3	1	2	2	2	2	2	4	3	1	3	4	1	1	2	2	4	2	1	1	1	1	1	1	3	3	4	4	3	3	1	1	3	3
+1344	1	12	13	1	1	2	2	4	4	3	3	3	3	3	3	4	4	1	1	2	2	1	1	2	2	2	2	2	2	3	3	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	2	2	1	1	3	3	1	1
+1344	12	0	0	1	1	2	2	4	2	3	3	1	3	3	1	4	2	1	3	1	2	3	1	2	2	2	2	2	2	3	3	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+1344	13	0	0	2	1	2	2	4	2	3	3	1	3	3	1	4	2	1	3	1	2	3	1	2	2	2	2	2	2	3	3	4	4	1	1	2	2	2	2	1	1	1	1	1	1	3	3	2	4	1	3	3	1	1	3
+1345	12	0	0	1	1	4	2	4	2	3	3	3	3	3	1	2	2	1	3	2	2	1	1	4	2	4	2	4	4	1	1	3	4	1	1	4	2	4	2	3	1	1	1	2	1	3	3	2	2	1	1	3	3	1	1
diff --git a/test-data/users/test1@bx.psu.edu/1.fasta b/test-data/users/test1@bx.psu.edu/1.fasta
new file mode 100644
index 0000000..c9ecbb6
--- /dev/null
+++ b/test-data/users/test1@bx.psu.edu/1.fasta
@@ -0,0 +1,2 @@
+>hg17
+gtttgccatcttttgctgctctagggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTAAAACTTCCC-------------------------------AAATACT-GCCACTGATGTCCTG-----ATGGAGGTA-------TGAA-------------------AACATCCACTAAAATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGGGAGATATTTGGggaaatt---ttgtatagactagctttca [...]
diff --git a/test-data/users/test3@bx.psu.edu/run1/2.fasta b/test-data/users/test3@bx.psu.edu/run1/2.fasta
new file mode 100644
index 0000000..c2a830c
--- /dev/null
+++ b/test-data/users/test3@bx.psu.edu/run1/2.fasta
@@ -0,0 +1,11 @@
+>Sequence 561 BP; 135 A; 106 C; 98 G; 222 T; 0 other;
+gttcgatgcc taaaatacct tcttttgtcc ctacacagac cacagttttc ctaatggctt
+tacaccgact agaaattctt gtgcaagcac taattgaaag cggttggcct agagtgttac
+cggtttgtat agctgagcgc gtctcttgcc ctgatcaaag gttcattttc tctactttgg
+aagacgttgt ggaagaatac aacaagtacg agtctctccc ccctggtttg ctgattactg
+gatacagttg taataccctt cgcaacaccg cgtaactatc tatatgaatt attttccctt
+tattatatgt agtaggttcg tctttaatct tcctttagca agtcttttac tgttttcgac
+ctcaatgttc atgttcttag gttgttttgg ataatatgcg gtcagtttaa tcttcgttgt
+ttcttcttaa aatatttatt catggtttaa tttttggttt gtacttgttc aggggccagt
+tcattattta ctctgtttgt atacagcagt tcttttattt ttagtatgat tttaatttaa
+aacaattcta atggtcaaaa a
diff --git a/test/TESTING.md b/test/TESTING.md
new file mode 100644
index 0000000..8bab918
--- /dev/null
+++ b/test/TESTING.md
@@ -0,0 +1,7 @@
+Galaxy Testing
+==============
+
+The Galaxy code base is large and contains many kinds of tests. Please
+consult the [Galaxy
+wiki](http://wiki.galaxyproject.org/Admin/Running%20Tests) for more
+information on testing Galaxy.
diff --git a/test/api/__init__.py b/test/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/api/helpers.py b/test/api/helpers.py
new file mode 100644
index 0000000..a4276a0
--- /dev/null
+++ b/test/api/helpers.py
@@ -0,0 +1,489 @@
+import json
+import time
+from operator import itemgetter
+
+from pkg_resources import resource_string
+from six import StringIO
+
+from base import api_asserts
+
+# Simple workflow that takes an input and calls the cat wrapper on it.
+workflow_str = resource_string( __name__, "test_workflow_1.ga" )
+# Simple workflow that takes an input and filters with random lines twice in a
+# row - first grabbing 8 lines at random and then 6.
+workflow_random_x2_str = resource_string( __name__, "test_workflow_2.ga" )
+
+
+DEFAULT_TIMEOUT = 15  # Secs to wait for state to turn ok
+
+
+def skip_without_tool( tool_id ):
+    """ Decorate an API test method as requiring a specific tool,
+    have nose skip the test case is the tool is unavailable.
+    """
+
+    def method_wrapper( method ):
+
+        def get_tool_ids( api_test_case ):
+            index = api_test_case.galaxy_interactor.get( "tools", data=dict(in_panel=False) )
+            tools = index.json()
+            # in_panel=False returns a flat tool list rather than panel sections, so the ids can be read off directly.
+            tool_ids = [itemgetter( "id" )(_) for _ in tools]
+            return tool_ids
+
+        def wrapped_method( api_test_case, *args, **kwargs ):
+            if tool_id not in get_tool_ids( api_test_case ):
+                from nose.plugins.skip import SkipTest
+                raise SkipTest( )
+
+            return method( api_test_case, *args, **kwargs )
+
+        # Must preserve method name so nose can detect and report tests by
+        # name.
+        wrapped_method.__name__ = method.__name__
+        return wrapped_method
+
+    return method_wrapper
+
+
+# Deprecated mixin, use dataset populator instead.
+# TODO: Rework existing tests to target DatasetPopulator in a setup method instead.
+class TestsDatasets:
+
+    def _new_dataset( self, history_id, content='TestData123', **kwds ):
+        return DatasetPopulator( self.galaxy_interactor ).new_dataset( history_id, content=content, **kwds)
+
+    def _wait_for_history( self, history_id, assert_ok=False ):
+        return DatasetPopulator( self.galaxy_interactor ).wait_for_history( history_id, assert_ok=assert_ok )
+
+    def _new_history( self, **kwds ):
+        return DatasetPopulator( self.galaxy_interactor ).new_history( **kwds )
+
+    def _upload_payload( self, history_id, content, **kwds ):
+        return DatasetPopulator( self.galaxy_interactor ).upload_payload( history_id, content, **kwds )
+
+    def _run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
+        return DatasetPopulator( self.galaxy_interactor ).run_tool_payload( tool_id, inputs, history_id, **kwds )
+
+
+class BaseDatasetPopulator( object ):
+    """ Abstract description of API operations optimized for testing
+    Galaxy - implementations must implement _get and _post.
+    """
+
+    def new_dataset( self, history_id, content='TestData123', wait=False, **kwds ):
+        payload = self.upload_payload( history_id, content, **kwds )
+        run_response = self._post( "tools", data=payload ).json()
+        if wait:
+            job = run_response["jobs"][0]
+            self.wait_for_job(job["id"])
+            self.wait_for_history(history_id, assert_ok=True)
+        return run_response["outputs"][0]
+
+    def wait_for_history( self, history_id, assert_ok=False, timeout=DEFAULT_TIMEOUT ):
+        try:
+            return wait_on_state( lambda: self._get( "histories/%s" % history_id ), assert_ok=assert_ok, timeout=timeout )
+        except AssertionError:
+            self._summarize_history_errors( history_id )
+            raise
+
+    def wait_for_job( self, job_id, assert_ok=False, timeout=DEFAULT_TIMEOUT ):
+        return wait_on_state( lambda: self.get_job_details( job_id ), assert_ok=assert_ok, timeout=timeout )
+
+    def get_job_details( self, job_id, full=False ):
+        return self._get( "jobs/%s?full=%s" % (job_id, full) )
+
+    def _summarize_history_errors( self, history_id ):
+        pass
+
+    def new_history( self, **kwds ):
+        name = kwds.get( "name", "API Test History" )
+        create_history_response = self._post( "histories", data=dict( name=name ) )
+        history_id = create_history_response.json()[ "id" ]
+        return history_id
+
+    def upload_payload( self, history_id, content, **kwds ):
+        name = kwds.get( "name", "Test Dataset" )
+        dbkey = kwds.get( "dbkey", "?" )
+        file_type = kwds.get( "file_type", 'txt' )
+        upload_params = {
+            'files_0|NAME': name,
+            'dbkey': dbkey,
+            'file_type': file_type,
+        }
+        if hasattr(content, 'read'):
+            upload_params[ "files_0|file_data"] = content
+        else:
+            upload_params[ 'files_0|url_paste' ] = content
+
+        if "to_posix_lines" in kwds:
+            upload_params[ "files_0|to_posix_lines"] = kwds[ "to_posix_lines" ]
+        if "space_to_tab" in kwds:
+            upload_params[ "files_0|space_to_tab" ] = kwds[ "space_to_tab" ]
+        return self.run_tool_payload(
+            tool_id='upload1',
+            inputs=upload_params,
+            history_id=history_id,
+            upload_type='upload_dataset'
+        )
+
+    def run_tool_payload( self, tool_id, inputs, history_id, **kwds ):
+        if "files_0|file_data" in inputs:
+            kwds[ "__files" ] = { "files_0|file_data": inputs[ "files_0|file_data" ] }
+            del inputs[ "files_0|file_data" ]
+
+        return dict(
+            tool_id=tool_id,
+            inputs=json.dumps(inputs),
+            history_id=history_id,
+            **kwds
+        )
+
+    def run_tool( self, tool_id, inputs, history_id, **kwds ):
+        payload = self.run_tool_payload( tool_id, inputs, history_id, **kwds )
+        tool_response = self._post( "tools", data=payload )
+        api_asserts.assert_status_code_is( tool_response, 200 )
+        return tool_response.json()
+
+    def get_history_dataset_content( self, history_id, wait=True, **kwds ):
+        dataset_id = self.__history_content_id( history_id, wait=wait, **kwds )
+        display_response = self.__get_contents_request( history_id, "/%s/display" % dataset_id )
+        assert display_response.status_code == 200, display_response.content
+        return display_response.content
+
+    def get_history_dataset_details( self, history_id, **kwds ):
+        dataset_id = self.__history_content_id( history_id, **kwds )
+        details_response = self.__get_contents_request( history_id, "/datasets/%s" % dataset_id )
+        assert details_response.status_code == 200
+        return details_response.json()
+
+    def get_history_collection_details( self, history_id, **kwds ):
+        hdca_id = self.__history_content_id( history_id, **kwds )
+        details_response = self.__get_contents_request( history_id, "/dataset_collections/%s" % hdca_id )
+        assert details_response.status_code == 200, details_response.content
+        return details_response.json()
+
+    def __history_content_id( self, history_id, wait=True, **kwds ):
+        if wait:
+            assert_ok = kwds.get( "assert_ok", True )
+            self.wait_for_history( history_id, assert_ok=assert_ok )
+        # kwds should contain a 'dataset' object response or a 'dataset_id';
+        # otherwise the last dataset in the history will be fetched.
+        if "dataset_id" in kwds:
+            dataset_id = kwds[ "dataset_id" ]
+        elif "dataset" in kwds:
+            dataset_id = kwds[ "dataset" ][ "id" ]
+        else:
+            hid = kwds.get( "hid", None )  # If not hid, just grab last dataset
+            if hid:
+                index = hid - 1
+            else:
+                # No hid specified - just grab most recent element.
+                index = -1
+            dataset_contents = self.__get_contents_request( history_id ).json()
+            dataset_id = dataset_contents[ index ][ "id" ]
+        return dataset_id
+
+    def __get_contents_request( self, history_id, suffix=""):
+        url = "histories/%s/contents" % history_id
+        if suffix:
+            url = "%s%s" % ( url, suffix )
+        return self._get( url )
+
+
+class DatasetPopulator( BaseDatasetPopulator ):
+
+    def __init__( self, galaxy_interactor ):
+        self.galaxy_interactor = galaxy_interactor
+
+    def _post( self, route, data={}, files=None ):
+        # An explicitly passed files argument takes precedence over any
+        # "__files" entry embedded in the data payload.
+        embedded_files = data.pop( "__files", None )
+        files = files if files is not None else embedded_files
+        return self.galaxy_interactor.post( route, data, files=files )
+
+    def _get( self, route ):
+        return self.galaxy_interactor.get( route )
+
+    def _summarize_history_errors( self, history_id ):
+        self.galaxy_interactor._summarize_history_errors( history_id )
+
+
+class BaseWorkflowPopulator( object ):
+
+    def load_workflow( self, name, content=workflow_str, add_pja=False ):
+        workflow = json.loads( content )
+        workflow[ "name" ] = name
+        if add_pja:
+            tool_step = workflow[ "steps" ][ "2" ]
+            tool_step[ "post_job_actions" ][ "RenameDatasetActionout_file1" ] = dict(
+                action_type="RenameDatasetAction",
+                output_name="out_file1",
+                action_arguments=dict( newname="foo ${replaceme}" ),
+            )
+        return workflow
+
+    def load_random_x2_workflow( self, name ):
+        return self.load_workflow( name, content=workflow_random_x2_str )
+
+    def load_workflow_from_resource( self, name, filename=None ):
+        if filename is None:
+            filename = "%s.ga" % name
+        content = resource_string( __name__, filename )
+        return self.load_workflow( name, content=content )
+
+    def simple_workflow( self, name, **create_kwds ):
+        workflow = self.load_workflow( name )
+        return self.create_workflow( workflow, **create_kwds )
+
+    def create_workflow( self, workflow, **create_kwds ):
+        upload_response = self.create_workflow_response( workflow, **create_kwds )
+        uploaded_workflow_id = upload_response.json()[ "id" ]
+        return uploaded_workflow_id
+
+    def create_workflow_response( self, workflow, **create_kwds ):
+        data = dict(
+            workflow=json.dumps( workflow ),
+            **create_kwds
+        )
+        upload_response = self._post( "workflows/upload", data=data )
+        return upload_response
+
+    def wait_for_invocation( self, workflow_id, invocation_id, timeout=DEFAULT_TIMEOUT ):
+        url = "workflows/%s/usage/%s" % ( workflow_id, invocation_id )
+        return wait_on_state( lambda: self._get( url ), timeout=timeout )
+
+    def wait_for_workflow( self, workflow_id, invocation_id, history_id, assert_ok=True, timeout=DEFAULT_TIMEOUT ):
+        """ Wait for a workflow invocation to completely schedule and then history
+        to be complete. """
+        self.wait_for_invocation( workflow_id, invocation_id, timeout=timeout )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=assert_ok, timeout=timeout )
+
+
+class WorkflowPopulator( BaseWorkflowPopulator ):
+
+    def __init__( self, galaxy_interactor ):
+        self.galaxy_interactor = galaxy_interactor
+        self.dataset_populator = DatasetPopulator( galaxy_interactor )
+
+    def _post( self, route, data={} ):
+        return self.galaxy_interactor.post( route, data )
+
+    def _get( self, route ):
+        return self.galaxy_interactor.get( route )
+
+
+class LibraryPopulator( object ):
+
+    def __init__( self, api_test_case ):
+        self.api_test_case = api_test_case
+        self.galaxy_interactor = api_test_case.galaxy_interactor
+
+    def new_private_library( self, name ):
+        library = self.new_library( name )
+        library_id = library[ "id" ]
+
+        role_id = self.user_private_role_id()
+        self.set_permissions( library_id, role_id )
+        return library
+
+    def new_library( self, name ):
+        data = dict( name=name )
+        create_response = self.galaxy_interactor.post( "libraries", data=data, admin=True )
+        return create_response.json()
+
+    def set_permissions( self, library_id, role_id=None ):
+        if role_id:
+            perm_list = json.dumps( role_id )
+        else:
+            perm_list = json.dumps( [] )
+
+        permissions = dict(
+            LIBRARY_ACCESS_in=perm_list,
+            LIBRARY_MODIFY_in=perm_list,
+            LIBRARY_ADD_in=perm_list,
+            LIBRARY_MANAGE_in=perm_list,
+        )
+        self.galaxy_interactor.post( "libraries/%s/permissions" % library_id, data=permissions, admin=True )
+
+    def user_email( self ):
+        users_response = self.galaxy_interactor.get( "users" )
+        users = users_response.json()
+        assert len( users ) == 1
+        return users[ 0 ][ "email" ]
+
+    def user_private_role_id( self ):
+        user_email = self.user_email()
+        roles_response = self.api_test_case.galaxy_interactor.get( "roles", admin=True )
+        users_roles = [ r for r in roles_response.json() if r[ "name" ] == user_email ]
+        assert len( users_roles ) == 1
+        return users_roles[ 0 ][ "id" ]
+
+    def create_dataset_request( self, library, **kwds ):
+        create_data = {
+            "folder_id": kwds.get( "folder_id", library[ "root_folder_id" ] ),
+            "create_type": "file",
+            "files_0|NAME": kwds.get( "name", "NewFile" ),
+            "upload_option": kwds.get( "upload_option", "upload_file" ),
+            "file_type": kwds.get( "file_type", "auto" ),
+            "db_key": kwds.get( "db_key", "?" ),
+        }
+        files = {
+            "files_0|file_data": kwds.get( "file", StringIO( kwds.get( "contents", "TestData" ) ) ),
+        }
+        return create_data, files
+
+    def new_library_dataset( self, name, **create_dataset_kwds ):
+        library = self.new_private_library( name )
+        payload, files = self.create_dataset_request( library, **create_dataset_kwds )
+        url_rel = "libraries/%s/contents" % ( library[ "id" ] )
+        dataset = self.api_test_case.galaxy_interactor.post( url_rel, payload, files=files ).json()[0]
+
+        def show():
+            return self.api_test_case.galaxy_interactor.get( "libraries/%s/contents/%s" % ( library[ "id" ], dataset[ "id" ] ) )
+
+        wait_on_state(show, timeout=DEFAULT_TIMEOUT)
+        return show().json()
+
+
+class BaseDatasetCollectionPopulator( object ):
+
+    def create_list_from_pairs( self, history_id, pairs ):
+        element_identifiers = []
+        for i, pair in enumerate( pairs ):
+            element_identifiers.append( dict(
+                name="test%d" % i,
+                src="hdca",
+                id=pair
+            ) )
+
+        payload = dict(
+            instance_type="history",
+            history_id=history_id,
+            element_identifiers=json.dumps(element_identifiers),
+            collection_type="list:paired",
+        )
+        return self.__create( payload )
+
+    def create_list_of_pairs_in_history( self, history_id, **kwds ):
+        pair1 = self.create_pair_in_history( history_id, **kwds ).json()["id"]
+        return self.create_list_from_pairs( history_id, [ pair1 ] )
+
+    def create_pair_in_history( self, history_id, **kwds ):
+        payload = self.create_pair_payload(
+            history_id,
+            instance_type="history",
+            **kwds
+        )
+        return self.__create( payload )
+
+    def create_list_in_history( self, history_id, **kwds ):
+        payload = self.create_list_payload(
+            history_id,
+            instance_type="history",
+            **kwds
+        )
+        return self.__create( payload )
+
+    def create_list_payload( self, history_id, **kwds ):
+        return self.__create_payload( history_id, identifiers_func=self.list_identifiers, collection_type="list", **kwds )
+
+    def create_pair_payload( self, history_id, **kwds ):
+        return self.__create_payload( history_id, identifiers_func=self.pair_identifiers, collection_type="paired", **kwds )
+
+    def __create_payload( self, history_id, identifiers_func, collection_type, **kwds ):
+        contents = None
+        if "contents" in kwds:
+            contents = kwds[ "contents" ]
+            del kwds[ "contents" ]
+
+        if "element_identifiers" not in kwds:
+            kwds[ "element_identifiers" ] = json.dumps( identifiers_func( history_id, contents=contents ) )
+
+        payload = dict(
+            history_id=history_id,
+            collection_type=collection_type,
+            **kwds
+        )
+        return payload
+
+    def pair_identifiers( self, history_id, contents=None ):
+        hda1, hda2 = self.__datasets( history_id, count=2, contents=contents )
+
+        element_identifiers = [
+            dict( name="forward", src="hda", id=hda1[ "id" ] ),
+            dict( name="reverse", src="hda", id=hda2[ "id" ] ),
+        ]
+        return element_identifiers
+
+    def list_identifiers( self, history_id, contents=None ):
+        count = 3 if not contents else len( contents )
+        # Contents can be a list of strings (with name auto-assigned here) or a list of
+        # 2-tuples of form (name, dataset_content).
+        if contents and isinstance(contents[0], tuple):
+            hdas = self.__datasets( history_id, count=count, contents=[c[1] for c in contents] )
+
+            def hda_to_identifier(i, hda):
+                return dict(name=contents[i][0], src="hda", id=hda["id"])
+        else:
+            hdas = self.__datasets( history_id, count=count, contents=contents )
+
+            def hda_to_identifier(i, hda):
+                return dict(name="data%d" % (i + 1), src="hda", id=hda["id"])
+        element_identifiers = [hda_to_identifier(i, hda) for (i, hda) in enumerate(hdas)]
+        return element_identifiers
+
+    def __create( self, payload ):
+        return self._create_collection( payload )
+
+    def __datasets( self, history_id, count, contents=None ):
+        datasets = []
+        for i in range( count ):
+            new_kwds = {}
+            if contents:
+                new_kwds[ "content" ] = contents[ i ]
+            datasets.append( self.dataset_populator.new_dataset( history_id, **new_kwds ) )
+        return datasets
+
+
+class DatasetCollectionPopulator( BaseDatasetCollectionPopulator ):
+
+    def __init__( self, galaxy_interactor ):
+        self.galaxy_interactor = galaxy_interactor
+        self.dataset_populator = DatasetPopulator( galaxy_interactor )
+
+    def _create_collection( self, payload ):
+        create_response = self.galaxy_interactor.post( "dataset_collections", data=payload )
+        return create_response
+
+
+def wait_on_state( state_func, assert_ok=False, timeout=DEFAULT_TIMEOUT ):
+    def get_state( ):
+        response = state_func()
+        assert response.status_code == 200, "Failed to fetch state update while waiting."
+        state = response.json()[ "state" ]
+        if state not in [ "running", "queued", "new", "ready" ]:
+            if assert_ok:
+                assert state == "ok", "Final state - %s - not okay." % state
+            return state
+        else:
+            return None
+    return wait_on( get_state, desc="state", timeout=timeout)
+
+
+def wait_on( function, desc, timeout=DEFAULT_TIMEOUT ):
+    delta = .25
+    iteration = 0
+    while True:
+        total_wait = delta * iteration
+        if total_wait > timeout:
+            timeout_message = "Timed out after %s seconds waiting on %s." % (
+                total_wait, desc
+            )
+            assert False, timeout_message
+        iteration += 1
+        value = function()
+        if value is not None:
+            return value
+        time.sleep( delta )
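
For orientation, here is a minimal sketch of how these populators are typically wired into a test case; the class name, test name, and dataset content below are illustrative only, but the pattern mirrors the API test modules that follow:

    from base import api

    from .helpers import DatasetPopulator


    class ExampleApiTestCase( api.ApiTestCase ):

        def setUp( self ):
            super( ExampleApiTestCase, self ).setUp()
            self.dataset_populator = DatasetPopulator( self.galaxy_interactor )

        def test_upload_roundtrip( self ):
            # Create a history, upload a small dataset into it, and wait for
            # the upload job to reach the "ok" state.
            history_id = self.dataset_populator.new_history()
            hda = self.dataset_populator.new_dataset( history_id, content="1 2 3" )
            self.dataset_populator.wait_for_history( history_id, assert_ok=True )
            # The uploaded content comes back with a trailing newline added by
            # the upload tool's posix-lines handling.
            content = self.dataset_populator.get_history_dataset_content( history_id, dataset_id=hda[ "id" ] )
            assert content == "1 2 3\n"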
diff --git a/test/api/test_api_batch.py b/test/api/test_api_batch.py
new file mode 100644
index 0000000..d85f0c8
--- /dev/null
+++ b/test/api/test_api_batch.py
@@ -0,0 +1,89 @@
+import json
+import logging
+import pprint
+
+from requests import post
+
+from base import api
+
+log = logging.getLogger( "functional_tests.py" )
+
+
+class ApiBatchTestCase( api.ApiTestCase ):
+
+    def _get_api_key( self, admin=False ):
+        return self.galaxy_interactor.api_key if not admin else self.galaxy_interactor.master_api_key
+
+    def _with_key( self, url, admin=False ):
+        sep = '&' if '?' in url else '?'
+        return url + sep + 'key=' + self._get_api_key( admin=admin )
+
+    def _post_batch( self, batch ):
+        data = json.dumps({ "batch" : batch })
+        return post( "%s/batch" % ( self.galaxy_interactor.api_url ), data=data )
+
+    def _log_response( self, response ):
+        log.debug( 'RESPONSE %s\n%s', ( '-' * 40 ), pprint.pformat( response ) )
+
+    # ---- tests
+    def test_simple_array( self ):
+        batch = [
+            dict( url=self._with_key( '/api/histories' ) ),
+            dict( url=self._with_key( '/api/histories' ),
+                  method='POST', body=json.dumps( dict( name='Wat' ) ) ),
+            dict( url=self._with_key( '/api/histories' ) ),
+        ]
+        response = self._post_batch( batch )
+        response = response.json()
+        # self._log_response( response )
+        self.assertIsInstance( response, list )
+        self.assertEquals( len( response ), 3 )
+
+    def test_unallowed_route( self ):
+        batch = [
+            dict( url=self._with_key( '/api/workflow' ) )
+        ]
+        response = self._post_batch( batch )
+        response = response.json()
+        self.assertIsInstance( response, list )
+        self.assertEquals( response[0][ 'status' ], 403 )
+
+    def test_404_route( self ):
+        # needs to be within the allowed routes
+        batch = [
+            dict( url=self._with_key( '/api/histories_bler' ) )
+        ]
+        response = self._post_batch( batch )
+        response = response.json()
+        self.assertIsInstance( response, list )
+        self.assertEquals( response[0][ 'status' ], 404 )
+
+    def test_errors( self ):
+        batch = [
+            dict( url=self._with_key( '/api/histories/abc123' ) ),
+            dict( url=self._with_key( '/api/jobs' ), method='POST', body=json.dumps( dict( name='Wat' ) ) ),
+        ]
+        response = self._post_batch( batch )
+        response = response.json()
+        # self._log_response( response )
+        self.assertIsInstance( response, list )
+        self.assertEquals( response[0][ 'status' ], 400 )
+        self.assertEquals( response[1][ 'status' ], 501 )
+
+    def test_querystring_params( self ):
+        post_data = dict( name='test' )
+        create_response = self._post( 'histories', data=post_data ).json()
+
+        history_url = '/api/histories/' + create_response[ 'id' ]
+        history_url_with_keys = history_url + '?v=dev&keys=size,non_ready_jobs'
+        contents_url_with_filters = history_url + '/contents?v=dev&q=deleted&qv=True'
+        batch = [
+            dict( url=self._with_key( history_url_with_keys ) ),
+            dict( url=self._with_key( contents_url_with_filters ) ),
+        ]
+        response = self._post_batch( batch )
+        response = response.json()
+        self._log_response( response )
+        self.assertEquals( len( response ), 2 )
+        self.assertEquals( len( response[0][ 'body' ].keys() ), 2 )
+        self.assertEquals( response[1][ 'body' ], [] )
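
The wire format these tests exercise is simply a JSON object with a "batch" list of per-request descriptors. A hedged standalone sketch of the same calls (the Galaxy URL and API key are placeholders):

    import json

    from requests import post

    api_key = "<api-key>"                      # placeholder
    galaxy_api = "http://localhost:8080/api"   # placeholder Galaxy API URL

    batch = [
        # Requests default to GET; POSTs carry a JSON-encoded body.
        { "url": "/api/histories?key=%s" % api_key },
        { "url": "/api/histories?key=%s" % api_key,
          "method": "POST", "body": json.dumps( { "name": "Wat" } ) },
    ]
    response = post( "%s/batch" % galaxy_api, data=json.dumps( { "batch": batch } ) )
    # One { "status": ..., "body": ... } entry per batched request.
    results = response.json()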
diff --git a/test/api/test_authenticate.py b/test/api/test_authenticate.py
new file mode 100644
index 0000000..d713e27
--- /dev/null
+++ b/test/api/test_authenticate.py
@@ -0,0 +1,29 @@
+import base64
+
+from requests import get
+
+from base import api
+
+TEST_USER_EMAIL = "auth_user_test at bx.psu.edu"
+TEST_USER_PASSWORD = "testpassword1"
+
+
+class AuthenticationApiTestCase( api.ApiTestCase ):
+
+    def test_auth( self ):
+        self._setup_user( TEST_USER_EMAIL, TEST_USER_PASSWORD )
+        baseauth_url = self._api_url( "authenticate/baseauth", use_key=False )
+        unencoded_credentials = "%s:%s" % ( TEST_USER_EMAIL, TEST_USER_PASSWORD )
+        authorization = base64.b64encode(unencoded_credentials)
+        headers = {
+            "Authorization": authorization,
+        }
+        auth_response = get( baseauth_url, headers=headers )
+        self._assert_status_code_is( auth_response, 200 )
+        auth_dict = auth_response.json()
+        self._assert_has_keys( auth_dict, "api_key" )
+
+        # Verify key...
+        random_api_url = self._api_url( "users", use_key=False )
+        random_api_response = get( random_api_url, params=dict( key=auth_dict[ "api_key" ] ) )
+        self._assert_status_code_is( random_api_response, 200 )
diff --git a/test/api/test_dataset_collections.py b/test/api/test_dataset_collections.py
new file mode 100644
index 0000000..7e38350
--- /dev/null
+++ b/test/api/test_dataset_collections.py
@@ -0,0 +1,131 @@
+import json
+
+from base import api
+
+from .helpers import DatasetCollectionPopulator, DatasetPopulator
+
+
+class DatasetCollectionApiTestCase( api.ApiTestCase ):
+
+    def setUp( self ):
+        super( DatasetCollectionApiTestCase, self ).setUp()
+        self.dataset_populator = DatasetPopulator( self.galaxy_interactor )
+        self.dataset_collection_populator = DatasetCollectionPopulator( self.galaxy_interactor )
+        self.history_id = self.dataset_populator.new_history()
+
+    def test_create_pair_from_history( self ):
+        payload = self.dataset_collection_populator.create_pair_payload(
+            self.history_id,
+            instance_type="history",
+        )
+        create_response = self._post( "dataset_collections", payload )
+        dataset_collection = self._check_create_response( create_response )
+        returned_datasets = dataset_collection[ "elements" ]
+        assert len( returned_datasets ) == 2, dataset_collection
+
+    def test_create_list_from_history( self ):
+        element_identifiers = self.dataset_collection_populator.list_identifiers( self.history_id )
+
+        payload = dict(
+            instance_type="history",
+            history_id=self.history_id,
+            element_identifiers=json.dumps(element_identifiers),
+            collection_type="list",
+        )
+
+        create_response = self._post( "dataset_collections", payload )
+        dataset_collection = self._check_create_response( create_response )
+        returned_datasets = dataset_collection[ "elements" ]
+        assert len( returned_datasets ) == 3, dataset_collection
+
+    def test_create_list_of_existing_pairs( self ):
+        pair_payload = self.dataset_collection_populator.create_pair_payload(
+            self.history_id,
+            instance_type="history",
+        )
+        pair_create_response = self._post( "dataset_collections", pair_payload )
+        dataset_collection = self._check_create_response( pair_create_response )
+        hdca_id = dataset_collection[ "id" ]
+
+        element_identifiers = [
+            dict( name="test1", src="hdca", id=hdca_id )
+        ]
+
+        payload = dict(
+            instance_type="history",
+            history_id=self.history_id,
+            element_identifiers=json.dumps(element_identifiers),
+            collection_type="list",
+        )
+        create_response = self._post( "dataset_collections", payload )
+        dataset_collection = self._check_create_response( create_response )
+        returned_collections = dataset_collection[ "elements" ]
+        assert len( returned_collections ) == 1, dataset_collection
+
+    def test_create_list_of_new_pairs( self ):
+        pair_identifiers = self.dataset_collection_populator.pair_identifiers( self.history_id )
+        element_identifiers = [ dict(
+            src="new_collection",
+            name="test_pair",
+            collection_type="paired",
+            element_identifiers=pair_identifiers,
+        ) ]
+        payload = dict(
+            collection_type="list:paired",
+            instance_type="history",
+            history_id=self.history_id,
+            name="a nested collection",
+            element_identifiers=json.dumps( element_identifiers ),
+        )
+        create_response = self._post( "dataset_collections", payload )
+        dataset_collection = self._check_create_response( create_response )
+        assert dataset_collection[ "collection_type" ] == "list:paired"
+        assert dataset_collection[ "name" ] == "a nested collection"
+        returned_collections = dataset_collection[ "elements" ]
+        assert len( returned_collections ) == 1, dataset_collection
+        pair_1_element = returned_collections[ 0 ]
+        self._assert_has_keys( pair_1_element, "element_index" )
+        pair_1_object = pair_1_element[ "object" ]
+        self._assert_has_keys( pair_1_object, "collection_type", "elements" )
+        self.assertEquals( pair_1_object[ "collection_type" ], "paired" )
+        self.assertEquals( pair_1_object[ "populated" ], True )
+        pair_elements = pair_1_object[ "elements" ]
+        assert len( pair_elements ) == 2
+        pair_1_element_1 = pair_elements[ 0 ]
+        assert pair_1_element_1[ "element_index" ] == 0
+
+    def test_hda_security( self ):
+        element_identifiers = self.dataset_collection_populator.pair_identifiers( self.history_id )
+
+        with self._different_user( ):
+            history_id = self.dataset_populator.new_history()
+            payload = dict(
+                instance_type="history",
+                history_id=history_id,
+                element_identifiers=json.dumps(element_identifiers),
+                collection_type="paired",
+            )
+
+            self._post( "dataset_collections", payload )
+            # TODO: re-enable once there is a way to restrict access
+            # to this dataset via the API.
+            # self._assert_status_code_is( create_response, 403 )
+
+    def test_enforces_unique_names( self ):
+        element_identifiers = self.dataset_collection_populator.list_identifiers( self.history_id )
+        element_identifiers[ 2 ][ "name" ] = element_identifiers[ 0 ][ "name" ]
+        payload = dict(
+            instance_type="history",
+            history_id=self.history_id,
+            element_identifiers=json.dumps(element_identifiers),
+            collection_type="list",
+        )
+
+        create_response = self._post( "dataset_collections", payload )
+        self._assert_status_code_is( create_response, 400 )
+
+    def _check_create_response( self, create_response ):
+        self._assert_status_code_is( create_response, 200 )
+        dataset_collection = create_response.json()
+        self._assert_has_keys( dataset_collection, "elements", "url", "name", "collection_type" )
+        return dataset_collection
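
All of the creation payloads above share the element_identifiers structure. A hedged sketch of the JSON a "paired" collection request carries (the ids are placeholders for real HDA ids):

    import json

    element_identifiers = [
        { "name": "forward", "src": "hda", "id": "<hda-id-1>" },
        { "name": "reverse", "src": "hda", "id": "<hda-id-2>" },
    ]
    payload = {
        "instance_type": "history",
        "history_id": "<history-id>",
        "collection_type": "paired",
        # The API expects this field as a JSON-encoded string.
        "element_identifiers": json.dumps( element_identifiers ),
    }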
diff --git a/test/api/test_datasets.py b/test/api/test_datasets.py
new file mode 100644
index 0000000..e777d83
--- /dev/null
+++ b/test/api/test_datasets.py
@@ -0,0 +1,43 @@
+from __future__ import print_function
+import textwrap
+
+from base import api
+from .helpers import TestsDatasets
+
+
+class DatasetsApiTestCase( api.ApiTestCase, TestsDatasets ):
+
+    def setUp( self ):
+        super( DatasetsApiTestCase, self ).setUp()
+        self.history_id = self._new_history()
+
+    def test_index( self ):
+        index_response = self._get( "datasets" )
+        print(index_response)
+        print(dir( index_response ))
+        self._assert_status_code_is( index_response, 501 )
+
+    def test_show( self ):
+        hda1 = self._new_dataset( self.history_id )
+        show_response = self._get( "datasets/%s" % ( hda1[ "id" ] ) )
+        self._assert_status_code_is( show_response, 200 )
+        self.__assert_matches_hda( hda1, show_response.json() )
+
+    def __assert_matches_hda( self, input_hda, query_hda ):
+        self._assert_has_keys( query_hda, "id", "name" )
+        assert input_hda[ "name" ] == query_hda[ "name" ]
+        assert input_hda[ "id" ] == query_hda[ "id" ]
+
+    def test_display( self ):
+        contents = textwrap.dedent( """\
+        1   2   3   4
+        A   B   C   D
+        10  20  30  40
+        """ )
+        hda1 = self._new_dataset( self.history_id, content=contents )
+        display_response = self._get( "histories/%s/contents/%s/display" % ( self.history_id, hda1[ "id" ] ), {
+            'raw': 'True'
+        })
+        self._assert_status_code_is( display_response, 200 )
+        # TODO: doesn't work
+        # assert display_response.text == contents
diff --git a/test/api/test_datatypes.py b/test/api/test_datatypes.py
new file mode 100644
index 0000000..2f560b6
--- /dev/null
+++ b/test/api/test_datatypes.py
@@ -0,0 +1,64 @@
+from base import api
+
+
+HIDDEN_DURING_UPLOAD_DATATYPE = "fli"
+
+
+class DatatypesApiTestCase( api.ApiTestCase ):
+
+    def test_index( self ):
+        datatypes = self._index_datatypes()
+        for common_type in ["tabular", "fasta"]:
+            assert common_type in datatypes, "%s not in %s" % (common_type, datatypes)
+
+    def test_index_upload_only( self ):
+        # fli is not displayed during upload, so it should only be listed
+        # when upload_only is explicitly false.
+        datatypes = self._index_datatypes( data={ "upload_only": False } )
+        assert HIDDEN_DURING_UPLOAD_DATATYPE in datatypes
+
+        datatypes = self._index_datatypes( data={ "upload_only": True } )
+        assert HIDDEN_DURING_UPLOAD_DATATYPE not in datatypes
+
+        datatypes = self._index_datatypes( )
+        assert HIDDEN_DURING_UPLOAD_DATATYPE not in datatypes
+
+    def test_full_index( self ):
+        datatypes = self._index_datatypes( data={ "extension_only": False } )
+        for datatype in datatypes:
+            self._assert_has_keys( datatype, "extension", "description", "description_url" )
+            assert datatype["extension"] != HIDDEN_DURING_UPLOAD_DATATYPE
+
+    def test_mapping( self ):
+        response = self._get( "datatypes/mapping" )
+        self._assert_status_code_is( response, 200 )
+        mapping_dict = response.json()
+        self._assert_has_keys( mapping_dict, "ext_to_class_name", "class_to_classes" )
+
+    def test_sniffers( self ):
+        response = self._get( "datatypes/sniffers" )
+        self._assert_status_code_is( response, 200 )
+        sniffer_list = response.json()
+        owl_index = sniffer_list.index( "galaxy.datatypes.xml:Owl" )
+        xml_index = sniffer_list.index( "galaxy.datatypes.xml:GenericXml" )
+        assert owl_index < xml_index
+
+    def test_converters( self ):
+        response = self._get( "datatypes/converters" )
+        self._assert_status_code_is( response, 200 )
+        converters_list = response.json()
+        found_fasta_to_tabular = False
+
+        for converter in converters_list:
+            self._assert_has_key( converter, "source", "target", "tool_id" )
+            if converter["source"] == "fasta" and converter["target"] == "tabular":
+                found_fasta_to_tabular = True
+
+        assert found_fasta_to_tabular
+
+    def _index_datatypes( self, data={} ):
+        response = self._get( "datatypes", data=data )
+        self._assert_status_code_is( response, 200 )
+        datatypes = response.json()
+        assert isinstance( datatypes, list )
+        return datatypes
diff --git a/test/api/test_framework.py b/test/api/test_framework.py
new file mode 100644
index 0000000..32ff8da
--- /dev/null
+++ b/test/api/test_framework.py
@@ -0,0 +1,27 @@
+# This file doesn't test any API in particular but is meant to functionally
+# test the API framework itself.
+from base import api
+
+
+class ApiFrameworkTestCase( api.ApiTestCase ):
+
+    # Next several tests test the API's run_as functionality.
+    def test_user_cannot_run_as( self ):
+        post_data = dict( name="TestHistory1", run_as="another_user" )
+        # Normal user cannot run_as...
+        create_response = self._post( "histories", data=post_data )
+        self._assert_status_code_is( create_response, 403 )
+
+    def test_run_as_invalid_user( self ):
+        post_data = dict( name="TestHistory1", run_as="another_user" )
+        # admin user can run_as, but this user doesn't exist, expect 400.
+        create_response = self._post( "histories", data=post_data, admin=True )
+        self._assert_status_code_is( create_response, 400 )
+
+    def test_run_as_valid_user( self ):
+        run_as_user = self._setup_user( "for_run_as@bx.psu.edu" )
+        post_data = dict( name="TestHistory1", run_as=run_as_user[ "id" ] )
+        # Use run_as with the admin user, targeting the user just created -
+        # this should work.
+        create_response = self._post( "histories", data=post_data, admin=True )
+        self._assert_status_code_is( create_response, 200 )
diff --git a/test/api/test_histories.py b/test/api/test_histories.py
new file mode 100644
index 0000000..16b851b
--- /dev/null
+++ b/test/api/test_histories.py
@@ -0,0 +1,102 @@
+from requests import (
+    get,
+    post,
+    put
+)
+
+from base import api
+
+from .helpers import DatasetPopulator, wait_on
+
+
+class HistoriesApiTestCase( api.ApiTestCase ):
+
+    def setUp( self ):
+        super( HistoriesApiTestCase, self ).setUp( )
+        self.dataset_populator = DatasetPopulator( self.galaxy_interactor )
+
+    def test_create_history( self ):
+        # Create a history.
+        post_data = dict( name="TestHistory1" )
+        create_response = self._post( "histories", data=post_data ).json()
+        self._assert_has_keys( create_response, "name", "id" )
+        self.assertEquals( create_response[ "name" ], "TestHistory1" )
+        created_id = create_response[ "id" ]
+
+        # Make sure new history appears in index of user's histories.
+        index_response = self._get( "histories" ).json()
+        indexed_history = [ h for h in index_response if h[ "id" ] == created_id ][0]
+        self.assertEquals(indexed_history[ "name" ], "TestHistory1")
+
+    def test_create_anonymous_fails( self ):
+        post_data = dict( name="CannotCreate" )
+        # Using lower-level _api_url will cause key to not be injected.
+        histories_url = self._api_url( "histories" )
+        create_response = post( url=histories_url, data=post_data )
+        self._assert_status_code_is( create_response, 403 )
+
+    def test_import_export( self ):
+        history_id = self.dataset_populator.new_history( name="for_export" )
+        self.dataset_populator.new_dataset( history_id, content="1 2 3" )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        download_path = self._export( history_id )
+        full_download_url = "%s%s?key=%s" % ( self.url, download_path, self.galaxy_interactor.api_key )
+        download_response = get( full_download_url )
+        self._assert_status_code_is( download_response, 200 )
+
+        def history_names():
+            history_index = self._get( "histories" )
+            return dict( map( lambda h: ( h[ "name" ], h ), history_index.json() ) )
+
+        import_name = "imported from archive: for_export"
+        assert import_name not in history_names()
+
+        import_data = dict( archive_source=full_download_url, archive_type="url" )
+        import_response = self._post( "histories", data=import_data )
+
+        self._assert_status_code_is( import_response, 200 )
+
+        def has_history_with_name():
+            histories = history_names()
+            return histories.get( import_name, None )
+
+        imported_history = wait_on( has_history_with_name, desc="import history" )
+        imported_history_id = imported_history[ "id" ]
+        self.dataset_populator.wait_for_history( imported_history_id )
+        contents_response = self._get( "histories/%s/contents" % imported_history_id )
+        self._assert_status_code_is( contents_response, 200 )
+        contents = contents_response.json()
+        assert len( contents ) == 1
+        imported_content = self.dataset_populator.get_history_dataset_content(
+            history_id=imported_history_id,
+            dataset_id=contents[ 0 ][ "id" ]
+        )
+        assert imported_content == "1 2 3\n"
+
+    def test_create_tag( self ):
+        post_data = dict( name="TestHistoryForTag" )
+        history_id = self._post( "histories", data=post_data ).json()["id"]
+        tag_data = dict( value="awesometagvalue" )
+        tag_url = "histories/%s/tags/awesometagname" % history_id
+        tag_create_response = self._post( tag_url, data=tag_data )
+        self._assert_status_code_is( tag_create_response, 200 )
+
+    def _export(self, history_id):
+        export_url = self._api_url( "histories/%s/exports" % history_id, use_key=True )
+        put_response = put( export_url )
+        self._assert_status_code_is( put_response, 202 )
+
+        def export_ready_response():
+            put_response = put( export_url )
+            if put_response.status_code == 202:
+                return None
+            return put_response
+
+        put_response = wait_on( export_ready_response, desc="export ready" )
+        self._assert_status_code_is( put_response, 200 )
+        response = put_response.json()
+        self._assert_has_keys( response, "download_url" )
+        download_path = response[ "download_url" ]
+        return download_path
+
+    # TODO: (CE) test_create_from_copy
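
The _export helper above encodes the endpoint's poll-until-ready protocol: PUT returns 202 while the archive is still being prepared and 200 with a download_url once it is ready. A hedged standalone sketch of the same loop (the URL is a placeholder):

    import time

    from requests import put

    export_url = "http://localhost:8080/api/histories/<history-id>/exports?key=<api-key>"
    while True:
        response = put( export_url )
        if response.status_code != 202:   # 202 means "still preparing"
            break
        time.sleep( 0.25 )
    assert response.status_code == 200
    download_path = response.json()[ "download_url" ]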
diff --git a/test/api/test_history_contents.py b/test/api/test_history_contents.py
new file mode 100644
index 0000000..8f58bb3
--- /dev/null
+++ b/test/api/test_history_contents.py
@@ -0,0 +1,185 @@
+import json
+
+from requests import delete, put
+
+from base import api
+
+from .helpers import DatasetCollectionPopulator, LibraryPopulator, TestsDatasets
+
+
+# TODO: Test anonymous access.
+class HistoryContentsApiTestCase( api.ApiTestCase, TestsDatasets ):
+
+    def setUp( self ):
+        super( HistoryContentsApiTestCase, self ).setUp()
+        self.history_id = self._new_history()
+        self.dataset_collection_populator = DatasetCollectionPopulator( self.galaxy_interactor )
+
+    def test_index_hda_summary( self ):
+        hda1 = self._new_dataset( self.history_id )
+        contents_response = self._get( "histories/%s/contents" % self.history_id )
+        hda_summary = self.__check_for_hda( contents_response, hda1 )
+        assert "display_types" not in hda_summary  # Quick summary, not full details
+
+    def test_index_hda_all_details( self ):
+        hda1 = self._new_dataset( self.history_id )
+        contents_response = self._get( "histories/%s/contents?details=all" % self.history_id )
+        hda_details = self.__check_for_hda( contents_response, hda1 )
+        self.__assert_hda_has_full_details( hda_details )
+
+    def test_index_hda_detail_by_id( self ):
+        hda1 = self._new_dataset( self.history_id )
+        contents_response = self._get( "histories/%s/contents?details=%s" % ( self.history_id, hda1[ "id" ] ) )
+        hda_details = self.__check_for_hda( contents_response, hda1 )
+        self.__assert_hda_has_full_details( hda_details )
+
+    def test_show_hda( self ):
+        hda1 = self._new_dataset( self.history_id )
+        show_response = self.__show( hda1 )
+        self._assert_status_code_is( show_response, 200 )
+        self.__assert_matches_hda( hda1, show_response.json() )
+
+    def test_hda_copy( self ):
+        hda1 = self._new_dataset( self.history_id )
+        create_data = dict(
+            source='hda',
+            content=hda1[ "id" ],
+        )
+        second_history_id = self._new_history()
+        assert self.__count_contents( second_history_id ) == 0
+        create_response = self._post( "histories/%s/contents" % second_history_id, create_data )
+        self._assert_status_code_is( create_response, 200 )
+        assert self.__count_contents( second_history_id ) == 1
+
+    def test_library_copy( self ):
+        ld = LibraryPopulator( self ).new_library_dataset( "lda_test_library" )
+        create_data = dict(
+            source='library',
+            content=ld[ "id" ],
+        )
+        assert self.__count_contents( self.history_id ) == 0
+        create_response = self._post( "histories/%s/contents" % self.history_id, create_data )
+        self._assert_status_code_is( create_response, 200 )
+        assert self.__count_contents( self.history_id ) == 1
+
+    def test_update( self ):
+        hda1 = self._new_dataset( self.history_id )
+        self._wait_for_history( self.history_id )
+        assert str( hda1[ "deleted" ] ).lower() == "false"
+        update_url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1[ "id" ] ), use_key=True )
+        # Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6
+        body = json.dumps( dict( deleted=True ) )
+        update_response = put( update_url, data=body )
+        self._assert_status_code_is( update_response, 200 )
+        show_response = self.__show( hda1 )
+        assert str( show_response.json()[ "deleted" ] ).lower() == "true"
+
+    def test_delete( self ):
+        hda1 = self._new_dataset( self.history_id )
+        self._wait_for_history( self.history_id )
+        assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "false"
+        url = self._api_url( "histories/%s/contents/%s" % ( self.history_id, hda1["id" ] ), use_key=True )
+        delete_response = delete( url )
+        assert delete_response.status_code < 300  # Something in the 200s :).
+        assert str( self.__show( hda1 ).json()[ "deleted" ] ).lower() == "true"
+
+    def test_dataset_collections( self ):
+        payload = self.dataset_collection_populator.create_pair_payload(
+            self.history_id,
+            type="dataset_collection"
+        )
+        pre_collection_count = self.__count_contents( type="dataset_collection" )
+        pre_dataset_count = self.__count_contents( type="dataset" )
+        pre_combined_count = self.__count_contents( type="dataset,dataset_collection" )
+
+        dataset_collection_response = self._post( "histories/%s/contents" % self.history_id, payload )
+
+        dataset_collection = self.__check_create_collection_response( dataset_collection_response )
+
+        post_collection_count = self.__count_contents( type="dataset_collection" )
+        post_dataset_count = self.__count_contents( type="dataset" )
+        post_combined_count = self.__count_contents( type="dataset,dataset_collection" )
+
+        # Test filtering types with index.
+        assert pre_collection_count == 0
+        assert post_collection_count == 1
+        assert post_combined_count == pre_dataset_count + 1
+        assert post_combined_count == pre_combined_count + 1
+        assert pre_dataset_count == post_dataset_count
+
+        # Test showing a dataset collection.
+        collection_url = "histories/%s/contents/dataset_collections/%s" % ( self.history_id, dataset_collection[ "id" ] )
+        show_response = self._get( collection_url )
+        self._assert_status_code_is( show_response, 200 )
+        dataset_collection = show_response.json()
+        self._assert_has_keys( dataset_collection, "url", "name", "deleted" )
+
+        assert not dataset_collection[ "deleted" ]
+
+        delete_response = delete( self._api_url( collection_url, use_key=True ) )
+        self._assert_status_code_is( delete_response, 200 )
+
+        show_response = self._get( collection_url )
+        dataset_collection = show_response.json()
+        assert dataset_collection[ "deleted" ]
+
+    def test_update_dataset_collection( self ):
+        payload = self.dataset_collection_populator.create_pair_payload(
+            self.history_id,
+            type="dataset_collection"
+        )
+        dataset_collection_response = self._post( "histories/%s/contents" % self.history_id, payload )
+        self._assert_status_code_is( dataset_collection_response, 200 )
+        hdca = dataset_collection_response.json()
+        update_url = self._api_url( "histories/%s/contents/dataset_collections/%s" % ( self.history_id, hdca[ "id" ] ), use_key=True )
+        # Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6
+        body = json.dumps( dict( name="newnameforpair" ) )
+        update_response = put( update_url, data=body )
+        self._assert_status_code_is( update_response, 200 )
+        show_response = self.__show( hdca )
+        assert str( show_response.json()[ "name" ] ) == "newnameforpair"
+
+    def test_hdca_copy( self ):
+        hdca = self.dataset_collection_populator.create_pair_in_history( self.history_id ).json()
+        hdca_id = hdca[ "id" ]
+        second_history_id = self._new_history()
+        create_data = dict(
+            source='hdca',
+            content=hdca_id,
+        )
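+        # Copying between histories works by POSTing the source HDCA id (with
+        # source='hdca') to the target history's dataset_collections endpoint.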
+        assert len( self._get( "histories/%s/contents/dataset_collections" % second_history_id ).json() ) == 0
+        create_response = self._post( "histories/%s/contents/dataset_collections" % second_history_id, create_data )
+        self.__check_create_collection_response( create_response )
+        assert len( self._get( "histories/%s/contents/dataset_collections" % second_history_id ).json() ) == 1
+
+    def __check_create_collection_response( self, response ):
+        self._assert_status_code_is( response, 200 )
+        dataset_collection = response.json()
+        self._assert_has_keys( dataset_collection, "url", "name", "deleted", "visible", "elements" )
+        return dataset_collection
+
+    def __show( self, contents ):
+        show_response = self._get( "histories/%s/contents/%ss/%s" % ( self.history_id, contents["history_content_type"], contents[ "id" ] ) )
+        return show_response
+
+    def __count_contents( self, history_id=None, **kwds ):
+        if history_id is None:
+            history_id = self.history_id
+        contents_response = self._get( "histories/%s/contents" % history_id, kwds )
+        return len( contents_response.json() )
+
+    def __assert_hda_has_full_details( self, hda_details ):
+        self._assert_has_keys( hda_details, "display_types", "display_apps" )
+
+    def __check_for_hda( self, contents_response, hda ):
+        self._assert_status_code_is( contents_response, 200 )
+        contents = contents_response.json()
+        assert len( contents ) == 1
+        hda_summary = contents[ 0 ]
+        self.__assert_matches_hda( hda, hda_summary )
+        return hda_summary
+
+    def __assert_matches_hda( self, input_hda, query_hda ):
+        self._assert_has_keys( query_hda, "id", "name" )
+        assert input_hda[ "name" ] == query_hda[ "name" ]
+        assert input_hda[ "id" ] == query_hda[ "id" ]
diff --git a/test/api/test_history_contents_provenance.py b/test/api/test_history_contents_provenance.py
new file mode 100644
index 0000000..8e36c9b
--- /dev/null
+++ b/test/api/test_history_contents_provenance.py
@@ -0,0 +1,16 @@
+from base import api
+from .helpers import DatasetPopulator
+
+
+class TestProvenance( api.ApiTestCase ):
+
+    def setUp( self ):
+        super( TestProvenance, self ).setUp( )
+        self.dataset_populator = DatasetPopulator( self.galaxy_interactor )
+
+    def test_show_prov( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='for prov' )
+        prov_response = self._get( "histories/%s/contents/%s/provenance" % ( history_id, new_dataset1[ "id" ] ) )
+        self._assert_status_code_is( prov_response, 200 )
+        self._assert_has_keys( prov_response.json(), "job_id", "id", "stdout", "stderr", "parameters", "tool_id" )
diff --git a/test/api/test_jobs.py b/test/api/test_jobs.py
new file mode 100644
index 0000000..18c850f
--- /dev/null
+++ b/test/api/test_jobs.py
@@ -0,0 +1,212 @@
+import datetime
+import json
+import time
+from operator import itemgetter
+
+from base import api
+
+from .helpers import TestsDatasets
+
+
+class JobsApiTestCase( api.ApiTestCase, TestsDatasets ):
+
+    def test_index( self ):
+        # Create HDA to ensure at least one job exists...
+        self.__history_with_new_dataset()
+        jobs = self.__jobs_index()
+        assert "upload1" in map( itemgetter( "tool_id" ), jobs )
+
+    def test_system_details_admin_only( self ):
+        self.__history_with_new_dataset()
+        jobs = self.__jobs_index( admin=False )
+        job = jobs[0]
+        self._assert_not_has_keys( job, "command_line", "external_id" )
+
+        jobs = self.__jobs_index( admin=True )
+        job = jobs[0]
+        self._assert_has_keys( job, "command_line", "external_id" )
+
+    def test_index_state_filter( self ):
+        # Initial number of ok jobs
+        original_count = len( self.__uploads_with_state( "ok" ) )
+        # Run through a dataset upload to ensure the number of "ok" uploads
+        # increases by at least 1.
+        self.__history_with_ok_dataset()
+
+        # Verify number of ok jobs is actually greater.
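+        # Poll briefly, since the upload job may not have reached "ok" yet.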
+        count_increased = False
+        for i in range(10):
+            new_count = len( self.__uploads_with_state( "ok" ) )
+            if original_count < new_count:
+                count_increased = True
+                break
+            time.sleep(.1)
+
+        if not count_increased:
+            template = "Jobs in ok state did not increase (was %d, now %d)"
+            message = template % (original_count, new_count)
+            raise AssertionError(message)
+
+    def test_index_date_filter( self ):
+        self.__history_with_new_dataset()
+        two_weeks_ago = (datetime.datetime.utcnow() - datetime.timedelta(14)).isoformat()
+        last_week = (datetime.datetime.utcnow() - datetime.timedelta(7)).isoformat()
+        next_week = (datetime.datetime.utcnow() + datetime.timedelta(7)).isoformat()
+        today = datetime.datetime.utcnow().isoformat()
+        tomorrow = (datetime.datetime.utcnow() + datetime.timedelta(1)).isoformat()
+
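+        # The date filters accept plain dates here and full ISO-8601
+        # timestamps in the later calls.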
+        jobs = self.__jobs_index( data={"date_range_min": today[0:10], "date_range_max": tomorrow[0:10]} )
+        assert len( jobs ) > 0
+        today_job_id = jobs[0]["id"]
+
+        jobs = self.__jobs_index( data={"date_range_min": two_weeks_ago, "date_range_max": last_week} )
+        assert today_job_id not in map(itemgetter("id"), jobs)
+
+        jobs = self.__jobs_index( data={"date_range_min": last_week, "date_range_max": next_week} )
+        assert today_job_id in map(itemgetter("id"), jobs)
+
+    def test_index_history( self ):
+        history_id, _ = self.__history_with_new_dataset()
+        jobs = self.__jobs_index( data={"history_id": history_id} )
+        assert len( jobs ) > 0
+
+        history_id = self._new_history()
+        jobs = self.__jobs_index( data={"history_id": history_id} )
+        assert len( jobs ) == 0
+
+    def test_index_multiple_states_filter( self ):
+        # Initial number of ok jobs
+        original_count = len( self.__uploads_with_state( "ok", "new" ) )
+
+        # Run through a dataset upload to ensure the number of uploads in
+        # these states increases by at least 1.
+        self.__history_with_ok_dataset()
+
+        # Verify number of ok jobs is actually greater.
+        new_count = len( self.__uploads_with_state( "new", "ok" ) )
+        assert original_count < new_count, new_count
+
+    def test_show( self ):
+        # Create HDA to ensure at least one job exists...
+        self.__history_with_new_dataset()
+
+        jobs_response = self._get( "jobs" )
+        first_job = jobs_response.json()[ 0 ]
+        self._assert_has_key( first_job, 'id', 'state', 'exit_code', 'update_time', 'create_time' )
+
+        job_id = first_job[ "id" ]
+        show_jobs_response = self._get( "jobs/%s" % job_id )
+        self._assert_status_code_is( show_jobs_response, 200 )
+
+        job_details = show_jobs_response.json()
+        self._assert_has_key( job_details, 'id', 'state', 'exit_code', 'update_time', 'create_time' )
+
+    def test_show_security( self ):
+        history_id, _ = self.__history_with_new_dataset()
+        jobs_response = self._get( "jobs", data={"history_id": history_id} )
+        job = jobs_response.json()[ 0 ]
+        job_id = job[ "id" ]
+
+        show_jobs_response = self._get( "jobs/%s" % job_id, admin=False )
+        self._assert_not_has_keys( show_jobs_response.json(), "command_line", "external_id" )
+
+        # TODO: Re-activate test case when API accepts privacy settings
+        # with self._different_user():
+        #    show_jobs_response = self._get( "jobs/%s" % job_id, admin=False )
+        #    self._assert_status_code_is( show_jobs_response, 200 )
+
+        show_jobs_response = self._get( "jobs/%s" % job_id, admin=True )
+        self._assert_has_keys( show_jobs_response.json(), "command_line", "external_id" )
+
+    def test_search( self ):
+        history_id, dataset_id = self.__history_with_ok_dataset()
+
+        inputs = json.dumps(
+            dict(
+                input1=dict(
+                    src='hda',
+                    id=dataset_id,
+                )
+            )
+        )
+        search_payload = dict(
+            tool_id="cat1",
+            inputs=inputs,
+            state="ok",
+        )
+
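+        # No matching job has been run yet, so the search should be empty.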
+        empty_search_response = self._post( "jobs/search", data=search_payload )
+        self._assert_status_code_is( empty_search_response, 200 )
+        self.assertEquals( len( empty_search_response.json() ), 0 )
+
+        self.__run_cat_tool( history_id, dataset_id )
+        self._wait_for_history( history_id, assert_ok=True )
+
+        search_count = -1
+        # In case the job and history aren't updated at exactly the same
+        # time, allow a few retries.
+        for i in range(5):
+            search_count = self._search_count(search_payload)
+            if search_count == 1:
+                break
+            time.sleep(.1)
+
+        self.assertEquals( search_count, 1 )
+
+    def _search_count( self, search_payload ):
+        search_response = self._post( "jobs/search", data=search_payload )
+        self._assert_status_code_is( search_response, 200 )
+        search_json = search_response.json()
+        return len(search_json)
+
+    def __run_cat_tool( self, history_id, dataset_id ):
+        # TODO: eliminate code duplication with test_tools.py
+        payload = self._run_tool_payload(
+            tool_id='cat1',
+            inputs=dict(
+                input1=dict(
+                    src='hda',
+                    id=dataset_id
+                ),
+            ),
+            history_id=history_id,
+        )
+        self._post( "tools", data=payload )
+
+    def __run_randomlines_tool( self, lines, history_id, dataset_id ):
+        payload = self._run_tool_payload(
+            tool_id="random_lines1",
+            inputs=dict(
+                num_lines=lines,
+                input=dict(
+                    src='hda',
+                    id=dataset_id,
+                ),
+            ),
+            history_id=history_id,
+        )
+        self._post( "tools", data=payload )
+
+    def __uploads_with_state( self, *states ):
+        jobs_response = self._get( "jobs", data=dict( state=states ) )
+        self._assert_status_code_is( jobs_response, 200 )
+        jobs = jobs_response.json()
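+        # Sanity-check the server-side state filter before narrowing to uploads.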
+        assert not [ j for j in jobs if j[ "state" ] not in states ]
+        return [ j for j in jobs if j[ "tool_id" ] == "upload1" ]
+
+    def __history_with_new_dataset( self ):
+        history_id = self._new_history()
+        dataset_id = self._new_dataset( history_id )[ "id" ]
+        return history_id, dataset_id
+
+    def __history_with_ok_dataset( self ):
+        history_id = self._new_history()
+        dataset_id = self._new_dataset( history_id, wait=True )[ "id" ]
+        return history_id, dataset_id
+
+    def __jobs_index( self, **kwds ):
+        jobs_response = self._get( "jobs", **kwds )
+        self._assert_status_code_is( jobs_response, 200 )
+        jobs = jobs_response.json()
+        assert isinstance( jobs, list )
+        return jobs
diff --git a/test/api/test_libraries.py b/test/api/test_libraries.py
new file mode 100644
index 0000000..a6fafdf
--- /dev/null
+++ b/test/api/test_libraries.py
@@ -0,0 +1,95 @@
+from base import api
+
+from .helpers import (
+    LibraryPopulator,
+    TestsDatasets,
+    wait_on_state
+)
+
+
+class LibrariesApiTestCase( api.ApiTestCase, TestsDatasets ):
+
+    def setUp( self ):
+        super( LibrariesApiTestCase, self ).setUp()
+        self.library_populator = LibraryPopulator( self )
+
+    def test_create( self ):
+        data = dict( name="CreateTestLibrary" )
+        create_response = self._post( "libraries", data=data, admin=True )
+        self._assert_status_code_is( create_response, 200 )
+        library = create_response.json()
+        self._assert_has_keys( library, "name" )
+        assert library[ "name" ] == "CreateTestLibrary"
+
+    def test_delete( self ):
+        library = self.library_populator.new_library( "DeleteTestLibrary" )
+        delete_response = self._delete( "libraries/%s" % library[ "id" ], admin=True )
+        self._assert_status_code_is( delete_response, 200 )
+        library = delete_response.json()
+        self._assert_has_keys( library, "deleted" )
+        assert library[ "deleted" ] is True
+        # Test undeleting
+        data = dict( undelete='true' )
+        undelete_response = self._delete( "libraries/%s" % library[ "id" ], data=data, admin=True )
+        self._assert_status_code_is( undelete_response, 200 )
+        library = undelete_response.json()
+        assert library[ "deleted" ] is False
+
+    def test_nonadmin( self ):
+        # Anons can't create libs
+        data = dict( name="CreateTestLibrary" )
+        create_response = self._post( "libraries", data=data, admin=False, anon=True )
+        self._assert_status_code_is( create_response, 403 )
+        # Anons can't delete libs
+        library = self.library_populator.new_library( "AnonDeleteTestLibrary" )
+        delete_response = self._delete( "libraries/%s" % library[ "id" ], admin=False, anon=True )
+        self._assert_status_code_is( delete_response, 403 )
+        # Anons can't update libs
+        data = dict( name="ChangedName", description="ChangedDescription", synopsis='ChangedSynopsis' )
+        update_response = self._patch( "libraries/%s" % library[ "id" ], data=data, admin=False, anon=True )
+        self._assert_status_code_is( update_response, 403 )
+
+    def test_update( self ):
+        library = self.library_populator.new_library( "UpdateTestLibrary" )
+        data = dict( name='ChangedName', description='ChangedDescription', synopsis='ChangedSynopsis' )
+        update_response = self._patch( "libraries/%s" % library[ "id" ], data=data, admin=True )
+        self._assert_status_code_is( update_response, 200 )
+        library = update_response.json()
+        self._assert_has_keys( library, 'name', 'description', 'synopsis' )
+        assert library['name'] == 'ChangedName'
+        assert library['description'] == 'ChangedDescription'
+        assert library['synopsis'] == 'ChangedSynopsis'
+
+    def test_create_private_library_permissions( self ):
+        library = self.library_populator.new_library( "PermissionTestLibrary" )
+        library_id = library[ "id" ]
+        role_id = self.library_populator.user_private_role_id()
+        self.library_populator.set_permissions( library_id, role_id )
+        create_response = self._create_folder( library )
+        self._assert_status_code_is( create_response, 200 )
+
+    def test_create_dataset( self ):
+        library = self.library_populator.new_private_library( "ForCreateDatasets" )
+        payload, files = self.library_populator.create_dataset_request( library, file_type="txt", contents="create_test" )
+        create_response = self._post( "libraries/%s/contents" % library[ "id" ], payload, files=files )
+        self._assert_status_code_is( create_response, 200 )
+        library_datasets = create_response.json()
+        assert len( library_datasets ) == 1
+        library_dataset = library_datasets[ 0 ]
+
+        def show():
+            return self._get( "libraries/%s/contents/%s" % ( library[ "id" ], library_dataset[ "id" ] ) )
+
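+        # Poll the new library dataset until it reaches an "ok" state before
+        # inspecting its metadata.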
+        wait_on_state( show, assert_ok=True )
+        library_dataset = show().json()
+        self._assert_has_keys( library_dataset, "peek", "data_type" )
+        assert library_dataset[ "peek" ].find("create_test") >= 0
+        assert library_dataset[ "file_ext" ] == "txt", library_dataset[ "file_ext" ]
+
+    def _create_folder( self, library ):
+        create_data = dict(
+            folder_id=library[ "root_folder_id" ],
+            create_type="folder",
+            name="New Folder",
+        )
+        return self._post( "libraries/%s/contents" % library[ "id" ], data=create_data )
diff --git a/test/api/test_page_revisions.py b/test/api/test_page_revisions.py
new file mode 100644
index 0000000..8f6013c
--- /dev/null
+++ b/test/api/test_page_revisions.py
@@ -0,0 +1,36 @@
+from galaxy.exceptions import error_codes
+
+from .test_pages import BasePageApiTestCase
+
+
+class PageRevisionsApiTestCase( BasePageApiTestCase ):
+
+    def test_create( self ):
+        page_json = self._create_valid_page_with_slug( "pr1" )
+        revision_data = dict( content="<p>NewContent!</p>" )
+        page_revision_response = self._post( "pages/%s/revisions" % page_json[ 'id' ], data=revision_data )
+        self._assert_status_code_is( page_revision_response, 200 )
+        page_revision_json = page_revision_response.json()
+        self._assert_has_keys( page_revision_json, 'id', 'content' )
+
+    def test_403_if_create_revision_on_unowned_page( self ):
+        page_json = self._create_valid_page_as( "pr2@bx.psu.edu", "pr2" )
+        revision_data = dict( content="<p>NewContent!</p>" )
+        page_revision_response = self._post( "pages/%s/revisions" % page_json[ 'id' ], data=revision_data )
+        self._assert_status_code_is( page_revision_response, 403 )
+
+    def test_revision_index( self ):
+        page_json = self._create_valid_page_with_slug( "pr3" )
+        revision_data = dict( content="<p>NewContent!</p>" )
+        revisions_url = "pages/%s/revisions" % page_json[ 'id' ]
+        self._post( revisions_url, data=revision_data )
+        revisions_response = self._get( revisions_url )
+        self._assert_status_code_is( revisions_response, 200 )
+        revisions_json = revisions_response.json()
+        assert len( revisions_json ) == 2  # Original revision and new one
+
+    def test_404_if_index_unknown_page( self ):
+        revisions_url = "pages/%s/revisions" % self._random_key()
+        revisions_response = self._get( revisions_url )
+        self._assert_status_code_is( revisions_response, 404 )
+        self._assert_error_code_is( revisions_response, error_codes.USER_OBJECT_NOT_FOUND )
diff --git a/test/api/test_pages.py b/test/api/test_pages.py
new file mode 100644
index 0000000..1e72de9
--- /dev/null
+++ b/test/api/test_pages.py
@@ -0,0 +1,104 @@
+from requests import delete
+
+from base import api
+from galaxy.exceptions import error_codes
+
+
+class BasePageApiTestCase( api.ApiTestCase ):
+
+    def _create_valid_page_with_slug( self, slug ):
+        page_request = self._test_page_payload( slug=slug )
+        page_response = self._post( "pages", page_request )
+        self._assert_status_code_is( page_response, 200 )
+        return page_response.json()
+
+    def _create_valid_page_as( self, other_email, slug ):
+        run_as_user = self._setup_user( other_email )
+        page_request = self._test_page_payload( slug=slug )
+        page_request[ "run_as" ] = run_as_user[ "id" ]
+        page_response = self._post( "pages", page_request, admin=True )
+        self._assert_status_code_is( page_response, 200 )
+        return page_response.json()
+
+    def _test_page_payload( self, **kwds ):
+        request = dict(
+            slug="mypage",
+            title="MY PAGE",
+            content="<p>Page!</p>",
+        )
+        request.update( **kwds )
+        return request
+
+
+class PageApiTestCase( BasePageApiTestCase ):
+
+    def test_create( self ):
+        response_json = self._create_valid_page_with_slug( "mypage" )
+        self._assert_has_keys( response_json, "slug", "title", "id" )
+
+    def test_index( self ):
+        create_response_json = self._create_valid_page_with_slug( "indexpage" )
+        assert self._users_index_has_page_with_id( create_response_json[ "id" ] )
+
+    def test_index_doesnt_show_unavailable_pages( self ):
+        create_response_json = self._create_valid_page_as( "others_page_index@bx.psu.edu", "otherspageindex" )
+        assert not self._users_index_has_page_with_id( create_response_json[ "id" ] )
+
+    def test_cannot_create_pages_with_same_slug( self ):
+        page_request = self._test_page_payload( slug="mypage1" )
+        page_response_1 = self._post( "pages", page_request )
+        self._assert_status_code_is( page_response_1, 200 )
+        page_response_2 = self._post( "pages", page_request )
+        self._assert_status_code_is( page_response_2, 400 )
+        self._assert_error_code_is( page_response_2, error_codes.USER_SLUG_DUPLICATE )
+
+    def test_page_requires_name( self ):
+        page_request = self._test_page_payload()
+        del page_request[ 'title' ]
+        page_response = self._post( "pages", page_request )
+        self._assert_status_code_is( page_response, 400 )
+        self._assert_error_code_is( page_response, error_codes.USER_OBJECT_ATTRIBUTE_MISSING )
+
+    def test_page_requires_slug( self ):
+        page_request = self._test_page_payload()
+        del page_request[ 'slug' ]
+        page_response = self._post( "pages", page_request )
+        self._assert_status_code_is( page_response, 400 )
+
+    def test_delete( self ):
+        response_json = self._create_valid_page_with_slug( "testdelete" )
+        delete_response = delete( self._api_url( "pages/%s" % response_json[ 'id' ], use_key=True ) )
+        self._assert_status_code_is( delete_response, 200 )
+
+    def test_404_on_delete_unknown_page( self ):
+        delete_response = delete( self._api_url( "pages/%s" % self._random_key(), use_key=True ) )
+        self._assert_status_code_is( delete_response, 404 )
+        self._assert_error_code_is( delete_response, error_codes.USER_OBJECT_NOT_FOUND )
+
+    def test_403_on_delete_unowned_page( self ):
+        page_response = self._create_valid_page_as( "others_page@bx.psu.edu", "otherspage" )
+        delete_response = delete( self._api_url( "pages/%s" % page_response[ "id" ], use_key=True ) )
+        self._assert_status_code_is( delete_response, 403 )
+        self._assert_error_code_is( delete_response, error_codes.USER_DOES_NOT_OWN_ITEM )
+
+    def test_show( self ):
+        response_json = self._create_valid_page_with_slug( "pagetoshow" )
+        show_response = self._get( "pages/%s" % response_json['id'] )
+        self._assert_status_code_is( show_response, 200 )
+        show_json = show_response.json()
+        self._assert_has_keys( show_json, "slug", "title", "id" )
+        self.assertEquals( show_json["slug"], "pagetoshow" )
+        self.assertEquals( show_json["title"], "MY PAGE" )
+        self.assertEquals( show_json["content"], "<p>Page!</p>" )
+
+    def test_403_on_unowner_show( self ):
+        response_json = self._create_valid_page_as( "others_page_show@bx.psu.edu", "otherspageshow" )
+        show_response = self._get( "pages/%s" % response_json['id'] )
+        self._assert_status_code_is( show_response, 403 )
+        self._assert_error_code_is( show_response, error_codes.USER_DOES_NOT_OWN_ITEM )
+
+    def _users_index_has_page_with_id( self, page_id ):
+        index_response = self._get( "pages" )
+        self._assert_status_code_is( index_response, 200 )
+        pages = index_response.json()
+        return page_id in ( page["id"] for page in pages )
diff --git a/test/api/test_search.py b/test/api/test_search.py
new file mode 100644
index 0000000..25699b0
--- /dev/null
+++ b/test/api/test_search.py
@@ -0,0 +1,32 @@
+from base import api
+from requests import delete
+
+from .helpers import WorkflowPopulator
+
+
+class SearchApiTestCase( api.ApiTestCase ):
+
+    def test_search_workflows( self ):
+        workflow_populator = WorkflowPopulator( self.galaxy_interactor )
+        workflow_id = workflow_populator.simple_workflow( "test_for_search" )
+        search_response = self.__search( "select * from workflow" )
+        assert self.__has_result_with_name( search_response, "test_for_search (imported from API)" ), search_response.json()
+
+        # Deleted
+        delete_url = self._api_url( "workflows/%s" % workflow_id, use_key=True )
+        delete( delete_url )
+
+        search_response = self.__search( "select * from workflow where deleted = False" )
+        assert not self.__has_result_with_name( search_response, "test_for_search (imported from API)" ), search_response.json()
+
+    def __search( self, query ):
+        data = dict( query=query )
+        search_response = self._post( "search", data=data )
+        self._assert_status_code_is( search_response, 200 )
+        return search_response
+
+    def __has_result_with_name( self, search_response, name ):
+        search_response_object = search_response.json()
+        assert "results" in search_response_object, search_response_object
+        results = search_response_object[ "results" ]
+        return name in map( lambda r: r.get( "name", None ), results )
diff --git a/test/api/test_tool_data.py b/test/api/test_tool_data.py
new file mode 100644
index 0000000..0a0f3b1
--- /dev/null
+++ b/test/api/test_tool_data.py
@@ -0,0 +1,76 @@
+""" Tests for the tool data API.
+"""
+from __future__ import print_function
+
+from base import api
+
+
+class ToolDataApiTestCase( api.ApiTestCase ):
+
+    def test_admin_only( self ):
+        index_response = self._get( "tool_data", admin=False )
+        self._assert_status_code_is( index_response, 403 )
+
+    def test_list(self):
+        index_response = self._get( "tool_data", admin=True )
+        self._assert_status_code_is( index_response, 200 )
+        print(index_response.content)
+        index = index_response.json()
+        assert "testalpha" in [operator.itemgetter("name")(_) for _ in index]
+
+    def test_show(self):
+        show_response = self._get( "tool_data/testalpha", admin=True )
+        self._assert_status_code_is( show_response, 200 )
+        print(show_response.content)
+        data_table = show_response.json()
+        assert data_table["columns"] == ["value", "name", "path"]
+        first_entry = data_table["fields"][0]
+        assert first_entry[0] == "data1"
+        assert first_entry[1] == "data1name"
+        assert first_entry[2].endswith("test/functional/tool-data/data1/entry.txt")
+
+    def test_show_field(self):
+        show_field_response = self._get( "tool_data/testalpha/fields/data1", admin=True )
+        self._assert_status_code_is( show_field_response, 200 )
+        field = show_field_response.json()
+        self._assert_has_keys( field, "files", "name", "fields", "fingerprint", "base_dir" )
+        files = field[ "files" ]
+        assert len( files ) == 2, "Length of files [%s] was not 2." % files
+
+    def test_download_field_file(self):
+        show_field_response = self._get( "tool_data/testalpha/fields/data1/files/entry.txt", admin=True )
+        self._assert_status_code_is( show_field_response, 200 )
+        content = show_field_response.content
+        assert content == "This is data 1.", content
+
+    # Following test case rendered invalid by the fix in
+    # https://github.com/galaxyproject/galaxy/commit/48f77dc742acf01ddbafafcc4634e69378f1f020#diff-bfb557a99c1f7d646d4968d8d680b885R154.
+    # TODO: Restore the test case when test framework allows actions from
+    # admin users.
+
+    # def test_delete_entry(self):
+    #     show_response = self._get( "tool_data/testbeta", admin=True )
+    #     original_count = len(show_response.json()["fields"])
+
+    #     dataset_populator = DatasetPopulator( self.galaxy_interactor )
+    #     history_id = dataset_populator.new_history()
+    #     payload = dataset_populator.run_tool_payload(
+    #         tool_id="data_manager",
+    #         inputs={"ignored_value": "moo"},
+    #         history_id=history_id,
+    #     )
+    #     create_response = self._post( "tools", data=payload )
+    #     self._assert_status_code_is( create_response, 200 )
+    #     dataset_populator.wait_for_history( history_id, assert_ok=True )
+    #     show_response = self._get( "tool_data/testbeta", admin=True )
+    #     updated_fields = show_response.json()["fields"]
+    #     assert len(updated_fields) == original_count + 1
+    #     field0 = updated_fields[0]
+    #     url = self._api_url( "tool_data/testbeta?key=%s" % self.galaxy_interactor.master_api_key )
+    #     delete( url, data=json.dumps({"values": "\t".join(field0)}) )
+
+    #     show_response = self._get( "tool_data/testbeta", admin=True )
+    #     updated_fields = show_response.json()["fields"]
+    #     assert len(updated_fields) == original_count
diff --git a/test/api/test_tools.py b/test/api/test_tools.py
new file mode 100644
index 0000000..2db9ed5
--- /dev/null
+++ b/test/api/test_tools.py
@@ -0,0 +1,1254 @@
+# Test tools API.
+import json
+
+from base import api
+from galaxy.tools.verify.test_data import TestDataResolver
+
+from .helpers import (
+    DatasetCollectionPopulator,
+    DatasetPopulator,
+    LibraryPopulator,
+    skip_without_tool
+)
+
+
+class ToolsTestCase( api.ApiTestCase ):
+
+    def setUp( self ):
+        super( ToolsTestCase, self ).setUp( )
+        self.dataset_populator = DatasetPopulator( self.galaxy_interactor )
+        self.dataset_collection_populator = DatasetCollectionPopulator( self.galaxy_interactor )
+
+    def test_index( self ):
+        tool_ids = self.__tool_ids()
+        assert "upload1" in tool_ids
+
+    def test_no_panel_index( self ):
+        index = self._get( "tools", data=dict( in_panel="false" ) )
+        tools_index = index.json()
+        # With in_panel=False only tools are returned, so there is no need to
+        # flatten out sections.
+        tool_ids = [_["id"] for _ in tools_index]
+        assert "upload1" in tool_ids
+
+    @skip_without_tool( "cat1" )
+    def test_show_repeat( self ):
+        tool_info = self._show_valid_tool( "cat1" )
+        parameters = tool_info[ "inputs" ]
+        assert len( parameters ) == 2, "Expected two inputs - got [%s]" % parameters
+        assert parameters[ 0 ][ "name" ] == "input1"
+        assert parameters[ 1 ][ "name" ] == "queries"
+
+        repeat_info = parameters[ 1 ]
+        self._assert_has_keys( repeat_info, "min", "max", "title", "help" )
+        repeat_params = repeat_info[ "inputs" ]
+        assert len( repeat_params ) == 1
+        assert repeat_params[ 0 ][ "name" ] == "input2"
+
+    @skip_without_tool( "random_lines1" )
+    def test_show_conditional( self ):
+        tool_info = self._show_valid_tool( "random_lines1" )
+
+        cond_info = tool_info[ "inputs" ][ 2 ]
+        self._assert_has_keys( cond_info, "cases", "test_param" )
+        self._assert_has_keys( cond_info[ "test_param" ], 'name', 'type', 'label', 'help' )
+
+        cases = cond_info[ "cases" ]
+        assert len( cases ) == 2
+        case1 = cases[ 0 ]
+        self._assert_has_keys( case1, "value", "inputs" )
+        assert case1[ "value" ] == "no_seed"
+        assert len( case1[ "inputs" ] ) == 0
+
+        case2 = cases[ 1 ]
+        self._assert_has_keys( case2, "value", "inputs" )
+        case2_inputs = case2[ "inputs" ]
+        assert len( case2_inputs ) == 1
+        self._assert_has_keys( case2_inputs[ 0 ], 'name', 'type', 'label', 'help', 'argument' )
+        assert case2_inputs[ 0 ][ "name" ] == "seed"
+
+    @skip_without_tool( "multi_data_param" )
+    def test_show_multi_data( self ):
+        tool_info = self._show_valid_tool( "multi_data_param" )
+
+        f1_info, f2_info = tool_info[ "inputs" ][ 0 ], tool_info[ "inputs" ][ 1 ]
+        self._assert_has_keys( f1_info, "min", "max" )
+        assert f1_info["min"] == 1
+        assert f1_info["max"] == 1235
+
+        self._assert_has_keys( f2_info, "min", "max" )
+        assert f2_info["min"] is None
+        assert f2_info["max"] is None
+
+    def _show_valid_tool( self, tool_id ):
+        tool_show_response = self._get( "tools/%s" % tool_id, data=dict( io_details=True ) )
+        self._assert_status_code_is( tool_show_response, 200 )
+        tool_info = tool_show_response.json()
+        self._assert_has_keys( tool_info, "inputs", "outputs", "panel_section_id" )
+        return tool_info
+
+    def test_upload1_paste( self ):
+        history_id = self.dataset_populator.new_history()
+        payload = self.dataset_populator.upload_payload( history_id, 'Hello World' )
+        create_response = self._post( "tools", data=payload )
+        self._assert_has_keys( create_response.json(), 'outputs' )
+
+    def test_upload_posix_newline_fixes( self ):
+        windows_content = "1\t2\t3\r4\t5\t6\r"
+        posix_content = windows_content.replace("\r", "\n")
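+        # Uploads convert carriage returns to POSIX newlines unless
+        # to_posix_lines is disabled (see the next test case).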
+        result_content = self._upload_and_get_content( windows_content )
+        self.assertEquals( result_content, posix_content )
+
+    def test_upload_disable_posix_fix( self ):
+        windows_content = "1\t2\t3\r4\t5\t6\r"
+        result_content = self._upload_and_get_content( windows_content, to_posix_lines=None )
+        self.assertEquals( result_content, windows_content )
+
+    def test_upload_tab_to_space( self ):
+        table = "1 2 3\n4 5 6\n"
+        result_content = self._upload_and_get_content( table, space_to_tab="Yes" )
+        self.assertEquals( result_content, "1\t2\t3\n4\t5\t6\n" )
+
+    def test_upload_tab_to_space_off_by_default( self ):
+        table = "1 2 3\n4 5 6\n"
+        result_content = self._upload_and_get_content( table )
+        self.assertEquals( result_content, table )
+
+    def test_rdata_not_decompressed( self ):
+        # Prevent regression of https://github.com/galaxyproject/galaxy/issues/753
+        rdata_path = TestDataResolver().get_filename("1.RData")
+        rdata_metadata = self._upload_and_get_details( open(rdata_path, "rb"), file_type="auto" )
+        self.assertEquals( rdata_metadata[ "file_ext" ], "rdata" )
+
+    def test_unzip_collection( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_id = self.__build_pair( history_id, [ "123", "456" ] )
+        inputs = {
+            "input": { "src": "hdca", "id": hdca_id },
+        }
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        response = self._run( "__UNZIP_COLLECTION__", history_id, inputs, assert_ok=True )
+        outputs = response[ "outputs" ]
+        self.assertEquals( len(outputs), 2 )
+        output_forward = outputs[ 0 ]
+        output_reverse = outputs[ 1 ]
+        output_forward_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output_forward )
+        output_reverse_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output_reverse )
+        assert output_forward_content.strip() == "123"
+        assert output_reverse_content.strip() == "456"
+
+        output_forward = self.dataset_populator.get_history_dataset_details( history_id, dataset=output_forward )
+        output_reverse = self.dataset_populator.get_history_dataset_details( history_id, dataset=output_reverse )
+
+        assert output_forward["history_id"] == history_id
+        assert output_reverse["history_id"] == history_id
+
+    def test_unzip_nested( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_list_id = self.__build_nested_list( history_id )
+        inputs = {
+            "input": {
+                'batch': True,
+                'values': [ { 'src': 'hdca', 'map_over_type': 'paired', 'id': hdca_list_id }],
+            }
+        }
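+        # map_over_type='paired' asks Galaxy to map the tool over the paired
+        # elements of the nested list rather than over the outer list itself.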
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self._run( "__UNZIP_COLLECTION__", history_id, inputs, assert_ok=True )
+
+    def test_zip_inputs( self ):
+        history_id = self.dataset_populator.new_history()
+        hda1 = dataset_to_param( self.dataset_populator.new_dataset( history_id, content='1\t2\t3' ) )
+        hda2 = dataset_to_param( self.dataset_populator.new_dataset( history_id, content='4\t5\t6' ) )
+        inputs = {
+            "input_forward": hda1,
+            "input_reverse": hda2,
+        }
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        response = self._run( "__ZIP_COLLECTION__", history_id, inputs, assert_ok=True )
+        output_collections = response[ "output_collections" ]
+        self.assertEquals( len(output_collections), 1 )
+
+    def test_zip_list_inputs( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.dataset_collection_populator.create_list_in_history( history_id, contents=["a\nb\nc\nd", "e\nf\ng\nh"] ).json()["id"]
+        hdca2_id = self.dataset_collection_populator.create_list_in_history( history_id, contents=["1\n2\n3\n4", "5\n6\n7\n8"] ).json()["id"]
+        inputs = {
+            "input_forward": { 'batch': True, 'values': [ {"src": "hdca", "id": hdca1_id} ] },
+            "input_reverse": { 'batch': True, 'values': [ {"src": "hdca", "id": hdca2_id} ] },
+        }
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        response = self._run( "__ZIP_COLLECTION__", history_id, inputs, assert_ok=True )
+        implicit_collections = response[ "implicit_collections" ]
+        self.assertEquals( len(implicit_collections), 1 )
+
+    def test_filter_failed( self ):
+        history_id = self.dataset_populator.new_history()
+        ok_hdca_id = self.dataset_collection_populator.create_list_in_history( history_id, contents=["0", "1", "0", "1"] ).json()["id"]
+        exit_code_inputs = {
+            "input": { 'batch': True, 'values': [ {"src": "hdca", "id": ok_hdca_id} ] },
+        }
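+        # Each element's content becomes the tool's exit code, so the "1"
+        # elements fail and produce the mixed ok/error states asserted below.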
+        response = self._run( "exit_code_from_file", history_id, exit_code_inputs, assert_ok=False ).json()
+        self.dataset_populator.wait_for_history( history_id, assert_ok=False )
+
+        mixed_implicit_collections = response[ "implicit_collections" ]
+        self.assertEquals( len(mixed_implicit_collections), 1 )
+        mixed_hdca_hid = mixed_implicit_collections[0]["hid"]
+        mixed_hdca = self.dataset_populator.get_history_collection_details(history_id, hid=mixed_hdca_hid, wait=False)
+
+        def get_state(dce):
+            return dce["object"]["state"]
+
+        mixed_states = [get_state(_) for _ in mixed_hdca["elements"]]
+        assert mixed_states == [u"ok", u"error", u"ok", u"error"], mixed_states
+        inputs = {
+            "input": { "src": "hdca", "id": mixed_hdca["id"] },
+        }
+        response = self._run( "__FILTER_FAILED_DATASETS__", history_id, inputs, assert_ok=False ).json()
+        self.dataset_populator.wait_for_history( history_id, assert_ok=False )
+        filter_output_collections = response[ "output_collections" ]
+        self.assertEquals( len(filter_output_collections), 1 )
+        filtered_hid = filter_output_collections[0]["hid"]
+        filtered_hdca = self.dataset_populator.get_history_collection_details(history_id, hid=filtered_hid, wait=False)
+        filtered_states = [get_state(_) for _ in filtered_hdca["elements"]]
+        assert filtered_states == [u"ok", u"ok"], filtered_states
+
+    @skip_without_tool( "multi_select" )
+    def test_multi_select_as_list( self ):
+        history_id = self.dataset_populator.new_history()
+        inputs = {
+            "select_ex": ["--ex1", "ex2"],
+        }
+        response = self._run( "multi_select", history_id, inputs, assert_ok=True )
+        output = response[ "outputs" ][ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output )
+
+        assert output1_content == "--ex1,ex2"
+
+    @skip_without_tool( "multi_select" )
+    def test_multi_select_optional( self ):
+        history_id = self.dataset_populator.new_history()
+        inputs = {
+            "select_ex": ["--ex1"],
+            "select_optional": None,
+        }
+        response = self._run( "multi_select", history_id, inputs, assert_ok=True )
+        output = response[ "outputs" ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output[ 0 ] )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output[ 1 ] )
+        assert output1_content.strip() == "--ex1"
+        assert output2_content.strip() == "None", output2_content
+
+    @skip_without_tool( "library_data" )
+    def test_library_data_param( self ):
+        history_id = self.dataset_populator.new_history()
+        ld = LibraryPopulator( self ).new_library_dataset( "lda_test_library" )
+        inputs = {
+            "library_dataset": ld[ "ldda_id" ],
+            "library_dataset_multiple": [ld[ "ldda_id" ], ld[ "ldda_id" ]]
+        }
+        response = self._run( "library_data", history_id, inputs, assert_ok=True )
+        output = response[ "outputs" ]
+        output_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output[ 0 ] )
+        assert output_content == "TestData\n", output_content
+        output_multiple_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output[ 1 ] )
+        assert output_multiple_content == "TestData\nTestData\n", output_multiple_content
+
+    @skip_without_tool( "multi_data_param" )
+    def test_multidata_param( self ):
+        history_id = self.dataset_populator.new_history()
+        hda1 = dataset_to_param( self.dataset_populator.new_dataset( history_id, content='1\t2\t3' ) )
+        hda2 = dataset_to_param( self.dataset_populator.new_dataset( history_id, content='4\t5\t6' ) )
+        inputs = {
+            "f1": { 'batch': False, 'values': [ hda1, hda2 ] },
+            "f2": { 'batch': False, 'values': [ hda2, hda1 ] },
+        }
+        response = self._run( "multi_data_param", history_id, inputs, assert_ok=True )
+        output1 = response[ "outputs" ][ 0 ]
+        output2 = response[ "outputs" ][ 1 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        assert output1_content == "1\t2\t3\n4\t5\t6\n", output1_content
+        assert output2_content == "4\t5\t6\n1\t2\t3\n", output2_content
+
+    @skip_without_tool( "cat1" )
+    def test_run_cat1( self ):
+        # Run simple non-upload tool with an input data parameter.
+        history_id = self.dataset_populator.new_history()
+        new_dataset = self.dataset_populator.new_dataset( history_id, content='Cat1Test' )
+        inputs = dict(
+            input1=dataset_to_param( new_dataset ),
+        )
+        outputs = self._cat1_outputs( history_id, inputs=inputs )
+        self.assertEquals( len( outputs ), 1 )
+        output1 = outputs[ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        self.assertEqual( output1_content.strip(), "Cat1Test" )
+
+    @skip_without_tool( "cat1" )
+    def test_run_cat1_listified_param( self ):
+        # Run simple non-upload tool with an input data parameter.
+        history_id = self.dataset_populator.new_history()
+        new_dataset = self.dataset_populator.new_dataset( history_id, content='Cat1Testlistified' )
+        inputs = dict(
+            input1=[dataset_to_param( new_dataset )],
+        )
+        outputs = self._cat1_outputs( history_id, inputs=inputs )
+        self.assertEquals( len( outputs ), 1 )
+        output1 = outputs[ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        self.assertEqual( output1_content.strip(), "Cat1Testlistified" )
+
+    @skip_without_tool( "multiple_versions" )
+    def test_run_by_versions( self ):
+        for version in ["0.1", "0.2"]:
+            # Run simple non-upload tool with an input data parameter.
+            history_id = self.dataset_populator.new_history()
+            inputs = dict()
+            outputs = self._run_and_get_outputs( tool_id="multiple_versions", history_id=history_id, inputs=inputs, tool_version=version )
+            self.assertEquals( len( outputs ), 1 )
+            output1 = outputs[ 0 ]
+            output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+            self.assertEqual( output1_content.strip(), "Version " + version )
+
+    @skip_without_tool( "cat1" )
+    def test_run_cat1_single_meta_wrapper( self ):
+        # Wrap input in a no-op meta parameter wrapper like Sam is planning to
+        # use for all UI API submissions.
+        history_id = self.dataset_populator.new_history()
+        new_dataset = self.dataset_populator.new_dataset( history_id, content='123' )
+        inputs = dict(
+            input1={ 'batch': False, 'values': [ dataset_to_param( new_dataset ) ] },
+        )
+        outputs = self._cat1_outputs( history_id, inputs=inputs )
+        self.assertEquals( len( outputs ), 1 )
+        output1 = outputs[ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        self.assertEqual( output1_content.strip(), "123" )
+
+    @skip_without_tool( "validation_default" )
+    def test_validation( self ):
+        history_id = self.dataset_populator.new_history()
+        inputs = {
+            'select_param': "\" ; echo \"moo",
+        }
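+        # The select value is validated against the tool's declared options,
+        # so this injection attempt should be rejected with a 400.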
+        response = self._run( "validation_default", history_id, inputs )
+        self._assert_status_code_is( response, 400 )
+
+    @skip_without_tool( "validation_empty_dataset" )
+    def test_validation_empty_dataset( self ):
+        history_id = self.dataset_populator.new_history()
+        inputs = {
+        }
+        outputs = self._run_and_get_outputs( 'empty_output', history_id, inputs )
+        empty_dataset = outputs[0]
+        inputs = {
+            'input1': dataset_to_param(empty_dataset),
+        }
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        response = self._run( "validation_empty_dataset", history_id, inputs )
+        self._assert_status_code_is( response, 400 )
+
+    @skip_without_tool( "validation_repeat" )
+    def test_validation_in_repeat( self ):
+        history_id = self.dataset_populator.new_history()
+        inputs = {
+            'r1_0|text': "123",
+            'r2_0|text': "",
+        }
+        response = self._run( "validation_repeat", history_id, inputs )
+        self._assert_status_code_is( response, 400 )
+
+    @skip_without_tool( "multi_select" )
+    def test_select_legal_values( self ):
+        history_id = self.dataset_populator.new_history()
+        inputs = {
+            'select_ex': 'not_option',
+        }
+        response = self._run( "multi_select", history_id, inputs )
+        self._assert_status_code_is( response, 400 )
+
+    @skip_without_tool( "column_param" )
+    def test_column_legal_values( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='#col1\tcol2' )
+        inputs = {
+            'input1': { "src": "hda", "id": new_dataset1["id"] },
+            'col': "' ; echo 'moo",
+        }
+        response = self._run( "column_param", history_id, inputs )
+        assert response.status_code != 200
+
+    @skip_without_tool( "collection_paired_test" )
+    def test_collection_parameter( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_id = self.__build_pair( history_id, [ "123", "456" ] )
+        inputs = {
+            "f1": { "src": "hdca", "id": hdca_id },
+        }
+        output = self._run( "collection_paired_test", history_id, inputs, assert_ok=True )
+        assert len( output[ 'jobs' ] ) == 1
+        assert len( output[ 'implicit_collections' ] ) == 0
+        assert len( output[ 'outputs' ] ) == 1
+        contents = self.dataset_populator.get_history_dataset_content( history_id, hid=4 )
+        assert contents.strip() == "123\n456", contents
+
+    @skip_without_tool( "collection_creates_pair" )
+    def test_paired_collection_output( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123\n456\n789\n0ab' )
+        inputs = {
+            "input1": {"src": "hda", "id": new_dataset1["id"]},
+        }
+        # TODO: shouldn't need this wait
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        create = self._run( "collection_creates_pair", history_id, inputs, assert_ok=True )
+        output_collection = self._assert_one_job_one_collection_run( create )
+        element0, element1 = self._assert_elements_are( output_collection, "forward", "reverse" )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self._verify_element( history_id, element0, contents="123\n789\n", file_ext="txt", visible=False )
+        self._verify_element( history_id, element1, contents="456\n0ab\n", file_ext="txt", visible=False )
+
+    @skip_without_tool( "collection_creates_list" )
+    def test_list_collection_output( self ):
+        history_id = self.dataset_populator.new_history()
+        create_response = self.dataset_collection_populator.create_list_in_history( history_id, contents=["a\nb\nc\nd", "e\nf\ng\nh"] )
+        hdca_id = create_response.json()[ "id" ]
+        inputs = {
+            "input1": { "src": "hdca", "id": hdca_id },
+        }
+        # TODO: real problem here - shouldn't have to have this wait.
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        create = self._run( "collection_creates_list", history_id, inputs, assert_ok=True )
+        output_collection = self._assert_one_job_one_collection_run( create )
+        element0, element1 = self._assert_elements_are( output_collection, "data1", "data2" )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self._verify_element( history_id, element0, contents="identifier is data1\n", file_ext="txt" )
+        self._verify_element( history_id, element1, contents="identifier is data2\n", file_ext="txt" )
+
+    @skip_without_tool( "collection_creates_list_2" )
+    def test_list_collection_output_format_source( self ):
+        # test using format_source with a tool
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='#col1\tcol2' )
+        create_response = self.dataset_collection_populator.create_list_in_history( history_id, contents=["a\tb\nc\td", "e\tf\ng\th"] )
+        hdca_id = create_response.json()[ "id" ]
+        inputs = {
+            "header": { "src": "hda", "id": new_dataset1["id"] },
+            "input_collect": { "src": "hdca", "id": hdca_id },
+        }
+        # TODO: real problem here - shouldn't have to have this wait.
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        create = self._run( "collection_creates_list_2", history_id, inputs, assert_ok=True )
+        output_collection = self._assert_one_job_one_collection_run( create )
+        element0, element1 = self._assert_elements_are( output_collection, "data1", "data2" )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self._verify_element( history_id, element0, contents="#col1\tcol2\na\tb\nc\td\n", file_ext="txt" )
+        self._verify_element( history_id, element1, contents="#col1\tcol2\ne\tf\ng\th\n", file_ext="txt" )
+
+    @skip_without_tool( "collection_split_on_column" )
+    def test_dynamic_list_output( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='samp1\t1\nsamp1\t3\nsamp2\t2\nsamp2\t4\n' )
+        inputs = {
+            'input1': dataset_to_param( new_dataset1 ),
+        }
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        create = self._run( "collection_split_on_column", history_id, inputs, assert_ok=True )
+
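+        # Right after the run the collection exists but is unpopulated; its
+        # elements only appear once the job finishes (checked below).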
+        output_collection = self._assert_one_job_one_collection_run( create )
+        self._assert_has_keys( output_collection, "id", "name", "elements", "populated" )
+        assert not output_collection[ "populated" ]
+        assert len( output_collection[ "elements" ] ) == 0
+        self.assertEquals( output_collection[ "name" ], "Table split on first column" )
+        self.dataset_populator.wait_for_job( create["jobs"][0]["id"], assert_ok=True )
+
+        get_collection_response = self._get( "dataset_collections/%s" % output_collection[ "id" ], data={"instance_type": "history"} )
+        self._assert_status_code_is( get_collection_response, 200 )
+
+        output_collection = get_collection_response.json()
+        self._assert_has_keys( output_collection, "id", "name", "elements", "populated" )
+        assert output_collection[ "populated" ]
+        assert len( output_collection[ "elements" ] ) == 2
+        self.assertEquals( output_collection[ "name" ], "Table split on first column" )
+
+        # TODO: verify element identifiers
+
+    @skip_without_tool( "cat1" )
+    def test_run_cat1_with_two_inputs( self ):
+        # Run tool with a multiple data parameter and grouping (repeat).
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='Cat1Test' )
+        new_dataset2 = self.dataset_populator.new_dataset( history_id, content='Cat2Test' )
+        inputs = {
+            'input1': dataset_to_param( new_dataset1 ),
+            'queries_0|input2': dataset_to_param( new_dataset2 )
+        }
+        outputs = self._cat1_outputs( history_id, inputs=inputs )
+        self.assertEquals( len( outputs ), 1 )
+        output1 = outputs[ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        self.assertEqual( output1_content.strip(), "Cat1Test\nCat2Test" )
+
+    @skip_without_tool( "cat1" )
+    def test_multirun_cat1( self ):
+        history_id, datasets = self._prepare_cat1_multirun()
+        inputs = {
+            "input1": {
+                'batch': True,
+                'values': datasets,
+            },
+        }
+        self._check_cat1_multirun( history_id, inputs )
+
+    def _prepare_cat1_multirun( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123' )
+        new_dataset2 = self.dataset_populator.new_dataset( history_id, content='456' )
+        return history_id, [ dataset_to_param( new_dataset1 ), dataset_to_param( new_dataset2 ) ]
+
+    def _check_cat1_multirun( self, history_id, inputs ):
+        outputs = self._cat1_outputs( history_id, inputs=inputs )
+        self.assertEquals( len( outputs ), 2 )
+        output1 = outputs[ 0 ]
+        output2 = outputs[ 1 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        self.assertEquals( output1_content.strip(), "123" )
+        self.assertEquals( output2_content.strip(), "456" )
+
+    @skip_without_tool( "random_lines1" )
+    def test_multirun_non_data_parameter( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123\n456\n789' )
+        inputs = {
+            'input': dataset_to_param( new_dataset1 ),
+            'num_lines': { 'batch': True, 'values': [ 1, 2, 3 ] }
+        }
+        outputs = self._run_and_get_outputs( 'random_lines1', history_id, inputs )
+        # Assert we have three outputs with 1, 2, and 3 lines respectively.
+        assert len( outputs ) == 3
+        outputs_contents = [ self.dataset_populator.get_history_dataset_content( history_id, dataset=o ).strip() for o in outputs ]
+        assert sorted( len( c.split( "\n" ) ) for c in outputs_contents ) == [ 1, 2, 3 ]
+
+    @skip_without_tool( "cat1" )
+    def test_multirun_in_repeat( self ):
+        history_id, common_dataset, repeat_datasets = self._setup_repeat_multirun( )
+        inputs = {
+            "input1": common_dataset,
+            'queries_0|input2': { 'batch': True, 'values': repeat_datasets },
+        }
+        self._check_repeat_multirun( history_id, inputs )
+
+    @skip_without_tool( "cat1" )
+    def test_multirun_in_repeat_mismatch( self ):
+        history_id, common_dataset, repeat_datasets = self._setup_repeat_multirun( )
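+        # An input explicitly marked 'batch': False stays a plain single
+        # value; only the batched repeat input should multiply the jobs.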
+        inputs = {
+            "input1": {'batch': False, 'values': [ common_dataset ] },
+            'queries_0|input2': { 'batch': True, 'values': repeat_datasets },
+        }
+        self._check_repeat_multirun( history_id, inputs )
+
+    @skip_without_tool( "cat1" )
+    def test_multirun_on_multiple_inputs( self ):
+        history_id, first_two, second_two = self._setup_two_multiruns()
+        inputs = {
+            "input1": { 'batch': True, 'values': first_two },
+            'queries_0|input2': { 'batch': True, 'values': second_two },
+        }
+        outputs = self._cat1_outputs( history_id, inputs=inputs )
+        self.assertEquals( len( outputs ), 2 )
+        outputs_contents = [ self.dataset_populator.get_history_dataset_content( history_id, dataset=o ).strip() for o in outputs ]
+        assert "123\n789" in outputs_contents
+        assert "456\n0ab" in outputs_contents
+
+    @skip_without_tool( "cat1" )
+    def test_multirun_on_multiple_inputs_unlinked( self ):
+        history_id, first_two, second_two = self._setup_two_multiruns()
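+        # With 'linked': False the two batched inputs are crossed rather than
+        # matched pairwise, so 2 x 2 values should produce four jobs.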
+        inputs = {
+            "input1": { 'batch': True, 'linked': False, 'values': first_two },
+            'queries_0|input2': { 'batch': True, 'linked': False, 'values': second_two },
+        }
+        outputs = self._cat1_outputs( history_id, inputs=inputs )
+        outputs_contents = [ self.dataset_populator.get_history_dataset_content( history_id, dataset=o ).strip() for o in outputs ]
+        self.assertEquals( len( outputs ), 4 )
+        assert "123\n789" in outputs_contents
+        assert "456\n0ab" in outputs_contents
+        assert "123\n0ab" in outputs_contents
+        assert "456\n789" in outputs_contents
+
+    def _assert_one_job_one_collection_run( self, create ):
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        collections = create[ 'output_collections' ]
+
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( implicit_collections ), 0 )
+        self.assertEquals( len( collections ), 1 )
+
+        output_collection = collections[ 0 ]
+        return output_collection
+
+    def _assert_elements_are( self, collection, *args ):
+        elements = collection["elements"]
+        self.assertEquals(len(elements), len(args))
+        for index, element in enumerate(elements):
+            arg = args[index]
+            self.assertEquals(arg, element["element_identifier"])
+        return elements
+
+    def _verify_element( self, history_id, element, **props ):
+        object_id = element["object"]["id"]
+
+        if "contents" in props:
+            expected_contents = props["contents"]
+
+            contents = self.dataset_populator.get_history_dataset_content( history_id, dataset_id=object_id)
+            self.assertEquals( contents, expected_contents )
+
+            del props["contents"]
+
+        if props:
+            details = self.dataset_populator.get_history_dataset_details( history_id, dataset_id=object_id)
+            for key, value in props.items():
+                self.assertEquals( details[key], value )
+
+    def _setup_repeat_multirun( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123' )
+        new_dataset2 = self.dataset_populator.new_dataset( history_id, content='456' )
+        common_dataset = self.dataset_populator.new_dataset( history_id, content='Common' )
+        return (
+            history_id,
+            dataset_to_param( common_dataset ),
+            [ dataset_to_param( new_dataset1 ), dataset_to_param( new_dataset2 ) ]
+        )
+
+    def _check_repeat_multirun( self, history_id, inputs ):
+        outputs = self._cat1_outputs( history_id, inputs=inputs )
+        self.assertEquals( len( outputs ), 2 )
+        output1 = outputs[ 0 ]
+        output2 = outputs[ 1 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        self.assertEquals( output1_content.strip(), "Common\n123" )
+        self.assertEquals( output2_content.strip(), "Common\n456" )
+
+    def _setup_two_multiruns( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123' )
+        new_dataset2 = self.dataset_populator.new_dataset( history_id, content='456' )
+        new_dataset3 = self.dataset_populator.new_dataset( history_id, content='789' )
+        new_dataset4 = self.dataset_populator.new_dataset( history_id, content='0ab' )
+        return (
+            history_id,
+            [ dataset_to_param( new_dataset1 ), dataset_to_param( new_dataset2 ) ],
+            [ dataset_to_param( new_dataset3 ), dataset_to_param( new_dataset4 ) ]
+        )
+
+    @skip_without_tool( "cat1" )
+    def test_map_over_collection( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_id = self.__build_pair( history_id, [ "123", "456" ] )
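+        # Supplying an hdca ('src': 'hdca') as a batch value maps the tool
+        # over each element of the collection.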
+        inputs = {
+            "input1": { 'batch': True, 'values': [ { 'src': 'hdca', 'id': hdca_id } ] },
+        }
+        self._run_and_check_simple_collection_mapping( history_id, inputs )
+
+    @skip_without_tool( "output_action_change_format" )
+    def test_map_over_with_output_format_actions( self ):
+        for use_action in ["do", "dont"]:
+            history_id = self.dataset_populator.new_history()
+            hdca_id = self.__build_pair( history_id, [ "123", "456" ] )
+            inputs = {
+                "input_cond|dispatch": use_action,
+                "input_cond|input": { 'batch': True, 'values': [ { 'src': 'hdca', 'id': hdca_id } ] },
+            }
+            create = self._run( 'output_action_change_format', history_id, inputs ).json()
+            outputs = create[ 'outputs' ]
+            jobs = create[ 'jobs' ]
+            implicit_collections = create[ 'implicit_collections' ]
+            self.assertEquals( len( jobs ), 2 )
+            self.assertEquals( len( outputs ), 2 )
+            self.assertEquals( len( implicit_collections ), 1 )
+            output1 = outputs[ 0 ]
+            output2 = outputs[ 1 ]
+            output1_details = self.dataset_populator.get_history_dataset_details( history_id, dataset=output1 )
+            output2_details = self.dataset_populator.get_history_dataset_details( history_id, dataset=output2 )
+            expected_ext = "txt" if (use_action == "do") else "data"
+            assert output1_details[ "file_ext" ] == expected_ext
+            assert output2_details[ "file_ext" ] == expected_ext
+
+    @skip_without_tool( "Cut1" )
+    def test_map_over_with_complex_output_actions( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_id = self._bed_list(history_id)
+        inputs = {
+            "columnList": "c1,c2,c3,c4,c5",
+            "delimiter": "T",
+            "input": { 'batch': True, 'values': [ { 'src': 'hdca', 'id': hdca_id } ] },
+        }
+        create = self._run( 'Cut1', history_id, inputs ).json()
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        self.assertEquals( len( jobs ), 2 )
+        self.assertEquals( len( outputs ), 2 )
+        self.assertEquals( len( implicit_collections ), 1 )
+        output1 = outputs[ 0 ]
+        output2 = outputs[ 1 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        assert output1_content.startswith("chr1")
+        assert output2_content.startswith("chr1")
+
+    def _bed_list(self, history_id):
+        with open(self.get_filename("1.bed"), "r") as f:
+            bed1_contents = f.read()
+        with open(self.get_filename("2.bed"), "r") as f:
+            bed2_contents = f.read()
+        contents = [bed1_contents, bed2_contents]
+        hdca = self.dataset_collection_populator.create_list_in_history( history_id, contents=contents ).json()
+        return hdca["id"]
+
+    def _run_and_check_simple_collection_mapping( self, history_id, inputs ):
+        create = self._run_cat1( history_id, inputs=inputs, assert_ok=True )
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        self.assertEquals( len( jobs ), 2 )
+        self.assertEquals( len( outputs ), 2 )
+        self.assertEquals( len( implicit_collections ), 1 )
+        output1 = outputs[ 0 ]
+        output2 = outputs[ 1 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        self.assertEquals( output1_content.strip(), "123" )
+        self.assertEquals( output2_content.strip(), "456" )
+
+    @skip_without_tool( "identifier_single" )
+    def test_identifier_in_map( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_id = self.__build_pair( history_id, [ "123", "456" ] )
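+        # Each mapped job should see its element identifier ("forward" or
+        # "reverse"), which the identifier_single tool writes to its output.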
+        inputs = {
+            "input1": { 'batch': True, 'values': [ { 'src': 'hdca', 'id': hdca_id } ] },
+        }
+        create_response = self._run( "identifier_single", history_id, inputs )
+        self._assert_status_code_is( create_response, 200 )
+        create = create_response.json()
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        self.assertEquals( len( jobs ), 2 )
+        self.assertEquals( len( outputs ), 2 )
+        self.assertEquals( len( implicit_collections ), 1 )
+        output1 = outputs[ 0 ]
+        output2 = outputs[ 1 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        self.assertEquals( output1_content.strip(), "forward" )
+        self.assertEquals( output2_content.strip(), "reverse" )
+
+    @skip_without_tool( "identifier_single" )
+    def test_identifier_outside_map( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123' )
+        inputs = {
+            "input1": { 'src': 'hda', 'id': new_dataset1["id"] },
+        }
+        create_response = self._run( "identifier_single", history_id, inputs )
+        self._assert_status_code_is( create_response, 200 )
+        create = create_response.json()
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( outputs ), 1 )
+        self.assertEquals( len( implicit_collections ), 0 )
+        output1 = outputs[ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        self.assertEquals( output1_content.strip(), "Pasted Entry" )
+
+    @skip_without_tool( "identifier_multiple" )
+    def test_identifier_in_multiple_reduce( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_id = self.__build_pair( history_id, [ "123", "456" ] )
+        inputs = {
+            "input1": { 'src': 'hdca', 'id': hdca_id },
+        }
+        create_response = self._run( "identifier_multiple", history_id, inputs )
+        self._assert_status_code_is( create_response, 200 )
+        create = create_response.json()
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( outputs ), 1 )
+        self.assertEquals( len( implicit_collections ), 0 )
+        output1 = outputs[ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        self.assertEquals( output1_content.strip(), "forward\nreverse" )
+
+    @skip_without_tool( "identifier_multiple" )
+    def test_identifier_with_multiple_normal_datasets( self ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123' )
+        new_dataset2 = self.dataset_populator.new_dataset( history_id, content='456' )
+        inputs = {
+            "input1": [
+                { 'src': 'hda', 'id': new_dataset1["id"] },
+                { 'src': 'hda', 'id': new_dataset2["id"] }
+            ]
+        }
+        create_response = self._run( "identifier_multiple", history_id, inputs )
+        self._assert_status_code_is( create_response, 200 )
+        create = create_response.json()
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( outputs ), 1 )
+        self.assertEquals( len( implicit_collections ), 0 )
+        output1 = outputs[ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        self.assertEquals( output1_content.strip(), "Pasted Entry\nPasted Entry" )
+
+    @skip_without_tool( "identifier_collection" )
+    def test_identifier_with_data_collection( self ):
+        history_id = self.dataset_populator.new_history()
+
+        element_identifiers = self.dataset_collection_populator.list_identifiers( history_id )
+
+        payload = dict(
+            instance_type="history",
+            history_id=history_id,
+            element_identifiers=json.dumps(element_identifiers),
+            collection_type="list",
+        )
+
+        create_response = self._post( "dataset_collections", payload )
+        dataset_collection = create_response.json()
+
+        inputs = {
+            "input1": {'src': 'hdca', 'id': dataset_collection['id']},
+        }
+
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        create_response = self._run( "identifier_collection", history_id, inputs )
+        self._assert_status_code_is( create_response, 200 )
+        create = create_response.json()
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( outputs ), 1 )
+        output1 = outputs[ 0 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        self.assertEquals( output1_content.strip(), '\n'.join([d['name'] for d in element_identifiers]) )
+
+    @skip_without_tool( "cat1" )
+    def test_map_over_nested_collections( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_id = self.__build_nested_list( history_id )
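+        # Mapping over a list:paired collection runs one job per leaf dataset
+        # (four here) and builds a matching implicit list:paired output.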
+        inputs = {
+            "input1": { 'batch': True, 'values': [ dict( src="hdca", id=hdca_id ) ] },
+        }
+        self._check_simple_cat1_over_nested_collections( history_id, inputs )
+
+    def _check_simple_cat1_over_nested_collections( self, history_id, inputs ):
+        create = self._run_cat1( history_id, inputs=inputs, assert_ok=True )
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        self.assertEquals( len( jobs ), 4 )
+        self.assertEquals( len( outputs ), 4 )
+        self.assertEquals( len( implicit_collections ), 1 )
+        implicit_collection = implicit_collections[ 0 ]
+        self._assert_has_keys( implicit_collection, "collection_type", "elements" )
+        assert implicit_collection[ "collection_type" ] == "list:paired"
+        assert len( implicit_collection[ "elements" ] ) == 2
+        first_element, second_element = implicit_collection[ "elements" ]
+        assert first_element[ "element_identifier" ] == "test0"
+        assert second_element[ "element_identifier" ] == "test1"
+
+        first_object = first_element[ "object" ]
+        assert first_object[ "collection_type" ] == "paired"
+        assert len( first_object[ "elements" ] ) == 2
+        first_object_forward_element = first_object[ "elements" ][ 0 ]
+        self.assertEquals( outputs[ 0 ][ "id" ], first_object_forward_element[ "object" ][ "id" ] )
+
+    @skip_without_tool( "cat1" )
+    def test_map_over_two_collections( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        hdca2_id = self.__build_pair( history_id, [ "789", "0ab" ] )
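+        # Two batched collections of the same shape are matched element-wise,
+        # giving one job per pair of corresponding elements.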
+        inputs = {
+            "input1": { 'batch': True, 'values': [ {'src': 'hdca', 'id': hdca1_id } ] },
+            "queries_0|input2": { 'batch': True, 'values': [ { 'src': 'hdca', 'id': hdca2_id } ] },
+        }
+        self._check_map_cat1_over_two_collections( history_id, inputs )
+
+    def _check_map_cat1_over_two_collections( self, history_id, inputs ):
+        response = self._run_cat1( history_id, inputs )
+        self._assert_status_code_is( response, 200 )
+        response_object = response.json()
+        outputs = response_object[ 'outputs' ]
+        self.assertEquals( len( outputs ), 2 )
+        output1 = outputs[ 0 ]
+        output2 = outputs[ 1 ]
+        self.dataset_populator.wait_for_history( history_id, timeout=25 )
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        self.assertEquals( output1_content.strip(), "123\n789" )
+        self.assertEquals( output2_content.strip(), "456\n0ab" )
+
+        self.assertEquals( len( response_object[ 'jobs' ] ), 2 )
+        self.assertEquals( len( response_object[ 'implicit_collections' ] ), 1 )
+
+    @skip_without_tool( "cat1" )
+    def test_map_over_two_collections_unlinked( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        hdca2_id = self.__build_pair( history_id, [ "789", "0ab" ] )
+        inputs = {
+            "input1": { 'batch': True, 'linked': False, 'values': [ {'src': 'hdca', 'id': hdca1_id } ] },
+            "queries_0|input2": { 'batch': True, 'linked': False, 'values': [ { 'src': 'hdca', 'id': hdca2_id } ] },
+        }
+        response = self._run_cat1( history_id, inputs )
+        self._assert_status_code_is( response, 200 )
+        response_object = response.json()
+        outputs = response_object[ 'outputs' ]
+        self.assertEquals( len( outputs ), 4 )
+
+        self.assertEquals( len( response_object[ 'jobs' ] ), 4 )
+        implicit_collections = response_object[ 'implicit_collections' ]
+        self.assertEquals( len( implicit_collections ), 1 )
+        implicit_collection = implicit_collections[ 0 ]
+        self.assertEquals( implicit_collection[ "collection_type" ], "paired:paired" )
+
+        outer_elements = implicit_collection[ "elements" ]
+        assert len( outer_elements ) == 2
+        element0, element1 = outer_elements
+        assert element0[ "element_identifier" ] == "forward"
+        assert element1[ "element_identifier" ] == "reverse"
+
+        elements0 = element0[ "object" ][ "elements" ]
+        elements1 = element1[ "object" ][ "elements" ]
+
+        assert len( elements0 ) == 2
+        assert len( elements1 ) == 2
+
+        element00, element01 = elements0
+        assert element00[ "element_identifier" ] == "forward"
+        assert element01[ "element_identifier" ] == "reverse"
+
+        element10, element11 = elements1
+        assert element10[ "element_identifier" ] == "forward"
+        assert element11[ "element_identifier" ] == "reverse"
+
+        expected_contents_list = [
+            (element00, "123\n789\n"),
+            (element01, "123\n0ab\n"),
+            (element10, "456\n789\n"),
+            (element11, "456\n0ab\n"),
+        ]
+        for (element, expected_contents) in expected_contents_list:
+            dataset_id = element["object"]["id"]
+            contents = self.dataset_populator.get_history_dataset_content( history_id, dataset_id=dataset_id )
+            self.assertEquals(expected_contents, contents)
+
+    @skip_without_tool( "cat1" )
+    def test_map_over_collected_and_individual_datasets( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        new_dataset1 = self.dataset_populator.new_dataset( history_id, content='789' )
+        new_dataset2 = self.dataset_populator.new_dataset( history_id, content='0ab' )
+
+        inputs = {
+            "input1": { 'batch': True, 'values': [ {'src': 'hdca', 'id': hdca1_id } ] },
+            "queries_0|input2": { 'batch': True, 'values': [ dataset_to_param( new_dataset1 ), dataset_to_param( new_dataset2 ) ] },
+        }
+        response = self._run_cat1( history_id, inputs )
+        self._assert_status_code_is( response, 200 )
+        response_object = response.json()
+        outputs = response_object[ 'outputs' ]
+        self.assertEquals( len( outputs ), 2 )
+
+        self.assertEquals( len( response_object[ 'jobs' ] ), 2 )
+        self.assertEquals( len( response_object[ 'implicit_collections' ] ), 1 )
+
+    @skip_without_tool( "collection_creates_pair" )
+    def test_map_over_collection_output( self ):
+        history_id = self.dataset_populator.new_history()
+        create_response = self.dataset_collection_populator.create_list_in_history( history_id, contents=["a\nb\nc\nd", "e\nf\ng\nh"] )
+        hdca_id = create_response.json()[ "id" ]
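+        # Mapping a tool that itself outputs a pair over a list should nest
+        # the results into an implicit list:paired collection.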
+        inputs = {
+            "input1": { 'batch': True, 'values': [ dict( src="hdca", id=hdca_id ) ] },
+        }
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        create = self._run( "collection_creates_pair", history_id, inputs, assert_ok=True )
+        jobs = create[ 'jobs' ]
+        implicit_collections = create[ 'implicit_collections' ]
+        self.assertEquals( len( jobs ), 2 )
+        self.assertEquals( len( implicit_collections ), 1 )
+        implicit_collection = implicit_collections[ 0 ]
+        assert implicit_collection[ "collection_type" ] == "list:paired", implicit_collection
+        outer_elements = implicit_collection[ "elements" ]
+        assert len( outer_elements ) == 2
+        element0, element1 = outer_elements
+        assert element0[ "element_identifier" ] == "data1"
+        assert element1[ "element_identifier" ] == "data2"
+
+        pair0, pair1 = element0["object"], element1["object"]
+        pair00, pair01 = pair0["elements"]
+        pair10, pair11 = pair1["elements"]
+
+        for pair in pair0, pair1:
+            assert "collection_type" in pair, pair
+            assert pair["collection_type"] == "paired", pair
+
+        pair_ids = []
+        for pair_element in pair00, pair01, pair10, pair11:
+            assert "object" in pair_element
+            pair_ids.append(pair_element["object"]["id"])
+
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        expected_contents = [
+            "a\nc\n",
+            "b\nd\n",
+            "e\ng\n",
+            "f\nh\n",
+        ]
+        for i in range(4):
+            contents = self.dataset_populator.get_history_dataset_content( history_id, dataset_id=pair_ids[i])
+            self.assertEquals(expected_contents[i], contents)
+
+    @skip_without_tool( "cat1" )
+    def test_cannot_map_over_incompatible_collections( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        hdca2_id = self.dataset_collection_populator.create_list_in_history( history_id ).json()[ "id" ]
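+        # A pair and a flat list have incompatible shapes, so a linked batch
+        # over both should be rejected by the server (checked below).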
+        inputs = {
+            "input1": {
+                'batch': True,
+                'values': [ { 'src': 'hdca', 'id': hdca1_id }],
+            },
+            "queries_0|input2": {
+                'batch': True,
+                'values': [ { 'src': 'hdca', 'id': hdca2_id }],
+            },
+        }
+        run_response = self._run_cat1( history_id, inputs )
+        # TODO: Fix this error checking once we switch over to the new API
+        # decorator on the server.
+        assert run_response.status_code >= 400
+
+    @skip_without_tool( "multi_data_param" )
+    def test_reduce_collections_legacy( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        hdca2_id = self.dataset_collection_populator.create_list_in_history( history_id ).json()[ "id" ]
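+        # Legacy string encoding for reducing a whole collection into a
+        # multi-data parameter: "__collection_reduce__|<hdca id>".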
+        inputs = {
+            "f1": "__collection_reduce__|%s" % hdca1_id,
+            "f2": "__collection_reduce__|%s" % hdca2_id,
+        }
+        self._check_simple_reduce_job( history_id, inputs )
+
+    @skip_without_tool( "multi_data_param" )
+    def test_reduce_collections( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        hdca2_id = self.dataset_collection_populator.create_list_in_history( history_id ).json()[ "id" ]
+        inputs = {
+            "f1": { 'src': 'hdca', 'id': hdca1_id },
+            "f2": { 'src': 'hdca', 'id': hdca2_id },
+        }
+        self._check_simple_reduce_job( history_id, inputs )
+
+    @skip_without_tool( "multi_data_repeat" )
+    def test_reduce_collections_in_repeat( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        inputs = {
+            "outer_repeat_0|f1": { 'src': 'hdca', 'id': hdca1_id },
+        }
+        create = self._run( "multi_data_repeat", history_id, inputs, assert_ok=True )
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( outputs ), 1 )
+        output1 = outputs[0]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        assert output1_content.strip() == "123\n456", output1_content
+
+    @skip_without_tool( "multi_data_repeat" )
+    def test_reduce_collections_in_repeat_legacy( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        inputs = {
+            "outer_repeat_0|f1": "__collection_reduce__|%s" % hdca1_id,
+        }
+        create = self._run( "multi_data_repeat", history_id, inputs, assert_ok=True )
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( outputs ), 1 )
+        output1 = outputs[0]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        assert output1_content.strip() == "123\n456", output1_content
+
+    @skip_without_tool( "multi_data_param" )
+    def test_reduce_multiple_lists_on_multi_data( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        hdca2_id = self.dataset_collection_populator.create_list_in_history( history_id ).json()[ "id" ]
+        inputs = {
+            "f1": [{ 'src': 'hdca', 'id': hdca1_id }, { 'src': 'hdca', 'id': hdca2_id }],
+            "f2": [{ 'src': 'hdca', 'id': hdca1_id }],
+        }
+        create = self._run( "multi_data_param", history_id, inputs, assert_ok=True )
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( outputs ), 2 )
+        output1, output2 = outputs
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        self.assertEquals( output1_content.strip(), "123\n456\nTestData123\nTestData123\nTestData123" )
+        self.assertEquals( output2_content.strip(), "123\n456" )
+
+    def _check_simple_reduce_job( self, history_id, inputs ):
+        create = self._run( "multi_data_param", history_id, inputs, assert_ok=True )
+        outputs = create[ 'outputs' ]
+        jobs = create[ 'jobs' ]
+        self.assertEquals( len( jobs ), 1 )
+        self.assertEquals( len( outputs ), 2 )
+        output1, output2 = outputs
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        assert output1_content.strip() == "123\n456"
+        assert len( output2_content.strip().split("\n") ) == 3, output2_content
+
+    @skip_without_tool( "collection_paired_test" )
+    def test_subcollection_mapping( self ):
+        history_id = self.dataset_populator.new_history()
+        hdca_list_id = self.__build_nested_list( history_id )
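+        # 'map_over_type': 'paired' maps over the outer list while passing
+        # each paired subcollection whole to the tool.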
+        inputs = {
+            "f1": {
+                'batch': True,
+                'values': [ { 'src': 'hdca', 'map_over_type': 'paired', 'id': hdca_list_id }],
+            }
+        }
+        self._check_simple_subcollection_mapping( history_id, inputs )
+
+    def _check_simple_subcollection_mapping( self, history_id, inputs ):
+        # The following wait is not strictly needed - it just works around
+        # frequent "database is locked" errors with sqlite.
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        outputs = self._run_and_get_outputs( "collection_paired_test", history_id, inputs )
+        assert len( outputs ) == 2
+        output1 = outputs[ 0 ]
+        output2 = outputs[ 1 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        assert output1_content.strip() == "123\n456", output1_content
+        assert output2_content.strip() == "789\n0ab", output2_content
+
+    @skip_without_tool( "collection_mixed_param" )
+    def test_combined_mapping_and_subcollection_mapping( self ):
+        history_id = self.dataset_populator.new_history()
+        nested_list_id = self.__build_nested_list( history_id )
+        create_response = self.dataset_collection_populator.create_list_in_history( history_id, contents=["xxx", "yyy"] )
+        list_id = create_response.json()[ "id" ]
+        inputs = {
+            "f1": {
+                'batch': True,
+                'values': [ { 'src': 'hdca', 'map_over_type': 'paired', 'id': nested_list_id }],
+            },
+            "f2": {
+                'batch': True,
+                'values': [ { 'src': 'hdca', 'id': list_id }],
+            },
+        }
+        self._check_combined_mapping_and_subcollection_mapping( history_id, inputs )
+
+    def _check_combined_mapping_and_subcollection_mapping( self, history_id, inputs ):
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        outputs = self._run_and_get_outputs( "collection_mixed_param", history_id, inputs )
+        assert len( outputs ) == 2
+        output1 = outputs[ 0 ]
+        output2 = outputs[ 1 ]
+        output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+        output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+        assert output1_content.strip() == "123\n456\nxxx", output1_content
+        assert output2_content.strip() == "789\n0ab\nyyy", output2_content
+
+    def _cat1_outputs( self, history_id, inputs ):
+        return self._run_outputs( self._run_cat1( history_id, inputs ) )
+
+    def _run_and_get_outputs( self, tool_id, history_id, inputs, tool_version=None ):
+        return self._run_outputs( self._run( tool_id, history_id, inputs, tool_version=tool_version ) )
+
+    def _run_outputs( self, create_response ):
+        self._assert_status_code_is( create_response, 200 )
+        return create_response.json()[ 'outputs' ]
+
+    def _run_cat1( self, history_id, inputs, assert_ok=False ):
+        return self._run( 'cat1', history_id, inputs, assert_ok=assert_ok )
+
+    def _run( self, tool_id, history_id, inputs, assert_ok=False, tool_version=None ):
+        payload = self.dataset_populator.run_tool_payload(
+            tool_id=tool_id,
+            inputs=inputs,
+            history_id=history_id,
+        )
+        if tool_version is not None:
+            payload[ "tool_version" ] = tool_version
+        create_response = self._post( "tools", data=payload )
+        if assert_ok:
+            self._assert_status_code_is( create_response, 200 )
+            create = create_response.json()
+            self._assert_has_keys( create, 'outputs' )
+            return create
+        else:
+            return create_response
+
+    def _upload( self, content, **upload_kwds ):
+        history_id = self.dataset_populator.new_history()
+        new_dataset = self.dataset_populator.new_dataset( history_id, content=content, **upload_kwds )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        return history_id, new_dataset
+
+    def _upload_and_get_content( self, content, **upload_kwds ):
+        history_id, new_dataset = self._upload( content, **upload_kwds )
+        return self.dataset_populator.get_history_dataset_content( history_id, dataset=new_dataset )
+
+    def _upload_and_get_details( self, content, **upload_kwds ):
+        history_id, new_dataset = self._upload( content, **upload_kwds )
+        return self.dataset_populator.get_history_dataset_details( history_id, dataset=new_dataset )
+
+    def __tool_ids( self ):
+        index = self._get( "tools" )
+        tools_index = index.json()
+        # Tools are returned grouped into panel sections by default, so flatten the sections out...
+        tools = []
+        for tool_or_section in tools_index:
+            if "elems" in tool_or_section:
+                tools.extend( tool_or_section[ "elems" ] )
+            else:
+                tools.append( tool_or_section )
+
+        tool_ids = [_["id"] for _ in tools]
+        return tool_ids
+
+    def __build_nested_list( self, history_id ):
+        hdca1_id = self.__build_pair( history_id, [ "123", "456" ] )
+        hdca2_id = self.__build_pair( history_id, [ "789", "0ab" ] )
+
+        response = self.dataset_collection_populator.create_list_from_pairs( history_id, [ hdca1_id, hdca2_id ] )
+        self._assert_status_code_is( response, 200 )
+        hdca_list_id = response.json()[ "id" ]
+        return hdca_list_id
+
+    def __build_pair( self, history_id, contents ):
+        create_response = self.dataset_collection_populator.create_pair_in_history( history_id, contents=contents )
+        hdca_id = create_response.json()[ "id" ]
+        return hdca_id
+
+
+def dataset_to_param( dataset ):
+    return dict(
+        src='hda',
+        id=dataset[ 'id' ]
+    )
diff --git a/test/api/test_tours.py b/test/api/test_tours.py
new file mode 100644
index 0000000..9e94a43
--- /dev/null
+++ b/test/api/test_tours.py
@@ -0,0 +1,17 @@
+from base import api
+
+
+class TourApiTestCase( api.ApiTestCase ):
+
+    def test_index(self):
+        response = self._get( "tours" )
+        self._assert_status_code_is( response, 200 )
+        tours = response.json()
+        tour_keys = [t["id"] for t in tours]
+        assert "core.history" in tour_keys
+
+    def test_show(self):
+        response = self._get( "tours/core.history" )
+        self._assert_status_code_is( response, 200 )
+        tour = response.json()
+        self._assert_has_keys(tour, "name", "description", "title_default", "steps")
diff --git a/test/api/test_users.py b/test/api/test_users.py
new file mode 100644
index 0000000..9a7a142
--- /dev/null
+++ b/test/api/test_users.py
@@ -0,0 +1,85 @@
+import json
+
+from requests import put
+
+from base import api
+
+TEST_USER_EMAIL = "user_for_users_index_test@bx.psu.edu"
+
+
+class UsersApiTestCase( api.ApiTestCase ):
+
+    def test_index( self ):
+        self._setup_user( TEST_USER_EMAIL )
+        all_users_response = self._get( "users", admin=True )
+        self._assert_status_code_is( all_users_response, 200 )
+        all_users = all_users_response.json()
+        # New user is in list
+        assert len( [ u for u in all_users if u[ "email" ] == TEST_USER_EMAIL ] ) == 1
+        # Request made from admin user, so the list should contain at least
+        # the admin user itself and this new user.
+        assert len( all_users ) > 1
+
+    def test_index_only_self_for_nonadmins( self ):
+        self._setup_user( TEST_USER_EMAIL )
+        with self._different_user():
+            all_users_response = self._get( "users" )
+            # Non-admin users can only see themselves
+            assert len( all_users_response.json() ) == 1
+
+    def test_show( self ):
+        user = self._setup_user( TEST_USER_EMAIL )
+        with self._different_user( email=TEST_USER_EMAIL ):
+            show_response = self.__show( user )
+            self._assert_status_code_is( show_response, 200 )
+            self.__assert_matches_user( user, show_response.json() )
+
+    def test_update( self ):
+        new_name = 'linnaeus'
+        user = self._setup_user( TEST_USER_EMAIL )
+        not_the_user = self._setup_user( 'email@example.com' )
+        with self._different_user( email=TEST_USER_EMAIL ):
+
+            # working
+            update_response = self.__update( user, username=new_name )
+            self._assert_status_code_is( update_response, 200 )
+            update_json = update_response.json()
+            self.assertEqual( update_json[ 'username' ], new_name )
+
+            # too short
+            update_response = self.__update( user, username='mu' )
+            self._assert_status_code_is( update_response, 400 )
+
+            # not them
+            update_response = self.__update( not_the_user, username=new_name )
+            self._assert_status_code_is( update_response, 403 )
+
+            # non-existent
+            no_user_id = self.security.encode_id( 100 )
+            update_url = self._api_url( "users/%s" % ( no_user_id ), use_key=True )
+            update_response = put( update_url, data=json.dumps( dict( username=new_name ) ) )
+            self._assert_status_code_is( update_response, 404 )
+
+    def test_admin_update( self ):
+        new_name = 'flexo'
+        user = self._setup_user( TEST_USER_EMAIL )
+
+        update_url = self._api_url( "users/%s" % ( user[ "id" ] ), params=dict( key=self.master_api_key ) )
+        update_response = put( update_url, data=json.dumps( dict( username=new_name ) ) )
+        self._assert_status_code_is( update_response, 200 )
+        update_json = update_response.json()
+        self.assertEqual( update_json[ 'username' ], new_name )
+
+    def __show( self, user ):
+        return self._get( "users/%s" % ( user[ 'id' ] ) )
+
+    def __update( self, user, **new_data ):
+        update_url = self._api_url( "users/%s" % ( user[ "id" ] ), use_key=True )
+        # TODO: Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6
+        body = json.dumps( new_data )
+        return put( update_url, data=body )
+
+    def __assert_matches_user( self, userA, userB ):
+        self._assert_has_keys( userB, "id", "username", "total_disk_usage" )
+        assert userA[ "id" ] == userB[ "id" ]
+        assert userA[ "username" ] == userB[ "username" ]
diff --git a/test/api/test_workflow_1.ga b/test/api/test_workflow_1.ga
new file mode 100644
index 0000000..1f06969
--- /dev/null
+++ b/test/api/test_workflow_1.ga
@@ -0,0 +1,87 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "simple workflow",
+    "format-version": "0.1", 
+    "name": "TestWorkflow1", 
+    "steps": {
+        "0": {
+            "annotation": "input1 description", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "input1 description", 
+                    "name": "WorkflowInput1"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 199.55555772781372, 
+                "top": 200.66666460037231
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"WorkflowInput1\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "WorkflowInput2"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 206.22221422195435, 
+                "top": 327.33335161209106
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"WorkflowInput2\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {
+                "input1": {
+                    "id": 0, 
+                    "output_name": "output"
+                }, 
+                "queries_0|input2": {
+                    "id": 1, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 419.33335876464844, 
+                "top": 200.44446563720703
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central/tool-data/shared/ucsc/chrom/?.len\\\"\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_2.ga b/test/api/test_workflow_2.ga
new file mode 100644
index 0000000..f297d7d
--- /dev/null
+++ b/test/api/test_workflow_2.ga
@@ -0,0 +1,92 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "random_lines_x2", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Input Dataset"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 10, 
+                "top": 10
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Input Dataset\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "uuid": "58dffcc9-bcb7-4117-a0e1-61513524b3b0",
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {
+                "input": {
+                    "id": 0, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Select random lines", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 230, 
+                "top": 10
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "random_lines1", 
+            "tool_state": "{\"__page__\": 0, \"num_lines\": \"\\\"8\\\"\", \"seed_source\": \"{\\\"__current_case__\\\": 0, \\\"seed_source_selector\\\": \\\"no_seed\\\"}\", \"input\": \"null\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central-workflows-params/tool-data/shared/ucsc/chrom/?.len\\\"\", \"__rerun_remap_job_id__\": null}", 
+            "tool_version": null, 
+            "type": "tool", 
+            "uuid": "58dffcc9-bcb7-4117-a0e1-61513524b3b1",
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {
+                "input": {
+                    "id": 1, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Select random lines", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 450, 
+                "top": 10
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "random_lines1", 
+            "tool_state": "{\"__page__\": 0, \"num_lines\": \"\\\"6\\\"\", \"seed_source\": \"{\\\"__current_case__\\\": 0, \\\"seed_source_selector\\\": \\\"no_seed\\\"}\", \"input\": \"null\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central-workflows-params/tool-data/shared/ucsc/chrom/?.len\\\"\", \"__rerun_remap_job_id__\": null}", 
+            "tool_version": null, 
+            "type": "tool", 
+            "uuid": "58dffcc9-bcb7-4117-a0e1-61513524b3b2",
+            "user_outputs": []
+        }
+    }
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_batch.ga b/test/api/test_workflow_batch.ga
new file mode 100644
index 0000000..3c279c7
--- /dev/null
+++ b/test/api/test_workflow_batch.ga
@@ -0,0 +1,145 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "test", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "content_id": null, 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Input Dataset"
+                }
+            ], 
+            "label": null, 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 200, 
+                "top": 200
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Input Dataset\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "uuid": "ef60789e-60fd-4c5a-baa5-598aeac0b5dc", 
+            "workflow_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "content_id": "addValue", 
+            "id": 1, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Add column", 
+                    "name": "input"
+                }
+            ], 
+            "label": null, 
+            "name": "Add column", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 204, 
+                "top": 319
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "addValue", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"__job_resource\": \"{\\\"__current_case__\\\": 0, \\\"__job_resource__select\\\": \\\"no\\\"}\", \"exp\": \"\\\"1\\\"\", \"iterate\": \"\\\"no\\\"\", \"input\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "uuid": "1daceb2a-719c-49a6-881c-5301076de918", 
+            "workflow_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "content_id": "addValue", 
+            "id": 2, 
+            "input_connections": {
+                "input": {
+                    "id": 0, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Add column", 
+                    "name": "input"
+                }
+            ], 
+            "label": null, 
+            "name": "Add column", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 418, 
+                "top": 234
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "addValue", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"__job_resource\": \"{\\\"__current_case__\\\": 0, \\\"__job_resource__select\\\": \\\"no\\\"}\", \"exp\": \"\\\"1\\\"\", \"iterate\": \"\\\"no\\\"\", \"input\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "uuid": "f5349127-d008-44b2-a41e-af024de92d2e", 
+            "workflow_outputs": []
+        }, 
+        "3": {
+            "annotation": "", 
+            "content_id": "cat1", 
+            "id": 3, 
+            "input_connections": {
+                "input1": {
+                    "id": 2, 
+                    "output_name": "out_file1"
+                }, 
+                "queries_0|input2": {
+                    "id": 1, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Concatenate datasets", 
+                    "name": "input1"
+                }
+            ], 
+            "label": null, 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 389.5, 
+                "top": 396
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__job_resource\": \"{\\\"__current_case__\\\": 0, \\\"__job_resource__select\\\": \\\"no\\\"}\", \"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"queries\": \"[{\\\"input2\\\": {\\\"__class__\\\": \\\"RuntimeValue\\\"}, \\\"__index__\\\": 0}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "uuid": "32205465-a47e-4d8a-aa45-2560b1a38f54", 
+            "workflow_outputs": []
+        }
+    }, 
+    "uuid": "9f791470-2fca-4f63-aa18-72ae0211b077"
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_extraction.py b/test/api/test_workflow_extraction.py
new file mode 100644
index 0000000..5386439
--- /dev/null
+++ b/test/api/test_workflow_extraction.py
@@ -0,0 +1,470 @@
+from __future__ import print_function
+
+import functools
+import operator
+from collections import namedtuple
+from json import dumps, loads
+
+from .helpers import skip_without_tool
+from .test_workflows import BaseWorkflowsApiTestCase
+
+
+class WorkflowExtractionApiTestCase( BaseWorkflowsApiTestCase ):
+
+    def setUp( self ):
+        super( WorkflowExtractionApiTestCase, self ).setUp()
+        self.history_id = self.dataset_populator.new_history()
+
+    @skip_without_tool( "cat1" )
+    def test_extract_from_history( self ):
+        # Run the simple test workflow and extract it back out from history
+        cat1_job_id = self.__setup_and_run_cat1_workflow( history_id=self.history_id )
+        contents = self._history_contents()
+        input_hids = [c[ "hid" ] for c in contents[ 0:2 ]]
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_ids=input_hids,
+            job_ids=[ cat1_job_id ],
+        )
+        self.assertEqual( downloaded_workflow[ "name" ], "test import from history" )
+        self.__assert_looks_like_cat1_example_workflow( downloaded_workflow )
+
+    def test_extract_with_copied_inputs( self ):
+        old_history_id = self.dataset_populator.new_history()
+        # Run the simple test workflow and extract it back out from history
+        self.__setup_and_run_cat1_workflow( history_id=old_history_id )
+
+        # Bug: we cannot mess up the hids here or these don't extract correctly. See Trello card:
+        # https://trello.com/c/mKzLbM2P
+        # # create dummy dataset to complicate hid mapping
+        # self.dataset_populator.new_dataset( history_id, content="dummydataset" )
+        # offset = 1
+
+        offset = 0
+        old_contents = self._history_contents( old_history_id )
+        for old_dataset in old_contents:
+            self.__copy_content_to_history( self.history_id, old_dataset )
+        new_contents = self._history_contents()
+        input_hids = [c[ "hid" ] for c in new_contents[ (offset + 0):(offset + 2) ]]
+        cat1_job_id = self.__job_id( self.history_id, new_contents[ (offset + 2) ][ "id" ] )
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_ids=input_hids,
+            job_ids=[ cat1_job_id ],
+        )
+        self.__assert_looks_like_cat1_example_workflow( downloaded_workflow )
+
+    @skip_without_tool( "random_lines1" )
+    def test_extract_mapping_workflow_from_history( self ):
+        hdca, job_id1, job_id2 = self.__run_random_lines_mapped_over_pair( self.history_id )
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_collection_ids=[ hdca[ "hid" ] ],
+            job_ids=[ job_id1, job_id2 ],
+        )
+        self.__assert_looks_like_randomlines_mapping_workflow( downloaded_workflow )
+
+    def test_extract_copied_mapping_from_history( self ):
+        old_history_id = self.dataset_populator.new_history()
+        hdca, job_id1, job_id2 = self.__run_random_lines_mapped_over_pair( old_history_id )
+
+        old_contents = self._history_contents( old_history_id )
+        for old_content in old_contents:
+            self.__copy_content_to_history( self.history_id, old_content )
+        # This API test is somewhat contrived, since I don't think there is a
+        # good way to retrieve job_id1 and job_id2 like this for copied
+        # dataset collections.
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_collection_ids=[ hdca[ "hid" ] ],
+            job_ids=[ job_id1, job_id2 ],
+        )
+        self.__assert_looks_like_randomlines_mapping_workflow( downloaded_workflow )
+
+    @skip_without_tool( "random_lines1" )
+    @skip_without_tool( "multi_data_param" )
+    def test_extract_reduction_from_history( self ):
+        hdca = self.dataset_collection_populator.create_pair_in_history( self.history_id, contents=["1 2 3\n4 5 6", "7 8 9\n10 11 10"] ).json()
+        hdca_id = hdca[ "id" ]
+        inputs1 = {
+            "input": { "batch": True, "values": [ { "src": "hdca", "id": hdca_id } ] },
+            "num_lines": 2
+        }
+        implicit_hdca1, job_id1 = self._run_tool_get_collection_and_job_id( self.history_id, "random_lines1", inputs1 )
+        inputs2 = {
+            "f1": { "src": "hdca", "id": implicit_hdca1[ "id" ] },
+            "f2": { "src": "hdca", "id": implicit_hdca1[ "id" ] },
+        }
+        reduction_run_output = self.dataset_populator.run_tool(
+            tool_id="multi_data_param",
+            inputs=inputs2,
+            history_id=self.history_id,
+        )
+        job_id2 = reduction_run_output[ "jobs" ][ 0 ][ "id" ]
+        self.dataset_populator.wait_for_history( self.history_id, assert_ok=True, timeout=20 )
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_collection_ids=[ hdca[ "hid" ] ],
+            job_ids=[ job_id1, job_id2 ],
+        )
+        assert len( downloaded_workflow[ "steps" ] ) == 3
+        collect_step_idx = self._assert_first_step_is_paired_input( downloaded_workflow )
+        tool_steps = self._get_steps_of_type( downloaded_workflow, "tool", expected_len=2 )
+        random_lines_map_step = tool_steps[ 0 ]
+        reduction_step = tool_steps[ 1 ]
+        random_lines_input = random_lines_map_step[ "input_connections" ][ "input" ]
+        assert random_lines_input[ "id" ] == collect_step_idx
+        reduction_step_input = reduction_step[ "input_connections" ][ "f1" ]
+        assert reduction_step_input[ "id" ] == random_lines_map_step[ "id" ]
+
+    @skip_without_tool( "collection_paired_test" )
+    def test_extract_workflows_with_dataset_collections( self ):
+        jobs_summary = self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+  - label: text_input1
+    type: input_collection
+  - tool_id: collection_paired_test
+    state:
+      f1:
+        $link: text_input1
+test_data:
+  text_input1:
+    type: paired
+""")
+        job_id = self._job_id_for_tool( jobs_summary.jobs, "collection_paired_test" )
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_collection_ids=[ jobs_summary.inputs["text_input1"]["hid"] ],
+            job_ids=[ job_id ],
+        )
+        self.__check_workflow(
+            downloaded_workflow,
+            step_count=2,
+            verify_connected=True,
+            data_input_count=0,
+            data_collection_input_count=1,
+            tool_ids=["collection_paired_test"]
+        )
+
+        collection_step = self._get_steps_of_type( downloaded_workflow, "data_collection_input", expected_len=1 )[ 0 ]
+        collection_step_state = loads( collection_step[ "tool_state" ] )
+        self.assertEqual( collection_step_state[ "collection_type" ], "paired" )
+
+    @skip_without_tool( "cat_collection" )
+    def test_subcollection_mapping( self ):
+        jobs_summary = self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+  - label: text_input1
+    type: input_collection
+  - label: noop
+    tool_id: cat1
+    state:
+      input1:
+        $link: text_input1
+  - tool_id: cat_collection
+    state:
+      input1:
+        $link: noop#out_file1
+test_data:
+  text_input1:
+    type: "list:paired"
+        """)
+        job1_id = self._job_id_for_tool( jobs_summary.jobs, "cat1" )
+        job2_id = self._job_id_for_tool( jobs_summary.jobs, "cat_collection" )
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_collection_ids=[ jobs_summary.inputs["text_input1"]["hid"] ],
+            job_ids=[ job1_id, job2_id ],
+        )
+        self.__check_workflow(
+            downloaded_workflow,
+            step_count=3,
+            verify_connected=True,
+            data_input_count=0,
+            data_collection_input_count=1,
+            tool_ids=["cat_collection", "cat1"],
+        )
+
+        collection_step = self._get_steps_of_type( downloaded_workflow, "data_collection_input", expected_len=1 )[ 0 ]
+        collection_step_state = loads( collection_step[ "tool_state" ] )
+        self.assertEqual( collection_step_state[ "collection_type" ], "list:paired" )
+
+    @skip_without_tool( "collection_split_on_column" )
+    def test_extract_workflow_with_output_collections( self ):
+        jobs_summary = self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+  - label: text_input1
+    type: input
+  - label: text_input2
+    type: input
+  - label: cat_inputs
+    tool_id: cat1
+    state:
+      input1:
+        $link: text_input1
+      queries:
+        - input2:
+            $link: text_input2
+  - label: split_up
+    tool_id: collection_split_on_column
+    state:
+      input1:
+        $link: cat_inputs#out_file1
+  - tool_id: cat_list
+    state:
+      input1:
+        $link: split_up#split_output
+test_data:
+  text_input1: "samp1\t10.0\nsamp2\t20.0\n"
+  text_input2: "samp1\t30.0\nsamp2\t40.0\n"
+""")
+        tool_ids = [ "cat1", "collection_split_on_column", "cat_list" ]
+        job_ids = [ self._job_id_for_tool( jobs_summary.jobs, tool_id ) for tool_id in tool_ids ]
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_ids=[ "1", "2" ],
+            job_ids=job_ids,
+        )
+        self.__check_workflow(
+            downloaded_workflow,
+            step_count=5,
+            verify_connected=True,
+            data_input_count=2,
+            data_collection_input_count=0,
+            tool_ids=tool_ids,
+        )
+
+    @skip_without_tool( "collection_creates_pair" )
+    def test_extract_with_mapped_output_collections( self ):
+        jobs_summary = self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+  - label: text_input1
+    type: input_collection
+  - label: cat_inputs
+    tool_id: cat1
+    state:
+      input1:
+        $link: text_input1
+  - label: pair_off
+    tool_id: collection_creates_pair
+    state:
+      input1:
+        $link: cat_inputs#out_file1
+  - label: cat_pairs
+    tool_id: cat_collection
+    state:
+      input1:
+        $link: pair_off#paired_output
+  - tool_id: cat_list
+    state:
+      input1:
+        $link: cat_pairs#out_file1
+test_data:
+  text_input1:
+    type: list
+    elements:
+      - identifier: samp1
+        content: "samp1\t10.0\nsamp2\t20.0\n"
+      - identifier: samp2
+        content: "samp1\t30.0\nsamp2\t40.0\n"
+""")
+        tool_ids = [ "cat1", "collection_creates_pair", "cat_collection", "cat_list" ]
+        job_ids = [ self._job_id_for_tool( jobs_summary.jobs, tool_id ) for tool_id in tool_ids ]
+        downloaded_workflow = self._extract_and_download_workflow(
+            dataset_collection_ids=[ "3" ],
+            job_ids=job_ids,
+        )
+        self.__check_workflow(
+            downloaded_workflow,
+            step_count=5,
+            verify_connected=True,
+            data_input_count=0,
+            data_collection_input_count=1,
+            tool_ids=tool_ids,
+        )
+
+    def _job_id_for_tool( self, jobs, tool_id ):
+        return self._job_for_tool( jobs, tool_id )[ "id" ]
+
+    def _job_for_tool( self, jobs, tool_id ):
+        tool_jobs = [j for j in jobs if j["tool_id"] == tool_id]
+        if not tool_jobs:
+            assert False, "Failed to find job for tool %s" % tool_id
+        # if len( tool_jobs ) > 1:
+        #     assert False, "Found multiple jobs for tool %s" % tool_id
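+        # More than one job may match (see the disabled assertion above);
+        # return the most recent one.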
+        return tool_jobs[ -1 ]
+
+    def __run_random_lines_mapped_over_pair( self, history_id ):
+        hdca = self.dataset_collection_populator.create_pair_in_history( history_id, contents=["1 2 3\n4 5 6", "7 8 9\n10 11 10"] ).json()
+        hdca_id = hdca[ "id" ]
+        inputs1 = {
+            "input": { "batch": True, "values": [ { "src": "hdca", "id": hdca_id } ] },
+            "num_lines": 2
+        }
+        implicit_hdca1, job_id1 = self._run_tool_get_collection_and_job_id( history_id, "random_lines1", inputs1 )
+        inputs2 = {
+            "input": { "batch": True, "values": [ { "src": "hdca", "id": implicit_hdca1[ "id" ] } ] },
+            "num_lines": 1
+        }
+        _, job_id2 = self._run_tool_get_collection_and_job_id( history_id, "random_lines1", inputs2 )
+        return hdca, job_id1, job_id2
+
+    def __assert_looks_like_randomlines_mapping_workflow( self, downloaded_workflow ):
+        # Assert the workflow is an input connected to a tool step with one
+        # output that is connected to another tool step.
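+        # Expected topology (sketch, matching the assertions below):
+        #
+        #     paired collection input -> random_lines1 -> random_lines1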
+        assert len( downloaded_workflow[ "steps" ] ) == 3
+        collect_step_idx = self._assert_first_step_is_paired_input( downloaded_workflow )
+        tool_steps = self._get_steps_of_type( downloaded_workflow, "tool", expected_len=2 )
+        tool_step_idxs = []
+        tool_input_step_idxs = []
+        for tool_step in tool_steps:
+            self._assert_has_key( tool_step[ "input_connections" ], "input" )
+            input_step_idx = tool_step[ "input_connections" ][ "input" ][ "id" ]
+            tool_step_idxs.append( tool_step[ "id" ] )
+            tool_input_step_idxs.append( input_step_idx )
+
+        assert collect_step_idx not in tool_step_idxs
+        assert tool_input_step_idxs[ 0 ] == collect_step_idx
+        assert tool_input_step_idxs[ 1 ] == tool_step_idxs[ 0 ]
+
+    def __assert_looks_like_cat1_example_workflow( self, downloaded_workflow ):
+        assert len( downloaded_workflow[ "steps" ] ) == 3
+        input_steps = self._get_steps_of_type( downloaded_workflow, "data_input", expected_len=2 )
+        tool_step = self._get_steps_of_type( downloaded_workflow, "tool", expected_len=1 )[ 0 ]
+
+        input1 = tool_step[ "input_connections" ][ "input1" ]
+        input2 = tool_step[ "input_connections" ][ "queries_0|input2" ]
+
+        self.assertEqual( input_steps[ 0 ][ "id" ], input1[ "id" ] )
+        self.assertEqual( input_steps[ 1 ][ "id" ], input2[ "id" ] )
+
+    def _history_contents( self, history_id=None ):
+        if history_id is None:
+            history_id = self.history_id
+        return self._get( "histories/%s/contents" % history_id ).json()
+
+    def __copy_content_to_history( self, history_id, content ):
+        if content[ "history_content_type" ] == "dataset":
+            payload = dict(
+                source="hda",
+                content=content["id"]
+            )
+            response = self._post( "histories/%s/contents/datasets" % history_id, payload )
+
+        else:
+            payload = dict(
+                source="hdca",
+                content=content["id"]
+            )
+            response = self._post( "histories/%s/contents/dataset_collections" % history_id, payload )
+        self._assert_status_code_is( response, 200 )
+        return response.json()
+
+    def __setup_and_run_cat1_workflow( self, history_id ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_extract" )
+        workflow_request, history_id = self._setup_workflow_run( workflow, history_id=history_id )
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        return self.__cat_job_id( history_id )
+
+    def _assert_first_step_is_paired_input( self, downloaded_workflow ):
+        collection_steps = self._get_steps_of_type( downloaded_workflow, "data_collection_input", expected_len=1 )
+        collection_step = collection_steps[ 0 ]
+        collection_step_state = loads( collection_step[ "tool_state" ] )
+        self.assertEqual( collection_step_state[ "collection_type" ], "paired" )
+        collect_step_idx = collection_step[ "id" ]
+        return collect_step_idx
+
+    def _extract_and_download_workflow( self, **extract_payload ):
+        if "from_history_id" not in extract_payload:
+            extract_payload[ "from_history_id" ] = self.history_id
+
+        if "workflow_name" not in extract_payload:
+            extract_payload[ "workflow_name" ] = "test import from history"
+
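+        # The id lists are JSON-encoded strings here, presumably because the
+        # extraction payload is posted as form data rather than JSON.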
+        for key in "job_ids", "dataset_ids", "dataset_collection_ids":
+            if key in extract_payload:
+                value = extract_payload[ key ]
+                if isinstance(value, list):
+                    extract_payload[ key ] = dumps( value )
+
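+        # Illustrative only: the normalized payload might look like this
+        # (the ids below are hypothetical):
+        #
+        #     {
+        #         "from_history_id": "f2db41e1fa331b3e",
+        #         "workflow_name": "test import from history",
+        #         "job_ids": '["d413a19dec13d11e"]',
+        #     }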
+        create_workflow_response = self._post( "workflows", data=extract_payload )
+        self._assert_status_code_is( create_workflow_response, 200 )
+
+        new_workflow_id = create_workflow_response.json()[ "id" ]
+        download_response = self._get( "workflows/%s/download" % new_workflow_id )
+        self._assert_status_code_is( download_response, 200 )
+        downloaded_workflow = download_response.json()
+        return downloaded_workflow
+
+    def _get_steps_of_type( self, downloaded_workflow, step_type, expected_len=None ):
+        steps = [ s for s in downloaded_workflow[ "steps" ].values() if s[ "type" ] == step_type ]
+        if expected_len is not None:
+            n = len( steps )
+            assert n == expected_len, "Expected %d steps of type %s, found %d" % ( expected_len, step_type, n )
+        return sorted( steps, key=operator.itemgetter("id") )
+
+    def __job_id( self, history_id, dataset_id ):
+        url = "histories/%s/contents/%s/provenance" % ( history_id, dataset_id )
+        prov_response = self._get( url, data=dict( follow=False ) )
+        self._assert_status_code_is( prov_response, 200 )
+        return prov_response.json()[ "job_id" ]
+
+    def __cat_job_id( self, history_id ):
+        data = dict( history_id=history_id, tool_id="cat1" )
+        jobs_response = self._get( "jobs", data=data )
+        self._assert_status_code_is( jobs_response, 200 )
+        cat1_job_id = jobs_response.json()[ 0 ][ "id" ]
+        return cat1_job_id
+
+    def _run_tool_get_collection_and_job_id( self, history_id, tool_id, inputs ):
+        run_output1 = self.dataset_populator.run_tool(
+            tool_id=tool_id,
+            inputs=inputs,
+            history_id=history_id,
+        )
+        implicit_hdca = run_output1[ "implicit_collections" ][ 0 ]
+        job_id = run_output1[ "jobs" ][ 0 ][ "id" ]
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True, timeout=20 )
+        return implicit_hdca, job_id
+
+    def __check_workflow(
+        self,
+        workflow,
+        step_count=None,
+        verify_connected=False,
+        data_input_count=None,
+        data_collection_input_count=None,
+        tool_ids=None,
+    ):
+        steps = workflow[ 'steps' ]
+
+        if step_count is not None:
+            assert len( steps ) == step_count
+        if verify_connected:
+            self.__assert_connected( workflow, steps )
+        if tool_ids is not None:
+            tool_steps = self._get_steps_of_type( workflow, "tool" )
+            found_steps = set(map(operator.itemgetter("tool_id"), tool_steps))
+            expected_steps = set(tool_ids)
+            assert found_steps == expected_steps
+        if data_input_count is not None:
+            self._get_steps_of_type( workflow, "data_input", expected_len=data_input_count )
+        if data_collection_input_count is not None:
+            self._get_steps_of_type( workflow, "data_collection_input", expected_len=data_collection_input_count )
+
+    def __assert_connected( self, workflow, steps ):
+        disconnected_inputs = []
+
+        for step in steps.values():
+            if step[ 'type' ] == "tool":
+                input_connections = step[ "input_connections" ]
+                if not input_connections:
+                    disconnected_inputs.append( step )
+
+        if disconnected_inputs:
+            template = "%d step(s) disconnected in extracted workflow - disconnected steps are %s - workflow is %s"
+            message = template % ( len( disconnected_inputs ), disconnected_inputs, workflow )
+            raise AssertionError( message )
+
+
+RunJobsSummary = namedtuple('RunJobsSummary', ['history_id', 'workflow_id', 'inputs', 'jobs'])
diff --git a/test/api/test_workflow_map_reduce_pause.ga b/test/api/test_workflow_map_reduce_pause.ga
new file mode 100644
index 0000000..b9a6d0e
--- /dev/null
+++ b/test/api/test_workflow_map_reduce_pause.ga
@@ -0,0 +1,198 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "map_reduce_pause", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Input Dataset"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 172.83680772781372, 
+                "top": 200.96180772781372
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Input Dataset\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Input Dataset Collection"
+                }
+            ], 
+            "name": "Input dataset collection", 
+            "outputs": [], 
+            "position": {
+                "left": 161.46528673171997, 
+                "top": 422.5764012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"collection_type\": \"list\", \"name\": \"Input Dataset Collection\"}", 
+            "tool_version": null, 
+            "type": "data_collection_input", 
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {
+                "input": {
+                    "id": 0, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Select first", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 410.9444432258606, 
+                "top": 195.05903673171997
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "head", 
+            "tool_state": "{\"__job_resource\": \"{\\\"__job_resource__select\\\": \\\"no\\\", \\\"__current_case__\\\": 0}\", \"input\": \"null\", \"__page__\": 0, \"__rerun_remap_job_id__\": null, \"lineNum\": \"\\\"1\\\"\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "3": {
+            "annotation": "", 
+            "id": 3, 
+            "input_connections": {
+                "input": {
+                    "id": 1, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Select first", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 632.9756932258606, 
+                "top": 360.57988023757935
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "head", 
+            "tool_state": "{\"__job_resource\": \"{\\\"__job_resource__select\\\": \\\"no\\\", \\\"__current_case__\\\": 0}\", \"input\": \"null\", \"__page__\": 0, \"__rerun_remap_job_id__\": null, \"lineNum\": \"\\\"1\\\"\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "4": {
+            "annotation": "", 
+            "id": 4, 
+            "input_connections": {
+                "input": {
+                    "id": 2, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Pause for dataset review", 
+            "outputs": [], 
+            "position": {
+                "left": 657.0903172492981, 
+                "top": 197.71528673171997
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Pause for Dataset Review\"}", 
+            "tool_version": null, 
+            "type": "pause", 
+            "user_outputs": []
+        }, 
+        "5": {
+            "annotation": "", 
+            "id": 5, 
+            "input_connections": {
+                "input1": {
+                    "id": 4, 
+                    "output_name": "output"
+                }, 
+                "queries_0|input2": {
+                    "id": 3, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets (for test workflows)", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 977.8889012336731, 
+                "top": 228.01042222976685
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat", 
+            "tool_state": "{\"__job_resource\": \"{\\\"__job_resource__select\\\": \\\"no\\\", \\\"__current_case__\\\": 0}\", \"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "6": {
+            "annotation": "", 
+            "id": 6, 
+            "input_connections": {
+                "input1": {
+                    "id": 5, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate multiple datasets (for test workflows)", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 1342.545150756836, 
+                "top": 233.55210876464844
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat_list", 
+            "tool_state": "{\"__job_resource\": \"{\\\"__job_resource__select\\\": \\\"no\\\", \\\"__current_case__\\\": 0}\", \"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }, 
+    "uuid": "a27044e7-2efe-4661-9a09-00708d3520f1"
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_matching_lists.ga b/test/api/test_workflow_matching_lists.ga
new file mode 100644
index 0000000..ab82dc2
--- /dev/null
+++ b/test/api/test_workflow_matching_lists.ga
@@ -0,0 +1,117 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "test_workflow_matching_lists", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "list1"
+                }
+            ], 
+            "name": "Input dataset collection", 
+            "outputs": [], 
+            "position": {
+                "left": 139.833336353302, 
+                "top": 162.33334398269653
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"collection_type\": \"list\", \"name\": \"list1\"}", 
+            "tool_version": null, 
+            "type": "data_collection_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "list2"
+                }
+            ], 
+            "name": "Input dataset collection", 
+            "outputs": [], 
+            "position": {
+                "left": 141.864586353302, 
+                "top": 272.3680577278137
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"collection_type\": \"list\", \"name\": \"list2\"}", 
+            "tool_version": null, 
+            "type": "data_collection_input", 
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {
+                "input1": {
+                    "id": 0, 
+                    "output_name": "output"
+                }, 
+                "queries_0|input2": {
+                    "id": 1, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets (for test workflows)", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 453.40974473953247, 
+                "top": 203.4097294807434
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "3": {
+            "annotation": "", 
+            "id": 3, 
+            "input_connections": {
+                "input1": {
+                    "id": 2, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate dataset list (for test workflows)", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 828.93061876297, 
+                "top": 217.4201512336731
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat_list", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }, 
+    "uuid": "54aadd3b-9d2b-436a-acfa-246a8c251651"
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_missing_tool.ga b/test/api/test_workflow_missing_tool.ga
new file mode 100644
index 0000000..77812f7
--- /dev/null
+++ b/test/api/test_workflow_missing_tool.ga
@@ -0,0 +1,87 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "simple workflow",
+    "format-version": "0.1", 
+    "name": "TestWorkflow1", 
+    "steps": {
+        "0": {
+            "annotation": "input1 description", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "input1 description", 
+                    "name": "WorkflowInput1"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 199.55555772781372, 
+                "top": 200.66666460037231
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"WorkflowInput1\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "WorkflowInput2"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 206.22221422195435, 
+                "top": 327.33335161209106
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"WorkflowInput2\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {
+                "input1": {
+                    "id": 0, 
+                    "output_name": "output"
+                }, 
+                "queries_0|input2": {
+                    "id": 1, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 419.33335876464844, 
+                "top": 200.44446563720703
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat_missing_tool", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central/tool-data/shared/ucsc/chrom/?.len\\\"\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_pause.ga b/test/api/test_workflow_pause.ga
new file mode 100644
index 0000000..6f5b4cd
--- /dev/null
+++ b/test/api/test_workflow_pause.ga
@@ -0,0 +1,118 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "test_workflow_pause", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Input Dataset"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 199.9201512336731, 
+                "top": 251.4826512336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Input Dataset\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {
+                "input1": {
+                    "id": 0, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets (for test workflows)", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 516.7257237434387, 
+                "top": 187.28126573562622
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {
+                "input": {
+                    "id": 1, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Pause for Dataset Review"
+                }
+            ], 
+            "name": "Pause for dataset review", 
+            "outputs": [], 
+            "position": {
+                "left": 862.715301990509, 
+                "top": 197.28126573562622
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Pause for Dataset Review\"}", 
+            "tool_version": null, 
+            "type": "pause", 
+            "user_outputs": []
+        }, 
+        "3": {
+            "annotation": "", 
+            "id": 3, 
+            "input_connections": {
+                "input1": {
+                    "id": 2, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets (for test workflows)", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 1181.9722595214844, 
+                "top": 181.52084350585938
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }, 
+    "uuid": "9058956e-76b6-4909-bab3-c12b2cc394c7"
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_topoambigouity.ga b/test/api/test_workflow_topoambigouity.ga
new file mode 100644
index 0000000..8137134
--- /dev/null
+++ b/test/api/test_workflow_topoambigouity.ga
@@ -0,0 +1,471 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "topoambigouity", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "l"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 193.7604217529297, 
+                "top": 156.00001525878906
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"l\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "m"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 199.7639012336731, 
+                "top": 282.9757237434387
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"m\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "n"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 213.73957872390747, 
+                "top": 372.92363023757935
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"n\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "3": {
+            "annotation": "", 
+            "id": 3, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "a"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 387.21183824539185, 
+                "top": 414.39933824539185
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"a\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "4": {
+            "annotation": "", 
+            "id": 4, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "b"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 389.2083592414856, 
+                "top": 484.4514012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"b\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "5": {
+            "annotation": "", 
+            "id": 5, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "c"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 402.2465672492981, 
+                "top": 568.4618382453918
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"c\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "6": {
+            "annotation": "", 
+            "id": 6, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "d"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 398.2881932258606, 
+                "top": 641.5000462532043
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"d\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "7": {
+            "annotation": "", 
+            "id": 7, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "k"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 931.6736912727356, 
+                "top": 787.8889012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"k\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "8": {
+            "annotation": "", 
+            "id": 8, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "h"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 923.6006932258606, 
+                "top": 599.7986302375793
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"h\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "9": {
+            "annotation": "", 
+            "id": 9, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "i"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 921.5694432258606, 
+                "top": 694.7639012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"i\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "10": {
+            "annotation": "", 
+            "id": 10, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "e"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 683.3750462532043, 
+                "top": 600.5972752571106
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"e\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "11": {
+            "annotation": "", 
+            "id": 11, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "f"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 692.3854222297668, 
+                "top": 694.5729222297668
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"f\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "12": {
+            "annotation": "", 
+            "id": 12, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "g"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 681.5173802375793, 
+                "top": 760.7014012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"g\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "13": {
+            "annotation": "", 
+            "id": 13, 
+            "input_connections": {
+                "input1": {
+                    "id": 3, 
+                    "output_name": "output"
+                }, 
+                "queries_0|input2": {
+                    "id": 4, 
+                    "output_name": "output"
+                }, 
+                "queries_1|input2": {
+                    "id": 5, 
+                    "output_name": "output"
+                }, 
+                "queries_2|input2": {
+                    "id": 6, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 660.3368382453918, 
+                "top": 404.57295274734497
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}, {\\\"input2\\\": null, \\\"__index__\\\": 1}, {\\\"input2\\\": null, \\\"__index__\\\": 2}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "14": {
+            "annotation": "", 
+            "id": 14, 
+            "input_connections": {
+                "input1": {
+                    "id": 13, 
+                    "output_name": "out_file1"
+                }, 
+                "queries_0|input2": {
+                    "id": 10, 
+                    "output_name": "output"
+                }, 
+                "queries_1|input2": {
+                    "id": 11, 
+                    "output_name": "output"
+                }, 
+                "queries_2|input2": {
+                    "id": 12, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 915.5451512336731, 
+                "top": 394.7639012336731
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}, {\\\"input2\\\": null, \\\"__index__\\\": 1}, {\\\"input2\\\": null, \\\"__index__\\\": 2}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "15": {
+            "annotation": "", 
+            "id": 15, 
+            "input_connections": {
+                "input1": {
+                    "id": 14, 
+                    "output_name": "out_file1"
+                }, 
+                "queries_0|input2": {
+                    "id": 8, 
+                    "output_name": "output"
+                }, 
+                "queries_1|input2": {
+                    "id": 9, 
+                    "output_name": "output"
+                }, 
+                "queries_2|input2": {
+                    "id": 7, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 217.61113023757935, 
+                "top": 760.8055882453918
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}, {\\\"input2\\\": null, \\\"__index__\\\": 1}, {\\\"input2\\\": null, \\\"__index__\\\": 2}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "16": {
+            "annotation": "", 
+            "id": 16, 
+            "input_connections": {
+                "input1": {
+                    "id": 0, 
+                    "output_name": "output"
+                }, 
+                "queries_0|input2": {
+                    "id": 1, 
+                    "output_name": "output"
+                }, 
+                "queries_1|input2": {
+                    "id": 2, 
+                    "output_name": "output"
+                }, 
+                "queries_2|input2": {
+                    "id": 15, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 462.9583592414856, 
+                "top": 194.15626573562622
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}, {\\\"input2\\\": null, \\\"__index__\\\": 1}, {\\\"input2\\\": null, \\\"__index__\\\": 2}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_topoambigouity_auto_laidout.ga b/test/api/test_workflow_topoambigouity_auto_laidout.ga
new file mode 100644
index 0000000..acf748e
--- /dev/null
+++ b/test/api/test_workflow_topoambigouity_auto_laidout.ga
@@ -0,0 +1,471 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "topoambigouity2", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "l"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 193.7604217529297, 
+                "top": 156.00001525878906
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"l\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "m"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 199.7639012336731, 
+                "top": 282.9757237434387
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"m\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "n"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 213.73957872390747, 
+                "top": 372.92363023757935
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"n\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "3": {
+            "annotation": "", 
+            "id": 3, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "a"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 387.21183824539185, 
+                "top": 414.39933824539185
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"a\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "4": {
+            "annotation": "", 
+            "id": 4, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "b"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 389.2083592414856, 
+                "top": 484.4514012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"b\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "5": {
+            "annotation": "", 
+            "id": 5, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "c"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 402.2465672492981, 
+                "top": 568.4618382453918
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"c\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "6": {
+            "annotation": "", 
+            "id": 6, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "d"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 398.2881932258606, 
+                "top": 641.5000462532043
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"d\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "7": {
+            "annotation": "", 
+            "id": 7, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "k"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 931.6736912727356, 
+                "top": 787.8889012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"k\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "8": {
+            "annotation": "", 
+            "id": 8, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "h"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 923.6006932258606, 
+                "top": 599.7986302375793
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"h\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "9": {
+            "annotation": "", 
+            "id": 9, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "i"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 921.5694432258606, 
+                "top": 694.7639012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"i\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "10": {
+            "annotation": "", 
+            "id": 10, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "e"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 683.3750462532043, 
+                "top": 600.5972752571106
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"e\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "11": {
+            "annotation": "", 
+            "id": 11, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "f"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 692.3854222297668, 
+                "top": 694.5729222297668
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"f\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "12": {
+            "annotation": "", 
+            "id": 12, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "g"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 681.5173802375793, 
+                "top": 760.7014012336731
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"g\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "13": {
+            "annotation": "", 
+            "id": 13, 
+            "input_connections": {
+                "input1": {
+                    "id": 3, 
+                    "output_name": "output"
+                }, 
+                "queries_0|input2": {
+                    "id": 4, 
+                    "output_name": "output"
+                }, 
+                "queries_1|input2": {
+                    "id": 5, 
+                    "output_name": "output"
+                }, 
+                "queries_2|input2": {
+                    "id": 6, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 660.3368382453918, 
+                "top": 404.57295274734497
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}, {\\\"input2\\\": null, \\\"__index__\\\": 1}, {\\\"input2\\\": null, \\\"__index__\\\": 2}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "14": {
+            "annotation": "", 
+            "id": 14, 
+            "input_connections": {
+                "input1": {
+                    "id": 13, 
+                    "output_name": "out_file1"
+                }, 
+                "queries_0|input2": {
+                    "id": 10, 
+                    "output_name": "output"
+                }, 
+                "queries_1|input2": {
+                    "id": 11, 
+                    "output_name": "output"
+                }, 
+                "queries_2|input2": {
+                    "id": 12, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 915.5451512336731, 
+                "top": 394.7639012336731
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}, {\\\"input2\\\": null, \\\"__index__\\\": 1}, {\\\"input2\\\": null, \\\"__index__\\\": 2}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "15": {
+            "annotation": "", 
+            "id": 15, 
+            "input_connections": {
+                "input1": {
+                    "id": 14, 
+                    "output_name": "out_file1"
+                }, 
+                "queries_0|input2": {
+                    "id": 8, 
+                    "output_name": "output"
+                }, 
+                "queries_1|input2": {
+                    "id": 9, 
+                    "output_name": "output"
+                }, 
+                "queries_2|input2": {
+                    "id": 7, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 217.61113023757935, 
+                "top": 760.8055882453918
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}, {\\\"input2\\\": null, \\\"__index__\\\": 1}, {\\\"input2\\\": null, \\\"__index__\\\": 2}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "16": {
+            "annotation": "", 
+            "id": 16, 
+            "input_connections": {
+                "input1": {
+                    "id": 0, 
+                    "output_name": "output"
+                }, 
+                "queries_0|input2": {
+                    "id": 1, 
+                    "output_name": "output"
+                }, 
+                "queries_1|input2": {
+                    "id": 2, 
+                    "output_name": "output"
+                }, 
+                "queries_2|input2": {
+                    "id": 15, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Concatenate datasets", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 462.9583592414856, 
+                "top": 194.15626573562622
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "cat1", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[{\\\"input2\\\": null, \\\"__index__\\\": 0}, {\\\"input2\\\": null, \\\"__index__\\\": 1}, {\\\"input2\\\": null, \\\"__index__\\\": 2}]\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_validation_1.ga b/test/api/test_workflow_validation_1.ga
new file mode 100644
index 0000000..a3291a0
--- /dev/null
+++ b/test/api/test_workflow_validation_1.ga
@@ -0,0 +1,33 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "test_validation_1", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [], 
+            "name": "Validation (default)", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "data"
+                }
+            ], 
+            "position": {
+                "left": 10, 
+                "top": 10
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "validation_default", 
+            "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"\\\"cow\\\"\", \"float_param\": \"8.0\", \"select_param\": \"\\\"opt1\\\"\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central/tool-data/shared/ucsc/chrom/?.len\\\"\"}", 
+            "tool_version": null, 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }, 
+    "uuid": "d058c6a0-6d70-44f4-a177-01859fc6aa37"
+}
\ No newline at end of file
diff --git a/test/api/test_workflow_with_runtime_input.ga b/test/api/test_workflow_with_runtime_input.ga
new file mode 100644
index 0000000..d2b763b
--- /dev/null
+++ b/test/api/test_workflow_with_runtime_input.ga
@@ -0,0 +1,66 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "test_workflow_with_runtime_input", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Input Dataset"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 248.49653673171997, 
+                "top": 154.98959398269653
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Input Dataset\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {
+                "input": {
+                    "id": 0, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [
+                {
+                    "description": "runtime parameter for tool Select random lines", 
+                    "name": "num_lines"
+                }
+            ], 
+            "name": "Select random lines", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 514.0208587646484, 
+                "top": 116.01736450195312
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "random_lines1", 
+            "tool_state": "{\"input\": \"null\", \"seed_source\": \"{\\\"__current_case__\\\": 0, \\\"seed_source_selector\\\": \\\"no_seed\\\"}\", \"__rerun_remap_job_id__\": null, \"num_lines\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"__page__\": 0}", 
+            "tool_version": "2.0.1", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }, 
+    "uuid": "ebb0e189-e1ae-4f08-a0aa-97d8674f9f76"
+}
\ No newline at end of file
diff --git a/test/api/test_workflows.py b/test/api/test_workflows.py
new file mode 100644
index 0000000..a8d3476
--- /dev/null
+++ b/test/api/test_workflows.py
@@ -0,0 +1,1705 @@
+from __future__ import print_function
+
+import time
+from collections import namedtuple
+from json import dumps
+from uuid import uuid4
+
+import yaml
+from requests import delete, put
+
+from base import api
+from galaxy.exceptions import error_codes
+from galaxy.tools.verify.test_data import TestDataResolver
+
+from .helpers import (DatasetCollectionPopulator, DatasetPopulator,
+    skip_without_tool, WorkflowPopulator)
+from .workflows_format_2 import (
+    convert_and_import_workflow,
+    ImporterGalaxyInterface,
+)
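+# Note (illustrative): convert_and_import_workflow translates the "Format 2"
+# YAML workflows used in the inline strings below into native workflow JSON
+# and imports them through ImporterGalaxyInterface.import_workflow.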
+
+SIMPLE_NESTED_WORKFLOW_YAML = """
+class: GalaxyWorkflow
+inputs:
+  - id: outer_input
+steps:
+  - tool_id: cat1
+    label: first_cat
+    state:
+      input1:
+        $link: outer_input
+  - run:
+      class: GalaxyWorkflow
+      inputs:
+        - id: inner_input
+      outputs:
+        - id: workflow_output
+          source: random_lines#out_file1
+      steps:
+        - tool_id: random_lines1
+          label: random_lines
+          state:
+            num_lines: 1
+            input:
+              $link: inner_input
+            seed_source:
+              seed_source_selector: set_seed
+              seed: asdf
+    label: nested_workflow
+    connect:
+      inner_input: first_cat#out_file1
+  - tool_id: cat1
+    label: second_cat
+    state:
+      input1:
+        $link: nested_workflow#workflow_output
+      queries:
+        - input2:
+            $link: nested_workflow#workflow_output
+
+test_data:
+  outer_input:
+    value: 1.bed
+    type: File
+"""
+
+
+class BaseWorkflowsApiTestCase( api.ApiTestCase, ImporterGalaxyInterface ):
+    # TODO: Find a new file for this class.
+
+    def setUp( self ):
+        super( BaseWorkflowsApiTestCase, self ).setUp()
+        self.workflow_populator = WorkflowPopulator( self.galaxy_interactor )
+        self.dataset_populator = DatasetPopulator( self.galaxy_interactor )
+        self.dataset_collection_populator = DatasetCollectionPopulator( self.galaxy_interactor )
+
+    def _assert_user_has_workflow_with_name( self, name ):
+        names = self._workflow_names()
+        assert name in names, "No workflow with name %s in user's workflows <%s>" % ( name, names )
+
+    def _workflow_names( self ):
+        index_response = self._get( "workflows" )
+        self._assert_status_code_is( index_response, 200 )
+        names = [w[ "name" ] for w in index_response.json()]
+        return names
+
+    # ImporterGalaxyInterface implementation - imports a workflow document via the API.
+    def import_workflow(self, workflow, **kwds):
+        workflow_str = dumps(workflow, indent=4)
+        data = {
+            'workflow': workflow_str,
+        }
+        data.update(**kwds)
+        upload_response = self._post( "workflows", data=data )
+        self._assert_status_code_is( upload_response, 200 )
+        return upload_response.json()
+
+    def _upload_yaml_workflow(self, has_yaml, **kwds):
+        workflow = convert_and_import_workflow(has_yaml, galaxy_interface=self, **kwds)
+        return workflow[ "id" ]
+
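+    # _setup_workflow_run uploads a two-input workflow and builds the run
+    # request. inputs_by selects how inputs are keyed: the legacy "ds_map"
+    # keyed by step id, or the newer "inputs"/"inputs_by" payload keyed by
+    # step index, label ("name"), or step UUID.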
+    def _setup_workflow_run( self, workflow, inputs_by='step_id', history_id=None ):
+        uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+        if not history_id:
+            history_id = self.dataset_populator.new_history()
+        hda1 = self.dataset_populator.new_dataset( history_id, content="1 2 3" )
+        hda2 = self.dataset_populator.new_dataset( history_id, content="4 5 6" )
+        workflow_request = dict(
+            history="hist_id=%s" % history_id,
+            workflow_id=uploaded_workflow_id,
+        )
+        label_map = {
+            'WorkflowInput1': self._ds_entry(hda1),
+            'WorkflowInput2': self._ds_entry(hda2)
+        }
+        if inputs_by == 'step_id':
+            ds_map = self._build_ds_map( uploaded_workflow_id, label_map )
+            workflow_request[ "ds_map" ] = ds_map
+        elif inputs_by == "step_index":
+            index_map = {
+                '0': self._ds_entry(hda1),
+                '1': self._ds_entry(hda2)
+            }
+            workflow_request[ "inputs" ] = dumps( index_map )
+            workflow_request[ "inputs_by" ] = 'step_index'
+        elif inputs_by == "name":
+            workflow_request[ "inputs" ] = dumps( label_map )
+            workflow_request[ "inputs_by" ] = 'name'
+        elif inputs_by in [ "step_uuid", "uuid_implicitly" ]:
+            uuid_map = {
+                workflow["steps"]["0"]["uuid"]: self._ds_entry(hda1),
+                workflow["steps"]["1"]["uuid"]: self._ds_entry(hda2),
+            }
+            workflow_request[ "inputs" ] = dumps( uuid_map )
+            if inputs_by == "step_uuid":
+                workflow_request[ "inputs_by" ] = "step_uuid"
+
+        return workflow_request, history_id
+
+    def _build_ds_map( self, workflow_id, label_map ):
+        workflow_inputs = self._workflow_inputs( workflow_id )
+        ds_map = {}
+        for key, value in workflow_inputs.items():
+            label = value[ "label" ]
+            if label in label_map:
+                ds_map[ key ] = label_map[ label ]
+        return dumps( ds_map )
+
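+    # _ds_entry builds the {"src": ..., "id": ...} reference the run API
+    # expects for an input; "src" is "hda" for a dataset and "hdca" for a
+    # dataset collection.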
+    def _ds_entry( self, hda ):
+        src = 'hda'
+        if 'history_content_type' in hda and hda[ 'history_content_type' ] == "dataset_collection":
+            src = 'hdca'
+        return dict( src=src, id=hda[ "id" ] )
+
+    def _workflow_inputs( self, uploaded_workflow_id ):
+        workflow_show_response = self._get( "workflows/%s" % uploaded_workflow_id )
+        self._assert_status_code_is( workflow_show_response, 200 )
+        workflow_inputs = workflow_show_response.json()[ "inputs" ]
+        return workflow_inputs
+
+    def _invocation_details( self, workflow_id, invocation_id ):
+        invocation_details_response = self._get( "workflows/%s/usage/%s" % ( workflow_id, invocation_id ) )
+        self._assert_status_code_is( invocation_details_response, 200 )
+        invocation_details = invocation_details_response.json()
+        return invocation_details
+
+    def _run_jobs( self, has_workflow, history_id=None, wait=True, source_type=None, jobs_descriptions=None, expected_response=200, assert_ok=True ):
+        def read_test_data(test_dict):
+            test_data_resolver = TestDataResolver()
+            filename = test_data_resolver.get_filename(test_dict["value"])
+            with open(filename, "r") as f:
+                content = f.read()
+            return content
+
+        if history_id is None:
+            history_id = self.history_id
+        workflow_id = self._upload_yaml_workflow(
+            has_workflow, source_type=source_type
+        )
+        if jobs_descriptions is None:
+            assert source_type != "path"
+            jobs_descriptions = yaml.safe_load( has_workflow )
+
+        test_data = jobs_descriptions.get("test_data", {})
+
+        label_map = {}
+        inputs = {}
+        has_uploads = False
+
+        for key, value in test_data.items():
+            is_dict = isinstance( value, dict )
+            if is_dict and ("elements" in value or value.get("type", None) in ["list:paired", "list", "paired"]):
+                elements_data = value.get( "elements", [] )
+                elements = []
+                for element_data in elements_data:
+                    identifier = element_data[ "identifier" ]
+                    input_type = element_data.get("type", "raw")
+                    if input_type == "File":
+                        content = read_test_data(element_data)
+                    else:
+                        content = element_data["content"]
+                    elements.append( ( identifier, content ) )
+                # TODO: make this collection_type
+                collection_type = value["type"]
+                if collection_type == "list:paired":
+                    hdca = self.dataset_collection_populator.create_list_of_pairs_in_history( history_id ).json()
+                elif collection_type == "list":
+                    hdca = self.dataset_collection_populator.create_list_in_history( history_id, contents=elements ).json()
+                else:
+                    hdca = self.dataset_collection_populator.create_pair_in_history( history_id, contents=elements ).json()
+                label_map[key] = self._ds_entry( hdca )
+                inputs[key] = hdca
+                has_uploads = True
+            elif is_dict and "type" in value:
+                input_type = value["type"]
+                if input_type == "File":
+                    content = read_test_data(value)
+                    hda = self.dataset_populator.new_dataset( history_id, content=content )
+                    label_map[key] = self._ds_entry( hda )
+                    has_uploads = True
+                elif input_type == "raw":
+                    label_map[key] = value["value"]
+                    inputs[key] = value["value"]
+            elif not is_dict:
+                has_uploads = True
+                hda = self.dataset_populator.new_dataset( history_id, content=value )
+                label_map[key] = self._ds_entry( hda )
+                inputs[key] = hda
+            else:
+                raise ValueError("Invalid test_data def %s" % test_data)
+        workflow_request = dict(
+            history="hist_id=%s" % history_id,
+            workflow_id=workflow_id,
+        )
+        workflow_request[ "inputs" ] = dumps( label_map )
+        workflow_request[ "inputs_by" ] = 'name'
+        if has_uploads:
+            self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        url = "workflows/%s/usage" % ( workflow_id )
+        invocation_response = self._post( url, data=workflow_request )
+        self._assert_status_code_is( invocation_response, expected_response )
+        invocation = invocation_response.json()
+        invocation_id = invocation.get( 'id' )
+        if invocation_id:
+            # Wait for the workflow to become fully scheduled and then for all
+            # jobs to complete.
+            if wait:
+                self.workflow_populator.wait_for_workflow( workflow_id, invocation_id, history_id, assert_ok=assert_ok )
+            jobs = self._history_jobs( history_id )
+            return RunJobsSummary(
+                history_id=history_id,
+                workflow_id=workflow_id,
+                invocation_id=invocation_id,
+                inputs=inputs,
+                jobs=jobs,
+            )
+
+    def _history_jobs( self, history_id ):
+        return self._get("jobs", { "history_id": history_id, "order_by": "create_time" } ).json()
+
+
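+# Illustrative use of the helpers above from a subclass (mirrors
+# test_run_subworkflow_simple further down):
+#
+#     summary = self._run_jobs(SIMPLE_NESTED_WORKFLOW_YAML, history_id=history_id)
+#     assert summary.invocation_id is not None
+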
+# Workflow API TODO:
+# - Allow history_id as param to workflow run action. (hist_id)
+# - Allow post to workflows/<workflow_id>/run in addition to posting to
+#    /workflows with id in payload.
+# - Much more testing obviously, always more testing.
+class WorkflowsApiTestCase( BaseWorkflowsApiTestCase ):
+
+    def setUp( self ):
+        super( WorkflowsApiTestCase, self ).setUp()
+
+    def test_show_valid( self ):
+        workflow_id = self.workflow_populator.simple_workflow( "dummy" )
+        workflow_id = self.workflow_populator.simple_workflow( "test_regular" )
+        show_response = self._get( "workflows/%s" % workflow_id, {"style": "instance"} )
+        workflow = show_response.json()
+        self._assert_looks_like_instance_workflow_representation( workflow )
+        assert len(workflow["steps"]) == 3
+        self.assertEqual(sorted(step["id"] for step in workflow["steps"].values()), [0, 1, 2])
+
+        show_response = self._get( "workflows/%s" % workflow_id, {"legacy": True} )
+        workflow = show_response.json()
+        self._assert_looks_like_instance_workflow_representation( workflow )
+        assert len(workflow["steps"]) == 3
+        # Can't really say what the legacy IDs are, but they must be greater than 3
+        # because the dummy workflow was created first in this instance.
+        self.assertNotEqual(sorted(step["id"] for step in workflow["steps"].values()), [0, 1, 2])
+
+    def test_show_invalid_key_is_400( self ):
+        show_response = self._get( "workflows/%s" % self._random_key() )
+        self._assert_status_code_is( show_response, 400 )
+
+    def test_cannot_show_private_workflow( self ):
+        workflow_id = self.workflow_populator.simple_workflow( "test_not_importportable" )
+        with self._different_user():
+            show_response = self._get( "workflows/%s" % workflow_id )
+            self._assert_status_code_is( show_response, 403 )
+
+    def test_delete( self ):
+        workflow_id = self.workflow_populator.simple_workflow( "test_delete" )
+        workflow_name = "test_delete (imported from API)"
+        self._assert_user_has_workflow_with_name( workflow_name )
+        workflow_url = self._api_url( "workflows/%s" % workflow_id, use_key=True )
+        delete_response = delete( workflow_url )
+        self._assert_status_code_is( delete_response, 200 )
+        # Make sure workflow is no longer in index by default.
+        assert workflow_name not in self._workflow_names()
+
+    def test_other_cannot_delete( self ):
+        workflow_id = self.workflow_populator.simple_workflow( "test_other_delete" )
+        with self._different_user():
+            workflow_url = self._api_url( "workflows/%s" % workflow_id, use_key=True )
+            delete_response = delete( workflow_url )
+            self._assert_status_code_is( delete_response, 403 )
+
+    def test_index( self ):
+        index_response = self._get( "workflows" )
+        self._assert_status_code_is( index_response, 200 )
+        assert isinstance( index_response.json(), list )
+
+    def test_upload( self ):
+        self.__test_upload( use_deprecated_route=False )
+
+    def test_upload_deprecated( self ):
+        self.__test_upload( use_deprecated_route=True )
+
+    def __test_upload( self, use_deprecated_route=False, name="test_import", workflow=None, assert_ok=True ):
+        if workflow is None:
+            workflow = self.workflow_populator.load_workflow( name=name )
+        data = dict(
+            workflow=dumps( workflow ),
+        )
+        if use_deprecated_route:
+            route = "workflows/upload"
+        else:
+            route = "workflows"
+        upload_response = self._post( route, data=data )
+        if assert_ok:
+            self._assert_status_code_is( upload_response, 200 )
+            self._assert_user_has_workflow_with_name( "%s (imported from API)" % name )
+        return upload_response
+
+    def test_update( self ):
+        original_workflow = self.workflow_populator.load_workflow( name="test_import" )
+        uuids = {}
+        labels = {}
+
+        for order_index, step_dict in original_workflow["steps"].items():
+            uuid = str(uuid4())
+            step_dict["uuid"] = uuid
+            uuids[order_index] = uuid
+            label = "label_%s" % order_index
+            step_dict["label"] = label
+            labels[order_index] = label
+
+        def check_label_and_uuid(order_index, step_dict):
+            assert order_index in uuids
+            assert order_index in labels
+
+            self.assertEqual(uuids[order_index], step_dict["uuid"])
+            self.assertEqual(labels[order_index], step_dict["label"])
+
+        upload_response = self.__test_upload( workflow=original_workflow )
+        workflow_id = upload_response.json()["id"]
+
+        def update(workflow_object):
+            put_response = self._update_workflow(workflow_id, workflow_object)
+            self._assert_status_code_is( put_response, 200 )
+            return put_response
+
+        workflow_content = self._download_workflow(workflow_id)
+        steps = workflow_content["steps"]
+
+        def tweak_step(step):
+            order_index, step_dict = step
+            check_label_and_uuid( order_index, step_dict)
+            assert step_dict['position']['top'] != 1
+            assert step_dict['position']['left'] != 1
+            step_dict['position'] = {'top': 1, 'left': 1}
+
+        for step in steps.items():
+            tweak_step(step)
+
+        update(workflow_content)
+
+        def check_step(step):
+            order_index, step_dict = step
+            check_label_and_uuid(order_index, step_dict)
+            assert step_dict['position']['top'] == 1
+            assert step_dict['position']['left'] == 1
+
+        updated_workflow_content = self._download_workflow(workflow_id)
+        for step in updated_workflow_content['steps'].items():
+            check_step(step)
+
+        # Re-update against the original workflow...
+        update(original_workflow)
+
+        updated_workflow_content = self._download_workflow(workflow_id)
+
+        # Make sure the positions have been updated.
+        for step in updated_workflow_content['steps'].items():
+            tweak_step(step)
+
+    def test_update_no_tool_id( self ):
+        workflow_object = self.workflow_populator.load_workflow( name="test_import" )
+        upload_response = self.__test_upload( workflow=workflow_object )
+        workflow_id = upload_response.json()["id"]
+        del workflow_object["steps"]["2"]["tool_id"]
+        put_response = self._update_workflow(workflow_id, workflow_object)
+        self._assert_status_code_is( put_response, 400 )
+
+    def test_update_missing_tool( self ):
+        # Create allows missing tools, update doesn't currently...
+        workflow_object = self.workflow_populator.load_workflow( name="test_import" )
+        upload_response = self.__test_upload( workflow=workflow_object )
+        workflow_id = upload_response.json()["id"]
+        workflow_object["steps"]["2"]["tool_id"] = "cat-not-found"
+        put_response = self._update_workflow(workflow_id, workflow_object)
+        self._assert_status_code_is( put_response, 400 )
+
+    def test_require_unique_step_uuids( self ):
+        workflow_dup_uuids = self.workflow_populator.load_workflow( name="test_import" )
+        uuid0 = str(uuid4())
+        for step_dict in workflow_dup_uuids["steps"].values():
+            step_dict["uuid"] = uuid0
+        response = self.workflow_populator.create_workflow_response( workflow_dup_uuids )
+        self._assert_status_code_is( response, 400 )
+
+    def test_require_unique_step_labels( self ):
+        workflow_dup_label = self.workflow_populator.load_workflow( name="test_import" )
+        for step_dict in workflow_dup_label["steps"].values():
+            step_dict["label"] = "my duplicated label"
+        response = self.workflow_populator.create_workflow_response( workflow_dup_label )
+        self._assert_status_code_is( response, 400 )
+
+    def test_import_deprecated( self ):
+        workflow_id = self.workflow_populator.simple_workflow( "test_import_published_deprecated", publish=True )
+        with self._different_user():
+            other_import_response = self.__import_workflow( workflow_id )
+            self._assert_status_code_is( other_import_response, 200 )
+            self._assert_user_has_workflow_with_name( "imported: test_import_published_deprecated (imported from API)")
+
+    def test_import_annotations( self ):
+        workflow_id = self.workflow_populator.simple_workflow( "test_import_annotations", publish=True )
+        with self._different_user():
+            other_import_response = self.__import_workflow( workflow_id )
+            self._assert_status_code_is( other_import_response, 200 )
+
+            # Test annotations preserved during upload and copied over during
+            # import.
+            other_id = other_import_response.json()["id"]
+            imported_workflow = self._show_workflow( other_id )
+            assert imported_workflow["annotation"] == "simple workflow"
+            step_annotations = set(step["annotation"] for step in imported_workflow["steps"].values())
+            assert "input1 description" in step_annotations
+
+    def test_import_subworkflows( self ):
+        def get_subworkflow_content_id(workflow_id):
+            workflow_contents = self._download_workflow(workflow_id, style="editor")
+            steps = workflow_contents['steps']
+            subworkflow_step = next(s for s in steps.values() if s["type"] == "subworkflow")
+            return subworkflow_step['content_id']
+
+        workflow_id = self._upload_yaml_workflow(SIMPLE_NESTED_WORKFLOW_YAML, publish=True)
+        subworkflow_content_id = get_subworkflow_content_id(workflow_id)
+        with self._different_user():
+            other_import_response = self.__import_workflow( workflow_id )
+            self._assert_status_code_is( other_import_response, 200 )
+            imported_workflow_id = other_import_response.json()["id"]
+            imported_subworkflow_content_id = get_subworkflow_content_id(imported_workflow_id)
+            assert subworkflow_content_id != imported_subworkflow_content_id
+
+    def test_not_importable_prevents_import( self ):
+        workflow_id = self.workflow_populator.simple_workflow( "test_not_importportable" )
+        with self._different_user():
+            other_import_response = self.__import_workflow( workflow_id )
+            self._assert_status_code_is( other_import_response, 403 )
+
+    def test_import_published( self ):
+        workflow_id = self.workflow_populator.simple_workflow( "test_import_published", publish=True )
+        with self._different_user():
+            other_import_response = self.__import_workflow( workflow_id, deprecated_route=True )
+            self._assert_status_code_is( other_import_response, 200 )
+            self._assert_user_has_workflow_with_name( "imported: test_import_published (imported from API)")
+
+    def test_export( self ):
+        uploaded_workflow_id = self.workflow_populator.simple_workflow( "test_for_export" )
+        downloaded_workflow = self._download_workflow( uploaded_workflow_id )
+        assert downloaded_workflow[ "name" ] == "test_for_export (imported from API)"
+        assert len( downloaded_workflow[ "steps" ] ) == 3
+        first_input = downloaded_workflow[ "steps" ][ "0" ][ "inputs" ][ 0 ]
+        assert first_input[ "name" ] == "WorkflowInput1"
+        assert first_input[ "description" ] == "input1 description"
+        self._assert_has_keys( downloaded_workflow, "a_galaxy_workflow", "format-version", "annotation", "uuid", "steps" )
+        for step in downloaded_workflow["steps"].values():
+            self._assert_has_keys(
+                step,
+                'id',
+                'type',
+                'tool_id',
+                'tool_version',
+                'name',
+                'tool_state',
+                'tool_errors',
+                'annotation',
+                'inputs',
+                'workflow_outputs',
+                'outputs'
+            )
+            if step['type'] == "tool":
+                self._assert_has_keys( step, "post_job_actions" )
+
+    def test_export_editor( self ):
+        uploaded_workflow_id = self.workflow_populator.simple_workflow( "test_for_export" )
+        downloaded_workflow = self._download_workflow( uploaded_workflow_id, style="editor" )
+        self._assert_has_keys( downloaded_workflow, "name", "steps", "upgrade_messages" )
+        for step in downloaded_workflow["steps"].values():
+            self._assert_has_keys(
+                step,
+                'id',
+                'type',
+                'content_id',
+                'name',
+                'tool_state',
+                'tooltip',
+                'tool_errors',
+                'data_inputs',
+                'data_outputs',
+                'form_html',
+                'annotation',
+                'post_job_actions',
+                'workflow_outputs',
+                'uuid',
+                'label',
+            )
+
+    def test_import_missing_tool( self ):
+        workflow = self.workflow_populator.load_workflow_from_resource( name="test_workflow_missing_tool" )
+        workflow_id = self.workflow_populator.create_workflow( workflow )
+        workflow_description = self._show_workflow( workflow_id )
+        steps = workflow_description["steps"]
+        missing_tool_steps = [v for v in steps.values() if v['tool_id'] == 'cat_missing_tool']
+        assert len(missing_tool_steps) == 1
+
+    def test_import_no_tool_id( self ):
+        # Import works with missing tools, but not with absent content/tool id.
+        workflow = self.workflow_populator.load_workflow_from_resource( name="test_workflow_missing_tool" )
+        del workflow["steps"]["2"]["tool_id"]
+        create_response = self.__test_upload(workflow=workflow, assert_ok=False)
+        self._assert_status_code_is( create_response, 400 )
+
+    def test_import_export_with_runtime_inputs( self ):
+        workflow = self.workflow_populator.load_workflow_from_resource( name="test_workflow_with_runtime_input" )
+        workflow_id = self.workflow_populator.create_workflow( workflow )
+        downloaded_workflow = self._download_workflow( workflow_id )
+        assert len( downloaded_workflow[ "steps" ] ) == 2
+        runtime_input = downloaded_workflow[ "steps" ][ "1" ][ "inputs" ][ 0 ]
+        assert runtime_input[ "description" ].startswith( "runtime parameter for tool" )
+        assert runtime_input[ "name" ] == "num_lines"
+
+    @skip_without_tool( "cat1" )
+    def test_run_workflow_by_index( self ):
+        self.__run_cat_workflow( inputs_by='step_index' )
+
+    @skip_without_tool( "cat1" )
+    def test_run_workflow_by_uuid( self ):
+        self.__run_cat_workflow( inputs_by='step_uuid' )
+
+    @skip_without_tool( "cat1" )
+    def test_run_workflow_by_uuid_implicitly( self ):
+        self.__run_cat_workflow( inputs_by='uuid_implicitly' )
+
+    @skip_without_tool( "cat1" )
+    def test_run_workflow_by_name( self ):
+        self.__run_cat_workflow( inputs_by='name' )
+
+    @skip_without_tool( "cat1" )
+    def test_run_workflow( self ):
+        self.__run_cat_workflow( inputs_by='step_id' )
+
+    @skip_without_tool( "multiple_versions" )
+    def test_run_versioned_tools( self ):
+        history_01_id = self.dataset_populator.new_history()
+        workflow_version_01 = self._upload_yaml_workflow( """
+class: GalaxyWorkflow
+steps:
+  - tool_id: multiple_versions
+    tool_version: "0.1"
+    state:
+      inttest: 0
+""" )
+        self.__invoke_workflow( history_01_id, workflow_version_01 )
+        self.dataset_populator.wait_for_history( history_01_id, assert_ok=True )
+
+        history_02_id = self.dataset_populator.new_history()
+        workflow_version_02 = self._upload_yaml_workflow( """
+class: GalaxyWorkflow
+steps:
+  - tool_id: multiple_versions
+    tool_version: "0.2"
+    state:
+      inttest: 1
+""" )
+        self.__invoke_workflow( history_02_id, workflow_version_02 )
+        self.dataset_populator.wait_for_history( history_02_id, assert_ok=True )
+
+    def __run_cat_workflow( self, inputs_by ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_run" )
+        workflow["steps"]["0"]["uuid"] = str(uuid4())
+        workflow["steps"]["1"]["uuid"] = str(uuid4())
+        workflow_request, history_id = self._setup_workflow_run( workflow, inputs_by=inputs_by )
+        # TODO: This should really be a post to workflows/<workflow_id>/run or
+        # something like that.
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+        invocation_id = run_workflow_response.json()[ "id" ]
+        invocation = self._invocation_details( workflow_request[ "workflow_id" ], invocation_id )
+        assert invocation[ "state" ] == "scheduled", invocation
+
+        self._assert_status_code_is( run_workflow_response, 200 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+
+    @skip_without_tool( "collection_creates_pair" )
+    def test_workflow_run_output_collections(self):
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - label: text_input
+    type: input
+  - label: split_up
+    tool_id: collection_creates_pair
+    state:
+      input1:
+        $link: text_input
+  - tool_id: collection_paired_test
+    state:
+      f1:
+        $link: split_up#paired_output
+""")
+        history_id = self.dataset_populator.new_history()
+        hda1 = self.dataset_populator.new_dataset( history_id, content="a\nb\nc\nd\n" )
+        inputs = {
+            '0': self._ds_entry(hda1),
+        }
+        self.__invoke_workflow( history_id, workflow_id, inputs )
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self.assertEqual("a\nc\nb\nd\n", self.dataset_populator.get_history_dataset_content( history_id, hid=0 ) )
+
+    @skip_without_tool( "collection_creates_pair" )
+    def test_workflow_run_output_collection_mapping(self):
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - type: input_collection
+  - tool_id: collection_creates_pair
+    state:
+      input1:
+        $link: 0
+  - tool_id: collection_paired_test
+    state:
+      f1:
+        $link: 1#paired_output
+  - tool_id: cat_list
+    state:
+      input1:
+        $link: 2#out1
+""")
+        history_id = self.dataset_populator.new_history()
+        hdca1 = self.dataset_collection_populator.create_list_in_history( history_id, contents=["a\nb\nc\nd\n", "e\nf\ng\nh\n"] ).json()
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        inputs = {
+            '0': self._ds_entry(hdca1),
+        }
+        self.__invoke_workflow( history_id, workflow_id, inputs )
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self.assertEqual("a\nc\nb\nd\ne\ng\nf\nh\n", self.dataset_populator.get_history_dataset_content( history_id, hid=0 ) )
+
+    @skip_without_tool( "collection_split_on_column" )
+    def test_workflow_run_dynamic_output_collections(self):
+        history_id = self.dataset_populator.new_history()
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - label: text_input1
+    type: input
+  - label: text_input2
+    type: input
+  - label: cat_inputs
+    tool_id: cat1
+    state:
+      input1:
+        $link: text_input1
+      queries:
+        - input2:
+            $link: text_input2
+  - label: split_up
+    tool_id: collection_split_on_column
+    state:
+      input1:
+        $link: cat_inputs#out_file1
+  - tool_id: cat_list
+    state:
+      input1:
+        $link: split_up#split_output
+""")
+        hda1 = self.dataset_populator.new_dataset( history_id, content="samp1\t10.0\nsamp2\t20.0\n" )
+        hda2 = self.dataset_populator.new_dataset( history_id, content="samp1\t30.0\nsamp2\t40.0\n" )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        inputs = {
+            '0': self._ds_entry(hda1),
+            '1': self._ds_entry(hda2),
+        }
+        invocation_id = self.__invoke_workflow( history_id, workflow_id, inputs )
+        self.wait_for_invocation_and_jobs( history_id, workflow_id, invocation_id )
+        details = self.dataset_populator.get_history_dataset_details( history_id, hid=0 )
+        last_item_hid = details["hid"]
+        assert last_item_hid == 7, "Expected 7 history items, got %s" % last_item_hid
+        content = self.dataset_populator.get_history_dataset_content( history_id, hid=0 )
+        self.assertEqual("10.0\n30.0\n20.0\n40.0\n", content )
+
+    @skip_without_tool( "collection_split_on_column" )
+    @skip_without_tool( "min_repeat" )
+    def test_workflow_run_dynamic_output_collections_2( self ):
+        # A more advanced output collection workflow, testing regression of
+        # https://github.com/galaxyproject/galaxy/issues/776
+        history_id = self.dataset_populator.new_history()
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - label: test_input_1
+    type: input
+  - label: test_input_2
+    type: input
+  - label: test_input_3
+    type: input
+  - label: split_up
+    tool_id: collection_split_on_column
+    state:
+      input1:
+        $link: test_input_2
+  - label: min_repeat
+    tool_id: min_repeat
+    state:
+      queries:
+        - input:
+            $link: test_input_1
+      queries2:
+        - input2:
+            $link: split_up#split_output
+""")
+        hda1 = self.dataset_populator.new_dataset( history_id, content="samp1\t10.0\nsamp2\t20.0\n" )
+        hda2 = self.dataset_populator.new_dataset( history_id, content="samp1\t20.0\nsamp2\t40.0\n" )
+        hda3 = self.dataset_populator.new_dataset( history_id, content="samp1\t30.0\nsamp2\t60.0\n" )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        inputs = {
+            '0': self._ds_entry(hda1),
+            '1': self._ds_entry(hda2),
+            '2': self._ds_entry(hda3),
+        }
+        invocation_id = self.__invoke_workflow( history_id, workflow_id, inputs )
+        self.wait_for_invocation_and_jobs( history_id, workflow_id, invocation_id )
+        content = self.dataset_populator.get_history_dataset_content( history_id, hid=7 )
+        self.assertEqual(content.strip(), "samp1\t10.0\nsamp2\t20.0")
+
+    @skip_without_tool( "collection_split_on_column" )
+    def test_workflow_run_dynamic_output_collections_3(self):
+        # Test a workflow that creates a list:list:list followed by a mapping step.
+        history_id = self.dataset_populator.new_history()
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - label: text_input1
+    type: input
+  - label: text_input2
+    type: input
+  - label: cat_inputs
+    tool_id: cat1
+    state:
+      input1:
+        $link: text_input1
+      queries:
+        - input2:
+            $link: text_input2
+  - label: split_up_1
+    tool_id: collection_split_on_column
+    state:
+      input1:
+        $link: cat_inputs#out_file1
+  - label: split_up_2
+    tool_id: collection_split_on_column
+    state:
+      input1:
+        $link: split_up_1#split_output
+  - tool_id: cat
+    state:
+      input1:
+        $link: split_up_2#split_output
+""")
+        hda1 = self.dataset_populator.new_dataset( history_id, content="samp1\t10.0\nsamp2\t20.0\n" )
+        hda2 = self.dataset_populator.new_dataset( history_id, content="samp1\t30.0\nsamp2\t40.0\n" )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        inputs = {
+            '0': self._ds_entry(hda1),
+            '1': self._ds_entry(hda2),
+        }
+        invocation_id = self.__invoke_workflow( history_id, workflow_id, inputs )
+        self.wait_for_invocation_and_jobs( history_id, workflow_id, invocation_id )
+
+    @skip_without_tool( "mapper" )
+    @skip_without_tool( "pileup" )
+    def test_workflow_metadata_validation_0( self ):
+        # Testing regression of
+        # https://github.com/galaxyproject/galaxy/issues/1514
+        history_id = self.dataset_populator.new_history()
+        self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+  - label: input_fastqs
+    type: input_collection
+  - label: reference
+    type: input
+  - label: map_over_mapper
+    tool_id: mapper
+    state:
+      input1:
+        $link: input_fastqs
+      reference:
+        $link: reference
+  - label: pileup
+    tool_id: pileup
+    state:
+      input1:
+        $link: map_over_mapper#out_file1
+      reference:
+        $link: reference
+test_data:
+  input_fastqs:
+    type: list
+    elements:
+      - identifier: samp1
+        value: 1.fastq
+        type: File
+      - identifier: samp2
+        value: 1.fastq
+        type: File
+  reference:
+    value: 1.fasta
+    type: File
+""", history_id=history_id)
+
+    def test_run_subworkflow_simple( self ):
+        history_id = self.dataset_populator.new_history()
+        self._run_jobs(SIMPLE_NESTED_WORKFLOW_YAML, history_id=history_id)
+
+        content = self.dataset_populator.get_history_dataset_content( history_id )
+        self.assertEqual("chr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\nchr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\n", content)
+
+    @skip_without_tool( "cat1" )
+    @skip_without_tool( "collection_paired_test" )
+    def test_workflow_run_zip_collections( self ):
+        # A more advanced output collection workflow, testing regression of
+        # https://github.com/galaxyproject/galaxy/issues/776
+        history_id = self.dataset_populator.new_history()
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - label: test_input_1
+    type: input
+  - label: test_input_2
+    type: input
+  - label: first_cat
+    tool_id: cat1
+    state:
+      input1:
+        $link: test_input_1
+  - label: zip_it
+    tool_id: "__ZIP_COLLECTION__"
+    state:
+      input_forward:
+        $link: first_cat#out_file1
+      input_reverse:
+        $link: test_input_2
+  - label: concat_pair
+    tool_id: collection_paired_test
+    state:
+      f1:
+        $link: zip_it#output
+""")
+        hda1 = self.dataset_populator.new_dataset( history_id, content="samp1\t10.0\nsamp2\t20.0\n" )
+        hda2 = self.dataset_populator.new_dataset( history_id, content="samp1\t20.0\nsamp2\t40.0\n" )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        inputs = {
+            '0': self._ds_entry(hda1),
+            '1': self._ds_entry(hda2),
+        }
+        invocation_id = self.__invoke_workflow( history_id, workflow_id, inputs )
+        self.wait_for_invocation_and_jobs( history_id, workflow_id, invocation_id )
+        content = self.dataset_populator.get_history_dataset_content( history_id )
+        self.assertEqual(content.strip(), "samp1\t10.0\nsamp2\t20.0\nsamp1\t20.0\nsamp2\t40.0")
+
+    def test_workflow_request( self ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_queue" )
+        workflow_request, history_id = self._setup_workflow_run( workflow )
+        url = "workflows/%s/usage" % ( workflow_request[ "workflow_id" ] )
+        del workflow_request[ "workflow_id" ]
+        run_workflow_response = self._post( url, data=workflow_request )
+
+        self._assert_status_code_is( run_workflow_response, 200 )
+        # Give some time for workflow to get scheduled before scanning the history.
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+
+    @skip_without_tool( "cat" )
+    def test_workflow_pause( self ):
+        workflow = self.workflow_populator.load_workflow_from_resource( "test_workflow_pause" )
+        uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+        history_id = self.dataset_populator.new_history()
+        hda1 = self.dataset_populator.new_dataset( history_id, content="1 2 3" )
+        index_map = {
+            '0': self._ds_entry(hda1),
+        }
+        invocation_id = self.__invoke_workflow(
+            history_id,
+            uploaded_workflow_id,
+            index_map,
+        )
+        # Give some time for workflow to get scheduled before scanning the history.
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+
+        # Wait for all the datasets to complete, make sure the workflow invocation
+        # is not complete.
+        invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+        assert invocation[ 'state' ] != 'scheduled', invocation
+
+        self.__review_paused_steps( uploaded_workflow_id, invocation_id, order_index=2, action=True )
+
+        invocation_scheduled = False
+        for i in range( 25 ):
+            invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+            if invocation[ 'state' ] == 'scheduled':
+                invocation_scheduled = True
+                break
+
+            time.sleep( .5 )
+
+        assert invocation_scheduled, "Workflow state is not scheduled..."
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+
+    @skip_without_tool( "cat" )
+    def test_workflow_pause_cancel( self ):
+        workflow = self.workflow_populator.load_workflow_from_resource( "test_workflow_pause" )
+        uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+        history_id = self.dataset_populator.new_history()
+        hda1 = self.dataset_populator.new_dataset( history_id, content="1 2 3" )
+        index_map = {
+            '0': self._ds_entry(hda1),
+        }
+        invocation_id = self.__invoke_workflow( history_id, uploaded_workflow_id, index_map )
+        # Give some time for workflow to get scheduled before scanning the history.
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+
+        # Wait for all the datasets to complete, make sure the workflow invocation
+        # is not complete.
+        invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+        assert invocation[ 'state' ] != 'scheduled'
+
+        self.__review_paused_steps( uploaded_workflow_id, invocation_id, order_index=2, action=False )
+        # Not immediately cancelled, must wait until workflow scheduled again.
+        time.sleep( 4 )
+        invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+        assert invocation[ 'state' ] == 'cancelled', invocation
+
+    @skip_without_tool( "head" )
+    def test_workflow_map_reduce_pause( self ):
+        workflow = self.workflow_populator.load_workflow_from_resource( "test_workflow_map_reduce_pause" )
+        uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+        history_id = self.dataset_populator.new_history()
+        hda1 = self.dataset_populator.new_dataset( history_id, content="reviewed\nunreviewed" )
+        hdca1 = self.dataset_collection_populator.create_list_in_history( history_id, contents=["1\n2\n3", "4\n5\n6"] ).json()
+        index_map = {
+            '0': self._ds_entry(hda1),
+            '1': self._ds_entry(hdca1),
+        }
+        invocation_id = self.__invoke_workflow( history_id, uploaded_workflow_id, index_map )
+        # Give some time for workflow to get scheduled before scanning the history.
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+
+        # Wait for all the datasets to complete, make sure the workflow invocation
+        # is not complete.
+        invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+        assert invocation[ 'state' ] != 'scheduled'
+
+        self.__review_paused_steps( uploaded_workflow_id, invocation_id, order_index=4, action=True )
+
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+        assert invocation[ 'state' ] == 'scheduled'
+        self.assertEqual("reviewed\n1\nreviewed\n4\n", self.dataset_populator.get_history_dataset_content( history_id ) )
+
+    @skip_without_tool( "cat" )
+    def test_cancel_workflow_invocation( self ):
+        workflow = self.workflow_populator.load_workflow_from_resource( "test_workflow_pause" )
+        uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+        history_id = self.dataset_populator.new_history()
+        hda1 = self.dataset_populator.new_dataset( history_id, content="1 2 3" )
+        index_map = {
+            '0': self._ds_entry(hda1),
+        }
+        invocation_id = self.__invoke_workflow( history_id, uploaded_workflow_id, index_map )
+        # Give some time for workflow to get scheduled before scanning the history.
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+
+        # Wait for all the datasets to complete, make sure the workflow invocation
+        # is not complete.
+        invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+        assert invocation[ 'state' ] != 'scheduled'
+
+        invocation_url = self._api_url( "workflows/%s/usage/%s" % (uploaded_workflow_id, invocation_id), use_key=True )
+        delete_response = delete( invocation_url )
+        self._assert_status_code_is( delete_response, 200 )
+
+        # After the delete request, the invocation should be cancelled.
+        invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+        assert invocation[ 'state' ] == 'cancelled'
+
+    def test_run_with_implicit_connection( self ):
+        history_id = self.dataset_populator.new_history()
+        run_summary = self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+- label: test_input
+  type: input
+- label: first_cat
+  tool_id: cat1
+  state:
+    input1:
+      $link: test_input
+- label: the_pause
+  type: pause
+  connect:
+    input:
+    - first_cat#out_file1
+- label: second_cat
+  tool_id: cat1
+  state:
+    input1:
+      $link: the_pause
+- label: third_cat
+  tool_id: random_lines1
+  connect:
+    $step: second_cat
+  state:
+    num_lines: 1
+    input:
+      $link: test_input
+    seed_source:
+      seed_source_selector: set_seed
+      seed: asdf
+test_data:
+  test_input: "hello world"
+""", history_id=history_id, wait=False)
+        time.sleep( 2 )
+        history_id = run_summary.history_id
+        workflow_id = run_summary.workflow_id
+        invocation_id = run_summary.invocation_id
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        invocation = self._invocation_details( workflow_id, invocation_id )
+        assert invocation[ 'state' ] != 'scheduled'
+        # Expect two jobs - the upload and first cat. random_lines shouldn't
+        # run since it is implicitly dependent on second cat.
+        assert len(  self._history_jobs( history_id ) ) == 2
+
+        self.__review_paused_steps( workflow_id, invocation_id, order_index=2, action=True )
+        self.wait_for_invocation_and_jobs( history_id, workflow_id, invocation_id )
+        assert len(  self._history_jobs( history_id ) ) == 4
+
+    def test_run_with_validated_parameter_connection_valid( self ):
+        history_id = self.dataset_populator.new_history()
+        run_summary = self._run_jobs("""
+class: GalaxyWorkflow
+inputs:
+  - label: text_input
+    type: text
+steps:
+- tool_id: validation_repeat
+  state:
+    r2:
+     - text:
+        $link: text_input
+test_data:
+  text_input:
+    value: "abd"
+    type: raw
+""", history_id=history_id, wait=True)
+        time.sleep(10)
+        self.workflow_populator.wait_for_invocation( run_summary.workflow_id, run_summary.invocation_id )
+        jobs = self._history_jobs( history_id )
+        assert len(jobs) == 1
+
+    def test_run_with_validated_parameter_connection_invalid( self ):
+        history_id = self.dataset_populator.new_history()
+        self._run_jobs("""
+class: GalaxyWorkflow
+inputs:
+  - label: text_input
+    type: text
+steps:
+- tool_id: validation_repeat
+  state:
+    r2:
+     - text:
+        $link: text_input
+test_data:
+  text_input:
+    value: ""
+    type: raw
+""", history_id=history_id, wait=True, assert_ok=False )
+
+    def test_run_with_text_connection( self ):
+        history_id = self.dataset_populator.new_history()
+        self._run_jobs("""
+class: GalaxyWorkflow
+inputs:
+  - label: data_input
+    type: data
+  - label: text_input
+    type: text
+steps:
+- label: randomlines
+  tool_id: random_lines1
+  state:
+    num_lines: 1
+    input:
+      $link: data_input
+    seed_source:
+      seed_source_selector: set_seed
+      seed:
+        $link: text_input
+test_data:
+  data_input:
+    value: 1.bed
+    type: File
+  text_input:
+    value: asdf
+    type: raw
+""", history_id=history_id)
+
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        content = self.dataset_populator.get_history_dataset_content( history_id )
+        self.assertEqual("chr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\n", content)
+
+    def wait_for_invocation_and_jobs( self, history_id, workflow_id, invocation_id, assert_ok=True ):
+        self.workflow_populator.wait_for_invocation( workflow_id, invocation_id )
+        time.sleep(.5)
+        self.dataset_populator.wait_for_history( history_id, assert_ok=assert_ok )
+        time.sleep(.5)
+
+    def test_cannot_run_inaccessible_workflow( self ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_run_cannot_access" )
+        workflow_request, history_id = self._setup_workflow_run( workflow )
+        with self._different_user():
+            run_workflow_response = self._post( "workflows", data=workflow_request )
+            self._assert_status_code_is( run_workflow_response, 403 )
+
+    def test_400_on_invalid_workflow_id( self ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_run_does_not_exist" )
+        workflow_request, history_id = self._setup_workflow_run( workflow )
+        workflow_request[ "workflow_id" ] = self._random_key()
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 400 )
+
+    def test_cannot_run_against_other_users_history( self ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_run_does_not_exist" )
+        workflow_request, history_id = self._setup_workflow_run( workflow )
+        with self._different_user():
+            other_history_id = self.dataset_populator.new_history()
+        workflow_request[ "history" ] = "hist_id=%s" % other_history_id
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 403 )
+
+    @skip_without_tool( "cat" )
+    @skip_without_tool( "cat_list" )
+    def test_workflow_run_with_matching_lists( self ):
+        workflow = self.workflow_populator.load_workflow_from_resource( "test_workflow_matching_lists" )
+        workflow_id = self.workflow_populator.create_workflow( workflow )
+        history_id = self.dataset_populator.new_history()
+        hdca1 = self.dataset_collection_populator.create_list_in_history( history_id, contents=[("sample1-1", "1 2 3"), ("sample2-1", "7 8 9")] ).json()
+        hdca2 = self.dataset_collection_populator.create_list_in_history( history_id, contents=[("sample1-2", "4 5 6"), ("sample2-2", "0 a b")] ).json()
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        label_map = { "list1": self._ds_entry( hdca1 ), "list2": self._ds_entry( hdca2 ) }
+        workflow_request = dict(
+            history="hist_id=%s" % history_id,
+            workflow_id=workflow_id,
+            ds_map=self._build_ds_map( workflow_id, label_map ),
+        )
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self.assertEqual("1 2 3\n4 5 6\n7 8 9\n0 a b\n", self.dataset_populator.get_history_dataset_content( history_id ) )
+
+    def test_workflow_stability( self ):
+        # Run this index stability test with the following command:
+        #   ./run_tests.sh test/api/test_workflows.py:WorkflowsApiTestCase.test_workflow_stability
+        from pkg_resources import resource_string
+        num_tests = 1
+        for workflow_file in [ "test_workflow_topoambigouity.ga", "test_workflow_topoambigouity_auto_laidout.ga" ]:
+            workflow_str = resource_string( __name__, workflow_file )
+            workflow = self.workflow_populator.load_workflow( "test1", content=workflow_str )
+            last_step_map = self._step_map( workflow )
+            for i in range(num_tests):
+                uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+                downloaded_workflow = self._download_workflow( uploaded_workflow_id )
+                step_map = self._step_map(downloaded_workflow)
+                assert step_map == last_step_map
+                last_step_map = step_map
+
+    def _step_map(self, workflow):
+        # Build dict mapping step index to input name.
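+        # (e.g. {"0": "Input Dataset", "1": "Input Dataset"} for a workflow
+        # whose first two steps are data inputs; the names are illustrative)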
+        step_map = {}
+        for step_index, step in workflow["steps"].items():
+            if step[ "type" ] == "data_input":
+                step_map[step_index] = step["inputs"][0]["name"]
+        return step_map
+
+    def test_empty_create( self ):
+        response = self._post( "workflows" )
+        self._assert_status_code_is( response, 400 )
+        self._assert_error_code_is( response, error_codes.USER_REQUEST_MISSING_PARAMETER )
+
+    def test_invalid_create_multiple_types( self ):
+        data = {
+            'shared_workflow_id': '1234567890abcdef',
+            'from_history_id': '1234567890abcdef'
+        }
+        response = self._post( "workflows", data )
+        self._assert_status_code_is( response, 400 )
+        self._assert_error_code_is( response, error_codes.USER_REQUEST_INVALID_PARAMETER )
+
+    @skip_without_tool( "cat1" )
+    def test_run_with_pja( self ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_pja_run", add_pja=True )
+        workflow_request, history_id = self._setup_workflow_run( workflow, inputs_by='step_index' )
+        workflow_request[ "replacement_params" ] = dumps( dict( replaceme="was replaced" ) )
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+        content = self.dataset_populator.get_history_dataset_details( history_id, wait=True, assert_ok=True )
+        assert content[ "name" ] == "foo was replaced"
+
+    @skip_without_tool( "cat1" )
+    def test_run_with_runtime_pja( self ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_pja_runtime" )
+        uuid0, uuid1, uuid2 = str(uuid4()), str(uuid4()), str(uuid4())
+        workflow["steps"]["0"]["uuid"] = uuid0
+        workflow["steps"]["1"]["uuid"] = uuid1
+        workflow["steps"]["2"]["uuid"] = uuid2
+        workflow_request, history_id = self._setup_workflow_run( workflow, inputs_by='step_index' )
+        workflow_request[ "replacement_params" ] = dumps( dict( replaceme="was replaced" ) )
+
+        pja_map = {
+            "RenameDatasetActionout_file1": dict(
+                action_type="RenameDatasetAction",
+                output_name="out_file1",
+                action_arguments=dict( newname="foo ${replaceme}" ),
+            )
+        }
+        workflow_request[ "parameters" ] = dumps({
+            uuid2: { "__POST_JOB_ACTIONS__": pja_map }
+        })
+
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+        content = self.dataset_populator.get_history_dataset_details( history_id, wait=True, assert_ok=True )
+        assert content[ "name" ] == "foo was replaced", content[ "name" ]
+
+        # Test for regression of previous behavior where runtime post job actions
+        # would be added to the original workflow post job actions.
+        workflow_id = workflow_request["workflow_id"]
+        downloaded_workflow = self._download_workflow( workflow_id )
+        pjas = list(downloaded_workflow[ "steps" ][ "2" ][ "post_job_actions" ].values())
+        assert len( pjas ) == 0, len( pjas )
+
+    @skip_without_tool( "cat1" )
+    def test_run_with_delayed_runtime_pja( self ):
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - label: test_input
+    type: input
+  - label: first_cat
+    tool_id: cat1
+    state:
+      input1:
+        $link: test_input
+  - label: the_pause
+    type: pause
+    connect:
+      input:
+      - first_cat#out_file1
+  - label: second_cat
+    tool_id: cat1
+    state:
+      input1:
+        $link: the_pause
+""")
+        downloaded_workflow = self._download_workflow( workflow_id )
+        print(downloaded_workflow)
+        uuid_dict = dict((int(index), step["uuid"]) for index, step in downloaded_workflow["steps"].items())
+        history_id = self.dataset_populator.new_history()
+        hda = self.dataset_populator.new_dataset( history_id, content="1 2 3" )
+        self.dataset_populator.wait_for_history( history_id )
+        inputs = {
+            '0': self._ds_entry( hda ),
+        }
+        print(inputs)
+        uuid2 = uuid_dict[ 3 ]
+        workflow_request = {}
+        workflow_request[ "replacement_params" ] = dumps( dict( replaceme="was replaced" ) )
+        pja_map = {
+            "RenameDatasetActionout_file1": dict(
+                action_type="RenameDatasetAction",
+                output_name="out_file1",
+                action_arguments=dict( newname="foo ${replaceme}" ),
+            )
+        }
+        workflow_request[ "parameters" ] = dumps({
+            uuid2: { "__POST_JOB_ACTIONS__": pja_map }
+        })
+        invocation_id = self.__invoke_workflow( history_id, workflow_id, inputs=inputs, request=workflow_request )
+
+        time.sleep( 2 )
+        self.dataset_populator.wait_for_history( history_id )
+        self.__review_paused_steps( workflow_id, invocation_id, order_index=2, action=True )
+
+        self.workflow_populator.wait_for_workflow( workflow_id, invocation_id, history_id )
+        time.sleep( 1 )
+        content = self.dataset_populator.get_history_dataset_details( history_id )
+        assert content[ "name" ] == "foo was replaced", content[ "name" ]
+
+    @skip_without_tool( "cat1" )
+    def test_delete_intermediate_datasets_pja_1( self ):
+        history_id = self.dataset_populator.new_history()
+        self._run_jobs("""
+class: GalaxyWorkflow
+inputs:
+  - id: input1
+outputs:
+  - id: wf_output_1
+    source: third_cat#out_file1
+steps:
+  - tool_id: cat1
+    label: first_cat
+    state:
+      input1:
+        $link: input1
+  - tool_id: cat1
+    label: second_cat
+    state:
+      input1:
+        $link: first_cat#out_file1
+  - tool_id: cat1
+    label: third_cat
+    state:
+      input1:
+        $link: second_cat#out_file1
+    outputs:
+      out_file1:
+        delete_intermediate_datasets: true
+test_data:
+  input1: "hello world"
+""", history_id=history_id)
+        hda1 = self.dataset_populator.get_history_dataset_details(history_id, hid=1)
+        hda2 = self.dataset_populator.get_history_dataset_details(history_id, hid=2)
+        hda3 = self.dataset_populator.get_history_dataset_details(history_id, hid=3)
+        hda4 = self.dataset_populator.get_history_dataset_details(history_id, hid=4)
+        assert not hda1["deleted"]
+        assert hda2["deleted"]
+        # I think hda3 should be deleted, but the inputs to
+        # steps with workflow outputs are not deleted.
+        # assert hda3["deleted"]
+        print(hda3["deleted"])
+        assert not hda4["deleted"]
+
+    @skip_without_tool( "random_lines1" )
+    def test_run_replace_params_by_tool( self ):
+        workflow_request, history_id = self._setup_random_x2_workflow( "test_for_replace_tool_params" )
+        workflow_request[ "parameters" ] = dumps( dict( random_lines1=dict( num_lines=5 ) ) )
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        # Would be 8 and 6 without modification
+        self.__assert_lines_hid_line_count_is( history_id, 2, 5 )
+        self.__assert_lines_hid_line_count_is( history_id, 3, 5 )
+
+    @skip_without_tool( "random_lines1" )
+    def test_run_replace_params_by_uuid( self ):
+        workflow_request, history_id = self._setup_random_x2_workflow( "test_for_replace_tool_params" )
+        workflow_request[ "parameters" ] = dumps( {
+            "58dffcc9-bcb7-4117-a0e1-61513524b3b1": dict( num_lines=4 ),
+            "58dffcc9-bcb7-4117-a0e1-61513524b3b2": dict( num_lines=3 ),
+        } )
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        # Would be 8 and 6 without modification
+        self.__assert_lines_hid_line_count_is( history_id, 2, 4 )
+        self.__assert_lines_hid_line_count_is( history_id, 3, 3 )
+
+    @skip_without_tool( "cat1" )
+    @skip_without_tool( "addValue" )
+    def test_run_batch( self ):
+        workflow = self.workflow_populator.load_workflow_from_resource( "test_workflow_batch" )
+        workflow_id = self.workflow_populator.create_workflow( workflow )
+        history_id = self.dataset_populator.new_history()
+        hda1 = self.dataset_populator.new_dataset( history_id, content="1 2 3" )
+        hda2 = self.dataset_populator.new_dataset( history_id, content="4 5 6" )
+        workflow_request = {
+            "history_id" : history_id,
+            "batch"      : True,
+            "parameters_normalized": True,
+            "parameters" : dumps( { "0": { "input": { "batch": True, "values": [ { "id" : hda1.get( "id" ), "hid": hda1.get( "hid" ), "src": "hda" }, { "id" : hda2.get( "id" ), "hid": hda2.get( "hid" ), "src": "hda" } ] } }, "1": { "input": { "batch": False, "values": [ { "id" : hda1.get( "id" ), "hid": hda1.get( "hid" ), "src": "hda" } ] }, "exp": "2" } } )
+        }
+        invocation_response = self._post( "workflows/%s/usage" % workflow_id, data=workflow_request )
+        self._assert_status_code_is( invocation_response, 200 )
+        time.sleep( 5 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        r1 = "1 2 3\t1\n1 2 3\t2\n"
+        r2 = "4 5 6\t1\n1 2 3\t2\n"
+        t1 = self.dataset_populator.get_history_dataset_content( history_id, hid=5 )
+        t2 = self.dataset_populator.get_history_dataset_content( history_id, hid=8 )
+        assert ( r1 == t1 and r2 == t2 ) or ( r1 == t2 and r2 == t1 )
+
+    @skip_without_tool( "validation_default" )
+    def test_parameter_substitution_sanitization( self ):
+        substitutions = dict( input1="\" ; echo \"moo" )
+        run_workflow_response, history_id = self._run_validation_workflow_with_substitutions( substitutions )
+
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self.assertEqual("__dq__ X echo __dq__moo\n", self.dataset_populator.get_history_dataset_content( history_id, hid=1 ) )
+
+    @skip_without_tool( "validation_repeat" )
+    def test_parameter_substitution_validation_value_errors_0( self ):
+        history_id = self.dataset_populator.new_history()
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+ - tool_id: validation_repeat
+   state:
+     r2:
+      - text: "abd"
+""")
+        workflow_request = dict(
+            history="hist_id=%s" % history_id,
+            parameters=dumps( dict( validation_repeat={"r2_0|text": ""} ) )
+        )
+        url = "workflows/%s/invocations" % workflow_id
+        invocation_response = self._post( url, data=workflow_request )
+        # Take a valid state and make it invalid, assert workflow won't run.
+        self._assert_status_code_is( invocation_response, 400 )
+
+    @skip_without_tool( "validation_default" )
+    def test_parameter_substitution_validation_value_errors_1( self ):
+        substitutions = dict( select_param="\" ; echo \"moo" )
+        run_workflow_response, history_id = self._run_validation_workflow_with_substitutions( substitutions )
+
+        self._assert_status_code_is( run_workflow_response, 400 )
+
+    @skip_without_tool( "validation_repeat" )
+    def test_workflow_import_state_validation_1( self ):
+        history_id = self.dataset_populator.new_history()
+        self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+ - tool_id: validation_repeat
+   state:
+     r2:
+     - text: ""
+""", history_id=history_id, wait=False, expected_response=400 )
+
+    def _run_validation_workflow_with_substitutions( self, substitutions ):
+        workflow = self.workflow_populator.load_workflow_from_resource( "test_workflow_validation_1" )
+        uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+        history_id = self.dataset_populator.new_history()
+        workflow_request = dict(
+            history="hist_id=%s" % history_id,
+            workflow_id=uploaded_workflow_id,
+            parameters=dumps( dict( validation_default=substitutions ) )
+        )
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        return run_workflow_response, history_id
+
+    @skip_without_tool( "random_lines1" )
+    def test_run_replace_params_by_steps( self ):
+        workflow_request, history_id, steps = self._setup_random_x2_workflow_steps( "test_for_replace_step_params" )
+        params = dumps( { str(steps[1]["id"]): dict( num_lines=5 ) } )
+        workflow_request[ "parameters" ] = params
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        # Would be 8 and 6 without modification
+        self.__assert_lines_hid_line_count_is( history_id, 2, 8 )
+        self.__assert_lines_hid_line_count_is( history_id, 3, 5 )
+
+    @skip_without_tool( "random_lines1" )
+    def test_run_replace_params_nested( self ):
+        workflow_request, history_id, steps = self._setup_random_x2_workflow_steps( "test_for_replace_step_params_nested" )
+        seed_source = dict(
+            seed_source_selector="set_seed",
+            seed="moo",
+        )
+        params = dumps( { str(steps[0]["id"]): dict( num_lines=1, seed_source=seed_source ),
+                          str(steps[1]["id"]): dict( num_lines=1, seed_source=seed_source ) } )
+        workflow_request[ "parameters" ] = params
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+        self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+        self.assertEqual("3\n", self.dataset_populator.get_history_dataset_content( history_id ) )
+
+    def test_pja_import_export( self ):
+        workflow = self.workflow_populator.load_workflow( name="test_for_pja_import", add_pja=True )
+        uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+        downloaded_workflow = self._download_workflow( uploaded_workflow_id )
+        self._assert_has_keys( downloaded_workflow[ "steps" ], "0", "1", "2" )
+        pjas = list(downloaded_workflow[ "steps" ][ "2" ][ "post_job_actions" ].values())
+        assert len( pjas ) == 1, len( pjas )
+        pja = pjas[ 0 ]
+        self._assert_has_keys( pja, "action_type", "output_name", "action_arguments" )
+
+    @skip_without_tool( "cat1" )
+    def test_only_own_invocations_accessible( self ):
+        workflow_id, usage = self._run_workflow_once_get_invocation( "test_usage")
+        with self._different_user():
+            usage_details_response = self._get( "workflows/%s/usage/%s" % ( workflow_id, usage[ "id" ] ) )
+            self._assert_status_code_is( usage_details_response, 403 )
+
+    @skip_without_tool( "cat1" )
+    def test_invocation_usage( self ):
+        workflow_id, usage = self._run_workflow_once_get_invocation( "test_usage")
+        invocation_id = usage[ "id" ]
+        usage_details = self._invocation_details( workflow_id, invocation_id )
+        # Assert some high-level things about the structure of data returned.
+        self._assert_has_keys( usage_details, "inputs", "steps" )
+        invocation_steps = usage_details[ "steps" ]
+        for step in invocation_steps:
+            self._assert_has_keys( step, "workflow_step_id", "order_index", "id" )
+        an_invocation_step = invocation_steps[ 0 ]
+        step_id = an_invocation_step[ "id" ]
+        step_response = self._get( "workflows/%s/usage/%s/steps/%s" % ( workflow_id, invocation_id, step_id ) )
+        self._assert_status_code_is( step_response, 200 )
+        self._assert_has_keys( step_response.json(), "id", "order_index" )
+
+    def _update_workflow(self, workflow_id, workflow_object):
+        data = dict(
+            workflow=workflow_object
+        )
+        raw_url = 'workflows/%s' % workflow_id
+        url = self._api_url( raw_url, use_key=True )
+        put_response = put( url, data=dumps(data) )
+        return put_response
+
+    def _invocation_step_details( self, workflow_id, invocation_id, step_id ):
+        invocation_step_response = self._get( "workflows/%s/usage/%s/steps/%s" % ( workflow_id, invocation_id, step_id ) )
+        self._assert_status_code_is( invocation_step_response, 200 )
+        invocation_step_details = invocation_step_response.json()
+        return invocation_step_details
+
+    def _execute_invocation_step_action( self, workflow_id, invocation_id, step_id, action ):
+        raw_url = "workflows/%s/usage/%s/steps/%s" % ( workflow_id, invocation_id, step_id )
+        url = self._api_url( raw_url, use_key=True )
+        payload = dumps( dict( action=action ) )
+        action_response = put( url, data=payload )
+        self._assert_status_code_is( action_response, 200 )
+        invocation_step_details = action_response.json()
+        return invocation_step_details
+
+    def _run_workflow_once_get_invocation( self, name ):
+        workflow = self.workflow_populator.load_workflow( name=name )
+        workflow_request, history_id = self._setup_workflow_run( workflow )
+        workflow_id = workflow_request[ "workflow_id" ]
+        response = self._get( "workflows/%s/usage" % workflow_id )
+        self._assert_status_code_is( response, 200 )
+        assert len( response.json() ) == 0
+        run_workflow_response = self._post( "workflows", data=workflow_request )
+        self._assert_status_code_is( run_workflow_response, 200 )
+
+        response = self._get( "workflows/%s/usage" % workflow_id )
+        self._assert_status_code_is( response, 200 )
+        usages = response.json()
+        assert len( usages ) == 1
+        return workflow_id, usages[ 0 ]
+
+    def _setup_random_x2_workflow_steps( self, name ):
+        workflow_request, history_id = self._setup_random_x2_workflow( "test_for_replace_step_params" )
+        random_line_steps = self._random_lines_steps( workflow_request )
+        return workflow_request, history_id, random_line_steps
+
+    def _random_lines_steps( self, workflow_request ):
+        workflow_summary_response = self._get( "workflows/%s" % workflow_request[ "workflow_id" ] )
+        self._assert_status_code_is( workflow_summary_response, 200 )
+        steps = workflow_summary_response.json()[ "steps" ]
+        return sorted( (step for step in steps.values() if step["tool_id"] == "random_lines1"), key=lambda step: step["id"] )
+
+    def _setup_random_x2_workflow( self, name ):
+        workflow = self.workflow_populator.load_random_x2_workflow( name )
+        uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
+        workflow_inputs = self._workflow_inputs( uploaded_workflow_id )
+        key = next(iter(workflow_inputs.keys()))
+        history_id = self.dataset_populator.new_history()
+        ten_lines = "\n".join( str(_) for _ in range(10) )
+        hda1 = self.dataset_populator.new_dataset( history_id, content=ten_lines )
+        workflow_request = dict(
+            history="hist_id=%s" % history_id,
+            workflow_id=uploaded_workflow_id,
+            ds_map=dumps( {
+                key: self._ds_entry(hda1),
+            } ),
+        )
+        return workflow_request, history_id
+
+    def __review_paused_steps( self, uploaded_workflow_id, invocation_id, order_index, action=True ):
+        invocation = self._invocation_details( uploaded_workflow_id, invocation_id )
+        invocation_steps = invocation[ "steps" ]
+        pause_steps = [ s for s in invocation_steps if s[ 'order_index' ] == order_index ]
+        for pause_step in pause_steps:
+            pause_step_id = pause_step[ 'id' ]
+
+            self._execute_invocation_step_action( uploaded_workflow_id, invocation_id, pause_step_id, action=action )
+
+    def __assert_lines_hid_line_count_is( self, history, hid, lines ):
+        contents_url = "histories/%s/contents" % history
+        history_contents_response = self._get( contents_url )
+        self._assert_status_code_is( history_contents_response, 200 )
+        hda_summary = next(hc for hc in history_contents_response.json() if hc[ "hid" ] == hid)
+        hda_info_response = self._get( "%s/%s" % ( contents_url, hda_summary[ "id" ] ) )
+        self._assert_status_code_is( hda_info_response, 200 )
+        self.assertEqual( hda_info_response.json()[ "metadata_data_lines" ], lines )
+
+    def __invoke_workflow( self, history_id, workflow_id, inputs=None, request=None, assert_ok=True ):
+        # Avoid mutable default arguments; callers may still pass and reuse
+        # their own request dict.
+        inputs = inputs or {}
+        request = request or {}
+        request[ "history" ] = "hist_id=%s" % history_id
+        if inputs:
+            request[ "inputs" ] = dumps( inputs )
+            request[ "inputs_by" ] = 'step_index'
+        url = "workflows/%s/usage" % ( workflow_id )
+        invocation_response = self._post( url, data=request )
+        if assert_ok:
+            self._assert_status_code_is( invocation_response, 200 )
+            invocation_id = invocation_response.json()[ "id" ]
+            return invocation_id
+        else:
+            return invocation_response
+
+    def __import_workflow( self, workflow_id, deprecated_route=False ):
+        if deprecated_route:
+            route = "workflows/import"
+            import_data = dict(
+                workflow_id=workflow_id,
+            )
+        else:
+            route = "workflows"
+            import_data = dict(
+                shared_workflow_id=workflow_id,
+            )
+        return self._post( route, import_data )
+
+    def _download_workflow(self, workflow_id, style=None):
+        params = {}
+        if style:
+            params = {"style": style}
+        download_response = self._get( "workflows/%s/download" % workflow_id, params )
+        self._assert_status_code_is( download_response, 200 )
+        downloaded_workflow = download_response.json()
+        return downloaded_workflow
+
+    def _show_workflow(self, workflow_id):
+        show_response = self._get( "workflows/%s" % workflow_id )
+        self._assert_status_code_is( show_response, 200 )
+        return show_response.json()
+
+    def _assert_looks_like_instance_workflow_representation(self, workflow):
+        self._assert_has_keys(
+            workflow,
+            'url',
+            'owner',
+            'inputs',
+            'annotation',
+            'steps'
+        )
+        for step in workflow["steps"].values():
+            self._assert_has_keys(
+                step,
+                'id',
+                'type',
+                'tool_id',
+                'tool_version',
+                'annotation',
+                'tool_inputs',
+                'input_steps',
+            )
+
+
+RunJobsSummary = namedtuple('RunJobsSummary', ['history_id', 'workflow_id', 'invocation_id', 'inputs', 'jobs'])
diff --git a/test/api/test_workflows_from_yaml.py b/test/api/test_workflows_from_yaml.py
new file mode 100644
index 0000000..cc26914
--- /dev/null
+++ b/test/api/test_workflows_from_yaml.py
@@ -0,0 +1,284 @@
+from __future__ import print_function
+
+import os
+
+from .test_workflows import BaseWorkflowsApiTestCase
+
+WORKFLOWS_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
+
+
+class WorkflowsFromYamlApiTestCase( BaseWorkflowsApiTestCase ):
+
+    def setUp( self ):
+        super( WorkflowsFromYamlApiTestCase, self ).setUp()
+
+    def test_simple_upload(self):
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - type: input
+    label: the_input
+  - tool_id: cat1
+    state:
+      input1:
+        $link: 0
+  - tool_id: cat1
+    state:
+      input1:
+        $link: 1#out_file1
+  - tool_id: random_lines1
+    label: random_line_label
+    state:
+      num_lines: 10
+      input:
+        $link: 2#out_file1
+      seed_source:
+        seed_source_selector: set_seed
+        seed: asdf
+""")
+        workflow = self._get("workflows/%s/download" % workflow_id).json()
+
+        tool_count = {'random_lines1': 0, 'cat1': 0}
+        input_found = False
+        for step in workflow['steps'].values():
+            step_type = step['type']
+            if step_type == "data_input":
+                assert step['label'] == 'the_input'
+                input_found = True
+            else:
+                tool_id = step['tool_id']
+                tool_count[tool_id] += 1
+                if tool_id == "random_lines1":
+                    assert step['label'] == "random_line_label"
+
+        assert input_found
+        assert tool_count['random_lines1'] == 1
+        assert tool_count['cat1'] == 2
+
+# FIXME:  This test fails on some machines due to (we're guessing) yaml loading
+# order being not guaranteed and inconsistent across platforms.  The workflow
+# yaml loader probably needs to enforce order using something like the
+# approach described here (a sketch follows the commented-out test below):
+# https://stackoverflow.com/questions/13297744/pyyaml-control-ordering-of-items-called-by-yaml-load
+#     def test_multiple_input( self ):
+#         history_id = self.dataset_populator.new_history()
+#         self._run_jobs("""
+# steps:
+#   - type: input
+#     label: input1
+#   - type: input
+#     label: input2
+#   - tool_id: cat_list
+#     state:
+#       input1:
+#       - $link: input1
+#       - $link: input2
+# test_data:
+#   input1: "hello world"
+#   input2: "123"
+# """, history_id=history_id)
+#         contents1 = self.dataset_populator.get_history_dataset_content(history_id)
+#         assert contents1 == "hello world\n123\n"
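+# A minimal sketch of that ordered-loading approach (assuming PyYAML; the
+# loader name is illustrative, not part of this module):
+#
+#     import yaml
+#     from collections import OrderedDict
+#
+#     class _OrderedLoader(yaml.SafeLoader):
+#         pass
+#
+#     def _construct_mapping(loader, node):
+#         loader.flatten_mapping(node)
+#         return OrderedDict(loader.construct_pairs(node))
+#
+#     _OrderedLoader.add_constructor(
+#         yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _construct_mapping)
+#
+#     as_python = yaml.load(has_yaml, _OrderedLoader)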
+
+    def test_simple_output_actions( self ):
+        history_id = self.dataset_populator.new_history()
+        self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+  - type: input
+    label: input1
+  - tool_id: cat1
+    label: first_cat
+    state:
+      input1:
+        $link: 0
+    outputs:
+       out_file1:
+         hide: true
+         rename: "the new value"
+  - tool_id: cat1
+    state:
+      input1:
+        $link: first_cat#out_file1
+test_data:
+  input1: "hello world"
+""", history_id=history_id)
+
+        details1 = self.dataset_populator.get_history_dataset_details(history_id, hid=2)
+        assert not details1["visible"]
+        assert details1["name"] == "the new value", details1
+        details2 = self.dataset_populator.get_history_dataset_details(history_id, hid=3)
+        assert details2["visible"]
+
+    def test_inputs_to_steps( self ):
+        history_id = self.dataset_populator.new_history()
+        self._run_jobs("""
+class: GalaxyWorkflow
+inputs:
+  - id: input1
+steps:
+  - tool_id: cat1
+    label: first_cat
+    state:
+      input1:
+        $link: input1
+      queries:
+        - input2:
+            $link: input1
+
+test_data:
+  input1: "hello world"
+""", history_id=history_id)
+        contents1 = self.dataset_populator.get_history_dataset_content(history_id)
+        self.assertEqual(contents1.strip(), "hello world\nhello world")
+
+    def test_outputs( self ):
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+inputs:
+  - id: input1
+outputs:
+  - id: wf_output_1
+    source: first_cat#out_file1
+steps:
+  - tool_id: cat1
+    label: first_cat
+    state:
+      input1:
+        $link: input1
+      queries:
+        - input2:
+            $link: input1
+
+test_data:
+  input1: "hello world"
+""")
+        workflow = self._get("workflows/%s/download" % workflow_id).json()
+        self.assertEquals(workflow["steps"]["1"]["workflow_outputs"][0]["output_name"], "out_file1")
+        self.assertEquals(workflow["steps"]["1"]["workflow_outputs"][0]["label"], "wf_output_1")
+
+    def test_subworkflow_simple( self ):
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+inputs:
+  - id: outer_input
+steps:
+  - tool_id: cat1
+    label: first_cat
+    state:
+      input1:
+        $link: outer_input
+  - run:
+      class: GalaxyWorkflow
+      inputs:
+        - id: inner_input
+      steps:
+        - tool_id: random_lines1
+          state:
+            num_lines: 1
+            input:
+              $link: inner_input
+            seed_source:
+              seed_source_selector: set_seed
+              seed: asdf
+    label: nested_workflow
+    connect:
+      inner_input: first_cat#out_file1
+
+test_data:
+  outer_input:
+    value: 1.bed
+    type: File
+""")
+        workflow = self._get("workflows/%s/download" % workflow_id).json()
+        by_label = self._steps_by_label(workflow)
+        if "nested_workflow" not in by_label:
+            template = "Workflow [%s] does not contain label 'nested_workflow'."
+            message = template % workflow
+            raise AssertionError(message)
+
+        subworkflow_step = by_label["nested_workflow"]
+        assert subworkflow_step["type"] == "subworkflow"
+        assert len(subworkflow_step["subworkflow"]["steps"]) == 2
+
+        subworkflow_connections = subworkflow_step["input_connections"]
+        assert len(subworkflow_connections) == 1
+        subworkflow_connection = subworkflow_connections["inner_input"]
+        assert subworkflow_connection["input_subworkflow_step_id"] == 0
+
+        workflow_reupload_id = self.import_workflow(workflow)["id"]
+        workflow_reupload = self._get("workflows/%s/download" % workflow_reupload_id).json()
+        by_label = self._steps_by_label(workflow_reupload)
+        subworkflow_step = by_label["nested_workflow"]
+        assert subworkflow_step["type"] == "subworkflow"
+        assert len(subworkflow_step["subworkflow"]["steps"]) == 2
+
+        subworkflow_connections = subworkflow_step["input_connections"]
+        assert len(subworkflow_connections) == 1
+        subworkflow_connection = subworkflow_connections["inner_input"]
+        assert subworkflow_connection["input_subworkflow_step_id"] == 0
+
+        # content = self.dataset_populator.get_history_dataset_content( history_id )
+        # self.assertEquals("chr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\n", content)
+
+    def test_pause( self ):
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - label: test_input
+    type: input
+  - label: first_cat
+    tool_id: cat1
+    state:
+      input1:
+        $link: test_input
+  - label: the_pause
+    type: pause
+    connect:
+      input:
+      - first_cat#out_file1
+  - label: second_cat
+    tool_id: cat1
+    state:
+      input1:
+        $link: the_pause
+""")
+        print(self._get("workflows/%s/download" % workflow_id).json())
+
+    def test_implicit_connections( self ):
+        workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+  - label: test_input
+    type: input
+  - label: first_cat
+    tool_id: cat1
+    state:
+      input1:
+        $link: test_input
+  - label: the_pause
+    type: pause
+    connect:
+      input:
+      - first_cat#out_file1
+  - label: second_cat
+    tool_id: cat1
+    state:
+      input1:
+        $link: the_pause
+  - label: third_cat
+    tool_id: cat1
+    connect:
+      $step: second_cat
+    state:
+      input1:
+        $link: test_input
+""")
+        workflow = self._get("workflows/%s/download" % workflow_id).json()
+        print(workflow)
+
+    def _steps_by_label(self, workflow_as_dict):
+        by_label = {}
+        for step in workflow_as_dict["steps"].values():
+            by_label[step['label']] = step
+        return by_label
diff --git a/test/api/workflows_format_2/README.txt b/test/api/workflows_format_2/README.txt
new file mode 100644
index 0000000..146f619
--- /dev/null
+++ b/test/api/workflows_format_2/README.txt
@@ -0,0 +1,12 @@
+Format 2 Workflows
+---------------------------------
+
+This module defines a high-level Galaxy workflow description deemed "Format 2". At this point, these workflows are defined entirely client side and
+transcoded into traditional (or Format 1?) Galaxy workflows.
+
+The traditional Galaxy workflow description is not meant to be concise, and it
+is neither readily human readable nor human writable. Format 2 addresses all
+three of these limitations.
+
+Format 2 is a highly experimental format and will change rapidly, in
+potentially backward-incompatible ways, until the code is merged into the
+Galaxy server and enabled by default.
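+
+For illustration, a minimal Format 2 workflow might look like the sketch
+below (the tool id and labels are illustrative; see the tests in
+test/api/test_workflows_from_yaml.py for working examples):
+
+    class: GalaxyWorkflow
+    inputs:
+      - id: input1
+    steps:
+      - tool_id: cat1
+        label: first_cat
+        state:
+          input1:
+            $link: input1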
diff --git a/test/api/workflows_format_2/__init__.py b/test/api/workflows_format_2/__init__.py
new file mode 100644
index 0000000..c5e73c5
--- /dev/null
+++ b/test/api/workflows_format_2/__init__.py
@@ -0,0 +1,11 @@
+""" This module defines the public interface or entry point for the
+Format 2 workflow code.
+"""
+from .interface import ImporterGalaxyInterface
+from .main import convert_and_import_workflow
+
+
+__all__ = (
+    'convert_and_import_workflow',
+    'ImporterGalaxyInterface',
+)
diff --git a/test/api/workflows_format_2/converter.py b/test/api/workflows_format_2/converter.py
new file mode 100644
index 0000000..05a9a31
--- /dev/null
+++ b/test/api/workflows_format_2/converter.py
@@ -0,0 +1,518 @@
+"""Functionality for converting a Format 2 workflow into a standard Galaxy workflow."""
+from __future__ import print_function
+
+import json
+import os
+import sys
+import uuid
+from collections import OrderedDict
+
+import yaml
+
+
+STEP_TYPES = [
+    "subworkflow",
+    "data_input",
+    "data_collection_input",
+    "tool",
+    "pause",
+    "parameter_input",
+]
+
+STEP_TYPE_ALIASES = {
+    'input': 'data_input',
+    'input_collection': 'data_collection_input',
+    'parameter': 'parameter_input',
+}
+
+RUN_ACTIONS_TO_STEPS = {
+    'GalaxyWorkflow': 'run_workflow_to_step',
+}
+
+
+def yaml_to_workflow(has_yaml, galaxy_interface, workflow_directory):
+    """Convert a Format 2 workflow into standard Galaxy format from supplied stream."""
+    as_python = yaml.load(has_yaml)
+    return python_to_workflow(as_python, galaxy_interface, workflow_directory)
+
+
+def python_to_workflow(as_python, galaxy_interface, workflow_directory):
+    """Convert a Format 2 workflow into standard Galaxy format from supplied dictionary."""
+    if workflow_directory is None:
+        workflow_directory = os.path.abspath(".")
+
+    conversion_context = ConversionContext(
+        galaxy_interface,
+        workflow_directory,
+    )
+    return _python_to_workflow(as_python, conversion_context)
+
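+# Example usage (a sketch; ``gi`` is any object implementing
+# ImporterGalaxyInterface and the file path is hypothetical):
+#
+#     with open("workflow.gxwf.yml") as f:
+#         native = yaml_to_workflow(f, gi, None)
+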
+
+def _python_to_workflow(as_python, conversion_context):
+
+    if not isinstance(as_python, dict):
+        raise Exception("This is not a valid Galaxy workflow definition.")
+
+    if "class" not in as_python:
+        raise Exception("This is not a valid Galaxy workflow definition; it must define a class.")
+
+    if as_python["class"] != "GalaxyWorkflow":
+        raise Exception("This is not a valid Galaxy workflow definition; 'class' must be 'GalaxyWorkflow'.")
+
+    _ensure_defaults(as_python, {
+        "a_galaxy_workflow": "true",
+        "format-version": "0.1",
+        "annotation": "",
+        "name": "Workflow",
+        "uuid": str(uuid.uuid4()),
+    })
+
+    steps = as_python["steps"]
+
+    # If an inputs section is defined, build steps for each
+    # and add to steps array.
+    if "inputs" in as_python:
+        inputs = as_python["inputs"]
+        convert_inputs_to_steps(inputs, steps)
+
+    if isinstance(steps, list):
+        steps_as_dict = OrderedDict()
+        for i, step in enumerate(steps):
+            steps_as_dict[str(i)] = step
+            if "id" not in step:
+                step["id"] = i
+
+            if "label" in step:
+                label = step["label"]
+                conversion_context.labels[label] = i
+
+            if "position" not in step:
+                # TODO: this really should be optional in Galaxy API.
+                step["position"] = {
+                    "left": 10 * i,
+                    "top": 10 * i
+                }
+
+        as_python["steps"] = steps_as_dict
+        steps = steps_as_dict
+
+    for step in steps.values():
+        step_type = step.get("type", None)
+        if "run" in step:
+            if step_type is not None:
+                raise Exception("Steps specified as run actions cannot specify a type.")
+            run_action = step.get("run")
+            if "@import" in run_action:
+                if len(run_action) > 1:
+                    raise Exception("@import must be only key if present.")
+
+                run_action_path = run_action["@import"]
+                runnable_path = os.path.join(conversion_context.workflow_directory, run_action_path)
+                with open(runnable_path, "r") as f:
+                    runnable_description = yaml.load(f)
+                    run_action = runnable_description
+
+            run_class = run_action["class"]
+            run_to_step_function = globals()[RUN_ACTIONS_TO_STEPS[run_class]]
+
+            run_to_step_function(conversion_context, step, run_action)
+            del step["run"]
+
+    for step in steps.values():
+        step_type = step.get("type", "tool")
+        step_type = STEP_TYPE_ALIASES.get(step_type, step_type)
+        if step_type not in STEP_TYPES:
+            raise Exception("Unknown step type encountered: %s" % step_type)
+        step["type"] = step_type
+        globals()["transform_%s" % step_type](conversion_context, step)
+
+    for output in as_python.get("outputs", []):
+        assert isinstance(output, dict), "Output definition must be dictionary"
+        assert "source" in output, "Output definition must specify source"
+
+        if "label" in output and "id" in output:
+            raise Exception("label and id are aliases for outputs, may only define one")
+        if "label" not in output and "id" not in output:
+            raise Exception("Output must define a label.")
+
+        raw_label = output.pop("label", None)
+        raw_id = output.pop("id", None)
+        label = raw_label or raw_id
+
+        source = output["source"]
+        id, output_name = conversion_context.step_output(source)
+        step = steps[str(id)]
+        if "workflow_output" not in step:
+            step["workflow_outputs"] = []
+
+        step["workflow_outputs"].append({
+            "output_name": output_name,
+            "label": label,
+            "uuid": output.get("uuid", None)
+        })
+
+    return as_python
+
+
+def convert_inputs_to_steps(inputs, steps):
+    new_steps = []
+    for input_def_raw in inputs:
+        input_def = input_def_raw.copy()
+
+        if "label" in input_def and "id" in input_def:
+            raise Exception("label and id are aliases for inputs, may only define one")
+        if "label" not in input_def and "id" not in input_def:
+            raise Exception("Input must define a label.")
+
+        raw_label = input_def.pop("label", None)
+        raw_id = input_def.pop("id", None)
+        label = raw_label or raw_id
+
+        if not label:
+            raise Exception("Input label must not be empty.")
+
+        input_type = input_def.pop("type", "data")
+        if input_type in ["File", "data", "data_input"]:
+            step_type = "data_input"
+        elif input_type in ["collection", "data_collection", "data_collection_input"]:
+            step_type = "data_collection_input"
+        elif input_type in ["text", "integer", "float", "color", "boolean"]:
+            step_type = "parameter_input"
+            input_def["parameter_type"] = input_type
+        else:
+            raise Exception("Input type must be a data file or collection.")
+
+        step_def = input_def
+        step_def.update({
+            "type": step_type,
+            "label": label,
+        })
+        new_steps.append(step_def)
+
+    for i, new_step in enumerate(new_steps):
+        steps.insert(i, new_step)
+
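+# For example (labels illustrative), an inputs section such as
+#
+#     inputs:
+#       - id: input1
+#       - label: text_param
+#         type: text
+#
+# is prepended to the steps list as a "data_input" step labeled "input1"
+# followed by a "parameter_input" step labeled "text_param" with
+# parameter_type "text".
+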
+
+def run_workflow_to_step(conversion_context, step, run_action):
+    subworkflow_conversion_context = conversion_context.get_subworkflow_conversion_context(step)
+
+    step["type"] = "subworkflow"
+    step["subworkflow"] = _python_to_workflow(
+        run_action,
+        subworkflow_conversion_context,
+    )
+
+
+def transform_data_input(context, step):
+    transform_input(context, step, default_name="Input dataset")
+
+
+def transform_data_collection_input(context, step):
+    transform_input(context, step, default_name="Input dataset collection")
+
+
+def transform_parameter_input(context, step):
+    transform_input(context, step, default_name="input_parameter")
+
+
+def transform_input(context, step, default_name):
+    default_name = step.get("label", default_name)
+    _ensure_defaults(step, {
+        "annotation": "",
+    })
+
+    _ensure_inputs_connections(step)
+
+    if "inputs" not in step:
+        step["inputs"] = [{}]
+
+    step_inputs = step["inputs"][0]
+    if "name" in step_inputs:
+        name = step_inputs["name"]
+    else:
+        name = default_name
+
+    _ensure_defaults(step_inputs, {
+        "name": name,
+        "description": "",
+    })
+    tool_state = {
+        "name": name
+    }
+    for attrib in ["collection_type", "parameter_type", "optional"]:
+        if attrib in step:
+            tool_state[attrib] = step[attrib]
+
+    _populate_tool_state(step, tool_state)
+
+
+def transform_pause(context, step, default_name="Pause for dataset review"):
+    default_name = step.get("label", default_name)
+    _ensure_defaults(step, {
+        "annotation": "",
+    })
+
+    _ensure_inputs_connections(step)
+
+    if "inputs" not in step:
+        step["inputs"] = [{}]
+
+    step_inputs = step["inputs"][0]
+    if "name" in step_inputs:
+        name = step_inputs["name"]
+    else:
+        name = default_name
+
+    _ensure_defaults(step_inputs, {
+        "name": name,
+    })
+    tool_state = {
+        "name": name
+    }
+
+    connect = _init_connect_dict(step)
+    _populate_input_connections(context, step, connect)
+    _populate_tool_state(step, tool_state)
+
+
+def transform_subworkflow(context, step):
+    _ensure_defaults(step, {
+        "annotation": "",
+    })
+
+    _ensure_inputs_connections(step)
+
+    tool_state = {
+    }
+
+    connect = _init_connect_dict(step)
+    _populate_input_connections(context, step, connect)
+    _populate_tool_state(step, tool_state)
+
+
+def transform_tool(context, step):
+    if "tool_id" not in step:
+        raise Exception("Tool steps must define a tool_id.")
+
+    _ensure_defaults(step, {
+        "annotation": "",
+        "name": step['tool_id'],
+        "post_job_actions": {},
+        "tool_version": None,
+    })
+    post_job_actions = step["post_job_actions"]
+
+    tool_state = {
+        # TODO: Galaxy should not require tool state actually specify a __page__.
+        "__page__": 0,
+    }
+
+    connect = _init_connect_dict(step)
+
+    def append_link(key, value):
+        if key not in connect:
+            connect[key] = []
+        connect[key].append(value["$link"])
+
+    def replace_links(value, key=""):
+        if _is_link(value):
+            append_link(key, value)
+            # The value is filled in by the connection, so to force late
+            # validation of the field we just mark it as a RuntimeValue.
+            # It would arguably be better if this were some other value
+            # dedicated to this purpose (e.g. a fictitious
+            # {"__class__": "ConnectedValue"}) that could be further
+            # validated by Galaxy.
+            return {"__class__": "RuntimeValue"}
+        if isinstance(value, dict):
+            new_values = {}
+            for k, v in value.items():
+                new_key = _join_prefix(key, k)
+                new_values[k] = replace_links(v, new_key)
+            return new_values
+        elif isinstance(value, list):
+            new_values = []
+            for i, v in enumerate(value):
+                # If we are a repeat we need to modify the key
+                # but not if values are actually $links.
+                if _is_link(v):
+                    append_link(key, v)
+                    new_values.append(None)
+                else:
+                    new_key = "%s_%d" % (key, i)
+                    new_values.append(replace_links(v, new_key))
+            return new_values
+        else:
+            return value
+
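+    # Illustrative example: state={"input1": {"$link": "first_cat#out_file1"}}
+    # records connect={"input1": ["first_cat#out_file1"]} and leaves a
+    # {"__class__": "RuntimeValue"} placeholder in the tool state.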
+    if "state" in step:
+        step_state = step["state"]
+        step_state = replace_links(step_state)
+
+        for key, value in step_state.items():
+            tool_state[key] = json.dumps(value)
+        del step["state"]
+
+    # Fill in input connections
+    _populate_input_connections(context, step, connect)
+
+    _populate_tool_state(step, tool_state)
+
+    # Handle outputs.
+    if "outputs" in step:
+        for name, output in step.get("outputs", {}).items():
+            if output.get("hide", False):
+                action_name = "HideDatasetAction%s" % name
+                action = _action(
+                    "HideDatasetAction",
+                    name,
+                )
+                post_job_actions[action_name] = action
+
+            if output.get("rename", None):
+                new_name = output.get("rename")
+                action_name = "RenameDatasetAction%s" % name
+                arguments = dict(newname=new_name)
+                action = _action(
+                    "RenameDatasetAction",
+                    name,
+                    arguments,
+                )
+                post_job_actions[action_name] = action
+
+            if output.get("delete_intermediate_datasets", None):
+                action_name = "DeleteIntermediatesAction%s" % name
+                arguments = dict()
+                action = _action(
+                    "DeleteIntermediatesAction",
+                    name,
+                    arguments,
+                )
+                post_job_actions[action_name] = action
+
+        del step["outputs"]
+
+
+def run_tool_to_step(conversion_context, step, run_action):
+    tool_description = conversion_context.galaxy_interface.import_tool(
+        run_action
+    )
+    step["type"] = "tool"
+    step["tool_id"] = tool_description["tool_id"]
+    step["tool_version"] = tool_description["tool_version"]
+    step["tool_hash"] = tool_description["tool_hash"]
+
+
+class ConversionContext(object):
+
+    def __init__(self, galaxy_interface, workflow_directory):
+        self.labels = {}
+        self.subworkflow_conversion_contexts = {}
+        self.galaxy_interface = galaxy_interface
+        self.workflow_directory = workflow_directory
+
+    def step_id(self, label_or_id):
+        if label_or_id in self.labels:
+            id = self.labels[label_or_id]
+        else:
+            id = label_or_id
+        return int(id)
+
+    def step_output(self, value):
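+        # Resolve a "step#output" reference, e.g. "first_cat#out_file1" ->
+        # (step_id("first_cat"), "out_file1"); a bare label or id defaults
+        # to the output named "output".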
+        value_parts = str(value).split("#")
+        if len(value_parts) == 1:
+            value_parts.append("output")
+        id = self.step_id(value_parts[0])
+        return id, value_parts[1]
+
+    def get_subworkflow_conversion_context(self, step):
+        step_id = step["id"]
+        if step_id not in self.subworkflow_conversion_contexts:
+            subworkflow_conversion_context = ConversionContext(
+                self.galaxy_interface,
+                self.workflow_directory,
+            )
+            self.subworkflow_conversion_contexts[step_id] = subworkflow_conversion_context
+        return self.subworkflow_conversion_contexts[step_id]
+
+
+def _action(type, name, arguments={}):
+    return {
+        "action_arguments": arguments,
+        "action_type": type,
+        "output_name": name,
+    }
+
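+# Illustrative example: _action("RenameDatasetAction", "out_file1",
+# {"newname": "foo"}) builds the post job action dictionary attached to
+# steps by transform_tool above.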
+
+def _is_link(value):
+    return isinstance(value, dict) and "$link" in value
+
+
+def _join_prefix(prefix, key):
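+    # e.g. _join_prefix("seed_source", "seed") -> "seed_source|seed";
+    # an empty prefix returns the key unchanged.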
+    if prefix:
+        new_key = "%s|%s" % (prefix, key)
+    else:
+        new_key = key
+    return new_key
+
+
+def _init_connect_dict(step):
+    if "connect" not in step:
+        step["connect"] = {}
+
+    connect = step["connect"]
+    del step["connect"]
+    return connect
+
+
+def _populate_input_connections(context, step, connect):
+    _ensure_inputs_connections(step)
+    input_connections = step["input_connections"]
+    is_subworkflow_step = step.get("type") == "subworkflow"
+
+    for key, values in connect.items():
+        input_connection_value = []
+        if not isinstance(values, list):
+            values = [values]
+        for value in values:
+            if not isinstance(value, dict):
+                if key == "$step":
+                    value += "#__NO_INPUT_OUTPUT_NAME__"
+                id, output_name = context.step_output(value)
+                value = {"id": id, "output_name": output_name}
+                if is_subworkflow_step:
+                    subworkflow_conversion_context = context.get_subworkflow_conversion_context(step)
+                    input_subworkflow_step_id = subworkflow_conversion_context.step_id(key)
+                    value["input_subworkflow_step_id"] = input_subworkflow_step_id
+            input_connection_value.append(value)
+        if key == "$step":
+            key = "__NO_INPUT_OUTPUT_NAME__"
+        input_connections[key] = input_connection_value
+
+
+def _ensure_inputs_connections(step):
+    if "input_connections" not in step:
+        step["input_connections"] = {}
+
+
+def _ensure_defaults(in_dict, defaults):
+    for key, value in defaults.items():
+        if key not in in_dict:
+            in_dict[key] = value
+
+
+def _populate_tool_state(step, tool_state):
+    step["tool_state"] = json.dumps(tool_state)
+
+
+def main(argv):
+    print(json.dumps(yaml_to_workflow(argv[0])))
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
+
+__all__ = (
+    'yaml_to_workflow',
+    'python_to_workflow',
+)
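
For orientation, a minimal sketch of driving the converter directly. This is
not part of the diff; it assumes test/api is on sys.path, that yaml_to_workflow
accepts Format 2 YAML text (as main.py further below suggests), and the
workflow document itself is invented:

    import json

    from workflows_format_2.converter import yaml_to_workflow

    EXAMPLE_WORKFLOW = (
        "steps:\n"
        "  - type: input\n"
        "    label: input_fastq\n"
    )

    # Convert the Format 2 description into a native .ga-style dict.
    native = yaml_to_workflow(EXAMPLE_WORKFLOW)
    print(json.dumps(native, indent=2))
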
diff --git a/test/api/workflows_format_2/interface.py b/test/api/workflows_format_2/interface.py
new file mode 100644
index 0000000..5c7ee34
--- /dev/null
+++ b/test/api/workflows_format_2/interface.py
@@ -0,0 +1,75 @@
+"""This module contains an interface and implementation describing Galaxy interactions used by gxformat2.
+
+The interface is :class:`ImporterGalaxyInterface` and the default
+implementation based on `bioblend <http://bioblend.readthedocs.io/>`__
+is :class:`BioBlendImporterGalaxyInterface`.
+"""
+import abc
+
+import bioblend
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ImporterGalaxyInterface(object):
+    """An abstract interface describing Galaxy operations used by gxformat2.
+
+    Specifically containing definitions of operations required to load
+    workflows into Galaxy.
+    """
+
+    @abc.abstractmethod
+    def import_workflow(self, workflow, **kwds):
+        """Import a workflow via POST /api/workflows or comparable interface into Galaxy."""
+
+
+class BioBlendImporterGalaxyInterface(object):
+    """Implementation of :class:`ImporterGalaxyInterface` using bioblend."""
+
+    def __init__(self, **kwds):
+        """Build a :class:`bioblend.GalaxyInstance` from supplied arguments."""
+        url = None
+
+        admin_key = None
+        admin_gi = None
+        if "admin_gi" in kwds:
+            admin_gi = kwds["admin_gi"]
+        elif "gi" in kwds:
+            admin_gi = kwds["gi"]
+        elif "url" in kwds and "admin_key" in kwds:
+            url = kwds["url"]
+            admin_key = kwds["admin_key"]
+
+        if admin_gi is None:
+            assert url is not None
+            assert admin_key is not None
+            admin_gi = bioblend.GalaxyInstance(url=url, key=admin_key)
+
+        user_key = None
+        user_gi = None
+        if "user_gi" in kwds:
+            user_gi = kwds["user_gi"]
+        elif "gi" in kwds:
+            user_gi = kwds["gi"]
+        elif "url" in kwds and "user_key" in kwds:
+            url = kwds["url"]
+            user_key = kwds["user_key"]
+
+        if user_gi is None:
+            assert url is not None
+            assert user_key is not None
+            user_gi = bioblend.GalaxyInstance(url=url, key=user_key)
+
+        self._admin_gi = admin_gi
+        self._user_gi = user_gi
+
+    def import_workflow(self, workflow, **kwds):
+        """Import Galaxy workflow using instance :class:`bioblend.GalaxyInstance` object."""
+        return self._user_gi.workflows.import_workflow_json(
+            workflow,
+            **kwds
+        )
+
+    def import_tool(self, tool_representation):
+        """Import Galaxy tool using instance :class:`bioblend.GalaxyInstance` object."""
+        pass
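
A hedged sketch of wiring this interface to a server by URL plus API keys (the
URL and keys are placeholders; pre-built bioblend GalaxyInstance objects could
equally be passed via the gi/admin_gi/user_gi keywords):

    from workflows_format_2.interface import BioBlendImporterGalaxyInterface

    # Placeholder credentials - assumes a reachable Galaxy server.
    interface = BioBlendImporterGalaxyInterface(
        url="http://localhost:8080",
        admin_key="<master-api-key>",
        user_key="<user-api-key>",
    )
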
diff --git a/test/api/workflows_format_2/main.py b/test/api/workflows_format_2/main.py
new file mode 100644
index 0000000..7745856
--- /dev/null
+++ b/test/api/workflows_format_2/main.py
@@ -0,0 +1,42 @@
+"""Module containing :func:`convert_and_import_workflow`."""
+import os
+
+import yaml
+
+from .converter import python_to_workflow, yaml_to_workflow
+from .interface import BioBlendImporterGalaxyInterface
+
+
+def convert_and_import_workflow(has_workflow, **kwds):
+    """Function is main entry for conversion and import of Format 2 workflows."""
+    galaxy_interface = kwds.get("galaxy_interface", None)
+    if galaxy_interface is None:
+        galaxy_interface = BioBlendImporterGalaxyInterface(**kwds)
+
+    source_type = kwds.get("source_type", None)
+    workflow_directory = kwds.get("workflow_directory", None)
+    if source_type == "path":
+        workflow_path = has_workflow
+        if workflow_directory is None:
+            workflow_directory = os.path.dirname(has_workflow)
+        with open(workflow_path, "r") as f:
+            has_workflow = yaml.safe_load(f)  # safe_load - workflow files should not need arbitrary YAML tags
+
+    if workflow_directory is not None:
+        workflow_directory = os.path.abspath(workflow_directory)
+
+    if isinstance(has_workflow, dict):
+        workflow = python_to_workflow(has_workflow, galaxy_interface, workflow_directory)
+    else:
+        workflow = yaml_to_workflow(has_workflow, galaxy_interface, workflow_directory)
+
+    publish = kwds.get("publish", False)
+    import_kwds = {}
+    if publish:
+        import_kwds["publish"] = True
+    return galaxy_interface.import_workflow(workflow, **import_kwds)
+
+
+__all__ = (
+    'convert_and_import_workflow',
+)
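
End to end, the pieces combine roughly as follows (path, URL, and keys are
placeholders; a running Galaxy is assumed):

    from workflows_format_2.main import convert_and_import_workflow

    response = convert_and_import_workflow(
        "example_workflow.gxwf.yml",  # placeholder path to a Format 2 file
        source_type="path",
        url="http://localhost:8080",
        admin_key="<master-api-key>",
        user_key="<user-api-key>",
        publish=True,
    )
    print(response)
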
diff --git a/test/base/__init__.py b/test/base/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/base/api.py b/test/base/api.py
new file mode 100644
index 0000000..3cd3142
--- /dev/null
+++ b/test/base/api.py
@@ -0,0 +1,123 @@
+from contextlib import contextmanager
+
+from six.moves.urllib.parse import urlencode
+
+from .api_asserts import (
+    assert_error_code_is,
+    assert_has_keys,
+    assert_not_has_keys,
+    assert_status_code_is,
+)
+from .api_util import get_master_api_key, get_user_api_key
+from .interactor import GalaxyInteractorApi as BaseInteractor
+# TODO: We don't need all of TwillTestCase, strip down to a common super class
+# shared by API and Twill test cases.
+from .twilltestcase import TwillTestCase
+
+TEST_USER = "user at bx.psu.edu"
+DEFAULT_OTHER_USER = "otheruser at bx.psu.edu"  # A second user for API testing.
+
+
+# TODO: Allow these to point at existing Galaxy instances.
+class ApiTestCase( TwillTestCase ):
+
+    def setUp( self ):
+        super( ApiTestCase, self ).setUp( )
+        self.user_api_key = get_user_api_key()
+        self.master_api_key = get_master_api_key()
+        self.galaxy_interactor = ApiTestInteractor( self )
+
+    def _api_url( self, path, params=None, use_key=None ):
+        if not params:
+            params = {}
+        url = "%s/api/%s" % ( self.url, path )
+        if use_key:
+            params[ "key" ] = self.galaxy_interactor.api_key
+        query = urlencode( params )
+        if query:
+            url = "%s?%s" % ( url, query )
+        return url
+
+    def _setup_user( self, email, password=None ):
+        self.galaxy_interactor.ensure_user_with_email( email, password=password )
+        users = self._get( "users", admin=True ).json()
+        user = [ user for user in users if user["email"] == email ][0]
+        return user
+
+    def _setup_user_get_key( self, email ):
+        self.galaxy_interactor.ensure_user_with_email( email )
+        users = self._get( "users", admin=True ).json()
+        user = [ user for user in users if user["email"] == email ][0]
+        return self._post( "users/%s/api_key" % user[ "id" ], admin=True ).json()
+
+    @contextmanager
+    def _different_user( self, email=DEFAULT_OTHER_USER ):
+        """ Use in test cases to switch get/post operations to act as new user,
+
+            with self._different_user( "other_user at bx.psu.edu" ):
+                self._get( "histories" )  # Gets other_user at bx.psu.edu histories.
+        """
+        original_api_key = self.user_api_key
+        original_interactor_key = self.galaxy_interactor.api_key
+        new_key = self._setup_user_get_key( email )
+        try:
+            self.user_api_key = new_key
+            self.galaxy_interactor.api_key = new_key
+            yield
+        finally:
+            self.user_api_key = original_api_key
+            self.galaxy_interactor.api_key = original_interactor_key
+
+    def _get( self, *args, **kwds ):
+        return self.galaxy_interactor.get( *args, **kwds )
+
+    def _post( self, *args, **kwds ):
+        return self.galaxy_interactor.post( *args, **kwds )
+
+    def _delete( self, *args, **kwds ):
+        return self.galaxy_interactor.delete( *args, **kwds )
+
+    def _patch( self, *args, **kwds ):
+        return self.galaxy_interactor.patch( *args, **kwds )
+
+    def _assert_status_code_is( self, response, expected_status_code ):
+        assert_status_code_is( response, expected_status_code )
+
+    def _assert_has_keys( self, response, *keys ):
+        assert_has_keys( response, *keys )
+
+    def _assert_not_has_keys( self, response, *keys ):
+        assert_not_has_keys( response, *keys )
+
+    def _assert_error_code_is( self, response, error_code ):
+        assert_error_code_is( response, error_code )
+
+    def _random_key( self ):  # Used for invalid request testing...
+        return "1234567890123456"
+
+    _assert_has_key = _assert_has_keys
+
+
+class ApiTestInteractor( BaseInteractor ):
+    """ Specialized variant of the API interactor (originally developed for
+    tool functional tests) for testing the API generally.
+    """
+
+    def __init__( self, test_case ):
+        super( ApiTestInteractor, self ).__init__( test_case, test_user=TEST_USER )
+
+    # In this variant the lower-level get and post methods are meant to be used
+    # directly to test the API - instead of relying on higher-level constructs
+    # for specific pieces of the API (the way it is done with the variant for
+    # tool testing).
+    def get( self, *args, **kwds ):
+        return self._get( *args, **kwds )
+
+    def post( self, *args, **kwds ):
+        return self._post( *args, **kwds )
+
+    def delete( self, *args, **kwds ):
+        return self._delete( *args, **kwds )
+
+    def patch( self, *args, **kwds ):
+        return self._patch( *args, **kwds )
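
For illustration, a sketch of a test built on these helpers; the test class is
invented and assumes the functional test environment these base classes expect:

    class ExampleHistoriesApiTestCase( ApiTestCase ):

        def test_histories_index( self ):
            response = self._get( "histories" )
            self._assert_status_code_is( response, 200 )
            # Re-issue the request as a second, automatically provisioned user.
            with self._different_user( "other_user@bx.psu.edu" ):
                other_response = self._get( "histories" )
                self._assert_status_code_is( other_response, 200 )
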
diff --git a/test/base/api_asserts.py b/test/base/api_asserts.py
new file mode 100644
index 0000000..1987885
--- /dev/null
+++ b/test/base/api_asserts.py
@@ -0,0 +1,36 @@
+""" Utility methods for making assertions about Galaxy API responses, etc...
+"""
+ASSERT_FAIL_ERROR_CODE = "Expected Galaxy error code %d, obtained %d"
+ASSERT_FAIL_STATUS_CODE = "Request status code (%d) was not expected value %d. Body was %s"
+
+
+def assert_status_code_is( response, expected_status_code ):
+    response_status_code = response.status_code
+    if expected_status_code != response_status_code:
+        try:
+            body = response.json()
+        except Exception:
+            body = "INVALID JSON RESPONSE <%s>" % response.content
+        assertion_message = ASSERT_FAIL_STATUS_CODE % ( response_status_code, expected_status_code, body )
+        raise AssertionError( assertion_message )
+
+
+def assert_has_keys( response, *keys ):
+    for key in keys:
+        assert key in response, "Response [%s] does not contain key [%s]" % ( response, key )
+
+
+def assert_not_has_keys( response, *keys ):
+    for key in keys:
+        assert key not in response, "Response [%s] contains invalid key [%s]" % ( response, key )
+
+
+def assert_error_code_is( response, error_code ):
+    if hasattr( response, "json" ):
+        response = response.json()
+    assert_has_keys( response, "err_code" )
+    err_code = response[ "err_code" ]
+    assert err_code == int( error_code ), ASSERT_FAIL_ERROR_CODE % ( err_code, int( error_code ) )
+
+
+assert_has_key = assert_has_keys
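
Typical use against a requests-style response object might look like this (the
endpoint and expected keys are illustrative, and the API key is a placeholder):

    import requests

    response = requests.get( "http://localhost:8080/api/histories?key=<user-api-key>" )
    assert_status_code_is( response, 200 )
    first_history = response.json()[ 0 ]
    assert_has_keys( first_history, "id", "name" )
    assert_not_has_keys( first_history, "some_unexpected_key" )
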
diff --git a/test/base/api_util.py b/test/base/api_util.py
new file mode 100644
index 0000000..a07c843
--- /dev/null
+++ b/test/base/api_util.py
@@ -0,0 +1,24 @@
+import os
+
+DEFAULT_GALAXY_MASTER_API_KEY = "TEST123"
+DEFAULT_GALAXY_USER_API_KEY = None
+
+
+def get_master_api_key():
+    """ Test master API key to use for functional test. This key should be
+    configured as a master API key and should be able to create additional
+    users and keys.
+    """
+    for key in ["GALAXY_CONFIG_MASTER_API_KEY", "GALAXY_CONFIG_OVERRIDE_MASTER_API_KEY"]:
+        value = os.environ.get(key, None)
+        if value:
+            return value
+    return DEFAULT_GALAXY_MASTER_API_KEY
+
+
+def get_user_api_key():
+    """ Test user API key to use for functional tests. If set, this should drive
+    API-based testing - if not set, the master API key should be used to create
+    a new user and API key for tests.
+    """
+    return os.environ.get( "GALAXY_TEST_USER_API_KEY", DEFAULT_GALAXY_USER_API_KEY )
diff --git a/test/base/driver_util.py b/test/base/driver_util.py
new file mode 100644
index 0000000..1219121
--- /dev/null
+++ b/test/base/driver_util.py
@@ -0,0 +1,707 @@
+"""Scripts for drivers of Galaxy functional tests."""
+
+import collections
+import httplib
+import json
+import logging
+import os
+import random
+import shutil
+import socket
+import sys
+import tempfile
+import threading
+import time
+
+import nose.config
+import nose.core
+import nose.loader
+import nose.plugins.manager
+from paste import httpserver
+
+from functional import database_contexts
+from galaxy.app import UniverseApplication as GalaxyUniverseApplication
+from galaxy.util import asbool, download_to_file
+from galaxy.util.properties import load_app_properties
+from galaxy.web import buildapp
+from galaxy.webapps.tool_shed.app import UniverseApplication as ToolshedUniverseApplication
+
+from .api_util import get_master_api_key, get_user_api_key
+from .instrument import StructuredTestDataPlugin
+from .nose_util import run
+from .test_logging import logging_config_file
+from .tool_shed_util import parse_tool_panel_config
+
+galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
+DEFAULT_WEB_HOST = "localhost"
+GALAXY_TEST_DIRECTORY = os.path.join(galaxy_root, "test")
+GALAXY_TEST_FILE_DIR = "test-data,https://github.com/galaxyproject/galaxy-test-data.git"
+TOOL_SHED_TEST_DATA = os.path.join(GALAXY_TEST_DIRECTORY, "shed_functional", "test_data")
+FRAMEWORK_TOOLS_DIR = os.path.join(GALAXY_TEST_DIRECTORY, "functional", "tools")
+FRAMEWORK_UPLOAD_TOOL_CONF = os.path.join(FRAMEWORK_TOOLS_DIR, "upload_tool_conf.xml")
+FRAMEWORK_SAMPLE_TOOLS_CONF = os.path.join(FRAMEWORK_TOOLS_DIR, "samples_tool_conf.xml")
+FRAMEWORK_DATATYPES_CONF = os.path.join(FRAMEWORK_TOOLS_DIR, "sample_datatypes_conf.xml")
+MIGRATED_TOOL_PANEL_CONFIG = 'config/migrated_tools_conf.xml'
+INSTALLED_TOOL_PANEL_CONFIGS = [
+    os.environ.get('GALAXY_TEST_SHED_TOOL_CONF', 'config/shed_tool_conf.xml')
+]
+
+DEFAULT_LOCALES = "en"
+
+log = logging.getLogger("test_driver")
+
+
+def setup_tool_shed_tmp_dir():
+    tool_shed_test_tmp_dir = os.environ.get('TOOL_SHED_TEST_TMP_DIR', None)
+    if tool_shed_test_tmp_dir is None:
+        tool_shed_test_tmp_dir = tempfile.mkdtemp()
+    # Here's the directory where everything happens.  Temporary directories are created within this directory to contain
+    # the hgweb.config file, the database, new repositories, etc.  Since the tool shed browses repository contents via HTTP,
+    # the full path to the temporary directory where the repositories are located cannot contain invalid URL characters.
+    os.environ[ 'TOOL_SHED_TEST_TMP_DIR' ] = tool_shed_test_tmp_dir
+    return tool_shed_test_tmp_dir
+
+
+def get_galaxy_test_tmp_dir():
+    """Create test directory for use by Galaxy server being setup for testing."""
+    galaxy_test_tmp_dir = os.environ.get('GALAXY_TEST_TMP_DIR', None)
+    if galaxy_test_tmp_dir is None:
+        galaxy_test_tmp_dir = tempfile.mkdtemp()
+    return galaxy_test_tmp_dir
+
+
+def configure_environment():
+    """Hack up environment for test cases."""
+    # No-op if already set; remove if unused.
+    if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
+        os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = DEFAULT_LOCALES
+
+    # Used by get_filename in tool shed's twilltestcase.
+    if "TOOL_SHED_TEST_FILE_DIR" not in os.environ:
+        os.environ["TOOL_SHED_TEST_FILE_DIR"] = TOOL_SHED_TEST_DATA
+
+
+def build_logger():
+    """Build a logger for test driver script."""
+    return log
+
+
+def setup_galaxy_config(
+    tmpdir,
+    use_test_file_dir=False,
+    default_install_db_merged=True,
+    default_tool_data_table_config_path=None,
+    default_shed_tool_data_table_config=None,
+    default_job_config_file=None,
+    enable_tool_shed_check=False,
+    default_tool_conf=None,
+    shed_tool_conf=None,
+    datatypes_conf=None,
+    update_integrated_tool_panel=False,
+):
+    """Setup environment and build config for test Galaxy instance."""
+    if not os.path.exists(tmpdir):
+        os.makedirs(tmpdir)
+    file_path = os.path.join(tmpdir, 'files')
+    template_cache_path = tempfile.mkdtemp(prefix='compiled_templates_', dir=tmpdir)
+    new_file_path = tempfile.mkdtemp(prefix='new_files_path_', dir=tmpdir )
+    job_working_directory = tempfile.mkdtemp(prefix='job_working_directory_', dir=tmpdir)
+
+    if use_test_file_dir:
+        galaxy_test_file_dir = os.environ.get('GALAXY_TEST_FILE_DIR', GALAXY_TEST_FILE_DIR)
+        os.environ['GALAXY_TEST_FILE_DIR'] = galaxy_test_file_dir
+        first_test_file_dir = galaxy_test_file_dir.split(",")[0]
+        if not os.path.isabs(first_test_file_dir):
+            first_test_file_dir = os.path.join(galaxy_root, first_test_file_dir)
+        library_import_dir = first_test_file_dir
+        import_dir = os.path.join(first_test_file_dir, 'users')
+        if os.path.exists(import_dir):
+            user_library_import_dir = import_dir
+        else:
+            user_library_import_dir = None
+    else:
+        user_library_import_dir = None
+        library_import_dir = None
+    job_config_file = os.environ.get('GALAXY_TEST_JOB_CONFIG_FILE', default_job_config_file)
+    tool_path = os.environ.get('GALAXY_TEST_TOOL_PATH', 'tools')
+    tool_dependency_dir = os.environ.get('GALAXY_TOOL_DEPENDENCY_DIR', None)
+    if tool_dependency_dir is None:
+        tool_dependency_dir = tempfile.mkdtemp(dir=tmpdir, prefix="tool_dependencies")
+    tool_data_table_config_path = _tool_data_table_config_path(default_tool_data_table_config_path)
+    default_data_manager_config = 'config/data_manager_conf.xml.sample'
+    for data_manager_config in ['config/data_manager_conf.xml', 'data_manager_conf.xml' ]:
+        if os.path.exists( data_manager_config ):
+            default_data_manager_config = data_manager_config
+    data_manager_config_file = "%s,test/functional/tools/sample_data_manager_conf.xml" % default_data_manager_config
+    master_api_key = get_master_api_key()
+
+    # Data Manager testing temp path
+    # For storing Data Manager outputs and .loc files so that real ones don't get clobbered
+    galaxy_data_manager_data_path = tempfile.mkdtemp(prefix='data_manager_tool-data', dir=tmpdir)
+
+    tool_conf = os.environ.get('GALAXY_TEST_TOOL_CONF', default_tool_conf)
+    if tool_conf is None:
+        # As a fallback always at least allow upload.
+        tool_conf = FRAMEWORK_UPLOAD_TOOL_CONF
+
+    if shed_tool_conf is not None:
+        tool_conf = "%s,%s" % (tool_conf, shed_tool_conf)
+
+    shed_tool_data_table_config = default_shed_tool_data_table_config
+    if shed_tool_data_table_config is None:
+        shed_tool_data_table_config = 'config/shed_tool_data_table_conf.xml'
+
+    config = dict(
+        admin_users='test@bx.psu.edu',
+        allow_library_path_paste=True,
+        allow_user_creation=True,
+        allow_user_deletion=True,
+        api_allow_run_as='test@bx.psu.edu',
+        auto_configure_logging=logging_config_file is None,
+        check_migrate_tools=False,
+        cleanup_job='onsuccess',
+        data_manager_config_file=data_manager_config_file,
+        enable_beta_tool_formats=True,
+        file_path=file_path,
+        galaxy_data_manager_data_path=galaxy_data_manager_data_path,
+        id_secret='changethisinproductiontoo',
+        job_config_file=job_config_file,
+        job_queue_workers=5,
+        job_working_directory=job_working_directory,
+        library_import_dir=library_import_dir,
+        log_destination="stdout",
+        new_file_path=new_file_path,
+        master_api_key=master_api_key,
+        running_functional_tests=True,
+        shed_tool_data_table_config=shed_tool_data_table_config,
+        template_cache_path=template_cache_path,
+        template_path='templates',
+        tool_config_file=tool_conf,
+        tool_data_table_config_path=tool_data_table_config_path,
+        tool_parse_help=False,
+        tool_path=tool_path,
+        update_integrated_tool_panel=update_integrated_tool_panel,
+        use_tasked_jobs=True,
+        use_heartbeat=False,
+        user_library_import_dir=user_library_import_dir,
+    )
+    config.update(database_conf(tmpdir))
+    config.update(install_database_conf(tmpdir, default_merged=default_install_db_merged))
+    if datatypes_conf is not None:
+        config['datatypes_config_file'] = datatypes_conf
+    if enable_tool_shed_check:
+        config["enable_tool_shed_check"] = enable_tool_shed_check
+        config["hours_between_check"] = 0.001
+    if tool_dependency_dir:
+        config["tool_dependency_dir"] = tool_dependency_dir
+        # Used by shed's twill dependency stuff - todo read from
+        # Galaxy's config API.
+        os.environ["GALAXY_TEST_TOOL_DEPENDENCY_DIR"] = tool_dependency_dir
+    return config
+
+
+def _tool_data_table_config_path(default_tool_data_table_config_path=None):
+    tool_data_table_config_path = os.environ.get('GALAXY_TEST_TOOL_DATA_TABLE_CONF', default_tool_data_table_config_path)
+    if tool_data_table_config_path is None:
+        # ... otherwise find whatever Galaxy would use as the default and
+        # append the sample data for functional tests to that.
+        default_tool_data_config = 'config/tool_data_table_conf.xml.sample'
+        for tool_data_config in ['config/tool_data_table_conf.xml', 'tool_data_table_conf.xml' ]:
+            if os.path.exists( tool_data_config ):
+                default_tool_data_config = tool_data_config
+        tool_data_table_config_path = '%s,test/functional/tool-data/sample_tool_data_tables.xml' % default_tool_data_config
+    return tool_data_table_config_path
+
+
+def nose_config_and_run( argv=None, env=None, ignore_files=[], plugins=None ):
+    """Setup a nose context and run tests.
+
+    Tests are specified by argv (defaulting to sys.argv).
+    """
+    if env is None:
+        env = os.environ
+    if plugins is None:
+        plugins = nose.plugins.manager.DefaultPluginManager()
+    if argv is None:
+        argv = sys.argv
+
+    test_config = nose.config.Config(
+        env=os.environ,
+        ignoreFiles=ignore_files,
+        plugins=plugins,
+    )
+
+    # Add custom plugin to produce JSON data used by planemo.
+    test_config.plugins.addPlugin( StructuredTestDataPlugin() )
+    test_config.configure( argv )
+
+    result = run( test_config )
+
+    success = result.wasSuccessful()
+    return success
+
+
+def copy_database_template( source, db_path ):
+    """Copy a 'clean' sqlite template database.
+
+    The source may be a file path or a URL; it is copied to the given sqlite database path.
+    """
+    db_path_dir = os.path.dirname(db_path)
+    if not os.path.exists(db_path_dir):
+        os.makedirs(db_path_dir)
+    if os.path.exists(source):
+        shutil.copy(source, db_path)
+        assert os.path.exists(db_path)
+    elif source.lower().startswith(("http://", "https://", "ftp://")):
+        download_to_file(source, db_path)
+    else:
+        raise Exception( "Failed to copy database template from source %s" % source )
+
+
+def database_conf(db_path, prefix="GALAXY"):
+    """Find (and populate if needed) Galaxy database connection."""
+    database_auto_migrate = False
+    dburi_var = "%s_TEST_DBURI" % prefix
+    if dburi_var in os.environ:
+        database_connection = os.environ[dburi_var]
+    else:
+        default_db_filename = "%s.sqlite" % prefix.lower()
+        template_var = "%s_TEST_DB_TEMPLATE" % prefix
+        db_path = os.path.join(db_path, default_db_filename)
+        if template_var in os.environ:
+            # Middle ground between recreating a completely new
+            # database and pointing at existing database with
+            # GALAXY_TEST_DBURI. The former requires a lot of setup
+            # time, the latter results in test failures in certain
+            # cases (namely tool shed tests expecting clean database).
+            copy_database_template(os.environ[template_var], db_path)
+            database_auto_migrate = True
+        database_connection = 'sqlite:///%s' % db_path
+    config = {
+        "database_connection": database_connection,
+        "database_auto_migrate": database_auto_migrate
+    }
+    if not database_connection.startswith("sqlite://"):
+        config["database_engine_option_max_overflow"] = "20"
+        config["database_engine_option_pool_size"] = "10"
+    return config
+
+
+def install_database_conf(db_path, default_merged=False):
+    if 'GALAXY_TEST_INSTALL_DBURI' in os.environ:
+        install_galaxy_database_connection = os.environ['GALAXY_TEST_INSTALL_DBURI']
+    elif asbool(os.environ.get('GALAXY_TEST_INSTALL_DB_MERGED', default_merged)):
+        install_galaxy_database_connection = None
+    else:
+        install_galaxy_db_path = os.path.join(db_path, 'install.sqlite')
+        install_galaxy_database_connection = 'sqlite:///%s' % install_galaxy_db_path
+    conf = {}
+    if install_galaxy_database_connection is not None:
+        conf["install_database_connection"] = install_galaxy_database_connection
+    return conf
+
+
+def database_files_path(test_tmpdir, prefix="GALAXY"):
+    """Create a mock database/ directory like in GALAXY_ROOT.
+
+    Use prefix to select the environment variable (TOOL_SHED_TEST_DBPATH or
+    GALAXY_TEST_DBPATH) that overrides this location when set.
+    """
+    environ_var = "%s_TEST_DBPATH" % prefix
+    if environ_var in os.environ:
+        db_path = os.environ[environ_var]
+    else:
+        tempdir = tempfile.mkdtemp(dir=test_tmpdir)
+        db_path = os.path.join(tempdir, 'database')
+    return db_path
+
+
+def _get_static_settings():
+    """Configuration required for Galaxy static middleware.
+
+    Returns dictionary of the settings necessary for a galaxy App
+    to be wrapped in the static middleware.
+
+    This mainly consists of the filesystem locations of url-mapped
+    static resources.
+    """
+    static_dir = os.path.join(galaxy_root, "static")
+
+    # TODO: these should be copied from config/galaxy.ini
+    return dict(
+        static_enabled=True,
+        static_cache_time=360,
+        static_dir=static_dir,
+        static_images_dir=os.path.join(static_dir, 'images', ''),
+        static_favicon_dir=os.path.join(static_dir, 'favicon.ico'),
+        static_scripts_dir=os.path.join(static_dir, 'scripts', ''),
+        static_style_dir=os.path.join(static_dir, 'june_2007_style', 'blue'),
+        static_robots_txt=os.path.join(static_dir, 'robots.txt'),
+    )
+
+
+def get_webapp_global_conf():
+    """Get the global_conf dictionary sent to ``app_factory``."""
+    # (was originally sent 'dict()') - nothing here for now except static settings
+    global_conf = dict()
+    global_conf.update( _get_static_settings() )
+    return global_conf
+
+
+def wait_for_http_server(host, port):
+    """Wait for an HTTP server to boot up."""
+    # Test if the server is up
+    for i in range( 10 ):
+        # directly test the app, not the proxy
+        conn = httplib.HTTPConnection(host, port)
+        conn.request( "GET", "/" )
+        if conn.getresponse().status == 200:
+            break
+        time.sleep( 0.1 )
+    else:
+        template = "Test HTTP server on host %s and port %s did not return '200 OK' after 10 tries"
+        message = template % (host, port)
+        raise Exception(message)
+
+
+def serve_webapp(webapp, port=None, host=None):
+    """Serve the webapp on a recommend port or a free one.
+
+    Return the port the webapp is running one.
+    """
+    server = None
+    if port is not None:
+        server = httpserver.serve( webapp, host=host, port=port, start_loop=False )
+    else:
+        random.seed()
+        for i in range( 0, 9 ):
+            try:
+                port = str( random.randint( 8000, 10000 ) )
+                server = httpserver.serve( webapp, host=host, port=port, start_loop=False )
+                break
+            except socket.error as e:
+                if e[0] == 98:
+                    continue
+                raise
+        else:
+            raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( 8000, 1000 ) )
+
+    t = threading.Thread( target=server.serve_forever )
+    t.start()
+
+    return server, port
+
+
+def cleanup_directory(tempdir):
+    """Clean up temporary files used by test unless GALAXY_TEST_NO_CLEANUP is set.
+
+    Also respect TOOL_SHED_TEST_NO_CLEANUP for legacy reasons.
+    """
+    skip_cleanup = "GALAXY_TEST_NO_CLEANUP" in os.environ or "TOOL_SHED_TEST_NO_CLEANUP" in os.environ
+    if skip_cleanup:
+        log.info( "GALAXY_TEST_NO_CLEANUP is on. Temporary files in %s" % tempdir )
+        return
+    try:
+        if os.path.exists(tempdir):
+            shutil.rmtree(tempdir)
+    except Exception:
+        pass
+
+
+def setup_shed_tools_for_test(app, tmpdir, testing_migrated_tools, testing_installed_tools):
+    """Modify Galaxy app's toolbox for migrated or installed tool tests."""
+    # Store a jsonified dictionary of tool_id : GALAXY_TEST_FILE_DIR pairs.
+    galaxy_tool_shed_test_file = os.path.join(tmpdir, 'shed_tools_dict')
+    shed_tools_dict = {}
+    if testing_migrated_tools:
+        has_test_data, shed_tools_dict = parse_tool_panel_config(MIGRATED_TOOL_PANEL_CONFIG, shed_tools_dict)
+    elif testing_installed_tools:
+        for shed_tool_config in INSTALLED_TOOL_PANEL_CONFIGS:
+            has_test_data, shed_tools_dict = parse_tool_panel_config(shed_tool_config, shed_tools_dict)
+    # Persist the shed_tools_dict to the galaxy_tool_shed_test_file.
+    with open(galaxy_tool_shed_test_file, 'w') as shed_tools_file:
+        shed_tools_file.write(json.dumps(shed_tools_dict))
+    if not os.path.isabs(galaxy_tool_shed_test_file):
+        galaxy_tool_shed_test_file = os.path.join(galaxy_root, galaxy_tool_shed_test_file)
+    os.environ['GALAXY_TOOL_SHED_TEST_FILE'] = galaxy_tool_shed_test_file
+    if testing_installed_tools:
+        # TODO: Do this without modifying app - that is a pretty violation
+        # of Galaxy's abstraction - we shouldn't require app at all let alone
+        # be modifying it.
+
+        tool_configs = app.config.tool_configs
+        # Eliminate the migrated_tool_panel_config from the app's tool_configs, append the list of installed_tool_panel_configs,
+        # and reload the app's toolbox.
+        relative_migrated_tool_panel_config = os.path.join(app.config.root, MIGRATED_TOOL_PANEL_CONFIG)
+        if relative_migrated_tool_panel_config in tool_configs:
+            tool_configs.remove(relative_migrated_tool_panel_config)
+        for installed_tool_panel_config in INSTALLED_TOOL_PANEL_CONFIGS:
+            tool_configs.append(installed_tool_panel_config)
+        from galaxy import tools  # delay import because this brings in so many modules for small tests # noqa: E402
+        app.toolbox = tools.ToolBox(tool_configs, app.config.tool_path, app)
+
+
+def build_galaxy_app(simple_kwargs):
+    """Build a Galaxy app object from a simple keyword arguments.
+
+    Construct paste style complex dictionary and use load_app_properties so
+    Galaxy override variables are respected. Also setup "global" references
+    to sqlalchemy database context for Galaxy and install databases.
+    """
+    log.info("Galaxy database connection: %s", simple_kwargs["database_connection"])
+    simple_kwargs['global_conf'] = get_webapp_global_conf()
+    simple_kwargs['global_conf']['__file__'] = "config/galaxy.ini.sample"
+    simple_kwargs = load_app_properties(
+        kwds=simple_kwargs
+    )
+    # Build the Universe Application
+    app = GalaxyUniverseApplication( **simple_kwargs )
+    log.info( "Embedded Galaxy application started" )
+    database_contexts.galaxy_context = app.model.context
+    database_contexts.install_context = app.install_model.context
+    return app
+
+
+def build_shed_app(simple_kwargs):
+    """Build a Galaxy app object from a simple keyword arguments.
+
+    Construct paste style complex dictionary. Also setup "global" reference
+    to sqlalchemy database context for tool shed database.
+    """
+    log.info("Tool shed database connection: %s", simple_kwargs["database_connection"])
+    # TODO: Simplify global_conf to match Galaxy above...
+    simple_kwargs['__file__'] = 'tool_shed_wsgi.ini.sample'
+    simple_kwargs['global_conf'] = get_webapp_global_conf()
+
+    app = ToolshedUniverseApplication( **simple_kwargs )
+    database_contexts.tool_shed_context = app.model.context
+    log.info( "Embedded Toolshed application started" )
+    return app
+
+
+ServerWrapper = collections.namedtuple('ServerWrapper', ['app', 'server', 'name', 'host', 'port'])
+
+
+def _stop(self):
+    if self.server is not None:
+        log.info("Shutting down embedded %s web server" % self.name)
+        self.server.server_close()
+        log.info("Embedded web server %s stopped" % self.name)
+
+    if self.app is not None:
+        log.info("Stopping application %s" % self.name)
+        self.app.shutdown()
+        log.info("Application %s stopped." % self.name)
+
+
+ServerWrapper.stop = _stop
+
+
+def launch_server(app, webapp_factory, kwargs, prefix="GALAXY"):
+    """Launch a web server for a given app using supplied factory.
+
+    Consistently read either GALAXY_TEST_HOST and GALAXY_TEST_PORT or
+    TOOL_SHED_TEST_HOST and TOOL_SHED_TEST_PORT and ensure these are
+    all set after this method has been called.
+    """
+    name = prefix.lower()
+
+    host_env_key = "%s_TEST_HOST" % prefix
+    port_env_key = "%s_TEST_PORT" % prefix
+    host = os.environ.get(host_env_key, DEFAULT_WEB_HOST)
+    port = os.environ.get(port_env_key, None)
+
+    webapp = webapp_factory(
+        kwargs[ 'global_conf' ],
+        app=app,
+        use_translogger=False,
+        static_enabled=True
+    )
+    server, port = serve_webapp(
+        webapp,
+        host=host, port=port
+    )
+    os.environ[host_env_key] = host
+    os.environ[port_env_key] = port
+    wait_for_http_server(host, port)
+    log.info("Embedded web server for %s started" % name)
+    return ServerWrapper(
+        app, server, name, host, port
+    )
+
+
+class TestDriver(object):
+    """Responsible for the life-cycle of a Galaxy-style functional test.
+
+    Sets up servers, configures tests, runs nose, and tears things
+    down. This is somewhat like a Python TestCase - but different
+    because it is meant to provide a main() endpoint.
+    """
+
+    def __init__(self):
+        """Setup tracked resources."""
+        self.server_wrappers = []
+        self.temp_directories = []
+
+    def setup(self):
+        """Called before tests are built."""
+
+    def build_tests(self):
+        """After environment is setup, setup nose tests."""
+
+    def tear_down(self):
+        """Cleanup resources tracked by this object."""
+        for server_wrapper in self.server_wrappers:
+            server_wrapper.stop()
+        for temp_directory in self.temp_directories:
+            cleanup_directory(temp_directory)
+
+    def run(self):
+        """Driver whole test.
+
+        Setup environment, build tests (if needed), run test,
+        and finally cleanup resources.
+        """
+        configure_environment()
+        self.setup()
+        self.build_tests()
+        try:
+            success = nose_config_and_run()
+            return 0 if success else 1
+        except Exception:
+            log.info("Failure running tests")
+            raise
+        finally:
+            log.info( "Shutting down")
+            self.tear_down()
+
+
+class GalaxyTestDriver(TestDriver):
+    """Instantial a Galaxy-style nose TestDriver for testing Galaxy."""
+
+    testing_shed_tools = False
+
+    def setup(self, config_object=None):
+        """Setup a Galaxy server for functional test (if needed).
+
+        Configuration options can be specified as attributes on the supplied
+        ```config_object``` (defaults to self).
+        """
+        if config_object is None:
+            config_object = self
+        self.external_galaxy = os.environ.get('GALAXY_TEST_EXTERNAL', None)
+        self.galaxy_test_tmp_dir = get_galaxy_test_tmp_dir()
+        self.temp_directories.append(self.galaxy_test_tmp_dir)
+
+        testing_shed_tools = getattr(config_object, "testing_shed_tools", False)
+
+        if getattr(config_object, "framework_tool_and_types", False):
+            default_tool_conf = FRAMEWORK_SAMPLE_TOOLS_CONF
+            datatypes_conf_override = FRAMEWORK_DATATYPES_CONF
+        else:
+            default_tool_conf = getattr(config_object, "default_tool_conf", None)
+            datatypes_conf_override = getattr(config_object, "datatypes_conf_override", None)
+
+        if self.external_galaxy is None:
+            tempdir = tempfile.mkdtemp(dir=self.galaxy_test_tmp_dir)
+            # Configure the database path.
+            galaxy_db_path = database_files_path(tempdir)
+            # Allow config object to specify a config dict or a method to produce
+            # one - otherwise just read the properties above and use the default
+            # implementation from this file.
+            galaxy_config = getattr(config_object, "galaxy_config", None)
+            if hasattr(galaxy_config, '__call__'):
+                galaxy_config = galaxy_config()
+            if galaxy_config is None:
+                setup_galaxy_config_kwds = dict(
+                    use_test_file_dir=not testing_shed_tools,
+                    default_install_db_merged=True,
+                    default_tool_conf=default_tool_conf,
+                    datatypes_conf=datatypes_conf_override,
+                )
+                galaxy_config = setup_galaxy_config(
+                    galaxy_db_path,
+                    **setup_galaxy_config_kwds
+                )
+
+                handle_galaxy_config_kwds = getattr(
+                    config_object, "handle_galaxy_config_kwds", None
+                )
+                if handle_galaxy_config_kwds is not None:
+                    handle_galaxy_config_kwds(galaxy_config)
+
+            # ---- Build Application --------------------------------------------------
+            self.app = build_galaxy_app(galaxy_config)
+            server_wrapper = launch_server(
+                self.app,
+                buildapp.app_factory,
+                galaxy_config,
+            )
+            self.server_wrappers.append(server_wrapper)
+            log.info("Functional tests will be run against %s:%s" % (server_wrapper.host, server_wrapper.port))
+        else:
+            log.info("Functional tests will be run against %s" % self.external_galaxy)
+
+    def setup_shed_tools(self, testing_migrated_tools=False, testing_installed_tools=True):
+        setup_shed_tools_for_test(
+            self.app,
+            self.galaxy_test_tmp_dir,
+            testing_migrated_tools,
+            testing_installed_tools
+        )
+
+    def build_tool_tests(self, testing_shed_tools=None):
+        if self.app is None:
+            return
+
+        if testing_shed_tools is None:
+            testing_shed_tools = getattr(self, "testing_shed_tools", False)
+
+        # We must make sure that functional.test_toolbox is always imported after
+        # database_contexts.galaxy_context is set (which happens when the embedded
+        # Galaxy app is built in setup above). If functional.test_toolbox is
+        # imported before database_contexts.galaxy_context is set, sa_session will
+        # be None in all methods that use it.
+        import functional.test_toolbox
+        functional.test_toolbox.toolbox = self.app.toolbox
+        # When testing data managers, do not test toolbox.
+        functional.test_toolbox.build_tests(
+            app=self.app,
+            testing_shed_tools=testing_shed_tools,
+            master_api_key=get_master_api_key(),
+            user_api_key=get_user_api_key(),
+        )
+        return functional.test_toolbox
+
+    def run_tool_test(self, tool_id, index=0):
+        import functional.test_toolbox
+        functional.test_toolbox.toolbox = self.app.toolbox
+        tool = self.app.toolbox.get_tool(tool_id)
+        testdef = tool.tests[index]
+        test_case_cls = functional.test_toolbox.ToolTestCase
+        test_case = test_case_cls(methodName="setUp")  # NO-OP
+        test_case.shed_tool_id = None
+        test_case.master_api_key = get_master_api_key()
+        test_case.user_api_key = get_user_api_key()
+        test_case.setUp()
+        test_case.do_it(testdef)
+
+
+def drive_test(test_driver_class):
+    """Instantiate driver class, run, and exit appropriately."""
+    sys.exit(test_driver_class().run())
+
+
+__all__ = (
+    "copy_database_template",
+    "build_logger",
+    "drive_test",
+    "FRAMEWORK_UPLOAD_TOOL_CONF",
+    "FRAMEWORK_SAMPLE_TOOLS_CONF",
+    "FRAMEWORK_DATATYPES_CONF",
+    "database_conf",
+    "get_webapp_global_conf",
+    "nose_config_and_run",
+    "setup_galaxy_config",
+    "TestDriver",
+    "wait_for_http_server",
+)
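
A sketch of a concrete driver script built on the pieces above (the class name
and module path are invented; it mirrors how framework tool tests configure
themselves and assumes an embedded, non-external Galaxy):

    from base.driver_util import drive_test, GalaxyTestDriver

    class ExampleFunctionalTestDriver( GalaxyTestDriver ):
        # Use the framework sample tool and datatype configs defined above.
        framework_tool_and_types = True

        def build_tests( self ):
            self.build_tool_tests()

    if __name__ == "__main__":
        drive_test( ExampleFunctionalTestDriver )
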
diff --git a/test/base/instrument.py b/test/base/instrument.py
new file mode 100644
index 0000000..a515527
--- /dev/null
+++ b/test/base/instrument.py
@@ -0,0 +1,82 @@
+""" Utilities to help instrument tool tests.
+
+Includes a structured data nose plugin that allows storing arbitrary structured
+data on a per-test-case basis - used by tool tests to store inputs,
+output problems, job tests, etc... but could easily be used by other test
+types in a different way.
+"""
+
+import json
+import threading
+
+from nose.plugins import Plugin
+
+NO_JOB_DATA = object()
+JOB_DATA = threading.local()
+JOB_DATA.new = True
+JOB_DATA.data = NO_JOB_DATA
+
+
+def register_job_data(data):
+    if not JOB_DATA.new:
+        return
+    JOB_DATA.data = data
+    JOB_DATA.new = False
+
+
+def fetch_job_data():
+    try:
+        if JOB_DATA.new:
+            return NO_JOB_DATA
+        else:
+            return JOB_DATA.data
+    finally:
+        JOB_DATA.new = True
+
+
+class StructuredTestDataPlugin( Plugin ):
+    name = 'structureddata'
+
+    def options(self, parser, env):
+        super(StructuredTestDataPlugin, self).options(parser, env=env)
+        parser.add_option(
+            '--structured-data-file', action='store',
+            dest='structured_data_file', metavar="FILE",
+            default=env.get('NOSE_STRUCTURED_DATA', 'structured_test_data.json'),
+            help=("Path to JSON file to store the Galaxy structured data report in."
+                  "Default is structured_test_data.json in the working directory "
+                  "[NOSE_STRUCTURED_DATA]"))
+
+    def configure(self, options, conf):
+        super(StructuredTestDataPlugin, self).configure(options, conf)
+        self.conf = conf
+        if not self.enabled:
+            return
+        self.tests = []
+        self.structured_data_report_file = open(options.structured_data_file, 'w')
+
+    def finalize(self, result):
+        pass
+
+    def _handle_result(self, test, *args, **kwds):
+        job_data = fetch_job_data()
+        id = test.id()
+        has_data = job_data is not NO_JOB_DATA
+        entry = {
+            'id': id,
+            'has_data': has_data,
+            'data': job_data if has_data else None,
+        }
+        self.tests.append(entry)
+
+    addError = _handle_result
+    addFailure = _handle_result
+    addSuccess = _handle_result
+
+    def report(self, stream):
+        report_obj = {
+            'version': '0.1',
+            'tests': self.tests,
+        }
+        json.dump(report_obj, self.structured_data_report_file)
+        self.structured_data_report_file.close()
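
The thread-local handshake above means the first registration per test wins and
fetching re-arms the slot; a short sketch of the intended protocol:

    register_job_data( { "tool_id": "cat1", "status": "success" } )  # recorded
    register_job_data( { "tool_id": "ignored" } )  # dropped until a fetch occurs

    data = fetch_job_data()  # returns the first payload and re-arms the slot
    assert data == { "tool_id": "cat1", "status": "success" }
    assert fetch_job_data() is NO_JOB_DATA
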
diff --git a/test/base/integration_util.py b/test/base/integration_util.py
new file mode 100644
index 0000000..76db3cf
--- /dev/null
+++ b/test/base/integration_util.py
@@ -0,0 +1,63 @@
+"""Utilities for constructing Galaxy integration tests.
+
+Tests that start an actual Galaxy server with a particular configuration in
+order to test something that cannot be tested with the default functional/api
+testing configuration.
+"""
+from unittest import TestCase
+
+from .driver_util import GalaxyTestDriver
+
+NO_APP_MESSAGE = "test_case._app called though no Galaxy has been configured."
+
+
+class IntegrationTestCase(TestCase):
+    """Unit test case with utilities for spinning up Galaxy."""
+
+    @classmethod
+    def setUpClass(cls):
+        """Configure and start Galaxy for a test."""
+        cls._app_available = False
+        cls._test_driver = GalaxyTestDriver()
+        cls._prepare_galaxy()
+        cls._test_driver.setup(config_object=cls)
+        cls._app_available = True
+        cls._configure_app()
+
+    @classmethod
+    def tearDownClass(cls):
+        """Shutdown Galaxy server and cleanup temp directory."""
+        cls._test_driver.tear_down()
+        cls._app_available = False
+
+    @property
+    def _app(self):
+        assert self._app_available, NO_APP_MESSAGE
+        return self._test_driver.app
+
+    @property
+    def _tempdir(self):
+        return self._test_driver.galaxy_test_tmp_dir
+
+    @classmethod
+    def _prepare_galaxy(cls):
+        """Extension point for subclasses called before Galaxy is launched."""
+
+    @classmethod
+    def _configure_app(cls):
+        """Extension point for subclasses called after Galaxy is launched.
+
+        ```self._app``` can be used to access the Galaxy core app.
+        """
+
+    @classmethod
+    def handle_galaxy_config_kwds(cls, galaxy_config_kwds):
+        """Extension point for subclasses to modify arguments used to configure Galaxy.
+
+        This method will be passed the keyword argument pairs used to build the
+        Galaxy Config object and can modify the Galaxy instance created for
+        the test as needed.
+        """
+
+    def _run_tool_test(self, *args, **kwargs):
+        return self._test_driver.run_tool_test(*args, **kwargs)
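
A sketch of the intended subclassing pattern - cleanup_job is a real Galaxy
option, but the test case itself is invented and requires a full Galaxy
checkout to actually run:

    from base.integration_util import IntegrationTestCase

    class ExampleConfigIntegrationTestCase( IntegrationTestCase ):

        @classmethod
        def handle_galaxy_config_kwds( cls, galaxy_config_kwds ):
            # Run the embedded Galaxy with job cleanup disabled.
            galaxy_config_kwds[ "cleanup_job" ] = "never"

        def test_app_sees_setting( self ):
            assert self._app.config.cleanup_job == "never"
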
diff --git a/test/base/interactor.py b/test/base/interactor.py
new file mode 100644
index 0000000..feac086
--- /dev/null
+++ b/test/base/interactor.py
@@ -0,0 +1,496 @@
+from __future__ import print_function
+
+import os
+import re
+import time
+from json import dumps
+from logging import getLogger
+
+from requests import delete, get, patch, post
+from six import StringIO, text_type
+
+from galaxy import util
+from galaxy.tools.parser.interface import TestCollectionDef
+from galaxy.util.bunch import Bunch
+from galaxy.util.odict import odict
+
+log = getLogger( __name__ )
+
+# Off by default because it can pound the database pretty heavily
+# and result in sqlite errors on larger tests or larger numbers of
+# tests.
+VERBOSE_ERRORS = util.asbool( os.environ.get( "GALAXY_TEST_VERBOSE_ERRORS", False ) )
+UPLOAD_ASYNC = util.asbool( os.environ.get( "GALAXY_TEST_UPLOAD_ASYNC", True ) )
+ERROR_MESSAGE_DATASET_SEP = "--------------------------------------"
+
+
+def build_interactor( test_case, type="api" ):
+    interactor_class = GALAXY_INTERACTORS[ type ]
+    return interactor_class( test_case )
+
+
+def stage_data_in_history( galaxy_interactor, all_test_data, history, shed_tool_id=None ):
+    # Upload any needed files
+    upload_waits = []
+
+    if UPLOAD_ASYNC:
+        for test_data in all_test_data:
+            upload_waits.append( galaxy_interactor.stage_data_async( test_data, history, shed_tool_id ) )
+        for upload_wait in upload_waits:
+            upload_wait()
+    else:
+        for test_data in all_test_data:
+            upload_wait = galaxy_interactor.stage_data_async( test_data, history, shed_tool_id )
+            upload_wait()
+
+
+class GalaxyInteractorApi( object ):
+
+    def __init__( self, twill_test_case, test_user=None ):
+        self.twill_test_case = twill_test_case
+        self.api_url = "%s/api" % twill_test_case.url.rstrip("/")
+        self.master_api_key = twill_test_case.master_api_key
+        self.api_key = self.__get_user_key( twill_test_case.user_api_key, twill_test_case.master_api_key, test_user=test_user )
+        self.uploads = {}
+
+    def verify_output( self, history_id, jobs, output_data, output_testdef, shed_tool_id, maxseconds ):
+        outfile = output_testdef.outfile
+        attributes = output_testdef.attributes
+        name = output_testdef.name
+        self.wait_for_jobs( history_id, jobs, maxseconds )
+        hid = self.__output_id( output_data )
+        # TODO: Twill version verifies dataset is 'ok' in here.
+        self.verify_output_dataset( history_id=history_id, hda_id=hid, outfile=outfile, attributes=attributes, shed_tool_id=shed_tool_id )
+
+        primary_datasets = attributes.get( 'primary_datasets', {} )
+        if primary_datasets:
+            job_id = self._dataset_provenance( history_id, hid )[ "job_id" ]
+            outputs = self._get( "jobs/%s/outputs" % ( job_id ) ).json()
+
+        for designation, ( primary_outfile, primary_attributes ) in primary_datasets.items():
+            primary_output = None
+            for output in outputs:
+                if output[ "name" ] == '__new_primary_file_%s|%s__' % ( name, designation ):
+                    primary_output = output
+                    break
+
+            if not primary_output:
+                msg_template = "Failed to find primary dataset with designation [%s] for output with name [%s]"
+                msg_args = ( designation, name )
+                raise Exception( msg_template % msg_args )
+
+            primary_hda_id = primary_output[ "dataset" ][ "id" ]
+            self.verify_output_dataset( history_id, primary_hda_id, primary_outfile, primary_attributes, shed_tool_id=shed_tool_id )
+
+    def wait_for_jobs( self, history_id, jobs, maxseconds ):
+        for job in jobs:
+            self.wait_for_job( job[ 'id' ], history_id, maxseconds )
+
+    def verify_output_dataset( self, history_id, hda_id, outfile, attributes, shed_tool_id ):
+        fetcher = self.__dataset_fetcher( history_id )
+        self.twill_test_case.verify_hid(
+            outfile,
+            hda_id=hda_id,
+            attributes=attributes,
+            dataset_fetcher=fetcher,
+            shed_tool_id=shed_tool_id
+        )
+        self._verify_metadata( history_id, hda_id, attributes )
+
+    def _verify_metadata( self, history_id, hid, attributes ):
+        """Check dataset metadata.
+
+        ftype on output maps to `file_ext` on the hda's API description, `name`, `info`,
+        and `dbkey` all map to the API description directly. Other metadata attributes
+        are assumed to be datatype-specific and mapped with a prefix of `metadata_`.
+        """
+        metadata = attributes.get( 'metadata', {} ).copy()
+        for key, value in metadata.copy().items():
+            if key not in ['name', 'info']:
+                new_key = "metadata_%s" % key
+                metadata[ new_key ] = metadata[ key ]
+                del metadata[ key ]
+            elif key == "info":
+                metadata[ "misc_info" ] = metadata[ "info" ]
+                del metadata[ "info" ]
+        expected_file_type = attributes.get( 'ftype', None )
+        if expected_file_type:
+            metadata[ "file_ext" ] = expected_file_type
+
+        if metadata:
+            time.sleep(5)
+            dataset = self._get( "histories/%s/contents/%s" % ( history_id, hid ) ).json()
+            for key, value in metadata.items():
+                try:
+                    dataset_value = dataset.get( key, None )
+
+                    def compare(val, expected):
+                        if text_type(val) != text_type(expected):
+                            msg = "Dataset metadata verification for [%s] failed, expected [%s] but found [%s]. Dataset API value was [%s]."
+                            msg_params = ( key, value, dataset_value, dataset )
+                            msg = msg % msg_params
+                            raise Exception( msg )
+
+                    if isinstance(dataset_value, list):
+                        value = text_type(value).split(",")
+                        if len(value) != len(dataset_value):
+                            msg = "Dataset metadata verification for [%s] failed, expected [%s] but found [%s], lists differ in length. Dataset API value was [%s]."
+                            msg_params = ( key, value, dataset_value, dataset )
+                            msg = msg % msg_params
+                            raise Exception( msg )
+                        for val, expected in zip(dataset_value, value):
+                            compare(val, expected)
+                    else:
+                        compare(dataset_value, value)
+                except KeyError:
+                    msg = "Failed to verify dataset metadata, metadata key [%s] was not found." % key
+                    raise Exception( msg )
+
+    def wait_for_job( self, job_id, history_id, maxseconds ):
+        self.twill_test_case.wait_for( lambda: not self.__job_ready( job_id, history_id ), maxseconds=maxseconds)
+
+    def get_job_stdio( self, job_id ):
+        job_stdio = self.__get_job_stdio( job_id ).json()
+        return job_stdio
+
+    def __get_job( self, job_id ):
+        return self._get( 'jobs/%s' % job_id )
+
+    def __get_job_stdio( self, job_id ):
+        return self._get( 'jobs/%s?full=true' % job_id )
+
+    def new_history( self ):
+        history_json = self._post( "histories", {"name": "test_history"} ).json()
+        return history_json[ 'id' ]
+
+    def __output_id( self, output_data ):
+        # Allow data structure coming out of tools API - {id: <id>, output_name: <name>, etc...}
+        # or simple id as comes out of workflow API.
+        try:
+            output_id = output_data.get( 'id' )
+        except AttributeError:
+            output_id = output_data
+        return output_id
+
+    def stage_data_async( self, test_data, history_id, shed_tool_id, async=True ):
+        fname = test_data[ 'fname' ]
+        tool_input = {
+            "file_type": test_data[ 'ftype' ],
+            "dbkey": test_data[ 'dbkey' ],
+        }
+        for elem in test_data.get('metadata', []):
+            tool_input["files_metadata|%s" % elem.get( 'name' )] = elem.get( 'value' )
+
+        composite_data = test_data[ 'composite_data' ]
+        if composite_data:
+            files = {}
+            for i, composite_file in enumerate( composite_data ):
+                file_name = self.twill_test_case.get_filename( composite_file.get( 'value' ), shed_tool_id=shed_tool_id )
+                files["files_%s|file_data" % i] = open( file_name, 'rb' )
+                tool_input.update({
+                    # "files_%d|NAME" % i: name,
+                    "files_%d|type" % i: "upload_dataset",
+                    # TODO:
+                    # "files_%d|space_to_tab" % i: composite_file.get( 'space_to_tab', False )
+                })
+            name = test_data[ 'name' ]
+        else:
+            file_name = self.twill_test_case.get_filename( fname, shed_tool_id=shed_tool_id )
+            name = test_data.get( 'name', None )
+            if not name:
+                name = os.path.basename( file_name )
+
+            tool_input.update({
+                "files_0|NAME": name,
+                "files_0|type": "upload_dataset",
+            })
+            files = {
+                "files_0|file_data": open( file_name, 'rb')
+            }
+        submit_response_object = self.__submit_tool( history_id, "upload1", tool_input, extra_data={"type": "upload_dataset"}, files=files )
+        submit_response = submit_response_object.json()
+        try:
+            dataset = submit_response["outputs"][0]
+        except KeyError:
+            raise Exception(submit_response)
+        # raise Exception(str(dataset))
+        hid = dataset['id']
+        self.uploads[ os.path.basename(fname) ] = self.uploads[ fname ] = self.uploads[ name ] = {"src": "hda", "id": hid}
+        return self.__wait_for_history( history_id )
+
+    def run_tool( self, testdef, history_id ):
+        # We need to handle the case where we've uploaded a valid compressed file since the upload
+        # tool will have uncompressed it on the fly.
+
+        inputs_tree = testdef.inputs.copy()
+        for key, value in inputs_tree.items():
+            values = [value] if not isinstance(value, list) else value
+            new_values = []
+            for value in values:
+                if isinstance( value, TestCollectionDef ):
+                    hdca_id = self._create_collection( history_id, value )
+                    new_values = [ dict( src="hdca", id=hdca_id ) ]
+                elif value in self.uploads:
+                    new_values.append( self.uploads[ value ] )
+                else:
+                    new_values.append( value )
+            inputs_tree[ key ] = new_values
+
+        # HACK: Flatten single-value lists. Required when using expand_grouping
+        for key, value in inputs_tree.items():
+            if isinstance(value, list) and len(value) == 1:
+                inputs_tree[key] = value[0]
+
+        submit_response = self.__submit_tool( history_id, tool_id=testdef.tool.id, tool_input=inputs_tree )
+        submit_response_object = submit_response.json()
+        try:
+            return Bunch(
+                inputs=inputs_tree,
+                outputs=self.__dictify_outputs( submit_response_object ),
+                output_collections=self.__dictify_output_collections( submit_response_object ),
+                jobs=submit_response_object[ 'jobs' ],
+            )
+        except KeyError:
+            message = "Error creating a job for these tool inputs - %s" % submit_response_object[ 'err_msg' ]
+            raise RunToolException( message, inputs_tree )
+
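+    # Illustrative sketch (hypothetical names): the Bunch returned by run_tool
+    # can be used like:
+    #   result.outputs[ "out_file1" ]  # dataset dict from the tools API
+    #   result.jobs[ 0 ][ "id" ]       # id of the job that was created
+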
+    def _create_collection( self, history_id, collection_def ):
+        create_payload = dict(
+            name=collection_def.name,
+            element_identifiers=dumps( self._element_identifiers( collection_def ) ),
+            collection_type=collection_def.collection_type,
+            history_id=history_id,
+        )
+        return self._post( "dataset_collections", data=create_payload ).json()[ "id" ]
+
+    def _element_identifiers( self, collection_def ):
+        element_identifiers = []
+        for ( element_identifier, element ) in collection_def.elements:
+            if isinstance( element, TestCollectionDef ):
+                subelement_identifiers = self._element_identifiers( element )
+                element = dict(
+                    name=element_identifier,
+                    src="new_collection",
+                    collection_type=element.collection_type,
+                    element_identifiers=subelement_identifiers
+                )
+            else:
+                element_name = element[ 0 ]
+                element = self.uploads[ element[ 1 ] ].copy()
+                element[ "name" ] = element_name
+            element_identifiers.append( element )
+        return element_identifiers
+
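+    # Illustrative sketch (hypothetical values): for a flat pair collection
+    # built from two previously uploaded files, _element_identifiers produces
+    # roughly:
+    #   [ { "src": "hda", "id": "<hda_id>", "name": "forward" },
+    #     { "src": "hda", "id": "<hda_id>", "name": "reverse" } ]
+    # Nested TestCollectionDef elements instead become entries with
+    # src="new_collection" and their own element_identifiers.
+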
+    def __dictify_output_collections( self, submit_response ):
+        output_collections_dict = odict()
+        for output_collection in submit_response[ 'output_collections' ]:
+            output_collections_dict[ output_collection.get("output_name") ] = output_collection
+        return output_collections_dict
+
+    def __dictify_outputs( self, datasets_object ):
+        # Convert the outputs list to a dictionary that can be accessed by
+        # output_name, so we can be more flexible about the ordering of
+        # outputs while still allowing legacy list-style access.
+        outputs_dict = odict()
+        index = 0
+        for output in datasets_object[ 'outputs' ]:
+            outputs_dict[ index ] = outputs_dict[ output.get("output_name") ] = output
+            index += 1
+        # Each item is added twice (once under its index for backward
+        # compatibility), so __len__ is overridden to reflect the real
+        # number of outputs.
+        outputs_dict.__len__ = lambda: index
+        return outputs_dict
+
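+    # Illustrative sketch (assumed single-output tool): the resulting dict is
+    # addressable both ways, e.g. outputs_dict[ 0 ] is outputs_dict[ "out_file1" ],
+    # while len() still reports one output.
+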
+    def output_hid( self, output_data ):
+        return output_data[ 'id' ]
+
+    def delete_history( self, history ):
+        return None
+
+    def __wait_for_history( self, history_id ):
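+        # Note: this returns a no-argument callable that polls until the
+        # history is ready; it does not block at call time.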
+        def wait():
+            while not self.__history_ready( history_id ):
+                pass
+        return wait
+
+    def __job_ready( self, job_id, history_id ):
+        if job_id is None:
+            raise ValueError("__job_ready passed empty job_id")
+        job_json = self._get( "jobs/%s" % job_id ).json()
+        state = job_json[ 'state' ]
+        try:
+            return self._state_ready( state, error_msg="Job in error state." )
+        except Exception:
+            if VERBOSE_ERRORS:
+                self._summarize_history_errors( history_id )
+            raise
+
+    def __history_ready( self, history_id ):
+        if history_id is None:
+            raise ValueError("__history_ready passed empty history_id")
+        history_json = self._get( "histories/%s" % history_id ).json()
+        state = history_json[ 'state' ]
+        try:
+            return self._state_ready( state, error_msg="History in error state." )
+        except Exception:
+            if VERBOSE_ERRORS:
+                self._summarize_history_errors( history_id )
+            raise
+
+    def _summarize_history_errors( self, history_id ):
+        if history_id is None:
+            raise ValueError("_summarize_history_errors passed empty history_id")
+        print("History with id %s in error - summary of datasets in error below." % history_id)
+        try:
+            history_contents = self.__contents( history_id )
+        except Exception:
+            print("*TEST FRAMEWORK FAILED TO FETCH HISTORY DETAILS*")
+
+        for history_content in history_contents:
+            if history_content[ 'history_content_type'] != 'dataset':
+                continue
+
+            dataset = history_content
+            if dataset[ 'state' ] != 'error':
+                continue
+
+            print(ERROR_MESSAGE_DATASET_SEP)
+            dataset_id = dataset.get( 'id', None )
+            print("| %d - %s (HID - NAME) " % ( int( dataset['hid'] ), dataset['name'] ))
+            try:
+                dataset_info = self._dataset_info( history_id, dataset_id )
+                print("| Dataset Blurb:")
+                print(self.format_for_error( dataset_info.get( "misc_blurb", "" ), "Dataset blurb was empty." ))
+                print("| Dataset Info:")
+                print(self.format_for_error( dataset_info.get( "misc_info", "" ), "Dataset info is empty." ))
+            except Exception:
+                print("| *TEST FRAMEWORK ERROR FETCHING DATASET DETAILS*")
+            try:
+                provenance_info = self._dataset_provenance( history_id, dataset_id )
+                print("| Dataset Job Standard Output:")
+                print(self.format_for_error( provenance_info.get( "stdout", "" ), "Standard output was empty." ))
+                print("| Dataset Job Standard Error:")
+                print(self.format_for_error( provenance_info.get( "stderr", "" ), "Standard error was empty." ))
+            except Exception:
+                print("| *TEST FRAMEWORK ERROR FETCHING JOB DETAILS*")
+            print("|")
+        print(ERROR_MESSAGE_DATASET_SEP)
+
+    def format_for_error( self, blob, empty_message, prefix="|  " ):
+        contents = "\n".join([ "%s%s" % (prefix, line.strip()) for line in StringIO(blob).readlines() if line.rstrip("\n\r") ] )
+        return contents or "%s*%s*" % ( prefix, empty_message )
+
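+    # Illustrative example (assumed inputs): format_for_error( "a\nb", "empty" )
+    # yields "|  a\n|  b", while format_for_error( "", "empty" ) yields "|  *empty*".
+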
+    def _dataset_provenance( self, history_id, id ):
+        provenance = self._get( "histories/%s/contents/%s/provenance" % ( history_id, id ) ).json()
+        return provenance
+
+    def _dataset_info( self, history_id, id ):
+        dataset_json = self._get( "histories/%s/contents/%s" % ( history_id, id ) ).json()
+        return dataset_json
+
+    def __contents( self, history_id ):
+        history_contents_json = self._get( "histories/%s/contents" % history_id ).json()
+        return history_contents_json
+
+    def _state_ready( self, state_str, error_msg ):
+        if state_str == 'ok':
+            return True
+        elif state_str == 'error':
+            raise Exception( error_msg )
+        return False
+
+    def __submit_tool( self, history_id, tool_id, tool_input, extra_data={}, files=None ):
+        data = dict(
+            history_id=history_id,
+            tool_id=tool_id,
+            inputs=dumps( tool_input ),
+            **extra_data
+        )
+        return self._post( "tools", files=files, data=data )
+
+    def ensure_user_with_email( self, email, password=None ):
+        admin_key = self.master_api_key
+        all_users = self._get( 'users', key=admin_key ).json()
+        try:
+            test_user = [ user for user in all_users if user["email"] == email ][0]
+        except IndexError:
+            username = re.sub('[^a-z-]', '--', email.lower())
+            password = password or 'testpass'
+            # If remote user middleware is enabled - this endpoint consumes
+            # ``remote_user_email`` otherwise it requires ``email``, ``password``
+            # and ``username``.
+            data = dict(
+                remote_user_email=email,
+                email=email,
+                password=password,
+                username=username,
+            )
+            test_user = self._post( 'users', data, key=admin_key ).json()
+        return test_user
+
+    def __get_user_key( self, user_key, admin_key, test_user=None ):
+        if not test_user:
+            test_user = "test at bx.psu.edu"
+        if user_key:
+            return user_key
+        test_user = self.ensure_user_with_email(test_user)
+        return self._post( "users/%s/api_key" % test_user['id'], key=admin_key ).json()
+
+    def __dataset_fetcher( self, history_id ):
+        def fetcher( hda_id, base_name=None ):
+            url = "histories/%s/contents/%s/display?raw=true" % (history_id, hda_id)
+            if base_name:
+                url += "&filename=%s" % base_name
+            return self._get( url ).content
+
+        return fetcher
+
+    def _post( self, path, data={}, files=None, key=None, admin=False, anon=False ):
+        if not anon:
+            if not key:
+                key = self.api_key if not admin else self.master_api_key
+            data = data.copy()
+            data['key'] = key
+        return post( "%s/%s" % (self.api_url, path), data=data, files=files )
+
+    def _delete( self, path, data={}, key=None, admin=False, anon=False ):
+        if not anon:
+            if not key:
+                key = self.api_key if not admin else self.master_api_key
+            data = data.copy()
+            data['key'] = key
+        return delete( "%s/%s" % (self.api_url, path), params=data )
+
+    def _patch( self, path, data={}, key=None, admin=False, anon=False ):
+        if not anon:
+            if not key:
+                key = self.api_key if not admin else self.master_api_key
+            params = dict( key=key )
+            data = data.copy()
+            data['key'] = key
+        else:
+            params = {}
+        return patch( "%s/%s" % (self.api_url, path), params=params, data=data )
+
+    def _get( self, path, data={}, key=None, admin=False, anon=False ):
+        if not anon:
+            if not key:
+                key = self.api_key if not admin else self.master_api_key
+            data = data.copy()
+            data['key'] = key
+        if path.startswith("/api"):
+            path = path[ len("/api"): ]
+        url = "%s/%s" % (self.api_url, path)
+        return get( url, params=data )
+
+
+class RunToolException(Exception):
+
+    def __init__(self, message, inputs=None):
+        super(RunToolException, self).__init__(message)
+        self.inputs = inputs
+
+
+GALAXY_INTERACTORS = {
+    'api': GalaxyInteractorApi,
+}
diff --git a/test/base/nose_util.py b/test/base/nose_util.py
new file mode 100644
index 0000000..67d792a
--- /dev/null
+++ b/test/base/nose_util.py
@@ -0,0 +1,27 @@
+""" Utilities for dealing with nose.
+
+There was some duplication between Galaxy, Tool Shed, and Install/Test;
+this module tries to reduce that.
+"""
+
+import nose
+
+
+def run( test_config, plugins=[] ):
+    loader = nose.loader.TestLoader( config=test_config )
+    for plugin in plugins:
+        test_config.plugins.addPlugin( plugin )
+    plug_loader = test_config.plugins.prepareTestLoader( loader )
+    if plug_loader is not None:
+        loader = plug_loader
+    tests = loader.loadTestsFromNames( test_config.testNames )
+    test_runner = nose.core.TextTestRunner(
+        stream=test_config.stream,
+        verbosity=test_config.verbosity,
+        config=test_config
+    )
+    plug_runner = test_config.plugins.prepareTestRunner( test_runner )
+    if plug_runner is not None:
+        test_runner = plug_runner
+    result = test_runner.run( tests )
+    return result
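+
+
+# Illustrative usage sketch (hypothetical config, not part of this module):
+#
+#   from nose.config import Config
+#   config = Config()
+#   config.testNames = [ "functional.test_toolbox" ]
+#   result = run( config )  # a nose test result; result.wasSuccessful(), etc.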
diff --git a/test/base/test_db_util.py b/test/base/test_db_util.py
new file mode 100644
index 0000000..4332072
--- /dev/null
+++ b/test/base/test_db_util.py
@@ -0,0 +1,224 @@
+from sqlalchemy import and_, desc, false
+
+import galaxy.model
+from functional import database_contexts
+
+# Deprecated - import database_contexts and use galaxy_context
+sa_session = database_contexts.galaxy_context
+
+
+def gx_context():
+    return database_contexts.galaxy_context
+
+
+def delete_obj( obj ):
+    gx_context().delete( obj )
+    gx_context().flush()
+
+
+def delete_request_type_permissions( id ):
+    rtps = gx_context().query( galaxy.model.RequestTypePermissions ) \
+                       .filter( and_( galaxy.model.RequestTypePermissions.table.c.request_type_id == id ) ) \
+                       .order_by( desc( galaxy.model.RequestTypePermissions.table.c.create_time ) )
+    for rtp in rtps:
+        gx_context().delete( rtp )
+    gx_context().flush()
+
+
+def delete_user_roles( user ):
+    for ura in user.roles:
+        gx_context().delete( ura )
+    gx_context().flush()
+
+
+def flush( obj ):
+    gx_context().add( obj )
+    gx_context().flush()
+
+
+def get_all_histories_for_user( user ):
+    return gx_context().query( galaxy.model.History ) \
+                       .filter( and_( galaxy.model.History.table.c.user_id == user.id,
+                                      galaxy.model.History.table.c.deleted == false() ) ) \
+                       .all()
+
+
+def get_dataset_permissions_by_dataset( dataset ):
+    return gx_context().query( galaxy.model.DatasetPermissions ) \
+                       .filter( galaxy.model.DatasetPermissions.table.c.dataset_id == dataset.id ) \
+                       .all()
+
+
+def get_dataset_permissions_by_role( role ):
+    return gx_context().query( galaxy.model.DatasetPermissions ) \
+                       .filter( galaxy.model.DatasetPermissions.table.c.role_id == role.id ) \
+                       .first()
+
+
+def get_default_history_permissions_by_history( history ):
+    return gx_context().query( galaxy.model.DefaultHistoryPermissions ) \
+                       .filter( galaxy.model.DefaultHistoryPermissions.table.c.history_id == history.id ) \
+                       .all()
+
+
+def get_default_history_permissions_by_role( role ):
+    return gx_context().query( galaxy.model.DefaultHistoryPermissions ) \
+                       .filter( galaxy.model.DefaultHistoryPermissions.table.c.role_id == role.id ) \
+                       .all()
+
+
+def get_default_user_permissions_by_role( role ):
+    return gx_context().query( galaxy.model.DefaultUserPermissions ) \
+                       .filter( galaxy.model.DefaultUserPermissions.table.c.role_id == role.id ) \
+                       .all()
+
+
+def get_default_user_permissions_by_user( user ):
+    return gx_context().query( galaxy.model.DefaultUserPermissions ) \
+                       .filter( galaxy.model.DefaultUserPermissions.table.c.user_id == user.id ) \
+                       .all()
+
+
+def get_form( name ):
+    fdc_list = gx_context().query( galaxy.model.FormDefinitionCurrent ) \
+                           .filter( galaxy.model.FormDefinitionCurrent.table.c.deleted == false() ) \
+                           .order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
+    for fdc in fdc_list:
+        gx_context().refresh( fdc )
+        gx_context().refresh( fdc.latest_form )
+        if fdc.latest_form.name == name:
+            return fdc.latest_form
+    return None
+
+
+def get_folder( parent_id, name, description ):
+    return gx_context().query( galaxy.model.LibraryFolder ) \
+                       .filter( and_( galaxy.model.LibraryFolder.table.c.parent_id == parent_id,
+                                      galaxy.model.LibraryFolder.table.c.name == name,
+                                      galaxy.model.LibraryFolder.table.c.description == description ) ) \
+                       .first()
+
+
+def get_group_by_name( name ):
+    return gx_context().query( galaxy.model.Group ).filter( galaxy.model.Group.table.c.name == name ).first()
+
+
+def get_group_role_associations_by_group( group ):
+    return gx_context().query( galaxy.model.GroupRoleAssociation ) \
+                       .filter( galaxy.model.GroupRoleAssociation.table.c.group_id == group.id ) \
+                       .all()
+
+
+def get_group_role_associations_by_role( role ):
+    return gx_context().query( galaxy.model.GroupRoleAssociation ) \
+                       .filter( galaxy.model.GroupRoleAssociation.table.c.role_id == role.id ) \
+                       .all()
+
+
+def get_latest_dataset():
+    return gx_context().query( galaxy.model.Dataset ) \
+                       .order_by( desc( galaxy.model.Dataset.table.c.create_time ) ) \
+                       .first()
+
+
+def get_latest_hda():
+    return gx_context().query( galaxy.model.HistoryDatasetAssociation ) \
+                       .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
+                       .first()
+
+
+def get_latest_history_for_user( user ):
+    return gx_context().query( galaxy.model.History ) \
+                       .filter( and_( galaxy.model.History.table.c.deleted == false(),
+                                      galaxy.model.History.table.c.user_id == user.id ) ) \
+                       .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+                       .first()
+
+
+def get_latest_ldda_by_name( name ):
+    return gx_context().query( galaxy.model.LibraryDatasetDatasetAssociation ) \
+                       .filter( and_( galaxy.model.LibraryDatasetDatasetAssociation.table.c.name == name,
+                                      galaxy.model.LibraryDatasetDatasetAssociation.table.c.deleted == false() ) ) \
+                       .order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.create_time ) ) \
+                       .first()
+
+
+def get_latest_lddas( limit ):
+    return gx_context().query( galaxy.model.LibraryDatasetDatasetAssociation ) \
+                       .order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.update_time ) ) \
+                       .limit( limit )
+
+
+def get_library( name, description, synopsis ):
+    return gx_context().query( galaxy.model.Library ) \
+                       .filter( and_( galaxy.model.Library.table.c.name == name,
+                                      galaxy.model.Library.table.c.description == description,
+                                      galaxy.model.Library.table.c.synopsis == synopsis,
+                                      galaxy.model.Library.table.c.deleted == false() ) ) \
+                       .first()
+
+
+def get_private_role( user ):
+    for role in user.all_roles():
+        if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+            return role
+    raise AssertionError( "Private role not found for user '%s'" % user.email )
+
+
+def get_request_by_name( name ):
+    return gx_context().query( galaxy.model.Request ) \
+                       .filter( and_( galaxy.model.Request.table.c.name == name,
+                                      galaxy.model.Request.table.c.deleted == false() ) ) \
+                       .first()
+
+
+def get_request_type_by_name( name ):
+    return gx_context().query( galaxy.model.RequestType ) \
+                       .filter( and_( galaxy.model.RequestType.table.c.name == name ) ) \
+                       .order_by( desc( galaxy.model.RequestType.table.c.create_time ) ) \
+                       .first()
+
+
+def get_role_by_name( name ):
+    return gx_context().query( galaxy.model.Role ).filter( galaxy.model.Role.table.c.name == name ).first()
+
+
+def get_user( email ):
+    return gx_context().query( galaxy.model.User ) \
+                       .filter( galaxy.model.User.table.c.email == email ) \
+                       .first()
+
+
+def get_user_address( user, short_desc ):
+    return gx_context().query( galaxy.model.UserAddress ) \
+                       .filter( and_( galaxy.model.UserAddress.table.c.user_id == user.id,
+                                      galaxy.model.UserAddress.table.c.desc == short_desc,
+                                      galaxy.model.UserAddress.table.c.deleted == false() ) ) \
+                       .order_by( desc( galaxy.model.UserAddress.table.c.create_time ) ) \
+                       .first()
+
+
+def get_user_group_associations_by_group( group ):
+    return gx_context().query( galaxy.model.UserGroupAssociation ) \
+                       .filter( galaxy.model.UserGroupAssociation.table.c.group_id == group.id ) \
+                       .all()
+
+
+def get_user_info_form_definition():
+    return galaxy.model.FormDefinition.types.USER_INFO
+
+
+def get_user_role_associations_by_role( role ):
+    return gx_context().query( galaxy.model.UserRoleAssociation ) \
+                       .filter( galaxy.model.UserRoleAssociation.table.c.role_id == role.id ) \
+                       .all()
+
+
+def mark_obj_deleted( obj ):
+    obj.deleted = True
+    gx_context().add( obj )
+    gx_context().flush()
+
+
+def refresh( obj ):
+    gx_context().refresh( obj )
diff --git a/test/base/test_logging.py b/test/base/test_logging.py
new file mode 100644
index 0000000..de11008
--- /dev/null
+++ b/test/base/test_logging.py
@@ -0,0 +1,16 @@
+from __future__ import absolute_import
+
+import logging
+import logging.config
+import os
+
+# This is done in paster or galaxy.main for the server app; provide
+# an entry point for testing as well.
+logging_config = os.environ.get("GALAXY_TEST_LOGGING_CONFIG", None)
+logging_config_file = None
+if logging_config:
+    logging_config_file = os.path.abspath(logging_config)
+    logging.config.fileConfig(
+        logging_config_file,
+        dict(__file__=logging_config_file, here=os.path.dirname(logging_config_file)),
+    )
diff --git a/test/base/tool_shed_util.py b/test/base/tool_shed_util.py
new file mode 100644
index 0000000..fd7c10d
--- /dev/null
+++ b/test/base/tool_shed_util.py
@@ -0,0 +1,90 @@
+import logging
+import os
+import sys
+
+galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
+sys.path.insert(1, os.path.join(galaxy_root, 'lib'))
+
+from galaxy.util import parse_xml
+
+log = logging.getLogger(__name__)
+
+# Set a 10 minute timeout for repository installation.
+repository_installation_timeout = 600
+
+
+def get_installed_repository_info( elem, last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision, tool_path ):
+    """
+    Return the GALAXY_TEST_FILE_DIR, the containing repository name and the
+    changeset revision for the tool elem. This only happens when testing
+    tools installed from the tool shed.
+    """
+    tool_config_path = elem.get( 'file' )
+    installed_tool_path_items = tool_config_path.split( '/repos/' )
+    sans_shed = installed_tool_path_items[ 1 ]
+    path_items = sans_shed.split( '/' )
+    repository_owner = path_items[ 0 ]
+    repository_name = path_items[ 1 ]
+    changeset_revision = path_items[ 2 ]
+    if repository_name != last_tested_repository_name or changeset_revision != last_tested_changeset_revision:
+        # Locate the test-data directory.
+        installed_tool_path = os.path.join( installed_tool_path_items[ 0 ], 'repos', repository_owner, repository_name, changeset_revision )
+        for root, dirs, files in os.walk( os.path.join(tool_path, installed_tool_path )):
+            if '.hg' in dirs:
+                dirs.remove( '.hg' )
+            if 'test-data' in dirs:
+                return os.path.join( root, 'test-data' ), repository_name, changeset_revision
+        return None, repository_name, changeset_revision
+    return last_galaxy_test_file_dir, last_tested_repository_name, last_tested_changeset_revision
+
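+# Illustrative example (hypothetical path): for a tool elem whose file attribute is
+#   ../shed_tools/toolshed.g2.bx.psu.edu/repos/devteam/bwa/abc123/bwa.xml
+# splitting on '/repos/' yields owner 'devteam', repository 'bwa' and changeset
+# revision 'abc123'.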
+
+def parse_tool_panel_config( config, shed_tools_dict ):
+    """
+    Parse a shed-related tool panel config to generate the shed_tools_dict. This only happens when testing tools installed from the tool shed.
+    """
+    last_galaxy_test_file_dir = None
+    last_tested_repository_name = None
+    last_tested_changeset_revision = None
+    tool_path = None
+    has_test_data = False
+    tree = parse_xml( config )
+    root = tree.getroot()
+    tool_path = root.get('tool_path')
+    for elem in root:
+        if elem.tag == 'tool':
+            galaxy_test_file_dir, \
+                last_tested_repository_name, \
+                last_tested_changeset_revision = get_installed_repository_info( elem,
+                                                                                last_galaxy_test_file_dir,
+                                                                                last_tested_repository_name,
+                                                                                last_tested_changeset_revision,
+                                                                                tool_path )
+            if galaxy_test_file_dir:
+                if not has_test_data:
+                    has_test_data = True
+                if galaxy_test_file_dir != last_galaxy_test_file_dir:
+                    if not os.path.isabs( galaxy_test_file_dir ):
+                        galaxy_test_file_dir = os.path.join( galaxy_root, galaxy_test_file_dir )
+                guid = elem.get( 'guid' )
+                shed_tools_dict[ guid ] = galaxy_test_file_dir
+                last_galaxy_test_file_dir = galaxy_test_file_dir
+        elif elem.tag == 'section':
+            for section_elem in elem:
+                if section_elem.tag == 'tool':
+                    galaxy_test_file_dir, \
+                        last_tested_repository_name, \
+                        last_tested_changeset_revision = get_installed_repository_info( section_elem,
+                                                                                        last_galaxy_test_file_dir,
+                                                                                        last_tested_repository_name,
+                                                                                        last_tested_changeset_revision,
+                                                                                        tool_path )
+                    if galaxy_test_file_dir:
+                        if not has_test_data:
+                            has_test_data = True
+                        if galaxy_test_file_dir != last_galaxy_test_file_dir:
+                            if not os.path.isabs( galaxy_test_file_dir ):
+                                galaxy_test_file_dir = os.path.join( galaxy_root, galaxy_test_file_dir )
+                        guid = section_elem.get( 'guid' )
+                        shed_tools_dict[ guid ] = galaxy_test_file_dir
+                        last_galaxy_test_file_dir = galaxy_test_file_dir
+    return has_test_data, shed_tools_dict
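+
+
+# Illustrative input sketch (assumed shape of a shed tool panel config):
+#
+#   <toolbox tool_path="../shed_tools">
+#     <section id="my_section" name="My Section">
+#       <tool file="toolshed/repos/owner/repo/rev/tool.xml" guid="..." />
+#     </section>
+#   </toolbox>
+#
+# After parsing, shed_tools_dict maps each tool guid to its test-data directory.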
diff --git a/test/base/twilltestcase.py b/test/base/twilltestcase.py
new file mode 100644
index 0000000..d077448
--- /dev/null
+++ b/test/base/twilltestcase.py
@@ -0,0 +1,2332 @@
+from __future__ import print_function
+
+import logging
+import os
+import pprint
+import shutil
+import tarfile
+import tempfile
+import time
+import unittest
+import zipfile
+from json import loads
+from xml.etree import ElementTree
+
+# Be sure to use Galaxy's vanilla pyparsing instead of the older version
+# imported by twill.
+import pyparsing  # noqa: F401
+import twill
+import twill.commands as tc
+from markupsafe import escape
+from six import string_types, StringIO
+from six.moves.urllib.parse import unquote, urlencode, urlparse
+from twill.other_packages._mechanize_dist import ClientForm
+
+from galaxy.tools.verify import (
+    check_command,
+    files_diff,
+    make_temp_fname,
+    verify,
+)
+from galaxy.tools.verify.test_data import TestDataResolver
+from galaxy.web import security
+from galaxy.web.framework.helpers import iff
+
+# Force twill to log to a buffer -- FIXME: Should this go to stdout and be captured by nose?
+buffer = StringIO()
+twill.set_output( buffer )
+tc.config( 'use_tidy', 0 )
+
+# Dial ClientCookie logging down (very noisy)
+logging.getLogger( "ClientCookie.cookies" ).setLevel( logging.WARNING )
+log = logging.getLogger( __name__ )
+
+# Environment variable values arrive as strings, so coerce to int.
+DEFAULT_TOOL_TEST_WAIT = int(os.environ.get("GALAXY_TEST_DEFAULT_WAIT", 86400))
+
+
+class TwillTestCase( unittest.TestCase ):
+
+    def setUp( self ):
+        # Security helper
+        self.security = security.SecurityHelper( id_secret='changethisinproductiontoo' )
+        self.history_id = os.environ.get( 'GALAXY_TEST_HISTORY_ID', None )
+        self.host = os.environ.get( 'GALAXY_TEST_HOST' )
+        self.port = os.environ.get( 'GALAXY_TEST_PORT' )
+        default_url = "http://%s:%s" % (self.host, self.port)
+        self.url = os.environ.get('GALAXY_TEST_EXTERNAL', default_url)
+        self.test_data_resolver = TestDataResolver( )
+        self.tool_shed_test_file = os.environ.get( 'GALAXY_TOOL_SHED_TEST_FILE', None )
+        if self.tool_shed_test_file:
+            f = open( self.tool_shed_test_file, 'r' )
+            text = f.read()
+            f.close()
+            self.shed_tools_dict = loads( text )
+        else:
+            self.shed_tools_dict = {}
+        self.keepOutdir = os.environ.get( 'GALAXY_TEST_SAVE', '' )
+        if self.keepOutdir:
+            try:
+                os.makedirs(self.keepOutdir)
+            except OSError:
+                # The directory may already exist.
+                pass
+
+    def act_on_multiple_datasets( self, cntrller, library_id, do_action, ldda_ids='', strings_displayed=[] ):
+        # Can't use the ~/library_admin/libraries form as twill barfs on it so we'll simulate the form submission
+        # by going directly to the form action
+        self.visit_url( '%s/library_common/act_on_multiple_datasets?cntrller=%s&library_id=%s&ldda_ids=%s&do_action=%s' %
+                        ( self.url, cntrller, library_id, ldda_ids, do_action ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def add_bar_codes( self, cntrller, request_id, bar_codes, strings_displayed=[], strings_displayed_after_submit=[] ):
+        url = "%s/requests_common/edit_samples?cntrller=%s&id=%s" % ( self.url, cntrller, request_id )
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for sample_index, bar_code in enumerate( bar_codes ):
+            tc.fv( "1", "sample_%i_bar_code" % sample_index, bar_code )
+        tc.submit( "save_samples_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def add_datasets_to_sample( self, request_id, sample_id, external_service_id, sample_datasets, strings_displayed=[], strings_displayed_after_submit=[] ):
+        # visit the dataset selection page
+        url = "%s/requests_admin/select_datasets_to_transfer?cntrller=requests_admin&sample_id=%s&request_id=%s&external_service_id=%s" % \
+            ( self.url, sample_id, request_id, external_service_id )
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        # Datasets are associated with the given sample by building the appropriate
+        # url and calling it directly, since the dataset selection UI is a javascript
+        # dynatree.
+        url = "%s/requests_admin/select_datasets_to_transfer?cntrller=requests_admin&sample_id=%s&request_id=%s" % ( self.url, sample_id, request_id )
+        url += '&select_datasets_to_transfer_button=Select%20datasets'
+        url += '&selected_datasets_to_transfer=%s' % ','.join( sample_datasets )
+        self.visit_url( url )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def add_folder( self, cntrller, library_id, folder_id, name='Folder One', description='This is Folder One' ):
+        """Create a new folder"""
+        url = "%s/library_common/create_folder?cntrller=%s&library_id=%s&parent_id=%s" % ( self.url, cntrller, library_id, folder_id )
+        self.visit_url( url )
+        self.check_page_for_string( 'Create a new folder' )
+        tc.fv( "1", "name", name )
+        tc.fv( "1", "description", description )
+        tc.submit( "new_folder_button" )
+        check_str = escape( "The new folder named '%s' has been added to the data library." % name )
+        self.check_page_for_string( check_str )
+
+    def add_samples( self, cntrller, request_id, sample_value_tuples, folder_options=[], strings_displayed=[], strings_displayed_after_submit=[] ):
+        url = "%s/requests_common/add_sample?cntrller=%s&request_id=%s&add_sample_button=Add+sample" % ( self.url, cntrller, request_id )
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for sample_index, ( sample_name, target_library_info, sample_field_values ) in enumerate( sample_value_tuples ):
+            tc.fv( "add_samples", "sample_%i_name" % sample_index, sample_name )
+            if target_library_info[ 'library' ] is not None:
+                tc.fv( "add_samples", "sample_%i_library_id" % sample_index, target_library_info[ 'library' ] )
+                self.refresh_form( "sample_%i_library_id" % sample_index, target_library_info[ 'library' ] )
+            # check if the folder selectfield has been correctly populated
+            for check_str in folder_options:
+                self.check_page_for_string( check_str )
+            if target_library_info[ 'folder' ] is not None:
+                tc.fv( "add_samples", "sample_%i_folder_id" % sample_index, target_library_info[ 'folder' ] )
+            for field_index, field_value in enumerate( sample_field_values ):
+                tc.fv( "add_samples", "sample_%i_field_%i" % ( sample_index, field_index ), field_value )
+            # Do not click the 'Add sample' button once all the samples have been added
+            if sample_index < len( sample_value_tuples ) - 1:
+                tc.submit( "add_sample_button" )
+        # select the correct form before submitting it
+        tc.fv( "add_samples", "copy_sample_index", "-1" )
+        tc.submit( "save_samples_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def add_tag( self, item_id, item_class, context, new_tag ):
+        self.visit_url( "%s/tag/add_tag_async?item_id=%s&item_class=%s&context=%s&new_tag=%s" %
+                        ( self.url, item_id, item_class, context, new_tag ) )
+
+    def add_template( self, cntrller, item_type, form_type, form_id, form_name,
+                      library_id=None, folder_id=None, ldda_id=None, request_type_id=None, sample_id=None ):
+        """
+        Add a new template to an item - for library items, the template will ALWAYS BE SET TO INHERITABLE here.  If you do not
+        want the template to be inherited, call the manage_library_template_inheritance() below immediately after you call this
+        method in your test code.  Templates added to RequestType objects are always inherited by their samples.
+        """
+        params = dict( cntrller=cntrller, item_type=item_type, form_type=form_type, library_id=library_id )
+        url = "/library_common/add_template"
+        if item_type == 'folder':
+            params[ 'folder_id' ] = folder_id
+        elif item_type == 'ldda':
+            params[ 'ldda_id' ] = ldda_id
+        self.visit_url( url, params )
+        self.check_page_for_string( "Select a template for the" )
+        self.refresh_form( "form_id", form_id )
+        # For some unknown reason, twill barfs if the form number ( 1 ) is used in the following
+        # rather than the form name ( select_template ), so we have to use the form name.
+        tc.fv( "select_template", "inheritable", '1' )
+        tc.submit( "add_template_button" )
+        self.check_page_for_string( 'A template based on the form "%s" has been added to this' % form_name )
+
+    def add_user_address( self, user_id, address_dict ):
+        self.visit_url( "%s/user/new_address?cntrller=user&user_id=%s" % ( self.url, user_id ) )
+        self.check_page_for_string( 'Add new address' )
+        for field_name, value in address_dict.items():
+            tc.fv( "1", field_name, value )
+        tc.submit( "new_address_button" )
+        self.check_page_for_string( 'Address (%s) has been added' % address_dict[ 'short_desc' ] )
+
+    def associate_users_and_groups_with_role( self, role_id, role_name, user_ids=[], group_ids=[] ):
+        url = "%s/admin/role?id=%s&role_members_edit_button=Save" % ( self.url, role_id )
+        if user_ids:
+            url += "&in_users=%s" % ','.join( user_ids )
+        if group_ids:
+            url += "&in_groups=%s" % ','.join( group_ids )
+        self.visit_url( url )
+        check_str = "Role '%s' has been updated with %d associated users and %d associated groups" % ( role_name, len( user_ids ), len( group_ids ) )
+        self.check_page_for_string( check_str )
+
+    def associate_users_and_roles_with_group( self, group_id, group_name, user_ids=[], role_ids=[] ):
+        url = "%s/admin/manage_users_and_roles_for_group?id=%s&group_roles_users_edit_button=Save" % ( self.url, group_id )
+        if user_ids:
+            url += "&in_users=%s" % ','.join( user_ids )
+        if role_ids:
+            url += "&in_roles=%s" % ','.join( role_ids )
+        self.visit_url( url )
+        check_str = "Group '%s' has been updated with %d associated roles and %d associated users" % ( group_name, len( role_ids ), len( user_ids ) )
+        self.check_page_for_string( check_str )
+
+    def auto_detect_metadata( self, hda_id ):
+        """Auto-detect history_dataset_association metadata"""
+        self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
+        self.check_page_for_string( 'This will inspect the dataset and attempt' )
+        tc.fv( 'auto_detect', 'detect', 'Auto-detect' )
+        tc.submit( 'detect' )
+        try:
+            self.check_page_for_string( 'Attributes have been queued to be updated' )
+            self.wait()
+        except AssertionError:
+            self.check_page_for_string( 'Attributes updated' )
+
+    def browse_groups( self, strings_displayed=[] ):
+        self.visit_url( '%s/admin/groups' % self.url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def browse_libraries_admin( self, deleted=False, strings_displayed=[], strings_not_displayed=[] ):
+        self.visit_url( '%s/library_admin/browse_libraries?sort=name&f-description=All&f-name=All&f-deleted=%s' % ( self.url, str( deleted ) ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            try:
+                self.check_page_for_string( check_str )
+                raise AssertionError( "String (%s) incorrectly displayed when browsing library." % check_str )
+            except:
+                pass
+
+    def browse_libraries_regular_user( self, strings_displayed=[], strings_not_displayed=[] ):
+        self.visit_url( '%s/library/browse_libraries' % self.url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            try:
+                self.check_page_for_string( check_str )
+                raise AssertionError( "String (%s) incorrectly displayed when browsing library." % check_str )
+            except:
+                pass
+
+    def browse_library( self, cntrller, library_id, show_deleted=False, strings_displayed=[], strings_not_displayed=[] ):
+        self.visit_url( '%s/library_common/browse_library?cntrller=%s&id=%s&show_deleted=%s' % ( self.url, cntrller, library_id, str( show_deleted ) ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            try:
+                self.check_page_for_string( check_str )
+                raise AssertionError( "String (%s) incorrectly displayed when browsing library." % check_str )
+            except:
+                pass
+
+    def browse_roles( self, strings_displayed=[] ):
+        self.visit_url( '%s/admin/roles' % self.url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def change_sample_state( self, request_id, sample_ids, new_sample_state_id, comment='', strings_displayed=[], strings_displayed_after_submit=[] ):
+        url = "%s/requests_common/edit_samples?cntrller=requests_admin&id=%s" % ( self.url, request_id )
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for sample_id in sample_ids:
+            tc.fv( "1", "select_sample_%i" % sample_id, True )
+        tc.fv( "1", "sample_operation", 'Change state' )
+        # refresh on change to show the sample states selectfield
+        self.refresh_form( "sample_operation", 'Change state' )
+        self.check_page_for_string( "Change current state" )
+        tc.fv( "1", "sample_state_id", new_sample_state_id )
+        tc.fv( "1", "sample_event_comment", comment )
+        tc.submit( "save_samples_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def change_sample_target_data_library( self, cntrller, request_id, sample_ids, new_library_id, new_folder_id, folder_options=[], comment='', strings_displayed=[], strings_displayed_after_submit=[] ):
+        url = "%s/requests_common/edit_samples?cntrller=%s&id=%s" % ( self.url, cntrller, request_id )
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for sample_id in sample_ids:
+            tc.fv( "edit_samples", "select_sample_%i" % sample_id, True )
+        tc.fv( "edit_samples", "sample_operation", 'Select data library and folder' )
+        # refresh on change to show the data libraries selectfield
+        self.refresh_form( "sample_operation", 'Select data library and folder' )
+        self.check_page_for_string( "Select data library:" )
+        tc.fv( "1", "sample_operation_library_id", new_library_id )
+        # refresh on change to show the selectfield with the list of
+        # folders in the selected data library above
+        self.refresh_form( "sample_operation_library_id", new_library_id )
+        self.check_page_for_string( "Select folder:" )
+        # check if the folder selectfield has been correctly populated
+        for check_str in folder_options:
+            self.check_page_for_string( check_str )
+        tc.fv( "1", "sample_operation_folder_id", new_folder_id )
+        tc.submit( "save_samples_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def change_datatype( self, hda_id, datatype ):
+        """Change format of history_dataset_association"""
+        self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
+        self.check_page_for_string( 'This will change the datatype of the existing dataset but' )
+        tc.fv( 'change_datatype', 'datatype', datatype )
+        tc.submit( 'change' )
+        self.check_page_for_string( 'Changed the type of dataset' )
+
+    def check_archive_contents( self, archive, lddas ):
+        def get_ldda_path( ldda ):
+            path = ""
+            parent_folder = ldda.library_dataset.folder
+            while parent_folder is not None:
+                if parent_folder.parent is None:
+                    path = os.path.join( parent_folder.library_root[0].name, path )
+                    break
+                path = os.path.join( parent_folder.name, path )
+                parent_folder = parent_folder.parent
+            path += ldda.name
+            return path
+
+        def mkdir( file ):
+            dir = os.path.join( tmpd, os.path.dirname( file ) )
+            if not os.path.exists( dir ):
+                os.makedirs( dir )
+        tmpd = tempfile.mkdtemp()
+        if tarfile.is_tarfile( archive ):
+            t = tarfile.open( archive )
+            for n in t.getnames():
+                mkdir( n )
+                t.extract( n, tmpd )
+            t.close()
+        elif zipfile.is_zipfile( archive ):
+            z = zipfile.ZipFile( archive, 'r' )
+            for n in z.namelist():
+                mkdir( n )
+                open( os.path.join( tmpd, n ), 'wb' ).write( z.read( n ) )
+            z.close()
+        else:
+            raise Exception( 'Unable to read archive: %s' % archive )
+        for ldda in lddas:
+            orig_file = self.get_filename( ldda.name )
+            downloaded_file = os.path.join( tmpd, get_ldda_path( ldda ) )
+            assert os.path.exists( downloaded_file )
+            try:
+                self.files_diff( orig_file, downloaded_file )
+            except AssertionError as err:
+                errmsg = 'Library item %s different than expected, difference:\n' % ldda.name
+                errmsg += str( err )
+                errmsg += 'Unpacked archive remains in: %s\n' % tmpd
+                raise AssertionError( errmsg )
+        shutil.rmtree( tmpd )
+
+    def check_for_strings( self, strings_displayed=[], strings_not_displayed=[] ):
+        if strings_displayed:
+            for string in strings_displayed:
+                self.check_page_for_string( string )
+        if strings_not_displayed:
+            for string in strings_not_displayed:
+                self.check_string_not_in_page( string )
+
+    def check_hda_attribute_info( self, hda_id, strings_displayed=[] ):
+        """Edit history_dataset_association attribute information"""
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def check_hda_json_for_key_value( self, hda_id, key, value, use_string_contains=False ):
+        """
+        Uses the history API to determine whether the current history:
+        (1) Has a history dataset with the required ID.
+        (2) That dataset has the required key.
+        (3) The contents of that key match the provided value.
+        If use_string_contains=True, this will perform a substring match, otherwise an exact match.
+        """
+        # TODO: multi key, value
+        hda = dict()
+        for history_item in self.get_history_from_api():
+            if history_item[ 'id' ] == hda_id:
+                hda = self.json_from_url( history_item[ 'url' ] )
+                break
+        if hda:
+            if key in hda:
+                if use_string_contains:
+                    return value in hda[ key ]
+                else:
+                    return value == hda[ key ]
+        return False
+
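+    # Illustrative usage sketch (hypothetical ids and values):
+    #   self.check_hda_json_for_key_value( hda_id, "state", "ok" )
+    #   self.check_hda_json_for_key_value( hda_id, "misc_info", "uploaded",
+    #                                      use_string_contains=True )
+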
+    def check_history_for_errors( self ):
+        """Raises an exception if there are errors in a history"""
+        self.visit_url( "/history" )
+        page = self.last_page()
+        if page.find( 'error' ) > -1:
+            raise AssertionError( 'Errors in the history for user %s' % self.user )
+
+    def check_history_for_string( self, patt, show_deleted=False ):
+        """Breaks patt on whitespace and searches for each element seperately in the history"""
+        if show_deleted:
+            params = dict( show_deleted=True )
+            self.visit_url( "/history", params )
+        else:
+            self.visit_url( "/history" )
+        for subpatt in patt.split():
+            try:
+                tc.find( subpatt )
+            except:
+                fname = self.write_temp_file( tc.browser.get_html() )
+                errmsg = "no match to '%s'\npage content written to '%s'" % ( subpatt, fname )
+                raise AssertionError( errmsg )
+
+    def check_history_for_exact_string( self, string, show_deleted=False ):
+        """Looks for exact match to 'string' in history page"""
+        if show_deleted:
+            self.visit_url( "/history?show_deleted=True" )
+        else:
+            self.visit_url( "/history" )
+        try:
+            tc.find( string )
+        except:
+            fname = self.write_temp_file( tc.browser.get_html() )
+            errmsg = "no match to '%s'\npage content written to '%s'" % ( string, fname )
+            raise AssertionError( errmsg )
+
+    def check_history_json( self, check_fn, show_deleted=None ):
+        """
+        Fetches the current history as JSON via the API and asserts that
+        check_fn returns True when called on the parsed data.
+        """
+        json_data = None
+        try:
+            json_data = self.get_history_from_api( show_deleted=show_deleted, show_details=True )
+            check_result = check_fn( json_data )
+            assert check_result, 'failed check_fn: %s (got %s)' % ( check_fn.__name__, str( check_result ) )
+        except Exception as e:
+            log.exception( e )
+            log.debug( 'json_data: %s', ( '\n' + pprint.pformat( json_data ) if json_data else '(no match)' ) )
+            fname = self.write_temp_file( tc.browser.get_html() )
+            errmsg = ( "json could not be read\npage content written to '%s'" % ( fname ) )
+            raise AssertionError( errmsg )
+
+    def check_metadata_for_string( self, patt, hid=None ):
+        """Looks for 'patt' in the edit page when editing a dataset"""
+        data_list = self.get_history_as_data_list()
+        self.assertTrue( data_list )
+        if hid is None:  # take last hid
+            elem = data_list[-1]
+            hid = int( elem.get('hid') )
+        self.assertTrue( hid )
+        self.visit_url( "/dataset/edit?hid=%s" % hid )
+        for subpatt in patt.split():
+            tc.find(subpatt)
+
+    def check_page(self, strings_displayed, strings_displayed_count, strings_not_displayed):
+        """Checks a page for strings displayed, not displayed and number of occurrences of a string"""
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for check_str, count in strings_displayed_count:
+            self.check_string_count_in_page( check_str, count )
+        for check_str in strings_not_displayed:
+            self.check_string_not_in_page( check_str )
+
+    def check_page_for_string( self, patt ):
+        """Looks for 'patt' in the current browser page"""
+        page = self.last_page()
+        if page.find( patt ) == -1:
+            fname = self.write_temp_file( page )
+            errmsg = "no match to '%s'\npage content written to '%s'\npage: [[%s]]" % ( patt, fname, page )
+            raise AssertionError( errmsg )
+
+    def check_request_grid( self, cntrller, state, deleted=False, strings_displayed=[] ):
+        params = { 'f-state': state, 'f-deleted': deleted, 'sort': 'create_time' }
+        self.visit_url( '/%s/browse_requests' % cntrller, params )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def check_string_count_in_page( self, patt, min_count ):
+        """Checks the number of 'patt' occurrences in the current browser page"""
+        page = self.last_page()
+        patt_count = page.count( patt )
+        # The number of occurrences of patt in the page should be at least min_count
+        # so show error if patt_count is less than min_count
+        if patt_count < min_count:
+            fname = self.write_temp_file( page )
+            errmsg = "%i occurrences of '%s' found instead of %i.\npage content written to '%s' " % ( min_count, patt, patt_count, fname )
+            raise AssertionError( errmsg )
+
+    def check_string_not_in_page( self, patt ):
+        """Checks to make sure 'patt' is NOT in the page."""
+        page = self.last_page()
+        if page.find( patt ) != -1:
+            fname = self.write_temp_file( page )
+            errmsg = "string (%s) incorrectly displayed in page.\npage content written to '%s'" % ( patt, fname )
+            raise AssertionError( errmsg )
+
+    def clear_cookies( self ):
+        tc.clear_cookies()
+
+    def clear_form( self, form=0 ):
+        """Clears a form"""
+        tc.formclear(str(form))
+
+    def copy_history( self, history_id, copy_choice, strings_displayed=[], strings_displayed_after_submit=[] ):
+        self.visit_url( "/history/copy?id=%s" % history_id )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( '1', 'copy_choice', copy_choice )
+        tc.submit( 'copy_choice_button' )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def convert_format( self, hda_id, target_type ):
+        """Convert format of history_dataset_association"""
+        self.visit_url( "%s/datasets/%s/edit" % ( self.url, self.security.encode_id( hda_id ) ) )
+        self.check_page_for_string( 'This will inspect the dataset and attempt' )
+        tc.fv( 'convert_data', 'target_type', target_type )
+        tc.submit( 'convert_data' )
+        self.check_page_for_string( 'The file conversion of Convert BED to GFF on data' )
+        self.wait()  # wait for the format convert tool to finish before returning
+
+    def copy_history_item( self, source_history_id=None, source_dataset_id=None, target_history_id=None, all_target_history_ids=[],
+                           deleted_history_ids=[] ):
+        """
+        Copy 1 history_dataset_association to 1 history (limited by twill since it doesn't support multiple
+        field names, such as checkboxes).
+        """
+        self.visit_url( "/dataset/copy_datasets" )
+        self.check_page_for_string( 'Source History:' )
+        # Make sure all of the user's active histories are displayed
+        for id in all_target_history_ids:
+            self.check_page_for_string( id )
+        # Make sure only active histories are displayed
+        for id in deleted_history_ids:
+            try:
+                self.check_page_for_string( id )
+                raise AssertionError( "deleted history id %d displayed in list of target histories" % id )
+            except:
+                pass
+        form_values = [ ( 'source_history', source_history_id ),
+                        ( 'target_history_id', target_history_id ),
+                        ( 'source_content_ids', 'dataset|%s' % source_dataset_id ),
+                        ( 'do_copy', True ) ]
+        self.visit_url( "/dataset/copy_datasets", params=form_values )
+        check_str = '1 dataset copied to 1 history'
+        self.check_page_for_string( check_str )
+
+    # Functions associated with user accounts
+
+    def create( self, cntrller='user', email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
+        # HACK: don't use panels because late_javascripts() messes up the twill browser and it
+        # can't find form fields (and hence user can't be logged in).
+        params = dict( cntrller=cntrller, use_panels=False )
+        self.visit_url( "/user/create", params )
+        tc.fv( 'registration', 'email', email )
+        tc.fv( 'registration', 'redirect', redirect )
+        tc.fv( 'registration', 'password', password )
+        tc.fv( 'registration', 'confirm', password )
+        tc.fv( 'registration', 'username', username )
+        tc.submit( 'create_user_button' )
+        previously_created = False
+        username_taken = False
+        invalid_username = False
+        try:
+            self.check_page_for_string( "Created new user account" )
+        except AssertionError:
+            try:
+                # May have created the account in a previous test run...
+                self.check_page_for_string( "User with that email already exists" )
+                previously_created = True
+            except AssertionError:
+                try:
+                    self.check_page_for_string( 'Public name is taken; please choose another' )
+                    username_taken = True
+                except AssertionError:
+                    try:
+                        # Note that we're only checking whether the user name is shorter than 4 characters here...
+                        self.check_page_for_string( 'Public name must be at least 4 characters in length' )
+                        invalid_username = True
+                    except AssertionError:
+                        pass
+        return previously_created, username_taken, invalid_username
+
+    def create_library( self, name='Library One', description='This is Library One', synopsis='Synopsis for Library One' ):
+        """Create a new library"""
+        self.visit_url( "%s/library_admin/create_library" % self.url )
+        self.check_page_for_string( 'Create a new data library' )
+        tc.fv( "1", "name", name )
+        tc.fv( "1", "description", description )
+        tc.fv( "1", "synopsis", synopsis )
+        tc.submit( "create_library_button" )
+        check_str = escape( "The new library named '%s' has been created" % name )
+        self.check_page_for_string( check_str )
+
+    def create_user_with_info( self, email, password, username, user_info_values, user_type_fd_id='', cntrller='user',
+                               strings_displayed=[], strings_displayed_after_submit=[] ):
+        """Create a new user with associated info"""
+        self.visit_url( "%s/user/create?cntrller=%s&use_panels=False" % ( self.url, cntrller ) )
+        tc.fv( "registration", "email", email )
+        tc.fv( "registration", "password", password )
+        tc.fv( "registration", "confirm", password )
+        tc.fv( "registration", "username", username )
+        if user_type_fd_id:
+            # The user_type_fd_id SelectField requires a refresh_on_change
+            self.refresh_form( 'user_type_fd_id', user_type_fd_id, form_id='registration' )
+            tc.fv( "registration", "password", password )
+            tc.fv( "registration", "confirm", password )
+            for field_name, info_value in user_info_values:
+                tc.fv( "registration", field_name, info_value )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.submit( "create_user_button" )
+
+    def create_group( self, name='Group One', in_user_ids=[], in_role_ids=[], create_role_for_group=False, strings_displayed=[] ):
+        """Create a new group"""
+        url = "/admin/groups"
+        params = dict( operation='create', create_group_button='Save', name=name )
+        if in_user_ids:
+            params[ 'in_users' ] = ','.join( in_user_ids )
+        if in_role_ids:
+            params[ 'in_roles' ] = ','.join( in_role_ids )
+        if create_role_for_group:
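+            # The doubled list value apparently mirrors Galaxy's checkbox-plus-hidden-field markup;
+            # a list value requires doseq=True so urlencode emits one query argument per element.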
+            params[ 'create_role_for_group' ] = [ 'yes', 'yes' ]
+            doseq = True
+        else:
+            params[ 'create_role_for_group' ] = 'no'
+            doseq = False
+        self.visit_url( url, params=params, doseq=doseq )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        self.visit_url( "/admin/groups" )
+        self.check_page_for_string( name )
+
+    def create_form( self, name, description, form_type, field_type='TextField', form_layout_name='',
+                     num_fields=1, num_options=0, field_name='1_field_name', strings_displayed=[],
+                     strings_displayed_after_submit=[] ):
+        """Create a new form definition."""
+        self.visit_url( "%s/forms/create_form_definition" % self.url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( "create_form_definition", "name", name )
+        tc.fv( "create_form_definition", "description", description )
+        tc.fv( "create_form_definition", "form_type_select_field", form_type )
+        tc.submit( "create_form_button" )
+        if form_type == "Sequencing Sample Form":
+            tc.submit( "add_layout_grid" )
+            tc.fv( "edit_form_definition", "grid_layout0", form_layout_name )
+        # if not adding any fields at this time, remove the default empty field
+        if num_fields == 0:
+            tc.submit( "remove_button" )
+        # Add fields to the new form definition
+        for index1 in range( num_fields ):
+            field_label = 'field_label_%i' % index1
+            field_contents = field_type
+            field_help_name = 'field_helptext_%i' % index1
+            field_help_contents = 'Field %i help' % index1
+            field_default = 'field_default_0'
+            field_default_contents = '%s default contents' % form_type
+            tc.fv( "edit_form_definition", field_label, field_contents )
+            tc.fv( "edit_form_definition", field_help_name, field_help_contents )
+            if field_type == 'SelectField':
+                # SelectField field_type requires a refresh_on_change
+                self.refresh_form( 'field_type_0', field_type )
+                # Add options so our select list is functional
+                if num_options == 0:
+                    # Default to 2 options
+                    num_options = 2
+                for index2 in range( num_options ):
+                    tc.submit( "addoption_0" )
+                # Add contents to the new options fields
+                for index2 in range( num_options ):
+                    option_field_name = 'field_0_option_%i' % index2
+                    option_field_value = 'Option%i' % index2
+                    tc.fv( "edit_form_definition", option_field_name, option_field_value )
+            else:
+                tc.fv( "edit_form_definition", "field_type_0", field_type )
+            tc.fv( "edit_form_definition", 'field_name_0', field_name )
+            tc.fv( "edit_form_definition", field_default, field_default_contents )
+        # All done... now save
+        tc.submit( "save_changes_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def create_external_service( self, name, description, version, external_service_type_id, field_values={}, strings_displayed=[], strings_displayed_after_submit=[] ):
+        self.visit_url( '%s/external_service/create_external_service' % self.url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( "1", "name", name )
+        tc.fv( "1", "description", description )
+        tc.fv( "1", "version", version )
+        self.refresh_form( "external_service_type_id", external_service_type_id )
+        for field, value in field_values.items():
+            tc.fv( "1", field, value )
+        tc.submit( "create_external_service_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def create_new_account_as_admin( self, email='test4@bx.psu.edu', password='testuser',
+                                     username='regular-user4', redirect='' ):
+        """Create a new account for another user"""
+        # HACK: don't use panels because late_javascripts() messes up the twill browser and it
+        # can't find form fields (and hence user can't be logged in).
+        self.visit_url( "%s/user/create?cntrller=admin" % self.url )
+        self.submit_form( 1, 'create_user_button', email=email, redirect=redirect, password=password, confirm=password, username=username )
+        previously_created = False
+        username_taken = False
+        invalid_username = False
+        try:
+            self.check_page_for_string( "Created new user account" )
+        except AssertionError:
+            try:
+                # May have created the account in a previous test run...
+                self.check_page_for_string( "User with that email already exists" )
+                previously_created = True
+            except AssertionError:
+                try:
+                    self.check_page_for_string( 'Public name is taken; please choose another' )
+                    username_taken = True
+                except AssertionError:
+                    try:
+                        # Note that we're only checking whether the user name is shorter than 4 characters here...
+                        self.check_page_for_string( 'Public name must be at least 4 characters in length' )
+                        invalid_username = True
+                    except AssertionError:
+                        pass
+        return previously_created, username_taken, invalid_username
+
+    def create_request_type( self, name, desc, request_form_id, sample_form_id, states, strings_displayed=[], strings_displayed_after_submit=[] ):
+        self.visit_url( "%s/request_type/create_request_type" % self.url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( "1", "name", name )
+        tc.fv( "1", "desc", desc )
+        tc.fv( "1", "request_form_id", request_form_id )
+        tc.fv( "1", "sample_form_id", sample_form_id )
+        for index, state in enumerate(states):
+            tc.fv("1", "state_name_%i" % index, state[0])
+            tc.fv("1", "state_desc_%i" % index, state[1])
+            tc.submit( "add_state_button" )
+        tc.submit( "create_request_type_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def create_request( self, cntrller, request_type_id, name, desc, field_value_tuples, other_users_id='',
+                        strings_displayed=[], strings_displayed_after_submit=[] ):
+        self.visit_url( "%s/requests_common/create_request?cntrller=%s" % ( self.url, cntrller ) )
+        # The request_type SelectList requires a refresh_on_change
+        self.refresh_form( 'request_type_id', request_type_id )
+        if cntrller == 'requests_admin' and other_users_id:
+            # The admin is creating a request on behalf of another user
+            # The user_id SelectField requires a refresh_on_change so that the selected
+            # user's addresses will be populated in the AddressField widget
+            self.refresh_form( "user_id", other_users_id )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( "1", "name", name )
+        tc.fv( "1", "desc", desc )
+        for field_name, field_value, refresh_on_change in field_value_tuples:
+            if refresh_on_change:
+                # Only the AddressField type has a refresh on change setup on selecting an option
+                address_option = field_value[0]
+                address_value = field_value[1]
+                self.refresh_form( field_name, address_option )
+                if address_option == 'new':
+                    # handle new address
+                    self.check_page_for_string( 'Short address description' )
+                    for address_field, value in address_value.items():
+                        tc.fv( "1", field_name + '_' + address_field, value )
+                else:
+                    # existing address
+                    tc.fv( "1", field_name, address_value )
+            else:
+                tc.fv( "1", field_name, field_value )
+        tc.submit( "create_request_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def create_role( self,
+                     name='Role One',
+                     description="This is Role One",
+                     in_user_ids=[],
+                     in_group_ids=[],
+                     create_group_for_role='',
+                     private_role='',
+                     strings_displayed=[] ):
+        """Create a new role"""
+        url = "/admin/roles"
+        url_params = dict( operation='create', create_role_button='Save', name=name, description=description )
+        if in_user_ids:
+            url_params[ 'in_users' ] = ','.join( in_user_ids )
+        if in_group_ids:
+            url_params[ 'in_groups' ] = ','.join( in_group_ids )
+        if create_group_for_role == 'yes':
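+            # As in create_group() above, the doubled list value requires doseq=True when the URL is encoded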
+            url_params[ 'create_group_for_role' ] = [ 'yes', 'yes' ]
+            doseq = True
+        else:
+            doseq = False
+        self.visit_url( url, params=url_params, doseq=doseq )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        if private_role:
+            # Make sure no private roles are displayed
+            try:
+                self.check_page_for_string( private_role )
+            except AssertionError:
+                # Reaching here is the behavior we want since no private roles should be displayed
+                pass
+            else:
+                raise AssertionError( 'Private role %s displayed on Roles page' % private_role )
+        self.visit_url( "%s/admin/roles" % self.url )
+        self.check_page_for_string( name )
+
+    def delete_current_history( self, strings_displayed=[] ):
+        """Deletes the current history"""
+        self.visit_url( "/history/delete_current" )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def delete_history( self, id ):
+        """Deletes one or more histories"""
+        history_ids = self.get_all_history_ids_from_api()
+        self.assertTrue( history_ids )
+        num_deleted = len( id.split( ',' ) )
+        self.visit_url( "/history/list?operation=delete&id=%s" % ( id ) )
+        check_str = 'Deleted %d %s' % ( num_deleted, iff( num_deleted != 1, "histories", "history" ) )
+        self.check_page_for_string( check_str )
+
+    def delete_history_item( self, hda_id, strings_displayed=[] ):
+        """Deletes an item from a history"""
+        try:
+            hda_id = int( hda_id )
+        except ( TypeError, ValueError ):
+            raise AssertionError( "Invalid hda_id '%s' - must be int" % hda_id )
+        self.visit_url( "%s/datasets/%s/delete?show_deleted_on_refresh=False" % ( self.url, self.security.encode_id( hda_id ) ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def delete_library_item( self, cntrller, library_id, item_id, item_name, item_type='library_dataset' ):
+        """Mark a library item as deleted"""
+        params = dict( cntrller=cntrller, library_id=library_id, item_id=item_id, item_type=item_type )
+        self.visit_url( "/library_common/delete_library_item", params )
+        if item_type == 'library_dataset':
+            item_desc = 'Dataset'
+        else:
+            item_desc = item_type.capitalize()
+        check_str = "marked deleted"
+        self.check_for_strings( strings_displayed=[ item_desc, check_str ] )
+
+    def delete_sample_datasets( self, sample_id, sample_dataset_ids, strings_displayed=[], strings_displayed_after_submit=[], strings_not_displayed=[] ):
+        url = '%s/requests_admin/manage_datasets?cntrller=requests_admin&sample_id=%s' % ( self.url, sample_id )
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        # simulate selecting datasets and clicking the delete button on the sample datasets grid
+        sample_dataset_ids_string = ','.join( sample_dataset_ids )
+        params = dict( operation='delete', id=sample_dataset_ids_string )
+        url = "/requests_admin/manage_datasets"
+        self.visit_url( url, params )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            self.check_string_not_in_page( check_str )
+
+    def disable_access_via_link( self, history_id, strings_displayed=[], strings_displayed_after_submit=[] ):
+        # twill barfs on this form, possibly because it contains no fields, but we're not sure.
+        # In any case, we have to mimic the form submission.
+        self.visit_url( '/history/sharing', dict( id=history_id, disable_link_access=True ) )
+        self.check_for_strings( strings_displayed=strings_displayed_after_submit )
+
+    def display_history_item( self, hda_id, strings_displayed=[] ):
+        """Displays a history item - simulates eye icon click"""
+        self.visit_url( '%s/datasets/%s/display/' % ( self.url, self.security.encode_id( hda_id ) ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def download_archive_of_library_files( self, cntrller, library_id, ldda_ids, format ):
+        # Here it would be ideal to have twill set form values and submit the form, but
+        # twill barfs on that due to the recently introduced page wrappers around the contents
+        # of the browse_library.mako template which enable panel layout when visiting the
+        # page from an external URL.  By "barfs", I mean that twill somehow loses hold of the
+        # cntrller param.  We'll just simulate the form submission by building the URL manually.
+        # Here's the old, better approach...
+        # self.visit_url( "%s/library_common/browse_library?cntrller=%s&id=%s" % ( self.url, cntrller, library_id ) )
+        # for ldda_id in ldda_ids:
+        #    tc.fv( "1", "ldda_ids", ldda_id )
+        # tc.fv( "1", "do_action", format )
+        # tc.submit( "action_on_datasets_button" )
+        # Here's the new approach...
+        params = dict( cntrller=cntrller, library_id=library_id, do_action=format, ldda_ids=ldda_ids )
+        url = "/library_common/act_on_multiple_datasets"
+        self.visit_url( url, params, doseq=True )
+        tc.code( 200 )
+        archive = self.write_temp_file( self.last_page(), suffix='.' + format )
+        return archive
+
+    def edit_basic_request_info( self, cntrller, request_id, name, new_name='', new_desc='', new_fields=[],
+                                 strings_displayed=[], strings_displayed_after_submit=[] ):
+        params = dict( cntrller=cntrller, id=request_id )
+        self.visit_url( "/requests_common/edit_basic_request_info", params )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        if new_name:
+            tc.fv( "1", "name", new_name )
+        if new_desc:
+            tc.fv( "1", "desc", new_desc )
+        for index, ( field_name, field_value ) in enumerate( new_fields ):
+            tc.fv( "1", field_name, field_value )
+        tc.submit( "edit_basic_request_info_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def edit_external_service( self, external_service_id, field_values={}, strings_displayed=[], strings_displayed_after_submit=[] ):
+        self.visit_url( '%s/external_service/edit_external_service?id=%s' % ( self.url, external_service_id ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for field, value in field_values.items():
+            tc.fv( "1", field, value )
+        tc.submit( "edit_external_service_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def edit_form( self, id, form_type='', new_form_name='', new_form_desc='', field_dicts=[], field_index=0,
+                   strings_displayed=[], strings_not_displayed=[], strings_displayed_after_submit=[] ):
+        """Edit form details; name and description"""
+        self.visit_url( "/forms/edit_form_definition", params=dict( id=id ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        if new_form_name:
+            tc.fv( "edit_form_definition", "name", new_form_name )
+        if new_form_desc:
+            tc.fv( "edit_form_definition", "description", new_form_desc )
+        for i, field_dict in enumerate( field_dicts ):
+            index = i + field_index
+            tc.submit( "add_field_button" )
+            field_label = "field_label_%i" % index
+            field_label_value = field_dict[ 'label' ]
+            field_help = "field_helptext_%i" % index
+            field_help_value = field_dict[ 'desc' ]
+            field_type = "field_type_%i" % index
+            field_type_value = field_dict[ 'type' ]
+            field_required = "field_required_%i" % index
+            field_required_value = field_dict[ 'required' ]
+            field_name = "field_name_%i" % index
+            field_name_value = field_dict.get( 'name', '%i_field_name' % index )
+            tc.fv( "edit_form_definition", field_label, field_label_value )
+            tc.fv( "edit_form_definition", field_help, field_help_value )
+            tc.fv( "edit_form_definition", field_required, field_required_value )
+            tc.fv( "edit_form_definition", field_name, field_name_value )
+            if field_type_value.lower() == 'selectfield':
+                # SelectFields require a refresh_on_change
+                self.refresh_form( field_type, field_type_value )
+                for option_index, option in enumerate( field_dict[ 'selectlist' ] ):
+                    tc.submit( "addoption_%i" % index )
+                    tc.fv( "edit_form_definition", "field_%i_option_%i" % ( index, option_index ), option )
+            else:
+                tc.fv( "edit_form_definition", field_type, field_type_value )
+        tc.submit( "save_changes_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def edit_hda_attribute_info( self, hda_id, new_name='', new_info='', new_dbkey='', new_startcol='',
+                                 strings_displayed=[], strings_not_displayed=[] ):
+        """Edit history_dataset_association attribute information"""
+        self.visit_url( "/datasets/%s/edit" % self.security.encode_id( hda_id ) )
+        submit_required = False
+        self.check_page_for_string( 'Edit Attributes' )
+        if new_name:
+            tc.fv( 'edit_attributes', 'name', new_name )
+            submit_required = True
+        if new_info:
+            tc.fv( 'edit_attributes', 'info', new_info )
+            submit_required = True
+        if new_dbkey:
+            tc.fv( 'edit_attributes', 'dbkey', new_dbkey )
+            submit_required = True
+        if new_startcol:
+            tc.fv( 'edit_attributes', 'startCol', new_startcol )
+            submit_required = True
+        if submit_required:
+            tc.submit( 'save' )
+            self.check_page_for_string( 'Attributes updated' )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            try:
+                self.check_page_for_string( check_str )
+            except AssertionError:
+                pass
+            else:
+                raise AssertionError( "String (%s) incorrectly displayed on Edit Attributes page." % check_str )
+
+    def edit_request_email_settings( self, cntrller, request_id, check_request_owner=True, additional_emails='',
+                                     check_sample_states=[], strings_displayed=[], strings_displayed_after_submit=[] ):
+        self.visit_url( "/requests_common/edit_basic_request_info", params=dict( cntrller=cntrller, id=request_id ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( "2", "email_address", check_request_owner )
+        tc.fv( "2", "additional_email_addresses", additional_emails )
+        for state_name, state_id, is_checked in check_sample_states:
+            tc.fv( "2", "sample_state_%i" % state_id, is_checked )
+        tc.submit( "edit_email_settings_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def edit_samples( self, cntrller, request_id, sample_value_tuples, strings_displayed=[], strings_displayed_after_submit=[] ):
+        params = dict( cntrller=cntrller, id=request_id )
+        url = "/requests_common/edit_samples"
+        self.visit_url( url, params=params )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for sample_index, ( sample_name, target_library_info, sample_field_values ) in enumerate( sample_value_tuples ):
+            tc.fv( "1", "sample_%i_name" % sample_index, sample_name )
+            tc.fv( "1", "sample_%i_library_id" % sample_index, target_library_info[ 'library' ] )
+            self.refresh_form( "sample_%i_library_id" % sample_index, target_library_info[ 'library' ] )
+            tc.fv( "1", "sample_%i_folder_id" % sample_index, target_library_info[ 'folder' ] )
+            for field_index, field_value in enumerate( sample_field_values ):
+                tc.fv( "1", "sample_%i_field_%i" % ( sample_index, field_index ), field_value )
+        tc.submit( "save_samples_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def edit_template( self, cntrller, item_type, form_type, library_id, field_type, field_label_1, field_helptext_1, field_default_1,
+                       folder_id='', ldda_id='', action='add_field'  ):
+        """Edit the form fields defining a library template"""
+        params = dict( cntrller=cntrller, item_type=item_type, form_type=form_type, library_id=library_id )
+        self.visit_url( "/library_common/edit_template", params=params )
+        self.check_page_for_string( "Edit form definition" )
+        if action == 'add_field':
+            tc.submit( "add_field_button" )
+            tc.fv( "edit_form", "field_label_1", field_label_1 )
+            tc.fv( "edit_form", "field_helptext_1", field_helptext_1 )
+            if field_type == 'SelectField':
+                # Performs a refresh_on_change in this case
+                self.refresh_form( "field_type_1", field_type )
+            else:
+                tc.fv( "edit_form", "field_type_1", field_type )
+            tc.fv( "edit_form", "field_default_1", field_default_1 )
+        tc.submit( 'save_changes_button' )
+        self.check_page_for_string( "The template for this data library has been updated with your changes." )
+
+    def edit_user_info( self, cntrller='user', id='', new_email='', new_username='', password='', new_password='',
+                        info_values=[], strings_displayed=[], strings_displayed_after_submit=[] ):
+        if cntrller == 'admin':
+            url = "%s/admin/users?id=%s&operation=information" % ( self.url, id )
+        else:  # cntrller == 'user'
+            # The user is editing his own info, so the user id comes from trans.user.
+            url = "%s/user/manage_user_info?cntrller=user" % self.url
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        if new_email or new_username:
+            if new_email:
+                tc.fv( "login_info", "email", new_email )
+            if new_username:
+                tc.fv( "login_info", "username", new_username )
+            tc.submit( "login_info_button" )
+        if password and new_password:
+            tc.fv( "change_password", "current", password )
+            tc.fv( "change_password", "password", new_password )
+            tc.fv( "change_password", "confirm", new_password )
+            tc.submit( "change_password_button" )
+        if info_values:
+            for field_name, info_value in info_values:
+                tc.fv( "user_info", field_name, info_value )
+            tc.submit( "edit_user_info_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def files_diff( self, file1, file2, attributes=None ):
+        """Checks the contents of 2 files for differences"""
+        return files_diff(file1, file2, attributes=attributes)
+
+    def find_hda_by_dataset_name( self, name, history=None ):
+        if history is None:
+            history = self.get_history_from_api()
+        for hda in history:
+            if hda[ 'name' ] == name:
+                return hda
+
+    def folder_info( self, cntrller, folder_id, library_id, name='', new_name='', description='', template_refresh_field_name='1_field_name',
+                     template_refresh_field_contents='', template_fields=[], strings_displayed=[], strings_not_displayed=[],
+                     strings_displayed_after_submit=[], strings_not_displayed_after_submit=[] ):
+        """Add information to a library using an existing template with 2 elements"""
+        self.visit_url( "%s/library_common/folder_info?cntrller=%s&id=%s&library_id=%s" %
+                        ( self.url, cntrller, folder_id, library_id ) )
+        if name and new_name and description:
+            tc.fv( '1', "name", new_name )
+            tc.fv( '1', "description", description )
+            tc.submit( 'rename_folder_button' )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            try:
+                self.check_page_for_string( check_str )
+            except AssertionError:
+                pass
+            else:
+                raise AssertionError( "String (%s) incorrectly displayed." % check_str )
+        if template_refresh_field_contents:
+            # A template containing an AddressField is displayed on the form, so we need to refresh the form
+            # with the received template_refresh_field_contents.  There are 2 forms on the folder_info page
+            # when in edit mode, and the 2nd one is the one we want.
+            self.refresh_form( template_refresh_field_name, template_refresh_field_contents, form_no=2 )
+        if template_fields:
+            # We have an information template associated with the folder, so
+            # there are 2 forms on this page and the template is the 2nd form
+            for field_name, field_value in template_fields:
+                tc.fv( "edit_info", field_name, field_value )
+            tc.submit( 'edit_info_button' )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed_after_submit:
+            try:
+                self.check_page_for_string( check_str )
+            except AssertionError:
+                pass
+            else:
+                raise AssertionError( "String (%s) incorrectly displayed." % check_str )
+
+    def get_all_history_ids_from_api( self ):
+        return [ history['id'] for history in self.json_from_url( '/api/histories' ) ]
+
+    def get_filename( self, filename, shed_tool_id=None ):
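+        # self.shed_tools_dict (when set) maps a shed tool id to the directory holding that tool's test-data files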
+        if shed_tool_id and self.shed_tools_dict:
+            file_dir = self.shed_tools_dict[ shed_tool_id ]
+            if file_dir:
+                return os.path.abspath( os.path.join( file_dir, filename))
+        return self.test_data_resolver.get_filename( filename )
+
+    def get_form_controls( self, form ):
+        formcontrols = []
+        for i, control in enumerate( form.controls ):
+            formcontrols.append( "control %d: %s" % ( i, str( control ) ) )
+        return formcontrols
+
+    def get_hids_in_history( self, history_id ):
+        """Returns the list of hid values for items in a history"""
+        api_url = '/api/histories/%s/contents' % history_id
+        return [ history_item[ 'hid' ] for history_item in self.json_from_url( api_url ) ]
+
+    def get_hids_in_histories( self ):
+        """Returns the list of hids values for items in all histories"""
+        history_ids = self.get_all_history_ids_from_api()
+        hids = []
+        for history_id in history_ids:
+            hids.extend( self.get_hids_in_history( history_id ) )
+        return hids
+
+    def get_history_as_data_list( self, show_deleted=False ):
+        """Returns the data elements of a history"""
+        tree = self.history_as_xml_tree( show_deleted=show_deleted )
+        return tree.findall( "data" )
+
+    def get_history_from_api( self, encoded_history_id=None, show_deleted=None, show_details=False ):
+        if encoded_history_id is None:
+            history = self.get_latest_history()
+            encoded_history_id = history[ 'id' ]
+        params = dict()
+        if show_deleted is not None:
+            params[ 'deleted' ] = show_deleted
+        api_url = '/api/histories/%s/contents' % encoded_history_id
+        json_data = self.json_from_url( api_url, params=params )
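+        # The API response may still include deleted HDAs, so filter client-side to honor show_deleted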
+        if show_deleted is not None:
+            hdas = []
+            for hda in json_data:
+                if show_deleted:
+                    hdas.append( hda )
+                else:
+                    if not hda[ 'deleted' ]:
+                        hdas.append( hda )
+            json_data = hdas
+        if show_details:
+            params[ 'details' ] = ','.join( [ hda[ 'id' ] for hda in json_data ] )
+            api_url = '/api/histories/%s/contents' % encoded_history_id
+            json_data = self.json_from_url( api_url, params=params )
+        return json_data
+
+    def get_job_stdout( self, hda_id, format=False ):
+        return self._get_job_stream_output( hda_id, 'stdout', format )
+
+    def get_job_stderr( self, hda_id, format=False ):
+        return self._get_job_stream_output( hda_id, 'stderr', format )
+
+    def get_latest_history( self ):
+        return self.json_from_url( '/api/histories' )[ 0 ]
+
+    def get_running_datasets( self ):
+        """Returns True if the latest history's state is 'queued' or 'running'"""
+        history_id = self.json_from_url( '/api/histories' )[ 0 ][ 'id' ]
+        jsondata = self.json_from_url( '/api/histories/%s' % history_id )
+        return jsondata[ 'state' ] in [ 'queued', 'running' ]
+
+    def get_tags( self, item_id, item_class ):
+        self.visit_url( "%s/tag/get_tagging_elt_async?item_id=%s&item_class=%s" %
+                        ( self.url, item_id, item_class ) )
+
+    def history_as_xml_tree( self, show_deleted=False ):
+        """Returns a parsed xml object of a history"""
+        self.visit_url( '/history?as_xml=True&show_deleted=%s' % show_deleted )
+        xml = self.last_page()
+        tree = ElementTree.fromstring(xml)
+        return tree
+
+    def history_set_default_permissions( self, permissions_out=[], permissions_in=[], role_id=3 ):  # role.id = 3 is Private Role for test3@bx.psu.edu
+        # NOTE: Twill has a bug that requires the ~/user/permissions page to contain at least 1 option value
+        # in each select list or twill throws an exception, which is: ParseError: OPTION outside of SELECT
+        # Due to this bug, we'll bypass visiting the page, and simply pass the permissions on to the
+        # /root/history_set_default_permissions method.
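+        # Example (hypothetical permission name): permissions_out=[ 'access' ] appends '&access_out=3' to the URL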
+        url = "root/history_set_default_permissions?update_roles_button=Save&id=None&dataset=True"
+        for po in permissions_out:
+            key = '%s_out' % po
+            url = "%s&%s=%s" % ( url, key, str( role_id ) )
+        for pi in permissions_in:
+            key = '%s_in' % pi
+            url = "%s&%s=%s" % ( url, key, str( role_id ) )
+        self.visit_url( "%s/%s" % ( self.url, url ) )
+        self.check_page_for_string( 'Default history permissions have been changed.' )
+
+    def histories_as_xml_tree( self ):
+        """Returns a parsed xml object of all histories"""
+        self.visit_url( '/history/list_as_xml' )
+        xml = self.last_page()
+        tree = ElementTree.fromstring(xml)
+        return tree
+
+    def history_options( self, user=False, active_datasets=False, activatable_datasets=False, histories_shared_by_others=False ):
+        """Mimics user clicking on history options link"""
+        self.visit_url( "/root/history_options" )
+        if user:
+            self.check_page_for_string( 'Previously</a> stored histories' )
+            if active_datasets:
+                self.check_page_for_string( 'Create</a> a new empty history' )
+                self.check_page_for_string( 'Construct workflow</a> from current history' )
+                self.check_page_for_string( 'Copy</a> current history' )
+            self.check_page_for_string( 'Share</a> current history' )
+            self.check_page_for_string( 'Change default permissions</a> for current history' )
+            if histories_shared_by_others:
+                self.check_page_for_string( 'Histories</a> shared with you by others' )
+        if activatable_datasets:
+            self.check_page_for_string( 'Show deleted</a> datasets in current history' )
+        self.check_page_for_string( 'Rename</a> current history' )
+        self.check_page_for_string( 'Delete</a> current history' )
+
+    def import_datasets_to_histories( self, cntrller, library_id, ldda_ids='', new_history_name='Unnamed history', strings_displayed=[] ):
+        # Can't use the ~/library_admin/libraries form, as twill barfs on it, so we'll simulate
+        # the form submission by going directly to the form action.
+        self.visit_url( '%s/library_common/import_datasets_to_histories?cntrller=%s&library_id=%s&ldda_ids=%s&new_history_name=%s&import_datasets_to_histories_button=Import+library+datasets' %
+                        ( self.url, cntrller, library_id, ldda_ids, new_history_name ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def import_history_via_url( self, history_id, email, strings_displayed_after_submit=[] ):
+        self.visit_url( "/history/imp", params=dict( id=history_id ) )
+        self.check_for_strings( strings_displayed=strings_displayed_after_submit )
+
+    def is_binary( self, filename ):
+        """Heuristic check: scan the first 10 lines for characters outside the ASCII range"""
+        with open( filename, "U" ) as temp:
+            lineno = 0
+            for line in temp:
+                lineno += 1
+                line = line.strip()
+                if line:
+                    for char in line:
+                        if ord( char ) > 128:
+                            return True
+                if lineno > 10:
+                    break
+        return False
+
+    def is_history_empty( self ):
+        """
+        Uses history page JSON to determine whether this history is empty
+        (i.e. has no undeleted datasets).
+        """
+        return len( self.get_history_from_api() ) == 0
+
+    def is_zipped( self, filename ):
+        return zipfile.is_zipfile( filename )
+
+    def json_from_url( self, url, params={} ):
+        self.visit_url( url, params )
+        return loads( self.last_page() )
+
+    def last_page( self ):
+        return tc.browser.get_html()
+
+    def last_url( self ):
+        return tc.browser.get_url()
+
+    def ldda_permissions( self, cntrller, library_id, folder_id, id, role_ids_str,
+                          permissions_in=[], permissions_out=[], strings_displayed=[], ldda_name='' ):
+        # role_ids_str must be a comma-separated string of role ids
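+        # e.g. role_ids_str='3,7' assigns roles 3 and 7 to each permission named in permissions_in / permissions_out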
+        params = dict( cntrller=cntrller, library_id=library_id, folder_id=folder_id, id=id )
+        url = "/library_common/ldda_permissions"
+        for po in permissions_out:
+            params[ '%s_out' % po ] = role_ids_str
+        for pi in permissions_in:
+            params[ '%s_in' % pi ] = role_ids_str
+        if permissions_in or permissions_out:
+            params[ 'update_roles_button' ] = 'Save'
+            self.visit_url( url, params )
+        if not strings_displayed:
+            strings_displayed = [ "Permissions updated for dataset '%s'." % ldda_name ]
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def ldda_info( self, cntrller, library_id, folder_id, ldda_id, strings_displayed=[], strings_not_displayed=[] ):
+        """View library_dataset_dataset_association information"""
+        self.visit_url( "%s/library_common/ldda_info?cntrller=%s&library_id=%s&folder_id=%s&id=%s" %
+                        ( self.url, cntrller, library_id, folder_id, ldda_id ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            try:
+                self.check_page_for_string( check_str )
+            except AssertionError:
+                pass
+            else:
+                raise AssertionError( "String (%s) should not have been displayed on ldda info page." % check_str )
+
+    def ldda_edit_info( self, cntrller, library_id, folder_id, ldda_id, ldda_name, new_ldda_name='', template_refresh_field_name='1_field_name',
+                        template_refresh_field_contents='', template_fields=[], strings_displayed=[], strings_not_displayed=[] ):
+        """Edit library_dataset_dataset_association information, optionally template element information"""
+        self.visit_url( "%s/library_common/ldda_edit_info?cntrller=%s&library_id=%s&folder_id=%s&id=%s" %
+                        ( self.url, cntrller, library_id, folder_id, ldda_id ) )
+        check_str = 'Edit attributes of %s' % ldda_name
+        self.check_page_for_string( check_str )
+        if new_ldda_name:
+            tc.fv( '1', 'name', new_ldda_name )
+            tc.submit( 'save' )
+            check_str = escape( "Attributes updated for library dataset '%s'." % new_ldda_name )
+            self.check_page_for_string( check_str )
+        if template_refresh_field_contents:
+            # A template containing an AddressField is displayed on the upload form, so we need to refresh the form
+            # with the received template_refresh_field_contents.  There are 4 forms on this page, and the template is
+            # contained in the 4th form named "edit_info".
+            self.refresh_form( template_refresh_field_name, template_refresh_field_contents, form_no=4 )
+        if template_fields:
+            # We have an information template associated with the folder, so
+            # there are 2 forms on this page and the template is the 2nd form
+            for field_name, field_value in template_fields:
+                tc.fv( "edit_info", field_name, field_value )
+            tc.submit( 'edit_info_button' )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            try:
+                self.check_page_for_string( check_str )
+            except AssertionError:
+                pass
+            else:
+                raise AssertionError( "String (%s) should not have been displayed on ldda Edit Attributes page." % check_str )
+
+    def library_info( self, cntrller, library_id, library_name='', new_name='', new_description='', new_synopsis='',
+                      template_fields=[], strings_displayed=[] ):
+        """Edit information about a library, optionally using an existing template with up to 2 elements"""
+        self.visit_url( "%s/library_common/library_info?cntrller=%s&id=%s" % ( self.url, cntrller, library_id ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        if new_name and new_description and new_synopsis:
+            tc.fv( '1', 'name', new_name )
+            tc.fv( '1', 'description', new_description )
+            tc.fv( '1', 'synopsis', new_synopsis )
+            tc.submit( 'library_info_button' )
+            self.check_page_for_string( "Information updated for library" )
+        if template_fields:
+            for field_name, field_value in template_fields:
+                # The 2nd form on the page contains the template, and the form is named edit_info.
+                # Set the template field value
+                tc.fv( "edit_info", field_name, field_value )
+            tc.submit( 'edit_info_button' )
+
+    def library_permissions( self, library_id, library_name, role_ids_str, permissions_in, permissions_out, cntrller='library_admin' ):
+        # role_ids_str must be a comma-separated string of role ids
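+        # Each permission name becomes a '<name>_in' or '<name>_out' query argument set to role_ids_str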
+        url = "library_common/library_permissions?id=%s&cntrller=%s&update_roles_button=Save" % ( library_id, cntrller )
+        for po in permissions_out:
+            key = '%s_out' % po
+            url = "%s&%s=%s" % ( url, key, role_ids_str )
+        for pi in permissions_in:
+            key = '%s_in' % pi
+            url = "%s&%s=%s" % ( url, key, role_ids_str )
+        self.visit_url( "%s/%s" % ( self.url, url ) )
+        check_str = escape( "Permissions updated for library '%s'." % library_name )
+        self.check_page_for_string( check_str )
+
+    def library_wait( self, library_id, cntrller='library_admin', maxiter=90 ):
+        """Waits for the tools to finish"""
+        count = 0
+        sleep_amount = 1
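+        # Poll the browse page with a linearly increasing sleep ( 1s, 2s, 3s, ... ) until the
+        # 'running' marker comment disappears or maxiter polls have been made.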
+        while count < maxiter:
+            count += 1
+            self.visit_url( "%s/library_common/browse_library?cntrller=%s&id=%s" % ( self.url, cntrller, library_id ) )
+            page = tc.browser.get_html()
+            if page.find( '<!-- running: do not change this comment, used by TwillTestCase.library_wait -->' ) > -1:
+                time.sleep( sleep_amount )
+                sleep_amount += 1
+            else:
+                break
+        self.assertNotEqual(count, maxiter)
+
+    def login( self, email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='', logout_first=True ):
+        # Clear cookies.
+        if logout_first:
+            self.logout()
+        # test@bx.psu.edu is configured as an admin user
+        previously_created, username_taken, invalid_username = \
+            self.create( email=email, password=password, username=username, redirect=redirect )
+        if previously_created:
+            # The account has previously been created, so just log in.
+            # HACK: don't use panels because late_javascripts() messes up the twill browser and it
+            # can't find form fields (and hence user can't be logged in).
+            self.visit_url( "/user/login?use_panels=False" )
+            self.submit_form( 'login', 'login_button', login=email, redirect=redirect, password=password )
+
+    def logout( self ):
+        self.visit_url( "%s/user/logout" % self.url )
+        self.check_page_for_string( "You have been logged out" )
+        tc.browser.cj.clear()
+
+    def make_accessible_via_link( self, history_id, strings_displayed=[], strings_displayed_after_submit=[] ):
+        # twill barfs on this form, possibly because it contains no fields, but we're not sure.
+        # In any case, we have to mimic the form submission.
+        self.visit_url( '/history/sharing', dict( id=history_id, make_accessible_via_link=True ) )
+        self.check_for_strings( strings_displayed=strings_displayed_after_submit )
+
+    def make_library_item_public( self, library_id, id, cntrller='library_admin', item_type='library',
+                                  contents=False, library_name='', folder_name='', ldda_name='' ):
+        url = "%s/library_common/make_library_item_public?cntrller=%s&library_id=%s&item_type=%s&id=%s&contents=%s" % \
+            ( self.url, cntrller, library_id, item_type, id, str( contents ) )
+        self.visit_url( url )
+        if item_type == 'library':
+            if contents:
+                check_str = "The data library (%s) and all its contents have been made publicly accessible." % library_name
+            else:
+                check_str = "The data library (%s) has been made publicly accessible, but access to its contents has been left unchanged." % library_name
+        elif item_type == 'folder':
+            check_str = "All of the contents of folder (%s) have been made publicly accessible." % folder_name
+        elif item_type == 'ldda':
+            check_str = "The libary dataset (%s) has been made publicly accessible." % ldda_name
+        self.check_page_for_string( check_str )
+
+    def makeTfname(self, fname=None):
+        """safe temp name - preserve the file extension for tools that interpret it"""
+        return make_temp_fname(fname)
+
+    def manage_library_template_inheritance( self, cntrller, item_type, library_id, folder_id=None, ldda_id=None, inheritable=True ):
+        # If inheritable is True, the item is currently inheritable.
+        if item_type == 'library':
+            url = "%s/library_common/manage_template_inheritance?cntrller=%s&item_type=%s&library_id=%s" % \
+                ( self.url, cntrller, item_type, library_id )
+        elif item_type == 'folder':
+            url = "%s/library_common/manage_template_inheritance?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s" % \
+                ( self.url, cntrller, item_type, library_id, folder_id )
+        elif item_type == 'ldda':
+            url = "%s/library_common/manage_template_inheritance?cntrller=%s&item_type=%s&library_id=%s&folder_id=%s&ldda_id=%s" % \
+                ( self.url, cntrller, item_type, library_id, folder_id, ldda_id )
+        self.visit_url( url )
+        if inheritable:
+            self.check_page_for_string( 'will no longer be inherited to contained folders and datasets' )
+        else:
+            self.check_page_for_string( 'will now be inherited to contained folders and datasets' )
+
+    def manage_roles_and_groups_for_user( self, user_id, in_role_ids=[], out_role_ids=[],
+                                          in_group_ids=[], out_group_ids=[], strings_displayed=[] ):
+        url = "%s/admin/manage_roles_and_groups_for_user?id=%s" % ( self.url, user_id )
+        if in_role_ids:
+            url += "&in_roles=%s" % ','.join( in_role_ids )
+        if out_role_ids:
+            url += "&out_roles=%s" % ','.join( out_role_ids )
+        if in_group_ids:
+            url += "&in_groups=%s" % ','.join( in_group_ids )
+        if out_group_ids:
+            url += "&out_groups=%s" % ','.join( out_group_ids )
+        if in_role_ids or out_role_ids or in_group_ids or out_group_ids:
+            url += "&user_roles_groups_edit_button=Save"
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def mark_form_deleted( self, form_id ):
+        """Mark a form_definition as deleted"""
+        url = "%s/forms/delete_form_definition?id=%s" % ( self.url, form_id )
+        self.visit_url( url )
+        check_str = "1 forms have been deleted."
+        self.check_page_for_string( check_str )
+
+    def mark_group_deleted( self, group_id, group_name ):
+        """Mark a group as deleted"""
+        self.visit_url( "%s/admin/groups?operation=delete&id=%s" % ( self.url, group_id ) )
+        check_str = "Deleted 1 groups:  %s" % group_name
+        self.check_page_for_string( check_str )
+
+    def mark_role_deleted( self, role_id, role_name ):
+        """Mark a role as deleted"""
+        self.visit_url( "%s/admin/roles?operation=delete&id=%s" % ( self.url, role_id ) )
+        check_str = "Deleted 1 roles:  %s" % role_name
+        self.check_page_for_string( check_str )
+
+    def mark_user_deleted( self, user_id, email='' ):
+        """Mark a user as deleted"""
+        self.visit_url( "%s/admin/users?operation=delete&id=%s" % ( self.url, user_id ) )
+        check_str = "Deleted 1 users"
+        self.check_page_for_string( check_str )
+
+    def move_library_item( self, cntrller, item_type, item_id, source_library_id, make_target_current,
+                           target_library_id=None, target_folder_id=None, strings_displayed=[], strings_displayed_after_submit=[] ):
+        params = dict( cntrller=cntrller,
+                       item_type=item_type,
+                       item_id=item_id,
+                       source_library_id=source_library_id,
+                       make_target_current=make_target_current )
+        if target_library_id is not None:
+            params[ 'target_library_id' ] = target_library_id
+        if target_folder_id is not None:
+            params[ 'target_folder_id' ] = target_folder_id
+        self.visit_url( "%s/library_common/move_library_item" % self.url, params=params )
+        if target_library_id:
+            self.refresh_form( 'target_library_id', target_library_id, form_name='move_library_item' )
+        if target_folder_id:
+            tc.fv( '1', 'target_folder_id', target_folder_id )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.submit( 'move_library_item_button' )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def new_history( self, name=None ):
+        """Creates a new, empty history"""
+        if name:
+            self.visit_url( "%s/history_new?name=%s" % ( self.url, name ) )
+        else:
+            self.visit_url( "%s/history_new" % self.url )
+        self.check_page_for_string( 'New history created' )
+        assert self.is_history_empty(), 'Creating new history did not result in an empty history.'
+
+    def purge_group( self, group_id, group_name ):
+        """Purge an existing group"""
+        self.visit_url( "%s/admin/groups?operation=purge&id=%s" % ( self.url, group_id ) )
+        check_str = "Purged 1 groups:  %s" % group_name
+        self.check_page_for_string( check_str )
+
+    def purge_library( self, library_id, library_name ):
+        """Purge a library"""
+        params = dict( id=library_id )
+        self.visit_url( "/library_admin/purge_library", params )
+        check_str = "Library '%s' and all of its contents have been purged" % library_name
+        self.check_page_for_string( check_str )
+
+    def purge_role( self, role_id, role_name ):
+        """Purge an existing role"""
+        self.visit_url( "%s/admin/roles?operation=purge&id=%s" % ( self.url, role_id ) )
+        check_str = "Purged 1 roles:  %s" % role_name
+        self.check_page_for_string( check_str )
+
+    def purge_user( self, user_id, email ):
+        """Purge a user account"""
+        self.visit_url( "%s/admin/users?operation=purge&id=%s" % ( self.url, user_id ) )
+        check_str = "Purged 1 users"
+        self.check_page_for_string( check_str )
+
+    def refresh_form( self, control_name, value, form_no=0, form_id=None, form_name=None, **kwd ):
+        """Handles Galaxy's refresh_on_change for forms without ultimately submitting the form"""
+        # control_name is the name of the form field that requires refresh_on_change, and value is
+        # the value to which that field is being set.
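+        # Example from this class: create_request() calls self.refresh_form( 'request_type_id', request_type_id )
+        # to trigger the re-render after changing the select field, without submitting the real form action.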
+        for i, f in enumerate( self.showforms() ):
+            if form_id is not None or form_name is not None:
+                # An explicit form_id or form_name takes precedence; otherwise the first form
+                # (form_no=0) would always match, regardless of the id or name requested.
+                if ( form_id is not None and f.id == form_id ) or ( form_name is not None and f.name == form_name ):
+                    break
+            elif i == form_no:
+                break
+        formcontrols = self.get_form_controls( f )
+        try:
+            control = f.find_control( name=control_name )
+        except Exception:
+            log.debug( '\n'.join( formcontrols ) )
+            # This assumes we always want the first control of the given name, which may not be ideal...
+            control = f.find_control( name=control_name, nr=0 )
+        # Check for refresh_on_change attribute, submit a change if required
+        if 'refresh_on_change' in control.attrs.keys():
+            # Clear Control and set to proper value
+            control.clear()
+            tc.fv( f.name, control.name, value )
+            # Create a new submit control, allows form to refresh, instead of going to next page
+            control = ClientForm.SubmitControl( 'SubmitControl', '___refresh_grouping___', {'name': 'refresh_grouping'} )
+            control.add_to_form( f )
+            control.fixup()
+            # Submit for refresh
+            tc.submit( '___refresh_grouping___' )
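+
+        # A minimal usage sketch (the form and field names below are
+        # illustrative, not taken from a real Galaxy page): selecting a value
+        # in a refresh_on_change field re-submits the form via the synthetic
+        # submit control above, so dependent fields are re-rendered before
+        # the caller fills in the rest of the form.
+        #
+        #     self.refresh_form( 'file_type', 'bed', form_name='tool_form' )
+        #     tc.fv( 'tool_form', 'dbkey', 'hg18' )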
+
+    def reject_request( self, request_id, request_name, comment, strings_displayed=[], strings_displayed_after_submit=[] ):
+        self.visit_url( "%s/requests_admin/reject_request?id=%s" % ( self.url, request_id ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( "1", "comment", comment )
+        tc.submit( "reject_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def reload_external_service( self, external_service_type_id, strings_displayed=[], strings_displayed_after_submit=[] ):
+        self.visit_url( '%s/external_service/reload_external_service_types' % self.url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( "1", "external_service_type_id", external_service_type_id )
+        tc.submit( "reload_external_service_type_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def reload_page( self ):
+        tc.reload()
+        tc.code(200)
+
+    def rename_role( self, role_id, name='Role One Renamed', description='This is Role One Re-described' ):
+        """Rename a role"""
+        self.visit_url( "%s/admin/roles?operation=rename&id=%s" % ( self.url, role_id ) )
+        self.check_page_for_string( 'Change role name and description' )
+        tc.fv( "1", "name", name )
+        tc.fv( "1", "description", description )
+        tc.submit( "rename_role_button" )
+
+    def rename_group( self, group_id, name='Group One Renamed' ):
+        """Rename a group"""
+        self.visit_url( "%s/admin/groups?operation=rename&id=%s" % ( self.url, group_id ) )
+        self.check_page_for_string( 'Change group name' )
+        tc.fv( "1", "name", name )
+        tc.submit( "rename_group_button" )
+
+    def rename_history( self, id, old_name, new_name ):
+        """Rename an existing history"""
+        self.visit_url( "/history/rename", params=dict( id=id, name=new_name ) )
+        check_str = 'History: %s renamed to: %s' % ( old_name, unquote( new_name ) )
+        self.check_page_for_string( check_str )
+
+    def rename_sample_datasets( self, sample_id, sample_dataset_ids, new_sample_dataset_names, strings_displayed=[], strings_displayed_after_submit=[] ):
+        sample_dataset_ids_string = ','.join( sample_dataset_ids )
+        url = "%s/requests_admin/manage_datasets?operation=rename&id=%s" % ( self.url, sample_dataset_ids_string )
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        for sample_dataset_id, ( prefix, new_name ) in zip( sample_dataset_ids, new_sample_dataset_names ):
+            tc.fv( "1", 'rename_datasets_for_sample_%s' % sample_dataset_id, prefix )
+            tc.fv( "1", 'new_name_%s' % sample_dataset_id, new_name )
+        tc.submit( "rename_datasets_button" )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def request_type_permissions( self, request_type_id, request_type_name, role_ids_str, permissions_in, permissions_out ):
+        # role_ids_str must be a comma-separated string of role ids
+        url = "request_type/request_type_permissions?id=%s&update_roles_button=Save" % ( request_type_id )
+        for po in permissions_out:
+            key = '%s_out' % po
+            url = "%s&%s=%s" % ( url, key, role_ids_str )
+        for pi in permissions_in:
+            key = '%s_in' % pi
+            url = "%s&%s=%s" % ( url, key, role_ids_str )
+        self.visit_url( "%s/%s" % ( self.url, url ) )
+        check_str = "Permissions updated for request type '%s'" % request_type_name
+        self.check_page_for_string( check_str )
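+
+        # For example (hypothetical ids), permissions_in=[ 'access' ],
+        # permissions_out=[ 'modify' ] and role_ids_str='3,4' build a URL of
+        # the form:
+        #
+        #     request_type/request_type_permissions?id=1&update_roles_button=Save&modify_out=3,4&access_in=3,4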
+
+    def reset_password_as_admin( self, user_id, password='testreset' ):
+        """Reset a user password"""
+        self.visit_url( "%s/admin/reset_user_password?id=%s" % ( self.url, user_id ) )
+        tc.fv( "1", "password", password )
+        tc.fv( "1", "confirm", password )
+        tc.submit( "reset_user_password_button" )
+        self.check_page_for_string( "Passwords reset for 1 user." )
+
+    def run_ucsc_main( self, track_params, output_params ):
+        """Gets Data From UCSC"""
+        tool_id = "ucsc_table_direct1"
+        galaxy_url = "%s/tool_runner/index?" % self.url
+        track_params.update( dict( GALAXY_URL=galaxy_url, hgta_compressType='none', tool_id=tool_id ) )
+        self.visit_url( "http://genome.ucsc.edu/cgi-bin/hgTables", params=track_params )
+        tc.fv( 'mainForm', 'checkboxGalaxy', 'on' )
+        tc.submit( 'hgta_doTopSubmit' )
+        tc.fv( 2, "hgta_geneSeqType", "genomic" )
+        tc.submit( 'hgta_doGenePredSequence' )
+        tc.fv( 2, 'hgSeq.casing', 'upper' )
+        tc.submit( 'hgta_doGalaxyQuery' )
+
+    def save_log( self, *path ):
+        """Saves the log to a file"""
+        filename = os.path.join( *path )
+        with open( filename, 'wt' ) as f:
+            f.write( buffer.getvalue() )
+
+    def set_history( self ):
+        """Sets the history (stores the cookies for this run)"""
+        if self.history_id:
+            self.visit_url( "/history", params=dict( id=self.history_id ) )
+        else:
+            self.new_history()
+
+    def share_current_history( self, email, strings_displayed=[], strings_displayed_after_submit=[],
+                               action='', action_strings_displayed=[], action_strings_displayed_after_submit=[] ):
+        """Share the current history with different users"""
+        self.visit_url( "%s/history/share" % self.url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( 'share', 'email', email )
+        tc.submit( 'share_button' )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+        if action:
+            # If we have an action, then we are sharing datasets with users that do not have access permissions on them
+            for check_str in action_strings_displayed:
+                self.check_page_for_string( check_str )
+            tc.fv( 'share_restricted', 'action', action )
+
+            tc.submit( "share_restricted_button" )
+            for check_str in action_strings_displayed_after_submit:
+                self.check_page_for_string( check_str )
+
+    def share_histories_with_users( self, ids, emails, strings_displayed=[], strings_displayed_after_submit=[],
+                                    action=None, action_strings_displayed=[] ):
+        """Share one or more histories with one or more different users"""
+        self.visit_url( "%s/history/list?id=%s&operation=Share" % ( self.url, ids ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        tc.fv( 'share', 'email', emails )
+        tc.submit( 'share_button' )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+        if action:
+            # If we have an action, then we are sharing datasets with users that do not have access permissions on them
+            tc.fv( 'share_restricted', 'action', action )
+            tc.submit( "share_restricted_button" )
+
+            for check_str in action_strings_displayed:
+                self.check_page_for_string( check_str )
+
+    def show_cookies( self ):
+        return tc.show_cookies()
+
+    def showforms( self ):
+        """Shows form, helpful for debugging new tests"""
+        return tc.showforms()
+
+    def start_sample_datasets_transfer( self, sample_id, sample_dataset_ids, strings_displayed=[], strings_displayed_after_submit=[], strings_displayed_count=[], strings_not_displayed=[] ):
+        url = '%s/requests_admin/manage_datasets?cntrller=requests_admin&sample_id=%s' % ( self.url, sample_id )
+        self.visit_url( url )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        # simulate selecting datasets and clicking the transfer button on the sample datasets grid
+        sample_dataset_ids_string = ','.join( sample_dataset_ids )
+        url = "%s/requests_admin/manage_datasets?operation=transfer&id=%s" % ( self.url, sample_dataset_ids_string )
+        self.visit_url( url )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+        for check_str in strings_not_displayed:
+            self.check_string_not_in_page( check_str )
+        for check_str, count in strings_displayed_count:
+            self.check_string_count_in_page( check_str, count )
+
+    def submit_form( self, form_no=0, button="runtool_btn", **kwd ):
+        """Populates and submits a form from the keyword arguments."""
+        # An HTMLForm contains a sequence of Controls.  Supported control classes are:
+        # TextControl, FileControl, ListControl, RadioControl, CheckboxControl, SelectControl,
+        # SubmitControl, ImageControl
+        for i, f in enumerate( self.showforms() ):
+            if i == form_no:
+                break
+        # To help with debugging a tool, print out the form controls when the test fails
+        print("form '%s' contains the following controls ( note the values )" % f.name)
+        controls = {}
+        formcontrols = self.get_form_controls( f )
+        hc_prefix = '<HiddenControl('
+        for i, control in enumerate( f.controls ):
+            if hc_prefix not in str( control ):
+                try:
+                    # check if a repeat element needs to be added
+                    if control.name is not None:
+                        if control.name not in kwd and control.name.endswith( '_add' ):
+                            # control name doesn't exist, could be repeat
+                            repeat_startswith = control.name[0:-4]
+                            if repeat_startswith and not [ c_name for c_name in controls.keys() if c_name.startswith( repeat_startswith ) ] and [ c_name for c_name in kwd.keys() if c_name.startswith( repeat_startswith ) ]:
+                                tc.browser.clicked( f, control )
+                                tc.submit( control.name )
+                                return self.submit_form( form_no=form_no, button=button, **kwd )
+                    # Check for refresh_on_change attribute, submit a change if required
+                    if hasattr( control, 'attrs' ) and 'refresh_on_change' in control.attrs.keys():
+                        changed = False
+                        # For DataToolParameter, control.value is the HDA id, but kwd contains the filename.
+                        # This loop gets the filename/label for the selected values.
+                        item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ]
+                        for value in kwd[ control.name ]:
+                            if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
+                                changed = True
+                                break
+                        if changed:
+                            # Clear Control and set to proper value
+                            control.clear()
+                            # kwd[control.name] should be a singlelist
+                            for elem in kwd[ control.name ]:
+                                tc.fv( f.name, control.name, str( elem ) )
+                            # Create a new submit control, allows form to refresh, instead of going to next page
+                            control = ClientForm.SubmitControl( 'SubmitControl', '___refresh_grouping___', {'name': 'refresh_grouping'} )
+                            control.add_to_form( f )
+                            control.fixup()
+                            # Submit for refresh
+                            tc.submit( '___refresh_grouping___' )
+                            return self.submit_form( form_no=form_no, button=button, **kwd )
+                except Exception:
+                    log.exception( "In submit_form, continuing, but caught exception." )
+                    for formcontrol in formcontrols:
+                        log.debug( formcontrol )
+                    continue
+                controls[ control.name ] = control
+        # No refresh_on_change attribute found in current form, so process as usual
+        for control_name, control_value in kwd.items():
+            if control_name not in controls:
+                continue  # these cannot be handled safely - cause the test to barf out
+            if not isinstance( control_value, list ):
+                control_value = [ control_value ]
+            control = controls[ control_name ]
+            control.clear()
+            if control.is_of_kind( "text" ):
+                tc.fv( f.name, control.name, ",".join( control_value ) )
+            elif control.is_of_kind( "list" ):
+                try:
+                    if control.is_of_kind( "multilist" ):
+                        if control.type == "checkbox":
+                            def is_checked( value ):
+                                # Copied from form_builder.CheckboxField
+                                if value is True:
+                                    return True
+                                if isinstance( value, list ):
+                                    value = value[0]
+                                return isinstance( value, string_types ) and value.lower() in ( "yes", "true", "on" )
+                            try:
+                                checkbox = control.get()
+                                checkbox.selected = is_checked( control_value )
+                            except Exception as e1:
+                                print("Attempting to set checkbox selected value threw exception: ", e1)
+                                # if there's more than one checkbox, probably should use the behaviour for
+                                # ClientForm.ListControl ( see twill code ), but this works for now...
+                                for elem in control_value:
+                                    control.get( name=elem ).selected = True
+                        else:
+                            for elem in control_value:
+                                try:
+                                    # Doubt this case would ever work, but want
+                                    # to preserve backward compat.
+                                    control.get( name=elem ).selected = True
+                                except Exception:
+                                    # ... anyway this is really what we want to
+                                    # do, probably even want to try the len(
+                                    # elem ) > 30 check below.
+                                    control.get( label=elem ).selected = True
+                    else:  # control.is_of_kind( "singlelist" )
+                        for elem in control_value:
+                            try:
+                                tc.fv( f.name, control.name, str( elem ) )
+                            except Exception:
+                                try:
+                                    # Galaxy truncates long file names in the dataset_collector in galaxy/tools/parameters/basic.py
+                                    if len( elem ) > 30:
+                                        elem_name = '%s..%s' % ( elem[:17], elem[-11:] )
+                                        tc.fv( f.name, control.name, str( elem_name ) )
+                                        pass
+                                    else:
+                                        raise
+                                except Exception:
+                                    raise
+                            except Exception:
+                                for formcontrol in formcontrols:
+                                    log.debug( formcontrol )
+                                log.exception( "Attempting to set control '%s' to value '%s' (also tried '%s') threw exception.", control.name, elem, elem_name )
+                                pass
+                except Exception as exc:
+                    for formcontrol in formcontrols:
+                        log.debug( formcontrol )
+                    errmsg = "Attempting to set field '%s' to value '%s' in form '%s' threw exception: %s\n" % ( control_name, str( control_value ), f.name, str( exc ) )
+                    errmsg += "control: %s\n" % str( control )
+                    errmsg += "If the above control is a DataToolparameter whose data type class does not include a sniff() method,\n"
+                    errmsg += "make sure to include a proper 'ftype' attribute to the tag for the control within the <test> tag set.\n"
+                    raise AssertionError( errmsg )
+            else:
+                # Add conditions for other control types here when necessary.
+                pass
+        tc.submit( button )
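+
+        # A usage sketch (the tool form and parameter names are hypothetical):
+        # kwd keys must match control names in the rendered form, and list
+        # values select multiple entries in multilist controls.
+        #
+        #     self.submit_form( form_no=0, button='runtool_btn',
+        #                       input1='1.bed', lineNum='5' )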
+
+    def submit_request( self, cntrller, request_id, request_name, strings_displayed_after_submit=[] ):
+        self.visit_url( "%s/requests_common/submit_request?cntrller=%s&id=%s" % ( self.url, cntrller, request_id ) )
+        for check_str in strings_displayed_after_submit:
+            self.check_page_for_string( check_str )
+
+    def switch_history( self, id='', name='' ):
+        """Switches to a history in the current list of histories"""
+        params = dict( operation='switch', id=id )
+        self.visit_url( "/history/list", params )
+        if name:
+            self.check_history_for_exact_string( name )
+
+    def undelete_group( self, group_id, group_name ):
+        """Undelete an existing group"""
+        self.visit_url( "%s/admin/groups?operation=undelete&id=%s" % ( self.url, group_id ) )
+        check_str = "Undeleted 1 groups:  %s" % group_name
+        self.check_page_for_string( check_str )
+
+    def undelete_history_item( self, hda_id, strings_displayed=[] ):
+        """Un-deletes a deleted item in a history"""
+        try:
+            hda_id = int( hda_id )
+        except ( TypeError, ValueError ):
+            raise AssertionError( "Invalid hda_id '%s' - must be int" % hda_id )
+        self.visit_url( "%s/datasets/%s/undelete" % ( self.url, self.security.encode_id( hda_id ) ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def undelete_library_item( self, cntrller, library_id, item_id, item_name, item_type='library_dataset' ):
+        """Mark a library item as deleted"""
+        params = dict( cntrller=cntrller, library_id=library_id, item_id=item_id, item_type=item_type )
+        self.visit_url( "/library_common/undelete_library_item", params )
+        if item_type == 'library_dataset':
+            item_desc = 'Dataset'
+        else:
+            item_desc = item_type.capitalize()
+        check_str = "marked undeleted"
+        self.check_for_strings( strings_displayed=[ item_desc, check_str ] )
+
+    def undelete_role( self, role_id, role_name ):
+        """Undelete an existing role"""
+        self.visit_url( "%s/admin/roles?operation=undelete&id=%s" % ( self.url, role_id ) )
+        check_str = "Undeleted 1 roles:  %s" % role_name
+        self.check_page_for_string( check_str )
+
+    def undelete_user( self, user_id, email='' ):
+        """Undelete a user"""
+        self.visit_url( "%s/admin/users?operation=undelete&id=%s" % ( self.url, user_id ) )
+        check_str = "Undeleted 1 users"
+        self.check_page_for_string( check_str )
+
+    def unshare_history( self, history_id, user_id, strings_displayed=[] ):
+        """Unshare a history that has been shared with another user"""
+        self.visit_url( "/history/sharing", params=dict( id=history_id ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+        self.visit_url( "/history/sharing", params=dict( unshare_user=user_id, id=history_id ) )
+
+    def upload_library_dataset( self, cntrller, library_id, folder_id, filename='', server_dir='', replace_id='',
+                                upload_option='upload_file', file_type='auto', dbkey='hg18', space_to_tab='',
+                                link_data_only='copy_files', preserve_dirs='Yes', filesystem_paths='', roles=[],
+                                ldda_message='', hda_ids='', template_refresh_field_name='1_field_name',
+                                template_refresh_field_contents='', template_fields=[], show_deleted='False', strings_displayed=[] ):
+        """Add datasets to library using any upload_option"""
+        # NOTE: due to the library_wait() method call at the end of this method, no tests should be done
+        # for strings_displayed_after_submit.
+        params = dict( cntrller=cntrller, library_id=library_id, folder_id=folder_id )
+        url = "/library_common/upload_library_dataset"
+        if replace_id:
+            # If we're uploading a new version of a library dataset, we have to include the replace_id param in the
+            # request because the form field named replace_id will not be displayed on the upload form if we don't.
+            params[ 'replace_id' ] = replace_id
+        self.visit_url( url, params=params )
+        if template_refresh_field_contents:
+            # A template containing an AddressField is displayed on the upload form, so we need to refresh the form
+            # with the received template_refresh_field_contents.
+            self.refresh_form( template_refresh_field_name, template_refresh_field_contents )
+        for tup in template_fields:
+            tc.fv( "1", tup[0], tup[1] )
+        tc.fv( "upload_library_dataset", "library_id", library_id )
+        tc.fv( "upload_library_dataset", "folder_id", folder_id )
+        tc.fv( "upload_library_dataset", "show_deleted", show_deleted )
+        tc.fv( "upload_library_dataset", "ldda_message", ldda_message )
+        tc.fv( "upload_library_dataset", "file_type", file_type )
+        tc.fv( "upload_library_dataset", "dbkey", dbkey )
+        if space_to_tab:
+            tc.fv( "upload_library_dataset", "space_to_tab", space_to_tab )
+        for role_id in roles:
+            tc.fv( "upload_library_dataset", "roles", role_id )
+        # Refresh the form by selecting the upload_option - we do this here to ensure
+        # all previously entered form contents are retained.
+        self.refresh_form( 'upload_option', upload_option )
+        if upload_option == 'import_from_history':
+            for check_str in strings_displayed:
+                self.check_page_for_string( check_str )
+            if hda_ids:
+                # Twill cannot handle multi-checkboxes, so the form can only have 1 hda_ids checkbox
+                try:
+                    tc.fv( "add_history_datasets_to_library", "hda_ids", hda_ids )
+                except Exception:
+                    tc.fv( "add_history_datasets_to_library", "hda_ids", '1' )
+            tc.submit( 'add_history_datasets_to_library_button' )
+        else:
+            if upload_option in [ 'upload_paths', 'upload_directory' ]:
+                tc.fv( "upload_library_dataset", "link_data_only", link_data_only )
+            if upload_option == 'upload_paths':
+                tc.fv( "upload_library_dataset", "filesystem_paths", filesystem_paths )
+            if upload_option == 'upload_directory' and server_dir:
+                tc.fv( "upload_library_dataset", "server_dir", server_dir )
+            if upload_option == 'upload_file':
+                if filename:
+                    filename = self.get_filename( filename )
+                    tc.formfile( "upload_library_dataset", "files_0|file_data", filename )
+            for check_str in strings_displayed:
+                self.check_page_for_string( check_str )
+            tc.submit( "runtool_btn" )
+        # Give the files some time to finish uploading
+        self.library_wait( library_id )
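+
+        # Example invocation (the ids are placeholders for encoded database
+        # ids obtained from earlier steps in a test):
+        #
+        #     self.upload_library_dataset( 'library_admin', library_id, folder_id,
+        #                                  filename='1.bed', file_type='bed',
+        #                                  dbkey='hg17', ldda_message='testing' )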
+
+    def upload_file( self, filename, ftype='auto', dbkey='unspecified (?)', space_to_tab=False, metadata=None, composite_data=None, name=None, shed_tool_id=None, wait=True ):
+        """
+        Uploads a file.  If shed_tool_id has a value, we're testing tools migrated from the distribution to the tool shed,
+        so the tool-data directory of test data files is contained in the installed tool shed repository.
+        """
+        self.visit_url( "%s/tool_runner?tool_id=upload1" % self.url )
+        try:
+            self.refresh_form( "file_type", ftype )  # Refresh, to support composite files
+            tc.fv( "tool_form", "dbkey", dbkey )
+            if metadata:
+                for elem in metadata:
+                    tc.fv( "tool_form", "files_metadata|%s" % elem.get( 'name' ), elem.get( 'value' ) )
+            if composite_data:
+                for i, composite_file in enumerate( composite_data ):
+                    filename = self.get_filename( composite_file.get( 'value' ), shed_tool_id=shed_tool_id )
+                    tc.formfile( "tool_form", "files_%i|file_data" % i, filename )
+                    tc.fv( "tool_form", "files_%i|space_to_tab" % i, composite_file.get( 'space_to_tab', False ) )
+            else:
+                filename = self.get_filename( filename, shed_tool_id=shed_tool_id )
+                tc.formfile( "tool_form", "file_data", filename )
+                tc.fv( "tool_form", "space_to_tab", space_to_tab )
+                if name:
+                    # NAME is a hidden form element, so the following twill option
+                    # must be set in order to write to it.
+                    tc.config("readonly_controls_writeable", 1)
+                    tc.fv( "tool_form", "NAME", name )
+            tc.submit( "runtool_btn" )
+        except AssertionError as err:
+            errmsg = "Uploading file resulted in the following exception.  Make sure the file (%s) exists.  " % filename
+            errmsg += str( err )
+            raise AssertionError( errmsg )
+        if not wait:
+            return
+        # Make sure every history item has a valid hid
+        hids = self.get_hids_in_history( self.get_latest_history()[ 'id' ] )
+        for hid in hids:
+            try:
+                int( hid )
+            except ( TypeError, ValueError ):
+                raise AssertionError( "Invalid hid (%s) created when uploading file %s" % ( hid, filename ) )
+        # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+        self.wait()
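+
+        # Example calls (file names are illustrative test-data paths): a
+        # plain single-file upload, then a composite upload where each
+        # composite_data entry names one component file.
+        #
+        #     self.upload_file( '1.bed', ftype='bed', dbkey='hg18' )
+        #     self.upload_file( None, ftype='velvet', composite_data=[
+        #         dict( value='velvet/Sequences', space_to_tab=False ) ] )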
+
+    def upload_url_paste( self, url_paste, ftype='auto', dbkey='unspecified (?)' ):
+        """Pasted data in the upload utility"""
+        self.visit_url( "/tool_runner/index?tool_id=upload1" )
+        try:
+            self.refresh_form( "file_type", ftype )  # Refresh, to support composite files
+            tc.fv( "tool_form", "dbkey", dbkey )
+            tc.fv( "tool_form", "url_paste", url_paste )
+            tc.submit( "runtool_btn" )
+        except Exception as e:
+            errmsg = "Problem executing upload utility using url_paste: %s" % str( e )
+            raise AssertionError( errmsg )
+        # Make sure every history item has a valid hid
+        hids = self.get_hids_in_history( self.get_latest_history()[ 'id' ] )
+        for hid in hids:
+            try:
+                int( hid )
+            except ( TypeError, ValueError ):
+                raise AssertionError( "Invalid hid (%s) created when pasting %s" % ( hid, url_paste ) )
+        # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+        self.wait()
+
+    def user_set_default_permissions( self, cntrller='user', permissions_out=[], permissions_in=[], role_id='2' ):
+        # role.id = 2 is Private Role for test2@bx.psu.edu
+        # NOTE: Twill has a bug that requires the ~/user/permissions page to contain at least 1 option value
+        # in each select list or twill throws an exception, which is: ParseError: OPTION outside of SELECT
+        # Due to this bug, we'll bypass visiting the page, and simply pass the permissions on to the
+        # /user/set_default_permissions method.
+        url = "user/set_default_permissions?cntrller=%s&update_roles_button=Save&id=None" % cntrller
+        for po in permissions_out:
+            key = '%s_out' % po
+            url = "%s&%s=%s" % ( url, key, str( role_id ) )
+        for pi in permissions_in:
+            key = '%s_in' % pi
+            url = "%s&%s=%s" % ( url, key, str( role_id ) )
+        self.visit_url( "%s/%s" % ( self.url, url ) )
+        self.check_page_for_string( 'Default new history permissions have been changed.' )
+
+    def verify_composite_datatype_file_content( self, file_name, hda_id, base_name=None, attributes=None, dataset_fetcher=None, shed_tool_id=None ):
+        dataset_fetcher = dataset_fetcher or self.__default_dataset_fetcher()
+
+        def get_filename(test_filename):
+            return self.get_filename(test_filename, shed_tool_id=shed_tool_id)
+
+        data = dataset_fetcher( hda_id, base_name )
+        item_label = "History item %s" % hda_id
+        try:
+            verify(
+                item_label,
+                data,
+                attributes=attributes,
+                filename=file_name,
+                get_filename=get_filename,
+                keep_outputs_dir=self.keepOutdir,
+            )
+        except AssertionError as err:
+            errmsg = 'Composite file (%s) of %s different than expected, difference:\n' % ( base_name, item_label )
+            errmsg += str( err )
+            raise AssertionError( errmsg )
+
+    def verify_dataset_correctness( self, filename, hid=None, wait=True, maxseconds=120, attributes=None, shed_tool_id=None ):
+        """Verifies that the attributes and contents of a history item meet expectations"""
+        if wait:
+            self.wait( maxseconds=maxseconds )  # wait for job to finish
+        data_list = self.get_history_from_api( encoded_history_id=None, show_deleted=False, show_details=False )
+        self.assertTrue( data_list )
+        if hid is None:  # take last hid
+            dataset = data_list[-1]
+            hid = str( dataset.get('hid') )
+        else:
+            datasets = [ dataset for dataset in data_list if str( dataset.get('hid') ) == str( hid ) ]
+            self.assertTrue( len( datasets ) == 1 )
+            dataset = datasets[0]
+        self.assertTrue( hid )
+        dataset = self.json_from_url( dataset[ 'url' ] )
+        self._assert_dataset_state( dataset, 'ok' )
+        if filename is not None and self.is_zipped( filename ):
+            errmsg = 'History item %s is a zip archive which includes invalid files:\n' % hid
+            zip_file = zipfile.ZipFile( filename, "r" )
+            name = zip_file.namelist()[0]
+            test_ext = name.split( "." )[1].strip().lower()
+            if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
+                raise AssertionError( errmsg )
+            for name in zip_file.namelist():
+                ext = name.split( "." )[1].strip().lower()
+                if ext != test_ext:
+                    raise AssertionError( errmsg )
+        else:
+            # See note in controllers/root.py about encoded_id.
+            hda_id = dataset.get( 'id' )
+            self.verify_hid( filename, hid=hid, hda_id=hda_id, attributes=attributes, shed_tool_id=shed_tool_id)
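+
+        # A typical call from a tool test (the attributes dict follows the
+        # tool-test 'attributes' conventions; values here are illustrative):
+        #
+        #     self.verify_dataset_correctness( '1.bed', hid='3',
+        #                                      attributes={ 'sort': True } )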
+
+    def verify_extra_files_content( self, extra_files, hda_id, dataset_fetcher, shed_tool_id=None ):
+        files_list = []
+        for extra_type, extra_value, extra_name, extra_attributes in extra_files:
+            if extra_type == 'file':
+                files_list.append( ( extra_name, extra_value, extra_attributes ) )
+            elif extra_type == 'directory':
+                for filename in os.listdir( self.get_filename( extra_value, shed_tool_id=shed_tool_id ) ):
+                    files_list.append( ( filename, os.path.join( extra_value, filename ), extra_attributes ) )
+            else:
+                raise ValueError( 'unknown extra_files type: %s' % extra_type )
+        for filename, filepath, attributes in files_list:
+            self.verify_composite_datatype_file_content( filepath, hda_id, base_name=filename, attributes=attributes, dataset_fetcher=dataset_fetcher, shed_tool_id=shed_tool_id )
+
+    def verify_hid( self, filename, hda_id, attributes, shed_tool_id, hid="", dataset_fetcher=None):
+        dataset_fetcher = dataset_fetcher or self.__default_dataset_fetcher()
+
+        def get_filename(test_filename):
+            return self.get_filename(test_filename, shed_tool_id=shed_tool_id)
+
+        def verify_extra_files(extra_files):
+            self.verify_extra_files_content(extra_files, hda_id, shed_tool_id=shed_tool_id, dataset_fetcher=dataset_fetcher)
+
+        data = dataset_fetcher( hda_id )
+        item_label = "History item %s" % hid
+        verify(
+            item_label,
+            data,
+            attributes=attributes,
+            filename=filename,
+            get_filename=get_filename,
+            keep_outputs_dir=self.keepOutdir,
+            verify_extra_files=verify_extra_files,
+        )
+
+    def view_external_service( self, external_service_id, strings_displayed=[] ):
+        self.visit_url( '%s/external_service/view_external_service?id=%s' % ( self.url, external_service_id ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def view_form( self, id, form_type='', form_name='', form_desc='', form_layout_name='', field_dicts=[] ):
+        '''View form details'''
+        self.visit_url( "%s/forms/view_latest_form_definition?id=%s" % ( self.url, id ) )
+        # self.check_page_for_string( form_type )
+        self.check_page_for_string( form_name )
+        # self.check_page_for_string( form_desc )
+        self.check_page_for_string( form_layout_name )
+        for i, field_dict in enumerate( field_dicts ):
+            self.check_page_for_string( field_dict[ 'label' ] )
+            self.check_page_for_string( field_dict[ 'desc' ] )
+            self.check_page_for_string( field_dict[ 'type' ] )
+            if field_dict[ 'type' ].lower() == 'selectfield':
+                for option_index, option in enumerate( field_dict[ 'selectlist' ] ):
+                    self.check_page_for_string( option )
+
+    def view_history( self, history_id, strings_displayed=[] ):
+        """Displays a history for viewing"""
+        self.visit_url( '%s/history/view?id=%s' % ( self.url, self.security.encode_id( history_id ) ) )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def view_request( self, cntrller, request_id, strings_displayed=[], strings_displayed_count=[], strings_not_displayed=[] ):
+        self.visit_url( "%s/%s/browse_requests?operation=view_request&id=%s" % ( self.url, cntrller, request_id ) )
+        self.check_page( strings_displayed, strings_displayed_count, strings_not_displayed )
+
+    def view_request_history( self, cntrller, request_id, strings_displayed=[], strings_displayed_count=[], strings_not_displayed=[] ):
+        self.visit_url( "%s/requests_common/view_request_history?cntrller=%s&id=%s" % ( self.url, cntrller, request_id ) )
+        self.check_page( strings_displayed, strings_displayed_count, strings_not_displayed )
+
+    def view_request_type( self, request_type_id, request_type_name, sample_states, strings_displayed=[] ):
+        '''View request_type details'''
+        self.visit_url( "%s/request_type/view_request_type?id=%s" % ( self.url, request_type_id ) )
+        self.check_page_for_string( '"%s" request type' % request_type_name )
+        for name, desc in sample_states:
+            self.check_page_for_string( name )
+            self.check_page_for_string( desc )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def view_sample_dataset( self, sample_dataset_id, strings_displayed=[], strings_displayed_count=[], strings_not_displayed=[] ):
+        self.visit_url( "%s/requests_admin/manage_datasets?operation=view&id=%s" % ( self.url, sample_dataset_id ) )
+        self.check_page( strings_displayed, strings_displayed_count, strings_not_displayed )
+
+    def view_sample_history( self, cntrller, sample_id, strings_displayed=[], strings_displayed_count=[], strings_not_displayed=[] ):
+        self.visit_url( "%s/requests_common/view_sample_history?cntrller=%s&sample_id=%s" % ( self.url, cntrller, sample_id ) )
+        self.check_page( strings_displayed, strings_displayed_count, strings_not_displayed )
+
+    def view_shared_histories( self, strings_displayed=[] ):
+        self.visit_url( "/history/list_shared" )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def view_stored_active_histories( self, strings_displayed=[] ):
+        self.visit_url( "/history/list" )
+        self.check_page_for_string( 'Saved Histories' )
+        self.check_page_for_string( 'operation=Rename' )
+        self.check_page_for_string( 'operation=Switch' )
+        self.check_page_for_string( 'operation=Delete' )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def view_stored_deleted_histories( self, strings_displayed=[] ):
+        self.visit_url( "/history/list?f-deleted=True" )
+        self.check_page_for_string( 'Saved Histories' )
+        self.check_page_for_string( 'operation=Undelete' )
+        for check_str in strings_displayed:
+            self.check_page_for_string( check_str )
+
+    def visit_url( self, url, params=None, doseq=False, allowed_codes=[ 200 ] ):
+        if params is None:
+            params = dict()
+        parsed_url = urlparse( url )
+        if len( parsed_url.netloc ) == 0:
+            url = 'http://%s:%s%s' % ( self.host, self.port, parsed_url.path )
+        else:
+            url = '%s://%s%s' % ( parsed_url.scheme, parsed_url.netloc, parsed_url.path )
+        if parsed_url.query:
+            for query_parameter in parsed_url.query.split( '&' ):
+                key, value = query_parameter.split( '=', 1 )
+                params[ key ] = value
+        if params:
+            url += '?%s' % urlencode( params, doseq=doseq )
+        new_url = tc.go( url )
+        return_code = tc.browser.get_code()
+        assert return_code in allowed_codes, 'Invalid HTTP return code %s, allowed codes: %s' % \
+            ( return_code, ', '.join( str( code ) for code in allowed_codes ) )
+        return new_url
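+
+        # Note the merging behavior: query parameters already present on the
+        # url are folded into params and re-encoded, so these two calls are
+        # equivalent:
+        #
+        #     self.visit_url( '/history/list?f-deleted=True' )
+        #     self.visit_url( '/history/list', params={ 'f-deleted': 'True' } )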
+
+    def wait( self, **kwds ):
+        """Waits for the tools to finish"""
+        return self.wait_for(lambda: self.get_running_datasets(), **kwds)
+
+    def wait_for( self, func, **kwd ):
+        sleep_amount = 0.2
+        slept = 0
+        max_walltime = kwd.get("maxseconds", None)
+        if max_walltime is None:
+            max_walltime = DEFAULT_TOOL_TEST_WAIT
+
+        exceeded = True
+        while slept <= max_walltime:
+            result = func()
+            if result:
+                time.sleep( sleep_amount )
+                slept += sleep_amount
+                sleep_amount *= 2
+            else:
+                exceeded = False
+                break
+
+        if exceeded:
+            message = 'Tool test run exceeded walltime [total %s, max %s], terminating.' % (slept, max_walltime)
+            log.info(message)
+            raise AssertionError(message)
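+
+        # The sleep interval doubles while func() stays truthy (0.2s, 0.4s,
+        # 0.8s, ...), so polling backs off exponentially up to maxseconds.
+        # A sketch of a custom condition:
+        #
+        #     self.wait_for( lambda: self.get_running_datasets(), maxseconds=60 )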
+
+    def write_temp_file( self, content, suffix='.html' ):
+        fd, fname = tempfile.mkstemp( suffix=suffix, prefix='twilltestcase-' )
+        f = os.fdopen( fd, "w" )
+        f.write( content )
+        f.close()
+        return fname
+
+    def _assert_dataset_state( self, dataset, state ):
+        if dataset.get( 'state' ) != state:
+            blurb = dataset.get( 'misc_blurb' )
+            errmsg = "Expecting dataset state '%s', but state is '%s'. Dataset blurb: %s\n\n" % ( state, dataset.get('state'), blurb.strip() )
+            errmsg += self.get_job_stderr( dataset.get( 'id' ), format=True )
+            raise AssertionError( errmsg )
+
+    def _check_command(self, command, description):
+        check_command(command, description)
+
+    def _bam_to_sam( self, local_name, temp_name ):
+        temp_local = tempfile.NamedTemporaryFile( suffix='.sam', prefix='local_bam_converted_to_sam_' )
+        fd, temp_temp = tempfile.mkstemp( suffix='.sam', prefix='history_bam_converted_to_sam_' )
+        os.close( fd )
+        command = 'samtools view -h -o "%s" "%s"' % ( temp_local.name, local_name  )
+        self._check_command( command, 'Converting local (test-data) bam to sam' )
+        command = 'samtools view -h -o "%s" "%s"' % ( temp_temp, temp_name  )
+        self._check_command( command, 'Converting history bam to sam ' )
+        os.remove( temp_name )
+        return temp_local, temp_temp
+
+    def _format_stream( self, output, stream, format ):
+        output = output or ''
+        if format:
+            msg = "---------------------- >> begin tool %s << -----------------------\n" % stream
+            msg += output + "\n"
+            msg += "----------------------- >> end tool %s << ------------------------\n" % stream
+        else:
+            msg = output
+        return msg
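+
+        # With format=True the stream is wrapped in banners; e.g. for
+        # stream='stderr' the returned text looks like:
+        #
+        #     ---------------------- >> begin tool stderr << -----------------------
+        #     <stderr output>
+        #     ----------------------- >> end tool stderr << ------------------------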
+
+    def _get_job_stream_output( self, hda_id, stream, format ):
+        self.visit_url( "/datasets/%s/%s" % ( hda_id, stream ) )
+
+        output = self.last_page()
+        return self._format_stream( output, stream, format )
+
+    def __default_dataset_fetcher( self ):
+        def fetcher( hda_id, filename=None ):
+            if filename is None:
+                page_url = "/display?encoded_id=%s" % hda_id
+            else:
+                page_url = "/datasets/%s/display/%s" % ( hda_id, filename )
+            self.visit_url( page_url )
+            data = self.last_page()
+            return data
+        return fetcher
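+
+        # The returned closure matches the dataset_fetcher signature used by
+        # the verify_* methods above, e.g. (sketch):
+        #
+        #     fetch = self.__default_dataset_fetcher()
+        #     data = fetch( hda_id )                   # whole dataset
+        #     part = fetch( hda_id, 'composite.txt' )  # one composite file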
diff --git a/test/casperjs/README.txt b/test/casperjs/README.txt
new file mode 100644
index 0000000..489c851
--- /dev/null
+++ b/test/casperjs/README.txt
@@ -0,0 +1,4 @@
+This directory contains the Galaxy framework written by Carl Eberhard
+for running headless browser tests using CasperJS and PhantomJS.
+
+See notes at the top of casperjs_runner.py for more information.
diff --git a/test/casperjs/anon-history-tests.js b/test/casperjs/anon-history-tests.js
new file mode 100644
index 0000000..80e16a1
--- /dev/null
+++ b/test/casperjs/anon-history-tests.js
@@ -0,0 +1,116 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Testing histories for anonymous users', 0, function suite( test ){
+    spaceghost.start();
+
+    // ===================================================================
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+        spaceghost.info( 'Will use fixtureData.testUser: ' + email );
+    }
+
+    var editableTextClass   = spaceghost.data.selectors.editableText,
+        editableTextInput   = spaceghost.data.selectors.editableTextInput,
+
+        unnamedName         = spaceghost.historypanel.data.text.history.newName,
+        nameSelector        = spaceghost.historypanel.data.selectors.history.name,
+        sizeSelector        = spaceghost.historypanel.data.selectors.history.size,
+        initialSizeStr      = spaceghost.historypanel.data.text.history.newSize,
+        tagIconSelector     = spaceghost.historypanel.data.selectors.history.tagIcon,
+        annoIconSelector    = spaceghost.historypanel.data.selectors.history.annoIcon,
+        emptyMsgSelector    = spaceghost.historypanel.data.selectors.history.emptyMsg,
+        emptyMsgStr         = spaceghost.historypanel.data.text.history.emptyMsg;
+
+    var filenameToUpload = '1.txt',
+        filepathToUpload = '../../test-data/' + filenameToUpload;
+
+    // ------------------------------------------------------------------- check the new history panel for an anonymous user
+    spaceghost.openHomePage().historypanel.waitForHdas( function testPanelStructure(){
+        this.test.comment( 'history panel for anonymous user, new history' );
+
+        this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
+        this.test.assertExists( nameSelector, nameSelector + ' exists' );
+        this.test.assertVisible( nameSelector, 'History name is visible' );
+        this.test.assertSelectorHasText( nameSelector, unnamedName, 'History name is ' + unnamedName );
+
+        this.test.comment( "history should display size and size should be " + initialSizeStr );
+        this.test.assertExists( sizeSelector, 'Found ' + sizeSelector );
+        this.test.assertVisible( sizeSelector, 'History size is visible' );
+        this.test.assertSelectorHasText( sizeSelector, initialSizeStr,
+            'History size has "' + initialSizeStr + '"' );
+
+        this.test.comment( "NO tags or annotations icons should be available for an anonymous user" );
+        this.test.assertDoesntExist( tagIconSelector,  'Tag icon button not found' );
+        this.test.assertDoesntExist( annoIconSelector, 'Annotation icon button not found' );
+
+        this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
+        this.test.comment( "A message about the current history being empty should be displayed" );
+        this.test.assertVisible( emptyMsgSelector, 'Empty history message is visible' );
+        this.test.assertSelectorHasText( emptyMsgSelector, emptyMsgStr,
+            'Message contains "' + emptyMsgStr + '"' );
+
+        this.test.comment( 'name should NOT be editable when clicked by anon-user' );
+        this.assertDoesntHaveClass( nameSelector, editableTextClass, "Name field is not classed as editable text" );
+        this.click( nameSelector );
+        this.test.assertDoesntExist( editableTextInput, "Clicking on name does not create an input" );
+    });
+
+    // ------------------------------------------------------------------- anon user can upload file
+    var uploadedId = null;
+    spaceghost.api.tools.thenUploadToCurrent({ filepath: filepathToUpload }, function( id, json ){
+        uploadedId = id;
+    });
+    spaceghost.openHomePage().historypanel.waitForHdas( function testAnonUpload(){
+        this.test.comment( 'anon-user should be able to upload files' );
+        this.test.assertExists( '#dataset-' + uploadedId, "found hda" );
+        var hdaElement = this.elementInfoOrNull( '#dataset-' + uploadedId );
+        this.debug( 'hdaElement: ' + hdaElement );
+        if( hdaElement ){
+            this.test.assert( hdaElement.attributes[ 'class' ].indexOf( 'state-ok' ) !== -1,
+                              "Uploaded file: " + hdaElement.text );
+        }
+
+        this.test.comment( "empty should be NO LONGER be displayed" );
+        this.test.assertNotVisible( emptyMsgSelector, 'Empty history message is not visible' );
+    });
+
+    // ------------------------------------------------------------------- anon user can run tool on file
+
+    // ------------------------------------------------------------------- anon user registers/logs in -> same history
+    spaceghost.user.loginOrRegisterUser( email, password ).openHomePage( function(){
+        this.test.comment( 'anon-user should login and be associated with previous history' );
+
+        var loggedInAs = spaceghost.user.loggedInAs();
+        this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
+
+        this.historypanel.waitForHdas( function(){
+            var hdaInfo = this.historypanel.hdaElementInfoByTitle( filenameToUpload );
+            this.test.assert( hdaInfo !== null, "After logging in - found a matching hda by name and hid" );
+            if( hdaInfo ){
+                this.test.assert( 'dataset-' + uploadedId === hdaInfo.attributes.id,
+                    "After logging in - found a matching hda by hda view id: " + hdaInfo.attributes.id );
+            }
+        });
+    });
+
+    // ------------------------------------------------------------------- logs out -> new history
+    spaceghost.user.logout().openHomePage( function(){
+        this.test.comment( 'logging out should create a new, anonymous history' );
+
+        this.historypanel.waitForHdas( function(){
+            this.test.assertSelectorHasText( nameSelector, unnamedName, 'History name is ' + unnamedName );
+            this.test.assertSelectorHasText( emptyMsgSelector, emptyMsgStr,
+                'Message contains "' + emptyMsgStr + '"' );
+        });
+    });
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/api-anon-history-permission-tests.js b/test/casperjs/api-anon-history-permission-tests.js
new file mode 100644
index 0000000..6540197
--- /dev/null
+++ b/test/casperjs/api-anon-history-permission-tests.js
@@ -0,0 +1,205 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test permissions for accessible, published, and inaccessible histories '
+                        + 'with anonymous users over the API', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== SET UP
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
+    var inaccessibleHistory, accessibleHistory, publishedHistory,
+        inaccessibleHdas, accessibleHdas, publishedHdas;
+
+    //// ------------------------------------------------------------------------------------------- create 3 histories
+    spaceghost.user.loginOrRegisterUser( email, password );
+    spaceghost.openHomePage().then( function(){
+        // create three histories: make the 2nd importable (via the API), and the third published
+
+        // make the current the inaccessible one
+        inaccessibleHistory = this.api.histories.index()[0];
+        this.api.histories.update( inaccessibleHistory.id, { name: 'inaccessible' });
+        inaccessibleHistory = this.api.histories.index()[0];
+
+        accessibleHistory = this.api.histories.create({ name: 'accessible' });
+        var returned = this.api.histories.update( accessibleHistory.id, {
+            importable  : true
+        });
+        //this.debug( this.jsonStr( returned ) );
+        accessibleHistory = this.api.histories.show( accessibleHistory.id );
+
+        publishedHistory =  this.api.histories.create({ name: 'published' });
+        returned = this.api.histories.update( publishedHistory.id, {
+            published  : true
+        });
+        //this.debug( this.jsonStr( returned ) );
+        publishedHistory = this.api.histories.show( publishedHistory.id );
+
+    });
+
+    //// ------------------------------------------------------------------------------------------- upload some files
+    spaceghost.then( function(){
+        this.api.tools.thenUpload( inaccessibleHistory.id, { filepath: '../../test-data/1.bed' });
+        this.api.tools.thenUpload(   accessibleHistory.id, { filepath: '../../test-data/1.bed' });
+        this.api.tools.thenUpload(    publishedHistory.id, { filepath: '../../test-data/1.bed' });
+    });
+    spaceghost.then( function(){
+        // check that they're there
+        inaccessibleHdas = this.api.hdas.index( inaccessibleHistory.id ),
+          accessibleHdas = this.api.hdas.index(   accessibleHistory.id ),
+           publishedHdas = this.api.hdas.index(    publishedHistory.id );
+    });
+    spaceghost.user.logout();
+
+    // =================================================================== TESTS
+    //// ------------------------------------------------------------------------------------------- anon user
+    function testAnonReadFunctionsOnAccessible( history, hdas ){
+        this.test.comment( '---- testing read/accessibility functions for ACCESSIBLE history: ' + history.name );
+
+        // read functions for history
+        this.test.comment( 'show should work for history: ' + history.name );
+        this.test.assert( this.api.histories.show( history.id ).id === history.id,
+            'show worked' );
+
+        this.test.comment( 'copying should work for history (replacing the original history): ' + history.name );
+        var copiedHistory = this.api.histories.create({ history_id : history.id });
+        var historiesIndex = this.api.histories.index();
+        this.test.assert( historiesIndex.length === 1, 'only one history after copy' );
+        this.test.assert( historiesIndex[0].id === copiedHistory.id, 'original history with copy' );
+
+        // read functions for history contents
+        this.test.comment( 'index of history contents should work for history: ' + history.name );
+        this.test.assert( this.api.hdas.index( history.id ).length === 1,
+            'hda index worked' );
+        this.test.comment( 'showing of history contents should work for history: ' + history.name );
+        this.test.assert( this.api.hdas.show( history.id, hdas[0].id ).id === hdas[0].id,
+            'hda show worked' );
+
+        this.test.comment( 'Attempting to copy an accessible hda (default is accessible)'
+                         + ' should work from accessible history: ' + history.name );
+        this.api.hdas.create( this.api.histories.index()[0].id, {
+            source  : 'hda',
+            content : hdas[0].id
+        });
+    }
+
+    function testAnonReadFunctionsOnInaccessible( history, hdas ){
+        this.test.comment( '---- testing read/accessibility functions for INACCESSIBLE history: ' + history.name );
+
+        // read functions for history
+        this.test.comment( 'show should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.histories.show( history.id );
+        }, 403, 'History is not accessible by user', 'show failed with error' );
+        this.test.comment( 'copying should fail for history (implicit multiple histories): ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.histories.create({ history_id : history.id });
+        }, 403, 'History is not accessible by user', 'copy failed with error' );
+
+        // read functions for history contents
+        this.test.comment( 'index and show of history contents should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.hdas.index( history.id );
+        }, 403, 'History is not accessible by user', 'hda index failed with error' );
+        // 150721: accessible hdas in an inaccessible history are considered accessible (matching api/datasets behavior)
+        // this.api.assertRaises( function(){
+        //     this.api.hdas.show( history.id, hdas[0].id );
+        // }, 403, 'History is not accessible by user', 'hda show failed with error' );
+        this.test.assertTrue( utils.isObject( this.api.hdas.show( history.id, hdas[0].id ) ) );
+
+        this.test.comment( 'Attempting to copy an accessible hda (default is accessible)'
+                         + ' from an inaccessible history should fail for: ' + history.name );
+        this.api.assertRaises( function(){
+            var returned = this.api.hdas.create( this.api.histories.index()[0].id, {
+                source  : 'hda',
+                content : hdas[0].id
+            });
+            this.debug( this.jsonStr( returned ) );
+        }, 403, 'History is not accessible by user', 'hda copy from failed with error' );
+
+    }
+
+    function testAnonWriteFunctions( history, hdas ){
+        this.test.comment( '---- testing write/ownership functions for history: ' + history.name );
+
+        this.test.comment( 'update should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.histories.update( history.id, { deleted: true });
+        }, 403, 'API authentication required for this request', 'update authentication required' );
+        this.test.comment( 'delete should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.histories.delete_( history.id );
+        }, 403, 'API authentication required for this request', 'delete authentication required' );
+
+        this.test.comment( 'hda updating should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.hdas.update( history.id, hdas[0].id, { deleted: true });
+        // anon hda update fails w/ this msg if trying to update non-current history hda
+        }, 403, 'API authentication required for this request', 'hda update failed with error' );
+        this.test.comment( 'hda deletion should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.hdas.delete_( history.id, hdas[0].id );
+        }, 403, 'API authentication required for this request', 'hda delete failed with error' );
+
+        this.test.comment( 'copying hda into history should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.hdas.create( history.id, {
+                source  : 'hda',
+                // should error before it checks the id
+                content : 'bler'
+            });
+        }, 403, 'History is not owned by user', 'hda copy to failed' );
+    }
+
+    function testAnonInaccessible( history, hdas ){
+        testAnonReadFunctionsOnInaccessible.call( this, history, hdas );
+        testAnonWriteFunctions.call( this, history, hdas );
+    }
+
+    function testAnonAccessible( history, hdas ){
+        testAnonReadFunctionsOnAccessible.call( this, history, hdas );
+        testAnonWriteFunctions.call( this, history, hdas );
+    }
+
+    spaceghost.thenOpen( spaceghost.baseUrl ).then( function(){
+        testAnonInaccessible.call( spaceghost, inaccessibleHistory, inaccessibleHdas );
+        testAnonAccessible.call( spaceghost, accessibleHistory, accessibleHdas );
+        testAnonAccessible.call( spaceghost, publishedHistory, publishedHdas );
+    });
+
+
+    // ------------------------------------------------------------------------------------------- user1 revoke perms
+    spaceghost.user.loginOrRegisterUser( email, password );
+    spaceghost.openHomePage().then( function(){
+        this.test.comment( 'revoking perms should prevent access' );
+        this.api.histories.update( accessibleHistory.id, {
+            importable : false
+        });
+        var returned = this.api.histories.show( accessibleHistory.id );
+        this.test.assert( !returned.importable, 'now not importable' );
+
+        this.api.histories.update( publishedHistory.id, {
+            importable : false,
+            published  : false
+        });
+        returned = this.api.histories.show( publishedHistory.id );
+        this.test.assert( !returned.importable, 'now not importable' );
+        this.test.assert( !returned.published, 'now not published' );
+    });
+    spaceghost.user.logout();
+
+
+    // ------------------------------------------------------------------------------------------- anon retry perms
+    spaceghost.openHomePage().then( function(){
+        testAnonInaccessible.call( spaceghost, accessibleHistory, accessibleHdas );
+        testAnonInaccessible.call( spaceghost, publishedHistory, publishedHdas );
+    });
+
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/api-anon-history-tests.js b/test/casperjs/api-anon-history-tests.js
new file mode 100644
index 0000000..9bc595e
--- /dev/null
+++ b/test/casperjs/api-anon-history-tests.js
@@ -0,0 +1,120 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test API functions for histories with an anonymous user', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== TESTS
+    spaceghost.thenOpen( spaceghost.baseUrl ).waitForSelector( spaceghost.historypanel.data.selectors.history.name );
+    spaceghost.then( function(){
+
+        // ------------------------------------------------------------------------------------------- anon allowed
+        this.test.comment( 'index should get a list of histories' );
+        var index = this.api.histories.index();
+        this.test.assert( utils.isArray( index ), "index returned an array: length " + index.length );
+        this.test.assert( index.length === 1, 'Has one history (the current)' );
+
+        this.test.comment( 'show should get a history details object' );
+        var historyShow = this.api.histories.show( index[0].id );
+        //this.debug( this.jsonStr( historyShow ) );
+        this.test.assert( historyShow.id === index[0].id, 'Is the first history' );
+        this.test.assert( this.hasKeys( historyShow, [ 'id', 'name', 'user_id' ] ) );
+
+        this.test.comment( 'Anonymous users should have only their single, current history' );
+        this.test.assert( this.api.histories.index().length === 1, 'Has only the current history' );
+
+
+        // ------------------------------------------------------------------------------------------- anon forbidden
+        //TODO: why not return the current history?
+        this.test.comment( 'calling show with "most_recently_used" should return current history for an anon user' );
+        var recent = this.api.histories.show( 'most_recently_used' );
+        this.test.assert( recent.id === historyShow.id, 'most_recently_used returned current' );
+
+        this.test.comment( 'Calling delete should fail for an anonymous user' );
+        this.api.assertRaises( function(){
+            this.api.histories.delete_( historyShow.id );
+        }, 403, 'API authentication required for this request', 'delete failed with error' );
+
+        this.test.comment( 'Calling update should fail for an anonymous user' );
+        this.api.assertRaises( function(){
+            this.api.histories.update( historyShow.id, {} );
+        }, 403, 'API authentication required for this request', 'update failed with error' );
+
+        //TODO: need these two in api.js
+        //this.test.comment( 'Calling archive_import should fail for an anonymous user' );
+        //this.api.assertRaises( function(){
+        //    this.api.histories.archive_import( historyShow.id, {} );
+        //}, 403, 'API authentication required for this request', 'archive_import failed with error' );
+
+        //this.test.comment( 'Calling archive_download should fail for an anonymous user' );
+        //this.api.assertRaises( function(){
+        //    this.api.histories.archive_download( historyShow.id, {} );
+        //}, 403, 'API authentication required for this request', 'archive_download failed with error' );
+
+        // test server bad id protection
+        spaceghost.test.comment( 'A bad id should throw an error' );
+        this.api.assertRaises( function(){
+            this.api.histories.show( '1234123412341234' );
+        }, 400, 'unable to decode', 'Bad Request with invalid id: show' );
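+        // ( ids in these APIs are encoded (see the isEncodedId checks elsewhere), so a
+        //   plain numeric string fails to decode and yields a 400 )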
+
+    });
+
+    // ------------------------------------------------------------------------------------------- hdas
+    spaceghost.thenOpen( spaceghost.baseUrl ).waitForSelector( spaceghost.historypanel.data.selectors.history.name );
+    spaceghost.api.tools.thenUploadToCurrent({ filepath: '../../test-data/1.sam' });
+    spaceghost.then( function(){
+        var current = this.api.histories.index()[0];
+
+        // ------------------------------------------------------------------------------------------- anon allowed
+        this.test.comment( 'anonymous users can index hdas in their current history' );
+        var hdaIndex = this.api.hdas.index( current.id );
+        this.test.assert( hdaIndex.length === 1, 'indexed hdas' );
+
+        this.test.comment( 'anonymous users can show hdas in their current history' );
+        var hda = this.api.hdas.show( current.id, hdaIndex[0].id );
+        this.test.assert( this.hasKeys( hda, [ 'id', 'name' ] ), 'showed hda: ' + hda.name );
+
+        this.test.comment( 'anonymous users can hide hdas in their current history' );
+        var changed = this.api.hdas.update( current.id, hda.id, { visible: false });
+        hda = this.api.hdas.show( current.id, hda.id );
+        this.test.assert( hda.visible === false, 'successfully hidden' );
+
+        this.test.comment( 'anonymous users can mark their hdas as deleted in their current history' );
+        changed = this.api.hdas.update( current.id, hda.id, { deleted: true });
+        hda = this.api.hdas.show( current.id, hda.id );
+        this.test.assert( hda.deleted, 'successfully deleted' );
+
+        this.test.comment( 'Creating an hda (copying an existing one) should work for an anonymous user' );
+        var returned = this.api.hdas.create( current.id, { source: 'hda', content: hda.id });
+        //this.debug( this.jsonStr( returned ) );
+        this.test.assert( returned.name === hda.name, 'name matches: ' + returned.name );
+        this.test.assert( returned.id !== hda.id, 'new id: ' + returned.id );
+
+        // ------------------------------------------------------------------------------------------- anon forbidden
+        //TODO: should be allowed
+        this.test.comment( 'Calling hda delete should fail for an anonymous user' );
+        this.api.assertRaises( function(){
+            this.api.hdas.delete_( current.id, hda.id );
+        }, 403, 'API authentication required for this request', 'delete failed with error' );
+
+        //TODO: only sharing, tags, annotations should be blocked/prevented
+        this.test.comment( 'Calling update with keys other than "visible" or "deleted" should fail silently' );
+        this.test.comment( 'Calling update on tags should fail silently' );
+        changed = this.api.hdas.update( current.id, hda.id, { tags: [ 'one' ] });
+        hda = this.api.hdas.show( current.id, hda.id );
+        this.test.assert( hda.tags.length === 0, 'tags were not set: ' + this.jsonStr( hda.tags ) );
+
+        this.test.comment( 'Calling update on annotation should fail silently' );
+        changed = this.api.hdas.update( current.id, hda.id, { annotation: 'yup yup yup' });
+        hda = this.api.hdas.show( current.id, hda.id );
+        this.test.assert( !hda.annotation, 'annotation was not set: ' + hda.annotation );
+
+    });
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
+
diff --git a/test/casperjs/api-batch-tests.js b/test/casperjs/api-batch-tests.js
new file mode 100644
index 0000000..30d8611
--- /dev/null
+++ b/test/casperjs/api-batch-tests.js
@@ -0,0 +1,83 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+function apiBatch( batch ){
+    return spaceghost.api._ajax( 'api/batch', {
+        type        : 'POST',
+        contentType : 'application/json',
+        data        : { batch : batch }
+    });
+}
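+// Request shape, as exercised below: { batch: [ { url, type?, body? }, ... ] }.
+// The server appears to reply with one { body, headers, status } object per request,
+// in order; this is inferred from the assertions in this suite, not from API docs.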
+
+spaceghost.test.begin( 'Test the API batch system', 0, function suite( test ){
+    spaceghost.start();
+
+    // ======================================================================== SET UP
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
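+    // ( fixtureData appears to be injected by the test runner: when a testUser fixture
+    //   is supplied, that account is reused instead of registering a random one )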
+    spaceghost.user.registerUser( email, password );
+
+    var responseKeys = [ 'body', 'headers', 'status' ];
+
+    // ======================================================================== TESTS
+    spaceghost.then( function(){
+        // --------------------------------------------------------------------
+        this.test.comment( 'API batching should allow multiple requests and responses, executed in order' );
+        var responses = apiBatch([
+                { url : '/api/histories' },
+                { url : '/api/histories', type: 'POST', body: JSON.stringify({ name: 'wert' }) },
+                { url : '/api/histories' },
+            ]);
+        // this.debug( 'responses:' + this.jsonStr( responses ) );
+        this.test.assert( utils.isArray( responses ), "returned an array: length " + responses.length );
+        this.test.assert( responses.length === 3, 'Has three responses' );
+        this.test.assert( this.hasKeys( responses[0], responseKeys ), 'Responses have body, headers, and status' );
+
+        var historiesBeforeCreate = responses[0],
+            createdHistory = responses[1],
+            historiesAfterCreate = responses[2];
+        this.test.assert( utils.isArray( historiesBeforeCreate.body ),
+            "first histories call returned an array: length " + historiesBeforeCreate.body.length );
+        this.test.assert( utils.isObject( createdHistory.body ), 'history create returned an object' );
+        this.test.assert( historiesAfterCreate.body[0].id === createdHistory.body.id,
+            "second histories call includes the newly created history: " + historiesAfterCreate.body[0].id );
+
+
+        this.test.comment( 'API batching should handle bad routes well' );
+        responses = apiBatch([
+            { url : '/api/bler' },
+        ]);
+        // this.debug( 'responses:' + this.jsonStr( responses ) );
+        this.test.assert( responses.length === 1 );
+        var badRouteResponse = responses[0];
+        this.test.assert( badRouteResponse.status === 404 );
+        this.test.assert( utils.isObject( badRouteResponse.body )
+                       && this.countKeys( badRouteResponse.body ) === 0 );
+
+        this.test.comment( 'API batching should handle errors well' );
+        responses = apiBatch([
+            { url : '/api/histories/abc123' },
+            { url : '/api/users/123', method: 'PUT' }
+        ]);
+        // this.debug( 'responses:' + this.jsonStr( responses ) );
+        this.test.assert( responses.length === 2 );
+        var badIdResponse = responses[0],
+            notImplemented = responses[1];
+        this.test.assert( badIdResponse.status === 400 );
+        this.test.assert( notImplemented.status === 501 );
+
+    });
+    //spaceghost.user.logout();
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
+
diff --git a/test/casperjs/api-configuration-tests.js b/test/casperjs/api-configuration-tests.js
new file mode 100644
index 0000000..faeb301
--- /dev/null
+++ b/test/casperjs/api-configuration-tests.js
@@ -0,0 +1,75 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test the Galaxy configuration API', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== SET UP
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
+    spaceghost.user.loginOrRegisterUser( email, password );
+
+    // =================================================================== TESTS
+    var normKeys = [
+            'enable_unique_workflow_defaults',
+            'ftp_upload_site',
+            'ftp_upload_dir',
+            'wiki_url',
+            'support_url',
+            'logo_url',
+            'terms_url',
+            'allow_user_dataset_purge'
+        ],
+        adminKeys = normKeys.concat([
+            'library_import_dir',
+            'user_library_import_dir',
+            'allow_library_path_paste',
+            'allow_user_creation',
+            'allow_user_deletion'
+        ]);
+
+    // ------------------------------------------------------------------------------------------- INDEX
+    spaceghost.openHomePage().then( function(){
+        this.test.comment( 'index should get a (shortened) list of configuration settings '
+                         + 'when requested by a normal user' );
+
+        var configIndex = this.api.configuration.index();
+        this.debug( this.jsonStr( configIndex ) );
+        this.test.assert( utils.isObject( configIndex ), "index returned an object" );
+        this.test.assert( this.hasKeys( configIndex, normKeys ), 'Has the proper keys' );
+
+    });
+    spaceghost.user.logout();
+
+    // ------------------------------------------------------------------------------------------- INDEX (admin)
+    spaceghost.tryStepsCatch( function tryAdminLogin(){
+        spaceghost.user.loginAdmin();
+    }, function(){} );
+
+    spaceghost.openHomePage().waitForMasthead( function(){
+        if( spaceghost.user.userIsAdmin() ){
+            this.test.comment( 'index should get a (full) list of configuration settings '
+                             + 'when requested by an admin user' );
+            var configIndex = this.api.configuration.index();
+            this.debug( this.jsonStr( configIndex ) );
+            this.test.assert( utils.isObject( configIndex ), "index returned an object" );
+            this.test.assert( this.hasKeys( configIndex, adminKeys ), 'Has the proper keys' );
+
+        } else {
+            this.info( 'Admin level configuration API tests not run: no admin account available' );
+        }
+    });
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
+
diff --git a/test/casperjs/api-dataset-tests.js b/test/casperjs/api-dataset-tests.js
new file mode 100644
index 0000000..efbe6bf
--- /dev/null
+++ b/test/casperjs/api-dataset-tests.js
@@ -0,0 +1,59 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test the dataset API', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== SET UP
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
+
+    var detailKeys  = [
+            // the following are always present regardless of datatype
+            'id', 'name', 'api_type', 'model_class',
+            'history_id', 'hid',
+            'accessible', 'deleted', 'visible', 'purged',
+            'state', 'data_type', 'file_ext', 'file_size',
+            'misc_info', 'misc_blurb',
+            'download_url', 'visualizations', 'display_apps', 'display_types',
+            'genome_build'
+        ];
+
+    spaceghost.user.loginOrRegisterUser( email, password );
+    spaceghost.openHomePage();
+    spaceghost.api.tools.thenUploadToCurrent({ filepath: '../../test-data/1.bed' });
+
+    spaceghost.then( function(){
+        // ------------------------------------------------------------------------------------------- INDEX
+        this.test.comment( 'index should error with not implemented' );
+        this.api.assertRaises( function(){
+            this.api.datasets.index();
+        }, 501, 'not implemented', 'throws unimplemented' );
+
+        // ------------------------------------------------------------------------------------------- SHOW
+        this.test.comment( 'show should get an HDA details object' );
+        var history = this.api.histories.show( 'most_recently_used', { keys : 'id,hdas' } ),
+            hdaId = history.hdas[0],
+            show = this.api.datasets.show( hdaId );
+        this.debug( this.jsonStr( history ) );
+        this.debug( this.jsonStr( show ) );
+        this.test.assert( this.hasKeys( show, detailKeys ), 'Has the proper keys' );
+
+        // ------------------------------------------------------------------------------------------- DISPLAY
+        this.test.comment( 'display should fetch the raw file contents of the dataset' );
+        var fileContents = this.api.datasets.display( history.id, hdaId, { raw: 'True' });
+        this.test.assert( fileContents.split( '\n' ).length === 66, '1.bed has 66 lines' );
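+        // ( raw is sent as the string 'True' since it travels as a query parameter;
+        //   the backend apparently parses string booleans for this flag )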
+    });
+
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
+
diff --git a/test/casperjs/api-hda-tests.js b/test/casperjs/api-hda-tests.js
new file mode 100644
index 0000000..8f0395a
--- /dev/null
+++ b/test/casperjs/api-hda-tests.js
@@ -0,0 +1,350 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test the HDA API', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== SET UP
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
+    spaceghost.user.loginOrRegisterUser( email, password );
+
+    spaceghost.openHomePage().api.tools.thenUploadToCurrent({
+        filepath: '../../test-data/1.sam'
+    });
+
+    // =================================================================== TESTS
+    var summaryKeys = [ 'id', 'name', 'history_id', 'state', 'deleted', 'purged', 'visible', 'url', 'type' ],
+        detailKeys  = [
+            // the following are always present regardless of datatype
+            'id', 'name', 'api_type', 'model_class',
+            'history_id', 'hid',
+            'accessible', 'deleted', 'visible', 'purged',
+            'state', 'data_type', 'file_ext', 'file_size',
+            'misc_info', 'misc_blurb',
+            'download_url', 'visualizations', 'display_apps', 'display_types',
+            'genome_build',
+            // the following are NOT always present DEPENDING ON datatype
+            'metadata_dbkey',
+            'metadata_column_names', 'metadata_column_types', 'metadata_columns',
+            'metadata_comment_lines', 'metadata_data_lines'
+        ];
+
+    // ------------------------------------------------------------------------------------------- logged in user
+    spaceghost.then( function(){
+        // ------------------------------------------------------------------------------------------- INDEX
+        this.test.comment( 'index should return a list of summary data for each hda' );
+        var histories = this.api.histories.index(),
+            lastHistory = histories[0],
+            hdaIndex = this.api.hdas.index( lastHistory.id );
+        //this.debug( 'hdaIndex:' + this.jsonStr( hdaIndex ) );
+
+        this.test.assert( utils.isArray( hdaIndex ), "index returned an array: length " + hdaIndex.length );
+        this.test.assert( hdaIndex.length >= 1, 'Has at least one hda' );
+
+        var firstHda = hdaIndex[0];
+        this.test.assert( this.hasKeys( firstHda, summaryKeys ), 'Has the proper keys' );
+
+        this.test.assert( this.api.isEncodedId( firstHda.id ), 'Id appears well-formed: ' + firstHda.id );
+        this.test.assert( firstHda.name === '1.sam', 'Title matches: ' + firstHda.name );
+
+
+        // ------------------------------------------------------------------------------------------- SHOW
+        this.test.comment( 'show should get an HDA details object' );
+        var hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        //this.debug( this.jsonStr( hdaShow ) );
+        this.test.assert( this.hasKeys( hdaShow, detailKeys ), 'Has the proper keys' );
+
+        //TODO: validate data in each hdaShow attribute...
+
+
+        // ------------------------------------------------------------------------------------------- INDEX (detailed)
+        this.test.comment( 'index should return a list of detailed data for each hda in "ids" when passed' );
+        hdaIndex = this.api.hdas.index( lastHistory.id, [ firstHda.id ] );
+        this.debug( 'hdaIndex:' + this.jsonStr( hdaIndex ) );
+
+        this.test.assert( utils.isArray( hdaIndex ), "index returned an array: length " + hdaIndex.length );
+        this.test.assert( hdaIndex.length >= 1, 'Has at least one hda' );
+
+        firstHda = hdaIndex[0];
+        this.test.assert( this.hasKeys( firstHda, detailKeys ), 'Has the proper keys' );
+
+        //TODO??: validate data in firstHda attribute? we ASSUME it's from a common method as show...
+
+
+        // ------------------------------------------------------------------------------------------- CREATE
+        //TODO: create from_ld_id
+        this.test.comment( 'create should allow copying an accessible hda' );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        var returned = this.api.hdas.create( lastHistory.id, {
+            source  : 'hda',
+            content : hdaShow.id
+        });
+        //this.debug( 'returned:' + this.jsonStr( returned ) );
+        this.test.assert( this.hasKeys( returned, detailKeys ), 'Has the proper keys' );
+        this.test.assert( typeof returned.id !== 'number' && isNaN( Number( returned.id ) ),
+            'id seems to be encoded: ' + returned.id );
+        this.test.assert( typeof returned.history_id !== 'number' && isNaN( Number( returned.history_id ) ),
+            'history_id seems to be encoded: ' + returned.history_id );
+
+
+        // ------------------------------------------------------------------------------------------- UPDATE
+        // ........................................................................................... idiot proofing
+        this.test.comment( 'updating using a nonsense key should NOT fail with an error' );
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            konamiCode : 'uuddlrlrba'
+        });
+        this.test.assert( returned.id === firstHda.id );
+
+        this.test.comment( 'updating by attempting to change type should cause an error' );
+        this.api.assertRaises( function(){
+            returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+                //name : false
+                deleted : 'sure why not'
+            });
+        }, 400, "must be a type: <type 'bool'>", 'changing deleted type failed' );
+
+        // ........................................................................................... name
+        this.test.comment( 'update should allow changing the name' );
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            name : 'New name'
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.name === 'New name', "Name successfully set via update: " + hdaShow.name );
+
+        // no sanitizing input
+        //this.test.comment( 'update should sanitize any new name' );
+
+        this.test.comment( 'update should allow unicode in names' );
+        var unicodeName = 'ржевский сапоги';
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            name : unicodeName
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.name === unicodeName, "Update accepted unicode name: " + hdaShow.name );
+
+        this.test.comment( 'update should allow escaped quotations in names' );
+        var quotedName = '"Bler"';
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            name : quotedName
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.name === quotedName,
+            "Update accepted escaped quotations in name: " + hdaShow.name );
+
+
+        // ........................................................................................... deleted
+        this.test.comment( 'update should allow changing the deleted flag' );
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            deleted: true
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id, true );
+        this.test.assert( hdaShow.deleted === true, "Update set the deleted flag: " + hdaShow.deleted );
+
+        this.test.comment( 'update should allow changing the deleted flag back' );
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            deleted: false
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.deleted === false, "Update set the deleted flag: " + hdaShow.deleted );
+
+
+        // ........................................................................................... visible/hidden
+        this.test.comment( 'update should allow changing the visible flag' );
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            visible: false
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.visible === false, "Update set the visible flag: " + hdaShow.visible );
+
+
+        // ........................................................................................... genome/dbkey
+        this.test.comment( 'update should allow changing the genome_build' );
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            genome_build : 'hg18'
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        //this.debug( 'hdaShow:\n' + this.jsonStr( hdaShow ) );
+        this.test.assert( hdaShow.genome_build === 'hg18',
+            "genome_build successfully set via update: " + hdaShow.genome_build );
+        this.test.assert( hdaShow.metadata_dbkey === 'hg18',
+            "metadata_dbkey successfully set via the same update: " + hdaShow.metadata_dbkey );
+
+        // no sanitizing input
+        //this.test.comment( 'update should sanitize any genome_build' );
+
+        //TODO: this actually throws an 'invalid genome_build' error
+        //this.test.comment( 'update should allow unicode in genome builds' );
+        //var unicodeBuild = 'Ржевский18';
+        //returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+        //    genome_build : unicodeBuild
+        //});
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        //hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        //this.debug( 'hdaShow:\n' + this.jsonStr( hdaShow ) );
+        //this.test.assert( hdaShow.genome_build === unicodeBuild,
+        //    "Update accepted unicode genome_build: " + hdaShow.genome_build );
+
+        // ........................................................................................... misc_info/info
+        this.test.comment( 'update should allow changing the misc_info' );
+        var newInfo = 'I\'ve made a huge mistake.';
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            misc_info : newInfo
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.misc_info === newInfo,
+            "misc_info successfully set via update: " + hdaShow.misc_info );
+
+        // no sanitizing input
+        //this.test.comment( 'update should sanitize any misc_info' );
+
+        this.test.comment( 'update should allow unicode in misc_info' );
+        var unicodeInfo = '여보!';
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            misc_info : unicodeInfo
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.misc_info === unicodeInfo,
+            "Update accepted unicode misc_info: " + hdaShow.misc_info );
+
+        this.test.comment( 'update should set misc_info to the empty string if sent null/None' );
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            misc_info : null
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.misc_info === '',
+            "Update used null as the empty string: " + hdaShow.misc_info );
+
+        // ........................................................................................... annotation
+        // currently fails because no annotation is returned in details
+        this.test.comment( 'update should allow changing the annotation' );
+        var newAnnotation = 'Found this sample on a movie theatre floor';
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            annotation : newAnnotation
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.annotation === newAnnotation,
+            "Annotation successfully set via update: " + hdaShow.annotation );
+
+        // no sanitizing input
+        //this.test.comment( 'update should sanitize any new annotation' );
+
+        this.test.comment( 'update should allow unicode in annotations' );
+        var unicodeAnnotation = 'お願いは、それが落下させない';
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            annotation : unicodeAnnotation
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.annotation === unicodeAnnotation,
+            "Update accepted unicode annotation: " + hdaShow.annotation );
+
+        this.test.comment( 'update should allow escaped quotations in annotations' );
+        var quotedAnnotation = '"Bler"';
+        returned = this.api.hdas.update( lastHistory.id, firstHda.id, {
+            annotation : quotedAnnotation
+        });
+        //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.annotation === quotedAnnotation,
+            "Update accepted escaped quotations in annotation: " + hdaShow.annotation );
+
+        // ------------------------------------------------------------------------------------------- ERRORS
+        this.test.comment( 'create should error when no valid source ("library" or "hda") is given' );
+        this.api.assertRaises( function(){
+            this.api.hdas.create( lastHistory.id, { bler: 'bler' } );
+        }, 400, "must be either 'library' or 'hda'", 'create with no source failed' );
+
+        this.test.comment( 'updating using a nonsense key should fail silently' );
+        returned = this.api.hdas.update( lastHistory.id, hdaShow.id, {
+            konamiCode : 'uuddlrlrba'
+        });
+        this.test.assert( returned.konamiCode === undefined, 'key was not set: ' + returned.konamiCode );
+
+        spaceghost.test.comment( 'A bad id should throw an error when using show' );
+        this.api.assertRaises( function(){
+            this.api.hdas.show( lastHistory.id, '1234123412341234' );
+        }, 400, 'unable to decode', 'Bad Request with invalid id: show' );
+        spaceghost.test.comment( 'A bad id should throw an error when using update' );
+        this.api.assertRaises( function(){
+            this.api.hdas.update( lastHistory.id, '1234123412341234', {} );
+        }, 400, 'unable to decode', 'Bad Request with invalid id: update' );
+        spaceghost.test.comment( 'A bad id should throw an error when using delete' );
+        this.api.assertRaises( function(){
+            this.api.hdas.delete_( lastHistory.id, '1234123412341234' );
+        }, 400, 'unable to decode', 'Bad Request with invalid id: delete' );
+        //TODO: a bad id should throw an error when using undelete (test not yet implemented)
+
+        this.test.comment( 'updating by attempting to change type should cause an error' );
+        [ 'name', 'misc_info' ].forEach( function( key ){
+            var updatedAttrs = {};
+            updatedAttrs[ key ] = false;
+            spaceghost.api.assertRaises( function(){
+                returned = spaceghost.api.hdas.update( hdaShow.history_id, hdaShow.id, updatedAttrs );
+            }, 400, "must be a type: <type 'basestring'>", 'type validation error' );
+        });
+        spaceghost.api.assertRaises( function(){
+            returned = spaceghost.api.hdas.update( hdaShow.history_id, hdaShow.id, { annotation: false } );
+        }, 400, "must be a type: (<type 'basestring'>, <type 'NoneType'>)", 'type validation error' );
+        spaceghost.api.assertRaises( function(){
+            returned = spaceghost.api.hdas.update( hdaShow.history_id, hdaShow.id, { genome_build: false } );
+        }, 400, "must be a type: <type 'basestring'>", 'type validation error (genome_build must be string)' );
+        [ 'deleted', 'visible' ].forEach( function( key ){
+            var updatedAttrs = {};
+            updatedAttrs[ key ] = 'straaang';
+            spaceghost.api.assertRaises( function(){
+                returned = spaceghost.api.hdas.update( hdaShow.history_id, hdaShow.id, updatedAttrs );
+            }, 400, "must be a type: <type 'bool'>", 'type validation error' );
+        });
+        spaceghost.api.assertRaises( function(){
+            returned = spaceghost.api.hdas.update( hdaShow.history_id, hdaShow.id, { tags: 'you\'re it' });
+        }, 400, "must be a type: <type 'list'>", 'type validation error' );
+        spaceghost.api.assertRaises( function(){
+            returned = spaceghost.api.hdas.update( hdaShow.history_id, hdaShow.id, { tags: [ true ] });
+        }, 400, "must be a type: <type 'basestring'>", 'type validation error' );
+
+        // ------------------------------------------------------------------------------------------- DELETE
+        this.test.comment( 'calling delete on an hda should mark it as deleted but not change the history size' );
+        lastHistory = this.api.histories.show( lastHistory.id );
+        var sizeBeforeDelete = lastHistory.nice_size;
+
+        returned = this.api.hdas.delete_( lastHistory.id, firstHda.id );
+        //this.debug( this.jsonStr( returned ) );
+
+        hdaShow = this.api.hdas.show( lastHistory.id, firstHda.id );
+        this.test.assert( hdaShow.deleted, 'hda is marked deleted' );
+        lastHistory = this.api.histories.show( lastHistory.id );
+        this.test.assert( lastHistory.nice_size === sizeBeforeDelete, 'history size has not changed' );
+
+        // by default, purging fails because allow_user_dataset_purge=False in the config
+        this.api.assertRaises( function(){
+            returned = this.api.hdas.delete_( lastHistory.id, firstHda.id, { purge : true });
+        }, 403, 'This instance does not allow user dataset purging', 'Purge failed' );
+    });
+    //spaceghost.user.logout();
+
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
+
diff --git a/test/casperjs/api-history-permission-tests.js b/test/casperjs/api-history-permission-tests.js
new file mode 100644
index 0000000..5cadb06
--- /dev/null
+++ b/test/casperjs/api-history-permission-tests.js
@@ -0,0 +1,263 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test permissions for accessible, published, and inaccessible histories '
+                        + 'over the API', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== SET UP
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
+    var email2 = spaceghost.user.getRandomEmail(),
+        password2 = '123456';
+    if( spaceghost.fixtureData.testUser2 ){
+        email2 = spaceghost.fixtureData.testUser2.email;
+        password2 = spaceghost.fixtureData.testUser2.password;
+    }
+
+    var inaccessibleHistory, accessibleHistory, publishedHistory,
+        inaccessibleHdas, accessibleHdas, publishedHdas,
+        accessibleLink;
+
+    // =================================================================== TESTS
+    //// ------------------------------------------------------------------------------------------- create 3 histories
+    spaceghost.user.loginOrRegisterUser( email, password );
+    spaceghost.openHomePage().then( function(){
+        this.test.comment( '(logged in as ' + this.user.loggedInAs() + ')' );
+        // create three histories: make the 2nd importable (via the API), and the third published
+
+        this.test.comment( 'importable, slug, and published should all be returned by show and initially off' );
+        // make the current the inaccessible one
+        inaccessibleHistory = this.api.histories.show( this.api.histories.index()[0].id );
+        this.test.assert( this.hasKeys( inaccessibleHistory, [ 'id', 'name', 'slug', 'importable', 'published' ] ),
+            'Has the proper keys' );
+        this.test.assert( inaccessibleHistory.slug === null,
+            'initial slug is null: ' + inaccessibleHistory.slug );
+        this.test.assert( inaccessibleHistory.importable === false,
+            'initial importable is false: ' + inaccessibleHistory.importable );
+        this.test.assert( inaccessibleHistory.published === false,
+            'initial published is false: ' + inaccessibleHistory.published );
+        this.api.histories.update( inaccessibleHistory.id, { name: 'inaccessible' });
+        inaccessibleHistory = this.api.histories.show( inaccessibleHistory.id );
+
+        this.test.comment( 'Setting importable to true should create a slug, ' +
+                           'username_and_slug, and importable === true' );
+        accessibleHistory = this.api.histories.create({ name: 'accessible' });
+        var returned = this.api.histories.update( accessibleHistory.id, {
+            importable  : true
+        });
+        this.debug( this.jsonStr( returned ) );
+        accessibleHistory = this.api.histories.show( accessibleHistory.id );
+        this.test.assert( this.hasKeys( accessibleHistory, [ 'username_and_slug' ] ),
+            'Has username_and_slug' );
+        this.test.assert( accessibleHistory.slug === 'accessible',
+            'slug is not null: ' + accessibleHistory.slug );
+        this.test.assert( accessibleHistory.importable,
+            'importable is true: ' + accessibleHistory.importable );
+        accessibleLink = 'u/' + email.replace( '@test.test', '' ) + '/h/accessible';
+        this.test.assert( accessibleHistory.username_and_slug === accessibleLink,
+            'username_and_slug is proper: ' + accessibleHistory.username_and_slug );
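+        // ( e.g. 'u/<email local part>/h/accessible': registration apparently derives
+        //   the username from the local part of these @test.test emails )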
+
+        this.test.comment( 'Setting published to true should make accessible and published === true' );
+        publishedHistory =  this.api.histories.create({ name: 'published' });
+        returned = this.api.histories.update( publishedHistory.id, {
+            published  : true
+        });
+        this.debug( this.jsonStr( returned ) );
+        publishedHistory = this.api.histories.show( publishedHistory.id );
+        this.test.assert( this.hasKeys( publishedHistory, [ 'username_and_slug' ] ),
+            'Has the proper keys' );
+        this.test.assert( publishedHistory.published,
+            'published is true: ' + publishedHistory.published );
+        this.test.assert( publishedHistory.importable,
+            'importable is true: ' + publishedHistory.importable );
+        this.test.assert( publishedHistory.slug === 'published',
+            'slug is not null: ' + publishedHistory.slug );
+        accessibleLink = 'u/' + email.replace( '@test.test', '' ) + '/h/published';
+        this.test.assert( publishedHistory.username_and_slug === accessibleLink,
+            'username_and_slug is proper: ' + publishedHistory.username_and_slug );
+
+    });
+
+    // ------------------------------------------------------------------------------------------- upload some files
+    spaceghost.then( function(){
+        this.api.tools.thenUpload( inaccessibleHistory.id, { filepath: '../../test-data/1.bed' });
+        this.api.tools.thenUpload(   accessibleHistory.id, { filepath: '../../test-data/1.bed' });
+        this.api.tools.thenUpload(    publishedHistory.id, { filepath: '../../test-data/1.bed' });
+    });
+    spaceghost.then( function(){
+        // check that they're there
+        inaccessibleHdas = this.api.hdas.index( inaccessibleHistory.id ),
+          accessibleHdas = this.api.hdas.index(   accessibleHistory.id ),
+           publishedHdas = this.api.hdas.index(    publishedHistory.id );
+        this.test.comment( '---- adding datasets' );
+        this.test.assert( inaccessibleHdas.length === 1,
+            'uploaded file to inaccessible: ' + inaccessibleHdas.length );
+        this.test.assert( accessibleHdas.length === 1,
+            'uploaded file to accessible: ' + accessibleHdas.length );
+        this.test.assert( publishedHdas.length === 1,
+            'uploaded file to published: ' + publishedHdas.length );
+    });
+    spaceghost.user.logout();
+
+    //// ------------------------------------------------------------------------------------------- log in user2
+    function testReadFunctionsOnAccessible( history, hdas ){
+        this.test.comment( '---- testing read/accessibility functions for ACCESSIBLE history: ' + history.name );
+
+        // read functions for history
+        this.test.comment( 'show should work for history: ' + history.name );
+        this.test.assert( this.api.histories.show( history.id ).id === history.id,
+            'show worked' );
+        this.test.comment( 'copying should work for history: ' + history.name );
+        var returned = this.api.histories.create({ history_id : history.id });
+        this.test.assert( returned.name === "Copy of '" + history.name + "'",
+            'copied name matches: ' + returned.name );
+
+        // read functions for history contents
+        this.test.comment( 'index of history contents should work for history: ' + history.name );
+        this.test.assert( this.api.hdas.index( history.id ).length === 1,
+            'hda index worked' );
+        this.test.comment( 'showing of history contents should work for history: ' + history.name );
+        this.test.assert( this.api.hdas.show( history.id, hdas[0].id ).id === hdas[0].id,
+            'hda show worked' );
+
+        this.test.comment( 'Attempting to copy an accessible hda (default is accessible)'
+                         + ' should work from accessible history: ' + history.name );
+        returned = this.api.hdas.create( this.api.histories.index()[0].id, {
+            source  : 'hda',
+            content : hdas[0].id
+        });
+        this.test.assert( returned.name === hdas[0].name, 'successful hda copy from: ' + returned.name );
+    }
+
+    function testReadFunctionsOnInaccessible( history, hdas ){
+        this.test.comment( '---- testing read/accessibility functions for INACCESSIBLE history: ' + history.name );
+
+        // read functions for history
+        this.test.comment( 'show should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.histories.show( history.id );
+        }, 403, 'History is not accessible by user', 'show failed with error' );
+        this.test.comment( 'copying should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.histories.create({ history_id : history.id });
+        }, 403, 'History is not accessible by user', 'copy failed with error' );
+
+        // read functions for history contents
+        this.test.comment( 'index and show of history contents should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.hdas.index( history.id );
+        }, 403, 'History is not accessible by user', 'hda index failed with error' );
+        // 150721: accessible hdas in an inaccessible history are considered accessible (since api/datasets does)
+        // this.api.assertRaises( function(){
+        //     this.api.hdas.show( history.id, hdas[0].id );
+        // }, 403, 'History is not accessible by user', 'hda show failed with error' );
+        this.test.assertTrue( utils.isObject( this.api.hdas.show( history.id, hdas[0].id ) ),
+            'hda show in an inaccessible history still returns an object' );
+
+        this.test.comment( 'Attempting to copy an accessible hda (default is accessible)'
+                         + ' from an inaccessible history should fail for: ' + history.name );
+        this.api.assertRaises( function(){
+            var returned = this.api.hdas.create( this.api.histories.index()[0].id, {
+                source  : 'hda',
+                content : hdas[0].id
+            });
+            this.debug( this.jsonStr( returned ) );
+        }, 403, 'History is not accessible by user', 'hda copy from failed with error' );
+    }
+
+    function testWriteFunctions( history, hdas ){
+        this.test.comment( '---- testing write/ownership functions for history: ' + history.name );
+
+        this.test.comment( 'update should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.histories.update( history.id, { deleted: true });
+        }, 403, 'History is not owned by user', 'update failed with error' );
+        this.test.comment( 'delete should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.histories.delete_( history.id );
+        }, 403, 'History is not owned by user', 'delete failed with error' );
+
+        this.test.comment( 'hda updating should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.hdas.update( history.id, hdas[0].id, { deleted: true });
+        }, 403, 'HistoryDatasetAssociation is not owned by user', 'hda update failed with error' );
+        this.test.comment( 'hda deletion should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.hdas.delete_( history.id, hdas[0].id );
+        }, 403, 'HistoryDatasetAssociation is not owned by user', 'hda delete failed with error' );
+
+        this.test.comment( 'copying hda into history should fail for history: ' + history.name );
+        this.api.assertRaises( function(){
+            this.api.hdas.create( history.id, {
+                source  : 'hda',
+                // should error before it checks the id
+                content : 'bler'
+            });
+        }, 403, 'History is not owned by user', 'hda copy to failed' );
+    }
+
+    function testInaccessible( history, hdas ){
+        testReadFunctionsOnInaccessible.call( this, history, hdas );
+        testWriteFunctions.call( this, history, hdas );
+    }
+
+    function testAccessible( history, hdas ){
+        testReadFunctionsOnAccessible.call( this, history, hdas );
+        testWriteFunctions.call( this, history, hdas );
+    }
+
+    spaceghost.user.loginOrRegisterUser( email2, password2 );
+    spaceghost.openHomePage().then( function(){
+        this.test.comment( '(logged in as ' + this.user.loggedInAs() + ')' );
+        testInaccessible.call( spaceghost, inaccessibleHistory, inaccessibleHdas );
+        testAccessible.call( spaceghost, accessibleHistory, accessibleHdas );
+        testAccessible.call( spaceghost, publishedHistory, publishedHdas );
+    });
+    spaceghost.user.logout();
+
+
+    //// ------------------------------------------------------------------------------------------- user1 revoke perms
+    spaceghost.user.loginOrRegisterUser( email, password );
+    spaceghost.thenOpen( spaceghost.baseUrl ).then( function(){
+        this.test.comment( '(logged in as ' + this.user.loggedInAs() + ')' );
+        this.test.comment( 'revoking perms should prevent access' );
+        this.api.histories.update( accessibleHistory.id, {
+            importable : false
+        });
+        var returned = this.api.histories.show( accessibleHistory.id );
+        this.test.assert( !returned.importable, 'now not importable' );
+        this.test.assert( !returned.published, '(still not published)' );
+        this.test.assert( !!returned.slug, '(slug still set) ' + returned.slug );
+
+        this.api.histories.update( publishedHistory.id, {
+            importable : false,
+            published  : false
+        });
+        returned = this.api.histories.show( publishedHistory.id );
+        this.test.assert( !returned.importable, 'now not importable' );
+        this.test.assert( !returned.published, 'now not published' );
+        this.test.assert( !!returned.slug, '(slug still set) ' + returned.slug );
+    });
+    spaceghost.user.logout();
+
+
+    //// ------------------------------------------------------------------------------------------- user2 retry perms
+    spaceghost.user.loginOrRegisterUser( email2, password2 );
+    spaceghost.thenOpen( spaceghost.baseUrl ).then( function(){
+        this.test.comment( '(logged in as ' + this.user.loggedInAs() + ')' );
+        testInaccessible.call( spaceghost, accessibleHistory, accessibleHdas );
+        testInaccessible.call( spaceghost, publishedHistory, publishedHdas );
+    });
+    // spaceghost.user.logout();
+
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/api-history-tests.js b/test/casperjs/api-history-tests.js
new file mode 100644
index 0000000..31fdc2a
--- /dev/null
+++ b/test/casperjs/api-history-tests.js
@@ -0,0 +1,321 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test the history API', 0, function suite( test ){
+    spaceghost.start();
+
+// =================================================================== SET UP
+var email = spaceghost.user.getRandomEmail(),
+    password = '123456';
+if( spaceghost.fixtureData.testUser ){
+    email = spaceghost.fixtureData.testUser.email;
+    password = spaceghost.fixtureData.testUser.password;
+}
+spaceghost.user.loginOrRegisterUser( email, password );
+
+
+// =================================================================== TESTS
+spaceghost.openHomePage().then( function(){
+
+    // ------------------------------------------------------------------------------------------- INDEX
+    this.test.comment( 'index should get a list of histories' );
+    var historyIndex = this.api.histories.index();
+    //this.debug( this.jsonStr( historyIndex ) );
+    this.test.assert( utils.isArray( historyIndex ), "index returned an array: length " + historyIndex.length );
+    this.test.assert( historyIndex.length >= 1, 'Has at least one history' );
+
+    var firstHistory = historyIndex[0];
+    this.test.assert( this.hasKeys( firstHistory, [ 'id', 'name', 'url' ] ), 'Has the proper keys' );
+    this.test.assert( this.api.isEncodedId( firstHistory.id ), 'Id appears well-formed' );
+
+
+    // ------------------------------------------------------------------------------------------- SHOW
+    this.test.comment( 'show should get a history details object' );
+    var historyShow = this.api.histories.show( firstHistory.id );
+    //this.debug( this.jsonStr( historyShow ) );
+    this.test.assert( this.hasKeys( historyShow, [
+            'id', 'name', 'annotation', 'size', 'contents_url',
+            'state', 'state_details', 'state_ids' ]),
+        'Has the proper keys' );
+
+    this.test.comment( 'a history details object should contain two objects named state_details and state_ids' );
+    var states = [
+            'discarded', 'empty', 'error', 'failed_metadata', 'new',
+            'ok', 'paused', 'queued', 'running', 'setting_metadata', 'upload' ],
+        state_details = historyShow.state_details,
+        state_ids = historyShow.state_ids;
+    this.test.assert( this.hasKeys( state_details, states ), 'state_details has the proper keys' );
+    this.test.assert( this.hasKeys( state_ids, states ),     'state_ids has the proper keys' );
+    var state_detailsAreNumbers = true,
+        state_idsAreArrays = true;
+    states.forEach( function( state ){
+        if( !utils.isArray( state_ids[ state ] ) ){ state_idsAreArrays = false; }
+        if( !utils.isNumber( state_details[ state ] ) ){ state_detailsAreNumbers = false; }
+    });
+    this.test.assert( state_idsAreArrays, 'state_ids values are arrays' );
+    this.test.assert( state_detailsAreNumbers, 'state_details values are numbers' );
+
+    this.test.comment( 'calling show with "most_recently_used" should return the first history' );
+    historyShow = this.api.histories.show( 'most_recently_used' );
+    //this.debug( this.jsonStr( historyShow ) );
+    this.test.assert( historyShow.id === firstHistory.id, 'Is the first history' );
+
+    this.test.comment( 'Should be able to combine calls' );
+    this.test.assert( this.api.histories.show( this.api.histories.index()[0].id ).id === firstHistory.id,
+        'combining function calls works' );
+
+    // ------------------------------------------------------------------------------------------- CREATE
+    this.test.comment( 'Calling create should create a new history and allow setting the name' );
+    var newHistoryName = 'Created History',
+        createdHistory = this.api.histories.create({ name: newHistoryName });
+    //this.debug( 'returned from create:\n' + this.jsonStr( createdHistory ) );
+    this.test.assert( createdHistory.name === newHistoryName,
+        "Name of created history (from create) is correct: " + createdHistory.name );
+
+    // check the index
+    var newFirstHistory = this.api.histories.index()[0];
+    //this.debug( 'newFirstHistory:\n' + this.jsonStr( newFirstHistory ) );
+    this.test.assert( newFirstHistory.name === newHistoryName,
+        "Name of last history (from index) is correct: " + newFirstHistory.name );
+    this.test.assert( newFirstHistory.id === createdHistory.id,
+        "Id of last history (from index) is correct: " + newFirstHistory.id );
+
+
+    // ------------------------------------------------------------------------------------------- DELETE
+    this.test.comment( 'calling delete should delete the given history and remove it from the standard index' );
+    var deletedHistory = this.api.histories.delete_( createdHistory.id );
+    //this.debug( 'returned from delete:\n' + this.jsonStr( deletedHistory ) );
+    this.test.assert( deletedHistory.id === createdHistory.id,
+        "Deletion returned id matching created history: " + deletedHistory.id );
+    this.test.assert( deletedHistory.deleted === true,
+        "Deletion return 'deleted: true': " + deletedHistory.deleted );
+
+    newFirstHistory = this.api.histories.index()[0];
+    //this.debug( 'newFirstHistory:\n' + this.jsonStr( newFirstHistory ) );
+    this.test.assert( newFirstHistory.id !== createdHistory.id,
+        "Id of last history (from index) DOES NOT appear: " + newFirstHistory.id );
+
+    this.test.comment( 'calling index with deleted=true should include the deleted history' );
+    newFirstHistory = this.api.histories.index({ deleted: true })[0];
+    //this.debug( 'newFirstHistory:\n' + this.jsonStr( newFirstHistory ) );
+    this.test.assert( newFirstHistory.id === createdHistory.id,
+        "Id of last history (from index) DOES appear using index( deleted=true ): " + newFirstHistory.id );
+
+
+    // ------------------------------------------------------------------------------------------- UNDELETE
+    this.test.comment( 'calling undelete should undelete the given history and re-include it in index' );
+    var undeletedHistory = this.api.histories.undelete( createdHistory.id );
+    this.debug( 'returned from undelete:\n' + this.jsonStr( undeletedHistory ) );
+    this.test.assert( ( undeletedHistory.id === createdHistory.id ) && ( !undeletedHistory.deleted ),
+        "Undeletion new, updated JSON for the history" );
+
+    newFirstHistory = this.api.histories.index()[0];
+    //this.debug( 'newFirstHistory:\n' + this.jsonStr( newFirstHistory ) );
+    this.test.assert( newFirstHistory.id === createdHistory.id,
+        "Id of last history (from index) DOES appear after undeletion: " + newFirstHistory.id );
+
+    //TODO: show, deleted flag
+
+
+    // ------------------------------------------------------------------------------------------- UPDATE
+    // ........................................................................................... name
+    this.test.comment( 'update should allow changing the name' );
+    var returned = this.api.histories.update( newFirstHistory.id, {
+        name : 'New name'
+    });
+    //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.name === 'New name', "Name successfully set via update: " + historyShow.name );
+
+    // no sanitizing input
+    //this.test.comment( 'update should sanitize any new name' );
+
+    //NOTE!: this fails on sqlite3 (with default setup)
+    this.test.comment( 'update should allow unicode in names' );
+    var unicodeName = '桜ゲノム';
+    returned = this.api.histories.update( newFirstHistory.id, {
+        name : unicodeName
+    });
+    //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.name === unicodeName, "Update accepted unicode name: " + historyShow.name );
+
+    this.test.comment( 'update should allow escaped quotations in names' );
+    var quotedName = '"Bler"';
+    returned = this.api.histories.update( newFirstHistory.id, {
+        name : quotedName
+    });
+    //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.name === quotedName,
+        "Update accepted escaped quotations in name: " + historyShow.name );
+
+
+    // ........................................................................................... deleted
+    this.test.comment( 'update should allow changing the deleted flag' );
+    returned = this.api.histories.update( newFirstHistory.id, {
+        deleted: true
+    });
+    //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id, true );
+    this.test.assert( historyShow.deleted === true, "Update set the deleted flag: " + historyShow.deleted );
+
+    this.test.comment( 'update should allow changing the deleted flag back' );
+    returned = this.api.histories.update( newFirstHistory.id, {
+        deleted: false
+    });
+    //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.deleted === false, "Update set the deleted flag: " + historyShow.deleted );
+
+
+    // ........................................................................................... published
+    this.test.comment( 'update should allow changing the published flag' );
+    returned = this.api.histories.update( newFirstHistory.id, {
+        published: true
+    });
+    this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.published === true, "Update set the published flag: " + historyShow.published );
+
+
+    // ........................................................................................... genome_build
+    this.test.comment( 'update should allow changing the genome_build' );
+    returned = this.api.histories.update( newFirstHistory.id, {
+        genome_build : 'hg18'
+    });
+    //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.genome_build === 'hg18',
+        "genome_build successfully set via update: " + historyShow.genome_build );
+
+    // no sanitizing input
+    //this.test.comment( 'update should sanitize any genome_build' );
+
+    // removed since we have no pre-installed unicode builds
+    //this.test.comment( 'update should allow unicode in genome builds' );
+
+    // removed since many data sources consider dbkey to be an open field
+    // spaceghost.test.comment( 'An unknown reference/genome_build should return a 400' );
+    // 400, 'invalid reference', 'Bad Request with invalid id: show' );
+
+
+    // ........................................................................................... annotation
+    this.test.comment( 'update should allow changing the annotation' );
+    var newAnnotation = 'Here are some notes that I stole from the person next to me';
+    returned = this.api.histories.update( newFirstHistory.id, {
+        annotation : newAnnotation
+    });
+    this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.annotation === newAnnotation,
+        "Annotation successfully set via update: " + historyShow.annotation );
+
+    // no sanitizing input
+    //this.test.comment( 'update should sanitize any new annotation' );
+
+    this.test.comment( 'update should allow unicode in annotations' );
+    var unicodeAnnotation = 'お願いは、それが落下させない';
+    returned = this.api.histories.update( newFirstHistory.id, {
+        annotation : unicodeAnnotation
+    });
+    //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.annotation === unicodeAnnotation,
+        "Update accepted unicode annotation: " + historyShow.annotation );
+
+    this.test.comment( 'update should allow escaped quotations in annotations' );
+    var quotedAnnotation = '"Bler"';
+    returned = this.api.histories.update( newFirstHistory.id, {
+        annotation : quotedAnnotation
+    });
+    //this.debug( 'returned:\n' + this.jsonStr( returned ) );
+    historyShow = this.api.histories.show( newFirstHistory.id );
+    this.test.assert( historyShow.annotation === quotedAnnotation,
+        "Update accepted escaped quotations in annotation: " + historyShow.annotation );
+
+
+    // ------------------------------------------------------------------------------------------- ERRORS
+    // ........................................................................................... idiot proofing
+    this.test.comment( 'updating using a nonsense key should fail silently' );
+    returned = this.api.histories.update( newFirstHistory.id, {
+        konamiCode : 'uuddlrlrba'
+    });
+    this.test.assert( returned.konamiCode === undefined, 'key was not set: ' + returned.konamiCode );
+
+    // test server bad id protection
+    spaceghost.test.comment( 'A bad id should throw an error' );
+    this.api.assertRaises( function(){
+        this.api.histories.show( '1234123412341234' );
+    }, 400, 'unable to decode', 'Bad Request with invalid id: show' );
+    spaceghost.test.comment( 'A bad id should throw an error when using update' );
+    this.api.assertRaises( function(){
+        this.api.histories.update( '1234123412341234', {} );
+    }, 400, 'unable to decode', 'Bad Request with invalid id: update' );
+    spaceghost.test.comment( 'A bad id should throw an error when using delete' );
+    this.api.assertRaises( function(){
+        this.api.histories.delete_( '1234123412341234' );
+    }, 400, 'unable to decode', 'Bad Request with invalid id: delete' );
+    spaceghost.test.comment( 'A bad id should throw an error when using undelete' );
+    this.api.assertRaises( function(){
+        this.api.histories.undelete( '1234123412341234' );
+    }, 400, 'unable to decode', 'Bad Request with invalid id: undelete' );
+
+    this.test.comment( 'updating by attempting to change type should cause an error' );
+    [ 'name', 'annotation' ].forEach( function( key ){
+        var updatedAttrs = {};
+        updatedAttrs[ key ] = false;
+        spaceghost.api.assertRaises( function(){
+            returned = spaceghost.api.histories.update( newFirstHistory.id, updatedAttrs );
+        // note: annotation can be basestring or null (a tuple) so just use the partial error string
+        }, 400, 'must be a type', 'type validation error' );
+    });
+    [ 'deleted', 'importable', 'published' ].forEach( function( key ){
+        var updatedAttrs = {};
+        updatedAttrs[ key ] = 'straaang';
+        spaceghost.api.assertRaises( function(){
+            returned = spaceghost.api.histories.update( newFirstHistory.id, updatedAttrs );
+        }, 400, "must be a type: <type 'bool'>", 'type validation error' );
+    });
+    spaceghost.api.assertRaises( function(){
+        returned = spaceghost.api.histories.update( newFirstHistory.id, { tags: 'you\'re it' });
+    }, 400, "must be a type: <type 'list'>", 'type validation error' );
+    spaceghost.api.assertRaises( function(){
+        returned = spaceghost.api.histories.update( newFirstHistory.id, { tags: [ true ] });
+    }, 400, "must be a type: <type 'basestring'>", 'type validation error' );
+
+    // no longer throws if showing deleted...
+    //this.test.comment( 'calling show with /deleted should raise a bad request' );
+
+    // ------------------------------------------------------------------------------------------- PURGE
+    var newHistoryId = null;
+    spaceghost.then( function(){
+        this.test.comment( 'calling purge should mark a history, its datasets, and remove the files' );
+        // create new history
+        var newHistory = this.api.histories.create({ name: 'To Purge' });
+        newHistoryId = newHistory.id;
+    });
+    spaceghost.openHomePage().api.tools.thenUploadToCurrent({
+        filepath: '../../test-data/1.sam'
+    });
+    spaceghost.then( function(){
+        var recent = this.api.histories.show( newHistoryId );
+        this.debug( 'size:\n' + recent.size );
+
+        this.api.assertRaises( function(){
+            this.api.histories.delete_( recent.id, true );
+        }, 403, 'This instance does not allow user dataset purging', 'error thrown when config does not allow purging' );
+    });
+    //TODO: a way to set the config to allow user purging and then being able to test this would be good.
+
+    //this.debug( this.jsonStr( historyShow ) );
+});
+
+// ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/api-tool-tests.js b/test/casperjs/api-tool-tests.js
new file mode 100644
index 0000000..ff7e5c0
--- /dev/null
+++ b/test/casperjs/api-tool-tests.js
@@ -0,0 +1,223 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test the tools API', 0, function suite( test ){
+    spaceghost.start();
+
+// =================================================================== SET UP
+var email = spaceghost.user.getRandomEmail(),
+    password = '123456';
+if( spaceghost.fixtureData.testUser ){
+    email = spaceghost.fixtureData.testUser.email;
+    password = spaceghost.fixtureData.testUser.password;
+}
+spaceghost.user.loginOrRegisterUser( email, password );
+
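+// helpers: compareObjs shallow-compares obj1 against the key/values in where;
+// findObject returns the first element of objectArray matching where (or null), e.g.:
+//   findObject( toolIndex, { id: 'textutil' })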
+function compareObjs( obj1, where ){
+    for( var key in where ){
+        if( where.hasOwnProperty( key ) ){
+            if( !obj1.hasOwnProperty( key )  ){ return false; }
+            if( obj1[ key ] !== where[ key ] ){ return false; }
+        }
+    }
+    return true;
+}
+
+function findObject( objectArray, where, start ){
+    start = start || 0;
+    for( var i=start; i<objectArray.length; i += 1 ){
+        if( compareObjs( objectArray[i], where ) ){ return objectArray[i]; }
+    }
+    return null;
+}
+
+// =================================================================== TESTS
+var panelSectionKeys = [
+        'elems', 'id', 'name', 'version'
+    ],
+    panelToolKeys = [
+        'id', 'name', 'description', 'version', 'link', 'target', 'min_width'
+    ],
+    toolSummaryKeys = [
+        'id', 'name', 'description', 'version'
+    ],
+    toolDetailKeys = [
+        'id', 'name', 'description', 'version', 'inputs'
+    ],
+    toolInputKeys = [
+        'label', 'name', 'type'
+        // there are others, but it's not consistent across all inputs
+    ];
+
+function attemptShowOnAllTools(){
+    //NOTE: execute like: attemptShowOnAllTools.call( spaceghost )
+    var toolIndex = this.api.tools.index( false );
+    var toolErrors = {};
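+    // ObjectKeySet collects the union of keys seen across all objects passed to __add (used to survey tool input keys)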
+    function ObjectKeySet(){
+        var self = this;
+        function addOne( key ){
+            if( !self.hasOwnProperty( key ) ){
+                self[ key ] = true;
+            }
+        }
+        self.__add = function( obj ){
+            for( var key in obj ){
+                if( obj.hasOwnProperty( key ) ){
+                    addOne( key );
+                }
+            }
+        };
+        return self;
+    }
+    var set = new ObjectKeySet();
+    for( var i=0; i<toolIndex.length; i+=1 ){
+        var tool = toolIndex[i];
+        try {
+            var toolShow = this.api.tools.show( tool.id );
+            this.info( 'checking: ' + tool.id );
+            for( var j=0; j<toolShow.inputs.length; j+=1 ){
+                var input = toolShow.inputs[j];
+                set.__add( input );
+            }
+        } catch( err ){
+            var message = JSON.parse( err.message ).error;
+            this.error( '\t error: ' + message );
+            toolErrors[ tool.id ] = message;
+        }
+    }
+    this.debug( this.jsonStr( toolErrors ) );
+    this.debug( this.jsonStr( set ) );
+}
+
+spaceghost.openHomePage().then( function(){
+
+    // ------------------------------------------------------------------------------------------- INDEX
+    // ........................................................................................... (defaults)
+    this.test.comment( 'index should get a list of tools in panel form (by default)' );
+    var toolIndex = this.api.tools.index();
+    //this.debug( this.jsonStr( toolIndex ) );
+    this.test.assert( utils.isArray( toolIndex ), "index returned an array: length " + toolIndex.length );
+    this.test.assert( toolIndex.length >= 1, 'Has at least one tool section' );
+
+    this.test.comment( 'index panel form should be separated into sections (by default)' );
+    var firstSection = toolIndex[0]; // get data
+    //this.debug( this.jsonStr( firstSection ) );
+    this.test.assert( this.hasKeys( firstSection, panelSectionKeys ), 'Has the proper keys' );
+    //TODO: test form of indiv. keys
+
+    this.test.comment( 'index sections have a list of tool "elems"' );
+    this.test.assert( utils.isArray( firstSection.elems ), firstSection.name + ".elems is an array: "
+        + "length " + firstSection.elems.length );
+    this.test.assert( firstSection.elems.length >= 1, 'Has at least one tool' );
+
+    var firstTool = firstSection.elems[0]; // get data
+    //this.debug( this.jsonStr( firstTool ) );
+    this.test.assert( this.hasKeys( firstTool, panelToolKeys ), 'Has the proper keys' );
+
+    // ........................................................................................... in_panel=False
+    this.test.comment( 'index should get a list of all tools when in_panel=false' );
+    toolIndex = this.api.tools.index( false );
+    //this.debug( this.jsonStr( toolIndex ) );
+    this.test.assert( utils.isArray( toolIndex ), "index returned an array: length " + toolIndex.length );
+    this.test.assert( toolIndex.length >= 1, 'Has at least one tool' );
+
+    this.test.comment( 'index non-panel form should be a simple list of tool summaries' );
+    firstSection = toolIndex[0];
+    //this.debug( this.jsonStr( firstSection ) );
+    this.test.assert( this.hasKeys( firstSection, toolSummaryKeys ), 'Has the proper keys' );
+    //TODO: test uniqueness of ids
+    //TODO: test form of indiv. keys
+
+    // ........................................................................................... trackster=True
+    this.test.comment( '(like in_panel=True) index with trackster=True should '
+                     + 'get a (smaller) list of tools in panel form (by default)' );
+    toolIndex = this.api.tools.index( undefined, true );
+    //this.debug( this.jsonStr( toolIndex ) );
+    this.test.assert( utils.isArray( toolIndex ), "index returned an array: length " + toolIndex.length );
+    this.test.assert( toolIndex.length >= 1, 'Has at least one tool section' );
+
+    this.test.comment( 'index with trackster=True should be separated into sections (by default)' );
+    firstSection = toolIndex[0]; // get data
+    //this.debug( this.jsonStr( firstSection ) );
+    this.test.assert( this.hasKeys( firstSection, panelSectionKeys ), 'Has the proper keys' );
+    //TODO: test form of indiv. keys
+
+    this.test.comment( 'index sections with trackster=True have a list of tool "elems"' );
+    this.test.assert( utils.isArray( firstSection.elems ), firstSection.name + ".elems is an array: "
+        + "length " + firstSection.elems.length );
+    this.test.assert( firstSection.elems.length >= 1, 'Has at least one tool' );
+
+    firstTool = firstSection.elems[0]; // get data
+    //this.debug( this.jsonStr( firstTool ) );
+    this.test.assert( this.hasKeys( firstTool, panelToolKeys ), 'Has the proper keys' );
+
+    // ............................................................................ trackster=True, in_panel=False
+    // this yields the same as in_panel=False...
+
+
+    // ------------------------------------------------------------------------------------------- SHOW
+    this.test.comment( 'show should get detailed data about the tool with the given id' );
+    // get the tool select first from tool index
+    toolIndex = this.api.tools.index();
+    var selectFirst = findObject( findObject( toolIndex, { id: 'textutil' }).elems, { id: 'Show beginning1' });
+    //this.debug( this.jsonStr( selectFirst ) );
+
+    var toolShow = this.api.tools.show( selectFirst.id );
+    //this.debug( this.jsonStr( toolShow ) );
+    this.test.assert( utils.isObject( toolShow ), "show returned an object" );
+    this.test.assert( this.hasKeys( toolShow, toolDetailKeys ), 'Has the proper keys' );
+
+    this.test.comment( 'show data should include an array of input objects' );
+    this.test.assert( utils.isArray( toolShow.inputs ), "inputs is an array: "
+        + "length " + toolShow.inputs.length );
+    this.test.assert( toolShow.inputs.length >= 1, 'Has at least one element' );
+    for( var i=0; i<toolShow.inputs.length; i += 1 ){
+        var input = toolShow.inputs[i];
+        this.test.comment( 'checking input #' + i + ': ' + ( input.name || '(no name)' ) );
+        this.test.assert( utils.isObject( input ), "input is an object" );
+        this.test.assert( this.hasKeys( input, toolInputKeys ), 'Has the proper keys' );
+    }
+    //TODO: test form of indiv. keys
+
+
+    // ------------------------------------------------------------------------------------------- CREATE
+    // this is a method of running a job. Shouldn't that be in jobs.create?
+
+    this.test.comment( 'create should work' );
+    var upload_params = {
+        'files_0|NAME': 'Test Dataset',
+        'files_0|url_paste': 'Hello World',
+        'dbkey': '?',
+        'file_type': 'txt'
+    };
+    var payload = {
+        'tool_id': 'upload1',
+        'inputs': upload_params,
+        'upload_type': 'upload_dataset',
+    };
+    var toolCreate = this.api.tools.create( payload );
+    this.test.assert( this.hasKeys( toolCreate, ['outputs'] ), 'Has outputs' );
+    var outputs = toolCreate['outputs'];
+    this.test.assert( utils.isArray( outputs ), 'outputs is an array' );
+    this.test.assert( outputs.length === 1, 'one dataset is created' );
+
+    var output = outputs[0];
+    this.test.assert( utils.isObject( output ), 'output0 is an object' );
+    this.test.assert( this.hasKeys( output, ['data_type', 'deleted', 'hid', 'history_id', 'id', 'name' ] ),
+        'Dataset information defined' );
+    this.test.assert( this.hasKeys( output, ['output_name' ] ), 'Output name labelled' );
+
+    // ------------------------------------------------------------------------------------------- MISC
+    //attemptShowOnAllTools.call( spaceghost );
+});
+
+// ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/api-user-tests.js b/test/casperjs/api-user-tests.js
new file mode 100644
index 0000000..7c66b1b
--- /dev/null
+++ b/test/casperjs/api-user-tests.js
@@ -0,0 +1,52 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test the user API', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== SET UP
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
+    spaceghost.user.loginOrRegisterUser( email, password );
+
+    // =================================================================== TESTS
+    spaceghost.openHomePage().then( function(){
+
+        // ------------------------------------------------------------------------------------------- INDEX
+        this.test.comment( 'index should get a list of users' );
+        var userIndex = this.api.users.index();
+        this.debug( this.jsonStr( userIndex ) );
+        this.test.assert( utils.isArray( userIndex ), "index returned an array: length " + userIndex.length );
+
+        // need a way to import/create a user here for testing
+        if( userIndex.length <= 0 ){
+            this.warn( 'No users available' );
+            return;
+        }
+        this.test.assert( userIndex.length >= 1, 'Has at least one user' );
+
+        //TODO: index( deleted )
+
+        // ------------------------------------------------------------------------------------------- SHOW
+        this.test.comment( 'show should get detailed data about the user with the given id' );
+        var userShow = this.api.users.show( userIndex[0].id );
+        this.debug( this.jsonStr( userShow ) );
+
+        //TODO: show( current )
+        //TODO: show( deleted )
+
+        // ------------------------------------------------------------------------------------------- CREATE
+
+        // ------------------------------------------------------------------------------------------- MISC
+    });
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/api-visualizations-tests.js b/test/casperjs/api-visualizations-tests.js
new file mode 100644
index 0000000..9f2a791
--- /dev/null
+++ b/test/casperjs/api-visualizations-tests.js
@@ -0,0 +1,200 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test the visualizations API', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== SET UP
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
+    spaceghost.user.loginOrRegisterUser( email, password );
+
+    // =================================================================== TESTS
+    spaceghost.openHomePage().then( function(){
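+        // ALWAYS_CREATE: when true, a fresh test visualization is created in set up below even if some already exist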
+        var ALWAYS_CREATE = true,
+            indexKeys = [
+                'id', 'title', 'type', 'dbkey', 'url'
+            ],
+            showKeys  = indexKeys.concat([
+                'user_id', 'model_class', 'revisions', 'latest_revision', 'annotation'
+            ]),
+            revisionKeys = [
+                'id', 'title', 'visualization_id', 'dbkey', 'model_class', 'config'
+            ];
+
+        // ------------------------------------------------------------------------------------------- set up
+        var visualizationIndex = this.api.visualizations.index();
+        if( ALWAYS_CREATE || !visualizationIndex.length ){
+            // post a visualization
+            this.info( 'creating new visualization for tests' );
+            var testVisualization = this.api.visualizations.create({
+                title   : 'Test Visualization',
+                // needs to be unique
+                slug    : 'test-visualization-' + Date.now(),
+                type    : 'test',
+                dbkey   : 'hg17',
+                annotation : 'this is a test of the emergency visualization system',
+                config  : {
+                    x       : 10,
+                    y       : 12
+                }
+            });
+            this.debug( this.jsonStr( testVisualization ) );
+        }
+
+        // ------------------------------------------------------------------------------------------- INDEX
+        this.test.comment( 'index should get a list of visualizations' );
+        visualizationIndex = this.api.visualizations.index();
+        this.debug( this.jsonStr( visualizationIndex ) );
+        this.test.assert( utils.isArray( visualizationIndex ),
+            "index returned an array: length " + visualizationIndex.length );
+        this.test.assert( visualizationIndex.length >= 1, 'Has at least one visualization' );
+
+        var firstVisualization = visualizationIndex[0];
+        this.test.assert( this.hasKeys( firstVisualization, indexKeys ), 'Has the proper keys' );
+        this.test.assert( this.api.isEncodedId( firstVisualization.id ), 'Id appears well-formed' );
+
+        //TODO: index searching
+        //TODO: anon user
+        //TODO: admin user
+
+        // ------------------------------------------------------------------------------------------- SHOW
+        this.test.comment( 'show should get a visualization details object' );
+        var visualizationShow = this.api.visualizations.show( firstVisualization.id );
+        this.debug( this.jsonStr( visualizationShow ) );
+        this.test.assert( this.hasKeys( visualizationShow, showKeys ), 'Has the proper keys' );
+        this.test.assert( visualizationShow.model_class === 'Visualization',
+            'Has the proper model_class: ' + visualizationShow.model_class );
+
+        this.test.comment( 'a visualization details object should contain an array of revision ids' );
+        var revisions = visualizationShow.revisions;
+        this.test.assert( utils.isArray( revisions ), 'revisions is an array' );
+        this.test.assert( revisions.length >= 1, 'revisions has at least one entry' );
+        var areIds = true;
+        revisions.forEach( function( revision ){
+            if( !spaceghost.api.isEncodedId( revision ) ){ areIds = false; }
+        });
+        this.test.assert( areIds, 'all revisions are ids' );
+
+        this.test.comment( 'a visualization details object should contain a subobject of the latest revision' );
+        var latestRevision = visualizationShow.latest_revision;
+        this.test.assert( utils.isObject( latestRevision ), 'latestRevision is an object' );
+        this.test.assert( this.hasKeys( latestRevision, revisionKeys ), 'latestRevision has the proper keys' );
+        this.test.assert( latestRevision.model_class === 'VisualizationRevision',
+            'Has the proper model_class: ' + latestRevision.model_class );
+        this.test.assert( latestRevision.visualization_id === visualizationShow.id,
+            'revision visualization_id matches containing visualization id: ' + latestRevision.visualization_id );
+        this.test.assert( visualizationShow.revisions.indexOf( latestRevision.id ) !== -1,
+            'found latest_revision id in revisions' );
+
+        this.test.comment( 'a visualization revision should contain a subobject for the config' );
+        var config = latestRevision.config;
+        this.test.assert( utils.isObject( config ), 'config is an object:\n' + this.jsonStr( config ) );
+
+        //TODO: url in visualizationIndex == show url
+        //TODO: non existing id throws error
+        //TODO: anon user
+        //TODO: user1 has no permissions to show user2
+
+        // ------------------------------------------------------------------------------------------- CREATE
+        this.test.comment( 'Calling create should create a new visualization and allow setting the name' );
+        var visualizationData = {
+            title   : 'Created Visualization',
+            // needs to be unique
+            slug    : 'created-visualization-' + Date.now(),
+            type    : 'test',
+            dbkey   : 'hg17',
+            annotation : 'invisible visualization',
+            config  : {
+                x       : 10,
+                y       : 12
+            }
+        };
+        var created = this.api.visualizations.create( visualizationData );
+        this.debug( 'returned from create:\n' + this.jsonStr( created ) );
+        this.test.assert( this.api.isEncodedId( created.id ), "create returned an id: " + created.id );
+
+        // check v. show
+        visualizationShow = this.api.visualizations.show( created.id );
+        this.debug( 'visualizationShow:\n' + this.jsonStr( visualizationShow ) );
+        // config is re-located into a revision and won't be there
+        //this.test.assert( compareObjs( visualizationShow, visualizationData, [ 'config' ] ),
+        //    "show results seem to match create data" );
+
+        // the following errors are produced within base.controller.UsesVisualizationsMixin._create_visualization
+        this.test.comment( 'Calling create with a non-unique slug will cause an API error' );
+        this.api.assertRaises( function(){
+            created = this.api.visualizations.create( visualizationData );
+        }, 400, 'visualization identifier must be unique' );
+
+        this.test.comment( 'Calling create with no title will cause an API error' );
+        visualizationData.title = '';
+        this.api.assertRaises( function(){
+            created = this.api.visualizations.create( visualizationData );
+        }, 400, 'visualization name is required' );
+        visualizationData.title = 'Created Visualization';
+
+        this.test.comment( 'Calling create with improper characters in the slug will cause an API error' );
+        var oldSlug = visualizationData.slug;
+        visualizationData.slug = '123_()';
+        this.api.assertRaises( function(){
+            created = this.api.visualizations.create( visualizationData );
+        }, 400, "visualization identifier must consist of only lowercase letters, numbers, and the '-' character" );
+        visualizationData.slug = oldSlug;
+
+        this.test.comment( 'Calling create with an unrecognized key will be ignored' );
+        visualizationData.title = 'Unrecognized key';
+        visualizationData.slug = 'unrecognized-key';
+        visualizationData.bler = 'blah';
+        created = this.api.visualizations.create( visualizationData );
+        this.test.assert( created.bler === undefined );
+        delete visualizationData.bler;
+
+        this.test.comment( 'Calling create with an unparsable JSON config will cause an API error' );
+        visualizationData.title = 'Unparsable';
+        visualizationData.slug = 'unparsable';
+        visualizationData.config = '3 = nime';
+        this.api.assertRaises( function(){
+            created = this.api.visualizations.create( visualizationData );
+        }, 400, "config must be a dictionary: <type 'unicode'>" );
+
+        // ------------------------------------------------------------------------------------------ UPDATE
+        // ........................................................................................... idiot proofing
+        //this.test.comment( 'updating using a nonsense key should fail with an error' );
+        //returned = this.api.visualizations.update( created.id, { bler : 'blah' });
+        ////TODO: this isn't returning an object...
+        //this.debug( 'returned:' + this.jsonStr( returned ) );
+        //this.test.assert( returned.bler === undefined );
+
+        this.test.comment( 'updating by attempting to change type should cause an error' );
+        this.api.assertRaises( function(){
+            var returned = this.api.visualizations.update( created.id, { title : 30 });
+        }, 400, 'title must be a string or unicode' );
+        //TODO: the other types...
+
+        // ........................................................................................... title
+        //this.test.comment( 'update should create a new visualization revision' );
+        //
+        //this.test.comment( 'updating with a new title should NOT change the visualization title...' );
+        //latestRevision = this.api.visualizations.show( created.id ).latest_revision;
+        //returned = this.api.visualizations.update( created.id, {
+        //    title : 'New title'
+        //});
+        //visualizationShow = this.api.visualizations.show( created.id );
+        //this.debug( this.jsonStr( visualizationShow ) );
+        //this.test.assert( visualizationShow.title === visualizationData.title,
+        //    "Title does not set via update: " + visualizationShow.title );
+
+    });
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/casperjs_runner.py b/test/casperjs/casperjs_runner.py
new file mode 100644
index 0000000..1b643cd
--- /dev/null
+++ b/test/casperjs/casperjs_runner.py
@@ -0,0 +1,469 @@
+"""Test runner for casperjs headless browser tests with the Galaxy distribution.
+
+Allows integration of casperjs tests with run_functional_tests.sh
+
+Tests can be run in any of the following ways:
+* casperjs test mytests.js --url='http://localhost:8080'
+* python casperjs_runner.py
+* nosetests
+* sh run_tests.sh -j
+* sh run_tests.sh test/casperjs/casperjs_runner.py
+* sh run_tests.sh
+
+Note that you can enable (lots of) debugging info using cli options:
+* casperjs test api-user-tests.js --url='http://localhost:8080' --verbose=true --logLevel=debug
+
+(see casperjs.org for more information)
+
+Note: This works with CasperJS 1.1 and PhantomJS 1.9.2 and these libraries seem to break backward
+compatibility a lot.
+
+Note: You can pass in extra data using --data='<some JSON object>'
+    and it will be available in your script as spaceghost.fixtureData.
+
+    Example using a specific user account:
+        casperjs test api-history-tests.js --url="http://localhost:8080" \
+            --data='{ "testUser": { "email": "foo at example.com", "password": "123456" } }'
+        // ...then, in script:
+        spaceghost.user.loginOrRegisterUser(
+            spaceghost.fixtureData.testUser.email,
+            spaceghost.fixtureData.testUser.password );
+
+    Example of specifying user and admin user credentials:
+        casperjs test api-configuration-tests.js --url="http://localhost:8080" \
+            --admin='{"email": "foo@example.com", "password": "123456" }'
+"""
+
+import errno
+import json
+import logging
+import os
+import re
+import subprocess
+import sys
+import unittest
+
+from server_env import TestEnvironment
+
+# -------------------------------------------------------------------- can't do 2.5
+
+( major, minor, micro, releaselevel, serial ) = sys.version_info
+if minor < 6:
+    msg = 'casperjs requires python 2.6 or newer. Using: %s' % ( sys.version )
+    try:
+        # if nose is installed do a skip test
+        from nose.plugins.skip import SkipTest
+        raise SkipTest( msg )
+    except ImportError:
+        raise AssertionError( msg )
+
+# --------------------------------------------------------------------
+
+logging.basicConfig( stream=sys.stderr )
+log = logging.getLogger( __name__ )
+
+# ==================================================================== MODULE VARS
+_PATH_TO_HEADLESS = 'casperjs'
+
+_TODO = """
+    get data back from js scripts (uploaded files, etc.)
+    use returned json to output list of failed assertions if code == 2
+    better way to turn debugging on from the environment
+"""
+
+
+# ====================================================================
+class HeadlessJSJavascriptError( Exception ):
+    """An error that occurrs in the javascript test file.
+    """
+    pass
+
+
+class CasperJSTestCase( unittest.TestCase ):
+    """Casper tests running in a unittest framework.
+    """
+    # casper uses a lot of escape codes to colorize output - these capture those and allow removal
+    escape_code_compiled_pattern = None
+    escape_code_pattern = r'\x1b\[[\d|;]+m'
+
+    # info on where to get casper js - shown when the exec can't be found
+    casper_info = """
+    CasperJS is a navigation scripting & testing utility for PhantomJS, written in Javascript.
+    More information is available at: casperjs.org
+    """
+
+    # debugging flag - set to true to have casperjs tests output with --verbose=true and --logLevel=debug
+    # debug = True
+    debug = False
+    # bit of a hack - this is the beginning of the last string when casperjs runs with --verbose=true --logLevel=debug
+    #   use this to get subprocess to stop waiting for output
+    casper_done_str = '# Stopping'
+
+    # convert js test results to unittest.TestResults
+    results_adapter = None  # CasperJsonToUnittestResultsConverter()
+
+    # ---------------------------------------------------------------- run the js script
+    def run_js_script( self, rel_script_path, *args, **kwargs ):
+        """Start the headless browser tests in a separate process and use both
+        the subprocess return code and the stdout output (formatted as JSON)
+        to determine which tests failed and which passed.
+        """
+        log.debug( 'beginning headless browser tests: %s', rel_script_path )
+        process_command_list = self.build_command_line( rel_script_path, *args, **kwargs )
+        log.debug( 'process_command_list: %s', str( process_command_list ) )
+        try:
+            process = subprocess.Popen( process_command_list,
+                                        shell=False,
+                                        stdout=subprocess.PIPE,
+                                        stderr=subprocess.PIPE )
+
+            # output from the browser (stderr only) immediately
+            while process.poll() is None:
+                stderr_msg = process.stderr.readline()
+                stderr_msg = self.strip_escape_codes( stderr_msg.strip() )
+                if stderr_msg:
+                    log.debug( '(%s): %s', rel_script_path, stderr_msg )
+                    # HACK: this is the last string displayed using the debug settings - afterwards it hangs
+                    #   so: bail on this string
+                    if stderr_msg.startswith( self.casper_done_str ):
+                        break
+
+            # stdout is assumed to have the json test data/results
+            ( stdout_output, stderr_output ) = process.communicate()
+            # log.debug( '%s stdout output:\n%s', rel_script_path, stdout_output )
+            # log.debug( '%s stderr output:\n%s', rel_script_path, stderr_output )
+
+            log.debug( 'process.returncode: %d', process.returncode )
+
+            # 1.1 has an annoying info bar that happens before it gets to our stuff, so...
+            stdout_output = '\n'.join( stdout_output.split( '\n' )[1:] )
+            # log.debug( 'stdout_output:\n' + stdout_output )
+
+            if process.returncode == 1:
+                # TODO: this is a fail on first effect
+                raise self.browser_error_to_exception( rel_script_path, stdout_output )
+
+        # couldn't find the headless browser,
+        #   provide information (as it won't be included by default with galaxy)
+        except OSError as os_err:
+            if os_err.errno == errno.ENOENT:
+                log.error( 'No path to headless browser executable: %s\n' +
+                           'These tests were designed to use the following headless browser:\n%s',
+                           self.exec_path, self.casper_info )
+            raise
+
+        return self.handle_js_results( stdout_output )
+
+    def build_command_line( self, rel_script_path, *args, **kwargs ):
+        """Build the headless browser command line list for subprocess.
+        """
+        command_line_list = [ self.exec_path ]
+
+        # as of casperjs 1.1, we always need to use the 'test' command
+        command_line_list.append( 'test' )
+
+        # make rel_script_path an absolute path (when this is not run from its dir - i.e. run_tests.sh)
+        curr_dir = os.path.dirname( __file__ )
+        script_path = os.path.join( curr_dir, rel_script_path )
+        command_line_list.append( script_path )
+
+        # let browser know where the server is (from the TestEnvironment created in setUp)
+        command_line_list.append( '--url=' + self.env.url )
+
+        # add the return json only option
+        #   - has script send normal output to stderr and results, errors, logs to stdout as json
+        command_line_list.append( '--return-json' )
+
+        # check flag to output (very) verbose debugging messages from casperjs and tests
+        # NOTE: this can be set in the class or by using the debug_these_tests flag in server_env
+        if self.debug or ( rel_script_path in self.env.debug_these_tests ):
+            command_line_list.extend([ '--verbose=true', '--logLevel=debug' ])
+            # TODO: add capture, html output flags
+
+        # TODO: allow casperjs cli options ('--includes='), ?in args, kwargs?
+        command_line_list.extend( args )
+
+        # send extra data - encode kwargs as json to pass to casper for decoding
+        # as of casperjs 1.1, we need to pass this as a single '--data' option
+        command_line_list.append( '--data=' + json.dumps( kwargs ) )
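+        # the assembled list looks roughly like (values depend on env and flags):
+        #   [ 'casperjs', 'test', '/abs/path/to/script.js', '--url=http://localhost:8080',
+        #     '--return-json', '--data={...}' ]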
+        return command_line_list
+
+    def strip_escape_codes( self, msg ):
+        """Removes colorizing escape codes from casper output strings.
+        """
+        if not self.escape_code_compiled_pattern:
+            self.escape_code_compiled_pattern = re.compile( self.escape_code_pattern )
+        return re.sub( self.escape_code_compiled_pattern, '', msg )
+
+    # ---------------------------------------------------------------- convert js error to python error
+    def browser_error_to_exception( self, script_path, stdout_output ):
+        """Converts the headless' error from JSON into a more informative
+        python HeadlessJSJavascriptError.
+        """
+        try:
+            # assume it's json and located in errors (and first)
+            js_test_results = json.loads( stdout_output )
+            last_error = js_test_results['errors'][0]
+            err_string = ( "%s\n%s" % ( last_error['msg'],
+                           self.browser_backtrace_to_string( last_error['backtrace'] ) ) )
+
+        # if we couldn't parse json from what's returned on the error, dump stdout
+        except ValueError as val_err:
+            if str( val_err ) == 'No JSON object could be decoded':
+                log.debug( '(error parsing returned JSON from casperjs, dumping stdout...):\n%s', stdout_output )
+                return HeadlessJSJavascriptError( 'see log for details' )
+            else:
+                raise
+
+        # otherwise, raise a vanilla exc
+        except Exception as exc:
+            log.debug( '(failed to parse error returned from %s: %s)', _PATH_TO_HEADLESS, str( exc ) )
+            return HeadlessJSJavascriptError(
+                "ERROR in headless browser script %s" % ( script_path ) )
+
+        # otherwise, raise with msg and backtrace
+        return HeadlessJSJavascriptError( err_string )
+
+    def browser_backtrace_to_string( self, backtrace ):
+        """Converts list of trace dictionaries (as might be returned from
+        json results) to a string similar to a python backtrace.
+        """
+        template = '  File "%s", line %s, in %s'
+        traces = []
+        for trace in backtrace:
+            traces.append( template % ( trace[ 'file' ], trace[ 'line' ], trace[ 'function' ] ) )
+        return '\n'.join( traces )
+
+    # ---------------------------------------------------------------- results
+    def handle_js_results( self, results ):
+        """Handle the results of the js tests by either converting them
+        with the results adapter or checking for a failure list.
+        """
+
+        # if given an adapter - use it
+        if self.results_adapter:
+            self.results_adapter.convert( results, self )
+
+        # - otherwise, assert no failures found
+        else:
+            js_test_results = json.loads( results )
+            failures = js_test_results[ 'failures' ]
+            assert len( failures ) == 0, (
+                "%d assertions failed in the headless browser tests" % ( len( failures ) ) +
+                " (see the log for details)" )
+
+    # ---------------------------------------------------------------- TestCase overrides
+    def setUp( self ):
+        # set up the env for each test
+        self.env = TestEnvironment.instance()
+        self.exec_path = _PATH_TO_HEADLESS
+
+    def run( self, result=None ):
+        # wrap this in order to save ref to result
+        # TODO: gotta be a better way
+        self.result = result
+        unittest.TestCase.run( self, result=result )
+
+
+# ==================================================================== RESULTS CONVERSION
+class CasperJsonToUnittestResultsConverter( object ):
+    """Convert casper failures, success to individual unittest.TestResults
+    """
+    # TODO: So far I can add result instances - but each has the id, shortDescription
+    #   of the TestCase.testMethod that called it. Can't find out how to change these.
+
+    def convert( self, json_results, test ):
+        """Converts JSON test results into unittest.TestResults.
+
+        precondition: test should have attribute 'result' which
+        is a unittest.TestResult (for that test).
+        """
+        results_dict = json.loads( json_results )
+        failures = results_dict[ 'testResults' ][ 'failures' ]
+        passes = results_dict[ 'testResults' ][ 'passes' ]
+        self.add_json_failures_to_results( failures, test )
+        self.add_json_successes_to_results( passes, test )
+
+    def add_json_failures_to_results( self, failures, test ):
+        """Converts JSON test failures.
+        """
+        # precondition: result should be an attr of test (a TestResult)
+        # TODO: no way to change test.desc, name in output?
+        for failure in failures:
+            # TODO: doesn't change shortDescription
+            # if 'standard' in failure:
+            #    self.__doc__ = failure[ 'standard' ]
+            test.result.addFailure( test, self.casper_failure_to_unittest_failure( failure ) )
+            test.result.testsRun += 1
+
+    def casper_failure_to_unittest_failure( self, casper_failure, failure_class=AssertionError ):
+        """Returns a casper test failure (in dictionary form) as a 3-tuple of
+        the form used by unittest.TestResult.addFailure.
+
+        Used to add failures to a casperjs TestCase.
+        """
+        # TODO: this is all too elaborate
+        fail_type = casper_failure[ 'type' ]
+        values = json.dumps( casper_failure[ 'values' ] )
+        desc = casper_failure[ 'standard' ]
+        if 'message' in casper_failure:
+            desc = casper_failure[ 'message' ]
+        failure_msg = "(%s) %s: %s" % ( fail_type, desc, values )
+        # TODO: tb is empty ([]) - can we get file info from casper, convert to py trace?
+        return ( failure_class, failure_msg, [] )
+
+    def add_json_successes_to_results( self, successes, test ):
+        """Converts JSON test successes.
+        """
+        for success in successes:
+            # attempt to re-write test result description - doesn't work
+            # if 'standard' in success:
+            #    self.__doc__ = success[ 'standard' ]
+            test.result.addSuccess( test )
+            test.result.testsRun += 1
+
+
+# ==================================================================== MODULE FIXTURE
+# NOTE: nose will run these automatically
+def setup_module():
+    log.debug( '\n--------------- setting up module' )
+
+
+def teardown_module():
+    log.debug( '\n--------------- tearing down module' )
+
+
+test_user = {
+    'email': 'test1@test.test',
+    'password': '123456'
+}
+
+
+# ==================================================================== TESTCASE EXAMPLE
+# these could be broken out into other py files - shouldn't be necessary ATM
+class Test_01_User( CasperJSTestCase ):
+    """Tests for the Galaxy user centered functionality:
+    registration, login, etc.
+    """
+    def test_10_registration( self ):
+        """User registration tests:
+        register new user, logout, attempt bad registrations.
+        """
+        # all keywords will be compiled into a single JSON obj and passed to the server
+        # self.run_js_script( 'registration-tests.js',
+        #    testUser=test_user )
+        #    # this causes a time out in history-panel-tests: why?
+        #    # also: I can't seem to bump the timeout to an error (using a handler) - causes script to hang
+        #    #   removing for the sake of bbot
+        self.run_js_script( 'registration-tests.js' )
+
+        # TODO:?? could theoretically do db cleanup, checks here with SQLALX
+        # TODO: have run_js_script return other persistent fixture data (uploaded files, etc.)
+
+    def test_20_login( self ):
+        """User log in tests.
+        """
+        self.run_js_script( 'login-tests.js' )
+
+
+class Test_02_Tools( CasperJSTestCase ):
+    """(Minimal) casperjs tests for tools.
+    """
+    def test_10_upload( self ):
+        """Tests uploading files
+        """
+        self.run_js_script( 'upload-tests.js' )
+
+
+class Test_03_HistoryPanel( CasperJSTestCase ):
+    """Tests for History fetching, rendering, and modeling.
+    """
+    def test_00_history_panel( self ):
+        """Test history panel basics (controls, structure, refresh, history options menu, etc.).
+        """
+        self.run_js_script( 'history-panel-tests.js' )
+
+    def test_10_history_options( self ):
+        """Test history options button.
+        """
+        self.run_js_script( 'history-options-tests.js' )
+
+    def test_20_anonymous_histories( self ):
+        """Test history panel basics with an anonymous user.
+        """
+        self.run_js_script( 'anon-history-tests.js' )
+
+
+class Test_04_HDAs( CasperJSTestCase ):
+    """Tests for HistoryDatasetAssociation fetching, rendering, and modeling.
+    """
+    def test_00_HDA_states( self ):
+        """Test structure rendering of HDAs in all the possible HDA states
+        """
+        self.run_js_script( 'hda-state-tests.js' )
+
+
+class Test_05_API( CasperJSTestCase ):
+    """Tests for API functionality and security.
+    """
+    def test_00_history_api( self ):
+        """Test history API.
+        """
+        self.run_js_script( 'api-history-tests.js' )
+
+    def test_01_hda_api( self ):
+        """Test HDA API.
+        """
+        self.run_js_script( 'api-hda-tests.js' )
+
+    def test_02_history_permissions_api( self ):
+        """Test API permissions for importable, published histories.
+        """
+        self.run_js_script( 'api-history-permission-tests.js' )
+
+    def test_03_anon_history_api( self ):
+        """Test API for histories using anonymous user.
+        """
+        self.run_js_script( 'api-anon-history-tests.js' )
+
+    def test_04_anon_history_permissions_api( self ):
+        """Test API permissions for importable, published histories using anonymous user.
+        """
+        self.run_js_script( 'api-anon-history-permission-tests.js' )
+
+    def test_06_visualization_api( self ):
+        """Test API for visualizations.
+        """
+        self.run_js_script( 'api-visualizations-tests.js' )
+
+    def test_07_tools_api( self ):
+        """Test API for tools.
+        """
+        self.run_js_script( 'api-tool-tests.js' )
+
+    def test_08_configuration_api( self ):
+        """Test API for configuration.
+        """
+        self.run_js_script( 'api-configuration-tests.js' )
+
+    def test_09_user_api( self ):
+        """Test API for users.
+        """
+        self.run_js_script( 'api-user-tests.js' )
+
+
+# ==================================================================== MAIN
+if __name__ == '__main__':
+    log.setLevel( logging.DEBUG )
+    from server_env import log as server_env_log
+    server_env_log.setLevel( logging.DEBUG )
+    setup_module()
+    # TODO: server_env config doesn't work with unittest's lame main fn
+    unittest.main()
+    # teardown_module() isn't called when unittest.main is used
diff --git a/test/casperjs/hda-state-tests.js b/test/casperjs/hda-state-tests.js
new file mode 100644
index 0000000..e158488
--- /dev/null
+++ b/test/casperjs/hda-state-tests.js
@@ -0,0 +1,397 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test the form of various HDA states', 0, function suite( test ){
+    spaceghost.start();
+
+    // ===================================================================
+    /* TODO:
+        currently going to fake states via JS
+            - better if we can capture actual hdas in these states
+            - easier said than done - API?
+    */
+    // =================================================================== globals and helpers
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+        spaceghost.info( 'Will use fixtureData.testUser: ' + email );
+    }
+
+    var tooltipSelector = spaceghost.data.selectors.tooltipBalloon,
+        filenameToUpload = '1.txt',
+        filepathToUpload = '../../test-data/' + filenameToUpload,
+        uploadId = null,
+        //TODO: get from the api module - that doesn't exist yet
+        summaryShouldBeArray = [ '10 lines' ],
+        infoShouldBe = 'uploaded txt file',
+        metadataFiles = null,
+        peekShouldBeArray = [];
+
+    // ------------------------------------------------------------------- set up
+    // start a new user and upload a file
+    spaceghost.user.loginOrRegisterUser( email, password );
+    spaceghost.api.tools.thenUploadToCurrent({
+        filepath: filepathToUpload,
+        ext: 'txt'
+
+    }, function upload( id, json ){
+        uploadId = id;
+    });
+    spaceghost.openHomePage();
+
+    // =================================================================== TEST HELPERS
+    //NOTE: to be called with fn.call( spaceghost, ... )
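+    //  e.g. testTitle.call( spaceghost, '#dataset-' + uploadId, filenameToUpload );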
+
+    function testTitle( hdaSelector, name ){
+        var titleSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.title;
+        this.test.assertVisible( titleSelector,
+            'HDA title is visible' );
+        this.test.assertSelectorHasText( titleSelector, name,
+            'HDA contains name (' + name + '): ' + this.fetchText( titleSelector ) );
+    }
+
+    function testIconButton( hdaDbId, containerSelector, buttonName, expectedButtonData ){
+        this.test.comment( buttonName + ' should exist, be visible, and well formed' );
+        this.debug( 'checking button "' + buttonName + '" within "' + containerSelector + '":\n' +
+            this.jsonStr( expectedButtonData ) );
+
+        if( !expectedButtonData.selector ){ this.test.fail( 'BAD TEST DATA: no selector given' ); }
+        var btnSelector = containerSelector + ' ' + expectedButtonData.selector;
+        this.test.assertExists( btnSelector,  buttonName + ' button exists' );
+        this.test.assertVisible( btnSelector, buttonName + ' button is visible' );
+
+        var buttonElement = this.getElementInfo( btnSelector );
+        this.debug( 'buttonElement:' + this.jsonStr( this.quickInfo( buttonElement ) ) );
+
+        if( expectedButtonData.nodeName ){
+            this.test.assert( buttonElement.nodeName === expectedButtonData.nodeName,
+                buttonName + ' is proper node type (' + expectedButtonData.nodeName + '): ' + buttonElement.nodeName );
+        }
+
+        if( expectedButtonData.hrefTpl ){
+            var href = buttonElement.attributes.href,
+                hrefShouldBe = ( expectedButtonData.hrefTpl.indexOf( '%s' ) !== -1 )?
+                    ( utils.format( expectedButtonData.hrefTpl, hdaDbId ) ):( expectedButtonData.hrefTpl );
+            this.assertTextContains( href, hrefShouldBe,
+                buttonName + ' has proper href (' + hrefShouldBe + '): ' + href );
+        }
+
+        if( expectedButtonData.tooltip ){
+            this.hoverOver( btnSelector );
+            var tooltipText = expectedButtonData.tooltip;
+            this.test.assertVisible( tooltipSelector, buttonName + ' button tooltip is visible when hovering' );
+            this.test.assertSelectorHasText( tooltipSelector, tooltipText,
+                buttonName + ' button has tooltip text: "' + tooltipText + '"' );
+            // clear the tooltip
+            this.page.sendEvent( 'mouseover', 0, 0 );
+        }
+    }
+
+    function testTitleButtonStructure( hdaSelector, shouldHaveTheseButtons ){
+        // defaults to the current buttons most states should have
+        shouldHaveTheseButtons = shouldHaveTheseButtons || [ 'display', 'edit', 'delete' ];
+
+        var hdaDbId = this.getElementAttribute( hdaSelector, 'id' ).split( '-' )[1],
+            buttonsArea = hdaSelector + ' ' + this.historypanel.data.selectors.hda.titleButtonArea,
+            buttons = this.historypanel.data.hdaTitleButtons;
+
+        this.test.assertVisible( buttonsArea, 'Button area is visible' );
+
+        for( var i=0; i<shouldHaveTheseButtons.length; i += 1 ){
+            // don't use button names we don't have data for
+            var buttonName = shouldHaveTheseButtons[ i ];
+            if( !buttons.hasOwnProperty( buttonName ) ){ continue; }
+            var button = buttons[ buttonName ];
+
+            testIconButton.call( this, hdaDbId, buttonsArea, buttonName, button );
+        }
+    }
+
+    function testDbkey( hdaSelector, dbkeySetTo ){
+        var dbkeySelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body
+                                        + ' ' + this.historypanel.data.selectors.hda.dbkey,
+            unspecifiedDbkeyText     = '?',
+            unspecifiedDbkeyNodeName = 'a',
+            specifiedDbkeyNodeName   = 'span',
+            editAttrHrefRegex = /\/datasets\/\w+\/edit/;
+
+        this.test.assertExists( dbkeySelector, 'dbkey exists' );
+        this.test.assertVisible( dbkeySelector, 'dbkey is visible' );
+        var dbkey = this.elementInfoOrNull( dbkeySelector );
+        if( !dbkey ){ return; }
+
+        // dbkey is set, check text
+        if( dbkeySetTo ){
+            this.test.comment( '(specified) dbkey should be displayed correctly' );
+            this.test.assertSelectorHasText( dbkeySelector, dbkeySetTo,
+                'dbkey is specified: ' + dbkey.text );
+            this.test.assert( dbkey.nodeName === specifiedDbkeyNodeName,
+                'dbkey has proper nodeName (' + specifiedDbkeyNodeName + '): ' + dbkey.nodeName );
+
+        // dbkey expected to be not set
+        } else {
+            this.test.comment( '(unspecified) dbkey should be displayed correctly' );
+            this.test.assertSelectorHasText( dbkeySelector, unspecifiedDbkeyText,
+                'dbkey is not specified: ' + dbkey.text );
+            this.test.assert( dbkey.nodeName === unspecifiedDbkeyNodeName,
+                'dbkey has proper nodeName (' + unspecifiedDbkeyNodeName + '): ' + dbkey.nodeName );
+
+            this.test.comment( '(unspecified) dbkey href should point to edit attributes' );
+            this.test.assertMatch( dbkey.attributes.href, editAttrHrefRegex,
+                'dbkey has a proper href: ' + dbkey.attributes.href );
+        }
+    }
+
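+    //TODO: unimplemented placeholder - the download menu checks currently live in
+    //  testPrimaryActionButtons below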
+    function testDownloadMenu( hdaSelector, expectedMetadataFiles ){
+        var hdaDbId = this.getElementAttribute( hdaSelector, 'id' ).split( '-' )[1];
+
+        // assert has classes: menubutton split popup
+        // click popup
+    }
+
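+    //TODO: unimplemented placeholder - would check a single metadata file download
+    //  link within the menu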
+    function testMetadataDownloadLink( menuSelector, metadataFile ){
+
+    }
+
+    function testPrimaryActionButtons( hdaSelector, expectedMetadataFiles ){
+        //TODO: not abstracted well for all states
+        var hdaDbId = this.getElementAttribute( hdaSelector, 'id' ).split( '-' )[1],
+            buttonsSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body
+                                          + ' ' + this.historypanel.data.selectors.hda.primaryActionButtons,
+            dropdownSelector = '#' + utils.format(
+                this.historypanel.data.hdaPrimaryActionButtons.downloadDropdownButtonIdTpl, hdaDbId );
+
+        this.test.comment( 'Primary action buttons div should exist and be visible' );
+        this.test.assertExists( buttonsSelector, 'Primary action buttons div exists' );
+        this.test.assertVisible( buttonsSelector, 'Primary action buttons div is visible' );
+        //TODO: ...
+        // different states, datatypes will have different action buttons
+        testIconButton.call( this, hdaDbId, buttonsSelector, 'info',
+            this.historypanel.data.hdaPrimaryActionButtons.info );
+        // testIconButton.call( this, hdaDbId, buttonsSelector, 'rerun',
+        //     this.historypanel.data.hdaPrimaryActionButtons.rerun );
+
+        //TODO: move to testDownloadButton as its own step
+        if( !expectedMetadataFiles ){
+            this.test.comment( 'no expected metadata, download button should be an icon button' );
+            this.test.assertDoesntExist( dropdownSelector, 'no dropdown selector exists: ' + dropdownSelector );
+            testIconButton.call( this, hdaDbId, buttonsSelector, 'download',
+                this.historypanel.data.hdaPrimaryActionButtons.download );
+
+        } else {
+            this.test.comment( 'expecting metadata, download button should be a popup menu' );
+
+            // will be a drop down and should contain links to all metadata files
+            this.test.assertVisible( dropdownSelector, 'dropdown menu button visible: ' + dropdownSelector );
+            testIconButton.call( this, hdaDbId, dropdownSelector, 'download',
+                this.historypanel.data.hdaPrimaryActionButtons.download );
+
+            this.test.comment( 'clicking the button should show a popup menu with download links' );
+            this.click( dropdownSelector );
+            this.wait( 100, function(){
+                //TODO: abstract to popup menu checker
+                var menuSelector = '#' + utils.format(
+                    this.historypanel.data.hdaPrimaryActionButtons.downloadDropdownMenuIdTpl, hdaDbId );
+                this.test.assertVisible( menuSelector, 'menu visible: ' + menuSelector );
+
+                var liCounter = 1;
+                var mainDataSelector = menuSelector + ' ' + 'li:nth-child(' + liCounter + ') a';
+                this.assertVisibleWithText( mainDataSelector, 'Download Dataset',
+                    mainDataSelector + ' (main data download) has proper text: ' + 'Download Dataset' );
+                liCounter += 1;
+
+                var splitLabelSelector = menuSelector + ' ' + 'li:nth-child(' + liCounter + ') a';
+                this.test.assertVisible( splitLabelSelector, 'split label visible' );
+                this.test.assertSelectorHasText( splitLabelSelector, 'Additional Files',
+                    'split label has proper text' );
+                liCounter += 1;
+
+                var self = this;
+                expectedMetadataFiles.forEach( function( file ){
+                    var linkSelector = menuSelector + ' ' + 'li:nth-child(' + liCounter + ') a';
+                    self.test.assertVisible( linkSelector, '#' + liCounter + ' link visible' );
+                    self.test.assertSelectorHasText( linkSelector, 'Download ' + file,
+                        '#' + liCounter + ' link has proper text: Download ' + file );
+                    liCounter += 1;
+                });
+            });
+        }
+    }
+
+    function testSecondaryActionButtons( hdaSelector ){
+        var buttonsSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body
+                                          + ' ' + this.historypanel.data.selectors.hda.secondaryActionButtons;
+        this.test.comment( 'Secondary action buttons div should exist and be visible' );
+        this.test.assertExists( buttonsSelector, 'Secondary action buttons div exists' );
+        this.test.assertVisible( buttonsSelector, 'Secondary action buttons div is visible' );
+        //TODO: ...
+        // tags, annotations
+    }
+
+    function testPeek( hdaSelector, expectedPeekArray ){
+        var peekSelector = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body
+                                       + ' ' + this.historypanel.data.selectors.hda.peek;
+        this.test.comment( 'Peek div should exist and be visible' );
+        this.test.assertExists( peekSelector, 'peek exists' );
+        this.test.assertVisible( peekSelector, 'peek is visible' );
+        expectedPeekArray.forEach( function( string, i ){
+            spaceghost.test.assertSelectorHasText( peekSelector, string, 'peek has proper text (' + string + ')' );
+        });
+    }
+
+    function testExpandedBody( hdaSelector, expectedSummaryTextArray, expectedInfoText, dbkeySetTo, expectedMetadata ){
+        var body = hdaSelector + ' ' + this.historypanel.data.selectors.hda.body;
+        this.test.assertExists( body, 'body exists' );
+        this.test.assertVisible( body, 'body is visible' );
+
+        //TODO: create api module, match with api history_contents
+
+        this.test.comment( 'Summary should be displayed correctly' );
+        var summary = body + ' ' + this.historypanel.data.selectors.hda.summary;
+        this.test.assertExists( summary, 'summary exists' );
+        this.test.assertVisible( summary, 'summary is visible' );
+        // summary text is broken up by whitespace, making it inconvenient to test in one go
+        expectedSummaryTextArray.forEach( function( string, i ){
+            spaceghost.test.assertSelectorHasText( summary, string, 'summary has proper text (' + string + ')' );
+        });
+        this.debug( 'summary text: ' + this.fetchText( summary ) );
+
+        testDbkey.call( this, hdaSelector, dbkeySetTo );
+
+        this.test.comment( 'Info should be displayed correctly' );
+        var info = body + ' ' + this.historypanel.data.selectors.hda.info;
+        this.test.assertExists( info, 'info exists' );
+        this.test.assertVisible( info, 'info is visible' );
+        this.test.assertSelectorHasText( info, expectedInfoText,
+            'info has proper text (' + expectedInfoText + '): ' + this.fetchText( info ) );
+
+        testPrimaryActionButtons.call( this, hdaSelector, expectedMetadata );
+        testSecondaryActionButtons.call( this, hdaSelector ); //TODO: isAnonymous
+        testPeek.call( this, hdaSelector, peekShouldBeArray );
+    }
+
+    // =================================================================== TESTS
+    // ------------------------------------------------------------------- ok state
+    spaceghost.then( function(){
+        this.test.comment( 'HDAs in the "ok" state should be well formed' );
+
+        var uploadSelector = '#dataset-' + uploadId;
+        this.test.assertVisible( uploadSelector, 'HDA is visible' );
+
+        this.test.comment( 'should have the proper state class' );
+        this.assertHasClass( uploadSelector, this.historypanel.data.selectors.hda.wrapper.stateClasses.ok,
+            'HDA has ok state class' );
+
+        // since we're using css there's no great way to test state icon (.state-icon is empty)
+
+        this.test.comment( 'should have proper title and hid' );
+        testTitle.call( spaceghost, uploadSelector, filenameToUpload );
+
+        this.test.comment( 'should have all of the three, main buttons' );
+        testTitleButtonStructure.call( spaceghost, uploadSelector );
+
+        this.test.comment( 'body is not visible before clicking the hda title' );
+        var body = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
+        this.test.assertNotVisible( body, 'body is not visible' );
+
+        this.test.comment( 'clicking the hda title should expand its body' );
+        this.historypanel.thenExpandHda( uploadSelector, function(){
+            testExpandedBody.call( spaceghost, uploadSelector,
+                summaryShouldBeArray, infoShouldBe, false, metadataFiles );
+            //testExpandedBody.call( spaceghost, uploadSelector,
+            //    summaryShouldBeArray, infoShouldBe, false );
+        });
+    });
+    // restore to collapsed
+    spaceghost.then( function(){
+        this.test.comment( "Collapsing hda in 'ok' state should hide body again" );
+        var uploadSelector = '#dataset-' + uploadId;
+
+        spaceghost.historypanel.thenCollapseHda( uploadSelector, function collapseOkState(){
+            this.test.assertNotVisible( uploadSelector + ' ' + this.historypanel.data.selectors.hda.body,
+                'body is not visible' );
+        });
+    });
+
+    // ------------------------------------------------------------------- new state
+    spaceghost.then( function(){
+        // set state directly through model, wait for re-render
+        //TODO: not ideal to test this
+        this.evaluate( function(){
+            return Galaxy.currHistoryPanel.model.contents.at( 0 ).set( 'state', 'new' );
+        });
+        this.wait( 1000, function(){
+            this.test.comment( 'HDAs in the "new" state should be well formed' );
+
+            var uploadSelector = '#dataset-' + uploadId;
+            this.test.assertVisible( uploadSelector, 'HDA is visible' );
+            // should have proper title and hid
+            testTitle.call( spaceghost, uploadSelector, filenameToUpload );
+
+            this.test.comment( 'new HDA should have the new state class' );
+            this.assertHasClass( uploadSelector, this.historypanel.data.selectors.hda.wrapper.stateClasses['new'],
+                'HDA has new state class' );
+
+            this.test.comment( 'new HDA should NOT have any of the three, main buttons' );
+            var buttonSelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.titleButtons + ' a';
+            this.test.assertDoesntExist( buttonSelector, 'No display, edit, or delete buttons' );
+
+            this.test.comment( 'clicking the title of the new HDA will expand the body' );
+
+            this.historypanel.thenExpandHda( uploadSelector, function(){
+                var bodySelector = uploadSelector + ' ' + this.historypanel.data.selectors.hda.body;
+                this.test.assertVisible( bodySelector, 'HDA body is visible (after expanding)' );
+
+                var expectedBodyText = 'This is a new dataset';
+                this.test.comment( 'the body should have the text: ' + expectedBodyText );
+                this.test.assertSelectorHasText( bodySelector, expectedBodyText,
+                    'HDA body has text: ' + expectedBodyText );
+            });
+
+            this.then( function(){
+                this.test.comment( 'a simulated error on a new dataset should appear in a message box' );
+                // datasets that error on fetching their data appear as 'new', so do this here
+                // more of a unit test, but ok
+                var errorString = 'Blah!';
+
+                this.evaluate( function( errorString ){
+                    return Galaxy.currHistoryPanel.model.contents.getByHid( 1 ).set( 'error', errorString );
+                }, errorString );
+
+                // wait for re-render
+                this.wait( 1000, function(){
+                    var errorMessage = this.historypanel.data.selectors.hda.errorMessage;
+
+                    this.test.assertExists( errorMessage, 'error message exists' );
+                    this.test.assertVisible( errorMessage, 'error message is visible' );
+                    this.test.assertSelectorHasText( errorMessage, this.historypanel.data.text.hda.datasetFetchErrorMsg,
+                        'error message has text: ' + this.historypanel.data.text.hda.datasetFetchErrorMsg );
+                    this.test.assertSelectorHasText( errorMessage, errorString,
+                        'error message has error string: ' + errorString );
+                });
+            });
+        });
+    });
+    // restore state, collapse
+    spaceghost.then( function revertStateAndCollapse(){
+        var uploadSelector = '#dataset-' + uploadId;
+
+        this.historypanel.thenCollapseHda( uploadSelector, function(){
+            this.evaluate( function(){
+                Galaxy.currHistoryPanel.model.contents.getByHid( 1 ).unset( 'error' );
+                return Galaxy.currHistoryPanel.model.contents.at( 0 ).set( 'state', 'ok' );
+            });
+        });
+        this.wait( 1000 );
+    });
+    /*
+    */
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/history-options-tests.js b/test/casperjs/history-options-tests.js
new file mode 100644
index 0000000..abaf563
--- /dev/null
+++ b/test/casperjs/history-options-tests.js
@@ -0,0 +1,77 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Testing the history options menu', 0, function suite( test ){
+    spaceghost.start();
+    // ===================================================================
+
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+        spaceghost.info( 'Will use fixtureData.testUser: ' + email );
+    }
+
+    var includeDeletedOptionsLabel = spaceghost.historyoptions.data.labels.options.includeDeleted,
+        filepathToUpload = '../../test-data/1.txt',
+        uploadId = null;
+
+    // ------------------------------------------------------------------- set up
+    // start a new user and upload a file
+    spaceghost.user.loginOrRegisterUser( email, password );
+    spaceghost.api.tools.thenUploadToCurrent({ filepath: filepathToUpload }, function uploadCallback( id, json ){
+        uploadId = id;
+    });
+
+    // ------------------------------------------------------------------- history options menu structure
+    //NOTE: options menu should be functionally tested elsewhere
+    spaceghost.openHomePage().historypanel.waitForHdas().then( function checkHistoryOptions(){
+        this.test.comment( 'History options icon should be in place and menu should have the proper structure' );
+
+        // check the button and icon
+        this.test.assertExists(  this.historyoptions.data.selectors.button, "Found history options button" );
+        this.test.assertVisible( this.historyoptions.data.selectors.button, "History options button is visible" );
+        this.test.assertVisible( this.historyoptions.data.selectors.buttonIcon, "History options icon is visible" );
+
+        // open the menu
+        this.click( this.historyoptions.data.selectors.button );
+        this.test.assertVisible( this.historyoptions.data.selectors.menu,
+            "Menu is visible when options button is clicked" );
+
+        // check the options
+        var historyOptions = this.historyoptions.data.labels.options;
+        for( var optionKey in historyOptions ){
+            if( historyOptions.hasOwnProperty( optionKey ) ){
+                var optionLabel = historyOptions[ optionKey ];
+                this.test.assertVisible( this.historyoptions.data.selectors.optionXpathByLabelFn( optionLabel ),
+                    'Option label is visible: ' + optionLabel );
+            }
+        }
+
+        // clear the menu
+        this.click( 'body' );
+        this.test.assertNotVisible( this.historyoptions.data.selectors.menu,
+            "Clicking away from the menu closes it" );
+    });
+
+    // ------------------------------------------------------------------- history options collapses all expanded hdas
+    spaceghost.then( function(){
+        this.historypanel.thenExpandHda( '#dataset-' + uploadId );
+    });
+    spaceghost.then( function(){
+        this.test.comment( 'History options menu collapses all expanded hdas' );
+
+        this.historyoptions.collapseExpanded( function(){
+            var uploadedSelector = '#dataset-' + uploadId;
+            this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+                "Body for uploaded file is not visible" );
+        });
+    });
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/history-panel-tests.js b/test/casperjs/history-panel-tests.js
new file mode 100644
index 0000000..bf6488b
--- /dev/null
+++ b/test/casperjs/history-panel-tests.js
@@ -0,0 +1,271 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Testing the form of the main/current history panel', 0, function suite( test ){
+    spaceghost.start();
+
+    // ===================================================================
+    /* TODO:
+        possibly break this file up
+    */
+    // =================================================================== globals and helpers
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+        spaceghost.info( 'Will use fixtureData.testUser: ' + email );
+    }
+
+    // selectors and labels
+    var tooltipSelector     = spaceghost.data.selectors.tooltipBalloon,
+        editableTextClass   = spaceghost.data.selectors.editableText,
+        editableTextInput   = spaceghost.historypanel.data.selectors.history.nameEditableTextInput,
+
+        nameSelector     = spaceghost.historypanel.data.selectors.history.name,
+        sizeSelector     = spaceghost.historypanel.data.selectors.history.size,
+        unnamedName      = spaceghost.historypanel.data.text.history.newName,
+        initialSizeStr   = spaceghost.historypanel.data.text.history.newSize,
+        tagIconSelector  = spaceghost.historypanel.data.selectors.history.tagIcon,
+        annoIconSelector = spaceghost.historypanel.data.selectors.history.annoIcon,
+        emptyMsgSelector = spaceghost.historypanel.data.selectors.history.emptyMsg,
+        emptyMsgStr      = spaceghost.historypanel.data.text.history.emptyMsg,
+        tagAreaSelector  = spaceghost.historypanel.data.selectors.history.tagArea,
+        annoAreaSelector = spaceghost.historypanel.data.selectors.history.annoArea,
+        nameTooltip      = spaceghost.historypanel.data.text.history.tooltips.name,
+
+        refreshButtonSelector       = 'a#history-refresh-button',
+        refreshButtonIconSelector   = 'span.fa-refresh',
+        bytesString = 'b';
+
+    // local
+    var newHistoryName = "Test History",
+        filepathToUpload = '../../test-data/1.txt',
+        uploadId = null;
+
+    // =================================================================== TESTS
+    // ------------------------------------------------------------------- set up
+    // start a new user
+    spaceghost.user.loginOrRegisterUser( email, password );
+
+    // ------------------------------------------------------------------- check structure of empty history
+    spaceghost.openHomePage().historypanel.waitForHdas( function(){
+        this.test.comment( 'history panel with a new, empty history should be well formed' );
+
+        this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
+        this.test.assertExists( nameSelector, nameSelector + ' exists' );
+        this.test.assertVisible( nameSelector, 'History name is visible' );
+        this.test.assertSelectorHasText( nameSelector, unnamedName, 'History name is ' + unnamedName );
+
+        this.test.comment( "history should display size and size should be: " + initialSizeStr );
+        this.test.assertExists( sizeSelector, 'Found ' + sizeSelector );
+        this.test.assertVisible( sizeSelector, 'History size is visible' );
+        this.test.assertSelectorHasText( sizeSelector, initialSizeStr,
+            'History size has "' + initialSizeStr + '"' );
+
+        this.test.comment( "tags and annotation icons should be available" );
+        this.test.assertExists( tagIconSelector,  'Tag icon button found' );
+        this.test.assertExists( annoIconSelector, 'Annotation icon button found' );
+
+        this.test.comment( "A message about the current history being empty should be displayed" );
+        this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
+        this.test.assertVisible( emptyMsgSelector, 'Empty history message is visible' );
+        this.test.assertSelectorHasText( emptyMsgSelector, emptyMsgStr,
+            'Message contains\n"' + emptyMsgStr + '":\n"' + this.fetchText( emptyMsgSelector ) + '"' );
+    });
+
+    // ------------------------------------------------------------------- name editing
+    spaceghost.then( function(){
+        this.test.comment( 'history panel, editing the history name' );
+
+        this.test.comment( 'name should have a tooltip with proper info on name editing' );
+        this.hoverOver( nameSelector );
+        this.test.assertExists( tooltipSelector, "Found tooltip after name hover" );
+        this.test.assertSelectorHasText( tooltipSelector, nameTooltip );
+        // clear the tooltip
+        this.page.sendEvent( 'mousemove', -1, -1 );
+
+        this.test.comment( 'clicking the name should create an input' );
+        this.assertHasClass( nameSelector, editableTextClass, "Name field classed for editable text" );
+        this.click( nameSelector );
+        spaceghost.debug( editableTextInput );
+        this.test.assertExists( editableTextInput, "Clicking on name creates an input" );
+
+        this.test.comment( 'name should be editable by entering keys and pressing enter' );
+        //NOTE: casperjs.sendKeys adds a click before and a selector.blur after sending - won't work here
+        this.page.sendEvent( 'keypress', newHistoryName );
+        this.page.sendEvent( 'keypress', this.page.event.key.Enter );
+        // wait for send and re-render name
+        this.wait( 1000, function(){
+            this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is ' + newHistoryName );
+            this.test.assertDoesntExist( editableTextInput, "Input disappears after pressing enter" );
+        });
+    });
+
+    spaceghost.then( function(){
+        this.test.comment( 'name should revert if user clicks away while editing' );
+
+        this.click( nameSelector );
+        this.page.sendEvent( 'keypress', "Woodchipper metagenomics, Fargo, ND" );
+
+        this.page.sendEvent( 'mousedown', -1, -1 );
+        this.wait( 1000, function(){
+            this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is STILL ' + newHistoryName );
+            this.test.assertDoesntExist( editableTextInput, "Input disappears after clicking away" );
+        });
+    });
+
+    spaceghost.then( function(){
+        this.test.comment( 'name should revert if user hits ESC while editing' );
+
+        this.click( nameSelector );
+        this.page.sendEvent( 'keypress', "Arsenic Bacteria" );
+
+        this.page.sendEvent( 'keypress', this.page.event.key.Escape );
+        this.wait( 1000, function(){
+            this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is STILL ' + newHistoryName );
+            this.test.assertDoesntExist( editableTextInput, "Input disappears after hitting ESC" );
+        });
+    });
+
+    // ------------------------------------------------------------------- check structure of NON empty history
+    // upload file: 1.txt
+    spaceghost.api.tools.thenUploadToCurrent({ filepath: filepathToUpload }, function uploadCallback( id, json ){
+        uploadId = id;
+    });
+
+    spaceghost.openHomePage().then( function checkPanelStructure(){
+        this.test.comment( 'checking structure of non-empty panel' );
+
+        this.test.comment( "history name should exist, be visible, and have text " + unnamedName );
+        this.test.assertExists( nameSelector, nameSelector + ' exists' );
+        this.test.assertVisible( nameSelector, 'History name is visible' );
+        this.test.assertSelectorHasText( nameSelector, newHistoryName, 'History name is ' + newHistoryName );
+
+        var onetxtFilesize = require( 'fs' ).size( filepathToUpload ),
+            expectedSize = onetxtFilesize + ' ' + bytesString;
+        this.test.comment( "history should display size and size should be " + onetxtFilesize + " " + bytesString );
+        this.test.assertExists( sizeSelector, 'Found ' + sizeSelector );
+        this.test.assertVisible( sizeSelector, 'History size is visible' );
+        this.test.assertSelectorHasText( sizeSelector, expectedSize,
+            'History size has "' + expectedSize + '": ' + this.fetchText( sizeSelector ).trim() );
+
+        this.test.comment( "tags and annotation icons should be available" );
+        this.test.assertExists( tagIconSelector,  'Tag icon button found' );
+        this.test.assertExists( annoIconSelector, 'Annotation icon button found' );
+
+        this.test.comment( "A message about the current history being empty should NOT be displayed" );
+        this.test.assertExists( emptyMsgSelector, emptyMsgSelector + ' exists' );
+        this.test.assertNotVisible( emptyMsgSelector, 'Empty history message is NOT visible' );
+    });
+
+    // ------------------------------------------------------------------- tags
+    // keeping this light here - better for its own test file
+    //TODO: check tooltips
+    spaceghost.then( function openTags(){
+        this.test.comment( 'tag area should open when the history panel tag icon is clicked' );
+
+        this.click( tagIconSelector );
+        this.wait( 1000, function(){
+            this.test.assertVisible( tagAreaSelector, 'Tag area is now displayed' );
+        });
+    });
+    spaceghost.then( function closeTags(){
+        this.test.comment( 'tag area should close when the history panel tag icon is clicked again' );
+
+        this.click( tagIconSelector );
+        this.wait( 1000, function(){
+            this.test.assertNotVisible( tagAreaSelector, 'Tag area is now hidden' );
+        });
+    });
+
+    // ------------------------------------------------------------------- annotation
+    // keeping this light here - better for its own test file
+    //TODO: check tooltips
+    spaceghost.then( function openAnnotation(){
+        this.test.comment( 'annotation area should open when the history panel annotation icon is clicked' );
+
+        this.click( annoIconSelector );
+        this.wait( 1000, function(){
+            this.test.assertVisible( annoAreaSelector, 'Annotation area is now displayed' );
+        });
+    });
+    spaceghost.then( function closeAnnotation(){
+        this.test.comment( 'annotation area should close when the history panel annotation icon is clicked again' );
+
+        this.click( annoIconSelector );
+        this.wait( 1000, function(){
+            this.test.assertNotVisible( annoAreaSelector, 'Annotation area is now hidden' );
+        });
+    });
+
+    // ------------------------------------------------------------------- refresh button
+    spaceghost.then( function refreshButton(){
+        this.test.comment( 'History panel should refresh when the history refresh icon is clicked' );
+
+        this.test.assertExists(  refreshButtonSelector, "Found refresh button" );
+        this.test.assertVisible( refreshButtonSelector, "Refresh button is visible" );
+        this.test.assertVisible( refreshButtonSelector + ' ' + refreshButtonIconSelector, "Refresh icon is visible" );
+
+        //this.assertNavigationRequested( refreshButtonHref, "History refreshed when clicking refresh icon", function(){
+        //    this.click( refreshButtonSelector );
+        //});
+    });
+
+    // ------------------------------------------------------------------- hdas can be expanded by clicking on the hda
+    // broken in webkit w/ jq 1.7
+    spaceghost.historypanel.waitForHdas( function(){
+        this.test.comment( 'HDAs can be expanded by clicking on the name' );
+        var uploadedSelector = '#dataset-' + uploadId;
+
+        this.click( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.title );
+        this.wait( 1000, function(){
+            this.test.assertVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+                "Body for uploaded file is visible" );
+        });
+    });
+
+    // ------------------------------------------------------------------- expanded hdas still expanded after refresh
+    spaceghost.then( function(){
+        this.test.comment( 'Expanded hdas are still expanded after a refresh' );
+        var uploadedSelector = '#dataset-' + uploadId;
+
+        this.click( refreshButtonSelector );
+        this.historypanel.waitForHdas( function(){
+            this.test.assertVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+                "Body for uploaded file is visible" );
+        });
+        // this will break: webkit + jq 1.7
+    });
+
+    // ------------------------------------------------------------------- expanded hdas collapse by clicking name again
+    spaceghost.then( function(){
+        this.test.comment( 'Expanded hdas collapse by clicking name again' );
+        var uploadedSelector = '#dataset-' + uploadId;
+
+        this.click( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.title );
+        this.wait( 500, function(){
+            this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+                "Body for uploaded file is not visible" );
+        });
+    });
+
+    // ------------------------------------------------------------------- collapsed hdas still collapsed after refresh
+    spaceghost.then( function(){
+        this.test.comment( 'collapsed hdas still collapsed after a refresh' );
+        var uploadedSelector = '#dataset-' + uploadId;
+
+        this.click( refreshButtonSelector );
+        this.historypanel.waitForHdas( function(){
+            this.test.assertNotVisible( uploadedSelector + ' ' + this.historypanel.data.selectors.hda.body,
+                "Body for uploaded file is not visible" );
+        });
+    });
+    /*
+    */
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/history-share-tests.js b/test/casperjs/history-share-tests.js
new file mode 100644
index 0000000..53fe704
--- /dev/null
+++ b/test/casperjs/history-share-tests.js
@@ -0,0 +1,227 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Testing the user share form for histories', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== globals and helpers
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+        spaceghost.info( 'Will use fixtureData.testUser: ' + email );
+    }
+    var email2 = spaceghost.user.getRandomEmail(),
+        password2 = '123456';
+    if( spaceghost.fixtureData.testUser2 ){
+        email2 = spaceghost.fixtureData.testUser2.email;
+        password2 = spaceghost.fixtureData.testUser2.password;
+    }
+
+    var shareLink = 'a[href^="/history/share?"]',
+        shareSubmit = 'input[name="share_button"]',
+        firstUserShareButton = '#user-0-popup',
+        shareHistoryId = null,
+        shareUserId = null;
+
+    function fromUserSharePage( fn ){
+        spaceghost.then( function(){
+            this.openHomePage( function(){
+                this.historyoptions.clickOption( 'Share or Publish' );
+            });
+            this.waitForNavigation( 'history/sharing', function(){
+                this.jumpToMain( function(){
+                    this.click( shareLink );
+                });
+            });
+            this.waitForNavigation( 'history/share', function(){
+                this.jumpToMain( function(){
+                    fn.call( this );
+                });
+            });
+        });
+    }
+
+    function thenSwitchUser( email, password ){
+        spaceghost.then( function(){
+            spaceghost.user.logout();
+            spaceghost.user.loginOrRegisterUser( email, password );
+        });
+        return spaceghost;
+    }
+
+    function thenShareWithUser( comment, emailOrId, thenFn ){
+        spaceghost.then( function(){
+            fromUserSharePage( function(){
+                this.test.comment( comment );
+                this.fill( 'form#share', {
+                    email : emailOrId
+                });
+                // strangely, casper's submit=true flag doesn't work well here - need to manually push the button
+                this.click( shareSubmit );
+            });
+            spaceghost.then( function(){
+                this.jumpToMain( function(){
+                    thenFn.call( this );
+                });
+            });
+        });
+        return spaceghost;
+    }
+
+    // =================================================================== TESTS
+    // create user 1 and the test/target history
+    spaceghost.user.loginOrRegisterUser( email, password ).openHomePage( function(){
+        shareHistoryId = this.api.histories.index()[0].id;
+        this.info( 'shareHistoryId: ' + shareHistoryId );
+    });
+    spaceghost.then( function(){
+        // can't share an empty history (for some reason)
+        this.api.tools.thenUpload( shareHistoryId, { filepath: '../../test-data/1.bed' });
+    });
+
+    // create user 2 and make sure they can't access the history right now
+    thenSwitchUser( email2, password2 ).openHomePage( function(){
+        shareUserId = this.api.users.index()[0].id;
+        this.info( 'shareUserId: ' + shareUserId );
+
+        this.test.comment( 'user2 should not have access to test history' );
+        this.api.assertRaises( function(){
+            this.api.histories.show( shareHistoryId );
+        }, 403, 'History is not accessible by user', 'show failed with error' );
+    });
+
+    thenSwitchUser( email, password );
+    thenShareWithUser( "should NOT work: share using non-existant user email", 'chunkylover53 at aol.com', function(){
+        this.test.assertExists( '.errormessage', 'found error message' );
+        this.test.assertSelectorHasText( '.errormessage', 'is not a valid Galaxy user', 'wording is good' );
+    });
+    thenShareWithUser( "should NOT work: share using current user email", email, function(){
+        this.test.assertExists( '.errormessage', 'found error message' );
+        this.test.assertSelectorHasText( '.errormessage', 'You cannot send histories to yourself', 'wording is good' );
+    });
+    thenShareWithUser( "should work: share using email", email2, function(){
+        this.test.assertExists( firstUserShareButton, 'found user share button' );
+        this.test.assertSelectorHasText( firstUserShareButton, email2, 'share button text is email2' );
+    });
+
+    // user 2 can now access the history
+    thenSwitchUser( email2, password2 ).openHomePage( function(){
+        this.test.comment( 'user 2 can now access the history' );
+        this.test.assert( !!this.api.histories.show( shareHistoryId ).id, 'history is accessible by user 2' );
+    });
+
+
+    // remove share
+    thenSwitchUser( email, password ).thenOpen( spaceghost.baseUrl + '/history/sharing', function(){
+        this.jumpToMain( function(){
+            this.click( firstUserShareButton );
+            this.wait( 100, function(){
+                this.click( 'a[href^="/history/sharing?unshare_user"]' );
+            });
+        });
+    });
+    spaceghost.then( function(){
+        this.test.assertDoesntExist( firstUserShareButton, 'no user share button seen' );
+    });
+
+    thenSwitchUser( email2, password2 ).openHomePage( function(){
+        this.test.comment( 'user2 should not have access to test history (again)' );
+        this.api.assertRaises( function(){
+            this.api.histories.show( shareHistoryId );
+        }, 403, 'History is not accessible by user', 'show failed with error' );
+    });
+
+
+    // should NOT work: share using malformed id
+    thenSwitchUser( email, password );
+    thenShareWithUser( "should NOT work: share using malformed id", '1234xyz', function(){
+        this.test.assertExists( '.errormessage', 'found error message' );
+        this.test.assertSelectorHasText( '.errormessage', 'is not a valid Galaxy user', 'wording is good' );
+    });
+    //spaceghost.then( function(){
+    //    // test user share using email
+    //    fromUserSharePage( function(){
+    //        this.test.comment( 'should NOT work: share using malformed id' );
+    //        this.fill( '#share', {
+    //            email : '1234xyz'
+    //        });
+    //        this.click( shareSubmit );
+    //    });
+    //    spaceghost.then( function(){
+    //        this.jumpToMain( function(){
+    //            this.test.assertExists( '.errormessage', 'found error message' );
+    //            this.test.assertSelectorHasText( '.errormessage', 'is not a valid Galaxy user', 'wording is good' );
+    //        });
+    //    });
+    //});
+
+    // should NOT work: share using current user id
+    spaceghost.then( function(){
+        var currUserId = spaceghost.api.users.index()[0].id;
+        thenShareWithUser( "should NOT work: share using current user id", currUserId, function(){
+            this.test.assertExists( '.errormessage', 'found error message' );
+            this.test.assertSelectorHasText( '.errormessage',
+                'You cannot send histories to yourself', 'wording is good' );
+        });
+    });
+    //// should NOT work: share using current user id
+    //spaceghost.then( function(){
+    //    var currUserId = spaceghost.api.users.index()[0].id;
+    //    // test user share using email
+    //    fromUserSharePage( function(){
+    //        this.test.comment( 'should NOT work: share using current user id' );
+    //        this.debug( 'currUserId: ' + currUserId );
+    //        this.fill( 'form#share', {
+    //            email : currUserId
+    //        });
+    //        this.click( shareSubmit );
+    //    });
+    //    spaceghost.then( function(){
+    //        this.jumpToMain( function(){
+    //            this.test.assertExists( '.errormessage', 'found error message' );
+    //            this.test.assertSelectorHasText( '.errormessage',
+    //                'You cannot send histories to yourself', 'wording is good' );
+    //        });
+    //    });
+    //});
+
+    spaceghost.then( function(){
+        thenShareWithUser( "should work: share using id", shareUserId, function(){
+            this.test.assertExists( firstUserShareButton, 'found user share button' );
+            this.test.assertSelectorHasText( firstUserShareButton, email2, 'share button text is email2' );
+        });
+    });
+    //// should work: share using id
+    //spaceghost.then( function(){
+    //    // test user share using email
+    //    fromUserSharePage( function(){
+    //        this.test.comment( 'should work: share using id' );
+    //        this.fill( '#share', {
+    //            email : shareUserId
+    //        });
+    //        this.click( shareSubmit );
+    //    });
+    //    spaceghost.then( function(){
+    //        this.jumpToMain( function(){
+    //            this.test.assertExists( firstUserShareButton, 'found user share button' );
+    //            this.test.assertSelectorHasText( firstUserShareButton, email2, 'share button text is email2' );
+    //        });
+    //    });
+    //});
+
+    // user 2 can now access the history
+    thenSwitchUser( email2, password2 ).openHomePage( function(){
+        this.test.comment( 'user 2 can now access the history' );
+        this.test.assert( !!this.api.histories.show( shareHistoryId ).id, 'history is accessible by user 2' );
+    });
+
+    /*
+    */
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/login-tests.js b/test/casperjs/login-tests.js
new file mode 100644
index 0000000..0c54a25
--- /dev/null
+++ b/test/casperjs/login-tests.js
@@ -0,0 +1,96 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    utils = require( 'utils' ),
+    xpath = require( 'casper' ).selectXPath,
+    format = utils.format;
+
+spaceghost.test.begin( 'Testing logging in and logging out', 0, function suite( test ){
+    spaceghost.start();
+    //console.debug( 'suiteResults: ' + test.suiteResults );
+
+
+// =================================================================== globals and helpers
+var email = spaceghost.user.getRandomEmail(),
+    password = '123456';
+if( spaceghost.fixtureData.testUser ){
+    email = spaceghost.fixtureData.testUser.email;
+    password = spaceghost.fixtureData.testUser.password;
+}
+
+//var userEmailSelector = '//a[contains(text(),"Logged in as")]';
+var userEmailSelector = spaceghost.data.selectors.masthead.userMenu.userEmail_xpath;
+
+// =================================================================== TESTS
+// register a user (again...)
+spaceghost.openHomePage()
+    .user.registerUser( email, password )
+    .user.logout();
+
+spaceghost.openHomePage( function(){
+    this.test.comment( 'log out should be reflected in user menu' );
+    this.test.assertDoesntExist( xpath( userEmailSelector ) );
+    this.test.assert( spaceghost.user.loggedInAs() === '', 'loggedInAs() is empty string' );
+});
+
+// log them back in - check for email in logged in text
+spaceghost.then( function(){
+    this.test.comment( 'logging back in: ' + email );
+    spaceghost.user._submitLogin( email, password );
+});
+spaceghost.openHomePage( function(){
+    this.test.assertSelectorHasText( xpath( userEmailSelector ), email );
+    this.test.assert( spaceghost.user.loggedInAs() === email, 'loggedInAs() matches email' );
+});
+
+// finally log back out for next tests
+spaceghost.user.logout();
+
+// ------------------------------------------------------------------- shouldn't work
+// can't log in: users that don't exist, bad emails, sql injection (hurhur)
+var badEmails = [ 'test2@test.org', 'test', '', "'; SELECT * FROM galaxy_user WHERE 'u' = 'u';" ];
+spaceghost.each( badEmails, function( self, badEmail ){
+    self.then( function(){
+        this.test.comment( 'attempting bad email: ' + badEmail );
+        this.user._submitLogin( badEmail, password );
+    });
+    self.then(function(){
+        this.assertErrorMessage( 'No such user' );
+    });
+});
+
+// can't use passwords that wouldn't be accepted in registration
+var badPasswords = [ '1234', '', '; SELECT * FROM galaxy_user' ];
+spaceghost.each( badPasswords, function( self, badPassword ){
+    self.then( function(){
+        this.test.comment( 'attempting bad password: ' + badPassword );
+        this.user._submitLogin( email, badPassword );
+    });
+    self.then(function(){
+        this.assertErrorMessage( 'Invalid password' );
+    });
+});
+
+// ------------------------------------------------------------------- test yoself
+// these versions are for convenient use in other tests - they should throw errors if used improperly
+spaceghost.then( function(){
+    this.assertStepsRaise( 'LoginError', function(){
+        this.then( function(){
+            this.test.comment( 'testing (js) error thrown on bad email' );
+            this.user.login( 'nihilist', '1234' );
+        });
+    });
+});
+
+spaceghost.then( function(){
+    this.assertStepsRaise( 'LoginError', function(){
+        this.then( function(){
+            this.test.comment( 'testing (js) error thrown on bad password' );
+            this.user.login( email, '1234' );
+        });
+    });
+});
+/*
+*/
+// ===================================================================
+spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/modules/api.js b/test/casperjs/modules/api.js
new file mode 100644
index 0000000..ad05772
--- /dev/null
+++ b/test/casperjs/modules/api.js
@@ -0,0 +1,906 @@
+// =================================================================== module object, exports
+/** API object constructor.
+ *  @param {SpaceGhost} spaceghost  a spaceghost instance
+ *  @param {String} apikey          apikey for use when not using session authentication
+ */
+var API = function API( spaceghost, apikey ){
+    this.spaceghost = spaceghost;
+    this.apikey = apikey;
+
+    this.encodedIdExpectedLength = 16;
+    this.jQueryLocation = '../../static/scripts/libs/jquery/jquery.js';
+
+    this.configuration = new ConfigurationAPI( this );
+    this.histories  = new HistoriesAPI( this );
+    this.hdas       = new HDAAPI( this );
+    this.datasets   = new DatasetsAPI( this );
+    this.tools      = new ToolsAPI( this );
+    this.workflows  = new WorkflowsAPI( this );
+    this.users      = new UsersAPI( this );
+    this.visualizations = new VisualizationsAPI( this );
+};
+exports.API = API;
+
+/** Creates a new api module object.
+ *  @param {SpaceGhost} spaceghost a spaceghost instance
+ *  @exported
+ */
+exports.create = function createAPI( spaceghost, apikey ){
+    return new API( spaceghost, apikey );
+};
+
+
+API.prototype.toString = function toString(){
+    return ( this.spaceghost + '.API:'
+        + (( this.apikey )?( this.apikey ):( '(session)' )) );
+};
+
+// ------------------------------------------------------------------- APIError
+/** @class Thrown when the Galaxy API returns an error from a request */
+function APIError( msg, status ){
+    Error.apply( this, arguments );
+    this.name = "APIError";
+    this.message = msg;
+    this.status = status;
+}
+APIError.prototype = new Error();
+APIError.prototype.constructor = APIError;
+API.prototype.APIError = APIError;
+exports.APIError = APIError;
+
+/* ------------------------------------------------------------------- TODO:
+    could this be componentized into the basis for a js-based api binding/resource layer?
+*/
+// =================================================================== INTERNAL
+var require = patchRequire( require ),
+    utils = require( 'utils' );
+
+API.prototype._ajax = function _ajax( url, options ){
+    options = options || {};
+    options.async = false;
+
+    // PUT data needs to be stringified in jq.ajax and the content changed
+    //TODO: server side handling could change this?
+    if( ( options.type && [ 'PUT', 'POST' ].indexOf( options.type ) !== -1 )
+    &&  ( options.data ) ){
+        options.contentType = 'application/json';
+        options.data = JSON.stringify( options.data );
+    }
+
+    this.ensureJQuery();
+    var resp = this.spaceghost.evaluate( function( url, options ){
+        return jQuery.ajax( url, options );
+    }, url, options );
+    //this.spaceghost.debug( 'resp: ' + this.spaceghost.jsonStr( resp ) );
+
+    if( resp.status !== 200 ){
+        // grrr... this doesn't lose the \n\r\t
+        //throw new APIError( resp.responseText.replace( /[\s\n\r\t]+/gm, ' ' ).replace( /"/, '' ) );
+        this.spaceghost.debug( 'API error: code: ' + resp.status + ', response:\n' +
+            ( resp.responseJSON? this.spaceghost.jsonStr( resp.responseJSON ) : resp.responseText ) );
+        throw new APIError( resp.responseText, resp.status );
+    }
+    if( options.dataType === undefined || options.dataType === 'json' ){
+        return JSON.parse( resp.responseText );
+    }
+    return resp.responseText;
+};
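+
+// A minimal usage sketch (illustrative only; endpoint and params mirror the bindings below):
+//   var histories = api._ajax( '/api/histories', { data : { deleted : false } } );
+//   // since options.async is forced to false, the parsed JSON is returned synchronously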
+
+// =================================================================== TESTING
+/** Checks whether fn raises an APIError with the expected status and a message containing the given string.
+ *      NOTE: DOES NOT work with steps. @see SpaceGhost#assertStepsRaise
+ *  @param {Function} testFn        a function that may throw an error
+ *  @param {Integer} statusExpected the HTTP status code expected
+ *  @param {String} errMsgContains  some portion of the correct error msg
+ *  @private
+ */
+API.prototype._APIRaises = function _APIRaises( testFn, statusExpected, errMsgContains ){
+    var raised = false;
+    try {
+        testFn.call( this.spaceghost );
+    } catch( err ){
+        if( ( err.name === 'APIError' )
+        &&  ( err.status && err.status === statusExpected )
+        &&  ( err.message.indexOf( errMsgContains ) !== -1 ) ){
+            raised = true;
+
+        // re-raise other, non-searched-for errors
+        } else {
+            throw err;
+        }
+    }
+    return raised;
+};
+
+/** Simple assert raises.
+ *      NOTE: DOES NOT work with steps. @see SpaceGhost#assertStepsRaise
+ *  @param {Function} testFn        a function that may throw an error
+ *  @param {Integer} statusExpected the HTTP status code expected
+ *  @param {String} errMsgContains  some portion of the correct error msg
+ *  @param {String} msg             assertion message to display
+ */
+API.prototype.assertRaises = function assertRaises( testFn, statusExpected, errMsgContains, msg ){
+    return this.spaceghost.test.assert( this._APIRaises( testFn, statusExpected, errMsgContains ), msg  );
+};
+
+/** Simple assert does not raise.
+ *      NOTE: DOES NOT work with steps. @see SpaceGhost#assertStepsRaise
+ *  @param {Function} testFn        a function that may throw an error
+ *  @param {Integer} statusExpected the HTTP status code expected
+ *  @param {String} errMsgContains  some portion of the correct error msg
+ *  @param {String} msg             assertion message to display
+ */
+API.prototype.assertDoesntRaise = function assertDoesntRaise( testFn, statusExpected, errMsgContains, msg ){
+    return this.spaceghost.test.assert( !this._APIRaises( testFn, statusExpected, errMsgContains ), msg  );
+};
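+
+// Sketch of intended use (the status code and message fragment here are hypothetical examples):
+//   spaceghost.then( function(){
+//       this.api.assertRaises( function(){
+//           this.api.histories.show( 'not-an-encoded-id' );
+//       }, 400, 'unable to decode', 'show raises 400 on a malformed id' );
+//   });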
+
+// =================================================================== MISC
+API.prototype.isEncodedId = function isEncodedId( id ){
+    if( typeof id !== 'string' ){ return false; }
+    if( id.match( /[g-zG-Z]/ ) ){ return false; }
+    return ( id.length === this.encodedIdExpectedLength );
+};
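+// e.g. isEncodedId( 'f2db41e1fa331b3e' ) === true  (16 chars, no letters beyond a-f);
+//      isEncodedId( 'hello' ) === false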
+
+// ------------------------------------------------------------------- is type or throw err
+API.prototype.ensureId = function ensureId( id ){
+    if( !this.isEncodedId( id ) ){
+        throw new APIError( 'ID is not a valid encoded id: ' + id );
+    }
+    return id;
+};
+
+API.prototype.ensureObject = function ensureObject( obj ){
+    if( !utils.isObject( obj ) ){
+        throw new APIError( 'Not a valid object: ' + obj );
+    }
+    return obj;
+};
+
+// ------------------------------------------------------------------- jquery
+// using jq for the ajax in this module - that's why these are here
+//TODO:?? could go in spaceghost
+API.prototype.hasJQuery = function hasJQuery(){
+    return this.spaceghost.evaluate( function pageHasJQuery(){
+        var has = false;
+        try {
+            has = typeof ( jQuery + '' ) === 'string';
+        } catch( err ){}
+        return has;
+    });
+};
+
+API.prototype.ensureJQuery = function ensureJQuery(){
+    if( !this.hasJQuery() ){
+        var absLoc = this.jQueryLocation,
+            injected = this.spaceghost.page.injectJs( absLoc );
+        if( !injected ){
+            throw new APIError( 'Could not inject jQuery' );
+        }
+    }
+};
+
+
+// =================================================================== CONFIGURATION
+var ConfigurationAPI = function ConfigurationAPI( api ){
+    this.api = api;
+};
+ConfigurationAPI.prototype.toString = function toString(){
+    return this.api + '.ConfigurationAPI';
+};
+
+// -------------------------------------------------------------------
+ConfigurationAPI.prototype.urlTpls = {
+    index   : '/api/configuration'
+};
+
+ConfigurationAPI.prototype.index = function index(){
+    this.api.spaceghost.info( 'configuration.index' );
+
+    return this.api._ajax( this.urlTpls.index, {
+        data : {}
+    });
+};
+
+
+// =================================================================== HISTORIES
+var HistoriesAPI = function HistoriesAPI( api ){
+    this.api = api;
+};
+HistoriesAPI.prototype.toString = function toString(){
+    return this.api + '.HistoriesAPI';
+};
+
+// -------------------------------------------------------------------
+HistoriesAPI.prototype.urlTpls = {
+    index   : '/api/histories',
+    show    : '/api/histories/%s',
+    create  : '/api/histories',
+    delete_ : '/api/histories/%s',
+    undelete: '/api/histories/deleted/%s/undelete',
+    update  : '/api/histories/%s'
+};
+
+HistoriesAPI.prototype.index = function index( params ){
+    var spaceghost = this.api.spaceghost;
+    spaceghost.info( 'histories.index:\n' + spaceghost.jsonStr( params ) );
+    if( params === undefined ){
+        params = { deleted: false };
+    }
+    if( params.deleted === undefined ){
+        params.deleted = false;
+    }
+    spaceghost.debug( 'params (now):' + spaceghost.jsonStr( params ) );
+    return this.api._ajax( this.urlTpls.index, {
+        data : params
+    });
+};
+
+HistoriesAPI.prototype.show = function show( id, params ){
+    var spaceghost = this.api.spaceghost;
+    spaceghost.info( 'histories.show: ' + id + '\n' + spaceghost.jsonStr( params ) );
+    id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) );
+    if( params === undefined ){
+        params = { deleted: false };
+    }
+    if( params.deleted === undefined ){
+        params.deleted = false;
+    }
+    spaceghost.debug( 'params (now):' + spaceghost.jsonStr( params ) );
+    return this.api._ajax( utils.format( this.urlTpls.show, id ), {
+        data : params
+    });
+};
+
+HistoriesAPI.prototype.create = function create( payload ){
+    this.api.spaceghost.info( 'histories.create: ' + this.api.spaceghost.jsonStr( payload ) );
+
+    // py.payload <-> ajax.data
+    payload = this.api.ensureObject( payload );
+    return this.api._ajax( utils.format( this.urlTpls.create ), {
+        type : 'POST',
+        data : payload
+    });
+};
+
+HistoriesAPI.prototype.delete_ = function delete_( id, purge ){
+    this.api.spaceghost.info( 'histories.delete: ' + [ id, (( purge )?( '(purge!)' ):( '' )) ] );
+
+    // py.payload <-> ajax.data
+    var url = utils.format( this.urlTpls.delete_, this.api.ensureId( id ) );
+    if( purge ){
+        url += '?purge=True';
+    }
+    return this.api._ajax( url, {
+        type : 'DELETE'
+    });
+};
+
+HistoriesAPI.prototype.undelete = function undelete( id ){
+    //throw ( 'unimplemented' );
+    this.api.spaceghost.info( 'histories.undelete: ' + id );
+
+    return this.api._ajax( utils.format( this.urlTpls.undelete, this.api.ensureId( id ) ), {
+        type : 'POST'
+    });
+};
+
+HistoriesAPI.prototype.update = function update( id, payload ){
+    this.api.spaceghost.info( 'histories.update: ' + id + ',' + this.api.spaceghost.jsonStr( payload ) );
+
+    // py.payload <-> ajax.data
+    id = this.api.ensureId( id );
+    payload = this.api.ensureObject( payload );
+    var url = utils.format( this.urlTpls.update, id );
+
+    return this.api._ajax( url, {
+        type : 'PUT',
+        data : payload
+    });
+};
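+
+// Usage sketch for the histories binding (names and values are illustrative only):
+//   var history = api.histories.create({ name : 'casperjs test history' });
+//   api.histories.update( history.id, { annotation : 'created by the test suite' });
+//   api.histories.delete_( history.id, true ); // true purges as well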
+
+
+// =================================================================== HDAS
+var HDAAPI = function HDAAPI( api ){
+    this.api = api;
+};
+HDAAPI.prototype.toString = function toString(){
+    return this.api + '.HDAAPI';
+};
+
+// -------------------------------------------------------------------
+HDAAPI.prototype.urlTpls = {
+    index   : '/api/histories/%s/contents',
+    show    : '/api/histories/%s/contents/%s',
+    create  : '/api/histories/%s/contents',
+    update  : '/api/histories/%s/contents/%s'
+};
+
+HDAAPI.prototype.index = function index( historyId, ids ){
+    this.api.spaceghost.info( 'hdas.index: ' + [ historyId, ids ] );
+    var data = {};
+    if( ids ){
+        ids = ( utils.isArray( ids ) )?( ids.join( ',' ) ):( ids );
+        data.ids = ids;
+    }
+
+    return this.api._ajax( utils.format( this.urlTpls.index, this.api.ensureId( historyId ) ), {
+        data : data
+    });
+};
+
+HDAAPI.prototype.show = function show( historyId, id, deleted ){
+    this.api.spaceghost.info( 'hdas.show: ' + [ historyId, id, (( deleted )?( 'w/deleted' ):( '' )) ] );
+
+    id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) );
+    deleted = deleted || false;
+    return this.api._ajax( utils.format( this.urlTpls.show, this.api.ensureId( historyId ), id ), {
+        data : { deleted: deleted }
+    });
+};
+
+HDAAPI.prototype.create = function create( historyId, payload ){
+    this.api.spaceghost.info( 'hdas.create: ' + [ historyId, this.api.spaceghost.jsonStr( payload ) ] );
+
+    // py.payload <-> ajax.data
+    payload = this.api.ensureObject( payload );
+    return this.api._ajax( utils.format( this.urlTpls.create, this.api.ensureId( historyId ) ), {
+        type : 'POST',
+        data : payload
+    });
+};
+
+HDAAPI.prototype.update = function update( historyId, id, payload ){
+    this.api.spaceghost.info( 'hdas.update: ' + [ historyId, id, this.api.spaceghost.jsonStr( payload ) ] );
+
+    // py.payload <-> ajax.data
+    historyId = this.api.ensureId( historyId );
+    id = this.api.ensureId( id );
+    payload = this.api.ensureObject( payload );
+    var url = utils.format( this.urlTpls.update, historyId, id );
+
+    return this.api._ajax( url, {
+        type : 'PUT',
+        data : payload
+    });
+};
+
+HDAAPI.prototype.delete_ = function delete_( historyId, id, purge ){
+    this.api.spaceghost.info( 'hdas.delete_: ' + [ historyId, id ] );
+    historyId = this.api.ensureId( historyId );
+    id = this.api.ensureId( id );
+
+    // purge must be attached as a GET param - jQuery drops the body on DELETE
+    // (the update url template is reused here: delete shares the same URL)
+    var url = utils.format( this.urlTpls.update, historyId, id );
+    if( purge ){
+        url += '?purge=True';
+    }
+    return this.api._ajax( url, {
+        type : 'DELETE'
+    });
+};
+
+
+
+// =================================================================== DATASETS
+var DatasetsAPI = function DatasetsAPI( api ){
+    this.api = api;
+};
+DatasetsAPI.prototype.toString = function toString(){
+    return this.api + '.DatasetsAPI';
+};
+
+// -------------------------------------------------------------------
+DatasetsAPI.prototype.urlTpls = {
+    index   : '/api/datasets',
+    show    : '/api/datasets/%s',
+    // display is routed through history contents rather than /api/datasets
+    display : '/api/histories/%s/contents/%s/display'
+};
+
+DatasetsAPI.prototype.index = function index(){
+    this.api.spaceghost.info( 'datasets.index: ' );
+    var data = {};
+
+    return this.api._ajax( utils.format( this.urlTpls.index ), {
+        data : data
+    });
+};
+
+DatasetsAPI.prototype.show = function show( id ){
+    this.api.spaceghost.info( 'datasets.show: ' + [ id ] );
+    var data = {};
+
+    return this.api._ajax( utils.format( this.urlTpls.show, id ), {
+        data : data
+    });
+};
+
+DatasetsAPI.prototype.display = function display( historyId, id, params ){
+    this.api.spaceghost.info( 'datasets.display: ' + [ historyId, id ] );
+    if( params === undefined ){
+        params = {};
+    }
+    return this.api._ajax( utils.format( this.urlTpls.display, this.api.ensureId( historyId ), id ), {
+        data : params,
+        dataType : 'text'
+    });
+};
+
+
+
+// =================================================================== TOOLS
+var ToolsAPI = function ToolsAPI( api ){
+    this.api = api;
+};
+ToolsAPI.prototype.toString = function toString(){
+    return this.api + '.ToolsAPI';
+};
+
+// -------------------------------------------------------------------
+ToolsAPI.prototype.urlTpls = {
+    index   : '/api/tools',
+    show    : '/api/tools/%s',
+    create  : '/api/tools'
+};
+
+ToolsAPI.prototype.index = function index( in_panel, trackster ){
+    this.api.spaceghost.info( 'tools.index: ' + [ in_panel, trackster ] );
+    var data = {};
+    // in_panel defaults to true, trackster defaults to false
+    if( in_panel !== undefined ){
+        data.in_panel = ( in_panel )?( true ):( false );
+    }
+    if( trackster !== undefined ){
+        data.trackster = ( trackster )?( true ):( false );
+    }
+    return this.api._ajax( utils.format( this.urlTpls.index ), {
+        data : data
+    });
+};
+
+ToolsAPI.prototype.show = function show( id ){
+    this.api.spaceghost.info( 'tools.show: ' + [ id ] );
+    var data = {};
+
+    data.io_details = true;
+
+    return this.api._ajax( utils.format( this.urlTpls.show, id ), {
+        data : data
+    });
+};
+
+ToolsAPI.prototype.create = function create( payload ){
+    this.api.spaceghost.info( 'tools.create: ' + [ this.api.spaceghost.jsonStr( payload ) ] );
+
+    // py.payload <-> ajax.data
+    payload = this.api.ensureObject( payload );
+    return this.api._ajax( utils.format( this.urlTpls.create ), {
+        type : 'POST',
+        data : payload
+    });
+};
+
+//ToolsAPI.prototype.uploadByForm = function upload( historyId, options ){
+//    this.api.spaceghost.debug( '-------------------------------------------------' );
+//    this.api.spaceghost.info( 'tools.upload: ' + [ historyId, '(contents)', this.api.spaceghost.jsonStr( options ) ] );
+//    this.api.ensureId( historyId );
+//    options = options || {};
+//
+//    this.api.spaceghost.evaluate( function( url ){
+//        var html = [
+//            '<form action="', url, '" method="post" enctype="multipart/form-data">',
+//                '<input type="file" name="files_0|file_data">',
+//                '<input type="hidden" name="tool_id" />',
+//                '<input type="hidden" name="history_id" />',
+//                '<input type="hidden" name="inputs" />',
+//                '<button type="submit">Submit</button>',
+//            '</form>'
+//        ];
+//        document.write( html.join('') );
+//        //document.getElementsByTagName( 'body' )[0].innerHTML = html.join('');
+//    }, utils.format( this.urlTpls.create ) );
+//
+//    this.api.spaceghost.fill( 'form', {
+//        'files_0|file_data' : '1.txt',
+//        'tool_id'           : 'upload1',
+//        'history_id'        : historyId,
+//        'inputs'            : JSON.stringify({
+//            'file_type'         : 'auto',
+//            'files_0|type'      : 'upload_dataset',
+//            'to_posix_lines'    : true,
+//            'space_to_tabs'     : false,
+//            'dbkey'             : '?'
+//        })
+//    }, true );
+//    // this causes the page to switch...I think
+//};
+
+/** paste a file - either from a string (options.paste) or from a filesystem file (options.filepath) */
+ToolsAPI.prototype.uploadByPaste = function uploadByPaste( historyId, options ){
+    this.api.spaceghost.info( 'tools.upload: ' + [ historyId, this.api.spaceghost.jsonStr( options ) ] );
+    this.api.ensureId( historyId );
+    options = options || {};
+
+    var inputs = {
+        'files_0|NAME'      : options.name  || 'Test Dataset',
+        'dbkey'             : options.dbkey || '?',
+        'file_type'         : options.ext   || 'auto'
+    };
+    if( options.filepath ){
+        var fs = require( 'fs' );
+        inputs[ 'files_0|url_paste' ] = fs.read( options.filepath );
+
+    } else if( options.paste ){
+        inputs[ 'files_0|url_paste' ] = options.paste;
+    }
+    if( options.posix ){
+        inputs[ 'files_0|to_posix_lines' ] = 'Yes';
+    }
+    if( options.tabs ){
+        inputs[ 'files_0|space_to_tab' ] = 'Yes';
+    }
+    return this.api._ajax( utils.format( this.urlTpls.create ), {
+        type : 'POST',
+        data : {
+            tool_id     : 'upload1',
+            upload_type : 'upload_dataset',
+            history_id  : historyId,
+            inputs      : inputs
+        }
+    });
+};
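+
+// Sketch: pasting a small text dataset (historyId and contents are illustrative):
+//   api.tools.uploadByPaste( historyId, {
+//       name  : '1.txt',
+//       paste : 'line 1\nline 2',
+//       posix : true
+//   });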
+
+/** post a file to the upload1 tool over ajax */
+ToolsAPI.prototype.upload = function upload( historyId, options ){
+    this.api.spaceghost.info( 'api.tools.upload: ' + [ historyId, this.api.spaceghost.jsonStr( options ) ] );
+    this.api.ensureId( historyId );
+    options = options || {};
+
+    // We can post an upload using jQuery and FormData (see below); the more
+    //  difficult part is attaching the file without user intervention.
+    //  To do this we (unfortunately) need to create a form that phantom can attach the file to first.
+    this.api.spaceghost.evaluate( function(){
+        $( 'body' ).append( '<input type="file" name="casperjs-upload-file" />' );
+    });
+    this.api.spaceghost.page.uploadFile( 'input[name="casperjs-upload-file"]', options.filepath );
+
+    var inputs = {
+        'file_type'         : options.ext || 'auto',
+        'files_0|type'      : 'upload_dataset',
+        'dbkey'             : options.dbkey || '?'
+    };
+    if( options.posix ){
+        inputs[ 'files_0|to_posix_lines' ] = 'Yes';
+    }
+    if( options.tabs ){
+        inputs[ 'files_0|space_to_tab' ] = 'Yes';
+    }
+
+    var response = this.api.spaceghost.evaluate( function( url, historyId, inputs ){
+        var file = $( 'input[name="casperjs-upload-file"]' )[0].files[0],
+            formData = new FormData();
+
+        formData.append( 'files_0|file_data', file );
+        formData.append( 'history_id', historyId );
+        formData.append( 'tool_id', 'upload1' );
+        formData.append( 'inputs', JSON.stringify( inputs ) );
+        return $.ajax({
+            url         : url,
+            async       : false,
+            type        : 'POST',
+            data        : formData,
+            // when sending FormData don't have jq process or cache the data
+            cache       : false,
+            contentType : false,
+            processData : false,
+            // if we don't add this, payload isn't processed as JSON
+            headers     : { 'Accept': 'application/json' }
+        });
+    }, utils.format( this.urlTpls.create ), historyId, inputs );
+
+    if( response.status !== 200 ){
+        // grrr... this doesn't lose the \n\r\t
+        //throw new APIError( response.responseText.replace( /[\s\n\r\t]+/gm, ' ' ).replace( /"/, '' ) );
+        this.api.spaceghost.debug( 'API error: code: ' + response.status + ', response:\n' +
+            ( response.responseJSON? this.api.spaceghost.jsonStr( response.responseJSON ) : response.responseText ) );
+        throw new APIError( response.responseText, response.status );
+    }
+    return JSON.parse( response.responseText );
+};
+
+/** amount of time allowed to upload a file (before erroring) */
+ToolsAPI.prototype.DEFAULT_UPLOAD_TIMEOUT = 30 * 1000;
+
+/** add two casperjs steps - upload a file, wait for the job to complete, and run 'then' when they are */
+ToolsAPI.prototype.thenUpload = function thenUpload( historyId, options, then ){
+    var spaceghost = this.api.spaceghost,
+        uploadedId;
+
+    // upload via the api
+    spaceghost.then( function(){
+        var returned = this.api.tools.upload( historyId, options );
+        this.debug( 'returned: ' + this.jsonStr( returned ) );
+        uploadedId = returned.outputs[0].id;
+        this.debug( 'uploadedId: ' + uploadedId );
+    });
+
+    spaceghost.then( function(){
+        var hda = null;
+        this.waitFor(
+            function testHdaState(){
+                hda = spaceghost.api.hdas.show( historyId, uploadedId );
+                spaceghost.debug( spaceghost.jsonStr( hda.state ) );
+                return !( hda.state === 'upload' || hda.state === 'queued' || hda.state === 'running' );
+            },
+            function _then(){
+                spaceghost.info( 'upload finished: ' + uploadedId );
+                if( then ){
+                    then.call( spaceghost, uploadedId, hda );
+                }
+                //var hda = spaceghost.api.hdas.show( historyId, uploadedId );
+                //spaceghost.debug( spaceghost.jsonStr( hda ) );
+            },
+            function timeout(){
+                throw new APIError( 'timeout uploading file', 408 );
+            },
+            options.timeout || spaceghost.api.tools.DEFAULT_UPLOAD_TIMEOUT
+        );
+    });
+    return spaceghost;
+};
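+
+// Sketch: upload a file and assert on the finished hda (the filepath is illustrative):
+//   spaceghost.api.tools.thenUpload( historyId, { filepath : '../../test-data/1.txt' },
+//       function( uploadedId, hda ){
+//           this.test.assert( hda.state === 'ok', 'hda reached the ok state' );
+//       });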
+
+/** add two casperjs steps - upload multiple files (described in optionsArray) and wait for all jobs to complete */
+ToolsAPI.prototype.thenUploadMultiple = function thenUploadMultiple( historyId, optionsArray, then ){
+    var spaceghost = this.api.spaceghost,
+        uploadedIds = [];
+
+    this.api.spaceghost.then( function(){
+        var spaceghost = this;
+        optionsArray.forEach( function( options ){
+            var returned = spaceghost.api.tools.upload( historyId, options );
+            spaceghost.debug( 'uploaded:' + spaceghost.jsonStr( returned ) );
+            uploadedIds.push( returned.outputs[0].id );
+        });
+    });
+
+    // wait for every hda in the history to finish running - in other words, don't rely on uploadedIds
+    this.api.spaceghost.then( function(){
+        this.debug( this.jsonStr( uploadedIds ) );
+        this.waitFor(
+            function testHdaStates(){
+                var hdas = spaceghost.api.hdas.index( historyId ),
+                    running = hdas.filter( function( hda ){
+                        return ( hda.state === 'upload' || hda.state === 'queued' || hda.state === 'running' );
+                    }).map( function( hda ){
+                        return hda.id;
+                    });
+                //spaceghost.debug( 'still uploading: ' + spaceghost.jsonStr( running ) );
+                return running.length === 0;
+            },
+            function _then(){
+                var hdas = spaceghost.api.hdas.index( historyId );
+                spaceghost.debug( spaceghost.jsonStr( hdas ) );
+                if( then ){
+                    then.call( spaceghost, uploadedIds );
+                }
+            },
+            function timeout(){
+                throw new APIError( 'timeout uploading files', 408 );
+            },
+            ( ( optionsArray[0] && optionsArray[0].timeout ) || spaceghost.api.tools.DEFAULT_UPLOAD_TIMEOUT ) * optionsArray.length
+        );
+    });
+    return spaceghost;
+};
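+
+// Sketch: upload two files and wait for both jobs (filepaths are illustrative):
+//   spaceghost.api.tools.thenUploadMultiple( historyId, [
+//       { filepath : '../../test-data/1.txt' },
+//       { filepath : '../../test-data/2.txt' }
+//   ], function( uploadedIds ){
+//       this.info( 'all uploads finished: ' + uploadedIds.join( ', ' ) );
+//   });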
+
+
+/** get the current history's id, then upload there */
+ToolsAPI.prototype.thenUploadToCurrent = function thenUploadToCurrent( options, then ){
+    var spaceghost = this.api.spaceghost;
+    return spaceghost.then( function(){
+        var currentHistoryId = this.api.histories.index()[0].id;
+        spaceghost.api.tools.thenUpload( currentHistoryId, options, then );
+    });
+};
+
+
+// =================================================================== WORKFLOWS
+var WorkflowsAPI = function WorkflowsAPI( api ){
+    this.api = api;
+};
+WorkflowsAPI.prototype.toString = function toString(){
+    return this.api + '.WorkflowsAPI';
+};
+
+// -------------------------------------------------------------------
+WorkflowsAPI.prototype.urlTpls = {
+    index   : '/api/workflows',
+    show    : '/api/workflows/%s',
+    // run a workflow
+    create  : '/api/workflows',
+    update  : '/api/workflows/%s',
+
+    upload  : '/api/workflows/upload', // POST
+    download: '/api/workflows/download/%s' // GET
+};
+
+WorkflowsAPI.prototype.index = function index(){
+    this.api.spaceghost.info( 'workflows.index' );
+    var data = {};
+
+    return this.api._ajax( utils.format( this.urlTpls.index ), {
+        data : data
+    });
+};
+
+WorkflowsAPI.prototype.show = function show( id ){
+    this.api.spaceghost.info( 'workflows.show: ' + [ id ] );
+    var data = {};
+
+    id = ( id === 'most_recently_used' )?( id ):( this.api.ensureId( id ) );
+    return this.api._ajax( utils.format( this.urlTpls.show, id ), {
+        data : data
+    });
+};
+
+WorkflowsAPI.prototype.create = function create( payload ){
+    this.api.spaceghost.info( 'workflows.create: ' + [ this.api.spaceghost.jsonStr( payload ) ] );
+
+    // py.payload <-> ajax.data
+    payload = this.api.ensureObject( payload );
+    return this.api._ajax( utils.format( this.urlTpls.create ), {
+        type : 'POST',
+        data : payload
+    });
+};
+
+WorkflowsAPI.prototype.upload = function upload( workflowJSON ){
+    this.api.spaceghost.info( 'workflows.upload: ' + [ this.api.spaceghost.jsonStr( workflowJSON ) ] );
+
+    return this.api._ajax( utils.format( this.urlTpls.upload ), {
+        type : 'POST',
+        data : { 'workflow': this.api.ensureObject( workflowJSON ) }
+    });
+};
+
+
+// =================================================================== USERS
+var UsersAPI = function UsersAPI( api ){
+    this.api = api;
+};
+UsersAPI.prototype.toString = function toString(){
+    return this.api + '.UsersAPI';
+};
+
+// -------------------------------------------------------------------
+//NOTE: lots of admin only functionality in this section
+UsersAPI.prototype.urlTpls = {
+    index   : '/api/users',
+    show    : '/api/users/%s',
+    create  : '/api/users',
+    delete_ : '/api/users/%s',
+    undelete: '/api/users/deleted/%s/undelete',
+    update  : '/api/users/%s'
+};
+
+UsersAPI.prototype.index = function index( deleted ){
+    this.api.spaceghost.info( 'users.index: ' + (( deleted )?( 'w deleted' ):( '(wo deleted)' )) );
+
+    deleted = deleted || false;
+    return this.api._ajax( this.urlTpls.index, {
+        data : { deleted: deleted }
+    });
+};
+
+UsersAPI.prototype.show = function show( id, deleted ){
+    this.api.spaceghost.info( 'users.show: ' + [ id, (( deleted )?( 'w deleted' ):( '' )) ] );
+
+    id = ( id === 'current' )?( id ):( this.api.ensureId( id ) );
+    deleted = deleted || false;
+    return this.api._ajax( utils.format( this.urlTpls.show, id ), {
+        data : { deleted: deleted }
+    });
+};
+
+UsersAPI.prototype.create = function create( payload ){
+    this.api.spaceghost.info( 'users.create: ' + this.api.spaceghost.jsonStr( payload ) );
+
+    // py.payload <-> ajax.data
+    payload = this.api.ensureObject( payload );
+    return this.api._ajax( utils.format( this.urlTpls.create ), {
+        type : 'POST',
+        data : payload
+    });
+};
+
+UsersAPI.prototype.delete_ = function delete_( id, purge ){
+    this.api.spaceghost.info( 'users.delete: ' + [ id, (( purge )?( '(purge!)' ):( '' )) ] );
+
+    // py.payload <-> ajax.data
+    var payload = ( purge )?({ purge: true }):({});
+    return this.api._ajax( utils.format( this.urlTpls.delete_, this.api.ensureId( id ) ), {
+        type : 'DELETE',
+        data : payload
+    });
+};
+
+UsersAPI.prototype.undelete = function undelete( id ){
+    //throw ( 'unimplemented' );
+    this.api.spaceghost.info( 'users.undelete: ' + id );
+
+    return this.api._ajax( utils.format( this.urlTpls.undelete, this.api.ensureId( id ) ), {
+        type : 'POST'
+    });
+};
+
+UsersAPI.prototype.update = function update( id, payload ){
+    this.api.spaceghost.info( 'users.update: ' + id + ',' + this.api.spaceghost.jsonStr( payload ) );
+
+    // py.payload <-> ajax.data
+    id = this.api.ensureId( id );
+    payload = this.api.ensureObject( payload );
+    var url = utils.format( this.urlTpls.update, id );
+
+    return this.api._ajax( url, {
+        type : 'PUT',
+        data : payload
+    });
+};
+
+
+// =================================================================== VISUALIZATIONS
+var VisualizationsAPI = function VisualizationsAPI( api ){
+    this.api = api;
+};
+VisualizationsAPI.prototype.toString = function toString(){
+    return this.api + '.VisualizationsAPI';
+};
+
+// -------------------------------------------------------------------
+VisualizationsAPI.prototype.urlTpls = {
+    index   : '/api/visualizations',
+    show    : '/api/visualizations/%s',
+    create  : '/api/visualizations',
+    //delete_ : '/api/visualizations/%s',
+    //undelete: '/api/visualizations/deleted/%s/undelete',
+    update  : '/api/visualizations/%s'
+};
+
+VisualizationsAPI.prototype.index = function index(){
+    this.api.spaceghost.info( 'visualizations.index' );
+
+    return this.api._ajax( this.urlTpls.index );
+};
+
+VisualizationsAPI.prototype.show = function show( id ){
+    this.api.spaceghost.info( 'visualizations.show' );
+
+    return this.api._ajax( utils.format( this.urlTpls.show, this.api.ensureId( id ) ) );
+};
+
+VisualizationsAPI.prototype.create = function create( payload ){
+    this.api.spaceghost.info( 'visualizations.create: ' + this.api.spaceghost.jsonStr( payload ) );
+
+    // py.payload <-> ajax.data
+    payload = this.api.ensureObject( payload );
+    return this.api._ajax( utils.format( this.urlTpls.create ), {
+        type : 'POST',
+        data : payload
+    });
+};
+
+VisualizationsAPI.prototype.update = function update( id, payload ){
+    this.api.spaceghost.info( 'visualizations.update: ' + id + ',' + this.api.spaceghost.jsonStr( payload ) );
+
+    // py.payload <-> ajax.data
+    id = this.api.ensureId( id );
+    payload = this.api.ensureObject( payload );
+    var url = utils.format( this.urlTpls.update, id );
+
+    return this.api._ajax( url, {
+        type : 'PUT',
+        data : payload
+    });
+};
diff --git a/test/casperjs/modules/historyoptions.js b/test/casperjs/modules/historyoptions.js
new file mode 100644
index 0000000..a032634
--- /dev/null
+++ b/test/casperjs/modules/historyoptions.js
@@ -0,0 +1,200 @@
+// =================================================================== module object, exports
+/** Creates a new historyoptions module object.
+ *  @exported
+ */
+exports.create = function createHistoryOptions( spaceghost ){
+    return new HistoryOptions( spaceghost );
+};
+
+/** HistoryOptions object constructor.
+ *  @param {SpaceGhost} spaceghost a spaceghost instance
+ */
+var HistoryOptions = function HistoryOptions( spaceghost ){
+    //??: circ ref?
+    this.spaceghost = spaceghost;
+};
+exports.HistoryOptions = HistoryOptions;
+
+HistoryOptions.prototype.toString = function toString(){
+    return this.spaceghost + '.HistoryOptions';
+};
+
+// -------------------------------------------------------------------
+/* TODO:
+    some of the fns below can be applied to any popup
+*/
+// =================================================================== internal
+var require = patchRequire( require ),
+    xpath = require( 'casper' ).selectXPath;
+
+// =================================================================== API (external)
+/** Just open the menu
+ *  @param {Function} fn function to call when the menu opens
+ *  @returns {Any} the return value of fn
+ */
+HistoryOptions.prototype.openMenu = function openMenu( fn ){
+    return this.spaceghost.jumpToTop( function(){
+        if( !this.visible( this.historyoptions.data.selectors.menu ) ){
+            this.click( this.historyoptions.data.selectors.button );
+        }
+        return fn.call( this );
+    });
+};
+
+/** Click an option by label.
+ *  @param {String} optionLabel the label of the option to click (can be partial?)
+ *  @returns {SpaceGhost} for chaining
+ */
+HistoryOptions.prototype.clickOption = function clickOption( optionLabel ){
+    this.openMenu( function(){
+        this.click( this.historyoptions.data.selectors.optionXpathByLabelFn( optionLabel ) );
+        // shouldn't need to clear - clicking an option will do that
+    });
+    return this.spaceghost;
+};
+
+/** Is the history option with the given label showing as toggled?
+ *  @param {String} optionLabel the label of the option to check (can be partial?)
+ *  @returns {Boolean} true if the option is on, false if off OR not a toggle
+ */
+HistoryOptions.prototype.isOn = function isOn( optionLabel ){
+    return this.openMenu( function(){
+        var toggleIconInfo = this.elementInfoOrNull(
+            this.historyoptions.data.selectors.optionIsOnXpathByLabelFn( optionLabel ) );
+        // have to clear manually
+        this.click( 'body' );
+        return !!toggleIconInfo;
+    });
+};
+
+/** Toggle the option - optionally forcing it on or off.
+ *  @param {String} optionLabel the label of the option to check (can be partial?)
+ *  @param {Boolean} force  if true ensure option is on, if false ensure it's off,
+ *      if undefined simply toggle
+ *  @returns the spaceghost instance from clicking the option, or the value of force if no click was needed
+ */
+HistoryOptions.prototype.toggle = function toggle( optionLabel, force ){
+    var isOn = this.isOn( optionLabel );
+    if( ( force === false && isOn )
+    ||  ( force === true  && !isOn )
+    ||  ( force === undefined ) ){
+        return this.clickOption( optionLabel );
+    }
+    return force;
+};
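+
+// Sketch: ensure a toggleable option is on regardless of its current state
+// (the label below is illustrative - any toggleable menu entry works):
+//   spaceghost.historyoptions.toggle( 'Include Deleted Datasets', true );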
+
+// -------------------------------------------------------------------
+// these options lead to controller pages - encapsulate those pages here
+/** corresponds to history options menu: 'Saved Histories'
+ *  @param {String} historyName the name of the history
+ */
+//HistoryOptions.prototype.savedHistoryByName = function savedHistoryByName( historyName ){
+//};
+/** corresponds to history options menu: 'Histories Shared with Me'
+ *  @param {String} historyName the name of the history
+ */
+//HistoryOptions.prototype.sharedHistoryByName = function sharedHistoryByName( historyName ){
+//};
+
+/** corresponds to history options menu: 'Create New'
+ */
+//HistoryOptions.prototype.createNew = function createNew(){
+//};
+
+/** corresponds to history options menu: 'Copy History'
+ */
+//HistoryOptions.prototype.copyHistory = function copyHistory(){
+//};
+
+/** corresponds to history options menu: 'Copy Datasets'
+ */
+//HistoryOptions.prototype.copyDatasets = function copyDatasets(){
+//};
+
+/** corresponds to history options menu: 'Extract Workflow'
+ */
+//HistoryOptions.prototype.extractWorkflow = function extractWorkflow(){
+//};
+
+/** corresponds to history options menu: 'Share or Publish'
+ */
+//HistoryOptions.prototype.shareHistoryViaLink = function shareHistoryViaLink(){
+//};
+/** corresponds to history options menu: 'Share or Publish'
+ */
+//HistoryOptions.prototype.publishHistory = function publishHistory(){
+//};
+/** corresponds to history options menu: 'Share or Publish'
+ */
+//HistoryOptions.prototype.shareHistoryWithUser = function shareHistoryWithUser(){
+//};
+
+/** corresponds to history options menu: 'Dataset Security'
+ */
+//HistoryOptions.prototype.managePermissions = function managePermissions(){
+//};
+/** corresponds to history options menu: 'Dataset Security'
+ */
+//HistoryOptions.prototype.accessPermissions = function accessPermissions(){
+//};
+
+/** corresponds to history options menu: 'Resume Paused Jobs'
+ */
+//HistoryOptions.prototype.resumePausedJobs = function resumePausedJobs(){
+//};
+
+
+// ------------------------------------------------------------------- options that control the hpanel
+/** corresponds to history options menu: 'Collapse Expanded Datasets'
+ */
+HistoryOptions.prototype.collapseExpanded = function collapseExpanded( then ){
+    return this.spaceghost.then( function(){
+        this.historyoptions.clickOption( this.historyoptions.data.labels.options.collapseExpanded );
+        this.wait( 500, then );
+    });
+};
+
+// =================================================================== SELECTORS
+//TODO: data is not a very good name
+HistoryOptions.prototype.data = {
+    selectors : {
+        button      : '#history-options-button',
+        buttonIcon  : '#history-options-button span.fa-cog',
+        menu        : '#history-options-button-menu',
+        optionXpathByLabelFn : function optionXpathByLabelFn( label ){
+            return xpath( '//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"' + label + '")]]' );
+        },
+        optionIsOnXpathByLabelFn : function optionIsOnXpathByLabelFn( label ){
+            return xpath( '//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"' + label + '")]]'
+                        + '/span[@class="fa fa-check"]' );
+        }
+    },
+    labels : {
+        options : {
+            //History Lists
+            savedHistories          : "Saved Histories",
+            sharedHistories         : "Histories Shared with Me",
+            //Current History
+            createNew               : "Create New",
+            copyHistory             : "Copy History",
+            copyDatasets            : "Copy Datasets",
+            shareOrPublish          : "Share or Publish",
+            extractWorkflow         : "Extract Workflow",
+            datasetSecurity         : "Dataset Security",
+            resumePausedJobs        : "Resume Paused Jobs",
+            collapseExpanded        : "Collapse Expanded Datasets",
+            unhideHiddenDatasets    : "Unhide Hidden Datasets",
+            deleteHiddenDatasets    : "Delete Hidden Datasets",
+            showStructure           : "Show Structure",
+            exportCitations         : "Export Tool Citations",
+            exportToFile            : "Export History to File",
+            deleteHistory           : "Delete",
+            //deleteHistoryPermanently : "Delete Permanently",
+            //Other Actions
+            importFromFile          : "Import from File"
+        }
+    }
+};
diff --git a/test/casperjs/modules/historypanel.js b/test/casperjs/modules/historypanel.js
new file mode 100644
index 0000000..963bafd
--- /dev/null
+++ b/test/casperjs/modules/historypanel.js
@@ -0,0 +1,424 @@
+// =================================================================== module object, exports
+/** Creates a new historypanel module object.
+ *  @exported
+ */
+exports.create = function createHistoryPanel( spaceghost ){
+    return new HistoryPanel( spaceghost );
+};
+
+/** HistoryPanel object constructor.
+ *  @param {SpaceGhost} spaceghost a spaceghost instance
+ */
+var HistoryPanel = function HistoryPanel( spaceghost ){
+    this.spaceghost = spaceghost;
+};
+exports.HistoryPanel = HistoryPanel;
+
+HistoryPanel.prototype.toString = function toString(){
+    return this.spaceghost + '.HistoryPanel';
+};
+
+// -------------------------------------------------------------------
+/* TODO:
+    conv.fns:
+        switch to history frame and wait for any possible rendering - waitForHistoryPanel
+        undelete hda
+        rename history
+    consider removing all timeouts, callbacks - just use thens in test
+
+*/
+// =================================================================== INTERNAL
+var require = patchRequire( require ),
+    utils = require( 'utils' ),
+    xpath = require( 'casper' ).selectXPath;
+
+// =================================================================== API (external)
+// ------------------------------------------------------------------- frame control
+///** Hover over an element in the history panel.
+// *  @param {String} selector        a css or xpath selector for an historyItemWrapper
+// */
+//HistoryPanel.prototype.hoverOver = function hoverOver( selector ){
+//    var spaceghost = this.spaceghost,
+//        elementInfo = spaceghost.getElementInfo( selector );
+//    spaceghost.page.sendEvent( 'mousemove', elementInfo.x + 1, elementInfo.y + 1 );
+//    return spaceghost;
+//};
+
+// ------------------------------------------------------------------- hdas
+///** Parse the hid and name from an HDA title.
+// *      NOTE: if more than one is found, will return the first found.
+// *  @param {String} title   the title of the hda
+// *  @returns {Object}       of the form { hid: <hid>, name: <name> }
+// */
+//HistoryPanel.prototype.hdaHidAndNameFromTitle = function hdaHidAndNameFromTitle( title ){
+//    var sep = ': ', split = title.split( sep, 1 );
+//    return {
+//        name : (( split.length >= 2 )?( split[1] ):( split[0] )),
+//        hid  : (( split.length >= 2 )?( parseInt( split[0], 10 ) ):( undefined ))
+//    };
+//};
+
+/** Find the casper element info of the hda wrapper given the hda title.
+ *      NOTE: if more than one is found, will return the first found.
+ *  @param {String} title   the title of the hda
+ *  @returns {Object|null} ElementInfo of the historyItemWrapper found, null if not found
+ */
+HistoryPanel.prototype.hdaElementInfoByTitle = function hdaElementInfoByTitle( title ){
+    var wrapperXpath = xpath( '//span[@class="name" and contains(text(),"' + title + '")]/../../..' );
+    return this.spaceghost.elementInfoOrNull( wrapperXpath );
+};
+
+/** Get the state string of the given hda.
+ *      NOTE: if more than one is found, will return the first found.
+ *  @param {Selector} hdaSelector a selector for the desired hdaWrapper
+ *  @returns {String|undefined}  class string of the historyItemWrapper found, undefined if not found or set
+ */
+HistoryPanel.prototype.getHdaState = function getHdaState( hdaSelector ){
+    var found = null,
+        hdaInfo = this.spaceghost.elementInfoOrNull( hdaSelector );
+    if( !hdaInfo ){ return undefined; }
+    return (( found = hdaInfo.attributes[ 'class' ].match( /state\-(\w+)/ ) )?( found[1] ):( undefined ));
+};
+
+/** Get the encoded database/API id of the given hda.
+ *      NOTE: if more than one is found, will return the first found.
+ *  @param {Selector} hdaSelector a selector for the desired hdaWrapper
+ *  @returns {String|undefined}  db id string of the hda found, undefined if not found or set
+ */
+HistoryPanel.prototype.getHdaEncodedId = function getHdaEncodedId( hdaSelector ){
+    var found = null,
+        hdaInfo = this.spaceghost.elementInfoOrNull( hdaSelector );
+    if( !hdaInfo ){ return undefined; }
+    return (( found = hdaInfo.attributes.id.match( /historyItem\-(\w+)/ ) )?( found[1] ):( undefined ));
+};
+
+// ------------------------------------------------------------------- step functions
+/** Moves into history iframe and waits until hdas are visible or empty message is.
+ *      NOTE: is more than one Casper step.
+ *  @see Casper#waitFor
+ */
+HistoryPanel.prototype.waitForHdas = function waitForHdas( then, timeout, maxWait ){
+    //TODO:?? should this wait until the selectors are in AND they are opaque?
+    var spaceghost = this.spaceghost;
+    spaceghost.then( function waitingForHdas(){
+        this.waitFor(
+            function checkHpanel(){
+                var subtitleOpacity = this.evaluate( function( selector ){
+                    return $( selector ).css( 'opacity' );
+                }, this.historypanel.data.selectors.history.subtitle );
+                // wait until the subtitle is faded in and either the hdas or the empty history msg is displayed
+                return ( subtitleOpacity >= 1
+                       && ( ( this.visible( this.historypanel.data.selectors.hda.wrapper.itemClass ) )
+                          ||( this.visible( this.historypanel.data.selectors.history.emptyMsg ) ) ) );
+            }, then, timeout, maxWait );
+    });
+    return spaceghost;
+};
+
+/** Moves into history iframe and waits until hdas are visible or empty message is.
+ *      NOTE: is more than one Casper step.
+ *  @see Casper#waitFor
+ */
+HistoryPanel.prototype.waitForHda = function waitForHda( hdaName, then, timeout, maxWait ){
+    //TODO:?? should this wait until the selectors are in AND they are opaque?
+    var spaceghost = this.spaceghost,
+        hdaElement = null,
+        previousState = null;
+    spaceghost.then( function waitingForHda(){
+        this.waitFor(
+            function checkHpanel(){
+                var wrapperXpath = xpath( '//span[contains(text(),"' + hdaName + '")]/../../..' );
+                hdaElement = this.elementInfoOrNull( wrapperXpath );
+                //NOTE: this will probably fail if the name was used on a previous HDA
+                if( !hdaElement ){ return false; }
+
+                var state = hdaElement.attributes[ "class" ].match( /state\-([\w\-_]*)/ )[1];
+                if( state !== previousState ){
+                    spaceghost.info( 'state: ' + state );
+                    previousState = state;
+                }
+                if( state !== 'ok' ){ return false; }
+
+                var hdaOpacity = this.evaluate( function( name ){
+                    // locate the hda by name and return its opacity
+                    return $( '.name:contains("' + name + '")' ).parents( '.dataset' ).css( 'opacity' );
+                }, hdaName );
+                //this.debug( 'fading in: ' + hdaOpacity );
+                return hdaOpacity >= 1;
+            },
+            function _then(){ then.call( this, hdaElement ); },
+            function _timeout(){ timeout.call( this, hdaElement ); },
+            maxWait );
+    });
+    return spaceghost;
+};
+
+//TODO: combine with above
+/** Wait for the hda with given id to move into the given state.
+ *      whenInStateFn and timeoutFn will be passed the hda element info (see Casper#getElementInfo)
+ *      NOTE: is more than one Casper step.
+ *  @param {String} hdaSelector     selector for hda (should be historyItemWrapper)
+ *  @param {String} finalState      hda state to wait for (e.g. 'ok', 'error', 'running', 'queued', etc.)
+ *  @param {Function} whenInStateFn called when hda goes into finalState
+ *  @param {Function} timeoutFn     called when maxWaitMs have passed without the desired state
+ *      (defaults to throwing an error)
+ *  @param {Int} maxWaitMs          number of milliseconds to wait before timing out
+ *      (defaults to options.waitTimeout)
+ */
+HistoryPanel.prototype.waitForHdaState = function waitForHdaState( hdaSelector, finalState,
+                                                                   whenInStateFn, timeoutFn, maxWaitMs ){
+    // maxWaitMs default - we need a larger timeout option, some things can take a bit
+    maxWaitMs = maxWaitMs || this.spaceghost.options.waitTimeout;
+    var hpanel = this,
+        spaceghost = this.spaceghost;
+
+    this.spaceghost.then( function(){
+        // get initial state, cache old timeout, set new timeout
+        var prevState = hpanel.getHdaState( hdaSelector ),
+            oldWaitTimeout = spaceghost.options.waitTimeout;
+        spaceghost.info( hdaSelector + ': ' + prevState );
+        spaceghost.options.waitTimeout = maxWaitMs;
+
+        // begin waiting for desired state
+        spaceghost.waitFor(
+            function _checkForState(){
+                var newState = hpanel.getHdaState( hdaSelector );
+                // report state changes
+                if( newState !== prevState ){
+                    spaceghost.info( hdaSelector + ': ' + newState );
+                    prevState = newState;
+                }
+                return newState === finalState;
+            },
+            // if the hda state happened, call the whenInStateFn
+            //  and close down the progress interval and reset the wait timeout to what it was
+            function _whenInState(){
+                spaceghost.options.waitTimeout = oldWaitTimeout;
+                whenInStateFn.call( spaceghost, spaceghost.elementInfoOrNull( hdaSelector ) );
+            },
+            // if we've timed out, call the timeoutFn and close up
+            function _timeout(){
+                spaceghost.options.waitTimeout = oldWaitTimeout;
+                var hdaInfo = spaceghost.elementInfoOrNull( hdaSelector );
+                if( utils.isFunction( timeoutFn ) ){
+                    timeoutFn.call( spaceghost, hdaInfo );
+
+                // timeoutFn default - raise an error on timeout
+                } else {
+                    spaceghost.error( 'Timeout: final hda: ' + spaceghost.jsonStr( hdaInfo ) );
+                    throw new spaceghost.GalaxyError(
+                        'Timeout: waiting for ' + hdaSelector + ' to enter state: ' + finalState );
+                }
+            }
+        );
+    });
+    return spaceghost;
+};
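+
+// Sketch: wait up to 60s for an hda to reach 'ok' (the selector is illustrative):
+//   spaceghost.historypanel.waitForHdaState( '#dataset-f2db41e1fa331b3e', 'ok',
+//       function whenOk( hdaInfo ){ this.info( 'finished: ' + hdaInfo.attributes.id ); },
+//       null, 60 * 1000 );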
+
+/** Expands or collapses an HDA by clicking the title (does nothing if already in desired state).
+ *      NOTE: is more than one Casper step.
+ *  @param {String} hdaSelector     a css or xpath selector for an historyItemWrapper
+ *  @param {Function} then          function called when the change is made
+ *  @param {Boolean} desiredClosed  true if you want to collapse, false if you want open
+ *  @private
+ */
+HistoryPanel.prototype._thenExpandOrCollapseHda = function _thenExpandOrCollapseHda( hdaSelector, then, desiredClosed ){
+    // using a step here (instead of a jump) bc we need the wait for function
+    this.spaceghost.then( function checkingHda(){
+        this.info( (( desiredClosed )?( 'collapsing' ):( 'expanding' )) + ' hda: ' + hdaSelector );
+
+        // click to open if the body isn't visible and call wait to account for opening animation
+        if( this.visible( hdaSelector + ' ' + this.historypanel.data.selectors.hda.body ) === desiredClosed ){
+            this.click( hdaSelector + ' ' + this.historypanel.data.selectors.hda.title );
+            //NOTE: then is executed in the top frame
+            //TODO: magic number
+            this.wait( 500, then );
+
+        // otherwise, just call then
+        } else if( then ){
+            then.call( this );
+        }
+    });
+    return this.spaceghost;
+};
+
+/** Collapses an HDA by clicking the title (does nothing if already collapsed).
+ *      NOTE: is more than one Casper step.
+ *  @param {String} hdaSelector     a css or xpath selector for an historyItemWrapper
+ *  @param {Function} then          function called when the change is made
+ */
+HistoryPanel.prototype.thenCollapseHda = function thenCollapseHda( hdaSelector, then ){
+    return this._thenExpandOrCollapseHda( hdaSelector, then, true );
+};
+
+/** Expands an HDA by clicking the title (does nothing if already expanded).
+ *      NOTE: is more than one Casper step.
+ *  @param {String} hdaSelector     a css or xpath selector for an historyItemWrapper
+ *  @param {Function} then          function called when the change is made
+ */
+HistoryPanel.prototype.thenExpandHda = function thenExpandHda( hdaSelector, then ){
+    return this._thenExpandOrCollapseHda( hdaSelector, then, false );
+};
+
+/** Deletes an hda by finding an hda with the given title and clicking on the delete icon.
+ *      NOTE: if more than one is found, the first found will be deleted.
+ *      NOTE: is more than one Casper step.
+ *  @param {String} hdaSelector     a css or xpath selector for an historyItemWrapper
+ *  @param {Function} whenDeletedFn function to be called when the hda is deleted (optional)
+ *  @param {Function} timeoutFn     function to be called if/when the delete attempt times out (optional)
+ */
+HistoryPanel.prototype.deleteHda = function deleteHda( hdaSelector, whenDeletedFn, timeoutFn ){
+    var spaceghost = this.spaceghost;
+    spaceghost.then( function deletingHda(){
+        var hdaId = this.getElementInfo( hdaSelector ).attributes.id,
+            deleteIconSelector = '#' + hdaId + ' ' + this.historypanel.data.hdaTitleButtons['delete'].selector;
+        this.click( deleteIconSelector );
+
+        this.waitWhileSelector( '#' + hdaId,
+            function hdaNoLongerInDom(){
+                this.info( 'hda deleted: ' + hdaSelector );
+                if( utils.isFunction( whenDeletedFn ) ){ whenDeletedFn.call( spaceghost ); }
+
+            //TODO: test timeouts by cutting delete fn
+            }, function timeout(){
+                if( utils.isFunction( timeoutFn ) ){
+                    timeoutFn.call( spaceghost );
+                } else {
+                    throw new this.GalaxyError( 'Timeout: attempting to delete hda : ' + hdaSelector );
+                }
+            });
+    });
+    return spaceghost;
+};
+
+/** Undeletes an hda by including deleted in the panel, clicking undelete, and turning off include deleted
+ *      NOTE: if more than one is found, the first found will be undeleted.
+ *      NOTE: is more than one Casper step.
+ *  @param {String} hdaSelector     a css or xpath selector for an historyItemWrapper
+ *  @param {Function} whenDeletedFn function to be called when the hda is deleted (optional)
+ */
+HistoryPanel.prototype.undeleteHda = function undeleteHda( hdaSelector, whenUndeletedFn ){
+    this.spaceghost.historyoptions.includeDeleted( function(){
+        this.click( hdaSelector + ' ' + this.historypanel.data.selectors.hda.undeleteLink );
+        this.historyoptions.excludeDeleted( function(){
+            this.info( 'hda undeleted: ' + hdaSelector );
+            if( utils.isFunction( whenUndeletedFn ) ){ whenUndeletedFn.call( this ); }
+        });
+        //TODO:?? no timeout fn?
+    });
+};
+
+// =================================================================== SELECTORS
+//TODO: data is not a very good name
+HistoryPanel.prototype.data = {
+    hdaTitleButtons : {
+        // mixing text and selectors here
+        display : {
+            selector : '.icon-btn.display-btn',
+            tooltip  : 'View data',
+            hrefTpl  : '/datasets/%s/display',
+            nodeName : 'a'
+        },
+        edit : {
+            selector : '.icon-btn.edit-btn',
+            tooltip  : 'Edit attributes',
+            hrefTpl  : '/datasets/%s/edit',
+            nodeName : 'a'
+        },
+        'delete' : {
+            selector : '.icon-btn.delete-btn',
+            tooltip  : 'Delete',
+            hrefTpl  : 'javascript:void(0);',
+            nodeName : 'a'
+        }
+    },
+    hdaPrimaryActionButtons : {
+        download : {
+            selector : '.icon-btn.download-btn',
+            tooltip  : 'Download',
+            hrefTpl  : '/datasets/%s/display?to_ext=',
+            nodeName : 'a'
+        },
+        info : {
+            selector : '.icon-btn.params-btn',
+            tooltip  : 'View details',
+            hrefTpl  : '/datasets/%s/show_params',
+            nodeName : 'a'
+        },
+        rerun : {
+            selector : '.icon-btn.rerun-btn',
+            tooltip  : 'Run this job again',
+            hrefTpl  : '/tool_runner/rerun?id=%s',
+            nodeName : 'a'
+        },
+        downloadDropdownButtonIdTpl : 'dataset-%s-popup',
+        downloadDropdownMenuIdTpl : 'dataset-%s-popup-menu'
+    },
+    selectors : {
+        history : {
+            title       : '.controls .title',
+            name        : '.title .name',
+            nameEditableTextInput : '.name input',
+            subtitle    : '.subtitle',
+            size        : '.history-size',
+            tagIcon     : '.actions .history-tag-btn',
+            tagArea     : '.controls .tags-display',
+            annoIcon    : '.actions .history-annotate-btn',
+            annoArea    : '.controls .annotation-display',
+            emptyMsg    : '.empty-message',
+            hdaContainer: '.datasets-list'
+        },
+        hda : {
+            wrapper : {
+                itemClass   : '.history-content.dataset',
+                stateClasses : {
+                    prefix  : 'state-',
+                    ok      : 'state-ok',
+                    'new'   : 'state-new'
+                }
+            },
+            errorMessage    : '.errormessagesmall',
+
+            title           : '.title',
+            titleButtonArea : '.primary-actions',
+            summary         : '.summary',
+            blurb           : '.blurb .value',
+            dbkey           : '.dbkey .value',
+            info            : '.info .value',
+            body            : '.details',
+
+            primaryActionButtons    : '.actions .left',
+            secondaryActionButtons  : '.actions .right',
+
+            undeleteLink    : '.undelete-link',
+            purgeLink       : '.purge-link',
+
+            peek            : '.dataset-peek'
+        }
+    },
+    labels : {
+        history : {
+        },
+        hda : {
+        }
+    },
+    text : {
+        windowTitle : 'History',
+        //frameTitle : 'Galaxy History',
+        anonymous : {
+            tooltips : {
+                name    : 'You must be logged in to edit your history name'
+            }
+        },
+        history : {
+            tooltips : {
+                name     : 'Click to rename history',
+                tagIcon  : 'Edit history tags',
+                annoIcon : 'Edit history annotation'
+            },
+            newName  : 'Unnamed history',
+            newSize  : '0 b',
+            emptyMsg : "This history is empty. You can load your own data or get data from an external source"
+        },
+        hda : {
+            datasetFetchErrorMsg : 'There was an error getting the data for this dataset'
+        }
+    }
+};
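+
+// A hedged note on composing the selectors above ('#dataset-' + hdaDbId is an assumed wrapper id scheme):
+//      var hdaSelector = '#dataset-' + hdaDbId,
+//          titleSelector = hdaSelector + ' ' + spaceghost.historypanel.data.selectors.hda.title;
+//      spaceghost.then( function(){
+//          this.test.assertVisible( titleSelector, 'hda title is visible' );
+//      });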
diff --git a/test/casperjs/modules/tools.js b/test/casperjs/modules/tools.js
new file mode 100644
index 0000000..de75997
--- /dev/null
+++ b/test/casperjs/modules/tools.js
@@ -0,0 +1,203 @@
+// =================================================================== module object, exports
+/** Creates a new tools module object.
+ *  @exported
+ */
+exports.create = function createTools( spaceghost ){
+    return new Tools( spaceghost );
+};
+
+/** Tools object constructor.
+ *  @param {SpaceGhost} spaceghost a spaceghost instance
+ */
+var Tools = function Tools( spaceghost ){
+    //??: circ ref?
+    this.options = {};
+    /** Default amount of ms to wait for upload to finish */
+    this.options.defaultUploadWait = ( 45 * 1000 );
+    this.spaceghost = spaceghost;
+};
+exports.Tools = Tools;
+
+Tools.prototype.toString = function toString(){
+    return this.spaceghost + '.Tools';
+};
+
+
+// -------------------------------------------------------------------
+/* TODO:
+    move selectors from sg to here
+
+*/
+// =================================================================== INTERNAL
+var require = patchRequire( require ),
+    xpath = require( 'casper' ).selectXPath;
+
+// ------------------------------------------------------------------- get avail. tools
+// list available tools
+//spaceghost.then( function(){
+//    spaceghost.withFrame( 'galaxy_tools', function(){
+//        //var availableTools = this.fetchText( 'a.tool-link' );
+//
+//        var availableTools = this.evaluate( function(){
+//            //var toolTitles = __utils__.findAll( 'div.toolTitle' );
+//            //return Array.prototype.map.call( toolTitles, function( e ){
+//            //    //return e.innerHtml;
+//            //    return e.textContent || e.innerText;
+//            //}).join( '\n' );
+//
+//            var toolLinks = __utils__.findAll( 'a.tool-link' );
+//            return Array.prototype.map.call( toolLinks, function( e ){
+//                //return e.innerHtml;
+//                return e.textContent || e.innerText;
+//            }).join( '\n' );
+//        });
+//        this.debug( 'availableTools: ' + availableTools );
+//    });
+//});
+
+/** Parses the hid and name of a new file from the tool execution donemessagelarge
+ *  @param {String} doneMsgText     the text extracted from the donemessagelarge after a tool execution
+ */
+Tools.prototype._parseDoneMessageForTool = function parseDoneMessageForTool( doneMsgText ){
+    //TODO: test
+    var executionInfo = {};
+    var textMatch = doneMsgText.match( /added to the queue:\n\n(\d+)\: (.*)\n/m );
+    if( textMatch ){
+        if( textMatch.length > 1 ){
+            executionInfo.hid = parseInt( textMatch[1], 10 );
+        }
+        if( textMatch.length > 2 ){
+            executionInfo.name = textMatch[2];
+        }
+    }
+    return executionInfo;
+};
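+
+// A hedged example of the expected message format (the exact preamble wording here is assumed):
+//      var info = spaceghost.tools._parseDoneMessageForTool(
+//          'The following job has been added to the queue:\n\n1: MyFile.txt\n' );
+//      // info -> { hid: 1, name: 'MyFile.txt' }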
+
+// ------------------------------------------------------------------- upload (internal)
+/** Tests uploading a file.
+ *      NOTE: this version does NOT throw an error on a bad upload.
+ *      It is meant for testing the upload functionality and, therefore, is marked as private.
+ *      Other tests should use uploadFile
+ *  @param {String} filepath    the local filesystem path of the file to upload (absolute (?))
+ */
+Tools.prototype._uploadFile = function _uploadFile( filepath ){
+    //TODO: generalize for all tools
+    var spaceghost = this.spaceghost,
+        uploadInfo = {};
+    uploadInfo[ spaceghost.data.selectors.tools.upload.fileInput ] = filepath;
+
+    spaceghost.openHomePage( function(){
+        // load the upload tool form
+        // (we can apparently click a tool label without expanding the tool container for it)
+        this.click( xpath( '//a[contains(text(),"Upload File")]' ) );
+        this.jumpToMain( function(){
+            this.waitForSelector( 'body' );
+        });
+    });
+
+    // fill in the form and click execute - wait for reload
+    spaceghost.withMainPanel( function(){
+        //?? no wait for page to load?
+        this.fill( this.data.selectors.tools.general.form, uploadInfo, false );
+
+        // the following throws:
+        //  [error] [remote] Failed dispatching clickmouse event on xpath selector: //input[@value="Execute"]:
+        //  PageError: TypeError: 'undefined' is not a function (evaluating '$(spaceghost).formSerialize()')
+        // ...and yet the upload still seems to work
+        this.click( xpath( this.data.selectors.tools.general.executeButton_xpath ) );
+
+        // wait for main panel, history reload
+        ////NOTE!: assumes tool execution reloads the history panel
+        this.waitForMultipleNavigation( [ 'tool_runner/upload_async_message' ],
+            function thenAfterUploadRefreshes(){
+                // debugging
+                this.jumpToMain( function(){
+                    var messageInfo = this.elementInfoOrNull( this.data.selectors.messages.all );
+                    this.debug( ( messageInfo )?( messageInfo.attributes['class'] + ':\n' + messageInfo.text )
+                                               :( 'NO post upload message' ) );
+                });
+            },
+            function timeoutWaitingForUploadRefreshes( urlsStillWaitingOn ){
+                throw new this.GalaxyError( 'Upload Error: '
+                    + 'timeout waiting for upload "' + filepath + '" refreshes: ' + urlsStillWaitingOn );
+            },
+            this.tools.options.defaultUploadWait
+        );
+    });
+};
+
+// =================================================================== API (external)
+// ------------------------------------------------------------------- misc
+/** get filename from filepath
+ *  @param {String} filepath    (POSIX) filepath
+ *  @returns {String} filename part of filepath
+ */
+Tools.prototype.filenameFromFilepath = function filenameFromFilepath( filepath ){
+    var lastSepIndex = filepath.lastIndexOf( '/' );
+    if( lastSepIndex !== -1 ){
+        return filepath.slice( lastSepIndex + 1 );
+    }
+    return filepath;
+};
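+
+// For example (illustrative path):
+//      spaceghost.tools.filenameFromFilepath( '../test-data/1.bed' ); // -> '1.bed'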
+
+// ------------------------------------------------------------------- upload (conv.)
+/** Convenience function for uploading a file.
+ *      callback function will be passed an uploadInfo object in the form:
+ *          filepath:   the filepath of the uploaded file
+ *          filename:   the filename of the uploaded file
+ *          hid:        the hid of the uploaded file hda in the current history
+ *          name:       the name of the uploaded file hda
+ *          hdaElement: the hda DOM (casperjs form) element info object (see Casper#getElementInfo)
+ *  @param {String} filepath        (POSIX) filepath relative to the script's directory
+ *  @param {Function} callback      callback function called after hda moves into ok state (will be passed uploadInfo)
+ *  @param {Integer} timeoutAfterMs milliseconds to wait before timing out (defaults to options.defaultUploadWait)
+ */
+Tools.prototype.uploadFile = function uploadFile( filepath, callback, timeoutAfterMs ){
+    timeoutAfterMs = timeoutAfterMs || this.options.defaultUploadWait;
+    var spaceghost = this.spaceghost,
+        filename = this.filenameFromFilepath( filepath ),
+        uploadInfo = {};
+
+    spaceghost.info( 'uploading file: ' + filepath + ' (timeout after ' + timeoutAfterMs + ')' );
+    this._uploadFile( filepath );
+
+    // error if an info message wasn't found
+    spaceghost.withMainPanel( function checkUploadMessage(){
+        var infoInfo = spaceghost.elementInfoOrNull( this.data.selectors.messages.infolarge );
+        if( ( infoInfo )
+        &&  ( infoInfo.text.indexOf( this.data.text.upload.success ) !== -1 ) ){
+            // safe to store these
+            uploadInfo.filename = filename;
+            uploadInfo.filepath = filepath;
+
+        } else {
+            // capture any other messages on the page
+            var otherInfo = spaceghost.elementInfoOrNull( this.data.selectors.messages.all ),
+                message   = ( otherInfo && otherInfo.text )?( otherInfo.text ):( '' );
+            throw new this.GalaxyError( 'Upload Error: no success message uploading "' + filepath + '": ' + message );
+        }
+    });
+
+    // the hpanel should refresh and display the uploading file, wait for that to go into the ok state
+    // throw if uploaded HDA doesn't appear, or it doesn't move to 'ok' after allotted time
+    //spaceghost.historypanel.waitForHdas()
+    spaceghost.historypanel.waitForHda( filename,
+        // success: update the upload info and run callback
+        function whenInStateFn( newHdaInfo ){
+            this.info( 'Upload complete: ' + newHdaInfo.text );
+            uploadInfo.hdaElement = newHdaInfo;
+            callback.call( spaceghost, uploadInfo );
+        },
+        function timeoutFn( newHdaInfo ){
+            this.warning( 'timeout waiting for upload: ' + filename + ', ' + this.jsonStr( newHdaInfo ) );
+            throw new spaceghost.GalaxyError( 'Upload Error: timeout waiting for ok state: '
+                + '"' + uploadInfo.filepath + '" (waited ' + timeoutAfterMs + ' ms)' );
+        },
+        timeoutAfterMs
+    );
+
+    return spaceghost;
+};
+//TODO: upload via url
+//TODO: upload by textarea
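+
+// A minimal usage sketch (assumes a 'test-data/1.txt' fixture relative to the script directory):
+//      spaceghost.tools.uploadFile( 'test-data/1.txt', function( uploadInfo ){
+//          this.test.assert( !!uploadInfo.hdaElement, 'upload rendered in the history panel' );
+//      });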
diff --git a/test/casperjs/modules/user.js b/test/casperjs/modules/user.js
new file mode 100644
index 0000000..1cb40da
--- /dev/null
+++ b/test/casperjs/modules/user.js
@@ -0,0 +1,296 @@
+// =================================================================== module object, exports
+/** User object constructor.
+ *  @param {SpaceGhost} spaceghost a spaceghost instance
+ */
+var User = function User( spaceghost ){
+    //??: circ ref?
+    this.spaceghost = spaceghost;
+};
+exports.User = User;
+
+/** Creates a new user module object.
+ *  @exported
+ */
+exports.create = function createUser( spaceghost ){
+    return new User( spaceghost );
+};
+
+User.prototype.toString = function toString(){
+    return this.spaceghost + '.User';
+};
+
+
+// =================================================================== INTERNAL
+var require = patchRequire( require ),
+    xpath = require( 'casper' ).selectXPath;
+
+/** Tests registering a new user on the Galaxy instance by submitting the registration form.
+ *      NOTE: this version does NOT throw an error on a bad registration.
+ *      It is meant for testing the registration functionality and, therefore, is marked as private.
+ *      Other tests should use registerUser
+ *  @param {String} email       the user's email address
+ *  @param {String} password    the user's password
+ *  @param {String} username    the user's ...username! (optional: will use 1st part of email)
+ *  @param {String} confirm     password confirmation (optional: defaults to password)
+ */
+User.prototype._submitRegistration = function _submitRegistration( email, password, username, confirm ){
+    var spaceghost = this.spaceghost,
+        userInfo = {
+            email   : email,
+            password: password,
+            // default username to first part of email
+            username:( !username && email.match( /^\w*/ ) )?( email.match( /^\w*/ )[0] ):( username ),
+            // default confirm: duplicate of password
+            confirm : ( confirm !== undefined )?( confirm ):( password )
+        };
+
+    spaceghost.openHomePage( function(){
+        this.click( xpath( spaceghost.data.selectors.masthead.user ) );
+        this.clickLabel( spaceghost.data.labels.masthead.userMenu.register );
+
+        this.waitForNavigation( 'user/create', function beforeRegister(){
+            this.withMainPanel( function mainBeforeRegister(){
+                spaceghost.debug( '(' + spaceghost.getCurrentUrl() + ') registering user:\n' +
+                    spaceghost.jsonStr( userInfo ) );
+                this.fill( spaceghost.data.selectors.registrationPage.form, userInfo, false );
+                // need manual submit (not a normal html form)
+                this.click( xpath( spaceghost.data.selectors.registrationPage.submit_xpath ) );
+            });
+            this.waitForNavigation( 'user/create', function afterRegister(){
+            //    this.withMainPanel( function mainAfterRegister(){
+            //        var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
+            //        spaceghost.debug( 'post registration message:\n' + spaceghost.jsonStr( messageInfo ) );
+            //    });
+            });
+        });
+
+    });
+};
+
+/** Tests logging in a user on the Galaxy instance by submitting the login form.
+ *      NOTE: this version does NOT throw an error on a bad login.
+ *      It is meant for testing the login functionality and, therefore, is marked as private.
+ *      Other tests should use login
+ *  @param {String} email       the user's email address
+ *  @param {String} password    the user's password
+ */
+User.prototype._submitLogin = function _submitLogin( email, password ){
+    var spaceghost = this.spaceghost,
+        loginInfo = {
+        //NOTE: keys are used as name selectors in the fill fn - must match the names of the inputs
+            login: email,
+            password: password
+        };
+
+    spaceghost.openHomePage( function(){
+        this.click( xpath( spaceghost.data.selectors.masthead.user ) );
+        this.clickLabel( spaceghost.data.labels.masthead.userMenu.login );
+
+        this.waitForNavigation( 'user/login', function beforeLogin(){
+            this.withMainPanel( function mainBeforeLogin(){
+                spaceghost.debug( '(' + spaceghost.getCurrentUrl() + ') logging in user:\n' +
+                    spaceghost.jsonStr( loginInfo ) );
+                spaceghost.fill( spaceghost.data.selectors.loginPage.form, loginInfo, false );
+                spaceghost.click( xpath( spaceghost.data.selectors.loginPage.submit_xpath ) );
+            });
+        });
+
+        this.waitForNavigation( 'user/login', function afterLogin(){
+            //this.withMainPanel( function mainAfterLogin(){
+            //    var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
+            //    spaceghost.debug( 'post login message:\n' + spaceghost.jsonStr( messageInfo ) );
+            //});
+        });
+    });
+};
+
+
+// =================================================================== API (external)
+/** Register a new user on the Galaxy instance.
+ *  @param {String} email       the user's email address
+ *  @param {String} password    the user's password
+ *  @param {String} username    the user's ...username! (optional: will use 1st part of email)
+ *  @returns {SpaceGhost} the spaceghost instance (for chaining)
+ */
+User.prototype.registerUser = function registerUser( email, password, username ){
+    //TODO: callback
+    var spaceghost = this.spaceghost;
+    this._submitRegistration( email, password, username );
+    spaceghost.withMainPanel( function mainAfterRegister(){
+        var errorMessage = this.elementInfoOrNull( spaceghost.data.selectors.messages.error );
+        if( errorMessage ){
+            this.warning( 'Registration failed: ' + errorMessage.text );
+            throw new spaceghost.GalaxyError( 'RegistrationError: ' + errorMessage.text );
+        }
+
+        var messageInfo = this.elementInfoOrNull( spaceghost.data.selectors.messages.done );
+        this.debug( 'post registration message:\n' + messageInfo.text );
+
+        this.clickLabel( 'Return to the home page.' );
+        this.waitForNavigation( '' );
+    });
+    return spaceghost;
+};
+
+/** Logs in a user. Throws error on bad log in.
+ *  @param {String} email       the user's email address
+ *  @param {String} password    the user's password
+ *  @returns {SpaceGhost} the spaceghost instance (for chaining)
+ */
+User.prototype.login = function login( email, password ){
+    var spaceghost = this.spaceghost;
+    spaceghost.debug( '(' + spaceghost.getCurrentUrl() + ') attempting login with ' + email + ' using password ' + password );
+
+    this._submitLogin( email, password );
+    //spaceghost.withMainPanel( function mainAfterLogin(){
+    //    if( spaceghost.getCurrentUrl().search( spaceghost.data.selectors.loginPage.url_regex ) !== -1 ){
+    //        var messageInfo = spaceghost.getElementInfo( spaceghost.data.selectors.messages.all );
+    //        if( messageInfo && messageInfo.attributes[ 'class' ] === 'errormessage' ){
+    //            this.warning( 'Login failed: ' + messageInfo.text );
+    //            throw new spaceghost.GalaxyError( 'LoginError: ' + messageInfo.text );
+    //        }
+    //    }
+    //});
+    this.spaceghost.then( function checkLogin(){
+        if( spaceghost.user.loggedInAs() !== email ){
+            throw new spaceghost.GalaxyError( 'LoginError' );
+        } else {
+            spaceghost.info( 'logged in as ' + email );
+        }
+    });
+    return spaceghost;
+};
+
+/** Fetch the email of the currently logged in user (or '' if not logged in)
+ *  @returns {String} email of currently logged in user or '' if no one logged in
+ */
+User.prototype.loggedInAs = function loggedInAs(){
+    var currUser = this.spaceghost.api.users.show( 'current' );
+    //this.spaceghost.debug( this.spaceghost.jsonStr( currUser ) );
+    return currUser.email || '';
+//TODO: due to late rendering of masthead this is no longer reliable - need a wait for in the main page
+    //return this.spaceghost.jumpToTop( function(){
+    //    var userEmail = '';
+    //    try {
+    //        var emailSelector = xpath( this.data.selectors.masthead.userMenu.userEmail_xpath ),
+    //            loggedInInfo = this.elementInfoOrNull( emailSelector );
+    //        this.debug( '\n\n' + this.jsonStr( loggedInInfo ) + '\n' );
+    //        if( loggedInInfo !== null ){
+    //            userEmail = loggedInInfo.text.replace( 'Logged in as ', '' );
+    //        }
+    //    } catch( err ){
+    //        this.warn( err );
+    //    }
+    //    return userEmail;
+    //});
+};
+
+/** Log out the current user
+ *  @returns {SpaceGhost} the spaceghost instance (for chaining)
+ */
+User.prototype.logout = function logout(){
+    var spaceghost = this.spaceghost;
+    this.spaceghost.openHomePage( function(){
+        if( spaceghost.user.loggedInAs() ){
+            this.click( xpath( spaceghost.data.selectors.masthead.user ) );
+            spaceghost.clickLabel( spaceghost.data.labels.masthead.userMenu.logout );
+            spaceghost.waitForNavigation( 'user/logout', function _toLogoutPage() {
+                spaceghost.clickLabel( 'go to the home page' );
+                spaceghost.waitForNavigation( '' );
+            });
+        }
+    });
+    return spaceghost;
+};
+
+/** Attempts to login a user - if that raises an error (LoginError), register the user
+ *  @param {String} email       the user's email address
+ *  @param {String} password    the user's password
+ *  @param {String} username    the user's ...username! (optional: will use 1st part of email)
+ *  @returns {SpaceGhost} the spaceghost instance (for chaining)
+ */
+User.prototype.loginOrRegisterUser = function loginOrRegisterUser( email, password, username ){
+    var spaceghost = this.spaceghost;
+    // attempt a login, if that fails - register
+    spaceghost.tryStepsCatch( function tryToLogin(){
+        spaceghost.openHomePage().user.login( email, password );
+
+    }, function failedLoginRegister(){
+        spaceghost.openHomePage().user.registerUser( email, password, username );
+    });
+    return spaceghost;
+};
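+
+// A minimal usage sketch (the email/password here are throwaway test fixtures):
+//      var email = spaceghost.user.getRandomEmail(),
+//          password = '123456';
+//      spaceghost.user.loginOrRegisterUser( email, password );
+//      spaceghost.then( function(){
+//          this.test.assertEquals( this.user.loggedInAs(), email, 'logged in as ' + email );
+//      });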
+
+// ------------------------------------------------------------------- Admin
+/** Gets the admin user data from spaceghost if set and checks the galaxy.ini file for the email.
+ *  @returns {Object|null} the admin data object (email, password, username)
+ *      or null if no admin is set in either the galaxy.ini or spaceghost.
+ */
+User.prototype.getAdminData = function getAdminData(){
+    //TODO: this might be better inside sg
+    // check for the setting in sg and the galaxy.ini file
+    var adminData = this.spaceghost.options.adminUser,
+        iniAdminEmails = this.spaceghost.getUniverseSetting( 'admin_users' );
+    iniAdminEmails = ( iniAdminEmails )?
+        ( iniAdminEmails.split( ',' ).map( function( email ) { return email.trim(); } ) ):( null );
+
+    //TODO: seems like we only need the wsgi setting - that's the only thing we can't change
+    if( adminData ){
+        if( iniAdminEmails && iniAdminEmails.indexOf( adminData.email ) !== -1 ){ return adminData; }
+
+    // if not set in options, but there are entries in the ini and a default admin pass:
+    //  return the first email with the default pass
+    //  Hopefully this is no less secure than the user/pwd in twilltestcase
+    } else if( iniAdminEmails && iniAdminEmails.length && this.spaceghost.options.adminPassword ){
+        return { email: iniAdminEmails[0], password: this.spaceghost.options.adminPassword };
+    }
+
+    return null;
+};
+
+/** Logs in the admin user (if available) as the current user.
+ *      Note: logs out any other current users.
+ *  @throws {GalaxyError} err   if specified user is not admin or no admin found
+ *  @returns {SpaceGhost} the spaceghost instance (for chaining)
+ */
+User.prototype.loginAdmin = function loginAdmin(){
+    this.spaceghost.then( function(){
+        var adminData = this.user.getAdminData();
+        if( !adminData ){
+            throw new this.GalaxyError( 'No admin users found' );
+        }
+        this.info( 'logging in administrator' );
+        return this.user.loginOrRegisterUser( adminData.email, adminData.password );
+    });
+};
+
+/** Is the currently logged in user an admin?
+ *  @returns {Boolean} true if the currently logged in user is admin, false if not.
+ */
+User.prototype.userIsAdmin = function userIsAdmin(){
+    // simple test of whether the Admin tab is displayed in the masthead
+    return this.spaceghost.jumpToTop( function(){
+        if( this.visible( this.data.selectors.masthead.adminLink ) ){
+            return true;
+        }
+        return false;
+    });
+};
+
+
+// ------------------------------------------------------------------- Utility
+/** Gets a pseudo-random (unique?) email based on a random number.
+ *      Helpful for testing registration.
+ *  @param {String} username    email user (defaults to 'test')
+ *  @param {String} domain      email domain (defaults to 'test.test')
+ *  @returns {String}           new email as string
+ */
+User.prototype.getRandomEmail = function getRandomEmail( username, domain ){
+    username = username || 'test';
+    domain = domain || 'test.test';
+    var number = Math.ceil( Math.random() * 10000000000000 );
+    // doesn't work so well when creating two users at once
+    //var number = Date.now();
+    return username + number + '@' + domain;
+};
+
diff --git a/test/casperjs/page-data/selectors.json b/test/casperjs/page-data/selectors.json
new file mode 100644
index 0000000..c1f060a
--- /dev/null
+++ b/test/casperjs/page-data/selectors.json
@@ -0,0 +1,35 @@
+{
+    "cookies" : {
+        "galaxyCookieName" : "galaxysession"
+    },
+
+    "selectors" : {
+        "historyPanel" : {
+            "name"      : "div#history-name",
+            "subtitle"  : "div#history-subtitle-area",
+            "tagIcon"   : "#history-tag.icon-button",
+            "annoIcon"  : "#history-annotate.icon-button",
+            "emptyMsg"  : ".infomessagesmall"
+        },
+
+        "bootstrap" : {
+            "activeTooltip" : ".bs-tooltip"
+        },
+
+        "editableText" : {
+            "class"         : "editable-text",
+            "activeInput"   : "input#renaming-active"
+        }
+    },
+
+    "text" : {
+        "historyPanel" : {
+            "newName"          : "Unnamed history",
+            "initialSizeStr"   : "0 bytes",
+            "emptyMsgStr"      : "Your history is empty. Click 'Get Data' on the left pane to start",
+            "tooltips" : {
+                "anonUserName" : "You must be logged in to edit your history name"
+            }
+        }
+    }
+}
diff --git a/test/casperjs/registration-tests.js b/test/casperjs/registration-tests.js
new file mode 100644
index 0000000..3a1c59c
--- /dev/null
+++ b/test/casperjs/registration-tests.js
@@ -0,0 +1,137 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Testing registration of new users', 0, function suite( test ){
+    spaceghost.start();
+
+    // =================================================================== globals and helpers
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456',
+        confirm = password,
+        username = 'test' + Date.now();
+
+    // =================================================================== TESTS
+    spaceghost.openHomePage( function(){
+        this.test.comment( 'loading galaxy homepage' );
+        this.test.assertTitle( 'Galaxy' );
+        this.test.assertExists( xpath( "//div[@id='masthead']" ), 'found masthead' );
+    });
+
+    // ------------------------------------------------------------------- register a new user
+    spaceghost.then( function(){
+        this.test.comment( 'registering user: ' + email );
+        this.user._submitRegistration( email, password, username, confirm );
+    });
+    spaceghost.openHomePage( function(){
+        this.click( xpath( spaceghost.data.selectors.masthead.user ) );
+        var loggedInAs = this.fetchText( xpath( spaceghost.data.selectors.masthead.userMenu.userEmail_xpath ) );
+        this.test.assert( loggedInAs.indexOf( email ) !== -1, 'found proper email in user menu: ' + loggedInAs );
+    });
+
+    // ------------------------------------------------------------------- log out that user
+    spaceghost.user.logout().openHomePage( function(){
+        var emailSelector = xpath( this.data.selectors.masthead.userMenu.userEmail_xpath );
+        this.test.assert( !this.elementInfoOrNull( emailSelector ), 'user email not found' );
+    });
+
+    // ------------------------------------------------------------------- bad user registrations
+    spaceghost.then( function(){
+        this.test.comment( 'attempting to re-register user: ' + email );
+        this.user._submitRegistration( email, password, username, confirm );
+    });
+    spaceghost.then(function(){
+        this.assertErrorMessage( 'User with that email already exists' );
+    });
+
+    // emails must be in the form -@-.- (which is an email on main, btw)
+    var badEmails = [ 'bob', 'bob@', 'bob@idontwanttocleanup', 'bob.cantmakeme' ];
+    spaceghost.each( badEmails, function( self, badEmail ){
+        self.then( function(){
+            this.test.comment( 'attempting bad email: ' + badEmail );
+            this.user._submitRegistration( badEmail, password, username, confirm );
+        });
+        self.then(function(){
+            this.assertErrorMessage( 'Please enter your valid email address' );
+        });
+    });
+
+    // passwords must be at least 6 chars long
+    var badPasswords = [ '1234' ];
+    spaceghost.each( badPasswords, function( self, badPassword ){
+        self.then( function(){
+            this.test.comment( 'attempting bad password: ' + badPassword );
+            this.user._submitRegistration( spaceghost.user.getRandomEmail(), badPassword, username, badPassword );
+        });
+        self.then(function(){
+            this.assertErrorMessage( 'Please use a password of at least 6 characters' );
+        });
+    });
+
+    // and confirm must match
+    var badConfirms = [ '1234', '12345678', '123456 7', '' ];
+    spaceghost.each( badConfirms, function( self, badConfirm ){
+        self.then( function(){
+            this.test.comment( 'attempting bad password confirmation: ' + badConfirm );
+            this.user._submitRegistration( spaceghost.user.getRandomEmail(), password, username, badConfirm );
+        });
+        self.then(function(){
+            this.assertErrorMessage( 'Passwords don\'t match' );
+        });
+    });
+
+    // usernames must be >=3 chars...
+    //NOTE: that short username errors only show AFTER checking for existing/valid emails
+    //  so: we need to generate new emails for each one
+    spaceghost.then( function(){
+        var newEmail = spaceghost.user.getRandomEmail(),
+            badUsername = 'bp';
+        this.test.comment( 'attempting short username: ' + badUsername );
+        this.user._submitRegistration( newEmail, password, badUsername, confirm );
+    });
+    spaceghost.then(function(){
+        this.assertErrorMessage( 'It also has to be shorter than 255 characters but longer than 2' );
+    });
+
+    // ...and be lower-case letters, numbers and '-'...
+    var badUsernames = [ 'BOBERT', 'Robert Paulson', 'bobert!', 'bob_dobbs' ];
+    spaceghost.each( badUsernames, function( self, badUsername ){
+        self.then( function(){
+            var newEmail = spaceghost.user.getRandomEmail();
+            this.test.comment( 'attempting bad username: ' + badUsername );
+            this.user._submitRegistration( newEmail, password, badUsername, confirm );
+        });
+        self.then(function(){
+            this.assertErrorMessage( 'Public name must contain only lowercase letters, numbers and "-"' );
+        });
+    });
+
+    // ...and the name can't be used already
+    spaceghost.then( function(){
+        var newEmail = spaceghost.user.getRandomEmail();
+        this.test.comment( 'attempting previously used username with new user: ' + newEmail );
+        this.user._submitRegistration( newEmail, password, username, confirm );
+    });
+    spaceghost.then(function(){
+        this.assertErrorMessage( 'Public name is taken; please choose another' );
+    });
+
+    // ------------------------------------------------------------------- test the convenience fns
+    // these versions are for conv. use in other tests, they should throw errors if used improperly
+    spaceghost.then( function(){
+        this.assertStepsRaise( 'RegistrationError', function(){
+            this.then( function(){
+                this.test.comment( 'testing (js) error thrown on bad email' );
+                this.user.registerUser( '@internet', '123456', 'ignobel' );
+            });
+        });
+    });
+
+    // ===================================================================
+    spaceghost.run( function(){
+        test.done();
+    });
+});
+
diff --git a/test/casperjs/server_env.py b/test/casperjs/server_env.py
new file mode 100644
index 0000000..739b89f
--- /dev/null
+++ b/test/casperjs/server_env.py
@@ -0,0 +1,143 @@
+"""
+Classes to handle fetching the proper environment and urls for the casperjs
+tests to run against.
+"""
+
+import logging
+import os
+from json import loads
+
+log = logging.getLogger( __name__ )
+
+
+class TestEnvironment( object ):
+    """Provides basic information on the server being tested.
+
+    Implemented as a singleton class so that it may persist between tests
+    without needing to be reset/re-created.
+    """
+    _instance = None
+
+    ENV_PROTOCOL = None
+    ENV_HOST = 'GALAXY_TEST_HOST'
+    ENV_PORT = 'GALAXY_TEST_PORT'
+    ENV_HISTORY_ID = 'GALAXY_TEST_HISTORY_ID'
+    ENV_FILE_DIR = 'GALAXY_TEST_FILE_DIR'
+    ENV_TOOL_SHED_TEST_FILE = 'GALAXY_TOOL_SHED_TEST_FILE'
+    ENV_SAVED_FILES_DIR = 'GALAXY_TEST_SAVE'  # AKA: twilltestcase.keepOutdir
+    ENV_DEBUG_THESE_TESTS = 'GALAXY_DEBUG_THESE_TESTS'
+
+    DEFAULT_PROTOCOL = 'http'
+    DEFAULT_HOST = 'localhost'
+    DEFAULT_PORT = '8080'
+
+    @classmethod
+    def instance( cls, config=None ):
+        """Returns the singleton of TestEnvironment, instantiating it first if it
+        does not yet exist.
+        """
+        # singleton pattern
+        if not cls._instance:
+            log.debug( 'creating singleton instance of "%s", config: %s', str( cls ), str( config ) )
+            cls._instance = cls( config )
+        return cls._instance
+
+    def __init__( self, env_config_dict=None ):
+        self.config = env_config_dict or {}
+
+        self.protocol = self._get_setting_from_config_or_env(
+            'protocol', self.ENV_PROTOCOL, self.DEFAULT_PROTOCOL )  # TODO: required=True )
+        self.host = self._get_setting_from_config_or_env(
+            'host', self.ENV_HOST, self.DEFAULT_HOST )  # TODO: required=True )
+        self.port = self._get_setting_from_config_or_env(
+            'port', self.ENV_PORT, self.DEFAULT_PORT )  # TODO: required=True )
+
+        # TODO: move these setters/init'rs into a parser dict
+        self.history_id = self._get_setting_from_config_or_env(
+            'history_id', self.ENV_HISTORY_ID )
+        self.file_dir = self._get_setting_from_config_or_env(
+            'file_dir', self.ENV_FILE_DIR )
+        self.tool_shed_test_file = self._get_setting_from_config_or_env(
+            'tool_shed_test_file', self.ENV_TOOL_SHED_TEST_FILE )
+        self.shed_tools_dict = self._get_shed_tools_dict()
+
+        # saved output goes here: test diffs, screenshots, html, etc.
+        self.saved_output_dir = self._get_setting_from_config_or_env(
+            'saved_output_dir', self.ENV_SAVED_FILES_DIR )
+        self._create_saved_output_dir()
+
+        # if a test script (e.g. 'history-panel-tests.js') is listed in this var,
+        #   the test will output additional/full debug info
+        self.debug_these_tests = self._get_setting_from_config_or_env(
+            'debug_these_tests', self.ENV_DEBUG_THESE_TESTS )
+        self._parse_debug_these_tests()
+
+        log.debug( 'server_env: %s', str( self.as_dict() ) )
+
+    def as_dict( self, attributes=None ):
+        if not attributes:
+            # TODO:?? raise to class scope?
+            attributes = [ 'protocol', 'host', 'port', 'history_id', 'file_dir',
+                           'tool_shed_test_file', 'shed_tools_dict', 'saved_output_dir', 'debug_these_tests' ]
+        this_dict = {}
+        for attr_name in attributes:
+            attr_val = getattr( self, attr_name )
+            this_dict[ attr_name ] = attr_val
+        return this_dict
+
+    def _get_setting_from_config_or_env( self, config_name, env_name, default=None ):
+        """Try to get a setting from (in order):
+        TestEnvironment.config, the os env, or some default (if not False).
+        """
+        config_val = self.config.get( config_name, None )
+        env_val = os.environ.get( env_name, None )
+        return config_val or env_val or default
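+
+    # A hedged sketch of the precedence (config beats env var beats default; assumes the
+    # GALAXY_TEST_* env vars are unset):
+    #   TestEnvironment( { 'host': 'example.org' } ).host  ->  'example.org'  (from config)
+    #   TestEnvironment( {} ).port                         ->  '8080'         (default)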
+
+    def _get_shed_tools_dict( self ):
+        """Read the shed tools from the tool shed test file if given,
+        otherwise an empty dict.
+        """
+        shed_tools_dict = {}
+        if self.tool_shed_test_file:
+            try:
+                with open( self.tool_shed_test_file, 'r' ) as f:
+                    text = f.read()
+                shed_tools_dict = loads( text )
+            except Exception as exc:
+                log.error( 'Error reading tool shed test file "%s": %s', self.tool_shed_test_file, exc, exc_info=True )
+
+        return shed_tools_dict
+
+    def _create_saved_output_dir( self ):
+        """Set up the desired directory to save test output.
+        """
+        if self.saved_output_dir:
+            try:
+                if not os.path.exists( self.saved_output_dir ):
+                    os.makedirs( self.saved_output_dir )
+            except Exception as exc:
+                log.error( 'unable to create saved files directory "%s": %s',
+                    self.saved_output_dir, exc, exc_info=True )
+                self.saved_output_dir = None
+
+    def _parse_debug_these_tests( self, delim=',' ):
+        """Simple parser for the list of test scripts on which to set debug=True.
+        """
+        debug_list = []
+        if self.debug_these_tests:
+            try:
+                debug_list = self.debug_these_tests.split( delim )
+            except Exception as exc:
+                log.error( 'unable to parse debug_these_tests "%s": %s',
+                    self.debug_these_tests, exc, exc_info=True )
+        self.debug_these_tests = debug_list
+
+    @property
+    def url( self ):
+        """Builds and returns the url of the test server.
+        """
+        url = '%s://%s' % ( self.protocol, self.host )
+        if self.port and str( self.port ) != '80':
+            url += ':%s' % ( str( self.port ) )
+        return url
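+
+    # For example, a hedged sketch (again assuming the GALAXY_TEST_* env vars are unset):
+    #   TestEnvironment( { 'port': '8080' } ).url  ->  'http://localhost:8080'
+    #   TestEnvironment( { 'port': '80' } ).url    ->  'http://localhost'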
diff --git a/test/casperjs/spaceghost.js b/test/casperjs/spaceghost.js
new file mode 100644
index 0000000..b656ea3
--- /dev/null
+++ b/test/casperjs/spaceghost.js
@@ -0,0 +1,1295 @@
+/* TODO:
+
+    normalize names of steps to 'then<action>' or with
+    support method chaining pattern
+    move selectors, text to class level (spaceghost.data, module.data)
+    make any callbacks optional (that can be)
+
+    BUGS:
+        bug: filenames in backtrace not bubbling up properly
+        bug: assertStepsRaise used with wait throws more than once
+        trace filename not showing for errors here
+        ?: assertStepsRaise raise errors (all the way) when used in 'casperjs test .'
+
+    Does casperjs_runner:
+        work with fail on first = false
+
+    FEATURE CREEP:
+        assertTooltip( selector, textShouldBe ){
+            hoverover selector
+            assert tooltip
+            assert tooltip text
+            hoverover 0, 0 // clear tooltip
+        }
+        screenshotting on all step.complete (see captureSteps.js)
+        save html/sshots to GALAXY_TEST_SAVE (test_runner)
+
+    Use in test command
+    can we pass the entire test_env (instead of just url) from test_runner to sg?
+*/
+// ===================================================================
+/** Extended version of casper object for use with Galaxy
+ */
+
+// ------------------------------------------------------------------- modules
+var require = patchRequire( require ),
+    Casper = require( 'casper' ).Casper,
+    system = require( 'system' ),
+    fs = require( 'fs' ),
+    utils = require( 'utils' );
+
+// ------------------------------------------------------------------- inheritance
+/** @class An extension of the Casper object with methods and overrides specifically
+ *      for interacting with a Galaxy web page.
+ */
+function SpaceGhost(){
+    // an empty object just to store functions in a prototype for patching onto a casper instance
+}
+
+exports.fromCasper = function fromCasper( casper, options ){
+    "use strict";
+    // patch the sg prototype over the casper instance proto
+    for( var k in SpaceGhost.prototype ){
+        if( SpaceGhost.prototype.hasOwnProperty( k ) ){
+            // monkey patch directly onto the casper instance - we need the prototype
+            casper[ k ] = SpaceGhost.prototype[ k ];
+        }
+    }
+    casper._init( options );
+    return casper;
+};
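+
+// A minimal bootstrap sketch (this mirrors how the test scripts create their instance):
+//      var spaceghost = require( 'spaceghost' ).fromCasper( casper );
+//      spaceghost.start();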
+
+// =================================================================== METHODS / OVERRIDES
+/** String representation */
+SpaceGhost.prototype.toString = function(){
+    var currentUrl = '';
+    try {
+        currentUrl = this.getCurrentUrl();
+    } catch( err ){}
+    return 'SpaceGhost(' + currentUrl + ')';
+};
+
+// ------------------------------------------------------------------- set up
+/** More initialization: cli, event handlers, etc.
+ *  @param {Object} options  option hash
+ *  @private
+ */
+SpaceGhost.prototype._init = function _init( options ){
+    ////console.debug( 'init, options:', JSON.stringify( options, null, 2 ) );
+    //
+    //NOTE: cli will override in-script options
+    this._setOptionsFromCli();
+
+    this.on( 'step.error', function stepErrorHandler( error ){
+        //console.debug( 'step.error: ' + error.name + ', ' + error.message );
+        this.errors.push({ msg: error.message, backtrace: error.stackArray });
+        //if( error.name !== 'AssertionError' ){
+        //    throw error;
+        //}
+    });
+    // save errors for later output
+    //  set this now so ALL errors are processed well (including errors during set up)
+    /** cache of errors that have occurred */
+    this.errors = [];
+    this.on( 'error', function pushErrorToStack( msg, backtrace ){
+        //this.debug( 'adding error to stack: ' + msg + ', trace:' + this.jsonStr( backtrace ) );
+        this.errors.push({ msg: msg, backtrace: backtrace });
+    });
+    this._processCLIArguments();
+    this._setUpEventHandlers();
+
+    /** cache of test failures */
+    this.failures = [];
+    /** cache of test passes */
+    this.passes = [];
+
+    // inject these scripts by default
+    this.options.clientScripts = [
+        //'../../static/scripts/libs/jquery/jquery.js'
+        //...
+    ].concat( this.options.clientScripts );
+    this.debug( 'clientScripts: ' + this.jsonStr( this.options.clientScripts ) );
+
+    this.changeToScriptDir();
+    this._loadModules();
+};
+
+/** Allow CLI arguments to set options if the proper option name is used.
+ *  @example
+ *      casperjs myscript.js --verbose=true --logLevel=debug
+ *  @private
+ */
+SpaceGhost.prototype._setOptionsFromCli = function setOptionsFromCli(){
+    // get and remove any casper options passed on the command line
+    for( var optionName in this.options ){
+        if( this.cli.has( optionName ) ){
+            //console.debug( optionName + ': '
+            //    + '(was) ' + this.options[ optionName ]
+            //    + ', (now) ' + this.cli.get( optionName ) );
+            this.options[ optionName ] = this.cli.get( optionName );
+            this.cli.drop( optionName );
+        }
+    }
+};
+
+/** Change the working directory to that of the current script */
+SpaceGhost.prototype.changeToScriptDir = function changeToScriptDir(){
+    var fs = require( 'fs' ),
+        args = require( 'system' ).args,
+        scriptPathArray = args[4].split( '/' );
+    //console.debug( this.jsonStr( scriptPathArray ) );
+
+    if( scriptPathArray.length > 1 ){
+        scriptPathArray.pop();
+        //console.debug( this.jsonStr( scriptPathArray.join( '/' ) ) );
+        fs.changeWorkingDirectory( scriptPathArray.join( '/' ) );
+    }
+};
+
+// ------------------------------------------------------------------- cli args and options
+/** Set up any SG specific options passed in on the cli.
+ *  @private
+ */
+SpaceGhost.prototype._processCLIArguments = function _processCLIArguments(){
+    var spaceghost = this;
+    //this.debug( 'cli: ' + this.jsonStr( this.cli ) );
+
+    //TODO: init these programmatically
+    //TODO: need to document these
+    var CLI_OPTIONS = {
+        returnJsonOnly  : { defaultsTo: false, flag: 'return-json',    help: 'send output to stderr, json to stdout' },
+        raisePageError  : { defaultsTo: true,  flag: 'page-error',     help: 'raise errors thrown on the page' },
+        errorOnAlert    : { defaultsTo: false, flag: 'error-on-alert', help: 'throw errors when a page calls alert' },
+        failOnAlert     : { defaultsTo: true,  flag: 'fail-on-alert',  help: 'fail a test when a page calls alert' },
+        //screenOnError   : { defaultsTo: false, flag: 'error-screen',   help: 'capture a screenshot on a page error' },
+        //textOnError     : { defaultsTo: false, flag: 'error-text',     help: 'output page text on a page error' },
+        //htmlOnError     : { defaultsTo: false, flag: 'error-html',   help: 'output page html on a page error' }
+        //htmlOnFail      : { defaultsTo: false, flag: 'fail-html',   help: 'output page html on a test failure' },
+        //screenOnFail    : { defaultsTo: false, flag: 'fail-screen',   help: 'capture a screenshot on a test failure' }
+        logNamespace    : { defaultsTo: false,  flag: 'log-namespace', help: 'filter log messages to this namespace' },
+
+        adminUser       : { defaultsTo: null,   flag: 'admin', help: 'JSON string with email and password of admin' }
+    };
+
+    // no switches/hardcoded options:
+    this.options.adminPassword = 'testuser';
+
+    // --url parameter required (the url of the server to test with)
+    if( !this.cli.has( 'url' ) ){
+        this.die( 'Test server URL is required - ' +
+                  'Usage: casperjs <test_script.js> --url=<test_server_url>', 1 );
+    }
+    this.baseUrl = this.cli.get( 'url' );
+
+    //TODO: move these handlers into _setUpEventHandlers
+    // --return-json: suppress all output except for JSON logs, test results, and errors at finish
+    //  this switch allows a testing suite to send JSON data back via stdout (w/o logs, echos interfering)
+    this.options.returnJsonOnly = CLI_OPTIONS.returnJsonOnly.defaultsTo;
+    if( this.cli.has( CLI_OPTIONS.returnJsonOnly.flag ) ){
+        this.options.returnJsonOnly = true;
+
+        this._redirectOutputToStderr();
+        this.test.removeAllListeners( 'tests.complete' );
+
+        // output json on fail-first error
+        this.on( 'error', function outputJSONOnError( msg, backtrace ){
+            if( spaceghost.options.exitOnError ){
+                this._sendStopSignal();
+                this.outputStateAsJson();
+                spaceghost.exit( 1 );
+            }
+        });
+        // non-error finishes/json-output are handled in run() for now
+    }
+
+    //TODO: remove boilerplate
+    // --page-error=false: don't raise errors thrown on the page (default: true)
+    this.options.raisePageError = CLI_OPTIONS.raisePageError.defaultsTo;
+    if( this.cli.has( CLI_OPTIONS.raisePageError.flag ) ){
+        this.options.raisePageError = this.cli.get( CLI_OPTIONS.raisePageError.flag );
+    }
+
+    // --error-on-alert=true: throw an error if the page calls alert (default: false)
+    this.options.errorOnAlert = CLI_OPTIONS.errorOnAlert.defaultsTo;
+    if( this.cli.has( CLI_OPTIONS.errorOnAlert.flag ) ){
+        this.options.errorOnAlert = this.cli.get( CLI_OPTIONS.errorOnAlert.flag );
+    }
+
+    // --fail-on-alert=false: don't fail a test if the page calls alert (default: true)
+    this.options.failOnAlert = CLI_OPTIONS.failOnAlert.defaultsTo;
+    if( this.cli.has( CLI_OPTIONS.failOnAlert.flag ) ){
+        this.options.failOnAlert = this.cli.get( CLI_OPTIONS.failOnAlert.flag );
+    }
+
+    /* not implemented
+    // --error-page: print the casper.debugPage (the page's text) output on an error
+    if( this.cli.has( 'error-page' ) ){
+        this.on( 'page.error', this._saveTextOnErrorHandler );
+
+    // --error-html: print the casper.debugHTML (the page's html) output on an error (mut.exc w error-text)
+    } else if( this.cli.has( 'error-html' ) ){
+        this.on( 'page.error', this._saveHtmlOnErrorHandler );
+    }
+
+    // --error-screen: capture the casper browser screen on an error
+    if( this.cli.has( 'error-screen' ) ){
+        this.on( 'page.error', this._saveScreenOnErrorHandler );
+    }
+
+    // --fail-html: print the casper.debugHTML (the page's html) output on an test failure
+    // --fail-screen: print the casper browser screen output on an test failure
+    */
+
+    // get any fixture data passed in as JSON (e.g. --data='{ "one": 1 }')
+    this.fixtureData = ( this.cli.has( 'data' ) )?( JSON.parse( this.cli.get( 'data' ) ) ):( {} );
+    this.debug( 'fixtureData:' + this.jsonStr( this.fixtureData ) );
+
+    /** only output log messages with the given namespace */
+    this.options.logNamespace = CLI_OPTIONS.logNamespace.defaultsTo;
+    if( this.cli.has( CLI_OPTIONS.logNamespace.flag ) ){
+        this.options.logNamespace = this.cli.get( CLI_OPTIONS.logNamespace.flag );
+        this._setLogNamespaceFilter( this.options.logNamespace );
+    }
+
+    /** email and password JSON string for admin user */
+    this.options.adminUser = CLI_OPTIONS.adminUser.defaultsTo;
+    if( this.cli.has( CLI_OPTIONS.adminUser.flag ) ){
+        this.options.adminUser = JSON.parse( this.cli.get( CLI_OPTIONS.adminUser.flag ) );
+        this.warning( 'Using admin user from CLI: ' + this.jsonStr( this.options.adminUser ) );
+    }
+
+};
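+
+// An illustrative invocation combining the switches above (the url and script name are assumed):
+//      casperjs registration-tests.js --url=http://localhost:8080 --return-json --log-namespace=history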
+
+/** Filter log messages, keeping only those in the given namespace
+ *  @param {String} namespace   the namespace to filter log msgs to
+ *  @private
+ */
+SpaceGhost.prototype._setLogNamespaceFilter = function _setLogNamespaceFilter( namespace ){
+    var regex = RegExp( '\\[' + namespace + '\\]' );
+    // this will fail if there's [namespace] in the actual message - NBD
+    this.setFilter( 'log.message', function( message ) {
+        return ( message.match( regex ) )?( message ):( ' ' );
+    });
+};
+
+/** Suppress the normal output from the casper object (echo, errors)
+ *  @private
+ */
+SpaceGhost.prototype._redirectOutputToStderr = function _redirectOutputToStderr(){
+    // currently (1.0) the only way to suppress test pass/fail messages
+    //  (no way to re-route to log either - circular)
+    var spaceghost = this;
+    this.echo = function( msg ){
+        spaceghost.stderr( msg );
+    };
+
+    // clear the casper listener that outputs formatted error messages
+    this.removeListener( 'error', this.listeners( 'error' )[0] );
+    //this.removeListener( 'error', this.listeners( 'error' )[1] );
+};
+
+/** Outputs logs, test results and errors in a single JSON formatted object to the console.
+ */
+SpaceGhost.prototype.outputStateAsJson = function outputStateAsJson(){
+    var returnedJSON = {
+        logs        : this.result,
+        passes      : this.passes,
+        failures    : this.failures,
+        errors      : this.errors
+    };
+    // use phantomjs console since echo can't be used (suppressed - see init)
+    console.debug( JSON.stringify( returnedJSON, null, 2 ) );
+};
+
+
+// ------------------------------------------------------------------- event handling
+/** Sets up event handlers.
+ *  @private
+ */
+SpaceGhost.prototype._setUpEventHandlers = function _setUpEventHandlers(){
+    //console.debug( '_setUpEventHandlers' );
+    var spaceghost = this;
+
+    // ........................ page errors
+    this.on( 'page.error',  this._pageErrorHandler );
+    //this.on( 'load.failed', this._loadFailedHandler );
+
+    // ........................ page info/debugging
+    this.on( 'remote.alert',    this._alertHandler );
+    //this.on( 'remote.message',       function( msg ){ this.debug( 'remote: ' + msg ); });
+    //this.on( 'navigation.requested', function( url ){ this.debug( 'navigation: ' + url ); });
+
+    // ........................ timeouts
+    this._setUpTimeoutHandlers();
+
+    // ........................ test results
+    this.test.on( "fail", function( failure ){
+        spaceghost.failures.push( failure );
+    });
+    this.test.on( "success", function( pass ){
+        spaceghost.passes.push( pass );
+    });
+};
+
+//note: using non-anon fns to allow removal if needed
+
+/** 'load failed' Event handler for failed page loads that only records to the log
+ *  @private
+ */
+SpaceGhost.prototype._loadFailedHandler = function _loadFailedHandler( object ){
+    this.error( 'load.failed: ' + this.jsonStr( object ) );
+    //TODO: throw error?
+};
+
+/** 'page.error' Event handler that re-raises as PageError
+ *      NOTE: this has some special handling for DOM exc 12 which some casper selectors are throwing
+ *          (even tho the selector still works)
+ *  @throws {PageError} (with original's msg and backtrace)
+ *  @private
+ */
+SpaceGhost.prototype._pageErrorHandler = function _pageErrorHandler( msg, backtrace ){
+    // add a page error handler to catch page errors (what we're most interested with here)
+    //  normally, casper seems to let these pass unhandled
+
+    //TODO:!! lots of casper selectors are throwing this - even tho they still work
+    if( msg === 'SYNTAX_ERR: DOM Exception 12: An invalid or illegal string was specified.' ){
+        void( 0 ); // no op
+
+    //TODO: these are from requirejs erroring (potentially) when navigating away from a page still loading scripts
+    } else if( msg.indexOf( 'http://requirejs.org/docs/errors.html#scripterror' ) !== -1 ){
+        this.warn( 'requirejs script error:  ' + msg.replace( /\n/g, '  ' ) );
+
+    } else if( this.options.raisePageError ){
+        //console.debug( '(page) Error: ' + msg );
+        //this.bypassOnError = true;
+
+        // ugh - these bounce back and forth between here and phantom.page.onError
+        //  if we don't do this replace you end up with 'PageError: PageError: PageError: ...'
+        // I haven't found a great way to prevent the bouncing
+        msg = msg.replace( 'PageError: ', '' );
+        throw new PageError( msg, backtrace );
+    }
+};
+
+/** 'alert' Event handler that raises an AlertError with the alert message
+ *  @throws {AlertError} (the alert message)
+ *  @private
+ */
+SpaceGhost.prototype._alertHandler = function _alertHandler( message ){
+    //TODO: this still isn't working well...
+
+    // casper info level already outputs these
+    //this.warning( this + '(page alert)\n"' + message + '"' );
+    var ALERT_MARKER = '(page alert) ';
+
+    // either throw an error or fail the test
+    //console.debug( 'this.options.errorOnAlert: ' + this.options.errorOnAlert );
+    this.stderr( 'this.options.failOnAlert: ' + this.options.failOnAlert );
+    if( this.options.errorOnAlert ){
+        throw new PageError( ALERT_MARKER + message );
+
+    } else if( this.options.failOnAlert ){
+        //this.test.fail( ALERT_MARKER + message );
+        //this.test.fail();
+        this.test.assert( false, 'found alert message' );
+        //this.stderr( 'this.options.failOnAlert: ' + this.options.failOnAlert );
+    }
+};
+
+/** 'error' Event handler that saves html from the errored page.
+ *  @private
+ */
+SpaceGhost.prototype._saveHtmlOnErrorHandler = function _saveHtmlOnErrorHandler( msg, backtrace ){
+    // needs to output to a file in GALAXY_SAVE
+    //this.debugHTML();
+};
+
+/** 'error' Event handler that saves text from the errored page.
+ *  @private
+ */
+SpaceGhost.prototype._saveTextOnErrorHandler = function _saveTextOnErrorHandler( msg, backtrace ){
+    // needs to output to a file in GALAXY_SAVE
+    //this.debugPage();
+};
+
+/** 'error' Event handler that saves a screenshot of the errored page.
+ *  @private
+ */
+SpaceGhost.prototype._saveScreenOnErrorHandler = function _saveScreenOnErrorHandler( msg, backtrace ){
+    // needs to output to a pic in GALAXY_SAVE
+    //var filename = ...??
+    //?? this.getCurrentUrl(), this.getCurrent
+    //this.capture( filename );
+};
+
+/** 'timeout' Event handler for step/casper timeouts - raises as PageError
+ *  @throws {PageError} Timeout occurred
+ *  @private
+ */
+SpaceGhost.prototype._timeoutHandler = function _timeoutHandler(){
+    //msg = msg.replace( 'PageError: ', '' );
+    throw new PageError( 'Timeout occurred' );
+};
+
+/** By default, Casper dies on timeouts - which kills our runner. Throw errors instead.
+ *  @private
+ */
+SpaceGhost.prototype._setUpTimeoutHandlers = function _setUpTimeoutHandlers(){
+    this.options.onStepTimeout = function _onStepTimeout( timeout, stepNum ){
+        throw new PageError( "Maximum step execution timeout exceeded for step " + stepNum );
+    };
+    this.options.onTimeout = function _onTimeout( timeout ){
+        throw new PageError( "Script timeout reached: " + timeout );
+    };
+    this.options.onWaitTimeout = function _onWaitTimeout( timeout ){
+        throw new PageError( "Wait timeout reached: " + timeout );
+    };
+};
+
+// ------------------------------------------------------------------- sub modules
+/** Load sub modules (similar to casperjs.test)
+ *  @requires User              modules/user.js
+ *  @requires Tools             modules/tools.js
+ *  @requires HistoryPanel      modules/historypanel.js
+ *  @requires HistoryOptions    modules/historyoptions.js
+ *  @private
+ */
+SpaceGhost.prototype._loadModules = function _loadModules(){
+    this.user           = require( './modules/user'  ).create( this );
+    this.tools          = require( './modules/tools' ).create( this );
+    this.historypanel   = require( './modules/historypanel' ).create( this );
+    this.historyoptions = require( './modules/historyoptions' ).create( this );
+    this.api            = require( './modules/api' ).create( this );
+};
+
+// =================================================================== PAGE CONTROL
+// ------------------------------------------------------------------- overrides
+/** An override of casper.start for additional set up.
+ *      (Currently only used to change viewport)
+ *  @see Casper#start
+ */
+SpaceGhost.prototype.start = function start(){
+    var returned = Casper.prototype.start.apply( this, arguments );
+    this.viewport( 1024, 728 );
+    return returned;
+};
+
+/** An override of casper.open for additional page control.
+ *      (Currently only used to change language headers)
+ *  @see Casper#open
+ */
+SpaceGhost.prototype.open = function open(){
+    //TODO: this can be moved to start (I think...?)
+    //!! override bc phantom has its lang as 'en-US,*' and galaxy doesn't handle the '*' well (server error)
+    this.page.customHeaders = { 'Accept-Language': 'en-US' };
+    return Casper.prototype.open.apply( this, arguments );
+};
+
+/** Send a signal that we're done - used by py wrapper subprocess.
+ *  @private
+ */
+SpaceGhost.prototype._sendStopSignal = function _sendStopSignal(){
+    this.echo( '# Stopping' );
+};
+
+/** An override to provide json output and more informative error codes.
+ *      Exits with 2 if a test has failed.
+ *      Exits with 1 if some error has occurred.
+ *      Exits with 0 if all tests passed.
+ */
+SpaceGhost.prototype.run = function run( onComplete, time ){
+    var spaceghost = this,
+        oldFn = this.test.done;
+    this.test.done = function(){
+        oldFn.call( spaceghost.test );
+    };
+    var new_onComplete = function(){
+        onComplete.call( this );
+        //var returnCode = ( this.test.getFailures() )?( 2 ):( 0 );
+        var returnCode = ( this.failures.length )?( 2 ):( 0 );
+
+        // if --return-json is used: output json and exit
+        //NOTE: used by the test runner to gather JSON test info from stdout
+        if( this.options.returnJsonOnly ){
+            // echo a string to indicate that tests are complete (used in casperjs_runner.py to stop process)
+            this._sendStopSignal();
+            this.outputStateAsJson();
+            this.exit( returnCode );
+
+        // otherwise, render the nice casper output and exit
+        //} else {
+        //    this.test.renderResults( true, returnCode );
+        }
+    };
+    Casper.prototype.run.call( this, new_onComplete, time );
+    //Casper.prototype.run.call( this, onComplete, time );
+};
+
+// ------------------------------------------------------------------- home page
+/** Wait for the homepage/index/Analyze Data to load fully.
+ */
+SpaceGhost.prototype.openHomePage = function openHomePage( then, delay ){
+//TODO: delay doesn't seem to work
+    this.thenOpen( this.baseUrl, function _openHomePage(){
+        this.waitFor(
+            function waitForCheck(){
+                return this.homePageIsLoaded();
+            },
+            then,
+            function openHomePageTimeout(){
+                throw new GalaxyError( 'Homepage timed out - are you sure your instance is running?' );
+            },
+            delay
+        );
+    });
+    return this;
+};
+
+/** Check for visibility of main home page elements: masthead, tool menu, history panel.
+ */
+SpaceGhost.prototype.homePageIsLoaded = function homePageIsLoaded(){
+    //this.debug( 'homePageIsLoaded: ' + [
+    //    this.visible( '#masthead' ),
+    //    this.visible( this.data.selectors.toolMenu.container ),
+    //    this.visible( '#current-history-panel' )].join( ', ' ) );
+    return ( this.visible( '#masthead' )
+          && this.visible( this.data.selectors.toolMenu.container )
+          && this.visible( '#current-history-panel' ) );
+};
+
+// ------------------------------------------------------------------- try step
+/** Install a function as an error handler temporarily, run a function with steps, then remove the handler.
+ *      A rough stand-in for try catch with steps.
+ *      catchFn will be passed the caught error object.
+ *  @param {Function} stepsFn   a function that puts casper steps on the stack (then, thenOpen, etc.)
+ *  @param {Function} catchFn   a function called with the error if one was caught
+ */
+SpaceGhost.prototype.tryStepsCatch = function tryStepsCatch( stepsFn, catchFn ){
+    // create three steps: 1) set up the new error handler, 2) try the fn, 3) check for errors and remove the handler
+    var originalExitOnError,
+        originalErrorHandlers = [],
+        errorCaught,
+        recordError = function( error ){
+            errorCaught = error;
+        };
+
+    // don't bail on the error (but preserve the option), uninstall other handlers,
+    //  and install a handler that simply records the error
+    this.then( function replaceHandlers(){
+        originalExitOnError = this.options.exitOnError;
+        this.options.exitOnError = false;
+        originalErrorHandlers = this.popAllListeners( 'step.error' );
+        this.on( 'step.error', recordError );
+    });
+
+    // try the step...
+    this.then( stepsFn );
+    //TODO: this doesn't work well with wait for (see upload-tests.js)
+    //  possibly combine above and below?
+
+    this.then( function catchWrapper(){
+        // remove that listener either way, restore original handlers, and restore the bail option
+        this.removeListener( 'step.error', recordError );
+        this.addListeners( 'step.error', originalErrorHandlers );
+        this.options.exitOnError = originalExitOnError;
+        // ...and if an error was recorded call the catch with the info
+        if( errorCaught ){
+            catchFn.call( this, errorCaught );
+        }
+    });
+};
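+
+// A minimal usage sketch for tryStepsCatch (the '#bogus' selector is hypothetical):
+//     spaceghost.tryStepsCatch( function steps(){
+//         this.then( function(){ this.click( '#bogus' ); });
+//     }, function catchFn( error ){
+//         this.warning( 'caught: ' + error.message );
+//     });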
+
+
+// ------------------------------------------------------------------- misc
+/** Hover over an element.
+ *      NOTE: not for use with iframes (main, tool, history) - they need to re-calc
+ *      for the iframe bounds and should be implemented in their own modules
+ *  @param {String} selector        a css or xpath selector for the element to hover over
+ *  @param {Function} whenHovering  a function to call after the hover (will be scoped to spaceghost)
+ */
+SpaceGhost.prototype.hoverOver = function hoverOver( selector, whenHovering ){
+    var elementInfo = this.getElementInfo( selector );
+    this.page.sendEvent( 'mousemove', elementInfo.x + 1, elementInfo.y + 1 );
+    if( whenHovering ){ whenHovering.call( this ); }
+    return this;
+};
+
+/** Wait for a navigation request then call a function.
+ *      NOTE: uses string indexOf - doesn't play well with urls like [ 'history', 'history/bler' ]
+ *  @param {String} urlToWaitFor    the url to wait for (rel. to spaceghost.baseUrl)
+ *  @param {Function} then          the function to call after the nav request
+ *  @param {Function} timeoutFn     the function to call on timeout (optional)
+ *  @param {Integer} waitMs         manual setting of ms to wait (optional)
+ */
+SpaceGhost.prototype.waitForNavigation = function waitForNavigation( urlToWaitFor, then, timeoutFn, waitMs ){
+    return this.waitForMultipleNavigation( [ urlToWaitFor ], then, timeoutFn, waitMs );
+};
+
+/** Wait for multiple navigation requests, then call a function.
+ *      NOTE: waitFor time is set to <number of urls> * options.waitTimeout
+ *      NOTE: uses string indexOf - doesn't play well with urls like [ 'history', 'history/bler' ]
+ *  @param {String[]} urlsToWaitFor the relative urls to wait for
+ *  @param {Function} then          the function to call after the nav request
+ *  @param {Function} timeoutFn     the function to call on timeout (optional)
+ *  @param {Integer} waitMs         manual setting of ms to wait (optional)
+ */
+SpaceGhost.prototype.waitForMultipleNavigation = function waitForMultipleNavigation( urlsToWaitFor,
+        then, timeoutFn, waitMs ){
+    waitMs = waitMs || ( this.options.waitTimeout * urlsToWaitFor.length );
+
+    this.info( 'waiting for navigation: ' + this.jsonStr( urlsToWaitFor ) + ', timeout after: ' + waitMs );
+    function urlMatches( urlToMatch, url ){
+        return ( url.indexOf( spaceghost.baseUrl + '/' + urlToMatch ) !== -1 );
+    }
+
+    function catchNavReq( url ){
+        this.debug( 'nav.req: ' + url );
+        for( var i=( urlsToWaitFor.length - 1 ); i>=0; i -= 1 ){
+            //this.debug( '\t checking: ' + urlsToWaitFor[i] );
+            if( urlMatches( urlsToWaitFor[i], url ) ){
+                this.info( 'Navigation (' + urlsToWaitFor[i] + ') found: ' + url );
+                urlsToWaitFor.splice( i, 1 );
+            }
+        }
+        //this.debug( 'urlsToWaitFor: ' + this.jsonStr( urlsToWaitFor ) );
+    }
+    this.on( 'navigation.requested', catchNavReq );
+
+    this.waitFor(
+        function checkForNav(){
+            if( urlsToWaitFor.length === 0 ){
+                this.removeListener( 'navigation.requested', catchNavReq );
+                return true;
+            }
+            return false;
+        },
+        function callThen(){
+            if( utils.isFunction( then ) ){ then.call( this ); }
+        },
+        function timeout(){
+            this.removeListener( 'navigation.requested', catchNavReq );
+            if( utils.isFunction( timeoutFn ) ){ timeoutFn.call( this, urlsToWaitFor ); }
+        },
+        waitMs
+    );
+    return this;
+};
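+
+// Usage sketch (the relative url is hypothetical):
+//     spaceghost.waitForNavigation( 'user/login', function then(){
+//         this.debug( 'login request seen' );
+//     });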
+
+
+// ------------------------------------------------------------------- iframes, damnable iframes
+/** Version of Casper#withFrame for the main iframe.
+ *  @param {Function} then  function called when in the frame
+ */
+SpaceGhost.prototype.withMainPanel = function withMainPanel( then ){
+    return this.withFrame( this.data.selectors.frames.main, then );
+};
+
+/** Jumps into the given frame, executes fn, and jumps back to the original frame.
+ *      NOTE: this doesn't use steps like casper's withFrame but uses phantom's switchTo[Main]Frame,
+ *      so you can safely return values from fn
+ *  @param {Selector} frame the selector for the frame to jump to (use 'top' to jump to top frame)
+ *  @param {Function} fn    function called when in the frame
+ *  @returns {Any} the return value of fn
+ */
+SpaceGhost.prototype.jumpToFrame = function jumpToFrame( frame, fn ){
+    //TODO: phantom apparently gives the main frame an empty frameName ('')
+    var origFrameName = this.page.frameName || 'top';
+    //(??) if we're already there...
+    if( origFrameName === frame ){ return fn.call( this ); }
+
+    if( origFrameName ){
+        // if there's a frame name we assume we're in some child frame,
+        //  we need to move up before moving into the new frame
+        this.page.switchToMainFrame();
+    }
+    if( frame !== 'top' ){ this.page.switchToFrame( frame ); }
+    var returned = fn.call( this );
+
+    // move back into main, then into the orig child frame if given
+    if( frame !== 'top' ){ this.page.switchToMainFrame(); }
+    if( origFrameName ){
+        this.page.switchToFrame( origFrameName );
+    }
+    return returned;
+};
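+
+// Usage sketch - grab a value from inside the main iframe without adding steps:
+//     var mainTitle = spaceghost.jumpToFrame( 'galaxy_main', function(){
+//         return this.getTitle();
+//     });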
+
+/** Jumps into the main frame, executes fn, and jumps back to the original frame.
+ *  @param {Function} fn    function called when in the frame
+ *  @returns {Any} the return value of fn
+ */
+SpaceGhost.prototype.jumpToMain = function jumpToMain( fn ){
+    return this.jumpToFrame( this.data.selectors.frames.main, fn );
+};
+
+/** Jumps into the top frame, executes fn, and jumps back to the original frame.
+ *  @param {Function} fn    function called when in the frame
+ *  @returns {Any} the return value of fn
+ */
+SpaceGhost.prototype.jumpToTop = function jumpToTop( fn ){
+    return this.jumpToFrame( 'top', fn );
+};
+
+
+// =================================================================== TESTING
+//TODO: form fill doesn't work as casperjs would want it - often a button -> controller url
+//TODO: saveScreenshot (to GALAXY_TEST_SAVE)
+//TODO: saveHtml (to GALAXY_TEST_SAVE)
+
+/** Checks whether fn raises an error with a message that contains a given string.
+ *      NOTE: DOES NOT work with steps. @see SpaceGhost#assertStepsRaise
+ *  @param {Function} testFn        a function that may throw an error
+ *  @param {String} errMsgContains  some portion of the correct error msg
+ *  @private
+ */
+SpaceGhost.prototype._raises = function _raises( testFn, errMsgContains ){
+    var failed = false;
+    try {
+        testFn.call( this );
+    } catch( err ){
+        if( err.message.indexOf( errMsgContains ) !== -1 ){
+            failed = true;
+
+        // re-raise other, non-searched-for errors
+        } else {
+            throw err;
+        }
+    }
+    return failed;
+};
+
+/** Simple assert raises.
+ *      NOTE: DOES NOT work with steps. @see SpaceGhost#assertStepsRaise
+ *  @param {Function} testFn        a function that may throw an error
+ *  @param {String} errMsgContains  some portion of the correct error msg
+ *  @param {String} msg             assertion message to display
+ */
+SpaceGhost.prototype.assertRaises = function assertRaises( testFn, errMsgContains, msg ){
+    return this.test.assert( this._raises( testFn, errMsgContains ), msg  );
+};
+
+/** Simple assert does not raise.
+ *      NOTE: DOES NOT work with steps. @see SpaceGhost#assertStepsRaise
+ *  @param {Function} testFn        a function that may throw an error
+ *  @param {String} errMsgContains  some portion of the correct error msg
+ *  @param {String} msg             assertion message to display
+ */
+SpaceGhost.prototype.assertDoesntRaise = function assertDoesntRaise( testFn, errMsgContains, msg ){
+    return this.test.assert( !this._raises( testFn, errMsgContains ), msg  );
+};
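+
+// Usage sketch:
+//     spaceghost.assertRaises( function(){
+//         throw new Error( 'bad mojo' );
+//     }, 'bad mojo', 'assertRaises found the message' );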
+
+/** Casper has an (undocumented?) skip-test feature. This is a convenience wrapper for that.
+ */
+SpaceGhost.prototype.skipTest = function skipTest( msg ){
+    this.warn( 'Skipping test. ' + msg );
+    //throw this.test.SKIP_MESSAGE;
+};
+
+/** Test helper - within frame, assert selector, and assert text in selector
+ *  @param {CasperJS selector} selector     what element in which to search for the text
+ *  @param {String} text                    what text to search for
+ *  @param {String} frame                   frame selector (generally the frame name) in which to search (defaults to top)
+ */
+SpaceGhost.prototype.assertSelectorAndTextInFrame = function assertSelectorAndTextInFrame( selector, text, frame ){
+    var spaceghost = this;
+    function assertSelectorAndText( selector, text ){
+        spaceghost.test.assertExists( selector,
+            format( "found '%s' in %s", selector, frame ) );
+        spaceghost.test.assertSelectorHasText( selector, text,
+            format( "%s contains '%s'", selector, text ) );
+    }
+    if( frame ){
+        this.withFrame( frame, function(){
+            assertSelectorAndText( selector, text );
+        });
+    } else {
+        assertSelectorAndText( selector, text );
+    }
+};
+
+/** Test helper - assert selector exists, is visible, and has text
+ *  @param {CasperJS selector} selector     what element in which to search for the text
+ *  @param {String} text                    what text to search for
+ *  @param {String} msg                     assertion message to display
+ */
+SpaceGhost.prototype.assertVisibleWithText = function assertVisibleWithText( selector, text, msg ){
+    var visible = this.test.casper.visible( selector ),
+        hasText = this.test.casper.fetchText( selector ).indexOf( text ) !== -1;
+    this.test.assert( visible && hasText, msg );
+};
+
+/** Test helper - within frame, assert errormessage, and assert text in errormessage
+ *      *message is a common UI feedback motif in Galaxy (often displayed in the main panel)
+ *  @param {String} message     what the message should contain
+ *  @param {String} frame       frame selector (generally the frame name) in which to search for selector
+ *      (defaults to 'galaxy_main')
+ *  @param {CasperJS selector} messageSelector what element in which to search for the text
+ *      (defaults to '.errormessage')
+ */
+SpaceGhost.prototype.assertErrorMessage = function assertErrorMessage( message, frame, messageSelector ){
+    messageSelector = messageSelector || this.data.selectors.messages.error;
+    frame = frame || this.data.selectors.frames.main;
+    this.assertSelectorAndTextInFrame( messageSelector, message, frame );
+};
+
+/** Assert that stepsFn (which contains casper.then or some other casper step function) raises an error with
+ *      a msg that contains some text (msgContains).
+ *  @param {String} msgContains     some portion of the correct error msg
+ *  @param {Function} stepsFn       a function that puts casper steps on the stack (then, thenOpen, etc.)
+ */
+SpaceGhost.prototype.assertStepsRaise = function assertStepsRaise( msgContains, stepsFn, removeOtherListeners ){
+    // casper provides an assertRaises but this doesn't work well with steps
+    //TODO:  *  @param {Boolean} removeOtherListeners option to remove other listeners while this fires
+    var spaceghost = this;
+    function testTheError( errorCaught ){
+        if( errorCaught.message.indexOf( msgContains ) !== -1 ){
+            spaceghost.test.pass( 'Raised correct error: ' + errorCaught.message );
+        } else {
+            throw errorCaught;
+        }
+    }
+    this.tryStepsCatch( stepsFn, testTheError );
+};
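+
+// Usage sketch (the selector and expected message fragment are hypothetical):
+//     spaceghost.assertStepsRaise( 'element not found', function steps(){
+//         this.then( function(){ this.click( '#not-there' ); });
+//     });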
+
+/** Assert that a function causes a navigation request with (at least partially) the given url.
+ *      NOTE: _should_ play well with steps (e.g. then, thenOpen, etc.)
+ *  @param {String} url                 some portion of the expected url for the nav request
+ *  @param {String} message             the assertion message
+ *  @param {Function} fnThatRequests    a function that causes a navigation request (e.g. click a link)
+ */
+SpaceGhost.prototype.assertNavigationRequested = function assertNavigationRequested( expectedUrl, message,
+                                                                                     fnThatRequests ){
+    var requested = false;
+    function captureNavReq( url, navigationType, navigationLocked, isMainFrame ){
+        this.debug( 'Checking navigation.requested for url: ' + expectedUrl );
+        // use || here to handle multiple requests, if any one url works -> test will pass
+        requested = requested || ( url.indexOf( expectedUrl ) !== -1 );
+    }
+    this.then( function(){
+        this.on( 'navigation.requested', captureNavReq );
+    });
+    this.then( function(){
+        fnThatRequests.call( this );
+    });
+    this.then( function(){
+        this.removeListener( 'navigation.requested', captureNavReq );
+        this.test.assert( requested, message );
+    });
+};
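+
+// Usage sketch (url and selector are hypothetical):
+//     spaceghost.assertNavigationRequested( '/user/logout', 'logout was requested', function(){
+//         this.click( 'a#logout' );
+//     });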
+
+/** Assert that a given string (toSearch) contains some given string (searchFor).
+ *  @param {String} toSearch    the string to search
+ *  @param {String} searchFor   the string to search for
+ *  @param {String} msg         assertion msg to display
+ */
+SpaceGhost.prototype.assertTextContains = function assertTextContains( toSearch, searchFor, msg ){
+    this.test.assert( toSearch.indexOf( searchFor ) !== -1, msg );
+};
+
+/** Assert that a given element has a given class.
+ *  @param {CasperJS selector} selector what element to test
+ *  @param {String} className  the class to test for (classes passed in with a leading '.' will have it trimmed)
+ */
+SpaceGhost.prototype.assertHasClass = function assertHasClass( selector, className, msg ){
+    className = ( className[0] === '.' )?( className.slice( 1 ) ):( className );
+    msg = msg || 'selector "' + selector + '" has class: "' + className + '"';
+    var classes = this.getElementAttribute( selector, 'class' );
+    this.test.assert( classes.indexOf( className ) !== -1, msg );
+};
+
+/** Assert that a given element doesn't have a given class.
+ *  @param {CasperJS selector} selector what element to test
+ *  @param {String} className  the class to test for (classes passed in with a leading '.' will have it trimmed)
+ */
+SpaceGhost.prototype.assertDoesntHaveClass = function assertDoesntHaveClass( selector, className, msg ){
+    className = ( className[0] === '.' )?( className.slice( 1 ) ):( className );
+    msg = msg || 'selector "' + selector + '" has class: "' + className + '"';
+    var classes = this.getElementAttribute( selector, 'class' );
+    this.test.assert( classes.indexOf( className ) === -1, msg );
+};
+
+/** Return true if object has all keys in keysArray (useful in API testing of return values).
+ *  @param {Object} object       the object to test
+ *  @param {String[]} keysArray  an array of expected keys
+ */
+SpaceGhost.prototype.hasKeys = function hasKeys( object, keysArray ){
+    if( !utils.isObject( object ) ){ return false; }
+    for( var i=0; i<keysArray.length; i += 1 ){
+        if( !object.hasOwnProperty( keysArray[i] ) ){
+            this.debug( 'missing key: ' + keysArray[i] );
+            return false;
+        }
+    }
+    return true;
+};
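+
+// Usage sketch (builds on the api module loaded in _loadModules):
+//     var history = spaceghost.api.histories.index()[0];
+//     spaceghost.test.assert( spaceghost.hasKeys( history, [ 'id', 'name' ] ),
+//         'history has id and name' );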
+
+/** Returns count of keys in object. */
+SpaceGhost.prototype.countKeys = function countKeys( object ){
+    if( !utils.isObject( object ) ){ return 0; }
+    var count = 0;
+    for( var key in object ){
+        if( object.hasOwnProperty( key ) ){ count += 1; }
+    }
+    return count;
+};
+
+
+// =================================================================== CONVENIENCE
+/** Wraps casper.getElementInfo in try, returning null if element not found instead of erroring.
+ *  @param {String} selector    css or xpath selector for the element to find
+ *  @returns {Object|null}      element info if found, null if not
+ */
+SpaceGhost.prototype.elementInfoOrNull = function elementInfoOrNull( selector ){
+    var found = null;
+    try {
+        found = this.getElementInfo( selector );
+    } catch( err ){}
+    return found;
+};
+
+/** Wraps casper.click in try to prevent error if element isn't found
+ *  @param {String} selector    css or xpath selector for the element to find
+ *  @returns {Boolean}          true if the element was found and clicked, false (instead of erroring) if not
+ */
+SpaceGhost.prototype.tryClick = function tryClick( selector ){
+    var done = false;
+    try {
+        this.click( selector );
+        done = true;
+    } catch( err ){}
+    return done;
+};
+
+// =================================================================== GALAXY CONVENIENCE
+
+
+// =================================================================== MISCELLANEOUS
+/** Override echo to not print empty lines (only way to do log filtering)
+ *  @param {String} msg    the msg to output
+ *  @param {String} style  the casper style to use
+ */
+SpaceGhost.prototype.echo = function echo( msg, style ){
+    if( msg.trim() ){
+        Casper.prototype.echo.call( this, msg, style );
+    }
+};
+
+/** Override capture to save to environ: GALAXY_TEST_SAVE (or passed in from CLI)
+ *  @param {String} filename    the image filename
+ */
+SpaceGhost.prototype.capture = function capture( filename, clipRect_or_selector ){
+    //TODO: override with saved output dir
+    if( clipRect_or_selector && ( !utils.isClipRect( clipRect_or_selector ) ) ){
+        return this.captureSelector( filename, clipRect_or_selector );
+    }
+    return Casper.prototype.capture.apply( this, arguments );
+};
+
+/** Capture a series of screenshots with a delay in between.
+ *  @param {String} filepath
+ *  @param {String} filename
+ *  @param {String} ext
+ *  @param {Integer} count
+ *  @param {Integer} delay
+ */
+SpaceGhost.prototype.captureProgression = function captureProgression( filepath, filename, ext, count, delay ){
+    if( !count ){ return this; }
+    var spaceghost = this,
+        interval = setInterval( function(){
+            var imageName = filepath + filename + '.' + count + '.' + ext;
+            spaceghost.capture( imageName );
+            count -= 1;
+            if( count <= 0 ){ clearInterval( interval ); }
+        }, delay );
+    return this;
+};
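+
+// Usage sketch - five screenshots, one second apart (path and names are hypothetical):
+//     spaceghost.captureProgression( './sshots/', 'render', 'png', 5, 1000 );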
+
+/** Pop all handlers for eventName from casper and return them in order.
+ *  @param {String} eventName   the name of the event from which to remove handlers
+ *  @returns {Function[]}       the array of functions no longer bound to the event
+ */
+SpaceGhost.prototype.popAllListeners = function popAllListeners( eventName ){
+    var returnedListeners = this.listeners( eventName );
+    this.removeAllListeners( eventName );
+    return returnedListeners;
+};
+
+/** Add the given list of handler functions to the listener for eventName in order.
+ *  @param {String} eventName   the name of the event to which to add handlers
+ *  @param {Function[]} handlerArray an array of event handler functions to add
+ */
+SpaceGhost.prototype.addListeners = function addListeners( eventName, handlerArray ){
+    for( var i=0; i<handlerArray.length; i++ ){
+        this.addListener( eventName, handlerArray[i] );
+    }
+};
+
+/** Send a message to stderr using the phantom system module.
+ *  @param {String} msg the message to output
+ */
+SpaceGhost.prototype.stderr = function( msg ){
+    if( msg.trim() ){
+        system.stderr.writeLine( msg );
+    }
+};
+
+// ------------------------------------------------------------------- convenience logging funcs
+/** log using level = 'debug' and default namespace = 'spaceghost'
+ */
+SpaceGhost.prototype.debug = function( msg, namespace ){
+    namespace = namespace || 'spaceghost';
+    this.log( msg, 'debug', namespace );
+};
+
+/** log using level = 'info' and default namespace = 'spaceghost'
+ */
+SpaceGhost.prototype.info = function( msg, namespace ){
+    namespace = namespace || 'spaceghost';
+    this.log( msg, 'info', namespace );
+};
+
+/** log using level = 'warning' and default namespace = 'spaceghost'
+ */
+SpaceGhost.prototype.warning = function( msg, namespace ){
+    namespace = namespace || 'spaceghost';
+    this.log( msg, 'warning', namespace );
+};
+
+/** log using level = 'error' and default namespace = 'spaceghost'
+ */
+SpaceGhost.prototype.error = function( msg, namespace ){
+    namespace = namespace || 'spaceghost';
+    this.log( msg, 'error', namespace );
+};
+
+/** log despite logLevel settings, unless returnJsonOnly is set
+ */
+SpaceGhost.prototype.out = function( msg, namespace ){
+    namespace = namespace || 'spaceghost';
+    if( !this.options.returnJsonOnly ){
+        console.debug( msg );
+    }
+};
+
+// ------------------------------------------------------------------- debugging
+/** JSON formatter
+ */
+SpaceGhost.prototype.jsonStr = function( obj ){
+    return JSON.stringify( obj, null, 2 );
+};
+
+/** output the JSON of the selector (or null if not found) to debug level
+ */
+SpaceGhost.prototype.debugElement = function debugElement( selector ){
+    this.debug( selector + ':\n' + this.jsonStr( this.elementInfoOrNull( selector ) ) );
+};
+
+/** return a more limited version of a Casper ElementInfo object.
+ *  @param {Casper ElementInfo} info   the Casper ElementInfo object to simplify
+ *  @returns {Object} of the form { attributes: <attributes>, text: <text> }
+ */
+SpaceGhost.prototype.quickInfo = function quickInfo( info ){
+    return { attributes: info.attributes, text: info.text };
+};
+
+/** Debug SG itself
+ */
+SpaceGhost.prototype.debugMe = function(){
+    console.debug( 'options:\n' + this.jsonStr( this.options ) );
+    console.debug( 'cli:\n' + this.jsonStr( this.cli ) );
+};
+
+/** Get the last error on the stack.
+ *  @returns {Error} the last error
+ */
+SpaceGhost.prototype.lastError = function(){
+    return this.errors[( this.errors.length - 1 )];
+};
+
+// ------------------------------------------------------------------- file system
+/** Load and parse a JSON file into an object.
+ *  @param {String} filepath     filepath relative to the current script
+ *  @returns {Object} the object parsed
+ */
+SpaceGhost.prototype.loadJSONFile = function loadJSONFile( filepath ){
+    //precondition: filepath is relative to script dir
+    return JSON.parse( fs.read( filepath ) );
+};
+
+/** Write an object to a JSON file.
+ *  @param {String} filepath     filepath relative to the current script
+ *  @param {Object} object       the object to write
+ *  @param {String} mode         'w' for a new file, 'a' for append
+ */
+SpaceGhost.prototype.writeJSONFile = function writeJSONFile( filepath, object, mode ){
+    mode = mode || 'w';
+    //precondition: filepath is relative to script dir
+    return fs.write( filepath, this.jsonStr( object ), mode );
+};
+
+/** Save the HTML from the current page to file.
+ *  @param {String} filepath    filepath relative to the current script
+ *  @param {String} selector    A DOM CSS3/XPath selector (optional)
+ *  @param {Boolean} outer      Whether to fetch outer HTML contents (default: false)
+ */
+SpaceGhost.prototype.writeHTMLFile = function writeHTMLFile( filepath, selector, outer ){
+    return fs.write( filepath, this.getHTML( selector, outer ), 'w' );
+};
+
+/** Read and search a file for the given regex.
+ *  @param {String} filepath     filepath relative to the current script
+ *  @param {RegExp} regex        regex to search for
+ *  @returns {Object} search results
+ */
+SpaceGhost.prototype.searchFile = function searchFile( filepath, regex ){
+    //precondition: filepath is relative to script dir
+    var read = fs.read( filepath );
+    return read.match( regex );
+};
+
+/** Read a configuration setting from the galaxy.ini file.
+ *  @param {String} iniKey     the setting key to find
+ *  @returns {String} value from file for iniKey (or null if not found or commented out)
+ */
+SpaceGhost.prototype.getUniverseSetting = function getUniverseSetting( iniKey ){
+    var iniFilepath = '../../config/galaxy.ini',
+        regex = new RegExp( '^([#]*)\\s*' + iniKey + '\\s*=\\s*(.*)$', 'm' ),
+        match = this.searchFile( iniFilepath, regex );
+    this.debug( 'regex: ' + regex );
+    // return null if nothing was found, the line is commented out (first group non-empty), or there's no value
+    if( match === null || match[1] || !match[2] ){
+        return null;
+    }
+    return match[2];
+};
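+
+// Usage sketch - read a setting from galaxy.ini (key shown is a standard one):
+//     var allowCreation = spaceghost.getUniverseSetting( 'allow_user_creation' );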
+
+SpaceGhost.prototype.waitForMasthead = function waitForMasthead( then ){
+    return this.waitForText( this.data.labels.masthead.menus.analyze, then );
+};
+
+
+// =================================================================== TEST DATA
+/** General use selectors, labels, and text. Kept here to allow a centralized location.
+ */
+SpaceGhost.prototype.data = {
+    selectors : {
+        tooltipBalloon          : '.tooltip',
+
+        editableText            : '.editable-text',
+
+        messages : {
+            all         : '[class*="message"]',
+            error       : '.errormessage',
+            done        : '.donemessage',
+            info        : '.infomessage',
+            donelarge   : '.donemessagelarge',
+            infolarge   : '.infomessagelarge'
+        },
+
+        frames : {
+            main    : 'galaxy_main'
+        },
+
+        masthead : {
+            id          : '#masthead',
+            adminLink   : '#masthead a[href="/admin/index"]',
+            userMenu    : {
+                userEmail_xpath : '//a[contains(text(),"Logged in as")]'
+            },
+            user        : '//ul[@id="user"]',
+        },
+        toolMenu : {
+            container   : '.toolMenuContainer'
+        },
+        historyPanel : {
+            current     : '#current-history-panel'
+        },
+
+        loginPage : {
+            form            : 'form#login',
+            submit_xpath    : "//input[@value='Login']",
+            url_regex       : /\/user\/login/
+        },
+        registrationPage : {
+            form            : 'form#registration',
+            submit_xpath    : "//input[@value='Submit']",
+            returnLink      : '//a[contains(text(),"Return to the home page")]'
+        },
+        tools : {
+            general : {
+                form : 'form#tool_form',
+                executeButton_xpath : '//input[@value="Execute"]'
+            },
+            upload : {
+                fileInput   : 'files_0|file_data'   // is this general?
+            }
+        }
+    },
+    labels : {
+        masthead : {
+            menus : {
+                analyze: 'Analyze Data',
+                workflow: 'Workflow',
+                libraries: 'Shared Data',
+                visualization: 'Visualization',
+                help: 'Help',
+                user : 'User',
+            },
+            userMenu : {
+                register    : 'Register',
+                login       : 'Login',
+                logout      : 'Logout'
+            }
+        },
+        tools : {
+            upload : {
+                panelLabel  : 'Upload File'
+            }
+        }
+    },
+    text : {
+        registrationPage : {
+            badEmailError   : 'Enter a real email address'
+        },
+        upload : {
+            success : 'Your upload has been queued'
+        },
+        tool : {
+            success : 'The following job has been successfully added to the queue'
+        }
+    }
+};
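+
+// These maps are referenced throughout the modules and tests, e.g.:
+//     spaceghost.waitForSelector( spaceghost.data.selectors.messages.done );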
+
+
+// =================================================================== error types
+/** @class Represents a javascript error on the page casper is browsing
+ *      (as opposed to an error in the test script).
+ */
+function PageError(){
+    CasperError.apply( this, arguments );
+    this.name = "PageError";
+}
+//TODO: change to inheriting from Error
+PageError.prototype = new CasperError();
+PageError.prototype.constructor = CasperError;
+SpaceGhost.prototype.PageError = PageError;
+
+/** @class Thrown when Galaxy has (gracefully?) indicated pilot error. */
+function GalaxyError(){
+    CasperError.apply( this, arguments );
+    this.name = "GalaxyError";
+}
+GalaxyError.prototype = new CasperError();
+GalaxyError.prototype.constructor = CasperError;
+SpaceGhost.prototype.GalaxyError = GalaxyError;
+
+/** @class Thrown when Galaxy has displayed a javascript alert. */
+function AlertError(){
+    CasperError.apply( this, arguments );
+    this.name = "AlertError";
+}
+AlertError.prototype = new CasperError();
+AlertError.prototype.constructor = CasperError;
+SpaceGhost.prototype.AlertError = AlertError;
+
+exports.PageError   = PageError;
+exports.GalaxyError = GalaxyError;
+exports.AlertError  = AlertError;
diff --git a/test/casperjs/test-data/simple_test.ga b/test/casperjs/test-data/simple_test.ga
new file mode 100644
index 0000000..db468c5
--- /dev/null
+++ b/test/casperjs/test-data/simple_test.ga
@@ -0,0 +1,96 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "simple_test", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Input Dataset"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 200, 
+                "top": 231
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Input Dataset\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {
+                "input": {
+                    "id": 0, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Select first", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 409, 
+                "top": 365
+            }, 
+            "post_job_actions": {
+                "HideDatasetActionout_file1": {
+                    "action_arguments": {}, 
+                    "action_type": "HideDatasetAction", 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "tool_errors": null, 
+            "tool_id": "Show beginning1", 
+            "tool_state": "{\"__page__\": 0, \"input\": \"null\", \"__rerun_remap_job_id__\": null, \"lineNum\": \"\\\"10\\\"\"}", 
+            "tool_version": "1.0.0", 
+            "type": "tool", 
+            "user_outputs": []
+        }, 
+        "2": {
+            "annotation": "", 
+            "id": 2, 
+            "input_connections": {
+                "input": {
+                    "id": 1, 
+                    "output_name": "out_file1"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Select random lines", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 609.5, 
+                "top": 234
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "random_lines1", 
+            "tool_state": "{\"input\": \"null\", \"seed_source\": \"{\\\"__current_case__\\\": 0, \\\"seed_source_selector\\\": \\\"no_seed\\\"}\", \"__rerun_remap_job_id__\": null, \"num_lines\": \"\\\"5\\\"\", \"__page__\": 0}", 
+            "tool_version": "2.0.1", 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }, 
+    "uuid": "675b4aa8-c885-46d3-b63a-3c52b3bec8c4"
+}
\ No newline at end of file
diff --git a/test/casperjs/upload-tests.js b/test/casperjs/upload-tests.js
new file mode 100644
index 0000000..a6fb8a5
--- /dev/null
+++ b/test/casperjs/upload-tests.js
@@ -0,0 +1,45 @@
+var require = patchRequire( require ),
+    spaceghost = require( 'spaceghost' ).fromCasper( casper ),
+    xpath = require( 'casper' ).selectXPath,
+    utils = require( 'utils' ),
+    format = utils.format;
+
+spaceghost.test.begin( 'Test uploading data to a history', 0, function suite( test ){
+    spaceghost.start();
+    // ===================================================================
+
+    var email = spaceghost.user.getRandomEmail(),
+        password = '123456';
+    if( spaceghost.fixtureData.testUser ){
+        email = spaceghost.fixtureData.testUser.email;
+        password = spaceghost.fixtureData.testUser.password;
+    }
+
+    // ------------------------------------------------------------------- start a new user
+    spaceghost.user.loginOrRegisterUser( email, password ).openHomePage( function(){
+        var loggedInAs = spaceghost.user.loggedInAs();
+        this.test.assert( loggedInAs === email, 'loggedInAs() matches email: "' + loggedInAs + '"' );
+    });
+
+    spaceghost.api.tools.thenUploadToCurrent({ filepath: '../../test-data/1.sam' }, function( uploadedId, json ){
+        var currentHistoryId = this.api.histories.index()[0].id,
+            contents = this.api.hdas.index( currentHistoryId );
+        this.test.assert( contents.length === 1, 'found one hda in history' );
+        this.test.assert( contents[0].id === uploadedId, 'id matches' );
+        this.test.assert( contents[0].name === '1.sam', 'name matches' );
+    });
+    spaceghost.user.logout();
+
+    // ------------------------------------------------------------------- anon user
+    spaceghost.openHomePage();
+    spaceghost.api.tools.thenUploadToCurrent({ filepath: '../../test-data/1.bed' }, function( uploadedId, json ){
+        var currentHistoryId = this.api.histories.index()[0].id,
+            contents = this.api.hdas.index( currentHistoryId );
+        this.test.assert( contents.length === 1, 'found one hda in history' );
+        this.test.assert( contents[0].id === uploadedId, 'id matches' );
+        this.test.assert( contents[0].name === '1.bed', 'name matches' );
+    });
+
+    // ===================================================================
+    spaceghost.run( function(){ test.done(); });
+});
diff --git a/test/casperjs/utils/simple-galaxy.js b/test/casperjs/utils/simple-galaxy.js
new file mode 100755
index 0000000..7d459da
--- /dev/null
+++ b/test/casperjs/utils/simple-galaxy.js
@@ -0,0 +1,65 @@
+/* Utility to load a specific page and output html, page text, or a screenshot
+ *  Optionally wait for some time, text, or dom selector
+ */
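+// Example invocations (urls and filenames are hypothetical):
+//     casperjs utils/simple-galaxy.js --url="http://localhost:8080/history" --waitMs=2000
+//     casperjs utils/simple-galaxy.js --waitForSelector="#masthead" --capture="masthead.png"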
+try {
+    //...if there's a better way - please let me know, universe
+    var scriptDir = require( 'system' ).args[3]
+            // remove the script filename
+            .replace( /[\w|\.|\-|_]*$/, '' )
+            // if given rel. path, prepend the curr dir
+            .replace( /^(?!\/)/, './' ),
+        spaceghost = require( scriptDir + 'spaceghost' ).create({
+            // script options here (can be overridden by CLI)
+            //verbose: true,
+            //logLevel: debug,
+            scriptDir: scriptDir
+        });
+
+
+} catch( error ){
+    console.debug( error );
+    phantom.exit( 1 );
+}
+
+spaceghost.start();
+
+// Use the --url to load a specific page (e.g. --url="http://localhost:8080/history")
+spaceghost.thenOpen( spaceghost.baseUrl, function(){
+
+    // options for waiting before output/screenshot:
+    //      --waitMs=<some number of Ms> --> wait some number of Ms before output
+    //      --waitForText=<some text> --> wait for some text to be rendered before output
+    //      --waitForSelector=<css selector> --> wait for some DOM to be rendered before output
+    if( 'waitMs' in spaceghost.cli.options ){
+        spaceghost.wait( spaceghost.cli.get( 'waitMs' ) );
+
+    } else if( 'waitForText' in spaceghost.cli.options ){
+        spaceghost.waitForText( spaceghost.cli.get( 'waitForText' ) );
+
+    } else if( 'waitForSelector' in spaceghost.cli.options ){
+        spaceghost.waitForSelector( spaceghost.cli.get( 'waitForSelector' ) );
+    }
+
+    // --capture=<myscreenshot.png> --> capture a screenshot of the page
+    // --html --> output the html of the page
+    // (if not --html) --> output the text of the page
+    spaceghost.then( function(){
+        if( 'capture' in spaceghost.cli.options ){
+            var sshotFilename = spaceghost.cli.get( 'capture' );
+            spaceghost.debug( 'screenshot stored at: ' + sshotFilename );
+            spaceghost.capture( sshotFilename );
+
+        } else {
+            if( spaceghost.cli.args.indexOf( 'html' ) !== -1 ){
+                spaceghost.debugHTML();
+
+            } else {
+                spaceghost.debugPage();
+            }
+        }
+    });
+
+});
+
+spaceghost.run( function(){
+});
diff --git a/test/docker/README.md b/test/docker/README.md
new file mode 100644
index 0000000..ed9bd8c
--- /dev/null
+++ b/test/docker/README.md
@@ -0,0 +1,13 @@
+Set up a Docker environment for testing using the following
+commands.
+
+    % cd test/docker/base
+    % docker build -t galaxy/testing-base .
+
+Alternatively, the prebuilt image can be pulled from [Dockerhub](https://hub.docker.com/).
+
+    % docker pull galaxy/testing-base
+
+The resulting Docker image is ready to run most of Galaxy's tests
+(functional tests for specific bioinformatics tools may still fail
+until they are migrated out of Galaxy and into the Tool Shed).
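+
+Once built or pulled, the tests can be run inside a container; a hypothetical
+invocation (run_test_wrapper.sh is the image's entrypoint and reads
+GALAXY_TEST_DATABASE_TYPE, which may be postgres, mysql, or sqlite):
+
+    % docker run -e GALAXY_TEST_DATABASE_TYPE=sqlite galaxy/testing-base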
diff --git a/test/docker/base/Dockerfile b/test/docker/base/Dockerfile
new file mode 100644
index 0000000..264fdac
--- /dev/null
+++ b/test/docker/base/Dockerfile
@@ -0,0 +1,111 @@
+FROM toolshed/requirements
+MAINTAINER John Chilton <jmchilton at gmail.com>
+
+ENV MYSQL_MAJOR=5.6
+ENV GALAXY_ROOT=/galaxy
+ENV GALAXY_VIRTUAL_ENV /galaxy_venv
+
+# Pre-install a bunch of packages to speed up ansible steps.
+RUN apt-get update -y && apt-get install -y software-properties-common curl && \
+    apt-add-repository -y ppa:ansible/ansible && \
+    curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - && \
+    echo 'deb https://deb.nodesource.com/node trusty main' > /etc/apt/sources.list.d/nodesource.list && \
+    apt-key adv --keyserver pool.sks-keyservers.net --recv-keys A4A9406876FCBD3C456770C88C718D3B5072E1F5 && \
+    echo "deb http://repo.mysql.com/apt/ubuntu/ trusty mysql-${MYSQL_MAJOR}" > /etc/apt/sources.list.d/mysql.list && \
+    { \
+		echo mysql-community-server mysql-community-server/data-dir select ''; \
+		echo mysql-community-server mysql-community-server/root-pass password ''; \
+		echo mysql-community-server mysql-community-server/re-root-pass password ''; \
+		echo mysql-community-server mysql-community-server/remove-test-db select false; \
+	} | debconf-set-selections && \
+    apt-get update -y && \
+    apt-get install -y libpq-dev postgresql postgresql-client postgresql-plpython-9.3 \
+            ansible postgresql-server-dev-9.3 wget mysql-server="${MYSQL_MAJOR}"* libmysqlclient-dev="${MYSQL_MAJOR}"* \
+            slurm-llnl libmunge-dev slurm-drmaa-dev ant atop axel bioperl cmake curl \
+            g++ gcc gfortran git-core htop iftop iotop ipython libffi-dev liblapack-dev \
+            libncurses5-dev libopenblas-dev libpam0g-dev libpq-dev libsparsehash-dev make \
+            mercurial nginx-extras nmon patch postgresql postgresql \
+            postgresql-client postgresql-plpython-9.3 python-boto python-dev \
+            python-prettytable python-psycopg2 python-virtualenv python-pip \
+            postgresql-server-dev-9.3 rsync slurm-drmaa-dev supervisor swig sysstat unzip \
+            wget zlib1g-dev nodejs && \
+    apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+RUN npm install -g grunt-contrib-qunit grunt grunt-cli && \
+    cd /tmp && \
+    wget https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 && \
+    tar xf phantomjs-2.1.1-linux-x86_64.tar.bz2 && \
+    mv phantomjs-2.1.1-linux-x86_64/bin/phantomjs /usr/local/bin/phantomjs && \
+    mkdir -p /opt/casperjs /casperjs && wget https://github.com/n1k0/casperjs/tarball/master -O- | tar -xzf- --strip-components=1 -C /opt/casperjs && \
+    ln -sf /opt/casperjs/bin/casperjs /usr/local/bin/casperjs
+
+RUN mkdir -p /tmp/ansible && \
+    mkdir -p /opt/galaxy/db && \
+    chown -R postgres:postgres /opt/galaxy/db && \
+    sed -Ei 's/^(bind-address|log)/#&/' /etc/mysql/my.cnf
+
+ADD start_mysql.sh /opt/galaxy/start_mysql.sh
+ADD ansible_vars.yml /tmp/ansible/ansible_vars.yml
+ADD provision.yml /tmp/ansible/provision.yml
+
+
+RUN mkdir /etc/galaxy && cd /tmp/ansible && mkdir roles && \
+    mkdir roles/galaxyprojectdotorg.galaxy-os && \
+    wget --quiet -O- https://github.com/galaxyproject/ansible-galaxy-os/archive/master.tar.gz | tar -xzf- --strip-components=1 -C roles/galaxyprojectdotorg.galaxy-os && \
+    mkdir roles/galaxyprojectdotorg.cloudman-database && \
+    wget --quiet -O- https://github.com/galaxyproject/ansible-cloudman-database/archive/master.tar.gz | tar -xzf- --strip-components=1 -C roles/galaxyprojectdotorg.cloudman-database && \
+    mkdir roles/galaxyprojectdotorg.galaxy && \
+    wget --quiet -O- https://github.com/galaxyproject/ansible-galaxy/archive/master.tar.gz | tar -xzf- --strip-components=1 -C roles/galaxyprojectdotorg.galaxy && \
+    mkdir roles/galaxyprojectdotorg.galaxy-extras && \
+    wget --quiet -O- https://github.com/galaxyproject/ansible-galaxy-extras/archive/dynamic_uwsgi_config.tar.gz | tar -xzf- --strip-components=1 -C roles/galaxyprojectdotorg.galaxy-extras && \
+    mkdir roles/galaxyprojectdotorg.galaxy-toolshed && \
+    wget --quiet -O- https://github.com/galaxyproject/ansible-galaxy-toolshed/archive/master.tar.gz | tar -xzf- --strip-components=1 -C roles/galaxyprojectdotorg.galaxy-toolshed && \
+    ANSIBLE_FORCE_COLOR=1 PYTHONUNBUFFERED=1 ansible-playbook /tmp/ansible/provision.yml --tags=image -c local -e "@ansible_vars.yml" && \
+    ANSIBLE_FORCE_COLOR=1 PYTHONUNBUFFERED=1 ansible-playbook /tmp/ansible/provision.yml --tags=database -c local -e "@ansible_vars.yml" && \
+    ANSIBLE_FORCE_COLOR=1 PYTHONUNBUFFERED=1 ansible-playbook /tmp/ansible/provision.yml --tags=galaxy -c local -e "@ansible_vars.yml" && \
+    ANSIBLE_FORCE_COLOR=1 PYTHONUNBUFFERED=1 ansible-playbook /tmp/ansible/provision.yml --tags=toolshed -c local -e "@ansible_vars.yml" && \
+    apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+RUN cd $GALAXY_ROOT && \
+    ./scripts/common_startup.sh || { echo "common_startup.sh failed"; exit 1; } && \
+    dev_requirements=./lib/galaxy/dependencies/dev-requirements.txt && \
+    [ -f $dev_requirements ] && $GALAXY_VIRTUAL_ENV/bin/pip install -r $dev_requirements
+
+RUN . $GALAXY_VIRTUAL_ENV/bin/activate && \
+    pip install psycopg2 && \
+    pip install mysql && \
+    cd $GALAXY_ROOT && \
+    echo "Prepopulating postgres database" && \
+    su -c '/usr/lib/postgresql/9.3/bin/pg_ctl -o "-F" start -D /opt/galaxy/db' postgres && \
+    sleep 3 && \
+    GALAXY_CONFIG_DATABASE_CONNECTION="postgresql://root@localhost:5930/galaxy" bash create_db.sh && \
+    echo "Prepopulating sqlite database" && \
+    GALAXY_CONFIG_DATABASE_CONNECTION="sqlite:////opt/galaxy/galaxy.sqlite" bash create_db.sh && \
+    sh /opt/galaxy/start_mysql.sh && \
+    echo "Prepopulating mysql database" && \
+    GALAXY_CONFIG_DATABASE_CONNECTION="mysql://galaxy:galaxy@localhost/galaxy?unix_socket=/var/run/mysqld/mysqld.sock" bash create_db.sh && \
+    echo "Prepopulating toolshed postgres database" && \
+    su -c '/usr/lib/postgresql/9.3/bin/pg_ctl -o "-F" start -D /opt/galaxy/db' postgres && \
+    GALAXY_CONFIG_DATABASE_CONNECTION="postgresql://root@localhost:5930/toolshed" bash create_db.sh tool_shed && \
+    echo "Prepopulating toolshed sqlite database" && \
+    GALAXY_CONFIG_DATABASE_CONNECTION="sqlite:////opt/galaxy/toolshed.sqlite" bash create_db.sh tool_shed && \
+    sh /opt/galaxy/start_mysql.sh && \
+    echo "Prepopulating toolshed mysql database" && \
+    GALAXY_CONFIG_DATABASE_CONNECTION="mysql://galaxy:galaxy@localhost/toolshed?unix_socket=/var/run/mysqld/mysqld.sock" bash create_db.sh tool_shed
+
+# bcftools for Galaxy.
+RUN mkdir -p /tmp/install && \
+    cd /tmp/install && \
+    wget https://github.com/samtools/bcftools/releases/download/1.2/bcftools-1.2.tar.bz2 && \
+    tar xvjf bcftools-1.2.tar.bz2 && \
+    cd bcftools-1.2 && \
+    make && \
+    make install && \
+    cd && rm -rf /tmp/install
+
+ADD run_test_wrapper.sh /usr/local/bin/run_test_wrapper.sh
+
+EXPOSE :9009
+EXPOSE :8080
+EXPOSE :80
+ENTRYPOINT ["/bin/bash", "/usr/local/bin/run_test_wrapper.sh"]
diff --git a/test/docker/base/ansible_vars.yml b/test/docker/base/ansible_vars.yml
new file mode 100644
index 0000000..1db6a9f
--- /dev/null
+++ b/test/docker/base/ansible_vars.yml
@@ -0,0 +1,48 @@
+---
+add_system_users: no
+galaxyFS_base_dir: /opt/galaxy
+# TODO: use GALAXY_ROOT via cmd line instead
+galaxy_server_dir: "/galaxy"
+galaxy_venv_dir: "{{ galaxyFS_base_dir }}/.venv"
+galaxy_repo: https://bitbucket.org/galaxy/galaxy-central
+galaxy_changeset_id: stable
+galaxy_manage_clone: no
+galaxy_manage_download: yes
+galaxy_root: "{{ galaxy_server_dir}}"
+galaxy_db_dir: "{{ galaxyFS_base_dir }}/db"
+galaxy_db_port: 5930
+galaxy_db_log: /tmp/pSQL.log
+psql_galaxyftp_password: ignoredthis
+galaxy_config_file: "{{ galaxy_server_dir }}/config/galaxy.ini"
+configure_docker: no
+postgresql_bin_dir: /usr/lib/postgresql/9.3/bin
+galaxy_manage_database: no
+
+galaxy_user_name: "root"
+galaxy_extras_install_packages: true
+galaxy_job_conf_path: "/etc/galaxy/job_conf.xml"
+
+galaxy_web_processes: 1
+galaxy_handler_processes: 2
+galaxy_log_dir: "/root"
+
+supervisor_nodaemon: true
+supervisor_postgres_database_path: "{{ galaxy_db_dir }}"
+
+galaxy_extras_config_nginx: true
+galaxy_extras_config_proftpd: false
+galaxy_extras_config_slurm: true
+galaxy_extras_config_supervisor: true
+galaxy_extras_config_galaxy_root: false
+galaxy_extras_config_galaxy_job_metrics: false
+galaxy_extras_config_uwsgi: true
+
+manage_shed_db: yes
+galaxy_toolshed_manage_static_setup: yes
+galaxy_toolshed_manage_database: no
+galaxy_toolshed_server_dir: "/galaxy"
+galaxy_toolshed_venv_dir: "{{ galaxy_venv_dir }}"
+
+galaxy_toolshed_config_file: "{{ galaxy_toolshed_server_dir }}/config/tool_shed.ini"
+galaxy_toolshed_config_templates:
+- {dest: '{{ galaxy_toolshed_config_file }}', src: tool_shed.ini.j2}
diff --git a/test/docker/base/provision.yml b/test/docker/base/provision.yml
new file mode 100644
index 0000000..adbeb69
--- /dev/null
+++ b/test/docker/base/provision.yml
@@ -0,0 +1,16 @@
+- hosts: localhost
+  connection: local
+  roles:
+    - role: galaxyprojectdotorg.galaxy-os
+      tags: image
+      sudo: yes
+    - role: galaxyprojectdotorg.cloudman-database
+      sudo: yes
+      sudo_user: postgres
+      tags: database
+    - role: galaxyprojectdotorg.galaxy
+      sudo: yes
+      tags: galaxy
+    - role: galaxyprojectdotorg.galaxy-toolshed
+      sudo: yes
+      tags: toolshed
\ No newline at end of file
diff --git a/test/docker/base/run_test_wrapper.sh b/test/docker/base/run_test_wrapper.sh
new file mode 100644
index 0000000..ad702b9
--- /dev/null
+++ b/test/docker/base/run_test_wrapper.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+set -e
+
+GALAXY_TEST_DATABASE_TYPE=${GALAXY_TEST_DATABASE_TYPE:-"postgres"}
+if [ "$GALAXY_TEST_DATABASE_TYPE" = "postgres" ];
+then
+    su -c '/usr/lib/postgresql/9.3/bin/pg_ctl -o "-F" start -D /opt/galaxy/db' postgres
+    sleep 3
+    GALAXY_TEST_INSTALL_DB_MERGED="true"
+    GALAXY_TEST_DBURI="postgres://root@localhost:5930/galaxy?client_encoding=utf8"
+    TOOL_SHED_TEST_DBURI="postgres://root@localhost:5930/toolshed?client_encoding=utf8"
+elif [ "$GALAXY_TEST_DATABASE_TYPE" = "mysql" ];
+then
+    sh /opt/galaxy/start_mysql.sh
+    GALAXY_TEST_INSTALL_DB_MERGED="true"
+    GALAXY_TEST_DBURI="mysql://galaxy:galaxy@localhost/galaxy?unix_socket=/var/run/mysqld/mysqld.sock"
+    TOOL_SHED_TEST_DBURI="mysql://galaxy:galaxy@localhost/toolshed?unix_socket=/var/run/mysqld/mysqld.sock"
+elif [ "$GALAXY_TEST_DATABASE_TYPE" = "sqlite" ];
+then
+    GALAXY_TEST_INSTALL_DB_MERGED="true"
+    GALAXY_TEST_DBURI="sqlite:////opt/galaxy/galaxy.sqlite"
+    TOOL_SHED_TEST_DBURI="sqlite:////opt/galaxy/toolshed.sqlite"
+else
+	echo "Unknown database type"
+	exit 1
+fi
+export GALAXY_TEST_DBURI
+export TOOL_SHED_TEST_DBURI
+export GALAXY_TEST_INSTALL_DB_MERGED
+
+cd /galaxy
+GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION="$GALAXY_TEST_DBURI";
+export GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION
+
+: ${GALAXY_VIRTUAL_ENV:=.venv}
+
+./scripts/common_startup.sh || { echo "common_startup.sh failed"; exit 1; }
+
+dev_requirements=./lib/galaxy/dependencies/dev-requirements.txt
+[ -f $dev_requirements ] && $GALAXY_VIRTUAL_ENV/bin/pip install -r $dev_requirements
+
+sh manage_db.sh upgrade
+sh manage_db.sh upgrade tool_shed
+
+if [ -z "$GALAXY_NO_TESTS" ];
+then
+    sh run_tests.sh --skip-common-startup "$@"
+else
+    GALAXY_CONFIG_MASTER_API_KEY=${GALAXY_CONFIG_MASTER_API_KEY:-"testmasterapikey"}
+    GALAXY_CONFIG_FILE=${GALAXY_CONFIG_FILE:-config/galaxy.ini.sample}
+    TOOL_SHED_CONFIG_FILE=${TOOL_SHED_CONFIG_FILE:-config/tool_shed.ini.sample}
+    GALAXY_CONFIG_CHECK_MIGRATE_TOOLS=false
+    if [ -z "$GALAXY_MULTI_PROCESS" ];
+    then
+        GALAXY_CONFIG_JOB_CONFIG_FILE=${GALAXY_CONFIG_JOB_CONFIG_FILE:-config/job_conf.xml.sample}
+    else
+        GALAXY_CONFIG_JOB_CONFIG_FILE=/etc/galaxy/job_conf.xml
+    fi
+    GALAXY_CONFIG_FILE_PATH=${GALAXY_CONFIG_FILE_PATH:-/tmp/gx1}
+    GALAXY_CONFIG_NEW_FILE_PATH=${GALAXY_CONFIG_NEW_FILE_PATH:-/tmp/gxtmp}
+
+    export GALAXY_CONFIG_MASTER_API_KEY
+    export GALAXY_CONFIG_FILE
+    export TOOL_SHED_CONFIG_FILE
+    export GALAXY_CONFIG_CHECK_MIGRATE_TOOLS
+    export GALAXY_CONFIG_JOB_CONFIG_FILE
+    export GALAXY_CONFIG_FILE_PATH
+    export GALAXY_CONFIG_NEW_FILE_PATH
+
+    if [ -z "$GALAXY_MULTI_PROCESS" ];
+    then
+        sh run.sh "$@"
+    else
+        /usr/bin/supervisord
+    fi
+fi
diff --git a/test/docker/base/start_mysql.sh b/test/docker/base/start_mysql.sh
new file mode 100644
index 0000000..cd378a0
--- /dev/null
+++ b/test/docker/base/start_mysql.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+set -e
+
+MYSQL_USER=galaxy
+MYSQL_PASSWORD=galaxy
+MYSQL_DATABASE=galaxy
+SHED_MYSQL_DATABASE=toolshed
+
+mkdir -p /var/lib/mysql
+chown -R mysql:mysql /var/lib/mysql
+
+# Derived from
+# https://github.com/docker-library/mysql/blob/master/5.7/docker-entrypoint.sh
+
+DATADIR="$(mysqld --verbose --help 2>/dev/null | awk '$1 == "datadir" { print $2; exit }')"
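+# (DATADIR is carried over from the upstream entrypoint referenced above; it
+# is not used further in this script.)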
+
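+# NOTE: MYSQL_ROOT_PASSWORD is not set by this script and is expected to come
+# from the environment; if it is unset, root is created with an empty
+# password, which is only acceptable inside a disposable test container.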
+tempSqlFile='/tmp/mysql-first-time.sql'
+cat > "$tempSqlFile" <<-EOSQL
+	DELETE FROM mysql.user ;
+	CREATE USER 'root'@'%' IDENTIFIED BY '${MYSQL_ROOT_PASSWORD}' ;
+	GRANT ALL ON *.* TO 'root'@'%' WITH GRANT OPTION ;
+	DROP DATABASE IF EXISTS test ;
+EOSQL
+
+if [ "$MYSQL_DATABASE" ]; then
+    echo "CREATE DATABASE IF NOT EXISTS \`$MYSQL_DATABASE\` ;" >> "$tempSqlFile"
+fi
+
+if [ "$SHED_MYSQL_DATABASE" ]; then
+    echo "CREATE DATABASE IF NOT EXISTS \`$SHED_MYSQL_DATABASE\` ;" >> "$tempSqlFile"
+fi
+
+if [ "$MYSQL_USER" -a "$MYSQL_PASSWORD" ]; then
+    echo "CREATE USER '$MYSQL_USER'@'%' IDENTIFIED BY '$MYSQL_PASSWORD' ;" >> "$tempSqlFile"
+
+    if [ "$MYSQL_DATABASE" ]; then
+        echo "GRANT ALL ON \`$MYSQL_DATABASE\`.* TO '$MYSQL_USER'@'%' ;" >> "$tempSqlFile"
+    fi
+    if [ "$SHED_MYSQL_DATABASE" ]; then
+        echo "GRANT ALL ON \`$SHED_MYSQL_DATABASE\`.* TO '$MYSQL_USER'@'%' ;" >> "$tempSqlFile"
+    fi
+fi
+
+echo 'FLUSH PRIVILEGES ;' >> "$tempSqlFile"
+
+mysqld_safe --init-file="$tempSqlFile" &
+sleep 5
\ No newline at end of file
diff --git a/test/functional/__init__.py b/test/functional/__init__.py
new file mode 100644
index 0000000..e9f39aa
--- /dev/null
+++ b/test/functional/__init__.py
@@ -0,0 +1,3 @@
+"""
+Functional Tests
+"""
diff --git a/test/functional/database_contexts.py b/test/functional/database_contexts.py
new file mode 100644
index 0000000..502f303
--- /dev/null
+++ b/test/functional/database_contexts.py
@@ -0,0 +1,6 @@
+# Global variables to pass database contexts around. Fairly hackish that they
+# are shared this way, but at least they have been moved out of Galaxy's lib/
+# code base.
+galaxy_context = None
+tool_shed_context = None
+install_context = None
diff --git a/test/functional/test_data_managers.py b/test/functional/test_data_managers.py
new file mode 100644
index 0000000..b75925e
--- /dev/null
+++ b/test/functional/test_data_managers.py
@@ -0,0 +1,112 @@
+import logging
+import new
+import os.path
+import shutil
+import tempfile
+
+from base.interactor import stage_data_in_history
+
+from .test_toolbox import ToolTestCase
+
+log = logging.getLogger( __name__ )
+data_managers = None
+
+
+class DataManagerToolTestCase( ToolTestCase ):
+    """Test case that runs Data Manager tests based on a `galaxy.tools.test.ToolTest`"""
+
+    def do_it( self, testdef ):
+        """
+        Run through a tool test case.
+        """
+        shed_tool_id = self.shed_tool_id
+
+        self._handle_test_def_errors( testdef )
+
+        galaxy_interactor = self._galaxy_interactor( testdef )
+
+        test_history = galaxy_interactor.new_history()  # history where inputs will be put, if any
+
+        stage_data_in_history( galaxy_interactor, testdef.test_data(), test_history, shed_tool_id )
+
+        galaxy_interactor.run_tool( testdef, test_history )  # test_history will have inputs only, outputs are placed in the specialized data manager history
+
+        # FIXME: Move history determination and switching into the interactor
+        data_manager_history = None
+        for assoc in reversed( test_history.user.data_manager_histories ):
+            if not assoc.history.deleted:
+                data_manager_history = assoc.history
+                break
+        self.switch_history( id=self.security.encode_id( data_manager_history.id ) )
+        data_list = self.get_history_as_data_list()
+        # end
+
+        self.assertTrue( data_list )
+
+        self._verify_outputs( testdef, data_manager_history, shed_tool_id, data_list, galaxy_interactor )
+
+        self.switch_history( id=self.security.encode_id( test_history.id ) )
+
+        galaxy_interactor.delete_history( test_history )
+
+
+def build_tests( tmp_dir=None, testing_shed_tools=False, master_api_key=None, user_api_key=None ):
+    """
+    If the module level variable `data_managers` is set, generate `DataManagerToolTestCase`
+    classes for all of its tests and put them into this modules globals() so
+    they can be discovered by nose.
+    """
+
+    if data_managers is None:
+        log.warning( 'data_managers was not set for Data Manager functional testing. Will not test.' )
+        return
+
+    # Push all the data_managers tests to module level
+    G = globals()
+
+    # Eliminate all previous tests from G (iterate over a copy of the keys,
+    # since entries are deleted as we go).
+    for key in list( G ):
+        if key.startswith( 'TestForDataManagerTool_' ):
+            del G[ key ]
+
+    # first we will loop through data table loc files and copy them to temporary location, then swap out filenames:
+    for data_table_name, data_table in data_managers.app.tool_data_tables.get_tables().items():
+        for filename, value in list( data_table.filenames.items() ):
+            new_filename = tempfile.NamedTemporaryFile( prefix=os.path.basename( filename ), dir=tmp_dir ).name
+            try:
+                shutil.copy( filename, new_filename )
+            except IOError as e:
+                log.warning( "Failed to copy '%s' to '%s', will create empty file at '%s': %s", filename, new_filename, new_filename, e )
+                open( new_filename, 'wb' ).close()
+            if 'filename' in value:
+                value[ 'filename' ] = new_filename
+            del data_table.filenames[ filename ]  # remove the old filename:value pair
+            data_table.filenames[ new_filename ] = value  # add the new filename:value pair
+
+    for i, ( data_manager_id, data_manager ) in enumerate( data_managers.data_managers.items() ):
+        tool = data_manager.tool
+        if not tool:
+            log.warning( "No Tool has been specified for Data Manager: %s", data_manager_id )
+            continue
+        if tool.tests:
+            # FIXME: data_manager.tool_shed_repository_info_dict should be filled in when the Data Manager is Tool Shed-based
+            shed_tool_id = None if not testing_shed_tools else tool.id
+            # Create a new subclass of ToolTestCase, dynamically adding methods
+            # named test_tool_XXX that run each test defined in the tool config.
+            name = "TestForDataManagerTool_" + data_manager_id.replace( ' ', '_' )
+            baseclasses = ( DataManagerToolTestCase, )
+            namespace = dict()
+            for j, testdef in enumerate( tool.tests ):
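+                # Bind this iteration's testdef through a factory function;
+                # closing over the loop variable directly would late-bind and
+                # make every generated test run the final testdef.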
+                def make_test_method( td ):
+                    def test_tool( self ):
+                        self.do_it( td )
+                    return test_tool
+                test_method = make_test_method( testdef )
+                test_method.__doc__ = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
+                namespace[ 'test_tool_%06d' % j ] = test_method
+                namespace[ 'shed_tool_id' ] = shed_tool_id
+                namespace[ 'master_api_key' ] = master_api_key
+                namespace[ 'user_api_key' ] = user_api_key
+            # The new.classobj function returns a new class object, with name name, derived
+            # from baseclasses (which should be a tuple of classes) and with namespace dict.
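+            # (The 'new' module is Python 2 only; the built-in
+            # type( name, baseclasses, namespace ) call is the modern equivalent.)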
+            new_class_obj = new.classobj( name, baseclasses, namespace )
+            G[ name ] = new_class_obj
diff --git a/test/functional/test_library_templates.py b/test/functional/test_library_templates.py
new file mode 100644
index 0000000..7986fcf
--- /dev/null
+++ b/test/functional/test_library_templates.py
@@ -0,0 +1,737 @@
+import galaxy.model
+from base.test_db_util import (
+    get_folder,
+    get_form,
+    get_latest_hda,
+    get_latest_ldda_by_name,
+    get_library,
+    get_private_role,
+    get_user,
+    get_user_address,
+    mark_obj_deleted,
+    refresh
+)
+from base.twilltestcase import TwillTestCase
+
+AddressField_form = None
+CheckboxField_form = None
+SelectField_form = None
+TextArea_form = None
+TextField_form = None
+WorkflowField_form = None
+address_field_name = checkbox_field_name = select_field_name = None
+workflow_field_name = textfield_name = textarea_name = None
+user_address1 = user_address2 = None
+ldda1 = library1 = library2 = library3 = library4 = library5 = library6 = None
+folder1 = folder2 = folder3 = folder4 = folder5 = folder6 = None
+admin_user = None
+regular_user1 = regular_user2 = regular_user3 = None
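+# NOTE: these module-level globals carry state between the numbered test
+# methods below, which are run in method-name order (test_000, test_005, ...).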
+
+
+class TestLibraryFeatures( TwillTestCase ):
+
+    def test_000_initiate_users( self ):
+        """Ensuring all required user accounts exist"""
+        self.login( email='test1@bx.psu.edu', username='regular-user1' )
+        global regular_user1
+        regular_user1 = get_user( 'test1@bx.psu.edu' )
+        assert regular_user1 is not None, 'Problem retrieving user with email "test1@bx.psu.edu" from the database'
+        global regular_user1_private_role
+        regular_user1_private_role = get_private_role( regular_user1 )
+        self.login( email='test2@bx.psu.edu', username='regular-user2' )
+        global regular_user2
+        regular_user2 = get_user( 'test2@bx.psu.edu' )
+        assert regular_user2 is not None, 'Problem retrieving user with email "test2@bx.psu.edu" from the database'
+        global regular_user2_private_role
+        regular_user2_private_role = get_private_role( regular_user2 )
+        self.login( email='test3@bx.psu.edu', username='regular-user3' )
+        global regular_user3
+        regular_user3 = get_user( 'test3@bx.psu.edu' )
+        assert regular_user3 is not None, 'Problem retrieving user with email "test3@bx.psu.edu" from the database'
+        global regular_user3_private_role
+        regular_user3_private_role = get_private_role( regular_user3 )
+        self.login( email='test@bx.psu.edu', username='admin-user' )
+        global admin_user
+        admin_user = get_user( 'test@bx.psu.edu' )
+        assert admin_user is not None, 'Problem retrieving user with email "test@bx.psu.edu" from the database'
+        global admin_user_private_role
+        admin_user_private_role = get_private_role( admin_user )
+
+    def test_005_create_library_templates( self ):
+        """Testing creating several LibraryInformationTemplate form definitions"""
+        # Logged in as admin_user
+        for type in [ 'AddressField', 'CheckboxField', 'SelectField', 'TextArea', 'TextField', 'WorkflowField' ]:
+            field_name = type.lower()
+            form_desc = '%s description' % type
+            num_options = 0
+            if type == 'SelectField':
+                # Pass number of options we want in our SelectField
+                num_options = 2
+            # Create form for library template
+            strings_displayed_after_submit = [ "The form '%s' has been updated with the changes." % type ]
+            self.create_form( name=type,
+                              description=form_desc,
+                              form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
+                              field_type=type,
+                              num_options=num_options,
+                              field_name=field_name,
+                              strings_displayed_after_submit=strings_displayed_after_submit )
+        # Get all of the new form definitions for later use
+        global AddressField_form
+        AddressField_form = get_form( 'AddressField' )
+        # NOTE: each of these names need to be the same as field_name defined above
+        # for each type.
+        global address_field_name
+        address_field_name = 'addressfield'
+
+        global CheckboxField_form
+        CheckboxField_form = get_form( 'CheckboxField' )
+        global checkbox_field_name
+        checkbox_field_name = 'checkboxfield'
+
+        global SelectField_form
+        SelectField_form = get_form( 'SelectField' )
+        global select_field_name
+        select_field_name = 'selectfield'
+
+        global TextArea_form
+        TextArea_form = get_form( 'TextArea' )
+        global textarea_name
+        textarea_name = 'textarea'
+
+        global TextField_form
+        TextField_form = get_form( 'TextField' )
+        global textfield_name
+        textfield_name = 'textfield'
+
+        global WorkflowField_form
+        WorkflowField_form = get_form( 'WorkflowField' )
+        global workflow_field_name
+        workflow_field_name = 'workflowfield'
+
+    def test_010_create_libraries( self ):
+        """Testing creating a new library for each template"""
+        # Logged in as admin_user
+        # library1 -> AddressField
+        # library2 -> CheckboxField
+        # library3 -> SelectField
+        # library4 -> TextArea
+        # library5 -> TextField
+        # library6 -> WorkflowField
+        for index, form in enumerate( [ AddressField_form, CheckboxField_form, SelectField_form, TextArea_form, TextField_form, WorkflowField_form ] ):
+            name = 'library%s' % str( index + 1 )
+            description = '%s description' % name
+            synopsis = '%s synopsis' % name
+            self.create_library( name=name, description=description, synopsis=synopsis )
+        # Get the libraries for later use
+        global library1
+        library1 = get_library( 'library1', 'library1 description', 'library1 synopsis' )
+        global library2
+        library2 = get_library( 'library2', 'library2 description', 'library2 synopsis' )
+        global library3
+        library3 = get_library( 'library3', 'library3 description', 'library3 synopsis' )
+        global library4
+        library4 = get_library( 'library4', 'library4 description', 'library4 synopsis' )
+        global library5
+        library5 = get_library( 'library5', 'library5 description', 'library5 synopsis' )
+        global library6
+        library6 = get_library( 'library6', 'library6 description', 'library6 synopsis' )
+
+    def test_015_add_template_to_library1( self ):
+        """Testing add an inheritable template containing an AddressField to library1"""
+        # Logged in as admin_user
+        # Add a template containing an AddressField to library1
+        self.add_template( cntrller='library_admin',
+                           item_type='library',
+                           form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
+                           form_id=self.security.encode_id( AddressField_form.id ),
+                           form_name=AddressField_form.name,
+                           library_id=self.security.encode_id( library1.id ) )
+
+    def test_020_add_folder_to_library1( self ):
+        """Testing adding a folder to library1"""
+        # Logged in as admin_user
+        # Add a folder to library1
+        folder = library1.root_folder
+        name = "folder"
+        description = "folder description"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library1.id ),
+                         self.security.encode_id( folder.id ),
+                         name=name,
+                         description=description )
+        global folder1
+        folder1 = get_folder( folder.id, name, description )
+
+    def test_025_check_library1( self ):
+        """Checking library1 and its root folder"""
+        # Logged in as admin_user
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library1.id ),
+                             strings_displayed=[ folder1.name, folder1.description ] )
+        # Make sure the template and contents were inherited to folder1
+        self.folder_info( cntrller='library_admin',
+                          folder_id=self.security.encode_id( folder1.id ),
+                          library_id=self.security.encode_id( library1.id ),
+                          template_refresh_field_name=address_field_name,
+                          strings_displayed=[ AddressField_form.name,
+                                              'This is an inherited template and is not required to be used with this folder' ] )
+
+    def test_030_add_dataset_to_folder1( self ):
+        """Testing adding a ldda1 to folder1, and adding a new UserAddress on the upload form."""
+        # Logged in as admin_user
+        # The AddressField template should be inherited to the library dataset upload form.  Passing
+        # the value 'new' should submit the form via refresh_on_change and allow new UserAddress information
+        # to be posted as part of the upload.
+        filename = '1.bed'
+        ldda_message = '1.bed message'
+        short_desc = 'Office'
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library1.id ),
+                                     folder_id=self.security.encode_id( folder1.id ),
+                                     filename=filename,
+                                     file_type='bed',
+                                     dbkey='hg18',
+                                     template_refresh_field_name=address_field_name,
+                                     ldda_message=ldda_message,
+                                     template_refresh_field_contents='new',
+                                     template_fields=[ ( '%s_short_desc' % address_field_name, short_desc ),
+                                                       ( '%s_name' % address_field_name, 'Dick' ),
+                                                       ( '%s_institution' % address_field_name, 'PSU' ),
+                                                       ( '%s_address' % address_field_name, '32 O Street' ),
+                                                       ( '%s_city' % address_field_name, 'Anywhere' ),
+                                                       ( '%s_state' % address_field_name, 'AK' ),
+                                                       ( '%s_postal_code' % address_field_name, '0000000' ),
+                                                       ( '%s_country' % address_field_name, 'USA' ) ],
+                                     strings_displayed=[ 'Upload files' ] )
+        global user_address1
+        user_address1 = get_user_address( admin_user, short_desc )
+        assert user_address1 is not None, 'Problem retrieving user_address1 from the database'
+        global ldda1
+        ldda1 = get_latest_ldda_by_name( filename )
+        assert ldda1 is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda1 from the database'
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library1.id ),
+                             strings_displayed=[ ldda1.name, ldda1.message, 'bed' ] )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library1.id ),
+                             self.security.encode_id( folder1.id ),
+                             self.security.encode_id( ldda1.id ),
+                             ldda1.name,
+                             strings_displayed=[ 'Dick' ] )
+
+    def test_035_edit_contents_of_ldda1_template( self ):
+        """Testing editing the contents of ldda1 AddressField template by adding a new user_address"""
+        short_desc = 'Home'
+        # Now add a new user_address to ldda1
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library1.id ),
+                             self.security.encode_id( folder1.id ),
+                             self.security.encode_id( ldda1.id ),
+                             ldda1.name,
+                             template_refresh_field_name=address_field_name,
+                             template_refresh_field_contents='new',
+                             template_fields=[ ( '%s_short_desc' % address_field_name, short_desc ),
+                                               ( '%s_name' % address_field_name, 'Richard' ),
+                                               ( '%s_institution' % address_field_name, 'PSU' ),
+                                               ( '%s_address' % address_field_name, '32 O Street' ),
+                                               ( '%s_city' % address_field_name, 'Anywhere' ),
+                                               ( '%s_state' % address_field_name, 'AK' ),
+                                               ( '%s_postal_code' % address_field_name, '0000000' ),
+                                               ( '%s_country' % address_field_name, 'USA' ) ],
+                             strings_displayed=[ short_desc ] )
+        global user_address2
+        user_address2 = get_user_address( admin_user, short_desc )
+        assert user_address2 is not None, 'Problem retrieving user_address2 from the database'
+
+    def test_040_edit_contents_of_folder1_template( self ):
+        """Testing editing the contents of folder1 AddressField template"""
+        # Make sure the template and contents were inherited to folder1
+        self.folder_info( cntrller='library_admin',
+                          folder_id=self.security.encode_id( folder1.id ),
+                          library_id=self.security.encode_id( library1.id ),
+                          template_refresh_field_name=address_field_name,
+                          template_refresh_field_contents=str( user_address2.id ),
+                          strings_displayed=[ AddressField_form.name,
+                                              'This is an inherited template and is not required to be used with this folder' ],
+                          strings_displayed_after_submit=[ 'Richard' ] )
+
+    def test_045_add_dataset_to_folder1( self ):
+        """Testing adding another ldda to folder1"""
+        # The upload form should now inherit user_address2
+        filename = '2.bed'
+        ldda_message = '2.bed message'
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library1.id ),
+                                     folder_id=self.security.encode_id( folder1.id ),
+                                     filename=filename,
+                                     file_type='bed',
+                                     dbkey='hg18',
+                                     template_refresh_field_name=address_field_name,
+                                     ldda_message=ldda_message,
+                                     strings_displayed=[ 'Upload files' ] )
+        # Make sure user_address2 is associated with ldda.
+        self.ldda_edit_info( cntrller='library_admin',
+                             library_id=self.security.encode_id( library1.id ),
+                             folder_id=self.security.encode_id( folder1.id ),
+                             ldda_id=self.security.encode_id( ldda1.id ),
+                             ldda_name=ldda1.name,
+                             template_refresh_field_name=address_field_name,
+                             strings_displayed=[ user_address2.desc ] )
+
+    def test_050_add_template_to_library2( self ):
+        """ Testing add an inheritable template containing an CheckboxField to library2"""
+        # Add a template containing an CheckboxField to library1
+        self.add_template( cntrller='library_admin',
+                           item_type='library',
+                           form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
+                           form_id=self.security.encode_id( CheckboxField_form.id ),
+                           form_name=CheckboxField_form.name,
+                           library_id=self.security.encode_id( library2.id ) )
+        # Check the CheckboxField to make sure the template contents are inherited
+        self.library_info( 'library_admin',
+                           self.security.encode_id( library2.id ),
+                           template_fields=[ ( checkbox_field_name, '1' ) ] )
+
+    def test_055_add_folder2_to_library2( self ):
+        """Testing adding a folder to library2"""
+        # Logged in as admin_user
+        # Add a folder to library2
+        folder = library2.root_folder
+        name = "folder"
+        description = "folder description"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library2.id ),
+                         self.security.encode_id( folder.id ),
+                         name=name,
+                         description=description )
+        global folder2
+        folder2 = get_folder( folder.id, name, description )
+
+    def test_060_check_library2( self ):
+        """Checking library2 and its root folder"""
+        # Logged in as admin_user
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library2.id ),
+                             strings_displayed=[ folder2.name, folder2.description ] )
+
+    def test_065_save_folder2_inherited_template( self ):
+        """Saving the inherited template for folder2"""
+        # Logged in as admin_user
+        # Save the inherited template
+        self.folder_info( cntrller='library_admin',
+                          folder_id=self.security.encode_id( folder2.id ),
+                          library_id=self.security.encode_id( library2.id ),
+                          template_fields=[ ( checkbox_field_name, '1' ) ],
+                          strings_displayed=[ CheckboxField_form.name,
+                                              'This is an inherited template and is not required to be used with this folder' ] )
+
+    def test_070_add_ldda_to_folder2( self ):
+        """
+        Testing adding a new library dataset to library2's folder, making sure the CheckboxField is
+        checked on the upload form.
+        """
+        # Logged in as admin_user
+        filename = '1.bed'
+        ldda_message = '1.bed message'
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library2.id ),
+                                     folder_id=self.security.encode_id( folder2.id ),
+                                     filename=filename,
+                                     file_type='bed',
+                                     dbkey='hg18',
+                                     ldda_message=ldda_message,
+                                     strings_displayed=[ 'CheckboxField', 'checked' ] )
+        ldda = get_latest_ldda_by_name( filename )
+        assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library2.id ),
+                             strings_displayed=[ ldda.name, ldda.message, 'bed' ] )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library2.id ),
+                             self.security.encode_id( folder2.id ),
+                             self.security.encode_id( ldda.id ),
+                             ldda.name,
+                             strings_displayed=[ 'CheckboxField', 'checked' ] )
+
+    def test_080_add_template_to_library3( self ):
+        """ Testing add an inheritable template containing an SelectField to library3"""
+        # Logged in as admin_user
+        self.add_template( cntrller='library_admin',
+                           item_type='library',
+                           form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
+                           form_id=self.security.encode_id( SelectField_form.id ),
+                           form_name=SelectField_form.name,
+                           library_id=self.security.encode_id( library3.id ) )
+        # Select the 2nd option in the SelectField to make sure the template contents are inherited
+        # SelectField option names are zero-based
+        self.library_info( 'library_admin',
+                           self.security.encode_id( library3.id ),
+                           template_fields=[ ( select_field_name, 'Option1' ) ] )
+
+    def test_085_add_folder3_to_library3( self ):
+        """Testing adding a folder to library3"""
+        # Logged in as admin_user
+        # Add a folder to library3
+        folder = library3.root_folder
+        name = "folder"
+        description = "folder description"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library3.id ),
+                         self.security.encode_id( folder.id ),
+                         name=name,
+                         description=description )
+        global folder3
+        folder3 = get_folder( folder.id, name, description )
+
+    def test_090_check_library3( self ):
+        """Checking library3 and its root folder"""
+        # Logged in as admin_user
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library3.id ),
+                             strings_displayed=[ folder3.name, folder3.description ] )
+
+    def test_095_save_folder3_inherited_template( self ):
+        """Saving the inherited template for folder3"""
+        # Logged in as admin_user
+        # Save the inherited template
+        self.folder_info( cntrller='library_admin',
+                          folder_id=self.security.encode_id( folder3.id ),
+                          library_id=self.security.encode_id( library3.id ),
+                          template_fields=[ ( select_field_name, 'Option1' ) ],
+                          strings_displayed=[ SelectField_form.name,
+                                              'This is an inherited template and is not required to be used with this folder',
+                                              'Option1' ] )
+
+    def test_100_add_ldda_to_folder3( self ):
+        """
+        Testing adding a new library dataset to library3's folder, making sure the SelectField setting is correct on the upload form.
+        """
+        filename = '3.bed'
+        ldda_message = '3.bed message'
+        # Logged in as admin_user
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library3.id ),
+                                     folder_id=self.security.encode_id( folder3.id ),
+                                     filename=filename,
+                                     file_type='bed',
+                                     dbkey='hg18',
+                                     ldda_message=ldda_message,
+                                     strings_displayed=[ 'SelectField', 'selected>Option1' ] )
+        ldda = get_latest_ldda_by_name( filename )
+        assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library3.id ),
+                             strings_displayed=[ ldda.name, ldda.message, 'bed' ] )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library3.id ),
+                             self.security.encode_id( folder3.id ),
+                             self.security.encode_id( ldda.id ),
+                             ldda.name,
+                             strings_displayed=[ 'SelectField', 'Option1' ] )
+        # Import a dataset from the current history
+        filename = '8.bed'
+        self.new_history( name='import+with+SelectField' )
+        self.upload_file( filename )
+        hda = get_latest_hda()
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library3.id ),
+                                     folder_id=self.security.encode_id( folder3.id ),
+                                     upload_option='import_from_history',
+                                     hda_ids=self.security.encode_id( hda.id ),
+                                     strings_displayed=[ '<select name="%s" last_selected_value="Option1">' % select_field_name ] )
+        ldda = get_latest_ldda_by_name( filename )
+        assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library3.id ),
+                             strings_displayed=[ ldda.name, 'bed' ] )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library3.id ),
+                             self.security.encode_id( folder3.id ),
+                             self.security.encode_id( ldda.id ),
+                             ldda.name,
+                             strings_displayed=[ 'SelectField', 'Option1' ] )
+
+    def test_105_add_template_to_library4( self ):
+        """ Testing add an inheritable template containing an TextArea to library4"""
+        # Logged in as admin_user
+        # Add an inheritable template to library4
+        self.add_template( cntrller='library_admin',
+                           item_type='library',
+                           form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
+                           form_id=self.security.encode_id( TextArea_form.id ),
+                           form_name=TextArea_form.name,
+                           library_id=self.security.encode_id( library4.id ) )
+        # Fill in the TextArea to make sure the template contents are inherited
+        self.library_info( 'library_admin',
+                           self.security.encode_id( library4.id ),
+                           template_fields=[ ( textarea_name, 'This text should be inherited' ) ] )
+
+    def test_110_add_folder4_to_library4( self ):
+        """Testing adding a folder to library4"""
+        # Logged in as admin_user
+        # Add a folder to library4
+        folder = library4.root_folder
+        name = "folder"
+        description = "folder description"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library4.id ),
+                         self.security.encode_id( folder.id ),
+                         name=name,
+                         description=description )
+        global folder4
+        folder4 = get_folder( folder.id, name, description )
+
+    def test_115_save_folder4_inherited_template( self ):
+        """Saving the inherited template for folder4"""
+        # Logged in as admin_user
+        # Save the inherited template
+        self.folder_info( cntrller='library_admin',
+                          folder_id=self.security.encode_id( folder4.id ),
+                          library_id=self.security.encode_id( library4.id ),
+                          template_fields=[ ( textarea_name, 'This text should be inherited' ) ],
+                          strings_displayed=[ TextArea_form.name,
+                                              'This is an inherited template and is not required to be used with this folder',
+                                              'This text should be inherited' ] )
+
+    def test_120_add_ldda_to_folder4( self ):
+        """
+        Testing adding a new library dataset to library4's folder, making sure the TextArea setting is correct on the upload form.
+        """
+        filename = '4.bed'
+        ldda_message = '4.bed message'
+        # Logged in as admin_user
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library4.id ),
+                                     folder_id=self.security.encode_id( folder4.id ),
+                                     filename=filename,
+                                     file_type='bed',
+                                     dbkey='hg18',
+                                     ldda_message=ldda_message,
+                                     strings_displayed=[ 'TextArea', 'This text should be inherited' ] )
+        ldda = get_latest_ldda_by_name( filename )
+        assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library4.id ),
+                             strings_displayed=[ ldda.name, ldda.message, 'bed' ] )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library4.id ),
+                             self.security.encode_id( folder4.id ),
+                             self.security.encode_id( ldda.id ),
+                             ldda.name,
+                             strings_displayed=[ 'TextArea', 'This text should be inherited' ] )
+
+    def test_125_add_template_to_library5( self ):
+        """ Testing add an inheritable template containing an TextField to library5"""
+        # Add an inheritable template to library5
+        self.add_template( cntrller='library_admin',
+                           item_type='library',
+                           form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
+                           form_id=self.security.encode_id( TextField_form.id ),
+                           form_name=TextField_form.name,
+                           library_id=self.security.encode_id( library5.id ) )
+        # Fill in the TextField to make sure the template contents are inherited
+        self.library_info( 'library_admin',
+                           self.security.encode_id( library5.id ),
+                           template_fields=[ ( textfield_name, 'This text should be inherited' ) ] )
+
+    def test_130_add_folder5_to_library5( self ):
+        """Testing adding a folder to library5"""
+        # Logged in as admin_user
+        # Add a folder to library5
+        folder = library5.root_folder
+        name = "folder"
+        description = "folder description"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library5.id ),
+                         self.security.encode_id( folder.id ),
+                         name=name,
+                         description=description )
+        global folder5
+        folder5 = get_folder( folder.id, name, description )
+
+    def test_135_save_folder5_inherited_template( self ):
+        """Saving the inherited template for folder5"""
+        # Logged in as admin_user
+        # Save the inherited template
+        self.folder_info( cntrller='library_admin',
+                          folder_id=self.security.encode_id( folder5.id ),
+                          library_id=self.security.encode_id( library5.id ),
+                          template_fields=[ ( textfield_name, 'This text should be inherited' ) ],
+                          strings_displayed=[ TextField_form.name,
+                                              'This is an inherited template and is not required to be used with this folder',
+                                              'This text should be inherited' ] )
+
+    def test_140_add_ldda_to_folder5( self ):
+        """
+        Testing adding a new library dataset to library5's folder, making sure the TextField setting is correct on the upload form.
+        """
+        # Logged in as admin_user
+        filename = '5.bed'
+        ldda_message = '5.bed message'
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library5.id ),
+                                     folder_id=self.security.encode_id( folder5.id ),
+                                     filename=filename,
+                                     file_type='bed',
+                                     dbkey='hg18',
+                                     ldda_message=ldda_message,
+                                     strings_displayed=[ 'TextField', 'This text should be inherited' ] )
+        ldda = get_latest_ldda_by_name( filename )
+        assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library5.id ),
+                             strings_displayed=[ ldda.name, ldda.message, 'bed' ] )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library5.id ),
+                             self.security.encode_id( folder5.id ),
+                             self.security.encode_id( ldda.id ),
+                             ldda.name,
+                             strings_displayed=[ 'TextField', 'This text should be inherited' ] )
+
+    def test_145_edit_library5_template_layout( self ):
+        """Test editing the layout of library5's template"""
+        # Currently there is only a TextField, and we'll add a TextArea.
+        self.edit_template( cntrller='library_admin',
+                            item_type='library',
+                            form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
+                            library_id=self.security.encode_id( library5.id ),
+                            field_type='TextArea',
+                            field_label_1=TextArea_form.name,
+                            field_helptext_1='%s help' % TextArea_form.name,
+                            field_default_1='%s default' % TextArea_form.name )
+
+    def test_150_add_ldda_to_library5( self ):
+        """
+        Testing adding a new library dataset to library5's folder, making sure the TextField and new TextArea settings are correct on the upload form.
+        """
+        filename = '6.bed'
+        ldda_message = '6.bed message'
+        # Logged in as admin_user
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library5.id ),
+                                     folder_id=self.security.encode_id( library5.root_folder.id ),
+                                     filename=filename,
+                                     file_type='bed',
+                                     dbkey='hg18',
+                                     ldda_message=ldda_message,
+                                     strings_displayed=[ 'TextField',
+                                                         'This text should be inherited',
+                                                         'TextArea' ] )
+        ldda = get_latest_ldda_by_name( filename )
+        assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library5.id ),
+                             strings_displayed=[ ldda.name, ldda.message, 'bed' ] )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library5.id ),
+                             self.security.encode_id( library5.root_folder.id ),
+                             self.security.encode_id( ldda.id ),
+                             ldda.name,
+                             strings_displayed=[ 'TextField',
+                                                 'This text should be inherited',
+                                                 'TextArea' ] )
+
+    def test_155_add_template_to_library6( self ):
+        """ Testing add an inheritable template containing an WorkflowField to library6"""
+        # Add an inheritable template to library6
+        # We won't select an option since we have no workflow to select
+        self.add_template( cntrller='library_admin',
+                           item_type='library',
+                           form_type=galaxy.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE,
+                           form_id=self.security.encode_id( WorkflowField_form.id ),
+                           form_name=WorkflowField_form.name,
+                           library_id=self.security.encode_id( library6.id ) )
+
+    def test_160_add_folder6_to_library6( self ):
+        """Testing adding a folder to library6"""
+        # Logged in as admin_user
+        # Add a folder to library6
+        folder = library6.root_folder
+        name = "folder"
+        description = "folder description"
+        self.add_folder( 'library_admin',
+                         self.security.encode_id( library6.id ),
+                         self.security.encode_id( folder.id ),
+                         name=name,
+                         description=description )
+        global folder6
+        folder6 = get_folder( folder.id, name, description )
+
+    def test_165_save_folder6_inherited_template( self ):
+        """Saving the inherited template for folder6"""
+        # Logged in as admin_user
+        # Save the inherited template - we won't select an option since we have no workflow to select
+        self.folder_info( cntrller='library_admin',
+                          folder_id=self.security.encode_id( folder6.id ),
+                          library_id=self.security.encode_id( library6.id ),
+                          template_fields=[ ( workflow_field_name, 'none' ) ],
+                          strings_displayed=[ WorkflowField_form.name,
+                                              'This is an inherited template and is not required to be used with this folder',
+                                              'none' ] )
+
+    def test_170_add_ldda_to_folder6( self ):
+        """
+        Testing adding a new library dataset to library6's folder, making sure the WorkflowField setting is correct on the upload form.
+        """
+        # Logged in as admin_user
+        filename = '7.bed'
+        ldda_message = '7.bed message'
+        self.upload_library_dataset( cntrller='library_admin',
+                                     library_id=self.security.encode_id( library6.id ),
+                                     folder_id=self.security.encode_id( folder6.id ),
+                                     filename=filename,
+                                     file_type='bed',
+                                     dbkey='hg18',
+                                     ldda_message=ldda_message,
+                                     strings_displayed=[ 'WorkflowField', 'none' ] )
+        ldda = get_latest_ldda_by_name( filename )
+        assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
+        self.browse_library( cntrller='library_admin',
+                             library_id=self.security.encode_id( library6.id ),
+                             strings_displayed=[ ldda.name, ldda.message, 'bed' ] )
+        # Make sure the library template contents were correctly saved
+        self.ldda_edit_info( 'library_admin',
+                             self.security.encode_id( library6.id ),
+                             self.security.encode_id( folder6.id ),
+                             self.security.encode_id( ldda.id ),
+                             ldda.name,
+                             strings_displayed=[ 'WorkflowField', 'none' ] )
+
+    def test_999_reset_data_for_later_test_runs( self ):
+        """Reseting data to enable later test runs to pass"""
+        # Logged in as admin_user
+        ##################
+        # Delete all form definitions
+        ##################
+        for form in [ AddressField_form, CheckboxField_form, SelectField_form, TextArea_form, TextField_form, WorkflowField_form ]:
+            self.mark_form_deleted( self.security.encode_id( form.form_definition_current.id ) )
+        ##################
+        # Mark all user_addresses deleted
+        ##################
+        for user_address in [ user_address1, user_address2 ]:
+            mark_obj_deleted( user_address )
+        ##################
+        # Purge all libraries
+        ##################
+        for library in [ library1, library2, library3, library4, library5, library6 ]:
+            self.delete_library_item( 'library_admin',
+                                      self.security.encode_id( library.id ),
+                                      self.security.encode_id( library.id ),
+                                      library.name,
+                                      item_type='library' )
+            self.purge_library( self.security.encode_id( library.id ), library.name )
+        ##################
+        # Make sure all users are associated only with their private roles
+        ##################
+        for user in [ admin_user, regular_user1, regular_user2, regular_user3 ]:
+            refresh( user )
+            if len( user.roles ) != 1:
+                raise AssertionError( '%d UserRoleAssociations are associated with %s ( should be 1 )' % ( len( user.roles ), user.email ) )
diff --git a/test/functional/test_toolbox.py b/test/functional/test_toolbox.py
new file mode 100644
index 0000000..99d5f97
--- /dev/null
+++ b/test/functional/test_toolbox.py
@@ -0,0 +1,316 @@
+from __future__ import print_function
+
+import logging
+import new
+import sys
+
+try:
+    from nose.tools import nottest
+except ImportError:
+    def nottest(x):
+        return x
+
+from base.instrument import register_job_data
+from base.interactor import build_interactor, JobOutputsError, RunToolException, stage_data_in_history
+from base.twilltestcase import TwillTestCase
+from galaxy.tools import DataManagerTool
+from galaxy.tools.verify.asserts import verify_assertions
+from galaxy.util import bunch
+
+log = logging.getLogger( __name__ )
+
+toolbox = None
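+# Set by the test framework before test generation, analogous to the
+# module-level 'data_managers' hook in test_data_managers.py.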
+
+# Do not test Data Managers as part of the standard Tool Test Framework.
+TOOL_TYPES_NO_TEST = ( DataManagerTool, )
+
+
+class ToolTestCase( TwillTestCase ):
+    """Abstract test case that runs tests based on a `galaxy.tools.test.ToolTest`"""
+
+    def do_it( self, testdef ):
+        """
+        Run through a tool test case.
+        """
+        shed_tool_id = self.shed_tool_id
+
+        self._handle_test_def_errors( testdef )
+
+        galaxy_interactor = self._galaxy_interactor( testdef )
+
+        test_history = galaxy_interactor.new_history()
+
+        stage_data_in_history( galaxy_interactor, testdef.test_data(), test_history, shed_tool_id )
+
+        # Once data is ready, run the tool and check the outputs - record API
+        # input, job info, tool run exceptions, as well as exceptions related
+        # to job output checking, and register them with the test plugin so it
+        # can record structured information.
+        tool_inputs = None
+        job_stdio = None
+        job_output_exceptions = None
+        tool_execution_exception = None
+        expected_failure_occurred = False
+        try:
+            try:
+                tool_response = galaxy_interactor.run_tool( testdef, test_history )
+                data_list, jobs, tool_inputs = tool_response.outputs, tool_response.jobs, tool_response.inputs
+                data_collection_list = tool_response.output_collections
+            except RunToolException as e:
+                tool_inputs = e.inputs
+                tool_execution_exception = e
+                if not testdef.expect_failure:
+                    raise e
+                else:
+                    expected_failure_occurred = True
+            except Exception as e:
+                tool_execution_exception = e
+                raise e
+
+            if not expected_failure_occurred:
+                self.assertTrue( data_list or data_collection_list )
+
+                try:
+                    job_stdio = self._verify_outputs( testdef, test_history, jobs, shed_tool_id, data_list, data_collection_list, galaxy_interactor )
+                except JobOutputsError as e:
+                    job_stdio = e.job_stdio
+                    job_output_exceptions = e.output_exceptions
+                    raise e
+                except Exception as e:
+                    job_output_exceptions = [e]
+                    raise e
+        finally:
+            job_data = {}
+            if tool_inputs is not None:
+                job_data["inputs"] = tool_inputs
+            if job_stdio is not None:
+                job_data["job"] = job_stdio
+            if job_output_exceptions:
+                job_data["output_problems"] = [str(_) for _ in job_output_exceptions]
+            if tool_execution_exception:
+                job_data["execution_problem"] = str(tool_execution_exception)
+            register_job_data(job_data)
+
+        galaxy_interactor.delete_history( test_history )
+
+    def _galaxy_interactor( self, testdef ):
+        return build_interactor( self, testdef.interactor )
+
+    def _handle_test_def_errors(self, testdef):
+        # If the test generation had an error, raise
+        if testdef.error:
+            if testdef.exception:
+                raise testdef.exception
+            else:
+                raise Exception( "Test parse failure" )
+
+    def _verify_outputs( self, testdef, history, jobs, shed_tool_id, data_list, data_collection_list, galaxy_interactor ):
+        assert len(jobs) == 1, "Test framework logic error, somehow tool test resulted in more than one job."
+        job = jobs[ 0 ]
+
+        maxseconds = testdef.maxseconds
+        if testdef.num_outputs is not None:
+            expected = testdef.num_outputs
+            actual = len( data_list )
+            if expected != actual:
+                message_template = "Incorrect number of outputs - expected %d, found %s."
+                message = message_template % ( expected, actual )
+                raise Exception( message )
+        found_exceptions = []
+
+        def register_exception(e):
+            if not found_exceptions:
+                # Only print this stuff out once.
+                for stream in ['stdout', 'stderr']:
+                    if stream in job_stdio:
+                        print(self._format_stream( job_stdio[ stream ], stream=stream, format=True ), file=sys.stderr)
+            found_exceptions.append(e)
+
+        if testdef.expect_failure:
+            if testdef.outputs:
+                raise Exception("Cannot specify outputs in a test expecting failure.")
+
+        # Wait for the job to complete and register exceptions if the final
+        # status was not what the test was expecting.
+        job_failed = False
+        try:
+            galaxy_interactor.wait_for_job( job[ 'id' ], history, maxseconds )
+        except Exception as e:
+            job_failed = True
+            if not testdef.expect_failure:
+                found_exceptions.append(e)
+
+        job_stdio = galaxy_interactor.get_job_stdio( job[ 'id' ] )
+
+        if not job_failed and testdef.expect_failure:
+            error = AssertionError("Expected job to fail but Galaxy indicated the job successfully completed.")
+            register_exception(error)
+
+        expect_exit_code = testdef.expect_exit_code
+        if expect_exit_code is not None:
+            exit_code = job_stdio["exit_code"]
+            if str(expect_exit_code) != str(exit_code):
+                error = AssertionError("Expected job to complete with exit code %s, found %s" % (expect_exit_code, exit_code))
+                register_exception(error)
+
+        for output_index, output_tuple in enumerate(testdef.outputs):
+            # Get the correct hid
+            name, outfile, attributes = output_tuple
+            output_testdef = bunch.Bunch( name=name, outfile=outfile, attributes=attributes )
+            try:
+                output_data = data_list[ name ]
+            except (TypeError, KeyError):
+                # Legacy - fall back on ordered data list access if data_list
+                # is just a list (e.g. with the twill variant or when an
+                # output changes its name).
+                if hasattr(data_list, "values"):
+                    output_data = list(data_list.values())[ output_index ]
+                else:
+                    output_data = data_list[ len(data_list) - len(testdef.outputs) + output_index ]
+            self.assertTrue( output_data is not None )
+            try:
+                galaxy_interactor.verify_output( history, jobs, output_data, output_testdef=output_testdef, shed_tool_id=shed_tool_id, maxseconds=maxseconds )
+            except Exception as e:
+                register_exception(e)
+
+        other_checks = {
+            "command_line": "Command produced by the job",
+            "stdout": "Standard output of the job",
+            "stderr": "Standard error of the job",
+        }
+        for what, description in other_checks.items():
+            if getattr( testdef, what, None ) is not None:
+                try:
+                    data = job_stdio[what]
+                    verify_assertions( data, getattr( testdef, what ) )
+                except AssertionError as err:
+                    errmsg = '%s different than expected\n' % description
+                    errmsg += str( err )
+                    register_exception( AssertionError( errmsg ) )
+
+        for output_collection_def in testdef.output_collections:
+            try:
+                name = output_collection_def.name
+                # TODO: data_collection_list is clearly a bad name for a dictionary.
+                if name not in data_collection_list:
+                    template = "Failed to find output [%s], tool outputs include [%s]"
+                    message = template % (name, ",".join(data_collection_list.keys()))
+                    raise AssertionError(message)
+
+                # Data collection returned from submission, elements may have been populated after
+                # the job completed so re-hit the API for more information.
+                data_collection_returned = data_collection_list[ name ]
+                data_collection = galaxy_interactor._get( "dataset_collections/%s" % data_collection_returned[ "id" ], data={"instance_type": "history"} ).json()
+
+                def get_element( elements, element_identifier ):
+                    for element in elements:
+                        if element["element_identifier"] == element_identifier:
+                            return element
+                    return None
+
+                expected_collection_type = output_collection_def.collection_type
+                if expected_collection_type:
+                    collection_type = data_collection[ "collection_type"]
+                    if expected_collection_type != collection_type:
+                        template = "Expected output collection [%s] to be of type [%s], was of type [%s]."
+                        message = template % (name, expected_collection_type, collection_type)
+                        raise AssertionError(message)
+
+                expected_element_count = output_collection_def.count
+                if expected_element_count:
+                    actual_element_count = len(data_collection[ "elements" ])
+                    if expected_element_count != actual_element_count:
+                        template = "Expected output collection [%s] to have %s elements, but it had %s."
+                        message = template % (name, expected_element_count, actual_element_count)
+                        raise AssertionError(message)
+
+                def verify_elements( element_objects, element_tests ):
+                    for element_identifier, ( element_outfile, element_attrib ) in element_tests.items():
+                        element = get_element( element_objects, element_identifier )
+                        if not element:
+                            template = "Failed to find identifier [%s] for testing, tool generated collection elements [%s]"
+                            message = template % (element_identifier, element_objects)
+                            raise AssertionError(message)
+
+                        element_type = element["element_type"]
+                        if element_type != "dataset_collection":
+                            hda = element[ "object" ]
+                            galaxy_interactor.verify_output_dataset(
+                                history,
+                                hda_id=hda["id"],
+                                outfile=element_outfile,
+                                attributes=element_attrib,
+                                shed_tool_id=shed_tool_id
+                            )
+                        else:
+                            elements = element[ "object" ][ "elements" ]
+                            verify_elements( elements, element_attrib.get( "elements", {} ) )
+
+                verify_elements( data_collection[ "elements" ], output_collection_def.element_tests )
+            except Exception as e:
+                register_exception(e)
+
+        if found_exceptions:
+            raise JobOutputsError(found_exceptions, job_stdio)
+        else:
+            return job_stdio
+
+
+class JobOutputsError(AssertionError):
+
+    def __init__(self, output_exceptions, job_stdio):
+        big_message = "\n".join(map(str, output_exceptions))
+        super(JobOutputsError, self).__init__(big_message)
+        self.job_stdio = job_stdio
+        self.output_exceptions = output_exceptions
+
+
+ at nottest
+def build_tests( app=None, testing_shed_tools=False, master_api_key=None, user_api_key=None ):
+    """
+    If the module level variable `toolbox` is set, generate `ToolTestCase`
+    classes for all of its tests and put them into this module's globals() so
+    they can be discovered by nose.
+    """
+    if app is None:
+        return
+
+    # Push all the toolbox tests to module level
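+    # For example, a tool with id 'cat1' that defines two tests yields a class
+    # named TestForTool_cat1 with methods test_tool_000000 and test_tool_000001.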
+    G = globals()
+
+    # Eliminate all previous tests from G.
+    for key in list( G.keys() ):
+        if key.startswith( 'TestForTool_' ):
+            del G[ key ]
+    for i, tool_id in enumerate( app.toolbox.tools_by_id ):
+        tool = app.toolbox.get_tool( tool_id )
+        if isinstance( tool, TOOL_TYPES_NO_TEST ):
+            # We do not test certain types of tools (e.g. Data Manager tools) as part of ToolTestCase
+            continue
+        if tool.tests:
+            shed_tool_id = None if not testing_shed_tools else tool.id
+            # Create a new subclass of ToolTestCase, dynamically adding methods
+            # named test_tool_XXX that run each test defined in the tool config.
+            name = "TestForTool_" + tool.id.replace( ' ', '_' )
+            baseclasses = ( ToolTestCase, )
+            namespace = dict()
+            for j, testdef in enumerate( tool.tests ):
+                test_function_name = 'test_tool_%06d' % j
+
+                def make_test_method( td ):
+                    def test_tool( self ):
+                        self.do_it( td )
+                    test_tool.__name__ = test_function_name
+
+                    return test_tool
+
+                test_method = make_test_method( testdef )
+                test_method.__doc__ = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
+                namespace[ test_function_name ] = test_method
+                namespace[ 'shed_tool_id' ] = shed_tool_id
+                namespace[ 'master_api_key' ] = master_api_key
+                namespace[ 'user_api_key' ] = user_api_key
+            # The new.classobj function returns a new class object with the given
+            # name, derived from baseclasses (a tuple of classes) and carrying the
+            # attributes in the namespace dict.
+            new_class_obj = new.classobj( name, baseclasses, namespace )
+            G[ name ] = new_class_obj
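+# Note: new.classobj exists only on Python 2; a sketch of the equivalent
+# dynamic class creation under Python 3 (not used by this module) would be:
+#     G[ name ] = type( name, baseclasses, namespace )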
diff --git a/test/functional/tool-data/data1/entry.txt b/test/functional/tool-data/data1/entry.txt
new file mode 100644
index 0000000..f70b104
--- /dev/null
+++ b/test/functional/tool-data/data1/entry.txt
@@ -0,0 +1 @@
+This is data 1.
\ No newline at end of file
diff --git a/test/functional/tool-data/data1/entry.txt.index b/test/functional/tool-data/data1/entry.txt.index
new file mode 100644
index 0000000..f3a6edd
--- /dev/null
+++ b/test/functional/tool-data/data1/entry.txt.index
@@ -0,0 +1 @@
+fancy compressed map built with cool algorithms...
\ No newline at end of file
diff --git a/test/functional/tool-data/data2/entry.txt b/test/functional/tool-data/data2/entry.txt
new file mode 100644
index 0000000..12c526a
--- /dev/null
+++ b/test/functional/tool-data/data2/entry.txt
@@ -0,0 +1 @@
+This is data 2.
\ No newline at end of file
diff --git a/test/functional/tool-data/data2/entry.txt.index b/test/functional/tool-data/data2/entry.txt.index
new file mode 100644
index 0000000..8965f83
--- /dev/null
+++ b/test/functional/tool-data/data2/entry.txt.index
@@ -0,0 +1 @@
+fancy compressed map built with cool algorithms...2
\ No newline at end of file
diff --git a/test/functional/tool-data/fasta_indexes.loc b/test/functional/tool-data/fasta_indexes.loc
new file mode 100644
index 0000000..3383ed8
--- /dev/null
+++ b/test/functional/tool-data/fasta_indexes.loc
@@ -0,0 +1,2 @@
+hg19	hg19	hg19	hg19
+hg18	hg18	hg18	hg18
\ No newline at end of file
diff --git a/test/functional/tool-data/sample_tool_data_tables.xml b/test/functional/tool-data/sample_tool_data_tables.xml
new file mode 100644
index 0000000..7264fa3
--- /dev/null
+++ b/test/functional/tool-data/sample_tool_data_tables.xml
@@ -0,0 +1,14 @@
+<tables>
+  <table name="testalpha" comment_char="#">
+    <columns>value, name, path</columns>
+    <file path="${__HERE__}/testalpha.loc" />
+  </table>
+  <table name="testbeta" comment_char="#">
+    <columns>value, path</columns>
+    <file path="${__HERE__}/testbeta.loc" />
+  </table>
+  <table name="test_fasta_indexes" comment_char="#">
+    <columns>value, dbkey, name, path</columns>
+    <file path="${__HERE__}/fasta_indexes.loc" />
+  </table>
+</tables>
diff --git a/test/functional/tool-data/testalpha.loc b/test/functional/tool-data/testalpha.loc
new file mode 100644
index 0000000..0e1d12f
--- /dev/null
+++ b/test/functional/tool-data/testalpha.loc
@@ -0,0 +1,2 @@
+data1	data1name	${__HERE__}/data1/entry.txt
+data2	data2name	${__HERE__}/data2/entry.txt
diff --git a/test/functional/tool-data/testbeta.loc b/test/functional/tool-data/testbeta.loc
new file mode 100644
index 0000000..e69de29
diff --git a/test/functional/tools/README.txt b/test/functional/tools/README.txt
new file mode 100644
index 0000000..f4d0877
--- /dev/null
+++ b/test/functional/tools/README.txt
@@ -0,0 +1,15 @@
+This directory contains tools only useful for testing and
+demonstrating aspects of the tool syntax. Run the test driver script
+'run_tests.sh' with '-framework' as the first argument to run through
+these tests. Pass '-id' along with one of these tool ids to test
+a single tool.
+
+Some API tests use these tools to test various features of the API,
+tool, and workflow subsystems. Pass the argument
+'-with_framework_test_tools' to 'run_tests.sh' in addition to '-api'
+to ensure these tools get loaded during the testing process.
+
+Finally, to play around with these tools interactively - simply
+replace the 'galaxy.ini' option 'tool_config_file' with:
+
+tool_config_file = test/functional/tools/samples_tool_conf.xml
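+
+For example, to run only the tests for the 'boolean_conditional' tool
+defined in this directory:
+
+./run_tests.sh -framework -id boolean_conditional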
diff --git a/test/functional/tools/bibtex.xml b/test/functional/tools/bibtex.xml
new file mode 100644
index 0000000..031dbb0
--- /dev/null
+++ b/test/functional/tools/bibtex.xml
@@ -0,0 +1,322 @@
+<tool id="bibtex" name="Concatenate datasets (with a bunch of citations)">
+    <command>
+      cat $input1
+      #for $q in $queries
+            ${q.input2}
+      #end for
+      > $out_file1;
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Concatenate Dataset" />
+        <repeat name="queries" title="Dataset">
+            <param name="input2" type="data" label="Select" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" metadata_source="input1"/>
+    </outputs>
+    <help>
+
+.. class:: warningmark
+
+**WARNING:** Be careful not to concatenate datasets of different kinds (e.g., sequences with intervals). This tool does not check if the datasets being concatenated are in the same format. 
+
+    </help>
+    <citations>
+      <citation type="doi">10.1101/gr.4086505</citation>
+      <citation type="bibtex">
+ at misc{Garrison2015,
+  author = {Garrison, Erik},
+  year = {2015},
+  title = {vcflib},
+  publisher = {GitHub},
+  journal = {GitHub repository},
+  url = {https://github.com/ekg/vcflib},
+}
+      </citation>
+      <!-- BibTeX examples taken from:
+      http://www2.galcit.caltech.edu/~jeshep/GraphicsBib/NatBib/node3.html
+      -->
+      <citation type="bibtex">
+ at ARTICLE{article-minimal,
+   author = {L[eslie] A. Aamport},
+   title = {The Gnats and Gnus Document Preparation System},
+   journal = {\mbox{G-Animal's} Journal},
+   year = 1986,
+}
+      </citation>
+      <citation type="bibtex">
+ at ARTICLE{article-full,
+   author = {L[eslie] A. Aamport},
+   title = {The Gnats and Gnus Document Preparation System},
+   journal = {\mbox{G-Animal's} Journal},
+   year = 1986,
+   volume = 41,
+   number = 7,
+   pages = "73+",
+   month = jul,
+   note = "This is a full ARTICLE entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at INBOOK{inbook-minimal,
+   author = "Donald E. Knuth",
+   title = "Fundamental Algorithms",
+   publisher = "Addison-Wesley",
+   year = "{\noopsort{1973b}}1973",
+   chapter = "1.2",
+
+}      </citation>
+      <citation type="bibtex">
+ at INBOOK{inbook-full,
+   author = "Donald E. Knuth",
+   title = "Fundamental Algorithms",
+   volume = 1,
+   series = "The Art of Computer Programming",
+   publisher = "Addison-Wesley",
+   address = "Reading, Massachusetts",
+   edition = "Second",
+   month = "10~" # jan,
+   year = "{\noopsort{1973b}}1973",
+   type = "Section",
+   chapter = "1.2",
+   pages = "10--119",
+   note = "This is a full INBOOK entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at BOOK{book-minimal,
+   author = "Donald E. Knuth",
+   title = "Seminumerical Algorithms",
+   publisher = "Addison-Wesley",
+   year = "{\noopsort{1973c}}1981",
+}
+      </citation>
+      <citation type="bibtex">
+ at BOOK{book-full,
+   author = "Donald E. Knuth",
+   title = "Seminumerical Algorithms",
+   volume = 2,
+   series = "The Art of Computer Programming",
+   publisher = "Addison-Wesley",
+   address = "Reading, Massachusetts",
+   edition = "Second",
+   month = "10~" # jan,
+   year = "{\noopsort{1973c}}1981",
+   note = "This is a full BOOK entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at BOOK{whole-set,
+   author = "Donald E. Knuth",
+   publisher = "Addison-Wesley",
+   title = "The Art of Computer Programming",
+   series = "Four volumes",
+   year = "{\noopsort{1973a}}{\switchargs{--90}{1968}}",
+   note = "Seven volumes planned (this is a cross-referenced set of BOOKs)",
+}
+      </citation>
+      <citation type="bibtex">
+ at BOOKLET{booklet-full,
+   author = "Jill C. Knvth",
+   title = "The Programming of Computer Art",
+   howpublished = "Vernier Art Center",
+   address = "Stanford, California",
+   month = feb,
+   year = 1988,
+   note = "This is a full BOOKLET entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at INCOLLECTION{incollection-minimal,
+   author = "Daniel D. Lincoll",
+   title = "Semigroups of Recurrences",
+   booktitle = "High Speed Computer and Algorithm Organization",
+   publisher = "Academic Press",
+   year = 1977,
+}
+      </citation>
+      <citation type="bibtex">
+ at INCOLLECTION{incollection-full,
+   author = "Daniel D. Lincoll",
+   title = "Semigroups of Recurrences",
+   editor = "David J. Lipcoll and D. H. Lawrie and A. H. Sameh",
+   booktitle = "High Speed Computer and Algorithm Organization",
+   number = 23,
+   series = "Fast Computers",
+   chapter = 3,
+   type = "Part",
+   pages = "179--183",
+   publisher = "Academic Press",
+   address = "New York",
+   edition = "Third",
+   month = sep,
+   year = 1977,
+   note = "This is a full INCOLLECTION entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at BOOK{whole-collection,
+   editor = "David J. Lipcoll and D. H. Lawrie and A. H. Sameh",
+   title = "High Speed Computer and Algorithm Organization",
+   booktitle = "High Speed Computer and Algorithm Organization",
+   number = 23,
+   series = "Fast Computers",
+   publisher = "Academic Press",
+   address = "New York",
+   edition = "Third",
+   month = sep,
+   year = 1977,
+}
+      </citation>
+      <citation type="bibtex">
+ at MANUAL{manual-full,
+   author = "Larry Manmaker",
+   title = "The Definitive Computer Manual",
+   organization = "Chips-R-Us",
+   address = "Silicon Valley",
+   edition = "Silver",
+   month = apr # "-" # may,
+   year = 1986,
+   note = "This is a full MANUAL entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at MASTERSTHESIS{mastersthesis-minimal,
+   author = "{\'{E}}douard Masterly",
+   title = "Mastering Thesis Writing",
+   school = "Stanford University",
+   year = 1988,
+}
+      </citation>
+      <citation type="bibtex">
+ at MASTERSTHESIS{mastersthesis-full,
+   author = "{\'{E}}douard Masterly",
+   title = "Mastering Thesis Writing",
+   school = "Stanford University",
+   type = "Master's project",
+   address = "English Department",
+   month = jun # "-" # aug,
+   year = 1988,
+   note = "This is a full MASTERSTHESIS entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at MISC{misc-full,
+   author = "Joe-Bob Missilany",
+   title = "Handing out random pamphlets in airports",
+   howpublished = "Handed out at O'Hare",
+   month = oct,
+   year = 1984,
+   note = "This is a full MISC entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at INPROCEEDINGS{inproceedings-minimal,
+   author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis",
+   title = "On Notions of Information Transfer in {VLSI} Circuits",
+   booktitle = "Proc. Fifteenth Annual ACM" # STOC,
+   year = 1983,
+}
+      </citation>
+      <citation type="bibtex">
+ at INPROCEEDINGS{inproceedings-full,
+   author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis",
+   title = "On Notions of Information Transfer in {VLSI} Circuits",
+   editor = "Wizard V. Oz and Mihalis Yannakakis",
+   booktitle = "Proc. Fifteenth Annual ACM" # STOC,
+   number = 17,
+   series = "All ACM Conferences",
+   pages = "133--139",
+   month = mar,
+   year = 1983,
+   address = "Boston",
+   organization = ACM,
+   publisher = "Academic Press",
+   note = "This is a full INPROCEDINGS entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at INPROCEEDINGS{inproceedings-crossref,
+   crossref = "whole-proceedings",
+   author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis",
+   title = "On Notions of Information Transfer in {VLSI} Circuits",
+   organization = "",
+   pages = "133--139",
+   note = "This is a cross-referencing INPROCEEDINGS entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at PROCEEDINGS{proceedings-full,
+   editor = "Wizard V. Oz and Mihalis Yannakakis",
+   title = "Proc. Fifteenth Annual" # STOC,
+   number = 17,
+   series = "All ACM Conferences",
+   month = mar,
+   year = 1983,
+   address = "Boston",
+   organization = ACM,
+   publisher = "Academic Press",
+   note = "This is a full PROCEEDINGS entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at PHDTHESIS{phdthesis-minimal,
+   author = "F. Phidias Phony-Baloney",
+   title = "Fighting Fire with Fire: Festooning {F}rench Phrases",
+   school = "Fanstord University",
+   year = 1988,
+}
+      </citation>
+      <citation type="bibtex">
+ at PHDTHESIS{phdthesis-full,
+   author = "F. Phidias Phony-Baloney",
+   title = "Fighting Fire with Fire: Festooning {F}rench Phrases",
+   school = "Fanstord University",
+   type = "{PhD} Dissertation",
+   address = "Department of French",
+   month = jun # "-" # aug,
+   year = 1988,
+   note = "This is a full PHDTHESIS entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at TECHREPORT{techreport-minimal,
+   author = "Tom Terrific",
+   title = "An {$O(n \log n / \! \log\log n)$} Sorting Algorithm",
+   institution = "Fanstord University",
+   year = 1988,
+}
+      </citation>
+      <citation type="bibtex">
+ at TECHREPORT{techreport-full,
+   author = "Tom T{\'{e}}rrific",
+   title = "An {$O(n \log n / \! \log\log n)$} Sorting Algorithm",
+   institution = "Fanstord University",
+   type = "Wishful Research Result",
+   number = "7",
+   address = "Computer Science Department, Fanstord, California",
+   month = oct,
+   year = 1988,
+   note = "This is a full TECHREPORT entry",
+}
+      </citation>
+      <citation type="bibtex">
+ at UNPUBLISHED{unpublished-minimal,
+   author = "Ulrich {\"{U}}nderwood and Ned {\~N}et and Paul {\={P}}ot",
+   title = "Lower Bounds for Wishful Research Results",
+   note = "Talk at Fanstord University (this is a minimal UNPUBLISHED entry)",
+}
+      </citation>
+      <citation type="bibtex">
+ at UNPUBLISHED{unpublished-full,
+   author = "Ulrich {\"{U}}nderwood and Ned {\~N}et and Paul {\={P}}ot",
+   title = "Lower Bounds for Wishful Research Results",
+   month = nov # ", " # dec,
+   year = 1988,
+   note = "Talk at Fanstord University (this is a full UNPUBLISHED entry)",
+}
+      </citation>
+      <citation type="doi">10.1101/gr.4086505</citation>
+    </citations>
+</tool>
diff --git a/test/functional/tools/boolean_conditional.xml b/test/functional/tools/boolean_conditional.xml
new file mode 100644
index 0000000..df699e2
--- /dev/null
+++ b/test/functional/tools/boolean_conditional.xml
@@ -0,0 +1,53 @@
+<tool id="boolean_conditional" name="boolean_conditional" version="1.0.0">
+    <command>
+        echo "$p1.p1val" >> $out_file1;
+    </command>
+    <inputs>
+        <conditional name="p1">
+            <param type="boolean" name="p1use" truevalue="booltrue" falsevalue="boolfalse" />
+            <when value="booltrue">
+                <param name="p1val" value="p1used" type="text" />
+            </when>
+            <when value="boolfalse">
+                <param name="p1val" value="p1notused" type="text" />
+            </when>
+        </conditional>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
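+        <!-- Both the raw true/false values and the declared
+             truevalue/falsevalue strings should select the matching
+             <when> block, as the four tests below exercise. -->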
+        <test>
+            <param name="p1use" value="true" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="p1used" />
+                </assert_contents>
+            </output>
+        </test>
+        <test>
+            <param name="p1use" value="booltrue" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="p1used" />
+                </assert_contents>
+            </output>
+        </test>
+        <test>
+            <param name="p1use" value="false" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="p1notused" />
+                </assert_contents>
+            </output>
+        </test>
+        <test>
+            <param name="p1use" value="boolfalse" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="p1notused" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/catDocker.xml b/test/functional/tools/catDocker.xml
new file mode 100644
index 0000000..1437ebb
--- /dev/null
+++ b/test/functional/tools/catDocker.xml
@@ -0,0 +1,28 @@
+<tool id="catdc" name="Concatenate datasets (in docker)">
+    <description>tail-to-head</description>
+    <requirements>
+      <container type="docker">busybox:ubuntu-14.04</container>
+    </requirements>
+    <command>
+      echo "Galaxy slots passed through contain as \$GALAXY_SLOTS";
+      cat $input1
+      #for $q in $queries
+            ${q.input2}
+      #end for
+      > $out_file1;
+      echo "Work dir output" > working_file
+
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Concatenate Dataset"/>
+        <repeat name="queries" title="Dataset">
+            <param name="input2" type="data" label="Select" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" metadata_source="input1"/>
+        <data name="out_file2" format="txt" from_work_dir="working_file" />
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/checksum.xml b/test/functional/tools/checksum.xml
new file mode 100644
index 0000000..6398bb1
--- /dev/null
+++ b/test/functional/tools/checksum.xml
@@ -0,0 +1,17 @@
+<tool id="checksum" name="checksum" version="1.0.0">
+    <command>
+        cp $input $output
+    </command>
+    <inputs>
+        <param name="input" type="data" format="txt" />
+    </inputs>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <tests>
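+        <!-- The checksum attribute takes the form algorithm$hexdigest
+             (e.g. sha1$... below); the test passes if the output file's
+             digest matches. -->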
+        <test>
+            <param name="input" value="simple_line.txt" />
+            <output name="out_file1" checksum="sha1$8156d7ca0f46ed7abac98f82e36cfaddb2aca041" />
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/cheetah_casting.xml b/test/functional/tools/cheetah_casting.xml
new file mode 100644
index 0000000..d12bb65
--- /dev/null
+++ b/test/functional/tools/cheetah_casting.xml
@@ -0,0 +1,27 @@
+<tool id="cheetah_casting" name="cheetah_casting" version="1.0.0">
+    <command>
+        #set $int_val_inc = int($inttest) + 1
+        #set $float_val_inc = float($floattest) + 1
+        echo $int_val_inc   >> $out_file1;
+        echo $float_val_inc >> $out_file1;
+    </command>
+    <inputs>
+        <param name="inttest" value="1" type="integer" />
+        <param name="floattest" value="1.0" type="float" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="inttest" value="1" />
+            <param name="floattest" value="2.5" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="2" />
+                    <has_line line="3.5" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/cheetah_problem_syntax_error.xml b/test/functional/tools/cheetah_problem_syntax_error.xml
new file mode 100644
index 0000000..298f993
--- /dev/null
+++ b/test/functional/tools/cheetah_problem_syntax_error.xml
@@ -0,0 +1,17 @@
+<tool id="cheetah_problem_syntax_error" name="cheetah_problem_syntax_error" version="0.1.0">
+    <command>
+        #for i inx
+        moo
+    </command>
+    <inputs>
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <tests>
+        <test expect_failure="true">
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/cheetah_problem_unbound_var.xml b/test/functional/tools/cheetah_problem_unbound_var.xml
new file mode 100644
index 0000000..d2db7e9
--- /dev/null
+++ b/test/functional/tools/cheetah_problem_unbound_var.xml
@@ -0,0 +1,16 @@
+<tool id="cheetah_problem_unbound_var" name="cheetah_problem_unbound_var" version="0.1.0">
+    <command>
+        echo "Moo" > $out_file2
+    </command>
+    <inputs>
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <tests>
+        <test expect_failure="true">
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/cheetah_problem_unbound_var_input.xml b/test/functional/tools/cheetah_problem_unbound_var_input.xml
new file mode 100644
index 0000000..4f8908b
--- /dev/null
+++ b/test/functional/tools/cheetah_problem_unbound_var_input.xml
@@ -0,0 +1,19 @@
+<tool id="cheetah_problem_unbound_var_input" name="cheetah_problem_unbound_var_input" version="0.1.0">
+    <!-- input is special because we want to throw a syntax
+         error when it is undefined even though it appears 
+         in Python's default environment. -->
+    <command>
+        echo "$input" > $out_file1
+    </command>
+    <inputs>
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <tests>
+        <test expect_failure="true">
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/code_file.py b/test/functional/tools/code_file.py
new file mode 100644
index 0000000..a590129
--- /dev/null
+++ b/test/functional/tools/code_file.py
@@ -0,0 +1,8 @@
+def validate_input( trans, error_map, param_values, page_param_map ):
+    """
+        Validates the user input, before execution.
+    """
+    first = param_values['name1']
+    second = param_values['name2']
+    if first == second:
+        error_map['name1'] = "The value names should be different."
diff --git a/test/functional/tools/code_file.xml b/test/functional/tools/code_file.xml
new file mode 100644
index 0000000..9bf5a7b
--- /dev/null
+++ b/test/functional/tools/code_file.xml
@@ -0,0 +1,15 @@
+<tool id="code_file" name="code_file" version="1.0.0">
+  <description>code_file</description>
+  <code file="code_file.py" />
+  <command>cat $input1 > $output</command>
+  <inputs>
+    <param name="input1" type="data" label="Concatenate Dataset" />
+    <param name="name1" type="text" value="FactorName" label="value one should be different than value two" />
+    <param name="name2" type="text" value="FactorName" label="value one should be different than value two" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="output" />
+  </outputs>
+  <tests>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_creates_dynamic_list_of_pairs.xml b/test/functional/tools/collection_creates_dynamic_list_of_pairs.xml
new file mode 100644
index 0000000..bd56401
--- /dev/null
+++ b/test/functional/tools/collection_creates_dynamic_list_of_pairs.xml
@@ -0,0 +1,71 @@
+<tool id="collection_creates_dynamic_list_of_pairs" name="collection_creates_dynamic_list_of_pairs" version="0.1.0">
+  <command><![CDATA[
+    echo "A" > samp1_1.fq &&
+    echo "B" > samp1_2.fq &&
+    echo "C" > samp2_1.fq &&
+    echo "D" > samp2_2.fq &&
+    echo "E" > samp3_1.fq &&
+    echo "F" > samp3_2.fq &&
+    ## Galaxy wants forward and reverse in the pattern, not _1 and _2.
+    ##   There is bash magic that would be more concise than this basename
+    ##   pattern, but I think this is more portable.
+    for f in *_1.fq; do mv "\$f" "`basename \$f _1.fq`_forward.fq"; done && 
+    for f in *_2.fq; do mv "\$f" "`basename \$f _2.fq`_reverse.fq"; done
+  ]]></command>
+  <inputs>
+    <param name="foo" type="text" label="Dummy Parameter" />
+  </inputs>
+  <outputs>
+    <collection name="list_output" type="list:paired" label="Duplicate List">
+      <!-- Use named regex groups to match the pattern
+           <identifier_0>_<identifier_1>.fq. Here identifier_0 is the list
+           identifier in the nested collection and identifier_1 is either
+           forward or reverse (for instance samp1_forward.fq).
+      -->
+      <discover_datasets pattern="(?P<identifier_0>[^_]+)_(?P<identifier_1>[^_]+)\.fq" ext="fastqsanger" visible="true" />
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="foo" value="bar" />
+      <output_collection name="list_output" type="list:paired">
+        <element name="samp1">
+          <element name="forward">
+            <assert_contents>
+              <has_text_matching expression="^A\n$" />
+            </assert_contents>
+          </element>
+          <element name="reverse">
+            <assert_contents>
+              <has_text_matching expression="^B\n$" />
+            </assert_contents>
+          </element>
+        </element>
+        <element name="samp2">
+          <element name="forward">
+            <assert_contents>
+              <has_text_matching expression="^C\n$" />
+            </assert_contents>
+          </element>
+          <element name="reverse">
+            <assert_contents>
+              <has_text_matching expression="^D\n$" />
+            </assert_contents>
+          </element>
+        </element>
+        <element name="samp3">
+          <element name="forward">
+            <assert_contents>
+              <has_text_matching expression="^E\n$" />
+            </assert_contents>
+          </element>
+          <element name="reverse">
+            <assert_contents>
+              <has_text_matching expression="^F\n$" />
+            </assert_contents>
+          </element>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_creates_dynamic_nested.xml b/test/functional/tools/collection_creates_dynamic_nested.xml
new file mode 100644
index 0000000..68edc0c
--- /dev/null
+++ b/test/functional/tools/collection_creates_dynamic_nested.xml
@@ -0,0 +1,66 @@
+<tool id="collection_creates_dynamic_nested" name="collection_creates_dynamic_nested" version="0.1.0">
+  <command>
+    echo "A" > oe1_ie1.fq ;
+    echo "B" > oe1_ie2.fq ;
+    echo "C" > oe2_ie1.fq ;
+    echo "D" > oe2_ie2.fq ;
+    echo "E" > oe3_ie1.fq ;
+    echo "F" > oe3_ie2.fq
+  </command>
+  <inputs>
+    <param name="foo" type="text" label="Dummy Parameter" />
+  </inputs>
+  <outputs>
+    <collection name="list_output" type="list:list" label="Duplicate List">
+      <!-- Use named regex groups to match the pattern
+           <identifier_0>_<identifier_1>.fq. Here identifier_0 is the list
+           identifier of the outer list and identifier_1 is the list identifier
+           of the inner list (for instance oe1_ie2.fq in the example above).
+      -->
+      <discover_datasets pattern="(?P<identifier_0>[^_]+)_(?P<identifier_1>[^_]+)\.fq" ext="fastqsanger" visible="true" />
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="foo" value="bar" />
+      <output_collection name="list_output" type="list:list">
+        <element name="oe1">
+          <element name="ie1">
+            <assert_contents>
+              <has_text_matching expression="^A\n$" />
+            </assert_contents>
+          </element>
+          <element name="ie2">
+            <assert_contents>
+              <has_text_matching expression="^B\n$" />
+            </assert_contents>
+          </element>
+        </element>
+        <element name="oe2">
+          <element name="ie1">
+            <assert_contents>
+              <has_text_matching expression="^C\n$" />
+            </assert_contents>
+          </element>
+          <element name="ie2">
+            <assert_contents>
+              <has_text_matching expression="^D\n$" />
+            </assert_contents>
+          </element>
+        </element>
+        <element name="oe3">
+          <element name="ie1">
+            <assert_contents>
+              <has_text_matching expression="^E\n$" />
+            </assert_contents>
+          </element>
+          <element name="ie2">
+            <assert_contents>
+              <has_text_matching expression="^F\n$" />
+            </assert_contents>
+          </element>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_creates_list.xml b/test/functional/tools/collection_creates_list.xml
new file mode 100644
index 0000000..8cb5728
--- /dev/null
+++ b/test/functional/tools/collection_creates_list.xml
@@ -0,0 +1,39 @@
+<tool id="collection_creates_list" name="collection_creates_list" version="0.1.0">
+  <command>
+    #for $key in $list_output.keys()#
+    echo "identifier is $key" > "$list_output[$key]";
+    #end for#
+    echo 'ensure not empty';
+  </command>
+  <inputs>
+    <param name="input1" type="data_collection" collection_type="list" label="Input" help="Input collection..." format="txt" />
+  </inputs>
+  <outputs>
+    <collection name="list_output" type="list" label="Duplicate List" structured_like="input1" inherit_format="true">
+      <!-- inherit_format can be used in conjunction with structured_like
+           to preserve format. -->
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1">
+        <collection type="list">
+          <element name="l11" value="simple_line.txt" />
+          <element name="l12" value="simple_line.txt" />
+        </collection>
+      </param>
+      <output_collection name="list_output" type="list" count="2">
+        <element name="l11">
+          <assert_contents>
+            <has_text_matching expression="^identifier is l11\n$" />
+          </assert_contents>
+        </element>
+        <element name="l12">
+          <assert_contents>
+            <has_text_matching expression="^identifier is l12\n$" />
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_creates_list_2.xml b/test/functional/tools/collection_creates_list_2.xml
new file mode 100644
index 0000000..bf5445e
--- /dev/null
+++ b/test/functional/tools/collection_creates_list_2.xml
@@ -0,0 +1,22 @@
+<tool id="collection_creates_list_2" name="collection_creates_list_2" version="0.1.0">
+  <!-- Go through and add a header to each item in a collection - in a real
+       analysis one should instead implicitly map the non-collection add-header
+       tool over the collection.
+  -->
+  <command>
+    #for $key in $list_output.keys()#
+    cat "$header" > "$list_output[$key]";
+    cat "$input_collect[$key]" >> "$list_output[$key]";
+    #end for#
+    echo 'ensure not empty';
+  </command>
+  <inputs>
+    <param name="header" type="data" label="Input Data" help="Input data..." />
+    <param name="input_collect" type="data_collection" collection_type="list" label="Input Collect" help="Input collection..." />
+  </inputs>
+  <outputs>
+    <collection name="list_output" type="list" label="Duplicate List" structured_like="input_collect" format_source="header">
+    </collection>
+  </outputs>
+  <tests>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_creates_list_of_pairs.xml b/test/functional/tools/collection_creates_list_of_pairs.xml
new file mode 100644
index 0000000..3b7b780
--- /dev/null
+++ b/test/functional/tools/collection_creates_list_of_pairs.xml
@@ -0,0 +1,50 @@
+<tool id="collection_creates_list_of_pairs" name="collection_creates_list_or_pairs" version="0.1.0">
+  <!-- You usually wouldn't want to do this - just write the operation for
+       a single dataset and allow the user to map that tool over the whole
+       collection. -->
+  <command>
+    #for $list_key in $list_output.keys()#
+    #for $pair_key in $list_output[$list_key].keys()#
+    echo "identifier is $list_key:$pair_key" > "$list_output[$list_key][$pair_key]";
+    #end for#
+    #end for#
+    echo 'ensure not empty';
+  </command>
+  <inputs>
+    <param name="input1" type="data_collection" collection_type="list:paired" label="Input" help="Input collection..." format="txt" />
+  </inputs>
+  <outputs>
+    <collection name="list_output" type="list:paired" label="Duplicate List" structured_like="input1" inherit_format="true">
+      <!-- inherit_format can be used in conjunction with structured_like
+           to preserve format. -->
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1">
+        <collection type="list:paired">
+          <element name="i1">
+            <collection type="paired">
+              <element name="forward" value="simple_line.txt" />
+              <element name="reverse" value="simple_line_alternative.txt" />
+            </collection>
+          </element>
+        </collection>
+      </param>
+      <output_collection name="list_output" type="list:paired">
+        <element name="i1">
+          <element name="forward">
+            <assert_contents>
+              <has_text_matching expression="^identifier is i1:forward\n$" />
+            </assert_contents>
+          </element>
+          <element name="reverse">
+            <assert_contents>
+              <has_text_matching expression="^identifier is i1:reverse\n$" />
+            </assert_contents>
+          </element>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_creates_pair.xml b/test/functional/tools/collection_creates_pair.xml
new file mode 100644
index 0000000..942a6ee
--- /dev/null
+++ b/test/functional/tools/collection_creates_pair.xml
@@ -0,0 +1,37 @@
+<tool id="collection_creates_pair" name="collection_creates_pair" version="0.1.0">
+  <command>
+    sed 'n;d' $input1 > $forward ;
+    sed -n 'g;n;p' $input1 > "reverse.txt";
+  </command>
+  <inputs>
+    <param name="input1" type="data" label="Input" help="Input to be split." />
+  </inputs>
+  <outputs>
+    <collection name="paired_output" type="paired" label="Split Pair">
+      <!-- command can reference parts directly or find via from_work_dir. -->
+      <data name="forward" format="txt" />
+      <data name="reverse" format_source="input1" from_work_dir="reverse.txt" />
+      <!-- data elements can use format, format_source, metadata_from,
+           from_work_dir. The format="input" idiom is not supported;
+           it should be considered deprecated, and format_source is superior.
+      -->
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="simple_lines_interleaved.txt" />
+      <output_collection name="paired_output" type="paired">
+        <element name="forward">
+          <assert_contents>
+            <has_text_matching expression="^This is a line of text.\nThis is a line of text.\n$" />
+          </assert_contents>
+        </element>
+        <element name="reverse">
+          <assert_contents>
+            <has_text_matching expression="^This is a different line of text.\nThis is a different line of text.\n$" />
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_creates_pair_from_type.xml b/test/functional/tools/collection_creates_pair_from_type.xml
new file mode 100644
index 0000000..73101c0
--- /dev/null
+++ b/test/functional/tools/collection_creates_pair_from_type.xml
@@ -0,0 +1,35 @@
+<tool id="collection_creates_pair_from_type" name="collection_creates_pair_from_type" version="0.1.0">
+  <command>
+    sed 'n;d' $input1 > $paired_output.forward;
+    sed -n 'g;n;p' $input1 > $paired_output.reverse;
+  </command>
+  <inputs>
+    <param name="input1" type="data" label="Input" help="Input to be split." />
+  </inputs>
+  <outputs>
+    <!-- Unlike lists, the structure of paired outputs can be predetermined,
+         so the command block can reference $paired_output.forward and
+         $paired_output.reverse directly; see collection_creates_pair.xml for
+         examples of labelling the pieces explicitly and using from_work_dir.
+    -->
+    <collection name="paired_output" type="paired" label="Split Pair" format_source="input1">
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="simple_lines_interleaved.txt" ftype="txt" />
+      <output_collection name="paired_output" type="paired">
+        <element name="forward" ftype="txt">
+          <assert_contents>
+            <has_text_matching expression="^This is a line of text.\nThis is a line of text.\n$" />
+          </assert_contents>
+        </element>
+        <element name="reverse" ftype="txt">
+          <assert_contents>
+            <has_text_matching expression="^This is a different line of text.\nThis is a different line of text.\n$" />
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_mixed_param.xml b/test/functional/tools/collection_mixed_param.xml
new file mode 100644
index 0000000..73b3eb8
--- /dev/null
+++ b/test/functional/tools/collection_mixed_param.xml
@@ -0,0 +1,35 @@
+<tool id="collection_mixed_param" name="collection_mixed_param" version="0.1.0">
+  <command>
+    cat #for $f in $f1# ${f} #end for# $f2 >> $out1;
+  </command>
+  <inputs>
+    <param name="f1" type="data_collection" collection_type="paired" label="collection param" />
+    <param name="f2" type="data" format="txt" label="data param" />
+    <conditional name="advanced">
+      <param name="full" type="select" label="Parameter Settings">
+        <option value="no">Use defaults</option>
+        <option value="yes">Full parameter list</option>
+      </param>
+      <when value="yes">
+        <param name="advanced_threshold" type="integer" value="8" />
+      </when>
+      <when value="no">
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="f1">
+        <collection type="paired">
+          <element name="forward" value="simple_line.txt" />
+          <element name="reverse" value="simple_line_alternative.txt" />
+        </collection>
+      </param>
+      <param name="f2" value="simple_lines_both.txt" />
+      <output name="out1" file="simple_lines_interleaved.txt"/>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_nested_test.xml b/test/functional/tools/collection_nested_test.xml
new file mode 100644
index 0000000..fbd90ec
--- /dev/null
+++ b/test/functional/tools/collection_nested_test.xml
@@ -0,0 +1,51 @@
+<tool id="collection_nested_test" name="collection_nested_test" version="0.1.0">
+  <command>
+    echo #for $f in $f1# ${f.is_collection} #end for# >> $out1;
+    cat #for $f in $f1# #if $f.is_collection# #for $inner in $f# ${inner} #end for# #else# $f # #end if# #end for# >> $out2
+  </command>
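+  <!-- $f.is_collection lets the command distinguish a nested list:paired
+       input (first test) from a flat pair (second test). -->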
+  <inputs>
+    <param name="f1" type="data_collection" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+    <data format="txt" name="out2" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="f1">
+        <collection type="list:paired">
+          <element name="i1">
+            <collection type="paired">
+              <element name="forward" value="simple_line.txt" />
+              <element name="reverse" value="simple_line_alternative.txt" />
+            </collection>
+          </element>
+        </collection>
+      </param>
+      <output name="out1">
+        <assert_contents>
+          <has_line line="True" />
+        </assert_contents>
+      </output>
+      <output name="out2">
+        <assert_contents>
+          <has_line line="This is a line of text." />
+          <has_line line="This is a different line of text." />
+        </assert_contents>
+      </output>
+    </test>
+    <test>
+      <param name="f1">
+        <collection type="paired">
+          <element name="forward" value="simple_line.txt" />
+          <element name="reverse" value="simple_line_alternative.txt" />
+        </collection>
+      </param>
+      <output name="out1">
+        <assert_contents>
+          <has_line line="False False" />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_optional_param.xml b/test/functional/tools/collection_optional_param.xml
new file mode 100644
index 0000000..2839461
--- /dev/null
+++ b/test/functional/tools/collection_optional_param.xml
@@ -0,0 +1,38 @@
+<tool id="collection_optional_param" name="collection_optional_param" version="0.1.0">
+  <command>
+    #if $f1
+      cat $f1.forward $f1['reverse'] >> $out1;
+    #else
+      echo "No input specified." >> $out1;
+    #end if
+  </command>
+  <inputs>
+    <param name="f1" type="data_collection" collection_type="paired" optional="true" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="f1">
+        <collection type="paired">
+          <element name="forward" value="simple_line.txt" />
+          <element name="reverse" value="simple_line_alternative.txt" />
+        </collection>
+      </param>
+      <output name="out1">
+        <assert_contents>
+          <has_line line="This is a line of text." />
+          <has_line line="This is a different line of text." />
+        </assert_contents>
+      </output>
+    </test>
+    <test>
+      <output name="out1">
+        <assert_contents>
+          <has_line line="No input specified." />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_paired_test.xml b/test/functional/tools/collection_paired_test.xml
new file mode 100644
index 0000000..329a841
--- /dev/null
+++ b/test/functional/tools/collection_paired_test.xml
@@ -0,0 +1,27 @@
+<tool id="collection_paired_test" name="paired_test" version="0.1.0">
+  <command>
+    cat $f1.forward $f1['reverse'] >> $out1;
+  </command>
+  <inputs>
+    <param name="f1" type="data_collection" collection_type="paired" label="Input pair" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="f1">
+        <collection type="paired">
+          <element name="forward" value="simple_line.txt" />
+          <element name="reverse" value="simple_line_alternative.txt" />
+        </collection>
+      </param>
+      <output name="out1">
+        <assert_contents>
+          <has_line line="This is a line of text." />
+          <has_line line="This is a different line of text." />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_split_on_column.xml b/test/functional/tools/collection_split_on_column.xml
new file mode 100644
index 0000000..b446a15
--- /dev/null
+++ b/test/functional/tools/collection_split_on_column.xml
@@ -0,0 +1,30 @@
+<tool id="collection_split_on_column" name="collection_split_on_column" version="0.1.0">
+  <command>
+    mkdir outputs; cd outputs; awk '{ print \$2 > \$1 ".tabular" }' $input1
+  </command>
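+  <!-- awk writes column 2 of each row to a file named after column 1, so
+       each distinct key in the first column becomes one list element. -->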
+  <inputs>
+    <param name="input1" type="data" label="Input Table" help="Table to split on first column" format="tabular" />
+  </inputs>
+  <outputs>
+    <collection name="split_output" type="list" label="Table split on first column">
+      <discover_datasets pattern="__name_and_ext__" directory="outputs" />
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="tinywga.fam" />
+      <output_collection name="split_output" type="list">
+        <element name="101">
+          <assert_contents>
+            <has_text_matching expression="^1\n2\n3\n$" />
+          </assert_contents>
+        </element>
+        <element name="1334">
+          <assert_contents>
+            <has_text_matching expression="^1\n10\n11\n12\n13\n2\n$" />
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_two_paired.xml b/test/functional/tools/collection_two_paired.xml
new file mode 100644
index 0000000..af4c607
--- /dev/null
+++ b/test/functional/tools/collection_two_paired.xml
@@ -0,0 +1,69 @@
+<tool id="collection_two_paired" name="collection_two_paired" version="0.1.0">
+  <command>
+    #if $kind.collection_type == "paired"
+      cat $kind.f1.forward $kind.f1['reverse'] >> $out1;
+      cat $kind.f2.forward $kind.f2['reverse'] >> $out1;
+    #else
+      #for $i, $_ in enumerate($kind.f1):
+        cat $kind.f1[$i] $kind.f2[$i] >> $out1;
+      #end for
+    #end if
+  </command>
+  <inputs>
+    <conditional name="kind">
+      <param type="select" name="collection_type">
+        <option value="paired">Paired Datasets</option>
+        <option value="list">List of Datasets</option>
+      </param>
+      <when value="paired">
+        <param name="f1" type="data_collection" collection_type="paired" label="F1" />
+        <param name="f2" type="data_collection" collection_type="paired" label="F2" />
+      </when>
+      <when value="list">
+        <param name="f1" type="data_collection" collection_type="list" label="F1" />
+        <param name="f2" type="data_collection" collection_type="list" label="F2" />
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+  </outputs>
+  <tests>
+    <test>
+      <conditional name="kind">
+        <param name="collection_type" value="paired" />
+        <param name="f1">
+          <collection type="paired">
+            <element name="forward" value="simple_line.txt" />
+            <element name="reverse" value="simple_line_alternative.txt" />
+          </collection>
+        </param>
+        <param name="f2">
+          <collection type="paired">
+            <element name="forward" value="simple_line.txt" />
+            <element name="reverse" value="simple_line_alternative.txt" />
+          </collection>
+        </param>
+      </conditional>
+      <output name="out1" file="simple_lines_interleaved.txt"/>
+    </test>
+    <test>
+      <conditional name="kind">
+        <param name="collection_type" value="list" />
+        <param name="f1">
+          <collection type="list">
+            <element name="l11" value="simple_line.txt" />
+            <element name="l12" value="simple_line.txt" />
+          </collection>
+        </param>
+        <param name="f2">
+          <collection type="list">
+            <element name="l21" value="simple_line_alternative.txt" />
+            <element name="l22" value="simple_line_alternative.txt" />
+          </collection>
+        </param>
+      </conditional>
+      <output name="out1" file="simple_lines_interleaved.txt"/>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/collection_type_source.xml b/test/functional/tools/collection_type_source.xml
new file mode 100644
index 0000000..93aa654
--- /dev/null
+++ b/test/functional/tools/collection_type_source.xml
@@ -0,0 +1,62 @@
+<tool id="collection_type_source" name="collection_type_source" version="0.1.0">
+  <command>
+    mkdir output;
+    #for $key in $input_collect.keys()#
+    cat "$header" > output/"$key";
+    cat "$input_collect[$key]" >> output/"$key";
+    #end for#
+  </command>
+  <inputs>
+    <param name="header" type="data" label="Input Data" help="Input data..." />
+    <param name="input_collect" type="data_collection" label="Input Collect" help="Input collection..." />
+  </inputs>
+  <outputs>
+    <collection name="list_output" type_source="input_collect" label="Duplicate List" format_source="header">
+      <discover_datasets pattern="__name__" directory="output" visible="true" />
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="header" value="simple_line.txt" />
+      <param name="input_collect">
+        <collection type="list">
+          <element name="samp1" value="simple_line.txt" />
+          <element name="samp2" value="simple_line_alternative.txt" />
+        </collection>
+      </param>
+      <output_collection name="list_output" type="list">
+        <element name="samp1">
+            <assert_contents>
+              <has_text_matching expression="This is a line of text.\nThis is a line of text." />
+            </assert_contents>
+        </element>
+        <element name="samp2">
+          <assert_contents>
+            <has_text_matching expression="This is a line of text.\nThis is a different line of text." />
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+    <test>
+      <param name="header" value="simple_line.txt" />
+      <param name="input_collect">
+        <collection type="paired">
+          <element name="forward" value="simple_line.txt" />
+          <element name="reverse" value="simple_line_alternative.txt" />
+        </collection>
+      </param>
+      <output_collection name="list_output" type="paired">
+        <element name="forward">
+            <assert_contents>
+              <has_text_matching expression="This is a line of text.\nThis is a line of text." />
+            </assert_contents>
+        </element>
+        <element name="reverse">
+          <assert_contents>
+            <has_text_matching expression="This is a line of text.\nThis is a different line of text." />
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/color_param.xml b/test/functional/tools/color_param.xml
new file mode 100644
index 0000000..fb4d016
--- /dev/null
+++ b/test/functional/tools/color_param.xml
@@ -0,0 +1,30 @@
+<tool id="color_param" name="color_param" version="1.0.0">
+    <command>
+        echo "$color_default"  > $out_file1;
+        echo "$color_rgb"      > $out_file2;
+    </command>
+    <inputs>
+        <param name="color_default" type="color" value="#aabbcc" />
+        <param name="color_rgb" type="color" value="#aabbcc" rgb="true" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+        <data name="out_file2" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="color_default" value="#aaaaaa" />
+            <param name="color_rgb" value="#aaaaaa" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="#aaaaaa" />
+                </assert_contents>
+            </output>
+            <output name="out_file2">
+                <assert_contents>
+                    <has_line line="(170, 170, 170)" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/column_multi_param.xml b/test/functional/tools/column_multi_param.xml
new file mode 100644
index 0000000..cc368c5
--- /dev/null
+++ b/test/functional/tools/column_multi_param.xml
@@ -0,0 +1,25 @@
+<tool id="column_multi_param" name="Column Param Multi">
+  <command>
+    #for $input in $input1#
+      cut -f '$col' '$input' >> 'col_output';
+    #end for#
+  </command>
+  <inputs>
+    <param type="data" format="tabular" name="input1" label="Input 1" multiple="true" />
+    <param name="col" type="data_column" data_ref="input1" label="Column to Use" />
+  </inputs>
+  <outputs>
+    <data name="output1" format="tabular" from_work_dir="col_output" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="2.tabular,2.tabular" />
+      <param name="col" value="2" />
+      <output name="outpu1">
+        <assert_contents>
+          <has_line line="68" />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/column_param.xml b/test/functional/tools/column_param.xml
new file mode 100644
index 0000000..c0eea67
--- /dev/null
+++ b/test/functional/tools/column_param.xml
@@ -0,0 +1,23 @@
+<tool id="column_param" name="Column Param">
+  <command>
+    cut -f '$col' '$input1' > 'col_output'
+  </command>
+  <inputs>
+    <param type="data" format="tabular" name="input1" label="Input 1" />
+    <param name="col" type="data_column" data_ref="input1" label="Column to Use" />
+  </inputs>
+  <outputs>
+    <data name="output1" format="tabular" from_work_dir="col_output" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="2.tabular" />
+      <param name="col" value="2" />
+      <output name="output1">
+        <assert_contents>
+          <has_line line="68" />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/composite.xml b/test/functional/tools/composite.xml
new file mode 100644
index 0000000..8f8edbe
--- /dev/null
+++ b/test/functional/tools/composite.xml
@@ -0,0 +1,20 @@
+<tool id="composite" name="composite" version="1.0.0">
+  <command>cat '$input.extra_files_path/Sequences' > $output</command>
+  <inputs>
+    <param name="input" type="data" format="velvet" label="Velvet Dataset" help="Prepared by velveth."/>
+  </inputs>
+  <outputs>
+    <data format="txt" name="output" label="${tool.name} on ${on_string}: LastGraph">
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="velveth_test1/output.html" ftype="velvet" >
+        <composite_data value='velveth_test1/Sequences' ftype="Sequences"/>
+        <composite_data value='velveth_test1/Roadmaps' ftype="Roadmaps"/>
+        <composite_data value='velveth_test1/Log'/>
+      </param>
+      <output name="unused_reads_fasta" file="velveth_test1/Sequences" compare="diff"/>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/composite_output.xml b/test/functional/tools/composite_output.xml
new file mode 100644
index 0000000..6bb1bf1
--- /dev/null
+++ b/test/functional/tools/composite_output.xml
@@ -0,0 +1,26 @@
+<tool id="composite_output" name="composite_output" version="1.0.0">
+  <command>mkdir $output.extra_files_path; cp $input.extra_files_path/* $output.extra_files_path; cp $input.extra_files_path/Log $output.extra_files_path/second_log; mkdir $output.extra_files_path/nested; cp $input.extra_files_path/Log $output.extra_files_path/nested/nested_log</command>
+  <inputs>
+    <param name="input" type="data" format="velvet" label="Velvet Dataset" help="Prepared by velveth."/>
+  </inputs>
+  <outputs>
+    <data format="velvet" name="output" label="">
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="velveth_test1/output.html" ftype="velvet" >
+        <composite_data value='velveth_test1/Sequences' ftype="Sequences"/>
+        <composite_data value='velveth_test1/Roadmaps' ftype="Roadmaps"/>
+        <composite_data value='velveth_test1/Log'/>
+      </param>
+      <output name="output" file="velveth_test1/output.html">
+        <extra_files type="file" name="Sequences" value="velveth_test1/Sequences" />
+        <extra_files type="file" name="Roadmaps" value="velveth_test1/Roadmaps" />
+        <extra_files type="file" name="Log" value="composite_output_expected_log" />
+        <extra_files type="file" name="second_log" value="composite_output_expected_log" />
+        <extra_files type="file" name="nested/nested_log" value="composite_output_expected_log" />
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/composite_output_tests.xml b/test/functional/tools/composite_output_tests.xml
new file mode 100644
index 0000000..d12a5be
--- /dev/null
+++ b/test/functional/tools/composite_output_tests.xml
@@ -0,0 +1,29 @@
+<tool id="composite_output_tests" name="composite_output_tests" version="1.0.0">
+  <command>
+    mkdir $output.extra_files_path;
+    cp $input.extra_files_path/* $output.extra_files_path;
+    echo "1 2 3" > $output.extra_files_path/md5out;
+  </command>
+  <inputs>
+    <param name="input" type="data" format="velvet" label="Velvet Dataset" help="Prepared by velveth."/>
+  </inputs>
+  <outputs>
+    <data format="velvet" name="output" label="">
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="velveth_test1/output.html" ftype="velvet" >
+        <composite_data value='velveth_test1/Sequences' ftype="Sequences"/>
+        <composite_data value='velveth_test1/Roadmaps' ftype="Roadmaps"/>
+        <composite_data value='velveth_test1/Log'/>
+      </param>
+      <output name="output" file="velveth_test1/output.html">
+        <extra_files type="file" name="Sequences" value="velveth_test1/Sequences" />
+        <extra_files type="file" name="Roadmaps" value="velveth_test1/Roadmaps" />
+        <extra_files type="file" name="Log" value="composite_output_expected_log" />
+        <extra_files type="file" name="md5out" md5="f2b33fb7b3d0eb95090a16060e6a24f9" /><!-- md5sum or "1 2 3" -->
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/create_10.xml b/test/functional/tools/create_10.xml
new file mode 100644
index 0000000..af87225
--- /dev/null
+++ b/test/functional/tools/create_10.xml
@@ -0,0 +1,35 @@
+<tool id="create_10" name="Create 10">
+    <description>create 10</description>
+    <command>
+        echo "1" > 1;
+        echo "2" > 2;
+        echo "3" > 3;
+        echo "4" > 4;
+        echo "5" > 5;
+        echo "6" > 6;
+        echo "7" > 7;
+        echo "8" > 8;
+        echo "9" > 9;
+        echo "10" > 10;
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Concatenate Dataset"/>
+        <param name="input2" type="data" label="Concatenate Dataset"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" from_work_dir="1" />
+        <data name="out_file2" format="txt" from_work_dir="2" />
+        <data name="out_file3" format="txt" from_work_dir="3" />
+        <data name="out_file4" format="txt" from_work_dir="4" />
+        <data name="out_file5" format="txt" from_work_dir="5" />
+        <data name="out_file6" format="txt" from_work_dir="6" />
+        <data name="out_file7" format="txt" from_work_dir="7" />
+        <data name="out_file8" format="txt" from_work_dir="8" />
+        <data name="out_file9" format="txt" from_work_dir="9" />
+        <data name="out_file10" format="txt" from_work_dir="10" />
+    </outputs>
+    <tests>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/data_manager.xml b/test/functional/tools/data_manager.xml
new file mode 100644
index 0000000..3326148
--- /dev/null
+++ b/test/functional/tools/data_manager.xml
@@ -0,0 +1,16 @@
+<tool id="data_manager" name="Test Data Manager" tool_type="manage_data" version="0.0.1">
+    <configfiles>
+        <configfile name="static_test_data">{"data_tables": {"testbeta": [{"value": "newvalue", "path": "newvalue.txt"}]}}</configfile>
+    </configfiles>
+    <command>
+        mkdir $out_file.files_path ;
+        echo "A new value" > $out_file.files_path/newvalue.txt;
+        cp $static_test_data $out_file
+    </command>
+    <inputs>
+        <param type="text" name="ignored_value" value="" label="Ignored" />
+    </inputs>
+    <outputs>
+        <data name="out_file" format="data_manager_json"/>
+    </outputs>
+</tool>
diff --git a/test/functional/tools/dbkey_filter_input.xml b/test/functional/tools/dbkey_filter_input.xml
new file mode 100644
index 0000000..a4f0bdc
--- /dev/null
+++ b/test/functional/tools/dbkey_filter_input.xml
@@ -0,0 +1,39 @@
+<tool id="dbkey_filter_input" name="dbkey_filter_input" version="0.1.0">
+    <description>Filter (single) input on a dbkey</description>
+    <command>
+        cat $inputs > $output
+    </command>
+    <inputs>
+        <param format="txt" name="inputs" type="data" label="Inputs" help="" />
+        <param name="index" type="select" label="Using reference genome">
+          <options from_data_table="test_fasta_indexes">
+            <filter type="data_meta" ref="inputs" key="dbkey" column="1" />
+            <validator type="no_options" message="No reference genome is available for the build associated with the selected input dataset" />
+          </options>
+        </param>
+    </inputs>
+
+    <outputs>
+        <data format="txt" name="output" />
+    </outputs>
+
+    <tests>
+        <!-- can choose a dbkey if it matches input -->
+        <test>
+            <param name="inputs" value="simple_line.txt" dbkey="hg19" />
+            <param name="index" value="hg19" />
+            <output name="output" file="simple_line.txt"/>
+        </test>
+        <!-- cannot pick index otherwise -->
+        <!-- Does this make sense? If no dbkey is defined on the input,
+             there is no index option available. -->
+        <test expect_failure="true">
+            <param name="inputs" value="simple_line.txt" />
+            <param name="index" value="hg18" />
+            <output name="output" file="simple_line.txt"/>
+        </test>
+    </tests>
+
+    <help>
+    </help>
+</tool>
\ No newline at end of file
diff --git a/test/functional/tools/dbkey_filter_multi_input.xml b/test/functional/tools/dbkey_filter_multi_input.xml
new file mode 100644
index 0000000..7ecaeaa
--- /dev/null
+++ b/test/functional/tools/dbkey_filter_multi_input.xml
@@ -0,0 +1,34 @@
+<tool id="dbkey_filter_multi_input" name="dbkey_filter_multi_input" version="0.1.0">
+    <description>Filter select on dbkey of multiple inputs</description>
+    <command><![CDATA[
+        #for $input in $inputs#
+        cat $input >> $output;
+        #end for#
+    ]]>
+    </command>
+    <inputs>
+        <param format="txt" name="inputs" type="data" label="Inputs" multiple="true" help="" />
+        <param name="index" type="select" label="Using reference genome">
+          <options from_data_table="test_fasta_indexes">
+            <filter type="data_meta" ref="inputs" key="dbkey" column="1" />
+            <validator type="no_options" message="No reference genome is available for the build associated with the selected input dataset" />
+          </options>
+        </param>
+    </inputs>
+
+    <outputs>
+        <data format="txt" name="output" />
+    </outputs>
+
+    <tests>
+        <!-- can choose a dbkey if it matches input -->
+        <test>
+            <param name="inputs" value="simple_line.txt,simple_line.txt" dbkey="hg19" />
+            <param name="index" value="hg19" />
+            <output name="output" file="simple_line_x2.txt"/>
+        </test>
+    </tests>
+
+    <help>
+    </help>
+</tool>
\ No newline at end of file
diff --git a/test/functional/tools/dbkey_output_action.xml b/test/functional/tools/dbkey_output_action.xml
new file mode 100644
index 0000000..9d10010
--- /dev/null
+++ b/test/functional/tools/dbkey_output_action.xml
@@ -0,0 +1,36 @@
+<tool id="dbkey_output_action" name="dbkey_output_action" version="0.1.0">
+    <command>echo foo > $mapped_reads</command>
+    <inputs>
+        <param name="input" type="data" />
+        <param name="index" type="select" label="Using reference genome">
+          <options from_data_table="test_fasta_indexes">
+            <filter type="data_meta" ref="input" key="dbkey" column="1" />
+            <validator type="no_options" message="No reference genome is available for the build associated with the selected input dataset" />
+          </options>
+        </param>
+    </inputs>
+    <outputs>
+        <data format="txt" name="mapped_reads">
+            <actions>
+                <action type="metadata" name="dbkey">
+                    <option type="from_data_table" name="test_fasta_indexes" column="1" offset="0">
+                        <filter type="param_value" column="0" value="#" compare="startswith" keep="False"/>
+                        <filter type="param_value" ref="index" column="0"/>
+                    </option>
+                </action>
+            </actions>
+        </data>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="simple_line.txt" dbkey="hg18" />
+            <param name="index" value="hg18"/>
+            <output name="mapped_reads">
+                <metadata name="dbkey" value="hg18" />
+                <assert_contents>
+                    <has_text text="foo" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
\ No newline at end of file
diff --git a/test/functional/tools/detect_errors_aggressive.xml b/test/functional/tools/detect_errors_aggressive.xml
new file mode 100644
index 0000000..3ece4c2
--- /dev/null
+++ b/test/functional/tools/detect_errors_aggressive.xml
@@ -0,0 +1,51 @@
+<tool id="detect_errors_aggressive" name="detect_errors_aggressive" version="1.0.0">
+    <command detect_errors="aggressive">
+        #if $error_bool
+            echo "ERROR: Problem...."
+        #elif $exception_bool
+            echo "Exception: Problem..."
+        #else
+            echo "Everything is OK."
+        #end if
+        ; sh -c "exit $exit_code"
+
+    </command>
+    <inputs>
+        <param name="error_bool" type="boolean" label="error bool" />
+        <param name="exception_bool" type="boolean" label="exception bool" checked="false" />
+        <param name="exit_code" type="integer" value="0" label="exit code"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <tests>
+        <test expect_exit_code="0" expect_failure="false">
+            <param name="error_bool" value="false" />
+            <param name="exception_bool" value="false" />
+            <param name="exit_code" value="0" />
+            <assert_stdout>
+                <has_line line="Everything is OK." />
+            </assert_stdout>
+        </test>
+        <test expect_exit_code="1" expect_failure="true">
+            <param name="error_bool" value="false" />
+            <param name="exception_bool" value="false" />
+            <param name="exit_code" value="1" />
+            <assert_stdout>
+                <has_line line="Everything is OK." />
+            </assert_stdout>
+        </test>
+        <test expect_exit_code="0" expect_failure="true">
+            <param name="error_bool" value="true" />
+            <param name="exception_bool" value="false" />
+            <param name="exit_code" value="0" />
+        </test>
+        <test expect_exit_code="0" expect_failure="true">
+            <param name="error_bool" value="false" />
+            <param name="exception_bool" value="true" />
+            <param name="exit_code" value="0" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/disambiguate_cond.xml b/test/functional/tools/disambiguate_cond.xml
new file mode 100644
index 0000000..2465721
--- /dev/null
+++ b/test/functional/tools/disambiguate_cond.xml
@@ -0,0 +1,96 @@
+<tool id="disambiguate_cond" name="disambiguate_cond">
+    <command>
+        echo "$p1.p1v $p2.p2v $p3.p3v" > $out_file1; cat "$files.p4.file" >> $out_file1;
+    </command>
+    <inputs>
+        <conditional name="p1">
+            <param type="boolean" name="use" />
+            <when value="true">
+                <param name="p1v" value="4" type="integer" />
+            </when>
+            <when value="false">
+                <param name="p1v" value="7" type="integer" />
+            </when>
+        </conditional>
+        <conditional name="p2">
+            <param type="boolean" name="use" />
+            <when value="true">
+                <param name="p2v" value="4" type="integer" />
+            </when>
+            <when value="false">
+                <param name="p2v" value="7" type="integer" />
+            </when>
+        </conditional>
+        <conditional name="p3">
+            <param type="boolean" name="use" />
+            <when value="true">
+                <param name="p3v" value="4" type="integer" />
+            </when>
+            <when value="false">
+                <param name="p3v" value="7" type="integer" />
+            </when>
+        </conditional>
+        <conditional name="files">
+            <param name="attach_files" type="boolean" checked="true" />
+            <when value="true">
+                <conditional name="p4">
+                    <param type="boolean" name="use" />
+                    <when value="true">
+                        <param type="data" name="file" />
+                    </when>
+                    <when value="false" />
+                </conditional>
+            </when>
+            <when value="false" />
+        </conditional>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <!-- Can use nested conditional blocks as shown below to disambiguate
+             various nested parameters. -->
+        <test>
+            <conditional name="p1">
+                <param name="use" value="False"/>
+            </conditional>
+            <conditional name="p2">
+                <param name="use" value="True"/>
+            </conditional>
+            <conditional name="p3">
+                <param name="use" value="False"/>
+            </conditional>            
+            <conditional name="files">
+                <param name="attach_files" value="True" />
+                <conditional name="p4">
+                    <param name="use" value="True"/>
+                    <param name="file" value="simple_line_alternative.txt" />
+                </conditional>
+            </conditional>
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="7 4 7" />
+                    <has_line line="This is a different line of text." />
+                </assert_contents>
+            </output>
+        </test>
+        <!-- Can also use a more concise, flat version of this, but the
+             above version is clearer and should be considered preferable.
+        -->
+        <test>
+            <param name="p1|use" value="True"/>
+            <param name="p2|use" value="False"/>
+            <param name="p3|use" value="True"/>
+            <param name="p4|use" value="True" />
+            <!-- Only need to specify enough of a suffix to disambiguate,
+                 but don't do this - it is too clever. -->
+            <param name="p4|file" value="simple_line.txt" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="4 7 4" />
+                    <has_line line="This is a line of text." />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/disambiguate_repeats.xml b/test/functional/tools/disambiguate_repeats.xml
new file mode 100644
index 0000000..7537a1f
--- /dev/null
+++ b/test/functional/tools/disambiguate_repeats.xml
@@ -0,0 +1,63 @@
+<tool id="disambiguate_repeats" name="disambiguate_repeats">
+    <command>
+        cat #for $q in $queries# ${q.input} #end for# #for $q in $more_queries# ${q.input} #end for# > $out_file1
+    </command>
+    <inputs>
+        <repeat name="queries" title="Dataset">
+            <param name="input" type="data" label="Select" />
+        </repeat>
+        <repeat name="more_queries" title="Dataset">
+            <param name="input" type="data" label="Select" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <!-- Can disambiguate repeats and specify multiple blocks using a
+             nested structure. -->
+        <test>
+            <repeat name="queries">
+                <param name="input" value="simple_line.txt"/>
+            </repeat>
+            <repeat name="more_queries">
+                <param name="input" value="simple_line_alternative.txt"/>
+            </repeat>
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="This is a line of text." />
+                    <has_line line="This is a different line of text." />
+                </assert_contents>
+            </output>
+        </test>
+        <!-- Multiple such blocks can be specified, but only with the newer
+             API-driven tests. -->
+        <test>
+            <repeat name="queries">
+                <param name="input" value="simple_line.txt"/>
+            </repeat>
+            <repeat name="queries">
+                <param name="input" value="simple_line_alternative.txt"/>
+            </repeat>
+            <repeat name="more_queries">
+                <param name="input" value="simple_line.txt"/>
+            </repeat>
+            <repeat name="more_queries">
+                <param name="input" value="simple_line_alternative.txt"/>
+            </repeat>
+            <output name="out_file1" file="simple_lines_interleaved.txt"/>
+        </test>
+        <!-- Can also use prefixes to disambiguate inputs or force order, but
+             the above nested structure is preferable. -->
+        <test>
+            <param name="queries_1|input" value="simple_line_alternative.txt"/>
+            <param name="queries_0|input" value="simple_line.txt"/>
+
+            <param name="more_queries_1|input" value="simple_line_alternative.txt" />
+            <param name="more_queries_0|input" value="simple_line.txt"/>
+
+            <output name="out_file1" file="simple_lines_interleaved.txt"/>
+        </test>
+
+    </tests>
+</tool>
diff --git a/test/functional/tools/empty_output.xml b/test/functional/tools/empty_output.xml
new file mode 100644
index 0000000..71caf2b
--- /dev/null
+++ b/test/functional/tools/empty_output.xml
@@ -0,0 +1,12 @@
+<tool id="empty_output" name="empty_output" version="1.0.0">
+    <command>
+        touch $out_file1;
+    </command>
+    <inputs>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+    </tests>
+</tool>
diff --git a/test/functional/tools/environment_variables.xml b/test/functional/tools/environment_variables.xml
new file mode 100644
index 0000000..5834bdb
--- /dev/null
+++ b/test/functional/tools/environment_variables.xml
@@ -0,0 +1,34 @@
+<tool id="environment_variables" name="environment_variables" version="1.0.0">
+    <environment_variables>
+        <environment_variable name="INTVAR">$inttest</environment_variable>
+        <environment_variable name="FORTEST">#for i in ['m', 'o', 'o']#$i#end for#</environment_variable>
+        <environment_variable name="IFTEST">#if int($inttest) == 3
+ISTHREE
+#else#
+NOTTHREE
+#end if#</environment_variable>
+    </environment_variables>
+    <command>
+        echo "\$INTVAR"  >  $out_file1;
+        echo "\$FORTEST" >> $out_file1;
+        echo "\$IFTEST"  >> $out_file1;
+    </command>
+    <inputs>
+        <param name="inttest" value="1" type="integer" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="inttest" value="2" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="2" />
+                    <has_line line="moo" />
+                    <has_line line="NOTTHREE" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/exit_code_from_file.xml b/test/functional/tools/exit_code_from_file.xml
new file mode 100644
index 0000000..5d66d96
--- /dev/null
+++ b/test/functional/tools/exit_code_from_file.xml
@@ -0,0 +1,13 @@
+<tool id="exit_code_from_file" name="exit_code_from_file">
+    <command detect_errors="exit_code">
+        sh -c "exit `cat $input`"
+    </command>
+    <inputs>
+        <param name="input" type="data" label="Exit code file" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/for_workflows/1.bam b/test/functional/tools/for_workflows/1.bam
new file mode 120000
index 0000000..27a1dd5
--- /dev/null
+++ b/test/functional/tools/for_workflows/1.bam
@@ -0,0 +1 @@
+../../../../test-data/1.bam
\ No newline at end of file
diff --git a/test/functional/tools/for_workflows/cat.xml b/test/functional/tools/for_workflows/cat.xml
new file mode 100644
index 0000000..19016e3
--- /dev/null
+++ b/test/functional/tools/for_workflows/cat.xml
@@ -0,0 +1,19 @@
+<tool id="cat" name="Concatenate datasets (for test workflows)">
+    <description>tail-to-head</description>
+    <command>
+        cat $input1 #for $q in $queries# ${q.input2} #end for# > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Concatenate Dataset"/>
+        <repeat name="queries" title="Dataset">
+            <param name="input2" type="data" label="Select" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" metadata_source="input1"/>
+    </outputs>
+    <tests>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/for_workflows/cat_collection.xml b/test/functional/tools/for_workflows/cat_collection.xml
new file mode 100644
index 0000000..3690db7
--- /dev/null
+++ b/test/functional/tools/for_workflows/cat_collection.xml
@@ -0,0 +1,16 @@
+<tool id="cat_collection" name="Concatenate dataset collection (for test workflows)">
+    <description>tail-to-head</description>
+    <command>
+        cat #for $q in $input1# $q #end for# > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data_collection" label="Concatenate Dataset" collection_type="paired" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" />
+    </outputs>
+    <tests>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/for_workflows/cat_interleave.xml b/test/functional/tools/for_workflows/cat_interleave.xml
new file mode 100644
index 0000000..61d8d1a
--- /dev/null
+++ b/test/functional/tools/for_workflows/cat_interleave.xml
@@ -0,0 +1,18 @@
+<tool id="cat_interleave" name="Interleave two inputs (for test workflows)">
+    <command>
+        cat $input1 $input2 > $out_file1;
+        cat $input2 $input1 > $out_file2;
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Input 1"/>
+        <param name="input2" type="data" label="Input 2"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" metadata_source="input1"/>
+        <data name="out_file2" format="input" metadata_source="input2"/>
+    </outputs>
+    <tests>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/for_workflows/cat_list.xml b/test/functional/tools/for_workflows/cat_list.xml
new file mode 100644
index 0000000..fd761b7
--- /dev/null
+++ b/test/functional/tools/for_workflows/cat_list.xml
@@ -0,0 +1,16 @@
+<tool id="cat_list" name="Concatenate multiple datasets (for test workflows)">
+    <description>tail-to-head</description>
+    <command>
+        cat #for $q in $input1# $q #end for# > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Concatenate Dataset" multiple="true" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" metadata_source="input1"/>
+    </outputs>
+    <tests>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/for_workflows/create_input_collection.xml b/test/functional/tools/for_workflows/create_input_collection.xml
new file mode 100644
index 0000000..efe80fb
--- /dev/null
+++ b/test/functional/tools/for_workflows/create_input_collection.xml
@@ -0,0 +1,40 @@
+<tool id="create_input_collection" name="create_input_collection" version="0.1.0">
+  <description>This tool is used to create a collection of text files.</description>
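+  <!-- The configfile is written out as a Python script; each numbered file it creates
+       under outputs/ is picked up by discover_datasets into the list collection. -->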
+  <command detect_errors="exit_code">
+    mkdir outputs; cd outputs; python $script
+  </command>
+  <configfiles>
+    <configfile name="script">
+for i in range($collection_size):
+    template = "File number %s\n"
+    contents = template % i
+    with open(str(i), "w") as f:
+        f.write(contents)
+</configfile>
+  </configfiles>
+  <inputs>
+    <param name="collection_size" type="integer" format="txt" label="Collection Size" value="1" />
+  </inputs>
+  <outputs>
+    <collection name="output" type="list" label="lines">
+      <discover_datasets pattern="__name__" directory="outputs" format="txt" />
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="collection_size" value="2" />
+      <output_collection name="output" type="list" count="2">
+        <element name="0">
+          <assert_contents>
+            <has_line line="File number 0" />
+          </assert_contents>
+        </element>
+        <element name="1">
+          <assert_contents>
+            <has_line line="File number 1" />
+          </assert_contents>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/for_workflows/head.xml b/test/functional/tools/for_workflows/head.xml
new file mode 100644
index 0000000..536152d
--- /dev/null
+++ b/test/functional/tools/for_workflows/head.xml
@@ -0,0 +1,13 @@
+<tool id="head" name="Select first">
+  <description>lines from a dataset</description>
+  <command>head -n $lineNum $input > $out_file1</command>
+  <inputs>
+    <param name="lineNum" size="5" type="integer" value="10" label="Select first" help="lines"/>
+    <param format="txt" name="input" type="data" label="from"/>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <help>
+  </help>
+</tool>
diff --git a/test/functional/tools/for_workflows/mapper.xml b/test/functional/tools/for_workflows/mapper.xml
new file mode 100644
index 0000000..505e517
--- /dev/null
+++ b/test/functional/tools/for_workflows/mapper.xml
@@ -0,0 +1,16 @@
+<tool id="mapper" name="Mapper" version="0.1.0">
+    <command>
+        cp $__tool_directory__/1.bam $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" format="fastq" label="Fastq Input"/>
+        <param name="reference" type="data" format="fasta" label="Fasta Input"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="bam" />
+    </outputs>
+    <tests>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/for_workflows/pileup.xml b/test/functional/tools/for_workflows/pileup.xml
new file mode 100644
index 0000000..df6c6af
--- /dev/null
+++ b/test/functional/tools/for_workflows/pileup.xml
@@ -0,0 +1,18 @@
+<tool id="pileup" name="Pileup" version="0.1.0">
+    <command>
+        printf "Summary" > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" format="bam" multiple="true" label="BAM Inputs" min="1">
+            <validator check="bam_index" message="Metadata missing, click the pencil icon in the history item and use the auto-detect feature to correct this issue." type="metadata" />
+        </param>
+        <param name="reference" type="data" format="fasta" label="Fasta Input"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/for_workflows/split.xml b/test/functional/tools/for_workflows/split.xml
new file mode 100644
index 0000000..def9c17
--- /dev/null
+++ b/test/functional/tools/for_workflows/split.xml
@@ -0,0 +1,33 @@
+<tool id="split" name="split" version="0.1.0">
+  <command detect_errors="exit_code">
+    bash $script
+  </command>
+  <configfiles>
+    <configfile name="script">
+      mkdir outputs;
+      cd outputs;
+      i=1;
+      while read -r line || [[ -n "\$line" ]]; do
+        printf "\$line\n" > \$i ;
+        i=\$((\$i + 1));
+      done < "$input1";
+    </configfile>
+  </configfiles>
+  <inputs>
+    <param name="input1" type="data" format="txt" label="Input Text" />
+  </inputs>
+  <outputs>
+    <collection name="output" type="list" label="lines">
+      <discover_datasets pattern="__name__" directory="outputs" />
+    </collection>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="simple_lines_both.txt" />
+      <output_collection name="output" type="list">
+        <element name="1" file="simple_line.txt" />
+        <element name="2" file="simple_line_alternative.txt" />
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/gzipped_inputs.xml b/test/functional/tools/gzipped_inputs.xml
new file mode 100644
index 0000000..5b73d34
--- /dev/null
+++ b/test/functional/tools/gzipped_inputs.xml
@@ -0,0 +1,19 @@
+<tool id="gzipped_inputs" name="Echo Dataset">
+    <command>
+        cat $input1 > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Concatenate Dataset"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" metadata_source="input1"/>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input1" value="4.bed.gz" />
+            <output name="out_file1" file="4.bed" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/identifier_collection.xml b/test/functional/tools/identifier_collection.xml
new file mode 100644
index 0000000..e5f0a3e
--- /dev/null
+++ b/test/functional/tools/identifier_collection.xml
@@ -0,0 +1,15 @@
+<tool id="identifier_collection" name="identifier_collection">
+  <command>
+    #for $input in $input1:
+      echo '$input.element_identifier' >> 'output1';
+    #end for
+  </command>
+  <inputs>
+    <param type="data_collection" collection_type="list" name="input1" label="Input 1" />
+  </inputs>
+  <outputs>
+    <data name="output1" format="tabular" from_work_dir="output1" />
+  </outputs>
+  <tests>
+  </tests>
+</tool>
diff --git a/test/functional/tools/identifier_multiple.xml b/test/functional/tools/identifier_multiple.xml
new file mode 100644
index 0000000..4fdac74
--- /dev/null
+++ b/test/functional/tools/identifier_multiple.xml
@@ -0,0 +1,15 @@
+<tool id="identifier_multiple" name="identifier_multiple">
+  <command>
+    #for $input in $input1#
+    echo '$input.element_identifier' >> 'output1';
+    #end for#
+  </command>
+  <inputs>
+    <param type="data" name="input1" label="Input 1" multiple="true" />
+  </inputs>
+  <outputs>
+    <data name="output1" format="tabular" from_work_dir="output1" />
+  </outputs>
+  <tests>
+  </tests>
+</tool>
diff --git a/test/functional/tools/identifier_single.xml b/test/functional/tools/identifier_single.xml
new file mode 100644
index 0000000..992873b
--- /dev/null
+++ b/test/functional/tools/identifier_single.xml
@@ -0,0 +1,13 @@
+<tool id="identifier_single" name="identifier_single">
+  <command>
+    echo '$input1.element_identifier' > 'output1'
+  </command>
+  <inputs>
+    <param type="data" name="input1" label="Input 1" />
+  </inputs>
+  <outputs>
+    <data name="output1" format="tabular" from_work_dir="output1" />
+  </outputs>
+  <tests>
+  </tests>
+</tool>
diff --git a/test/functional/tools/implicit_default_conds.xml b/test/functional/tools/implicit_default_conds.xml
new file mode 100644
index 0000000..0a5fe3b
--- /dev/null
+++ b/test/functional/tools/implicit_default_conds.xml
@@ -0,0 +1,49 @@
+<tool id="implicit_default_conds" name="implicit_default_conds">
+    <command>
+        echo "$param_group[0].p1.val"  >> $out_file1;
+        echo "$param_group[0].p2.val"  >> $out_file1;
+    </command>
+    <inputs>
+        <repeat name="param_group" title="Param Group" min="1">
+            <conditional name="p1">
+                <param name="type" type="select">
+                    <option value="default">THE DEFAULT</option>
+                    <option value="different">A different value</option>
+                </param>
+                <when value="default">
+                    <param name="val" value="p1default" type="text" />
+                </when>
+                <when value="different">
+                    <param name="val" value="p1different" type="text" />
+                </when>
+            </conditional>
+            <conditional name="p2">
+                <param name="type" type="select">
+                    <option value="default">THE DEFAULT</option>
+                    <option value="different" selected="true">A different value</option>
+                </param>
+                <when value="default">
+                    <param name="val" value="p2default" type="text" />
+                </when>
+                <when value="different">
+                    <param name="val" value="p2different" type="text" />
+                </when>
+            </conditional>
+            <param name="int_param" type="integer" value="8" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="int_param" value="7" /> <!-- Specify at least one value in repeat to force one instance. -->
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="p1default" />
+                    <has_line line="p2different" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/inheritance_simple.xml b/test/functional/tools/inheritance_simple.xml
new file mode 100644
index 0000000..218e965
--- /dev/null
+++ b/test/functional/tools/inheritance_simple.xml
@@ -0,0 +1,24 @@
+<tool id="inheritance_simple" name="inheritance_simple" version="1.0.0">
+    <description>(demonstrates subtypes are usable for parent format)</description>
+    <command>
+        cat $input1 > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Copy FASTQ" format="fastq" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format_source="input1" metadata_source="input1"/>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input1" value="1.fastqsanger" ftype="fastqsanger" />
+            <output name="out_file1" file="1.fastqsanger" ftype="fastqsanger" />
+        </test>
+        <test>
+            <param name="input1" value="1.fastqsolexa" ftype="fastqsolexa" />
+            <output name="out_file1" file="1.fastqsolexa" ftype="fastqsolexa" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/inputs_as_json.xml b/test/functional/tools/inputs_as_json.xml
new file mode 100644
index 0000000..7cf8daf
--- /dev/null
+++ b/test/functional/tools/inputs_as_json.xml
@@ -0,0 +1,129 @@
+<tool id="inputs_as_json" name="inputs_as_json" version="1.0.0">
+    <command detect_errors="exit_code">
+        python $check_inputs $inputs $test_case
+    </command>
+    <configfiles>
+        <inputs name="inputs" />
+        <!-- Can specify with fixed path in working directory instead:
+        <inputs name="inputs" filename="input.json" />
+        -->
+        <configfile name="check_inputs"><![CDATA[
+import json
+import sys
+
+input_json_path = sys.argv[1]
+test_case = sys.argv[2]
+as_dict = json.load(open(input_json_path, "r"))
+
+
+def assert_equals(x, y):
+    assert x == y, "%s != %s" % (x, y)
+
+if test_case == "1":
+    assert_equals(as_dict["test_case"], 1)
+    assert_equals(as_dict["text_test"], "foo")
+    assert_equals(as_dict["booltest"], True)
+    assert_equals(as_dict["booltest2"], True)
+    assert_equals(as_dict["inttest"], 12456)
+    assert_equals(as_dict["floattest"], 6.789)
+    assert_equals(as_dict["radio_select"], "a_radio")
+    assert_equals(as_dict["repeat"][0]["r"], "000000")
+    assert_equals(as_dict["repeat"][1]["r"], "FFFFFF")
+    assert_equals(as_dict["cond"]["more_text"], "fdefault")
+    assert_equals(as_dict["section_example"]["section_text"], "section_default")
+elif test_case == "2":
+    assert_equals(as_dict["test_case"], 2)
+    assert_equals(as_dict["text_test"], "bar")
+    assert_equals(as_dict["booltest"], False)
+    assert_equals(as_dict["booltest2"], False)
+    assert_equals(as_dict["inttest"], None)
+    assert_equals(as_dict["floattest"], 1.0)
+    assert_equals(as_dict["radio_select"], "a_radio")
+    assert_equals(as_dict["repeat"][0]["r"], "000000")
+    assert_equals(as_dict["cond"]["cond_test"], "second")
+    assert_equals(as_dict["cond"]["more_text"], "sdefault")
+    assert_equals(as_dict["section_example"]["section_text"], "section_default")
+
+with open("output", "w") as f:
+    f.write("okay\n")
+]]></configfile>
+    </configfiles>
+    <inputs>
+        <param name="test_case" type="integer" value="0" />
+        <param name="text_test" type="text" />
+        <param name="booltest" truevalue="booltrue" falsevalue="boolfalse" checked="false" type="boolean" />
+        <param name="booltest2" truevalue="booltrue" falsevalue="boolfalse" checked="false" type="boolean" />
+        <param name="inttest" type="integer" optional="true" />
+        <param name="floattest" value="1.0" type="float" />
+        <param name="radio_select" type="select" display="radio">
+            <option value="a_radio" selected="true">A Radio</option>
+            <option value="b_radio">B Radio</option>
+            <option value="c_radio">C Radio</option>
+        </param>
+        <repeat name="repeat" title="Repeat" min="1">
+            <param name="r" type="color" />
+        </repeat>
+        <conditional name="cond">
+            <param name="cond_test" type="select">
+                <option value="first" selected="true">First</option>
+                <option value="second">Second</option>
+            </param>
+            <when value="first">
+                <param name="more_text" type="text" value="fdefault" />
+            </when>
+            <when value="second">
+                <param name="more_text" type="text" value="sdefault" />
+            </when>
+        </conditional>
+        <section name="section_example" title="Section Example">
+            <param name="section_text" type="text" value="section_default" />
+        </section>
+    </inputs>
+    <outputs>
+        <data name="out_file1" from_work_dir="output" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="test_case" value="1" />
+            <param name="text_test" value="foo" />
+            <param name="booltest" value="true" />
+            <param name="booltest2" value="booltrue" />
+            <param name="inttest" value="12456" />
+            <param name="floattest" value="6.789" />
+            <repeat name="repeat">
+                <param name="r" value="000000"/>
+            </repeat>
+            <repeat name="repeat">
+                <param name="r" value="FFFFFF"/>
+            </repeat>
+            <conditional name="cond">
+                <param name="cond_test" value="first" />
+            </conditional>
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="okay" />
+                </assert_contents>
+            </output>
+        </test>
+        <test>
+            <param name="test_case" value="2" />
+            <param name="text_test" value="bar" />
+            <param name="booltest" value="false" />
+            <param name="booltest2" value="boolfalse" />
+            <!-- Testing null integers -->
+            <!-- <param name="inttest" value="12456" /> -->
+            <param name="r" value="000000" />
+            <conditional name="cond">
+                <param name="cond_test" value="second" />
+            </conditional>
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="okay" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+    <help>
+        Test tool demonstrating the special inputs config file.
+    </help>
+</tool>
diff --git a/test/functional/tools/job_properties.xml b/test/functional/tools/job_properties.xml
new file mode 100644
index 0000000..00247e2
--- /dev/null
+++ b/test/functional/tools/job_properties.xml
@@ -0,0 +1,62 @@
+<tool id="job_properties" name="Test Job Properties">
+    <command>
+        #if $thebool
+            echo "The bool is true";
+            echo "The bool is really true" 1>&2;
+            echo "This is a line of text." > $out_file1
+        #else
+            echo "The bool is not true";
+            echo "The bool is very not true" 1>&2;
+            echo "This is a different line of text." > $out_file1;
+            sh -c "exit 2"
+        #end if
+        #if $failbool
+            ; sh -c "exit 127"
+        #end if
+
+    </command>
+    <inputs>
+        <param name="thebool" type="boolean" label="The boolean property" />
+        <param name="failbool" type="boolean" label="The failure property" checked="false" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <stdio>
+        <exit_code range="127"   level="fatal"   description="Failing exit code." />
+    </stdio>
+    <tests>
+        <test expect_exit_code="0">
+            <param name="thebool" value="true" />
+            <output name="out_file1" file="simple_line.txt" />
+            <assert_command>
+                <has_text text="really" />
+            </assert_command>
+            <assert_stdout>
+                <has_line line="The bool is true" />
+            </assert_stdout>
+            <assert_stderr>
+                <has_line line="The bool is really true" />
+            </assert_stderr>
+        </test>
+        <test expect_exit_code="2">
+            <param name="thebool" value="false" />
+            <output name="out_file1" file="simple_line_alternative.txt" />
+            <assert_command>
+                <has_text text="very not" />
+            </assert_command>
+            <assert_stdout>
+                <has_line line="The bool is not true" />
+            </assert_stdout>
+            <assert_stderr>
+                <has_line line="The bool is very not true" />
+            </assert_stderr>
+        </test>
+        <test expect_exit_code="127" expect_failure="true">
+            <param name="thebool" value="true" />
+            <param name="failbool" value="true" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/library_data.xml b/test/functional/tools/library_data.xml
new file mode 100644
index 0000000..8fd1a14
--- /dev/null
+++ b/test/functional/tools/library_data.xml
@@ -0,0 +1,19 @@
+<tool id="library_data" name="library_data" version="1.0.0">
+    <command>
+        cat $library_dataset >> $output;
+        #for $input in $library_dataset_multiple
+            cat $input >> $output_multiple;
+        #end for
+    </command>
+    <inputs>
+        <param name="library_dataset" type="library_data" multiple="false" />
+        <param name="library_dataset_multiple" type="library_data" />
+    </inputs>
+    <outputs>
+        <data name="output" format="data" />
+        <data name="output_multiple" format="data" />
+    </outputs>
+    <tests>
+        <!-- These parameters cannot be tested. -->
+    </tests>
+</tool>
diff --git a/test/functional/tools/maxseconds.xml b/test/functional/tools/maxseconds.xml
new file mode 100644
index 0000000..9c2ec0e
--- /dev/null
+++ b/test/functional/tools/maxseconds.xml
@@ -0,0 +1,16 @@
+<tool id="maxseconds" name="maxseconds" version="0.1.0">
+    <command detect_errors="exit_code">
+        sleep 100
+    </command>
+    <inputs>
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <tests>
+        <test expect_failure="true" maxseconds="5">
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/md5sum.xml b/test/functional/tools/md5sum.xml
new file mode 100644
index 0000000..7370571
--- /dev/null
+++ b/test/functional/tools/md5sum.xml
@@ -0,0 +1,17 @@
+<tool id="md5sum" name="md5sum" version="1.0.0">
+    <command>
+        cp $input $output
+    </command>
+    <inputs>
+        <param name="input" type="data" format="txt" />
+    </inputs>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="simple_line.txt" />
+            <output name="out_file1" md5="ac94233685713ce99d1e712b0023c381" />
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/metadata.xml b/test/functional/tools/metadata.xml
new file mode 100644
index 0000000..cc68946
--- /dev/null
+++ b/test/functional/tools/metadata.xml
@@ -0,0 +1,29 @@
+<tool id="metadata" name="metadata" version="1.0.0">
+  <command>mkdir $output_copy_of_input.extra_files_path; cp $input.extra_files_path/* $output_copy_of_input.extra_files_path; echo "$input.metadata.base_name" > $output_of_input_metadata</command>
+  <inputs>
+    <param name="input" type="data" format="velvet" label="Velvet Dataset" help="Prepared by velveth."/>
+  </inputs>
+  <outputs>
+    <data format="txt" name="output_of_input_metadata" />
+    <data format="velvet" name="output_copy_of_input" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="velveth_test1/output.html" ftype="velvet" >
+        <composite_data value='velveth_test1/Sequences' ftype="Sequences"/>
+        <composite_data value='velveth_test1/Roadmaps' ftype="Roadmaps"/>
+        <composite_data value='velveth_test1/Log'/>
+        <metadata name="base_name" value="Example Metadata" />
+      </param>
+      <!-- This output tests the input metadata set above -->
+      <output name="output_of_input_metadata" ftype="txt">
+        <assert_contents>
+          <has_line line="Example Metadata" />
+        </assert_contents>
+      </output>
+      <!-- This output tests an assertion about output metadata -->
+      <output name="output_copy_of_input" file="velveth_test1/output.html">
+        <metadata name="base_name" value="velvet" />
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/metadata_bam.xml b/test/functional/tools/metadata_bam.xml
new file mode 100644
index 0000000..2f20be5
--- /dev/null
+++ b/test/functional/tools/metadata_bam.xml
@@ -0,0 +1,25 @@
+<tool id="metadata_bam" name="metadata BAM" version="1.0.0">
+  <command>echo "${ref_names}" > "${output_of_input_metadata}"</command>
+  <inputs>
+    <param name="input_bam" type="data" format="bam" label="BAM File"/>
+    <param name="ref_names" type="select" optional="False" label="Select references you would like to restrict bam to" multiple="True">
+        <options>
+            <filter type="data_meta" ref="input_bam" key="reference_names" />
+        </options>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="txt" name="output_of_input_metadata" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input_bam" value="3.bam" ftype="bam" />
+      <!-- This output tests the select options populated from the BAM reference-name metadata above -->
+      <param name="ref_names" value="chr10_random,chr11,chrM,chrX,chr16" />
+      <output name="output_of_input_metadata" ftype="txt">
+        <assert_contents>
+          <has_line line="chr10_random,chr11,chrM,chrX,chr16" />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/metadata_bcf.xml b/test/functional/tools/metadata_bcf.xml
new file mode 100644
index 0000000..396a8bd
--- /dev/null
+++ b/test/functional/tools/metadata_bcf.xml
@@ -0,0 +1,16 @@
+<tool id="metadata_bcf" name="metadata_BCF" version="1.0.0">
+    <command>file "${input_bcf.metadata.bcf_index}" > "${output_of_input_metadata}"</command>
+    <inputs>
+      <param name="input_bcf" type="data" format="bcf" label="BCF File"/>
+    </inputs>
+    <outputs>
+      <data format="txt" name="output_of_input_metadata" />
+    </outputs>
+    <tests>
+      <test>
+        <param name="input_bcf" value="bcf_index_metadata_test.bcf" ftype="bcf" />
+        <!-- Tests whether the .bcf.csi file is of "gzip compressed data, extra field" type -->
+        <output name="output_of_input_metadata" ftype="txt" file="bcf_index_metadata_test.txt" compare="contains"/>
+      </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/metadata_biom1.xml b/test/functional/tools/metadata_biom1.xml
new file mode 100644
index 0000000..e20ec36
--- /dev/null
+++ b/test/functional/tools/metadata_biom1.xml
@@ -0,0 +1,28 @@
+<tool id="metadata_biom1" name="metadata_BIOM1" version="1.0.0">
+    <command>cp "${input_metadata_values}" "${output_of_input_metadata}"</command>
+    <configfiles>
+        <configfile name="input_metadata_values">table_rows: ${input_biom1.metadata.table_rows}
+table_matrix_element_type: ${input_biom1.metadata.table_matrix_element_type}
+table_format: ${input_biom1.metadata.table_format}
+table_generated_by: ${input_biom1.metadata.table_generated_by}
+table_matrix_type: ${input_biom1.metadata.table_matrix_type}
+table_shape: ${input_biom1.metadata.table_shape}
+table_format_url: ${input_biom1.metadata.table_format_url}
+table_date: ${input_biom1.metadata.table_date}
+table_type: ${input_biom1.metadata.table_type}
+table_id: ${input_biom1.metadata.table_id}
+table_columns: ${input_biom1.metadata.table_columns}</configfile>
+    </configfiles>
+    <inputs>
+      <param name="input_biom1" type="data" format="biom1" label="BIOM1 File"/>
+    </inputs>
+    <outputs>
+      <data format="txt" name="output_of_input_metadata" />
+    </outputs>
+    <tests>
+      <test>
+        <param name="input_biom1" value="input_taxonomy.biom1" ftype="biom1" />
+        <output name="output_of_input_metadata" ftype="txt" file="biom1_metadata_test.txt"/>
+      </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/metadata_column_names.xml b/test/functional/tools/metadata_column_names.xml
new file mode 100644
index 0000000..c7edd8f
--- /dev/null
+++ b/test/functional/tools/metadata_column_names.xml
@@ -0,0 +1,24 @@
+<tool id="metadata_columns" name="metadata_columns" version="1.0.0">
+    <description>Tests whether metadata is being set correctly.</description>
+    <command><![CDATA[
+        cp '$input' '$output'
+    ]]></command>
+    <inputs>
+        <param name="input" type="data" multiple="false" />
+    </inputs>
+    <outputs>
+        <data format="tabular" name="output">
+            <actions>
+                <action name="column_names" type="metadata" default="First,${input.name}" />
+            </actions>
+        </data>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="2.tabular" />
+            <output name="output">
+                <metadata name="column_names" value="First,2.tabular"/>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/min_repeat.xml b/test/functional/tools/min_repeat.xml
new file mode 100644
index 0000000..10c0121
--- /dev/null
+++ b/test/functional/tools/min_repeat.xml
@@ -0,0 +1,27 @@
+<tool id="min_repeat" name="min_repeat" version="0.1.0">
+    <command>
+        cat #for $q in $queries# ${q.input} #end for# > $out_file1 ;
+        cat #for $q in $queries2# ${q.input2} #end for# > $out_file2
+    </command>
+    <inputs>
+        <repeat name="queries" title="Dataset" min="1">
+            <param name="input" type="data" label="Select" />
+        </repeat>
+        <repeat name="queries2" title="Dataset" min="1">
+            <param name="input2" type="data" label="Select" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" label="Repeat 1 Datasets on ${on_string}" />
+        <data name="out_file2" format="txt" label="Repeat 2 Datasets on ${on_string}" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="simple_line.txt"/>
+            <param name="input" value="simple_line.txt"/>
+            <param name="input2" value="simple_line_alternative.txt"/>
+            <output name="out_file1" file="simple_line_x2.txt"/>
+            <output name="out_file2" file="simple_line_alternative.txt"/>
+        </test>
+    </tests>
+</tool>
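For reference, the #for loops in the command above are Cheetah; in plain
Python terms each repeat simply flattens into a space-separated list of
dataset paths (the paths here are hypothetical):

    def render_cat(paths, out):
        # Mirrors: cat #for $q in $queries# ${q.input} #end for# > $out_file1
        return "cat " + " ".join(paths) + " > " + out

    print(render_cat(["a.txt", "a.txt"], "out_file1"))
    # cat a.txt a.txt > out_file1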
diff --git a/test/functional/tools/mulled_example_multi_1.xml b/test/functional/tools/mulled_example_multi_1.xml
new file mode 100644
index 0000000..1f02559
--- /dev/null
+++ b/test/functional/tools/mulled_example_multi_1.xml
@@ -0,0 +1,18 @@
+<tool id="mulled_example_multi_1" name="mulled_example_multi_1" version="0.1.0">
+    <command><![CDATA[
+        bedtools --version > $out_file1 ;
+        echo "Moo" >> $out_file1 ;
+        samtools >> $out_file1 2>&1 ;
+        echo "Cow" >> $out_file1 ;
+    ]]></command>
+    <requirements>
+        <requirement type="package" version="1.3.1">samtools</requirement>
+        <requirement type="package" version="2.26.0">bedtools</requirement>
+    </requirements>
+    <inputs>
+        <param name="input1" type="data" optional="true" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+</tool>
diff --git a/test/functional/tools/multi_data_optional.xml b/test/functional/tools/multi_data_optional.xml
new file mode 100644
index 0000000..5f91425
--- /dev/null
+++ b/test/functional/tools/multi_data_optional.xml
@@ -0,0 +1,27 @@
+<tool id="multi_data_optional" name="multi_data_optional" version="0.1.0">
+  <command>
+    touch $out1;
+    #for $input in $input1
+    #if $input
+    cat $input >> $out1;
+    #end if
+    #end for
+  </command>
+  <inputs>
+    <param name="input1" type="data" format="txt" multiple="true" label="Data 1" optional="true" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="simple_line.txt,simple_line_alternative.txt" />
+      <output name="out1">
+        <assert_contents>
+          <has_line line="This is a line of text." />
+          <has_line line="This is a different line of text." />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/multi_data_param.xml b/test/functional/tools/multi_data_param.xml
new file mode 100644
index 0000000..1fcec1b
--- /dev/null
+++ b/test/functional/tools/multi_data_param.xml
@@ -0,0 +1,52 @@
+<tool id="multi_data_param" name="multi_data_param" version="0.1.0">
+  <!-- Demonstrate looping and using functional-style access to multi-data
+       parameters. -->
+  <command>
+    cat #for $f in $f1# ${f} #end for# >> $out1;
+    cat ${" ".join(map(str, $f2))} >> $out2
+  </command>
+  <inputs>
+    <param name="f1" type="data" format="txt" multiple="true" label="Data 1" min="1" max="1235" />
+    <param name="f2" type="data" format="txt" multiple="true" label="Data 2" />
+    <conditional name="advanced">
+      <param name="full" type="select" label="Parameter Settings">
+        <option value="no">Use defaults</option>
+        <option value="yes">Full parameter list</option>
+      </param>
+      <when value="yes">
+        <param name="advanced_threshold" type="integer" value="8" />
+      </when>
+      <when value="no">
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+    <data format="txt" name="out2" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="f1" value="simple_line.txt" />
+      <param name="f2" value="simple_line_alternative.txt" />
+      <output name="out1" file="simple_line.txt" />
+      <output name="out2" file="simple_line_alternative.txt" />
+    </test>
+    <test>
+      <param name="f1" value="simple_line.txt,simple_line_alternative.txt" />
+      <param name="f2" value="simple_line_alternative.txt" />
+      <output name="out1">
+        <assert_contents>
+          <has_line line="This is a line of text." />
+          <has_line line="This is a different line of text." />
+        </assert_contents>
+      </output>
+    </test>
+    <!-- The UI widget cannot do this next one, but the API can. -->
+    <test>
+      <param name="f1" value="simple_line.txt,simple_line.txt" />
+      <param name="f2" value="simple_line_alternative.txt" />
+      <output name="out1" file="simple_line_x2.txt" />
+      <output name="out2" file="simple_line_alternative.txt" />
+    </test>
+  </tests>
+</tool>
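The second command line above leans on the fact that Cheetah ${...} bodies
are ordinary Python expressions and that str() of a dataset wrapper yields
its file path. A quick stand-in (FakeDataset is hypothetical, not a Galaxy
class):

    class FakeDataset:
        def __init__(self, path):
            self.path = path

        def __str__(self):
            # Galaxy's dataset wrappers stringify to the file path.
            return self.path

    f2 = [FakeDataset("/tmp/a.txt"), FakeDataset("/tmp/b.txt")]
    print(" ".join(map(str, f2)))  # /tmp/a.txt /tmp/b.txt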
diff --git a/test/functional/tools/multi_data_repeat.xml b/test/functional/tools/multi_data_repeat.xml
new file mode 100644
index 0000000..87cf2cf
--- /dev/null
+++ b/test/functional/tools/multi_data_repeat.xml
@@ -0,0 +1,13 @@
+<tool id="multi_data_repeat" name="multi_data_repeat" version="0.1.0">
+  <command>
+    cat #for o in $outer_repeat# #for $f in $o.f1# ${f} #end for# #end for# >> $out1;
+  </command>
+  <inputs>
+    <repeat name="outer_repeat" title="Outer Repeat" min="1">
+      <param name="f1" type="data" format="txt" multiple="true" label="Data 1" min="1" max="1235" />
+    </repeat>
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+  </outputs>
+</tool>
diff --git a/test/functional/tools/multi_output.xml b/test/functional/tools/multi_output.xml
new file mode 100644
index 0000000..8348b71
--- /dev/null
+++ b/test/functional/tools/multi_output.xml
@@ -0,0 +1,27 @@
+<tool id="multi_output" name="Multi_Output" version="0.1.0">
+  <command>
+    echo "Hello" > $report;
+    echo "World Contents" > '${__new_file_path__}/primary_${report.id}_world_visible_?'
+  </command>
+  <inputs>
+    <param name="input" type="integer" value="7" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="report" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="7" />
+      <output name="report">
+        <assert_contents>
+          <has_line line="Hello" />
+        </assert_contents>
+        <discovered_dataset designation="world">
+          <assert_contents>
+            <has_line line="World Contents" />
+          </assert_contents>
+        </discovered_dataset>
+      </output>
+    </test>
+  </tests>
+</tool>
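The second echo writes into __new_file_path__ using what appears to be
Galaxy's legacy primary-dataset naming scheme,
primary_<id>_<designation>_<visibility>_..., which is how the test's
discovered_dataset designation="world" gets matched. A rough regex sketch of
that convention (the trailing extension field is elided in the tool above,
so the exact pattern here is an assumption):

    import re

    # Assumed legacy layout: primary_<dataset id>_<designation>_<visibility>_<ext>
    PRIMARY = re.compile(
        r"primary_(?P<id>\d+)_(?P<designation>[^_]+)_(?P<visible>[^_]+)_(?P<ext>.+)")

    m = PRIMARY.match("primary_42_world_visible_txt")  # hypothetical file name
    assert m is not None and m.group("designation") == "world"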
diff --git a/test/functional/tools/multi_output_assign_primary.xml b/test/functional/tools/multi_output_assign_primary.xml
new file mode 100644
index 0000000..cb98e09
--- /dev/null
+++ b/test/functional/tools/multi_output_assign_primary.xml
@@ -0,0 +1,34 @@
+<tool id="multi_output_assign_primary" name="multi_output_assign_primary" version="0.1.0">
+  <command>
+    echo "1" > sample1.report.tsv;
+    echo "2" > sample2.report.tsv;
+    echo "3" > sample3.report.tsv;
+  </command>
+  <inputs>
+    <param name="num_param" type="integer" value="7" />
+    <param name="input" type="data" />
+  </inputs>
+  <outputs>
+    <data format="tabular" name="sample">
+      <discover_datasets pattern="(?P<designation>.+)\.report\.tsv" ext="tabular" visible="true" assign_primary_output="true" />
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="num_param" value="7" />
+      <param name="input" ftype="txt" value="simple_line.txt"/>
+      <output name="sample">
+        <assert_contents>
+          <has_line line="1" />
+        </assert_contents>
+        <!-- no sample1 here; it was consumed by the named output "sample" -->
+        <discovered_dataset designation="sample2" ftype="tabular">
+          <assert_contents><has_line line="2" /></assert_contents>
+        </discovered_dataset>
+        <discovered_dataset designation="sample3" ftype="tabular">
+          <assert_contents><has_line line="3" /></assert_contents>
+        </discovered_dataset>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/multi_output_configured.xml b/test/functional/tools/multi_output_configured.xml
new file mode 100644
index 0000000..dc3baeb
--- /dev/null
+++ b/test/functional/tools/multi_output_configured.xml
@@ -0,0 +1,67 @@
+<tool id="multi_output_configured" name="Multi_Output_Configured" version="0.1.0">
+  <command>
+    echo "Hello" > $report;
+    mkdir subdir1;
+    echo "This" > subdir1/this.txt;
+    echo "That" > subdir1/that.txt;
+    mkdir subdir2;
+    echo "1" > subdir2/CUSTOM_1.txt;
+    echo "2" > subdir2/CUSTOM_2.tabular;
+    echo "3" > subdir2/CUSTOM_3.txt;
+    mkdir subdir3;
+    echo "Foo" > subdir3/Foo;
+    echo "mapped reads" > split_bam_.MAPPED.bam;
+    echo "unmapped reads" > split_bam_.UNMAPPED.bam;
+    echo "1" > sample1.report.tsv;
+    echo "2" > sample2.report.tsv;
+    echo "3" > sample3.report.tsv;
+  </command>
+  <inputs>
+    <param name="num_param" type="integer" value="7" />
+    <param name="input" type="data" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="report">
+      <discover_datasets pattern="__designation_and_ext__" directory="subdir1" visible="true" />
+      <discover_datasets pattern="CUSTOM_(?P<designation>.+)\.(?P<ext>.+)" directory="subdir2" visible="true" />
+      <discover_datasets pattern="__designation__" directory="subdir3" ext="input" visible="true" />
+      <discover_datasets pattern="split_bam_\.(?P<designation>([A-Z-])\w+)\.bam" format="txt" visible="true" /> <!-- can use ext or format attribute. -->
+      <discover_datasets pattern="(?P<designation>.+)\.report\.tsv" ext="tabular" visible="true" />
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="num_param" value="7" />
+      <param name="input" ftype="txt" value="simple_line.txt"/>
+      <output name="report">
+        <assert_contents>
+          <has_line line="Hello" />
+        </assert_contents>
+        <discovered_dataset designation="this" ftype="txt">
+          <assert_contents><has_line line="This" /></assert_contents>
+        </discovered_dataset>
+        <discovered_dataset designation="that" ftype="txt">
+          <assert_contents><has_line line="That" /></assert_contents>
+        </discovered_dataset>
+        <discovered_dataset designation="1" ftype="txt">
+          <assert_contents><has_line line="1" /></assert_contents>
+        </discovered_dataset>
+        <discovered_dataset designation="2" ftype="tabular">
+          <assert_contents><has_line line="2" /></assert_contents>
+        </discovered_dataset>
+        <discovered_dataset designation="Foo" ftype="txt">
+          <assert_contents><has_line line="Foo" /></assert_contents>
+        </discovered_dataset>
+        <discovered_dataset designation="MAPPED" ftype="txt">
+          <assert_contents><has_line line="mapped reads" /></assert_contents>
+        </discovered_dataset>
+        <discovered_dataset designation="UNMAPPED" ftype="txt">
+          <assert_contents><has_line line="unmapped reads" /></assert_contents>
+        </discovered_dataset>
+        <discovered_dataset designation="sample1" ftype="tabular">
+          <assert_contents><has_line line="1" /></assert_contents>
+        </discovered_dataset>
+      </output>
+    </test>
+  </tests>
+</tool>
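The discover_datasets patterns above are plain Python regexes with named
groups (designation, ext); checking the custom ones directly:

    import re

    assert re.match(r"(?P<designation>.+)\.report\.tsv",
                    "sample2.report.tsv").group("designation") == "sample2"
    assert re.match(r"CUSTOM_(?P<designation>.+)\.(?P<ext>.+)",
                    "CUSTOM_2.tabular").groupdict() == {"designation": "2",
                                                        "ext": "tabular"}
    assert re.match(r"split_bam_\.(?P<designation>([A-Z-])\w+)\.bam",
                    "split_bam_.MAPPED.bam").group("designation") == "MAPPED"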
diff --git a/test/functional/tools/multi_repeats.xml b/test/functional/tools/multi_repeats.xml
new file mode 100644
index 0000000..745cb86
--- /dev/null
+++ b/test/functional/tools/multi_repeats.xml
@@ -0,0 +1,44 @@
+<tool id="multi_repeats" name="multi_repeats">
+    <command>
+        cat $input1 #for $q in $queries# ${q.input2} #end for# #for $q in $more_queries# ${q.more_queries_input} #end for# > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Concatenate Dataset"/>
+        <repeat name="queries" title="Dataset">
+            <param name="input2" type="data" label="Select" />
+        </repeat>
+        <repeat name="more_queries" title="Dataset">
+            <param name="more_queries_input" type="data" label="Select" />
+        </repeat>        
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" metadata_source="input1"/>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input1" value="simple_line.txt"/>
+            <param name="input2" value="simple_line.txt"/>
+            <output name="out_file1" file="simple_line_x2.txt"/>
+        </test>
+        <!--
+            The following tests continue to work, but for anything more
+            advanced than this simple case they should be considered
+            something of an anti-pattern - see disambiguate_repeats.xml
+            for superior syntax.
+        -->
+        <test>
+            <param name="input1" value="simple_line.txt"/>
+            <param name="input2" value="simple_line.txt"/>
+            <param name="input2" value="simple_line.txt"/>
+            <output name="out_file1" file="simple_line_x3.txt"/>
+        </test>
+        <test>
+            <param name="input1" value="simple_line.txt"/>
+            <param name="input2" value="simple_line.txt"/>
+            <param name="input2" value="simple_line.txt"/>
+            <param name="more_queries_input" value="simple_line.txt"/>
+            <param name="more_queries_input" value="simple_line.txt"/>            
+            <output name="out_file1" file="simple_line_x5.txt"/>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/multi_select.xml b/test/functional/tools/multi_select.xml
new file mode 100644
index 0000000..0c5fd56
--- /dev/null
+++ b/test/functional/tools/multi_select.xml
@@ -0,0 +1,45 @@
+<tool id="multi_select" name="multi_select" version="1.0.0">
+  <description>multi_select</description>
+  <configfiles>
+    <configfile name="config">${select_ex}</configfile>
+  </configfiles>
+  <command>cat $config > $output; echo '$select_optional' > $output2</command>
+  <inputs>
+    <param name="select_ex" type="select" display="checkboxes" multiple="true">
+      <option value="--ex1">Ex1</option>
+      <option value="ex2">Ex2</option>
+      <option value="--ex3">Ex3</option>
+      <option value="--ex4">Ex4</option>
+      <option value="ex5">Ex5</option>
+    </param>
+    <param name="select_optional" type="select" optional="true">
+      <option value="--ex1">Ex1</option>
+      <option value="ex2">Ex2</option>
+      <option value="--ex3">Ex3</option>
+      <option value="--ex4">Ex4</option>
+      <option value="ex5">Ex5</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="txt" name="output" />
+    <data format="txt" name="output2" />    
+  </outputs>
+  <tests>
+    <test>
+      <param name="select_ex" value="--ex1,ex2,--ex3" />
+      <output name="output">
+        <assert_contents>
+          <has_line line="--ex1,ex2,--ex3" />
+        </assert_contents>
+      </output>
+    </test>
+    <test>
+      <param name="select_ex" value="Ex1" />
+      <output name="output">
+        <assert_contents>
+          <has_line line="--ex1" />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/multiple_versions_v01.xml b/test/functional/tools/multiple_versions_v01.xml
new file mode 100644
index 0000000..8b05454
--- /dev/null
+++ b/test/functional/tools/multiple_versions_v01.xml
@@ -0,0 +1,11 @@
+<tool id="multiple_versions" name="multiple_versions" version="0.1">
+    <command>
+        echo "Version 0.1" > $out_file1
+    </command>
+    <inputs>
+        <param name="inttest" value="1" type="integer" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+</tool>
diff --git a/test/functional/tools/multiple_versions_v02.xml b/test/functional/tools/multiple_versions_v02.xml
new file mode 100644
index 0000000..7a7a3a1
--- /dev/null
+++ b/test/functional/tools/multiple_versions_v02.xml
@@ -0,0 +1,11 @@
+<tool id="multiple_versions" name="multiple_versions" version="0.2">
+    <command>
+        echo "Version 0.2" > $out_file1
+    </command>
+    <inputs>
+        <param name="inttest" value="1" type="integer" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+</tool>
diff --git a/test/functional/tools/output_action_change_format.xml b/test/functional/tools/output_action_change_format.xml
new file mode 100644
index 0000000..6586a12
--- /dev/null
+++ b/test/functional/tools/output_action_change_format.xml
@@ -0,0 +1,52 @@
+<tool id="output_action_change_format" name="output_action_change_format" version="1.0.0">
+    <command>
+        printf "1\t2\n" > out1;
+    </command>
+    <inputs>
+        <conditional name="input_cond">
+            <param type="select" name="dispatch" label="Dispatch on type">
+                <option value="dont">Dont</option>
+                <option value="do">Do</option>
+            </param>
+            <when value="dont">
+                <param type="data" name="input" format="data" />
+            </when>
+            <when value="do">
+                <param type="data" name="input" format="data" />
+            </when>
+        </conditional>
+    </inputs>
+    <outputs>
+        <data name="out1" from_work_dir="out1">
+            <actions>
+                <conditional name="input_cond.dispatch">
+                    <when value="do">
+                        <action type="format">
+                            <option type="from_param" name="input_cond.input" param_attribute="ext" />
+                        </action>
+                    </when>
+                </conditional>
+            </actions>
+        </data>
+    </outputs>
+    <tests>
+        <test>
+            <param name="dispatch" value="dont" />
+            <param name="input" value="simple_line.txt" />
+            <output name="out1" ftype="data">
+                <assert_contents>
+                    <has_line line="1	2" />
+                </assert_contents>
+            </output>
+        </test>
+        <test>
+            <param name="dispatch" value="do" />
+            <param name="input" value="simple_line.txt" />
+            <output name="out1" ftype="txt">
+                <assert_contents>
+                    <has_line line="1	2" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/output_auto_format.xml b/test/functional/tools/output_auto_format.xml
new file mode 100644
index 0000000..6a2c8f0
--- /dev/null
+++ b/test/functional/tools/output_auto_format.xml
@@ -0,0 +1,16 @@
+<tool id="output_auto_format" name="output_auto_format" version="1.0.0">
+  <command>cp $input 'out'</command>
+  <inputs>
+    <param name="input" type="data" format="data" label="An input dataset" help=""/>
+  </inputs>
+  <outputs>
+    <data auto_format="true" name="output" label="Auto Output" from_work_dir="out">
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="simple_line.txt" ftype="txt" />
+      <output name="output" file="simple_line.txt" ftype="txt" />
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/output_collection_filter.xml b/test/functional/tools/output_collection_filter.xml
new file mode 100644
index 0000000..e66e3dd
--- /dev/null
+++ b/test/functional/tools/output_collection_filter.xml
@@ -0,0 +1,52 @@
+<tool id="output_collection_filter" name="output_collection_filter" version="1.0.0">
+  <command>
+    echo "AF" > af.txt;
+    echo "AR" > ar.txt;
+    echo "BF" > bf.txt;
+    echo "BR" > br.txt;
+  </command>
+  <inputs>
+    <param name="output_type" type="select" label="Output Type">
+      <option value="a">A</option>
+      <option value="b">B</option>
+    </param>
+  </inputs>
+  <outputs>
+    <collection name="a_paired_output" type="paired" label="A Pair">
+      <data name="forward" format="txt" from_work_dir="af.txt"  />
+      <data name="reverse" format="txt" from_work_dir="ar.txt"  />
+      <filter>output_type == "a"</filter>
+    </collection>
+    <collection name="b_paired_output" type="paired" label="B Pair">
+      <data name="forward" format="txt" from_work_dir="bf.txt"  />
+      <data name="reverse" format="txt" from_work_dir="br.txt"  />
+      <filter>output_type == "b"</filter>
+    </collection>
+  </outputs>
+  <tests>
+    <!-- TODO: Enhance test cases to actually verify that the other
+         collection is not created. -->
+    <test>
+      <param name="output_type" value="a" />
+      <output_collection name="a_paired_output" type="paired">
+        <element name="forward" ftype="txt">
+          <assert_contents><has_line line="AF" /></assert_contents>
+        </element>
+        <element name="reverse" ftype="txt">
+          <assert_contents><has_line line="AR" /></assert_contents>
+        </element>
+      </output_collection>
+    </test>
+    <test>
+      <param name="output_type" value="b" />
+      <output_collection name="b_paired_output" type="paired">
+        <element name="forward" ftype="txt">
+          <assert_contents><has_line line="BF" /></assert_contents>
+        </element>
+        <element name="reverse" ftype="txt">
+          <assert_contents><has_line line="BR" /></assert_contents>
+        </element>
+      </output_collection>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/output_filter.xml b/test/functional/tools/output_filter.xml
new file mode 100644
index 0000000..480e6d6
--- /dev/null
+++ b/test/functional/tools/output_filter.xml
@@ -0,0 +1,46 @@
+<tool id="output_filter" name="output_filter" version="1.0.0">
+  <command>
+    echo "test" > 1;
+    echo "test" > 2;
+    echo "test" > 3;
+    echo "test" > 4;
+    echo "test" > 5;
+  </command>
+  <inputs>
+    <param name="produce_out_1" type="boolean" truevalue="true" falsevalue="false" checked="False" label="Do Filter 1" />
+    <param name="filter_text_1" type="text" value="1" />
+  </inputs>
+  <outputs>
+    <data format="txt" from_work_dir="1" name="out_1">
+      <filter>produce_out_1 is True</filter>
+    </data>
+    <data format="txt" from_work_dir="2" name="out_2">
+      <filter>filter_text_1 in ["foo", "bar"]</filter>
+      <!-- Must pass all filters... -->
+      <filter>filter_text_1 == "foo"</filter>
+    </data>
+    <data format="txt" from_work_dir="3" name="out_3">
+    </data>
+  </outputs>
+  <tests>
+    <test expect_num_outputs="3">
+      <param name="produce_out_1" value="true" />
+      <param name="filter_text_1" value="foo" />
+      <output name="out_1"><assert_contents><has_line line="test" /></assert_contents></output>
+      <output name="out_2"><assert_contents><has_line line="test" /></assert_contents></output>
+      <output name="out_3"><assert_contents><has_line line="test" /></assert_contents></output>
+    </test>
+    <test expect_num_outputs="2">
+      <param name="produce_out_1" value="true" />
+      <param name="filter_text_1" value="bar" /> <!-- fails second filter in out2 -->
+      <output name="out_1"><assert_contents><has_line line="test" /></assert_contents></output>
+      <output name="out_3"><assert_contents><has_line line="test" /></assert_contents></output>
+    </test>
+    <test expect_num_outputs="1">
+      <param name="produce_out_1" value="false" />
+      <param name="filter_text_1" value="not_foo_or_bar" />
+      <output name="out_3"><assert_contents><has_line line="test" /></assert_contents></output>
+    </test>
+  </tests>
+</tool>
+
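A loose model of the filtering these tests exercise: each <filter> body is
evaluated as a Python expression against the parameter values, and, as the
comment in the outputs notes, an output is kept only if every filter passes
(Galaxy's real evaluation context is richer than a bare dict):

    def keep_output(filters, params):
        return all(eval(f, {}, params) for f in filters)

    params = {"produce_out_1": True, "filter_text_1": "bar"}
    assert keep_output(['produce_out_1 is True'], params)
    # "bar" passes the membership filter but fails the equality filter:
    assert not keep_output(['filter_text_1 in ["foo", "bar"]',
                            'filter_text_1 == "foo"'], params)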
diff --git a/test/functional/tools/output_filter_exception_1.xml b/test/functional/tools/output_filter_exception_1.xml
new file mode 100644
index 0000000..2bbef5d
--- /dev/null
+++ b/test/functional/tools/output_filter_exception_1.xml
@@ -0,0 +1,35 @@
+<tool id="output_filter_exception_1" name="output_filter_exception_1" version="1.0.0">
+  <command>
+    echo "test" > 1;
+    echo "test" > 2;
+    echo "test" > 3;
+    echo "test" > 4;
+    echo "test" > 5;
+  </command>
+  <inputs>
+    <conditional name="options">
+      <param help="" label="Options" name="options" type="select">
+        <option selected="True" value="default">Use defaults</option>
+        <option value="advanced">Specify advanced options</option>
+      </param>
+      <when value="default" />
+      <when value="advanced">
+        <param falsevalue="" truevalue="--adv_opt1" help="" name="adv_opt1" type="boolean" />
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="txt" from_work_dir="1" name="out_1">
+    </data>
+    <data format="txt" from_work_dir="2" name="out_2">
+      <filter>options['adv_opt1'] is True</filter>
+    </data>
+  </outputs>
+  <tests>
+    <!-- The filter condition throws an exception, so the filter "fails" and the output is produced. -->
+    <test expect_num_outputs="2">
+      <output name="out_1"><assert_contents><has_line line="test" /></assert_contents></output>
+      <output name="out_2"><assert_contents><has_line line="test" /></assert_contents></output>
+    </test>
+  </tests>
+</tool>
\ No newline at end of file
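A sketch of the fail-open behavior this test depends on: when the default
conditional branch is taken, options has no adv_opt1 key, the filter
expression raises, and the output is produced anyway (assumed semantics,
modeled on the test comment above):

    def keep_output(filters, params):
        for f in filters:
            try:
                if not eval(f, {}, params):
                    return False
            except Exception:
                # A broken filter "fails" open: keep the output.
                pass
        return True

    assert keep_output(["options['adv_opt1'] is True"], {"options": {}})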
diff --git a/test/functional/tools/output_format.xml b/test/functional/tools/output_format.xml
new file mode 100644
index 0000000..4f95a20
--- /dev/null
+++ b/test/functional/tools/output_format.xml
@@ -0,0 +1,72 @@
+<tool id="output_format" name="output_format" version="1.0.0">
+  <command>
+    echo "test" > 1;
+    echo "test" > 2;
+    echo "test" > 3;
+    echo "test" > 4;
+    echo "test" > 5;
+  </command>
+  <inputs>
+    <param name="input_data_1" type="data" format="data" label="input_data_1" />
+    <param name="input_data_2" type="data" format="data" label="input_data_2" />
+    <param name="input_text" type="text" value="1"  label="input_text" />
+  </inputs>
+  <outputs>
+    <data format="txt" from_work_dir="1" name="direct_output" />
+    <!-- TODO: fix me; the following output gets a random type, either
+    fastqsanger or fastqsolexa. -->
+    <data format="input" from_work_dir="2" name="input_based_output" />
+    <data format="txt" from_work_dir="3" name="format_source_1_output" format_source="input_data_1" />
+    <data format="txt" from_work_dir="4" name="format_source_2_output" format_source="input_data_2" />
+    <data format="txt" from_work_dir="5" name="change_format_output">
+      <change_format>
+        <when input="input_text" value="foo" format="fastqsolexa" />
+        <when input="input_text" value="bar" format="fastqillumina" />
+      </change_format>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input_data_1" value="1.fastqsanger" ftype="fastqsanger" />
+      <param name="input_data_2" value="1.fastqsolexa" ftype="fastqsolexa" />
+      <param name="input_text" value="foo" />
+      <output name="direct_output" ftype="txt">
+        <assert_contents><has_line line="test" /></assert_contents>
+      </output>
+      <!-- In this case input_based_output ftype is "randomly" either
+           fastqsanger or fastqsolexa -->
+      <output name="format_source_1_output" ftype="fastqsanger">
+        <assert_contents><has_line line="test" /></assert_contents>
+      </output>
+      <output name="format_source_2_output" ftype="fastqsolexa">
+        <assert_contents><has_line line="test" /></assert_contents>
+      </output>
+      <!-- input_text == foo => format set to fastqsolexa -->
+      <output name="change_format_output" ftype="fastqsolexa">
+        <assert_contents><has_line line="test" /></assert_contents>
+      </output>
+    </test>
+    <test>
+      <param name="input_data_1" value="1.fastqsanger" ftype="fastqsanger" />
+      <param name="input_data_2" value="1.fastqsanger" ftype="fastqsanger" />
+      <param name="input_text" value="bar" />
+      <output name="input_based_output" ftype="fastqsanger">
+        <assert_contents><has_line line="test" /></assert_contents>
+      </output>
+      <!-- input_text == bar => format set to fastqillumina -->
+      <output name="change_format_output" ftype="fastqillumina">
+        <assert_contents><has_line line="test" /></assert_contents>
+      </output>
+    </test>
+    <test>
+      <param name="input_data_1" value="1.fastqsanger" ftype="fastqsanger" />
+      <param name="input_data_2" value="1.fastqsanger" ftype="fastqsanger" />
+      <param name="input_text" value="not_foo_or_bar" />
+      <!-- input_text doesn't match any when, so the format defaults to the
+      explicitly declared type. -->
+      <output name="change_format_output" ftype="txt">
+        <assert_contents><has_line line="test" /></assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
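A rough model of the <change_format> logic the three tests walk through: the
first matching <when> wins, and anything unmatched falls back to the format
declared on the <data> element:

    def resolve_format(declared, whens, value):
        for when_value, fmt in whens:
            if value == when_value:
                return fmt
        return declared

    whens = [("foo", "fastqsolexa"), ("bar", "fastqillumina")]
    assert resolve_format("txt", whens, "foo") == "fastqsolexa"
    assert resolve_format("txt", whens, "bar") == "fastqillumina"
    assert resolve_format("txt", whens, "not_foo_or_bar") == "txt"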
diff --git a/test/functional/tools/output_format_collection.xml b/test/functional/tools/output_format_collection.xml
new file mode 100644
index 0000000..1795179
--- /dev/null
+++ b/test/functional/tools/output_format_collection.xml
@@ -0,0 +1,35 @@
+<tool id="output_format_collection" name="output_format_collection" version="1.0.0">
+  <command>
+    echo "test" > 1;
+    echo "test" > 2;
+  </command>
+  <inputs>
+    <param name="input_collection" type="data_collection" format="data" />
+  </inputs>
+  <outputs>
+    <!-- Access by element name (for paired data) -->
+    <data from_work_dir="1" name="format_source_1_output" format_source="input_collection['forward']" />
+    <!-- Access by element index (for any collection) -->
+    <data from_work_dir="2" name="format_source_2_output" format_source="input_collection[0]" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input_collection">
+        <collection type="paired">
+          <element name="forward" value="simple_line.txt" />
+          <element name="reverse" value="simple_line_alternative.txt" />
+        </collection>
+      </param>
+      <output name="format_source_1_output" ftype="txt">
+          <assert_contents>
+            <has_line line="test" />
+          </assert_contents>
+      </output>
+      <output name="format_source_2_output" ftype="txt">
+          <assert_contents>
+            <has_line line="test" />
+          </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/output_order.xml b/test/functional/tools/output_order.xml
new file mode 100644
index 0000000..0658735
--- /dev/null
+++ b/test/functional/tools/output_order.xml
@@ -0,0 +1,25 @@
+<tool id="output_order" name="output_order" version="0.1.0">
+  <command>echo $pa > $output_a; echo $pb > $output_b</command>
+  <inputs>
+    <param name="pa" type="integer" value="1" />
+    <param name="pb" type="integer" value="2" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="output_a" />
+    <data format="txt" name="output_b" />
+  </outputs>
+  <tests>
+    <test>
+      <output name="output_b">
+        <assert_contents>
+          <has_line line="2" />
+        </assert_contents>
+      </output>
+      <output name="output_a">
+        <assert_contents>
+          <has_line line="1" />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/parallelism.xml b/test/functional/tools/parallelism.xml
new file mode 100644
index 0000000..b82ce19
--- /dev/null
+++ b/test/functional/tools/parallelism.xml
@@ -0,0 +1,20 @@
+<tool id="parallelism" name="Split file line-by-line and rebuild dataset">
+    <parallelism method="multi" split_inputs="input1" split_mode="to_size" split_size="1" merge_outputs="out_file1" />
+    <command>
+        cat $input1 > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Dataset"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="input1" value="simple_line_x2.txt"/>
+            <output name="out_file1" file="simple_line_x2.txt" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
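In spirit, method="multi" with split_mode="to_size" and split_size="1" chops
the input into one-line chunks, runs the command once per chunk, and
concatenates the merged outputs back together; a toy version (the real
splitters live in Galaxy's datatype classes):

    def split_to_size(lines, size=1):
        return [lines[i:i + size] for i in range(0, len(lines), size)]

    lines = ["This is a line of text.\n", "This is a line of text.\n"]
    chunks = split_to_size(lines)
    # Each chunk's output is just cat'd, so merging restores the original:
    assert [l for chunk in chunks for l in chunk] == lines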
diff --git a/test/functional/tools/parallelism_optional.xml b/test/functional/tools/parallelism_optional.xml
new file mode 100644
index 0000000..6941647
--- /dev/null
+++ b/test/functional/tools/parallelism_optional.xml
@@ -0,0 +1,21 @@
+<tool id="parallelism_optional" name="Split file line-by-line and rebuild dataset (with optional dataset)">
+    <parallelism method="multi" split_inputs="input1" split_mode="to_size" split_size="1" merge_outputs="out_file1" />
+    <command>
+        cat $input1 > $out_file1
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Dataset"/>
+        <param name="input2" type="data" label="Optional Dataset" optional="true" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="input1" value="simple_line_x2.txt"/>
+            <output name="out_file1" file="simple_line_x2.txt" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/paths_as_file.xml b/test/functional/tools/paths_as_file.xml
new file mode 100644
index 0000000..b6dd3b0
--- /dev/null
+++ b/test/functional/tools/paths_as_file.xml
@@ -0,0 +1,37 @@
+<tool id="paths_as_file" name="paths_as_file" version="0.1.0">
+    <configfiles>
+        <configfile name="check_paths_file"><![CDATA[
+import sys
+paths_file = sys.argv[1]
+sep = sys.argv[2]
+if sep == "NEWLINE":
+    sep = "\n"
+with open(paths_file, "r") as f:
+    paths = f.read()
+assert paths == sep.join(sys.argv[3:])
+]]></configfile>
+    </configfiles>
+    <command detect_errors="exit_code"><![CDATA[
+      python $check_paths_file $inputs.paths_as_file NEWLINE #for $f in $inputs# ${f} #end for#
+      &&
+      python $check_paths_file $inputs.paths_as_file(sep=',') ',' #for $f in $inputs# ${f} #end for#
+      &&
+      printf 'All Done' > $out1
+    ]]></command>
+    <inputs>
+        <param name="inputs" type="data" format="txt" multiple="true" label="Data 1" />
+    </inputs>
+    <outputs>
+        <data format="txt" name="out1" />
+    </outputs>
+    <tests>
+    <test>
+      <param name="inputs" value="simple_line.txt,simple_line_alternative.txt" />
+      <output name="out1">
+          <assert_contents>
+              <has_line line="All Done" />
+          </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
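The command above asserts what paths_as_file must produce: the dataset paths
joined by the separator, written to a file. In plain Python terms
(write_paths_file is a hypothetical stand-in for Galaxy's wrapper method):

    def write_paths_file(paths, dest, sep="\n"):
        with open(dest, "w") as f:
            f.write(sep.join(paths))
        return dest

    write_paths_file(["/tmp/a.txt", "/tmp/b.txt"], "paths.txt", sep=",")
    # paths.txt now holds "/tmp/a.txt,/tmp/b.txt", which is exactly what the
    # configfile script checks with: assert paths == sep.join(sys.argv[3:])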
diff --git a/test/functional/tools/sam_to_bam.xml b/test/functional/tools/sam_to_bam.xml
new file mode 100644
index 0000000..46b92f2
--- /dev/null
+++ b/test/functional/tools/sam_to_bam.xml
@@ -0,0 +1,19 @@
+<tool id="sam_to_conversion" name="Test sam to bam conversion">
+    <command>
+        cat '$input1' > '$out_file1'
+    </command>
+    <inputs>
+        <param name="input1" type="data" format="bam" label="Concatenate Dataset"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="bam"/>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input1" value="sam_with_header.sam" ftype="sam"/>
+            <output name="out_file1" file="bam_from_sam.bam"/>
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/sample_data_manager_conf.xml b/test/functional/tools/sample_data_manager_conf.xml
new file mode 100644
index 0000000..e325f6b
--- /dev/null
+++ b/test/functional/tools/sample_data_manager_conf.xml
@@ -0,0 +1,16 @@
+<data_managers tool_path="test/functional/tools">
+  <data_manager tool_file="data_manager.xml" id="test_data_manager" version="1.0">
+    <data_table name="testbeta">
+      <output>
+        <column name="value" />
+        <column name="path" output_ref="out_file" >
+		  <move type="directory" relativize_symlinks="True">
+		  	<target base="${GALAXY_DATA_MANAGER_DATA_PATH}">testbeta/${value}</target>
+		  </move>
+ 		  <value_translation>${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta/${value}/${path}</value_translation>
+		  <value_translation type="function">abspath</value_translation>
+		</column>
+      </output>
+    </data_table>
+  </data_manager>
+</data_managers>
diff --git a/test/functional/tools/sample_datatypes_conf.xml b/test/functional/tools/sample_datatypes_conf.xml
new file mode 100644
index 0000000..3891cad
--- /dev/null
+++ b/test/functional/tools/sample_datatypes_conf.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<datatypes>
+  <registration converters_path="lib/galaxy/datatypes/converters" display_path="display_applications">
+    <datatype extension="velvet" type="galaxy.datatypes.assembly:Velvet" display_in_upload="true"/>
+    <datatype extension="txt" type="galaxy.datatypes.data:Text" display_in_upload="true"/>
+    <datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
+    <datatype extension="fasta" type="galaxy.datatypes.sequence:Fasta" display_in_upload="true" />
+    <datatype extension="fastq" type="galaxy.datatypes.sequence:Fastq" display_in_upload="true" />
+    <datatype extension="fastqsanger" type="galaxy.datatypes.sequence:FastqSanger" display_in_upload="true" />
+    <datatype extension="fastqsolexa" type="galaxy.datatypes.sequence:FastqSolexa" display_in_upload="true" />
+    <datatype extension="fastqcssanger" type="galaxy.datatypes.sequence:FastqCSSanger" display_in_upload="true" />
+    <datatype extension="fastqillumina" type="galaxy.datatypes.sequence:FastqIllumina" display_in_upload="true" />
+    <datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true" />
+    <datatype extension="bam" type="galaxy.datatypes.binary:Bam" mimetype="application/octet-stream" display_in_upload="true" description="A binary file compressed in the BGZF format with a '.bam' file extension." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#BAM" />
+    <datatype extension="bcf" type="galaxy.datatypes.binary:Bcf" mimetype="application/octet-stream" display_in_upload="true" description="A binary file compressed in the BGZF format with a '.bcf' file extension." description_url="https://wiki.galaxyproject.org/Learn/Datatypes#BCF" />
+    <datatype extension="biom1" type="galaxy.datatypes.text:Biom1" display_in_upload="True" subclass="True" mimetype="application/json"/>
+  </registration>
+</datatypes>
diff --git a/test/functional/tools/samples_tool_conf.xml b/test/functional/tools/samples_tool_conf.xml
new file mode 100644
index 0000000..54e6ad0
--- /dev/null
+++ b/test/functional/tools/samples_tool_conf.xml
@@ -0,0 +1,131 @@
+<?xml version="1.0"?>
+<toolbox tool_path="${tool_conf_dir}" is_shed_conf="false">
+  <tool file="upload.xml"/>
+  <tool file="simple_constructs.xml" />
+  <tool file="color_param.xml" />
+  <tool file="inheritance_simple.xml" />
+  <tool file="boolean_conditional.xml" />
+  <tool file="composite.xml" />
+  <tool file="environment_variables.xml" />
+  <tool file="code_file.xml" />
+  <tool file="disambiguate_cond.xml" />
+  <tool file="multi_repeats.xml"/>
+  <tool file="library_data.xml"/>
+  <tool file="multi_select.xml" />
+  <tool file="multi_output.xml" />
+  <tool file="multi_output_configured.xml" />
+  <tool file="multi_output_assign_primary.xml" />
+  <tool file="tool_provided_metadata_1.xml" />
+  <tool file="tool_provided_metadata_2.xml" />
+  <tool file="tool_provided_metadata_3.xml" />
+  <tool file="inputs_as_json.xml" />
+  <tool file="dbkey_filter_input.xml" />
+  <tool file="dbkey_filter_multi_input.xml" />
+  <tool file="dbkey_output_action.xml" />
+  <tool file="composite_output.xml" />
+  <tool file="composite_output_tests.xml" />
+  <tool file="unicode_stream.xml" />
+  <tool file="metadata.xml" />
+  <tool file="metadata_bam.xml" />
+  <tool file="metadata_bcf.xml" />
+  <tool file="metadata_biom1.xml" />
+  <tool file="metadata_column_names.xml" />
+  <tool file="strict_shell.xml" />
+  <tool file="strict_shell_default_off.xml" />
+  <tool file="detect_errors_aggressive.xml" />
+  <tool file="md5sum.xml" />
+  <tool file="checksum.xml" />
+  <!--
+  TODO: Figure out why this transiently fails on Jenkins.
+  <tool file="maxseconds.xml" />
+  -->
+  <tool file="job_properties.xml" />
+  <tool file="version_command_plain.xml" />
+  <tool file="version_command_interpreter.xml" />
+  <tool file="version_command_tool_dir.xml" />
+  <tool file="exit_code_from_file.xml" />
+  <tool file="gzipped_inputs.xml" />
+  <tool file="output_order.xml" />
+  <tool file="output_format.xml" />
+  <tool file="output_format_collection.xml" />
+  <tool file="output_filter.xml" />
+  <tool file="output_filter_exception_1.xml" />
+  <tool file="output_collection_filter.xml" />
+  <tool file="output_auto_format.xml" />
+  <tool file="create_10.xml" />
+  <tool file="disambiguate_repeats.xml" />
+  <tool file="min_repeat.xml" />
+  <tool file="parallelism.xml" />
+  <tool file="parallelism_optional.xml" />
+  <tool file="implicit_default_conds.xml" />
+  <tool file="multi_data_param.xml" />
+  <tool file="multi_data_repeat.xml" />
+  <tool file="multi_data_optional.xml" />
+  <tool file="paths_as_file.xml" />
+  <tool file="column_param.xml" />
+  <tool file="column_multi_param.xml" />
+  <tool file="special_params.xml" />
+  <tool file="section.xml" />
+  <tool file="top_level_data.xml" />
+  <tool file="validation_default.xml" />
+  <tool file="validation_sanitizer.xml" />
+  <tool file="validation_repeat.xml" />
+  <tool file="empty_output.xml" />
+  <tool file="validation_empty_dataset.xml" />
+  <tool file="identifier_single.xml" />
+  <tool file="identifier_multiple.xml" />
+  <tool file="identifier_collection.xml" />
+  <tool file="tool_directory.xml" />
+  <tool file="output_action_change_format.xml" />
+  <tool file="collection_paired_test.xml" />
+  <tool file="collection_nested_test.xml" />
+  <tool file="collection_mixed_param.xml" />
+  <tool file="collection_two_paired.xml" />
+  <tool file="collection_creates_pair.xml" />
+  <tool file="collection_creates_pair_from_type.xml" />
+  <tool file="collection_creates_list.xml" />
+  <tool file="collection_creates_list_2.xml" />
+  <tool file="collection_creates_list_of_pairs.xml" />
+  <tool file="collection_optional_param.xml" />
+  <tool file="collection_split_on_column.xml" />
+  <tool file="collection_creates_dynamic_nested.xml" />
+  <tool file="collection_creates_dynamic_list_of_pairs.xml" />
+  <tool file="collection_type_source.xml" />
+  <tool file="cheetah_casting.xml" />
+
+  <tool file="cheetah_problem_unbound_var.xml" />
+  <tool file="cheetah_problem_unbound_var_input.xml" />
+  <tool file="cheetah_problem_syntax_error.xml" />
+
+  <tool file="multiple_versions_v01.xml" />
+  <tool file="multiple_versions_v02.xml" />
+
+  <!-- Tools interesting only for building up test workflows. -->
+
+  <!-- Next three tools demonstrate concatenating multiple datasets
+       with a repeat, multiple datasets with a multiple input data
+       parameter, and multiple datasets from a collection. -->
+  <tool file="for_workflows/cat.xml" />
+  <tool file="for_workflows/cat_list.xml" />
+  <tool file="for_workflows/cat_collection.xml" />
+  <tool file="for_workflows/head.xml" />
+  <tool file="for_workflows/cat_interleave.xml" />
+  <tool file="for_workflows/pileup.xml" />
+  <tool file="for_workflows/mapper.xml" />
+  <tool file="for_workflows/split.xml" />
+  <tool file="for_workflows/create_input_collection.xml" />
+
+  <tool file="mulled_example_multi_1.xml" />
+
+  <tool file="simple_constructs.yml" />
+
+  <!-- Load collection operation tools - I consider these part of the
+       "framework" and they are used to in API tests to test the underlying
+       ToolActions. -->
+  <tool file="${model_tools_path}/unzip_collection.xml" />
+  <tool file="${model_tools_path}/zip_collection.xml" />
+  <tool file="${model_tools_path}/filter_failed_collection.xml" />
+  <tool file="${model_tools_path}/flatten_collection.xml" />
+  <tool file="${model_tools_path}/merge_collection.xml" />
+
+</toolbox>
diff --git a/test/functional/tools/section.xml b/test/functional/tools/section.xml
new file mode 100644
index 0000000..29bb7fe
--- /dev/null
+++ b/test/functional/tools/section.xml
@@ -0,0 +1,43 @@
+<tool id="section" name="section">
+    <command>
+        echo "$int.inttest"   >> $out_file1; 
+        echo "$float.floattest" >> $out_file1; 
+    </command>
+    <inputs>
+        <section name="int" title="Integer Section" expanded="true">
+            <param name="inttest" value="1" type="integer" />
+        </section>
+        <section name="float" title="Float Section" expanded="false">
+            <param name="floattest" value="1.0" type="float" />
+        </section>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="inttest" value="12456" />
+            <param name="floattest" value="6.789" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="12456" />
+                    <has_line line="6.789" />
+                </assert_contents>
+            </output>
+        </test>
+        <test>
+            <section name="int">
+                <param name="inttest" value="12456" />
+            </section>
+            <section name="float">
+                <param name="floattest" value="6.789" />
+            </section>
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="12456" />
+                    <has_line line="6.789" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/simple_constructs.xml b/test/functional/tools/simple_constructs.xml
new file mode 100644
index 0000000..6fdde0f
--- /dev/null
+++ b/test/functional/tools/simple_constructs.xml
@@ -0,0 +1,77 @@
+<tool id="simple_constructs" name="simple_constructs" version="1.0.0">
+    <command>
+        echo "$p1.p1val"  >> $out_file1;
+        echo "$booltest"  >> $out_file1;
+        echo "$inttest"   >> $out_file1; 
+        echo "$floattest" >> $out_file1;
+        echo "$radio_select" >> $out_file1;
+        echo "$check_select" >> $out_file1;
+        echo "$drop_select"  >> $out_file1;
+        cat "$files[0].file" >> $out_file1;
+    </command>
+    <inputs>
+        <conditional name="p1">
+            <param type="boolean" name="p1use" />
+            <when value="true">
+                <param name="p1val" value="p1used" type="text" />
+            </when>
+            <when value="false">
+                <param name="p1val" value="p1notused" type="text" />
+            </when>
+        </conditional>
+        <param name="booltest" truevalue="booltrue" falsevalue="boolfalse" checked="false" type="boolean" />
+        <param name="inttest" value="1" type="integer" />
+        <param name="floattest" value="1.0" type="float" />
+        <param name="radio_select" type="select" display="radio">
+            <option value="a_radio" selected="true">A Radio</option>
+            <option value="b_radio">B Radio</option>
+            <option value="c_radio">C Radio</option>
+        </param>
+        <param name="check_select" type="select" display="checkboxes" multiple="true">
+            <option value="a_check" selected="true">A Check</option>
+            <option value="b_check">B Check</option>
+            <option value="c_check">C Check</option>
+        </param>
+        <param name="drop_select" type="select">
+            <option value="a_drop" selected="true">A Drop</option>
+            <option value="b_drop">B Drop</option>
+            <option value="c_drop">C Drop</option>
+        </param>
+        <repeat name="files" title="Files">
+            <param name="file" type="data" format="txt" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="p1use" value="true" />
+            <param name="booltest" value="true" />
+            <param name="inttest" value="12456" />
+            <param name="floattest" value="6.789" />
+            <param name="file" value="simple_line.txt" /> <!-- This is a line of text. -->
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="p1used" />
+                    <has_line line="booltrue" />
+                    <has_line line="12456" />
+                    <has_line line="6.789" />
+                    <has_line line="This is a line of text." />
+                </assert_contents>
+            </output>
+        </test>
+        <test>
+            <!-- Again but using boolean's truevalue -->
+            <param name="p1use" value="true" />
+            <param name="booltest" value="booltrue" />
+            <param name="file" value="simple_line.txt" />
+            <output name="out_file1">
+                <assert_contents>
+                    <has_line line="p1used" />
+                    <has_line line="booltrue" />
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/simple_constructs.yml b/test/functional/tools/simple_constructs.yml
new file mode 100644
index 0000000..cc8be9d
--- /dev/null
+++ b/test/functional/tools/simple_constructs.yml
@@ -0,0 +1,140 @@
+id: simple_constructs_y
+name: simple_constructs_y
+class: GalaxyTool
+version: 1.0
+description: Simple constructs
+edam_operations:
+  - operation_0004
+  - operation_0336
+edam_topics:
+  - topic_0003
+  - topic_3372
+command:
+  >
+    echo "$booltest"  >> $out_file1;
+    echo "$inttest"   >> $out_file1;
+    echo "$floattest" >> $out_file1;
+    cat "$simp_file"   >> $out_file1;
+    echo "$drop_select"  >> $out_file1;
+    echo "$radio_select" >> $out_file1;
+    echo "$check_select" >> $out_file1;
+    cat "$more_files[0].nestinput"   >> $out_file1;
+    echo "$p1.p1val"  >> $out_file1;
+inputs:
+- name: booltest
+  type: boolean
+  truevalue: booltrue
+  falsevalue: boolfalse
+  checked: false
+- name: inttest
+  type: integer
+  value: 1
+- name: floattest
+  type: float
+  value: 1.0
+- name: simp_file
+  type: data
+- name: drop_select
+  type: select
+  options:
+    - value: a_drop
+      selected: true
+      label: A Drop
+    - value: b_drop
+      label: B Drop
+    - value: c_drop
+      label: C Drop
+- name: radio_select
+  type: select
+  display: radio
+  options:
+    - value: a_radio
+      label: A Radio
+      selected: true
+    - value: b_radio
+      label: B Radio
+    - value: c_radio
+      label: C Radio
+- name: check_select
+  display: checkboxes
+  type: select
+  multiple: true
+  options:
+    - value: a_check
+      selected: true
+      label: A Check
+    - value: b_check
+      label: B Check
+    - value: c_check
+      label: C Check
+- name: more_files
+  label: "More Files"
+  type: repeat
+  blocks:
+    - type: data
+      name: nestinput
+- name: p1
+  type: conditional
+  test:
+    type: boolean
+    name: p1use
+  when:
+    true:
+      - name: p1val
+        type: text
+        value: p1used
+    false:
+      - name: p1val
+        type: text
+        value: p1notused
+
+outputs:
+  out_file1:
+    format: txt
+tests:
+- inputs:
+    booltest: true
+    inttest: 12456
+    floattest: 6.789
+    simp_file: simple_line.txt
+    nestinput: simple_line_alternative.txt
+    p1use: true
+  outputs:
+    out_file1:
+      asserts:
+        has_line:
+          line: booltrue
+        has_line:
+          line: 12456
+        has_line:
+          line: 6.789
+        has_line:
+          line: "This is a line of text."
+        has_line:
+          line: "This is a different line of text."
+        has_line:
+          line: p1used
+        has_line:
+          line: a_drop
+        has_line:
+          line: a_check
+        has_line:
+          line: a_radio
+- inputs:
+    simp_file: simple_line.txt
+    nestinput: simple_line_alternative.txt
+    check_select: "a_check,b_check"
+  outputs:
+    out_file1:
+      asserts:
+        has_line:
+          line: "a_check,b_check"
+- inputs:
+    simp_file: simple_line.txt
+    nestinput: simple_line_alternative.txt
+    p1use: false
+  outputs:
+    out_file1:
+      asserts:
+        has_line:
+          line: p1notused
diff --git a/test/functional/tools/special_params.xml b/test/functional/tools/special_params.xml
new file mode 100644
index 0000000..464b218
--- /dev/null
+++ b/test/functional/tools/special_params.xml
@@ -0,0 +1,36 @@
+<tool id="special_params" name="special_params" version="1.0.0">
+  <command>echo $__root_dir__ > out_root_dir;
+    echo $__datatypes_config__ > out_datatypes_config;
+    echo $__admin_users__ > out_admin_users;
+    echo $__user_email__ > out_user_email
+  </command>
+  <inputs>
+    <param name="ignored" type="integer" value="0" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="out_root_dir" from_work_dir="out_root_dir" />
+    <data format="txt" name="out_datatypes_config" from_work_dir="out_datatypes_config" />
+    <data format="txt" name="out_admin_users" from_work_dir="out_admin_users" />
+    <data format="txt" name="out_user_email" from_work_dir="out_user_email" />
+  </outputs>
+  <tests>
+    <test>
+      <output name="out_root_dir">
+        <!-- Is an absolute path. -->
+        <assert_contents><has_line_matching expression="^\/.*$" /></assert_contents>
+      </output>
+      <output name="out_datatypes_config">
+        <!-- Is an absolute path. -->
+        <assert_contents><has_line_matching expression="^\/.*$" /></assert_contents>
+      </output>
+      <output name="out_admin_users">
+        <!-- Has at least one e-mail address. -->
+        <assert_contents><has_text text="@" /></assert_contents>
+      </output>
+      <output name="out_user_email">
+        <!-- Looks like an e-mail address. -->
+        <assert_contents><has_line_matching expression="[^@]+@[^@]+\.[^@]+" /></assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/strict_shell.xml b/test/functional/tools/strict_shell.xml
new file mode 100644
index 0000000..58626c9
--- /dev/null
+++ b/test/functional/tools/strict_shell.xml
@@ -0,0 +1,23 @@
+<tool id="strict_shell" name="strict_shell" version="1.0.0">
+    <command strict="true" detect_errors="exit_code">
+        echo "Hello" > $out_file1
+        ; sh -c "exit $exit_code"
+        ; sh -c "exit 0"
+    </command>
+    <inputs>
+        <param name="exit_code" type="integer" value="0" label="exit code"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <tests>
+        <test expect_exit_code="0" expect_failure="false">
+            <param name="exit_code" value="0" />
+        </test>
+        <test expect_exit_code="1" expect_failure="true">
+            <param name="exit_code" value="1" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/strict_shell_default_off.xml b/test/functional/tools/strict_shell_default_off.xml
new file mode 100644
index 0000000..50d1751
--- /dev/null
+++ b/test/functional/tools/strict_shell_default_off.xml
@@ -0,0 +1,20 @@
+<tool id="strict_shell_default_off" name="strict_shell_default_off" version="1.0.0">
+    <command detect_errors="exit_code">
+        echo "Hello" > $out_file1
+        ; sh -c "exit $exit_code"
+        ; sh -c "exit 0"
+    </command>
+    <inputs>
+        <param name="exit_code" type="integer" value="0" label="exit code"/>
+    </inputs>
+    <outputs>
+        <data name="out_file1" />
+    </outputs>
+    <tests>
+        <test expect_exit_code="0" expect_failure="false">
+            <param name="exit_code" value="1" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
diff --git a/test/functional/tools/tool_directory.xml b/test/functional/tools/tool_directory.xml
new file mode 100644
index 0000000..53532f5
--- /dev/null
+++ b/test/functional/tools/tool_directory.xml
@@ -0,0 +1,20 @@
+<tool id="tool_directory" name="tool_directory">
+  <command>
+    cp $__tool_directory__/tool_directory.xml output1
+  </command>
+  <inputs>
+    <param type="integer" name="ignored" label="Ignored" value="0" />
+  </inputs>
+  <outputs>
+    <data name="output1" format="xml" from_work_dir="output1" />
+  </outputs>
+  <tests>
+    <test>
+      <output name="output1">
+        <assert_contents>
+          <has_text text="QUINE" />
+        </assert_contents>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/tool_provided_metadata_1.xml b/test/functional/tools/tool_provided_metadata_1.xml
new file mode 100644
index 0000000..02d971a
--- /dev/null
+++ b/test/functional/tools/tool_provided_metadata_1.xml
@@ -0,0 +1,29 @@
+<tool id="tool_provided_metadata_1" name="tool_provided_metadata_1">
+    <command>
+      echo "This is a line of text." > $out1;
+      cp $c1 galaxy.json;
+    </command>
+    <configfiles>
+      <configfile name="c1">{"type": "dataset", "dataset_id": $out1.dataset.dataset.id, "name": "my dynamic name", "ext": "txt", "info": "my dynamic info", "dbkey": "cust1"}</configfile>
+    </configfiles>
+    <inputs>
+        <param name="input1" type="data" label="Input Dataset"/>
+    </inputs>
+    <outputs>
+        <!-- Set format="auto" to read from galaxy.json, use auto_format="true"
+             to sniff. -->
+        <data name="out1" format="auto" />
+    </outputs>
+    <help>
+    </help>
+    <tests>
+      <test>
+        <param name="input1" value="simple_line.txt" />
+        <output name="out1" file="simple_line.txt" ftype="txt">
+          <metadata name="name" value="my dynamic name" />
+          <metadata name="info" value="my dynamic info" />
+          <metadata name="dbkey" value="cust1" />
+        </output>
+      </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/tool_provided_metadata_2.xml b/test/functional/tools/tool_provided_metadata_2.xml
new file mode 100644
index 0000000..8879d0f
--- /dev/null
+++ b/test/functional/tools/tool_provided_metadata_2.xml
@@ -0,0 +1,38 @@
+<tool id="tool_provided_metadata_2" name="tool_provided_metadata_2">
+  <command>
+    echo "1" > sample1.report.tsv;
+    echo "2" > sample2.report.tsv;
+    cp $c1 galaxy.json;
+  </command>
+  <configfiles>
+      <configfile name="c1">{"type": "new_primary_dataset", "filename": "sample1.report.tsv", "name": "cool name 1", "ext": "txt", "info": "cool 1 info", "dbkey": "hg19"}
+{"type": "new_primary_dataset", "filename": "sample2.report.tsv", "name": "cool name 2", "ext": "txt", "info": "cool 2 info", "dbkey": "hg19"}
+</configfile>
+  </configfiles>
+  <inputs>
+    <param name="input" type="data" />
+  </inputs>
+  <outputs>
+    <data name="sample">
+      <discover_datasets pattern="(?P<designation>.+)\.report\.tsv" visible="true" />
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" ftype="txt" value="simple_line.txt"/>
+      <output name="sample">
+        <discovered_dataset designation="sample1" ftype="txt">
+          <assert_contents><has_line line="1" /></assert_contents>
+          <metadata name="name" value="cool name 1" />
+          <metadata name="dbkey" value="hg19" />
+          <metadata name="info" value="cool 1 info" />
+        </discovered_dataset>
+        <discovered_dataset designation="sample2" ftype="txt">
+          <assert_contents><has_line line="2" /></assert_contents>
+          <metadata name="name" value="cool name 2" />
+          <metadata name="info" value="cool 2 info" />
+        </discovered_dataset>
+      </output>
+    </test>
+  </tests>
+</tool>
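
For reference, the galaxy.json file these metadata tools write is JSON
lines (one object per line), not a JSON array. A minimal sketch of
producing the same file outside a configfile block:

    import json

    entries = [
        {"type": "new_primary_dataset", "filename": "sample1.report.tsv",
         "name": "cool name 1", "ext": "txt", "info": "cool 1 info", "dbkey": "hg19"},
        {"type": "new_primary_dataset", "filename": "sample2.report.tsv",
         "name": "cool name 2", "ext": "txt", "info": "cool 2 info", "dbkey": "hg19"},
    ]
    with open("galaxy.json", "w") as fh:
        for entry in entries:
            fh.write(json.dumps(entry) + "\n")
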
diff --git a/test/functional/tools/tool_provided_metadata_3.xml b/test/functional/tools/tool_provided_metadata_3.xml
new file mode 100644
index 0000000..a9a64f6
--- /dev/null
+++ b/test/functional/tools/tool_provided_metadata_3.xml
@@ -0,0 +1,43 @@
+<tool id="tool_provided_metadata_3" name="tool_provided_metadata_3">
+  <command>
+    echo "1" > sample1.report.tsv;
+    echo "2" > sample2.report.tsv;
+    cp $c1 galaxy.json;
+  </command>
+  <configfiles>
+      <configfile name="c1">{"type": "new_primary_dataset", "filename": "sample1.report.tsv", "name": "cool name 1", "ext": "txt", "info": "cool 1 info", "dbkey": "hg19", "metadata": {"data_lines": 10, "foo": "bar"}}
+{"type": "new_primary_dataset", "filename": "sample2.report.tsv", "name": "cool name 2", "ext": "txt", "info": "cool 2 info", "dbkey": "hg19", "metadata": {"data_lines": 20, "foo": "bar"}}
+</configfile>
+  </configfiles>
+  <inputs>
+    <param name="input" type="data" />
+  </inputs>
+  <outputs>
+    <data name="sample">
+      <discover_datasets pattern="(?P<designation>.+)\.report\.tsv" visible="true" />
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" ftype="txt" value="simple_line.txt"/>
+      <output name="sample">
+        <discovered_dataset designation="sample1" ftype="txt">
+          <assert_contents><has_line line="1" /></assert_contents>
+          <!-- Datatype defined metadata can be overridden/specified directly.
+          -->
+          <metadata name="data_lines" value="10" />
+          <!-- Non-datatype defined metadata values are ignored by the framework.
+               Uncommenting the following test will break this test.
+          -->
+          <!--
+          <metadata name="foo" value="bar" />
+          -->
+        </discovered_dataset>
+        <discovered_dataset designation="sample2" ftype="txt">
+          <assert_contents><has_line line="2" /></assert_contents>
+          <metadata name="data_lines" value="20" />
+        </discovered_dataset>
+      </output>
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/top_level_data.xml b/test/functional/tools/top_level_data.xml
new file mode 100644
index 0000000..37d2ba7
--- /dev/null
+++ b/test/functional/tools/top_level_data.xml
@@ -0,0 +1,34 @@
+<tool id="top_level_data" name="top_level_data" version="0.1.0">
+  <command>
+    cat '${f1}' >> $out1;
+    echo '${library.f2}' >> $out2; <!-- cannot use just f2 here -->
+  </command>
+  <inputs>
+    <conditional name="library">
+      <param name="type" type="select" label="Parameter Settings">
+        <option value="no">Use defaults</option>
+        <option value="yes">Full parameter list</option>
+      </param>
+      <when value="yes">
+        <param name="f1" type="data" format="txt" label="Data 1" />
+        <param name="f2" type="text" label="Text 1" />
+      </when>
+      <when value="no">
+        <param name="f1" type="data" format="txt" label="Data 1" />
+        <param name="f2" type="text" label="TExt 1" />
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="txt" name="out1" />
+    <data format="txt" name="out2" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="f1" value="simple_line.txt" />
+      <param name="f2" value="This is a line of text." />
+      <output name="out1" file="simple_line.txt" />
+      <output name="out2" file="simple_line.txt" />
+    </test>
+  </tests>
+</tool>
diff --git a/test/functional/tools/unicode_stream.xml b/test/functional/tools/unicode_stream.xml
new file mode 100644
index 0000000..941f3f9
--- /dev/null
+++ b/test/functional/tools/unicode_stream.xml
@@ -0,0 +1,41 @@
+<tool id="unicode_stream" name="unicode_stream" version="0.1.0">
+    <description>
+    </description>
+    <configfiles>
+        <configfile name="cf">ვეპხის ტყაოსანი შოთა რუსთაველი
+</configfile>
+    </configfiles>
+    <command detect_errors="exit_code">
+        echo '$input1' > $out_file1;
+        cat $cf;
+        >&2 cat $cf;
+        sh -c "exit $exit"
+    </command>
+    <inputs>
+        <param name="input1" type="text" label="Input">
+            <sanitizer sanitize="False">
+            </sanitizer>
+        </param>
+        <param name="exit" type="integer" value="0" label="Exit Code" />
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="txt" />
+    </outputs>
+    <tests>
+        <test expect_exit_code="0" expect_failure="false">
+            <param name="input1" value="This is a line of text."/>
+            <param name="exit" value="0" />
+            <output name="out_file1" file="simple_line.txt" />
+        </test>
+        <test expect_exit_code="1" expect_failure="true">
+            <param name="input1" value="This is a line of text."/>
+            <param name="exit" value="1" />
+        </test>
+        <test expect_exit_code="0" expect_failure="false">
+            <param name="input1" value="ვვვვვ"/>
+            <param name="exit" value="0" />
+        </test>
+    </tests>
+    <help>
+    </help>
+</tool>
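
A minimal sketch (assuming a UTF-8 locale) of what the tool exercises: the
same non-ASCII configfile text emitted on stdout and stderr, mirroring
`cat $cf; >&2 cat $cf`:

    # -*- coding: utf-8 -*-
    from __future__ import print_function
    import sys

    TEXT = u"ვეპხის ტყაოსანი შოთა რუსთაველი"
    print(TEXT)
    print(TEXT, file=sys.stderr)
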
diff --git a/test/functional/tools/upload.py b/test/functional/tools/upload.py
new file mode 120000
index 0000000..6cf43a5
--- /dev/null
+++ b/test/functional/tools/upload.py
@@ -0,0 +1 @@
+../../../tools/data_source/upload.py
\ No newline at end of file
diff --git a/test/functional/tools/upload.xml b/test/functional/tools/upload.xml
new file mode 120000
index 0000000..3096f6a
--- /dev/null
+++ b/test/functional/tools/upload.xml
@@ -0,0 +1 @@
+../../../tools/data_source/upload.xml
\ No newline at end of file
diff --git a/test/functional/tools/upload_tool_conf.xml b/test/functional/tools/upload_tool_conf.xml
new file mode 100644
index 0000000..29a644b
--- /dev/null
+++ b/test/functional/tools/upload_tool_conf.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0"?>
+<toolbox tool_path="${tool_conf_dir}" is_shed_conf="false">
+  <section id="getext" name="Get Data">
+    <tool file="upload.xml"/>
+  </section>
+</toolbox>
diff --git a/test/functional/tools/validation_default.xml b/test/functional/tools/validation_default.xml
new file mode 100644
index 0000000..0aba807
--- /dev/null
+++ b/test/functional/tools/validation_default.xml
@@ -0,0 +1,34 @@
+<tool id="validation_default" name="Validation (default)">
+  <command>
+    echo "$input1" > out1;
+    echo $float_param > out2;
+    echo $select_param > out3;
+  </command>
+  <inputs>
+    <param name="input1" type="text" label="text input" />
+    <param name="float_param" type="float" label="float input" value="8.0" />
+    <param name="select_param" type="select" label="select_param">
+      <option value="opt1">Option 1</option>
+      <option value="opt2">Option 2</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data name="out_file1" from_work_dir="out1" />
+    <data name="out_file2" from_work_dir="out2" />
+    <data name="out_file3" from_work_dir="out3" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="" ; echo "moo" />
+      <param name="float_param" value="5" />
+      <param name="select_param" value="opt1" />
+      <output name="out_file1">
+         <assert_contents>
+            <has_line line="__dq__ X echo __dq__moo" />
+         </assert_contents>
+      </output>
+    </test>
+  </tests>
+  <help>
+  </help>
+</tool>
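
An illustrative model (an assumption for readability, not Galaxy's actual
implementation) of the default sanitizer the test above relies on: double
quotes map to __dq__ and disallowed characters such as ";" become "X",
turning the injected value into the asserted line:

    import string

    VALID = set(string.ascii_letters + string.digits + " -=_.()/+*^,:?!")

    def sanitize_default(text):
        out = []
        for ch in text:
            if ch == '"':
                out.append("__dq__")
            elif ch in VALID:
                out.append(ch)
            else:
                out.append("X")
        return "".join(out)

    print(sanitize_default('" ; echo "moo'))  # __dq__ X echo __dq__moo
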
diff --git a/test/functional/tools/validation_empty_dataset.xml b/test/functional/tools/validation_empty_dataset.xml
new file mode 100644
index 0000000..589121b
--- /dev/null
+++ b/test/functional/tools/validation_empty_dataset.xml
@@ -0,0 +1,17 @@
+<tool id="validation_empty_dataset" name="validation_empty_dataset">
+  <command>
+    echo "Hello World" > out1;
+  </command>
+  <inputs>
+    <param name="input1" type="data" label="non-empty input">
+      <validator type="empty_dataset" />
+    </param>
+  </inputs>
+  <outputs>
+    <data name="out_file1" from_work_dir="out1" />
+  </outputs>
+  <tests>
+  </tests>
+  <help>
+  </help>
+</tool>
diff --git a/test/functional/tools/validation_repeat.xml b/test/functional/tools/validation_repeat.xml
new file mode 100644
index 0000000..ad9ee1b
--- /dev/null
+++ b/test/functional/tools/validation_repeat.xml
@@ -0,0 +1,57 @@
+<tool id="validation_repeat" name="Validation (default)">
+  <command>
+    #for $r in $r1
+    echo "${r.text}" >> out1;
+    #end for
+    #for $r in $r2
+    echo "${r.text}" >> out2;
+    #end for
+  </command>
+  <inputs>
+    <repeat name="r1" title="Repeat 1">
+      <param name="text" type="text" label="text input" />
+    </repeat>
+    <repeat name="r2" title="Repeat 2">
+      <param name="text" type="text" label="text input">
+        <validator type="empty_field" />
+        <sanitizer>
+          <valid initial="none">
+            <add value="a"/>
+            <add value="b"/>
+            <add value="d"/>
+            <add value="e"/>
+          </valid>
+          <mapping initial="none">
+            <add source="@" target="c"/>
+          </mapping>
+        </sanitizer>
+      </param>
+    </repeat>
+  </inputs>
+  <outputs>
+    <data name="out_file1" from_work_dir="out1" />
+    <data name="out_file2" from_work_dir="out2" />
+  </outputs>
+  <tests>
+    <test>
+      <repeat name="r1">
+        <param name="text" value="" ; echo "moo" />
+      </repeat>
+      <repeat name="r2">
+        <param name="text" value="ab at de" />
+      </repeat>
+      <output name="out_file1">
+         <assert_contents>
+            <has_line line="__dq__ X echo __dq__moo" />
+         </assert_contents>
+      </output>
+      <output name="out_file2">
+         <assert_contents>
+            <has_line line="abcde" />
+         </assert_contents>
+      </output>
+    </test>
+  </tests>
+  <help>
+  </help>
+</tool>
diff --git a/test/functional/tools/validation_sanitizer.xml b/test/functional/tools/validation_sanitizer.xml
new file mode 100644
index 0000000..e8a47a8
--- /dev/null
+++ b/test/functional/tools/validation_sanitizer.xml
@@ -0,0 +1,35 @@
+<tool id="validation_sanitizer" name="Validation (simple sanitizer)">
+  <command>
+    echo "${text}" >> out1;
+  </command>
+  <inputs>
+    <param name="text" type="text" label="text input">
+      <sanitizer>
+        <valid initial="none">
+          <add value="a"/>
+          <add value="b"/>
+          <add value="d"/>
+          <add value="e"/>
+        </valid>
+        <mapping initial="none">
+          <add source="@" target="c"/>
+        </mapping>
+      </sanitizer>
+    </param>
+  </inputs>
+  <outputs>
+    <data name="out_file1" from_work_dir="out1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="text" value="ab at de" />
+      <output name="out_file1">
+         <assert_contents>
+            <has_line line="abcde" />
+         </assert_contents>
+      </output>
+    </test>
+  </tests>
+  <help>
+  </help>
+</tool>
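
A tiny model (again an assumption, not Galaxy's code) of the sanitizer
above with initial="none": only a, b, d, e survive and "@" is rewritten to
"c", so "ab@de" comes out as "abcde":

    VALID = set("abde")
    MAPPING = {"@": "c"}

    def sanitize(text):
        return "".join(MAPPING.get(ch, ch) for ch in text
                       if ch in VALID or ch in MAPPING)

    assert sanitize("ab@de") == "abcde"
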
diff --git a/test/functional/tools/version_command.py b/test/functional/tools/version_command.py
new file mode 100644
index 0000000..7781eb1
--- /dev/null
+++ b/test/functional/tools/version_command.py
@@ -0,0 +1,2 @@
+from __future__ import print_function
+print("4.0.0")
diff --git a/test/functional/tools/version_command_interpreter.xml b/test/functional/tools/version_command_interpreter.xml
new file mode 100644
index 0000000..15b6ce8
--- /dev/null
+++ b/test/functional/tools/version_command_interpreter.xml
@@ -0,0 +1,20 @@
+<tool id="version_command_interpreter" name="version_command_interpreter" version="1.0.0">
+    <version_command interpreter="python">
+        version_command.py
+    </version_command>
+    <command>
+        cp $input $output
+    </command>
+    <inputs>
+        <param name="input" type="data" format="txt" />
+    </inputs>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="simple_line.txt" />
+            <output name="out_file1" checksum="sha1$8156d7ca0f46ed7abac98f82e36cfaddb2aca041" />
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/version_command_plain.xml b/test/functional/tools/version_command_plain.xml
new file mode 100644
index 0000000..8211fde
--- /dev/null
+++ b/test/functional/tools/version_command_plain.xml
@@ -0,0 +1,20 @@
+<tool id="version_command_plain" name="version_command_plain" version="1.0.0">
+    <version_command>
+        echo "4.0.0"
+    </version_command>
+    <command>
+        cp $input $output
+    </command>
+    <inputs>
+        <param name="input" type="data" format="txt" />
+    </inputs>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="simple_line.txt" />
+            <output name="out_file1" checksum="sha1$8156d7ca0f46ed7abac98f82e36cfaddb2aca041" />
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/tools/version_command_tool_dir.xml b/test/functional/tools/version_command_tool_dir.xml
new file mode 100644
index 0000000..87023ee
--- /dev/null
+++ b/test/functional/tools/version_command_tool_dir.xml
@@ -0,0 +1,20 @@
+<tool id="version_command_tool_dir" name="version_command_tool_dir" version="1.0.0">
+    <version_command>
+        python $__tool_directory__/version_command.py
+    </version_command>
+    <command>
+        cp $input $output
+    </command>
+    <inputs>
+        <param name="input" type="data" format="txt" />
+    </inputs>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="simple_line.txt" />
+            <output name="out_file1" checksum="sha1$8156d7ca0f46ed7abac98f82e36cfaddb2aca041" />
+        </test>
+    </tests>
+</tool>
diff --git a/test/functional/workflow.py b/test/functional/workflow.py
new file mode 100644
index 0000000..2d8e466
--- /dev/null
+++ b/test/functional/workflow.py
@@ -0,0 +1,187 @@
+from __future__ import print_function
+
+import os
+import sys
+from json import dumps, load
+from logging import getLogger
+
+from base.interactor import GalaxyInteractorApi, stage_data_in_history
+from base.twilltestcase import TwillTestCase
+from galaxy.tools.test import parse_output_elems, parse_param_elem, require_file, test_data_iter
+from galaxy.util import bunch, parse_xml
+
+log = getLogger( __name__ )
+
+
+class WorkflowTestCase( TwillTestCase ):
+    """
+    Kind of a shell of a test case for running workflow tests. Probably
+    needs to look more like test_toolbox.
+    """
+    workflow_test_file = os.environ.get("GALAXY_TEST_WORKFLOW_FILE", None)
+    user_api_key = None
+    master_api_key = None
+
+    def test_workflow( self, workflow_test_file=None ):
+        maxseconds = 120
+        workflow_test_file = workflow_test_file or WorkflowTestCase.workflow_test_file
+        assert workflow_test_file
+        workflow_test = parse_test_file( workflow_test_file )
+        galaxy_interactor = GalaxyWorkflowInteractor( self )
+
+        # Calling workflow https://github.com/jmchilton/blend4j/blob/master/src/test/java/com/github/jmchilton/blend4j/galaxy/WorkflowsTest.java
+
+        # Import workflow
+        workflow_id, step_id_map, output_defs = self.__import_workflow( galaxy_interactor, workflow_test.workflow )
+
+        # Stage data and history for workflow
+        test_history = galaxy_interactor.new_history()
+        stage_data_in_history( galaxy_interactor, workflow_test.test_data(), test_history )
+
+        # Build workflow parameters
+        uploads = galaxy_interactor.uploads
+        ds_map = {}
+        for step_index, input_dataset_label in workflow_test.input_datasets():
+            # Upload is {"src": "hda", "id": hid}
+            try:
+                upload = uploads[ workflow_test.upload_name( input_dataset_label ) ]
+            except KeyError:
+                raise AssertionError( "Failed to find upload with label %s in uploaded datasets %s" % ( input_dataset_label, uploads ) )
+
+            ds_map[ step_id_map[ step_index ] ] = upload
+
+        payload = {
+            "history": "hist_id=%s" % test_history,
+            "ds_map": dumps( ds_map ),
+            "workflow_id": workflow_id,
+        }
+        run_response = galaxy_interactor.run_workflow( payload ).json()
+
+        outputs = run_response[ 'outputs' ]
+        if not len( outputs ) == len( output_defs ):
+            msg_template = "Number of outputs [%d] created by workflow execution does not equal expected number from input file [%d]."
+            msg = msg_template % ( len( outputs ), len( output_defs ) )
+            raise AssertionError( msg )
+
+        galaxy_interactor.wait_for_ids( test_history, outputs )
+
+        for expected_output_def in workflow_test.outputs:
+            # Get the correct hid
+            name, outfile, attributes = expected_output_def
+            output_testdef = bunch.Bunch( name=name, outfile=outfile, attributes=attributes )
+
+            output_data = outputs[ int( name ) ]
+            try:
+                galaxy_interactor.verify_output( test_history, output_data, output_testdef=output_testdef, shed_tool_id=None, maxseconds=maxseconds )
+            except Exception:
+                for stream in ['stdout', 'stderr']:
+                    stream_output = galaxy_interactor.get_job_stream( test_history, output_data, stream=stream )
+                    print(self._format_stream( stream_output, stream=stream, format=True ), file=sys.stderr)
+                raise
+
+    def __import_workflow( self, galaxy_interactor, workflow ):
+        """
+        Import workflow into Galaxy and return id and mapping of step ids.
+        """
+        workflow_info = galaxy_interactor.import_workflow( workflow ).json()
+        try:
+            workflow_id = workflow_info[ 'id' ]
+        except KeyError:
+            raise AssertionError( "Failed to find id for workflow import response %s" % workflow_info )
+
+        # Well ideally the local copy of the workflow would have the same step ids
+        # as the one imported through the API, but API workflow imports are 1-indexed
+        # and GUI exports 0-indexed as of mid-November 2013.
+
+        imported_workflow = galaxy_interactor.read_workflow( workflow_id )
+        step_id_map = {}
+        local_steps_ids = sorted( int( step_id ) for step_id in workflow[ 'steps' ].keys() )
+        imported_steps_ids = sorted( int( step_id ) for step_id in imported_workflow[ 'steps' ].keys() )
+        for local_step_id, imported_step_id in zip( local_steps_ids, imported_steps_ids ):
+            step_id_map[ local_step_id ] = imported_step_id
+
+        output_defs = []
+        for local_step_id in local_steps_ids:
+            step_def = workflow['steps'][ str( local_step_id ) ]
+            output_defs.extend( step_def.get( "outputs", [] ) )
+
+        return workflow_id, step_id_map, output_defs
+
+
+def parse_test_file( workflow_test_file ):
+    tree = parse_xml( workflow_test_file )
+    root = tree.getroot()
+    input_elems = root.findall( "input" )
+    required_files = []
+    dataset_dict = {}
+    for input_elem in input_elems:
+        name, value, attrib = parse_param_elem( input_elem )
+        require_file( name, value, attrib, required_files )
+        dataset_dict[ name ] = value
+
+    outputs = parse_output_elems( root )
+
+    workflow_file_rel_path = root.get( 'file' )
+    if not workflow_file_rel_path:
+        raise Exception( "Workflow test XML must declare file attribute pointing to workflow under test." )
+
+    # TODO: Normalize this path, prevent it from accessing arbitrary files on system.
+    workflow_file_abs_path = os.path.join( os.path.dirname( workflow_test_file ), workflow_file_rel_path )
+
+    return WorkflowTest(
+        dataset_dict,
+        required_files,
+        workflow_file_abs_path,
+        outputs=outputs,
+    )
+
+
+class WorkflowTest( object ):
+
+    def __init__( self, dataset_dict, required_files, workflow_file, outputs ):
+        self.dataset_dict = dataset_dict
+        self.required_files = required_files
+        self.workflow = load( open( workflow_file, "r" ) )
+        self.outputs = outputs
+
+    def test_data( self ):
+        return test_data_iter( self.required_files )
+
+    def upload_name( self, input_dataset_label ):
+        return self.dataset_dict[ input_dataset_label ]
+
+    def input_datasets( self ):
+        steps = self.workflow[ "steps" ]
+        log.info("in input_datasets with steps %s" % steps)
+        for step_index, step_dict in steps.items():
+            if step_dict.get( "name", None ) == "Input dataset":
+                yield int( step_index ), step_dict[ "inputs" ][0][ "name" ]
+
+
+class GalaxyWorkflowInteractor(GalaxyInteractorApi):
+
+    def __init__( self, twill_test_case ):
+        super(GalaxyWorkflowInteractor, self).__init__( twill_test_case )
+
+    def import_workflow( self, workflow_rep ):
+        payload = { "workflow": dumps( workflow_rep ) }
+        return self._post( "workflows/upload", data=payload )
+
+    def run_workflow( self, data ):
+        return self._post( "workflows", data=data )
+
+    def read_workflow( self, id ):
+        return self._get( "workflows/%s" % id ).json()
+
+    def wait_for_ids( self, history_id, ids ):
+        self.twill_test_case.wait_for( lambda: not all( [ self.__dataset_ready( history_id, id ) for id in ids ] ), maxseconds=120 )
+
+    def __dataset_ready( self, history_id, id ):
+        contents = self._get( 'histories/%s/contents' % history_id ).json()
+        for content in contents:
+
+            if content["id"] == id:
+                state = content[ 'state' ]
+                state_ready = self._state_ready( state, error_msg="Dataset creation failed for dataset with name %s." % content[ 'name' ] )
+                return state_ready
+        return False
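
For orientation, the run payload built in test_workflow has this shape
(all ids below are made-up placeholders): ds_map keys are the *imported*
step ids, each mapped to an upload descriptor:

    from json import dumps

    ds_map = {"13": {"src": "hda", "id": "f2db41e1fa331b3e"}}
    payload = {
        "history": "hist_id=%s" % "a799d38679e985db",
        "ds_map": dumps(ds_map),
        "workflow_id": "ebfb8f50c6abde6d",
    }
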
diff --git a/test/integration/__init__.py b/test/integration/__init__.py
new file mode 100644
index 0000000..d3e8d32
--- /dev/null
+++ b/test/integration/__init__.py
@@ -0,0 +1,6 @@
+"""This module contains Galaxy integration tests.
+
+Tests that start an actual Galaxy server with a particular configuration in
+order to test something that cannot be tested with the default functional/api
+testing configuration.
+"""
diff --git a/test/integration/embedded_pulsar_job_conf.xml b/test/integration/embedded_pulsar_job_conf.xml
new file mode 100644
index 0000000..cabf0c7
--- /dev/null
+++ b/test/integration/embedded_pulsar_job_conf.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<!-- A job config for testing the Pulsar embedded runner -->
+<job_conf>
+    <plugins>
+        <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner" workers="4"/>
+        <plugin id="pulsar_embed" type="runner" load="galaxy.jobs.runners.pulsar:PulsarEmbeddedJobRunner">
+        </plugin>
+    </plugins>
+    <handlers>
+        <handler id="main"/>
+    </handlers>
+    <destinations default="pulsar_embed">
+        <destination id="local" runner="local">
+        </destination>
+        <destination id="pulsar_embed" runner="pulsar_embed">
+        </destination>
+    </destinations>
+    <tools>
+        <tool id="upload1" destination="local" />
+    </tools>
+</job_conf>
diff --git a/test/integration/test_pulsar_embedded.py b/test/integration/test_pulsar_embedded.py
new file mode 100644
index 0000000..0399809
--- /dev/null
+++ b/test/integration/test_pulsar_embedded.py
@@ -0,0 +1,27 @@
+"""Integration tests for the Pulsar embedded runner."""
+
+import os
+
+from base import integration_util
+
+SCRIPT_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
+EMBEDDED_PULSAR_JOB_CONFIG_FILE = os.path.join(SCRIPT_DIRECTORY, "embedded_pulsar_job_conf.xml")
+
+
+class EmbeddedPulsarIntegrationTestCase(integration_util.IntegrationTestCase):
+    """Start a Pulsar job."""
+
+    framework_tool_and_types = True
+
+    @classmethod
+    def handle_galaxy_config_kwds(cls, config):
+        config["job_config_file"] = EMBEDDED_PULSAR_JOB_CONFIG_FILE
+
+    def test_tool_simple_constructs(self):
+        self._run_tool_test("simple_constructs")
+
+    def test_multi_data_param(self):
+        self._run_tool_test("multi_data_param")
+
+    def test_work_dir_outputs(self):
+        self._run_tool_test("output_filter")
diff --git a/test/integration/test_resolvers.py b/test/integration/test_resolvers.py
new file mode 100644
index 0000000..5b134ac
--- /dev/null
+++ b/test/integration/test_resolvers.py
@@ -0,0 +1,104 @@
+"""Integration tests for conda dependency resolution."""
+import os
+import shutil
+from tempfile import mkdtemp
+
+from base import integration_util
+from base.api import ApiTestCase
+
+GNUPLOT = {u'version': u'4.6', u'type': u'package', u'name': u'gnuplot'}
+
+
+class CondaResolutionIntegrationTestCase(integration_util.IntegrationTestCase, ApiTestCase):
+    """Test conda dependency resolution through API."""
+
+    framework_tool_and_types = True
+
+    @classmethod
+    def handle_galaxy_config_kwds(cls, config):
+        cls.conda_tmp_prefix = mkdtemp()
+        config["use_cached_dep_manager"] = True
+        config["conda_auto_init"] = True
+        config["conda_prefix"] = os.path.join(cls.conda_tmp_prefix, 'conda')
+
+    @classmethod
+    def tearDownClass(cls):
+        """Shutdown Galaxy server and cleanup temp directory."""
+        shutil.rmtree(cls.conda_tmp_prefix)
+        cls._test_driver.tear_down()
+        cls._app_available = False
+
+    def test_dependency_before_install( self ):
+        """
+        Test that dependency is not installed (response['dependency_type'] is None).
+        """
+        data = GNUPLOT
+        create_response = self._get( "dependency_resolvers/dependency", data=data, admin=True )
+        self._assert_status_code_is( create_response, 200 )
+        response = create_response.json()
+        assert response['dependency_type'] is None and response['exact']
+
+    def test_dependency_install( self ):
+        """
+        Test installation of GNUPLOT dependency.
+        """
+        data = GNUPLOT
+        create_response = self._post( "dependency_resolvers/dependency", data=data, admin=True )
+        self._assert_status_code_is( create_response, 200 )
+        response = create_response.json()
+        assert response['dependency_type'] == 'conda' and response['exact']
+
+    def test_dependency_install_not_exact(self):
+        """
+        Test installation of gnuplot with a version that does not exist.
+        Should still resolve through conda, but not as an exact match.
+        """
+        data = GNUPLOT.copy()
+        data['version'] = '4.9999'
+        create_response = self._post("dependency_resolvers/dependency", data=data, admin=True)
+        self._assert_status_code_is(create_response, 200)
+        response = create_response.json()
+        assert response['dependency_type'] == 'conda' and not response['exact']
+
+    def test_dependency_status_installed_exact( self ):
+        """
+        GET request to dependency_resolvers/dependency with GNUPLOT dependency.
+        Should be installed through conda (response['dependency_type'] == 'conda').
+        """
+        data = GNUPLOT
+        create_response = self._get( "dependency_resolvers/dependency", data=data, admin=True )
+        self._assert_status_code_is( create_response, 200 )
+        response = create_response.json()
+        assert response['dependency_type'] == 'conda' and response['exact']
+
+    def test_dependency_status_installed_not_exact( self ):
+        """
+        GET request to dependency_resolvers/dependency with GNUPLOT dependency.
+        Should be installed through conda (response['dependency_type'] == 'conda'),
+        but version 4.9999 does not exist.
+        """
+        data = GNUPLOT.copy()
+        data['version'] = '4.9999'
+        create_response = self._get( "dependency_resolvers/dependency", data=data, admin=True )
+        self._assert_status_code_is( create_response, 200 )
+        response = create_response.json()
+        assert response['dependency_type'] == 'conda' and not response['exact']
+
+    def test_conda_install_through_tools_api( self ):
+        tool_id = 'mulled_example_multi_1'
+        endpoint = "tools/%s/install_dependencies" % tool_id
+        data = {'id': tool_id}
+        create_response = self._post(endpoint, data=data, admin=True)
+        self._assert_status_code_is( create_response, 200 )
+        response = create_response.json()
+        assert any(d['dependency_type'] == 'conda' for d in response)
+        endpoint = "tools/%s/build_dependency_cache" % tool_id
+        create_response = self._post(endpoint, data=data, admin=True)
+        self._assert_status_code_is( create_response, 200 )
+
+    def test_conda_clean( self ):
+        endpoint = 'dependency_resolvers/clean'
+        create_response = self._post(endpoint, data={}, admin=True)
+        self._assert_status_code_is(create_response, 200)
+        response = create_response.json()
+        assert response == "OK"
diff --git a/test/manual/__init__.py b/test/manual/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/manual/launch_and_run.sh b/test/manual/launch_and_run.sh
new file mode 100755
index 0000000..de38930
--- /dev/null
+++ b/test/manual/launch_and_run.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+#set -e
+
+# Open and view the contents of a docker-galaxy-stable container.
+# docker run --rm -i -t bgruening/galaxy-stable /bin/bash
+
+pwd_dir=$(pwd)
+GALAXY_ROOT=`dirname $0`/../..
+cd $GALAXY_ROOT
+GALAXY_ROOT=$(pwd)
+SCRIPT_DIR="$GALAXY_ROOT/test/manual"
+
+manual_test_script=$1
+shift
+manual_test_script_args="$@"
+
+GALAXY_VIRTUAL_ENV="${GALAXY_VIRTUAL_ENV:-.venv}"
+
+# Docker options defined to reflect run_tests.sh names and behavior.
+DOCKER_DEFAULT_IMAGE='bgruening/galaxy-stable'
+
+DOCKER_EXTRA_ARGS=${DOCKER_ARGS:-""}
+DOCKER_RUN_EXTRA_ARGS=${DOCKER_RUN_EXTRA_ARGS:-""}
+DOCKER_IMAGE=${DOCKER_IMAGE:-${DOCKER_DEFAULT_IMAGE}}
+# Root for Galaxy in the docker container
+DOCKER_GALAXY_ROOT=${DOCKER_GALAXY_ROOT:-/galaxy-central}
+
+# Location of this script's directory when mounted into the container.
+DOCKER_SCRIPT_DIR=/etc/galaxy/manual
+
+GALAXY_PORT=${GALAXY_PORT:-"any_free"}
+if [ "$GALAXY_PORT" == "any_free" ];
+then
+    GALAXY_PORT=`python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()'`
+fi
+
+GALAXY_URL=${GALAXY_URL:-http://localhost:${GALAXY_PORT}}
+GALAXY_MASTER_API_KEY=${GALAXY_MASTER_API_KEY:-HSNiugRFvgT574F43jZ7N9F3}
+
+LOGS_DIR=`cd "${LOGS_DIR:-$pwd_dir}"; pwd`
+WORK_DIR=`mktemp --tmpdir=$LOGS_DIR -d -t gxperfXXXX`
+echo "WORK_DIR is ${WORK_DIR}"
+NAME=`basename $WORK_DIR`
+
+GALAXY_HANDLER_NUMPROCS=${GALAXY_HANDLER_NUMPROCS:-1}
+
+DOCKER_ENVIRONMENT="\
+-e NONUSE=nodejs,proftp,reports \
+-e GALAXY_HANDLER_NUMPROCS=$GALAXY_HANDLER_NUMPROCS \
+-e GALAXY_CONFIG_OVERRIDE_TOOL_CONFIG_FILE=$DOCKER_GALAXY_ROOT/test/functional/tools/samples_tool_conf.xml \
+-e GALAXY_CONFIG_ENABLE_BETA_WORKFLOW_MODULES=true \
+-e GALAXY_CONFIG_OVERRIDE_ENABLE_BETA_TOOL_FORMATS=true \
+"
+
+if [ "$manual_test_script" == "workflows_scaling" ];
+then
+    DOCKER_ENVIRONMENT="$DOCKER_ENVIRONMENT -e GALAXY_CONFIG_JOB_CONFIG_FILE=$DOCKER_SCRIPT_DIR/workflow_job_conf.xml "
+fi
+
+# Mount logs, local galaxy changes, and local galaxy config.
+DOCKER_VOLUMES="\
+-v $WORK_DIR:/galaxy_logs \
+-v $GALAXY_ROOT/lib:$DOCKER_GALAXY_ROOT/lib \
+-v $GALAXY_ROOT/test:/galaxy-central/test \
+-v $SCRIPT_DIR:$DOCKER_SCRIPT_DIR \
+"
+DOCKER_RUN_ARGS="$DOCKER_RUN_EXTRA_ARGS -d -p ${GALAXY_PORT}:80 -i -t $DOCKER_VOLUMES $DOCKER_ENVIRONMENT"
+
+docker_image_id=`docker $DOCKER_EXTRA_ARGS run $DOCKER_RUN_ARGS ${DOCKER_IMAGE}`
+
+echo "Docker container with id $docker_image_id launched. Inspect with 'docker exec -i -t $docker_image_id /bin/bash'."
+
+# Wait for Galaxy to be available
+for i in {1..40}; do curl --silent --fail ${GALAXY_URL}/api/version && break || sleep 5; done
+
+${GALAXY_VIRTUAL_ENV}/bin/python test/manual/$manual_test_script.py --api_key ${GALAXY_MASTER_API_KEY} --host ${GALAXY_URL} $manual_test_script_args
+docker exec -i -t $docker_image_id /bin/bash -c "cp /home/galaxy/*log /galaxy_logs"
+docker kill $docker_image_id
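
The any_free port one-liner above, expanded for readability (same trick:
bind to port 0 and let the kernel choose):

    import socket

    def any_free_port():
        s = socket.socket()
        s.bind(("", 0))
        port = s.getsockname()[1]
        s.close()
        return port

    print(any_free_port())
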
diff --git a/test/manual/workflow_job_conf.xml b/test/manual/workflow_job_conf.xml
new file mode 100644
index 0000000..0e5ebef
--- /dev/null
+++ b/test/manual/workflow_job_conf.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<job_conf>
+    <plugins workers="2">
+        <plugin id="slurm" type="runner" load="galaxy.jobs.runners.slurm:SlurmJobRunner">
+            <param id="drmaa_library_path">/usr/lib/slurm-drmaa/lib/libdrmaa.so</param>
+        </plugin>
+    </plugins>
+    <handlers default="handlers">
+        <handler id="handler0" tags="handlers"/>
+        <handler id="noophandler" />
+    </handlers>
+    <destinations default="noopcluster">
+        <destination id="cluster" runner="slurm" handler="handlers">
+        </destination>
+        <destination id="noopcluster" runner="slurm" handler="noophandler">
+        </destination>
+    </destinations>
+    <limits>
+    </limits>
+    <tools>
+        <tool id="upload1" destination="cluster" />
+        <tool id="create_input_collection" destination="cluster" />
+        <tool id="split" destination="cluster" />
+        <tool id="cat" destination="noopcluster" handler="noophandler" />
+    </tools>
+</job_conf>
diff --git a/test/manual/workflows_scaling.py b/test/manual/workflows_scaling.py
new file mode 100644
index 0000000..354c8a5
--- /dev/null
+++ b/test/manual/workflows_scaling.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python
+"""A small script to drive workflow performance testing.
+
+$ ./test/manual/launch_and_run.sh workflows_scaling --collection_size 500 --workflow_depth 4
+$ .venv/bin/python scripts/summarize_timings.py --file /tmp/<work_dir>/handler1.log --pattern 'Workflow step'
+$ .venv/bin/python scripts/summarize_timings.py --file /tmp/<work_dir>/handler1.log --pattern 'Created step'
+"""
+import functools
+import json
+import os
+import random
+import sys
+from argparse import ArgumentParser
+from threading import Thread
+from uuid import uuid4
+
+import requests
+from bioblend import galaxy
+
+galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
+sys.path[1:1] = [ os.path.join( galaxy_root, "lib" ), os.path.join( galaxy_root, "test" ) ]
+
+from api import helpers
+from api.workflows_format_2.converter import python_to_workflow
+
+LONG_TIMEOUT = 1000000000
+DESCRIPTION = "Script to exercise the workflow engine."
+
+
+def main(argv=None):
+    """Entry point for workflow driving."""
+    arg_parser = ArgumentParser(description=DESCRIPTION)
+    arg_parser.add_argument("--api_key", default="testmasterapikey")
+    arg_parser.add_argument("--host", default="http://localhost:8080/")
+
+    arg_parser.add_argument("--collection_size", type=int, default=20)
+
+    arg_parser.add_argument("--schedule_only_test", default=False, action="store_true")
+    arg_parser.add_argument("--workflow_depth", type=int, default=10)
+    arg_parser.add_argument("--workflow_count", type=int, default=1)
+
+    group = arg_parser.add_mutually_exclusive_group()
+    group.add_argument("--two_outputs", default=False, action="store_true")
+    group.add_argument("--wave_simple", default=False, action="store_true")
+
+    args = arg_parser.parse_args(argv)
+
+    uuid = str(uuid4())
+    workflow_struct = _workflow_struct(args, uuid)
+
+    has_input = any([s.get("type", "tool") == "input_collection" for s in workflow_struct])
+    if not has_input:
+        uuid = None
+
+    gi = _gi(args)
+
+    workflow = python_to_workflow(workflow_struct)
+    workflow_info = gi.workflows.import_workflow_json(workflow)
+    workflow_id = workflow_info["id"]
+
+    target = functools.partial(_run, args, gi, workflow_id, uuid)
+    threads = []
+    for i in range(args.workflow_count):
+        t = Thread(target=target)
+        t.daemon = True
+        t.start()
+        threads.append(t)
+
+    for t in threads:
+        t.join()
+
+
+def _run(args, gi, workflow_id, uuid):
+    dataset_populator = GiDatasetPopulator(gi)
+    dataset_collection_populator = GiDatasetCollectionPopulator(gi)
+
+    history_id = dataset_populator.new_history()
+    if uuid is not None:
+        contents = []
+        for i in range(args.collection_size):
+            contents.append("random dataset number #%d" % i)
+        hdca = dataset_collection_populator.create_list_in_history( history_id, contents=contents ).json()
+        label_map = {
+            uuid: {"src": "hdca", "id": hdca["id"]},
+        }
+    else:
+        label_map = {}
+
+    workflow_request = dict(
+        history="hist_id=%s" % history_id,
+    )
+    workflow_request[ "inputs" ] = json.dumps( label_map )
+    url = "workflows/%s/usage" % ( workflow_id )
+    invoke_response = dataset_populator._post( url, data=workflow_request ).json()
+    invocation_id = invoke_response["id"]
+    workflow_populator = GiWorkflowPopulator(gi)
+    if args.schedule_only_test:
+        workflow_populator.wait_for_invocation(
+            workflow_id,
+            invocation_id,
+            timeout=LONG_TIMEOUT,
+        )
+    else:
+        workflow_populator.wait_for_workflow(
+            workflow_id,
+            invocation_id,
+            history_id,
+            timeout=LONG_TIMEOUT,
+        )
+
+
+class GiPostGetMixin:
+    """Mixin for adapting Galaxy API testing helpers to bioblend."""
+
+    def _get(self, route):
+        return self._gi.make_get_request(self.__url(route))
+
+    def _post(self, route, data={}):
+        data = data.copy()
+        data['key'] = self._gi.key
+        return requests.post(self.__url(route), data=data)
+
+    def __url(self, route):
+        return self._gi.url + "/" + route
+
+
+class GiDatasetPopulator(helpers.BaseDatasetPopulator, GiPostGetMixin):
+    """Utility class for dealing with datasets and histories."""
+
+    def __init__(self, gi):
+        """Construct a dataset populator from a bioblend GalaxyInstance."""
+        self._gi = gi
+
+
+class GiDatasetCollectionPopulator(helpers.BaseDatasetCollectionPopulator, GiPostGetMixin):
+    """Utility class for dealing with dataset collections."""
+
+    def __init__(self, gi):
+        """Construct a dataset collection populator from a bioblend GalaxyInstance."""
+        self._gi = gi
+        self.dataset_populator = GiDatasetPopulator(gi)
+
+    def _create_collection(self, payload):
+        create_response = self._post( "dataset_collections", data=payload )
+        return create_response
+
+
+class GiWorkflowPopulator(helpers.BaseWorkflowPopulator, GiPostGetMixin):
+    """Utility class for dealing with workflows."""
+
+    def __init__(self, gi):
+        """Construct a workflow populator from a bioblend GalaxyInstance."""
+        self._gi = gi
+        self.dataset_populator = GiDatasetPopulator(gi)
+
+
+def _workflow_struct(args, input_uuid):
+    if args.two_outputs:
+        return _workflow_struct_two_outputs(args, input_uuid)
+    elif args.wave_simple:
+        return _workflow_struct_wave(args, input_uuid)
+    else:
+        return _workflow_struct_simple(args, input_uuid)
+
+
+def _workflow_struct_simple(args, input_uuid):
+    workflow_struct = [
+        {"tool_id": "create_input_collection", "state": {"collection_size": args.collection_size}},
+        {"tool_id": "cat", "state": {"input1": _link(0, "output")}}
+    ]
+
+    workflow_depth = args.workflow_depth
+    for i in range(workflow_depth):
+        link = str(i + 1) + "#out_file1"
+        workflow_struct.append(
+            {"tool_id": "cat", "state": {"input1": _link(link)}}
+        )
+    return workflow_struct
+
+
+def _workflow_struct_two_outputs(args, input_uuid):
+    workflow_struct = [
+        {"type": "input_collection", "uuid": input_uuid},
+        {"tool_id": "cat", "state": {"input1": _link(0), "input2": _link(0)}}
+    ]
+
+    workflow_depth = args.workflow_depth
+    for i in range(workflow_depth):
+        link1 = str(i + 1) + "#out_file1"
+        link2 = str(i + 1) + "#out_file2"
+        workflow_struct.append(
+            {"tool_id": "cat", "state": {"input1": _link(link1), "input2": _link(link2)}}
+        )
+    return workflow_struct
+
+
+def _workflow_struct_wave(args, input_uuid):
+    workflow_struct = [
+        {"tool_id": "create_input_collection", "state": {"collection_size": args.collection_size}},
+        {"tool_id": "cat_list", "state": {"input1": _link(0, "output")}}
+    ]
+
+    workflow_depth = args.workflow_depth
+    for i in range(workflow_depth):
+        step = i + 2
+        if step % 2 == 1:
+            workflow_struct += [{"tool_id": "cat_list", "state": {"input1": _link(step - 1, "output")}}]
+        else:
+            workflow_struct += [{"tool_id": "split", "state": {"input1": _link(step - 1, "out_file1") }}]
+    return workflow_struct
+
+
+def _link(link, output_name=None):
+    if output_name is not None:
+        link = str(link) + "#" + output_name
+    return {"$link": link}
+
+
+def _gi(args):
+    gi = galaxy.GalaxyInstance(args.host, key=args.api_key)
+    name = "wftest-user-%d" % random.randint(0, 1000000)
+
+    user = gi.users.create_local_user(name, "%s at galaxytesting.dev" % name, "pass123")
+    user_id = user["id"]
+    api_key = gi.users.create_user_apikey(user_id)
+    user_gi = galaxy.GalaxyInstance(args.host, api_key)
+    return user_gi
+
+
+if __name__ == "__main__":
+    main()
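
For a concrete picture, _workflow_struct_simple with the defaults and
--workflow_depth 2 yields a structure like this (each "$link" names an
upstream step output):

    workflow_struct = [
        {"tool_id": "create_input_collection",
         "state": {"collection_size": 20}},
        {"tool_id": "cat", "state": {"input1": {"$link": "0#output"}}},
        {"tool_id": "cat", "state": {"input1": {"$link": "1#out_file1"}}},
        {"tool_id": "cat", "state": {"input1": {"$link": "2#out_file1"}}},
    ]
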
diff --git a/test/qunit/Gruntfile.js b/test/qunit/Gruntfile.js
new file mode 100644
index 0000000..650fd7d
--- /dev/null
+++ b/test/qunit/Gruntfile.js
@@ -0,0 +1,33 @@
+module.exports = function( grunt ){
+    grunt.initConfig({
+        pkg: grunt.file.readJSON( 'package.json' ),
+
+        qunit: {
+            all: [ 'tests/**/*.html' ],
+            options: {
+            }
+        },
+
+        watch: {
+            // watch for changes in the src dir
+            files: [ 'tests/**.js', '../../static/scripts/**/**.js' ],
+            tasks: [ 'default' ]
+        }
+    });
+
+    // use 'grunt --test=my-tests.html' or 'grunt watch --test=my-tests.html'
+    //  to only run the tests in tests/my-tests.html
+    if( grunt.option( 'test' ) ){
+        grunt.config.set( 'qunit.all', 'tests/' + grunt.option( 'test' ) );
+        grunt.log.writeln( '(only testing ' + grunt.config.get( 'qunit.all' ) + ')' );
+    }
+
+    grunt.loadNpmTasks( 'grunt-contrib-qunit' );
+
+    // use 'grunt watch' to have the qunit tests run when scripts or tests are changed
+    grunt.loadNpmTasks( 'grunt-contrib-watch' );
+
+    // register one or more task lists (you should ALWAYS have a "default" task list)
+    grunt.registerTask( 'default', [ 'qunit' ] );
+};
+
diff --git a/test/qunit/README.txt b/test/qunit/README.txt
new file mode 100644
index 0000000..113f632
--- /dev/null
+++ b/test/qunit/README.txt
@@ -0,0 +1,33 @@
+Running Tests in qunit Directory
+--------------------------------
+
+From Command-line (with Grunt and Phantom):
+
+ % npm install -g grunt-cli
+ % npm install
+ % grunt
+
+To watch for changes to scripts and tests and automatically re-run tests:
+
+ % grunt watch
+
+You can limit the tests run to a single file using the test option (note
+that 'tests/' is prepended to the path):
+
+ % grunt --test=metrics-logger.html
+ (only testing tests/metrics-logger.html)
+
+ % grunt watch --test=metrics-logger.html
+ (only testing tests/metrics-logger.html)
+
+From Web Browser (no additional dependencies):
+
+  Just open test HTML file in Web Browser.
+
+Note:
+	The combination of requirejs and phantomjs used to load some of these
+scripts can lead to error suppression. If any of the dependencies of the
+scripts you're requiring throw an error, grunt+phantom+require will not
+show a visible error (even with --verbose and/or --debug). You will instead
+see a timeout error thrown from phantomjs.
+    This generally(?) applies only to errors when evaluating the dependency.
diff --git a/test/qunit/package.json b/test/qunit/package.json
new file mode 100644
index 0000000..f075457
--- /dev/null
+++ b/test/qunit/package.json
@@ -0,0 +1,11 @@
+{
+  "name": "galaxy-qunit-tests",
+  "version": "1.0.0",
+  "dependencies": {
+    "grunt": "^1.0.1",
+    "grunt-cli": "^1.2.0",
+    "grunt-contrib-qunit": "^1.2.0",
+    "grunt-contrib-watch": "^1.0.0",
+    "phantomjs-prebuilt": "^2.1.13"
+  }
+}
diff --git a/test/qunit/scripts b/test/qunit/scripts
new file mode 120000
index 0000000..9ae5a45
--- /dev/null
+++ b/test/qunit/scripts
@@ -0,0 +1 @@
+../../static/scripts
\ No newline at end of file
diff --git a/test/qunit/test-app.js b/test/qunit/test-app.js
new file mode 100644
index 0000000..267e822
--- /dev/null
+++ b/test/qunit/test-app.js
@@ -0,0 +1,40 @@
+/** Creates a generic/global Galaxy environment, loads shared libraries and a fake server */
+define([
+    "jquery",
+    "sinon-qunit",
+    "libs/bootstrap",
+    "test-data/bootstrapped",
+    "test-data/fakeserver",
+    "galaxy",
+    "libs/jquery/select2",
+    "libs/jquery/jstorage"
+], function(
+    $,
+    sinon,
+    bootstrap,
+    bootstrapped,
+    serverdata,
+    appBase
+){
+    return {
+        create: function() {
+            window.Galaxy = new appBase.GalaxyApp( bootstrapped );
+            window.Galaxy.currHistoryPanel = { model: new Backbone.Model() };
+            window.Galaxy.emit = {
+                debug : function(){},
+                error : function( v ){ window.console.error( v ) }
+            };
+            window.WAIT_FADE = 300;
+            window.fakeserver = sinon.fakeServer.create();
+            for (var route in serverdata) {
+                window.fakeserver.respondWith('GET', Galaxy.root + route, [ 200, { 'Content-Type': 'application/json' }, serverdata[ route ].data ]);
+            }
+        },
+        destroy: function() {
+            if (window.fakeserver) {
+                window.fakeserver.restore();
+                delete window.fakeserver;
+            }
+        }
+    };
+});
\ No newline at end of file
diff --git a/test/qunit/test-common.js b/test/qunit/test-common.js
new file mode 100644
index 0000000..a1649a6
--- /dev/null
+++ b/test/qunit/test-common.js
@@ -0,0 +1,134 @@
+var pathname = location.pathname;
+var qunit_absolute_directory = pathname.substring( 0, pathname.lastIndexOf( "qunit/" ) + 6 );
+var filename = pathname.substr( pathname.lastIndexOf( "/" ) + 1 );
+
+
+function bridge_phantomjs( QUnit ) {
+    // Needed because the grunt task will attempt to inject this bridge assuming
+    // QUnit is loaded directly - not using require.js.
+    // https://github.com/gruntjs/grunt-contrib-qunit/blob/master/phantomjs/bridge.js
+    var userAgent = navigator && navigator.userAgent;
+    if( ! userAgent || userAgent.indexOf( "PhantomJS" ) < 0 ) {
+        return;
+    }
+
+    /*global QUnit:true, alert:true*/
+    (function () {
+      'use strict';
+      // Don't re-order tests.
+      // QUnit.config.reorder = false;
+
+      // Send messages to the parent PhantomJS process via alert! Good times!!
+      function sendMessage() {
+        var args = [].slice.call(arguments);
+        alert(JSON.stringify(args));
+      }
+
+      // These methods connect QUnit to PhantomJS.
+      QUnit.log(function(obj) {
+        // What is this I don’t even
+        if (obj.message === '[object Object], undefined:undefined') { return; }
+        // Parse some stuff before sending it.
+        var actual = QUnit.jsDump.parse(obj.actual);
+        var expected = QUnit.jsDump.parse(obj.expected);
+        // Send it.
+        sendMessage('qunit.log', obj.result, actual, expected, obj.message, obj.source);
+      });
+
+      QUnit.testStart(function(obj) {
+        sendMessage('qunit.testStart', obj.name);
+      });
+
+      QUnit.testDone(function(obj) {
+        sendMessage('qunit.testDone', obj.name, obj.failed, obj.passed, obj.total);
+      });
+
+      QUnit.moduleStart(function(obj) {
+        sendMessage('qunit.moduleStart', obj.name);
+      });
+
+      QUnit.moduleDone(function(obj) {
+        sendMessage('qunit.moduleDone', obj.name, obj.failed, obj.passed, obj.total);
+      });
+
+      QUnit.begin(function() {
+        sendMessage('qunit.begin');
+      });
+
+      QUnit.done(function(obj) {
+        sendMessage('qunit.done', obj.failed, obj.passed, obj.total, obj.runtime);
+      });
+    }());
+}
+
+
+// Configure require.js for unit testing.
+require.config({
+    baseUrl: qunit_absolute_directory + "scripts",
+    paths: {
+        // Custom paths for Galaxy dependencies...
+        "jquery": "libs/jquery/jquery",
+        "backbone": "libs/backbone",
+        // Custom paths for qunit testing dependencies...
+        "QUnit": qunit_absolute_directory + "test-libs/qunit-1.23.1", // .. because baseUrl is scripts to match Galaxy.
+        "sinon": qunit_absolute_directory + "test-libs/sinon-1.17.3",
+        "sinon-qunit": qunit_absolute_directory + "test-libs/sinon-qunit-1.0.0",
+        // (optional) test data
+        "test-data" : qunit_absolute_directory + "test-data/",
+        // (optional) test app/environment with server data
+        "test-app"  : qunit_absolute_directory + "test-app"
+    },
+    shim: {
+        // Ensure correct Qunit order in light of requirejs loading...
+        // https://gist.github.com/drewwells/920405
+        "QUnit": {
+            exports: "QUnit",
+        },
+        "sinon": {
+            exports: "sinon"
+        },
+        "sinon-qunit": {
+            deps: [ 'sinon', "QUnit" ],
+            exports: "sinon"  // Odd but seems to work
+        },
+        "libs/underscore": {
+            exports: "_"
+        },
+        "backbone": {
+            deps: [ 'libs/underscore', 'jquery' ],
+            exports: "Backbone"
+        },
+        "libs/backbone": {
+            deps: [ 'libs/underscore', 'jquery' ],
+            exports: "Backbone"
+        }
+    }
+} );
+
+// Mock out Galaxy globals.
+var Galaxy = {
+    root: '/'
+};
+
+require( [ "jquery", "QUnit" ], function( $, QUnit ) {
+    QUnit.config.autostart = false;
+    bridge_phantomjs( QUnit );
+    // Bootstrap HTML for displaying Qunit results.
+    $('head').append( $('<link rel="stylesheet" type="text/css"  />')
+        .attr( "href", qunit_absolute_directory + "test-libs/qunit-1.23.1.css") );
+    $('body').append( $('<div id="qunit">') );
+    $('body').append( $('<div id="qunit-fixture">') );
+
+    var test_module_path = "./" + filename.replace( ".html", ".js" );
+
+    // underscore + backbone loaded here because they are assumed globals by
+    // much of the Galaxy client code.
+    require( [ "libs/underscore", "libs/backbone" ], function( _, Backbone ) {
+        require( [ test_module_path ], function( ) {
+            QUnit.load();
+            QUnit.start();
+        } );
+    } );
+});
+
diff --git a/test/qunit/test-data/bootstrapped.js b/test/qunit/test-data/bootstrapped.js
new file mode 100644
index 0000000..906d414
--- /dev/null
+++ b/test/qunit/test-data/bootstrapped.js
@@ -0,0 +1,31 @@
+define([], function(){
+    return {
+        config  : {
+            "allow_user_deletion": false,
+            "allow_user_creation": true,
+            "wiki_url": "https://wiki.galaxyproject.org/",
+            "ftp_upload_site": null,
+            "support_url": "https://wiki.galaxyproject.org/Support",
+            "allow_user_dataset_purge": false,
+            "allow_library_path_paste": false,
+            "user_library_import_dir": null,
+            "terms_url": null,
+            "ftp_upload_dir": null,
+            "library_import_dir": null,
+            "logo_url": null,
+            "enable_unique_workflow_defaults": false
+        },
+        user    : {
+            "username": "test",
+            "quota_percent": null,
+            "total_disk_usage": 61815527,
+            "nice_total_disk_usage": "59.0 MB",
+            "email": "test at test.test",
+            "tags_used": [
+              "test"
+            ],
+            "model_class": "User",
+            "id": "f2db41e1fa331b3e"
+        }
+    };
+});
\ No newline at end of file
diff --git a/test/qunit/test-data/fakeserver.js b/test/qunit/test-data/fakeserver.js
new file mode 100644
index 0000000..4ed17fb
--- /dev/null
+++ b/test/qunit/test-data/fakeserver.js
@@ -0,0 +1,11 @@
+define([], function() { return {
+    'api/datatypes/mapping': {
+        data: '{"ext_to_class_name" : {"txt" : "Text", "data":"Data","tabular":"Tabular", "binary": "Binary", "bam": "Bam" }, "class_to_classes": { "Data": { "Data": true }, "Text": { "Text": true, "Data": true }, "Tabular": { "Tabular": true, "Text": true, "Data": true }, "Binary": { "Data": true, "Binary": true }, "Bam": { "Data": true, "Binary": true, "Bam": true }}}'
+    },
+    'api/datatypes': {
+        data: '["RData", "ab1", "affybatch", "txt"]'
+    },
+    'api/tools/test/build': {
+        data: '{ "id": "test", "name": "_name", "version": "_version", "description": "_description", "display": "true", "requirements": [ { "name": "req_name_a", "version": "req_version_a" }, { "name": "req_name_b", "version": "req_version_b" } ], "inputs": [ { "name": "a", "type": "text" }, { "name": "b",  "type": "conditional", "test_param": { "name": "c",  "type": "select", "value": "h", "options": [ [ "d", "d", false ], [ "h", "h", false ] ] }, "cases": [ { "name": "d", "value": "d" [...]
+    }
+}});
\ No newline at end of file
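+// Illustrative sketch (an annotation added here, not part of the upstream
+// file): each key above is an API route and `data` its canned JSON body. How
+// test-app actually wires these is assumed here, but they are shaped for a
+// sinon fake server, roughly:
+//
+//     require( [ "sinon", "test-data/fakeserver" ], function( sinon, routes ) {
+//         var server = sinon.fakeServer.create();
+//         for ( var route in routes ) {
+//             server.respondWith( "GET", route,
+//                 [ 200, { "Content-Type": "application/json" }, routes[ route ].data ] );
+//         }
+//         // ...exercise code that requests these routes, then flush:
+//         server.respond();
+//     } );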
diff --git a/test/qunit/test-data/job-dag-1.js b/test/qunit/test-data/job-dag-1.js
new file mode 100644
index 0000000..627c8c0
--- /dev/null
+++ b/test/qunit/test-data/job-dag-1.js
@@ -0,0 +1,534 @@
+define([], function(){
+// ============================================================================
+var tools = {
+    "upload1": {},
+    "Show beginning1": {},
+    "Show tail1": {},
+    "random_lines1": {},
+    "__SET_METADATA__": {},
+    "cat1": {}
+};
+
+// ============================================================================
+// plain 3 step job chain
+var jobs1 = [
+    {
+        "tool_id": "upload1",
+        "update_time": "2014-10-03T15:12:25.904033",
+        "inputs": {},
+        "outputs": {
+            "output0": {
+                "src": "hda",
+                "id": "8c959c9304a2bc4b"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-03T15:12:22.589152",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "8a81cf6f989c4467",
+        "tool": null
+    },
+    {
+        "tool_id": "Show beginning1",
+        "update_time": "2014-10-03T15:14:04.328484",
+        "inputs": {
+            "input": {
+                "src": "hda",
+                "id": "8c959c9304a2bc4b"
+            }
+        },
+        "outputs": {
+            "out_file1": {
+                "src": "hda",
+                "id": "132016f833b57406"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-03T15:14:01.060662",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "6505e875ddb66fd2",
+        "tool": null
+    },
+    {
+        "tool_id": "Show tail1",
+        "update_time": "2014-10-03T15:14:21.596871",
+        "inputs": {
+            "input": {
+                "src": "hda",
+                "id": "132016f833b57406"
+            }
+        },
+        "outputs": {
+            "out_file1": {
+                "src": "hda",
+                "id": "846fb0a2a64137c0"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-03T15:14:18.425681",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "77f74776fd03cbc5",
+        "tool": null
+    }
+];
+
+var historyContents1 = [
+  {
+    "deleted": false,
+    "extension": "interval",
+    "hid": 1,
+    "history_content_type": "dataset",
+    "history_id": "911dde3ddb677bcd",
+    "id": "8c959c9304a2bc4b",
+    "name": "1.interval",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/911dde3ddb677bcd/contents/datasets/8c959c9304a2bc4b",
+    "visible": true
+  },
+  {
+    "deleted": false,
+    "extension": "interval",
+    "hid": 2,
+    "history_content_type": "dataset",
+    "history_id": "911dde3ddb677bcd",
+    "id": "132016f833b57406",
+    "name": "Select first on data 1",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/911dde3ddb677bcd/contents/datasets/132016f833b57406",
+    "visible": true
+  },
+  {
+    "deleted": false,
+    "extension": "interval",
+    "hid": 3,
+    "history_content_type": "dataset",
+    "history_id": "911dde3ddb677bcd",
+    "id": "846fb0a2a64137c0",
+    "name": "Select last on data 2",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/911dde3ddb677bcd/contents/datasets/846fb0a2a64137c0",
+    "visible": true
+  }
+];
+
+// ============================================================================
+// single job chain with a __SET_METADATA__ job
+var jobs2 = [
+    {
+        "tool_id": "upload1",
+        "update_time": "2014-10-03T16:09:49.590769",
+        "inputs": {},
+        "outputs": {
+            "output0": {
+                "src": "hda",
+                "id": "eca0af6fb47bf90c"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-03T16:09:45.190023",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "bf60fd5f5f7f44bf",
+        "tool": null
+    },
+    {
+        "tool_id": "random_lines1",
+        "update_time": "2014-10-03T16:10:44.743610",
+        "inputs": {
+            "input": {
+                "src": "hda",
+                "id": "eca0af6fb47bf90c"
+            }
+        },
+        "outputs": {
+            "out_file1": {
+                "src": "hda",
+                "id": "6fc9fbb81c497f69"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-03T16:10:41.446413",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "90240358ebde1489",
+        "tool": null
+    },
+    {
+        "tool_id": "__SET_METADATA__",
+        "update_time": "2014-10-03T16:14:44.196697",
+        "inputs": {
+            "input1": {
+                "src": "hda",
+                "id": "eca0af6fb47bf90c"
+            }
+        },
+        "outputs": {},
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-03T16:14:37.901222",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "86cf1d3beeec9f1c",
+        "tool": null
+    }
+];
+
+var historyContents2 = [
+    {
+        "deleted": false,
+        "extension": "interval",
+        "hid": 1,
+        "history_content_type": "dataset",
+        "history_id": "ff5476bcf6c921fa",
+        "id": "eca0af6fb47bf90c",
+        "name": "1.interval",
+        "purged": false,
+        "resubmitted": false,
+        "state": "ok",
+        "type": "file",
+        "url": "/api/histories/ff5476bcf6c921fa/contents/datasets/eca0af6fb47bf90c",
+        "visible": true
+    },
+    {
+        "deleted": false,
+        "extension": "interval",
+        "hid": 2,
+        "history_content_type": "dataset",
+        "history_id": "ff5476bcf6c921fa",
+        "id": "6fc9fbb81c497f69",
+        "name": "Select random lines on data 1",
+        "purged": false,
+        "resubmitted": false,
+        "state": "ok",
+        "type": "file",
+        "url": "/api/histories/ff5476bcf6c921fa/contents/datasets/6fc9fbb81c497f69",
+        "visible": true
+    }
+];
+
+
+var jobs3 = [
+    {
+        "tool_id": "upload1",
+        "update_time": "2014-10-13T20:28:58.549844",
+        "inputs": {},
+        "outputs": {
+            "output0": {
+                "src": "hda",
+                "id": "6fb17d0cc6e8fae5"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-13T20:28:43.162803",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "8c959c9304a2bc4b",
+        "tool": null
+    },
+    {
+        "tool_id": "upload1",
+        "update_time": "2014-10-13T20:28:58.932152",
+        "inputs": {},
+        "outputs": {
+            "output0": {
+                "src": "hda",
+                "id": "5114a2a207b7caff"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-13T20:28:47.421452",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "132016f833b57406",
+        "tool": null
+    },
+    {
+        "tool_id": "Show beginning1",
+        "update_time": "2014-10-13T20:29:31.424058",
+        "inputs": {
+            "input": {
+                "src": "hda",
+                "id": "6fb17d0cc6e8fae5"
+            }
+        },
+        "outputs": {
+            "out_file1": {
+                "src": "hda",
+                "id": "06ec17aefa2d49dd"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-13T20:29:28.769495",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "846fb0a2a64137c0",
+        "tool": null
+    },
+    {
+        "tool_id": "Show beginning1",
+        "update_time": "2014-10-13T20:29:55.851096",
+        "inputs": {
+            "input": {
+                "src": "hda",
+                "id": "5114a2a207b7caff"
+            }
+        },
+        "outputs": {
+            "out_file1": {
+                "src": "hda",
+                "id": "b8a0d6158b9961df"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-13T20:29:53.291703",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "eca0af6fb47bf90c",
+        "tool": null
+    },
+    {
+        "tool_id": "Show tail1",
+        "update_time": "2014-10-13T20:30:16.225937",
+        "inputs": {
+            "input": {
+                "src": "hda",
+                "id": "b8a0d6158b9961df"
+            }
+        },
+        "outputs": {
+            "out_file1": {
+                "src": "hda",
+                "id": "24d84bcf64116fe7"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-13T20:30:13.789842",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "6fc9fbb81c497f69",
+        "tool": null
+    }
+];
+
+
+var historyContents3 = [
+  {
+    "deleted": false,
+    "extension": "bed",
+    "hid": 1,
+    "history_content_type": "dataset",
+    "history_id": "5564089c81cf7fe8",
+    "id": "6fb17d0cc6e8fae5",
+    "name": "1.bed",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/5564089c81cf7fe8/contents/datasets/6fb17d0cc6e8fae5",
+    "visible": true
+  },
+  {
+    "deleted": false,
+    "extension": "interval",
+    "hid": 2,
+    "history_content_type": "dataset",
+    "history_id": "5564089c81cf7fe8",
+    "id": "5114a2a207b7caff",
+    "name": "1.interval",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/5564089c81cf7fe8/contents/datasets/5114a2a207b7caff",
+    "visible": true
+  },
+  {
+    "deleted": false,
+    "extension": "bed",
+    "hid": 3,
+    "history_content_type": "dataset",
+    "history_id": "5564089c81cf7fe8",
+    "id": "06ec17aefa2d49dd",
+    "name": "Select first on data 1",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/5564089c81cf7fe8/contents/datasets/06ec17aefa2d49dd",
+    "visible": true
+  },
+  {
+    "deleted": false,
+    "extension": "interval",
+    "hid": 4,
+    "history_content_type": "dataset",
+    "history_id": "5564089c81cf7fe8",
+    "id": "b8a0d6158b9961df",
+    "name": "Select first on data 2",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/5564089c81cf7fe8/contents/datasets/b8a0d6158b9961df",
+    "visible": true
+  },
+  {
+    "deleted": false,
+    "extension": "interval",
+    "hid": 5,
+    "history_content_type": "dataset",
+    "history_id": "5564089c81cf7fe8",
+    "id": "24d84bcf64116fe7",
+    "name": "Select last on data 4",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/5564089c81cf7fe8/contents/datasets/24d84bcf64116fe7",
+    "visible": true
+  }
+];
+
+
+var jobs4 = [
+    {
+        "tool_id": "cat1",
+        "update_time": "2014-10-21T17:33:36.960857",
+        "inputs": {
+            "input1": {
+                "src": "hda",
+                "id": "422eef6b1b545329",
+                "name": "input1"
+            },
+            "queries_0|input2": {
+                "src": "hda",
+                "id": "c86c1b73aa7102dd",
+                "name": "queries_0|input2"
+            }
+        },
+        "outputs": {
+            "out_file1": {
+                "src": "hda",
+                "id": "52d6bdfafedbb5e5",
+                "name": "out_file1"
+            }
+        },
+        "exit_code": 0,
+        "state": "ok",
+        "create_time": "2014-10-21T17:33:34.302245",
+        "params": {
+            // ...
+        },
+        "model_class": "Job",
+        "id": "92b83968e0b52980"
+    }
+];
+
+var historyContents4 = [
+  {
+    "dataset_id": 29,
+    "deleted": false,
+    "extension": "vcf",
+    "hid": 1,
+    "history_content_type": "dataset",
+    "history_id": "c24141d7e4e77705",
+    "id": "422eef6b1b545329",
+    "name": "1.vcf",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/c24141d7e4e77705/contents/datasets/422eef6b1b545329",
+    "visible": true
+  },
+  {
+    "dataset_id": 56,
+    "deleted": false,
+    "extension": "maf",
+    "hid": 2,
+    "history_content_type": "dataset",
+    "history_id": "c24141d7e4e77705",
+    "id": "c86c1b73aa7102dd",
+    "name": "3.maf",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/c24141d7e4e77705/contents/datasets/c86c1b73aa7102dd",
+    "visible": true
+  },
+  {
+    "dataset_id": 131,
+    "deleted": false,
+    "extension": "maf",
+    "hid": 3,
+    "history_content_type": "dataset",
+    "history_id": "c24141d7e4e77705",
+    "id": "52d6bdfafedbb5e5",
+    "name": "Concatenate datasets on data 1 and data 2",
+    "purged": false,
+    "resubmitted": false,
+    "state": "ok",
+    "type": "file",
+    "url": "/api/histories/c24141d7e4e77705/contents/datasets/52d6bdfafedbb5e5",
+    "visible": true
+  }
+];
+
+// ============================================================================
+    return {
+        tools               : tools,
+        jobs1               : jobs1,
+        historyContents1    : historyContents1,
+        jobs2               : jobs2,
+        historyContents2    : historyContents2,
+        jobs3               : jobs3,
+        historyContents3    : historyContents3,
+        jobs4               : jobs4,
+        historyContents4    : historyContents4
+    };
+});
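+// Illustrative sketch (an annotation added here, not part of the upstream
+// file): these fixtures are plain AMD data modules, resolved through the
+// "test-data" path defined in the QUnit RequireJS config, e.g.:
+//
+//     require( [ "test-data/job-dag-1" ], function( testData ) {
+//         // jobs1/historyContents1 hold the plain 3-step chain; jobs2 adds a
+//         // __SET_METADATA__ job, jobs3 two parallel chains, jobs4 a fan-in.
+//         console.log( testData.jobs1.length ); // 3
+//     } );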
diff --git a/test/qunit/test-data/paired-collection-creator.data.js b/test/qunit/test-data/paired-collection-creator.data.js
new file mode 100644
index 0000000..f196b3c
--- /dev/null
+++ b/test/qunit/test-data/paired-collection-creator.data.js
@@ -0,0 +1,153 @@
+define([], function(){
+// ============================================================================
+// 7 paired-end fastq sample sets (SET1-01 .. SET1-07), forward/reverse reads
+var datasets1 = [
+    { name: 'SET1-01_1.fastq' },
+    { name: 'SET1-01_2.fastq' },
+    { name: 'SET1-02_1.fastq' },
+    { name: 'SET1-02_2.fastq' },
+    { name: 'SET1-03_1.fastq' },
+    { name: 'SET1-03_2.fastq' },
+    { name: 'SET1-04_1.fastq' },
+    { name: 'SET1-04_2.fastq' },
+    { name: 'SET1-05_1.fastq' },
+    { name: 'SET1-05_2.fastq' },
+    { name: 'SET1-06_1.fastq' },
+    { name: 'SET1-06_2.fastq' },
+    { name: 'SET1-07_1.fastq' },
+    { name: 'SET1-07_2.fastq' }
+];
+
+var datasets1CreateRequestJSON = {
+  "type": "dataset_collection",
+  "collection_type": "list:paired",
+  "name": "Heres a collection",
+  "element_identifiers": [
+    {
+      "collection_type": "paired",
+      "src": "new_collection",
+      "name": "SET1-07",
+      "element_identifiers": [
+        {
+          "name": "forward",
+          "id": "2",
+          "src": "hda"
+        },
+        {
+          "name": "reverse",
+          "id": "3",
+          "src": "hda"
+        }
+      ]
+    },
+    {
+      "collection_type": "paired",
+      "src": "new_collection",
+      "name": "SET1-06",
+      "element_identifiers": [
+        {
+          "name": "forward",
+          "id": "4",
+          "src": "hda"
+        },
+        {
+          "name": "reverse",
+          "id": "5",
+          "src": "hda"
+        }
+      ]
+    },
+    {
+      "collection_type": "paired",
+      "src": "new_collection",
+      "name": "SET1-05",
+      "element_identifiers": [
+        {
+          "name": "forward",
+          "id": "6",
+          "src": "hda"
+        },
+        {
+          "name": "reverse",
+          "id": "7",
+          "src": "hda"
+        }
+      ]
+    },
+    {
+      "collection_type": "paired",
+      "src": "new_collection",
+      "name": "SET1-04",
+      "element_identifiers": [
+        {
+          "name": "forward",
+          "id": "8",
+          "src": "hda"
+        },
+        {
+          "name": "reverse",
+          "id": "9",
+          "src": "hda"
+        }
+      ]
+    },
+    {
+      "collection_type": "paired",
+      "src": "new_collection",
+      "name": "SET1-03",
+      "element_identifiers": [
+        {
+          "name": "forward",
+          "id": "10",
+          "src": "hda"
+        },
+        {
+          "name": "reverse",
+          "id": "11",
+          "src": "hda"
+        }
+      ]
+    },
+    {
+      "collection_type": "paired",
+      "src": "new_collection",
+      "name": "SET1-02",
+      "element_identifiers": [
+        {
+          "name": "forward",
+          "id": "12",
+          "src": "hda"
+        },
+        {
+          "name": "reverse",
+          "id": "13",
+          "src": "hda"
+        }
+      ]
+    },
+    {
+      "collection_type": "paired",
+      "src": "new_collection",
+      "name": "SET1-01",
+      "element_identifiers": [
+        {
+          "name": "forward",
+          "id": "14",
+          "src": "hda"
+        },
+        {
+          "name": "reverse",
+          "id": "15",
+          "src": "hda"
+        }
+      ]
+    }
+  ]
+};
+
+// ============================================================================
+    return {
+        _1              : datasets1,
+        _1requestJSON   : datasets1CreateRequestJSON
+    };
+});
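+// Illustrative sketch (an annotation added here, not part of the upstream
+// file): the dataset names above follow the <sample>_1/<sample>_2 fastq
+// convention, and datasets1CreateRequestJSON is the matching list:paired
+// creation payload: one "paired" element per SET1-NN whose "forward" member
+// is the _1 file and whose "reverse" member is the _2 file. E.g. deriving the
+// pair name by stripping the suffix:
+//
+//     var pairName = "SET1-07_1.fastq".replace( /_[12]\.fastq$/, "" ); // "SET1-07"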
diff --git a/test/qunit/test-libs/qunit-1.23.1.css b/test/qunit/test-libs/qunit-1.23.1.css
new file mode 100644
index 0000000..88ff9df
--- /dev/null
+++ b/test/qunit/test-libs/qunit-1.23.1.css
@@ -0,0 +1,305 @@
+/*!
+ * QUnit 1.23.1
+ * https://qunitjs.com/
+ *
+ * Copyright jQuery Foundation and other contributors
+ * Released under the MIT license
+ * https://jquery.org/license
+ *
+ * Date: 2016-04-12T17:29Z
+ */
+
+/** Font Family and Sizes */
+
+#qunit-tests, #qunit-header, #qunit-banner, #qunit-testrunner-toolbar, #qunit-filteredTest, #qunit-userAgent, #qunit-testresult {
+	font-family: "Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial, sans-serif;
+}
+
+#qunit-testrunner-toolbar, #qunit-filteredTest, #qunit-userAgent, #qunit-testresult, #qunit-tests li { font-size: small; }
+#qunit-tests { font-size: smaller; }
+
+
+/** Resets */
+
+#qunit-tests, #qunit-header, #qunit-banner, #qunit-filteredTest, #qunit-userAgent, #qunit-testresult, #qunit-modulefilter {
+	margin: 0;
+	padding: 0;
+}
+
+
+/** Header */
+
+#qunit-header {
+	padding: 0.5em 0 0.5em 1em;
+
+	color: #8699A4;
+	background-color: #0D3349;
+
+	font-size: 1.5em;
+	line-height: 1em;
+	font-weight: 400;
+
+	border-radius: 5px 5px 0 0;
+}
+
+#qunit-header a {
+	text-decoration: none;
+	color: #C2CCD1;
+}
+
+#qunit-header a:hover,
+#qunit-header a:focus {
+	color: #FFF;
+}
+
+#qunit-testrunner-toolbar label {
+	display: inline-block;
+	padding: 0 0.5em 0 0.1em;
+}
+
+#qunit-banner {
+	height: 5px;
+}
+
+#qunit-testrunner-toolbar {
+	padding: 0.5em 1em 0.5em 1em;
+	color: #5E740B;
+	background-color: #EEE;
+	overflow: hidden;
+}
+
+#qunit-filteredTest {
+	padding: 0.5em 1em 0.5em 1em;
+	background-color: #F4FF77;
+	color: #366097;
+}
+
+#qunit-userAgent {
+	padding: 0.5em 1em 0.5em 1em;
+	background-color: #2B81AF;
+	color: #FFF;
+	text-shadow: rgba(0, 0, 0, 0.5) 2px 2px 1px;
+}
+
+#qunit-modulefilter-container {
+	float: right;
+	padding: 0.2em;
+}
+
+.qunit-url-config {
+	display: inline-block;
+	padding: 0.1em;
+}
+
+.qunit-filter {
+	display: block;
+	float: right;
+	margin-left: 1em;
+}
+
+/** Tests: Pass/Fail */
+
+#qunit-tests {
+	list-style-position: inside;
+}
+
+#qunit-tests li {
+	padding: 0.4em 1em 0.4em 1em;
+	border-bottom: 1px solid #FFF;
+	list-style-position: inside;
+}
+
+#qunit-tests > li {
+	display: none;
+}
+
+#qunit-tests li.running,
+#qunit-tests li.pass,
+#qunit-tests li.fail,
+#qunit-tests li.skipped {
+	display: list-item;
+}
+
+#qunit-tests.hidepass {
+	position: relative;
+}
+
+#qunit-tests.hidepass li.running,
+#qunit-tests.hidepass li.pass {
+	visibility: hidden;
+	position: absolute;
+	width:   0;
+	height:  0;
+	padding: 0;
+	border:  0;
+	margin:  0;
+}
+
+#qunit-tests li strong {
+	cursor: pointer;
+}
+
+#qunit-tests li.skipped strong {
+	cursor: default;
+}
+
+#qunit-tests li a {
+	padding: 0.5em;
+	color: #C2CCD1;
+	text-decoration: none;
+}
+
+#qunit-tests li p a {
+	padding: 0.25em;
+	color: #6B6464;
+}
+#qunit-tests li a:hover,
+#qunit-tests li a:focus {
+	color: #000;
+}
+
+#qunit-tests li .runtime {
+	float: right;
+	font-size: smaller;
+}
+
+.qunit-assert-list {
+	margin-top: 0.5em;
+	padding: 0.5em;
+
+	background-color: #FFF;
+
+	border-radius: 5px;
+}
+
+.qunit-source {
+	margin: 0.6em 0 0.3em;
+}
+
+.qunit-collapsed {
+	display: none;
+}
+
+#qunit-tests table {
+	border-collapse: collapse;
+	margin-top: 0.2em;
+}
+
+#qunit-tests th {
+	text-align: right;
+	vertical-align: top;
+	padding: 0 0.5em 0 0;
+}
+
+#qunit-tests td {
+	vertical-align: top;
+}
+
+#qunit-tests pre {
+	margin: 0;
+	white-space: pre-wrap;
+	word-wrap: break-word;
+}
+
+#qunit-tests del {
+	background-color: #E0F2BE;
+	color: #374E0C;
+	text-decoration: none;
+}
+
+#qunit-tests ins {
+	background-color: #FFCACA;
+	color: #500;
+	text-decoration: none;
+}
+
+/*** Test Counts */
+
+#qunit-tests b.counts                       { color: #000; }
+#qunit-tests b.passed                       { color: #5E740B; }
+#qunit-tests b.failed                       { color: #710909; }
+
+#qunit-tests li li {
+	padding: 5px;
+	background-color: #FFF;
+	border-bottom: none;
+	list-style-position: inside;
+}
+
+/*** Passing Styles */
+
+#qunit-tests li li.pass {
+	color: #3C510C;
+	background-color: #FFF;
+	border-left: 10px solid #C6E746;
+}
+
+#qunit-tests .pass                          { color: #528CE0; background-color: #D2E0E6; }
+#qunit-tests .pass .test-name               { color: #366097; }
+
+#qunit-tests .pass .test-actual,
+#qunit-tests .pass .test-expected           { color: #999; }
+
+#qunit-banner.qunit-pass                    { background-color: #C6E746; }
+
+/*** Failing Styles */
+
+#qunit-tests li li.fail {
+	color: #710909;
+	background-color: #FFF;
+	border-left: 10px solid #EE5757;
+	white-space: pre;
+}
+
+#qunit-tests > li:last-child {
+	border-radius: 0 0 5px 5px;
+}
+
+#qunit-tests .fail                          { color: #000; background-color: #EE5757; }
+#qunit-tests .fail .test-name,
+#qunit-tests .fail .module-name             { color: #000; }
+
+#qunit-tests .fail .test-actual             { color: #EE5757; }
+#qunit-tests .fail .test-expected           { color: #008000; }
+
+#qunit-banner.qunit-fail                    { background-color: #EE5757; }
+
+/*** Skipped tests */
+
+#qunit-tests .skipped {
+	background-color: #EBECE9;
+}
+
+#qunit-tests .qunit-skipped-label {
+	background-color: #F4FF77;
+	display: inline-block;
+	font-style: normal;
+	color: #366097;
+	line-height: 1.8em;
+	padding: 0 0.5em;
+	margin: -0.4em 0.4em -0.4em 0;
+}
+
+/** Result */
+
+#qunit-testresult {
+	padding: 0.5em 1em 0.5em 1em;
+
+	color: #2B81AF;
+	background-color: #D2E0E6;
+
+	border-bottom: 1px solid #FFF;
+}
+#qunit-testresult .module-name {
+	font-weight: 700;
+}
+
+/** Fixture */
+
+#qunit-fixture {
+	position: absolute;
+	top: -10000px;
+	left: -10000px;
+	width: 1000px;
+	height: 1000px;
+}
\ No newline at end of file
diff --git a/test/qunit/test-libs/qunit-1.23.1.js b/test/qunit/test-libs/qunit-1.23.1.js
new file mode 100644
index 0000000..f4ed59e
--- /dev/null
+++ b/test/qunit/test-libs/qunit-1.23.1.js
@@ -0,0 +1,4334 @@
+/*!
+ * QUnit 1.23.1
+ * https://qunitjs.com/
+ *
+ * Copyright jQuery Foundation and other contributors
+ * Released under the MIT license
+ * https://jquery.org/license
+ *
+ * Date: 2016-04-12T17:29Z
+ */
+
+( function( global ) {
+
+var QUnit = {};
+
+var Date = global.Date;
+var now = Date.now || function() {
+	return new Date().getTime();
+};
+
+var setTimeout = global.setTimeout;
+var clearTimeout = global.clearTimeout;
+
+// Store a local window from the global to allow direct references.
+var window = global.window;
+
+var defined = {
+	document: window && window.document !== undefined,
+	setTimeout: setTimeout !== undefined,
+	sessionStorage: ( function() {
+		var x = "qunit-test-string";
+		try {
+			sessionStorage.setItem( x, x );
+			sessionStorage.removeItem( x );
+			return true;
+		} catch ( e ) {
+			return false;
+		}
+	}() )
+};
+
+var fileName = ( sourceFromStacktrace( 0 ) || "" ).replace( /(:\d+)+\)?/, "" ).replace( /.+\//, "" );
+var globalStartCalled = false;
+var runStarted = false;
+
+var toString = Object.prototype.toString,
+	hasOwn = Object.prototype.hasOwnProperty;
+
+// Returns a new Array with the elements that are in a but not in b
+function diff( a, b ) {
+	var i, j,
+		result = a.slice();
+
+	for ( i = 0; i < result.length; i++ ) {
+		for ( j = 0; j < b.length; j++ ) {
+			if ( result[ i ] === b[ j ] ) {
+				result.splice( i, 1 );
+				i--;
+				break;
+			}
+		}
+	}
+	return result;
+}
+
+// From jquery.js
+function inArray( elem, array ) {
+	if ( array.indexOf ) {
+		return array.indexOf( elem );
+	}
+
+	for ( var i = 0, length = array.length; i < length; i++ ) {
+		if ( array[ i ] === elem ) {
+			return i;
+		}
+	}
+
+	return -1;
+}
+
+/**
+ * Makes a clone of an object using only Array or Object as base,
+ * and copies over the own enumerable properties.
+ *
+ * @param {Object} obj
+ * @return {Object} New object with only the own properties (recursively).
+ */
+function objectValues ( obj ) {
+	var key, val,
+		vals = QUnit.is( "array", obj ) ? [] : {};
+	for ( key in obj ) {
+		if ( hasOwn.call( obj, key ) ) {
+			val = obj[ key ];
+			vals[ key ] = val === Object( val ) ? objectValues( val ) : val;
+		}
+	}
+	return vals;
+}
+
+function extend( a, b, undefOnly ) {
+	for ( var prop in b ) {
+		if ( hasOwn.call( b, prop ) ) {
+
+			// Avoid "Member not found" error in IE8 caused by messing with window.constructor
+			// This block runs on every environment, so `global` is being used instead of `window`
+			// to avoid errors on node.
+			if ( prop !== "constructor" || a !== global ) {
+				if ( b[ prop ] === undefined ) {
+					delete a[ prop ];
+				} else if ( !( undefOnly && typeof a[ prop ] !== "undefined" ) ) {
+					a[ prop ] = b[ prop ];
+				}
+			}
+		}
+	}
+
+	return a;
+}
+
+function objectType( obj ) {
+	if ( typeof obj === "undefined" ) {
+		return "undefined";
+	}
+
+	// Consider: typeof null === object
+	if ( obj === null ) {
+		return "null";
+	}
+
+	var match = toString.call( obj ).match( /^\[object\s(.*)\]$/ ),
+		type = match && match[ 1 ];
+
+	switch ( type ) {
+		case "Number":
+			if ( isNaN( obj ) ) {
+				return "nan";
+			}
+			return "number";
+		case "String":
+		case "Boolean":
+		case "Array":
+		case "Set":
+		case "Map":
+		case "Date":
+		case "RegExp":
+		case "Function":
+		case "Symbol":
+			return type.toLowerCase();
+	}
+	if ( typeof obj === "object" ) {
+		return "object";
+	}
+}
+
+// Safe object type checking
+function is( type, obj ) {
+	return QUnit.objectType( obj ) === type;
+}
+
+// Doesn't support IE6 to IE9, it will return undefined on these browsers
+// See also https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Error/Stack
+function extractStacktrace( e, offset ) {
+	offset = offset === undefined ? 4 : offset;
+
+	var stack, include, i;
+
+	if ( e.stack ) {
+		stack = e.stack.split( "\n" );
+		if ( /^error$/i.test( stack[ 0 ] ) ) {
+			stack.shift();
+		}
+		if ( fileName ) {
+			include = [];
+			for ( i = offset; i < stack.length; i++ ) {
+				if ( stack[ i ].indexOf( fileName ) !== -1 ) {
+					break;
+				}
+				include.push( stack[ i ] );
+			}
+			if ( include.length ) {
+				return include.join( "\n" );
+			}
+		}
+		return stack[ offset ];
+
+	// Support: Safari <=6 only
+	} else if ( e.sourceURL ) {
+
+		// Exclude useless self-reference for generated Error objects
+		if ( /qunit.js$/.test( e.sourceURL ) ) {
+			return;
+		}
+
+		// For actual exceptions, this is useful
+		return e.sourceURL + ":" + e.line;
+	}
+}
+
+function sourceFromStacktrace( offset ) {
+	var error = new Error();
+
+	// Support: Safari <=7 only, IE <=10 - 11 only
+	// Not all browsers generate the `stack` property for `new Error()`, see also #636
+	if ( !error.stack ) {
+		try {
+			throw error;
+		} catch ( err ) {
+			error = err;
+		}
+	}
+
+	return extractStacktrace( error, offset );
+}
+
+/**
+ * Config object: Maintain internal state
+ * Later exposed as QUnit.config
+ * `config` initialized at top of scope
+ */
+var config = {
+
+	// The queue of tests to run
+	queue: [],
+
+	// Block until document ready
+	blocking: true,
+
+	// By default, run previously failed tests first
+	// very useful in combination with "Hide passed tests" checked
+	reorder: true,
+
+	// By default, modify document.title when suite is done
+	altertitle: true,
+
+	// HTML Reporter: collapse every test except the first failing test
+	// If false, all failing tests will be expanded
+	collapse: true,
+
+	// By default, scroll to top of the page when suite is done
+	scrolltop: true,
+
+	// Depth up-to which object will be dumped
+	maxDepth: 5,
+
+	// When enabled, all tests must call expect()
+	requireExpects: false,
+
+	// Placeholder for user-configurable form-exposed URL parameters
+	urlConfig: [],
+
+	// Set of all modules.
+	modules: [],
+
+	// Stack of nested modules
+	moduleStack: [],
+
+	// The first unnamed module
+	currentModule: {
+		name: "",
+		tests: []
+	},
+
+	callbacks: {}
+};
+
+// Push a loose unnamed module to the modules collection
+config.modules.push( config.currentModule );
+
+var loggingCallbacks = {};
+
+// Register logging callbacks
+function registerLoggingCallbacks( obj ) {
+	var i, l, key,
+		callbackNames = [ "begin", "done", "log", "testStart", "testDone",
+			"moduleStart", "moduleDone" ];
+
+	function registerLoggingCallback( key ) {
+		var loggingCallback = function( callback ) {
+			if ( objectType( callback ) !== "function" ) {
+				throw new Error(
+					"QUnit logging methods require a callback function as their first parameters."
+				);
+			}
+
+			config.callbacks[ key ].push( callback );
+		};
+
+		// DEPRECATED: This will be removed on QUnit 2.0.0+
+		// Stores the registered functions allowing restoring
+		// at verifyLoggingCallbacks() if modified
+		loggingCallbacks[ key ] = loggingCallback;
+
+		return loggingCallback;
+	}
+
+	for ( i = 0, l = callbackNames.length; i < l; i++ ) {
+		key = callbackNames[ i ];
+
+		// Initialize key collection of logging callback
+		if ( objectType( config.callbacks[ key ] ) === "undefined" ) {
+			config.callbacks[ key ] = [];
+		}
+
+		obj[ key ] = registerLoggingCallback( key );
+	}
+}
+
+function runLoggingCallbacks( key, args ) {
+	var i, l, callbacks;
+
+	callbacks = config.callbacks[ key ];
+	for ( i = 0, l = callbacks.length; i < l; i++ ) {
+		callbacks[ i ]( args );
+	}
+}
+
+// DEPRECATED: This will be removed on 2.0.0+
+// This function verifies if the loggingCallbacks were modified by the user
+// If so, it will restore it, assign the given callback and print a console warning
+function verifyLoggingCallbacks() {
+	var loggingCallback, userCallback;
+
+	for ( loggingCallback in loggingCallbacks ) {
+		if ( QUnit[ loggingCallback ] !== loggingCallbacks[ loggingCallback ] ) {
+
+			userCallback = QUnit[ loggingCallback ];
+
+			// Restore the callback function
+			QUnit[ loggingCallback ] = loggingCallbacks[ loggingCallback ];
+
+			// Assign the deprecated given callback
+			QUnit[ loggingCallback ]( userCallback );
+
+			if ( global.console && global.console.warn ) {
+				global.console.warn(
+					"QUnit." + loggingCallback + " was replaced with a new value.\n" +
+					"Please, check out the documentation on how to apply logging callbacks.\n" +
+					"Reference: https://api.qunitjs.com/category/callbacks/"
+				);
+			}
+		}
+	}
+}
+
+( function() {
+	if ( !defined.document ) {
+		return;
+	}
+
+	// `onErrorFnPrev` initialized at top of scope
+	// Preserve other handlers
+	var onErrorFnPrev = window.onerror;
+
+	// Cover uncaught exceptions
+	// Returning true will suppress the default browser handler,
+	// returning false will let it run.
+	window.onerror = function( error, filePath, linerNr ) {
+		var ret = false;
+		if ( onErrorFnPrev ) {
+			ret = onErrorFnPrev( error, filePath, linerNr );
+		}
+
+		// Treat return value as window.onerror itself does,
+		// Only do our handling if not suppressed.
+		if ( ret !== true ) {
+			if ( QUnit.config.current ) {
+				if ( QUnit.config.current.ignoreGlobalErrors ) {
+					return true;
+				}
+				QUnit.pushFailure( error, filePath + ":" + linerNr );
+			} else {
+				QUnit.test( "global failure", extend( function() {
+					QUnit.pushFailure( error, filePath + ":" + linerNr );
+				}, { validTest: true } ) );
+			}
+			return false;
+		}
+
+		return ret;
+	};
+}() );
+
+// Figure out if we're running the tests from a server or not
+QUnit.isLocal = !( defined.document && window.location.protocol !== "file:" );
+
+// Expose the current QUnit version
+QUnit.version = "1.23.1";
+
+extend( QUnit, {
+
+	// Call on start of module test to prepend name to all tests
+	module: function( name, testEnvironment, executeNow ) {
+		var module, moduleFns;
+		var currentModule = config.currentModule;
+
+		if ( arguments.length === 2 ) {
+			if ( objectType( testEnvironment ) === "function" ) {
+				executeNow = testEnvironment;
+				testEnvironment = undefined;
+			}
+		}
+
+		// DEPRECATED: handles setup/teardown functions,
+		// beforeEach and afterEach should be used instead
+		if ( testEnvironment && testEnvironment.setup ) {
+			testEnvironment.beforeEach = testEnvironment.setup;
+			delete testEnvironment.setup;
+		}
+		if ( testEnvironment && testEnvironment.teardown ) {
+			testEnvironment.afterEach = testEnvironment.teardown;
+			delete testEnvironment.teardown;
+		}
+
+		module = createModule();
+
+		moduleFns = {
+			beforeEach: setHook( module, "beforeEach" ),
+			afterEach: setHook( module, "afterEach" )
+		};
+
+		if ( objectType( executeNow ) === "function" ) {
+			config.moduleStack.push( module );
+			setCurrentModule( module );
+			executeNow.call( module.testEnvironment, moduleFns );
+			config.moduleStack.pop();
+			module = module.parentModule || currentModule;
+		}
+
+		setCurrentModule( module );
+
+		function createModule() {
+			var parentModule = config.moduleStack.length ?
+				config.moduleStack.slice( -1 )[ 0 ] : null;
+			var moduleName = parentModule !== null ?
+				[ parentModule.name, name ].join( " > " ) : name;
+			var module = {
+				name: moduleName,
+				parentModule: parentModule,
+				tests: [],
+				moduleId: generateHash( moduleName )
+			};
+
+			var env = {};
+			if ( parentModule ) {
+				extend( env, parentModule.testEnvironment );
+				delete env.beforeEach;
+				delete env.afterEach;
+			}
+			extend( env, testEnvironment );
+			module.testEnvironment = env;
+
+			config.modules.push( module );
+			return module;
+		}
+
+		function setCurrentModule( module ) {
+			config.currentModule = module;
+		}
+
+	},
+
+	// DEPRECATED: QUnit.asyncTest() will be removed in QUnit 2.0.
+	asyncTest: asyncTest,
+
+	test: test,
+
+	skip: skip,
+
+	only: only,
+
+	// DEPRECATED: The functionality of QUnit.start() will be altered in QUnit 2.0.
+	// In QUnit 2.0, invoking it will ONLY affect the `QUnit.config.autostart` blocking behavior.
+	start: function( count ) {
+		var globalStartAlreadyCalled = globalStartCalled;
+
+		if ( !config.current ) {
+			globalStartCalled = true;
+
+			if ( runStarted ) {
+				throw new Error( "Called start() outside of a test context while already started" );
+			} else if ( globalStartAlreadyCalled || count > 1 ) {
+				throw new Error( "Called start() outside of a test context too many times" );
+			} else if ( config.autostart ) {
+				throw new Error( "Called start() outside of a test context when " +
+					"QUnit.config.autostart was true" );
+			} else if ( !config.pageLoaded ) {
+
+				// The page isn't completely loaded yet, so bail out and let `QUnit.load` handle it
+				config.autostart = true;
+				return;
+			}
+		} else {
+
+			// If a test is running, adjust its semaphore
+			config.current.semaphore -= count || 1;
+
+			// If semaphore is non-numeric, throw error
+			if ( isNaN( config.current.semaphore ) ) {
+				config.current.semaphore = 0;
+
+				QUnit.pushFailure(
+					"Called start() with a non-numeric decrement.",
+					sourceFromStacktrace( 2 )
+				);
+				return;
+			}
+
+			// Don't start until equal number of stop-calls
+			if ( config.current.semaphore > 0 ) {
+				return;
+			}
+
+			// Throw an Error if start is called more often than stop
+			if ( config.current.semaphore < 0 ) {
+				config.current.semaphore = 0;
+
+				QUnit.pushFailure(
+					"Called start() while already started (test's semaphore was 0 already)",
+					sourceFromStacktrace( 2 )
+				);
+				return;
+			}
+		}
+
+		resumeProcessing();
+	},
+
+	// DEPRECATED: QUnit.stop() will be removed in QUnit 2.0.
+	stop: function( count ) {
+
+		// If there isn't a test running, don't allow QUnit.stop() to be called
+		if ( !config.current ) {
+			throw new Error( "Called stop() outside of a test context" );
+		}
+
+		// If a test is running, adjust its semaphore
+		config.current.semaphore += count || 1;
+
+		pauseProcessing();
+	},
+
+	config: config,
+
+	is: is,
+
+	objectType: objectType,
+
+	extend: extend,
+
+	load: function() {
+		config.pageLoaded = true;
+
+		// Initialize the configuration options
+		extend( config, {
+			stats: { all: 0, bad: 0 },
+			moduleStats: { all: 0, bad: 0 },
+			started: 0,
+			updateRate: 1000,
+			autostart: true,
+			filter: ""
+		}, true );
+
+		config.blocking = false;
+
+		if ( config.autostart ) {
+			resumeProcessing();
+		}
+	},
+
+	stack: function( offset ) {
+		offset = ( offset || 0 ) + 2;
+		return sourceFromStacktrace( offset );
+	}
+} );
+
+registerLoggingCallbacks( QUnit );
+
+function begin() {
+	var i, l,
+		modulesLog = [];
+
+	// If the test run hasn't officially begun yet
+	if ( !config.started ) {
+
+		// Record the time of the test run's beginning
+		config.started = now();
+
+		verifyLoggingCallbacks();
+
+		// Delete the loose unnamed module if unused.
+		if ( config.modules[ 0 ].name === "" && config.modules[ 0 ].tests.length === 0 ) {
+			config.modules.shift();
+		}
+
+		// Avoid unnecessary information by not logging modules' test environments
+		for ( i = 0, l = config.modules.length; i < l; i++ ) {
+			modulesLog.push( {
+				name: config.modules[ i ].name,
+				tests: config.modules[ i ].tests
+			} );
+		}
+
+		// The test run is officially beginning now
+		runLoggingCallbacks( "begin", {
+			totalTests: Test.count,
+			modules: modulesLog
+		} );
+	}
+
+	config.blocking = false;
+	process( true );
+}
+
+function process( last ) {
+	function next() {
+		process( last );
+	}
+	var start = now();
+	config.depth = ( config.depth || 0 ) + 1;
+
+	while ( config.queue.length && !config.blocking ) {
+		if ( !defined.setTimeout || config.updateRate <= 0 ||
+				( ( now() - start ) < config.updateRate ) ) {
+			if ( config.current ) {
+
+				// Reset async tracking for each phase of the Test lifecycle
+				config.current.usedAsync = false;
+			}
+			config.queue.shift()();
+		} else {
+			setTimeout( next, 13 );
+			break;
+		}
+	}
+	config.depth--;
+	if ( last && !config.blocking && !config.queue.length && config.depth === 0 ) {
+		done();
+	}
+}
+
+function pauseProcessing() {
+	config.blocking = true;
+
+	if ( config.testTimeout && defined.setTimeout ) {
+		clearTimeout( config.timeout );
+		config.timeout = setTimeout( function() {
+			if ( config.current ) {
+				config.current.semaphore = 0;
+				QUnit.pushFailure( "Test timed out", sourceFromStacktrace( 2 ) );
+			} else {
+				throw new Error( "Test timed out" );
+			}
+			resumeProcessing();
+		}, config.testTimeout );
+	}
+}
+
+function resumeProcessing() {
+	runStarted = true;
+
+	// A slight delay to allow this iteration of the event loop to finish (more assertions, etc.)
+	if ( defined.setTimeout ) {
+		setTimeout( function() {
+			if ( config.current && config.current.semaphore > 0 ) {
+				return;
+			}
+			if ( config.timeout ) {
+				clearTimeout( config.timeout );
+			}
+
+			begin();
+		}, 13 );
+	} else {
+		begin();
+	}
+}
+
+function done() {
+	var runtime, passed;
+
+	config.autorun = true;
+
+	// Log the last module results
+	if ( config.previousModule ) {
+		runLoggingCallbacks( "moduleDone", {
+			name: config.previousModule.name,
+			tests: config.previousModule.tests,
+			failed: config.moduleStats.bad,
+			passed: config.moduleStats.all - config.moduleStats.bad,
+			total: config.moduleStats.all,
+			runtime: now() - config.moduleStats.started
+		} );
+	}
+	delete config.previousModule;
+
+	runtime = now() - config.started;
+	passed = config.stats.all - config.stats.bad;
+
+	runLoggingCallbacks( "done", {
+		failed: config.stats.bad,
+		passed: passed,
+		total: config.stats.all,
+		runtime: runtime
+	} );
+}
+
+function setHook( module, hookName ) {
+	if ( module.testEnvironment === undefined ) {
+		module.testEnvironment = {};
+	}
+
+	return function( callback ) {
+		module.testEnvironment[ hookName ] = callback;
+	};
+}
+
+var focused = false;
+var priorityCount = 0;
+var unitSampler;
+
+function Test( settings ) {
+	var i, l;
+
+	++Test.count;
+
+	extend( this, settings );
+	this.assertions = [];
+	this.semaphore = 0;
+	this.usedAsync = false;
+	this.module = config.currentModule;
+	this.stack = sourceFromStacktrace( 3 );
+
+	// Register unique strings
+	for ( i = 0, l = this.module.tests; i < l.length; i++ ) {
+		if ( this.module.tests[ i ].name === this.testName ) {
+			this.testName += " ";
+		}
+	}
+
+	this.testId = generateHash( this.module.name, this.testName );
+
+	this.module.tests.push( {
+		name: this.testName,
+		testId: this.testId
+	} );
+
+	if ( settings.skip ) {
+
+		// Skipped tests will fully ignore any sent callback
+		this.callback = function() {};
+		this.async = false;
+		this.expected = 0;
+	} else {
+		this.assert = new Assert( this );
+	}
+}
+
+Test.count = 0;
+
+Test.prototype = {
+	before: function() {
+		if (
+
+			// Emit moduleStart when we're switching from one module to another
+			this.module !== config.previousModule ||
+
+				// They could be equal (both undefined) but if the previousModule property doesn't
+				// yet exist it means this is the first test in a suite that isn't wrapped in a
+				// module, in which case we'll just emit a moduleStart event for 'undefined'.
+				// Without this, reporters can get testStart before moduleStart  which is a problem.
+				!hasOwn.call( config, "previousModule" )
+		) {
+			if ( hasOwn.call( config, "previousModule" ) ) {
+				runLoggingCallbacks( "moduleDone", {
+					name: config.previousModule.name,
+					tests: config.previousModule.tests,
+					failed: config.moduleStats.bad,
+					passed: config.moduleStats.all - config.moduleStats.bad,
+					total: config.moduleStats.all,
+					runtime: now() - config.moduleStats.started
+				} );
+			}
+			config.previousModule = this.module;
+			config.moduleStats = { all: 0, bad: 0, started: now() };
+			runLoggingCallbacks( "moduleStart", {
+				name: this.module.name,
+				tests: this.module.tests
+			} );
+		}
+
+		config.current = this;
+
+		if ( this.module.testEnvironment ) {
+			delete this.module.testEnvironment.beforeEach;
+			delete this.module.testEnvironment.afterEach;
+		}
+		this.testEnvironment = extend( {}, this.module.testEnvironment );
+
+		this.started = now();
+		runLoggingCallbacks( "testStart", {
+			name: this.testName,
+			module: this.module.name,
+			testId: this.testId
+		} );
+
+		if ( !config.pollution ) {
+			saveGlobal();
+		}
+	},
+
+	run: function() {
+		var promise;
+
+		config.current = this;
+
+		if ( this.async ) {
+			QUnit.stop();
+		}
+
+		this.callbackStarted = now();
+
+		if ( config.notrycatch ) {
+			runTest( this );
+			return;
+		}
+
+		try {
+			runTest( this );
+		} catch ( e ) {
+			this.pushFailure( "Died on test #" + ( this.assertions.length + 1 ) + " " +
+				this.stack + ": " + ( e.message || e ), extractStacktrace( e, 0 ) );
+
+			// Else next test will carry the responsibility
+			saveGlobal();
+
+			// Restart the tests if they're blocking
+			if ( config.blocking ) {
+				QUnit.start();
+			}
+		}
+
+		function runTest( test ) {
+			promise = test.callback.call( test.testEnvironment, test.assert );
+			test.resolvePromise( promise );
+		}
+	},
+
+	after: function() {
+		checkPollution();
+	},
+
+	queueHook: function( hook, hookName ) {
+		var promise,
+			test = this;
+		return function runHook() {
+			config.current = test;
+			if ( config.notrycatch ) {
+				callHook();
+				return;
+			}
+			try {
+				callHook();
+			} catch ( error ) {
+				test.pushFailure( hookName + " failed on " + test.testName + ": " +
+				( error.message || error ), extractStacktrace( error, 0 ) );
+			}
+
+			function callHook() {
+				promise = hook.call( test.testEnvironment, test.assert );
+				test.resolvePromise( promise, hookName );
+			}
+		};
+	},
+
+	// Currently only used for module level hooks, can be used to add global level ones
+	hooks: function( handler ) {
+		var hooks = [];
+
+		function processHooks( test, module ) {
+			if ( module.parentModule ) {
+				processHooks( test, module.parentModule );
+			}
+			if ( module.testEnvironment &&
+				QUnit.objectType( module.testEnvironment[ handler ] ) === "function" ) {
+				hooks.push( test.queueHook( module.testEnvironment[ handler ], handler ) );
+			}
+		}
+
+		// Hooks are ignored on skipped tests
+		if ( !this.skip ) {
+			processHooks( this, this.module );
+		}
+		return hooks;
+	},
+
+	finish: function() {
+		config.current = this;
+		if ( config.requireExpects && this.expected === null ) {
+			this.pushFailure( "Expected number of assertions to be defined, but expect() was " +
+				"not called.", this.stack );
+		} else if ( this.expected !== null && this.expected !== this.assertions.length ) {
+			this.pushFailure( "Expected " + this.expected + " assertions, but " +
+				this.assertions.length + " were run", this.stack );
+		} else if ( this.expected === null && !this.assertions.length ) {
+			this.pushFailure( "Expected at least one assertion, but none were run - call " +
+				"expect(0) to accept zero assertions.", this.stack );
+		}
+
+		var i,
+			bad = 0;
+
+		this.runtime = now() - this.started;
+		config.stats.all += this.assertions.length;
+		config.moduleStats.all += this.assertions.length;
+
+		for ( i = 0; i < this.assertions.length; i++ ) {
+			if ( !this.assertions[ i ].result ) {
+				bad++;
+				config.stats.bad++;
+				config.moduleStats.bad++;
+			}
+		}
+
+		runLoggingCallbacks( "testDone", {
+			name: this.testName,
+			module: this.module.name,
+			skipped: !!this.skip,
+			failed: bad,
+			passed: this.assertions.length - bad,
+			total: this.assertions.length,
+			runtime: this.runtime,
+
+			// HTML Reporter use
+			assertions: this.assertions,
+			testId: this.testId,
+
+			// Source of Test
+			source: this.stack,
+
+			// DEPRECATED: this property will be removed in 2.0.0, use runtime instead
+			duration: this.runtime
+		} );
+
+		// QUnit.reset() is deprecated and will be replaced for a new
+		// fixture reset function on QUnit 2.0/2.1.
+		// It's still called here for backwards compatibility handling
+		QUnit.reset();
+
+		config.current = undefined;
+	},
+
+	queue: function() {
+		var priority,
+			test = this;
+
+		if ( !this.valid() ) {
+			return;
+		}
+
+		function run() {
+
+			// Each of these can be async
+			synchronize( [
+				function() {
+					test.before();
+				},
+
+				test.hooks( "beforeEach" ),
+				function() {
+					test.run();
+				},
+
+				test.hooks( "afterEach" ).reverse(),
+
+				function() {
+					test.after();
+				},
+				function() {
+					test.finish();
+				}
+			] );
+		}
+
+		// Prioritize previously failed tests, detected from sessionStorage
+		priority = QUnit.config.reorder && defined.sessionStorage &&
+				+sessionStorage.getItem( "qunit-test-" + this.module.name + "-" + this.testName );
+
+		return synchronize( run, priority, config.seed );
+	},
+
+	pushResult: function( resultInfo ) {
+
+		// Destructure of resultInfo = { result, actual, expected, message, negative }
+		var source,
+			details = {
+				module: this.module.name,
+				name: this.testName,
+				result: resultInfo.result,
+				message: resultInfo.message,
+				actual: resultInfo.actual,
+				expected: resultInfo.expected,
+				testId: this.testId,
+				negative: resultInfo.negative || false,
+				runtime: now() - this.started
+			};
+
+		if ( !resultInfo.result ) {
+			source = sourceFromStacktrace();
+
+			if ( source ) {
+				details.source = source;
+			}
+		}
+
+		runLoggingCallbacks( "log", details );
+
+		this.assertions.push( {
+			result: !!resultInfo.result,
+			message: resultInfo.message
+		} );
+	},
+
+	pushFailure: function( message, source, actual ) {
+		if ( !( this instanceof Test ) ) {
+			throw new Error( "pushFailure() assertion outside test context, was " +
+				sourceFromStacktrace( 2 ) );
+		}
+
+		var details = {
+				module: this.module.name,
+				name: this.testName,
+				result: false,
+				message: message || "error",
+				actual: actual || null,
+				testId: this.testId,
+				runtime: now() - this.started
+			};
+
+		if ( source ) {
+			details.source = source;
+		}
+
+		runLoggingCallbacks( "log", details );
+
+		this.assertions.push( {
+			result: false,
+			message: message
+		} );
+	},
+
+	resolvePromise: function( promise, phase ) {
+		var then, message,
+			test = this;
+		if ( promise != null ) {
+			then = promise.then;
+			if ( QUnit.objectType( then ) === "function" ) {
+				QUnit.stop();
+				then.call(
+					promise,
+					function() { QUnit.start(); },
+					function( error ) {
+						message = "Promise rejected " +
+							( !phase ? "during" : phase.replace( /Each$/, "" ) ) +
+							" " + test.testName + ": " + ( error.message || error );
+						test.pushFailure( message, extractStacktrace( error, 0 ) );
+
+						// Else next test will carry the responsibility
+						saveGlobal();
+
+						// Unblock
+						QUnit.start();
+					}
+				);
+			}
+		}
+	},
+
+	valid: function() {
+		var filter = config.filter,
+			regexFilter = /^(!?)\/([\w\W]*)\/(i?$)/.exec( filter ),
+			module = config.module && config.module.toLowerCase(),
+			fullName = ( this.module.name + ": " + this.testName );
+
+		function moduleChainNameMatch( testModule ) {
+			var testModuleName = testModule.name ? testModule.name.toLowerCase() : null;
+			if ( testModuleName === module ) {
+				return true;
+			} else if ( testModule.parentModule ) {
+				return moduleChainNameMatch( testModule.parentModule );
+			} else {
+				return false;
+			}
+		}
+
+		function moduleChainIdMatch( testModule ) {
+			return inArray( testModule.moduleId, config.moduleId ) > -1 ||
+				testModule.parentModule && moduleChainIdMatch( testModule.parentModule );
+		}
+
+		// Internally-generated tests are always valid
+		if ( this.callback && this.callback.validTest ) {
+			return true;
+		}
+
+		if ( config.moduleId && config.moduleId.length > 0 &&
+			!moduleChainIdMatch( this.module ) ) {
+
+			return false;
+		}
+
+		if ( config.testId && config.testId.length > 0 &&
+			inArray( this.testId, config.testId ) < 0 ) {
+
+			return false;
+		}
+
+		if ( module && !moduleChainNameMatch( this.module ) ) {
+			return false;
+		}
+
+		if ( !filter ) {
+			return true;
+		}
+
+		return regexFilter ?
+			this.regexFilter( !!regexFilter[ 1 ], regexFilter[ 2 ], regexFilter[ 3 ], fullName ) :
+			this.stringFilter( filter, fullName );
+	},
+
+	regexFilter: function( exclude, pattern, flags, fullName ) {
+		var regex = new RegExp( pattern, flags );
+		var match = regex.test( fullName );
+
+		return match !== exclude;
+	},
+
+	stringFilter: function( filter, fullName ) {
+		filter = filter.toLowerCase();
+		fullName = fullName.toLowerCase();
+
+		var include = filter.charAt( 0 ) !== "!";
+		if ( !include ) {
+			filter = filter.slice( 1 );
+		}
+
+		// If the filter matches, we need to honour include
+		if ( fullName.indexOf( filter ) !== -1 ) {
+			return include;
+		}
+
+		// Otherwise, do the opposite
+		return !include;
+	}
+};
+
+// Resets the test setup. Useful for tests that modify the DOM.
+/*
+DEPRECATED: Use multiple tests instead of resetting inside a test.
+Use testStart or testDone for custom cleanup.
+This method will throw an error in 2.0, and will be removed in 2.1
+*/
+QUnit.reset = function() {
+
+	// Return on non-browser environments
+	// This is necessary to not break on node tests
+	if ( !defined.document ) {
+		return;
+	}
+
+	var fixture = defined.document && document.getElementById &&
+			document.getElementById( "qunit-fixture" );
+
+	if ( fixture ) {
+		fixture.innerHTML = config.fixture;
+	}
+};
+
+QUnit.pushFailure = function() {
+	if ( !QUnit.config.current ) {
+		throw new Error( "pushFailure() assertion outside test context, in " +
+			sourceFromStacktrace( 2 ) );
+	}
+
+	// Gets current test obj
+	var currentTest = QUnit.config.current;
+
+	return currentTest.pushFailure.apply( currentTest, arguments );
+};
+
+// Based on Java's String.hashCode, a simple but not
+// rigorously collision resistant hashing function
+function generateHash( module, testName ) {
+	var hex,
+		i = 0,
+		hash = 0,
+		str = module + "\x1C" + testName,
+		len = str.length;
+
+	for ( ; i < len; i++ ) {
+		hash  = ( ( hash << 5 ) - hash ) + str.charCodeAt( i );
+		hash |= 0;
+	}
+
+	// Convert the possibly negative integer hash code into an 8 character hex string, which isn't
+	// strictly necessary but increases user understanding that the id is a SHA-like hash
+	hex = ( 0x100000000 + hash ).toString( 16 );
+	if ( hex.length < 8 ) {
+		hex = "0000000" + hex;
+	}
+
+	return hex.slice( -8 );
+}
+
+function synchronize( callback, priority, seed ) {
+	var last = !priority,
+		index;
+
+	if ( QUnit.objectType( callback ) === "array" ) {
+		while ( callback.length ) {
+			synchronize( callback.shift() );
+		}
+		return;
+	}
+
+	if ( priority ) {
+		config.queue.splice( priorityCount++, 0, callback );
+	} else if ( seed ) {
+		if ( !unitSampler ) {
+			unitSampler = unitSamplerGenerator( seed );
+		}
+
+		// Insert into a random position after all priority items
+		index = Math.floor( unitSampler() * ( config.queue.length - priorityCount + 1 ) );
+		config.queue.splice( priorityCount + index, 0, callback );
+	} else {
+		config.queue.push( callback );
+	}
+
+	if ( config.autorun && !config.blocking ) {
+		process( last );
+	}
+}
+
+function unitSamplerGenerator( seed ) {
+
+	// 32-bit xorshift, requires only a nonzero seed
+	// http://excamera.com/sphinx/article-xorshift.html
+	var sample = parseInt( generateHash( seed ), 16 ) || -1;
+	return function() {
+		sample ^= sample << 13;
+		sample ^= sample >>> 17;
+		sample ^= sample << 5;
+
+		// ECMAScript has no unsigned number type
+		if ( sample < 0 ) {
+			sample += 0x100000000;
+		}
+
+		return sample / 0x100000000;
+	};
+}
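+
+// Usage sketch (illustrative): given the same seed, unitSamplerGenerator( "abc" )
+// yields the same sequence of values in [0, 1) on every run, which makes a
+// randomized test order reproducible via the ?seed= URL parameter.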
+
+function saveGlobal() {
+	config.pollution = [];
+
+	if ( config.noglobals ) {
+		for ( var key in global ) {
+			if ( hasOwn.call( global, key ) ) {
+
+				// In Opera sometimes DOM element ids show up here, ignore them
+				if ( /^qunit-test-output/.test( key ) ) {
+					continue;
+				}
+				config.pollution.push( key );
+			}
+		}
+	}
+}
+
+function checkPollution() {
+	var newGlobals,
+		deletedGlobals,
+		old = config.pollution;
+
+	saveGlobal();
+
+	newGlobals = diff( config.pollution, old );
+	if ( newGlobals.length > 0 ) {
+		QUnit.pushFailure( "Introduced global variable(s): " + newGlobals.join( ", " ) );
+	}
+
+	deletedGlobals = diff( old, config.pollution );
+	if ( deletedGlobals.length > 0 ) {
+		QUnit.pushFailure( "Deleted global variable(s): " + deletedGlobals.join( ", " ) );
+	}
+}
+
+// Will be exposed as QUnit.asyncTest
+function asyncTest( testName, expected, callback ) {
+	if ( arguments.length === 2 ) {
+		callback = expected;
+		expected = null;
+	}
+
+	QUnit.test( testName, expected, callback, true );
+}
+
+// Will be exposed as QUnit.test
+function test( testName, expected, callback, async ) {
+	if ( focused )  { return; }
+
+	var newTest;
+
+	if ( arguments.length === 2 ) {
+		callback = expected;
+		expected = null;
+	}
+
+	newTest = new Test( {
+		testName: testName,
+		expected: expected,
+		async: async,
+		callback: callback
+	} );
+
+	newTest.queue();
+}
+
+// Will be exposed as QUnit.skip
+function skip( testName ) {
+	if ( focused )  { return; }
+
+	var test = new Test( {
+		testName: testName,
+		skip: true
+	} );
+
+	test.queue();
+}
+
+// Will be exposed as QUnit.only
+function only( testName, expected, callback, async ) {
+	var newTest;
+
+	if ( focused )  { return; }
+
+	QUnit.config.queue.length = 0;
+	focused = true;
+
+	if ( arguments.length === 2 ) {
+		callback = expected;
+		expected = null;
+	}
+
+	newTest = new Test( {
+		testName: testName,
+		expected: expected,
+		async: async,
+		callback: callback
+	} );
+
+	newTest.queue();
+}
+
+function Assert( testContext ) {
+	this.test = testContext;
+}
+
+// Assert helpers
+QUnit.assert = Assert.prototype = {
+
+	// Specify the number of expected assertions to guarantee that a failed test
+	// (one in which no assertions run at all) doesn't slip through.
+	expect: function( asserts ) {
+		if ( arguments.length === 1 ) {
+			this.test.expected = asserts;
+		} else {
+			return this.test.expected;
+		}
+	},
+
+	// Increment this Test's semaphore counter, then return a function that
+	// decrements that counter a maximum of once.
+	async: function( count ) {
+		var test = this.test,
+			popped = false,
+			acceptCallCount = count;
+
+		if ( typeof acceptCallCount === "undefined" ) {
+			acceptCallCount = 1;
+		}
+
+		test.semaphore += 1;
+		test.usedAsync = true;
+		pauseProcessing();
+
+		return function done() {
+
+			if ( popped ) {
+				test.pushFailure( "Too many calls to the `assert.async` callback",
+					sourceFromStacktrace( 2 ) );
+				return;
+			}
+			acceptCallCount -= 1;
+			if ( acceptCallCount > 0 ) {
+				return;
+			}
+
+			test.semaphore -= 1;
+			popped = true;
+			resumeProcessing();
+		};
+	},
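+
+	// Usage sketch (illustrative, not upstream documentation):
+	//   QUnit.test( "async step", function( assert ) {
+	//       var done = assert.async();
+	//       setTimeout( function() { assert.ok( true ); done(); }, 13 );
+	//   } );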
+
+	// Exports test.push() to the user API
+	// Alias of pushResult.
+	push: function( result, actual, expected, message, negative ) {
+		var currentAssert = this instanceof Assert ? this : QUnit.config.current.assert;
+		return currentAssert.pushResult( {
+			result: result,
+			actual: actual,
+			expected: expected,
+			message: message,
+			negative: negative
+		} );
+	},
+
+	pushResult: function( resultInfo ) {
+
+		// Destructure of resultInfo = { result, actual, expected, message, negative }
+		var assert = this,
+			currentTest = ( assert instanceof Assert && assert.test ) || QUnit.config.current;
+
+		// Backwards compatibility fix.
+		// Allows the direct use of global exported assertions and QUnit.assert.*
+		// Although its use is not recommended, as it can leak assertions
+		// from async tests into other tests, because we only get a reference to the current test,
+		// not necessarily the test where the assertion was intended to be called.
+		if ( !currentTest ) {
+			throw new Error( "assertion outside test context, in " + sourceFromStacktrace( 2 ) );
+		}
+
+		if ( currentTest.usedAsync === true && currentTest.semaphore === 0 ) {
+			currentTest.pushFailure( "Assertion after the final `assert.async` was resolved",
+				sourceFromStacktrace( 2 ) );
+
+			// Allow this assertion to continue running anyway...
+		}
+
+		if ( !( assert instanceof Assert ) ) {
+			assert = currentTest.assert;
+		}
+
+		return assert.test.pushResult( resultInfo );
+	},
+
+	ok: function( result, message ) {
+		message = message || ( result ? "okay" : "failed, expected argument to be truthy, was: " +
+			QUnit.dump.parse( result ) );
+		this.pushResult( {
+			result: !!result,
+			actual: result,
+			expected: true,
+			message: message
+		} );
+	},
+
+	notOk: function( result, message ) {
+		message = message || ( !result ? "okay" : "failed, expected argument to be falsy, was: " +
+			QUnit.dump.parse( result ) );
+		this.pushResult( {
+			result: !result,
+			actual: result,
+			expected: false,
+			message: message
+		} );
+	},
+
+	equal: function( actual, expected, message ) {
+		/*jshint eqeqeq:false */
+		this.pushResult( {
+			result: expected == actual,
+			actual: actual,
+			expected: expected,
+			message: message
+		} );
+	},
+
+	notEqual: function( actual, expected, message ) {
+		/*jshint eqeqeq:false */
+		this.pushResult( {
+			result: expected != actual,
+			actual: actual,
+			expected: expected,
+			message: message,
+			negative: true
+		} );
+	},
+
+	propEqual: function( actual, expected, message ) {
+		actual = objectValues( actual );
+		expected = objectValues( expected );
+		this.pushResult( {
+			result: QUnit.equiv( actual, expected ),
+			actual: actual,
+			expected: expected,
+			message: message
+		} );
+	},
+
+	notPropEqual: function( actual, expected, message ) {
+		actual = objectValues( actual );
+		expected = objectValues( expected );
+		this.pushResult( {
+			result: !QUnit.equiv( actual, expected ),
+			actual: actual,
+			expected: expected,
+			message: message,
+			negative: true
+		} );
+	},
+
+	deepEqual: function( actual, expected, message ) {
+		this.pushResult( {
+			result: QUnit.equiv( actual, expected ),
+			actual: actual,
+			expected: expected,
+			message: message
+		} );
+	},
+
+	notDeepEqual: function( actual, expected, message ) {
+		this.pushResult( {
+			result: !QUnit.equiv( actual, expected ),
+			actual: actual,
+			expected: expected,
+			message: message,
+			negative: true
+		} );
+	},
+
+	strictEqual: function( actual, expected, message ) {
+		this.pushResult( {
+			result: expected === actual,
+			actual: actual,
+			expected: expected,
+			message: message
+		} );
+	},
+
+	notStrictEqual: function( actual, expected, message ) {
+		this.pushResult( {
+			result: expected !== actual,
+			actual: actual,
+			expected: expected,
+			message: message,
+			negative: true
+		} );
+	},
+
+	"throws": function( block, expected, message ) {
+		var actual, expectedType,
+			expectedOutput = expected,
+			ok = false,
+			currentTest = ( this instanceof Assert && this.test ) || QUnit.config.current;
+
+		// 'expected' is optional unless doing string comparison
+		if ( message == null && typeof expected === "string" ) {
+			message = expected;
+			expected = null;
+		}
+
+		currentTest.ignoreGlobalErrors = true;
+		try {
+			block.call( currentTest.testEnvironment );
+		} catch ( e ) {
+			actual = e;
+		}
+		currentTest.ignoreGlobalErrors = false;
+
+		if ( actual ) {
+			expectedType = QUnit.objectType( expected );
+
+			// We don't want to validate thrown error
+			if ( !expected ) {
+				ok = true;
+				expectedOutput = null;
+
+			// Expected is a regexp
+			} else if ( expectedType === "regexp" ) {
+				ok = expected.test( errorString( actual ) );
+
+			// Expected is a string
+			} else if ( expectedType === "string" ) {
+				ok = expected === errorString( actual );
+
+			// Expected is a constructor, maybe an Error constructor
+			} else if ( expectedType === "function" && actual instanceof expected ) {
+				ok = true;
+
+			// Expected is an Error object
+			} else if ( expectedType === "object" ) {
+				ok = actual instanceof expected.constructor &&
+					actual.name === expected.name &&
+					actual.message === expected.message;
+
+			// Expected is a validation function which returns true if validation passed
+			} else if ( expectedType === "function" && expected.call( {}, actual ) === true ) {
+				expectedOutput = null;
+				ok = true;
+			}
+		}
+
+		currentTest.assert.pushResult( {
+			result: ok,
+			actual: actual,
+			expected: expectedOutput,
+			message: message
+		} );
+	}
+};
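+
+// Usage sketch (illustrative): per the branches above, `expected` may be a regexp,
+// an exact error string, an Error-like object, a constructor, or a validation
+// callback, e.g.
+//   assert.throws( function() { throw new TypeError( "boom" ); }, TypeError );
+//   assert.throws( function() { throw new Error( "boom" ); }, /boom/ );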
+
+// Provide an alternative to assert.throws(), for environments that consider throws a reserved word
+// Known to us are: Closure Compiler, Narwhal
+( function() {
+	/*jshint sub:true */
+	Assert.prototype.raises = Assert.prototype [ "throws" ]; //jscs:ignore requireDotNotation
+}() );
+
+function errorString( error ) {
+	var name, message,
+		resultErrorString = error.toString();
+	if ( resultErrorString.substring( 0, 7 ) === "[object" ) {
+		name = error.name ? error.name.toString() : "Error";
+		message = error.message ? error.message.toString() : "";
+		if ( name && message ) {
+			return name + ": " + message;
+		} else if ( name ) {
+			return name;
+		} else if ( message ) {
+			return message;
+		} else {
+			return "Error";
+		}
+	} else {
+		return resultErrorString;
+	}
+}
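+
+// Example (illustrative): errorString( new TypeError( "boom" ) ) returns
+// "TypeError: boom"; the "[object" branch rebuilds the string for error objects
+// whose toString() is not overridden.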
+
+// Test any JavaScript type for equality.
+// Author: Philippe Rathé <prathe at gmail.com>
+QUnit.equiv = ( function() {
+
+	// Stack to decide between skip/abort functions
+	var callers = [];
+
+	// Stack to avoid loops from circular references
+	var parents = [];
+	var parentsB = [];
+
+	var getProto = Object.getPrototypeOf || function( obj ) {
+
+		/*jshint proto: true */
+		return obj.__proto__;
+	};
+
+	function useStrictEquality( b, a ) {
+
+		// To catch the short (primitive) annotation vs. the 'new' annotation of a declaration, e.g.:
+		// `var i = 1;`
+		// `var j = new Number(1);`
+		if ( typeof a === "object" ) {
+			a = a.valueOf();
+		}
+		if ( typeof b === "object" ) {
+			b = b.valueOf();
+		}
+
+		return a === b;
+	}
+
+	function compareConstructors( a, b ) {
+		var protoA = getProto( a );
+		var protoB = getProto( b );
+
+		// Comparing constructors is more strict than using `instanceof`
+		if ( a.constructor === b.constructor ) {
+			return true;
+		}
+
+		// Ref #851
+		// If the obj prototype descends from a null constructor, treat it
+		// as a null prototype.
+		if ( protoA && protoA.constructor === null ) {
+			protoA = null;
+		}
+		if ( protoB && protoB.constructor === null ) {
+			protoB = null;
+		}
+
+		// Allow objects with no prototype to be equivalent to
+		// objects with Object as their constructor.
+		if ( ( protoA === null && protoB === Object.prototype ) ||
+				( protoB === null && protoA === Object.prototype ) ) {
+			return true;
+		}
+
+		return false;
+	}
+
+	function getRegExpFlags( regexp ) {
+		return "flags" in regexp ? regexp.flags : regexp.toString().match( /[gimuy]*$/ )[ 0 ];
+	}
+
+	var callbacks = {
+		"string": useStrictEquality,
+		"boolean": useStrictEquality,
+		"number": useStrictEquality,
+		"null": useStrictEquality,
+		"undefined": useStrictEquality,
+		"symbol": useStrictEquality,
+		"date": useStrictEquality,
+
+		"nan": function() {
+			return true;
+		},
+
+		"regexp": function( b, a ) {
+			return a.source === b.source &&
+
+				// Include flags in the comparison
+				getRegExpFlags( a ) === getRegExpFlags( b );
+		},
+
+		// - skip when the property is a method of an instance (OOP)
+		// - abort otherwise,
+		//   the initial === comparison would have caught identical references anyway
+		"function": function() {
+			var caller = callers[ callers.length - 1 ];
+			return caller !== Object && typeof caller !== "undefined";
+		},
+
+		"array": function( b, a ) {
+			var i, j, len, loop, aCircular, bCircular;
+
+			len = a.length;
+			if ( len !== b.length ) {
+
+				// Safe and faster
+				return false;
+			}
+
+			// Track reference to avoid circular references
+			parents.push( a );
+			parentsB.push( b );
+			for ( i = 0; i < len; i++ ) {
+				loop = false;
+				for ( j = 0; j < parents.length; j++ ) {
+					aCircular = parents[ j ] === a[ i ];
+					bCircular = parentsB[ j ] === b[ i ];
+					if ( aCircular || bCircular ) {
+						if ( a[ i ] === b[ i ] || aCircular && bCircular ) {
+							loop = true;
+						} else {
+							parents.pop();
+							parentsB.pop();
+							return false;
+						}
+					}
+				}
+				if ( !loop && !innerEquiv( a[ i ], b[ i ] ) ) {
+					parents.pop();
+					parentsB.pop();
+					return false;
+				}
+			}
+			parents.pop();
+			parentsB.pop();
+			return true;
+		},
+
+		"set": function( b, a ) {
+			var innerEq,
+				outerEq = true;
+
+			if ( a.size !== b.size ) {
+				return false;
+			}
+
+			a.forEach( function( aVal ) {
+				innerEq = false;
+
+				b.forEach( function( bVal ) {
+					if ( innerEquiv( bVal, aVal ) ) {
+						innerEq = true;
+					}
+				} );
+
+				if ( !innerEq ) {
+					outerEq = false;
+				}
+			} );
+
+			return outerEq;
+		},
+
+		"map": function( b, a ) {
+			var innerEq,
+				outerEq = true;
+
+			if ( a.size !== b.size ) {
+				return false;
+			}
+
+			a.forEach( function( aVal, aKey ) {
+				innerEq = false;
+
+				b.forEach( function( bVal, bKey ) {
+					if ( innerEquiv( [ bVal, bKey ], [ aVal, aKey ] ) ) {
+						innerEq = true;
+					}
+				} );
+
+				if ( !innerEq ) {
+					outerEq = false;
+				}
+			} );
+
+			return outerEq;
+		},
+
+		"object": function( b, a ) {
+			var i, j, loop, aCircular, bCircular;
+
+			// Default to true
+			var eq = true;
+			var aProperties = [];
+			var bProperties = [];
+
+			if ( compareConstructors( a, b ) === false ) {
+				return false;
+			}
+
+			// Stack constructor before traversing properties
+			callers.push( a.constructor );
+
+			// Track reference to avoid circular references
+			parents.push( a );
+			parentsB.push( b );
+
+			// Be strict: don't ensure hasOwnProperty and go deep
+			for ( i in a ) {
+				loop = false;
+				for ( j = 0; j < parents.length; j++ ) {
+					aCircular = parents[ j ] === a[ i ];
+					bCircular = parentsB[ j ] === b[ i ];
+					if ( aCircular || bCircular ) {
+						if ( a[ i ] === b[ i ] || aCircular && bCircular ) {
+							loop = true;
+						} else {
+							eq = false;
+							break;
+						}
+					}
+				}
+				aProperties.push( i );
+				if ( !loop && !innerEquiv( a[ i ], b[ i ] ) ) {
+					eq = false;
+					break;
+				}
+			}
+
+			parents.pop();
+			parentsB.pop();
+
+			// Unstack, we are done
+			callers.pop();
+
+			for ( i in b ) {
+
+				// Collect b's properties
+				bProperties.push( i );
+			}
+
+			// Ensure the property names are identical
+			return eq && innerEquiv( aProperties.sort(), bProperties.sort() );
+		}
+	};
+
+	function typeEquiv( a, b ) {
+		var type = QUnit.objectType( a );
+		return QUnit.objectType( b ) === type && callbacks[ type ]( b, a );
+	}
+
+	// The real equiv function
+	function innerEquiv( a, b ) {
+
+		// We're done when there's nothing more to compare
+		if ( arguments.length < 2 ) {
+			return true;
+		}
+
+		// Require type-specific equality
+		return ( a === b || typeEquiv( a, b ) ) &&
+
+			// ...across all consecutive argument pairs
+			( arguments.length === 2 || innerEquiv.apply( this, [].slice.call( arguments, 1 ) ) );
+	}
+
+	return innerEquiv;
+}() );
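+
+// Example (illustrative): QUnit.equiv( [ 1, { a: 2 } ], [ 1, { a: 2 } ] ) is true;
+// per useStrictEquality above, QUnit.equiv( 1, new Number( 1 ) ) is also true,
+// since object wrappers are unwrapped with valueOf() before comparison.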
+
+// Based on jsDump by Ariel Flesler
+// http://flesler.blogspot.com/2008/05/jsdump-pretty-dump-of-any-javascript.html
+QUnit.dump = ( function() {
+	function quote( str ) {
+		return "\"" + str.toString().replace( /\\/g, "\\\\" ).replace( /"/g, "\\\"" ) + "\"";
+	}
+	function literal( o ) {
+		return o + "";
+	}
+	function join( pre, arr, post ) {
+		var s = dump.separator(),
+			base = dump.indent(),
+			inner = dump.indent( 1 );
+		if ( arr.join ) {
+			arr = arr.join( "," + s + inner );
+		}
+		if ( !arr ) {
+			return pre + post;
+		}
+		return [ pre, inner + arr, base + post ].join( s );
+	}
+	function array( arr, stack ) {
+		var i = arr.length,
+			ret = new Array( i );
+
+		if ( dump.maxDepth && dump.depth > dump.maxDepth ) {
+			return "[object Array]";
+		}
+
+		this.up();
+		while ( i-- ) {
+			ret[ i ] = this.parse( arr[ i ], undefined, stack );
+		}
+		this.down();
+		return join( "[", ret, "]" );
+	}
+
+	var reName = /^function (\w+)/,
+		dump = {
+
+			// The objType is used mostly internally; you can fix a (custom) type in advance
+			parse: function( obj, objType, stack ) {
+				stack = stack || [];
+				var res, parser, parserType,
+					inStack = inArray( obj, stack );
+
+				if ( inStack !== -1 ) {
+					return "recursion(" + ( inStack - stack.length ) + ")";
+				}
+
+				objType = objType || this.typeOf( obj  );
+				parser = this.parsers[ objType ];
+				parserType = typeof parser;
+
+				if ( parserType === "function" ) {
+					stack.push( obj );
+					res = parser.call( this, obj, stack );
+					stack.pop();
+					return res;
+				}
+				return ( parserType === "string" ) ? parser : this.parsers.error;
+			},
+			typeOf: function( obj ) {
+				var type;
+				if ( obj === null ) {
+					type = "null";
+				} else if ( typeof obj === "undefined" ) {
+					type = "undefined";
+				} else if ( QUnit.is( "regexp", obj ) ) {
+					type = "regexp";
+				} else if ( QUnit.is( "date", obj ) ) {
+					type = "date";
+				} else if ( QUnit.is( "function", obj ) ) {
+					type = "function";
+				} else if ( obj.setInterval !== undefined &&
+						obj.document !== undefined &&
+						obj.nodeType === undefined ) {
+					type = "window";
+				} else if ( obj.nodeType === 9 ) {
+					type = "document";
+				} else if ( obj.nodeType ) {
+					type = "node";
+				} else if (
+
+					// Native arrays
+					toString.call( obj ) === "[object Array]" ||
+
+					// NodeList objects
+					( typeof obj.length === "number" && obj.item !== undefined &&
+					( obj.length ? obj.item( 0 ) === obj[ 0 ] : ( obj.item( 0 ) === null &&
+					obj[ 0 ] === undefined ) ) )
+				) {
+					type = "array";
+				} else if ( obj.constructor === Error.prototype.constructor ) {
+					type = "error";
+				} else {
+					type = typeof obj;
+				}
+				return type;
+			},
+
+			separator: function() {
+				return this.multiline ? this.HTML ? "<br />" : "\n" : this.HTML ? "&#160;" : " ";
+			},
+
+			// Extra can be a number, shortcut for increasing-calling-decreasing
+			indent: function( extra ) {
+				if ( !this.multiline ) {
+					return "";
+				}
+				var chr = this.indentChar;
+				if ( this.HTML ) {
+					chr = chr.replace( /\t/g, "   " ).replace( / /g, "&#160;" );
+				}
+				return new Array( this.depth + ( extra || 0 ) ).join( chr );
+			},
+			up: function( a ) {
+				this.depth += a || 1;
+			},
+			down: function( a ) {
+				this.depth -= a || 1;
+			},
+			setParser: function( name, parser ) {
+				this.parsers[ name ] = parser;
+			},
+
+			// The next 3 are exposed so you can use them
+			quote: quote,
+			literal: literal,
+			join: join,
+			depth: 1,
+			maxDepth: QUnit.config.maxDepth,
+
+			// This is the list of parsers; to modify them, use dump.setParser
+			parsers: {
+				window: "[Window]",
+				document: "[Document]",
+				error: function( error ) {
+					return "Error(\"" + error.message + "\")";
+				},
+				unknown: "[Unknown]",
+				"null": "null",
+				"undefined": "undefined",
+				"function": function( fn ) {
+					var ret = "function",
+
+						// Functions never have name in IE
+						name = "name" in fn ? fn.name : ( reName.exec( fn ) || [] )[ 1 ];
+
+					if ( name ) {
+						ret += " " + name;
+					}
+					ret += "(";
+
+					ret = [ ret, dump.parse( fn, "functionArgs" ), "){" ].join( "" );
+					return join( ret, dump.parse( fn, "functionCode" ), "}" );
+				},
+				array: array,
+				nodelist: array,
+				"arguments": array,
+				object: function( map, stack ) {
+					var keys, key, val, i, nonEnumerableProperties,
+						ret = [];
+
+					if ( dump.maxDepth && dump.depth > dump.maxDepth ) {
+						return "[object Object]";
+					}
+
+					dump.up();
+					keys = [];
+					for ( key in map ) {
+						keys.push( key );
+					}
+
+					// Some properties are not always enumerable on Error objects.
+					nonEnumerableProperties = [ "message", "name" ];
+					for ( i in nonEnumerableProperties ) {
+						key = nonEnumerableProperties[ i ];
+						if ( key in map && inArray( key, keys ) < 0 ) {
+							keys.push( key );
+						}
+					}
+					keys.sort();
+					for ( i = 0; i < keys.length; i++ ) {
+						key = keys[ i ];
+						val = map[ key ];
+						ret.push( dump.parse( key, "key" ) + ": " +
+							dump.parse( val, undefined, stack ) );
+					}
+					dump.down();
+					return join( "{", ret, "}" );
+				},
+				node: function( node ) {
+					var len, i, val,
+						open = dump.HTML ? "&lt;" : "<",
+						close = dump.HTML ? "&gt;" : ">",
+						tag = node.nodeName.toLowerCase(),
+						ret = open + tag,
+						attrs = node.attributes;
+
+					if ( attrs ) {
+						for ( i = 0, len = attrs.length; i < len; i++ ) {
+							val = attrs[ i ].nodeValue;
+
+							// IE6 includes all attributes in .attributes, even ones not explicitly
+							// set. Those have values like undefined, null, 0, false, "" or
+							// "inherit".
+							if ( val && val !== "inherit" ) {
+								ret += " " + attrs[ i ].nodeName + "=" +
+									dump.parse( val, "attribute" );
+							}
+						}
+					}
+					ret += close;
+
+					// Show content of TextNode or CDATASection
+					if ( node.nodeType === 3 || node.nodeType === 4 ) {
+						ret += node.nodeValue;
+					}
+
+					return ret + open + "/" + tag + close;
+				},
+
+				// Function calls it internally, it's the arguments part of the function
+				functionArgs: function( fn ) {
+					var args,
+						l = fn.length;
+
+					if ( !l ) {
+						return "";
+					}
+
+					args = new Array( l );
+					while ( l-- ) {
+
+						// 97 is 'a'
+						args[ l ] = String.fromCharCode( 97 + l );
+					}
+					return " " + args.join( ", " ) + " ";
+				},
+
+				// Object calls it internally, the key part of an item in a map
+				key: quote,
+
+				// Function calls it internally, it's the content of the function
+				functionCode: "[code]",
+
+				// Node calls it internally, it's an HTML attribute value
+				attribute: quote,
+				string: quote,
+				date: quote,
+				regexp: literal,
+				number: literal,
+				"boolean": literal
+			},
+
+			// If true, entities are escaped ( <, >, \t, space and \n )
+			HTML: false,
+
+			// Indentation unit
+			indentChar: "  ",
+
+			// If true, items in a collection are separated by a \n; else just a space.
+			multiline: true
+		};
+
+	return dump;
+}() );
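+
+// Example (illustrative): QUnit.dump.parse( { a: 1 } ) yields an indented,
+// multiline string like "{\n  \"a\": 1\n}"; lowering QUnit.dump.maxDepth makes
+// deeply nested objects truncate to "[object Object]" as implemented above.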
+
+// Back compat
+QUnit.jsDump = QUnit.dump;
+
+// Deprecated
+// Extend assert methods to QUnit for backwards compatibility
+( function() {
+	var i,
+		assertions = Assert.prototype;
+
+	function applyCurrent( current ) {
+		return function() {
+			var assert = new Assert( QUnit.config.current );
+			current.apply( assert, arguments );
+		};
+	}
+
+	for ( i in assertions ) {
+		QUnit[ i ] = applyCurrent( assertions[ i ] );
+	}
+}() );
+
+// For browser, export only select globals
+if ( defined.document ) {
+
+	( function() {
+		var i, l,
+			keys = [
+				"test",
+				"module",
+				"expect",
+				"asyncTest",
+				"start",
+				"stop",
+				"ok",
+				"notOk",
+				"equal",
+				"notEqual",
+				"propEqual",
+				"notPropEqual",
+				"deepEqual",
+				"notDeepEqual",
+				"strictEqual",
+				"notStrictEqual",
+				"throws",
+				"raises"
+			];
+
+		for ( i = 0, l = keys.length; i < l; i++ ) {
+			window[ keys[ i ] ] = QUnit[ keys[ i ] ];
+		}
+	}() );
+
+	window.QUnit = QUnit;
+}
+
+// For nodejs
+if ( typeof module !== "undefined" && module && module.exports ) {
+	module.exports = QUnit;
+
+	// For consistency with CommonJS environments' exports
+	module.exports.QUnit = QUnit;
+}
+
+// For CommonJS with exports, but without module.exports, like Rhino
+if ( typeof exports !== "undefined" && exports ) {
+	exports.QUnit = QUnit;
+}
+
+if ( typeof define === "function" && define.amd ) {
+	define( function() {
+		return QUnit;
+	} );
+	QUnit.config.autostart = false;
+}
+
+// Get a reference to the global object, like window in browsers
+}( ( function() {
+	return this;
+}() ) ) );
+
+( function() {
+
+// Only interact with URLs via window.location
+var location = typeof window !== "undefined" && window.location;
+if ( !location ) {
+	return;
+}
+
+var urlParams = getUrlParams();
+
+QUnit.urlParams = urlParams;
+
+// Match module/test by inclusion in an array
+QUnit.config.moduleId = [].concat( urlParams.moduleId || [] );
+QUnit.config.testId = [].concat( urlParams.testId || [] );
+
+// Exact case-insensitive match of the module name
+QUnit.config.module = urlParams.module;
+
+// Regular expression or case-insensitive substring match against "moduleName: testName"
+QUnit.config.filter = urlParams.filter;
+
+// Test order randomization
+if ( urlParams.seed === true ) {
+
+	// Generate a random seed if the option is specified without a value
+	QUnit.config.seed = Math.random().toString( 36 ).slice( 2 );
+} else if ( urlParams.seed ) {
+	QUnit.config.seed = urlParams.seed;
+}
+
+// Add URL-parameter-mapped config values with UI form rendering data
+QUnit.config.urlConfig.push(
+	{
+		id: "hidepassed",
+		label: "Hide passed tests",
+		tooltip: "Only show tests and assertions that fail. Stored as query-strings."
+	},
+	{
+		id: "noglobals",
+		label: "Check for Globals",
+		tooltip: "Enabling this will test if any test introduces new properties on the " +
+			"global object (`window` in Browsers). Stored as query-strings."
+	},
+	{
+		id: "notrycatch",
+		label: "No try-catch",
+		tooltip: "Enabling this will run tests outside of a try-catch block. Makes debugging " +
+			"exceptions in IE reasonable. Stored as query-strings."
+	}
+);
+
+QUnit.begin( function() {
+	var i, option,
+		urlConfig = QUnit.config.urlConfig;
+
+	for ( i = 0; i < urlConfig.length; i++ ) {
+
+		// Options can be either strings or objects with nonempty "id" properties
+		option = QUnit.config.urlConfig[ i ];
+		if ( typeof option !== "string" ) {
+			option = option.id;
+		}
+
+		if ( QUnit.config[ option ] === undefined ) {
+			QUnit.config[ option ] = urlParams[ option ];
+		}
+	}
+} );
+
+function getUrlParams() {
+	var i, param, name, value;
+	var urlParams = {};
+	var params = location.search.slice( 1 ).split( "&" );
+	var length = params.length;
+
+	for ( i = 0; i < length; i++ ) {
+		if ( params[ i ] ) {
+			param = params[ i ].split( "=" );
+			name = decodeURIComponent( param[ 0 ] );
+
+			// Allow just a key to turn on a flag, e.g., test.html?noglobals
+			value = param.length === 1 ||
+				decodeURIComponent( param.slice( 1 ).join( "=" ) );
+			if ( urlParams[ name ] ) {
+				urlParams[ name ] = [].concat( urlParams[ name ], value );
+			} else {
+				urlParams[ name ] = value;
+			}
+		}
+	}
+
+	return urlParams;
+}
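+
+// Example (illustrative): for test.html?noglobals&filter=core, getUrlParams()
+// returns { noglobals: true, filter: "core" }; a repeated key collects its
+// values into an array via the [].concat branch above.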
+
+// Don't load the HTML Reporter on non-browser environments
+if ( typeof window === "undefined" || !window.document ) {
+	return;
+}
+
+// Deprecated QUnit.init - Ref #530
+// Re-initialize the configuration options
+QUnit.init = function() {
+	var config = QUnit.config;
+
+	config.stats = { all: 0, bad: 0 };
+	config.moduleStats = { all: 0, bad: 0 };
+	config.started = 0;
+	config.updateRate = 1000;
+	config.blocking = false;
+	config.autostart = true;
+	config.autorun = false;
+	config.filter = "";
+	config.queue = [];
+
+	appendInterface();
+};
+
+var config = QUnit.config,
+	document = window.document,
+	collapseNext = false,
+	hasOwn = Object.prototype.hasOwnProperty,
+	unfilteredUrl = setUrl( { filter: undefined, module: undefined,
+		moduleId: undefined, testId: undefined } ),
+	defined = {
+		sessionStorage: ( function() {
+			var x = "qunit-test-string";
+			try {
+				sessionStorage.setItem( x, x );
+				sessionStorage.removeItem( x );
+				return true;
+			} catch ( e ) {
+				return false;
+			}
+		}() )
+	},
+	modulesList = [];
+
+/**
+* Escape text for attribute or text content.
+*/
+function escapeText( s ) {
+	if ( !s ) {
+		return "";
+	}
+	s = s + "";
+
+	// Both single quotes and double quotes (for attributes)
+	return s.replace( /['"<>&]/g, function( s ) {
+		switch ( s ) {
+		case "'":
+			return "'";
+		case "\"":
+			return """;
+		case "<":
+			return "<";
+		case ">":
+			return ">";
+		case "&":
+			return "&";
+		}
+	} );
+}
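+
+// Example (illustrative): escapeText( "<b>&</b>" ) returns
+// "&lt;b&gt;&amp;&lt;/b&gt;", making it safe for attribute and text content.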
+
+/**
+ * @param {HTMLElement} elem
+ * @param {string} type
+ * @param {Function} fn
+ */
+function addEvent( elem, type, fn ) {
+	if ( elem.addEventListener ) {
+
+		// Standards-based browsers
+		elem.addEventListener( type, fn, false );
+	} else if ( elem.attachEvent ) {
+
+		// Support: IE <9
+		elem.attachEvent( "on" + type, function() {
+			var event = window.event;
+			if ( !event.target ) {
+				event.target = event.srcElement || document;
+			}
+
+			fn.call( elem, event );
+		} );
+	}
+}
+
+/**
+ * @param {Array|NodeList} elems
+ * @param {string} type
+ * @param {Function} fn
+ */
+function addEvents( elems, type, fn ) {
+	var i = elems.length;
+	while ( i-- ) {
+		addEvent( elems[ i ], type, fn );
+	}
+}
+
+function hasClass( elem, name ) {
+	return ( " " + elem.className + " " ).indexOf( " " + name + " " ) >= 0;
+}
+
+function addClass( elem, name ) {
+	if ( !hasClass( elem, name ) ) {
+		elem.className += ( elem.className ? " " : "" ) + name;
+	}
+}
+
+function toggleClass( elem, name, force ) {
+	if ( force || typeof force === "undefined" && !hasClass( elem, name ) ) {
+		addClass( elem, name );
+	} else {
+		removeClass( elem, name );
+	}
+}
+
+function removeClass( elem, name ) {
+	var set = " " + elem.className + " ";
+
+	// Class name may appear multiple times
+	while ( set.indexOf( " " + name + " " ) >= 0 ) {
+		set = set.replace( " " + name + " ", " " );
+	}
+
+	// Trim for prettiness
+	elem.className = typeof set.trim === "function" ? set.trim() : set.replace( /^\s+|\s+$/g, "" );
+}
+
+function id( name ) {
+	return document.getElementById && document.getElementById( name );
+}
+
+function getUrlConfigHtml() {
+	var i, j, val,
+		escaped, escapedTooltip,
+		selection = false,
+		urlConfig = config.urlConfig,
+		urlConfigHtml = "";
+
+	for ( i = 0; i < urlConfig.length; i++ ) {
+
+		// Options can be either strings or objects with nonempty "id" properties
+		val = config.urlConfig[ i ];
+		if ( typeof val === "string" ) {
+			val = {
+				id: val,
+				label: val
+			};
+		}
+
+		escaped = escapeText( val.id );
+		escapedTooltip = escapeText( val.tooltip );
+
+		if ( !val.value || typeof val.value === "string" ) {
+			urlConfigHtml += "<input id='qunit-urlconfig-" + escaped +
+				"' name='" + escaped + "' type='checkbox'" +
+				( val.value ? " value='" + escapeText( val.value ) + "'" : "" ) +
+				( config[ val.id ] ? " checked='checked'" : "" ) +
+				" title='" + escapedTooltip + "' /><label for='qunit-urlconfig-" + escaped +
+				"' title='" + escapedTooltip + "'>" + val.label + "</label>";
+		} else {
+			urlConfigHtml += "<label for='qunit-urlconfig-" + escaped +
+				"' title='" + escapedTooltip + "'>" + val.label +
+				": </label><select id='qunit-urlconfig-" + escaped +
+				"' name='" + escaped + "' title='" + escapedTooltip + "'><option></option>";
+
+			if ( QUnit.is( "array", val.value ) ) {
+				for ( j = 0; j < val.value.length; j++ ) {
+					escaped = escapeText( val.value[ j ] );
+					urlConfigHtml += "<option value='" + escaped + "'" +
+						( config[ val.id ] === val.value[ j ] ?
+							( selection = true ) && " selected='selected'" : "" ) +
+						">" + escaped + "</option>";
+				}
+			} else {
+				for ( j in val.value ) {
+					if ( hasOwn.call( val.value, j ) ) {
+						urlConfigHtml += "<option value='" + escapeText( j ) + "'" +
+							( config[ val.id ] === j ?
+								( selection = true ) && " selected='selected'" : "" ) +
+							">" + escapeText( val.value[ j ] ) + "</option>";
+					}
+				}
+			}
+			if ( config[ val.id ] && !selection ) {
+				escaped = escapeText( config[ val.id ] );
+				urlConfigHtml += "<option value='" + escaped +
+					"' selected='selected' disabled='disabled'>" + escaped + "</option>";
+			}
+			urlConfigHtml += "</select>";
+		}
+	}
+
+	return urlConfigHtml;
+}
+
+// Handle "click" events on toolbar checkboxes and "change" for select menus.
+// Updates the URL with the new state of `config.urlConfig` values.
+function toolbarChanged() {
+	var updatedUrl, value, tests,
+		field = this,
+		params = {};
+
+	// Detect if field is a select menu or a checkbox
+	if ( "selectedIndex" in field ) {
+		value = field.options[ field.selectedIndex ].value || undefined;
+	} else {
+		value = field.checked ? ( field.defaultValue || true ) : undefined;
+	}
+
+	params[ field.name ] = value;
+	updatedUrl = setUrl( params );
+
+	// Check if we can apply the change without a page refresh
+	if ( "hidepassed" === field.name && "replaceState" in window.history ) {
+		QUnit.urlParams[ field.name ] = value;
+		config[ field.name ] = value || false;
+		tests = id( "qunit-tests" );
+		if ( tests ) {
+			toggleClass( tests, "hidepass", value || false );
+		}
+		window.history.replaceState( null, "", updatedUrl );
+	} else {
+		window.location = updatedUrl;
+	}
+}
+
+function setUrl( params ) {
+	var key, arrValue, i,
+		querystring = "?",
+		location = window.location;
+
+	params = QUnit.extend( QUnit.extend( {}, QUnit.urlParams ), params );
+
+	for ( key in params ) {
+
+		// Skip inherited or undefined properties
+		if ( hasOwn.call( params, key ) && params[ key ] !== undefined ) {
+
+			// Output a parameter for each value of this key (but usually just one)
+			arrValue = [].concat( params[ key ] );
+			for ( i = 0; i < arrValue.length; i++ ) {
+				querystring += encodeURIComponent( key );
+				if ( arrValue[ i ] !== true ) {
+					querystring += "=" + encodeURIComponent( arrValue[ i ] );
+				}
+				querystring += "&";
+			}
+		}
+	}
+	return location.protocol + "//" + location.host +
+		location.pathname + querystring.slice( 0, -1 );
+}
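+
+// Example (illustrative): setUrl( { filter: "core" } ) rebuilds the current URL
+// with filter=core merged into QUnit.urlParams; values equal to true are emitted
+// as bare keys (e.g. "?noglobals"), matching getUrlParams() above.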
+
+function applyUrlParams() {
+	var selectedModule,
+		modulesList = id( "qunit-modulefilter" ),
+		filter = id( "qunit-filter-input" ).value;
+
+	selectedModule = modulesList ?
+		decodeURIComponent( modulesList.options[ modulesList.selectedIndex ].value ) :
+		undefined;
+
+	window.location = setUrl( {
+		module: ( selectedModule === "" ) ? undefined : selectedModule,
+		filter: ( filter === "" ) ? undefined : filter,
+
+		// Remove moduleId and testId filters
+		moduleId: undefined,
+		testId: undefined
+	} );
+}
+
+function toolbarUrlConfigContainer() {
+	var urlConfigContainer = document.createElement( "span" );
+
+	urlConfigContainer.innerHTML = getUrlConfigHtml();
+	addClass( urlConfigContainer, "qunit-url-config" );
+
+	// For oldIE support:
+	// * Add handlers to the individual elements instead of the container
+	// * Use "click" instead of "change" for checkboxes
+	addEvents( urlConfigContainer.getElementsByTagName( "input" ), "click", toolbarChanged );
+	addEvents( urlConfigContainer.getElementsByTagName( "select" ), "change", toolbarChanged );
+
+	return urlConfigContainer;
+}
+
+function toolbarLooseFilter() {
+	var filter = document.createElement( "form" ),
+		label = document.createElement( "label" ),
+		input = document.createElement( "input" ),
+		button = document.createElement( "button" );
+
+	addClass( filter, "qunit-filter" );
+
+	label.innerHTML = "Filter: ";
+
+	input.type = "text";
+	input.value = config.filter || "";
+	input.name = "filter";
+	input.id = "qunit-filter-input";
+
+	button.innerHTML = "Go";
+
+	label.appendChild( input );
+
+	filter.appendChild( label );
+	filter.appendChild( button );
+	addEvent( filter, "submit", function( ev ) {
+		applyUrlParams();
+
+		if ( ev && ev.preventDefault ) {
+			ev.preventDefault();
+		}
+
+		return false;
+	} );
+
+	return filter;
+}
+
+function toolbarModuleFilterHtml() {
+	var i,
+		moduleFilterHtml = "";
+
+	if ( !modulesList.length ) {
+		return false;
+	}
+
+	moduleFilterHtml += "<label for='qunit-modulefilter'>Module: </label>" +
+		"<select id='qunit-modulefilter' name='modulefilter'><option value='' " +
+		( QUnit.urlParams.module === undefined ? "selected='selected'" : "" ) +
+		">< All Modules ></option>";
+
+	for ( i = 0; i < modulesList.length; i++ ) {
+		moduleFilterHtml += "<option value='" +
+			escapeText( encodeURIComponent( modulesList[ i ] ) ) + "' " +
+			( QUnit.urlParams.module === modulesList[ i ] ? "selected='selected'" : "" ) +
+			">" + escapeText( modulesList[ i ] ) + "</option>";
+	}
+	moduleFilterHtml += "</select>";
+
+	return moduleFilterHtml;
+}
+
+function toolbarModuleFilter() {
+	var toolbar = id( "qunit-testrunner-toolbar" ),
+		moduleFilter = document.createElement( "span" ),
+		moduleFilterHtml = toolbarModuleFilterHtml();
+
+	if ( !toolbar || !moduleFilterHtml ) {
+		return false;
+	}
+
+	moduleFilter.setAttribute( "id", "qunit-modulefilter-container" );
+	moduleFilter.innerHTML = moduleFilterHtml;
+
+	addEvent( moduleFilter.lastChild, "change", applyUrlParams );
+
+	toolbar.appendChild( moduleFilter );
+}
+
+function appendToolbar() {
+	var toolbar = id( "qunit-testrunner-toolbar" );
+
+	if ( toolbar ) {
+		toolbar.appendChild( toolbarUrlConfigContainer() );
+		toolbar.appendChild( toolbarLooseFilter() );
+		toolbarModuleFilter();
+	}
+}
+
+function appendHeader() {
+	var header = id( "qunit-header" );
+
+	if ( header ) {
+		header.innerHTML = "<a href='" + escapeText( unfilteredUrl ) + "'>" + header.innerHTML +
+			"</a> ";
+	}
+}
+
+function appendBanner() {
+	var banner = id( "qunit-banner" );
+
+	if ( banner ) {
+		banner.className = "";
+	}
+}
+
+function appendTestResults() {
+	var tests = id( "qunit-tests" ),
+		result = id( "qunit-testresult" );
+
+	if ( result ) {
+		result.parentNode.removeChild( result );
+	}
+
+	if ( tests ) {
+		tests.innerHTML = "";
+		result = document.createElement( "p" );
+		result.id = "qunit-testresult";
+		result.className = "result";
+		tests.parentNode.insertBefore( result, tests );
+		result.innerHTML = "Running...<br /> ";
+	}
+}
+
+function storeFixture() {
+	var fixture = id( "qunit-fixture" );
+	if ( fixture ) {
+		config.fixture = fixture.innerHTML;
+	}
+}
+
+function appendFilteredTest() {
+	var testId = QUnit.config.testId;
+	if ( !testId || testId.length <= 0 ) {
+		return "";
+	}
+	return "<div id='qunit-filteredTest'>Rerunning selected tests: " +
+		escapeText( testId.join( ", " ) ) +
+		" <a id='qunit-clearFilter' href='" +
+		escapeText( unfilteredUrl ) +
+		"'>Run all tests</a></div>";
+}
+
+function appendUserAgent() {
+	var userAgent = id( "qunit-userAgent" );
+
+	if ( userAgent ) {
+		userAgent.innerHTML = "";
+		userAgent.appendChild(
+			document.createTextNode(
+				"QUnit " + QUnit.version + "; " + navigator.userAgent
+			)
+		);
+	}
+}
+
+function appendInterface() {
+	var qunit = id( "qunit" );
+
+	if ( qunit ) {
+		qunit.innerHTML =
+			"<h1 id='qunit-header'>" + escapeText( document.title ) + "</h1>" +
+			"<h2 id='qunit-banner'></h2>" +
+			"<div id='qunit-testrunner-toolbar'></div>" +
+			appendFilteredTest() +
+			"<h2 id='qunit-userAgent'></h2>" +
+			"<ol id='qunit-tests'></ol>";
+	}
+
+	appendHeader();
+	appendBanner();
+	appendTestResults();
+	appendUserAgent();
+	appendToolbar();
+}
+
+function appendTestsList( modules ) {
+	var i, l, x, z, test, moduleObj;
+
+	for ( i = 0, l = modules.length; i < l; i++ ) {
+		moduleObj = modules[ i ];
+
+		for ( x = 0, z = moduleObj.tests.length; x < z; x++ ) {
+			test = moduleObj.tests[ x ];
+
+			appendTest( test.name, test.testId, moduleObj.name );
+		}
+	}
+}
+
+function appendTest( name, testId, moduleName ) {
+	var title, rerunTrigger, testBlock, assertList,
+		tests = id( "qunit-tests" );
+
+	if ( !tests ) {
+		return;
+	}
+
+	title = document.createElement( "strong" );
+	title.innerHTML = getNameHtml( name, moduleName );
+
+	rerunTrigger = document.createElement( "a" );
+	rerunTrigger.innerHTML = "Rerun";
+	rerunTrigger.href = setUrl( { testId: testId } );
+
+	testBlock = document.createElement( "li" );
+	testBlock.appendChild( title );
+	testBlock.appendChild( rerunTrigger );
+	testBlock.id = "qunit-test-output-" + testId;
+
+	assertList = document.createElement( "ol" );
+	assertList.className = "qunit-assert-list";
+
+	testBlock.appendChild( assertList );
+
+	tests.appendChild( testBlock );
+}
+
+// HTML Reporter initialization and load
+QUnit.begin( function( details ) {
+	var i, moduleObj, tests;
+
+	// Sort modules by name for the picker
+	for ( i = 0; i < details.modules.length; i++ ) {
+		moduleObj = details.modules[ i ];
+		if ( moduleObj.name ) {
+			modulesList.push( moduleObj.name );
+		}
+	}
+	modulesList.sort( function( a, b ) {
+		return a.localeCompare( b );
+	} );
+
+	// Capture fixture HTML from the page
+	storeFixture();
+
+	// Initialize QUnit elements
+	appendInterface();
+	appendTestsList( details.modules );
+	tests = id( "qunit-tests" );
+	if ( tests && config.hidepassed ) {
+		addClass( tests, "hidepass" );
+	}
+} );
+
+QUnit.done( function( details ) {
+	var i, key,
+		banner = id( "qunit-banner" ),
+		tests = id( "qunit-tests" ),
+		html = [
+			"Tests completed in ",
+			details.runtime,
+			" milliseconds.<br />",
+			"<span class='passed'>",
+			details.passed,
+			"</span> assertions of <span class='total'>",
+			details.total,
+			"</span> passed, <span class='failed'>",
+			details.failed,
+			"</span> failed."
+		].join( "" );
+
+	if ( banner ) {
+		banner.className = details.failed ? "qunit-fail" : "qunit-pass";
+	}
+
+	if ( tests ) {
+		id( "qunit-testresult" ).innerHTML = html;
+	}
+
+	if ( config.altertitle && document.title ) {
+
+		// Show ✔ for a passing and ✖ for a failing suite result in the title;
+		// use escape sequences in case file gets loaded with non-utf-8-charset
+		document.title = [
+			( details.failed ? "\u2716" : "\u2714" ),
+			document.title.replace( /^[\u2714\u2716] /i, "" )
+		].join( " " );
+	}
+
+	// Clear own sessionStorage items if all tests passed
+	if ( config.reorder && defined.sessionStorage && details.failed === 0 ) {
+		for ( i = 0; i < sessionStorage.length; i++ ) {
+			key = sessionStorage.key( i++ );
+			if ( key.indexOf( "qunit-test-" ) === 0 ) {
+				sessionStorage.removeItem( key );
+			}
+		}
+	}
+
+	// Scroll back to top to show results
+	if ( config.scrolltop && window.scrollTo ) {
+		window.scrollTo( 0, 0 );
+	}
+} );
+
+function getNameHtml( name, module ) {
+	var nameHtml = "";
+
+	if ( module ) {
+		nameHtml = "<span class='module-name'>" + escapeText( module ) + "</span>: ";
+	}
+
+	nameHtml += "<span class='test-name'>" + escapeText( name ) + "</span>";
+
+	return nameHtml;
+}
+
+QUnit.testStart( function( details ) {
+	var running, testBlock, bad;
+
+	testBlock = id( "qunit-test-output-" + details.testId );
+	if ( testBlock ) {
+		testBlock.className = "running";
+	} else {
+
+		// Report later registered tests
+		appendTest( details.name, details.testId, details.module );
+	}
+
+	running = id( "qunit-testresult" );
+	if ( running ) {
+		bad = QUnit.config.reorder && defined.sessionStorage &&
+			+sessionStorage.getItem( "qunit-test-" + details.module + "-" + details.name );
+
+		running.innerHTML = ( bad ?
+			"Rerunning previously failed test: <br />" :
+			"Running: <br />" ) +
+			getNameHtml( details.name, details.module );
+	}
+
+} );
+
+function stripHtml( string ) {
+
+	// Strip tags, HTML entities and whitespace
+	return string.replace( /<\/?[^>]+(>|$)/g, "" ).replace( /\&quot;/g, "" ).replace( /\s+/g, "" );
+}
+
+QUnit.log( function( details ) {
+	var assertList, assertLi,
+		message, expected, actual, diff,
+		showDiff = false,
+		testItem = id( "qunit-test-output-" + details.testId );
+
+	if ( !testItem ) {
+		return;
+	}
+
+	message = escapeText( details.message ) || ( details.result ? "okay" : "failed" );
+	message = "<span class='test-message'>" + message + "</span>";
+	message += "<span class='runtime'>@ " + details.runtime + " ms</span>";
+
+	// When pushFailure is the caller, details.expected is not provided, which
+	// implicitly means the expected value and the diff are not shown either.
+	// Also, we need to check that details.expected exists, as it can exist and still be undefined
+	if ( !details.result && hasOwn.call( details, "expected" ) ) {
+		if ( details.negative ) {
+			expected = "NOT " + QUnit.dump.parse( details.expected );
+		} else {
+			expected = QUnit.dump.parse( details.expected );
+		}
+
+		actual = QUnit.dump.parse( details.actual );
+		message += "<table><tr class='test-expected'><th>Expected: </th><td><pre>" +
+			escapeText( expected ) +
+			"</pre></td></tr>";
+
+		if ( actual !== expected ) {
+
+			message += "<tr class='test-actual'><th>Result: </th><td><pre>" +
+				escapeText( actual ) + "</pre></td></tr>";
+
+			// Don't show diff if actual or expected are booleans
+			if ( !( /^(true|false)$/.test( actual ) ) &&
+					!( /^(true|false)$/.test( expected ) ) ) {
+				diff = QUnit.diff( expected, actual );
+				showDiff = stripHtml( diff ).length !==
+					stripHtml( expected ).length +
+					stripHtml( actual ).length;
+			}
+
+			// Don't show diff if expected and actual are totally different
+			if ( showDiff ) {
+				message += "<tr class='test-diff'><th>Diff: </th><td><pre>" +
+					diff + "</pre></td></tr>";
+			}
+		} else if ( expected.indexOf( "[object Array]" ) !== -1 ||
+				expected.indexOf( "[object Object]" ) !== -1 ) {
+			message += "<tr class='test-message'><th>Message: </th><td>" +
+				"Diff suppressed as the depth of object is more than current max depth (" +
+				QUnit.config.maxDepth + ").<p>Hint: Use <code>QUnit.dump.maxDepth</code> to " +
+				" run with a higher max depth or <a href='" +
+				escapeText( setUrl( { maxDepth: -1 } ) ) + "'>" +
+				"Rerun</a> without max depth.</p></td></tr>";
+		} else {
+			message += "<tr class='test-message'><th>Message: </th><td>" +
+				"Diff suppressed as the expected and actual results have an equivalent" +
+				" serialization</td></tr>";
+		}
+
+		if ( details.source ) {
+			message += "<tr class='test-source'><th>Source: </th><td><pre>" +
+				escapeText( details.source ) + "</pre></td></tr>";
+		}
+
+		message += "</table>";
+
+	// This occurs when pushFailure is set and we have an extracted stack trace
+	} else if ( !details.result && details.source ) {
+		message += "<table>" +
+			"<tr class='test-source'><th>Source: </th><td><pre>" +
+			escapeText( details.source ) + "</pre></td></tr>" +
+			"</table>";
+	}
+
+	assertList = testItem.getElementsByTagName( "ol" )[ 0 ];
+
+	assertLi = document.createElement( "li" );
+	assertLi.className = details.result ? "pass" : "fail";
+	assertLi.innerHTML = message;
+	assertList.appendChild( assertLi );
+} );
+
+QUnit.testDone( function( details ) {
+	var testTitle, time, testItem, assertList,
+		good, bad, testCounts, skipped, sourceName,
+		tests = id( "qunit-tests" );
+
+	if ( !tests ) {
+		return;
+	}
+
+	testItem = id( "qunit-test-output-" + details.testId );
+
+	assertList = testItem.getElementsByTagName( "ol" )[ 0 ];
+
+	good = details.passed;
+	bad = details.failed;
+
+	// Store result when possible
+	if ( config.reorder && defined.sessionStorage ) {
+		if ( bad ) {
+			sessionStorage.setItem( "qunit-test-" + details.module + "-" + details.name, bad );
+		} else {
+			sessionStorage.removeItem( "qunit-test-" + details.module + "-" + details.name );
+		}
+	}
+
+	if ( bad === 0 ) {
+
+		// Collapse the passing tests
+		addClass( assertList, "qunit-collapsed" );
+	} else if ( bad && config.collapse && !collapseNext ) {
+
+		// Skip collapsing the first failing test
+		collapseNext = true;
+	} else {
+
+		// Collapse remaining tests
+		addClass( assertList, "qunit-collapsed" );
+	}
+
+	// The testItem.firstChild is the test name
+	testTitle = testItem.firstChild;
+
+	testCounts = bad ?
+		"<b class='failed'>" + bad + "</b>, " + "<b class='passed'>" + good + "</b>, " :
+		"";
+
+	testTitle.innerHTML += " <b class='counts'>(" + testCounts +
+		details.assertions.length + ")</b>";
+
+	if ( details.skipped ) {
+		testItem.className = "skipped";
+		skipped = document.createElement( "em" );
+		skipped.className = "qunit-skipped-label";
+		skipped.innerHTML = "skipped";
+		testItem.insertBefore( skipped, testTitle );
+	} else {
+		addEvent( testTitle, "click", function() {
+			toggleClass( assertList, "qunit-collapsed" );
+		} );
+
+		testItem.className = bad ? "fail" : "pass";
+
+		time = document.createElement( "span" );
+		time.className = "runtime";
+		time.innerHTML = details.runtime + " ms";
+		testItem.insertBefore( time, assertList );
+	}
+
+	// Show the source of the test when showing assertions
+	if ( details.source ) {
+		sourceName = document.createElement( "p" );
+		sourceName.innerHTML = "<strong>Source: </strong>" + details.source;
+		addClass( sourceName, "qunit-source" );
+		if ( bad === 0 ) {
+			addClass( sourceName, "qunit-collapsed" );
+		}
+		addEvent( testTitle, "click", function() {
+			toggleClass( sourceName, "qunit-collapsed" );
+		} );
+		testItem.appendChild( sourceName );
+	}
+} );
+
+// Avoid readyState issue with phantomjs
+// Ref: #818
+var notPhantom = ( function( p ) {
+	return !( p && p.version && p.version.major > 0 );
+} )( window.phantom );
+
+if ( notPhantom && document.readyState === "complete" ) {
+	QUnit.load();
+} else {
+	addEvent( window, "load", QUnit.load );
+}
+
+/*
+ * This file is a modified version of google-diff-match-patch's JavaScript implementation
+ * (https://code.google.com/p/google-diff-match-patch/source/browse/trunk/javascript/diff_match_patch_uncompressed.js),
+ * modifications are licensed as more fully set forth in LICENSE.txt.
+ *
+ * The original source of google-diff-match-patch is attributable and licensed as follows:
+ *
+ * Copyright 2006 Google Inc.
+ * https://code.google.com/p/google-diff-match-patch/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * More Info:
+ *  https://code.google.com/p/google-diff-match-patch/
+ *
+ * Usage: QUnit.diff(expected, actual)
+ *
+ */
+QUnit.diff = ( function() {
+	function DiffMatchPatch() {
+	}
+
+	//  DIFF FUNCTIONS
+
+	/**
+	 * The data structure representing a diff is an array of tuples:
+	 * [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']]
+	 * which means: delete 'Hello', add 'Goodbye' and keep ' world.'
+	 */
+	var DIFF_DELETE = -1,
+		DIFF_INSERT = 1,
+		DIFF_EQUAL = 0;
+
+	/**
+	 * Find the differences between two texts.  Simplifies the problem by stripping
+	 * any common prefix or suffix off the texts before diffing.
+	 * @param {string} text1 Old string to be diffed.
+	 * @param {string} text2 New string to be diffed.
+	 * @param {boolean=} optChecklines Optional speedup flag. If present and false,
+	 *     then don't run a line-level diff first to identify the changed areas.
+	 *     Defaults to true, which does a faster, slightly less optimal diff.
+	 * @return {!Array.<!DiffMatchPatch.Diff>} Array of diff tuples.
+	 */
+	DiffMatchPatch.prototype.DiffMain = function( text1, text2, optChecklines ) {
+		var deadline, checklines, commonlength,
+			commonprefix, commonsuffix, diffs;
+
+		// The diff must be complete in up to 1 second.
+		deadline = ( new Date() ).getTime() + 1000;
+
+		// Check for null inputs.
+		if ( text1 === null || text2 === null ) {
+			throw new Error( "Null input. (DiffMain)" );
+		}
+
+		// Check for equality (speedup).
+		if ( text1 === text2 ) {
+			if ( text1 ) {
+				return [
+					[ DIFF_EQUAL, text1 ]
+				];
+			}
+			return [];
+		}
+
+		if ( typeof optChecklines === "undefined" ) {
+			optChecklines = true;
+		}
+
+		checklines = optChecklines;
+
+		// Trim off common prefix (speedup).
+		commonlength = this.diffCommonPrefix( text1, text2 );
+		commonprefix = text1.substring( 0, commonlength );
+		text1 = text1.substring( commonlength );
+		text2 = text2.substring( commonlength );
+
+		// Trim off common suffix (speedup).
+		commonlength = this.diffCommonSuffix( text1, text2 );
+		commonsuffix = text1.substring( text1.length - commonlength );
+		text1 = text1.substring( 0, text1.length - commonlength );
+		text2 = text2.substring( 0, text2.length - commonlength );
+
+		// Compute the diff on the middle block.
+		diffs = this.diffCompute( text1, text2, checklines, deadline );
+
+		// Restore the prefix and suffix.
+		if ( commonprefix ) {
+			diffs.unshift( [ DIFF_EQUAL, commonprefix ] );
+		}
+		if ( commonsuffix ) {
+			diffs.push( [ DIFF_EQUAL, commonsuffix ] );
+		}
+		this.diffCleanupMerge( diffs );
+		return diffs;
+	};
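+
+	// Example (illustrative): new DiffMatchPatch().DiffMain( "cat", "cart" ) returns
+	// [ [ DIFF_EQUAL, "ca" ], [ DIFF_INSERT, "r" ], [ DIFF_EQUAL, "t" ] ].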
+
+	/**
+	 * Reduce the number of edits by eliminating operationally trivial equalities.
+	 * @param {!Array.<!DiffMatchPatch.Diff>} diffs Array of diff tuples.
+	 */
+	DiffMatchPatch.prototype.diffCleanupEfficiency = function( diffs ) {
+		var changes, equalities, equalitiesLength, lastequality,
+			pointer, preIns, preDel, postIns, postDel;
+		changes = false;
+		equalities = []; // Stack of indices where equalities are found.
+		equalitiesLength = 0; // Keeping our own length var is faster in JS.
+		/** @type {?string} */
+		lastequality = null;
+
+		// Always equal to diffs[equalities[equalitiesLength - 1]][1]
+		pointer = 0; // Index of current position.
+
+		// Is there an insertion operation before the last equality.
+		preIns = false;
+
+		// Is there a deletion operation before the last equality.
+		preDel = false;
+
+		// Is there an insertion operation after the last equality.
+		postIns = false;
+
+		// Is there a deletion operation after the last equality.
+		postDel = false;
+		while ( pointer < diffs.length ) {
+
+			// Equality found.
+			if ( diffs[ pointer ][ 0 ] === DIFF_EQUAL ) {
+				if ( diffs[ pointer ][ 1 ].length < 4 && ( postIns || postDel ) ) {
+
+					// Candidate found.
+					equalities[ equalitiesLength++ ] = pointer;
+					preIns = postIns;
+					preDel = postDel;
+					lastequality = diffs[ pointer ][ 1 ];
+				} else {
+
+					// Not a candidate, and can never become one.
+					equalitiesLength = 0;
+					lastequality = null;
+				}
+				postIns = postDel = false;
+
+			// An insertion or deletion.
+			} else {
+
+				if ( diffs[ pointer ][ 0 ] === DIFF_DELETE ) {
+					postDel = true;
+				} else {
+					postIns = true;
+				}
+
+				/*
+				 * Five types to be split:
+				 * <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
+				 * <ins>A</ins>X<ins>C</ins><del>D</del>
+				 * <ins>A</ins><del>B</del>X<ins>C</ins>
+				 * <del>A</del>X<ins>C</ins><del>D</del>
+				 * <ins>A</ins><del>B</del>X<del>C</del>
+				 */
+				if ( lastequality && ( ( preIns && preDel && postIns && postDel ) ||
+						( ( lastequality.length < 2 ) &&
+						( preIns + preDel + postIns + postDel ) === 3 ) ) ) {
+
+					// Duplicate record.
+					diffs.splice(
+						equalities[ equalitiesLength - 1 ],
+						0,
+						[ DIFF_DELETE, lastequality ]
+					);
+
+					// Change second copy to insert.
+					diffs[ equalities[ equalitiesLength - 1 ] + 1 ][ 0 ] = DIFF_INSERT;
+					equalitiesLength--; // Throw away the equality we just deleted;
+					lastequality = null;
+					if ( preIns && preDel ) {
+
+						// No changes made which could affect previous entry, keep going.
+						postIns = postDel = true;
+						equalitiesLength = 0;
+					} else {
+						equalitiesLength--; // Throw away the previous equality.
+						pointer = equalitiesLength > 0 ? equalities[ equalitiesLength - 1 ] : -1;
+						postIns = postDel = false;
+					}
+					changes = true;
+				}
+			}
+			pointer++;
+		}
+
+		if ( changes ) {
+			this.diffCleanupMerge( diffs );
+		}
+	};
+
+	/**
+	 * Convert a diff array into a pretty HTML report.
+	 * @param {!Array.<!DiffMatchPatch.Diff>} diffs Array of diff tuples.
+	 * @return {string} HTML representation.
+	 */
+	DiffMatchPatch.prototype.diffPrettyHtml = function( diffs ) {
+		var op, data, x,
+			html = [];
+		for ( x = 0; x < diffs.length; x++ ) {
+			op = diffs[ x ][ 0 ]; // Operation (insert, delete, equal)
+			data = diffs[ x ][ 1 ]; // Text of change.
+			switch ( op ) {
+			case DIFF_INSERT:
+				html[ x ] = "<ins>" + escapeText( data ) + "</ins>";
+				break;
+			case DIFF_DELETE:
+				html[ x ] = "<del>" + escapeText( data ) + "</del>";
+				break;
+			case DIFF_EQUAL:
+				html[ x ] = "<span>" + escapeText( data ) + "</span>";
+				break;
+			}
+		}
+		return html.join( "" );
+	};
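+
+	// A minimal usage sketch (assuming dmp = new DiffMatchPatch(); values
+	// illustrative, not taken from the upstream test suite):
+	//   dmp.diffPrettyHtml( [ [ DIFF_DELETE, "cat" ], [ DIFF_INSERT, "dog" ] ] )
+	//   -> "<del>cat</del><ins>dog</ins>"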
+
+	/**
+	 * Determine the common prefix of two strings.
+	 * @param {string} text1 First string.
+	 * @param {string} text2 Second string.
+	 * @return {number} The number of characters common to the start of each
+	 *     string.
+	 */
+	DiffMatchPatch.prototype.diffCommonPrefix = function( text1, text2 ) {
+		var pointermid, pointermax, pointermin, pointerstart;
+
+		// Quick check for common null cases.
+		if ( !text1 || !text2 || text1.charAt( 0 ) !== text2.charAt( 0 ) ) {
+			return 0;
+		}
+
+		// Binary search.
+		// Performance analysis: https://neil.fraser.name/news/2007/10/09/
+		pointermin = 0;
+		pointermax = Math.min( text1.length, text2.length );
+		pointermid = pointermax;
+		pointerstart = 0;
+		while ( pointermin < pointermid ) {
+			if ( text1.substring( pointerstart, pointermid ) ===
+					text2.substring( pointerstart, pointermid ) ) {
+				pointermin = pointermid;
+				pointerstart = pointermin;
+			} else {
+				pointermax = pointermid;
+			}
+			pointermid = Math.floor( ( pointermax - pointermin ) / 2 + pointermin );
+		}
+		return pointermid;
+	};
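+
+	// A minimal sketch of the binary search above (assuming dmp = new
+	// DiffMatchPatch(); values illustrative):
+	//   dmp.diffCommonPrefix( "1234abcdef", "1234xyz" ) -> 4
+	//   dmp.diffCommonPrefix( "abc", "xyz" ) -> 0 (caught by the quick check)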
+
+	/**
+	 * Determine the common suffix of two strings.
+	 * @param {string} text1 First string.
+	 * @param {string} text2 Second string.
+	 * @return {number} The number of characters common to the end of each string.
+	 */
+	DiffMatchPatch.prototype.diffCommonSuffix = function( text1, text2 ) {
+		var pointermid, pointermax, pointermin, pointerend;
+
+		// Quick check for common null cases.
+		if ( !text1 ||
+				!text2 ||
+				text1.charAt( text1.length - 1 ) !== text2.charAt( text2.length - 1 ) ) {
+			return 0;
+		}
+
+		// Binary search.
+		// Performance analysis: https://neil.fraser.name/news/2007/10/09/
+		pointermin = 0;
+		pointermax = Math.min( text1.length, text2.length );
+		pointermid = pointermax;
+		pointerend = 0;
+		while ( pointermin < pointermid ) {
+			if ( text1.substring( text1.length - pointermid, text1.length - pointerend ) ===
+					text2.substring( text2.length - pointermid, text2.length - pointerend ) ) {
+				pointermin = pointermid;
+				pointerend = pointermin;
+			} else {
+				pointermax = pointermid;
+			}
+			pointermid = Math.floor( ( pointermax - pointermin ) / 2 + pointermin );
+		}
+		return pointermid;
+	};
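+
+	// Mirrors diffCommonPrefix from the other end of the strings (assuming
+	// dmp = new DiffMatchPatch(); values illustrative):
+	//   dmp.diffCommonSuffix( "abcdef1234", "xyz1234" ) -> 4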
+
+	/**
+	 * Find the differences between two texts.  Assumes that the texts do not
+	 * have any common prefix or suffix.
+	 * @param {string} text1 Old string to be diffed.
+	 * @param {string} text2 New string to be diffed.
+	 * @param {boolean} checklines Speedup flag.  If false, then don't run a
+	 *     line-level diff first to identify the changed areas.
+	 *     If true, then run a faster, slightly less optimal diff.
+	 * @param {number} deadline Time when the diff should be complete by.
+	 * @return {!Array.<!DiffMatchPatch.Diff>} Array of diff tuples.
+	 * @private
+	 */
+	DiffMatchPatch.prototype.diffCompute = function( text1, text2, checklines, deadline ) {
+		var diffs, longtext, shorttext, i, hm,
+			text1A, text2A, text1B, text2B,
+			midCommon, diffsA, diffsB;
+
+		if ( !text1 ) {
+
+			// Just add some text (speedup).
+			return [
+				[ DIFF_INSERT, text2 ]
+			];
+		}
+
+		if ( !text2 ) {
+
+			// Just delete some text (speedup).
+			return [
+				[ DIFF_DELETE, text1 ]
+			];
+		}
+
+		longtext = text1.length > text2.length ? text1 : text2;
+		shorttext = text1.length > text2.length ? text2 : text1;
+		i = longtext.indexOf( shorttext );
+		if ( i !== -1 ) {
+
+			// Shorter text is inside the longer text (speedup).
+			diffs = [
+				[ DIFF_INSERT, longtext.substring( 0, i ) ],
+				[ DIFF_EQUAL, shorttext ],
+				[ DIFF_INSERT, longtext.substring( i + shorttext.length ) ]
+			];
+
+			// Swap insertions for deletions if diff is reversed.
+			if ( text1.length > text2.length ) {
+				diffs[ 0 ][ 0 ] = diffs[ 2 ][ 0 ] = DIFF_DELETE;
+			}
+			return diffs;
+		}
+
+		if ( shorttext.length === 1 ) {
+
+			// Single character string.
+			// After the previous speedup, the character can't be an equality.
+			return [
+				[ DIFF_DELETE, text1 ],
+				[ DIFF_INSERT, text2 ]
+			];
+		}
+
+		// Check to see if the problem can be split in two.
+		hm = this.diffHalfMatch( text1, text2 );
+		if ( hm ) {
+
+			// A half-match was found, sort out the return data.
+			text1A = hm[ 0 ];
+			text1B = hm[ 1 ];
+			text2A = hm[ 2 ];
+			text2B = hm[ 3 ];
+			midCommon = hm[ 4 ];
+
+			// Send both pairs off for separate processing.
+			diffsA = this.DiffMain( text1A, text2A, checklines, deadline );
+			diffsB = this.DiffMain( text1B, text2B, checklines, deadline );
+
+			// Merge the results.
+			return diffsA.concat( [
+				[ DIFF_EQUAL, midCommon ]
+			], diffsB );
+		}
+
+		if ( checklines && text1.length > 100 && text2.length > 100 ) {
+			return this.diffLineMode( text1, text2, deadline );
+		}
+
+		return this.diffBisect( text1, text2, deadline );
+	};
+
+	/**
+	 * Do the two texts share a substring which is at least half the length of the
+	 * longer text?
+	 * This speedup can produce non-minimal diffs.
+	 * @param {string} text1 First string.
+	 * @param {string} text2 Second string.
+	 * @return {Array.<string>} Five element Array, containing the prefix of
+	 *     text1, the suffix of text1, the prefix of text2, the suffix of
+	 *     text2 and the common middle.  Or null if there was no match.
+	 * @private
+	 */
+	DiffMatchPatch.prototype.diffHalfMatch = function( text1, text2 ) {
+		var longtext, shorttext, dmp,
+			text1A, text2B, text2A, text1B, midCommon,
+			hm1, hm2, hm;
+
+		longtext = text1.length > text2.length ? text1 : text2;
+		shorttext = text1.length > text2.length ? text2 : text1;
+		if ( longtext.length < 4 || shorttext.length * 2 < longtext.length ) {
+			return null; // Pointless.
+		}
+		dmp = this; // 'this' becomes 'window' in a closure.
+
+		/**
+		 * Does a substring of shorttext exist within longtext such that the substring
+		 * is at least half the length of longtext?
+		 * Closure, but does not reference any external variables.
+		 * @param {string} longtext Longer string.
+		 * @param {string} shorttext Shorter string.
+		 * @param {number} i Start index of quarter length substring within longtext.
+		 * @return {Array.<string>} Five element Array, containing the prefix of
+		 *     longtext, the suffix of longtext, the prefix of shorttext, the suffix
+		 *     of shorttext and the common middle.  Or null if there was no match.
+		 * @private
+		 */
+		function diffHalfMatchI( longtext, shorttext, i ) {
+			var seed, j, bestCommon, prefixLength, suffixLength,
+				bestLongtextA, bestLongtextB, bestShorttextA, bestShorttextB;
+
+			// Start with a 1/4 length substring at position i as a seed.
+			seed = longtext.substring( i, i + Math.floor( longtext.length / 4 ) );
+			j = -1;
+			bestCommon = "";
+			while ( ( j = shorttext.indexOf( seed, j + 1 ) ) !== -1 ) {
+				prefixLength = dmp.diffCommonPrefix( longtext.substring( i ),
+					shorttext.substring( j ) );
+				suffixLength = dmp.diffCommonSuffix( longtext.substring( 0, i ),
+					shorttext.substring( 0, j ) );
+				if ( bestCommon.length < suffixLength + prefixLength ) {
+					bestCommon = shorttext.substring( j - suffixLength, j ) +
+						shorttext.substring( j, j + prefixLength );
+					bestLongtextA = longtext.substring( 0, i - suffixLength );
+					bestLongtextB = longtext.substring( i + prefixLength );
+					bestShorttextA = shorttext.substring( 0, j - suffixLength );
+					bestShorttextB = shorttext.substring( j + prefixLength );
+				}
+			}
+			if ( bestCommon.length * 2 >= longtext.length ) {
+				return [ bestLongtextA, bestLongtextB,
+					bestShorttextA, bestShorttextB, bestCommon
+				];
+			} else {
+				return null;
+			}
+		}
+
+		// First check if the second quarter is the seed for a half-match.
+		hm1 = diffHalfMatchI( longtext, shorttext,
+			Math.ceil( longtext.length / 4 ) );
+
+		// Check again based on the third quarter.
+		hm2 = diffHalfMatchI( longtext, shorttext,
+			Math.ceil( longtext.length / 2 ) );
+		if ( !hm1 && !hm2 ) {
+			return null;
+		} else if ( !hm2 ) {
+			hm = hm1;
+		} else if ( !hm1 ) {
+			hm = hm2;
+		} else {
+
+			// Both matched.  Select the longest.
+			hm = hm1[ 4 ].length > hm2[ 4 ].length ? hm1 : hm2;
+		}
+
+		// A half-match was found, sort out the return data.
+		if ( text1.length > text2.length ) {
+			text1A = hm[ 0 ];
+			text1B = hm[ 1 ];
+			text2A = hm[ 2 ];
+			text2B = hm[ 3 ];
+		} else {
+			text2A = hm[ 0 ];
+			text2B = hm[ 1 ];
+			text1A = hm[ 2 ];
+			text1B = hm[ 3 ];
+		}
+		midCommon = hm[ 4 ];
+		return [ text1A, text1B, text2A, text2B, midCommon ];
+	};
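+
+	// A sketch of the five-element result (assuming dmp = new DiffMatchPatch();
+	// values illustrative): both texts are split around a shared middle that is
+	// at least half the length of the longer text:
+	//   dmp.diffHalfMatch( "1234567890", "a345678z" )
+	//   -> [ "12", "90", "a", "z", "345678" ]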
+
+	/**
+	 * Do a quick line-level diff on both strings, then rediff the parts for
+	 * greater accuracy.
+	 * This speedup can produce non-minimal diffs.
+	 * @param {string} text1 Old string to be diffed.
+	 * @param {string} text2 New string to be diffed.
+	 * @param {number} deadline Time when the diff should be complete by.
+	 * @return {!Array.<!DiffMatchPatch.Diff>} Array of diff tuples.
+	 * @private
+	 */
+	DiffMatchPatch.prototype.diffLineMode = function( text1, text2, deadline ) {
+		var a, diffs, linearray, pointer, countInsert,
+			countDelete, textInsert, textDelete, j;
+
+		// Scan the text on a line-by-line basis first.
+		a = this.diffLinesToChars( text1, text2 );
+		text1 = a.chars1;
+		text2 = a.chars2;
+		linearray = a.lineArray;
+
+		diffs = this.DiffMain( text1, text2, false, deadline );
+
+		// Convert the diff back to original text.
+		this.diffCharsToLines( diffs, linearray );
+
+		// Eliminate freak matches (e.g. blank lines)
+		this.diffCleanupSemantic( diffs );
+
+		// Rediff any replacement blocks, this time character-by-character.
+		// Add a dummy entry at the end.
+		diffs.push( [ DIFF_EQUAL, "" ] );
+		pointer = 0;
+		countDelete = 0;
+		countInsert = 0;
+		textDelete = "";
+		textInsert = "";
+		while ( pointer < diffs.length ) {
+			switch ( diffs[ pointer ][ 0 ] ) {
+			case DIFF_INSERT:
+				countInsert++;
+				textInsert += diffs[ pointer ][ 1 ];
+				break;
+			case DIFF_DELETE:
+				countDelete++;
+				textDelete += diffs[ pointer ][ 1 ];
+				break;
+			case DIFF_EQUAL:
+
+				// Upon reaching an equality, check for prior redundancies.
+				if ( countDelete >= 1 && countInsert >= 1 ) {
+
+					// Delete the offending records and add the merged ones.
+					diffs.splice( pointer - countDelete - countInsert,
+						countDelete + countInsert );
+					pointer = pointer - countDelete - countInsert;
+					a = this.DiffMain( textDelete, textInsert, false, deadline );
+					for ( j = a.length - 1; j >= 0; j-- ) {
+						diffs.splice( pointer, 0, a[ j ] );
+					}
+					pointer = pointer + a.length;
+				}
+				countInsert = 0;
+				countDelete = 0;
+				textDelete = "";
+				textInsert = "";
+				break;
+			}
+			pointer++;
+		}
+		diffs.pop(); // Remove the dummy entry at the end.
+
+		return diffs;
+	};
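+
+	// Rough outline of the strategy above (descriptive only): hash each line to
+	// a single character, diff cheaply at line granularity, then re-diff only
+	// the replacement blocks character-by-character against the same deadline.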
+
+	/**
+	 * Find the 'middle snake' of a diff, split the problem in two
+	 * and return the recursively constructed diff.
+	 * See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations.
+	 * @param {string} text1 Old string to be diffed.
+	 * @param {string} text2 New string to be diffed.
+	 * @param {number} deadline Time at which to bail if not yet complete.
+	 * @return {!Array.<!DiffMatchPatch.Diff>} Array of diff tuples.
+	 * @private
+	 */
+	DiffMatchPatch.prototype.diffBisect = function( text1, text2, deadline ) {
+		var text1Length, text2Length, maxD, vOffset, vLength,
+			v1, v2, x, delta, front, k1start, k1end, k2start,
+			k2end, k2Offset, k1Offset, x1, x2, y1, y2, d, k1, k2;
+
+		// Cache the text lengths to prevent multiple calls.
+		text1Length = text1.length;
+		text2Length = text2.length;
+		maxD = Math.ceil( ( text1Length + text2Length ) / 2 );
+		vOffset = maxD;
+		vLength = 2 * maxD;
+		v1 = new Array( vLength );
+		v2 = new Array( vLength );
+
+		// Setting all elements to -1 is faster in Chrome & Firefox than mixing
+		// integers and undefined.
+		for ( x = 0; x < vLength; x++ ) {
+			v1[ x ] = -1;
+			v2[ x ] = -1;
+		}
+		v1[ vOffset + 1 ] = 0;
+		v2[ vOffset + 1 ] = 0;
+		delta = text1Length - text2Length;
+
+		// If the total number of characters is odd, then the front path will collide
+		// with the reverse path.
+		front = ( delta % 2 !== 0 );
+
+		// Offsets for start and end of k loop.
+		// Prevents mapping of space beyond the grid.
+		k1start = 0;
+		k1end = 0;
+		k2start = 0;
+		k2end = 0;
+		for ( d = 0; d < maxD; d++ ) {
+
+			// Bail out if deadline is reached.
+			if ( ( new Date() ).getTime() > deadline ) {
+				break;
+			}
+
+			// Walk the front path one step.
+			for ( k1 = -d + k1start; k1 <= d - k1end; k1 += 2 ) {
+				k1Offset = vOffset + k1;
+				if ( k1 === -d || ( k1 !== d && v1[ k1Offset - 1 ] < v1[ k1Offset + 1 ] ) ) {
+					x1 = v1[ k1Offset + 1 ];
+				} else {
+					x1 = v1[ k1Offset - 1 ] + 1;
+				}
+				y1 = x1 - k1;
+				while ( x1 < text1Length && y1 < text2Length &&
+					text1.charAt( x1 ) === text2.charAt( y1 ) ) {
+					x1++;
+					y1++;
+				}
+				v1[ k1Offset ] = x1;
+				if ( x1 > text1Length ) {
+
+					// Ran off the right of the graph.
+					k1end += 2;
+				} else if ( y1 > text2Length ) {
+
+					// Ran off the bottom of the graph.
+					k1start += 2;
+				} else if ( front ) {
+					k2Offset = vOffset + delta - k1;
+					if ( k2Offset >= 0 && k2Offset < vLength && v2[ k2Offset ] !== -1 ) {
+
+						// Mirror x2 onto top-left coordinate system.
+						x2 = text1Length - v2[ k2Offset ];
+						if ( x1 >= x2 ) {
+
+							// Overlap detected.
+							return this.diffBisectSplit( text1, text2, x1, y1, deadline );
+						}
+					}
+				}
+			}
+
+			// Walk the reverse path one step.
+			for ( k2 = -d + k2start; k2 <= d - k2end; k2 += 2 ) {
+				k2Offset = vOffset + k2;
+				if ( k2 === -d || ( k2 !== d && v2[ k2Offset - 1 ] < v2[ k2Offset + 1 ] ) ) {
+					x2 = v2[ k2Offset + 1 ];
+				} else {
+					x2 = v2[ k2Offset - 1 ] + 1;
+				}
+				y2 = x2 - k2;
+				while ( x2 < text1Length && y2 < text2Length &&
+					text1.charAt( text1Length - x2 - 1 ) ===
+					text2.charAt( text2Length - y2 - 1 ) ) {
+					x2++;
+					y2++;
+				}
+				v2[ k2Offset ] = x2;
+				if ( x2 > text1Length ) {
+
+					// Ran off the left of the graph.
+					k2end += 2;
+				} else if ( y2 > text2Length ) {
+
+					// Ran off the top of the graph.
+					k2start += 2;
+				} else if ( !front ) {
+					k1Offset = vOffset + delta - k2;
+					if ( k1Offset >= 0 && k1Offset < vLength && v1[ k1Offset ] !== -1 ) {
+						x1 = v1[ k1Offset ];
+						y1 = vOffset + x1 - k1Offset;
+
+						// Mirror x2 onto top-left coordinate system.
+						x2 = text1Length - x2;
+						if ( x1 >= x2 ) {
+
+							// Overlap detected.
+							return this.diffBisectSplit( text1, text2, x1, y1, deadline );
+						}
+					}
+				}
+			}
+		}
+
+		// Diff took too long and hit the deadline or
+		// number of diffs equals number of characters, no commonality at all.
+		return [
+			[ DIFF_DELETE, text1 ],
+			[ DIFF_INSERT, text2 ]
+		];
+	};
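+
+	// Descriptive note: v1 and v2 hold the furthest-reaching x for each
+	// diagonal k (with y = x - k) on the forward and reverse paths; when the
+	// two paths overlap, diffBisectSplit recurses on the two halves, which is
+	// what gives the algorithm its O(ND) behavior.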
+
+	/**
+	 * Given the location of the 'middle snake', split the diff in two parts
+	 * and recurse.
+	 * @param {string} text1 Old string to be diffed.
+	 * @param {string} text2 New string to be diffed.
+	 * @param {number} x Index of split point in text1.
+	 * @param {number} y Index of split point in text2.
+	 * @param {number} deadline Time at which to bail if not yet complete.
+	 * @return {!Array.<!DiffMatchPatch.Diff>} Array of diff tuples.
+	 * @private
+	 */
+	DiffMatchPatch.prototype.diffBisectSplit = function( text1, text2, x, y, deadline ) {
+		var text1a, text1b, text2a, text2b, diffs, diffsb;
+		text1a = text1.substring( 0, x );
+		text2a = text2.substring( 0, y );
+		text1b = text1.substring( x );
+		text2b = text2.substring( y );
+
+		// Compute both diffs serially.
+		diffs = this.DiffMain( text1a, text2a, false, deadline );
+		diffsb = this.DiffMain( text1b, text2b, false, deadline );
+
+		return diffs.concat( diffsb );
+	};
+
+	/**
+	 * Reduce the number of edits by eliminating semantically trivial equalities.
+	 * @param {!Array.<!DiffMatchPatch.Diff>} diffs Array of diff tuples.
+	 */
+	DiffMatchPatch.prototype.diffCleanupSemantic = function( diffs ) {
+		var changes, equalities, equalitiesLength, lastequality,
+			pointer, lengthInsertions2, lengthDeletions2, lengthInsertions1,
+			lengthDeletions1, deletion, insertion, overlapLength1, overlapLength2;
+		changes = false;
+		equalities = []; // Stack of indices where equalities are found.
+		equalitiesLength = 0; // Keeping our own length var is faster in JS.
+		/** @type {?string} */
+		lastequality = null; // Always equal to diffs[equalities[equalitiesLength - 1]][1]
+		pointer = 0; // Index of current position.
+
+		// Number of characters that changed prior to the equality.
+		lengthInsertions1 = 0;
+		lengthDeletions1 = 0;
+
+		// Number of characters that changed after the equality.
+		lengthInsertions2 = 0;
+		lengthDeletions2 = 0;
+		while ( pointer < diffs.length ) {
+			if ( diffs[ pointer ][ 0 ] === DIFF_EQUAL ) { // Equality found.
+				equalities[ equalitiesLength++ ] = pointer;
+				lengthInsertions1 = lengthInsertions2;
+				lengthDeletions1 = lengthDeletions2;
+				lengthInsertions2 = 0;
+				lengthDeletions2 = 0;
+				lastequality = diffs[ pointer ][ 1 ];
+			} else { // An insertion or deletion.
+				if ( diffs[ pointer ][ 0 ] === DIFF_INSERT ) {
+					lengthInsertions2 += diffs[ pointer ][ 1 ].length;
+				} else {
+					lengthDeletions2 += diffs[ pointer ][ 1 ].length;
+				}
+
+				// Eliminate an equality that is smaller or equal to the edits on both
+				// sides of it.
+				if ( lastequality && ( lastequality.length <=
+						Math.max( lengthInsertions1, lengthDeletions1 ) ) &&
+						( lastequality.length <= Math.max( lengthInsertions2,
+							lengthDeletions2 ) ) ) {
+
+					// Duplicate record.
+					diffs.splice(
+						equalities[ equalitiesLength - 1 ],
+						0,
+						[ DIFF_DELETE, lastequality ]
+					);
+
+					// Change second copy to insert.
+					diffs[ equalities[ equalitiesLength - 1 ] + 1 ][ 0 ] = DIFF_INSERT;
+
+					// Throw away the equality we just deleted.
+					equalitiesLength--;
+
+					// Throw away the previous equality (it needs to be reevaluated).
+					equalitiesLength--;
+					pointer = equalitiesLength > 0 ? equalities[ equalitiesLength - 1 ] : -1;
+
+					// Reset the counters.
+					lengthInsertions1 = 0;
+					lengthDeletions1 = 0;
+					lengthInsertions2 = 0;
+					lengthDeletions2 = 0;
+					lastequality = null;
+					changes = true;
+				}
+			}
+			pointer++;
+		}
+
+		// Normalize the diff.
+		if ( changes ) {
+			this.diffCleanupMerge( diffs );
+		}
+
+		// Find any overlaps between deletions and insertions.
+		// e.g: <del>abcxxx</del><ins>xxxdef</ins>
+		//   -> <del>abc</del>xxx<ins>def</ins>
+		// e.g: <del>xxxabc</del><ins>defxxx</ins>
+		//   -> <ins>def</ins>xxx<del>abc</del>
+		// Only extract an overlap if it is as big as the edit ahead or behind it.
+		pointer = 1;
+		while ( pointer < diffs.length ) {
+			if ( diffs[ pointer - 1 ][ 0 ] === DIFF_DELETE &&
+					diffs[ pointer ][ 0 ] === DIFF_INSERT ) {
+				deletion = diffs[ pointer - 1 ][ 1 ];
+				insertion = diffs[ pointer ][ 1 ];
+				overlapLength1 = this.diffCommonOverlap( deletion, insertion );
+				overlapLength2 = this.diffCommonOverlap( insertion, deletion );
+				if ( overlapLength1 >= overlapLength2 ) {
+					if ( overlapLength1 >= deletion.length / 2 ||
+							overlapLength1 >= insertion.length / 2 ) {
+
+						// Overlap found.  Insert an equality and trim the surrounding edits.
+						diffs.splice(
+							pointer,
+							0,
+							[ DIFF_EQUAL, insertion.substring( 0, overlapLength1 ) ]
+						);
+						diffs[ pointer - 1 ][ 1 ] =
+							deletion.substring( 0, deletion.length - overlapLength1 );
+						diffs[ pointer + 1 ][ 1 ] = insertion.substring( overlapLength1 );
+						pointer++;
+					}
+				} else {
+					if ( overlapLength2 >= deletion.length / 2 ||
+							overlapLength2 >= insertion.length / 2 ) {
+
+						// Reverse overlap found.
+						// Insert an equality and swap and trim the surrounding edits.
+						diffs.splice(
+							pointer,
+							0,
+							[ DIFF_EQUAL, deletion.substring( 0, overlapLength2 ) ]
+						);
+
+						diffs[ pointer - 1 ][ 0 ] = DIFF_INSERT;
+						diffs[ pointer - 1 ][ 1 ] =
+							insertion.substring( 0, insertion.length - overlapLength2 );
+						diffs[ pointer + 1 ][ 0 ] = DIFF_DELETE;
+						diffs[ pointer + 1 ][ 1 ] =
+							deletion.substring( overlapLength2 );
+						pointer++;
+					}
+				}
+				pointer++;
+			}
+			pointer++;
+		}
+	};
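+
+	// A sketch of the overlap extraction above (values illustrative):
+	//   [ [ DIFF_DELETE, "abcxxx" ], [ DIFF_INSERT, "xxxdef" ] ]
+	//   becomes
+	//   [ [ DIFF_DELETE, "abc" ], [ DIFF_EQUAL, "xxx" ], [ DIFF_INSERT, "def" ] ]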
+
+	/**
+	 * Determine if the suffix of one string is the prefix of another.
+	 * @param {string} text1 First string.
+	 * @param {string} text2 Second string.
+	 * @return {number} The number of characters common to the end of the first
+	 *     string and the start of the second string.
+	 * @private
+	 */
+	DiffMatchPatch.prototype.diffCommonOverlap = function( text1, text2 ) {
+		var text1Length, text2Length, textLength,
+			best, length, pattern, found;
+
+		// Cache the text lengths to prevent multiple calls.
+		text1Length = text1.length;
+		text2Length = text2.length;
+
+		// Eliminate the null case.
+		if ( text1Length === 0 || text2Length === 0 ) {
+			return 0;
+		}
+
+		// Truncate the longer string.
+		if ( text1Length > text2Length ) {
+			text1 = text1.substring( text1Length - text2Length );
+		} else if ( text1Length < text2Length ) {
+			text2 = text2.substring( 0, text1Length );
+		}
+		textLength = Math.min( text1Length, text2Length );
+
+		// Quick check for the worst case.
+		if ( text1 === text2 ) {
+			return textLength;
+		}
+
+		// Start by looking for a single character match
+		// and increase length until no match is found.
+		// Performance analysis: https://neil.fraser.name/news/2010/11/04/
+		best = 0;
+		length = 1;
+		while ( true ) {
+			pattern = text1.substring( textLength - length );
+			found = text2.indexOf( pattern );
+			if ( found === -1 ) {
+				return best;
+			}
+			length += found;
+			if ( found === 0 || text1.substring( textLength - length ) ===
+					text2.substring( 0, length ) ) {
+				best = length;
+				length++;
+			}
+		}
+	};
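+
+	// Minimal sketch (assuming dmp = new DiffMatchPatch(); values illustrative):
+	//   dmp.diffCommonOverlap( "abcxxx", "xxxdef" ) -> 3 (the shared "xxx")
+	//   dmp.diffCommonOverlap( "fi", "\ufb01i" ) -> 0 (no Unicode normalization)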
+
+	/**
+	 * Split two texts into an array of strings.  Reduce the texts to a string of
+	 * hashes where each Unicode character represents one line.
+	 * @param {string} text1 First string.
+	 * @param {string} text2 Second string.
+	 * @return {{chars1: string, chars2: string, lineArray: !Array.<string>}}
+	 *     An object containing the encoded text1, the encoded text2 and
+	 *     the array of unique strings.
+	 *     The zeroth element of the array of unique strings is intentionally blank.
+	 * @private
+	 */
+	DiffMatchPatch.prototype.diffLinesToChars = function( text1, text2 ) {
+		var lineArray, lineHash, chars1, chars2;
+		lineArray = []; // E.g. lineArray[4] === 'Hello\n'
+		lineHash = {};  // E.g. lineHash['Hello\n'] === 4
+
+		// '\x00' is a valid character, but various debuggers don't like it.
+		// So we'll insert a junk entry to avoid generating a null character.
+		lineArray[ 0 ] = "";
+
+		/**
+		 * Split a text into an array of strings.  Reduce the text to a string of
+		 * hashes where each Unicode character represents one line.
+		 * Modifies lineArray and lineHash through being a closure.
+		 * @param {string} text String to encode.
+		 * @return {string} Encoded string.
+		 * @private
+		 */
+		function diffLinesToCharsMunge( text ) {
+			var chars, lineStart, lineEnd, lineArrayLength, line;
+			chars = "";
+
+			// Walk the text, pulling out a substring for each line.
+			// text.split('\n') would temporarily double our memory footprint.
+			// Modifying text would create many large strings to garbage collect.
+			lineStart = 0;
+			lineEnd = -1;
+
+			// Keeping our own length variable is faster than looking it up.
+			lineArrayLength = lineArray.length;
+			while ( lineEnd < text.length - 1 ) {
+				lineEnd = text.indexOf( "\n", lineStart );
+				if ( lineEnd === -1 ) {
+					lineEnd = text.length - 1;
+				}
+				line = text.substring( lineStart, lineEnd + 1 );
+				lineStart = lineEnd + 1;
+
+				if ( lineHash.hasOwnProperty ? lineHash.hasOwnProperty( line ) :
+							( lineHash[ line ] !== undefined ) ) {
+					chars += String.fromCharCode( lineHash[ line ] );
+				} else {
+					chars += String.fromCharCode( lineArrayLength );
+					lineHash[ line ] = lineArrayLength;
+					lineArray[ lineArrayLength++ ] = line;
+				}
+			}
+			return chars;
+		}
+
+		chars1 = diffLinesToCharsMunge( text1 );
+		chars2 = diffLinesToCharsMunge( text2 );
+		return {
+			chars1: chars1,
+			chars2: chars2,
+			lineArray: lineArray
+		};
+	};
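+
+	// A sketch of the encoding (assuming dmp = new DiffMatchPatch(); values
+	// illustrative): each distinct line maps to one character code, so the
+	// character-level diff core can be reused at line granularity:
+	//   dmp.diffLinesToChars( "alpha\nbeta\nalpha\n", "beta\nalpha\nbeta\n" )
+	//   -> { chars1: "\x01\x02\x01", chars2: "\x02\x01\x02",
+	//        lineArray: [ "", "alpha\n", "beta\n" ] }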
+
+	/**
+	 * Rehydrate the text in a diff from a string of line hashes to real lines of
+	 * text.
+	 * @param {!Array.<!DiffMatchPatch.Diff>} diffs Array of diff tuples.
+	 * @param {!Array.<string>} lineArray Array of unique strings.
+	 * @private
+	 */
+	DiffMatchPatch.prototype.diffCharsToLines = function( diffs, lineArray ) {
+		var x, chars, text, y;
+		for ( x = 0; x < diffs.length; x++ ) {
+			chars = diffs[ x ][ 1 ];
+			text = [];
+			for ( y = 0; y < chars.length; y++ ) {
+				text[ y ] = lineArray[ chars.charCodeAt( y ) ];
+			}
+			diffs[ x ][ 1 ] = text.join( "" );
+		}
+	};
+
+	/**
+	 * Reorder and merge like edit sections.  Merge equalities.
+	 * Any edit section can move as long as it doesn't cross an equality.
+	 * @param {!Array.<!DiffMatchPatch.Diff>} diffs Array of diff tuples.
+	 */
+	DiffMatchPatch.prototype.diffCleanupMerge = function( diffs ) {
+		var pointer, countDelete, countInsert, textInsert, textDelete,
+			commonlength, changes, diffPointer, position;
+		diffs.push( [ DIFF_EQUAL, "" ] ); // Add a dummy entry at the end.
+		pointer = 0;
+		countDelete = 0;
+		countInsert = 0;
+		textDelete = "";
+		textInsert = "";
+		while ( pointer < diffs.length ) {
+			switch ( diffs[ pointer ][ 0 ] ) {
+			case DIFF_INSERT:
+				countInsert++;
+				textInsert += diffs[ pointer ][ 1 ];
+				pointer++;
+				break;
+			case DIFF_DELETE:
+				countDelete++;
+				textDelete += diffs[ pointer ][ 1 ];
+				pointer++;
+				break;
+			case DIFF_EQUAL:
+
+				// Upon reaching an equality, check for prior redundancies.
+				if ( countDelete + countInsert > 1 ) {
+					if ( countDelete !== 0 && countInsert !== 0 ) {
+
+						// Factor out any common prefixes.
+						commonlength = this.diffCommonPrefix( textInsert, textDelete );
+						if ( commonlength !== 0 ) {
+							if ( ( pointer - countDelete - countInsert ) > 0 &&
+									diffs[ pointer - countDelete - countInsert - 1 ][ 0 ] ===
+									DIFF_EQUAL ) {
+								diffs[ pointer - countDelete - countInsert - 1 ][ 1 ] +=
+									textInsert.substring( 0, commonlength );
+							} else {
+								diffs.splice( 0, 0, [ DIFF_EQUAL,
+									textInsert.substring( 0, commonlength )
+								] );
+								pointer++;
+							}
+							textInsert = textInsert.substring( commonlength );
+							textDelete = textDelete.substring( commonlength );
+						}
+
+						// Factor out any common suffixes.
+						commonlength = this.diffCommonSuffix( textInsert, textDelete );
+						if ( commonlength !== 0 ) {
+							diffs[ pointer ][ 1 ] = textInsert.substring( textInsert.length -
+									commonlength ) + diffs[ pointer ][ 1 ];
+							textInsert = textInsert.substring( 0, textInsert.length -
+								commonlength );
+							textDelete = textDelete.substring( 0, textDelete.length -
+								commonlength );
+						}
+					}
+
+					// Delete the offending records and add the merged ones.
+					if ( countDelete === 0 ) {
+						diffs.splice( pointer - countInsert,
+							countDelete + countInsert, [ DIFF_INSERT, textInsert ] );
+					} else if ( countInsert === 0 ) {
+						diffs.splice( pointer - countDelete,
+							countDelete + countInsert, [ DIFF_DELETE, textDelete ] );
+					} else {
+						diffs.splice(
+							pointer - countDelete - countInsert,
+							countDelete + countInsert,
+							[ DIFF_DELETE, textDelete ], [ DIFF_INSERT, textInsert ]
+						);
+					}
+					pointer = pointer - countDelete - countInsert +
+						( countDelete ? 1 : 0 ) + ( countInsert ? 1 : 0 ) + 1;
+				} else if ( pointer !== 0 && diffs[ pointer - 1 ][ 0 ] === DIFF_EQUAL ) {
+
+					// Merge this equality with the previous one.
+					diffs[ pointer - 1 ][ 1 ] += diffs[ pointer ][ 1 ];
+					diffs.splice( pointer, 1 );
+				} else {
+					pointer++;
+				}
+				countInsert = 0;
+				countDelete = 0;
+				textDelete = "";
+				textInsert = "";
+				break;
+			}
+		}
+		if ( diffs[ diffs.length - 1 ][ 1 ] === "" ) {
+			diffs.pop(); // Remove the dummy entry at the end.
+		}
+
+		// Second pass: look for single edits surrounded on both sides by equalities
+		// which can be shifted sideways to eliminate an equality.
+		// e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
+		changes = false;
+		pointer = 1;
+
+		// Intentionally ignore the first and last element (don't need checking).
+		while ( pointer < diffs.length - 1 ) {
+			if ( diffs[ pointer - 1 ][ 0 ] === DIFF_EQUAL &&
+					diffs[ pointer + 1 ][ 0 ] === DIFF_EQUAL ) {
+
+				diffPointer = diffs[ pointer ][ 1 ];
+				position = diffPointer.substring(
+					diffPointer.length - diffs[ pointer - 1 ][ 1 ].length
+				);
+
+				// This is a single edit surrounded by equalities.
+				if ( position === diffs[ pointer - 1 ][ 1 ] ) {
+
+					// Shift the edit over the previous equality.
+					diffs[ pointer ][ 1 ] = diffs[ pointer - 1 ][ 1 ] +
+						diffs[ pointer ][ 1 ].substring( 0, diffs[ pointer ][ 1 ].length -
+							diffs[ pointer - 1 ][ 1 ].length );
+					diffs[ pointer + 1 ][ 1 ] =
+						diffs[ pointer - 1 ][ 1 ] + diffs[ pointer + 1 ][ 1 ];
+					diffs.splice( pointer - 1, 1 );
+					changes = true;
+				} else if ( diffPointer.substring( 0, diffs[ pointer + 1 ][ 1 ].length ) ===
+						diffs[ pointer + 1 ][ 1 ] ) {
+
+					// Shift the edit over the next equality.
+					diffs[ pointer - 1 ][ 1 ] += diffs[ pointer + 1 ][ 1 ];
+					diffs[ pointer ][ 1 ] =
+						diffs[ pointer ][ 1 ].substring( diffs[ pointer + 1 ][ 1 ].length ) +
+						diffs[ pointer + 1 ][ 1 ];
+					diffs.splice( pointer + 1, 1 );
+					changes = true;
+				}
+			}
+			pointer++;
+		}
+
+		// If shifts were made, the diff needs reordering and another shift sweep.
+		if ( changes ) {
+			this.diffCleanupMerge( diffs );
+		}
+	};
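+
+	// A sketch of the second-pass shift above (values illustrative):
+	//   [ [ DIFF_EQUAL, "a" ], [ DIFF_INSERT, "ba" ], [ DIFF_EQUAL, "c" ] ]
+	//   becomes
+	//   [ [ DIFF_INSERT, "ab" ], [ DIFF_EQUAL, "ac" ] ]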
+
+	return function( o, n ) {
+		var diff, output, text;
+		diff = new DiffMatchPatch();
+		output = diff.DiffMain( o, n );
+		diff.diffCleanupEfficiency( output );
+		text = diff.diffPrettyHtml( output );
+
+		return text;
+	};
+}() );
+
+}() );
\ No newline at end of file
diff --git a/test/qunit/test-libs/sinon-1.17.3.js b/test/qunit/test-libs/sinon-1.17.3.js
new file mode 100644
index 0000000..a8ddd6f
--- /dev/null
+++ b/test/qunit/test-libs/sinon-1.17.3.js
@@ -0,0 +1,6437 @@
+/**
+ * Sinon.JS 1.17.3, 2016/01/27
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @author Contributors: https://github.com/cjohansen/Sinon.JS/blob/master/AUTHORS
+ *
+ * (The BSD License)
+ * 
+ * Copyright (c) 2010-2014, Christian Johansen, christian at cjohansen.no
+ * All rights reserved.
+ * 
+ * Redistribution and use in source and binary forms, with or without modification,
+ * are permitted provided that the following conditions are met:
+ * 
+ *     * Redistributions of source code must retain the above copyright notice,
+ *       this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above copyright notice,
+ *       this list of conditions and the following disclaimer in the documentation
+ *       and/or other materials provided with the distribution.
+ *     * Neither the name of Christian Johansen nor the names of his contributors
+ *       may be used to endorse or promote products derived from this software
+ *       without specific prior written permission.
+ * 
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+(function (root, factory) {
+  'use strict';
+  if (typeof define === 'function' && define.amd) {
+    define('sinon', [], function () {
+      return (root.sinon = factory());
+    });
+  } else if (typeof exports === 'object') {
+    module.exports = factory();
+  } else {
+    root.sinon = factory();
+  }
+}(this, function () {
+  'use strict';
+  var samsam, formatio, lolex;
+  (function () {
+                function define(mod, deps, fn) {
+                  if (mod == "samsam") {
+                    samsam = deps();
+                  } else if (typeof deps === "function" && mod.length === 0) {
+                    lolex = deps();
+                  } else if (typeof fn === "function") {
+                    formatio = fn(samsam);
+                  }
+                }
+    define.amd = {};
+((typeof define === "function" && define.amd && function (m) { define("samsam", m); }) ||
+ (typeof module === "object" &&
+      function (m) { module.exports = m(); }) || // Node
+ function (m) { this.samsam = m(); } // Browser globals
+)(function () {
+    var o = Object.prototype;
+    var div = typeof document !== "undefined" && document.createElement("div");
+
+    function isNaN(value) {
+        // Unlike global isNaN, this avoids type coercion
+        // typeof check avoids IE host object issues, hat tip to
+        // lodash
+        var val = value; // JsLint thinks value !== value is "weird"
+        return typeof value === "number" && value !== val;
+    }
+
+    function getClass(value) {
+        // Returns the internal [[Class]] by calling Object.prototype.toString
+        // with the provided value as this. Return value is a string, naming the
+        // internal class, e.g. "Array"
+        return o.toString.call(value).split(/[ \]]/)[1];
+    }
+
+    /**
+     * @name samsam.isArguments
+     * @param Object object
+     *
+     * Returns ``true`` if ``object`` is an ``arguments`` object,
+     * ``false`` otherwise.
+     */
+    function isArguments(object) {
+        if (getClass(object) === 'Arguments') { return true; }
+        if (typeof object !== "object" || typeof object.length !== "number" ||
+                getClass(object) === "Array") {
+            return false;
+        }
+        if (typeof object.callee == "function") { return true; }
+        try {
+            object[object.length] = 6;
+            delete object[object.length];
+        } catch (e) {
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * @name samsam.isElement
+     * @param Object object
+     *
+     * Returns ``true`` if ``object`` is a DOM element node. Unlike
+     * Underscore.js/lodash, this function will return ``false`` if ``object``
+     * is an *element-like* object, i.e. a regular object with a ``nodeType``
+     * property that holds the value ``1``.
+     */
+    function isElement(object) {
+        if (!object || object.nodeType !== 1 || !div) { return false; }
+        try {
+            object.appendChild(div);
+            object.removeChild(div);
+        } catch (e) {
+            return false;
+        }
+        return true;
+    }
+
+    /**
+     * @name samsam.keys
+     * @param Object object
+     *
+     * Return an array of own property names.
+     */
+    function keys(object) {
+        var ks = [], prop;
+        for (prop in object) {
+            if (o.hasOwnProperty.call(object, prop)) { ks.push(prop); }
+        }
+        return ks;
+    }
+
+    /**
+     * @name samsam.isDate
+     * @param Object value
+     *
+     * Returns true if the object is a ``Date``, or *date-like*. Duck typing
+     * of date objects work by checking that the object has a ``getTime``
+     * function whose return value equals the return value from the object's
+     * ``valueOf``.
+     */
+    function isDate(value) {
+        return typeof value.getTime == "function" &&
+            value.getTime() == value.valueOf();
+    }
+
+    /**
+     * @name samsam.isNegZero
+     * @param Object value
+     *
+     * Returns ``true`` if ``value`` is ``-0``.
+     */
+    function isNegZero(value) {
+        return value === 0 && 1 / value === -Infinity;
+    }
+
+    /**
+     * @name samsam.equal
+     * @param Object obj1
+     * @param Object obj2
+     *
+     * Returns ``true`` if two objects are strictly equal. Compared to
+     * ``===`` there are two exceptions:
+     *
+     *   - NaN is considered equal to NaN
+     *   - -0 and +0 are not considered equal
+     */
+    function identical(obj1, obj2) {
+        if (obj1 === obj2 || (isNaN(obj1) && isNaN(obj2))) {
+            return obj1 !== 0 || isNegZero(obj1) === isNegZero(obj2);
+        }
+    }
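+
+    // Minimal sketch (values illustrative):
+    //   identical(NaN, NaN) -> true      (unlike ===)
+    //   identical(0, -0)    -> false     (unlike ===)
+    //   identical(1, 2)     -> undefined (falsy: the condition never matches)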
+
+
+    /**
+     * @name samsam.deepEqual
+     * @param Object obj1
+     * @param Object obj2
+     *
+     * Deep equal comparison. Two values are "deep equal" if:
+     *
+     *   - They are equal, according to samsam.identical
+     *   - They are both date objects representing the same time
+     *   - They are both arrays containing elements that are all deepEqual
+     *   - They are objects with the same set of properties, and each property
+     *     in ``obj1`` is deepEqual to the corresponding property in ``obj2``
+     *
+     * Supports cyclic objects.
+     */
+    function deepEqualCyclic(obj1, obj2) {
+
+        // used for cyclic comparison
+        // contain already visited objects
+        var objects1 = [],
+            objects2 = [],
+        // contain paths (position in the object structure)
+        // of the already visited objects;
+        // indexes are the same as in the objects arrays
+            paths1 = [],
+            paths2 = [],
+        // contains combinations of already compared objects
+        // in the manner: { "$1['ref']$2['ref']": true }
+            compared = {};
+
+        /**
+         * used to check whether the value of a property is an object
+         * (the cyclic logic is only needed for objects)
+         */
+        function isObject(value) {
+
+            if (typeof value === 'object' && value !== null &&
+                    !(value instanceof Boolean) &&
+                    !(value instanceof Date)    &&
+                    !(value instanceof Number)  &&
+                    !(value instanceof RegExp)  &&
+                    !(value instanceof String)) {
+
+                return true;
+            }
+
+            return false;
+        }
+
+        /**
+         * returns the index of the given object in the
+         * given objects array, or -1 if not contained;
+         * only needed for the cyclic logic
+         */
+        function getIndex(objects, obj) {
+
+            var i;
+            for (i = 0; i < objects.length; i++) {
+                if (objects[i] === obj) {
+                    return i;
+                }
+            }
+
+            return -1;
+        }
+
+        // does the recursion for the deep equal check
+        return (function deepEqual(obj1, obj2, path1, path2) {
+            var type1 = typeof obj1;
+            var type2 = typeof obj2;
+
+            // == null also matches undefined
+            if (obj1 === obj2 ||
+                    isNaN(obj1) || isNaN(obj2) ||
+                    obj1 == null || obj2 == null ||
+                    type1 !== "object" || type2 !== "object") {
+
+                return identical(obj1, obj2);
+            }
+
+            // Elements are only equal if identical(expected, actual)
+            if (isElement(obj1) || isElement(obj2)) { return false; }
+
+            var isDate1 = isDate(obj1), isDate2 = isDate(obj2);
+            if (isDate1 || isDate2) {
+                if (!isDate1 || !isDate2 || obj1.getTime() !== obj2.getTime()) {
+                    return false;
+                }
+            }
+
+            if (obj1 instanceof RegExp && obj2 instanceof RegExp) {
+                if (obj1.toString() !== obj2.toString()) { return false; }
+            }
+
+            var class1 = getClass(obj1);
+            var class2 = getClass(obj2);
+            var keys1 = keys(obj1);
+            var keys2 = keys(obj2);
+
+            if (isArguments(obj1) || isArguments(obj2)) {
+                if (obj1.length !== obj2.length) { return false; }
+            } else {
+                if (type1 !== type2 || class1 !== class2 ||
+                        keys1.length !== keys2.length) {
+                    return false;
+                }
+            }
+
+            var key, i, l,
+                // following vars are used for the cyclic logic
+                value1, value2,
+                isObject1, isObject2,
+                index1, index2,
+                newPath1, newPath2;
+
+            for (i = 0, l = keys1.length; i < l; i++) {
+                key = keys1[i];
+                if (!o.hasOwnProperty.call(obj2, key)) {
+                    return false;
+                }
+
+                // Start of the cyclic logic
+
+                value1 = obj1[key];
+                value2 = obj2[key];
+
+                isObject1 = isObject(value1);
+                isObject2 = isObject(value2);
+
+                // determine whether the objects were already visited
+                // (it's faster to check isObject first than to
+                // get -1 from getIndex for non-objects)
+                index1 = isObject1 ? getIndex(objects1, value1) : -1;
+                index2 = isObject2 ? getIndex(objects2, value2) : -1;
+
+                // determine the new paths of the objects
+                // - for non-cyclic objects the current path will be extended
+                //   by the current property name
+                // - for cyclic objects the stored path is taken
+                newPath1 = index1 !== -1
+                    ? paths1[index1]
+                    : path1 + '[' + JSON.stringify(key) + ']';
+                newPath2 = index2 !== -1
+                    ? paths2[index2]
+                    : path2 + '[' + JSON.stringify(key) + ']';
+
+                // stop recursion if current objects are already compared
+                if (compared[newPath1 + newPath2]) {
+                    return true;
+                }
+
+                // remember the current objects and their paths
+                if (index1 === -1 && isObject1) {
+                    objects1.push(value1);
+                    paths1.push(newPath1);
+                }
+                if (index2 === -1 && isObject2) {
+                    objects2.push(value2);
+                    paths2.push(newPath2);
+                }
+
+                // remember that the current objects are already compared
+                if (isObject1 && isObject2) {
+                    compared[newPath1 + newPath2] = true;
+                }
+
+                // End of cyclic logic
+
+                // neither value1 nor value2 is a cycle
+                // continue with next level
+                if (!deepEqual(value1, value2, newPath1, newPath2)) {
+                    return false;
+                }
+            }
+
+            return true;
+
+        }(obj1, obj2, '$1', '$2'));
+    }
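+
+    // Minimal sketch (values illustrative): visited objects and their paths
+    // are tracked, so self-referential structures can still be compared:
+    //   var a = { name: "x" }; a.self = a;
+    //   var b = { name: "x" }; b.self = b;
+    //   deepEqualCyclic(a, b) -> true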
+
+    var match;
+
+    function arrayContains(array, subset) {
+        if (subset.length === 0) { return true; }
+        var i, l, j, k;
+        for (i = 0, l = array.length; i < l; ++i) {
+            if (match(array[i], subset[0])) {
+                for (j = 0, k = subset.length; j < k; ++j) {
+                    if (!match(array[i + j], subset[j])) { return false; }
+                }
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * @name samsam.match
+     * @param Object object
+     * @param Object matcher
+     *
+     * Compare arbitrary value ``object`` with matcher.
+     */
+    match = function match(object, matcher) {
+        if (matcher && typeof matcher.test === "function") {
+            return matcher.test(object);
+        }
+
+        if (typeof matcher === "function") {
+            return matcher(object) === true;
+        }
+
+        if (typeof matcher === "string") {
+            matcher = matcher.toLowerCase();
+            var notNull = typeof object === "string" || !!object;
+            return notNull &&
+                (String(object)).toLowerCase().indexOf(matcher) >= 0;
+        }
+
+        if (typeof matcher === "number") {
+            return matcher === object;
+        }
+
+        if (typeof matcher === "boolean") {
+            return matcher === object;
+        }
+
+        if (typeof(matcher) === "undefined") {
+            return typeof(object) === "undefined";
+        }
+
+        if (matcher === null) {
+            return object === null;
+        }
+
+        if (getClass(object) === "Array" && getClass(matcher) === "Array") {
+            return arrayContains(object, matcher);
+        }
+
+        if (matcher && typeof matcher === "object") {
+            if (matcher === object) {
+                return true;
+            }
+            var prop;
+            for (prop in matcher) {
+                var value = object[prop];
+                if (typeof value === "undefined" &&
+                        typeof object.getAttribute === "function") {
+                    value = object.getAttribute(prop);
+                }
+                if (matcher[prop] === null || typeof matcher[prop] === 'undefined') {
+                    if (value !== matcher[prop]) {
+                        return false;
+                    }
+                } else if (typeof value === "undefined" || !match(value, matcher[prop])) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        throw new Error("Matcher was not a string, a number, a " +
+                        "function, a boolean or an object");
+    };
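+
+    // Minimal sketch of the matcher dispatch above (values illustrative):
+    //   match("Hello World", "hello")            -> true (case-insensitive substring)
+    //   match({ id: 42, name: "x" }, { id: 42 }) -> true (partial object match)
+    //   match([1, 2, 3, 4], [2, 3])              -> true (contiguous subset)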
+
+    return {
+        isArguments: isArguments,
+        isElement: isElement,
+        isDate: isDate,
+        isNegZero: isNegZero,
+        identical: identical,
+        deepEqual: deepEqualCyclic,
+        match: match,
+        keys: keys
+    };
+});
+((typeof define === "function" && define.amd && function (m) {
+    define("formatio", ["samsam"], m);
+}) || (typeof module === "object" && function (m) {
+    module.exports = m(require("samsam"));
+}) || function (m) { this.formatio = m(this.samsam); }
+)(function (samsam) {
+    
+    var formatio = {
+        excludeConstructors: ["Object", /^.$/],
+        quoteStrings: true,
+        limitChildrenCount: 0
+    };
+
+    var hasOwn = Object.prototype.hasOwnProperty;
+
+    var specialObjects = [];
+    if (typeof global !== "undefined") {
+        specialObjects.push({ object: global, value: "[object global]" });
+    }
+    if (typeof document !== "undefined") {
+        specialObjects.push({
+            object: document,
+            value: "[object HTMLDocument]"
+        });
+    }
+    if (typeof window !== "undefined") {
+        specialObjects.push({ object: window, value: "[object Window]" });
+    }
+
+    function functionName(func) {
+        if (!func) { return ""; }
+        if (func.displayName) { return func.displayName; }
+        if (func.name) { return func.name; }
+        var matches = func.toString().match(/function\s+([^\(]+)/m);
+        return (matches && matches[1]) || "";
+    }
+
+    function constructorName(f, object) {
+        var name = functionName(object && object.constructor);
+        var excludes = f.excludeConstructors ||
+                formatio.excludeConstructors || [];
+
+        var i, l;
+        for (i = 0, l = excludes.length; i < l; ++i) {
+            if (typeof excludes[i] === "string" && excludes[i] === name) {
+                return "";
+            } else if (excludes[i].test && excludes[i].test(name)) {
+                return "";
+            }
+        }
+
+        return name;
+    }
+
+    function isCircular(object, objects) {
+        if (typeof object !== "object") { return false; }
+        var i, l;
+        for (i = 0, l = objects.length; i < l; ++i) {
+            if (objects[i] === object) { return true; }
+        }
+        return false;
+    }
+
+    function ascii(f, object, processed, indent) {
+        if (typeof object === "string") {
+            var qs = f.quoteStrings;
+            var quote = typeof qs !== "boolean" || qs;
+            return processed || quote ? '"' + object + '"' : object;
+        }
+
+        if (typeof object === "function" && !(object instanceof RegExp)) {
+            return ascii.func(object);
+        }
+
+        processed = processed || [];
+
+        if (isCircular(object, processed)) { return "[Circular]"; }
+
+        if (Object.prototype.toString.call(object) === "[object Array]") {
+            return ascii.array.call(f, object, processed);
+        }
+
+        if (!object) { return String((1/object) === -Infinity ? "-0" : object); }
+        if (samsam.isElement(object)) { return ascii.element(object); }
+
+        if (typeof object.toString === "function" &&
+                object.toString !== Object.prototype.toString) {
+            return object.toString();
+        }
+
+        var i, l;
+        for (i = 0, l = specialObjects.length; i < l; i++) {
+            if (object === specialObjects[i].object) {
+                return specialObjects[i].value;
+            }
+        }
+
+        return ascii.object.call(f, object, processed, indent);
+    }
+
+    ascii.func = function (func) {
+        return "function " + functionName(func) + "() {}";
+    };
+
+    ascii.array = function (array, processed) {
+        processed = processed || [];
+        processed.push(array);
+        var pieces = [];
+        var i, l;
+        l = (this.limitChildrenCount > 0) ? 
+            Math.min(this.limitChildrenCount, array.length) : array.length;
+
+        for (i = 0; i < l; ++i) {
+            pieces.push(ascii(this, array[i], processed));
+        }
+
+        if(l < array.length)
+            pieces.push("[... " + (array.length - l) + " more elements]");
+
+        return "[" + pieces.join(", ") + "]";
+    };
+
+    ascii.object = function (object, processed, indent) {
+        processed = processed || [];
+        processed.push(object);
+        indent = indent || 0;
+        var pieces = [], properties = samsam.keys(object).sort();
+        var length = 3;
+        var prop, str, obj, i, k, l;
+        l = (this.limitChildrenCount > 0) ? 
+            Math.min(this.limitChildrenCount, properties.length) : properties.length;
+
+        for (i = 0; i < l; ++i) {
+            prop = properties[i];
+            obj = object[prop];
+
+            if (isCircular(obj, processed)) {
+                str = "[Circular]";
+            } else {
+                str = ascii(this, obj, processed, indent + 2);
+            }
+
+            str = (/\s/.test(prop) ? '"' + prop + '"' : prop) + ": " + str;
+            length += str.length;
+            pieces.push(str);
+        }
+
+        var cons = constructorName(this, object);
+        var prefix = cons ? "[" + cons + "] " : "";
+        var is = "";
+        for (i = 0, k = indent; i < k; ++i) { is += " "; }
+
+        if(l < properties.length)
+            pieces.push("[... " + (properties.length - l) + " more elements]");
+
+        if (length + indent > 80) {
+            return prefix + "{\n  " + is + pieces.join(",\n  " + is) + "\n" +
+                is + "}";
+        }
+        return prefix + "{ " + pieces.join(", ") + " }";
+    };
+
+    ascii.element = function (element) {
+        var tagName = element.tagName.toLowerCase();
+        var attrs = element.attributes, attr, pairs = [], attrName, i, l, val;
+
+        for (i = 0, l = attrs.length; i < l; ++i) {
+            attr = attrs.item(i);
+            attrName = attr.nodeName.toLowerCase().replace("html:", "");
+            val = attr.nodeValue;
+            if (attrName !== "contenteditable" || val !== "inherit") {
+                if (!!val) { pairs.push(attrName + "=\"" + val + "\""); }
+            }
+        }
+
+        var formatted = "<" + tagName + (pairs.length > 0 ? " " : "");
+        var content = element.innerHTML;
+
+        if (content.length > 20) {
+            content = content.substr(0, 20) + "[...]";
+        }
+
+        var res = formatted + pairs.join(" ") + ">" + content +
+                "</" + tagName + ">";
+
+        return res.replace(/ contentEditable="inherit"/, "");
+    };
+
+    function Formatio(options) {
+        for (var opt in options) {
+            this[opt] = options[opt];
+        }
+    }
+
+    Formatio.prototype = {
+        functionName: functionName,
+
+        configure: function (options) {
+            return new Formatio(options);
+        },
+
+        constructorName: function (object) {
+            return constructorName(this, object);
+        },
+
+        ascii: function (object, processed, indent) {
+            return ascii(this, object, processed, indent);
+        }
+    };
+
+    return Formatio.prototype;
+});
+!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.lolex=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find [...]
+(function (global){
+/*global global, window*/
+/**
+ * @author Christian Johansen (christian at cjohansen.no) and contributors
+ * @license BSD
+ *
+ * Copyright (c) 2010-2014 Christian Johansen
+ */
+
+(function (global) {
+    
+    // Make properties writable in IE, as per
+    // http://www.adequatelygood.com/Replacing-setTimeout-Globally.html
+    // JSLint being anal
+    var glbl = global;
+
+    global.setTimeout = glbl.setTimeout;
+    global.clearTimeout = glbl.clearTimeout;
+    global.setInterval = glbl.setInterval;
+    global.clearInterval = glbl.clearInterval;
+    global.Date = glbl.Date;
+
+    // setImmediate is not a standard function
+    // avoid adding the prop to the window object if not present
+    if('setImmediate' in global) {
+        global.setImmediate = glbl.setImmediate;
+        global.clearImmediate = glbl.clearImmediate;
+    }
+
+    // Node expects setTimeout/setInterval to return a fn object w/ .ref()/.unref();
+    // browsers expect a number.
+    // see https://github.com/cjohansen/Sinon.JS/pull/436
+
+    var NOOP = function () { return undefined; };
+    var timeoutResult = setTimeout(NOOP, 0);
+    var addTimerReturnsObject = typeof timeoutResult === "object";
+    clearTimeout(timeoutResult);
+
+    var NativeDate = Date;
+    var uniqueTimerId = 1;
+
+    /**
+     * Parse strings like "01:10:00" (meaning 1 hour, 10 minutes, 0 seconds) into
+     * number of milliseconds. This is used to support human-readable strings passed
+     * to clock.tick()
+     */
+    function parseTime(str) {
+        if (!str) {
+            return 0;
+        }
+
+        var strings = str.split(":");
+        var l = strings.length, i = l;
+        var ms = 0, parsed;
+
+        if (l > 3 || !/^(\d\d:){0,2}\d\d?$/.test(str)) {
+            throw new Error("tick only understands numbers, 'm:s' and 'h:m:s'. Each part must be two digits");
+        }
+
+        while (i--) {
+            parsed = parseInt(strings[i], 10);
+
+            if (parsed >= 60) {
+                throw new Error("Invalid time " + str);
+            }
+
+            ms += parsed * Math.pow(60, (l - i - 1));
+        }
+
+        return ms * 1000;
+    }
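+
+    // Minimal sketch (values illustrative):
+    //   parseTime("10")       -> 10000   (10 seconds)
+    //   parseTime("01:10:00") -> 4200000 (1 hour, 10 minutes)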
+
+    /**
+     * Used to grok the `now` parameter to createClock.
+     */
+    function getEpoch(epoch) {
+        if (!epoch) { return 0; }
+        if (typeof epoch.getTime === "function") { return epoch.getTime(); }
+        if (typeof epoch === "number") { return epoch; }
+        throw new TypeError("now should be milliseconds since UNIX epoch");
+    }
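+
+    // Illustrative behavior, per the checks above: getEpoch(null) and
+    // getEpoch(0) return 0, getEpoch(new Date(0)) returns 0 via getTime(),
+    // a number is returned unchanged, and anything else throws.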
+
+    function inRange(from, to, timer) {
+        return timer && timer.callAt >= from && timer.callAt <= to;
+    }
+
+    function mirrorDateProperties(target, source) {
+        var prop;
+        for (prop in source) {
+            if (source.hasOwnProperty(prop)) {
+                target[prop] = source[prop];
+            }
+        }
+
+        // set special now implementation
+        if (source.now) {
+            target.now = function now() {
+                return target.clock.now;
+            };
+        } else {
+            delete target.now;
+        }
+
+        // set special toSource implementation
+        if (source.toSource) {
+            target.toSource = function toSource() {
+                return source.toSource();
+            };
+        } else {
+            delete target.toSource;
+        }
+
+        // set special toString implementation
+        target.toString = function toString() {
+            return source.toString();
+        };
+
+        target.prototype = source.prototype;
+        target.parse = source.parse;
+        target.UTC = source.UTC;
+        target.prototype.toUTCString = source.prototype.toUTCString;
+
+        return target;
+    }
+
+    function createDate() {
+        function ClockDate(year, month, date, hour, minute, second, ms) {
+            // Defensive and verbose to avoid potential harm in passing an
+            // explicit undefined when the user does not pass an argument
+            switch (arguments.length) {
+            case 0:
+                return new NativeDate(ClockDate.clock.now);
+            case 1:
+                return new NativeDate(year);
+            case 2:
+                return new NativeDate(year, month);
+            case 3:
+                return new NativeDate(year, month, date);
+            case 4:
+                return new NativeDate(year, month, date, hour);
+            case 5:
+                return new NativeDate(year, month, date, hour, minute);
+            case 6:
+                return new NativeDate(year, month, date, hour, minute, second);
+            default:
+                return new NativeDate(year, month, date, hour, minute, second, ms);
+            }
+        }
+
+        return mirrorDateProperties(ClockDate, NativeDate);
+    }
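+
+    // Why the arity switch matters (editorial note): NativeDate treats an
+    // explicit undefined differently from an omitted argument, e.g.
+    // new Date(2017, 0) is a valid date while new Date(2017, 0, undefined)
+    // is an Invalid Date, so ClockDate forwards exactly arguments.length args.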
+
+    function addTimer(clock, timer) {
+        if (timer.func === undefined) {
+            throw new Error("Callback must be provided to timer calls");
+        }
+
+        if (!clock.timers) {
+            clock.timers = {};
+        }
+
+        timer.id = uniqueTimerId++;
+        timer.createdAt = clock.now;
+        timer.callAt = clock.now + (timer.delay || (clock.duringTick ? 1 : 0));
+
+        clock.timers[timer.id] = timer;
+
+        if (addTimerReturnsObject) {
+            return {
+                id: timer.id,
+                ref: NOOP,
+                unref: NOOP
+            };
+        }
+
+        return timer.id;
+    }
+
+
+    function compareTimers(a, b) {
+        // Sort first by absolute timing
+        if (a.callAt < b.callAt) {
+            return -1;
+        }
+        if (a.callAt > b.callAt) {
+            return 1;
+        }
+
+        // Sort next by immediate, immediate timers take precedence
+        if (a.immediate && !b.immediate) {
+            return -1;
+        }
+        if (!a.immediate && b.immediate) {
+            return 1;
+        }
+
+        // Sort next by creation time, earlier-created timers take precedence
+        if (a.createdAt < b.createdAt) {
+            return -1;
+        }
+        if (a.createdAt > b.createdAt) {
+            return 1;
+        }
+
+        // Sort next by id, lower-id timers take precedence
+        if (a.id < b.id) {
+            return -1;
+        }
+        if (a.id > b.id) {
+            return 1;
+        }
+
+        // As timer ids are unique, no fallback `0` is necessary
+    }
+
+    function firstTimerInRange(clock, from, to) {
+        var timers = clock.timers,
+            timer = null,
+            id,
+            isInRange;
+
+        for (id in timers) {
+            if (timers.hasOwnProperty(id)) {
+                isInRange = inRange(from, to, timers[id]);
+
+                if (isInRange && (!timer || compareTimers(timer, timers[id]) === 1)) {
+                    timer = timers[id];
+                }
+            }
+        }
+
+        return timer;
+    }
+
+    function firstTimer(clock) {
+        var timers = clock.timers,
+            timer = null,
+            id;
+
+        for (id in timers) {
+            if (timers.hasOwnProperty(id)) {
+                if (!timer || compareTimers(timer, timers[id]) === 1) {
+                    timer = timers[id];
+                }
+            }
+        }
+
+        return timer;
+    }
+
+    function callTimer(clock, timer) {
+        var exception;
+
+        if (typeof timer.interval === "number") {
+            clock.timers[timer.id].callAt += timer.interval;
+        } else {
+            delete clock.timers[timer.id];
+        }
+
+        try {
+            if (typeof timer.func === "function") {
+                timer.func.apply(null, timer.args);
+            } else {
+                eval(timer.func);
+            }
+        } catch (e) {
+            exception = e;
+        }
+
+        if (!clock.timers[timer.id]) {
+            if (exception) {
+                throw exception;
+            }
+            return;
+        }
+
+        if (exception) {
+            throw exception;
+        }
+    }
+
+    function timerType(timer) {
+        if (timer.immediate) {
+            return "Immediate";
+        } else if (typeof timer.interval !== "undefined") {
+            return "Interval";
+        } else {
+            return "Timeout";
+        }
+    }
+
+    function clearTimer(clock, timerId, ttype) {
+        if (!timerId) {
+            // null appears to be allowed in most browsers, and appears to be
+            // relied upon by some libraries, like Bootstrap carousel
+            return;
+        }
+
+        if (!clock.timers) {
+            clock.timers = {};
+        }
+
+        // in Node, timerId is an object with .ref()/.unref(), and
+        // its .id field is the actual timer id.
+        if (typeof timerId === "object") {
+            timerId = timerId.id;
+        }
+
+        if (clock.timers.hasOwnProperty(timerId)) {
+            // check that the ID matches a timer of the correct type
+            var timer = clock.timers[timerId];
+            if (timerType(timer) === ttype) {
+                delete clock.timers[timerId];
+            } else {
+                throw new Error("Cannot clear timer: timer created with set" + ttype + "() but cleared with clear" + timerType(timer) + "()");
+            }
+        }
+    }
+
+    function uninstall(clock, target) {
+        var method,
+            i,
+            l;
+
+        for (i = 0, l = clock.methods.length; i < l; i++) {
+            method = clock.methods[i];
+
+            if (target[method].hadOwnProperty) {
+                target[method] = clock["_" + method];
+            } else {
+                try {
+                    delete target[method];
+                } catch (ignore) {}
+            }
+        }
+
+        // Prevent multiple executions, which would completely remove these props
+        clock.methods = [];
+    }
+
+    function hijackMethod(target, method, clock) {
+        var prop;
+
+        clock[method].hadOwnProperty = Object.prototype.hasOwnProperty.call(target, method);
+        clock["_" + method] = target[method];
+
+        if (method === "Date") {
+            var date = mirrorDateProperties(clock[method], target[method]);
+            target[method] = date;
+        } else {
+            target[method] = function () {
+                return clock[method].apply(clock, arguments);
+            };
+
+            for (prop in clock[method]) {
+                if (clock[method].hasOwnProperty(prop)) {
+                    target[method][prop] = clock[method][prop];
+                }
+            }
+        }
+
+        target[method].clock = clock;
+    }
+
+    var timers = {
+        setTimeout: setTimeout,
+        clearTimeout: clearTimeout,
+        setImmediate: global.setImmediate,
+        clearImmediate: global.clearImmediate,
+        setInterval: setInterval,
+        clearInterval: clearInterval,
+        Date: Date
+    };
+
+    var keys = Object.keys || function (obj) {
+        var ks = [],
+            key;
+
+        for (key in obj) {
+            if (obj.hasOwnProperty(key)) {
+                ks.push(key);
+            }
+        }
+
+        return ks;
+    };
+
+    exports.timers = timers;
+
+    function createClock(now) {
+        var clock = {
+            now: getEpoch(now),
+            timeouts: {},
+            Date: createDate()
+        };
+
+        clock.Date.clock = clock;
+
+        clock.setTimeout = function setTimeout(func, timeout) {
+            return addTimer(clock, {
+                func: func,
+                args: Array.prototype.slice.call(arguments, 2),
+                delay: timeout
+            });
+        };
+
+        clock.clearTimeout = function clearTimeout(timerId) {
+            return clearTimer(clock, timerId, "Timeout");
+        };
+
+        clock.setInterval = function setInterval(func, timeout) {
+            return addTimer(clock, {
+                func: func,
+                args: Array.prototype.slice.call(arguments, 2),
+                delay: timeout,
+                interval: timeout
+            });
+        };
+
+        clock.clearInterval = function clearInterval(timerId) {
+            return clearTimer(clock, timerId, "Interval");
+        };
+
+        clock.setImmediate = function setImmediate(func) {
+            return addTimer(clock, {
+                func: func,
+                args: Array.prototype.slice.call(arguments, 1),
+                immediate: true
+            });
+        };
+
+        clock.clearImmediate = function clearImmediate(timerId) {
+            return clearTimer(clock, timerId, "Immediate");
+        };
+
+        clock.tick = function tick(ms) {
+            ms = typeof ms === "number" ? ms : parseTime(ms);
+            var tickFrom = clock.now, tickTo = clock.now + ms, previous = clock.now;
+            var timer = firstTimerInRange(clock, tickFrom, tickTo);
+            var oldNow;
+
+            clock.duringTick = true;
+
+            var firstException;
+            while (timer && tickFrom <= tickTo) {
+                if (clock.timers[timer.id]) {
+                    tickFrom = clock.now = timer.callAt;
+                    try {
+                        oldNow = clock.now;
+                        callTimer(clock, timer);
+                        // compensate for any setSystemTime() call during timer callback
+                        if (oldNow !== clock.now) {
+                            tickFrom += clock.now - oldNow;
+                            tickTo += clock.now - oldNow;
+                            previous += clock.now - oldNow;
+                        }
+                    } catch (e) {
+                        firstException = firstException || e;
+                    }
+                }
+
+                timer = firstTimerInRange(clock, previous, tickTo);
+                previous = tickFrom;
+            }
+
+            clock.duringTick = false;
+            clock.now = tickTo;
+
+            if (firstException) {
+                throw firstException;
+            }
+
+            return clock.now;
+        };
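+
+        // Usage sketch (illustrative): after clock.setTimeout(fn, 100),
+        // clock.tick(100) advances the fake time by 100 ms and fires fn;
+        // string forms such as clock.tick("01:40") advance by 100 seconds
+        // via parseTime(). tick() returns the new clock.now.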
+
+        clock.next = function next() {
+            var timer = firstTimer(clock);
+            if (!timer) {
+                return clock.now;
+            }
+
+            clock.duringTick = true;
+            try {
+                clock.now = timer.callAt;
+                callTimer(clock, timer);
+                return clock.now;
+            } finally {
+                clock.duringTick = false;
+            }
+        };
+
+        clock.reset = function reset() {
+            clock.timers = {};
+        };
+
+        clock.setSystemTime = function setSystemTime(now) {
+            // determine time difference
+            var newNow = getEpoch(now);
+            var difference = newNow - clock.now;
+
+            // update 'system clock'
+            clock.now = newNow;
+
+            // update timers and intervals to keep them stable
+            for (var id in clock.timers) {
+                if (clock.timers.hasOwnProperty(id)) {
+                    var timer = clock.timers[id];
+                    timer.createdAt += difference;
+                    timer.callAt += difference;
+                }
+            }
+        };
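+
+        // Illustrative consequence: setSystemTime() jumps clock.now (and what
+        // the faked Date reports) without firing anything; because each
+        // timer's createdAt/callAt is shifted by the same difference, a
+        // pending timeout still needs its full remaining tick() time to fire.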
+
+        return clock;
+    }
+    exports.createClock = createClock;
+
+    exports.install = function install(target, now, toFake) {
+        var i,
+            l;
+
+        if (typeof target === "number") {
+            toFake = now;
+            now = target;
+            target = null;
+        }
+
+        if (!target) {
+            target = global;
+        }
+
+        var clock = createClock(now);
+
+        clock.uninstall = function () {
+            uninstall(clock, target);
+        };
+
+        clock.methods = toFake || [];
+
+        if (clock.methods.length === 0) {
+            clock.methods = keys(timers);
+        }
+
+        for (i = 0, l = clock.methods.length; i < l; i++) {
+            hijackMethod(target, clock.methods[i], clock);
+        }
+
+        return clock;
+    };
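+
+    // Usage sketch (illustrative; someObj is hypothetical): install() with no
+    // arguments fakes every method listed in `timers` on the global object,
+    // while install(someObj, 0, ["setTimeout", "Date"]) fakes only those two
+    // on someObj; the returned clock's uninstall() restores the originals.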
+
+}(global || this));
+
+}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
+},{}]},{},[1])(1)
+});
+  })();
+  var define;
+/**
+ * Sinon core utilities. For internal use only.
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+var sinon = (function () { // eslint-disable-line no-unused-vars
+    "use strict";
+
+    var sinonModule;
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        sinonModule = module.exports = require("./sinon/util/core");
+        require("./sinon/extend");
+        require("./sinon/walk");
+        require("./sinon/typeOf");
+        require("./sinon/times_in_words");
+        require("./sinon/spy");
+        require("./sinon/call");
+        require("./sinon/behavior");
+        require("./sinon/stub");
+        require("./sinon/mock");
+        require("./sinon/collection");
+        require("./sinon/assert");
+        require("./sinon/sandbox");
+        require("./sinon/test");
+        require("./sinon/test_case");
+        require("./sinon/match");
+        require("./sinon/format");
+        require("./sinon/log_error");
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+    } else if (isNode) {
+        loadDependencies(require, module.exports, module);
+        sinonModule = module.exports;
+    } else {
+        sinonModule = {};
+    }
+
+    return sinonModule;
+}());
+
+/**
+ * @depend ../../sinon.js
+ */
+/**
+ * Sinon core utilities. For internal use only.
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function (sinonGlobal) {
+    
+    var div = typeof document !== "undefined" && document.createElement("div");
+    var hasOwn = Object.prototype.hasOwnProperty;
+
+    function isDOMNode(obj) {
+        var success = false;
+
+        try {
+            obj.appendChild(div);
+            success = div.parentNode === obj;
+        } catch (e) {
+            return false;
+        } finally {
+            try {
+                obj.removeChild(div);
+            } catch (e) {
+                // Remove failed, not much we can do about that
+            }
+        }
+
+        return success;
+    }
+
+    function isElement(obj) {
+        return div && obj && obj.nodeType === 1 && isDOMNode(obj);
+    }
+
+    function isFunction(obj) {
+        return typeof obj === "function" || !!(obj && obj.constructor && obj.call && obj.apply);
+    }
+
+    function isReallyNaN(val) {
+        return typeof val === "number" && isNaN(val);
+    }
+
+    function mirrorProperties(target, source) {
+        for (var prop in source) {
+            if (!hasOwn.call(target, prop)) {
+                target[prop] = source[prop];
+            }
+        }
+    }
+
+    function isRestorable(obj) {
+        return typeof obj === "function" && typeof obj.restore === "function" && obj.restore.sinon;
+    }
+
+    // Cheap way to detect if we have ES5 support.
+    var hasES5Support = "keys" in Object;
+
+    function makeApi(sinon) {
+        sinon.wrapMethod = function wrapMethod(object, property, method) {
+            if (!object) {
+                throw new TypeError("Should wrap property of object");
+            }
+
+            if (typeof method !== "function" && typeof method !== "object") {
+                throw new TypeError("Method wrapper should be a function or a property descriptor");
+            }
+
+            function checkWrappedMethod(wrappedMethod) {
+                var error;
+
+                if (!isFunction(wrappedMethod)) {
+                    error = new TypeError("Attempted to wrap " + (typeof wrappedMethod) + " property " +
+                                        property + " as function");
+                } else if (wrappedMethod.restore && wrappedMethod.restore.sinon) {
+                    error = new TypeError("Attempted to wrap " + property + " which is already wrapped");
+                } else if (wrappedMethod.calledBefore) {
+                    var verb = wrappedMethod.returns ? "stubbed" : "spied on";
+                    error = new TypeError("Attempted to wrap " + property + " which is already " + verb);
+                }
+
+                if (error) {
+                    if (wrappedMethod && wrappedMethod.stackTrace) {
+                        error.stack += "\n--------------\n" + wrappedMethod.stackTrace;
+                    }
+                    throw error;
+                }
+            }
+
+            var error, wrappedMethod, i;
+
+            // IE 8 does not support hasOwnProperty on the window object and Firefox has a problem
+            // when using hasOwn.call on objects from other frames.
+            var owned = object.hasOwnProperty ? object.hasOwnProperty(property) : hasOwn.call(object, property);
+
+            if (hasES5Support) {
+                var methodDesc = (typeof method === "function") ? {value: method} : method;
+                var wrappedMethodDesc = sinon.getPropertyDescriptor(object, property);
+
+                if (!wrappedMethodDesc) {
+                    error = new TypeError("Attempted to wrap " + (typeof wrappedMethod) + " property " +
+                                        property + " as function");
+                } else if (wrappedMethodDesc.restore && wrappedMethodDesc.restore.sinon) {
+                    error = new TypeError("Attempted to wrap " + property + " which is already wrapped");
+                }
+                if (error) {
+                    if (wrappedMethodDesc && wrappedMethodDesc.stackTrace) {
+                        error.stack += "\n--------------\n" + wrappedMethodDesc.stackTrace;
+                    }
+                    throw error;
+                }
+
+                var types = sinon.objectKeys(methodDesc);
+                for (i = 0; i < types.length; i++) {
+                    wrappedMethod = wrappedMethodDesc[types[i]];
+                    checkWrappedMethod(wrappedMethod);
+                }
+
+                mirrorProperties(methodDesc, wrappedMethodDesc);
+                for (i = 0; i < types.length; i++) {
+                    mirrorProperties(methodDesc[types[i]], wrappedMethodDesc[types[i]]);
+                }
+                Object.defineProperty(object, property, methodDesc);
+            } else {
+                wrappedMethod = object[property];
+                checkWrappedMethod(wrappedMethod);
+                object[property] = method;
+                method.displayName = property;
+            }
+
+            method.displayName = property;
+
+            // Set up a stack trace which can be used later to find what line of
+            // code the original method was created on.
+            method.stackTrace = (new Error("Stack Trace for original")).stack;
+
+            method.restore = function () {
+                // For prototype properties try to reset by delete first.
+                // If this fails (ex: localStorage on mobile safari) then force a reset
+                // via direct assignment.
+                if (!owned) {
+                    // In some cases `delete` may throw an error
+                    try {
+                        delete object[property];
+                    } catch (e) {} // eslint-disable-line no-empty
+                    // For native code functions `delete` fails without throwing an error
+                    // on Chrome < 43, PhantomJS, etc.
+                } else if (hasES5Support) {
+                    Object.defineProperty(object, property, wrappedMethodDesc);
+                }
+
+                // Use strict equality comparison to check failures then force a reset
+                // via direct assignment.
+                if (object[property] === method) {
+                    object[property] = wrappedMethod;
+                }
+            };
+
+            method.restore.sinon = true;
+
+            if (!hasES5Support) {
+                mirrorProperties(method, wrappedMethod);
+            }
+
+            return method;
+        };
+
+        sinon.create = function create(proto) {
+            var F = function () {};
+            F.prototype = proto;
+            return new F();
+        };
+
+        sinon.deepEqual = function deepEqual(a, b) {
+            if (sinon.match && sinon.match.isMatcher(a)) {
+                return a.test(b);
+            }
+
+            if (typeof a !== "object" || typeof b !== "object") {
+                return isReallyNaN(a) && isReallyNaN(b) || a === b;
+            }
+
+            if (isElement(a) || isElement(b)) {
+                return a === b;
+            }
+
+            if (a === b) {
+                return true;
+            }
+
+            if ((a === null && b !== null) || (a !== null && b === null)) {
+                return false;
+            }
+
+            if (a instanceof RegExp && b instanceof RegExp) {
+                return (a.source === b.source) && (a.global === b.global) &&
+                    (a.ignoreCase === b.ignoreCase) && (a.multiline === b.multiline);
+            }
+
+            var aString = Object.prototype.toString.call(a);
+            if (aString !== Object.prototype.toString.call(b)) {
+                return false;
+            }
+
+            if (aString === "[object Date]") {
+                return a.valueOf() === b.valueOf();
+            }
+
+            var prop;
+            var aLength = 0;
+            var bLength = 0;
+
+            if (aString === "[object Array]" && a.length !== b.length) {
+                return false;
+            }
+
+            for (prop in a) {
+                if (a.hasOwnProperty(prop)) {
+                    aLength += 1;
+
+                    if (!(prop in b)) {
+                        return false;
+                    }
+
+                    if (!deepEqual(a[prop], b[prop])) {
+                        return false;
+                    }
+                }
+            }
+
+            for (prop in b) {
+                if (b.hasOwnProperty(prop)) {
+                    bLength += 1;
+                }
+            }
+
+            return aLength === bLength;
+        };
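+
+        // Illustrative cases, per the branches above: deepEqual(NaN, NaN) is
+        // true via isReallyNaN; regexps compare source and flags; dates
+        // compare by valueOf(); plain objects must match key-for-key in both
+        // directions (aLength === bLength).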
+
+        sinon.functionName = function functionName(func) {
+            var name = func.displayName || func.name;
+
+            // Use function decomposition as a last resort to get function
+            // name. Does not rely on function decomposition to work - if it
+            // doesn't, debugging will be slightly less informative
+            // (i.e. toString will say 'spy' rather than 'myFunc').
+            if (!name) {
+                var matches = func.toString().match(/function ([^\s\(]+)/);
+                name = matches && matches[1];
+            }
+
+            return name;
+        };
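+
+        // Illustrative: functionName(function myFunc() {}) === "myFunc"; an
+        // explicit displayName wins over func.name, and the toString()
+        // regexp match is only a last resort for anonymous functions.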
+
+        sinon.functionToString = function toString() {
+            if (this.getCall && this.callCount) {
+                var thisValue,
+                    prop;
+                var i = this.callCount;
+
+                while (i--) {
+                    thisValue = this.getCall(i).thisValue;
+
+                    for (prop in thisValue) {
+                        if (thisValue[prop] === this) {
+                            return prop;
+                        }
+                    }
+                }
+            }
+
+            return this.displayName || "sinon fake";
+        };
+
+        sinon.objectKeys = function objectKeys(obj) {
+            if (obj !== Object(obj)) {
+                throw new TypeError("sinon.objectKeys called on a non-object");
+            }
+
+            var keys = [];
+            var key;
+            for (key in obj) {
+                if (hasOwn.call(obj, key)) {
+                    keys.push(key);
+                }
+            }
+
+            return keys;
+        };
+
+        sinon.getPropertyDescriptor = function getPropertyDescriptor(object, property) {
+            var proto = object;
+            var descriptor;
+
+            while (proto && !(descriptor = Object.getOwnPropertyDescriptor(proto, property))) {
+                proto = Object.getPrototypeOf(proto);
+            }
+            return descriptor;
+        };
+
+        sinon.getConfig = function (custom) {
+            var config = {};
+            custom = custom || {};
+            var defaults = sinon.defaultConfig;
+
+            for (var prop in defaults) {
+                if (defaults.hasOwnProperty(prop)) {
+                    config[prop] = custom.hasOwnProperty(prop) ? custom[prop] : defaults[prop];
+                }
+            }
+
+            return config;
+        };
+
+        sinon.defaultConfig = {
+            injectIntoThis: true,
+            injectInto: null,
+            properties: ["spy", "stub", "mock", "clock", "server", "requests"],
+            useFakeTimers: true,
+            useFakeServer: true
+        };
+
+        sinon.timesInWords = function timesInWords(count) {
+            return count === 1 && "once" ||
+                count === 2 && "twice" ||
+                count === 3 && "thrice" ||
+                (count || 0) + " times";
+        };
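+
+        // Illustrative: timesInWords(1) === "once", timesInWords(3) ===
+        // "thrice", timesInWords(7) === "7 times", and a missing count falls
+        // back to "0 times".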
+
+        sinon.calledInOrder = function (spies) {
+            for (var i = 1, l = spies.length; i < l; i++) {
+                if (!spies[i - 1].calledBefore(spies[i]) || !spies[i].called) {
+                    return false;
+                }
+            }
+
+            return true;
+        };
+
+        sinon.orderByFirstCall = function (spies) {
+            return spies.sort(function (a, b) {
+                // uuid, won't ever be equal
+                var aCall = a.getCall(0);
+                var bCall = b.getCall(0);
+                var aId = aCall && aCall.callId || -1;
+                var bId = bCall && bCall.callId || -1;
+
+                return aId < bId ? -1 : 1;
+            });
+        };
+
+        sinon.createStubInstance = function (constructor) {
+            if (typeof constructor !== "function") {
+                throw new TypeError("The constructor should be a function.");
+            }
+            return sinon.stub(sinon.create(constructor.prototype));
+        };
+
+        sinon.restore = function (object) {
+            if (object !== null && typeof object === "object") {
+                for (var prop in object) {
+                    if (isRestorable(object[prop])) {
+                        object[prop].restore();
+                    }
+                }
+            } else if (isRestorable(object)) {
+                object.restore();
+            }
+        };
+
+        return sinon;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports) {
+        makeApi(exports);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
+
+/**
+ * @depend util/core.js
+ */
+(function (sinonGlobal) {
+    
+    function makeApi(sinon) {
+
+        // Adapted from https://developer.mozilla.org/en/docs/ECMAScript_DontEnum_attribute#JScript_DontEnum_Bug
+        var hasDontEnumBug = (function () {
+            var obj = {
+                constructor: function () {
+                    return "0";
+                },
+                toString: function () {
+                    return "1";
+                },
+                valueOf: function () {
+                    return "2";
+                },
+                toLocaleString: function () {
+                    return "3";
+                },
+                prototype: function () {
+                    return "4";
+                },
+                isPrototypeOf: function () {
+                    return "5";
+                },
+                propertyIsEnumerable: function () {
+                    return "6";
+                },
+                hasOwnProperty: function () {
+                    return "7";
+                },
+                length: function () {
+                    return "8";
+                },
+                unique: function () {
+                    return "9";
+                }
+            };
+
+            var result = [];
+            for (var prop in obj) {
+                if (obj.hasOwnProperty(prop)) {
+                    result.push(obj[prop]());
+                }
+            }
+            return result.join("") !== "0123456789";
+        })();
+
+        /* Public: Extend target in place with all (own) properties from sources in-order. Thus, last source will
+         *         override properties in previous sources.
+         *
+         * target - The Object to extend
+         * sources - Objects to copy properties from.
+         *
+         * Returns the extended target
+         */
+        function extend(target /*, sources */) {
+            var sources = Array.prototype.slice.call(arguments, 1);
+            var source, i, prop;
+
+            for (i = 0; i < sources.length; i++) {
+                source = sources[i];
+
+                for (prop in source) {
+                    if (source.hasOwnProperty(prop)) {
+                        target[prop] = source[prop];
+                    }
+                }
+
+                // Make sure we copy (own) toString method even when in JScript with DontEnum bug
+                // See https://developer.mozilla.org/en/docs/ECMAScript_DontEnum_attribute#JScript_DontEnum_Bug
+                if (hasDontEnumBug && source.hasOwnProperty("toString") && source.toString !== target.toString) {
+                    target.toString = source.toString;
+                }
+            }
+
+            return target;
+        }
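+
+        // Illustrative: extend({ a: 1 }, { b: 2 }, { a: 3 }) mutates and
+        // returns the first argument as { a: 3, b: 2 }; the last source wins,
+        // as the docblock above states.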
+
+        sinon.extend = extend;
+        return sinon.extend;
+    }
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        module.exports = makeApi(sinon);
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
+
+/**
+ * @depend util/core.js
+ */
+(function (sinonGlobal) {
+    
+    function makeApi(sinon) {
+
+        function timesInWords(count) {
+            switch (count) {
+                case 1:
+                    return "once";
+                case 2:
+                    return "twice";
+                case 3:
+                    return "thrice";
+                default:
+                    return (count || 0) + " times";
+            }
+        }
+
+        sinon.timesInWords = timesInWords;
+        return sinon.timesInWords;
+    }
+
+    function loadDependencies(require, exports, module) {
+        var core = require("./util/core");
+        module.exports = makeApi(core);
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
+
+/**
+ * @depend util/core.js
+ */
+/**
+ * Format functions
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2014 Christian Johansen
+ */
+(function (sinonGlobal) {
+    
+    function makeApi(sinon) {
+        function typeOf(value) {
+            if (value === null) {
+                return "null";
+            } else if (value === undefined) {
+                return "undefined";
+            }
+            var string = Object.prototype.toString.call(value);
+            return string.substring(8, string.length - 1).toLowerCase();
+        }
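+
+        // Illustrative, via Object.prototype.toString: typeOf(null) ===
+        // "null", typeOf([]) === "array", typeOf(/x/) === "regexp" and
+        // typeOf(new Date()) === "date".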
+
+        sinon.typeOf = typeOf;
+        return sinon.typeOf;
+    }
+
+    function loadDependencies(require, exports, module) {
+        var core = require("./util/core");
+        module.exports = makeApi(core);
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
+
+/**
+ * @depend util/core.js
+ * @depend typeOf.js
+ */
+/*jslint eqeqeq: false, onevar: false, plusplus: false*/
+/*global module, require, sinon*/
+/**
+ * Match functions
+ *
+ * @author Maximilian Antoni (mail at maxantoni.de)
+ * @license BSD
+ *
+ * Copyright (c) 2012 Maximilian Antoni
+ */
+(function (sinonGlobal) {
+    
+    function makeApi(sinon) {
+        function assertType(value, type, name) {
+            var actual = sinon.typeOf(value);
+            if (actual !== type) {
+                throw new TypeError("Expected type of " + name + " to be " +
+                    type + ", but was " + actual);
+            }
+        }
+
+        var matcher = {
+            toString: function () {
+                return this.message;
+            }
+        };
+
+        function isMatcher(object) {
+            return matcher.isPrototypeOf(object);
+        }
+
+        function matchObject(expectation, actual) {
+            if (actual === null || actual === undefined) {
+                return false;
+            }
+            for (var key in expectation) {
+                if (expectation.hasOwnProperty(key)) {
+                    var exp = expectation[key];
+                    var act = actual[key];
+                    if (isMatcher(exp)) {
+                        if (!exp.test(act)) {
+                            return false;
+                        }
+                    } else if (sinon.typeOf(exp) === "object") {
+                        if (!matchObject(exp, act)) {
+                            return false;
+                        }
+                    } else if (!sinon.deepEqual(exp, act)) {
+                        return false;
+                    }
+                }
+            }
+            return true;
+        }
+
+        function match(expectation, message) {
+            var m = sinon.create(matcher);
+            var type = sinon.typeOf(expectation);
+            switch (type) {
+            case "object":
+                if (typeof expectation.test === "function") {
+                    m.test = function (actual) {
+                        return expectation.test(actual) === true;
+                    };
+                    m.message = "match(" + sinon.functionName(expectation.test) + ")";
+                    return m;
+                }
+                var str = [];
+                for (var key in expectation) {
+                    if (expectation.hasOwnProperty(key)) {
+                        str.push(key + ": " + expectation[key]);
+                    }
+                }
+                m.test = function (actual) {
+                    return matchObject(expectation, actual);
+                };
+                m.message = "match(" + str.join(", ") + ")";
+                break;
+            case "number":
+                m.test = function (actual) {
+                    // we need type coercion here
+                    return expectation == actual; // eslint-disable-line eqeqeq
+                };
+                break;
+            case "string":
+                m.test = function (actual) {
+                    if (typeof actual !== "string") {
+                        return false;
+                    }
+                    return actual.indexOf(expectation) !== -1;
+                };
+                m.message = "match(\"" + expectation + "\")";
+                break;
+            case "regexp":
+                m.test = function (actual) {
+                    if (typeof actual !== "string") {
+                        return false;
+                    }
+                    return expectation.test(actual);
+                };
+                break;
+            case "function":
+                m.test = expectation;
+                if (message) {
+                    m.message = message;
+                } else {
+                    m.message = "match(" + sinon.functionName(expectation) + ")";
+                }
+                break;
+            default:
+                m.test = function (actual) {
+                    return sinon.deepEqual(expectation, actual);
+                };
+            }
+            if (!m.message) {
+                m.message = "match(" + expectation + ")";
+            }
+            return m;
+        }
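+
+        // Usage sketch (illustrative): sinon.match("oh") matches any string
+        // containing "oh", sinon.match(/^h/) delegates to the regexp,
+        // sinon.match({ id: 42 }) matches properties recursively, and a
+        // plain number matches with == coercion, per the cases above.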
+
+        matcher.or = function (m2) {
+            if (!arguments.length) {
+                throw new TypeError("Matcher expected");
+            } else if (!isMatcher(m2)) {
+                m2 = match(m2);
+            }
+            var m1 = this;
+            var or = sinon.create(matcher);
+            or.test = function (actual) {
+                return m1.test(actual) || m2.test(actual);
+            };
+            or.message = m1.message + ".or(" + m2.message + ")";
+            return or;
+        };
+
+        matcher.and = function (m2) {
+            if (!arguments.length) {
+                throw new TypeError("Matcher expected");
+            } else if (!isMatcher(m2)) {
+                m2 = match(m2);
+            }
+            var m1 = this;
+            var and = sinon.create(matcher);
+            and.test = function (actual) {
+                return m1.test(actual) && m2.test(actual);
+            };
+            and.message = m1.message + ".and(" + m2.message + ")";
+            return and;
+        };
+
+        match.isMatcher = isMatcher;
+
+        match.any = match(function () {
+            return true;
+        }, "any");
+
+        match.defined = match(function (actual) {
+            return actual !== null && actual !== undefined;
+        }, "defined");
+
+        match.truthy = match(function (actual) {
+            return !!actual;
+        }, "truthy");
+
+        match.falsy = match(function (actual) {
+            return !actual;
+        }, "falsy");
+
+        match.same = function (expectation) {
+            return match(function (actual) {
+                return expectation === actual;
+            }, "same(" + expectation + ")");
+        };
+
+        match.typeOf = function (type) {
+            assertType(type, "string", "type");
+            return match(function (actual) {
+                return sinon.typeOf(actual) === type;
+            }, "typeOf(\"" + type + "\")");
+        };
+
+        match.instanceOf = function (type) {
+            assertType(type, "function", "type");
+            return match(function (actual) {
+                return actual instanceof type;
+            }, "instanceOf(" + sinon.functionName(type) + ")");
+        };
+
+        function createPropertyMatcher(propertyTest, messagePrefix) {
+            return function (property, value) {
+                assertType(property, "string", "property");
+                var onlyProperty = arguments.length === 1;
+                var message = messagePrefix + "(\"" + property + "\"";
+                if (!onlyProperty) {
+                    message += ", " + value;
+                }
+                message += ")";
+                return match(function (actual) {
+                    if (actual === undefined || actual === null ||
+                            !propertyTest(actual, property)) {
+                        return false;
+                    }
+                    return onlyProperty || sinon.deepEqual(value, actual[property]);
+                }, message);
+            };
+        }
+
+        match.has = createPropertyMatcher(function (actual, property) {
+            if (typeof actual === "object") {
+                return property in actual;
+            }
+            return actual[property] !== undefined;
+        }, "has");
+
+        match.hasOwn = createPropertyMatcher(function (actual, property) {
+            return actual.hasOwnProperty(property);
+        }, "hasOwn");
+
+        match.bool = match.typeOf("boolean");
+        match.number = match.typeOf("number");
+        match.string = match.typeOf("string");
+        match.object = match.typeOf("object");
+        match.func = match.typeOf("function");
+        match.array = match.typeOf("array");
+        match.regexp = match.typeOf("regexp");
+        match.date = match.typeOf("date");
+
+        sinon.match = match;
+        return match;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        require("./typeOf");
+        module.exports = makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
+
+/**
+ * @depend util/core.js
+ */
+/**
+ * Format functions
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2014 Christian Johansen
+ */
+(function (sinonGlobal, formatio) {
+    
+    function makeApi(sinon) {
+        function valueFormatter(value) {
+            return "" + value;
+        }
+
+        function getFormatioFormatter() {
+            var formatter = formatio.configure({
+                    quoteStrings: false,
+                    limitChildrenCount: 250
+                });
+
+            function format() {
+                return formatter.ascii.apply(formatter, arguments);
+            }
+
+            return format;
+        }
+
+        function getNodeFormatter() {
+            try {
+                var util = require("util");
+            } catch (e) {
+                /* Node, but no util module - would be very old, but better safe than sorry */
+            }
+
+            function format(v) {
+                var isObjectWithNativeToString = typeof v === "object" && v.toString === Object.prototype.toString;
+                return isObjectWithNativeToString ? util.inspect(v) : v;
+            }
+
+            return util ? format : valueFormatter;
+        }
+
+        var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+        var formatter;
+
+        if (isNode) {
+            try {
+                formatio = require("formatio");
+            }
+            catch (e) {} // eslint-disable-line no-empty
+        }
+
+        if (formatio) {
+            formatter = getFormatioFormatter();
+        } else if (isNode) {
+            formatter = getNodeFormatter();
+        } else {
+            formatter = valueFormatter;
+        }
+
+        sinon.format = formatter;
+        return sinon.format;
+    }
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        module.exports = makeApi(sinon);
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon, // eslint-disable-line no-undef
+    typeof formatio === "object" && formatio // eslint-disable-line no-undef
+));
+
+/**
+  * @depend util/core.js
+  * @depend match.js
+  * @depend format.js
+  */
+/**
+  * Spy calls
+  *
+  * @author Christian Johansen (christian at cjohansen.no)
+  * @author Maximilian Antoni (mail at maxantoni.de)
+  * @license BSD
+  *
+  * Copyright (c) 2010-2013 Christian Johansen
+  * Copyright (c) 2013 Maximilian Antoni
+  */
+(function (sinonGlobal) {
+    
+    var slice = Array.prototype.slice;
+
+    function makeApi(sinon) {
+        function throwYieldError(proxy, text, args) {
+            var msg = sinon.functionName(proxy) + text;
+            if (args.length) {
+                msg += " Received [" + slice.call(args).join(", ") + "]";
+            }
+            throw new Error(msg);
+        }
+
+        var callProto = {
+            calledOn: function calledOn(thisValue) {
+                if (sinon.match && sinon.match.isMatcher(thisValue)) {
+                    return thisValue.test(this.thisValue);
+                }
+                return this.thisValue === thisValue;
+            },
+
+            calledWith: function calledWith() {
+                var l = arguments.length;
+                if (l > this.args.length) {
+                    return false;
+                }
+                for (var i = 0; i < l; i += 1) {
+                    if (!sinon.deepEqual(arguments[i], this.args[i])) {
+                        return false;
+                    }
+                }
+
+                return true;
+            },
+
+            calledWithMatch: function calledWithMatch() {
+                var l = arguments.length;
+                if (l > this.args.length) {
+                    return false;
+                }
+                for (var i = 0; i < l; i += 1) {
+                    var actual = this.args[i];
+                    var expectation = arguments[i];
+                    if (!sinon.match || !sinon.match(expectation).test(actual)) {
+                        return false;
+                    }
+                }
+                return true;
+            },
+
+            calledWithExactly: function calledWithExactly() {
+                return arguments.length === this.args.length &&
+                    this.calledWith.apply(this, arguments);
+            },
+
+            notCalledWith: function notCalledWith() {
+                return !this.calledWith.apply(this, arguments);
+            },
+
+            notCalledWithMatch: function notCalledWithMatch() {
+                return !this.calledWithMatch.apply(this, arguments);
+            },
+
+            returned: function returned(value) {
+                return sinon.deepEqual(value, this.returnValue);
+            },
+
+            threw: function threw(error) {
+                if (typeof error === "undefined" || !this.exception) {
+                    return !!this.exception;
+                }
+
+                return this.exception === error || this.exception.name === error;
+            },
+
+            calledWithNew: function calledWithNew() {
+                return this.proxy.prototype && this.thisValue instanceof this.proxy;
+            },
+
+            calledBefore: function (other) {
+                return this.callId < other.callId;
+            },
+
+            calledAfter: function (other) {
+                return this.callId > other.callId;
+            },
+
+            callArg: function (pos) {
+                this.args[pos]();
+            },
+
+            callArgOn: function (pos, thisValue) {
+                this.args[pos].apply(thisValue);
+            },
+
+            callArgWith: function (pos) {
+                this.callArgOnWith.apply(this, [pos, null].concat(slice.call(arguments, 1)));
+            },
+
+            callArgOnWith: function (pos, thisValue) {
+                var args = slice.call(arguments, 2);
+                this.args[pos].apply(thisValue, args);
+            },
+
+            "yield": function () {
+                this.yieldOn.apply(this, [null].concat(slice.call(arguments, 0)));
+            },
+
+            yieldOn: function (thisValue) {
+                var args = this.args;
+                for (var i = 0, l = args.length; i < l; ++i) {
+                    if (typeof args[i] === "function") {
+                        args[i].apply(thisValue, slice.call(arguments, 1));
+                        return;
+                    }
+                }
+                throwYieldError(this.proxy, " cannot yield since no callback was passed.", args);
+            },
+
+            yieldTo: function (prop) {
+                this.yieldToOn.apply(this, [prop, null].concat(slice.call(arguments, 1)));
+            },
+
+            yieldToOn: function (prop, thisValue) {
+                var args = this.args;
+                for (var i = 0, l = args.length; i < l; ++i) {
+                    if (args[i] && typeof args[i][prop] === "function") {
+                        args[i][prop].apply(thisValue, slice.call(arguments, 2));
+                        return;
+                    }
+                }
+                throwYieldError(this.proxy, " cannot yield to '" + prop +
+                    "' since no callback was passed.", args);
+            },
+
+            getStackFrames: function () {
+                // Omit the error message and the two top stack frames in sinon itself:
+                return this.stack && this.stack.split("\n").slice(3);
+            },
+
+            toString: function () {
+                var callStr = this.proxy ? this.proxy.toString() + "(" : "";
+                var args = [];
+
+                if (!this.args) {
+                    return ":(";
+                }
+
+                for (var i = 0, l = this.args.length; i < l; ++i) {
+                    args.push(sinon.format(this.args[i]));
+                }
+
+                callStr = callStr + args.join(", ") + ")";
+
+                if (typeof this.returnValue !== "undefined") {
+                    callStr += " => " + sinon.format(this.returnValue);
+                }
+
+                if (this.exception) {
+                    callStr += " !" + this.exception.name;
+
+                    if (this.exception.message) {
+                        callStr += "(" + this.exception.message + ")";
+                    }
+                }
+                if (this.stack) {
+                    callStr += this.getStackFrames()[0].replace(/^\s*(?:at\s+|@)?/, " at ");
+
+                }
+
+                return callStr;
+            }
+        };
+
+        callProto.invokeCallback = callProto.yield;
+
+        function createSpyCall(spy, thisValue, args, returnValue, exception, id, stack) {
+            if (typeof id !== "number") {
+                throw new TypeError("Call id is not a number");
+            }
+            var proxyCall = sinon.create(callProto);
+            proxyCall.proxy = spy;
+            proxyCall.thisValue = thisValue;
+            proxyCall.args = args;
+            proxyCall.returnValue = returnValue;
+            proxyCall.exception = exception;
+            proxyCall.callId = id;
+            proxyCall.stack = stack;
+
+            return proxyCall;
+        }
+        createSpyCall.toString = callProto.toString; // used by mocks
+
+        sinon.spyCall = createSpyCall;
+        return createSpyCall;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        require("./match");
+        require("./format");
+        module.exports = makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
+
+/**
+  * @depend times_in_words.js
+  * @depend util/core.js
+  * @depend extend.js
+  * @depend call.js
+  * @depend format.js
+  */
+/**
+  * Spy functions
+  *
+  * @author Christian Johansen (christian at cjohansen.no)
+  * @license BSD
+  *
+  * Copyright (c) 2010-2013 Christian Johansen
+  */
+(function (sinonGlobal) {
+    
+    function makeApi(sinon) {
+        var push = Array.prototype.push;
+        var slice = Array.prototype.slice;
+        var callId = 0;
+
+        function spy(object, property, types) {
+            if (!property && typeof object === "function") {
+                return spy.create(object);
+            }
+
+            if (!object && !property) {
+                return spy.create(function () { });
+            }
+
+            if (types) {
+                var methodDesc = sinon.getPropertyDescriptor(object, property);
+                for (var i = 0; i < types.length; i++) {
+                    methodDesc[types[i]] = spy.create(methodDesc[types[i]]);
+                }
+                return sinon.wrapMethod(object, property, methodDesc);
+            }
+
+            return sinon.wrapMethod(object, property, spy.create(object[property]));
+        }
+
+        function matchingFake(fakes, args, strict) {
+            if (!fakes) {
+                return undefined;
+            }
+
+            for (var i = 0, l = fakes.length; i < l; i++) {
+                if (fakes[i].matches(args, strict)) {
+                    return fakes[i];
+                }
+            }
+        }
+
+        function incrementCallCount() {
+            this.called = true;
+            this.callCount += 1;
+            this.notCalled = false;
+            this.calledOnce = this.callCount === 1;
+            this.calledTwice = this.callCount === 2;
+            this.calledThrice = this.callCount === 3;
+        }
+
+        function createCallProperties() {
+            this.firstCall = this.getCall(0);
+            this.secondCall = this.getCall(1);
+            this.thirdCall = this.getCall(2);
+            this.lastCall = this.getCall(this.callCount - 1);
+        }
+
+        var vars = "a,b,c,d,e,f,g,h,i,j,k,l";
+        function createProxy(func, proxyLength) {
+            // Retain the function length:
+            var p;
+            if (proxyLength) {
+                eval("p = (function proxy(" + vars.substring(0, proxyLength * 2 - 1) + // eslint-disable-line no-eval
+                    ") { return p.invoke(func, this, slice.call(arguments)); });");
+            } else {
+                p = function proxy() {
+                    return p.invoke(func, this, slice.call(arguments));
+                };
+            }
+            p.isSinonProxy = true;
+            return p;
+        }
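+        // Note (illustrative): the eval above exists only to preserve arity, so a
+        // spy reports the same .length as the function it wraps:
+        //
+        //     sinon.spy(function (a, b) {}).length === 2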
+
+        var uuid = 0;
+
+        // Public API
+        var spyApi = {
+            reset: function () {
+                if (this.invoking) {
+                    var err = new Error("Cannot reset Sinon function while invoking it. " +
+                                        "Move the call to .reset outside of the callback.");
+                    err.name = "InvalidResetException";
+                    throw err;
+                }
+
+                this.called = false;
+                this.notCalled = true;
+                this.calledOnce = false;
+                this.calledTwice = false;
+                this.calledThrice = false;
+                this.callCount = 0;
+                this.firstCall = null;
+                this.secondCall = null;
+                this.thirdCall = null;
+                this.lastCall = null;
+                this.args = [];
+                this.returnValues = [];
+                this.thisValues = [];
+                this.exceptions = [];
+                this.callIds = [];
+                this.stacks = [];
+                if (this.fakes) {
+                    for (var i = 0; i < this.fakes.length; i++) {
+                        this.fakes[i].reset();
+                    }
+                }
+
+                return this;
+            },
+
+            create: function create(func, spyLength) {
+                var name;
+
+                if (typeof func !== "function") {
+                    func = function () { };
+                } else {
+                    name = sinon.functionName(func);
+                }
+
+                if (!spyLength) {
+                    spyLength = func.length;
+                }
+
+                var proxy = createProxy(func, spyLength);
+
+                sinon.extend(proxy, spy);
+                delete proxy.create;
+                sinon.extend(proxy, func);
+
+                proxy.reset();
+                proxy.prototype = func.prototype;
+                proxy.displayName = name || "spy";
+                proxy.toString = sinon.functionToString;
+                proxy.instantiateFake = sinon.spy.create;
+                proxy.id = "spy#" + uuid++;
+
+                return proxy;
+            },
+
+            invoke: function invoke(func, thisValue, args) {
+                var matching = matchingFake(this.fakes, args);
+                var exception, returnValue;
+
+                incrementCallCount.call(this);
+                push.call(this.thisValues, thisValue);
+                push.call(this.args, args);
+                push.call(this.callIds, callId++);
+
+                // Make call properties available from within the spied function:
+                createCallProperties.call(this);
+
+                try {
+                    this.invoking = true;
+
+                    if (matching) {
+                        returnValue = matching.invoke(func, thisValue, args);
+                    } else {
+                        returnValue = (this.func || func).apply(thisValue, args);
+                    }
+
+                    var thisCall = this.getCall(this.callCount - 1);
+                    if (thisCall.calledWithNew() && typeof returnValue !== "object") {
+                        returnValue = thisValue;
+                    }
+                } catch (e) {
+                    exception = e;
+                } finally {
+                    delete this.invoking;
+                }
+
+                push.call(this.exceptions, exception);
+                push.call(this.returnValues, returnValue);
+                push.call(this.stacks, new Error().stack);
+
+                // Make return value and exception available in the calls:
+                createCallProperties.call(this);
+
+                if (exception !== undefined) {
+                    throw exception;
+                }
+
+                return returnValue;
+            },
+
+            named: function named(name) {
+                this.displayName = name;
+                return this;
+            },
+
+            getCall: function getCall(i) {
+                if (i < 0 || i >= this.callCount) {
+                    return null;
+                }
+
+                return sinon.spyCall(this, this.thisValues[i], this.args[i],
+                                        this.returnValues[i], this.exceptions[i],
+                                        this.callIds[i], this.stacks[i]);
+            },
+
+            getCalls: function () {
+                var calls = [];
+                var i;
+
+                for (i = 0; i < this.callCount; i++) {
+                    calls.push(this.getCall(i));
+                }
+
+                return calls;
+            },
+
+            calledBefore: function calledBefore(spyFn) {
+                if (!this.called) {
+                    return false;
+                }
+
+                if (!spyFn.called) {
+                    return true;
+                }
+
+                return this.callIds[0] < spyFn.callIds[spyFn.callIds.length - 1];
+            },
+
+            calledAfter: function calledAfter(spyFn) {
+                if (!this.called || !spyFn.called) {
+                    return false;
+                }
+
+                return this.callIds[this.callCount - 1] > spyFn.callIds[spyFn.callCount - 1];
+            },
+
+            withArgs: function () {
+                var args = slice.call(arguments);
+
+                if (this.fakes) {
+                    var match = matchingFake(this.fakes, args, true);
+
+                    if (match) {
+                        return match;
+                    }
+                } else {
+                    this.fakes = [];
+                }
+
+                var original = this;
+                var fake = this.instantiateFake();
+                fake.matchingArguments = args;
+                fake.parent = this;
+                push.call(this.fakes, fake);
+
+                fake.withArgs = function () {
+                    return original.withArgs.apply(original, arguments);
+                };
+
+                for (var i = 0; i < this.args.length; i++) {
+                    if (fake.matches(this.args[i])) {
+                        incrementCallCount.call(fake);
+                        push.call(fake.thisValues, this.thisValues[i]);
+                        push.call(fake.args, this.args[i]);
+                        push.call(fake.returnValues, this.returnValues[i]);
+                        push.call(fake.exceptions, this.exceptions[i]);
+                        push.call(fake.callIds, this.callIds[i]);
+                    }
+                }
+                createCallProperties.call(fake);
+
+                return fake;
+            },
+
+            matches: function (args, strict) {
+                var margs = this.matchingArguments;
+
+                if (margs.length <= args.length &&
+                    sinon.deepEqual(margs, args.slice(0, margs.length))) {
+                    return !strict || margs.length === args.length;
+                }
+            },
+
+            printf: function (format) {
+                var spyInstance = this;
+                var args = slice.call(arguments, 1);
+                var formatter;
+
+                return (format || "").replace(/%(.)/g, function (match, specifier) {
+                    formatter = spyApi.formatters[specifier];
+
+                    if (typeof formatter === "function") {
+                        return formatter.call(null, spyInstance, args);
+                    } else if (!isNaN(parseInt(specifier, 10))) {
+                        return sinon.format(args[specifier - 1]);
+                    }
+
+                    return "%" + specifier;
+                });
+            }
+        };
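+
+        // Usage sketch (illustrative): printf interpolates the formatters attached
+        // to spyApi below, e.g. %c renders the call count in words:
+        //
+        //     var s = sinon.spy();
+        //     s(); s();
+        //     s.printf("called %c"); // "called twice"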
+
+        function delegateToCalls(method, matchAny, actual, notCalled) {
+            spyApi[method] = function () {
+                if (!this.called) {
+                    if (notCalled) {
+                        return notCalled.apply(this, arguments);
+                    }
+                    return false;
+                }
+
+                var currentCall;
+                var matches = 0;
+
+                for (var i = 0, l = this.callCount; i < l; i += 1) {
+                    currentCall = this.getCall(i);
+
+                    if (currentCall[actual || method].apply(currentCall, arguments)) {
+                        matches += 1;
+
+                        if (matchAny) {
+                            return true;
+                        }
+                    }
+                }
+
+                return matches === this.callCount;
+            };
+        }
+
+        delegateToCalls("calledOn", true);
+        delegateToCalls("alwaysCalledOn", false, "calledOn");
+        delegateToCalls("calledWith", true);
+        delegateToCalls("calledWithMatch", true);
+        delegateToCalls("alwaysCalledWith", false, "calledWith");
+        delegateToCalls("alwaysCalledWithMatch", false, "calledWithMatch");
+        delegateToCalls("calledWithExactly", true);
+        delegateToCalls("alwaysCalledWithExactly", false, "calledWithExactly");
+        delegateToCalls("neverCalledWith", false, "notCalledWith", function () {
+            return true;
+        });
+        delegateToCalls("neverCalledWithMatch", false, "notCalledWithMatch", function () {
+            return true;
+        });
+        delegateToCalls("threw", true);
+        delegateToCalls("alwaysThrew", false, "threw");
+        delegateToCalls("returned", true);
+        delegateToCalls("alwaysReturned", false, "returned");
+        delegateToCalls("calledWithNew", true);
+        delegateToCalls("alwaysCalledWithNew", false, "calledWithNew");
+        delegateToCalls("callArg", false, "callArgWith", function () {
+            throw new Error(this.toString() + " cannot call arg since it was not yet invoked.");
+        });
+        spyApi.callArgWith = spyApi.callArg;
+        delegateToCalls("callArgOn", false, "callArgOnWith", function () {
+            throw new Error(this.toString() + " cannot call arg since it was not yet invoked.");
+        });
+        spyApi.callArgOnWith = spyApi.callArgOn;
+        delegateToCalls("yield", false, "yield", function () {
+            throw new Error(this.toString() + " cannot yield since it was not yet invoked.");
+        });
+        // "invokeCallback" is an alias for "yield" since "yield" is invalid in strict mode.
+        spyApi.invokeCallback = spyApi.yield;
+        delegateToCalls("yieldOn", false, "yieldOn", function () {
+            throw new Error(this.toString() + " cannot yield since it was not yet invoked.");
+        });
+        delegateToCalls("yieldTo", false, "yieldTo", function (property) {
+            throw new Error(this.toString() + " cannot yield to '" + property +
+                "' since it was not yet invoked.");
+        });
+        delegateToCalls("yieldToOn", false, "yieldToOn", function (property) {
+            throw new Error(this.toString() + " cannot yield to '" + property +
+                "' since it was not yet invoked.");
+        });
+
+        spyApi.formatters = {
+            c: function (spyInstance) {
+                return sinon.timesInWords(spyInstance.callCount);
+            },
+
+            n: function (spyInstance) {
+                return spyInstance.toString();
+            },
+
+            C: function (spyInstance) {
+                var calls = [];
+
+                for (var i = 0, l = spyInstance.callCount; i < l; ++i) {
+                    var stringifiedCall = "    " + spyInstance.getCall(i).toString();
+                    if (/\n/.test(calls[i - 1])) {
+                        stringifiedCall = "\n" + stringifiedCall;
+                    }
+                    push.call(calls, stringifiedCall);
+                }
+
+                return calls.length > 0 ? "\n" + calls.join("\n") : "";
+            },
+
+            t: function (spyInstance) {
+                var objects = [];
+
+                for (var i = 0, l = spyInstance.callCount; i < l; ++i) {
+                    push.call(objects, sinon.format(spyInstance.thisValues[i]));
+                }
+
+                return objects.join(", ");
+            },
+
+            "*": function (spyInstance, args) {
+                var formatted = [];
+
+                for (var i = 0, l = args.length; i < l; ++i) {
+                    push.call(formatted, sinon.format(args[i]));
+                }
+
+                return formatted.join(", ");
+            }
+        };
+
+        sinon.extend(spy, spyApi);
+
+        spy.spyCall = sinon.spyCall;
+        sinon.spy = spy;
+
+        return spy;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var core = require("./util/core");
+        require("./call");
+        require("./extend");
+        require("./times_in_words");
+        require("./format");
+        module.exports = makeApi(core);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
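+
+// Usage sketch (illustrative): a bare spy records calls and arguments without
+// changing behavior, and a wrapping spy delegates to the original function:
+//
+//     var callback = sinon.spy();
+//     callback(42);
+//     callback.calledOnce;     // true
+//     callback.calledWith(42); // true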
+
+/**
+ * @depend util/core.js
+ * @depend extend.js
+ */
+/**
+ * Stub behavior
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @author Tim Fischbach (mail at timfischbach.de)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function (sinonGlobal) {
+
+    var slice = Array.prototype.slice;
+    var join = Array.prototype.join;
+    var useLeftMostCallback = -1;
+    var useRightMostCallback = -2;
+
+    var nextTick = (function () {
+        if (typeof process === "object" && typeof process.nextTick === "function") {
+            return process.nextTick;
+        }
+
+        if (typeof setImmediate === "function") {
+            return setImmediate;
+        }
+
+        return function (callback) {
+            setTimeout(callback, 0);
+        };
+    })();
+
+    function throwsException(error, message) {
+        if (typeof error === "string") {
+            this.exception = new Error(message || "");
+            this.exception.name = error;
+        } else if (!error) {
+            this.exception = new Error("Error");
+        } else {
+            this.exception = error;
+        }
+
+        return this;
+    }
+
+    function getCallback(behavior, args) {
+        var callArgAt = behavior.callArgAt;
+
+        if (callArgAt >= 0) {
+            return args[callArgAt];
+        }
+
+        var argumentList;
+
+        if (callArgAt === useLeftMostCallback) {
+            argumentList = args;
+        }
+
+        if (callArgAt === useRightMostCallback) {
+            argumentList = slice.call(args).reverse();
+        }
+
+        var callArgProp = behavior.callArgProp;
+
+        for (var i = 0, l = argumentList.length; i < l; ++i) {
+            if (!callArgProp && typeof argumentList[i] === "function") {
+                return argumentList[i];
+            }
+
+            if (callArgProp && argumentList[i] &&
+                typeof argumentList[i][callArgProp] === "function") {
+                return argumentList[i][callArgProp];
+            }
+        }
+
+        return null;
+    }
+
+    function makeApi(sinon) {
+        function getCallbackError(behavior, func, args) {
+            if (behavior.callArgAt < 0) {
+                var msg;
+
+                if (behavior.callArgProp) {
+                    msg = sinon.functionName(behavior.stub) +
+                        " expected to yield to '" + behavior.callArgProp +
+                        "', but no object with such a property was passed.";
+                } else {
+                    msg = sinon.functionName(behavior.stub) +
+                        " expected to yield, but no callback was passed.";
+                }
+
+                if (args.length > 0) {
+                    msg += " Received [" + join.call(args, ", ") + "]";
+                }
+
+                return msg;
+            }
+
+            return "argument at index " + behavior.callArgAt + " is not a function: " + func;
+        }
+
+        function callCallback(behavior, args) {
+            if (typeof behavior.callArgAt === "number") {
+                var func = getCallback(behavior, args);
+
+                if (typeof func !== "function") {
+                    throw new TypeError(getCallbackError(behavior, func, args));
+                }
+
+                if (behavior.callbackAsync) {
+                    nextTick(function () {
+                        func.apply(behavior.callbackContext, behavior.callbackArguments);
+                    });
+                } else {
+                    func.apply(behavior.callbackContext, behavior.callbackArguments);
+                }
+            }
+        }
+
+        var proto = {
+            create: function create(stub) {
+                var behavior = sinon.extend({}, sinon.behavior);
+                delete behavior.create;
+                behavior.stub = stub;
+
+                return behavior;
+            },
+
+            isPresent: function isPresent() {
+                return (typeof this.callArgAt === "number" ||
+                        this.exception ||
+                        typeof this.returnArgAt === "number" ||
+                        this.returnThis ||
+                        this.returnValueDefined);
+            },
+
+            invoke: function invoke(context, args) {
+                callCallback(this, args);
+
+                if (this.exception) {
+                    throw this.exception;
+                } else if (typeof this.returnArgAt === "number") {
+                    return args[this.returnArgAt];
+                } else if (this.returnThis) {
+                    return context;
+                }
+
+                return this.returnValue;
+            },
+
+            onCall: function onCall(index) {
+                return this.stub.onCall(index);
+            },
+
+            onFirstCall: function onFirstCall() {
+                return this.stub.onFirstCall();
+            },
+
+            onSecondCall: function onSecondCall() {
+                return this.stub.onSecondCall();
+            },
+
+            onThirdCall: function onThirdCall() {
+                return this.stub.onThirdCall();
+            },
+
+            withArgs: function withArgs(/* arguments */) {
+                throw new Error(
+                    "Defining a stub by invoking \"stub.onCall(...).withArgs(...)\" " +
+                    "is not supported. Use \"stub.withArgs(...).onCall(...)\" " +
+                    "to define sequential behavior for calls with certain arguments."
+                );
+            },
+
+            callsArg: function callsArg(pos) {
+                if (typeof pos !== "number") {
+                    throw new TypeError("argument index is not number");
+                }
+
+                this.callArgAt = pos;
+                this.callbackArguments = [];
+                this.callbackContext = undefined;
+                this.callArgProp = undefined;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            callsArgOn: function callsArgOn(pos, context) {
+                if (typeof pos !== "number") {
+                    throw new TypeError("argument index is not number");
+                }
+                if (typeof context !== "object") {
+                    throw new TypeError("argument context is not an object");
+                }
+
+                this.callArgAt = pos;
+                this.callbackArguments = [];
+                this.callbackContext = context;
+                this.callArgProp = undefined;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            callsArgWith: function callsArgWith(pos) {
+                if (typeof pos !== "number") {
+                    throw new TypeError("argument index is not number");
+                }
+
+                this.callArgAt = pos;
+                this.callbackArguments = slice.call(arguments, 1);
+                this.callbackContext = undefined;
+                this.callArgProp = undefined;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            callsArgOnWith: function callsArgOnWith(pos, context) {
+                if (typeof pos !== "number") {
+                    throw new TypeError("argument index is not number");
+                }
+                if (typeof context !== "object") {
+                    throw new TypeError("argument context is not an object");
+                }
+
+                this.callArgAt = pos;
+                this.callbackArguments = slice.call(arguments, 2);
+                this.callbackContext = context;
+                this.callArgProp = undefined;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            yields: function () {
+                this.callArgAt = useLeftMostCallback;
+                this.callbackArguments = slice.call(arguments, 0);
+                this.callbackContext = undefined;
+                this.callArgProp = undefined;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            yieldsRight: function () {
+                this.callArgAt = useRightMostCallback;
+                this.callbackArguments = slice.call(arguments, 0);
+                this.callbackContext = undefined;
+                this.callArgProp = undefined;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            yieldsOn: function (context) {
+                if (typeof context !== "object") {
+                    throw new TypeError("argument context is not an object");
+                }
+
+                this.callArgAt = useLeftMostCallback;
+                this.callbackArguments = slice.call(arguments, 1);
+                this.callbackContext = context;
+                this.callArgProp = undefined;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            yieldsTo: function (prop) {
+                this.callArgAt = useLeftMostCallback;
+                this.callbackArguments = slice.call(arguments, 1);
+                this.callbackContext = undefined;
+                this.callArgProp = prop;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            yieldsToOn: function (prop, context) {
+                if (typeof context !== "object") {
+                    throw new TypeError("argument context is not an object");
+                }
+
+                this.callArgAt = useLeftMostCallback;
+                this.callbackArguments = slice.call(arguments, 2);
+                this.callbackContext = context;
+                this.callArgProp = prop;
+                this.callbackAsync = false;
+
+                return this;
+            },
+
+            throws: throwsException,
+            throwsException: throwsException,
+
+            returns: function returns(value) {
+                this.returnValue = value;
+                this.returnValueDefined = true;
+                this.exception = undefined;
+
+                return this;
+            },
+
+            returnsArg: function returnsArg(pos) {
+                if (typeof pos !== "number") {
+                    throw new TypeError("argument index is not number");
+                }
+
+                this.returnArgAt = pos;
+
+                return this;
+            },
+
+            returnsThis: function returnsThis() {
+                this.returnThis = true;
+
+                return this;
+            }
+        };
+
+        function createAsyncVersion(syncFnName) {
+            return function () {
+                var result = this[syncFnName].apply(this, arguments);
+                this.callbackAsync = true;
+                return result;
+            };
+        }
+
+        // create asynchronous versions of callsArg* and yields* methods
+        for (var method in proto) {
+            // avoid creating another async version of the newly added async methods
+            if (proto.hasOwnProperty(method) && method.match(/^(callsArg|yields)/) && !method.match(/Async/)) {
+                proto[method + "Async"] = createAsyncVersion(method);
+            }
+        }
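+        // For example (illustrative): yields() gains a yieldsAsync() variant with
+        // identical configuration, except callCallback() defers it via nextTick.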
+
+        sinon.behavior = proto;
+        return proto;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        require("./extend");
+        module.exports = makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
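+
+// Usage sketch (illustrative): behaviors are configured through the stub API
+// defined further below; throws() accepts an Error object, or a string used as
+// the error name:
+//
+//     var s = sinon.stub().throws("TypeError");
+//     s(); // throws an Error whose .name is "TypeError"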
+
+/**
+ * @depend util/core.js
+ */
+(function (sinonGlobal) {
+
+    function makeApi(sinon) {
+        function walkInternal(obj, iterator, context, originalObj, seen) {
+            var proto, prop;
+
+            if (typeof Object.getOwnPropertyNames !== "function") {
+                // We explicitly want to enumerate through all of the prototype's properties
+                // in this case, therefore we deliberately leave out an own property check.
+                /* eslint-disable guard-for-in */
+                for (prop in obj) {
+                    iterator.call(context, obj[prop], prop, obj);
+                }
+                /* eslint-enable guard-for-in */
+
+                return;
+            }
+
+            Object.getOwnPropertyNames(obj).forEach(function (k) {
+                if (!seen[k]) {
+                    seen[k] = true;
+                    var target = typeof Object.getOwnPropertyDescriptor(obj, k).get === "function" ?
+                        originalObj : obj;
+                    iterator.call(context, target[k], k, target);
+                }
+            });
+
+            proto = Object.getPrototypeOf(obj);
+            if (proto) {
+                walkInternal(proto, iterator, context, originalObj, seen);
+            }
+        }
+
+        /* Public: walks the prototype chain of an object and iterates over every own property
+         * name encountered. The iterator is called in the same fashion that Array.prototype.forEach
+         * works, where it is passed the value, key, and own object as the 1st, 2nd, and 3rd positional
+         * argument, respectively. In cases where Object.getOwnPropertyNames is not available, walk will
+         * default to using a simple for..in loop.
+         *
+         * obj - The object to walk the prototype chain for.
+         * iterator - The function to be called on each pass of the walk.
+         * context - (Optional) When given, the iterator will be called with this object as the receiver.
+         */
+        function walk(obj, iterator, context) {
+            return walkInternal(obj, iterator, context, obj, {});
+        }
+
+        sinon.walk = walk;
+        return sinon.walk;
+    }
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        module.exports = makeApi(sinon);
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
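+
+// Usage sketch (illustrative): walk visits own property names up the whole
+// prototype chain, skipping names already seen on a nearer object:
+//
+//     function Base() { this.a = 1; }
+//     Base.prototype.b = 2;
+//     sinon.walk(new Base(), function (value, key, owner) {
+//         // visits "a", then "b" and "constructor", then Object.prototype members
+//     });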
+
+/**
+ * @depend util/core.js
+ * @depend extend.js
+ * @depend spy.js
+ * @depend behavior.js
+ * @depend walk.js
+ */
+/**
+ * Stub functions
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function (sinonGlobal) {
+
+    function makeApi(sinon) {
+        function stub(object, property, func) {
+            if (!!func && typeof func !== "function" && typeof func !== "object") {
+                throw new TypeError("Custom stub should be a function or a property descriptor");
+            }
+
+            var wrapper;
+
+            if (func) {
+                if (typeof func === "function") {
+                    wrapper = sinon.spy && sinon.spy.create ? sinon.spy.create(func) : func;
+                } else {
+                    wrapper = func;
+                    if (sinon.spy && sinon.spy.create) {
+                        var types = sinon.objectKeys(wrapper);
+                        for (var i = 0; i < types.length; i++) {
+                            wrapper[types[i]] = sinon.spy.create(wrapper[types[i]]);
+                        }
+                    }
+                }
+            } else {
+                var stubLength = 0;
+                if (typeof object === "object" && typeof object[property] === "function") {
+                    stubLength = object[property].length;
+                }
+                wrapper = stub.create(stubLength);
+            }
+
+            if (!object && typeof property === "undefined") {
+                return sinon.stub.create();
+            }
+
+            if (typeof property === "undefined" && typeof object === "object") {
+                sinon.walk(object || {}, function (value, prop, propOwner) {
+                    // we don't want to stub things like toString(), valueOf(), etc. so we only stub if the object
+                    // is not Object.prototype
+                    if (
+                        propOwner !== Object.prototype &&
+                        prop !== "constructor" &&
+                        typeof sinon.getPropertyDescriptor(propOwner, prop).value === "function"
+                    ) {
+                        stub(object, prop);
+                    }
+                });
+
+                return object;
+            }
+
+            return sinon.wrapMethod(object, property, wrapper);
+        }
+
+
+        /*eslint-disable no-use-before-define*/
+        function getParentBehavior(stubInstance) {
+            return (stubInstance.parent && getCurrentBehavior(stubInstance.parent));
+        }
+
+        function getDefaultBehavior(stubInstance) {
+            return stubInstance.defaultBehavior ||
+                    getParentBehavior(stubInstance) ||
+                    sinon.behavior.create(stubInstance);
+        }
+
+        function getCurrentBehavior(stubInstance) {
+            var behavior = stubInstance.behaviors[stubInstance.callCount - 1];
+            return behavior && behavior.isPresent() ? behavior : getDefaultBehavior(stubInstance);
+        }
+        /*eslint-enable no-use-before-define*/
+
+        var uuid = 0;
+
+        var proto = {
+            create: function create(stubLength) {
+                var functionStub = function () {
+                    return getCurrentBehavior(functionStub).invoke(this, arguments);
+                };
+
+                functionStub.id = "stub#" + uuid++;
+                var orig = functionStub;
+                functionStub = sinon.spy.create(functionStub, stubLength);
+                functionStub.func = orig;
+
+                sinon.extend(functionStub, stub);
+                functionStub.instantiateFake = sinon.stub.create;
+                functionStub.displayName = "stub";
+                functionStub.toString = sinon.functionToString;
+
+                functionStub.defaultBehavior = null;
+                functionStub.behaviors = [];
+
+                return functionStub;
+            },
+
+            resetBehavior: function () {
+                var i;
+
+                this.defaultBehavior = null;
+                this.behaviors = [];
+
+                delete this.returnValue;
+                delete this.returnArgAt;
+                this.returnThis = false;
+
+                if (this.fakes) {
+                    for (i = 0; i < this.fakes.length; i++) {
+                        this.fakes[i].resetBehavior();
+                    }
+                }
+            },
+
+            onCall: function onCall(index) {
+                if (!this.behaviors[index]) {
+                    this.behaviors[index] = sinon.behavior.create(this);
+                }
+
+                return this.behaviors[index];
+            },
+
+            onFirstCall: function onFirstCall() {
+                return this.onCall(0);
+            },
+
+            onSecondCall: function onSecondCall() {
+                return this.onCall(1);
+            },
+
+            onThirdCall: function onThirdCall() {
+                return this.onCall(2);
+            }
+        };
+
+        function createBehavior(behaviorMethod) {
+            return function () {
+                this.defaultBehavior = this.defaultBehavior || sinon.behavior.create(this);
+                this.defaultBehavior[behaviorMethod].apply(this.defaultBehavior, arguments);
+                return this;
+            };
+        }
+
+        for (var method in sinon.behavior) {
+            if (sinon.behavior.hasOwnProperty(method) &&
+                !proto.hasOwnProperty(method) &&
+                method !== "create" &&
+                method !== "withArgs" &&
+                method !== "invoke") {
+                proto[method] = createBehavior(method);
+            }
+        }
+
+        sinon.extend(stub, proto);
+        sinon.stub = stub;
+
+        return stub;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var core = require("./util/core");
+        require("./behavior");
+        require("./spy");
+        require("./extend");
+        module.exports = makeApi(core);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
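+
+// Usage sketch (illustrative): per-call behaviors take precedence over the
+// default behavior configured directly on the stub:
+//
+//     var s = sinon.stub();
+//     s.onFirstCall().returns(1);
+//     s.onSecondCall().returns(2);
+//     s.returns(0);      // default for every other call
+//     s(); s(); s();     // 1, 2, 0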
+
+/**
+ * @depend times_in_words.js
+ * @depend util/core.js
+ * @depend call.js
+ * @depend extend.js
+ * @depend match.js
+ * @depend spy.js
+ * @depend stub.js
+ * @depend format.js
+ */
+/**
+ * Mock functions.
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function (sinonGlobal) {
+
+    function makeApi(sinon) {
+        var push = [].push;
+        var match = sinon.match;
+
+        function mock(object) {
+            // if (typeof console !== undefined && console.warn) {
+            //     console.warn("mock will be removed from Sinon.JS v2.0");
+            // }
+
+            if (!object) {
+                return sinon.expectation.create("Anonymous mock");
+            }
+
+            return mock.create(object);
+        }
+
+        function each(collection, callback) {
+            if (!collection) {
+                return;
+            }
+
+            for (var i = 0, l = collection.length; i < l; i += 1) {
+                callback(collection[i]);
+            }
+        }
+
+        function arrayEquals(arr1, arr2, compareLength) {
+            if (compareLength && (arr1.length !== arr2.length)) {
+                return false;
+            }
+
+            for (var i = 0, l = arr1.length; i < l; i++) {
+                if (!sinon.deepEqual(arr1[i], arr2[i])) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        sinon.extend(mock, {
+            create: function create(object) {
+                if (!object) {
+                    throw new TypeError("object is null");
+                }
+
+                var mockObject = sinon.extend({}, mock);
+                mockObject.object = object;
+                delete mockObject.create;
+
+                return mockObject;
+            },
+
+            expects: function expects(method) {
+                if (!method) {
+                    throw new TypeError("method is falsy");
+                }
+
+                if (!this.expectations) {
+                    this.expectations = {};
+                    this.proxies = [];
+                }
+
+                if (!this.expectations[method]) {
+                    this.expectations[method] = [];
+                    var mockObject = this;
+
+                    sinon.wrapMethod(this.object, method, function () {
+                        return mockObject.invokeMethod(method, this, arguments);
+                    });
+
+                    push.call(this.proxies, method);
+                }
+
+                var expectation = sinon.expectation.create(method);
+                push.call(this.expectations[method], expectation);
+
+                return expectation;
+            },
+
+            restore: function restore() {
+                var object = this.object;
+
+                each(this.proxies, function (proxy) {
+                    if (typeof object[proxy].restore === "function") {
+                        object[proxy].restore();
+                    }
+                });
+            },
+
+            verify: function verify() {
+                var expectations = this.expectations || {};
+                var messages = [];
+                var met = [];
+
+                each(this.proxies, function (proxy) {
+                    each(expectations[proxy], function (expectation) {
+                        if (!expectation.met()) {
+                            push.call(messages, expectation.toString());
+                        } else {
+                            push.call(met, expectation.toString());
+                        }
+                    });
+                });
+
+                this.restore();
+
+                if (messages.length > 0) {
+                    sinon.expectation.fail(messages.concat(met).join("\n"));
+                } else if (met.length > 0) {
+                    sinon.expectation.pass(messages.concat(met).join("\n"));
+                }
+
+                return true;
+            },
+
+            invokeMethod: function invokeMethod(method, thisValue, args) {
+                var expectations = this.expectations && this.expectations[method] ? this.expectations[method] : [];
+                var expectationsWithMatchingArgs = [];
+                var currentArgs = args || [];
+                var i, available;
+
+                for (i = 0; i < expectations.length; i += 1) {
+                    var expectedArgs = expectations[i].expectedArguments || [];
+                    if (arrayEquals(expectedArgs, currentArgs, expectations[i].expectsExactArgCount)) {
+                        expectationsWithMatchingArgs.push(expectations[i]);
+                    }
+                }
+
+                for (i = 0; i < expectationsWithMatchingArgs.length; i += 1) {
+                    if (!expectationsWithMatchingArgs[i].met() &&
+                        expectationsWithMatchingArgs[i].allowsCall(thisValue, args)) {
+                        return expectationsWithMatchingArgs[i].apply(thisValue, args);
+                    }
+                }
+
+                var messages = [];
+                var exhausted = 0;
+
+                for (i = 0; i < expectationsWithMatchingArgs.length; i += 1) {
+                    if (expectationsWithMatchingArgs[i].allowsCall(thisValue, args)) {
+                        available = available || expectationsWithMatchingArgs[i];
+                    } else {
+                        exhausted += 1;
+                    }
+                }
+
+                if (available && exhausted === 0) {
+                    return available.apply(thisValue, args);
+                }
+
+                for (i = 0; i < expectations.length; i += 1) {
+                    push.call(messages, "    " + expectations[i].toString());
+                }
+
+                messages.unshift("Unexpected call: " + sinon.spyCall.toString.call({
+                    proxy: method,
+                    args: args
+                }));
+
+                sinon.expectation.fail(messages.join("\n"));
+            }
+        });
+
+        var times = sinon.timesInWords;
+        var slice = Array.prototype.slice;
+
+        function callCountInWords(callCount) {
+            if (callCount === 0) {
+                return "never called";
+            }
+
+            return "called " + times(callCount);
+        }
+
+        function expectedCallCountInWords(expectation) {
+            var min = expectation.minCalls;
+            var max = expectation.maxCalls;
+
+            if (typeof min === "number" && typeof max === "number") {
+                var str = times(min);
+
+                if (min !== max) {
+                    str = "at least " + str + " and at most " + times(max);
+                }
+
+                return str;
+            }
+
+            if (typeof min === "number") {
+                return "at least " + times(min);
+            }
+
+            return "at most " + times(max);
+        }
+
+        function receivedMinCalls(expectation) {
+            var hasMinLimit = typeof expectation.minCalls === "number";
+            return !hasMinLimit || expectation.callCount >= expectation.minCalls;
+        }
+
+        function receivedMaxCalls(expectation) {
+            if (typeof expectation.maxCalls !== "number") {
+                return false;
+            }
+
+            return expectation.callCount === expectation.maxCalls;
+        }
+
+        function verifyMatcher(possibleMatcher, arg) {
+            var isMatcher = match && match.isMatcher(possibleMatcher);
+
+            // Non-matchers pass; matchers pass only when their test succeeds.
+            // ("|| true" here would make every argument verify.)
+            return isMatcher && possibleMatcher.test(arg) || !isMatcher;
+        }
+
+        sinon.expectation = {
+            minCalls: 1,
+            maxCalls: 1,
+
+            create: function create(methodName) {
+                var expectation = sinon.extend(sinon.stub.create(), sinon.expectation);
+                delete expectation.create;
+                expectation.method = methodName;
+
+                return expectation;
+            },
+
+            invoke: function invoke(func, thisValue, args) {
+                this.verifyCallAllowed(thisValue, args);
+
+                return sinon.spy.invoke.apply(this, arguments);
+            },
+
+            atLeast: function atLeast(num) {
+                if (typeof num !== "number") {
+                    throw new TypeError("'" + num + "' is not number");
+                }
+
+                if (!this.limitsSet) {
+                    this.maxCalls = null;
+                    this.limitsSet = true;
+                }
+
+                this.minCalls = num;
+
+                return this;
+            },
+
+            atMost: function atMost(num) {
+                if (typeof num !== "number") {
+                    throw new TypeError("'" + num + "' is not number");
+                }
+
+                if (!this.limitsSet) {
+                    this.minCalls = null;
+                    this.limitsSet = true;
+                }
+
+                this.maxCalls = num;
+
+                return this;
+            },
+
+            never: function never() {
+                return this.exactly(0);
+            },
+
+            once: function once() {
+                return this.exactly(1);
+            },
+
+            twice: function twice() {
+                return this.exactly(2);
+            },
+
+            thrice: function thrice() {
+                return this.exactly(3);
+            },
+
+            exactly: function exactly(num) {
+                if (typeof num !== "number") {
+                    throw new TypeError("'" + num + "' is not a number");
+                }
+
+                this.atLeast(num);
+                return this.atMost(num);
+            },
+
+            met: function met() {
+                return !this.failed && receivedMinCalls(this);
+            },
+
+            verifyCallAllowed: function verifyCallAllowed(thisValue, args) {
+                if (receivedMaxCalls(this)) {
+                    this.failed = true;
+                    sinon.expectation.fail(this.method + " already called " + times(this.maxCalls));
+                }
+
+                if ("expectedThis" in this && this.expectedThis !== thisValue) {
+                    sinon.expectation.fail(this.method + " called with " + thisValue + " as thisValue, expected " +
+                        this.expectedThis);
+                }
+
+                if (!("expectedArguments" in this)) {
+                    return;
+                }
+
+                if (!args) {
+                    sinon.expectation.fail(this.method + " received no arguments, expected " +
+                        sinon.format(this.expectedArguments));
+                }
+
+                if (args.length < this.expectedArguments.length) {
+                    sinon.expectation.fail(this.method + " received too few arguments (" + sinon.format(args) +
+                        "), expected " + sinon.format(this.expectedArguments));
+                }
+
+                if (this.expectsExactArgCount &&
+                    args.length !== this.expectedArguments.length) {
+                    sinon.expectation.fail(this.method + " received too many arguments (" + sinon.format(args) +
+                        "), expected " + sinon.format(this.expectedArguments));
+                }
+
+                for (var i = 0, l = this.expectedArguments.length; i < l; i += 1) {
+                    if (!verifyMatcher(this.expectedArguments[i], args[i])) {
+                        sinon.expectation.fail(this.method + " received wrong arguments " + sinon.format(args) +
+                            ", didn't match " + this.expectedArguments.toString());
+                    }
+
+                    if (!sinon.deepEqual(this.expectedArguments[i], args[i])) {
+                        sinon.expectation.fail(this.method + " received wrong arguments " + sinon.format(args) +
+                            ", expected " + sinon.format(this.expectedArguments));
+                    }
+                }
+            },
+
+            allowsCall: function allowsCall(thisValue, args) {
+                if (this.met() && receivedMaxCalls(this)) {
+                    return false;
+                }
+
+                if ("expectedThis" in this && this.expectedThis !== thisValue) {
+                    return false;
+                }
+
+                if (!("expectedArguments" in this)) {
+                    return true;
+                }
+
+                args = args || [];
+
+                if (args.length < this.expectedArguments.length) {
+                    return false;
+                }
+
+                if (this.expectsExactArgCount &&
+                    args.length !== this.expectedArguments.length) {
+                    return false;
+                }
+
+                for (var i = 0, l = this.expectedArguments.length; i < l; i += 1) {
+                    if (!verifyMatcher(this.expectedArguments[i], args[i])) {
+                        return false;
+                    }
+
+                    if (!sinon.deepEqual(this.expectedArguments[i], args[i])) {
+                        return false;
+                    }
+                }
+
+                return true;
+            },
+
+            withArgs: function withArgs() {
+                this.expectedArguments = slice.call(arguments);
+                return this;
+            },
+
+            withExactArgs: function withExactArgs() {
+                this.withArgs.apply(this, arguments);
+                this.expectsExactArgCount = true;
+                return this;
+            },
+
+            on: function on(thisValue) {
+                this.expectedThis = thisValue;
+                return this;
+            },
+
+            toString: function () {
+                var args = (this.expectedArguments || []).slice();
+
+                if (!this.expectsExactArgCount) {
+                    push.call(args, "[...]");
+                }
+
+                var callStr = sinon.spyCall.toString.call({
+                    proxy: this.method || "anonymous mock expectation",
+                    args: args
+                });
+
+                var message = callStr.replace(", [...", "[, ...") + " " +
+                    expectedCallCountInWords(this);
+
+                if (this.met()) {
+                    return "Expectation met: " + message;
+                }
+
+                return "Expected " + message + " (" +
+                    callCountInWords(this.callCount) + ")";
+            },
+
+            verify: function verify() {
+                if (!this.met()) {
+                    sinon.expectation.fail(this.toString());
+                } else {
+                    sinon.expectation.pass(this.toString());
+                }
+
+                return true;
+            },
+
+            pass: function pass(message) {
+                sinon.assert.pass(message);
+            },
+
+            fail: function fail(message) {
+                var exception = new Error(message);
+                exception.name = "ExpectationError";
+
+                throw exception;
+            }
+        };
+
+        sinon.mock = mock;
+        return mock;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        require("./times_in_words");
+        require("./call");
+        require("./extend");
+        require("./match");
+        require("./spy");
+        require("./stub");
+        require("./format");
+
+        module.exports = makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
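+
+// Usage sketch (illustrative; `api` is a hypothetical object under test):
+//
+//     var api = { get: function () {} };
+//     var m = sinon.mock(api);
+//     m.expects("get").once().withArgs("/users");
+//     api.get("/users");
+//     m.verify(); // throws an ExpectationError if any expectation is unmet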
+
+/**
+ * @depend util/core.js
+ * @depend spy.js
+ * @depend stub.js
+ * @depend mock.js
+ */
+/**
+ * Collections of stubs, spies and mocks.
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function (sinonGlobal) {
+
+    var push = [].push;
+    var hasOwnProperty = Object.prototype.hasOwnProperty;
+
+    function getFakes(fakeCollection) {
+        if (!fakeCollection.fakes) {
+            fakeCollection.fakes = [];
+        }
+
+        return fakeCollection.fakes;
+    }
+
+    function each(fakeCollection, method) {
+        var fakes = getFakes(fakeCollection);
+
+        for (var i = 0, l = fakes.length; i < l; i += 1) {
+            if (typeof fakes[i][method] === "function") {
+                fakes[i][method]();
+            }
+        }
+    }
+
+    function compact(fakeCollection) {
+        // Empty the array in place so references held elsewhere stay valid.
+        var fakes = getFakes(fakeCollection);
+        fakes.length = 0;
+    }
+
+    function makeApi(sinon) {
+        var collection = {
+            verify: function verify() {
+                each(this, "verify");
+            },
+
+            restore: function restore() {
+                each(this, "restore");
+                compact(this);
+            },
+
+            reset: function reset() {
+                each(this, "reset");
+            },
+
+            verifyAndRestore: function verifyAndRestore() {
+                var exception;
+
+                try {
+                    this.verify();
+                } catch (e) {
+                    exception = e;
+                }
+
+                this.restore();
+
+                if (exception) {
+                    throw exception;
+                }
+            },
+
+            add: function add(fake) {
+                push.call(getFakes(this), fake);
+                return fake;
+            },
+
+            spy: function spy() {
+                return this.add(sinon.spy.apply(sinon, arguments));
+            },
+
+            stub: function stub(object, property, value) {
+                if (property) {
+                    var original = object[property];
+
+                    if (typeof original !== "function") {
+                        if (!hasOwnProperty.call(object, property)) {
+                            throw new TypeError("Cannot stub non-existent own property " + property);
+                        }
+
+                        object[property] = value;
+
+                        return this.add({
+                            restore: function () {
+                                object[property] = original;
+                            }
+                        });
+                    }
+                }
+                if (!property && !!object && typeof object === "object") {
+                    var stubbedObj = sinon.stub.apply(sinon, arguments);
+
+                    for (var prop in stubbedObj) {
+                        if (typeof stubbedObj[prop] === "function") {
+                            this.add(stubbedObj[prop]);
+                        }
+                    }
+
+                    return stubbedObj;
+                }
+
+                return this.add(sinon.stub.apply(sinon, arguments));
+            },
+
+            mock: function mock() {
+                return this.add(sinon.mock.apply(sinon, arguments));
+            },
+
+            inject: function inject(obj) {
+                var col = this;
+
+                obj.spy = function () {
+                    return col.spy.apply(col, arguments);
+                };
+
+                obj.stub = function () {
+                    return col.stub.apply(col, arguments);
+                };
+
+                obj.mock = function () {
+                    return col.mock.apply(col, arguments);
+                };
+
+                return obj;
+            }
+        };
+
+        sinon.collection = collection;
+        return collection;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        require("./mock");
+        require("./spy");
+        require("./stub");
+        module.exports = makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
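+
+// Usage sketch (illustrative only, not part of the library): the collection
+// API groups fakes so they can be verified and restored together. `obj` and
+// `method` below are hypothetical names.
+//
+//     var obj = { method: function () {} };
+//     sinon.collection.stub(obj, "method").returns(42);
+//     obj.method();               // 42
+//     sinon.collection.restore(); // the original obj.method is back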
+
+/**
+ * Fake timer API
+ * setTimeout
+ * setInterval
+ * clearTimeout
+ * clearInterval
+ * tick
+ * reset
+ * Date
+ *
+ * Inspired by jsUnitMockTimeOut from JsUnit
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function () {
+    
+    function makeApi(s, lol) {
+        /*global lolex */
+        var llx = typeof lolex !== "undefined" ? lolex : lol;
+
+        s.useFakeTimers = function () {
+            var now;
+            var methods = Array.prototype.slice.call(arguments);
+
+            if (typeof methods[0] === "string") {
+                now = 0;
+            } else {
+                now = methods.shift();
+            }
+
+            var clock = llx.install(now || 0, methods);
+            clock.restore = clock.uninstall;
+            return clock;
+        };
+
+        s.clock = {
+            create: function (now) {
+                return llx.createClock(now);
+            }
+        };
+
+        s.timers = {
+            setTimeout: setTimeout,
+            clearTimeout: clearTimeout,
+            setImmediate: (typeof setImmediate !== "undefined" ? setImmediate : undefined),
+            clearImmediate: (typeof clearImmediate !== "undefined" ? clearImmediate : undefined),
+            setInterval: setInterval,
+            clearInterval: clearInterval,
+            Date: Date
+        };
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module, lolex) {
+        var core = require("./core");
+        makeApi(core, lolex);
+        module.exports = core;
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+    } else if (isNode) {
+        loadDependencies(require, module.exports, module, require("lolex"));
+    } else {
+        makeApi(sinon); // eslint-disable-line no-undef
+    }
+}());
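+
+// Usage sketch (illustrative only, not part of the library): fake timers
+// replace the global scheduling functions so tests can advance time manually
+// instead of waiting.
+//
+//     var clock = sinon.useFakeTimers();
+//     var fired = false;
+//     setTimeout(function () { fired = true; }, 1000);
+//     clock.tick(1000);  // fired === true, with no real delay
+//     clock.restore();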
+
+/**
+ * Minimal Event interface implementation
+ *
+ * Original implementation by Sven Fuchs: https://gist.github.com/995028
+ * Modifications and tests by Christian Johansen.
+ *
+ * @author Sven Fuchs (svenfuchs at artweb-design.de)
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2011 Sven Fuchs, Christian Johansen
+ */
+if (typeof sinon === "undefined") {
+    this.sinon = {};
+}
+
+(function () {
+    
+    var push = [].push;
+
+    function makeApi(sinon) {
+        sinon.Event = function Event(type, bubbles, cancelable, target) {
+            this.initEvent(type, bubbles, cancelable, target);
+        };
+
+        sinon.Event.prototype = {
+            initEvent: function (type, bubbles, cancelable, target) {
+                this.type = type;
+                this.bubbles = bubbles;
+                this.cancelable = cancelable;
+                this.target = target;
+            },
+
+            stopPropagation: function () {},
+
+            preventDefault: function () {
+                this.defaultPrevented = true;
+            }
+        };
+
+        sinon.ProgressEvent = function ProgressEvent(type, progressEventRaw, target) {
+            this.initEvent(type, false, false, target);
+            this.loaded = progressEventRaw.loaded || null;
+            this.total = progressEventRaw.total || null;
+            this.lengthComputable = !!progressEventRaw.total;
+        };
+
+        sinon.ProgressEvent.prototype = new sinon.Event();
+
+        sinon.ProgressEvent.prototype.constructor = sinon.ProgressEvent;
+
+        sinon.CustomEvent = function CustomEvent(type, customData, target) {
+            this.initEvent(type, false, false, target);
+            this.detail = customData.detail || null;
+        };
+
+        sinon.CustomEvent.prototype = new sinon.Event();
+
+        sinon.CustomEvent.prototype.constructor = sinon.CustomEvent;
+
+        sinon.EventTarget = {
+            addEventListener: function addEventListener(event, listener) {
+                this.eventListeners = this.eventListeners || {};
+                this.eventListeners[event] = this.eventListeners[event] || [];
+                push.call(this.eventListeners[event], listener);
+            },
+
+            removeEventListener: function removeEventListener(event, listener) {
+                var listeners = this.eventListeners && this.eventListeners[event] || [];
+
+                for (var i = 0, l = listeners.length; i < l; ++i) {
+                    if (listeners[i] === listener) {
+                        return listeners.splice(i, 1);
+                    }
+                }
+            },
+
+            dispatchEvent: function dispatchEvent(event) {
+                var type = event.type;
+                var listeners = this.eventListeners && this.eventListeners[type] || [];
+
+                for (var i = 0; i < listeners.length; i++) {
+                    if (typeof listeners[i] === "function") {
+                        listeners[i].call(this, event);
+                    } else {
+                        listeners[i].handleEvent(event);
+                    }
+                }
+
+                return !!event.defaultPrevented;
+            }
+        };
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require) {
+        var sinon = require("./core");
+        makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+    } else if (isNode) {
+        loadDependencies(require);
+    } else {
+        makeApi(sinon); // eslint-disable-line no-undef
+    }
+}());
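+
+// Usage sketch (illustrative only, not part of the library): mixing
+// sinon.EventTarget into an object gives it addEventListener/dispatchEvent.
+// `target` is a hypothetical name.
+//
+//     var target = sinon.extend({}, sinon.EventTarget);
+//     target.addEventListener("load", function (e) { console.log(e.type); });
+//     target.dispatchEvent(new sinon.Event("load", false, false, target));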
+
+/**
+ * @depend util/core.js
+ */
+/**
+ * Logs errors
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2014 Christian Johansen
+ */
+(function (sinonGlobal) {
+    
+    // cache a reference to setTimeout, so that our reference won't be stubbed out
+    // when using fake timers and errors will still get logged
+    // https://github.com/cjohansen/Sinon.JS/issues/381
+    var realSetTimeout = setTimeout;
+
+    function makeApi(sinon) {
+
+        function log() {}
+
+        function logError(label, err) {
+            var msg = label + " threw exception: ";
+
+            function throwLoggedError() {
+                err.message = msg + err.message;
+                throw err;
+            }
+
+            sinon.log(msg + "[" + err.name + "] " + err.message);
+
+            if (err.stack) {
+                sinon.log(err.stack);
+            }
+
+            if (logError.useImmediateExceptions) {
+                throwLoggedError();
+            } else {
+                logError.setTimeout(throwLoggedError, 0);
+            }
+        }
+
+        // When set to true, any errors logged will be thrown immediately;
+        // if set to false, the errors will be thrown in a separate execution frame.
+        logError.useImmediateExceptions = false;
+
+        // wrap realSetTimeout with something we can stub in tests
+        logError.setTimeout = function (func, timeout) {
+            realSetTimeout(func, timeout);
+        };
+
+        var exports = {};
+        exports.log = sinon.log = log;
+        exports.logError = sinon.logError = logError;
+
+        return exports;
+    }
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        module.exports = makeApi(sinon);
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
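+
+// Usage sketch (illustrative only, not part of the library): sinon.logError
+// logs a handler error and re-throws it in a later execution frame by
+// default; with useImmediateExceptions set, it throws synchronously instead.
+//
+//     sinon.logError.useImmediateExceptions = true;
+//     try {
+//         sinon.logError("my handler", new Error("boom"));
+//     } catch (e) {
+//         // e.message === "my handler threw exception: boom"
+//     }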
+
+/**
+ * @depend core.js
+ * @depend ../extend.js
+ * @depend event.js
+ * @depend ../log_error.js
+ */
+/**
+ * Fake XDomainRequest object
+ */
+
+/**
+ * Returns the global to prevent assigning values to 'this' when this is undefined.
+ * This can occur when files are interpreted by node in strict mode.
+ * @private
+ */
+function getGlobal() {
+    
+    return typeof window !== "undefined" ? window : global;
+}
+
+if (typeof sinon === "undefined") {
+    if (typeof this === "undefined") {
+        getGlobal().sinon = {};
+    } else {
+        this.sinon = {};
+    }
+}
+
+// wrapper for global
+(function (global) {
+    
+    var xdr = { XDomainRequest: global.XDomainRequest };
+    xdr.GlobalXDomainRequest = global.XDomainRequest;
+    xdr.supportsXDR = typeof xdr.GlobalXDomainRequest !== "undefined";
+    xdr.workingXDR = xdr.supportsXDR ? xdr.GlobalXDomainRequest : false;
+
+    function makeApi(sinon) {
+        sinon.xdr = xdr;
+
+        function FakeXDomainRequest() {
+            this.readyState = FakeXDomainRequest.UNSENT;
+            this.requestBody = null;
+            this.requestHeaders = {};
+            this.status = 0;
+            this.timeout = null;
+
+            if (typeof FakeXDomainRequest.onCreate === "function") {
+                FakeXDomainRequest.onCreate(this);
+            }
+        }
+
+        function verifyState(x) {
+            if (x.readyState !== FakeXDomainRequest.OPENED) {
+                throw new Error("INVALID_STATE_ERR");
+            }
+
+            if (x.sendFlag) {
+                throw new Error("INVALID_STATE_ERR");
+            }
+        }
+
+        function verifyRequestSent(x) {
+            if (x.readyState === FakeXDomainRequest.UNSENT) {
+                throw new Error("Request not sent");
+            }
+            if (x.readyState === FakeXDomainRequest.DONE) {
+                throw new Error("Request done");
+            }
+        }
+
+        function verifyResponseBodyType(body) {
+            if (typeof body !== "string") {
+                var error = new Error("Attempted to respond to fake XDomainRequest with " +
+                                    body + ", which is not a string.");
+                error.name = "InvalidBodyException";
+                throw error;
+            }
+        }
+
+        sinon.extend(FakeXDomainRequest.prototype, sinon.EventTarget, {
+            open: function open(method, url) {
+                this.method = method;
+                this.url = url;
+
+                this.responseText = null;
+                this.sendFlag = false;
+
+                this.readyStateChange(FakeXDomainRequest.OPENED);
+            },
+
+            readyStateChange: function readyStateChange(state) {
+                this.readyState = state;
+                var eventName = "";
+                switch (this.readyState) {
+                case FakeXDomainRequest.UNSENT:
+                    break;
+                case FakeXDomainRequest.OPENED:
+                    break;
+                case FakeXDomainRequest.LOADING:
+                    if (this.sendFlag) {
+                        //raise the progress event
+                        eventName = "onprogress";
+                    }
+                    break;
+                case FakeXDomainRequest.DONE:
+                    if (this.isTimeout) {
+                        eventName = "ontimeout";
+                    } else if (this.errorFlag || (this.status < 200 || this.status > 299)) {
+                        eventName = "onerror";
+                    } else {
+                        eventName = "onload";
+                    }
+                    break;
+                }
+
+                // raising event (if defined)
+                if (eventName) {
+                    if (typeof this[eventName] === "function") {
+                        try {
+                            this[eventName]();
+                        } catch (e) {
+                            sinon.logError("Fake XHR " + eventName + " handler", e);
+                        }
+                    }
+                }
+            },
+
+            send: function send(data) {
+                verifyState(this);
+
+                if (!/^(get|head)$/i.test(this.method)) {
+                    this.requestBody = data;
+                }
+                this.requestHeaders["Content-Type"] = "text/plain;charset=utf-8";
+
+                this.errorFlag = false;
+                this.sendFlag = true;
+                this.readyStateChange(FakeXDomainRequest.OPENED);
+
+                if (typeof this.onSend === "function") {
+                    this.onSend(this);
+                }
+            },
+
+            abort: function abort() {
+                this.aborted = true;
+                this.responseText = null;
+                this.errorFlag = true;
+
+                if (this.readyState > sinon.FakeXDomainRequest.UNSENT && this.sendFlag) {
+                    this.readyStateChange(sinon.FakeXDomainRequest.DONE);
+                    this.sendFlag = false;
+                }
+            },
+
+            setResponseBody: function setResponseBody(body) {
+                verifyRequestSent(this);
+                verifyResponseBodyType(body);
+
+                var chunkSize = this.chunkSize || 10;
+                var index = 0;
+                this.responseText = "";
+
+                do {
+                    this.readyStateChange(FakeXDomainRequest.LOADING);
+                    this.responseText += body.substring(index, index + chunkSize);
+                    index += chunkSize;
+                } while (index < body.length);
+
+                this.readyStateChange(FakeXDomainRequest.DONE);
+            },
+
+            respond: function respond(status, contentType, body) {
+                // the content type is ignored, since XDomainRequest does not carry it;
+                // we keep the same respond(...) signature as for FakeXMLHttpRequest to
+                // ease test integration across browsers
+                this.status = typeof status === "number" ? status : 200;
+                this.setResponseBody(body || "");
+            },
+
+            simulatetimeout: function simulatetimeout() {
+                this.status = 0;
+                this.isTimeout = true;
+                // Access to this should actually throw an error
+                this.responseText = undefined;
+                this.readyStateChange(FakeXDomainRequest.DONE);
+            }
+        });
+
+        sinon.extend(FakeXDomainRequest, {
+            UNSENT: 0,
+            OPENED: 1,
+            LOADING: 3,
+            DONE: 4
+        });
+
+        sinon.useFakeXDomainRequest = function useFakeXDomainRequest() {
+            sinon.FakeXDomainRequest.restore = function restore(keepOnCreate) {
+                if (xdr.supportsXDR) {
+                    global.XDomainRequest = xdr.GlobalXDomainRequest;
+                }
+
+                delete sinon.FakeXDomainRequest.restore;
+
+                if (keepOnCreate !== true) {
+                    delete sinon.FakeXDomainRequest.onCreate;
+                }
+            };
+            if (xdr.supportsXDR) {
+                global.XDomainRequest = sinon.FakeXDomainRequest;
+            }
+            return sinon.FakeXDomainRequest;
+        };
+
+        sinon.FakeXDomainRequest = FakeXDomainRequest;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./core");
+        require("../extend");
+        require("./event");
+        require("../log_error");
+        makeApi(sinon);
+        module.exports = sinon;
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+    } else if (isNode) {
+        loadDependencies(require, module.exports, module);
+    } else {
+        makeApi(sinon); // eslint-disable-line no-undef
+    }
+})(typeof global !== "undefined" ? global : self);
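+
+// Usage sketch (illustrative only, not part of the library): on browsers that
+// expose XDomainRequest (legacy IE), the global can be swapped for the fake
+// and answered manually. `xdr` below is a hypothetical name.
+//
+//     sinon.useFakeXDomainRequest();
+//     sinon.FakeXDomainRequest.onCreate = function (xdr) {
+//         xdr.onSend = function () { xdr.respond(200, null, "ok"); };
+//     };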
+
+/**
+ * @depend core.js
+ * @depend ../extend.js
+ * @depend event.js
+ * @depend ../log_error.js
+ */
+/**
+ * Fake XMLHttpRequest object
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function (sinonGlobal, global) {
+    
+    function getWorkingXHR(globalScope) {
+        var supportsXHR = typeof globalScope.XMLHttpRequest !== "undefined";
+        if (supportsXHR) {
+            return globalScope.XMLHttpRequest;
+        }
+
+        var supportsActiveX = typeof globalScope.ActiveXObject !== "undefined";
+        if (supportsActiveX) {
+            return function () {
+                return new globalScope.ActiveXObject("MSXML2.XMLHTTP.3.0");
+            };
+        }
+
+        return false;
+    }
+
+    var supportsProgress = typeof ProgressEvent !== "undefined";
+    var supportsCustomEvent = typeof CustomEvent !== "undefined";
+    var supportsFormData = typeof FormData !== "undefined";
+    var supportsArrayBuffer = typeof ArrayBuffer !== "undefined";
+    var supportsBlob = typeof Blob === "function";
+    var sinonXhr = { XMLHttpRequest: global.XMLHttpRequest };
+    sinonXhr.GlobalXMLHttpRequest = global.XMLHttpRequest;
+    sinonXhr.GlobalActiveXObject = global.ActiveXObject;
+    sinonXhr.supportsActiveX = typeof sinonXhr.GlobalActiveXObject !== "undefined";
+    sinonXhr.supportsXHR = typeof sinonXhr.GlobalXMLHttpRequest !== "undefined";
+    sinonXhr.workingXHR = getWorkingXHR(global);
+    sinonXhr.supportsCORS = sinonXhr.supportsXHR && "withCredentials" in (new sinonXhr.GlobalXMLHttpRequest());
+
+    var unsafeHeaders = {
+        "Accept-Charset": true,
+        "Accept-Encoding": true,
+        Connection: true,
+        "Content-Length": true,
+        Cookie: true,
+        Cookie2: true,
+        "Content-Transfer-Encoding": true,
+        Date: true,
+        Expect: true,
+        Host: true,
+        "Keep-Alive": true,
+        Referer: true,
+        TE: true,
+        Trailer: true,
+        "Transfer-Encoding": true,
+        Upgrade: true,
+        "User-Agent": true,
+        Via: true
+    };
+
+    // An upload object is created for each
+    // FakeXMLHttpRequest and allows upload
+    // events to be simulated using uploadProgress
+    // and uploadError.
+    function UploadProgress() {
+        this.eventListeners = {
+            progress: [],
+            load: [],
+            abort: [],
+            error: []
+        };
+    }
+
+    UploadProgress.prototype.addEventListener = function addEventListener(event, listener) {
+        this.eventListeners[event].push(listener);
+    };
+
+    UploadProgress.prototype.removeEventListener = function removeEventListener(event, listener) {
+        var listeners = this.eventListeners[event] || [];
+
+        for (var i = 0, l = listeners.length; i < l; ++i) {
+            if (listeners[i] === listener) {
+                return listeners.splice(i, 1);
+            }
+        }
+    };
+
+    UploadProgress.prototype.dispatchEvent = function dispatchEvent(event) {
+        var listeners = this.eventListeners[event.type] || [];
+
+        for (var i = 0, listener; (listener = listeners[i]) != null; i++) {
+            listener(event);
+        }
+    };
+
+    // Note that for FakeXMLHttpRequest to work pre-ES5
+    // we lose some of the alignment with the spec.
+    // To ensure as close a match as possible,
+    // set responseType before calling open, send or respond.
+    function FakeXMLHttpRequest() {
+        this.readyState = FakeXMLHttpRequest.UNSENT;
+        this.requestHeaders = {};
+        this.requestBody = null;
+        this.status = 0;
+        this.statusText = "";
+        this.upload = new UploadProgress();
+        this.responseType = "";
+        this.response = "";
+        if (sinonXhr.supportsCORS) {
+            this.withCredentials = false;
+        }
+
+        var xhr = this;
+        var events = ["loadstart", "load", "abort", "loadend"];
+
+        function addEventListener(eventName) {
+            xhr.addEventListener(eventName, function (event) {
+                var listener = xhr["on" + eventName];
+
+                if (listener && typeof listener === "function") {
+                    listener.call(this, event);
+                }
+            });
+        }
+
+        for (var i = events.length - 1; i >= 0; i--) {
+            addEventListener(events[i]);
+        }
+
+        if (typeof FakeXMLHttpRequest.onCreate === "function") {
+            FakeXMLHttpRequest.onCreate(this);
+        }
+    }
+
+    function verifyState(xhr) {
+        if (xhr.readyState !== FakeXMLHttpRequest.OPENED) {
+            throw new Error("INVALID_STATE_ERR");
+        }
+
+        if (xhr.sendFlag) {
+            throw new Error("INVALID_STATE_ERR");
+        }
+    }
+
+    function getHeader(headers, header) {
+        header = header.toLowerCase();
+
+        for (var h in headers) {
+            if (h.toLowerCase() === header) {
+                return h;
+            }
+        }
+
+        return null;
+    }
+
+    // filtering to enable a white-list version of Sinon FakeXhr,
+    // where whitelisted requests are passed through to real XHR
+    function each(collection, callback) {
+        if (!collection) {
+            return;
+        }
+
+        for (var i = 0, l = collection.length; i < l; i += 1) {
+            callback(collection[i]);
+        }
+    }
+    function some(collection, callback) {
+        for (var index = 0; index < collection.length; index++) {
+            if (callback(collection[index]) === true) {
+                return true;
+            }
+        }
+        return false;
+    }
+    // largest arity in XHR is 5 - XHR#open
+    var apply = function (obj, method, args) {
+        switch (args.length) {
+        case 0: return obj[method]();
+        case 1: return obj[method](args[0]);
+        case 2: return obj[method](args[0], args[1]);
+        case 3: return obj[method](args[0], args[1], args[2]);
+        case 4: return obj[method](args[0], args[1], args[2], args[3]);
+        case 5: return obj[method](args[0], args[1], args[2], args[3], args[4]);
+        }
+    };
+
+    FakeXMLHttpRequest.filters = [];
+    FakeXMLHttpRequest.addFilter = function addFilter(fn) {
+        this.filters.push(fn);
+    };
+    var IE6Re = /MSIE 6/;
+    FakeXMLHttpRequest.defake = function defake(fakeXhr, xhrArgs) {
+        var xhr = new sinonXhr.workingXHR(); // eslint-disable-line new-cap
+
+        each([
+            "open",
+            "setRequestHeader",
+            "send",
+            "abort",
+            "getResponseHeader",
+            "getAllResponseHeaders",
+            "addEventListener",
+            "overrideMimeType",
+            "removeEventListener"
+        ], function (method) {
+            fakeXhr[method] = function () {
+                return apply(xhr, method, arguments);
+            };
+        });
+
+        var copyAttrs = function (args) {
+            each(args, function (attr) {
+                try {
+                    fakeXhr[attr] = xhr[attr];
+                } catch (e) {
+                    if (!IE6Re.test(navigator.userAgent)) {
+                        throw e;
+                    }
+                }
+            });
+        };
+
+        var stateChange = function stateChange() {
+            fakeXhr.readyState = xhr.readyState;
+            if (xhr.readyState >= FakeXMLHttpRequest.HEADERS_RECEIVED) {
+                copyAttrs(["status", "statusText"]);
+            }
+            if (xhr.readyState >= FakeXMLHttpRequest.LOADING) {
+                copyAttrs(["responseText", "response"]);
+            }
+            if (xhr.readyState === FakeXMLHttpRequest.DONE) {
+                copyAttrs(["responseXML"]);
+            }
+            if (fakeXhr.onreadystatechange) {
+                fakeXhr.onreadystatechange.call(fakeXhr, { target: fakeXhr });
+            }
+        };
+
+        if (xhr.addEventListener) {
+            for (var event in fakeXhr.eventListeners) {
+                if (fakeXhr.eventListeners.hasOwnProperty(event)) {
+
+                    /*eslint-disable no-loop-func*/
+                    each(fakeXhr.eventListeners[event], function (handler) {
+                        xhr.addEventListener(event, handler);
+                    });
+                    /*eslint-enable no-loop-func*/
+                }
+            }
+            xhr.addEventListener("readystatechange", stateChange);
+        } else {
+            xhr.onreadystatechange = stateChange;
+        }
+        apply(xhr, "open", xhrArgs);
+    };
+    FakeXMLHttpRequest.useFilters = false;
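+
+    // Usage sketch (illustrative only, not part of the library): with
+    // useFilters enabled, a filter that returns true "defakes" the matching
+    // request, passing it through to the real XHR. The URL pattern below is a
+    // hypothetical example.
+    //
+    //     FakeXMLHttpRequest.useFilters = true;
+    //     FakeXMLHttpRequest.addFilter(function (method, url) {
+    //         return /\/assets\//.test(url); // let static assets through
+    //     });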
+
+    function verifyRequestOpened(xhr) {
+        if (xhr.readyState !== FakeXMLHttpRequest.OPENED) {
+            throw new Error("INVALID_STATE_ERR - " + xhr.readyState);
+        }
+    }
+
+    function verifyRequestSent(xhr) {
+        if (xhr.readyState === FakeXMLHttpRequest.DONE) {
+            throw new Error("Request done");
+        }
+    }
+
+    function verifyHeadersReceived(xhr) {
+        if (xhr.async && xhr.readyState !== FakeXMLHttpRequest.HEADERS_RECEIVED) {
+            throw new Error("No headers received");
+        }
+    }
+
+    function verifyResponseBodyType(body) {
+        if (typeof body !== "string") {
+            var error = new Error("Attempted to respond to fake XMLHttpRequest with " +
+                                 body + ", which is not a string.");
+            error.name = "InvalidBodyException";
+            throw error;
+        }
+    }
+
+    function convertToArrayBuffer(body) {
+        var buffer = new ArrayBuffer(body.length);
+        var view = new Uint8Array(buffer);
+        for (var i = 0; i < body.length; i++) {
+            var charCode = body.charCodeAt(i);
+            if (charCode >= 256) {
+                throw new TypeError("arraybuffer or blob responseTypes require binary string, " +
+                                    "invalid character " + body[i] + " found.");
+            }
+            view[i] = charCode;
+        }
+        return buffer;
+    }
+
+    function isXmlContentType(contentType) {
+        return !contentType || /(text\/xml)|(application\/xml)|(\+xml)/.test(contentType);
+    }
+
+    function convertResponseBody(responseType, contentType, body) {
+        if (responseType === "" || responseType === "text") {
+            return body;
+        } else if (supportsArrayBuffer && responseType === "arraybuffer") {
+            return convertToArrayBuffer(body);
+        } else if (responseType === "json") {
+            try {
+                return JSON.parse(body);
+            } catch (e) {
+                // Return parsing failure as null
+                return null;
+            }
+        } else if (supportsBlob && responseType === "blob") {
+            var blobOptions = {};
+            if (contentType) {
+                blobOptions.type = contentType;
+            }
+            return new Blob([convertToArrayBuffer(body)], blobOptions);
+        } else if (responseType === "document") {
+            if (isXmlContentType(contentType)) {
+                return FakeXMLHttpRequest.parseXML(body);
+            }
+            return null;
+        }
+        throw new Error("Invalid responseType " + responseType);
+    }
+
+    function clearResponse(xhr) {
+        if (xhr.responseType === "" || xhr.responseType === "text") {
+            xhr.response = xhr.responseText = "";
+        } else {
+            xhr.response = xhr.responseText = null;
+        }
+        xhr.responseXML = null;
+    }
+
+    FakeXMLHttpRequest.parseXML = function parseXML(text) {
+        // Treat empty string as parsing failure
+        if (text !== "") {
+            try {
+                if (typeof DOMParser !== "undefined") {
+                    var parser = new DOMParser();
+                    return parser.parseFromString(text, "text/xml");
+                }
+                var xmlDoc = new window.ActiveXObject("Microsoft.XMLDOM");
+                xmlDoc.async = "false";
+                xmlDoc.loadXML(text);
+                return xmlDoc;
+            } catch (e) {
+                // Unable to parse XML - no biggie
+            }
+        }
+
+        return null;
+    };
+
+    FakeXMLHttpRequest.statusCodes = {
+        100: "Continue",
+        101: "Switching Protocols",
+        200: "OK",
+        201: "Created",
+        202: "Accepted",
+        203: "Non-Authoritative Information",
+        204: "No Content",
+        205: "Reset Content",
+        206: "Partial Content",
+        207: "Multi-Status",
+        300: "Multiple Choice",
+        301: "Moved Permanently",
+        302: "Found",
+        303: "See Other",
+        304: "Not Modified",
+        305: "Use Proxy",
+        307: "Temporary Redirect",
+        400: "Bad Request",
+        401: "Unauthorized",
+        402: "Payment Required",
+        403: "Forbidden",
+        404: "Not Found",
+        405: "Method Not Allowed",
+        406: "Not Acceptable",
+        407: "Proxy Authentication Required",
+        408: "Request Timeout",
+        409: "Conflict",
+        410: "Gone",
+        411: "Length Required",
+        412: "Precondition Failed",
+        413: "Request Entity Too Large",
+        414: "Request-URI Too Long",
+        415: "Unsupported Media Type",
+        416: "Requested Range Not Satisfiable",
+        417: "Expectation Failed",
+        422: "Unprocessable Entity",
+        500: "Internal Server Error",
+        501: "Not Implemented",
+        502: "Bad Gateway",
+        503: "Service Unavailable",
+        504: "Gateway Timeout",
+        505: "HTTP Version Not Supported"
+    };
+
+    function makeApi(sinon) {
+        sinon.xhr = sinonXhr;
+
+        sinon.extend(FakeXMLHttpRequest.prototype, sinon.EventTarget, {
+            async: true,
+
+            open: function open(method, url, async, username, password) {
+                this.method = method;
+                this.url = url;
+                this.async = typeof async === "boolean" ? async : true;
+                this.username = username;
+                this.password = password;
+                clearResponse(this);
+                this.requestHeaders = {};
+                this.sendFlag = false;
+
+                if (FakeXMLHttpRequest.useFilters === true) {
+                    var xhrArgs = arguments;
+                    var defake = some(FakeXMLHttpRequest.filters, function (filter) {
+                        return filter.apply(this, xhrArgs);
+                    });
+                    if (defake) {
+                        return FakeXMLHttpRequest.defake(this, arguments);
+                    }
+                }
+                this.readyStateChange(FakeXMLHttpRequest.OPENED);
+            },
+
+            readyStateChange: function readyStateChange(state) {
+                this.readyState = state;
+
+                var readyStateChangeEvent = new sinon.Event("readystatechange", false, false, this);
+
+                if (typeof this.onreadystatechange === "function") {
+                    try {
+                        this.onreadystatechange(readyStateChangeEvent);
+                    } catch (e) {
+                        sinon.logError("Fake XHR onreadystatechange handler", e);
+                    }
+                }
+
+                switch (this.readyState) {
+                    case FakeXMLHttpRequest.DONE:
+                        if (supportsProgress) {
+                            this.upload.dispatchEvent(new sinon.ProgressEvent("progress", {loaded: 100, total: 100}));
+                            this.dispatchEvent(new sinon.ProgressEvent("progress", {loaded: 100, total: 100}));
+                        }
+                        this.upload.dispatchEvent(new sinon.Event("load", false, false, this));
+                        this.dispatchEvent(new sinon.Event("load", false, false, this));
+                        this.dispatchEvent(new sinon.Event("loadend", false, false, this));
+                        break;
+                }
+
+                this.dispatchEvent(readyStateChangeEvent);
+            },
+
+            setRequestHeader: function setRequestHeader(header, value) {
+                verifyState(this);
+
+                if (unsafeHeaders[header] || /^(Sec-|Proxy-)/.test(header)) {
+                    throw new Error("Refused to set unsafe header \"" + header + "\"");
+                }
+
+                if (this.requestHeaders[header]) {
+                    this.requestHeaders[header] += "," + value;
+                } else {
+                    this.requestHeaders[header] = value;
+                }
+            },
+
+            // Helps testing
+            setResponseHeaders: function setResponseHeaders(headers) {
+                verifyRequestOpened(this);
+                this.responseHeaders = {};
+
+                for (var header in headers) {
+                    if (headers.hasOwnProperty(header)) {
+                        this.responseHeaders[header] = headers[header];
+                    }
+                }
+
+                if (this.async) {
+                    this.readyStateChange(FakeXMLHttpRequest.HEADERS_RECEIVED);
+                } else {
+                    this.readyState = FakeXMLHttpRequest.HEADERS_RECEIVED;
+                }
+            },
+
+            // Currently treats ALL data as a DOMString (i.e. no Document)
+            send: function send(data) {
+                verifyState(this);
+
+                if (!/^(get|head)$/i.test(this.method)) {
+                    var contentType = getHeader(this.requestHeaders, "Content-Type");
+                    if (this.requestHeaders[contentType]) {
+                        var value = this.requestHeaders[contentType].split(";");
+                        this.requestHeaders[contentType] = value[0] + ";charset=utf-8";
+                    } else if (supportsFormData && !(data instanceof FormData)) {
+                        this.requestHeaders["Content-Type"] = "text/plain;charset=utf-8";
+                    }
+
+                    this.requestBody = data;
+                }
+
+                this.errorFlag = false;
+                this.sendFlag = this.async;
+                clearResponse(this);
+                this.readyStateChange(FakeXMLHttpRequest.OPENED);
+
+                if (typeof this.onSend === "function") {
+                    this.onSend(this);
+                }
+
+                this.dispatchEvent(new sinon.Event("loadstart", false, false, this));
+            },
+
+            abort: function abort() {
+                this.aborted = true;
+                clearResponse(this);
+                this.errorFlag = true;
+                this.requestHeaders = {};
+                this.responseHeaders = {};
+
+                if (this.readyState > FakeXMLHttpRequest.UNSENT && this.sendFlag) {
+                    this.readyStateChange(FakeXMLHttpRequest.DONE);
+                    this.sendFlag = false;
+                }
+
+                this.readyState = FakeXMLHttpRequest.UNSENT;
+
+                this.dispatchEvent(new sinon.Event("abort", false, false, this));
+
+                this.upload.dispatchEvent(new sinon.Event("abort", false, false, this));
+
+                if (typeof this.onerror === "function") {
+                    this.onerror();
+                }
+            },
+
+            getResponseHeader: function getResponseHeader(header) {
+                if (this.readyState < FakeXMLHttpRequest.HEADERS_RECEIVED) {
+                    return null;
+                }
+
+                if (/^Set-Cookie2?$/i.test(header)) {
+                    return null;
+                }
+
+                header = getHeader(this.responseHeaders, header);
+
+                return this.responseHeaders[header] || null;
+            },
+
+            getAllResponseHeaders: function getAllResponseHeaders() {
+                if (this.readyState < FakeXMLHttpRequest.HEADERS_RECEIVED) {
+                    return "";
+                }
+
+                var headers = "";
+
+                for (var header in this.responseHeaders) {
+                    if (this.responseHeaders.hasOwnProperty(header) &&
+                        !/^Set-Cookie2?$/i.test(header)) {
+                        headers += header + ": " + this.responseHeaders[header] + "\r\n";
+                    }
+                }
+
+                return headers;
+            },
+
+            setResponseBody: function setResponseBody(body) {
+                verifyRequestSent(this);
+                verifyHeadersReceived(this);
+                verifyResponseBodyType(body);
+                var contentType = this.getResponseHeader("Content-Type");
+
+                var isTextResponse = this.responseType === "" || this.responseType === "text";
+                clearResponse(this);
+                if (this.async) {
+                    var chunkSize = this.chunkSize || 10;
+                    var index = 0;
+
+                    do {
+                        this.readyStateChange(FakeXMLHttpRequest.LOADING);
+
+                        if (isTextResponse) {
+                            this.responseText = this.response += body.substring(index, index + chunkSize);
+                        }
+                        index += chunkSize;
+                    } while (index < body.length);
+                }
+
+                this.response = convertResponseBody(this.responseType, contentType, body);
+                if (isTextResponse) {
+                    this.responseText = this.response;
+                }
+
+                if (this.responseType === "document") {
+                    this.responseXML = this.response;
+                } else if (this.responseType === "" && isXmlContentType(contentType)) {
+                    this.responseXML = FakeXMLHttpRequest.parseXML(this.responseText);
+                }
+                this.readyStateChange(FakeXMLHttpRequest.DONE);
+            },
+
+            respond: function respond(status, headers, body) {
+                this.status = typeof status === "number" ? status : 200;
+                this.statusText = FakeXMLHttpRequest.statusCodes[this.status];
+                this.setResponseHeaders(headers || {});
+                this.setResponseBody(body || "");
+            },
+
+            uploadProgress: function uploadProgress(progressEventRaw) {
+                if (supportsProgress) {
+                    this.upload.dispatchEvent(new sinon.ProgressEvent("progress", progressEventRaw));
+                }
+            },
+
+            downloadProgress: function downloadProgress(progressEventRaw) {
+                if (supportsProgress) {
+                    this.dispatchEvent(new sinon.ProgressEvent("progress", progressEventRaw));
+                }
+            },
+
+            uploadError: function uploadError(error) {
+                if (supportsCustomEvent) {
+                    this.upload.dispatchEvent(new sinon.CustomEvent("error", {detail: error}));
+                }
+            }
+        });
+
+        sinon.extend(FakeXMLHttpRequest, {
+            UNSENT: 0,
+            OPENED: 1,
+            HEADERS_RECEIVED: 2,
+            LOADING: 3,
+            DONE: 4
+        });
+
+        sinon.useFakeXMLHttpRequest = function () {
+            FakeXMLHttpRequest.restore = function restore(keepOnCreate) {
+                if (sinonXhr.supportsXHR) {
+                    global.XMLHttpRequest = sinonXhr.GlobalXMLHttpRequest;
+                }
+
+                if (sinonXhr.supportsActiveX) {
+                    global.ActiveXObject = sinonXhr.GlobalActiveXObject;
+                }
+
+                delete FakeXMLHttpRequest.restore;
+
+                if (keepOnCreate !== true) {
+                    delete FakeXMLHttpRequest.onCreate;
+                }
+            };
+            if (sinonXhr.supportsXHR) {
+                global.XMLHttpRequest = FakeXMLHttpRequest;
+            }
+
+            if (sinonXhr.supportsActiveX) {
+                global.ActiveXObject = function ActiveXObject(objId) {
+                    if (objId === "Microsoft.XMLHTTP" || /^Msxml2\.XMLHTTP/i.test(objId)) {
+
+                        return new FakeXMLHttpRequest();
+                    }
+
+                    return new sinonXhr.GlobalActiveXObject(objId);
+                };
+            }
+
+            return FakeXMLHttpRequest;
+        };
+
+        sinon.FakeXMLHttpRequest = FakeXMLHttpRequest;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./core");
+        require("../extend");
+        require("./event");
+        require("../log_error");
+        makeApi(sinon);
+        module.exports = sinon;
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon, // eslint-disable-line no-undef
+    typeof global !== "undefined" ? global : self
+));
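+
+// Usage sketch (illustrative only, not part of the library):
+// useFakeXMLHttpRequest replaces the global XMLHttpRequest; captured requests
+// can then be answered with respond(). `requests` is a hypothetical name.
+//
+//     var xhr = sinon.useFakeXMLHttpRequest();
+//     var requests = [];
+//     xhr.onCreate = function (req) { requests.push(req); };
+//     // ...code under test issues an XHR here...
+//     requests[0].respond(200, { "Content-Type": "application/json" },
+//                         '{"ok": true}');
+//     xhr.restore();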
+
+/**
+ * @depend fake_xdomain_request.js
+ * @depend fake_xml_http_request.js
+ * @depend ../format.js
+ * @depend ../log_error.js
+ */
+/**
+ * The Sinon "server" mimics a web server that receives requests from
+ * sinon.FakeXMLHttpRequest and provides an API to respond to those requests,
+ * both synchronously and asynchronously. To respond synchronously, canned
+ * answers have to be provided upfront.
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function () {
+    
+    var push = [].push;
+
+    function responseArray(handler) {
+        var response = handler;
+
+        if (Object.prototype.toString.call(handler) !== "[object Array]") {
+            response = [200, {}, handler];
+        }
+
+        if (typeof response[2] !== "string") {
+            throw new TypeError("Fake server response body should be string, but was " +
+                                typeof response[2]);
+        }
+
+        return response;
+    }
+
+    var wloc = typeof window !== "undefined" ? window.location : {};
+    var rCurrLoc = new RegExp("^" + wloc.protocol + "//" + wloc.host);
+
+    function matchOne(response, reqMethod, reqUrl) {
+        var rmeth = response.method;
+        var matchMethod = !rmeth || rmeth.toLowerCase() === reqMethod.toLowerCase();
+        var url = response.url;
+        var matchUrl = !url || url === reqUrl || (typeof url.test === "function" && url.test(reqUrl));
+
+        return matchMethod && matchUrl;
+    }
+
+    function match(response, request) {
+        var requestUrl = request.url;
+
+        if (!/^https?:\/\//.test(requestUrl) || rCurrLoc.test(requestUrl)) {
+            requestUrl = requestUrl.replace(rCurrLoc, "");
+        }
+
+        if (matchOne(response, this.getHTTPMethod(request), requestUrl)) {
+            if (typeof response.response === "function") {
+                var ru = response.url;
+                var args = [request].concat(ru && typeof ru.exec === "function" ? ru.exec(requestUrl).slice(1) : []);
+                return response.response.apply(response, args);
+            }
+
+            return true;
+        }
+
+        return false;
+    }
+
+    function makeApi(sinon) {
+        sinon.fakeServer = {
+            create: function (config) {
+                var server = sinon.create(this);
+                server.configure(config);
+                if (!sinon.xhr.supportsCORS) {
+                    this.xhr = sinon.useFakeXDomainRequest();
+                } else {
+                    this.xhr = sinon.useFakeXMLHttpRequest();
+                }
+                server.requests = [];
+
+                this.xhr.onCreate = function (xhrObj) {
+                    server.addRequest(xhrObj);
+                };
+
+                return server;
+            },
+            configure: function (config) {
+                var whitelist = {
+                    "autoRespond": true,
+                    "autoRespondAfter": true,
+                    "respondImmediately": true,
+                    "fakeHTTPMethods": true
+                };
+                var setting;
+
+                config = config || {};
+                for (setting in config) {
+                    if (whitelist.hasOwnProperty(setting) && config.hasOwnProperty(setting)) {
+                        this[setting] = config[setting];
+                    }
+                }
+            },
+            addRequest: function addRequest(xhrObj) {
+                var server = this;
+                push.call(this.requests, xhrObj);
+
+                xhrObj.onSend = function () {
+                    server.handleRequest(this);
+
+                    if (server.respondImmediately) {
+                        server.respond();
+                    } else if (server.autoRespond && !server.responding) {
+                        setTimeout(function () {
+                            server.responding = false;
+                            server.respond();
+                        }, server.autoRespondAfter || 10);
+
+                        server.responding = true;
+                    }
+                };
+            },
+
+            getHTTPMethod: function getHTTPMethod(request) {
+                if (this.fakeHTTPMethods && /post/i.test(request.method)) {
+                    var matches = (request.requestBody || "").match(/_method=([^\b;]+)/);
+                    return matches ? matches[1] : request.method;
+                }
+
+                return request.method;
+            },
+
+            handleRequest: function handleRequest(xhr) {
+                if (xhr.async) {
+                    if (!this.queue) {
+                        this.queue = [];
+                    }
+
+                    push.call(this.queue, xhr);
+                } else {
+                    this.processRequest(xhr);
+                }
+            },
+
+            log: function log(response, request) {
+                var str;
+
+                str = "Request:\n" + sinon.format(request) + "\n\n";
+                str += "Response:\n" + sinon.format(response) + "\n\n";
+
+                sinon.log(str);
+            },
+
+            respondWith: function respondWith(method, url, body) {
+                if (arguments.length === 1 && typeof method !== "function") {
+                    this.response = responseArray(method);
+                    return;
+                }
+
+                if (!this.responses) {
+                    this.responses = [];
+                }
+
+                if (arguments.length === 1) {
+                    body = method;
+                    url = method = null;
+                }
+
+                if (arguments.length === 2) {
+                    body = url;
+                    url = method;
+                    method = null;
+                }
+
+                push.call(this.responses, {
+                    method: method,
+                    url: url,
+                    response: typeof body === "function" ? body : responseArray(body)
+                });
+            },
+
+            respond: function respond() {
+                if (arguments.length > 0) {
+                    this.respondWith.apply(this, arguments);
+                }
+
+                var queue = this.queue || [];
+                var requests = queue.splice(0, queue.length);
+
+                for (var i = 0; i < requests.length; i++) {
+                    this.processRequest(requests[i]);
+                }
+            },
+
+            processRequest: function processRequest(request) {
+                try {
+                    if (request.aborted) {
+                        return;
+                    }
+
+                    var response = this.response || [404, {}, ""];
+
+                    if (this.responses) {
+                        for (var l = this.responses.length, i = l - 1; i >= 0; i--) {
+                            if (match.call(this, this.responses[i], request)) {
+                                response = this.responses[i].response;
+                                break;
+                            }
+                        }
+                    }
+
+                    if (request.readyState !== 4) {
+                        this.log(response, request);
+
+                        request.respond(response[0], response[1], response[2]);
+                    }
+                } catch (e) {
+                    sinon.logError("Fake server request processing", e);
+                }
+            },
+
+            restore: function restore() {
+                return this.xhr.restore && this.xhr.restore.apply(this.xhr, arguments);
+            }
+        };
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./core");
+        require("./fake_xdomain_request");
+        require("./fake_xml_http_request");
+        require("../format");
+        makeApi(sinon);
+        module.exports = sinon;
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+    } else if (isNode) {
+        loadDependencies(require, module.exports, module);
+    } else {
+        makeApi(sinon); // eslint-disable-line no-undef
+    }
+}());
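+
+// Usage sketch (illustrative only, not part of the library): the fake server
+// queues FakeXMLHttpRequest traffic and answers it from canned responses. The
+// URL below is a hypothetical example.
+//
+//     var server = sinon.fakeServer.create();
+//     server.respondWith("GET", "/api/items",
+//         [200, { "Content-Type": "application/json" }, '[{"id": 1}]']);
+//     // ...code under test issues GET /api/items here...
+//     server.respond(); // flushes queued requests against the canned answers
+//     server.restore();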
+
+/**
+ * @depend fake_server.js
+ * @depend fake_timers.js
+ */
+/**
+ * Add-on for sinon.fakeServer that automatically handles a fake timer along with
+ * the FakeXMLHttpRequest. The direct inspiration for this add-on is jQuery
+ * 1.3.x, which does not use the xhr object's onreadystatechange handler at
+ * all - instead, it polls the object for completion with setInterval. Despite the direct
+ * motivation, there is nothing jQuery-specific in this file, so it can be used
+ * in any environment where the ajax implementation depends on setInterval or
+ * setTimeout.
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function () {
+    
+    function makeApi(sinon) {
+        function Server() {}
+        Server.prototype = sinon.fakeServer;
+
+        sinon.fakeServerWithClock = new Server();
+
+        sinon.fakeServerWithClock.addRequest = function addRequest(xhr) {
+            if (xhr.async) {
+                if (typeof setTimeout.clock === "object") {
+                    this.clock = setTimeout.clock;
+                } else {
+                    this.clock = sinon.useFakeTimers();
+                    this.resetClock = true;
+                }
+
+                if (!this.longestTimeout) {
+                    var clockSetTimeout = this.clock.setTimeout;
+                    var clockSetInterval = this.clock.setInterval;
+                    var server = this;
+
+                    this.clock.setTimeout = function (fn, timeout) {
+                        server.longestTimeout = Math.max(timeout, server.longestTimeout || 0);
+
+                        return clockSetTimeout.apply(this, arguments);
+                    };
+
+                    this.clock.setInterval = function (fn, timeout) {
+                        server.longestTimeout = Math.max(timeout, server.longestTimeout || 0);
+
+                        return clockSetInterval.apply(this, arguments);
+                    };
+                }
+            }
+
+            return sinon.fakeServer.addRequest.call(this, xhr);
+        };
+
+        sinon.fakeServerWithClock.respond = function respond() {
+            var returnVal = sinon.fakeServer.respond.apply(this, arguments);
+
+            if (this.clock) {
+                this.clock.tick(this.longestTimeout || 0);
+                this.longestTimeout = 0;
+
+                if (this.resetClock) {
+                    this.clock.restore();
+                    this.resetClock = false;
+                }
+            }
+
+            return returnVal;
+        };
+
+        sinon.fakeServerWithClock.restore = function restore() {
+            if (this.clock) {
+                this.clock.restore();
+            }
+
+            return sinon.fakeServer.restore.apply(this, arguments);
+        };
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require) {
+        var sinon = require("./core");
+        require("./fake_server");
+        require("./fake_timers");
+        makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+    } else if (isNode) {
+        loadDependencies(require);
+    } else {
+        makeApi(sinon); // eslint-disable-line no-undef
+    }
+}());
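+
+// Usage sketch (illustrative only, not part of the library):
+// fakeServerWithClock also installs fake timers and, on respond(), ticks past
+// the longest pending timeout, which keeps setInterval-based polling moving.
+// The URL below is a hypothetical example.
+//
+//     var server = sinon.fakeServerWithClock.create();
+//     server.respondWith("GET", "/poll", [200, {}, "done"]);
+//     // ...polling code under test runs here...
+//     server.respond(); // answers requests and advances the fake clock
+//     server.restore();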
+
+/**
+ * @depend util/core.js
+ * @depend extend.js
+ * @depend collection.js
+ * @depend util/fake_timers.js
+ * @depend util/fake_server_with_clock.js
+ */
+/**
+ * Manages fake collections as well as fake utilities such as Sinon's
+ * timers and fake XHR implementation in one convenient object.
+ *
+ * @author Christian Johansen (christian at cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
+(function (sinonGlobal) {
+    
+    function makeApi(sinon) {
+        var push = [].push;
+
+        function exposeValue(sandbox, config, key, value) {
+            if (!value) {
+                return;
+            }
+
+            if (config.injectInto && !(key in config.injectInto)) {
+                config.injectInto[key] = value;
+                sandbox.injectedKeys.push(key);
+            } else {
+                push.call(sandbox.args, value);
+            }
+        }
+
+        function prepareSandboxFromConfig(config) {
+            var sandbox = sinon.create(sinon.sandbox);
+
+            if (config.useFakeServer) {
+                if (typeof config.useFakeServer === "object") {
+                    sandbox.serverPrototype = config.useFakeServer;
+                }
+
+                sandbox.useFakeServer();
+            }
+
+            if (config.useFakeTimers) {
+                if (typeof config.useFakeTimers === "object") {
+                    sandbox.useFakeTimers.apply(sandbox, config.useFakeTimers);
+                } else {
+                    sandbox.useFakeTimers();
+                }
+            }
+
+            return sandbox;
+        }
+
+        sinon.sandbox = sinon.extend(sinon.create(sinon.collection), {
+            useFakeTimers: function useFakeTimers() {
+                this.clock = sinon.useFakeTimers.apply(sinon, arguments);
+
+                return this.add(this.clock);
+            },
+
+            serverPrototype: sinon.fakeServer,
+
+            useFakeServer: function useFakeServer() {
+                var proto = this.serverPrototype || sinon.fakeServer;
+
+                if (!proto || !proto.create) {
+                    return null;
+                }
+
+                this.server = proto.create();
+                return this.add(this.server);
+            },
+
+            inject: function (obj) {
+                sinon.collection.inject.call(this, obj);
+
+                if (this.clock) {
+                    obj.clock = this.clock;
+                }
+
+                if (this.server) {
+                    obj.server = this.server;
+                    obj.requests = this.server.requests;
+                }
+
+                obj.match = sinon.match;
+
+                return obj;
+            },
+
+            restore: function () {
+                sinon.collection.restore.apply(this, arguments);
+                this.restoreContext();
+            },
+
+            restoreContext: function () {
+                if (this.injectedKeys) {
+                    for (var i = 0, j = this.injectedKeys.length; i < j; i++) {
+                        delete this.injectInto[this.injectedKeys[i]];
+                    }
+                    this.injectedKeys = [];
+                }
+            },
+
+            create: function (config) {
+                if (!config) {
+                    return sinon.create(sinon.sandbox);
+                }
+
+                var sandbox = prepareSandboxFromConfig(config);
+                sandbox.args = sandbox.args || [];
+                sandbox.injectedKeys = [];
+                sandbox.injectInto = config.injectInto;
+                var prop,
+                    value;
+                var exposed = sandbox.inject({});
+
+                if (config.properties) {
+                    for (var i = 0, l = config.properties.length; i < l; i++) {
+                        prop = config.properties[i];
+                        value = exposed[prop] || prop === "sandbox" && sandbox;
+                        exposeValue(sandbox, config, prop, value);
+                    }
+                } else {
+                    // no explicit property list: fall back to exposing the
+                    // sandbox itself (`value` is never assigned on this branch)
+                    exposeValue(sandbox, config, "sandbox", value || sandbox);
+                }
+
+                return sandbox;
+            },
+
+            match: sinon.match
+        });
+
+        sinon.sandbox.useFakeXMLHttpRequest = sinon.sandbox.useFakeServer;
+
+        return sinon.sandbox;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        require("./extend");
+        require("./util/fake_server_with_clock");
+        require("./util/fake_timers");
+        require("./collection");
+        module.exports = makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
+
+/**
+ * @depend util/core.js
+ * @depend sandbox.js
+ */
+/**
+ * Test function, sandboxes fakes
+ *
+ * @author Christian Johansen (christian@cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
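+// Illustrative sketch (editorial, not upstream source): sinon.test wraps a
+// test body in a sandbox built from sinon.config, so fakes created through
+// the injected helpers are restored even if the body throws:
+//
+//     test("posts the form", sinon.test(function () {
+//         this.stub(jQuery, "ajax");  // injected because injectIntoThis is true
+//         /* ... */
+//     }));                            // verifyAndRestore() runs on success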
+(function (sinonGlobal) {
+
+    function makeApi(sinon) {
+        var slice = Array.prototype.slice;
+
+        function test(callback) {
+            var type = typeof callback;
+
+            if (type !== "function") {
+                throw new TypeError("sinon.test needs to wrap a test function, got " + type);
+            }
+
+            function sinonSandboxedTest() {
+                var config = sinon.getConfig(sinon.config);
+                config.injectInto = config.injectIntoThis && this || config.injectInto;
+                var sandbox = sinon.sandbox.create(config);
+                var args = slice.call(arguments);
+                var oldDone = args.length && args[args.length - 1];
+                var exception, result;
+
+                if (typeof oldDone === "function") {
+                    args[args.length - 1] = function sinonDone(res) {
+                        if (res) {
+                            sandbox.restore();
+                        } else {
+                            sandbox.verifyAndRestore();
+                        }
+                        oldDone(res);
+                    };
+                }
+
+                try {
+                    result = callback.apply(this, args.concat(sandbox.args));
+                } catch (e) {
+                    exception = e;
+                }
+
+                if (typeof oldDone !== "function") {
+                    if (typeof exception !== "undefined") {
+                        sandbox.restore();
+                        throw exception;
+                    } else {
+                        sandbox.verifyAndRestore();
+                    }
+                }
+
+                return result;
+            }
+
+            if (callback.length) {
+                return function sinonAsyncSandboxedTest(done) { // eslint-disable-line no-unused-vars
+                    return sinonSandboxedTest.apply(this, arguments);
+                };
+            }
+
+            return sinonSandboxedTest;
+        }
+
+        test.config = {
+            injectIntoThis: true,
+            injectInto: null,
+            properties: ["spy", "stub", "mock", "clock", "server", "requests"],
+            useFakeTimers: true,
+            useFakeServer: true
+        };
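+        // Editorial note: with these defaults a wrapped test gets spy, stub,
+        // mock, clock, server and requests bound to `this`, with fake timers
+        // and the fake server both enabled; callers can override any of this
+        // by assigning sinon.config (read via sinon.getConfig above).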
+
+        sinon.test = test;
+        return test;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var core = require("./util/core");
+        require("./sandbox");
+        module.exports = makeApi(core);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+    } else if (isNode) {
+        loadDependencies(require, module.exports, module);
+    } else if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(typeof sinon === "object" && sinon || null)); // eslint-disable-line no-undef
+
+/**
+ * @depend util/core.js
+ * @depend test.js
+ */
+/**
+ * Test case, sandboxes all test functions
+ *
+ * @author Christian Johansen (christian@cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
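+// Illustrative sketch (editorial, not upstream source): sinon.testCase wraps
+// every function property whose name starts with the prefix (default "test")
+// in sinon.test, threading the optional setUp/tearDown around each one:
+//
+//     var methods = sinon.testCase({
+//         setUp: function () { /* runs before each test* function */ },
+//         "test addition": function () { /* sandboxed automatically */ },
+//         tearDown: function () { /* runs after each test* function */ }
+//     });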
+(function (sinonGlobal) {
+
+    function createTest(property, setUp, tearDown) {
+        return function () {
+            if (setUp) {
+                setUp.apply(this, arguments);
+            }
+
+            var exception, result;
+
+            try {
+                result = property.apply(this, arguments);
+            } catch (e) {
+                exception = e;
+            }
+
+            if (tearDown) {
+                tearDown.apply(this, arguments);
+            }
+
+            if (exception) {
+                throw exception;
+            }
+
+            return result;
+        };
+    }
+
+    function makeApi(sinon) {
+        function testCase(tests, prefix) {
+            if (!tests || typeof tests !== "object") {
+                throw new TypeError("sinon.testCase needs an object with test functions");
+            }
+
+            prefix = prefix || "test";
+            var rPrefix = new RegExp("^" + prefix);
+            var methods = {};
+            var setUp = tests.setUp;
+            var tearDown = tests.tearDown;
+            var testName,
+                property,
+                method;
+
+            for (testName in tests) {
+                if (tests.hasOwnProperty(testName) && !/^(setUp|tearDown)$/.test(testName)) {
+                    property = tests[testName];
+
+                    if (typeof property === "function" && rPrefix.test(testName)) {
+                        method = property;
+
+                        if (setUp || tearDown) {
+                            method = createTest(property, setUp, tearDown);
+                        }
+
+                        methods[testName] = sinon.test(method);
+                    } else {
+                        methods[testName] = tests[testName];
+                    }
+                }
+            }
+
+            return methods;
+        }
+
+        sinon.testCase = testCase;
+        return testCase;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var core = require("./util/core");
+        require("./test");
+        module.exports = makeApi(core);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon // eslint-disable-line no-undef
+));
+
+/**
+ * @depend times_in_words.js
+ * @depend util/core.js
+ * @depend match.js
+ * @depend format.js
+ */
+/**
+ * Assertions matching the test spy retrieval interface.
+ *
+ * @author Christian Johansen (christian@cjohansen.no)
+ * @license BSD
+ *
+ * Copyright (c) 2010-2013 Christian Johansen
+ */
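+// Illustrative sketch (editorial, not upstream source): the mirrored
+// assertions defined below turn spy properties such as .calledOnce into
+// throwing assertions:
+//
+//     var spy = sinon.spy();
+//     spy(42);
+//     sinon.assert.calledOnce(spy);      // passes
+//     sinon.assert.calledWith(spy, 42);  // passes
+//     sinon.assert.calledTwice(spy);     // throws an AssertError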
+(function (sinonGlobal, global) {
+
+    var slice = Array.prototype.slice;
+
+    function makeApi(sinon) {
+        var assert;
+
+        function verifyIsStub() {
+            var method;
+
+            for (var i = 0, l = arguments.length; i < l; ++i) {
+                method = arguments[i];
+
+                if (!method) {
+                    assert.fail("fake is not a spy");
+                }
+
+                if (method.proxy && method.proxy.isSinonProxy) {
+                    verifyIsStub(method.proxy);
+                } else {
+                    if (typeof method !== "function") {
+                        assert.fail(method + " is not a function");
+                    }
+
+                    if (typeof method.getCall !== "function") {
+                        assert.fail(method + " is not stubbed");
+                    }
+                }
+
+            }
+        }
+
+        function failAssertion(object, msg) {
+            object = object || global;
+            var failMethod = object.fail || assert.fail;
+            failMethod.call(object, msg);
+        }
+
+        function mirrorPropAsAssertion(name, method, message) {
+            if (arguments.length === 2) {
+                message = method;
+                method = name;
+            }
+
+            assert[name] = function (fake) {
+                verifyIsStub(fake);
+
+                var args = slice.call(arguments, 1);
+                var failed = false;
+
+                if (typeof method === "function") {
+                    failed = !method(fake);
+                } else {
+                    failed = typeof fake[method] === "function" ?
+                        !fake[method].apply(fake, args) : !fake[method];
+                }
+
+                if (failed) {
+                    failAssertion(this, (fake.printf || fake.proxy.printf).apply(fake, [message].concat(args)));
+                } else {
+                    assert.pass(name);
+                }
+            };
+        }
+
+        function exposedName(prefix, prop) {
+            return !prefix || /^fail/.test(prop) ? prop :
+                prefix + prop.slice(0, 1).toUpperCase() + prop.slice(1);
+        }
+
+        assert = {
+            failException: "AssertError",
+
+            fail: function fail(message) {
+                var error = new Error(message);
+                error.name = this.failException || assert.failException;
+
+                throw error;
+            },
+
+            pass: function pass() {},
+
+            callOrder: function assertCallOrder() {
+                verifyIsStub.apply(null, arguments);
+                var expected = "";
+                var actual = "";
+
+                if (!sinon.calledInOrder(arguments)) {
+                    try {
+                        expected = [].join.call(arguments, ", ");
+                        var calls = slice.call(arguments);
+                        var i = calls.length;
+                        while (i) {
+                            if (!calls[--i].called) {
+                                calls.splice(i, 1);
+                            }
+                        }
+                        actual = sinon.orderByFirstCall(calls).join(", ");
+                    } catch (e) {
+                        // If this fails, we'll just fall back to the blank string
+                    }
+
+                    failAssertion(this, "expected " + expected + " to be " +
+                                "called in order but were called as " + actual);
+                } else {
+                    assert.pass("callOrder");
+                }
+            },
+
+            callCount: function assertCallCount(method, count) {
+                verifyIsStub(method);
+
+                if (method.callCount !== count) {
+                    var msg = "expected %n to be called " + sinon.timesInWords(count) +
+                        " but was called %c%C";
+                    failAssertion(this, method.printf(msg));
+                } else {
+                    assert.pass("callCount");
+                }
+            },
+
+            expose: function expose(target, options) {
+                if (!target) {
+                    throw new TypeError("target is null or undefined");
+                }
+
+                var o = options || {};
+                var prefix = typeof o.prefix === "undefined" && "assert" || o.prefix;
+                var includeFail = typeof o.includeFail === "undefined" || !!o.includeFail;
+
+                for (var method in this) {
+                    if (method !== "expose" && (includeFail || !/^(fail)/.test(method))) {
+                        target[exposedName(prefix, method)] = this[method];
+                    }
+                }
+
+                return target;
+            },
+
+            match: function match(actual, expectation) {
+                var matcher = sinon.match(expectation);
+                if (matcher.test(actual)) {
+                    assert.pass("match");
+                } else {
+                    var formatted = [
+                        "expected value to match",
+                        "    expected = " + sinon.format(expectation),
+                        "    actual = " + sinon.format(actual)
+                    ];
+
+                    failAssertion(this, formatted.join("\n"));
+                }
+            }
+        };
+
+        mirrorPropAsAssertion("called", "expected %n to have been called at least once but was never called");
+        mirrorPropAsAssertion("notCalled", function (spy) {
+            return !spy.called;
+        }, "expected %n to not have been called but was called %c%C");
+        mirrorPropAsAssertion("calledOnce", "expected %n to be called once but was called %c%C");
+        mirrorPropAsAssertion("calledTwice", "expected %n to be called twice but was called %c%C");
+        mirrorPropAsAssertion("calledThrice", "expected %n to be called thrice but was called %c%C");
+        mirrorPropAsAssertion("calledOn", "expected %n to be called with %1 as this but was called with %t");
+        mirrorPropAsAssertion(
+            "alwaysCalledOn",
+            "expected %n to always be called with %1 as this but was called with %t"
+        );
+        mirrorPropAsAssertion("calledWithNew", "expected %n to be called with new");
+        mirrorPropAsAssertion("alwaysCalledWithNew", "expected %n to always be called with new");
+        mirrorPropAsAssertion("calledWith", "expected %n to be called with arguments %*%C");
+        mirrorPropAsAssertion("calledWithMatch", "expected %n to be called with match %*%C");
+        mirrorPropAsAssertion("alwaysCalledWith", "expected %n to always be called with arguments %*%C");
+        mirrorPropAsAssertion("alwaysCalledWithMatch", "expected %n to always be called with match %*%C");
+        mirrorPropAsAssertion("calledWithExactly", "expected %n to be called with exact arguments %*%C");
+        mirrorPropAsAssertion("alwaysCalledWithExactly", "expected %n to always be called with exact arguments %*%C");
+        mirrorPropAsAssertion("neverCalledWith", "expected %n to never be called with arguments %*%C");
+        mirrorPropAsAssertion("neverCalledWithMatch", "expected %n to never be called with match %*%C");
+        mirrorPropAsAssertion("threw", "%n did not throw exception%C");
+        mirrorPropAsAssertion("alwaysThrew", "%n did not always throw exception%C");
+
+        sinon.assert = assert;
+        return assert;
+    }
+
+    var isNode = typeof module !== "undefined" && module.exports && typeof require === "function";
+    var isAMD = typeof define === "function" && typeof define.amd === "object" && define.amd;
+
+    function loadDependencies(require, exports, module) {
+        var sinon = require("./util/core");
+        require("./match");
+        require("./format");
+        module.exports = makeApi(sinon);
+    }
+
+    if (isAMD) {
+        define(loadDependencies);
+        return;
+    }
+
+    if (isNode) {
+        loadDependencies(require, module.exports, module);
+        return;
+    }
+
+    if (sinonGlobal) {
+        makeApi(sinonGlobal);
+    }
+}(
+    typeof sinon === "object" && sinon, // eslint-disable-line no-undef
+    typeof global !== "undefined" ? global : self
+));
+
+  return sinon;
+}));
\ No newline at end of file
diff --git a/test/qunit/test-libs/sinon-qunit-1.0.0.js b/test/qunit/test-libs/sinon-qunit-1.0.0.js
new file mode 100644
index 0000000..c26232f
--- /dev/null
+++ b/test/qunit/test-libs/sinon-qunit-1.0.0.js
@@ -0,0 +1,62 @@
+/**
+ * sinon-qunit 1.0.0, 2010/12/09
+ *
+ * @author Christian Johansen (christian@cjohansen.no)
+ *
+ * (The BSD License)
+ * 
+ * Copyright (c) 2010-2011, Christian Johansen, christian@cjohansen.no
+ * All rights reserved.
+ * 
+ * Redistribution and use in source and binary forms, with or without modification,
+ * are permitted provided that the following conditions are met:
+ * 
+ *     * Redistributions of source code must retain the above copyright notice,
+ *       this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above copyright notice,
+ *       this list of conditions and the following disclaimer in the documentation
+ *       and/or other materials provided with the distribution.
+ *     * Neither the name of Christian Johansen nor the names of his contributors
+ *       may be used to endorse or promote products derived from this software
+ *       without specific prior written permission.
+ * 
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+/*global sinon, QUnit, test*/
+sinon.assert.fail = function (msg) {
+    QUnit.ok(false, msg);
+};
+
+sinon.assert.pass = function (assertion) {
+    QUnit.ok(true, assertion);
+};
+
+sinon.config = {
+    injectIntoThis: true,
+    injectInto: null,
+    properties: ["spy", "stub", "mock", "clock", "sandbox"],
+    useFakeTimers: true,
+    useFakeServer: false
+};
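+// Editorial note: with the config above each wrapped QUnit test receives
+// this.spy, this.stub, this.mock, this.clock and this.sandbox, all restored
+// when the test finishes; useFakeServer stays off so real XHR still works.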
+
+(function (global) {
+    var qTest = QUnit.test;
+
+    QUnit.test = global.test = function (testName, expected, callback, async) {
+        if (arguments.length === 2) {
+            callback = expected;
+            expected = null;
+        }
+
+        return qTest(testName, expected, sinon.test(callback), async);
+    };
+}(this));
diff --git a/test/qunit/tests/form_tests.html b/test/qunit/tests/form_tests.html
new file mode 100644
index 0000000..c0add87
--- /dev/null
+++ b/test/qunit/tests/form_tests.html
@@ -0,0 +1,11 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <link href="../../../static/style/blue/base.css" media="screen" rel="stylesheet" type="text/css">
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/form_tests.js b/test/qunit/tests/form_tests.js
new file mode 100644
index 0000000..92c380d
--- /dev/null
+++ b/test/qunit/tests/form_tests.js
@@ -0,0 +1,104 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([ 'test-app', 'mvc/form/form-input', 'mvc/ui/ui-misc', 'mvc/form/form-data', 'mvc/tool/tool-form', 'utils/utils',
+], function( testApp, InputElement, Ui, FormData, ToolForm, Utils ){
+    'use strict';
+    module( 'Form test', {
+        setup: function() {
+            testApp.create();
+        },
+        teardown: function() {
+            testApp.destroy();
+        }
+    } );
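+    // Editorial note: these tests rely on the fakes set up by the `test-app`
+    // module (see testApp.create() above): window.fakeserver answers the tool
+    // form's API requests, and this.clock (a sinon fake timer) fast-forwards
+    // past fade animations (window.WAIT_FADE).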
+
+    test( 'tool-form', function() {
+        var toolform = new ToolForm.View( { id: 'test' } );
+        var form = toolform.form;
+        $( 'body' ).prepend( toolform.$el );
+        window.fakeserver.respond();
+        ok( form.$( '.portlet-title-text' ).html() == '<b>_name</b> _description (Galaxy Version _version)', 'Title correct' );
+        var tour_ids = [];
+        $( '[tour_id]' ).each( function() { tour_ids.push( $( this ).attr( 'tour_id' ) ) } );
+        ok( JSON.stringify( tour_ids ) == '["a","b|c","b|i","b|j","k_0|l","k_0|m|n","k_0|m|s","k_0|m|t"]', 'Tour ids correct' );
+        ok( JSON.stringify( form.data.create() ) == '{"a":"","b|c":"h","b|i":"i","b|j":"j","k_0|l":"l","k_0|m|n":"r","k_0|m|s":"s","k_0|m|t":"t"}', 'Created data correct' );
+        var mapped_ids = [];
+        form.data.matchModel( form.options, function( input, id ) { mapped_ids.push( $( '#' + id ).attr( 'tour_id' ) ) } );
+        ok( JSON.stringify( mapped_ids ) == '["a","b|c","b|i","b|j","k_0|l","k_0|m|n","k_0|m|s","k_0|m|t"]', 'Remapped tour ids correct' );
+        this.clock.tick ( window.WAIT_FADE );
+        var dropdown = form.$( '#menu > .dropdown-menu' );
+        ok( dropdown.children().length == 2, 'Found two menu items' );
+        dropdown.find( '.fa-info-circle' ).parent().click();
+        this.clock.tick ( window.WAIT_FADE );
+        ok( form.$( '.ui-message' ).html() === '<span>This tool requires req_name_a (Version req_version_a) and req_name_b (Version req_version_b). Click <a target="_blank" href="https://wiki.galaxyproject.org/Tools/Requirements">here</a> for more information.</span>', 'Check requirements message' );
+        ok( form.$( '.form-repeat-delete' ).css( 'display' ) == 'none', 'Delete button disabled' );
+        var $add = form.$( '.form-repeat-add' );
+        ok( !$add.attr( 'disabled' ), 'Adding new repeat possible' );
+        $add.click();
+        this.clock.tick ( window.WAIT_FADE );
+        ok( $add.attr( 'disabled' ), 'Adding new repeat has been disabled' );
+        form.$( '.form-repeat-delete' ).each( function( i, d ) { ok( $( d ).css( 'display' ) == 'block', 'Delete buttons ' + i + ' enabled' ) } );
+        ok( JSON.stringify( form.data.create() ) == '{"a":"","b|c":"h","b|i":"i","b|j":"j","k_0|l":"l","k_0|m|n":"r","k_0|m|s":"s","k_0|m|t":"t","k_1|l":"l","k_1|m|n":"o","k_1|m|p":"p","k_1|m|q":"q"}', 'Created data correct, after adding repeat' );
+        form.$( '.form-repeat-delete:first' ).click();
+        ok( form.$( '.form-repeat-delete' ).css( 'display' ) == 'none', 'Delete button disabled' );
+        this.clock.tick ( window.WAIT_FADE );
+        ok( JSON.stringify( form.data.create() ) == '{"a":"","b|c":"h","b|i":"i","b|j":"j","k_0|l":"l","k_0|m|n":"o","k_0|m|p":"p","k_0|m|q":"q"}', 'Created data correct, after removing first repeat' );
+    });
+
+    test( 'data', function() {
+        var visits = [];
+        Utils.get( { url: Galaxy.root + 'api/tools/test/build', success: function( response ) {
+            FormData.visitInputs( response.inputs, function( node, name, context ) {
+                visits.push( { name: name, node: node } );
+            } );
+        } } );
+        window.fakeserver.respond();
+        ok( JSON.stringify( visits ) == '[{"name":"a","node":{"name":"a","type":"text"}},{"name":"b|c","node":{"name":"c","type":"select","value":"h","options":[["d","d",false],["h","h",false]]}},{"name":"b|i","node":{"name":"i","type":"text","value":"i"}},{"name":"b|j","node":{"name":"j","type":"text","value":"j"}},{"name":"k_0|l","node":{"name":"l","type":"text","value":"l"}},{"name":"k_0|m|n","node":{"name":"n","type":"select","value":"r","options":[["o","o",false],["r","r",false]]}}, [...]
+    });
+
+    test( 'input', function() {
+        var input = new InputElement( {}, {
+            field: new Ui.Input({})
+        });
+        $( 'body' ).prepend( input.$el );
+        ok( input.$field.css( 'display' ) == 'block', 'Input field shown' );
+        ok( input.$preview.css( 'display' ) == 'none', 'Preview hidden' );
+        ok( input.$collapsible.css( 'display' ) == 'none', 'Collapsible hidden' );
+        ok( input.$title_text.css( 'display' ) == 'inline', 'Title visible' );
+        ok( input.$title_text.html() == '', 'Title content unavailable' );
+        input.model.set( 'label', '_label' );
+        ok( input.$title_text.html() == '_label', 'Title content available' );
+        ok( input.$error.css( 'display' ) == 'none', 'Error hidden' );
+        input.model.set( 'error_text', '_error_text' );
+        ok( input.$error.css( 'display' ) == 'block', 'Error visible' );
+        ok( input.$error_text.html() == '_error_text', 'Error text correct' );
+        input.model.set( 'error_text', null );
+        ok( input.$error.css( 'display' ) == 'none', 'Error hidden, again' );
+        ok( input.$backdrop.css( 'display' ) == 'none', 'Backdrop hidden' );
+        input.model.set( 'backdrop', true );
+        ok( input.$backdrop.css( 'display' ) == 'block', 'Backdrop shown' );
+        ok( input.$backdrop.css( 'opacity' ) == 0, 'Backdrop transparent' );
+        ok( input.$backdrop.css( 'cursor' ) == 'default', 'Backdrop regular cursor' );
+        input.model.set( 'backdrop', false );
+        ok( input.$backdrop.css( 'display' ) == 'none', 'Backdrop hidden, again' );
+        input.model.set( 'disabled', true );
+        ok( input.$field.css( 'display' ) == 'none', 'Input field hidden' );
+        input.model.set( 'disabled', false );
+        this.clock.tick ( window.WAIT_FADE );
+        ok( input.$field.css( 'display' ) == 'block', 'Input field shown, again' );
+        this.clock.tick ( window.WAIT_FADE );
+        input.model.set( 'color', 'red' );
+        ok( input.$field.children().first().css( 'color' ) == 'rgb(255, 0, 0)', 'Shows correct new color' );
+        input.model.set( 'color', null );
+        ok( input.$field.children().first().css( 'color' ) == 'rgb(85, 85, 85)', 'Shows correct old color' );
+        input.model.set( 'collapsible_value' , '_collapsible_value' );
+        ok( input.$collapsible.css( 'display' ) == 'block', 'Collapsible field' );
+        ok( input.$collapsible_text.html() == '_label', 'Title content available' );
+        ok( input.$title_text.css( 'display' ) == 'none', 'Regular title not visible' );
+        input.model.set( 'help', '_help' );
+        ok( input.$info.html() == '_help', 'Correct help text' );
+        input.model.set( 'argument', '_argument' );
+        ok( input.$info.html() == '_help (_argument)', 'Correct help text with argument' );
+        input.model.set( 'help', '_help (_argument)' );
+        ok( input.$info.html() == '_help (_argument)', 'Correct help text with argument from help' );
+    } );
+});
\ No newline at end of file
diff --git a/test/qunit/tests/galaxy-app-base.html b/test/qunit/tests/galaxy-app-base.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/galaxy-app-base.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/galaxy-app-base.js b/test/qunit/tests/galaxy-app-base.js
new file mode 100644
index 0000000..176edc3
--- /dev/null
+++ b/test/qunit/tests/galaxy-app-base.js
@@ -0,0 +1,110 @@
+define([
+    "galaxy",
+    "jquery",
+    "sinon-qunit"
+], function(
+    appBase,
+    $,
+    sinon
+){
+    /*globals equal, test, module, expect, deepEqual, strictEqual, throws, ok */
+    "use strict";
+
+    module( "Galaxy client app tests" );
+
+    var options = {
+        config : {
+            "allow_user_deletion": false,
+            "allow_user_creation": true,
+            "wiki_url": "https://wiki.galaxyproject.org/",
+            "ftp_upload_site": null,
+            "support_url": "https://wiki.galaxyproject.org/Support",
+            "allow_user_dataset_purge": false,
+            "allow_library_path_paste": false,
+            "user_library_import_dir": null,
+            "terms_url": null,
+            "ftp_upload_dir": null,
+            "library_import_dir": null,
+            "logo_url": null,
+            "enable_unique_workflow_defaults": false
+        },
+        user : {
+            "username": "test",
+            "quota_percent": null,
+            "total_disk_usage": 61815527,
+            "nice_total_disk_usage": "59.0 MB",
+            "email": "test at test.test",
+            "tags_used": [
+              "test"
+            ],
+            "model_class": "User",
+            "id": "f2db41e1fa331b3e"
+        }
+    };
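+    // Editorial note: `options` is a minimal stand-in for the config/user
+    // bootstrap data a GalaxyApp instance would normally receive from the page.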
+
+    test( "App base construction/initializiation defaults", function() {
+        var app = new appBase.GalaxyApp({});
+        ok( app.hasOwnProperty( 'options' )     && typeof app.options === 'object' );
+        ok( app.hasOwnProperty( 'logger' )      && typeof app.logger === 'object' );
+        ok( app.hasOwnProperty( 'localize' )    && typeof app.localize === 'function' );
+        ok( app.hasOwnProperty( 'config' )      && typeof app.config === 'object' );
+        ok( app.hasOwnProperty( 'user' )        && typeof app.user === 'object' );
+
+        // equal( true );
+        equal( app.localize, window._l );
+    });
+
+    test( "App base default options", function() {
+        var app = new appBase.GalaxyApp({});
+        ok( app.hasOwnProperty( 'options' ) && typeof app.options === 'object' );
+        equal( app.options.root,            '/' );
+        equal( app.options.patchExisting,   true );
+    });
+
+    test( "App base extends from Backbone.Events", function() {
+        var app = new appBase.GalaxyApp({});
+        [ 'on', 'off', 'trigger', 'listenTo', 'stopListening' ].forEach( function( fn ){
+            ok( app.hasOwnProperty( fn ) && typeof app[ fn ] === 'function' );
+        });
+    });
+
+    test( "App base has logging methods from utils/add-logging.js", function() {
+        var app = new appBase.GalaxyApp({});
+        [ 'debug', 'info', 'warn', 'error', 'metric' ].forEach( function( fn ){
+            ok( typeof app[ fn ] === 'function' );
+        });
+        ok( app._logNamespace === 'GalaxyApp' );
+    });
+
+    test( 'App base will patch in attributes from existing Galaxy objects', function(){
+        window.Galaxy = {
+            attribute : {
+                subattr : 1
+            }
+        };
+        var app = new appBase.GalaxyApp({});
+        ok( typeof app.attribute === 'object' && app.attribute.subattr === 1 );
+    });
+
+    test( "App base logger", function() {
+        var app = new appBase.GalaxyApp({});
+        ok( app.hasOwnProperty( 'logger' ) && typeof app.logger === 'object' );
+    });
+
+    test( "App base config", function() {
+        var app = new appBase.GalaxyApp( options );
+        ok( app.hasOwnProperty( 'config' ) && typeof app.config === 'object' );
+        equal( app.config.allow_user_deletion,  false );
+        equal( app.config.allow_user_creation,  true );
+        equal( app.config.wiki_url,             "https://wiki.galaxyproject.org/" );
+        equal( app.config.ftp_upload_site,      null );
+        //...
+    });
+
+    test( "App base user", function() {
+        var app = new appBase.GalaxyApp({});
+        ok( app.hasOwnProperty( 'user' ) && typeof app.user === 'object' );
+        ok( app.user.isAdmin() === false );
+    });
+
+});
diff --git a/test/qunit/tests/graph.html b/test/qunit/tests/graph.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/graph.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/graph.js b/test/qunit/tests/graph.js
new file mode 100644
index 0000000..baead23
--- /dev/null
+++ b/test/qunit/tests/graph.js
@@ -0,0 +1,540 @@
+define([
+    "utils/graph",
+    "jquery",
+    "sinon-qunit"
+], function( GRAPH, $, sinon ){
+
+    /*globals equal, ok, test, module, expect, deepEqual, strictEqual */
+    "use strict";
+
+    module( "utils/graph.js library tests" );
+    function testEmptyObject( o ){
+        ok( typeof o === 'object' );
+        ok( Object.keys( o ).length === 0 );
+    }
+
+    // ------------------------------------------------------------------------ vertices
+    test( "Empty vertex construction", function() {
+        var vert = new GRAPH.Vertex();
+        ok( vert instanceof GRAPH.Vertex );
+        ok( vert.name === '(unnamed)' );
+        ok( vert.data === null );
+        testEmptyObject( vert.edges );
+        ok( vert.degree === 0 );
+        ok( ( vert + '' ) === 'Vertex((unnamed))' );
+        deepEqual( vert.toJSON(), { name : '(unnamed)', data: null });
+    });
+
+    test( "Vertex construction", function() {
+        var vert = new GRAPH.Vertex( 'blah', { blorp: 1, bleep: 2 });
+        ok( vert instanceof GRAPH.Vertex );
+        ok( vert.name === 'blah' );
+        deepEqual( vert.data, { blorp: 1, bleep: 2 });
+        testEmptyObject( vert.edges );
+        ok( vert.degree === 0 );
+        ok( ( vert + '' ) === 'Vertex(blah)' );
+        deepEqual( vert.toJSON(), { name : 'blah', data: { blorp: 1, bleep: 2 } });
+    });
+
+    // ------------------------------------------------------------------------ edges
+    test( "Empty edge construction", function() {
+        var edge = new GRAPH.Edge();
+        ok( edge instanceof GRAPH.Edge );
+        ok( edge.source === null );
+        ok( edge.target === null );
+        ok( edge.data === null );
+        ok( ( edge + '' ) === 'null->null' );
+        deepEqual( edge.toJSON(), { source : null, target: null });
+    });
+
+    test( "Edge construction", function() {
+        var edge = new GRAPH.Edge( 'A', 'B', { one: 1, two: 2 });
+        ok( edge instanceof GRAPH.Edge );
+        ok( edge.source === 'A' );
+        ok( edge.target === 'B' );
+        deepEqual( edge.data, { one: 1, two: 2 } );
+        ok( ( edge + '' ) === 'A->B' );
+        deepEqual( edge.toJSON(), { source : 'A', target: 'B', data : { one: 1, two: 2 } });
+    });
+
+    // ------------------------------------------------------------------------ graphs
+    function testEmptyGraph( graph ){
+        ok( graph instanceof GRAPH.Graph );
+        testEmptyObject( graph.vertices );
+        ok( graph.numEdges === 0 );
+    }
+
+    test( "Empty graph construction", function() {
+        var graph = new GRAPH.Graph();
+
+        ok( graph.directed === false );
+        ok( graph.allowReflexiveEdges === false );
+
+        testEmptyGraph( graph );
+    });
+
+    test( "Bad data graph construction", function() {
+        var graph = new GRAPH.Graph( false, {} );
+        testEmptyGraph( graph );
+
+        graph = new GRAPH.Graph( false, null );
+        testEmptyGraph( graph );
+    });
+
+    test( "Test directed and options", function() {
+        var graph = new GRAPH.Graph( true, {}, { allowReflexiveEdges : true });
+
+        ok( graph.directed );
+        ok( graph.allowReflexiveEdges );
+
+        testEmptyGraph( graph );
+    });
+
+    function testSampleUndirectedGraph( graph ){
+        ok( !graph.directed );
+
+        ok( Object.keys( graph.vertices ).length === 3 );
+        ok( graph.vertices.A instanceof GRAPH.Vertex );
+        ok( graph.vertices.B instanceof GRAPH.Vertex );
+        ok( graph.vertices.C instanceof GRAPH.Vertex );
+
+        deepEqual( Object.keys( graph.vertices.A.edges ), [ 'B', 'C' ] );
+        deepEqual( Object.keys( graph.vertices.B.edges ), [ 'A', 'C' ] );
+        deepEqual( Object.keys( graph.vertices.C.edges ), [ 'A', 'B' ] );
+
+        ok( graph.vertices.A.degree === 2 );
+        ok( graph.vertices.B.degree === 2 );
+        ok( graph.vertices.C.degree === 2 );
+
+        deepEqual( graph.vertices.A.edges.B.toJSON(), { source: 'A', target: 'B' });
+        deepEqual( graph.vertices.A.edges.C.toJSON(), { source: 'A', target: 'C' });
+        deepEqual( graph.vertices.B.edges.A.toJSON(), { source: 'B', target: 'A' });
+        deepEqual( graph.vertices.B.edges.C.toJSON(), { source: 'B', target: 'C' });
+        deepEqual( graph.vertices.C.edges.A.toJSON(), { source: 'C', target: 'A' });
+        deepEqual( graph.vertices.C.edges.B.toJSON(), { source: 'C', target: 'B' });
+
+        ok( graph.numEdges === 6 );
+    }
+
+    function testSampleDirectedGraph( graph ){
+        ok( graph.directed );
+
+        ok( Object.keys( graph.vertices ).length === 3 );
+        ok( graph.vertices.A instanceof GRAPH.Vertex );
+        ok( graph.vertices.B instanceof GRAPH.Vertex );
+        ok( graph.vertices.C instanceof GRAPH.Vertex );
+
+        deepEqual( Object.keys( graph.vertices.A.edges ), [ 'B', 'C' ] );
+        deepEqual( Object.keys( graph.vertices.B.edges ), [ 'C' ] );
+        deepEqual( Object.keys( graph.vertices.C.edges ), [] );
+
+        ok( graph.vertices.A.degree === 2 );
+        ok( graph.vertices.B.degree === 1 );
+        ok( graph.vertices.C.degree === 0 );
+
+        deepEqual( graph.vertices.A.edges.B.toJSON(), { source: 'A', target: 'B' });
+        deepEqual( graph.vertices.A.edges.C.toJSON(), { source: 'A', target: 'C' });
+        deepEqual( graph.vertices.B.edges.C.toJSON(), { source: 'B', target: 'C' });
+
+        ok( graph.numEdges === 3 );
+    }
+
+    var nodeLinkData = {
+        nodes : [
+            { name : 'A', data: 100 },
+            { name : 'B', data: 200 },
+            { name : 'C', data: 300 }
+        ],
+        links : [
+            { source: 0, target: 1 },
+            { source: 0, target: 2 },
+            { source: 1, target: 2 }
+        ]
+    };
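+    // Editorial note: nodeLinkData encodes the triangle A-B-C in node/link
+    // form, where each link endpoint is an index into the `nodes` array.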
+
+    test( "Test nodes and links data input on *non-directed* graph", function() {
+        var graph = new GRAPH.Graph( false, nodeLinkData );
+        testSampleUndirectedGraph( graph );
+    });
+
+    test( "Test nodes and links data input on *directed* graph", function() {
+        var graph = new GRAPH.Graph( true, nodeLinkData );
+        testSampleDirectedGraph( graph );
+    });
+
+    var vertexEdgeData = {
+        vertices : [
+            { name : 'A', data: 100 },
+            { name : 'B', data: 200 },
+            { name : 'C', data: 300 }
+        ],
+        edges : [
+            { source: 'A', target: 'B' },
+            { source: 'A', target: 'C' },
+            { source: 'B', target: 'C' }
+        ]
+    };
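+    // Editorial note: vertexEdgeData describes the same triangle, but edges
+    // reference their endpoints by vertex name rather than by index.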
+
+    test( "Test vertex and edge data input on *non-directed* graph", function() {
+        var graph = new GRAPH.Graph( false, vertexEdgeData );
+        testSampleUndirectedGraph( graph );
+    });
+
+    test( "Test vertex and edge data input on *directed* graph", function() {
+        var graph = new GRAPH.Graph( true, vertexEdgeData );
+        testSampleDirectedGraph( graph );
+    });
+
+    test( "Test vertex eachEdge", function() {
+        var graph = new GRAPH.Graph( false, nodeLinkData );
+        ok( typeof graph.vertices.A.eachEdge === 'function' );
+        deepEqual( graph.vertices.A.eachEdge( function( e ){ return e.target; }), [ 'B', 'C' ] );
+        ok( graph.vertices.A.eachEdge({ target: 'B' }).length === 1 );
+    });
+
+    test( "Test graph eachVertex", function() {
+        var graph = new GRAPH.Graph( true, nodeLinkData );
+        ok( typeof graph.eachVertex === 'function' );
+        deepEqual( graph.eachVertex( function( v ){ return { n: v.name, d: v.degree }; }), [
+            { n: 'A', d: 2 },
+            { n: 'B', d: 1 },
+            { n: 'C', d: 0 }
+        ]);
+        ok( graph.eachVertex({ degree: 2 })[0] === graph.vertices.A );
+    });
+
+    test( "Test createVertex", function() {
+        var graph = new GRAPH.Graph();
+        var vert1 = graph.createVertex( 'A', { blah: 1 });
+        ok( vert1 instanceof GRAPH.Vertex );
+        ok( vert1 === graph.vertices.A );
+        ok( graph.createVertex( 'A', { blah: 1 } ) === vert1 );
+    });
+
+    test( "Test createEdge", function() {
+        var graph, A, B, edge;
+
+        graph = new GRAPH.Graph();
+        A = graph.createVertex( 'A' );
+        B = graph.createVertex( 'B' );
+        edge = graph.createEdge( 'A', 'B' );
+        ok( edge instanceof GRAPH.Edge );
+        ok( A.degree === 1 );
+        ok( B.degree === 1 );
+        ok( A.edges.B );
+        ok( B.edges.A );
+        ok( graph.numEdges === 2 );
+
+        // bad target
+        graph = new GRAPH.Graph();
+        A = graph.createVertex( 'A' );
+        B = graph.createVertex( 'B' );
+        edge = graph.createEdge( 'A', 'C' );
+        ok( edge === null );
+        ok( A.degree === 0 );
+        ok( B.degree === 0 );
+        ok( !A.edges.B );
+        ok( !B.edges.A );
+        ok( graph.numEdges === 0 );
+
+        // bad source
+        graph = new GRAPH.Graph();
+        A = graph.createVertex( 'A' );
+        B = graph.createVertex( 'B' );
+        edge = graph.createEdge( 'C', 'A' );
+        ok( graph.numEdges === 0 );
+
+        // reflexive
+        graph = new GRAPH.Graph();
+        A = graph.createVertex( 'A' );
+        B = graph.createVertex( 'B' );
+        edge = graph.createEdge( 'A', 'A' );
+        ok( graph.numEdges === 0 );
+
+        // reflexive (allowed)
+        graph = new GRAPH.Graph( false, {}, { allowReflexiveEdges: true });
+        A = graph.createVertex( 'A' );
+        edge = graph.createEdge( 'A', 'A' );
+        // reflexive edges shouldn't mirror
+        ok( graph.numEdges === 1 );
+        ok( A.edges.A );
+    });
+
+    test( "Test graph.edges", function() {
+        var graph = new GRAPH.Graph( false, nodeLinkData );
+        ok( graph.edges().length === 6 );
+        deepEqual( graph.edges( function( e ){ return e.source; }), [ 'A', 'A', 'B', 'B', 'C', 'C' ] );
+        deepEqual( graph.edges({ source: 'A' }), [ graph.vertices.A.edges.B, graph.vertices.A.edges.C ] );
+
+        graph = new GRAPH.Graph( true, nodeLinkData );
+        ok( graph.edges().length === 3 );
+        deepEqual( graph.edges( function( e ){ return e.source; }), [ 'A', 'A', 'B' ] );
+        deepEqual( graph.edges({ source: 'A' }), [ graph.vertices.A.edges.B, graph.vertices.A.edges.C ] );
+    });
+
+    test( "Test graph.adjacent", function() {
+        var graph = new GRAPH.Graph( true, nodeLinkData );
+        deepEqual( graph.adjacent( graph.vertices.A ), [ graph.vertices.B, graph.vertices.C ] );
+        deepEqual( graph.adjacent( graph.vertices.B ), [ graph.vertices.C ] );
+        deepEqual( graph.adjacent( graph.vertices.C ), [] );
+    });
+
+    test( "Test graph.eachAdjacent", function() {
+        var graph = new GRAPH.Graph( true, nodeLinkData );
+        deepEqual( graph.eachAdjacent( graph.vertices.A, function( v, e ){ return v; }),
+                  [ graph.vertices.B, graph.vertices.C ] );
+    });
+
+    // ------------------------------------------------------------------------ breadth first search
+    test( "Empty BreadthFirstSearch", function(){
+        var search = new GRAPH.BreadthFirstSearch();
+        ok( search instanceof GRAPH.BreadthFirstSearch );
+        ok( search.graph === undefined );
+        ok( typeof search.processFns === 'object' );
+        ok( typeof search.processFns.vertexEarly === 'function' );
+        ok( typeof search.processFns.edge === 'function' );
+        ok( typeof search.processFns.vertexLate === 'function' );
+        ok( typeof search._cache === 'object' );
+    });
+
+    test( "BreadthFirstSearch on undirected graph", function(){
+        var graph = new GRAPH.Graph( false, nodeLinkData ),
+            bfs = new GRAPH.BreadthFirstSearch( graph );
+        ok( bfs instanceof GRAPH.BreadthFirstSearch );
+        ok( bfs.graph === graph );
+
+        var search = bfs.search( 'A' ),
+            tree = bfs.searchTree( 'A' );
+        deepEqual( search, {
+            discovered : { A: true, B: true, C: true },
+            edges : [
+                { source : 'A', target: 'B' },
+                { source : 'A', target: 'C' }
+            ]
+        });
+        ok( tree instanceof GRAPH.Graph );
+        deepEqual( tree.vertices.A.toJSON(), graph.vertices.A.toJSON() );
+        deepEqual( tree.eachVertex( function( v ){ return v.degree; }), [ 2, 0, 0 ] );
+
+        deepEqual( bfs.search( 'B' ).edges, [
+            { source : 'B', target: 'A' },
+            { source : 'B', target: 'C' }
+        ]);
+        deepEqual( bfs.search( 'C' ).edges, [
+            { source : 'C', target: 'A' },
+            { source : 'C', target: 'B' }
+        ]);
+        ok( typeof bfs._cache.A === 'object' );
+        deepEqual( Object.keys( bfs._cache ), [ 'A', 'B', 'C' ] );
+    });
+
+    test( "BreadthFirstSearch on directed graph", function(){
+        var graph = new GRAPH.Graph( true, nodeLinkData ),
+            bfs = new GRAPH.BreadthFirstSearch( graph );
+        ok( bfs instanceof GRAPH.BreadthFirstSearch );
+        ok( bfs.graph === graph );
+
+        var search = bfs.search( 'A' ),
+            tree = bfs.searchTree( 'A' );
+        deepEqual( search, {
+            discovered : { A: true, B: true, C: true },
+            edges : [
+                { source : 'A', target: 'B' },
+                { source : 'A', target: 'C' }
+            ]
+        });
+        ok( tree instanceof GRAPH.Graph );
+        deepEqual( tree.vertices.A.toJSON(), graph.vertices.A.toJSON() );
+        deepEqual( tree.eachVertex( function( v ){ return v.degree; }), [ 2, 0, 0 ] );
+
+        deepEqual( bfs.search( 'B' ).edges, [
+            { source : 'B', target: 'C' }
+        ]);
+        deepEqual( bfs.search( 'C' ).edges, []);
+        ok( typeof bfs._cache.A === 'object' );
+        deepEqual( Object.keys( bfs._cache ), [ 'A', 'B', 'C' ] );
+    });
+
+    // ------------------------------------------------------------------------ depth first search
+    var DFSData = {
+        vertices : [
+            { name : 'A' },
+            { name : 'B' },
+            { name : 'C' },
+            { name : 'D' },
+            { name : 'E' },
+            { name : 'F' }
+        ],
+        edges : [
+            { source: 'A', target: 'B' },
+            { source: 'B', target: 'C' },
+            // confound it
+            { source: 'A', target: 'C' },
+            { source: 'C', target: 'D' },
+            { source: 'A', target: 'E' },
+            { source: 'E', target: 'F' },
+            // confound it
+            { source: 'F', target: 'A' }
+        ]
+    };
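+    // Editorial note: DFSData sketches the graph
+    //     A -> B -> C -> D  (with shortcut A -> C)
+    //     A -> E -> F       (with back edge F -> A)
+    // so searches from A must handle both a forward shortcut and a cycle.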
+
+    test( "Empty DepthFirstSearch", function(){
+        var search = new GRAPH.DepthFirstSearch();
+        ok( search instanceof GRAPH.DepthFirstSearch );
+        ok( search.graph === undefined );
+        ok( typeof search.processFns === 'object' );
+        ok( typeof search.processFns.vertexEarly === 'function' );
+        ok( typeof search.processFns.edge === 'function' );
+        ok( typeof search.processFns.vertexLate === 'function' );
+        ok( typeof search._cache === 'object' );
+    });
+
+    test( "DepthFirstSearch on undirected graph", function(){
+        var graph = new GRAPH.Graph( false, DFSData ),
+            dfs = new GRAPH.DepthFirstSearch( graph );
+        ok( dfs instanceof GRAPH.DepthFirstSearch );
+        ok( dfs.graph === graph );
+
+        var search = dfs.search( 'A' ),
+            tree = dfs.searchTree( 'A' );
+        deepEqual( search, {
+            discovered : { A: true, B: true, C: true, D: true, E: true, F: true },
+            edges : [
+                { source : 'A', target: 'B' },
+                { source : 'B', target: 'C' },
+                { source : 'C', target: 'D' },
+                { source : 'A', target: 'E' },
+                { source : 'E', target: 'F' }
+            ],
+            entryTimes : { A: 0, B: 1, C: 2, D: 3, E: 7, F: 8 },
+            exitTimes  : { A: 11, B: 6, C: 5, D: 4, E: 10, F: 9 }
+        });
+        ok( tree instanceof GRAPH.Graph );
+        deepEqual( tree.vertices.A.toJSON(), graph.vertices.A.toJSON() );
+        deepEqual( tree.eachVertex( function( v ){ return v.degree; }), [ 2, 1, 1, 0, 1, 0 ] );
+
+        deepEqual( dfs.search( 'B' ).edges, [
+            { source : 'B', target: 'A' },
+            { source : 'A', target: 'C' },
+            { source : 'C', target: 'D' },
+            { source : 'A', target: 'E' },
+            { source : 'E', target: 'F' }
+        ]);
+
+        ok( typeof dfs._cache.A === 'object' );
+        deepEqual( Object.keys( dfs._cache ), [ 'A', 'B' ] );
+    });
+
+    test( "DepthFirstSearch on directed graph", function(){
+        var graph = new GRAPH.Graph( true, DFSData ),
+            dfs = new GRAPH.DepthFirstSearch( graph );
+        ok( dfs instanceof GRAPH.DepthFirstSearch );
+        ok( dfs.graph === graph );
+
+        var search = dfs.search( 'A' ),
+            tree = dfs.searchTree( 'A' );
+        deepEqual( search, {
+            discovered : { A: true, B: true, C: true, D: true, E: true, F: true },
+            edges : [
+                { source : 'A', target: 'B' },
+                { source : 'B', target: 'C' },
+                { source : 'C', target: 'D' },
+                { source : 'A', target: 'E' },
+                { source : 'E', target: 'F' }
+            ],
+            entryTimes : { A: 0, B: 1, C: 2, D: 3, E: 7, F: 8 },
+            exitTimes  : { A: 11, B: 6, C: 5, D: 4, E: 10, F: 9 }
+        });
+        ok( tree instanceof GRAPH.Graph );
+        deepEqual( tree.vertices.A.toJSON(), graph.vertices.A.toJSON() );
+        deepEqual( tree.eachVertex( function( v ){ return v.degree; }), [ 2, 1, 1, 0, 1, 0 ] );
+
+        deepEqual( dfs.search( 'B' ).edges, [
+            { source : 'B', target: 'C' },
+            { source : 'C', target: 'D' }
+        ]);
+
+        ok( typeof dfs._cache.A === 'object' );
+        deepEqual( Object.keys( dfs._cache ), [ 'A', 'B' ] );
+    });
+
+    // ------------------------------------------------------------------------ components
+    test( "weakComponents on undirected graph", function(){
+        var graph = new GRAPH.Graph( false, {
+            vertices : [
+                { name : 'A' },
+                { name : 'B' },
+                { name : 'C' },
+                { name : 'D' },
+                { name : 'E' }
+            ],
+            edges : [
+                { source: 'A', target: 'B' },
+                { source: 'C', target: 'D' }
+            ]
+        });
+        equal( graph.numEdges, 4 );
+        var components = graph.weakComponents();
+        equal( components.length, 3 );
+    });
+
+    test( "weakComponents on directed graph", function(){
+        var graph, components;
+        graph = new GRAPH.Graph( true, {
+            vertices : [
+                { name : 'A' },
+                { name : 'B' },
+                { name : 'C' },
+                { name : 'D' },
+                { name : 'E' }
+            ],
+            edges : [
+                { source: 'A', target: 'B' },
+                { source: 'D', target: 'C' }
+            ]
+        });
+        equal( graph.numEdges, 2 );
+
+        components = graph.weakComponents();
+        equal( components.length, 3 );
+
+        graph = new GRAPH.Graph( true, {
+            vertices : [
+                { name : 'A', data: 100 },
+                { name : 'B', data: 200 },
+                { name : 'C', data: 30 },
+                { name : 'D', data: 40 },
+                { name : 'E', data: 500 },
+                { name : 'F', data: 600 },
+                { name : 'G', data: 7 }
+            ],
+            edges : [
+                { source: 'A', target: 'B' },
+                { source: 'D', target: 'C' },
+                { source: 'E', target: 'A' },
+                { source: 'F', target: 'E' }
+            ]
+        });
+        components = graph.weakComponents();
+        equal( components.length, 3 );
+        // data retained
+        equal( components[0].vertices[0].data, 100 );
+
+        equal( components[0].vertices.length, 4 );
+        deepEqual( components[0].edges, [
+            { source: 'A', target: 'B' },
+            { source: 'E', target: 'A' },
+            { source: 'F', target: 'E' }
+        ]);
+
+        equal( components[1].vertices.length, 2 );
+        deepEqual( components[1].edges, [
+            { source: 'D', target: 'C' }
+        ]);
+
+        deepEqual( components[2].vertices, [{ name : 'G', data: 7 }]);
+        deepEqual( components[2].edges.length, 0 );
+    });
+});
diff --git a/test/qunit/tests/hda-base.html b/test/qunit/tests/hda-base.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/hda-base.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/hda-base.js b/test/qunit/tests/hda-base.js
new file mode 100644
index 0000000..7870880
--- /dev/null
+++ b/test/qunit/tests/hda-base.js
@@ -0,0 +1,33 @@
+// This file isn't really testing anything useful yet; it just exercises
+// (and demonstrates) qunit+backbone interactions.
+define([
+    "mvc/history/hda-model",
+    "mvc/history/hda-li",
+    "jquery",
+    "sinon-qunit"
+], function(
+    HDA_MODEL,
+    HDA_BASE,
+    $,
+    sinon
+){
+    /*globals equal, test, module, expect, deepEqual, strictEqual */
+    "use strict";
+    module( "HDA base backbone view tests" );
+
+    test( "Base HDA view default construction, initialize", function() {
+        var hda = new HDA_MODEL.HistoryDatasetAssociation({
+                    id          : '123'
+                }),
+            view = new HDA_BASE.HDAListItemView({ model: hda });
+
+        strictEqual( view.model, hda );
+
+        equal( view.linkTarget, '_blank' );
+        equal( view.selectable, false );
+        equal( view.selected,   false );
+        equal( view.expanded,   false );
+        equal( view.draggable,  false );
+        equal( view.id(), 'dataset-123' );
+    });
+});
diff --git a/test/qunit/tests/history_contents_model_tests.html b/test/qunit/tests/history_contents_model_tests.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/history_contents_model_tests.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/history_contents_model_tests.js b/test/qunit/tests/history_contents_model_tests.js
new file mode 100644
index 0000000..5f12291
--- /dev/null
+++ b/test/qunit/tests/history_contents_model_tests.js
@@ -0,0 +1,44 @@
+define([
+    "mvc/history/hda-model",
+    "jquery",
+    "sinon-qunit"
+], function(
+    HDA_MODEL,
+    $,
+    sinon
+){
+    module( "History Contents Model Tests" );
+
+    test( "HDA Constructions with Default Attributes", function() {
+        var hda = new HDA_MODEL.HistoryDatasetAssociation({});
+        equal( hda.get( 'name' ), "(unnamed dataset)" );
+        equal( hda.get( 'state' ), "new" );
+    });
+
+    test( "HDA Construction with Supplied Attributes", function() {
+        var hda = new HDA_MODEL.HistoryDatasetAssociation({
+            history_content_type : 'dataset',
+            name: "my dataset",
+            state: "ok"
+        });
+        equal( hda.get( 'name' ), "my dataset" );
+        equal( hda.get( 'state' ), "ok" );
+    });
+
+    test( "HDA Deletion", function() {
+        var hda = new HDA_MODEL.HistoryDatasetAssociation({
+            history_content_type : 'dataset',
+            id: "hda1",
+            history_id: "h1",
+            deleted: false
+        });
+        equal( hda.get( 'deleted' ), false );
+
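+        // stub out jQuery.ajax so no real request is made; sinon's
+        // yieldsTo( 'success', ... ) invokes the success callback synchronously
+        // with the fake payload below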
+        sinon.stub( $, "ajax" ).yieldsTo( 'success', { deleted: true });
+        hda[ 'delete' ]();
+        // to get the url sinon used:
+        //console.debug( $.ajax.lastCall.args[0].url )
+        ok( $.ajax.calledWithMatch( { url: "/api/histories/h1/contents/datasets/hda1" } ) );
+        equal( hda.get( 'deleted' ), true );
+    });
+});
diff --git a/test/qunit/tests/job-dag.html b/test/qunit/tests/job-dag.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/job-dag.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/job-dag.js b/test/qunit/tests/job-dag.js
new file mode 100644
index 0000000..0bb51ca
--- /dev/null
+++ b/test/qunit/tests/job-dag.js
@@ -0,0 +1,261 @@
+define([
+    "mvc/history/job-dag",
+    "jquery",
+    "sinon-qunit",
+    'test-data/job-dag-1'
+], function( JobDAG, $, sinon, testData ){
+    /*globals equal ok test module expect deepEqual strictEqual */
+    "use strict";
+
+    module( "mvc/history/job-dag.js tests" );
+    function testEmptyObject( o ){
+        ok( typeof o === 'object' );
+        ok( Object.keys( o ).length === 0 );
+    }
+
+    // ------------------------------------------------------------------------
+    test( "Empty JobDAG construction", function() {
+        var dag = new JobDAG();
+        ok( dag instanceof JobDAG );
+
+        // default options
+        deepEqual( dag.filters, [] );
+        deepEqual( dag.options, {
+            excludeSetMetadata : false
+        });
+
+        // test (empty) instance vars
+        deepEqual( dag._jobsData, [] );
+        deepEqual( dag._historyContentsMap, {} );
+        deepEqual( dag._toolMap, {} );
+        equal( dag.noInputJobs.length, 0 );
+        equal( dag.noOutputJobs.length, 0 );
+
+        // logging
+        equal( typeof dag.debug, 'function' );
+        equal( typeof dag.info, 'function' );
+        equal( typeof dag.warn, 'function' );
+        equal( typeof dag.error, 'function' );
+    });
+
+    test( "Empty JobDAG construction - changing options", function() {
+        var dag;
+        dag = new JobDAG({
+            excludeSetMetadata : true
+        });
+
+        // excludeSetMetadata
+        deepEqual( dag.options, {
+            excludeSetMetadata : true
+        });
+        equal( dag.filters.length, 1 );
+        equal( typeof dag.filters[0], 'function' );
+
+        // filters
+        function testFilter( job, index, jobData ){ return true; }
+        dag = new JobDAG({
+            filters : [ testFilter ]
+        });
+        equal( dag.filters[0], testFilter );
+    });
+
+    test( "JobDAG construction with history and jobs", function() {
+        equal( testData.jobs1.length, 3 );
+        equal( testData.historyContents1.length, 3 );
+
+        var history = testData.historyContents1,
+            jobs = testData.jobs1,
+            dag;
+        dag = new JobDAG({
+            historyContents : history,
+            tools : testData.tools,
+            jobs : jobs
+        });
+
+        deepEqual( dag._outputIdToJobMap, {
+            "8c959c9304a2bc4b": "8a81cf6f989c4467",
+            "132016f833b57406": "6505e875ddb66fd2",
+            "846fb0a2a64137c0": "77f74776fd03cbc5"
+        });
+        deepEqual( dag._jobsData, [
+            {
+                "job": _.findWhere( jobs, { id : "8a81cf6f989c4467" }),
+                "inputs": {},
+                "outputs": {
+                    "8c959c9304a2bc4b": { "src": "hda", "id": "8c959c9304a2bc4b", "name": "output0",
+                                          "content": _.findWhere( history, { id: "8c959c9304a2bc4b" }) }
+                },
+                "tool": {}
+            },
+            {
+                "job": _.findWhere( jobs, { id : "6505e875ddb66fd2" }),
+                "inputs": {
+                    "8c959c9304a2bc4b": { "src": "hda", "id": "8c959c9304a2bc4b", "name": "input",
+                                          "content": _.findWhere( history, { id: "8c959c9304a2bc4b" }) }
+                },
+                "outputs": {
+                    "132016f833b57406": { "src": "hda", "id": "132016f833b57406", "name": "out_file1",
+                                          "content": _.findWhere( history, { id: "132016f833b57406" }) }
+                },
+                "tool": {}
+            },
+            {
+                "job": _.findWhere( jobs, { id : "77f74776fd03cbc5" }),
+                "inputs": {
+                    "132016f833b57406": { "src": "hda", "id": "132016f833b57406", "name": "input",
+                                          "content": _.findWhere( history, { id: "132016f833b57406" }) }
+                },
+                "outputs": {
+                    "846fb0a2a64137c0": { "src": "hda", "id": "846fb0a2a64137c0", "name": "out_file1",
+                                          "content": _.findWhere( history, { id: "846fb0a2a64137c0" }) }
+                },
+                "tool": {}
+            }
+        ]);
+
+        var jobsDataMap = dag._jobsDataMap();
+        deepEqual( dag.toNodesAndLinks(), {
+            "nodes": [
+                { "name": "8a81cf6f989c4467", "data": jobsDataMap[ "8a81cf6f989c4467" ] },
+                { "name": "6505e875ddb66fd2", "data": jobsDataMap[ "6505e875ddb66fd2" ] },
+                { "name": "77f74776fd03cbc5", "data": jobsDataMap[ "77f74776fd03cbc5" ] }
+            ],
+            "links": [
+                { "source": 0, "target": 1, "data": { "dataset": "8c959c9304a2bc4b" } },
+                { "source": 1, "target": 2, "data": { "dataset": "132016f833b57406" } }
+            ]
+        });
+
+        deepEqual( dag.toVerticesAndEdges(), {
+            "vertices": [
+                { "name": "8a81cf6f989c4467", "data": jobsDataMap[ "8a81cf6f989c4467" ] },
+                { "name": "6505e875ddb66fd2", "data": jobsDataMap[ "6505e875ddb66fd2" ] },
+                { "name": "77f74776fd03cbc5", "data": jobsDataMap[ "77f74776fd03cbc5" ] }
+            ],
+            "edges": [
+                { "source": "8a81cf6f989c4467", "target": "6505e875ddb66fd2",
+                  "data": { "dataset": "8c959c9304a2bc4b" } },
+                { "source": "6505e875ddb66fd2", "target": "77f74776fd03cbc5",
+                  "data": { "dataset": "132016f833b57406" } }
+            ]
+        });
+
+        // test cloning
+    });
+
+    test( "JobDAG removal of __SET_METADATA__ jobs", function() {
+        equal( testData.jobs2.length, 3 );
+        equal( testData.historyContents2.length, 2 );
+
+        var history = testData.historyContents2,
+            jobs = testData.jobs2,
+            dag;
+        dag = new JobDAG({
+            historyContents : history,
+            tools : testData.tools,
+            jobs : jobs,
+            excludeSetMetadata : true
+        });
+
+        var jobsDataMap = dag._jobsDataMap();
+        deepEqual( dag.toNodesAndLinks(), {
+            "nodes": [
+                { "name": "bf60fd5f5f7f44bf", "data": jobsDataMap[ "bf60fd5f5f7f44bf" ] },
+                { "name": "90240358ebde1489", "data": jobsDataMap[ "90240358ebde1489" ] }
+            ],
+            "links": [
+                { "source": 0, "target": 1, "data": { "dataset": "eca0af6fb47bf90c" } }
+            ]
+        });
+    });
+
+    //TODO: test filtering out errored jobs
+    test( "JobDAG construction with history and jobs", function() {
+        equal( testData.jobs3.length, 5 );
+        equal( testData.historyContents3.length, 5 );
+
+        var history = testData.historyContents3,
+            jobs = testData.jobs3,
+            dag;
+        dag = new JobDAG({
+            historyContents : history,
+            tools : testData.tools,
+            jobs : jobs
+        });
+
+        var jobsDataMap = dag._jobsDataMap();
+        deepEqual( dag.toVerticesAndEdges(), {
+            "vertices": [
+                { "name": "8c959c9304a2bc4b", "data": jobsDataMap[ "8c959c9304a2bc4b" ] },
+                { "name": "132016f833b57406", "data": jobsDataMap[ "132016f833b57406" ] },
+                { "name": "846fb0a2a64137c0", "data": jobsDataMap[ "846fb0a2a64137c0" ] },
+                { "name": "eca0af6fb47bf90c", "data": jobsDataMap[ "eca0af6fb47bf90c" ] },
+                { "name": "6fc9fbb81c497f69", "data": jobsDataMap[ "6fc9fbb81c497f69" ] }
+            ],
+            "edges": [
+                { "source": "8c959c9304a2bc4b", "target": "846fb0a2a64137c0",
+                    "data": { "dataset": "6fb17d0cc6e8fae5" } },
+                { "source": "132016f833b57406", "target": "eca0af6fb47bf90c",
+                    "data": { "dataset": "5114a2a207b7caff" } },
+                { "source": "eca0af6fb47bf90c", "target": "6fc9fbb81c497f69",
+                    "data": { "dataset": "b8a0d6158b9961df" } }
+            ]
+        });
+
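+        // weakComponents() ignores edge direction and partitions the graph into
+        // its connected subgraphs: here, the two independent job chains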
+        var components = dag.weakComponents();
+        deepEqual( components, [
+            {
+                "vertices": [
+                    { "name": "8c959c9304a2bc4b", "data": jobsDataMap[ "8c959c9304a2bc4b" ] },
+                    { "name": "846fb0a2a64137c0", "data": jobsDataMap[ "846fb0a2a64137c0" ] }
+                ],
+                "edges": [
+                    { "source": "8c959c9304a2bc4b", "target": "846fb0a2a64137c0" }
+                ]
+            },
+            {
+                "vertices": [
+                    { "name": "132016f833b57406", "data": jobsDataMap[ "132016f833b57406" ] },
+                    { "name": "eca0af6fb47bf90c", "data": jobsDataMap[ "eca0af6fb47bf90c" ] },
+                    { "name": "6fc9fbb81c497f69", "data": jobsDataMap[ "6fc9fbb81c497f69" ] }
+                ],
+                "edges": [
+                    { "source": "132016f833b57406", "target": "eca0af6fb47bf90c" },
+                    { "source": "eca0af6fb47bf90c", "target": "6fc9fbb81c497f69" }
+                ]
+            }
+        ]);
+
+    });
+
+    //TODO: test filtering out errored jobs
+    test( "JobDAG construction with copied history contents", function() {
+        equal( testData.jobs4.length, 1 );
+        equal( testData.historyContents4.length, 3 );
+
+        var history = testData.historyContents4,
+            jobs = testData.jobs4,
+            dag;
+        dag = new JobDAG({
+            historyContents : history,
+            tools : testData.tools,
+            jobs : jobs
+        });
+
+        var jobsDataMap = dag._jobsDataMap();
+        deepEqual( dag.toVerticesAndEdges(), {
+            "vertices": [
+                { "name": "92b83968e0b52980", "data": jobsDataMap[ "92b83968e0b52980" ] },
+                { "name": "copy-422eef6b1b545329", "data": _.findWhere( history, { id: '422eef6b1b545329' }) },
+                { "name": "copy-c86c1b73aa7102dd", "data": _.findWhere( history, { id: 'c86c1b73aa7102dd' }) }
+            ],
+            "edges": [
+                { "source": "copy-422eef6b1b545329", "target": "92b83968e0b52980",
+                    "data": { "dataset": "422eef6b1b545329" } },
+                { "source": "copy-c86c1b73aa7102dd", "target": "92b83968e0b52980",
+                    "data": { "dataset": "c86c1b73aa7102dd" } }
+            ]
+        });
+    });
+});
diff --git a/test/qunit/tests/list-of-pairs-collection-creator.html b/test/qunit/tests/list-of-pairs-collection-creator.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/list-of-pairs-collection-creator.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/list-of-pairs-collection-creator.js b/test/qunit/tests/list-of-pairs-collection-creator.js
new file mode 100644
index 0000000..ed6bbe1
--- /dev/null
+++ b/test/qunit/tests/list-of-pairs-collection-creator.js
@@ -0,0 +1,106 @@
+define([
+    "mvc/collection/list-of-pairs-collection-creator",
+    "test-data/paired-collection-creator.data",
+    "jquery",
+    "sinon-qunit"
+], function(
+    PAIRED_COLLECTION_CREATOR,
+    // why am I yelling?
+    DATA,
+    $,
+    sinon
+){
+    /*globals equal test module expect deepEqual strictEqual throws ok */
+    "use strict";
+    var PCC = PAIRED_COLLECTION_CREATOR.PairedCollectionCreator;
+
+    module( "Galaxy client app tests" );
+
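+    // the fake server below intercepts the collection-creation POST and captures
+    // its body so the exact request can be compared against the fixture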
+    test( "Collection creation", function() {
+        var pcc = new PCC({
+                datasets    : DATA._1,
+                historyId   : 'fakeHistoryId'
+            }),
+            server = sinon.fakeServer.create();
+
+        var requestJSON;
+        server.respondWith( 'POST', '/api/histories/fakeHistoryId/contents/dataset_collections', function( request ){
+            requestJSON = JSON.parse( request.requestBody );
+            request.respond(
+                200,
+                { "Content-Type": "application/json" },
+                JSON.stringify({
+                    fakeResponse: 'yes'
+                })
+            );
+        });
+
+        //console.debug( 'requestBody:', JSON.stringify( requestJSON, null, '  ' ) );
+        pcc.createList( 'Heres a collection' );
+        server.respond();
+        deepEqual( requestJSON, DATA._1requestJSON );
+    });
+
+    test( "Creator base/empty construction/initializiation defaults", function() {
+        var pcc = new PCC([]);
+        ok( pcc instanceof PCC );
+        deepEqual( pcc.filters, pcc.commonFilters[ pcc.DEFAULT_FILTERS ] );
+        ok( pcc.automaticallyPair );
+        equal( pcc.matchPercentage, 0.9 );
+        equal( pcc.strategy, 'autopairLCS' );
+    });
+
+    test( "Creator construction/initializiation with datasets", function() {
+        var pcc = new PCC({
+            datasets    : DATA._1
+        });
+        //pcc.initialList.forEach( function( dataset, i ){
+        //    console.log( i + ':\n' + JSON.stringify( dataset ) );
+        //});
+        // pcc maintains the original list - which, in this case, is already sorted
+        deepEqual( pcc.initialList, DATA._1 );
+        // datasets 1 has no ids, so the pcc will create them
+        ok( _.every( pcc.initialList, function( dataset ){
+            return dataset.id;
+        }));
+        // datasets 1 is very easy to auto pair
+        equal( pcc.unpaired.length, 0 );
+        equal( pcc.paired.length, pcc.initialList.length / 2 );
+    });
+
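+    // the strategies below differ only in the filename-matching heuristic used to
+    // pair forward/reverse datasets: exact matching ('simple'), LCS ('lcs'), and
+    // Levenshtein distance ('levenshtein'); DATA._1 should pair fully under all three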
+    test( "Try easy autopairing with simple exact matching", function() {
+        var pcc = new PCC({
+            datasets    : DATA._1,
+            strategy    : 'simple',
+            twoPassAutopairing : false
+        });
+        equal( pcc.unpaired.length, 0 );
+        equal( pcc.paired.length, pcc.initialList.length / 2 );
+    });
+
+    test( "Try easy autopairing with LCS", function() {
+        var pcc = new PCC({
+            datasets    : DATA._1,
+            strategy    : 'lcs',
+            twoPassAutopairing : false
+        });
+        equal( pcc.unpaired.length, 0 );
+        equal( pcc.paired.length, pcc.initialList.length / 2 );
+    });
+
+    test( "Try easy autopairing with Levenshtein", function() {
+        var pcc = new PCC({
+            datasets    : DATA._1,
+            strategy    : 'levenshtein',
+            twoPassAutopairing : false
+        });
+        equal( pcc.unpaired.length, 0 );
+        equal( pcc.paired.length, pcc.initialList.length / 2 );
+    });
+
+    //TODO:
+    //  filters: clearing, setting via popover, regex
+    //  partition: maximize paired, maximize unpaired, split evenly
+    //  pairing: manually pairing and unpairing
+    //  misc: renaming pairs, removing file extensions
+});
diff --git a/test/qunit/tests/masthead_tests.html b/test/qunit/tests/masthead_tests.html
new file mode 100644
index 0000000..c0add87
--- /dev/null
+++ b/test/qunit/tests/masthead_tests.html
@@ -0,0 +1,11 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <link href="../../../static/style/blue/base.css" media="screen" rel="stylesheet" type="text/css">
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/masthead_tests.js b/test/qunit/tests/masthead_tests.js
new file mode 100644
index 0000000..9961285
--- /dev/null
+++ b/test/qunit/tests/masthead_tests.js
@@ -0,0 +1,122 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([ "test-app", "layout/masthead"
+], function( testApp, Masthead ){
+    "use strict";
+    module( "Masthead test", {
+        setup: function() {
+            testApp.create();
+            var self = this;
+            this.masthead = new Masthead.View({
+                'brand'                     : 'brand',
+                'use_remote_user'           : 'use_remote_user',
+                'remote_user_logout_href'   : 'remote_user_logout_href',
+                'lims_doc_url'              : 'lims_doc_url',
+                'biostar_url'               : 'biostar_url',
+                'biostar_url_redirect'      : 'biostar_url_redirect',
+                'support_url'               : 'support_url',
+                'search_url'                : 'search_url',
+                'mailing_lists'             : 'mailing_lists',
+                'screencasts_url'           : 'screencasts_url',
+                'wiki_url'                  : 'wiki_url',
+                'citation_url'              : 'citation_url',
+                'terms_url'                 : 'terms_url',
+                'logo_url'                  : 'logo_url',
+                'logo_src'                  : '../../../static/images/galaxyIcon_noText.png',
+                'is_admin_user'             : 'is_admin_user',
+                'active_view'               : 'analysis',
+                'ftp_upload_dir'            : 'ftp_upload_dir',
+                'ftp_upload_site'           : 'ftp_upload_site',
+                'datatypes_disable_auto'    : true,
+                'allow_user_creation'       : true,
+                'enable_cloud_launch'       : true,
+                'user_requests'             : true
+            });
+            $( 'body' ).append( this.masthead.render().$el );
+        },
+        teardown: function() {
+            testApp.destroy();
+        }
+    } );
+
+    test( 'tabs', function() {
+        var tab = this.masthead.collection.findWhere( { id: 'analysis' } );
+        var $tab = $( '#analysis' ).find( '.dropdown' );
+        var $toggle = $tab.find( '.dropdown-toggle' );
+        var $note = $tab.find( '.dropdown-note' );
+        var $menu = $tab.find( 'ul' );
+        ok( tab && $tab.length == 1, 'Found analysis tab' );
+        tab.set( 'title', 'Analyze' );
+        ok( $toggle.html() == 'Analyze', 'Correct title' );
+        ok( tab.get( 'target' ) == '_parent', 'Correct initial target' );
+        tab.set( 'target', '_target' );
+        ok( $toggle.attr( 'target' ) == '_target', 'Correct test target' );
+        ok( $tab.css( 'visibility' ) == 'visible', 'Tab visible' );
+        tab.set( 'visible', false );
+        ok( $tab.css( 'visibility' ) == 'hidden', 'Tab hidden' );
+        tab.set( 'visible', true );
+        ok( $tab.css( 'visibility' ) == 'visible', 'Tab visible, again' );
+        ok( $toggle.attr( 'href' ) == Galaxy.root, 'Correct initial url' );
+        tab.set( 'url', '_url' );
+        ok( $toggle.attr( 'href' ) == '/_url', 'Correct test url' );
+        tab.set( 'url', 'http://_url' );
+        ok( $toggle.attr( 'href' ) == 'http://_url', 'Correct http url' );
+        tab.set( 'tooltip', '_tooltip' );
+        $toggle.trigger( 'mouseover' );
+        ok( $( '.tooltip-inner' ).html() == '_tooltip', 'Correct tooltip' );
+        tab.set( 'tooltip', null );
+        $toggle.trigger( 'mouseover' );
+        ok( $( '.tooltip-inner' ).length == 0, 'Tooltip removed' );
+        tab.set( 'tooltip', '_tooltip_new' );
+        $toggle.trigger( 'mouseover' );
+        ok( $( '.tooltip-inner' ).html() == '_tooltip_new', 'Correct new tooltip' );
+        tab.set( 'cls', '_cls' );
+        ok( $toggle.hasClass( '_cls' ), 'Correct extra class' );
+        tab.set( 'cls', '_cls_new' );
+        ok( $toggle.hasClass( '_cls_new' ) && !$toggle.hasClass( '_cls' ), 'Correct new extra class' );
+        ok( $note.html() == '', 'Correct empty note' );
+        tab.set( { 'note' : '_note', 'show_note' : true } );
+        ok( $note.html() == '_note', 'Correct new note' );
+        tab.set( 'toggle', true );
+        ok( $toggle.hasClass( 'toggle' ), 'Toggled' );
+        tab.set( 'toggle', false );
+        ok( !$toggle.hasClass( 'toggle' ), 'Untoggled' );
+        tab.set( 'disabled', true );
+        ok( $tab.hasClass( 'disabled' ), 'Correctly disabled' );
+        tab.set( 'disabled', false );
+        ok( !$tab.hasClass( 'disabled' ), 'Correctly enabled' );
+        ok( $tab.hasClass( 'active' ), 'Highlighted' );
+        tab.set( 'active', false );
+        ok( !$tab.hasClass( 'active' ), 'Not highlighted' );
+        tab.set( 'active', true );
+        ok( $tab.hasClass( 'active' ), 'Highlighted, again' );
+        tab.set( 'menu', [ { title: '_menu_title', url: '_menu_url', target: '_menu_target' } ] );
+        ok( $menu.hasClass( 'dropdown-menu' ), 'Menu has correct class' );
+        ok( $menu.css( 'display' ) == 'none', 'Menu hidden' );
+        $toggle.trigger( 'click' );
+        ok( $menu.css( 'display' ) == 'block', 'Menu shown' );
+        var $item = $menu.find( 'a' );
+        ok( $item.length == 1, 'Added one menu item' );
+        ok( $item.html() == '_menu_title', 'Menu item has correct title' );
+        ok( $item.attr( 'href' ) == '/_menu_url', 'Menu item has correct url' );
+        ok( $item.attr( 'target' ) == '_menu_target', 'Menu item has correct target' );
+        tab.set( 'menu', null );
+        $item = $menu.find( 'a' );
+        ok( $item.length == 0, 'All menu items removed' );
+        tab.set( 'menu', [ { title: '_menu_title_0', url: '_menu_url_0', target: '_menu_target_0' },
+                           { title: '_menu_title_1', url: '_menu_url_1', target: '_menu_target_1' } ] );
+        $item = $menu.find( 'a' );
+        ok( $item.length == 2, 'Two menu items added' );
+        tab.set( 'show_menu', false );
+        ok( $menu.css( 'display' ) == 'none', 'Menu manually hidden' );
+        tab.set( 'show_menu', true );
+        ok( $menu.css( 'display' ) == 'block', 'Menu manually shown, again' );
+        // reuse the variables declared above instead of redeclaring them
+        tab = this.masthead.collection.findWhere( { id: 'enable-scratchbook' } );
+        $tab = $( '#enable-scratchbook' ).find( '.dropdown' );
+        ok( tab && $tab.length == 1, 'Found tab to enable scratchbook' );
+        $toggle = $tab.find( '.dropdown-toggle' );
+        ok( !$toggle.hasClass( 'toggle' ), 'Untoggled before click' );
+        $toggle.trigger( 'click' );
+        ok( $toggle.hasClass( 'toggle' ), 'Toggled after click' );
+        ok( Galaxy.frame.active, 'Scratchbook is active' );
+    } );
+});
\ No newline at end of file
diff --git a/test/qunit/tests/metrics-logger.html b/test/qunit/tests/metrics-logger.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/metrics-logger.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/metrics-logger.js b/test/qunit/tests/metrics-logger.js
new file mode 100644
index 0000000..7cd5bc4
--- /dev/null
+++ b/test/qunit/tests/metrics-logger.js
@@ -0,0 +1,357 @@
+// Tests for utils/metrics-logger: the client-side MetricsLogger and its
+// localStorage-backed LoggingCache.
+define([
+    "utils/metrics-logger",
+    "jquery",
+    "sinon-qunit"
+], function(
+    metrics,
+    $,
+    sinon
+){
+    /*globals equal test module expect deepEqual strictEqual throws ok */
+    "use strict";
+
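+    // minimal console stand-in: records the last ( level, args ) call so tests
+    // can assert on what would have been logged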
+    var MockConsole = function(){
+        var self = this;
+        self.lastMessage = null;
+        [ 'log', 'debug', 'info', 'warn', 'error' ].forEach( function( fnName ){
+            self[ fnName ] = function(){
+                var args = Array.prototype.slice.call( arguments, 0 );
+                self.lastMessage = { level: fnName, args: args };
+            };
+        });
+        return self;
+    };
+
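+    // stub a bootstrapped user; the logger presumably reads window.bootstrapped.user
+    // when tagging entries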
+    ( window.bootstrapped = {} ).user = {
+        id : 'test'
+    };
+
+    module( "Metrics logger tests" );
+    // ======================================================================== MetricsLogger
+    test( "logger construction/initializiation defaults", function() {
+        var logger = new metrics.MetricsLogger({});
+        equal( logger.consoleLogger, null );
+        equal( logger.options.logLevel,         metrics.MetricsLogger.NONE );
+        equal( logger.options.consoleLevel,     metrics.MetricsLogger.NONE );
+        equal( logger.options.defaultNamespace, 'Galaxy' );
+        equal( logger.options.clientPrefix,     'client.' );
+        equal( logger.options.postSize,         1000 );
+        equal( logger.options.maxCacheSize,     3000 );
+        equal( logger.options.addTime,          true );
+        equal( logger.options.postUrl,          '/api/metrics' );
+        equal( logger.options.getPingData,      undefined );
+        equal( logger.options.onServerResponse, undefined );
+
+        equal( logger._postSize, 1000 );
+        equal( logger.cache.constructor, metrics.LoggingCache );
+    });
+
+    test( "_parseLevel", function() {
+        var logger = new metrics.MetricsLogger({});
+        equal( logger._parseLevel( 'all' ),     metrics.MetricsLogger.ALL );
+        equal( logger._parseLevel( 'debug' ),   metrics.MetricsLogger.DEBUG );
+        equal( logger._parseLevel( 'info' ),    metrics.MetricsLogger.INFO );
+        equal( logger._parseLevel( 'warn' ),    metrics.MetricsLogger.WARN );
+        equal( logger._parseLevel( 'error' ),   metrics.MetricsLogger.ERROR );
+        equal( logger._parseLevel( 'metric' ),  metrics.MetricsLogger.METRIC );
+        equal( logger._parseLevel( 'none' ),    metrics.MetricsLogger.NONE );
+        equal( logger._parseLevel( 15 ),        15 );
+
+        throws( function(){
+            logger._parseLevel( undefined );
+        }, /Unknown log level/, 'Unknown log level throws error' );
+        throws( function(){
+            logger._parseLevel( 'nope' );
+        }, /Unknown log level/, 'Unknown log level throws error' );
+    });
+
+    // ------------------------------------------------------------------------ Emit to cache
+    test( "emit to cache at level", function() {
+        var logger = new metrics.MetricsLogger({
+            logLevel : 'metric'
+        });
+        logger.cache.empty();
+
+        equal( logger.options.logLevel, metrics.MetricsLogger.METRIC );
+        logger.emit( 'metric', 'test', [ 1, 2, { three: 3 }] );
+        equal( logger.cache.length(), 1 );
+
+        var cached = logger.cache.get( 1 )[0];
+        //console.debug( 'cached:', JSON.stringify( cached ) );
+        equal( cached.level, metrics.MetricsLogger.METRIC );
+        equal( cached.namespace, 'client.test' );
+        equal( cached.args.length, 3 );
+        equal( cached.args[2].three, 3 );
+        ok( typeof cached.time === 'string' );
+        ok( cached.time === new Date( cached.time ).toISOString() );
+    });
+
+    test( "emit to cache below does not cache", function() {
+        var logger = new metrics.MetricsLogger({
+            logLevel : 'metric'
+        });
+        logger.cache.empty();
+
+        logger.emit( 'error', 'test', [ 1, 2, { three: 3 }] );
+        equal( logger.cache.length(), 0 );
+    });
+
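+    // window holds circular references ( window.window === window ), so the entry
+    // cannot be JSON-serialized and should be dropped without raising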
+    test( "emit to cache (silently) drops non-parsable", function() {
+        var logger = new metrics.MetricsLogger({
+            logLevel : 'metric'
+        });
+        logger.cache.empty();
+
+        logger.emit( 'metric', 'test', [{ window: window }] );
+        equal( logger.cache.length(), 0 );
+    });
+
+    function metricsFromRequestBody( request ){
+        // assumes 'metrics' is only entry in requestBody
+        return JSON.parse( decodeURIComponent( request.requestBody.replace( 'metrics=', '' ) ) );
+    }
+
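+    // sinon.fakeServer.create() swaps in a fake XHR backend; respondWith() registers
+    // a handler and server.respond() flushes pending requests synchronously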
+    test( "_postCache success", function () {
+        var callback = sinon.spy(),
+            logger = new metrics.MetricsLogger({
+                logLevel : 'metric',
+                onServerResponse : function( response ){ callback(); }
+            });
+        logger.cache.empty();
+
+        var server = sinon.fakeServer.create(),
+            metricsOnServer;
+        server.respondWith( 'POST', '/api/metrics', function( request ){
+            metricsOnServer = metricsFromRequestBody( request );
+            //console.debug( 'requestBody:', request.requestBody );
+            //console.debug( 'metricsOnServer:', JSON.stringify( metricsOnServer, null, '  ' ) );
+            request.respond(
+                200,
+                { "Content-Type": "application/json" },
+                JSON.stringify({
+                    fakeResponse: 'yes'
+                })
+            );
+        });
+
+        logger.emit( 'metric', 'test', [ 1, 2, { three: 3 }] );
+        logger._postCache();
+        server.respond();
+
+        ok( callback.calledOnce, 'onServerResponse was called' );
+        equal( logger.cache.length(), 0, 'should have emptied cache (on success)' );
+        equal( logger._postSize, 1000, '_postSize still at default' );
+
+        // metrics were in proper form on server
+        equal( metricsOnServer.length, 1 );
+        var metric = metricsOnServer[0];
+        equal( metric.level, metrics.MetricsLogger.METRIC );
+        equal( metric.namespace, 'client.test' );
+        equal( metric.args.length, 3 );
+        equal( metric.args[2].three, 3 );
+        ok( typeof metric.time === 'string' );
+        ok( metric.time === new Date( metric.time ).toISOString() );
+
+        server.restore();
+    });
+
+    test( "_postCache failure", function () {
+        var callback = sinon.spy(),
+            logger = new metrics.MetricsLogger({
+                logLevel : 'metric',
+                onServerResponse : function( response ){ callback(); }
+            });
+        logger.cache.empty();
+
+        var server = sinon.fakeServer.create();
+        server.respondWith( 'POST', '/api/metrics', function( request ){
+            request.respond(
+                500,
+                { "Content-Type": "application/json" },
+                JSON.stringify({
+                    err_msg: 'NoooOPE!'
+                })
+            );
+        });
+
+        logger.emit( 'metric', 'test', [ 1, 2, { three: 3 }] );
+        logger._postCache();
+        server.respond();
+        //TODO: is the following what we want?
+        ok( !callback.calledOnce, 'onServerResponse was NOT called' );
+        equal( logger.cache.length(), 1, 'should NOT have emptied cache' );
+        equal( logger._postSize, logger.options.maxCacheSize, '_postSize changed to max' );
+
+        server.restore();
+    });
+
+    // ------------------------------------------------------------------------ Emit to console
+    test( "emit to console at level", function() {
+        var mockConsole = new MockConsole(),
+            logger = new metrics.MetricsLogger({
+                consoleLevel    : 'debug',
+                consoleLogger   : mockConsole
+            });
+        equal( logger.options.consoleLevel, metrics.MetricsLogger.DEBUG );
+        equal( logger.consoleLogger.constructor, MockConsole );
+
+        logger.emit( 'debug', 'test', [ 1, 2, { three: 3 }] );
+        equal( logger.cache.length(), 1 );
+        //console.debug( JSON.stringify( mockConsole.lastMessage ) );
+        equal( mockConsole.lastMessage.level, 'debug' );
+        equal( mockConsole.lastMessage.args.length, 4 );
+        equal( mockConsole.lastMessage.args[0], 'test' );
+        equal( mockConsole.lastMessage.args[3].three, 3 );
+    });
+
+    test( "emit to console below does not output", function() {
+        var mockConsole = new MockConsole(),
+            logger = new metrics.MetricsLogger({
+                consoleLevel    : 'error',
+                consoleLogger   : mockConsole
+            });
+        logger.emit( 'debug', 'test', [ 1, 2, { three: 3 }] );
+        equal( mockConsole.lastMessage, null );
+    });
+
+    // ------------------------------------------------------------------------ Shortcuts
+    test( "logger shortcuts emit to default namespace properly", function() {
+        var logger = new metrics.MetricsLogger({
+                logLevel    : 'all'
+            });
+        logger.cache.empty();
+
+        equal( logger.options.logLevel, metrics.MetricsLogger.ALL );
+        logger.log( 0 );
+        logger.debug( 1 );
+        logger.info( 2 );
+        logger.warn( 3 );
+        logger.error( 4 );
+        logger.metric( 5 );
+
+        equal( logger.cache.length(), 6 );
+        var cached = logger.cache.remove( 6 ),
+            entry;
+
+        cached.forEach( function( entry ){
+            ok( entry.namespace === logger.options.clientPrefix + logger.options.defaultNamespace );
+            ok( jQuery.type( entry.args ) === 'array' );
+            ok( typeof entry.time === 'string' );
+        });
+
+        // 'log' is the odd one out: it is cached with the raw numeric level 1
+        entry = cached[0];
+        ok( entry.level === 1 );
+        ok( entry.args[0] === 0 );
+
+        [ 'debug', 'info', 'warn', 'error', 'metric' ].forEach( function( level, i ){
+            entry = cached[( i + 1 )];
+            ok( entry.level === logger._parseLevel( level ) );
+            ok( entry.args[0] === ( i + 1 ) );
+        });
+    });
+
+
+    // ======================================================================== LoggingCache
+    test( "cache construction/initializiation defaults", function() {
+        // use empty to prevent tests stepping on one another due to persistence
+        var cache = new metrics.LoggingCache({ key: 'logs-test' }).empty();
+        equal( cache.maxSize,   5000 );
+        equal( window.localStorage.getItem( 'logs-test' ), '[]' );
+    });
+
+    test( "cache construction/initializiation failure", function() {
+        ////TODO: doesn't work - readonly
+        //window.localStorage = null;
+        //console.debug( 'localStorage:', window.localStorage );
+        var oldFn = metrics.LoggingCache.prototype._hasStorage;
+        metrics.LoggingCache.prototype._hasStorage = function(){ return false; };
+        throws( function(){
+            return new metrics.LoggingCache({ key: 'logs-test' });
+        }, /LoggingCache needs localStorage/, 'lack of localStorage throws error' );
+        metrics.LoggingCache.prototype._hasStorage = oldFn;
+
+        throws( function(){
+            return new metrics.LoggingCache();
+        }, /LoggingCache needs key for localStorage/, 'lack of key throws error' );
+    });
+
+    test( "cache construction/initializiation setting max cache size", function() {
+        var cache = new metrics.LoggingCache({
+            key     : 'logs-test',
+            maxSize : 5
+        }).empty();
+        equal( cache.maxSize, 5 );
+    });
+
+    test( "cache plays well with no data", function() {
+        var cache = new metrics.LoggingCache({ key: 'logs-test' }).empty();
+
+        equal( cache.length(), 0 );
+        var get = cache.get( 10 );
+        ok( jQuery.type( get ) === 'array' && get.length === 0 );
+        var remove = cache.remove( 10 );
+        ok( jQuery.type( remove ) === 'array' && remove.length === 0 );
+        equal( cache.length(), 0 );
+    });
+
+    test( "cache add properly adds and removes data", function() {
+        var cache = new metrics.LoggingCache({
+            key     : 'logs-test',
+            maxSize : 5
+        }).empty();
+
+        var entry1 = [{ one: 1 }, 'two' ];
+        cache.add( entry1 );
+
+        equal( cache.length(), 1 );
+        equal( JSON.stringify( cache.get( 1 )[0] ), JSON.stringify( entry1 ) );
+
+        var entry2 = { blah: { one: 1 }, bler: [ 'three', { two: 2 } ] };
+        cache.add( entry2 );
+        equal( cache.length(), 2 );
+        equal( cache.stringify( 2 ), '[' + JSON.stringify( entry1 ) + ',' + JSON.stringify( entry2 ) + ']' );
+
+        // FIFO
+        var returned = cache.remove( 1 );
+        equal( cache.length(), 1 );
+        ok( jQuery.type( returned ) === 'array' && returned.length === 1 );
+        var returned0 = returned[0];
+        ok( jQuery.type( returned0 ) === 'array' && JSON.stringify( returned0 ) === JSON.stringify( entry1 ) );
+    });
+
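+    // eviction is FIFO: with maxSize 5, adding ten entries should leave only the
+    // five newest ( indices 5..9 )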
+    test( "cache past max loses oldest", function() {
+        var cache = new metrics.LoggingCache({
+            key     : 'logs-test',
+            maxSize : 5
+        }).empty();
+
+        for( var i=0; i<10; i+=1 ){
+            cache.add({ index: i });
+        }
+        equal( cache.length(), 5 );
+        var get = cache.get( 5 );
+        ok( get[0].index === 5 );
+        ok( get[1].index === 6 );
+        ok( get[2].index === 7 );
+        ok( get[3].index === 8 );
+        ok( get[4].index === 9 );
+    });
+
+    test( "cache is properly persistent", function() {
+        var cache1 = new metrics.LoggingCache({ key : 'logs-test' }).empty(),
+            entry = [{ one: 1 }, 'two' ];
+        cache1.add( entry );
+        equal( cache1.length(), 1 );
+
+        var cache2 = new metrics.LoggingCache({ key : 'logs-test' });
+        equal( cache2.length(), 1, 'old key gets previously stored' );
+        equal( JSON.stringify( cache2.get( 1 )[0] ), JSON.stringify( entry ) );
+
+        var cache3 = new metrics.LoggingCache({ key : 'logs-bler' });
+        equal( cache3.length(), 0, 'new key causes new storage' );
+    });
+
+});
diff --git a/test/qunit/tests/modal_tests.html b/test/qunit/tests/modal_tests.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/modal_tests.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/modal_tests.js b/test/qunit/tests/modal_tests.js
new file mode 100644
index 0000000..d2c329c
--- /dev/null
+++ b/test/qunit/tests/modal_tests.js
@@ -0,0 +1,87 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([ "test-app", "mvc/ui/ui-modal"
+], function( testApp, GalaxyModal ){
+    "use strict";
+    module( "Modal dialog test", {
+        setup: function() {
+            testApp.create();
+            var self = this;
+            this.app = new GalaxyModal.View({
+                title   : 'Test title',
+                body    : 'Test body',
+                buttons : {
+                    'Ok' : function() {},
+                    'Cancel' : function() { self.app.hide(); }
+                }
+            });
+        },
+        teardown: function() {
+            testApp.destroy();
+        }
+    } );
+
+    test( "test dialog attributes", function() {
+        ok( this.app.$header.find( '.title' ).html() == 'Test title', 'Modal header has correct title.');
+        ok( this.app.$body.html() == 'Test body', 'Modal header has correct body.');
+    } );
+
+    test( "test dialog visibility", function() {
+        ok( this.app.$el.css( 'display' ) == 'block', 'Modal is initially visible' );
+        this.app.hide();
+        this.clock.tick( WAIT_FADE );
+        ok( this.app.$el.css( 'display' ) == 'none', 'Modal hidden manually' );
+        this.app.show();
+        this.clock.tick( WAIT_FADE );
+        ok( this.app.$el.css( 'display' ) == 'block', 'Modal shown manually' );
+        this.app.getButton( 'Ok' ).trigger( 'click' );
+        this.clock.tick( WAIT_FADE );
+        ok( this.app.$el.css( 'display' ) == 'block', 'Modal still visible after clicking Ok' );
+        this.app.getButton( 'Cancel' ).trigger( 'click' );
+        this.clock.tick( WAIT_FADE );
+        ok( this.app.$el.css( 'display' ) == 'none', 'Modal hidden after clicking Cancel' );
+        this.app.show();
+        this.clock.tick( WAIT_FADE );
+        ok( this.app.$el.css( 'display' ) == 'block', 'Modal manually shown again' );
+        ok( !this.app.$header.hasClass( 'no-separator' ), 'Title separator tagged as visible.' );
+        this.app.show({ title_separator: false });
+        ok( this.app.$header.hasClass( 'no-separator' ), 'Title separator tagged as hidden.' );
+        ok( this.app.$backdrop.hasClass( 'in' ), 'Backdrop tagged as shown.');
+        this.app.show({ backdrop: false });
+        ok( !this.app.$backdrop.hasClass( 'in' ), 'Backdrop tagged as hidden.');
+    } );
+
+    test( "test dialog closing events", function() {
+        this.app.$backdrop.trigger( 'click' );
+        this.clock.tick( WAIT_FADE );
+        ok( this.app.$el.css( 'display' ) == 'block', 'Modal shown after backdrop click' );
+        this.app.show({ closing_events: true });
+        ok( this.app.$el.css( 'display' ) == 'block', 'Modal shown with closing events' );
+        this.app.$backdrop.trigger( 'click' );
+        this.clock.tick( WAIT_FADE );
+        ok( this.app.$el.css( 'display' ) == 'none', 'Modal hidden after backdrop click' );
+    } );
+
+    test( "test dialog rendering", function() {
+        var before = this.app.$el.html();
+        this.app.render();
+        ok( before == this.app.$el.html(), 'Re-rendering successful' );
+        this.app.options.title = 'New Title';
+        this.app.render();
+        ok( this.app.$header.find( '.title' ).html() == 'New Title', 'Modal header has correct new title.' );
+    });
+
+    test( "test button states", function() {
+        ok( this.app.getButton( 'Ok' ).html() === 'Ok', 'Ok has correct label' );
+        ok( !this.app.getButton( 'Ok' ).prop( 'disabled' ), 'Ok is active' );
+        ok( !this.app.getButton( 'Cancel' ).prop( 'disabled' ), 'Cancel is active' );
+        this.app.disableButton( 'Ok' );
+        ok( this.app.getButton( 'Ok' ).prop( 'disabled' ), 'Ok is disabled' );
+        ok( !this.app.getButton( 'Cancel' ).prop( 'disabled' ), 'Cancel is still active' );
+        this.app.disableButton( 'Cancel' );
+        ok( this.app.getButton( 'Cancel' ).prop( 'disabled' ), 'Cancel is also disabled' );
+        ok( this.app.getButton( 'Ok' ).prop( 'disabled' ), 'Ok is still disabled' );
+        this.app.enableButton( 'Ok' );
+        ok( this.app.getButton( 'Cancel' ).prop( 'disabled' ), 'Cancel is still disabled' );
+        ok( !this.app.getButton( 'Ok' ).prop( 'disabled' ), 'Ok is active again' );
+    } );
+});
\ No newline at end of file
diff --git a/test/qunit/tests/page_tests.html b/test/qunit/tests/page_tests.html
new file mode 100644
index 0000000..c0add87
--- /dev/null
+++ b/test/qunit/tests/page_tests.html
@@ -0,0 +1,11 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <link href="../../../static/style/blue/base.css" media="screen" rel="stylesheet" type="text/css">
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/page_tests.js b/test/qunit/tests/page_tests.js
new file mode 100644
index 0000000..720061b
--- /dev/null
+++ b/test/qunit/tests/page_tests.js
@@ -0,0 +1,50 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([ "test-app", "layout/page", "layout/panel",
+], function( testApp, Page, Panel ){
+    "use strict";
+    module( "Page test", {
+        setup: function() {
+            testApp.create();
+            $( 'body' ).append( this.$container = $( '<div/>' ).css( 'display', 'none' ) );
+        },
+        teardown: function() {
+            testApp.destroy();
+        }
+    } );
+
+    function _check( page, sidePanels ) {
+        ok( page.$( '#center' ).length == 1, 'Center panel found.' );
+        _.each( sidePanels, function( panelVisible, panelId ) {
+            // parenthesize the ternary: '==' binds tighter than '?:'
+            ok( page.$( '#' + panelId ).length == ( panelVisible ? 1 : 0 ), ( panelVisible ? '' : 'No' ) + ' ' + panelId + ' panel found.' );
+            ok( _.has( page, panelId ) == panelVisible, 'Panel attribute valid.' );
+            panelVisible && ok( page.$( '#' + panelId ).find( '.panel-header-text' ).text() == '_title', 'Title correct' );
+        });
+    }
+
+    test( "test center/right", function() {
+        this.$container.empty();
+        var page = new Page.PageLayoutView({
+            el      : this.$container,
+            center  : new Panel.CenterPanel({}),
+            right   : new Panel.RightPanel({ title: '_title' })
+        }).render();
+        _check( page, { left: false, right: true } );
+    });
+    test( "test center", function() {
+        this.$container.empty();
+        var page = new Page.PageLayoutView({
+            el      : this.$container,
+            center  : new Panel.CenterPanel({})
+        }).render();
+        _check( page, { left: false, right: false } );
+    });
+    test( "test left/center", function() {
+        this.$container.empty();
+        var page = new Page.PageLayoutView({
+            el      : this.$container,
+            center  : new Panel.CenterPanel({}),
+            left    : new Panel.LeftPanel({ title: '_title' })
+        }).render();
+        _check( page, { left: true, right: false } );
+    });
+});
\ No newline at end of file
diff --git a/test/qunit/tests/popover_tests.html b/test/qunit/tests/popover_tests.html
new file mode 100644
index 0000000..c0add87
--- /dev/null
+++ b/test/qunit/tests/popover_tests.html
@@ -0,0 +1,11 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <link href="../../../static/style/blue/base.css" media="screen" rel="stylesheet" type="text/css">
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/popover_tests.js b/test/qunit/tests/popover_tests.js
new file mode 100644
index 0000000..51eaff6
--- /dev/null
+++ b/test/qunit/tests/popover_tests.js
@@ -0,0 +1,40 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([ "test-app", "mvc/ui/ui-misc", "mvc/ui/ui-popover"
+], function( testApp, Ui, Popover ){
+    "use strict";
+    module( "Popover test", {
+        setup: function() {
+            testApp.create();
+            var self = this;
+            this.button = new Ui.Button({
+                title   : 'Test button',
+                onclick : function() {
+                    self.popover.show();
+                }
+            });
+            this.$parent = $( '<div/>' ).append( this.button.$el );
+            this.popover = new Popover.View({
+                title       : 'Test Title',
+                body        : 'Test Body',
+                placement   : 'bottom',
+                container   : this.button.$el
+            });
+            $( 'body' ).append( this.$parent );
+        },
+        teardown: function() {
+            testApp.destroy();
+        }
+    } );
+
+    test( "test popover visibility", function() {
+        ok( this.popover.$el.css( 'display' ) == 'none', 'Popover is hidden.' );
+        this.button.$el.trigger( 'click' );
+        ok( this.popover.$el.css( 'display' ) == 'block', 'Popover is shown.' );
+        ok( this.popover.$el.hasClass( 'bottom' ), 'Popover at bottom.' );
+        this.popover.hide();
+        ok( this.popover.$el.css( 'display' ) == 'none', 'Popover is hidden manually.' );
+        ok( this.popover.$title.html() == 'Test Title', 'Initial title correct.' );
+        this.popover.title( 'New Title' );
+        ok( this.popover.$title.html() == 'New Title', 'New title correct.' );
+    } );
+});
\ No newline at end of file
diff --git a/test/qunit/tests/ui_tests.html b/test/qunit/tests/ui_tests.html
new file mode 100644
index 0000000..c0add87
--- /dev/null
+++ b/test/qunit/tests/ui_tests.html
@@ -0,0 +1,11 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <link href="../../../static/style/blue/base.css" media="screen" rel="stylesheet" type="text/css">
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/ui_tests.js b/test/qunit/tests/ui_tests.js
new file mode 100644
index 0000000..3a1ab6a
--- /dev/null
+++ b/test/qunit/tests/ui_tests.js
@@ -0,0 +1,787 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([ 'test-app', 'mvc/ui/ui-misc', 'mvc/ui/ui-select-content', 'mvc/ui/ui-drilldown', 'mvc/ui/ui-thumbnails', 'mvc/ui/ui-tabs'
+], function( testApp, Ui, SelectContent, Drilldown, Thumbnails, Tabs ){
+    'use strict';
+    module( 'Ui test', {
+        setup: function() {
+            testApp.create();
+        },
+        teardown: function() {
+            testApp.destroy();
+        }
+    } );
+
+    test( 'tabs', function() {
+        var self = this;
+        var tabs = new Tabs.View({});
+        var collection = tabs.collection;
+        collection.add( { id: 'id_a', title: 'title_a', icon: 'icon_a', $el: 'el_a' } );
+        var _test = function() {
+            self.clock.tick( window.WAIT_FADE );
+            collection.each( function( model, index ) {
+                var $tab_element = tabs.$( '#tab-' + model.id );
+                var $tab_content = tabs.$( '#' + model.id );
+                var is_current = model.id == tabs.model.get( 'current' );
+                ok( $tab_content.hasClass( 'active' ) == is_current, 'Active state of content.' );
+                ok( $tab_element.hasClass( 'active' ) == is_current, 'Active state of element.' );
+                ok( $tab_element.css( 'display' ) == ( model.get( 'hidden' ) ? 'none' : 'list-item' ), 'Element visibility.' );
+            });
+        };
+        $( 'body' ).prepend( tabs.$el );
+        _test();
+        collection.add( { id: 'id_b', title: 'title_b', icon: 'icon_b', $el: 'el_b' } );
+        _test();
+        tabs.collection.get( 'id_b' ).set( 'hidden', true );
+        _test();
+        collection.add( { id: 'id_c', title: 'title_c', icon: 'icon_c', $el: 'el_c' } );
+        tabs.model.set( 'current', 'id_c' );
+        _test();
+        tabs.collection.get( 'id_b' ).set( 'hidden', false );
+        _test();
+        tabs.model.set( 'current', 'id_b' );
+        _test();
+        tabs.model.set( 'visible', false );
+        tabs.collection.reset();
+        self.clock.tick( window.WAIT_FADE );
+        ok( tabs.$el.css( 'display' ) == 'none', 'Everything hidden.' );
+        tabs.model.set( 'visible', true );
+        self.clock.tick( window.WAIT_FADE );
+        ok( tabs.$el.css( 'display' ) == 'block', 'Everything shown.' );
+        collection.add( { id: 'id_c', title: 'title_c', icon: 'icon_c', $el: 'el_c' } );
+        tabs.model.set( 'current', 'id_c' );
+        _test();
+    });
+
+    test( 'thumbnails', function() {
+        var _test = function( options ) {
+            ok( thumb.$( '.tab-pane' ).length == options.ntabs, 'Correct number of tabs.' );
+            ok( thumb.$( '.ui-thumbnails-item' ).length == options.nitems, 'Correct number of thumbnail items.' );
+            ok( $(thumb.$( '.ui-thumbnails-image' )[ options.index || 0 ]).attr( 'src' ) == options.image_src, 'Correct image source' );
+            ok( $(thumb.$( '.ui-thumbnails-title' )[ options.index || 0 ]).html() == options.title, 'Correct title with icon' );
+            ok( $(thumb.$( '.ui-thumbnails-description-text' )[ options.index || 0 ]).html() == options.description, 'Correct description' );
+        };
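+        // note: each collection entry is rendered once per tab ( ntabs == 2 ), so
+        // nitems is expected to be twice the collection length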
+        var thumb = new Thumbnails.View({
+            title_default   : 'title_default',
+            title_list      : 'title_list',
+            collection      : [{
+                id          : 'id',
+                keywords    : 'default',
+                title       : 'title',
+                title_icon  : 'title_icon',
+                image_src   : 'image_src',
+                description : 'description'
+            }]
+        });
+        var model = thumb.model;
+        $( 'body' ).prepend( thumb.$el );
+        _test({
+            ntabs       : 2,
+            nitems      : 2,
+            image_src   : 'image_src',
+            title       : '<span class="fa title_icon"></span>title',
+            description : 'description'
+        });
+        thumb.collection.add({
+            id          : 'id_a',
+            keywords    : 'default_a',
+            title       : 'title_a',
+            title_icon  : 'title_icon_a',
+            image_src   : 'image_src_a',
+            description : 'description_a'
+        });
+        this.clock.tick ( window.WAIT_FADE );
+        _test({
+            index       : 1,
+            ntabs       : 2,
+            nitems      : 4,
+            image_src   : 'image_src_a',
+            title       : '<span class="fa title_icon_a"></span>title_a',
+            description : 'description_a'
+        });
+    });
+
+    test( 'button-default', function() {
+        var button = new Ui.Button( { title: 'title' } );
+        var model = button.model;
+        $( 'body' ).prepend( button.$el );
+        ok( button.$title.html() == 'title', 'Has correct title' );
+        model.set( 'title', '_title' );
+        ok( button.$title.html() == '_title', 'Has correct new title' );
+        ok( !button.$el.attr( 'disabled' ), 'Button active' );
+        model.set( 'disabled', true );
+        ok( button.$el.attr( 'disabled' ), 'Button disabled' );
+        model.set( 'disabled', false );
+        ok( !button.$el.attr( 'disabled' ), 'Button active, again' );
+        model.set( 'wait', true );
+        ok( button.$title.html() == model.get( 'wait_text' ), 'Shows correct wait text' );
+        model.set( 'wait_text', 'wait_text' );
+        ok( button.$title.html() == 'wait_text', 'Shows correct new wait text' );
+        model.set( 'wait', false );
+        ok( button.$title.html() == model.get( 'title' ), 'Shows correct regular title' );
+    });
+
+    test( 'button-icon', function() {
+        var button = new Ui.ButtonIcon( { title: 'title' } );
+        var model = button.model;
+        $( 'body' ).prepend( button.$el );
+        ok( button.$title.html() == 'title', 'Has correct title' );
+        model.set( 'title', '_title' );
+        ok( button.$title.html() == '_title', 'Has correct new title' );
+        ok( !button.$el.attr( 'disabled' ), 'Button active' );
+        model.set( 'disabled', true );
+        ok( button.$el.attr( 'disabled' ), 'Button disabled' );
+        model.set( 'disabled', false );
+        ok( !button.$el.attr( 'disabled' ), 'Button active, again' );
+    });
+
+    test( 'button-check', function() {
+        var button = new Ui.ButtonCheck( { title: 'title' } );
+        var model = button.model;
+        $( 'body' ).prepend( button.$el );
+        ok( button.$title.html() == 'title', 'Has correct title' );
+        model.set( 'title', '_title' );
+        ok( button.$title.html() == '_title', 'Has correct new title' );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 0 ] ), 'Has correct ' + model.get( 'value' ) + ' value' );
+        button.value( 1 );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 1 ] ), 'Has correct ' + model.get( 'value' ) + ' value' );
+        button.value( 2 );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 2 ] ), 'Has correct ' + model.get( 'value' ) + ' value' );
+        button.value( 0, 100 );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 0 ] ), 'Has correct ' + model.get( 'value' ) + ' value after fraction' );
+        button.value( 10, 100 );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 1 ] ), 'Has correct ' + model.get( 'value' ) + ' value after fraction' );
+        button.value( 100, 100 );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 2 ] ), 'Has correct ' + model.get( 'value' ) + ' value after fraction' );
+        button.$el.trigger( 'click' );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 0 ] ), 'Has correct ' + model.get( 'value' ) + ' value after click' );
+        button.$el.trigger( 'click' );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 2 ] ), 'Has correct ' + model.get( 'value' ) + ' value after click' );
+        button.$el.trigger( 'click' );
+        ok( button.$icon.hasClass( button.model.get( 'icons' )[ 0 ] ), 'Has correct ' + model.get( 'value' ) + ' value after click' );
+    });
+
+    test( 'options', function() {
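+        // Helper: asserts the widget's rendered state against the expected options.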
+        function _test( obj, options ) {
+            ok( JSON.stringify( obj.value() ) == JSON.stringify( options.value ), 'Selected value is ' + options.value );
+            ok( obj.$menu.css( 'display' ) == ( options.menu_visible ? 'block' : 'none' ), 'Menu visibility: ' + options.menu_visible );
+            ok( obj.$message.css( 'display' ) == ( options.message_visible ? 'block' : 'none' ), 'Message visibility: ' + options.message_visible );
+            ok( obj.$options.css( 'display' ) == ( options.options_visible ? 'inline-block' : 'none' ), 'Options visibility: ' + options.options_visible );
+            options.message_cls && ok( obj.$message.hasClass( options.message_cls ), 'Message has class: ' + options.message_cls );
+            ok( obj.length() === options.length, 'Number of options: ' + options.length );
+            options.message_text && ok( obj.$message.html() === options.message_text, 'Message text is: ' + options.message_text );
+            options.first && ok( obj.first() === options.first, 'First value is: ' + options.first );
+            options.all_icon && ok( obj.all_button.$( '.icon' ).hasClass( options.all_icon ), 'All button in correct state: ' + options.all_icon );
+            ok( obj.$menu.find( '.ui-button-check' ).length === ( options.all_icon ? 1 : 0 ), 'All button available: ' + Boolean( options.all_icon ) );
+        }
+
+        var radio = new Ui.Radio.View({});
+        $( 'body' ).prepend( radio.$el );
+        radio.model.set( 'visible', false );
+        ok( radio.value() === null, 'Initial value is `null`.' );
+        ok( radio.$el.css( 'display' ) === 'none', 'Options hidden.' );
+        radio.model.set( 'visible', true );
+        ok( radio.$el.css( 'display' ) === 'block', 'Options shown.' );
+        radio.model.set( 'value', 'Unavailable.' );
+        ok( radio.value() === null, 'Unavailable value ignored.' );
+        _test( radio, {
+            menu_visible: false,
+            message_visible: true,
+            message_text: 'No options available.',
+            message_cls: 'alert-danger',
+            options_visible: false,
+            value: null,
+            length: 0
+        });
+        radio.model.set( 'wait', true );
+        _test( radio, {
+            menu_visible: false,
+            message_visible: true,
+            message_text: 'Please wait...',
+            message_cls: 'alert-info',
+            options_visible: false,
+            value: null,
+            length: 0
+        });
+        radio.model.set( 'wait', false );
+        _test( radio, {
+            menu_visible: false,
+            message_visible: true,
+            message_text: 'No options available.',
+            message_cls: 'alert-danger',
+            options_visible: false,
+            value: null,
+            length: 0
+        });
+        radio.model.set( 'data', [ { value: 'valuea', label: 'labela' }, { value: 'valueb', label: 'labelb' } ] );
+        _test( radio, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: 'valuea',
+            first: 'valuea',
+            length: 2
+        });
+        radio.model.set( 'value', 'valueb' );
+        _test( radio, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: 'valueb',
+            first: 'valuea',
+            length: 2
+        });
+        radio.model.set( 'data', null );
+        _test( radio, {
+            menu_visible: false,
+            message_visible: true,
+            message_text: 'No options available.',
+            message_cls: 'alert-danger',
+            options_visible: false,
+            value: null,
+            first: null,
+            length: 0
+        });
+        radio.model.set( 'data', [ { value: 'valuea', label: 'labela' }, { value: 'valueb', label: 'labelb' }, { value: 'valuec', label: 'labelc' } ] );
+        _test( radio, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: 'valueb',
+            first: 'valuea',
+            length: 3
+        });
+        radio.$( 'input' ).last().click();
+        _test( radio, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: 'valuec',
+            first: 'valuea',
+            length: 3
+        });
+
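+        // Checkbox widget: multi-select; the 'all' button icon reflects none/some/all selected.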
+        var check = new Ui.Checkbox.View({});
+        $( 'body' ).prepend( check.$el );
+        _test( check, {
+            menu_visible: false,
+            message_visible: true,
+            message_text: 'No options available.',
+            message_cls: 'alert-danger',
+            options_visible: false,
+            value: null,
+            length: 0,
+            all_icon: 'fa-square-o'
+        });
+        check.model.set( 'data', [ { value: 'valuea', label: 'labela' }, { value: 'valueb', label: 'labelb' }, { value: 'valuec', label: 'labelc' } ] );
+        _test( check, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: null,
+            length: 3,
+            all_icon: 'fa-square-o'
+        });
+        check.model.set( 'value', [ 'valuea', 'valuec' ] );
+        _test( check, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: [ 'valuea', 'valuec' ],
+            length: 3,
+            all_icon: 'fa-minus-square-o'
+        });
+        check.model.set( 'value', [ 'valuea', 'valueb', 'valuec' ] );
+        _test( check, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: [ 'valuea', 'valueb', 'valuec' ],
+            length: 3,
+            all_icon: 'fa-check-square-o'
+        });
+        check.model.set( 'data', [] );
+        _test( check, {
+            menu_visible: false,
+            message_visible: true,
+            options_visible: false,
+            value: null,
+            length: 0,
+            all_icon: 'fa-square-o'
+        });
+        check.model.set( 'data', [ { value: 'valuea', label: 'labela' }, { value: 'valueb', label: 'labelb' } ] );
+        _test( check, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: [ 'valuea', 'valueb' ],
+            first: 'valuea',
+            length: 2,
+            all_icon: 'fa-check-square-o'
+        });
+        check.all_button.$el.click();
+        _test( check, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: null,
+            first: 'valuea',
+            length: 2,
+            all_icon: 'fa-square-o'
+        });
+        check.all_button.$el.click();
+        _test( check, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: [ 'valuea', 'valueb' ],
+            first: 'valuea',
+            length: 2,
+            all_icon: 'fa-check-square-o'
+        });
+        check.$( 'input' ).last().click();
+        _test( check, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: [ 'valuea' ],
+            first: 'valuea',
+            length: 2,
+            all_icon: 'fa-minus-square-o'
+        });
+        check.$( 'input' ).last().click();
+        _test( check, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: [ 'valuea', 'valueb' ],
+            first: 'valuea',
+            length: 2,
+            all_icon: 'fa-check-square-o'
+        });
+
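+        // RadioButton widget: single-select rendered as a group of toggle buttons.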
+        var radiobutton = new Ui.RadioButton.View({});
+        $( 'body' ).prepend( radiobutton.$el );
+        radiobutton.model.set( 'data', [ { value: 'valuea', label: 'labela' }, { value: 'valueb', label: 'labelb' } ] );
+        _test( radiobutton, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: 'valuea',
+            first: 'valuea',
+            length: 2
+        });
+        radiobutton.$( 'input' ).last().click();
+        _test( radiobutton, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: 'valueb',
+            first: 'valuea',
+            length: 2
+        });
+
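+        // Drilldown widget: nested options are flattened, so all 14 nested values are counted.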
+        var drilldown = new Drilldown.View({});
+        $( 'body' ).prepend( drilldown.$el );
+        drilldown.model.set( 'data', [ { value: 'valuea', name: 'labela', options: [
+                                            { value: 'valueb', name: 'labelb' },
+                                            { value: 'valuec', name: 'labelc' },
+                                            { value: 'valued', name: 'labeld', options: [
+                                                { value: 'valuee', name: 'labele' },
+                                                { value: 'valuef', name: 'labelf' } ] } ] },
+                                       { value: 'valueg', name: 'labelg', options: [
+                                            { value: 'valueh', name: 'labelh' },
+                                            { value: 'valuei', name: 'labeli' },
+                                            { value: 'valuej', name: 'labelj', options: [
+                                                { value: 'valuek', name: 'labelk' },
+                                                { value: 'valuel', name: 'labell' },
+                                                { value: 'valuem', name: 'labelm' } ] } ] },
+                                       { value: 'valuen', name: 'labeln' } ] );
+        _test( drilldown, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: null,
+            first: 'valuea',
+            length: 14,
+            all_icon: 'fa-square-o'
+        });
+        drilldown.model.set( 'value', [ 'valuek', 'valuen' ] );
+        _test( drilldown, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: [ 'valuek', 'valuen' ],
+            first: 'valuea',
+            length: 14,
+            all_icon: 'fa-minus-square-o'
+        });
+        var drillradio = new Drilldown.View( { display: 'radio' } );
+        $( 'body' ).prepend( drillradio.$el );
+        _test( drillradio, {
+            menu_visible: false,
+            message_visible: true,
+            options_visible: false,
+            value: null,
+            length: 0
+        });
+        drillradio.model.set( 'data', drilldown.model.get( 'data' ) );
+        _test( drillradio, {
+            menu_visible: true,
+            message_visible: false,
+            options_visible: true,
+            value: 'valuea',
+            first: 'valuea',
+            length: 14
+        });
+    });
+
+    test( 'select-default', function() {
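+        // Helper: checks selected value/label, visibility, option count and multi-select state.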
+        function _test( options ) {
+            ok( JSON.stringify( select.value() ) == JSON.stringify( options.value ), 'Selected value is ' + options.value );
+            ok( select.text() == options.label, 'Selected label is ' + options.label );
+            ok( select.$el.css( 'display' ) === ( options.visible ? 'block' : 'none' ), options.visible ? 'Visible' : 'Hidden' );
+            ok( select.data.length === options.count && select.length() === options.count, 'Found ' + options.count + ' option(s)' );
+            options.exists && ok( select.exists( options.exists ), 'Found value: ' + options.exists );
+            ok( select.$select.prop( 'multiple' ) === Boolean( options.multiple ), 'Multiple state set to: ' + options.multiple );
+            ok( Boolean( select.all_button ) === Boolean( options.multiple ), 'Visibility of select all button correct.' );
+            options.multiple && ok( select.all_button.$( '.icon' ).hasClass( options.all_icon ), 'All button in correct state: ' + options.all_icon );
+        }
+        var select = new Ui.Select.View({});
+        $( 'body' ).prepend( select.$el );
+        ok( select.first() === '__null__', 'First select is \'__null__\'' );
+        ok( select.$dropdown.hasClass( 'fa-caret-down' ), 'Caret down shown.' );
+        select.model.set( 'data', [ { value: 'value', label: 'label' } ] );
+        _test({
+            value   : 'value',
+            label   : 'label',
+            visible : true,
+            count   : 1
+        });
+        select.model.set( 'data', [ { value: 'valuea', label: 'labela' }, { value: 'valueb', label: 'labelb' } ] );
+        _test({
+            value   : 'valuea',
+            label   : 'labela',
+            visible : true,
+            count   : 2,
+            exists  : 'valueb'
+        });
+        select.value( 'valueb' );
+        _test({
+            value   : 'valueb',
+            label   : 'labelb',
+            visible : true,
+            count   : 2
+        });
+        select.model.set( 'data', [ { value: 'value', label: 'label' } ] );
+        _test({
+            value   : 'value',
+            label   : 'label',
+            visible : true,
+            count   : 1
+        });
+        select.model.set( { visible: false, value: 'unavailable' } );
+        _test({
+            value   : 'value',
+            label   : 'label',
+            visible : false,
+            count   : 1
+        });
+        select.model.set( { visible: true, value: 'valueb', data: [ { value: 'valuea', label: 'labela' }, { value: 'valueb', label: 'labelb' } ] } );
+        _test({
+            value   : 'valueb',
+            label   : 'labelb',
+            visible : true,
+            count   : 2,
+            exists  : 'valuea'
+        });
+        select.model.set( { multiple: true } );
+        _test({
+            value   : [ 'valueb' ],
+            label   : 'labelb',
+            visible : true,
+            count   : 2,
+            exists  : 'valuea',
+            multiple: true,
+            all_icon: 'fa-minus-square-o'
+        });
+        select.model.set( 'value', [ 'valueb', 'valuea' ] );
+        _test({
+            value   : [ 'valuea', 'valueb' ],
+            label   : 'labela',
+            visible : true,
+            count   : 2,
+            exists  : 'valueb',
+            multiple: true,
+            all_icon: 'fa-check-square-o'
+        });
+        select.model.set( 'value', [] );
+        _test({
+            value   : null,
+            label   : '',
+            visible : true,
+            count   : 2,
+            exists  : 'valuea',
+            multiple: true,
+            all_icon: 'fa-square-o'
+        });
+        select.model.set( { multiple: false } );
+        _test({
+            value   : 'valuea',
+            label   : 'labela',
+            visible : true,
+            count   : 2,
+            exists  : 'valuea'
+        });
+        select.model.set( { visible: false } );
+        _test({
+            value   : 'valuea',
+            label   : 'labela',
+            visible : false,
+            count   : 2,
+            exists  : 'valuea'
+        });
+        select.model.set( { multiple: true, visible: true, value: [ 'valueb', 'valuec' ],  data: [ { value: 'valuea', label: 'labela' }, { value: 'valueb', label: 'labelb' }, { value: 'valuec', label: 'labelc' } ] } );
+        _test({
+            value   : [ 'valueb', 'valuec' ],
+            label   : 'labelb',
+            visible : true,
+            count   : 3,
+            exists  : 'valuea',
+            multiple: true,
+            all_icon: 'fa-minus-square-o'
+        });
+    } );
+
+    test( 'label', function() {
+        var label = new Ui.Label({
+            title   : '_title'
+        });
+        $( 'body' ).prepend( label.$el );
+        ok( label.$el.html() === '_title', 'Correct title' );
+        label.model.set( 'title', '_new_title' );
+        ok( label.$el.html() === '_new_title', 'Correct new title' );
+    } );
+
+    test( 'input', function() {
+        var input = new Ui.Input();
+        $( 'body' ).prepend( input.$el );
+        ok( input.tagName === 'input', 'Created input.' );
+        ok( input.value() === undefined, 'Input empty.' );
+        input.model.set( 'value', '_value' );
+        ok( input.$el.val() === '_value', 'Input with value.' );
+        ok( !input.$el.hasClass( '_cls' ), 'Has no custom class.' );
+        input.model.set( 'cls', '_cls' );
+        ok( input.$el.hasClass( '_cls' ), 'Has custom class.' );
+        ok( !input.$el.attr( 'placeholder' ), 'Has no placeholder' );
+        input.model.set( 'placeholder', '_placeholder' );
+        ok( input.$el.attr( 'placeholder' ) === '_placeholder', 'Has correct placeholder' );
+        input.model.set( 'disabled', true );
+        ok( input.$el.attr( 'disabled' ), 'Disabled' );
+        input.model.set( 'disabled', false );
+        ok( !input.$el.attr( 'disabled' ), 'Enabled' );
+        input.model.set( 'visible', false );
+        ok( input.$el.css( 'display' ) === 'none', 'Hidden' );
+        input.model.set( 'visible', true );
+        ok( input.$el.css( 'display' ) === 'inline-block', 'Shown' );
+    } );
+
+    test( 'textarea', function() {
+        var input = new Ui.Input( { area: true } );
+        $( 'body' ).prepend( input.$el );
+        ok( input.tagName === 'textarea', 'Created textarea.' );
+        ok( input.value() === undefined, 'Unavailable value.' );
+        input.model.set( 'value', '_value' );
+        ok( input.value() === '_value', 'Correct new value.' );
+        ok( !input.$el.hasClass( '_cls' ), 'Has no custom class.' );
+        input.model.set( 'cls', '_cls' );
+        ok( input.$el.hasClass( '_cls' ), 'Has custom class.' );
+    } );
+
+    test( 'message', function() {
+        var message = new Ui.Message({
+            persistent  : true,
+            message     : '_message',
+            status      : 'danger'
+        });
+        $( 'body' ).prepend( message.$el );
+        ok( message.$el.hasClass( 'alert-danger' ), 'Alert danger.' );
+        message.model.set( 'status', 'info' );
+        ok( !message.$el.hasClass( 'alert-danger' ), 'Alert danger (disabled).' );
+        ok( message.$el.hasClass( 'alert-info' ), 'Alert info.' );
+        ok( message.$el.html() === '_message', 'Correct message.' );
+        message.model.set( 'message', '_new_message' );
+        ok( message.$el.html() === '_new_message', 'Correct new message.' );
+    } );
+
+    test( 'hidden', function() {
+        var hidden = new Ui.Hidden();
+        $( 'body' ).prepend( hidden.$el );
+        hidden.model.set( 'info', '_info' );
+        ok( hidden.$info.css( 'display' ) === 'block', 'Info shown.' );
+        ok( hidden.$info.html() === '_info', 'Info text correct.' );
+        hidden.model.set( 'info', '' );
+        ok( hidden.$info.css( 'display' ) === 'none', 'Info hidden.' );
+        hidden.model.set( 'value', '_value' );
+        ok( hidden.$hidden.val() === '_value', 'Correct value' );
+    } );
+
+    test( 'select-content', function() {
+        var select = new SelectContent.View({});
+        $( 'body' ).prepend( select.$el );
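+        // Helpers: _testSelect checks a single select field; _test checks the whole view.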
+        var _testSelect = function( tag, options ) {
+            var field = select.fields[ tag == 'first' ? 0 : select.fields.length - 1 ];
+            var $select = select.$( '.ui-select:' + tag );
+            var $button = select.$( '.ui-radiobutton' ).find( 'label:' + tag );
+            ok ( field.length() == options[ tag + 'length' ], tag + ' one has ' + options[ tag + 'length' ] + ' options' );
+            ok ( field.data[ 0 ].value == options[ tag + 'value' ], tag + ' option has correct value' );
+            ok ( field.data[ 0 ].label == options[ tag + 'label' ], tag + ' option has correct label' );
+            ok ( $select.hasClass( 'ui-select-multiple' ) == options[ tag + 'multiple' ], 'Check multiple option' );
+            $button.trigger( 'mouseover' );
+            var tooltip = $( '.tooltip-inner:last' ).text();
+            $button.trigger( 'mouseleave' );
+            ok( tooltip.indexOf( 'dataset' ) != -1 || tooltip.indexOf( 'collection' ) != -1, 'Basic tooltip check' );
+        };
+        var _test = function( options ) {
+            ok ( select.button_type.$( '.ui-option:first' ).hasClass( 'active' ), 'First one is toggled' );
+            ok ( select.$( '.ui-select' ).length == options.selectfields, 'Found ' + options.selectfields + ' select fields' );
+            ok ( select.button_type.$( '.ui-option' ).length == options.selectfields, 'Found ' + options.selectfields + ' radio button options' );
+            ok ( select.$( '.ui-select-multiple' ).length == options.totalmultiple, 'Contains ' + options.totalmultiple + ' multiselect fields' );
+            ok ( select.$el.children( '.ui-options' ).find( '.ui-option' ).length === ( options.selectfields > 1 ? options.selectfields : 0 ), 'Radio button count' );
+            ok ( select.$( '.ui-select:first' ).css( 'display' ) == 'block', 'Check select visibility' );
+            ok ( select.$( '.ui-select:last' ).css( 'display' ) == ( options.selectfields == 1 ? 'block' : 'none' ), 'Last select visibility' );
+            _testSelect( 'first', options );
+            _testSelect( 'last', options );
+        };
+
+        ok ( select.button_type.value() == 0, 'Initial mode selected by default.' );
+        select.model.set( 'data', { 'hda':  [{ id: 'id0', name: 'name0', hid: 'hid0' },
+                                             { id: 'id1', name: 'name1', hid: 'hid1' }],
+                                    'hdca': [{ id: 'id2', name: 'name2', hid: 'hid2' },
+                                             { id: 'id3', name: 'name3', hid: 'hid3' },
+                                             { id: 'id4', name: 'name4', hid: 'hid4' }] } );
+
+        var initial = { selectfields    : 3,
+                        firstlength     : 2,
+                        firstvalue      : 'id0',
+                        firstlabel      : 'hid0: name0',
+                        firstmultiple   : false,
+                        totalmultiple   : 1,
+                        lastvalue       : 'id2',
+                        lastlabel       : 'hid2: name2',
+                        lastlength      : 3,
+                        lastmultiple    : false };
+        _test( initial );
+
+        select.model.set( 'multiple', true );
+        select.model.set( 'type', 'data' );
+        _test({ selectfields    : 2,
+                firstlength     : 2,
+                firstvalue      : 'id0',
+                firstlabel      : 'hid0: name0',
+                firstmultiple   : true,
+                totalmultiple   : 1,
+                lastvalue       : 'id2',
+                lastlabel       : 'hid2: name2',
+                lastlength      : 3,
+                lastmultiple    : false });
+
+        select.model.set( 'multiple', false );
+        select.model.set( 'type', 'data_collection' );
+        _test({ selectfields    : 1,
+                firstlength     : 3,
+                firstvalue      : 'id2',
+                firstlabel      : 'hid2: name2',
+                firstmultiple   : false,
+                totalmultiple   : 0,
+                lastvalue       : 'id2',
+                lastlabel       : 'hid2: name2',
+                lastlength      : 3,
+                lastmultiple    : false });
+
+        select.model.set( 'type', 'module_data_collection' );
+        _test({ selectfields    : 2,
+                firstlength     : 3,
+                firstvalue      : 'id2',
+                firstlabel      : 'hid2: name2',
+                firstmultiple   : false,
+                totalmultiple   : 1,
+                lastvalue       : 'id2',
+                lastlabel       : 'hid2: name2',
+                lastlength      : 3,
+                lastmultiple    : true });
+
+        select.model.set( 'type', 'module_data' );
+        _test({ selectfields    : 2,
+                firstlength     : 2,
+                firstvalue      : 'id0',
+                firstlabel      : 'hid0: name0',
+                firstmultiple   : false,
+                totalmultiple   : 1,
+                lastvalue       : 'id0',
+                lastlabel       : 'hid0: name0',
+                lastlength      : 2,
+                lastmultiple    : true });
+
+        select.model.set( 'type', 'data' );
+        _test( initial );
+
+        select.model.set( 'wait', true );
+        ok ( select.$( '.icon-dropdown' ).hasClass( 'fa-spinner' ), 'Shows spinner' );
+        select.model.set( 'wait', false );
+        ok ( select.$( '.icon-dropdown' ).hasClass( 'fa-caret-down' ), 'Shows caret' );
+        select.model.set( 'optional', true );
+        ok ( select.fields[ 0 ].data[ 0 ].value == '__null__', 'First option is optional value' );
+        select.model.set( 'optional', false );
+        ok ( select.fields[ 0 ].data[ 0 ].value != '__null__', 'First option is not optional value' );
+
+        select.model.set( 'value', { values: [ { id: 'id1', src: 'hda' } ] } );
+        ok( JSON.stringify( select.value() ) == '{"values":[{"id":"id1","name":"name1","hid":"hid1"}],"batch":false}', 'Checking single value' );
+
+        ok( select.config[ select.model.get( 'current' ) ].src == 'hda', 'Matched dataset field' );
+        ok( !select.config[ select.model.get( 'current' ) ].multiple, 'Matched single select field' );
+        select.model.set( 'value', { values: [ { id: 'id0', src: 'hda' }, { id: 'id1', src: 'hda' } ] } );
+        ok( select.config[ select.model.get( 'current' ) ].multiple, 'Matched multiple field' );
+        ok( JSON.stringify( select.value() ) == '{"values":[{"id":"id0","name":"name0","hid":"hid0"},{"id":"id1","name":"name1","hid":"hid1"}],"batch":true}', 'Checking multiple values' );
+        select.model.set( 'value', { values: [ { id: 'id2', src: 'hdca' } ] } );
+        ok( select.config[ select.model.get( 'current' ) ].src == 'hdca', 'Matched collection field' );
+        ok( JSON.stringify( select.value() ) == '{"values":[{"id":"id2","name":"name2","hid":"hid2"}],"batch":true}', 'Checking collection value' );
+
+        select = new SelectContent.View({});
+        $( 'body' ).prepend( select.$el );
+        var _testEmptySelect = function( tag, txt_extension, txt_label ) {
+            var field = select.fields[ tag == 'first' ? 0 : select.fields.length - 1 ];
+            var $select = select.$( '.ui-select:' + tag );
+            ok ( field.data[ 0 ].value == '__null__', tag + ' option has correct empty value.' );
+            ok ( field.data[ 0 ].label == 'No ' + txt_extension + txt_label + ' available.', tag + ' option has correct empty label.' );
+        };
+
+        var labels = select.model.get( 'src_labels' );
+        _testEmptySelect( 'first', '', labels.hda );
+        _testEmptySelect( 'last', '', labels.hdca );
+        select.model.set( 'extensions', [ 'txt', 'bam' ] );
+        _testEmptySelect( 'first', 'txt or bam ', labels.hda );
+        _testEmptySelect( 'last', 'txt or bam ', labels.hdca );
+        select.model.set( 'extensions', [ 'txt' ] );
+        _testEmptySelect( 'first', 'txt ', labels.hda );
+        _testEmptySelect( 'last', 'txt ', labels.hdca );
+    } );
+});
\ No newline at end of file
diff --git a/test/qunit/tests/upload_dialog_tests.html b/test/qunit/tests/upload_dialog_tests.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/upload_dialog_tests.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/upload_dialog_tests.js b/test/qunit/tests/upload_dialog_tests.js
new file mode 100644
index 0000000..d969726
--- /dev/null
+++ b/test/qunit/tests/upload_dialog_tests.js
@@ -0,0 +1,49 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([ "test-app", "mvc/upload/upload-view"
+], function( testApp, GalaxyUpload ){
+    "use strict";
+    module( "Upload dialog test", {
+        setup: function( ) {
+            testApp.create();
+            this.app = new GalaxyUpload();
+        },
+        teardown: function() {
+            testApp.destroy();
+        }
+    } );
+
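+    // Clicking the upload button opens the dialog; all action buttons start out disabled.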
+    test( "test initial dialog state", function() {
+        $(this.app.ui_button.$el).trigger('click');
+        ok(this.app.default_view.collection.length == 0, 'Initial upload item collection should be empty.');
+        ok($('#btn-start').hasClass('disabled'), 'Start button should be disabled.');
+        ok($('#btn-stop').hasClass('disabled'), 'Stop button should be disabled.');
+        ok($('#btn-reset').hasClass('disabled'), 'Reset button should be disabled.');
+    } );
+
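+    // Adding a paste/fetch item enables start/reset; after a failed start the item's settings are locked.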
+    test( "test adding/removing paste/fetch upload item", function() {
+        $(this.app.ui_button.$el).trigger('click');
+        $('#btn-new').trigger('click');
+        ok(!$('#btn-start').hasClass('disabled'), 'Start button should be enabled.');
+        ok($('#btn-stop').hasClass('disabled'), 'Stop button should (still) be disabled.');
+        ok(!$('#btn-reset').hasClass('disabled'), 'Reset button should be enabled.');
+        ok(this.app.default_view.collection.length == 1, 'Upload item collection should contain one item after adding.');
+        ok($('#btn-new').find('i').hasClass('fa-edit'), 'Paste/fetch icon changed');
+        ok($('#btn-start').hasClass('btn-primary'), 'Start button should be enabled/highlighted.');
+        ok($('#upload-row-0').find('.upload-symbol').hasClass('fa-trash-o'), 'Should show regular trash icon.');
+        $('#upload-row-0').find('.upload-settings').trigger('click');
+        ok($('#upload-row-0').find('.upload-settings-cover').css('display') == 'none', 'Settings should be enabled.');
+        $('#upload-row-0').find('.popover-close').trigger('click');
+        $('#btn-start').trigger('click');
+        ok($('#upload-row-0').find('.upload-symbol').hasClass('fa-exclamation-triangle'), 'Upload attempt should have failed.');
+        $('#upload-row-0').find('.upload-settings').trigger('click');
+        ok($('#upload-row-0').find('.upload-settings-cover').css('display') == 'block', 'Settings should be disabled.');
+        $('#upload-row-0').find('.upload-symbol').trigger('click');
+        ok(this.app.default_view.collection.length == 0, 'Item should be removed from the collection.');
+    } );
+
+    test( "test ftp popup", function() {
+        $(this.app.ui_button.$el).trigger('click');
+        $('#btn-ftp').trigger('click');
+        ok($('.upload-ftp-help').length == 1, 'Should show ftp help text.');
+    } );
+});
\ No newline at end of file
diff --git a/test/qunit/tests/utils_test.html b/test/qunit/tests/utils_test.html
new file mode 100644
index 0000000..c0add87
--- /dev/null
+++ b/test/qunit/tests/utils_test.html
@@ -0,0 +1,11 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <link href="../../../static/style/blue/base.css" media="screen" rel="stylesheet" type="text/css">
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/utils_test.js b/test/qunit/tests/utils_test.js
new file mode 100644
index 0000000..a577231
--- /dev/null
+++ b/test/qunit/tests/utils_test.js
@@ -0,0 +1,20 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([ 'test-app', 'utils/utils'
+], function( testApp, Utils ){
+    'use strict';
+    module( 'Utils test', {} );
+
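+    // Utils.isEmpty also treats the string placeholders '__null__' and '__undefined__' as empty.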
+    test( 'isEmpty', function() {
+        ok( Utils.isEmpty( [] ), 'Empty array' );
+        ok( Utils.isEmpty( [ 'data', undefined ] ), 'Array contains `undefined`' );
+        ok( Utils.isEmpty( [ 'data', null ] ), 'Array contains `null`' );
+        ok( Utils.isEmpty( [ 'data', '__null__' ] ), 'Array contains `__null__`' );
+        ok( Utils.isEmpty( [ 'data', '__undefined__' ] ), 'Array contains `__undefined__`' );
+        ok( Utils.isEmpty( null ), 'Array is null' );
+        ok( Utils.isEmpty( '__null__' ), 'Array is __null__' );
+        ok( Utils.isEmpty( '__undefined__' ), 'Array is __undefined__' );
+        ok( !Utils.isEmpty( [ 'data' ] ), 'Array contains `data`' );
+        ok( !Utils.isEmpty( 1 ), 'Value is int' );
+        ok( !Utils.isEmpty( 0 ), 'Value is zero' );
+    });
+});
\ No newline at end of file
diff --git a/test/qunit/tests/workflow_editor_tests.html b/test/qunit/tests/workflow_editor_tests.html
new file mode 100644
index 0000000..00e97f6
--- /dev/null
+++ b/test/qunit/tests/workflow_editor_tests.html
@@ -0,0 +1,10 @@
+<!doctype html>
+<!-- Minimal outline test page for a requirejs+qunit testing setup,
+     test environment is bootstrapped in test-common.js -->
+<html>
+  <head>
+    <script data-main="../test-common" src="../scripts/libs/require.js"></script>
+  </head>
+  <body>
+  </body>
+</html>
diff --git a/test/qunit/tests/workflow_editor_tests.js b/test/qunit/tests/workflow_editor_tests.js
new file mode 100644
index 0000000..5b46ec8
--- /dev/null
+++ b/test/qunit/tests/workflow_editor_tests.js
@@ -0,0 +1,1137 @@
+/* global define, QUnit, module, test, ok, equal, deepEqual, notEqual */
+define([
+    "sinon-qunit",
+    "test-app",
+    'utils/utils',
+    "mvc/workflow/workflow-view",
+    "mvc/workflow/workflow-node",
+    "mvc/workflow/workflow-view-node",
+    "mvc/workflow/workflow-terminals",
+    "mvc/workflow/workflow-view-terminals",
+    "mvc/workflow/workflow-connector"
+], function(
+    sinon,
+    testApp,
+    Utils,
+    App,
+    Node,
+    NodeView,
+    Terminals,
+    TerminalsView,
+    Connector
+){
+    "use strict";
+    window.show_modal = function(a, b, c) {};
+    window.hide_modal = function() {};
+
+    // create body and app
+    var create_app = function() {
+        // build body
+        $('body').append(   '<div id="canvas-viewport">' +
+                                '<div id="canvas-container"/>' +
+                            '</div>' +
+                            '<div id="overview">' +
+                                '<canvas id="overview-canvas"/>' +
+                                '<div id="overview-viewport"/>' +
+                            '</div>');
+
+        // build app
+        return new App({
+            id      : null,
+            urls    : { get_datatypes : Galaxy.root + 'api/datatypes/mapping' },
+            workflows : []
+        });
+    };
+
+    module( "Input terminal model test", {
+        setup: function( ) {
+            testApp.create();
+            this.node = new Node( create_app(), {  } );
+            this.input = { extensions: [ "txt" ], multiple: false };
+            this.input_terminal = new Terminals.InputTerminal( { input: this.input } );
+            this.input_terminal.node = this.node;
+        },
+        teardown: function() {
+            testApp.destroy();
+        },
+        multiple: function( ) {
+            this.input.multiple = true;
+            this.input_terminal.update( this.input );
+        },
+        test_connector: function( ) {
+            var outputTerminal = new Terminals.OutputTerminal( { datatypes: [ 'input' ] } );
+            var inputTerminal = this.input_terminal;
+            var connector = new Connector( outputTerminal, inputTerminal );
+            return connector;
+        },
+        with_test_connector: function( f ) {
+            this.test_connector( );
+            f();
+            this.reset_connectors();
+        },
+        reset_connectors: function( ) {
+            this.input_terminal.connectors = [];
+        },
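+        // Returns whether this input terminal can accept 'other' (defaults to a plain txt output).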
+        test_accept: function( other ) {
+            other = other || { node: {}, datatypes: [ "txt" ] };
+            if( ! other.mapOver ) {
+                other.mapOver = function() { return Terminals.NULL_COLLECTION_TYPE_DESCRIPTION; };
+            }
+            return this.input_terminal.canAccept( other );
+        },
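+        // Builds a stub upstream node whose post-job action converts 'output_name' to 'newtype'.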
+        pja_change_datatype_node: function( output_name, newtype ) {
+            var pja = { action_type: "ChangeDatatypeAction", output_name: output_name, action_arguments: { newtype: newtype } };
+            var otherNode = { post_job_actions: [ pja ] };
+            return otherNode;
+        }
+    } );
+
+    test( "test update", function() {
+        deepEqual( this.input_terminal.datatypes, [ 'txt' ] );
+        equal( this.input_terminal.multiple, false );
+        this.input_terminal.update( { extensions: [ 'bam' ], multiple: true } );
+        deepEqual( this.input_terminal.datatypes, [ 'bam' ] );
+        equal( this.input_terminal.multiple, true );
+    } );
+
+    test( "test connect", function() {
+        this.node.markChanged = sinon.spy();
+        var connector = {};
+        this.input_terminal.connect( connector );
+        // Assert node markChanged called
+        ok( this.node.markChanged.called );
+        // Assert connectors updated
+        ok( this.input_terminal.connectors[ 0 ] === connector );
+    } );
+
+    test( "test disconnect", function() {
+        this.node.markChanged = sinon.spy();
+        var connector = this.test_connector( );
+        this.input_terminal.disconnect( connector );
+        // Assert node markChanged called
+        ok( this.node.markChanged.called );
+        // Assert connectors updated
+        equal( this.input_terminal.connectors.length, 0 );
+    } );
+
+    test( "test redraw", function() {
+        var connector = this.test_connector(  );
+        connector.redraw = sinon.spy();
+        this.input_terminal.redraw();
+        // Assert connectors were redrawn
+        ok( connector.redraw.called );
+    } );
+
+    test( "test destroy", function() {
+        var connector = this.test_connector();
+        connector.destroy = sinon.spy();
+        this.input_terminal.destroy();
+        // Assert connectors were destroyed
+        ok( connector.destroy.called );
+    } );
+
+    test( "can accept exact datatype", function() {
+        var other = { node: {}, datatypes: [ "txt" ] }; // input also txt
+        ok( this.test_accept( other ) );
+    } );
+
+    test( "can accept subclass datatype", function() {
+        var other = { node: {}, datatypes: [ "tabular" ] }; // tabular subclass of input txt
+        ok( this.test_accept( other ) ) ;
+    } );
+
+    test( "cannot accept incorrect datatype", function() {
+        var other = { node: {}, datatypes: [ "binary" ] }; // binary is not txt
+        ok( ! this.test_accept( other ) );
+    } );
+
+    test( "can accept incorrect datatype if converted with PJA", function() {
+        var otherNode = this.pja_change_datatype_node( "out1", "txt" );
+        var other = { node: otherNode, datatypes: [ "binary" ], name: "out1" }; // Was binary but converted to txt
+        ok( this.test_accept( other ) );
+    } );
+
+    test( "cannot accept incorrect datatype if converted with PJA to incompatible type", function() {
+        var otherNode = this.pja_change_datatype_node( "out1", "bam" ); // bam's are not txt
+        var other = { node: otherNode, datatypes: [ "binary" ], name: "out1" };
+        ok( ! this.test_accept( other ) );
+    } );
+
+    test( "cannot accept incorrect datatype if some other output converted with PJA to compatible type", function() {
+        var otherNode = this.pja_change_datatype_node( "out2", "txt" );
+        var other = { node: otherNode, datatypes: [ "binary" ], name: "out1" };
+        ok( ! this.test_accept( other ) );
+    } );
+
+    test( "can accept inputs", function() {
+        // Other is data input module - always accept (currently - could be
+        // more intelligent by looking at what else input is connected to).
+        var other = { node: {}, datatypes: [ "input" ] };
+        ok( this.test_accept( other ) );
+    } );
+
+    test( "input type can accept any datatype", function() {
+        this.input.extensions = [ "input" ];
+        this.input_terminal.update( this.input );
+        var other = { node: {}, datatypes: [ "binary" ] };
+        ok( this.test_accept( other ) );
+    } );
+
+    test( "cannot accept when already connected", function() {
+        var self = this;
+        // If other is subtype but already connected, cannot accept
+        this.with_test_connector( function() {
+            ok( ! self.test_accept() );
+        } );
+    } );
+
+    test( "can accept already connected inputs if input is multiple", function() {
+        var self = this;
+        this.multiple();
+        this.with_test_connector( function() {
+            ok( self.test_accept() );
+        } );
+    } );
+
+    test( "cannot accept already connected inputs if input is multiple but datatypes don't match", function() {
+        var other = { node: {}, datatypes: [ "binary" ] }; // binary is not txt
+
+        var self = this;
+        this.multiple();
+        this.with_test_connector( function() {
+            ok( ! self.test_accept( other ) );
+        } );
+    } );
+
+    test( "can accept list collection for multiple input parameters if datatypes match", function() {
+        var self = this;
+        this.multiple();
+        ok( self.test_accept() );
+    } );
+
+    test( "can accept list collection for empty multiple inputs", function() {
+        var other = { node: {}, datatypes: [ "tabular" ], mapOver: function() { return new Terminals.CollectionTypeDescription( "list" ) } };
+        var self = this;
+        this.multiple();
+        ok( self.test_accept( other ) );
+    } );
+
+    test( "cannot accept list collection for multiple input if collection already connected", function() {
+        var other = { node: {}, datatypes: [ "tabular" ], mapOver: function() { return new Terminals.CollectionTypeDescription( "list" ) } };
+        var self = this;
+        this.multiple();
+        this.with_test_connector( function() {
+            ok( ! self.test_accept( other ) );
+        } );
+    } );
+
+    module( "Connector test", {});
+
+    test( "connects only if both valid handles", function() {
+        var input = { connect: sinon.spy() };
+        var output = { connect: sinon.spy() };
+        new Connector( input, null );
+        new Connector( null, output );
+        // Not attempts to connect...
+        ok( ! input.connect.called );
+        ok( ! output.connect.called );
+        new Connector( input, output );
+        ok( input.connect.called );
+        ok( output.connect.called );
+    });
+
+    test( "default attributes", function() {
+        var input = { connect: sinon.spy() };
+        var output = { connect: sinon.spy() };
+        var connector = new Connector( input, output );
+        equal( connector.dragging, false );
+        equal( connector.canvas, null );
+        equal( connector.inner_color, "#FFFFFF" );
+        equal( connector.outer_color, "#D8B365" );
+    } );
+
+    test( "destroy", function() {
+        var input = { connect: sinon.spy(), disconnect: sinon.spy() };
+        var output = { connect: sinon.spy(), disconnect: sinon.spy() };
+        var connector = new Connector( input, output );
+        connector.destroy();
+        ok( input.disconnect.called );
+        ok( output.disconnect.called );
+    } );
+
+    test( "initial redraw", function() {
+        var input = { connect: sinon.spy(), element: $("<div>"), isMappedOver: function() { return false; } };
+        var output = { connect: sinon.spy(), element: $("<div>"), isMappedOver: function() { return false; } };
+        var connector = new Connector( input, output );
+        var n = $('#canvas-container').find('canvas').length;
+        connector.redraw();
+        // Ensure canvas gets set
+        ok( connector.canvas );
+        // Ensure it got added to canvas container
+        equal( $('#canvas-container').find('canvas').length, n + 1 );
+    } );
+
+    module( "Input collection terminal model test", {
+        setup: function( ) {
+            testApp.create();
+            this.node = new Node( create_app(), {  } );
+            this.input = { extensions: [ "txt" ], collection_types: ["list"] };
+            this.input_terminal = new Terminals.InputCollectionTerminal( { input: this.input } );
+            this.input_terminal.node = this.node;
+        }
+    } );
+
+    test( "Collection output can connect to same collection input type", function() {
+        var self = this;
+        var inputTerminal = self.input_terminal;
+        ok( inputTerminal );
+        var outputTerminal = new Terminals.OutputCollectionTerminal( {
+            datatypes: 'txt',
+            collection_type: 'list'
+        } );
+        outputTerminal.node = {};
+        ok( inputTerminal.canAccept( outputTerminal ), "Input terminal " + inputTerminal + " can not accept " + outputTerminal );
+    } );
+
+    test( "Collection output cannot connect to different collection input type", function() {
+        var self = this;
+        var inputTerminal = self.input_terminal;
+        var outputTerminal = new Terminals.OutputCollectionTerminal( {
+            datatypes: 'txt',
+            collection_type: 'paired'
+        } );
+        outputTerminal.node = {};
+        ok( ! inputTerminal.canAccept( outputTerminal ) );
+    } );
+
+    module( "Node unit test", {
+        setup: function() {
+            testApp.create();
+            this.input_terminal = { destroy: sinon.spy(), redraw: sinon.spy() };
+            this.output_terminal = { destroy: sinon.spy(), redraw: sinon.spy() };
+            this.app = create_app();
+            this.element = this.app.$newNodeElement( "tool", "newnode" );
+            this.node = new Node( this.app, { element: this.element } );
+            this.node.input_terminals.i1 = this.input_terminal;
+            this.node.output_terminals.o1 = this.output_terminal;
+        },
+        $: function( selector ) {
+            return $( this.node.element.find( selector ) );
+        },
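+        // Runs 'f' and asserts that the workflow was notified of a change to this node.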
+        expect_workflow_node_changed: function( f ) {
+            var node = this.node;
+            var node_changed_spy = sinon.spy( this.app.workflow, "node_changed" );
+            f();
+            ok( node_changed_spy.calledWith( node ) );
+        },
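+        // Initializes the node with a single generic data input and output.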
+        init_field_data_simple: function(option_overrides) {
+            var data = Utils.merge(option_overrides, {
+                data_inputs: [ {name: "input1", extensions: [ "data" ] } ],
+                data_outputs: [ {name: "output1", extensions: [ "data" ] } ],
+                label: null,
+            });
+            this.node.init_field_data( data );
+        },
+        update_field_data_with_new_input: function(option_overrides) {
+            var new_data = Utils.merge(option_overrides, {
+                data_inputs: [
+                    { name: "input1", extensions: [ "data" ] },
+                    { name: "extra_0|input1", extensions: [ "data" ] },
+                ],
+                data_outputs: [ {name: "output1", extensions: [ "data" ] } ],
+                post_job_actions: "{}",
+                label: "New Label"
+            });
+            this.node.update_field_data( new_data );
+        }
+    } );
+
+    test( "make active", function() {
+        ok( ! this.element.hasClass( "toolForm-active" ) );
+        this.node.make_active();
+        ok( this.element.hasClass( "toolForm-active" ) );
+    } );
+
+    test( "destroy", function() {
+        var remove_node_spy = sinon.spy( this.app.workflow, "remove_node" );
+        this.node.destroy();
+        ok( this.input_terminal.destroy.called );
+        ok( this.output_terminal.destroy.called );
+        ok( remove_node_spy.calledWith( this.node ) );
+    } );
+
+    test( "error", function() {
+        // Test body of toolFormBody div updated and workflow notified of change.
+        var test = this;
+        this.expect_workflow_node_changed( function() {
+            test.node.error( "TOOL ERROR" );
+            equal( $( test.$(".toolFormBody").children()[ 0 ] ).html(), "TOOL ERROR" );
+        } );
+    } );
+
+    test( "init_field_data properties", function() {
+        var node = this.node;
+        this.expect_workflow_node_changed( function( ) {
+            var data = {
+                data_inputs: [],
+                data_outputs: [],
+                type: "tool",
+                name: "cat1",
+                form_html: "<form>",
+                tool_state: "ok",
+                tool_errors: false,
+                tooltip: "tool tooltip",
+                annotation: "tool annotation",
+                workflow_outputs: [ {"output_name": "out1"} ],
+                label: "Cat that data.",
+            };
+            node.init_field_data( data );
+            equal( node.type, "tool" );
+            equal( node.name, "cat1" );
+            equal( node.form_html, "<form>" );
+            equal( node.tool_state, "ok" );
+            equal( node.tooltip, "tool tooltip" );
+            equal( node.annotation, "tool annotation" );
+            equal( node.label, "Cat that data." );
+            deepEqual( node.post_job_actions, {} );
+            deepEqual( node.workflow_outputs, [  {"output_name": "out1"} ] );
+        } );
+    } );
+
+    test( "init_field_data data", function() {
+        var test = this;
+        this.expect_workflow_node_changed( function( ) {
+            // pre-init not tool form body...
+            equal( test.$( ".output-terminal" ).length, 0 );
+            equal( test.$( ".input-terminal" ).length, 0 );
+            equal( test.$( ".rule" ).length, 0 );
+            test.init_field_data_simple();
+            // After init the tool form should have three "rows"/divs - inputs div, one output, and rule...
+            equal( test.$( ".output-terminal" ).length, 1 );
+            equal( test.$( ".input-terminal" ).length, 1 );
+            equal( test.$( ".rule" ).length, 1 );
+            equal( test.$( ".toolFormBody" ).children().length, 3 );
+            equal( test.$( ".nodeTitle" ).text(), "newnode" );
+            ok( test.$( ".toolFormTitle" ).find("i").hasClass("fa-wrench") );
+        } );
+    } );
+
+    test( "node title behavior", function() {
+        var test = this;
+        this.expect_workflow_node_changed( function( ) {
+            // Node created with name newnode
+            equal( test.$( ".nodeTitle" ).text(), "newnode" );
+            // init_field_data_simple doesn't change label, so it should
+            // remain original name.
+            test.init_field_data_simple();
+            equal( test.$( ".nodeTitle" ).text(), "newnode" );
+            // Despite awkward name, update does change the label...
+            test.update_field_data_with_new_input();
+            equal( test.$( ".nodeTitle" ).text(), "New Label" );
+        });
+    });
+
+    test( "update_field_data updated data inputs and outputs", function() {
+        var test = this;
+        this.expect_workflow_node_changed( function( ) {
+            // Call init with one input and output.
+            test.init_field_data_simple();
+
+            test.update_field_data_with_new_input();
+
+            // Now there are 2 inputs...
+            equal( test.$( ".input-terminal" ).length, 2 );
+            equal( test.$( ".output-terminal" ).length, 1 );
+            equal( test.$( ".rule" ).length, 1 );
+        } );
+    } );
+
+    test( "update_field_data preserves connectors", function() {
+        var test = this;
+        var node = this.node;
+        this.expect_workflow_node_changed( function( ) {
+            // Call init with one input and output.
+            test.init_field_data_simple();
+
+            var connector = new Connector();
+            var old_input_terminal = node.input_terminals.input1;
+            old_input_terminal.connectors.push( connector );
+
+            // Update node, make sure connector still the same...
+            test.update_field_data_with_new_input();
+            var new_input_terminal = node.input_terminals.input1;
+            equal( connector, new_input_terminal.connectors[ 0 ] );
+
+            // Update a second time, make sure connector still the same...
+            test.update_field_data_with_new_input();
+            new_input_terminal = node.input_terminals.input1;
+            equal( connector, new_input_terminal.connectors[ 0 ] );
+        } );
+    } );
+
+    test( "update_field_data destroys old terminals", function() {
+        var test = this;
+        var node = this.node;
+        this.expect_workflow_node_changed( function( ) {
+            var data = {
+                data_inputs: [ { name: "input1", extensions: [ "data" ] },
+                               { name: "willDisappear", extensions: [ "data" ] } ],
+                data_outputs: [ {name: "output1", extensions: [ "data" ] } ],
+            };
+            node.init_field_data( data );
+            var old_input_terminal = node.input_terminals.willDisappear;
+            var destroy_spy = sinon.spy( old_input_terminal, "destroy" );
+            // Update
+            test.update_field_data_with_new_input();
+            ok( destroy_spy.called );
+        } );
+    } );
+
+    module( "create_node", {
+        setup: function() {
+            this.app = create_app();
+        }
+    });
+
+    test( "node added to workflow", function() {
+        var add_node_spy = sinon.spy( this.app.workflow, "add_node" );
+        var node = this.app.workflow.create_node( "tool", "Cat Files", "cat1" );
+        ok( add_node_spy.calledWith( node ) );
+    } );
+
+    // global NodeView
+    module( "Node view ", {
+        setup: function() {
+            this.set_for_node( { input_terminals: {}, output_terminals: {}, markChanged: function() {}, terminalMapping: { disableMapOver: function() {} } } );
+        },
+        set_for_node: function( node ) {
+            var element = $("<div><div class='toolFormBody'></div></div>");
+            this.view = new NodeView( { node: node, el: element[ 0 ] } );
+        },
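+        // Each connectAttached* helper below adds an input to the node view,
+        // fabricates a stub output terminal (just enough node and
+        // terminalMapping surface for Connector to work) and returns the
+        // resulting connection.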
+        connectAttachedTerminal: function( inputType, outputType ) {
+            this.view.addDataInput( { name: "TestName", extensions: [ inputType ] } );
+            var terminal = this.view.node.input_terminals[ "TestName" ];
+
+            var outputTerminal = new Terminals.OutputTerminal( { name: "TestOuptut", datatypes: [ outputType ] } );
+            outputTerminal.node = { markChanged: function() {}, post_job_actions: [], hasMappedOverInputTerminals: function() { return false; }, hasConnectedOutputTerminals: function() { return true; } };
+            outputTerminal.terminalMapping = { disableMapOver: function() {}, mapOver: Terminals.NULL_COLLECTION_TYPE_DESCRIPTION };
+            var c = new Connector( outputTerminal, terminal );
+
+            return c;
+        },
+        connectAttachedMultiInputTerminal: function( inputType, outputType ) {
+            this.view.addDataInput( { name: "TestName", extensions: [ inputType ], multiple: true } );
+            var terminal = this.view.node.input_terminals[ "TestName" ];
+
+            var outputTerminal = new Terminals.OutputTerminal( { name: "TestOuptut", datatypes: [ "txt" ] } );
+            outputTerminal.node = { markChanged: function() {}, post_job_actions: [], hasMappedOverInputTerminals: function() { return false; }, hasConnectedOutputTerminals: function() { return true; } };
+            outputTerminal.terminalMapping = { disableMapOver: function() {}, mapOver: new Terminals.CollectionTypeDescription( "list" ) };
+            var c = new Connector( outputTerminal, terminal );
+
+            return c;
+        },
+        connectAttachedMappedOutput: function( ) {
+            this.view.addDataInput( { name: "TestName", extensions: [ "txt" ], input_type: "dataset_collection" } );
+            var terminal = this.view.node.input_terminals[ "TestName" ];
+
+            var outputTerminal = new Terminals.OutputTerminal( { name: "TestOuptut", datatypes: [ "txt" ] } );
+            outputTerminal.node = { markChanged: function() {}, post_job_actions: [], hasMappedOverInputTerminals: function() { return false; }, hasConnectedOutputTerminals: function() { return true; } };
+            outputTerminal.terminalMapping = { disableMapOver: function() {}, mapOver: new Terminals.CollectionTypeDescription( "list" ) };
+            var c = new Connector( outputTerminal, terminal );
+
+            return c;
+        }
+    } );
+
+    test( "tool error styling", function() {
+        this.set_for_node( { tool_errors: false } );
+        this.view.render();
+        ok( ! this.view.$el.hasClass( "tool-node-error" ) );
+        this.set_for_node( { tool_errors: true } );
+        this.view.render();
+        ok( this.view.$el.hasClass( "tool-node-error" ) );
+    } );
+
+    test( "rendering correct width", function() {
+        // Default width is 150
+        this.view.render();
+        equal( this.view.$el.width(), 150 );
+
+        // If any data rows are greater, it will update
+        this.view.updateMaxWidth( 200 );
+        this.view.render();
+        equal( this.view.$el.width(), 200 );
+
+        // However 250 is the maximum width of node
+        this.view.updateMaxWidth( 300 );
+        this.view.render();
+        equal( this.view.$el.width(), 250 );
+
+    } );
+
+    test( "replacing terminal on data input update preserves connections", function() {
+        var connector = this.connectAttachedTerminal( "txt", "txt" );
+        var newElement = $("<div class='inputs'></div>");
+        this.view.addDataInput( { name: "TestName", extensions: ["txt"] }, newElement );
+        var terminal = newElement.find(".input-terminal")[ 0 ].terminal;
+        ok( connector.handle2 === terminal );
+    } );
+
+    test( "replacing terminal on data multiple input update preserves collection connections", function() {
+        var connector = this.connectAttachedMultiInputTerminal( "txt", "txt" );
+        var connector_destroy_spy = sinon.spy( connector, "destroy" );
+        var newElement = $("<div class='inputs'></div>");
+        this.view.addDataInput( { name: "TestName", extensions: ["txt"], multiple: true }, newElement );
+        ok( ! connector_destroy_spy.called );
+    } );
+
+    test( "replacing mapped terminal on data collection input update preserves connections", function() {
+        var connector = this.connectAttachedMappedOutput();
+        var newElement = $("<div class='inputs'></div>");
+        this.view.addDataInput( { name: "TestName", extensions: ["txt"], input_type: "dataset_collection" }, newElement );
+        var terminal = newElement.find(".input-terminal")[ 0 ].terminal;
+        ok( connector.handle2 === terminal );
+    } );
+
+    test( "replacing terminal on data input destroys invalid connections", function() {
+        var connector = this.connectAttachedTerminal( "txt", "txt" );
+        var newElement = $("<div class='inputs'></div>");
+        var connector_destroy_spy = sinon.spy( connector, "destroy" );
+        // Replacing input with same name - but now of type bam should destroy connection.
+        this.view.addDataInput( { name: "TestName", extensions: ["bam"] }, newElement );
+        var terminal = newElement.find(".input-terminal")[ 0 ].terminal;
+        ok( connector_destroy_spy.called );
+    } );
+
+    test( "replacing terminal on data input with collection changes mapping view type", function() {
+        var connector = this.connectAttachedTerminal( "txt", "txt" );
+        var newElement = $("<div class='inputs'></div>");
+        var connector_destroy_spy = sinon.spy( connector, "destroy" );
+        this.view.addDataInput( { name: "TestName", extensions: ["txt"], input_type: "dataset_collection" }, newElement );
+        // Input type changed to dataset_collection - old connections are reset.
+        // Would be nice to preserve these connections and make them map over.
+        var terminal = newElement.find(".input-terminal")[ 0 ].terminal;
+        ok( connector_destroy_spy.called );
+    } );
+
+    test( "replacing terminal on data collection input with simple input changes mapping view type", function() {
+        var connector = this.connectAttachedMappedOutput();
+        var newElement = $("<div class='inputs'></div>");
+        var connector_destroy_spy = sinon.spy( connector, "destroy" );
+        this.view.addDataInput( { name: "TestName", extensions: ["txt"], input_type: "dataset" }, newElement );
+        var terminal = newElement.find(".input-terminal")[ 0 ].terminal;
+        ok( connector_destroy_spy.called );
+    } );
+
+    // global InputTerminalView
+    module( "Input terminal view", {
+        setup: function() {
+            this.node = { input_terminals: {} };
+            this.input = { name: "i1", extensions: [ "txt" ], multiple: false };
+            this.view = new TerminalsView.InputTerminalView( {
+                node: this.node,
+                input: this.input,
+            });
+        }
+    } );
+
+    test( "terminal added to node", function() {
+        ok( this.node.input_terminals.i1 );
+        deepEqual( this.node.input_terminals.i1.datatypes, [ "txt" ] );
+        equal( this.node.input_terminals.i1.multiple, false );
+    } );
+
+    test( "terminal element", function() {
+        var el = this.view.el;
+        equal( el.tagName, "DIV" );
+        equal( el.className, "terminal input-terminal");
+    } );
+
+    // global OutputTerminalView
+    module( "Output terminal view", {
+        setup: function() {
+            this.node = { output_terminals: {} };
+            this.output = { name: "o1", extensions: [ "txt" ] };
+            this.view = new TerminalsView.OutputTerminalView( {
+                node: this.node,
+                output: this.output,
+            });
+        }
+    } );
+
+    test( "terminal added to node", function() {
+        ok( this.node.output_terminals.o1 );
+        deepEqual( this.node.output_terminals.o1.datatypes, [ "txt" ] );
+    } );
+
+    test( "terminal element", function() {
+        var el = this.view.el;
+        equal( el.tagName, "DIV" );
+        equal( el.className, "terminal output-terminal");
+    } );
+
+    module( "CollectionTypeDescription", {
+        listType: function() {
+            return new Terminals.CollectionTypeDescription( "list" );
+        },
+        pairedType: function() {
+            return new Terminals.CollectionTypeDescription( "paired" );
+        },
+        pairedListType: function() {
+            return new Terminals.CollectionTypeDescription( "list:paired" );
+        }
+    } );
+
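+    // canMatch asks whether one collection type is acceptable where another is
+    // expected; canMapOver (below) asks whether a deeper type can be iterated
+    // over a shallower one - list:paired can map over paired, but a list
+    // cannot map over itself.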
+    test( "canMatch", function() {
+        ok( this.listType().canMatch( this.listType() ) );
+        ok( ! this.listType().canMatch( this.pairedType() ) );
+        ok( ! this.listType().canMatch( this.pairedListType() ) );
+    } );
+
+    test( "canMatch special types", function() {
+        ok( this.listType().canMatch( Terminals.ANY_COLLECTION_TYPE_DESCRIPTION ) );
+        ok( Terminals.ANY_COLLECTION_TYPE_DESCRIPTION.canMatch( this.pairedListType() ) );
+
+        ok( ! this.listType().canMatch( Terminals.NULL_COLLECTION_TYPE_DESCRIPTION ) );
+        ok( ! Terminals.NULL_COLLECTION_TYPE_DESCRIPTION.canMatch( this.pairedListType() ) );
+    } );
+
+    test( "canMapOver", function() {
+        ok( ! this.listType().canMapOver( this.listType() ) );
+        ok( ! this.listType().canMapOver( this.pairedType() ) );
+        ok( this.pairedListType().canMapOver( this.pairedType() ) );
+        ok( ! this.listType().canMapOver( this.pairedListType() ) );
+    } );
+
+    test( "canMapOver special types", function() {
+        ok( ! this.listType().canMapOver( Terminals.NULL_COLLECTION_TYPE_DESCRIPTION ) );
+        ok( ! Terminals.NULL_COLLECTION_TYPE_DESCRIPTION.canMapOver( this.pairedListType() ) );
+
+        // Following two should be able to be relaxed someday maybe - but the
+        // tracking gets tricky I think. For now mapping only works for explicitly
+        // defined collection types.
+        ok( ! this.listType().canMapOver( Terminals.ANY_COLLECTION_TYPE_DESCRIPTION ) );
+        ok( ! Terminals.ANY_COLLECTION_TYPE_DESCRIPTION.canMapOver( this.pairedListType() ) );
+    } );
+
+    test( "append", function( other ) {
+        var appendedType = this.listType().append( this.pairedType() );
+        equal( appendedType.collectionType, "list:paired" );
+    } );
+
+    test( "isCollection", function() {
+        ok( this.listType().isCollection );
+        ok( Terminals.ANY_COLLECTION_TYPE_DESCRIPTION.isCollection );
+        ok( ! Terminals.NULL_COLLECTION_TYPE_DESCRIPTION.isCollection );
+    } );
+
+    test( "equal", function() {
+        ok( ! this.listType().equal( this.pairedType() ) );
+        ok( this.listType().equal( this.listType() ) );
+
+        ok( Terminals.ANY_COLLECTION_TYPE_DESCRIPTION.equal( Terminals.ANY_COLLECTION_TYPE_DESCRIPTION ) );
+        ok( ! Terminals.ANY_COLLECTION_TYPE_DESCRIPTION.equal( Terminals.NULL_COLLECTION_TYPE_DESCRIPTION ) );
+        ok( ! Terminals.ANY_COLLECTION_TYPE_DESCRIPTION.equal( this.pairedType() ) );
+        ok( ! this.pairedType().equal( Terminals.ANY_COLLECTION_TYPE_DESCRIPTION ) );
+
+        ok( Terminals.NULL_COLLECTION_TYPE_DESCRIPTION.equal( Terminals.NULL_COLLECTION_TYPE_DESCRIPTION ) );
+        ok( ! Terminals.NULL_COLLECTION_TYPE_DESCRIPTION.equal( Terminals.ANY_COLLECTION_TYPE_DESCRIPTION ) );
+        ok( ! Terminals.NULL_COLLECTION_TYPE_DESCRIPTION.equal( this.listType() ) );
+        ok( ! this.listType().equal( Terminals.NULL_COLLECTION_TYPE_DESCRIPTION ) );
+
+    } );
+
+    module( "TerminalMapping", {
+    } );
+
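+    // TerminalMapping attaches itself to the terminal it wraps and defaults
+    // mapOver to the null collection type; disableMapOver resets back to that
+    // null type and fires a "change" event, which is what these tests assert.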
+    test( "default constructor", function() {
+        var terminal = {};
+        var mapping = new Terminals.TerminalMapping( { terminal: terminal } );
+        ok( terminal.terminalMapping === mapping );
+        ok( mapping.mapOver === Terminals.NULL_COLLECTION_TYPE_DESCRIPTION );
+    } );
+
+    test( "constructing with mapOver", function() {
+        var terminal = {};
+        var mapping = new Terminals.TerminalMapping( { terminal: terminal, mapOver: new Terminals.CollectionTypeDescription( "list" ) } );
+        ok( mapping.mapOver.collectionType == "list" );
+    } );
+
+    test( "disableMapOver", function() {
+        var terminal = {};
+        var mapping = new Terminals.TerminalMapping( { terminal: terminal, mapOver: new Terminals.CollectionTypeDescription( "list" ) } );
+        var changeSpy = sinon.spy();
+        mapping.bind( "change", changeSpy );
+        mapping.disableMapOver();
+        ok( mapping.mapOver === Terminals.NULL_COLLECTION_TYPE_DESCRIPTION );
+        ok( changeSpy.called );
+    } );
+
+    module( "terminal mapping logic", {
+        newInputTerminal: function( mapOver, input, node ) {
+            input = input || {};
+            node = node || this.newNode();
+            if( ! ( 'extensions' in input ) ) {
+                input[ 'extensions' ] = [ 'data' ];
+            }
+            var inputEl = $("<div>")[ 0 ];
+            var inputTerminal = new Terminals.InputTerminal( { element: inputEl, input: input } );
+            var inputTerminalMapping = new Terminals.TerminalMapping( { terminal: inputTerminal } );
+            inputTerminal.node = node;
+            if( mapOver ) {
+                inputTerminal.setMapOver( new Terminals.CollectionTypeDescription( mapOver ) );
+            }
+            return inputTerminal;
+        },
+        newInputCollectionTerminal: function( input, node ) {
+            input = input || {};
+            node = node || this.newNode();
+            if( ! ( 'extensions' in input ) ) {
+                input[ 'extensions' ] = [ 'data' ];
+            }
+            var inputEl = $("<div>")[ 0 ];
+            var inputTerminal = new Terminals.InputCollectionTerminal( { element: inputEl, input: input } );
+            var inputTerminalMapping = new Terminals.TerminalMapping( { terminal: inputTerminal } );
+            inputTerminal.node = node;
+            return inputTerminal;
+        },
+        newOutputTerminal: function( mapOver, output, node ) {
+            output = output || {};
+            node = node || this.newNode();
+            if( ! ( 'extensions' in output ) ) {
+                output[ 'extensions' ] = [ 'data' ];
+            }
+            var outputEl = $("<div>")[ 0 ];
+            var outputTerminal = new Terminals.OutputTerminal( { element: outputEl, datatypes: output.extensions } );
+            var outputTerminalMapping = new Terminals.TerminalMapping( { terminal: outputTerminal } );
+            outputTerminal.node = node;
+            if( mapOver ) {
+                outputTerminal.setMapOver( new Terminals.CollectionTypeDescription( mapOver ) );
+            }
+            return outputTerminal;
+        },
+        newOutputCollectionTerminal: function( collectionType, output, node, mapOver ) {
+            collectionType = collectionType || "list";
+            output = output || {};
+            node = node || this.newNode();
+            if( ! ( 'extensions' in output ) ) {
+                output[ 'extensions' ] = [ 'data' ];
+            }
+            var outputEl = $("<div>")[ 0 ];
+            var outputTerminal = new Terminals.OutputCollectionTerminal( { element: outputEl, datatypes: output.extensions, collection_type: collectionType } );
+            var outputTerminalMapping = new Terminals.TerminalMapping( { terminal: outputTerminal } );
+            outputTerminal.node = node;
+            if( mapOver ) {
+                outputTerminal.setMapOver( new Terminals.CollectionTypeDescription( mapOver ) );
+            }
+            return outputTerminal;
+        },
+        newNode: function( ) {
+            var nodeEl = $("<div>")[ 0 ];
+            var node = new Node( create_app(), { element: nodeEl } );
+            return node;
+        },
+        _addExistingOutput: function( terminal, output, connected ) {
+            var self = this;
+            var node = terminal.node;
+            if( connected ) {
+                var inputTerminal = self.newInputTerminal();
+                new Connector( inputTerminal, output );
+            }
+            this._addTerminalTo( output, node.output_terminals );
+            return output;
+        },
+        addOutput: function( terminal, connected ) {
+            var connectedOutput = this.newOutputTerminal();
+            return this._addExistingOutput( terminal, connectedOutput, connected );
+        },
+        addCollectionOutput: function( terminal, connected ) {
+            var collectionOutput = this.newOutputCollectionTerminal();
+            return this._addExistingOutput( terminal, collectionOutput, connected );
+        },
+        addConnectedOutput: function( terminal ) {
+            return this.addOutput( terminal, true );
+        },
+        addConnectedCollectionOutput: function( terminal ) {
+            var connectedOutput = this.newOutputCollectionTerminal();
+            return this._addExistingOutput( terminal, connectedOutput, true );
+        },
+        addConnectedInput: function( terminal ) {
+            var self = this;
+            var connectedInput = this.newInputTerminal();
+            var node = terminal.node;
+            var outputTerminal = self.newOutputTerminal();
+            new Connector( connectedInput, outputTerminal );
+            this._addTerminalTo( connectedInput, node.input_terminals );
+            return connectedInput;
+        },
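+        // _addTerminalTo appends underscores until the key is unique, so a
+        // test can attach several anonymous terminals without clobbering an
+        // earlier one.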
+        _addTerminalTo: function( terminal, terminals ) {
+            var name = "other";
+            while( name in terminals ) {
+                name += "_";
+            }
+            terminals[ name ] = terminal;
+        },
+        verifyNotAttachable: function( inputTerminal, output ) {
+            var outputTerminal;
+            if( typeof( output ) == "string" ) {
+                // Just given a collection type... create terminal out of it.
+                outputTerminal = this.newOutputTerminal( output );
+            } else {
+                outputTerminal = output;
+            }
+
+            ok( ! inputTerminal.attachable( outputTerminal ) );
+        },
+        verifyAttachable: function( inputTerminal, output ) {
+            var outputTerminal;
+            if( typeof( output ) == "string" ) {
+                // Just given a collection type... create terminal out of it.
+                outputTerminal = this.newOutputTerminal( output );
+            } else {
+                outputTerminal = output;
+            }
+
+            ok( inputTerminal.attachable( outputTerminal ), 'Cannot attach '+ outputTerminal + " to " + inputTerminal );
+
+            // Go further... make sure datatypes are being enforced
+            inputTerminal.datatypes = [ "bam" ];
+            outputTerminal.datatypes = [ "txt" ];
+            ok( ! inputTerminal.attachable( outputTerminal ) );
+        },
+        verifyMappedOver: function( terminal ) {
+            ok( terminal.terminalMapping.mapOver.isCollection );
+        },
+        verifyNotMappedOver: function( terminal ) {
+            ok( ! terminal.terminalMapping.mapOver.isCollection );
+        },
+    } );
+
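+    // The factory helpers above build free-standing terminals around stub DOM
+    // elements so each test assembles only the subgraph it needs. Note that
+    // verifyAttachable goes a step further than attachable() - it also flips
+    // the datatypes to a bam/txt mismatch to confirm type checking still
+    // applies once mapping constraints are satisfied.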
+    test( "unconstrained input can be mapped over", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        this.verifyAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "unmapped input can be mapped over if matching connected input terminals map type", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedInput1 = this.addConnectedInput( inputTerminal1 );
+        var connectedInput2 = this.addConnectedInput( inputTerminal1 );
+        connectedInput2.setMapOver( new Terminals.CollectionTypeDescription( "list") );
+        this.verifyAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "unmapped input cannot be mapped over if not matching connected input terminals map type", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedInput = this.addConnectedInput( inputTerminal1 );
+        connectedInput.setMapOver( new Terminals.CollectionTypeDescription( "paired" ) );
+        this.verifyNotAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "unmapped input can be attached to by output collection if matching connected input terminals map type", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedInput1 = this.addConnectedInput( inputTerminal1 );
+        var connectedInput2 = this.addConnectedInput( inputTerminal1 );
+        connectedInput2.setMapOver( new Terminals.CollectionTypeDescription( "list") );
+        var outputTerminal = this.newOutputCollectionTerminal( "list" );
+        this.verifyAttachable( inputTerminal1, outputTerminal );
+    } );
+
+    test( "unmapped input cannot be attached to by output collection if matching connected input terminals don't match map type", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedInput1 = this.addConnectedInput( inputTerminal1 );
+        var connectedInput2 = this.addConnectedInput( inputTerminal1 );
+        connectedInput2.setMapOver( new Terminals.CollectionTypeDescription( "list") );
+        var outputTerminal = this.newOutputCollectionTerminal( "paired" );
+        this.verifyNotAttachable( inputTerminal1, outputTerminal );
+    } );
+
+    test( "unmapped input can be attached to by output collection if effective output type (output+mapover) is same as mapped over input", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedInput1 = this.addConnectedInput( inputTerminal1 );
+        var connectedInput2 = this.addConnectedInput( inputTerminal1 );
+        connectedInput2.setMapOver( new Terminals.CollectionTypeDescription( "list:paired") );
+        var outputTerminal = this.newOutputCollectionTerminal( "paired" );
+        outputTerminal.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        this.verifyAttachable( inputTerminal1, outputTerminal );
+    } );
+
+    test( "unmapped input cannot be attached to by output collection if effective output type (output+mapover) is not same as mapped over input (1)", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedInput1 = this.addConnectedInput( inputTerminal1 );
+        var connectedInput2 = this.addConnectedInput( inputTerminal1 );
+        connectedInput2.setMapOver( new Terminals.CollectionTypeDescription( "list:paired") );
+        var outputTerminal = this.newOutputCollectionTerminal( "list" );
+        outputTerminal.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        this.verifyNotAttachable( inputTerminal1, outputTerminal );
+    } );
+
+    test( "unmapped input cannot be attached to by output collection if effective output type (output+mapover) is not same as mapped over input (2)", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedInput1 = this.addConnectedInput( inputTerminal1 );
+        var connectedInput2 = this.addConnectedInput( inputTerminal1 );
+        connectedInput2.setMapOver( new Terminals.CollectionTypeDescription( "list:paired") );
+        var outputTerminal = this.newOutputCollectionTerminal( "list" );
+        outputTerminal.setMapOver( new Terminals.CollectionTypeDescription( "paired" ) );
+        this.verifyNotAttachable( inputTerminal1, outputTerminal );
+    } );
+
+    test( "unmapped input with unmapped, connected outputs cannot be mapped over", function() {
+        // It would invalidate the connections - someday maybe we could try to
+        // recursively map over everything down the DAG - it would be expensive
+        // to check that though.
+        var inputTerminal1 = this.newInputTerminal();
+        this.addConnectedOutput( inputTerminal1 );
+        this.verifyNotAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "unmapped input with connected mapped outputs can be mapped over if matching", function() {
+        // It would invalidate the connections - someday maybe we could try to
+        // recursively map over everything down the DAG - it would be expensive
+        // to check that though.
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedOutput = this.addConnectedOutput( inputTerminal1 );
+        connectedOutput.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        this.verifyAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "unmapped input with connected mapped outputs cannot be mapped over if mapover not matching", function() {
+        // It would invalidate the connections - someday maybe we could try to
+        // recursively map over everything down the DAG - it would be expensive
+        // to check that though.
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedOutput = this.addConnectedOutput( inputTerminal1 );
+        connectedOutput.setMapOver( new Terminals.CollectionTypeDescription( "paired" ) );
+        this.verifyNotAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "explicitly constrained input can not be mapped over by incompatible collection type", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        inputTerminal1.setMapOver( new Terminals.CollectionTypeDescription( "paired" ) );
+        this.verifyNotAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "explicitly constrained input can be mapped over by compatible collection type", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        inputTerminal1.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        this.verifyAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "unconstrained collection input can be mapped over", function() {
+        var inputTerminal1 = this.newInputCollectionTerminal( { collection_types: ["paired"] } );
+        this.verifyAttachable( inputTerminal1, "list:paired" );
+    } );
+
+    test( "unconstrained collection input cannot be mapped over by incompatible type", function() {
+        var inputTerminal1 = this.newInputCollectionTerminal( { collection_types: ["list"] } ); // Would need to be paired...
+        this.verifyNotAttachable( inputTerminal1, "list:paired" );
+    } );
+
+    test( "explicitly mapped over collection input can be attached by explicit mapping", function() {
+        var inputTerminal1 = this.newInputCollectionTerminal( { collection_types: ["paired"] } );
+        inputTerminal1.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        this.verifyAttachable( inputTerminal1, "list:paired" );
+    } );
+
+    test( "explicitly mapped over collection input can be attached by explicit mapping", function() {
+        var inputTerminal1 = this.newInputCollectionTerminal( { collection_types: ["list:paired"] } );
+        inputTerminal1.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        // effectively input is list:list:paired so shouldn't be able to attach
+        this.verifyNotAttachable( inputTerminal1, "list:paired" );
+    } );
+
+    test( "unconnected multiple inputs can be connected to rank 1 collections", function() {
+        var inputTerminal1 = this.newInputTerminal( null, { multiple: true } );
+        this.verifyAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "multiple input attachable by collections", function() {
+        var inputTerminal1 = this.newInputTerminal( null, { multiple: true } );
+        var connectedInput1 = this.addConnectedInput( inputTerminal1 );
+        this.addConnectedOutput( connectedInput1 );
+        this.verifyAttachable( inputTerminal1, "list" );
+    } );
+
+    test( "unconnected multiple inputs cannot be connected to rank > 1 collections (yet...)", function() {
+        var inputTerminal1 = this.newInputTerminal( null, { multiple: true } );
+        this.verifyNotAttachable( inputTerminal1, "list:paired" );
+    } );
+
+    test( "resetMappingIfNeeded does nothing if not mapped", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        inputTerminal1.resetMappingIfNeeded();
+        this.verifyNotMappedOver( inputTerminal1 );
+    } );
+
+    test( "resetMappingIfNeeded resets unconstrained input", function() {
+        var inputTerminal1 = this.newInputTerminal( "list" );
+        this.verifyMappedOver( inputTerminal1 );
+        inputTerminal1.resetMappingIfNeeded();
+        this.verifyNotMappedOver( inputTerminal1 );
+    } );
+
+    test( "resetMappingIfNeeded does not reset if connected output depends on being mapped", function() {
+        var inputTerminal1 = this.newInputTerminal( "list" );
+        var connectedOutput = this.addConnectedOutput( inputTerminal1 );
+        connectedOutput.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        inputTerminal1.resetMappingIfNeeded();
+        this.verifyMappedOver( inputTerminal1 );
+    } );
+
+    test( "resetMappingIfNeeded resets if node outputs are not connected to anything", function() {
+        var inputTerminal1 = this.newInputTerminal( "list" );
+        var output = this.addOutput( inputTerminal1 );
+        output.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        inputTerminal1.resetMappingIfNeeded();
+        this.verifyNotMappedOver( inputTerminal1 );
+    } );
+
+    test( "resetMappingIfNeeded an input resets node outputs if they not connected to anything", function() {
+        var inputTerminal1 = this.newInputTerminal( "list" );
+        var output = this.addOutput( inputTerminal1 );
+        output.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        inputTerminal1.resetMappingIfNeeded();
+        this.verifyNotMappedOver( output );
+    } );
+
+    test( "resetMappingIfNeeded an input resets node collection outputs if they not connected to anything", function() {
+        var inputTerminal1 = this.newInputTerminal( "list" );
+        var output = this.addCollectionOutput( inputTerminal1 );
+        output.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        inputTerminal1.resetMappingIfNeeded();
+        this.verifyNotMappedOver( output );
+    } );
+
+    test( "resetMappingIfNeeded resets if not last mapped over input", function() {
+        // Idea here is that other nodes are forcing output to still be mapped
+        // over so don't need to disconnect output nodes.
+        var inputTerminal1 = this.newInputTerminal( "list" );
+        var connectedInput1 = this.addConnectedInput( inputTerminal1 );
+        connectedInput1.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        var connectedOutput = this.addConnectedOutput( inputTerminal1 );
+        connectedOutput.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+        inputTerminal1.resetMappingIfNeeded();
+        // inputTerminal1 can be reset because connectedInput1
+        // is still forcing connectedOutput to be mapped over,
+        // so verify inputTerminal1 is reset while connectedInput1
+        // and connectedOutput are untouched.
+        this.verifyNotMappedOver( inputTerminal1 );
+        this.verifyMappedOver( connectedInput1 );
+        this.verifyMappedOver( connectedOutput );
+    } );
+
+    test( "simple mapping over collection outputs works correctly", function() {
+        var inputTerminal1 = this.newInputTerminal();
+        var connectedOutput = this.addConnectedCollectionOutput( inputTerminal1 );
+        inputTerminal1.setMapOver( new Terminals.CollectionTypeDescription( "list" ) );
+
+        // Can attach list output of collection type list that is being mapped
+        // over another list to a list:list (because this is what it is) but not
+        // to a list:list:list.
+        var testTerminal2 = this.newInputTerminal( "list:list" );
+        this.verifyAttachable( testTerminal2, connectedOutput );
+
+        var testTerminal1 = this.newInputTerminal( "list:list:list" );
+        this.verifyNotAttachable( testTerminal1, connectedOutput );
+    } );
+});
\ No newline at end of file
diff --git a/test/shed_functional/__init__.py b/test/shed_functional/__init__.py
new file mode 100644
index 0000000..1f89ece
--- /dev/null
+++ b/test/shed_functional/__init__.py
@@ -0,0 +1 @@
+"""Tool shed functional Tests"""
diff --git a/test/shed_functional/base/__init__.py b/test/shed_functional/base/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/shed_functional/base/common.py b/test/shed_functional/base/common.py
new file mode 100644
index 0000000..2b15cbb
--- /dev/null
+++ b/test/shed_functional/base/common.py
@@ -0,0 +1,35 @@
+admin_user = None
+admin_user_private_role = None
+admin_email = 'test@bx.psu.edu'
+admin_username = 'admin-user'
+
+test_user_1 = None
+test_user_1_private_role = None
+test_user_1_email = 'test-1@bx.psu.edu'
+test_user_1_name = 'user1'
+
+test_user_2 = None
+test_user_2_private_role = None
+test_user_2_email = 'test-2@bx.psu.edu'
+test_user_2_name = 'user2'
+
+test_user_3 = None
+test_user_3_private_role = None
+test_user_3_email = 'test-3@bx.psu.edu'
+test_user_3_name = 'user3'
+
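+# The templates below use string.Template-style ${} placeholders; tests
+# substitute repository details into them when generating dependency XML.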
+complex_repository_dependency_template = '''<?xml version="1.0"?>
+<tool_dependency>
+    <package name="${package}" version="${version}">
+${dependency_lines}
+    </package>
+</tool_dependency>
+'''
+
+new_repository_dependencies_xml = '''<?xml version="1.0"?>
+<repositories${description}>
+${dependency_lines}
+</repositories>
+'''
+
+new_repository_dependencies_line = '''    <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}"${prior_installation_required} />'''
diff --git a/test/shed_functional/base/test_db_util.py b/test/shed_functional/base/test_db_util.py
new file mode 100644
index 0000000..d475002
--- /dev/null
+++ b/test/shed_functional/base/test_db_util.py
@@ -0,0 +1,231 @@
+import logging
+
+from sqlalchemy import and_, false, true
+
+import galaxy.model
+import galaxy.model.tool_shed_install
+import galaxy.webapps.tool_shed.model as model
+
+from functional.database_contexts import (galaxy_context as ga_session,
+    install_context as install_session, tool_shed_context as sa_session)
+
+log = logging.getLogger( 'test.tool_shed.test_db_util' )
+
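+# Three scoped sessions are in play: sa_session (tool shed model),
+# install_session (Galaxy's tool_shed_install model) and ga_session (the
+# Galaxy model); each helper below queries the session its model lives in.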
+
+def delete_obj( obj ):
+    sa_session.delete( obj )
+    sa_session.flush()
+
+
+def delete_user_roles( user ):
+    for ura in user.roles:
+        sa_session.delete( ura )
+    sa_session.flush()
+
+
+def flush( obj ):
+    sa_session.add( obj )
+    sa_session.flush()
+
+
+def get_all_repositories():
+    return sa_session.query( model.Repository ).all()
+
+
+def get_all_installed_repositories( actually_installed=False ):
+    if actually_installed:
+        return install_session.query( galaxy.model.tool_shed_install.ToolShedRepository ) \
+                              .filter( and_( galaxy.model.tool_shed_install.ToolShedRepository.table.c.deleted == false(),
+                                             galaxy.model.tool_shed_install.ToolShedRepository.table.c.uninstalled == false(),
+                                             galaxy.model.tool_shed_install.ToolShedRepository.table.c.status == galaxy.model.tool_shed_install.ToolShedRepository.installation_status.INSTALLED ) ) \
+                              .all()
+    else:
+        return install_session.query( galaxy.model.tool_shed_install.ToolShedRepository ).all()
+
+
+def get_category_by_name( name ):
+    return sa_session.query( model.Category ) \
+                     .filter( model.Category.table.c.name == name ) \
+                     .first()
+
+
+def get_default_user_permissions_by_role( role ):
+    return sa_session.query( model.DefaultUserPermissions ) \
+                     .filter( model.DefaultUserPermissions.table.c.role_id == role.id ) \
+                     .all()
+
+
+def get_default_user_permissions_by_user( user ):
+    return sa_session.query( model.DefaultUserPermissions ) \
+                     .filter( model.DefaultUserPermissions.table.c.user_id == user.id ) \
+                     .all()
+
+
+def get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision ):
+    return install_session.query( galaxy.model.tool_shed_install.ToolShedRepository ) \
+                          .filter( and_( galaxy.model.tool_shed_install.ToolShedRepository.table.c.name == repository_name,
+                                         galaxy.model.tool_shed_install.ToolShedRepository.table.c.owner == owner,
+                                         galaxy.model.tool_shed_install.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+                          .first()
+
+
+def get_installed_repository_by_id( repository_id ):
+    return install_session.query( galaxy.model.tool_shed_install.ToolShedRepository ) \
+                          .filter( galaxy.model.tool_shed_install.ToolShedRepository.table.c.id == repository_id ) \
+                          .first()
+
+
+def get_installed_repository_by_name_owner( repository_name, owner, return_multiple=False ):
+    query = install_session.query( galaxy.model.tool_shed_install.ToolShedRepository ) \
+                           .filter( and_( galaxy.model.tool_shed_install.ToolShedRepository.table.c.name == repository_name,
+                                          galaxy.model.tool_shed_install.ToolShedRepository.table.c.owner == owner ) )
+    if return_multiple:
+        return query.all()
+    return query.first()
+
+
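+# Private roles are located purely by convention here: the role name matches
+# the user's email and the description follows a fixed pattern.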
+def get_private_role( user ):
+    for role in user.all_roles():
+        if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+            return role
+    raise AssertionError( "Private role not found for user '%s'" % user.email )
+
+
+def get_role( user, role_name ):
+    for role in user.all_roles():
+        if role.name == role_name:
+            return role
+    return None
+
+
+def get_repository_role_association( repository_id, role_id ):
+    rra = sa_session.query( model.RepositoryRoleAssociation ) \
+                    .filter( and_( model.RepositoryRoleAssociation.table.c.role_id == role_id,
+                                   model.RepositoryRoleAssociation.table.c.repository_id == repository_id ) ) \
+                    .first()
+    return rra
+
+
+def get_repository_reviews( repository_id, reviewer_user_id=None, changeset_revision=None ):
+    if reviewer_user_id and changeset_revision:
+        reviews = sa_session.query( model.RepositoryReview ) \
+                            .filter( and_( model.RepositoryReview.table.c.repository_id == repository_id,
+                                           model.RepositoryReview.table.c.deleted == false(),
+                                           model.RepositoryReview.table.c.changeset_revision == changeset_revision,
+                                           model.RepositoryReview.table.c.user_id == reviewer_user_id ) ) \
+                            .all()
+    elif reviewer_user_id:
+        reviews = sa_session.query( model.RepositoryReview ) \
+                            .filter( and_( model.RepositoryReview.table.c.repository_id == repository_id,
+                                           model.RepositoryReview.table.c.deleted == false(),
+                                           model.RepositoryReview.table.c.user_id == reviewer_user_id ) ) \
+                            .all()
+    else:
+        reviews = sa_session.query( model.RepositoryReview ) \
+                            .filter( and_( model.RepositoryReview.table.c.repository_id == repository_id,
+                                           model.RepositoryReview.table.c.deleted == false() ) ) \
+                            .all()
+    return reviews
+
+
+def get_reviews_ordered_by_changeset_revision( repository_id, changelog_tuples, reviewer_user_id=None ):
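+    # Reviews come back from get_repository_reviews in arbitrary database
+    # order; walking changelog_tuples re-sorts them to follow the repository's
+    # revision history.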
+    reviews = get_repository_reviews( repository_id, reviewer_user_id=reviewer_user_id )
+    ordered_reviews = []
+    for ctx_rev, changeset_hash in changelog_tuples:
+        for review in reviews:
+            if str( review.changeset_revision ) == str( changeset_hash ):
+                ordered_reviews.append( review )
+    return ordered_reviews
+
+
+def get_repository_by_id( repository_id ):
+    return sa_session.query( model.Repository ) \
+                     .filter( model.Repository.table.c.id == repository_id ) \
+                     .first()
+
+
+def get_repository_downloadable_revisions( repository_id ):
+    revisions = sa_session.query( model.RepositoryMetadata ) \
+                          .filter( and_( model.RepositoryMetadata.table.c.repository_id == repository_id,
+                                         model.RepositoryMetadata.table.c.downloadable == true() ) ) \
+                          .all()
+    return revisions
+
+
+def get_repository_metadata_for_changeset_revision( repository_id, changeset_revision ):
+    repository_metadata = sa_session.query( model.RepositoryMetadata ) \
+                                    .filter( and_( model.RepositoryMetadata.table.c.repository_id == repository_id,
+                                                   model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+                                    .first()
+    return repository_metadata
+
+
+def get_repository_review_by_user_id_changeset_revision( user_id, repository_id, changeset_revision ):
+    review = sa_session.query( model.RepositoryReview ) \
+                       .filter( and_( model.RepositoryReview.table.c.user_id == user_id,
+                                      model.RepositoryReview.table.c.repository_id == repository_id,
+                                      model.RepositoryReview.table.c.changeset_revision == changeset_revision ) ) \
+                       .first()
+    return review
+
+
+def get_role_by_name( role_name ):
+    return sa_session.query( model.Role ) \
+                     .filter( model.Role.table.c.name == role_name ) \
+                     .first()
+
+
+def get_user( email ):
+    return sa_session.query( model.User ) \
+                     .filter( model.User.table.c.email == email ) \
+                     .first()
+
+
+def get_user_by_name( username ):
+    return sa_session.query( model.User ) \
+                     .filter( model.User.table.c.username == username ) \
+                     .first()
+
+
+def mark_obj_deleted( obj ):
+    obj.deleted = True
+    sa_session.add( obj )
+    sa_session.flush()
+
+
+def refresh( obj ):
+    sa_session.refresh( obj )
+
+
+def ga_refresh( obj ):
+    install_session.refresh( obj )
+
+
+def get_galaxy_private_role( user ):
+    for role in user.all_roles():
+        if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+            return role
+    raise AssertionError( "Private role not found for user '%s'" % user.email )
+
+
+def get_galaxy_user( email ):
+    return ga_session.query( galaxy.model.User ) \
+                     .filter( galaxy.model.User.table.c.email == email ) \
+                     .first()
+
+
+def get_repository_by_name_and_owner( name, owner_username, return_multiple=False ):
+    owner = get_user_by_name( owner_username )
+    repository = sa_session.query( model.Repository ) \
+                           .filter( and_( model.Repository.table.c.name == name,
+                                          model.Repository.table.c.user_id == owner.id ) ) \
+                           .first()
+    return repository
+
+
+def get_repository_metadata_by_repository_id_changeset_revision( repository_id, changeset_revision ):
+    repository_metadata = sa_session.query( model.RepositoryMetadata ) \
+                                    .filter( and_( model.RepositoryMetadata.table.c.repository_id == repository_id,
+                                                   model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+                                    .first()
+    return repository_metadata
diff --git a/test/shed_functional/base/twilltestcase.py b/test/shed_functional/base/twilltestcase.py
new file mode 100644
index 0000000..6c3b505
--- /dev/null
+++ b/test/shed_functional/base/twilltestcase.py
@@ -0,0 +1,1530 @@
+import logging
+import os
+import re
+import shutil
+import string
+import tarfile
+import tempfile
+import time
+from json import loads
+
+import twill.commands as tc
+from mercurial import commands, hg, ui
+from six.moves.urllib.parse import quote_plus, urlencode
+
+import galaxy.model.tool_shed_install as galaxy_model
+import galaxy.util
+import galaxy.webapps.tool_shed.util.hgweb_config
+from base.tool_shed_util import repository_installation_timeout
+from base.twilltestcase import TwillTestCase
+from galaxy.web import security
+from tool_shed.util import hg_util, xml_util
+from tool_shed.util.encoding_util import tool_shed_encode
+
+from . import common, test_db_util
+
+log = logging.getLogger( __name__ )
+
+
+class ShedTwillTestCase( TwillTestCase ):
+
+    def setUp( self ):
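+        # Connection details for both the tool shed and the Galaxy instance
+        # under test come from environment variables exported by the
+        # functional test driver.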
+        # Security helper
+        self.security = security.SecurityHelper( id_secret='changethisinproductiontoo' )
+        self.history_id = None
+        self.hgweb_config_dir = os.environ.get( 'TEST_HG_WEB_CONFIG_DIR' )
+        self.hgweb_config_manager = galaxy.webapps.tool_shed.util.hgweb_config.HgWebConfigManager()
+        self.hgweb_config_manager.hgweb_config_dir = self.hgweb_config_dir
+        self.tool_shed_test_tmp_dir = os.environ.get( 'TOOL_SHED_TEST_TMP_DIR', None )
+        self.host = os.environ.get( 'TOOL_SHED_TEST_HOST' )
+        self.port = os.environ.get( 'TOOL_SHED_TEST_PORT' )
+        self.url = "http://%s:%s" % ( self.host, self.port )
+        self.galaxy_host = os.environ.get( 'GALAXY_TEST_HOST' )
+        self.galaxy_port = os.environ.get( 'GALAXY_TEST_PORT' )
+        self.galaxy_url = "http://%s:%s" % ( self.galaxy_host, self.galaxy_port )
+        self.shed_tool_data_table_conf = os.environ.get( 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF' )
+        self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
+        self.tool_shed_test_file = None
+        self.tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
+        self.shed_tool_conf = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF' )
+        self.test_db_util = test_db_util
+        # TODO: Figure out a way to alter these attributes during tests.
+        self.galaxy_tool_dependency_dir = os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
+        self.shed_tools_dict = {}
+
+    def add_repository_review_component( self, **kwd ):
+        url = '/repository_review/create_component?operation=create'
+        self.visit_url( url )
+        self.submit_form( 1, 'create_component_button', **kwd )
+
+    def assign_admin_role( self, repository, user ):
+        # As elsewhere, twill cannot submit this form, this time because it does not execute the javascript
+        # attached to the role selection form. Visit the action url directly with the necessary parameters.
+        url = '/repository/manage_repository_admins?id=%s&in_users=%d&manage_role_associations_button=Save' % \
+            ( self.security.encode_id( repository.id ), user.id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed=[ 'Role', 'has been associated' ] )
+
+    def browse_category( self, category, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/browse_valid_categories?sort=name&operation=valid_repositories_by_category&id=%s' % \
+              self.security.encode_id( category.id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def browse_component_review( self, review, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository_review/browse_review?id=%s' % self.security.encode_id( review.id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def browse_custom_datatypes( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/browse_datatypes'
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def browse_repository( self, repository, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/browse_repository?id=%s' % self.security.encode_id( repository.id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def browse_repository_dependencies( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/browse_repository_dependencies'
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def browse_tool_shed( self, url, strings_displayed=None, strings_not_displayed=None ):
+        self.visit_galaxy_url( '/admin_toolshed/browse_tool_shed?tool_shed_url=%s' % url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def browse_tool_dependencies( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/browse_tool_dependencies'
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def browse_tools( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/browse_tools'
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def check_count_of_metadata_revisions_associated_with_repository( self, repository, metadata_count ):
+        self.check_repository_changelog( repository )
+        self.check_string_count_in_page( 'Repository metadata is associated with this change set.', metadata_count )
+
+    def check_exported_repository_dependency( self, dependency_filename, repository_name, repository_owner ):
+        root, error_message = xml_util.parse_xml( dependency_filename )
+        for elem in root.findall( 'repository' ):
+            if 'changeset_revision' in elem:
+                raise AssertionError( 'Exported repository %s with owner %s has a dependency with a defined changeset revision.' %
+                                      ( repository_name, repository_owner ) )
+            if 'toolshed' in elem:
+                raise AssertionError( 'Exported repository %s with owner %s has a dependency with a defined tool shed.' %
+                                      ( repository_name, repository_owner ) )
+
+    def check_for_valid_tools( self, repository, strings_displayed=None, strings_not_displayed=None ):
+        if strings_displayed is None:
+            strings_displayed = [ 'Valid tools' ]
+        else:
+            strings_displayed.append( 'Valid tools' )
+        self.display_manage_repository_page( repository, strings_displayed, strings_not_displayed )
+
+    def check_galaxy_repository_db_status( self, repository_name, owner, expected_status ):
+        installed_repository = test_db_util.get_installed_repository_by_name_owner( repository_name, owner )
+        assert installed_repository.status == expected_status, 'Status in database is %s, expected %s' % \
+            ( installed_repository.status, expected_status )
+
+    def check_galaxy_repository_tool_panel_section( self, repository, expected_tool_panel_section ):
+        metadata = repository.metadata
+        assert 'tools' in metadata, 'Tools not found in repository metadata: %s' % metadata
+        # If integrated_tool_panel.xml is to be tested, this test method will need to be enhanced to handle tools
+        # from the same repository in different tool panel sections. Getting the first tool guid is ok, because
+        # currently all tools contained in a single repository will be loaded into the same tool panel section.
+        if repository.status in [ galaxy_model.ToolShedRepository.installation_status.UNINSTALLED,
+                                  galaxy_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+            tool_panel_section = self.get_tool_panel_section_from_repository_metadata( metadata )
+        else:
+            tool_panel_section = self.get_tool_panel_section_from_api( metadata )
+        assert tool_panel_section == expected_tool_panel_section, 'Expected to find tool panel section *%s*, but instead found *%s*\nMetadata: %s\n' % \
+            ( expected_tool_panel_section, tool_panel_section, metadata )
+
+    def check_installed_repository_tool_dependencies( self,
+                                                      installed_repository,
+                                                      strings_displayed=None,
+                                                      strings_not_displayed=None,
+                                                      dependencies_installed=False ):
+        # Tool dependencies are not being installed in these functional tests. If this is changed, the test method will also need to be updated.
+        if not dependencies_installed:
+            strings_displayed.append( 'Missing tool dependencies' )
+        else:
+            strings_displayed.append( 'Tool dependencies' )
+        if dependencies_installed:
+            strings_displayed.append( 'Installed' )
+        else:
+            strings_displayed.append( 'Never installed' )
+        url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def check_manifest( self, manifest_filepath, owner=None ):
+        root, error_message = xml_util.parse_xml( manifest_filepath )
+        for elem in root.findall( 'repository' ):
+            repository_name = elem.get( 'name' )
+            manifest_owner = elem.get( 'username' )
+            if owner is not None:
+                assert manifest_owner == owner, 'Expected repository %s to be owned by %s, but found %s' % \
+                    ( elem.get( 'name' ), owner, manifest_owner )
+            toolshed = elem.get( 'toolshed' )
+            changeset_revision = elem.get( 'changeset_revision' )
+            assert toolshed is None, 'Repository definition %s has a tool shed attribute %s.' % ( repository_name, toolshed )
+            assert changeset_revision is None, 'Repository definition %s specifies a changeset revision %s.' % \
+                ( repository_name, changeset_revision )
+            repository_archive = elem.find( 'archive' ).text
+            filepath, filename = os.path.split( manifest_filepath )
+            repository_path = os.path.join( filepath, repository_archive )
+            self.verify_repository_in_capsule( repository_path, repository_name, owner )
+
+    def check_repository_changelog( self, repository, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/view_changelog?id=%s' % self.security.encode_id( repository.id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision=None, changeset_revision=None ):
+        strings_displayed = [ depends_on_repository.name, depends_on_repository.user.username ]
+        if depends_on_changeset_revision:
+            strings_displayed.append( depends_on_changeset_revision )
+        self.display_manage_repository_page( repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed )
+
+    def check_repository_metadata( self, repository, tip_only=True ):
+        if tip_only:
+            assert self.tip_has_metadata( repository ) and len( self.get_repository_metadata_revisions( repository ) ) == 1, \
+                'Repository tip is not a metadata revision: Repository tip - %s, metadata revisions - %s.' % \
+                ( self.get_repository_tip( repository ), ', '.join( self.get_repository_metadata_revisions( repository ) ) )
+        else:
+            assert len( self.get_repository_metadata_revisions( repository ) ) > 0, \
+                'Repository tip is not a metadata revision: Repository tip - %s, metadata revisions - %s.' % \
+                ( self.get_repository_tip( repository ), ', '.join( self.get_repository_metadata_revisions( repository ) ) )
+
+    def check_repository_tools_for_changeset_revision( self, repository, changeset_revision, tool_metadata_strings_displayed=None, tool_page_strings_displayed=None ):
+        '''
+        Loop through each tool dictionary in the repository metadata associated with the received changeset_revision.
+        For each of these, check for a tools attribute, and load the tool metadata page if it exists, then display that tool's page.
+        '''
+        test_db_util.refresh( repository )
+        repository_metadata = self.get_repository_metadata_by_changeset_revision( repository, changeset_revision )
+        metadata = repository_metadata.metadata
+        if 'tools' not in metadata:
+            raise AssertionError( 'No tools in %s revision %s.' % ( repository.name, changeset_revision ) )
+        for tool_dict in metadata[ 'tools' ]:
+            tool_id = tool_dict[ 'id' ]
+            tool_xml = tool_dict[ 'tool_config' ]
+            url = '/repository/view_tool_metadata?repository_id=%s&changeset_revision=%s&tool_id=%s' % \
+                  ( self.security.encode_id( repository.id ), changeset_revision, tool_id )
+            self.visit_url( url )
+            self.check_for_strings( tool_metadata_strings_displayed )
+            self.load_display_tool_page( repository, tool_xml_path=tool_xml,
+                                         changeset_revision=changeset_revision,
+                                         strings_displayed=tool_page_strings_displayed,
+                                         strings_not_displayed=None )
+
+    def check_repository_invalid_tools_for_changeset_revision( self, repository, changeset_revision, strings_displayed=None, strings_not_displayed=None ):
+        '''Load the invalid tool page for each invalid tool associated with this changeset revision and verify the received error messages.'''
+        repository_metadata = self.get_repository_metadata_by_changeset_revision( repository, changeset_revision )
+        metadata = repository_metadata.metadata
+        assert 'invalid_tools' in metadata, 'Metadata for changeset revision %s does not define invalid tools' % changeset_revision
+        for tool_xml in metadata[ 'invalid_tools' ]:
+            self.load_invalid_tool_page( repository,
+                                         tool_xml=tool_xml,
+                                         changeset_revision=changeset_revision,
+                                         strings_displayed=strings_displayed,
+                                         strings_not_displayed=strings_not_displayed )
+
+    def check_string_count_in_page( self, pattern, min_count, max_count=None ):
+        """Checks the number of 'pattern' occurrences in the current browser page"""
+        page = self.last_page()
+        pattern_count = page.count( pattern )
+        if max_count is None:
+            max_count = min_count
+        # The number of occurrences of pattern in the page should be between min_count
+        # and max_count, so show error if pattern_count is less than min_count or greater
+        # than max_count.
+        if pattern_count < min_count or pattern_count > max_count:
+            fname = self.write_temp_file( page )
+            errmsg = "%i occurrences of '%s' found (min. %i, max. %i).\npage content written to '%s' " % \
+                     ( pattern_count, pattern, min_count, max_count, fname )
+            raise AssertionError( errmsg )
+
+    def clone_repository( self, repository, destination_path ):
+        url = '%s/repos/%s/%s' % ( self.url, repository.user.username, repository.name )
+        success, message = hg_util.clone_repository( url, destination_path, self.get_repository_tip( repository ) )
+        assert success is True, message
+
+    def commit_and_push( self, repository, hgrepo, options, username, password ):
+        url = 'http://%s:%s@%s:%s/repos/%s/%s' % ( username, password, self.host, self.port, repository.user.username, repository.name )
+        commands.commit( ui.ui(), hgrepo, **options )
+        #  Try pushing multiple times as it transiently fails on Jenkins.
+        #  TODO: Figure out why that happens
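+        #  Any exception from the push other than the disabled-push message is
+        #  swallowed and the push is simply retried, up to five attempts in total.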
+        for i in range(5):
+            try:
+                commands.push( ui.ui(), hgrepo, dest=url )
+            except Exception as e:
+                if str(e).find('Pushing to Tool Shed is disabled') != -1:
+                    return False
+                last_error = e
+            else:
+                return True
+        raise last_error
+
+    def create_category( self, **kwd ):
+        category = test_db_util.get_category_by_name( kwd[ 'name' ] )
+        if category is None:
+            self.visit_url( '/admin/manage_categories?operation=create' )
+            self.submit_form( form_no=1, button="create_category_button", **kwd )
+            category = test_db_util.get_category_by_name( kwd[ 'name' ] )
+        return category
+
+    def create_repository_dependency( self,
+                                      repository=None,
+                                      repository_tuples=[],
+                                      filepath=None,
+                                      prior_installation_required=False,
+                                      complex=False,
+                                      package=None,
+                                      version=None,
+                                      strings_displayed=None,
+                                      strings_not_displayed=None ):
+        repository_names = []
+        if complex:
+            filename = 'tool_dependencies.xml'
+            self.generate_complex_dependency_xml( filename=filename, filepath=filepath, repository_tuples=repository_tuples, package=package, version=version )
+        else:
+            for toolshed_url, name, owner, changeset_revision in repository_tuples:
+                repository_names.append( name )
+            dependency_description = '%s depends on %s.' % ( repository.name, ', '.join( repository_names ) )
+            filename = 'repository_dependencies.xml'
+            self.generate_simple_dependency_xml( repository_tuples=repository_tuples,
+                                                 filename=filename,
+                                                 filepath=filepath,
+                                                 dependency_description=dependency_description,
+                                                 prior_installation_required=prior_installation_required )
+        self.upload_file( repository,
+                          filename=filename,
+                          filepath=filepath,
+                          valid_tools_only=False,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded dependency on %s.' % ', '.join( repository_names ),
+                          strings_displayed=None,
+                          strings_not_displayed=None )
+
+    def create_repository_review( self, repository, review_contents_dict, changeset_revision=None, copy_from=None ):
+        strings_displayed = []
+        if not copy_from:
+            strings_displayed.append( 'Begin your review' )
+        strings_not_displayed = []
+        if not changeset_revision:
+            changeset_revision = self.get_repository_tip( repository )
+        url = '/repository_review/create_review?changeset_revision=%s&id=%s' % ( changeset_revision, self.security.encode_id( repository.id ) )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        strings_displayed = []
+        if copy_from:
+            old_changeset_revision, review_id = copy_from
+            strings_displayed = [ 'You have elected to create a new review', 'Select previous revision', changeset_revision ]
+            self.check_for_strings( strings_displayed )
+            strings_displayed = []
+            url = '/repository_review/create_review?changeset_revision=%s&id=%s&previous_review_id=%s' % \
+                ( self.get_repository_tip( repository ), self.security.encode_id( repository.id ), self.security.encode_id( review_id ) )
+            self.visit_url( url )
+        self.fill_review_form( review_contents_dict, strings_displayed, strings_not_displayed )
+
+    def create_user_in_galaxy( self, cntrller='user', email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
+        self.visit_galaxy_url( "/user/create?cntrller=%s&use_panels=False" % cntrller )
+        self.submit_form( '1', 'create_user_button', email=email, password=password, confirm=password, username=username, redirect=redirect )
+        previously_created = False
+        username_taken = False
+        invalid_username = False
+        try:
+            self.check_page_for_string( "Created new user account" )
+        except:
+            try:
+                # May have created the account in a previous test run...
+                self.check_page_for_string( "User with that email already exists" )
+                previously_created = True
+            except:
+                try:
+                    self.check_page_for_string( 'Public name is taken; please choose another' )
+                    username_taken = True
+                except:
+                    try:
+                        # Note that we're only checking that the username is at least 4 characters long here...
+                        self.check_page_for_string( 'Public name must be at least 4 characters in length' )
+                        invalid_username = True
+                    except:
+                        pass
+        return previously_created, username_taken, invalid_username
+
+    def deactivate_repository( self, installed_repository, strings_displayed=None, strings_not_displayed=None ):
+        url = '/admin_toolshed/deactivate_or_uninstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        form = tc.browser.get_form( 'deactivate_or_uninstall_repository' )
+        self.set_form_value( form, {}, 'remove_from_disk', False )
+        tc.submit( 'deactivate_or_uninstall_repository_button' )
+        strings_displayed = [ 'The repository named', 'has been deactivated' ]
+        self.check_for_strings( strings_displayed, strings_not_displayed=None )
+
+    def delete_files_from_repository( self, repository, filenames=[], strings_displayed=[ 'were deleted from the repository' ], strings_not_displayed=None ):
+        files_to_delete = []
+        basepath = self.get_repo_path( repository )
+        repository_files = self.get_repository_file_list( repository=repository, base_path=basepath, current_path=None )
+        # Verify that the files to delete actually exist in the repository.
+        for filename in repository_files:
+            if filename in filenames:
+                files_to_delete.append( os.path.join( basepath, filename ) )
+        self.browse_repository( repository )
+        # Twill sets hidden form fields to read-only by default. We need to write to this field.
+        form = tc.browser.get_form( 'select_files_to_delete' )
+        form.find_control( "selected_files_to_delete" ).readonly = False
+        tc.fv( "2", "selected_files_to_delete", ','.join( files_to_delete ) )
+        tc.submit( 'select_files_to_delete_button' )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def delete_repository( self, repository ):
+        repository_id = self.security.encode_id( repository.id )
+        self.visit_url( '/admin/browse_repositories' )
+        url = '/admin/browse_repositories?operation=Delete&id=%s' % repository_id
+        strings_displayed = [ 'Deleted 1 repository', repository.name ]
+        strings_not_displayed = []
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_all_workflows( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/workflow'
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_galaxy_browse_repositories_page( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/admin_toolshed/browse_repositories'
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_installed_manage_data_manager_page( self, installed_repository, data_manager_names=None, strings_displayed=None, strings_not_displayed=None ):
+        data_managers = installed_repository.metadata.get( 'data_manager', {} ).get( 'data_managers', {} )
+        if data_manager_names:
+            if not isinstance( data_manager_names, list ):
+                data_manager_names = [data_manager_names]
+            for data_manager_name in data_manager_names:
+                assert data_manager_name in data_managers, "The requested Data Manager '%s' was not found in repository metadata." % data_manager_name
+        else:
+            data_manager_names = list( data_managers.keys() )
+        for data_manager_name in data_manager_names:
+            url = '/data_manager/manage_data_manager?id=%s' % data_managers[data_manager_name]['guid']
+            self.visit_galaxy_url( url )
+            self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_installed_repository_manage_page( self, installed_repository, strings_displayed=None, strings_not_displayed=None ):
+        if strings_displayed is None:
+            strings_displayed = []
+        if strings_not_displayed is None:
+            strings_not_displayed = []
+        url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
+        self.visit_galaxy_url( url )
+        strings_displayed.append( str( installed_repository.installed_changeset_revision ) )
+        # Single quotes appear HTML-escaped inside attributes on the rendered page, so quote them the same way here.
+        strings_displayed = [ x.replace( "'", "&#39;" ) for x in strings_displayed ]
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_installed_workflow_image( self, repository, workflow_name, strings_displayed=None, strings_not_displayed=None ):
+        url = '/admin_toolshed/generate_workflow_image?repository_id=%s&workflow_name=%s' % \
+              ( self.security.encode_id( repository.id ), tool_shed_encode( workflow_name ) )
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_manage_repository_page( self, repository, changeset_revision=None, strings_displayed=None, strings_not_displayed=None ):
+        base_url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
+        if changeset_revision:
+            url = '%s&changeset_revision=%s' % ( base_url, changeset_revision )
+        else:
+            changeset_revision = self.get_repository_tip( repository )
+            url = base_url
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_repository_clone_page( self, owner_name, repository_name, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repos/%s/%s' % ( owner_name, repository_name )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_repository_file_contents( self, repository, filename, filepath=None, strings_displayed=None, strings_not_displayed=None ):
+        '''Find a file in the repository and display the contents.'''
+        basepath = self.get_repo_path( repository )
+        repository_file_list = []
+        if filepath:
+            relative_path = os.path.join( basepath, filepath )
+        else:
+            relative_path = basepath
+        repository_file_list = self.get_repository_file_list( repository=repository, base_path=relative_path, current_path=None )
+        assert filename in repository_file_list, 'File %s not found in the repository under %s.' % ( filename, relative_path )
+        params = dict( file_path=os.path.join( relative_path, filename ), repository_id=self.security.encode_id( repository.id ) )
+        url = '/repository/get_file_contents'
+        self.visit_url( url, params=params )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_reviewed_repositories_owned_by_user( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository_review/reviewed_repositories_i_own'
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def display_repository_reviews_by_user( self, user, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository_review/repository_reviews_by_user?id=%s' % self.security.encode_id( user.id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def edit_repository_categories( self, repository, categories_to_add=[], categories_to_remove=[], restore_original=True ):
+        url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
+        self.visit_url( url )
+        strings_displayed = []
+        strings_not_displayed = []
+        for category in categories_to_add:
+            tc.fv( "2", "category_id", '+%s' % category)
+            strings_displayed.append( "selected>%s" % category )
+        for category in categories_to_remove:
+            tc.fv( "2", "category_id", '-%s' % category)
+            strings_not_displayed.append( "selected>%s" % category )
+        tc.submit( "manage_categories_button" )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        if restore_original:
+            strings_displayed = []
+            strings_not_displayed = []
+            for category in categories_to_remove:
+                tc.fv( "2", "category_id", '+%s' % category)
+                strings_displayed.append( "selected>%s" % category )
+            for category in categories_to_add:
+                tc.fv( "2", "category_id", '-%s' % category)
+                strings_not_displayed.append( "selected>%s" % category )
+            tc.submit( "manage_categories_button" )
+            self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def edit_repository_information( self, repository, revert=True, **kwd ):
+        url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
+        self.visit_url( url )
+        original_information = dict( repo_name=repository.name, description=repository.description, long_description=repository.long_description )
+        strings_displayed = []
+        for input_elem_name in [ 'repo_name', 'description', 'long_description', 'repository_type' ]:
+            if input_elem_name in kwd:
+                tc.fv( "edit_repository", input_elem_name, kwd[ input_elem_name ] )
+                strings_displayed.append( self.escape_html( kwd[ input_elem_name ] ) )
+        tc.submit( "edit_repository_button" )
+        self.check_for_strings( strings_displayed )
+        if revert:
+            strings_displayed = []
+            for input_elem_name in [ 'repo_name', 'description', 'long_description' ]:
+                tc.fv( "edit_repository", input_elem_name, original_information[ input_elem_name ] )
+                strings_displayed.append( self.escape_html( original_information[ input_elem_name ] ) )
+            tc.submit( "edit_repository_button" )
+            self.check_for_strings( strings_displayed )
+
+    def enable_email_alerts( self, repository, strings_displayed=None, strings_not_displayed=None ):
+        repository_id = self.security.encode_id( repository.id )
+        params = dict( operation='Receive email alerts', id=repository_id )
+        self.visit_url( '/repository/browse_repositories', params )
+        self.check_for_strings( strings_displayed )
+
+    def escape_html( self, string, unescape=False ):
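+        # Maps characters to the entity references twill sees in rendered pages;
+        # unescape=True reverses the mapping. Illustrative: escape_html( "it's" )
+        # yields "it&#39;s", and escape_html( "it&#39;s", unescape=True ) undoes it.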
+        html_entities = [ ( '&', 'X' ), ( "'", '&#39;' ), ( '"', '&#34;' ) ]
+        for character, replacement in html_entities:
+            if unescape:
+                string = string.replace( replacement, character )
+            else:
+                string = string.replace( character, replacement )
+        return string
+
+    def expect_repo_created_strings( self, name ):
+        return [
+            'Repository <b>%s</b>' % name,
+            'Repository <b>%s</b> has been created' % name,
+        ]
+
+    def export_capsule( self, repository, aggressive=True, includes_dependencies=None ):
+        # TODO: Remove this method and restore _export_capsule as export_capsule
+        # after the transient problem is fixed.
+        if not aggressive:
+            return self._export_capsule(repository, includes_dependencies=includes_dependencies)
+        else:
+            try:
+                return self._export_capsule(repository, includes_dependencies=includes_dependencies)
+            except Exception:
+                # Empirically this fails occasionally; we don't yet know why.
+                time.sleep(1)
+                return self._export_capsule( repository, includes_dependencies=includes_dependencies)
+
+    def _export_capsule( self, repository, includes_dependencies=None ):
+        url = '/repository/export?repository_id=%s&changeset_revision=%s' % \
+            ( self.security.encode_id( repository.id ), self.get_repository_tip( repository ) )
+        self.visit_url( url )
+        log.info( "Visited url %s looking for export capsule button" % url )
+        self.check_page_for_string( "Repository '" )
+        self.check_page_for_string( "Export" )
+        # Explicit check for True/False since None means we don't know if this
+        # includes dependencies and so we skip both checks...
+        if includes_dependencies is True:
+            self.check_page_for_string( "Export repository dependencies?" )
+        elif includes_dependencies is False:
+            self.check_page_for_string( "No repository dependencies are defined for revision" )
+        self.submit_form( 'export_repository', 'export_repository_button' )
+        fd, capsule_filename = tempfile.mkstemp()
+        os.close( fd )
+        with open( capsule_filename, 'w' ) as f:
+            f.write( self.last_page() )
+        return capsule_filename
+
+    def fetch_repository_metadata( self, repository, strings_displayed=None, strings_not_displayed=None ):
+        url = '/api/repositories/%s/metadata' % self.security.encode_id( repository.id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def fill_review_form( self, review_contents_dict, strings_displayed=None, strings_not_displayed=None ):
+        kwd = dict()
+        changed = False
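+        # Review form fields are named '<component label>__ESEP__<field>'. An
+        # illustrative review_contents_dict entry:
+        #     { 'Functional tests': { 'comment': 'Tests pass.', 'rating': '5', 'approved': 'yes' } }
+        # expands to the '__ESEP__comment', '__ESEP__rating' and '__ESEP__approved'
+        # keys built below.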
+        for label, contents in review_contents_dict.items():
+            if contents:
+                changed = True
+                kwd[ '%s__ESEP__comment' % label ] = contents[ 'comment' ]
+                kwd[ '%s__ESEP__rating' % label ] = contents[ 'rating' ]
+                if 'private' in contents:
+                    kwd[ '%s__ESEP__private' % label ] = contents[ 'private' ]
+                kwd[ '%s__ESEP__approved' % label ] = contents[ 'approved' ]
+            else:
+                kwd[ '%s__ESEP__approved' % label ] = 'not_applicable'
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        self.submit_form( 1, 'Workflows__ESEP__review_button', **kwd )
+        if changed:
+            strings_displayed.append( 'Reviews were saved' )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def galaxy_login( self, email='test@bx.psu.edu', password='testuser', username='admin-user', redirect='', logout_first=True ):
+        if logout_first:
+            self.galaxy_logout()
+        previously_created, username_taken, invalid_username = \
+            self.create_user_in_galaxy( email=email, password=password, username=username, redirect=redirect )
+        if previously_created:
+            self.visit_galaxy_url( "/user/login?use_panels=False" )
+            self.submit_form( '1', 'login_button', login=email, redirect=redirect, password=password )
+
+    def galaxy_logout( self ):
+        self.visit_galaxy_url( "/user/logout" )
+        self.check_page_for_string( "You have been logged out" )
+        tc.browser.cj.clear()
+
+    def generate_complex_dependency_xml( self, filename, filepath, repository_tuples, package, version ):
+        file_path = os.path.join( filepath, filename )
+        dependency_entries = []
+        template = string.Template( common.new_repository_dependencies_line )
+        for toolshed_url, name, owner, changeset_revision in repository_tuples:
+            dependency_entries.append( template.safe_substitute( toolshed_url=toolshed_url,
+                                                                 owner=owner,
+                                                                 repository_name=name,
+                                                                 changeset_revision=changeset_revision,
+                                                                 prior_installation_required='' ) )
+        if not os.path.exists( filepath ):
+            os.makedirs( filepath )
+        dependency_template = string.Template( common.complex_repository_dependency_template )
+        repository_dependency_xml = dependency_template.safe_substitute( package=package, version=version, dependency_lines='\n'.join( dependency_entries ) )
+        # Save the generated xml to the specified location.
+        with open( file_path, 'w' ) as f:
+            f.write( repository_dependency_xml )
+
+    def generate_simple_dependency_xml( self,
+                                        repository_tuples,
+                                        filename,
+                                        filepath,
+                                        dependency_description='',
+                                        complex=False,
+                                        package=None,
+                                        version=None,
+                                        prior_installation_required=False ):
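+        # Each entry in repository_tuples is a 4-tuple of
+        # ( toolshed_url, name, owner, changeset_revision ), as unpacked below.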
+        if not os.path.exists( filepath ):
+            os.makedirs( filepath )
+        dependency_entries = []
+        if prior_installation_required:
+            prior_installation_value = ' prior_installation_required="True"'
+        else:
+            prior_installation_value = ''
+        for toolshed_url, name, owner, changeset_revision in repository_tuples:
+            template = string.Template( common.new_repository_dependencies_line )
+            dependency_entries.append( template.safe_substitute( toolshed_url=toolshed_url,
+                                                                 owner=owner,
+                                                                 repository_name=name,
+                                                                 changeset_revision=changeset_revision,
+                                                                 prior_installation_required=prior_installation_value ) )
+        if dependency_description:
+            description = ' description="%s"' % dependency_description
+        else:
+            description = dependency_description
+        template_parser = string.Template( common.new_repository_dependencies_xml )
+        repository_dependency_xml = template_parser.safe_substitute( description=description, dependency_lines='\n'.join( dependency_entries ) )
+        # Save the generated xml to the specified location.
+        full_path = os.path.join( filepath, filename )
+        with open( full_path, 'w' ) as f:
+            f.write( repository_dependency_xml )
+
+    def generate_temp_path( self, test_script_path, additional_paths=[] ):
+        temp_path = os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+        if not os.path.exists( temp_path ):
+            os.makedirs( temp_path )
+        return temp_path
+
+    def get_datatypes_count( self ):
+        url = '/api/datatypes?upload_only=false'
+        self.visit_galaxy_url( url )
+        html = self.last_page()
+        datatypes = loads( html )
+        return len( datatypes )
+
+    def get_env_sh_path( self, tool_dependency_name, tool_dependency_version, repository ):
+        '''Return the absolute path to an installed repository's env.sh file.'''
+        env_sh_path = os.path.join( self.get_tool_dependency_path( tool_dependency_name, tool_dependency_version, repository ),
+                                    'env.sh' )
+        return env_sh_path
+
+    def get_filename( self, filename, filepath=None ):
+        if filepath is not None:
+            return os.path.abspath( os.path.join( filepath, filename ) )
+        else:
+            return os.path.abspath( os.path.join( self.file_dir, filename ) )
+
+    def get_hg_repo( self, path ):
+        return hg.repository( ui.ui(), path )
+
+    def get_last_reviewed_revision_by_user( self, user, repository ):
+        changelog_tuples = self.get_repository_changelog_tuples( repository )
+        reviews = test_db_util.get_reviews_ordered_by_changeset_revision( repository.id, changelog_tuples, reviewer_user_id=user.id )
+        if reviews:
+            last_review = reviews[ -1 ]
+        else:
+            last_review = None
+        return last_review
+
+    def get_repositories_category_api( self, categories, strings_displayed=None, strings_not_displayed=None ):
+        for category in categories:
+            url = '/api/categories/%s/repositories' % self.security.encode_id( category.id )
+            self.visit_url( url )
+            self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def get_tool_dependency_path( self, tool_dependency_name, tool_dependency_version, repository ):
+        '''Return the absolute path for an installed tool dependency.'''
+        return os.path.join( self.galaxy_tool_dependency_dir,
+                             tool_dependency_name,
+                             tool_dependency_version,
+                             repository.owner,
+                             repository.name,
+                             repository.installed_changeset_revision )
+
+    def get_or_create_repository( self, owner=None, strings_displayed=None, strings_not_displayed=None, **kwd ):
+        # If not checking for a specific string, it should be safe to assume that
+        # we expect repository creation to be successful.
+        if strings_displayed is None:
+            strings_displayed = [ 'Repository', kwd[ 'name' ], 'has been created' ]
+        if strings_not_displayed is None:
+            strings_not_displayed = []
+        repository = test_db_util.get_repository_by_name_and_owner( kwd[ 'name' ], owner )
+        if repository is None:
+            self.visit_url( '/repository/create_repository' )
+            self.submit_form( 1, 'create_repository_button', **kwd )
+            self.check_for_strings( strings_displayed, strings_not_displayed )
+            repository = test_db_util.get_repository_by_name_and_owner( kwd[ 'name' ], owner )
+        return repository
+
+    def get_repo_path( self, repository ):
+        # An entry in the hgweb.config file looks something like: repos/test/mira_assembler = database/community_files/000/repo_123
+        lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
+        try:
+            return self.hgweb_config_manager.get_entry( lhs )
+        except:
+            raise Exception( "Entry for repository %s missing in hgweb config file %s." % ( lhs, self.hgweb_config_manager.hgweb_config ) )
+
+    def get_repository_changelog_tuples( self, repository ):
+        repo = self.get_hg_repo( self.get_repo_path( repository ) )
+        changelog_tuples = []
+        for changeset in repo.changelog:
+            ctx = repo.changectx( changeset )
+            changelog_tuples.append( ( ctx.rev(), ctx ) )
+        return changelog_tuples
+
+    def get_repository_datatypes_count( self, repository ):
+        metadata = self.get_repository_metadata( repository )[0].metadata
+        if 'datatypes' not in metadata:
+            return 0
+        else:
+            return len( metadata[ 'datatypes' ] )
+
+    def get_repository_file_list( self, repository, base_path, current_path=None ):
+        '''Recursively load repository folder contents and append them to a list. Similar to os.walk but via /repository/open_folder.'''
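+        # The returned paths are relative to base_path; an illustrative result:
+        #     [ 'tool.xml', 'test-data', 'test-data/input.fastq' ]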
+        if current_path is None:
+            request_param_path = base_path
+        else:
+            request_param_path = os.path.join( base_path, current_path )
+        # Get the current folder's contents.
+        params = dict( folder_path=request_param_path, repository_id=self.security.encode_id( repository.id ) )
+        url = '/repository/open_folder'
+        self.visit_url( url, params=params )
+        file_list = loads( self.last_page() )
+        returned_file_list = []
+        if current_path is not None:
+            returned_file_list.append( current_path )
+        # Loop through the json dict returned by /repository/open_folder.
+        for file_dict in file_list:
+            if file_dict[ 'isFolder' ]:
+                # This is a folder. Get the contents of the folder and append it to the list,
+                # prefixed with the path relative to the repository root, if any.
+                if current_path is None:
+                    returned_file_list.extend( self.get_repository_file_list( repository=repository, base_path=base_path, current_path=file_dict[ 'title' ] ) )
+                else:
+                    sub_path = os.path.join( current_path, file_dict[ 'title' ] )
+                    returned_file_list.extend( self.get_repository_file_list( repository=repository, base_path=base_path, current_path=sub_path ) )
+            else:
+                # This is a regular file, prefix the filename with the current path and append it to the list.
+                if current_path is not None:
+                    returned_file_list.append( os.path.join( current_path, file_dict[ 'title' ] ) )
+                else:
+                    returned_file_list.append( file_dict[ 'title' ] )
+        return returned_file_list
+
+    def get_repository_metadata( self, repository ):
+        return [ metadata_revision for metadata_revision in repository.metadata_revisions ]
+
+    def get_repository_metadata_by_changeset_revision( self, repository, changeset_revision ):
+        return test_db_util.get_repository_metadata_for_changeset_revision( repository.id, changeset_revision )
+
+    def get_repository_metadata_revisions( self, repository ):
+        return [ str( repository_metadata.changeset_revision ) for repository_metadata in repository.metadata_revisions ]
+
+    def get_repository_tip( self, repository ):
+        repo = self.get_hg_repo( self.get_repo_path( repository ) )
+        return str( repo.changectx( repo.changelog.tip() ) )
+
+    def get_sniffers_count( self ):
+        url = '/api/datatypes/sniffers'
+        self.visit_galaxy_url( url )
+        html = self.last_page()
+        sniffers = loads( html )
+        return len( sniffers )
+
+    def get_tools_from_repository_metadata( self, repository, include_invalid=False ):
+        '''Get a list of valid and (optionally) invalid tool dicts from the repository metadata.'''
+        valid_tools = []
+        invalid_tools = []
+        for repository_metadata in repository.metadata_revisions:
+            if 'tools' in repository_metadata.metadata:
+                valid_tools.append( dict( tools=repository_metadata.metadata[ 'tools' ], changeset_revision=repository_metadata.changeset_revision ) )
+            if include_invalid and 'invalid_tools' in repository_metadata.metadata:
+                invalid_tools.append( dict( tools=repository_metadata.metadata[ 'invalid_tools' ], changeset_revision=repository_metadata.changeset_revision ) )
+        return valid_tools, invalid_tools
+
+    def get_tool_panel_section_from_api( self, metadata ):
+        tool_metadata = metadata[ 'tools' ]
+        tool_guid = quote_plus( tool_metadata[ 0 ][ 'guid' ], safe='' )
+        api_url = '/%s' % '/'.join( [ 'api', 'tools', tool_guid ] )
+        self.visit_galaxy_url( api_url )
+        tool_dict = loads( self.last_page() )
+        tool_panel_section = tool_dict[ 'panel_section_name' ]
+        return tool_panel_section
+
+    def get_tool_panel_section_from_repository_metadata( self, metadata ):
+        tool_metadata = metadata[ 'tools' ]
+        tool_guid = tool_metadata[ 0 ][ 'guid' ]
+        assert 'tool_panel_section' in metadata, 'Tool panel section not found in metadata: %s' % metadata
+        tool_panel_section_metadata = metadata[ 'tool_panel_section' ]
+        # tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
+        #                           id=section_id,
+        #                           name=section_name,
+        #                           version=section_version )
+        # This dict is appended to tool_panel_section_metadata[ tool_guid ]
+        tool_panel_section = tool_panel_section_metadata[ tool_guid ][ 0 ][ 'name' ]
+        return tool_panel_section
+
+    def grant_role_to_user( self, user, role ):
+        strings_displayed = [ self.security.encode_id( role.id ), role.name ]
+        strings_not_displayed = []
+        self.visit_url( '/admin/roles' )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        params = dict( operation='manage users and groups', id=self.security.encode_id( role.id ) )
+        url = '/admin/roles'
+        self.visit_url( url, params )
+        strings_displayed = [ common.test_user_1_email, common.test_user_2_email ]
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        # As elsewhere, twill cannot submit this form because it does not execute the javascript
+        # attached to the role selection form. Visit the action url directly with the necessary parameters.
+        params = dict( id=self.security.encode_id( role.id ),
+                       in_users=user.id,
+                       operation='manage users and groups',
+                       role_members_edit_button='Save' )
+        url = '/admin/manage_users_and_groups_for_role'
+        self.visit_url( url, params )
+        strings_displayed = [ "Role '%s' has been updated" % role.name ]
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def grant_write_access( self,
+                            repository,
+                            usernames=[],
+                            strings_displayed=None,
+                            strings_not_displayed=None,
+                            post_submit_strings_displayed=None,
+                            post_submit_strings_not_displayed=None ):
+        self.display_manage_repository_page( repository )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        for username in usernames:
+            tc.fv( "user_access", "allow_push", '+%s' % username )
+        tc.submit( 'user_access_button' )
+        self.check_for_strings( post_submit_strings_displayed, post_submit_strings_not_displayed )
+
+    def import_capsule( self, filename, strings_displayed=None, strings_not_displayed=None,
+                        strings_displayed_after_submit=[], strings_not_displayed_after_submit=[] ):
+        url = '/repository/upload_capsule'
+        self.visit_url( url )
+        tc.formfile( 'upload_capsule', 'file_data', filename )
+        tc.submit( 'upload_capsule_button' )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        self.submit_form( 'import_capsule', 'import_capsule_button' )
+        self.check_for_strings( strings_displayed_after_submit, strings_not_displayed_after_submit )
+
+    def import_workflow( self, repository, workflow_name, strings_displayed=None, strings_not_displayed=None ):
+        if strings_displayed is None:
+            strings_displayed = []
+        if strings_not_displayed is None:
+            strings_not_displayed = []
+        url = '/admin_toolshed/import_workflow?repository_id=%s&workflow_name=%s' % \
+            ( self.security.encode_id( repository.id ), tool_shed_encode( workflow_name ) )
+        self.visit_galaxy_url( url )
+        if workflow_name not in strings_displayed:
+            strings_displayed.append( workflow_name )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def initiate_installation_process( self,
+                                       install_tool_dependencies=False,
+                                       install_repository_dependencies=True,
+                                       no_changes=True,
+                                       new_tool_panel_section_label=None ):
+        html = self.last_page()
+        # Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
+        # installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
+        # group(2), and the reinstalling flag in group(3) and pass them to the manage_repositories method in the Galaxy
+        # admin_toolshed controller.
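+        # The page is expected to embed a call of this shape (values hypothetical):
+        #     initiate_repository_installation( "abc123def456", "<encoded kwd>", "False" );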
+        install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
+        if install_parameters:
+            iri_ids = install_parameters.group(1)
+            # In some cases, the returned iri_ids are of the form: "[u'<encoded id>', u'<encoded id>']"
+            # This regex ensures that non-hex characters are stripped out of the list, so that galaxy.util.listify/decode_id
+            # will handle them correctly. It's safe to pass the cleaned list to manage_repositories, because it can parse
+            # comma-separated values.
+            repository_ids = str( iri_ids )
+            repository_ids = re.sub( '[^a-fA-F0-9,]+', '', repository_ids )
+            encoded_kwd = install_parameters.group(2)
+            reinstalling = install_parameters.group(3)
+            url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
+                ( ','.join( galaxy.util.listify( repository_ids ) ), encoded_kwd, reinstalling )
+            self.visit_galaxy_url( url )
+            return galaxy.util.listify( repository_ids )
+
+    def install_repositories_from_search_results( self, repositories, install_tool_dependencies=False,
+                                                  strings_displayed=None, strings_not_displayed=None, **kwd ):
+        '''
+        Normally, it would be possible to check the appropriate boxes in the search results, and click the install button. This works
+        in a browser, but Twill manages to lose the 'toolshedgalaxyurl' cookie between one page and the next, so it's necessary to work
+        around this by explicitly visiting the prepare_for_install method on the Galaxy side.
+        '''
+        url = '/admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
+            ( self.url, ','.join( self.security.encode_id( repository.id ) for repository in repositories ),
+              ','.join( self.get_repository_tip( repository ) for repository in repositories ) )
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        if 'install_tool_dependencies' in self.last_page():
+            form = tc.browser.get_form( 'select_tool_panel_section' )
+            checkbox = form.find_control( id="install_tool_dependencies" )
+            checkbox.disabled = False
+            if install_tool_dependencies:
+                checkbox.selected = True
+                kwd[ 'install_tool_dependencies' ] = 'True'
+            else:
+                checkbox.selected = False
+                kwd[ 'install_tool_dependencies' ] = 'False'
+        self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
+        repository_ids = self.initiate_installation_process()
+        self.wait_for_repository_installation( repository_ids )
+
+    def install_repository( self, name, owner, category_name, install_resolver_dependencies=False, install_tool_dependencies=False,
+                            install_repository_dependencies=True, changeset_revision=None,
+                            strings_displayed=None, strings_not_displayed=None, preview_strings_displayed=None,
+                            post_submit_strings_displayed=None, new_tool_panel_section_label=None, includes_tools_for_display_in_tool_panel=True,
+                            **kwd ):
+        self.browse_tool_shed( url=self.url )
+        self.browse_category( test_db_util.get_category_by_name( category_name ) )
+        self.preview_repository_in_tool_shed( name, owner, strings_displayed=preview_strings_displayed )
+        repository = test_db_util.get_repository_by_name_and_owner( name, owner )
+        repository_id = self.security.encode_id( repository.id )
+        if changeset_revision is None:
+            changeset_revision = self.get_repository_tip( repository )
+        url = '/repository/install_repositories_by_revision?changeset_revisions=%s&repository_ids=%s&galaxy_url=%s' % \
+              ( changeset_revision, repository_id, self.galaxy_url )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        # This section is tricky, due to the way twill handles form submission. The tool dependency checkbox needs to
+        # be hacked in through tc.browser, putting the form field in kwd doesn't work.
+        form = tc.browser.get_form( 'select_tool_panel_section' )
+        if form is None:
+            form = tc.browser.get_form( 'select_shed_tool_panel_config' )
+        assert form is not None, 'Could not find form select_shed_tool_panel_config or select_tool_panel_section.'
+        kwd = self.set_form_value( form, kwd, 'install_tool_dependencies', install_tool_dependencies )
+        kwd = self.set_form_value( form, kwd, 'install_repository_dependencies', install_repository_dependencies )
+        kwd = self.set_form_value( form, kwd, 'install_resolver_dependencies', install_resolver_dependencies )
+        kwd = self.set_form_value( form, kwd, 'shed_tool_conf', self.shed_tool_conf )
+        if new_tool_panel_section_label is not None:
+            kwd = self.set_form_value( form, kwd, 'new_tool_panel_section_label', new_tool_panel_section_label )
+        submit_button_control = form.find_control( type='submit' )
+        assert submit_button_control is not None, 'No submit button found for form %s.' % form.attrs.get( 'id' )
+        self.submit_form( form.attrs.get( 'id' ), str( submit_button_control.name ), **kwd )
+        self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
+        repository_ids = self.initiate_installation_process( new_tool_panel_section_label=new_tool_panel_section_label )
+        log.debug( 'Waiting for the installation of repository IDs: %s' % str( repository_ids ) )
+        self.wait_for_repository_installation( repository_ids )
+
+    def load_citable_url( self,
+                          username,
+                          repository_name,
+                          changeset_revision,
+                          encoded_user_id,
+                          encoded_repository_id,
+                          strings_displayed=None,
+                          strings_not_displayed=None,
+                          strings_displayed_in_iframe=[],
+                          strings_not_displayed_in_iframe=[] ):
+        url = '%s/view/%s' % ( self.url, username )
+        # If repository name is passed in, append that to the url.
+        if repository_name:
+            url += '/%s' % repository_name
+        if changeset_revision:
+            # Changeset revision should never be provided unless repository name also is.
+            assert repository_name is not None, 'Changeset revision is present, but repository name is not - aborting.'
+            url += '/%s' % changeset_revision
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        # Now load the page that should be displayed inside the iframe and check for strings.
+        if encoded_repository_id:
+            url = '/repository/view_repository?id=%s&operation=view_or_manage_repository' % encoded_repository_id
+            if changeset_revision:
+                url += '&changeset_revision=%s' % changeset_revision
+            self.visit_url( url )
+            self.check_for_strings( strings_displayed_in_iframe, strings_not_displayed_in_iframe )
+        elif encoded_user_id:
+            url = '/repository/browse_repositories?user_id=%s&operation=repositories_by_user' % encoded_user_id
+            self.visit_url( url )
+            self.check_for_strings( strings_displayed_in_iframe, strings_not_displayed_in_iframe )
+
+    def load_changeset_in_tool_shed( self, repository_id, changeset_revision, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/view_changeset?ctx_str=%s&id=%s' % ( changeset_revision, repository_id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def load_checkable_revisions( self, strings_displayed=None, strings_not_displayed=None ):
+        params = urlencode( dict( do_not_test='false',
+                                  downloadable='true',
+                                  includes_tools='true',
+                                  malicious='false',
+                                  missing_test_components='false',
+                                  skip_tool_test='false' ) )
+        api_url = '%s?%s' % ( '/'.join( [ self.url, 'api', 'repository_revisions' ] ), params )
+        self.visit_url( api_url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def load_display_tool_page( self, repository, tool_xml_path, changeset_revision, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/display_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
+              ( self.security.encode_id( repository.id ), tool_xml_path, changeset_revision )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def load_galaxy_tool_migrations_page( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/admin/review_tool_migration_stages'
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
+              ( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def load_page_for_installed_tool( self, tool_guid, strings_displayed=None, strings_not_displayed=None ):
+        url = '/tool_runner?tool_id=%s' % tool_guid
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def load_workflow_image_in_tool_shed( self, repository, workflow_name, changeset_revision=None, strings_displayed=None, strings_not_displayed=None ):
+        if not changeset_revision:
+            changeset_revision = self.get_repository_tip( repository )
+        metadata = self.get_repository_metadata_by_changeset_revision( repository, changeset_revision )
+        if not metadata:
+            raise AssertionError( 'Metadata not found for changeset revision %s.' % changeset_revision )
+        url = '/repository/generate_workflow_image?repository_metadata_id=%s&workflow_name=%s' % \
+              ( self.security.encode_id( metadata.id ), tool_shed_encode( workflow_name ) )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def manage_review_components( self, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository_review/manage_components'
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def preview_repository_in_tool_shed( self, name, owner, changeset_revision=None, strings_displayed=None, strings_not_displayed=None ):
+        repository = test_db_util.get_repository_by_name_and_owner( name, owner )
+        if not changeset_revision:
+            changeset_revision = self.get_repository_tip( repository )
+        self.visit_url( '/repository/preview_tools_in_changeset?repository_id=%s&changeset_revision=%s' %
+                        ( self.security.encode_id( repository.id ), changeset_revision ) )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def preview_workflow_in_tool_shed( self, repository_name, owner, workflow_name, strings_displayed=None, strings_not_displayed=None ):
+        repository = test_db_util.get_repository_by_name_and_owner( repository_name, owner )
+        metadata = self.get_repository_metadata( repository )
+        url = '/repository/view_workflow?workflow_name=%s&repository_metadata_id=%s' % \
+              ( tool_shed_encode( workflow_name ), self.security.encode_id( metadata[0].id ) )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def reactivate_repository( self, installed_repository ):
+        params = dict( operation='activate or reinstall', id=self.security.encode_id( installed_repository.id ) )
+        url = '/admin_toolshed/browse_repositories'
+        self.visit_galaxy_url( url, params )
+        strings_displayed = [ installed_repository.name, 'repository has been activated' ]
+        self.check_for_strings( strings_displayed, [] )
+
+    def reinstall_repository( self,
+                              installed_repository,
+                              install_repository_dependencies=True,
+                              install_tool_dependencies=False,
+                              no_changes=True,
+                              new_tool_panel_section_label='',
+                              strings_displayed=None,
+                              strings_not_displayed=None ):
+        url = '/admin_toolshed/reselect_tool_panel_section?id=%s' % self.security.encode_id( installed_repository.id )
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed=None )
+        # Build the url that will simulate a filled-out form being submitted. Due to a limitation in twill, the reselect_tool_panel_section
+        # form doesn't get parsed correctly.
+        encoded_repository_id = self.security.encode_id( installed_repository.id )
+        params = dict( id=encoded_repository_id, no_changes=no_changes, new_tool_panel_section_label=new_tool_panel_section_label )
+        doseq = False
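+        # The doubled [ 'True', 'True' ] values below, sent with doseq=True,
+        # presumably mimic Galaxy's checkbox convention of posting a hidden
+        # companion value alongside the checkbox itself.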
+        if install_repository_dependencies:
+            params[ 'install_repository_dependencies' ] = [ 'True', 'True' ]
+            doseq = True
+        else:
+            params[ 'install_repository_dependencies' ] = False
+        if install_tool_dependencies:
+            params[ 'install_tool_dependencies' ] = [ 'True', 'True' ]
+            doseq = True
+        else:
+            params[ 'install_tool_dependencies' ] = False
+        url = '/admin_toolshed/reinstall_repository'
+        self.visit_galaxy_url( url, params=params, doseq=doseq )
+        # Manually initiate the install process, as with installing a repository. See comments in the
+        # initiate_installation_process method for details.
+        repository_ids = self.initiate_installation_process( install_tool_dependencies,
+                                                             install_repository_dependencies,
+                                                             no_changes,
+                                                             new_tool_panel_section_label )
+        # Finally, wait until all repositories are in a final state (either Error or Installed) before returning.
+        self.wait_for_repository_installation( repository_ids )
+
+    def repository_is_new( self, repository ):
+        repo = self.get_hg_repo( self.get_repo_path( repository ) )
+        tip_ctx = repo.changectx( repo.changelog.tip() )
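+        # An empty repository's tip is Mercurial's null changeset, whose local revision number is -1.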
+        return tip_ctx.rev() < 0
+
+    def reset_installed_repository_metadata( self, repository ):
+        url = '/admin_toolshed/reset_repository_metadata?id=%s' % self.security.encode_id( repository.id )
+        self.visit_galaxy_url( url )
+        self.check_for_strings( [ 'Metadata has been reset' ] )
+
+    def reset_metadata_on_selected_repositories( self, repository_ids ):
+        self.visit_url( '/admin/reset_metadata_on_selected_repositories_in_tool_shed' )
+        kwd = dict( repository_ids=repository_ids )
+        self.submit_form( form_no=1, button="reset_metadata_on_selected_repositories_button", **kwd )
+
+    def reset_metadata_on_selected_installed_repositories( self, repository_ids ):
+        self.visit_galaxy_url( '/admin_toolshed/reset_metadata_on_selected_installed_repositories' )
+        kwd = dict( repository_ids=repository_ids )
+        self.submit_form( form_no=1, button="reset_metadata_on_selected_repositories_button", **kwd )
+
+    def reset_repository_metadata( self, repository ):
+        url = '/repository/reset_all_metadata?id=%s' % self.security.encode_id( repository.id )
+        self.visit_url( url )
+        self.check_for_strings( [ 'All repository metadata has been reset.' ] )
+
+    def repair_installed_repository( self, repository ):
+        repository_id = self.security.encode_id( repository.id )
+        url = '/admin_toolshed/repair_repository?id=%s' % repository_id
+        self.visit_galaxy_url( url )
+        self.submit_form( 'repair_repository', 'repair_repository_button' )
+
+    def review_repository( self, repository, review_contents_dict, user=None, changeset_revision=None ):
+        strings_displayed = []
+        strings_not_displayed = []
+        if not changeset_revision:
+            changeset_revision = self.get_repository_tip( repository )
+        assert user is not None, 'Cannot locate a repository review without the user that created it.'
+        review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision )
+        url = '/repository_review/edit_review?id=%s' % self.security.encode_id( review.id )
+        self.visit_url( url )
+        self.fill_review_form( review_contents_dict, strings_displayed, strings_not_displayed )
+
+    def revoke_write_access( self, repository, username ):
+        url = '/repository/manage_repository?user_access_button=Remove&id=%s&remove_auth=%s' % \
+            ( self.security.encode_id( repository.id ), username )
+        self.visit_url( url )
+
+    def search_for_valid_tools( self, search_fields=None, exact_matches=False, strings_displayed=None, strings_not_displayed=None, from_galaxy=False ):
+        search_fields = search_fields or {}
+        if from_galaxy:
+            galaxy_url = '?galaxy_url=%s' % self.galaxy_url
+        else:
+            galaxy_url = ''
+        for field_name, search_string in search_fields.items():
+            url = '/repository/find_tools%s' % galaxy_url
+            self.visit_url( url )
+            tc.fv( "1", "exact_matches", exact_matches )
+            tc.fv( "1", field_name, search_string )
+            tc.submit()
+            self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def send_message_to_repository_owner( self,
+                                          repository,
+                                          message,
+                                          strings_displayed=None,
+                                          strings_not_displayed=None,
+                                          post_submit_strings_displayed=None,
+                                          post_submit_strings_not_displayed=None ):
+        url = '/repository/contact_owner?id=%s' % self.security.encode_id( repository.id )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        tc.fv( 1, 'message', message )
+        tc.submit()
+        self.check_for_strings( post_submit_strings_displayed, post_submit_strings_not_displayed )
+
+    def set_form_value( self, form, kwd, field_name, field_value ):
+        '''
+        Set the form field field_name to field_value if the field exists, and return the provided dict updated
+        to contain that value. If the field does not exist in the provided form, return the dict with that key removed.
+        '''
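+        # Example (as in uninstall_repository below): set_form_value( form, {}, 'remove_from_disk', True )
+        # fills the field when the form defines it and returns { 'remove_from_disk': 'True' }; otherwise
+        # the returned dict contains no such key.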
+        form_id = form.attrs.get( 'id' )
+        controls = [ control for control in form.controls if str( control.name ) == field_name ]
+        if len( controls ) > 0:
+            log.debug( 'Setting field %s of form %s to %s.' % ( field_name, form_id, str( field_value ) ) )
+            tc.formvalue( form_id, field_name, str( field_value ) )
+            kwd[ field_name ] = str( field_value )
+        else:
+            if field_name in kwd:
+                log.debug( 'No field %s in form %s, discarding from return value.', field_name, form_id )
+                del kwd[ field_name ]
+        return kwd
+
+    def set_repository_deprecated( self, repository, set_deprecated=True, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/deprecate?id=%s&mark_deprecated=%s' % ( self.security.encode_id( repository.id ), set_deprecated )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def set_repository_malicious( self, repository, set_malicious=True, strings_displayed=None, strings_not_displayed=None ):
+        self.display_manage_repository_page( repository )
+        tc.fv( "malicious", "malicious", set_malicious )
+        tc.submit( "malicious_button" )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def set_skip_tool_tsts_flag( self, repository, flag_value, reason, changeset_revision=None ):
+        if changeset_revision is None:
+            changeset_revision = self.get_repository_tip( repository )
+        self.display_manage_repository_page( repository, changeset_revision=changeset_revision )
+        form = tc.browser.get_form( 'skip_tool_tests' )
+        assert form is not None, 'Could not find form skip_tool_tests.'
+        for control in form.controls:
+            control_name = str( control.name )
+            if control_name == 'skip_tool_tests' and control.type == 'checkbox':
+                checkbox = control.get()
+                checkbox.selected = flag_value
+            elif control_name == 'skip_tool_tests_comment':
+                tc.browser.clicked( form, control )
+                tc.formvalue( 'skip_tool_tests', control_name, reason )
+        self.submit_form( 'skip_tool_tests', 'skip_tool_tests_button' )
+        if flag_value is True:
+            self.check_for_strings( strings_displayed=[ 'Tools in this revision will not be tested by the automated test framework' ] )
+        else:
+            self.check_for_strings( strings_displayed=[ 'Tools in this revision will be tested by the automated test framework' ] )
+
+    def tip_has_metadata( self, repository ):
+        tip = self.get_repository_tip( repository )
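+        # Returns the tip's repository_metadata record, or None if the tip has no metadata; callers use
+        # the return value for its truthiness.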
+        return test_db_util.get_repository_metadata_by_repository_id_changeset_revision( repository.id, tip )
+
+    def undelete_repository( self, repository ):
+        repository_id = self.security.encode_id( repository.id )
+        url = '/admin/browse_repositories?operation=Undelete&id=%s' % repository_id
+        strings_displayed = [ 'Undeleted 1 repository', repository.name ]
+        strings_not_displayed = []
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def uninstall_repository( self, installed_repository, strings_displayed=None, strings_not_displayed=None ):
+        url = '/admin_toolshed/deactivate_or_uninstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        form = tc.browser.get_form( 'deactivate_or_uninstall_repository' )
+        self.set_form_value( form, {}, 'remove_from_disk', True )
+        tc.submit( 'deactivate_or_uninstall_repository_button' )
+        strings_displayed = [ 'The repository named', 'has been uninstalled' ]
+        self.check_for_strings( strings_displayed )
+
+    def update_installed_repository( self, installed_repository, strings_displayed=None, strings_not_displayed=None ):
+        url = '/repository/check_for_updates?name=%s&owner=%s&changeset_revision=%s&galaxy_url=%s' % ( installed_repository.name,
+            installed_repository.owner, installed_repository.installed_changeset_revision, self.galaxy_url )
+        self.visit_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def update_tool_shed_status( self ):
+        url = '/admin_toolshed/update_tool_shed_status_for_installed_repository?all_installed_repositories=True'
+        self.visit_galaxy_url( url )
+
+    def upload_file( self,
+                     repository,
+                     filename,
+                     filepath,
+                     valid_tools_only,
+                     uncompress_file,
+                     remove_repo_files_not_in_tar,
+                     commit_message,
+                     strings_displayed=None,
+                     strings_not_displayed=None ):
+        if strings_displayed is None:
+            strings_displayed = []
+        if strings_not_displayed is None:
+            strings_not_displayed = []
+        removed_message = 'files were removed from the repository'
+        if remove_repo_files_not_in_tar:
+            if not self.repository_is_new( repository ):
+                if removed_message not in strings_displayed:
+                    strings_displayed.append( removed_message )
+        else:
+            if removed_message not in strings_not_displayed:
+                strings_not_displayed.append( removed_message )
+        self.visit_url( '/upload/upload?repository_id=%s' % self.security.encode_id( repository.id ) )
+        if valid_tools_only:
+            strings_displayed.extend( [ 'has been successfully', 'uploaded to the repository.' ] )
+        tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
+        if uncompress_file:
+            tc.fv( 1, 'uncompress_file', 'Yes' )
+        else:
+            tc.fv( 1, 'uncompress_file', 'No' )
+        if not self.repository_is_new( repository ):
+            if remove_repo_files_not_in_tar:
+                tc.fv( 1, 'remove_repo_files_not_in_tar', 'Yes' )
+            else:
+                tc.fv( 1, 'remove_repo_files_not_in_tar', 'No' )
+        tc.fv( 1, 'commit_message', commit_message )
+        tc.submit( "upload_button" )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+        # Uncomment this if it becomes necessary to wait for an asynchronous process to complete after submitting an upload.
+        # for i in range( 5 ):
+        #    try:
+        #        self.check_for_strings( strings_displayed, strings_not_displayed )
+        #        break
+        #    except Exception as e:
+        #        if i == 4:
+        #            raise e
+        #        else:
+        #            time.sleep( 1 )
+        #            continue
+
+    def upload_url( self,
+                    repository,
+                    url,
+                    filepath,
+                    valid_tools_only,
+                    uncompress_file,
+                    remove_repo_files_not_in_tar,
+                    commit_message,
+                    strings_displayed=None,
+                    strings_not_displayed=None ):
+        if strings_displayed is None:
+            strings_displayed = []
+        if strings_not_displayed is None:
+            strings_not_displayed = []
+        removed_message = 'files were removed from the repository'
+        if remove_repo_files_not_in_tar:
+            if not self.repository_is_new( repository ):
+                if removed_message not in strings_displayed:
+                    strings_displayed.append( removed_message )
+        else:
+            if removed_message not in strings_not_displayed:
+                strings_not_displayed.append( removed_message )
+        self.visit_url( '/upload/upload?repository_id=%s' % self.security.encode_id( repository.id ) )
+        if valid_tools_only:
+            strings_displayed.extend( [ 'has been successfully', 'uploaded to the repository.' ] )
+        tc.fv( "1", "url", url )
+        if uncompress_file:
+            tc.fv( 1, 'uncompress_file', 'Yes' )
+        else:
+            tc.fv( 1, 'uncompress_file', 'No' )
+        if not self.repository_is_new( repository ):
+            if remove_repo_files_not_in_tar:
+                tc.fv( 1, 'remove_repo_files_not_in_tar', 'Yes' )
+            else:
+                tc.fv( 1, 'remove_repo_files_not_in_tar', 'No' )
+        tc.fv( 1, 'commit_message', commit_message )
+        tc.submit( "upload_button" )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def verify_capsule_contents( self, capsule_filepath, owner ):
+        tar_object = tarfile.open( capsule_filepath, 'r:*' )
+        extraction_path = tempfile.mkdtemp()
+        tar_object.extractall( extraction_path )
+        tar_object.close()
+        for root, dirs, files in os.walk( extraction_path ):
+            if 'manifest.xml' in files:
+                self.check_manifest( os.path.join( root, 'manifest.xml' ), owner=owner )
+        shutil.rmtree( extraction_path )
+
+    def verify_installed_repositories( self, installed_repositories=None, uninstalled_repositories=None ):
+        installed_repositories = installed_repositories or []
+        for repository_name, repository_owner in installed_repositories:
+            galaxy_repository = test_db_util.get_installed_repository_by_name_owner( repository_name, repository_owner )
+            if galaxy_repository:
+                assert galaxy_repository.status == 'Installed', \
+                    'Repository %s should be installed, but is %s' % ( repository_name, galaxy_repository.status )
+
+    def verify_installed_repository_metadata_unchanged( self, name, owner ):
+        installed_repository = test_db_util.get_installed_repository_by_name_owner( name, owner )
+        metadata = installed_repository.metadata
+        self.reset_installed_repository_metadata( installed_repository )
+        # Refresh the ORM object so that the post-reset metadata is re-read from the database.
+        test_db_util.ga_refresh( installed_repository )
+        new_metadata = installed_repository.metadata
+        assert metadata == new_metadata, 'Metadata for installed repository %s differs after metadata reset.' % name
+
+    def verify_installed_repository_no_tool_panel_section( self, repository ):
+        '''Verify that there is no 'tool_panel_section' entry in the repository metadata.'''
+        metadata = repository.metadata
+        assert 'tool_panel_section' not in metadata, 'Tool panel section incorrectly found in metadata: %s' % metadata
+
+    def verify_installed_repository_data_table_entries( self, required_data_table_entries ):
+        # The value of the received required_data_table_entries will be something like: [ 'sam_fa_indexes' ]
+        data_tables, error_message = xml_util.parse_xml( self.shed_tool_data_table_conf )
+        found = False
+        # With the tool shed, the "path" attribute that is hard-coded into the tool_data_table_conf.xml
+        # file is ignored.  This is because the tool shed requires the directory location to which this
+        # path points to be empty except when a specific tool is loaded.  The default location for this
+        # directory configured for the tool shed is <Galaxy root>/shed-tool-data.  When a tool is loaded
+        # in the tool shed, all contained .loc.sample files are copied to this directory and the
+        # ToolDataTableManager parses and loads the files in the same way that Galaxy does, with one very
+        # important exception.  When the tool shed loads a tool and parses and loads the copied .loc.sample
+        # files, the ToolDataTableManager is already instantiated, and so its add_new_entries_from_config_file()
+        # method is called and the tool_data_path parameter is used to override the hard-coded "tool-data"
+        # directory that Galaxy always uses.
+        #
+        # Tool data table xml structure:
+        # <tables>
+        #     <table comment_char="#" name="sam_fa_indexes">
+        #        <columns>line_type, value, path</columns>
+        #        <file path="tool-data/sam_fa_indices.loc" />
+        #     </table>
+        # </tables>
+        required_data_table_entry = None
+        for table_elem in data_tables.findall( 'table' ):
+            # The value of table_elem will be something like: <table comment_char="#" name="sam_fa_indexes">
+            for required_data_table_entry in required_data_table_entries:
+                # The value of required_data_table_entry will be something like: 'sam_fa_indexes'
+                if 'name' in table_elem.attrib and table_elem.attrib[ 'name' ] == required_data_table_entry:
+                    found = True
+                    # We're processing something like: sam_fa_indexes
+                    file_elem = table_elem.find( 'file' )
+                    # We have something like: <file path="tool-data/sam_fa_indices.loc" />
+                    # The "path" attribute of the "file" tag is the location that Galaxy always uses because the
+                    # Galaxy ToolDataTableManager was implemented in such a way that the hard-coded path is used
+                    # rather than allowing the location to be a configurable setting like the tool shed requires.
+                    file_path = file_elem.get( 'path', None )
+                    # The value of file_path will be something like: "tool-data/all_fasta.loc"
+                    assert file_path is not None, 'The "path" attribute is missing for the %s entry.' % required_data_table_entry
+                    # The following check is probably not necessary, but the tool-data directory should exist!
+                    galaxy_tool_data_dir, loc_file_name = os.path.split( file_path )
+                    assert galaxy_tool_data_dir, 'The hard-coded Galaxy tool-data directory is missing for the %s entry.' % required_data_table_entry
+                    assert os.path.exists( galaxy_tool_data_dir ), 'The Galaxy tool-data directory does not exist.'
+                    # Make sure the loc_file_name was correctly copied into the configured directory location.
+                    configured_file_location = os.path.join( self.tool_data_path, loc_file_name )
+                    assert os.path.isfile( configured_file_location ), 'The expected copied file "%s" is missing.' % configured_file_location
+                    # We've found the value of the required_data_table_entry in data_tables, which is the parsed
+                    # shed_tool_data_table_conf.xml, so all is well!
+                    break
+            if found:
+                break
+        # We better have an entry like: <table comment_char="#" name="sam_fa_indexes"> in our parsed data_tables
+        # or we know that the repository was not correctly installed!
+        assert found, 'No entry for %s in %s.' % ( required_data_table_entry, self.shed_tool_data_table_conf )
+
+    def verify_repository_in_capsule( self, repository_archive, repository_name, repository_owner ):
+        repository_extraction_dir = tempfile.mkdtemp()
+        repository_tar_object = tarfile.open( repository_archive, 'r:*' )
+        repository_tar_object.extractall( repository_extraction_dir )
+        repository_tar_object.close()
+        for root, dirs, files in os.walk( repository_extraction_dir ):
+            for filename in files:
+                if filename in [ 'tool_dependencies.xml', 'repository_dependencies.xml' ]:
+                    dependency_filepath = os.path.join( root, filename )
+                    self.check_exported_repository_dependency( dependency_filepath, repository_name, repository_owner )
+        shutil.rmtree( repository_extraction_dir )
+
+    def verify_repository_reviews( self, repository, reviewer=None, strings_displayed=None, strings_not_displayed=None ):
+        changeset_revision = self.get_repository_tip( repository )
+        # Verify that the currently logged in user has a repository review for the specified repository, reviewer, and changeset revision.
+        strings_displayed = [ repository.name, reviewer.username ]
+        self.display_reviewed_repositories_owned_by_user( strings_displayed=strings_displayed )
+        # Verify that the reviewer has reviewed the specified repository's changeset revision.
+        strings_displayed = [ repository.name, repository.description ]
+        self.display_repository_reviews_by_user( reviewer, strings_displayed=strings_displayed )
+        # Load the review and check for the components passed in strings_displayed.
+        review = test_db_util.get_repository_review_by_user_id_changeset_revision( reviewer.id, repository.id, changeset_revision )
+        self.browse_component_review( review, strings_displayed=strings_displayed )
+
+    def verify_tool_metadata_for_installed_repository( self, installed_repository, strings_displayed=None, strings_not_displayed=None ):
+        if strings_displayed is None:
+            strings_displayed = []
+        if strings_not_displayed is None:
+            strings_not_displayed = []
+        repository_id = self.security.encode_id( installed_repository.id )
+        for tool in installed_repository.metadata[ 'tools' ]:
+            strings = list( strings_displayed )
+            strings.extend( [ tool[ 'id' ], tool[ 'description' ], tool[ 'version' ], tool[ 'guid' ], tool[ 'name' ] ] )
+            params = dict( repository_id=repository_id, tool_id=tool[ 'id' ] )
+            url = '/admin_toolshed/view_tool_metadata'
+            self.visit_galaxy_url( url, params )
+            self.check_for_strings( strings, strings_not_displayed )
+
+    def verify_unchanged_repository_metadata( self, repository ):
+        old_metadata = dict()
+        new_metadata = dict()
+        for metadata in self.get_repository_metadata( repository ):
+            old_metadata[ metadata.changeset_revision ] = metadata.metadata
+        self.reset_repository_metadata( repository )
+        for metadata in self.get_repository_metadata( repository ):
+            new_metadata[ metadata.changeset_revision ] = metadata.metadata
+        # Python's dict equality recursively compares keys and values, so the assertion fails if any key or
+        # value differs, or if the two dicts do not have exactly the same keys.
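+        # For example, dict( a=1 ) == dict( a=1 ) holds, while dict( a=1 ) == dict( a=1, b=2 ) does not.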
+        assert old_metadata == new_metadata, 'Metadata changed after reset on repository %s.' % repository.name
+
+    def view_installed_workflow( self, repository, workflow_name, strings_displayed=None, strings_not_displayed=None ):
+        url = '/admin_toolshed/view_workflow?repository_id=%s&workflow_name=%s' % \
+            ( self.security.encode_id( repository.id ), tool_shed_encode( workflow_name ) )
+        self.visit_galaxy_url( url )
+        self.check_for_strings( strings_displayed, strings_not_displayed )
+
+    def visit_galaxy_url( self, url, params=None, doseq=False ):
+        url = '%s%s' % ( self.galaxy_url, url )
+        self.visit_url( url, params=params, doseq=doseq )
+
+    def wait_for_repository_installation( self, repository_ids ):
+        final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
+                         galaxy_model.ToolShedRepository.installation_status.INSTALLED ]
+        # Wait until all repositories are in a final state before returning. This ensures that subsequent tests
+        # are running against an installed repository, and not one that is still in the process of installing.
+        if repository_ids:
+            for repository_id in repository_ids:
+                galaxy_repository = test_db_util.get_installed_repository_by_id( self.security.decode_id( repository_id ) )
+                timeout_counter = 0
+                while galaxy_repository.status not in final_states:
+                    test_db_util.ga_refresh( galaxy_repository )
+                    timeout_counter += 1
+                    # This timeout currently defaults to 10 minutes (one iteration per second).
+                    if timeout_counter > repository_installation_timeout:
+                        raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' %
+                                              ( timeout_counter, galaxy_repository.status ) )
+                    time.sleep( 1 )
diff --git a/test/shed_functional/functional/__init__.py b/test/shed_functional/functional/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/shed_functional/functional/test_0000_basic_repository_features.py b/test/shed_functional/functional/test_0000_basic_repository_features.py
new file mode 100644
index 0000000..94fae6b
--- /dev/null
+++ b/test/shed_functional/functional/test_0000_basic_repository_features.py
@@ -0,0 +1,358 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'filtering_0000'
+repository_description = "Galaxy's filtering tool for test 0000"
+repository_long_description = "Long description of Galaxy's filtering tool for test 0000"
+
+log = logging.getLogger( __name__ )
+
+
+class TestBasicRepositoryFeatures( ShedTwillTestCase ):
+    '''Test core repository features.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_repository_without_categories( self ):
+        '''Verify that a repository cannot be created unless at least one category has been defined.'''
+        strings_displayed = [ 'No categories have been configured in this instance of the Galaxy Tool Shed' ]
+        self.visit_url( '/repository/create_repository' )
+        self.check_for_strings( strings_displayed=strings_displayed, strings_not_displayed=[] )
+
+    def test_0010_create_categories( self ):
+        """Create categories for this test suite"""
+        self.create_category( name='Test 0000 Basic Repository Features 1', description='Test 0000 Basic Repository Features 1' )
+        self.create_category( name='Test 0000 Basic Repository Features 2', description='Test 0000 Basic Repository Features 2' )
+
+    def test_0015_create_repository( self ):
+        """Create the filtering repository"""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 1' )
+        strings_displayed = self.expect_repo_created_strings( repository_name )
+        self.get_or_create_repository( name=repository_name,
+                                       description=repository_description,
+                                       long_description=repository_long_description,
+                                       owner=common.test_user_1_name,
+                                       category_id=self.security.encode_id( category.id ),
+                                       strings_displayed=strings_displayed )
+
+    def test_0020_edit_repository( self ):
+        """Edit the repository name, description, and long description"""
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        new_name = "renamed_filtering"
+        new_description = "Edited filtering tool"
+        new_long_description = "Edited long description"
+        self.edit_repository_information( repository, repo_name=new_name, description=new_description, long_description=new_long_description )
+
+    def test_0025_change_repository_category( self ):
+        """Change the categories associated with the filtering repository"""
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.edit_repository_categories( repository,
+                                         categories_to_add=[ "Test 0000 Basic Repository Features 2" ],
+                                         categories_to_remove=[ "Test 0000 Basic Repository Features 1" ] )
+
+    def test_0030_grant_write_access( self ):
+        '''Grant write access to another user'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.grant_write_access( repository, usernames=[ common.test_user_2_name ] )
+        self.revoke_write_access( repository, common.test_user_2_name )
+
+    def test_0035_upload_filtering_1_1_0( self ):
+        """Upload filtering_1.1.0.tar to the repository"""
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=True,
+                          commit_message="Uploaded filtering 1.1.0",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0040_verify_repository( self ):
+        '''Display basic repository pages'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        latest_changeset_revision = self.get_repository_tip( repository )
+        self.check_for_valid_tools( repository, strings_displayed=[ 'Filter1' ] )
+        self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=1 )
+        tip = self.get_repository_tip( repository )
+        tool_guid = '%s/repos/user1/filtering_0000/Filter1/1.1.0' % self.url.replace( 'http://', '' ).rstrip( '/' )
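+        # The tool GUID embeds the tool shed host, repository owner and name, tool id, and tool version.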
+        tool_metadata_strings_displayed = [ tool_guid,
+                                            '1.1.0',  # The tool version.
+                                            'Filter1',  # The tool ID.
+                                            'Filter',  # The tool name.
+                                            'data on any column using simple expressions' ]  # The tool description.
+        tool_page_strings_displayed = [ 'Filter (version 1.1.0)' ]
+        self.check_repository_tools_for_changeset_revision( repository,
+                                                            tip,
+                                                            tool_metadata_strings_displayed=tool_metadata_strings_displayed,
+                                                            tool_page_strings_displayed=tool_page_strings_displayed )
+        self.check_repository_metadata( repository, tip_only=False )
+        self.browse_repository( repository, strings_displayed=[ "Repository '%s' revision" % repository.name, '(repository tip)' ] )
+        self.display_repository_clone_page( common.test_user_1_name,
+                                            repository_name,
+                                            strings_displayed=[ 'Uploaded filtering 1.1.0', latest_changeset_revision ] )
+
+    def test_0045_alter_repository_states( self ):
+        '''Test toggling the malicious and deprecated repository flags.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.login( email=common.admin_email, username=common.admin_username )
+        self.set_repository_malicious( repository,
+                                       set_malicious=True,
+                                       strings_displayed=[ 'The repository tip has been defined as malicious.' ] )
+        self.set_repository_malicious( repository,
+                                       set_malicious=False,
+                                       strings_displayed=[ 'The repository tip has been defined as <b>not</b> malicious.' ] )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        self.set_repository_deprecated( repository,
+                                        strings_displayed=[ 'has been marked as deprecated' ] )
+        strings_displayed = [ 'This repository has been marked as deprecated', 'Mark repository as not deprecated' ]
+        self.display_manage_repository_page( repository,
+                                             strings_displayed=strings_displayed,
+                                             strings_not_displayed=[ 'Upload files', 'Reset all repository metadata' ] )
+        self.browse_repository( repository, strings_not_displayed=[ 'Upload files' ] )
+        self.set_repository_deprecated( repository,
+                                        strings_displayed=[ 'has been marked as not deprecated' ],
+                                        set_deprecated=False )
+        strings_displayed = [ 'Mark repository as deprecated', 'Upload files', 'Reset all repository metadata' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+
+    def test_0050_display_repository_tip_file( self ):
+        '''Display the contents of filtering.xml in the repository tip revision'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.display_repository_file_contents( repository=repository,
+                                               filename='filtering.xml',
+                                               filepath=None,
+                                               strings_displayed=[ '1.1.0' ],
+                                               strings_not_displayed=[] )
+
+    def test_0055_upload_filtering_txt_file( self ):
+        '''Upload filtering.txt file associated with tool version 1.1.0.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='filtering/filtering_0000.txt',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message="Uploaded filtering.txt",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
+
+    def test_0060_upload_filtering_test_data( self ):
+        '''Upload filtering test data.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='filtering/filtering_test_data.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message="Uploaded filtering test data",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.display_repository_file_contents( repository=repository,
+                                               filename='1.bed',
+                                               filepath='test-data',
+                                               strings_displayed=[],
+                                               strings_not_displayed=[] )
+        self.check_repository_metadata( repository, tip_only=True )
+
+    def test_0065_upload_filtering_2_2_0( self ):
+        '''Upload filtering version 2.2.0'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='filtering/filtering_2.2.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message="Uploaded filtering 2.2.0",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0070_verify_filtering_repository( self ):
+        '''Verify the new tool versions and repository metadata.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        tip = self.get_repository_tip( repository )
+        self.check_for_valid_tools( repository )
+        strings_displayed = [ 'Select a revision' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
+        tool_guid = '%s/repos/user1/filtering_0000/Filter1/2.2.0' % self.url.replace( 'http://', '' ).rstrip( '/' )
+        tool_metadata_strings_displayed = [ tool_guid,
+                                            '2.2.0',  # The tool version.
+                                            'Filter1',  # The tool ID.
+                                            'Filter',  # The tool name.
+                                            'data on any column using simple expressions' ]  # The tool description.
+        tool_page_strings_displayed = [ 'Filter (version 2.2.0)' ]
+        self.check_repository_tools_for_changeset_revision( repository,
+                                                            tip,
+                                                            tool_metadata_strings_displayed=tool_metadata_strings_displayed,
+                                                            tool_page_strings_displayed=tool_page_strings_displayed )
+        self.check_repository_metadata( repository, tip_only=False )
+
+    def test_0075_upload_readme_txt_file( self ):
+        '''Upload readme.txt file associated with tool version 2.2.0.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='readme.txt',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message="Uploaded readme.txt",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'This is a readme file.' ] )
+        # Verify that there is a different readme file for each metadata revision.
+        self.display_manage_repository_page( repository,
+                                             strings_displayed=[ 'Readme file for filtering 1.1.0',
+                                                                 'This is a readme file.' ] )
+
+    def test_0080_delete_readme_txt_file( self ):
+        '''Delete the readme.txt file.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.delete_files_from_repository( repository, filenames=[ 'readme.txt' ] )
+        self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
+
+    def test_0085_search_for_valid_filter_tool( self ):
+        '''Search for the filtering tool by tool ID, name, and version.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        tip_changeset = self.get_repository_tip( repository )
+        search_fields = dict( tool_id='Filter1', tool_name='filter', tool_version='2.2.0' )
+        self.search_for_valid_tools( search_fields=search_fields, strings_displayed=[ tip_changeset ], strings_not_displayed=[] )
+
+    def test_0090_verify_repository_metadata( self ):
+        '''Verify that resetting the metadata does not change it.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.verify_unchanged_repository_metadata( repository )
+
+    def test_0095_verify_reserved_repository_name_handling( self ):
+        '''Check that reserved repository names are handled correctly.'''
+        category = self.test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 1' )
+        error_message = 'The term <b>repos</b> is a reserved word in the tool shed, so it cannot be used as a repository name.'
+        self.get_or_create_repository( name='repos',
+                                       description=repository_description,
+                                       long_description=repository_long_description,
+                                       owner=common.test_user_1_name,
+                                       category_id=self.security.encode_id( category.id ),
+                                       strings_displayed=[ error_message ] )
+
+    def test_0100_verify_reserved_username_handling( self ):
+        '''Check that reserved usernames are handled correctly.'''
+        self.login( email='baduser@bx.psu.edu', username='repos' )
+        test_user_1 = self.test_db_util.get_user( 'baduser@bx.psu.edu' )
+        assert test_user_1 is None, 'Creating user with public name "repos" succeeded.'
+        error_message = 'The term <b>repos</b> is a reserved word in the tool shed, so it cannot be used as a public user name.'
+        self.check_for_strings( strings_displayed=[ error_message ] )
+
+    def test_0105_contact_repository_owner( self ):
+        '''
+        Fill out and submit the form to contact the owner of a repository.
+
+        This test should not actually send the email, since functional tests are designed to function without
+        any external network connection. The embedded tool shed server these tests are running against has been
+        configured with an SMTP server address that will not and should not resolve correctly. However, since the
+        successful sending of the email is the last step in the process, this will verify functional correctness
+        of all preceding steps.
+        '''
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        message = 'This is a test message.'
+        strings_displayed = [ 'Contact the owner of the repository named', repository.name, 'streamline appropriate communication' ]
+        post_submit_strings_displayed = [ 'An error occurred sending your message by email' ]
+        self.send_message_to_repository_owner( repository=repository,
+                                               message=message,
+                                               strings_displayed=strings_displayed,
+                                               post_submit_strings_displayed=post_submit_strings_displayed )
+
+    def test_0110_delete_filtering_repository( self ):
+        '''Delete the filtering_0000 repository and verify that it no longer has any downloadable revisions.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.login( email=common.admin_email, username=common.admin_username )
+        self.delete_repository( repository )
+        # Explicitly reload all metadata revisions from the database, to ensure that we have the current status of the downloadable flag.
+        for metadata_revision in repository.metadata_revisions:
+            self.test_db_util.refresh( metadata_revision )
+        # Marking a repository as deleted should result in no metadata revisions being downloadable.
+        assert True not in [ metadata.downloadable for metadata in repository.metadata_revisions ]
+
+    def test_0115_undelete_filtering_repository( self ):
+        '''Undelete the filtering_0000 repository and verify that it now has two downloadable revisions.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.login( email=common.admin_email, username=common.admin_username )
+        self.undelete_repository( repository )
+        # Explicitly reload all metadata revisions from the database, to ensure that we have the current status of the downloadable flag.
+        for metadata_revision in repository.metadata_revisions:
+            self.test_db_util.refresh( metadata_revision )
+        # Marking a repository as undeleted should result in all previously downloadable metadata revisions being downloadable again.
+        # In this case, there should be two downloadable revisions, one for filtering 1.1.0 and one for filtering 2.2.0.
+        assert True in [ metadata.downloadable for metadata in repository.metadata_revisions ]
+        assert len( repository.downloadable_revisions ) == 2
+
+    def test_0120_enable_email_notifications( self ):
+        '''Enable email notifications for test user 2 on filtering_0000.'''
+        # Log in as test_user_2
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        # Get the repository, so we can pass the encoded repository id and browse_repositories method to the set_email_alerts method.
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        strings_displayed = [ 'Total alerts added: 1, total alerts removed: 0' ]
+        self.enable_email_alerts( repository, strings_displayed=strings_displayed )
+
+    def test_0125_upload_new_readme_file( self ):
+        '''Upload a new readme file to the filtering_0000 repository and verify that there is no error.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        # Upload readme.txt to the filtering_0000 repository and verify that it is now displayed.
+        self.upload_file( repository,
+                          filename='filtering/readme.txt',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message="Uploaded new readme.txt with invalid ascii characters.",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'These characters should not' ] )
+
+    def test_0130_verify_handling_of_invalid_characters( self ):
+        '''Load the above changeset in the change log and confirm that there is no server error displayed.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        changeset_revision = self.get_repository_tip( repository )
+        repository_id = self.security.encode_id( repository.id )
+        changelog_tuples = self.get_repository_changelog_tuples( repository )
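+        # Each changelog tuple is ( numeric local revision, changeset hash ).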
+        revision_number = -1
+        revision_hash = '000000000000'
+        for numeric_changeset, changeset_hash in changelog_tuples:
+            if str( changeset_hash ) == str( changeset_revision ):
+                revision_number = numeric_changeset
+                revision_hash = changeset_hash
+                break
+        # Check for the changeset revision, repository name, owner username, 'repos' in the clone url, and the captured
+        # unicode decoding error message.
+        strings_displayed = [ '%d:%s' % ( revision_number, revision_hash ), 'filtering_0000', 'user1', 'repos', 'added:',
+                              '+These characters should not' ]
+        self.load_changeset_in_tool_shed( repository_id, changeset_revision, strings_displayed=strings_displayed )
+
+    def test_0135_api_get_repositories_in_category( self ):
+        '''Load the api endpoint for repositories in a category.'''
+        categories = []
+        categories.append( self.test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 1' ) )
+        categories.append( self.test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 2' ) )
+        self.get_repositories_category_api( categories )
diff --git a/test/shed_functional/functional/test_0010_repository_with_tool_dependencies.py b/test/shed_functional/functional/test_0010_repository_with_tool_dependencies.py
new file mode 100644
index 0000000..f11f262
--- /dev/null
+++ b/test/shed_functional/functional/test_0010_repository_with_tool_dependencies.py
@@ -0,0 +1,165 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'freebayes_0010'
+repository_description = "Galaxy's freebayes tool"
+repository_long_description = "Long description of Galaxy's freebayes tool"
+
+'''
+1. Create repository freebayes_0010 and upload only the tool XML.
+2. Upload the tool_data_table_conf.xml.sample file.
+3. Upload sam_fa_indices.loc.sample.
+4. Upload a tool_dependencies.xml file that should not parse correctly.
+5. Upload a tool_dependencies.xml file that specifies a version that does not match the tool's requirements.
+6. Upload a valid tool_dependencies.xml file.
+7. Check for the appropriate strings on the manage repository page.
+'''
+
+
+class TestFreebayesRepository( ShedTwillTestCase ):
+    '''Testing freebayes with tool data table entries, .loc files, and tool dependencies.'''
+
+    def test_0000_create_or_login_admin_user( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category( self ):
+        """Create a category for this test suite"""
+        self.create_category( name='Test 0010 Repository With Tool Dependencies', description='Tests for a repository with tool dependencies.' )
+
+    def test_0010_create_freebayes_repository_and_upload_tool_xml( self ):
+        '''
+        Create the freebayes repository and upload only freebayes.xml.
+
+        We are at step 1 - Create repository freebayes_0010 and upload only the tool XML.
+        Uploading only the tool XML file should result in an invalid tool and an error message on
+        upload, as well as on the manage repository page.
+        '''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( 'Test 0010 Repository With Tool Dependencies' )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='freebayes/freebayes.xml',
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded the tool xml.',
+                          strings_displayed=[ 'Metadata may have been defined', 'This file requires an entry', 'tool_data_table_conf' ],
+                          strings_not_displayed=[] )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Invalid tools' ], strings_not_displayed=[ 'Valid tools' ] )
+        tip = self.get_repository_tip( repository )
+        strings_displayed = [ 'requires an entry', 'tool_data_table_conf.xml' ]
+        self.check_repository_invalid_tools_for_changeset_revision( repository, tip, strings_displayed=strings_displayed )
+
+    def test_0015_upload_missing_tool_data_table_conf_file( self ):
+        '''
+        Upload the missing tool_data_table_conf.xml.sample file to the repository.
+
+        We are at step 2 - Upload the tool_data_table_conf.xml.sample file.
+        Uploading tool_data_table_conf.xml.sample alone should not make the tool valid, but the error message should change.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='freebayes/tool_data_table_conf.xml.sample',
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded the tool data table sample file.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Invalid tools' ], strings_not_displayed=[ 'Valid tools' ] )
+        tip = self.get_repository_tip( repository )
+        strings_displayed = [ 'refers to a file', 'sam_fa_indices.loc' ]
+        self.check_repository_invalid_tools_for_changeset_revision( repository, tip, strings_displayed=strings_displayed )
+
+    def test_0020_upload_missing_sample_loc_file( self ):
+        '''
+        Upload the missing sam_fa_indices.loc.sample file to the repository.
+
+        We are at step 3 - Upload sam_fa_indices.loc.sample.
+        Uploading the missing .loc.sample file should resolve the remaining error and make the contained tool valid.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='freebayes/sam_fa_indices.loc.sample',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded tool data table .loc file.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0025_upload_malformed_tool_dependency_xml( self ):
+        '''Upload tool_dependencies.xml with bad characters in the readme tag.
+
+        We are at step 4 - Upload a tool_dependencies.xml file that should not parse correctly.
+        The file contains <> in the text of the readme tag, which should produce an error message about malformed xml.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename=os.path.join( 'freebayes', 'malformed_tool_dependencies', 'tool_dependencies.xml' ),
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded malformed tool dependency XML.',
+                          strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
+                          strings_not_displayed=[] )
+
+    def test_0030_upload_invalid_tool_dependency_xml( self ):
+        '''Upload tool_dependencies.xml defining version 0.9.5 of the freebayes package.
+
+        We are at step 5 - Upload a tool_dependencies.xml file that specifies a version that does not match the tool's requirements.
+        This should result in a message about the tool dependency configuration not matching the tool's requirements.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename=os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded invalid tool dependency XML.',
+                          strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool configuration' ],
+                          strings_not_displayed=[] )
+
+    def test_0035_upload_valid_tool_dependency_xml( self ):
+        '''Upload tool_dependencies.xml defining version 0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8 of the freebayes package.
+
+        We are at step 6 - Upload a valid tool_dependencies.xml file.
+        At this stage, there should be no errors on the upload page, as every missing or invalid file has been corrected.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename=os.path.join( 'freebayes', 'tool_dependencies.xml' ),
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded valid tool dependency XML.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0040_verify_tool_dependencies( self ):
+        '''Verify that the uploaded tool_dependencies.xml specifies the correct package versions.
+
+        We are at step 7 - Check for the appropriate strings on the manage repository page.
+        Verify that the manage repository page now displays the valid tool dependencies, and that there are no invalid tools shown on the manage page.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        strings_displayed = [ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools', 'package' ]
+        strings_not_displayed = [ 'Invalid tools' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
diff --git a/test/shed_functional/functional/test_0020_basic_repository_dependencies.py b/test/shed_functional/functional/test_0020_basic_repository_dependencies.py
new file mode 100644
index 0000000..b76f096
--- /dev/null
+++ b/test/shed_functional/functional/test_0020_basic_repository_dependencies.py
@@ -0,0 +1,110 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0020'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+
+emboss_repository_name = 'emboss_0020'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools for test 0020'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools for test 0020'
+
+
+class TestBasicRepositoryDependencies( ShedTwillTestCase ):
+    '''Testing emboss 5 with repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category( self ):
+        """Create a category for this test suite"""
+        self.create_category( name='Test 0020 Basic Repository Dependencies', description='Testing basic repository dependency features.' )
+
+    def test_0010_create_emboss_datatypes_repository_and_upload_tarball( self ):
+        '''Create and populate the emboss_datatypes repository.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( 'Test 0020 Basic Repository Dependencies' )
+        repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                             description=datatypes_repository_description,
+                                             long_description=datatypes_repository_long_description,
+                                             owner=common.test_user_1_name,
+                                             category_id=self.security.encode_id( category.id ),
+                                             strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/datatypes/datatypes_conf.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded datatypes_conf.xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_verify_datatypes_in_datatypes_repository( self ):
+        '''Verify that the emboss_datatypes repository contains datatype entries.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Datatypes', 'equicktandem', 'hennig86', 'vectorstrip' ] )
+
+    def test_0020_create_emboss_5_repository_and_upload_files( self ):
+        '''Create and populate the emboss_5_0020 repository.'''
+        category = self.test_db_util.get_category_by_name( 'Test 0020 Basic Repository Dependencies' )
+        repository = self.get_or_create_repository( name=emboss_repository_name,
+                                             description=emboss_repository_description,
+                                             long_description=emboss_repository_long_description,
+                                             owner=common.test_user_1_name,
+                                             category_id=self.security.encode_id( category.id ),
+                                             strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/emboss.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded emboss.tar',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0025_generate_and_upload_repository_dependencies_xml( self ):
+        '''Generate and upload the repository_dependencies.xml file'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0020', additional_paths=[ 'emboss', '5' ] )
+        repository_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
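+        # For reference, a minimal sketch of the repository_dependencies.xml
+        # that create_repository_dependency() is expected to generate from the
+        # tuple above ( the toolshed url and changeset hash are illustrative ):
+        #
+        # <?xml version="1.0"?>
+        # <repositories description="">
+        #     <repository toolshed="http://localhost:9009" name="emboss_datatypes_0020" owner="user1" changeset_revision="dbd4f68bf507" />
+        # </repositories>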
+        self.create_repository_dependency( repository=repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0030_verify_emboss_5_dependencies( self ):
+        '''Verify that the emboss_5 repository now depends on the emboss_datatypes repository with correct name, owner, and changeset revision.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        changeset_revision = self.get_repository_tip( datatypes_repository )
+        strings_displayed = [ 'Tool dependencies',
+                              'emboss',
+                              '5.0.0',
+                              'package',
+                              'emboss_datatypes_0020',
+                              'user1',
+                              changeset_revision,
+                              'Repository dependencies' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+
+    def test_0040_verify_repository_metadata( self ):
+        '''Verify that resetting the metadata does not change it.'''
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        self.verify_unchanged_repository_metadata( emboss_repository )
+        self.verify_unchanged_repository_metadata( datatypes_repository )
diff --git a/test/shed_functional/functional/test_0030_repository_dependency_revisions.py b/test/shed_functional/functional/test_0030_repository_dependency_revisions.py
new file mode 100644
index 0000000..7aa00ac
--- /dev/null
+++ b/test/shed_functional/functional/test_0030_repository_dependency_revisions.py
@@ -0,0 +1,162 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0030'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+
+emboss_repository_name = 'emboss_0030'
+emboss_5_repository_name = 'emboss_5_0030'
+emboss_6_repository_name = 'emboss_6_0030'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools for test 0030'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools for test 0030'
+
+
+class TestRepositoryDependencyRevisions( ShedTwillTestCase ):
+    '''Test dependencies on different revisions of a repository.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category( self ):
+        """Create a category for this test suite"""
+        self.create_category( name='Test 0030 Repository Dependency Revisions', description='Testing repository dependencies by revision.' )
+
+    def test_0010_create_emboss_5_repository( self ):
+        '''Create and populate the emboss_5_0030 repository.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( 'Test 0030 Repository Dependency Revisions' )
+        repository = self.get_or_create_repository( name=emboss_5_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        self.upload_file( repository,
+                          filename='emboss/emboss.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded tool tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_emboss_6_repository( self ):
+        '''Create and populate the emboss_6_0030 repository.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( 'Test 0030 Repository Dependency Revisions' )
+        repository = self.get_or_create_repository( name=emboss_6_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        self.upload_file( repository,
+                          filename='emboss/emboss.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded tool tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_create_emboss_datatypes_repository( self ):
+        '''Create and populate the emboss_datatypes_0030 repository.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( 'Test 0030 Repository Dependency Revisions' )
+        repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                    description=datatypes_repository_description,
+                                                    long_description=datatypes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        self.upload_file( repository,
+                          filename='emboss/datatypes/datatypes_conf.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded datatypes_conf.xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0025_create_emboss_repository( self ):
+        '''Create and populate the emboss_0030 repository.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( 'Test 0030 Repository Dependency Revisions' )
+        repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        self.upload_file( repository,
+                          filename='emboss/emboss.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded the tool tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0030_generate_repository_dependencies_for_emboss_5( self ):
+        '''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        emboss_5_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss5' ] )
+        datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+        self.create_repository_dependency( repository=emboss_5_repository, repository_tuples=[ datatypes_tuple ], filepath=repository_dependencies_path )
+
+    def test_0035_generate_repository_dependencies_for_emboss_6( self ):
+        '''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_6 repository.'''
+        emboss_6_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss6' ] )
+        datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+        self.create_repository_dependency( repository=emboss_6_repository, repository_tuples=[ datatypes_tuple ], filepath=repository_dependencies_path )
+
+    def test_0040_generate_repository_dependency_on_emboss_5( self ):
+        '''Upload a repository_dependencies.xml to emboss_0030 that defines a dependency on emboss_5_0030.'''
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        emboss_5_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '5' ] )
+        emboss_tuple = ( self.url, emboss_5_repository.name, emboss_5_repository.user.username, self.get_repository_tip( emboss_5_repository ) )
+        self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ emboss_tuple ], filepath=repository_dependencies_path )
+
+    def test_0045_generate_repository_dependency_on_emboss_6( self ):
+        '''Upload a repository_dependencies.xml to emboss_0030 that defines a dependency on emboss_6_0030.'''
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        emboss_6_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '6' ] )
+        emboss_tuple = ( self.url, emboss_6_repository.name, emboss_6_repository.user.username, self.get_repository_tip( emboss_6_repository ) )
+        self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ emboss_tuple ], filepath=repository_dependencies_path )
+
+    def test_0050_verify_repository_dependency_revisions( self ):
+        '''Verify that different metadata revisions of the emboss repository have different repository dependencies.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        repository_metadata = [ ( metadata.metadata, metadata.changeset_revision ) for metadata in self.get_repository_metadata( repository ) ]
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        datatypes_tip = self.get_repository_tip( datatypes_repository )
+        # Iterate through all metadata revisions and check for repository dependencies.
+        for metadata, changeset_revision in repository_metadata:
+            # Add the dependency description and datatypes repository details to the strings to check.
+            strings_displayed = [ 'emboss_datatypes_0030', 'user1', datatypes_tip ]
+            strings_displayed.extend( [ 'Tool dependencies', 'emboss', '5.0.0', 'package' ] )
+            self.display_manage_repository_page( repository,
+                                                 changeset_revision=changeset_revision,
+                                                 strings_displayed=strings_displayed )
+
+    def test_0055_verify_repository_metadata( self ):
+        '''Verify that resetting the metadata does not change it.'''
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        emboss_5_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+        emboss_6_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        for repository in [ emboss_repository, emboss_5_repository, emboss_6_repository, datatypes_repository ]:
+            self.verify_unchanged_repository_metadata( repository )
diff --git a/test/shed_functional/functional/test_0040_repository_circular_dependencies.py b/test/shed_functional/functional/test_0040_repository_circular_dependencies.py
new file mode 100644
index 0000000..6d01c3c
--- /dev/null
+++ b/test/shed_functional/functional/test_0040_repository_circular_dependencies.py
@@ -0,0 +1,116 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+freebayes_repository_name = 'freebayes_0040'
+freebayes_repository_description = "Galaxy's freebayes tool for test 0040"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool for test 0040"
+
+filtering_repository_name = 'filtering_0040'
+filtering_repository_description = "Galaxy's filtering tool for test 0040"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool for test 0040"
+
+
+class TestRepositoryCircularDependencies( ShedTwillTestCase ):
+    '''Verify that the code correctly displays repositories with circular repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category( self ):
+        """Create a category for this test suite"""
+        self.create_category( name='test_0040_repository_circular_dependencies', description='Testing handling of circular repository dependencies.' )
+
+    def test_0010_create_freebayes_repository( self ):
+        '''Create and populate freebayes_0040.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=freebayes_repository_name,
+                                                    description=freebayes_repository_description,
+                                                    long_description=freebayes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    categories=[ 'test_0040_repository_circular_dependencies' ],
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='freebayes/freebayes.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded the tool tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_filtering_repository( self ):
+        '''Create and populate filtering_0040.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=filtering_repository_name,
+                                                    description=filtering_repository_description,
+                                                    long_description=filtering_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    categories=[ 'test_0040_repository_circular_dependencies' ],
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded the tool tarball for filtering 1.1.0.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_create_dependency_on_freebayes( self ):
+        '''Upload a repository_dependencies.xml file that specifies the current revision of freebayes to the filtering_0040 repository.'''
+        # The dependency structure should look like:
+        # Filtering revision 0 -> freebayes revision 0.
+        # Freebayes revision 0 -> filtering revision 1.
+        # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+        repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'filtering' ] )
+        repository_tuple = ( self.url, repository.name, repository.user.username, self.get_repository_tip( repository ) )
+        self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0025_create_dependency_on_filtering( self ):
+        '''Upload a repository_dependencies.xml file that specifies the current revision of filtering to the freebayes_0040 repository.'''
+        # The dependency structure should look like:
+        # Filtering revision 0 -> freebayes revision 0.
+        # Freebayes revision 0 -> filtering revision 1.
+        # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+        repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'freebayes' ] )
+        repository_tuple = ( self.url, repository.name, repository.user.username, self.get_repository_tip( repository ) )
+        self.create_repository_dependency( repository=freebayes_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0030_verify_repository_dependencies( self ):
+        '''Verify that each repository can depend on the other without causing an infinite loop.'''
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        # The dependency structure should look like:
+        # Filtering revision 0 -> freebayes revision 0.
+        # Freebayes revision 0 -> filtering revision 1.
+        # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+        # In this case, the displayed dependency will specify the tip revision, but this will not always be the case.
+        self.check_repository_dependency( filtering_repository, freebayes_repository, self.get_repository_tip( freebayes_repository ) )
+        self.check_repository_dependency( freebayes_repository, filtering_repository, self.get_repository_tip( filtering_repository ) )
+
+    def test_0035_verify_repository_metadata( self ):
+        '''Verify that resetting the metadata does not change it.'''
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        for repository in [ freebayes_repository, filtering_repository ]:
+            self.verify_unchanged_repository_metadata( repository )
+
+    def test_0040_verify_tool_dependencies( self ):
+        '''Verify that freebayes displays tool dependencies.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        self.display_manage_repository_page( repository,
+                                             strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools', 'package' ],
+                                             strings_not_displayed=[ 'Invalid tools' ] )
diff --git a/test/shed_functional/functional/test_0050_circular_dependencies_4_levels.py b/test/shed_functional/functional/test_0050_circular_dependencies_4_levels.py
new file mode 100644
index 0000000..67cef36
--- /dev/null
+++ b/test/shed_functional/functional/test_0050_circular_dependencies_4_levels.py
@@ -0,0 +1,279 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0050'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0050'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+filtering_repository_name = 'filtering_0050'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+freebayes_repository_name = 'freebayes_0050'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+column_repository_name = 'column_maker_0050'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0050'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+bismark_repository_name = 'bismark_0050'
+bismark_repository_description = "A flexible aligner."
+bismark_repository_long_description = "A flexible aligner and methylation caller for Bisulfite-Seq applications."
+
+category_name = 'Test 0050 Circular Dependencies 5 Levels'
+category_description = 'Test circular dependency features'
+
+
+class TestRepositoryCircularDependenciesToNLevels( ShedTwillTestCase ):
+    '''Verify that the code correctly handles circular dependencies down to n levels.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_convert_repository( self ):
+        '''Create and populate convert_chars_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='convert_chars/convert_chars.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded convert_chars tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_column_repository( self ):
+        '''Create and populate column_maker_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='column_maker/column_maker.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded column_maker tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_emboss_datatypes_repository( self ):
+        '''Create and populate emboss_datatypes_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=emboss_datatypes_repository_name,
+                                                    description=emboss_datatypes_repository_description,
+                                                    long_description=emboss_datatypes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/datatypes/datatypes_conf.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded datatypes_conf.xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_create_emboss_repository( self ):
+        '''Create and populate emboss_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/emboss.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded emboss tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0025_create_filtering_repository( self ):
+        '''Create and populate filtering_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        filtering_repository = self.get_or_create_repository( name=filtering_repository_name,
+                                                              description=filtering_repository_description,
+                                                              long_description=filtering_repository_long_description,
+                                                              owner=common.test_user_1_name,
+                                                              category_id=self.security.encode_id( category.id ),
+                                                              strings_displayed=[] )
+        self.upload_file( filtering_repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 1.1.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0030_create_freebayes_repository( self ):
+        '''Create and populate freebayes_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=freebayes_repository_name,
+                                                    description=freebayes_repository_description,
+                                                    long_description=freebayes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='freebayes/freebayes.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded freebayes tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0035_create_bismark_repository( self ):
+        '''Create and populate bismark_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=bismark_repository_name,
+                                                    description=bismark_repository_description,
+                                                    long_description=bismark_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='bismark/bismark.tar',
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded bismark tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0040_create_and_upload_dependency_definitions( self ):
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        bismark_repository = self.test_db_util.get_repository_by_name_and_owner( bismark_repository_name, common.test_user_1_name )
+        dependency_xml_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
+        # convert_chars depends on column_maker
+        # column_maker depends on convert_chars
+        # emboss depends on emboss_datatypes
+        # emboss_datatypes depends on bismark
+        # freebayes depends on freebayes, emboss, emboss_datatypes, and column_maker
+        # filtering depends on emboss
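+        # A sketch of the multi-entry repository_dependencies.xml expected for
+        # freebayes_0050 below ( attribute values are illustrative ):
+        #
+        # <repositories description="">
+        #     <repository toolshed="..." name="freebayes_0050" owner="user1" changeset_revision="..." />
+        #     <repository toolshed="..." name="emboss_datatypes_0050" owner="user1" changeset_revision="..." />
+        #     <repository toolshed="..." name="emboss_0050" owner="user1" changeset_revision="..." />
+        #     <repository toolshed="..." name="column_maker_0050" owner="user1" changeset_revision="..." />
+        # </repositories>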
+        column_tuple = ( self.url, column_repository.name, column_repository.user.username, self.get_repository_tip( column_repository ) )
+        convert_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
+        freebayes_tuple = ( self.url, freebayes_repository.name, freebayes_repository.user.username, self.get_repository_tip( freebayes_repository ) )
+        emboss_tuple = ( self.url, emboss_repository.name, emboss_repository.user.username, self.get_repository_tip( emboss_repository ) )
+        datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+        bismark_tuple = ( self.url, bismark_repository.name, bismark_repository.user.username, self.get_repository_tip( bismark_repository ) )
+        self.create_repository_dependency( repository=convert_repository, repository_tuples=[ column_tuple ], filepath=dependency_xml_path )
+        self.create_repository_dependency( repository=column_repository, repository_tuples=[ convert_tuple ], filepath=dependency_xml_path )
+        self.create_repository_dependency( repository=datatypes_repository, repository_tuples=[ bismark_tuple ], filepath=dependency_xml_path )
+        self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
+        self.create_repository_dependency( repository=freebayes_repository,
+                                           repository_tuples=[ freebayes_tuple, datatypes_tuple, emboss_tuple, column_tuple ],
+                                           filepath=dependency_xml_path )
+        self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ emboss_tuple ], filepath=dependency_xml_path )
+
+    def test_0045_verify_repository_dependencies( self ):
+        '''Verify that the generated dependency circle does not cause an infinite loop.
+        Expected structure:
+
+        id: 2 key: http://toolshed.local:10001__ESEP__filtering__ESEP__test__ESEP__871602b4276b
+            ['http://toolshed.local:10001', 'emboss_5', 'test', '8de5fe0d7b04']
+             id: 3 key: http://toolshed.local:10001__ESEP__emboss_datatypes__ESEP__test__ESEP__dbd4f68bf507
+                 ['http://toolshed.local:10001', 'freebayes', 'test', 'f40028114098']
+             id: 4 key: http://toolshed.local:10001__ESEP__freebayes__ESEP__test__ESEP__f40028114098
+                 ['http://toolshed.local:10001', 'emboss_datatypes', 'test', 'dbd4f68bf507']
+                 ['http://toolshed.local:10001', 'emboss_5', 'test', '8de5fe0d7b04']
+                 ['http://toolshed.local:10001', 'column_maker', 'test', '83e956bdbac0']
+             id: 5 key: http://toolshed.local:10001__ESEP__column_maker__ESEP__test__ESEP__83e956bdbac0
+                 ['http://toolshed.local:10001', 'convert_chars', 'test', 'b28134220c8a']
+             id: 6 key: http://toolshed.local:10001__ESEP__convert_chars__ESEP__test__ESEP__b28134220c8a
+                 ['http://toolshed.local:10001', 'column_maker', 'test', '83e956bdbac0']
+             id: 7 key: http://toolshed.local:10001__ESEP__emboss_5__ESEP__test__ESEP__8de5fe0d7b04
+                 ['http://toolshed.local:10001', 'emboss_datatypes', 'test', 'dbd4f68bf507']
+        '''
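+        # Note on the structure above: __ESEP__ is the separator the tool shed
+        # uses when joining ( toolshed_url, name, owner, changeset_revision )
+        # into a dependency key; the ids and changeset hashes come from one
+        # sample run, so the actual values will differ between runs.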
+        emboss_datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        bismark_repository = self.test_db_util.get_repository_by_name_and_owner( bismark_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( convert_repository, column_repository )
+        self.check_repository_dependency( column_repository, convert_repository )
+        self.check_repository_dependency( emboss_datatypes_repository, bismark_repository )
+        self.check_repository_dependency( emboss_repository, emboss_datatypes_repository )
+        self.check_repository_dependency( filtering_repository, emboss_repository )
+        for repository in [ emboss_datatypes_repository, emboss_repository, column_repository ]:
+            self.check_repository_dependency( freebayes_repository, repository )
+        strings_displayed = [ 'freebayes_0050 depends on freebayes_0050, emboss_datatypes_0050, emboss_0050, column_maker_0050.' ]
+        self.display_manage_repository_page( freebayes_repository,
+                                             strings_displayed=strings_displayed )
+
+    def test_0050_verify_tool_dependencies( self ):
+        '''Check that freebayes and emboss display tool dependencies.'''
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        self.display_manage_repository_page( freebayes_repository,
+                                             strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Tool dependencies', 'package' ] )
+        self.display_manage_repository_page( emboss_repository, strings_displayed=[ 'Tool dependencies', 'emboss', '5.0.0', 'package' ] )
+
+    def test_0055_verify_repository_metadata( self ):
+        '''Verify that resetting the metadata does not change it.'''
+        emboss_datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        for repository in [ emboss_datatypes_repository, emboss_repository, freebayes_repository, filtering_repository ]:
+            self.verify_unchanged_repository_metadata( repository )
diff --git a/test/shed_functional/functional/test_0060_workflows.py b/test/shed_functional/functional/test_0060_workflows.py
new file mode 100644
index 0000000..2d3ce93
--- /dev/null
+++ b/test/shed_functional/functional/test_0060_workflows.py
@@ -0,0 +1,125 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'filtering_0060'
+repository_description = "Galaxy's filtering tool for test 0060"
+repository_long_description = "Long description of Galaxy's filtering tool for test 0060"
+
+workflow_filename = 'Workflow_for_0060_filter_workflow_repository.ga'
+workflow_name = 'Workflow for 0060_filter_workflow_repository'
+
+category_name = 'Test 0060 Workflow Features'
+category_description = 'Test 0060 for workflow features'
+
+workflow_repository_name = 'filtering_workflow_0060'
+workflow_repository_description = "Workflow referencing the filtering tool for test 0060"
+workflow_repository_long_description = "Long description of the workflow for test 0060"
+
+
+class TestToolShedWorkflowFeatures( ShedTwillTestCase ):
+    '''Test valid and invalid workflows.'''
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_categories( self ):
+        """Create categories for this test suite"""
+        self.create_category( name=category_name, description=category_description )
+
+    def test_0010_create_repository( self ):
+        """Create and populate the filtering repository"""
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        self.get_or_create_repository( name=repository_name,
+                                       description=repository_description,
+                                       long_description=repository_long_description,
+                                       owner=common.test_user_1_name,
+                                       category_id=self.security.encode_id( category.id ),
+                                       strings_displayed=[] )
+
+    def test_0015_upload_workflow( self ):
+        '''Upload a workflow with a missing tool, and verify that the tool specified is marked as missing.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        workflow = open( self.get_filename( 'filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga' ), 'r' ).read()
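+        # Substitute the __TEST_TOOL_SHED_URL__ placeholder in the exported workflow with the
+        # host of the running tool shed, so the workflow's tool references point at this shed.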
+        workflow = workflow.replace( '__TEST_TOOL_SHED_URL__', self.url.replace( 'http://', '' ) )
+        workflow_filepath = self.generate_temp_path( 'test_0060', additional_paths=[ 'filtering_workflow' ] )
+        if not os.path.exists( workflow_filepath ):
+            os.makedirs( workflow_filepath )
+        open( os.path.join( workflow_filepath, workflow_filename ), 'w+' ).write( workflow )
+        self.upload_file( repository,
+                          filename=workflow_filename,
+                          filepath=workflow_filepath,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering workflow.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
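+        # Note: '#EBBCB2' appears to be the background color with which the tool shed highlights
+        # missing tools in the rendered workflow image (an assumption based on this check).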
+        self.load_workflow_image_in_tool_shed( repository, workflow_name, strings_displayed=[ '#EBBCB2' ] )
+
+    def test_0020_upload_tool( self ):
+        '''Upload the missing tool for the workflow in the previous step, and verify that the error is no longer present.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='filtering/filtering_2.2.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 2.2.0.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
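+        # With the filtering tool now present, the missing-tool highlight should no longer be rendered.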
+        self.load_workflow_image_in_tool_shed( repository, workflow_name, strings_not_displayed=[ '#EBBCB2' ] )
+
+    def test_0025_create_repository_with_only_workflow( self ):
+        """Create and populate the filtering_workflow_0060 repository"""
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        self.get_or_create_repository( name=workflow_repository_name,
+                                       description=workflow_repository_description,
+                                       long_description=workflow_repository_long_description,
+                                       owner=common.test_user_1_name,
+                                       category_id=self.security.encode_id( category.id ),
+                                       strings_displayed=[] )
+        workflow = open( self.get_filename( 'filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga' ), 'r' ).read()
+        workflow = workflow.replace( '__TEST_TOOL_SHED_URL__', self.url.replace( 'http://', '' ) )
+        workflow = workflow.replace( 'Workflow for 0060_filter_workflow_repository',
+                                     'New workflow for 0060_filter_workflow_repository' )
+        workflow_filepath = self.generate_temp_path( 'test_0060', additional_paths=[ 'filtering_workflow_2' ] )
+        if not os.path.exists( workflow_filepath ):
+            os.makedirs( workflow_filepath )
+        open( os.path.join( workflow_filepath, workflow_filename ), 'w+' ).write( workflow )
+        repository = self.test_db_util.get_repository_by_name_and_owner( workflow_repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename=workflow_filename,
+                          filepath=workflow_filepath,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering workflow.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.load_workflow_image_in_tool_shed( repository, workflow_name, strings_displayed=[ '#EBBCB2' ] )
+
+    def test_0030_check_workflow_repository( self ):
+        """Check for strings on the manage page for the filtering_workflow_0060 repository."""
+        repository = self.test_db_util.get_repository_by_name_and_owner( workflow_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'Workflows', 'New workflow for 0060_filter', '0.1' ]
+        strings_not_displayed = [ 'Valid tools', 'Invalid tools' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0035_verify_repository_metadata( self ):
+        '''Verify that resetting the metadata does not change it.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.verify_unchanged_repository_metadata( repository )
diff --git a/test/shed_functional/functional/test_0070_invalid_tool.py b/test/shed_functional/functional/test_0070_invalid_tool.py
new file mode 100644
index 0000000..8002812
--- /dev/null
+++ b/test/shed_functional/functional/test_0070_invalid_tool.py
@@ -0,0 +1,67 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'bismark_0070'
+repository_description = "Galaxy's bismark wrapper"
+repository_long_description = "Long description of Galaxy's bismark wrapper"
+category_name = 'Test 0070 Invalid Tool Revisions'
+category_description = 'Tests for a repository with invalid tool revisions.'
+
+
+class TestBismarkRepository( ShedTwillTestCase ):
+    '''Testing bismark with valid and invalid tool entries.'''
+
+    def test_0000_create_or_login_admin_user( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category_and_repository( self ):
+        """Create a category for this test suite, then create and populate a bismark repository. It should contain at least one each valid and invalid tool."""
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='bismark/bismark.tar',
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded bismark tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Invalid tools' ] )
+        invalid_revision = self.get_repository_tip( repository )
+        self.upload_file( repository,
+                          filename='bismark/bismark_methylation_extractor.xml',
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded an updated tool xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        valid_revision = self.get_repository_tip( repository )
+        self.test_db_util.refresh( repository )
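+        # The expected tool GUID has the form <tool shed host>/repos/<owner>/<repository>/<tool id>/<version>.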
+        tool_guid = '%s/repos/user1/bismark_0070/bismark_methylation_extractor/0.7.7.3' % self.url.replace( 'http://', '' ).rstrip( '/' )
+        tool_metadata_strings_displayed = [ tool_guid,
+                                            '0.7.7.3',  # The tool version.
+                                            'bismark_methylation_extractor',  # The tool ID.
+                                            'Bismark',  # The tool name.
+                                            'methylation extractor' ]  # The tool description.
+        tool_page_strings_displayed = [ 'Bismark (version 0.7.7.3)' ]
+        self.check_repository_tools_for_changeset_revision( repository,
+                                                            valid_revision,
+                                                            tool_metadata_strings_displayed=tool_metadata_strings_displayed,
+                                                            tool_page_strings_displayed=tool_page_strings_displayed )
+        self.check_repository_invalid_tools_for_changeset_revision( repository, invalid_revision )
diff --git a/test/shed_functional/functional/test_0080_advanced_circular_dependencies.py b/test/shed_functional/functional/test_0080_advanced_circular_dependencies.py
new file mode 100644
index 0000000..757935c
--- /dev/null
+++ b/test/shed_functional/functional/test_0080_advanced_circular_dependencies.py
@@ -0,0 +1,98 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+column_repository_name = 'column_maker_0080'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0080'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0080 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+
+class TestRepositoryCircularDependencies( ShedTwillTestCase ):
+    '''Verify that the code correctly handles circular dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_column_repository( self ):
+        """Create and populate the column_maker repository."""
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='column_maker/column_maker.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded column_maker tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_convert_repository( self ):
+        """Create and populate the convert_chars repository."""
+        self.login( email=common.admin_email, username=common.admin_username )
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='convert_chars/convert_chars.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded convert_chars tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_create_repository_dependencies( self ):
+        '''Upload a repository_dependencies.xml file that specifies the current revision of convert_chars_0080 to the column_maker_0080 repository.'''
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0080', additional_paths=[ 'convert' ] )
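+        # A repository dependency is declared as a ( tool shed URL, repository name, owner, changeset revision ) tuple.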
+        repository_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
+        self.create_repository_dependency( repository=column_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0025_create_dependency_on_column_maker( self ):
+        '''Upload a repository_dependencies.xml file that specifies the current revision of column_maker_0080 to the convert_chars_0080 repository.'''
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0080', additional_paths=[ 'convert' ] )
+        repository_tuple = ( self.url, column_repository.name, column_repository.user.username, self.get_repository_tip( column_repository ) )
+        self.create_repository_dependency( repository=convert_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0030_verify_repository_dependencies( self ):
+        '''Verify that each repository can depend on the other without causing an infinite loop.'''
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
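+        # Each repository should report the other as a dependency without triggering infinite recursion.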
+        self.check_repository_dependency( convert_repository, column_repository, self.get_repository_tip( column_repository ) )
+        self.check_repository_dependency( column_repository, convert_repository, self.get_repository_tip( convert_repository ) )
+
+    def test_0035_verify_repository_metadata( self ):
+        '''Verify that resetting the metadata does not change it.'''
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        for repository in [ column_repository, convert_repository ]:
+            self.verify_unchanged_repository_metadata( repository )
diff --git a/test/shed_functional/functional/test_0090_tool_search.py b/test/shed_functional/functional/test_0090_tool_search.py
new file mode 100644
index 0000000..6388c2f
--- /dev/null
+++ b/test/shed_functional/functional/test_0090_tool_search.py
@@ -0,0 +1,191 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0090'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0090'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+filtering_repository_name = 'filtering_0090'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+freebayes_repository_name = 'freebayes_0090'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+bwa_base_repository_name = 'bwa_base_0090'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "NT space mapping with BWA"
+
+bwa_color_repository_name = 'bwa_color_0090'
+bwa_color_repository_description = "BWA Color"
+bwa_color_repository_long_description = "Color space mapping with BWA"
+
+category_name = 'Test 0090 Tool Search And Installation'
+category_description = 'Test 0090 Tool Search And Installation'
+
+
+class TestToolSearchAndInstallation( ShedTwillTestCase ):
+    '''Test tool search and installation features that rely on repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_bwa_base_repository( self ):
+        '''Create and populate bwa_base_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=bwa_base_repository_name,
+                                                    description=bwa_base_repository_description,
+                                                    long_description=bwa_base_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='bwa/bwa_base.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded BWA tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_bwa_color_repository( self ):
+        '''Create and populate bwa_color_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=bwa_color_repository_name,
+                                                    description=bwa_color_repository_description,
+                                                    long_description=bwa_color_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='bwa/bwa_color.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded BWA color tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_emboss_datatypes_repository( self ):
+        '''Create and populate emboss_datatypes_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=emboss_datatypes_repository_name,
+                                                    description=emboss_datatypes_repository_description,
+                                                    long_description=emboss_datatypes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/datatypes/datatypes_conf.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded datatypes_conf.xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_create_emboss_repository( self ):
+        '''Create and populate emboss_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/emboss.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded emboss tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0025_create_filtering_repository( self ):
+        '''Create and populate filtering_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        filtering_repository = self.get_or_create_repository( name=filtering_repository_name,
+                                                              description=filtering_repository_description,
+                                                              long_description=filtering_repository_long_description,
+                                                              owner=common.test_user_1_name,
+                                                              category_id=self.security.encode_id( category.id ),
+                                                              strings_displayed=[] )
+        self.upload_file( filtering_repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 1.1.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0030_create_freebayes_repository( self ):
+        '''Create and populate freebayes_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=freebayes_repository_name,
+                                                    description=freebayes_repository_description,
+                                                    long_description=freebayes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='freebayes/freebayes.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded freebayes tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0035_create_and_upload_dependency_definitions( self ):
+        '''Create and upload repository dependency definitions.'''
+        bwa_color_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+        bwa_base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        dependency_xml_path = self.generate_temp_path( 'test_0090', additional_paths=[ 'freebayes' ] )
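+        # Dependency structure being declared: emboss -> datatypes, filtering -> freebayes,
+        # bwa_base -> emboss, bwa_color -> filtering.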
+        freebayes_tuple = ( self.url, freebayes_repository.name, freebayes_repository.user.username, self.get_repository_tip( freebayes_repository ) )
+        emboss_tuple = ( self.url, emboss_repository.name, emboss_repository.user.username, self.get_repository_tip( emboss_repository ) )
+        datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+        filtering_tuple = ( self.url, filtering_repository.name, filtering_repository.user.username, self.get_repository_tip( filtering_repository ) )
+        self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
+        self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ freebayes_tuple ], filepath=dependency_xml_path )
+        self.create_repository_dependency( repository=bwa_base_repository, repository_tuples=[ emboss_tuple ], filepath=dependency_xml_path )
+        self.create_repository_dependency( repository=bwa_color_repository, repository_tuples=[ filtering_tuple ], filepath=dependency_xml_path )
+
+    def test_0040_verify_repository_dependencies( self ):
+        '''Verify the generated dependency structure.'''
+        bwa_color_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+        bwa_base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        emboss_datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( emboss_repository, emboss_datatypes_repository )
+        self.check_repository_dependency( filtering_repository, freebayes_repository )
+        self.check_repository_dependency( bwa_base_repository, emboss_repository )
+        self.check_repository_dependency( bwa_color_repository, filtering_repository )
diff --git a/test/shed_functional/functional/test_0100_complex_repository_dependencies.py b/test/shed_functional/functional/test_0100_complex_repository_dependencies.py
new file mode 100644
index 0000000..9d48f7d
--- /dev/null
+++ b/test/shed_functional/functional/test_0100_complex_repository_dependencies.py
@@ -0,0 +1,225 @@
+import logging
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+bwa_base_repository_name = 'bwa_base_repository_0100'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "BWA tool that depends on bwa 0.5.9, with a complex repository dependency pointing at package_bwa_0_5_9_0100"
+
+bwa_package_repository_name = 'package_bwa_0_5_9_0100'
+bwa_package_repository_description = "BWA Tool"
+bwa_package_repository_long_description = "BWA repository with a package tool dependency defined for BWA 0.5.9."
+
+category_name = 'Test 0100 Complex Repository Dependencies'
+category_description = 'Test 0100 Complex Repository Dependencies'
+
+
+class TestComplexRepositoryDependencies( ShedTwillTestCase ):
+    '''Test features related to complex repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_bwa_package_repository( self ):
+        '''Create and populate package_bwa_0_5_9_0100.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        # Create a repository named package_bwa_0_5_9_0100 owned by user1.
+        repository = self.get_or_create_repository( name=bwa_package_repository_name,
+                                                    description=bwa_package_repository_description,
+                                                    long_description=bwa_package_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='bwa/complex/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded tool_dependencies.xml.',
+                          strings_displayed=[ 'This repository currently contains a single file named <b>tool_dependencies.xml</b>' ],
+                          strings_not_displayed=[] )
+        # Visit the manage repository page for package_bwa_0_5_9_0100.
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'will not be', 'to this repository' ] )
+
+    def test_0010_create_bwa_base_repository( self ):
+        '''Create and populate bwa_base_repository_0100.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        # Create a repository named bwa_base_repository_0100 owned by user1.
+        repository = self.get_or_create_repository( name=bwa_base_repository_name,
+                                                    description=bwa_base_repository_description,
+                                                    long_description=bwa_base_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Populate the repository named bwa_base_repository_0100 with a bwa_base tool archive.
+        self.upload_file( repository,
+                          filename='bwa/complex/bwa_base.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_generate_complex_repository_dependency_invalid_shed_url( self ):
+        '''Generate and upload a complex repository definition that specifies an invalid tool shed URL.'''
+        dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
+        # The repository named bwa_base_repository_0100 is the dependent repository.
+        base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        # The repository named package_bwa_0_5_9_0100 is the required repository.
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+        url = 'http://http://this is not an url!'
+        name = 'package_bwa_0_5_9_0100'
+        owner = 'user1'
+        changeset_revision = self.get_repository_tip( tool_repository )
+        strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
+        # Populate the dependent repository named bwa_base_repository_0100 with an invalid tool_dependencies.xml file.
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=base_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           strings_displayed=strings_displayed,
+                                           complex=True,
+                                           package='bwa',
+                                           version='0.5.9' )
+
+    def test_0020_generate_complex_repository_dependency_invalid_repository_name( self ):
+        '''Generate and upload a complex repository definition that specifies an invalid repository name.'''
+        dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
+        # The base_repository named bwa_base_repository_0100 is the dependent repository.
+        base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        # The repository named package_bwa_0_5_9_0100 is the required repository.
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+        url = self.url
+        name = 'invalid_repository!?'
+        owner = 'user1'
+        changeset_revision = self.get_repository_tip( tool_repository )
+        strings_displayed = [ 'because the name is invalid' ]
+        # Populate the dependent repository named bwa_base_repository_0100 with an invalid tool_dependencies.xml file.
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=base_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           strings_displayed=strings_displayed,
+                                           complex=True,
+                                           package='bwa',
+                                           version='0.5.9' )
+
+    def test_0025_generate_complex_repository_dependency_invalid_owner_name( self ):
+        '''Generate and upload a complex repository definition that specifies an invalid owner.'''
+        dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
+        # The base_repository named bwa_base_repository_0100 is the dependent repository.
+        base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        # The repository named package_bwa_0_5_9_0100 is the required repository.
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+        url = self.url
+        name = 'package_bwa_0_5_9_0100'
+        owner = 'invalid_owner!?'
+        changeset_revision = self.get_repository_tip( tool_repository )
+        strings_displayed = [ 'because the owner is invalid.' ]
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=base_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           strings_displayed=strings_displayed,
+                                           complex=True,
+                                           package='bwa',
+                                           version='0.5.9' )
+
+    def test_0030_generate_complex_repository_dependency_invalid_changeset_revision( self ):
+        '''Generate and upload a complex repository definition that specifies an invalid changeset revision.'''
+        dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
+        # The base_repository named bwa_base_repository_0100 is the dependent repository.
+        base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        # The repository named package_bwa_0_5_9_0100 is the required repository.
+        url = self.url
+        name = 'package_bwa_0_5_9_0100'
+        owner = 'user1'
+        changeset_revision = '1234abcd'
+        strings_displayed = [ 'because the changeset revision is invalid.' ]
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=base_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           strings_displayed=strings_displayed,
+                                           complex=True,
+                                           package='bwa',
+                                           version='0.5.9' )
+
+    def test_0035_generate_complex_repository_dependency( self ):
+        '''Generate and upload a valid tool_dependencies.xml file that specifies package_bwa_0_5_9_0100.'''
+        # The base_repository named bwa_base_repository_0100 is the dependent repository.
+        base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        # The repository named package_bwa_0_5_9_0100 is the required repository.
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+        dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex' ] )
+        url = self.url
+        name = 'package_bwa_0_5_9_0100'
+        owner = 'user1'
+        changeset_revision = self.get_repository_tip( tool_repository )
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=base_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           complex=True,
+                                           package='bwa',
+                                           version='0.5.9' )
+        self.check_repository_dependency( base_repository, depends_on_repository=tool_repository )
+        self.display_manage_repository_page( base_repository, strings_displayed=[ 'bwa', '0.5.9', 'package', changeset_revision ] )
+
+    def test_0040_generate_tool_dependency( self ):
+        '''Generate and upload a new tool_dependencies.xml file that specifies an arbitrary file on the filesystem, and verify that bwa_base depends on the new changeset revision.'''
+        # The base_repository named bwa_base_repository_0100 is the dependent repository.
+        base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        # The repository named package_bwa_0_5_9_0100 is the required repository.
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+        previous_changeset = self.get_repository_tip( tool_repository )
+        old_tool_dependency = self.get_filename( os.path.join( 'bwa', 'complex', 'readme', 'tool_dependencies.xml' ) )
+        new_tool_dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'tool_dependency' ] )
+        xml_filename = os.path.abspath( os.path.join( new_tool_dependency_path, 'tool_dependencies.xml' ) )
+        # Generate a tool_dependencies.xml file that points to an arbitrary file in the local filesystem.
+        tool_dependency_definition = open( old_tool_dependency, 'r' ).read().replace( '__PATH__', self.get_filename( 'bwa/complex' ) )
+        open( xml_filename, 'w' ).write( tool_dependency_definition )
+        self.upload_file( tool_repository,
+                          filename=xml_filename,
+                          filepath=new_tool_dependency_path,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded new tool_dependencies.xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        # Visit the manage page of package_bwa_0_5_9_0100 to verify that the dependency display has been updated for the new tool_dependencies.xml file.
+        repository_tip = self.get_repository_tip( tool_repository )
+        strings_displayed = [ 'bwa', '0.5.9', 'package' ]
+        strings_displayed.append( repository_tip )
+        strings_not_displayed = [ previous_changeset ]
+        self.display_manage_repository_page( tool_repository,
+                                             strings_displayed=strings_displayed,
+                                             strings_not_displayed=strings_not_displayed )
+        # Visit the manage page of the bwa_base_repository_0100 to confirm the valid tool dependency definition
+        # and the updated changeset revision (updated tip) of the package_bwa_0_5_9_0100 repository is displayed
+        # as the required repository revision.  The original revision defined in the previously uploaded
+        # tool_dependencies.xml file will be updated.
+        self.display_manage_repository_page( base_repository,
+                                             strings_displayed=strings_displayed,
+                                             strings_not_displayed=strings_not_displayed )
diff --git a/test/shed_functional/functional/test_0110_invalid_simple_repository_dependencies.py b/test/shed_functional/functional/test_0110_invalid_simple_repository_dependencies.py
new file mode 100644
index 0000000..da4ee4a
--- /dev/null
+++ b/test/shed_functional/functional/test_0110_invalid_simple_repository_dependencies.py
@@ -0,0 +1,143 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0110'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+
+emboss_repository_name = 'emboss_0110'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+category_name = 'Test 0110 Invalid Repository Dependencies'
+category_desc = 'Test 0110 Invalid Repository Dependencies'
+
+
+class TestBasicRepositoryDependencies( ShedTwillTestCase ):
+    '''Testing emboss 5 with repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category( self ):
+        """Create a category for this test suite"""
+        self.create_category( name=category_name, description=category_desc )
+
+    def test_0010_create_emboss_datatypes_repository_and_upload_datatypes_conf( self ):
+        '''Create the emboss_datatypes repository and populate it with datatypes_conf.xml.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                    description=datatypes_repository_description,
+                                                    long_description=datatypes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/datatypes/datatypes_conf.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded datatypes_conf.xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_verify_datatypes_in_datatypes_repository( self ):
+        '''Verify that the emboss_datatypes repository contains datatype entries.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Datatypes', 'equicktandem', 'hennig86', 'vectorstrip' ] )
+
+    def test_0020_create_emboss_5_repository_and_upload_files( self ):
+        '''Create and populate the emboss_0110 repository.'''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/emboss.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded emboss tool tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0025_generate_repository_dependency_with_invalid_url( self ):
+        '''Generate a repository dependency for emboss 5 with an invalid URL.'''
+        dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        url = 'http://http://this is not an url!'
+        name = datatypes_repository.name
+        owner = datatypes_repository.user.username
+        changeset_revision = self.get_repository_tip( datatypes_repository )
+        strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
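+        # The invalid URL should be rejected, since dependencies may only reference repositories in the same tool shed.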
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=emboss_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           strings_displayed=strings_displayed,
+                                           complex=False )
+
+    def test_0030_generate_repository_dependency_with_invalid_name( self ):
+        '''Generate a repository dependency for emboss 5 with an invalid name.'''
+        dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        url = self.url
+        name = '!?invalid?!'
+        owner = repository.user.username
+        changeset_revision = self.get_repository_tip( repository )
+        strings_displayed = [ 'because the name is invalid.' ]
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=emboss_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           strings_displayed=strings_displayed,
+                                           complex=False )
+
+    def test_0035_generate_repository_dependency_with_invalid_owner( self ):
+        '''Generate a repository dependency for emboss 5 with an invalid owner.'''
+        dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        url = self.url
+        name = repository.name
+        owner = '!?invalid?!'
+        changeset_revision = self.get_repository_tip( repository )
+        strings_displayed = [ 'because the owner is invalid.' ]
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=emboss_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           strings_displayed=strings_displayed,
+                                           complex=False )
+
+    def test_0040_generate_repository_dependency_with_invalid_changeset_revision( self ):
+        '''Generate a repository dependency for emboss 5 with an invalid changeset revision.'''
+        dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple', 'invalid' ] )
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        url = self.url
+        name = repository.name
+        owner = repository.user.username
+        changeset_revision = '!?invalid?!'
+        strings_displayed = [ 'because the changeset revision is invalid.' ]
+        repository_tuple = ( url, name, owner, changeset_revision )
+        self.create_repository_dependency( repository=emboss_repository,
+                                           filepath=dependency_path,
+                                           repository_tuples=[ repository_tuple ],
+                                           strings_displayed=strings_displayed,
+                                           complex=False )
diff --git a/test/shed_functional/functional/test_0120_simple_repository_dependency_multiple_owners.py b/test/shed_functional/functional/test_0120_simple_repository_dependency_multiple_owners.py
new file mode 100644
index 0000000..1e40daf
--- /dev/null
+++ b/test/shed_functional/functional/test_0120_simple_repository_dependency_multiple_owners.py
@@ -0,0 +1,141 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'blast_datatypes_0120'
+datatypes_repository_description = 'Galaxy applicable datatypes for BLAST'
+datatypes_repository_long_description = 'Galaxy datatypes for the BLAST top hit descriptions tool'
+
+tool_repository_name = 'blastxml_to_top_descr_0120'
+tool_repository_description = 'BLAST top hit descriptions'
+tool_repository_long_description = 'Make a table from BLAST XML'
+
+'''
+Tool shed side:
+
+1) Create and populate blast_datatypes_0120.
+1a) Check for appropriate strings.
+2) Create and populate blastxml_to_top_descr_0120.
+2a) Check for appropriate strings.
+3) Upload repository_dependencies.xml to blastxml_to_top_descr_0120 that defines a relationship to blast_datatypes_0120.
+3a) Check for appropriate strings.
+'''
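+
+# For orientation, the repository_dependencies.xml uploaded in step 3 is generated at
+# runtime by create_repository_dependency; a minimal sketch of its shape (placeholder
+# values, filled in with the real tool shed URL, owner, and tip revision at runtime):
+#
+#     <?xml version="1.0"?>
+#     <repositories>
+#         <repository toolshed="<self.url>" name="blast_datatypes_0120" owner="<test_user_2>" changeset_revision="<tip>" />
+#     </repositories>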
+
+base_datatypes_count = 0
+repository_datatypes_count = 0
+
+
+class TestRepositoryMultipleOwners( ShedTwillTestCase ):
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        """
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+        Previously created accounts will not be re-created.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_datatypes_repository( self ):
+        """Create and populate the blast_datatypes_0120 repository"""
+        """
+        We are at step 1.
+        Create and populate blast_datatypes.
+        """
+        category = self.create_category( name='Test 0120', description='Description of test 0120' )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        strings_displayed = self.expect_repo_created_strings(datatypes_repository_name)
+        repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                    description=datatypes_repository_description,
+                                                    long_description=datatypes_repository_long_description,
+                                                    owner=common.test_user_2_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='blast/blast_datatypes.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded blast_datatypes tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_verify_datatypes_repository( self ):
+        '''Verify the blast_datatypes_0120 repository.'''
+        '''
+        We are at step 1a.
+        Check for appropriate strings, most importantly BlastXml, BlastNucDb, and BlastProtDb,
+        the datatypes that are defined in datatypes_conf.xml.
+        '''
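+        # A sketch of the kind of entry in datatypes_conf.xml that produces these strings
+        # (illustrative only; the real definitions ship inside blast_datatypes.tar):
+        #     <datatype extension="blastxml" type="galaxy.datatypes.xml:BlastXml" mimetype="application/xml" display_in_upload="true"/>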
+        global repository_datatypes_count
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+        strings_displayed = [ 'BlastXml', 'BlastNucDb', 'BlastProtDb', 'application/xml', 'text/html', 'blastxml', 'blastdbn', 'blastdbp' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        repository_datatypes_count = int( self.get_repository_datatypes_count( repository ) )
+
+    def test_0015_create_tool_repository( self ):
+        """Create and populate the blastxml_to_top_descr_0120 repository"""
+        """
+        We are at step 2.
+        Create and populate blastxml_to_top_descr_0120.
+        """
+        category = self.create_category( name='Test 0120', description='Description of test 0120' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = self.expect_repo_created_strings(tool_repository_name)
+        repository = self.get_or_create_repository( name=tool_repository_name,
+                                                    description=tool_repository_description,
+                                                    long_description=tool_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='blast/blastxml_to_top_descr.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded blastxml_to_top_descr tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_verify_tool_repository( self ):
+        '''Verify the blastxml_to_top_descr_0120 repository.'''
+        '''
+        We are at step 2a.
+        Check for appropriate strings, such as tool name, description, and version.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'blastxml_to_top_descr_0120', 'BLAST top hit descriptions', 'Make a table from BLAST XML' ]
+        strings_displayed.extend( [ '0.0.1', 'Valid tools' ] )
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+
+    def test_0025_create_repository_dependency( self ):
+        '''Create a repository dependency on blast_datatypes_0120.'''
+        '''
+        We are at step 3.
+        Create a simple repository dependency for blastxml_to_top_descr_0120 that defines a dependency on blast_datatypes_0120.
+        '''
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+        dependency_xml_path = self.generate_temp_path( 'test_0120', additional_paths=[ 'dependencies' ] )
+        datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+        self.create_repository_dependency( repository=tool_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
+
+    def test_0040_verify_repository_dependency( self ):
+        '''Verify the created repository dependency.'''
+        '''
+        We are at step 3a.
+        Check the newly created repository dependency to ensure that it was defined and displays correctly.
+        '''
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( tool_repository, datatypes_repository )
diff --git a/test/shed_functional/functional/test_0130_datatype_converters.py b/test/shed_functional/functional/test_0130_datatype_converters.py
new file mode 100644
index 0000000..8bbbb84
--- /dev/null
+++ b/test/shed_functional/functional/test_0130_datatype_converters.py
@@ -0,0 +1,78 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'bed_to_gff_0130'
+repository_description = "Converter: BED to GFF"
+repository_long_description = "Convert bed to gff"
+
+category_name = 'Test 0130 Datatype Converters'
+category_description = 'Test 0130 Datatype Converters'
+
+'''
+1) Create and populate the bed_to_gff_0130 repository.
+2) Visit the manage repository page and make sure there is the appropriate valid tool and datatype.
+3) Visit the view tool metadata page and make sure that "Display in tool panel" is False.
+'''
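+
+# A datatype converter is wired up in datatypes_conf.xml roughly as sketched below
+# (illustrative; the actual definition ships inside bed_to_gff_converter.tar):
+#
+#     <datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true">
+#         <converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
+#     </datatype>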
+
+
+class TestDatatypeConverters( ShedTwillTestCase ):
+    '''Test features related to datatype converters.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_bed_to_gff_repository( self ):
+        '''Create and populate bed_to_gff_0130.'''
+        '''
+        We are at step 1 - Create and populate the bed_to_gff_0130 repository.
+        Create the bed_to_gff_0130 repository and populate it with the files needed for this test.
+        '''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        # Create a repository named bed_to_gff_0130 owned by user1.
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Upload bed_to_gff_converter.tar to the repository.
+        self.upload_file( repository,
+                          filename='bed_to_gff_converter/bed_to_gff_converter.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded bed_to_gff_converter.tar.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_verify_tool_and_datatype( self ):
+        '''Verify that a valid tool and datatype are contained within the repository.'''
+        '''
+        We are at step 2 - Visit the manage repository page and make sure there is the appropriate valid tool and datatype.
+        There should be a 'Convert BED to GFF' tool and a 'galaxy.datatypes.interval:Bed' datatype with extension 'bed'
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        strings_displayed = [ 'Convert BED to GFF', 'galaxy.datatypes.interval:Bed', 'bed', 'Valid tools', 'Datatypes' ]
+        strings_not_displayed = [ 'Invalid tools' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0015_verify_tool_panel_display( self ):
+        '''Verify that the tool is configured not to be displayed in the tool panel.'''
+        '''
+        We are at step 3.
+        Datatype converters that are associated with a datatype should have display in tool panel = False in the tool metadata.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        tool_metadata_strings_displayed = '<label>Display in tool panel:</label>\n                    False'
+        self.check_repository_tools_for_changeset_revision( repository,
+                                                            self.get_repository_tip( repository ),
+                                                            tool_metadata_strings_displayed=tool_metadata_strings_displayed )
diff --git a/test/shed_functional/functional/test_0140_tool_help_images.py b/test/shed_functional/functional/test_0140_tool_help_images.py
new file mode 100644
index 0000000..36786d6
--- /dev/null
+++ b/test/shed_functional/functional/test_0140_tool_help_images.py
@@ -0,0 +1,83 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger(__name__)
+
+repository_name = 'htseq_count_0140'
+repository_description = "Converter: BED to GFF"
+repository_long_description = "Convert bed to gff"
+
+category_name = 'Test 0140 Tool Help Images'
+category_description = 'Test 0140 Tool Help Images'
+
+'''
+1) Create and populate the htseq_count_0140 repository.
+2) Visit the manage_repository page.
+3) Simulate clicking the htseq_count tool button.
+4) On the resulting display tool page, look for an image string similar to the following, where <id> is the previously determined encoded repository id:
+
+src="/repository/static/images/<id>/count_modes.png"
+'''
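+
+# The image string comes from the reStructuredText help section of the tool's XML wrapper;
+# a minimal sketch of how such help might embed an image (illustrative only, not
+# necessarily the actual htseq_count wrapper shipped in htseq_count.tar):
+#
+#     <help>
+#         .. image:: count_modes.png
+#     </help>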
+
+
+class TestToolHelpImages( ShedTwillTestCase ):
+    '''Test features related to tool help images.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_htseq_count_repository( self ):
+        '''Create and populate htseq_count_0140.'''
+        '''
+        We are at step 1 - Create and populate the htseq_count_0140 repository.
+        Create the htseq_count_0140 repository and upload the tarball.
+        '''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        # Create a repository named htseq_count_0140 owned by user1.
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Upload htseq_count.tar to the repository.
+        self.upload_file( repository,
+                          filename='htseq_count/htseq_count.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded htseq_count.tar.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_load_tool_page( self ):
+        '''Load the tool page and check for the image.'''
+        '''
+        We are at steps 2 through 4.
+        Visit the manage_repository page, then the tool page, and look for an image URL
+        similar to the following string:
+
+        src="/repository/static/images/<id>/count_modes.png"
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        # Get the repository tip.
+        changeset_revision = self.get_repository_tip( repository )
+        self.display_manage_repository_page( repository )
+        # Generate the image path.
+        image_path = 'src="/repository/static/images/%s/count_modes.png"' % self.security.encode_id( repository.id )
+        # The repository uploaded in this test should only have one metadata revision, with one tool defined, which
+        # should be the tool that contains a link to the image.
+        repository_metadata = repository.metadata_revisions[ 0 ].metadata
+        tool_path = repository_metadata[ 'tools' ][ 0 ][ 'tool_config' ]
+        self.load_display_tool_page( repository, tool_path, changeset_revision, strings_displayed=[ image_path ], strings_not_displayed=[] )
diff --git a/test/shed_functional/functional/test_0150_prior_installation_required.py b/test/shed_functional/functional/test_0150_prior_installation_required.py
new file mode 100644
index 0000000..ffd0cdd
--- /dev/null
+++ b/test/shed_functional/functional/test_0150_prior_installation_required.py
@@ -0,0 +1,104 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+column_repository_name = 'column_maker_0150'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0150'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0150 Simple Prior Installation'
+category_description = 'Test 0150 Simple Prior Installation'
+
+'''
+Create column_maker and convert_chars.
+
+Column maker repository dependency:
+<repository toolshed="self.url" name="convert_chars" owner="test" changeset_revision="c3041382815c" prior_installation_required="True" />
+
+Verify display.
+
+Galaxy side:
+
+Install column_maker.
+Verify that convert_chars was installed first, contrary to the ordering that would be present without prior_installation_required.
+'''
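+
+# The full repository_dependencies.xml wrapping that entry looks roughly like the sketch
+# below (placeholder values; create_repository_dependency generates the real file with the
+# actual tool shed URL, owner, and tip revision):
+#
+#     <?xml version="1.0"?>
+#     <repositories description="...">
+#         <repository toolshed="<self.url>" name="convert_chars_0150" owner="<test_user_1>" changeset_revision="<tip>" prior_installation_required="True" />
+#     </repositories>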
+
+
+class TestSimplePriorInstallation( ShedTwillTestCase ):
+    '''Test the prior_installation_required attribute of simple repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_convert_repository( self ):
+        '''Create and populate convert_chars_0150.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='convert_chars/convert_chars.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded convert_chars tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_column_repository( self ):
+        '''Create and populate column_maker_0150.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='column_maker/column_maker.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded column_maker tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_repository_dependency( self ):
+        '''Create a repository dependency specifying convert_chars.'''
+        '''
+        Column maker repository dependency:
+            <repository toolshed="self.url" name="convert_chars" owner="test" changeset_revision="<tip>" prior_installation_required="True" />
+        '''
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        dependency_xml_path = self.generate_temp_path( 'test_0150', additional_paths=[ 'column' ] )
+        convert_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
+        self.create_repository_dependency( repository=column_repository,
+                                           repository_tuples=[ convert_tuple ],
+                                           filepath=dependency_xml_path,
+                                           prior_installation_required=True )
+
+    def test_0020_verify_repository_dependency( self ):
+        '''Verify that the previously generated repository dependency displays correctly.'''
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( repository=column_repository,
+                                          depends_on_repository=convert_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
diff --git a/test/shed_functional/functional/test_0160_circular_prior_installation_required.py b/test/shed_functional/functional/test_0160_circular_prior_installation_required.py
new file mode 100644
index 0000000..c3fa3f4
--- /dev/null
+++ b/test/shed_functional/functional/test_0160_circular_prior_installation_required.py
@@ -0,0 +1,156 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+filter_repository_name = 'filtering_0160'
+filter_repository_description = "Galaxy's filtering tool for test 0160"
+filter_repository_long_description = "Long description of Galaxy's filtering tool for test 0160"
+
+column_repository_name = 'column_maker_0160'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0160'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0160 Simple Prior Installation'
+category_description = 'Test 0160 Simple Prior Installation'
+
+'''
+Create the filtering, column_maker, and convert_chars repositories.
+
+Each repository depends on the other two; filtering marks its dependencies with
+prior_installation_required="True", e.g.:
+<repository toolshed="self.url" name="convert_chars" owner="test" changeset_revision="<tip>" prior_installation_required="True" />
+
+Verify display.
+'''
+
+
+class TestSimplePriorInstallation( ShedTwillTestCase ):
+    '''Test prior_installation_required with circular repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_convert_repository( self ):
+        '''Create and populate convert_chars_0160.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='convert_chars/convert_chars.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded convert_chars tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_column_repository( self ):
+        '''Create and populate column_maker_0160.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='column_maker/column_maker.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded column_maker tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_filtering_repository( self ):
+        '''Create and populate filtering_0160.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=filter_repository_name,
+                                                    description=filter_repository_description,
+                                                    long_description=filter_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 1.1.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_create_repository_dependency( self ):
+        '''Create circular repository dependencies among filtering, column_maker, and convert_chars.'''
+        '''
+        Each of the three repositories should depend on the other two, to make this as circular as possible.
+        '''
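+        # Resulting dependency graph (arrows read "depends on"); only filtering marks its
+        # dependencies as prior_installation_required:
+        #     column_maker_0160  -> convert_chars_0160, filtering_0160
+        #     convert_chars_0160 -> column_maker_0160, filtering_0160
+        #     filtering_0160     -> convert_chars_0160, column_maker_0160  (prior_installation_required=True)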
+        filter_repository = self.test_db_util.get_repository_by_name_and_owner( filter_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        dependency_xml_path = self.generate_temp_path( 'test_0160', additional_paths=[ 'column' ] )
+        filter_revision = self.get_repository_tip( filter_repository )
+        column_revision = self.get_repository_tip( column_repository )
+        convert_revision = self.get_repository_tip( convert_repository )
+        column_tuple = ( self.url, column_repository.name, column_repository.user.username, column_revision )
+        convert_tuple = ( self.url, convert_repository.name, convert_repository.user.username, convert_revision )
+        filter_tuple = ( self.url, filter_repository.name, filter_repository.user.username, filter_revision )
+        self.create_repository_dependency( repository=column_repository,
+                                           repository_tuples=[ convert_tuple, filter_tuple ],
+                                           filepath=dependency_xml_path,
+                                           prior_installation_required=False )
+        self.create_repository_dependency( repository=convert_repository,
+                                           repository_tuples=[ column_tuple, filter_tuple ],
+                                           filepath=dependency_xml_path,
+                                           prior_installation_required=False )
+        self.create_repository_dependency( repository=filter_repository,
+                                           repository_tuples=[ convert_tuple, column_tuple ],
+                                           filepath=dependency_xml_path,
+                                           prior_installation_required=True )
+
+    def test_0025_verify_repository_dependency( self ):
+        '''Verify that the previously generated repository dependencies display correctly.'''
+        filter_repository = self.test_db_util.get_repository_by_name_and_owner( filter_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( repository=column_repository,
+                                          depends_on_repository=convert_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=column_repository,
+                                          depends_on_repository=filter_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=convert_repository,
+                                          depends_on_repository=column_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=convert_repository,
+                                          depends_on_repository=filter_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=filter_repository,
+                                          depends_on_repository=column_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=filter_repository,
+                                          depends_on_repository=convert_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
diff --git a/test/shed_functional/functional/test_0170_complex_prior_installation_required.py b/test/shed_functional/functional/test_0170_complex_prior_installation_required.py
new file mode 100644
index 0000000..828e364
--- /dev/null
+++ b/test/shed_functional/functional/test_0170_complex_prior_installation_required.py
@@ -0,0 +1,132 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+matplotlib_repository_name = 'package_matplotlib_1_2_0170'
+matplotlib_repository_description = "Contains a tool dependency definition that downloads and compiles version 1.2.x of the the python matplotlib package."
+matplotlib_repository_long_description = "This repository is intended to be defined as a complex repository dependency within a separate repository."
+
+numpy_repository_name = 'package_numpy_1_7_0170'
+numpy_repository_description = "Contains a tool dependency definition that downloads and compiles version 1.7 of the the python numpy package."
+numpy_repository_long_description = "This repository is intended to be defined as a complex repository dependency within a separate repository."
+
+category_name = 'Test 0170 Prior Installation Complex Dependencies'
+category_description = 'Test 0170 Prior Installation Complex Dependencies'
+
+'''
+1. Create and populate repositories package_matplotlib_1_2_0170 and package_numpy_1_7_0170.
+2. Create a complex repository dependency on package_numpy_1_7_0170, and upload this to package_matplotlib_1_2_0170.
+3. Verify that package_matplotlib_1_2_0170 now depends on package_numpy_1_7_0170, and that the inherited tool dependency displays correctly.
+'''
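+
+# A complex repository dependency is a <repository> tag nested inside a <package> tag in
+# tool_dependencies.xml; the test below splices a fragment of this shape into matplotlib's
+# existing file (placeholder values, filled in with the real URL, owner, and tip revision
+# at runtime):
+#
+#     <package name="numpy" version="1.7">
+#         <repository toolshed="<self.url>" name="package_numpy_1_7_0170" owner="<test_user_1>" changeset_revision="<tip>" prior_installation_required="True" />
+#     </package>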
+
+
+class TestComplexPriorInstallation( ShedTwillTestCase ):
+    '''Test prior_installation_required with complex repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_matplotlib_repository( self ):
+        '''Create and populate the package_matplotlib_1_2_0170 repository.'''
+        '''
+        This is step 1 - Create and populate repositories package_matplotlib_1_2_0170 and package_numpy_1_7_0170.
+        '''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=matplotlib_repository_name,
+                                                    description=matplotlib_repository_description,
+                                                    long_description=matplotlib_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='package_matplotlib/package_matplotlib_1_2.tar',
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded matplotlib tool dependency tarball.',
+                          strings_displayed=[ 'This repository currently contains a single file named <b>tool_dependencies.xml</b>' ],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_numpy_repository( self ):
+        '''Create and populate the package_numpy_1_7_0170 repository.'''
+        '''
+        This is step 1 - Create and populate repositories package_matplotlib_1_2_0170 and package_numpy_1_7_0170.
+        '''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=numpy_repository_name,
+                                                    description=numpy_repository_description,
+                                                    long_description=numpy_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='package_numpy/package_numpy_1_7.tar',
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded numpy tool dependency tarball.',
+                          strings_displayed=[ 'This repository currently contains a single file named <b>tool_dependencies.xml</b>' ],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_complex_repository_dependency( self ):
+        '''Create a dependency on package_numpy_1_7_0170.'''
+        '''
+        This is step 2 - Create a complex repository dependency on package_numpy_1_7_0170, and upload this to package_matplotlib_1_2_0170.
+        package_matplotlib_1_2_0170 should depend on package_numpy_1_7_0170, with prior_installation_required
+        set to True. When matplotlib is selected for installation, the result should be that numpy is compiled
+        and installed first.
+        '''
+        numpy_repository = self.test_db_util.get_repository_by_name_and_owner( numpy_repository_name, common.test_user_1_name )
+        matplotlib_repository = self.test_db_util.get_repository_by_name_and_owner( matplotlib_repository_name, common.test_user_1_name )
+        # Generate the new dependency XML. Normally, the create_repository_dependency method would be used for this, but
+        # it replaces any existing tool or repository dependency XML file with the generated contents. This is undesirable
+        # in this case, because matplotlib already has an additional tool dependency definition that we don't want to
+        # overwrite.
+        new_xml = '    <package name="numpy" version="1.7">\n'
+        new_xml += '        <repository toolshed="%s" name="%s" owner="%s" changeset_revision="%s" prior_installation_required="True" />\n'
+        new_xml += '    </package>\n'
+        url = self.url
+        name = numpy_repository.name
+        owner = numpy_repository.user.username
+        changeset_revision = self.get_repository_tip( numpy_repository )
+        processed_xml = new_xml % ( url, name, owner, changeset_revision )
+        with open( self.get_filename( 'package_matplotlib/tool_dependencies.xml' ), 'r' ) as original_xml_file:
+            original_xml = original_xml_file.read()
+        dependency_xml_path = self.generate_temp_path( 'test_0170', additional_paths=[ 'matplotlib' ] )
+        new_xml_file = os.path.join( dependency_xml_path, 'tool_dependencies.xml' )
+        with open( new_xml_file, 'w' ) as new_xml_handle:
+            new_xml_handle.write( original_xml.replace( '<!--NUMPY-->', processed_xml ) )
+        # Upload the generated complex repository dependency XML to the matplotlib repository.
+        self.upload_file( matplotlib_repository,
+                          filename='tool_dependencies.xml',
+                          filepath=dependency_xml_path,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded complex repository dependency on numpy 1.7.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_verify_generated_dependency( self ):
+        '''Verify that matplotlib now has a package tool dependency and a complex repository dependency.'''
+        '''
+        This is step 3 - Verify that package_matplotlib_1_2_0170 now depends on package_numpy_1_7_0170, and that the inherited tool
+                         dependency displays correctly.
+        'Inherited' in this case means that matplotlib should show a package tool dependency on numpy version 1.7, and a repository
+        dependency on the latest revision of package_numpy_1_7_0170.
+        '''
+        numpy_repository = self.test_db_util.get_repository_by_name_and_owner( numpy_repository_name, common.test_user_1_name )
+        matplotlib_repository = self.test_db_util.get_repository_by_name_and_owner( matplotlib_repository_name, common.test_user_1_name )
+        changeset_revision = self.get_repository_tip( numpy_repository )
+        self.check_repository_dependency( matplotlib_repository, depends_on_repository=numpy_repository )
+        self.display_manage_repository_page( matplotlib_repository, strings_displayed=[ 'numpy', '1.7', 'package', changeset_revision ] )
diff --git a/test/shed_functional/functional/test_0300_reset_all_metadata.py b/test/shed_functional/functional/test_0300_reset_all_metadata.py
new file mode 100644
index 0000000..a8a8e10
--- /dev/null
+++ b/test/shed_functional/functional/test_0300_reset_all_metadata.py
@@ -0,0 +1,603 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0020'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+workflow_filename = 'Workflow_for_0060_filter_workflow_repository.ga'
+workflow_name = 'Workflow for 0060_filter_workflow_repository'
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0050'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+
+emboss_repository_name = 'emboss_0050'
+emboss_5_repository_name = 'emboss_5_0050'
+emboss_6_repository_name = 'emboss_6_0050'
+
+filtering_repository_name = 'filtering_0050'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+freebayes_repository_name = 'freebayes_0050'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+column_repository_name = 'column_maker_0050'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0050'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+bismark_repository_name = 'bismark_0050'
+bismark_repository_description = "A flexible aligner."
+bismark_repository_long_description = "A flexible aligner and methylation caller for Bisulfite-Seq applications."
+
+category_0050_name = 'Test 0050 Circular Dependencies 5 Levels'
+category_0050_description = 'Test circular dependency features'
+
+running_standalone = False
+
+
+class TestResetAllRepositoryMetadata( ShedTwillTestCase ):
+    '''Verify that the "Reset selected metadata" feature works.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_filtering_repository( self ):
+        '''Create and populate the filtering_0000 repository.'''
+        global running_standalone
+        self.login( email=common.admin_email, username=common.admin_username )
+        category_0000 = self.create_category( name='Test 0000 Basic Repository Features 1', description='Test 0000 Basic Repository Features 1' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name='filtering_0000',
+                                                    description="Galaxy's filtering tool",
+                                                    long_description="Long description of Galaxy's filtering tool",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category_0000.id ) )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_2.2.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 2.2.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_create_freebayes_repository( self ):
+        '''Create and populate the freebayes_0010 repository.'''
+        global running_standalone
+        self.login( email=common.admin_email, username=common.admin_username )
+        category_0010 = self.create_category( name='Test 0010 Repository With Tool Dependencies', description='Tests for a repository with tool dependencies.' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name='freebayes_0010',
+                                                    description="Galaxy's freebayes tool",
+                                                    long_description="Long description of Galaxy's freebayes tool",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category_0010.id ),
+                                                    strings_displayed=[] )
+        if running_standalone:
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded freebayes.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/tool_data_table_conf.xml.sample',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool_data_table_conf.xml.sample',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/sam_fa_indices.loc.sample',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded sam_fa_indices.loc.sample',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/tool_dependencies.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool_dependencies.xml',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_datatypes_0020_repository( self ):
+        '''Create and populate the emboss_datatypes_0020 repository.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category_0020 = self.create_category( name='Test 0020 Basic Repository Dependencies', description='Testing basic repository dependency features.' )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name='emboss_datatypes_0020',
+                                                        description=datatypes_repository_description,
+                                                        long_description=datatypes_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category_0020.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0020_create_emboss_0020_repository( self ):
+        '''Create and populate the emboss_0020 repository.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category_0020 = self.create_category( name='Test 0020 Basic Repository Dependencies', description='Testing basic repository dependency features.' )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name='emboss_0020',
+                                                        description=emboss_repository_description,
+                                                        long_description=emboss_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category_0020.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0025_create_emboss_datatypes_0030_repository( self ):
+        '''Create and populate the emboss_datatypes_0030 repository.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category_0030 = self.create_category( name='Test 0030 Repository Dependency Revisions', description='Testing repository dependencies by revision.' )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            datatypes_repository = self.get_or_create_repository( name='emboss_datatypes_0030',
+                                                                  description=datatypes_repository_description,
+                                                                  long_description=datatypes_repository_long_description,
+                                                                  owner=common.test_user_1_name,
+                                                                  category_id=self.security.encode_id( category_0030.id ),
+                                                                  strings_displayed=[] )
+            self.upload_file( datatypes_repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0030_create_emboss_5_repository( self ):
+        '''Create and populate the emboss_5_0030 repository.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category_0030 = self.create_category( name='Test 0030 Repository Dependency Revisions', description='Testing repository dependencies by revision.' )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            emboss_5_repository = self.get_or_create_repository( name='emboss_5_0030',
+                                                                 description=emboss_repository_description,
+                                                                 long_description=emboss_repository_long_description,
+                                                                 owner=common.test_user_1_name,
+                                                                 category_id=self.security.encode_id( category_0030.id ),
+                                                                 strings_displayed=[] )
+            self.upload_file( emboss_5_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0035_create_emboss_6_repository( self ):
+        '''Create and populate the emboss_6_0030 repository.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category_0030 = self.create_category( name='Test 0030 Repository Dependency Revisions', description='Testing repository dependencies by revision.' )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            emboss_6_repository = self.get_or_create_repository( name='emboss_6_0030',
+                                                                 description=emboss_repository_description,
+                                                                 long_description=emboss_repository_long_description,
+                                                                 owner=common.test_user_1_name,
+                                                                 category_id=self.security.encode_id( category_0030.id ),
+                                                                 strings_displayed=[] )
+            self.upload_file( emboss_6_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0040_create_emboss_0030_repository( self ):
+        '''Create and populate the emboss_0030 repository.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category_0030 = self.create_category( name='Test 0030 Repository Dependency Revisions', description='Testing repository dependencies by revision.' )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            emboss_repository = self.get_or_create_repository( name='emboss_0030',
+                                                               description=emboss_repository_description,
+                                                               long_description=emboss_repository_long_description,
+                                                               owner=common.test_user_1_name,
+                                                               category_id=self.security.encode_id( category_0030.id ),
+                                                               strings_displayed=[] )
+            self.upload_file( emboss_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0045_create_repository_dependencies_for_0030( self ):
+        '''Create the dependency structure for test 0030.'''
+        global running_standalone
+        if running_standalone:
+            datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_datatypes_0030', common.test_user_1_name )
+            emboss_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_0030', common.test_user_1_name )
+            emboss_5_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_5_0030', common.test_user_1_name )
+            emboss_6_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_6_0030', common.test_user_1_name )
+            repository_dependencies_path = self.generate_temp_path( 'test_0330', additional_paths=[ 'emboss' ] )
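+            # Each repository dependency is expressed as a ( tool shed URL, repository name, owner, changeset revision ) tuple.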
+            datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            emboss_5_tuple = ( self.url, emboss_5_repository.name, emboss_5_repository.user.username, self.get_repository_tip( emboss_5_repository ) )
+            emboss_6_tuple = ( self.url, emboss_6_repository.name, emboss_6_repository.user.username, self.get_repository_tip( emboss_6_repository ) )
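+            # emboss_5 and emboss_6 each depend on the datatypes repository; emboss_0030 is then given a
+            # dependency on emboss_5 and, in a later changeset, on emboss_6, exercising dependencies by revision.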
+            self.create_repository_dependency( repository=emboss_5_repository, repository_tuples=[ datatypes_tuple ], filepath=repository_dependencies_path )
+            self.create_repository_dependency( repository=emboss_6_repository, repository_tuples=[ datatypes_tuple ], filepath=repository_dependencies_path )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ emboss_5_tuple ], filepath=repository_dependencies_path )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ emboss_6_tuple ], filepath=repository_dependencies_path )
+
+    def test_0050_create_freebayes_repository( self ):
+        '''Create and populate the freebayes_0040 repository.'''
+        global running_standalone
+        self.login( email=common.admin_email, username=common.admin_username )
+        category_0040 = self.create_category( name='test_0040_repository_circular_dependencies', description='Testing handling of circular repository dependencies.' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name='freebayes_0040',
+                                                    description="Galaxy's freebayes tool",
+                                                    long_description="Long description of Galaxy's freebayes tool",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category_0040.id ),
+                                                    strings_displayed=[] )
+        if running_standalone:
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded freebayes tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0055_create_filtering_repository( self ):
+        '''Create and populate the filtering_0040 repository.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category_0040 = self.create_category( name='test_0040_repository_circular_dependencies', description='Testing handling of circular repository dependencies.' )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name='filtering_0040',
+                                                        description="Galaxy's filtering tool",
+                                                        long_description="Long description of Galaxy's filtering tool",
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category_0040.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0060_create_dependency_structure( self ):
+        '''Create the dependency structure for test 0040.'''
+        global running_standalone
+        if running_standalone:
+            freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( 'freebayes_0040', common.test_user_1_name )
+            filtering_repository = self.test_db_util.get_repository_by_name_and_owner( 'filtering_0040', common.test_user_1_name )
+            repository_dependencies_path = self.generate_temp_path( 'test_0340', additional_paths=[ 'dependencies' ] )
+            freebayes_tuple = ( self.url, freebayes_repository.name, freebayes_repository.user.username, self.get_repository_tip( freebayes_repository ) )
+            filtering_tuple = ( self.url, filtering_repository.name, filtering_repository.user.username, self.get_repository_tip( filtering_repository ) )
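+            # Deliberately create a circular dependency: filtering depends on freebayes, and freebayes on filtering.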
+            self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ freebayes_tuple ], filepath=repository_dependencies_path )
+            self.create_repository_dependency( repository=freebayes_repository, repository_tuples=[ filtering_tuple ], filepath=repository_dependencies_path )
+
+    def test_0065_create_convert_repository( self ):
+        '''Create and populate convert_chars_0050.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category = self.create_category( name=category_0050_name, description=category_0050_description )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name=convert_repository_name,
+                                                        description=convert_repository_description,
+                                                        long_description=convert_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='convert_chars/convert_chars.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded convert_chars tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0070_create_column_repository( self ):
+        '''Create and populate column_maker_0050.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category = self.create_category( name=category_0050_name, description=category_0050_description )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name=column_repository_name,
+                                                        description=column_repository_description,
+                                                        long_description=column_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='column_maker/column_maker.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded column_maker tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0075_create_emboss_datatypes_repository( self ):
+        '''Create and populate emboss_datatypes_0050.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category = self.create_category( name=category_0050_name, description=category_0050_description )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name=emboss_datatypes_repository_name,
+                                                        description=datatypes_repository_description,
+                                                        long_description=datatypes_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0080_create_emboss_repository( self ):
+        '''Create and populate emboss_0050.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category = self.create_category( name=category_0050_name, description=category_0050_description )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                        description=emboss_repository_description,
+                                                        long_description=emboss_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0085_create_filtering_repository( self ):
+        '''Create and populate filtering_0050.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category = self.create_category( name=category_0050_name, description=category_0050_description )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            filtering_repository = self.get_or_create_repository( name=filtering_repository_name,
+                                                                  description=filtering_repository_description,
+                                                                  long_description=filtering_repository_long_description,
+                                                                  owner=common.test_user_1_name,
+                                                                  category_id=self.security.encode_id( category.id ),
+                                                                  strings_displayed=[] )
+            self.upload_file( filtering_repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0090_create_freebayes_repository( self ):
+        '''Create and populate freebayes_0050.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category = self.create_category( name=category_0050_name, description=category_0050_description )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name=freebayes_repository_name,
+                                                        description=freebayes_repository_description,
+                                                        long_description=freebayes_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded freebayes tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0095_create_bismark_repository( self ):
+        '''Create and populate bismark_0050.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.admin_email, username=common.admin_username )
+            category = self.create_category( name=category_0050_name, description=category_0050_description )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name=bismark_repository_name,
+                                                        description=bismark_repository_description,
+                                                        long_description=bismark_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='bismark/bismark.tar',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded bismark tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0100_create_and_upload_dependency_definitions( self ):
+        '''Create the dependency structure for test 0050.'''
+        global running_standalone
+        if running_standalone:
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+            convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+            datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+            emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+            filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+            freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+            bismark_repository = self.test_db_util.get_repository_by_name_and_owner( bismark_repository_name, common.test_user_1_name )
+            dependency_xml_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
+            # convert_chars depends on column_maker
+            # column_maker depends on convert_chars
+            # emboss depends on emboss_datatypes
+            # emboss_datatypes depends on bismark
+            # freebayes depends on freebayes, emboss, emboss_datatypes, and column_maker
+            # filtering depends on emboss
+            column_tuple = ( self.url, column_repository.name, column_repository.user.username, self.get_repository_tip( column_repository ) )
+            convert_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
+            freebayes_tuple = ( self.url, freebayes_repository.name, freebayes_repository.user.username, self.get_repository_tip( freebayes_repository ) )
+            emboss_tuple = ( self.url, emboss_repository.name, emboss_repository.user.username, self.get_repository_tip( emboss_repository ) )
+            datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            bismark_tuple = ( self.url, bismark_repository.name, bismark_repository.user.username, self.get_repository_tip( bismark_repository ) )
+            self.create_repository_dependency( repository=convert_repository, repository_tuples=[ column_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=column_repository, repository_tuples=[ convert_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=datatypes_repository, repository_tuples=[ bismark_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=freebayes_repository,
+                                               repository_tuples=[ freebayes_tuple, datatypes_tuple, emboss_tuple, column_tuple ],
+                                               filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ emboss_tuple ], filepath=dependency_xml_path )
+
+    def test_0105_create_filtering_repository( self ):
+        '''Create and populate the filtering_0060 repository.'''
+        self.login( email=common.admin_email, username=common.admin_username )
+        category_0060 = self.create_category( name='Test 0060 Workflow Features', description='Test 0060 - Workflow Features' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        workflow_repository = self.get_or_create_repository( name='filtering_0060',
+                                                             description="Galaxy's filtering tool",
+                                                             long_description="Long description of Galaxy's filtering tool",
+                                                             owner=common.test_user_1_name,
+                                                             category_id=self.security.encode_id( category_0060.id ),
+                                                             strings_displayed=[] )
+        if self.repository_is_new( workflow_repository ):
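+            # The stock workflow file carries a __TEST_TOOL_SHED_URL__ placeholder that must be rewritten
+            # to this tool shed's host before the workflow is uploaded.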
+            with open( self.get_filename( 'filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga' ), 'r' ) as workflow_file:
+                workflow = workflow_file.read()
+            workflow = workflow.replace( '__TEST_TOOL_SHED_URL__', self.url.replace( 'http://', '' ) )
+            workflow_filepath = self.generate_temp_path( 'test_0360', additional_paths=[ 'filtering_workflow' ] )
+            if not os.path.exists( workflow_filepath ):
+                os.makedirs( workflow_filepath )
+            with open( os.path.join( workflow_filepath, workflow_filename ), 'w+' ) as workflow_file:
+                workflow_file.write( workflow )
+            self.upload_file( workflow_repository,
+                              filename=workflow_filename,
+                              filepath=workflow_filepath,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering workflow.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( workflow_repository,
+                              filename='filtering/filtering_2.2.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 2.2.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0110_reset_metadata_on_all_repositories( self ):
+        '''Reset metadata on all repositories, then verify that it has not changed.'''
+        self.login( email=common.admin_email, username=common.admin_username )
+        old_metadata = dict()
+        new_metadata = dict()
+        repositories = self.test_db_util.get_all_repositories()
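+        # Snapshot the metadata of every changeset revision of every repository, keyed by encoded repository id.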
+        for repository in repositories:
+            old_metadata[ self.security.encode_id( repository.id ) ] = dict()
+            for metadata in self.get_repository_metadata( repository ):
+                old_metadata[ self.security.encode_id( repository.id ) ][ metadata.changeset_revision ] = metadata.metadata
+        self.reset_metadata_on_selected_repositories( list( old_metadata.keys() ) )
+        for repository in repositories:
+            new_metadata[ self.security.encode_id( repository.id ) ] = dict()
+            for metadata in self.get_repository_metadata( repository ):
+                new_metadata[ self.security.encode_id( repository.id ) ][ metadata.changeset_revision ] = metadata.metadata
+            if old_metadata[ self.security.encode_id( repository.id ) ] != new_metadata[ self.security.encode_id( repository.id ) ]:
+                raise AssertionError( 'Metadata changed after reset for repository %s.' % repository.name )
diff --git a/test/shed_functional/functional/test_0310_hg_api_features.py b/test/shed_functional/functional/test_0310_hg_api_features.py
new file mode 100644
index 0000000..2937b60
--- /dev/null
+++ b/test/shed_functional/functional/test_0310_hg_api_features.py
@@ -0,0 +1,94 @@
+import logging
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+repository_name = 'filtering_0310'
+repository_description = "Galaxy's filtering tool for test 0310"
+repository_long_description = "Long description of Galaxy's filtering tool for test 0310"
+
+category_name = 'Test 0310 - HTTP Repo features'
+category_description = 'Test 0310 for verifying the tool shed http interface to mercurial.'
+
+'''
+1. Create a repository.
+2. Clone the repository to a local path.
+'''
+
+
+class TestHgWebFeatures( ShedTwillTestCase ):
+    '''Test the HTTP interface to Mercurial.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_filtering_repository( self ):
+        '''Create and populate the filtering_0310 repository.'''
+        '''
+        We are at step 1 - Create a repository.
+        Create and populate the filtering_0310 repository.
+        '''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=True,
+                          commit_message="Uploaded filtering 1.1.0.",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.upload_file( repository,
+                          filename='filtering/filtering_test_data.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message="Uploaded filtering test data.",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_clone( self ):
+        '''Clone the repository to a local path.'''
+        '''
+        We are at step 2 - Clone the repository to a local path.
+        The repository should have the following files:
+
+        filtering.py
+        filtering.xml
+        test-data/
+        test-data/1.bed
+        test-data/7.bed
+        test-data/filter1_in3.sam
+        test-data/filter1_inbad.bed
+        test-data/filter1_test1.bed
+        test-data/filter1_test2.bed
+        test-data/filter1_test3.sam
+        test-data/filter1_test4.bed
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        clone_path = self.generate_temp_path( 'test_0310', additional_paths=[ 'filtering_0310', 'user2' ] )
+        self.clone_repository( repository, clone_path )
+        files_in_repository = os.listdir( clone_path )
+        assert 'filtering.py' in files_in_repository, 'File not found in repository: filtering.py'
diff --git a/test/shed_functional/functional/test_0400_repository_component_reviews.py b/test/shed_functional/functional/test_0400_repository_component_reviews.py
new file mode 100644
index 0000000..960ac5d
--- /dev/null
+++ b/test/shed_functional/functional/test_0400_repository_component_reviews.py
@@ -0,0 +1,578 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'filtering_0400'
+repository_description = 'Galaxy filtering tool for test 0400'
+repository_long_description = 'Long description of Galaxy filtering tool for test 0400'
+
+'''
+1. Create users.
+2. Grant reviewer role to test_user_2.
+3. Check that the review components that are to be tested are defined in this tool shed instance.
+4. Create a repository, owned by test_user_1, to be reviewed by test_user_2.
+5. Review the datatypes component on the repository.
+6. Check that no other components besides datatypes display as reviewed.
+7. Review the functional tests component on the repository.
+8. Check that only functional tests and datatypes display as reviewed.
+9. Review the readme component on the repository.
+10. Check that only functional tests, datatypes, and readme display as reviewed.
+11. Review the repository dependencies component.
+12. Check that only repository dependencies, functional tests, datatypes, and readme display as reviewed.
+13. Review the tool dependencies component.
+14. Check that only tool dependencies, repository dependencies, functional tests, datatypes, and readme display as reviewed.
+15. Review the tools component.
+16. Check that only tools, tool dependencies, repository dependencies, functional tests, datatypes, and readme display as reviewed.
+17. Review the workflows component.
+18. Check that all components display as reviewed.
+19. Upload readme.txt to the repository.
+20. Copy the previous review, and update the readme component review to reflect the existence of a readme file.
+21. Check that the readme component review has been updated, and the other component reviews are present.
+22. Upload test data to the repository. This will also create a new changeset revision.
+23. Review the functional tests component on the repository, copying the other components from the previous review.
+24. Verify that the functional tests component review has been updated, and as in step 21, the other reviews are unchanged.
+25. Upload a new version of the tool.
+26. Review the new revision's functional tests component.
+27. Verify that the functional tests component review displays correctly.
+'''
+
+
+class TestRepositoryComponentReviews( ShedTwillTestCase ):
+    '''Test repository component review features.'''
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        """
+        We are at step 1.
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+        Previously created accounts will not be re-created.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_grant_reviewer_role( self ):
+        '''Grant the repository reviewer role to test_user_2.'''
+        """
+        We are at step 2.
+        We now have an admin user (admin_user) and two non-admin users (test_user_1 and test_user_2). Grant the repository
+        reviewer role to test_user_2, who will not be the owner of the reviewed repositories.
+        """
+        reviewer_role = self.test_db_util.get_role_by_name( 'Repository Reviewer' )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        self.grant_role_to_user( test_user_2, reviewer_role )
+
+    def test_0010_verify_repository_review_components( self ):
+        '''Ensure that the required review components exist.'''
+        """
+        We are at step 3.
+        Check that the review components needed by this test are defined in this tool shed instance,
+        adding the 'Repository dependencies' component if it is not already present.
+        """
+        strings_not_displayed = [ 'Repository dependencies' ]
+        self.manage_review_components( strings_not_displayed=strings_not_displayed )
+        self.add_repository_review_component( name='Repository dependencies',
+                                              description='Repository dependencies defined in a file named repository_dependencies.xml included in the repository' )
+        strings_displayed = [ 'Data types', 'Functional tests', 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+        self.manage_review_components( strings_displayed=strings_displayed )
+
+    def test_0015_create_repository( self ):
+        """Create and populate the filtering repository"""
+        """
+        We are at step 4.
+        Log in as test_user_1 and create the filtering repository, then upload a basic set of
+        components to be reviewed in subsequent tests.
+        """
+        category = self.create_category( name='Test 0400 Repository Component Reviews', description='Test 0400 Repository Component Reviews' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = self.expect_repo_created_strings( repository_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 1.1.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_review_initial_revision_data_types( self ):
+        '''Review the datatypes component for the current tip revision.'''
+        """
+        We are at step 5.
+        Log in as test_user_2 and review the data types component of the filtering repository owned by test_user_1.
+        # Review this revision:
+        #    Data types (N/A)
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        # The create_repository_review method takes a dict( component label=review contents ).
+        # If review_contents is empty, it marks that component as not applicable. The review
+        # contents dict should have the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>
+        #   approved: yes/no
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'Data types': dict() }
+        self.create_repository_review( repository, review_contents_dict )
+
+    def test_0025_verify_datatype_review( self ):
+        '''Verify that the datatypes component review displays correctly.'''
+        """
+        We are at step 6.
+        Log in as test_user_1 and check that the filtering repository only has a review for the data types component.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Data types', 'not_applicable' ]
+        strings_not_displayed = [ 'Functional tests', 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0030_review_initial_revision_functional_tests( self ):
+        '''Review the functional tests component for the current tip revision.'''
+        """
+        We are at step 7.
+        Log in as test_user_2 and review the functional tests component for this repository. Since the repository
+        has not been altered, this will update the existing review to add a component.
+        # Review this revision:
+        #    Data types (N/A)
+        #    Functional tests (One star, comment 'functional tests missing')
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        # The create_repository_review method takes a dict( component label=review contents ).
+        # If review_contents is empty, it marks that component as not applicable. The review
+        # contents dict should have the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>
+        #   approved: yes/no
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'Functional tests': dict( rating=1, comment='Functional tests missing', approved='no', private='yes' ) }
+        self.review_repository( repository, review_contents_dict, user )
+
+#    def test_0030_verify_review_display( self ):
+#        '''Verify that private reviews are restricted to owner and reviewer, and non-private views are viewable by others.'''
+#        # Currently not implemented because third parties cannot view reviews whether they are private or not.
+#        self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+
+    def test_0035_verify_functional_test_review( self ):
+        '''Verify that the functional tests component review displays correctly.'''
+        """
+        We are at step 8.
+        Log in as test_user_1 and check that the filtering repository now has reviews
+        for the data types and functional tests components. Since the functional tests component was not marked as 'Not applicable',
+        also check for the review comment.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Functional tests', 'Functional tests missing', 'no' ]
+        strings_not_displayed = [ 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0040_review_readme( self ):
+        '''Review the readme component for the current tip revision.'''
+        """
+        We are at step 9.
+        Log in as test_user_2 and update the review with the readme component marked as 'Not applicable'.
+        # Review this revision:
+        #    Data types (N/A)
+        #    Functional tests (One star, comment 'functional tests missing')
+        #    README (N/A)
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        # The create_repository_review method takes a dict( component label=review contents ).
+        # If review_contents is empty, it marks that component as not applicable. The review
+        # contents dict should have the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>
+        #   approved: yes/no
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'README': dict() }
+        self.review_repository( repository, review_contents_dict, user )
+
+    def test_0045_verify_readme_review( self ):
+        '''Verify that the readme component review displays correctly.'''
+        """
+        We are at step 10.
+        Log in as test_user_1 and verify that the repository component reviews now include a review for the readme component.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'README', 'not_applicable' ]
+        strings_not_displayed = [ 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0050_review_repository_dependencies( self ):
+        '''Review the repository dependencies component for the current tip revision.'''
+        """
+        We are at step 11.
+        Log in as test_user_2 and update the review with the repository dependencies component marked as 'Not applicable'.
+        # Review this revision:
+        #    Data types (N/A)
+        #    Functional tests (One star, comment 'functional tests missing')
+        #    README (N/A)
+        #    Repository dependencies (N/A)
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        # The create_repository_review method takes a dict( component label=review contents ).
+        # If review_contents is empty, it marks that component as not applicable. The review
+        # contents dict should have the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>
+        #   approved: yes/no
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'Repository dependencies': dict() }
+        self.review_repository( repository, review_contents_dict, user )
+
+    def test_0055_verify_repository_dependency_review( self ):
+        '''Verify that the repository dependencies component review displays correctly.'''
+        """
+        We are at step 12.
+        Log in as test_user_1 and verify that the repository component reviews now include a review
+        for the repository dependencies component.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Repository dependencies', 'not_applicable' ]
+        strings_not_displayed = [ 'Tool dependencies', 'Tools', 'Workflows' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0060_review_tool_dependencies( self ):
+        '''Review the tool dependencies component for the current tip revision.'''
+        """
+        We are at step 13.
+        Log in as test_user_2 and update the review with the tool dependencies component marked as 'Not applicable'.
+        # Review this revision:
+        #    Data types (N/A)
+        #    Functional tests (One star, comment 'functional tests missing')
+        #    README (N/A)
+        #    Repository dependencies (N/A)
+        #    Tool dependencies (N/A)
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        # The create_repository_review method takes a dict mapping each component label
+        # to its review contents. If the review contents are empty, that component is
+        # marked as not applicable. A review contents dict has the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>,
+        #   approved: yes/no,
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'Tool dependencies': dict() }
+        self.review_repository( repository, review_contents_dict, user )
+
+    def test_0065_verify_tool_dependency_review( self ):
+        '''Verify that the tool dependencies component review displays correctly.'''
+        """
+        We are at step 14.
+        Log in as test_user_1 and verify that the repository component reviews now include a review
+        for the tool dependencies component.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Tool dependencies', 'not_applicable' ]
+        strings_not_displayed = [ 'Tools', 'Workflows' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0070_review_tools( self ):
+        '''Review the tools component for the current tip revision.'''
+        """
+        We are at step 15.
+        Log in as test_user_2 and update the review with the tools component given
+        a favorable review, with 5 stars, and approved status.
+        # Review this revision:
+        #    Data types (N/A)
+        #    Functional tests (One star, comment 'functional tests missing')
+        #    README (N/A)
+        #    Repository dependencies (N/A)
+        #    Tool dependencies (N/A)
+        #    Tools (5 stars, good review)
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        # The create_repository_review method takes a dict mapping each component label
+        # to its review contents. If the review contents are empty, that component is
+        # marked as not applicable. A review contents dict has the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>,
+        #   approved: yes/no,
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'Tools': dict( rating=5, comment='Excellent tool, easy to use.', approved='yes', private='no' ) }
+        self.review_repository( repository, review_contents_dict, user )
+
+    def test_0075_verify_tools_review( self ):
+        '''Verify that the tools component review displays correctly.'''
+        """
+        We are at step 16.
+        Log in as test_user_1 and verify that the repository component reviews now include a review
+        for the tools component. As before, check for the presence of the comment on this review.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Tools', 'yes', 'Excellent tool, easy to use.' ]
+        strings_not_displayed = [ 'Workflows' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0080_review_workflows( self ):
+        '''Review the workflows component for the current tip revision.'''
+        """
+        We are at step 17.
+        Log in as test_user_2 and update the review with the workflows component marked as 'Not applicable'.
+        # Review this revision:
+        #    Data types (N/A)
+        #    Functional tests (One star, comment 'functional tests missing')
+        #    README (N/A)
+        #    Repository dependencies (N/A)
+        #    Tool dependencies (N/A)
+        #    Tools (5 stars, good review)
+        #    Workflows (N/A)
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        # The create_repository_review method takes a dict mapping each component label
+        # to its review contents. If the review contents are empty, that component is
+        # marked as not applicable. A review contents dict has the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>,
+        #   approved: yes/no,
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'Workflows': dict() }
+        self.review_repository( repository, review_contents_dict, user )
+
+    def test_0085_verify_workflows_review( self ):
+        '''Verify that the workflows component review displays correctly.'''
+        """
+        We are at step 18.
+        Log in as test_user_1 and verify that the repository component reviews now include a review
+        for the workflows component.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Workflows', 'not_applicable' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+
+    def test_0090_upload_readme_file( self ):
+        '''Upload a readme file to the filtering repository.'''
+        """
+        We are at step 19.
+        Log in as test_user_1, the repository owner, and upload readme.txt to the repository. This will create
+        a new changeset revision for this repository, which will need to be reviewed.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='readme.txt',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded readme.txt.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0095_review_new_changeset_readme_component( self ):
+        '''Update the filtering repository's readme component review to reflect the presence of the readme file.'''
+        """
+        We are at step 20.
+        There is now a new changeset revision in the repository's changelog, but it has no review associated with it.
+        Get the previously reviewed changeset hash, and pass that and the review id to the create_repository_review
+        method, in order to copy the previous review's contents. Then update the new review to reflect the presence of
+        a readme file.
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        # Get the last changeset revision that has a review associated with it.
+        last_review = self.get_last_reviewed_revision_by_user( user, repository )
+        if last_review is None:
+            raise AssertionError( 'Previous review expected, none found.' )
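+        # Passing copy_from=( changeset hash, review id ) to create_repository_review
+        # below seeds the new revision's review with that earlier review's contents,
+        # which review_contents_dict then updates.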
+        # The create_repository_review method takes a dict mapping each component label
+        # to its review contents. If the review contents are empty, that component is
+        # marked as not applicable. A review contents dict has the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>,
+        #   approved: yes/no,
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'README': dict( rating=5, comment='Clear and concise readme file, a true pleasure to read.', approved='yes', private='no' ) }
+        self.create_repository_review( repository,
+                                       review_contents_dict,
+                                       changeset_revision=self.get_repository_tip( repository ),
+                                       copy_from=( str( last_review.changeset_revision ), last_review.id ) )
+
+    def test_0100_verify_readme_review( self ):
+        '''Verify that the readme component review displays correctly.'''
+        """
+        We are at step 21.
+        Log in as the repository owner (test_user_1) and check the repository component reviews to
+        verify that the readme component is now reviewed and approved.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'README', 'yes', 'Clear and concise readme file, a true pleasure to read.' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+
+    def test_0105_upload_test_data( self ):
+        '''Upload the missing test data to the filtering repository.'''
+        """
+        We are at step 22.
+        Remain logged in as test_user_1 and upload test data to the repository. This will also create a
+        new changeset revision that needs to be reviewed. This will replace the changeset hash associated with
+        the last downloadable revision, but the last repository review will still be associated with the
+        previous downloadable revision hash.
+        """
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='filtering/filtering_test_data.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering test data.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0110_review_new_changeset_functional_tests( self ):
+        '''Update the filtering repository's functional tests component review to reflect the new test data.'''
+        """
+        We are at step 23.
+        Log in as test_user_2 and get the last reviewed changeset hash, and pass that and the review id to
+        the create_repository_review method, then update the copied review to approve the functional tests
+        component.
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        # Get the last changeset revision reviewed by this user, and pass it to the create review method.
+        last_review = self.get_last_reviewed_revision_by_user( user, repository )
+        # The create_repository_review method takes a dict mapping each component label
+        # to its review contents. If the review contents are empty, that component is
+        # marked as not applicable. A review contents dict has the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>,
+        #   approved: yes/no,
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'Functional tests': dict( rating=5, comment='A good set of functional tests.', approved='yes', private='no' ) }
+        self.create_repository_review( repository,
+                                       review_contents_dict,
+                                       changeset_revision=self.get_repository_tip( repository ),
+                                       copy_from=( str( last_review.changeset_revision ), last_review.id ) )
+
+    def test_0115_verify_functional_tests_review( self ):
+        '''Verify that the functional tests component review displays correctly.'''
+        """
+        We are at step 24.
+        Log in as the repository owner, test_user_1, and verify that the new revision's functional tests component
+        review has been updated with an approved status and favorable comment.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Functional tests', 'yes', 'A good set of functional tests.' ]
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+
+    def test_0120_upload_new_tool_version( self ):
+        '''Upload filtering 2.2.0 to the filtering repository.'''
+        """
+        We are at step 25.
+        Log in as test_user_1 and upload a new version of the tool to the filtering repository. This will create
+        a new downloadable revision, with no associated repository component reviews.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='filtering/filtering_2.2.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 2.2.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0125_review_new_changeset_functional_tests( self ):
+        '''Update the filtering repository's review to apply to the new changeset with filtering 2.2.0.'''
+        """
+        We are at step 26.
+        Log in as test_user_2 and copy the last review for this repository to the new changeset. Then
+        update the tools component review to refer to the new tool version.
+        """
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        last_review = self.get_last_reviewed_revision_by_user( user, repository )
+        # At least one component's contents must change from the copied review, or the
+        # updated review will not be saved; here the Tools review is updated for 2.2.0.
+        # The create_repository_review method takes a dict mapping each component label
+        # to its review contents. If the review contents are empty, that component is
+        # marked as not applicable. A review contents dict has the structure:
+        # {
+        #   rating: 1-5,
+        #   comment: <text>,
+        #   approved: yes/no,
+        #   private: yes/no
+        # }
+        review_contents_dict = { 'Tools': dict( rating=5, comment='Version 2.2.0 does the impossible and improves this tool.', approved='yes', private='yes' ) }
+        self.create_repository_review( repository,
+                                       review_contents_dict,
+                                       changeset_revision=self.get_repository_tip( repository ),
+                                       copy_from=( str( last_review.changeset_revision ), last_review.id ) )
+
+    def test_0135_verify_review_for_new_version( self ):
+        '''Verify that the reviews display correctly for this changeset revision.'''
+        """
+        We are at step 27.
+        Log in as test_user_1 and check that the tools component review is for filtering 2.2.0, but that the other component
+        reviews had their contents copied from the last reviewed changeset.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Data types', 'Functional tests', 'yes', 'A good set of functional tests.', 'README', 'yes', 'Workflows', 'Tools' ]
+        strings_displayed.extend( [ 'Clear and concise readme file, a true pleasure to read.', 'Tool dependencies', 'not_applicable' ] )
+        strings_displayed.extend( [ 'Repository dependencies', 'Version 2.2.0 does the impossible and improves this tool.' ] )
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
diff --git a/test/shed_functional/functional/test_0410_repository_component_review_access_control.py b/test/shed_functional/functional/test_0410_repository_component_review_access_control.py
new file mode 100644
index 0000000..be60d53
--- /dev/null
+++ b/test/shed_functional/functional/test_0410_repository_component_review_access_control.py
@@ -0,0 +1,189 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'filtering_0410'
+repository_description = 'Galaxy filtering tool for test 0410'
+repository_long_description = 'Long description of Galaxy filtering tool for test 0410'
+
+'''
+1. Create a repository in the tool shed owned by test_user_1.
+2. Have test_user_2 complete a review of the repository.
+3. Have test_user_1 browse the review.
+4. Have test_user_3 browse the repository and make sure they are not allowed to browse the review.
+5. Have test_user_1 give write permission on the repository to test_user_3.
+6. Have test_user_3 browse the repository again; they should now be able to browse the review.
+7. Have test_user_3 browse the review.
+'''
+
+
+class TestRepositoryComponentReviews( ShedTwillTestCase ):
+    '''Test repository component review features.'''
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        """
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+        Previously created accounts will not be re-created.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+        test_user_3 = self.test_db_util.get_user( common.test_user_3_email )
+        assert test_user_3 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_3_email
+        self.test_db_util.get_private_role( test_user_3 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_grant_reviewer_role( self ):
+        '''Grant the repository reviewer role to test_user_2.'''
+        """
+        We now have an admin user (admin_user) and three non-admin users (test_user_1, test_user_2, and test_user_3). Grant the repository
+        reviewer role to test_user_2, who will not be the owner of the reviewed repositories, and do not grant any roles to test_user_3 yet.
+        """
+        reviewer_role = self.test_db_util.get_role_by_name( 'Repository Reviewer' )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        self.grant_role_to_user( test_user_2, reviewer_role )
+
+    def test_0010_verify_repository_review_components( self ):
+        '''Ensure that the required review components exist.'''
+        """
+        Make sure all the components we are to review are recorded in the database.
+        """
+        self.add_repository_review_component( name='Repository dependencies',
+                                              description='Repository dependencies defined in a file named repository_dependencies.xml included in the repository' )
+        strings_displayed = [ 'Data types', 'Functional tests', 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+        self.manage_review_components( strings_displayed=strings_displayed )
+
+    def test_0015_create_repository( self ):
+        """Create and populate the filtering repository"""
+        """
+        We are at step 1.
+        Log in as test_user_1 and create the filtering repository, then upload a basic set of
+        components to be reviewed in subsequent tests.
+        """
+        category = self.create_category( name='Test 0400 Repository Component Reviews', description='Test 0400 Repository Component Reviews' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = self.expect_repo_created_strings( repository_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 1.1.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.upload_file( repository,
+                          filename='filtering/filtering_test_data.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering test data.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.upload_file( repository,
+                          filename='readme.txt',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded readme.txt.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_review_repository( self ):
+        '''Complete a review of the filtering repository.'''
+        '''
+        We are at step 2 - Have test_user_2 complete a review of the repository.
+        Review all components of the filtering repository, with the appropriate contents and approved/not approved/not applicable status.
+        '''
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        review_contents_dict = { 'Data types': dict(),
+                                 'README': dict( rating=5, comment='Clear and concise readme file, a true pleasure to read.', approved='yes', private='no' ),
+                                 'Functional tests': dict( rating=5, comment='A good set of functional tests.', approved='yes', private='no' ),
+                                 'Repository dependencies': dict(),
+                                 'Tool dependencies': dict(),
+                                 'Tools': dict( rating=5, comment='Excellent tool, easy to use.', approved='yes', private='no' ),
+                                 'Workflows': dict() }
+        self.create_repository_review( repository, review_contents_dict )
+
+    def test_0025_verify_repository_review( self ):
+        '''Verify that the review was completed and displays properly.'''
+        '''
+        We are at step 3 - Have test_user_1 browse the review.
+        Verify that all the review components were submitted, and that the repository owner can see the review.
+        '''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'Data types', 'Functional tests', 'yes', 'A good set of functional tests.', 'README', 'yes', 'Workflows', 'Tools' ]
+        strings_displayed.extend( [ 'Clear and concise readme file, a true pleasure to read.', 'Tool dependencies', 'not_applicable' ] )
+        strings_displayed.extend( [ 'Repository dependencies', 'Excellent tool, easy to use.' ] )
+        # Check the manage page for the review link, then verify the full review contents.
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Browse reviews of this repository' ] )
+        self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+
+    def test_0030_browse_with_other_user( self ):
+        '''Verify that test_user_3 is blocked from browsing the review.'''
+        '''
+        We are at step 4 - Have test_user_3 browse the repository and make sure they are not allowed to browse the review.
+        '''
+        self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_not_displayed = [ 'Browse reviews of this repository' ]
+        self.display_manage_repository_page( repository, strings_not_displayed=strings_not_displayed )
+        strings_not_displayed = [ 'A good set of functional tests.', 'Clear and concise readme file, a true pleasure to read.' ]
+        strings_not_displayed.append( 'Excellent tool, easy to use.' )
+        changeset_revision = self.get_repository_tip( repository )
+        review = self.test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision )
+        self.browse_component_review( review, strings_not_displayed=strings_not_displayed )
+
+    def test_0035_grant_write_access_to_other_user( self ):
+        '''Grant write access on the filtering_0410 repository to test_user_3.'''
+        '''
+        We are at step 5 - Have test_user_1 give write permission on the repository to test_user_3.
+        '''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.grant_write_access( repository, usernames=[ common.test_user_3_name ] )
+
+    def test_0040_verify_test_user_3_can_browse_reviews( self ):
+        '''Check that test_user_3 can now browse reviews.'''
+        '''
+        We are at step 6 - Have test_user_3 browse the repository again; they should now be able to browse the review.
+        '''
+        self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        strings_displayed = [ 'Browse reviews of this repository' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+
+    def test_0045_verify_browse_review_with_write_access( self ):
+        '''Check that test_user_3 can now display reviews.'''
+        '''
+        We are at step 7 - Have test_user_3 browse the review.
+        '''
+        self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        user = self.test_db_util.get_user( common.test_user_2_email )
+        strings_displayed = [ 'A good set of functional tests.',
+                              'Clear and concise readme file',
+                              'a true pleasure to read.',
+                              'Excellent tool, easy to use.' ]
+        changeset_revision = self.get_repository_tip( repository )
+        review = self.test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision )
+        self.browse_component_review( review, strings_displayed=strings_displayed )
diff --git a/test/shed_functional/functional/test_0420_citable_urls_for_repositories.py b/test/shed_functional/functional/test_0420_citable_urls_for_repositories.py
new file mode 100644
index 0000000..5ec8440
--- /dev/null
+++ b/test/shed_functional/functional/test_0420_citable_urls_for_repositories.py
@@ -0,0 +1,229 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger(__name__)
+
+repository_name = 'filtering_0420'
+repository_description = 'Galaxy filtering tool for test 0420'
+repository_long_description = 'Long description of Galaxy filtering tool for test 0420'
+
+first_changeset_hash = ''
+
+'''
+1. Add and populate a repository to the tool shed with change set revision 0 (assume owner is test_user_1).
+2. Add valid change set revision 1.
+3. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1
+4. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420
+    Resulting page should contain change set revision 1
+5. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420/<revision 0>
+    Resulting page should not contain change set revision 1, but should contain change set revision 0.
+6. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420/<invalid revision>
+7. Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/<invalid repository name>
+8. Visit the following url and check for appropriate strings: <tool shed base url>/view/<invalid owner>
+'''
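+# The citable URL shapes exercised below, with placeholders rather than literal values:
+#   /view/<owner>                            lists the repositories owned by <owner>
+#   /view/<owner>/<repository>               shows the repository's tip revision
+#   /view/<owner>/<repository>/<changeset>   shows that specific revision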
+
+
+class TestRepositoryCitableURLs( ShedTwillTestCase ):
+    '''Test repository citable url features.'''
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        """
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+        Previously created accounts will not be re-created.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_repository( self ):
+        """Create and populate the filtering_0420 repository"""
+        """
+        We are at step 1.
+        Add and populate a repository to the tool shed with change set revision 0 (assume owner is test_user_1).
+        """
+        global first_changeset_hash
+        category = self.create_category( name='Test 0400 Repository Citable URLs',
+                                         description='Test 0400 Repository Citable URLs category' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = self.expect_repo_created_strings( repository_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='filtering/filtering_2.2.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 2.2.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        # We record this hash now because, after another file is uploaded below, the tip will move and get_repository_tip() will no longer return it.
+        first_changeset_hash = self.get_repository_tip( repository )
+
+    def test_0010_upload_new_file_to_repository( self ):
+        '''Upload a readme file to the repository in order to create a second changeset revision.'''
+        '''
+        We are at step 2.
+        Add valid change set revision 1.
+        The repository should now contain two changeset revisions, 0:<revision hash> and 1:<revision hash>.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='readme.txt',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded readme.txt.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_load_user_view_page( self ):
+        '''Load the /view/<username> page and check for strings.'''
+        '''
+        We are at step 3.
+        Visit the following url and check for appropriate strings: <tool shed base url>/view/user1
+        '''
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        encoded_user_id = self.security.encode_id( test_user_1.id )
+        # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+        # then directly load the url that the iframe should be loading and check for the expected strings.
+        # The iframe should point to /repository/browse_repositories?user_id=<encoded user ID>&operation=repositories_by_user
+        strings_displayed = [ '/repository/browse_repositories', encoded_user_id, 'operation=repositories_by_user' ]
+        strings_displayed_in_iframe = [ 'user1', 'filtering_0420', 'Galaxy filtering tool for test 0420' ]
+        self.load_citable_url( username='user1',
+                               repository_name=None,
+                               changeset_revision=None,
+                               encoded_user_id=encoded_user_id,
+                               encoded_repository_id=None,
+                               strings_displayed=strings_displayed,
+                               strings_displayed_in_iframe=strings_displayed_in_iframe )
+
+    def test_0020_load_repository_view_page( self ):
+        '''Load the /view/<user>/<repository> page and check for the appropriate strings.'''
+        '''
+        We are at step 4.
+        Visit the following url and check for strings: <tool shed base url>/view/user1/filtering_0420
+            Resulting page should contain change set revision 1
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        encoded_user_id = self.security.encode_id( test_user_1.id )
+        encoded_repository_id = self.security.encode_id( repository.id )
+        # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+        # then directly load the url that the iframe should be loading and check for the expected strings.
+        # The iframe should point to /repository/view_repository?id=<encoded repository ID>
+        strings_displayed = [ '/repository', 'view_repository', 'id=', encoded_repository_id ]
+        strings_displayed_in_iframe = [ 'user1', 'filtering_0420', 'Galaxy filtering tool for test 0420' ]
+        strings_displayed_in_iframe.append( self.get_repository_tip( repository ) )
+        strings_displayed_in_iframe.append( 'Link to this repository:' )
+        strings_displayed_in_iframe.append( '%s/view/user1/filtering_0420' % self.url )
+        self.load_citable_url( username='user1',
+                               repository_name='filtering_0420',
+                               changeset_revision=None,
+                               encoded_user_id=encoded_user_id,
+                               encoded_repository_id=encoded_repository_id,
+                               strings_displayed=strings_displayed,
+                               strings_displayed_in_iframe=strings_displayed_in_iframe )
+
+    def test_0025_load_view_page_for_previous_revision( self ):
+        '''Load a citable url for a past changeset revision and verify that strings display.'''
+        '''
+        We are at step 5.
+        Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420/<revision 0>
+            Resulting page should not contain change set revision 1, but should contain change set revision 0.
+        '''
+        global first_changeset_hash
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        encoded_user_id = self.security.encode_id( test_user_1.id )
+        encoded_repository_id = self.security.encode_id( repository.id )
+        # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+        # then directly load the url that the iframe should be loading and check for the expected strings.
+        # The iframe should point to /repository/view_repository?id=<encoded repository ID>
+        strings_displayed = [ '/repository', 'view_repository', 'id=' + encoded_repository_id ]
+        strings_displayed_in_iframe = [ 'user1', 'filtering_0420', 'Galaxy filtering tool for test 0420', first_changeset_hash ]
+        strings_displayed_in_iframe.append( 'Link to this repository revision:' )
+        strings_displayed_in_iframe.append( '%s/view/user1/filtering_0420/%s' % ( self.url, first_changeset_hash ) )
+        strings_not_displayed_in_iframe = []
+        self.load_citable_url( username='user1',
+                               repository_name='filtering_0420',
+                               changeset_revision=first_changeset_hash,
+                               encoded_user_id=encoded_user_id,
+                               encoded_repository_id=encoded_repository_id,
+                               strings_displayed=strings_displayed,
+                               strings_displayed_in_iframe=strings_displayed_in_iframe,
+                               strings_not_displayed_in_iframe=strings_not_displayed_in_iframe )
+
+    def test_0030_load_sharable_url_with_invalid_changeset_revision( self ):
+        '''Load a citable url with an invalid changeset revision specified.'''
+        '''
+        We are at step 6.
+        Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/filtering_0420/<invalid revision>
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        encoded_user_id = self.security.encode_id( test_user_1.id )
+        encoded_repository_id = self.security.encode_id( repository.id )
+        invalid_changeset_hash = 'invalid'
+        # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+        # then directly load the url that the iframe should be loading and check for the expected strings.
+        # The iframe should point to /repository/view_repository?id=<encoded repository ID>&status=error
+        strings_displayed = [ '/repository', 'view_repository', 'id=' + encoded_repository_id ]
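+        # The expected error text below is checked in URL-encoded form ('+' for spaces),
+        # since it travels in the query string of the redirect that carries status=error.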
+        strings_displayed.extend( [ 'The+change+log', 'does+not+include+revision', invalid_changeset_hash, 'status=error' ] )
+        strings_displayed_in_iframe = [ 'user1', 'filtering_0420', 'Galaxy filtering tool for test 0420' ]
+        strings_displayed_in_iframe.append( 'Link to this repository revision:' )
+        strings_displayed_in_iframe.append( '%s/view/user1/filtering_0420/%s' % ( self.url, invalid_changeset_hash ) )
+        strings_not_displayed_in_iframe = []
+        self.load_citable_url( username='user1',
+                               repository_name='filtering_0420',
+                               changeset_revision=invalid_changeset_hash,
+                               encoded_user_id=encoded_user_id,
+                               encoded_repository_id=encoded_repository_id,
+                               strings_displayed=strings_displayed,
+                               strings_displayed_in_iframe=strings_displayed_in_iframe,
+                               strings_not_displayed_in_iframe=strings_not_displayed_in_iframe )
+
+    def test_0035_load_sharable_url_with_invalid_repository_name( self ):
+        '''Load a citable url with an invalid repository name specified.'''
+        '''
+        We are at step 7.
+        Visit the following url and check for appropriate strings: <tool shed base url>/view/user1/!!invalid!!
+        '''
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        encoded_user_id = self.security.encode_id( test_user_1.id )
+        # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly,
+        # then directly load the url that the iframe should be loading and check for the expected strings.
+        # The iframe should point to /repository/browse_repositories?user_id=<encoded user ID>&operation=repositories_by_user
+        strings_displayed = [ '/repository', 'browse_repositories', 'user1' ]
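+        # '%21%21invalid%21%21' below is the percent-encoded form of '!!invalid!!'.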
+        strings_displayed.extend( [ 'list+of+repositories+owned', 'does+not+include+one+named', '%21%21invalid%21%21', 'status=error' ] )
+        strings_displayed_in_iframe = [ 'user1', 'filtering_0420' ]
+        strings_displayed_in_iframe.append( 'Repositories Owned by user1' )
+        self.load_citable_url( username='user1',
+                               repository_name='!!invalid!!',
+                               changeset_revision=None,
+                               encoded_user_id=encoded_user_id,
+                               encoded_repository_id=None,
+                               strings_displayed=strings_displayed,
+                               strings_displayed_in_iframe=strings_displayed_in_iframe )
+
+    def test_0040_load_sharable_url_with_invalid_owner( self ):
+        '''Load a citable url with an invalid owner.'''
+        '''
+        We are at step 8.
+        Visit the following url and check for appropriate strings: <tool shed base url>/view/!!invalid!!
+        '''
+        strings_displayed = [ 'The tool shed', self.url, 'contains no repositories owned by', '!!invalid!!' ]
+        self.load_citable_url( username='!!invalid!!',
+                               repository_name=None,
+                               changeset_revision=None,
+                               encoded_user_id=None,
+                               encoded_repository_id=None,
+                               strings_displayed=strings_displayed )
diff --git a/test/shed_functional/functional/test_0430_browse_utilities.py b/test/shed_functional/functional/test_0430_browse_utilities.py
new file mode 100644
index 0000000..68672fb
--- /dev/null
+++ b/test/shed_functional/functional/test_0430_browse_utilities.py
@@ -0,0 +1,176 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+datatypes_repository_name = 'emboss_datatypes_0430'
+datatypes_repository_description = 'Galaxy applicable datatypes for EMBOSS for test 0430'
+datatypes_repository_long_description = 'Long description of Galaxy applicable datatypes for EMBOSS for test 0430'
+
+emboss_repository_name = 'emboss_0430'
+emboss_repository_description = 'EMBOSS tools for test 0430'
+emboss_repository_long_description = 'Long description of EMBOSS tools for test 0430'
+
+freebayes_repository_name = 'freebayes_0430'
+freebayes_repository_description = 'Freebayes tool for test 0430'
+freebayes_repository_long_description = 'Long description of Freebayes tool for test 0430'
+'''
+1. Create and populate repositories.
+2. Browse Custom Datatypes.
+3. Browse Tools.
+4. Browse Repository Dependencies.
+5. Browse Tool Dependencies.
+'''
+
+
+class TestToolShedBrowseUtilities( ShedTwillTestCase ):
+    '''Test browsing for Galaxy utilities.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        """
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+        Previously created accounts will not be re-created.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_datatypes_repository( self ):
+        """Create and populate the emboss_datatypes_0430 repository"""
+        """
+        We are at step 1.
+        Create and populate the repository that will contain one or more datatypes.
+        """
+        category = self.create_category( name='Test 0430 Galaxy Utilities',
+                                         description='Description of Test 0430 Galaxy Utilities category' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = self.expect_repo_created_strings( datatypes_repository_name )
+        repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                    description=datatypes_repository_description,
+                                                    long_description=datatypes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='emboss/datatypes/datatypes_conf.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded datatypes_conf.xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_emboss_repository( self ):
+        """Create and populate the emboss_0430 repository"""
+        """
+        We are at step 1.
+        Create the emboss_0430 repository, and populate it with tools.
+        """
+        category = self.create_category( name='Test 0430 Galaxy Utilities',
+                                         description='Description of Test 0430 Galaxy Utilities category' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = self.expect_repo_created_strings( emboss_repository_name )
+        emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                            description=emboss_repository_description,
+                                                            long_description=emboss_repository_long_description,
+                                                            owner=common.test_user_1_name,
+                                                            category_id=self.security.encode_id( category.id ),
+                                                            strings_displayed=strings_displayed )
+        self.upload_file( emboss_repository,
+                          filename='emboss/emboss.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded emboss.tar.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_dependency_on_datatypes_repository( self ):
+        '''Create a dependency definition file that specifies emboss_datatypes_0430 and upload it to emboss_0430.'''
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        dependency_xml_path = self.generate_temp_path( 'test_0430', additional_paths=[ 'dependencies' ] )
+        datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
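+        # Repository tuples passed to create_repository_dependency have the form
+        # ( tool shed url, repository name, owner name, changeset revision ).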
+        self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
+        self.check_repository_dependency( emboss_repository, datatypes_repository )
+
+    def test_0020_create_tool_dependency_repository( self ):
+        """Create and populate the freebayes_0430 repository"""
+        """
+        We are at step 1.
+        Create and populate the repository that will have a tool dependency defined.
+        """
+        category = self.create_category( name='Test 0430 Galaxy Utilities',
+                                         description='Description of Test 0430 Galaxy Utilities category' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = self.expect_repo_created_strings( freebayes_repository_name )
+        repository = self.get_or_create_repository( name=freebayes_repository_name,
+                                                    description=freebayes_repository_description,
+                                                    long_description=freebayes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='freebayes/freebayes.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded freebayes.tar.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0025_browse_custom_datatypes( self ):
+        '''Load the page to browse custom datatypes.'''
+        '''
+        We are at step 2.
+        Verify that the uploaded emboss datatypes repository has been added to the custom datatypes page.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        changeset_revision = self.get_repository_tip( repository )
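+        # The datatype names and the 'galaxy.datatypes.data:Text' class below are expected
+        # to come from the datatypes_conf.xml uploaded in test_0005.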
+        strings_displayed = [ 'equicktandem', 'est2genome', 'supermatcher', 'galaxy.datatypes.data:Text', changeset_revision, 'user1', 'emboss_datatypes_0430' ]
+        self.browse_custom_datatypes( strings_displayed=strings_displayed )
+
+    def test_0030_browse_tools( self ):
+        '''Load the page to browse tools.'''
+        '''
+        We are at step 3.
+        Verify the existence of emboss tools in the browse tools page.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        changeset_revision = self.get_repository_tip( repository )
+        strings_displayed = [ 'EMBOSS', 'antigenic1', '5.0.0', changeset_revision, 'user1', 'emboss_0430' ]
+        self.browse_tools( strings_displayed=strings_displayed )
+
+    def test_0035_browse_repository_dependencies( self ):
+        '''Browse repository dependencies and look for a dependency on emboss_datatypes_0430.'''
+        '''
+        We are at step 4.
+        Verify that the browse repository dependencies page shows emboss_datatypes_0430 as a dependency of emboss_0430.
+        '''
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        datatypes_changeset_revision = self.get_repository_tip( datatypes_repository )
+        emboss_changeset_revision = self.get_repository_tip( emboss_repository )
+        strings_displayed = [ datatypes_changeset_revision, emboss_changeset_revision, 'emboss_datatypes_0430', 'user1', 'emboss_0430' ]
+        self.browse_repository_dependencies( strings_displayed=strings_displayed )
+
+    def test_0040_browse_tool_dependencies( self ):
+        '''Browse tool dependencies and look for the right versions of freebayes and samtools.'''
+        '''
+        We are at step 5.
+        Verify that the browse tool dependencies page shows the correct dependencies defined for freebayes_0430.
+        '''
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        freebayes_changeset_revision = self.get_repository_tip( freebayes_repository )
+        strings_displayed = [ freebayes_changeset_revision, 'freebayes_0430', 'user1', '0.9.4_9696d0ce8a96', 'freebayes', 'samtools', '0.1.18' ]
+        self.browse_tool_dependencies( strings_displayed=strings_displayed )
diff --git a/test/shed_functional/functional/test_0440_deleting_dependency_definitions.py b/test/shed_functional/functional/test_0440_deleting_dependency_definitions.py
new file mode 100644
index 0000000..7f379c4
--- /dev/null
+++ b/test/shed_functional/functional/test_0440_deleting_dependency_definitions.py
@@ -0,0 +1,357 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+column_repository_name = 'column_maker_0440'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0440'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+bwa_package_repository_name = 'bwa_package_0440'
+bwa_package_repository_description = "BWA Package Repository"
+bwa_package_repository_long_description = "BWA repository with a package tool dependency defined for BWA 0.5.9."
+
+bwa_base_repository_name = 'bwa_base_0440'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "NT space mapping with BWA"
+
+bwa_tool_dependency_repository_name = 'bwa_tool_dependency_0440'
+bwa_tool_dependency_repository_description = "BWA Base"
+bwa_tool_dependency_repository_long_description = "NT space mapping with BWA"
+
+'''
+Simple repository dependencies:
+1. Create and populate column_maker_0440 so that it has an installable revision 0.
+2. Create and populate convert_chars_0440 so that it has an installable revision 0.
+3. Add a valid simple repository_dependencies.xml to convert_chars_0440 that points to the installable revision of column_maker_0440.
+4. Make sure the installable revision of convert_chars_0440 is now revision 1 instead of revision 0.
+5. Delete repository_dependencies.xml from convert_chars_0440, and make sure convert_chars_0440 now has two installable revisions: 1 and 2.
+
+Complex repository dependencies:
+1. Create and populate bwa_package_0440 so that it has a valid tool dependency definition and an installable revision 0.
+2. Create and populate bwa_base_0440 so that it has an installable revision 0.
+3. Add a valid complex repository dependency tool_dependencies.xml to bwa_base_0440 that points to the installable revision 0 of bwa_package_0440.
+4. Make sure that the bwa_base_0440 installable revision is now revision 1 instead of revision 0.
+5. Delete tool_dependencies.xml from bwa_base_0440, and make sure bwa_base_0440 now has two installable revisions: 1 and 2.
+
+Tool dependencies:
+1. Create and populate bwa_tool_dependency_0440 so that it has a valid tool dependency definition and an installable revision 0.
+2. Delete tool_dependencies.xml from bwa_tool_dependency_0440, and make sure that bwa_tool_dependency_0440 still has
+   a single installable revision 0.
+3. Add the same tool_dependencies.xml file to bwa_tool_dependency_0440, and make sure that bwa_tool_dependency_0440
+   still has a single installable revision 0.
+'''
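+
+# For reference, a simple repository_dependencies.xml of the kind these tests upload
+# is sketched below. This is an illustration, not a file shipped with this test; the
+# toolshed url and changeset revision shown are placeholders for the values the test
+# framework derives from the live repositories.
+#
+# <?xml version="1.0"?>
+# <repositories description="Requires column_maker_0440.">
+#     <repository toolshed="http://<tool shed url>"
+#                 name="column_maker_0440"
+#                 owner="user1"
+#                 changeset_revision="<installable revision>" />
+# </repositories>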
+
+
+class TestDeletedDependencies( ShedTwillTestCase ):
+    '''Test metadata setting when dependency definitions are deleted.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        """
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+        Previously created accounts will not be re-created.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_column_maker_repository( self ):
+        '''
+        Create and populate a repository named column_maker_0440.
+
+        We are at simple repository dependencies, step 1 - Create and populate column_maker_0440 so that it has an installable revision 0.
+        '''
+        category = self.create_category( name='Test 0440 Deleted Dependency Definitions',
+                                         description='Description of Deleted Dependency Definitions category for test 0440' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = [ "Repository <b>column_maker_0440</b> has been created"  ]
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='column_maker/column_maker.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded column maker tool tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_convert_chars_repository( self ):
+        '''
+        Create and populate a repository named convert_chars_0440.
+
+        We are at simple repository dependencies, step 2 - Create and populate convert_chars_0440 so that it has an installable revision 0.
+        '''
+        category = self.test_db_util.get_category_by_name( 'Test 0440 Deleted Dependency Definitions' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = [ "Repository <b>convert_chars_0440</b> has been created"  ]
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='convert_chars/convert_chars.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded convert chars tool tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_dependency_on_convert_chars( self ):
+        '''
+        Create a dependency definition file that specifies column_maker_0440 and upload it to convert_chars_0440.
+
+        We are at simple repository dependencies, step 3 - Add a valid simple repository_dependencies.xml to
+        convert_chars_0440 that points to the installable revision of column_maker_0440.
+        '''
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        dependency_xml_path = self.generate_temp_path( 'test_0440', additional_paths=[ 'dependencies' ] )
+        column_tuple = ( self.url, column_repository.name, column_repository.user.username, self.get_repository_tip( column_repository ) )
+        # After this, convert_chars_0440 should depend on column_maker_0440.
+        self.create_repository_dependency( repository=convert_repository,
+                                           repository_tuples=[ column_tuple ],
+                                           filepath=dependency_xml_path,
+                                           prior_installation_required=True )
+        self.check_repository_dependency( convert_repository, column_repository )
+
+    def test_0020_verify_dependency_metadata( self ):
+        '''
+        Verify that uploading the dependency moved metadata to the tip.
+
+        We are at simple repository dependencies, step 4 - Make sure the installable revision of convert_chars_0440 is now
+        revision 1 (the tip) instead of revision 0.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        tip = self.get_repository_tip( repository )
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, tip )
+        # Make sure that the new tip is now downloadable, and that there are no other downloadable revisions.
+        assert metadata_record.downloadable, 'Tip is not downloadable.'
+        assert len( repository.downloadable_revisions ) == 1, 'Repository %s has %d downloadable revisions, expected 1.' % \
+            ( repository.name, len( repository.downloadable_revisions ) )
+
+    def test_0025_delete_repository_dependency( self ):
+        '''
+        Delete the repository_dependencies.xml file from convert_chars_0440.
+
+        We are at simple repository dependencies, step 5 - Delete repository_dependencies.xml from convert_chars_0440.
+        Make sure convert_chars_0440 now has two installable revisions: 1 and 2.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        # Record the current tip, so we can verify that it's still a downloadable revision after repository_dependencies.xml
+        # is deleted and a new downloadable revision is created.
+        old_changeset_revision = self.get_repository_tip( repository )
+        self.delete_files_from_repository( repository, filenames=[ 'repository_dependencies.xml' ] )
+        new_changeset_revision = self.get_repository_tip( repository )
+        # Check that the old changeset revision is still downloadable.
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, old_changeset_revision )
+        assert metadata_record.downloadable, 'The revision of %s that contains repository_dependencies.xml is no longer downloadable.' % \
+            repository.name
+        # Check that the new tip is also downloadable.
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, new_changeset_revision )
+        assert metadata_record.downloadable, 'The revision of %s that does not contain repository_dependencies.xml is not downloadable.' % \
+            repository.name
+        # Explicitly reload the repository instance from the database, to avoid potential caching issues.
+        self.test_db_util.refresh( repository )
+        # Verify that there are only two downloadable revisions.
+        assert len( repository.downloadable_revisions ) == 2, 'Repository %s has %d downloadable revisions, expected 2.' % \
+            ( repository.name, len( repository.downloadable_revisions ) )
+
+    def test_0030_create_bwa_package_repository( self ):
+        '''
+        Create and populate the bwa_package_0440 repository.
+
+        We are at complex repository dependencies, step 1 - Create and populate bwa_package_0440 so that it has a valid
+        tool dependency definition and an installable revision 0.
+        '''
+        category = self.test_db_util.get_category_by_name( 'Test 0440 Deleted Dependency Definitions' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = [ "Repository <b>bwa_package_0440</b> has been created"  ]
+        repository = self.get_or_create_repository( name=bwa_package_repository_name,
+                                                    description=bwa_package_repository_description,
+                                                    long_description=bwa_package_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='bwa/complex/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded package tool dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0035_create_bwa_base_repository( self ):
+        '''
+        Create and populate the bwa_base_0440 repository.
+
+        We are at complex repository dependencies, step 2 - Create and populate bwa_base_0440 so that it has an installable revision 0.
+        This repository should contain a tool with a defined dependency that will be satisfied by the tool dependency defined in bwa_package_0440.
+        '''
+        category = self.test_db_util.get_category_by_name( 'Test 0440 Deleted Dependency Definitions' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = [ "Repository <b>bwa_base_0440</b> has been created"  ]
+        repository = self.get_or_create_repository( name=bwa_base_repository_name,
+                                                    description=bwa_base_repository_description,
+                                                    long_description=bwa_base_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='bwa/complex/bwa_base.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded BWA nucleotide space mapping tool tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0040_create_dependency_on_bwa_package_repository( self ):
+        '''
+        Create a complex repository dependency on bwa_package_0440 and upload it to bwa_base_0440.
+
+        We are at complex repository dependencies, step 3 - Add a valid complex repository dependency tool_dependencies.xml to
+        bwa_base_0440 that points to the installable revision 0 of bwa_package_0440.
+        '''
+        bwa_package_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+        bwa_base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        dependency_path = self.generate_temp_path( 'test_0440', additional_paths=[ 'complex' ] )
+        changeset_revision = self.get_repository_tip( bwa_package_repository )
+        bwa_tuple = ( self.url, bwa_package_repository.name, bwa_package_repository.user.username, changeset_revision )
+        self.create_repository_dependency( repository=bwa_base_repository,
+                                           repository_tuples=[ bwa_tuple ],
+                                           filepath=dependency_path,
+                                           prior_installation_required=True,
+                                           complex=True,
+                                           package='bwa',
+                                           version='0.5.9' )
+
+    def test_0045_verify_dependency_metadata( self ):
+        '''
+        Verify that uploading the dependency moved metadata to the tip.
+
+        We are at complex repository dependencies, step 4 - Make sure that the bwa_base_0440 installable revision is now
+        revision 1 instead of revision 0.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        tip = self.get_repository_tip( repository )
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, tip )
+        # Make sure that the new tip is now downloadable, and that there are no other downloadable revisions.
+        assert metadata_record.downloadable, 'Tip is not downloadable.'
+        assert len( repository.downloadable_revisions ) == 1, 'Repository %s has %d downloadable revisions, expected 1.' % \
+            ( repository.name, len( repository.downloadable_revisions ) )
+
+    def test_0050_delete_complex_repository_dependency( self ):
+        '''
+        Delete the tool_dependencies.xml file from bwa_base_0440.
+
+        We are at complex repository dependencies, step 5 - Delete tool_dependencies.xml from bwa_base_0440,
+        and make sure bwa_base_0440 now has two installable revisions: 1 and 2.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        # Record the current tip, so we can verify that it's still a downloadable revision after tool_dependencies.xml
+        # is deleted and a new downloadable revision is created.
+        old_changeset_revision = self.get_repository_tip( repository )
+        self.delete_files_from_repository( repository, filenames=[ 'tool_dependencies.xml' ] )
+        new_changeset_revision = self.get_repository_tip( repository )
+        # Check that the old changeset revision is still downloadable.
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, old_changeset_revision )
+        assert metadata_record.downloadable, 'The revision of %s that contains tool_dependencies.xml is no longer downloadable.' % \
+            repository.name
+        # Check that the new tip is also downloadable.
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, new_changeset_revision )
+        assert metadata_record.downloadable, 'The revision of %s that does not contain tool_dependencies.xml is not downloadable.' % \
+            repository.name
+        # Verify that there are only two downloadable revisions.
+        assert len( repository.downloadable_revisions ) == 2, 'Repository %s has %d downloadable revisions, expected 2.' % \
+            ( repository.name, len( repository.downloadable_revisions ) )
+
+    def test_0055_create_bwa_tool_dependency_repository( self ):
+        '''
+        Create and populate the bwa_tool_dependency_0440 repository.
+
+        We are at tool dependencies, step 1 - Create and populate bwa_tool_dependency_0440 so that it has a valid tool
+        dependency definition and an installable revision 0.
+        '''
+        category = self.test_db_util.get_category_by_name( 'Test 0440 Deleted Dependency Definitions' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = [ "Repository <b>bwa_tool_dependency_0440</b> has been created"  ]
+        repository = self.get_or_create_repository( name=bwa_tool_dependency_repository_name,
+                                                    description=bwa_tool_dependency_repository_description,
+                                                    long_description=bwa_tool_dependency_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        self.upload_file( repository,
+                          filename='bwa/complex/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded package tool dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0060_delete_bwa_tool_dependency_definition( self ):
+        '''
+        Delete the tool_dependencies.xml file from bwa_tool_dependency_0440.
+
+        We are at tool dependencies, step 2 - Delete tool_dependencies.xml from bwa_tool_dependency_0440.
+        Make sure bwa_tool_dependency_0440 still has a downloadable changeset revision at the old tip.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( bwa_tool_dependency_repository_name, common.test_user_1_name )
+        # Record the current tip, so we can verify that it's still a downloadable revision after tool_dependencies.xml
+        # is deleted and the new tip does not get its own downloadable revision.
+        old_changeset_revision = self.get_repository_tip( repository )
+        self.delete_files_from_repository( repository, filenames=[ 'tool_dependencies.xml' ] )
+        new_changeset_revision = self.get_repository_tip( repository )
+        # Check that the old changeset revision is still downloadable.
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, old_changeset_revision )
+        assert metadata_record.downloadable, 'The revision of %s that contains tool_dependencies.xml is no longer downloadable.' % \
+            repository.name
+        # Check that the new tip does not have a metadata revision.
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, new_changeset_revision )
+        # If a changeset revision does not have metadata, the above method will return None.
+        assert metadata_record is None, 'The tip revision of %s should not have metadata, but metadata was found.' % repository.name
+        # Verify that the new changeset revision is not downloadable.
+        assert len( repository.downloadable_revisions ) == 1, 'Repository %s has %d downloadable revisions, expected 1.' % \
+            ( repository.name, len( repository.downloadable_revisions ) )
+
+    def test_0065_reupload_bwa_tool_dependency_definition( self ):
+        '''
+        Reupload the tool_dependencies.xml file to bwa_tool_dependency_0440.
+
+        We are at tool dependencies, step 3 - Add the same tool_dependencies.xml file to bwa_tool_dependency_0440, and make sure
+        that bwa_tool_dependency_0440 still has a single installable revision 0.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( bwa_tool_dependency_repository_name, common.test_user_1_name )
+        # Record the current tip, so we can verify that it's still not a downloadable revision after tool_dependencies.xml
+        # is re-uploaded and a new downloadable revision is created.
+        old_changeset_revision = self.get_repository_tip( repository )
+        self.upload_file( repository,
+                          filename='bwa/complex/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded package tool dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        new_changeset_revision = self.get_repository_tip( repository )
+        # Check that the old changeset revision no longer has a metadata record, and so is not downloadable.
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, old_changeset_revision )
+        assert metadata_record is None, 'The revision of %s that does not contain tool_dependencies.xml should not be downloadable, but is.' % \
+            repository.name
+        # Check that the new tip is downloadable.
+        metadata_record = self.get_repository_metadata_by_changeset_revision( repository, new_changeset_revision )
+        assert metadata_record.downloadable, 'The revision of %s that contains tool_dependencies.xml is not downloadable.' % \
+            repository.name
+        # Verify that there is only one downloadable revision.
+        assert len( repository.downloadable_revisions ) == 1, 'Repository %s has %d downloadable revisions, expected 1.' % \
+            ( repository.name, len( repository.downloadable_revisions ) )
diff --git a/test/shed_functional/functional/test_0460_upload_to_repository.py b/test/shed_functional/functional/test_0460_upload_to_repository.py
new file mode 100644
index 0000000..7197378
--- /dev/null
+++ b/test/shed_functional/functional/test_0460_upload_to_repository.py
@@ -0,0 +1,481 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+category_name = 'Test 0460 Automatic repository revision completion'
+category_description = 'Test 0460 Automatic repository revision completion'
+datatypes_repository_name = 'emboss_datatypes_0460'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+bwa_repository_name = 'package_bwa_0_5_9_0460'
+bwa_repository_description = "Contains a tool dependency definition that downloads and compiles version 0.5.9 of the BWA package"
+bwa_repository_long_description = "bwa (alignment via Burrows-Wheeler transformation) 0.5.9-r16 by Heng Li"
+
+'''
+For all steps, verify that the generated dependency points to the tip of the specified repository.
+
+1)  Create and populate emboss_datatypes_0460.
+
+2)  Create and populate package_bwa_0_5_9_0460.
+
+3)  Create complex_dependency_test_1_0460, complex_dependency_test_2_0460, complex_dependency_test_3_0460,
+    complex_dependency_test_4_0460, complex_dependency_test_5_0460.
+
+4)  Upload an uncompressed tool_dependencies.xml to complex_dependency_test_1_0460 that specifies a complex
+    repository dependency on package_bwa_0_5_9_0460 without a specified changeset revision or tool shed url.
+
+5)  Upload a tarball to complex_dependency_test_2_0460 with a tool_dependencies.xml in the root of the tarball.
+
+6)  Upload a tarball to complex_dependency_test_3_0460 with a tool_dependencies.xml in a subfolder within the tarball.
+
+7)  Create hg_tool_dependency_0460 and hg_subfolder_tool_dependency_0460 and populate with tool dependencies.
+
+8)  Upload to complex_dependency_test_4_0460 using the url hg://<tool shed url>/repos/user1/hg_tool_dependency_0460.
+
+9)  Upload to complex_dependency_test_5_0460 using the url hg://<tool shed url>/repos/user1/hg_subfolder_tool_dependency_0460.
+
+10) Create repository_dependency_test_1_0460, repository_dependency_test_2_0460, repository_dependency_test_3_0460,
+    repository_dependency_test_4_0460, repository_dependency_test_5_0460.
+
+11) Upload an uncompressed repository_dependencies.xml to repository_dependency_test_1_0460 that specifies a
+    repository dependency on emboss_datatypes_0460 without a specified changeset revision or tool shed url.
+
+12) Upload a tarball to repository_dependency_test_2_0460 with a repository_dependencies.xml in the root of the tarball.
+
+13) Upload a tarball to repository_dependency_test_3_0460 with a repository_dependencies.xml in a subfolder within the tarball.
+
+14) Create hg_repository_dependency_0460 and hg_subfolder_repository_dependency_0460 and populate them with repository_dependencies.xml files.
+
+15) Upload to repository_dependency_test_4_0460 using the url hg://<tool shed url>/repos/user1/hg_repository_dependency_0460.
+
+16) Upload to repository_dependency_test_5_0460 using the url hg://<tool shed url>/repos/user1/hg_subfolder_repository_dependency_0460.
+'''
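+
+# For reference, the kind of complex dependency definition whose revision the tool
+# shed completes automatically is sketched below. This is an illustration rather
+# than a file shipped with this test: note that the <repository> tag deliberately
+# omits the toolshed and changeset_revision attributes, which the tool shed is
+# expected to fill in with the tip of package_bwa_0_5_9_0460 on upload.
+#
+# <?xml version="1.0"?>
+# <tool_dependency>
+#     <package name="bwa" version="0.5.9">
+#         <repository name="package_bwa_0_5_9_0460" owner="user1" />
+#     </package>
+# </tool_dependency>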
+
+
+class TestAutomaticDependencyRevision( ShedTwillTestCase ):
+    '''Test defining repository dependencies without specifying the changeset revision.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_datatypes_repository( self ):
+        '''
+        Create and populate the emboss_datatypes_0460 repository.
+
+        This is step 1 - Create and populate emboss_datatypes_0460.
+        '''
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                    description=datatypes_repository_description,
+                                                    long_description=datatypes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='emboss/datatypes/datatypes_conf.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate emboss_datatypes_0460 with datatype definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_bwa_package_repository( self ):
+        '''
+        Create and populate the package_bwa_0_5_9_0460 repository.
+
+        This is step 2 - Create and populate package_bwa_0_5_9_0460.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=bwa_repository_name,
+                                                    description=bwa_repository_description,
+                                                    long_description=bwa_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='bwa/complex/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_bwa_0_5_9_0460 with a tool dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_tool_dependency_repositories( self ):
+        '''
+        Create repositories for testing complex dependency generation.
+
+        This is step 3 - Create complex_dependency_test_1_0460, complex_dependency_test_2_0460, complex_dependency_test_3_0460,
+        complex_dependency_test_4_0460, complex_dependency_test_5_0460. Each of these repositories will be populated in a way
+        that tests a different way to achieve the same resulting dependency structure using complex tool dependencies.
+        The different methods being tested are:
+        - Upload an uncompressed tool_dependencies.xml to the root of the repository.
+        - Upload a tool_dependencies.xml in a tarball, not in a subfolder.
+        - Upload a tool_dependencies.xml in a subfolder within a tarball.
+        - Upload via url, with the tool_dependencies.xml in the root of another repository.
+        - Upload via url, with the tool_dependencies.xml in a subfolder within another repository.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository_base_name = 'complex_dependency_test_%d_0460'
+        repository_base_description = 'Test #%d for complex repository dependency definitions.'
+        repository_base_long_description = 'Test #%d for complex repository dependency definitions.'
+        for number in range( 1, 6 ):
+            self.get_or_create_repository( name=repository_base_name % number,
+                                           description=repository_base_description % number,
+                                           long_description=repository_base_long_description % number,
+                                           owner=common.test_user_1_name,
+                                           category_id=self.security.encode_id( category.id ),
+                                           strings_displayed=[] )
+
+    def test_0020_populate_complex_dependency_test_1_0460( self ):
+        '''
+        Populate complex_dependency_test_1_0460.
+
+        This is step 4 - Upload an uncompressed tool_dependencies.xml to complex_dependency_test_1_0460 that specifies
+        a complex repository dependency on package_bwa_0_5_9_0460 without a specified changeset revision or tool shed url.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'complex_dependency_test_1_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'package_bwa_0_5_9_0460', common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='0460_files/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded complex repository dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'package_bwa_0_5_9_0460', 'bwa', '0.5.9', 'package', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository, filename='tool_dependencies.xml', strings_displayed=[ changeset_revision ] )
+
+    def test_0025_populate_complex_dependency_test_2_0460( self ):
+        '''
+        Populate complex_dependency_test_2_0460.
+
+        This is step 5 - Upload a tarball with tool_dependencies.xml to complex_dependency_test_2_0460 that specifies
+        a complex repository dependency on package_bwa_0_5_9_0460 without a specified changeset revision or tool shed url.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'complex_dependency_test_2_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'package_bwa_0_5_9_0460', common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='0460_files/tool_dependencies_in_root.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=True,
+                          commit_message='Uploaded complex repository dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'package_bwa_0_5_9_0460', 'bwa', '0.5.9', 'package', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository, filename='tool_dependencies.xml', strings_displayed=[ changeset_revision ] )
+
+    def test_0030_populate_complex_dependency_test_3_0460( self ):
+        '''
+        Populate complex_dependency_test_3_0460.
+
+        This is step 6 - Upload a tarball with tool_dependencies.xml in a subfolder to complex_dependency_test_3_0460 that
+        specifies a complex repository dependency on package_bwa_0_5_9_0460 without a specified changeset revision or tool shed url.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'complex_dependency_test_3_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'package_bwa_0_5_9_0460', common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='0460_files/tool_dependencies_in_subfolder.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=True,
+                          commit_message='Uploaded complex repository dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'package_bwa_0_5_9_0460', 'bwa', '0.5.9', 'package', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository,
+                                               filename='tool_dependencies.xml',
+                                               filepath='subfolder',
+                                               strings_displayed=[ changeset_revision ] )
+
+    def test_0035_create_repositories_for_url_upload( self ):
+        '''
+        Create and populate hg_tool_dependency_0460 and hg_subfolder_tool_dependency_0460.
+
+        This is step 7 - Create hg_tool_dependency_0460 and hg_subfolder_tool_dependency_0460 and populate with tool dependencies.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name='hg_tool_dependency_0460',
+                                                    description=bwa_repository_description,
+                                                    long_description=bwa_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='0460_files/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate hg_tool_dependency_0460 with a tool dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        repository = self.get_or_create_repository( name='hg_subfolder_tool_dependency_0460',
+                                                    description=bwa_repository_description,
+                                                    long_description=bwa_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='0460_files/tool_dependencies_in_subfolder.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate hg_subfolder_tool_dependency_0460 with a tool dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0040_url_upload_to_complex_test( self ):
+        '''
+        Populate complex_dependency_test_4_0460.
+
+        This is step 8 - Upload to complex_dependency_test_4_0460 using the url hg://<tool shed url>/repos/user1/hg_tool_dependency_0460.
+        '''
+        url = 'hg://%s:%s/repos/user1/hg_tool_dependency_0460' % ( self.host, self.port )
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'complex_dependency_test_4_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'package_bwa_0_5_9_0460', common.test_user_1_name )
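+        # Uploading via an hg:// url is expected to pull the referenced repository's
+        # contents over Mercurial and commit them here, rather than uploading a local file.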
+        self.upload_url( repository,
+                         url=url,
+                         filepath=None,
+                         valid_tools_only=True,
+                         uncompress_file=False,
+                         remove_repo_files_not_in_tar=True,
+                         commit_message='Uploaded complex repository dependency definition.',
+                         strings_displayed=[],
+                         strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'package_bwa_0_5_9_0460', 'bwa', '0.5.9', 'package', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository,
+                                               filename='tool_dependencies.xml',
+                                               strings_displayed=[ changeset_revision ] )
+
+    def test_0045_url_upload_to_complex_test( self ):
+        '''
+        Populate complex_dependency_test_5_0460.
+
+        This is step 9 - Upload to complex_dependency_test_5_0460 using the url hg://<tool shed url>/repos/user1/hg_subfolder_tool_dependency_0460.
+        '''
+        url = 'hg://%s:%s/repos/user1/hg_subfolder_tool_dependency_0460' % ( self.host, self.port )
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'complex_dependency_test_5_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'package_bwa_0_5_9_0460', common.test_user_1_name )
+        self.upload_url( repository,
+                         url=url,
+                         filepath=None,
+                         valid_tools_only=True,
+                         uncompress_file=False,
+                         remove_repo_files_not_in_tar=True,
+                         commit_message='Uploaded complex repository dependency definition.',
+                         strings_displayed=[],
+                         strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'package_bwa_0_5_9_0460', 'bwa', '0.5.9', 'package', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository,
+                                               filename='tool_dependencies.xml',
+                                               filepath='subfolder',
+                                               strings_displayed=[ changeset_revision ] )
+
+    def test_0050_create_repositories_for_simple_dependencies( self ):
+        '''
+        Create repositories for testing simple dependency generation.
+
+        This is step 10 - Create repository_dependency_test_1_0460, repository_dependency_test_2_0460, repository_dependency_test_3_0460,
+        repository_dependency_test_4_0460, repository_dependency_test_5_0460. Each of these repositories will be populated in a way
+        that tests a different way to achieve the same resulting dependency structure using simple repository dependencies.
+        The different methods being tested are:
+        - Upload an uncompressed repository_dependencies.xml to the root of the repository.
+        - Upload a repository_dependencies.xml in a tarball, not in a subfolder.
+        - Upload a repository_dependencies.xml in a subfolder within a tarball.
+        - Upload via url, with the repository_dependencies.xml in the root of another repository.
+        - Upload via url, with the repository_dependencies.xml in a subfolder within another repository.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository_base_name = 'repository_dependency_test_%d_0460'
+        repository_base_description = 'Test #%d for repository dependency definitions.'
+        repository_base_long_description = 'Test #%d for repository dependency definitions.'
+        for number in range( 1, 6 ):
+            self.get_or_create_repository( name=repository_base_name % number,
+                                           description=repository_base_description % number,
+                                           long_description=repository_base_long_description % number,
+                                           owner=common.test_user_1_name,
+                                           category_id=self.security.encode_id( category.id ),
+                                           strings_displayed=[] )
+
+    def test_0055_populate_repository_dependency_test_1_0460( self ):
+        '''
+        Populate repository_dependency_test_1_0460.
+
+        This is step 11 - Upload an uncompressed repository_dependencies.xml to repository_dependency_test_1_0460 that specifies a
+        repository dependency on emboss_datatypes_0460 without a specified changeset revision or tool shed url.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'repository_dependency_test_1_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_datatypes_0460', common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='0460_files/repository_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded complex repository dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'emboss_datatypes_0460', 'user1', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository, filename='repository_dependencies.xml', strings_displayed=[ changeset_revision ] )
+
+    def test_0060_populate_repository_dependency_test_2_0460( self ):
+        '''
+        Populate repository_dependency_test_2_0460.
+
+        This is step 12 - Upload a tarball to repository_dependency_test_2_0460 with a repository_dependencies.xml in the root of the tarball.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'repository_dependency_test_2_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_datatypes_0460', common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='0460_files/repository_dependencies_in_root.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=True,
+                          commit_message='Uploaded complex repository dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'emboss_datatypes_0460', 'user1', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository, filename='repository_dependencies.xml', strings_displayed=[ changeset_revision ] )
+
+    def test_0065_populate_repository_dependency_test_3_0460( self ):
+        '''
+        Populate repository_dependency_test_3_0460.
+
+        This is step 13 - Upload a tarball to repository_dependency_test_3_0460 with a repository_dependencies.xml in a
+        subfolder within the tarball.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'repository_dependency_test_3_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_datatypes_0460', common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='0460_files/repository_dependencies_in_subfolder.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=True,
+                          commit_message='Uploaded complex repository dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'emboss_datatypes_0460', 'user1', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository,
+                                               filename='repository_dependencies.xml',
+                                               filepath='subfolder',
+                                               strings_displayed=[ changeset_revision ] )
+
+    def test_0070_create_repositories_for_url_upload( self ):
+        '''
+        Create and populate hg_repository_dependency_0460 and hg_subfolder_repository_dependency_0460.
+
+        This is step 14 - Create hg_repository_dependency_0460 and hg_subfolder_repository_dependency_0460 and populate
+        with repository dependencies.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name='hg_repository_dependency_0460',
+                                                    description=bwa_repository_description,
+                                                    long_description=bwa_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='0460_files/repository_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate hg_repository_dependency_0460 with a repository dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        repository = self.get_or_create_repository( name='hg_subfolder_repository_dependency_0460',
+                                                    description=bwa_repository_description,
+                                                    long_description=bwa_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='0460_files/repository_dependencies_in_subfolder.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate hg_subfolder_repository_dependency_0460 with a repository dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0075_url_upload_to_complex_test( self ):
+        '''
+        Populate repository_dependency_test_4_0460.
+
+        This is step 15 - Upload to repository_dependency_test_4_0460 using the url
+        hg://<tool shed url>/repos/user1/hg_repository_dependency_0460.
+        '''
+        url = 'hg://%s:%s/repos/user1/hg_repository_dependency_0460' % ( self.host, self.port )
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'repository_dependency_test_4_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_datatypes_0460', common.test_user_1_name )
+        self.upload_url( repository,
+                         url=url,
+                         filepath=None,
+                         valid_tools_only=True,
+                         uncompress_file=False,
+                         remove_repo_files_not_in_tar=True,
+                         commit_message='Uploaded repository dependency definition.',
+                         strings_displayed=[],
+                         strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'emboss_datatypes_0460', 'user1', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository,
+                                               filename='repository_dependencies.xml',
+                                               strings_displayed=[ changeset_revision ] )
+
+    def test_0080_url_upload_to_complex_test( self ):
+        '''Populate repository_dependency_test_5_0460.'''
+        '''
+        This is step 16 - Upload to repository_dependency_test_5_0460 using the url
+        hg://<tool shed url>/repos/user1/hg_subfolder_repository_dependency_0460.
+        '''
+        url = 'hg://%s:%s/repos/user1/hg_subfolder_repository_dependency_0460' % ( self.host, self.port )
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'repository_dependency_test_5_0460', common.test_user_1_name )
+        package_repository = self.test_db_util.get_repository_by_name_and_owner( 'emboss_datatypes_0460', common.test_user_1_name )
+        self.upload_url( repository,
+                         url=url,
+                         filepath=None,
+                         valid_tools_only=True,
+                         uncompress_file=False,
+                         remove_repo_files_not_in_tar=True,
+                         commit_message='Uploaded repository dependency definition.',
+                         strings_displayed=[],
+                         strings_not_displayed=[] )
+        changeset_revision = self.get_repository_tip( package_repository )
+        strings_displayed = [ 'emboss_datatypes_0460', 'user1', changeset_revision ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_repository_file_contents( repository,
+                                               filename='repository_dependencies.xml',
+                                               filepath='subfolder',
+                                               strings_displayed=[ changeset_revision ] )
diff --git a/test/shed_functional/functional/test_0470_tool_dependency_repository_type.py b/test/shed_functional/functional/test_0470_tool_dependency_repository_type.py
new file mode 100644
index 0000000..216aad6
--- /dev/null
+++ b/test/shed_functional/functional/test_0470_tool_dependency_repository_type.py
@@ -0,0 +1,255 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+category_name = 'Test 0470 Tool dependency repository type'
+category_description = 'Test script 0470 for changing repository types.'
+package_libx11_repository_name = 'package_x11_client_1_5_proto_7_0_0470'
+package_libx11_repository_description = "Contains a tool dependency definition that provides the X11 client libraries and core protocol header files."
+package_libx11_repository_long_description = "Xlib is an X Window System protocol client library written in the C programming language."
+package_emboss_repository_name = 'package_emboss_5_0_0_0470'
+package_emboss_repository_description = "Contains a tool dependency definition that downloads and compiles version 5.0.0 of the EMBOSS tool suite."
+package_emboss_repository_long_description = 'EMBOSS is "The European Molecular Biology Open Software Suite".'
+datatypes_repository_name = 'emboss_datatypes_0470'
+datatypes_repository_description = 'Galaxy applicable data formats used by Emboss tools.'
+datatypes_repository_long_description = 'Galaxy applicable data formats used by Emboss tools.  This repository contains no tools.'
+emboss_repository_name = 'emboss_5_0470'
+emboss_repository_description = "Galaxy wrappers for Emboss version 5.0.0 tools"
+emboss_repository_long_description = "Galaxy wrappers for Emboss version 5.0.0 tools"
+
+'''
+1. Create and populate a repository named package_x11_client_1_5_proto_7_0 that contains only a single file named tool_dependencies.xml.
+   Keep the repository type as the default "Unrestricted".
+
+2. Create a repository named package_emboss_5_0_0 of type "Unrestricted" that has a repository dependency definition that defines the
+   above package_x11_client_1_5_proto_7_0 repository. Upload the tool_dependencies.xml file such that it does not have a changeset_revision
+   defined so it will get automatically populated.
+
+3. Create a repository named emboss_5 of type "Unrestricted" that has a tool_dependencies.xml file defining a complex repository dependency
+   on the package_emboss_5_0_0 repository above. Upload the tool_dependencies.xml file such that it does not have a changeset_revision defined
+   so it will get automatically populated.
+
+4. Add a comment to the tool_dependencies.xml file to be uploaded to the package_x11_client_1_5_proto_7_0 repository, creating a new installable
+   changeset revision at the repository tip.
+
+5. Add a comment to the tool_dependencies.xml file for the package_emboss_5_0_0 repository, eliminating the changeset_revision attribute so
+   that it gets automatically populated when uploaded. After uploading the file, the package_emboss_5_0_0 repository should have 2
+   installable changeset revisions.
+
+6. Add a comment to the tool_dependencies.xml file in the emboss_5 repository, eliminating the changeset_revision attribute so that it gets
+   automatically populated when uploaded. After uploading the file, the emboss_5 repository should have 2 installable metadata revisions.
+
+7. Change the repository type of the package_x11_client_1_5_proto_7_0 repository to be tool_dependency_definition.
+
+8. Change the repository type of the package_emboss_5_0_0 repository to be tool_dependency_definition.
+
+9. Reset metadata on the package_emboss_5_0_0 and package_x11_client_1_5_proto_7_0 repositories. They should now have only
+   their tip as the installable revision.
+
+10. Reset metadata on the emboss_5 repository. It should now have only its tip as the installable revision.
+'''
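+# For reference, a complex repository dependency inside tool_dependencies.xml has roughly
+# this shape (hypothetical values; the real fixtures live under the emboss/ test data
+# directory referenced below):
+#
+#   <tool_dependency>
+#       <package name="emboss" version="5.0.0">
+#           <repository name="package_emboss_5_0_0_0470" owner="user1" />
+#       </package>
+#   </tool_dependency>
+#
+# When the toolshed and changeset_revision attributes are omitted from the <repository>
+# tag, the tool shed auto-populates them at upload time, which is what steps 2, 3, 5
+# and 6 above rely on.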
+
+
+class TestToolDependencyRepositoryType( ShedTwillTestCase ):
+    '''Test the behavior of the tool_dependency_definition repository type.'''
+
+    def test_0000_initiate_users_and_category( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+
+    def test_0005_create_libx11_repository( self ):
+        '''Create and populate package_x11_client_1_5_proto_7_0_0470.'''
+        '''
+        This is step 1 - Create and populate a repository named package_x11_client_1_5_proto_7_0.
+
+        Create and populate a repository named package_x11_client_1_5_proto_7_0 that contains only a single file named tool_dependencies.xml.
+        Keep the repository type as the default "Unrestricted".
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_libx11_repository_name,
+                                                    description=package_libx11_repository_description,
+                                                    long_description=package_libx11_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Upload the tool dependency definition to the package_x11_client_1_5_proto_7_0_0470 repository.
+        self.upload_file( repository,
+                          filename='emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_x11_client_1_5_proto_7_0_0470 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_emboss_5_0_0_repository( self ):
+        '''Create and populate package_emboss_5_0_0_0470.'''
+        '''
+        This is step 2 - Create a repository named package_emboss_5_0_0 of type "Unrestricted".
+
+        Create a repository named package_emboss_5_0_0 of type "Unrestricted" that has a repository dependency definition that defines the
+        above package_x11_client_1_5_proto_7_0 repository. Upload the tool_dependencies.xml file such that it does not have a changeset_revision
+        defined so it will get automatically populated.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_emboss_repository_name,
+                                                    description=package_emboss_repository_description,
+                                                    long_description=package_emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Upload the edited tool dependency definition to the package_emboss_5_0_0 repository.
+        self.upload_file( repository,
+                          filename='emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_emboss_5_0_0_0470 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_emboss_5_repository( self ):
+        '''Create and populate emboss_5_0470.'''
+        '''
+        This is step 3 - Create a repository named emboss_5 of type "Unrestricted".
+
+        Create a repository named emboss_5 of type "Unrestricted" that has a tool-dependencies.xml file defining a complex repository dependency
+        on the package_emboss_5_0_0 repository above. Upload the tool_dependencies.xml file such that it does not have a change set_revision defined
+        so it will get automatically populated.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Populate emboss_5 with tool and dependency definitions.
+        self.upload_file( repository,
+                          filename='emboss/0470_files/emboss_complex_dependency.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate emboss_5 with tool and dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_upload_updated_tool_dependency_to_package_x11( self ):
+        '''Upload a new tool_dependencies.xml to package_x11_client_1_5_proto_7_0_0470.'''
+        '''
+        This is step 4 - Add a comment to the tool_dependencies.xml file to be uploaded to the package_x11_client_1_5_proto_7_0 repository, creating
+        a new installable changeset revision at the repository tip.
+        '''
+        package_x11_repository = self.test_db_util.get_repository_by_name_and_owner( package_libx11_repository_name, common.test_user_1_name )
+        # Upload the tool dependency definition to the package_x11_client_1_5_proto_7_0_0470 repository.
+        self.upload_file( package_x11_repository,
+                          filename='emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_x11_client_1_5_proto_7_0_0470 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        assert len( package_x11_repository.metadata_revisions ) == 1, \
+            'package_x11_client_1_5_proto_7_0_0470 has incorrect number of metadata revisions, expected 1 but found %d' % \
+            len( package_x11_repository.metadata_revisions )
+
+    def test_0025_upload_updated_tool_dependency_to_package_emboss( self ):
+        '''Upload a new tool_dependencies.xml to package_emboss_5_0_0_0470.'''
+        '''
+        This is step 5 - Add a comment to the tool_dependencies.xml file for the package_emboss_5_0_0 repository, eliminating
+        the changeset_revision attribute so that it gets automatically populated when uploaded. After uploading the file,
+        the package_emboss_5_0_0 repository should have 2 installable changeset revisions.
+        '''
+        package_emboss_repository = self.test_db_util.get_repository_by_name_and_owner( package_emboss_repository_name, common.test_user_1_name )
+        # Populate package_emboss_5_0_0_0470 with updated tool dependency definition.
+        self.upload_file( package_emboss_repository,
+                          filename='emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_emboss_5_0_0_0470 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        assert len( package_emboss_repository.metadata_revisions ) == 2, \
+            'package_emboss_5_0_0_0470 has incorrect number of metadata revisions, expected 2 but found %d' % \
+            len( package_emboss_repository.metadata_revisions )
+
+    def test_0030_upload_updated_tool_dependency_to_emboss_5_repository( self ):
+        '''Upload a new tool_dependencies.xml to emboss_5_0470.'''
+        '''
+        This is step 6 - Add a comment to the tool_dependencies.xml file in the emboss_5 repository, eliminating the
+        changeset_revision attribute so that it gets automatically populated when uploaded. After uploading the file,
+        the emboss_5 repository should have 2 installable metadata revisions.
+        '''
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        # Populate emboss_5_0470 with an updated complex repository dependency definition.
+        self.upload_file( emboss_repository,
+                          filename='emboss/0470_files/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Upload updated complex repository dependency definition to emboss_5_0470.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        assert len( emboss_repository.metadata_revisions ) == 2, \
+            'emboss_5_0470 has incorrect number of metadata revisions, expected 2 but found %d' % \
+            len( emboss_repository.metadata_revisions )
+
+    def test_0035_modify_package_x11_repository_type( self ):
+        '''Set package_x11_client_1_5_proto_7_0 to type tool_dependency_definition.'''
+        '''
+        This is step 7 - Change the repository type of the package_x11_client_1_5_proto_7_0 repository to be tool_dependency_definition.
+        '''
+        package_x11_repository = self.test_db_util.get_repository_by_name_and_owner( package_libx11_repository_name, common.test_user_1_name )
+        self.edit_repository_information( package_x11_repository, repository_type='tool_dependency_definition' )
+
+    def test_0040_modify_package_emboss_repository_type( self ):
+        '''Set package_emboss_5_0_0 to type tool_dependency_definition.'''
+        '''
+        This is step 8 - Change the repository type of the package_emboss_5_0_0 repository to be tool_dependency_definition.
+        '''
+        package_emboss_repository = self.test_db_util.get_repository_by_name_and_owner( package_emboss_repository_name, common.test_user_1_name )
+        self.edit_repository_information( package_emboss_repository, repository_type='tool_dependency_definition' )
+
+    def test_0045_reset_repository_metadata( self ):
+        '''Reset metadata on package_emboss_5_0_0_0470 and package_x11_client_1_5_proto_7_0.'''
+        '''
+        This is step 9 - Reset metadata on the package_emboss_5_0_0 and package_x11_client_1_5_proto_7_0 repositories. They should
+        now have only their tip as the installable revision.
+        '''
+        package_emboss_repository = self.test_db_util.get_repository_by_name_and_owner( package_emboss_repository_name, common.test_user_1_name )
+        package_x11_repository = self.test_db_util.get_repository_by_name_and_owner( package_libx11_repository_name, common.test_user_1_name )
+        self.reset_repository_metadata( package_emboss_repository )
+        self.reset_repository_metadata( package_x11_repository )
+        assert len( package_emboss_repository.metadata_revisions ) == 1, 'Repository package_emboss_5_0_0 has %d installable revisions, expected 1.' % \
+            len( package_emboss_repository.metadata_revisions )
+        assert len( package_x11_repository.metadata_revisions ) == 1, 'Repository package_x11_client_1_5_proto_7_0 has %d installable revisions, expected 1.' % \
+            len( package_x11_repository.metadata_revisions )
+
+    def test_0050_reset_emboss_5_metadata( self ):
+        '''Reset metadata on emboss_5.'''
+        '''
+        This is step 10 - Reset metadata on the emboss_5 repository. It should now have only its tip as the installable revision.
+        '''
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        self.reset_repository_metadata( emboss_repository )
+        assert len( emboss_repository.metadata_revisions ) == 1, 'Repository emboss_5 has %d installable revisions, expected 1.' % \
+            len( emboss_repository.metadata_revisions )
diff --git a/test/shed_functional/functional/test_0480_tool_dependency_xml_verification.py b/test/shed_functional/functional/test_0480_tool_dependency_xml_verification.py
new file mode 100644
index 0000000..015f175
--- /dev/null
+++ b/test/shed_functional/functional/test_0480_tool_dependency_xml_verification.py
@@ -0,0 +1,71 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+category_name = 'Test 0480 Tool dependency definition validation'
+category_description = 'Test script 0480 for validating tool dependency definitions.'
+repository_name = 'package_invalid_tool_dependency_xml_1_0_0'
+repository_description = "Contains a tool dependency definition that should return an error."
+repository_long_description = "This repository is in the test suite 0480"
+
+'''
+1. Create a repository named package_invalid_tool_dependency_xml_1_0_0.
+2. Upload a tool_dependencies.xml file to the repository with no <actions> tags around the <action> tags.
+3. Verify that an error message is displayed and that the upload did not create a new changeset.
+'''
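+# For reference, a rough sketch of the malformed definition this test exercises
+# (hypothetical contents; the real fixture is 0480_files/tool_dependencies.xml):
+#
+#   <tool_dependency>
+#       <package name="example" version="1.0">
+#           <install version="1.0">
+#               <action type="download_by_url">http://example.org/example.tar.gz</action>
+#           </install>
+#       </package>
+#   </tool_dependency>
+#
+# The <action> tag has no enclosing <actions> tag set, so the tool shed's validator
+# should reject the definition without creating a new changeset.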
+
+
+class TestDependencyDefinitionValidation( ShedTwillTestCase ):
+    '''Test the tool shed's tool dependency XML validation.'''
+
+    def test_0000_initiate_users_and_category( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+
+    def test_0005_create_tool_dependency_repository( self ):
+        '''Create and populate package_invalid_tool_dependency_xml_1_0_0.'''
+        '''
+        This is step 1 - Create a repository package_invalid_tool_dependency_xml_1_0_0.
+
+        Create a repository named package_invalid_tool_dependency_xml_1_0_0 that will contain only a single file named tool_dependencies.xml.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='0480_files/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=False,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_invalid_tool_dependency_xml_1_0_0 with an improperly defined tool dependency.',
+                          strings_displayed=[ 'package cannot be installed because', 'missing either an <actions> tag set' ],
+                          strings_not_displayed=[] )
+
+    def test_0010_verify_tool_dependency_repository( self ):
+        '''Verify package_invalid_tool_dependency_xml_1_0_0.'''
+        '''
+        This is step 3 - Verify repository. The uploaded tool dependency XML should not have resulted in a new changeset.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        assert self.repository_is_new( repository ), 'Uploading an incorrectly defined tool_dependencies.xml resulted in a changeset being generated.'
diff --git a/test/shed_functional/functional/test_0490_export_import_repositories.py b/test/shed_functional/functional/test_0490_export_import_repositories.py
new file mode 100644
index 0000000..9d5486f
--- /dev/null
+++ b/test/shed_functional/functional/test_0490_export_import_repositories.py
@@ -0,0 +1,90 @@
+import logging
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+repository_name = 'filtering_0490'
+repository_description = "Galaxy's filtering tool for test 0490"
+repository_long_description = "Long description of Galaxy's filtering tool for test 0490"
+
+category_name = 'Test 0490 Repository Import Export'
+category_description = 'Test script 0490 for importing and exporting single repositories.'
+
+'''
+First test:
+
+1. Import a repository capsule containing a repository with no dependencies, e.g. filter1. Check that the repository
+   to be imported is not marked as preexisting: the string ' Exists' should not be displayed, but '<b>Exists</b>'
+   should be.
+2. Export that repository to a temporary location.
+3. Check the capsule's contents; verify that changeset revision and tool shed are not set.
+4. Import the capsule again. Check that the repository to be imported is marked as preexisting: the string ' Exists'
+   should be displayed, as should '<b>Exists</b>'.
+
+'''
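+# For reference, an exported capsule is a gzipped tarball holding a manifest plus one
+# archive per exported repository. A rough, hypothetical sketch of a manifest entry:
+#
+#   <repository name="filtering_0490" type="unrestricted" username="user1">
+#       <archive>filtering_0490.tar.gz</archive>
+#   </repository>
+#
+# The absence of toolshed and changeset_revision attributes is what
+# verify_capsule_contents checks in test_0015 below.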
+
+capsule_filepath = ''
+
+
+class TestExportImportRepository( ShedTwillTestCase ):
+    '''Test exporting and importing repositories.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_import_repository_capsule( self ):
+        """Import the filter_0490 repository capsule."""
+        '''
+        This is step 1 - Import a repository capsule containing a repository with no dependencies, e.g. filter1.
+        Check that the repository to be imported is not marked as preexisting. The string ' Exists' should not
+        be displayed, but '<b>Exists</b>' should.
+        '''
+        self.login( email=common.admin_email, username=common.admin_username )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        self.import_capsule( self.get_filename( 'repository_capsules/0490_filtering.tar.gz' ),
+                             strings_displayed=[ repository_name, '<b>Exists' ],
+                             strings_not_displayed=[ ' Exists' ],
+                             strings_displayed_after_submit=[ 'Repository <b>filtering_0490</b> has been created.' ],
+                             strings_not_displayed_after_submit=[ 'Import not necessary' ] )
+
+    def test_0010_export_repository_capsule( self ):
+        '''Export the repository that was imported in the previous step.'''
+        '''
+        This is step 2 - Export that repository.
+        Export the repository to a temporary location.
+        '''
+        global capsule_filepath
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        capsule_filepath = self.export_capsule( repository, aggressive=True, includes_dependencies=False )
+        assert os.path.exists( capsule_filepath ), 'Exported repository capsule file path %s not found.' % capsule_filepath
+
+    def test_0015_verify_exported_capsule( self ):
+        '''Verify the exported capsule contents.'''
+        '''
+        This is step 3 - Check the capsule's contents, verify that changeset revision and tool shed are not set.
+        Extract the exported capsule tarball to a temporary path, and confirm that the manifest does not specify
+        a tool shed or changeset revision.
+        '''
+        global capsule_filepath
+        self.verify_capsule_contents( capsule_filepath, owner=common.test_user_1_name )
+
+    def test_0020_import_repository_capsule( self ):
+        '''Import the exported repository capsule.'''
+        '''
+        This is step 4 - Import the capsule again. Check that the repository to be imported is marked as preexisting.
+        The string ' Exists' should be displayed, as should '<b>Exists</b>'.
+        '''
+        global capsule_filepath
+        self.import_capsule( capsule_filepath,
+                             strings_displayed=[ repository_name, ' Exists', self.url ],
+                             strings_not_displayed_after_submit=[ 'Repository <b>filtering_0490</b> has been created.' ],
+                             strings_displayed_after_submit=[ 'Import not necessary', 'Exists' ] )
diff --git a/test/shed_functional/functional/test_0500_export_repository_simple_dependency.py b/test/shed_functional/functional/test_0500_export_repository_simple_dependency.py
new file mode 100644
index 0000000..9dea13d
--- /dev/null
+++ b/test/shed_functional/functional/test_0500_export_repository_simple_dependency.py
@@ -0,0 +1,89 @@
+import logging
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+emboss_repository_name = 'emboss_5_0500'
+emboss_repository_description = "Galaxy wrappers for Emboss version 5.0.0 tools"
+emboss_repository_long_description = "Galaxy wrappers for Emboss version 5.0.0 tools"
+datatypes_repository_name = 'emboss_datatypes_0500'
+datatypes_repository_description = 'Galaxy applicable data formats used by Emboss tools.'
+datatypes_repository_long_description = 'Galaxy applicable data formats used by Emboss tools.  This repository contains no tools.'
+
+category_name = 'Test 0500 Repository Dependency Import Export'
+category_description = 'Test script 0500 for importing and exporting repositories with simple repository dependencies.'
+
+'''
+1. Import the 0500_emboss_5 repository capsule, which contains emboss_5_0500 and its simple repository dependency
+   emboss_datatypes_0500. Check that the repositories to be imported are not marked as preexisting.
+2. Export the emboss_5_0500 repository with its dependencies to a temporary location.
+3. Check the capsule's contents; verify that changeset revision and tool shed are not set.
+4. Import the capsule again and check that each repository to be imported is marked as preexisting.
+'''
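+# For reference, a simple repository dependency is declared in a file named
+# repository_dependencies.xml with roughly this shape (hypothetical values):
+#
+#   <repositories description="Emboss datatypes">
+#       <repository name="emboss_datatypes_0500" owner="user1" />
+#   </repositories>
+#
+# As in the complex-dependency case, omitted toolshed and changeset_revision
+# attributes are auto-populated by the tool shed at upload time.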
+
+capsule_filepath = ''
+
+
+class TestExportImportRepository( ShedTwillTestCase ):
+    '''Test exporting and importing repositories.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category_and_repository( self ):
+        """Create categories for this test suite"""
+        self.login( email=common.admin_email, username=common.admin_username )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        self.import_capsule( self.get_filename( 'repository_capsules/0500_emboss_5.tar.gz' ),
+                             strings_displayed=[ emboss_repository_name, datatypes_repository_name, '<b>Exists' ],
+                             strings_not_displayed=[ ' Exists' ],
+                             strings_displayed_after_submit=[ 'Repository <b>emboss_5_0500</b> has been created.',
+                                                              'Repository <b>emboss_datatypes_0500</b> has been created.' ],
+                             strings_not_displayed_after_submit=[ 'Import not necessary' ] )
+
+    def test_0010_export_repository_capsule( self ):
+        '''Export the repository that was imported in the previous step.'''
+        '''
+        This is step 2 - Export that repository.
+        Export the repository to a temporary location.
+        '''
+        global capsule_filepath
+        repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        capsule_filepath = self.export_capsule( repository, aggressive=True, includes_dependencies=True )
+        assert os.path.exists( capsule_filepath ), 'Exported repository capsule file path %s not found.' % capsule_filepath
+
+    def test_0015_verify_exported_capsule( self ):
+        '''Verify the exported capsule contents.'''
+        '''
+        This is step 3 - Check the capsule's contents, verify that changeset revision and tool shed are not set.
+        Extract the exported capsule tarball to a temporary path, and confirm that the manifest does not specify
+        a tool shed or changeset revision.
+        '''
+        global capsule_filepath
+        self.verify_capsule_contents( capsule_filepath, owner=common.test_user_1_name )
+
+    def test_0020_import_repository_capsule( self ):
+        '''Import the exported repository capsule.'''
+        '''
+        This is step 4 - Import the capsule again. Check that the repository to be imported is marked as preexisting.
+        The string ' Exists' should be displayed, as should '<b>Exists</b>'.
+        '''
+        global capsule_filepath
+        self.import_capsule( capsule_filepath,
+                             strings_displayed=[ emboss_repository_name, datatypes_repository_name, ' Exists', self.url ],
+                             strings_not_displayed_after_submit=[ 'Repository <b>emboss_5_0500</b> has been created.',
+                                                                  'Repository <b>emboss_datatypes_0500</b> has been created.' ],
+                             strings_displayed_after_submit=[ 'Import not necessary', ' Exists' ] )
diff --git a/test/shed_functional/functional/test_0510_export_import_repository_complex_dependencies.py b/test/shed_functional/functional/test_0510_export_import_repository_complex_dependencies.py
new file mode 100644
index 0000000..e4eed4c
--- /dev/null
+++ b/test/shed_functional/functional/test_0510_export_import_repository_complex_dependencies.py
@@ -0,0 +1,99 @@
+import logging
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+category_name = 'Test 0510 Import Export Complex Dependencies'
+category_description = 'Test script 0510 for importing and exporting repositories with complex repository dependencies.'
+
+'''
+1. Import a repository capsule that defines a complex repository dependency: trans_proteomic_pipeline and its required
+   repositories. Check that the repositories to be imported are not marked as preexisting: the string ' Exists' should
+   not be displayed, but '<b>Exists</b>' should be. Click the import button, verify the resulting page is correct, and
+   verify the dependency structure that has been created.
+2. Export the trans_proteomic_pipeline repository with its dependencies.
+3. Check the capsule's contents; verify that changeset revision and tool shed are not set.
+4. Import the capsule again and check that each repository to be imported is marked as preexisting.
+'''
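+# For reference, the capsule imported below bundles package_trans_proteomic_pipeline_4_6_3
+# together with the package repositories it depends on, directly or indirectly:
+# package_perl_5_18, package_libpng_1_2, package_libgd_2_1 and package_expat_2_1. The
+# exact dependency edges are defined by the dependency files inside the capsule; the
+# assertions below only check that all five repositories are created on first import
+# and recognized as preexisting on the second.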
+
+capsule_filepath = ''
+
+
+class TestExportImportRepository( ShedTwillTestCase ):
+    '''Test exporting and importing repositories with complex dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category_and_repositories( self ):
+        """Create categories for this test suite"""
+        self.login( email=common.admin_email, username=common.admin_username )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        self.import_capsule( self.get_filename( 'repository_capsules/0510_trans_proteomic_pipeline.tar.gz' ),
+                             strings_displayed=[ 'package_trans_proteomic_pipeline_4_6_3',
+                                                 'package_perl_5_18',
+                                                 'package_libpng_1_2',
+                                                 'package_libgd_2_1',
+                                                 'package_expat_2_1',
+                                                 '<b>Exists' ],
+                             strings_not_displayed=[ ' Exists' ],
+                             strings_displayed_after_submit=[ 'Repository <b>package_trans_proteomic_pipeline_4_6_3</b> has been created.',
+                                                              'Repository <b>package_perl_5_18</b> has been created.',
+                                                              'Repository <b>package_libpng_1_2</b> has been created.',
+                                                              'Repository <b>package_libgd_2_1</b> has been created.',
+                                                              'Repository <b>package_expat_2_1</b> has been created.' ],
+                             strings_not_displayed_after_submit=[ 'Import not necessary' ] )
+
+    def test_0010_export_repository_capsule( self ):
+        '''Export the repository that was imported in the previous step.'''
+        '''
+        This is step 2 - Export that repository.
+        Export the repository to a temporary location.
+        '''
+        global capsule_filepath
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'package_trans_proteomic_pipeline_4_6_3', common.test_user_1_name )
+        capsule_filepath = self.export_capsule( repository )
+        assert os.path.exists( capsule_filepath ), 'Exported repository capsule file path %s not found.' % capsule_filepath
+
+    def test_0015_verify_exported_capsule( self ):
+        '''Verify the exported capsule contents.'''
+        '''
+        This is step 3 - Check the capsule's contents, verify that changeset revision and tool shed are not set.
+        Extract the exported capsule tarball to a temporary path, and confirm that the manifest does not specify
+        a tool shed or changeset revision.
+        '''
+        global capsule_filepath
+        self.verify_capsule_contents( capsule_filepath, owner=common.test_user_1_name )
+
+    def test_0020_import_repository_capsule( self ):
+        '''Import the exported repository capsule.'''
+        '''
+        This is step 4 - Import the capsule again. Check that the repository to be imported is marked as preexisting.
+        The string ' Exists' should be displayed, as should '<b>Exists</b>'.
+        '''
+        global capsule_filepath
+        self.import_capsule( capsule_filepath,
+                             strings_displayed=[ 'package_trans_proteomic_pipeline_4_6_3',
+                                                 'package_perl_5_18',
+                                                 'package_libpng_1_2',
+                                                 'package_libgd_2_1',
+                                                 'package_expat_2_1',
+                                                 ' Exists',
+                                                 self.url ],
+                             strings_not_displayed_after_submit=[ 'Repository <b>package_trans_proteomic_pipeline_4_6_3</b> has been created.',
+                                                                  'Repository <b>package_perl_5_18</b> has been created.',
+                                                                  'Repository <b>package_libpng_1_2</b> has been created.',
+                                                                  'Repository <b>package_libgd_2_1</b> has been created.',
+                                                                  'Repository <b>package_expat_2_1</b> has been created.' ],
+                             strings_displayed_after_submit=[ 'Import not necessary', ' Exists' ] )
diff --git a/test/shed_functional/functional/test_0520_import_export_circular_dependencies.py b/test/shed_functional/functional/test_0520_import_export_circular_dependencies.py
new file mode 100644
index 0000000..db64168
--- /dev/null
+++ b/test/shed_functional/functional/test_0520_import_export_circular_dependencies.py
@@ -0,0 +1,81 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+filtering_repository_name = 'filtering_0520'
+filtering_repository_description = "Galaxy's filtering tool for test 0520"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool for test 0520"
+freebayes_repository_name = 'freebayes_0520'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool for test 0520"
+
+category_name = 'Test 0520 Circular Dependency Import Export'
+category_description = 'Test script 0520 for importing and exporting repositories with circular dependencies.'
+
+'''
+1) Upload a capsule containing 2 repositories that define simple repository dependencies on each other, resulting in
+   circular dependencies within the tool shed.
+2) Make sure each repository contains an invalid repository dependency after the capsule has been uploaded (they should
+   be invalid because their toolshed and changeset_revision attributes could not be auto-populated).
+3) Make sure each repository's repository_metadata record has the downloadable column marked as False.
+'''
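+# A rough sketch of the two repository_dependencies.xml files that create the circular
+# dependency (hypothetical contents; the real files ship inside the capsule):
+#
+#   In filtering_0520:   <repository name="freebayes_0520" owner="user1" />
+#   In freebayes_0520:   <repository name="filtering_0520" owner="user1" />
+#
+# Since neither repository exists in the tool shed when the other's definition is
+# processed, the toolshed and changeset_revision attributes cannot be auto-populated,
+# leaving both dependency definitions invalid.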
+
+
+class TestExportImportRepository( ShedTwillTestCase ):
+    '''Test exporting and importing repositories.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_import_repository_capsule( self ):
+        """Import the filter_0520 repository capsule with dependencies."""
+        '''
+        This is step 1 - Upload a capsule containing 2 repositories that define simple repository dependencies on each other,
+        resulting in circular dependencies within the tool shed.
+        '''
+        self.login( email=common.admin_email, username=common.admin_username )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        self.import_capsule( self.get_filename( 'repository_capsules/0520_filtering.tar.gz' ),
+                             strings_displayed=[ filtering_repository_name, freebayes_repository_name, '<b>Exists' ],
+                             strings_not_displayed=[ ' Exists' ],
+                             strings_displayed_after_submit=[ 'Repository <b>filtering_0520</b> has been created.' ],
+                             strings_not_displayed_after_submit=[ 'Import not necessary' ] )
+
+    def test_0010_verify_invalid_dependency( self ):
+        '''Verify that the repository dependencies are marked as invalid.'''
+        '''
+        This is step 2 - Make sure each repository contains an invalid repository dependency after the capsule has been uploaded
+        (they should be invalid because their toolshed and changeset_revision attributes could not be auto-populated).
+        '''
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'Ignoring repository dependency definition', self.url, 'filtering_0520', 'name is invalid' ]
+        self.display_manage_repository_page( freebayes_repository,
+                                             strings_displayed=strings_displayed,
+                                             strings_not_displayed=[ 'Repository dependencies' ] )
+        self.display_manage_repository_page( filtering_repository,
+                                             strings_displayed=[ 'Repository dependencies', self.get_repository_tip( freebayes_repository ) ],
+                                             strings_not_displayed=[] )
+
+    def test_0015_verify_repository_metadata( self ):
+        '''Verify that the repositories are not marked as downloadable.'''
+        '''
+        This is step 3 - Make sure each repository's repository_metadata record has the downloadable column marked as False.
+        '''
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_metadata = self.get_repository_metadata_by_changeset_revision( freebayes_repository, self.get_repository_tip( freebayes_repository ) )
+        filtering_metadata = self.get_repository_metadata_by_changeset_revision( filtering_repository, self.get_repository_tip( filtering_repository ) )
+        assert not filtering_metadata.downloadable, 'Repository filtering_0520 is incorrectly marked as downloadable.'
+        assert not freebayes_metadata.downloadable, 'Repository freebayes_0520 is incorrectly marked as downloadable.'
diff --git a/test/shed_functional/functional/test_0530_repository_admin_feature.py b/test/shed_functional/functional/test_0530_repository_admin_feature.py
new file mode 100644
index 0000000..89e2bad
--- /dev/null
+++ b/test/shed_functional/functional/test_0530_repository_admin_feature.py
@@ -0,0 +1,158 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+repository_name = 'filtering_0530'
+repository_description = 'Filtering repository for test 0530'
+repository_long_description = 'This is the filtering repository for test 0530.'
+category_name = 'Test 0530 Repository Admin Role'
+category_description = 'Verify the functionality of the code that handles the repository admin role.'
+
+'''
+1. Create new repository as user user1.
+
+2. Check to make sure a new role was created named <repo_name>_user1_admin.
+
+3. Check to make sure a new repository_role_association record was created with appropriate repository id and role id.
+
+4. Change the name of the repository created in step 1 - this can be done as long as the repository has not been installed or cloned.
+
+5. Make sure the name of the role initially inspected in Step 2 has been changed to reflect the new repository name from Step 4.
+
+6. Log into the Tool Shed as a user that is not the repository owner (e.g., user2) and make sure the repository name
+   and description cannot be changed.
+
+7. As user user1, add user user2 as a repository admin user.
+
+8. Log into the Tool Shed as user user2 and make sure the repository name and description can now be changed.
+'''
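+# A minimal sketch of the naming convention under test (an assumption drawn from
+# steps 2 and 5 above, not a tool shed API):
+#
+#   def expected_admin_role_name( repository_name, owner ):
+#       return '%s_%s_admin' % ( repository_name, owner )
+#
+#   expected_admin_role_name( 'filtering_0530', 'user1' )  # -> 'filtering_0530_user1_admin'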
+
+
+class TestRepositoryAdminRole( ShedTwillTestCase ):
+    '''Verify that the code correctly handles the repository admin role.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_filtering_repository( self ):
+        """Create and populate the filtering_0530 repository."""
+        '''
+        This is step 1 - Create new repository as user user1.
+        '''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 1.1.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_verify_repository_admin_role_exists( self ):
+        '''Verify that the role filtering_0530_user1_admin exists.'''
+        '''
+        This is step 2 - Check to make sure a new role was created named filtering_0530_user1_admin.
+        '''
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        repository_admin_role = self.test_db_util.get_role( test_user_1, 'filtering_0530_user1_admin' )
+        assert repository_admin_role is not None, 'Admin role for filtering_0530 was not created.'
+
+    def test_0015_verify_repository_role_association( self ):
+        '''Verify that the filtering_0530_user1_admin role is associated with the filtering_0530 repository.'''
+        '''
+        This is step 3 - Check to make sure a new repository_role_association record was created with appropriate repository id and role id.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        repository_admin_role = self.test_db_util.get_role( test_user_1, 'filtering_0530_user1_admin' )
+        repository_role_association = self.test_db_util.get_repository_role_association( repository.id, repository_admin_role.id )
+        assert repository_role_association is not None, 'Repository filtering_0530 is not associated with the filtering_0530_user1_admin role.'
+
+    def test_0020_rename_repository( self ):
+        '''Rename the repository to renamed_filtering_0530.'''
+        '''
+        This is step 4 - Change the name of the repository created in step 1 - this can be done as long as the repository has not
+        been installed or cloned.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.edit_repository_information( repository, revert=False, repo_name='renamed_filtering_0530' )
+        self.test_db_util.refresh( repository )
+        assert repository.name == 'renamed_filtering_0530', 'Repository was not renamed to renamed_filtering_0530.'
+
+    def test_0025_check_admin_role_name( self ):
+        '''Check that a role renamed_filtering_0530_user1_admin now exists, and filtering_0530_user1_admin does not.'''
+        '''
+        This is step 5 - Make sure the name of the role initially inspected in Step 2 has been changed to reflect the
+        new repository name from Step 4.
+        '''
+        # This check is currently skipped; remove the return below to re-enable it.
+        return
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        old_repository_admin_role = self.test_db_util.get_role( test_user_1, 'filtering_0530_%s_admin' % test_user_1.username )
+        assert old_repository_admin_role is None, 'Admin role filtering_0530_user1_admin incorrectly exists.'
+        new_repository_admin_role = self.test_db_util.get_role( test_user_1, 'renamed_filtering_0530_%s_admin' % test_user_1.username )
+        assert new_repository_admin_role is not None, 'Admin role renamed_filtering_0530_user1_admin does not exist.'
+
+    def test_0030_verify_access_denied( self ):
+        '''Make sure a non-admin user can't modify the repository.'''
+        '''
+        This is step 6 - Log into the Tool Shed as a user that is not the repository owner (e.g., user2) and make sure the repository
+        name and description cannot be changed.
+        '''
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'renamed_filtering_0530', common.test_user_1_name )
+        strings_not_displayed = [ 'Manage repository' ]
+        strings_displayed = [ 'View repository' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.submit_form( button='edit_repository_button', description='This description has been modified.' )
+        strings_displayed = [ 'You are not the owner of this repository, so you cannot administer it.' ]
+        strings_not_displayed = [ 'The repository information has been updated.' ]
+        self.check_for_strings( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0035_grant_admin_role( self ):
+        '''
+        Grant the repository admin role to user2.
+
+        This is step 7 - As user user1, add user user2 as a repository admin user.
+        '''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'renamed_filtering_0530', common.test_user_1_name )
+        self.assign_admin_role( repository, test_user_2 )
+
+    def test_0040_rename_repository_as_repository_admin( self ):
+        '''
+        Rename the repository as user2.
+
+        This is step 8 - Log into the Tool Shed as user user2 and make sure the repository name and description can now be changed.
+        '''
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( 'renamed_filtering_0530', common.test_user_1_name )
+        self.edit_repository_information( repository, revert=False, repo_name='filtering_0530' )
+        self.test_db_util.refresh( repository )
+        assert repository.name == 'filtering_0530', 'User with admin role failed to rename repository.'
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        old_repository_admin_role = self.test_db_util.get_role( test_user_1, 'renamed_filtering_0530_user1_admin' )
+        assert old_repository_admin_role is None, 'Admin role renamed_filtering_0530_user1_admin incorrectly exists.'
+        new_repository_admin_role = self.test_db_util.get_role( test_user_1, 'filtering_0530_user1_admin' )
+        assert new_repository_admin_role is not None, 'Admin role filtering_0530_user1_admin does not exist.'
diff --git a/test/shed_functional/functional/test_0540_get_all_metadata_from_api.py b/test/shed_functional/functional/test_0540_get_all_metadata_from_api.py
new file mode 100644
index 0000000..b4d94b5
--- /dev/null
+++ b/test/shed_functional/functional/test_0540_get_all_metadata_from_api.py
@@ -0,0 +1,147 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+repositories = dict( column=dict( name='column_maker_0540',
+                                  description='Description for column_maker_0540',
+                                  long_description='Long description for column_maker_0540' ),
+                     convert=dict( name='convert_chars_0540',
+                                   description='Description for convert_chars_0540',
+                                   long_description='Long description for convert_chars_0540' ),
+                     bwa=dict( name='package_bwa_0_5_9_0540',
+                               description='Description for package_bwa_0_5_9_0540',
+                               long_description='Long description for package_bwa_0_5_9_0540' ) )
+
+category_name = 'Test 0540'
+category_description = 'Verify API endpoint to retrieve all metadata'
+
+'''
+1. Create repository package_bwa_0_5_9_0540 as user user1.
+
+2. Create repository convert_chars_0540 with a tool dependency on package_bwa_0_5_9_0540.
+
+3. Create repository column_maker_0540.
+
+4. Create a dependency on convert_chars_0540 for column_maker_0540.
+
+5. Load /api/repositories/{column_maker_0540.id}/metadata and verify contents.
+'''
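+# A minimal sketch of the API call exercised in step 5, assuming a running Tool
+# Shed at `shed_url`, the `requests` library, and an encoded repository id
+# (the names here are illustrative, not part of the test framework):
+#
+#     import requests
+#     url = '%s/api/repositories/%s/metadata' % ( shed_url, encoded_repository_id )
+#     metadata = requests.get( url ).json()
+#     # The dependency chain should surface the other repository names somewhere
+#     # in the serialized payload.
+#     assert 'convert_chars_0540' in str( metadata )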
+
+
+class TestGetAllMetadata( ShedTwillTestCase ):
+    '''Verify that the API correctly returns all metadata for a repository and its dependency chain.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_bwa_package_repository( self ):
+        '''Create and populate package_bwa_0_5_9_0540.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        # Create a repository named package_bwa_0_5_9_0540 owned by user1.
+        repository = self.get_or_create_repository( name=repositories['bwa']['name'],
+                                                    description=repositories['bwa']['description'],
+                                                    long_description=repositories['bwa']['long_description'],
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        assert repository is not None, 'Error creating repository %s' % repositories['bwa']['name']
+        self.upload_file( repository,
+                          filename='0540_files/package_bwa/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded tool_dependencies.xml.',
+                          strings_displayed=[ 'This repository currently contains a single file named <b>tool_dependencies.xml</b>' ],
+                          strings_not_displayed=[] )
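+        # The uploaded file is expected to follow the tool_dependencies.xml
+        # schema; a minimal sketch (illustrative, not the fixture's actual contents):
+        #
+        #     <?xml version="1.0"?>
+        #     <tool_dependency>
+        #         <package name="bwa" version="0.5.9">
+        #             <install version="1.0">
+        #                 <actions>...</actions>
+        #             </install>
+        #         </package>
+        #     </tool_dependency>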
+        # Visit the manage repository page for package_bwa_0_5_9_0540.
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'will not be', 'to this repository' ] )
+
+    def test_0010_create_convert_repository( self ):
+        '''Create the convert_chars_0540 repository.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=repositories['convert']['name'],
+                                                    description=repositories['convert']['description'],
+                                                    long_description=repositories['convert']['long_description'],
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        assert repository is not None, 'Error creating repository %s' % repositories['convert']['name']
+        self.upload_file( repository,
+                          filename='0540_files/convert_chars/convert_chars.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded convert_chars 1.0.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        # Add a dependency on BWA.
+        self.upload_file( repository,
+                          filename='0540_files/convert_chars/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded tool_dependencies.xml.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        # Visit the manage repository page for convert_chars_0540.
+        self.display_manage_repository_page( repository, strings_displayed=[ repositories['bwa']['name'] ] )
+
+    def test_0015_create_column_repository( self ):
+        '''Create the column_maker_0540 repository.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=repositories['column']['name'],
+                                                    description=repositories['column']['description'],
+                                                    long_description=repositories['column']['long_description'],
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        assert repository is not None, 'Error creating repository %s' % repositories['column']['name']
+        self.upload_file( repository,
+                          filename='0540_files/column_maker/column_maker.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded column maker 1.0.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_create_repository_dependency( self ):
+        '''Make column_maker depend on convert_chars.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repositories[ 'column' ][ 'name' ],
+                                                                         common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='0540_files/column_maker/repository_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded column maker 2.0.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
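+        # repository_dependencies.xml is expected to reference the convert_chars
+        # repository by tool shed, name, owner and changeset revision; a sketch
+        # (attribute values are illustrative):
+        #
+        #     <?xml version="1.0"?>
+        #     <repositories>
+        #         <repository toolshed="http://localhost:9009"
+        #                     name="convert_chars_0540"
+        #                     owner="user1"
+        #                     changeset_revision="<tip revision>" />
+        #     </repositories>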
+
+    def test_0025_verify_dependency_json( self ):
+        '''
+        Load the API endpoint to retrieve all repository metadata and verify
+        that all three repository names are displayed.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repositories[ 'column' ][ 'name' ],
+                                                                         common.test_user_1_name )
+        strings_displayed = [ repositories[ 'column' ][ 'name' ],
+                              repositories[ 'convert' ][ 'name' ],
+                              repositories[ 'bwa' ][ 'name' ] ]
+        self.fetch_repository_metadata( repository, strings_displayed=strings_displayed, strings_not_displayed=None )
diff --git a/test/shed_functional/functional/test_0550_metadata_updated_dependencies.py b/test/shed_functional/functional/test_0550_metadata_updated_dependencies.py
new file mode 100644
index 0000000..7313b4e
--- /dev/null
+++ b/test/shed_functional/functional/test_0550_metadata_updated_dependencies.py
@@ -0,0 +1,157 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+repositories = dict( freebayes=dict( name='package_freebayes_0550',
+                                     description='Description for package_freebayes_0550',
+                                     long_description='Long description for package_freebayes_0550' ),
+                     samtools=dict( name='package_samtools_0550',
+                                    description='Description for package_samtools_0550',
+                                    long_description='Long description for package_samtools_0550' ),
+                     filtering=dict( name='filtering_0550',
+                                     description='Description for filtering_0550',
+                                     long_description='Long description for filtering_0550' ) )
+
+category_name = 'Test 0550'
+category_description = 'Verify metadata updates'
+
+'''
+1. Create repository package_freebayes_0550.
+
+2. Create repository package_samtools_0550.
+
+3. Create repository filtering_0550.
+
+4. Create dependency on package_freebayes_0550 for filtering_0550.
+
+5. Create dependency on package_samtools_0550 for filtering_0550.
+
+6. Update package_freebayes_0550 and package_samtools_0550.
+
+7. Load /api/repositories/{filtering_0550}.id/metadata and verify contents.
+'''
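+# Note that each upload in step 6 creates a new changeset revision in the
+# package repositories, so step 7 verifies that the metadata for filtering_0550
+# still resolves its dependencies after those revisions change.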
+
+
+class TestGetUpdatedMetadata( ShedTwillTestCase ):
+    '''Verify that updated repositories still have correct dependency links.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_freebayes_repository( self ):
+        '''Create and populate package_freebayes_0550.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        # Create a repository named package_freebayes_0550 owned by user1.
+        freebayes = self.get_or_create_repository( name=repositories['freebayes']['name'],
+                                                   description=repositories['freebayes']['description'],
+                                                   long_description=repositories['freebayes']['long_description'],
+                                                   owner=common.test_user_1_name,
+                                                   category_id=self.security.encode_id( category.id ),
+                                                   strings_displayed=[] )
+        assert freebayes is not None, 'Error creating repository %s' % repositories['freebayes']['name']
+        self.upload_file( freebayes,
+                          filename='0550_files/package_freebayes_1_0550.tgz',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded.',
+                          strings_displayed=[ 'has been successfully uploaded to the repository', 'contains a single file' ],
+                          strings_not_displayed=None )
+        # Visit the manage repository page for package_freebayes_0550.
+        self.display_manage_repository_page( freebayes, strings_displayed=[ 'Tool dependencies', 'will not be', 'to this repository' ] )
+
+    def test_0010_create_samtools_repository( self ):
+        '''Create and populate the package_samtools_0550 repository.'''
+        category = self.create_category( name=category_name, description=category_description )
+        samtools = self.get_or_create_repository( name=repositories['samtools']['name'],
+                                                  description=repositories['samtools']['description'],
+                                                  long_description=repositories['samtools']['long_description'],
+                                                  owner=common.test_user_1_name,
+                                                  category_id=self.security.encode_id( category.id ),
+                                                  strings_displayed=[] )
+        assert samtools is not None, 'Error creating repository %s' % repositories['samtools']['name']
+        self.upload_file( samtools,
+                          filename='0550_files/package_samtools_1_0550.tgz',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded samtools 1.0.',
+                          strings_displayed=[ 'has been successfully uncompressed and uploaded to the repository' ],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_filtering_repository( self ):
+        '''Create the filtering_0550 repository.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=repositories['filtering']['name'],
+                                                    description=repositories['filtering']['description'],
+                                                    long_description=repositories['filtering']['long_description'],
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        assert repository is not None, 'Error creating repository %s' % repositories['filtering']['name']
+        self.upload_file( repository,
+                          filename='0550_files/filtering_1.0.tgz',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 1.0.',
+                          strings_displayed=[ 'has been successfully uncompressed and uploaded to the repository' ],
+                          strings_not_displayed=[] )
+
+    def test_0020_check_repository_dependency( self ):
+        '''Verify that filtering_0550 depends on package_samtools_0550 and package_freebayes_0550.'''
+        freebayes = self.test_db_util.get_repository_by_name_and_owner( repositories[ 'freebayes' ][ 'name' ],
+                                                                        common.test_user_1_name )
+        samtools = self.test_db_util.get_repository_by_name_and_owner( repositories[ 'samtools' ][ 'name' ],
+                                                                      common.test_user_1_name )
+        filtering = self.test_db_util.get_repository_by_name_and_owner( repositories[ 'filtering' ][ 'name' ],
+                                                                        common.test_user_1_name )
+        strings_displayed = [ self.security.encode_id( freebayes.id ), self.security.encode_id( samtools.id ) ]
+        self.display_manage_repository_page( filtering, strings_displayed=strings_displayed )
+
+    def test_0025_update_dependent_repositories( self ):
+        '''
+        Update freebayes and samtools, then load the metadata API endpoint again
+        and verify that all three repository names are still present.
+        '''
+        freebayes = self.test_db_util.get_repository_by_name_and_owner( repositories[ 'freebayes' ][ 'name' ],
+                                                                        common.test_user_1_name )
+        samtools = self.test_db_util.get_repository_by_name_and_owner( repositories[ 'samtools' ][ 'name' ],
+                                                                      common.test_user_1_name )
+        filtering = self.test_db_util.get_repository_by_name_and_owner( repositories[ 'filtering' ][ 'name' ],
+                                                                        common.test_user_1_name )
+        self.upload_file( freebayes,
+                          filename='0550_files/package_freebayes_2_0550.tgz',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded freebayes 2.0.',
+                          strings_displayed=[ 'has been successfully uncompressed and uploaded to the repository' ],
+                          strings_not_displayed=[] )
+        self.upload_file( samtools,
+                          filename='0550_files/package_samtools_2_0550.tgz',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded samtools 2.0.',
+                          strings_displayed=[ 'has been successfully uncompressed and uploaded to the repository' ],
+                          strings_not_displayed=[] )
+        strings_displayed = [ repositories[ 'freebayes' ][ 'name' ],
+                              repositories[ 'samtools' ][ 'name' ],
+                              repositories[ 'filtering' ][ 'name' ] ]
+        self.fetch_repository_metadata( filtering, strings_displayed=strings_displayed, strings_not_displayed=None )
diff --git a/test/shed_functional/functional/test_1000_install_basic_repository.py b/test/shed_functional/functional/test_1000_install_basic_repository.py
new file mode 100644
index 0000000..443a293
--- /dev/null
+++ b/test/shed_functional/functional/test_1000_install_basic_repository.py
@@ -0,0 +1,123 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+
+class BasicToolShedFeatures( ShedTwillTestCase ):
+    '''Test installing a basic repository.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 0000 category and upload the filtering repository to it, if necessary.'''
+        self.login( email=common.admin_email, username=common.admin_username )
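+        # Both 0000 categories are created here; the one created last
+        # ('...Features 1') is the category the repository is filed under below.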
+        category = self.create_category( name='Test 0000 Basic Repository Features 2', description='Test Description 0000 Basic Repository Features 2' )
+        category = self.create_category( name='Test 0000 Basic Repository Features 1', description='Test Description 0000 Basic Repository Features 1' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name='filtering_0000',
+                                                    description="Galaxy's filtering tool",
+                                                    long_description="Long description of Galaxy's filtering tool",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_0000.txt',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded readme for 1.1.0',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_2.2.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 2.2.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='readme.txt',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded readme for 2.2.0',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_browse_tool_sheds( self ):
+        """Browse the available tool sheds in this Galaxy instance."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.visit_galaxy_url( '/admin_toolshed/browse_tool_sheds' )
+        self.check_page_for_string( 'Embedded tool shed for functional tests' )
+        self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 2' ] )
+
+    def test_0015_browse_test_0000_category( self ):
+        '''Browse the category created in test 0000. It should contain the filtering_0000 repository also created in that test.'''
+        category = self.test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 1' )
+        self.browse_category( category, strings_displayed=[ 'filtering_0000' ] )
+
+    def test_0020_preview_filtering_repository( self ):
+        '''Load the preview page for the filtering_0000 repository in the tool shed.'''
+        self.preview_repository_in_tool_shed( 'filtering_0000', common.test_user_1_name, strings_displayed=[ 'filtering_0000', 'Valid tools' ] )
+
+    def test_0025_install_filtering_repository( self ):
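+        '''Install the filtering_0000 repository into this Galaxy instance.'''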
+        self.install_repository( 'filtering_0000',
+                                 common.test_user_1_name,
+                                 'Test 0000 Basic Repository Features 1',
+                                 new_tool_panel_section_label='test_1000' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+        strings_displayed = [ 'filtering_0000',
+                              "Galaxy's filtering tool",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              str( installed_repository.installed_changeset_revision ) ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+
+    def test_0030_install_filtering_repository_again( self ):
+        '''Attempt to install the already installed filtering repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+        # The page displayed after installation is the AJAX-driven "Monitor installing tool shed repositories" page.  Since the filtering
+        # repository was already installed, nothing will be in the process of being installed, so the grid will not display 'filtering_0000'.
+        post_submit_strings_not_displayed = [ 'filtering_0000' ]
+        self.install_repository( 'filtering_0000',
+                                 common.test_user_1_name,
+                                 'Test 0000 Basic Repository Features 1',
+                                 post_submit_strings_not_displayed=post_submit_strings_not_displayed )
+        strings_displayed = [ 'filtering_0000',
+                              "Galaxy's filtering tool",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              str( installed_repository.installed_changeset_revision ) ]
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+
+    def test_0035_verify_installed_repository_metadata( self ):
+        '''Verify that resetting the metadata on an installed repository does not change the metadata.'''
+        self.verify_installed_repository_metadata_unchanged( 'filtering_0000', common.test_user_1_name )
diff --git a/test/shed_functional/functional/test_1010_install_repository_with_tool_dependencies.py b/test/shed_functional/functional/test_1010_install_repository_with_tool_dependencies.py
new file mode 100644
index 0000000..3b3eb62
--- /dev/null
+++ b/test/shed_functional/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -0,0 +1,133 @@
+import logging
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'freebayes_0010'
+repository_description = "Galaxy's freebayes tool"
+repository_long_description = "Long description of Galaxy's freebayes tool"
+category_name = 'Test 0010 Repository With Tool Dependencies'
+log = logging.getLogger( __name__ )
+
+
+class ToolWithToolDependencies( ShedTwillTestCase ):
+    '''Test installing a repository with tool dependencies.'''
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 0010 category and upload the freebayes repository to it, if necessary.'''
+        category = self.create_category( name=category_name, description='Tests for a repository with tool dependencies.' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded the tool xml.',
+                              strings_displayed=[ 'Metadata may have been defined', 'This file requires an entry', 'tool_data_table_conf' ],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/tool_data_table_conf.xml.sample',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded the tool data table sample file.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/sam_fa_indices.loc.sample',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool data table .loc file.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename=os.path.join( 'freebayes', 'malformed_tool_dependencies', 'tool_dependencies.xml' ),
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded malformed tool dependency XML.',
+                              strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename=os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded invalid tool dependency XML.',
+                              strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool configuration' ],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename=os.path.join( 'freebayes', 'tool_dependencies.xml' ),
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded valid tool dependency XML.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_browse_tool_shed( self ):
+        """Browse the available tool sheds in this Galaxy instance and preview the freebayes tool."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.browse_tool_shed( url=self.url, strings_displayed=[ category_name ] )
+        category = self.test_db_util.get_category_by_name( category_name )
+        self.browse_category( category, strings_displayed=[ repository_name ] )
+        strings_displayed = [ repository_name, 'Valid tools', 'Tool dependencies' ]
+        self.preview_repository_in_tool_shed( repository_name, common.test_user_1_name, strings_displayed=strings_displayed )
+
+    def test_0015_install_freebayes_repository( self ):
+        '''Install the freebayes repository without installing tool dependencies.'''
+        strings_displayed = [ 'Never installed', 'install all needed dependencies', 'install Tool Shed managed', 'tool dependencies' ]
+        strings_displayed.extend( [ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ] )
+        self.install_repository( repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 strings_displayed=strings_displayed,
+                                 install_tool_dependencies=False,
+                                 new_tool_panel_section_label='test_1010' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+        strings_displayed = [ 'freebayes_0010',
+                              "Galaxy's freebayes tool",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        strings_displayed = [ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ]
+        self.check_installed_repository_tool_dependencies( installed_repository, strings_displayed=strings_displayed, dependencies_installed=False )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+
+    def test_0020_verify_installed_repository_metadata( self ):
+        '''Verify that resetting the metadata on an installed repository does not change the metadata.'''
+        self.verify_installed_repository_metadata_unchanged( repository_name, common.test_user_1_name )
+
+    def test_0025_verify_sample_files( self ):
+        '''Verify that the installed repository populated shed_tool_data_table.xml and the sample files.'''
+        self.verify_installed_repository_data_table_entries( required_data_table_entries=[ 'sam_fa_indexes' ] )
diff --git a/test/shed_functional/functional/test_1020_install_repository_with_repository_dependencies.py b/test/shed_functional/functional/test_1020_install_repository_with_repository_dependencies.py
new file mode 100644
index 0000000..e43248b
--- /dev/null
+++ b/test/shed_functional/functional/test_1020_install_repository_with_repository_dependencies.py
@@ -0,0 +1,132 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0020'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+
+emboss_repository_name = 'emboss_0020'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools for test 0020'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools for test 0020'
+
+base_datatypes_count = 0
+repository_datatypes_count = 0
+
+
+class ToolWithRepositoryDependencies( ShedTwillTestCase ):
+    '''Test installing a repository with repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 0020 category and any missing repositories.'''
+        global repository_datatypes_count
+        category = self.create_category( name='Test 0020 Basic Repository Dependencies', description='Test 0020 Basic Repository Dependencies' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        datatypes_repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                              description=datatypes_repository_description,
+                                                              long_description=datatypes_repository_long_description,
+                                                              owner=common.test_user_1_name,
+                                                              category_id=self.security.encode_id( category.id ),
+                                                              strings_displayed=[] )
+        if self.repository_is_new( datatypes_repository ):
+            self.upload_file( datatypes_repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                               description=emboss_repository_description,
+                                                               long_description=emboss_repository_long_description,
+                                                               owner=common.test_user_1_name,
+                                                               category_id=self.security.encode_id( category.id ),
+                                                               strings_displayed=[] )
+            self.upload_file( emboss_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_1020', additional_paths=[ 'emboss', '5' ] )
+            repository_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
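+            # Each tuple is ( tool_shed_url, name, owner, changeset_revision );
+            # create_repository_dependency() serializes these into a
+            # repository_dependencies.xml file and uploads it to emboss_0020.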
+        repository_datatypes_count = int( self.get_repository_datatypes_count( datatypes_repository ) )
+
+    def test_0010_browse_tool_shed( self ):
+        """Browse the available tool sheds in this Galaxy instance and preview the emboss tool."""
+        global base_datatypes_count
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        base_datatypes_count = int( self.get_datatypes_count() )
+        self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0020 Basic Repository Dependencies' ] )
+        category = self.test_db_util.get_category_by_name( 'Test 0020 Basic Repository Dependencies' )
+        self.browse_category( category, strings_displayed=[ 'emboss_0020' ] )
+        self.preview_repository_in_tool_shed( 'emboss_0020', common.test_user_1_name, strings_displayed=[ 'emboss_0020', 'Valid tools' ] )
+
+    def test_0015_install_emboss_repository( self ):
+        '''Install the emboss repository without installing tool dependencies.'''
+        global repository_datatypes_count
+        global base_datatypes_count
+        strings_displayed = [ 'Handle', 'Never installed', 'tool dependencies', 'emboss', '5.0.0', 'package' ]
+        self.install_repository( 'emboss_0020',
+                                 common.test_user_1_name,
+                                 'Test 0020 Basic Repository Dependencies',
+                                 strings_displayed=strings_displayed,
+                                 install_tool_dependencies=False,
+                                 new_tool_panel_section_label='test_1020' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'emboss_0020', common.test_user_1_name )
+        strings_displayed = [ 'emboss_0020',
+                              'Galaxy wrappers for Emboss version 5.0.0 tools for test 0020',
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        strings_displayed = [ 'emboss', '5.0.0', 'package' ]
+        self.check_installed_repository_tool_dependencies( installed_repository, strings_displayed=strings_displayed, dependencies_installed=False )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+        current_datatypes = int( self.get_datatypes_count() )
+        assert current_datatypes > base_datatypes_count, 'Installing emboss did not add new datatypes. Expected more than: %d. Found: %d' % \
+            ( base_datatypes_count, current_datatypes )
+
+    def test_0020_verify_installed_repository_metadata( self ):
+        '''Verify that resetting the metadata on an installed repository does not change the metadata.'''
+        self.verify_installed_repository_metadata_unchanged( 'emboss_0020', common.test_user_1_name )
+
+    def test_0025_deactivate_datatypes_repository( self ):
+        '''Deactivate the emboss_datatypes repository without removing it from disk.'''
+        global repository_datatypes_count
+        global base_datatypes_count
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( datatypes_repository_name, common.test_user_1_name )
+        old_datatypes_count = int( self.get_datatypes_count() )
+        self.deactivate_repository( installed_repository )
+        current_datatypes = int( self.get_datatypes_count() )
+        assert current_datatypes < old_datatypes_count, 'Deactivating the datatypes repository did not remove its datatypes.'
+
+    def test_0030_reactivate_datatypes_repository( self ):
+        '''Reactivate the datatypes repository and verify that the datatypes are again present.'''
+        global repository_datatypes_count
+        global base_datatypes_count
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( datatypes_repository_name, common.test_user_1_name )
+        self.reactivate_repository( installed_repository )
+        current_datatypes = int( self.get_datatypes_count() )
+        assert current_datatypes > base_datatypes_count, 'Reactivating the datatypes repository did not restore its datatypes.'
diff --git a/test/shed_functional/functional/test_1030_install_repository_with_dependency_revisions.py b/test/shed_functional/functional/test_1030_install_repository_with_dependency_revisions.py
new file mode 100644
index 0000000..47f997f
--- /dev/null
+++ b/test/shed_functional/functional/test_1030_install_repository_with_dependency_revisions.py
@@ -0,0 +1,159 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0030'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+
+emboss_repository_name = 'emboss_0030'
+emboss_5_repository_name = 'emboss_5_0030'
+emboss_6_repository_name = 'emboss_6_0030'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools for test 0030'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools for test 0030'
+
+base_datatypes_count = 0
+repository_datatypes_count = 0
+running_standalone = False
+
+
+class RepositoryWithDependencyRevisions( ShedTwillTestCase ):
+    '''Test installing a repository with dependency revisions.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 0030 category and add repositories to it, if necessary.'''
+        global repository_datatypes_count
+        global running_standalone
+        category = self.create_category( name='Test 0030 Repository Dependency Revisions', description='Test 0030 Repository Dependency Revisions' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        datatypes_repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                              description=datatypes_repository_description,
+                                                              long_description=datatypes_repository_long_description,
+                                                              owner=common.test_user_1_name,
+                                                              category_id=self.security.encode_id( category.id ),
+                                                              strings_displayed=[] )
+        if self.repository_is_new( datatypes_repository ):
+            running_standalone = True
+            self.upload_file( datatypes_repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            emboss_5_repository = self.get_or_create_repository( name=emboss_5_repository_name,
+                                                                 description=emboss_repository_description,
+                                                                 long_description=emboss_repository_long_description,
+                                                                 owner=common.test_user_1_name,
+                                                                 category_id=self.security.encode_id( category.id ),
+                                                                 strings_displayed=[] )
+            self.upload_file( emboss_5_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '5' ] )
+            datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_5_repository, repository_tuples=[ datatypes_tuple ], filepath=repository_dependencies_path )
+            emboss_6_repository = self.get_or_create_repository( name=emboss_6_repository_name,
+                                                                 description=emboss_repository_description,
+                                                                 long_description=emboss_repository_long_description,
+                                                                 owner=common.test_user_1_name,
+                                                                 category_id=self.security.encode_id( category.id ),
+                                                                 strings_displayed=[] )
+            self.upload_file( emboss_6_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '6' ] )
+            datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_6_repository, repository_tuples=[ datatypes_tuple ], filepath=repository_dependencies_path )
+            emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                               description=emboss_repository_description,
+                                                               long_description=emboss_repository_long_description,
+                                                               owner=common.test_user_1_name,
+                                                               category_id=self.security.encode_id( category.id ),
+                                                               strings_displayed=[] )
+            self.upload_file( emboss_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '5' ] )
+            dependency_tuple = ( self.url, emboss_5_repository.name, emboss_5_repository.user.username, self.get_repository_tip( emboss_5_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+            dependency_tuple = ( self.url, emboss_6_repository.name, emboss_6_repository.user.username, self.get_repository_tip( emboss_6_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
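+            # Each create_repository_dependency() call uploads a fresh repository_dependencies.xml,
+            # so emboss_0030 gains one revision depending on emboss_5_0030 and a later revision
+            # depending on emboss_6_0030; these are the dependency revisions this test exercises.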
+        repository_datatypes_count = int( self.get_repository_datatypes_count( datatypes_repository ) )
+
+    def test_0010_browse_tool_shed( self ):
+        """Browse the available tool sheds in this Galaxy instance and preview the emboss tool."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0030 Repository Dependency Revisions' ] )
+        category = self.test_db_util.get_category_by_name( 'Test 0030 Repository Dependency Revisions' )
+        self.browse_category( category, strings_displayed=[ 'emboss_0030' ] )
+        self.preview_repository_in_tool_shed( 'emboss_0030', common.test_user_1_name, strings_displayed=[ 'emboss_0030', 'Valid tools' ] )
+
+    def test_0015_install_emboss_repository( self ):
+        '''Install the emboss repository without installing tool dependencies.'''
+        global repository_datatypes_count
+        global base_datatypes_count
+        global running_standalone
+        base_datatypes_count = int( self.get_datatypes_count() )
+        strings_displayed = [ 'Handle', 'Never installed', 'tool dependencies', 'emboss', '5.0.0', 'package' ]
+        self.install_repository( 'emboss_0030',
+                                 common.test_user_1_name,
+                                 'Test 0030 Repository Dependency Revisions',
+                                 strings_displayed=strings_displayed,
+                                 install_tool_dependencies=False,
+                                 new_tool_panel_section_label='test_1030' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'emboss_0030', common.test_user_1_name )
+        strings_displayed = [ 'emboss_0030',
+                              'Galaxy wrappers for Emboss version 5.0.0 tools for test 0030',
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        strings_displayed = [ 'emboss', '5.0.0', 'package' ]
+        self.check_installed_repository_tool_dependencies( installed_repository, strings_displayed=strings_displayed, dependencies_installed=False )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+        self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] )
+        current_datatypes = int( self.get_datatypes_count() )
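+        # When the repositories pre-date this test (running_standalone is False), the
+        # datatypes they define are presumably already counted in base_datatypes_count,
+        # so no increase is expected.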
+        if running_standalone:
+            assert current_datatypes == base_datatypes_count + repository_datatypes_count, 'Installing emboss did not add new datatypes.'
+        else:
+            assert current_datatypes == base_datatypes_count, 'Installing emboss added new datatypes.'
+
+    def test_0025_verify_installed_repository_metadata( self ):
+        '''Verify that resetting the metadata on an installed repository does not change the metadata.'''
+        self.verify_installed_repository_metadata_unchanged( 'emboss_0030', common.test_user_1_name )
diff --git a/test/shed_functional/functional/test_1040_install_repository_basic_circular_dependencies.py b/test/shed_functional/functional/test_1040_install_repository_basic_circular_dependencies.py
new file mode 100644
index 0000000..e9f8501
--- /dev/null
+++ b/test/shed_functional/functional/test_1040_install_repository_basic_circular_dependencies.py
@@ -0,0 +1,155 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+freebayes_repository_name = 'freebayes_0040'
+freebayes_repository_description = "Galaxy's freebayes tool for test 0040"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool for test 0040"
+
+filtering_repository_name = 'filtering_0040'
+filtering_repository_description = "Galaxy's filtering tool for test 0040"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool for test 0040"
+
+category_name = 'test_0040_repository_circular_dependencies'
+
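+# running_standalone becomes True when this test has to create the repositories
+# itself (i.e. on a fresh tool shed); test_0020 only generates the dependency
+# definitions in that case, since pre-existing repositories already carry them.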
+running_standalone = False
+
+
+class TestInstallingCircularDependencies( ShedTwillTestCase ):
+    '''Verify that the code correctly handles installing repositories with circular dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_freebayes_repository( self ):
+        '''Create and populate freebayes_0040.'''
+        global running_standalone
+        category = self.create_category( name=category_name, description='Testing handling of circular repository dependencies.' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=freebayes_repository_name,
+                                                    description=freebayes_repository_description,
+                                                    long_description=freebayes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded the tool tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_filtering_repository( self ):
+        '''Create and populate filtering_0040.'''
+        global running_standalone
+        category = self.create_category( name=category_name, description='Testing handling of circular repository dependencies.' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=filtering_repository_name,
+                                                    description=filtering_repository_description,
+                                                    long_description=filtering_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded the tool tarball for filtering 1.1.0.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0020_create_repository_dependencies( self ):
+        '''Set up the filtering and freebayes repository dependencies.'''
+        # The dependency structure should look like:
+        # Filtering revision 0 -> freebayes revision 0.
+        # Freebayes revision 0 -> filtering revision 1.
+        # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
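+        # For illustration (assumed format, not asserted here): each
+        # create_repository_dependency() call below is expected to write a
+        # repository_dependencies.xml along the lines of
+        #     <repositories>
+        #         <repository toolshed="..." name="freebayes_0040" owner="user1" changeset_revision="..." />
+        #     </repositories>
+        # built from the ( url, name, owner, changeset_revision ) tuple.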
+        global running_standalone
+        if running_standalone:
+            freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+            filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+            repository_dependencies_path = self.generate_temp_path( 'test_1040', additional_paths=[ 'circular' ] )
+            repository_tuple = ( self.url, freebayes_repository.name, freebayes_repository.user.username, self.get_repository_tip( freebayes_repository ) )
+            self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+            repository_tuple = ( self.url, filtering_repository.name, filtering_repository.user.username, self.get_repository_tip( filtering_repository ) )
+            self.create_repository_dependency( repository=freebayes_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0025_install_freebayes_repository( self ):
+        '''Install freebayes with blank tool panel section, without tool dependencies but with repository dependencies.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        strings_displayed = [ 'Handle', 'tool dependencies', 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ]
+        self.install_repository( freebayes_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 strings_displayed=strings_displayed,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True )
+
+    def test_0030_uninstall_filtering_repository( self ):
+        '''Deactivate filtering, verify tool panel section and missing repository dependency.'''
+        installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name )
+        installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
+        self.display_installed_repository_manage_page( installed_freebayes_repository )
+        self.display_installed_repository_manage_page( installed_filtering_repository )
+        self.deactivate_repository( installed_filtering_repository )
+        self.test_db_util.ga_refresh( installed_filtering_repository )
+        self.check_galaxy_repository_tool_panel_section( installed_filtering_repository, 'Get Data' )
+        strings_displayed = [ 'Missing repository', 'filtering', 'freebayes_0040', 'user1', "Galaxy's freebayes tool for test 0040" ]
+        self.display_installed_repository_manage_page( installed_freebayes_repository, strings_displayed=strings_displayed )
+        self.check_galaxy_repository_db_status( filtering_repository_name,
+                                                common.test_user_1_name,
+                                                'Deactivated' )
+
+    def test_0035_reactivate_filtering_repository( self ):
+        '''Reinstall filtering into 'filtering' tool panel section.'''
+        installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( filtering_repository_name,
+                                                                                                   common.test_user_1_name )
+        self.reinstall_repository( installed_filtering_repository,
+                                   install_tool_dependencies=False,
+                                   install_repository_dependencies=True,
+                                   new_tool_panel_section_label='filtering',
+                                   no_changes=False )
+
+    def test_0040_uninstall_freebayes_repository( self ):
+        '''Deactivate freebayes, verify tool panel section and missing repository dependency.'''
+        installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name )
+        installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
+        self.display_installed_repository_manage_page( installed_freebayes_repository )
+        self.display_installed_repository_manage_page( installed_filtering_repository )
+        self.deactivate_repository( installed_freebayes_repository )
+        self.test_db_util.ga_refresh( installed_freebayes_repository )
+        self.check_galaxy_repository_tool_panel_section( installed_freebayes_repository, 'Get Data' )
+        strings_displayed = [ 'Missing repository', 'freebayes', 'filtering_0040', 'user1', "Galaxy's filtering tool for test 0040" ]
+        self.display_installed_repository_manage_page( installed_filtering_repository, strings_displayed=strings_displayed )
+        self.check_galaxy_repository_db_status( freebayes_repository_name,
+                                                common.test_user_1_name,
+                                                'Deactivated' )
+
+    def test_0045_deactivate_filtering_repository( self ):
+        '''Deactivate filtering, verify tool panel section.'''
+        installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
+        installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name )
+        self.display_installed_repository_manage_page( installed_freebayes_repository )
+        self.display_installed_repository_manage_page( installed_filtering_repository )
+        self.deactivate_repository( installed_filtering_repository )
+        self.test_db_util.ga_refresh( installed_filtering_repository )
+        strings_displayed = [ 'Missing repository', 'filtering', 'freebayes_0040', 'user1', "Galaxy's freebayes tool for test 0040" ]
+        self.display_installed_repository_manage_page( installed_freebayes_repository, strings_displayed=strings_displayed )
+        self.check_galaxy_repository_db_status( filtering_repository_name,
+                                                common.test_user_1_name,
+                                                'Deactivated' )
diff --git a/test/shed_functional/functional/test_1050_circular_dependencies_4_levels.py b/test/shed_functional/functional/test_1050_circular_dependencies_4_levels.py
new file mode 100644
index 0000000..41f9e79
--- /dev/null
+++ b/test/shed_functional/functional/test_1050_circular_dependencies_4_levels.py
@@ -0,0 +1,376 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0050'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0050'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+filtering_repository_name = 'filtering_0050'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+freebayes_repository_name = 'freebayes_0050'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+column_repository_name = 'column_maker_0050'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0050'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+bismark_repository_name = 'bismark_0050'
+bismark_repository_description = "A flexible aligner."
+bismark_repository_long_description = "A flexible aligner and methylation caller for Bisulfite-Seq applications."
+
+category_name = 'Test 0050 Circular Dependencies 5 Levels'
+category_description = 'Test circular dependency features'
+
+running_standalone = False
+
+
+class TestInstallRepositoryCircularDependencies( ShedTwillTestCase ):
+    '''Verify that the code correctly handles circular dependencies down to n levels.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_convert_repository( self ):
+        '''Create and populate convert_chars_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        global running_standalone
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='convert_chars/convert_chars.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded convert_chars tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_create_column_repository( self ):
+        '''Create and populate column_maker_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='column_maker/column_maker.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded column_maker tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_emboss_datatypes_repository( self ):
+        '''Create and populate emboss_datatypes_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=emboss_datatypes_repository_name,
+                                                    description=emboss_datatypes_repository_description,
+                                                    long_description=emboss_datatypes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0020_create_emboss_repository( self ):
+        '''Create and populate emboss_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0025_create_filtering_repository( self ):
+        '''Create and populate filtering_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=filtering_repository_name,
+                                                    description=filtering_repository_description,
+                                                    long_description=filtering_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0030_create_freebayes_repository( self ):
+        '''Create and populate freebayes_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=freebayes_repository_name,
+                                                    description=freebayes_repository_description,
+                                                    long_description=freebayes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded freebayes tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0035_create_bismark_repository( self ):
+        '''Create and populate bismark_0050.'''
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=bismark_repository_name,
+                                                    description=bismark_repository_description,
+                                                    long_description=bismark_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='bismark/bismark.tar',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded bismark tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0040_create_and_upload_dependency_definitions( self ):
+        '''Set up the dependency structure.'''
+        global running_standalone
+        if running_standalone:
+            column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+            convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+            datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+            emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+            filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+            freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+            bismark_repository = self.test_db_util.get_repository_by_name_and_owner( bismark_repository_name, common.test_user_1_name )
+            dependency_xml_path = self.generate_temp_path( 'test_1050', additional_paths=[ 'dependencies' ] )
+            # convert_chars depends on column_maker
+            # column_maker depends on convert_chars
+            # emboss depends on emboss_datatypes
+            # emboss_datatypes depends on bismark
+            # freebayes depends on freebayes, emboss, emboss_datatypes, and column_maker
+            # filtering depends on emboss
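+            # Each tuple below is ( tool_shed_url, repository_name, owner, changeset_revision ),
+            # the values written into the generated dependency definitions.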
+            column_tuple = ( self.url, column_repository.name, column_repository.user.username, self.get_repository_tip( column_repository ) )
+            convert_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
+            freebayes_tuple = ( self.url, freebayes_repository.name, freebayes_repository.user.username, self.get_repository_tip( freebayes_repository ) )
+            emboss_tuple = ( self.url, emboss_repository.name, emboss_repository.user.username, self.get_repository_tip( emboss_repository ) )
+            datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            bismark_tuple = ( self.url, bismark_repository.name, bismark_repository.user.username, self.get_repository_tip( bismark_repository ) )
+            self.create_repository_dependency( repository=convert_repository, repository_tuples=[ column_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=column_repository, repository_tuples=[ convert_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=datatypes_repository, repository_tuples=[ bismark_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=freebayes_repository,
+                                               repository_tuples=[ freebayes_tuple, datatypes_tuple, emboss_tuple, column_tuple ],
+                                               filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ emboss_tuple ], filepath=dependency_xml_path )
+
+    def test_0045_verify_repository_dependencies( self ):
+        '''Verify that the generated dependency circle does not cause an infinite loop.
+        Expected structure:
+
+        id: 2 key: http://toolshed.local:10001__ESEP__filtering__ESEP__test__ESEP__871602b4276b
+            ['http://toolshed.local:10001', 'emboss_5', 'test', '8de5fe0d7b04']
+             id: 3 key: http://toolshed.local:10001__ESEP__emboss_datatypes__ESEP__test__ESEP__dbd4f68bf507
+                 ['http://toolshed.local:10001', 'freebayes', 'test', 'f40028114098']
+             id: 4 key: http://toolshed.local:10001__ESEP__freebayes__ESEP__test__ESEP__f40028114098
+                 ['http://toolshed.local:10001', 'emboss_datatypes', 'test', 'dbd4f68bf507']
+                 ['http://toolshed.local:10001', 'emboss_5', 'test', '8de5fe0d7b04']
+                 ['http://toolshed.local:10001', 'column_maker', 'test', '83e956bdbac0']
+             id: 5 key: http://toolshed.local:10001__ESEP__column_maker__ESEP__test__ESEP__83e956bdbac0
+                 ['http://toolshed.local:10001', 'convert_chars', 'test', 'b28134220c8a']
+             id: 6 key: http://toolshed.local:10001__ESEP__convert_chars__ESEP__test__ESEP__b28134220c8a
+                 ['http://toolshed.local:10001', 'column_maker', 'test', '83e956bdbac0']
+             id: 7 key: http://toolshed.local:10001__ESEP__emboss_5__ESEP__test__ESEP__8de5fe0d7b04
+                 ['http://toolshed.local:10001', 'emboss_datatypes', 'test', 'dbd4f68bf507']
+        '''
+        emboss_datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        bismark_repository = self.test_db_util.get_repository_by_name_and_owner( bismark_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( convert_repository, column_repository )
+        self.check_repository_dependency( column_repository, convert_repository )
+        self.check_repository_dependency( emboss_datatypes_repository, bismark_repository )
+        self.check_repository_dependency( emboss_repository, emboss_datatypes_repository )
+        self.check_repository_dependency( filtering_repository, emboss_repository )
+        for repository in [ emboss_datatypes_repository, emboss_repository, column_repository ]:
+            self.check_repository_dependency( freebayes_repository, repository )
+        freebayes_dependencies = [ freebayes_repository, emboss_datatypes_repository, emboss_repository, column_repository ]
+        strings_displayed = [ '%s depends on %s.' % ( freebayes_repository.name, ', '.join( repo.name for repo in freebayes_dependencies ) ) ]
+        self.display_manage_repository_page( freebayes_repository, strings_displayed=strings_displayed )
+
+    def test_0050_verify_tool_dependencies( self ):
+        '''Check that freebayes and emboss display tool dependencies.'''
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        self.display_manage_repository_page( freebayes_repository,
+                                             strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Tool dependencies' ] )
+        self.display_manage_repository_page( emboss_repository, strings_displayed=[ 'Tool dependencies', 'emboss', '5.0.0', 'package' ] )
+
+    def test_0055_install_column_repository( self ):
+        '''Install column_maker with repository dependencies.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( column_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 new_tool_panel_section_label='column_maker' )
+        # This should result in column_maker and convert_chars being installed, and the rest never installed.
+        installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
+                                   ( convert_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'column_maker_0050', 'convert_chars_0050' ]
+        strings_not_displayed = [ 'emboss_datatypes_0050', 'emboss_0050', 'filtering_0050', 'freebayes_0050', 'bismark_0050' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.verify_installed_repositories( installed_repositories=installed_repositories )
+
+    def test_0060_install_emboss_repository( self ):
+        '''Install emboss_5 with repository dependencies.'''
+        global running_standalone
+        original_datatypes = self.get_datatypes_count()
+        self.install_repository( emboss_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 new_tool_panel_section_label='emboss_5_0050' )
+        if running_standalone:
+            assert original_datatypes < self.get_datatypes_count(), 'Installing a repository that depends on emboss_datatypes did not add datatypes.'
+        # Now we have emboss_datatypes, emboss, bismark, column_maker, and convert_chars installed, filtering and freebayes never installed.
+        installed_repositories = [ ( emboss_datatypes_repository_name, common.test_user_1_name ),
+                                   ( column_repository_name, common.test_user_1_name ),
+                                   ( emboss_repository_name, common.test_user_1_name ),
+                                   ( convert_repository_name, common.test_user_1_name ),
+                                   ( bismark_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'emboss_datatypes_0050', 'emboss_0050', 'column_maker_0050', 'convert_chars_0050', 'bismark_0050' ]
+        strings_not_displayed = [ 'filtering_0050', 'freebayes_0050' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.verify_installed_repositories( installed_repositories )
+
+    def test_0065_deactivate_datatypes_repository( self ):
+        '''Deactivate emboss_datatypes and verify that the datatypes count is reduced.'''
+        original_datatypes = self.get_datatypes_count()
+        repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        self.deactivate_repository( repository )
+        assert original_datatypes > self.get_datatypes_count(), 'Deactivating emboss_datatypes did not remove datatypes.'
+        # Now we have emboss, bismark, column_maker, and convert_chars installed, filtering and freebayes never installed, and emboss_datatypes deactivated.
+        installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
+                                   ( emboss_repository_name, common.test_user_1_name ),
+                                   ( convert_repository_name, common.test_user_1_name ),
+                                   ( bismark_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'emboss_0050', 'column_maker_0050', 'convert_chars_0050', 'bismark_0050' ]
+        strings_not_displayed = [ 'emboss_datatypes_0050', 'filtering_0050', 'freebayes_0050' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.verify_installed_repositories( installed_repositories )
+
+    def test_0070_uninstall_emboss_repository( self ):
+        '''Uninstall the emboss_5 repository.'''
+        repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.uninstall_repository( repository )
+        strings_not_displayed = [ repository.name, repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+        self.test_db_util.ga_refresh( repository )
+        self.check_galaxy_repository_tool_panel_section( repository, 'emboss_5_0050' )
+        # Now we have bismark, column_maker, and convert_chars installed, filtering and freebayes never installed, emboss_datatypes deactivated,
+        # and emboss uninstalled.
+        installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
+                                   ( convert_repository_name, common.test_user_1_name ),
+                                   ( bismark_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'column_maker_0050', 'convert_chars_0050', 'bismark_0050' ]
+        strings_not_displayed = [ 'emboss_0050', 'emboss_datatypes_0050', 'filtering_0050', 'freebayes_0050' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.verify_installed_repositories( installed_repositories )
+
+    def test_0075_install_freebayes_repository( self ):
+        '''Install freebayes with repository dependencies. This should also automatically reactivate emboss_datatypes and reinstall emboss_5.'''
+        original_datatypes = self.get_datatypes_count()
+        strings_displayed = [ 'Handle', 'tool dependencies', 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ]
+        self.install_repository( freebayes_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 strings_displayed=strings_displayed,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 new_tool_panel_section_label='freebayes' )
+        assert original_datatypes < self.get_datatypes_count(), 'Installing a repository that depends on emboss_datatypes did not add datatypes.'
+        emboss_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'emboss_0050',
+                              'emboss_datatypes_0050',
+                              emboss_repository.installed_changeset_revision,
+                              datatypes_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        # Installing freebayes should automatically reinstall emboss and reactivate emboss_datatypes.
+        # Now column_maker, convert_chars, emboss, emboss_datatypes, freebayes, and bismark should be installed.
+        installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
+                                   ( emboss_datatypes_repository_name, common.test_user_1_name ),
+                                   ( emboss_repository_name, common.test_user_1_name ),
+                                   ( freebayes_repository_name, common.test_user_1_name ),
+                                   ( convert_repository_name, common.test_user_1_name ),
+                                   ( bismark_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'emboss_0050', 'emboss_datatypes_0050', 'column_maker_0050', 'convert_chars_0050', 'bismark_0050', 'freebayes_0050' ]
+        strings_not_displayed = [ 'filtering_0050' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.verify_installed_repositories( installed_repositories )
diff --git a/test/shed_functional/functional/test_1060_install_repository_with_workflow.py b/test/shed_functional/functional/test_1060_install_repository_with_workflow.py
new file mode 100644
index 0000000..ef77906
--- /dev/null
+++ b/test/shed_functional/functional/test_1060_install_repository_with_workflow.py
@@ -0,0 +1,153 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'filtering_0060'
+repository_description = "Galaxy's filtering tool for test 0060"
+repository_long_description = "Long description of Galaxy's filtering tool for test 0060"
+
+workflow_filename = 'Workflow_for_0060_filter_workflow_repository.ga'
+workflow_name = 'Workflow for 0060_filter_workflow_repository'
+second_workflow_name = 'New workflow for 0060_filter_workflow_repository'
+
+category_name = 'Test 0060 Workflow Features'
+category_description = 'Test 0060 for workflow features'
+
+workflow_repository_name = 'filtering_workflow_0060'
+workflow_repository_description = "Workflow referencing the filtering tool for test 0060"
+workflow_repository_long_description = "Long description of the workflow for test 0060"
+
+
+class ToolWithRepositoryDependencies( ShedTwillTestCase ):
+    '''Test installing repositories that contain workflows.'''
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_ensure_category_exists( self ):
+        '''Create the 0060 category and the filtering repository, populating it if necessary.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            with open( self.get_filename( 'filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga' ), 'r' ) as workflow_file:
+                workflow = workflow_file.read()
+            workflow = workflow.replace( '__TEST_TOOL_SHED_URL__', self.url.replace( 'http://', '' ) )
+            workflow_filepath = self.generate_temp_path( 'test_1060', additional_paths=[ 'filtering_workflow' ] )
+            if not os.path.exists( workflow_filepath ):
+                os.makedirs( workflow_filepath )
+            with open( os.path.join( workflow_filepath, workflow_filename ), 'w' ) as workflow_out:
+                workflow_out.write( workflow )
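+            # The .ga file is a JSON workflow export; substituting the placeholder above
+            # points the workflow's tool steps at this test's tool shed rather than a
+            # hard-coded instance.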
+            self.upload_file( repository,
+                              filename=workflow_filename,
+                              filepath=workflow_filepath,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering workflow.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_2.2.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 2.2.0.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_install_repository_with_workflow( self ):
+        """Browse the available tool sheds in this Galaxy instance and preview the filtering tool."""
+        self.preview_repository_in_tool_shed( repository_name,
+                                              common.test_user_1_name,
+                                              strings_displayed=[ repository_name, 'Valid tools', 'Workflows' ] )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( repository_name,
+                                 common.test_user_1_name,
+                                 'Test 0060 Workflow Features',
+                                 install_tool_dependencies=False,
+                                 new_tool_panel_section_label='test_1060' )
+
+    def test_0015_import_workflow_from_installed_repository( self ):
+        '''Import the workflow from the installed repository and verify that it appears in the list of all workflows.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+        self.display_installed_workflow_image( installed_repository,
+                                               workflow_name,
+                                               strings_displayed=[ '#EBD9B2' ],
+                                               strings_not_displayed=[ '#EBBCB2' ] )
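+        # The hex values are assumed to be the colours the workflow renderer uses for
+        # steps: #EBD9B2 for a tool that resolved correctly, #EBBCB2 for a missing or
+        # invalid tool.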
+        self.display_all_workflows( strings_not_displayed=[ 'Workflow for 0060_filter_workflow_repository' ] )
+        self.import_workflow( installed_repository, workflow_name )
+        self.display_all_workflows( strings_displayed=[ 'Workflow for 0060_filter_workflow_repository' ] )
+
+    def test_0020_create_filter_workflow_repository( self ):
+        '''Create, if necessary, a filtering repository with only a workflow.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=workflow_repository_name,
+                                                    description=workflow_repository_description,
+                                                    long_description=workflow_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            with open( self.get_filename( 'filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga' ), 'r' ) as workflow_file:
+                workflow = workflow_file.read()
+            workflow = workflow.replace( '__TEST_TOOL_SHED_URL__', self.url.replace( 'http://', '' ) )
+            workflow = workflow.replace( 'Workflow for 0060_filter_workflow_repository',
+                                         'New workflow for 0060_filter_workflow_repository' )
+            workflow_filepath = self.generate_temp_path( 'test_0060', additional_paths=[ 'filtering_workflow_2' ] )
+            if not os.path.exists( workflow_filepath ):
+                os.makedirs( workflow_filepath )
+            with open( os.path.join( workflow_filepath, workflow_filename ), 'w' ) as workflow_out:
+                workflow_out.write( workflow )
+            self.upload_file( repository,
+                              filename=workflow_filename,
+                              filepath=workflow_filepath,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering workflow.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.load_workflow_image_in_tool_shed( repository,
+                                                   'New workflow for 0060_filter_workflow_repository',
+                                                   strings_displayed=[ '#EBBCB2' ] )
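+            # #EBBCB2 is expected here because this repository ships only the workflow,
+            # not the filtering tool its step references.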
+
+    def test_0025_install_repository_with_workflow( self ):
+        """Browse the available tool sheds in this Galaxy instance and preview the filtering workflow repository."""
+        self.preview_repository_in_tool_shed( workflow_repository_name,
+                                              common.test_user_1_name,
+                                              strings_displayed=[ 'filtering_workflow_0060', 'Workflows' ],
+                                              strings_not_displayed=[ 'Valid tools', 'Invalid tools' ] )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( workflow_repository_name,
+                                 common.test_user_1_name,
+                                 'Test 0060 Workflow Features',
+                                 install_tool_dependencies=False,
+                                 includes_tools_for_display_in_tool_panel=False )
+
+    def test_0030_import_workflow_from_installed_repository( self ):
+        '''Import the workflow from the installed repository and verify that it appears in the list of all workflows.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( workflow_repository_name, common.test_user_1_name )
+        self.display_installed_workflow_image( installed_repository,
+                                               'New workflow for 0060_filter_workflow_repository',
+                                               strings_displayed=[ '#EBD9B2' ],
+                                               strings_not_displayed=[ '#EBBCB2' ] )
+        self.display_all_workflows( strings_not_displayed=[ 'New workflow for 0060_filter_workflow_repository' ] )
+        self.import_workflow( installed_repository,
+                              'New workflow for 0060_filter_workflow_repository',
+                              strings_displayed=[ 'New workflow for 0060_filter_workflow_repository' ] )
+        self.display_all_workflows( strings_displayed=[ 'New workflow for 0060_filter_workflow_repository' ] )
diff --git a/test/shed_functional/functional/test_1070_invalid_tool.py b/test/shed_functional/functional/test_1070_invalid_tool.py
new file mode 100644
index 0000000..46edaa7
--- /dev/null
+++ b/test/shed_functional/functional/test_1070_invalid_tool.py
@@ -0,0 +1,90 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'bismark_0070'
+repository_description = "Galaxy's bismark wrapper"
+repository_long_description = "Long description of Galaxy's bismark wrapper"
+category_name = 'Test 0070 Invalid Tool Revisions'
+category_description = 'Test 1070 for a repository with an invalid tool.'
+
+
+class TestBismarkRepository( ShedTwillTestCase ):
+    '''Test a repository with multiple revisions containing invalid tools.'''
+    def test_0000_create_or_login_admin_user( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_ensure_existence_of_repository_and_category( self ):
+        '''Create the bismark repository and populate it with an invalid tool. Uploading the updated tool XML should result in an error message and an invalid tool revision.'''
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='bismark/bismark.tar',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded bismark tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
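+            # Uploading this updated XML is expected to yield an invalid tool
+            # revision; test_0015 below asserts that the installed repository's
+            # metadata records it under 'invalid_tools'.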
+            self.upload_file( repository,
+                              filename='bismark/bismark_methylation_extractor.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded an updated tool xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_browse_tool_shed( self ):
+        """Browse the available tool sheds in this Galaxy instance and preview the bismark repository."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.browse_tool_shed( url=self.url, strings_displayed=[ category_name ] )
+        category = self.test_db_util.get_category_by_name( category_name )
+        self.browse_category( category, strings_displayed=[ repository_name ] )
+        self.preview_repository_in_tool_shed( repository_name, common.test_user_1_name, strings_displayed=[ repository_name ] )
+
+    def test_0015_install_bismark_repository( self ):
+        '''Install the bismark repository without installing tool dependencies.'''
+        self.install_repository( repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 new_tool_panel_section_label='test_1070' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+        strings_displayed = [ 'bismark_0070',
+                              "Galaxy's bismark wrapper",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'methylation extractor', 'Invalid tools' ] )
+        self.display_installed_repository_manage_page( installed_repository,
+                                                       strings_displayed=strings_displayed,
+                                                       strings_not_displayed=[ 'bisulfite mapper' ] )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+        self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] )
+        assert 'invalid_tools' in installed_repository.metadata, 'No invalid tools were defined in %s.' % \
+            installed_repository.name
diff --git a/test/shed_functional/functional/test_1080_advanced_circular_dependency_installation.py b/test/shed_functional/functional/test_1080_advanced_circular_dependency_installation.py
new file mode 100644
index 0000000..6d7b4e9
--- /dev/null
+++ b/test/shed_functional/functional/test_1080_advanced_circular_dependency_installation.py
@@ -0,0 +1,387 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+column_repository_name = 'column_maker_0080'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0080'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0080 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+log = logging.getLogger( __name__ )
+
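+# Set to True by the repository creation tests when this module runs on its
+# own, so that test_0015 knows to upload the dependency definitions itself.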
+running_standalone = False
+
+
+class TestRepositoryDependencies( ShedTwillTestCase ):
+    '''Testing uninstalling and reinstalling repository dependencies, and setting tool panel sections.'''
+
+    def test_0000_create_or_login_admin_user( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_and_populate_column_repository( self ):
+        """Create the category for this test suite, then create and populate column_maker."""
+        category = self.create_category( name=category_name, description=category_description )
+        global running_standalone
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='column_maker/column_maker.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded column_maker tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            running_standalone = True
+
+    def test_0010_create_and_populate_convert_repository( self ):
+        '''Create and populate the convert_chars repository.'''
+        global running_standalone
+        self.login( email=common.admin_email, username=common.admin_username )
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='convert_chars/convert_chars.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded convert_chars tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            running_standalone = True
+
+    def test_0015_upload_dependency_xml_if_needed( self ):
+        '''If this test is being run by itself, it will not have repository dependencies configured yet.'''
+        global running_standalone
+        if running_standalone:
+            convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+            column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+            repository_dependencies_path = self.generate_temp_path( 'test_1080', additional_paths=[ 'convert' ] )
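+            # create_repository_dependency() turns each tuple into a
+            # repository_dependencies.xml along these lines (illustrative):
+            #   <repository_dependencies description="...">
+            #       <repository toolshed="..." name="..." owner="..." changeset_revision="..." />
+            #   </repository_dependencies>
+            # and uploads it to the given repository; doing this in both
+            # directions below makes the dependency circular.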
+            repository_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
+            self.create_repository_dependency( repository=column_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+            repository_tuple = ( self.url, column_repository.name, column_repository.user.username, self.get_repository_tip( column_repository ) )
+            self.create_repository_dependency( repository=convert_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0020_install_convert_repository( self ):
+        '''Install convert_chars without repository dependencies into convert_chars tool panel section.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( convert_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=False,
+                                 new_tool_panel_section_label='convert_chars' )
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                           common.test_user_1_name )
+        browse_strings_displayed = [ 'convert_chars_0080',
+                                     'Convert delimiters',
+                                     self.url.replace( 'http://', '' ),
+                                     installed_convert_repository.installed_changeset_revision ]
+        strings_displayed = [ 'convert_chars_0080',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision,
+                              'column_maker_0080',
+                              installed_column_repository.installed_changeset_revision,
+                              'Missing repository dependencies' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=browse_strings_displayed )
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0025_install_column_repository( self ):
+        '''Install column maker with repository dependencies into column_maker tool panel section.'''
+        self.install_repository( column_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_repository_dependencies=True,
+                                 new_tool_panel_section_label='column_maker',
+                                 strings_displayed=[ 'install_repository_dependencies' ] )
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                           common.test_user_1_name )
+        browse_strings_displayed = [ 'convert_chars_0080',
+                                     'Convert delimiters',
+                                     self.url.replace( 'http://', '' ),
+                                     installed_convert_repository.installed_changeset_revision,
+                                     'column_maker_0080',
+                                     'Add column',
+                                     installed_column_repository.installed_changeset_revision ]
+        strings_displayed = [ 'column_maker_0080',
+                              'Add column',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision,
+                              'convert_chars_0080',
+                              installed_convert_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=browse_strings_displayed )
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0030_deactivate_convert_repository( self ):
+        '''Deactivate convert_chars, verify that column_maker is installed and missing repository dependencies.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.deactivate_repository( installed_convert_repository )
+        strings_displayed = [ 'column_maker_0080',
+                              'Add column',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision,
+                              'convert_chars_0080',
+                              installed_convert_repository.installed_changeset_revision,
+                              'Missing repository dependencies',
+                              'Deactivated' ]
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0035_reactivate_convert_repository( self ):
+        '''Reactivate convert_chars, both convert_chars and column_maker should now show as installed.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.reactivate_repository( installed_convert_repository )
+        strings_displayed = [ 'convert_chars_0080',
+                              'Compute',
+                              'an expression on every row',
+                              '1.1.0',
+                              'column_maker_0080',
+                              'Installed repository dependencies',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision,
+                              installed_convert_repository.installed_changeset_revision ]
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0040_deactivate_column_repository( self ):
+        '''Deactivate column_maker, verify that convert_chars is installed and missing repository dependencies.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.deactivate_repository( installed_column_repository )
+        strings_displayed = [ 'convert_chars_0080',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision,
+                              'column_maker_0080',
+                              installed_column_repository.installed_changeset_revision,
+                              'Missing repository dependencies',
+                              'Deactivated' ]
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0045_deactivate_convert_repository( self ):
+        '''Deactivate convert_chars, verify that both convert_chars and column_maker are deactivated.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.deactivate_repository( installed_convert_repository )
+        strings_not_displayed = [ 'column_maker_0080',
+                                  installed_column_repository.installed_changeset_revision,
+                                  'convert_chars_0080',
+                                  installed_convert_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0050_reactivate_column_repository( self ):
+        '''Reactivate column_maker. This should not automatically reactivate convert_chars, so column_maker should be displayed as installed but missing repository dependencies.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.reactivate_repository( installed_column_repository )
+        strings_displayed = [ 'column_maker_0080',
+                              'Add column',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision,
+                              'convert_chars_0080',
+                              installed_convert_repository.installed_changeset_revision,
+                              'Missing repository dependencies',
+                              'Deactivated' ]
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0055_reactivate_convert_repository( self ):
+        '''Activate convert_chars. Both convert_chars and column_maker should now show as installed.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.reactivate_repository( installed_convert_repository )
+        strings_displayed = [ 'column_maker_0080',
+                              'Add column',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision,
+                              'convert_chars_0080',
+                              installed_convert_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+        strings_displayed = [ 'convert_chars_0080',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision,
+                              'column_maker_0080',
+                              installed_column_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0060_uninstall_column_repository( self ):
+        '''Uninstall column_maker. Verify that convert_chars is installed and missing repository dependencies.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.uninstall_repository( installed_column_repository )
+        strings_displayed = [ 'convert_chars_0080',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision,
+                              'column_maker_0080',
+                              installed_column_repository.installed_changeset_revision,
+                              'Missing repository dependencies',
+                              'Uninstalled' ]
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
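+        # Refresh the ORM instance so later tests see the updated status flags.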
+        self.test_db_util.install_session.refresh( installed_column_repository )
+
+    def test_0065_reinstall_column_repository( self ):
+        '''Reinstall column_maker without repository dependencies, verify both convert_chars and column_maker are installed.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.reinstall_repository( installed_column_repository, install_repository_dependencies=False )
+        strings_displayed = [ 'column_maker_0080',
+                              'Add column',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision,
+                              'convert_chars_0080',
+                              installed_convert_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+        strings_displayed = [ 'convert_chars_0080',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision,
+                              'column_maker_0080',
+                              installed_column_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0070_deactivate_convert_repository( self ):
+        '''Deactivate convert_chars, verify that column_maker is installed but missing repository dependencies.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.deactivate_repository( installed_convert_repository )
+        strings_displayed = [ 'column_maker_0080',
+                              'Add column',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision,
+                              'convert_chars_0080',
+                              installed_convert_repository.installed_changeset_revision,
+                              'Missing repository dependencies',
+                              'Deactivated' ]
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+        self.test_db_util.install_session.refresh( installed_convert_repository )
+
+    def test_0075_deactivate_column_repository( self ):
+        '''Deactivate column_maker, verify that both convert_chars and column_maker are deactivated.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.deactivate_repository( installed_column_repository )
+        strings_displayed = [ 'convert_chars_0080',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision,
+                              'column_maker_0080',
+                              installed_column_repository.installed_changeset_revision,
+                              'Missing repository dependencies',
+                              'Activate or reinstall repository',
+                              'Deactivated' ]
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0080_reinstall_convert_repository( self ):
+        '''Reinstall convert_chars with repository dependencies, verify that this installs both convert_chars and column_maker.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.reinstall_repository( installed_convert_repository,
+                                   install_repository_dependencies=True,
+                                   no_changes=False )
+        strings_displayed = [ 'column_maker_0080',
+                              'Add column',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision,
+                              'convert_chars_0080',
+                              installed_convert_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+        strings_displayed = [ 'convert_chars_0080',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision,
+                              'column_maker_0080',
+                              installed_column_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
diff --git a/test/shed_functional/functional/test_1090_repository_dependency_handling.py b/test/shed_functional/functional/test_1090_repository_dependency_handling.py
new file mode 100644
index 0000000..5a90f88
--- /dev/null
+++ b/test/shed_functional/functional/test_1090_repository_dependency_handling.py
@@ -0,0 +1,183 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+column_repository_name = 'column_maker_1085'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_1085'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 1085 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+log = logging.getLogger( __name__ )
+
+
+class TestRepositoryDependencies( ShedTwillTestCase ):
+    '''Testing the behavior of repository dependencies with tool panel sections.'''
+
+    def test_0000_create_or_login_admin_user( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_and_populate_column_repository( self ):
+        """Create a category for this test suite and add repositories to it."""
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='column_maker/column_maker.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded column_maker tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_create_and_populate_convert_repository( self ):
+        '''Create and populate the convert_chars repository.'''
+        self.login( email=common.admin_email, username=common.admin_username )
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='convert_chars/convert_chars.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded convert_chars tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_and_upload_dependency_files( self ):
+        '''Upload dependency definitions that make column_maker and convert_chars depend on each other.'''
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_1085', additional_paths=[ 'column' ] )
+        repository_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
+        self.create_repository_dependency( repository=column_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+        repository_tuple = ( self.url, column_repository.name, column_repository.user.username, self.get_repository_tip( column_repository ) )
+        self.create_repository_dependency( repository=convert_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0020_install_repositories( self ):
+        '''Install column_maker into column_maker tool panel section and install repository dependencies.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( column_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 new_tool_panel_section_label='column_maker',
+                                 strings_displayed=[ 'install_repository_dependencies' ] )
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        browse_strings_displayed = [ 'convert_chars_1085',
+                                     'Convert delimiters',
+                                     self.url.replace( 'http://', '' ),
+                                     installed_convert_repository.installed_changeset_revision,
+                                     'column_maker_1085',
+                                     'Add column',
+                                     installed_column_repository.installed_changeset_revision ]
+        strings_displayed = [ 'convert_chars_1085',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision,
+                              'column_maker_1085',
+                              installed_column_repository.installed_changeset_revision,
+                              'Installed repository dependencies' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=browse_strings_displayed )
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0025_uninstall_column_repository( self ):
+        '''Uninstall column_maker and verify that its tool panel section is retained.'''
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.uninstall_repository( installed_column_repository )
+        self.test_db_util.ga_refresh( installed_column_repository )
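+        # The tool panel section assignment survives uninstallation, so a
+        # later reinstall can restore the tool to the same section.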
+        self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'column_maker' )
+
+    def test_0030_uninstall_convert_repository( self ):
+        '''Uninstall convert_chars and verify that its tool panel section is retained.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        self.uninstall_repository( installed_convert_repository )
+        self.test_db_util.ga_refresh( installed_convert_repository )
+        self.check_galaxy_repository_tool_panel_section( installed_convert_repository, 'column_maker' )
+
+    def test_0035_reinstall_column_repository( self ):
+        '''Reinstall column_maker into a new tool panel section 'new_column_maker' (no_changes=False), without repository dependencies.'''
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.reinstall_repository( installed_column_repository,
+                                   install_tool_dependencies=False,
+                                   install_repository_dependencies=False,
+                                   new_tool_panel_section_label='new_column_maker',
+                                   no_changes=False )
+        strings_displayed = [ 'column_maker_1085',
+                              'Add column',
+                              self.url.replace( 'http://', '' ),
+                              installed_column_repository.installed_changeset_revision ]
+        self.display_installed_repository_manage_page( installed_column_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0040_reinstall_convert_repository( self ):
+        '''Reinstall convert_chars into a new tool panel section 'new_convert_chars' (no_changes=False), without repository dependencies.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        self.reinstall_repository( installed_convert_repository,
+                                   install_tool_dependencies=False,
+                                   install_repository_dependencies=False,
+                                   new_tool_panel_section_label='new_convert_chars',
+                                   no_changes=False )
+        strings_displayed = [ 'convert_chars_1085',
+                              'Convert delimiters',
+                              self.url.replace( 'http://', '' ),
+                              installed_convert_repository.installed_changeset_revision ]
+        self.display_installed_repository_manage_page( installed_convert_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0045_uninstall_and_verify_tool_panel_sections( self ):
+        '''Uninstall both repositories and verify that their new tool panel sections are retained.'''
+        installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+                                                                                            common.test_user_1_name )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+                                                                                            common.test_user_1_name )
+        self.uninstall_repository( installed_convert_repository )
+        self.uninstall_repository( installed_column_repository )
+        self.test_db_util.ga_refresh( installed_convert_repository )
+        self.test_db_util.ga_refresh( installed_column_repository )
+        self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'new_column_maker' )
+        self.check_galaxy_repository_tool_panel_section( installed_convert_repository, 'new_convert_chars' )
diff --git a/test/shed_functional/functional/test_1100_install_updated_repository_dependencies.py b/test/shed_functional/functional/test_1100_install_updated_repository_dependencies.py
new file mode 100644
index 0000000..e74f5c4
--- /dev/null
+++ b/test/shed_functional/functional/test_1100_install_updated_repository_dependencies.py
@@ -0,0 +1,118 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+column_repository_name = 'column_maker_1087'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_1087'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 1087 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+
+class TestRepositoryDependencies( ShedTwillTestCase ):
+    '''Test installing a repository, then updating it to include repository dependencies.'''
+
+    def test_0000_create_or_login_admin_user( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_and_populate_column_repository( self ):
+        """Create a category for this test suite and add repositories to it."""
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='column_maker/column_maker.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded column_maker tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_create_and_populate_convert_repository( self ):
+        '''Create and populate the convert_chars repository.'''
+        self.login( email=common.admin_email, username=common.admin_username )
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='convert_chars/convert_chars.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded convert_chars tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_install_and_uninstall_column_repository( self ):
+        '''Install and uninstall the column_maker repository.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( column_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 new_tool_panel_section_label='column_maker',
+                                 strings_not_displayed=[ 'install_repository_dependencies' ] )
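+        # No repository dependencies checkbox is expected yet: the dependency
+        # on convert_chars is only added in test_0020 below.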
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+        self.uninstall_repository( installed_column_repository )
+
+    def test_0020_upload_dependency_xml( self ):
+        '''Upload a repository_dependencies.xml file to column_maker that specifies convert_chars.'''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_1085', additional_paths=[ 'column' ] )
+        convert_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
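+        # The create_repository_dependency() helper turns each ( toolshed url, name, owner, changeset revision )
+        # tuple into a <repository /> entry in a generated repository_dependencies.xml file and uploads that
+        # file to the target repository, as described in this test's docstring.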
+        self.create_repository_dependency( repository=column_repository, repository_tuples=[ convert_tuple ], filepath=repository_dependencies_path )
+
+    def test_0025_verify_repository_dependency( self ):
+        '''Verify that the new revision of column_maker now depends on convert_chars.'''
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( column_repository, convert_repository )
+
+    def test_0030_reinstall_column_repository( self ):
+        '''Reinstall column_maker and verify that it now shows repository dependencies.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'Handle repository dependencies', 'convert_chars_1087', self.get_repository_tip( convert_repository ) ]
+        # Due to twill's limitations, only check for strings on the (redirected) reselect tool panel section page; don't actually reinstall.
+        params = dict( operation='activate or reinstall', id=self.security.encode_id( installed_column_repository.id ) )
+        url = '/admin_toolshed/browse_repositories'
+        self.visit_galaxy_url( url, params )
+        self.check_for_strings( strings_displayed )
+        strings_not_displayed = [ 'column_maker_1087' ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
diff --git a/test/shed_functional/functional/test_1110_install_tool_from_tool_search.py b/test/shed_functional/functional/test_1110_install_tool_from_tool_search.py
new file mode 100644
index 0000000..8aabb98
--- /dev/null
+++ b/test/shed_functional/functional/test_1110_install_tool_from_tool_search.py
@@ -0,0 +1,311 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0090'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0090'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+filtering_repository_name = 'filtering_0090'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+freebayes_repository_name = 'freebayes_0090'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+bwa_base_repository_name = 'bwa_base_0090'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "NT space mapping with BWA"
+
+bwa_color_repository_name = 'bwa_color_0090'
+bwa_color_repository_description = "BWA Color"
+bwa_color_repository_long_description = "Color space mapping with BWA"
+
+category_name = 'Test 0090 Tool Search And Installation'
+category_description = 'Test 0090 Tool Search And Installation'
+
+running_standalone = False
+
+
+class TestToolSearchAndInstall( ShedTwillTestCase ):
+    '''Verify that tools can be searched for and installed from tool search results.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_bwa_base_repository( self ):
+        '''Create and populate bwa_base_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        global running_standalone
+        repository = self.get_or_create_repository( name=bwa_base_repository_name,
+                                                    description=bwa_base_repository_description,
+                                                    long_description=bwa_base_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='bwa/bwa_base.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded BWA tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_create_bwa_color_repository( self ):
+        '''Create and populate bwa_color_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        global running_standalone
+        repository = self.get_or_create_repository( name=bwa_color_repository_name,
+                                                    description=bwa_color_repository_description,
+                                                    long_description=bwa_color_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='bwa/bwa_color.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded BWA color tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_emboss_datatypes_repository( self ):
+        '''Create and populate emboss_datatypes_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        global running_standalone
+        repository = self.get_or_create_repository( name=emboss_datatypes_repository_name,
+                                                    description=emboss_datatypes_repository_description,
+                                                    long_description=emboss_datatypes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0020_create_emboss_repository( self ):
+        '''Create and populate emboss_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        global running_standalone
+        repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                    description=emboss_repository_description,
+                                                    long_description=emboss_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0025_create_filtering_repository( self ):
+        '''Create and populate filtering_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        global running_standalone
+        repository = self.get_or_create_repository( name=filtering_repository_name,
+                                                    description=filtering_repository_description,
+                                                    long_description=filtering_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0030_create_freebayes_repository( self ):
+        '''Create and populate freebayes_0090.'''
+        category = self.create_category( name=category_name, description=category_description )
+        global running_standalone
+        repository = self.get_or_create_repository( name=freebayes_repository_name,
+                                                    description=freebayes_repository_description,
+                                                    long_description=freebayes_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded freebayes tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0035_create_and_upload_dependency_definitions( self ):
+        '''Create and upload repository dependency definitions.'''
+        global running_standalone
+        if running_standalone:
+            bwa_color_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+            bwa_base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+            emboss_datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+            emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+            filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+            freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+            dependency_xml_path = self.generate_temp_path( 'test_0090', additional_paths=[ 'freebayes' ] )
+            freebayes_tuple = ( self.url, freebayes_repository.name, freebayes_repository.user.username, self.get_repository_tip( freebayes_repository ) )
+            emboss_tuple = ( self.url, emboss_repository.name, emboss_repository.user.username, self.get_repository_tip( emboss_repository ) )
+            datatypes_tuple = ( self.url, emboss_datatypes_repository.name, emboss_datatypes_repository.user.username, self.get_repository_tip( emboss_datatypes_repository ) )
+            filtering_tuple = ( self.url, filtering_repository.name, filtering_repository.user.username, self.get_repository_tip( filtering_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ freebayes_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=bwa_base_repository, repository_tuples=[ emboss_tuple ], filepath=dependency_xml_path )
+            self.create_repository_dependency( repository=bwa_color_repository, repository_tuples=[ filtering_tuple ], filepath=dependency_xml_path )
+
+    def test_0040_verify_repository_dependencies( self ):
+        '''Verify the generated dependency structure.'''
+        bwa_color_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+        bwa_base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        emboss_datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = self.test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( emboss_repository, emboss_datatypes_repository )
+        self.check_repository_dependency( filtering_repository, freebayes_repository )
+        self.check_repository_dependency( bwa_base_repository, emboss_repository )
+        self.check_repository_dependency( bwa_color_repository, filtering_repository )
+
+    def test_0045_install_freebayes_repository( self ):
+        '''Install freebayes without repository dependencies.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        # After this test, the repositories should be in the following states:
+        # Installed: freebayes
+        # Never installed: filtering, emboss, emboss_datatypes, bwa_color, bwa_base
+        self.install_repository( freebayes_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=False,
+                                 new_tool_panel_section_label='freebayes_1090' )
+        installed_repositories = [ ( freebayes_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'freebayes_0090' ]
+        strings_not_displayed = [ 'filtering_0090', 'emboss_0090', 'emboss_datatypes_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.verify_installed_repositories( installed_repositories )
+
+    def test_0050_install_deactivate_filtering_repository( self ):
+        '''Install and deactivate filtering.'''
+        # After this test, the repositories should be in the following states:
+        # Installed: freebayes
+        # Deactivated: filtering
+        # Never installed: emboss, emboss_datatypes, bwa_color, bwa_base
+        self.install_repository( filtering_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=False,
+                                 new_tool_panel_section_label='filtering_1090' )
+        installed_repositories = [ ( filtering_repository_name, common.test_user_1_name ),
+                                   ( freebayes_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'filtering_0090', 'freebayes_0090' ]
+        strings_not_displayed = [ 'emboss_0090', 'emboss_datatypes_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.verify_installed_repositories( installed_repositories )
+        filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
+        self.deactivate_repository( filtering_repository )
+        strings_displayed = [ 'freebayes_0090' ]
+        strings_not_displayed = [ 'filtering_0090', 'emboss_0090', 'emboss_datatypes_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0055_install_uninstall_datatypes_repository( self ):
+        '''Install and uninstall emboss_datatypes.'''
+        # After this test, the repositories should be in the following states:
+        # Installed: freebayes
+        # Deactivated: filtering
+        # Uninstalled: emboss_datatypes
+        # Never installed: emboss, bwa_color, bwa_base
+        self.install_repository( emboss_datatypes_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 includes_tools_for_display_in_tool_panel=False )
+        installed_repositories = [ ( emboss_datatypes_repository_name, common.test_user_1_name ),
+                                   ( freebayes_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'emboss_datatypes_0090', 'freebayes_0090' ]
+        strings_not_displayed = [ 'filtering_0090', 'emboss_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+        self.verify_installed_repositories( installed_repositories )
+        datatypes_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        self.uninstall_repository( datatypes_repository )
+        strings_displayed = [ 'freebayes_0090' ]
+        strings_not_displayed = [ 'emboss_datatypes_0090', 'filtering_0090', 'emboss_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+
+    def test_0060_search_for_bwa_tools( self ):
+        '''Search for and install the repositories with BWA tools, and verify that this reinstalls emboss_datatypes and reactivates filtering.'''
+        bwa_color_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
+        bwa_base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        bwa_base_revision = self.get_repository_tip( bwa_base_repository )
+        bwa_color_revision = self.get_repository_tip( bwa_color_repository )
+        self.search_for_valid_tools( search_fields={ 'tool_id': 'bwa' },
+                                     exact_matches=False, from_galaxy=True,
+                                     strings_displayed=[ 'bwa_color_0090', 'bwa_base_0090', bwa_base_revision, bwa_color_revision ] )
+        strings_displayed = [ 'freebayes_0090', 'emboss_0090', 'filtering_0090' ]
+        strings_displayed.extend( [ 'bwa_color_0090', 'bwa_base_0090' ] )
+        strings_displayed.extend( [ 'bwa', 'Handle', 'tool dependencies' ] )
+        repositories_to_install = [ bwa_color_repository, bwa_base_repository ]
+        # BWA is a good candidate for testing the installation of tool dependencies, but it is a core requirement of functional
+        # tests that they be able to run independently of any network connection or remote data, so tool dependencies are not installed here.
+        #
+        # After this test, the repositories should be in the following state:
+        # Installed: bwa_color, bwa_base, emboss_datatypes, emboss, filtering, freebayes
+        self.install_repositories_from_search_results( repositories_to_install,
+                                                       install_repository_dependencies='True',
+                                                       install_tool_dependencies=False,
+                                                       new_tool_panel_section_label='bwa_1090',
+                                                       strings_displayed=strings_displayed )
+
+        installed_repositories = [ ( emboss_repository_name, common.test_user_1_name ),
+                                   ( filtering_repository_name, common.test_user_1_name ),
+                                   ( bwa_color_repository_name, common.test_user_1_name ),
+                                   ( bwa_base_repository_name, common.test_user_1_name ),
+                                   ( emboss_datatypes_repository_name, common.test_user_1_name ),
+                                   ( freebayes_repository_name, common.test_user_1_name ) ]
+        strings_displayed = [ 'emboss_datatypes_0090', 'filtering_0090', 'emboss_0090', 'bwa_color_0090', 'bwa_base_0090', 'freebayes_0090' ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        self.verify_installed_repositories( installed_repositories )
diff --git a/test/shed_functional/functional/test_1120_install_repository_with_complex_dependencies.py b/test/shed_functional/functional/test_1120_install_repository_with_complex_dependencies.py
new file mode 100644
index 0000000..65a8bb3
--- /dev/null
+++ b/test/shed_functional/functional/test_1120_install_repository_with_complex_dependencies.py
@@ -0,0 +1,282 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+bwa_base_repository_name = 'bwa_base_repository_0100'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "BWA tool that depends on bwa 0.5.9, with a complex repository dependency pointing at package_bwa_0_5_9_0100"
+
+bwa_package_repository_name = 'package_bwa_0_5_9_0100'
+bwa_package_repository_description = "BWA Package"
+bwa_package_repository_long_description = "BWA repository with a package tool dependency defined to compile and install BWA 0.5.9."
+
+category_name = 'Test 0100 Complex Repository Dependencies'
+category_description = 'Test 0100 Complex Repository Dependencies'
+running_standalone = False
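+
+# For reference, a complex repository dependency is declared in tool_dependencies.xml rather than in
+# repository_dependencies.xml. The fixtures under bwa/complex exercised below are expected to look
+# roughly like this sketch (illustrative only; the real owner and changeset_revision are substituted
+# at test time):
+#
+# <?xml version="1.0"?>
+# <tool_dependency>
+#     <package name="bwa" version="0.5.9">
+#         <repository name="package_bwa_0_5_9_0100" owner="<owner>" changeset_revision="<tip>" />
+#     </package>
+# </tool_dependency>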
+
+
+class TestInstallingComplexRepositoryDependencies( ShedTwillTestCase ):
+    '''Test features related to installing repositories with complex repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_bwa_package_repository( self ):
+        '''Create and populate package_bwa_0_5_9_0100.'''
+        global running_standalone
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=bwa_package_repository_name,
+                                                    description=bwa_package_repository_description,
+                                                    long_description=bwa_package_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            old_tool_dependency = self.get_filename( os.path.join( 'bwa', 'complex', 'tool_dependencies.xml' ) )
+            new_tool_dependency_path = self.generate_temp_path( 'test_1100', additional_paths=[ 'tool_dependency' ] )
+            xml_filename = os.path.abspath( os.path.join( new_tool_dependency_path, 'tool_dependencies.xml' ) )
+            with open( old_tool_dependency, 'r' ) as old_file:
+                original_xml = old_file.read()
+            with open( xml_filename, 'w' ) as new_file:
+                new_file.write( original_xml.replace( '__PATH__', self.get_filename( 'bwa/complex' ) ) )
+            self.upload_file( repository,
+                              filename=xml_filename,
+                              filepath=new_tool_dependency_path,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool_dependencies.xml.',
+                              strings_displayed=[ 'This repository currently contains a single file named <b>tool_dependencies.xml</b>' ],
+                              strings_not_displayed=[] )
+            self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'consider setting its type' ] )
+
+    def test_0010_create_bwa_base_repository( self ):
+        '''Create and populate bwa_base_repository_0100.'''
+        global running_standalone
+        if running_standalone:
+            category = self.create_category( name=category_name, description=category_description )
+            self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+            repository = self.get_or_create_repository( name=bwa_base_repository_name,
+                                                        description=bwa_base_repository_description,
+                                                        long_description=bwa_base_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+            self.upload_file( repository,
+                              filename='bwa/complex/bwa_base.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_generate_complex_repository_dependency_invalid_shed_url( self ):
+        '''Generate and upload a complex repository definition that specifies an invalid tool shed URL.'''
+        global running_standalone
+        if running_standalone:
+            dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+            base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+            tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+            url = 'http://http://this is not a URL!'
+            name = tool_repository.name
+            owner = tool_repository.user.username
+            changeset_revision = self.get_repository_tip( tool_repository )
+            strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=base_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               strings_displayed=strings_displayed,
+                                               complex=True,
+                                               package='bwa',
+                                               version='0.5.9' )
+
+    def test_0020_generate_complex_repository_dependency_invalid_repository_name( self ):
+        '''Generate and upload a complex repository definition that specifies an invalid repository name.'''
+        global running_standalone
+        if running_standalone:
+            dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+            base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+            tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+            url = self.url
+            name = 'invalid_repository!?'
+            owner = tool_repository.user.username
+            changeset_revision = self.get_repository_tip( tool_repository )
+            strings_displayed = [ 'because the name is invalid.' ]
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=base_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               strings_displayed=strings_displayed,
+                                               complex=True,
+                                               package='bwa',
+                                               version='0.5.9' )
+
+    def test_0025_generate_complex_repository_dependency_invalid_owner_name( self ):
+        '''Generate and upload a complex repository definition that specifies an invalid owner.'''
+        global running_standalone
+        if running_standalone:
+            dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+            base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+            tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+            url = self.url
+            name = tool_repository.name
+            owner = 'invalid_owner!?'
+            changeset_revision = self.get_repository_tip( tool_repository )
+            strings_displayed = [ 'because the owner is invalid.' ]
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=base_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               strings_displayed=strings_displayed,
+                                               complex=True,
+                                               package='bwa',
+                                               version='0.5.9' )
+
+    def test_0030_generate_complex_repository_dependency_invalid_changeset_revision( self ):
+        '''Generate and upload a complex repository definition that specifies an invalid changeset revision.'''
+        global running_standalone
+        if running_standalone:
+            dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+            base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+            tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+            url = self.url
+            name = tool_repository.name
+            owner = tool_repository.user.username
+            changeset_revision = '1234abcd'
+            strings_displayed = [ 'because the changeset revision is invalid.' ]
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=base_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               strings_displayed=strings_displayed,
+                                               complex=True,
+                                               package='bwa',
+                                               version='0.5.9' )
+
+    def test_0035_generate_valid_complex_repository_dependency( self ):
+        '''Generate and upload a valid tool_dependencies.xml file that specifies package_bwa_0_5_9_0100.'''
+        global running_standalone
+        if running_standalone:
+            base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+            tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+            dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex' ] )
+            url = self.url
+            name = tool_repository.name
+            owner = tool_repository.user.username
+            changeset_revision = self.get_repository_tip( tool_repository )
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=base_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               complex=True,
+                                               package='bwa',
+                                               version='0.5.9' )
+            self.check_repository_dependency( base_repository, tool_repository )
+            self.display_manage_repository_page( base_repository, strings_displayed=[ 'bwa', '0.5.9', 'package' ] )
+
+    def test_0040_update_tool_repository( self ):
+        '''Upload a new tool_dependencies.xml to the tool repository, and verify that the base repository displays the new changeset.'''
+        global running_standalone
+        if running_standalone:
+            base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+            tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+            previous_changeset = self.get_repository_tip( tool_repository )
+            old_tool_dependency = self.get_filename( os.path.join( 'bwa', 'complex', 'readme', 'tool_dependencies.xml' ) )
+            new_tool_dependency_path = self.generate_temp_path( 'test_1100', additional_paths=[ 'tool_dependency' ] )
+            xml_filename = os.path.abspath( os.path.join( new_tool_dependency_path, 'tool_dependencies.xml' ) )
+            with open( old_tool_dependency, 'r' ) as old_file:
+                original_xml = old_file.read()
+            with open( xml_filename, 'w' ) as new_file:
+                new_file.write( original_xml.replace( '__PATH__', self.get_filename( 'bwa/complex' ) ) )
+            self.upload_file( tool_repository,
+                              filename=xml_filename,
+                              filepath=new_tool_dependency_path,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded new tool_dependencies.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            # Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file.
+            self.display_manage_repository_page( base_repository,
+                                                 strings_displayed=[ self.get_repository_tip( tool_repository ), 'bwa', '0.5.9', 'package' ],
+                                                 strings_not_displayed=[ previous_changeset ] )
+
+    def test_0045_install_base_repository( self ):
+        '''Verify installation of the repository with complex repository dependencies.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        base_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( bwa_package_repository_name, common.test_user_1_name )
+        preview_strings_displayed = [ tool_repository.name, self.get_repository_tip( tool_repository ) ]
+        self.install_repository( bwa_base_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=True,
+                                 preview_strings_displayed=preview_strings_displayed,
+                                 post_submit_strings_displayed=[ base_repository.name, tool_repository.name, 'New' ],
+                                 includes_tools_for_display_in_tool_panel=True )
+
+    def test_0050_verify_installed_repositories( self ):
+        '''Verify that the installed repositories are displayed properly.'''
+        base_repository = self.test_db_util.get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name )
+        tool_repository = self.test_db_util.get_installed_repository_by_name_owner( bwa_package_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'bwa_base_repository_0100', 'user1', base_repository.installed_changeset_revision ]
+        strings_displayed.extend( [ 'package_bwa_0_5_9_0100', 'user1', tool_repository.installed_changeset_revision ] )
+        strings_displayed.append( self.url.replace( 'http://', '' ) )
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=[] )
+        strings_displayed = [ 'package_bwa_0_5_9_0100', 'user1', tool_repository.installed_changeset_revision ]
+        strings_not_displayed = [ 'Missing tool dependencies' ]
+        self.display_installed_repository_manage_page( tool_repository,
+                                                       strings_displayed=strings_displayed,
+                                                       strings_not_displayed=strings_not_displayed )
+        strings_displayed = [ 'bwa_base_repository_0100',
+                              'user1',
+                              'package_bwa_0_5_9_0100',
+                              base_repository.installed_changeset_revision,
+                              tool_repository.installed_changeset_revision ]
+        strings_not_displayed = [ 'Missing tool dependencies' ]
+        self.display_installed_repository_manage_page( base_repository,
+                                                       strings_displayed=strings_displayed,
+                                                       strings_not_displayed=strings_not_displayed )
+
+    def test_0055_verify_complex_tool_dependency( self ):
+        '''Verify that the generated env.sh contains the right data.'''
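+        # Under these assumptions, env.sh is a small shell fragment written at install time whose
+        # export lines embed the dependency repository's name and installed changeset revision,
+        # e.g. (illustrative only):
+        #   PATH=<tool_dependency_dir>/bwa/0.5.9/<owner>/package_bwa_0_5_9_0100/<revision>/bin:$PATH; export PATH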
+        base_repository = self.test_db_util.get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name )
+        tool_repository = self.test_db_util.get_installed_repository_by_name_owner( bwa_package_repository_name, common.test_user_1_name )
+        env_sh_path = self.get_env_sh_path( tool_dependency_name='bwa',
+                                            tool_dependency_version='0.5.9',
+                                            repository=base_repository )
+        assert os.path.exists( env_sh_path ), 'env.sh was not generated in %s for this dependency.' % env_sh_path
+        with open( env_sh_path, 'r' ) as env_sh_file:
+            contents = env_sh_file.read()
+        if tool_repository.installed_changeset_revision not in contents:
+            raise AssertionError( 'Installed changeset revision %s not found in env.sh.\nContents of env.sh: %s' %
+                                  ( tool_repository.installed_changeset_revision, contents ) )
+        if 'package_bwa_0_5_9_0100' not in contents:
+            raise AssertionError( 'Repository name package_bwa_0_5_9_0100 not found in env.sh.\nContents of env.sh: %s' % contents )
+
+    def test_0060_verify_tool_dependency_uninstallation( self ):
+        '''
+        Deactivate the repository that defines a tool dependency relationship on BWA 0.5.9, and verify
+        that the compiled dependency's env.sh remains on disk, since deactivation does not remove installed files.
+        '''
+        tool_repository = self.test_db_util.get_installed_repository_by_name_owner( bwa_package_repository_name, common.test_user_1_name )
+        self.deactivate_repository( tool_repository )
+        env_sh_path = os.path.join( self.galaxy_tool_dependency_dir,
+                                    'bwa',
+                                    '0.5.9',
+                                    tool_repository.owner,
+                                    tool_repository.name,
+                                    tool_repository.installed_changeset_revision,
+                                    'env.sh' )
+        assert os.path.exists( env_sh_path ), 'Path %s does not exist after deactivating the repository that generated it.' % env_sh_path
diff --git a/test/shed_functional/functional/test_1130_install_repository_with_invalid_repository_dependency.py b/test/shed_functional/functional/test_1130_install_repository_with_invalid_repository_dependency.py
new file mode 100644
index 0000000..c1e5670
--- /dev/null
+++ b/test/shed_functional/functional/test_1130_install_repository_with_invalid_repository_dependency.py
@@ -0,0 +1,175 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0110'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+
+emboss_repository_name = 'emboss_0110'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+category_name = 'Test 0110 Invalid Repository Dependencies'
+category_desc = 'Test 0110 Invalid Repository Dependencies'
+running_standalone = False
+
+
+class TestBasicRepositoryDependencies( ShedTwillTestCase ):
+    '''Test installation of emboss 5 with invalid repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_category( self ):
+        """Create a category for this test suite"""
+        self.create_category( name=category_name, description=category_desc )
+
+    def test_0010_create_emboss_datatypes_repository_and_upload_tarball( self ):
+        '''Create and populate the emboss_datatypes repository.'''
+        global running_standalone
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                             description=datatypes_repository_description,
+                                             long_description=datatypes_repository_long_description,
+                                             owner=common.test_user_1_name,
+                                             category_id=self.security.encode_id( category.id ),
+                                             strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_verify_datatypes_in_datatypes_repository( self ):
+        '''Verify that the emboss_datatypes repository contains datatype entries.'''
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+        self.display_manage_repository_page( repository, strings_displayed=[ 'Datatypes', 'equicktandem', 'hennig86', 'vectorstrip' ] )
+
+    def test_0020_create_emboss_5_repository_and_upload_files( self ):
+        '''Create and populate the emboss_0110 repository.'''
+        global running_standalone
+        if running_standalone:
+            category = self.test_db_util.get_category_by_name( category_name )
+            repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                 description=emboss_repository_description,
+                                                 long_description=emboss_repository_long_description,
+                                                 owner=common.test_user_1_name,
+                                                 category_id=self.security.encode_id( category.id ),
+                                                 strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss tool tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0025_generate_repository_dependency_with_invalid_url( self ):
+        '''Generate a repository dependency for emboss 5 with an invalid URL.'''
+        global running_standalone
+        if running_standalone:
+            dependency_path = self.generate_temp_path( 'test_1110', additional_paths=[ 'simple' ] )
+            datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+            emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+            url = 'http://http://this is not a URL!'
+            name = datatypes_repository.name
+            owner = datatypes_repository.user.username
+            changeset_revision = self.get_repository_tip( datatypes_repository )
+            strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=emboss_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               strings_displayed=strings_displayed,
+                                               complex=False )
+
+    def test_0030_generate_repository_dependency_with_invalid_name( self ):
+        '''Generate a repository dependency for emboss 5 with an invalid name.'''
+        global running_standalone
+        if running_standalone:
+            dependency_path = self.generate_temp_path( 'test_1110', additional_paths=[ 'simple' ] )
+            repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+            emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+            url = self.url
+            name = '!?invalid?!'
+            owner = repository.user.username
+            changeset_revision = self.get_repository_tip( repository )
+            strings_displayed = [ 'because the name is invalid.' ]
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=emboss_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               strings_displayed=strings_displayed,
+                                               complex=False )
+
+    def test_0035_generate_repository_dependency_with_invalid_owner( self ):
+        '''Generate a repository dependency for emboss 5 with an invalid owner.'''
+        global running_standalone
+        if running_standalone:
+            dependency_path = self.generate_temp_path( 'test_1110', additional_paths=[ 'simple' ] )
+            repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+            emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+            url = self.url
+            name = repository.name
+            owner = '!?invalid?!'
+            changeset_revision = self.get_repository_tip( repository )
+            strings_displayed = [ 'because the owner is invalid.' ]
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=emboss_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               strings_displayed=strings_displayed,
+                                               complex=False )
+
+    def test_0040_generate_repository_dependency_with_invalid_changeset_revision( self ):
+        '''Generate a repository dependency for emboss 5 with an invalid changeset revision.'''
+        global running_standalone
+        if running_standalone:
+            dependency_path = self.generate_temp_path( 'test_1110', additional_paths=[ 'simple', 'invalid' ] )
+            repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+            emboss_repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+            url = self.url
+            name = repository.name
+            owner = repository.user.username
+            changeset_revision = '!?invalid?!'
+            strings_displayed = [ 'because the changeset revision is invalid.' ]
+            repository_tuple = ( url, name, owner, changeset_revision )
+            self.create_repository_dependency( repository=emboss_repository,
+                                               filepath=dependency_path,
+                                               repository_tuples=[ repository_tuple ],
+                                               strings_displayed=strings_displayed,
+                                               complex=False )
+
+    def test_0045_install_repository_with_invalid_repository_dependency( self ):
+        '''Install the repository and verify that Galaxy detects invalid repository dependencies.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        repository = self.test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        preview_strings_displayed = [ 'emboss_0110', self.get_repository_tip( repository ), 'Ignoring repository dependency definition' ]
+        self.install_repository( emboss_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 preview_strings_displayed=preview_strings_displayed,
+                                 post_submit_strings_displayed=[ repository.name, repository.name, 'New' ],
+                                 includes_tools_for_display_in_tool_panel=True )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.display_installed_repository_manage_page( installed_repository=installed_repository,
+                                                       strings_displayed=[],
+                                                       strings_not_displayed=[ 'Repository dependencies' ] )
diff --git a/test/shed_functional/functional/test_1140_simple_repository_dependency_multiple_owners.py b/test/shed_functional/functional/test_1140_simple_repository_dependency_multiple_owners.py
new file mode 100644
index 0000000..d496694
--- /dev/null
+++ b/test/shed_functional/functional/test_1140_simple_repository_dependency_multiple_owners.py
@@ -0,0 +1,194 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'blast_datatypes_0120'
+datatypes_repository_description = 'Galaxy applicable datatypes for BLAST'
+datatypes_repository_long_description = 'Galaxy datatypes for the BLAST top hit descriptions tool'
+
+tool_repository_name = 'blastxml_to_top_descr_0120'
+tool_repository_description = 'BLAST top hit descriptions'
+tool_repository_long_description = 'Make a table from BLAST XML'
+
+'''
+Tool shed side:
+
+1) Create and populate blast_datatypes_0120.
+1a) Check for appropriate strings.
+2) Create and populate blastxml_to_top_descr_0120.
+2a) Check for appropriate strings.
+3) Upload to blastxml_to_top_descr_0120 a repository_dependencies.xml file (sketched below) that defines a relationship to blast_datatypes_0120.
+3a) Check for appropriate strings.
+
+Galaxy side:
+
+1) Install blastxml_to_top_descr_0120, with repository dependencies.
+1a) Check for appropriate strings in the installed blastxml_to_top_descr_0120 and blast_datatypes_0120 repositories.
+'''
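+
+# For reference, a minimal sketch of the repository_dependencies.xml file uploaded in
+# step 3 (values are illustrative; the test builds the real file with
+# create_repository_dependency, and the changeset revision is resolved at run time):
+#
+# <?xml version="1.0"?>
+# <repositories description="Datatypes required by the BLAST top hit descriptions tool.">
+#     <repository toolshed="http://localhost:9009" name="blast_datatypes_0120"
+#                 owner="user2" changeset_revision="<tip>" />
+# </repositories>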
+
+base_datatypes_count = 0
+repository_datatypes_count = 0
+running_standalone = False
+
+
+class TestInstallRepositoryMultipleOwners( ShedTwillTestCase ):
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        """
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+        Previously created accounts will not be re-created.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_datatypes_repository( self ):
+        """Create and populate the blast_datatypes_0120 repository"""
+        """
+        We are at step 1.
+        Create and populate blast_datatypes.
+        """
+        category = self.create_category( name='Test 0120', description='Description of test 0120' )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        strings_displayed = self.expect_repo_created_strings( datatypes_repository_name )
+        repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                    description=datatypes_repository_description,
+                                                    long_description=datatypes_repository_long_description,
+                                                    owner=common.test_user_2_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='blast/blast_datatypes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded blast_datatypes tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_verify_datatypes_repository( self ):
+        '''Verify the blast_datatypes_0120 repository.'''
+        '''
+        We are at step 1a.
+        Check for appropriate strings, most importantly BlastXml, BlastNucDb, and BlastProtDb,
+        the datatypes that are defined in datatypes_conf.xml.
+        '''
+        global repository_datatypes_count
+        repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+        strings_displayed = [ 'BlastXml', 'BlastNucDb', 'BlastProtDb', 'application/xml', 'text/html', 'blastxml', 'blastdbn', 'blastdbp' ]
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        repository_datatypes_count = int( self.get_repository_datatypes_count( repository ) )
+
+    def test_0015_create_tool_repository( self ):
+        """Create and populate the blastxml_to_top_descr_0120 repository"""
+        """
+        We are at step 2.
+        Create and populate blastxml_to_top_descr_0120.
+        """
+        global running_standalone
+        category = self.create_category( name='Test 0120', description='Description of test 0120' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        strings_displayed = self.expect_repo_created_strings( tool_repository_name )
+        repository = self.get_or_create_repository( name=tool_repository_name,
+                                                    description=tool_repository_description,
+                                                    long_description=tool_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=strings_displayed )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='blast/blastxml_to_top_descr.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded blastxml_to_top_descr tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0020_verify_tool_repository( self ):
+        '''Verify the blastxml_to_top_descr_0120 repository.'''
+        '''
+        We are at step 2a.
+        Check for appropriate strings, such as tool name, description, and version.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'blastxml_to_top_descr_0120', 'BLAST top hit descriptions', 'Make a table from BLAST XML' ]
+        strings_displayed.extend( [ '0.0.1', 'Valid tools' ] )
+        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+
+    def test_0025_create_repository_dependency( self ):
+        '''Create a repository dependency on blast_datatypes_0120.'''
+        '''
+        We are at step 3.
+        Create a simple repository dependency for blastxml_to_top_descr_0120 that defines a dependency on blast_datatypes_0120.
+        '''
+        global running_standalone
+        if running_standalone:
+            datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+            tool_repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+            dependency_xml_path = self.generate_temp_path( 'test_1120', additional_paths=[ 'dependencies' ] )
+            datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=tool_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path )
+
+    def test_0040_verify_repository_dependency( self ):
+        '''Verify the created repository dependency.'''
+        '''
+        We are at step 3a.
+        Check the newly created repository dependency to ensure that it was defined and displays correctly.
+        '''
+        datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
+        tool_repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( tool_repository, datatypes_repository )
+
+    def test_0045_install_blastxml_to_top_descr( self ):
+        '''Install the blastxml_to_top_descr_0120 repository to Galaxy.'''
+        '''
+        We are at step 1, Galaxy side.
+        Install blastxml_to_top_descr_0120 to Galaxy, with repository dependencies, so that the datatypes repository is also installed.
+        '''
+        global base_datatypes_count
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        base_datatypes_count = int( self.get_datatypes_count() )
+        post_submit_strings_displayed = [ 'blastxml_to_top_descr_0120', 'blast_datatypes_0120', 'New' ]
+        self.install_repository( name='blastxml_to_top_descr_0120',
+                                 owner=common.test_user_1_name,
+                                 category_name='Test 0120',
+                                 install_repository_dependencies=True,
+                                 post_submit_strings_displayed=post_submit_strings_displayed,
+                                 new_tool_panel_section_label='Test 0120' )
+
+    def test_0050_verify_repository_installation( self ):
+        '''Verify installation of blastxml_to_top_descr_0120 and blast_datatypes_0120.'''
+        '''
+        We are at step 1a, Galaxy side.
+        Check that the blastxml_to_top_descr_0120 and blast_datatypes_0120 repositories installed correctly, and that there
+        are now new datatypes in the registry matching the ones defined in blast_datatypes_0120. Also check that
+        blast_datatypes_0120 is labeled as an installed repository dependency of blastxml_to_top_descr_0120.
+        '''
+        global repository_datatypes_count
+        global base_datatypes_count
+        tool_repository = self.test_db_util.get_installed_repository_by_name_owner( tool_repository_name, common.test_user_1_name )
+        datatypes_repository = self.test_db_util.get_installed_repository_by_name_owner( datatypes_repository_name, common.test_user_2_name )
+        current_datatypes = int( self.get_datatypes_count() )
+        expected_count = base_datatypes_count + repository_datatypes_count
+        # Once the BLAST datatypes have been included in Galaxy itself, the count won't change
+        assert current_datatypes == base_datatypes_count or current_datatypes == expected_count, \
+            'Installing %s did not add new datatypes. Expected: %d. Found: %d' % \
+            ( 'blastxml_to_top_descr_0120', expected_count, current_datatypes )
+        strings_displayed = [ 'Installed repository dependencies', 'user1', 'blast_datatypes_0120' ]
+        strings_displayed.extend( [ 'Valid tools', 'BLAST top hit', 'Make a table', datatypes_repository.installed_changeset_revision ] )
+        self.display_installed_repository_manage_page( tool_repository, strings_displayed=strings_displayed )
+        strings_displayed = [ 'Datatypes', 'blastxml', 'blastdbp', 'blastdbn', 'BlastXml', 'BlastNucDb', 'BlastProtDb' ]
+        strings_displayed.extend( [ 'application/xml', 'text/html' ] )
+        self.display_installed_repository_manage_page( datatypes_repository, strings_displayed=strings_displayed )
diff --git a/test/shed_functional/functional/test_1150_datatype_converters.py b/test/shed_functional/functional/test_1150_datatype_converters.py
new file mode 100644
index 0000000..11e5a4b
--- /dev/null
+++ b/test/shed_functional/functional/test_1150_datatype_converters.py
@@ -0,0 +1,88 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'bed_to_gff_0130'
+repository_description = "Converter: BED to GFF"
+repository_long_description = "Convert bed to gff"
+
+category_name = 'Test 0130 Datatype Converters'
+category_description = 'Test 0130 Datatype Converters'
+
+'''
+1) Install the bed_to_gff_converter repository.
+2) Make sure the page section to select a tool panel section is NOT displayed since the tool will not be displayed in the Galaxy tool panel.
+3) Make sure the bed_to_gff_converter tool is not displayed in the tool panel.
+'''
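+
+# For reference, datatype converters are declared inside a <datatype> element of
+# datatypes_conf.xml roughly as sketched below (element names follow Galaxy's
+# datatypes_conf.xml.sample; the file attribute here is illustrative):
+#
+# <datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true">
+#     <converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
+# </datatype>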
+
+
+class TestDatatypeConverters( ShedTwillTestCase ):
+    '''Test features related to datatype converters.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_bed_to_gff_repository( self ):
+        '''Create and populate bed_to_gff_0130.'''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        # Create a repository named bed_to_gff_0130 owned by user1.
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            # Upload bed_to_gff_converter.tar to the repository, if the repository is new.
+            self.upload_file( repository,
+                              filename='bed_to_gff_converter/bed_to_gff_converter.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded bed_to_gff_converter.tar.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_install_datatype_converter_to_galaxy( self ):
+        '''Install bed_to_gff_0130 into the running Galaxy instance.'''
+        '''
+        We are at step 1 - Install the bed_to_gff_converter repository.
+        Install bed_to_gff_0130, checking that the option to select the tool panel section is *not* displayed.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        preview_strings_displayed = [ repository.name, self.get_repository_tip( repository ) ]
+        strings_displayed = [ 'Choose the configuration file' ]
+        strings_not_displayed = [ 'tool panel section' ]
+        self.install_repository( repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 preview_strings_displayed=preview_strings_displayed,
+                                 strings_displayed=strings_displayed,
+                                 strings_not_displayed=strings_not_displayed,
+                                 post_submit_strings_displayed=[ repository.name, 'New' ],
+                                 includes_tools_for_display_in_tool_panel=False )
+
+    def test_0015_uninstall_and_verify_tool_panel_section( self ):
+        '''Uninstall bed_to_gff_0130 and verify that the saved tool_panel_section is None.'''
+        '''
+        We are at step 3 - Make sure the bed_to_gff_converter tool is not displayed in the tool panel.
+        The previous tool panel section for a tool is only recorded in the metadata when a repository is uninstalled,
+        so we have to uninstall it first, then verify that it was not assigned a tool panel section.
+        '''
+        repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+        self.uninstall_repository( repository )
+        self.verify_installed_repository_no_tool_panel_section( repository )
diff --git a/test/shed_functional/functional/test_1160_tool_help_images.py b/test/shed_functional/functional/test_1160_tool_help_images.py
new file mode 100644
index 0000000..ac89fa7
--- /dev/null
+++ b/test/shed_functional/functional/test_1160_tool_help_images.py
@@ -0,0 +1,79 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+repository_name = 'htseq_count_0140'
+repository_description = "Converter: BED to GFF"
+repository_long_description = "Convert bed to gff"
+
+category_name = 'Test 0140 Tool Help Images'
+category_description = 'Test 0140 Tool Help Images'
+
+'''
+1) Create and populate the htseq_count_0140 repository.
+2) Visit the manage_repository page, then the tool page, and look for an image string
+similar to the following, where <id> is the encoded repository id:
+
+src="/repository/static/images/<id>/count_modes.png"
+'''
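+
+# For reference, an image string like the one above typically originates from a
+# reStructuredText image directive in the tool's <help> section, which the tool shed
+# rewrites to the /repository/static/images/<id>/ URL when rendering (a sketch,
+# assuming the tool's help references the file by its bare name):
+#
+# <help>
+# .. image:: count_modes.png
+# </help>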
+
+
+class TestToolHelpImages( ShedTwillTestCase ):
+    '''Test features related to tool help images.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_htseq_count_repository( self ):
+        '''Create and populate htseq_count_0140.'''
+        '''
+        We are at step 1 - Create and populate the htseq_count_0140 repository.
+        Create the htseq_count_0140 repository and upload the tarball.
+        '''
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        # Create a repository named htseq_count_0140 owned by user1.
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            # Upload htseq_count.tar to the repository if it hasn't already been populated.
+            self.upload_file( repository,
+                              filename='htseq_count/htseq_count.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded htseq_count.tar.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_load_tool_page( self ):
+        '''Load the tool page and check for the image URL.'''
+        '''
+        This is a duplicate of test method _0010 in test_0140_tool_help_images.
+        '''
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        # Get the repository tip.
+        changeset_revision = self.get_repository_tip( repository )
+        self.display_manage_repository_page( repository )
+        # Generate the image path.
+        image_path = 'src="/repository/static/images/%s/count_modes.png"' % self.security.encode_id( repository.id )
+        # The repository uploaded in this test should only have one metadata revision, with one tool defined, which
+        # should be the tool that contains a link to the image.
+        repository_metadata = repository.metadata_revisions[ 0 ].metadata
+        tool_path = repository_metadata[ 'tools' ][ 0 ][ 'tool_config' ]
+        self.load_display_tool_page( repository, tool_path, changeset_revision, strings_displayed=[ image_path ], strings_not_displayed=[] )
diff --git a/test/shed_functional/functional/test_1170_prior_installation_required.py b/test/shed_functional/functional/test_1170_prior_installation_required.py
new file mode 100644
index 0000000..890c6b7
--- /dev/null
+++ b/test/shed_functional/functional/test_1170_prior_installation_required.py
@@ -0,0 +1,145 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+column_repository_name = 'column_maker_0150'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0150'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0150 Simple Prior Installation'
+category_description = 'Test 0150 Simple Prior Installation'
+
+'''
+Create column_maker and convert_chars.
+
+Column maker repository dependency:
+<repository toolshed="self.url" name="convert_chars" owner="test" changeset_revision="c3041382815c" prior_installation_required="True" />
+
+Verify display.
+
+Galaxy side:
+
+Install column_maker.
+Verify that convert_chars was installed first, contrary to the ordering that would be present without prior_installation_required.
+'''
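+
+# A minimal sketch of the ordering behavior under test (not Galaxy's implementation):
+# a repository marked prior_installation_required="True" must be installed before any
+# repository that depends on it.
+#
+# def order_for_installation( repos, prior_required ):
+#     # repos: iterable of repository names.
+#     # prior_required: dict mapping a repository to the set of repositories that
+#     # must be installed before it. Assumes the dependency graph is acyclic.
+#     ordered = []
+#     def visit( name ):
+#         for dep in prior_required.get( name, () ):
+#             if dep not in ordered:
+#                 visit( dep )
+#         if name not in ordered:
+#             ordered.append( name )
+#     for name in repos:
+#         visit( name )
+#     return ordered
+#
+# order_for_installation( [ 'column_maker', 'convert_chars' ],
+#                         { 'column_maker': set( [ 'convert_chars' ] ) } )
+# # -> [ 'convert_chars', 'column_maker' ]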
+
+running_standalone = False
+
+
+class TestSimplePriorInstallation( ShedTwillTestCase ):
+    '''Test features related to simple repository dependencies with prior installation required.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_convert_repository( self ):
+        '''Create and populate convert_chars_0150.'''
+        global running_standalone
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='convert_chars/convert_chars.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded convert_chars tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_create_column_repository( self ):
+        '''Create and populate column_maker_0150.'''
+        global running_standalone
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if running_standalone:
+            self.upload_file( repository,
+                              filename='column_maker/column_maker.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded column_maker tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_repository_dependency( self ):
+        '''Create a repository dependency specifying convert_chars.'''
+        '''
+        Column maker repository dependency:
+            <repository toolshed="self.url" name="convert_chars" owner="test" changeset_revision="<tip>" prior_installation_required="True" />
+        '''
+        global running_standalone
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        if running_standalone:
+            dependency_xml_path = self.generate_temp_path( 'test_1150', additional_paths=[ 'column' ] )
+            convert_tuple = ( self.url, convert_repository.name, convert_repository.user.username, self.get_repository_tip( convert_repository ) )
+            self.create_repository_dependency( repository=column_repository,
+                                               repository_tuples=[ convert_tuple ],
+                                               filepath=dependency_xml_path,
+                                               prior_installation_required=True )
+
+    def test_0020_verify_repository_dependency( self ):
+        '''Verify that the previously generated repository dependency displays correctly.'''
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( repository=column_repository,
+                                          depends_on_repository=convert_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+
+    def test_0025_install_column_repository( self ):
+        '''Install column_maker_0150.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        preview_strings_displayed = [ 'column_maker_0150', self.get_repository_tip( column_repository ) ]
+        strings_displayed = [ 'Choose the tool panel section' ]
+        self.install_repository( column_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 preview_strings_displayed=preview_strings_displayed,
+                                 strings_displayed=strings_displayed,
+                                 strings_not_displayed=[],
+                                 post_submit_strings_displayed=[ 'column_maker_0150', 'New' ],
+                                 includes_tools_for_display_in_tool_panel=True )
+
+    def test_0030_verify_installation_order( self ):
+        '''Verify that convert_chars_0150 was installed before column_maker_0150.'''
+        column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name )
+        # Column maker was selected for installation, so convert chars should have been installed first, as reflected by the update_time field.
+        assert column_repository.update_time > convert_repository.update_time, 'Error: column_maker_0150 shows an earlier update time than convert_chars_0150'
diff --git a/test/shed_functional/functional/test_1180_circular_prior_installation_required.py b/test/shed_functional/functional/test_1180_circular_prior_installation_required.py
new file mode 100644
index 0000000..6d9d2a8
--- /dev/null
+++ b/test/shed_functional/functional/test_1180_circular_prior_installation_required.py
@@ -0,0 +1,250 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+filter_repository_name = 'filtering_0160'
+filter_repository_description = "Galaxy's filtering tool for test 0160"
+filter_repository_long_description = "Long description of Galaxy's filtering tool for test 0160"
+
+column_repository_name = 'column_maker_0160'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_0160'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 0160 Simple Prior Installation'
+category_description = 'Test 0160 Simple Prior Installation'
+
+'''
+Create column_maker, filtering, and convert_chars.
+
+Column maker repository dependency:
+<repository toolshed="self.url" name="convert_chars" owner="test" changeset_revision="c3041382815c" prior_installation_required="True" />
+<repository toolshed="self.url" name="filtering" owner="test" changeset_revision="c3041382815c" prior_installation_required="True" />
+
+Convert chars repository dependency:
+<repository toolshed="self.url" name="column_maker" owner="test" changeset_revision="c3041382815c" prior_installation_required="True" />
+<repository toolshed="self.url" name="filtering" owner="test" changeset_revision="c3041382815c" prior_installation_required="True" />
+
+Filtering repository dependency:
+<repository toolshed="self.url" name="column_maker" owner="test" changeset_revision="c3041382815c" prior_installation_required="True" />
+<repository toolshed="self.url" name="convert_chars" owner="test" changeset_revision="c3041382815c" prior_installation_required="True" />
+
+Verify display.
+
+Galaxy side:
+
+Install filtering.
+Verify that convert_chars was installed first, contrary to the ordering that would be present without prior_installation_required.
+'''
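+
+# With circular dependencies as above, a naive depth-first ordering would recurse
+# forever; a resolver needs a guard such as the "visiting" set in this sketch
+# (illustrative only, not Galaxy's implementation):
+#
+# def order_with_cycles( repos, prior_required ):
+#     ordered, visiting = [], set()
+#     def visit( name ):
+#         if name in visiting or name in ordered:
+#             return
+#         visiting.add( name )
+#         for dep in prior_required.get( name, () ):
+#             visit( dep )
+#         visiting.discard( name )
+#         ordered.append( name )
+#     for name in repos:
+#         visit( name )
+#     return ordered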
+
+running_standalone = False
+
+
+class TestSimplePriorInstallation( ShedTwillTestCase ):
+    '''Test features related to circular repository dependencies with prior installation required.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_convert_repository( self ):
+        '''Create and populate convert_chars_0160.'''
+        global running_standalone
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=convert_repository_name,
+                                                    description=convert_repository_description,
+                                                    long_description=convert_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='convert_chars/convert_chars.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded convert_chars tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_create_column_repository( self ):
+        '''Create and populate column_maker_0160.'''
+        global running_standalone
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if running_standalone:
+            self.upload_file( repository,
+                              filename='column_maker/column_maker.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded column_maker tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_filtering_repository( self ):
+        '''Create and populate filtering_0160.'''
+        global running_standalone
+        category = self.create_category( name=category_name, description=category_description )
+        repository = self.get_or_create_repository( name=filter_repository_name,
+                                                    description=filter_repository_description,
+                                                    long_description=filter_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if running_standalone:
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0020_create_repository_dependency( self ):
+        '''Create circular repository dependencies among filtering, column_maker, and convert_chars.'''
+        '''
+        Each of the three repositories should depend on the other two, to make this as circular as possible.
+        '''
+        global running_standalone
+        filter_repository = self.test_db_util.get_repository_by_name_and_owner( filter_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        filter_revision = self.get_repository_tip( filter_repository )
+        column_revision = self.get_repository_tip( column_repository )
+        convert_revision = self.get_repository_tip( convert_repository )
+        if running_standalone:
+            dependency_xml_path = self.generate_temp_path( 'test_1160', additional_paths=[ 'column' ] )
+            column_tuple = ( self.url, column_repository.name, column_repository.user.username, column_revision )
+            convert_tuple = ( self.url, convert_repository.name, convert_repository.user.username, convert_revision )
+            filter_tuple = ( self.url, filter_repository.name, filter_repository.user.username, filter_revision )
+            self.create_repository_dependency( repository=column_repository,
+                                               repository_tuples=[ convert_tuple, filter_tuple ],
+                                               filepath=dependency_xml_path,
+                                               prior_installation_required=False )
+            self.create_repository_dependency( repository=convert_repository,
+                                               repository_tuples=[ column_tuple, filter_tuple ],
+                                               filepath=dependency_xml_path,
+                                               prior_installation_required=False )
+            self.create_repository_dependency( repository=filter_repository,
+                                               repository_tuples=[ convert_tuple, column_tuple ],
+                                               filepath=dependency_xml_path,
+                                               prior_installation_required=True )
+
+    def test_0025_verify_repository_dependency( self ):
+        '''Verify that the previously generated repository dependencies display correctly.'''
+        filter_repository = self.test_db_util.get_repository_by_name_and_owner( filter_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+        self.check_repository_dependency( repository=column_repository,
+                                          depends_on_repository=convert_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=column_repository,
+                                          depends_on_repository=filter_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=convert_repository,
+                                          depends_on_repository=column_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=convert_repository,
+                                          depends_on_repository=filter_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=filter_repository,
+                                          depends_on_repository=column_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+        self.check_repository_dependency( repository=filter_repository,
+                                          depends_on_repository=convert_repository,
+                                          depends_on_changeset_revision=None,
+                                          changeset_revision=None )
+
+    def test_0030_install_filtering_repository( self ):
+        '''Install the filtering_0160 repository.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        filter_repository = self.test_db_util.get_repository_by_name_and_owner( filter_repository_name, common.test_user_1_name )
+        preview_strings_displayed = [ 'filtering_0160', self.get_repository_tip( filter_repository ) ]
+        strings_displayed = [ 'Choose the tool panel section' ]
+        self.install_repository( filter_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 preview_strings_displayed=preview_strings_displayed,
+                                 strings_displayed=strings_displayed,
+                                 strings_not_displayed=[],
+                                 post_submit_strings_displayed=[ 'filtering_0160', 'New' ],
+                                 includes_tools_for_display_in_tool_panel=True )
+
+    def test_0035_verify_installation_order( self ):
+        '''Verify that convert_chars_0160 and column_maker_0160 were installed before filtering_0160.'''
+        filter_repository = self.test_db_util.get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name )
+        # Filtering was selected for installation, so convert chars and column maker should have been installed first.
+        assert filter_repository.update_time > convert_repository.update_time, 'Error: convert_chars_0160 shows a later update time than filtering_0160'
+        assert filter_repository.update_time > column_repository.update_time, 'Error: column_maker_0160 shows a later update time than filtering_0160'
+
+    def test_0040_deactivate_all_repositories( self ):
+        '''Uninstall convert_chars_0160, column_maker_0160, and filtering_0160.'''
+        filter_repository = self.test_db_util.get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name )
+        self.deactivate_repository( filter_repository )
+        self.deactivate_repository( column_repository )
+        self.deactivate_repository( convert_repository )
+
+    def test_0045_reactivate_filter_repository( self ):
+        '''Reinstall the filtering_0160 repository.'''
+        filter_repository = self.test_db_util.get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name )
+        self.reactivate_repository( filter_repository )
+        strings_displayed = [ 'filtering_0160',
+                              "Galaxy's filtering tool for test 0160",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              filter_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+        self.display_installed_repository_manage_page( filter_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( filter_repository )
+
+    def test_0050_verify_reinstallation_order( self ):
+        '''Verify that convert_chars_0160 and column_maker_0160 were reinstalled before filtering_0160.'''
+        # FIXME: this test is not covering any important behavior since repositories were only deactivated and not uninstalled.
+        filter_repository = self.test_db_util.get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name )
+        column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
+        convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name )
+        # Filtering was selected for reinstallation, so convert chars and column maker should have been installed first.
+        for repo in [ convert_repository, column_repository, filter_repository ]:
+            self.test_db_util.install_session.refresh( repo )
+        assert filter_repository.update_time > convert_repository.update_time, 'Prior installed convert_chars_0160 shows a later update time than filtering_0160'
+        assert filter_repository.update_time > column_repository.update_time, 'Prior installed column_maker_0160 shows a later update time than filtering_0160'
diff --git a/test/shed_functional/functional/test_1190_complex_prior_installation_required.py b/test/shed_functional/functional/test_1190_complex_prior_installation_required.py
new file mode 100644
index 0000000..6959771
--- /dev/null
+++ b/test/shed_functional/functional/test_1190_complex_prior_installation_required.py
@@ -0,0 +1,176 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+matplotlib_repository_name = 'package_matplotlib_1_2_0170'
+matplotlib_repository_description = "Contains a tool dependency definition that downloads and compiles version 1.2.x of the the python matplotlib package."
+matplotlib_repository_long_description = "This repository is intended to be defined as a complex repository dependency within a separate repository."
+
+numpy_repository_name = 'package_numpy_1_7_0170'
+numpy_repository_description = "Contains a tool dependency definition that downloads and compiles version 1.7 of the the python numpy package."
+numpy_repository_long_description = "This repository is intended to be defined as a complex repository dependency within a separate repository."
+
+category_name = 'Test 0170 Prior Installation Complex Dependencies'
+category_description = 'Test 0170 Prior Installation Complex Dependencies'
+
+'''
+1. Create and populate repositories package_matplotlib_1_2_0170 and package_numpy_1_7_0170.
+2. Create a complex repository dependency on package_numpy_1_7_0170, and upload this to package_matplotlib_1_2_0170.
+3. Verify that package_matplotlib_1_2_0170 now depends on package_numpy_1_7_0170, and that the inherited tool dependency displays correctly.
+4. Install package_matplotlib_1_2_0170 with repository dependencies.
+5. Verify that the prior_installation_required attribute resulted in package_numpy_1_7_0170 being installed first.
+'''
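+
+# For reference, a complex repository dependency embeds a <repository> element inside a
+# package definition in tool_dependencies.xml, roughly as sketched below (the toolshed
+# URL and changeset revision are illustrative and resolved at run time in step 2):
+#
+# <?xml version="1.0"?>
+# <tool_dependency>
+#     <package name="numpy" version="1.7">
+#         <repository toolshed="http://localhost:9009" name="package_numpy_1_7_0170"
+#                     owner="user1" changeset_revision="<tip>"
+#                     prior_installation_required="True" />
+#     </package>
+# </tool_dependency>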
+
+running_standalone = False
+
+
+class TestComplexPriorInstallation( ShedTwillTestCase ):
+    '''Test features related to complex repository dependencies with prior installation required.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_matplotlib_repository( self ):
+        '''Create and populate the package_matplotlib_1_2_0170 repository.'''
+        '''
+        This is step 1 - Create and populate repositories package_matplotlib_1_2_0170 and package_numpy_1_7_0170.
+        '''
+        global running_standalone
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=matplotlib_repository_name,
+                                                    description=matplotlib_repository_description,
+                                                    long_description=matplotlib_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            running_standalone = True
+            self.upload_file( repository,
+                              filename='package_matplotlib/package_matplotlib_1_2.tar',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded matplotlib tool dependency tarball.',
+                              strings_displayed=[ 'This repository currently contains a single file named <b>tool_dependencies.xml</b>' ],
+                              strings_not_displayed=[] )
+
+    def test_0010_create_numpy_repository( self ):
+        '''Create and populate the package_numpy_1_7_0170 repository.'''
+        '''
+        This is step 1 - Create and populate repositories package_matplotlib_1_2_0170 and package_numpy_1_7_0170.
+        '''
+        global running_standalone
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        if running_standalone:
+            repository = self.get_or_create_repository( name=numpy_repository_name,
+                                                        description=numpy_repository_description,
+                                                        long_description=numpy_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='package_numpy/package_numpy_1_7.tar',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded numpy tool dependency tarball.',
+                              strings_displayed=[ 'This repository currently contains a single file named <b>tool_dependencies.xml</b>' ],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_complex_repository_dependency( self ):
+        '''Create a dependency on package_numpy_1_7_0170.
+
+        This is step 2 - Create a complex repository dependency on package_numpy_1_7_0170, and upload this to package_matplotlib_1_2_0170.
+        package_matplotlib_1_2_0170 should depend on package_numpy_1_7_0170, with prior_installation_required
+        set to True. When matplotlib is selected for installation, the result should be that numpy is compiled
+        and installed first.
+        '''
+        global running_standalone
+        numpy_repository = self.test_db_util.get_repository_by_name_and_owner( numpy_repository_name, common.test_user_1_name )
+        matplotlib_repository = self.test_db_util.get_repository_by_name_and_owner( matplotlib_repository_name, common.test_user_1_name )
+        # Generate the new dependency XML. Normally, the create_repository_dependency method would be used for this, but
+        # it replaces any existing tool or repository dependency XML file with the generated contents. This is undesirable
+        # in this case, because matplotlib already has an additional tool dependency definition that we don't want to
+        # overwrite.
+        new_xml = '    <package name="numpy" version="1.7">\n'
+        new_xml += '        <repository toolshed="%s" name="%s" owner="%s" changeset_revision="%s" prior_installation_required="True" />\n'
+        new_xml += '    </package>\n'
+        url = self.url
+        name = numpy_repository.name
+        owner = numpy_repository.user.username
+        if running_standalone:
+            changeset_revision = self.get_repository_tip( numpy_repository )
+            processed_xml = new_xml % ( url, name, owner, changeset_revision )
+            with open( self.get_filename( 'package_matplotlib/tool_dependencies.xml' ), 'r' ) as original_xml_file:
+                original_xml = original_xml_file.read()
+            dependency_xml_path = self.generate_temp_path( 'test_0170', additional_paths=[ 'matplotlib' ] )
+            new_xml_file = os.path.join( dependency_xml_path, 'tool_dependencies.xml' )
+            with open( new_xml_file, 'w' ) as new_xml_handle:
+                new_xml_handle.write( original_xml.replace( '<!--NUMPY-->', processed_xml ) )
+            # Upload the generated complex repository dependency XML to the matplotlib repository.
+            self.upload_file( matplotlib_repository,
+                              filename='tool_dependencies.xml',
+                              filepath=dependency_xml_path,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded complex repository dependency on numpy 1.7.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0020_verify_generated_dependency( self ):
+        '''Verify that matplotlib now has a package tool dependency and a complex repository dependency.
+
+        This is step 3 - Verify that package_matplotlib_1_2_0170 now depends on package_numpy_1_7_0170, and that the inherited tool
+                         dependency displays correctly.
+        'Inherited' in this case means that matplotlib should show a package tool dependency on numpy version 1.7, and a repository
+        dependency on the latest revision of package_numpy_1_7_0170.
+        '''
+        numpy_repository = self.test_db_util.get_repository_by_name_and_owner( numpy_repository_name, common.test_user_1_name )
+        matplotlib_repository = self.test_db_util.get_repository_by_name_and_owner( matplotlib_repository_name, common.test_user_1_name )
+        changeset_revision = self.get_repository_tip( numpy_repository )
+        self.check_repository_dependency( matplotlib_repository, depends_on_repository=numpy_repository )
+        self.display_manage_repository_page( matplotlib_repository, strings_displayed=[ 'numpy', '1.7', 'package', changeset_revision ] )
+
+    def test_0025_install_matplotlib_repository( self ):
+        '''Install the package_matplotlib_1_2_0170 repository.
+
+        This is step 4 - Install package_matplotlib_1_2_0170 with repository dependencies.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        matplotlib_repository = self.test_db_util.get_repository_by_name_and_owner( matplotlib_repository_name, common.test_user_1_name )
+        preview_strings_displayed = [ 'package_matplotlib_1_2_0170', self.get_repository_tip( matplotlib_repository ) ]
+        strings_displayed = [ 'Handle repository dependencies', numpy_repository_name, 'Never installed' ]
+        self.install_repository( matplotlib_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True,
+                                 preview_strings_displayed=preview_strings_displayed,
+                                 strings_displayed=strings_displayed,
+                                 strings_not_displayed=[],
+                                 post_submit_strings_displayed=[ 'package_matplotlib_1_2_0170', 'New' ],
+                                 includes_tools_for_display_in_tool_panel=False )
+
+    def test_0030_verify_installation_order( self ):
+        '''Verify that the numpy repository was installed before the matplotlib repository.
+
+        This is step 5 - Verify that the prior_installation_required attribute resulted in package_numpy_1_7_0170 being installed first.
+        In the previous step, package_matplotlib_1_2_0170 was selected for installation, but package_numpy_1_7_0170 had the
+        prior_installation_required attribute set. Confirm that this resulted in package_numpy_1_7_0170 being installed before
+        package_matplotlib_1_2_0170.
+        '''
+        matplotlib_repository = self.test_db_util.get_installed_repository_by_name_owner( matplotlib_repository_name, common.test_user_1_name )
+        numpy_repository = self.test_db_util.get_installed_repository_by_name_owner( numpy_repository_name, common.test_user_1_name )
+        assert matplotlib_repository.update_time > numpy_repository.update_time, \
+            'Error: package_numpy_1_7_0170 shows a later update time than package_matplotlib_1_2_0170'
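+
+
+def _example_install_order( update_times ):
+    # Illustration only, not part of the upstream patch: a minimal sketch of
+    # the ordering check in test_0030 above. Given a dict mapping repository
+    # name to its update_time, return the names sorted oldest-first; with
+    # prior_installation_required in effect, package_numpy_1_7_0170 is
+    # expected to sort before package_matplotlib_1_2_0170.
+    return sorted( update_times, key=update_times.get )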
diff --git a/test/shed_functional/functional/test_1200_uninstall_and_reinstall_basic_repository.py b/test/shed_functional/functional/test_1200_uninstall_and_reinstall_basic_repository.py
new file mode 100644
index 0000000..b739e30
--- /dev/null
+++ b/test/shed_functional/functional/test_1200_uninstall_and_reinstall_basic_repository.py
@@ -0,0 +1,129 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+    '''Test uninstalling and reinstalling a basic repository.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 0000 category and upload the filtering repository to the tool shed, if necessary.'''
+        category = self.create_category( name='Test 0000 Basic Repository Features 1', description='Test 0000 Basic Repository Features 1' )
+        self.create_category( name='Test 0000 Basic Repository Features 2', description='Test 0000 Basic Repository Features 2' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name='filtering_0000',
+                                                    description="Galaxy's filtering tool for test 0000",
+                                                    long_description="Long description of Galaxy's filtering tool for test 0000",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_0000.txt',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded readme for 1.1.0',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_2.2.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 2.2.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='readme.txt',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded readme for 2.2.0',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_install_filtering_repository( self ):
+        '''Install the filtering repository into the Galaxy instance.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( 'filtering_0000',
+                                 common.test_user_1_name,
+                                 'Test 0000 Basic Repository Features 1',
+                                 new_tool_panel_section_label='test_1000' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+        strings_displayed = [ 'filtering_0000',
+                              "Galaxy's filtering tool for test 0000",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+
+    def test_0015_uninstall_filtering_repository( self ):
+        '''Uninstall the filtering repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+        self.uninstall_repository( installed_repository )
+        strings_not_displayed = [ 'filtering_0000',
+                                  "Galaxy's filtering tool for test 0000",
+                                  installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0020_reinstall_filtering_repository( self ):
+        '''Reinstall the filtering repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+        self.reinstall_repository( installed_repository )
+        strings_displayed = [ 'filtering_0000',
+                              "Galaxy's filtering tool for test 0000",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              str( installed_repository.installed_changeset_revision ) ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+
+    def test_0025_deactivate_filtering_repository( self ):
+        '''Deactivate the filtering repository without removing it from disk.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+        self.deactivate_repository( installed_repository )
+        strings_not_displayed = [ 'filtering_0000',
+                                  "Galaxy's filtering tool for test 0000",
+                                  installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0030_reactivate_filtering_repository( self ):
+        '''Reactivate the filtering repository and verify that it now shows up in the list of installed repositories.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+        self.reactivate_repository( installed_repository )
+        strings_displayed = [ 'filtering_0000',
+                              "Galaxy's filtering tool for test 0000",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              str( installed_repository.installed_changeset_revision ) ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
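+
+
+# Illustration only, not part of the upstream patch: a hedged summary of the
+# lifecycle exercised above. uninstall_repository() removes the repository
+# from disk, while deactivate_repository() leaves it on disk; in both cases
+# the repository drops out of the browse-repositories listing until it is
+# reinstalled or reactivated.
+EXAMPLE_LIFECYCLE_STEPS = [ 'install', 'uninstall', 'reinstall', 'deactivate', 'reactivate' ]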
diff --git a/test/shed_functional/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py b/test/shed_functional/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
new file mode 100644
index 0000000..70dbd38
--- /dev/null
+++ b/test/shed_functional/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
@@ -0,0 +1,146 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+    '''Test uninstalling and reinstalling a repository with tool dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 0010 category and upload the freebayes repository to the tool shed, if necessary.'''
+        category = self.create_category( name='Test 0010 Repository With Tool Dependencies', description='Tests for a repository with tool dependencies.' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name='freebayes_0010',
+                                                    description="Galaxy's freebayes tool",
+                                                    long_description="Long description of Galaxy's freebayes tool",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded the tool xml.',
+                              strings_displayed=[ 'Metadata may have been defined', 'This file requires an entry', 'tool_data_table_conf' ],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/tool_data_table_conf.xml.sample',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded the tool data table sample file.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/sam_fa_indices.loc.sample',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool data table .loc file.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename=os.path.join( 'freebayes', 'malformed_tool_dependencies', 'tool_dependencies.xml' ),
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded malformed tool dependency XML.',
+                              strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename=os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded invalid tool dependency XML.',
+                              strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool configuration' ],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename=os.path.join( 'freebayes', 'tool_dependencies.xml' ),
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded valid tool dependency XML.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0010_install_freebayes_repository( self ):
+        '''Install the freebayes repository into the Galaxy instance.'''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        strings_displayed = [ 'install Tool Shed managed', 'tool dependencies', 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ]
+        self.install_repository( 'freebayes_0010',
+                                 common.test_user_1_name,
+                                 'Test 0010 Repository With Tool Dependencies',
+                                 strings_displayed=strings_displayed,
+                                 new_tool_panel_section_label='test_1210' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+        strings_displayed = [ 'freebayes_0010',
+                              "Galaxy's freebayes tool",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+
+    def test_0015_uninstall_freebayes_repository( self ):
+        '''Uninstall the freebayes repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+        self.uninstall_repository( installed_repository )
+        strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0020_reinstall_freebayes_repository( self ):
+        '''Reinstall the freebayes repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+        self.reinstall_repository( installed_repository )
+        strings_displayed = [ 'freebayes_0010',
+                              "Galaxy's freebayes tool",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+
+    def test_0025_deactivate_freebayes_repository( self ):
+        '''Deactivate the freebayes repository without removing it from disk.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+        self.deactivate_repository( installed_repository )
+        strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0030_reactivate_freebayes_repository( self ):
+        '''Reactivate the freebayes repository and verify that it now shows up in the list of installed repositories.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+        self.reactivate_repository( installed_repository )
+        strings_displayed = [ 'freebayes_0010',
+                              "Galaxy's freebayes tool",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
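+
+
+# Illustration only, not part of the upstream patch: a hedged sketch of a
+# well-formed tool_dependencies.xml of the kind this test uploads last. The
+# package names and versions mirror the strings checked in test_0010; the
+# download URL is a hypothetical placeholder.
+EXAMPLE_TOOL_DEPENDENCIES_XML = '''<?xml version="1.0"?>
+<tool_dependency>
+    <package name="freebayes" version="0.9.4_9696d0ce8a9">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">http://depot.example.org/freebayes.tar.gz</action>
+            </actions>
+        </install>
+    </package>
+</tool_dependency>
+'''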
diff --git a/test/shed_functional/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py b/test/shed_functional/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
new file mode 100644
index 0000000..fb6a8ab
--- /dev/null
+++ b/test/shed_functional/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
@@ -0,0 +1,146 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0020'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+
+emboss_repository_name = 'emboss_0020'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+base_datatypes_count = 0
+repository_datatypes_count = 0
+running_standalone = False
+
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+    '''Test uninstalling and reinstalling a repository with repository dependencies.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 0020 category and upload the emboss repository to the tool shed, if necessary.'''
+        global repository_datatypes_count
+        global running_standalone
+        category = self.create_category( name='Test 0020 Basic Repository Dependencies', description='Testing basic repository dependency features.' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        datatypes_repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                              description=datatypes_repository_description,
+                                                              long_description=datatypes_repository_long_description,
+                                                              owner=common.test_user_1_name,
+                                                              category_id=self.security.encode_id( category.id ),
+                                                              strings_displayed=[] )
+        if self.repository_is_new( datatypes_repository ):
+            running_standalone = True
+            self.upload_file( datatypes_repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                               description=emboss_repository_description,
+                                                               long_description=emboss_repository_long_description,
+                                                               owner=common.test_user_1_name,
+                                                               category_id=self.security.encode_id( category.id ),
+                                                               strings_displayed=[] )
+            self.upload_file( emboss_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_1020', additional_paths=[ 'emboss', '5' ] )
+            repository_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+        repository_datatypes_count = int( self.get_repository_datatypes_count( datatypes_repository ) )
+
+    def test_0010_install_emboss_repository( self ):
+        '''Install the emboss repository into the Galaxy instance.'''
+        global repository_datatypes_count
+        global base_datatypes_count
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        base_datatypes_count = int( self.get_datatypes_count() )
+        strings_displayed = [ 'Handle', 'Never installed', 'tool dependencies', 'emboss', '5.0.0', 'package' ]
+        self.install_repository( emboss_repository_name,
+                                 common.test_user_1_name,
+                                 'Test 0020 Basic Repository Dependencies',
+                                 strings_displayed=strings_displayed,
+                                 new_tool_panel_section_label='test_1210' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'emboss_0020',
+                              'Galaxy wrappers for Emboss version 5.0.0 tools',
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        current_datatypes = int( self.get_datatypes_count() )
+        # If we are running this test by itself, installing the emboss repository should also install the emboss_datatypes
+        # repository, and this should add datatypes to the datatypes registry. If that is the case, verify that datatypes
+        # have been added, otherwise verify that the count is unchanged.
+        if running_standalone:
+            assert current_datatypes == base_datatypes_count + repository_datatypes_count, 'Installing emboss did not add new datatypes.'
+        else:
+            assert current_datatypes == base_datatypes_count, 'Installing emboss added new datatypes.'
+
+    def test_0015_uninstall_emboss_repository( self ):
+        '''Uninstall the emboss repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.uninstall_repository( installed_repository )
+        strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0020_reinstall_emboss_repository( self ):
+        '''Reinstall the emboss repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.reinstall_repository( installed_repository )
+        strings_displayed = [ 'emboss_0020',
+                              'Galaxy wrappers for Emboss version 5.0.0 tools',
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+
+    def test_0025_deactivate_emboss_repository( self ):
+        '''Deactivate the emboss repository without removing it from disk.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.deactivate_repository( installed_repository )
+        strings_not_displayed = [ 'emboss_0020',
+                                  installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0030_reactivate_emboss_repository( self ):
+        '''Reactivate the emboss repository and verify that it now shows up in the list of installed repositories.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.reactivate_repository( installed_repository )
+        strings_displayed = [ 'emboss_0020',
+                              'Galaxy wrappers for Emboss version 5.0.0 tools',
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
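+
+
+# Illustration only, not part of the upstream patch: a hedged sketch of the
+# repository_dependencies.xml that create_repository_dependency() generates
+# from the ( toolshed, name, owner, changeset_revision ) tuple built in
+# test_0005. The toolshed URL and changeset_revision are hypothetical.
+EXAMPLE_REPOSITORY_DEPENDENCIES_XML = '''<?xml version="1.0"?>
+<repositories description="Emboss datatypes dependency">
+    <repository toolshed="http://toolshed.example.org" name="emboss_datatypes_0020"
+                owner="user1" changeset_revision="0123456789ab" />
+</repositories>
+'''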
diff --git a/test/shed_functional/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py b/test/shed_functional/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
new file mode 100644
index 0000000..ba35913
--- /dev/null
+++ b/test/shed_functional/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
@@ -0,0 +1,181 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0030'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+
+emboss_repository_name = 'emboss_0030'
+emboss_5_repository_name = 'emboss_5_0030'
+emboss_6_repository_name = 'emboss_6_0030'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+base_datatypes_count = 0
+repository_datatypes_count = 0
+running_standalone = False
+
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+    '''Test uninstalling and reinstalling a repository with repository dependency revisions.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( admin_user )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 0030 category and upload the emboss repository to the tool shed, if necessary.'''
+        global repository_datatypes_count
+        global running_standalone
+        category = self.create_category( name='Test 0030 Repository Dependency Revisions', description='Testing repository dependencies by revision.' )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        datatypes_repository = self.get_or_create_repository( name=datatypes_repository_name,
+                                                              description=datatypes_repository_description,
+                                                              long_description=datatypes_repository_long_description,
+                                                              owner=common.test_user_1_name,
+                                                              category_id=self.security.encode_id( category.id ),
+                                                              strings_displayed=[] )
+        if self.repository_is_new( datatypes_repository ):
+            running_standalone = True
+            emboss_5_repository = self.get_or_create_repository( name=emboss_5_repository_name,
+                                                                 description=emboss_repository_description,
+                                                                 long_description=emboss_repository_long_description,
+                                                                 owner=common.test_user_1_name,
+                                                                 category_id=self.security.encode_id( category.id ),
+                                                                 strings_displayed=[] )
+            self.upload_file( datatypes_repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '5' ] )
+            datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_5_repository, repository_tuples=[ datatypes_tuple ], filepath=repository_dependencies_path )
+            emboss_6_repository = self.get_or_create_repository( name=emboss_6_repository_name,
+                                                                 description=emboss_repository_description,
+                                                                 long_description=emboss_repository_long_description,
+                                                                 owner=common.test_user_1_name,
+                                                                 category_id=self.security.encode_id( category.id ),
+                                                                 strings_displayed=[] )
+            self.upload_file( emboss_6_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '6' ] )
+            datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_6_repository, repository_tuples=[ datatypes_tuple ], filepath=repository_dependencies_path )
+            emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+                                                               description=emboss_repository_description,
+                                                               long_description=emboss_repository_long_description,
+                                                               owner=common.test_user_1_name,
+                                                               category_id=self.security.encode_id( category.id ),
+                                                               strings_displayed=[] )
+            self.upload_file( emboss_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '5' ] )
+            dependency_tuple = ( self.url, emboss_5_repository.name, emboss_5_repository.user.username, self.get_repository_tip( emboss_5_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+            dependency_tuple = ( self.url, emboss_6_repository.name, emboss_6_repository.user.username, self.get_repository_tip( emboss_6_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+        repository_datatypes_count = int( self.get_repository_datatypes_count( datatypes_repository ) )
+
+    def test_0010_install_emboss_repository( self ):
+        '''Install the emboss repository into the Galaxy instance.'''
+        global repository_datatypes_count
+        global base_datatypes_count
+        global running_standalone
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        base_datatypes_count = int( self.get_datatypes_count() )
+        strings_displayed = [ 'Handle', 'Never installed', 'tool dependencies', 'emboss', '5.0.0', 'package' ]
+        self.install_repository( emboss_repository_name,
+                                 common.test_user_1_name,
+                                 'Test 0030 Repository Dependency Revisions',
+                                 strings_displayed=strings_displayed,
+                                 new_tool_panel_section_label='test_1210' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'emboss_0030',
+                              'Galaxy wrappers for Emboss version 5.0.0 tools',
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        current_datatypes = int( self.get_datatypes_count() )
+        # If we are running this test by itself, installing the emboss repository should also install the emboss_datatypes
+        # repository, and this should add datatypes to the datatypes registry. If that is the case, verify that datatypes
+        # have been added, otherwise verify that the count is unchanged.
+        if running_standalone:
+            assert current_datatypes == base_datatypes_count + repository_datatypes_count, \
+                'Installing emboss resulted in unexpected count in datatypes registry, expected: %s, actual: %s, original: %s, added from repo: %s' % \
+                ( str( base_datatypes_count + repository_datatypes_count ), str( current_datatypes ), str( base_datatypes_count ), str( repository_datatypes_count ) )
+        else:
+            assert current_datatypes == base_datatypes_count, \
+                'Installing emboss added new datatypes, current datatypes: %s, original datatypes: %s' % \
+                ( str( current_datatypes ), str( base_datatypes_count ) )
+
+    def test_0015_uninstall_emboss_repository( self ):
+        '''Uninstall the emboss repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.uninstall_repository( installed_repository )
+        strings_not_displayed = [ installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0020_reinstall_emboss_repository( self ):
+        '''Reinstall the emboss repository.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.reinstall_repository( installed_repository )
+        strings_displayed = [ 'emboss_0030',
+                              'Galaxy wrappers for Emboss version 5.0.0 tools',
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+
+    def test_0025_deactivate_emboss_repository( self ):
+        '''Deactivate the emboss repository without removing it from disk.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.deactivate_repository( installed_repository )
+        strings_not_displayed = [ installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0030_reactivate_emboss_repository( self ):
+        '''Reactivate the emboss repository and verify that it now shows up in the list of installed repositories.'''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+        self.reactivate_repository( installed_repository )
+        strings_displayed = [ 'emboss_0030',
+                              'Galaxy wrappers for Emboss version 5.0.0 tools',
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
diff --git a/test/shed_functional/functional/test_1300_reset_all_metadata.py b/test/shed_functional/functional/test_1300_reset_all_metadata.py
new file mode 100644
index 0000000..25bf7b7
--- /dev/null
+++ b/test/shed_functional/functional/test_1300_reset_all_metadata.py
@@ -0,0 +1,459 @@
+import os
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+datatypes_repository_name = 'emboss_datatypes_0020'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools.  This repository contains no tools."
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+workflow_filename = 'Workflow_for_0060_filter_workflow_repository.ga'
+workflow_name = 'Workflow for 0060_filter_workflow_repository'
+filtering_repository_description = "Galaxy's filtering tool for test 0040"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool for test 0040"
+
+category_0000_name = 'Test 0000 Basic Repository Features 1'
+category_0001_name = 'Test 0000 Basic Repository Features 2'
+category_0010_name = 'Test 0010 Repository With Tool Dependencies'
+category_0020_name = 'Test 0020 Basic Repository Dependencies'
+category_0030_name = 'Test 0030 Repository Dependency Revisions'
+category_0040_name = 'test_0040_repository_circular_dependencies'
+category_0050_name = 'test_0050_repository_n_level_circular_dependencies'
+category_0060_name = 'Test 0060 Workflow Features'
+
+'''
+This script will run in one of two possible ways:
+
+1. Directly, by calling sh run_functional_tests.sh -toolshed test/shed_functional/functional/test_1300_reset_all_metadata.py.
+ -OR-
+2. After the previous test scripts have completed.
+
+In the first case, it is desirable to have the Galaxy database in a state that is as close as possible to the state it would
+be in following the second case. This means explicitly installing whatever repositories would be in an installed state following
+the previous test scripts.
+'''
+
+base_datatypes_count = 0
+repository_datatypes_count = 0
+running_standalone = False
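+
+
+def _example_detect_standalone( test_case, repository ):
+    # Illustration only, not part of the upstream patch: the guard idiom the
+    # tests below rely on. repository_is_new() is expected to return True only
+    # when the repository has no uploaded revisions yet, i.e. when the earlier
+    # test scripts in this series have not populated the tool shed.
+    return test_case.repository_is_new( repository )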
+
+
+class TestResetInstalledRepositoryMetadata( ShedTwillTestCase ):
+    '''Verify that the "Reset selected metadata" feature works.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_categories( self ):
+        '''Create the categories for the repositories in this test script.'''
+        self.login( email=common.admin_email, username=common.admin_username )
+        self.create_category( name=category_0000_name, description='Test 0000 Basic Repository Features 1' )
+        self.create_category( name=category_0001_name, description='Test 0000 Basic Repository Features 2' )
+        self.create_category( name=category_0010_name, description='Tests for a repository with tool dependencies.' )
+        self.create_category( name=category_0020_name, description='Testing basic repository dependency features.' )
+        self.create_category( name=category_0030_name, description='Testing repository dependencies by revision.' )
+        self.create_category( name=category_0040_name, description='Testing handling of circular repository dependencies.' )
+        self.create_category( name=category_0050_name, description='Testing handling of circular repository dependencies to n levels.' )
+        self.create_category( name=category_0060_name, description='Test 0060 - Workflow Features' )
+
+    def test_0010_create_repositories_from_0000_series( self ):
+        '''Create repository filtering_0000 if necessary.'''
+        global running_standalone
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        category = self.create_category( name=category_0000_name, description='' )
+        repository = self.get_or_create_repository( name='filtering_0000',
+                                                    description="Galaxy's filtering tool",
+                                                    long_description="Long description of Galaxy's filtering tool",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        if self.repository_is_new( repository ):
+            running_standalone = True
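+            # Uploading two tool versions gives the repository more than one downloadable
+            # metadata revision for the later metadata-reset checks to exercise.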
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_2.2.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 2.2.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0015_create_repositories_from_0010_series( self ):
+        '''Create repository freebayes_0010.'''
+        category = self.create_category( name=category_0010_name, description='' )
+        repository = self.get_or_create_repository( name='freebayes_0010',
+                                                    description="Galaxy's freebayes tool",
+                                                    long_description="Long description of Galaxy's freebayes tool",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded freebayes.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/tool_data_table_conf.xml.sample',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool_data_table_conf.xml.sample',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/sam_fa_indices.loc.sample',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded sam_fa_indices.loc.sample',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( repository,
+                              filename='freebayes/tool_dependencies.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded tool_dependencies.xml',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0020_create_repositories_from_0020_series( self ):
+        '''Create repositories emboss_0020 and emboss_datatypes_0020 if necessary.'''
+        category = self.create_category( name=category_0020_name, description='' )
+        repository = self.get_or_create_repository( name='emboss_datatypes_0020',
+                                                    description="Galaxy applicable data formats used by Emboss tools.",
+                                                    long_description="Galaxy applicable data formats used by Emboss tools. This repository contains no tools.",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository = self.get_or_create_repository( name='emboss_0020',
+                                                        description=emboss_repository_description,
+                                                        long_description=emboss_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_0025_create_repositories_from_0030_series( self ):
+        '''Create repositories emboss_0030, emboss_5_0030, emboss_6_0030, and emboss_datatypes_0030.'''
+        global repository_datatypes_count
+        category = self.create_category( name=category_0030_name, description='' )
+        datatypes_repository = self.get_or_create_repository( name='emboss_datatypes_0030',
+                                                              description=datatypes_repository_description,
+                                                              long_description=datatypes_repository_long_description,
+                                                              owner=common.test_user_1_name,
+                                                              category_id=self.security.encode_id( category.id ),
+                                                              strings_displayed=[] )
+        if self.repository_is_new( datatypes_repository ):
+            self.upload_file( datatypes_repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_datatypes_count = int( self.get_repository_datatypes_count( datatypes_repository ) )
+            emboss_5_repository = self.get_or_create_repository( name='emboss_5_0030',
+                                                                 description=emboss_repository_description,
+                                                                 long_description=emboss_repository_long_description,
+                                                                 owner=common.test_user_1_name,
+                                                                 category_id=self.security.encode_id( category.id ),
+                                                                 strings_displayed=[] )
+            self.upload_file( emboss_5_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_0330', additional_paths=[ 'emboss', '5' ] )
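+            # Dependency tuples have the form ( tool_shed_url, repository_name, owner,
+            # changeset_revision ); create_repository_dependency() uses them to generate
+            # and upload a repository_dependencies.xml file under the given filepath.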
+            dependency_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_5_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+            emboss_6_repository = self.get_or_create_repository( name='emboss_6_0030',
+                                                                 description=emboss_repository_description,
+                                                                 long_description=emboss_repository_long_description,
+                                                                 owner=common.test_user_1_name,
+                                                                 category_id=self.security.encode_id( category.id ),
+                                                                 strings_displayed=[] )
+            self.upload_file( emboss_6_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_0330', additional_paths=[ 'emboss', '6' ] )
+            dependency_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_6_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+            emboss_repository = self.get_or_create_repository( name='emboss_0030',
+                                                               description=emboss_repository_description,
+                                                               long_description=emboss_repository_long_description,
+                                                               owner=common.test_user_1_name,
+                                                               category_id=self.security.encode_id( category.id ),
+                                                               strings_displayed=[] )
+            self.upload_file( emboss_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_0330', additional_paths=[ 'emboss', '5' ] )
+            dependency_tuple = ( self.url, emboss_5_repository.name, emboss_5_repository.user.username, self.get_repository_tip( emboss_5_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+            dependency_tuple = ( self.url, emboss_6_repository.name, emboss_6_repository.user.username, self.get_repository_tip( emboss_6_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+
+    def test_0030_create_repositories_from_0040_series( self ):
+        '''Create repositories freebayes_0040 and filtering_0040.'''
+        category = self.create_category( name=category_0040_name, description='' )
+        repository = self.get_or_create_repository( name='freebayes_0040',
+                                                    description="Galaxy's freebayes tool",
+                                                    long_description="Long description of Galaxy's freebayes tool",
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='freebayes/freebayes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded the tool tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository = self.get_or_create_repository( name='filtering_0040',
+                                                        description=filtering_repository_description,
+                                                        long_description=filtering_repository_long_description,
+                                                        owner=common.test_user_1_name,
+                                                        category_id=self.security.encode_id( category.id ),
+                                                        strings_displayed=[] )
+            self.upload_file( repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded the tool tarball for filtering 1.1.0.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository = self.test_db_util.get_repository_by_name_and_owner( 'freebayes_0040', common.test_user_1_name )
+            filtering_repository = self.test_db_util.get_repository_by_name_and_owner( 'filtering_0040', common.test_user_1_name )
+            repository_dependencies_path = self.generate_temp_path( 'test_1340', additional_paths=[ 'filtering' ] )
+            repository_tuple = ( self.url, repository.name, repository.user.username, self.get_repository_tip( repository ) )
+            self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+            repository = self.test_db_util.get_repository_by_name_and_owner( 'filtering_0040', common.test_user_1_name )
+            freebayes_repository = self.test_db_util.get_repository_by_name_and_owner( 'freebayes_0040', common.test_user_1_name )
+            repository_dependencies_path = self.generate_temp_path( 'test_1340', additional_paths=[ 'freebayes' ] )
+            repository_tuple = ( self.url, repository.name, repository.user.username, self.get_repository_tip( repository ) )
+            self.create_repository_dependency( repository=freebayes_repository, repository_tuples=[ repository_tuple ], filepath=repository_dependencies_path )
+
+    def test_0035_create_repositories_from_0050_series( self ):
+        '''Create repositories emboss_0050, emboss_datatypes_0050, filtering_0050, freebayes_0050.'''
+        category = self.create_category( name=category_0050_name, description='' )
+        datatypes_repository = self.get_or_create_repository( name='emboss_datatypes_0050',
+                                                              description="Datatypes for emboss",
+                                                              long_description="Long description of Emboss' datatypes",
+                                                              owner=common.test_user_1_name,
+                                                              category_id=self.security.encode_id( category.id ),
+                                                              strings_displayed=[] )
+        if self.repository_is_new( datatypes_repository ):
+            emboss_repository = self.get_or_create_repository( name='emboss_0050',
+                                                               description="Galaxy's emboss tool",
+                                                               long_description="Long description of Galaxy's emboss tool",
+                                                               owner=common.test_user_1_name,
+                                                               category_id=self.security.encode_id( category.id ),
+                                                               strings_displayed=[] )
+            filtering_repository = self.get_or_create_repository( name='filtering_0050',
+                                                                  description="Galaxy's filtering tool",
+                                                                  long_description="Long description of Galaxy's filtering tool",
+                                                                  owner=common.test_user_1_name,
+                                                                  category_id=self.security.encode_id( category.id ),
+                                                                  strings_displayed=[] )
+            freebayes_repository = self.get_or_create_repository( name='freebayes_0050',
+                                                                  description="Galaxy's freebayes tool",
+                                                                  long_description="Long description of Galaxy's freebayes tool",
+                                                                  owner=common.test_user_1_name,
+                                                                  category_id=self.security.encode_id( category.id ),
+                                                                  strings_displayed=[] )
+            self.upload_file( datatypes_repository,
+                              filename='emboss/datatypes/datatypes_conf.xml',
+                              filepath=None,
+                              valid_tools_only=False,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatypes_conf.xml.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( emboss_repository,
+                              filename='emboss/emboss.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded emboss.tar',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( freebayes_repository,
+                              filename='freebayes/freebayes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded freebayes tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( filtering_repository,
+                              filename='filtering/filtering_1.1.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 1.1.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            repository_dependencies_path = self.generate_temp_path( 'test_0350', additional_paths=[ 'emboss' ] )
+            dependency_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+            repository_dependencies_path = self.generate_temp_path( 'test_0350', additional_paths=[ 'filtering' ] )
+            dependency_tuple = ( self.url, emboss_repository.name, emboss_repository.user.username, self.get_repository_tip( emboss_repository ) )
+            self.create_repository_dependency( repository=filtering_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
+            repository_dependencies_path = self.generate_temp_path( 'test_0350', additional_paths=[ 'freebayes' ] )
+            dependency_tuple = ( self.url, filtering_repository.name, filtering_repository.user.username, self.get_repository_tip( filtering_repository ) )
+            self.create_repository_dependency( repository=emboss_repository, repository_tuples=[ dependency_tuple ], filepath=repository_dependencies_path )
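+            # At this point emboss_0050 and filtering_0050 depend on each other; making
+            # freebayes_0050 depend on all four repositories (itself included) yields the
+            # n-level circular dependency structure that the 0050 test series expects.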
+            dependency_tuples = [ ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) ),
+                                  ( self.url, emboss_repository.name, emboss_repository.user.username, self.get_repository_tip( emboss_repository ) ),
+                                  ( self.url, filtering_repository.name, filtering_repository.user.username, self.get_repository_tip( filtering_repository ) ),
+                                  ( self.url, freebayes_repository.name, freebayes_repository.user.username, self.get_repository_tip( freebayes_repository ) ) ]
+            self.create_repository_dependency( repository=freebayes_repository, repository_tuples=dependency_tuples, filepath=repository_dependencies_path )
+
+    def test_0040_create_repositories_from_0060_series( self ):
+        '''Create repository filtering_0060 if necessary.'''
+        category = self.create_category( name=category_0060_name, description='' )
+        workflow_repository = self.get_or_create_repository( name='filtering_0060',
+                                                             description="Galaxy's filtering tool",
+                                                             long_description="Long description of Galaxy's filtering tool",
+                                                             owner=common.test_user_1_name,
+                                                             category_id=self.security.encode_id( category.id ),
+                                                             strings_displayed=[] )
+        if self.repository_is_new( workflow_repository ):
+            with open( self.get_filename( 'filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga' ), 'r' ) as workflow_handle:
+                workflow = workflow_handle.read()
+            workflow = workflow.replace( '__TEST_TOOL_SHED_URL__', self.url.replace( 'http://', '' ) )
+            workflow_filepath = self.generate_temp_path( 'test_0360', additional_paths=[ 'filtering_workflow' ] )
+            if not os.path.exists( workflow_filepath ):
+                os.makedirs( workflow_filepath )
+            with open( os.path.join( workflow_filepath, workflow_filename ), 'w+' ) as workflow_handle:
+                workflow_handle.write( workflow )
+            self.upload_file( workflow_repository,
+                              filename=workflow_filename,
+                              filepath=workflow_filepath,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering workflow.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+            self.upload_file( workflow_repository,
+                              filename='filtering/filtering_2.2.0.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=True,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded filtering 2.2.0 tarball.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+
+    def test_9900_install_all_missing_repositories( self ):
+        '''Call the install_repository method to ensure that all required repositories are installed.'''
+        global repository_datatypes_count
+        global base_datatypes_count
+        global running_standalone
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        base_datatypes_count = int( self.get_datatypes_count() )
+        self.install_repository( 'filtering_0000', common.test_user_1_name, category_0000_name, strings_displayed=[] )
+        self.install_repository( 'freebayes_0010', common.test_user_1_name, category_0010_name, strings_displayed=[] )
+        self.install_repository( 'emboss_0020', common.test_user_1_name, category_0020_name, strings_displayed=[] )
+        self.install_repository( 'emboss_5_0030', common.test_user_1_name, category_0030_name, strings_displayed=[] )
+        self.install_repository( 'freebayes_0050', common.test_user_1_name, category_0050_name, strings_displayed=[] )
+        self.install_repository( 'filtering_0060', common.test_user_1_name, category_0060_name, strings_displayed=[] )
+        current_datatypes = int( self.get_datatypes_count() )
+        # If we are running this test by itself, installing the emboss repository should also install the emboss_datatypes
+        # repository, and this should add datatypes to the datatypes registry. If that is the case, verify that datatypes
+        # have been added, otherwise verify that the count is unchanged.
+        if running_standalone:
+            message = 'Installing emboss did not add new datatypes.\nFound: %d\nExpected at least: %d' % \
+                ( current_datatypes, base_datatypes_count + repository_datatypes_count )
+            assert current_datatypes > base_datatypes_count, message
+        else:
+            assert current_datatypes == base_datatypes_count, 'Installing emboss added new datatypes.'
+
+    def test_9905_reset_metadata_on_all_repositories( self ):
+        '''Reset metadata on all repositories, then verify that it has not changed.'''
+        repository_metadata = dict()
+        repositories = self.test_db_util.get_all_installed_repositories( actually_installed=True )
+        for repository in repositories:
+            repository_metadata[ self.security.encode_id( repository.id ) ] = repository.metadata
+        self.reset_metadata_on_selected_installed_repositories( list( repository_metadata.keys() ) )
+        for repository in repositories:
+            self.test_db_util.ga_refresh( repository )
+            old_metadata = repository_metadata[ self.security.encode_id( repository.id ) ]
+            # When a repository with tools to be displayed in a tool panel section is deactivated and reinstalled,
+            # the tool panel section remains in the repository metadata. However, when the repository's metadata
+            # is subsequently reset, the tool panel section is removed from the repository metadata. While this
+            # is normal and expected behavior, the functional tests assume that repository metadata will not change
+            # in any way after a reset. A workaround is to remove the tool panel section from the stored repository
+            # metadata dict, in order to eliminate the misleading detection of changed metadata.
+            if 'tool_panel_section' in old_metadata and 'tool_panel_section' not in repository.metadata:
+                del old_metadata[ 'tool_panel_section' ]
+            assert repository.metadata == old_metadata, 'Metadata for %s repository %s changed after reset. \nOld: %s\nNew: %s' % \
+                ( repository.status, repository.name, old_metadata, repository.metadata )
diff --git a/test/shed_functional/functional/test_1400_review_migration_stages.py b/test/shed_functional/functional/test_1400_review_migration_stages.py
new file mode 100644
index 0000000..926f30a
--- /dev/null
+++ b/test/shed_functional/functional/test_1400_review_migration_stages.py
@@ -0,0 +1,41 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+
+class TestToolMigrationStages( ShedTwillTestCase ):
+    '''Verify that the migration stages display correctly.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_load_migration_stages_page( self ):
+        '''Load the migration page and check for the appropriate migration stages.'''
+        stages = []
+        migration_message_strings = [ 'The Emboss 5.0.0 tools have been eliminated',
+                                      'The freebayes tool has been eliminated',
+                                      'The NCBI BLAST+ tools',
+                                      'The tools "Map with BWA for Illumina"',
+                                      'FASTQ to BAM, SAM to FASTQ, BAM ',
+                                      'Map with Bowtie for Illumina, ',
+                                      'BAM-to-SAM converts BAM format' ]
+        migrated_repository_names = [ 'emboss_5', 'emboss_datatypes', 'freebayes', 'ncbi_blast_plus',
+                                      'blast_datatypes', 'bwa_wrappers', 'picard', 'lastz',
+                                      'lastz_paired_reads', 'bowtie_color_wrappers', 'bowtie_wrappers',
+                                      'xy_plot', 'bam_to_sam' ]
+        migrated_tool_dependencies = [ 'emboss', '5.0.0', 'freebayes', '0.9.4_a46483351fd0196637614121868fb5c386612b55',
+                                       'samtools', '0.1.18', 'blast+', '2.2.26+', 'bwa', '0.5.9', 'picard', '1.56.0',
+                                       'lastz', '1.02.00', 'bowtie', '0.12.7', 'FreeBayes requires g++', 'ncurses', 'zlib',
+                                       'blast.ncbi.nlm.nih.gov', 'fastx_toolkit', '0.0.13', 'samtools', '0.1.16', 'cufflinks',
+                                       '2.1.1', 'R', '2.11.0' ]
+        migration_scripts = [ '0002_tools.sh', '0003_tools.sh', '0004_tools.sh', '0005_tools.sh', '0006_tools.sh',
+                              '0007_tools.sh', '0008_tools.sh' ]
+        stages.extend( migration_scripts + migrated_tool_dependencies + migrated_repository_names )
+        stages.extend( migration_message_strings )
+        self.load_galaxy_tool_migrations_page( strings_displayed=stages )
diff --git a/test/shed_functional/functional/test_1410_update_manager.py b/test/shed_functional/functional/test_1410_update_manager.py
new file mode 100644
index 0000000..20e7171
--- /dev/null
+++ b/test/shed_functional/functional/test_1410_update_manager.py
@@ -0,0 +1,124 @@
+import logging
+import time
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger(__name__)
+
+repository_name = 'filtering_1410'
+repository_description = "Galaxy's filtering tool"
+repository_long_description = "Long description of Galaxy's filtering repository"
+
+category_name = 'Test 1410 - Galaxy Update Manager'
+category_description = 'Functional test suite to test the update manager.'
+
+'''
+1. Create and populate the filtering_1410 repository.
+2. Install filtering_1410 to Galaxy.
+3. Upload a readme file.
+4. Verify that the browse page now shows an update available.
+'''
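+# Note: step 4 assumes the Galaxy instance checks the Tool Shed for updates far more
+# often than a production server would; the functional test configuration is presumed
+# to set hours_between_check low enough that the 3 second sleep in test_0020 suffices.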
+
+
+class TestUpdateManager( ShedTwillTestCase ):
+    '''Test the Galaxy update manager.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts and login as an admin user."""
+        """
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+        Previously created accounts will not be re-created.
+        """
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+
+    def test_0005_create_filtering_repository( self ):
+        '''
+        We are at step 1 - Create and populate the filtering_1410 repository.
+
+        Create filtering_1410 and upload the tool tarball to it.
+        '''
+        self.login( email=common.admin_email, username=common.admin_username )
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ) )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=True,
+                          commit_message="Uploaded filtering 1.1.0",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_install_filtering_repository( self ):
+        '''
+        We are at step 2 - Install filtering_1410 to Galaxy.
+
+        Install the filtering repository into the running Galaxy instance.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( 'filtering_1410',
+                                 common.test_user_1_name,
+                                 category_name,
+                                 new_tool_panel_section_label='test_1410' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filtering_1410', common.test_user_1_name )
+        strings_displayed = [ 'filtering_1410',
+                              "Galaxy's filtering tool",
+                              'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+        self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+        self.verify_tool_metadata_for_installed_repository( installed_repository )
+
+    def test_0015_upload_readme_file( self ):
+        '''
+        We are at step 3 - Upload a readme file.
+
+        Upload readme.txt to filtering_1410. This has the effect of making the installed changeset revision not be the most recent
+        downloadable revision, without generating a second downloadable revision. The next test method then sleeps for 3 seconds to
+        make sure the update manager picks up the new revision.
+        '''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='readme.txt',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message="Uploaded readme.txt",
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_check_for_displayed_update( self ):
+        '''
+        We are at step 4 - Verify that the browse page now shows an update available.
+
+        The browse page should now show filtering_1410 as installed, but with a yellow box indicating that an update is available.
+        '''
+        # Wait 3 seconds, just to be sure we're past hours_between_check.
+        time.sleep( 3 )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.update_tool_shed_status()
+        ok_title = r'title=\"Updates are available in the Tool Shed for this revision\"'
+        updates_icon = '/static/images/icon_warning_sml.gif'
+        strings_displayed = [ ok_title, updates_icon ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
diff --git a/test/shed_functional/functional/test_1420_tool_dependency_environment_inheritance.py b/test/shed_functional/functional/test_1420_tool_dependency_environment_inheritance.py
new file mode 100644
index 0000000..051e85e
--- /dev/null
+++ b/test/shed_functional/functional/test_1420_tool_dependency_environment_inheritance.py
@@ -0,0 +1,319 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+category_name = 'Test 1420 Tool dependency environment variable inheritance'
+category_description = 'Test script 1420 for interpolation of inherited environment variables.'
+package_atlas_repository_name = 'package_atlas_3_10_1420'
+package_bzlib_repository_name = 'package_bzlib_1_0_1420'
+package_boost_repository_name = 'package_boost_1_53_1420'
+package_numpy_repository_name = 'package_numpy_1_7_1420'
+package_rdkit_repository_name = 'package_rdkit_2012_12_1420'
+package_lapack_repository_name = 'package_lapack_3_4_1420'
+package_atlas_repository_description = 'Automatically Tuned Linear Algebra Software'
+package_bzlib_repository_description = 'Contains a tool dependency definition that downloads and compiles version 1.0 of the bzlib library.'
+package_boost_repository_description = 'Contains a tool dependency definition that downloads and compiles version 1.53 of the boost C++ libraries.'
+package_numpy_repository_description = 'Contains a tool dependency definition that downloads and compiles version 1.7 of the Python numpy package.'
+package_rdkit_repository_description = 'Contains a tool dependency definition that downloads and compiles version 2012-12 of the RDKit cheminformatics and machine-learning package.'
+package_lapack_repository_description = 'Linear Algebra PACKage'
+package_atlas_repository_long_description = '%s: %s' % ( package_atlas_repository_name, package_atlas_repository_description )
+package_bzlib_repository_long_description = '%s: %s' % ( package_bzlib_repository_name, package_bzlib_repository_description )
+package_boost_repository_long_description = '%s: %s' % ( package_boost_repository_name, package_boost_repository_description )
+package_numpy_repository_long_description = '%s: %s' % ( package_numpy_repository_name, package_numpy_repository_description )
+package_rdkit_repository_long_description = '%s: %s' % ( package_rdkit_repository_name, package_rdkit_repository_description )
+package_lapack_repository_long_description = '%s: %s' % ( package_lapack_repository_name, package_lapack_repository_description )
+
+'''
+1. Create repository package_lapack_3_4_1420
+
+2. Create repository package_atlas_3_10_1420
+
+3. Create repository package_bzlib_1_0_1420
+
+4. Create repository package_boost_1_53_1420
+
+5. Create repository package_numpy_1_7_1420
+
+6. Create repository package_rdkit_2012_12_1420
+
+7. Create repository dependencies so that the resulting structure is as follows:
+    Repository package_rdkit_2012_12_1420
+        Repository package_boost_1_53_1420 (prior install required)
+            Repository package_bzlib_1_0_1420 (prior install required)
+        Repository package_numpy_1_7_1420 (prior install required)
+            Repository package_lapack_3_4_1420 (prior install required)
+            Repository package_atlas_3_10_1420 (prior install required)
+
+8. Install package_rdkit_2012_12_1420 into Galaxy.
+
+9. Verify that the env.sh file for package_rdkit_2012_12_1420 also defines the variables inherited from package_numpy_1_7_1420 and package_boost_1_53_1420.
+'''
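+# Illustrative sketch (assumed, not taken from this changeset): after step 9, the env.sh
+# written for package_rdkit_2012_12_1420 should contain entries resembling
+#
+#     PYTHONPATH=<numpy install dir>/lib/python; export PYTHONPATH
+#     BOOST_ROOT_DIR=<boost install dir>; export BOOST_ROOT_DIR
+#
+# where the actual variable names come from the tool_dependencies.xml fixtures in the
+# 1420_files test data and are inherited from the numpy and boost dependencies.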
+
+
+class TestEnvironmentInheritance( ShedTwillTestCase ):
+    '''Test referencing environment variables that were defined in a separate tool dependency.'''
+
+    def test_0000_initiate_users_and_category( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+
+    def test_0005_create_lapack_repository( self ):
+        '''
+        This is step 1 - Create and populate repository package_lapack_3_4_1420.
+
+        All tool dependency definitions should download and extract a tarball containing precompiled binaries from the local
+        filesystem and install them into the path specified by $INSTALL_DIR.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_lapack_repository_name,
+                                                    description=package_lapack_repository_description,
+                                                    long_description=package_lapack_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Load the original tool dependency definition into memory, then fill in the __PATH__ placeholder with the
+        # actual system path where the binary tarball is found.
+        tool_dependency_path = self.generate_temp_path( '1420_tool_dependency', additional_paths=[ 'package_lapack_3_4_1420' ] )
+        precompiled_binary_tarball = self.get_filename( '1420_files/binary_tarballs/lapack.tar' )
+        edited_tool_dependency_filename = self.get_filename( filepath=tool_dependency_path, filename='tool_dependencies.xml' )
+        original_tool_dependency = self.get_filename( '1420_files/package_lapack_3_4_1420/tool_dependencies.xml' )
+        with open( original_tool_dependency, 'r' ) as handle:
+            tool_dependency_definition = handle.read().replace( '__PATH__', precompiled_binary_tarball )
+        with open( edited_tool_dependency_filename, 'w' ) as handle:
+            handle.write( tool_dependency_definition )
+        # Upload the edited tool dependency definition to the package_lapack_3_4_1420 repository.
+        self.upload_file( repository,
+                          filename='tool_dependencies.xml',
+                          filepath=tool_dependency_path,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_lapack_3_4_1420 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_atlas_repository( self ):
+        '''
+        This is step 2 - Create and populate repository package_atlas_3_10_1420.
+
+        All tool dependency definitions should download and extract a tarball containing precompiled binaries from the local
+        filesystem and install them into the path specified by $INSTALL_DIR.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_atlas_repository_name,
+                                                    description=package_atlas_repository_description,
+                                                    long_description=package_atlas_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Load the original tool dependency definition into memory, then fill in the __PATH__ placeholder with the
+        # actual system path where the binary tarball is found.
+        tool_dependency_path = self.generate_temp_path( '1420_tool_dependency', additional_paths=[ 'package_atlas_3_10_1420' ] )
+        precompiled_binary_tarball = self.get_filename( '1420_files/binary_tarballs/atlas.tar' )
+        edited_tool_dependency_filename = self.get_filename( filepath=tool_dependency_path, filename='tool_dependencies.xml' )
+        original_tool_dependency = self.get_filename( '1420_files/package_atlas_3_10_1420/tool_dependencies.xml' )
+        with open( original_tool_dependency, 'r' ) as handle:
+            tool_dependency_definition = handle.read().replace( '__PATH__', precompiled_binary_tarball )
+        with open( edited_tool_dependency_filename, 'w' ) as handle:
+            handle.write( tool_dependency_definition )
+        # Upload the edited tool dependency definition to the package_atlas_3_10_1420 repository.
+        self.upload_file( repository,
+                          filename='tool_dependencies.xml',
+                          filepath=tool_dependency_path,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_atlas_3_10_1420 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_bzlib_repository( self ):
+        '''
+        This is step 3 - Create and populate repository package_bzlib_1_0_1420.
+
+        All tool dependency definitions should download and extract a tarball containing precompiled binaries from the local
+        filesystem and install them into the path specified by $INSTALL_DIR.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_bzlib_repository_name,
+                                                    description=package_bzlib_repository_description,
+                                                    long_description=package_bzlib_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Load the original tool dependency definition into memory, then fill in the __PATH__ placeholder with the
+        # actual system path where the binary tarball is found.
+        tool_dependency_path = self.generate_temp_path( '1420_tool_dependency', additional_paths=[ 'package_bzlib_1_0_1420' ] )
+        precompiled_binary_tarball = self.get_filename( '1420_files/binary_tarballs/bzlib.tar' )
+        edited_tool_dependency_filename = self.get_filename( filepath=tool_dependency_path, filename='tool_dependencies.xml' )
+        original_tool_dependency = self.get_filename( '1420_files/package_bzlib_1_0_1420/tool_dependencies.xml' )
+        with open( original_tool_dependency, 'r' ) as handle:
+            tool_dependency_definition = handle.read().replace( '__PATH__', precompiled_binary_tarball )
+        with open( edited_tool_dependency_filename, 'w' ) as handle:
+            handle.write( tool_dependency_definition )
+        # Upload the edited tool dependency definition to the package_bzlib_1_0_1420 repository.
+        self.upload_file( repository,
+                          filename='tool_dependencies.xml',
+                          filepath=tool_dependency_path,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_bzlib_1_0_1420 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_create_boost_repository( self ):
+        '''
+        This is step 4 - Create and populate repository package_boost_1_53_1420.
+
+        All tool dependency definitions should download and extract a tarball containing precompiled binaries from the local
+        filesystem and install them into the path specified by $INSTALL_DIR.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_boost_repository_name,
+                                                    description=package_boost_repository_description,
+                                                    long_description=package_boost_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Load the original tool dependency definition into memory, then fill in the __PATH__ placeholder with the
+        # actual system path where the binary tarball is found.
+        tool_dependency_path = self.generate_temp_path( '1420_tool_dependency', additional_paths=[ 'package_boost_1_53_1420' ] )
+        precompiled_binary_tarball = self.get_filename( '1420_files/binary_tarballs/boost.tar' )
+        edited_tool_dependency_filename = self.get_filename( filepath=tool_dependency_path, filename='tool_dependencies.xml' )
+        original_tool_dependency = self.get_filename( '1420_files/package_boost_1_53_1420/tool_dependencies.xml' )
+        with open( original_tool_dependency, 'r' ) as handle:
+            tool_dependency_definition = handle.read().replace( '__PATH__', precompiled_binary_tarball )
+        with open( edited_tool_dependency_filename, 'w' ) as handle:
+            handle.write( tool_dependency_definition )
+        # Upload the edited tool dependency definition to the package_boost_1_53_1420 repository.
+        self.upload_file( repository,
+                          filename='tool_dependencies.xml',
+                          filepath=tool_dependency_path,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_boost_1_53_1420 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0025_create_numpy_repository( self ):
+        '''Create and populate package_numpy_1_7_1420.'''
+        '''
+        This is step 1 - Create repository package_numpy_1_7_1420.
+
+        All tool dependency definitions should download and extract a tarball containing precompiled binaries from the local
+        filesystem and install them into the path specified by $INSTALL_DIR.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_numpy_repository_name,
+                                                    description=package_numpy_repository_description,
+                                                    long_description=package_numpy_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Load the original tool dependency definition into memory, then fill in the __PATH__ placeholder with the
+        # actual system path where the binary tarball is found.
+        tool_dependency_path = self.generate_temp_path( '1420_tool_dependency', additional_paths=[ 'package_numpy_1_7_1420' ] )
+        precompiled_binary_tarball = self.get_filename( '1420_files/binary_tarballs/numpy.tar' )
+        edited_tool_dependency_filename = self.get_filename( filepath=tool_dependency_path, filename='tool_dependencies.xml' )
+        original_tool_dependency = self.get_filename( '1420_files/package_numpy_1_7_1420/tool_dependencies.xml' )
+        tool_dependency_definition = open( original_tool_dependency, 'r' ).read().replace( '__PATH__', precompiled_binary_tarball )
+        open( edited_tool_dependency_filename, 'w' ).write( tool_dependency_definition )
+        # Upload the edited tool dependency definition to the package_numpy_1_7_1420 repository.
+        self.upload_file( repository,
+                          filename='tool_dependencies.xml',
+                          filepath=tool_dependency_path,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_numpy_1_7_1420 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0030_create_rdkit_repository( self ):
+        '''Create and populate package_rdkit_2012_12_1420.'''
+        '''
+        This is step 1 - Create repository package_rdkit_2012_12_1420.
+
+        All tool dependency definitions should download and extract a tarball containing precompiled binaries from the local
+        filesystem and install them into the path specified by $INSTALL_DIR.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_rdkit_repository_name,
+                                                    description=package_rdkit_repository_description,
+                                                    long_description=package_rdkit_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Load the original tool dependency definition into memory, then fill in the __PATH__ placeholder with the
+        # actual system path where the binary tarball is found.
+        tool_dependency_path = self.generate_temp_path( '1420_tool_dependency', additional_paths=[ 'package_rdkit_2012_12_1420' ] )
+        precompiled_binary_tarball = self.get_filename( '1420_files/binary_tarballs/rdkit.tar' )
+        edited_tool_dependency_filename = self.get_filename( filepath=tool_dependency_path, filename='tool_dependencies.xml' )
+        original_tool_dependency = self.get_filename( '1420_files/package_rdkit_2012_12_1420/tool_dependencies.xml' )
+        tool_dependency_definition = open( original_tool_dependency, 'r' ).read().replace( '__PATH__', precompiled_binary_tarball )
+        open( edited_tool_dependency_filename, 'w' ).write( tool_dependency_definition )
+        # Upload the edited tool dependency definition to the package_rdkit_2012_12_1420 repository.
+        self.upload_file( repository,
+                          filename='tool_dependencies.xml',
+                          filepath=tool_dependency_path,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_rdkit_2012_12_1420 with tool dependency definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0035_install_rdkit_2012_12_repository( self ):
+        '''Install the package_rdkit_2012_12_1420 repository into Galaxy.'''
+        '''
+        This is step 4 - Install package_rdkit_2012_12_1420 into Galaxy.
+
+        Install package_rdkit_2012_12_1420 with tool dependencies selected to be installed. The result of this should be
+        package_atlas_3_10_1420, package_bzlib_1_0_1420, package_boost_1_53_1420, package_numpy_1_7_1420, package_rdkit_2012_12_1420,
+        and package_lapack_3_4_1420 being installed, and an env.sh generated for package_rdkit_2012_12_1420 that
+        contains environment variables defined in package_boost_1_53_1420 and package_numpy_1_7_1420.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        post_submit_strings_displayed = [ 'package_rdkit_2012_12_1420', 'package_atlas_3_10_1420', 'package_bzlib_1_0_1420',
+                                          'package_numpy_1_7_1420', 'package_lapack_3_4_1420', 'package_boost_1_53_1420' ]
+        self.install_repository( 'package_rdkit_2012_12_1420',
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=True,
+                                 post_submit_strings_displayed=post_submit_strings_displayed )
+
+    def test_0040_verify_env_sh_contents( self ):
+        '''Check the env.sh file for the appropriate contents.'''
+        '''
+        This is step 5 - Verify that the env.sh file for package_rdkit_2012_12_1420 also defines the variables inherited from package_numpy_1_7_1420
+        and package_boost_1_53_1420. Test for the numpy and boost tool dependency paths.
+        '''
+        package_rdkit_repository = self.test_db_util.get_installed_repository_by_name_owner( 'package_rdkit_2012_12_1420', common.test_user_1_name )
+        package_numpy_repository = self.test_db_util.get_installed_repository_by_name_owner( 'package_numpy_1_7_1420', common.test_user_1_name )
+        package_boost_repository = self.test_db_util.get_installed_repository_by_name_owner( 'package_boost_1_53_1420', common.test_user_1_name )
+        rdkit_env_sh = self.get_env_sh_path( tool_dependency_name='rdkit',
+                                             tool_dependency_version='2012_12_1',
+                                             repository=package_rdkit_repository )
+        numpy_tool_dependency_path = self.get_tool_dependency_path( tool_dependency_name='numpy',
+                                                                    tool_dependency_version='1.7.1',
+                                                                    repository=package_numpy_repository )
+        boost_tool_dependency_path = self.get_tool_dependency_path( tool_dependency_name='boost',
+                                                                    tool_dependency_version='1.53.0',
+                                                                    repository=package_boost_repository )
+        rdkit_env_file_contents = open( rdkit_env_sh, 'r' ).read()
+        if numpy_tool_dependency_path not in rdkit_env_file_contents or boost_tool_dependency_path not in rdkit_env_file_contents:
+            message = 'Environment file for package_rdkit_2012_12_1420 does not contain the expected paths.'
+            message += '\nExpected:\n%s\n%s\nContents:\n%s' % ( numpy_tool_dependency_path, boost_tool_dependency_path, rdkit_env_file_contents )
+            raise AssertionError( message )
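+
+
+# For context, the env.sh fragment checked above is a small shell script of roughly
+# this shape (paths illustrative, not the actual generated values):
+#
+# PYTHONPATH=<numpy dependency path>/lib/python:$PYTHONPATH; export PYTHONPATH
+# LD_LIBRARY_PATH=<boost dependency path>/lib:$LD_LIBRARY_PATH; export LD_LIBRARY_PATH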
diff --git a/test/shed_functional/functional/test_1430_repair_installed_repository.py b/test/shed_functional/functional/test_1430_repair_installed_repository.py
new file mode 100644
index 0000000..12cfa81
--- /dev/null
+++ b/test/shed_functional/functional/test_1430_repair_installed_repository.py
@@ -0,0 +1,173 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+category_name = 'Test 1430 Repair installed repository'
+category_description = 'Test script 1430 for repairing an installed repository.'
+filter_repository_name = 'filter_1430'
+column_repository_name = 'column_1430'
+filter_repository_description = "Galaxy's filter tool for test 1430"
+column_repository_description = 'Add a value as a new column'
+filter_repository_long_description = '%s: %s' % ( filter_repository_name, filter_repository_description )
+column_repository_long_description = '%s: %s' % ( column_repository_name, column_repository_description )
+
+'''
+In the Tool Shed:
+
+1) Create and populate the filter_1430 repository
+
+2) Create and populate the column_1430 repository
+
+3) Upload a repository_dependencies.xml file to the column_1430 repository that creates a repository dependency on the filter_1430 repository.
+
+In Galaxy:
+
+1) Install the column_1430 repository, making sure to check the checkbox to Handle repository dependencies so that the filter
+   repository is also installed. Make sure to install the repositories in a specified section of the tool panel.
+
+2) Uninstall the filter_1430 repository.
+
+3) Repair the column_1430 repository.
+
+4) Make sure the filter_1430 repository is reinstalled and the tool is loaded into the tool panel in the same section specified in step 1.
+'''
+
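+# For reference, the repository_dependencies.xml generated in step 3 has roughly this
+# shape (values illustrative; the test fills in the real tool shed URL, owner and
+# changeset revision at runtime):
+#
+# <?xml version="1.0"?>
+# <repositories>
+#     <repository toolshed="http://localhost:9009" name="filter_1430" owner="user1" changeset_revision="<tip>"/>
+# </repositories>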
+
+class TestRepairRepository( ShedTwillTestCase ):
+    '''Test repairing an installed repository.'''
+
+    def test_0000_initiate_users_and_category( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+
+    def test_0005_create_filter_repository( self ):
+        '''Create and populate the filter_1430 repository.'''
+        '''
+        This is step 1 - Create and populate the filter_1430 repository.
+
+        This repository will be depended on by the column_1430 repository.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=filter_repository_name,
+                                                    description=filter_repository_description,
+                                                    long_description=filter_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate filter_1430 with version 1.1.0.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_column_repository( self ):
+        '''Create and populate the column_1430 repository.'''
+        '''
+        This is step 2 - Create and populate the column_1430 repository.
+
+        This repository will depend on the filter_1430 repository.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=column_repository_name,
+                                                    description=column_repository_description,
+                                                    long_description=column_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='column_maker/column_maker.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate column_1430 with tool definitions.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_create_repository_dependency( self ):
+        '''Create a dependency on filter_1430.'''
+        '''
+        This is step 3 - Upload a repository_dependencies.xml file to the column_1430 repository that creates a repository
+        dependency on the filter_1430 repository.
+        '''
+        column_repository = self.test_db_util.get_repository_by_name_and_owner( 'column_1430', common.test_user_1_name )
+        filter_repository = self.test_db_util.get_repository_by_name_and_owner( 'filter_1430', common.test_user_1_name )
+        tool_shed_url = self.url
+        name = filter_repository.name
+        owner = filter_repository.user.username
+        changeset_revision = self.get_repository_tip( filter_repository )
+        repository_dependency_tuple = ( tool_shed_url, name, owner, changeset_revision )
+        filepath = self.generate_temp_path( '1430_repository_dependency' )
+        self.create_repository_dependency( column_repository, [ repository_dependency_tuple ], filepath=filepath )
+
+    def test_0020_install_column_repository( self ):
+        '''Install the column_1430 repository into Galaxy.'''
+        '''
+        This is step 1 (galaxy side) - Install the column_1430 repository, making sure to check the checkbox to
+        handle repository dependencies so that the filter_1430 repository is also installed. Make sure to install
+        the repositories in a specified section of the tool panel.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        post_submit_strings_displayed = [ 'column_1430', 'filter_1430' ]
+        self.install_repository( 'column_1430',
+                                 common.test_user_1_name,
+                                 category_name,
+                                 new_tool_panel_section_label='repair',
+                                 post_submit_strings_displayed=post_submit_strings_displayed,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=True )
+
+    def test_0025_uninstall_filter_repository( self ):
+        '''Uninstall the filter_1430 repository from Galaxy.'''
+        '''
+        This is step 2 - Uninstall the filter_1430 repository.
+        '''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filter_1430', common.test_user_1_name )
+        strings_displayed = [ 'Uninstalling this repository will result in the following' ]
+        strings_not_displayed = []
+        self.uninstall_repository( installed_repository,
+                                   strings_displayed=strings_displayed,
+                                   strings_not_displayed=strings_not_displayed )
+        strings_not_displayed = [ 'filter_1430',
+                                  "Galaxy's filter tool for test 1430",
+                                  installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+
+    def test_0030_repair_column_repository( self ):
+        '''Repair the column_1430 repository.'''
+        '''
+        This is step 3 - Repair the column_1430 repository.
+        '''
+        column_repository = self.test_db_util.get_installed_repository_by_name_owner( 'column_1430', common.test_user_1_name )
+        self.repair_installed_repository( column_repository )
+
+    def test_0035_verify_tool_panel_section( self ):
+        '''Check the tool panel section after repairing.'''
+        '''
+        This is step 4 - Make sure the filter_1430 repository is reinstalled and the tool is loaded into the tool panel
+        in the same section specified in step 1.
+        '''
+        filter_repository = self.test_db_util.get_installed_repository_by_name_owner( 'filter_1430', common.test_user_1_name )
+        strings_displayed = [ 'filter_1430',
+                              "Galaxy's filter tool for test 1430",
+                              filter_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        self.check_galaxy_repository_tool_panel_section( repository=filter_repository, expected_tool_panel_section='repair' )
diff --git a/test/shed_functional/functional/test_1440_missing_env_sh_files.py b/test/shed_functional/functional/test_1440_missing_env_sh_files.py
new file mode 100644
index 0000000..781297f
--- /dev/null
+++ b/test/shed_functional/functional/test_1440_missing_env_sh_files.py
@@ -0,0 +1,125 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+category_name = 'Test 1440 Tool dependency missing env.sh'
+category_description = 'Test script 1440 for detection of missing environment settings.'
+package_repository_name = 'package_env_sh_1_0_1440'
+tool_repository_name = 'filter_1440'
+package_repository_description = 'Repository that should result in an env.sh file, but does not.'
+tool_repository_description = 'Galaxy filtering tool.'
+package_repository_long_description = '%s: %s' % ( package_repository_name, package_repository_description )
+tool_repository_long_description = '%s: %s' % ( tool_repository_name, tool_repository_description )
+
+'''
+1. Create a tool dependency type repository that reliably fails to install successfully. This repository should define
+   an action that would have created an env.sh file on success, resulting in an env.sh file that should exist, but is missing.
+
+2. Create a repository that defines a complex repository dependency on the repository created in step 1, with prior_install_required
+   and set_environment_for_install.
+
+3. Attempt to install the second repository into a galaxy instance, verify that it is installed but missing tool dependencies.
+
+'''
+
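+# For reference, a complex repository dependency of the kind described in step 2 is
+# declared inside tool_dependencies.xml along these lines (attribute values are
+# illustrative placeholders, not the exact file this test uploads):
+#
+# <tool_dependency>
+#     <package name="env_sh" version="1.0">
+#         <repository name="package_env_sh_1_0_1440" owner="user1" changeset_revision="..." prior_installation_required="True" />
+#     </package>
+# </tool_dependency>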
+
+class TestMissingEnvSh( ShedTwillTestCase ):
+    '''Test installing a repository that should create an env.sh file, but does not.'''
+
+    def test_0000_initiate_users_and_category( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+
+    def test_0005_create_package_repository( self ):
+        '''Create and populate package_env_sh_1_0_1440.'''
+        '''
+        This is step 1 - Create repository package_env_sh_1_0_1440.
+
+        Create and populate a repository that is designed to fail a tool dependency installation. This tool dependency should
+        also define one or more environment variables.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=package_repository_name,
+                                                    description=package_repository_description,
+                                                    long_description=package_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Upload the tool dependency definition to the package_env_sh_1_0_1440 repository.
+        self.upload_file( repository,
+                          filename='1440_files/dependency_definition/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate package_env_sh_1_0_1440 with a broken tool dependency definition.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_create_filter_repository( self ):
+        '''Create and populate filter_1440.'''
+        '''
+        This is step 2 - Create a repository that defines a complex repository dependency on the repository created in
+        step 1, with prior_install_required and set_environment_for_install.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=tool_repository_name,
+                                                    description=tool_repository_description,
+                                                    long_description=tool_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Upload the filtering tool tarball, then the complex tool dependency definition, to the filter_1440 repository.
+        self.upload_file( repository,
+                          filename='filtering/filtering_2.2.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate filter_1440 with the filtering tool.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+        self.upload_file( repository,
+                          filename='1440_files/complex_dependency/tool_dependencies.xml',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate filter_1440 with a dependency on package_env_sh_1_0_1440.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0015_install_filter_repository( self ):
+        '''Install the filter_1440 repository into Galaxy.'''
+        '''
+        This is step 3 - Attempt to install the second repository into a Galaxy instance, verify that it is installed but
+        missing tool dependencies.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        post_submit_strings_displayed = [ 'filter_1440', 'package_env_sh_1_0_1440' ]
+        self.install_repository( 'filter_1440',
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=True,
+                                 post_submit_strings_displayed=post_submit_strings_displayed )
+
+    def test_0020_verify_missing_tool_dependency( self ):
+        '''Verify that the filter_1440 repository is installed and missing tool dependencies.'''
+        repository = self.test_db_util.get_installed_repository_by_name_owner( 'filter_1440', common.test_user_1_name )
+        strings_displayed = [ 'Missing tool dependencies' ]
+        self.display_installed_repository_manage_page( repository, strings_displayed=strings_displayed )
+        assert len( repository.missing_tool_dependencies ) == 1, 'filter_1440 should have a missing tool dependency, but does not.'
diff --git a/test/shed_functional/functional/test_1450_installing_datatypes_sniffers.py b/test/shed_functional/functional/test_1450_installing_datatypes_sniffers.py
new file mode 100644
index 0000000..8aa6cea
--- /dev/null
+++ b/test/shed_functional/functional/test_1450_installing_datatypes_sniffers.py
@@ -0,0 +1,177 @@
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+repository_name = 'proteomics_datatypes_1450'
+repository_description = "Proteomics datatypes"
+repository_long_description = "Datatypes used in proteomics"
+
+category_name = 'Test 1450 Datatype Sniffers'
+category_description = 'Test 1450 - Installing Datatype Sniffers'
+'''
+1. Get a count of datatypes and sniffers.
+2. Install proteomics_datatypes_1450.
+3. Verify the count of datatypes and sniffers is the previous count + the datatypes contained within proteomics_datatypes_1450.
+4. Deactivate proteomics_datatypes_1450, verify the count of datatypes and sniffers is equal to the count determined in step 1.
+5. Reactivate proteomics_datatypes_1450, verify that the count of datatypes and sniffers has been increased by the contents of the repository.
+6. Uninstall proteomics_datatypes_1450, verify the count of datatypes and sniffers is equal to the count determined in step 1.
+7. Reinstall proteomics_datatypes_1450, verify that the count of datatypes and sniffers has been increased by the contents of the repository.
+'''
+
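+# For reference, the datatypes and sniffers being counted come from the repository's
+# datatypes_conf.xml, whose general shape is (entries illustrative, not the actual
+# proteomics definitions):
+#
+# <datatypes>
+#     <registration>
+#         <datatype extension="mzml" type="galaxy.datatypes.proteomics:MzML" />
+#     </registration>
+#     <sniffers>
+#         <sniffer type="galaxy.datatypes.proteomics:MzML" />
+#     </sniffers>
+# </datatypes>
+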
+base_datatypes_count = 0
+repository_datatypes_count = 0
+base_sniffers_count = 0
+
+
+class TestInstallDatatypesSniffers( ShedTwillTestCase ):
+    '''Test installing a repository that defines datatypes and datatype sniffers.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        galaxy_admin_user = self.test_db_util.get_galaxy_user( common.admin_email )
+        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_galaxy_private_role( galaxy_admin_user )
+
+    def test_0005_ensure_repositories_and_categories_exist( self ):
+        '''Create the 1450 category and proteomics_datatypes_1450 repository.'''
+        global repository_datatypes_count
+        self.login( email=common.admin_email, username=common.admin_username )
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        if self.repository_is_new( repository ):
+            self.upload_file( repository,
+                              filename='proteomics_datatypes/proteomics_datatypes.tar',
+                              filepath=None,
+                              valid_tools_only=True,
+                              uncompress_file=False,
+                              remove_repo_files_not_in_tar=False,
+                              commit_message='Uploaded datatype and sniffer definitions.',
+                              strings_displayed=[],
+                              strings_not_displayed=[] )
+        repository_datatypes_count = self.get_repository_datatypes_count( repository )
+
+    def test_0010_install_datatypes_repository( self ):
+        '''Install the proteomics_datatypes_1450 repository into the Galaxy instance.'''
+        '''
+        This covers steps 1 and 2 - Get a count of datatypes and sniffers, then install proteomics_datatypes_1450.
+        Store a count of the current datatypes registry and sniffers in global variables, to compare with the updated count
+        after changing the installation status of the proteomics_datatypes_1450 repository.
+        '''
+        global base_datatypes_count
+        global base_sniffers_count
+        base_datatypes_count = self.get_datatypes_count()
+        base_sniffers_count = self.get_sniffers_count()
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        strings_displayed = [ 'proteomics' ]
+        self.install_repository( 'proteomics_datatypes_1450',
+                                 common.test_user_1_name,
+                                 category_name,
+                                 strings_displayed=strings_displayed,
+                                 new_tool_panel_section_label='test_1450' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( 'proteomics_datatypes_1450', common.test_user_1_name )
+        strings_displayed = [ 'user1',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+
+    def test_0015_verify_datatypes_count( self ):
+        '''Verify that datatypes were added in the previous step.'''
+        '''
+        This is step 3 - Verify the count of datatypes and sniffers is the previous count + the datatypes
+                         contained within proteomics_datatypes_1450.
+        Compare the current datatypes registry and sniffers with the values that were retrieved in the previous step.
+        '''
+        current_datatypes = self.get_datatypes_count()
+        assert current_datatypes == base_datatypes_count + repository_datatypes_count, \
+            'Found %d datatypes, expected %d.' % ( current_datatypes, base_datatypes_count + repository_datatypes_count )
+        current_sniffers = self.get_sniffers_count()
+        assert current_sniffers > base_sniffers_count, \
+            'Sniffer count after installing proteomics_datatypes_1450 is %d, which is not greater than %d' % \
+            ( current_sniffers, base_sniffers_count )
+
+    def test_0020_deactivate_datatypes_repository( self ):
+        '''Deactivate the installed proteomics_datatypes_1450 repository.'''
+        '''
+        This is step 4 - Deactivate proteomics_datatypes_1450, verify the count of datatypes and sniffers is equal to
+                         the count determined in step 1.
+        Deactivate proteomics_datatypes_1450 and check that the in-memory datatypes and sniffers match the base values
+        determined in the first step.
+        '''
+        repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+        global base_datatypes_count
+        global base_sniffers_count
+        self.deactivate_repository( repository )
+        new_datatypes_count = self.get_datatypes_count()
+        assert new_datatypes_count == base_datatypes_count, 'Expected %d datatypes, got %d' % ( base_datatypes_count, new_datatypes_count )
+        current_sniffers = self.get_sniffers_count()
+        assert current_sniffers == base_sniffers_count, \
+            'Sniffer count after deactivating proteomics_datatypes_1450 is %d, expected %d' % \
+            ( current_sniffers, base_sniffers_count )
+
+    def test_0025_reactivate_datatypes_repository( self ):
+        '''Reactivate the deactivated proteomics_datatypes_1450 repository.'''
+        '''
+        This is step 5 - Reactivate proteomics_datatypes_1450, verify that the count of datatypes and sniffers has been
+                         increased by the contents of the repository.
+        '''
+        repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+        global repository_datatypes_count
+        global base_datatypes_count
+        global base_sniffers_count
+        self.reactivate_repository( repository )
+        new_datatypes_count = self.get_datatypes_count()
+        assert new_datatypes_count == base_datatypes_count + repository_datatypes_count, \
+            'Found %d datatypes, expected %d.' % ( new_datatypes_count, base_datatypes_count + repository_datatypes_count )
+        current_sniffers = self.get_sniffers_count()
+        assert current_sniffers > base_sniffers_count, \
+            'Sniffer count after reactivating proteomics_datatypes_1450 is %d, which is not greater than %d' % \
+            ( current_sniffers, base_sniffers_count )
+
+    def test_0030_uninstall_datatypes_repository( self ):
+        '''Uninstall the installed proteomics_datatypes_1450 repository.'''
+        '''
+        This is step 6 - Uninstall proteomics_datatypes_1450, verify the count of datatypes and sniffers is equal
+                         to the count determined in step 1.
+        Uninstall proteomics_datatypes_1450 and check that the in-memory datatypes and sniffers match the base values
+        determined in the first step.
+        '''
+        repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+        global base_datatypes_count
+        self.uninstall_repository( repository )
+        new_datatypes_count = self.get_datatypes_count()
+        assert new_datatypes_count == base_datatypes_count, 'Expected %d datatypes, got %d' % ( base_datatypes_count, new_datatypes_count )
+        current_sniffers = self.get_sniffers_count()
+        assert current_sniffers == base_sniffers_count, \
+            'Sniffer count after uninstalling proteomics_datatypes_1450 is %d, expected %d' % \
+            ( current_sniffers, base_sniffers_count )
+
+    def test_0035_reinstall_datatypes_repository( self ):
+        '''Reinstall the uninstalled proteomics_datatypes_1450 repository.'''
+        '''
+        This is step 7 - Reinstall proteomics_datatypes_1450, verify that the count of datatypes and sniffers has been
+                         increased by the contents of the repository.
+        '''
+        repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
+        global repository_datatypes_count
+        global base_datatypes_count
+        self.reinstall_repository( repository )
+        new_datatypes_count = self.get_datatypes_count()
+        assert new_datatypes_count == base_datatypes_count + repository_datatypes_count, \
+            'Found %d datatypes, expected %d.' % ( new_datatypes_count, base_datatypes_count + repository_datatypes_count )
+        current_sniffers = self.get_sniffers_count()
+        assert current_sniffers > base_sniffers_count, \
+            'Sniffer count after reinstalling proteomics_datatypes_1450 is %d, which is not greater than %d' % \
+            ( current_sniffers, base_sniffers_count )
diff --git a/test/shed_functional/functional/test_1460_data_managers.py b/test/shed_functional/functional/test_1460_data_managers.py
new file mode 100644
index 0000000..f29cf2c
--- /dev/null
+++ b/test/shed_functional/functional/test_1460_data_managers.py
@@ -0,0 +1,92 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger( __name__ )
+
+category_name = 'Test 1460 Data Manager'
+category_description = 'Test script 1460 for testing Data Managers'
+data_manager_repository_name = 'data_manager_1460'
+data_manager_repository_description = 'Repository that contains a Data Manager'
+data_manager_repository_long_description = '%s: %s' % ( data_manager_repository_name, data_manager_repository_description )
+data_manager_name = 'testing_data_manager'
+data_manager_tar_file = '1460_files/data_manager_files/test_data_manager.tar'
+
+'''
+1. Add a repository containing a Data Manager to the Tool Shed.
+
+2. Install the Data Manager repository into Galaxy.
+
+3. Check that the Data Manager tool is loaded and its data table is populated.
+'''
+
+# TODO: Allow testing actual execution of the installed Data Manager tool.
+
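+# For reference, the data manager in the uploaded tarball is described by a
+# data_manager_conf.xml of roughly this shape (file names illustrative; the id and
+# data table name below are the ones this test checks for):
+#
+# <data_managers>
+#     <data_manager tool_file="data_manager/data_manager_tool.xml" id="testing_data_manager">
+#         <data_table name="data_manager_test_data_table">
+#             <!-- output column / move definitions -->
+#         </data_table>
+#     </data_manager>
+# </data_managers>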
+
+class TestDataManagers( ShedTwillTestCase ):
+    '''Test installing a repository containing a Data Manager.'''
+
+    def test_0000_initiate_users_and_category( self ):
+        """Create necessary user accounts and login as an admin user."""
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+        self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
+        assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+        self.test_db_util.get_private_role( test_user_2 )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+
+    def test_0010_create_data_manager_repository( self ):
+        '''Create and populate data_manager_1460.'''
+        '''
+        This is step 1 - Create repository data_manager_1460.
+
+        Create and populate a repository that contains a Data manager.
+        '''
+        category = self.test_db_util.get_category_by_name( category_name )
+        repository = self.get_or_create_repository( name=data_manager_repository_name,
+                                                    description=data_manager_repository_description,
+                                                    long_description=data_manager_repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        # Upload the data manager files to the repository.
+        self.upload_file( repository,
+                          filename=data_manager_tar_file,
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Populate %s with a data manager configuration.' % data_manager_repository_name,
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_install_data_manager_repository( self ):
+        '''Install the data_manager_1460 repository into Galaxy.'''
+        '''
+        This is step 2 - Install the repository into a Galaxy instance and verify that the installation succeeds.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        post_submit_strings_displayed = [ data_manager_repository_name ]
+        self.install_repository( data_manager_repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=True,
+                                 post_submit_strings_displayed=post_submit_strings_displayed )
+
+    def test_0030_verify_data_manager_tool( self ):
+        '''Verify that the data_manager_1460 repository is installed and that its Data Manager tool appears in the list in Galaxy.'''
+        repository = self.test_db_util.get_installed_repository_by_name_owner( data_manager_repository_name, common.test_user_1_name )
+        strings_displayed = [ 'Access managed data by job' ]
+        self.display_installed_manage_data_manager_page( repository, data_manager_names=data_manager_name, strings_displayed=strings_displayed )
+
+    def test_0040_verify_data_manager_data_table( self ):
+        '''Verify that the installed repository populated shed_tool_data_table.xml and the sample files.'''
+        self.verify_installed_repository_data_table_entries( required_data_table_entries=[ 'data_manager_test_data_table' ] )
diff --git a/test/shed_functional/functional/test_1470_updating_installed_repositories.py b/test/shed_functional/functional/test_1470_updating_installed_repositories.py
new file mode 100644
index 0000000..514acf8
--- /dev/null
+++ b/test/shed_functional/functional/test_1470_updating_installed_repositories.py
@@ -0,0 +1,136 @@
+import logging
+
+from shed_functional.base.twilltestcase import common, ShedTwillTestCase
+
+log = logging.getLogger(__name__)
+
+repository_name = 'filtering_1470'
+repository_description = "Galaxy's filtering tool"
+repository_long_description = "Long description of Galaxy's filtering tool"
+
+category_name = 'Test 1470 - Updating Installed Repositories'
+category_description = 'Functional test suite to ensure that updating installed repositories does not create white ghosts.'
+
+'''
+1. Install a repository into Galaxy.
+2. In the Tool Shed, update the repository from Step 1.
+3. In Galaxy, get updates to the repository.
+4. In Galaxy, uninstall the repository.
+5. In Galaxy, reinstall the repository.
+6. Make sure step 5 created no white ghosts.
+'''
+
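+# A "white ghost" here is a stale duplicate tool_shed_repository row left behind in
+# Galaxy's install database by an uninstall/reinstall cycle. A hypothetical direct
+# check (illustrative only; the tests below use return_multiple=True instead):
+#
+# rows = session.query( ToolShedRepository ).filter_by( name='filtering_1470' ).all()
+# assert len( rows ) == 1, 'white ghost detected'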
+
+class TestUpdateInstalledRepository( ShedTwillTestCase ):
+    '''Verify that the code correctly handles updating an installed repository, then uninstalling and reinstalling.'''
+
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+        self.test_db_util.get_private_role( test_user_1 )
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = self.test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+        self.test_db_util.get_private_role( admin_user )
+
+    def test_0005_create_filtering_repository( self ):
+        """Create and populate the filtering_0530 repository."""
+        category = self.create_category( name=category_name, description=category_description )
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.get_or_create_repository( name=repository_name,
+                                                    description=repository_description,
+                                                    long_description=repository_long_description,
+                                                    owner=common.test_user_1_name,
+                                                    category_id=self.security.encode_id( category.id ),
+                                                    strings_displayed=[] )
+        self.upload_file( repository,
+                          filename='filtering/filtering_1.1.0.tar',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=True,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded filtering 1.1.0 tarball.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0010_install_filtering_to_galaxy( self ):
+        '''Install the filtering_1470 repository to galaxy.'''
+        '''
+        This is step 1 - Install a repository into Galaxy.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        self.install_repository( repository_name,
+                                 common.test_user_1_name,
+                                 category_name,
+                                 install_tool_dependencies=False,
+                                 install_repository_dependencies=False,
+                                 new_tool_panel_section_label='Filtering' )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name,
+                                                                                         common.test_user_1_name )
+        strings_displayed = [ 'filtering_1470',
+                              self.url.replace( 'http://', '' ),
+                              installed_repository.installed_changeset_revision,
+                              installed_repository.changeset_revision ]
+        self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+        self.display_installed_repository_manage_page( installed_repository,
+                                                       strings_displayed=strings_displayed )
+
+    def test_0015_update_repository( self ):
+        '''Upload a readme file to the filtering_1470 repository.'''
+        '''
+        This is step 2 - In the Tool Shed, update the repository from Step 1.
+
+        Importantly, this update should *not* create a new installable changeset revision, because that would
+        eliminate the process we're testing in this script. So, we upload a readme file.
+        '''
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          filename='filtering/readme.txt',
+                          filepath=None,
+                          valid_tools_only=True,
+                          uncompress_file=False,
+                          remove_repo_files_not_in_tar=False,
+                          commit_message='Uploaded readme.',
+                          strings_displayed=[],
+                          strings_not_displayed=[] )
+
+    def test_0020_get_repository_updates( self ):
+        '''Get updates to the installed repository.'''
+        '''
+        This is step 3 - In Galaxy, get updates to the repository.
+        '''
+        self.galaxy_login( email=common.admin_email, username=common.admin_username )
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name,
+                                                                                         common.test_user_1_name )
+        self.update_installed_repository( installed_repository )
+
+    def test_0025_uninstall_repository( self ):
+        '''Uninstall the filtering_1470 repository.'''
+        '''
+        This is step 4 - In Galaxy, uninstall the repository.
+        '''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name,
+                                                                                         common.test_user_1_name )
+        self.uninstall_repository( installed_repository )
+
+    def test_0030_reinstall_repository( self ):
+        '''Reinstall the filtering_1470 repository.'''
+        '''
+        This is step 5 - In Galaxy, reinstall the repository.
+        '''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name,
+                                                                                         common.test_user_1_name )
+        self.reinstall_repository( installed_repository )
+
+    def test_0035_verify_absence_of_ghosts( self ):
+        '''Check the count of repositories in the database named filtering_1470 and owned by user1.'''
+        '''
+        This is step 6 - Make sure step 5 created no white ghosts.
+        '''
+        installed_repository = self.test_db_util.get_installed_repository_by_name_owner( repository_name,
+                                                                                         common.test_user_1_name,
+                                                                                         return_multiple=True )
+        assert len( installed_repository ) == 1, 'Multiple filtering repositories found in the Galaxy database, possibly indicating a "white ghost" scenario.'
diff --git a/test/shed_functional/functional_tests.py b/test/shed_functional/functional_tests.py
new file mode 100644
index 0000000..1fba4d9
--- /dev/null
+++ b/test/shed_functional/functional_tests.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+"""Test driver for tool shed functional tests.
+
+Launch this script by running ``run_tests.sh -t`` from GALAXY_ROOT.
+"""
+from __future__ import absolute_import, print_function
+
+import os
+import string
+import sys
+import tempfile
+
+galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
+# Need to remove this directory from sys.path
+sys.path[0:1] = [ os.path.join( galaxy_root, "lib" ), os.path.join( galaxy_root, "test" ) ]
+
+from base import driver_util
+
+# This is for the tool shed application.
+from galaxy.web import buildapp as galaxybuildapp
+from galaxy.webapps.tool_shed import buildapp as toolshedbuildapp
+
+log = driver_util.build_logger()
+
+tool_sheds_conf_xml_template = '''<?xml version="1.0"?>
+<tool_sheds>
+    <tool_shed name="Galaxy main tool shed" url="http://toolshed.g2.bx.psu.edu/"/>
+    <tool_shed name="Galaxy test tool shed" url="http://testtoolshed.g2.bx.psu.edu/"/>
+    <tool_shed name="Embedded tool shed for functional tests" url="http://${shed_url}:${shed_port}/"/>
+</tool_sheds>
+'''
+
+shed_tool_conf_xml_template = '''<?xml version="1.0"?>
+<toolbox tool_path="${shed_tool_path}">
+</toolbox>
+'''
+
+tool_data_table_conf_xml_template = '''<?xml version="1.0"?>
+<tables>
+</tables>
+'''
+
+shed_data_manager_conf_xml_template = '''<?xml version="1.0"?>
+<data_managers>
+</data_managers>
+'''
+
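+# The ${...} placeholders in the templates above are filled at runtime with
+# string.Template.safe_substitute(), as in this illustrative example:
+#
+# string.Template( tool_sheds_conf_xml_template ).safe_substitute( shed_url='localhost', shed_port='9009' )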
+
+class ToolShedTestDriver(driver_util.TestDriver):
+    """Instantial a Galaxy-style nose TestDriver for testing the tool shed."""
+
+    def setup(self):
+        """Entry point for test driver script."""
+        # ---- Configuration ------------------------------------------------------
+        tool_shed_test_tmp_dir = driver_util.setup_tool_shed_tmp_dir()
+        if not os.path.isdir( tool_shed_test_tmp_dir ):
+            os.mkdir( tool_shed_test_tmp_dir )
+        self.temp_directories.append(tool_shed_test_tmp_dir)
+        shed_db_path = driver_util.database_files_path(tool_shed_test_tmp_dir, prefix="TOOL_SHED")
+        shed_tool_data_table_conf_file = os.environ.get( 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF', os.path.join( tool_shed_test_tmp_dir, 'shed_tool_data_table_conf.xml' ) )
+        galaxy_shed_data_manager_conf_file = os.environ.get( 'GALAXY_SHED_DATA_MANAGER_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_shed_data_manager_conf.xml' ) )
+        default_tool_data_table_config_path = os.path.join( tool_shed_test_tmp_dir, 'tool_data_table_conf.xml' )
+        galaxy_shed_tool_conf_file = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_shed_tool_conf.xml' ) )
+        galaxy_migrated_tool_conf_file = os.environ.get( 'GALAXY_TEST_MIGRATED_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_migrated_tool_conf.xml' ) )
+        galaxy_tool_sheds_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_SHEDS_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_sheds_conf.xml' ) )
+        if 'GALAXY_TEST_TOOL_DATA_PATH' in os.environ:
+            tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
+        else:
+            tool_data_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+            os.environ[ 'GALAXY_TEST_TOOL_DATA_PATH' ] = tool_data_path
+        galaxy_db_path = driver_util.database_files_path(tool_shed_test_tmp_dir)
+        shed_file_path = os.path.join( shed_db_path, 'files' )
+        hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+        new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+        galaxy_shed_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+        galaxy_migrated_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+        hgweb_config_dir = hgweb_config_file_path
+        os.environ[ 'TEST_HG_WEB_CONFIG_DIR' ] = hgweb_config_dir
+        print("Directory location for hgweb.config:", hgweb_config_dir)
+        toolshed_database_conf = driver_util.database_conf(shed_db_path, prefix="TOOL_SHED")
+        kwargs = dict( admin_users='test@bx.psu.edu',
+                       allow_user_creation=True,
+                       allow_user_deletion=True,
+                       datatype_converters_config_file='datatype_converters_conf.xml.sample',
+                       file_path=shed_file_path,
+                       hgweb_config_dir=hgweb_config_dir,
+                       job_queue_workers=5,
+                       id_secret='changethisinproductiontoo',
+                       log_destination="stdout",
+                       new_file_path=new_repos_path,
+                       running_functional_tests=True,
+                       shed_tool_data_table_config=shed_tool_data_table_conf_file,
+                       smtp_server='smtp.dummy.string.tld',
+                       email_from='functional@localhost',
+                       template_path='templates',
+                       tool_parse_help=False,
+                       use_heartbeat=False )
+        kwargs.update(toolshed_database_conf)
+        # Generate the tool_data_table_conf.xml file.
+        open( default_tool_data_table_config_path, 'w' ).write( tool_data_table_conf_xml_template )
+        # Generate the shed_tool_data_table_conf.xml file.
+        open( shed_tool_data_table_conf_file, 'w' ).write( tool_data_table_conf_xml_template )
+        os.environ[ 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF' ] = shed_tool_data_table_conf_file
+        # ---- Build Tool Shed Application --------------------------------------------------
+        toolshedapp = driver_util.build_shed_app(kwargs)
+
+        # ---- Run tool shed webserver ------------------------------------------------------
+        # TODO: Needed for hg middleware ('lib/galaxy/webapps/tool_shed/framework/middleware/hg.py')
+        kwargs['global_conf']['database_connection'] = kwargs["database_connection"]
+        tool_shed_server_wrapper = driver_util.launch_server(
+            toolshedapp,
+            toolshedbuildapp.app_factory,
+            kwargs,
+            prefix="TOOL_SHED",
+        )
+        self.server_wrappers.append(tool_shed_server_wrapper)
+        tool_shed_test_host = tool_shed_server_wrapper.host
+        tool_shed_test_port = tool_shed_server_wrapper.port
+        log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_port ) )
+
+        # ---- Optionally start up a Galaxy instance ------------------------------------------------------
+        if 'TOOL_SHED_TEST_OMIT_GALAXY' not in os.environ:
+            # Generate the tool_sheds_conf.xml file.
+            tool_sheds_conf_template_parser = string.Template( tool_sheds_conf_xml_template )
+            tool_sheds_conf_xml = tool_sheds_conf_template_parser.safe_substitute( shed_url=tool_shed_test_host, shed_port=tool_shed_test_port )
+            open( galaxy_tool_sheds_conf_file, 'w' ).write( tool_sheds_conf_xml )
+            # Generate the shed_tool_conf.xml file.
+            shed_tool_conf_template_parser = string.Template( shed_tool_conf_xml_template )
+            shed_tool_conf_xml = shed_tool_conf_template_parser.safe_substitute( shed_tool_path=galaxy_shed_tool_path )
+            open( galaxy_shed_tool_conf_file, 'w' ).write( shed_tool_conf_xml )
+            # Generate the migrated_tool_conf.xml file.
+            migrated_tool_conf_xml = shed_tool_conf_template_parser.safe_substitute( shed_tool_path=galaxy_migrated_tool_path )
+            open( galaxy_migrated_tool_conf_file, 'w' ).write( migrated_tool_conf_xml )
+            os.environ[ 'GALAXY_TEST_SHED_TOOL_CONF' ] = galaxy_shed_tool_conf_file
+            # Generate shed_data_manager_conf.xml
+            if not os.environ.get( 'GALAXY_SHED_DATA_MANAGER_CONF' ):
+                open( galaxy_shed_data_manager_conf_file, 'w' ).write( shed_data_manager_conf_xml_template )
+            kwargs = dict( migrated_tools_config=galaxy_migrated_tool_conf_file,
+                           shed_data_manager_config_file=galaxy_shed_data_manager_conf_file,
+                           shed_tool_path=galaxy_shed_tool_path,
+                           tool_data_path=tool_data_path,
+                           tool_sheds_config_file=galaxy_tool_sheds_conf_file )
+            kwargs.update(
+                driver_util.setup_galaxy_config(
+                    galaxy_db_path,
+                    use_test_file_dir=False,
+                    default_install_db_merged=False,
+                    default_tool_data_table_config_path=default_tool_data_table_config_path,
+                    default_shed_tool_data_table_config=shed_tool_data_table_conf_file,
+                    enable_tool_shed_check=True,
+                    shed_tool_conf=galaxy_shed_tool_conf_file,
+                    update_integrated_tool_panel=True,
+                )
+            )
+            print("Galaxy database connection:", kwargs["database_connection"])
+
+            # ---- Run galaxy webserver ------------------------------------------------------
+            galaxyapp = driver_util.build_galaxy_app(kwargs)
+            galaxy_server_wrapper = driver_util.launch_server(
+                galaxyapp,
+                galaxybuildapp.app_factory,
+                kwargs,
+            )
+            log.info("Galaxy tests will be run against %s:%s" % (galaxy_server_wrapper.host, galaxy_server_wrapper.port))
+            self.server_wrappers.append(galaxy_server_wrapper)
+
+
+if __name__ == "__main__":
+    driver_util.drive_test(ToolShedTestDriver)
diff --git a/test/shed_functional/test_data/0460_files/repository_dependencies.xml b/test/shed_functional/test_data/0460_files/repository_dependencies.xml
new file mode 100644
index 0000000..f39196b
--- /dev/null
+++ b/test/shed_functional/test_data/0460_files/repository_dependencies.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0"?>
+<repositories description="">
+    <repository name="emboss_datatypes_0460" owner="user1" />
+</repositories>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/0460_files/repository_dependencies_in_root.tar b/test/shed_functional/test_data/0460_files/repository_dependencies_in_root.tar
new file mode 100644
index 0000000..e39a9db
Binary files /dev/null and b/test/shed_functional/test_data/0460_files/repository_dependencies_in_root.tar differ
diff --git a/test/shed_functional/test_data/0460_files/repository_dependencies_in_subfolder.tar b/test/shed_functional/test_data/0460_files/repository_dependencies_in_subfolder.tar
new file mode 100644
index 0000000..eb82fe8
Binary files /dev/null and b/test/shed_functional/test_data/0460_files/repository_dependencies_in_subfolder.tar differ
diff --git a/test/shed_functional/test_data/0460_files/tool_dependencies.xml b/test/shed_functional/test_data/0460_files/tool_dependencies.xml
new file mode 100644
index 0000000..d0bbbfa
--- /dev/null
+++ b/test/shed_functional/test_data/0460_files/tool_dependencies.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="bwa" version="0.5.9">
+        <repository name="package_bwa_0_5_9_0460" owner="user1" />
+    </package>
+</tool_dependency>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/0460_files/tool_dependencies_in_root.tar b/test/shed_functional/test_data/0460_files/tool_dependencies_in_root.tar
new file mode 100644
index 0000000..8dc097b
Binary files /dev/null and b/test/shed_functional/test_data/0460_files/tool_dependencies_in_root.tar differ
diff --git a/test/shed_functional/test_data/0460_files/tool_dependencies_in_subfolder.tar b/test/shed_functional/test_data/0460_files/tool_dependencies_in_subfolder.tar
new file mode 100644
index 0000000..515f4d9
Binary files /dev/null and b/test/shed_functional/test_data/0460_files/tool_dependencies_in_subfolder.tar differ
diff --git a/test/shed_functional/test_data/0480_files/tool_dependencies.xml b/test/shed_functional/test_data/0480_files/tool_dependencies.xml
new file mode 100644
index 0000000..ba6ed73
--- /dev/null
+++ b/test/shed_functional/test_data/0480_files/tool_dependencies.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="invalid_tool_dependency_xml" version="1.0.0">
+        <install version="1.0">
+            <action type="download_file">file://./</action>
+            <action type="shell_command">/bin/true</action>
+            <action type="set_environment">
+                <environment_variable action="set_to" name="INSTALL_DIR">$INSTALL_DIR</environment_variable>
+            </action>
+        </install>
+        <readme>
+        </readme>
+    </package>
+</tool_dependency>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/0540_files/column_maker/column_maker.tar b/test/shed_functional/test_data/0540_files/column_maker/column_maker.tar
new file mode 100644
index 0000000..27e971b
Binary files /dev/null and b/test/shed_functional/test_data/0540_files/column_maker/column_maker.tar differ
diff --git a/test/shed_functional/test_data/0540_files/column_maker/repository_dependencies.xml b/test/shed_functional/test_data/0540_files/column_maker/repository_dependencies.xml
new file mode 100644
index 0000000..22ed058
--- /dev/null
+++ b/test/shed_functional/test_data/0540_files/column_maker/repository_dependencies.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0"?>
+<repositories description="Simple dependency on convert_chars.">
+    <repository name="convert_chars_0540" owner="user1" />
+</repositories>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/0540_files/convert_chars/convert_chars.tar b/test/shed_functional/test_data/0540_files/convert_chars/convert_chars.tar
new file mode 100644
index 0000000..23553b5
Binary files /dev/null and b/test/shed_functional/test_data/0540_files/convert_chars/convert_chars.tar differ
diff --git a/test/shed_functional/test_data/0540_files/convert_chars/tool_dependencies.xml b/test/shed_functional/test_data/0540_files/convert_chars/tool_dependencies.xml
new file mode 100644
index 0000000..b0f497b
--- /dev/null
+++ b/test/shed_functional/test_data/0540_files/convert_chars/tool_dependencies.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="bwa" version="0.5.9">
+        <repository name="package_bwa_0_5_9_0540" owner="user1" />
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/0540_files/package_bwa/tool_dependencies.xml b/test/shed_functional/test_data/0540_files/package_bwa/tool_dependencies.xml
new file mode 100644
index 0000000..462a873
--- /dev/null
+++ b/test/shed_functional/test_data/0540_files/package_bwa/tool_dependencies.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="bwa" version="0.5.9">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">file://__PATH__/bwa_base.tar</action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+    </package>
+</tool_dependency>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/0550_files/filtering_1.0.tgz b/test/shed_functional/test_data/0550_files/filtering_1.0.tgz
new file mode 100644
index 0000000..c2a0612
Binary files /dev/null and b/test/shed_functional/test_data/0550_files/filtering_1.0.tgz differ
diff --git a/test/shed_functional/test_data/0550_files/package_freebayes_1_0550.tgz b/test/shed_functional/test_data/0550_files/package_freebayes_1_0550.tgz
new file mode 100644
index 0000000..2befe2b
Binary files /dev/null and b/test/shed_functional/test_data/0550_files/package_freebayes_1_0550.tgz differ
diff --git a/test/shed_functional/test_data/0550_files/package_freebayes_2_0550.tgz b/test/shed_functional/test_data/0550_files/package_freebayes_2_0550.tgz
new file mode 100644
index 0000000..eaeaefb
Binary files /dev/null and b/test/shed_functional/test_data/0550_files/package_freebayes_2_0550.tgz differ
diff --git a/test/shed_functional/test_data/0550_files/package_samtools_1_0550.tgz b/test/shed_functional/test_data/0550_files/package_samtools_1_0550.tgz
new file mode 100644
index 0000000..db60638
Binary files /dev/null and b/test/shed_functional/test_data/0550_files/package_samtools_1_0550.tgz differ
diff --git a/test/shed_functional/test_data/0550_files/package_samtools_2_0550.tgz b/test/shed_functional/test_data/0550_files/package_samtools_2_0550.tgz
new file mode 100644
index 0000000..6567721
Binary files /dev/null and b/test/shed_functional/test_data/0550_files/package_samtools_2_0550.tgz differ
diff --git a/test/shed_functional/test_data/0550_files/temp/tool_dependencies.xml b/test/shed_functional/test_data/0550_files/temp/tool_dependencies.xml
new file mode 100644
index 0000000..1e75489
--- /dev/null
+++ b/test/shed_functional/test_data/0550_files/temp/tool_dependencies.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="samtools" version="0.1.18">
+        <install version="1.0">
+            <actions>
+                <action type="shell_command">echo "Success."</action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+Compiling SAMtools requires the ncurses and zlib development libraries.
+        </readme>
+    </package>
+</tool_dependency>
+
diff --git a/test/shed_functional/test_data/1420_files/binary_tarballs/atlas.tar b/test/shed_functional/test_data/1420_files/binary_tarballs/atlas.tar
new file mode 100644
index 0000000..bf1e423
Binary files /dev/null and b/test/shed_functional/test_data/1420_files/binary_tarballs/atlas.tar differ
diff --git a/test/shed_functional/test_data/1420_files/binary_tarballs/boost.tar b/test/shed_functional/test_data/1420_files/binary_tarballs/boost.tar
new file mode 100644
index 0000000..33c9b94
Binary files /dev/null and b/test/shed_functional/test_data/1420_files/binary_tarballs/boost.tar differ
diff --git a/test/shed_functional/test_data/1420_files/binary_tarballs/bzlib.tar b/test/shed_functional/test_data/1420_files/binary_tarballs/bzlib.tar
new file mode 100644
index 0000000..69330a8
Binary files /dev/null and b/test/shed_functional/test_data/1420_files/binary_tarballs/bzlib.tar differ
diff --git a/test/shed_functional/test_data/1420_files/binary_tarballs/lapack.tar b/test/shed_functional/test_data/1420_files/binary_tarballs/lapack.tar
new file mode 100644
index 0000000..8fc71bc
Binary files /dev/null and b/test/shed_functional/test_data/1420_files/binary_tarballs/lapack.tar differ
diff --git a/test/shed_functional/test_data/1420_files/binary_tarballs/numpy.tar b/test/shed_functional/test_data/1420_files/binary_tarballs/numpy.tar
new file mode 100644
index 0000000..b0f4c0b
Binary files /dev/null and b/test/shed_functional/test_data/1420_files/binary_tarballs/numpy.tar differ
diff --git a/test/shed_functional/test_data/1420_files/binary_tarballs/rdkit.tar b/test/shed_functional/test_data/1420_files/binary_tarballs/rdkit.tar
new file mode 100644
index 0000000..99e43ee
Binary files /dev/null and b/test/shed_functional/test_data/1420_files/binary_tarballs/rdkit.tar differ
diff --git a/test/shed_functional/test_data/1420_files/package_atlas_3_10_1420/tool_dependencies.xml b/test/shed_functional/test_data/1420_files/package_atlas_3_10_1420/tool_dependencies.xml
new file mode 100644
index 0000000..6f699ba
--- /dev/null
+++ b/test/shed_functional/test_data/1420_files/package_atlas_3_10_1420/tool_dependencies.xml
@@ -0,0 +1,22 @@
+<tool_dependency>
+    <package name="atlas" version="3.10.1">
+        <install version="1.0">
+            <actions>
+                <!-- first action is always downloading -->
+                <action type="download_file">file://__PATH__</action>
+                <action type="move_directory_files">
+					<source_directory>.</source_directory>
+					<destination_directory>$INSTALL_DIR</destination_directory>
+				</action>
+                <action type="set_environment">
+                    <environment_variable name="ATLAS_LIB_DIR" action="set_to">$INSTALL_DIR/atlas/lib</environment_variable>
+                    <environment_variable name="ATLAS_INCLUDE_DIR" action="set_to">$INSTALL_DIR/atlas/include</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>ATLAS_LIB_DIR and ATLAS_INCLUDE_DIR (including libatlas.a) will be exported for later use.
+        During ATLAS library compilation, ATLAS performs code efficiency checks. These checks can only provide optimal results if "frequency scaling" is disabled on the CPU and no other load-intensive processes are running.
+        Ideally, you should compile on an empty cluster node with CPU frequency scaling disabled (see "cpufreq-selector" or "cpufreq-set").
+        </readme>
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/1420_files/package_boost_1_53_1420/tool_dependencies.xml b/test/shed_functional/test_data/1420_files/package_boost_1_53_1420/tool_dependencies.xml
new file mode 100644
index 0000000..91a2e23
--- /dev/null
+++ b/test/shed_functional/test_data/1420_files/package_boost_1_53_1420/tool_dependencies.xml
@@ -0,0 +1,29 @@
+<?xml version='1.0' encoding='utf-8'?>
+<tool_dependency>
+    <package name="bzlib" version="1.0.6">
+        <repository name="package_bzlib_1_0_1420" owner="user1" prior_installation_required="True" />
+    </package>
+    <package name="boost" version="1.53.0">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">file://__PATH__</action>
+
+                <!-- populate the environment variables from the dependent repos -->
+                <action type="set_environment_for_install">
+                    <repository name="package_bzlib_1_0_1420" owner="user1">
+                        <package name="bzlib" version="1.0.6" />
+                    </repository>
+                </action>
+                <action type="move_directory_files">
+                    <source_directory>.</source_directory>
+                    <destination_directory>$INSTALL_DIR</destination_directory>
+                </action>
+                <action type="set_environment">
+                    <environment_variable action="set_to" name="BOOST_ROOT_DIR">$INSTALL_DIR/boost</environment_variable>
+                    <environment_variable action="append_to" name="LD_LIBRARY_PATH">$INSTALL_DIR/boost/lib/</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>Compiling boost requires a C++ compiler (typically gcc). You can access the boost root directory through the $BOOST_ROOT_DIR environment variable. All modules will be built.</readme>
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/1420_files/package_bzlib_1_0_1420/tool_dependencies.xml b/test/shed_functional/test_data/1420_files/package_bzlib_1_0_1420/tool_dependencies.xml
new file mode 100644
index 0000000..2170ae6
--- /dev/null
+++ b/test/shed_functional/test_data/1420_files/package_bzlib_1_0_1420/tool_dependencies.xml
@@ -0,0 +1,21 @@
+<tool_dependency>
+    <package name="bzlib" version="1.0.6">
+        <install version="1.0">
+            <actions>
+                <!-- first action is always downloading -->
+                <action type="download_by_url">file://__PATH__</action>
+                <action type="move_directory_files">
+                    <source_directory>.</source_directory>
+                    <destination_directory>$INSTALL_DIR</destination_directory>
+                </action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bzlib/bin</environment_variable>
+                    <environment_variable name="BZLIB_LIB_DIR" action="set_to">$INSTALL_DIR/bzlib/lib</environment_variable>
+                    <environment_variable name="BZLIB_INCLUDE_DIR" action="set_to">$INSTALL_DIR/bzlib/include</environment_variable>
+                    <environment_variable name="BZLIB_SOURCE_DIR" action="set_to">$INSTALL_DIR/bzlib/source</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>Compiling bzlib requires make and a C compiler. You can access bzlib with $BZLIB_LIB_DIR and $BZLIB_INCLUDE_DIR.</readme>
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/1420_files/package_lapack_3_4_1420/tool_dependencies.xml b/test/shed_functional/test_data/1420_files/package_lapack_3_4_1420/tool_dependencies.xml
new file mode 100644
index 0000000..f6c7793
--- /dev/null
+++ b/test/shed_functional/test_data/1420_files/package_lapack_3_4_1420/tool_dependencies.xml
@@ -0,0 +1,18 @@
+<tool_dependency>
+    <package name="lapack" version="3.4.2">
+        <install version="1.0">
+            <actions>
+                <!-- first action is always downloading -->
+                <action type="download_by_url">file://__PATH__</action>
+                <action type="move_directory_files">
+                    <source_directory>.</source_directory>
+                    <destination_directory>$INSTALL_DIR</destination_directory>
+                </action>
+                <action type="set_environment">
+                    <environment_variable name="LAPACK_LIB_DIR" action="set_to">$INSTALL_DIR/lapack/lib</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>LAPACK requires gcc and gfortran. LAPACK_LIB_DIR will be set (including liblapack.a and libblas.a).</readme>
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/1420_files/package_numpy_1_7_1420/tool_dependencies.xml b/test/shed_functional/test_data/1420_files/package_numpy_1_7_1420/tool_dependencies.xml
new file mode 100644
index 0000000..0c3dcf4
--- /dev/null
+++ b/test/shed_functional/test_data/1420_files/package_numpy_1_7_1420/tool_dependencies.xml
@@ -0,0 +1,35 @@
+<?xml version='1.0' encoding='utf-8'?>
+<tool_dependency>
+        <package name="lapack" version="3.4.2">
+            <repository name="package_lapack_3_4_1420" owner="user1" prior_installation_required="True" />
+        </package>
+        <package name="atlas" version="3.10.1">
+            <repository name="package_atlas_3_10_1420" owner="user1" prior_installation_required="True" />
+        </package>
+        <package name="numpy" version="1.7.1">
+            <install version="1.0">
+                <actions>
+                    <action type="download_by_url">file://__PATH__</action>
+                    <action type="set_environment_for_install">
+                        <repository name="package_atlas_3_10_1420" owner="user1">
+                            <package name="atlas" version="3.10.1" />
+                        </repository>
+                        <repository name="package_lapack_3_4_1420" owner="user1">
+                            <package name="lapack" version="3.4.2" />
+                        </repository>
+                    </action>
+                    <action type="move_directory_files">
+                        <source_directory>.</source_directory>
+                        <destination_directory>$INSTALL_DIR</destination_directory>
+                    </action>
+                    <action type="set_environment">
+                        <environment_variable action="append_to" name="PYTHONPATH">$INSTALL_DIR/lib/python</environment_variable>
+                        <environment_variable action="prepend_to" name="PATH">$INSTALL_DIR/bin</environment_variable>
+                        <environment_variable action="set_to" name="PYTHONPATH_NUMPY">$INSTALL_DIR/lib/python</environment_variable>
+                        <environment_variable action="set_to" name="PATH_NUMPY">$INSTALL_DIR/bin</environment_variable>
+                    </action>
+                </actions>
+            </install>
+            <readme>Compiling numpy requires a C and Fortran compiler (typically gcc and gfortran). The PYTHONPATH for numpy can be accessed through PYTHONPATH_NUMPY and the binaries with PATH_NUMPY.</readme>
+        </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/1420_files/package_rdkit_2012_12_1420/tool_dependencies.xml b/test/shed_functional/test_data/1420_files/package_rdkit_2012_12_1420/tool_dependencies.xml
new file mode 100644
index 0000000..dd06508
--- /dev/null
+++ b/test/shed_functional/test_data/1420_files/package_rdkit_2012_12_1420/tool_dependencies.xml
@@ -0,0 +1,43 @@
+<?xml version='1.0' encoding='utf-8'?>
+<tool_dependency>
+    <package name="numpy" version="1.7.1">
+        <repository name="package_numpy_1_7_1420" owner="user1" prior_installation_required="True" />
+    </package>
+    <package name="boost" version="1.53.0">
+        <repository name="package_boost_1_53_1420" owner="user1" prior_installation_required="True" />
+    </package>
+
+    <package name="rdkit" version="2012_12_1">
+        <install version="1.0">
+            <actions>
+                <!-- first action is always downloading -->
+                <action type="download_by_url">file://__PATH__</action>
+
+                <!-- populate the environment variables from the dependent repos -->
+                <action type="set_environment_for_install">
+                    <repository name="package_numpy_1_7_1420" owner="user1">
+                        <package name="numpy" version="1.7.1" />
+                    </repository>
+                    <repository name="package_boost_1_53_1420" owner="user1">
+                        <package name="boost" version="1.53.0" />
+                    </repository>
+                </action>
+
+                <!-- PYTHONPATH_NUMPY is set in the numpy package -->
+                <action type="move_directory_files">
+                    <source_directory>.</source_directory>
+                    <destination_directory>$INSTALL_DIR</destination_directory>
+                </action>
+                <action type="set_environment">
+                    <environment_variable action="set_to" name="RDBASE">$INSTALL_DIR/rdkit</environment_variable>
+                    <environment_variable action="append_to" name="LD_LIBRARY_PATH">$INSTALL_DIR/rdkit/lib/</environment_variable>
+                    <environment_variable action="prepend_to" name="PATH">$INSTALL_DIR/rdkit/bin</environment_variable>
+                    <environment_variable action="prepend_to" name="PYTHONPATH">$INSTALL_DIR/rdkit/lib/python2.7/site-packages/</environment_variable>
+                    <environment_variable action="prepend_to" name="PYTHONPATH">$ENV[PYTHONPATH_NUMPY]</environment_variable>
+                    <environment_variable action="append_to" name="LD_LIBRARY_PATH">$ENV[BOOST_ROOT_DIR]/lib/</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>Compiling rdkit requires cmake, python headers, sqlite3, flex and bison.</readme>
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/1440_files/complex_dependency/tool_dependencies.xml b/test/shed_functional/test_data/1440_files/complex_dependency/tool_dependencies.xml
new file mode 100644
index 0000000..b479002
--- /dev/null
+++ b/test/shed_functional/test_data/1440_files/complex_dependency/tool_dependencies.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<tool_dependency>
+    <package name="failure" version="1.0.0">
+        <repository name="package_env_sh_1_0_1440" owner="user1" prior_installation_required="True" />
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/1440_files/dependency_definition/tool_dependencies.xml b/test/shed_functional/test_data/1440_files/dependency_definition/tool_dependencies.xml
new file mode 100644
index 0000000..0cdc3ee
--- /dev/null
+++ b/test/shed_functional/test_data/1440_files/dependency_definition/tool_dependencies.xml
@@ -0,0 +1,14 @@
+<?xml version='1.0' encoding='utf-8'?>
+<tool_dependency>
+    <package name="failure" version="1.0.0">
+        <install version="1.0">
+            <actions>
+                <action type="shell_command">false</action>
+                <action type="set_environment">
+                    <environment_variable name="TEST_SUCCEEDED" action="prepend_to">true</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme></readme>
+    </package>
+</tool_dependency>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/1460_files/data_manager_files/test_data_manager.tar b/test/shed_functional/test_data/1460_files/data_manager_files/test_data_manager.tar
new file mode 100644
index 0000000..b4b09a8
Binary files /dev/null and b/test/shed_functional/test_data/1460_files/data_manager_files/test_data_manager.tar differ
diff --git a/test/shed_functional/test_data/bed_to_gff_converter/bed_to_gff_converter.tar b/test/shed_functional/test_data/bed_to_gff_converter/bed_to_gff_converter.tar
new file mode 100644
index 0000000..47dab9b
Binary files /dev/null and b/test/shed_functional/test_data/bed_to_gff_converter/bed_to_gff_converter.tar differ
diff --git a/test/shed_functional/test_data/bismark/bismark.tar b/test/shed_functional/test_data/bismark/bismark.tar
new file mode 100644
index 0000000..e24183c
Binary files /dev/null and b/test/shed_functional/test_data/bismark/bismark.tar differ
diff --git a/test/shed_functional/test_data/bismark/bismark_methylation_extractor.xml b/test/shed_functional/test_data/bismark/bismark_methylation_extractor.xml
new file mode 100644
index 0000000..c54d99e
--- /dev/null
+++ b/test/shed_functional/test_data/bismark/bismark_methylation_extractor.xml
@@ -0,0 +1,306 @@
+<tool id="bismark_methylation_extractor" name="Bismark" version="0.7.7.3">
+    <!-- Wrapper compatible with Bismark version 0.7.7 -->
+    <description>methylation extractor</description>
+    <!--<version_command>bismark_methylation_extractor version</version_command>-->
+    <requirements>
+        <requirement type="set_environment">SCRIPT_PATH</requirement>
+        <requirement type="package" version="0.12.8">bowtie</requirement>
+        <requirement type="package" version="2.0.0-beta7">bowtie2</requirement>
+    </requirements>
+    <parallelism method="basic"></parallelism>
+    <command interpreter="python">
+        bismark_methylation_extractor.py
+
+        --infile $input
+
+        --bismark_path \$SCRIPT_PATH
+
+        #if $singlePaired.sPaired == "single":
+            --single-end
+        #else:
+            --paired-end
+            $no_overlap
+        #end if
+
+        #if str($ignore_bps) != "0":
+           --ignore $ignore_bps
+        #end if
+
+        #if $report:
+            --report-file $o_report
+        #end if
+
+        #if $comprehensive:
+            --comprehensive
+        #end if
+
+        #if $merge_non_cpg:
+            --merge-non-cpg
+        #end if
+
+        #if $compress:
+            --compress $compressed_output
+        #else:
+            #if $comprehensive == False and $merge_non_cpg == False:
+                ## twelve files
+                --cpg_ot $cpg_ot
+                --chg_ot $chg_ot
+                --chh_ot $chh_ot
+                --cpg_ctot $cpg_ctot
+                --chg_ctot $chg_ctot
+                --chh_ctot $chh_ctot
+                --cpg_ob $cpg_ob
+                --chg_ob $chg_ob
+                --chh_ob $chh_ob
+                --cpg_ctob $cpg_ctob
+                --chg_ctob $chg_ctob
+                --chh_ctob $chh_ctob
+            #elif $merge_non_cpg and $comprehensive:
+                ## two files
+                --non_cpg_context $non_cpg_context
+                --cpg_context $cpg_context
+            #elif $comprehensive:
+                ## three files
+                --cpg_context $cpg_context
+                --chg_context $chg_context
+                --chh_context $chh_context
+            #elif $merge_non_cpg:
+                ## eight files
+                --non_cpg_context_ctot $non_cpg_context_ctot
+                --non_cpg_context_ot $non_cpg_context_ot
+                --non_cpg_context_ob $non_cpg_context_ob
+                --non_cpg_context_ctob $non_cpg_context_ctob
+                --cpg_ot $cpg_ot
+                --cpg_ctot $cpg_ctot
+                --cpg_ob $cpg_ob
+                --cpg_ctob $cpg_ctob
+            #end if
+        ## end compress
+        #end if
+
+    </command>
+    <inputs>
+        <!-- Input Parameters -->
+        <param name="input" type="data" format="sam" label="SAM file from Bismark bisulfid mapper" />
+        <conditional name="singlePaired">
+            <param name="sPaired" type="select" label="Is this library mate-paired?">
+              <option value="single">Single-end</option>
+              <option value="paired">Paired-end</option>
+            </param>
+            <when value="single" />
+            <when value="paired">
+                <param name="no_overlap" type="boolean" truevalue="--no-overlap" falsevalue="" checked="False" label="This option avoids scoring overlapping methylation calls twice, in case of overlapping read one and read two" help="" />
+            </when>
+        </conditional>
+
+       <param name="ignore_bps" type="integer" value="0" label="Ignore the first N bp when processing the methylation call string" />
+       <param name="comprehensive" type="boolean" truevalue="true" falsevalue="false" checked="False" label="Merge all four possible strand-specific methylation info
+into context-dependent output files" help="" />
+       <param name="merge_non_cpg" type="boolean" truevalue="true" falsevalue="false" checked="False" label="Merge all non-CpG contexts into one file" help="This will produce eight strand-specific output files, or two output files in comprehensive mode." />
+       <param name="report" type="boolean" truevalue="true" falsevalue="false" checked="False" label="Short methylation summary output" />
+       <param name="compress" type="boolean" truevalue="true" falsevalue="false" checked="False" label="Compress all result files and output one single file" />
+
+    </inputs>
+    <outputs>
+        <!--
+            OT – original top strand
+            CTOT – complementary to original top strand
+            OB – original bottom strand
+            CTOB – complementary to original bottom strand
+        -->
+        <data format="tabular" name="o_report" label="${tool.name} on ${on_string}: Report file">
+          <filter> ( report is True ) </filter>
+        </data>
+
+        <!-- default output 12 files -->
+        <data format="tabular" name="cpg_ot" label="${tool.name} on ${on_string}: CpG original top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chg_ot" label="${tool.name} on ${on_string}: CHG original top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chh_ot" label="${tool.name} on ${on_string}: CHH original top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="cpg_ctot" label="${tool.name} on ${on_string}: CpG complementary to top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chg_ctot" label="${tool.name} on ${on_string}: CHG complementary to top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chh_ctot" label="${tool.name} on ${on_string}: CHH complementary to top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+
+        <data format="tabular" name="cpg_ob" label="${tool.name} on ${on_string}: CpG original bottom strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chg_ob" label="${tool.name} on ${on_string}: CHG original bottom strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chh_ob" label="${tool.name} on ${on_string}: CHH original bottom strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="cpg_ctob" label="${tool.name} on ${on_string}: CpG complementary to bottom strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chg_ctob" label="${tool.name} on ${on_string}: CHG complementary to bottom strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chh_ctob" label="${tool.name} on ${on_string}: CHH complementary to bottom strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg == False) </filter>
+        </data>
+
+        <!-- Context-dependent methylation output files (comprehensive option) -->
+        <data format="tabular" name="cpg_context" label="${tool.name} on ${on_string}: CpG context dependent">
+          <filter> ( compress == False and comprehensive) </filter>
+        </data>
+        <data format="tabular" name="chg_context" label="${tool.name} on ${on_string}: CHG context dependent">
+          <filter> ( compress == False and comprehensive and merge_non_cpg == False) </filter>
+        </data>
+        <data format="tabular" name="chh_context" label="${tool.name} on ${on_string}: CHH context dependent">
+          <filter> ( compress == False and comprehensive and merge_non_cpg == False) </filter>
+        </data>
+
+        <data format="tabular" name="non_cpg_context" label="${tool.name} on ${on_string}: Non CpG context dependent">
+          <filter> ( compress == False and comprehensive and merge_non_cpg) </filter>
+        </data>
+
+        <data format="tabular" name="non_cpg_context_ot" label="${tool.name} on ${on_string}: Non CpG context dependent on original top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg) </filter>
+        </data>
+        <data format="tabular" name="non_cpg_context_ctot" label="${tool.name} on ${on_string}: Non CpG context dependent on complementary to top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg) </filter>
+        </data>
+        <data format="tabular" name="non_cpg_context_ob" label="${tool.name} on ${on_string}: Non CpG context dependent on bottom top strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg) </filter>
+        </data>
+        <data format="tabular" name="non_cpg_context_ctob" label="${tool.name} on ${on_string}: Non CpG context dependent on complementary to bottom strand">
+          <filter> ( compress == False and comprehensive == False and merge_non_cpg) </filter>
+        </data>
+
+        <data format="gzipped" name="compressed_output" label="${tool.name} on ${on_string}: Result archive.">
+          <filter> ( compress ) </filter>
+        </data>
+    </outputs>
+
+    <tests>
+    </tests>
+
+    <help>
+
+**What it does**
+
+The following is a brief description of all options to control the Bismark_
+methylation extractor. The script reads in a bisulfite read alignment results file 
+produced by the Bismark bisulfite mapper and extracts the methylation information
+for individual cytosines. This information is found in the methylation call field
+which can contain the following characters:
+
+
+  - X = for methylated C in CHG context (was protected)
+  - x = for not methylated C in CHG context (was converted)
+  - H = for methylated C in CHH context (was protected)
+  - h = for not methylated C in CHH context (was converted)
+  - Z = for methylated C in CpG context (was protected)
+  - z = for not methylated C in CpG context (was converted)
+  - . = for any bases not involving cytosines
+
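+As an illustration only (this snippet is not part of the tool), the call string can be
+summarised with a few lines of Python; the string below is a made-up example::
+
+    from collections import Counter
+
+    calls = "..Z..x..H..h..z..X.."   # hypothetical methylation call string
+    counts = Counter(c for c in calls if c != '.')
+    # Upper case letters mark methylated Cs, lower case letters unmethylated Cs.
+    methylated = sum(n for c, n in counts.items() if c.isupper())
+    unmethylated = sum(n for c, n in counts.items() if c.islower())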
+
+The methylation extractor outputs result files for cytosines in CpG, CHG and CHH
+context (this distinction is actually already made in Bismark itself). As methylation
+information is written out for every C analysed, the files can easily reach tens or even
+hundreds of millions of lines and become very large and difficult to handle. The
+methylation calls are additionally split up by the four possible strands a given
+bisulfite read may have aligned against:
+
+  - OT = original top strand
+  - CTOT = complementary to original top strand
+
+  - OB = original bottom strand
+  - CTOB = complementary to original bottom strand
+
+Thus, by default twelve individual output files are generated per input file (unless
+--comprehensive is specified, see below). The output files can be imported into a genome
+viewer, such as SeqMonk, and re-combined into a single data group if desired (in fact
+unless the bisulfite reads were generated preserving directionality it doesn't make any
+sense to look at the data in a strand-specific manner). Strand-specific output files can
+optionally be skipped, in which case only three output files for CpG, CHG or CHH context
+will be generated. For both the strand-specific and comprehensive outputs there is also
+the option to merge both non-CpG contexts (CHG and CHH) into one single non-CpG context.
+
+
+.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+
+Bismark was developed by Krueger F and Andrews SR at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2.
+
+-------
+
+**Bismark settings**
+
+All of the options have a default value. You can change any of them. If any Bismark function is missing, please contact the tool author or your Galaxy admin.
+
+------
+
+**Outputs**
+
+The output files are in the following format (tab delimited)::
+
+
+    Column  Description
+  --------  --------------------------------------------------------
+      1     seq-ID
+      2     strand
+      3     chromosome
+      4     position
+      5     methylation call
+
+
+  * Methylated cytosines receive a '+' orientation.
+  * Unmethylated cytosines receive a '-' orientation.
+
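+As a quick, purely illustrative sketch (not part of the tool), a file in this format can
+be tallied per chromosome with a few lines of Python; the file name below is hypothetical::
+
+    import csv
+    from collections import defaultdict
+
+    methylated_per_chrom = defaultdict(int)
+    with open('CpG_OT_example.txt') as handle:
+        for seq_id, strand, chrom, pos, call in csv.reader(handle, delimiter='\t'):
+            if strand == '+':   # '+' orientation marks a methylated cytosine
+                methylated_per_chrom[chrom] += 1
+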
+------
+
+**OPTIONS**
+
+Input::
+
+  -s/--single-end          Input file(s) are Bismark result file(s) generated from single-end
+                           read data. Specifying either --single-end or --paired-end is
+                           mandatory.
+
+  -p/--paired-end          Input file(s) are Bismark result file(s) generated from paired-end
+                           read data. Specifying either --paired-end or --single-end is
+                           mandatory.
+
+  --no_overlap             For paired-end reads it is theoretically possible that read_1 and
+                           read_2 overlap. This option avoids scoring overlapping methylation
+                           calls twice. Whilst this removes a bias towards more methylation calls
+                           near the center of sequenced fragments, it can de facto remove
+                           a good proportion of the data.
+
+  --ignore INT             Ignore the first INT bp at the 5' end of each read when processing the
+                           methylation call string. This can remove e.g. a restriction enzyme site
+                           at the start of each read.
+
+Output::
+
+  --comprehensive          Specifying this option will merge all four possible strand-specific 
+                           methylation info into context-dependent output files. The default 
+                           contexts are:
+                            - CpG context
+                            - CHG context
+                            - CHH context
+
+  --merge_non_CpG          This will produce two output files (in --comprehensive mode) or eight
+                           strand-specific output files (default) for Cs in
+                            - CpG context
+                            - non-CpG context
+
+  --report                 Prints out a short methylation summary as well as the parameters used to run
+                           this script.
+
+
+  </help>
+</tool>
diff --git a/test/shed_functional/test_data/blast/blast_datatypes.tar b/test/shed_functional/test_data/blast/blast_datatypes.tar
new file mode 100644
index 0000000..4df64db
Binary files /dev/null and b/test/shed_functional/test_data/blast/blast_datatypes.tar differ
diff --git a/test/shed_functional/test_data/blast/blastxml_to_top_descr.tar b/test/shed_functional/test_data/blast/blastxml_to_top_descr.tar
new file mode 100644
index 0000000..1bd799b
Binary files /dev/null and b/test/shed_functional/test_data/blast/blastxml_to_top_descr.tar differ
diff --git a/test/shed_functional/test_data/bwa/bwa_base.tar b/test/shed_functional/test_data/bwa/bwa_base.tar
new file mode 100644
index 0000000..dc166f8
Binary files /dev/null and b/test/shed_functional/test_data/bwa/bwa_base.tar differ
diff --git a/test/shed_functional/test_data/bwa/bwa_color.tar b/test/shed_functional/test_data/bwa/bwa_color.tar
new file mode 100644
index 0000000..59fc3fb
Binary files /dev/null and b/test/shed_functional/test_data/bwa/bwa_color.tar differ
diff --git a/test/shed_functional/test_data/bwa/complex/bwa_base.tar b/test/shed_functional/test_data/bwa/complex/bwa_base.tar
new file mode 100644
index 0000000..db7db17
Binary files /dev/null and b/test/shed_functional/test_data/bwa/complex/bwa_base.tar differ
diff --git a/test/shed_functional/test_data/bwa/complex/readme/tool_dependencies.xml b/test/shed_functional/test_data/bwa/complex/readme/tool_dependencies.xml
new file mode 100644
index 0000000..e34b1ef
--- /dev/null
+++ b/test/shed_functional/test_data/bwa/complex/readme/tool_dependencies.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="bwa" version="0.5.9">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">file://__PATH__/bwa_base.tar</action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+Compiling BWA requires zlib and libpthread to be present on your system.
+        </readme>
+    </package>
+</tool_dependency>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/bwa/complex/tool_dependencies.xml b/test/shed_functional/test_data/bwa/complex/tool_dependencies.xml
new file mode 100644
index 0000000..462a873
--- /dev/null
+++ b/test/shed_functional/test_data/bwa/complex/tool_dependencies.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="bwa" version="0.5.9">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">file://__PATH__/bwa_base.tar</action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+    </package>
+</tool_dependency>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/column_maker/column_maker.tar b/test/shed_functional/test_data/column_maker/column_maker.tar
new file mode 100644
index 0000000..27e971b
Binary files /dev/null and b/test/shed_functional/test_data/column_maker/column_maker.tar differ
diff --git a/test/shed_functional/test_data/convert_chars/convert_chars.tar b/test/shed_functional/test_data/convert_chars/convert_chars.tar
new file mode 100644
index 0000000..23553b5
Binary files /dev/null and b/test/shed_functional/test_data/convert_chars/convert_chars.tar differ
diff --git a/test/shed_functional/test_data/emboss/0470_files/emboss_complex_dependency.tar b/test/shed_functional/test_data/emboss/0470_files/emboss_complex_dependency.tar
new file mode 100644
index 0000000..27189a1
Binary files /dev/null and b/test/shed_functional/test_data/emboss/0470_files/emboss_complex_dependency.tar differ
diff --git a/test/shed_functional/test_data/emboss/0470_files/tool_dependencies.xml b/test/shed_functional/test_data/emboss/0470_files/tool_dependencies.xml
new file mode 100644
index 0000000..c4b25e8
--- /dev/null
+++ b/test/shed_functional/test_data/emboss/0470_files/tool_dependencies.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="emboss" version="5.0.0">
+        <!-- These tools require the EMBOSS 5.0.0 binaries. -->
+        <repository name="package_emboss_5_0_0_0470" owner="user1" />
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/emboss/datatypes/datatypes_conf.xml b/test/shed_functional/test_data/emboss/datatypes/datatypes_conf.xml
new file mode 100644
index 0000000..5e9b8bc
--- /dev/null
+++ b/test/shed_functional/test_data/emboss/datatypes/datatypes_conf.xml
@@ -0,0 +1,101 @@
+<?xml version="1.0"?>
+<datatypes>
+    <registration>
+        <datatype extension="acedb" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="asn1" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="btwisted" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="cai" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="charge" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="checktrans" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="chips" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="clustal" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="codata" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="codcmp" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="coderet" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="compseq" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="cpgplot" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="cpgreport" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="cusp" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="cut" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="dan" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="dbmotif" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="diffseq" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="digest" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="dreg" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="einverted" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="embl" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="epestfind" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="equicktandem" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="est2genome" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="etandem" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="excel" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="feattable" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="fitch" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="freak" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="fuzznuc" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="fuzzpro" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="fuzztran" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="garnier" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="gcg" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="geecee" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="genbank" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="helixturnhelix" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="hennig86" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="hmoment" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="ig" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="isochore" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="jackknifer" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="jackknifernon" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="markx0" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="markx1" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="markx10" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="markx2" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="markx3" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="match" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="mega" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="meganon" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="motif" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="msf" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="nametable" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="ncbi" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="needle" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="newcpgreport" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="newcpgseek" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="nexus" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="nexusnon" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="noreturn" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="pair" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="palindrome" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="pepcoil" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="pepinfo" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="pepstats" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="phylip" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="phylipnon" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="pir" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="polydot" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="preg" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="prettyseq" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="primersearch" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="regions" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="score" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="selex" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="seqtable" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="showfeat" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="showorf" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="simple" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="sixpack" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="srs" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="srspair" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="staden" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="strider" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="supermatcher" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="swiss" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="syco" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="table" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="tagseq" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="textsearch" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="vectorstrip" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="wobble" type="galaxy.datatypes.data:Text" subclass="True"/>
+        <datatype extension="wordcount" type="galaxy.datatypes.data:Text" subclass="True"/>
+    </registration>
+</datatypes>
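Each subclass="True" registration above asks Galaxy's datatype registry to generate a subclass of galaxy.datatypes.data:Text for that extension at load time. A hand-written equivalent would look roughly like the following Python sketch (the class is illustrative; the upstream tree relies on the generated classes instead):

    # Hypothetical hand-written equivalent of one subclass="True" entry;
    # Galaxy normally generates these classes from the XML at startup.
    from galaxy.datatypes.data import Text

    class Genbank(Text):
        """EMBOSS 'genbank' output, handled as plain text."""
        file_ext = "genbank"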
diff --git a/test/shed_functional/test_data/emboss/emboss.tar b/test/shed_functional/test_data/emboss/emboss.tar
new file mode 100644
index 0000000..309fd3b
Binary files /dev/null and b/test/shed_functional/test_data/emboss/emboss.tar differ
diff --git a/test/shed_functional/test_data/emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml b/test/shed_functional/test_data/emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml
new file mode 100644
index 0000000..ca9418f
--- /dev/null
+++ b/test/shed_functional/test_data/emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="libx11" version="1.5.0">
+        <repository name="package_x11_client_1_5_proto_7_0_0470" owner="user1" prior_installation_required="True" />
+    </package>
+    <package name="emboss" version="5.0.0">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/EMBOSS-5.0.0.tar.gz</action>
+                <action type="set_environment_for_install">
+                    <repository name="package_x11_client_1_5_proto_7_0_0470" owner="user1" prior_installation_required="True">
+                        <package name="libx11" version="1.5.0" />
+                    </repository>
+                </action>
+                <action type="template_command">
+                    #if env.get('X11_LIB_DIR', False) and env.get('X11_INCLUDE_DIR', False):
+                        ./configure --prefix=$env.INSTALL_DIR --x-includes=$env.X11_INCLUDE_DIR --x-libraries=$env.X11_LIB_DIR
+                    #else:
+                        ./configure --prefix=$env.INSTALL_DIR
+                    #end if
+                </action>
+                <action type="shell_command">make && make install</action>
+                <action extract="true" type="download_file">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/PHYLIP-3.6b.tar.gz</action>
+                <action type="change_directory">PHYLIP-3.6b</action>
+                <action type="template_command">
+                    #if env.get('X11_LIB_DIR', False) and env.get('X11_INCLUDE_DIR', False):
+                        ./configure --prefix=$env.INSTALL_DIR --x-includes=$env.X11_INCLUDE_DIR --x-libraries=$env.X11_LIB_DIR CFLAGS='-I$env.INSTALL_DIR/include'
+                    #else:
+                        ./configure --prefix=$env.INSTALL_DIR
+                    #end if
+                </action>
+                <action type="shell_command">make && make install</action>
+                <action type="set_environment">
+                    <environment_variable action="prepend_to" name="PATH">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+These links provide information for building the EMBOSS package in most environments.
+
+System requirements
+http://emboss.sourceforge.net/download/#Requirements
+
+Platform-dependent notes
+http://emboss.sourceforge.net/download/#Platforms
+        </readme>
+    </package>
+</tool_dependency>
\ No newline at end of file
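The template_command bodies above are Cheetah templates; Galaxy renders them against an "env" namespace before running the result as a shell command. A minimal rendering sketch, assuming the Cheetah (Cheetah3) package and an illustrative env dict (the paths are made up; Galaxy supplies the real install locations):

    # Sketch only: renders the configure step from the recipe above.
    from Cheetah.Template import Template

    TMPL = """#if env.get('X11_LIB_DIR', False) and env.get('X11_INCLUDE_DIR', False):
    ./configure --prefix=$env.INSTALL_DIR --x-includes=$env.X11_INCLUDE_DIR --x-libraries=$env.X11_LIB_DIR
    #else:
    ./configure --prefix=$env.INSTALL_DIR
    #end if
    """

    env = {"INSTALL_DIR": "/deps/emboss/5.0.0",
           "X11_INCLUDE_DIR": "/deps/x11/include",
           "X11_LIB_DIR": "/deps/x11/lib"}
    print(Template(TMPL, searchList=[{"env": env}]))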
diff --git a/test/shed_functional/test_data/emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml b/test/shed_functional/test_data/emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml
new file mode 100644
index 0000000..c6dde93
--- /dev/null
+++ b/test/shed_functional/test_data/emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="libx11" version="1.5.0">
+        <repository name="package_x11_client_1_5_proto_7_0_0470" owner="user1" prior_installation_required="True" />
+    </package>
+    <package name="emboss" version="5.0.0">
+        <install version="1.0">
+            <actions>
+                <!-- This is also a comment -->
+                <action type="download_by_url">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/EMBOSS-5.0.0.tar.gz</action>
+                <action type="set_environment_for_install">
+                    <repository name="package_x11_client_1_5_proto_7_0_0470" owner="user1" prior_installation_required="True">
+                        <package name="libx11" version="1.5.0" />
+                    </repository>
+                </action>
+                <action type="template_command">
+                    #if env.get('X11_LIB_DIR', False) and env.get('X11_INCLUDE_DIR', False):
+                        ./configure --prefix=$env.INSTALL_DIR --x-includes=$env.X11_INCLUDE_DIR --x-libraries=$env.X11_LIB_DIR
+                    #else:
+                        ./configure --prefix=$env.INSTALL_DIR
+                    #end if
+                </action>
+                <action type="shell_command">make && make install</action>
+                <action extract="true" type="download_file">ftp://emboss.open-bio.org/pub/EMBOSS/old/5.0.0/PHYLIP-3.6b.tar.gz</action>
+                <action type="change_directory">PHYLIP-3.6b</action>
+                <action type="template_command">
+                    #if env.get('X11_LIB_DIR', False) and env.get('X11_INCLUDE_DIR', False):
+                        ./configure --prefix=$env.INSTALL_DIR --x-includes=$env.X11_INCLUDE_DIR --x-libraries=$env.X11_LIB_DIR CFLAGS='-I$env.INSTALL_DIR/include'
+                    #else:
+                        ./configure --prefix=$env.INSTALL_DIR
+                    #end if
+                </action>
+                <action type="shell_command">make && make install</action>
+                <action type="set_environment">
+                    <environment_variable action="prepend_to" name="PATH">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+These links provide information for building the EMBOSS package in most environments.
+
+System requirements
+http://emboss.sourceforge.net/download/#Requirements
+
+Platform-dependent notes
+http://emboss.sourceforge.net/download/#Platforms
+        </readme>
+    </package>
+</tool_dependency>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml b/test/shed_functional/test_data/emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml
new file mode 100644
index 0000000..581bc6f
--- /dev/null
+++ b/test/shed_functional/test_data/emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml
@@ -0,0 +1,22 @@
+<tool_dependency>
+    <package name="libx11" version="1.5.0">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">ftp://ftp.x.org/pub/X11R7.7/src/proto/xproto-7.0.23.tar.bz2</action>
+                <action type="shell_command">./configure --prefix=$INSTALL_DIR</action>
+                <action type="shell_command">make && make install</action>
+                <action type="set_environment">
+                    <environment_variable name="X11_INCLUDE_DIR" action="set_to">$INSTALL_DIR/include</environment_variable>
+                </action>
+                <action type="download_file" extract="true">ftp://ftp.x.org/pub/X11R7.7/src/lib/libX11-1.5.0.tar.bz2</action>
+                <action type="change_directory">libX11-1.5.0</action>
+                <action type="shell_command">./configure --prefix=$INSTALL_DIR CFLAGS='-I$INSTALL_DIR/include'</action>
+                <action type="shell_command">make && make install</action>
+                <action type="set_environment">
+                    <environment_variable name="X11_LIB_DIR" action="set_to">$INSTALL_DIR/lib</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>Xlib is an X Window System protocol client library written in the C programming language. It contains functions for interacting with an X server. These functions allow programmers to write programs without knowing the details of the protocol. Few applications use Xlib directly; rather, they employ other libraries that use Xlib functions to provide widget toolkits.</readme>
+    </package>
+</tool_dependency>
\ No newline at end of file
diff --git a/test/shed_functional/test_data/emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml b/test/shed_functional/test_data/emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml
new file mode 100644
index 0000000..ee577e4
--- /dev/null
+++ b/test/shed_functional/test_data/emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml
@@ -0,0 +1,23 @@
+<tool_dependency>
+    <package name="libx11" version="1.5.0">
+        <install version="1.0">
+            <actions>
+                <!-- This is a comment -->
+                <action type="download_by_url">ftp://ftp.x.org/pub/X11R7.7/src/proto/xproto-7.0.23.tar.bz2</action>
+                <action type="shell_command">./configure --prefix=$INSTALL_DIR</action>
+                <action type="shell_command">make && make install</action>
+                <action type="set_environment">
+                    <environment_variable name="X11_INCLUDE_DIR" action="set_to">$INSTALL_DIR/include</environment_variable>
+                </action>
+                <action type="download_file" extract="true">ftp://ftp.x.org/pub/X11R7.7/src/lib/libX11-1.5.0.tar.bz2</action>
+                <action type="change_directory">libX11-1.5.0</action>
+                <action type="shell_command">./configure --prefix=$INSTALL_DIR CFLAGS='-I$INSTALL_DIR/include'</action>
+                <action type="shell_command">make && make install</action>
+                <action type="set_environment">
+                    <environment_variable name="X11_LIB_DIR" action="set_to">$INSTALL_DIR/lib</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>Xlib is an X Window System protocol client library written in the C programming language. It contains functions for interacting with an X server. These functions allow programmers to write programs without knowing the details of the protocol. Few applications use Xlib directly; rather, they employ other libraries that use Xlib functions to provide widget toolkits.</readme>
+    </package>
+</tool_dependency>
\ No newline at end of file
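The set_environment actions above capture the X11 install locations so that dependent recipes (such as the EMBOSS one earlier in this diff) can consume them; in practice Galaxy writes these settings into a per-dependency env.sh that is sourced before the tool runs. Roughly, in Python terms (paths are illustrative):

    # Approximate effect of the <set_environment> actions above.
    import os

    install_dir = "/deps/libx11/1.5.0"            # stands in for $INSTALL_DIR
    os.environ["X11_INCLUDE_DIR"] = install_dir + "/include"  # action="set_to"
    os.environ["X11_LIB_DIR"] = install_dir + "/lib"          # action="set_to"
    # The emboss recipe's action="prepend_to" on PATH behaves like:
    os.environ["PATH"] = install_dir + "/bin" + os.pathsep + os.environ.get("PATH", "")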
diff --git a/test/shed_functional/test_data/filtering/README b/test/shed_functional/test_data/filtering/README
new file mode 100644
index 0000000..c030be8
--- /dev/null
+++ b/test/shed_functional/test_data/filtering/README
@@ -0,0 +1 @@
+This is a third readme file.
\ No newline at end of file
diff --git a/test/shed_functional/test_data/filtering/filtering_0000.txt b/test/shed_functional/test_data/filtering/filtering_0000.txt
new file mode 100644
index 0000000..0970a25
--- /dev/null
+++ b/test/shed_functional/test_data/filtering/filtering_0000.txt
@@ -0,0 +1 @@
+Readme file for filtering 1.1.0
diff --git a/test/shed_functional/test_data/filtering/filtering_1.1.0.tar b/test/shed_functional/test_data/filtering/filtering_1.1.0.tar
new file mode 100644
index 0000000..94be976
Binary files /dev/null and b/test/shed_functional/test_data/filtering/filtering_1.1.0.tar differ
diff --git a/test/shed_functional/test_data/filtering/filtering_2.2.0.tar b/test/shed_functional/test_data/filtering/filtering_2.2.0.tar
new file mode 100644
index 0000000..9b9416c
Binary files /dev/null and b/test/shed_functional/test_data/filtering/filtering_2.2.0.tar differ
diff --git a/test/shed_functional/test_data/filtering/filtering_test_data.tar b/test/shed_functional/test_data/filtering/filtering_test_data.tar
new file mode 100644
index 0000000..5d64289
Binary files /dev/null and b/test/shed_functional/test_data/filtering/filtering_test_data.tar differ
diff --git a/test/shed_functional/test_data/filtering/readme.txt b/test/shed_functional/test_data/filtering/readme.txt
new file mode 100644
index 0000000..9ad35df
--- /dev/null
+++ b/test/shed_functional/test_data/filtering/readme.txt
@@ -0,0 +1,2 @@
+These characters should not result in a unicode decoding error: ������
+....
\ No newline at end of file
diff --git a/test/shed_functional/test_data/filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga b/test/shed_functional/test_data/filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga
new file mode 100644
index 0000000..311d1c2
--- /dev/null
+++ b/test/shed_functional/test_data/filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga
@@ -0,0 +1,60 @@
+{
+    "a_galaxy_workflow": "true", 
+    "annotation": "", 
+    "format-version": "0.1", 
+    "name": "Workflow for 0060_filter_workflow_repository", 
+    "steps": {
+        "0": {
+            "annotation": "", 
+            "id": 0, 
+            "input_connections": {}, 
+            "inputs": [
+                {
+                    "description": "", 
+                    "name": "Input Dataset"
+                }
+            ], 
+            "name": "Input dataset", 
+            "outputs": [], 
+            "position": {
+                "left": 10, 
+                "top": 10
+            }, 
+            "tool_errors": null, 
+            "tool_id": null, 
+            "tool_state": "{\"name\": \"Input Dataset\"}", 
+            "tool_version": null, 
+            "type": "data_input", 
+            "user_outputs": []
+        }, 
+        "1": {
+            "annotation": "", 
+            "id": 1, 
+            "input_connections": {
+                "input": {
+                    "id": 0, 
+                    "output_name": "output"
+                }
+            }, 
+            "inputs": [], 
+            "name": "Filter", 
+            "outputs": [
+                {
+                    "name": "out_file1", 
+                    "type": "input"
+                }
+            ], 
+            "position": {
+                "left": 230, 
+                "top": 10
+            }, 
+            "post_job_actions": {}, 
+            "tool_errors": null, 
+            "tool_id": "__TEST_TOOL_SHED_URL__/repos/user1/filtering_0060/Filter1/2.2.0", 
+            "tool_state": "{\"__page__\": 0, \"cond\": \"\\\"c1=='chr22'\\\"\", \"chromInfo\": \"\\\"/Users/dave/Documents/workspace/dev-galaxy/tool-data/shared/ucsc/chrom/?.len\\\"\", \"input\": \"null\"}", 
+            "tool_version": null, 
+            "type": "tool", 
+            "user_outputs": []
+        }
+    }
+}
\ No newline at end of file
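The .ga file above is plain JSON, so its two steps can be inspected without Galaxy. A minimal sketch (using the file name from this diff):

    # Load the exported workflow and list its steps.
    import json

    with open("Workflow_for_0060_filter_workflow_repository.ga") as fh:
        wf = json.load(fh)

    for step_id, step in sorted(wf["steps"].items(), key=lambda kv: int(kv[0])):
        print(step_id, step["type"], step.get("tool_id"))
    # 0 data_input None
    # 1 tool __TEST_TOOL_SHED_URL__/repos/user1/filtering_0060/Filter1/2.2.0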
diff --git a/test/shed_functional/test_data/freebayes/freebayes.tar b/test/shed_functional/test_data/freebayes/freebayes.tar
new file mode 100644
index 0000000..520ef60
Binary files /dev/null and b/test/shed_functional/test_data/freebayes/freebayes.tar differ
diff --git a/test/shed_functional/test_data/freebayes/freebayes.xml b/test/shed_functional/test_data/freebayes/freebayes.xml
new file mode 100644
index 0000000..4d33d92
--- /dev/null
+++ b/test/shed_functional/test_data/freebayes/freebayes.xml
@@ -0,0 +1,669 @@
+<?xml version="1.0"?>
+<tool id="freebayes" name="FreeBayes" version="0.0.2">
+  <requirements>
+    <requirement type="package" version="0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8">freebayes</requirement>
+    <requirement type="package" version="0.1.18">samtools</requirement>
+  </requirements>
+  <description> - Bayesian genetic variant detector</description>
+  <command>
+    ##set up input files
+    #set $reference_fasta_filename = "localref.fa"
+    #if str( $reference_source.reference_source_selector ) == "history":
+        ln -s "${reference_source.ref_file}" "${reference_fasta_filename}" &&
+        samtools faidx "${reference_fasta_filename}" 2>&1 || echo "Error running samtools faidx for FreeBayes" >&2 &&
+    #else:
+        #set $reference_fasta_filename = str( $reference_source.ref_file.fields.path )
+    #end if
+    #for $bam_count, $input_bam in enumerate( $reference_source.input_bams ):
+        ln -s "${input_bam.input_bam}" "localbam_${bam_count}.bam" &&
+        ln -s "${input_bam.input_bam.metadata.bam_index}" "localbam_${bam_count}.bam.bai" &&
+    #end for
+    ##finished setting up inputs
+    
+    ##start FreeBayes commandline
+    freebayes
+    #for $bam_count, $input_bam in enumerate( $reference_source.input_bams ):
+        --bam "localbam_${bam_count}.bam"
+    #end for
+    --fasta-reference "${reference_fasta_filename}" 
+    
+    ##outputs
+    --vcf "${output_vcf}"
+    
+    ##advanced options
+    #if str( $options_type.options_type_selector ) == "advanced":
+        ##additional outputs
+        #if $options_type.output_trace_option:
+            --trace "${output_trace}"
+        #end if
+        #if $options_type.output_failed_alleles_option:
+            --failed-alleles "${output_failed_alleles_bed}"
+        #end if
+        
+        ##additional inputs
+        #if str( $options_type.target_limit_type.target_limit_type_selector ) == "limit_by_target_file":
+            --targets "${options_type.target_limit_type.input_target_bed}"
+        #elif str( $options_type.target_limit_type.target_limit_type_selector ) == "limit_by_region":
+            --region "${options_type.target_limit_type.region_chromosome}:${options_type.target_limit_type.region_start}..${options_type.target_limit_type.region_end}"
+        #end if
+        #if $options_type.input_sample_file:
+            --samples "${options_type.input_sample_file}"
+        #end if
+        #if $options_type.input_populations_file:
+            --populations "${options_type.input_populations_file}"
+        #end if
+        #if $options_type.input_cnv_map_bed:
+            --cnv-map "${options_type.input_cnv_map_bed}"
+        #end if
+        #if str( $options_type.input_variant_type.input_variant_type_selector ) == "provide_vcf":
+            --variant-input "${options_type.input_variant_type.input_variant_vcf}"
+            ${options_type.input_variant_type.only_use_input_alleles}
+        #end if
+        
+        ##reporting
+        #if str( $options_type.section_reporting_type.section_reporting_type_selector ) == "set":
+            --pvar "${options_type.section_reporting_type.pvar}"
+            ${options_type.section_reporting_type.show_reference_repeats}
+        #end if
+        
+        ##population model
+        #if str( $options_type.section_population_model_type.section_population_model_type_selector ) == "set":
+            --theta "${options_type.section_population_model_type.theta}"
+            --ploidy "${options_type.section_population_model_type.ploidy}"
+            ${options_type.section_population_model_type.pooled}
+        #end if
+        
+        ##reference allele
+        #if str( $options_type.use_reference_allele_type.use_reference_allele_type_selector ) == "include_reference_allele":
+            --use-reference-allele
+            ${options_type.use_reference_allele_type.diploid_reference}
+            --reference-quality "${options_type.use_reference_allele_type.reference_quality_mq},${options_type.use_reference_allele_type.reference_quality_bq}"
+        #end if
+        
+        ##allele scope
+        #if str( $options_type.section_allele_scope_type.section_allele_scope_type_selector ) == "set":
+            ${options_type.section_allele_scope_type.no_snps}
+            ${options_type.section_allele_scope_type.no_indels}
+            ${options_type.section_allele_scope_type.no_mnps}
+            ${options_type.section_allele_scope_type.no_complex}
+            --use-best-n-alleles "${options_type.section_allele_scope_type.use_best_n_alleles}"
+            #if $options_type.section_allele_scope_type.max_complex_gap:
+                --max-complex-gap "${options_type.section_allele_scope_type.max_complex_gap}"
+            #end if
+        #end if
+        
+        ##indel realignment
+        ${options_type.left_align_indels}
+        
+        ##input filters
+        #if str( $options_type.section_input_filters_type.section_input_filters_type_selector ) == "set":
+            ${options_type.section_input_filters_type.use_duplicate_reads}
+            #if str( $options_type.section_input_filters_type.no_filter_type.no_filter_type_selector ) == "apply_filters":
+                --min-mapping-quality "${options_type.section_input_filters_type.no_filter_type.min_mapping_quality}"
+                --min-base-quality "${options_type.section_input_filters_type.no_filter_type.min_base_quality}"
+                --min-supporting-quality "${options_type.section_input_filters_type.no_filter_type.min_supporting_quality_mq},${options_type.section_input_filters_type.no_filter_type.min_supporting_quality_bq}"
+            #else:
+                --no-filters
+            #end if
+            --mismatch-base-quality-threshold "${options_type.section_input_filters_type.mismatch_base_quality_threshold}"
+            #if $options_type.section_input_filters_type.read_mismatch_limit:
+                --read-mismatch-limit "${options_type.section_input_filters_type.read_mismatch_limit}"
+            #end if
+            --read-max-mismatch-fraction "${options_type.section_input_filters_type.read_max_mismatch_fraction}"
+            #if $options_type.section_input_filters_type.read_snp_limit:
+                --read-snp-limit "${options_type.section_input_filters_type.read_snp_limit}"
+            #end if
+            #if $options_type.section_input_filters_type.read_indel_limit:
+                --read-indel-limit "${options_type.section_input_filters_type.read_indel_limit}"
+            #end if
+            --indel-exclusion-window "${options_type.section_input_filters_type.indel_exclusion_window}"
+            --min-alternate-fraction "${options_type.section_input_filters_type.min_alternate_fraction}"
+            --min-alternate-count "${options_type.section_input_filters_type.min_alternate_count}"
+            --min-alternate-qsum "${options_type.section_input_filters_type.min_alternate_qsum}"
+            --min-alternate-total "${options_type.section_input_filters_type.min_alternate_total}"
+            --min-coverage "${options_type.section_input_filters_type.min_coverage}"
+        #end if
+        
+        ##bayesian priors
+        #if str( $options_type.section_bayesian_priors_type.section_bayesian_priors_type_selector ) == "set":
+            ${options_type.section_bayesian_priors_type.no_ewens_priors}
+            ${options_type.section_bayesian_priors_type.no_population_priors}
+            ${options_type.section_bayesian_priors_type.hwe_priors}
+        #end if
+        
+        ##observation prior expectations
+        #if str( $options_type.section_observation_prior_expectations_type.section_observation_prior_expectations_type_selector ) == "set":
+            ${options_type.section_observation_prior_expectations_type.binomial_obs_priors}
+            ${options_type.section_observation_prior_expectations_type.allele_balance_priors}
+        #end if
+        
+        ##algorithmic features
+        #if str( $options_type.section_algorithmic_features_type.section_algorithmic_features_type_selector ) == "set":
+            --site-selection-max-iterations "${options_type.section_algorithmic_features_type.site_selection_max_iterations}"
+            --genotyping-max-iterations "${options_type.section_algorithmic_features_type.genotyping_max_iterations}"
+            --genotyping-max-banddepth "${options_type.section_algorithmic_features_type.genotyping_max_banddepth}"
+            --posterior-integration-limits "${options_type.section_algorithmic_features_type.posterior_integration_limits_n},${options_type.section_algorithmic_features_type.posterior_integration_limits_m}"
+            ${options_type.section_algorithmic_features_type.no_permute}
+            ${options_type.section_algorithmic_features_type.exclude_unobserved_genotypes}
+            #if $options_type.section_algorithmic_features_type.genotype_variant_threshold:
+                --genotype-variant-threshold "${options_type.section_algorithmic_features_type.genotype_variant_threshold}"
+            #end if
+            ${options_type.section_algorithmic_features_type.use_mapping_quality}
+            --read-dependence-factor "${options_type.section_algorithmic_features_type.read_dependence_factor}"
+            ${options_type.section_algorithmic_features_type.no_marginals}
+        #end if
+        
+    #end if
+  </command>
+  <inputs>
+    <conditional name="reference_source">
+      <param name="reference_source_selector" type="select" label="Choose the source for the reference list">
+        <option value="cached">Locally cached</option>
+        <option value="history">History</option>
+      </param>
+      <when value="cached">
+        <repeat name="input_bams" title="Sample BAM file" min="1">
+            <param name="input_bam" type="data" format="bam" label="BAM file">
+              <validator type="unspecified_build" />
+            </param>
+        </repeat>
+        <param name="ref_file" type="select" label="Using reference genome">
+          <options from_data_table="sam_fa_indexes">
+            <!-- <filter type="sam_fa_indexes" key="dbkey" ref="input_bam" column="value"/> does not yet work in a repeat...--> 
+          </options>
+          <validator type="no_options" message="A built-in reference genome is not available for the build associated with the selected input file"/>
+        </param>
+      </when>
+      <when value="history"> <!-- FIX ME!!!! -->
+        <repeat name="input_bams" title="Sample BAM file" min="1">
+            <param name="input_bam" type="data" format="bam" label="BAM file" />
+        </repeat>
+        <param name="ref_file" type="data" format="fasta" label="Using reference file" />
+      </when>
+    </conditional>
+    
+    <conditional name="options_type">
+      <param name="options_type_selector" type="select" label="Basic or Advanced options">
+        <option value="basic" selected="True">Basic</option>
+        <option value="advanced">Advanced</option>
+      </param>
+      <when value="basic">
+        <!-- Do nothing here -->
+      </when>
+      <when value="advanced">
+        
+        <!-- output -->
+        <param name="output_failed_alleles_option" type="boolean" truevalue="--failed-alleles" falsevalue="" checked="False" label="Write out failed alleles file" />
+        <param name="output_trace_option" type="boolean" truevalue="--trace" falsevalue="" checked="False" label="Write out algorithm trace file" />
+        
+        
+        <!-- input -->
+        <conditional name="target_limit_type">
+          <param name="target_limit_type_selector" type="select" label="Limit analysis to listed targets">
+            <option value="do_not_limit" selected="True">Do not limit</option>
+            <option value="limit_by_target_file">Limit by target file</option>
+            <option value="limit_by_region">Limit to region</option>
+          </param>
+          <when value="do_not_limit">
+            <!-- Do nothing here -->
+          </when>
+          <when value="limit_by_target_file">
+            <param name="input_target_bed" type="data" format="bed" label="Limit analysis to targets listed in the BED-format FILE." />
+          </when>
+          <when value="limit_by_region">
+            <param name="region_chromosome" type="text" label="Region Chromosome" value="" /> <!--only once? -->
+            <param name="region_start" type="integer" label="Region Start" value="" />
+            <param name="region_end" type="integer" label="Region End" value="" />
+          </when>
+        </conditional>
+        <param name="input_sample_file" type="data" format="txt" label="Limit analysis to samples listed (one per line) in the FILE" optional="True" />
+        <param name="input_populations_file" type="data" format="txt" label="Populations File" optional="True" />
+        <param name="input_cnv_map_bed" type="data" format="bed" label="Read a copy number map from the BED file FILE" optional="True" />
+        <conditional name="input_variant_type">
+          <param name="input_variant_type_selector" type="select" label="Provide variants file">
+            <option value="do_not_provide" selected="True">Do not provide</option>
+            <option value="provide_vcf">Provide VCF file</option>
+          </param>
+          <when value="do_not_provide">
+            <!-- Do nothing here -->
+          </when>
+          <when value="provide_vcf">
+            <param name="input_variant_vcf" type="data" format="vcf" label="Use variants reported in VCF file as input to the algorithm" />
+            <param name="only_use_input_alleles" type="boolean" truevalue="--only-use-input-alleles" falsevalue="" checked="False" label="Only provide variant calls and genotype likelihoods for sites in VCF" />
+          </when>
+        </conditional>
+        
+        
+        <!-- reporting -->
+        <conditional name="section_reporting_type">
+          <param name="section_reporting_type_selector" type="select" label="Set Reporting options">
+            <option value="do_not_set" selected="True">Do not set</option>
+            <option value="set">Set</option>
+          </param>
+          <when value="do_not_set">
+            <!-- do nothing here -->
+          </when>
+          <when value="set">
+            <param name="pvar" type="float" label="Report sites if the probability that there is a polymorphism at the site is greater" value="0.0001" />
+            <param name="show_reference_repeats" type="boolean" truevalue="--show-reference-repeats" falsevalue="" checked="False" label="Calculate and show information about reference repeats" />
+          </when>
+        </conditional>
+        
+        
+        <!-- population model -->
+        <conditional name="section_population_model_type">
+          <param name="section_population_model_type_selector" type="select" label="Set population model options">
+            <option value="do_not_set" selected="True">Do not set</option>
+            <option value="set">Set</option>
+          </param>
+          <when value="do_not_set">
+            <!-- do nothing here -->
+          </when>
+          <when value="set">
+            <param name="theta" type="float" label="expected mutation rate or pairwise nucleotide diversity among the population" value="0.001" help="This serves as the single parameter to the Ewens Sampling Formula prior model"/>
+            <param name="ploidy" type="integer" label="default ploidy for the analysis" value="2" />
+            <param name="pooled" type="boolean" truevalue="--pooled" falsevalue="" checked="False" label="Assume that samples result from pooled sequencing" help="When using this flag, set --ploidy to the number of alleles in each sample." />
+          </when>
+        </conditional>
+        
+        <!-- reference allele -->
+            <conditional name="use_reference_allele_type">
+              <param name="use_reference_allele_type_selector" type="select" label="Include the reference allele in the analysis">
+                <option value="do_not_include_reference_allele" selected="True">Do not include</option>
+                <option value="include_reference_allele">Include</option>
+              </param>
+              <when value="do_not_include_reference_allele">
+                <!-- Do nothing here -->
+              </when>
+              <when value="include_reference_allele">
+                <param name="diploid_reference" type="boolean" truevalue="--diploid-reference" falsevalue="" checked="False" label="Treat reference as diploid" />
+                <param name="reference_quality_mq" type="integer" label="Assign mapping quality" value="100" />
+                <param name="reference_quality_bq" type="integer" label="Assign base quality" value="60" />
+              </when>
+            </conditional>     
+        
+        <!-- allele scope -->
+        <conditional name="section_allele_scope_type">
+          <param name="section_allele_scope_type_selector" type="select" label="Set allele scope options">
+            <option value="do_not_set" selected="True">Do not set</option>
+            <option value="set">Set</option>
+          </param>
+          <when value="do_not_set">
+            <!-- do nothing here -->
+          </when>
+          <when value="set">
+            <param name="no_snps" type="boolean" truevalue="--no-snps" falsevalue="" checked="False" label="Ignore SNP alleles" />
+            <param name="no_indels" type="boolean" truevalue="--no-indels" falsevalue="" checked="False" label="Ignore insertion and deletion alleles" />
+            <param name="no_mnps" type="boolean" truevalue="--no-mnps" falsevalue="" checked="False" label="Ignore multi-nuceotide polymorphisms, MNPs" />
+            <param name="no_complex" type="boolean" truevalue="--no-complex" falsevalue="" checked="False" label="Ignore complex events (composites of other classes)" />
+            <param name="use_best_n_alleles" type="integer" label="Evaluate only the best N SNP alleles" value="0" min="0" help="Ranked by sum of supporting quality scores; Set to 0 to use all" />
+            <param name="max_complex_gap" type="integer" label="Allow complex alleles with contiguous embedded matches of up to this length" value="" optional="True"/>
+          </when>
+        </conditional>
+        
+        <!-- indel realignment -->
+        <param name="left_align_indels" type="boolean" truevalue="--left-align-indels" falsevalue="" checked="False" label="Left-realign and merge gaps embedded in reads" />
+        
+        <!-- input filters -->
+        <conditional name="section_input_filters_type">
+          <param name="section_input_filters_type_selector" type="select" label="Set input filters options">
+            <option value="do_not_set" selected="True">Do not set</option>
+            <option value="set">Set</option>
+          </param>
+          <when value="do_not_set">
+            <!-- do nothing here -->
+          </when>
+          <when value="set">
+            <param name="use_duplicate_reads" type="boolean" truevalue="--use-duplicate-reads" falsevalue="" checked="False" label="Include duplicate-marked alignments in the analysis" />
+            <conditional name="no_filter_type">
+              <param name="no_filter_type_selector" type="select" label="Apply filters">
+                <option value="apply_filters" selected="True">Apply</option>
+                <option value="no_filters">Do not apply</option>
+              </param>
+              <when value="no_filters">
+                <!-- Do nothing here --> <!-- no-filters -->
+              </when>
+              <when value="apply_filters">
+                <param name="min_mapping_quality" type="integer" label="Exclude alignments from analysis if they have a mapping quality less than" value="30" />
+                <param name="min_base_quality" type="integer" label="Exclude alleles from analysis if their supporting base quality less than" value="20" />
+                <param name="min_supporting_quality_mq" type="integer" label="In order to consider an alternate allele, at least one supporting alignment must have mapping quality" value="0" />
+                <param name="min_supporting_quality_bq" type="integer" label="In order to consider an alternate allele, at least one supporting alignment must have base quality" value="0" />
+              </when>
+            </conditional>
+            <param name="mismatch_base_quality_threshold" type="integer" label="Count mismatches toward read-mismatch-limit if the base quality of the mismatch is >=" value="10" />
+            <param name="read_mismatch_limit" type="integer" label="Exclude reads with more than N mismatches where each mismatch has base quality >= mismatch-base-quality-threshold" value="" optional="True" />
+            <param name="read_max_mismatch_fraction" type="float" label="Exclude reads with more than N [0,1] fraction of mismatches where each mismatch has base quality >= mismatch-base-quality-threshold" value="1.0" />
+            <param name="read_snp_limit" type="integer" label="Exclude reads with more than N base mismatches, ignoring gaps with quality >= mismatch-base-quality-threshold" value="" optional="True" />
+            <param name="read_indel_limit" type="integer" label="Exclude reads with more than N separate gaps" value="" optional="True" />
+            <param name="indel_exclusion_window" type="integer" label="Ignore portions of alignments this many bases from a putative insertion or deletion allele" value="0" />
+            <param name="min_alternate_fraction" type="float" label="Require at least this fraction of observations supporting an alternate allele within a single individual in the in order to evaluate the position" value="0" />
+            <param name="min_alternate_count" type="integer" label="Require at least this count of observations supporting an alternate allele within a single individual in order to evaluate the position" value="1" />
+            <param name="min_alternate_qsum" type="integer" label="Require at least this sum of quality of observations supporting an alternate allele within a single individual in order to evaluate the position" value="0" />
+            <param name="min_alternate_total" type="integer" label="Require at least this count of observations supporting an alternate allele within the total population in order to use the allele in analysis" value="1" />
+            <param name="min_coverage" type="integer" label="Require at least this coverage to process a site" value="0" />
+          </when>
+        </conditional>
+        
+        
+        <!-- bayesian priors -->
+        <conditional name="section_bayesian_priors_type">
+          <param name="section_bayesian_priors_type_selector" type="select" label="Set bayesian priors options">
+            <option value="do_not_set" selected="True">Do not set</option>
+            <option value="set">Set</option>
+          </param>
+          <when value="do_not_set">
+            <!-- do nothing here -->
+          </when>
+          <when value="set">
+            <param name="no_ewens_priors" type="boolean" truevalue="--no-ewens-priors" falsevalue="" checked="False" label="Turns off the Ewens' Sampling Formula component of the priors" />
+            <param name="no_population_priors" type="boolean" truevalue="--no-population-priors" falsevalue="" checked="False" label="No population priors" help="Equivalent to --pooled --no-ewens-priors" />
+            <param name="hwe_priors" type="boolean" truevalue="--hwe-priors" falsevalue="" checked="False" label="Use the probability of the combination arising under HWE given the allele frequency as estimated by observation frequency" />
+          </when>
+        </conditional>
+        
+        <!-- observation prior expectations -->
+        <conditional name="section_observation_prior_expectations_type">
+          <param name="section_observation_prior_expectations_type_selector" type="select" label="Set observation prior expectations options">
+            <option value="do_not_set" selected="True">Do not set</option>
+            <option value="set">Set</option>
+          </param>
+          <when value="do_not_set">
+            <!-- do nothing here -->
+          </when>
+          <when value="set">
+            <param name="binomial_obs_priors" type="boolean" truevalue="--binomial-obs-priors" falsevalue="" checked="False" label="Incorporate expectations about osbervations into the priors, Uses read placement probability, strand balance probability, and read position (5'-3') probability" />
+            <param name="allele_balance_priors" type="boolean" truevalue="--allele-balance-priors" falsevalue="" checked="False" label="Use aggregate probability of observation balance between alleles as a component of the priors.  Best for observations with minimal inherent reference bias" />
+          </when>
+        </conditional>
+        
+        
+        <!-- algorithmic features -->
+        <conditional name="section_algorithmic_features_type">
+          <param name="section_algorithmic_features_type_selector" type="select" label="Set algorithmic features options">
+            <option value="do_not_set" selected="True">Do not set</option>
+            <option value="set">Set</option>
+          </param>
+          <when value="do_not_set">
+            <!-- do nothing here -->
+          </when>
+          <when value="set">
+            <param name="site_selection_max_iterations" type="integer" label="Uses hill-climbing algorithm to search posterior space for N iterations to determine if the site should be evaluated." value="5" help="Set to 0 to prevent use of this algorithm for site selection, and to a low integer for improvide site selection at a slight performance penalty" />
+            <param name="genotyping_max_iterations" type="integer" label="Iterate no more than N times during genotyping step" value="25" />
+            <param name="genotyping_max_banddepth" type="integer" label="Integrate no deeper than the Nth best genotype by likelihood when genotyping" value="6" />
+            <param name="posterior_integration_limits_n" type="integer" label="Posteriror integration limit N" help="Integrate all genotype combinations in our posterior space which include no more than N samples with their Mth best data likelihood." value="1" />
+            <param name="posterior_integration_limits_m" type="integer" label="Posteriror integration limit M" help="Integrate all genotype combinations in our posterior space which include no more than N samples with their Mth best data likelihood." value="3" />
+            <param name="no_permute" type="boolean" truevalue="--no-permute" falsevalue="" checked="False" label="Do not scale prior probability of genotype combination given allele frequency by the number of permutations of included genotypes" />
+            <param name="exclude_unobserved_genotypes" type="boolean" truevalue="--exclude-unobserved-genotypes" falsevalue="" checked="False" label="Skip sample genotypings for which the sample has no supporting reads" />
+            <param name="genotype_variant_threshold" type="integer" label="Limit posterior integration to samples where the second-best genotype likelihood is no more than log(N) from the highest genotype likelihood for the sample" value="" optional="True" />
+            <param name="use_mapping_quality" type="boolean" truevalue="--use-mapping-quality" falsevalue="" checked="False" label="Use mapping quality of alleles when calculating data likelihoods" />
+            <param name="read_dependence_factor" type="float" label="Incorporate non-independence of reads by scaling successive observations by this factor during data likelihood calculations" value="0.9" />
+            <param name="no_marginals" type="boolean" truevalue="--no-marginals" falsevalue="" checked="False" label="Do not calculate the marginal probability of genotypes.  Saves time and improves scaling performance in large populations" />
+          </when>
+        </conditional>
+        
+        
+      </when>
+    </conditional>
+    
+  </inputs>
+  <outputs>
+    <data format="vcf" name="output_vcf" label="${tool.name} on ${on_string} (variants)" />
+    <data format="bed" name="output_failed_alleles_bed" label="${tool.name} on ${on_string} (failed alleles)">
+        <filter>options_type['options_type_selector'] == "advanced" and options_type['output_failed_alleles_option'] is True</filter>
+    </data>
+    <data format="txt" name="output_trace" label="${tool.name} on ${on_string} (trace)">
+        <filter>options_type['options_type_selector'] == "advanced" and options_type['output_trace_option'] is True</filter>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+     <param name="reference_source_selector" value="history" />
+      <param name="ref_file" ftype="fasta" value="phiX.fasta"/>
+      <param name="input_bam" ftype="bam" value="gatk/fake_phiX_reads_1.bam"/>
+      <param name="options_type_selector" value="basic"/>
+      <output name="output_vcf" file="variant_detection/freebayes/freebayes_out_1.vcf.contains" compare="contains"/>
+      <!-- <output name="output_failed_alleles_bed" file="empty_file.dat" />
+      <output name="output_trace" file="variant_detection/freebayes/freebayes_out_1.output_trace" /> -->
+    </test>
+  </tests>
+  <help>
+**What it does**
+
+This tool uses FreeBayes to call SNPs given a reference sequence and a BAM alignment file.
+
+FreeBayes is a high-performance, flexible, and open-source Bayesian genetic variant detector. It operates on BAM alignment files, which are produced by most contemporary short-read aligners.
+
+In addition to substantial performance improvements over its predecessors (PolyBayes, GigaBayes, and BamBayes), it expands the scope of SNP and small-indel variant calling to populations of individuals with heterogeneous copy number. FreeBayes is currently under active development. 
+
+Go `here <http://bioinformatics.bc.edu/marthlab/FreeBayes>`_ for details on FreeBayes.
+
+------
+
+**Inputs**
+
+FreeBayes accepts an input aligned BAM file.
+
+
+**Outputs**
+
+The output is in the VCF format.
+
+-------
+
+**Settings**::
+
+  input and output:
+
+   -b --bam FILE   Add FILE to the set of BAM files to be analyzed.
+   -c --stdin      Read BAM input on stdin.
+   -v --vcf FILE   Output VCF-format results to FILE.
+   -f --fasta-reference FILE
+                   Use FILE as the reference sequence for analysis.
+                   An index file (FILE.fai) will be created if none exists.
+                   If neither --targets nor --region are specified, FreeBayes
+                   will analyze every position in this reference.
+   -t --targets FILE
+                   Limit analysis to targets listed in the BED-format FILE.
+   -r --region <chrom>:<start_position>..<end_position>
+                   Limit analysis to the specified region, 0-base coordinates,
+                   end_position not included (same as BED format).
+   -s --samples FILE
+                   Limit analysis to samples listed (one per line) in the FILE.
+                   By default FreeBayes will analyze all samples in its input
+                   BAM files.
+   --populations FILE
+                   Each line of FILE should list a sample and a population which
+                   it is part of.  The population-based bayesian inference model
+                   will then be partitioned on the basis of the populations.
+   -A --cnv-map FILE
+                   Read a copy number map from the BED file FILE, which has
+                   the format:
+                      reference sequence, start, end, sample name, copy number
+                   ... for each region in each sample which does not have the
+                   default copy number as set by --ploidy.
+   -L --trace FILE  Output an algorithmic trace to FILE.
+   --failed-alleles FILE
+                   Write a BED file of the analyzed positions which do not
+                   pass --pvar to FILE.
+   -@ --variant-input VCF
+                   Use variants reported in VCF file as input to the algorithm.
+                   A report will be generated for every record in the VCF file.
+   -l --only-use-input-alleles
+                   Only provide variant calls and genotype likelihoods for sites
+                   and alleles which are provided in the VCF input, and provide
+                   output in the VCF for all input alleles, not just those which
+                   have support in the data.
+
+  reporting:
+
+   -P --pvar N     Report sites if the probability that there is a polymorphism
+                   at the site is greater than N.  default: 0.0001
+   -_ --show-reference-repeats
+                   Calculate and show information about reference repeats in
+                   the VCF output.
+
+  population model:
+
+   -T --theta N    The expected mutation rate or pairwise nucleotide diversity
+                   among the population under analysis.  This serves as the
+                   single parameter to the Ewens Sampling Formula prior model
+                   default: 0.001
+   -p --ploidy N   Sets the default ploidy for the analysis to N.  default: 2
+   -J --pooled     Assume that samples result from pooled sequencing.
+                   When using this flag, set --ploidy to the number of
+                   alleles in each sample.
+
+  reference allele:
+
+   -Z --use-reference-allele
+                   This flag includes the reference allele in the analysis as
+                   if it is another sample from the same population.
+   -H --diploid-reference
+                   If using the reference sequence as a sample (-Z),
+                   treat it as diploid.  default: false (reference is haploid)
+   --reference-quality MQ,BQ
+                   Assign mapping quality of MQ to the reference allele at each
+                   site and base quality of BQ.  default: 100,60
+
+  allele scope:
+
+   -I --no-snps    Ignore SNP alleles.
+   -i --no-indels  Ignore insertion and deletion alleles.
+   -X --no-mnps    Ignore multi-nucleotide polymorphisms (MNPs).
+   -u --no-complex Ignore complex events (composites of other classes).
+   -n --use-best-n-alleles N
+                   Evaluate only the best N SNP alleles, ranked by sum of
+                   supporting quality scores.  (Set to 0 to use all; default: all)
+   -E --max-complex-gap N
+                   Allow complex alleles with contiguous embedded matches of up
+                   to this length.
+
+  indel realignment:
+
+   -O --left-align-indels
+                   Left-realign and merge gaps embedded in reads. default: false
+
+  input filters:
+
+   -4 --use-duplicate-reads
+                   Include duplicate-marked alignments in the analysis.
+                   default: exclude duplicates
+   -m --min-mapping-quality Q
+                   Exclude alignments from analysis if they have a mapping
+                   quality less than Q.  default: 30
+   -q --min-base-quality Q
+                   Exclude alleles from analysis if their supporting base
+                   quality is less than Q.  default: 20
+   -R --min-supporting-quality MQ,BQ
+                   In order to consider an alternate allele, at least one supporting
+                   alignment must have mapping quality MQ, and one supporting 
+                   allele must have base quality BQ. default: 0,0, unset
+   -Q --mismatch-base-quality-threshold Q
+                   Count mismatches toward --read-mismatch-limit if the base
+                   quality of the mismatch is >= Q.  default: 10
+   -U --read-mismatch-limit N
+                   Exclude reads with more than N mismatches where each mismatch
+                   has base quality >= mismatch-base-quality-threshold.
+                   default: ~unbounded
+   -z --read-max-mismatch-fraction N
+                   Exclude reads with more than N [0,1] fraction of mismatches where
+                   each mismatch has base quality >= mismatch-base-quality-threshold
+                   default: 1.0
+   -$ --read-snp-limit N
+                   Exclude reads with more than N base mismatches, ignoring gaps
+                   with quality >= mismatch-base-quality-threshold.
+                   default: ~unbounded
+   -e --read-indel-limit N
+                   Exclude reads with more than N separate gaps.
+                   default: ~unbounded
+   -0 --no-filters Do not use any input base and mapping quality filters
+                   Equivalent to -m 0 -q 0 -R 0 -S 0
+   -x --indel-exclusion-window
+                   Ignore portions of alignments this many bases from a
+                   putative insertion or deletion allele.  default: 0
+   -F --min-alternate-fraction N
+                   Require at least this fraction of observations supporting
+                   an alternate allele within a single individual in order
+                   to evaluate the position.  default: 0.0
+   -C --min-alternate-count N
+                   Require at least this count of observations supporting
+                   an alternate allele within a single individual in order
+                   to evaluate the position.  default: 1
+   -3 --min-alternate-qsum N
+                   Require at least this sum of quality of observations supporting
+                   an alternate allele within a single individual in order
+                   to evaluate the position.  default: 0
+   -G --min-alternate-total N
+                   Require at least this count of observations supporting
+                   an alternate allele within the total population in order
+                   to use the allele in analysis.  default: 1
+   -! --min-coverage N
+                   Require at least this coverage to process a site.  default: 0
+
+  bayesian priors:
+
+   -Y --no-ewens-priors
+                   Turns off the Ewens' Sampling Formula component of the priors.
+   -k --no-population-priors
+                   Equivalent to --pooled --no-ewens-priors
+   -w --hwe-priors Use the probability of the combination arising under HWE given
+                   the allele frequency as estimated by observation frequency.
+
+  observation prior expectations:
+
+   -V --binomial-obs-priors
+                   Incorporate expectations about observations into the priors.
+                   Uses read placement probability, strand balance probability,
+                   and read position (5'-3') probability.
+   -a --allele-balance-priors
+                   Use aggregate probability of observation balance between alleles
+                   as a component of the priors.  Best for observations with minimal
+                   inherent reference bias.
+
+  algorithmic features:
+
+   -M --site-selection-max-iterations N
+                   Uses hill-climbing algorithm to search posterior space for N
+                   iterations to determine if the site should be evaluated.  Set to 0
+                   to prevent use of this algorithm for site selection, and
+                   to a low integer for improved site selection at a slight
+                   performance penalty. default: 5.
+   -B --genotyping-max-iterations N
+                   Iterate no more than N times during genotyping step. default: 25.
+   --genotyping-max-banddepth N
+                   Integrate no deeper than the Nth best genotype by likelihood when
+                   genotyping. default: 6.
+   -W --posterior-integration-limits N,M
+                   Integrate all genotype combinations in our posterior space
+                   which include no more than N samples with their Mth best
+                   data likelihood. default: 1,3.
+   -K --no-permute
+                   Do not scale prior probability of genotype combination given allele
+                   frequency by the number of permutations of included genotypes.
+   -N --exclude-unobserved-genotypes
+                   Skip sample genotypings for which the sample has no supporting reads.
+   -S --genotype-variant-threshold N
+                   Limit posterior integration to samples where the second-best
+                   genotype likelihood is no more than log(N) from the highest
+                   genotype likelihood for the sample.  default: ~unbounded
+   -j --use-mapping-quality
+                   Use mapping quality of alleles when calculating data likelihoods.
+   -D --read-dependence-factor N
+                   Incorporate non-independence of reads by scaling successive
+                   observations by this factor during data likelihood
+                   calculations.  default: 0.9
+   -= --no-marginals
+                   Do not calculate the marginal probability of genotypes.  Saves
+                   time and improves scaling performance in large populations.
+
+
+------
+
+**Citation**
+
+For the underlying tool, please cite `FreeBayes <http://bioinformatics.bc.edu/marthlab/FreeBayes>`_.
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
+  </help>
+</tool>
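
As a rough illustration of the input-filter thresholds documented in the help
text above, here is a minimal sketch of a direct FreeBayes invocation; the file
names and threshold values are illustrative placeholders, not the command the
Galaxy wrapper generates:

    # Minimal sketch: run FreeBayes with a few of the thresholds described above.
    # "reference.fa" and "aligned.bam" are hypothetical inputs.
    import subprocess

    cmd = [
        "freebayes",
        "-f", "reference.fa",                # reference FASTA
        "--min-alternate-fraction", "0.2",   # -F: per-sample fraction of alt observations
        "--min-alternate-count", "2",        # -C: per-sample count of alt observations
        "--min-coverage", "10",              # require 10x coverage to process a site
        "-v", "out.vcf",                     # write VCF-format results here
        "aligned.bam",
    ]
    subprocess.check_call(cmd)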
diff --git a/test/shed_functional/test_data/freebayes/invalid_tool_dependencies/tool_dependencies.xml b/test/shed_functional/test_data/freebayes/invalid_tool_dependencies/tool_dependencies.xml
new file mode 100644
index 0000000..cced35b
--- /dev/null
+++ b/test/shed_functional/test_data/freebayes/invalid_tool_dependencies/tool_dependencies.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="freebayes" version="0.9.5">
+        <install version="1.0">
+            <actions>
+                <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
+                <action type="shell_command">git reset --hard 9696d0ce8a962f7bb61c4791be5ce44312b81cf8</action>
+                <action type="shell_command">make</action>
+                <action type="move_directory_files">
+                    <source_directory>bin</source_directory>
+                    <destination_directory>$INSTALL_DIR/bin</destination_directory>
+                </action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+FreeBayes requires g++ and the standard C and C++ development libraries.
+Additionally, cmake is required for building the BamTools API.
+        </readme>
+    </package>
+    <package name="samtools" version="0.2.15">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1.18.tar.bz2</action>
+                <action type="shell_command">sed -i .bak -e 's/-lcurses/-lncurses/g' Makefile</action>
+                <action type="shell_command">make</action>
+                <action type="move_file">
+                    <source>samtools</source>
+                    <destination>$INSTALL_DIR/bin</destination>
+                </action>
+                <action type="move_file">
+                    <source>misc/maq2sam-long</source>
+                    <destination>$INSTALL_DIR/bin</destination>
+                </action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+Compiling SAMtools requires the ncurses and zlib development libraries.
+        </readme>
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/freebayes/malformed_tool_dependencies/tool_dependencies.xml b/test/shed_functional/test_data/freebayes/malformed_tool_dependencies/tool_dependencies.xml
new file mode 100644
index 0000000..2626f6f
--- /dev/null
+++ b/test/shed_functional/test_data/freebayes/malformed_tool_dependencies/tool_dependencies.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="freebayes" version="0.9.5">
+        <install version="1.0">
+            <actions>
+                <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action>
+                <action type="shell_command">git reset --hard 9696d0ce8a962f7bb61c4791be5ce44312b81cf8</action>
+                <action type="shell_command">make</action>
+                <action type="move_directory_files">
+                    <source_directory>bin</source_directory>
+                    <destination_directory>$INSTALL_DIR/bin</destination_directory>
+                </action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+FreeBayes requires g++ and the standard C and C++ development libraries.
+Additionally, cmake is required for building the BamTools API.
+        </readme>
+    </package>
+    <package name="samtools" version="0.2.15">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1.18.tar.bz2</action>
+                <action type="shell_command">sed -i .bak -e 's/-lcurses/-lncurses/g' Makefile</action>
+                <action type="shell_command">make</action>
+                <action type="move_file">
+                    <source>samtools</source>
+                    <destination>$INSTALL_DIR/bin</destination>
+                </action>
+                <action type="move_file">
+                    <source>misc/maq2sam-long</source>
+                    <destination>$INSTALL_DIR/bin</destination>
+                </action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+This readme tag has invalid XML ><
+        </readme>
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/freebayes/sam_fa_indices.loc.sample b/test/shed_functional/test_data/freebayes/sam_fa_indices.loc.sample
new file mode 100644
index 0000000..d06993b
--- /dev/null
+++ b/test/shed_functional/test_data/freebayes/sam_fa_indices.loc.sample
@@ -0,0 +1,28 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of Samtools indexed sequences data files.  You will need
+#to create these data files and then create a sam_fa_indices.loc file 
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The sam_fa_indices.loc 
+#file has this format (white space characters are TAB characters):
+#
+#index	<seq>	<location>
+#
+#So, for example, if you had the hg18 indexed sequences stored in 
+#/depot/data2/galaxy/sam/, 
+#then the sam_fa_indices.loc entry would look like this:
+#
+#index	hg18	/depot/data2/galaxy/sam/hg18.fa
+#
+#and your /depot/data2/galaxy/sam/ directory
+#would contain hg18.fa and hg18.fa.fai files:
+#
+#-rw-r--r--  1 james    universe 830134 2005-09-13 10:12 hg18.fa
+#-rw-r--r--  1 james    universe 527388 2005-09-13 10:12 hg18.fa.fai
+#
+#Your sam_fa_indices.loc file should include an entry per line for 
+#each index set you have stored.  The file in the path does actually
+#exist, but it should never be directly used. Instead, the name serves
+#as a prefix for the index file.  For example:
+#
+#index	hg18	/depot/data2/galaxy/sam/hg18.fa
+#index	hg19	/depot/data2/galaxy/sam/hg19.fa
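
For illustration, a minimal sketch (an assumption, not Galaxy's own loader) of
how one tab-separated entry above maps onto the line_type/value/path columns
declared in the tool_data_table_conf.xml.sample that follows:

    # Minimal sketch: parse one sam_fa_indices.loc entry into its three columns.
    def parse_loc_line(line):
        line = line.rstrip("\n")
        if not line or line.startswith("#"):
            return None  # skip comments and blank lines
        line_type, value, path = line.split("\t")
        return {"line_type": line_type, "value": value, "path": path}

    entry = parse_loc_line("index\thg18\t/depot/data2/galaxy/sam/hg18.fa")
    assert entry == {"line_type": "index", "value": "hg18",
                     "path": "/depot/data2/galaxy/sam/hg18.fa"}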
diff --git a/test/shed_functional/test_data/freebayes/tool_data_table_conf.xml.sample b/test/shed_functional/test_data/freebayes/tool_data_table_conf.xml.sample
new file mode 100644
index 0000000..2170eb6
--- /dev/null
+++ b/test/shed_functional/test_data/freebayes/tool_data_table_conf.xml.sample
@@ -0,0 +1,8 @@
+<!-- Use the file tool_data_table_conf.xml.oldlocstyle if you don't want to update your loc files as changed in revision 4550:535d276c92bc-->
+<tables>
+    <!-- Location of SAMTools indexes and other files -->
+    <table name="sam_fa_indexes" comment_char="#">
+        <columns>line_type, value, path</columns>
+        <file path="tool-data/sam_fa_indices.loc" />
+    </table>
+</tables>
diff --git a/test/shed_functional/test_data/freebayes/tool_dependencies.xml b/test/shed_functional/test_data/freebayes/tool_dependencies.xml
new file mode 100644
index 0000000..d995cf8
--- /dev/null
+++ b/test/shed_functional/test_data/freebayes/tool_dependencies.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+<tool_dependency>
+    <package name="freebayes" version="0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8">
+        <install version="1.0">
+            <actions>
+                <action type="shell_command">echo "Success."</action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+FreeBayes requires g++ and the standard C and C++ development libraries.
+Additionally, cmake is required for building the BamTools API.
+        </readme>
+    </package>
+    <package name="samtools" version="0.1.18">
+        <install version="1.0">
+            <actions>
+                <action type="shell_command">echo "Success."</action>
+                <action type="set_environment">
+                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>
+Compiling SAMtools requires the ncurses and zlib development libraries.
+        </readme>
+    </package>
+</tool_dependency>
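
For context, a minimal sketch (an assumption, not the Tool Shed's actual
installer) of how the <actions> elements in these tool_dependencies.xml
fixtures could be dispatched on their type attribute:

    # Minimal sketch: walk a tool_dependencies.xml and dispatch each <action>.
    import subprocess
    import xml.etree.ElementTree as ET

    def run_actions(xml_path, install_dir):
        tree = ET.parse(xml_path)
        for action in tree.iter("action"):
            kind = action.get("type")
            if kind == "shell_command":
                subprocess.check_call(action.text, shell=True)
            elif kind == "set_environment":
                for var in action.findall("environment_variable"):
                    value = var.text.replace("$INSTALL_DIR", install_dir)
                    print("%s %s -> %s" % (var.get("action"), var.get("name"), value))
            # download_by_url, move_file, move_directory_files, and
            # make_directory are omitted here for brevity.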
diff --git a/test/shed_functional/test_data/htseq_count/htseq_count.tar b/test/shed_functional/test_data/htseq_count/htseq_count.tar
new file mode 100644
index 0000000..41ed8f8
Binary files /dev/null and b/test/shed_functional/test_data/htseq_count/htseq_count.tar differ
diff --git a/test/shed_functional/test_data/package_matplotlib/package_matplotlib_1_2.tar b/test/shed_functional/test_data/package_matplotlib/package_matplotlib_1_2.tar
new file mode 100644
index 0000000..188f797
Binary files /dev/null and b/test/shed_functional/test_data/package_matplotlib/package_matplotlib_1_2.tar differ
diff --git a/test/shed_functional/test_data/package_matplotlib/tool_dependencies.xml b/test/shed_functional/test_data/package_matplotlib/tool_dependencies.xml
new file mode 100644
index 0000000..c48515b
--- /dev/null
+++ b/test/shed_functional/test_data/package_matplotlib/tool_dependencies.xml
@@ -0,0 +1,25 @@
+<tool_dependency>
+<!--NUMPY-->
+    <package name="matplotlib" version="1.2.1">
+        <install version="1.0">
+            <actions>
+                <action type="download_by_url">https://downloads.sourceforge.net/project/matplotlib/matplotlib/matplotlib-1.2.1/matplotlib-1.2.1.tar.gz</action>
+                <action type="shell_command">wget http://downloads.sourceforge.net/project/freetype/freetype2/2.4.11/freetype-2.4.11.tar.bz2</action>
+                <action type="shell_command">tar xfvj freetype-2.4.11.tar.bz2 && 
+                    cd freetype-2.4.11 && 
+                    ./configure --prefix=$INSTALL_DIR/freetype/build &&
+                    make && 
+                    make install</action>
+                <action type="make_directory">$INSTALL_DIR/lib/python</action>
+                <action type="shell_command">export PYTHONPATH=$PYTHONPATH:$INSTALL_DIR/lib/python && 
+                    export CPLUS_INCLUDE_PATH=$INSTALL_DIR/freetype/build/include:$INSTALL_DIR/freetype/build/include/freetype2/ && 
+                    export LIBRARY_PATH=$INSTALL_DIR/freetype/build/lib/ && 
+                    python setup.py install --home $INSTALL_DIR --install-scripts $INSTALL_DIR/bin</action>
+                <action type="set_environment">
+                    <environment_variable name="PYTHONPATH" action="append_to">$INSTALL_DIR/lib/python</environment_variable>
+                </action>
+            </actions>
+        </install>
+        <readme>Compiling matplotlib requires a C compiler (typically gcc), freetype2, numpy and libpng.</readme>
+    </package>
+</tool_dependency>
diff --git a/test/shed_functional/test_data/package_numpy/package_numpy_1_7.tar b/test/shed_functional/test_data/package_numpy/package_numpy_1_7.tar
new file mode 100644
index 0000000..946e1bb
Binary files /dev/null and b/test/shed_functional/test_data/package_numpy/package_numpy_1_7.tar differ
diff --git a/test/shed_functional/test_data/proteomics_datatypes/proteomics_datatypes.tar b/test/shed_functional/test_data/proteomics_datatypes/proteomics_datatypes.tar
new file mode 100644
index 0000000..2e0be49
Binary files /dev/null and b/test/shed_functional/test_data/proteomics_datatypes/proteomics_datatypes.tar differ
diff --git a/test/shed_functional/test_data/readme.txt b/test/shed_functional/test_data/readme.txt
new file mode 100644
index 0000000..d344129
--- /dev/null
+++ b/test/shed_functional/test_data/readme.txt
@@ -0,0 +1 @@
+This is a readme file.
diff --git a/test/shed_functional/test_data/repository_capsules/0490_filtering.tar.gz b/test/shed_functional/test_data/repository_capsules/0490_filtering.tar.gz
new file mode 100644
index 0000000..863a208
Binary files /dev/null and b/test/shed_functional/test_data/repository_capsules/0490_filtering.tar.gz differ
diff --git a/test/shed_functional/test_data/repository_capsules/0500_emboss_5.tar.gz b/test/shed_functional/test_data/repository_capsules/0500_emboss_5.tar.gz
new file mode 100644
index 0000000..a76199f
Binary files /dev/null and b/test/shed_functional/test_data/repository_capsules/0500_emboss_5.tar.gz differ
diff --git a/test/shed_functional/test_data/repository_capsules/0510_trans_proteomic_pipeline.tar.gz b/test/shed_functional/test_data/repository_capsules/0510_trans_proteomic_pipeline.tar.gz
new file mode 100644
index 0000000..60a12a1
Binary files /dev/null and b/test/shed_functional/test_data/repository_capsules/0510_trans_proteomic_pipeline.tar.gz differ
diff --git a/test/shed_functional/test_data/repository_capsules/0520_filtering.tar.gz b/test/shed_functional/test_data/repository_capsules/0520_filtering.tar.gz
new file mode 100644
index 0000000..cc51da6
Binary files /dev/null and b/test/shed_functional/test_data/repository_capsules/0520_filtering.tar.gz differ
diff --git a/test/unit/dataset_collections/__init__.py b/test/unit/dataset_collections/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/dataset_collections/test_matching.py b/test/unit/dataset_collections/test_matching.py
new file mode 100644
index 0000000..53c0fd7
--- /dev/null
+++ b/test/unit/dataset_collections/test_matching.py
@@ -0,0 +1,141 @@
+from galaxy.dataset_collections import (
+    matching,
+    registry,
+    type_description,
+)
+
+TYPE_REGISTRY = registry.DatasetCollectionTypesRegistry( None )
+TYPE_DESCRIPTION_FACTORY = type_description.CollectionTypeDescriptionFactory( TYPE_REGISTRY )
+
+
+def test_pairs_match():
+    assert_can_match( pair_instance(), pair_instance() )
+
+
+def test_lists_of_same_cardinality_match():
+    assert_can_match( list_instance(), list_instance() )
+
+
+def test_nested_lists_match():
+    nested_list = list_instance(
+        elements=[
+            pair_element("data1"),
+            pair_element("data2"),
+            pair_element("data3"),
+        ],
+        collection_type="list:paired",
+    )
+    assert_can_match( nested_list, nested_list )
+
+
+def test_different_types_cannot_match():
+    assert_cannot_match( list_instance(), pair_instance() )
+    assert_cannot_match( pair_instance(), list_instance() )
+
+
+def test_lists_of_different_cardinality_do_not_match():
+    list_1 = list_instance( ids=[ "data1", "data2" ] )
+    list_2 = list_instance( ids=[ "data1", "data2", "data3" ] )
+    assert_cannot_match( list_1, list_2 )
+    assert_cannot_match( list_2, list_1 )
+
+
+def test_valid_collection_subcollection_matching():
+    flat_list = list_instance( ids=[ "data1", "data2", "data3" ] )
+    nested_list = list_instance(
+        elements=[
+            pair_element("data11"),
+            pair_element("data21"),
+            pair_element("data31"),
+        ],
+        collection_type="list:paired",
+    )
+    assert_cannot_match( flat_list, nested_list )
+    assert_cannot_match( nested_list, flat_list )
+    assert_can_match( ( nested_list, "paired" ), flat_list )
+
+
+def assert_can_match( *items ):
+    to_match = build_collections_to_match( *items )
+    matching.MatchingCollections.for_collections( to_match, TYPE_DESCRIPTION_FACTORY )
+
+
+def assert_cannot_match( *items ):
+    to_match = build_collections_to_match( *items )
+    threw_exception = False
+    try:
+        matching.MatchingCollections.for_collections( to_match, TYPE_DESCRIPTION_FACTORY )
+    except Exception:
+        threw_exception = True
+    assert threw_exception
+
+
+def build_collections_to_match( *items ):
+    to_match = matching.CollectionsToMatch()
+
+    for i, item in enumerate( items ):
+        if isinstance( item, tuple ):
+            collection_instance, subcollection_type = item
+        else:
+            collection_instance, subcollection_type = item, None
+        to_match.add( "input_%d" % i, collection_instance, subcollection_type )
+    return to_match
+
+
+def pair_element( element_identifier ):
+    return collection_element( element_identifier, pair_instance().collection )
+
+
+def pair_instance( ):
+    paired_collection_instance = collection_instance( collection_type="paired", elements=[
+        hda_element( "left" ),
+        hda_element( "right" ),
+    ] )
+    return paired_collection_instance
+
+
+def list_instance( collection_type="list", elements=None, ids=None ):
+    if not elements:
+        if ids is None:
+            ids = [ "data1", "data2" ]
+        elements = [hda_element(_) for _ in ids]
+    list_collection_instance = collection_instance(
+        collection_type=collection_type,
+        elements=elements
+    )
+    return list_collection_instance
+
+
+class MockCollectionInstance( object ):
+
+    def __init__( self, collection_type, elements ):
+        self.collection = MockCollection( collection_type, elements )
+
+
+class MockCollection( object ):
+
+    def __init__( self, collection_type, elements ):
+        self.collection_type = collection_type
+        self.elements = elements
+
+
+class MockCollectionElement( object ):
+
+    def __init__( self, element_identifier, collection ):
+        self.element_identifier = element_identifier
+        self.child_collection = collection
+        self.hda = None
+
+
+class MockHDAElement( object ):
+
+    def __init__( self, element_identifier ):
+        self.element_identifier = element_identifier
+        self.child_collection = False
+        self.hda = object()
+
+
+collection_instance = MockCollectionInstance
+collection = MockCollection
+collection_element = MockCollectionElement
+hda_element = MockHDAElement
diff --git a/test/unit/datatypes/__init__.py b/test/unit/datatypes/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/datatypes/dataproviders/__init__.py b/test/unit/datatypes/dataproviders/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/datatypes/dataproviders/test_base_dataproviders.py b/test/unit/datatypes/dataproviders/test_base_dataproviders.py
new file mode 100644
index 0000000..6b87a97
--- /dev/null
+++ b/test/unit/datatypes/dataproviders/test_base_dataproviders.py
@@ -0,0 +1,380 @@
+"""
+Unit tests for base DataProviders.
+.. seealso:: galaxy.datatypes.dataproviders.base
+"""
+import logging
+import os.path
+import sys
+import unittest
+
+from six import StringIO
+
+from galaxy.datatypes.dataproviders import base, exceptions
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import tempfilecache, utility
+
+log = logging.getLogger( __name__ )
+
+# TODO: fix imports there after dist and retry
+# TODO: fix off by ones in FilteredDataProvider counters
+# currently because of dataproviders.dataset importing galaxy.model this doesn't work
+
+
+class BaseTestCase( unittest.TestCase ):
+    default_file_contents = """
+            One
+            Two
+            Three
+        """
+
+    @classmethod
+    def setUpClass( cls ):
+        log.debug( 'CLASS %s %s', ( '_' * 40 ), cls.__name__ )
+
+    @classmethod
+    def tearDownClass( cls ):
+        log.debug( 'CLASS %s %s\n\n', ( '_' * 40 ), cls.__name__ )
+
+    def __init__( self, *args ):
+        unittest.TestCase.__init__( self, *args )
+        self.tmpfiles = tempfilecache.TempFileCache( log )
+
+    def setUp( self ):
+        log.debug( 'BEGIN %s %s', ( '.' * 40 ), self._testMethodName )
+        if self._testMethodDoc:
+            log.debug( ' """%s"""', self._testMethodDoc.strip() )
+
+    def tearDown( self ):
+        self.tmpfiles.clear()
+        log.debug( 'END\n' )
+
+    def format_tmpfile_contents( self, contents=None ):
+        contents = contents or self.default_file_contents
+        contents = utility.clean_multiline_string( contents )
+        log.debug( 'file contents:\n%s', contents )
+        return contents
+
+    def parses_default_content_as( self ):
+        return [ 'One\n', 'Two\n', 'Three\n' ]
+
+
+class Test_BaseDataProvider( BaseTestCase ):
+    provider_class = base.DataProvider
+
+    def contents_provider_and_data( self,
+            filename=None, contents=None, source=None, *provider_args, **provider_kwargs ):
+        # to remove boilerplate
+        # returns file content string, provider used, and data list
+        if not filename:
+            contents = self.format_tmpfile_contents( contents )
+            filename = self.tmpfiles.create_tmpfile( contents )
+        # TODO: if filename, contents == None
+        if not source:
+            source = open( filename )
+        provider = self.provider_class( source, *provider_args, **provider_kwargs )
+        log.debug( 'provider: %s', provider )
+        data = list( provider )
+        log.debug( 'data: %s', str( data ) )
+        return ( contents, provider, data )
+
+    def test_iterators( self ):
+        # should accept any iterable source: a generator, a list, or a tuple
+        source = ( str( x ) for x in range( 1, 10 ) )
+        provider = self.provider_class( source )
+        data = list( provider )
+        log.debug( 'data: %s', str( data ) )
+        self.assertEqual( data, [ str( x ) for x in range( 1, 10 ) ] )
+
+        source = [ str( x ) for x in range( 1, 10 ) ]
+        provider = self.provider_class( source )
+        data = list( provider )
+        log.debug( 'data: %s', str( data ) )
+        self.assertEqual( data, [ str( x ) for x in range( 1, 10 ) ] )
+
+        source = tuple( str( x ) for x in range( 1, 10 ) )
+        provider = self.provider_class( source )
+        data = list( provider )
+        log.debug( 'data: %s', str( data ) )
+        self.assertEqual( data, [ str( x ) for x in range( 1, 10 ) ] )
+
+    def test_validate_source( self ):
+        """validate_source should throw an error if the source doesn't have attr '__iter__'
+        """
+        def non_iterator_dprov( source ):
+            return self.provider_class( source )
+        self.assertRaises( exceptions.InvalidDataProviderSource,
+            non_iterator_dprov, 'one two three' )
+        self.assertRaises( exceptions.InvalidDataProviderSource,
+            non_iterator_dprov, 40 )
+
+    def test_writemethods( self ):
+        """should throw an error if any write methods are called
+        """
+        source = ( str( x ) for x in range( 1, 10 ) )
+        provider = self.provider_class( source )
+
+        # should throw error
+        def call_method( provider, method_name, *args ):
+            method = getattr( provider, method_name )
+            return method( *args )
+
+        self.assertRaises( NotImplementedError, call_method, provider, 'truncate', 20 )
+        self.assertRaises( NotImplementedError, call_method, provider, 'write', 'bler' )
+        self.assertRaises( NotImplementedError, call_method, provider, 'writelines', [ 'one', 'two' ] )
+
+    def test_readlines( self ):
+        """readlines should return all the data in list form
+        """
+        source = ( str( x ) for x in range( 1, 10 ) )
+        provider = self.provider_class( source )
+        data = provider.readlines()
+        log.debug( 'data: %s', str( data ) )
+        self.assertEqual( data, [ str( x ) for x in range( 1, 10 ) ] )
+
+    def test_stringio( self ):
+        """should work with StringIO
+        """
+        contents = utility.clean_multiline_string( """
+            One
+            Two
+            Three
+        """ )
+        source = StringIO( contents )
+        provider = self.provider_class( source )
+        data = list( provider )
+        log.debug( 'data: %s', str( data ) )
+        # provider should call close on file
+        self.assertEqual( data, self.parses_default_content_as() )
+        self.assertTrue( source.closed )
+
+    def test_file( self ):
+        """should work with files
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data()
+        self.assertEqual( data, self.parses_default_content_as() )
+        # provider should call close on file
+        self.assertTrue( hasattr(provider.source, 'read'))
+        self.assertTrue( provider.source.closed )
+
+
+class Test_FilteredDataProvider( Test_BaseDataProvider ):
+    provider_class = base.FilteredDataProvider
+
+    def assertCounters( self, provider, read, valid, returned ):
+        self.assertEqual( provider.num_data_read, read )
+        self.assertEqual( provider.num_valid_data_read, valid )
+        self.assertEqual( provider.num_data_returned, returned )
+
+    def test_counters( self ):
+        """should count: lines read, lines that passed the filter, lines returned
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data()
+        self.assertCounters( provider, 3, 3, 3 )
+
+    def test_filter_fn( self ):
+        """should filter out lines using filter_fn and set counters properly
+        based on filter
+        """
+        def filter_ts( string ):
+            if string.lower().startswith( 't' ):
+                return None
+            return string
+        ( contents, provider, data ) = self.contents_provider_and_data( filter_fn=filter_ts )
+        self.assertCounters( provider, 3, 1, 1 )
+
+
+class Test_LimitedOffsetDataProvider( Test_FilteredDataProvider ):
+    provider_class = base.LimitedOffsetDataProvider
+
+    def test_offset_1( self ):
+        """when offset is 1, should skip first
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( offset=1 )
+        self.assertEqual( data, self.parses_default_content_as()[1:] )
+        self.assertCounters( provider, 3, 3, 2 )
+
+    def test_offset_all( self ):
+        """when offset >= num lines, should return empty list
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( offset=4 )
+        self.assertEqual( data, [] )
+        self.assertCounters( provider, 3, 3, 0 )
+
+    def test_offset_none( self ):
+        """when offset is 0, should return all
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( offset=0 )
+        self.assertEqual( data, self.parses_default_content_as() )
+        self.assertCounters( provider, 3, 3, 3 )
+
+    def test_offset_negative( self ):
+        """when offset is negative, should return all
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( offset=-1 )
+        self.assertEqual( data, self.parses_default_content_as() )
+        self.assertCounters( provider, 3, 3, 3 )
+
+    def test_limit_1( self ):
+        """when limit is one, should return first
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( limit=1 )
+        self.assertEqual( data, self.parses_default_content_as()[:1] )
+        self.assertCounters( provider, 1, 1, 1 )
+
+    def test_limit_all( self ):
+        """when limit >= num lines, should return all
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( limit=4 )
+        self.assertEqual( data, self.parses_default_content_as() )
+        self.assertCounters( provider, 3, 3, 3 )
+
+    def test_limit_zero( self ):
+        """when limit >= num lines, should return empty list
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( limit=0 )
+        self.assertEqual( data, [] )
+        self.assertCounters( provider, 0, 0, 0 )
+
+    def test_limit_none( self ):
+        """when limit is None, should return all
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( limit=None )
+        self.assertEqual( data, self.parses_default_content_as() )
+        self.assertCounters( provider, 3, 3, 3 )
+
+    # TODO: somehow re-use tmpfile here
+    def test_limit_with_offset( self ):
+        def limit_offset_combo( limit, offset, data_should_be, read, valid, returned ):
+            ( contents, provider, data ) = self.contents_provider_and_data( limit=limit, offset=offset )
+            self.assertEqual( data, data_should_be )
+            # self.assertCounters( provider, read, valid, returned )
+        result_data = self.parses_default_content_as()
+        test_data = [
+            ( 0, 0, [], 0, 0, 0 ),
+            ( 1, 0, result_data[:1], 1, 1, 1 ),
+            ( 2, 0, result_data[:2], 2, 2, 2 ),
+            ( 3, 0, result_data[:3], 3, 3, 3 ),
+            ( 1, 1, result_data[1:2], 1, 1, 1 ),
+            ( 2, 1, result_data[1:3], 2, 2, 2 ),
+            ( 3, 1, result_data[1:3], 2, 2, 2 ),
+            ( 1, 2, result_data[2:3], 1, 1, 1 ),
+            ( 2, 2, result_data[2:3], 1, 1, 1 ),
+            ( 3, 2, result_data[2:3], 1, 1, 1 ),
+        ]
+        for test in test_data:
+            log.debug( 'limit_offset_combo: %s', ', '.join([ str( e ) for e in test ]) )
+            limit_offset_combo( *test )
+
+    def test_limit_with_offset_and_filter( self ):
+        def limit_offset_combo( limit, offset, data_should_be, read, valid, returned ):
+            def only_ts( string ):
+                if not string.lower().startswith( 't' ):
+                    return None
+                return string
+            ( contents, provider, data ) = self.contents_provider_and_data(
+                limit=limit, offset=offset, filter_fn=only_ts )
+            self.assertEqual( data, data_should_be )
+            # self.assertCounters( provider, read, valid, returned )
+        result_data = [ c for c in self.parses_default_content_as() if c.lower().startswith( 't' ) ]
+        test_data = [
+            ( 0, 0, [], 0, 0, 0 ),
+            ( 1, 0, result_data[:1], 1, 1, 1 ),
+            ( 2, 0, result_data[:2], 2, 2, 2 ),
+            ( 3, 0, result_data[:3], 2, 2, 2 ),
+            ( 1, 1, result_data[1:2], 1, 1, 1 ),
+            ( 2, 1, result_data[1:3], 1, 1, 1 ),
+            ( 1, 2, result_data[2:3], 0, 0, 0 ),
+        ]
+        for test in test_data:
+            log.debug( 'limit_offset_combo: %s', ', '.join([ str( e ) for e in test ]) )
+            limit_offset_combo( *test )
+
+
+class Test_MultiSourceDataProvider( BaseTestCase ):
+    provider_class = base.MultiSourceDataProvider
+
+    def contents_and_tmpfile( self, contents=None ):
+        # TODO: hmmmm...
+        contents = contents or self.default_file_contents
+        contents = utility.clean_multiline_string( contents )
+        return ( contents, self.tmpfiles.create_tmpfile( contents ) )
+
+    def test_multiple_sources( self ):
+        # clean the following contents, write them to tmpfiles, open them,
+        #   and pass as a list to the provider
+        contents = [
+            """
+                One
+                Two
+                Three
+                Four
+                Five
+            """,
+            """
+                Six
+                Seven
+                Eight
+                Nine
+                Ten
+            """,
+            """
+                Eleven
+                Twelve! (<-- http://youtu.be/JZshZp-cxKg)
+            """
+        ]
+        contents = [ utility.clean_multiline_string( c ) for c in contents ]
+        source_list = [ open( self.tmpfiles.create_tmpfile( c ) ) for c in contents ]
+
+        provider = self.provider_class( source_list )
+        log.debug( 'provider: %s', provider )
+        data = list( provider )
+        log.debug( 'data: %s', str( data ) )
+        self.assertEqual( ''.join( data ), ''.join( contents) )
+
+    def test_multiple_compound_sources( self ):
+        # clean the following contents, write them to tmpfiles, open them,
+        #   and pass as a list to the provider
+        contents = [
+            """
+                One
+                Two
+                Three
+                Four
+                Five
+            """,
+            """
+                Six
+                Seven
+                Eight
+                Nine
+                Ten
+            """,
+            """
+                Eleven
+                Twelve! (<-- http://youtu.be/JZshZp-cxKg)
+            """
+        ]
+        contents = [ utility.clean_multiline_string( c ) for c in contents ]
+        source_list = [ open( self.tmpfiles.create_tmpfile( c ) ) for c in contents ]
+
+        def no_Fs( string ):
+            return None if string.startswith( 'F' ) else string
+
+        def no_youtube( string ):
+            return None if ( 'youtu.be' in string ) else string
+
+        source_list = [
+            base.LimitedOffsetDataProvider( source_list[0], filter_fn=no_Fs, limit=2, offset=1 ),
+            base.LimitedOffsetDataProvider( source_list[1], limit=1, offset=3 ),
+            base.FilteredDataProvider( source_list[2], filter_fn=no_youtube ),
+        ]
+        provider = self.provider_class( source_list )
+        log.debug( 'provider: %s', provider )
+        data = list( provider )
+        log.debug( 'data: %s', str( data ) )
+        self.assertEqual( ''.join( data ), 'Two\nThree\nNine\nEleven\n' )
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/unit/datatypes/dataproviders/test_line_dataproviders.py b/test/unit/datatypes/dataproviders/test_line_dataproviders.py
new file mode 100644
index 0000000..7e372f4
--- /dev/null
+++ b/test/unit/datatypes/dataproviders/test_line_dataproviders.py
@@ -0,0 +1,300 @@
+"""
+Unit tests for line DataProviders.
+.. seealso:: galaxy.datatypes.dataproviders.line
+"""
+import logging
+import unittest
+
+from galaxy.datatypes.dataproviders import line
+
+from . import test_base_dataproviders
+
+log = logging.getLogger( __name__ )
+
+
+# TODO: TestCase hierarchy is a bit of a mess here.
+class Test_FilteredLineDataProvider( test_base_dataproviders.Test_FilteredDataProvider ):
+    provider_class = line.FilteredLineDataProvider
+    default_file_contents = """
+            # this should be stripped out
+            One
+            # as should blank lines
+
+            # preceding/trailing whitespace too
+                Two
+            Three
+        """
+
+    def parses_default_content_as( self ):
+        return [ 'One', 'Two', 'Three' ]
+
+    def test_counters( self ):
+        """should count: lines read, lines that passed the filter, lines returned
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data()
+        self.assertCounters( provider, 7, 3, 3 )
+
+    def test_filter_fn( self ):
+        """should filter out lines using filter_fn and set counters properly
+        based on filter
+        """
+        def filter_ts( string ):
+            if string.lower().startswith( 't' ):
+                return None
+            return string
+        ( contents, provider, data ) = self.contents_provider_and_data( filter_fn=filter_ts )
+        self.assertCounters( provider, 7, 1, 1 )
+
+    def test_limit_with_offset( self ):
+        def limit_offset_combo( limit, offset, data_should_be, read, valid, returned ):
+            ( contents, provider, data ) = self.contents_provider_and_data( limit=limit, offset=offset )
+            self.assertEqual( data, data_should_be )
+            # self.assertCounters( provider, read, valid, returned )
+        result_data = self.parses_default_content_as()
+        test_data = [
+            ( 0, 0, [], 0, 0, 0 ),
+            ( 1, 0, result_data[:1], 1, 1, 1 ),
+            ( 2, 0, result_data[:2], 2, 2, 2 ),
+            ( 3, 0, result_data[:3], 3, 3, 3 ),
+            ( 1, 1, result_data[1:2], 1, 1, 1 ),
+            ( 2, 1, result_data[1:3], 2, 2, 2 ),
+            ( 3, 1, result_data[1:3], 2, 2, 2 ),
+            ( 1, 2, result_data[2:3], 1, 1, 1 ),
+            ( 2, 2, result_data[2:3], 1, 1, 1 ),
+            ( 3, 2, result_data[2:3], 1, 1, 1 ),
+        ]
+        for test in test_data:
+            log.debug( 'limit_offset_combo: %s', ', '.join([ str( e ) for e in test ]) )
+            limit_offset_combo( *test )
+
+    def test_provide_blank( self ):
+        """should return blank lines if ``provide_blank`` is true.
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( provide_blank=True )
+        self.assertEqual( data, [ 'One', '', 'Two', 'Three' ] )
+        self.assertCounters( provider, 7, 4, 4 )
+
+    def test_strip_lines( self ):
+        """should return unstripped lines if ``strip_lines`` is false.
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( strip_lines=False )
+        self.assertEqual( data, ['One\n', '\n', '    Two\n', 'Three\n'] )
+        self.assertCounters( provider, 7, 4, 4 )
+
+    def test_comment_char( self ):
+        """should return unstripped lines if ``strip_lines`` is false.
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( comment_char='T' )
+        self.assertEqual( data, [ '# this should be stripped out', 'One',
+                                  '# as should blank lines', '# preceding/trailing whitespace too' ] )
+        self.assertCounters( provider, 7, 4, 4 )
+
+
+class Test_RegexLineDataProvider( Test_FilteredLineDataProvider ):
+    provider_class = line.RegexLineDataProvider
+    default_file_contents = """
+            # this should be stripped out
+            One
+            # as should blank lines
+
+            # preceding/trailing whitespace too
+                Two
+            Three
+        """
+
+    def test_regex( self ):
+        """should return lines matching regex (AFTER strip, comments, blanks).
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( regex_list=[ r'^O' ] )
+        self.assertEqual( data, [ 'One' ] )
+        self.assertCounters( provider, 7, 1, 1 )
+
+    def test_regex_list( self ):
+        """should return regex matches using more than one regex by ORing them.
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( regex_list=[ r'^O', r'T' ] )
+        self.assertEqual( data, [ 'One', 'Two', 'Three' ] )
+        self.assertCounters( provider, 7, 3, 3 )
+
+    def test_inverse( self ):
+        """should return inverse matches when ``invert`` is true.
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( regex_list=[ r'^O' ], invert=True )
+        self.assertEqual( data, [ 'Two', 'Three' ] )
+        self.assertCounters( provider, 7, 2, 2 )
+
+    def test_regex_no_match( self ):
+        """should return empty if no regex matches.
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( regex_list=[ r'^Z' ] )
+        self.assertEqual( data, [] )
+        self.assertCounters( provider, 7, 0, 0 )
+
+    def test_regex_w_limit_offset( self ):
+        """regex should play well with limit and offset
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data( regex_list=[ r'^T' ], limit=1 )
+        self.assertEqual( data, [ 'Two' ] )
+        # TODO: once again, valid data, returned data is off
+        self.assertCounters( provider, 6, 1, 1 )
+
+        ( contents, provider, data ) = self.contents_provider_and_data( regex_list=[ r'^T' ], limit=1, offset=1 )
+        self.assertEqual( data, [ 'Three' ] )
+        self.assertCounters( provider, 7, 2, 1 )
+
+
+class Test_BlockDataProvider( test_base_dataproviders.Test_FilteredDataProvider ):
+    provider_class = line.BlockDataProvider
+    default_file_contents = """
+        One
+            ABCD
+        Two
+            ABCD
+            EFGH
+        Three
+    """
+
+    def parses_default_content_as( self ):
+        return [ ['One'], ['ABCD'], ['Two'], ['ABCD'], ['EFGH'], ['Three'] ]
+
+    # TODO: well, this is ham-handed...
+    def test_stringio( self ):
+        pass
+
+    def test_iterators( self ):
+        pass
+
+    def test_readlines( self ):
+        pass
+
+    def test_file( self ):
+        """should work with files
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data()
+        self.assertEqual( data, self.parses_default_content_as() )
+        self.assertTrue( isinstance( provider.source, line.FilteredLineDataProvider ) )
+        self.assertTrue( hasattr(provider.source.source, 'read' ) )
+        # provider should call close on file
+        self.assertTrue( provider.source.source.closed )
+
+    def test_counters( self ):
+        """should count: lines read, lines that passed the filter, lines returned
+        """
+        ( contents, provider, data ) = self.contents_provider_and_data()
+        self.assertCounters( provider, 6, 6, 6 )
+
+    def test_filter_fn( self ):
+        """should filter out lines using filter_fn and set counters properly
+        based on filter
+        """
+        def filter_ts( string ):
+            if string.lower().startswith( 't' ):
+                return None
+            return string
+        ( contents, provider, data ) = self.contents_provider_and_data( filter_fn=filter_ts )
+        # no block fns here, so will parse as lines
+        self.assertEqual( data, [ ['One'], ['ABCD'], ['ABCD'], ['EFGH'] ] )
+        self.assertCounters( provider, 4, 4, 4 )
+
+    def test_new_block_delim_fn( self ):
+        """should return blocks based on ``new_block_delim_fn``
+        """
+        def is_not_indented( line ):
+            strip_diff = len( line ) - len( line.lstrip() )
+            return ( strip_diff == 0 )
+        # in order to use indentation as a delimiter, we need to strip the newlines only
+        ( contents, provider, data ) = self.contents_provider_and_data( strip_lines=False, strip_newlines=True,
+            new_block_delim_fn=is_not_indented )
+        self.assertEqual( data, [[ 'One', '    ABCD' ], [ 'Two', '    ABCD', '    EFGH' ], [ 'Three' ]] )
+        self.assertCounters( provider, 3, 3, 3 )
+
+    def test_block_filter_fn( self ):
+        """should return blocks only blocks that pass ``block_filter_fn``
+        """
+        def is_not_indented( line ):
+            strip_diff = len( line ) - len( line.lstrip() )
+            return ( strip_diff == 0 )
+        # def empty_block( block ):
+        #    if len( block ) <= 1:
+        #        return None
+        #    return block
+
+        def no_tw( block ):
+            if block[0].startswith( 'Tw' ):
+                return None
+            return block
+
+        ( contents, provider, data ) = self.contents_provider_and_data( strip_lines=False, strip_newlines=True,
+            new_block_delim_fn=is_not_indented, block_filter_fn=no_tw )
+        self.assertEqual( data, [[ 'One', '    ABCD' ], [ 'Three' ]] )
+        self.assertCounters( provider, 3, 2, 2 )
+
+    def test_hack_block_filter_fn( self ):
+        """should allow other aggregating/mod use in filter_fn
+
+        Although, it would be better to subclass and override assemble_current_block
+        """
+        def is_not_indented( line ):
+            strip_diff = len( line ) - len( line.lstrip() )
+            return ( strip_diff == 0 )
+
+        def empty_block( block ):
+            if len( block ) <= 1:
+                return None
+            return { 'header': block[0].strip(), 'data': [ b.strip() for b in block[1:] if b.strip() ] }
+
+        ( contents, provider, data ) = self.contents_provider_and_data(
+            strip_lines=False, strip_newlines=True,
+            new_block_delim_fn=is_not_indented, block_filter_fn=empty_block )
+        self.assertEqual( data, [ { 'header': 'One', 'data': [ 'ABCD' ]},
+                                  { 'header': 'Two', 'data': [ 'ABCD', 'EFGH' ]} ])
+        self.assertCounters( provider, 3, 2, 2 )
+
+    def test_block_filter_fn_w_limit_offset( self ):
+        """should allow both block fns and limit, offset
+        """
+        def is_not_indented( line ):
+            strip_diff = len( line ) - len( line.lstrip() )
+            return ( strip_diff == 0 )
+
+        def empty_block( block ):
+            if len( block ) <= 1:
+                return None
+            return block
+
+        ( contents, provider, data ) = self.contents_provider_and_data( strip_lines=False, strip_newlines=True,
+            new_block_delim_fn=is_not_indented, block_filter_fn=empty_block, limit=1 )
+        self.assertEqual( data, [[ 'One', '    ABCD' ]] )
+        self.assertCounters( provider, 1, 1, 1 )
+        ( contents, provider, data ) = self.contents_provider_and_data( strip_lines=False, strip_newlines=True,
+            new_block_delim_fn=is_not_indented, block_filter_fn=empty_block, limit=2, offset=1 )
+        self.assertEqual( data, [[ 'Two', '    ABCD', '    EFGH' ]] )
+        self.assertCounters( provider, 3, 2, 1 )
+
+    def test_simple_example( self ):
+        """
+        """
+        file_contents = """
+            >One
+            ABCD
+
+            # this comment (and the blank line above) won't be included
+            >Two
+            ABCD
+            EFGH
+            """
+
+        def fasta_header( line ):
+            return line.startswith( '>' )
+
+        def id_seq( block ):
+            return { 'id': block[0][1:], 'seq': ( ''.join( block[1:] ) ) }
+
+        ( contents, provider, data ) = self.contents_provider_and_data( contents=file_contents,
+            new_block_delim_fn=fasta_header, block_filter_fn=id_seq )
+        self.assertEqual( data, [{ 'id': 'One', 'seq': 'ABCD' }, { 'id': 'Two', 'seq': 'ABCDEFGH' }] )
+        self.assertCounters( provider, 2, 2, 2 )
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/unit/datatypes/test_data.py b/test/unit/datatypes/test_data.py
new file mode 100644
index 0000000..5be94c8
--- /dev/null
+++ b/test/unit/datatypes/test_data.py
@@ -0,0 +1,11 @@
+"""
+Unit tests for base DataTypes.
+.. seealso:: galaxy.datatypes.data
+"""
+
+from galaxy.datatypes.data import get_file_peek
+
+
+def test_get_file_peek( ):
+    # should get the first 5 lines of the file without a trailing newline character
+    assert get_file_peek('test-data/1.tabular', line_wrap=False) == 'chr22\t1000\tNM_17\nchr22\t2000\tNM_18\nchr10\t2200\tNM_10\nchr10\thap\ttest\nchr10\t1200\tNM_11'
diff --git a/test/unit/jobs/__init__.py b/test/unit/jobs/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/jobs/dynamic_tool_destination/__init__.py b/test/unit/jobs/dynamic_tool_destination/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/jobs/dynamic_tool_destination/data/dest_fail.yml b/test/unit/jobs/dynamic_tool_destination/data/dest_fail.yml
new file mode 100644
index 0000000..6ccdcaa
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/dest_fail.yml
@@ -0,0 +1,9 @@
+tools:
+  test:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: 2 KB
+        destination: DestinationF
+verbose: True
diff --git a/test/unit/jobs/dynamic_tool_destination/data/priority_tool_destination.yml b/test/unit/jobs/dynamic_tool_destination/data/priority_tool_destination.yml
new file mode 100644
index 0000000..10238c8
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/priority_tool_destination.yml
@@ -0,0 +1,125 @@
+tools:
+  test:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: Infinity
+        destination:
+          priority:
+            low: Destination1_low
+            med: Destination1_med
+            high: Destination1_high
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 1 KB
+        fail_message: File size too small
+        destination: fail
+      - rule_type: num_input_datasets
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 5
+        destination:
+          priority:
+            low: Destination2_low
+            med: Destination2_med
+            high: Destination2_high
+      - rule_type: num_input_datasets
+        nice_value: 0
+        lower_bound: 5
+        upper_bound: Infinity
+        destination:
+          priority:
+            low: Destination3_low
+            med: Destination3_med
+            high: Destination3_high
+
+  test_overlap:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: Infinity
+        destination:
+          priority:
+            low: Destination2_low
+            med: Destination2_med
+            high: Destination2_high
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 4 KB
+        destination:
+          priority:
+            low: Destination3_low
+            med: Destination3_med
+            high: Destination3_high
+      - rule_type: num_input_datasets
+        nice_value: -20
+        lower_bound: 0
+        upper_bound: 5
+        destination:
+          priority:
+            low: Destination4_low
+            med: Destination4_med
+            high: Destination4_high
+      - rule_type: num_input_datasets
+        nice_value: -20
+        lower_bound: 5
+        upper_bound: Infinity
+        destination:
+          priority:
+            low: Destination5_low
+            med: Destination5_med
+            high: Destination5_high
+
+  test_db:
+    rules:
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: Infinity
+        fail_message: File size too small
+        destination: fail
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 1 KB
+        destination:
+          priority:
+            low: Destination4_low
+            med: Destination4_med
+            high: Destination4_high
+
+  test_db_high:
+    rules:
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: Infinity
+        destination:
+          priority:
+            low: Destination5_low
+            med: Destination5_med
+            high: Destination5_high
+
+  test_arguments:
+    rules:
+      - rule_type: arguments
+        nice_value: 0
+        arguments:
+          careful: true
+        destination:
+          priority:
+            med: Destination6_med
+
+default_destination:
+  priority:
+    low: waffles_default_low
+    med: waffles_default_med
+    high: waffles_default_high
+users:
+  user@email.com:
+    priority: high
+verbose: True
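
For illustration, a minimal sketch (an assumption, not the
dynamic_tool_destination module itself) of how a file_size rule with a
priority destination, like those above, might be resolved for a given input
size and user priority level:

    # Minimal sketch: resolve a file_size rule to a destination id.
    # Bounds treated as inclusive-lower / exclusive-upper; None means Infinity.
    def pick_destination(rules, input_size_bytes, user_priority="med"):
        for rule in rules:
            if rule["rule_type"] != "file_size":
                continue
            in_lower = input_size_bytes >= rule["lower_bound"]
            in_upper = rule["upper_bound"] is None or input_size_bytes < rule["upper_bound"]
            if in_lower and in_upper:
                dest = rule["destination"]
                if isinstance(dest, dict):  # priority destination mapping
                    return dest["priority"][user_priority]
                return dest
        return None

    rules = [{"rule_type": "file_size", "lower_bound": 1024, "upper_bound": None,
              "destination": {"priority": {"low": "Destination1_low",
                                           "med": "Destination1_med",
                                           "high": "Destination1_high"}}}]
    assert pick_destination(rules, 2048, "high") == "Destination1_high"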
diff --git a/test/unit/jobs/dynamic_tool_destination/data/test.empty b/test/unit/jobs/dynamic_tool_destination/data/test.empty
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/jobs/dynamic_tool_destination/data/test.fasta b/test/unit/jobs/dynamic_tool_destination/data/test.fasta
new file mode 100644
index 0000000..ffc9927
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/test.fasta
@@ -0,0 +1,21 @@
+#6 records
+
+> fhdsauo
+adfjsa
+hhfdsauiewo
+
+
+> al;fjdiiweoofsa
+jfkdsi
+
+>jfjjfjjjfjdklsllsxaaalccaccaccggattaccat
+cacacaagttggttacatttga
+>faaaccahbvvattavgttaavvaccagaaga
+
+>
+
+
+>accgcttcgagacttag
+agcttcgcgatatgagtctgag
+gcatatttattacgcggccccgga
+gacggacttattaaaagatataga
\ No newline at end of file
diff --git a/test/unit/jobs/dynamic_tool_destination/data/test1.full b/test/unit/jobs/dynamic_tool_destination/data/test1.full
new file mode 100644
index 0000000..37ae631
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/test1.full
@@ -0,0 +1,10 @@
+asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
\ No newline at end of file
diff --git a/test/unit/jobs/dynamic_tool_destination/data/test3.full b/test/unit/jobs/dynamic_tool_destination/data/test3.full
new file mode 100644
index 0000000..69c326e
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/test3.full
@@ -0,0 +1,115 @@
+asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ asjkldf;jal;fjdsla;jf
+safjkdsla;jioc xpzmklmwerqtjiop afdksl;a f.sa
+ sxa xjlafjoip qrweoqprklaw; sa sa
+ sal fsa fsdla a
+ a
+ fsa sad jdklf;qwjioepmckl;sa fkl asklfdf qwjf sa
+ 
\ No newline at end of file
diff --git a/test/unit/jobs/dynamic_tool_destination/data/test_no_verbose.yml b/test/unit/jobs/dynamic_tool_destination/data/test_no_verbose.yml
new file mode 100644
index 0000000..4cacdaf
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/test_no_verbose.yml
@@ -0,0 +1,11 @@
+tools:
+  test_no_verbose:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: Infinity
+        destination: Destination1
+    default_destination: waffles_default
+default_destination: waffles_default
+verbose: False
diff --git a/test/unit/jobs/dynamic_tool_destination/data/test_num_input_datasets.yml b/test/unit/jobs/dynamic_tool_destination/data/test_num_input_datasets.yml
new file mode 100644
index 0000000..f15a462
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/test_num_input_datasets.yml
@@ -0,0 +1,17 @@
+tools:
+  spades:
+    default_destination: waffles_default
+  smalt:
+    rules:
+      - rule_type: num_input_datasets
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 200
+        destination: cluster_low_4
+      - rule_type: num_input_datasets
+        nice_value: 0
+        lower_bound: 200
+        upper_bound: Infinity
+        destination: cluster_high_32
+default_destination: waffles_low
+verbose: True
diff --git a/test/unit/jobs/dynamic_tool_destination/data/test_users.yml b/test/unit/jobs/dynamic_tool_destination/data/test_users.yml
new file mode 100644
index 0000000..0e4e314
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/test_users.yml
@@ -0,0 +1,14 @@
+tools:
+  test_users:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: Infinity
+        destination: special_cluster
+        users:
+          - user@email.com
+          - notuserblah@email.com
+    default_destination: lame_cluster
+default_destination: even_lamer_cluster
+verbose: False
diff --git a/test/unit/jobs/dynamic_tool_destination/data/tool_destination.yml b/test/unit/jobs/dynamic_tool_destination/data/tool_destination.yml
new file mode 100644
index 0000000..3b78a4a
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/data/tool_destination.yml
@@ -0,0 +1,85 @@
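+# Reference config exercising each rule_type used in the tests: file_size,
+# num_input_datasets, records, and arguments.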
+tools:
+  test:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: Infinity
+        destination: Destination1
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 1 KB
+        fail_message: File size too small
+        destination: fail
+      - rule_type: num_input_datasets
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 5
+        destination: Destination2
+      - rule_type: num_input_datasets
+        nice_value: 0
+        lower_bound: 5
+        upper_bound: Infinity
+        destination: Destination3
+    default_destination: waffles_default
+
+  test_overlap:
+    rules:
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: Infinity
+        destination: Destination2
+      - rule_type: file_size
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 4 KB
+        destination: Destination3
+      - rule_type: num_input_datasets
+        nice_value: -20
+        lower_bound: 0
+        upper_bound: 5
+        destination: Destination4
+      - rule_type: num_input_datasets
+        nice_value: -20
+        lower_bound: 5
+        upper_bound: Infinity
+        destination: Destination5
+    default_destination: waffles_default
+
+  test_db:
+    rules:
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 1 KB
+        upper_bound: Infinity
+        fail_message: File size too small
+        destination: fail
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: 1 KB
+        destination: Destination4
+    default_destination: waffles_default
+
+  test_db_high:
+    rules:
+      - rule_type: records
+        nice_value: 0
+        lower_bound: 0
+        upper_bound: Infinity
+        destination: Destination5
+    default_destination: waffles_default
+
+  test_arguments:
+    rules:
+      - rule_type: arguments
+        nice_value: 0
+        arguments:
+          careful: true
+        destination: Destination6
+    default_destination: waffles_default
+
+default_destination: waffles_default
+verbose: True
diff --git a/test/unit/jobs/dynamic_tool_destination/mockGalaxy.py b/test/unit/jobs/dynamic_tool_destination/mockGalaxy.py
new file mode 100644
index 0000000..d104295
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/mockGalaxy.py
@@ -0,0 +1,97 @@
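+# Minimal stand-ins for Galaxy's job, dataset, tool, and app objects,
+# providing just enough surface for the dynamic tool destination unit tests.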
+from collections import namedtuple
+
+
+# Job mock and helpers=======================================
+class Job(object):
+    def __init__(self):
+        self.input_datasets = []
+        self.input_library_datasets = []
+        self.param_values = dict()
+
+    def get_param_values(self, app, ignore_errors=False):
+        return self.param_values
+
+    def set_arg_value(self, key, value):
+        self.param_values[key] = value
+
+    def add_input_dataset(self, dataset):
+        self.input_datasets.append(dataset)
+
+
+class InputDataset(object):
+    def __init__(self, name, dataset):
+        self.name = name
+        self.dataset = dataset
+
+
+class NotAFile(object):
+    pass
+
+
+class Dataset(object):
+    def __init__(self, file_name, file_ext, value):
+        self.file_name = file_name
+        self.datatype = Datatype(file_ext)
+        self.ext = file_ext
+        self.metadata = dict()
+        self.metadata['sequences'] = value
+
+    def get_metadata(self):
+        return self.metadata
+
+
+class Datatype(object):
+    def __init__(self, file_ext):
+        self.file_ext = file_ext
+
+
+# Tool mock and helpers=========================================
+class Tool(object):
+    def __init__(self, id):
+        self.old_id = id
+        self.installed_tool_dependencies = []
+
+    def add_tool_dependency(self, dependency):
+        self.installed_tool_dependencies.append(dependency)
+
+
+class ToolDependency(object):
+    def __init__(self, name, dir_name):
+        self.name = name
+        self.dir_name = dir_name
+
+    def installation_directory(self, app):
+        return self.dir_name
+
+
+# App mock=======================================================
+class App(object):
+    def __init__(self, tool_id, params):
+        self.job_config = JobConfig( tool_id, params )
+
+
+class JobConfig(object):
+    def __init__(self, tool_id, params):
+        self.info = namedtuple('info', ['id', 'nativeSpec', 'runner'])
+        self.tool_id = tool_id
+        self.nativeSpec = params
+        self.default_id = "waffles_default"
+        self.defNativeSpec = "-q test.q"
+        self.defRunner = "drmaa"
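+        # Expose each destination as an (id, nativeSpec, runner) namedtuple,
+        # keyed by tool id, with "waffles_default" always present as fallback.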
+        self.keys = { tool_id: self.info( self.tool_id, self.nativeSpec, self.defRunner ),
+                      "waffles_default": self.info( self.default_id, self.defNativeSpec, self.defRunner ) }
+
+    def get_destination(self, tool_id):
+        return self.keys[tool_id]
+
+
+# JobMappingException mock=======================================
+class JobMappingException(Exception):
+    pass
+
+
+class JobDestination(object):
+    def __init__(self, **kwd):
+        self.id = kwd.get('id')
+        self.nativeSpec = kwd.get('params')['nativeSpecification']
+        self.runner = kwd.get('runner')
diff --git a/test/unit/jobs/dynamic_tool_destination/test_dynamic_tool_destination.py b/test/unit/jobs/dynamic_tool_destination/test_dynamic_tool_destination.py
new file mode 100644
index 0000000..38c252b
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/test_dynamic_tool_destination.py
@@ -0,0 +1,774 @@
+import logging
+import os
+import unittest
+
+from testfixtures import log_capture
+
+import galaxy.jobs.dynamic_tool_destination as dt
+from galaxy.jobs.dynamic_tool_destination import map_tool_to_destination
+from galaxy.jobs.mapper import JobMappingException
+
+from . import mockGalaxy as mg
+from . import ymltests as yt
+
+theApp = mg.App( "waffles_default", "test_spec" )
+script_dir = os.path.dirname(__file__)
+
+# ======================Jobs====================================
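+# One Job per input scenario: no inputs, an empty file, a too-small file, a
+# missing file, a non-file input, a normal run, argument-carrying jobs, and
+# fasta inputs with and without sequence metadata.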
+zeroJob = mg.Job()
+
+emptyJob = mg.Job()
+emptyJob.add_input_dataset( mg.InputDataset("input1", mg.Dataset( (script_dir + "/data/test.empty"), "txt", 14)) )
+
+failJob = mg.Job()
+failJob.add_input_dataset( mg.InputDataset("input1", mg.Dataset( (script_dir + "/data/test1.full"), "txt", 15)) )
+
+msfileJob = mg.Job()
+msfileJob.add_input_dataset( mg.InputDataset("input1", mg.Dataset( (script_dir + "/data/not_here.full"), "txt", 15)) )
+
+notfileinpJob = mg.Job()
+msfileJob.add_input_dataset( mg.InputDataset("input1", mg.NotAFile() ) )
+
+runJob = mg.Job()
+runJob.add_input_dataset( mg.InputDataset("input1", mg.Dataset( (script_dir + "/data/test3.full"), "txt", 15)) )
+
+argJob = mg.Job()
+argJob.add_input_dataset( mg.InputDataset("input1", mg.Dataset( (script_dir + "/data/test3.full"), "txt", 15)) )
+argJob.set_arg_value( "careful", True )
+
+argNotFoundJob = mg.Job()
+argNotFoundJob.add_input_dataset( mg.InputDataset("input1", mg.Dataset( (script_dir + "/data/test3.full"), "txt", 15)) )
+argNotFoundJob.set_arg_value( "careful", False )
+
+dbJob = mg.Job()
+dbJob.add_input_dataset( mg.InputDataset("input1", mg.Dataset( (script_dir + "/data/test.fasta"), "fasta", 10)) )
+
+dbcountJob = mg.Job()
+dbcountJob.add_input_dataset( mg.InputDataset("input1", mg.Dataset( (script_dir + "/data/test.fasta"), "fasta", None)) )
+
+# ======================Tools===================================
+vanillaTool = mg.Tool( 'test' )
+
+unTool = mg.Tool( 'unregistered' )
+
+overlapTool = mg.Tool( 'test_overlap' )
+
+defaultTool = mg.Tool( 'test_tooldefault' )
+
+dbTool = mg.Tool( 'test_db' )
+dbinfTool = mg.Tool( 'test_db_high' )
+
+argTool = mg.Tool( 'test_arguments' )
+
+noVBTool = mg.Tool( 'test_no_verbose' )
+
+usersTool = mg.Tool( 'test_users' )
+
+numinputsTool = mg.Tool( 'test_num_input_datasets' )
+
+# =======================YML file================================
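+# On-disk fixture configs handed to map_tool_to_destination in the tests.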
+path = script_dir + "/data/tool_destination.yml"
+priority_path = script_dir + "/data/priority_tool_destination.yml"
+broken_default_dest_path = script_dir + "/data/dest_fail.yml"
+no_verbose_path = script_dir + "/data/test_no_verbose.yml"
+users_test_path = script_dir + "/data/test_users.yml"
+num_input_datasets_test_path = script_dir + "/data/test_num_input_datasets.yml"
+
+# ======================Test Variables=========================
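+# Byte-size ladder: value is 1 B and each constant below is 1024x the
+# previous, up through yottabytes (valueY).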
+value = 1
+valueK = value * 1024
+valueM = valueK * 1024
+valueG = valueM * 1024
+valueT = valueG * 1024
+valueP = valueT * 1024
+valueE = valueP * 1024
+valueZ = valueE * 1024
+valueY = valueZ * 1024
+
+
+class TestDynamicToolDestination(unittest.TestCase):
+    def setUp(self):
+        self.maxDiff = None
+        self.logger = logging.getLogger()
+
+    # =======================map_tool_to_destination()================================
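+    # Each test checks both the mapped destination and, via log_capture, the
+    # exact DEBUG trail emitted during config validation and mapping.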
+
+    @log_capture()
+    def test_brokenDestYML(self, l):
+        self.assertRaises(JobMappingException, map_tool_to_destination, runJob, theApp, vanillaTool, "user@email.com", True, broken_default_dest_path)
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'No global default destination specified in config!'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test3.full'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 3.23 KB')
+        )
+
+    @log_capture()
+    def test_filesize_empty(self, l):
+        self.assertRaises(JobMappingException, map_tool_to_destination, emptyJob, theApp, vanillaTool, "user@email.com", True, path)
+        self.assertRaises(JobMappingException, map_tool_to_destination, emptyJob, theApp, vanillaTool, "user@email.com", True, priority_path)
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test.empty'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 0.00 B'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total number of files: 1'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test.empty'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 0.00 B'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total number of files: 1')
+        )
+
+    @log_capture()
+    def test_filesize_zero(self, l):
+        self.assertRaises(JobMappingException, map_tool_to_destination, zeroJob, theApp, vanillaTool, "user@email.com", True, path)
+        self.assertRaises(JobMappingException, map_tool_to_destination, zeroJob, theApp, vanillaTool, "user@email.com", True, priority_path)
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 0.00 B'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total number of files: 0'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 0.00 B'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total number of files: 0')
+        )
+
+    @log_capture()
+    def test_filesize_fail(self, l):
+        self.assertRaises(JobMappingException, map_tool_to_destination, failJob, theApp, vanillaTool, "user@email.com", True, path)
+        self.assertRaises(JobMappingException, map_tool_to_destination, failJob, theApp, vanillaTool, "user@email.com", True, priority_path)
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test1.full'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 293.00 B'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total number of files: 1'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test1.full'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 293.00 B'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total number of files: 1')
+        )
+
+    @log_capture()
+    def test_filesize_run(self, l):
+        job = map_tool_to_destination( runJob, theApp, vanillaTool, "user@email.com", True, path )
+        self.assertEquals( job, 'Destination1' )
+        priority_job = map_tool_to_destination( runJob, theApp, vanillaTool, "user@email.com", True, priority_path )
+        self.assertEquals( priority_job, 'Destination1_high' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test3.full'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 3.23 KB'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total number of files: 1'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test' with 'Destination1'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test3.full'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total size: 3.23 KB'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total number of files: 1'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test' with 'Destination1_high'.")
+        )
+
+    @log_capture()
+    def test_default_tool(self, l):
+        job = map_tool_to_destination( runJob, theApp, defaultTool, "user@email.com", True, path )
+        self.assertEquals( job, 'waffles_default' )
+        priority_job = map_tool_to_destination( runJob, theApp, defaultTool, "user@email.com", True, priority_path )
+        self.assertEquals( priority_job, 'waffles_default_high' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Tool 'test_tooldefault' not specified in config. Using default destination."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_tooldefault' with 'waffles_default'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Tool 'test_tooldefault' not specified in config. Using default destination."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_tooldefault' with 'waffles_default_high'.")
+        )
+
+    @log_capture()
+    def test_arguments_tool(self, l):
+        job = map_tool_to_destination( argJob, theApp, argTool, "user@email.com", True, path )
+        self.assertEquals( job, 'Destination6' )
+        priority_job = map_tool_to_destination( argJob, theApp, argTool, "user@email.com", True, priority_path )
+        self.assertEquals( priority_job, 'Destination6_med' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_arguments' with 'Destination6'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_arguments' with 'Destination6_med'.")
+        )
+
+    @log_capture()
+    def test_arguments_arg_not_found(self, l):
+        job = map_tool_to_destination( argNotFoundJob, theApp, argTool, "user@email.com", True, path )
+        self.assertEquals( job, 'waffles_default' )
+        priority_job = map_tool_to_destination( argNotFoundJob, theApp, argTool, "user@email.com", True, priority_path )
+        self.assertEquals( priority_job, 'waffles_default_high' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_arguments' with 'waffles_default'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_arguments' with 'waffles_default_high'.")
+        )
+
+    @log_capture()
+    def test_tool_not_found(self, l):
+        job = map_tool_to_destination( runJob, theApp, unTool, "user@email.com", True, path )
+        self.assertEquals( job, 'waffles_default' )
+        priority_job = map_tool_to_destination( runJob, theApp, unTool, "user@email.com", True, priority_path )
+        self.assertEquals( priority_job, 'waffles_default_high' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Tool 'unregistered' not specified in config. Using default destination."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'unregistered' with 'waffles_default'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Tool 'unregistered' not specified in config. Using default destination."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'unregistered' with 'waffles_default_high'.")
+        )
+
+    @log_capture()
+    def test_fasta(self, l):
+        job = map_tool_to_destination( dbJob, theApp, dbTool, "user@email.com", True, path )
+        self.assertEquals( job, 'Destination4' )
+        priority_job = map_tool_to_destination( dbJob, theApp, dbTool, "user@email.com", True, priority_path )
+        self.assertEquals( priority_job, 'Destination4_high' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test.fasta'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total amount of records: 10'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_db' with 'Destination4'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test.fasta'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total amount of records: 10'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_db' with 'Destination4_high'.")
+        )
+
+    @log_capture()
+    def test_fasta_count(self, l):
+        job = map_tool_to_destination( dbcountJob, theApp, dbTool, "user@email.com", True, path )
+        self.assertEquals( job, 'Destination4' )
+        priority_job = map_tool_to_destination( dbcountJob, theApp, dbTool, "user@email.com", True, priority_path )
+        self.assertEquals( priority_job, 'Destination4_high' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test.fasta'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total amount of records: 6'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_db' with 'Destination4'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Loading file: input1' + script_dir + '/data/test.fasta'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Total amount of records: 6'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_db' with 'Destination4_high'.")
+        )
+
+    @log_capture()
+    def test_no_verbose(self, l):
+        job = map_tool_to_destination( runJob, theApp, noVBTool, "user@email.com", True, no_verbose_path )
+        self.assertEquals( job, 'Destination1' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_no_verbose' with 'Destination1'.")
+        )
+
+    @log_capture()
+    def test_authorized_user(self, l):
+        job = map_tool_to_destination( runJob, theApp, usersTool, "user@email.com", True, users_test_path )
+        self.assertEquals( job, 'special_cluster' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_users' with 'special_cluster'."),
+        )
+
+    @log_capture()
+    def test_unauthorized_user(self, l):
+        job = map_tool_to_destination( runJob, theApp, usersTool, "userblah@email.com", True, users_test_path )
+        self.assertEquals( job, 'lame_cluster' )
+
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Running 'test_users' with 'lame_cluster'.")
+        )
+
+
+# ================================Invalid yaml files==============================
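+# parse_yaml is driven with the inline fixtures from ymltests; with
+# return_bool=True it reports only whether the config is valid, otherwise it
+# returns the sanitized config dict for comparison against the expected one.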
+    @log_capture()
+    def test_no_file(self, l):
+        self.assertRaises(IOError, dt.parse_yaml, path="")
+        l.check()
+
+    @log_capture()
+    def test_bad_nice(self, l):
+        dt.parse_yaml(path=yt.ivYMLTest11, test=True)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG',
+             "nice_value goes from -20 to 20; rule 1 in 'spades' has a nice_value of '-21'. Setting nice_value to 0."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_empty_file(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest2, test=True), {})
+
+    @log_capture()
+    def test_no_tool_name(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest3, test=True), yt.iv3dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Malformed YML; expected job name, but found a list instead!'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_no_rule_type(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest4, test=True), yt.ivDict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No rule_type found for rule 1 in 'spades'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_no_rule_lower_bound(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest51, test=True), yt.ivDict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Missing bounds for rule 1 in 'spades'. Ignoring rule."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_no_rule_upper_bound(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest52, test=True), yt.ivDict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Missing bounds for rule 1 in 'spades'. Ignoring rule."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_no_rule_arg(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest53, test=True), yt.ivDict53)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Found a fail_message for rule 1 in 'spades', but destination is not 'fail'! Setting destination to 'fail'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_bad_rule_type(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest6, test=True), yt.ivDict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Unrecognized rule_type 'iencs' found in 'spades'. Ignoring..."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_no_err_msg(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest91, test=True), yt.iv91dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No nice_value found for rule 1 in 'spades'. Setting nice_value to 0."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Missing a fail_message for rule 1 in 'spades'. Adding generic fail_message."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_no_default_dest(self, l):
+        dt.parse_yaml(path=yt.ivYMLTest7, test=True)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'No global default destination specified in config!'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_invalid_category(self, l):
+        dt.parse_yaml(path=yt.ivYMLTest8, test=True)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'No global default destination specified in config!'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Unrecognized category 'ice_cream' found in config file!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_arguments_no_err_msg(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest12, test=True), yt.iv12dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG',
+            "Missing a fail_message for rule 1 in 'spades'. Adding generic fail_message."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_arguments_no_args(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest131, test=True), yt.iv131dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG',
+            "No arguments found for rule 1 in 'spades' despite being of type arguments. Ignoring rule."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_arguments_no_arg(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest132, test=True), yt.iv132dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Found a fail_message for rule 1 in 'spades', but destination is not 'fail'! Setting destination to 'fail'."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_multiple_jobs(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest133, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Missing a fail_message for rule 1 in 'smalt'.")
+        )
+
+    @log_capture()
+    def test_return_rule_for_multiple_jobs(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest133, test=True), yt.iv133dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Missing a fail_message for rule 1 in 'smalt'. Adding generic fail_message."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_no_destination(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest134, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No destination specified for rule 1 in 'spades'.")
+        )
+
+    @log_capture()
+    def test_return_rule_for_no_destination(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest134, test=True), yt.iv134dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No destination specified for rule 1 in 'spades'. Ignoring..."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_rule_for_reversed_bounds(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest135, test=True), yt.iv135dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "lower_bound exceeds upper_bound for rule 1 in 'spades'. Reversing bounds."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_missing_tool_fields(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest136, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Tool 'spades' does not have rules nor a default_destination!")
+        )
+
+    @log_capture()
+    def test_return_rule_for_missing_tool_fields(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest136, test=True), yt.iv136dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Tool 'spades' does not have rules nor a default_destination!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_blank_tool(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest137, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Config section for tool 'spades' is blank!")
+        )
+
+    @log_capture()
+    def test_return_rule_for_blank_tool(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest137, test=True), yt.iv137dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Config section for tool 'spades' is blank!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_malformed_users(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest138, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Entry '123' in users for rule 1 in tool 'spades' is in an invalid format!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Supplied email 'invaliduser.email at com' for rule 1 in tool 'spades' is in an invalid format!")
+        )
+
+    @log_capture()
+    def test_return_rule_for_malformed_users(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest138, test=True), yt.iv138dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Entry '123' in users for rule 1 in tool 'spades' is in an invalid format! Ignoring entry."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Supplied email 'invaliduser.email at com' for rule 1 in tool 'spades' is in an invalid format! Ignoring email."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_no_users(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest139, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Couldn't find a list under 'users:'!")
+        )
+
+    @log_capture()
+    def test_return_rule_for_no_users(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest139, test=True), yt.iv139dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Couldn't find a list under 'users:'! Ignoring rule."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_malformed_user_email(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest140, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Supplied email 'invalid.user2 at com' for rule 2 in tool 'spades' is in an invalid format!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Supplied email 'invalid.user1 at com' for rule 2 in tool 'spades' is in an invalid format!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No valid user emails were specified for rule 2 in tool 'spades'!")
+        )
+
+    @log_capture()
+    def test_return_rule_for_malformed_user_email(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest140, test=True), yt.iv140dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Supplied email 'invalid.user2 at com' for rule 2 in tool 'spades' is in an invalid format! Ignoring email."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Supplied email 'invalid.user1 at com' for rule 2 in tool 'spades' is in an invalid format! Ignoring email."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No valid user emails were specified for rule 2 in tool 'spades'! Ignoring rule."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_empty_users(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest141, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Entry 'None' in users for rule 2 in tool 'spades' is in an invalid format!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Entry 'None' in users for rule 2 in tool 'spades' is in an invalid format!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No valid user emails were specified for rule 2 in tool 'spades'!")
+        )
+
+    @log_capture()
+    def test_return_rule_for_empty_users(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest141, test=True), yt.iv141dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Entry 'None' in users for rule 2 in tool 'spades' is in an invalid format! Ignoring entry."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Entry 'None' in users for rule 2 in tool 'spades' is in an invalid format! Ignoring entry."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No valid user emails were specified for rule 2 in tool 'spades'! Ignoring rule."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_bad_num_input_datasets_bounds(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest142, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Error: lower_bound is set to Infinity, but must be lower than upper_bound!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "lower_bound exceeds upper_bound for rule 1 in 'smalt'.")
+        )
+
+    @log_capture()
+    def test_return_rule_for_bad_num_input_datasets_bound(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest142, test=True), yt.iv142dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Error: lower_bound is set to Infinity, but must be lower than upper_bound! Setting lower_bound to 0!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_return_bool_for_worse_num_input_datasets_bounds(self, l):
+        self.assertFalse(dt.parse_yaml(path=yt.ivYMLTest143, test=True, return_bool=True))
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Error: lower_bound is set to Infinity, but must be lower than upper_bound!")
+        )
+
+    @log_capture()
+    def test_return_rule_for_worse_num_input_datasets_bound(self, l):
+        self.assertEquals(dt.parse_yaml(path=yt.ivYMLTest143, test=True), yt.iv143dict)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Error: lower_bound is set to Infinity, but must be lower than upper_bound! Setting lower_bound to 0!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_priority_default_destination_without_med_priority_destination(self, l):
+        dt.parse_yaml(path=yt.ivYMLTest144, test=True)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No default 'med' priority destination!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_priority_default_destination_with_invalid_priority_destination(self, l):
+        dt.parse_yaml(path=yt.ivYMLTest145, test=True)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Invalid default priority destination 'mine' found in config!"),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_tool_without_med_priority_destination(self, l):
+        dt.parse_yaml(path=yt.ivYMLTest146, test=True)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "No 'med' priority destination for rule 1 in 'smalt'. Ignoring..."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_tool_with_invalid_priority_destination(self, l):
+        dt.parse_yaml(path=yt.ivYMLTest147, test=True)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "Invalid priority destination 'mine' for rule 1 in 'smalt'. Ignoring..."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+    @log_capture()
+    def test_users_with_invalid_priority(self, l):
+        dt.parse_yaml(path=yt.ivYMLTest148, test=True)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', "User 'user at email.com', priority is not valid! Must be either low, med, or high."),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.')
+        )
+
+# ================================Valid yaml files==============================
+    @log_capture()
+    def test_parse_valid_yml(self, l):
+        self.assertEqual(dt.parse_yaml(yt.vYMLTest1, test=True), yt.vdictTest1_yml)
+        self.assertEqual(dt.parse_yaml(yt.vYMLTest2, test=True), yt.vdictTest2_yml)
+        self.assertEqual(dt.parse_yaml(yt.vYMLTest3, test=True), yt.vdictTest3_yml)
+        self.assertTrue(dt.parse_yaml(yt.vYMLTest4, test=True, return_bool=True))
+        self.assertEqual(dt.parse_yaml(yt.vYMLTest4, test=True), yt.vdictTest4_yml)
+        self.assertTrue(dt.parse_yaml(yt.vYMLTest5, test=True, return_bool=True))
+        self.assertEqual(dt.parse_yaml(yt.vYMLTest5, test=True), yt.vdictTest5_yml)
+        self.assertTrue(dt.parse_yaml(yt.vYMLTest6, test=True, return_bool=True))
+        self.assertEqual(dt.parse_yaml(yt.vYMLTest6, test=True), yt.vdictTest6_yml)
+        self.assertTrue(dt.parse_yaml(yt.vYMLTest7, test=True, return_bool=True))
+        self.assertEqual(dt.parse_yaml(yt.vYMLTest7, test=True), yt.vdictTest7_yml)
+        l.check(
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Running config validation...'),
+            ('galaxy.jobs.dynamic_tool_destination', 'DEBUG', 'Finished config validation.'),
+        )
+
+# ================================Testing str_to_bytes==========================
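+# str_to_bytes accepts bare ints, digit strings, and "<n> <unit>" strings with
+# case-insensitive units; anything else raises MalformedYMLException.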
+    def test_str_to_bytes_invalid(self):
+        self.assertRaises(dt.MalformedYMLException, dt.str_to_bytes, "1d")
+        self.assertRaises(dt.MalformedYMLException, dt.str_to_bytes, "1 d")
+
+    def test_str_to_bytes_valid(self):
+        self.assertEqual(dt.str_to_bytes("-1"), -1)
+        self.assertEqual(dt.str_to_bytes( "1" ), value)
+        self.assertEqual(dt.str_to_bytes( 156 ), 156)
+        self.assertEqual(dt.str_to_bytes( "1 B" ), value)
+        self.assertEqual(dt.str_to_bytes( "1 KB" ), valueK)
+        self.assertEqual(dt.str_to_bytes( "1 MB" ), valueM)
+        self.assertEqual(dt.str_to_bytes( "1 gB" ), valueG)
+        self.assertEqual(dt.str_to_bytes( "1 Tb" ), valueT)
+        self.assertEqual(dt.str_to_bytes( "1 pb" ), valueP)
+        self.assertEqual(dt.str_to_bytes( "1 EB" ), valueE)
+        self.assertEqual(dt.str_to_bytes( "1 ZB" ), valueZ)
+        self.assertEqual(dt.str_to_bytes( "1 YB" ), valueY)
+
+# ==============================Testing bytes_to_str=============================
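+# bytes_to_str renders two decimals in the largest unit that fits; an optional
+# second argument caps the unit, and an unrecognized cap falls back to the
+# natural unit.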
+    @log_capture()
+    def test_bytes_to_str_invalid(self, l):
+        testValue = ""
+        self.assertRaises( ValueError, dt.bytes_to_str, testValue )
+        testValue = "5564fads"
+        self.assertRaises( ValueError, dt.bytes_to_str, testValue )
+        testValue = "45.0.1"
+        self.assertRaises( ValueError, dt.bytes_to_str, testValue )
+        self.assertRaises( ValueError, dt.bytes_to_str, "1 024" )
+
+    def test_bytes_to_str_valid(self):
+        self.assertEqual(dt.bytes_to_str(-1), "Infinity")
+        self.assertEqual(dt.bytes_to_str( value), "1.00 B")
+        self.assertEqual(dt.bytes_to_str( valueK), "1.00 KB")
+        self.assertEqual(dt.bytes_to_str( valueM), "1.00 MB")
+        self.assertEqual(dt.bytes_to_str( valueG), "1.00 GB")
+        self.assertEqual(dt.bytes_to_str( valueT ), "1.00 TB")
+        self.assertEqual(dt.bytes_to_str( valueP ), "1.00 PB")
+        self.assertEqual(dt.bytes_to_str( valueE ), "1.00 EB")
+        self.assertEqual(dt.bytes_to_str( valueZ ), "1.00 ZB")
+        self.assertEqual(dt.bytes_to_str( valueY ), "1.00 YB")
+
+        self.assertEqual(dt.bytes_to_str( 10, "B" ), "10.00 B")
+        self.assertEqual(dt.bytes_to_str( 1000000, "KB" ), "976.56 KB")
+        self.assertEqual(dt.bytes_to_str( 1000000000, "MB" ), "953.67 MB")
+        self.assertEqual(dt.bytes_to_str( 1000000000000, "GB" ), "931.32 GB")
+        self.assertEqual(dt.bytes_to_str( 1000000000000000, "TB" ), "909.49 TB")
+        self.assertEqual(dt.bytes_to_str( 1000000000000000000, "PB" ), "888.18 PB")
+        self.assertEqual(dt.bytes_to_str( 1000000000000000000000, "EB" ), "867.36 EB")
+        self.assertEqual(dt.bytes_to_str( 1000000000000000000000000, "ZB" ), "847.03 ZB")
+
+        self.assertEqual(dt.bytes_to_str( value, "KB" ), "1.00 B")
+        self.assertEqual(dt.bytes_to_str( valueK, "MB" ), "1.00 KB")
+        self.assertEqual(dt.bytes_to_str( valueM, "GB" ), "1.00 MB")
+        self.assertEqual(dt.bytes_to_str( valueG, "TB" ), "1.00 GB")
+        self.assertEqual(dt.bytes_to_str( valueT, "PB" ), "1.00 TB")
+        self.assertEqual(dt.bytes_to_str( valueP, "EB" ), "1.00 PB")
+        self.assertEqual(dt.bytes_to_str( valueE, "ZB" ), "1.00 EB")
+        self.assertEqual(dt.bytes_to_str( valueZ, "YB" ), "1.00 ZB")
+
+        self.assertEqual(dt.bytes_to_str( "1" ), "1.00 B")
+        self.assertEqual(dt.bytes_to_str( "\t\t1000000" ), "976.56 KB")
+        self.assertEqual(dt.bytes_to_str( "1000000000\n" ), "953.67 MB")
+        self.assertEqual(dt.bytes_to_str( 1024, "fda" ), "1.00 KB")
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/unit/jobs/dynamic_tool_destination/ymltests.py b/test/unit/jobs/dynamic_tool_destination/ymltests.py
new file mode 100644
index 0000000..b5b325c
--- /dev/null
+++ b/test/unit/jobs/dynamic_tool_destination/ymltests.py
@@ -0,0 +1,947 @@
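+# Inline YAML fixtures and their expected parse results, imported by
+# test_dynamic_tool_destination.py as "yt".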
+# =============================================Valid YAML==================================================
+# One job, one rule
+vYMLTest1 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: 100000000
+            destination: things
+    default_destination: waffles_default
+    verbose: True
+"""
+
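+# Expected parse result; note that the verbose flag from the YAML above is
+# not part of the returned dict.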
+vdictTest1_yml = {
+    "tools": {
+        "spades": {
+            "rules": [
+                {
+                    "rule_type": "file_size",
+                    "nice_value": 0,
+                    "lower_bound": 0,
+                    "upper_bound": 100000000,
+                    "destination": "things"
+                },
+            ]
+        }
+    },
+    'default_destination': "waffles_default"
+}
+
+# Multiple jobs, multiple rules
+vYMLTest2 = '''
+    tools:
+      spades:
+        default_destination: waffles_default
+      smalt:
+        rules:
+          - rule_type: file_size
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: 100000000
+            fail_message: Too few reads for smalt to work
+            destination: fail
+          - rule_type: file_size
+            nice_value: 0
+            lower_bound: 100000000
+            upper_bound: Infinity
+            fail_message: Too few reads for smalt to work
+            destination: fail
+    default_destination: waffles_low
+    verbose: True
+'''
+
+vdictTest2_yml = {
+    "tools": {
+        "spades": {
+            "default_destination": "waffles_default"
+        },
+        "smalt": {
+            "rules": [
+                {
+                    "rule_type": "file_size",
+                    'nice_value': 0,
+                    "lower_bound": 0,
+                    "upper_bound": 100000000,
+                    "fail_message": "Too few reads for smalt to work",
+                    "destination": "fail"
+                }, {
+                    "rule_type": "file_size",
+                    'nice_value': 0,
+                    "lower_bound": 100000000,
+                    "upper_bound": "Infinity",
+                    "fail_message": "Too few reads for smalt to work",
+                    "destination": "fail"
+                }
+            ]
+        }
+    },
+    'default_destination': "waffles_low"
+}
+
+# Rule with extra attribute
+vYMLTest3 = '''
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            nice_value: 0
+            hax: 1337
+            lower_bound: 0
+            upper_bound: 100000000
+            fail_message: Whats hax
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+'''
+
+vdictTest3_yml = {
+    "tools": {
+        "spades": {
+            "rules": [
+                {
+                    "rule_type": "file_size",
+                    'nice_value': 0,
+                    "hax": 1337,
+                    "lower_bound": 0,
+                    "upper_bound": 100000000,
+                    "fail_message": "Whats hax",
+                    "destination": "fail"
+                }
+            ]
+        }
+    },
+    'default_destination': "waffles_default"
+}
+
+# Arguments type
+vYMLTest4 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: arguments
+            nice_value: 0
+            arguments:
+              careful: true
+            fail_message: Failure
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+"""
+
+vdictTest4_yml = {
+    "tools": {
+        "spades": {
+            "rules": [
+                {
+                    "rule_type": "arguments",
+                    'nice_value': 0,
+                    "arguments": {
+                        "careful": True,
+                    },
+                    "fail_message": "Failure",
+                    "destination": "fail"
+                }
+            ]
+        }
+    },
+    'default_destination': "waffles_default"
+}
+
+# Records type
+vYMLTest5 = '''
+    tools:
+      spades:
+        rules:
+          - rule_type: records
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: 100000000
+            destination: waffles_low_4
+    default_destination: waffles_default
+    verbose: True
+'''
+
+vdictTest5_yml = {
+    "tools": {
+        "spades": {
+            "rules": [
+                {
+                    "rule_type": "records",
+                    'nice_value': 0,
+                    "lower_bound": 0,
+                    "upper_bound": 100000000,
+                    "destination": "waffles_low_4"
+                }
+            ]
+        }
+    },
+    'default_destination': "waffles_default"
+}
+
+# Num_input_datasets type
+vYMLTest6 = '''
+    tools:
+      spades:
+        default_destination: waffles_default
+      smalt:
+        rules:
+          - rule_type: num_input_datasets
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: 200
+            destination: cluster_low_4
+          - rule_type: num_input_datasets
+            nice_value: 0
+            lower_bound: 200
+            upper_bound: Infinity
+            destination: cluster_high_32
+    default_destination: waffles_low
+    verbose: True
+'''
+
+vdictTest6_yml = {
+    "tools": {
+        "spades": {
+            "default_destination": "waffles_default"
+        },
+        "smalt": {
+            "rules": [
+                {
+                    "rule_type": "num_input_datasets",
+                    'nice_value': 0,
+                    "lower_bound": 0,
+                    "upper_bound": 200,
+                    "destination": "cluster_low_4"
+                }, {
+                    "rule_type": "num_input_datasets",
+                    'nice_value': 0,
+                    "lower_bound": 200,
+                    "upper_bound": "Infinity",
+                    "destination": "cluster_high_32"
+                }
+            ]
+        }
+    },
+    'default_destination': "waffles_low"
+}
+
+# One job, one rule, and priority destinations
+vYMLTest7 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: 100000000
+            destination:
+              priority:
+                med: things
+    default_destination:
+      priority:
+        med: waffles_default
+    users:
+      user@example.com:
+        priority: med
+    verbose: True
+"""
+
+vdictTest7_yml = {
+    "tools": {
+        "spades": {
+            "rules": [
+                {
+                    "rule_type": "file_size",
+                    "nice_value": 0,
+                    "lower_bound": 0,
+                    "upper_bound": 100000000,
+                    "destination": {
+                        'priority': {
+                            'med': 'things'
+                        }
+                    }
+                },
+            ]
+        }
+    },
+    'default_destination': {
+        'priority': {
+            'med': 'waffles_default'
+        }
+    },
+    'users': {
+        'user@example.com': {
+            'priority': 'med'
+        }
+    }
+}
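+
+# vYMLTest7 exercises priority destinations: a destination (or the default)
+# maps priority levels to concrete destinations, and each user is assigned a
+# level. Presumably the runner resolves a job roughly as follows (hypothetical
+# sketch, with 'med' as the implied default level per the invalid tests below):
+#
+#     level = config['users'].get(user_email, {}).get('priority', 'med')
+#     dest = rule['destination']['priority'][level]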
+# =====================================================Invalid YAML tests==========================================================
+
+# Empty file
+ivYMLTest2 = ""
+
+# Job without name
+ivYMLTest3 = '''
+    tools:
+      rules:
+        - rule_type: file_size
+          nice_value: 0
+          upper_bound: 100
+          lower_bound: 0
+          destination: fail
+    default_destination: waffles_default
+    verbose: True
+'''
+
+iv3dict = {
+    'default_destination': "waffles_default"
+}
+
+# Rule missing type
+ivYMLTest4 = '''
+    tools:
+      spades:
+        rules:
+          - nice_value: 0
+            lower_bound: 0
+            upper_bound: 0
+            fail_message: No type...
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+'''
+
+# Rule missing attribute
+ivYMLTest51 = '''
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            nice_value: 0
+            upper_bound: 0
+            fail_message: No type...
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+'''
+
+# Rule missing attribute
+ivYMLTest52 = '''
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            nice_value: 0
+            lower_bound: 0
+            fail_message: No type...
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+'''
+
+# Rule missing attribute
+ivYMLTest53 = '''
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: 0
+            fail_message: No type...
+    default_destination: waffles_default
+    verbose: True
+'''
+
+ivDict53 = {
+    'default_destination': 'waffles_default',
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'upper_bound': 0,
+                    'rule_type':
+                    'file_size',
+                    'fail_message':
+                    'No type...',
+                    'nice_value': 0,
+                    'lower_bound': 0,
+                    'destination': 'fail'
+                }
+            ]
+        }
+    }
+}
+
+# Rule unknown type
+ivYMLTest6 = '''
+    tools:
+      spades:
+        rules:
+          - rule_type: iencs
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: 0
+            fail_message: No type...
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+'''
+
+# No default destination
+ivYMLTest7 = '''
+    default_destination:
+    verbose: True
+'''
+
+ivDict = {
+    'default_destination': "waffles_default"
+}
+
+# Invalid category
+ivYMLTest8 = '''
+    ice_cream:
+    verbose: True
+'''
+
+# Tool rule routed to fail with no fail_message (and no nice_value)
+ivYMLTest91 = '''
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            lower_bound: 0
+            upper_bound: 0
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+'''
+
+iv91dict = {
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'lower_bound': 0,
+                    'nice_value': 0,
+                    'rule_type': 'file_size',
+                    'upper_bound': 0,
+                    'destination': 'fail',
+                    'fail_message': "Invalid parameters for rule 1 in 'spades'."
+                }
+            ]
+        }
+    },
+    'default_destination': "waffles_default"
+}
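+
+# As iv91dict shows, a rule that routes to the 'fail' destination without a
+# fail_message is not dropped: the parser apparently fills in nice_value 0 and
+# generates a message naming the rule and tool, along the lines of (format
+# string taken from the fixture; the surrounding names are hypothetical):
+#
+#     rule.setdefault('nice_value', 0)
+#     rule.setdefault('fail_message',
+#                     "Invalid parameters for rule %s in '%s'." % (rule_number, tool_name))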
+
+# Tool default fail no destination
+ivYMLTest11 = '''
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            nice_value: -21
+            lower_bound: 1 KB
+            upper_bound: Infinity
+            destination: waffles_low
+        default_destination: waffles_low
+    default_destination: waffles_default
+    verbose: True
+'''
+
+# Arguments fail no fail_message
+ivYMLTest12 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: arguments
+            nice_value: 0
+            arguments:
+              careful: true
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv12dict = {
+    "tools": {
+        "spades": {
+            "rules": [
+                {
+                    "rule_type": "arguments",
+                    'nice_value': 0,
+                    "arguments": {
+                        "careful": True,
+                    },
+                    "destination": "fail",
+                    "fail_message": "Invalid parameters for rule 1 in 'spades'."
+                }
+            ]
+        }
+    },
+    'default_destination': "waffles_default"
+}
+
+# Arguments fail no arguments
+ivYMLTest131 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: arguments
+            nice_value: 0
+            fail_message: Something went wrong
+            destination: fail
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv131dict = {
+    'default_destination': "waffles_default"
+}
+
+# Arguments fail no destination
+ivYMLTest132 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: arguments
+            nice_value: 0
+            fail_message: Something went wrong
+            arguments:
+              careful: true
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv132dict = {
+    'default_destination': 'waffles_default',
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'arguments': {
+                        'careful': True
+                    },
+                    'rule_type': 'arguments',
+                    'destination': 'fail',
+                    'fail_message': 'Something went wrong',
+                    'nice_value': 0
+                }
+            ]
+        }
+    }
+}
+
+# Multiple rules in 1 job, first one failing
+ivYMLTest133 = '''
+    tools:
+      smalt:
+        rules:
+          - rule_type: file_size
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: 100000000
+            destination: fail
+          - rule_type: file_size
+            nice_value: 0
+            lower_bound: 100000000
+            upper_bound: Infinity
+            destination: waffles_low_4
+    default_destination: waffles_low
+    verbose: True
+'''
+
+iv133dict = {
+    "tools": {
+        "smalt": {
+            "rules": [
+                {
+                    "rule_type": "file_size",
+                    'nice_value': 0,
+                    "lower_bound": 0,
+                    "upper_bound": 100000000,
+                    "fail_message": "Invalid parameters for rule 1 in 'smalt'.",
+                    "destination": "fail"
+                }, {
+                    "rule_type": "file_size",
+                    'nice_value': 0,
+                    "lower_bound": 100000000,
+                    "upper_bound": "Infinity",
+                    "destination": "waffles_low_4"
+                }
+            ]
+        }
+    },
+    'default_destination': "waffles_low"
+}
+
+# No destination and no fail_message
+ivYMLTest134 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            upper_bound: 10000
+            lower_bound: 0
+            nice_value: 0
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv134dict = {
+    'default_destination': 'waffles_default',
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'rule_type': 'file_size',
+                    'upper_bound': 10000,
+                    'lower_bound': 0,
+                    'nice_value': 0
+                }
+            ]
+        }
+    }
+}
+
+# Reversed upper and lower bounds
+ivYMLTest135 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            upper_bound: 100
+            lower_bound: 200
+            nice_value: 0
+            destination: waffles_low_4
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv135dict = {
+    'default_destination': 'waffles_default',
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'rule_type': 'file_size',
+                    'upper_bound': 200,
+                    'lower_bound': 100,
+                    'nice_value': 0,
+                    'destination': 'waffles_low_4'
+                }
+            ]
+        }
+    }
+}
+
+# Tool has rules category but no rules, and no tool-specific default destination
+ivYMLTest136 = """
+    tools:
+      spades:
+        rules:
+
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv136dict = {
+    'default_destination': 'waffles_default'
+}
+
+# Tool is blank; no tool-specific default destination, no rules category
+ivYMLTest137 = """
+    tools:
+      spades:
+
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv137dict = {
+    'default_destination': 'waffles_default'
+}
+
+# Tool specifies authorized users with an invalid entry
+ivYMLTest138 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            upper_bound: 200
+            lower_bound: 100
+            nice_value: 0
+            destination: waffles_low_4
+            users:
+              - validuser@email.com
+              - invaliduser.email@com
+              - 123
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv138dict = {
+    'default_destination': 'waffles_default',
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'rule_type': 'file_size',
+                    'upper_bound': 200,
+                    'lower_bound': 100,
+                    'nice_value': 0,
+                    'destination': 'waffles_low_4',
+                    'users': [
+                        'validuser@email.com'
+                    ]
+                }
+            ]
+        }
+    }
+}
+
+# Tool does not specify list under users
+ivYMLTest139 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            upper_bound: 600
+            lower_bound: 200
+            nice_value: 0
+            destination: waffles_high
+          - rule_type: file_size
+            upper_bound: 199
+            lower_bound: 100
+            nice_value: 0
+            destination: waffles_low_4
+            users:
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv139dict = {
+    'default_destination': 'waffles_default',
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'rule_type': 'file_size',
+                    'upper_bound': 600,
+                    'lower_bound': 200,
+                    'nice_value': 0,
+                    'destination': 'waffles_high'
+                }
+            ]
+        }
+    }
+}
+
+# Tool supplies only invalid users
+ivYMLTest140 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            upper_bound: 600
+            lower_bound: 200
+            nice_value: 0
+            destination: waffles_high
+          - rule_type: file_size
+            upper_bound: 199
+            lower_bound: 100
+            nice_value: 0
+            destination: waffles_low_4
+            users:
+                - invalid.user1@com
+                - invalid.user2@com
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv140dict = {
+    'default_destination': 'waffles_default',
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'rule_type': 'file_size',
+                    'upper_bound': 600,
+                    'lower_bound': 200,
+                    'nice_value': 0,
+                    'destination': 'waffles_high'
+                }
+            ]
+        }
+    }
+}
+
+# Tool supplies users list, but empty
+ivYMLTest141 = """
+    tools:
+      spades:
+        rules:
+          - rule_type: file_size
+            upper_bound: 600
+            lower_bound: 200
+            nice_value: 0
+            destination: waffles_high
+          - rule_type: file_size
+            upper_bound: 199
+            lower_bound: 100
+            nice_value: 0
+            destination: waffles_low_4
+            users:
+                -
+                -
+    default_destination: waffles_default
+    verbose: True
+"""
+
+iv141dict = {
+    'default_destination': 'waffles_default',
+    'tools': {
+        'spades': {
+            'rules': [
+                {
+                    'rule_type': 'file_size',
+                    'upper_bound': 600,
+                    'lower_bound': 200,
+                    'nice_value': 0,
+                    'destination': 'waffles_high'
+                }
+            ]
+        }
+    }
+}
+
+# Bad bounds setup for num_input_datasets
+ivYMLTest142 = '''
+    tools:
+      smalt:
+        rules:
+          - rule_type: num_input_datasets
+            nice_value: 0
+            lower_bound: Infinity
+            upper_bound: 200
+            destination: cluster_low_4
+    default_destination: waffles_low
+    verbose: True
+'''
+
+iv142dict = {
+    'default_destination': 'waffles_low',
+    'tools': {
+        'smalt': {
+            'rules': [
+                {
+                    'rule_type': 'num_input_datasets',
+                    'upper_bound': 200,
+                    'lower_bound': 0,
+                    'nice_value': 0,
+                    'destination': 'cluster_low_4'
+                }
+            ]
+        }
+    }
+}
+
+# Even worse bounds setup for num_input_datasets
+ivYMLTest143 = '''
+    tools:
+      smalt:
+        rules:
+          - rule_type: num_input_datasets
+            nice_value: 0
+            lower_bound: Infinity
+            upper_bound: Infinity
+            destination: cluster_low_4
+    default_destination: waffles_low
+    verbose: True
+'''
+
+iv143dict = {
+    'default_destination': 'waffles_low',
+    'tools': {
+        'smalt': {
+            'rules': [
+                {
+                    'rule_type': 'num_input_datasets',
+                    'upper_bound': 'Infinity',
+                    'lower_bound': 0,
+                    'nice_value': 0,
+                    'destination': 'cluster_low_4'
+                }
+            ]
+        }
+    }
+}
+
+# No med priority destination in default destination
+ivYMLTest144 = '''
+    default_destination:
+      priority:
+        low: waffles_low
+    verbose: True
+'''
+
+# invalid priority destination in default destination
+ivYMLTest145 = '''
+    default_destination:
+      priority:
+        med: waffles_low
+        mine: waffles_low
+    verbose: True
+'''
+
+# No med priority destination in tool config
+ivYMLTest146 = '''
+    tools:
+      smalt:
+        rules:
+          - rule_type: num_input_datasets
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: Infinity
+            destination:
+              priority:
+                low: cluster_low_4
+    default_destination:
+      priority:
+        med: waffles_low
+    verbose: True
+'''
+
+# Invalid priority destination in tool config
+ivYMLTest147 = '''
+    tools:
+      smalt:
+        rules:
+          - rule_type: num_input_datasets
+            nice_value: 0
+            lower_bound: 0
+            upper_bound: Infinity
+            destination:
+              priority:
+                med: cluster_med_4
+                mine: cluster_low_4
+    default_destination:
+      priority:
+        med: waffles_low
+    verbose: True
+'''
+
+# invalid priority in users section
+ivYMLTest148 = '''
+    default_destination:
+      priority:
+        med: waffles_low
+    users:
+      user@email.com:
+        priority: mine
+    verbose: True
+'''
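+
+# The companion test module presumably feeds each vYMLTest*/ivYMLTest* string
+# through the config parser and asserts the result equals the matching dict
+# fixture, e.g. (hypothetical entry point name):
+#
+#     assert parse_yaml_config(ivYMLTest135) == iv135dict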
diff --git a/test/unit/jobs/test_command_factory.py b/test/unit/jobs/test_command_factory.py
new file mode 100644
index 0000000..34108f0
--- /dev/null
+++ b/test/unit/jobs/test_command_factory.py
@@ -0,0 +1,190 @@
+import os
+import shutil
+from os import getcwd
+from tempfile import mkdtemp
+from unittest import TestCase
+
+from galaxy.jobs.command_factory import build_command
+from galaxy.util.bunch import Bunch
+
+MOCK_COMMAND_LINE = "/opt/galaxy/tools/bowtie /mnt/galaxyData/files/000/input000.dat"
+TEST_METADATA_LINE = "set_metadata_and_stuff.sh"
+TEST_FILES_PATH = "file_path"
+
+
+class TestCommandFactory(TestCase):
+
+    def setUp(self):
+        self.job_dir = mkdtemp()
+        self.job_wrapper = MockJobWrapper(self.job_dir)
+        self.workdir_outputs = []
+
+        def workdir_outputs(job_wrapper, **kwds):
+            assert job_wrapper == self.job_wrapper
+            return self.workdir_outputs
+
+        self.runner = Bunch(app=Bunch(model=Bunch(Dataset=Bunch(file_path=TEST_FILES_PATH))), get_work_dir_outputs=workdir_outputs)
+        self.include_metadata = False
+        self.include_work_dir_outputs = True
+
+    def tearDown(self):
+        shutil.rmtree(self.job_dir)
+
+    def test_simplest_command(self):
+        self.include_work_dir_outputs = False
+        self.__assert_command_is( _surrond_command(MOCK_COMMAND_LINE + "; return_code=$?" ))
+
+    def test_shell_commands(self):
+        self.include_work_dir_outputs = False
+        dep_commands = [". /opt/galaxy/tools/bowtie/default/env.sh"]
+        self.job_wrapper.dependency_shell_commands = dep_commands
+        self.__assert_command_is( _surrond_command("%s; %s; return_code=$?" % (dep_commands[0], MOCK_COMMAND_LINE) ))
+
+    def test_shell_commands_external(self):
+        self.job_wrapper.commands_in_new_shell = True
+        self.include_work_dir_outputs = False
+        dep_commands = [". /opt/galaxy/tools/bowtie/default/env.sh"]
+        self.job_wrapper.dependency_shell_commands = dep_commands
+        self.__assert_command_is( _surrond_command( "%s/tool_script.sh; return_code=$?" % self.job_wrapper.working_directory)  )
+        self.__assert_tool_script_is( "#!/bin/sh\n%s; %s" % (dep_commands[0], MOCK_COMMAND_LINE) )
+
+    def test_remote_dependency_resolution(self):
+        self.include_work_dir_outputs = False
+        dep_commands = [". /opt/galaxy/tools/bowtie/default/env.sh"]
+        self.job_wrapper.dependency_shell_commands = dep_commands
+        self.__assert_command_is(_surrond_command(MOCK_COMMAND_LINE + "; return_code=$?"), remote_command_params=dict(dependency_resolution="remote"))
+
+    def test_explicit_local_dependency_resolution(self):
+        self.include_work_dir_outputs = False
+        dep_commands = [". /opt/galaxy/tools/bowtie/default/env.sh"]
+        self.job_wrapper.dependency_shell_commands = dep_commands
+        self.__assert_command_is( _surrond_command("%s; %s; return_code=$?" % (dep_commands[0], MOCK_COMMAND_LINE)),
+                                 remote_command_params=dict(dependency_resolution="local"))
+
+    def test_task_prepare_inputs(self):
+        self.include_work_dir_outputs = False
+        self.job_wrapper.prepare_input_files_cmds = ["/opt/split1", "/opt/split2"]
+        self.__assert_command_is( _surrond_command("/opt/split1; /opt/split2; %s; return_code=$?") % MOCK_COMMAND_LINE )
+
+    def test_workdir_outputs(self):
+        self.include_work_dir_outputs = True
+        self.workdir_outputs = [("foo", "bar")]
+        self.__assert_command_is( _surrond_command('%s; return_code=$?; if [ -f foo ] ; then cp foo bar ; fi' % MOCK_COMMAND_LINE ))
+
+    def test_set_metadata_skipped_if_unneeded(self):
+        self.include_metadata = True
+        self.include_work_dir_outputs = False
+        self.__assert_command_is( _surrond_command( MOCK_COMMAND_LINE + "; return_code=$?" ) )
+
+    def test_set_metadata(self):
+        self._test_set_metadata()
+
+    def test_strips_trailing_semicolons(self):
+        self.job_wrapper.command_line = "%s;" % MOCK_COMMAND_LINE
+        self._test_set_metadata()
+
+    def _test_set_metadata(self):
+        self.include_metadata = True
+        self.include_work_dir_outputs = False
+        self.job_wrapper.metadata_line = TEST_METADATA_LINE
+        expected_command = _surrond_command("%s; return_code=$?; cd '%s'; %s" % (MOCK_COMMAND_LINE, self.job_dir, TEST_METADATA_LINE))
+        self.__assert_command_is( expected_command )
+
+    def test_empty_metadata(self):
+        """
+        Test empty metadata as produced by TaskWrapper.
+        """
+        self.include_metadata = True
+        self.include_work_dir_outputs = False
+        self.job_wrapper.metadata_line = ' '
+        # An empty metadata command does not touch the command line.
+        expected_command = _surrond_command("%s; return_code=$?; cd '%s'" % (MOCK_COMMAND_LINE, self.job_dir))
+        self.__assert_command_is( expected_command )
+
+    def test_metadata_kwd_defaults(self):
+        configured_kwds = self.__set_metadata_with_kwds()
+        assert configured_kwds['exec_dir'] == getcwd()
+        assert configured_kwds['tmp_dir'] == self.job_wrapper.working_directory
+        assert configured_kwds['dataset_files_path'] == TEST_FILES_PATH
+        assert configured_kwds['output_fnames'] == ['output1']
+
+    def test_metadata_kwds_override(self):
+        configured_kwds = self.__set_metadata_with_kwds(
+            exec_dir="/path/to/remote/galaxy",
+            tmp_dir="/path/to/remote/staging/directory/job1",
+            dataset_files_path="/path/to/remote/datasets/",
+            output_fnames=['/path/to/remote_output1'],
+        )
+        assert configured_kwds['exec_dir'] == "/path/to/remote/galaxy"
+        assert configured_kwds['tmp_dir'] == "/path/to/remote/staging/directory/job1"
+        assert configured_kwds['dataset_files_path'] == "/path/to/remote/datasets/"
+        assert configured_kwds['output_fnames'] == ['/path/to/remote_output1']
+
+    def __set_metadata_with_kwds(self, **kwds):
+        self.include_metadata = True
+        self.include_work_dir_outputs = False
+        self.job_wrapper.metadata_line = TEST_METADATA_LINE
+        if kwds:
+            self.__command(remote_command_params=dict(metadata_kwds=kwds))
+        else:
+            self.__command()
+        return self.job_wrapper.configured_external_metadata_kwds
+
+    def __assert_command_is(self, expected_command, **command_kwds):
+        command = self.__command(**command_kwds)
+        self.assertEqual(command, expected_command)
+
+    def __assert_tool_script_is(self, expected_command):
+        self.assertEqual(open(self.__tool_script, "r").read(), expected_command)
+
+    @property
+    def __tool_script(self):
+        return os.path.join(self.job_dir, "tool_script.sh")
+
+    def __command(self, **extra_kwds):
+        kwds = dict(
+            runner=self.runner,
+            job_wrapper=self.job_wrapper,
+            include_metadata=self.include_metadata,
+            include_work_dir_outputs=self.include_work_dir_outputs,
+            **extra_kwds
+        )
+        return build_command(**kwds)
+
+
+def _surrond_command(command):
+    return '''mkdir -p working; cd working; %s; sh -c "exit $return_code"''' % command
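+
+# For reference, _surrond_command("true; return_code=$?") produces:
+#   mkdir -p working; cd working; true; return_code=$?; sh -c "exit $return_code"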
+
+
+class MockJobWrapper(object):
+
+    def __init__(self, job_dir):
+        self.strict_shell = False
+        self.write_version_cmd = None
+        self.command_line = MOCK_COMMAND_LINE
+        self.dependency_shell_commands = []
+        self.metadata_line = None
+        self.configured_external_metadata_kwds = None
+        self.working_directory = job_dir
+        self.prepare_input_files_cmds = None
+        self.commands_in_new_shell = False
+        self.app = Bunch(
+            config=Bunch(
+                check_job_script_integrity=False,
+            )
+        )
+        self.shell = "/bin/sh"
+
+    def get_command_line(self):
+        return self.command_line
+
+    @property
+    def requires_setting_metadata(self):
+        return self.metadata_line is not None
+
+    def setup_external_metadata(self, *args, **kwds):
+        self.configured_external_metadata_kwds = kwds
+        return self.metadata_line
+
+    def get_output_fnames(self):
+        return ["output1"]
diff --git a/test/unit/jobs/test_datasets.py b/test/unit/jobs/test_datasets.py
new file mode 100644
index 0000000..26753e4
--- /dev/null
+++ b/test/unit/jobs/test_datasets.py
@@ -0,0 +1,17 @@
+from galaxy.jobs.datasets import DatasetPath
+
+
+def test_dataset_path():
+    dataset_path_1 = DatasetPath( 1, "/galaxy/database/files/dataset_1.dat" )
+    assert dataset_path_1.dataset_id == 1
+    assert dataset_path_1.real_path == "/galaxy/database/files/dataset_1.dat"
+    assert dataset_path_1.false_path is None
+    assert dataset_path_1.mutable
+    assert str( dataset_path_1 ) == "/galaxy/database/files/dataset_1.dat"
+
+    dataset_path_2 = DatasetPath( 2, "/galaxy/database/files/dataset_2.dat", false_path="/mnt/galaxyData/files/dataset_2.dat", mutable=False )
+    assert dataset_path_2.dataset_id == 2
+    assert dataset_path_2.real_path == "/galaxy/database/files/dataset_2.dat"
+    assert dataset_path_2.false_path == "/mnt/galaxyData/files/dataset_2.dat"
+    assert not dataset_path_2.mutable
+    assert str( dataset_path_2 ) == "/mnt/galaxyData/files/dataset_2.dat"
diff --git a/test/unit/jobs/test_job_configuration.py b/test/unit/jobs/test_job_configuration.py
new file mode 100644
index 0000000..2a178a0
--- /dev/null
+++ b/test/unit/jobs/test_job_configuration.py
@@ -0,0 +1,161 @@
+import datetime
+import os
+import shutil
+import tempfile
+import unittest
+
+from galaxy.jobs import JobConfiguration
+from galaxy.util import bunch
+
+# File would be slightly more readable if contents were embedded directly, but
+# there are advantages to testing the documentation/examples.
+SIMPLE_JOB_CONF = os.path.join( os.path.dirname( __file__ ), "..", "..", "..", "config", "job_conf.xml.sample_basic" )
+ADVANCED_JOB_CONF = os.path.join( os.path.dirname( __file__ ), "..", "..", "..", "config", "job_conf.xml.sample_advanced" )
+
+
+class JobConfXmlParserTestCase( unittest.TestCase ):
+
+    def setUp( self ):
+        self.temp_directory = tempfile.mkdtemp()
+        self.config = bunch.Bunch(
+            job_config_file=os.path.join( self.temp_directory, "job_conf.xml" ),
+            use_tasked_jobs=False,
+            job_resource_params_file="/tmp/fake_absent_path",
+            config_dict={},
+        )
+        self.__write_config_from( SIMPLE_JOB_CONF )
+        self.app = bunch.Bunch( config=self.config, job_metrics=MockJobMetrics() )
+        self.__job_configuration = None
+
+    def tearDown( self ):
+        shutil.rmtree( self.temp_directory )
+
+    def test_load_simple_runner( self ):
+        runner_plugin = self.job_config.runner_plugins[ 0 ]
+        assert runner_plugin[ "id" ] == "local"
+        assert runner_plugin[ "load" ] == "galaxy.jobs.runners.local:LocalJobRunner"
+        assert runner_plugin[ "workers" ] == 4
+
+    def test_tasks_disabled( self ):
+        assert len( [ r for r in self.job_config.runner_plugins if r[ "id" ] == "tasks" ] ) == 0
+
+    def test_configuration_of_tasks( self ):
+        self.config.use_tasked_jobs = True
+        self.config.local_task_queue_workers = 5
+        task_runners = [ r for r in self.job_config.runner_plugins if r[ "id" ] == "tasks" ]
+        assert len( task_runners ) == 1
+        assert task_runners[ 0 ][ "workers" ] == 5
+
+    def test_load_simple_handler( self ):
+        main_handler = self.job_config.handlers[ "main" ]
+        assert main_handler[ 0 ] == "main"
+
+    def test_if_one_handler_implicit_default( self ):
+        assert self.job_config.default_handler_id == "main"
+
+    def test_explicit_handler_default( self ):
+        self.__with_advanced_config()
+        assert self.job_config.default_handler_id == "handlers"
+
+    def test_handler_tag_parsing( self ):
+        self.__with_advanced_config()
+        assert "handler0" in self.job_config.handlers[ "handlers" ]
+        assert "handler1" in self.job_config.handlers[ "handlers" ]
+
+    def test_load_simple_destination( self ):
+        local_dest = self.job_config.destinations[ "local" ][ 0 ]
+        assert local_dest.id == "local"
+        assert local_dest.runner == "local"
+
+    def test_load_destination_params( self ):
+        self.__with_advanced_config()
+        pbs_dest = self.job_config.destinations[ "pbs_longjobs" ][ 0 ]
+        assert pbs_dest.id == "pbs_longjobs"
+        assert pbs_dest.runner == "pbs"
+        dest_params = pbs_dest.params
+        assert dest_params[ "Resource_List" ] == "walltime=72:00:00"
+
+    def test_destination_tags( self ):
+        self.__with_advanced_config()
+        longjob_dests = self.job_config.destinations[ "longjobs" ]
+        assert len( longjob_dests ) == 2
+        assert longjob_dests[ 0 ].id == "pbs_longjobs"
+        assert longjob_dests[ 1 ].id == "remote_cluster"
+
+    def test_load_tool( self ):
+        self.__with_advanced_config()
+        baz_tool = self.job_config.tools[ "baz" ][ 0 ]
+        assert baz_tool.id == "baz"
+        assert baz_tool.handler == "special_handlers"
+        assert baz_tool.destination == "bigmem"
+
+    def test_load_tool_params( self ):
+        self.__with_advanced_config()
+        foo_tool = self.job_config.tools[ "foo" ][ 0 ]
+        assert foo_tool.params[ "source" ] == "trackster"
+
+    def test_default_limits( self ):
+        limits = self.job_config.limits
+        assert limits.registered_user_concurrent_jobs is None
+        assert limits.anonymous_user_concurrent_jobs is None
+        assert limits.walltime is None
+        assert limits.walltime_delta is None
+        assert limits.output_size is None
+        assert limits.destination_user_concurrent_jobs == {}
+        assert limits.destination_total_concurrent_jobs == {}
+
+    def test_limit_overrides( self ):
+        self.__with_advanced_config()
+        limits = self.job_config.limits
+        assert limits.registered_user_concurrent_jobs == 2
+        assert limits.anonymous_user_concurrent_jobs == 1
+        assert limits.destination_user_concurrent_jobs[ "local" ] == 1
+        assert limits.destination_user_concurrent_jobs[ "mycluster" ] == 2
+        assert limits.destination_user_concurrent_jobs[ "longjobs" ] == 1
+        assert limits.walltime_delta == datetime.timedelta( 0, 0, 0, 0, 0, 24 )
+
+    def test_env_parsing( self ):
+        self.__with_advanced_config()
+        env_dest = self.job_config.destinations[ "java_cluster" ][ 0 ]
+        assert len( env_dest.env ) == 4, len( env_dest.env )
+        assert env_dest.env[ 0 ][ "name" ] == "_JAVA_OPTIONS"
+        assert env_dest.env[ 0 ][ "value" ] == '-Xmx6G'
+
+        assert env_dest.env[ 1 ][ "name" ] == "ANOTHER_OPTION"
+        assert env_dest.env[ 1 ][ "raw" ] is True
+
+        assert env_dest.env[ 2 ][ "file" ] == "/mnt/java_cluster/environment_setup.sh"
+
+        assert env_dest.env[ 3 ][ "execute" ] == "module load javastuff/2.10"
+
+    def test_macro_expansion( self ):
+        self.__with_advanced_config()
+        for name in ["foo_small", "foo_medium", "foo_large", "foo_longrunning"]:
+            assert self.job_config.destinations[ name ]
+
+    # TODO: Add job metrics parsing test.
+
+    @property
+    def job_config( self ):
+        if not self.__job_configuration:
+            self.__job_configuration = JobConfiguration( self.app )
+        return self.__job_configuration
+
+    def __with_advanced_config( self ):
+        self.__write_config_from( ADVANCED_JOB_CONF )
+
+    def __write_config_from( self, path ):
+        self.__write_config( open( path, "r" ).read() )
+
+    def __write_config( self, contents ):
+        with open( os.path.join( self.temp_directory, "job_conf.xml" ), "w" ) as f:
+            f.write( contents )
+
+
+class MockJobMetrics( object ):
+
+    def __init__( self ):
+        pass
+
+    def set_destination_conf_element( self, id, element ):
+        pass
diff --git a/test/unit/jobs/test_job_output_checker.py b/test/unit/jobs/test_job_output_checker.py
new file mode 100644
index 0000000..4a4da7f
--- /dev/null
+++ b/test/unit/jobs/test_job_output_checker.py
@@ -0,0 +1,96 @@
+from unittest import TestCase
+
+from galaxy.jobs.error_level import StdioErrorLevel
+from galaxy.jobs.output_checker import check_output
+from galaxy.model import Job
+from galaxy.tools.parser.interface import ToolStdioRegex
+from galaxy.util.bunch import Bunch
+
+
+class OutputCheckerTestCase( TestCase ):
+
+    def setUp( self ):
+        self.tool = Bunch(
+            stdio_regexes=[],
+            stdio_exit_codes=[],
+        )
+        self.job = Job()
+        self.job.id = "test_id"
+        self.stdout = ''
+        self.stderr = ''
+        self.tool_exit_code = None
+
+    def test_default_no_stderr_success( self ):
+        self.__assertSuccessful()
+
+    def test_default_stderr_failure( self ):
+        self.stderr = 'foo'
+        self.__assertNotSuccessful()
+
+    def test_exit_code_error( self ):
+        mock_exit_code = Bunch( range_start=1, range_end=1, error_level=StdioErrorLevel.FATAL, desc=None )
+        self.tool.stdio_exit_codes.append( mock_exit_code )
+        self.tool_exit_code = 1
+        self.__assertNotSuccessful()
+
+    def test_exit_code_success( self ):
+        mock_exit_code = Bunch( range_start=1, range_end=1, error_level=StdioErrorLevel.FATAL, desc=None )
+        self.tool.stdio_exit_codes.append( mock_exit_code )
+        self.tool_exit_code = 0
+        self.__assertSuccessful()
+
+    def test_problematic_strings_matching( self ):
+        problematic_str = '\x80abc'
+        self.__add_regex( Bunch( match=r'.abc', stdout_match=False, stderr_match=True, error_level=StdioErrorLevel.FATAL, desc=None ) )
+        self.stderr = problematic_str
+        self.__assertNotSuccessful()
+
+    def test_problematic_strings_not_matching( self ):
+        problematic_str = '\x80abc'
+        self.__add_regex( Bunch( match=r'.abcd', stdout_match=False, stderr_match=True, error_level=StdioErrorLevel.FATAL, desc=None ) )
+        self.stderr = problematic_str
+        self.__assertSuccessful()
+
+    def test_stderr_regex_negative_match( self ):
+        regex = ToolStdioRegex()
+        regex.stderr_match = True
+        regex.match = "foobar"
+        self.__add_regex( regex )
+        self.stderr = "foo"
+        self.__assertSuccessful()
+
+    def test_stderr_regex_positive_match( self ):
+        regex = ToolStdioRegex()
+        regex.stderr_match = True
+        regex.match = "foo"
+        self.__add_regex( regex )
+        self.stderr = "foobar"
+        self.__assertNotSuccessful()
+
+    def test_stdout_ignored_for_stderr_regexes( self ):
+        regex = ToolStdioRegex()
+        regex.stderr_match = True
+        regex.match = "foo"
+        self.__add_regex( regex )
+        self.stdout = "foobar"
+        self.__assertSuccessful()
+
+    def test_stderr_ignored_for_stdout_regexes( self ):
+        regex = ToolStdioRegex()
+        regex.stdout_match = True
+        regex.match = "foo"
+        self.__add_regex( regex )
+        self.stderr = "foobar"
+        self.__assertSuccessful()
+
+    def __add_regex( self, regex ):
+        self.tool.stdio_regexes.append( regex )
+
+    def __assertSuccessful( self ):
+        self.assertTrue( self.__check_output() )
+
+    def __assertNotSuccessful( self ):
+        self.assertFalse( self.__check_output() )
+
+    def __check_output( self ):
+        return check_output( self.tool, self.stdout, self.stderr, self.tool_exit_code, self.job )
diff --git a/test/unit/jobs/test_job_wrapper.py b/test/unit/jobs/test_job_wrapper.py
new file mode 100644
index 0000000..544546c
--- /dev/null
+++ b/test/unit/jobs/test_job_wrapper.py
@@ -0,0 +1,209 @@
+import os
+from contextlib import contextmanager
+from unittest import TestCase
+
+from galaxy.jobs import (
+    JobWrapper,
+    TaskWrapper
+)
+from galaxy.model import (
+    Job,
+    Task,
+    User
+)
+from galaxy.tools import evaluation
+from galaxy.util.bunch import Bunch
+
+from tools_support import UsesApp
+
+TEST_TOOL_ID = "cufftest"
+TEST_VERSION_COMMAND = "bwa --version"
+TEST_DEPENDENCIES_COMMANDS = ". /galaxy/modules/bwa/0.5.9/env.sh"
+TEST_COMMAND = ""
+
+
+class BaseWrapperTestCase(UsesApp):
+
+    def setUp(self):
+        self.setup_app()
+        job = Job()
+        job.id = 345
+        job.tool_id = TEST_TOOL_ID
+        job.user = User()
+        self.model_objects = {Job: {345: job}}
+        self.app.model.context = MockContext(self.model_objects)
+
+        self.app.toolbox = MockToolbox(MockTool(self))
+        self.working_directory = os.path.join(self.test_directory, "working")
+        self.app.object_store = MockObjectStore(self.working_directory)
+
+        self.queue = MockJobQueue(self.app)
+        self.job = job
+
+    def tearDown(self):
+        self.tear_down_app()
+
+    @contextmanager
+    def _prepared_wrapper(self):
+        wrapper = self._wrapper()
+        with _mock_tool_evaluator(MockEvaluator):
+            wrapper.prepare()
+            yield wrapper
+
+    def test_version_path(self):
+        wrapper = self._wrapper()
+        version_path = wrapper.get_version_string_path()
+        expected_path = os.path.join(self.test_directory, "new_files", "GALAXY_VERSION_STRING_345")
+        self.assertEqual(version_path, expected_path)
+
+    def test_prepare_sets_command_line(self):
+        with self._prepared_wrapper() as wrapper:
+            assert TEST_COMMAND in wrapper.command_line
+
+    def test_prepare_sets_dependency_shell_commands(self):
+        with self._prepared_wrapper() as wrapper:
+            assert TEST_DEPENDENCIES_COMMANDS == wrapper.dependency_shell_commands
+
+
+class JobWrapperTestCase(BaseWrapperTestCase, TestCase):
+
+    def _wrapper(self):
+        return JobWrapper(self.job, self.queue)
+
+    def test_prepare_sets_version_command(self):
+        with self._prepared_wrapper() as wrapper:
+            assert TEST_VERSION_COMMAND in wrapper.write_version_cmd, wrapper.write_version_cmd
+
+
+class TaskWrapperTestCase(BaseWrapperTestCase, TestCase):
+
+    def setUp(self):
+        super(TaskWrapperTestCase, self).setUp()
+        self.task = Task(self.job, self.working_directory, "prepare_bwa_job.sh")
+        self.task.id = 4
+        self.model_objects[Task] = {4: self.task}
+
+    def _wrapper(self):
+        return TaskWrapper(self.task, self.queue)
+
+    def test_prepare_sets_no_version_command(self):
+        with self._prepared_wrapper() as wrapper:
+            assert wrapper.write_version_cmd is None
+
+
+class MockEvaluator(object):
+
+    def __init__(self, app, tool, job, local_working_directory):
+        self.app = app
+        self.tool = tool
+        self.job = job
+        self.local_working_directory = local_working_directory
+        self.param_dict = {}
+
+    def set_compute_environment(self, *args, **kwds):
+        pass
+
+    def build(self):
+        return TEST_COMMAND, [], []
+
+
+class MockJobQueue(object):
+
+    def __init__(self, app):
+        self.app = app
+        self.dispatcher = MockJobDispatcher(app)
+
+
+class MockJobDispatcher(object):
+
+    def __init__(self, app):
+        pass
+
+    def url_to_destination(self):
+        pass
+
+
+class MockContext(object):
+
+    def __init__(self, model_objects):
+        self.expunged_all = False
+        self.flushed = False
+        self.model_objects = model_objects
+        self.created_objects = []
+
+    def expunge_all(self):
+        self.expunged_all = True
+
+    def query(self, clazz):
+        return MockQuery(self.model_objects.get(clazz))
+
+    def flush(self):
+        self.flushed = True
+
+    def add(self, object):
+        self.created_objects.append(object)
+
+
+class MockQuery(object):
+
+    def __init__(self, class_objects):
+        self.class_objects = class_objects
+
+    def filter_by(self, **kwds):
+        return Bunch(first=lambda: None)
+
+    def get(self, id):
+        return self.class_objects.get(id, None)
+
+
+class MockTool(object):
+
+    def __init__(self, app):
+        self.version_string_cmd = TEST_VERSION_COMMAND
+        self.tool_dir = "/path/to/tools"
+        self.dependencies = []
+
+    def build_dependency_shell_commands(self, job_directory):
+        return TEST_DEPENDENCIES_COMMANDS
+
+
+class MockToolbox(object):
+
+    def __init__(self, test_tool):
+        self.test_tool = test_tool
+
+    def get(self, tool_id, default=None):
+        assert tool_id == TEST_TOOL_ID
+        return self.test_tool
+
+    def get_tool( self, tool_id, tool_version, exact=False ):
+        tool = self.get(tool_id)
+        return tool
+
+
+class MockObjectStore(object):
+
+    def __init__(self, working_directory):
+        self.working_directory = working_directory
+        os.makedirs(working_directory)
+
+    def create(self, *args, **kwds):
+        pass
+
+    def get_filename(self, *args, **kwds):
+        if kwds.get("base_dir", "") == "job_work":
+            return self.working_directory
+        return None
+
+
+# Poor man's mocking. Need to get a real mocking library as a real Galaxy
+# development dependency.
+@contextmanager
+def _mock_tool_evaluator(mock_constructor):
+    name = evaluation.ToolEvaluator.__name__
+    real_class = getattr(evaluation, name)
+    try:
+        setattr(evaluation, name, mock_constructor)
+        yield
+    finally:
+        setattr(evaluation, name, real_class)
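+
+# Example use of the helper above, as in BaseWrapperTestCase._prepared_wrapper:
+#
+#     with _mock_tool_evaluator(MockEvaluator):
+#         wrapper.prepare()  # runs against MockEvaluator, not ToolEvaluator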
diff --git a/test/unit/jobs/test_mapper.py b/test/unit/jobs/test_mapper.py
new file mode 100644
index 0000000..cf68bfc
--- /dev/null
+++ b/test/unit/jobs/test_mapper.py
@@ -0,0 +1,178 @@
+import uuid
+
+from galaxy.jobs import JobDestination
+from galaxy.jobs.mapper import (
+    ERROR_MESSAGE_NO_RULE_FUNCTION,
+    ERROR_MESSAGE_RULE_FUNCTION_NOT_FOUND,
+    JobRunnerMapper,
+)
+from galaxy.util import bunch
+
+from . import test_rules
+
+WORKFLOW_UUID = uuid.uuid1().hex
+TOOL_JOB_DESTINATION = JobDestination()
+DYNAMICALLY_GENERATED_DESTINATION = JobDestination()
+
+
+def test_static_mapping():
+    mapper = __mapper()
+    assert mapper.get_job_destination( {} ) is TOOL_JOB_DESTINATION
+
+
+def test_caching():
+    mapper = __mapper()
+    mapper.get_job_destination( {} )
+    mapper.get_job_destination( {} )
+    assert mapper.job_wrapper.tool.call_count == 1
+
+
+def test_dynamic_mapping():
+    mapper = __mapper( __dynamic_destination( dict( function="upload" ) ) )
+    assert mapper.get_job_destination( {} ) is DYNAMICALLY_GENERATED_DESTINATION
+    assert mapper.job_config.rule_response == "local_runner"
+
+
+def test_dynamic_mapping_priorities():
+    mapper = __mapper( __dynamic_destination( dict( function="tophat" ) ) )
+    assert mapper.get_job_destination( {} ) is DYNAMICALLY_GENERATED_DESTINATION
+    # The next line verifies we are using the definition in 20_instance.py
+    # instead of 10_site.py.
+    assert mapper.job_config.rule_response == "instance_dest_id"
+
+
+def test_dynamic_mapping_defaults_to_tool_id_as_rule():
+    mapper = __mapper( __dynamic_destination( ) )
+    assert mapper.get_job_destination( {} ) is DYNAMICALLY_GENERATED_DESTINATION
+    assert mapper.job_config.rule_response == "tool1_dest_id"
+
+
+def test_dynamic_mapping_job_conf_params():
+    mapper = __mapper( __dynamic_destination( dict( function="check_job_conf_params", param1="7" ) ) )
+    assert mapper.get_job_destination( {} ) is DYNAMICALLY_GENERATED_DESTINATION
+    assert mapper.job_config.rule_response == "sent_7_dest_id"
+
+
+def test_dynamic_mapping_function_parameters():
+    mapper = __mapper( __dynamic_destination( dict( function="check_rule_params" ) ) )
+    assert mapper.get_job_destination( {} ) is DYNAMICALLY_GENERATED_DESTINATION
+    assert mapper.job_config.rule_response == "all_passed"
+
+
+def test_dynamic_mapping_resource_parameters():
+    mapper = __mapper( __dynamic_destination( dict( function="check_resource_params" ) ) )
+    assert mapper.get_job_destination( {} ) is DYNAMICALLY_GENERATED_DESTINATION
+    assert mapper.job_config.rule_response == "have_resource_params"
+
+
+def test_dynamic_mapping_workflow_invocation_parameter():
+    mapper = __mapper( __dynamic_destination( dict( function="check_workflow_invocation_uuid" ) ) )
+    assert mapper.get_job_destination( {} ) is DYNAMICALLY_GENERATED_DESTINATION
+    assert mapper.job_config.rule_response == WORKFLOW_UUID
+
+
+def test_dynamic_mapping_no_function():
+    dest = __dynamic_destination( dict( ) )
+    mapper = __mapper( dest )
+    mapper.job_wrapper.tool.all_ids = [ "no_such_function" ]
+    error_message = ERROR_MESSAGE_NO_RULE_FUNCTION % dest
+    __assert_mapper_errors_with_message( mapper, error_message )
+
+
+def test_dynamic_mapping_missing_function():
+    dest = __dynamic_destination( dict( function="missing_func" ) )
+    mapper = __mapper( dest )
+    mapper.job_wrapper.tool.all_ids = [ "no_such_function" ]
+    error_message = ERROR_MESSAGE_RULE_FUNCTION_NOT_FOUND % ( "missing_func" )
+    __assert_mapper_errors_with_message( mapper, error_message )
+
+
+def __assert_mapper_errors_with_message( mapper, message ):
+    exception = None
+    try:
+        mapper.get_job_destination( {} )
+    except Exception as e:
+        exception = e
+    assert exception
+    assert str( exception ) == message, "%s != %s" % ( str( exception ), message )
+
+
+def __mapper( tool_job_destination=TOOL_JOB_DESTINATION ):
+    job_wrapper = MockJobWrapper( tool_job_destination )
+    job_config = MockJobConfig()
+
+    mapper = JobRunnerMapper(
+        job_wrapper,
+        {},
+        job_config
+    )
+    mapper.rules_module = test_rules
+    return mapper
+
+
+def __dynamic_destination( params={} ):
+    return JobDestination( runner="dynamic", params=params )
+
+
+class MockJobConfig( object ):
+
+    def __init__( self ):
+        self.rule_response = None
+        self.dynamic_params = None
+
+    def get_destination( self, rep ):
+        # Called to transform dynamic job destination rule response
+        # from destination id/runner url into a dynamic job destination.
+        self.rule_response = rep
+        return DYNAMICALLY_GENERATED_DESTINATION
+
+
+class MockJobWrapper( object ):
+
+    def __init__( self, tool_job_destination ):
+        self.tool = MockTool( tool_job_destination )
+        self.job_id = 12345
+        self.app = object()
+
+    def is_mock_job_wrapper( self ):
+        return True
+
+    def get_job(self):
+        raw_params = {
+            "threshold": 8,
+            "__workflow_invocation_uuid__": WORKFLOW_UUID,
+        }
+
+        def get_param_values( app, ignore_errors ):
+            assert app == self.app
+            params = raw_params.copy()
+            params[ "__job_resource" ] = {
+                "__job_resource__select": "True",
+                "memory": "8gb"
+            }
+            return params
+
+        return bunch.Bunch(
+            user=bunch.Bunch(
+                id=6789,
+                email="test at example.com"
+            ),
+            raw_param_dict=lambda: raw_params,
+            get_param_values=get_param_values
+        )
+
+
+class MockTool( object ):
+
+    def __init__( self, tool_job_destination ):
+        self.id = "testtoolshed/devteam/tool1/23abcd13123"
+        self.call_count = 0
+        self.tool_job_destination = tool_job_destination
+        self.all_ids = [ "testtoolshed/devteam/tool1/23abcd13123", "tool1" ]
+
+    def get_job_destination( self, params ):
+        self.call_count += 1
+        return self.tool_job_destination
+
+    def is_mock_tool( self ):
+        return True
diff --git a/test/unit/jobs/test_rule_helper.py b/test/unit/jobs/test_rule_helper.py
new file mode 100644
index 0000000..61c2ed9
--- /dev/null
+++ b/test/unit/jobs/test_rule_helper.py
@@ -0,0 +1,196 @@
+import uuid
+
+from galaxy import model
+from galaxy.jobs.rule_helper import RuleHelper
+from galaxy.model import mapping
+from galaxy.util import bunch
+
+USER_EMAIL_1 = "u1 at example.com"
+USER_EMAIL_2 = "u2 at example.com"
+USER_EMAIL_3 = "u3 at example.com"
+
+
+def test_job_count():
+    rule_helper = __rule_helper()
+    __assert_job_count_is( 0, rule_helper )
+
+    __setup_fixtures( rule_helper.app )
+
+    # Test raw counts for users...
+    __assert_job_count_is( 7, rule_helper, for_user_email=USER_EMAIL_1 )
+    __assert_job_count_is( 2, rule_helper, for_user_email=USER_EMAIL_2 )
+    __assert_job_count_is( 0, rule_helper, for_user_email=USER_EMAIL_3 )
+
+    # Test destination counts
+    __assert_job_count_is( 2, rule_helper, for_destination="local" )
+    __assert_job_count_is( 7, rule_helper, for_destination="cluster1" )
+
+    __assert_job_count_is( 9, rule_helper, for_destinations=["cluster1", "local"] )
+
+    # Test per user destination counts
+    __assert_job_count_is( 5, rule_helper, for_destination="cluster1", for_user_email=USER_EMAIL_1 )
+    __assert_job_count_is( 2, rule_helper, for_destination="local", for_user_email=USER_EMAIL_1 )
+    __assert_job_count_is( 7, rule_helper, for_destinations=["cluster1", "local"], for_user_email=USER_EMAIL_1 )
+
+    __assert_job_count_is( 2, rule_helper, for_destination="cluster1", for_user_email=USER_EMAIL_2 )
+    __assert_job_count_is( 0, rule_helper, for_destination="local", for_user_email=USER_EMAIL_2 )
+
+    # Test per user, per state destination counts
+    __assert_job_count_is( 3, rule_helper, for_destination="cluster1", for_user_email=USER_EMAIL_1, for_job_states=[ "queued" ] )
+    __assert_job_count_is( 2, rule_helper, for_destination="cluster1", for_user_email=USER_EMAIL_1, for_job_states=[ "running" ] )
+    __assert_job_count_is( 0, rule_helper, for_destination="cluster1", for_user_email=USER_EMAIL_1, for_job_states=[ "error" ] )
+    __assert_job_count_is( 5, rule_helper, for_destination="cluster1", for_user_email=USER_EMAIL_1, for_job_states=[ "queued", "running", "error" ] )
+
+
+def __assert_job_count_is( expected_count, rule_helper, **kwds ):
+    actual_count = rule_helper.job_count( **kwds )
+
+    if expected_count != actual_count:
+        template = "Expected job count %d, actual job count %s for params %s"
+        raise AssertionError( template % ( expected_count, actual_count, kwds ) )
+
+
+def __setup_fixtures( app ):
+    # user1 has 3 jobs queued and 2 jobs running on cluster1, plus one queued
+    # and one running job on local. user2 has a queued and a running job on
+    # the cluster. user3 has no jobs.
+    user1 = model.User( email=USER_EMAIL_1, password="pass1" )
+    user2 = model.User( email=USER_EMAIL_2, password="pass2" )
+    user3 = model.User( email=USER_EMAIL_3, password="pass3" )
+
+    app.add( user1, user2, user3 )
+
+    app.add( __new_job( user=user1, destination_id="cluster1", state="queued" ) )
+    app.add( __new_job( user=user1, destination_id="cluster1", state="queued" ) )
+    app.add( __new_job( user=user1, destination_id="cluster1", state="queued" ) )
+    app.add( __new_job( user=user1, destination_id="cluster1", state="running" ) )
+    app.add( __new_job( user=user1, destination_id="cluster1", state="running" ) )
+
+    app.add( __new_job( user=user1, destination_id="local", state="queued" ) )
+    app.add( __new_job( user=user1, destination_id="local", state="running" ) )
+
+    app.add( __new_job( user=user2, destination_id="cluster1", state="queued" ) )
+    app.add( __new_job( user=user2, destination_id="cluster1", state="running" ) )
+
+
+def test_choose_one_unhashed():
+    rule_helper = __rule_helper()
+
+    # Random choices if hash not set.
+    chosen_ones = set([])
+    __do_a_bunch( lambda: chosen_ones.add(rule_helper.choose_one(['a', 'b'])) )
+
+    assert chosen_ones == set(['a', 'b'])
+
+
+def test_choose_one_hashed():
+    rule_helper = __rule_helper()
+
+    # Hashed, so all chosen ones should be the same...
+    chosen_ones = set([])
+    __do_a_bunch( lambda: chosen_ones.add(rule_helper.choose_one(['a', 'b'], hash_value=1234)) )
+    assert len( chosen_ones ) == 1
+
+    # ... and also verify that hashing works on strings
+    chosen_ones = set([])
+    __do_a_bunch( lambda: chosen_ones.add(rule_helper.choose_one(['a', 'b'], hash_value="i am a string")) )
+
+    assert len( chosen_ones ) == 1
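+
+# A sketch of the semantics the two tests above pin down (hypothetical, not
+# RuleHelper's actual internals): with no hash_value the pick is effectively
+# random.choice; with a hash_value an index is derived deterministically,
+# e.g. hash(str(hash_value)) % len(choices), so repeated calls agree.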
+
+
+def test_job_hash_unique_by_default( ):
+    rule_helper = __rule_helper()
+    job1, job2 = __two_jobs_in_a_history()
+
+    assert rule_helper.job_hash( job1 ) != rule_helper.job_hash( job2 )
+
+
+def test_job_hash_history( ):
+    rule_helper = __rule_helper()
+    job1, job2 = __two_jobs_in_a_history()
+
+    __assert_same_hash( rule_helper, job1, job2, hash_by="history" )
+
+
+def test_job_hash_workflow_invocation():
+    rule_helper = __rule_helper()
+    job1, job2 = __two_jobs()
+    wi_uuid = uuid.uuid1().hex
+
+    job1.add_parameter( "__workflow_invocation_uuid__", wi_uuid )
+    job2.add_parameter( "__workflow_invocation_uuid__", wi_uuid )
+
+    __assert_same_hash( rule_helper, job1, job2, hash_by="workflow_invocation" )
+
+
+def test_job_hash_fallback():
+    rule_helper = __rule_helper()
+    job1, job2 = __two_jobs_in_a_history()
+
+    __assert_same_hash( rule_helper, job1, job2, hash_by="workflow_invocation,history" )
+
+
+def test_should_burst( ):
+    rule_helper = __rule_helper()
+    __setup_fixtures( rule_helper.app )
+    # cluster1 fixture has 4 queued jobs, 3 running
+    assert rule_helper.should_burst( [ "cluster1" ], "7" )
+    assert not rule_helper.should_burst( [ "cluster1" ], "10" )
+
+    assert rule_helper.should_burst( [ "cluster1" ], "2", job_states="queued" )
+    assert not rule_helper.should_burst( [ "cluster1" ], "6", job_states="queued" )
+
+
+def __assert_same_hash( rule_helper, job1, job2, hash_by ):
+    job1_hash = rule_helper.job_hash( job1, hash_by=hash_by )
+    job2_hash = rule_helper.job_hash( job2, hash_by=hash_by )
+    assert job1_hash == job2_hash
+
+
+def __two_jobs_in_a_history():
+    job1, job2 = __two_jobs()
+    job1.history_id = 4
+    job2.history_id = 4
+    return job1, job2
+
+
+def __two_jobs( ):
+    job1 = model.Job()
+    job1.id = 1
+    job2 = model.Job()
+    job2.id = 2
+    return job1, job2
+
+
+def __do_a_bunch( work ):
+    for i in range( 20 ):
+        work()
+
+
+def __new_job( **kwds ):
+    job = model.Job()
+    for key, value in kwds.items():
+        setattr( job, key, value )
+    return job
+
+
+def __rule_helper():
+    app = MockApp()
+    rule_helper = RuleHelper( app )
+    return rule_helper
+
+
+class MockApp( object ):
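+    # Minimal application stub for RuleHelper: a bare config Bunch and an in-memory SQLite model.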
+
+    def __init__( self ):
+        self.config = bunch.Bunch( )
+        self.model = mapping.init(
+            "/tmp",
+            "sqlite:///:memory:",
+            create_tables=True
+        )
+
+    def add( self, *args ):
+        for arg in args:
+            self.model.context.add( arg )
+        self.model.context.flush()
diff --git a/test/unit/jobs/test_rules/10_site.py b/test/unit/jobs/test_rules/10_site.py
new file mode 100644
index 0000000..f170ca4
--- /dev/null
+++ b/test/unit/jobs/test_rules/10_site.py
@@ -0,0 +1,55 @@
+
+
+def upload():
+    return 'local_runner'
+
+
+def tophat():
+    return 'site_dest_id'
+
+
+def tool1():
+    # tool1 is the id of the test tool mocked out in test_mapper.py; when no
+    # function name is specified in the dynamic destination, this function
+    # should be used by default.
+    return 'tool1_dest_id'
+
+
+def check_rule_params(
+    job_id,
+    tool,
+    tool_id,
+    job_wrapper,
+    rule_helper,
+    app,
+    job,
+    user,
+    user_email,
+):
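+    # Verify the standard arguments the dynamic job mapper injects into rule functions.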
+    assert job_id == 12345
+    assert tool.is_mock_tool()
+    assert tool_id == "testtoolshed/devteam/tool1/23abcd13123"
+    assert job_wrapper.is_mock_job_wrapper()
+    assert app == job_wrapper.app
+    assert rule_helper is not None
+
+    assert job.user == user
+    assert user.id == 6789
+    assert user_email == "test@example.com"
+
+    return "all_passed"
+
+
+def check_job_conf_params( param1 ):
+    assert param1 == "7"
+    return "sent_7_dest_id"
+
+
+def check_resource_params( resource_params ):
+    assert resource_params["memory"] == "8gb"
+    return "have_resource_params"
+
+
+def check_workflow_invocation_uuid( workflow_invocation_uuid ):
+    return workflow_invocation_uuid
diff --git a/test/unit/jobs/test_rules/20_instance.py b/test/unit/jobs/test_rules/20_instance.py
new file mode 100644
index 0000000..b8462ea
--- /dev/null
+++ b/test/unit/jobs/test_rules/20_instance.py
@@ -0,0 +1,4 @@
+
+def tophat():
+    # This should override definition in 10_site.py
+    return 'instance_dest_id'
diff --git a/test/unit/jobs/test_rules/__init__.py b/test/unit/jobs/test_rules/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/jobs/test_runner_local.py b/test/unit/jobs/test_runner_local.py
new file mode 100644
index 0000000..079968f
--- /dev/null
+++ b/test/unit/jobs/test_runner_local.py
@@ -0,0 +1,176 @@
+import os
+import threading
+import time
+from unittest import TestCase
+
+from galaxy import model
+from galaxy.jobs import metrics
+from galaxy.jobs.runners import local
+from galaxy.util import bunch
+
+from tools_support import (
+    UsesApp,
+    UsesTools
+)
+
+
+class TestLocalJobRunner( TestCase, UsesApp, UsesTools ):
+
+    def setUp( self ):
+        self.setup_app()
+        self._init_tool()
+        self.app.job_metrics = metrics.JobMetrics()
+        self.job_wrapper = MockJobWrapper( self.app, self.test_directory, self.tool )
+
+    def tearDown( self ):
+        self.tear_down_app()
+
+    def test_run( self ):
+        self.job_wrapper.command_line = "echo HelloWorld"
+        runner = local.LocalJobRunner( self.app, 1 )
+        runner.queue_job( self.job_wrapper )
+        assert self.job_wrapper.stdout.strip() == "HelloWorld"
+
+    def test_galaxy_lib_on_path( self ):
+        self.job_wrapper.command_line = '''python -c "import galaxy.util"'''
+        runner = local.LocalJobRunner( self.app, 1 )
+        runner.queue_job( self.job_wrapper )
+        assert self.job_wrapper.exit_code == 0
+
+    def test_default_slots( self ):
+        self.job_wrapper.command_line = '''echo $GALAXY_SLOTS'''
+        runner = local.LocalJobRunner( self.app, 1 )
+        runner.queue_job( self.job_wrapper )
+        assert self.job_wrapper.stdout.strip() == "1"
+
+    def test_slots_override( self ):
+        # Set local_slots in job destination to specify slots for
+        # local job runner.
+        self.job_wrapper.job_destination.params[ "local_slots" ] = 3
+        self.job_wrapper.command_line = '''echo $GALAXY_SLOTS'''
+        runner = local.LocalJobRunner( self.app, 1 )
+        runner.queue_job( self.job_wrapper )
+        assert self.job_wrapper.stdout.strip() == "3"
+
+    def test_exit_code( self ):
+        self.job_wrapper.command_line = '''sh -c "exit 4"'''
+        runner = local.LocalJobRunner( self.app, 1 )
+        runner.queue_job( self.job_wrapper )
+        assert self.job_wrapper.exit_code == 4
+
+    def test_metadata_gets_set( self ):
+        runner = local.LocalJobRunner( self.app, 1 )
+        runner.queue_job( self.job_wrapper )
+        assert os.path.exists( self.job_wrapper.mock_metadata_path )
+
+    def test_metadata_gets_set_if_embedded( self ):
+        self.job_wrapper.job_destination.params[ "embed_metadata_in_job" ] = "True"
+
+        # Kill off cruft for _handle_metadata_externally and make sure job still works...
+        self.job_wrapper.external_output_metadata = None
+        self.app.datatypes_registry.set_external_metadata_tool = None
+
+        runner = local.LocalJobRunner( self.app, 1 )
+        runner.queue_job( self.job_wrapper )
+        assert os.path.exists( self.job_wrapper.mock_metadata_path )
+
+    def test_stopping_job( self ):
+        self.job_wrapper.command_line = '''python -c "import time; time.sleep(15)"'''
+        runner = local.LocalJobRunner( self.app, 1 )
+
+        def queue():
+            runner.queue_job( self.job_wrapper )
+
+        t = threading.Thread(target=queue)
+        t.start()
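+        # Busy-wait until the runner thread assigns an external id, then stop the job.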
+        while True:
+            if self.job_wrapper.external_id:
+                break
+            time.sleep( .01 )
+        external_id = self.job_wrapper.external_id
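+        # Minimal Job stand-in exposing only the accessors stop_job() uses.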
+        mock_job = bunch.Bunch(
+            get_external_output_metadata=lambda: None,
+            get_job_runner_external_id=lambda: str(external_id),
+            get_id=lambda: 1
+        )
+        runner.stop_job( mock_job )
+        t.join(1)
+
+
+class MockJobWrapper( object ):
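+    # Stand-in for JobWrapper: supplies the attributes the local runner reads and records finish() results.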
+
+    def __init__( self, app, test_directory, tool ):
+        working_directory = os.path.join( test_directory, "workdir" )
+        tool_working_directory = os.path.join( working_directory, "working" )
+        os.makedirs( tool_working_directory )
+        self.app = app
+        self.tool = tool
+        self.requires_containerization = False
+        self.state = model.Job.states.QUEUED
+        self.command_line = "echo HelloWorld"
+        self.environment_variables = []
+        self.commands_in_new_shell = False
+        self.prepare_called = False
+        self.write_version_cmd = None
+        self.dependency_shell_commands = None
+        self.working_directory = working_directory
+        self.tool_working_directory = tool_working_directory
+        self.requires_setting_metadata = True
+        self.job_destination = bunch.Bunch( id="default", params={} )
+        self.galaxy_lib_dir = os.path.abspath( "lib" )
+        self.job_id = 1
+        self.external_id = None
+        self.output_paths = [ '/tmp/output1.dat' ]
+        self.mock_metadata_path = os.path.abspath( os.path.join( test_directory, "METADATA_SET" ) )
+        self.metadata_command = "touch %s" % self.mock_metadata_path
+        self.galaxy_virtual_env = None
+        self.shell = "/bin/bash"
+
+        # Cruft for setting metadata externally, axe at some point.
+        self.external_output_metadata = bunch.Bunch(
+            set_job_runner_external_pid=lambda pid, session: None
+        )
+        self.app.datatypes_registry.set_external_metadata_tool = bunch.Bunch(
+            build_dependency_shell_commands=lambda: []
+        )
+
+    def prepare( self ):
+        self.prepare_called = True
+
+    def set_job_destination( self, job_destination, external_id ):
+        self.external_id = external_id
+
+    def get_command_line( self ):
+        return self.command_line
+
+    def get_id_tag( self ):
+        return "1"
+
+    def get_state( self ):
+        return self.state
+
+    def change_state( self, state ):
+        self.state = state
+
+    def get_output_fnames( self ):
+        return []
+
+    def get_job( self ):
+        return model.Job()
+
+    def setup_external_metadata( self, **kwds ):
+        return self.metadata_command
+
+    def get_env_setup_clause( self ):
+        return ""
+
+    def has_limits( self ):
+        return False
+
+    def finish( self, stdout, stderr, exit_code ):
+        self.stdout = stdout
+        self.stderr = stderr
+        self.exit_code = exit_code
diff --git a/test/unit/jobs/test_runner_params.py b/test/unit/jobs/test_runner_params.py
new file mode 100644
index 0000000..e61ace2
--- /dev/null
+++ b/test/unit/jobs/test_runner_params.py
@@ -0,0 +1,48 @@
+from galaxy.jobs import runners
+
+
+def test_default_specs():
+    # recheck_missing_job_retries is integer >= 0
+    params = runners.RunnerParams( specs=runners.BaseJobRunner.DEFAULT_SPECS, params=dict( recheck_missing_job_retries="1" ) )
+    assert params.recheck_missing_job_retries == 1
+    assert params["recheck_missing_job_retries"] == 1
+
+    exception_raised = False
+    try:
+        runners.RunnerParams( specs=runners.BaseJobRunner.DEFAULT_SPECS, params=dict( recheck_missing_job_retries=-1 ) )
+    except Exception:
+        exception_raised = True
+    assert exception_raised
+
+
+def test_missing_parameter():
+    exception = None
+    try:
+        runners.RunnerParams( specs={}, params=dict( foo="bar" ) )
+    except Exception as e:
+        exception = e
+    assert exception.message == runners.JOB_RUNNER_PARAMETER_UNKNOWN_MESSAGE % "foo"
+
+
+def test_invalid_parameter():
+    exception = None
+    try:
+        runners.RunnerParams( specs=dict( foo=dict( valid=lambda x: x != "bar", default="baz" ) ), params=dict( foo="bar" ) )
+    except Exception as e:
+        exception = e
+    assert exception.message == runners.JOB_RUNNER_PARAMETER_VALIDATION_FAILED_MESSAGE % "foo"
+
+
+def test_map_problem():
+    exception = None
+    try:
+        runners.RunnerParams( specs=dict( foo=dict( map=lambda x: 1 / 0, default="baz" ) ), params=dict( foo="bar" ) )
+    except Exception as e:
+        exception = e
+    assert exception.message == runners.JOB_RUNNER_PARAMETER_MAP_PROBLEM_MESSAGE % ( "foo", "bar" )
+
+
+def test_param_default():
+    runner_params = runners.RunnerParams( specs=dict( foo=dict( default="baz" ) ), params={} )
+    assert runner_params["foo"] == "baz"
+    assert runner_params.foo == "baz"
diff --git a/test/unit/managers/__init__.py b/test/unit/managers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/managers/base.py b/test/unit/managers/base.py
new file mode 100644
index 0000000..4a1eaf4
--- /dev/null
+++ b/test/unit/managers/base.py
@@ -0,0 +1,142 @@
+"""
+"""
+from __future__ import print_function
+
+import json
+import os
+import sys
+import unittest
+
+import sqlalchemy
+from six import string_types
+
+from galaxy.managers.users import UserManager
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock
+
+# =============================================================================
+admin_email = 'admin@admin.admin'
+admin_users = admin_email
+default_password = '123456'
+
+
+# =============================================================================
+class BaseTestCase( unittest.TestCase ):
+
+    @classmethod
+    def setUpClass( cls ):
+        print( '\n', '-' * 20, 'begin class', cls )
+
+    @classmethod
+    def tearDownClass( cls ):
+        print( '\n', '-' * 20, 'end class', cls )
+
+    def __init__( self, *args ):
+        unittest.TestCase.__init__( self, *args )
+
+    def setUp( self ):
+        self.log( '.' * 20, 'begin test', self )
+        self.set_up_mocks()
+        self.set_up_managers()
+        self.set_up_trans()
+
+    def set_up_mocks( self ):
+        self.trans = galaxy_mock.MockTrans( admin_users=admin_users )
+        self.app = self.trans.app
+
+    def set_up_managers( self ):
+        self.user_manager = UserManager( self.app )
+
+    def set_up_trans( self ):
+        self.admin_user = self.user_manager.create( email=admin_email, username='admin', password=default_password )
+        self.trans.set_user( self.admin_user )
+        self.trans.set_history( None )
+
+    def tearDown( self ):
+        self.log( '.' * 20, 'end test', self, '\n' )
+
+    def log( self, *args, **kwargs ):
+        print( *args, **kwargs )
+
+    # ---- additional test types
+    TYPES_NEEDING_NO_SERIALIZERS = ( string_types, bool, type( None ), int, float )
+
+    def assertKeys( self, obj, key_list ):
+        self.assertEqual( sorted( obj.keys() ), sorted( key_list ) )
+
+    def assertHasKeys( self, obj, key_list ):
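+        # fail() raises on the first missing key, so the for-else runs only when every key was found.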
+        for key in key_list:
+            if key not in obj:
+                self.fail( 'Missing key: ' + key )
+        else:
+            self.assertTrue( True, 'keys found in object' )
+
+    def assertNullableBasestring( self, item ):
+        if not isinstance( item, ( string_types, type( None ) ) ):
+            self.fail( 'Non-nullable basestring: ' + str( type( item ) ) )
+        # TODO: len mod 8 and hex re
+        self.assertTrue( True, 'is nullable basestring: ' + str( item ) )
+
+    def assertEncodedId( self, item ):
+        if not isinstance( item, string_types ):
+            self.fail( 'Non-string: ' + str( type( item ) ) )
+        # TODO: len mod 8 and hex re
+        self.assertTrue( True, 'is id: ' + item )
+
+    def assertNullableEncodedId( self, item ):
+        if item is None:
+            self.assertTrue( True, 'nullable id is None' )
+        else:
+            self.assertEncodedId( item )
+
+    def assertDate( self, item ):
+        if not isinstance( item, string_types ):
+            self.fail( 'Non-string: ' + str( type( item ) ) )
+        # TODO: no great way to parse this fully (w/o python-dateutil)
+        # TODO: re?
+        self.assertTrue( True, 'is date: ' + item )
+
+    def assertUUID( self, item ):
+        if not isinstance( item, string_types ):
+            self.fail( 'Non-string: ' + str( type( item ) ) )
+        # TODO: re for d4d76d69-80d4-4ed7-80c7-211ebcc1a358
+        self.assertTrue( True, 'is uuid: ' + item )
+
+    def assertORMFilter( self, item, msg=None ):
+        if not isinstance( item, sqlalchemy.sql.elements.BinaryExpression ):
+            self.fail( 'Not an orm filter: ' + str( type( item ) ) )
+        self.assertTrue( True, msg or ( 'is an orm filter: ' + str( item ) ) )
+
+    def assertFnFilter( self, item, msg=None ):
+        if not item or not callable( item ):
+            self.fail( 'Not a fn filter: ' + str( type( item ) ) )
+        self.assertTrue( True, msg or ( 'is a fn filter: ' + str( item ) ) )
+
+    def assertIsJsonifyable( self, item ):
+        # TODO: use galaxy's override
+        self.assertIsInstance( json.dumps( item ), string_types )
+
+
+class CreatesCollectionsMixin( object ):
+
+    def build_element_identifiers( self, elements ):
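+        # Build the element_identifiers list of { src, name, id } dicts expected by the collection manager.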
+        identifier_list = []
+        for element in elements:
+            src = 'hda'
+            # if isinstance( element, model.DatasetCollection ):
+            #    src = 'collection'#?
+            # elif isinstance( element, model.LibraryDatasetDatasetAssociation ):
+            #    src = 'ldda'#?
+            encoded_id = self.trans.security.encode_id( element.id )
+            identifier_list.append( dict( src=src, name=element.name, id=encoded_id ) )
+        return identifier_list
+
+
+# =============================================================================
+if __name__ == '__main__':
+    # or more generally, nosetests test_resourcemanagers.py -s -v
+    unittest.main()
diff --git a/test/unit/managers/test_CollectionManager.py b/test/unit/managers/test_CollectionManager.py
new file mode 100644
index 0000000..2063f3f
--- /dev/null
+++ b/test/unit/managers/test_CollectionManager.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+"""
+"""
+import unittest
+
+from galaxy import model
+from galaxy.managers.collections import DatasetCollectionManager
+from galaxy.managers.datasets import DatasetManager
+from galaxy.managers.hdas import HDAManager
+from galaxy.managers.histories import HistoryManager
+
+from .base import BaseTestCase, CreatesCollectionsMixin
+
+# =============================================================================
+default_password = '123456'
+user2_data = dict( email='user2@user2.user2', username='user2', password=default_password )
+user3_data = dict( email='user3@user3.user3', username='user3', password=default_password )
+
+
+# =============================================================================
+class DatasetCollectionManagerTestCase( BaseTestCase, CreatesCollectionsMixin ):
+
+    def set_up_managers( self ):
+        super( DatasetCollectionManagerTestCase, self ).set_up_managers()
+        self.dataset_manager = DatasetManager( self.app )
+        self.hda_manager = HDAManager( self.app )
+        self.history_manager = HistoryManager( self.app )
+        self.collection_manager = DatasetCollectionManager( self.app )
+
+    def test_create_simple_list( self ):
+        owner = self.user_manager.create( **user2_data )
+
+        history = self.history_manager.create( name='history1', user=owner )
+
+        hda1 = self.hda_manager.create( name='one',
+            history=history, dataset=self.dataset_manager.create() )
+        hda2 = self.hda_manager.create( name='two',
+            history=history, dataset=self.dataset_manager.create() )
+        hda3 = self.hda_manager.create( name='three',
+            history=history, dataset=self.dataset_manager.create() )
+
+        self.log( "should be able to create a new Collection via ids" )
+        element_identifiers = self.build_element_identifiers( [ hda1, hda2, hda3 ] )
+        hdca = self.collection_manager.create( self.trans, history, 'test collection', 'list',
+                                           element_identifiers=element_identifiers )
+        self.assertIsInstance( hdca, model.HistoryDatasetCollectionAssociation )
+        self.assertEqual( hdca.name, 'test collection' )
+        self.assertEqual( hdca.hid, 4 )
+        self.assertFalse( hdca.deleted )
+        self.assertTrue( hdca.visible )
+
+        # print 'hdca dir:'
+        # for k in dir( hdca ):
+        #     print k, getattr( hdca, k, '(?)' )
+
+        self.log( "should contain an underlying, well-formed DatasetCollection" )
+        self.assertIsInstance( hdca.collection, model.DatasetCollection )
+        collection = hdca.collection
+        self.assertEqual( collection.collection_type, 'list' )
+        self.assertEqual( collection.state, 'ok' )
+        self.assertEqual( len( collection.dataset_instances ), 3 )
+        self.assertEqual( len( collection.elements ), 3 )
+
+        # print 'hdca.collection dir:'
+        # for k in dir( hdca.collection ):
+        #     print k, getattr( hdca.collection, k, '(?)' )
+
+        # elements = collection.elements
+        # print 'hdca.collection element dir:'
+        # for k in dir( elements[0] ):
+        #     print k, getattr( elements[0], k, '(?)' )
+
+        self.log( "and that collection should have three well-formed Elements" )
+        self.assertIsInstance( collection.elements[0], model.DatasetCollectionElement )
+        self.assertEqual( collection.elements[0].element_identifier, 'one' )
+        self.assertEqual( collection.elements[0].element_index, 0 )
+        self.assertEqual( collection.elements[0].element_type, 'hda' )
+        self.assertEqual( collection.elements[0].element_object, hda1 )
+
+        self.assertIsInstance( collection.elements[1], model.DatasetCollectionElement )
+        self.assertEqual( collection.elements[1].element_identifier, 'two' )
+        self.assertEqual( collection.elements[1].element_index, 1 )
+        self.assertEqual( collection.elements[1].element_type, 'hda' )
+        self.assertEqual( collection.elements[1].element_object, hda2 )
+
+        self.assertIsInstance( collection.elements[2], model.DatasetCollectionElement )
+        self.assertEqual( collection.elements[2].element_identifier, 'three' )
+        self.assertEqual( collection.elements[2].element_index, 2 )
+        self.assertEqual( collection.elements[2].element_type, 'hda' )
+        self.assertEqual( collection.elements[2].element_object, hda3 )
+
+        self.log( "should be able to create a new Collection via objects" )
+        elements = dict( one=hda1, two=hda2, three=hda3 )
+        hdca2 = self.collection_manager.create( self.trans, history, 'test collection 2', 'list', elements=elements )
+        self.assertIsInstance( hdca2, model.HistoryDatasetCollectionAssociation )
+
+    def test_update_from_dict( self ):
+        owner = self.user_manager.create( **user2_data )
+
+        history = self.history_manager.create( name='history1', user=owner )
+
+        hda1 = self.hda_manager.create( name='one',
+            history=history, dataset=self.dataset_manager.create() )
+        hda2 = self.hda_manager.create( name='two',
+            history=history, dataset=self.dataset_manager.create() )
+        hda3 = self.hda_manager.create( name='three',
+            history=history, dataset=self.dataset_manager.create() )
+
+        elements = dict( one=hda1, two=hda2, three=hda3 )
+        hdca = self.collection_manager.create( self.trans, history, 'test collection', 'list', elements=elements )
+
+        self.log( "should be set from a dictionary" )
+        self.collection_manager._set_from_dict( self.trans, hdca, {
+            'deleted': True,
+            'visible': False,
+            'name': 'New Name',
+            # TODO: doesn't work
+            # 'tags'      : [ 'one', 'two', 'three' ]
+            # 'annotations'      : [?]
+        })
+        self.assertEqual( hdca.name, 'New Name' )
+        self.assertTrue( hdca.deleted )
+        self.assertFalse( hdca.visible )
+        # self.assertEqual( hdca.tags, [ 'one', 'two', 'three' ] )
+        # self.assertEqual( hdca.annotations, [ 'one', 'two', 'three' ] )
+
+    # def test_validation( self ):
+    #    self.log( "should be able to change the name" )
+    #    self.log( "should be able to set deleted" )
+    #    self.log( "should be able to set visible" )
+    #    self.log( "should be able to set tags" )
+
+
+# =============================================================================
+if __name__ == '__main__':
+    # or more generally, nosetests test_resourcemanagers.py -s -v
+    unittest.main()
diff --git a/test/unit/managers/test_DatasetManager.py b/test/unit/managers/test_DatasetManager.py
new file mode 100644
index 0000000..5393545
--- /dev/null
+++ b/test/unit/managers/test_DatasetManager.py
@@ -0,0 +1,449 @@
+# -*- coding: utf-8 -*-
+"""
+"""
+import unittest
+
+import sqlalchemy
+from six import string_types
+
+from galaxy import (
+    exceptions,
+    model
+)
+from galaxy.managers import rbac_secured
+from galaxy.managers.base import SkipAttribute
+from galaxy.managers.datasets import (
+    DatasetDeserializer,
+    DatasetManager,
+    DatasetSerializer
+)
+from galaxy.managers.roles import RoleManager
+
+from .base import BaseTestCase
+
+# =============================================================================
+default_password = '123456'
+user2_data = dict( email='user2@user2.user2', username='user2', password=default_password )
+user3_data = dict( email='user3@user3.user3', username='user3', password=default_password )
+
+
+# =============================================================================
+class DatasetManagerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( DatasetManagerTestCase, self ).set_up_managers()
+        self.dataset_manager = DatasetManager( self.app )
+
+    def test_create( self ):
+        self.log( "should be able to create a new Dataset" )
+        dataset1 = self.dataset_manager.create()
+        self.assertIsInstance( dataset1, model.Dataset )
+        self.assertEqual( dataset1, self.trans.sa_session.query( model.Dataset ).get( dataset1.id ) )
+
+    def test_base( self ):
+        dataset1 = self.dataset_manager.create()
+        dataset2 = self.dataset_manager.create()
+
+        self.log( "should be able to query" )
+        datasets = self.trans.sa_session.query( model.Dataset ).all()
+        self.assertEqual( self.dataset_manager.list(), datasets )
+        self.assertEqual( self.dataset_manager.one( filters=( model.Dataset.id == dataset1.id ) ), dataset1 )
+        self.assertEqual( self.dataset_manager.by_id( dataset1.id ), dataset1 )
+        self.assertEqual( self.dataset_manager.by_ids( [ dataset2.id, dataset1.id ] ), [ dataset2, dataset1 ] )
+
+        self.log( "should be able to limit and offset" )
+        self.assertEqual( self.dataset_manager.list( limit=1 ), datasets[0:1] )
+        self.assertEqual( self.dataset_manager.list( offset=1 ), datasets[1:] )
+        self.assertEqual( self.dataset_manager.list( limit=1, offset=1 ), datasets[1:2] )
+
+        self.assertEqual( self.dataset_manager.list( limit=0 ), [] )
+        self.assertEqual( self.dataset_manager.list( offset=3 ), [] )
+
+        self.log( "should be able to order" )
+        self.assertEqual( self.dataset_manager.list( order_by=sqlalchemy.desc( model.Dataset.create_time ) ),
+            [ dataset2, dataset1 ] )
+
+    def test_delete( self ):
+        item1 = self.dataset_manager.create()
+
+        self.log( "should be able to delete and undelete a dataset" )
+        self.assertFalse( item1.deleted )
+        self.assertEqual( self.dataset_manager.delete( item1 ), item1 )
+        self.assertTrue( item1.deleted )
+        self.assertEqual( self.dataset_manager.undelete( item1 ), item1 )
+        self.assertFalse( item1.deleted )
+
+    def test_purge_allowed( self ):
+        self.trans.app.config.allow_user_dataset_purge = True
+        item1 = self.dataset_manager.create()
+
+        self.log( "should purge a dataset if config does allow" )
+        self.assertFalse( item1.purged )
+        self.assertEqual( self.dataset_manager.purge( item1 ), item1 )
+        self.assertTrue( item1.purged )
+
+        self.log( "should delete a dataset when purging" )
+        self.assertTrue( item1.deleted )
+
+    def test_purge_not_allowed( self ):
+        self.trans.app.config.allow_user_dataset_purge = False
+        item1 = self.dataset_manager.create()
+
+        self.log( "should raise an error when purging a dataset if config does not allow" )
+        self.assertFalse( item1.purged )
+        self.assertRaises( exceptions.ConfigDoesNotAllowException, self.dataset_manager.purge, item1 )
+        self.assertFalse( item1.purged )
+
+    def test_create_with_no_permissions( self ):
+        self.log( "should be able to create a new Dataset without any permissions" )
+        dataset = self.dataset_manager.create()
+
+        permissions = self.dataset_manager.permissions.get( dataset )
+        self.assertIsInstance( permissions, tuple )
+        self.assertEqual( len( permissions ), 2 )
+        manage_permissions, access_permissions = permissions
+        self.assertEqual( manage_permissions, [] )
+        self.assertEqual( access_permissions, [] )
+
+        user3 = self.user_manager.create( **user3_data )
+        self.log( "a dataset without permissions shouldn't be manageable to just anyone" )
+        self.assertFalse( self.dataset_manager.permissions.manage.is_permitted( dataset, user3 ) )
+        self.log( "a dataset without permissions should be accessible" )
+        self.assertTrue( self.dataset_manager.permissions.access.is_permitted( dataset, user3 ) )
+
+        self.log( "a dataset without permissions should be manageable by an admin" )
+        self.assertTrue( self.dataset_manager.permissions.manage.is_permitted( dataset, self.admin_user ) )
+        self.log( "a dataset without permissions should be accessible by an admin" )
+        self.assertTrue( self.dataset_manager.permissions.access.is_permitted( dataset, self.admin_user ) )
+
+        self.log( "a dataset without permissions shouldn't be manageable by an anonymous user" )
+        self.assertFalse( self.dataset_manager.permissions.manage.is_permitted( dataset, None ) )
+        self.log( "a dataset without permissions should be accessible by an anonymous user" )
+        self.assertTrue( self.dataset_manager.permissions.access.is_permitted( dataset, None ) )
+
+    def test_create_public_dataset( self ):
+        self.log( "should be able to create a new Dataset and give it some permissions that actually, you know, "
+            "might work if there's any justice in this universe" )
+        owner = self.user_manager.create( **user2_data )
+        owner_private_role = self.user_manager.private_role( owner )
+        dataset = self.dataset_manager.create( manage_roles=[ owner_private_role ] )
+
+        permissions = self.dataset_manager.permissions.get( dataset )
+        self.assertIsInstance( permissions, tuple )
+        self.assertEqual( len( permissions ), 2 )
+        manage_permissions, access_permissions = permissions
+        self.assertIsInstance( manage_permissions, list )
+        self.assertIsInstance( manage_permissions[0], model.DatasetPermissions )
+        self.assertEqual( access_permissions, [] )
+
+        user3 = self.user_manager.create( **user3_data )
+        self.log( "a public dataset should be manageable to it's owner" )
+        self.assertTrue( self.dataset_manager.permissions.manage.is_permitted( dataset, owner ) )
+        self.log( "a public dataset shouldn't be manageable to just anyone" )
+        self.assertFalse( self.dataset_manager.permissions.manage.is_permitted( dataset, user3 ) )
+        self.log( "a public dataset should be accessible" )
+        self.assertTrue( self.dataset_manager.permissions.access.is_permitted( dataset, user3 ) )
+
+        self.log( "a public dataset should be manageable by an admin" )
+        self.assertTrue( self.dataset_manager.permissions.manage.is_permitted( dataset, self.admin_user ) )
+        self.log( "a public dataset should be accessible by an admin" )
+        self.assertTrue( self.dataset_manager.permissions.access.is_permitted( dataset, self.admin_user ) )
+
+        self.log( "a public dataset shouldn't be manageable by an anonymous user" )
+        self.assertFalse( self.dataset_manager.permissions.manage.is_permitted( dataset, None ) )
+        self.log( "a public dataset should be accessible by an anonymous user" )
+        self.assertTrue( self.dataset_manager.permissions.access.is_permitted( dataset, None ) )
+
+    def test_create_private_dataset( self ):
+        self.log( "should be able to create a new Dataset and give it private permissions" )
+        owner = self.user_manager.create( **user2_data )
+        owner_private_role = self.user_manager.private_role( owner )
+        dataset = self.dataset_manager.create(
+            manage_roles=[ owner_private_role ], access_roles=[ owner_private_role ] )
+
+        permissions = self.dataset_manager.permissions.get( dataset )
+        self.assertIsInstance( permissions, tuple )
+        self.assertEqual( len( permissions ), 2 )
+        manage_permissions, access_permissions = permissions
+        self.assertIsInstance( manage_permissions, list )
+        self.assertIsInstance( manage_permissions[0], model.DatasetPermissions )
+        self.assertIsInstance( access_permissions, list )
+        self.assertIsInstance( access_permissions[0], model.DatasetPermissions )
+
+        self.log( "a private dataset should be manageable by it's owner" )
+        self.assertTrue( self.dataset_manager.permissions.manage.is_permitted( dataset, owner ) )
+        self.log( "a private dataset should be accessible to it's owner" )
+        self.assertTrue( self.dataset_manager.permissions.access.is_permitted( dataset, owner ) )
+
+        user3 = self.user_manager.create( **user3_data )
+        self.log( "a private dataset shouldn't be manageable to just anyone" )
+        self.assertFalse( self.dataset_manager.permissions.manage.is_permitted( dataset, user3 ) )
+        self.log( "a private dataset shouldn't be accessible to just anyone" )
+        self.assertFalse( self.dataset_manager.permissions.access.is_permitted( dataset, user3 ) )
+
+        self.log( "a private dataset should be manageable by an admin" )
+        self.assertTrue( self.dataset_manager.permissions.manage.is_permitted( dataset, self.admin_user ) )
+        self.log( "a private dataset should be accessible by an admin" )
+        self.assertTrue( self.dataset_manager.permissions.access.is_permitted( dataset, self.admin_user ) )
+
+        self.log( "a private dataset shouldn't be manageable by an anonymous user" )
+        self.assertFalse( self.dataset_manager.permissions.manage.is_permitted( dataset, None ) )
+        self.log( "a private dataset shouldn't be accessible by an anonymous user" )
+        self.assertFalse( self.dataset_manager.permissions.access.is_permitted( dataset, None ) )
+
+
+# =============================================================================
+class DatasetRBACPermissionsTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( DatasetRBACPermissionsTestCase, self ).set_up_managers()
+        self.dataset_manager = DatasetManager( self.app )
+
+    # def test_manage( self ):
+    #     self.log( "should be able to create a new Dataset" )
+    #     dataset1 = self.dataset_manager.create()
+    #     self.assertIsInstance( dataset1, model.Dataset )
+    #     self.assertEqual( dataset1, self.app.model.context.query( model.Dataset ).get( dataset1.id ) )
+    #
+
+
+# =============================================================================
+# web.url_for doesn't work well in the framework
+def testable_url_for(*a, **k):
+    return '(fake url): %s, %s' % ( a, k )
+
+
+DatasetSerializer.url_for = staticmethod( testable_url_for )
+
+
+class DatasetSerializerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( DatasetSerializerTestCase, self ).set_up_managers()
+        self.dataset_manager = DatasetManager( self.app )
+        self.dataset_serializer = DatasetSerializer( self.app )
+        self.role_manager = RoleManager( self.app )
+
+    def test_views( self ):
+        dataset = self.dataset_manager.create()
+
+        self.log( 'should have a summary view' )
+        summary_view = self.dataset_serializer.serialize_to_view( dataset, view='summary' )
+        self.assertKeys( summary_view, self.dataset_serializer.views[ 'summary' ] )
+
+        self.log( 'should have the summary view as default view' )
+        default_view = self.dataset_serializer.serialize_to_view( dataset, default_view='summary' )
+        self.assertKeys( default_view, self.dataset_serializer.views[ 'summary' ] )
+
+        self.log( 'should have a serializer for all serializable keys' )
+        for key in self.dataset_serializer.serializable_keyset:
+            instantiated_attribute = getattr( dataset, key, None )
+            if not ( ( key in self.dataset_serializer.serializers ) or
+                     ( isinstance( instantiated_attribute, self.TYPES_NEEDING_NO_SERIALIZERS ) ) ):
+                self.fail( 'no serializer for: %s (%s)' % ( key, instantiated_attribute ) )
+        else:
+            self.assertTrue( True, 'all serializable keys have a serializer' )
+
+    def test_views_and_keys( self ):
+        dataset = self.dataset_manager.create()
+
+        self.log( 'should be able to use keys with views' )
+        serialized = self.dataset_serializer.serialize_to_view( dataset,
+            # file_name is exposed using app.config.expose_dataset_path = True
+            view='summary', keys=[ 'file_name' ] )
+        self.assertKeys( serialized,
+            self.dataset_serializer.views[ 'summary' ] + [ 'file_name' ] )
+
+        self.log( 'should be able to use keys on their own' )
+        serialized = self.dataset_serializer.serialize_to_view( dataset,
+            keys=[ 'purgable', 'file_size' ] )
+        self.assertKeys( serialized, [ 'purgable', 'file_size' ] )
+
+    def test_serialize_permissions( self ):
+        dataset = self.dataset_manager.create()
+        who_manages = self.user_manager.create( **user2_data )
+        self.dataset_manager.permissions.manage.grant( dataset, who_manages )
+
+        self.log( 'serialized permissions should be returned for the user who can manage and be well formed' )
+        permissions = self.dataset_serializer.serialize_permissions( dataset, 'perms', user=who_manages )
+        self.assertIsInstance( permissions, dict )
+        self.assertKeys( permissions, [ 'manage', 'access' ] )
+        self.assertIsInstance( permissions[ 'manage' ], list )
+        self.assertIsInstance( permissions[ 'access' ], list )
+
+        manage_perms = permissions[ 'manage' ]
+        self.assertTrue( len( manage_perms ) == 1 )
+        role_id = manage_perms[0]
+        self.assertEncodedId( role_id )
+        role_id = self.app.security.decode_id( role_id )
+        role = self.role_manager.get( self.trans, role_id )
+        self.assertTrue( who_manages in [ user_role.user for user_role in role.users ])
+
+        self.log( 'permissions should not be returned for non-managing users' )
+        not_my_supervisor = self.user_manager.create( **user3_data )
+        self.assertRaises( SkipAttribute, self.dataset_serializer.serialize_permissions,
+            dataset, 'perms', user=not_my_supervisor )
+
+        self.log( 'permissions should not be returned for anon users' )
+        self.assertRaises( SkipAttribute, self.dataset_serializer.serialize_permissions,
+            dataset, 'perms', user=None )
+
+        self.log( 'permissions should be returned for admin users' )
+        permissions = self.dataset_serializer.serialize_permissions( dataset, 'perms', user=self.admin_user )
+        self.assertIsInstance( permissions, dict )
+        self.assertKeys( permissions, [ 'manage', 'access' ] )
+
+    def test_serializers( self ):
+        # self.user_manager.create( **user2_data )
+        dataset = self.dataset_manager.create()
+        all_keys = list( self.dataset_serializer.serializable_keyset )
+        serialized = self.dataset_serializer.serialize( dataset, all_keys )
+
+        self.log( 'everything serialized should be of the proper type' )
+        self.assertEncodedId( serialized[ 'id' ] )
+        self.assertDate( serialized[ 'create_time' ] )
+        self.assertDate( serialized[ 'update_time' ] )
+
+        self.assertUUID( serialized[ 'uuid' ] )
+        self.assertIsInstance( serialized[ 'state' ], string_types )
+        self.assertIsInstance( serialized[ 'deleted' ], bool )
+        self.assertIsInstance( serialized[ 'purged' ], bool )
+        self.assertIsInstance( serialized[ 'purgable' ], bool )
+
+        # # TODO: no great way to do these with mocked dataset
+        # self.assertIsInstance( serialized[ 'file_size' ], int )
+        # self.assertIsInstance( serialized[ 'total_size' ], int )
+
+        self.log( 'serialized should jsonify well' )
+        self.assertIsJsonifyable( serialized )
+
+
+# =============================================================================
+class DatasetDeserializerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( DatasetDeserializerTestCase, self ).set_up_managers()
+        self.dataset_manager = DatasetManager( self.app )
+        self.dataset_serializer = DatasetSerializer( self.app )
+        self.dataset_deserializer = DatasetDeserializer( self.app )
+        self.role_manager = RoleManager( self.app )
+
+    def test_deserialize_delete( self ):
+        dataset = self.dataset_manager.create()
+
+        self.log( 'should raise when deserializing deleted from non-bool' )
+        self.assertFalse( dataset.deleted )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.dataset_deserializer.deserialize, dataset, data={ 'deleted': None } )
+        self.assertFalse( dataset.deleted )
+        self.log( 'should be able to deserialize deleted from True' )
+        self.dataset_deserializer.deserialize( dataset, data={ 'deleted': True } )
+        self.assertTrue( dataset.deleted )
+        self.log( 'should be able to reverse by deserializing deleted from False' )
+        self.dataset_deserializer.deserialize( dataset, data={ 'deleted': False } )
+        self.assertFalse( dataset.deleted )
+
+    def test_deserialize_purge( self ):
+        dataset = self.dataset_manager.create()
+
+        self.log( 'should raise when deserializing purged from non-bool' )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.dataset_deserializer.deserialize, dataset, data={ 'purged': None } )
+        self.assertFalse( dataset.purged )
+        self.log( 'should be able to deserialize purged from True' )
+        self.dataset_deserializer.deserialize( dataset, data={ 'purged': True } )
+        self.assertTrue( dataset.purged )
+        # TODO: should this raise an error?
+        self.log( 'should NOT be able to deserialize purged from False (will remain True)' )
+        self.dataset_deserializer.deserialize( dataset, data={ 'purged': False } )
+        self.assertTrue( dataset.purged )
+
+    def test_deserialize_permissions( self ):
+        dataset = self.dataset_manager.create()
+        who_manages = self.user_manager.create( **user2_data )
+        self.dataset_manager.permissions.manage.grant( dataset, who_manages )
+        existing_permissions = self.dataset_serializer.serialize_permissions( dataset, 'permissions', user=who_manages )
+        existing_manage_permissions = existing_permissions[ 'manage' ]
+
+        user3 = self.user_manager.create( **user3_data )
+
+        self.log( 'deserializing permissions from a non-dictionary should error' )
+        not_a_dict = []
+        self.assertRaises( exceptions.RequestParameterInvalidException, self.dataset_deserializer.deserialize,
+            dataset, user=who_manages, data={ 'permissions': not_a_dict })
+
+        self.log( 'deserializing permissions from a malformed dictionary should error' )
+        self.assertRaises( exceptions.RequestParameterInvalidException, self.dataset_deserializer.deserialize,
+            dataset, user=who_manages, data={ 'permissions': dict( nope=[], access=[] ) })
+
+        self.log( 'deserializing permissions with no manage roles should error' )
+        self.assertRaises( exceptions.RequestParameterInvalidException, self.dataset_deserializer.deserialize,
+            dataset, user=who_manages, data={ 'permissions': dict( manage=[], access=[] ) })
+
+        self.log( 'deserializing permissions using a non-managing user should error' )
+        self.assertRaises( rbac_secured.DatasetManagePermissionFailedException, self.dataset_deserializer.deserialize,
+            dataset, user=user3, data={ 'permissions': existing_permissions })
+
+        self.log( 'deserializing permissions using an anon user should error' )
+        self.assertRaises( rbac_secured.DatasetManagePermissionFailedException, self.dataset_deserializer.deserialize,
+            dataset, user=None, data={ 'permissions': existing_permissions })
+
+        self.log( 'deserializing permissions with a single access role should make the dataset private' )
+        private_role = self.user_manager.private_role( who_manages )
+        private_role = private_role.to_dict( value_mapper={ 'id': self.app.security.encode_id } )
+        permissions = dict( manage=existing_manage_permissions, access=[ private_role[ 'id' ] ] )
+        self.dataset_deserializer.deserialize( dataset, user=who_manages, data={
+            'permissions': permissions
+        })
+        self.assertFalse( self.dataset_manager.is_accessible( dataset, user=user3 ) )
+
+        self.log( 'deserializing manage permissions should make the permissions available to the new manager' )
+        self.assertRaises( SkipAttribute, self.dataset_serializer.serialize_permissions,
+            dataset, 'perms', user=user3 )
+        # now, have who_manages give a manage permission to user3
+        private_role = self.user_manager.private_role( user3 )
+        new_manage_permissions = existing_manage_permissions + [ self.app.security.encode_id( private_role.id ) ]
+        permissions = dict( manage=new_manage_permissions, access=[] )
+        self.dataset_deserializer.deserialize( dataset, user=who_manages, data={
+            'permissions': permissions
+        })
+
+        # serializing for user3 shouldn't raise a SkipAttribute now, because they can manage
+        permissions = self.dataset_serializer.serialize_permissions( dataset, 'perms', user=user3 )
+        self.assertEqual( new_manage_permissions, permissions[ 'manage' ] )
+
+    def test_deserialize_permissions_with_admin( self ):
+        dataset = self.dataset_manager.create()
+        who_manages = self.user_manager.create( **user2_data )
+        self.dataset_manager.permissions.manage.grant( dataset, who_manages )
+        existing_permissions = self.dataset_serializer.serialize_permissions( dataset, 'permissions', user=who_manages )
+        existing_manage_permissions = existing_permissions[ 'manage' ]
+
+        user3 = self.user_manager.create( **user3_data )
+        self.assertRaises( rbac_secured.DatasetManagePermissionFailedException, self.dataset_deserializer.deserialize,
+            dataset, user=user3, data={ 'permissions': existing_permissions })
+
+        self.log( 'deserializing permissions using an admin user should not error' )
+        private_role = self.user_manager.private_role( who_manages )
+        private_role = private_role.to_dict( value_mapper={ 'id': self.app.security.encode_id } )
+        permissions = dict( manage=existing_manage_permissions, access=[ private_role[ 'id' ] ] )
+        self.dataset_deserializer.deserialize( dataset, user=self.admin_user, data={
+            'permissions': permissions
+        })
+
+        self.assertRaises( rbac_secured.DatasetManagePermissionFailedException, self.dataset_deserializer.deserialize,
+            dataset, user=user3, data={ 'permissions': existing_permissions })
+
+
+# =============================================================================
+# NOTE: that we test the DatasetAssociation* classes in either test_HDAManager or test_LDAManager
+# (as part of those subclasses):
+#   DatasetAssociationManager,
+#   DatasetAssociationSerializer,
+#   DatasetAssociationDeserializer,
+#   DatasetAssociationFilterParser
+
+# =============================================================================
+if __name__ == '__main__':
+    # or more generally, nosetests test_resourcemanagers.py -s -v
+    unittest.main()
diff --git a/test/unit/managers/test_HDAManager.py b/test/unit/managers/test_HDAManager.py
new file mode 100644
index 0000000..16a54aa
--- /dev/null
+++ b/test/unit/managers/test_HDAManager.py
@@ -0,0 +1,676 @@
+# -*- coding: utf-8 -*-
+import unittest
+
+import sqlalchemy
+from six import string_types
+
+from galaxy import exceptions, model
+from galaxy.managers import hdas
+from galaxy.managers.datasets import DatasetManager
+from galaxy.managers.histories import HistoryManager
+
+from .base import BaseTestCase
+
+# =============================================================================
+default_password = '123456'
+user2_data = dict( email='user2@user2.user2', username='user2', password=default_password )
+user3_data = dict( email='user3@user3.user3', username='user3', password=default_password )
+
+
+# =============================================================================
+class HDATestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( HDATestCase, self ).set_up_managers()
+        self.hda_manager = hdas.HDAManager( self.app )
+        self.history_manager = HistoryManager( self.app )
+        self.dataset_manager = DatasetManager( self.app )
+
+    def _create_vanilla_hda( self, user_data=None ):
+        user_data = user_data or user2_data
+        owner = self.user_manager.create( **user_data )
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        return self.hda_manager.create( history=history1, dataset=dataset1 )
+
+
+# =============================================================================
+class HDAManagerTestCase( HDATestCase ):
+
+    def test_base( self ):
+        hda_model = model.HistoryDatasetAssociation
+        owner = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=owner )
+        hda1 = self.hda_manager.create( history=history1, hid=1 )
+        hda2 = self.hda_manager.create( history=history1, hid=2 )
+        hda3 = self.hda_manager.create( history=history1, hid=3 )
+
+        self.log( "should be able to query" )
+        hdas = self.trans.sa_session.query( hda_model ).all()
+        self.assertEqual( self.hda_manager.list(), hdas )
+        self.assertEqual( self.hda_manager.one( filters=( hda_model.id == hda1.id ) ), hda1 )
+        self.assertEqual( self.hda_manager.by_id( hda1.id ), hda1 )
+        self.assertEqual( self.hda_manager.by_ids( [ hda2.id, hda1.id ] ), [ hda2, hda1 ] )
+
+        self.log( "should be able to limit and offset" )
+        self.assertEqual( self.hda_manager.list( limit=1 ), hdas[0:1] )
+        self.assertEqual( self.hda_manager.list( offset=1 ), hdas[1:] )
+        self.assertEqual( self.hda_manager.list( limit=1, offset=1 ), hdas[1:2] )
+
+        self.assertEqual( self.hda_manager.list( limit=0 ), [] )
+        self.assertEqual( self.hda_manager.list( offset=3 ), [] )
+
+        self.log( "should be able to order" )
+        self.assertEqual( self.hda_manager.list( order_by=sqlalchemy.desc( hda_model.create_time ) ),
+            [ hda3, hda2, hda1 ] )
+
+    def test_create( self ):
+        owner = self.user_manager.create( **user2_data )
+
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+
+        self.log( "should be able to create a new HDA with a specified history and dataset" )
+        hda1 = self.hda_manager.create( history=history1, dataset=dataset1 )
+        self.assertIsInstance( hda1, model.HistoryDatasetAssociation )
+        self.assertEqual( hda1, self.trans.sa_session.query( model.HistoryDatasetAssociation ).get( hda1.id ) )
+        self.assertEqual( hda1.history, history1 )
+        self.assertEqual( hda1.dataset, dataset1 )
+        self.assertEqual( hda1.hid, 1 )
+
+        self.log( "should be able to create a new HDA with only a specified history and no dataset" )
+        hda2 = self.hda_manager.create( history=history1 )
+        self.assertIsInstance( hda2, model.HistoryDatasetAssociation )
+        self.assertIsInstance( hda2.dataset, model.Dataset )
+        self.assertEqual( hda2.history, history1 )
+        self.assertEqual( hda2.hid, 2 )
+
+        self.log( "should be able to create a new HDA with no history and no dataset" )
+        hda3 = self.hda_manager.create( hid=None )
+        self.assertIsInstance( hda3, model.HistoryDatasetAssociation )
+        self.assertIsInstance( hda3.dataset, model.Dataset, msg="dataset will be auto created" )
+        self.assertIsNone( hda3.history, msg="history will be None" )
+        self.assertEqual( hda3.hid, None, msg="should allow setting hid to None (or any other value)" )
+
+    def test_hda_tags( self ):
+        owner = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        hda1 = self.hda_manager.create( history=history1, dataset=dataset1 )
+
+        self.log( "should be able to set tags on an hda" )
+        tags_to_set = [ u'tag-one', u'tag-two' ]
+        self.hda_manager.set_tags( hda1, tags_to_set, user=owner )
+        tag_str_array = self.hda_manager.get_tags( hda1 )
+        self.assertEqual( sorted( tags_to_set ), sorted( tag_str_array ) )
+
+    def test_hda_annotation( self ):
+        owner = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        hda1 = self.hda_manager.create( history=history1, dataset=dataset1 )
+
+        self.log( "should be able to set annotation on an hda" )
+        annotation = u'an annotation or анотація'
+        self.hda_manager.annotate( hda1, annotation, user=owner )
+        self.assertEqual( self.hda_manager.annotation( hda1 ), annotation )
+
+    def test_copy_from_hda( self ):
+        owner = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        hda1 = self.hda_manager.create( history=history1, dataset=dataset1 )
+
+        self.log( "should be able to copy an HDA" )
+        hda2 = self.hda_manager.copy( hda1, history=history1 )
+        self.assertIsInstance( hda2, model.HistoryDatasetAssociation )
+        self.assertEqual( hda2, self.trans.sa_session.query( model.HistoryDatasetAssociation ).get( hda2.id ) )
+        self.assertEqual( hda2.name, hda1.name )
+        self.assertEqual( hda2.history, hda1.history )
+        self.assertEqual( hda2.dataset, hda1.dataset )
+        self.assertNotEqual( hda2, hda1 )
+
+        self.log( "tags should be copied between HDAs" )
+        tagged = self.hda_manager.create( history=history1, dataset=self.dataset_manager.create() )
+        tags_to_set = [ u'tag-one', u'tag-two' ]
+        self.hda_manager.set_tags( tagged, tags_to_set, user=owner )
+
+        hda2 = self.hda_manager.copy( tagged, history=history1 )
+        tag_str_array = self.hda_manager.get_tags( hda2 )
+        self.assertEqual( sorted( tags_to_set ), sorted( tag_str_array ) )
+
+        self.log( "annotations should be copied between HDAs" )
+        annotated = self.hda_manager.create( history=history1, dataset=self.dataset_manager.create() )
+        annotation = u'( ͡° ͜ʖ ͡°)'
+        self.hda_manager.annotate( annotated, annotation, user=owner )
+
+        hda3 = self.hda_manager.copy( annotated, history=history1 )
+        hda3_annotation = self.hda_manager.annotation( hda3 )
+        self.assertEqual( annotation, hda3_annotation )
+
+    # def test_copy_from_ldda( self ):
+    #    owner = self.user_manager.create( self.trans, **user2_data )
+    #    history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
+    #
+    #    self.log( "should be able to copy an HDA" )
+    #    hda2 = self.hda_manager.copy_ldda( history1, hda1 )
+
+    def test_delete( self ):
+        owner = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        item1 = self.hda_manager.create( history=history1, dataset=dataset1 )
+
+        self.log( "should be able to delete and undelete an hda" )
+        self.assertFalse( item1.deleted )
+        self.assertEqual( self.hda_manager.delete( item1 ), item1 )
+        self.assertTrue( item1.deleted )
+        self.assertEqual( self.hda_manager.undelete( item1 ), item1 )
+        self.assertFalse( item1.deleted )
+
+    def test_purge_allowed( self ):
+        self.trans.app.config.allow_user_dataset_purge = True
+
+        owner = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        item1 = self.hda_manager.create( history=history1, dataset=dataset1 )
+
+        self.log( "should purge an hda if config does allow" )
+        self.assertFalse( item1.purged )
+        self.assertEqual( self.hda_manager.purge( item1 ), item1 )
+        self.assertTrue( item1.purged )
+
+    def test_purge_not_allowed( self ):
+        self.trans.app.config.allow_user_dataset_purge = False
+
+        owner = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        item1 = self.hda_manager.create( history=history1, dataset=dataset1 )
+
+        self.log( "should raise an error when purging an hda if config does not allow" )
+        self.assertFalse( item1.purged )
+        self.assertRaises( exceptions.ConfigDoesNotAllowException, self.hda_manager.purge, item1 )
+        self.assertFalse( item1.purged )
+
+    def test_ownable( self ):
+        owner = self.user_manager.create( **user2_data )
+        non_owner = self.user_manager.create( **user3_data )
+
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        item1 = self.hda_manager.create( history1, dataset1 )
+
+        self.log( "should be able to poll whether a given user owns an item" )
+        self.assertTrue(  self.hda_manager.is_owner( item1, owner ) )
+        self.assertFalse( self.hda_manager.is_owner( item1, non_owner ) )
+
+        self.log( "should raise an error when checking ownership with non-owner" )
+        self.assertRaises( exceptions.ItemOwnershipException,
+            self.hda_manager.error_unless_owner, item1, non_owner )
+
+        self.log( "should raise an error when checking ownership with anonymous" )
+        self.assertRaises( exceptions.ItemOwnershipException,
+            self.hda_manager.error_unless_owner, item1, None )
+
+        self.log( "should not raise an error when checking ownership with owner" )
+        self.assertEqual( self.hda_manager.error_unless_owner( item1, owner ), item1 )
+
+        self.log( "should not raise an error when checking ownership with admin" )
+        self.assertEqual( self.hda_manager.error_unless_owner( item1, self.admin_user ), item1 )
+
+    def test_accessible( self ):
+        owner = self.user_manager.create( **user2_data )
+        non_owner = self.user_manager.create( **user3_data )
+
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        item1 = self.hda_manager.create( history1, dataset1 )
+
+        self.log( "(by default, dataset permissions are lax) should be accessible to all" )
+
+        for user in self.user_manager.list():
+            self.assertTrue( self.hda_manager.is_accessible( item1, user ) )
+
+        self.log( "after setting a dataset to private (one user) permissions, access should be allowed for that user" )
+        # for this test, set restrictive access permissions
+        self.dataset_manager.permissions.set_private_to_one_user( dataset1, owner )
+        accessible = self.hda_manager.get_accessible( item1.id, owner, current_history=self.trans.history )
+        self.assertEqual( accessible, item1 )
+
+        self.log( "after setting a dataset to private (one user) permissions, " +
+            "access should be not allowed for other users" )
+        self.assertRaises( exceptions.ItemAccessibilityException,
+            self.hda_manager.get_accessible, item1.id, non_owner, current_history=self.trans.history )
+
+        self.log( "a copy of a restricted dataset in another users history should be inaccessible even to " +
+            "the histories owner" )
+        history2 = self.history_manager.create( name='history2', user=non_owner )
+        self.trans.set_history( history2 )
+        item2 = self.hda_manager.copy( item1, history=history2 )
+        self.assertIsInstance( item2, model.HistoryDatasetAssociation )
+        self.assertRaises( exceptions.ItemAccessibilityException,
+            self.hda_manager.get_accessible, item2.id, non_owner, current_history=self.trans.history )
+
+        self.log( "a restricted dataset cannot be accessed by anonymous users" )
+        anon_user = None
+        self.trans.set_user( anon_user )
+        history3 = self.history_manager.create( name='anon_history', user=anon_user )
+        self.trans.set_history( history3 )
+        self.assertRaises( exceptions.ItemAccessibilityException,
+            self.hda_manager.get_accessible, item1.id, anon_user, current_history=self.trans.history )
+
+    def test_anon_ownership( self ):
+        anon_user = None
+        self.trans.set_user( anon_user )
+
+        history1 = self.history_manager.create( name='anon_history', user=anon_user )
+        self.trans.set_history( history1 )
+        dataset1 = self.dataset_manager.create()
+        item1 = self.hda_manager.create( history1, dataset1 )
+
+        self.log( "should not raise an error when checking ownership on anonymous' own dataset" )
+        # need to pass the current history for comparison
+        self.assertTrue( self.hda_manager.is_owner( item1, anon_user, current_history=self.trans.history ) )
+        item = self.hda_manager.error_unless_owner( item1, anon_user, current_history=self.trans.history )
+        self.assertEqual( item, item1 )
+        item = self.hda_manager.get_owned( item1.id, anon_user, current_history=self.trans.history )
+        self.assertEqual( item, item1 )
+
+        self.log( "should raise an error when checking ownership on anonymous' dataset with other user" )
+        non_owner = self.user_manager.create( **user3_data )
+        self.assertFalse( self.hda_manager.is_owner( item1, non_owner ) )
+        self.assertRaises( exceptions.ItemOwnershipException,
+            self.hda_manager.error_unless_owner, item1, non_owner )
+        self.assertRaises( exceptions.ItemOwnershipException,
+            self.hda_manager.get_owned, item1.id, non_owner )
+
+    def test_anon_accessibility( self ):
+        anon_user = None
+        self.trans.set_user( anon_user )
+
+        history1 = self.history_manager.create( name='anon_history', user=anon_user )
+        self.trans.set_history( history1 )
+        dataset1 = self.dataset_manager.create()
+        item1 = self.hda_manager.create( history1, dataset1 )
+
+        # datasets are public by default
+        self.assertTrue( self.hda_manager.is_accessible( item1, anon_user ) )
+        # for this test, set restrictive access permissions
+        dataset_owner = self.user_manager.create( **user3_data )
+        self.dataset_manager.permissions.set_private_to_one_user( dataset1, dataset_owner )
+
+        self.log( "anonymous users should not be able to access datasets within their own histories if " +
+            "permissions do not allow" )
+        self.assertFalse( self.hda_manager.is_accessible( item1, anon_user ) )
+        self.assertRaises( exceptions.ItemAccessibilityException,
+            self.hda_manager.error_unless_accessible, item1, anon_user )
+
+        self.log( "those users with access permissions should still be allowed access to datasets " +
+            "within anon users' histories" )
+        self.assertTrue( self.hda_manager.is_accessible( item1, dataset_owner ) )
+
+    def test_error_if_uploading( self ):
+        hda = self._create_vanilla_hda()
+
+        hda.state = model.Dataset.states.OK
+        self.log( "should not raise an error when calling error_if_uploading and in a non-uploading state" )
+        self.assertEqual( self.hda_manager.error_if_uploading( hda ), hda )
+
+        hda.state = model.Dataset.states.UPLOAD
+        self.log( "should raise an error when calling error_if_uploading and in the uploading state" )
+        self.assertRaises( exceptions.Conflict,
+            self.hda_manager.error_if_uploading, hda )
+
+    def test_data_conversion_status( self ):
+        hda = self._create_vanilla_hda()
+
+        self.log( "data conversion status should reflect state" )
+        self.assertEqual( self.hda_manager.data_conversion_status( None ),
+            hda.conversion_messages.NO_DATA )
+        hda.state = model.Dataset.states.ERROR
+        self.assertEqual( self.hda_manager.data_conversion_status( hda ),
+            hda.conversion_messages.ERROR )
+        hda.state = model.Dataset.states.QUEUED
+        self.assertEqual( self.hda_manager.data_conversion_status( hda ),
+            hda.conversion_messages.PENDING )
+        hda.state = model.Dataset.states.OK
+        self.assertEqual( self.hda_manager.data_conversion_status( hda ), None )
+
+    # def test_text_data( self ):
+
+
+# =============================================================================
+# web.url_for doesn't work well in the unit-test framework, so replace it with a stub
+def testable_url_for(*a, **k):
+    return '(fake url): %s, %s' % ( a, k )
+
+
+hdas.HDASerializer.url_for = staticmethod( testable_url_for )
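+# a minimal illustrative sketch of the same patch with an explicit restore,
+# should other code in the process need the real url_for:
+#
+#     _original_url_for = hdas.HDASerializer.url_for
+#
+#     def tearDownModule():
+#         hdas.HDASerializer.url_for = _original_url_for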
+
+
+class HDASerializerTestCase( HDATestCase ):
+
+    def set_up_managers( self ):
+        super( HDASerializerTestCase, self ).set_up_managers()
+        self.hda_serializer = hdas.HDASerializer( self.app )
+
+    def test_views( self ):
+        hda = self._create_vanilla_hda()
+
+        self.log( 'should have a summary view' )
+        summary_view = self.hda_serializer.serialize_to_view( hda, view='summary' )
+        self.assertKeys( summary_view, self.hda_serializer.views[ 'summary' ] )
+
+        self.log( 'should have the summary view as default view' )
+        default_view = self.hda_serializer.serialize_to_view( hda, default_view='summary' )
+        self.assertKeys( default_view, self.hda_serializer.views[ 'summary' ] )
+
+        # self.log( 'should have a detailed view' )
+        # detailed_view = self.hda_serializer.serialize_to_view( hda, view='detailed' )
+        # self.assertKeys( detailed_view, self.hda_serializer.views[ 'detailed' ] )
+
+        # self.log( 'should have an extended view' )
+        # extended_view = self.hda_serializer.serialize_to_view( hda, view='extended' )
+        # self.assertKeys( extended_view, self.hda_serializer.views[ 'extended' ] )
+
+        self.log( 'should have an inaccessible view' )
+        inaccessible_view = self.hda_serializer.serialize_to_view( hda, view='inaccessible' )
+        self.assertKeys( inaccessible_view, self.hda_serializer.views[ 'inaccessible' ] )
+
+        # skip metadata for this test
+        def is_metadata( key ):
+            return ( key == 'metadata' or
+                key.startswith( 'metadata_' ) )
+
+        self.log( 'should have a serializer for all serializable keys' )
+        for key in self.hda_serializer.serializable_keyset:
+            instantiated_attribute = getattr( hda, key, None )
+            if not ( ( key in self.hda_serializer.serializers ) or
+                   ( isinstance( instantiated_attribute, self.TYPES_NEEDING_NO_SERIALIZERS ) ) or
+                   ( is_metadata( key ) ) ):
+                self.fail( 'no serializer for: %s (%s)' % ( key, instantiated_attribute ) )
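+        # this 'else' belongs to the for loop: it runs when the loop completes
+        # without a break (here, when self.fail was never triggered)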
+        else:
+            self.assertTrue( True, 'all serializable keys have a serializer' )
+
+    def test_views_and_keys( self ):
+        hda = self._create_vanilla_hda()
+
+        self.log( 'should be able to use keys with views' )
+        serialized = self.hda_serializer.serialize_to_view( hda,
+            view='summary', keys=[ 'uuid' ] )
+        self.assertKeys( serialized,
+            self.hda_serializer.views[ 'summary' ] + [ 'uuid' ] )
+
+        self.log( 'should be able to use keys on their own' )
+        serialized = self.hda_serializer.serialize_to_view( hda,
+            keys=[ 'file_path', 'visualizations' ] )
+        self.assertKeys( serialized, [ 'file_path', 'visualizations' ] )
+
+    def test_serializers( self ):
+        hda = self._create_vanilla_hda()
+        all_keys = list( self.hda_serializer.serializable_keyset )
+        serialized = self.hda_serializer.serialize( hda, all_keys, user=hda.history.user )
+
+        self.log( 'everything serialized should be of the proper type' )
+        # base
+        self.assertEncodedId( serialized[ 'id' ] )
+        self.assertDate( serialized[ 'create_time' ] )
+        self.assertDate( serialized[ 'update_time' ] )
+
+        # dataset association
+        self.assertIsInstance( serialized[ 'dataset' ], dict )
+        self.assertEncodedId( serialized[ 'dataset_id' ] )
+        self.assertUUID( serialized[ 'uuid' ] )
+        self.assertIsInstance( serialized[ 'file_name' ], string_types )
+        self.assertIsInstance( serialized[ 'extra_files_path' ], string_types )
+        self.assertIsInstance( serialized[ 'size' ], int )
+        self.assertIsInstance( serialized[ 'file_size' ], int )
+        self.assertIsInstance( serialized[ 'nice_size' ], string_types )
+        # TODO: these should be tested w/copy
+        self.assertNullableEncodedId( serialized[ 'copied_from_history_dataset_association_id'] )
+        self.assertNullableEncodedId( serialized[ 'copied_from_library_dataset_dataset_association_id'] )
+        self.assertNullableBasestring( serialized[ 'info' ] )
+        self.assertNullableBasestring( serialized[ 'blurb' ] )
+        self.assertNullableBasestring( serialized[ 'peek' ] )
+        self.assertIsInstance( serialized[ 'meta_files' ], list )
+        self.assertNullableEncodedId( serialized[ 'parent_id'] )
+        self.assertEqual( serialized[ 'designation' ], None )
+        self.assertIsInstance( serialized[ 'genome_build' ], string_types )
+        self.assertIsInstance( serialized[ 'data_type' ], string_types )
+
+        # hda
+        self.assertEncodedId( serialized[ 'history_id' ] )
+        self.assertEqual( serialized[ 'type_id' ], 'dataset-' + serialized[ 'id' ] )
+
+        self.assertIsInstance( serialized[ 'resubmitted' ], bool )
+        self.assertIsInstance( serialized[ 'display_apps' ], list )
+        self.assertIsInstance( serialized[ 'display_types' ], list )
+        self.assertIsInstance( serialized[ 'visualizations' ], list )
+
+        # remapped
+        self.assertNullableBasestring( serialized[ 'misc_info' ] )
+        self.assertNullableBasestring( serialized[ 'misc_blurb' ] )
+        self.assertNullableBasestring( serialized[ 'file_ext' ] )
+        self.assertNullableBasestring( serialized[ 'file_path' ] )
+
+        # identities
+        self.assertEqual( serialized[ 'model_class' ], 'HistoryDatasetAssociation' )
+        self.assertEqual( serialized[ 'history_content_type' ], 'dataset' )
+        self.assertEqual( serialized[ 'hda_ldda' ], 'hda' )
+        self.assertEqual( serialized[ 'accessible' ], True )
+        self.assertEqual( serialized[ 'api_type' ], 'file' )
+        self.assertEqual( serialized[ 'type' ], 'file' )
+
+        self.assertIsInstance( serialized[ 'url' ], string_types )
+        self.assertIsInstance( serialized[ 'urls' ], dict )
+        self.assertIsInstance( serialized[ 'download_url' ], string_types )
+
+        self.log( 'serialized should jsonify well' )
+        self.assertIsJsonifyable( serialized )
+
+    def test_file_name_serializers( self ):
+        hda = self._create_vanilla_hda()
+        owner = hda.history.user
+        keys = [ 'file_name' ]
+
+        self.log( 'file_name should be included if app configured to do so' )
+        # this is on by default in galaxy_mock
+        self.assertTrue( self.app.config.expose_dataset_path )
+        # ... so non-admin user CAN get file_name
+        serialized = self.hda_serializer.serialize( hda, keys, user=None )
+        self.assertTrue( 'file_name' in serialized )
+        serialized = self.hda_serializer.serialize( hda, keys, user=owner )
+        self.assertTrue( 'file_name' in serialized )
+
+        self.log( 'file_name should be skipped for non-admin when not exposed by config' )
+        self.app.config.expose_dataset_path = False
+        serialized = self.hda_serializer.serialize( hda, keys, user=None )
+        self.assertFalse( 'file_name' in serialized )
+        serialized = self.hda_serializer.serialize( hda, keys, user=owner )
+        self.assertFalse( 'file_name' in serialized )
+
+        self.log( 'file_name should be sent for admin in either case' )
+        serialized = self.hda_serializer.serialize( hda, keys, user=self.admin_user )
+        self.assertTrue( 'file_name' in serialized )
+        self.app.config.expose_dataset_path = True
+        serialized = self.hda_serializer.serialize( hda, keys, user=self.admin_user )
+        self.assertTrue( 'file_name' in serialized )
+
+    def test_serializing_inaccessible( self ):
+        owner = self.user_manager.create( **user2_data )
+        non_owner = self.user_manager.create( **user3_data )
+
+        history1 = self.history_manager.create( name='history1', user=owner )
+        dataset1 = self.dataset_manager.create()
+        item1 = self.hda_manager.create( history1, dataset1 )
+
+        keys_in_inaccessible_view = self.hda_serializer._view_to_keys( 'inaccessible' )
+
+        self.log( 'an item inaccessible to the user should serialize with only the inaccessible view keys' )
+        self.dataset_manager.permissions.set_private_to_one_user( dataset1, owner )
+        # request a different view plus extra keys to show they are ignored
+        serialized = self.hda_serializer.serialize_to_view( item1, view='detailed',
+            keys=[ 'file_path', 'visualizations' ], user=non_owner )
+        self.assertEqual( sorted( keys_in_inaccessible_view ), sorted( serialized.keys() ) )
+
+    # TODO: test extra_files_path as well
+
+
+# =============================================================================
+class HDADeserializerTestCase( HDATestCase ):
+
+    def set_up_managers( self ):
+        super( HDADeserializerTestCase, self ).set_up_managers()
+        self.hda_deserializer = hdas.HDADeserializer( self.app )
+
+    def test_deserialize_delete( self ):
+        hda = self._create_vanilla_hda()
+
+        self.log( 'should raise when deserializing deleted from non-bool' )
+        self.assertFalse( hda.deleted )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.hda_deserializer.deserialize, hda, { 'deleted': None } )
+        self.assertFalse( hda.deleted )
+        self.log( 'should be able to deserialize deleted from True' )
+        self.hda_deserializer.deserialize( hda, { 'deleted': True } )
+        self.assertTrue( hda.deleted )
+        self.log( 'should be able to reverse by deserializing deleted from False' )
+        self.hda_deserializer.deserialize( hda, { 'deleted': False } )
+        self.assertFalse( hda.deleted )
+
+    def test_deserialize_purge( self ):
+        hda = self._create_vanilla_hda()
+
+        self.log( 'should raise when deserializing purged from non-bool' )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.hda_deserializer.deserialize, hda, { 'purged': None } )
+        self.assertFalse( hda.purged )
+        self.log( 'should be able to deserialize purged from True' )
+        self.hda_deserializer.deserialize( hda, { 'purged': True } )
+        self.assertTrue( hda.purged )
+        # TODO: should this raise an error?
+        self.log( 'should NOT be able to deserialize purged from False (will remain True)' )
+        self.hda_deserializer.deserialize( hda, { 'purged': False } )
+        self.assertTrue( hda.purged )
+
+    def test_deserialize_visible( self ):
+        hda = self._create_vanilla_hda()
+
+        self.log( 'should raise when deserializing from non-bool' )
+        self.assertTrue( hda.visible )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.hda_deserializer.deserialize, hda, { 'visible': 'None' } )
+        self.assertTrue( hda.visible )
+        self.log( 'should be able to deserialize from False' )
+        self.hda_deserializer.deserialize( hda, { 'visible': False } )
+        self.assertFalse( hda.visible )
+        self.log( 'should be able to reverse by deserializing from True' )
+        self.hda_deserializer.deserialize( hda, { 'visible': True } )
+        self.assertTrue( hda.visible )
+
+    def test_deserialize_genome_build( self ):
+        hda = self._create_vanilla_hda()
+
+        self.assertIsInstance( hda.dbkey, string_types )
+        self.log( 'should deserialize to "?" from None' )
+        self.hda_deserializer.deserialize( hda, { 'genome_build': None } )
+        self.assertEqual( hda.dbkey, '?' )
+        self.log( 'should raise when deserializing from non-string' )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.hda_deserializer.deserialize, hda, { 'genome_build': 12 } )
+        self.log( 'should be able to deserialize from unicode' )
+        date_palm = u'نخيل التمر'
+        self.hda_deserializer.deserialize( hda, { 'genome_build': date_palm } )
+        self.assertEqual( hda.dbkey, date_palm )
+        self.log( 'should be deserializable from empty string' )
+        self.hda_deserializer.deserialize( hda, { 'genome_build': '' } )
+        self.assertEqual( hda.dbkey, '' )
+
+    def test_deserialize_name( self ):
+        hda = self._create_vanilla_hda()
+
+        self.log( 'should raise when deserializing from non-string' )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.hda_deserializer.deserialize, hda, { 'name': True } )
+        self.log( 'should raise when deserializing from None' )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.hda_deserializer.deserialize, hda, { 'name': None } )
+        # self.log( 'should deserialize to empty string from None' )
+        # self.hda_deserializer.deserialize( hda, { 'name': None } )
+        # self.assertEqual( hda.name, '' )
+        self.log( 'should be able to deserialize from unicode' )
+        olive = u'ελιά'
+        self.hda_deserializer.deserialize( hda, { 'name': olive } )
+        self.assertEqual( hda.name, olive )
+        self.log( 'should be deserializable from empty string' )
+        self.hda_deserializer.deserialize( hda, { 'name': '' } )
+        self.assertEqual( hda.name, '' )
+
+    def test_deserialize_info( self ):
+        hda = self._create_vanilla_hda()
+
+        self.log( 'should raise when deserializing from non-string' )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.hda_deserializer.deserialize, hda, { 'info': True } )
+        self.log( 'should raise when deserializing from None' )
+        self.assertRaises( exceptions.RequestParameterInvalidException,
+            self.hda_deserializer.deserialize, hda, { 'info': None } )
+        self.log( 'should be able to deserialize from unicode' )
+        rice = u'飯'
+        self.hda_deserializer.deserialize( hda, { 'info': rice } )
+        self.assertEqual( hda.info, rice )
+        self.log( 'should be deserializable from empty string' )
+        self.hda_deserializer.deserialize( hda, { 'info': '' } )
+        self.assertEqual( hda.info, '' )
+
+
+# =============================================================================
+class HDAFilterParserTestCase( HDATestCase ):
+
+    def set_up_managers( self ):
+        super( HDAFilterParserTestCase, self ).set_up_managers()
+        self.filter_parser = hdas.HDAFilterParser( self.app )
+
+    def test_parsable( self ):
+        self.log( 'the following filters should be parsable' )
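+        # the two assertion helpers mirror the two kinds of parsed filters:
+        # ORM filters become SQLAlchemy criteria on the query, while fn
+        # filters are python callables applied to the results afterwards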
+        # base
+        self.assertORMFilter( self.filter_parser.parse_filter( 'id', 'in', [ 1, 2 ] ) )
+        encoded_id_string = ','.join([ self.app.security.encode_id( id_ ) for id_ in [ 1, 2 ] ] )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'encoded_id', 'in', encoded_id_string ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'create_time', 'le', '2015-03-15' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'create_time', 'ge', '2015-03-15' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'update_time', 'le', '2015-03-15' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'update_time', 'ge', '2015-03-15' ) )
+        # purgable
+        self.assertORMFilter( self.filter_parser.parse_filter( 'deleted', 'eq', True ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'purged', 'eq', True ) )
+        # dataset association
+        self.assertORMFilter( self.filter_parser.parse_filter( 'name', 'eq', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'name', 'contains', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'name', 'like', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'state', 'eq', 'ok' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'state', 'in', [ 'queued', 'running' ] ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'visible', 'eq', True ) )
+        self.assertFnFilter( self.filter_parser.parse_filter( 'genome_build', 'eq', 'wot' ) )
+        self.assertFnFilter( self.filter_parser.parse_filter( 'genome_build', 'contains', 'wot' ) )
+        self.assertFnFilter( self.filter_parser.parse_filter( 'data_type', 'eq', 'wot' ) )
+        self.assertFnFilter( self.filter_parser.parse_filter( 'data_type', 'isinstance', 'wot' ) )
+        # taggable
+        self.assertFnFilter( self.filter_parser.parse_filter( 'tag', 'eq', 'wot' ) )
+        self.assertFnFilter( self.filter_parser.parse_filter( 'tag', 'has', 'wot' ) )
+        # annotatable
+        self.assertFnFilter( self.filter_parser.parse_filter( 'annotation', 'has', 'wot' ) )
+
+#     def test_genome_build_filters( self ):
+#         pass
+
+#     def test_data_type_filters( self ):
+#         pass
+
+
+# =============================================================================
+if __name__ == '__main__':
+    # or more generally, nosetests test_resourcemanagers.py -s -v
+    unittest.main()
diff --git a/test/unit/managers/test_HDCAManager.py b/test/unit/managers/test_HDCAManager.py
new file mode 100644
index 0000000..500b1d0
--- /dev/null
+++ b/test/unit/managers/test_HDCAManager.py
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+import unittest
+
+from galaxy.managers import (
+    collections,
+    hdas,
+    hdcas
+)
+from galaxy.managers.datasets import DatasetManager
+from galaxy.managers.histories import HistoryManager
+
+from .base import (
+    BaseTestCase,
+    CreatesCollectionsMixin
+)
+
+# =============================================================================
+default_password = '123456'
+user2_data = dict( email='user2@user2.user2', username='user2', password=default_password )
+user3_data = dict( email='user3@user3.user3', username='user3', password=default_password )
+
+
+# =============================================================================
+class HDCATestCase( BaseTestCase, CreatesCollectionsMixin ):
+
+    def set_up_managers( self ):
+        super( HDCATestCase, self ).set_up_managers()
+        self.hdca_manager = hdcas.HDCAManager( self.app )
+        self.hda_manager = hdas.HDAManager( self.app )
+        self.history_manager = HistoryManager( self.app )
+        self.dataset_manager = DatasetManager( self.app )
+        self.collection_manager = collections.DatasetCollectionManager( self.app )
+
+    def _create_history( self, user_data=None, **kwargs ):
+        user_data = user_data or user2_data
+        owner = self.user_manager.create( **user_data )
+        return self.history_manager.create( user=owner, **kwargs )
+
+    def _create_hda( self, history, dataset=None, **kwargs ):
+        if not dataset:
+            dataset = self.hda_manager.dataset_manager.create()
+        hda = self.hda_manager.create( history=history, dataset=dataset, **kwargs )
+        return hda
+
+    def _create_list_hdca( self, hdas, history=None, name='test collection', **kwargs ):
+        if not history:
+            history = self._create_history()
+        for i, hda in enumerate( hdas ):
+            if not isinstance( hda, self.hda_manager.model_class ):
+                hdas[ i ] = self._create_hda( history, **hda )
+        hdca = self.collection_manager.create( self.trans, history, name, 'list',
+            element_identifiers=self.build_element_identifiers( hdas ) )
+        return hdca
+
+
+# =============================================================================
+# web.url_for doesn't work well in the unit-test framework, so replace it with a stub
+def testable_url_for(*a, **k):
+    return '(fake url): %s, %s' % ( a, k )
+
+
+hdcas.HDCASerializer.url_for = staticmethod( testable_url_for )
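+# (the same module-level stubbing pattern used in test_HDAManager.py)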
+
+
+class HDCASerializerTestCase( HDCATestCase ):
+
+    def set_up_managers( self ):
+        super( HDCASerializerTestCase, self ).set_up_managers()
+        self.hdca_serializer = hdcas.HDCASerializer( self.app )
+
+    def test_views( self ):
+        serializer = self.hdca_serializer
+        item = self._create_list_hdca([
+            dict( name=( "hda-{0}".format( i ) ), hid=i ) for i in range( 5 )
+        ])
+
+        self.log( 'should have a summary view' )
+        summary_view = serializer.serialize_to_view( item, view='summary' )
+        self.assertKeys( summary_view, serializer.views[ 'summary' ] )
+
+        self.log( 'should have the summary view as default view' )
+        default_view = serializer.serialize_to_view( item, default_view='summary' )
+        self.assertKeys( default_view, serializer.views[ 'summary' ] )
+
+        self.log( 'should have a detailed view' )
+        detailed_view = serializer.serialize_to_view( item, view='detailed' )
+        self.assertKeys( detailed_view, serializer.views[ 'detailed' ] )
+
+        self.log( 'should have a serializer for all serializable keys' )
+        for key in serializer.serializable_keyset:
+            instantiated_attribute = getattr( item, key, None )
+            if not ( ( key in serializer.serializers ) or
+                   ( isinstance( instantiated_attribute, self.TYPES_NEEDING_NO_SERIALIZERS ) ) ):
+                self.fail( 'no serializer for: %s (%s)' % ( key, instantiated_attribute ) )
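+        # as in test_HDAManager.py, the for/else's else clause runs only when
+        # the loop completes without self.fail being triggered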
+        else:
+            self.assertTrue( True, 'all serializable keys have a serializer' )
+
+    def test_views_and_keys( self ):
+        serializer = self.hdca_serializer
+        item = self._create_list_hdca([
+            dict( name=( "hda-{0}".format( i ) ), hid=i ) for i in range( 5 )
+        ])
+        summary_plus_key = [ 'elements' ]
+        only_keys = [ 'id', 'populated_state_message' ]
+
+        self.log( 'should be able to use keys with views' )
+        serialized = serializer.serialize_to_view( item, view='summary', keys=summary_plus_key )
+        self.assertKeys( serialized, serializer.views[ 'summary' ] + summary_plus_key )
+
+        self.log( 'should be able to use keys on their own' )
+        serialized = serializer.serialize_to_view( item, keys=only_keys )
+        self.assertKeys( serialized, only_keys )
+
+
+# =============================================================================
+if __name__ == '__main__':
+    # or more generally, nosetests test_resourcemanagers.py -s -v
+    unittest.main()
diff --git a/test/unit/managers/test_HistoryContentsManager.py b/test/unit/managers/test_HistoryContentsManager.py
new file mode 100644
index 0000000..8a3c64c
--- /dev/null
+++ b/test/unit/managers/test_HistoryContentsManager.py
@@ -0,0 +1,347 @@
+# -*- coding: utf-8 -*-
+"""
+"""
+import datetime
+import random
+import unittest
+
+from sqlalchemy import column, desc, false, true
+from sqlalchemy.sql import text
+
+from galaxy.managers import collections, hdas, history_contents
+from galaxy.managers.histories import HistoryManager
+
+from .base import BaseTestCase
+from .base import CreatesCollectionsMixin
+
+default_password = '123456'
+user2_data = dict( email='user2@user2.user2', username='user2', password=default_password )
+user3_data = dict( email='user3@user3.user3', username='user3', password=default_password )
+user4_data = dict( email='user4@user4.user4', username='user4', password=default_password )
+
+
+# =============================================================================
+class HistoryAsContainerBaseTestCase( BaseTestCase, CreatesCollectionsMixin ):
+
+    def set_up_managers( self ):
+        super( HistoryAsContainerBaseTestCase, self ).set_up_managers()
+        self.history_manager = HistoryManager( self.app )
+        self.hda_manager = hdas.HDAManager( self.app )
+        self.collection_manager = collections.DatasetCollectionManager( self.app )
+        self.contents_manager = history_contents.HistoryContentsManager( self.app )
+
+    def add_hda_to_history( self, history, **kwargs ):
+        dataset = self.hda_manager.dataset_manager.create()
+        hda = self.hda_manager.create( history=history, dataset=dataset, **kwargs )
+        return hda
+
+    def add_list_collection_to_history( self, history, hdas, name='test collection', **kwargs ):
+        hdca = self.collection_manager.create( self.trans, history, name, 'list',
+            element_identifiers=self.build_element_identifiers( hdas ) )
+        return hdca
+
+
+# =============================================================================
+class HistoryAsContainerTestCase( HistoryAsContainerBaseTestCase ):
+
+    def test_contents( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+
+        self.log( "calling contents on an empty history should return an empty list" )
+        self.assertEqual( [], list( self.contents_manager.contents( history ) ) )
+
+        self.log( "calling contents on an history with hdas should return those in order of their hids" )
+        hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]
+        random.shuffle( hdas )
+        ordered_hda_contents = list( self.contents_manager.contents( history ) )
+        self.assertEqual( [hda.hid for hda in ordered_hda_contents], [ 1, 2, 3 ] )
+
+        self.log( "calling contents on an history with both hdas and collections should return both" )
+        hdca = self.add_list_collection_to_history( history, hdas )
+        all_contents = list( self.contents_manager.contents( history ) )
+        self.assertEqual( all_contents, list( ordered_hda_contents ) + [ hdca ] )
+
+    def test_contained( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+
+        self.log( "calling contained on an empty history should return an empty list" )
+        self.assertEqual( [], list( self.contents_manager.contained( history ) ) )
+
+        self.log( "calling contained on an history with both hdas and collections should return only hdas" )
+        hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]
+        self.add_list_collection_to_history( history, hdas )
+        self.assertEqual( list( self.contents_manager.contained( history ) ), hdas )
+
+    def test_subcontainers( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+
+        self.log( "calling subcontainers on an empty history should return an empty list" )
+        self.assertEqual( [], list( self.contents_manager.subcontainers( history ) ) )
+
+        self.log( "calling subcontainers on an history with both hdas and collections should return only collections" )
+        hdas = [ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ]
+        hdca = self.add_list_collection_to_history( history, hdas )
+        subcontainers = list( self.contents_manager.subcontainers( history ) )
+        self.assertEqual( subcontainers, [ hdca ] )
+
+    def test_limit_and_offset( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+        contents = []
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[:3] ) )
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[4:6] ) )
+
+        self.log( "should be able to limit and offset" )
+        results = self.contents_manager.contents( history )
+        self.assertEqual( results, contents )
+
+        self.assertEqual( self.contents_manager.contents( history, limit=4 ), contents[0:4] )
+        self.assertEqual( self.contents_manager.contents( history, offset=3 ), contents[3:] )
+        self.assertEqual( self.contents_manager.contents( history, limit=4, offset=4 ), contents[4:8] )
+
+        self.assertEqual( self.contents_manager.contents( history, limit=0 ), [] )
+        self.assertEqual( self.contents_manager.contents( history, offset=len( contents ) ), [] )
+
+    def test_orm_filtering( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+        contents = []
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[:3] ) )
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[4:6] ) )
+
+        self.log( "should allow filter on deleted" )
+        self.hda_manager.delete( contents[1] )
+        self.hda_manager.delete( contents[4] )
+        contents[6].deleted = True
+        deleted = [ contents[1], contents[4], contents[6] ]
+        self.app.model.context.flush()
+
+        # TODO: cross db compat?
+        filters = [ text( 'deleted = 1' ) ]
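+        # the textual filter assumes a backend storing booleans as 0/1 (e.g.
+        # sqlite); on postgresql the literal would be 'deleted = true', which
+        # is why the ORM-level comparisons below are the portable form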
+        self.assertEqual( self.contents_manager.contents( history, filters=filters ), deleted )
+
+        # note: sqlalchemy can use the first model in the union (HDA) for columns across the whole union
+        HDA = self.hda_manager.model_class
+        self.assertEqual( self.contents_manager.contents( history,
+            filters=[ HDA.deleted == true() ] ), deleted )
+        filter_limited_contents = self.contents_manager.contents( history,
+            filters=[ HDA.deleted == true() ], limit=2, offset=1 )
+        self.assertEqual( filter_limited_contents, deleted[1:] )
+
+        self.log( "should allow filter on visible" )
+        contents[2].visible = False
+        contents[5].visible = False
+        contents[6].visible = False
+        invisible = [ contents[2], contents[5], contents[6] ]
+        self.app.model.context.flush()
+
+        filters = [ text( 'visible = 0' ) ]
+        self.assertEqual( self.contents_manager.contents( history, filters=filters ), invisible )
+        self.assertEqual( self.contents_manager.contents( history,
+            filters=[ HDA.visible == false() ] ), invisible )
+        filter_limited_contents = self.contents_manager.contents( history,
+            filters=[ HDA.visible == false() ], limit=2, offset=1 )
+        self.assertEqual( filter_limited_contents, invisible[1:] )
+
+        self.log( "should allow filtering more than one attribute" )
+        deleted_and_invisible = [ contents[6] ]
+
+        filters = [ text( 'deleted = 1' ), text( 'visible = 0' ) ]
+        self.assertEqual( self.contents_manager.contents( history, filters=filters ), deleted_and_invisible )
+        self.assertEqual( self.contents_manager.contents( history,
+            filters=[ HDA.deleted == true(), HDA.visible == false() ] ), deleted_and_invisible )
+        offset_too_far = self.contents_manager.contents( history,
+            filters=[ HDA.deleted == true(), HDA.visible == false() ], limit=2, offset=1 )
+        self.assertEqual( offset_too_far, [] )
+
+        self.log( "should allow filtering using like" )
+        # find 'hda-4'
+        self.assertEqual( [ contents[4] ],
+            self.contents_manager.contents( history, filters=[ HDA.name.like( '%-4' ) ] ) )
+        # the collections added above have the default name 'test collection'
+        self.assertEqual( self.contents_manager.subcontainers( history ),
+            self.contents_manager.contents( history, filters=[ HDA.name.like( '%collect%' ) ] ) )
+
+    def test_order_by( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+        contents = []
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[:3] ) )
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[4:6] ) )
+
+        self.log( "should default to hid order_by" )
+        self.assertEqual( self.contents_manager.contents( history ), contents )
+
+        self.log( "should allow asc, desc order_by" )
+        self.assertEqual( self.contents_manager.contents( history, order_by=desc( 'hid' ) ), contents[::-1] )
+
+        def get_create_time( item ):
+            create_time = getattr( item, 'create_time', None )
+            if not create_time:
+                create_time = item.collection.create_time
+            return create_time
+
+        self.log( "should allow create_time order_by" )
+        newest_first = sorted( contents, key=get_create_time, reverse=True )
+        results = self.contents_manager.contents( history, order_by=desc( 'create_time' ) )
+        self.assertEqual( newest_first, results )
+
+        self.log( "should allow update_time order_by" )
+        # change the oldest created to update the update_time
+        contents[0].name = 'zany and/or wacky'
+        self.app.model.context.flush()
+        results = self.contents_manager.contents( history, order_by=desc( 'update_time' ) )
+        self.assertEqual( contents[0], results[0] )
+
+    def test_update_time_filter( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+        contents = []
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[:3] ) )
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[4:6] ) )
+
+        self.log( "should allow filtering by update_time" )
+        # for collections, update_time lives on the inner DatasetCollection
+        # (hdca.collection), so mutate that to bump it
+        contents[3].collection.populated_state = 'big ball of mud'
+        self.app.model.context.flush()
+
+        def get_update_time( item ):
+            update_time = getattr( item, 'update_time', None )
+            if not update_time:
+                update_time = item.collection.update_time
+            return update_time
+
+        update_time = get_update_time( contents[3] )
+        results = self.contents_manager.contents( history, filters=[ column( 'update_time' ) >= update_time ] )
+        self.assertEqual( results, [ contents[3] ] )
+
+    def test_filtered_counting( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+        contents = []
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[:3] ) )
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[4:6] ) )
+
+        self.log( "should show correct count with filters" )
+        self.hda_manager.delete( contents[1] )
+        self.hda_manager.delete( contents[4] )
+        contents[6].deleted = True
+        self.app.model.context.flush()
+
+        contents[2].visible = False
+        contents[5].visible = False
+        contents[6].visible = False
+        self.app.model.context.flush()
+
+        HDA = self.hda_manager.model_class
+        self.assertEqual( self.contents_manager.contents_count( history, filters=[ HDA.deleted == true() ] ), 3 )
+        filters = [ text( 'visible = 0' ) ]
+        self.assertEqual( self.contents_manager.contents_count( history, filters=filters ), 3 )
+
+        filters = [ text( 'deleted = 1' ), text( 'visible = 0' ) ]
+        self.assertEqual( self.contents_manager.contents_count( history, filters=filters ), 1 )
+
+    def test_type_id( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history = self.history_manager.create( name='history', user=user2 )
+        contents = []
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 3 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[:3] ) )
+        contents.extend([ self.add_hda_to_history( history, name=( 'hda-' + str( x ) ) ) for x in range( 4, 6 ) ])
+        contents.append( self.add_list_collection_to_history( history, contents[4:6] ) )
+
+        self.log( "should be able to use eq and in with hybrid type_id" )
+        filters = [ column( 'type_id' ) == u'dataset-2' ]
+        self.assertEqual( self.contents_manager.contents( history, filters=filters ), [ contents[1] ])
+        filters = [ column( 'type_id' ).in_([ u'dataset-1', u'dataset-3' ]) ]
+        self.assertEqual( self.contents_manager.contents( history, filters=filters ), [ contents[0], contents[2] ])
+        filters = [ column( 'type_id' ) == u'dataset_collection-1' ]
+        self.assertEqual( self.contents_manager.contents( history, filters=filters ), [ contents[3] ])
+        filters = [ column( 'type_id' ).in_([ u'dataset-2', u'dataset_collection-2' ]) ]
+        self.assertEqual( self.contents_manager.contents( history, filters=filters ), [ contents[1], contents[6] ])
+
+
+class HistoryContentsFilterParserTestCase( HistoryAsContainerBaseTestCase ):
+
+    def set_up_managers( self ):
+        super( HistoryContentsFilterParserTestCase, self ).set_up_managers()
+        self.filter_parser = history_contents.HistoryContentsFilters( self.app )
+
+    def test_date_parser( self ):
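+        # parse_date normalizes every accepted input to a
+        # 'YYYY-MM-DD HH:MM:SS[.mmm]' string, as the assertions below show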
+        # -- seconds and milliseconds from epoch
+        self.log( 'should be able to parse epoch seconds' )
+        self.assertEqual( self.filter_parser.parse_date( '1234567890' ),
+            datetime.datetime.fromtimestamp( 1234567890 ).isoformat( sep=' ' ) )
+
+        self.log( 'should be able to parse floating point epoch seconds.milliseconds' )
+        self.assertEqual( self.filter_parser.parse_date( '1234567890.123' ),
+            datetime.datetime.fromtimestamp( 1234567890.123 ).isoformat( sep=' ' ) )
+
+        self.log( 'should error if bad epoch is used' )
+        self.assertRaises( ValueError, self.filter_parser.parse_date, '0x000234' )
+
+        # -- datetime strings
+        self.log( 'should allow date alone' )
+        self.assertEqual( self.filter_parser.parse_date( '2009-02-13' ), '2009-02-13' )
+
+        self.log( 'should allow date and time' )
+        self.assertEqual( self.filter_parser.parse_date( '2009-02-13 18:13:00' ), '2009-02-13 18:13:00' )
+        self.assertEqual( self.filter_parser.parse_date( '2009-02-13T18:13:00' ), '2009-02-13 18:13:00' )
+        self.assertEqual( self.filter_parser.parse_date( '2009-02-13T18:13:00Z' ), '2009-02-13 18:13:00' )
+
+        self.log( 'should allow date and time with milliseconds' )
+        self.assertEqual( self.filter_parser.parse_date( '2009-02-13 18:13:00.123' ), '2009-02-13 18:13:00.123' )
+        self.assertEqual( self.filter_parser.parse_date( '2009-02-13T18:13:00.123' ), '2009-02-13 18:13:00.123' )
+        self.assertEqual( self.filter_parser.parse_date( '2009-02-13T18:13:00.123Z' ), '2009-02-13 18:13:00.123' )
+
+        self.log( 'should error if timezone is added' )
+        self.assertRaises( ValueError, self.filter_parser.parse_date, '2009-02-13T18:13:00.123+0700' )
+
+        self.log( 'should error if locale is used' )
+        self.assertRaises( ValueError, self.filter_parser.parse_date, 'Fri Feb 13 18:31:30 2009' )
+
+        self.log( 'should error if wrong milliseconds format is used' )
+        self.assertRaises( ValueError, self.filter_parser.parse_date, '2009-02-13 18:13:00.' )
+        self.assertRaises( ValueError, self.filter_parser.parse_date, '2009-02-13 18:13:00.1234567' )
+
+
+if __name__ == '__main__':
+    # or more generally, nosetests test_resourcemanagers.py -s -v
+    unittest.main()
diff --git a/test/unit/managers/test_HistoryManager.py b/test/unit/managers/test_HistoryManager.py
new file mode 100644
index 0000000..d1c6a2f
--- /dev/null
+++ b/test/unit/managers/test_HistoryManager.py
@@ -0,0 +1,907 @@
+# -*- coding: utf-8 -*-
+"""
+"""
+import os
+import sys
+import unittest
+
+import sqlalchemy
+from six import string_types
+from sqlalchemy import true
+
+from galaxy import (
+    exceptions,
+    model
+)
+from galaxy.managers import (
+    base,
+    hdas
+)
+from galaxy.managers.histories import (
+    HistoryDeserializer,
+    HistoryFilters,
+    HistoryManager,
+    HistorySerializer
+)
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock
+
+from .base import BaseTestCase
+
+default_password = '123456'
+user2_data = dict( email='user2@user2.user2', username='user2', password=default_password )
+user3_data = dict( email='user3@user3.user3', username='user3', password=default_password )
+user4_data = dict( email='user4@user4.user4', username='user4', password=default_password )
+
+
+class HistoryManagerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( HistoryManagerTestCase, self ).set_up_managers()
+        self.history_manager = HistoryManager( self.app )
+        self.hda_manager = hdas.HDAManager( self.app )
+
+    def add_hda_to_history( self, history, **kwargs ):
+        dataset = self.hda_manager.dataset_manager.create()
+        hda = self.hda_manager.create( history=history, dataset=dataset, **kwargs )
+        return hda
+
+    def test_base( self ):
+        user2 = self.user_manager.create( **user2_data )
+        user3 = self.user_manager.create( **user3_data )
+
+        self.log( "should be able to create a new history" )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        self.assertIsInstance( history1, model.History )
+        self.assertEqual( history1.name, 'history1' )
+        self.assertEqual( history1.user, user2 )
+        self.assertEqual( history1, self.trans.sa_session.query( model.History ).get( history1.id ) )
+        self.assertEqual( history1,
+            self.trans.sa_session.query( model.History ).filter( model.History.name == 'history1' ).one() )
+        self.assertEqual( history1,
+            self.trans.sa_session.query( model.History ).filter( model.History.user == user2 ).one() )
+
+        history2 = self.history_manager.copy( history1, user=user3 )
+
+        self.log( "should be able to query" )
+        histories = self.trans.sa_session.query( model.History ).all()
+        self.assertEqual( self.history_manager.one( filters=( model.History.id == history1.id ) ), history1 )
+        self.assertEqual( self.history_manager.list(), histories )
+        self.assertEqual( self.history_manager.by_id( history1.id ), history1 )
+        self.assertEqual( self.history_manager.by_ids( [ history2.id, history1.id ] ), [ history2, history1 ] )
+
+        self.log( "should be able to limit and offset" )
+        self.assertEqual( self.history_manager.list( limit=1 ), histories[0:1] )
+        self.assertEqual( self.history_manager.list( offset=1 ), histories[1:] )
+        self.assertEqual( self.history_manager.list( limit=1, offset=1 ), histories[1:2] )
+
+        self.assertEqual( self.history_manager.list( limit=0 ), [] )
+        self.assertEqual( self.history_manager.list( offset=3 ), [] )
+
+        self.log( "should be able to order" )
+        history3 = self.history_manager.create( name="history3", user=user2 )
+        name_first_then_time = ( model.History.name, sqlalchemy.desc( model.History.create_time ) )
+        self.assertEqual( self.history_manager.list( order_by=name_first_then_time ),
+            [ history2, history1, history3 ] )
+
+    def test_copy( self ):
+        user2 = self.user_manager.create( **user2_data )
+        user3 = self.user_manager.create( **user3_data )
+
+        self.log( "should be able to copy a history (and it's hdas)" )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        tags = [ u'tag-one' ]
+        annotation = u'history annotation'
+        self.history_manager.set_tags( history1, tags, user=user2 )
+        self.history_manager.annotate( history1, annotation, user=user2 )
+
+        hda = self.add_hda_to_history( history1, name='wat' )
+        hda_tags = [ u'tag-one', u'tag-two' ]
+        hda_annotation = u'annotation'
+        self.hda_manager.set_tags( hda, hda_tags, user=user2 )
+        self.hda_manager.annotate( hda, hda_annotation, user=user2 )
+
+        history2 = self.history_manager.copy( history1, user=user3 )
+        self.assertIsInstance( history2, model.History )
+        self.assertEqual( history2.user, user3 )
+        self.assertEqual( history2, self.trans.sa_session.query( model.History ).get( history2.id ) )
+        self.assertEqual( history2.name, history1.name )
+        self.assertNotEqual( history2, history1 )
+
+        copied_hda = history2.datasets[0]
+        copied_hda_tags = self.hda_manager.get_tags( copied_hda )
+        self.assertEqual( sorted( hda_tags ), sorted( copied_hda_tags ) )
+        copied_hda_annotation = self.hda_manager.annotation( copied_hda )
+        self.assertEqual( hda_annotation, copied_hda_annotation )
+
+    def test_has_user( self ):
+        owner = self.user_manager.create( **user2_data )
+        non_owner = self.user_manager.create( **user3_data )
+
+        item1 = self.history_manager.create( user=owner )
+        item2 = self.history_manager.create( user=owner )
+        self.history_manager.create( user=non_owner )
+
+        self.log( "should be able to list items by user" )
+        user_histories = self.history_manager.by_user( owner )
+        self.assertEqual( user_histories, [ item1, item2 ] )
+
+    def test_ownable( self ):
+        owner = self.user_manager.create( **user2_data )
+        non_owner = self.user_manager.create( **user3_data )
+
+        item1 = self.history_manager.create( user=owner )
+
+        self.log( "should be able to poll whether a given user owns an item" )
+        self.assertTrue( self.history_manager.is_owner( item1, owner ) )
+        self.assertFalse( self.history_manager.is_owner( item1, non_owner ) )
+
+        self.log( "should raise an error when checking ownership with non-owner" )
+        self.assertRaises( exceptions.ItemOwnershipException,
+            self.history_manager.error_unless_owner, item1, non_owner )
+        self.assertRaises( exceptions.ItemOwnershipException,
+            self.history_manager.get_owned, item1.id, non_owner )
+
+        self.log( "should not raise an error when checking ownership with owner" )
+        self.assertEqual( self.history_manager.error_unless_owner( item1, owner ), item1 )
+        self.assertEqual( self.history_manager.get_owned( item1.id, owner ), item1 )
+
+        self.log( "should not raise an error when checking ownership with admin" )
+        self.assertTrue( self.history_manager.is_owner( item1, self.admin_user ) )
+        self.assertEqual( self.history_manager.error_unless_owner( item1, self.admin_user ), item1 )
+        self.assertEqual( self.history_manager.get_owned( item1.id, self.admin_user ), item1 )
+
+    def test_accessible( self ):
+        owner = self.user_manager.create( **user2_data )
+        item1 = self.history_manager.create( user=owner )
+
+        non_owner = self.user_manager.create( **user3_data )
+
+        self.log( "should be inaccessible by default except to owner" )
+        self.assertTrue( self.history_manager.is_accessible( item1, owner ) )
+        self.assertTrue( self.history_manager.is_accessible( item1, self.admin_user ) )
+        self.assertFalse( self.history_manager.is_accessible( item1, non_owner ) )
+
+        self.log( "should raise an error when checking accessibility with non-owner" )
+        self.assertRaises( exceptions.ItemAccessibilityException,
+            self.history_manager.error_unless_accessible, item1, non_owner )
+        self.assertRaises( exceptions.ItemAccessibilityException,
+            self.history_manager.get_accessible, item1.id, non_owner )
+
+        self.log( "should not raise an error when checking ownership with owner" )
+        self.assertEqual( self.history_manager.error_unless_accessible( item1, owner ), item1 )
+        self.assertEqual( self.history_manager.get_accessible( item1.id, owner ), item1 )
+
+        self.log( "should not raise an error when checking ownership with admin" )
+        self.assertTrue( self.history_manager.is_accessible( item1, self.admin_user ) )
+        self.assertEqual( self.history_manager.error_unless_accessible( item1, self.admin_user ), item1 )
+        self.assertEqual( self.history_manager.get_accessible( item1.id, self.admin_user ), item1 )
+
+    def test_importable( self ):
+        owner = self.user_manager.create( **user2_data )
+        self.trans.set_user( owner )
+        non_owner = self.user_manager.create( **user3_data )
+
+        item1 = self.history_manager.create( user=owner )
+
+        self.log( "should not be importable by default" )
+        self.assertFalse( item1.importable )
+        self.assertIsNone( item1.slug )
+
+        self.log( "should be able to make importable (accessible by link) to all users" )
+        accessible = self.history_manager.make_importable( item1 )
+        self.assertEqual( accessible, item1 )
+        self.assertIsNotNone( accessible.slug )
+        self.assertTrue( accessible.importable )
+
+        for user in self.user_manager.list():
+            self.assertTrue( self.history_manager.is_accessible( accessible, user ) )
+
+        self.log( "should be able to make non-importable/inaccessible again" )
+        inaccessible = self.history_manager.make_non_importable( accessible )
+        self.assertEqual( inaccessible, accessible )
+        self.assertIsNotNone( inaccessible.slug )
+        self.assertFalse( inaccessible.importable )
+
+        self.assertTrue( self.history_manager.is_accessible( inaccessible, owner ) )
+        self.assertFalse( self.history_manager.is_accessible( inaccessible, non_owner ) )
+        self.assertTrue( self.history_manager.is_accessible( inaccessible, self.admin_user ) )
+
+    def test_published( self ):
+        owner = self.user_manager.create( **user2_data )
+        self.trans.set_user( owner )
+        non_owner = self.user_manager.create( **user3_data )
+
+        item1 = self.history_manager.create( user=owner )
+
+        self.log( "should not be published by default" )
+        self.assertFalse( item1.published )
+        self.assertIsNone( item1.slug )
+
+        self.log( "should be able to publish (listed publicly) to all users" )
+        published = self.history_manager.publish( item1 )
+        self.assertEqual( published, item1 )
+        self.assertTrue( published.published )
+        # note: publishing sets importable to true as well
+        self.assertTrue( published.importable )
+        self.assertIsNotNone( published.slug )
+
+        for user in self.user_manager.list():
+            self.assertTrue( self.history_manager.is_accessible( published, user ) )
+
+        self.log( "should be able to make non-importable/inaccessible again" )
+        unpublished = self.history_manager.unpublish( published )
+        self.assertEqual( unpublished, published )
+        self.assertFalse( unpublished.published )
+        # note: unpublishing does not make the history non-importable; that must be done separately
+        self.assertTrue( published.importable )
+        self.history_manager.make_non_importable( unpublished )
+        self.assertFalse( published.importable )
+        # note: the slug remains after unpublishing
+        self.assertIsNotNone( unpublished.slug )
+
+        self.assertTrue( self.history_manager.is_accessible( unpublished, owner ) )
+        self.assertFalse( self.history_manager.is_accessible( unpublished, non_owner ) )
+        self.assertTrue( self.history_manager.is_accessible( unpublished, self.admin_user ) )
+
+    def test_sharable( self ):
+        owner = self.user_manager.create( **user2_data )
+        self.trans.set_user( owner )
+        item1 = self.history_manager.create( user=owner )
+
+        non_owner = self.user_manager.create( **user3_data )
+        # third_party = self.user_manager.create( **user4_data )
+
+        self.log( "should be unshared by default" )
+        self.assertEqual( self.history_manager.get_share_assocs( item1 ), [] )
+        self.assertEqual( item1.slug, None )
+
+        self.log( "should be able to share with specific users" )
+        share_assoc = self.history_manager.share_with( item1, non_owner )
+        self.assertIsInstance( share_assoc, model.HistoryUserShareAssociation )
+        self.assertTrue( self.history_manager.is_accessible( item1, non_owner ) )
+        self.assertEqual(
+            len( self.history_manager.get_share_assocs( item1 ) ), 1 )
+        self.assertEqual(
+            len( self.history_manager.get_share_assocs( item1, user=non_owner ) ), 1 )
+        self.assertIsInstance( item1.slug, string_types )
+
+        self.log( "should be able to unshare with specific users" )
+        share_assoc = self.history_manager.unshare_with( item1, non_owner )
+        self.assertIsInstance( share_assoc, model.HistoryUserShareAssociation )
+        self.assertFalse( self.history_manager.is_accessible( item1, non_owner ) )
+        self.assertEqual( self.history_manager.get_share_assocs( item1 ), [] )
+        self.assertEqual(
+            self.history_manager.get_share_assocs( item1, user=non_owner ), [] )
+
+    # TODO: test slug formation
+
+    def test_anon( self ):
+        anon_user = None
+        self.trans.set_user( anon_user )
+
+        self.log( "should not allow access and owner for anon user on a history by another anon user (None)" )
+        anon_history1 = self.history_manager.create( user=None )
+        # deliberately do not set trans.history, so ownership cannot fall back to the session's current history
+        self.assertFalse( self.history_manager.is_owner( anon_history1, anon_user, current_history=self.trans.history ) )
+        self.assertFalse( self.history_manager.is_accessible( anon_history1, anon_user, current_history=self.trans.history ) )
+
+        self.log( "should allow access and owner for anon user on a history if it's the session's current history" )
+        anon_history2 = self.history_manager.create( user=anon_user )
+        self.trans.set_history( anon_history2 )
+        self.assertTrue( self.history_manager.is_owner( anon_history2, anon_user, current_history=self.trans.history ) )
+        self.assertTrue( self.history_manager.is_accessible( anon_history2, anon_user, current_history=self.trans.history ) )
+
+        self.log( "should not allow owner or access for anon user on someone elses history" )
+        owner = self.user_manager.create( **user2_data )
+        someone_elses = self.history_manager.create( user=owner )
+        self.assertFalse( self.history_manager.is_owner( someone_elses, anon_user, current_history=self.trans.history ) )
+        self.assertFalse( self.history_manager.is_accessible( someone_elses, anon_user, current_history=self.trans.history ) )
+
+        self.log( "should allow access for anon user if the history is published or importable" )
+        self.history_manager.make_importable( someone_elses )
+        self.assertTrue( self.history_manager.is_accessible( someone_elses, anon_user, current_history=self.trans.history ) )
+        self.history_manager.publish( someone_elses )
+        self.assertTrue( self.history_manager.is_accessible( someone_elses, anon_user, current_history=self.trans.history ) )
+
+    def test_delete_and_purge( self ):
+        user2 = self.user_manager.create( **user2_data )
+        self.trans.set_user( user2 )
+
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        self.trans.set_history( history1 )
+
+        self.log( "should allow deletion and undeletion" )
+        self.assertFalse( history1.deleted )
+
+        self.history_manager.delete( history1 )
+        self.assertTrue( history1.deleted )
+
+        self.history_manager.undelete( history1 )
+        self.assertFalse( history1.deleted )
+
+        self.log( "should allow purging" )
+        history2 = self.history_manager.create( name='history2', user=user2 )
+        self.history_manager.purge( history2 )
+        self.assertTrue( history2.purged )
+
+    def test_current( self ):
+        user2 = self.user_manager.create( **user2_data )
+        self.trans.set_user( user2 )
+
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        self.trans.set_history( history1 )
+        history2 = self.history_manager.create( name='history2', user=user2 )
+
+        self.log( "should be able to set or get the current history for a user" )
+        self.assertEqual( self.history_manager.get_current( self.trans ), history1 )
+        self.assertEqual( self.history_manager.set_current( self.trans, history2 ), history2 )
+        self.assertEqual( self.history_manager.get_current( self.trans ), history2 )
+        self.assertEqual( self.history_manager.set_current_by_id( self.trans, history1.id ), history1 )
+        self.assertEqual( self.history_manager.get_current( self.trans ), history1 )
+
+    def test_most_recently_used( self ):
+        user2 = self.user_manager.create( **user2_data )
+        self.trans.set_user( user2 )
+
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        self.trans.set_history( history1 )
+        history2 = self.history_manager.create( name='history2', user=user2 )
+
+        self.log( "should be able to get the most recently used (updated) history for a given user" )
+        self.assertEqual( self.history_manager.most_recent( user2 ), history2 )
+        self.history_manager.update( history1, { 'name': 'new name' })
+        self.assertEqual( self.history_manager.most_recent( user2 ), history1 )
+
+    def test_rating( self ):
+        user2 = self.user_manager.create( **user2_data )
+        manager = self.history_manager
+        item = manager.create( name='history1', user=user2 )
+
+        self.log( "should properly handle no ratings" )
+        self.assertEqual( manager.rating( item, user2 ), None )
+        self.assertEqual( manager.ratings( item ), [] )
+        self.assertEqual( manager.ratings_avg( item ), 0 )
+        self.assertEqual( manager.ratings_count( item ), 0 )
+
+        self.log( "should allow rating by user" )
+        manager.rate( item, user2, 5 )
+        self.assertEqual( manager.rating( item, user2 ), 5 )
+        self.assertEqual( manager.ratings( item ), [ 5 ] )
+        self.assertEqual( manager.ratings_avg( item ), 5 )
+        self.assertEqual( manager.ratings_count( item ), 1 )
+
+        self.log( "should allow updating" )
+        manager.rate( item, user2, 4 )
+        self.assertEqual( manager.rating( item, user2 ), 4 )
+        self.assertEqual( manager.ratings( item ), [ 4 ] )
+        self.assertEqual( manager.ratings_avg( item ), 4 )
+        self.assertEqual( manager.ratings_count( item ), 1 )
+
+        self.log( "should reflect multiple reviews" )
+        user3 = self.user_manager.create( **user3_data )
+        self.assertEqual( manager.rating( item, user3 ), None )
+        manager.rate( item, user3, 1 )
+        self.assertEqual( manager.rating( item, user3 ), 1 )
+        self.assertEqual( manager.ratings( item ), [ 4, 1 ] )
+        self.assertEqual( manager.ratings_avg( item ), 2.5 )
+        self.assertEqual( manager.ratings_count( item ), 2 )
+
+
+# =============================================================================
+# web.url_for doesn't work well in this test framework, so replace it with a stub
+def testable_url_for(*a, **k):
+    return '(fake url): %s, %s' % ( a, k )
+
+
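+# patch the stub in so the serializers under test emit predictable fake urls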
+HistorySerializer.url_for = staticmethod( testable_url_for )
+hdas.HDASerializer.url_for = staticmethod( testable_url_for )
+
+
+class HistorySerializerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( HistorySerializerTestCase, self ).set_up_managers()
+        self.history_manager = HistoryManager( self.app )
+        self.hda_manager = hdas.HDAManager( self.app )
+        self.history_serializer = HistorySerializer( self.app )
+
+    def test_views( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+
+        self.log( 'should have a summary view' )
+        summary_view = self.history_serializer.serialize_to_view( history1, view='summary' )
+        self.assertKeys( summary_view, self.history_serializer.views[ 'summary' ] )
+
+        self.log( 'should have a detailed view' )
+        detailed_view = self.history_serializer.serialize_to_view( history1, view='detailed' )
+        self.assertKeys( detailed_view, self.history_serializer.views[ 'detailed' ] )
+
+        self.log( 'should have the summary view as default view' )
+        default_view = self.history_serializer.serialize_to_view( history1, default_view='summary' )
+        self.assertKeys( default_view, self.history_serializer.views[ 'summary' ] )
+
+        self.log( 'should have a serializer for all serializable keys' )
+        for key in self.history_serializer.serializable_keyset:
+            instantiated_attribute = getattr( history1, key, None )
+            if not ( ( key in self.history_serializer.serializers ) or
+                    ( isinstance( instantiated_attribute, self.TYPES_NEEDING_NO_SERIALIZERS ) ) ):
+                self.fail( 'no serializer for: %s (%s)' % ( key, instantiated_attribute ) )
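+        # note: this 'else' pairs with the for loop and runs once the loop
+        # finishes without self.fail() raising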
+        else:
+            self.assertTrue( True, 'all serializable keys have a serializer' )
+
+    def test_views_and_keys( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+
+        self.log( 'should be able to use keys with views' )
+        serialized = self.history_serializer.serialize_to_view( history1,
+            view='summary', keys=[ 'state_ids', 'user_id' ] )
+        self.assertKeys( serialized,
+            self.history_serializer.views[ 'summary' ] + [ 'state_ids', 'user_id' ] )
+
+        self.log( 'should be able to use keys on their own' )
+        serialized = self.history_serializer.serialize_to_view( history1,
+            keys=[ 'state_ids', 'user_id' ] )
+        self.assertKeys( serialized, [ 'state_ids', 'user_id' ] )
+
+    def test_sharable( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+
+        self.log( 'should have a serializer for all SharableModel keys' )
+        sharable_attrs = [ 'user_id', 'username_and_slug', 'importable', 'published', 'slug' ]
+        serialized = self.history_serializer.serialize( history1, sharable_attrs )
+        self.assertKeys( serialized, sharable_attrs )
+
+        self.log( 'should return user_id for user with whom it\'s been shared if the requester is the owner' )
+        non_owner = self.user_manager.create( **user3_data )
+        self.history_manager.share_with( history1, non_owner )
+        serialized = self.history_serializer.serialize( history1, [ 'users_shared_with' ], user=user2 )
+        self.assertKeys( serialized, [ 'users_shared_with' ] )
+        self.assertIsInstance( serialized[ 'users_shared_with' ], list )
+        self.assertEqual( serialized[ 'users_shared_with' ][0], self.app.security.encode_id( non_owner.id ) )
+
+        self.log( 'should not return users_shared_with if the requester is not the owner' )
+        serialized = self.history_serializer.serialize( history1, [ 'users_shared_with' ], user=non_owner )
+        self.assertNotIn( 'users_shared_with', serialized )
+
+    def test_purgable( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+
+        self.log( 'deleted and purged should be returned in their default states' )
+        keys = [ 'deleted', 'purged' ]
+        serialized = self.history_serializer.serialize( history1, keys )
+        self.assertEqual( serialized[ 'deleted' ], False )
+        self.assertEqual( serialized[ 'purged' ], False )
+
+        self.log( 'deleted and purged should return their current state' )
+        self.history_manager.delete( history1 )
+        serialized = self.history_serializer.serialize( history1, keys )
+        self.assertEqual( serialized[ 'deleted' ], True )
+        self.assertEqual( serialized[ 'purged' ], False )
+
+        self.history_manager.purge( history1 )
+        serialized = self.history_serializer.serialize( history1, keys )
+        self.assertEqual( serialized[ 'deleted' ], True )
+        self.assertEqual( serialized[ 'purged' ], True )
+
+    def test_history_serializers( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        all_keys = list( self.history_serializer.serializable_keyset )
+        serialized = self.history_serializer.serialize( history1, all_keys, user=user2 )
+
+        self.log( 'everything serialized should be of the proper type' )
+        self.assertIsInstance( serialized[ 'size' ], int )
+        self.assertIsInstance( serialized[ 'nice_size' ], string_types )
+
+        self.log( 'serialized should jsonify well' )
+        self.assertIsJsonifyable( serialized )
+
+    def _history_state_from_states_and_deleted( self, user, hda_state_and_deleted_tuples ):
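+        # helper: create a history whose hdas get the given (state, deleted)
+        # pairs, then return the history's serialized 'state'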
+        history = self.history_manager.create( name='name', user=user )
+        for state, deleted in hda_state_and_deleted_tuples:
+            hda = self.hda_manager.create( history=history )
+            hda = self.hda_manager.update( hda, dict( state=state, deleted=deleted ) )
+        history_state = self.history_serializer.serialize( history, [ 'state' ] )[ 'state' ]
+        return history_state
+
+    def test_state( self ):
+        dataset_states = model.Dataset.states
+        user2 = self.user_manager.create( **user2_data )
+
+        ready_states = [ ( state, False ) for state in [ dataset_states.OK, dataset_states.OK ] ]
+
+        self.log( 'a history\'s serialized state should be running if any of its datasets are running' )
+        self.assertEqual( 'running', self._history_state_from_states_and_deleted( user2,
+            ready_states + [( dataset_states.RUNNING, False )] ))
+        self.assertEqual( 'running', self._history_state_from_states_and_deleted( user2,
+            ready_states + [( dataset_states.SETTING_METADATA, False )] ))
+        self.assertEqual( 'running', self._history_state_from_states_and_deleted( user2,
+            ready_states + [( dataset_states.UPLOAD, False )] ))
+
+        self.log( 'a history\'s serialized state should be queued if any of its datasets are queued' )
+        self.assertEqual( 'queued', self._history_state_from_states_and_deleted( user2,
+            ready_states + [( dataset_states.QUEUED, False )] ))
+
+        self.log( 'a history\'s serialized state should be error if any of its datasets are errored' )
+        self.assertEqual( 'error', self._history_state_from_states_and_deleted( user2,
+            ready_states + [( dataset_states.ERROR, False )] ))
+        self.assertEqual( 'error', self._history_state_from_states_and_deleted( user2,
+            ready_states + [( dataset_states.FAILED_METADATA, False )] ))
+
+        self.log( 'a history\'s serialized state should be ok if *all* of its datasets are ok' )
+        self.assertEqual( 'ok', self._history_state_from_states_and_deleted( user2, ready_states ))
+
+        self.log( 'a history\'s serialized state should not be affected by deleted datasets' )
+        self.assertEqual( 'ok', self._history_state_from_states_and_deleted( user2,
+            ready_states + [( dataset_states.RUNNING, True )] ))
+
+    def test_contents( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+
+        self.log( 'a history with no contents should be properly reflected in empty, etc.' )
+        keys = [ 'empty', 'count', 'state_ids', 'state_details', 'state', 'hdas' ]
+        serialized = self.history_serializer.serialize( history1, keys )
+        self.assertEqual( serialized[ 'state' ], 'new' )
+        self.assertEqual( serialized[ 'empty' ], True )
+        self.assertEqual( serialized[ 'count' ], 0 )
+        self.assertEqual( sum( serialized[ 'state_details' ].values() ), 0 )
+        self.assertEqual( serialized[ 'state_ids' ][ 'ok' ], [] )
+        self.assertIsInstance( serialized[ 'hdas' ], list )
+
+        self.log( 'a history with contents should be properly reflected in empty, etc.' )
+        hda1 = self.hda_manager.create( history=history1, hid=1 )
+        self.hda_manager.update( hda1, dict( state='ok' ) )
+
+        serialized = self.history_serializer.serialize( history1, keys )
+        self.assertEqual( serialized[ 'state' ], 'ok' )
+        self.assertEqual( serialized[ 'empty' ], False )
+        self.assertEqual( serialized[ 'count' ], 1 )
+        self.assertEqual( serialized[ 'state_details' ][ 'ok' ], 1 )
+        self.assertIsInstance( serialized[ 'state_ids' ][ 'ok' ], list )
+        self.assertIsInstance( serialized[ 'hdas' ], list )
+        self.assertIsInstance( serialized[ 'hdas' ][0], string_types )
+
+        serialized = self.history_serializer.serialize( history1, [ 'contents' ] )
+        self.assertHasKeys( serialized[ 'contents' ][0], [ 'id', 'name', 'state', 'create_time' ])
+
+        self.log( 'serialized should jsonify well' )
+        self.assertIsJsonifyable( serialized )
+
+    def test_ratings( self ):
+        user2 = self.user_manager.create( **user2_data )
+        user3 = self.user_manager.create( **user3_data )
+        manager = self.history_manager
+        serializer = self.history_serializer
+        item = manager.create( name='history1', user=user2 )
+
+        self.log( 'serialization should reflect no ratings' )
+        serialized = serializer.serialize( item, [ 'user_rating', 'community_rating' ], user=user2 )
+        self.assertEqual( serialized[ 'user_rating' ], None )
+        self.assertEqual( serialized[ 'community_rating' ][ 'count' ], 0 )
+        self.assertEqual( serialized[ 'community_rating' ][ 'average' ], 0.0 )
+
+        self.log( 'serialization should reflect ratings' )
+        manager.rate( item, user2, 1 )
+        manager.rate( item, user3, 4 )
+        serialized = serializer.serialize( item, [ 'user_rating', 'community_rating' ], user=user2 )
+        self.assertEqual( serialized[ 'user_rating' ], 1 )
+        self.assertEqual( serialized[ 'community_rating' ][ 'count' ], 2 )
+        self.assertEqual( serialized[ 'community_rating' ][ 'average' ], 2.5 )
+        self.assertIsJsonifyable( serialized )
+
+        self.log( 'serialization of user_rating without user should error' )
+        self.assertRaises( base.ModelSerializingError,
+            serializer.serialize, item, [ 'user_rating' ] )
+
+
+# =============================================================================
+class HistoryDeserializerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( HistoryDeserializerTestCase, self ).set_up_managers()
+        self.history_manager = HistoryManager( self.app )
+        self.history_deserializer = HistoryDeserializer( self.app )
+
+    def test_ratings( self ):
+        user2 = self.user_manager.create( **user2_data )
+        manager = self.history_manager
+        deserializer = self.history_deserializer
+        item = manager.create( name='history1', user=user2 )
+
+        self.log( 'deserialization should allow ratings change' )
+        deserializer.deserialize( item, { 'user_rating': 4 }, user=user2 )
+        self.assertEqual( manager.rating( item, user2 ), 4 )
+        self.assertEqual( manager.ratings( item ), [ 4 ] )
+        self.assertEqual( manager.ratings_avg( item ), 4 )
+        self.assertEqual( manager.ratings_count( item ), 1 )
+
+        self.log( 'deserialization should fail silently on community_rating' )
+        deserializer.deserialize( item, { 'community_rating': 4 }, user=user2 )
+        self.assertEqual( manager.ratings_count( item ), 1 )
+
+    def test_sharable( self ):
+        manager = self.history_manager
+        deserializer = self.history_deserializer
+
+        user2 = self.user_manager.create( **user2_data )
+        item = manager.create( name='history1', user=user2 )
+        non_owner = self.user_manager.create( **user3_data )
+
+        self.log( 'should allow adding a share by adding a user id to users_shared_with' )
+        non_owner_id = self.app.security.encode_id( non_owner.id )
+        deserializer.deserialize( item, { 'users_shared_with': [ non_owner_id ] }, user=user2 )
+        user_shares = manager.get_share_assocs( item )
+        self.assertEqual( len( user_shares ), 1 )
+        self.assertEqual( user_shares[0].user_id, non_owner.id )
+
+        self.log( 're-adding an existing user id should do nothing' )
+        deserializer.deserialize( item, { 'users_shared_with': [ non_owner_id, non_owner_id ] }, user=user2 )
+        user_shares = manager.get_share_assocs( item )
+        self.assertEqual( len( user_shares ), 1 )
+        self.assertEqual( user_shares[0].user_id, non_owner.id )
+
+        self.log( 'should allow removing a share by not having it in users_shared_with' )
+        deserializer.deserialize( item, { 'users_shared_with': [] }, user=user2 )
+        user_shares = manager.get_share_assocs( item )
+        self.assertEqual( len( user_shares ), 0 )
+
+        self.log( 'adding a bad user id should error' )
+        self.assertRaises( AttributeError,
+            deserializer.deserialize, item, { 'users_shared_with': [ None ] }, user=user2 )
+
+        self.log( 'adding a non-existing user id should do nothing' )
+        non_user_id = self.app.security.encode_id( 99 )
+        deserializer.deserialize( item, { 'users_shared_with': [ non_user_id ] }, user=user2 )
+        user_shares = manager.get_share_assocs( item )
+        self.assertEqual( len( user_shares ), 0 )
+
+
+# =============================================================================
+class HistoryFiltersTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( HistoryFiltersTestCase, self ).set_up_managers()
+        self.history_manager = HistoryManager( self.app )
+        self.filter_parser = HistoryFilters( self.app )
+
+    # ---- functional and orm filter splitting and resolution
+    def test_parse_filters( self ):
+        filters = self.filter_parser.parse_filters([
+            ( 'name', 'eq', 'wot' ),
+            ( 'deleted', 'eq', 'True' ),
+            ( 'annotation', 'has', 'hrrmm' )
+        ])
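+        # 'name' and 'deleted' resolve to orm (sqlalchemy) clauses, while
+        # 'annotation' resolves to a python function filter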
+        self.log( 'both orm and fn filters should be parsed and returned' )
+        self.assertEqual( len( filters ), 3 )
+
+        self.log( 'values should be parsed' )
+        self.assertIsInstance( filters[1].right, sqlalchemy.sql.elements.True_ )
+
+    def test_parse_filters_invalid_filters( self ):
+        self.log( 'should error on non-column attr' )
+        self.assertRaises( exceptions.RequestParameterInvalidException, self.filter_parser.parse_filters, [
+            ( 'merp', 'eq', 'wot' ),
+        ])
+        self.log( 'should error on non-whitelisted attr' )
+        self.assertRaises( exceptions.RequestParameterInvalidException, self.filter_parser.parse_filters, [
+            ( 'user_id', 'eq', 'wot' ),
+        ])
+        self.log( 'should error on non-whitelisted op' )
+        self.assertRaises( exceptions.RequestParameterInvalidException, self.filter_parser.parse_filters, [
+            ( 'name', 'lt', 'wot' ),
+        ])
+        self.log( 'should error on non-listed fn op' )
+        self.assertRaises( exceptions.RequestParameterInvalidException, self.filter_parser.parse_filters, [
+            ( 'annotation', 'like', 'wot' ),
+        ])
+        self.log( 'should error on val parsing error' )
+        self.assertRaises( exceptions.RequestParameterInvalidException, self.filter_parser.parse_filters, [
+            ( 'deleted', 'eq', 'true' ),
+        ])
+
+    def test_orm_filter_parsing( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        history2 = self.history_manager.create( name='history2', user=user2 )
+        history3 = self.history_manager.create( name='history3', user=user2 )
+
+        filters = self.filter_parser.parse_filters([
+            ( 'name', 'like', 'history%' ),
+        ])
+        histories = self.history_manager.list( filters=filters )
+        # for h in histories:
+        #    print h.name
+        self.assertEqual( histories, [ history1, history2, history3 ])
+
+        filters = self.filter_parser.parse_filters([ ( 'name', 'like', '%2' ), ])
+        self.assertEqual( self.history_manager.list( filters=filters ), [ history2 ])
+
+        filters = self.filter_parser.parse_filters([ ( 'name', 'eq', 'history2' ), ])
+        self.assertEqual( self.history_manager.list( filters=filters ), [ history2 ])
+
+        self.history_manager.update( history1, dict( deleted=True ) )
+        filters = self.filter_parser.parse_filters([ ( 'deleted', 'eq', 'True' ), ])
+        self.assertEqual( self.history_manager.list( filters=filters ), [ history1 ])
+        filters = self.filter_parser.parse_filters([ ( 'deleted', 'eq', 'False' ), ])
+        self.assertEqual( self.history_manager.list( filters=filters ), [ history2, history3 ])
+        self.assertEqual( self.history_manager.list(), [ history1, history2, history3 ])
+
+        self.history_manager.update( history3, dict( deleted=True ) )
+        self.history_manager.update( history1, dict( importable=True ) )
+        self.history_manager.update( history2, dict( importable=True ) )
+        filters = self.filter_parser.parse_filters([
+            ( 'deleted', 'eq', 'True' ),
+            ( 'importable', 'eq', 'True' ),
+        ])
+        self.assertEqual( self.history_manager.list( filters=filters ), [ history1 ])
+        self.assertEqual( self.history_manager.list(), [ history1, history2, history3 ])
+
+    def test_fn_filter_parsing( self ):
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        history2 = self.history_manager.create( name='history2', user=user2 )
+        history3 = self.history_manager.create( name='history3', user=user2 )
+
+        filters = self.filter_parser.parse_filters([ ( 'annotation', 'has', 'no play' ), ])
+        anno_filter = filters[0]
+
+        history3.add_item_annotation( self.trans.sa_session, user2, history3, "All work and no play" )
+        self.trans.sa_session.flush()
+
+        self.assertTrue( anno_filter( history3 ) )
+        self.assertFalse( anno_filter( history2 ) )
+
+        self.assertEqual( self.history_manager.list( filters=filters ), [ history3 ])
+
+        self.log( 'should allow combinations of orm and fn filters' )
+        self.history_manager.update( history3, dict( importable=True ) )
+        self.history_manager.update( history2, dict( importable=True ) )
+        history1.add_item_annotation( self.trans.sa_session, user2, history1, "All work and no play" )
+        self.trans.sa_session.flush()
+
+        shining_examples = self.history_manager.list( filters=self.filter_parser.parse_filters([
+            ( 'importable', 'eq', 'True' ),
+            ( 'annotation', 'has', 'no play' ),
+        ]))
+        self.assertEqual( shining_examples, [ history3 ])
+
+    def test_fn_filter_currying( self ):
+        self.filter_parser.fn_filter_parsers = {
+            'name_len': { 'op': { 'lt': lambda i, v: len( i.name ) < v }, 'val': int }
+        }
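+        # parse_filters curries the configured 'lt' lambda with the int-parsed
+        # value, producing a single-argument predicate over items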
+        self.log( 'should be 1 fn filter parser now' )
+        self.assertEqual( len( self.filter_parser.fn_filter_parsers ), 1 )
+        filters = self.filter_parser.parse_filters([
+            ( 'name_len', 'lt', '4' )
+        ])
+        self.log( 'should have parsed out a single filter' )
+        self.assertEqual( len( filters ), 1 )
+
+        filter_ = filters[0]
+        fake = galaxy_mock.OpenObject()
+        fake.name = '123'
+        self.log( '123 should return true through the filter' )
+        self.assertTrue( filter_( fake ) )
+        fake.name = '1234'
+        self.log( '1234 should return false through the filter' )
+        self.assertFalse( filter_( fake ) )
+
+    def test_list( self ):
+        """
+        Test limit and offset in conjunction with both orm and fn filtering.
+        """
+        user2 = self.user_manager.create( **user2_data )
+        history1 = self.history_manager.create( name='history1', user=user2 )
+        history2 = self.history_manager.create( name='history2', user=user2 )
+        history3 = self.history_manager.create( name='history3', user=user2 )
+        history4 = self.history_manager.create( name='history4', user=user2 )
+
+        self.history_manager.delete( history1 )
+        self.history_manager.delete( history2 )
+        self.history_manager.delete( history3 )
+
+        test_annotation = "testing"
+        history2.add_item_annotation( self.trans.sa_session, user2, history2, test_annotation )
+        self.trans.sa_session.flush()
+        history3.add_item_annotation( self.trans.sa_session, user2, history3, test_annotation )
+        self.trans.sa_session.flush()
+        history4.add_item_annotation( self.trans.sa_session, user2, history4, test_annotation )
+        self.trans.sa_session.flush()
+
+        all_histories = [ history1, history2, history3, history4 ]
+        deleted_and_annotated = [ history2, history3 ]
+
+        self.log( "no offset, no limit should work" )
+        self.assertEqual( self.history_manager.list( offset=None, limit=None ), all_histories )
+        self.assertEqual( self.history_manager.list(), all_histories )
+        self.log( "no offset, limit should work" )
+        self.assertEqual( self.history_manager.list( limit=2 ), [ history1, history2 ] )
+        self.log( "offset, no limit should work" )
+        self.assertEqual( self.history_manager.list( offset=1 ), [ history2, history3, history4 ] )
+        self.log( "offset, limit should work" )
+        self.assertEqual( self.history_manager.list( offset=1, limit=1 ), [ history2 ] )
+
+        self.log( "zero limit should return empty list" )
+        self.assertEqual( self.history_manager.list( limit=0 ), [] )
+        self.log( "past len offset should return empty list" )
+        self.assertEqual( self.history_manager.list( offset=len( all_histories ) ), [] )
+        self.log( "negative limit should return full list" )
+        self.assertEqual( self.history_manager.list( limit=-1 ), all_histories )
+        self.log( "negative offset should return full list" )
+        self.assertEqual( self.history_manager.list( offset=-1 ), all_histories )
+
+        filters = [ model.History.deleted == true() ]
+        self.log( "orm filtered, no offset, no limit should work" )
+        found = self.history_manager.list( filters=filters )
+        self.assertEqual( found, [ history1, history2, history3 ] )
+        self.log( "orm filtered, no offset, limit should work" )
+        found = self.history_manager.list( filters=filters, limit=2 )
+        self.assertEqual( found, [ history1, history2 ] )
+        self.log( "orm filtered, offset, no limit should work" )
+        found = self.history_manager.list( filters=filters, offset=1 )
+        self.assertEqual( found, [ history2, history3 ] )
+        self.log( "orm filtered, offset, limit should work" )
+        found = self.history_manager.list( filters=filters, offset=1, limit=1 )
+        self.assertEqual( found, [ history2 ] )
+
+        filters = self.filter_parser.parse_filters([ ( 'annotation', 'has', test_annotation ) ])
+        self.log( "fn filtered, no offset, no limit should work" )
+        found = self.history_manager.list( filters=filters )
+        self.assertEqual( found, [ history2, history3, history4 ] )
+        self.log( "fn filtered, no offset, limit should work" )
+        found = self.history_manager.list( filters=filters, limit=2 )
+        self.assertEqual( found, [ history2, history3 ] )
+        self.log( "fn filtered, offset, no limit should work" )
+        found = self.history_manager.list( filters=filters, offset=1 )
+        self.assertEqual( found, [ history3, history4 ] )
+        self.log( "fn filtered, offset, limit should work" )
+        found = self.history_manager.list( filters=filters, offset=1, limit=1 )
+        self.assertEqual( found, [ history3 ] )
+
+        filters = self.filter_parser.parse_filters([
+            ( 'deleted', 'eq', 'True' ),
+            ( 'annotation', 'has', test_annotation )
+        ])
+        self.log( "orm and fn filtered, no offset, no limit should work" )
+        found = self.history_manager.list( filters=filters )
+        self.assertEqual( found, [ history2, history3 ] )
+        self.log( "orm and fn filtered, no offset, limit should work" )
+        found = self.history_manager.list( filters=filters, limit=1 )
+        self.assertEqual( found, [ history2 ] )
+        self.log( "orm and fn filtered, offset, no limit should work" )
+        found = self.history_manager.list( filters=filters, offset=1 )
+        self.assertEqual( found, [ history3 ] )
+        self.log( "orm and fn filtered, offset, limit should work" )
+        found = self.history_manager.list( filters=filters, offset=1, limit=1 )
+        self.assertEqual( found, [ history3 ] )
+
+        self.log( "orm and fn filtered, zero limit should return empty list" )
+        found = self.history_manager.list( filters=filters, limit=0 )
+        self.assertEqual( found, [] )
+        self.log( "orm and fn filtered, past len offset should return empty list" )
+        found = self.history_manager.list( filters=filters, offset=len( deleted_and_annotated ) )
+        self.assertEqual( found, [] )
+        self.log( "orm and fn filtered, negative limit should return full list" )
+        found = self.history_manager.list( filters=filters, limit=-1 )
+        self.assertEqual( found, deleted_and_annotated )
+        self.log( "orm and fn filtered, negative offset should return full list" )
+        found = self.history_manager.list( filters=filters, offset=-1 )
+        self.assertEqual( found, deleted_and_annotated )
+
+    # TODO: eq, ge, le
+    # def test_ratings( self ):
+    #     pass
+
+
+# =============================================================================
+if __name__ == '__main__':
+    # or more generally, nosetests test_resourcemanagers.py -s -v
+    unittest.main()
diff --git a/test/unit/managers/test_UserManager.py b/test/unit/managers/test_UserManager.py
new file mode 100644
index 0000000..25b9f5f
--- /dev/null
+++ b/test/unit/managers/test_UserManager.py
@@ -0,0 +1,278 @@
+# -*- coding: utf-8 -*-
+"""
+User Manager testing.
+
+Executable directly using: python -m test.unit.managers.test_UserManager
+"""
+import unittest
+
+import sqlalchemy
+from six import string_types
+
+from galaxy import exceptions, model
+from galaxy.managers import base as base_manager
+from galaxy.managers import histories, users
+
+from .base import BaseTestCase
+
+
+# =============================================================================
+default_password = '123456'
+user2_data = dict( email='user2@user2.user2', username='user2', password=default_password )
+user3_data = dict( email='user3@user3.user3', username='user3', password=default_password )
+user4_data = dict( email='user4@user4.user4', username='user4', password=default_password )
+
+
+# =============================================================================
+class UserManagerTestCase( BaseTestCase ):
+
+    def test_framework( self ):
+        self.log( "(for testing) should have admin_user, and admin_user is current" )
+        self.assertEqual( self.trans.user, self.admin_user )
+
+    def test_base( self ):
+        self.log( "should be able to create a user" )
+        user2 = self.user_manager.create( **user2_data )
+        self.assertIsInstance( user2, model.User )
+        self.assertIsNotNone( user2.id )
+        self.assertEqual( user2.email, user2_data[ 'email' ] )
+        self.assertEqual( user2.password, default_password )
+
+        user3 = self.user_manager.create( **user3_data )
+
+        self.log( "should be able to query" )
+        users = self.trans.sa_session.query( model.User ).all()
+        self.assertEqual( self.user_manager.list(), users )
+
+        self.assertEqual( self.user_manager.by_id( user2.id ), user2 )
+        self.assertEqual( self.user_manager.by_ids( [ user3.id, user2.id ] ), [ user3, user2 ] )
+
+        self.log( "should be able to limit and offset" )
+        self.assertEqual( self.user_manager.list( limit=1 ), users[0:1] )
+        self.assertEqual( self.user_manager.list( offset=1 ), users[1:] )
+        self.assertEqual( self.user_manager.list( limit=1, offset=1 ), users[1:2] )
+
+        self.assertEqual( self.user_manager.list( limit=0 ), [] )
+        self.assertEqual( self.user_manager.list( offset=3 ), [] )
+
+        self.log( "should be able to order" )
+        self.assertEqual( self.user_manager.list( order_by=( sqlalchemy.desc( model.User.create_time ) ) ),
+            [ user3, user2, self.admin_user ] )
+
+    def test_invalid_create( self ):
+        self.user_manager.create( **user2_data )
+
+        self.log( "emails must be unique" )
+        self.assertRaises( exceptions.Conflict, self.user_manager.create,
+            **dict( email='user2@user2.user2', username='user2a', password=default_password ) )
+        self.log( "usernames must be unique" )
+        self.assertRaises( exceptions.Conflict, self.user_manager.create,
+            **dict( email='user2a@user2.user2', username='user2', password=default_password ) )
+
+    def test_email_queries( self ):
+        user2 = self.user_manager.create( **user2_data )
+        user3 = self.user_manager.create( **user3_data )
+
+        self.log( "should be able to query by email" )
+        self.assertEqual( self.user_manager.by_email( user2_data[ 'email' ] ), user2 )
+
+        # note: results are sorted alphabetically by email
+        self.assertEqual( self.user_manager.by_email_like( '%@%' ), [ self.admin_user, user2, user3 ] )
+
+    def test_admin( self ):
+        user2 = self.user_manager.create( **user2_data )
+
+        self.log( "should be able to test whether admin" )
+        self.assertTrue( self.user_manager.is_admin( self.admin_user ) )
+        self.assertFalse( self.user_manager.is_admin( user2 ) )
+        self.assertEqual( self.user_manager.admins(), [ self.admin_user ] )
+        self.assertRaises( exceptions.AdminRequiredException, self.user_manager.error_unless_admin, user2 )
+        self.assertEqual( self.user_manager.error_unless_admin( self.admin_user ), self.admin_user )
+
+    def test_anonymous( self ):
+        anon = None
+        user2 = self.user_manager.create( **user2_data )
+
+        self.log( "should be able to tell if a user is anonymous" )
+        self.assertRaises( exceptions.AuthenticationFailed, self.user_manager.error_if_anonymous, anon )
+        self.assertEqual( self.user_manager.error_if_anonymous( user2 ), user2 )
+
+    def test_current( self ):
+        user2 = self.user_manager.create( **user2_data )
+
+        self.log( "should be able to tell if a user is the current (trans) user" )
+        self.assertEqual( self.user_manager.current_user( self.trans ), self.admin_user )
+        self.assertNotEqual( self.user_manager.current_user( self.trans ), user2 )
+
+    def test_api_keys( self ):
+        user2 = self.user_manager.create( **user2_data )
+
+        self.log( "should return None if no APIKey has been created" )
+        self.assertEqual( self.user_manager.valid_api_key( user2 ), None )
+
+        self.log( "should be able to generate and retrieve valid api key" )
+        user2_api_key = self.user_manager.create_api_key( user2 )
+        self.assertIsInstance( user2_api_key, string_types )
+        self.assertEqual( self.user_manager.valid_api_key( user2 ).key, user2_api_key )
+
+        self.log( "should return the most recent (i.e. most valid) api key" )
+        user2_api_key_2 = self.user_manager.create_api_key( user2 )
+        self.assertEqual( self.user_manager.valid_api_key( user2 ).key, user2_api_key_2 )
+
+
+# =============================================================================
+class UserSerializerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( UserSerializerTestCase, self ).set_up_managers()
+        self.user_serializer = users.UserSerializer( self.app )
+
+    def test_views( self ):
+        user = self.user_manager.create( **user2_data )
+
+        self.log( 'should have a summary view' )
+        summary_view = self.user_serializer.serialize_to_view( user, view='summary' )
+        self.assertKeys( summary_view, self.user_serializer.views[ 'summary' ] )
+
+        self.log( 'should have the summary view as default view' )
+        default_view = self.user_serializer.serialize_to_view( user, default_view='summary' )
+        self.assertKeys( default_view, self.user_serializer.views[ 'summary' ] )
+
+        self.log( 'should have a serializer for all serializable keys' )
+        for key in self.user_serializer.serializable_keyset:
+            instantiated_attribute = getattr( user, key, None )
+            if not ( ( key in self.user_serializer.serializers ) or
+                     ( isinstance( instantiated_attribute, self.TYPES_NEEDING_NO_SERIALIZERS ) ) ):
+                self.fail( 'no serializer for: %s (%s)' % ( key, instantiated_attribute ) )
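+        # note: this 'else' pairs with the for loop, as in the serializer tests above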
+        else:
+            self.assertTrue( True, 'all serializable keys have a serializer' )
+
+    def test_views_and_keys( self ):
+        user = self.user_manager.create( **user2_data )
+
+        self.log( 'should be able to use keys with views' )
+        serialized = self.user_serializer.serialize_to_view( user,
+            view='summary', keys=[ 'create_time' ] )
+        self.assertKeys( serialized,
+            self.user_serializer.views[ 'summary' ] + [ 'create_time' ] )
+
+        self.log( 'should be able to use keys on their own' )
+        serialized = self.user_serializer.serialize_to_view( user,
+            keys=[ 'tags_used', 'is_admin' ] )
+        self.assertKeys( serialized, [ 'tags_used', 'is_admin' ] )
+
+    def test_serializers( self ):
+        user = self.user_manager.create( **user2_data )
+        all_keys = list( self.user_serializer.serializable_keyset )
+        serialized = self.user_serializer.serialize( user, all_keys, trans=self.trans )
+        # pprint.pprint( serialized )
+
+        self.log( 'everything serialized should be of the proper type' )
+        self.assertEncodedId( serialized[ 'id' ] )
+        self.assertDate( serialized[ 'create_time' ] )
+        self.assertDate( serialized[ 'update_time' ] )
+        self.assertIsInstance( serialized[ 'deleted' ], bool )
+        self.assertIsInstance( serialized[ 'purged' ], bool )
+
+        # self.assertIsInstance( serialized[ 'active' ], bool )
+        self.assertIsInstance( serialized[ 'is_admin' ], bool )
+        self.assertIsInstance( serialized[ 'total_disk_usage' ], float )
+        self.assertIsInstance( serialized[ 'nice_total_disk_usage' ], string_types )
+        self.assertIsInstance( serialized[ 'quota_percent' ], ( type( None ), float ) )
+        self.assertIsInstance( serialized[ 'tags_used' ], list )
+        self.assertIsInstance( serialized[ 'has_requests' ], bool )
+
+        self.log( 'serialized should jsonify well' )
+        self.assertIsJsonifyable( serialized )
+
+
+class CurrentUserSerializerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( CurrentUserSerializerTestCase, self ).set_up_managers()
+        self.history_manager = histories.HistoryManager( self.app )
+        self.user_serializer = users.CurrentUserSerializer( self.app )
+
+    def test_anonymous( self ):
+        anonym = None
+        # need a history here for total_disk_usage
+        self.trans.set_history( self.history_manager.create() )
+
+        self.log( 'should be able to serialize anonymous user' )
+        serialized = self.user_serializer.serialize_to_view( anonym, view='detailed', trans=self.trans )
+        self.assertKeys( serialized,
+            [ 'id', 'total_disk_usage', 'nice_total_disk_usage', 'quota_percent' ] )
+
+        self.log( 'anonymous\'s id should be None' )
+        self.assertEqual( serialized[ 'id' ], None )
+        self.log( 'everything serialized should be of the proper type' )
+        self.assertIsInstance( serialized[ 'total_disk_usage' ], float )
+        self.assertIsInstance( serialized[ 'nice_total_disk_usage' ], string_types )
+        self.assertIsInstance( serialized[ 'quota_percent' ], ( type( None ), float ) )
+
+        self.log( 'serialized should jsonify well' )
+        self.assertIsJsonifyable( serialized )
+
+
+# =============================================================================
+class UserDeserializerTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( UserDeserializerTestCase, self ).set_up_managers()
+        self.deserializer = users.UserDeserializer( self.app )
+
+    def _assertRaises_and_return_raised( self, exception_class, fn, *args, **kwargs ):
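+        # like assertRaises, but returns the caught exception so the caller can
+        # inspect its message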
+        try:
+            fn( *args, **kwargs )
+        except exception_class as exception:
+            self.assertTrue( True )
+            return exception
+        assert False, '%s not raised' % ( exception_class.__name__ )
+
+    def test_username_validation( self ):
+        user = self.user_manager.create( **user2_data )
+
+        # self.log( "usernames can be unicode" ) #TODO: nope they can't
+        # self.deserializer.deserialize( user, { 'username': 'Σίσυφος' }, trans=self.trans )
+
+        self.log( "usernames must be long enough and with no non-hyphen punctuation" )
+        exception = self._assertRaises_and_return_raised( base_manager.ModelDeserializingError,
+            self.deserializer.deserialize, user, { 'username': 'ed' }, trans=self.trans )
+        self.assertTrue( 'Public name must be at least' in str( exception ) )
+        self.assertRaises( base_manager.ModelDeserializingError, self.deserializer.deserialize,
+            user, { 'username': 'f,d,r,' }, trans=self.trans )
+
+        self.log( "usernames must be unique" )
+        self.user_manager.create( **user3_data )
+        self.assertRaises( base_manager.ModelDeserializingError, self.deserializer.deserialize,
+            user, { 'username': 'user3' }, trans=self.trans )
+
+        self.log( "username should be updatable" )
+        new_name = 'double-plus-good'
+        self.deserializer.deserialize( user, { 'username': new_name }, trans=self.trans )
+        self.assertEqual( self.user_manager.by_id( user.id ).username, new_name )
+
+
+# =============================================================================
+class AdminUserFilterParserTestCase( BaseTestCase ):
+
+    def set_up_managers( self ):
+        super( AdminUserFilterParserTestCase, self ).set_up_managers()
+        self.filter_parser = users.AdminUserFilterParser( self.app )
+
+    def test_parsable( self ):
+        self.log( 'the following filters should be parsable' )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'email', 'eq', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'email', 'contains', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'email', 'like', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'username', 'eq', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'username', 'contains', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'username', 'like', 'wot' ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'active', 'eq', True ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'disk_usage', 'le', 500000.00 ) )
+        self.assertORMFilter( self.filter_parser.parse_filter( 'disk_usage', 'ge', 500000.00 ) )
+
+
+# =============================================================================
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/unit/shed_unit/__init__.py b/test/unit/shed_unit/__init__.py
new file mode 100644
index 0000000..5690544
--- /dev/null
+++ b/test/unit/shed_unit/__init__.py
@@ -0,0 +1,4 @@
+"""
+Module cannot be called tool_shed, because that would conflict with lib/tool_shed,
+which is also at the top level of the path.
+"""
diff --git a/test/unit/shed_unit/test_fabric_util.py b/test/unit/shed_unit/test_fabric_util.py
new file mode 100644
index 0000000..5b9031d
--- /dev/null
+++ b/test/unit/shed_unit/test_fabric_util.py
@@ -0,0 +1,46 @@
+from contextlib import contextmanager
+
+from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder
+
+
+def test_env_file_builder():
+    install_dir = "/opt/galaxy/dependencies/foo/"
+    env_file_builder = EnvFileBuilder( install_dir )
+    added_lines = []
+    mock_return = dict(value=0)
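+    # a mutable dict lets the test change the mock's return value after it is
+    # defined (python 2 has no 'nonlocal')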
+
+    def mock_file_append( text, file_path, **kwds ):
+        added_lines.append( text )
+        return mock_return[ "value" ]
+
+    with __mock_env_file_builder_method( env_file_builder, "file_append", mock_file_append ):
+        env_file_builder.append_line( name="PATH", action="prepend_to", value="/usr/bin/local/R" )
+        assert added_lines == [ "PATH=/usr/bin/local/R:$PATH; export PATH" ]
+        assert env_file_builder.return_code == 0
+
+        # Reset mock lines
+        del added_lines[:]
+        # Next time file_append will fail
+        mock_return["value"] = 1
+
+        env_file_builder.append_line( action="source", value="/usr/bin/local/R/env.sh" )
+        assert added_lines == [ "if [ -f /usr/bin/local/R/env.sh ] ; then . /usr/bin/local/R/env.sh ; fi" ]
+        # Check failure
+        assert env_file_builder.return_code == 1
+
+        mock_return["value"] = 0
+        env_file_builder.append_line( name="LD_LIBRARY_PATH", action="append_to", value="/usr/bin/local/R/lib" )
+        # Verify even though last append succeeded, previous failure still recorded.
+        assert env_file_builder.return_code == 1
+
+
+# Poor man's mocking. A real mocking library should be added as a Galaxy
+# development dependency.
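+# (With the ``mock`` library, for example, this helper could be replaced by
+# ``mock.patch.object(env_file_builder, name, mock_method)``.)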
+@contextmanager
+def __mock_env_file_builder_method( env_file_builder, name, mock_method ):
+    real_method = getattr( env_file_builder, name )
+    try:
+        setattr( env_file_builder, name, mock_method )
+        yield
+    finally:
+        setattr( env_file_builder, name, real_method )
diff --git a/test/unit/shed_unit/test_td_common_util.py b/test/unit/shed_unit/test_td_common_util.py
new file mode 100644
index 0000000..f07eea9
--- /dev/null
+++ b/test/unit/shed_unit/test_td_common_util.py
@@ -0,0 +1,88 @@
+from contextlib import contextmanager
+from os.path import join
+
+from galaxy.util import parse_xml_string
+from tool_shed.galaxy_install.tool_dependencies.env_manager import EnvManager
+from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder
+
+TEST_DEPENDENCIES_DIR = "/opt/galaxy/dependencies"
+TEST_INSTALL_DIR = "%s/test_install_dir" % TEST_DEPENDENCIES_DIR
+
+
+class MockApp( object ):
+
+    def __init__( self ):
+        pass
+
+
+def test_create_or_update_env_shell_file():
+    test_path = "/usr/share/R/libs"
+    env_file_builder = EnvFileBuilder( test_path )
+    line, path = env_file_builder.create_or_update_env_shell_file( TEST_INSTALL_DIR, dict( action="append_to", name="R_LIBS", value=test_path ) )
+    assert path == join( TEST_INSTALL_DIR, "env.sh" )
+    assert line == "R_LIBS=$R_LIBS:/usr/share/R/libs; export R_LIBS"
+
+    line, path = env_file_builder.create_or_update_env_shell_file( TEST_INSTALL_DIR, dict( action="prepend_to", name="R_LIBS", value=test_path ) )
+    assert path == join( TEST_INSTALL_DIR, "env.sh" )
+    assert line == "R_LIBS=/usr/share/R/libs:$R_LIBS; export R_LIBS"
+
+    line, path = env_file_builder.create_or_update_env_shell_file( TEST_INSTALL_DIR, dict( action="set_to", name="R_LIBS", value=test_path ) )
+    assert path == join( TEST_INSTALL_DIR, "env.sh" )
+    assert line == "R_LIBS=/usr/share/R/libs; export R_LIBS"
+
+    line, path = env_file_builder.create_or_update_env_shell_file( TEST_INSTALL_DIR, dict( action="source", value=test_path ) )
+    assert path == join( TEST_INSTALL_DIR, "env.sh" )
+    assert line == "if [ -f /usr/share/R/libs ] ; then . /usr/share/R/libs ; fi"
+
+
+def test_get_env_shell_file_paths_from_setup_environment_elem():
+    xml = """<action name="setup_r_environment">
+        <repository name="package_r_3_0_1" owner="bgruening" toolshed="toolshed.g2.bx.psu.edu" changeset_revision="1234567">
+            <package name="R" version="3.0.1" />
+        </repository>
+        <repository name="package_zlib_1_2_8" owner="iuc" toolshed="toolshed.g2.bx.psu.edu" changeset_revision="7654321">
+            <package name="zlib" version="1.2.8" />
+        </repository>
+    </action>
+    """
+    mock_app = MockApp()
+    action_elem = parse_xml_string( xml )
+    required_for_install_env_sh = '/path/to/existing.sh'
+    all_env_paths = [ required_for_install_env_sh ]
+    action_dict = {}
+    env_manager = EnvManager( mock_app )
+
+    r_env_sh = '/path/to/go/env.sh'
+
+    def mock_get_env_shell_file_paths( elem ):
+        assert elem.get( 'name' ) in ["package_r_3_0_1", "package_zlib_1_2_8"]
+        return [ r_env_sh ]
+
+    with __mock_common_util_method( env_manager, "get_env_shell_file_paths", mock_get_env_shell_file_paths ):
+        env_manager.get_env_shell_file_paths_from_setup_environment_elem( all_env_paths, action_elem, action_dict )
+        # Verify old env files weren't deleted.
+        assert required_for_install_env_sh in all_env_paths
+        # Verify new ones added.
+        assert r_env_sh in all_env_paths
+        # env_shell_file_paths includes everything
+        assert all( [ env in action_dict[ 'env_shell_file_paths' ] for env in all_env_paths ] )
+        # for every given repository there should be one env
+        # file + the required_for_install_env_sh file
+        assert len( action_dict[ 'env_shell_file_paths' ] ) == 3
+
+        # action_shell_file_paths includes only env files defined
+        # inside the setup_ action element.
+        assert required_for_install_env_sh in action_dict[ 'action_shell_file_paths' ]
+        assert r_env_sh in action_dict[ 'action_shell_file_paths' ]
+
+
+# Poor man's mocking. A real mocking library should be added as a Galaxy
+# development dependency.
+@contextmanager
+def __mock_common_util_method( env_manager, name, mock_method ):
+    real_method = getattr( env_manager, name )
+    try:
+        setattr( env_manager, name, mock_method )
+        yield
+    finally:
+        setattr( env_manager, name, real_method )
diff --git a/test/unit/shed_unit/test_tool_panel_manager.py b/test/unit/shed_unit/test_tool_panel_manager.py
new file mode 100644
index 0000000..b33d77d
--- /dev/null
+++ b/test/unit/shed_unit/test_tool_panel_manager.py
@@ -0,0 +1,206 @@
+import os
+
+from galaxy.util import parse_xml
+
+from tool_shed.galaxy_install.tools import tool_panel_manager
+from tool_shed.tools import tool_version_manager
+from tools.test_toolbox import (
+    BaseToolBoxTestCase,
+    SimplifiedToolBox
+)
+
+DEFAULT_GUID = "123456"
+
+
+class ToolPanelManagerTestCase( BaseToolBoxTestCase ):
+
+    def get_new_toolbox(self):
+        return SimplifiedToolBox(self)
+
+    def test_handle_tool_panel_section( self ):
+        self._init_tool()
+        self._add_config( """<toolbox><section id="tid" name="test"><tool file="tool.xml" /></section></toolbox>""" )
+        toolbox = self.toolbox
+        tpm = self.tpm
+        # Test fetch existing section by id.
+        section_id, section = tpm.handle_tool_panel_section( toolbox, tool_panel_section_id="tid" )
+        assert section_id == "tid"
+        assert len( section.elems ) == 1  # tool.xml
+        assert section.id == "tid"
+        assert len( toolbox._tool_panel ) == 1
+
+        section_id, section = tpm.handle_tool_panel_section( toolbox, new_tool_panel_section_label="tid2" )
+        assert section_id == "tid2"
+        assert len( section.elems ) == 0  # new section
+        assert section.id == "tid2"
+        assert len( toolbox._tool_panel ) == 2
+
+        # Test re-fetch new section by same id.
+        section_id, section = tpm.handle_tool_panel_section( toolbox, new_tool_panel_section_label="tid2" )
+        assert section_id == "tid2"
+        assert len( section.elems ) == 0  # new section
+        assert section.id == "tid2"
+        assert len( toolbox._tool_panel ) == 2
+
+    def test_add_tool_to_panel( self ):
+        self._init_ts_tool( guid=DEFAULT_GUID )
+        self._init_dynamic_tool_conf()
+        tool_path = self._tool_path()
+        new_tools = [{"guid": DEFAULT_GUID, "tool_config": tool_path}]
+        repository_tools_tups = [
+            (
+                tool_path,
+                DEFAULT_GUID,
+                self.tool,
+            )
+        ]
+        _, section = self.toolbox.get_section("tid1", create_if_needed=True)
+        tpm = self.tpm
+        tool_panel_dict = tpm.generate_tool_panel_dict_for_new_install(
+            tool_dicts=new_tools,
+            tool_section=section,
+        )
+        tpm.add_to_tool_panel(
+            repository_name="test_repo",
+            repository_clone_url="http://github.com/galaxyproject/example.git",
+            changeset_revision="0123456789abcde",
+            repository_tools_tups=repository_tools_tups,
+            owner="devteam",
+            shed_tool_conf="tool_conf.xml",
+            tool_panel_dict=tool_panel_dict,
+        )
+        self._verify_tool_confs()
+
+    def test_add_twice( self ):
+        self._init_dynamic_tool_conf()
+        tool_versions = {}
+        previous_guid = None
+        for v in "1", "2", "3":
+            self.__toolbox = self.get_new_toolbox()
+            changeset = "0123456789abcde%s" % v
+            guid = DEFAULT_GUID + ("v%s" % v)
+            tool = self._init_ts_tool( guid=guid, filename="tool_v%s.xml" % v )
+            tool_path = self._tool_path( name="tool_v%s.xml" % v )
+            new_tools = [{"guid": guid, "tool_config": tool_path}]
+            tool_shed_repository = self._repo_install( changeset )
+            repository_tools_tups = [
+                (
+                    tool_path,
+                    guid,
+                    tool,
+                )
+            ]
+            _, section = self.toolbox.get_section("tid1", create_if_needed=True)
+            tpm = self.tpm
+            tool_panel_dict = tpm.generate_tool_panel_dict_for_new_install(
+                tool_dicts=new_tools,
+                tool_section=section,
+            )
+            if previous_guid:
+                tool_versions[ guid ] = previous_guid
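+            # Mapping each new guid to its predecessor lets the version manager chain the tool lineage.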
+            self.tvm.handle_tool_versions( [tool_versions], tool_shed_repository )
+            tpm.add_to_tool_panel(
+                repository_name="example",
+                repository_clone_url="github.com",
+                changeset_revision=changeset,
+                repository_tools_tups=repository_tools_tups,
+                owner="galaxyproject",
+                shed_tool_conf="tool_conf.xml",
+                tool_panel_dict=tool_panel_dict,
+            )
+            self._verify_tool_confs()
+            section = self.toolbox._tool_panel["tid1"]
+            # New GUID replaced old one in tool panel but both
+            # appear in integrated tool panel.
+            if previous_guid:
+                assert ("tool_%s" % previous_guid) not in section.panel_items()
+            assert ("tool_%s" % guid) in self.toolbox._integrated_tool_panel["tid1"].panel_items()
+            previous_guid = guid
+
+    def test_uninstall_in_section( self ):
+        self._setup_two_versions_remove_one( section=True, uninstall=True )
+        self._verify_version_2_removed_from_panel( )
+        # Not in tool conf because it was uninstalled.
+        assert "github.com/galaxyproject/example/test_tool/0.2" not in open(os.path.join(self.test_directory, "tool_conf.xml"), "r").read()
+        new_toolbox = self.get_new_toolbox()
+        assert "tool_github.com/galaxyproject/example/test_tool/0.2" not in new_toolbox._integrated_tool_panel["tid"].elems
+        self._verify_tool_confs()
+
+    def test_uninstall_outside_section( self ):
+        self._setup_two_versions_remove_one( section=False, uninstall=True )
+        self._verify_version_2_removed_from_panel( section=False )
+        # Removed from the tool conf as well, since the tool was uninstalled rather than just deactivated.
+        assert "github.com/galaxyproject/example/test_tool/0.2" not in open(os.path.join(self.test_directory, "tool_conf.xml"), "r").read()
+        self._verify_tool_confs()
+
+        self._remove_guids( ["github.com/galaxyproject/example/test_tool/0.1"], uninstall=True )
+
+        # Now no versions of this tool are returned by new toolbox.
+        new_toolbox = self.get_new_toolbox()
+        all_versions = new_toolbox.get_tool( "test_tool", get_all_versions=True )
+        assert not all_versions
+
+        # Check that tool panel has reverted to old value...
+        section = new_toolbox._tool_panel["tid"]
+        assert len(section.elems) == 0
+
+    def _setup_two_versions_remove_one( self, section, uninstall ):
+        self._init_tool()
+        self._setup_two_versions_in_config( section=True )
+        self._setup_two_versions()
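+        # Bare property access below forces the toolbox to load before tools are removed.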
+        self.toolbox
+        self._remove_guids( ["github.com/galaxyproject/example/test_tool/0.2"], uninstall=uninstall )
+
+    def _verify_version_2_removed_from_panel( self, section=True ):
+        # Check that test_tool now only has one version...
+        # We load a new toolbox
+        new_toolbox = self.get_new_toolbox()
+        all_versions = new_toolbox.get_tool( "test_tool", get_all_versions=True )
+        assert len( all_versions ) == 1
+
+        # Check that tool panel has reverted to old value...
+        if section:
+            section = new_toolbox._tool_panel["tid"]
+            assert len(section.elems) == 1
+            assert next(iter(section.elems.values())).id == "github.com/galaxyproject/example/test_tool/0.1"
+
+            assert "github.com/galaxyproject/example/test_tool/0.2" not in new_toolbox._integrated_tool_panel["tid"].elems
+        else:
+            assert next(iter(self.toolbox._tool_panel.values())).id == "github.com/galaxyproject/example/test_tool/0.1"
+            assert "github.com/galaxyproject/example/test_tool/0.2" not in new_toolbox._integrated_tool_panel
+
+    def _remove_guids( self, guids, uninstall, shed_tool_conf="tool_conf.xml" ):
+        self.tpm.remove_guids(
+            guids_to_remove=guids,
+            shed_tool_conf=shed_tool_conf,
+            uninstall=uninstall,
+        )
+
+    def _verify_tool_confs( self ):
+        self._assert_valid_xml( self.integerated_tool_panel_path )
+        self._assert_valid_xml( os.path.join( self.test_directory, "tool_conf.xml" ) )
+
+    def _assert_valid_xml( self, filename ):
+        try:
+            parse_xml( filename )
+        except Exception:
+            message_template = "file %s does not contain valid XML, content %s"
+            message = message_template % ( filename, open( filename, "r" ).read() )
+            raise AssertionError( message )
+
+    def _init_dynamic_tool_conf( self ):
+        # Add a dynamic tool conf (such as a ToolShed-managed one) to the list of configs.
+        self._add_config( """<toolbox tool_path="%s"></toolbox>""" % self.test_directory )
+
+    def _init_ts_tool( self, guid=DEFAULT_GUID, **kwds ):
+        tool = self._init_tool( **kwds )
+        tool.guid = guid
+        return tool
+
+    @property
+    def tpm( self ):
+        return tool_panel_manager.ToolPanelManager( self.app )
+
+    @property
+    def tvm( self ):
+        return tool_version_manager.ToolVersionManager( self.app )
diff --git a/test/unit/test_galaxy_mapping.py b/test/unit/test_galaxy_mapping.py
new file mode 100644
index 0000000..ccf9c6c
--- /dev/null
+++ b/test/unit/test_galaxy_mapping.py
@@ -0,0 +1,524 @@
+# -*- coding: utf-8 -*-
+import time
+import unittest
+import uuid
+
+from six import text_type
+
+import galaxy.datatypes
+import galaxy.model
+import galaxy.model.mapping as mapping
+
+datatypes_registry = galaxy.datatypes.registry.Registry()
+datatypes_registry.load_datatypes()
+galaxy.model.set_datatypes_registry(datatypes_registry)
+
+
+class MappingTests( unittest.TestCase ):
+
+    def test_annotations( self ):
+        model = self.model
+
+        u = model.User( email="annotator at example.com", password="password" )
+        self.persist( u )
+
+        def persist_and_check_annotation( annotation_class, **kwds ):
+            annotated_association = annotation_class()
+            annotated_association.annotation = "Test Annotation"
+            annotated_association.user = u
+            for key, value in kwds.items():
+                setattr(annotated_association, key, value)
+            self.persist( annotated_association )
+            self.expunge()
+            stored_annotation = self.query( annotation_class ).all()[0]
+            assert stored_annotation.annotation == "Test Annotation"
+            assert stored_annotation.user.email == "annotator at example.com"
+
+        sw = model.StoredWorkflow()
+        sw.user = u
+        self.persist( sw )
+        persist_and_check_annotation( model.StoredWorkflowAnnotationAssociation, stored_workflow=sw )
+
+        workflow = model.Workflow()
+        workflow.stored_workflow = sw
+        self.persist( workflow )
+
+        ws = model.WorkflowStep()
+        ws.workflow = workflow
+        self.persist( ws )
+        persist_and_check_annotation( model.WorkflowStepAnnotationAssociation, workflow_step=ws )
+
+        h = model.History( name="History for Annotation", user=u)
+        self.persist( h )
+        persist_and_check_annotation( model.HistoryAnnotationAssociation, history=h )
+
+        d1 = model.HistoryDatasetAssociation( extension="txt", history=h, create_dataset=True, sa_session=model.session )
+        self.persist( d1 )
+        persist_and_check_annotation( model.HistoryDatasetAssociationAnnotationAssociation, hda=d1 )
+
+        page = model.Page()
+        page.user = u
+        self.persist( page )
+        persist_and_check_annotation( model.PageAnnotationAssociation, page=page )
+
+        visualization = model.Visualization()
+        visualization.user = u
+        self.persist( visualization )
+        persist_and_check_annotation( model.VisualizationAnnotationAssociation, visualization=visualization )
+
+        dataset_collection = model.DatasetCollection( collection_type="paired" )
+        history_dataset_collection = model.HistoryDatasetCollectionAssociation( collection=dataset_collection )
+        self.persist( history_dataset_collection )
+        persist_and_check_annotation( model.HistoryDatasetCollectionAnnotationAssociation, history_dataset_collection=history_dataset_collection )
+
+        library_dataset_collection = model.LibraryDatasetCollectionAssociation( collection=dataset_collection )
+        self.persist( library_dataset_collection )
+        persist_and_check_annotation( model.LibraryDatasetCollectionAnnotationAssociation, library_dataset_collection=library_dataset_collection )
+
+    def test_ratings( self ):
+        model = self.model
+
+        u = model.User( email="rater at example.com", password="password" )
+        self.persist( u )
+
+        def persist_and_check_rating( rating_class, **kwds ):
+            rating_association = rating_class()
+            rating_association.rating = 5
+            rating_association.user = u
+            for key, value in kwds.items():
+                setattr(rating_association, key, value)
+            self.persist( rating_association )
+            self.expunge()
+            stored_annotation = self.query( rating_class ).all()[0]
+            assert stored_annotation.rating == 5
+            assert stored_annotation.user.email == "rater at example.com"
+
+        sw = model.StoredWorkflow()
+        sw.user = u
+        self.persist( sw )
+        persist_and_check_rating( model.StoredWorkflowRatingAssociation, stored_workflow=sw )
+
+        h = model.History( name="History for Rating", user=u)
+        self.persist( h )
+        persist_and_check_rating( model.HistoryRatingAssociation, history=h )
+
+        d1 = model.HistoryDatasetAssociation( extension="txt", history=h, create_dataset=True, sa_session=model.session )
+        self.persist( d1 )
+        persist_and_check_rating( model.HistoryDatasetAssociationRatingAssociation, hda=d1 )
+
+        page = model.Page()
+        page.user = u
+        self.persist( page )
+        persist_and_check_rating( model.PageRatingAssociation, page=page )
+
+        visualization = model.Visualization()
+        visualization.user = u
+        self.persist( visualization )
+        persist_and_check_rating( model.VisualizationRatingAssociation, visualization=visualization )
+
+        dataset_collection = model.DatasetCollection( collection_type="paired" )
+        history_dataset_collection = model.HistoryDatasetCollectionAssociation( collection=dataset_collection )
+        self.persist( history_dataset_collection )
+        persist_and_check_rating( model.HistoryDatasetCollectionRatingAssociation, history_dataset_collection=history_dataset_collection )
+
+        library_dataset_collection = model.LibraryDatasetCollectionAssociation( collection=dataset_collection )
+        self.persist( library_dataset_collection )
+        persist_and_check_rating( model.LibraryDatasetCollectionRatingAssociation, library_dataset_collection=library_dataset_collection )
+
+    def test_display_name( self ):
+
+        def assert_display_name_converts_to_unicode( item, name ):
+            assert not isinstance( item.name, text_type )
+            assert isinstance( item.get_display_name(), text_type )
+            assert item.get_display_name() == name
+
+        ldda = self.model.LibraryDatasetDatasetAssociation( name='ldda_name' )
+        assert_display_name_converts_to_unicode( ldda, 'ldda_name' )
+
+        hda = self.model.HistoryDatasetAssociation( name='hda_name' )
+        assert_display_name_converts_to_unicode( hda, 'hda_name' )
+
+        history = self.model.History( name='history_name' )
+        assert_display_name_converts_to_unicode( history, 'history_name' )
+
+        library = self.model.Library( name='library_name' )
+        assert_display_name_converts_to_unicode( library, 'library_name' )
+
+        library_folder = self.model.LibraryFolder( name='library_folder' )
+        assert_display_name_converts_to_unicode( library_folder, 'library_folder' )
+
+        history = self.model.History(
+            name=u'Hello₩◎ґʟⅾ'
+        )
+
+        assert isinstance( history.name, text_type )
+        assert isinstance( history.get_display_name(), text_type )
+        assert history.get_display_name() == u'Hello₩◎ґʟⅾ'
+
+    def test_tags( self ):
+        model = self.model
+
+        my_tag = model.Tag(name="Test Tag")
+        u = model.User( email="tagger at example.com", password="password" )
+        self.persist( my_tag, u )
+
+        def tag_and_test( taggable_object, tag_association_class, backref_name ):
+            assert len( getattr(self.query( model.Tag ).filter( model.Tag.name == "Test Tag" ).all()[0], backref_name) ) == 0
+
+            tag_association = tag_association_class()
+            tag_association.tag = my_tag
+            taggable_object.tags = [ tag_association ]
+            self.persist( tag_association, taggable_object )
+
+            assert len( getattr(self.query( model.Tag ).filter( model.Tag.name == "Test Tag" ).all()[0], backref_name) ) == 1
+
+        sw = model.StoredWorkflow()
+        sw.user = u
+        tag_and_test( sw, model.StoredWorkflowTagAssociation, "tagged_workflows" )
+
+        h = model.History( name="History for Tagging", user=u)
+        tag_and_test( h, model.HistoryTagAssociation, "tagged_histories" )
+
+        d1 = model.HistoryDatasetAssociation( extension="txt", history=h, create_dataset=True, sa_session=model.session )
+        tag_and_test( d1, model.HistoryDatasetAssociationTagAssociation, "tagged_history_dataset_associations" )
+
+        page = model.Page()
+        page.user = u
+        tag_and_test( page, model.PageTagAssociation, "tagged_pages" )
+
+        visualization = model.Visualization()
+        visualization.user = u
+        tag_and_test( visualization, model.VisualizationTagAssociation, "tagged_visualizations" )
+
+        dataset_collection = model.DatasetCollection( collection_type="paired" )
+        history_dataset_collection = model.HistoryDatasetCollectionAssociation( collection=dataset_collection )
+        tag_and_test( history_dataset_collection, model.HistoryDatasetCollectionTagAssociation, "tagged_history_dataset_collections" )
+
+        library_dataset_collection = model.LibraryDatasetCollectionAssociation( collection=dataset_collection )
+        tag_and_test( library_dataset_collection, model.LibraryDatasetCollectionTagAssociation, "tagged_library_dataset_collections" )
+
+    def test_collections_in_histories(self):
+        model = self.model
+
+        u = model.User( email="mary at example.com", password="password" )
+        h1 = model.History( name="History 1", user=u)
+        d1 = model.HistoryDatasetAssociation( extension="txt", history=h1, create_dataset=True, sa_session=model.session )
+        d2 = model.HistoryDatasetAssociation( extension="txt", history=h1, create_dataset=True, sa_session=model.session )
+
+        c1 = model.DatasetCollection(collection_type="pair")
+        hc1 = model.HistoryDatasetCollectionAssociation(history=h1, collection=c1, name="HistoryCollectionTest1")
+
+        dce1 = model.DatasetCollectionElement(collection=c1, element=d1, element_identifier="left")
+        dce2 = model.DatasetCollectionElement(collection=c1, element=d2, element_identifier="right")
+
+        self.persist( u, h1, d1, d2, c1, hc1, dce1, dce2 )
+
+        loaded_dataset_collection = self.query( model.HistoryDatasetCollectionAssociation ).filter( model.HistoryDatasetCollectionAssociation.name == "HistoryCollectionTest1" ).first().collection
+        self.assertEqual(len(loaded_dataset_collection.elements), 2)
+        assert loaded_dataset_collection.collection_type == "pair"
+        assert loaded_dataset_collection[ "left" ] == dce1
+        assert loaded_dataset_collection[ "right" ] == dce2
+
+    def test_collections_in_library_folders(self):
+        model = self.model
+
+        u = model.User( email="mary2 at example.com", password="password" )
+        lf = model.LibraryFolder( name="RootFolder" )
+        l = model.Library( name="Library1", root_folder=lf )
+        ld1 = model.LibraryDataset( )
+        ld2 = model.LibraryDataset( )
+
+        ldda1 = model.LibraryDatasetDatasetAssociation( extension="txt", library_dataset=ld1 )
+        ldda2 = model.LibraryDatasetDatasetAssociation( extension="txt", library_dataset=ld1 )
+
+        c1 = model.DatasetCollection(collection_type="pair")
+        dce1 = model.DatasetCollectionElement(collection=c1, element=ldda1)
+        dce2 = model.DatasetCollectionElement(collection=c1, element=ldda2)
+        self.persist( u, l, lf, ld1, ld2, c1, ldda1, ldda2, dce1, dce2 )
+
+        # TODO:
+        # loaded_dataset_collection = self.query( model.DatasetCollection ).filter( model.DatasetCollection.name == "LibraryCollectionTest1" ).first()
+        # self.assertEquals(len(loaded_dataset_collection.datasets), 2)
+        # assert loaded_dataset_collection.collection_type == "pair"
+
+    def test_default_disk_usage( self ):
+        model = self.model
+
+        u = model.User( email="disk_default at test.com", password="password" )
+        self.persist( u )
+        u.adjust_total_disk_usage( 1 )
+        u_id = u.id
+        self.expunge()
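+        # expunge() flushes the session and detaches all objects, so the query below reloads the user from the database.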
+        user_reload = model.session.query( model.User ).get( u_id )
+        assert user_reload.disk_usage == 1
+
+    def test_basic( self ):
+        model = self.model
+
+        original_user_count = len( model.session.query( model.User ).all() )
+
+        # Make some changes and commit them
+        u = model.User( email="james at foo.bar.baz", password="password" )
+        # gs = model.GalaxySession()
+        h1 = model.History( name="History 1", user=u)
+        # h1.queries.append( model.Query( "h1->q1" ) )
+        # h1.queries.append( model.Query( "h1->q2" ) )
+        h2 = model.History( name=( "H" * 1024 ) )
+        self.persist( u, h1, h2 )
+        # q1 = model.Query( "h2->q1" )
+        metadata = dict( chromCol=1, startCol=2, endCol=3 )
+        d1 = model.HistoryDatasetAssociation( extension="interval", metadata=metadata, history=h2, create_dataset=True, sa_session=model.session )
+        # h2.queries.append( q1 )
+        # h2.queries.append( model.Query( "h2->q2" ) )
+        self.persist( d1 )
+
+        # Check
+        users = model.session.query( model.User ).all()
+        assert len( users ) == original_user_count + 1
+        user = [user for user in users if user.email == "james@foo.bar.baz"][0]
+        assert user.email == "james@foo.bar.baz"
+        assert user.password == "password"
+        assert len( user.histories ) == 1
+        assert user.histories[0].name == "History 1"
+        hists = model.session.query( model.History ).all()
+        hist0 = [history for history in hists if history.name == "History 1"][0]
+        hist1 = [history for history in hists if history.name == "H" * 255][0]
+        assert hist0.name == "History 1"
+        assert hist1.name == ( "H" * 255 )
+        assert hist0.user == user
+        assert hist1.user is None
+        assert hist1.datasets[0].metadata.chromCol == 1
+        # The filename test has moved to objectstore
+        # id = hist1.datasets[0].id
+        # assert hist1.datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
+        # Do an update and check
+        hist1.name = "History 2b"
+        self.expunge()
+        hists = model.session.query( model.History ).all()
+        hist0 = [history for history in hists if history.name == "History 1"][0]
+        hist1 = [history for history in hists if history.name == "History 2b"][0]
+        assert hist0.name == "History 1"
+        assert hist1.name == "History 2b"
+        # gvk TODO need to add test for GalaxySessions, but not yet sure what they should look like.
+
+    def test_jobs( self ):
+        model = self.model
+        u = model.User( email="jobtest at foo.bar.baz", password="password" )
+        job = model.Job()
+        job.user = u
+        job.tool_id = "cat1"
+
+        self.persist( u, job )
+
+        loaded_job = model.session.query( model.Job ).filter( model.Job.user == u ).first()
+        assert loaded_job.tool_id == "cat1"
+
+    def test_job_metrics( self ):
+        model = self.model
+        u = model.User( email="jobtest at foo.bar.baz", password="password" )
+        job = model.Job()
+        job.user = u
+        job.tool_id = "cat1"
+
+        job.add_metric( "gx", "galaxy_slots", 5 )
+        job.add_metric( "system", "system_name", "localhost" )
+
+        self.persist( u, job )
+
+        task = model.Task( job=job, working_directory="/tmp", prepare_files_cmd="split.sh" )
+        task.add_metric( "gx", "galaxy_slots", 5 )
+        task.add_metric( "system", "system_name", "localhost" )
+
+        big_value = ":".join( [ "%d" % i for i in range( 2000 ) ] )
+        task.add_metric( "env", "BIG_PATH", big_value )
+        self.persist( task )
+        # Ensure big values truncated
+        assert len( task.text_metrics[ 1 ].metric_value ) <= 1023
+
+    def test_tasks( self ):
+        model = self.model
+        u = model.User( email="jobtest at foo.bar.baz", password="password" )
+        job = model.Job()
+        task = model.Task( job=job, working_directory="/tmp", prepare_files_cmd="split.sh" )
+        job.user = u
+        self.persist( u, job, task )
+
+        loaded_task = model.session.query( model.Task ).filter( model.Task.job == job ).first()
+        assert loaded_task.prepare_input_files_cmd == "split.sh"
+
+    def test_history_contents( self ):
+        model = self.model
+        u = model.User( email="contents at foo.bar.baz", password="password" )
+        # gs = model.GalaxySession()
+        h1 = model.History( name="HistoryContentsHistory1", user=u)
+
+        self.persist( u, h1, expunge=False )
+
+        d1 = self.new_hda( h1, name="1" )
+        d2 = self.new_hda( h1, name="2", visible=False )
+        d3 = self.new_hda( h1, name="3", deleted=True )
+        d4 = self.new_hda( h1, name="4", visible=False, deleted=True )
+
+        self.session().flush()
+
+        def contents_iter_names(**kwds):
+            history = model.context.query( model.History ).filter(
+                model.History.name == "HistoryContentsHistory1"
+            ).first()
+            return list( map( lambda hda: hda.name, history.contents_iter( **kwds ) ) )
+
+        self.assertEqual(contents_iter_names(), [ "1", "2", "3", "4" ])
+        assert contents_iter_names( deleted=False ) == [ "1", "2" ]
+        assert contents_iter_names( visible=True ) == [ "1", "3" ]
+        assert contents_iter_names( visible=False ) == [ "2", "4" ]
+        assert contents_iter_names( deleted=True, visible=False ) == [ "4" ]
+
+        assert contents_iter_names( ids=[ d1.id, d2.id, d3.id, d4.id ] ) == [ "1", "2", "3", "4" ]
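+        # max_in_filter_length=1 below exercises the fallback for id filters too long for a single SQL IN clause.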
+        assert contents_iter_names( ids=[ d1.id, d2.id, d3.id, d4.id ], max_in_filter_length=1 ) == [ "1", "2", "3", "4" ]
+
+        assert contents_iter_names( ids=[ d1.id, d3.id ] ) == [ "1", "3" ]
+
+    def test_workflows( self ):
+        model = self.model
+        user = model.User(
+            email="testworkflows at bx.psu.edu",
+            password="password"
+        )
+
+        def workflow_from_steps(steps):
+            stored_workflow = model.StoredWorkflow()
+            stored_workflow.user = user
+            workflow = model.Workflow()
+            workflow.steps = steps
+            workflow.stored_workflow = stored_workflow
+            return workflow
+
+        child_workflow = workflow_from_steps([])
+        self.persist( child_workflow )
+
+        workflow_step_1 = model.WorkflowStep()
+        workflow_step_1.order_index = 0
+        workflow_step_1.type = "data_input"
+        workflow_step_2 = model.WorkflowStep()
+        workflow_step_2.order_index = 1
+        workflow_step_2.type = "subworkflow"
+        workflow_step_2.subworkflow = child_workflow
+
+        workflow = workflow_from_steps([workflow_step_1, workflow_step_2])
+        self.persist( workflow )
+
+        assert workflow_step_1.id is not None
+        h1 = model.History( name="WorkflowHistory1", user=user)
+
+        invocation_uuid = uuid.uuid1()
+
+        workflow_invocation = model.WorkflowInvocation()
+        workflow_invocation.uuid = invocation_uuid
+        workflow_invocation.history = h1
+
+        workflow_invocation_step1 = model.WorkflowInvocationStep()
+        workflow_invocation_step1.workflow_invocation = workflow_invocation
+        workflow_invocation_step1.workflow_step = workflow_step_1
+
+        subworkflow_invocation = model.WorkflowInvocation()
+        workflow_invocation.attach_subworkflow_invocation_for_step(workflow_step_2, subworkflow_invocation)
+
+        workflow_invocation_step2 = model.WorkflowInvocationStep()
+        workflow_invocation_step2.workflow_invocation = workflow_invocation
+        workflow_invocation_step2.workflow_step = workflow_step_2
+
+        workflow_invocation.workflow = workflow
+
+        d1 = self.new_hda( h1, name="1" )
+        workflow_request_dataset = model.WorkflowRequestToInputDatasetAssociation()
+        workflow_request_dataset.workflow_invocation = workflow_invocation
+        workflow_request_dataset.workflow_step = workflow_step_1
+        workflow_request_dataset.dataset = d1
+        self.persist( workflow_invocation )
+        assert workflow_request_dataset is not None
+        assert workflow_invocation.id is not None
+
+        history_id = h1.id
+        self.expunge()
+
+        loaded_invocation = self.query( model.WorkflowInvocation ).get( workflow_invocation.id )
+        assert loaded_invocation.uuid == invocation_uuid, "%s != %s" % (loaded_invocation.uuid, invocation_uuid)
+        assert loaded_invocation
+        assert loaded_invocation.history.id == history_id
+
+        step_1, step_2 = loaded_invocation.workflow.steps
+
+        assert not step_1.subworkflow
+        assert step_2.subworkflow
+        assert len( loaded_invocation.steps ) == 2
+
+        subworkflow_invocation_assoc = loaded_invocation.get_subworkflow_invocation_association_for_step(step_2)
+        assert subworkflow_invocation_assoc is not None
+        assert isinstance(subworkflow_invocation_assoc.subworkflow_invocation, model.WorkflowInvocation)
+        assert isinstance(subworkflow_invocation_assoc.parent_workflow_invocation, model.WorkflowInvocation)
+
+        assert subworkflow_invocation_assoc.subworkflow_invocation.history.id == history_id
+
+        u1 = loaded_invocation.update_time
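+        # Sleep a full second so the second-resolution update_time is guaranteed to differ after the update.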
+        time.sleep(1)
+        loaded_invocation.steps[0].update()
+        self.expunge()
+        loaded_invocation = self.query( model.WorkflowInvocation ).get( workflow_invocation.id )
+        u2 = loaded_invocation.update_time
+
+        assert u1 != u2
+
+    def new_hda( self, history, **kwds ):
+        return history.add_dataset( self.model.HistoryDatasetAssociation( create_dataset=True, sa_session=self.model.session, **kwds ) )
+
+    @classmethod
+    def setUpClass(cls):
+        # Start the database and connect the mapping
+        cls.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True, object_store=MockObjectStore() )
+        assert cls.model.engine is not None
+
+    @classmethod
+    def query( cls, type ):
+        return cls.model.session.query( type )
+
+    @classmethod
+    def persist(cls, *args, **kwargs):
+        session = cls.session()
+        flush = kwargs.get('flush', True)
+        for arg in args:
+            session.add( arg )
+            if flush:
+                session.flush()
+        if kwargs.get('expunge', not flush):
+            cls.expunge()
+        return arg  # Return last or only arg.
+
+    @classmethod
+    def session(cls):
+        return cls.model.session
+
+    @classmethod
+    def expunge(cls):
+        cls.model.session.flush()
+        cls.model.session.expunge_all()
+
+
+class MockObjectStore(object):
+
+    def __init__(self):
+        pass
+
+    def size(self, dataset):
+        return 42
+
+    def exists(self, *args, **kwds):
+        return True
+
+    def get_filename(self, *args, **kwds):
+        return "dataest_14.dat"
+
+
+def get_suite():
+    suite = unittest.TestSuite()
+    suite.addTest( MappingTests( "test_basic" ) )
+    return suite
diff --git a/test/unit/test_galaxy_transactions.py b/test/unit/test_galaxy_transactions.py
new file mode 100644
index 0000000..b6f2d1f
--- /dev/null
+++ b/test/unit/test_galaxy_transactions.py
@@ -0,0 +1,74 @@
+from galaxy import model
+from galaxy.managers import context
+from galaxy.model import mapping
+from galaxy.util import bunch
+
+
+class TestTransaction( context.ProvidesAppContext ):
+
+    def __init__( self ):
+        self.app = TestApp()
+
+
+def test_logging_events_off():
+    trans = TestTransaction()
+    trans.log_event( "test event 123" )
+    assert len( trans.sa_session.query( model.Event ).all() ) == 0
+
+
+def test_logging_events_on():
+    trans = TestTransaction()
+    trans.app.config.log_events = True
+    trans.log_event( "test event 123" )
+    events = trans.sa_session.query( model.Event ).all()
+    assert len( events ) == 1
+    assert events[ 0 ].message == "test event 123"
+
+
+def test_logging_actions_off():
+    trans = TestTransaction()
+    trans.log_action( "test action 123" )
+    assert len( trans.sa_session.query( model.Event ).all() ) == 0
+
+
+def test_logging_actions_on():
+    trans = TestTransaction()
+    trans.app.config.log_actions = True
+    trans.log_action( None, "test action 123", context="the context", params=dict(foo="bar") )
+    actions = trans.sa_session.query( model.UserAction ).all()
+    assert len( actions ) == 1
+    assert actions[ 0 ].action == "test action 123"
+
+
+def test_expunge_all():
+    trans = TestTransaction()
+
+    user = model.User( "foo", "bar1" )
+    trans.sa_session.add( user )
+
+    user.password = "bar2"
+    trans.sa_session.flush()
+
+    assert trans.sa_session.query( model.User ).first().password == "bar2"
+
+    trans.sa_session.expunge_all()
+
+    user.password = "bar3"
+    trans.sa_session.flush()
+
+    # Password unchanged because the user object is no longer attached to the session/context.
+    assert trans.sa_session.query( model.User ).first().password == "bar2"
+
+
+class TestApp( object ):
+
+    def __init__( self ):
+        self.config = bunch.Bunch(
+            log_events=False,
+            log_actions=False,
+        )
+        self.model = mapping.init(
+            "/tmp",
+            "sqlite:///:memory:",
+            create_tables=True
+        )
diff --git a/test/unit/test_lazy_process.py b/test/unit/test_lazy_process.py
new file mode 100644
index 0000000..4200ec2
--- /dev/null
+++ b/test/unit/test_lazy_process.py
@@ -0,0 +1,26 @@
+"""Unit module for LazyProcess object in galaxy.util.lazy_process."""
+import os
+import tempfile
+import time
+
+from galaxy.util.lazy_process import LazyProcess
+
+
+def test_lazy_process():
+    """Create process, ensure start_process starts it and shutdown kills it."""
+    t = tempfile.NamedTemporaryFile()
+    os.remove(t.name)
+    lazy_process = LazyProcess(["bash", "-c", "touch %s; sleep 100" % t.name])
+    assert not os.path.exists(t.name)
+    lazy_process.start_process()
+    while not os.path.exists(t.name):
+        time.sleep(.01)
+    assert lazy_process.process.poll() is None
+    lazy_process.shutdown()
+    ret_val = None
+    for i in range(10):
+        ret_val = lazy_process.process.poll()
+        if ret_val is not None:
+            break
+        time.sleep(.01)
+    assert ret_val is not None
diff --git a/test/unit/test_objectstore.py b/test/unit/test_objectstore.py
new file mode 100644
index 0000000..689693f
--- /dev/null
+++ b/test/unit/test_objectstore.py
@@ -0,0 +1,247 @@
+import os
+
+from contextlib import contextmanager
+from shutil import rmtree
+from string import Template
+from tempfile import mkdtemp
+
+from galaxy import objectstore
+from galaxy.exceptions import ObjectInvalid
+
+DISK_TEST_CONFIG = """<?xml version="1.0"?>
+<object_store type="disk">
+    <files_dir path="${temp_directory}/files1"/>
+    <extra_dir type="temp" path="${temp_directory}/tmp1"/>
+    <extra_dir type="job_work" path="${temp_directory}/job_working_directory1"/>
+</object_store>
+"""
+
+
+def test_disk_store():
+    with TestConfig(DISK_TEST_CONFIG) as (directory, object_store):
+        # Test no dataset with id 1 exists.
+        absent_dataset = MockDataset(1)
+        assert not object_store.exists(absent_dataset)
+
+        # Write empty dataset 2, ensure it exists and is empty.
+        empty_dataset = MockDataset(2)
+        directory.write("", "files1/000/dataset_2.dat")
+        assert object_store.exists(empty_dataset)
+        assert object_store.empty(empty_dataset)
+
+        # Write a non-empty dataset 3, test it exists and is not empty.
+        hello_world_dataset = MockDataset(3)
+        directory.write("Hello World!", "files1/000/dataset_3.dat")
+        assert object_store.exists(hello_world_dataset)
+        assert not object_store.empty(hello_world_dataset)
+
+        # Test get_data
+        data = object_store.get_data(hello_world_dataset)
+        assert data == "Hello World!"
+
+        data = object_store.get_data(hello_world_dataset, start=1, count=6)
+        assert data == "ello W"
+
+        # Test Size
+
+        # Test absent and empty datasets yield size of 0.
+        assert object_store.size(absent_dataset) == 0
+        assert object_store.size(empty_dataset) == 0
+        # Non-empty datasets should report a positive size.
+        assert object_store.size(hello_world_dataset) > 0  # Should this always be the number of bytes?
+
+        # Test percent used (to some degree)
+        percent_store_used = object_store.get_store_usage_percent()
+        assert percent_store_used > 0.0
+        assert percent_store_used < 100.0
+
+        # Test update_from_file
+        output_dataset = MockDataset(4)
+        output_real_path = os.path.join(directory.temp_directory, "files1", "000", "dataset_4.dat")
+        assert not os.path.exists(output_real_path)
+        output_working_path = directory.write("NEW CONTENTS", "job_working_directory1/example_output")
+        object_store.update_from_file(output_dataset, file_name=output_working_path, create=True)
+        assert os.path.exists(output_real_path)
+
+        # Test delete
+        to_delete_dataset = MockDataset(5)
+        to_delete_real_path = directory.write("content to be deleted!", "files1/000/dataset_5.dat")
+        assert object_store.exists(to_delete_dataset)
+        assert object_store.delete(to_delete_dataset)
+        assert not object_store.exists(to_delete_dataset)
+        assert not os.path.exists(to_delete_real_path)
+
+
+def test_disk_store_alt_name_relpath():
+    """ Test that alt_name cannot be used to access arbitrary paths using a
+    relative path
+    """
+    with TestConfig(DISK_TEST_CONFIG) as (directory, object_store):
+        empty_dataset = MockDataset(1)
+        directory.write("", "files1/000/dataset_1.dat")
+        directory.write("foo", "foo.txt")
+        try:
+            assert object_store.get_data(
+                empty_dataset,
+                extra_dir='dataset_1_files',
+                alt_name='../../../foo.txt') != 'foo'
+        except ObjectInvalid:
+            pass
+
+
+def test_disk_store_alt_name_abspath():
+    """ Test that alt_name cannot be used to access arbitrary paths using a
+    absolute path
+    """
+    with TestConfig(DISK_TEST_CONFIG) as (directory, object_store):
+        empty_dataset = MockDataset(1)
+        directory.write("", "files1/000/dataset_1.dat")
+        absfoo = os.path.abspath(os.path.join(directory.temp_directory, "foo.txt"))
+        with open(absfoo, 'w') as f:
+            f.write("foo")
+        try:
+            assert object_store.get_data(
+                empty_dataset,
+                extra_dir='dataset_1_files',
+                alt_name=absfoo) != 'foo'
+        except ObjectInvalid:
+            pass
+
+
+HIERARCHICAL_TEST_CONFIG = """<?xml version="1.0"?>
+<object_store type="hierarchical">
+    <backends>
+        <backend id="files1" type="disk" weight="1" order="0">
+            <files_dir path="${temp_directory}/files1"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp1"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory1"/>
+        </backend>
+        <backend id="files2" type="disk" weight="1" order="1">
+            <files_dir path="${temp_directory}/files2"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp2"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory2"/>
+        </backend>
+    </backends>
+</object_store>
+"""
+
+
+def test_hierarchical_store():
+    with TestConfig(HIERARCHICAL_TEST_CONFIG) as (directory, object_store):
+
+        # Test no dataset with id 1 exists.
+        assert not object_store.exists(MockDataset(1))
+
+        # Write empty dataset 2 in second backend, ensure it is empty and
+        # exists.
+        directory.write("", "files2/000/dataset_2.dat")
+        assert object_store.exists(MockDataset(2))
+        assert object_store.empty(MockDataset(2))
+
+        # Write non-empty dataset in backend 1, test it is not empty and exists.
+        directory.write("Hello World!", "files1/000/dataset_3.dat")
+        assert object_store.exists(MockDataset(3))
+        assert not object_store.empty(MockDataset(3))
+
+        # Assert creation always happens in first backend.
+        for i in range(100):
+            dataset = MockDataset(100 + i)
+            object_store.create(dataset)
+            assert object_store.get_filename(dataset).find("files1") > 0
+
+
+DISTRIBUTED_TEST_CONFIG = """<?xml version="1.0"?>
+<object_store type="distributed">
+    <backends>
+        <backend id="files1" type="disk" weight="2" order="0">
+            <files_dir path="${temp_directory}/files1"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp1"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory1"/>
+        </backend>
+        <backend id="files2" type="disk" weight="1" order="1">
+            <files_dir path="${temp_directory}/files2"/>
+            <extra_dir type="temp" path="${temp_directory}/tmp2"/>
+            <extra_dir type="job_work" path="${temp_directory}/job_working_directory2"/>
+        </backend>
+    </backends>
+</object_store>
+"""
+
+
+def test_distributed_store():
+    with TestConfig(DISTRIBUTED_TEST_CONFIG) as (directory, object_store):
+        with __stubbed_persistence() as persisted_ids:
+            for i in range(100):
+                dataset = MockDataset(100 + i)
+                object_store.create(dataset)
+
+        # Test that creation distributed datasets between backends according to their weights.
+        backend_1_count = len([v for v in persisted_ids.values() if v == "files1"])
+        backend_2_count = len([v for v in persisted_ids.values() if v == "files2"])
+
+        assert backend_1_count > 0
+        assert backend_2_count > 0
+        assert backend_1_count > backend_2_count
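+        # With a 2:1 weighting over 100 creations, files1 should receive more datasets than files2
+        # with overwhelming probability, though the exact split is random rather than fixed.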
+
+
+class TestConfig(object):
+    def __init__(self, config_xml):
+        self.temp_directory = mkdtemp()
+        self.write(config_xml, "store.xml")
+        config = MockConfig(self.temp_directory)
+        self.object_store = objectstore.build_object_store_from_config(config)
+
+    def __enter__(self):
+        return self, self.object_store
+
+    def __exit__(self, type, value, tb):
+        rmtree(self.temp_directory)
+
+    def write(self, contents, name):
+        path = os.path.join(self.temp_directory, name)
+        directory = os.path.dirname(path)
+        if not os.path.exists(directory):
+            os.makedirs(directory)
+        contents_template = Template(contents)
+        expanded_contents = contents_template.safe_substitute(temp_directory=self.temp_directory)
+        open(path, "w").write(expanded_contents)
+        return path
+
+
+class MockConfig(object):
+
+    def __init__(self, temp_directory):
+        self.file_path = temp_directory
+        self.object_store_config_file = os.path.join(temp_directory, "store.xml")
+        self.object_store_check_old_style = False
+        self.jobs_directory = temp_directory
+        self.new_file_path = temp_directory
+        self.umask = 0000
+
+
+class MockDataset(object):
+
+    def __init__(self, id):
+        self.id = id
+        self.object_store_id = None
+
+
+# Poor man's mocking. A real mocking library should be added as a Galaxy
+# development dependency.
+PERSIST_METHOD_NAME = "_create_object_in_session"
+
+
+@contextmanager
+def __stubbed_persistence():
+    real_method = getattr(objectstore, PERSIST_METHOD_NAME)
+    try:
+        persisted_ids = {}
+
+        def persist(object):
+            persisted_ids[object.id] = object.object_store_id
+        setattr(objectstore, PERSIST_METHOD_NAME, persist)
+        yield persisted_ids
+
+    finally:
+        setattr(objectstore, PERSIST_METHOD_NAME, real_method)
diff --git a/test/unit/test_routes.py b/test/unit/test_routes.py
new file mode 100644
index 0000000..e469538
--- /dev/null
+++ b/test/unit/test_routes.py
@@ -0,0 +1,111 @@
+from galaxy.util.bunch import Bunch
+from galaxy.web import url_for
+from galaxy.web.framework.webapp import WebApplication
+from galaxy.webapps.galaxy import buildapp as galaxy_buildapp
+
+
+class TestWebapp( WebApplication ):
+
+    def _instantiate_controller( self, type, app ):
+        # Stub out all actual controllers - just want to test routes.
+        return object()
+
+    def assert_maps( self, url, method="GET", **parts ):
+        map_result = self.mapper.match( url, environ={"REQUEST_METHOD": method } )
+        for key, expected_value in parts.items():
+            actual_value = map_result[ key ]
+            if actual_value != expected_value:
+                message = "Problem mapping route [%s], part %s expected value [%s] but obtained [%s]"
+                raise AssertionError(message % ( url, key, expected_value, actual_value ) )
+
+
+def test_galaxy_routes( ):
+    test_config = Bunch( template_path="/tmp", template_cache="/tmp" )
+    app = Bunch( config=test_config, security=object(), trace_logger=None )
+    test_webapp = TestWebapp( app )
+
+    galaxy_buildapp.populate_api_routes( test_webapp, app )
+
+    assert_url_is( url_for( "api_key_retrieval" ), "/api/authenticate/baseauth" )
+
+    # Test previously problematic tool ids with slashes.
+    test_webapp.assert_maps(
+        "/api/tools/testtoolshed.g2.bx.psu.edu/devteam/tool1",
+        controller="tools",
+        id="testtoolshed.g2.bx.psu.edu/devteam/tool1"
+    )
+
+    test_webapp.assert_maps(
+        "/api/datatypes/sniffers",
+        controller="datatypes",
+        action="sniffers"
+    )
+
+    test_webapp.assert_maps(
+        "/api/histories/123/contents/456",
+        controller="history_contents",
+        action="show"
+    )
+
+    test_webapp.assert_maps(
+        "/api/histories/123/contents/456",
+        method="PUT",
+        controller="history_contents",
+        action="update",
+    )
+
+    # Test differentiating datasets and dataset collections
+    # in history contents.
+    test_webapp.assert_maps(
+        "/api/histories/123/contents/datasets/456",
+        method="PUT",
+        controller="history_contents",
+        action="update",
+        type="dataset"
+    )
+
+    test_webapp.assert_maps(
+        "/api/histories/123/contents/dataset_collections/456",
+        method="PUT",
+        controller="history_contents",
+        action="update",
+        type="dataset_collection"
+    )
+
+    assert_url_is(
+        url_for( "history_content", history_id="123", id="456" ),
+        "/api/histories/123/contents/456"
+    )
+
+    assert_url_is(
+        url_for( "history_content_typed", history_id="123", id="456", type="dataset" ),
+        "/api/histories/123/contents/datasets/456"
+    )
+
+    test_webapp.assert_maps(
+        "/api/dependency_resolvers",
+        controller="tool_dependencies",
+        action="index"
+    )
+
+    test_webapp.assert_maps(
+        "/api/dependency_resolvers/dependency",
+        controller="tool_dependencies",
+        action="manager_dependency"
+    )
+
+    test_webapp.assert_maps(
+        "/api/dependency_resolvers/0",
+        controller="tool_dependencies",
+        action="show"
+    )
+
+    test_webapp.assert_maps(
+        "/api/dependency_resolvers/0/dependency",
+        controller="tool_dependencies",
+        action="resolver_dependency"
+    )
+
+
+def assert_url_is( actual, expected ):
+    assert actual == expected, "Expected URL [%s] but obtained [%s]" % ( expected, actual )
diff --git a/test/unit/test_security_helper.py b/test/unit/test_security_helper.py
new file mode 100644
index 0000000..08e1041
--- /dev/null
+++ b/test/unit/test_security_helper.py
@@ -0,0 +1,73 @@
+from galaxy.web import security
+
+
+test_helper_1 = security.SecurityHelper( id_secret="sec1" )
+test_helper_2 = security.SecurityHelper( id_secret="sec2" )
+
+
+def test_encode_decode():
+    # Different ids are encoded differently
+    assert test_helper_1.encode_id( 1 ) != test_helper_1.encode_id( 2 )
+    # But decoding an encoded id brings back the original id
+    assert 1 == test_helper_1.decode_id( test_helper_1.encode_id( 1 ) )
+
+
+def test_nested_encoding():
+    # Does nothing if not a dict
+    assert test_helper_1.encode_all_ids( 1 ) == 1
+
+    # Encodes top-level things ending in _id
+    assert test_helper_1.encode_all_ids( dict( history_id=1 ) )[ "history_id" ] == test_helper_1.encode_id( 1 )
+    # ..except tool_id
+    assert test_helper_1.encode_all_ids( dict( tool_id=1 ) )[ "tool_id" ] == 1
+
+    # Encodes lists at the top level if they end in _ids
+    expected_ids = [ test_helper_1.encode_id( 1 ), test_helper_1.encode_id( 2 ) ]
+    assert test_helper_1.encode_all_ids( dict( history_ids=[ 1, 2 ] ) )[ "history_ids" ] == expected_ids
+
+    # Encodes nested stuff if and only if recursive set to true.
+    nested_dict = dict( objects=dict( history_ids=[ 1, 2 ] ) )
+    assert test_helper_1.encode_all_ids( nested_dict )[ "objects" ][ "history_ids" ] == [ 1, 2 ]
+    assert test_helper_1.encode_all_ids( nested_dict, recursive=False )[ "objects" ][ "history_ids" ] == [ 1, 2 ]
+    assert test_helper_1.encode_all_ids( nested_dict, recursive=True )[ "objects" ][ "history_ids" ] == expected_ids
+
+
+def test_per_kind_encode_decode():
+    # Different ids are encoded differently
+    assert test_helper_1.encode_id( 1, kind="k1" ) != test_helper_1.encode_id( 2, kind="k1" )
+    # But decoding an encoded id brings back the original id
+    assert 1 == test_helper_1.decode_id( test_helper_1.encode_id( 1, kind="k1" ), kind="k1" )
+
+
+def test_different_secrets_encode_differently():
+    assert test_helper_1.encode_id( 1 ) != test_helper_2.encode_id( 1 )
+
+
+def test_per_kind_encodes_id_differently():
+    assert test_helper_1.encode_id( 1 ) != test_helper_1.encode_id( 1, kind="new_kind" )
+
+
+def test_encode_dict():
+    test_dict = dict(
+        id=1,
+        other=2,
+        history_id=3,
+    )
+    encoded_dict = test_helper_1.encode_dict_ids( test_dict )
+    assert encoded_dict[ "id" ] == test_helper_1.encode_id( 1 )
+    assert encoded_dict[ "other" ] == 2
+    assert encoded_dict[ "history_id" ] == test_helper_1.encode_id( 3 )
+
+
+def test_guid_generation():
+    guids = set()
+    for i in range( 100 ):
+        guids.add( test_helper_1.get_new_guid() )
+    assert len( guids ) == 100  # No duplicate guids generated.
+
+
+def test_encode_decode_guid():
+    session_key = test_helper_1.get_new_guid()
+    encoded_key = test_helper_1.encode_guid( session_key )
+    decoded_key = test_helper_1.decode_guid( encoded_key ).encode( "utf-8" )
+    assert session_key == decoded_key, "%s != %s" % ( session_key, decoded_key )
diff --git a/test/unit/test_sockets.py b/test/unit/test_sockets.py
new file mode 100644
index 0000000..7fe67c4
--- /dev/null
+++ b/test/unit/test_sockets.py
@@ -0,0 +1,11 @@
+import socket
+from galaxy.util import sockets
+
+
+def test_unused_free_port_unconstrained():
+    port = sockets.unused_port()
+
+    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    # Would throw an exception if the port were not free.
+    s.bind(('localhost', port))
+    s.close()
diff --git a/test/unit/test_sqlite_utils.py b/test/unit/test_sqlite_utils.py
new file mode 100644
index 0000000..d3490cc
--- /dev/null
+++ b/test/unit/test_sqlite_utils.py
@@ -0,0 +1,60 @@
+from galaxy.util import sqlite
+
+
+def test_query_whitelisting():
+    __assert_whitelisted("SELECT * from FOO")
+    __assert_whitelisted("SELECT f.col1, f.col2 from FOO as f")
+    __assert_whitelisted("SELECT f.col1, b.col2 from FOO as f inner join BAR as b on f.id = b.foo_id")
+    __assert_not_whitelisted("UPDATE FOO SET foo=6")
+    __assert_not_whitelisted("TRUNCATE FOO")
+
+
+def test_sqlite_exploits():
+    # This is not really testing any Galaxy code, just experimenting with ways
+    # to attempt to exploit sqlite3 connections.
+
+    # More info...
+    # http://atta.cked.me/home/sqlite3injectioncheatsheet
+
+    connection = sqlite.connect(":memory:")
+    connection.execute("create TABLE FOO (foo1 text)")
+    __assert_has_n_rows(connection, "select * from FOO", 0)
+    __assert_query_errors(connection, "select * from FOOX")
+
+    # Make sure sqlite query cannot execute multiple statements
+    __assert_query_errors(connection, "select * from FOO; select * from FOO")
+
+    # Make sure sqlite cannot select on PRAGMA results
+    __assert_query_errors(connection, "select * from (PRAGMA database_list)")
+
+    __assert_has_n_rows(connection, "select * from FOO where foo1 in (SELECT foo1 from FOO)", 0)
+    # Ensure nested queries cannot modify database.
+    __assert_query_errors(connection, "select * from FOO where foo1 in (INSERT INTO FOO VALUES ('bar')")
+
+    # Should access to the schema be blacklisted?
+    # __assert_has_n_rows(connection, "select * from SQLITE_MASTER", 0)
+
+
+def __assert_has_n_rows(connection, query, n):
+    count = 0
+    for row in connection.cursor().execute(query):
+        count += 1
+    assert count == n
+
+
+def __assert_query_errors(connection, query):
+    exception = False
+    try:
+        for row in connection.cursor().execute(query):
+            pass
+    except Exception:
+        exception = True
+    assert exception
+
+
+def __assert_whitelisted( query ):
+    assert sqlite.is_read_only_query(query), "Query [%s] fails whitelist." % query
+
+
+def __assert_not_whitelisted( query ):
+    assert not sqlite.is_read_only_query(query), "Query [%s] incorrectly fails whitelist." % query
diff --git a/test/unit/test_topsort.py b/test/unit/test_topsort.py
new file mode 100644
index 0000000..d0ad954
--- /dev/null
+++ b/test/unit/test_topsort.py
@@ -0,0 +1,39 @@
+from galaxy.util import topsort
+
+
+def test_topsort_level_stability():
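+    # topsort_levels groups nodes by dependency depth: the first level holds
+    # nodes with no incoming edges, and ties keep their input order - the
+    # stability exercised below.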
+    data = [
+        (0, 2),
+        (1, 2),
+        (2, 3),
+        (2, 4),
+        (3, 4),
+        (3, 5),
+        (6, 2),
+    ]
+    assert topsort.topsort_levels( data )[ 0 ] == [ 0, 1, 6 ]
+    assert topsort.topsort( data ) == [ 0, 1, 6, 2, 3, 4, 5 ]
+    # Swap first two edges - so 1 appears first
+    swap( data, 0, 1 )
+    assert topsort.topsort_levels( data )[ 0 ] == [ 1, 0, 6 ]
+    assert topsort.topsort( data ) == [ 1, 0, 6, 2, 3, 4, 5 ]
+
+    # Shouldn't really affect the sorting of 1, 0, 6
+    swap( data, 3, 4 )
+    assert topsort.topsort_levels( data )[ 0 ] == [ 1, 0, 6 ]
+    assert topsort.topsort( data ) == [ 1, 0, 6, 2, 3, 4, 5 ]
+
+    # Place 6 before 0 in the original list
+    swap( data, 1, 6 )
+    assert topsort.topsort_levels( data )[ 0 ] == [ 1, 6, 0 ]
+    assert topsort.topsort( data ) == [ 1, 6, 0, 2, 3, 4, 5 ]
+
+
+def test_topsort_doc():
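+    # Edges are (predecessor, successor) pairs; the self-pair (3, 3) is the
+    # conventional way to introduce node 3 without any ordering constraint.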
+    assert topsort.topsort([(1, 2), (3, 3)]) == [1, 3, 2]
+
+
+def swap(lst, i, j):
+    lst[i], lst[j] = lst[j], lst[i]
diff --git a/test/unit/tools/__init__.py b/test/unit/tools/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/tools/filter_modules/__init__.py b/test/unit/tools/filter_modules/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/tools/filter_modules/filtermod.py b/test/unit/tools/filter_modules/filtermod.py
new file mode 100644
index 0000000..64adfd4
--- /dev/null
+++ b/test/unit/tools/filter_modules/filtermod.py
@@ -0,0 +1,22 @@
+""" Test filters used by test_toolbox_filters.py.
+"""
+
+
+def filter_tool( context, tool ):
+    """Test Filter Tool"""
+    return False
+
+
+def filter_section( context, section ):
+    """Test Filter Section"""
+    return False
+
+
+def filter_label_1( context, label ):
+    """Test Filter Label 1"""
+    return False
+
+
+def filter_label_2( context, label ):
+    """Test Filter Label 2"""
+    return False
diff --git a/test/unit/tools/test_actions.py b/test/unit/tools/test_actions.py
new file mode 100644
index 0000000..8689ba5
--- /dev/null
+++ b/test/unit/tools/test_actions.py
@@ -0,0 +1,268 @@
+import string
+import unittest
+from xml.etree.ElementTree import XML
+
+import tools_support
+from galaxy import model
+from galaxy.tools.actions import (
+    DefaultToolAction,
+    determine_output_format,
+    on_text_for_names
+)
+from galaxy.tools.parser.output_objects import ToolOutput
+
+TEST_HANDLER_NAME = "test_handler_1"
+
+
+# I cannot think of a saner way to test whether data is being wrapped than to
+# use a data param in the output label - though you would probably never want
+# to do this.
+DATA_IN_LABEL_TOOL_CONTENTS = '''<tool id="test_tool" name="Test Tool">
+    <command>echo "$param1" < $out1</command>
+    <inputs>
+        <repeat name="repeat1" label="The Repeat">
+            <param type="data" name="param1" value="" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out1" format="data" label="Output (${repeat1[0].param1})" />
+    </outputs>
+</tool>
+'''
+
+# Tool with two outputs - used to verify that all datasets within the same job
+# get the same object store id.
+TWO_OUTPUTS = '''<tool id="test_tool" name="Test Tool">
+    <command>echo "$param1" < $out1</command>
+    <inputs>
+        <param type="text" name="param1" value="" />
+    </inputs>
+    <outputs>
+        <data name="out1" format="data" label="Output ($param1)" />
+        <data name="out2" format="data" label="Output 2 ($param1)" />
+    </outputs>
+</tool>
+'''
+
+
+def test_on_text_for_names():
+    def assert_on_text_is( expected, *names ):
+        on_text = on_text_for_names( names )
+        assert on_text == expected, "Wrong on text value %s, expected %s" % ( on_text, expected )
+
+    assert_on_text_is( "data 1", "data 1" )
+    assert_on_text_is( "data 1 and data 2", "data 1", "data 2" )
+    assert_on_text_is( "data 1, data 2, and data 3", "data 1", "data 2", "data 3" )
+    assert_on_text_is( "data 1, data 2, and others", "data 1", "data 2", "data 3", "data 4" )
+
+    assert_on_text_is( "data 1 and data 2", "data 1", "data 1", "data 2" )
+
+
+class DefaultToolActionTestCase( unittest.TestCase, tools_support.UsesApp, tools_support.UsesTools ):
+
+    def setUp( self ):
+        self.setup_app( mock_model=False )
+        history = model.History()
+        self.history = history
+        self.trans = MockTrans(
+            self.app,
+            self.history
+        )
+        self.app.model.context.add( history )
+        self.app.model.context.flush()
+        self.action = DefaultToolAction()
+        self.app.config.len_file_path = "moocow"
+        self.app.job_config[ "get_handler" ] = lambda h: TEST_HANDLER_NAME
+        self.app.object_store = MockObjectStore()
+
+    def test_output_created( self ):
+        _, output = self._simple_execute()
+        assert len( output ) == 1
+        assert "out1" in output
+
+    def test_output_label( self ):
+        _, output = self._simple_execute()
+        self.assertEquals( output[ "out1" ].name, "Output (moo)" )
+
+    def test_output_label_data( self ):
+        hda1 = self.__add_dataset()
+        hda2 = self.__add_dataset()
+        incoming = {
+            "param1": hda1,
+            "repeat1": [
+                {"param2": hda2},
+            ]
+        }
+        job, output = self._simple_execute(
+            tools_support.SIMPLE_CAT_TOOL_CONTENTS,
+            incoming,
+        )
+        self.assertEquals( output[ "out1" ].name, "Test Tool on data 2 and data 1" )
+
+    def test_object_store_ids( self ):
+        _, output = self._simple_execute( contents=TWO_OUTPUTS )
+        self.assertEquals( output[ "out1" ].name, "Output (moo)" )
+        self.assertEquals( output[ "out2" ].name, "Output 2 (moo)" )
+
+    def test_params_wrapped( self ):
+        hda1 = self.__add_dataset()
+        _, output = self._simple_execute(
+            contents=DATA_IN_LABEL_TOOL_CONTENTS,
+            incoming=dict( repeat1=[ dict( param1=hda1 ) ] ),
+        )
+        # Again this is a stupid way to ensure data parameters are wrapped.
+        self.assertEquals( output[ "out1" ].name, "Output (%s)" % hda1.dataset.get_file_name() )
+
+    def test_handler_set( self ):
+        job, _ = self._simple_execute()
+        assert job.handler == TEST_HANDLER_NAME
+
+    def __add_dataset( self, state='ok' ):
+        hda = model.HistoryDatasetAssociation()
+        hda.dataset = model.Dataset()
+        hda.dataset.state = state
+        hda.dataset.external_filename = "/tmp/datasets/dataset_001.dat"
+        self.history.add_dataset( hda )
+        self.app.model.context.flush()
+        return hda
+
+    def _simple_execute( self, contents=None, incoming=None ):
+        if contents is None:
+            contents = tools_support.SIMPLE_TOOL_CONTENTS
+        if incoming is None:
+            incoming = dict(param1="moo")
+        self._init_tool( contents )
+        return self.action.execute(
+            tool=self.tool,
+            trans=self.trans,
+            history=self.history,
+            incoming=incoming,
+        )
+
+
+def test_determine_output_format():
+    # Test simple case of explicitly defined output with no changes.
+    direct_output = quick_output("txt")
+    __assert_output_format_is("txt", direct_output)
+
+    # Test if format is "input" (which just uses the last input on the form.)
+    input_based_output = quick_output("input")
+    __assert_output_format_is("fastq", input_based_output, [("i1", "fasta"), ("i2", "fastq")])
+
+    # Test using format_source (testing a couple different positions)
+    input_based_output = quick_output("txt", format_source="i1")
+    __assert_output_format_is("fasta", input_based_output, [("i1", "fasta"), ("i2", "fastq")])
+
+    input_based_output = quick_output("fastq", format_source="hdcai[0]")
+    __assert_output_format_is("txt", input_based_output, [("i1", "fasta"), ("i2", "fastq")], add_collection=True)
+
+    input_based_output = quick_output("fastq", format_source="""hdcai["forward"]""")
+    __assert_output_format_is("txt", input_based_output, [("i1", "fasta"), ("i2", "fastq")], add_collection=True)
+
+    input_based_output = quick_output("fastq", format_source="""hdcai['forward']""")
+    __assert_output_format_is("txt", input_based_output, [("i1", "fasta"), ("i2", "fastq")], add_collection=True)
+
+    input_based_output = quick_output("txt", format_source="i2")
+    __assert_output_format_is("fastq", input_based_output, [("i1", "fasta"), ("i2", "fastq")])
+
+    change_format_xml = """<data><change_format>
+        <when input="options_type.output_type" value="solexa" format="fastqsolexa" />
+        <when input="options_type.output_type" value="illumina" format="fastqillumina" />
+    </change_format></data>"""
+
+    change_format_output = quick_output("fastq", change_format_xml=change_format_xml)
+    # Test matching a change_format when.
+    __assert_output_format_is("fastqillumina", change_format_output, param_context={"options_type": {"output_type": "illumina"}} )
+    # Test change_format but no match
+    __assert_output_format_is("fastq", change_format_output, param_context={"options_type": {"output_type": "sanger"}} )
+
+    change_on_metadata_xml_template = string.Template("""<data><change_format>
+        <when input_dataset="${input}" attribute="random_field" value="1" format="fastqsolexa" />
+        <when input_dataset="${input}" attribute="random_field" value="2" format="fastqillumina" />
+    </change_format></data>""")
+
+    change_on_metadata_illumina = change_on_metadata_xml_template.safe_substitute({'input': "i2"})
+    change_on_metadata_output = quick_output("fastq", change_format_xml=change_on_metadata_illumina)
+    __assert_output_format_is("fastqillumina", change_on_metadata_output, [("i1", "txt"), ("i2", "txt")] )
+
+    change_on_metadata_solexa = change_on_metadata_xml_template.safe_substitute({'input': "i1"})
+    change_on_metadata_output = quick_output("fastq", change_format_xml=change_on_metadata_solexa)
+    __assert_output_format_is("fastqsolexa", change_on_metadata_output, [("i1", "txt"), ("i2", "txt")] )
+
+
+def __assert_output_format_is( expected, output, input_extensions=[], param_context=[], add_collection=False ):
+    inputs = {}
+    last_ext = "data"
+    i = 1
+    for name, ext in input_extensions:
+        hda = model.HistoryDatasetAssociation(extension=ext)
+        hda.metadata.random_field = str(i)  # Populate a random metadata field for testing
+        inputs[ name ] = hda
+        last_ext = ext
+        i += 1
+
+    input_collections = {}
+    if add_collection:
+        hda_forward = model.HistoryDatasetAssociation(extension="txt")
+        hda_reverse = model.HistoryDatasetAssociation(extension="txt")
+        c1 = model.DatasetCollection(collection_type="pair")
+        hc1 = model.HistoryDatasetCollectionAssociation(collection=c1, name="HistoryCollectionTest1")
+
+        dce1 = model.DatasetCollectionElement(collection=c1, element=hda_forward, element_identifier="forward", element_index=0)
+        dce2 = model.DatasetCollectionElement(collection=c1, element=hda_reverse, element_identifier="reverse", element_index=1)
+        c1.elements = [dce1, dce2]
+
+        input_collections["hdcai"] = [(hc1, False)]
+
+    actual_format = determine_output_format( output, param_context, inputs, input_collections, last_ext )
+    assert actual_format == expected, "Actual format %s, does not match expected %s" % (actual_format, expected)
+
+
+def quick_output(format, format_source=None, change_format_xml=None):
+    test_output = ToolOutput( "test_output" )
+    test_output.format = format
+    test_output.format_source = format_source
+    if change_format_xml:
+        test_output.change_format = XML(change_format_xml)
+    else:
+        test_output.change_format = None
+    return test_output
+
+
+class MockTrans( object ):
+
+    def __init__( self, app, history, user=None ):
+        self.app = app
+        self.history = history
+        self.user = user
+        self.sa_session = self.app.model.context
+        self.model = app.model
+
+    def db_dataset_for( self, input_db_key ):
+        return None
+
+    def get_galaxy_session( self ):
+        return model.GalaxySession()
+
+    def get_current_user_roles( self ):
+        return []
+
+    def log_event( self, *args, **kwargs ):
+        pass
+
+
+class MockObjectStore( object ):
+
+    def __init__( self ):
+        self.created_datasets = []
+        self.first_create = True
+        self.object_store_id = "mycoolid"
+
+    def create( self, dataset ):
+        self.created_datasets.append( dataset )
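+        # The first dataset created for a job is assigned the mock store id;
+        # every later dataset in the same job must already carry that id.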
+        if self.first_create:
+            self.first_create = False
+            assert dataset.object_store_id is None
+            dataset.object_store_id = self.object_store_id
+        else:
+            assert dataset.object_store_id == self.object_store_id
diff --git a/test/unit/tools/test_citations.py b/test/unit/tools/test_citations.py
new file mode 100644
index 0000000..c94d505
--- /dev/null
+++ b/test/unit/tools/test_citations.py
@@ -0,0 +1,56 @@
+import tempfile
+from contextlib import contextmanager
+from shutil import rmtree
+
+from galaxy.managers.citations import (
+    BibtexCitation,
+    CitationCollection,
+    parse_citation,
+)
+from galaxy.util import parse_xml_string
+
+EXAMPLE_BIBTEX_CITATION = """<citation type="bibtex">@article{goecks2010galaxy,
+    title={Galaxy: a comprehensive approach for supporting accessible, reproducible, and transparent computational research in the life sciences},
+    author={Goecks, Jeremy and Nekrutenko, Anton and Taylor, James and The Galaxy Team},
+    journal={Genome Biol},
+    volume={11},
+    number={8},
+    pages={R86},
+    year={2010}
+}</citation>"""
+
+
+def test_parse_citation():
+    xml_text = EXAMPLE_BIBTEX_CITATION
+    citation_elem = parse_xml_string(xml_text)
+    with temp_directory() as test_directory:
+        citation = parse_citation(citation_elem, test_directory, None)
+    bibtex = citation.to_bibtex()
+    assert "title={Galaxy" in bibtex
+
+
+def test_citation_collection():
+    citation_collection = CitationCollection()
+    assert len( citation_collection ) == 0
+    cite1 = QuickBibtexCitation("@article{'test1'}")
+    cite1dup = QuickBibtexCitation("@article{'test1'}")
+    cite2 = QuickBibtexCitation("@article{'test2'}")
+    assert citation_collection.add(cite1)
+    assert not citation_collection.add(cite1dup)
+    assert citation_collection.add(cite2)
+    assert len( citation_collection ) == 2
+
+
+ at contextmanager
+def temp_directory():
+    base_path = tempfile.mkdtemp()
+    try:
+        yield base_path
+    finally:
+        rmtree(base_path)
+
+
+class QuickBibtexCitation( BibtexCitation ):
+
+    def __init__( self, raw_bibtex ):
+        self._set_raw_bibtex( raw_bibtex )
diff --git a/test/unit/tools/test_collect_primary_datasets.py b/test/unit/tools/test_collect_primary_datasets.py
new file mode 100644
index 0000000..2ffa7f2
--- /dev/null
+++ b/test/unit/tools/test_collect_primary_datasets.py
@@ -0,0 +1,330 @@
+import json
+import os
+import unittest
+
+import tools_support
+from galaxy import (
+    model,
+    util
+)
+from galaxy.tools.parser import output_collection_def
+
+
+DEFAULT_TOOL_OUTPUT = "out1"
+DEFAULT_EXTRA_NAME = "test1"
+
+
+class CollectPrimaryDatasetsTestCase( unittest.TestCase, tools_support.UsesApp, tools_support.UsesTools ):
+
+    def setUp( self ):
+        self.setup_app( mock_model=False )
+        object_store = MockObjectStore()
+        self.app.object_store = object_store
+        self._init_tool( tools_support.SIMPLE_TOOL_CONTENTS )
+        self._setup_test_output( )
+        self.app.config.collect_outputs_from = "job_working_directory"
+
+        self.app.model.Dataset.object_store = object_store
+
+    def tearDown( self ):
+        if self.app.model.Dataset.object_store is self.app.object_store:
+            self.app.model.Dataset.object_store = None
+
+    def test_empty_collect( self ):
+        assert len( self._collect() ) == 0
+
+    def test_collect_multiple( self ):
+        path1 = self._setup_extra_file( name="test1" )
+        path2 = self._setup_extra_file( name="test2" )
+
+        datasets = self._collect()
+        assert DEFAULT_TOOL_OUTPUT in datasets
+        self.assertEquals( len( datasets[ DEFAULT_TOOL_OUTPUT ] ), 2 )
+
+        # Test default order of collection.
+        assert list(datasets[ DEFAULT_TOOL_OUTPUT ].keys()) == ["test1", "test2"]
+
+        created_hda_1 = datasets[ DEFAULT_TOOL_OUTPUT ][ "test1" ]
+        self.app.object_store.assert_created_with_path( created_hda_1.dataset, path1 )
+
+        created_hda_2 = datasets[ DEFAULT_TOOL_OUTPUT ][ "test2" ]
+        self.app.object_store.assert_created_with_path( created_hda_2.dataset, path2 )
+
+        # Test default metadata stuff
+        assert created_hda_1.visible
+
+        # Since discover_datasets was not specified, the older name-based
+        # pattern doesn't result in a dbkey being set.
+        assert created_hda_1.dbkey == "?"
+
+    def test_collect_sorted_reverse( self ):
+        self._replace_output_collectors( '''<output>
+            <discover_datasets pattern="__name__" directory="subdir_for_name_discovery" sort_by="reverse_filename" ext="txt" />
+        </output>''')
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="test1" )
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="test2" )
+
+        datasets = self._collect()
+        assert DEFAULT_TOOL_OUTPUT in datasets
+
+        # Test default order of collection.
+        assert list(datasets[ DEFAULT_TOOL_OUTPUT ].keys()) == ["test2", "test1"]
+
+    def test_collect_sorted_name( self ):
+        self._replace_output_collectors( '''<output>
+            <discover_datasets pattern="[abc](?P<name>.*)" directory="subdir_for_name_discovery" sort_by="name" ext="txt" />
+        </output>''')
+        # Set up filenames in reverse order and ensure name is used as key.
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="ctest1" )
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="btest2" )
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="atest3" )
+
+        datasets = self._collect()
+        assert DEFAULT_TOOL_OUTPUT in datasets
+
+        # Test default order of collection.
+        assert list(datasets[ DEFAULT_TOOL_OUTPUT ].keys()) == ["test1", "test2", "test3"]
+
+    def test_collect_sorted_numeric( self ):
+        self._replace_output_collectors( '''<output>
+            <discover_datasets pattern="[abc](?P<name>.*)" directory="subdir_for_name_discovery" sort_by="numeric_name" ext="txt" />
+        </output>''')
+        # Set up filenames in reverse order and ensure name is used as key.
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="c1" )
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="b10" )
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="a100" )
+
+        datasets = self._collect()
+        assert DEFAULT_TOOL_OUTPUT in datasets
+
+        # Test default order of collection.
+        assert list(datasets[ DEFAULT_TOOL_OUTPUT ].keys()) == ["1", "10", "100"]
+
+    def test_collect_hidden( self ):
+        self._setup_extra_file( visible="hidden" )
+        created_hda = self._collect_default_extra()
+        assert not created_hda.visible
+
+    def test_collect_ext( self ):
+        self._setup_extra_file( ext="txt" )
+        created_hda = self._collect_default_extra()
+        assert created_hda.ext == "txt"
+
+    def test_copied_to_imported_histories( self ):
+        self._setup_extra_file( )
+        cloned_hda = self.hda.copy()
+        history_2 = self._new_history( hdas=[ cloned_hda ])
+        assert len( history_2.datasets ) == 1
+
+        self._collect()
+
+        # Make sure extra primary was copied to cloned history with
+        # cloned output.
+        assert len( history_2.datasets ) == 2
+
+    def test_dbkey_from_filename( self ):
+        self._setup_extra_file( dbkey="hg19" )
+        created_hda = self._collect_default_extra()
+        assert created_hda.dbkey == "hg19"
+
+    def test_dbkey_from_galaxy_json( self ):
+        path = self._setup_extra_file( )
+        self._append_job_json( dict( dbkey="hg19" ), output_path=path )
+        created_hda = self._collect_default_extra()
+        assert created_hda.dbkey == "hg19"
+
+    def test_name_from_galaxy_json( self ):
+        path = self._setup_extra_file( )
+        self._append_job_json( dict( name="test_from_json" ), output_path=path )
+        created_hda = self._collect_default_extra()
+        assert "test_from_json" in created_hda.name
+
+    def test_info_from_galaxy_json( self ):
+        path = self._setup_extra_file( )
+        self._append_job_json( dict( info="extra output info" ), output_path=path )
+        created_hda = self._collect_default_extra()
+        assert created_hda.info == "extra output info"
+
+    def test_extension_from_galaxy_json( self ):
+        path = self._setup_extra_file( )
+        self._append_job_json( dict( ext="txt" ), output_path=path )
+        created_hda = self._collect_default_extra()
+        assert created_hda.ext == "txt"
+
+    def test_new_file_path_collection( self ):
+        self.app.config.collect_outputs_from = "new_file_path"
+        self.app.config.new_file_path = self.test_directory
+
+        self._setup_extra_file( )
+        created_hda = self._collect_default_extra( job_working_directory="/tmp" )
+        assert created_hda
+
+    def test_job_param( self ):
+        self._setup_extra_file( )
+        assert len( self.job.output_datasets ) == 1
+        self._collect_default_extra()
+        assert len( self.job.output_datasets ) == 2
+        extra_job_assoc = [ job_assoc for job_assoc in self.job.output_datasets if job_assoc.name.startswith( "__" ) ][ 0 ]
+        assert extra_job_assoc.name == "__new_primary_file_out1|test1__"
+
+    def test_pattern_override_designation( self ):
+        self._replace_output_collectors( '''<output><discover_datasets pattern="__designation__" directory="subdir" ext="txt" /></output>''' )
+        self._setup_extra_file( subdir="subdir", filename="foo.txt" )
+        primary_outputs = self._collect( )[ DEFAULT_TOOL_OUTPUT ]
+        assert len( primary_outputs ) == 1
+        created_hda = next(iter(primary_outputs.values()))
+        assert "foo.txt" in created_hda.name
+        assert created_hda.ext == "txt"
+        assert created_hda.dbkey == "btau"
+        assert created_hda.dbkey == "btau"
+
+    def test_name_and_ext_pattern( self ):
+        self._replace_output_collectors( '''<output><discover_datasets pattern="__name_and_ext__" directory="subdir" /></output>''' )
+        self._setup_extra_file( subdir="subdir", filename="foo1.txt" )
+        self._setup_extra_file( subdir="subdir", filename="foo2.tabular" )
+        primary_outputs = self._collect( )[ DEFAULT_TOOL_OUTPUT ]
+        assert len( primary_outputs ) == 2
+        assert primary_outputs[ "foo1" ].ext == "txt"
+        assert primary_outputs[ "foo2" ].ext == "tabular"
+        assert primary_outputs[ "foo1" ].dbkey == "btau"
+        assert primary_outputs[ "foo2" ].dbkey == "btau"
+
+    def test_custom_pattern( self ):
+        # Hypothetical oral metagenomic classifier that populates a directory
+        # of files based on name and genome. Use custom regex pattern to grab
+        # and classify these files.
+        self._replace_output_collectors( '''<output><discover_datasets pattern="(?P<designation>.*)__(?P<dbkey>.*).fasta" directory="genome_breakdown" ext="fasta" /></output>''' )
+        self._setup_extra_file( subdir="genome_breakdown", filename="samp1__hg19.fasta" )
+        self._setup_extra_file( subdir="genome_breakdown", filename="samp2__lactLact.fasta" )
+        self._setup_extra_file( subdir="genome_breakdown", filename="samp3__hg19.fasta" )
+        self._setup_extra_file( subdir="genome_breakdown", filename="samp4__lactPlan.fasta" )
+        self._setup_extra_file( subdir="genome_breakdown", filename="samp5__fusoNucl.fasta" )
+
+        # Put a file in directory we don't care about, just to make sure
+        # it doesn't get picked up by pattern.
+        self._setup_extra_file( subdir="genome_breakdown", filename="overview.txt" )
+
+        primary_outputs = self._collect( )[ DEFAULT_TOOL_OUTPUT ]
+        assert len( primary_outputs ) == 5
+        genomes = dict( samp1="hg19", samp2="lactLact", samp3="hg19", samp4="lactPlan", samp5="fusoNucl" )
+        for key, hda in primary_outputs.items():
+            assert hda.dbkey == genomes[ key ]
+
+    def test_name_versus_designation( self ):
+        """ This test demonstrates the difference between name and desgination
+        in grouping patterns and named patterns such as __designation__,
+        __name__, __designation_and_ext__, and __name_and_ext__.
+        """
+        self._replace_output_collectors( '''<output>
+            <discover_datasets pattern="__name_and_ext__" directory="subdir_for_name_discovery" />
+            <discover_datasets pattern="__designation_and_ext__" directory="subdir_for_designation_discovery" />
+        </output>''')
+        self._setup_extra_file( subdir="subdir_for_name_discovery", filename="example1.txt" )
+        self._setup_extra_file( subdir="subdir_for_designation_discovery", filename="example2.txt" )
+        primary_outputs = self._collect( )[ DEFAULT_TOOL_OUTPUT ]
+        name_output = primary_outputs[ "example1" ]
+        designation_output = primary_outputs[ "example2" ]
+        # While the name is also used for the designation, the designation is
+        # not the name itself - it is, however, used when computing the name.
+        assert name_output.name == "example1"
+        assert designation_output.name == "%s (%s)" % ( self.hda.name, "example2" )
+
+    def test_cannot_read_files_outside_job_directory( self ):
+        self._replace_output_collectors( '''<output>
+            <discover_datasets pattern="__name_and_ext__" directory="../../secrets" />
+        </output>''')
+        exception_thrown = False
+        try:
+            self._collect( )
+        except Exception:
+            exception_thrown = True
+        assert exception_thrown
+
+    def _collect_default_extra( self, **kwargs ):
+        collected = self._collect( **kwargs )
+        assert DEFAULT_TOOL_OUTPUT in collected, "No such key [%s], in %s" % (DEFAULT_TOOL_OUTPUT, collected)
+        output_files = collected[DEFAULT_TOOL_OUTPUT]
+        assert DEFAULT_EXTRA_NAME in output_files, "No such key [%s]" % DEFAULT_EXTRA_NAME
+        return output_files[DEFAULT_EXTRA_NAME]
+
+    def _collect( self, job_working_directory=None ):
+        if not job_working_directory:
+            job_working_directory = self.test_directory
+        return self.tool.collect_primary_datasets( self.outputs, job_working_directory, "txt", input_dbkey="btau" )
+
+    def _replace_output_collectors( self, xml_str ):
+        # Rewrite tool as if it had been created with output containing
+        # supplied dataset_collector elem.
+        elem = util.parse_xml_string( xml_str )
+        self.tool.outputs[ DEFAULT_TOOL_OUTPUT ].dataset_collector_descriptions = output_collection_def.dataset_collector_descriptions_from_elem( elem )
+
+    def _append_job_json( self, object, output_path=None, line_type="new_primary_dataset" ):
+        object[ "type" ] = line_type
+        if output_path:
+            name = os.path.basename( output_path )
+            object[ "filename" ] = name
+        line = json.dumps( object )
+        with open( os.path.join( self.test_directory, "galaxy.json" ), "a" ) as f:
+            f.write( "%s\n" % line )
+
+    def _setup_extra_file( self, **kwargs ):
+        path = kwargs.get( "path", None )
+        filename = kwargs.get( "filename", None )
+        if not path and not filename:
+            name = kwargs.get( "name", DEFAULT_EXTRA_NAME )
+            visible = kwargs.get( "visible", "visible" )
+            ext = kwargs.get( "ext", "data" )
+            template_args = ( self.hda.id, name, visible, ext )
+            directory = kwargs.get( "directory", self.test_directory )
+            path = os.path.join( directory, "primary_%s_%s_%s_%s" % template_args )
+            if "dbkey" in kwargs:
+                path = "%s_%s" % ( path, kwargs[ "dbkey" ] )
+        if not path:
+            assert filename
+            subdir = kwargs.get( "subdir", "." )
+            path = os.path.join( self.test_directory, subdir, filename )
+        directory = os.path.dirname( path )
+        if not os.path.exists( directory ):
+            os.makedirs( directory )
+        contents = kwargs.get( "contents", "test contents" )
+        open( path, "w" ).write( contents )
+        return path
+
+    def _setup_test_output( self ):
+        dataset = model.Dataset()
+        dataset.external_filename = "example_output"  # This way object store isn't asked about size...
+        self.hda = model.HistoryDatasetAssociation( name="test", dataset=dataset )
+        job = model.Job()
+        job.add_output_dataset( DEFAULT_TOOL_OUTPUT, self.hda )
+        self.app.model.context.add( job )
+        self.job = job
+        self.history = self._new_history( hdas=[ self.hda ] )
+        self.outputs = { DEFAULT_TOOL_OUTPUT: self.hda }
+
+    def _new_history( self, hdas=[], flush=True ):
+        history = model.History()
+        self.app.model.context.add( history )
+        for hda in hdas:
+            history.add_dataset( hda, set_hid=False )
+        if flush:
+            self.app.model.context.flush( )
+        return history
+
+
+class MockObjectStore( object ):
+
+    def __init__( self ):
+        self.created_datasets = {}
+
+    def update_from_file( self, dataset, file_name, create ):
+        if create:
+            self.created_datasets[ dataset ] = file_name
+
+    def size( self, dataset ):
+        path = self.created_datasets[ dataset ]
+        return os.stat( path ).st_size
+
+    def get_filename( self, dataset ):
+        return self.created_datasets[ dataset ]
+
+    def assert_created_with_path( self, dataset, file_name ):
+        assert self.created_datasets[ dataset ] == file_name
diff --git a/test/unit/tools/test_column_parameters.py b/test/unit/tools/test_column_parameters.py
new file mode 100644
index 0000000..8016bbc
--- /dev/null
+++ b/test/unit/tools/test_column_parameters.py
@@ -0,0 +1,106 @@
+""" Tests for tool parameters, more tests exist in test_data_parameters.py and
+test_select_parameters.py.
+"""
+
+from galaxy import model
+from galaxy.util import bunch
+from tools_support import datatypes_registry
+
+from .test_parameter_parsing import BaseParameterTestCase
+
+
+class DataColumnParameterTestCase( BaseParameterTestCase ):
+
+    def test_not_optional_by_default(self):
+        assert not self.__param_optional()
+
+    def test_force_select_disable(self):
+        self.other_attributes = 'force_select="false"'
+        assert self.__param_optional()
+
+    def test_optional_override(self):
+        self.other_attributes = 'optional="true"'
+        assert self.__param_optional()
+
+    def __param_optional(self):
+        # TODO: don't break abstraction, try setting null value instead
+        return self.param.optional
+
+    def test_from_json(self):
+        value = self.param.from_json("3", self.trans, { "input_tsv": self.build_ready_hda()  } )
+        assert value == "3"
+
+    def test_from_json_strips_c(self):
+        value = self.param.from_json("c1", self.trans, { "input_tsv": self.build_ready_hda()  } )
+        assert value == "1"
+
+    def test_multiple_from_json(self):
+        self.multiple = True
+        value = self.param.from_json("1,2,3", self.trans, { "input_tsv": self.build_ready_hda()  } )
+        assert value == ["1", "2", "3"]
+
+    def test_multiple_from_json_with_c(self):
+        self.multiple = True
+        value = self.param.from_json("c1,c2,c3", self.trans, { "input_tsv": self.build_ready_hda()  } )
+        assert value == ["1", "2", "3"]
+
+    def test_get_initial_value_default(self):
+        self.assertEqual( '1', self.param.get_initial_value( self.trans, { "input_tsv": self.build_ready_hda()  } ) )
+
+    def test_get_initial_value_override_legacy(self):
+        self.other_attributes = "default_value='2'"
+        self.assertEqual( '2', self.param.get_initial_value( self.trans, { "input_tsv": self.build_ready_hda() } ) )
+
+    def test_get_initial_value_override_newstyle(self):
+        self.other_attributes = "value='2'"
+        self.assertEqual( '2', self.param.get_initial_value( self.trans, { "input_tsv": self.build_ready_hda() } ) )
+
+    def test_get_initial_value_override_newstyle_strips_c(self):
+        self.other_attributes = "value='c2'"
+        self.assertEqual( '2', self.param.get_initial_value( self.trans, { "input_tsv": self.build_ready_hda() } ) )
+
+    def setUp( self ):
+        super(DataColumnParameterTestCase, self).setUp()
+        self.test_history = model.History()
+        self.app.model.context.add( self.test_history )
+        self.app.model.context.flush()
+        self.trans = bunch.Bunch(
+            app=self.app,
+            get_history=lambda: self.test_history,
+            get_current_user_roles=lambda: [],
+            workflow_building_mode=False,
+            webapp=bunch.Bunch( name="galaxy" ),
+        )
+
+        self.type = "data_column"
+        self.other_attributes = ""
+        self.set_data_ref = "input_tsv"
+        self.multiple = False
+        self.optional = False
+        self._param = None
+
+    def build_ready_hda(self):
+        hist = model.History()
+        self.app.model.context.add( hist )
+        ready_hda = hist.add_dataset( model.HistoryDatasetAssociation( extension='interval', create_dataset=True, sa_session=self.app.model.context ) )
+        ready_hda.set_dataset_state( 'ok' )
+        return ready_hda
+
+    @property
+    def param( self ):
+        if not self._param:
+            multi_text = ""
+            if self.multiple:
+                multi_text = 'multiple="True"'
+            optional_text = ""
+            if self.optional:
+                optional_text = 'optional="True"'
+            data_ref_text = ""
+            if self.set_data_ref:
+                data_ref_text = 'data_ref="input_tsv"'
+            template_xml = '''<param name="my_name" type="%s" %s %s %s %s></param>'''
+            param_str = template_xml % ( self.type, data_ref_text, multi_text, optional_text, self.other_attributes )
+            self._param = self._parameter_for( xml=param_str )
+            self._param.ref_input = bunch.Bunch(formats=[datatypes_registry.get_datatype_by_extension("tabular")])
+
+        return self._param
diff --git a/test/unit/tools/test_conda_resolution.py b/test/unit/tools/test_conda_resolution.py
new file mode 100644
index 0000000..3647f39
--- /dev/null
+++ b/test/unit/tools/test_conda_resolution.py
@@ -0,0 +1,69 @@
+import os
+import shutil
+import unittest
+from tempfile import mkdtemp
+
+from galaxy.tools.deps import (
+    conda_util,
+    DependencyManager
+)
+from galaxy.tools.deps.resolvers.conda import CondaDependencyResolver
+
+
+def skip_unless_environ(var):
+    if var in os.environ:
+        return lambda func: func
+    template = "Environment variable %s not found, dependent test skipped."
+    return unittest.skip(template % var)
+
+
+ at skip_unless_environ("GALAXY_TEST_INCLUDE_SLOW")
+def test_conda_resolution():
+    base_path = mkdtemp()
+    try:
+        job_dir = os.path.join(base_path, "000")
+        dependency_manager = DependencyManager(base_path)
+        resolver = CondaDependencyResolver(
+            dependency_manager,
+            auto_init=True,
+            auto_install=True,
+            use_path_exec=False,  # For the test ensure this is always a clean install
+        )
+        conda_context = resolver.conda_context
+        assert len(list(conda_util.installed_conda_targets(conda_context))) == 0
+        dependency = resolver.resolve(name="samtools", version=None, type="package", job_directory=job_dir)
+        assert dependency.shell_commands(None) is not None
+        installed_targets = list(conda_util.installed_conda_targets(conda_context))
+        assert len(installed_targets) == 1
+        samtools_target = installed_targets[0]
+        assert samtools_target.package == "samtools"
+        assert samtools_target.version is None
+    finally:
+        shutil.rmtree(base_path)
+
+
+ at skip_unless_environ("GALAXY_TEST_INCLUDE_SLOW")
+def test_conda_resolution_failure():
+    """This test is specifically designed to trigger https://github.com/rtfd/readthedocs.org/issues/1902
+    and thus it expects the install to fail. If this test fails it is a sign that the upstream
+    conda issue has been fixed.
+    """
+
+    base_path = mkdtemp(prefix='x' * 80)  # a ridiculously long prefix
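+    # Presumably the long prefix overflows the shebang line of conda-generated
+    # scripts (Linux truncates shebangs at roughly 127 characters), which is
+    # what breaks the install.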
+    try:
+        job_dir = os.path.join(base_path, "000")
+        dependency_manager = DependencyManager(base_path)
+        resolver = CondaDependencyResolver(
+            dependency_manager,
+            auto_init=True,
+            auto_install=True,
+            use_path_exec=False,  # For the test ensure this is always a clean install
+        )
+        conda_context = resolver.conda_context
+        assert len(list(conda_util.installed_conda_targets(conda_context))) == 0
+        dependency = resolver.resolve(name="samtools", version=None, type="package", job_directory=job_dir)
+        assert dependency.shell_commands(None) is None  # install should fail
+        installed_targets = list(conda_util.installed_conda_targets(conda_context))
+        assert len(installed_targets) == 0
+    finally:
+        shutil.rmtree(base_path)
diff --git a/test/unit/tools/test_data_parameters.py b/test/unit/tools/test_data_parameters.py
new file mode 100644
index 0000000..a39834d
--- /dev/null
+++ b/test/unit/tools/test_data_parameters.py
@@ -0,0 +1,211 @@
+import os
+import sys
+
+from galaxy import model
+from galaxy.util import bunch
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock
+
+from .test_parameter_parsing import BaseParameterTestCase
+
+
+class DataToolParameterTestCase( BaseParameterTestCase ):
+
+    def test_to_python_none_values( self ):
+        assert self.param.to_python( None, self.app ) is None
+        assert self.param.to_python( 'None', self.app ) is None
+        assert self.param.to_python( '', self.app ) is None
+
+    def test_to_python_hda( self ):
+        hda = self._new_hda()
+        as_python = self.param.to_python( hda.id, self.app )
+        assert hda == as_python
+
+    def test_to_python_multi_hdas( self ):
+        hda1 = self._new_hda()
+        hda2 = self._new_hda()
+        as_python = self.param.to_python( "%s,%s" % ( hda1.id, hda2.id ), self.app )
+        assert as_python == [ hda1, hda2 ]
+
+    def test_to_python_multi_none( self ):
+        self.multiple = True
+        hda = self._new_hda()
+        # Selection is optional and may be combined with other values; not
+        # sure the UI should really allow this, but it is easy enough to just
+        # filter it out.
+        self.assertEquals([hda], self.param.to_python( '%s,None' % hda.id, self.app ))
+
+    def test_field_filter_on_types( self ):
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda2 = MockHistoryDatasetAssociation( name="hda2", id=2 )
+        self.stub_active_datasets( hda1, hda2 )
+        field = self._simple_field()
+        assert len( field[ 'options' ][ 'hda' ] ) == 2
+        assert field[ 'options' ][ 'hda' ][ 0 ][ 'name' ] == "hda2"
+        assert field[ 'options' ][ 'hda' ][ 1 ][ 'name' ] == "hda1"
+
+        hda2.datatype_matches = False
+        field = self._simple_field()
+        assert len( field[ 'options' ][ 'hda' ] ) == 1
+        assert field[ 'options' ][ 'hda' ][ 0 ][ 'name' ] == "hda1"
+
+    def test_field_display_hidden_hdas_only_if_selected( self ):
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda2 = MockHistoryDatasetAssociation( name="hda2", id=2 )
+        self.stub_active_datasets( hda1, hda2 )
+        hda1.visible = False
+        hda2.visible = False
+        field = self._simple_field( other_values={ "data2" : hda2 } )
+        self.assertEquals( len( field[ 'options' ][ 'hda' ] ), 1 )  # hda1 not an option, not visible or selected
+        assert field[ 'options' ][ 'hda' ][ 0 ][ 'name' ] == "(unavailable) hda2"
+
+    def test_field_implicit_conversion_new( self ):
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda1.datatype_matches = False
+        hda1.conversion_destination = ( "tabular", None )
+        self.stub_active_datasets( hda1 )
+        field = self._simple_field()
+        assert len( field[ 'options' ][ 'hda' ] ) == 1
+        assert field[ 'options' ][ 'hda' ][ 0 ][ 'name' ] == "hda1 (as tabular)"
+        assert field[ 'options' ][ 'hda' ][ 0 ][ 'hid' ] == 1
+
+    def test_field_implicit_conversion_existing( self ):
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda1.datatype_matches = False
+        hda1.conversion_destination = ( "tabular", MockHistoryDatasetAssociation( name="hda1converted", id=2 ) )
+        self.stub_active_datasets( hda1 )
+        field = self._simple_field()
+        assert len( field[ 'options' ][ 'hda' ] ) == 1
+        assert field[ 'options' ][ 'hda' ][ 0 ][ 'name' ] == "hda1 (as tabular)"
+        # This is the difference from the previous test: the value is the
+        # existing hda's id, not a new one.
+        assert field[ 'options' ][ 'hda' ][ 0 ][ 'hid' ] == 2
+
+    def test_field_multiple( self ):
+        self.multiple = True
+        field = self._simple_field()
+        assert field[ 'multiple' ]
+
+    def test_field_empty_selection( self ):
+        field = self._simple_field()
+        assert len( field[ 'options' ][ 'hda' ] ) == 0
+        assert len( field[ 'options' ][ 'hdca' ] ) == 0
+
+    def test_field_empty_selection_optional( self ):
+        self.optional = True
+        field = self._simple_field()
+        assert field[ 'optional' ]
+
+    def test_get_initial_value( self ):
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda2 = MockHistoryDatasetAssociation( name="hda2", id=2 )
+        self.stub_active_datasets( hda1, hda2 )
+        assert hda2 == self.param.get_initial_value( self.trans, {} )
+
+    def test_get_initial_value_is_none_if_no_match( self ):
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda1.visible = False
+        hda2 = MockHistoryDatasetAssociation( name="hda2", id=2 )
+        hda2.visible = False
+        self.stub_active_datasets( hda1, hda2 )
+        assert self.param.get_initial_value( self.trans, {} ) is None
+
+    def test_get_initial_none_when_optional( self ):
+        self.optional = True
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda2 = MockHistoryDatasetAssociation( name="hda2", id=2 )
+        self.stub_active_datasets( hda1, hda2 )
+        assert self.param.get_initial_value( self.trans, {} ) is None
+
+    def test_get_initial_with_previously_converted_data( self ):
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda1.datatype_matches = False
+        converted = MockHistoryDatasetAssociation( name="hda1converted", id=2 )
+        hda1.conversion_destination = ( "tabular", converted )
+        self.stub_active_datasets( hda1 )
+        assert converted == self.param.get_initial_value( self.trans, {} )
+
+    def test_get_initial_with_to_be_converted_data( self ):
+        hda1 = MockHistoryDatasetAssociation( name="hda1", id=1 )
+        hda1.datatype_matches = False
+        hda1.conversion_destination = ( "tabular", None )
+        self.stub_active_datasets( hda1 )
+        assert hda1 == self.param.get_initial_value( self.trans, {} )
+
+    def _new_hda( self ):
+        hda = model.HistoryDatasetAssociation()
+        hda.visible = True
+        hda.dataset = model.Dataset()
+        self.app.model.context.add( hda )
+        self.app.model.context.flush( )
+        return hda
+
+    def setUp( self ):
+        super(DataToolParameterTestCase, self).setUp()
+        self.test_history = model.History()
+        self.app.model.context.add( self.test_history )
+        self.app.model.context.flush()
+        self.trans = galaxy_mock.MockTrans( history=self.test_history )
+        self.multiple = False
+        self.optional = False
+        self._param = None
+
+    def stub_active_datasets( self, *hdas ):
+        self.test_history._active_datasets_children_and_roles = hdas
+
+    def _simple_field( self, **kwds ):
+        return self.param.to_dict( trans=self.trans, **kwds )
+
+    @property
+    def param( self ):
+        if not self._param:
+            multi_text = ""
+            if self.multiple:
+                multi_text = 'multiple="True"'
+            optional_text = ""
+            if self.optional:
+                optional_text = 'optional="True"'
+            template_xml = '''<param name="data2" type="data" ext="txt" %s %s></param>'''
+            param_str = template_xml % ( multi_text, optional_text )
+            self._param = self._parameter_for( tool=self.mock_tool, xml=param_str )
+
+        return self._param
+
+
+class MockHistoryDatasetAssociation( object ):
+    """ Fake HistoryDatasetAssociation stubbed out for testing matching and
+    stuff like that.
+    """
+
+    def __init__( self, test_dataset=None, name="Test Dataset", id=1 ):
+        if not test_dataset:
+            test_dataset = model.Dataset()
+            test_dataset.state = model.Dataset.states.OK
+        self.states = model.HistoryDatasetAssociation.states
+        self.deleted = False
+        self.dataset = test_dataset
+        self.visible = True
+        self.datatype_matches = True
+        self.conversion_destination = ( None, None )
+        self.datatype = bunch.Bunch(
+            matches_any=lambda formats: self.datatype_matches,
+        )
+        self.dbkey = "hg19"
+        self.implicitly_converted_parent_datasets = False
+
+        self.name = name
+        self.hid = id
+        self.id = id
+        self.children = []
+
+    @property
+    def state( self ):
+        return self.dataset.state
+
+    def get_dbkey( self ):
+        return self.dbkey
+
+    def find_conversion_destination( self, formats ):
+        return self.conversion_destination
diff --git a/test/unit/tools/test_dataset_matcher.py b/test/unit/tools/test_dataset_matcher.py
new file mode 100644
index 0000000..2ae8f37
--- /dev/null
+++ b/test/unit/tools/test_dataset_matcher.py
@@ -0,0 +1,169 @@
+from unittest import TestCase
+from xml.etree.ElementTree import XML
+
+import tools_support
+from galaxy import model
+from galaxy.tools.parameters import (
+    basic,
+    dataset_matcher
+)
+from galaxy.util import bunch
+
+from .test_data_parameters import MockHistoryDatasetAssociation
+
+
+class DatasetMatcherTestCase( TestCase, tools_support.UsesApp ):
+
+    def test_hda_accessible( self ):
+        # Cannot access errored or discarded datasets.
+        self.mock_hda.dataset.state = model.Dataset.states.ERROR
+        assert not self.test_context.hda_accessible( self.mock_hda )
+
+        self.mock_hda.dataset.state = model.Dataset.states.DISCARDED
+        assert not self.test_context.hda_accessible( self.mock_hda )
+
+        # Can access datasets in other states.
+        self.mock_hda.dataset.state = model.Dataset.states.OK
+        assert self.test_context.hda_accessible( self.mock_hda )
+
+        self.mock_hda.dataset.state = model.Dataset.states.QUEUED
+        assert self.test_context.hda_accessible( self.mock_hda )
+
+        # Cannot access dataset if security agent says no.
+        self.app.security_agent.can_access_dataset = lambda roles, dataset: False
+        assert not self.test_context.hda_accessible( self.mock_hda )
+
+    def test_selected( self ):
+        self.test_context.value = []
+        assert not self.test_context.selected( self.mock_hda )
+
+        self.test_context.value = [ self.mock_hda ]
+        assert self.test_context.selected( self.mock_hda )
+
+    def test_hda_mismatches( self ):
+        # Datasets not visible are not "valid" for param.
+        self.mock_hda.visible = False
+        assert not self.test_context.hda_match( self.mock_hda )
+
+        # Datasets that don't match datatype are not valid.
+        self.mock_hda.visible = True
+        self.mock_hda.datatype_matches = False
+        assert not self.test_context.hda_match( self.mock_hda )
+
+    def test_valid_hda_direct_match( self ):
+        # Datasets that are visible and matching are valid.
+        self.mock_hda.visible = True
+        self.mock_hda.datatype_matches = True
+        hda_match = self.test_context.hda_match( self.mock_hda, check_implicit_conversions=False )
+        assert hda_match
+
+        # The match is not a conversion, so the matching hda is the same hda
+        # that was supplied.
+        assert not hda_match.implicit_conversion
+        assert hda_match.hda == self.mock_hda
+
+    def test_valid_hda_implicit_converted( self ):
+        # Find conversion returns an HDA for an already implicitly converted
+        # dataset.
+        self.mock_hda.datatype_matches = False
+        converted_hda = model.HistoryDatasetAssociation()
+        self.mock_hda.conversion_destination = ( "tabular", converted_hda )
+        hda_match = self.test_context.hda_match( self.mock_hda )
+
+        assert hda_match
+        assert hda_match.implicit_conversion
+        assert hda_match.hda == converted_hda
+        assert hda_match.target_ext == "tabular"
+
+    def test_hda_match_implicit_can_convert( self ):
+        # Find conversion returns a target extension to convert to, but not
+        # a previously implicitly converted dataset.
+        self.mock_hda.datatype_matches = False
+        self.mock_hda.conversion_destination = ( "tabular", None )
+        hda_match = self.test_context.hda_match( self.mock_hda )
+
+        assert hda_match
+        assert hda_match.implicit_conversion
+        assert hda_match.hda == self.mock_hda
+        assert hda_match.target_ext == "tabular"
+
+    def test_hda_match_properly_skips_conversion( self ):
+        self.mock_hda.datatype_matches = False
+        self.mock_hda.conversion_destination = ( "tabular", bunch.Bunch() )
+        hda_match = self.test_context.hda_match( self.mock_hda, check_implicit_conversions=False )
+        assert not hda_match
+
+    def test_data_destination_tools_require_public( self ):
+        self.tool.tool_type = "data_destination"
+
+        # Public datasets okay and valid
+        self.app.security_agent.dataset_is_public = lambda dataset: True
+        hda_match = self.test_context.hda_match( self.mock_hda )
+        assert hda_match
+
+        # Non-public datasets not valid
+        self.app.security_agent.dataset_is_public = lambda dataset: False
+        hda_match = self.test_context.hda_match( self.mock_hda )
+        assert not hda_match
+
+    def test_filtered_hda_unmatched_key( self ):
+        self.filtered_param = True
+        data1_val = model.HistoryDatasetAssociation()
+        data1_val.dbkey = "hg18"
+        self.other_values = { "data1": data1_val }
+        assert self.test_context.filter_value == "hg18"
+
+        # mock_hda is hg19, other is hg18 so should not be "valid hda"
+        hda_match = self.test_context.hda_match( self.mock_hda )
+        assert not hda_match
+
+    def test_filtered_hda_matched_key( self ):
+        self.filtered_param = True
+        data1_val = model.HistoryDatasetAssociation()
+        data1_val.dbkey = "hg19"
+        self.other_values = { "data1": data1_val }
+
+        # Other param value and this dataset both hg19, should be valid
+        hda_match = self.test_context.hda_match( self.mock_hda )
+        assert hda_match
+
+    def setUp( self ):
+        self.setup_app()
+        self.mock_hda = MockHistoryDatasetAssociation()
+        self.tool = bunch.Bunch(
+            app=self.app,
+            tool_type="default",
+            valid_input_states=model.Dataset.valid_input_states,
+        )
+        self.current_user_roles = []
+        self.other_values = {}
+
+        # Reset lazily generated stuff
+        self.filtered_param = False
+        self._test_context = None
+        self.param = None
+
+    @property
+    def test_context( self ):
+        if self._test_context is None:
+            option_xml = ""
+            if self.filtered_param:
+                option_xml = '''<options><filter type="data_meta" ref="data1" key="dbkey" /></options>'''
+            param_xml = XML( '''<param name="data2" type="data" ext="txt">%s</param>''' % option_xml )
+            self.param = basic.DataToolParameter(
+                self.tool,
+                param_xml,
+            )
+
+            self._test_context = dataset_matcher.DatasetMatcher(
+                trans=bunch.Bunch(
+                    app=self.app,
+                    get_current_user_roles=lambda: self.current_user_roles,
+                    workflow_building_mode=True,
+                ),
+                param=self.param,
+                value=[ ],
+                other_values=self.other_values
+            )
+
+        return self._test_context
diff --git a/test/unit/tools/test_evaluation.py b/test/unit/tools/test_evaluation.py
new file mode 100644
index 0000000..f1d4f17
--- /dev/null
+++ b/test/unit/tools/test_evaluation.py
@@ -0,0 +1,318 @@
+import os
+from unittest import TestCase
+from xml.etree.ElementTree import XML
+
+from galaxy.jobs import SimpleComputeEnvironment
+from galaxy.jobs.datasets import DatasetPath
+from galaxy.model import (
+    Dataset,
+    History,
+    HistoryDatasetAssociation,
+    Job,
+    JobParameter,
+    JobToInputDatasetAssociation
+)
+from galaxy.tools.evaluation import ToolEvaluator
+# For MockTool
+from galaxy.tools.parameters import params_from_strings
+from galaxy.tools.parameters.basic import (
+    DataToolParameter,
+    IntegerToolParameter,
+    SelectToolParameter
+)
+from galaxy.tools.parameters.grouping import (
+    Conditional,
+    ConditionalWhen,
+    Repeat
+)
+from galaxy.tools.parser.output_objects import ToolOutput
+from galaxy.util.bunch import Bunch
+# Test fixtures for Galaxy infrastructure.
+from tools_support import UsesApp
+
+# To Test:
+# - param_file handling.
+TEST_TOOL_DIRECTORY = "/path/to/the/tool"
+
+
+class ToolEvaluatorTestCase(TestCase, UsesApp):
+
+    def setUp(self):
+        self.setup_app()
+        self.tool = MockTool(self.app)
+        self.job = Job()
+        self.job.history = History()
+        self.job.parameters = [ JobParameter( name="thresh", value="4" ) ]
+        self.evaluator = ToolEvaluator( self.app, self.tool, self.job, self.test_directory )
+
+    def tearDown(self):
+        self.tear_down_app()
+
+    def test_simple_evaluation( self ):
+        self._setup_test_bwa_job()
+        self._set_compute_environment()
+        command_line, extra_filenames, _ = self.evaluator.build( )
+        self.assertEquals( command_line, "bwa --thresh=4 --in=/galaxy/files/dataset_1.dat --out=/galaxy/files/dataset_2.dat" )
+
+    def test_repeat_evaluation( self ):
+        repeat = Repeat()
+        repeat.name = "r"
+        repeat.inputs = { "thresh": self.tool.test_thresh_param() }
+        self.tool.set_params( { "r": repeat } )
+        self.job.parameters = [ JobParameter( name="r", value='''[{"thresh": 4, "__index__": 0},{"thresh": 5, "__index__": 1}]''' ) ]
+        self.tool._command_line = "prog1 #for $r_i in $r # $r_i.thresh#end for#"
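+        # With the two repeat instances above (thresh 4 and 5) the Cheetah
+        # #for loop should expand to "prog1  4 5" - the double space comes
+        # from the literal spaces around the loop body.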
+        self._set_compute_environment()
+        command_line, extra_filenames, _ = self.evaluator.build( )
+        self.assertEquals( command_line, "prog1  4 5" )
+
+    def test_conditional_evaluation( self ):
+        select_xml = XML('''<param name="always_true" type="select"><option value="true">True</option></param>''')
+        parameter = SelectToolParameter( self.tool, select_xml )
+
+        conditional = Conditional()
+        conditional.name = "c"
+        conditional.test_param = parameter
+        when = ConditionalWhen()
+        when.inputs = { "thresh": self.tool.test_thresh_param() }
+        when.value = "true"
+        conditional.cases = [ when ]
+        self.tool.set_params( { "c": conditional } )
+        self.job.parameters = [ JobParameter( name="c", value='''{"thresh": 4, "always_true": "true", "__current_case__": 0}''' ) ]
+        self.tool._command_line = "prog1 --thresh=${c.thresh} --test_param=${c.always_true}"
+        self._set_compute_environment()
+        command_line, extra_filenames, _ = self.evaluator.build( )
+        self.assertEquals( command_line, "prog1 --thresh=4 --test_param=true" )
+
+    def test_evaluation_of_optional_datasets( self ):
+        # Make sure optional datasets don't cause evaluation to break and
+        # that they evaluate in Cheetah templates as 'None'.
+        select_xml = XML('''<param name="input1" type="data" optional="true"></param>''')
+        parameter = DataToolParameter( self.tool, select_xml )
+        self.job.parameters = [ JobParameter( name="input1", value=u'null' ) ]
+        self.tool.set_params( { "input1": parameter } )
+        self.tool._command_line = "prog1 --opt_input='${input1}'"
+        self._set_compute_environment()
+        command_line, extra_filenames, _ = self.evaluator.build( )
+        self.assertEquals( command_line, "prog1 --opt_input='None'" )
+
+    def test_evaluation_with_path_rewrites_wrapped( self ):
+        self.tool.check_values = True
+        self.__test_evaluation_with_path_rewrites()
+
+    def test_evaluation_with_path_rewrites_unwrapped( self ):
+        self.tool.check_values = False
+        self.__test_evaluation_with_path_rewrites()
+
+    def __test_evaluation_with_path_rewrites( self ):
+        # Various things can cause dataset paths to be rewritten (Task
+        # splitting, config.outputs_to_working_directory). This tests that
+        # functionality.
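+        # Each DatasetPath below pairs the canonical dataset path with a
+        # job-local false_path that should be substituted into the command
+        # line.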
+        self._setup_test_bwa_job()
+        job_path_1 = "%s/dataset_1.dat" % self.test_directory
+        job_path_2 = "%s/dataset_2.dat" % self.test_directory
+        self._set_compute_environment(
+            input_paths=[DatasetPath(1, '/galaxy/files/dataset_1.dat', false_path=job_path_1)],
+            output_paths=[DatasetPath(2, '/galaxy/files/dataset_2.dat', false_path=job_path_2)],
+        )
+        command_line, extra_filenames, _ = self.evaluator.build( )
+        self.assertEquals( command_line, "bwa --thresh=4 --in=%s --out=%s" % (job_path_1, job_path_2) )
+
+    def test_configfiles_evaluation( self ):
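+        # Config file entries are ( name, filename, content ) tuples; the
+        # "$thresh" template should be written out already evaluated against
+        # the job parameters.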
+        self.tool.config_files.append( ( "conf1", None, "$thresh" ) )
+        self.tool._command_line = "prog1 $conf1"
+        self._set_compute_environment()
+        command_line, extra_filenames, _ = self.evaluator.build( )
+        self.assertEquals( len( extra_filenames ), 1)
+        config_filename = extra_filenames[ 0 ]
+        config_basename = os.path.basename( config_filename )
+        # Verify config file written into working directory.
+        self.assertEquals( os.path.join( self.test_directory, config_basename ), config_filename )
+        # Verify config file contents are evaluated against parameters.
+        assert open( config_filename, "r").read() == "4"
+        self.assertEquals(command_line, "prog1 %s" % config_filename)
+
+    def test_arbitrary_path_rewriting_wrapped( self ):
+        self.tool.check_values = True
+        self.__test_arbitrary_path_rewriting()
+
+    def test_arbitrary_path_rewriting_unwrapped( self ):
+        self.tool.check_values = False
+        self.__test_arbitrary_path_rewriting()
+
+    def __test_arbitrary_path_rewriting( self ):
+        self.job.parameters = [ JobParameter( name="index_path", value="\"/old/path/human\"" ) ]
+        xml = XML('''<param name="index_path" type="select">
+            <option value="/old/path/human">Human</option>
+            <option value="/old/path/mouse">Mouse</option>
+        </param>''')
+        parameter = SelectToolParameter( self.tool, xml )
+
+        def get_field_by_name_for_value( name, value, trans, other_values ):
+            assert value == "/old/path/human"
+            assert name == "path"
+            return ["/old/path/human"]
+
+        def get_options( trans, other_values ):
+            return [ [ "", "/old/path/human", "" ] ]
+
+        parameter.options = Bunch(get_field_by_name_for_value=get_field_by_name_for_value, get_options=get_options)
+        self.tool.set_params( {
+            "index_path": parameter
+        } )
+        self.tool._command_line = "prog1 $index_path.fields.path"
+
+        def test_path_rewriter(v):
+            if v:
+                v = v.replace("/old", "/new")
+            return v
+        self._set_compute_environment(path_rewriter=test_path_rewriter)
+        command_line, extra_filenames, _ = self.evaluator.build( )
+        self.assertEquals(command_line, "prog1 /new/path/human")
+
+    def test_template_property_app( self ):
+        self._assert_template_property_is("$__app__.config.new_file_path", self.app.config.new_file_path)
+
+    def test_template_property_new_file_path( self ):
+        self._assert_template_property_is("$__new_file_path__", self.app.config.new_file_path)
+
+    def test_template_property_root_dir( self ):
+        self._assert_template_property_is("$__root_dir__", self.app.config.root)
+
+    def test_template_property_admin_users( self ):
+        self._assert_template_property_is("$__admin_users__", "mary at example.com")
+
+    def _assert_template_property_is(self, expression, value):
+        self.tool._command_line = "test.exe"
+        self.tool.config_files.append( ( "conf1", None, """%s""" % expression) )
+        self._set_compute_environment()
+        _, extra_filenames, _ = self.evaluator.build( )
+        config_filename = extra_filenames[ 0 ]
+        self.assertEquals(open( config_filename, "r").read(), value)
+
+    def _set_compute_environment(self, **kwds):
+        if "working_directory" not in kwds:
+            kwds[ "working_directory" ] = self.test_directory
+        if "new_file_path" not in kwds:
+            kwds[ "new_file_path" ] = self.app.config.new_file_path
+        self.evaluator.set_compute_environment( TestComputeEnvironment( **kwds ) )
+        assert "exec_before_job" in self.tool.hooks_called
+
+    def _setup_test_bwa_job( self ):
+        self.job.input_datasets = [ self._job_dataset( 'input1', '/galaxy/files/dataset_1.dat' ) ]
+        self.job.output_datasets = [ self._job_dataset( 'output1', '/galaxy/files/dataset_2.dat' ) ]
+
+    def _job_dataset( self, name, path ):
+        metadata = dict( )
+        hda = HistoryDatasetAssociation( name=name, metadata=metadata )
+        hda.dataset = Dataset( id=123, external_filename=path )
+        hda.dataset.metadata = dict()
+        hda.children = []
+        jida = JobToInputDatasetAssociation( name=name, dataset=hda )
+        return jida
+
+
+class MockHistoryDatasetAssociation( HistoryDatasetAssociation ):
+
+    def __init__( self, **kwds ):
+        self._metadata = dict()
+        super( MockHistoryDatasetAssociation, self ).__init__( **kwds )
+
+
+class TestComputeEnvironment( SimpleComputeEnvironment ):
+
+    def __init__(
+        self,
+        new_file_path,
+        working_directory,
+        input_paths=[ '/galaxy/files/dataset_1.dat' ],
+        output_paths=[ '/galaxy/files/dataset_2.dat' ],
+        path_rewriter=None
+    ):
+        self._new_file_path = new_file_path
+        self._working_directory = working_directory
+        self._input_paths = input_paths
+        self._output_paths = output_paths
+        self._path_rewriter = path_rewriter
+
+    def input_paths( self ):
+        return self._input_paths
+
+    def output_paths( self ):
+        return self._output_paths
+
+    def working_directory( self ):
+        return self._working_directory
+
+    def new_file_path(self):
+        return self._new_file_path
+
+    def unstructured_path_rewriter(self):
+        if self._path_rewriter:
+            return self._path_rewriter
+        else:
+            return super(TestComputeEnvironment, self).unstructured_path_rewriter()
+
+    def tool_directory( self ):
+        return TEST_TOOL_DIRECTORY
+
+
+class MockTool( object ):
+
+    def __init__( self, app ):
+        self.profile = 16.01
+        self.app = app
+        self.hooks_called = []
+        self.environment_variables = []
+        self._config_files = []
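+        # The default command template below mirrors the minimal bwa run
+        # exercised by test_simple_evaluation; tests override _command_line
+        # and _params as needed.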
+        self._command_line = "bwa --thresh=$thresh --in=$input1 --out=$output1"
+        self._params = { "thresh": self.test_thresh_param() }
+        self.options = Bunch(sanitize=False)
+        self.check_values = True
+
+    def test_thresh_param( self ):
+        elem = XML( '<param name="thresh" type="integer" value="5" />' )
+        return IntegerToolParameter( self, elem )
+
+    def params_from_strings( self, params, app, ignore_errors=False ):
+        return params_from_strings( self.inputs, params, app, ignore_errors )
+
+    @property
+    def template_macro_params( self ):
+        return {}
+
+    @property
+    def inputs( self ):
+        return self._params
+
+    def set_params( self, params ):
+        self._params = params
+
+    @property
+    def outputs( self ):
+        return dict(
+            output1=ToolOutput( "output1" ),
+        )
+
+    @property
+    def config_files( self ):
+        return self._config_files
+
+    @property
+    def command( self ):
+        return self._command_line
+
+    @property
+    def interpreter( self ):
+        return None
+
+    def handle_unvalidated_param_values( self, input_values, app ):
+        pass
+
+    def build_param_dict( self, incoming, *args, **kwds ):
+        return incoming
+
+    def call_hook( self, hook_name, *args, **kwargs ):
+        self.hooks_called.append( hook_name )
+
+    def exec_before_job( self, *args, **kwargs ):
+        pass
diff --git a/test/unit/tools/test_execution.py b/test/unit/tools/test_execution.py
new file mode 100644
index 0000000..441308e
--- /dev/null
+++ b/test/unit/tools/test_execution.py
@@ -0,0 +1,220 @@
+""" Test Tool execution and state handling logic.
+"""
+from unittest import TestCase
+
+from paste import httpexceptions
+
+import galaxy.model
+import tools_support
+from galaxy.tools.parameters import params_to_incoming
+from galaxy.util.bunch import Bunch
+from galaxy.util.odict import odict
+
+BASE_REPEAT_TOOL_CONTENTS = '''<tool id="test_tool" name="Test Tool">
+    <command>echo "$param1" #for $r in $repeat# "$r.param2" #end for# &lt; $out1</command>
+    <inputs>
+        <param type="text" name="param1" value="" />
+        <repeat name="repeat1" label="Repeat 1">
+          %s
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out1" format="data" />
+    </outputs>
+</tool>
+'''
+
+# Tool with a repeat parameter, to test state update.
+REPEAT_TOOL_CONTENTS = BASE_REPEAT_TOOL_CONTENTS % '''<param type="text" name="param2" value="" />'''
+REPEAT_COLLECTION_PARAM_CONTENTS = BASE_REPEAT_TOOL_CONTENTS % '''<param type="data_collection" name="param2" collection_type="paired" />'''
+
+
+class ToolExecutionTestCase( TestCase, tools_support.UsesApp, tools_support.UsesTools ):
+
+    def setUp(self):
+        self.setup_app()
+        self.history = galaxy.model.History()
+        self.trans = MockTrans( self.app, self.history )
+        self.app.dataset_collections_service = MockCollectionService()
+        self.tool_action = MockAction( self.trans )
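+        # MockAction records the kwargs of every execute() call so tests can
+        # assert on exactly what the tool passed to its action.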
+
+    def tearDown(self):
+        self.tear_down_app()
+
+    def test_state_new( self ):
+        self._init_tool( tools_support.SIMPLE_TOOL_CONTENTS )
+        vars = self.__handle_with_incoming( param1="moo" )
+        state = self.__assert_rerenders_tool_without_errors( vars )
+        assert state[ "param1" ] == "moo"
+
+    def test_execute( self ):
+        self._init_tool( tools_support.SIMPLE_TOOL_CONTENTS )
+        vars = self.__handle_with_incoming( param1="moo" )
+        self.__assert_executed( vars )
+        # Didn't specify a rerun_remap_job_id so this should be None
+        assert self.tool_action.execution_call_args[ 0 ][ "rerun_remap_job_id" ] is None
+
+    def test_execute_exception( self ):
+        self._init_tool( tools_support.SIMPLE_TOOL_CONTENTS )
+        self.tool_action.raise_exception( )
+        try:
+            self.__handle_with_incoming( param1="moo" )
+        except Exception as e:
+            assert 'Error executing tool' in str( e )
+
+    def test_execute_errors( self ):
+        self._init_tool( tools_support.SIMPLE_TOOL_CONTENTS )
+        self.tool_action.return_error( )
+        try:
+            self.__handle_with_incoming( param1="moo" )
+        except Exception as e:
+            assert 'Test Error Message' in str( e )
+
+    def test_redirect( self ):
+        self._init_tool( tools_support.SIMPLE_TOOL_CONTENTS )
+        self.tool_action.expect_redirect = True
+        redirect_raised = False
+        try:
+            self.__handle_with_incoming( param1="moo" )
+        except httpexceptions.HTTPFound:
+            redirect_raised = True
+        assert redirect_raised
+
+    def test_remap_job( self ):
+        self._init_tool( tools_support.SIMPLE_TOOL_CONTENTS )
+        vars = self.__handle_with_incoming( param1="moo", rerun_remap_job_id=self.app.security.encode_id(123) )
+        self.__assert_executed( vars )
+        assert self.tool_action.execution_call_args[ 0 ][ "rerun_remap_job_id" ] == 123
+
+    def test_invalid_remap_job( self ):
+        self._init_tool( tools_support.SIMPLE_TOOL_CONTENTS )
+        try:
+            self.__handle_with_incoming( param1="moo", rerun_remap_job_id='123' )
+        except Exception as e:
+            assert 'invalid job' in str( e )
+
+    def test_data_param_execute( self ):
+        self._init_tool( tools_support.SIMPLE_CAT_TOOL_CONTENTS )
+        hda = self.__add_dataset( 1 )
+        # Execute tool action
+        vars = self.__handle_with_incoming( param1=1 )
+        self.__assert_executed( vars )
+        # Tool 'executed' once, with hda as param1
+        assert len( self.tool_action.execution_call_args ) == 1
+        assert self.tool_action.execution_call_args[ 0 ][ "incoming" ][ "param1" ] == hda
+
+    def test_data_param_state_update( self ):
+        self._init_tool( tools_support.SIMPLE_CAT_TOOL_CONTENTS )
+        hda = self.__add_dataset( 1 )
+        # Update state
+        vars = self.__handle_with_incoming( param1=1 )
+        state = self.__assert_rerenders_tool_without_errors( vars )
+        assert hda == state[ "param1" ]
+
+    def __handle_with_incoming( self, previous_state=None, **kwds ):
+        """ Execute tool.handle_input with incoming specified by kwds
+        (optionally extending a previous state).
+        """
+        if previous_state:
+            incoming = self.__to_incoming( previous_state, **kwds)
+        else:
+            incoming = kwds
+        return self.tool.handle_input( trans=self.trans, incoming=incoming )
+
+    def __to_incoming( self, state, **kwds ):
+        new_incoming = {}
+        params_to_incoming( new_incoming, self.tool.inputs, state.inputs, self.app )
+        new_incoming[ "tool_state" ] = self.__state_to_string( state )
+        new_incoming.update( kwds )
+        return new_incoming
+
+    def __add_dataset( self, id, state='ok' ):
+        hda = galaxy.model.HistoryDatasetAssociation()
+        hda.id = id
+        hda.dataset = galaxy.model.Dataset()
+        hda.dataset.state = state
+
+        self.trans.sa_session.model_objects[ galaxy.model.HistoryDatasetAssociation ][ id ] = hda
+        self.history.datasets.append( hda )
+        return hda
+
+    def __add_collection_dataset( self, id, collection_type="paired", *hdas ):
+        hdca = galaxy.model.HistoryDatasetCollectionAssociation()
+        hdca.id = id
+        collection = galaxy.model.DatasetCollection()
+        hdca.collection = collection
+        collection.elements = [ galaxy.model.DatasetCollectionElement(element=self.__add_dataset( 1 )) ]
+        collection.type = collection_type
+        self.trans.sa_session.model_objects[ galaxy.model.HistoryDatasetCollectionAssociation ][ id ] = hdca
+        self.history.dataset_collections.append( hdca )
+        return hdca
+
+    def __assert_rerenders_tool_without_errors( self, vars ):
+        self.__assert_no_errors( vars )
+        return self.tool_action.execution_call_args[ 0 ][ "incoming" ]
+
+    def __assert_executed( self, vars ):
+        self.__assert_no_errors( vars )
+        assert len( vars[ 'jobs' ] ) > 0
+
+    def __assert_no_errors( self, vars ):
+        assert "job_errors" in vars
+        assert not vars[ "job_errors" ]
+
+
+class MockAction( object ):
+
+    def __init__( self, expected_trans ):
+        self.expected_trans = expected_trans
+        self.execution_call_args = []
+        self.expect_redirect = False
+        self.exception_after_execution = None
+        self.error_message_after_execution = None
+
+    def execute( self, tool, trans, **kwds ):
+        assert self.expected_trans == trans
+        self.execution_call_args.append( kwds )
+        num_calls = len( self.execution_call_args )
+        if self.expect_redirect:
+            raise httpexceptions.HTTPFound( "http://google.com" )
+        if self.exception_after_execution is not None:
+            if num_calls > self.exception_after_execution:
+                raise Exception( "Test Exception" )
+        if self.error_message_after_execution is not None:
+            if num_calls > self.error_message_after_execution:
+                return None, "Test Error Message"
+
+        return galaxy.model.Job(), odict(dict(out1="1"))
+
+    def raise_exception( self, after_execution=0 ):
+        self.exception_after_execution = after_execution
+
+    def return_error( self, after_execution=0 ):
+        self.error_message_after_execution = after_execution
+
+
+class MockTrans( object ):
+
+    def __init__( self, app, history ):
+        self.app = app
+        self.history = history
+        self.user = None
+        self.history._active_datasets_children_and_roles = [hda for hda in self.app.model.context.model_objects[ galaxy.model.HistoryDatasetAssociation ] if hda.active and hda.history == history]
+        self.workflow_building_mode = False
+        self.webapp = Bunch( name="galaxy" )
+        self.sa_session = self.app.model.context
+
+    def get_history( self, **kwargs ):
+        return self.history
+
+    def get_current_user_roles( self ):
+        return []
+
+
+class MockCollectionService( object ):
+
+    def __init__( self ):
+        self.collection_info = object()
+
+    def match_collections( self, collections_to_match ):
+        return self.collection_info
diff --git a/test/unit/tools/test_history_imp_exp.py b/test/unit/tools/test_history_imp_exp.py
new file mode 100644
index 0000000..d9ab1c5
--- /dev/null
+++ b/test/unit/tools/test_history_imp_exp.py
@@ -0,0 +1,168 @@
+import os
+import sys
+import tarfile
+from shutil import rmtree
+from tempfile import mkdtemp
+
+from galaxy import model
+from galaxy.exceptions import MalformedContents
+from galaxy.tools.imp_exp import JobImportHistoryArchiveWrapper, unpack_tar_gz_archive
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils.galaxy_mock import MockApp
+
+
+# good enough for the very specific tests we're writing as of now...
+DATASETS_ATTRS = '''[{{"info": "\\nuploaded txt file", "peek": "foo\\n\\n\\n\\n\\n\\n", "update_time": "2016-02-08 18:39:22.937474", "name": "Pasted Entry", "extension": "txt", "tags": {{}}, "__HistoryDatasetAssociation__": true, "file_name": "{file_name}", "deleted": false, "designation": null, "visible": true, "create_time": "2016-02-08 18:38:38.682087", "hid": 1, "parent_id": null, "extra_files_path": "", "uuid": "406d913e-925d-4ccd-800d-06c9b32df309", "metadata": {{"dbkey": "?", "dat [...]
+DATASETS_ATTRS_PROVENANCE = '''[]'''
+HISTORY_ATTRS = '''{"hid_counter": 2, "update_time": "2016-02-08 18:38:38.705058", "create_time": "2016-02-08 18:38:20.790057", "includes_deleted_datasets": false, "name": "paste", "tags": {}, "includes_hidden_datasets": false, "genome_build": "?", "annotation": null}'''
+JOBS_ATTRS = '''[{"info": null, "tool_id": "upload1", "update_time": "2016-02-08T18:39:23.356482", "stdout": "", "input_mapping": {}, "tool_version": "1.1.4", "input_datasets": [], "traceback": null, "command_line": "python /galaxy/tools/data_source/upload.py /galaxy /scratch/tmppwU9rD /scratch/tmpP4_45Y 1:/scratch/jobs/000/dataset_1_files:/data/000/dataset_1.dat", "exit_code": 0, "output_datasets": [1], "state": "ok", "create_time": "2016-02-08T18:38:39.153873", "params": {"files": [{"t [...]
+
+
+def _run_jihaw_cleanup(history_archive, msg):
+    app = MockApp()
+    job = model.Job()
+    job.stderr = ''
+    jiha = model.JobImportHistoryArchive(job=job, archive_dir=history_archive.arc_directory)
+    app.model.context.current.add_all([job, jiha])
+    app.model.context.flush()
+    jihaw = JobImportHistoryArchiveWrapper(app, 1)  # yeehaw!
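+    # cleanup_after_job() performs the actual import; a malicious archive
+    # should either raise MalformedContents or at least leave the insecure
+    # payload unread.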
+    try:
+        jihaw.cleanup_after_job()
+        data = app.object_store.get_data(model.Dataset(1))
+        assert data != 'insecure', msg
+    except MalformedContents:
+        pass
+
+
+def test_history_import_symlink():
+    """ Ensure a history containing a dataset that is a symlink cannot be imported
+    """
+    with HistoryArchive() as history_archive:
+        history_archive.write_metafiles()
+        history_archive.write_link('datasets/Pasted_Entry_1.txt', '../target.txt')
+        history_archive.write_file('target.txt', 'insecure')
+        _run_jihaw_cleanup(history_archive, 'Symlink dataset in import archive allowed')
+
+
+def test_history_import_relpath_in_metadata():
+    """ Ensure that dataset_attrs.txt cannot contain a relative path outside the archive
+    """
+    with HistoryArchive() as history_archive:
+        history_archive.write_metafiles(dataset_file_name='../outside.txt')
+        history_archive.write_file('datasets/Pasted_Entry_1.txt', 'foo')
+        history_archive.write_outside()
+        _run_jihaw_cleanup(history_archive, 'Relative parent path in datasets_attrs.txt allowed')
+
+
+def test_history_import_abspath_in_metadata():
+    """ Ensure that dataset_attrs.txt cannot contain a absolute path outside the archive
+    """
+    with HistoryArchive() as history_archive:
+        history_archive.write_metafiles(
+            dataset_file_name=os.path.join(history_archive.temp_directory, 'outside.txt'))
+        history_archive.write_file('datasets/Pasted_Entry_1.txt', 'foo')
+        history_archive.write_outside()
+        _run_jihaw_cleanup(history_archive, 'Absolute path in datasets_attrs.txt allowed')
+
+
+def _run_unpack(history_archive, dest_parent, msg):
+    dest_dir = os.path.join(dest_parent, 'dest')
+    insecure_dir = os.path.join(dest_parent, 'insecure')
+    os.makedirs(dest_dir)
+    options = Dummy()
+    options.is_url = False
+    options.is_file = True
+    options.is_b64encoded = False
+    args = (history_archive.tar_file_path, dest_dir)
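+    # The unpacker should refuse members that escape dest_dir; it may raise,
+    # but the sibling 'insecure' directory must never be created.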
+    try:
+        unpack_tar_gz_archive.main(options, args)
+    except AssertionError:
+        pass
+    assert not os.path.exists(insecure_dir), msg
+
+
+def test_history_import_relpath_in_archive():
+    """ Ensure that a history import archive cannot reference a relative path
+    outside the archive
+    """
+    dest_parent = mkdtemp()
+    with HistoryArchive(arcname_prefix='../insecure') as history_archive:
+        history_archive.write_metafiles()
+        history_archive.write_file('datasets/Pasted_Entry_1.txt', 'foo')
+        history_archive.finalize()
+        _run_unpack(history_archive, dest_parent, 'Relative parent path in import archive allowed')
+
+
+def test_history_import_abspath_in_archive():
+    """ Ensure that a history import archive cannot reference a absolute path
+    outside the archive
+    """
+    dest_parent = mkdtemp()
+    arcname_prefix = os.path.abspath(os.path.join(dest_parent, 'insecure'))
+    with HistoryArchive(arcname_prefix=arcname_prefix) as history_archive:
+        history_archive.write_metafiles()
+        history_archive.write_file('datasets/Pasted_Entry_1.txt', 'foo')
+        history_archive.finalize()
+        _run_unpack(history_archive, dest_parent, 'Absolute path in import archive allowed')
+
+
+class HistoryArchive(object):
+    def __init__(self, arcname_prefix=None):
+        self.temp_directory = mkdtemp()
+        self.arc_directory = os.path.join(self.temp_directory, 'archive')
+        self.arcname_prefix = arcname_prefix
+        self.tar_file_path = os.path.join(self.temp_directory, 'archive.tar.gz')
+        self.tar_file = tarfile.open(self.tar_file_path, 'w:gz')
+        os.makedirs(self.arc_directory)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, tb):
+        rmtree(self.temp_directory)
+
+    def _create_parent(self, fname):
+        path = os.path.join(self.arc_directory, fname)
+        if not os.path.exists(os.path.dirname(path)):
+            os.makedirs(os.path.dirname(path))
+
+    def _arcname(self, path):
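+        # An arcname_prefix such as '../insecure' lets tests craft archive
+        # member names that attempt to escape the extraction directory.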
+        if self.arcname_prefix:
+            path = os.path.join(self.arcname_prefix, path)
+        return path
+
+    def write_metafiles(self, dataset_file_name='datasets/Pasted_Entry_1.txt'):
+        self.write_file('datasets_attrs.txt',
+                        DATASETS_ATTRS.format(file_name=dataset_file_name))
+        self.write_file('datasets_attrs.txt.provenance', DATASETS_ATTRS_PROVENANCE)
+        self.write_file('history_attrs.txt', HISTORY_ATTRS)
+        self.write_file('jobs_attrs.txt', JOBS_ATTRS)
+
+    def write_outside(self, fname='outside.txt', contents='invalid'):
+        with open(os.path.join(self.temp_directory, fname), 'w') as f:
+            f.write(contents)
+
+    def write_file(self, fname, contents):
+        self._create_parent(fname)
+        path = os.path.join(self.arc_directory, fname)
+        with open(path, 'w') as f:
+            f.write(contents)
+        # TarFile.add() (via TarFile.gettarinfo()) strips leading '/' and is
+        # unsuitable for our purposes
+        ti = self.tar_file.gettarinfo(fileobj=open(path))
+        ti.name = self._arcname(fname)
+        self.tar_file.addfile(ti, fileobj=open(path))
+
+    def write_link(self, fname, target):
+        self._create_parent(fname)
+        path = os.path.join(self.arc_directory, fname)
+        os.symlink(target, path)
+
+    def finalize(self):
+        self.tar_file.close()
+
+
+class Dummy(object):
+    pass
diff --git a/test/unit/tools/test_parameter_parsing.py b/test/unit/tools/test_parameter_parsing.py
new file mode 100644
index 0000000..3290d39
--- /dev/null
+++ b/test/unit/tools/test_parameter_parsing.py
@@ -0,0 +1,357 @@
+from unittest import TestCase
+from xml.etree.ElementTree import XML
+
+import tools_support
+from galaxy import model
+from galaxy.tools.parameters import basic
+from galaxy.util import bunch
+
+
+class BaseParameterTestCase( TestCase, tools_support.UsesApp ):
+
+    def setUp(self):
+        self.setup_app( mock_model=False )
+        self.mock_tool = bunch.Bunch(
+            app=self.app,
+            tool_type="default",
+            valid_input_states=model.Dataset.valid_input_states,
+        )
+
+    def _parameter_for(self, **kwds):
+        content = kwds["xml"]
+        param_xml = XML( content )
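+        # ToolParameter.build dispatches on the XML "type" attribute, so each
+        # test below just supplies raw param XML.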
+        return basic.ToolParameter.build( self.mock_tool, param_xml )
+
+
+class ParameterParsingTestCase( BaseParameterTestCase ):
+    """ Test the parsing of XML for most parameter types - in many
+    ways these are not very good tests since they break the abstraction
+    established by the tools. The docs tests in basic.py are better but
+    largely rely on HTML stuff we are moving to the client side so they
+    those tests may need to be updated anyway.
+
+    It occurs to me that rewriting this stuff to test to_dict would
+    be much better - since that is a public API of the the tools.
+    """
+
+    def test_parse_help_and_label(self):
+        param = self._parameter_for(xml="""
+            <param type="text" name="texti" value="mydefault" label="x" help="y" />
+        """)
+        assert param.label == "x"
+        assert param.help == "y"
+
+        param = self._parameter_for(xml="""
+            <param type="text" name="texti" value="mydefault">
+                <label>x2</label>
+                <help>y2</help>
+            </param>
+        """)
+        assert param.label == "x2"
+        assert param.help == "y2"
+
+    def test_parse_sanitizers(self):
+        param = self._parameter_for(xml="""
+            <param type="text" name="texti" value="mydefault">
+              <sanitizer invalid_char="">
+                <valid initial="string.digits"><add value=","/> </valid>
+              </sanitizer>
+            </param>
+        """)
+        sanitizer = param.sanitizer
+        assert sanitizer is not None
+        assert sanitizer.sanitize_param("a") == ""
+        assert sanitizer.sanitize_param(",") == ","
+
+    def test_parse_optional(self):
+        param = self._parameter_for(xml="""
+            <param type="text" name="texti" value="mydefault" />
+        """)
+        assert param.optional is False
+
+        param = self._parameter_for(xml="""
+            <param type="text" name="texti" value="mydefault" optional="true" />
+        """)
+        assert param.optional is True
+
+    def test_parse_validators(self):
+        param = self._parameter_for(xml="""
+            <param type="text" name="texti" value="mydefault">
+                <validator type="unspecified_build" message="no genome?" />
+            </param>
+        """)
+        assert param.validators[0].message == "no genome?"
+
+    def test_text_params(self):
+        param = self._parameter_for(xml="""
+            <param type="text" name="texti" value="mydefault" />
+        """)
+        assert param.value == "mydefault"
+        assert param.type == "text"
+        assert not param.area
+
+    def test_text_area_params(self):
+        param = self._parameter_for(xml="""
+            <param type="text" name="textarea" area="true" />
+        """)
+        assert param.value is None
+        assert param.type == "text"
+        assert param.area
+
+    def test_integer_params(self):
+        param = self._parameter_for(xml="""
+            <param type="integer" name="intp" min="8" max="9" value="9" />
+        """)
+        assert param.name == "intp"
+        assert param.value == "9"
+        assert param.type == "integer"
+        param.validate( 8 )
+        self.assertRaises(Exception, lambda: param.validate( 10 ))
+
+    def test_float_params(self):
+        param = self._parameter_for(xml="""
+            <param type="float" name="floatp" min="7.8" max="9.5" value="9" />
+        """)
+        assert param.name == "floatp"
+        assert param.value == "9"
+        assert param.type == "float"
+        param.validate( 8.1 )
+        self.assertRaises(Exception, lambda: param.validate( 10.0 ))
+
+    def test_boolean_params(self):
+        param = self._parameter_for(xml="""
+            <param type="boolean" name="boolp" />
+        """)
+        assert param.name == "boolp"
+        assert param.truevalue == "true"
+        assert param.falsevalue == "false"
+        assert param.type == "boolean"
+
+        param = self._parameter_for(xml="""
+            <param type="boolean" name="boolp" truevalue="t" falsevalue="f" />
+        """)
+        assert param.truevalue == "t"
+        assert param.falsevalue == "f"
+
+    def test_file_params(self):
+        param = self._parameter_for(xml="""
+            <param type="file" name="filep" />
+        """)
+        assert param.name == "filep"
+        assert param.type == "file"
+
+    def test_ftpfile_params(self):
+        param = self._parameter_for(xml="""
+            <param type="ftpfile" name="ftpfilep" />
+        """)
+        assert param.name == "ftpfilep"
+        assert param.type == "ftpfile"
+
+    def test_hidden(self):
+        param = self._parameter_for(xml="""
+            <param name="hiddenp" type="hidden" value="a hidden value" />
+        """)
+        assert param.name == "hiddenp"
+        assert param.type == "hidden"
+        assert param.value == "a hidden value"
+
+    def test_base_url(self):
+        param = self._parameter_for(xml="""
+            <param name="urlp" type="baseurl" value="http://twitter.com/" />
+        """)
+        assert param.name == "urlp"
+        assert param.type == "baseurl"
+        assert param.value == "http://twitter.com/"
+
+        param = self._parameter_for(xml="""
+            <param name="urlp" type="baseurl" />
+        """)
+        assert param.value == ""
+
+    def test_select_static(self):
+        param = self._parameter_for(xml="""
+            <param name="selectp" type="select" multiple="true">
+                <option value="a">A</option>
+                <option value="b" selected="true">B</option>
+            </param>
+        """)
+        assert param.display is None
+        assert param.multiple is True
+        assert param.name == "selectp"
+        assert param.type == "select"
+        assert param.separator == ","
+
+        assert param.options is None
+        assert param.dynamic_options is None
+        assert not param.is_dynamic
+
+        options = param.static_options
+        assert options[0][0] == "A"
+        assert options[0][1] == "a"
+        assert not options[0][2]
+
+        assert options[1][0] == "B"
+        assert options[1][1] == "b"
+        assert options[1][2]
+
+    def test_select_dynamic(self):
+        param = self._parameter_for(xml="""
+            <param name="selectp" type="select" dynamic_options="cow" display="checkboxes" separator="moo">
+            </param>
+        """)
+        assert param.multiple is False
+        assert param.options is None
+        assert param.dynamic_options == "cow"
+        # This should be None or something - not undefined.
+        # assert not param.static_options
+        assert param.is_dynamic
+
+        assert param.display == "checkboxes"
+        assert param.separator == "moo"
+
+    def test_select_options_from(self):
+        param = self._parameter_for(xml="""
+            <param name="selectp" type="select">
+                <options from_data_table="cow">
+                </options>
+            </param>
+        """)
+        assert param.dynamic_options is None
+        assert param.is_dynamic
+
+        # More detailed tests of dynamic options should be placed
+        # in test_select_parameters.
+        assert param.options.missing_tool_data_table_name == "cow"
+
+    def test_genome_build(self):
+        param = self._parameter_for(xml="""
+            <param name="genomep" type="genomebuild">
+            </param>
+        """)
+        assert param.type == "genomebuild"
+        assert param.name == "genomep"
+        assert param.static_options
+
+    def test_column_params(self):
+        param = self._parameter_for(xml="""
+            <param name="col1" type="data_column" data_ref="input1">
+            </param>
+        """)
+        assert param.data_ref == "input1"
+        assert param.usecolnames is False
+        assert param.optional is False
+        assert param.numerical is False
+
+        param = self._parameter_for(xml="""
+            <param name="col1" type="data_column" data_ref="input1" use_header_names="true" numerical="true" force_select="false">
+            </param>
+        """)
+        assert param.data_ref == "input1"
+        assert param.usecolnames is True
+        assert param.optional is True
+        assert param.numerical is True
+
+    def test_data_param_no_validation(self):
+        param = self._parameter_for(xml="""
+            <param name="input" type="data">
+            </param>
+        """)
+        assert len(param.validators) == 1
+        param = self._parameter_for(xml="""
+            <param name="input" type="data" no_validation="true">
+            </param>
+        """)
+        assert len(param.validators) == 0
+
+    def test_data_param_dynamic_options(self):
+        param = self._parameter_for(xml="""
+            <param name="input" type="data" />
+        """)
+        assert param.options is None
+        assert param.options_filter_attribute is None
+
+        param = self._parameter_for(xml="""
+            <param name="input" type="data">
+                <options from_data_table="cow">
+                </options>
+            </param>
+        """)
+        assert param.options is not None
+        assert param.options_filter_attribute is None
+
+        param = self._parameter_for(xml="""
+            <param name="input" type="data">
+                <options from_data_table="cow" options_filter_attribute="cow">
+                </options>
+            </param>
+        """)
+        assert param.options is not None
+        assert param.options_filter_attribute == "cow"
+
+    def test_conversions(self):
+        param = self._parameter_for(xml="""
+            <param name="input" type="data" />
+        """)
+        assert param.conversions == []
+
+        param = self._parameter_for(xml="""
+            <param name="input" type="data">
+                <conversion name="foo" type="txt" />
+                <conversion name="foo2" type="bam" />
+            </param>
+        """)
+        assert param.conversions[0][0] == "foo"
+        assert param.conversions[0][1] == "txt"
+        assert param.conversions[1][0] == "foo2"
+        assert param.conversions[1][1] == "bam"
+
+    def test_drilldown(self):
+        param = self._parameter_for(xml="""
+            <param name="some_name" type="drill_down" display="checkbox" hierarchy="recurse" multiple="true">
+              <options>
+               <option name="Heading 1" value="heading1">
+                   <option name="Option 1" value="option1"/>
+                   <option name="Option 2" value="option2"/>
+                   <option name="Heading 1" value="heading1">
+                     <option name="Option 3" value="option3"/>
+                     <option name="Option 4" value="option4"/>
+                   </option>
+               </option>
+               <option name="Option 5" value="option5"/>
+              </options>
+            </param>
+        """)
+        assert param.type == "drill_down"
+        assert param.name == "some_name"
+        assert param.options
+
+        heading1 = param.options[0]
+        assert heading1["selected"] is False
+        assert heading1["name"] == "Heading 1"
+        assert heading1["value"] == "heading1"
+        option1 = heading1["options"][0]
+        assert option1["selected"] is False
+        assert option1["name"] == "Option 1"
+        assert option1["value"] == "option1"
+
+        option5 = param.options[1]
+        assert option5["selected"] is False
+        assert option5["name"] == "Option 5"
+        assert option5["value"] == "option5"
+        assert len(option5["options"]) == 0
+
+    def test_tool_collection(self):
+        param = self._parameter_for(xml="""
+            <param name="datac" type="data_collection" collection_type="list,list:paired" format="txt">
+            </param>
+        """)
+        assert param.type == "data_collection"
+        assert param.collection_types == ["list", "list:paired"]
+
+    def test_library(self):
+        param = self._parameter_for(xml="""
+            <param name="libraryp" type="library_data">
+            </param>
+        """)
+        assert param.type == "library_data"
+        assert param.name == "libraryp"
diff --git a/test/unit/tools/test_parsing.py b/test/unit/tools/test_parsing.py
new file mode 100644
index 0000000..048a81c
--- /dev/null
+++ b/test/unit/tools/test_parsing.py
@@ -0,0 +1,446 @@
+import os
+import os.path
+import shutil
+import tempfile
+import unittest
+
+from math import isinf
+
+from galaxy.tools.parser.factory import get_tool_source
+
+
+TOOL_XML_1 = """
+<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
+    <description>The BWA Mapper</description>
+    <version_command interpreter="python">bwa.py --version</version_command>
+    <parallelism method="multi" split_inputs="input1" split_mode="to_size" split_size="1" merge_outputs="out_file1" />
+    <command interpreter="python">bwa.py --arg1=42</command>
+    <requirements>
+        <container type="docker">mycool/bwa</container>
+        <requirement type="package" version="1.0">bwa</requirement>
+    </requirements>
+    <outputs>
+        <data name="out1" format="bam" from_work_dir="out1.bam" />
+    </outputs>
+    <stdio>
+        <exit_code range="1:" level="fatal" />
+    </stdio>
+    <help>This is HELP TEXT1!!!</help>
+    <tests>
+        <test>
+            <param name="foo" value="5" />
+            <output name="out1" file="moo.txt" />
+        </test>
+        <test>
+            <param name="foo" value="5">
+            </param>
+            <output name="out1" lines_diff="4" compare="sim_size">
+                <metadata name="dbkey" value="hg19" />
+            </output>
+        </test>
+    </tests>
+</tool>
+"""
+
+TOOL_YAML_1 = """
+name: "Bowtie Mapper"
+class: GalaxyTool
+id: bowtie
+version: 1.0.2
+description: "The Bowtie Mapper"
+command: "bowtie_wrapper.pl --map-the-stuff"
+interpreter: "perl"
+runtime_version:
+  command: "bowtie --version"
+requirements:
+  - type: package
+    name: bwa
+    version: 1.0.1
+containers:
+  - type: docker
+    identifier: "awesome/bowtie"
+outputs:
+  out1:
+    format: bam
+    from_work_dir: out1.bam
+inputs:
+  - name: input1
+    type: integer
+    min: 7
+    max: 8
+  - name: moo
+    label: cow
+    type: repeat
+    blocks:
+      - name: nestinput
+        type: data
+      - name: nestsample
+        type: text
+help: |
+    This is HELP TEXT2!!!
+tests:
+   - inputs:
+       foo: 5
+     outputs:
+       out1: moo.txt
+   - inputs:
+       foo:
+         value: 5
+     outputs:
+       out1:
+         lines_diff: 4
+         compare: sim_size
+"""
+
+
+class BaseLoaderTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.temp_directory = tempfile.mkdtemp()
+
+    def tearDown(self):
+        shutil.rmtree(self.temp_directory)
+
+    @property
+    def _tool_source(self):
+        return self._get_tool_source()
+
+    def _get_tool_source(self, source_file_name=None, source_contents=None):
+        if source_file_name is None:
+            source_file_name = self.source_file_name
+        if source_contents is None:
+            source_contents = self.source_contents
+        if not os.path.isabs(source_file_name):
+            path = os.path.join(self.temp_directory, source_file_name)
+            open(path, "w").write(source_contents)
+        else:
+            path = source_file_name
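+        # get_tool_source presumably picks the parser from the file suffix
+        # (.xml vs .yml), which is why each test case sets source_file_name.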
+        tool_source = get_tool_source(path)
+        return tool_source
+
+
+class XmlLoaderTestCase(BaseLoaderTestCase):
+    source_file_name = "bwa.xml"
+    source_contents = TOOL_XML_1
+
+    def test_version(self):
+        assert self._tool_source.parse_version() == "1.0.1"
+
+    def test_id(self):
+        assert self._tool_source.parse_id() == "bwa"
+
+    def test_module_and_type(self):
+        assert self._tool_source.parse_tool_module() is None
+        assert self._tool_source.parse_tool_type() is None
+
+    def test_name(self):
+        assert self._tool_source.parse_name() == "BWA Mapper"
+
+    def test_is_multi_byte(self):
+        assert self._tool_source.parse_is_multi_byte()
+
+    def test_display_interface(self):
+        assert self._tool_source.parse_display_interface(False)
+
+    def test_require_login(self):
+        assert self._tool_source.parse_require_login(False)
+
+    def test_parse_request_param_translation_elem(self):
+        assert self._tool_source.parse_request_param_translation_elem() is None
+
+    def test_command_parsing(self):
+        assert self._tool_source.parse_command() == "bwa.py --arg1=42"
+        assert self._tool_source.parse_interpreter() == "python"
+
+    def test_descripting_parsing(self):
+        assert self._tool_source.parse_description() == "The BWA Mapper"
+
+    def test_version_command(self):
+        assert self._tool_source.parse_version_command() == "bwa.py --version"
+        assert self._tool_source.parse_version_command_interpreter() == "python"
+
+    def test_parallelism(self):
+        parallelism_info = self._tool_source.parse_parallelism()
+        assert parallelism_info.method == "multi"
+        assert parallelism_info.attributes["split_inputs"] == "input1"
+
+    def test_hidden(self):
+        assert self._tool_source.parse_hidden()
+
+    def test_action(self):
+        assert self._tool_source.parse_action_module() is None
+
+    def test_requirements(self):
+        requirements, containers = self._tool_source.parse_requirements_and_containers()
+        assert requirements[0].type == "package"
+        assert containers[0].identifier == "mycool/bwa"
+
+    def test_outputs(self):
+        outputs, output_collections = self._tool_source.parse_outputs(object())
+        assert len(outputs) == 1
+        assert len(output_collections) == 0
+
+    def test_stdio(self):
+        exit, regexes = self._tool_source.parse_stdio()
+        assert len(exit) == 1
+        assert len(regexes) == 0
+        assert exit[0].range_start == 1
+        assert isinf(exit[0].range_end)
+
+    def test_help(self):
+        help_text = self._tool_source.parse_help()
+        assert help_text.strip() == "This is HELP TEXT1!!!"
+
+    def test_tests(self):
+        tests_dict = self._tool_source.parse_tests_to_dict()
+        tests = tests_dict["tests"]
+        assert len(tests) == 2
+        test_dict = tests[0]
+        inputs = test_dict["inputs"]
+        assert len(inputs) == 1
+        input1 = inputs[0]
+        assert input1[0] == "foo"
+        assert input1[1] == "5"
+
+        outputs = test_dict["outputs"]
+        assert len(outputs) == 1
+        output1 = outputs[0]
+        assert output1[0] == 'out1'
+        assert output1[1] == 'moo.txt'
+        attributes1 = output1[2]
+        assert attributes1["compare"] == "diff"
+        assert attributes1["lines_diff"] == 0
+
+        test2 = tests[1]
+        outputs = test2["outputs"]
+        assert len(outputs) == 1
+        output2 = outputs[0]
+        assert output2[0] == 'out1'
+        assert output2[1] is None
+        attributes1 = output2[2]
+        assert attributes1["compare"] == "sim_size"
+        assert attributes1["lines_diff"] == 4
+
+    def test_exit_code(self):
+        tool_source = self._get_tool_source(source_contents="""<tool id="bwa" name="bwa">
+            <command detect_errors="exit_code">
+                ls
+            </command>
+        </tool>
+        """)
+        exit, regexes = tool_source.parse_stdio()
+        assert len(exit) == 2, exit
+        assert len(regexes) == 0, regexes
+
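+        # detect_errors="aggressive" should additionally install the standard
+        # stderr-matching regexes, hence two regexes below.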
+        tool_source = self._get_tool_source(source_contents="""<tool id="bwa" name="bwa">
+            <command detect_errors="aggressive">
+                ls
+            </command>
+        </tool>
+        """)
+        exit, regexes = tool_source.parse_stdio()
+        assert len(exit) == 2, exit
+        assert len(regexes) == 2, regexes
+
+    def test_sanitize_option(self):
+        assert self._tool_source.parse_sanitize() is True
+
+    def test_refresh_option(self):
+        assert self._tool_source.parse_refresh() is False
+
+
+class YamlLoaderTestCase(BaseLoaderTestCase):
+    source_file_name = "bwa.yml"
+    source_contents = TOOL_YAML_1
+
+    def test_version(self):
+        assert self._tool_source.parse_version() == "1.0.2"
+
+    def test_id(self):
+        assert self._tool_source.parse_id() == "bowtie"
+
+    def test_module_and_type(self):
+        # These just rely on defaults
+        assert self._tool_source.parse_tool_module() is None
+        assert self._tool_source.parse_tool_type() is None
+
+    def test_name(self):
+        assert self._tool_source.parse_name() == "Bowtie Mapper"
+
+    def test_is_multi_byte(self):
+        assert not self._tool_source.parse_is_multi_byte()
+
+    def test_display_interface(self):
+        assert not self._tool_source.parse_display_interface(False)
+        assert self._tool_source.parse_display_interface(True)
+
+    def test_require_login(self):
+        assert not self._tool_source.parse_require_login(False)
+
+    def test_parse_request_param_translation_elem(self):
+        assert self._tool_source.parse_request_param_translation_elem() is None
+
+    def test_command_parsing(self):
+        assert self._tool_source.parse_command() == "bowtie_wrapper.pl --map-the-stuff"
+        assert self._tool_source.parse_interpreter() == "perl"
+
+    def test_parse_redirect_url_params_elem(self):
+        assert self._tool_source.parse_redirect_url_params_elem() is None
+
+    def test_descripting_parsing(self):
+        assert self._tool_source.parse_description() == "The Bowtie Mapper"
+
+    def test_version_command(self):
+        assert self._tool_source.parse_version_command() == "bowtie --version"
+        assert self._tool_source.parse_version_command_interpreter() is None
+
+    def test_parallelism(self):
+        assert self._tool_source.parse_parallelism() is None
+
+    def test_hidden(self):
+        assert not self._tool_source.parse_hidden()
+
+    def test_action(self):
+        assert self._tool_source.parse_action_module() is None
+
+    def test_requirements(self):
+        requirements, containers = self._tool_source.parse_requirements_and_containers()
+        assert requirements[0].type == "package"
+        assert requirements[0].name == "bwa"
+        assert containers[0].identifier == "awesome/bowtie"
+
+    def test_outputs(self):
+        outputs, output_collections = self._tool_source.parse_outputs(object())
+        assert len(outputs) == 1
+        assert len(output_collections) == 0
+
+    def test_stdio(self):
+        exit, regexes = self._tool_source.parse_stdio()
+        assert len(exit) == 2
+
+        assert isinf(exit[0].range_start)
+        assert exit[0].range_start == float("-inf")
+
+        assert exit[1].range_start == 1
+        assert isinf(exit[1].range_end)
+
+    def test_help(self):
+        help_text = self._tool_source.parse_help()
+        assert help_text.strip() == "This is HELP TEXT2!!!"
+
+    def test_inputs(self):
+        input_pages = self._tool_source.parse_input_pages()
+        assert input_pages.inputs_defined
+        page_sources = input_pages.page_sources
+        assert len(page_sources) == 1
+        page_source = page_sources[0]
+        input_sources = page_source.parse_input_sources()
+        assert len(input_sources) == 2
+
+    def test_tests(self):
+        tests_dict = self._tool_source.parse_tests_to_dict()
+        tests = tests_dict["tests"]
+        assert len(tests) == 2
+        test_dict = tests[0]
+        inputs = test_dict["inputs"]
+        assert len(inputs) == 1
+        input1 = inputs[0]
+        assert input1[0] == "foo"
+        assert input1[1] == 5
+
+        outputs = test_dict["outputs"]
+        assert len(outputs) == 1
+        output1 = outputs[0]
+        assert output1[0] == 'out1'
+        assert output1[1] == 'moo.txt'
+        attributes1 = output1[2]
+        assert attributes1["compare"] == "diff"
+        assert attributes1["lines_diff"] == 0
+
+        test2 = tests[1]
+        outputs = test2["outputs"]
+        assert len(outputs) == 1
+        output2 = outputs[0]
+        assert output2[0] == 'out1'
+        assert output2[1] is None
+        attributes1 = output2[2]
+        assert attributes1["compare"] == "sim_size"
+        assert attributes1["lines_diff"] == 4
+
+    def test_sanitize(self):
+        assert self._tool_source.parse_sanitize() is True
+
+
+class DataSourceLoaderTestCase(BaseLoaderTestCase):
+    source_file_name = "ds.xml"
+    source_contents = """<?xml version="1.0"?>
+<tool name="YeastMine" id="yeastmine" tool_type="data_source">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://yeastmine.yeastgenome.org/yeastmine/begin.do" check_values="false" method="get">
+        <display>go to yeastMine server $GALAXY_URL</display>
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="txt" /> <!-- intermine currently always provides 'txt', make this auto detect -->
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <!-- The following block doesn't really belong here - not sure what the
+         block is supposed to do; cannot actually find any usage. -->
+    <redirect_url_params>cow</redirect_url_params>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
+"""
+
+    def test_sanitize_option(self):
+        assert self._tool_source.parse_sanitize() is False
+
+    def test_refresh_option(self):
+        assert self._tool_source.parse_refresh() is True
+
+    def test_tool_type(self):
+        assert self._tool_source.parse_tool_type() == "data_source"
+
+    def test_parse_request_param_translation_elem(self):
+        assert self._tool_source.parse_request_param_translation_elem() is not None
+
+    def test_redirect_url_params_elem(self):
+        assert self._tool_source.parse_redirect_url_params_elem() is not None
+
+    def test_parallelism(self):
+        assert self._tool_source.parse_parallelism() is None
+
+    def test_hidden(self):
+        assert not self._tool_source.parse_hidden()
+
+
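+# Loads a real tool shipped with Galaxy (the history export tool) to check
+# that tool/action module and tool_type parsing work for special tool types.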
+class SpecialToolLoaderTestCase(BaseLoaderTestCase):
+    source_file_name = os.path.join(os.getcwd(), "lib/galaxy/tools/imp_exp/exp_history_to_archive.xml")
+    source_contents = None
+
+    def test_tool_type(self):
+        tool_module = self._tool_source.parse_tool_module()
+        # We probably don't need parse_tool_module any more -
+        # tool_type seems sufficient.
+        assert tool_module[0] == "galaxy.tools"
+        assert tool_module[1] == "ExportHistoryTool"
+        assert self._tool_source.parse_tool_type() == "export_history"
+
+    def test_is_multi_byte(self):
+        assert not self._tool_source.parse_is_multi_byte()
+
+    def test_version_command(self):
+        assert self._tool_source.parse_version_command() is None
+        assert self._tool_source.parse_version_command_interpreter() is None
+
+    def test_action(self):
+        action = self._tool_source.parse_action_module()
+        assert action[0] == "galaxy.tools.actions.history_imp_exp"
+        assert action[1] == "ExportHistoryToolAction"
diff --git a/test/unit/tools/test_select_parameters.py b/test/unit/tools/test_select_parameters.py
new file mode 100644
index 0000000..87689e2
--- /dev/null
+++ b/test/unit/tools/test_select_parameters.py
@@ -0,0 +1,112 @@
+from galaxy import model
+from galaxy.tools.parameters import basic
+from galaxy.util import bunch
+
+from .test_parameter_parsing import BaseParameterTestCase
+
+
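+# Note: each test assigns options_xml before touching self.param; the param
+# property below builds the select parameter lazily from that XML.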
+class SelectToolParameterTestCase( BaseParameterTestCase ):
+
+    def test_validated_values( self ):
+        self.options_xml = '''<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>'''
+        try:
+            self.param.from_json("42", self.trans, { "input_bam": model.HistoryDatasetAssociation() })
+        except ValueError as err:
+            assert str(err) == "An invalid option was selected for my_name, '42', please verify."
+            return
+        assert False
+
+    def test_validated_values_missing_dependency( self ):
+        self.options_xml = '''<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>'''
+        try:
+            self.param.from_json("42", self.trans)
+        except ValueError as err:
+            assert str(err) == "Parameter my_name requires a value, but has no legal values defined."
+            return
+        assert False
+
+    def test_unvalidated_values( self ):
+        self.options_xml = '''<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>'''
+        self.trans.workflow_building_mode = True
+        assert self.param.from_json("42", self.trans) == "42"
+
+    def test_validated_datasets( self ):
+        self.options_xml = '''<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>'''
+        try:
+            self.param.from_json( model.HistoryDatasetAssociation(), self.trans, { "input_bam": None } )
+        except ValueError as err:
+            assert str(err) == "Parameter my_name requires a value, but has no legal values defined."
+            return
+        assert False
+
+    def test_unvalidated_datasets( self ):
+        self.options_xml = '''<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>'''
+        self.trans.workflow_building_mode = True
+        assert isinstance( self.param.from_json( model.HistoryDatasetAssociation(), self.trans, { "input_bam": basic.RuntimeValue() } ), model.HistoryDatasetAssociation )
+
+    def test_filter_param_value( self ):
+        self.options_xml = '''<options from_data_table="test_table"><filter type="param_value" ref="input_bam" column="0" /></options>'''
+        assert ("testname1", "testpath1", False) in self.param.get_options( self.trans, { "input_bam": "testname1" } )
+        assert ("testname2", "testpath2", False) in self.param.get_options( self.trans, { "input_bam": "testname2" } )
+        assert len( self.param.get_options( self.trans, { "input_bam": "testname3" } ) ) == 0
+
+    def test_filter_param_value2( self ):
+        # Same test as above, but filtering on a different column.
+        self.options_xml = '''<options from_data_table="test_table"><filter type="param_value" ref="input_bam" column="1" /></options>'''
+        assert ("testname1", "testpath1", False) in self.param.get_options( self.trans, { "input_bam": "testpath1" } )
+        assert ("testname2", "testpath2", False) in self.param.get_options( self.trans, { "input_bam": "testpath2" } )
+        assert len( self.param.get_options( self.trans, { "input_bam": "testpath3" } ) ) == 0
+
+    # TODO: Good deal of overlap here with DataToolParameterTestCase,
+    # refactor.
+    def setUp( self ):
+        super(SelectToolParameterTestCase, self).setUp()
+        self.test_history = model.History()
+        self.app.model.context.add( self.test_history )
+        self.app.model.context.flush()
+        self.app.tool_data_tables[ "test_table" ] = MockToolDataTable()
+        self.trans = bunch.Bunch(
+            app=self.app,
+            get_history=lambda: self.test_history,
+            get_current_user_roles=lambda: [],
+            workflow_building_mode=False,
+            webapp=bunch.Bunch( name="galaxy" ),
+        )
+        self.type = "select"
+        self.set_data_ref = False
+        self.multiple = False
+        self.optional = False
+        self.options_xml = ""
+        self._param = None
+
+    @property
+    def param( self ):
+        if not self._param:
+            multi_text = ""
+            if self.multiple:
+                multi_text = 'multiple="True"'
+            optional_text = ""
+            if self.optional:
+                optional_text = 'optional="True"'
+            options_text = self.options_xml
+            data_ref_text = ""
+            if self.set_data_ref:
+                data_ref_text = 'data_ref="input_bam"'
+            template_xml = '''<param name="my_name" type="%s" %s %s %s>%s</param>'''
+            param_str = template_xml % ( self.type, data_ref_text, multi_text, optional_text, options_text )
+            self._param = self._parameter_for( xml=param_str )
+
+        return self._param
+
+
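+# Minimal stand-in for a tool data table: two columns (name at index 0,
+# value at index 1) and two static rows for the param_value filter tests.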
+class MockToolDataTable( object ):
+
+    def __init__( self ):
+        self.columns = dict(
+            name=0,
+            value=1,
+        )
+        self.missing_index_file = None
+
+    def get_fields( self ):
+        return [ [ "testname1", "testpath1" ], [ "testname2", "testpath2" ] ]
diff --git a/test/unit/tools/test_tool_dependency_description.py b/test/unit/tools/test_tool_dependency_description.py
new file mode 100644
index 0000000..a9e75bd
--- /dev/null
+++ b/test/unit/tools/test_tool_dependency_description.py
@@ -0,0 +1,45 @@
+from galaxy.model import tool_shed_install
+from galaxy.tools.deps import (
+    dependencies,
+    requirements
+)
+
+
+def test_serialization():
+    repository = tool_shed_install.ToolShedRepository(
+        owner="devteam",
+        name="tophat",
+        installed_changeset_revision="abcdefghijk",
+    )
+    dependency = tool_shed_install.ToolDependency(
+        name="tophat",
+        version="2.0",
+        type="package",
+        status=tool_shed_install.ToolDependency.installation_status.INSTALLED,
+    )
+    dependency.tool_shed_repository = repository
+    tool_requirement = requirements.ToolRequirement(
+        name="tophat",
+        version="2.0",
+        type="package",
+    )
+    descript = dependencies.DependenciesDescription(
+        requirements=[tool_requirement],
+        installed_tool_dependencies=[dependency],
+    )
+    result_descript = dependencies.DependenciesDescription.from_dict(
+        descript.to_dict()
+    )
+    result_requirement = result_descript.requirements[0]
+    assert result_requirement.name == "tophat"
+    assert result_requirement.version == "2.0"
+    assert result_requirement.type == "package"
+
+    result_tool_shed_dependency = result_descript.installed_tool_dependencies[0]
+    assert result_tool_shed_dependency.name == "tophat"
+    assert result_tool_shed_dependency.version == "2.0"
+    assert result_tool_shed_dependency.type == "package"
+    result_tool_shed_repository = result_tool_shed_dependency.tool_shed_repository
+    assert result_tool_shed_repository.name == "tophat"
+    assert result_tool_shed_repository.owner == "devteam"
+    assert result_tool_shed_repository.installed_changeset_revision == "abcdefghijk"
diff --git a/test/unit/tools/test_tool_deps.py b/test/unit/tools/test_tool_deps.py
new file mode 100644
index 0000000..d8ef0a0
--- /dev/null
+++ b/test/unit/tools/test_tool_deps.py
@@ -0,0 +1,387 @@
+import os.path
+import tempfile
+
+from contextlib import contextmanager
+from os import (
+    chmod,
+    environ,
+    makedirs,
+    stat,
+    symlink,
+)
+from shutil import rmtree
+from stat import S_IXUSR
+from subprocess import PIPE, Popen
+
+from galaxy.tools.deps import DependencyManager
+from galaxy.tools.deps.resolvers import NullDependency
+from galaxy.tools.deps.resolvers.galaxy_packages import GalaxyPackageDependency
+from galaxy.tools.deps.resolvers.modules import ModuleDependency, ModuleDependencyResolver
+from galaxy.util.bunch import Bunch
+
+
+def test_tool_dependencies():
+    # Setup directories
+
+    with __test_base_path() as base_path:
+        for name, version, sub in [ ( "dep1", "1.0", "env.sh" ), ( "dep1", "2.0", "bin" ), ( "dep2", "1.0", None ) ]:
+            if sub == "bin":
+                p = os.path.join( base_path, name, version, "bin" )
+            else:
+                p = os.path.join( base_path, name, version )
+            try:
+                makedirs( p )
+            except OSError:
+                # Directory may already exist from a previous iteration.
+                pass
+            if sub == "env.sh":
+                __touch( os.path.join( p, "env.sh" ) )
+
+        dm = DependencyManager( default_base_path=base_path )
+        dependency = dm.find_dep( "dep1", "1.0" )
+        assert dependency.script == os.path.join( base_path, 'dep1', '1.0', 'env.sh' )
+        assert dependency.path == os.path.join( base_path, 'dep1', '1.0' )
+        assert dependency.version == "1.0"
+        dependency = dm.find_dep( "dep1", "2.0" )
+        assert dependency.script is None
+        assert dependency.path == os.path.join( base_path, 'dep1', '2.0' )
+        assert dependency.version == "2.0"
+
+        # Test default versions
+        symlink( os.path.join( base_path, 'dep1', '2.0'), os.path.join( base_path, 'dep1', 'default' ) )
+        dependency = dm.find_dep( "dep1", None )
+        assert dependency.version == "2.0"
+
+        # Test that resolution falls back on the default package dependency
+        # when using the default resolver.
+        dependency = dm.find_dep( "dep1", "2.1" )
+        assert dependency.version == "2.0"  # 2.0 is defined as default_version
+
+
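+# Shared constants describing a fake installed tool shed repository.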
+TEST_REPO_USER = "devteam"
+TEST_REPO_NAME = "bwa"
+TEST_REPO_CHANGESET = "12abcd41223da"
+TEST_VERSION = "0.5.9"
+
+
+def test_toolshed_set_environment_requirements():
+    with __test_base_path() as base_path:
+        test_repo = __build_test_repo('set_environment')
+        dm = DependencyManager( default_base_path=base_path )
+        env_settings_dir = os.path.join(base_path, "environment_settings", TEST_REPO_NAME, TEST_REPO_USER, TEST_REPO_NAME, TEST_REPO_CHANGESET)
+        os.makedirs(env_settings_dir)
+        dependency = dm.find_dep( TEST_REPO_NAME, version=None, type='set_environment', installed_tool_dependencies=[test_repo] )
+        assert dependency.version is None
+        assert dependency.script == os.path.join(env_settings_dir, "env.sh")
+
+
+def test_toolshed_package_requirements():
+    with __test_base_path() as base_path:
+        test_repo = __build_test_repo('package', version=TEST_VERSION)
+        dm = DependencyManager( default_base_path=base_path )
+        package_dir = __build_ts_test_package(base_path)
+        dependency = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
+        assert dependency.version == TEST_VERSION
+        assert dependency.script == os.path.join(package_dir, "env.sh")
+
+
+def test_toolshed_tools_fallback_on_manual_dependencies():
+    with __test_base_path() as base_path:
+        dm = DependencyManager( default_base_path=base_path )
+        test_repo = __build_test_repo('package', version=TEST_VERSION)
+        env_path = __setup_galaxy_package_dep(base_path, "dep1", "1.0")
+        dependency = dm.find_dep( "dep1", version="1.0", type='package', installed_tool_dependencies=[test_repo] )
+        assert dependency.version == "1.0"
+        assert dependency.script == env_path
+
+
+def test_toolshed_greater_precedence():
+    with __test_base_path() as base_path:
+        dm = DependencyManager( default_base_path=base_path )
+        test_repo = __build_test_repo('package', version=TEST_VERSION)
+        ts_package_dir = __build_ts_test_package(base_path)
+        gx_env_path = __setup_galaxy_package_dep(base_path, TEST_REPO_NAME, TEST_VERSION)
+        ts_env_path = os.path.join(ts_package_dir, "env.sh")
+        dependency = dm.find_dep( TEST_REPO_NAME, version=TEST_VERSION, type='package', installed_tool_dependencies=[test_repo] )
+        assert dependency.script != gx_env_path  # Not the Galaxy path - the tool shed path should be used.
+        assert dependency.script == ts_env_path
+
+
+def __build_ts_test_package(base_path, script_contents=''):
+    package_dir = os.path.join(base_path, TEST_REPO_NAME, TEST_VERSION, TEST_REPO_USER, TEST_REPO_NAME, TEST_REPO_CHANGESET)
+    __touch(os.path.join(package_dir, 'env.sh'), script_contents)
+    return package_dir
+
+
+def test_module_dependency_resolver():
+    with __test_base_path() as temp_directory:
+        module_script = os.path.join(temp_directory, "modulecmd")
+        __write_script(module_script, '''#!/bin/sh
+cat %s/example_output 1>&2;
+''' % temp_directory)
+        with open(os.path.join(temp_directory, "example_output"), "w") as f:
+            # Subset of module avail from MSI cluster.
+            f.write('''
+-------------------------- /soft/modules/modulefiles ---------------------------
+JAGS/3.2.0-gcc45
+JAGS/3.3.0-gcc4.7.2
+ProbABEL/0.1-3
+ProbABEL/0.1-9e
+R/2.12.2
+R/2.13.1
+R/2.14.1
+R/2.15.0
+R/2.15.1
+R/3.0.1(default)
+abokia-blast/2.0.2-130524/ompi_intel
+abokia-blast/2.0.2-130630/ompi_intel
+
+--------------------------- /soft/intel/modulefiles ----------------------------
+advisor/2013/update1    intel/11.1.075          mkl/10.2.1.017
+advisor/2013/update2    intel/11.1.080          mkl/10.2.5.035
+advisor/2013/update3    intel/12.0              mkl/10.2.7.041
+''')
+        resolver = ModuleDependencyResolver(None, modulecmd=module_script)
+        module = resolver.resolve( name="R", version=None, type="package" )
+        assert module.module_name == "R"
+        assert module.module_version is None
+
+        module = resolver.resolve( name="R", version="3.0.1", type="package" )
+        assert module.module_name == "R"
+        assert module.module_version == "3.0.1"
+
+        module = resolver.resolve( name="R", version="3.0.4", type="package" )
+        assert isinstance(module, NullDependency)
+
+
+def test_module_dependency():
+    with __test_base_path() as temp_directory:
+        # Create mock modulecmd script that just exports a variable
+        # the way modulecmd sh load would, but also validate correct
+        # module name and version are coming through.
+        mock_modulecmd = os.path.join(temp_directory, 'modulecmd')
+        __write_script(mock_modulecmd, '''#!/bin/sh
+if [ "$3" != "foomodule/1.0" ];
+then
+    exit 1
+fi
+echo 'FOO="bar"'
+''')
+        resolver = Bunch(modulecmd=mock_modulecmd, modulepath='/something')
+        dependency = ModuleDependency(resolver, "foomodule", "1.0")
+        __assert_foo_exported( dependency.shell_commands( Bunch( type="package" ) ) )
+
+
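+# Write a shell script to disk and mark it executable by the owner.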
+def __write_script(path, contents):
+    with open(path, 'w') as f:
+        f.write(contents)
+    st = stat(path)
+    chmod(path, st.st_mode | S_IXUSR)
+
+
+def test_galaxy_dependency_object_script():
+    with __test_base_path() as base_path:
+        # Create env.sh file that just exports variable FOO and verify it
+        # shell_commands export it correctly.
+        env_path = __setup_galaxy_package_dep(base_path, TEST_REPO_NAME, TEST_VERSION, "export FOO=\"bar\"")
+        dependency = GalaxyPackageDependency(env_path, os.path.dirname(env_path), TEST_REPO_NAME, TEST_VERSION)
+        __assert_foo_exported( dependency.shell_commands( Bunch( type="package" ) ) )
+
+
+def test_shell_commands_built():
+    # Test that dependency manager builds valid shell commands for a list of
+    # requirements.
+    with __test_base_path() as base_path:
+        dm = DependencyManager( default_base_path=base_path )
+        __setup_galaxy_package_dep( base_path, TEST_REPO_NAME, TEST_VERSION, contents="export FOO=\"bar\"" )
+        mock_requirements = [ Bunch(type="package", version=TEST_VERSION, name=TEST_REPO_NAME ) ]
+        commands = dm.dependency_shell_commands( mock_requirements )
+        __assert_foo_exported( commands )
+
+
+def __assert_foo_exported( commands ):
+    command = ["bash", "-c", "%s; echo \"$FOO\"" % "".join(commands)]
+    process = Popen(command, stdout=PIPE)
+    output = process.communicate()[0].strip()
+    assert output == 'bar', "Command %s exports FOO as %s, not bar" % (command, output)
+
+
+def __setup_galaxy_package_dep(base_path, name, version, contents=""):
+    dep_directory = os.path.join( base_path, name, version )
+    env_path = os.path.join( dep_directory, "env.sh" )
+    __touch( env_path, contents )
+    return env_path
+
+
+def __touch( fname, data=None ):
+    dirname = os.path.dirname( fname )
+    if not os.path.exists( dirname ):
+        makedirs( dirname )
+    with open( fname, 'w' ) as f:
+        if data:
+            f.write( data )
+
+
+def __build_test_repo(type, version=None):
+    return Bunch(
+        owner=TEST_REPO_USER,
+        name=TEST_REPO_NAME,
+        type=type,
+        version=version,
+        tool_shed_repository=Bunch(
+            owner=TEST_REPO_USER,
+            name=TEST_REPO_NAME,
+            installed_changeset_revision=TEST_REPO_CHANGESET
+        )
+    )
+
+
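+# Yield a scratch directory that is removed when the test completes.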
+@contextmanager
+def __test_base_path():
+    base_path = tempfile.mkdtemp()
+    try:
+        yield base_path
+    finally:
+        rmtree(base_path)
+
+
+def test_parse():
+    with __parse_resolvers('''<dependency_resolvers>
+  <tool_shed_packages />
+  <galaxy_packages />
+</dependency_resolvers>
+''') as dependency_resolvers:
+        assert 'ToolShed' in dependency_resolvers[0].__class__.__name__
+        assert 'Galaxy' in dependency_resolvers[1].__class__.__name__
+
+    with __parse_resolvers('''<dependency_resolvers>
+  <galaxy_packages />
+  <tool_shed_packages />
+</dependency_resolvers>
+''') as dependency_resolvers:
+        assert 'Galaxy' in dependency_resolvers[0].__class__.__name__
+        assert 'ToolShed' in dependency_resolvers[1].__class__.__name__
+
+    with __parse_resolvers('''<dependency_resolvers>
+  <galaxy_packages />
+  <tool_shed_packages />
+  <galaxy_packages versionless="true" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+        assert not dependency_resolvers[0].versionless
+        assert dependency_resolvers[2].versionless
+
+    with __parse_resolvers('''<dependency_resolvers>
+  <galaxy_packages />
+  <tool_shed_packages />
+  <galaxy_packages base_path="/opt/galaxy/legacy/"/>
+</dependency_resolvers>
+''') as dependency_resolvers:
+        # Resolvers without an explicit base_path share the default base path.
+        assert dependency_resolvers[0].base_path == dependency_resolvers[1].base_path
+        # Can specify custom base path...
+        assert dependency_resolvers[2].base_path == "/opt/galaxy/legacy"
+        # ... that is different from the default.
+        assert dependency_resolvers[0].base_path != dependency_resolvers[2].base_path
+
+
+def test_uses_tool_shed_dependencies():
+    with __dependency_manager('''<dependency_resolvers>
+  <galaxy_packages />
+</dependency_resolvers>
+''') as dm:
+        assert not dm.uses_tool_shed_dependencies()
+
+    with __dependency_manager('''<dependency_resolvers>
+  <tool_shed_packages />
+</dependency_resolvers>
+''') as dm:
+        assert dm.uses_tool_shed_dependencies()
+
+
+def test_config_module_defaults():
+    with __parse_resolvers('''<dependency_resolvers>
+  <modules prefetch="false" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+        module_resolver = dependency_resolvers[0]
+        assert module_resolver.module_checker.__class__.__name__ == "AvailModuleChecker"
+
+
+def test_config_modulepath():
+    # Test that an explicit modulepath attribute is read and split on ':'.
+    with __parse_resolvers('''<dependency_resolvers>
+  <modules find_by="directory" modulepath="/opt/modules/modulefiles:/usr/local/modules/modulefiles" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+        assert dependency_resolvers[0].module_checker.directories == ["/opt/modules/modulefiles", "/usr/local/modules/modulefiles"]
+
+
+def test_config_MODULEPATH():
+    # Test reads and splits MODULEPATH if modulepath is not specified.
+    with __environ({"MODULEPATH": "/opt/modules/modulefiles:/usr/local/modules/modulefiles"}):
+        with __parse_resolvers('''<dependency_resolvers>
+  <modules find_by="directory" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+            assert dependency_resolvers[0].module_checker.directories == ["/opt/modules/modulefiles", "/usr/local/modules/modulefiles"]
+
+
+def test_config_MODULESHOME():
+    # Test falling back to MODULESHOME when neither the modulepath attribute
+    # nor the MODULEPATH environment variable is set.
+    with __environ({"MODULESHOME": "/opt/modules"}, remove="MODULEPATH"):
+        with __parse_resolvers('''<dependency_resolvers>
+  <modules find_by="directory" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+            assert dependency_resolvers[0].module_checker.directories == ["/opt/modules/modulefiles"]
+
+
+def test_config_module_directory_searcher():
+    with __parse_resolvers('''<dependency_resolvers>
+  <modules find_by="directory" modulepath="/opt/Modules/modulefiles" />
+</dependency_resolvers>
+''') as dependency_resolvers:
+        module_resolver = dependency_resolvers[0]
+        assert module_resolver.module_checker.directories == ["/opt/Modules/modulefiles"]
+
+
+@contextmanager
+def __environ(values, remove=[]):
+    """
+    Modify the environment for a test, adding/updating values in dict `values` and
+    removing any environment variables mentioned in list `remove`.
+    """
+    # Track keys that did not exist before so they can be deleted on exit.
+    new_keys = set(values.keys()) - set(environ.keys())
+    old_environ = environ.copy()
+    try:
+        environ.update(values)
+        for to_remove in remove:
+            try:
+                del environ[to_remove]
+            except KeyError:
+                pass
+        yield
+    finally:
+        environ.update(old_environ)
+        for key in new_keys:
+            del environ[key]
+
+
+@contextmanager
+def __parse_resolvers(xml_content):
+    with __dependency_manager(xml_content) as dm:
+        yield dm.dependency_resolvers
+
+
+@contextmanager
+def __dependency_manager(xml_content):
+    with __test_base_path() as base_path:
+        f = tempfile.NamedTemporaryFile(mode="w")
+        f.write(xml_content)
+        f.flush()
+        dm = DependencyManager( default_base_path=base_path, conf_file=f.name )
+        yield dm
diff --git a/test/unit/tools/test_tool_external_files.py b/test/unit/tools/test_tool_external_files.py
new file mode 100644
index 0000000..b013f4e
--- /dev/null
+++ b/test/unit/tools/test_tool_external_files.py
@@ -0,0 +1,31 @@
+""" Unit test logic related to finding externally referenced files in tool
+descriptions.
+"""
+import os
+import shutil
+import tempfile
+
+from galaxy.tools import Tool
+
+
+def test_finds_external_code_file():
+    assert __external_files("""<tool><code file="foo.py" /></tool>""") == ["foo.py"]
+
+
+def test_skips_empty_code_file_attribute():
+    assert __external_files("""<tool><code /></tool>""") == []
+
+
+def test_finds_external_macro_file():
+    assert __external_files("""<tool><macros><import>cool_macros.xml</import></macros></tool>""") == ["cool_macros.xml"]
+
+
+def __external_files(contents):
+    base_path = tempfile.mkdtemp()
+    try:
+        tool_path = os.path.join(base_path, "tool.xml")
+        with open(tool_path, "w") as f:
+            f.write(contents)
+        return Tool.get_externally_referenced_paths(tool_path)
+    finally:
+        shutil.rmtree(base_path)
diff --git a/test/unit/tools/test_tool_loader.py b/test/unit/tools/test_tool_loader.py
new file mode 100644
index 0000000..f7cbb30
--- /dev/null
+++ b/test/unit/tools/test_tool_loader.py
@@ -0,0 +1,272 @@
+import os
+
+from shutil import rmtree
+from tempfile import mkdtemp
+
+from galaxy.tools.loader import load_tool, template_macro_params
+from galaxy.util import parse_xml
+
+
+def test_loader():
+
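+    # Helper that writes tool XML files into a temp directory and loads them
+    # back, with or without macro preprocessing.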
+    class TestToolDirectory(object):
+        def __init__(self):
+            self.temp_directory = mkdtemp()
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, type, value, tb):
+            rmtree(self.temp_directory)
+
+        def write(self, contents, name="tool.xml"):
+            with open(os.path.join(self.temp_directory, name), "w") as f:
+                f.write(contents)
+
+        def load(self, name="tool.xml", preprocess=True):
+            if preprocess:
+                loader = load_tool
+            else:
+                loader = parse_xml
+            return loader(os.path.join(self.temp_directory, name))
+
+    # Test simple macro replacement.
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <expand macro="inputs" />
+    <macros>
+        <macro name="inputs">
+            <inputs />
+        </macro>
+    </macros>
+</tool>''')
+        xml = tool_dir.load(preprocess=False)
+        assert xml.find("inputs") is None
+        xml = tool_dir.load(preprocess=True)
+        assert xml.find("inputs") is not None
+
+    # Test importing macros from external files
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <expand macro="inputs" />
+    <macros>
+        <import>external.xml</import>
+    </macros>
+</tool>''')
+
+        tool_dir.write('''
+<macros>
+    <macro name="inputs">
+        <inputs />
+    </macro>
+</macros>''', name="external.xml")
+        xml = tool_dir.load(preprocess=False)
+        assert xml.find("inputs") is None
+        xml = tool_dir.load(preprocess=True)
+        assert xml.find("inputs") is not None
+
+    # Test macros with unnamed yield statements.
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <expand macro="inputs">
+        <input name="first_input" />
+    </expand>
+    <macros>
+        <macro name="inputs">
+            <inputs>
+                <yield />
+            </inputs>
+        </macro>
+    </macros>
+</tool>''')
+        xml = tool_dir.load()
+        assert xml.find("inputs").find("input").get("name") == "first_input"
+
+    # Test recursive macro applications.
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <expand macro="inputs">
+        <input name="first_input" />
+        <expand macro="second" />
+    </expand>
+    <macros>
+        <macro name="inputs">
+            <inputs>
+                <yield />
+                <input name="third_input" />
+            </inputs>
+        </macro>
+        <macro name="second">
+            <input name="second_input" />
+        </macro>
+    </macros>
+</tool>''')
+        xml = tool_dir.load()
+        assert xml.find("inputs").findall("input")[0].get("name") == "first_input"
+        assert xml.find("inputs").findall("input")[1].get("name") == "second_input"
+        assert xml.find("inputs").findall("input")[2].get("name") == "third_input"
+
+    # Test nested macro expansion (a macro that expands another macro).
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <expand macro="inputs">
+        <input name="first_input" />
+        <expand macro="second" />
+    </expand>
+    <macros>
+        <macro name="inputs">
+            <inputs>
+                <yield />
+            </inputs>
+        </macro>
+        <macro name="second">
+            <expand macro="second_delegate" />
+            <input name="third_input" />
+        </macro>
+        <macro name="second_delegate">
+            <input name="second_input" />
+        </macro>
+    </macros>
+</tool>''')
+        xml = tool_dir.load()
+        assert xml.find("inputs").findall("input")[0].get("name") == "first_input"
+        assert xml.find("inputs").findall("input")[1].get("name") == "second_input"
+        assert xml.find("inputs").findall("input")[2].get("name") == "third_input"
+
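+    # Regression test for macro yield handling inside <inputs> (issue 647, per the tool id).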
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool id="issue_647">
+    <macros>
+        <macro name="a">
+            <param name="a1" type="text" value="a1" label="a1"/>
+            <yield />
+        </macro>
+    </macros>
+    <inputs>
+        <expand macro="a">
+            <param name="b" type="text" value="b" label="b" />
+        </expand>
+    </inputs>
+</tool>''')
+        xml = tool_dir.load()
+        assert xml.find("inputs").findall("param")[0].get("name") == "a1"
+        assert xml.find("inputs").findall("param")[1].get("name") == "b"
+
+    # Test <xml> is shortcut for macro type="xml"
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <expand macro="inputs" />
+    <macros>
+        <xml name="inputs">
+            <inputs />
+        </xml>
+    </macros>
+</tool>''')
+        xml = tool_dir.load()
+        assert xml.find("inputs") is not None
+
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <command interpreter="python">tool_wrapper.py
+    #include source=$tool_params
+    </command>
+    <macros>
+        <template name="tool_params">-a 1 -b 2</template>
+    </macros>
+</tool>
+''')
+        xml = tool_dir.load()
+        params_dict = template_macro_params(xml.getroot())
+        assert params_dict['tool_params'] == "-a 1 -b 2"
+
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <macros>
+        <token name="@CITATION@">The citation.</token>
+    </macros>
+    <help>@CITATION@</help>
+    <another>
+        <tag />
+    </another>
+</tool>
+''')
+        xml = tool_dir.load()
+        help_el = xml.find("help")
+        assert help_el.text == "The citation.", help_el.text
+
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <macros>
+        <token name="@TAG_VAL@">The value.</token>
+    </macros>
+    <another>
+        <tag value="@TAG_VAL@" />
+    </another>
+</tool>
+''')
+        xml = tool_dir.load()
+        tag_el = xml.find("another").find("tag")
+        value = tag_el.get('value')
+        assert value == "The value.", value
+
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <macros>
+        <token name="@TAG_VAL@"><![CDATA[]]></token>
+    </macros>
+    <another>
+        <tag value="@TAG_VAL@" />
+    </another>
+</tool>
+''')
+        xml = tool_dir.load()
+        tag_el = xml.find("another").find("tag")
+        value = tag_el.get('value')
+        assert value == "", value
+
+    # Test XML macros with $$ token expansions in attributes.
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <expand macro="inputs" bar="hello" />
+    <macros>
+        <xml name="inputs" tokens="bar" token_quote="$$">
+            <inputs type="the type is $$BAR$$" />
+        </xml>
+    </macros>
+</tool>
+''')
+        xml = tool_dir.load()
+        input_els = xml.findall("inputs")
+        assert len(input_els) == 1
+        assert input_els[0].attrib["type"] == "the type is hello"
+
+    # Test XML macros with @ token expansions in text.
+    with TestToolDirectory() as tool_dir:
+        tool_dir.write('''
+<tool>
+    <expand macro="inputs" foo="hello" />
+    <expand macro="inputs" foo="world" />
+    <expand macro="inputs" />
+    <macros>
+        <xml name="inputs" token_foo="the_default">
+            <inputs>@FOO@</inputs>
+        </xml>
+    </macros>
+</tool>
+''')
+        xml = tool_dir.load()
+        input_els = xml.findall("inputs")
+        assert len(input_els) == 3
+        assert input_els[0].text == "hello"
+        assert input_els[1].text == "world"
+        assert input_els[2].text == "the_default"
diff --git a/test/unit/tools/test_tool_panel.py b/test/unit/tools/test_tool_panel.py
new file mode 100644
index 0000000..4c1d3ae
--- /dev/null
+++ b/test/unit/tools/test_tool_panel.py
@@ -0,0 +1,27 @@
+from xml.etree import ElementTree as ET
+
+from galaxy.tools.toolbox import ToolSection
+
+
+def test_tool_section( ):
+    elem = ET.Element( 'section' )
+    elem.attrib[ 'name' ] = "Cool Tools"
+    elem.attrib[ 'id' ] = "cool1"
+
+    section = ToolSection( elem )
+    assert section.id == "cool1"
+    assert section.name == "Cool Tools"
+    assert section.version == ""
+
+    section = ToolSection( dict(
+        id="cool1",
+        name="Cool Tools"
+    ) )
+    assert section.id == "cool1"
+    assert section.name == "Cool Tools"
+    assert section.version == ""
+
+    section = ToolSection()
+    assert section.id == ""
+    assert section.name == ""
+    assert section.version == ""
diff --git a/test/unit/tools/test_toolbox.py b/test/unit/tools/test_toolbox.py
new file mode 100644
index 0000000..6e9a272
--- /dev/null
+++ b/test/unit/tools/test_toolbox.py
@@ -0,0 +1,448 @@
+import json
+import os
+import string
+import unittest
+
+import routes
+from six import string_types
+
+import tools_support
+from galaxy import model
+from galaxy.model import tool_shed_install
+from galaxy.model.tool_shed_install import mapping
+from galaxy.tools import ToolBox
+from galaxy.tools.toolbox.lineages.tool_shed import ToolVersionCache
+from galaxy.tools.toolbox.watcher import get_tool_conf_watcher
+
+from .test_toolbox_filters import mock_trans
+
+
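+# Template for a shed tool conf entry; ${version} is substituted to simulate
+# two installed revisions of the same tool shed tool.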
+CONFIG_TEST_TOOL_VERSION_TEMPLATE = string.Template(
+    """    <tool file="tool.xml" guid="github.com/galaxyproject/example/test_tool/0.${version}">
+            <tool_shed>github.com</tool_shed>
+            <repository_name>example</repository_name>
+            <repository_owner>galaxyproject</repository_owner>
+            <installed_changeset_revision>${version}</installed_changeset_revision>
+            <id>github.com/galaxyproject/example/test_tool/0.${version}</id>
+            <version>0.${version}</version>
+        </tool>
+    """
+)
+CONFIG_TEST_TOOL_VERSION_1 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute( dict( version="1" ) )
+CONFIG_TEST_TOOL_VERSION_2 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute( dict( version="2" ) )
+
+
+class BaseToolBoxTestCase( unittest.TestCase, tools_support.UsesApp, tools_support.UsesTools ):
+
+    @property
+    def integrated_tool_panel_path( self ):
+        return os.path.join(self.test_directory, "integrated_tool_panel.xml")
+
+    def assert_integrated_tool_panel( self, exists=True ):
+        does_exist = os.path.exists( self.integrated_tool_panel_path )
+        if exists:
+            assert does_exist
+        else:
+            assert not does_exist
+
+    @property
+    def toolbox( self ):
+        if self.__toolbox is None:
+            self.__toolbox = SimplifiedToolBox( self )
+            # wire app with this new toolbox
+            self.app.toolbox = self.__toolbox
+        return self.__toolbox
+
+    def setUp( self ):
+        self.reindexed = False
+        self.setup_app( mock_model=False )
+        install_model = mapping.init( "sqlite:///:memory:", create_tables=True )
+        self.app.install_model = install_model
+        self.app.reindex_tool_search = self.__reindex
+        itp_config = os.path.join(self.test_directory, "integrated_tool_panel.xml")
+        self.app.config.integrated_tool_panel_config = itp_config
+        self.__toolbox = None
+        self.config_files = []
+
+    def _repo_install( self, changeset ):
+        repository = tool_shed_install.ToolShedRepository()
+        repository.tool_shed = "github.com"
+        repository.owner = "galaxyproject"
+        repository.name = "example"
+        repository.changeset_revision = changeset
+        repository.installed_changeset_revision = changeset
+        repository.deleted = False
+        repository.uninstalled = False
+        self.app.install_model.context.add( repository )
+        self.app.install_model.context.flush( )
+        self.app.tool_version_cache = ToolVersionCache(self.app)
+        return repository
+
+    def _setup_two_versions( self ):
+        repository1 = self._repo_install( changeset="1" )
+        version1 = tool_shed_install.ToolVersion()
+        version1.tool_id = "github.com/galaxyproject/example/test_tool/0.1"
+        version1.repository = repository1
+        self.app.install_model.context.add( version1 )
+        self.app.install_model.context.flush( )
+
+        repository2 = self._repo_install( changeset="2" )
+        version2 = tool_shed_install.ToolVersion()
+        version2.tool_id = "github.com/galaxyproject/example/test_tool/0.2"
+        version2.repository = repository2
+
+        self.app.install_model.context.add( version2 )
+        self.app.install_model.context.flush( )
+
+        version_association = tool_shed_install.ToolVersionAssociation()
+        version_association.parent_id = version1.id
+        version_association.tool_id = version2.id
+
+        self.app.install_model.context.add( version_association )
+        self.app.install_model.context.flush( )
+        self.app.tool_version_cache = ToolVersionCache(self.app)
+
+    def _setup_two_versions_in_config( self, section=False ):
+        if section:
+            template = """<toolbox tool_path="%s">
+<section id="tid" name="TID" version="">
+    %s
+</section>
+<section id="tid" name="TID" version="">
+    %s
+</section>
+</toolbox>"""
+        else:
+            template = """<toolbox tool_path="%s">
+%s
+%s
+</toolbox>"""
+        self._add_config( template % (self.test_directory, CONFIG_TEST_TOOL_VERSION_1, CONFIG_TEST_TOOL_VERSION_2 ) )
+
+    def _add_config( self, content, name="tool_conf.xml" ):
+        is_json = name.endswith(".json")
+        path = self._tool_conf_path( name=name )
+        with open( path, "w" ) as f:
+            if not is_json or isinstance(content, string_types):
+                f.write( content )
+            else:
+                json.dump(content, f)
+        self.config_files.append( path )
+
+    def _tool_conf_path( self, name="tool_conf.xml" ):
+        path = os.path.join( self.test_directory, name )
+        return path
+
+    def _tool_path( self, name="tool.xml" ):
+        path = os.path.join( self.test_directory, name )
+        return path
+
+    def __reindex( self ):
+        self.reindexed = True
+
+
+class ToolBoxTestCase( BaseToolBoxTestCase ):
+
+    def test_load_file( self ):
+        self._init_tool()
+        self._add_config( """<toolbox><tool file="tool.xml" /></toolbox>""" )
+
+        toolbox = self.toolbox
+        assert toolbox.get_tool( "test_tool" ) is not None
+        assert toolbox.get_tool( "not_a_test_tool" ) is None
+
+    def test_to_dict_in_panel( self ):
+        for json_conf in [True, False]:
+            self._init_tool_in_section(json=json_conf)
+            mapper = routes.Mapper()
+            mapper.connect( "tool_runner", "/test/tool_runner" )
+            as_dict = self.toolbox.to_dict( mock_trans() )
+            test_section = self._find_section(as_dict, "t")
+            assert len(test_section["elems"]) == 1
+            assert test_section["elems"][0]["id"] == "test_tool"
+
+    def test_to_dict_out_of_panel( self ):
+        for json_conf in [True, False]:
+            self._init_tool_in_section(json=json_conf)
+            mapper = routes.Mapper()
+            mapper.connect( "tool_runner", "/test/tool_runner" )
+            as_dict = self.toolbox.to_dict( mock_trans(), in_panel=False )
+            assert as_dict[0]["id"] == "test_tool"
+
+    def test_out_of_panel_filtering( self ):
+        self._init_tool_in_section()
+
+        mapper = routes.Mapper()
+        mapper.connect( "tool_runner", "/test/tool_runner" )
+        as_dict = self.toolbox.to_dict( mock_trans(), in_panel=False )
+        assert len(as_dict) == 1
+
+        def allow_user_access(user, attempting_access):
+            assert not attempting_access
+            return False
+
+        # Disable access to the tool, make sure it is filtered out.
+        self.toolbox.get_tool("test_tool").allow_user_access = allow_user_access
+        as_dict = self.toolbox.to_dict( mock_trans(), in_panel=False )
+        assert len(as_dict) == 0
+
+    def _find_section( self, as_dict, section_id ):
+        for elem in as_dict:
+            if elem.get("id") == section_id:
+                assert elem["model_class"] == "ToolSection"
+                return elem
+        assert False, "Failed to find section with id [%s]" % section_id
+
+    def test_tool_shed_properties( self ):
+        self._init_tool()
+        self._setup_two_versions_in_config( section=False )
+        self._setup_two_versions()
+
+        test_tool = self.toolbox.get_tool( "test_tool" )
+        assert test_tool.tool_shed == "github.com"
+        assert test_tool.repository_owner == "galaxyproject"
+        assert test_tool.repository_name == "example"
+        # TODO: Not deterministic, but probably should be?
+        assert test_tool.installed_changeset_revision in ["1", "2"]
+
+    def test_tool_shed_properties_only_on_installed_tools( self ):
+        self._init_tool()
+        self._add_config( """<toolbox><tool file="tool.xml" /></toolbox>""" )
+        toolbox = self.toolbox
+        test_tool = toolbox.get_tool( "test_tool" )
+        assert test_tool.tool_shed is None
+        assert test_tool.repository_name is None
+        assert test_tool.repository_owner is None
+        assert test_tool.installed_changeset_revision is None
+
+    def test_load_file_in_section( self ):
+        self._init_tool_in_section()
+
+        toolbox = self.toolbox
+        assert toolbox.get_tool( "test_tool" ) is not None
+        assert toolbox.get_tool( "not_a_test_tool" ) is None
+
+    def test_writes_integrated_tool_panel( self ):
+        self._init_tool()
+        self._add_config( """<toolbox><tool file="tool.xml" /></toolbox>""" )
+
+        self.assert_integrated_tool_panel(exists=False)
+        self.toolbox  # Accessing the property builds the toolbox, which writes the panel file.
+        self.assert_integrated_tool_panel(exists=True)
+
+    def test_groups_tools_in_section( self ):
+        self._init_tool()
+        self._setup_two_versions_in_config( section=True )
+        self._setup_two_versions()
+        self.toolbox
+        self.__verify_two_test_tools( )
+
+        # Assert only newer version of the tool loaded into the panel.
+        section = self.toolbox._tool_panel["tid"]
+        assert len(section.elems) == 1
+        assert next(iter(section.elems.values())).id == "github.com/galaxyproject/example/test_tool/0.2"
+
+    def test_group_tools_out_of_section( self ):
+        self._init_tool()
+        self._setup_two_versions_in_config( section=False )
+        self._setup_two_versions()
+        self.__verify_two_test_tools( )
+
+        # Assert tools merged in tool panel.
+        assert len( self.toolbox._tool_panel ) == 1
+
+    def test_get_tool_id( self ):
+        self._init_tool()
+        self._setup_two_versions_in_config( )
+        self._setup_two_versions()
+        assert self.toolbox.get_tool_id( "test_tool" ) in [
+            "github.com/galaxyproject/example/test_tool/0.1",
+            "github.com/galaxyproject/example/test_tool/0.2"
+        ]
+        assert self.toolbox.get_tool_id( "github.com/galaxyproject/example/test_tool/0.1" ) == "github.com/galaxyproject/example/test_tool/0.1"
+        assert self.toolbox.get_tool_id( "github.com/galaxyproject/example/test_tool/0.2" ) == "github.com/galaxyproject/example/test_tool/0.2"
+        assert self.toolbox.get_tool_id( "github.com/galaxyproject/example/test_tool/0.3" ) is None
+
+    def test_tool_dir( self ):
+        self._init_tool()
+        self._add_config( """<toolbox><tool_dir dir="%s" /></toolbox>""" % self.test_directory )
+
+        toolbox = self.toolbox
+        assert toolbox.get_tool( "test_tool" ) is not None
+
+    def test_tool_dir_json( self ):
+        self._init_tool()
+        self._add_config({"items": [{"type": "tool_dir", "dir": self.test_directory}]}, name="tool_conf.json")
+
+        toolbox = self.toolbox
+        assert toolbox.get_tool( "test_tool" ) is not None
+
+    def test_workflow_in_panel( self ):
+        stored_workflow = self.__test_workflow()
+        encoded_id = self.app.security.encode_id( stored_workflow.id )
+        self._add_config( """<toolbox><workflow id="%s" /></toolbox>""" % encoded_id )
+        assert len( self.toolbox._tool_panel ) == 1
+        panel_workflow = next(iter(self.toolbox._tool_panel.values()))
+        assert panel_workflow == stored_workflow.latest_workflow
+        # TODO: test to_dict with workflows
+
+    def test_workflow_in_section( self ):
+        stored_workflow = self.__test_workflow()
+        encoded_id = self.app.security.encode_id( stored_workflow.id )
+        self._add_config( """<toolbox><section id="tid" name="TID"><workflow id="%s" /></section></toolbox>""" % encoded_id )
+        assert len( self.toolbox._tool_panel ) == 1
+        section = self.toolbox._tool_panel[ 'tid' ]
+        assert len( section.elems ) == 1
+        panel_workflow = next(iter(section.elems.values()))
+        assert panel_workflow == stored_workflow.latest_workflow
+
+    def test_label_in_panel( self ):
+        self._add_config( """<toolbox><label id="lab1" text="Label 1" /><label id="lab2" text="Label 2" /></toolbox>""" )
+        assert len( self.toolbox._tool_panel ) == 2
+        self.__check_test_labels( self.toolbox._tool_panel )
+
+    def test_label_in_section( self ):
+        self._add_config( """<toolbox><section id="tid" name="TID"><label id="lab1" text="Label 1" /><label id="lab2" text="Label 2" /></section></toolbox>""" )
+        assert len( self.toolbox._tool_panel ) == 1
+        section = self.toolbox._tool_panel[ 'tid' ]
+        self.__check_test_labels( section.elems )
+
+    def _init_tool_in_section( self, json=False ):
+        self._init_tool()
+        if not json:
+            self._add_config( """<toolbox><section id="t" name="test"><tool file="tool.xml" /></section></toolbox>""" )
+        else:
+            section = {
+                "type": "section",
+                "id": "t",
+                "name": "test",
+                "items": [{"type": "tool",
+                           "file": "tool.xml"}],
+            }
+            self._add_config({"items": [section]}, name="tool_conf.json")
+
+    def __check_test_labels( self, panel_dict ):
+        assert list(panel_dict.keys()) == ["label_lab1", "label_lab2"]
+        label1 = next(iter(panel_dict.values()))
+        assert label1.id == "lab1"
+        assert label1.text == "Label 1"
+
+        label2 = panel_dict[ "label_lab2" ]
+        assert label2.id == "lab2"
+        assert label2.text == "Label 2"
+
+    def __test_workflow( self ):
+        stored_workflow = model.StoredWorkflow()
+        workflow = model.Workflow()
+        workflow.stored_workflow = stored_workflow
+        stored_workflow.latest_workflow = workflow
+        user = model.User()
+        user.email = "test@example.com"
+        user.password = "passw0rD1"
+        stored_workflow.user = user
+        self.app.model.context.add( workflow )
+        self.app.model.context.add( stored_workflow )
+        self.app.model.context.flush()
+        return stored_workflow
+
+    def __verify_two_test_tools( self ):
+        # Assert tool versions of the tool with simple id 'test_tool'
+        all_versions = self.toolbox.get_tool( "test_tool", get_all_versions=True )
+        assert len( all_versions ) == 2
+
+        # Verify lineage_ids on both tools is correctly ordered.
+        for version in ["0.1", "0.2"]:
+            guid = "github.com/galaxyproject/example/test_tool/" + version
+            lineage_ids = self.toolbox.get_tool( guid ).lineage.get_version_ids()
+            assert lineage_ids[ 0 ] == "github.com/galaxyproject/example/test_tool/0.1"
+            assert lineage_ids[ 1 ] == "github.com/galaxyproject/example/test_tool/0.2"
+
+        # Test tool_version attribute.
+        assert self.toolbox.get_tool( "test_tool", tool_version="0.1" ).guid == "github.com/galaxyproject/example/test_tool/0.1"
+        assert self.toolbox.get_tool( "test_tool", tool_version="0.2" ).guid == "github.com/galaxyproject/example/test_tool/0.2"
+
+    def test_default_lineage( self ):
+        self.__init_versioned_tools()
+        self._add_config( """<toolbox><tool file="tool_v01.xml" /><tool file="tool_v02.xml" /></toolbox>""" )
+        self.__verify_get_tool_for_default_lineage()
+
+    def test_default_lineage_reversed( self ):
+        # Run same test as above but with entries in tool_conf reversed to
+        # ensure versioning is at work and not order effects.
+        self.__init_versioned_tools()
+        self._add_config( """<toolbox><tool file="tool_v02.xml" /><tool file="tool_v01.xml" /></toolbox>""" )
+        self.__verify_get_tool_for_default_lineage()
+
+    def test_grouping_with_default_lineage( self ):
+        self.__init_versioned_tools()
+        self._add_config( """<toolbox><tool file="tool_v01.xml" /><tool file="tool_v02.xml" /></toolbox>""" )
+        self.__verify_tool_panel_for_default_lineage()
+
+    def test_grouping_with_default_lineage_reversed( self ):
+        # Run same test as above but with entries in tool_conf reversed to
+        # ensure versioning is at work and not order effects.
+        self.__init_versioned_tools()
+        self._add_config( """<toolbox><tool file="tool_v02.xml" /><tool file="tool_v02.xml" /></toolbox>""" )
+        self.__verify_tool_panel_for_default_lineage()
+
+    def __init_versioned_tools( self ):
+        self._init_tool( filename="tool_v01.xml", version="0.1" )
+        self._init_tool( filename="tool_v02.xml", version="0.2" )
+
+    def __verify_tool_panel_for_default_lineage( self ):
+        assert len( self.toolbox._tool_panel ) == 1
+        tool = self.toolbox._tool_panel["tool_test_tool"]
+        assert tool.version == "0.2", tool.version
+        assert tool.id == "test_tool"
+
+    def __verify_get_tool_for_default_lineage( self ):
+        tool_v01 = self.toolbox.get_tool( "test_tool", tool_version="0.1" )
+        tool_v02 = self.toolbox.get_tool( "test_tool", tool_version="0.2" )
+        assert tool_v02.id == "test_tool"
+        assert tool_v02.version == "0.2", tool_v02.version
+        assert tool_v01.id == "test_tool"
+        assert tool_v01.version == "0.1"
+
+        # Newer variant gets to be default for that id.
+        default_tool = self.toolbox.get_tool( "test_tool" )
+        assert default_tool.id == "test_tool"
+        assert default_tool.version == "0.2"
+
+    def __remove_itp( self ):
+        os.remove( self.integrated_tool_panel_path )
+
+    def __setup_shed_tool_conf( self ):
+        self._add_config( """<toolbox tool_path="."></toolbox>""" )
+
+        self.toolbox  # create toolbox
+        assert not self.reindexed
+
+        os.remove( self.integrated_tool_panel_path )
+
+
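+# ToolBox wired to the unit test app: reuses the test case's config files and
+# directories and installs a conf watcher with a no-op callback.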
+class SimplifiedToolBox( ToolBox ):
+
+    def __init__( self, test_case ):
+        app = test_case.app
+        # Handle app/config stuff needed by toolbox but not by tools.
+        app.job_config.get_tool_resource_parameters = lambda tool_id: None
+        app.config.update_integrated_tool_panel = True
+        config_files = test_case.config_files
+        tool_root_dir = test_case.test_directory
+        super( SimplifiedToolBox, self ).__init__(
+            config_files,
+            tool_root_dir,
+            app,
+        )
+        self._tool_conf_watcher = get_tool_conf_watcher(dummy_callback)
+
+    def handle_panel_update(self, section_dict):
+        self.create_section(section_dict)
+
+
+def dummy_callback():
+    pass
diff --git a/test/unit/tools/test_toolbox_filters.py b/test/unit/tools/test_toolbox_filters.py
new file mode 100644
index 0000000..ccfb92a
--- /dev/null
+++ b/test/unit/tools/test_toolbox_filters.py
@@ -0,0 +1,84 @@
+from galaxy.tools.toolbox.filters import FilterFactory
+
+from galaxy.util.bunch import Bunch
+
+
+def test_stock_filtering_requires_login_tools( ):
+    anonymous_user_trans = mock_trans( has_user=False )
+    filters = filter_factory( {} ).build_filters( mock_trans() )[ 'tool' ]
+    assert not is_filtered( filters, anonymous_user_trans, mock_tool( require_login=False ) )
+    assert is_filtered( filters, anonymous_user_trans, mock_tool( require_login=True ) )
+
+    logged_in_trans = mock_trans( has_user=True )
+    filters = filter_factory( {} ).build_filters( logged_in_trans )[ 'tool' ]
+    assert not is_filtered( filters, logged_in_trans, mock_tool( require_login=True ) )
+
+
+def test_stock_filtering_hidden_tools( ):
+    filters = filter_factory( {} ).build_filters( mock_trans() )[ 'tool' ]
+    assert not is_filtered( filters, mock_trans(), mock_tool( hidden=False ) )
+    assert is_filtered( filters, mock_trans(), mock_tool( hidden=True ) )
+
+
+def test_trackster_filtering( ):
+    filters = filter_factory( {} ).build_filters( mock_trans(), trackster=True )[ 'tool' ]
+        # Eek... is trackster_conf broken? Why is it showing up?
+        # assert is_filtered( filters, mock_trans(), mock_tool( trackster_conf=False ) )
+    assert not is_filtered( filters, mock_trans(), mock_tool( trackster_conf=True ) )
+
+
+def test_custom_filters():
+    filters = filter_factory().build_filters( mock_trans() )
+    tool_filters = filters[ "tool" ]
+    # TODO: the fact that -1 is the custom filter is an implementation
+    # detail that should not be tested here.
+    assert tool_filters[ -1 ].__doc__ == "Test Filter Tool"
+
+    section_filters = filters[ "section" ]
+    assert section_filters[ 0 ].__doc__ == "Test Filter Section"
+
+    label_filters = filters[ "label" ]
+    assert label_filters[ 0 ].__doc__ == "Test Filter Label 1"
+    assert label_filters[ 1 ].__doc__ == "Test Filter Label 2"
+
+
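+# Build a FilterFactory around Bunch-based mocks; by default configures the
+# custom tool/section/label filters looked up from the filtermod module.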
+def filter_factory(config_dict=None):
+    if config_dict is None:
+        config_dict = dict(
+            tool_filters=["filtermod:filter_tool"],
+            tool_section_filters=["filtermod:filter_section"],
+            tool_label_filters=["filtermod:filter_label_1", "filtermod:filter_label_2"],
+        )
+    config = Bunch( **config_dict )
+    config.toolbox_filter_base_modules = "galaxy.tools.filters,tools.filter_modules"
+    app = Bunch(config=config)
+    toolbox = Bunch(app=app)
+    return FilterFactory(toolbox)
+
+
+def is_filtered( filters, trans, tool ):
+    context = Bunch( trans=trans )
+    return not all( f( context, tool ) for f in filters )
+
+
+def mock_tool( require_login=False, hidden=False, trackster_conf=False, allow_access=True ):
+    def allow_user_access(user, attempting_access):
+        assert not attempting_access
+        return allow_access
+
+    tool = Bunch(
+        require_login=require_login,
+        hidden=hidden,
+        trackster_conf=trackster_conf,
+        allow_user_access=allow_user_access,
+    )
+    return tool
+
+
+def mock_trans( has_user=True, is_admin=False ):
+    trans = Bunch( user_is_admin=lambda: is_admin )
+    if has_user:
+        trans.user = Bunch(preferences={})
+    else:
+        trans.user = None
+    return trans
diff --git a/test/unit/tools/test_watcher.py b/test/unit/tools/test_watcher.py
new file mode 100644
index 0000000..7054856
--- /dev/null
+++ b/test/unit/tools/test_watcher.py
@@ -0,0 +1,90 @@
+import tempfile
+import time
+
+from contextlib import contextmanager
+from os import path
+from shutil import rmtree
+
+from galaxy.tools.toolbox import watcher
+from galaxy.util import bunch
+
+
+def test_watcher():
+    if not watcher.can_watch:
+        from nose.plugins.skip import SkipTest
+        raise SkipTest()
+
+    with __test_directory() as t:
+        tool_path = path.join(t, "test.xml")
+        toolbox = Toolbox()
+        open(tool_path, "w").write("a")
+        tool_watcher = watcher.get_tool_watcher(toolbox, bunch.Bunch(
+            watch_tools=True
+        ))
+        time.sleep(1)
+        tool_watcher.watch_file(tool_path, "cool_tool")
+        assert not toolbox.was_reloaded("cool_tool")
+        open(tool_path, "w").write("b")
+        wait_for_reload(lambda: toolbox.was_reloaded("cool_tool"))
+        tool_watcher.shutdown()
+        assert not tool_watcher.observer.is_alive()
+
+
+def test_tool_conf_watcher():
+    if not watcher.can_watch:
+        from nose.plugins.skip import SkipTest
+        raise SkipTest()
+
+    callback = CallbackRecorder()
+    conf_watcher = watcher.get_tool_conf_watcher(callback.call)
+
+    with __test_directory() as t:
+        tool_conf_path = path.join(t, "test_conf.xml")
+        conf_watcher.watch_file(tool_conf_path)
+        time.sleep(1)
+        open(tool_conf_path, "w").write("b")
+        wait_for_reload(lambda: callback.called)
+        conf_watcher.shutdown()
+        assert not conf_watcher.thread.is_alive()
+
+
+def wait_for_reload(check):
+    reloaded = False
+    for i in range(10):
+        reloaded = check()
+        if reloaded:
+            break
+        time.sleep(.2)
+    assert reloaded
+
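+# (The poll above gives the watcher thread up to ~2 seconds - ten checks at
+# 0.2s intervals - to observe the change before failing the test.)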
+
+class Toolbox(object):
+
+    def __init__(self):
+        self.reloaded = {}
+
+    def reload_tool_by_id( self, tool_id ):
+        self.reloaded[ tool_id ] = True
+
+    def was_reloaded(self, tool_id):
+        return self.reloaded.get( tool_id, False )
+
+
+class CallbackRecorder(object):
+
+    def __init__(self):
+        self.called = False
+
+    def call(self):
+        self.called = True
+
+
+@contextmanager
+def __test_directory():
+    base_path = tempfile.mkdtemp()
+    try:
+        yield base_path
+    finally:
+        rmtree(base_path)
diff --git a/test/unit/tools/test_wrappers.py b/test/unit/tools/test_wrappers.py
new file mode 100644
index 0000000..bc80eb6
--- /dev/null
+++ b/test/unit/tools/test_wrappers.py
@@ -0,0 +1,189 @@
+import os
+import tempfile
+from xml.etree.ElementTree import XML
+
+from galaxy.datatypes.metadata import MetadataSpecCollection
+from galaxy.jobs.datasets import DatasetPath
+from galaxy.tools.parameters.basic import (
+    DrillDownSelectToolParameter,
+    IntegerToolParameter,
+    SelectToolParameter
+)
+from galaxy.tools.wrappers import (
+    DatasetFilenameWrapper,
+    InputValueWrapper,
+    RawObjectWrapper,
+    SelectToolParameterWrapper
+)
+from galaxy.util.bunch import Bunch
+
+
+def with_mock_tool(func):
+    def call():
+        test_directory = tempfile.mkdtemp()
+        app = MockApp(test_directory)
+        tool = MockTool(app)
+        return func(tool)
+    call.__name__ = func.__name__
+    return call
+
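+# with_mock_tool builds a fresh MockApp/MockTool pair for each wrapped test;
+# copying func.__name__ over keeps the original test name in reports.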
+
+@with_mock_tool
+def test_select_wrapper_simple_options(tool):
+    xml = XML('''<param name="blah" type="select">
+        <option value="x">I am X</option>
+        <option value="y" selected="true">I am Y</option>
+        <option value="z">I am Z</option>
+    </param>''')
+    parameter = SelectToolParameter( tool, xml )
+    wrapper = SelectToolParameterWrapper( parameter, "x", tool.app )
+    assert str(wrapper) == "x"
+    assert wrapper.name == "blah"
+    assert wrapper.value_label == "I am X"
+
+
+@with_mock_tool
+def test_select_wrapper_with_drilldown(tool):
+    parameter = _drilldown_parameter(tool)
+    wrapper = SelectToolParameterWrapper( parameter, ["option3"], tool.app )
+    assert str(wrapper) == "option3", str(wrapper)
+
+
+@with_mock_tool
+def test_select_wrapper_option_file(tool):
+    parameter = _setup_blast_tool(tool)
+    wrapper = SelectToolParameterWrapper( parameter, "val2", tool.app )
+    assert str(wrapper) == "val2"
+    assert wrapper.fields.name == "name2"
+    assert wrapper.fields.path == "path2"
+
+
+@with_mock_tool
+def test_select_wrapper_multiple(tool):
+    parameter = _setup_blast_tool(tool, multiple=True)
+    wrapper = SelectToolParameterWrapper( parameter, ["val1", "val2"], tool.app )
+    assert str(wrapper) == "val1,val2"
+    assert wrapper.fields.name == "name1,name2"
+
+
+@with_mock_tool
+def test_select_wrapper_with_path_rewriting(tool):
+    parameter = _setup_blast_tool(tool, multiple=True)
+    wrapper = SelectToolParameterWrapper( parameter, ["val1", "val2"], tool.app, other_values={}, path_rewriter=lambda v: "Rewrite<%s>" % v )
+    assert wrapper.fields.path == "Rewrite<path1>,Rewrite<path2>"
+
+
+def test_raw_object_wrapper():
+    obj = Bunch(x=4)
+    wrapper = RawObjectWrapper(obj)
+    assert wrapper.x == 4
+    assert wrapper
+
+    false_wrapper = RawObjectWrapper(False)
+    assert not false_wrapper
+
+
+@with_mock_tool
+def test_input_value_wrapper(tool):
+    parameter = IntegerToolParameter( tool, XML( '<param name="blah" type="integer" size="4" value="10" min="0" />' ) )
+    wrapper = InputValueWrapper( parameter, "5" )
+    assert str( wrapper ) == "5"
+
+
+def test_dataset_wrapper():
+    dataset = MockDataset()
+    wrapper = DatasetFilenameWrapper(dataset)
+    assert str( wrapper ) == MOCK_DATASET_PATH
+    assert wrapper.file_name == MOCK_DATASET_PATH
+
+    assert wrapper.ext == MOCK_DATASET_EXT
+
+
+def test_dataset_wrapper_false_path():
+    dataset = MockDataset()
+    new_path = "/new/path/dataset_123.dat"
+    wrapper = DatasetFilenameWrapper(dataset, dataset_path=Bunch(false_path=new_path))
+    assert str( wrapper ) == new_path
+    assert wrapper.file_name == new_path
+
+
+def test_dataset_false_extra_files_path():
+    dataset = MockDataset()
+
+    wrapper = DatasetFilenameWrapper(dataset)
+    assert wrapper.extra_files_path == MOCK_DATASET_EXTRA_FILES_PATH
+
+    new_path = "/new/path/dataset_123.dat"
+    dataset_path = DatasetPath(123, MOCK_DATASET_PATH, false_path=new_path)
+    wrapper = DatasetFilenameWrapper(dataset, dataset_path=dataset_path)
+    # Setting false_path is not enough to override
+    assert wrapper.extra_files_path == MOCK_DATASET_EXTRA_FILES_PATH
+
+    new_files_path = "/new/path/dataset_123_files"
+    dataset_path = DatasetPath(123, MOCK_DATASET_PATH, false_path=new_path, false_extra_files_path=new_files_path)
+    wrapper = DatasetFilenameWrapper(dataset, dataset_path=dataset_path)
+    assert wrapper.extra_files_path == new_files_path
+
+
+def _drilldown_parameter(tool):
+    xml = XML( '''<param name="some_name" type="drill_down" display="checkbox" hierarchy="recurse" multiple="true">
+        <options>
+            <option name="Heading 1" value="heading1">
+                <option name="Option 1" value="option1"/>
+                <option name="Option 2" value="option2"/>
+                <option name="Heading 1" value="heading1">
+                    <option name="Option 3" value="option3"/>
+                    <option name="Option 4" value="option4"/>
+               </option>
+            </option>
+           <option name="Option 5" value="option5"/>
+      </options>
+    </param>''' )
+    parameter = DrillDownSelectToolParameter( tool, xml )
+    return parameter
+
+
+def _setup_blast_tool(tool, multiple=False):
+    tool.app.write_test_tool_data("blastdb.loc", "val1\tname1\tpath1\nval2\tname2\tpath2\n")
+    xml = XML( '''<param name="database" type="select" label="Nucleotide BLAST database" multiple="%s">
+        <options from_file="blastdb.loc">
+            <column name="value" index="0"/>
+            <column name="name" index="1"/>
+            <column name="path" index="2"/>
+        </options>
+    </param>''' % multiple )
+    parameter = SelectToolParameter( tool, xml )
+    return parameter
+
+
+MOCK_DATASET_PATH = "/galaxy/database/files/001/dataset_123.dat"
+MOCK_DATASET_EXTRA_FILES_PATH = "/galaxy/database/files/001/dataset_123.dat"
+MOCK_DATASET_EXT = "bam"
+
+
+class MockDataset(object):
+
+    def __init__(self):
+        self.metadata = MetadataSpecCollection({})
+        self.file_name = MOCK_DATASET_PATH
+        self.extra_files_path = MOCK_DATASET_EXTRA_FILES_PATH
+        self.ext = MOCK_DATASET_EXT
+
+
+class MockTool(object):
+
+    def __init__(self, app):
+        self.app = app
+        self.options = Bunch(sanitize=False)
+
+
+class MockApp(object):
+
+    def __init__(self, test_directory):
+        self.config = Bunch(tool_data_path=test_directory)
+
+    def write_test_tool_data(self, name, contents):
+        path = os.path.join(self.config.tool_data_path, name)
+        open(path, "w").write(contents)
diff --git a/test/unit/tools_support.py b/test/unit/tools_support.py
new file mode 100644
index 0000000..51f23cb
--- /dev/null
+++ b/test/unit/tools_support.py
@@ -0,0 +1,226 @@
+""" Module contains test fixtures meant to aide in the testing of jobs and
+tool evaluation. Such extensive "fixtures" are something of an anti-pattern
+so use of this should be limitted to tests of very 'extensive' classes.
+"""
+
+import os.path
+import shutil
+import string
+import tempfile
+from collections import defaultdict
+
+import galaxy.datatypes.registry
+import galaxy.model
+from galaxy.jobs import NoopQueue
+from galaxy.model import mapping
+from galaxy.tools import Tool
+from galaxy.tools.deps.containers import NullContainerFinder
+from galaxy.tools.parser import get_tool_source
+from galaxy.util.bunch import Bunch
+from galaxy.util.dbkeys import GenomeBuilds
+from galaxy.web.security import SecurityHelper
+
+datatypes_registry = galaxy.datatypes.registry.Registry()
+datatypes_registry.load_datatypes()
+galaxy.model.set_datatypes_registry(datatypes_registry)
+
+
+class UsesApp( object ):
+
+    def setup_app( self, mock_model=True ):
+        self.test_directory = tempfile.mkdtemp()
+        self.app = MockApp( self.test_directory, mock_model=mock_model )
+
+    def tear_down_app( self ):
+        shutil.rmtree( self.test_directory )
+
+
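+# A minimal sketch of how these mixins combine in the unit tests that use
+# them (this example test case is illustrative, not part of this module):
+#
+#     class ToolTestCase( UsesApp, UsesTools, unittest.TestCase ):
+#
+#         def setUp( self ):
+#             self.setup_app()
+#
+#         def tearDown( self ):
+#             self.tear_down_app()
+#
+#         def test_tool_version( self ):
+#             tool = self._init_tool( version="2.0" )
+#             assert tool.version == "2.0"
+
+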
+# Simple tool with just one text parameter and output.
+SIMPLE_TOOL_CONTENTS = '''<tool id="test_tool" name="Test Tool" version="$version">
+    <command>echo "$param1" < $out1</command>
+    <inputs>
+        <param type="text" name="param1" value="" />
+    </inputs>
+    <outputs>
+        <data name="out1" format="data" label="Output ($param1)" />
+    </outputs>
+</tool>
+'''
+
+
+# A tool with data parameters (kind of like cat1) - my favorite test tool :)
+SIMPLE_CAT_TOOL_CONTENTS = '''<tool id="test_tool" name="Test Tool" version="$version">
+    <command>cat "$param1" #for $r in $repeat# "$r.param2" #end for# < $out1</command>
+    <inputs>
+        <param type="data" format="tabular" name="param1" value="" />
+        <repeat name="repeat1" label="Repeat 1">
+            <param type="data" format="tabular" name="param2" value="" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out1" format="data" />
+    </outputs>
+</tool>
+'''
+
+
+class UsesTools( object ):
+
+    def _init_tool(
+        self,
+        tool_contents=SIMPLE_TOOL_CONTENTS,
+        filename="tool.xml",
+        version="1.0"
+    ):
+        self._init_app_for_tools()
+        self.tool_file = os.path.join( self.test_directory, filename )
+        contents_template = string.Template( tool_contents )
+        tool_contents = contents_template.safe_substitute( dict( version=version ) )
+        self.__write_tool( tool_contents )
+        return self.__setup_tool( )
+
+    def _init_app_for_tools( self ):
+        self.app.config.drmaa_external_runjob_script = ""
+        self.app.config.tool_secret = "testsecret"
+        self.app.config.track_jobs_in_database = False
+        self.app.job_config["get_job_tool_configurations"] = lambda ids: [Bunch(handler=Bunch())]
+
+    def __setup_tool( self ):
+        tool_source = get_tool_source( self.tool_file )
+        self.tool = Tool( self.tool_file, tool_source, self.app )
+        if getattr( self, "tool_action", None ):
+            self.tool.tool_action = self.tool_action
+        return self.tool
+
+    def __write_tool( self, contents ):
+        open( self.tool_file, "w" ).write( contents )
+
+
+class MockApp( object ):
+
+    def __init__( self, test_directory, mock_model=True ):
+        # The following line is needed in order to create
+        # HistoryDatasetAssociations - ideally the model classes would be
+        # usable without the ORM infrastructure in place.
+        in_memory_model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
+
+        self.datatypes_registry = Bunch(
+            integrated_datatypes_configs='/galaxy/integrated_datatypes_configs.xml',
+            get_datatype_by_extension=lambda ext: Bunch(),
+        )
+
+        self.config = Bunch(
+            outputs_to_working_directory=False,
+            commands_in_new_shell=True,
+            new_file_path=os.path.join(test_directory, "new_files"),
+            tool_data_path=os.path.join(test_directory, "tools"),
+            root=os.path.join(test_directory, "galaxy"),
+            admin_users="mary at example.com",
+            len_file_path=os.path.join( 'tool-data', 'shared', 'ucsc', 'chrom' ),
+            builds_file_path=os.path.join( 'tool-data', 'shared', 'ucsc', 'builds.txt.sample' ),
+            migrated_tools_config=os.path.join(test_directory, "migrated_tools_conf.xml"),
+            server_name="test_server",
+        )
+
+        # Setup some attributes for downstream extension by specific tests.
+        self.job_config = Bunch(
+            dynamic_params=None,
+        )
+
+        # Two ways to handle the model layer: one is to stub out some objects
+        # that have an interface similar to the real model (mock_model) and
+        # can keep track of 'persisted' objects in a map; the other is to use
+        # a real sqlalchemy layer but target an in-memory database. Which one
+        # is used depends on what is being tested.
+        if mock_model:
+            # Create self.model to mimic app.model.
+            self.model = Bunch( context=MockContext() )
+            for module_member_name in dir( galaxy.model ):
+                module_member = getattr(galaxy.model, module_member_name)
+                if type( module_member ) == type:
+                    self.model[ module_member_name ] = module_member
+        else:
+            self.model = in_memory_model
+        self.genome_builds = GenomeBuilds( self )
+        self.toolbox = None
+        self.object_store = None
+        self.security = SecurityHelper(id_secret="testing")
+        from galaxy.security import GalaxyRBACAgent
+        self.job_queue = NoopQueue()
+        self.security_agent = GalaxyRBACAgent( self.model )
+        self.tool_data_tables = {}
+        self.dataset_collections_service = None
+        self.container_finder = NullContainerFinder()
+        self.name = "galaxy"
+        self._toolbox_lock = MockLock()
+        self.tool_version_cache = Bunch(app=self,
+                                        tool_version_by_id={},
+                                        tool_version_by_tool_id={},
+                                        tool_id_to_parent_id={},
+                                        parent_id_to_tool_id={})
+
+    def wait_for_toolbox_reload(self, toolbox):
+        # TODO: if the tpm test case passes, does the operation really
+        # need to wait?
+        return True
+
+
+class MockLock( object ):
+    def __enter__(self):
+        pass
+
+    def __exit__(self, type, value, traceback):
+        pass
+
+
+class MockContext(object):
+
+    def __init__(self, model_objects=None):
+        self.expunged_all = False
+        self.flushed = False
+        self.model_objects = model_objects or defaultdict( lambda: {} )
+        self.created_objects = []
+        self.current = self
+
+    def expunge_all(self):
+        self.expunged_all = True
+
+    def query(self, clazz):
+        return MockQuery(self.model_objects.get(clazz))
+
+    def flush(self):
+        self.flushed = True
+
+    def add(self, object):
+        self.created_objects.append(object)
+
+
+class MockQuery(object):
+
+    def __init__(self, class_objects):
+        self.class_objects = class_objects
+
+    def filter_by(self, **kwds):
+        return Bunch(first=lambda: None)
+
+    def get(self, id):
+        return self.class_objects.get(id, None)
+
+
+__all__ = ( 'UsesApp', 'UsesTools' )
diff --git a/test/unit/unittest_utils/__init__.py b/test/unit/unittest_utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/unittest_utils/galaxy_mock.py b/test/unit/unittest_utils/galaxy_mock.py
new file mode 100644
index 0000000..93c2aff
--- /dev/null
+++ b/test/unit/unittest_utils/galaxy_mock.py
@@ -0,0 +1,200 @@
+"""
+Mock infrastructure for testing ModelManagers.
+"""
+import os
+import shutil
+import tempfile
+
+from galaxy import (
+    model,
+    objectstore,
+    quota
+)
+from galaxy.datatypes import registry
+from galaxy.managers import tags
+from galaxy.model import mapping
+from galaxy.util.bunch import Bunch
+from galaxy.web import security
+
+
+# =============================================================================
+class OpenObject( object ):
+    pass
+
+
+def buildMockEnviron( **kwargs ):
+    environ = {
+        'CONTENT_LENGTH': '0',
+        'CONTENT_TYPE': '',
+        'HTTP_ACCEPT': '*/*',
+        'HTTP_ACCEPT_ENCODING': 'gzip, deflate',
+        'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8,zh;q=0.5,ja;q=0.3',
+        'HTTP_CACHE_CONTROL': 'no-cache',
+        'HTTP_CONNECTION': 'keep-alive',
+        'HTTP_DNT': '1',
+        'HTTP_HOST': 'localhost:8000',
+        'HTTP_ORIGIN': 'http://localhost:8000',
+        'HTTP_PRAGMA': 'no-cache',
+        'HTTP_REFERER': 'http://localhost:8000',
+        'HTTP_USER_AGENT': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:43.0) Gecko/20100101 Firefox/43.0',
+        'PATH_INFO': '/',
+        'QUERY_STRING': '',
+        'REMOTE_ADDR': '127.0.0.1',
+        'REQUEST_METHOD': 'GET',
+        'SCRIPT_NAME': '',
+        'SERVER_NAME': '127.0.0.1',
+        'SERVER_PORT': '8080',
+        'SERVER_PROTOCOL': 'HTTP/1.1'
+    }
+    environ.update( **kwargs )
+    return environ
+
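+# Typical use (an illustration, not called elsewhere in this module):
+#     environ = buildMockEnviron( REQUEST_METHOD='POST', PATH_INFO='/api/histories' )
+# i.e. start from a realistic WSGI environ and override only what a test needs.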
+
+class MockApp( object ):
+
+    def __init__( self, config=None, **kwargs ):
+        self.config = config or MockAppConfig( **kwargs )
+        self.security = self.config.security
+        self.name = kwargs.get( 'name', 'galaxy' )
+        self.object_store = objectstore.build_object_store_from_config( self.config )
+        self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store )
+        self.security_agent = self.model.security_agent
+        self.visualizations_registry = MockVisualizationsRegistry()
+        self.tag_handler = tags.GalaxyTagManager( self )
+        self.quota_agent = quota.QuotaAgent( self.model )
+        self.init_datatypes()
+
+    def init_datatypes( self ):
+        datatypes_registry = registry.Registry()
+        datatypes_registry.load_datatypes()
+        model.set_datatypes_registry( datatypes_registry )
+
+
+class MockAppConfig( Bunch ):
+
+    def __init__( self, root=None, **kwargs ):
+        Bunch.__init__( self, **kwargs )
+        self.security = security.SecurityHelper( id_secret='bler' )
+        self.use_remote_user = kwargs.get( 'use_remote_user', False )
+        self.file_path = '/tmp'
+        self.jobs_directory = '/tmp'
+        self.new_file_path = '/tmp'
+
+        self.object_store_config_file = ''
+        self.object_store = 'disk'
+        self.object_store_check_old_style = False
+
+        self.user_activation_on = False
+        self.new_user_dataset_access_role_default_private = False
+
+        self.expose_dataset_path = True
+        self.allow_user_dataset_purge = True
+        self.enable_old_display_applications = True
+
+        self.umask = 0o77
+
+        # set by MockDir
+        self.root = root
+
+
+class MockWebapp( object ):
+
+    def __init__( self, **kwargs ):
+        self.name = kwargs.get( 'name', 'galaxy' )
+        self.security = security.SecurityHelper( id_secret='bler' )
+
+
+class MockTrans( object ):
+
+    def __init__( self, app=None, user=None, history=None, **kwargs ):
+        self.app = app or MockApp( **kwargs )
+        self.model = self.app.model
+        self.webapp = MockWebapp( **kwargs )
+        self.sa_session = self.app.model.session
+        self.workflow_building_mode = False
+
+        self.galaxy_session = None
+        self.__user = user
+        self.security = self.app.security
+        self.history = history
+
+        self.request = Bunch( headers={} )
+        self.response = Bunch( headers={} )
+
+    def get_user( self ):
+        if self.galaxy_session:
+            return self.galaxy_session.user
+        else:
+            return self.__user
+
+    def set_user( self, user ):
+        """Set the current user."""
+        if self.galaxy_session:
+            self.galaxy_session.user = user
+            self.sa_session.add( self.galaxy_session )
+            self.sa_session.flush()
+        self.__user = user
+
+    user = property( get_user, set_user )
+
+    def get_history( self, **kwargs ):
+        return self.history
+
+    def set_history( self, history ):
+        self.history = history
+
+    def fill_template( self, filename, template_lookup=None, **kwargs ):
+        template = template_lookup.get_template( filename )
+        template.output_encoding = 'utf-8'
+        kwargs.update( h=MockTemplateHelpers() )
+        return template.render( **kwargs )
+
+
+class MockVisualizationsRegistry( object ):
+
+    def get_visualizations( self, trans, target ):
+        return []
+
+
+class MockDir( object ):
+
+    def __init__( self, structure_dict, where=None ):
+        self.structure_dict = structure_dict
+        self.create_root( structure_dict, where )
+
+    def create_root( self, structure_dict, where=None ):
+        self.root_path = tempfile.mkdtemp( dir=where )
+        # print 'created root:', self.root_path
+        self.create_structure( self.root_path, structure_dict )
+
+    def create_structure( self, current_path, structure_dict ):
+        for k, v in structure_dict.items():
+            # if value is string, create a file in the current path and write v as file contents
+            if isinstance( v, str ):
+                self.create_file( os.path.join( current_path, k ), v )
+            # if it's a dict, create a dir here named k and recurse into it
+            if isinstance( v, dict ):
+                subdir_path = os.path.join( current_path, k )
+                # print 'subdir:', subdir_path
+                os.mkdir( subdir_path )
+                self.create_structure( subdir_path, v )
+
+    def create_file( self, path, contents ):
+        # print 'file:', path
+        with open( path, 'w' ) as newfile:
+            newfile.write( contents )
+
+    def remove( self ):
+        # print 'removing:', self.root_path
+        shutil.rmtree( self.root_path )
+
+
+class MockTemplateHelpers( object ):
+    def js( self, *js_files ):
+        pass
+
+    def css( self, *css_files ):
+        pass
diff --git a/test/unit/unittest_utils/tempfilecache.py b/test/unit/unittest_utils/tempfilecache.py
new file mode 100644
index 0000000..660deca
--- /dev/null
+++ b/test/unit/unittest_utils/tempfilecache.py
@@ -0,0 +1,53 @@
+import logging
+import os
+import tempfile
+
+# Module-level logger; TempFileCache can swap in a logger passed to it.
+log = logging.getLogger( __name__ )
+
+
+class TempFileCache( object ):
+    """
+    Creates and caches temp files based on the given contents.
+    """
+
+    def __init__( self, logger=None ):
+        if logger:
+            global log
+            log = logger
+        super( TempFileCache, self ).__init__()
+        self.clear()
+
+    def clear( self ):
+        self.delete_tmpfiles()
+        self._content_dict = {}
+
+    def create_tmpfile( self, contents ):
+        if not hasattr( self, '_content_dict' ):
+            self.clear()
+
+        if contents not in self._content_dict:
+            # create a named tmp and write contents to it, return filename
+            tmpfile = tempfile.NamedTemporaryFile( delete=False )
+            tmpfile.write( contents )
+            tmpfile.close()
+            log.debug( 'created tmpfile.name: %s', tmpfile.name )
+            self._content_dict[ contents ] = tmpfile.name
+
+        else:
+            log.debug( '(cached): %s', self._content_dict[ contents ] )
+        return self._content_dict[ contents ]
+
+    def delete_tmpfiles( self ):
+        if not hasattr( self, '_content_dict' ) or not self._content_dict:
+            return
+        for tmpfile_contents in self._content_dict:
+            tmpfile = self._content_dict[ tmpfile_contents ]
+            if os.path.exists( tmpfile ):
+                log.debug( 'unlinking tmpfile: %s', tmpfile )
+                os.unlink( tmpfile )
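+
+
+# Illustrative use: create_tmpfile( 'abc' ) writes 'abc' to a new NamedTemporaryFile
+# and returns its path; asking for the same contents again returns the cached path,
+# and clear() / delete_tmpfiles() unlink everything created so far.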
diff --git a/test/unit/unittest_utils/utility.py b/test/unit/unittest_utils/utility.py
new file mode 100644
index 0000000..68a37f7
--- /dev/null
+++ b/test/unit/unittest_utils/utility.py
@@ -0,0 +1,24 @@
+"""
+Unit test utilities.
+"""
+import textwrap
+
+
+def clean_multiline_string( multiline_string, sep='\n' ):
+    """
+    Dedent, split, remove first and last empty lines, rejoin.
+    """
+    multiline_string = textwrap.dedent( multiline_string )
+    string_list = multiline_string.split( sep )
+    if not string_list[0]:
+        string_list = string_list[1:]
+    if not string_list[-1]:
+        string_list = string_list[:-1]
+    return ''.join([ ( s + '\n' ) for s in string_list ])
+
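+# For example, clean_multiline_string( "\n  one\n  two\n" ) dedents and trims
+# to return "one\ntwo\n".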
+
+__all__ = (
+    "clean_multiline_string",
+)
diff --git a/test/unit/visualizations/__init__.py b/test/unit/visualizations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/visualizations/plugins/__init__.py b/test/unit/visualizations/plugins/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/visualizations/plugins/test_VisualizationPlugin.py b/test/unit/visualizations/plugins/test_VisualizationPlugin.py
new file mode 100644
index 0000000..f52c7c2
--- /dev/null
+++ b/test/unit/visualizations/plugins/test_VisualizationPlugin.py
@@ -0,0 +1,204 @@
+"""
+Test lib/galaxy/visualization/plugins/plugin.
+"""
+import os
+import sys
+import unittest
+
+from six import string_types
+
+from galaxy.visualization.plugins import (
+    plugin as vis_plugin,
+    resource_parser,
+    utils as vis_utils
+)
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock, utility
+
+
+# -----------------------------------------------------------------------------
+class VisualizationsPlugin_TestCase( unittest.TestCase ):
+    plugin_class = vis_plugin.VisualizationPlugin
+
+    def test_default_init( self ):
+        """
+        A plugin with no context passed in should have sane defaults.
+        """
+        vis_dir = galaxy_mock.MockDir({
+            'config': {
+                'vis1.xml': ''
+            },
+            'static': {},
+            'templates': {},
+        })
+        config = dict()
+        plugin = self.plugin_class( galaxy_mock.MockApp(), vis_dir.root_path,
+            'myvis', config )
+        self.assertEqual( plugin.name, 'myvis' )
+        self.assertEqual( plugin.path, vis_dir.root_path )
+        self.assertEqual( plugin.config, {} )
+        self.assertEqual( plugin.base_url, 'myvis' )
+        # static
+        self.assertTrue( plugin.serves_static )
+        self.assertEqual( plugin.static_path, vis_dir.root_path + '/static' )
+        self.assertEqual( plugin.static_url, 'myvis/static' )
+        # template
+        self.assertTrue( plugin.serves_templates )
+        self.assertEqual( plugin.template_path, vis_dir.root_path + '/templates' )
+        self.assertEqual( plugin.template_lookup.__class__.__name__, 'TemplateLookup' )
+        # resource parser
+        self.assertIsInstance( plugin.resource_parser, resource_parser.ResourceParser )
+
+    def test_init_with_context( self ):
+        """
+        A plugin with a context passed in should use it in its set up.
+        """
+        vis_dir = galaxy_mock.MockDir({
+            'config': {
+                'vis1.xml': ''
+            },
+            'static': {},
+            'templates': {},
+        })
+        config = dict()
+        context = dict(
+            base_url='u/wot/m8',
+            template_cache_dir='template_cache',
+            additional_template_paths=[ 'one' ]
+        )
+        plugin = self.plugin_class( galaxy_mock.MockApp(), vis_dir.root_path,
+            'myvis', config, context=context )
+        self.assertEqual( plugin.base_url, 'u/wot/m8/myvis' )
+        # static
+        self.assertEqual( plugin.static_url, 'u/wot/m8/myvis/static' )
+        # template
+        self.assertEqual( plugin.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+    def test_init_without_static_or_templates( self ):
+        """
+        A plugin with neither a templates nor a static directory should serve neither.
+        """
+        vis_dir = galaxy_mock.MockDir({
+            'config': {
+                'vis1.xml': ''
+            }
+        })
+        plugin = self.plugin_class( galaxy_mock.MockApp(), vis_dir.root_path,
+            'myvis', dict() )
+        self.assertFalse( plugin.serves_static )
+        self.assertFalse( plugin.serves_templates )
+        # not sure what this would do, but...
+
+    def test_build_render_vars_default( self ):
+        """
+        Render vars passed to render should default properly.
+        """
+        # well, that's kind of a lot of typing to say nothing new
+        config = dict(
+            name='Cat Fancy Magazine\'s Genomic Visualization'
+        )
+        plugin = self.plugin_class( galaxy_mock.MockApp(), '', 'myvis', config )
+
+        render_vars = plugin._build_render_vars( config )
+        self.assertEqual( render_vars[ 'visualization_name' ], plugin.name )
+        self.assertEqual( render_vars[ 'visualization_display_name' ], plugin.config[ 'name' ] )
+        self.assertEqual( render_vars[ 'title' ], None )
+        self.assertEqual( render_vars[ 'saved_visualization' ], None )
+        self.assertEqual( render_vars[ 'visualization_id' ], None )
+        self.assertEqual( render_vars[ 'query' ], {} )
+        self.assertIsInstance( render_vars[ 'config' ], vis_utils.OpenObject )
+        self.assertEqual( render_vars[ 'config' ].__dict__, {} )
+
+    def test_build_config( self ):
+        """
+        """
+        plugin_config = dict()
+        plugin = self.plugin_class( galaxy_mock.MockApp(), '', 'myvis', plugin_config )
+        config = plugin._build_config( {} )
+        self.assertIsInstance( config, vis_utils.OpenObject )
+        self.assertEqual( config.__dict__, {} )
+
+        # existing should flow through
+        plugin_config = dict()
+        plugin = self.plugin_class( galaxy_mock.MockApp(), '', 'myvis', plugin_config )
+        existing_config = dict( wat=1 )
+        config = plugin._build_config( existing_config )
+        self.assertEqual( config.wat, 1 )
+
+        # unlisted/non-param kwargs should NOT overwrite existing
+        plugin_config = dict()
+        plugin = self.plugin_class( galaxy_mock.MockApp(), '', 'myvis', plugin_config )
+        existing_config = dict( wat=1 )
+        config = plugin._build_config( existing_config, wat=2 )
+        self.assertEqual( config.wat, 1 )
+
+        # listed/param kwargs *should* overwrite existing
+        plugin_config = dict(
+            params=dict(
+                wat={
+                    'csv': False,
+                    'required': False,
+                    'type': 'int',
+                    'var_name_in_template': 'wot'
+                },
+            )
+        )
+        plugin = self.plugin_class( galaxy_mock.MockApp(), '', 'myvis', plugin_config )
+        existing_config = dict( wat=1 )
+        # send as string like a query would - should be parsed
+        config = plugin._build_config( existing_config, wat='2' )
+        self.assertEqual( config.wat, 2 )
+
+    def test_render( self ):
+        """
+        """
+        # use the python in a template to test for variables that should be there
+        # TODO: gotta be a better way
+        testing_template = utility.clean_multiline_string( """\
+        <%
+            found_all = True
+            should_have = [
+                title, visualization_name, visualization_display_name,
+                visualization_id, saved_visualization,
+                query, config,
+                embedded,
+                vars
+            ]
+            for var in should_have:
+                try:
+                    var = str( var )
+                except NameError as name_err:
+                    found_all = False
+                    break
+        %>
+        ${ found_all }
+        """ )
+
+        mock_app_dir = galaxy_mock.MockDir({
+            'cache': {},
+            'template.mako': testing_template
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin = self.plugin_class( mock_app, '', 'myvis', {
+            "name": "Vlad News Bears"
+        })
+
+        # somewhat easier to set this up by hand
+        plugin.config[ 'entry_point' ] = { 'file': 'template.mako' }
+        plugin.template_path = mock_app_dir.root_path
+        plugin.template_lookup = plugin._build_template_lookup( mock_app_dir.root_path )
+
+        response = plugin.render( trans=galaxy_mock.MockTrans( app=mock_app ) )
+        self.assertIsInstance( response, string_types )
+        self.assertEqual( response.strip(), "True" )
+
+
+# -----------------------------------------------------------------------------
+# TODO: config parser tests (in separate file)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/unit/visualizations/plugins/test_VisualizationsRegistry.py b/test/unit/visualizations/plugins/test_VisualizationsRegistry.py
new file mode 100644
index 0000000..4c7ebf7
--- /dev/null
+++ b/test/unit/visualizations/plugins/test_VisualizationsRegistry.py
@@ -0,0 +1,280 @@
+"""
+Test lib/galaxy/visualization/plugins/registry.
+"""
+import os
+import re
+import sys
+import unittest
+
+from six import string_types
+
+from galaxy import model
+from galaxy.visualization.plugins import plugin
+from galaxy.visualization.plugins.registry import VisualizationsRegistry
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock, utility
+
+# -----------------------------------------------------------------------------
+glx_dir = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir, os.pardir, os.pardir ) )
+template_cache_dir = os.path.join( glx_dir, 'database', 'compiled_templates' )
+additional_templates_dir = os.path.join( glx_dir, 'config', 'plugins', 'visualizations', 'common', 'templates' )
+vis_reg_path = 'config/plugins/visualizations'
+
+config1 = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE visualization SYSTEM "../../visualization.dtd">
+<visualization name="scatterplot">
+    <data_sources>
+        <data_source>
+            <model_class>HistoryDatasetAssociation</model_class>
+            <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+            <to_param param_attr="id">dataset_id</to_param>
+        </data_source>
+    </data_sources>
+    <params>
+        <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+    </params>
+    <template>scatterplot.mako</template>
+</visualization>
+"""
+
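+# A directory is treated as a visualization plugin only if it holds a
+# parseable XML descriptor under config/ (like config1 above); the
+# not_a_vis* fixtures in test_plugin_load below all fail that check.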
+
+# -----------------------------------------------------------------------------
+class VisualizationsRegistry_TestCase( unittest.TestCase ):
+
+    def test_plugin_load_from_repo( self ):
+        """should attempt load if criteria met"""
+        mock_app = galaxy_mock.MockApp( root=glx_dir )
+        plugin_mgr = VisualizationsRegistry( mock_app,
+            directories_setting=vis_reg_path,
+            template_cache_dir=None )
+
+        expected_plugins_path = os.path.join( glx_dir, vis_reg_path )
+        self.assertEqual( plugin_mgr.base_url, 'visualizations' )
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+
+        scatterplot = plugin_mgr.plugins[ 'scatterplot' ]
+        self.assertEqual( scatterplot.name, 'scatterplot' )
+        self.assertEqual( scatterplot.path, os.path.join( expected_plugins_path, 'scatterplot' ) )
+        self.assertEqual( scatterplot.base_url, '/'.join([ plugin_mgr.base_url, scatterplot.name ]) )
+        self.assertTrue(  scatterplot.serves_static )
+        self.assertEqual( scatterplot.static_path, os.path.join( scatterplot.path, 'static' ) )
+        self.assertEqual( scatterplot.static_url, '/'.join([ scatterplot.base_url, 'static' ]) )
+        self.assertTrue(  scatterplot.serves_templates )
+        self.assertEqual( scatterplot.template_path, os.path.join( scatterplot.path, 'templates' ) )
+        self.assertEqual( scatterplot.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+        trackster = plugin_mgr.plugins[ 'trackster' ]
+        self.assertEqual( trackster.name, 'trackster' )
+        self.assertEqual( trackster.path, os.path.join( expected_plugins_path, 'trackster' ) )
+        self.assertEqual( trackster.base_url, '/'.join([ plugin_mgr.base_url, trackster.name ]) )
+        self.assertFalse( trackster.serves_static )
+        self.assertFalse( trackster.serves_templates )
+
+    def test_plugin_load( self ):
+        """"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'vis1': {
+                    'config': {
+                        'vis1.xml': config1
+                    },
+                    'static': {},
+                    'templates': {},
+                },
+                'vis2': {
+                    'config': {
+                        'vis2.xml': config1
+                    }
+                },
+                'not_a_vis1': {
+                    'config': {
+                        'vis1.xml': 'blerbler'
+                    },
+                },
+                # empty
+                'not_a_vis2': {},
+                'not_a_vis3': 'blerbler',
+                # bad config
+                'not_a_vis4': {
+                    'config': {
+                        'not_a_vis4.xml': 'blerbler'
+                    }
+                },
+                'not_a_vis5': {
+                    # no config
+                    'static': {},
+                    'templates': {},
+                },
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = VisualizationsRegistry( mock_app,
+            directories_setting='plugins',
+            template_cache_dir=template_cache_dir )
+
+        expected_plugins_path = os.path.join( mock_app_dir.root_path, 'plugins' )
+        expected_plugin_names = [ 'vis1', 'vis2' ]
+
+        self.assertEqual( plugin_mgr.base_url, 'visualizations' )
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( sorted(plugin_mgr.plugins.keys()), expected_plugin_names )
+
+        vis1 = plugin_mgr.plugins[ 'vis1' ]
+        self.assertEqual( vis1.name, 'vis1' )
+        self.assertEqual( vis1.path, os.path.join( expected_plugins_path, 'vis1' ) )
+        self.assertEqual( vis1.base_url, '/'.join([ plugin_mgr.base_url, vis1.name ]) )
+        self.assertTrue(  vis1.serves_static )
+        self.assertEqual( vis1.static_path, os.path.join( vis1.path, 'static' ) )
+        self.assertEqual( vis1.static_url, '/'.join([ vis1.base_url, 'static' ]) )
+        self.assertTrue(  vis1.serves_templates )
+        self.assertEqual( vis1.template_path, os.path.join( vis1.path, 'templates' ) )
+        self.assertEqual( vis1.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+        vis2 = plugin_mgr.plugins[ 'vis2' ]
+        self.assertEqual( vis2.name, 'vis2' )
+        self.assertEqual( vis2.path, os.path.join( expected_plugins_path, 'vis2' ) )
+        self.assertEqual( vis2.base_url, '/'.join([ plugin_mgr.base_url, vis2.name ]) )
+        self.assertFalse( vis2.serves_static )
+        self.assertFalse( vis2.serves_templates )
+
+        mock_app_dir.remove()
+
+    def test_interactive_environ_plugin_load( self ):
+        """
+        """
+        ipython_config = utility.clean_multiline_string( """\
+        <?xml version="1.0" encoding="UTF-8"?>
+        <!DOCTYPE interactive_environment SYSTEM "../../interactive_environments.dtd">
+        <interactive_environment name="IPython">
+            <data_sources>
+                <data_source>
+                    <model_class>HistoryDatasetAssociation</model_class>
+                    <test type="isinstance" test_attr="datatype" result_type="datatype">tabular.Tabular</test>
+                    <test type="isinstance" test_attr="datatype" result_type="datatype">data.Text</test>
+                    <to_param param_attr="id">dataset_id</to_param>
+                </data_source>
+            </data_sources>
+            <params>
+                <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param>
+            </params>
+            <template>ipython.mako</template>
+        </interactive_environment>
+        """ )
+
+        mock_app_dir = {
+            'plugins': {
+                'ipython': {
+                    'config': {
+                        'ipython.xml': ipython_config
+                    },
+                    'templates': {}
+                },
+            },
+        }
+
+        # going to use a fake template here to simplify testing
+        ipython_template = "${ ie_request }-${ get_api_key() }"
+        mock_app_dir[ 'plugins' ][ 'ipython' ][ 'templates' ][ 'ipython.mako' ] = ipython_template
+        # so that we don't create a cached version of that fake template in the real mako caches
+        #   we'll set up a cache in the temp dir
+        mock_app_dir[ 'caches' ] = {}
+        # and make sure the vis reg uses that
+        mock_app_dir = galaxy_mock.MockDir( mock_app_dir )
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = VisualizationsRegistry( mock_app,
+            directories_setting='plugins',
+            template_cache_dir=os.path.join( mock_app_dir.root_path, 'caches' ) )
+
+        # ...then start testing
+        expected_plugins_path = os.path.join( mock_app_dir.root_path, 'plugins' )
+        expected_plugin_names = [ 'ipython' ]
+
+        self.assertEqual( plugin_mgr.base_url, 'visualizations' )
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( sorted(plugin_mgr.plugins.keys()), expected_plugin_names )
+
+        ipython = plugin_mgr.plugins[ 'ipython' ]
+        config = ipython.config
+
+        self.assertEqual( ipython.name, 'ipython' )
+        self.assertEqual( config.get( 'plugin_type' ), 'interactive_environment' )
+
+        # get_api_key needs a user, fill_template a trans
+        user = model.User( email="blah at bler.blah", password="dockerDockerDOCKER" )
+        trans = galaxy_mock.MockTrans( user=user )
+        # use a mock request factory - this will be written into the filled template to show it was used
+        ipython.INTENV_REQUEST_FACTORY = lambda t, p: 'mock'
+
+        # should return the (new) api key for the above user (see the template above)
+        response = ipython._render( {}, trans=trans )
+        response = response.strip()
+        self.assertIsInstance( response, string_types )
+        self.assertTrue( '-' in response )
+        ie_request, api_key = response.split( '-' )
+
+        self.assertEqual( ie_request, 'mock' )
+
+        match = re.match( r'[a-f0-9]{32}', api_key )
+        self.assertIsNotNone( match )
+        self.assertEqual( match.span(), ( 0, 32 ) )
+
+        mock_app_dir.remove()
+
+    def test_script_entry( self ):
+        """"""
+        script_entry_config = utility.clean_multiline_string( """\
+        <?xml version="1.0" encoding="UTF-8"?>
+        <visualization name="js-test">
+            <data_sources>
+                <data_source>
+                    <model_class>HistoryDatasetAssociation</model_class>
+                </data_source>
+            </data_sources>
+            <entry_point entry_point_type="script" data-main="one" src="bler"></entry_point>
+        </visualization>
+        """ )
+
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'jstest': {
+                    'config': {
+                        'jstest.xml': script_entry_config
+                    },
+                    'static': {}
+                },
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = VisualizationsRegistry( mock_app,
+            directories_setting='plugins',
+            template_cache_dir=template_cache_dir )
+        script_entry = plugin_mgr.plugins[ 'jstest' ]
+
+        self.assertIsInstance( script_entry, plugin.ScriptVisualizationPlugin )
+        self.assertEqual( script_entry.name, 'jstest' )
+        self.assertTrue(  script_entry.serves_static )
+        self.assertTrue(  script_entry.serves_templates )
+        self.assertEqual( script_entry.static_path, os.path.join( script_entry.path, 'static' ) )
+
+        trans = galaxy_mock.MockTrans()
+        script_entry._set_up_template_plugin( mock_app_dir.root_path, [ additional_templates_dir ] )
+        response = script_entry._render( {}, trans=trans, embedded=True )
+        # print response
+        self.assertTrue( 'src="bler"' in response )
+        self.assertTrue( 'type="text/javascript"' in response )
+        self.assertTrue( 'data-main="one"' in response )
+        mock_app_dir.remove()
+
+
+# -----------------------------------------------------------------------------
+# TODO: config parser tests (in separate file)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/unit/web/__init__.py b/test/unit/web/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/web/base/__init__.py b/test/unit/web/base/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/web/base/test_HookPluginManager.py b/test/unit/web/base/test_HookPluginManager.py
new file mode 100644
index 0000000..0d7b2c1
--- /dev/null
+++ b/test/unit/web/base/test_HookPluginManager.py
@@ -0,0 +1,255 @@
+"""
+"""
+import logging
+import os
+import sys
+import types
+import unittest
+
+from galaxy.web.base.pluginframework import HookPluginManager
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock
+
+log = logging.getLogger( __name__ )
+
+# ----------------------------------------------------------------------------- globals
+loading_point = HookPluginManager.loading_point_filename
+
+contents1 = """
+import os
+
+def bler( x, y=3 ):
+    return ( x, y )
+"""
+
+contents2 = """
+raise Exception( 'Bler' )
+"""
+
+contents3 = """
+import contents1
+
+def blah( w ):
+    return tuple( [ w ] + list( contents1.bler( 2 ) ) )
+"""
+
+contents4 = """
+from galaxy import util
+
+def blah( s ):
+    return util.listify( s )
+"""
+
+contents5 = """
+def hook_blah( s ):
+    return s.title()
+
+def hook_filter_test( s ):
+    s += ' one'
+    return s
+"""
+
+contents6 = """
+def hook_blah( s ):
+    return s.upper()
+
+def hook_filter_test( s ):
+    s += ' two'
+    return s
+"""
+
+contents7 = """
+def hook_blah( s ):
+    raise Exception( 'bler' )
+
+def hook_filter_test( s ):
+    raise Exception( 'bler' )
+"""
+
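+# As exercised below: plugins expose functions named 'hook_<name>';
+# run_hook( name, arg ) collects each plugin's return value into a dict keyed
+# by plugin name, while filter_hook( name, arg ) threads the argument through
+# every plugin in turn, each one receiving the previous plugin's result.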
+
+# -----------------------------------------------------------------------------
+class HookPluginManager_TestCase( unittest.TestCase ):
+
+    def test_loading_point( self ):
+        """should attempt load on dirs containing loading_point file"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    loading_point: contents1
+                },
+                'not_a_plugin': 'blerbler'
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins' )
+
+        app_path = mock_app_dir.root_path
+        expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( list(plugin_mgr.plugins.keys()), [ 'plugin1' ] )
+
+        plugin = plugin_mgr.plugins[ 'plugin1' ]
+        self.assertEqual( plugin.name, 'plugin1' )
+        self.assertEqual( plugin.path, os.path.join( expected_plugins_path, 'plugin1' ) )
+        self.assertIsInstance( plugin.module, types.ModuleType )
+        self.assertEqual( plugin.module.bler( 2 ), ( 2, 3 ) )
+
+        mock_app_dir.remove()
+
+    def test_bad_loading_points( self ):
+        """should NOT attempt load on dirs NOT containing loading_point file"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {},
+                'plugin2': {
+                    'plogin.py': 'wot'
+                }
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins' )
+
+        app_path = mock_app_dir.root_path
+        expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( list(plugin_mgr.plugins.keys()), [] )
+
+        mock_app_dir.remove()
+
+    def test_bad_import( self ):
+        """should error gracefully (skip) on bad import"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    loading_point: contents2
+                }
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins' )
+
+        app_path = mock_app_dir.root_path
+        expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( list(plugin_mgr.plugins.keys()), [] )
+
+        mock_app_dir.remove()
+
+    def test_import_w_rel_import( self ):
+        """should allow loading_point to rel. import other modules"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    'contents1.py': contents1,
+                    loading_point: contents3
+                }
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False )
+
+        app_path = mock_app_dir.root_path
+        expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( list(plugin_mgr.plugins.keys()), [ 'plugin1' ] )
+
+        plugin = plugin_mgr.plugins[ 'plugin1' ]
+        self.assertEqual( plugin.name, 'plugin1' )
+        self.assertEqual( plugin.path, os.path.join( expected_plugins_path, 'plugin1' ) )
+        self.assertIsInstance( plugin.module, types.ModuleType )
+        self.assertEqual( plugin.module.blah( 1 ), ( 1, 2, 3 ) )
+
+        mock_app_dir.remove()
+
+    def test_import_w_galaxy_import( self ):
+        """should allow loading_point to rel. import GALAXY modules"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    loading_point: contents4
+                }
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False )
+
+        app_path = mock_app_dir.root_path
+        expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( list(plugin_mgr.plugins.keys()), [ 'plugin1' ] )
+
+        plugin = plugin_mgr.plugins[ 'plugin1' ]
+        self.assertEqual( plugin.name, 'plugin1' )
+        self.assertEqual( plugin.path, os.path.join( expected_plugins_path, 'plugin1' ) )
+        self.assertIsInstance( plugin.module, types.ModuleType )
+
+        self.assertEqual( plugin.module.blah( 'one,two' ), [ 'one', 'two' ] )
+
+        mock_app_dir.remove()
+
+    def test_run_hooks( self ):
+        """should run hooks of loaded plugins"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    loading_point: contents5
+                },
+                'plugin2': {
+                    loading_point: contents6
+                }
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False )
+        self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2' ] )
+
+        return_val_dict = plugin_mgr.run_hook( 'blah', 'one two check' )
+        self.assertEqual( return_val_dict, { 'plugin1': 'One Two Check', 'plugin2': 'ONE TWO CHECK' } )
+
+        result = plugin_mgr.filter_hook( 'filter_test', 'check' )
+        self.assertEqual( result, 'check one two' )
+
+        mock_app_dir.remove()
+
+    def test_hook_errs( self ):
+        """should fail gracefully if hook fails (and continue with other plugins)"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    loading_point: contents5
+                },
+                'plugin2': {
+                    loading_point: contents6
+                },
+                'plugin3': {
+                    loading_point: contents7
+                }
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = HookPluginManager( mock_app, directories_setting='plugins', skip_bad_plugins=False )
+        self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2', 'plugin3' ] )
+
+        return_val_dict = plugin_mgr.run_hook( 'blah', 'one two check' )
+        self.assertEqual( return_val_dict, { 'plugin1': 'One Two Check', 'plugin2': 'ONE TWO CHECK' } )
+
+        result = plugin_mgr.filter_hook( 'filter_test', 'check' )
+        self.assertEqual( result, 'check one two' )
+
+        mock_app_dir.remove()
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/unit/web/base/test_PageServingPluginManager.py b/test/unit/web/base/test_PageServingPluginManager.py
new file mode 100644
index 0000000..ba4079a
--- /dev/null
+++ b/test/unit/web/base/test_PageServingPluginManager.py
@@ -0,0 +1,126 @@
+"""
+"""
+import logging
+import os
+import sys
+import unittest
+
+from galaxy.web.base.pluginframework import PageServingPluginManager
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock
+
+log = logging.getLogger( __name__ )
+
+# ----------------------------------------------------------------------------- globals
+contents1 = """${what} ${you} ${say}"""
+
+
+# -----------------------------------------------------------------------------
+class PageServingPluginManager_TestCase( unittest.TestCase ):
+
+    def test_plugin_load( self ):
+        """should attempt load if criteria met"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    'templates': {},
+                    'static': {}
+                },
+                'plugin2': {
+                    'static': {}
+                },
+                'plugin3': {
+                    'templates': {}
+                },
+                'not_a_plugin1': 'blerbler',
+                'not_a_plugin2': {},
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = PageServingPluginManager( mock_app, 'test', directories_setting='plugins' )
+
+        app_path = mock_app_dir.root_path
+        expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+        self.assertEqual( plugin_mgr.base_url, 'test' )
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2', 'plugin3' ] )
+
+        plugin1 = plugin_mgr.plugins[ 'plugin1' ]
+        self.assertEqual( plugin1.name, 'plugin1' )
+        self.assertEqual( plugin1.path, os.path.join( expected_plugins_path, 'plugin1' ) )
+        self.assertEqual( plugin1.base_url, '/'.join([ plugin_mgr.base_url, plugin1.name ]) )
+        self.assertTrue( plugin1.serves_static )
+        self.assertEqual( plugin1.static_path, os.path.join( plugin1.path, 'static' ) )
+        self.assertEqual( plugin1.static_url, '/'.join([ plugin1.base_url, 'static' ]) )
+        self.assertTrue( plugin1.serves_templates )
+        self.assertEqual( plugin1.template_path, os.path.join( plugin1.path, 'templates' ) )
+        self.assertEqual( plugin1.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+        plugin2 = plugin_mgr.plugins[ 'plugin2' ]
+        self.assertEqual( plugin2.name, 'plugin2' )
+        self.assertEqual( plugin2.path, os.path.join( expected_plugins_path, 'plugin2' ) )
+        self.assertEqual( plugin2.base_url, '/'.join([ plugin_mgr.base_url, plugin2.name ]) )
+        self.assertTrue( plugin2.serves_static )
+        self.assertEqual( plugin2.static_path, os.path.join( plugin2.path, 'static' ) )
+        self.assertEqual( plugin2.static_url, '/'.join([ plugin2.base_url, 'static' ]) )
+        self.assertFalse( plugin2.serves_templates )
+
+        plugin3 = plugin_mgr.plugins[ 'plugin3' ]
+        self.assertEqual( plugin3.name, 'plugin3' )
+        self.assertEqual( plugin3.path, os.path.join( expected_plugins_path, 'plugin3' ) )
+        self.assertEqual( plugin3.base_url, '/'.join([ plugin_mgr.base_url, plugin3.name ]) )
+        self.assertFalse( plugin3.serves_static )
+        self.assertTrue( plugin3.serves_templates )
+        self.assertEqual( plugin3.template_path, os.path.join( plugin3.path, 'templates' ) )
+        self.assertEqual( plugin3.template_lookup.__class__.__name__, 'TemplateLookup' )
+
+        mock_app_dir.remove()
+
+    def test_plugin_static_map( self ):
+        """"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    'templates': {},
+                    'static': {}
+                }
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = PageServingPluginManager( mock_app, 'test', directories_setting='plugins' )
+
+        self.assertEqual( list(plugin_mgr.plugins.keys()), [ 'plugin1' ] )
+        plugin = plugin_mgr.plugins[ 'plugin1' ]
+        self.assertEqual( plugin_mgr.get_static_urls_and_paths(), [( plugin.static_url, plugin.static_path )] )
+
+        mock_app_dir.remove()
+
+    def test_plugin_templates( self ):
+        """"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {
+                    'templates': {
+                        'test.mako': contents1
+                    },
+                }
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = PageServingPluginManager( mock_app, 'test', directories_setting='plugins' )
+
+        self.assertEqual( list(plugin_mgr.plugins.keys()), [ 'plugin1' ] )
+        plugin = plugin_mgr.plugins[ 'plugin1' ]
+        rendered = plugin_mgr.fill_template( galaxy_mock.MockTrans(), plugin, 'test.mako',
+            what='Hey', you='Ho', say='HeyHey HoHo' )
+        self.assertEqual( rendered, 'Hey Ho HeyHey HoHo' )
+
+        mock_app_dir.remove()
+
+
+if __name__ == '__main__':
+    unittest.main()
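
The three plugins above differ only in which subdirectories they contain, and
the assertions imply that static and template serving are enabled by the mere
presence of 'static' and 'templates' directories. A sketch of that detection
under those assumptions (describe_plugin and the dict shape are illustrative,
not Galaxy's API):

    import os

    def describe_plugin( plugins_path, name, base_url ):
        path = os.path.join( plugins_path, name )
        info = {
            'name': name,
            'path': path,
            'base_url': '/'.join([ base_url, name ]),
            # capability flags follow directly from which subdirectories exist
            'serves_static': os.path.isdir( os.path.join( path, 'static' ) ),
            'serves_templates': os.path.isdir( os.path.join( path, 'templates' ) ),
        }
        if info[ 'serves_static' ]:
            info[ 'static_path' ] = os.path.join( path, 'static' )
            info[ 'static_url' ] = '/'.join([ info[ 'base_url' ], 'static' ])
        if info[ 'serves_templates' ]:
            info[ 'template_path' ] = os.path.join( path, 'templates' )
        return info
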
diff --git a/test/unit/web/base/test_PluginManager.py b/test/unit/web/base/test_PluginManager.py
new file mode 100644
index 0000000..cb16877
--- /dev/null
+++ b/test/unit/web/base/test_PluginManager.py
@@ -0,0 +1,98 @@
+"""
+Unit tests for ``galaxy.web.base.pluginframework.PluginManager``
+"""
+import logging
+import os
+import sys
+import unittest
+
+from galaxy.web.base.pluginframework import PluginManager
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock
+
+log = logging.getLogger( __name__ )
+
+
+class PluginManager_TestCase( unittest.TestCase ):
+
+    def test_rel_path_search( self ):
+        """should be able to search given rel. path"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {},
+                'plugin2': {},
+                'file1': 'blerbler'
+            }
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = PluginManager( mock_app, directories_setting='plugins' )
+
+        app_path = mock_app_dir.root_path
+        expected_plugins_path = os.path.join( app_path, 'plugins' )
+
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2' ] )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_path, 'plugin1' ) )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].path, os.path.join( expected_plugins_path, 'plugin2' ) )
+
+        mock_app_dir.remove()
+
+    def test_abs_path_search( self ):
+        """should be able to search given abs. path"""
+        mock_app_dir = galaxy_mock.MockDir({})
+        mock_plugin_dir = galaxy_mock.MockDir({
+            'plugin1': {},
+            'plugin2': {},
+            'file1': 'blerbler'
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        plugin_mgr = PluginManager( mock_app, directories_setting=mock_plugin_dir.root_path )
+        expected_plugins_path = mock_plugin_dir.root_path
+
+        self.assertEqual( plugin_mgr.directories, [ expected_plugins_path ] )
+        self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2' ] )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_path, 'plugin1' ) )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].path, os.path.join( expected_plugins_path, 'plugin2' ) )
+
+    def test_multiple_dirs( self ):
+        """should search in multiple directories"""
+        mock_app_dir = galaxy_mock.MockDir({
+            'plugins': {
+                'plugin1': {},
+                'plugin2': {},
+                'file1': 'blerbler'
+            }
+        })
+        mock_abs_plugin_dir = galaxy_mock.MockDir({
+            'plugin3': {},
+            'plugin4': {},
+            'file2': 'blerbler'
+        })
+        mock_app = galaxy_mock.MockApp( root=mock_app_dir.root_path )
+        directories_setting = ','.join([ 'plugins', mock_abs_plugin_dir.root_path ])
+        plugin_mgr = PluginManager( mock_app, directories_setting=directories_setting )
+
+        app_path = mock_app_dir.root_path
+        expected_plugins_rel_path = os.path.join( app_path, 'plugins' )
+        expected_plugins_abs_path = mock_abs_plugin_dir.root_path
+
+        self.assertEqual( sorted(plugin_mgr.directories), sorted([ expected_plugins_rel_path, expected_plugins_abs_path ]) )
+        self.assertEqual( sorted(plugin_mgr.plugins.keys()), [ 'plugin1', 'plugin2', 'plugin3', 'plugin4' ] )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].name, 'plugin1' )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin1' ].path, os.path.join( expected_plugins_rel_path, 'plugin1' ) )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].name, 'plugin2' )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin2' ].path, os.path.join( expected_plugins_rel_path, 'plugin2' ) )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin3' ].name, 'plugin3' )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin3' ].path, os.path.join( expected_plugins_abs_path, 'plugin3' ) )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin4' ].name, 'plugin4' )
+        self.assertEqual( plugin_mgr.plugins[ 'plugin4' ].path, os.path.join( expected_plugins_abs_path, 'plugin4' ) )
+
+
+if __name__ == '__main__':
+    unittest.main()
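
Together the three tests describe how directories_setting appears to be
resolved: a comma-separated list whose relative entries are joined to the app
root and whose absolute entries are used as-is. A sketch under exactly those
assumptions:

    import os

    def resolve_plugin_directories( app_root, directories_setting ):
        directories = []
        for entry in directories_setting.split( ',' ):
            entry = entry.strip()
            if not entry:
                continue
            if not os.path.isabs( entry ):
                # relative entries resolve against the app root
                entry = os.path.join( app_root, entry )
            directories.append( entry )
        return directories
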
diff --git a/test/unit/web/framework/__init__.py b/test/unit/web/framework/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/web/framework/test_webapp.py b/test/unit/web/framework/test_webapp.py
new file mode 100644
index 0000000..c1d2d0f
--- /dev/null
+++ b/test/unit/web/framework/test_webapp.py
@@ -0,0 +1,121 @@
+"""
+Unit tests for ``galaxy.web.framework.webapp``
+"""
+import logging
+import os
+import re
+import sys
+import unittest
+
+import galaxy.config
+from galaxy.web.framework import webapp as Webapp
+
+unit_root = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) )
+sys.path.insert( 1, unit_root )
+from unittest_utils import galaxy_mock
+
+log = logging.getLogger( __name__ )
+
+
+class StubGalaxyWebTransaction( Webapp.GalaxyWebTransaction ):
+    def _ensure_valid_session( self, session_cookie, create=True ):
+        pass
+
+
+class CORSParsingMockConfig( galaxy_mock.MockAppConfig ):
+    # we can't use the actual Configuration for parsing*, so steal the parser for the mock instead
+    # *it causes problems because its change to tempfile.tempdir persists across tests
+    _parse_allowed_origin_hostnames = galaxy.config.Configuration._parse_allowed_origin_hostnames.__func__
+
+    def __init__( self, **kwargs ):
+        super( CORSParsingMockConfig, self ).__init__( **kwargs )
+        self.allowed_origin_hostnames = self._parse_allowed_origin_hostnames( kwargs )
+
+
+class GalaxyWebTransaction_Headers_TestCase( unittest.TestCase ):
+
+    def _new_trans( self, allowed_origin_hostnames=None ):
+        app = galaxy_mock.MockApp()
+        app.config = CORSParsingMockConfig(
+            allowed_origin_hostnames=allowed_origin_hostnames
+        )
+        webapp = galaxy_mock.MockWebapp()
+        environ = galaxy_mock.buildMockEnviron()
+        trans = StubGalaxyWebTransaction( environ, app, webapp )
+        return trans
+
+    def assert_cors_header_equals( self, headers, should_be ):
+        self.assertEqual( headers.get( 'access-control-allow-origin', None ), should_be )
+
+    def assert_cors_header_missing( self, headers ):
+        self.assertFalse( 'access-control-allow-origin' in headers )
+
+    def test_parse_allowed_origin_hostnames( self ):
+        """Should return a list of (possibly) mixed strings and regexps"""
+        config = CORSParsingMockConfig()
+
+        # falsy listify value should return None
+        self.assertEqual( config._parse_allowed_origin_hostnames({
+            "allowed_origin_hostnames": ""
+        }), None )
+
+        # should parse regex if using fwd slashes, string otherwise
+        hostnames = config._parse_allowed_origin_hostnames({
+            "allowed_origin_hostnames": "/host\d{2}/,geocities.com,miskatonic.edu"
+        })
+        self.assertTrue( isinstance( hostnames[0], re._pattern_type ) )
+        self.assertTrue( isinstance( hostnames[1], str ) )
+        self.assertTrue( isinstance( hostnames[2], str ) )
+
+    def test_default_set_cors_headers( self ):
+        """No CORS headers should be set (or even checked) by default"""
+        trans = self._new_trans( allowed_origin_hostnames=None )
+        self.assertTrue( isinstance( trans, Webapp.GalaxyWebTransaction ) )
+
+        trans.request.headers[ 'Origin' ] = 'http://lisaskelprecipes.pinterest.com?id=kelpcake'
+        trans.set_cors_headers()
+        self.assert_cors_header_missing( trans.response.headers )
+
+    def test_set_cors_headers( self ):
+        """Origin should be echo'd when it matches an allowed hostname"""
+        # an asterisk is a special 'allow all' string
+        trans = self._new_trans( allowed_origin_hostnames='*,beep.com' )
+        trans.request.headers[ 'Origin' ] = 'http://xxdarkhackerxx.disney.com'
+        trans.set_cors_headers()
+        self.assert_cors_header_equals( trans.response.headers, 'http://xxdarkhackerxx.disney.com' )
+
+        # subdomains should pass
+        trans = self._new_trans( allowed_origin_hostnames='something.com,/^[\w\.]*beep\.com/' )
+        trans.request.headers[ 'Origin' ] = 'http://boop.beep.com'
+        trans.set_cors_headers()
+        self.assert_cors_header_equals( trans.response.headers, 'http://boop.beep.com' )
+
+        # ports should work
+        trans = self._new_trans( allowed_origin_hostnames='somethingelse.com,/^[\w\.]*beep\.com/' )
+        trans.request.headers[ 'Origin' ] = 'http://boop.beep.com:8080'
+        trans.set_cors_headers()
+        self.assert_cors_header_equals( trans.response.headers, 'http://boop.beep.com:8080' )
+
+        # localhost should work
+        trans = self._new_trans( allowed_origin_hostnames='/localhost/' )
+        trans.request.headers[ 'Origin' ] = 'http://localhost:8080'
+        trans.set_cors_headers()
+        self.assert_cors_header_equals( trans.response.headers, 'http://localhost:8080' )
+
+        # spoofing shouldn't be easy
+        trans.response.headers = {}
+        trans.request.headers[ 'Origin' ] = 'http://localhost.badstuff.tv'
+        trans.set_cors_headers()
+        self.assert_cors_header_missing( trans.response.headers )
+
+        # unicode should work
+        trans = self._new_trans( allowed_origin_hostnames='/öbb\.at/' )
+        trans.request.headers[ 'Origin' ] = 'http://öbb.at'
+        trans.set_cors_headers()
+        self.assertEqual(
+            trans.response.headers[ 'access-control-allow-origin' ], 'http://öbb.at'
+        )
+
+
+if __name__ == '__main__':
+    unittest.main()
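
Taken together these cases constrain the matching logic: '*' allows any
origin, a plain string entry must equal the request's hostname, a regex entry
must match the whole hostname (otherwise localhost.badstuff.tv would slip past
/localhost/), and the matching Origin is echoed back verbatim, scheme and port
included. A checker consistent with the assertions might look like this
(origin_allowed is illustrative; the full-hostname anchoring is inferred from
the spoofing case, not read from Galaxy's source):

    try:
        from urllib.parse import urlparse
    except ImportError:
        from urlparse import urlparse  # Python 2, which this release targets

    def origin_allowed( origin, allowed_hostnames ):
        hostname = urlparse( origin ).hostname
        for allowed in allowed_hostnames:
            if allowed == '*':
                return True
            if hasattr( allowed, 'match' ):
                # compiled regex entry: must cover the entire hostname
                match = allowed.match( hostname )
                if match and match.end() == len( hostname ):
                    return True
            elif allowed == hostname:
                return True
        return False
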
diff --git a/test/unit/workflows/__init__.py b/test/unit/workflows/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test/unit/workflows/test_extract_summary.py b/test/unit/workflows/test_extract_summary.py
new file mode 100644
index 0000000..5f7abd4
--- /dev/null
+++ b/test/unit/workflows/test_extract_summary.py
@@ -0,0 +1,154 @@
+import unittest
+
+from galaxy import model
+from galaxy.workflow import extract
+
+UNDEFINED_JOB = object()
+
+
+class TestWorkflowExtractSummary( unittest.TestCase ):
+
+    def setUp( self ):
+        self.history = MockHistory()
+        self.trans = MockTrans( self.history )
+
+    def test_empty_history( self ):
+        job_dict, warnings = extract.summarize( trans=self.trans )
+        assert not warnings
+        assert not job_dict
+
+    def test_summarize_returns_name_and_dataset_list( self ):
+        # Create two jobs and three datasets, then verify they are grouped
+        # by job correctly with the correct output names.
+        hda1 = MockHda()
+        self.history.active_datasets.append( hda1 )
+        hda2 = MockHda( job=hda1.job, output_name="out2" )
+        self.history.active_datasets.append( hda2 )
+        hda3 = MockHda( output_name="out3" )
+        self.history.active_datasets.append( hda3 )
+
+        job_dict, warnings = extract.summarize( trans=self.trans )
+        assert len( job_dict ) == 2
+        assert not warnings
+        self.assertEqual( job_dict[ hda1.job ], [ ( 'out1', hda1 ), ( 'out2', hda2 ) ] )
+        self.assertEqual( job_dict[ hda3.job ], [ ( 'out3', hda3 ) ] )
+
+    def test_finds_original_job_if_copied( self ):
+        hda = MockHda()
+        derived_hda_1 = MockHda()
+        derived_hda_1.copied_from_history_dataset_association = hda
+        derived_hda_2 = MockHda()
+        derived_hda_2.copied_from_history_dataset_association = derived_hda_1
+        self.history.active_datasets.append( derived_hda_2 )
+        job_dict, warnings = extract.summarize( trans=self.trans )
+        assert not warnings
+        assert len( job_dict ) == 1
+        self.assertEqual( job_dict[ hda.job ], [ ('out1', derived_hda_2 ) ] )
+
+    def test_fake_job_hda( self ):
+        """ Fakes job if creating_job_associations is empty.
+        """
+        hda = MockHda( job=UNDEFINED_JOB )
+        self.history.active_datasets.append( hda )
+        job_dict, warnings = extract.summarize( trans=self.trans )
+        assert not warnings
+        assert len( job_dict ) == 1
+        fake_job = job_dict.keys()[ 0 ]
+        assert fake_job.id.startswith( "fake_" )
+        datasets = job_dict.values()[ 0 ]
+        assert datasets == [ ( None, hda ) ]
+
+    def test_fake_job_hdca( self ):
+        hdca = MockHdca( )
+        self.history.active_datasets.append( hdca )
+        job_dict, warnings = extract.summarize( trans=self.trans )
+        assert not warnings
+        assert len( job_dict ) == 1
+        fake_job = job_dict.keys()[ 0 ]
+        assert fake_job.id.startswith( "fake_" )
+        assert fake_job.is_fake
+        content_instances = job_dict.values()[ 0 ]
+        assert content_instances == [ ( None, hdca ) ]
+
+    def test_implicit_map_job_hdca( self ):
+        creating_job = model.Job()
+        hdca = MockHdca( implicit_output_name="out1", job=creating_job )
+        self.history.active_datasets.append( hdca )
+        job_dict, warnings = extract.summarize( trans=self.trans )
+        assert not warnings
+        assert len( job_dict ) == 1
+        job = job_dict.keys()[ 0 ]
+        assert job is creating_job
+
+    def test_warns_and_skips_datasets_if_not_finished( self ):
+        hda = MockHda( state='queued' )
+        self.history.active_datasets.append( hda )
+        job_dict, warnings = extract.summarize( trans=self.trans )
+        assert warnings
+        assert len( job_dict ) == 0
+
+
+class MockHistory( object ):
+
+    def __init__( self ):
+        self.active_datasets = []
+
+    @property
+    def active_contents( self ):
+        return self.active_datasets
+
+
+class MockTrans( object ):
+
+    def __init__( self, history ):
+        self.history = history
+
+    def get_history( self ):
+        return self.history
+
+
+class MockHda( object ):
+
+    def __init__( self, state='ok', output_name='out1', job=None ):
+        self.id = 123
+        self.state = state
+        self.copied_from_history_dataset_association = None
+        self.history_content_type = "dataset"
+        if job is not UNDEFINED_JOB:
+            if not job:
+                job = model.Job()
+            self.job = job
+            assoc = model.JobToOutputDatasetAssociation( output_name, self )
+            assoc.job = job
+            self.creating_job_associations = [ assoc ]
+        else:
+            self.creating_job_associations = []
+
+
+class MockHdca( object ):
+
+    def __init__( self, implicit_output_name=None, job=None, hid=1 ):
+        self.id = 124
+        self.copied_from_history_dataset_collection_association = None
+        self.history_content_type = "dataset_collection"
+        self.implicit_output_name = implicit_output_name
+        self.hid = hid
+        self.collection = model.DatasetCollection()
+        self.creating_job_associations = []
+        element = model.DatasetCollectionElement(
+            collection=self.collection,
+            element=model.HistoryDatasetAssociation(),
+            element_index=0,
+            element_identifier="moocow",
+        )
+        element.dataset_instance.dataset = model.Dataset()
+        element.dataset_instance.dataset.state = "ok"
+        creating = model.JobToOutputDatasetAssociation(
+            implicit_output_name,
+            element.dataset_instance,
+        )
+        creating.job = job
+        element.dataset_instance.creating_job_associations = [
+            creating,
+        ]
+        self.collection.elements = [element]
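
Read as a specification, these tests say extract.summarize should: warn about
and skip unfinished datasets, follow copied_from chains back to the original,
group history contents by their creating job, and invent a fake job for
contents with no creating_job_associations. A rough sketch consistent with the
mocks above (FakeJob, the single-association assumption, and the omission of
collection-specific copy chains are all simplifications):

    class FakeJob( object ):
        # mirrors the "fake_" id prefix asserted in test_fake_job_hda
        is_fake = True

        def __init__( self, content ):
            self.id = "fake_%s" % content.id

    def summarize_sketch( history ):
        job_dict, warnings = {}, set()
        for content in history.active_contents:
            if getattr( content, 'state', 'ok' ) != 'ok':
                warnings.add( "Some datasets still queued or running were ignored" )
                continue
            # walk back to the original dataset the copies derive from
            original = content
            while getattr( original, 'copied_from_history_dataset_association', None ):
                original = original.copied_from_history_dataset_association
            if original.creating_job_associations:
                assoc = original.creating_job_associations[ 0 ]
                job_dict.setdefault( assoc.job, [] ).append( ( assoc.name, content ) )
            else:
                job_dict[ FakeJob( content ) ] = [ ( None, content ) ]
        return job_dict, warnings
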
diff --git a/test/unit/workflows/test_modules.py b/test/unit/workflows/test_modules.py
new file mode 100644
index 0000000..734caf3
--- /dev/null
+++ b/test/unit/workflows/test_modules.py
@@ -0,0 +1,358 @@
+import json
+
+import mock
+
+from galaxy import model
+from galaxy.util import bunch
+from galaxy.workflow import modules
+
+from .workflow_support import MockTrans, yaml_to_model
+
+
+def test_input_has_no_errors():
+    trans = MockTrans()
+    input_step_module = modules.module_factory.new( trans, "data_input" )
+    assert not input_step_module.get_errors()
+
+
+def test_valid_new_tool_has_no_errors():
+    trans = MockTrans()
+    mock_tool = mock.Mock()
+    trans.app.toolbox.tools[ "cat1" ] = mock_tool
+    tool_module = modules.module_factory.new( trans, "tool", content_id="cat1" )
+    assert not tool_module.get_errors()
+
+
+def test_data_input_default_state():
+    trans = MockTrans()
+    module = modules.module_factory.new( trans, "data_input" )
+    __assert_has_runtime_input( module, label="Input Dataset" )
+
+
+def test_data_input_modified_state():
+    module = __from_state( {
+        "type": "data_input",
+        "tool_state": json.dumps({ "name": "Cool Input" }),
+    } )
+    __assert_has_runtime_input( module, label="Cool Input" )
+
+
+def test_data_input_step_modified_state():
+    module = __from_step(
+        type="data_input",
+        tool_inputs={
+            "name": "Cool Input",
+        },
+    )
+    __assert_has_runtime_input( module, label="Cool Input" )
+
+
+def test_data_input_compute_runtime_state_default():
+    module = __from_step(
+        type="data_input",
+    )
+    state, errors = module.compute_runtime_state( module.trans )
+    assert not errors
+    assert 'input' in state.inputs
+    assert state.inputs[ 'input' ] is None
+
+
+def test_data_input_compute_runtime_state_args():
+    module = __from_step(
+        type="data_input",
+    )
+    tool_state = module.get_state()
+
+    hda = model.HistoryDatasetAssociation()
+    with mock.patch('galaxy.workflow.modules.check_param') as check_method:
+        check_method.return_value = ( hda, None )
+        state, errors = module.compute_runtime_state( module.trans, { 'input': 4, 'tool_state': tool_state } )
+
+    assert not errors
+    assert 'input' in state.inputs
+    assert state.inputs[ 'input' ] is hda
+
+
+def test_data_input_connections():
+    module = __from_step(
+        type="data_input",
+    )
+    assert len( module.get_data_inputs() ) == 0
+
+    outputs = module.get_data_outputs()
+    assert len( outputs ) == 1
+    output = outputs[ 0 ]
+    assert output[ 'name' ] == 'output'
+    assert output[ 'extensions' ] == [ 'input' ]
+
+
+def test_data_input_update():
+    module = __from_step(
+        type="data_input",
+        tool_inputs={
+            "name": "Cool Input",
+        },
+    )
+    module.update_state( dict( name="Awesome New Name" ) )
+    assert module.state[ 'name' ] == "Awesome New Name"
+
+
+def test_data_input_get_form():
+    module = __from_step(
+        type="data_input",
+        tool_inputs={
+            "name": "Cool Input",
+        },
+    )
+
+    def test_form( template, **kwds ):
+        assert template == "workflow/editor_generic_form.mako"
+        assert "form" in kwds
+        assert len( kwds[ "form" ].inputs ) == 1
+        return "TEMPLATE"
+
+    fill_mock = mock.Mock( side_effect=test_form )
+    module.trans.fill_template = fill_mock
+    assert module.get_config_form() == "TEMPLATE"
+
+
+def test_data_collection_input_default_state():
+    trans = MockTrans()
+    module = modules.module_factory.new( trans, "data_collection_input" )
+    __assert_has_runtime_input( module, label="Input Dataset Collection", collection_type="list" )
+
+
+def test_data_input_collection_modified_state():
+    module = __from_state( {
+        "type": "data_collection_input",
+        "tool_state": json.dumps({ "name": "Cool Input Collection", "collection_type": "list:paired" }),
+    } )
+    __assert_has_runtime_input( module, label="Cool Input Collection", collection_type="list:paired" )
+
+
+def test_data_input_collection_step_modified_state():
+    module = __from_step(
+        type="data_collection_input",
+        tool_inputs={
+            "name": "Cool Input Collection",
+            "collection_type": "list:paired",
+        },
+    )
+    __assert_has_runtime_input( module, label="Cool Input Collection", collection_type="list:paired" )
+
+
+def test_data_collection_input_connections():
+    module = __from_step(
+        type="data_collection_input",
+        tool_inputs={
+            'collection_type': 'list:paired'
+        }
+    )
+    assert len( module.get_data_inputs() ) == 0
+
+    outputs = module.get_data_outputs()
+    assert len( outputs ) == 1
+    output = outputs[ 0 ]
+    assert output[ 'name' ] == 'output'
+    assert output[ 'extensions' ] == [ 'input_collection' ]
+    assert output[ 'collection_type' ] == 'list:paired'
+
+
+def test_data_collection_input_update():
+    module = __from_step(
+        type="data_collection_input",
+        tool_inputs={
+            'name': 'Cool Collection',
+            'collection_type': 'list:paired',
+        }
+    )
+    module.update_state( dict( name="New Collection", collection_type="list" ) )
+    assert module.state[ 'name' ] == "New Collection"
+
+
+def test_data_collection_input_config_form():
+    module = __from_step(
+        type="data_collection_input",
+        tool_inputs={
+            'name': 'Cool Collection',
+            'collection_type': 'list:paired',
+        }
+    )
+
+    def test_form( template, **kwds ):
+        assert template == "workflow/editor_generic_form.mako"
+        assert "form" in kwds
+        assert len( kwds[ "form" ].inputs ) == 2
+        return "TEMPLATE"
+
+    fill_mock = mock.Mock( side_effect=test_form )
+    module.trans.fill_template = fill_mock
+    assert module.get_config_form() == "TEMPLATE"
+
+
+def test_cannot_create_tool_modules_for_missing_tools():
+    trans = MockTrans()
+    exception = False
+    try:
+        modules.module_factory.new( trans, "tool", content_id="cat1" )
+    except Exception:
+        exception = True
+    assert exception
+
+
+def test_updated_tool_version():
+    trans = MockTrans()
+    mock_tool = __mock_tool(id="cat1", version="0.9")
+    trans.app.toolbox.tools[ "cat1" ] = mock_tool
+    module = __from_step(
+        trans=trans,
+        type="tool",
+        tool_id="cat1",
+        tool_version="0.7",
+        config=None,
+    )
+    # Make sure there is a warning with the tool id, the old version,
+    # and the new version.
+    for val in "cat1", "0.7", "0.9":
+        assert val in module.version_changes[0]
+
+
+def test_tool_version_same():
+    trans = MockTrans()
+    mock_tool = __mock_tool(id="cat1", version="1.0")
+    trans.app.toolbox.tools[ "cat1" ] = mock_tool
+    module = __from_step(
+        trans=trans,
+        type="tool",
+        tool_id="cat1",
+        tool_version="1.0",
+        config=None,
+    )
+    assert not module.version_changes
+
+
+TEST_WORKFLOW_YAML = """
+steps:
+  - type: "data_input"
+    label: "input1"
+    tool_inputs: {"name": "input1"}
+  - type: "data_collection_input"
+    tool_inputs: {"name": "input2"}
+  - type: "tool"
+    tool_id: "cat1"
+    input_connections:
+    -  input_name: "input1"
+       "@output_step": 0
+       output_name: "output"
+  - type: "tool"
+    tool_id: "cat1"
+    input_connections:
+    -  input_name: "input1"
+       "@output_step": 0
+       output_name: "output"
+    workflow_outputs:
+    -   output_name: "out_file1"
+        label: "out1"
+  - type: "tool"
+    tool_id: "cat1"
+    input_connections:
+    -  input_name: "input1"
+       "@output_step": 2
+       output_name: "out_file1"
+    workflow_outputs:
+    -   output_name: "out_file1"
+"""
+
+
+def test_subworkflow_new_inputs():
+    subworkflow_module = __new_subworkflow_module()
+    inputs = subworkflow_module.get_data_inputs()
+    assert len(inputs) == 2, len(inputs)
+    input1, input2 = inputs
+    assert input1["input_type"] == "dataset"
+    assert input1["name"] == "input1"
+
+    assert input2["input_type"] == "dataset_collection"
+    assert input2["name"] == "input2", input2["name"]
+
+
+def test_subworkflow_new_outputs():
+    subworkflow_module = __new_subworkflow_module()
+    outputs = subworkflow_module.get_data_outputs()
+    assert len(outputs) == 2, len(outputs)
+    output1, output2 = outputs
+    assert output1["name"] == "out1"
+    assert output1["label"] == "out1"
+    assert output1["extensions"] == ["input"]
+
+    assert output2["name"] == "4:out_file1", output2["name"]
+    assert output2["label"] == "4:out_file1", output2["label"]
+
+
+def __new_subworkflow_module():
+    trans = MockTrans()
+    workflow = yaml_to_model(TEST_WORKFLOW_YAML)
+    stored_workflow = trans.save_workflow(workflow)
+    workflow_id = trans.app.security.encode_id(stored_workflow.id)
+    subworkflow_module = modules.module_factory.new( trans, "subworkflow", workflow_id )
+    return subworkflow_module
+
+
+def __assert_has_runtime_input( module, label=None, collection_type=None ):
+    inputs = module.get_runtime_inputs()
+    assert len( inputs ) == 1
+    assert "input" in inputs
+
+    input_param = inputs[ "input" ]
+    if label is not None:
+        assert input_param.get_label() == label, input_param.get_label()
+    if collection_type is not None:
+        assert input_param.collection_types == [collection_type]
+    return input_param
+
+
+def __from_state( state ):
+    trans = MockTrans()
+    module = modules.module_factory.from_dict( trans, state )
+    return module
+
+
+def __from_step( **kwds ):
+    if "trans" in kwds:
+        trans = kwds["trans"]
+        del kwds["trans"]
+    else:
+        trans = MockTrans()
+    step = __step(
+        **kwds
+    )
+    injector = modules.WorkflowModuleInjector( trans )
+    injector.inject( step )
+    module = step.module
+    module.test_step = step
+    return module
+
+
+def __step( **kwds ):
+    step = model.WorkflowStep()
+    for key, value in kwds.items():
+        setattr( step, key, value )
+
+    return step
+
+
+def __mock_tool(
+    id="cat1",
+    version="1.0",
+):
+    # For now this ignores inputs, params_from_strings, and
+    # check_and_update_param_values since we only have unit tests for version
+    # handling - but tests for all of this are needed longer term.
+    tool = bunch.Bunch(
+        id=id,
+        version=version,
+        inputs={},
+        params_from_strings=mock.Mock(),
+        check_and_update_param_values=mock.Mock(),
+    )
+    return tool
diff --git a/test/unit/workflows/test_render.py b/test/unit/workflows/test_render.py
new file mode 100644
index 0000000..b7afcd2
--- /dev/null
+++ b/test/unit/workflows/test_render.py
@@ -0,0 +1,69 @@
+from galaxy.workflow import render
+from .workflow_support import yaml_to_model
+
+TEST_WORKFLOW_YAML = """
+steps:
+  - type: "data_input"
+    order_index: 0
+    tool_inputs: {"name": "input1"}
+    input_connections: []
+    position: {"top": 3, "left": 3}
+  - type: "data_input"
+    order_index: 1
+    tool_inputs: {"name": "input2"}
+    input_connections: []
+    position: {"top": 6, "left": 4}
+  - type: "tool"
+    tool_id: "cat1"
+    order_index: 2
+    input_connections:
+    -  input_name: "input1"
+       "@output_step": 0
+       output_name: "di1"
+    position: {"top": 13, "left": 10}
+  - type: "tool"
+    tool_id: "cat1"
+    order_index: 3
+    input_connections:
+    -  input_name: "input1"
+       "@output_step": 0
+       output_name: "di1"
+    position: {"top": 33, "left": 103}
+"""
+
+
+def test_render():
+    # Doesn't check anything about the render code - just exercises it to
+    # ensure that obvious errors aren't thrown.
+    workflow_canvas = render.WorkflowCanvas()
+
+    workflow = yaml_to_model(TEST_WORKFLOW_YAML)
+    step_0, step_1, step_2, step_3 = workflow.steps
+
+    workflow_canvas.populate_data_for_step(
+        step_0,
+        "input1",
+        [],
+        [{"name": "di1"}],
+    )
+    workflow_canvas.populate_data_for_step(
+        step_1,
+        "input2",
+        [],
+        [{"name": "di1"}],
+    )
+    workflow_canvas.populate_data_for_step(
+        step_2,
+        "cat wrapper",
+        [{"name": "input1", "label": "i1"}],
+        [{"name": "out1"}]
+    )
+    workflow_canvas.populate_data_for_step(
+        step_3,
+        "cat wrapper",
+        [{"name": "input1", "label": "i1"}],
+        [{"name": "out1"}]
+    )
+    workflow_canvas.add_steps()
+    workflow_canvas.finish()
+    assert workflow_canvas.canvas.tostring()
diff --git a/test/unit/workflows/test_run_parameters.py b/test/unit/workflows/test_run_parameters.py
new file mode 100644
index 0000000..12f3e9c
--- /dev/null
+++ b/test/unit/workflows/test_run_parameters.py
@@ -0,0 +1,147 @@
+from galaxy import model
+from galaxy.workflow.run_request import (
+    _normalize_inputs,
+    _normalize_step_parameters
+)
+
+from .workflow_support import MockTrans
+
+STEP_ID_OFFSET = 4  # Offset a little so ids and order index are different.
+
+
+def test_normalize_parameters_empty():
+    normalized_params = __normalize_parameters_against_fixture( {} )
+    assert normalized_params == {}
+
+
+def test_normalize_parameters_by_tool():
+    normalized_params = __normalize_parameters_against_fixture( {
+        'cat1': { 'foo': 'bar' }
+    } )
+    # Tool specified parameters are expanded out.
+    assert normalized_params[ STEP_ID_OFFSET + 3 ] == { 'foo': 'bar' }
+    assert normalized_params[ STEP_ID_OFFSET + 4 ] == { 'foo': 'bar' }
+    assert len( normalized_params.keys() ) == 2
+
+
+def test_step_parameters():
+    normalized_params = __normalize_parameters_against_fixture( {
+        str( STEP_ID_OFFSET + 1 ): { 'foo': 'bar' }
+    } )
+    assert normalized_params[ STEP_ID_OFFSET + 1 ] == { 'foo': 'bar' }
+    assert len( normalized_params.keys() ) == 1
+
+
+def test_step_parameters_legacy():
+    normalized_params = __normalize_parameters_against_fixture( {
+        str( STEP_ID_OFFSET + 1 ): { 'param': 'foo', 'value': 'bar' }
+    } )
+    assert normalized_params[ STEP_ID_OFFSET + 1 ] == { 'foo': 'bar' }, normalized_params
+    assert len( normalized_params.keys() ) == 1
+
+
+def test_inputs_by_step_id():
+    input1 = __new_input()
+    input2 = __new_input()
+    normalized_inputs = __normalize_inputs_against_fixture( {
+        str( STEP_ID_OFFSET + 1 ): input1,
+        str( STEP_ID_OFFSET + 2 ): input2
+    }, inputs_by="step_id" )
+    assert normalized_inputs[ STEP_ID_OFFSET + 1 ]['content'] == input1[ 'content' ]
+    assert normalized_inputs[ STEP_ID_OFFSET + 2 ]['content'] == input2[ 'content' ]
+
+
+def test_inputs_by_step_index():
+    input1 = __new_input()
+    input2 = __new_input()
+    normalized_inputs = __normalize_inputs_against_fixture( {
+        str( 0 ): input1,
+        str( 1 ): input2
+    }, inputs_by="step_index" )
+    assert normalized_inputs[ STEP_ID_OFFSET + 1 ]['content'] == input1[ 'content' ]
+    assert normalized_inputs[ STEP_ID_OFFSET + 2 ]['content'] == input2[ 'content' ]
+
+
+def test_inputs_by_name():
+    input1 = __new_input()
+    input2 = __new_input()
+    normalized_inputs = __normalize_inputs_against_fixture( {
+        "input1": input1,
+        "input2": input2
+    }, inputs_by="name" )
+    assert normalized_inputs[ STEP_ID_OFFSET + 1 ]['content'] == input1[ 'content' ]
+    assert normalized_inputs[ STEP_ID_OFFSET + 2 ]['content'] == input2[ 'content' ]
+
+
+def __normalize_parameters_against_fixture( params ):
+    trans = MockTrans()
+    # Create a throwaway workflow so step ids and order_index
+    # differ in the actual fixture.
+    __workflow_fixture( trans )
+
+    workflow = __workflow_fixture( trans )
+    normalized_params = _normalize_step_parameters( workflow.steps, params, legacy=True )
+    return normalized_params
+
+
+def __normalize_inputs_against_fixture( inputs, inputs_by ):
+    trans = MockTrans()
+    # Create a throwaway workflow so step ids and order_index
+    # differ in the actual fixture.
+    __workflow_fixture( trans )
+
+    workflow = __workflow_fixture( trans )
+    normalized_inputs = _normalize_inputs( workflow.steps, inputs, inputs_by )
+    return normalized_inputs
+
+
+def __new_input( ):
+    return dict( content=model.HistoryDatasetAssociation() )
+
+
+def __workflow_fixture( trans ):
+    user = model.User(
+        email="testworkflow_params at bx.psu.edu",
+        password="pass"
+    )
+    stored_workflow = model.StoredWorkflow()
+    stored_workflow.user = user
+    workflow = model.Workflow()
+    workflow.stored_workflow = stored_workflow
+
+    def add_step( **kwds ):
+        workflow_step = model.WorkflowStep()
+        for key, value in kwds.items():
+            setattr(workflow_step, key, value)
+        workflow.steps.append( workflow_step )
+
+    trans.app.model.context.add(
+        workflow,
+    )
+
+    add_step(
+        type="data_input",
+        order_index=0,
+        tool_inputs={"name": "input1"}
+    )
+    add_step(
+        type="data_input",
+        order_index=1,
+        tool_inputs={"name": "input2"}
+    )
+    add_step(
+        type="tool",
+        tool_id="cat1",
+        order_index=2,
+    )
+    add_step(
+        type="tool",
+        tool_id="cat1",
+        order_index=4,
+    )
+    trans.app.model.context.flush()
+    # Expunge and reload to ensure step state is as expected from database.
+    workflow_id = workflow.id
+    trans.app.model.context.expunge_all()
+
+    return trans.app.model.context.query( model.Workflow ).get( workflow_id )
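
These fixtures pin down three addressing modes for run parameters: by tool id
(fanned out to every step using that tool), by step id, and the legacy
{'param': ..., 'value': ...} form, all normalized to a dict keyed by step id.
A sketch consistent with the assertions (the real _normalize_step_parameters
handles more cases than this):

    def normalize_step_parameters_sketch( steps, params, legacy=False ):
        normalized = {}
        for key, value in params.items():
            if legacy and 'param' in value and 'value' in value:
                # legacy form names a single parameter and its value
                value = { value[ 'param' ]: value[ 'value' ] }
            for step in steps:
                # a key may name a step directly or match the step's tool
                if str( step.id ) == str( key ) or step.tool_id == key:
                    normalized[ step.id ] = value
        return normalized
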
diff --git a/test/unit/workflows/test_workflow_progress.py b/test/unit/workflows/test_workflow_progress.py
new file mode 100644
index 0000000..0036e14
--- /dev/null
+++ b/test/unit/workflows/test_workflow_progress.py
@@ -0,0 +1,207 @@
+import unittest
+
+from galaxy import model
+from galaxy.workflow.run import WorkflowProgress
+
+from .workflow_support import TestApp, yaml_to_model
+
+TEST_WORKFLOW_YAML = """
+steps:
+  - type: "data_input"
+    tool_inputs: {"name": "input1"}
+  - type: "data_input"
+    tool_inputs: {"name": "input2"}
+  - type: "tool"
+    tool_id: "cat1"
+    input_connections:
+    -  input_name: "input1"
+       "@output_step": 0
+       output_name: "output"
+  - type: "tool"
+    tool_id: "cat1"
+    input_connections:
+    -  input_name: "input1"
+       "@output_step": 0
+       output_name: "output"
+  - type: "tool"
+    tool_id: "cat1"
+    input_connections:
+    -  input_name: "input1"
+       "@output_step": 2
+       output_name: "out_file1"
+"""
+
+TEST_SUBWORKFLOW_YAML = """
+steps:
+  - type: "data_input"
+    tool_inputs: {"name": "outer_input"}
+  - type: "subworkflow"
+    subworkflow:
+       steps:
+          - type: "data_input"
+            tool_inputs: {"name": "inner_input"}
+          - type: "tool"
+            tool_id: "cat1"
+            input_connections:
+            -  input_name: "input1"
+               "@output_step": 0
+               output_name: "output"
+    input_connections:
+    -  input_name: "inner_input"
+       "@output_step": 0
+       output_name: "output"
+       "@input_subworkflow_step": 0
+"""
+
+UNSCHEDULED_STEP = object()
+
+
+class WorkflowProgressTestCase( unittest.TestCase ):
+
+    def setUp(self):
+        self.app = TestApp()
+        self.inputs_by_step_id = {}
+        self.invocation = model.WorkflowInvocation()
+        self.progress = {}
+
+    def _setup_workflow(self, workflow_yaml):
+        workflow = yaml_to_model(workflow_yaml)
+        self.invocation.workflow = workflow
+
+    def _new_workflow_progress( self ):
+        return WorkflowProgress(
+            self.invocation, self.inputs_by_step_id, MockModuleInjector(self.progress)
+        )
+
+    def _set_previous_progress(self, outputs_dict):
+        for step_id, step_value in outputs_dict.items():
+            if step_value is not UNSCHEDULED_STEP:
+                self.progress[step_id] = step_value
+
+                workflow_invocation_step = model.WorkflowInvocationStep()
+                workflow_invocation_step.workflow_step_id = step_id
+                self.invocation.steps.append(workflow_invocation_step)
+
+            workflow_invocation_step_state = model.WorkflowRequestStepState()
+            workflow_invocation_step_state.workflow_step_id = step_id
+            workflow_invocation_step_state.value = True
+            self.invocation.step_states.append(workflow_invocation_step_state)
+
+    def _step(self, index):
+        return self.invocation.workflow.steps[index]
+
+    def test_connect_data_input( self ):
+        self._setup_workflow(TEST_WORKFLOW_YAML)
+        hda = model.HistoryDatasetAssociation()
+
+        self.inputs_by_step_id = {100: hda}
+        progress = self._new_workflow_progress()
+        progress.set_outputs_for_input( self._step(0) )
+
+        conn = model.WorkflowStepConnection()
+        conn.output_name = "output"
+        conn.output_step = self._step(0)
+        assert progress.replacement_for_connection(conn) is hda
+
+    def test_replacement_for_tool_input( self ):
+        self._setup_workflow(TEST_WORKFLOW_YAML)
+        hda = model.HistoryDatasetAssociation()
+
+        self.inputs_by_step_id = {100: hda}
+        progress = self._new_workflow_progress()
+        progress.set_outputs_for_input( self._step(0) )
+
+        replacement = progress.replacement_for_tool_input(self._step(2), MockInput(), "input1")
+        assert replacement is hda
+
+    def test_connect_tool_output( self ):
+        self._setup_workflow(TEST_WORKFLOW_YAML)
+        hda = model.HistoryDatasetAssociation()
+
+        progress = self._new_workflow_progress()
+        progress.set_step_outputs( self._step(2), {"out1": hda} )
+
+        conn = model.WorkflowStepConnection()
+        conn.output_name = "out1"
+        conn.output_step = self._step(2)
+        assert progress.replacement_for_connection(conn) is hda
+
+    def test_remaining_steps_with_progress(self):
+        self._setup_workflow(TEST_WORKFLOW_YAML)
+        hda3 = model.HistoryDatasetAssociation()
+        self._set_previous_progress({
+            100: {"output": model.HistoryDatasetAssociation()},
+            101: {"output": model.HistoryDatasetAssociation()},
+            102: {"out_file1": hda3},
+            103: {"out_file1": model.HistoryDatasetAssociation()},
+            104: UNSCHEDULED_STEP,
+        })
+        progress = self._new_workflow_progress()
+        steps = progress.remaining_steps()
+        assert len(steps) == 1
+        assert steps[0] is self.invocation.workflow.steps[4]
+
+        replacement = progress.replacement_for_tool_input(self._step(4), MockInput(), "input1")
+        assert replacement is hda3
+
+    # TODO: Replace multiple true HDA with HDCA
+    # TODO: Test explicit delay
+    # TODO: Test cancel on collection invalid
+    # TODO: Test delay on collection waiting for population
+
+    def test_subworkflow_progress(self):
+        self._setup_workflow(TEST_SUBWORKFLOW_YAML)
+        hda = model.HistoryDatasetAssociation()
+        self._set_previous_progress({
+            100: {"output": hda},
+            101: UNSCHEDULED_STEP,
+        })
+        self.invocation.create_subworkflow_invocation_for_step(
+            self.invocation.workflow.step_by_index(1)
+        )
+        progress = self._new_workflow_progress()
+        remaining_steps = progress.remaining_steps()
+        subworkflow_step = remaining_steps[0]
+        subworkflow_progress = progress.subworkflow_progress(subworkflow_step)
+        subworkflow = subworkflow_step.subworkflow
+        assert subworkflow_progress.workflow_invocation.workflow == subworkflow
+        subworkflow_input_step = subworkflow.step_by_index(0)
+        subworkflow_progress.set_outputs_for_input( subworkflow_input_step )
+
+        subworkflow_cat_step = subworkflow.step_by_index(1)
+
+        assert hda is subworkflow_progress.replacement_for_tool_input(
+            subworkflow_cat_step,
+            MockInput(),
+            "input1",
+        )
+
+
+class MockInput(object):
+
+    def __init__(self, type="data", multiple=False):
+        self.multiple = multiple
+        self.type = type
+
+
+class MockModuleInjector(object):
+
+    def __init__(self, progress):
+        self.progress = progress
+
+    def inject(self, step):
+        step.module = MockModule(self.progress)
+
+
+class MockModule(object):
+
+    def __init__(self, progress):
+        self.progress = progress
+
+    def recover_runtime_state(self, runtime_state):
+        return True
+
+    def recover_mapping(self, step, step_invocations, progress):
+        step_id = step.id
+        if step_id in self.progress:
+            progress.set_step_outputs(step, self.progress[step_id])
diff --git a/test/unit/workflows/workflow_support.py b/test/unit/workflows/workflow_support.py
new file mode 100644
index 0000000..e21887b
--- /dev/null
+++ b/test/unit/workflows/workflow_support.py
@@ -0,0 +1,127 @@
+from functools import partial
+
+import yaml
+
+from galaxy import model
+from galaxy.model import mapping
+from galaxy.util import bunch
+from galaxy.web.security import SecurityHelper
+
+
+class MockTrans( object ):
+
+    def __init__( self ):
+        self.app = TestApp()
+        self.sa_session = self.app.model.context
+        self._user = None
+
+    def save_workflow(self, workflow):
+        stored_workflow = model.StoredWorkflow()
+        stored_workflow.latest_workflow = workflow
+        stored_workflow.user = self.user
+        self.sa_session.add( stored_workflow )
+        self.sa_session.flush()
+        return stored_workflow
+
+    @property
+    def user(self):
+        if self._user is None:
+            self._user = model.User(
+                email="testworkflows at bx.psu.edu",
+                password="password"
+            )
+        return self._user
+
+
+class TestApp( object ):
+
+    def __init__( self ):
+        self.config = bunch.Bunch(
+            tool_secret="awesome_secret",
+        )
+        self.model = mapping.init(
+            "/tmp",
+            "sqlite:///:memory:",
+            create_tables=True
+        )
+        self.toolbox = TestToolbox()
+        self.datatypes_registry = TestDatatypesRegistry()
+        self.security = SecurityHelper(id_secret="testing")
+
+
+class TestDatatypesRegistry( object ):
+
+    def __init__( self ):
+        pass
+
+    def get_datatype_by_extension( self, ext ):
+        return ext
+
+
+class TestToolbox( object ):
+
+    def __init__( self ):
+        self.tools = {}
+
+    def get_tool( self, tool_id, tool_version=None ):
+        # the real toolbox also returns None for a missing tool
+        return self.tools.get( tool_id, None )
+
+    def get_tool_id( self, tool_id ):
+        tool = self.get_tool( tool_id )
+        return tool and tool.id
+
+
+def yaml_to_model(has_dict, id_offset=100):
+    if isinstance(has_dict, str):
+        has_dict = yaml.load(has_dict)
+
+    workflow = model.Workflow()
+    workflow.steps = []
+    for i, step in enumerate(has_dict.get("steps", [])):
+        workflow_step = model.WorkflowStep()
+        if "order_index" not in step:
+            step["order_index"] = i
+        if "id" not in step:
+            # Fixed offset ids just to test against the assumption order_index != id
+            step["id"] = id_offset
+            id_offset += 1
+        step_type = step.get("type", None)
+        assert step_type is not None
+
+        if step_type == "subworkflow":
+            subworkflow_dict = step["subworkflow"]
+            del step["subworkflow"]
+            subworkflow = yaml_to_model(subworkflow_dict, id_offset=id_offset)
+            step["subworkflow"] = subworkflow
+            id_offset += len(subworkflow.steps)
+
+        for key, value in step.items():
+            if key == "input_connections":
+                connections = []
+                for conn_dict in value:
+                    conn = model.WorkflowStepConnection()
+                    for conn_key, conn_value in conn_dict.items():
+                        if conn_key == "@output_step":
+                            target_step = workflow.steps[conn_value]
+                            conn_value = target_step
+                            conn_key = "output_step"
+                        if conn_key == "@input_subworkflow_step":
+                            conn_value = step["subworkflow"].step_by_index(conn_value)
+                            conn_key = "input_subworkflow_step"
+                        setattr(conn, conn_key, conn_value)
+                    connections.append(conn)
+                value = connections
+            if key == "workflow_outputs":
+                value = [partial(_dict_to_workflow_output, workflow_step)(_) for _ in value]
+            setattr(workflow_step, key, value)
+        workflow.steps.append( workflow_step )
+
+    return workflow
+
+
+def _dict_to_workflow_output(workflow_step, as_dict):
+    output = model.WorkflowOutput(workflow_step)
+    for key, value in as_dict.items():
+        setattr(output, key, value)
+    return output
diff --git a/tool-data/add_scores.loc.sample b/tool-data/add_scores.loc.sample
new file mode 100644
index 0000000..7ff12ed
--- /dev/null
+++ b/tool-data/add_scores.loc.sample
@@ -0,0 +1,20 @@
+#This is a sample file distributed with Galaxy that lists the BigWig files
+#available for use with the add_scores (phyloP interspecies conservation
+#scores) tool.  You will need to supply these BigWig files and then create
+#an add_scores.loc file similar to this one (store it in this directory)
+#that lists their locations.  The add_scores.loc file has the following
+#format (white space characters are TAB characters):
+#
+#<build>	<BigWig_file_path>
+#
+#So, for example, if your add_scores.loc began like this:
+#
+#hg18	/galaxy/data/hg18/misc/phyloP44way.primate.bw
+#
+#then your /galaxy/data/hg18/misc/ directory would need to contain a
+#BigWig file named phyloP44way.primate.bw, among others:
+#
+#-rw-r--r-- 1 g2data g2data 6057387572 Nov 23 10:11 phyloP44way.primate.bw
+#
+#hg18	/galaxy/data/hg18/misc/phyloP44way.primate.bw
+#hg19	/galaxy/data/hg19/misc/phyloP46way.primate.bw
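
Every .loc file in this directory follows the same convention: '#' comment
lines plus TAB-separated records. A minimal reader for a two-column file like
this one could look like the sketch below (read_loc and the simplified error
handling are assumptions, not how Galaxy's tool-data tables load these files):

    def read_loc( path ):
        entries = {}
        with open( path ) as handle:
            for line in handle:
                line = line.rstrip( '\n' )
                # skip blank lines and the '#' comment header
                if not line or line.startswith( '#' ):
                    continue
                build, bigwig_path = line.split( '\t' )[ :2 ]
                entries[ build ] = bigwig_path
        return entries
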
diff --git a/tool-data/alignseq.loc.sample b/tool-data/alignseq.loc.sample
new file mode 100644
index 0000000..8b74580
--- /dev/null
+++ b/tool-data/alignseq.loc.sample
@@ -0,0 +1,57 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use alignment data stored as axt files (lines starting with "align")
+#or nib files (lines starting with "seq").  You will need to index 
+#them and then create an alignseq.loc file similar to this one (store
+#it in this directory) that points to the directories in which those 
+#alignments are stored.  The "align" data referred to by the alignseq.loc 
+#file has this format (white space characters are TAB characters):
+#
+#align	<build1>	<build2>	<dir>
+#
+#So, for example, if you had hg18/bosTau2 alignment files stored in 
+#/depot/data2/galaxy/hg18/align/bosTau2, then the alignseq.loc entry 
+#would look like this:
+#
+#align	hg18	bosTau2	/depot/data2/galaxy/hg18/align/bosTau2
+#
+#and your /depot/data2/galaxy/hg18/align/bosTau2 directory would 
+#contain all of your alignment files (e.g.):
+#
+#-rw-rw-r--   1 nate   galaxy 151842783 2006-01-08 01:00 chr10.axt
+#-rw-rw-r--   1 nate   galaxy     79575 2006-01-08 01:00 chr10_random.axt
+#-rw-rw-r--   1 nate   galaxy 155015634 2006-01-08 01:01 chr11.axt
+#...etc...
+#
+#Your alignseq.loc file should include an entry per line for each alignment 
+#file you have stored.  For example:
+#
+#align anoGam1 dm1 /depot/data2/galaxy/anoGam1/align/dm1
+#align anoGam1 dm2 /depot/data2/galaxy/anoGam1/align/dm2
+#align canFam1 hg17 /depot/data2/galaxy/canFam1/align/hg17
+#...etc...
+#
+#The "seq" data referred to by the alignseq.loc file has this 
+#format (white space characters are TAB characters):
+#
+#seq	<build1>	<dir>
+#
+#So, for example, if you had anoGam1 sequence files stored in
+#/depot/data2/galaxy/anoGam1/seq, then the alignseq.loc entry
+#would look like this:
+#
+#seq anoGam1 /depot/data2/galaxy/anoGam1/seq
+#and your /depot/data2/galaxy/anoGam1/seq directory would
+#contain all of your sequence files (e.g.):
+#
+#-rw-rw-r-- 1 nate galaxy 24397551 2006-06-26 12:51 chr2L.nib
+#-rw-rw-r-- 1 nate galaxy 31362964 2006-06-26 12:51 chr2R.nib
+#-rw-rw-r-- 1 nate galaxy 20642013 2006-06-26 12:51 chr3L.nib
+#-rw-rw-r-- 1 nate galaxy 26636071 2006-06-26 12:51 chr3R.nib
+#
+#Your alignseq.loc file should include an entry per line for each sequence
+#file you have stored.  For example:
+#
+#seq anoGam1 /depot/data2/galaxy/anoGam1/seq
+#seq bosTau2 /depot/data2/galaxy/bosTau2/seq
+#seq bosTau3 /depot/data2/galaxy/bosTau3/seq
+#...etc...
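
Because alignseq.loc mixes two record types keyed by their first field, a
reader has to dispatch on it. A sketch under the format described above; note
the example entries use spaces even though the header says TABs, so splitting
on any whitespace is the safer assumption here:

    def read_alignseq_loc( path ):
        aligns, seqs = {}, {}
        with open( path ) as handle:
            for line in handle:
                fields = line.split()
                if not fields or fields[ 0 ].startswith( '#' ):
                    continue
                if fields[ 0 ] == 'align' and len( fields ) == 4:
                    # align <build1> <build2> <dir>
                    aligns[ ( fields[ 1 ], fields[ 2 ] ) ] = fields[ 3 ]
                elif fields[ 0 ] == 'seq' and len( fields ) == 3:
                    # seq <build1> <dir>
                    seqs[ fields[ 1 ] ] = fields[ 2 ]
        return aligns, seqs
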
diff --git a/tool-data/all_fasta.loc.sample b/tool-data/all_fasta.loc.sample
new file mode 100644
index 0000000..1a5a28d
--- /dev/null
+++ b/tool-data/all_fasta.loc.sample
@@ -0,0 +1,18 @@
+#This file lists the locations and dbkeys of all the fasta files
+#under the "genome" directory (a directory that contains a directory
+#for each build). The script extract_fasta.py will generate the file
+#all_fasta.loc. This file has the format (white space characters are
+#TAB characters):
+#
+#<unique_build_id>	<dbkey>	<display_name>	<file_path>
+#
+#So, all_fasta.loc could look something like this:
+#
+#apiMel3	apiMel3	Honeybee (Apis mellifera): apiMel3	/path/to/genome/apiMel3/apiMel3.fa
+#hg19canon	hg19	Human (Homo sapiens): hg19 Canonical	/path/to/genome/hg19/hg19canon.fa
+#hg19full	hg19	Human (Homo sapiens): hg19 Full	/path/to/genome/hg19/hg19full.fa
+#
+#Your all_fasta.loc file should contain an entry for each individual
+#fasta file. So there will be multiple fasta files for each build,
+#such as with hg19 above.
+#
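
Since several fasta files can share one dbkey (hg19canon and hg19full above
both map to hg19), a reader for this four-column format should keep a list per
build rather than a flat mapping. A sketch with an illustrative name:

    def read_all_fasta_loc( path ):
        by_dbkey = {}
        with open( path ) as handle:
            for line in handle:
                line = line.rstrip( '\n' )
                if not line or line.startswith( '#' ):
                    continue
                unique_id, dbkey, display_name, file_path = line.split( '\t' )[ :4 ]
                # keep every entry for a build, since dbkeys repeat
                by_dbkey.setdefault( dbkey, [] ).append(
                    ( unique_id, display_name, file_path ) )
        return by_dbkey
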
diff --git a/tool-data/bam_iobio.loc.sample b/tool-data/bam_iobio.loc.sample
new file mode 100644
index 0000000..36bea1c
--- /dev/null
+++ b/tool-data/bam_iobio.loc.sample
@@ -0,0 +1,3 @@
+# Table used for listing bam.iobio servers
+#<unique_id>	<display_name>	<url>
+bam_iobio	bam.iobio.io	http://bam.iobio.io/
diff --git a/tool-data/bfast_indexes.loc.sample b/tool-data/bfast_indexes.loc.sample
new file mode 100644
index 0000000..d13ec67
--- /dev/null
+++ b/tool-data/bfast_indexes.loc.sample
@@ -0,0 +1,38 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of BFAST indexed sequences data files.  You will need
+#to create these data files and then create a bfast_indexes.loc file
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The bfast_indexes.loc 
+#file has this format (white space characters are TAB characters):
+#
+#<unique_id>	<build>	<galaxy format extensions valid1,valid2>	<description>	<bfast_index_directory>
+#
+#
+#So, for example, if you had hg18 indexed for 40+ bp NT reads stored in 
+#/galaxy/data/hg18/bfast_index/nt/40+, 
+#then the bfast_indexes.loc entry could look like this:
+#
+#hg18_nt_40+	hg18	fastqsanger	hg18: 40+ bp NT Space reads	/galaxy/data/hg18/bfast_index/nt/40+/hg18.fa
+#
+#and your /galaxy/data/hg18/bfast_index/nt/40+ directory
+#would contain hg18.fa.*.brg and hg18.fa.*.bif files:
+#hg18.fa.nt.brg
+#hg18.fa.nt.1.1.bif
+#hg18.fa.nt.2.1.bif
+#...etc...
+#or similarly for color space indexes:
+#hg18.fa.nt.brg #NB: the localalign process requires the nucleotide brg file
+#hg18.fa.cs.brg
+#hg18.fa.cs.1.1.bif
+#hg18.fa.cs.2.1.bif
+#...etc...
+#
+#A 'generic' directory can be used to hold intermixed NT and CS indexes when differentiating is not needed; the bfast_indexes.loc entry could then look like this:
+#hg18_standard	hg18	fastqsanger,fastqcssanger	hg18 standard indexes	/galaxy/data/hg18/bfast_index/hg18.fa
+#
+#Using symlinks to avoid copying e.g. the .fa and .brg files is recommended
+#
+#hg18_nt_40+	hg18	fastqsanger	hg18: 40+ bp NT Space reads	/galaxy/data/hg18/bfast_index/nt/40+/hg18.fa
+#hg18_cs_50+	hg18	fastqcssanger	hg18: 50+ bp Color space reads	/galaxy/data/hg18/bfast_index/cs/50+/hg18.fa
+#hg18_nt_40-	hg18	fastqsanger	hg18: 40- bp NT Space reads	/galaxy/data/hg18/bfast_index/nt/40-/hg18.fa
+#phiX_nt_50	phiX	fastqsanger	phiX: 50 bp NT Space reads	/galaxy/data/phiX/bfast_index/nt/50/phiX.fa
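The third column is itself a comma-separated list of valid Galaxy format extensions, so a reader has to split it separately. A minimal parsing sketch, assuming the populated file is tool-data/bfast_indexes.loc (the function name is illustrative):

    def read_bfast_loc(path="tool-data/bfast_indexes.loc"):
        """Yield (unique_id, build, valid_formats, description, index_base)."""
        with open(path) as handle:
            for line in handle:
                if line.startswith("#") or not line.strip():
                    continue
                uid, build, formats, desc, base = line.rstrip("\n").split("\t")[:5]
                # e.g. "fastqsanger,fastqcssanger" for a mixed NT/CS index.
                yield uid, build, formats.split(","), desc, base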
diff --git a/tool-data/binned_scores.loc.sample b/tool-data/binned_scores.loc.sample
new file mode 100644
index 0000000..051f583
--- /dev/null
+++ b/tool-data/binned_scores.loc.sample
@@ -0,0 +1,37 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of FileBinnedArray data files.  You will need
+#to create these data files and then create a binned_scores.loc file 
+#similar to this one (store it in this directory) that points to
+#the directories in which those files are stored.  The binned_scores.loc 
+#file has this format (white space characters are TAB characters):
+#
+#<build>	<description>	<dir>
+#
+#So, for example, if you had hg16 ENCODE binCons on MLAGAN alignments
+#stored in /depot/data2/galaxy/binned_scores/hg16/encode_bincons_lagan, 
+#then the binned_scores.loc entry would look like this:
+#
+#hg16	ENCODE binCons on MLAGAN alignments	/depot/data2/galaxy/binned_scores/hg16/encode_bincons_lagan
+#
+#and your /depot/data2/galaxy/binned_scores/hg16/encode_bincons_lagan
+#directory would contain all of your "binned" files (e.g.):
+#
+#-rw-r--r--  1 james    universe 830134 2005-09-13 10:12 ENm001
+#-rw-r--r--  1 james    universe 527388 2005-09-13 10:12 ENm002
+#-rw-r--r--  1 james    universe 269808 2005-09-13 10:12 ENm003
+#...etc...
+#
+#Your binned_scores.loc file should include an entry per line for each
+#"binned" file you have stored.  For example:
+#
+#hg16	ENCODE binCons on MLAGAN alignments	/depot/data2/galaxy/binned_scores/hg16/encode_bincons_lagan
+#hg16	ENCODE binCons on TBA alignments	/depot/data2/galaxy/binned_scores/hg16/encode_bincons_tba
+#hg16	ENCODE GERP on MLAGAN alignment	/depot/data2/galaxy/binned_scores/hg16/encode_gerp_lagan
+#hg16	ENCODE GERP on TBA alignment	/depot/data2/galaxy/binned_scores/hg16/encode_gerp_tba
+#hg16	ENCODE phastCons on MLAGAN alignment	/depot/data2/galaxy/binned_scores/hg16/encode_phastcons_lagan
+#hg16	ENCODE phastCons on TBA alignment	/depot/data2/galaxy/binned_scores/hg16/encode_phatcons_tba
+#hg17	ENCODE phastCons on TBA alignment	/depot/data2/galaxy/binned_scores/hg17/phastcons_encode_sep2005_tba
+#hg17	phastCons on 8-species multiz, 16-Jun-2005	/depot/data2/galaxy/binned_scores/hg17/phastcons_mzPt1Mm5Rn3Cf1Gg2Fr1Dr1/
+#hg17	phastCons on 17-species multiz, 13-Apr-2006	/depot/data2/galaxy/binned_scores/hg17/phastCons17way/ba
+#hg17	ESPERR Regulatory Potential, 7-species, 23-Jun-2006	/depot/data2/galaxy/binned_scores/hg17/esperr_rp_7way/ba
+#hg18	phastCons on 17-species multiz, 06-Apr-2006	/depot/data2/galaxy/binned_scores/hg18/phastCons17way/ba
diff --git a/tool-data/biom_simple_display.loc.sample b/tool-data/biom_simple_display.loc.sample
new file mode 100644
index 0000000..a2603a3
--- /dev/null
+++ b/tool-data/biom_simple_display.loc.sample
@@ -0,0 +1,3 @@
+# Table used for listing simple BIOM display servers
+#<unique_id>	<display_name>	<url>
+phinch_dan	Phinch	http://www.bx.psu.edu/~dan/Phinch/index.html?biomURL=%(biom_file_url_qp)s
diff --git a/tool-data/blastdb.loc.sample b/tool-data/blastdb.loc.sample
new file mode 100644
index 0000000..01e4732
--- /dev/null
+++ b/tool-data/blastdb.loc.sample
@@ -0,0 +1,44 @@
+# This is a sample file distributed with Galaxy that is used to define a
+# list of nucleotide BLAST databases, using three columns tab separated:
+#
+# <unique_id>{tab}<database_caption>{tab}<base_name_path>
+#
+# The captions typically contain spaces and might end with the build date.
+# It is important that the actual database name does not have a space in
+# it, and that there are only two tabs on each line.
+#
+# You can download the NCBI provided protein databases like NR from here:
+# ftp://ftp.ncbi.nlm.nih.gov/blast/db/
+#
+# For simplicity, many Galaxy servers are configured to offer just a live
+# version of each NCBI BLAST database (updated with the NCBI provided
+# Perl scripts or similar). In this case, we recommend using the case
+# sensitive base-name of the NCBI BLAST databases as the unique id.
+# Consistent naming is important for sharing workflows between Galaxy
+# servers.
+#
+# For example, consider the NCBI partially non-redundant nucleotide 
+# nt BLAST database, where you have downloaded and decompressed the
+# files under /data/blastdb/, meaning that at the command line BLAST+
+# would look at the files /data/blastdb/nt.n* when run with:
+#
+# $ blastn -db /data/blastdb/nt -query ...
+#
+# In this case use nt (lower case to match the NCBI file naming) as the
+# unique id in the first column of blastdb.loc, giving an entry like
+# this:
+#
+# nt{tab}NCBI partially non-redundant (nt){tab}/data/blastdb/nt
+#
+# Alternatively, rather than a "live" mirror of the NCBI databases which
+# are updated automatically, for full reproducibility the Galaxy Team
+# recommend saving date-stamped copies of the databases. In this case
+# your blastdb.loc file should include an entry per line for each
+# version you have stored. For example:
+#
+# nt_05Jun2010{tab}NCBI nt (partially non-redundant) 05 Jun 2010{tab}/data/blastdb/05Jun2010/nt
+# nt_15Aug2010{tab}NCBI nt (partially non-redundant) 15 Aug 2010{tab}/data/blastdb/15Aug2010/nt
+# ...etc...
+#
+# See also blastdb_p.loc which is for any protein BLAST database, and
+# blastdb_d.loc which is for any protein domains databases (like CDD).
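Since {tab} above stands for a literal TAB character, a date-stamped entry can be appended programmatically rather than typed by hand. A minimal Python sketch reusing the example values from above (the target path is an assumption):

    # "{tab}" in the sample is a placeholder for a real TAB character.
    entry = "\t".join([
        "nt_05Jun2010",
        "NCBI nt (partially non-redundant) 05 Jun 2010",
        "/data/blastdb/05Jun2010/nt",
    ]) + "\n"
    with open("tool-data/blastdb.loc", "a") as loc:
        loc.write(entry)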
diff --git a/tool-data/blastdb_d.loc.sample b/tool-data/blastdb_d.loc.sample
new file mode 100644
index 0000000..d3a9a05
--- /dev/null
+++ b/tool-data/blastdb_d.loc.sample
@@ -0,0 +1,57 @@
+# This is a sample file distributed with Galaxy that is used to define a
+# list of protein domain databases, using three columns tab separated
+# ({tab} below stands for a literal TAB character):
+#
+# <unique_id>{tab}<database_caption>{tab}<base_name_path>
+#
+# The captions typically contain spaces and might end with the build date.
+# It is important that the actual database name does not have a space in
+# it, and that there are only two tabs on each line.
+#
+# You can download the NCBI provided databases as tar-balls from here:
+# ftp://ftp.ncbi.nih.gov/pub/mmdb/cdd/little_endian/
+#
+# For simplicity, many Galaxy servers are configured to offer just a live
+# version of each NCBI BLAST database (updated with the NCBI provided
+# Perl scripts or similar). In this case, we recommend using the case
+# sensitive base-name of the NCBI BLAST databases as the unique id.
+# Consistent naming is important for sharing workflows between Galaxy
+# servers.
+#
+# For example, consider the NCBI Conserved Domains Database (CDD), where
+# you have downloaded and decompressed the files under the directory
+# /data/blastdb/domains/, meaning that at the command line BLAST+ would be
+# run as follows and would look at the files /data/blastdb/domains/Cdd.*:
+#
+# $ rpsblast -db /data/blastdb/domains/Cdd -query ...
+#
+# In this case use Cdd (title case to match the NCBI file naming) as the
+# unique id in the first column of blastdb_d.loc, giving an entry like
+# this:
+#
+# Cdd{tab}NCBI Conserved Domains Database (CDD){tab}/data/blastdb/domains/Cdd
+#
+# Your blastdb_d.loc file should include an entry per line for each "base name"
+# you have stored. For example:
+#
+# Cdd{tab}NCBI CDD{tab}/data/blastdb/domains/Cdd
+# Kog{tab}KOG (eukaryotes){tab}/data/blastdb/domains/Kog
+# Cog{tab}COG (prokaryotes){tab}/data/blastdb/domains/Cog
+# Pfam{tab}Pfam-A{tab}/data/blastdb/domains/Pfam
+# Smart{tab}SMART{tab}/data/blastdb/domains/Smart
+# Tigr{tab}TIGR{tab}/data/blastdb/domains/Tigr
+# Prk{tab}Protein Clusters database{tab}/data/blastdb/domains/Prk
+# ...etc...
+#
+# Alternatively, rather than a "live" mirror of the NCBI databases which
+# are updated automatically, for full reproducibility the Galaxy Team
+# recommend saving date-stamped copies of the databases. In this case
+# your blastdb_d.loc file should include an entry per line for each
+# version you have stored. For example:
+#
+# Cdd_05Jun2010{tab}NCBI CDD 05 Jun 2010{tab}/data/blastdb/domains/05Jun2010/Cdd
+# Cdd_15Aug2010{tab}NCBI CDD 15 Aug 2010{tab}/data/blastdb/domains/15Aug2010/Cdd
+# ...etc...
+#
+# See also blastdb.loc which is for any nucleotide BLAST database, and
+# blastdb_p.loc which is for any protein BLAST databases.
diff --git a/tool-data/blastdb_p.loc.sample b/tool-data/blastdb_p.loc.sample
new file mode 100644
index 0000000..4287a00
--- /dev/null
+++ b/tool-data/blastdb_p.loc.sample
@@ -0,0 +1,44 @@
+# This is a sample file distributed with Galaxy that is used to define a
+# list of protein BLAST databases, using three columns tab separated:
+#
+# <unique_id>{tab}<database_caption>{tab}<base_name_path>
+#
+# The captions typically contain spaces and might end with the build date.
+# It is important that the actual database name does not have a space in
+# it, and that there are only two tabs on each line.
+#
+# You can download the NCBI provided protein databases like NR from here:
+# ftp://ftp.ncbi.nlm.nih.gov/blast/db/
+#
+# For simplicity, many Galaxy servers are configured to offer just a live
+# version of each NCBI BLAST database (updated with the NCBI provided
+# Perl scripts or similar). In this case, we recommend using the case
+# sensitive base-name of the NCBI BLAST databases as the unique id.
+# Consistent naming is important for sharing workflows between Galaxy
+# servers.
+#
+# For example, consider the NCBI "non-redundant" protein BLAST database
+# where you have downloaded and decompressed the files under /data/blastdb/
+# meaning that at the command line BLAST+ would be run with something
+# like the following, which would look at the files /data/blastdb/nr.p*:
+#
+# $ blastp -db /data/blastdb/nr -query ...
+#
+# In this case use nr (lower case to match the NCBI file naming) as the
+# unique id in the first column of blastdb_p.loc, giving an entry like
+# this:
+#
+# nr{tab}NCBI non-redundant (nr){tab}/data/blastdb/nr
+#
+# Alternatively, rather than a "live" mirror of the NCBI databases which
+# are updated automatically, for full reproducibility the Galaxy Team
+# recommend saving date-stamped copies of the databases. In this case
+# your blastdb_p.loc file should include an entry per line for each
+# version you have stored. For example:
+#
+# nr_05Jun2010{tab}NCBI NR (non redundant) 05 Jun 2010{tab}/data/blastdb/05Jun2010/nr
+# nr_15Aug2010{tab}NCBI NR (non redundant) 15 Aug 2010{tab}/data/blastdb/15Aug2010/nr
+# ...etc...
+#
+# See also blastdb.loc which is for any nucleotide BLAST database, and
+# blastdb_d.loc which is for any protein domains databases (like CDD).
diff --git a/tool-data/codingSnps.loc.sample b/tool-data/codingSnps.loc.sample
new file mode 100644
index 0000000..3cd6c7f
--- /dev/null
+++ b/tool-data/codingSnps.loc.sample
@@ -0,0 +1,23 @@
+#This is a sample file distributed with Galaxy that enables tools to use a
+#directory of 2bit genome files for use with codingSnps.  You will need to
+#supply these files and then create a codingSnps.loc file similar to this one
+#(store it in this directory) that points to the directories in which those
+#files are stored.  The codingSnps.loc file has this format (white space
+#characters are TAB characters):
+#
+#<build>	<file_path>
+#
+#So, for example, if your codingSnps.loc began like this:
+#
+#hg18	/galaxy/data/hg18/seq
+#
+#then your /galaxy/data/hg18/seq directory would need to contain the following
+#2bit file:
+#
+#-rw-r--r--   1 g2data   g2data   807604784 Dec  8 13:21 hg18.2bit
+#
+#Your codingSnps.loc file should include an entry per line for each file you
+#have stored that you want to be available.  Note that your files should
+#all have the extension '2bit'.
+#hg18	/galaxy/data/hg18/seq
+#hg19	/galaxy/data/hg19/seq
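Because every entry must resolve to a <build>.2bit file inside the listed directory, the populated file can be sanity-checked mechanically. A minimal sketch, assuming the file lives at tool-data/codingSnps.loc:

    import os

    def check_coding_snps_loc(path="tool-data/codingSnps.loc"):
        """Report entries whose <build>.2bit file is missing."""
        with open(path) as handle:
            for line in handle:
                if line.startswith("#") or not line.strip():
                    continue
                build, seq_dir = line.rstrip("\n").split("\t")[:2]
                twobit = os.path.join(seq_dir, build + ".2bit")
                if not os.path.exists(twobit):
                    print("missing 2bit file for %s: %s" % (build, twobit))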
diff --git a/tool-data/encode_datasets.loc.sample b/tool-data/encode_datasets.loc.sample
new file mode 100644
index 0000000..02c4739
--- /dev/null
+++ b/tool-data/encode_datasets.loc.sample
@@ -0,0 +1,62 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use ENCODE data.  The encode_datasets.loc file has this format 
+#(white space characters are TAB characters):
+#
+#<EncodeGroup>	<build>	<DisplayName>	<UniqueID>	<FullPathToFile>	<Format>
+#             Encode Groups: ShortHandCode = Group
+#                                       CC = chromatin and chromosomes
+#                                       GT = genes and transcripts
+#                                      MSA = multi-species sequence analysis
+#                                       TR = transcription regulation
+#                                      ALD = All Latest Datasets
+#All files are assumed to be BED.
+#
+#ALD	hg17	Latest Datasets (20051208)	all_latest_datasets.20051208.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051208.bed	bed
+#ALD	hg17	Latest Datasets (20051208) [gencode_partitioned]	all_latest_datasets.20051208.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051208.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20051208) [gencode_partitioned]	all_latest_datasets.20051208.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051208.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20051208)	all_latest_datasets.20051208.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051208.gff	gff
+#ALD	hg17	Latest Datasets (20051209)	all_latest_datasets.20051209.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051209.bed	bed
+#ALD	hg17	Latest Datasets (20051209) [gencode_partitioned]	all_latest_datasets.20051209.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051209.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20051209) [gencode_partitioned]	all_latest_datasets.20051209.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051209.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20051209)	all_latest_datasets.20051209.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051209.gff	gff
+#ALD	hg17	Latest Datasets (20051211)	all_latest_datasets.20051211.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051211.bed	bed
+#ALD	hg17	Latest Datasets (20051211) [gencode_partitioned]	all_latest_datasets.20051211.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051211.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20051211) [gencode_partitioned]	all_latest_datasets.20051211.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051211.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20051211)	all_latest_datasets.20051211.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051211.gff	gff
+#ALD	hg17	Latest Datasets (20051212)	all_latest_datasets.20051212.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051212.bed	bed
+#ALD	hg17	Latest Datasets (20051212) [gencode_partitioned]	all_latest_datasets.20051212.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051212.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20051212) [gencode_partitioned]	all_latest_datasets.20051212.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051212.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20051212)	all_latest_datasets.20051212.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051212.gff	gff
+#ALD	hg17	Latest Datasets (20051214)	all_latest_datasets.20051214.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051214.bed	bed
+#ALD	hg17	Latest Datasets (20051214) [gencode_partitioned]	all_latest_datasets.20051214.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051214.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20051214) [gencode_partitioned]	all_latest_datasets.20051214.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051214.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20051214)	all_latest_datasets.20051214.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051214.gff	gff
+#ALD	hg17	Latest Datasets (20051216)	all_latest_datasets.20051216.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051216.bed	bed
+#ALD	hg17	Latest Datasets (20051216) [gencode_partitioned]	all_latest_datasets.20051216.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051216.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20051216) [gencode_partitioned]	all_latest_datasets.20051216.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051216.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20051216)	all_latest_datasets.20051216.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20051216.gff	gff
+#ALD	hg17	Latest Datasets (20060105)	all_latest_datasets.20060105.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060105.bed	bed
+#ALD	hg17	Latest Datasets (20060105) [gencode_partitioned]	all_latest_datasets.20060105.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060105.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20060105) [gencode_partitioned]	all_latest_datasets.20060105.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060105.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20060105)	all_latest_datasets.20060105.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060105.gff	gff
+#ALD	hg17	Latest Datasets (20060106)	all_latest_datasets.20060106.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060106.bed	bed
+#ALD	hg17	Latest Datasets (20060106) [gencode_partitioned]	all_latest_datasets.20060106.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060106.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20060106) [gencode_partitioned]	all_latest_datasets.20060106.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060106.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20060106)	all_latest_datasets.20060106.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060106.gff	gff
+#ALD	hg17	Latest Datasets (20060116)	all_latest_datasets.20060116.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060116.bed	bed
+#ALD	hg17	Latest Datasets (20060116) [gencode_partitioned]	all_latest_datasets.20060116.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060116.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20060116) [gencode_partitioned]	all_latest_datasets.20060116.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060116.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20060116)	all_latest_datasets.20060116.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060116.gff	gff
+#ALD	hg17	Latest Datasets (20060124)	all_latest_datasets.20060124.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060124.bed	bed
+#ALD	hg17	Latest Datasets (20060124) [gencode_partitioned]	all_latest_datasets.20060124.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060124.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20060124) [gencode_partitioned]	all_latest_datasets.20060124.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060124.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20060124)	all_latest_datasets.20060124.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060124.gff	gff
+#ALD	hg17	Latest Datasets (20060204)	all_latest_datasets.20060204.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060204.bed	bed
+#ALD	hg17	Latest Datasets (20060204) [gencode_partitioned]	all_latest_datasets.20060204.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060204.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20060204) [gencode_partitioned]	all_latest_datasets.20060204.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060204.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20060204)	all_latest_datasets.20060204.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060204.gff	gff
+#ALD	hg17	Latest Datasets (20060206)	all_latest_datasets.20060206.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060206.bed	bed
+#ALD	hg17	Latest Datasets (20060206) [gencode_partitioned]	all_latest_datasets.20060206.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060206.gencode_partitioned.bed	bed
+#ALD	hg17	Latest Datasets (20060206) [gencode_partitioned]	all_latest_datasets.20060206.gencode_partitioned.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060206.gencode_partitioned.gff	gff
+#ALD	hg17	Latest Datasets (20060206)	all_latest_datasets.20060206.gff	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060206.gff	gff
+#ALD	hg17	Latest Datasets (20060209) [gencode_partitioned]	all_latest_datasets.20060209.gencode_partitioned.bed	/depot/data2/galaxy/encode-data/all_latest/all_latest_datasets.20060209.gencode_partitioned.bed	bed
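With six tab-separated columns and a group code in the first one, pulling out the datasets for one ENCODE group and build is a single pass over the file. A minimal sketch, with the loc path and default arguments as assumptions:

    def encode_datasets(path="tool-data/encode_datasets.loc",
                        group="ALD", build="hg17"):
        """Yield (display_name, unique_id, file_path, format) for a group/build."""
        with open(path) as handle:
            for line in handle:
                if line.startswith("#") or not line.strip():
                    continue
                g, b, name, uid, file_path, fmt = line.rstrip("\n").split("\t")[:6]
                if g == group and b == build:
                    yield name, uid, file_path, fmt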
diff --git a/tool-data/faseq.loc.sample b/tool-data/faseq.loc.sample
new file mode 100644
index 0000000..1f1d781
--- /dev/null
+++ b/tool-data/faseq.loc.sample
@@ -0,0 +1,26 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use genome fasta sequence files.  The faseq.loc file has this format 
+#(white space characters are TAB characters):
+#
+# <GenomeBuild> <dir>
+#
+# In the dir, each file is in fasta format and contains only one sequence. So,
+#for example, if you had hg18 fasta sequences stored in /depot/data2/galaxy/faseq/hg18,
+#then your faseq.loc entry would look like this:
+#
+#hg18	/depot/data2/galaxy/faseq/hg18
+#
+#and your /depot/data2/galaxy/faseq/hg18 directory would contain all of 
+#your fasta sequence files (e.g.):
+#
+#-rw-r--r--  1 wychung galaxy 138082251 2008-04-16 11:57 chr10.fa
+#-rw-r--r--  1 wychung galaxy    115564 2008-04-16 11:57 chr10_random.fa
+#-rw-r--r--  1 wychung galaxy 137141451 2008-04-16 11:58 chr11.fa
+#...etc...
+#Your faseq.loc file should include an entry per line for each set of fasta 
+#sequence files you have stored.  For example:
+#
+#hg18	/depot/data2/galaxy/faseq/hg18
+#mm9	/depot/data2/galaxy/faseq/mm9
+#Arabidopsis	/depot/data2/galaxy/faseq/Arabidopsis
+#...etc...
diff --git a/tool-data/funDo.loc.sample b/tool-data/funDo.loc.sample
new file mode 100644
index 0000000..861b1c4
--- /dev/null
+++ b/tool-data/funDo.loc.sample
@@ -0,0 +1,11 @@
+#This is a sample file distributed with Galaxy that is used by the FunDO tool.
+#The funDo.loc file has this format (white space characters are TAB
+#characters):
+#
+#<build>	<description>	<path to disease associated genes file>
+#
+#Your funDo.loc file should include an entry per line for each disease
+#associated genes file you have stored.
+#
+#hg18	disease associated genes	/galaxy/data/hg18/misc/funDo/genes-disease.Sept2010.interval
+#hg19	disease associated genes	/galaxy/data/hg19/misc/funDo/genes-disease.Sept2010.interval
diff --git a/tool-data/liftOver.loc.sample b/tool-data/liftOver.loc.sample
new file mode 100644
index 0000000..777a25a
--- /dev/null
+++ b/tool-data/liftOver.loc.sample
@@ -0,0 +1,27 @@
+#This is a sample file distributed with Galaxy that is used by the
+#liftOver tools.  The liftOver.loc file has this format (white space 
+#characters are TAB characters):
+#
+#<FromSpecies>	<ToSpecies>	<PathToChainFile>
+#
+#So, for example, if you had the chain file to convert from anoCar1 to galGal3
+#located at /depot/data2/galaxy/anoCar1/liftOver/anoCar1ToGalGal3.over.chain, 
+#then the liftOver.loc entry would look like this:
+#
+#anoCar1	galGal3	/depot/data2/galaxy/anoCar1/liftOver/anoCar1ToGalGal3.over.chain
+#
+#and your /depot/data2/galaxy/anoCar1/liftOver directory would 
+#contain all of your "chain" files (e.g.):
+#
+#-rw-rw-r-- 1 gua110 galaxy 24046079 2008-01-16 14:20 anoCar1ToGalGal3.over.chain
+#-rw-rw-r-- 1 gua110 galaxy 13216668 2008-01-16 14:20 anoCar1ToGasAcu1.over.chain
+#-rw-rw-r-- 1 gua110 galaxy 29597067 2008-01-16 14:20 anoCar1ToHg18.over.chain
+#...etc...
+#
+#Your liftOver.loc file should include an entry per line for each build you can
+#convert.  For example:
+#
+#anoCar1	galGal3	/depot/data2/galaxy/anoCar1/liftOver/anoCar1ToGalGal3.over.chain
+#anoCar1	gasAcu1	/depot/data2/galaxy/anoCar1/liftOver/anoCar1ToGasAcu1.over.chain
+#anoCar1	hg18	/depot/data2/galaxy/anoCar1/liftOver/anoCar1ToHg18.over.chain
+#...etc...
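The (FromSpecies, ToSpecies) pair works as a lookup key: given a source and target build, there is exactly one chain file. A minimal lookup sketch, assuming the populated file is tool-data/liftOver.loc:

    def load_liftover_chains(path="tool-data/liftOver.loc"):
        """Map (from_build, to_build) -> chain file path."""
        chains = {}
        with open(path) as handle:
            for line in handle:
                if line.startswith("#") or not line.strip():
                    continue
                src, dest, chain_path = line.rstrip("\n").split("\t")[:3]
                chains[(src, dest)] = chain_path
        return chains

    # chains[("anoCar1", "galGal3")] would then return the over.chain path above.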
diff --git a/tool-data/maf_index.loc.sample b/tool-data/maf_index.loc.sample
new file mode 100644
index 0000000..c3a66c7
--- /dev/null
+++ b/tool-data/maf_index.loc.sample
@@ -0,0 +1,17 @@
+#This is a sample file distributed with Galaxy that is used by some
+#alignment tools.  The maf_index.loc file has this format (white space 
+#characters are TAB characters):
+#
+#<Display_name>	<UID>	<indexed_for:build1,build2,build3>	<exists_in_maf:build1,build2,build3>	<Comma_Separated_List_of_Full_Paths_To_Files>
+#
+#ENCODE TBA (hg17)	ENCODE_TBA_hg17	armadillo,baboon,galGal2,panTro1,colobus_monkey,cow,canFam1,dusky_titi,elephant,fr1,galago,hedgehog,hg17,rheMac1,marmoset,monDom1,mm6,mouse_lemur,owl_monkey,platypus,rabbit,rn3,rfbat,shrew,tenrec,tetNig1,xenTro1,danRer2	armadillo,baboon,galGal2,panTro1,colobus_monkey,cow,canFam1,dusky_titi,elephant,fr1,galago,hedgehog,hg17,rheMac1,marmoset,monDom1,mm6,mouse_lemur,owl_monkey,platypus,rabbit,rn3,rfbat,shrew,tenrec,tetNig1,xenTro1,danRer2	/depot/data2/gala [...]
+#ENCODE MAVID (hg17)	ENCODE_MAVID_hg17	armadillo,baboon,galGal2,panTro1,colobus_monkey,cow,canFam1,dusky_titi,elephant,fr1,galago,hedgehog,hg17,rheMac1,marmoset,monDom1,mm6,mouse_lemur,owl_monkey,platypus,rabbit,rn3,rfbat,shrew,tenrec,tetNig1,xenTro1,danRer2	armadillo,baboon,galGal2,panTro1,colobus_monkey,cow,canFam1,dusky_titi,elephant,fr1,galago,hedgehog,hg17,rheMac1,marmoset,monDom1,mm6,mouse_lemur,owl_monkey,platypus,rabbit,rn3,rfbat,shrew,tenrec,tetNig1,xenTro1,danRer2	/depot/data2/ [...]
+#ENCODE TBA (hg16)	ENCODE_TBA_hg16	armadillo,baboon,galGal2,panTro1,colobus_monkey,cow,canFam1,dusky_titi,elephant=elephant,fr1,galago,hedgehog,hg16,rheMac1,marmoset,monDom1,mm6,mouse_lemur,owl_monkey,platypus,rabbit,rn3,rfbat,shrew,tenrec,tetNig1,xenTro1,danRer2	armadillo,baboon,galGal2,panTro1,colobus_monkey,cow,canFam1,dusky_titi,elephant=elephant,fr1,galago,hedgehog,hg16,rheMac1,marmoset,monDom1,mm6,mouse_lemur,owl_monkey,platypus,rabbit,rn3,rfbat,shrew,tenrec,tetNig1,xenTro1,danRer2 [...]
+#8-way multiZ (hg17)	8_WAY_MULTIZ_hg17	canFam1,danRer1,fr1,galGal2,hg17,mm5,panTro1,rn3	canFam1,danRer1,fr1,galGal2,hg17,mm5,panTro1,rn3	/depot/data2/galaxy/hg17/align/8way-multiZ/chr1.maf,/depot/data2/galaxy/hg17/align/8way-multiZ/chr1_random.maf,/depot/data2/galaxy/hg17/align/8way-multiZ/chr10.maf,/depot/data2/galaxy/hg17/align/8way-multiZ/chr10_random.maf,/depot/data2/galaxy/hg17/align/8way-multiZ/chr11.maf,/depot/data2/galaxy/hg17/align/8way-multiZ/chr12.maf,/depot/data2/galaxy/hg17/ [...]
+#17-way multiZ (hg18)	17_WAY_MULTIZ_hg18	hg18,panTro1,bosTau2,rheMac2,mm8,rn4,canFam2,echTel1,loxAfr1,oryCun1,danRer3,monDom4,dasNov1,galGal2,fr1,tetNig1,xenTro1	hg18,panTro1,bosTau2,rheMac2,mm8,rn4,canFam2,echTel1,loxAfr1,oryCun1,danRer3,monDom4,dasNov1,galGal2,fr1,tetNig1,xenTro1	/depot/data2/galaxy/hg18/align/17way-multiZ/chr10.maf,/depot/data2/galaxy/hg18/align/17way-multiZ/chr10_random.maf,/depot/data2/galaxy/hg18/align/17way-multiZ/chr11.maf,/depot/data2/galaxy/hg18/align/17way-mul [...]
+#3-way multiZ (hg18,panTro2,rheMac2)	3_WAY_MULTIZ_hg18	hg18,panTro2,rheMac2	hg18,panTro2,rheMac2	/depot/data2/galaxy/hg18/align/3way-multiZ/chr10.maf,/depot/data2/galaxy/hg18/align/3way-multiZ/chr10_random.maf,/depot/data2/galaxy/hg18/align/3way-multiZ/chr11.maf,/depot/data2/galaxy/hg18/align/3way-multiZ/chr11_random.maf,/depot/data2/galaxy/hg18/align/3way-multiZ/chr12.maf,/depot/data2/galaxy/hg18/align/3way-multiZ/chr13.maf,/depot/data2/galaxy/hg18/align/3way-multiZ/chr13_random.maf,/de [...]
+#5-way multiZ (hg18,panTro2,rheMac2,mm8,canFam2)	5_WAY_MULTIZ_hg18	hg18,panTro2,rheMac2,mm8,canFam2	hg18,panTro2,rheMac2,mm8,canFam2	/depot/data2/galaxy/hg18/align/5way-multiZ/chr10.maf,/depot/data2/galaxy/hg18/align/5way-multiZ/chr10_random.maf,/depot/data2/galaxy/hg18/align/5way-multiZ/chr11.maf,/depot/data2/galaxy/hg18/align/5way-multiZ/chr11_random.maf,/depot/data2/galaxy/hg18/align/5way-multiZ/chr12.maf,/depot/data2/galaxy/hg18/align/5way-multiZ/chr13.maf,/depot/data2/galaxy/hg18/al [...]
+#28-way multiZ (hg18)	28_WAY_MULTIZ_hg18	hg18	hg18,dasNov1,otoGar1,felCat3,galGal3,panTro2,bosTau3,canFam2,loxAfr1,xenTro2,fr2,cavPor2,eriEur1,equCab1,anoCar1,oryLat1,mm8,monDom4,ornAna1,oryCun1,rn4,rheMac2,sorAra1,gasAcu1,echTel1,tetNig1,tupBel1,danRer4	/depot/data2/galaxy/hg18/align/multiz28way/chr1.maf.lzo,/depot/data2/galaxy/hg18/align/multiz28way/chr10.maf.lzo,/depot/data2/galaxy/hg18/align/multiz28way/chr10_random.maf.lzo,/depot/data2/galaxy/hg18/align/multiz28way/chr11.maf.lzo,/de [...]
+#15-way multiZ (dm2)	15_WAY_MULTIZ_dm2	dm2,droSim1,droSec1,droYak2,droEre2,droAna3,dp4,droPer1,droWil1,droVir3,droMoj3,droGri2,anoGam1,apiMel2,triCas2	dm2,droSim1,droSec1,droYak2,droEre2,droAna3,dp4,droPer1,droWil1,droVir3,droMoj3,droGri2,anoGam1,apiMel2,triCas2	/depot/data2/galaxy/dm2/align/multiz15way/chr2L.maf,/depot/data2/galaxy/dm2/align/multiz15way/chr2R.maf,/depot/data2/galaxy/dm2/align/multiz15way/chr2h.maf,/depot/data2/galaxy/dm2/align/multiz15way/chr3L.maf,/depot/data2/galaxy/d [...]
+#17-way multiZ (mm8)	17_WAY_MULTIZ_mm8	mm8	hg18,panTro1,bosTau2,rheMac2,mm8,rn4,canFam2,echTel1,loxAfr1,oryCun1,danRer3,monDom4,dasNov1,galGal2,fr1,tetNig1,xenTro1	/depot/data2/galaxy/mm8/align/multiz17way/chr10.maf.lzo,/depot/data2/galaxy/mm8/align/multiz17way/chr10_random.maf.lzo,/depot/data2/galaxy/mm8/align/multiz17way/chr11.maf.lzo,/depot/data2/galaxy/mm8/align/multiz17way/chr12.maf.lzo,/depot/data2/galaxy/mm8/align/multiz17way/chr13.maf.lzo,/depot/data2/galaxy/mm8/align/multiz17way [...]
+#8-way multiZ (ponAbe2)	8_WAY_MULTIZ_ponAbe2	ponAbe2,hg18,panTro2,rheMac2,calJac1,mm9,monDom4,ornAna1	ponAbe2,hg18,panTro2,rheMac2,calJac1,mm9,monDom4,ornAna1	/depot/data2/galaxy/ponAbe2/align/multiz8way/maf/chr10.maf.lzo,/depot/data2/galaxy/ponAbe2/align/multiz8way/maf/chr10_random.maf.lzo,/depot/data2/galaxy/ponAbe2/align/multiz8way/maf/chr11.maf.lzo,/depot/data2/galaxy/ponAbe2/align/multiz8way/maf/chr11_random.maf.lzo,/depot/data2/galaxy/ponAbe2/align/multiz8way/maf/chr12.maf.lzo,/dep [...]
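Each data line carries five tab-separated fields, and the two build columns and the file column are themselves comma-separated lists, so a reader expands them after splitting on tabs. A minimal sketch (the loc path is an assumption):

    def read_maf_index_loc(path="tool-data/maf_index.loc"):
        """Yield (display_name, uid, indexed_builds, maf_builds, maf_paths)."""
        with open(path) as handle:
            for line in handle:
                if line.startswith("#") or not line.strip():
                    continue
                name, uid, indexed, in_maf, files = line.rstrip("\n").split("\t")[:5]
                yield (name, uid,
                       set(indexed.split(",")),
                       set(in_maf.split(",")),
                       files.split(","))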
diff --git a/tool-data/maf_pairwise.loc.sample b/tool-data/maf_pairwise.loc.sample
new file mode 100644
index 0000000..7a2d301
--- /dev/null
+++ b/tool-data/maf_pairwise.loc.sample
@@ -0,0 +1,31 @@
+#This is a sample file distributed with Galaxy that is used by some
+#alignment tools.  The maf_pairwise.loc file has this format (white 
+#space characters are TAB characters):
+#
+#<Display_name>	<UID>	<indexed_for:build1,build2,build3>	<exists_in_maf:build1,build2,build3>	<Comma_Separated_List_of_Full_Paths_To_Files>
+#
+#Pairwise (anoGam1,dm2)	PAIRWISE_anoGam1_dm2	anoGam1,dm2	anoGam1,dm2	/depot/data2/galaxy/anoGam1/align/dm2/maf/chr2L.maf,/depot/data2/galaxy/anoGam1/align/dm2/maf/chr2R.maf,/depot/data2/galaxy/anoGam1/align/dm2/maf/chr3L.maf,/depot/data2/galaxy/anoGam1/align/dm2/maf/chr3R.maf,/depot/data2/galaxy/anoGam1/align/dm2/maf/chrM.maf,/depot/data2/galaxy/anoGam1/align/dm2/maf/chrU.maf,/depot/data2/galaxy/anoGam1/align/dm2/maf/chrX.maf
+#Pairwise (canFam1,hg17)	PAIRWISE_canFam1_hg17	canFam1,hg17	canFam1,hg17	/depot/data2/galaxy/canFam1/align/hg17/maf/chr1.maf,/depot/data2/galaxy/canFam1/align/hg17/maf/chr10.maf,/depot/data2/galaxy/canFam1/align/hg17/maf/chr11.maf,/depot/data2/galaxy/canFam1/align/hg17/maf/chr12.maf,/depot/data2/galaxy/canFam1/align/hg17/maf/chr13.maf,/depot/data2/galaxy/canFam1/align/hg17/maf/chr14.maf,/depot/data2/galaxy/canFam1/align/hg17/maf/chr15.maf,/depot/data2/galaxy/canFam1/align/hg17/maf/chr16. [...]
+#Pairwise (canFam1,mm6)	PAIRWISE_canFam1_mm6	canFam1,mm6	canFam1,mm6	/depot/data2/galaxy/canFam1/align/mm6/maf/chr1.maf,/depot/data2/galaxy/canFam1/align/mm6/maf/chr10.maf,/depot/data2/galaxy/canFam1/align/mm6/maf/chr11.maf,/depot/data2/galaxy/canFam1/align/mm6/maf/chr12.maf,/depot/data2/galaxy/canFam1/align/mm6/maf/chr13.maf,/depot/data2/galaxy/canFam1/align/mm6/maf/chr14.maf,/depot/data2/galaxy/canFam1/align/mm6/maf/chr15.maf,/depot/data2/galaxy/canFam1/align/mm6/maf/chr16.maf,/depot/d [...]
+#Pairwise (canFam2,canFam2)	PAIRWISE_canFam2_canFam2	canFam2,canFam2	canFam2,canFam2	/depot/data2/galaxy/canFam2/align/canFam2/maf/chr1.maf,/depot/data2/galaxy/canFam2/align/canFam2/maf/chr10.maf,/depot/data2/galaxy/canFam2/align/canFam2/maf/chr11.maf,/depot/data2/galaxy/canFam2/align/canFam2/maf/chr12.maf,/depot/data2/galaxy/canFam2/align/canFam2/maf/chr13.maf,/depot/data2/galaxy/canFam2/align/canFam2/maf/chr14.maf,/depot/data2/galaxy/canFam2/align/canFam2/maf/chr15.maf,/depot/data2/gal [...]
+#Pairwise (canFam2,hg17)	PAIRWISE_canFam2_hg17	canFam2,hg17	canFam2,hg17	/depot/data2/galaxy/canFam2/align/hg17/maf/chr1.maf,/depot/data2/galaxy/canFam2/align/hg17/maf/chr10.maf,/depot/data2/galaxy/canFam2/align/hg17/maf/chr11.maf,/depot/data2/galaxy/canFam2/align/hg17/maf/chr12.maf,/depot/data2/galaxy/canFam2/align/hg17/maf/chr13.maf,/depot/data2/galaxy/canFam2/align/hg17/maf/chr14.maf,/depot/data2/galaxy/canFam2/align/hg17/maf/chr15.maf,/depot/data2/galaxy/canFam2/align/hg17/maf/chr16. [...]
+#Pairwise (canFam2,hg18)	PAIRWISE_canFam2_hg18	canFam2,hg18	canFam2,hg18	/depot/data2/galaxy/canFam2/align/hg18/maf/chr1.maf,/depot/data2/galaxy/canFam2/align/hg18/maf/chr10.maf,/depot/data2/galaxy/canFam2/align/hg18/maf/chr11.maf,/depot/data2/galaxy/canFam2/align/hg18/maf/chr12.maf,/depot/data2/galaxy/canFam2/align/hg18/maf/chr13.maf,/depot/data2/galaxy/canFam2/align/hg18/maf/chr14.maf,/depot/data2/galaxy/canFam2/align/hg18/maf/chr15.maf,/depot/data2/galaxy/canFam2/align/hg18/maf/chr16. [...]
+#Pairwise (canFam2,mm6)	PAIRWISE_canFam2_mm6	canFam2,mm6	canFam2,mm6	/depot/data2/galaxy/canFam2/align/mm6/maf/chr1.maf,/depot/data2/galaxy/canFam2/align/mm6/maf/chr10.maf,/depot/data2/galaxy/canFam2/align/mm6/maf/chr11.maf,/depot/data2/galaxy/canFam2/align/mm6/maf/chr12.maf,/depot/data2/galaxy/canFam2/align/mm6/maf/chr13.maf,/depot/data2/galaxy/canFam2/align/mm6/maf/chr14.maf,/depot/data2/galaxy/canFam2/align/mm6/maf/chr15.maf,/depot/data2/galaxy/canFam2/align/mm6/maf/chr16.maf,/depot/d [...]
+#Pairwise (canFam2,mm7)	PAIRWISE_canFam2_mm7	canFam2,mm7	canFam2,mm7	/depot/data2/galaxy/canFam2/align/mm7/maf/chr1.maf,/depot/data2/galaxy/canFam2/align/mm7/maf/chr10.maf,/depot/data2/galaxy/canFam2/align/mm7/maf/chr11.maf,/depot/data2/galaxy/canFam2/align/mm7/maf/chr12.maf,/depot/data2/galaxy/canFam2/align/mm7/maf/chr13.maf,/depot/data2/galaxy/canFam2/align/mm7/maf/chr14.maf,/depot/data2/galaxy/canFam2/align/mm7/maf/chr15.maf,/depot/data2/galaxy/canFam2/align/mm7/maf/chr16.maf,/depot/d [...]
+#Pairwise (canFam2,mm8)	PAIRWISE_canFam2_mm8	canFam2,mm8	canFam2,mm8	/depot/data2/galaxy/canFam2/align/mm8/maf/chr1.maf,/depot/data2/galaxy/canFam2/align/mm8/maf/chr10.maf,/depot/data2/galaxy/canFam2/align/mm8/maf/chr11.maf,/depot/data2/galaxy/canFam2/align/mm8/maf/chr12.maf,/depot/data2/galaxy/canFam2/align/mm8/maf/chr13.maf,/depot/data2/galaxy/canFam2/align/mm8/maf/chr14.maf,/depot/data2/galaxy/canFam2/align/mm8/maf/chr15.maf,/depot/data2/galaxy/canFam2/align/mm8/maf/chr16.maf,/depot/d [...]
+#Pairwise (canFam2,rn3)	PAIRWISE_canFam2_rn3	canFam2,rn3	canFam2,rn3	/depot/data2/galaxy/canFam2/align/rn3/maf/chr1.maf,/depot/data2/galaxy/canFam2/align/rn3/maf/chr10.maf,/depot/data2/galaxy/canFam2/align/rn3/maf/chr11.maf,/depot/data2/galaxy/canFam2/align/rn3/maf/chr12.maf,/depot/data2/galaxy/canFam2/align/rn3/maf/chr13.maf,/depot/data2/galaxy/canFam2/align/rn3/maf/chr14.maf,/depot/data2/galaxy/canFam2/align/rn3/maf/chr15.maf,/depot/data2/galaxy/canFam2/align/rn3/maf/chr16.maf,/depot/d [...]
+#Pairwise (canFam2,rn4)	PAIRWISE_canFam2_rn4	canFam2,rn4	canFam2,rn4	/depot/data2/galaxy/canFam2/align/rn4/maf/chr1.maf,/depot/data2/galaxy/canFam2/align/rn4/maf/chr10.maf,/depot/data2/galaxy/canFam2/align/rn4/maf/chr11.maf,/depot/data2/galaxy/canFam2/align/rn4/maf/chr12.maf,/depot/data2/galaxy/canFam2/align/rn4/maf/chr13.maf,/depot/data2/galaxy/canFam2/align/rn4/maf/chr14.maf,/depot/data2/galaxy/canFam2/align/rn4/maf/chr15.maf,/depot/data2/galaxy/canFam2/align/rn4/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer2,fr1)	PAIRWISE_danRer2_fr1	danRer2,fr1	danRer2,fr1	/depot/data2/galaxy/danRer2/align/fr1/maf/chr1.maf,/depot/data2/galaxy/danRer2/align/fr1/maf/chr10.maf,/depot/data2/galaxy/danRer2/align/fr1/maf/chr11.maf,/depot/data2/galaxy/danRer2/align/fr1/maf/chr12.maf,/depot/data2/galaxy/danRer2/align/fr1/maf/chr13.maf,/depot/data2/galaxy/danRer2/align/fr1/maf/chr14.maf,/depot/data2/galaxy/danRer2/align/fr1/maf/chr15.maf,/depot/data2/galaxy/danRer2/align/fr1/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer2,hg17)	PAIRWISE_danRer2_hg17	danRer2,hg17	danRer2,hg17	/depot/data2/galaxy/danRer2/align/hg17/maf/chr1.maf,/depot/data2/galaxy/danRer2/align/hg17/maf/chr10.maf,/depot/data2/galaxy/danRer2/align/hg17/maf/chr11.maf,/depot/data2/galaxy/danRer2/align/hg17/maf/chr12.maf,/depot/data2/galaxy/danRer2/align/hg17/maf/chr13.maf,/depot/data2/galaxy/danRer2/align/hg17/maf/chr14.maf,/depot/data2/galaxy/danRer2/align/hg17/maf/chr15.maf,/depot/data2/galaxy/danRer2/align/hg17/maf/chr16. [...]
+#Pairwise (danRer2,mm6)	PAIRWISE_danRer2_mm6	danRer2,mm6	danRer2,mm6	/depot/data2/galaxy/danRer2/align/mm6/maf/chr1.maf,/depot/data2/galaxy/danRer2/align/mm6/maf/chr10.maf,/depot/data2/galaxy/danRer2/align/mm6/maf/chr11.maf,/depot/data2/galaxy/danRer2/align/mm6/maf/chr12.maf,/depot/data2/galaxy/danRer2/align/mm6/maf/chr13.maf,/depot/data2/galaxy/danRer2/align/mm6/maf/chr14.maf,/depot/data2/galaxy/danRer2/align/mm6/maf/chr15.maf,/depot/data2/galaxy/danRer2/align/mm6/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer2,tetNig1)	PAIRWISE_danRer2_tetNig1	danRer2,tetNig1	danRer2,tetNig1	/depot/data2/galaxy/danRer2/align/tetNig1/maf/chr1.maf,/depot/data2/galaxy/danRer2/align/tetNig1/maf/chr10.maf,/depot/data2/galaxy/danRer2/align/tetNig1/maf/chr11.maf,/depot/data2/galaxy/danRer2/align/tetNig1/maf/chr12.maf,/depot/data2/galaxy/danRer2/align/tetNig1/maf/chr13.maf,/depot/data2/galaxy/danRer2/align/tetNig1/maf/chr14.maf,/depot/data2/galaxy/danRer2/align/tetNig1/maf/chr15.maf,/depot/data2/gal [...]
+#Pairwise (danRer3,fr1)	PAIRWISE_danRer3_fr1	danRer3,fr1	danRer3,fr1	/depot/data2/galaxy/danRer3/align/fr1/maf/chr1.maf,/depot/data2/galaxy/danRer3/align/fr1/maf/chr10.maf,/depot/data2/galaxy/danRer3/align/fr1/maf/chr11.maf,/depot/data2/galaxy/danRer3/align/fr1/maf/chr12.maf,/depot/data2/galaxy/danRer3/align/fr1/maf/chr13.maf,/depot/data2/galaxy/danRer3/align/fr1/maf/chr14.maf,/depot/data2/galaxy/danRer3/align/fr1/maf/chr15.maf,/depot/data2/galaxy/danRer3/align/fr1/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer3,hg18)	PAIRWISE_danRer3_hg18	danRer3,hg18	danRer3,hg18	/depot/data2/galaxy/danRer3/align/hg18/maf/chr1.maf,/depot/data2/galaxy/danRer3/align/hg18/maf/chr10.maf,/depot/data2/galaxy/danRer3/align/hg18/maf/chr11.maf,/depot/data2/galaxy/danRer3/align/hg18/maf/chr12.maf,/depot/data2/galaxy/danRer3/align/hg18/maf/chr13.maf,/depot/data2/galaxy/danRer3/align/hg18/maf/chr14.maf,/depot/data2/galaxy/danRer3/align/hg18/maf/chr15.maf,/depot/data2/galaxy/danRer3/align/hg18/maf/chr16. [...]
+#Pairwise (danRer3,mm7)	PAIRWISE_danRer3_mm7	danRer3,mm7	danRer3,mm7	/depot/data2/galaxy/danRer3/align/mm7/maf/chr1.maf,/depot/data2/galaxy/danRer3/align/mm7/maf/chr10.maf,/depot/data2/galaxy/danRer3/align/mm7/maf/chr11.maf,/depot/data2/galaxy/danRer3/align/mm7/maf/chr12.maf,/depot/data2/galaxy/danRer3/align/mm7/maf/chr13.maf,/depot/data2/galaxy/danRer3/align/mm7/maf/chr14.maf,/depot/data2/galaxy/danRer3/align/mm7/maf/chr15.maf,/depot/data2/galaxy/danRer3/align/mm7/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer3,mm8)	PAIRWISE_danRer3_mm8	danRer3,mm8	danRer3,mm8	/depot/data2/galaxy/danRer3/align/mm8/maf/chr1.maf,/depot/data2/galaxy/danRer3/align/mm8/maf/chr10.maf,/depot/data2/galaxy/danRer3/align/mm8/maf/chr11.maf,/depot/data2/galaxy/danRer3/align/mm8/maf/chr12.maf,/depot/data2/galaxy/danRer3/align/mm8/maf/chr13.maf,/depot/data2/galaxy/danRer3/align/mm8/maf/chr14.maf,/depot/data2/galaxy/danRer3/align/mm8/maf/chr15.maf,/depot/data2/galaxy/danRer3/align/mm8/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer3,tetNig1)	PAIRWISE_danRer3_tetNig1	danRer3,tetNig1	danRer3,tetNig1	/depot/data2/galaxy/danRer3/align/tetNig1/maf/chr1.maf,/depot/data2/galaxy/danRer3/align/tetNig1/maf/chr10.maf,/depot/data2/galaxy/danRer3/align/tetNig1/maf/chr11.maf,/depot/data2/galaxy/danRer3/align/tetNig1/maf/chr12.maf,/depot/data2/galaxy/danRer3/align/tetNig1/maf/chr13.maf,/depot/data2/galaxy/danRer3/align/tetNig1/maf/chr14.maf,/depot/data2/galaxy/danRer3/align/tetNig1/maf/chr15.maf,/depot/data2/gal [...]
+#Pairwise (danRer4,fr1)	PAIRWISE_danRer4_fr1	danRer4,fr1	danRer4,fr1	/depot/data2/galaxy/danRer4/align/fr1/maf/chr1.maf,/depot/data2/galaxy/danRer4/align/fr1/maf/chr10.maf,/depot/data2/galaxy/danRer4/align/fr1/maf/chr11.maf,/depot/data2/galaxy/danRer4/align/fr1/maf/chr12.maf,/depot/data2/galaxy/danRer4/align/fr1/maf/chr13.maf,/depot/data2/galaxy/danRer4/align/fr1/maf/chr14.maf,/depot/data2/galaxy/danRer4/align/fr1/maf/chr15.maf,/depot/data2/galaxy/danRer4/align/fr1/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer4,hg18)	PAIRWISE_danRer4_hg18	danRer4,hg18	danRer4,hg18	/depot/data2/galaxy/danRer4/align/hg18/maf/chr1.maf,/depot/data2/galaxy/danRer4/align/hg18/maf/chr10.maf,/depot/data2/galaxy/danRer4/align/hg18/maf/chr11.maf,/depot/data2/galaxy/danRer4/align/hg18/maf/chr12.maf,/depot/data2/galaxy/danRer4/align/hg18/maf/chr13.maf,/depot/data2/galaxy/danRer4/align/hg18/maf/chr14.maf,/depot/data2/galaxy/danRer4/align/hg18/maf/chr15.maf,/depot/data2/galaxy/danRer4/align/hg18/maf/chr16. [...]
+#Pairwise (danRer4,mm8)	PAIRWISE_danRer4_mm8	danRer4,mm8	danRer4,mm8	/depot/data2/galaxy/danRer4/align/mm8/maf/chr1.maf,/depot/data2/galaxy/danRer4/align/mm8/maf/chr10.maf,/depot/data2/galaxy/danRer4/align/mm8/maf/chr11.maf,/depot/data2/galaxy/danRer4/align/mm8/maf/chr12.maf,/depot/data2/galaxy/danRer4/align/mm8/maf/chr13.maf,/depot/data2/galaxy/danRer4/align/mm8/maf/chr14.maf,/depot/data2/galaxy/danRer4/align/mm8/maf/chr15.maf,/depot/data2/galaxy/danRer4/align/mm8/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer4,rn4)	PAIRWISE_danRer4_rn4	danRer4,rn4	danRer4,rn4	/depot/data2/galaxy/danRer4/align/rn4/maf/chr1.maf,/depot/data2/galaxy/danRer4/align/rn4/maf/chr10.maf,/depot/data2/galaxy/danRer4/align/rn4/maf/chr11.maf,/depot/data2/galaxy/danRer4/align/rn4/maf/chr12.maf,/depot/data2/galaxy/danRer4/align/rn4/maf/chr13.maf,/depot/data2/galaxy/danRer4/align/rn4/maf/chr14.maf,/depot/data2/galaxy/danRer4/align/rn4/maf/chr15.maf,/depot/data2/galaxy/danRer4/align/rn4/maf/chr16.maf,/depot/d [...]
+#Pairwise (danRer4,tetNig1)	PAIRWISE_danRer4_tetNig1	danRer4,tetNig1	danRer4,tetNig1	/depot/data2/galaxy/danRer4/align/tetNig1/maf/chr1.maf,/depot/data2/galaxy/danRer4/align/tetNig1/maf/chr10.maf,/depot/data2/galaxy/danRer4/align/tetNig1/maf/chr11.maf,/depot/data2/galaxy/danRer4/align/tetNig1/maf/chr12.maf,/depot/data2/galaxy/danRer4/align/tetNig1/maf/chr13.maf,/depot/data2/galaxy/danRer4/align/tetNig1/maf/chr14.maf,/depot/data2/galaxy/danRer4/align/tetNig1/maf/chr15.maf,/depot/data2/gal [...]
diff --git a/tool-data/microbial_data.loc.sample b/tool-data/microbial_data.loc.sample
new file mode 100755
index 0000000..d090b2d
--- /dev/null
+++ b/tool-data/microbial_data.loc.sample
@@ -0,0 +1,37 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to retrieve microbial data via a URL.
+#
+#ORG	campJeju_RM1221_1	Campylobacter jejuni RM1221	bacteria	Epsilonproteobacteria	chr	http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=genomeprj&cmd=Retrieve&dopt=Overview&list_uids=303	UCSC
+#CHR	campJeju_RM1221_1	chr	Campylobacter jejuni RM1221, complete genome	1777831	57236892	None	http://www.ncbi.nlm.nih.gov/entrez/viewer.fcgi?db=nucleotide&val=NC_003912
+#DATA	campJeju_RM1221_1_chr_CDS	campJeju_RM1221_1	chr	CDS	bed	/depot/data2/galaxy/microbes/campJeju_RM1221_1/chr.CDS.bed
+#DATA	campJeju_RM1221_1_chr_tRNA	campJeju_RM1221_1	chr	tRNA	bed	/depot/data2/galaxy/microbes/campJeju_RM1221_1/chr.tRNA.bed
+#DATA	campJeju_RM1221_1_chr_rRNA	campJeju_RM1221_1	chr	rRNA	bed	/depot/data2/galaxy/microbes/campJeju_RM1221_1/chr.rRNA.bed
+#DATA	campJeju_RM1221_1_chr_seq	campJeju_RM1221_1	chr	sequence	fasta	/depot/data2/galaxy/microbes/campJeju_RM1221_1/chr.fna
+#DATA	campJeju_RM1221_1_chr_GeneMark	campJeju_RM1221_1	chr	GeneMark	bed	/depot/data2/galaxy/microbes/campJeju_RM1221_1/chr.GeneMark.bed
+#DATA	campJeju_RM1221_1_chr_GeneMarkHMM	campJeju_RM1221_1	chr	GeneMarkHMM	bed	/depot/data2/galaxy/microbes/campJeju_RM1221_1/chr.GeneMarkHMM.bed
+#DATA	campJeju_RM1221_1_chr_Glimmer3	campJeju_RM1221_1	chr	Glimmer3	bed	/depot/data2/galaxy/microbes/campJeju_RM1221_1/chr.Glimmer3.bed
+#ORG	12521	Clostridium perfringens SM101	bacteria	Firmicutes	NC_008262,NC_008263,NC_008264,NC_008265	http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=genomeprj&cmd=Retrieve&dopt=Overview&list_uids=12521	None
+#CHR	12521	NC_008265	Clostridium perfringens phage phiSM101, complete genome	38092	110804020	None	http://www.ncbi.nlm.nih.gov/entrez/viewer.fcgi?db=nucleotide&val=NC_008265
+#DATA	12521_NC_008265_CDS	12521	NC_008265	CDS	bed	/depot/data2/galaxy/microbes/12521/NC_008265.CDS.bed
+#DATA	12521_NC_008265_tRNA	12521	NC_008265	tRNA	bed	/depot/data2/galaxy/microbes/12521/NC_008265.tRNA.bed
+#DATA	12521_NC_008265_rRNA	12521	NC_008265	rRNA	bed	/depot/data2/galaxy/microbes/12521/NC_008265.rRNA.bed
+#DATA	12521_NC_008265_seq	12521	NC_008265	sequence	fasta	/depot/data2/galaxy/microbes/12521/NC_008265.fna
+#DATA	12521_NC_008265_GeneMark	12521	NC_008265	GeneMark	bed	/depot/data2/galaxy/microbes/12521/NC_008265.GeneMark.bed
+#DATA	12521_NC_008265_GeneMarkHMM	12521	NC_008265	GeneMarkHMM	bed	/depot/data2/galaxy/microbes/12521/NC_008265.GeneMarkHMM.bed
+#DATA	12521_NC_008265_Glimmer3	12521	NC_008265	Glimmer3	bed	/depot/data2/galaxy/microbes/12521/NC_008265.Glimmer3.bed
+#CHR	12521	NC_008264	Clostridium perfringens SM101 plasmid 2, complete sequence	12206	110804009	None	http://www.ncbi.nlm.nih.gov/entrez/viewer.fcgi?db=nucleotide&val=NC_008264
+#DATA	12521_NC_008264_CDS	12521	NC_008264	CDS	bed	/depot/data2/galaxy/microbes/12521/NC_008264.CDS.bed
+#DATA	12521_NC_008264_tRNA	12521	NC_008264	tRNA	bed	/depot/data2/galaxy/microbes/12521/NC_008264.tRNA.bed
+#DATA	12521_NC_008264_rRNA	12521	NC_008264	rRNA	bed	/depot/data2/galaxy/microbes/12521/NC_008264.rRNA.bed
+#DATA	12521_NC_008264_seq	12521	NC_008264	sequence	fasta	/depot/data2/galaxy/microbes/12521/NC_008264.fna
+#DATA	12521_NC_008264_GeneMark	12521	NC_008264	GeneMark	bed	/depot/data2/galaxy/microbes/12521/NC_008264.GeneMark.bed
+#DATA	12521_NC_008264_GeneMarkHMM	12521	NC_008264	GeneMarkHMM	bed	/depot/data2/galaxy/microbes/12521/NC_008264.GeneMarkHMM.bed
+#DATA	12521_NC_008264_Glimmer3	12521	NC_008264	Glimmer3	bed	/depot/data2/galaxy/microbes/12521/NC_008264.Glimmer3.bed
+#CHR	12521	NC_008263	Clostridium perfringens SM101 plasmid 1, complete sequence	12397	110803998	None	http://www.ncbi.nlm.nih.gov/entrez/viewer.fcgi?db=nucleotide&val=NC_008263
+#DATA	12521_NC_008263_CDS	12521	NC_008263	CDS	bed	/depot/data2/galaxy/microbes/12521/NC_008263.CDS.bed
+#DATA	12521_NC_008263_tRNA	12521	NC_008263	tRNA	bed	/depot/data2/galaxy/microbes/12521/NC_008263.tRNA.bed
+#DATA	12521_NC_008263_rRNA	12521	NC_008263	rRNA	bed	/depot/data2/galaxy/microbes/12521/NC_008263.rRNA.bed
+#DATA	12521_NC_008263_seq	12521	NC_008263	sequence	fasta	/depot/data2/galaxy/microbes/12521/NC_008263.fna
+#DATA	12521_NC_008263_GeneMark	12521	NC_008263	GeneMark	bed	/depot/data2/galaxy/microbes/12521/NC_008263.GeneMark.bed
+#DATA	12521_NC_008263_GeneMarkHMM	12521	NC_008263	GeneMarkHMM	bed	/depot/data2/galaxy/microbes/12521/NC_008263.GeneMarkHMM.bed
+#DATA	12521_NC_008263_Glimmer3	12521	NC_008263	Glimmer3	bed	/depot/data2/galaxy/microbes/12521/NC_008263.Glimmer3.bed
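The file mixes three record types keyed by the first column: ORG introduces an organism, CHR a chromosome or plasmid of that organism, and DATA an annotation file for one chromosome. A minimal sketch that groups them, with the loc path and dictionary layout as assumptions:

    def read_microbial_loc(path="tool-data/microbial_data.loc"):
        """Group ORG/CHR/DATA records by organism id."""
        orgs = {}
        with open(path) as handle:
            for line in handle:
                if line.startswith("#") or not line.strip():
                    continue
                fields = line.rstrip("\n").split("\t")
                if fields[0] == "ORG":
                    orgs[fields[1]] = {"info": fields[2:], "chrs": {}, "data": []}
                elif fields[0] == "CHR":
                    orgs[fields[1]]["chrs"][fields[2]] = fields[3:]
                elif fields[0] == "DATA":
                    # fields[2] is the organism id, as in the examples above.
                    orgs[fields[2]]["data"].append(fields[1:])
        return orgs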
diff --git a/tool-data/mosaik_index.loc.sample b/tool-data/mosaik_index.loc.sample
new file mode 100644
index 0000000..59286b0
--- /dev/null
+++ b/tool-data/mosaik_index.loc.sample
@@ -0,0 +1,19 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of Mosaik indexed sequence data files. You will need
+#to create these data files and then create a mosaik_index.loc file
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The mosaik_index.loc 
+#file has this format (longer white space is the TAB character):
+#
+#<unique_build_id>	<dbkey>	<display_name>	<fasta_file_path>
+#
+#So, for example, if you had hg18 indexed and stored in 
+#/depot/data2/galaxy/mosaik/hg18/
+#then the mosaik_index.loc entry would look like this:
+#
+#hg18	hg18	hg18 Pretty	/depot/data2/galaxy/mosaik/hg18/hg18.fa
+#
+#and your /depot/data2/galaxy/mosaik/hg18/ directory
+#would contain the following files:
+#TODO handle mosaik jump tables.
+#
diff --git a/tool-data/ngs_sim_fasta.loc.sample b/tool-data/ngs_sim_fasta.loc.sample
new file mode 100644
index 0000000..70623c1
--- /dev/null
+++ b/tool-data/ngs_sim_fasta.loc.sample
@@ -0,0 +1,20 @@
+#This is a sample file distributed with Galaxy that enables the NGS simulation
+#tool to use some FASTA files. You will need to make sure that these FASTA files
+#are in place and then create an ngs_sim_fasta.loc file similar to this one
+#(store it in this directory) that points to the locations of those files. The
+#ngs_sim_fasta.loc file has this format (white space characters are TAB characters):
+#
+#<unique_build_id>	<dbkey>	<display_name>	<file_base_path>
+#
+#So, for example, if you had hg18chrM.fa in 
+#/data/path/hg18/seq/, 
+#then the ngs_sim.loc entry would look like this:
+#
+#hg18chrM	hg18	hg18chrM	/data/path/hg18/seq/hg18chrM.fa
+#
+#Your ngs_sim.loc file should include an entry per line for each FASTA file you
+#have stored.
+#
+#hg18chrM	hg18	hg18chrM	/data/path/hg18/seq/hg18chrM.fa
+#phiX174	phiX	phiX174	/data/path/genome/phiX/seq/phiX.fa
+#pUC18	pUC18	pUC18	/data/path/genome/pUC18/seq/pUC18.fa
diff --git a/tool-data/perm_base_index.loc.sample b/tool-data/perm_base_index.loc.sample
new file mode 100644
index 0000000..ff6a06b
--- /dev/null
+++ b/tool-data/perm_base_index.loc.sample
@@ -0,0 +1,27 @@
+#This is a sample file distributed with Galaxy that enables tools to
+#use a directory of PerM indexed sequence data files. You will need
+#to create these data files and then create a perm_base_index.loc file 
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The perm_base_index.loc 
+#file has this format (longer white space characters are TAB characters):
+#
+#<build_seed_readlength>	<display_name>	<file_base>
+#
+#Because each PerM index is built with a specific seed and a specific read
+#length, this needs to be specified so the user can choose the appropriate
+#one. So, for example, if you had phiX indexed with seed F3 and read length 
+#50, and stored in /depot/data/galaxy/phiX/perm_index/,
+#then the perm_base_index.loc entry would look something like this:
+#
+#phiX_F3_50	phiX: seed=F3, read length=50	/depot/data/galaxy/phiX/perm_index/phiX_base_F3_50.index
+#
+#and your /depot/data/galaxy/phiX/perm_index/ directory
+#would contain the file phiX_base_F3_50.index.
+#
+#Your perm_base_index.loc file should include an entry per line for each
+#index set you have stored. For example:
+#
+#phiX_F3_50	phiX: seed=F3, read length=50	/data/galaxy/phiX/perm_index/phiX_base_F3_50.index
+#phiX_F4_50	phiX: seed=F4, read length=50	/data/galaxy/phiX/perm_index/phiX_base_F4_50.index
+#hg19_F3_50	hg19: seed=F3, read length=50	/data/galaxy/hg19/perm_index/hg19_base_F3_50.index
+#hg19_F4_50	hg19: seed=F4, read length=50	/data/galaxy/hg19/perm_index/hg19_base_F4_50.index
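Because the unique id and the display name both encode the same (build, seed, read length) triple, new entries can be generated rather than typed. A minimal sketch (the helper name is illustrative):

    def perm_loc_entry(build, seed, read_length, index_path):
        """Build one perm_base_index.loc line in <build_seed_readlength> form."""
        unique_id = "%s_%s_%d" % (build, seed, read_length)
        display = "%s: seed=%s, read length=%d" % (build, seed, read_length)
        return "\t".join([unique_id, display, index_path]) + "\n"

    # perm_loc_entry("phiX", "F3", 50,
    #                "/data/galaxy/phiX/perm_index/phiX_base_F3_50.index")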
diff --git a/tool-data/perm_color_index.loc.sample b/tool-data/perm_color_index.loc.sample
new file mode 100644
index 0000000..00f687a
--- /dev/null
+++ b/tool-data/perm_color_index.loc.sample
@@ -0,0 +1,28 @@
+#This is a sample file distributed with Galaxy that enables tools to
+#use a directory of PerM indexed sequence data files. You will need
+#to create these data files and then create a perm_color_index.loc file 
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The perm_color_index.loc 
+#file has this format (white space characters are TAB characters):
+#
+#<build_seed_readlength>	<display_name>	<file_base>
+#
+#Because each PerM index is built with a specific seed and a specific read
+#length, this needs to be specified so the user can choose the appropriate
+#one. So, for example, if you had phiX indexed with seed F3 and read length 
+#50, and stored in /depot/data/galaxy/phiX/perm_index/,
+#then the perm_color_index.loc entry would look something like this:
+#
+#phiX_F3_50	phiX: seed=F3, read length=50	/data/galaxy/phiX/perm_index/phiX_color_F3_50.index
+#
+#and your /depot/data/galaxy/phiX/perm_index/ directory
+#would contain the file phiX_color_F3_50.index.
+#
+#Your perm_color_index.loc file should include an entry per line for each
+#index set you have stored. For example:
+#
+#phiX_F3_50	phiX: seed=F3, read length=50	/data/galaxy/phiX/perm_index/phiX_color_F3_50.index
+#phiX_F4_50	phiX: seed=F4, read length=50	/data/galaxy/phiX/perm_index/phiX_color_F4_50.index
+#hg19_F3_50	hg19: seed=F3, read length=50	/data/galaxy/hg19/perm_index/hg19_color_F3_50.index
+#hg19_F4_50	hg19: seed=F4, read length=50	/data/galaxy/hg19/perm_index/hg19_color_F4_50.index
+#
diff --git a/tool-data/phastOdds.loc.sample b/tool-data/phastOdds.loc.sample
new file mode 100644
index 0000000..bcdbec0
--- /dev/null
+++ b/tool-data/phastOdds.loc.sample
@@ -0,0 +1,21 @@
+#This is a sample file distributed with Galaxy that is used by the
+#phastOdds scores tool.  The phastOdds.loc file has this format (white space 
+#characters are TAB characters):
+#
+#<build>	<description>	<path to phastOdds score files>
+#
+#So, for example, if you had hg17 ENCODE regions only, SEP-2005 tba.v2 alignments
+#stored in /depot/data2/galaxy/phastOdds_precomputed/encode_SEP-2005_tba.v2_phastOdds, 
+#then the phastOdds.loc entry would look like this:
+#
+#hg17	ENCODE regions only, SEP-2005 tba.v2 alignments	/depot/data2/galaxy/phastOdds_precomputed/encode_SEP-2005_tba.v2_phastOdds
+#
+#and your /depot/data2/galaxy/phastOdds_precomputed/ directory would 
+#contain all of your alignment files (e.g.):
+#
+#-rw-r--r--   1 james g2cache 56555036 2006-02-22 18:26 encode_SEP-2005_tba.v2_phastOdds.h5
+#-rw-r--r--   1 james g2cache     1367 2006-02-08 12:23 encode_SEP-2005_tba.v2_phastOdds.mapping.bed
+#...etc...
+#
+#Your phastOdds.loc file should include an entry per line for each phastOdds 
+#file you have stored.
diff --git a/tool-data/picard_index.loc.sample b/tool-data/picard_index.loc.sample
new file mode 100644
index 0000000..d105568
--- /dev/null
+++ b/tool-data/picard_index.loc.sample
@@ -0,0 +1,26 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of Picard dict and associated files. You will need
+#to create these data files and then create a picard_index.loc file 
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The picard_index.loc 
+#file has this format (longer white space is the TAB character):
+#
+#<unique_build_id>	<dbkey>	<display_name>	<fasta_file_path>
+#
+#So, for example, if you had hg18 indexed and stored in
+#/depot/data2/galaxy/picard/hg18/,
+#then the picard_index.loc entry would look like this:
+#
+#hg18	hg18	hg18 Pretty	/depot/data2/galaxy/picard/hg18/hg18.fa
+#
+#and your /depot/data2/galaxy/picard/hg18/ directory
+#would contain the following three files:
+#hg18.fa
+#hg18.dict
+#hg18.fa.fai
+#
+#The dictionary file for each reference (e.g. hg18.dict) must be
+#created via Picard (http://picard.sourceforge.net). Note that
+#the dict file does not have the .fa extension although the
+#path listed in the loc file does include it.
+#
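The extension rule in the note above (the .fai keeps the .fa suffix while the dict drops it) is easy to get wrong, so here is a small consistency-check sketch, assuming only the layout this sample documents:

    import os

    def picard_companions(fasta_path):
        """Map hg18.fa -> (hg18.fa.fai, hg18.dict) and report any missing files."""
        fai = fasta_path + ".fai"
        dict_file = os.path.splitext(fasta_path)[0] + ".dict"  # .fa dropped
        missing = [p for p in (fasta_path, fai, dict_file) if not os.path.exists(p)]
        return fai, dict_file, missing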
diff --git a/tool-data/quality_scores.loc.sample b/tool-data/quality_scores.loc.sample
new file mode 100644
index 0000000..bfb49ff
--- /dev/null
+++ b/tool-data/quality_scores.loc.sample
@@ -0,0 +1,26 @@
+#This is a sample file distributed with Galaxy that is used by some
+#regional variation tools.  The quality_scores.loc file has this format (white 
+#space characters are TAB characters):
+#
+#<build>	<QualityScoreDirectoryPath>
+#
+#So, for example, if you had panTro2 quality score files stored in 
+#/depot/data2/galaxy/panTro2/quality_scores, then the quality_scores.loc entry 
+#would look like this:
+#
+#panTro2	/depot/data2/galaxy/panTro2/quality_scores
+#
+#and your /depot/data2/galaxy/panTro2/quality_scores directory would 
+#contain all of your quality score files (e.g.):
+#
+#-rw-r--r--  1 gua110 galaxy 10964768 2007-10-02 10:33 chr10.qa.bqv
+#-rw-r--r--  1 gua110 galaxy  1749899 2007-10-02 10:33 chr10_random.qa.bqv
+#-rw-r--r--  1 gua110 galaxy 11310721 2007-10-02 10:33 chr11.qa.bqv
+#...etc...
+#
+#Your quality_scores.loc file should include an entry per line for each
+#directory of quality score files you have stored.  For example:
+#
+#panTro2	/depot/data2/galaxy/panTro2/quality_scores
+#rheMac2	/depot/data2/galaxy/rheMac2/quality_scores
+#ponAbe2	/depot/data2/galaxy/ponAbe2/quality_scores
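Because the second column here is a directory rather than a single file, resolving a build means globbing its *.qa.bqv files; a minimal sketch under that assumption:

    import glob
    import os

    def quality_score_files(loc_path, build):
        """Return the sorted *.qa.bqv files for a build's score directory."""
        with open(loc_path) as handle:
            for line in handle:
                if line.startswith("#") or not line.strip():
                    continue
                key, directory = line.rstrip("\n").split("\t")
                if key == build:
                    return sorted(glob.glob(os.path.join(directory, "*.qa.bqv")))
        return []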
diff --git a/tool-data/regions.loc.sample b/tool-data/regions.loc.sample
new file mode 100644
index 0000000..fac1412
--- /dev/null
+++ b/tool-data/regions.loc.sample
@@ -0,0 +1,24 @@
+#This is a sample file distributed with Galaxy that is used by some
+#ENCODE tools.  The regions.loc file has this format (white space 
+#characters are TAB characters):
+#
+#<build>	<uid>	<region_description>	<file_path>
+#
+#So, for example, if you had ENCODE Regions for build hg16 stored in 
+#/depot/data2/galaxy/regions/encode_regions_coords_hg16.bed, then the
+#regions.loc entry would look like this:
+#
+#hg16	encode_hg16	ENCODE Regions	/depot/data2/galaxy/regions/encode_regions_coords_hg16.bed
+#
+#and your /depot/data2/galaxy/regions/ directory would 
+#contain all of your regions files (e.g.):
+#
+#-rw-r--r--   1 dan  g2cache 1367 2006-03-14 13:55 encode_regions_coords_hg16.bed
+#-rw-r--r--   1 dan  g2cache 1367 2006-03-14 13:53 encode_regions_coords_hg17.bed
+#...etc...
+#
+#Your regions.loc file should include an entry per line for each region 
+#file you have stored.  For example:
+#
+#hg16	encode_hg16	ENCODE Regions	/depot/data2/galaxy/regions/encode_regions_coords_hg16.bed 
+#hg17	encode_hg17	ENCODE Regions	/depot/data2/galaxy/regions/encode_regions_coords_hg17.bed
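With four columns keyed on the build, a tool would typically filter this file down to the entries matching the current dbkey and offer them as labeled choices. A sketch of that lookup (function and variable names are illustrative):

    def region_choices(loc_path, dbkey):
        """Return (uid, description, path) tuples for one build."""
        choices = []
        with open(loc_path) as handle:
            for line in handle:
                if line.startswith("#") or not line.strip():
                    continue
                build, uid, description, path = line.rstrip("\n").split("\t")
                if build == dbkey:
                    # .strip() tolerates stray trailing whitespace in entries.
                    choices.append((uid, description, path.strip()))
        return choices

    # region_choices("tool-data/regions.loc", "hg16")
    # -> [("encode_hg16", "ENCODE Regions", ".../encode_regions_coords_hg16.bed")]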
diff --git a/tool-data/sequence_index_base.loc.sample b/tool-data/sequence_index_base.loc.sample
new file mode 100644
index 0000000..475e9ec
--- /dev/null
+++ b/tool-data/sequence_index_base.loc.sample
@@ -0,0 +1,29 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of BWA-indexed sequence data files.  You will need
+#to create these data files and then create a sequence_index_base.loc file 
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The sequence_index_base.loc 
+#file has this format (white space characters are TAB characters):
+#
+#<build>	<file_base>
+#
+#So, for example, if you had phiX indexed and stored in 
+#/depot/data2/galaxy/phiX/base/, 
+#then the sequence_index_base.loc entry would look like this:
+#
+#phiX	/depot/data2/galaxy/phiX/base/phiX.fa
+#
+#and your /depot/data2/galaxy/phiX/base/ directory
+#would contain phiX.fa.* files:
+#
+#-rw-r--r--  1 james    universe 830134 2005-09-13 10:12 phiX.fa.amb
+#-rw-r--r--  1 james    universe 527388 2005-09-13 10:12 phiX.fa.ann
+#-rw-r--r--  1 james    universe 269808 2005-09-13 10:12 phiX.fa.bwt
+#...etc...
+#
+#Your sequence_index_base.loc file should include an entry per line for 
+#each index set you have stored.  The "file" in the path does not actually
+#exist, but it is the prefix for the actual index files.  For example:
+#
+#phiX	/depot/data2/galaxy/phiX/base/phiX.fa
+#hg18	/depot/data2/galaxy/hg18/base/hg18.fa
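Because the <file_base> is a prefix rather than an existing file, a sanity check has to probe the suffixed index files instead. .amb/.ann/.bwt appear in the listing above; .pac and .sa are assumed here as the remaining conventional BWA suffixes:

    import os

    # .amb/.ann/.bwt are shown above; .pac/.sa are an assumption for the
    # full index set of this BWA era.
    BWA_SUFFIXES = (".amb", ".ann", ".bwt", ".pac", ".sa")

    def bwa_index_complete(file_base):
        """True if every expected index file exists for this prefix."""
        return all(os.path.exists(file_base + s) for s in BWA_SUFFIXES)

    # bwa_index_complete("/depot/data2/galaxy/phiX/base/phiX.fa")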
diff --git a/tool-data/sequence_index_color.loc.sample b/tool-data/sequence_index_color.loc.sample
new file mode 100644
index 0000000..a06d380
--- /dev/null
+++ b/tool-data/sequence_index_color.loc.sample
@@ -0,0 +1,29 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of BWA-indexed sequence data files.  You will need
+#to create these data files and then create a sequence_index_color.loc file 
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The sequence_index_color.loc 
+#file has this format (white space characters are TAB characters):
+#
+#<build>	<file_base>
+#
+#So, for example, if you had phiX indexed and stored in 
+#/depot/data2/galaxy/phiX/color/, 
+#then the sequence_index_color.loc entry would look like this:
+#
+#phiX	/depot/data2/galaxy/phiX/color/phiX.fa
+#
+#and your /depot/data2/galaxy/phiX/color/ directory
+#would contain phiX.fa.* files:
+#
+#-rw-r--r--  1 james    universe 830134 2005-09-13 10:12 phiX.fa.amb
+#-rw-r--r--  1 james    universe 527388 2005-09-13 10:12 phiX.fa.ann
+#-rw-r--r--  1 james    universe 269808 2005-09-13 10:12 phiX.fa.bwt
+#...etc...
+#
+#Your sequence_index_color.loc file should include an entry per line for 
+#each index set you have stored.  The "file" in the path does not actually
+#exist, but it is the prefix for the actual index files.  For example:
+#
+#phiX	/depot/data2/galaxy/phiX/color/phiX.fa
+#hg18	/depot/data2/galaxy/hg18/color/hg18.fa
diff --git a/tool-data/shared/ensembl/ensembl_sites.txt b/tool-data/shared/ensembl/ensembl_sites.txt
new file mode 100644
index 0000000..793a763
--- /dev/null
+++ b/tool-data/shared/ensembl/ensembl_sites.txt
@@ -0,0 +1,5 @@
+#These builds are displayed using the method described in:
+#http://www.ensembl.org/info/docs/webcode/linking.html
+ensembl_Current	Current	http://www.ensembl.org/	hg19,felCat3,galGal3,bosTau4,canFam2,loxAfr3,cavPor3,equCab2,anoCar1,oryLat2,mm9,monDom5,ponAbe2,susScr2,ornAna1,oryCun2,rn4,rheMac2,gasAcu1,tetNig2,xenTro2,taeGut1,danRer5,ci2,dm3,ce6,sacCer2	Homo_sapiens,Felis_catus,Gallus_gallus,Bos_taurus,Canis_familiaris,Loxodonta_africana,Cavia_porcellus,Equus_caballus,Anolis_carolinensis,Oryzias_latipes,Mus_musculus,Monodelphis_domestica,Pongo_pygmaeus,Sus_scrofa,Ornithorhynchus_anatinus,Oryctolagus_ [...]
+ensembl_May_2009	May 2009	http://may2009.archive.ensembl.org/	hg18	Homo_sapiens
+ensembl_plants	Plants	http://plants.ensembl.org/	araTha_tair9	Arabidopsis_thaliana
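The fourth and fifth columns of these site files are parallel comma-separated lists, so dbkeys pair with organism names positionally; a one-line sketch of that pairing:

    def site_mapping(dbkeys_field, organisms_field):
        """Zip the parallel comma-separated columns into a dbkey -> organism dict."""
        return dict(zip(dbkeys_field.split(","), organisms_field.split(",")))

    # site_mapping("hg18", "Homo_sapiens") -> {"hg18": "Homo_sapiens"}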
diff --git a/tool-data/shared/ensembl/ensembl_sites_data_URL.txt b/tool-data/shared/ensembl/ensembl_sites_data_URL.txt
new file mode 100644
index 0000000..f3565f1
--- /dev/null
+++ b/tool-data/shared/ensembl/ensembl_sites_data_URL.txt
@@ -0,0 +1,8 @@
+#These builds are displayed using the method described in:
+#http://aug2007.archive.ensembl.org/Homo_sapiens/helpview?se=1;kw=urlsource
+ensembl_March_2008	March 2008	http://mar2008.archive.ensembl.org/	bosTau3,tetNig1	Bos_taurus,Tetraodon_nigroviridis
+ensembl_February_2007	February 2007	http://feb2007.archive.ensembl.org/	monDom4,danRer4	Monodelphis_domestica,Danio_rerio
+ensembl_July_2008	July 2008	http://jul2008.archive.ensembl.org/	panTro2	Pan_troglodytes
+ensembl_April_2006	April 2006	http://apr2006.archive.ensembl.org/	galGal2,bosTau2,canFam1,mm7,rheMac1,danRer3,apiMel2,sacCer1	Gallus_gallus,Bos_taurus,Canis_familiaris,Mus_musculus,Macaca_mulatta,Danio_rerio,Apis_mellifera,Saccharomyces_cerevisiae
+ensembl_November_2005	November 2005	http://nov2005.archive.ensembl.org/	hg17,panTro1,bosTau1,mm6,xenTro1,anoGam1,dm2	Homo_sapiens,Pan_troglodytes,Bos_taurus,Mus_musculus,Xenopus_tropicalis,Anopheles_gambiae,Drosophila_melanogaster
+ensembl_August_2007	August 2007	http://aug2007.archive.ensembl.org/	mm8,ce4	Mus_musculus,Caenorhabditis_elegans
diff --git a/tool-data/shared/gbrowse/gbrowse_build_sites.txt b/tool-data/shared/gbrowse/gbrowse_build_sites.txt
new file mode 100644
index 0000000..df21421
--- /dev/null
+++ b/tool-data/shared/gbrowse/gbrowse_build_sites.txt
@@ -0,0 +1,24 @@
+#GBrowse Sites
+#site_id	site_name	site_link	site_dbkeys	site_organisms
+
+#WormBase
+wormbase	WormBase current	http://www.wormbase.org/db/gb2/gbrowse/	ce10,ce9,ce8,ce7,ce6,ce5,ce4,ce3,ce2,cb4,cb3,cb2,cb1,caeRem4,caeRem3,caeRem2,caeRem1,caePb2,caePb1,caeJap4,caeJap3,caeJap2,caeJap1,caeAng1	c_elegans,c_elegans,c_elegans,c_elegans,c_elegans,c_elegans,c_elegans,c_elegans,c_elegans,c_briggsae,c_briggsae,c_briggsae,c_briggsae,c_remanei,c_remanei,c_remanei,c_remanei,c_brenneri,c_brenneri,c_japonica,c_japonica,c_japonica,c_japonica,c_angaria
+wormbase_ws120	WormBase WS120	http://ws120.wormbase.org/db/seq/gbrowse/	ce2	wormbase
+wormbase_ws140	WormBase WS140	http://ws140.wormbase.org/db/seq/gbrowse/	ce3	wormbase
+wormbase_ws170	WormBase WS170	http://ws170.wormbase.org/db/seq/gbrowse/	ce4	wormbase
+wormbase_ws180	WormBase WS180	http://ws180.wormbase.org/db/seq/gbrowse/	ce5	elegans
+wormbase_ws190	WormBase WS190	http://ws190.wormbase.org/db/seq/gbrowse/	ce6	elegans
+wormbase_ws200	WormBase WS200	http://ws200.wormbase.org/db/seq/gbrowse/	ce7	c_elegans
+wormbase_ws204	WormBase WS204	http://ws204.wormbase.org/db/seq/gbrowse/	ce8	c_elegans
+wormbase_ws210	WormBase WS210	http://ws210.wormbase.org/db/seq/gbrowse/	ce9	c_elegans
+wormbase_ws220	WormBase WS220	http://ws220.wormbase.org/db/gb2/gbrowse/	ce10,caeRem4	c_elegans,c_remanei
+wormbase_ws225	WormBase WS225	http://ws225.wormbase.org/db/gb2/gbrowse/	caeAng1,cb4	c_angaria,c_briggsae
+
+#The Arabidopsis Information Resource (tair)
+tair	tair	http://arabidopsis.org/cgi-bin/gbrowse/	arabidopsis_tair8,arabidopsis,Arabidopsis_thaliana_TAIR10	arabidopsis_tair8,arabidopsis_tair9,arabidopsis
+
+#modENCODE
+modencode	modENCODE	http://gbrowse.modencode.org/fgb2/gbrowse/	ce10,dm3	worm,fly
+
+#Saccharomyces Genome Database (SGD)
+sgd_yeast	Saccharomyces Genome Database	http://browse.yeastgenome.org/fgb2/gbrowse/	Saccharomyces_cerevisiae_S288C_SGD2010	scgenome
diff --git a/tool-data/shared/igv/igv_build_sites.txt.sample b/tool-data/shared/igv/igv_build_sites.txt.sample
new file mode 100644
index 0000000..e19b0b6
--- /dev/null
+++ b/tool-data/shared/igv/igv_build_sites.txt.sample
@@ -0,0 +1,4 @@
+#site_id	site_name	site_url	dbkey	igv_build_name
+web_link_main	web current	http://www.broadinstitute.org/igv/projects/current/igv.php	hg_g1k_v37	b37
+#web_jnlp_1.5	web 1.5	http://www.broadinstitute.org/igvdata/jws/prod	hg19,hg_g1k_v37,hg18,1kg_ref,hg17,hg16,mm9,mm8,mm7,panTro2,rheMac2,rn4,canFam2,bosTau6,bosTau4,bosTau3,susScrofa,galGal3,cavPor3,monDom5,xenTro2,taeGut1,zebrafish,danRer6,danRer7,gasAcu1,Aplysia,Plasmodium_3D7_v2.1,Plasmodium_3D7_v5.5,Plasmodium_6.1,PlasmoDB_7.0,pvivax,GSM552910,sacCer1,sacCer2,sk1,Y55,sacCer62,spombe_709,spombe_1.55,candida,mg8,spur_2.1,spur_2.5,spur_3.0,WS201,ce6,ce4,dm3,dm2,dmel_5.9,dmel_r5.22,dmel_r [...]
+local_default	local	http://localhost:60151/load	hg38,hg19,hg_g1k_v37,hg18,1kg_ref,hg17,hg16,mm9,mm8,mm7,panTro2,rheMac2,rn4,canFam2,bosTau6,bosTau4,bosTau3,susScrofa,galGal3,cavPor3,monDom5,xenTro2,taeGut1,zebrafish,danRer6,danRer7,gasAcu1,Aplysia,Plasmodium_3D7_v2.1,Plasmodium_3D7_v5.5,Plasmodium_6.1,PlasmoDB_7.0,pvivax,GSM552910,sacCer1,sacCer2,sk1,Y55,sacCer62,spombe_709,spombe_1.55,candida,mg8,spur_2.1,spur_2.5,spur_3.0,WS201,ce6,ce4,dm3,dm2,dmel_5.9,dmel_r5.22,dmel_r5.33,tcas_2.0,tc [...]
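The local_default row targets IGV's desktop remote-control port (60151), so a display link is the site_url plus file and genome query parameters. The parameter names below follow IGV's remote-control API but should be treated as an assumption here:

    try:
        from urllib.parse import urlencode  # Python 3
    except ImportError:
        from urllib import urlencode  # Python 2, as shipped with this release

    def igv_load_link(site_url, data_url, genome):
        """Build e.g. http://localhost:60151/load?file=...&genome=hg19"""
        return "%s?%s" % (site_url, urlencode({"file": data_url, "genome": genome}))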
diff --git a/tool-data/shared/rviewer/rviewer_build_sites.txt.sample b/tool-data/shared/rviewer/rviewer_build_sites.txt.sample
new file mode 100644
index 0000000..5ce6195
--- /dev/null
+++ b/tool-data/shared/rviewer/rviewer_build_sites.txt.sample
@@ -0,0 +1,3 @@
+#site_id	site_name	site_url	dbkey	rviewer_genome_version
+#lbl_test	test	http://127.0.0.1:8888	hg18,hg19	hg18,hg19
+lbl_main	main	http://rviewer.lbl.gov/rviewer	hg18,hg19	hg18,hg19
diff --git a/tool-data/shared/ucsc/builds.txt.buildbot b/tool-data/shared/ucsc/builds.txt.buildbot
new file mode 100644
index 0000000..c68fa85
--- /dev/null
+++ b/tool-data/shared/ucsc/builds.txt.buildbot
@@ -0,0 +1 @@
+chrM	equCab2 chrM (chrM)
diff --git a/tool-data/shared/ucsc/builds.txt.sample b/tool-data/shared/ucsc/builds.txt.sample
new file mode 100644
index 0000000..d570340
--- /dev/null
+++ b/tool-data/shared/ucsc/builds.txt.sample
@@ -0,0 +1,152 @@
+#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
+?	unspecified (?)
+hg19Haps	hg19Haplotypes Feb. 2009 (GRCh37/hg19Haps) (hg19Haps)
+hg19	Human Feb. 2009 (GRCh37/hg19) (hg19)
+hg18	Human Mar. 2006 (NCBI36/hg18) (hg18)
+hg17	Human May 2004 (NCBI35/hg17) (hg17)
+hg16	Human July 2003 (NCBI34/hg16) (hg16)
+hg15	Human Apr. 2003 (NCBI33/hg15) (hg15)
+venter1	J. Craig Venter Sep. 2007 (HuRef/venter1) (venter1)
+panTro2	Chimp Mar. 2006 (CGSC 2.1/panTro2) (panTro2)
+panTro1	Chimp Nov. 2003 (CGSC 1.1/panTro1) (panTro1)
+gorGor2	Gorilla Aug. 2009 (Sanger 4/gorGor2) (gorGor2)
+gorGor1	Gorilla Oct. 2008 (Sanger 0.1/gorGor1) (gorGor1)
+ponAbe2	Orangutan July 2007 (WUGSC 2.0.2/ponAbe2) (ponAbe2)
+rheMac2	Rhesus Jan. 2006 (MGSC Merged 1.0/rheMac2) (rheMac2)
+papHam1	Baboon Nov. 2008 (Baylor 1.0/papHam1) (papHam1)
+calJac3	Marmoset March 2009 (WUGSC 3.2/calJac3) (calJac3)
+calJac1	Marmoset June 2007 (WUGSC 2.0.2/calJac1) (calJac1)
+otoGar1	Bushbaby Dec. 2006 (Broad/otoGar1) (otoGar1)
+micMur1	Mouse lemur Jun. 2003 (Broad/micMur1) (micMur1)
+tupBel1	Tree shrew Dec. 2006 (Broad/tupBel1) (tupBel1)
+mmtv	MMTV Nov. 2009 (MMTV/mmtv) (mmtv)
+homPan20	chimp/human Jun. 2008 (UCSC Recon/homPan20) (homPan20)
+homIni20	orang/human Jun. 2008 (UCSC Recon/homIni20) (homIni20)
+sorAra1	Shrew June 2006 (Broad/sorAra1) (sorAra1)
+mm9	Mouse July 2007 (NCBI37/mm9) (mm9)
+mm8	Mouse Feb. 2006 (NCBI36/mm8) (mm8)
+mm7	Mouse Aug. 2005 (NCBI35/mm7) (mm7)
+rn4	Rat Nov. 2004 (Baylor 3.4/rn4) (rn4)
+rn3	Rat June 2003 (Baylor 3.1/rn3) (rn3)
+speTri1	Squirrel Feb. 2008 (Broad/speTri1) (speTri1)
+cavPor3	Guinea pig Feb. 2008 (Broad/cavPor3) (cavPor3)
+oryCun2	Rabbit Apr. 2009 (Broad/oryCun2) (oryCun2)
+oryCun1	Rabbit May 2005 (Broad/oryCun1) (oryCun1)
+ochPri2	Pika Jul. 2008 (Broad/ochPri2) (ochPri2)
+eriEur1	Hedgehog June 2006 (Broad/eriEur1) (eriEur1)
+felCatV17e	Cat Dec. 2008 (NHGRI/GTB V17e/felCatV17e) (felCatV17e)
+felCat3	Cat Mar. 2006 (Broad/felCat3) (felCat3)
+ailMel1	Panda Dec. 2009 (BGI-Shenzhen 1.0/ailMel1) (ailMel1)
+nemVec1	Starlet sea anemone Jun. 2007 (JGI-PDF/nemVec1) (nemVec1)
+canFam2	Dog May 2005 (Broad/canFam2) (canFam2)
+canFam1	Dog July 2004 (Broad/canFam1) (canFam1)
+canFamPoodle1	Dog May 2003 (TIGR Poodle/canFamPoodle1) (canFamPoodle1)
+equCab2	Horse Sep. 2007 (Broad/equCab2) (equCab2)
+equCab1	Horse Jan. 2007 (Broad/equCab1) (equCab1)
+susScr2	Pig Nov. 2009 (SGSC Sscrofa9.2/susScr2) (susScr2)
+pteVam1	Megabat Jul. 2008 (Broad/pteVam1) (pteVam1)
+myoLuc1	Microbat Mar. 2006 (Broad/myoLuc1) (myoLuc1)
+susScr1	Pig Apr. 2009 (SGSC 9.53/susScr1) (susScr1)
+turTru1	Dolphin Feb. 2008 (Broad/turTru1) (turTru1)
+tarSyr1	Tarsier Aug. 2008 (Broad/tarSyr1) (tarSyr1)
+proCap1	Rock hyrax Jul. 2008 (Broad/proCap1) (proCap1)
+oviAri1	Sheep Feb. 2010 (ISGC Ovis_aries_1.0/oviAri1) (oviAri1)
+dipOrd1	Kangaroo rat Jul. 2008 (Broad/dipOrd1) (dipOrd1)
+choHof1	Sloth Jul. 2008 (Broad/choHof1) (choHof1)
+bosTau4	Cow Oct. 2007 (Baylor 4.0/bosTau4) (bosTau4)
+bosTau3	Cow Aug. 2006 (Baylor 3.1/bosTau3) (bosTau3)
+bosTau2	Cow Mar. 2005 (Baylor 2.0/bosTau2) (bosTau2)
+macEug1	Wallaby Nov. 2007 (Baylor 1.0/macEug1) (macEug1)
+dasNov2	Armadillo Jul. 2008 (Broad/dasNov2) (dasNov2)
+dasNov1	Armadillo May 2005 (Broad/dasNov1) (dasNov1)
+echTel1	Tenrec July 2005 (Broad/echTel1) (echTel1)
+loxAfr3	Elephant Jul. 2009 (Broad/loxAfr3) (loxAfr3)
+loxAfr2	Elephant Jul. 2008 (Broad/loxAfr2) (loxAfr2)
+loxAfr1	Elephant May 2005 (Broad/loxAfr1) (loxAfr1)
+monDom5	Opossum Oct. 2006 (Broad/monDom5) (monDom5)
+monDom4	Opossum Jan. 2006 (Broad/monDom4) (monDom4)
+monDom1	Opossum Oct. 2004 (Broad prelim/monDom1) (monDom1)
+ornAna1	Platypus Mar. 2007 (WUGSC 5.0.1/ornAna1) (ornAna1)
+galGal3	Chicken May 2006 (WUGSC 2.1/galGal3) (galGal3)
+galGal2	Chicken Feb. 2004 (WUGSC 1.0/galGal2) (galGal2)
+taeGut1	Zebra finch Jul. 2008 (WUGSC 3.2.4/taeGut1) (taeGut1)
+anoCar1	Lizard Feb. 2007 (Broad/anoCar1) (anoCar1)
+xenTro2	X. tropicalis Aug. 2005 (JGI 4.1/xenTro2) (xenTro2)
+xenTro1	X. tropicalis Oct. 2004 (JGI 3.0/xenTro1) (xenTro1)
+danRer6	Zebrafish Dec. 2008 (Zv8/danRer6) (danRer6)
+danRer5	Zebrafish July 2007 (Zv7/danRer5) (danRer5)
+danRer4	Zebrafish Mar. 2006 (Zv6/danRer4) (danRer4)
+danRer3	Zebrafish May 2005 (Zv5/danRer3) (danRer3)
+tetNig2	Tetraodon Mar. 2007 (Genoscope 8.0/tetNig2) (tetNig2)
+tetNig1	Tetraodon Feb. 2004 (Genoscope 7/tetNig1) (tetNig1)
+fr2	Fugu Oct. 2004 (JGI 4.0/fr2) (fr2)
+fr1	Fugu Aug. 2002 (JGI 3.0/fr1) (fr1)
+gasAcu1	Stickleback Feb. 2006 (Broad/gasAcu1) (gasAcu1)
+oryLat2	Medaka Oct. 2005 (NIG/UT MEDAKA1/oryLat2) (oryLat2)
+oryLat1	Medaka Apr. 2006 (NIG/UT MEDAKA1/oryLat1) (oryLat1)
+petMar1	Lamprey Mar. 2007 (WUGSC 3.0/petMar1) (petMar1)
+borEut13	Boreoeutherian Apr. 24. 2006 (UCSC Recon/borEut13) (borEut13)
+canHg12	Boreoeutherian Nov. 19. 2005 (UCSC Recon/canHg12) (canHg12)
+rodEnt13	Rodent Apr. 24. 2006 (UCSC Recon/rodEnt13) (rodEnt13)
+priMat13	Primate Apr. 24. 2006 (UCSC Recon/priMat13) (priMat13)
+nonAfr13	Non-Afrotheria Apr. 24. 2006 (UCSC Recon/nonAfr13) (nonAfr13)
+lauRas13	Laurasiatheria Apr. 24. 2006 (UCSC Recon/lauRas13) (lauRas13)
+homIni14	Hominidae Oct. 1. 2006 (UCSC Recon/homIni14) (homIni14)
+homIni13	Hominidae Apr. 24. 2006 (UCSC Recon/homIni13) (homIni13)
+gliRes13	Glires Apr. 24. 2006 (UCSC Recon/gliRes13) (gliRes13)
+eutHer13	Eutheria Apr. 24. 2006 (UCSC Recon/eutHer13) (eutHer13)
+euaGli13	Euarchontoglires Apr. 24. 2006 (UCSC Recon/euaGli13) (euaGli13)
+catArr1	Catarrhini June 13. 2006 (UCSC Recon/catArr1) (catArr1)
+afrOth13	Afrotheria Apr. 24. 2006 (UCSC Recon/afrOth13) (afrOth13)
+braFlo1	Lancelet Mar. 2006 (JGI 1.0/braFlo1) (braFlo1)
+ci2	C. intestinalis Mar. 2005 (JGI 2.1/ci2) (ci2)
+ci1	C. intestinalis Dec. 2002 (JGI 1.0/ci1) (ci1)
+cioSav2	C. savignyi July 2005 (Sidow Lab 2.0/cioSav2) (cioSav2)
+cioSav1	C. savignyi Apr. 2003 (Broad/cioSav1) (cioSav1)
+strPur2	S. purpuratus Sep. 2006 (Baylor 2.1/strPur2) (strPur2)
+strPur1	S. purpuratus Apr. 2005 (Baylor 1.1/strPur1) (strPur1)
+ce8	C. elegans Jun 2009 (WS204/ce8) (ce8)
+ce7	C. elegans Feb 2009 (WS200/ce7) (ce7)
+aplCal1	Sea hare Sept. 2008 (Broad 2.0/aplCal1) (aplCal1)
+ce6	C. elegans May 2008 (WS190/ce6) (ce6)
+ce5	C. elegans Aug. 2007 (WS180/ce5) (ce5)
+ce4	C. elegans Jan. 2007 (WS170/ce4) (ce4)
+ce3	C. elegans March 2005 (WS140/ce3) (ce3)
+ce2	C. elegans Mar. 2004 (WS120/ce2) (ce2)
+caePb2	C. brenneri Feb. 2008 (WUGSC 6.0.1/caePb2) (caePb2)
+caePb1	C. brenneri Jan. 2007 (WUGSC 4.0/caePb1) (caePb1)
+cb3	C. briggsae Jan. 2007 (WUGSC 1.0/cb3) (cb3)
+cb2	C. briggsae Aug 2005 (WUGSC prelim/cb2) (cb2)
+cb1	C. briggsae July 2002 (WormBase cb25.agp8/cb1) (cb1)
+caeRem3	C. remanei May 2007 (WUGSC 15.0.1/caeRem3) (caeRem3)
+caeRem2	C. remanei Mar. 2006 (WUGSC 1.0/caeRem2) (caeRem2)
+caeRem1	C. remanei March 2005 (WUGSC prelim/caeRem1) (caeRem1)
+caeJap2	C. japonica Jan. 2009 (WUGSC 4.0.1/caeJap2) (caeJap2)
+caeJap1	C. japonica Mar. 2008 (WUGSC 3.0.2/caeJap1) (caeJap1)
+priPac1	P. pacificus Feb. 2007 (WUGSC 5.0/priPac1) (priPac1)
+dm3	D. melanogaster Apr. 2006 (BDGP R5/dm3) (dm3)
+dm2	D. melanogaster Apr. 2004 (BDGP R4/dm2) (dm2)
+dm1	D. melanogaster Jan. 2003 (BDGP R3/dm1) (dm1)
+droSim1	D. simulans Apr. 2005 (WUGSC mosaic 1.0/droSim1) (droSim1)
+droSec1	D. sechellia Oct. 2005 (Broad/droSec1) (droSec1)
+droYak2	D. yakuba Nov. 2005 (WUGSC 7.1/droYak2) (droYak2)
+droYak1	D. yakuba Apr. 2004 (WUGSC 1.0/droYak1) (droYak1)
+droEre1	D. erecta Aug. 2005 (Agencourt prelim/droEre1) (droEre1)
+droAna2	D. ananassae Aug. 2005 (Agencourt prelim/droAna2) (droAna2)
+droAna1	D. ananassae July 2004 (TIGR/droAna1) (droAna1)
+dp3	D. pseudoobscura Nov. 2004 (FlyBase 1.03/dp3) (dp3)
+dp2	D. pseudoobscura Aug. 2003 (Baylor freeze1/dp2) (dp2)
+droPer1	D. persimilis Oct. 2005 (Broad/droPer1) (droPer1)
+droVir2	D. virilis Aug. 2005 (Agencourt prelim/droVir2) (droVir2)
+droVir1	D. virilis July 2004 (Agencourt prelim/droVir1) (droVir1)
+droMoj2	D. mojavensis Aug. 2005 (Agencourt prelim/droMoj2) (droMoj2)
+droMoj1	D. mojavensis Aug. 2004 (Agencourt prelim/droMoj1) (droMoj1)
+droGri1	D. grimshawi Aug. 2005 (Agencourt prelim/droGri1) (droGri1)
+anoGam1	A. gambiae Feb. 2003 (IAGEC MOZ2/anoGam1) (anoGam1)
+apiMel2	A. mellifera Jan. 2005 (Baylor 2.0/apiMel2) (apiMel2)
+apiMel1	A. mellifera July 2004 (Baylor 1.2/apiMel1) (apiMel1)
+triCas2	T. castaneum Sep. 2005 (Baylor 2.0/triCas2) (triCas2)
+falciparum	P. falciparum Plasmodium falciparum (?/falciparum) (falciparum)
+sacCer2	S. cerevisiae June 2008 (SGD/sacCer2) (sacCer2)
+sacCer1	S. cerevisiae Oct. 2003 (SGD/sacCer1) (sacCer1)
+sc1	SARS coronavirus Apr. 2003 (GenBank Apr. 14 '03/sc1) (sc1)
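Per the "Harvested from" note at the top of this file, the list comes from UCSC's DAS dsn endpoint. A hedged sketch of that harvest follows; the XML layout is per the DAS/1 spec, where each DSN holds a SOURCE id and a DESCRIPTION, but treat the exact element names as an assumption:

    import urllib2  # Python 2, as shipped with this release
    from xml.etree import ElementTree

    def harvest_builds(dsn_url="http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn"):
        tree = ElementTree.parse(urllib2.urlopen(dsn_url))
        for dsn in tree.iter("DSN"):
            source = dsn.find("SOURCE")
            if source is None:
                continue
            # Format matches the rows above: "<description> (<id>)"
            yield source.get("id"), "%s (%s)" % (dsn.findtext("DESCRIPTION", ""), source.get("id"))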
diff --git a/tool-data/shared/ucsc/manual_builds.txt.sample b/tool-data/shared/ucsc/manual_builds.txt.sample
new file mode 100644
index 0000000..391a924
--- /dev/null
+++ b/tool-data/shared/ucsc/manual_builds.txt.sample
@@ -0,0 +1,708 @@
+phiX	phiX174	phiX=5386
+16079	Mycobacterium sp. JLS	NC_009077=6048425
+symbTher_IAM14863	Symbiobacterium thermophilum IAM 14863	chr=3566135
+16070	Yersinia pseudotuberculosis IP 31758	NC_009708=4723306,NC_009705=153140,NC_009704=58679
+18883	Mycobacterium tuberculosis H37Ra	NC_009525=4419977
+nocaJS61	Nocardioides sp. JS614	plasmid_pNOCA01=307814,chr=4985871
+12521	Clostridium perfringens SM101	NC_008265=38092,NC_008264=12206,NC_008263=12397,NC_008262=2897393
+acidCryp_JF_5	Acidiphilium cryptum JF-5	plasmid_pACRY07=5629,plasmid_pACRY06=8781,plasmid_pACRY05=37155,plasmid_pACRY04=37415,plasmid_pACRY03=88953,plasmid_pACRY02=187422,plasmid_pACRY01=203589,plasmid_pACRY08=4909,chr=3389227
+lactLact	Lactococcus lactis subsp. lactis Il1403	chr=2365589
+13082	Bacillus licheniformis ATCC 14580	NC_006322=4222645
+344	Rhizobium leguminosarum bv. viciae 3841	NC_008384=684202,NC_008382=151564,NC_008383=147463,NC_008380=5057142,NC_008381=488135,NC_008379=352782,NC_008378=870021
+siliPome_DSS_3	Silicibacter pomeroyi DSS-3	chr=4109442,megaplasmid=491611
+paraSp_UWE25	Candidatus Protochlamydia amoebophila UWE25	chr=2414465
+geobKaus_HTA426	Geobacillus kaustophilus HTA426	chr=3544776,plasmid_pHTA426=47890
+rhizEtli_CFN_42	Rhizobium etli CFN 42	plasmid_p42d=371254,plasmid_p42c=250948,plasmid_p42b=184338,plasmid_p42a=194229,plasmid_p42f=642517,plasmid_p42e=505334,chr=4381608
+13162	Streptococcus thermophilus LMG 18311	NC_006448=1796846
+13163	Streptococcus thermophilus CNRZ1066	NC_006449=1796226
+18817	Methylobacterium radiotolerans JCM 2831	NC_010510=586164,NC_010502=27836,NC_010505=6077833,NC_010504=22114,NC_010507=21022,NC_010509=47003,NC_010518=36410,NC_010514=42985,NC_010517=37743
+28109	Bartonella tribocorum CIP 105476	NC_010161=2619061,NC_010160=23343
+dichNodo_VCS1703A	Dichelobacter nodosus VCS1703A	chr=1389350
+12931	Xanthomonas oryzae pv. oryzae KACC10331	NC_006834=4941439
+eschColi_O157H7	Escherichia coli O157:H7 str. Sakai	plasmid_pOSAK1=3306,chr=5498450,plasmid_pO157=92721
+burk383	Burkholderia sp. 383	chr3=1395069,chr2=3587082,chr1=3694126
+15759	Psychrobacter sp. PRwf-1	NC_009524=2978976,NC_009516=13956,NC_009517=2117
+15758	Staphylococcus aureus subsp. aureus JH1	NC_009632=2906507,NC_009619=30429
+27835	Microcystis aeruginosa NIES-843	NC_010296=5842795
+campJeju_81_176	Campylobacter jejuni subsp. jejuni 81-176	chr=1616554,plasmid_pVir=37473,plasmid_pTet=45025
+15751	Rhodopseudomonas palustris BisA53	NC_008435=5505494
+15750	Rhodopseudomonas palustris BisB18	NC_007925=5513844
+15757	Staphylococcus aureus subsp. aureus JH9	NC_009477=30429,NC_009487=2906700
+15756	Xanthobacter autotrophicus Py2	NC_009717=316164,NC_009720=5308934
+15755	Rhodobacter sphaeroides ATCC 17025	NC_009428=3217726,NC_009429=877879,NC_009433=13873,NC_009432=36198,NC_009431=121962,NC_009430=289489
+15754	Rhodobacter sphaeroides ATCC 17029	NC_009050=1219053,NC_009040=122606,NC_009049=3147721
+13603	Ralstonia eutropha H16	NC_008314=2912490,NC_005241=452156,NC_008313=4052032
+shewPutrCN32	Shewanella putrefaciens CN-32	chr=4659220
+aerPer1	Aeropyrum pernix K1	chr=1669696
+18255	Bacillus thuringiensis str. Al Hakam	NC_008600=5257091,NC_008598=55939
+acinSp_ADP1	Acinetobacter sp. ADP1	chr=3598621
+anapMarg_ST_MARIES	Anaplasma marginale str. St. Maries	chr=1197687
+297	Xanthomonas axonopodis pv. citri str. 306	NC_003922=64920,NC_003921=33700,NC_003919=5175554
+desuPsyc_LSV54	Desulfotalea psychrophila LSv54	chr=3523383,plasmid_small=14664,plasmid_large=121587
+17227	Desulfovibrio vulgaris subsp. vulgaris DP4	NC_008751=3462887,NC_008741=198504
+190	Yersinia enterocolitica subsp. enterocolitica 8081	NC_008791=67721,NC_008800=4615899
+193	Clostridium botulinum A str. ATCC 3502	NC_009495=3886916,NC_009496=16344
+270	Streptococcus pyogenes str. Manfredo	NC_009332=1841271
+16252	Synechococcus sp. JA-2-3B'a(2-13)	NC_007776=3046682
+16251	Synechococcus sp. JA-3-3Ab	NC_007775=2932766
+16319	Arcobacter butzleri RM4018	NC_009850=2341251
+276	Listeria monocytogenes EGD-e	NC_003210=2944528
+277	Streptococcus pneumoniae TIGR4	NC_003028=2160842
+13427	Heliobacterium modesticaldum Ice1	NC_010337=3075407
+279	Staphylococcus epidermidis ATCC 12228	NC_005008=4439,NC_004461=2499279,NC_005003=6585,NC_005006=8007,NC_005007=4679,NC_005004=24365,NC_005005=17261
+19979	Flavobacterium psychrophilum JIP02/86	NC_009613=2861988
+13548	Prochlorococcus marinus str. AS9601	NC_008816=1669886
+16700	Yersinia pestis Pestoides F	NC_009378=137010,NC_009381=4517345,NC_009377=71507
+therTher_HB8	Thermus thermophilus HB8	chr=1849742,plasmid_pTT27=256992,plasmid_pTT8=9322
+28035	Streptococcus pneumoniae Hungary19A-6	NC_010380=2245615
+19575	Bradyrhizobium sp. ORS278	NC_009445=7456587
+18083	Escherichia coli C str. ATCC 8739	
+methStad1	Methanosphaera stadtmanae DSM 3091	chr=1767403
+magnMagn_AMB_1	Magnetospirillum magneticum AMB-1	chr=4967148
+13901	Thermoanaerobacter pseudethanolicus ATCC 33223	NC_010321=2362816
+methBurt2	Methanococcoides burtonii DSM 6242	chr=2575032
+13908	Rhodoferax ferrireducens T118	NC_007901=257447,NC_007908=4712337
+13909	Pseudomonas putida F1	NC_009512=5959964
+16400	Haemophilus influenzae PittEE	NC_009566=1813033
+16401	Haemophilus influenzae PittGG	NC_009567=1887192
+100	Mycoplasma pulmonis UAB CTIP	NC_002771=963879
+methMari_C7	Methanococcus maripaludis C7	chr=1772694
+106	Halobacterium salinarum R1	NC_010364=2000962,NC_010367=40894,NC_010366=147625,NC_010369=194963,NC_010368=284332
+geobSulf	Geobacter sulfurreducens PCA	chr=3814139
+38	Haemophilus ducreyi 35000HP	NC_002940=1698955
+28247	Synechococcus sp. PCC 7002	NC_010480=124030,NC_010479=38515,NC_010478=31972,NC_010475=3008047,NC_010474=186459,NC_010477=16103,NC_010476=4809
+16791	Coxiella burnetii RSA 331	NC_010117=2016427,NC_010115=37317
+procMari_CCMP1375	Prochlorococcus marinus subsp. marinus str. CCMP1375	chr=1751080
+31	Klebsiella pneumoniae subsp. pneumoniae MGH 78578	NC_009653=3478,NC_009652=4259,NC_009651=88582,NC_009650=107576,NC_009648=5315120,NC_009649=175879
+oenoOeni_PSU_1	Oenococcus oeni PSU-1	chr=1780517
+16817	Pseudomonas stutzeri A1501	NC_009434=4567418
+wiggBrev	Wigglesworthia glossinidia endosymbiont of Glossina brevipalpis	chr=697724,plasmid_pWb1=5280
+10639	Mycoplasma hyopneumoniae 7448	NC_007332=920079
+10638	Yersinia pestis biovar Microtus str. 91001	NC_005810=4595065,NC_005813=70159,NC_005815=106642,NC_005814=21742,NC_005816=9609
+caldMaqu1	Caldivirga maquilingensis IC-167	chr=2077567
+salmEnte_PARATYPI_ATC	Salmonella enterica subsp. enterica serovar Paratyphi A str. ATCC 9150	chr=4585229
+onioYell_PHYTOPLASMA	Onion yellows phytoplasma OY-M	chr=860631
+18459	Francisella tularensis subsp. tularensis WY96-3418	NC_009257=1898476
+334	Streptococcus agalactiae NEM316	NC_004368=2211485
+mariAqua_VT8	Marinobacter aquaeolei VT8	plasmid_pMAQU01=239623,chr=4326849,plasmid_pMAQU02=213290
+16719	Fervidobacterium nodosum Rt17-B1	NC_009718=1948941
+330	Streptococcus agalactiae 2603V/R	NC_004116=2160267
+333	Streptococcus mutans UA159	NC_004350=2030921
+9638	Lactobacillus johnsonii NCC 533	NC_005362=1992676
+17729	Anaeromyxobacter sp. Fw109-5	NC_009675=5277990
+shewLoihPV4	Shewanella loihica PV-4	chr=4602594
+candPela_UBIQUE_HTCC1	Candidatus Pelagibacter ubique HTCC1062	chr=1308759
+myxoXant_DK_1622	Myxococcus xanthus DK 1622	chr=9139763
+9636	Rickettsia rickettsii str. 'Sheila Smith'	NC_009882=1257710
+salmTyph_TY2	Salmonella enterica subsp. enterica serovar Typhi str. Ty2	chr=4791961
+chloChlo_CAD3	Chlorobium chlorochromatii CaD3	chr=2572079
+mesoFlor_L1	Mesoplasma florum L1	chr=793224
+28921	Acinetobacter baumannii	NC_010401=5644,NC_010402=9661,NC_010403=2726,NC_010404=94413,NC_010410=3936291
+hermArse	Herminiimonas arsenicoxydans	chr=3424307
+methMaze1	Methanosarcina mazei Go1	chr=4096345
+17343	Sphingomonas wittichii RW1	NC_009508=222757,NC_009507=310228,NC_009511=5382261
+99	Mycoplasma pneumoniae M129	NC_000912=816394
+13643	Synechococcus sp. CC9605	NC_007516=2510659
+13642	Synechococcus sp. WH 7803	NC_009481=2366980
+lawsIntr_PHE_MN1_00	Lawsonia intracellularis PHE/MN1-00	plasmid_2=39794,plasmid_3=194553,chr=1457619,plasmid_1=27048
+jannCCS1	Jannaschia sp. CCS1	chr=4317977,plasmid1=86072
+91	Mycobacterium avium subsp. paratuberculosis K-10	NC_002944=4829781
+90	Mycobacterium leprae TN	NC_002677=3268203
+92	Mycobacterium smegmatis str. MC2 155	NC_008596=6988209
+95	Tropheryma whipplei str. Twist	NC_004572=927303
+13649	Xanthomonas campestris pv. vesicatoria str. 85-10	NC_007505=19146,NC_007504=1852,NC_007507=182572,NC_007506=38116,NC_007508=5178466
+13885	Chlamydia trachomatis A/HAR-13	NC_007430=7510,NC_007429=1044459
+13884	Rickettsia felis URRWXCal2	NC_007109=1485148,NC_007110=62829,NC_007111=39263
+13887	Streptococcus pyogenes MGAS6180	NC_007296=1897573
+13760	Corynebacterium glutamicum ATCC 13032	NC_006958=3282708
+13888	Streptococcus pyogenes MGAS5005	NC_007297=1838554
+ureaUrea	Ureaplasma parvum serovar 3 str. ATCC 700970	chr=751719
+streCoel	Streptomyces coelicolor A3(2)	chr=8667507,plasmid_SCP2=31317,plasmid_SCP1=356023
+orieTsut_BORYONG	Orientia tsutsugamushi str. Boryong	chr=2127051
+polaJS66	Polaromonas sp. JS666	plasmid_2=338007,chr=5200264,plasmid_1=360405
+rhodRubr_ATCC11170	Rhodospirillum rubrum ATCC 11170	chr=4352825,plasmid_unnamed=53732
+rhodRHA1	Rhodococcus sp. RHA1	plasmid_pRHL1=1123075,chr=7804765,plasmid_pRHL3=332361,plasmid_pRHL2=442536
+pireSp	Rhodopirellula baltica SH 1	chr=7145576
+franTula_TULARENSIS	Francisella tularensis subsp. tularensis SCHU S4	chr=1892819
+syneSp_WH8102	Synechococcus sp. WH 8102	chr=2434428
+19521	Clostridium botulinum A str. Hall	NC_009698=3760560
+238	Staphylococcus aureus subsp. aureus COL	NC_002951=2809422,NC_006629=4440
+15660	Prochlorococcus marinus str. NATL1A	NC_008819=1864731
+17823	Xylella fastidiosa M12	NC_010513=2475130
+methPetr_PM1	Methylibium petroleiphilum PM1	chr=4044195,plasmid_RPME01=599444
+237	Staphylococcus aureus subsp. aureus NCTC 8325	NC_007795=2821361
+thioCrun_XCL_2	Thiomicrospira crunogena XCL-2	chr=2427734
+haloHalo1	Halobacterium sp. NRC-1	chr=2014239,plasmid_pNRC100=191346,plasmid_pNRC200=365425
+16190	Roseiflexus sp. RS-1	NC_009523=5801598
+idioLoih_L2TR	Idiomarina loihiensis L2TR	chr=2839318
+18637	Methylobacterium extorquens PA1	NC_010172=5471154
+sodaGlos_MORSITANS	Sodalis glossinidius str. 'morsitans'	chr=4171146,plasmid_pSG3=10810,plasmid_pSG2=27240,plasmid_pSG1=83306
+18633	Prochlorococcus marinus str. MIT 9215	NC_009840=1738790
+mycoTube_H37RV	Mycobacterium tuberculosis H37Rv	chr=4411532
+polyQLWP	Polynucleobacter sp. QLW-P1DMWA-1	chr=2159490
+peloLute_DSM273	Pelodictyon luteolum DSM 273	chr=2364842
+burkCeno_AU_1054	Burkholderia cenocepacia AU 1054	chr3=1196094,chr2=2788459,chr1=3294563
+13942	Streptococcus sanguinis SK36	NC_009009=2388435
+13943	Burkholderia mallei NCTC 10229	NC_008835=2284095,NC_008836=3458208
+ente638	Enterobacter sp. 638	chr=4518712,plasmid_pENTE01=157749
+13946	Burkholderia mallei NCTC 10247	NC_009080=3495687,NC_009079=2352693
+13947	Burkholderia mallei SAVP1	NC_008784=1734922,NC_008785=3497479
+16363	Streptococcus pyogenes MGAS9429	NC_008021=1836467
+neorSenn_MIYAYAMA	Neorickettsia sennetsu str. Miyayama	chr=859006
+heliAcin_SHEEBA	Helicobacter acinonychis str. Sheeba	chr=1553927,plasmid_pHac1=3661
+16366	Streptococcus pyogenes MGAS10750	NC_008024=1937111
+16365	Streptococcus pyogenes MGAS2096	NC_008023=1860355
+13473	Nitrobacter hamburgensis X14	NC_007961=121408,NC_007960=188318,NC_007964=4406967,NC_007959=294829
+burkCepa_AMMD	Burkholderia ambifaria AMMD	plasmid_1=43581,chr3=1281472,chr2=2646969,chr1=3556545
+methMari_C5_1	Methanococcus maripaludis C5	chr=1780761,plasmid_pMMC501=8285
+13478	Aster yellows witches'-broom phytoplasma AYWB	NC_007718=4009,NC_007719=5104,NC_007720=4316,NC_007716=706569,NC_007717=3972
+13370	Actinobacillus succinogenes 130Z	NC_009655=2319663
+methBark1	Methanosarcina barkeri str. Fusaro	chr=4837408,plasmid1=36358
+geobMeta_GS15	Geobacter metallireducens GS-15	chr=3997420,plasmid_unnamed=13762
+eschColi_CFT073	Escherichia coli CFT073	chr=5231428
+metMar1	Methanococcus maripaludis S2	chr=1661137
+enteFaec_V583	Enterococcus faecalis V583	chr=3218031,plasmid_pTEF1=66320,plasmid_pTEF3=17963,plasmid_pTEF2=57660
+17265	Francisella tularensis subsp. holarctica OSU18	NC_008369=1895727
+shigFlex_2A	Shigella flexneri 2a str. 301	chr=4607203,plasmid_pCP301=221618
+erwiCaro_ATROSEPTICA	Pectobacterium atrosepticum SCRI1043	chr=5064019
+pseuHalo_TAC125	Pseudoalteromonas haloplanktis TAC125	chrII=635328,chrI=3214944
+17163	Campylobacter jejuni subsp. doylei 269.97	NC_009707=1845106
+17161	Campylobacter curvus 525.92	NC_009715=1971264
+10690	Mesorhizobium sp. BNC1	NC_008243=131247,NC_008242=343931,NC_008254=4412446,NC_008244=47561
+10694	Ehrlichia canis str. Jake	NC_007354=1315030
+10697	Mycoplasma mobile 163K	NC_006908=777079
+25	Bordetella parapertussis 12822	NC_002928=4773551
+26	Bordetella pertussis Tohama I	NC_002929=4086189
+16841	Candidatus Ruthia magnifica str. Cm (Calyptogena magnifica)	NC_008610=1160782
+sulfToko1	Sulfolobus tokodaii str. 7	chr=2694756
+404	Lactobacillus brevis ATCC 367	NC_008498=13413,NC_008499=35595,NC_008497=2291220
+403	Lactobacillus delbrueckii subsp. bulgaricus ATCC BAA-365	NC_008529=1856951
+13920	Psychrobacter cryohalolentis K5	NC_007969=3059876,NC_007968=41221
+401	Lactococcus lactis subsp. cremoris SK11	NC_008527=2438589,NC_008503=14041,NC_008504=9554,NC_008505=74750,NC_008506=47208,NC_008507=14206
+16137	Bradyrhizobium sp. BTAi1	NC_009475=228826,NC_009485=8264687
+uncuMeth_RCI	uncultured methanogenic archaeon RC-I	chr=3179916
+coryEffi_YS_314	Corynebacterium efficiens YS-314	chr=3147090
+13485	Parabacteroides distasonis ATCC 8503	NC_009615=4811379
+409	Mycoplasma gallisepticum R	NC_004829=996422
+28743	Yersinia pseudotuberculosis YPIII	NC_010465=4689441
+19469	Escherichia coli SECEC SMS-3-5	NC_010488=130440,NC_010498=5068389,NC_010485=8909,NC_010486=4074,NC_010487=3565
+psycArct_273_4	Psychrobacter arcticus 273-4	chr=2650701
+propAcne_KPA171202	Propionibacterium acnes KPA171202	chr=2560265
+acidJS42	Acidovorax sp. JS42	plasmid_pAOVO01=72689,chr=4448856,plasmid_pAOVO02=63609
+chroSale_DSM3043	Chromohalobacter salexigens DSM 3043	chr=3696649
+19065	Clostridium kluyveri DSM 555	NC_009466=59182,NC_009706=3964618
+18801	Staphylococcus aureus subsp. aureus str. Newman	NC_009641=2878897
+13151	Shigella sonnei Ss046	NC_009346=5153,NC_009345=8401,NC_009347=2101,NC_007385=214396,NC_007384=4825265
+18965	Sulfurovum sp. NBC37-1	NC_009663=2562277
+18963	Nitratiruptor sp. SB155-2	NC_009662=1877931
+18809	Methylobacterium sp. 4-46	
+tropWhip_TW08_27	Tropheryma whipplei TW08/27	chr=925938
+28135	Bordetella petrii DSM 12804	NC_010170=5287950
+natrPhar1	Natronomonas pharaonis DSM 2160	plasmid_PL23=23486,chr=2595221,plasmid_PL131=130989
+370	Chlamydophila felis Fe/C-56	NC_007900=7552,NC_007899=1166239
+377	Gluconacetobacter diazotrophicus PAl 5	NC_010123=16610,NC_010125=3944163,NC_010124=38818
+rubrXyla_DSM9941	Rubrobacter xylanophilus DSM 9941	chr=3225748
+15746	Prochlorococcus marinus str. MIT 9301	NC_009091=1641879
+15747	Rhodopseudomonas palustris HaA2	NC_007778=5331656
+18981	Finegoldia magna ATCC 29328	NC_010376=1797577,NC_010371=189163
+franCcI3	Frankia sp. CcI3	chr=5433628
+15749	Rhodopseudomonas palustris BisB5	NC_007958=4892717
+16549	Janthinobacterium sp. Marseille	NC_009659=4110251
+89	Mycobacterium bovis AF2122/97	NC_002945=4345492
+12716	Citrobacter koseri ATCC BAA-895	NC_009792=4720462,NC_009793=9294,NC_009794=5601
+82	Lactobacillus acidophilus NCFM	NC_006814=1993560
+81	Clostridium tetani E88	NC_004557=2799251,NC_004565=74082
+16095	Mycoplasma agalactiae PG2	NC_009497=877438
+87	Corynebacterium diphtheriae NCTC 13129	NC_002935=2488635
+84	Lactobacillus gasseri ATCC 33323	NC_008530=1894360
+85	Listeria monocytogenes str. 4b F2365	NC_002973=2905187
+20197	Francisella tularensis subsp. holarctica FTA	NC_009749=1890909
+caldSacc_DSM8903	Caldicellulosiruptor saccharolyticus DSM 8903	chr=2970275
+anabVari_ATCC29413	Anabaena variabilis ATCC 29413	plasmid_B=35762,plasmid_C=300758,chr=6365727,plasmid_A=366354
+lactPlan	Lactobacillus plantarum WCFS1	chr=3308274,plasmid_pWCFS101=1917,plasmid_pWCFS103=36069,plasmid_pWCFS102=2365
+carbHydr_Z_2901	Carboxydothermus hydrogenoformans Z-2901	chr=2401520
+13617	Prochlorococcus marinus str. MIT 9515	NC_008817=1704176
+10878	Bacillus anthracis str. Sterne	NC_005945=5228663
+16067	Yersinia pestis Angola	NC_010158=114570,NC_010159=4504254,NC_010157=68190
+16062	Leuconostoc citreum KM20	NC_010466=31463,NC_010467=17971,NC_010471=1796284,NC_010470=38713,NC_010469=12183
+16148	Leptospira borgpetersenii serovar Hardjo-bovis JB197	NC_008511=299762,NC_008510=3576473
+16146	Leptospira borgpetersenii serovar Hardjo-bovis L550	NC_008508=3614446,NC_008509=317336
+vibrVuln_CMCP6_1	Vibrio vulnificus CMCP6	chrII=1844853,chrI=3281944
+10877	Bacillus thuringiensis serovar konkukian str. 97-27	NC_006578=77112,NC_005957=5237682
+19517	Clostridium botulinum A str. ATCC 19397	NC_009697=3863450
+nitrWino_NB_255	Nitrobacter winogradskyi Nb-255	chr=3402093
+16321	Bifidobacterium adolescentis ATCC 15703	NC_008618=2089645
+17629	Pseudomonas putida GB-1	NC_010322=6078430
+17237	Rickettsia bellii OSU 85-389	NC_009883=1528980
+19519	Clostridium botulinum F str. Langeland	NC_009700=17531,NC_009699=3995387
+mesoLoti	Mesorhizobium loti MAFF303099	chr=7036071,plasmid_pMLa=351911,plasmid_pMLb=208315
+coxiBurn	Coxiella burnetii RSA 493	chr=1995281,plasmid_pQpH1=37393
+247	Chlamydophila pneumoniae AR39	NC_002179=1229853
+acidBact_ELLIN345	Acidobacteria bacterium Ellin345	chr=5650368
+241	Salmonella typhimurium LT2	NC_003277=93939,NC_003197=4857432
+16249	Bartonella bacilliformis KC583	NC_008783=1445021
+12997	Acaryochloris marina MBIC11017	NC_009929=226680,NC_009928=273121,NC_009934=2133,NC_009932=155110,NC_009933=120693,NC_009930=177162,NC_009931=172728,NC_009925=6503724,NC_009927=356087,NC_009926=374161
+xyleFast	Xylella fastidiosa 9a5c	plasmid_pXF1.3=1286,chr=2679306,plasmid_pXF51=51158
+13435	Lactobacillus sakei subsp. sakei 23K	NC_007576=1884661
+11752	Haemophilus influenzae 86-028NP	NC_007146=1914490
+16393	Neisseria meningitidis 053442	NC_010120=2153416
+16394	Thermoanaerobacter sp. X514	NC_010320=2457259
+methKand1	Methanopyrus kandleri AV19	chr=1694969
+bifiLong	Bifidobacterium longum NCC2705	chr=2256640,plasmid_pBLO1=3626
+leptInte	Leptospira interrogans serovar Lai str. 56601	chrII=358943,chrI=4332241
+burkCeno_HI2424	Burkholderia cenocepacia HI2424	plasmid_1=164857,chr3=1055417,chr2=2998664,chr1=3483902
+16364	Streptococcus pyogenes MGAS10270	NC_008022=1928252
+13875	Candidatus Blochmannia pennsylvanicus str. BPEN	NC_007292=791654
+buchSp	Buchnera aphidicola str. APS (Acyrthosiphon pisum)	chr=640681,plasmid_pTrp=7258,plasmid_pLeu=7786
+178	Burkholderia pseudomallei K96243	NC_006351=3173005,NC_006350=4074542
+176	Mycoplasma penetrans HF-2	NC_004432=1358633
+pyroCali1	Pyrobaculum calidifontis JCM 11548	chr=2009313
+chloTepi_TLS	Chlorobium tepidum TLS	chr=2154946
+13282	Synechococcus elongatus PCC 6301	NC_006576=2696255
+16800	Pseudomonas entomophila L48	NC_008027=5888780
+picrTorr1	Picrophilus torridus DSM 9790	chr=1545895
+shewANA3	Shewanella sp. ANA-3	plasmid_1=278942,chr1=4972204
+13443	Listeria welshimeri serovar 6b str. SLCC5334	NC_008555=2814130
+bdelBact	Bdellovibrio bacteriovorus HD100	chr=3782950
+gramFors_KT0803	Gramella forsetii KT0803	chr=3798465
+184	Clavibacter michiganensis subsp. sepedonicus	
+189	Streptomyces avermitilis MA-4680	NC_004719=94287,NC_003155=9025608
+clavMich_NCPPB_382	Clavibacter michiganensis subsp. michiganensis NCPPB 382	chr=3297891,plasmid_pCM1=27357,plasmid_pCM2=69989
+peloCarb	Pelobacter carbinolicus DSM 2380	chr=3665893
+20083	Campylobacter hominis ATCC BAA-381	NC_009713=3678,NC_009714=1711273
+saliTrop_CNB_440	Salinispora tropica CNB-440	chr=5183331
+17057	Borrelia afzelii PKo	NC_008277=905394,NC_008274=26533,NC_008273=30017,NC_008568=28533,NC_008569=24794,NC_008566=34273,NC_008567=32368,NC_008564=59958,NC_008565=59804
+campJeju	Campylobacter jejuni subsp. jejuni NCTC 11168	chr=1641481
+17053	Pseudomonas putida W619	NC_010501=5774330
+12	Pseudomonas fluorescens PfO-1	NC_007492=6438405
+15	Xanthomonas campestris pv. campestris str. 8004	NC_007086=5148708
+12475	Wolbachia endosymbiont strain TRS of Brugia malayi	NC_006833=1080084
+13217	Azoarcus sp. BH72	NC_008702=4376040
+dehaEthe_195	Dehalococcoides ethenogenes 195	chr=1469720
+methTherPT1	Methanosaeta thermophila PT	chr=1879471
+shewMR7	Shewanella sp. MR-7	chr=4792610,plasmid1=6499
+shewMR4	Shewanella sp. MR-4	chr=4706287
+15708	Acidovorax avenae subsp. citrulli AAC00-1	NC_008752=5352772
+19087	Ureaplasma parvum serovar 3 str. ATCC 27815	
+geobUran_RF4	Geobacter uraniireducens Rf4	chr=5136364
+shewDeni	Shewanella denitrificans OS217	chr=4545906
+pyrFur2	Pyrococcus furiosus DSM 3638	chr=1908256
+desuRedu_MI_1	Desulfotomaculum reducens MI-1	chr=3608104
+syntFuma_MPOB	Syntrophobacter fumaroxidans MPOB	chr=4990251
+278	Streptococcus pneumoniae R6	NC_003098=2038615
+13654	Synechococcus sp. RCC307	NC_009482=2224914
+13655	Synechococcus sp. CC9902	NC_007513=2234828
+16313	Staphylococcus aureus subsp. aureus USA300	NC_007790=3125,NC_007791=4439,NC_007792=37136,NC_007793=2872769
+legiPneu_PHILADELPHIA	Legionella pneumophila subsp. pneumophila str. Philadelphia 1	chr=3397754
+20391	Bacillus pumilus SAFR-032	NC_009848=3704465
+chlaPneu_CWL029	Chlamydophila pneumoniae CWL029	chr=1230230
+16259	Escherichia coli UTI89	NC_007946=5065741,NC_007941=114230
+13067	Bacteroides fragilis YCH46	NC_006297=33716,NC_006347=5277274
+gloeViol	Gloeobacter violaceus PCC 7421	chr=4659019
+moorTher_ATCC39073	Moorella thermoacetica ATCC 39073	chr=2628784
+323	Pseudomonas syringae pv. syringae B728a	NC_007005=6093698
+320	Brucella suis 1330	NC_004311=1207381,NC_004310=2107794
+roseDeni_OCH_114	Roseobacter denitrificans OCh 114	chr=5824
+326	Streptococcus agalactiae A909	NC_007432=2127839
+327	Pseudomonas fluorescens Pf-5	NC_004129=7074893
+325	Ehrlichia chaffeensis str. Arkansas	NC_007799=1176248
+329	Desulfovibrio desulfuricans G20	NC_007519=3730232
+16203	Brucella melitensis biovar Abortus 2308	NC_007624=1156948,NC_007618=2121359
+12950	Yersinia pseudotuberculosis IP 32953	NC_006153=68525,NC_006155=4744671,NC_006154=27702
+heliPylo_26695	Helicobacter pylori 26695	chr=1667867
+12952	Rickettsia canadensis str. McKiel	NC_009879=1159772
+12953	Rickettsia akari str. Hartford	NC_009881=1231060
+16208	Mycoplasma capricolum subsp. capricolum ATCC 27343	NC_007633=1010023
+17375	Francisella tularensis subsp. tularensis FSC198	NC_008245=1892616
+19259	Acholeplasma laidlawii PG-8A	NC_010163=1496992
+77	Clostridium acetobutylicum ATCC 824	NC_003030=3940880,NC_001988=192000
+74	Bacillus cereus ATCC 10987	NC_005707=208369,NC_003909=5224283
+15770	Dehalococcoides sp. BAV1	NC_009455=1341892
+79	Clostridium perfringens str. 13	NC_003042=54310,NC_003366=3031430
+78	Clostridium difficile 630	NC_009089=4290252,NC_008226=7881
+13624	Bacillus cereus subsp. cytotoxis NVH 391-98	NC_009674=4087024,NC_009673=7135
+13623	Bacillus weihenstephanensis KBAB4	NC_010184=5262775,NC_010181=75107,NC_010180=417054,NC_010183=52830,NC_010182=64977
+tricEryt_IMS101	Trichodesmium erythraeum IMS101	chr=7750108
+photLumi	Photorhabdus luminescens subsp. laumondii TTO1	chr=5688987
+peloTher_SI	Pelotomaculum thermopropionicum SI	chr=3025375
+18789	Shewanella sediminis HAW-EB3	NC_009831=5517674
+therKoda1	Thermococcus kodakarensis KOD1	chr=2088737
+oceaIhey	Oceanobacillus iheyensis HTE831	chr=3630528
+16297	Xanthomonas oryzae pv. oryzae MAFF 311018	NC_007705=4940217
+19639	Methanococcus maripaludis C6	NC_009975=1744193
+blocFlor	Candidatus Blochmannia floridanus	chr=705557
+shewW318	Shewanella sp. W3-18-1	chr=4708380
+16184	Clostridium phytofermentans ISDg	NC_010001=4847594
+17477	Acinetobacter baumannii ATCC 17978	NC_009084=11302,NC_009085=3976747,NC_009083=13408
+therElon	Thermosynechococcus elongatus BP-1	chr=2593857
+13403	Bacillus amyloliquefaciens FZB42	NC_009725=3918589
+17679	Petrotoga mobilis SJ95	NC_010003=2169548
+16374	Streptococcus pneumoniae D39	NC_008533=2046115
+16375	Shigella flexneri 5 str. 8401	NC_008258=4574284
+methBoon1	Candidatus Methanoregula boonei 6A8	chr=2542943
+mariMari_MCS10	Maricaulis maris MCS10	chr=3368780
+ralsEutr_JMP134	Ralstonia eutropha JMP134	plasmid_1=87688,chr2=2726152,chr1=3806533,megaplasmid=634917
+baciSubt	Bacillus subtilis subsp. subtilis str. 168	chr=4214630
+16372	Buchnera aphidicola str. Cc (Cinara cedri)	NC_008513=416380
+213	Prochlorococcus marinus subsp. pastoris str. CCMP1986	NC_005072=1657990
+erytLito_HTCC2594	Erythrobacter litoralis HTCC2594	chr=3052398
+acidCell_11B	Acidothermus cellulolyticus 11B	chr=2443540
+pyrAer1	Pyrobaculum aerophilum str. IM2	chr=2222430
+xantCamp	Xanthomonas campestris pv. campestris str. ATCC 33913	chr=5076188
+thioDeni_ATCC25259	Thiobacillus denitrificans ATCC 25259	chr=2909809
+28583	Chlamydia trachomatis 434/Bu	NC_010287=1038842
+deinRadi	Deinococcus radiodurans R1	plasmid_CP1=45704,plasmid_MP1=177466,chr2=412348,chr1=2648638
+yersPest_CO92	Yersinia pestis CO92	chr=4653728,plasmid_pCD1=70305,plasmid_pPCP1=9612,plasmid_pMT1=96210
+28585	Chlamydia trachomatis L2b/UCH-1/proctitis	NC_010280=1038869
+wolbEndo_OF_DROSOPHIL	Wolbachia endosymbiont of Drosophila melanogaster	chr=1267782
+17153	Streptococcus suis 05ZYH33	NC_009442=2096309
+10689	Kineococcus radiotolerans SRS30216	NC_009806=182572,NC_009664=4761183,NC_009660=12917
+17155	Streptococcus suis 98HAH33	NC_009443=2095698
+17159	Campylobacter concisus 13826	NC_009802=2052007,NC_009796=16457,NC_009795=30949
+ralsSola	Ralstonia solanacearum GMI1000	chr=3716413,plasmid_pGMI1000MP=2094509
+10687	Leptospira interrogans serovar Copenhageni str. Fiocruz L1-130	NC_005823=4277185,NC_005824=350181
+17403	Frankia alni ACN14a	NC_008278=7497934
+16871	Lactobacillus delbrueckii subsp. bulgaricus ATCC 11842	NC_008054=1864998
+12388	Bacillus licheniformis ATCC 14580	NC_006270=4222597
+neisMeni_Z2491_1	Neisseria meningitidis Z2491	chr=2184406
+saliRube_DSM13855	Salinibacter ruber DSM 13855	chr=3551823,plasmid_pSR35=35505
+17953	Campylobacter jejuni subsp. jejuni 81116	NC_009839=1628115
+10616	Mycoplasma mycoides subsp. mycoides SC str. PG1	NC_005364=1211703
+13496	Prochlorococcus marinus str. MIT 9303	NC_008820=2682675
+28507	Clostridium botulinum A3 str. Loch Maree	NC_010520=3992906,NC_010418=266785
+colwPsyc_34H	Colwellia psychrerythraea 34H	chr=5373180
+hyphNept_ATCC15444	Hyphomonas neptunium ATCC 15444	chr=3705021
+vibrChol1	Vibrio cholerae O1 biovar eltor str. N16961	chrII=1072315,chrI=2961149
+deinGeot_DSM11300	Deinococcus geothermalis DSM 11300	chr=2467205,plasmid_1=574127
+312	Buchnera aphidicola str. Sg (Schizaphis graminum)	NC_004061=641454
+311	Streptococcus pyogenes MGAS315	NC_004070=1900521
+metaSedu	Metallosphaera sedula DSM 5348	chr=2191517
+314	Clostridium thermocellum ATCC 27405	NC_009012=3843301
+lactSali_UCC118	Lactobacillus salivarius UCC118	plasmid_pSF118-20=20417,chr=1827111,plasmid_pMP118=242436,plasmid_pSF118-44=44013
+pediPent_ATCC25745	Pediococcus pentosaceus ATCC 25745	chr=1832387
+flavJohn_UW101	Flavobacterium johnsoniae UW101	chr=6096872
+mannSucc_MBEL55E	Mannheimia succiniciproducens MBEL55E	chr=2314078
+haemSomn_129PT	Haemophilus somnus 129PT	chr=2007700,plasmid_pHS129=5178
+13145	Shigella dysenteriae Sd197	NC_007606=4369232,NC_007607=182726,NC_009344=8953
+13146	Shigella boydii Sb227	NC_007613=4519823,NC_007608=126697
+17491	Legionella pneumophila str. Corby	NC_009494=3576470
+nanEqu1	Nanoarchaeum equitans Kin4-M	chr=490885
+12508	Staphylococcus haemolyticus JCSC1435	NC_007168=2685015
+13384	Pelobacter propionicus DSM 2379	NC_008609=4008000,NC_008608=30722,NC_008607=202397
+chlaTrac	Chlamydia trachomatis D/UW-3/CX	chr=1042519
+haloWals1	Haloquadratum walsbyi DSM 16790	chr=3132494,plasmid_PL47=46867
+13389	Shewanella baltica OS195	NC_009998=75605,NC_009999=75508,NC_010000=49148,NC_009997=5347283
+baciHalo	Bacillus halodurans C-125	chr=4202352
+hypeButy1	Hyperthermus butylicus DSM 5456	chr=1667163
+chroViol	Chromobacterium violaceum ATCC 12472	chr=4751080
+16088	Francisella tularensis subsp. novicida U112	NC_008601=1910031
+methJann1	Methanocaldococcus jannaschii DSM 2661	chr=1664970,lg_extrachr=58407,sm_extrachr=16550
+384	Bacillus cereus ATCC 14579	NC_004721=15274,NC_004722=5411809
+pastMult	Pasteurella multocida subsp. multocida str. Pm70	chr=2257487
+386	Pseudomonas aeruginosa UCBPP-PA14	NC_008463=6537648
+388	Haemophilus somnus 2336	NC_010519=2263857
+16083	Alkaliphilus oremlandii OhILAs	NC_009922=3123558
+27853	Francisella philomiragia subsp. philomiragia ATCC 25017	NC_010331=3936,NC_010336=2045775
+16081	Mycobacterium sp. KMS	NC_008703=302089,NC_008704=216763,NC_008705=5737227
+cytoHutc_ATCC33406	Cytophaga hutchinsonii ATCC 33406	chr=4433218
+nitrEuro	Nitrosomonas europaea ATCC 19718	chr=2812094
+12720	Enterobacter sakazakii ATCC BAA-894	NC_009780=131196,NC_009778=4368373,NC_009779=31208
+therMari	Thermotoga maritima MSB8	chr=1860725
+mycoGeni	Mycoplasma genitalium G37	chr=580076
+vibrFisc_ES114_1	Vibrio fischeri ES114	chrII=1332022,plasmid_pES100=45849,chrI=2906179
+pyroIsla1	Pyrobaculum islandicum DSM 4184	chr=1826402
+17407	Burkholderia multivorans ATCC 17616	NC_010084=3448466,NC_010070=167422,NC_010086=2472928,NC_010087=919806
+13030	Salmonella enterica subsp. arizonae serovar 62:z4,z23:--	NC_010067=4600800
+18271	Rickettsia massiliae MTU5	NC_009897=15286,NC_009900=1360898
+21047	Candidatus Desulforudis audaxviator MP104C	NC_010424=2349476
+19857	Vibrio harveyi ATCC BAA-1116	NC_009783=3765351,NC_009777=89008,NC_009784=2204018
+17639	Parvibaculum lavamentivorans DS-1	NC_009719=3914745
+18059	Mycobacterium bovis BCG str. Pasteur 1173P2	NC_008769=4374522
+vibrPara1	Vibrio parahaemolyticus RIMD 2210633	chrII=1877212,chrI=3288558
+259	Escherichia coli O157:H7 EDL933	NC_007414=92077,NC_002655=5528445
+64	Staphylococcus epidermidis RP62A	NC_006663=27310,NC_002976=2616530
+neisGono_FA1090_1	Neisseria gonorrhoeae FA 1090	chr=2153922
+66	Streptococcus gordonii str. Challis substr. CH1	NC_009785=2196662
+16723	Aeromonas salmonicida subsp. salmonicida A449	NC_009349=166749,NC_009348=4702402,NC_009350=155098,NC_004923=5424,NC_004924=5616,NC_004925=5247
+16230	Mycobacterium ulcerans Agy99	NC_008611=5631606
+16721	Coxiella burnetii Dugway 5J108-111	NC_009727=2158758,NC_009726=54179
+16720	Pseudomonas aeruginosa PA7	NC_009656=6588339
+256	Buchnera aphidicola str. Bp (Baizongia pistaciae)	NC_004555=2399,NC_004545=615980
+257	Chlamydophila pneumoniae J138	NC_002491=1226565
+novoArom_DSM12444	Novosphingobium aromaticivorans DSM 12444	chr=3561584,plasmid_pNL2=487268,plasmid_pNL1=184462
+15642	Mycobacterium tuberculosis F11	NC_009565=4424435
+azoaSp_EBN1	Azoarcus sp. EbN1	plasmid_2=223670,chr=4296230,plasmid_1=207355
+therTher_HB27	Thermus thermophilus HB27	chr=1894877,plasmid_pTT27=232605
+metAce1	Methanosarcina acetivorans C2A	chr=5751492
+glucOxyd_621H	Gluconobacter oxydans 621H	plasmid_pGOX2=26568,plasmid_pGOX1=163186,chr=2702173,plasmid_pGOX3=14547,plasmid_pGOX4=13223,plasmid_pGOX5=2687
+listInno	Listeria innocua Clip11262	chr=3011208,plasmid_pLI100=81905
+sulfAcid1	Sulfolobus acidocaldarius DSM 639	chr=2225959
+geobTher_NG80_2	Geobacillus thermodenitrificans NG80-2	chr=3550319,plasmid_pLW1071=57693
+402	Lactobacillus casei ATCC 334	NC_008502=29061,NC_008526=2895264
+13960	Escherichia coli E24377A	NC_009789=6199,NC_009788=70609,NC_009801=4979619,NC_009790=74224,NC_009791=5033,NC_009787=34367,NC_009786=79237
+13954	Burkholderia pseudomallei 1710b	NC_007435=3181762,NC_007434=4126292
+17445	Marinomonas sp. MWYL1	NC_009654=5100344
+13967	Corynebacterium jeikeium K411	NC_003080=14323,NC_007164=2462499
+leifXyli_XYLI_CTCB0	Leifsonia xyli subsp. xyli str. CTCB07	chr=2584158
+10645	Synechococcus elongatus PCC 7942	NC_007604=2695903,NC_007595=46366
+campFetu_82_40	Campylobacter fetus subsp. fetus 82-40	chr=1773615
+13355	Ehrlichia ruminantium str. Welgevonden	NC_006832=1512977
+desuHafn_Y51	Desulfitobacterium hafniense Y51	chr=5727534
+13356	Ehrlichia ruminantium str. Gardel	NC_006831=1499920
+13291	Bacillus clausii KSM-K16	NC_006582=4303871
+16421	Francisella tularensis subsp. holarctica	NC_007880=1895994
+therVolc1	Thermoplasma volcanium GSS1	chr=1584804
+soliUsit_ELLIN6076	Solibacter usitatus Ellin6076	chr=9965640
+bradJapo	Bradyrhizobium japonicum USDA 110	chr=9105828
+methTher1	Methanothermobacter thermautotrophicus str. Delta H	chr=1751377
+13454	Pseudoalteromonas atlantica T6c	NC_008228=5187005
+methAeol1	Methanococcus aeolicus Nankai-3	chr=1569500
+nostSp	Nostoc sp. PCC 7120	plasmid_pCC7120delta=55414,plasmid_pCC7120beta=186614,chr=6413771,plasmid_pCC7120gamma=101965,plasmid_pCC7120alpha=408101,plasmid_pCC7120zeta=5584,plasmid_pCC7120epsilon=40340
+sphiAlas_RB2256	Sphingopyxis alaskensis RB2256	F_plasmid=28543,chr=3345170
+saccDegr_2_40	Saccharophagus degradans 2-40	chr=5057531
+408	Shigella flexneri 2a str. 2457T	NC_004741=4599354
+anaeDeha_2CP_C	Anaeromyxobacter dehalogenans 2CP-C	chr=5013479
+bordBron	Bordetella bronchiseptica RB50	chr=5339179
+rickBell_RML369_C	Rickettsia bellii RML369-C	chr=1522076
+pyroArse1	Pyrobaculum arsenaticum DSM 13514	chr=2121076
+thioDeni_ATCC33889	Sulfurimonas denitrificans DSM 1251	chr=2201561
+methSmit1	Methanobrevibacter smithii ATCC 35061	chr=1853160
+halMar1	Haloarcula marismortui ATCC 43049	chrII=288050,plasmid_pNG600=155300,plasmid_pNG100=33303,plasmid_pNG200=33452,plasmid_pNG500=132678,plasmid_pNG400=50060,plasmid_pNG300=39521,chrI=3131724,plasmid_pNG700=410554
+9618	Salmonella enterica subsp. enterica serovar Choleraesuis str. SC-B67	NC_006855=49558,NC_006856=138742,NC_006905=4755700
+9619	Brucella abortus biovar 1 str. 9-941	NC_006933=1162204,NC_006932=2124241
+vermEise_EF01_2	Verminephrobacter eiseniae EF01-2	chr=5566749,plasmid_pVEIS01=31194
+granBeth_CGDNIH1	Granulibacter bethesdensis CGDNIH1	chr=2708355
+alcaBork_SK2	Alcanivorax borkumensis SK2	chr=3120143
+vibrChol_O395_1	Vibrio cholerae O395	chr2=3024069,chr1=1108250
+nitrOcea_ATCC19707	Nitrosococcus oceani ATCC 19707	chr=3481691,plasmid_A=40420
+campJeju_RM1221	Campylobacter jejuni RM1221	chr=1777831
+12468	Bacillus cereus E33L	NC_007103=466370,NC_007105=53501,NC_007104=5108,NC_007107=9150,NC_007106=8191,NC_006274=5300915
+12469	Streptococcus pyogenes MGAS10394	NC_006086=1899877
+19135	Actinobacillus pleuropneumoniae serovar 3 str. JL03	NC_010278=2242062
+woliSucc	Wolinella succinogenes DSM 1740	chr=2110355
+19485	Ochrobactrum anthropi ATCC 49188	NC_009668=1895911,NC_009669=170351,NC_009667=2887297,NC_009671=93589,NC_009670=101491,NC_009672=57138
+therPend1	Thermofilum pendens Hrk 5	chr=1781889,pTPEN01=31504
+19489	Staphylococcus aureus subsp. aureus USA300_TCH1516	NC_010079=2872915,NC_010063=27041
+therFusc_YX	Thermobifida fusca YX	chr=3642249
+12609	Chlorobium phaeobacteroides DSM 266	NC_008639=3133902
+aquiAeol	Aquifex aeolicus VF5	chr=1551335,plasmid_ece1=39456
+heliPylo_J99	Helicobacter pylori J99	chr=1643831
+burkPseu_1106A	Burkholderia pseudomallei 1106a	chrII=3100794,chrI=3988455
+azorCaul2	Azorhizobium caulinodans ORS 571	chr=5369772
+12607	Prosthecochloris vibrioformis DSM 265	NC_009337=1966858
+synePCC6	Synechocystis sp. PCC 6803	plasmid_pSYSG=44343,chr=3573470,plasmid_pSYSM=119895,plasmid_pSYSA=103307,plasmid_pSYSX=106004
+baumCica_HOMALODISCA	Baumannia cicadellinicola str. Hc (Homalodisca coagulata)	chr=686194
+355	Chlamydophila abortus S26/3	NC_004552=1144377
+28111	Sorangium cellulosum 'So ce 56'	NC_010162=13033779
+shewBalt	Shewanella baltica OS155	plasmid_pSbal04=7995,chr=5127376,plasmid_pSbal01=116763,plasmid_pSbal02=74000,plasmid_pSbal03=16762
+therTeng	Thermoanaerobacter tengcongensis MB4	chr=2689445
+359	Pseudomonas syringae pv. tomato str. DC3000	NC_004632=67473,NC_004578=6397126,NC_004633=73661
+19943	Rickettsia rickettsii str. Iowa	NC_010263=1268175
+alkaEhrl_MLHE_1	Alkalilimnicola ehrlichei MLHE-1	chr=3275944
+sulSol1	Sulfolobus solfataricus P2	chr=2992245
+15604	Dehalococcoides sp. CBDB1	NC_007356=1395502
+19265	Nitrosopumilus maritimus SCM1	NC_010085=1645259
+19543	Thermotoga sp. RQ2	
+methHung1	Methanospirillum hungatei JF-1	chr=3544738
+27803	Salmonella enterica subsp. enterica serovar Paratyphi B str. SPB7	NC_010102=4858887
+28505	Clostridium botulinum B1 str. Okra	NC_010516=3958233,NC_010379=148780
+15766	Lactobacillus reuteri F275	NC_009513=1999618
+17929	Burkholderia cenocepacia MC0-3	NC_010512=1224595,NC_010508=3532883,NC_010515=3213911
+15762	Mycobacterium sp. MCS	NC_008146=5705448,NC_008147=215075
+15760	Mycobacterium gilvum PYR-GCK	NC_009341=16660,NC_009340=25309,NC_009338=5619607,NC_009339=321253
+15761	Mycobacterium vanbaalenii PYR-1	NC_008726=6491865
+18797	Lactococcus lactis subsp. cremoris MG1363	NC_009004=2529478
+20371	Brucella suis ATCC 23445	NC_010167=1400844,NC_010169=1923763
+desuVulg_HILDENBOROUG	Desulfovibrio vulgaris subsp. vulgaris str. Hildenborough	chr=3570858,megaplasmid=202301
+20079	Escherichia coli DH10B	NC_010473=4686137
+288	Yersinia pestis KIM	NC_004838=100990,NC_004088=4600755
+13006	Alkaliphilus metalliredigens QYMF	NC_009633=4929566
+13001	Acinetobacter baumannii	NC_010398=24922,NC_010400=3421954,NC_010395=6106,NC_010396=25014
+4	Treponema denticola ATCC 35405	NC_002967=2843201
+283	Agrobacterium tumefaciens str. C58	NC_003062=2841580,NC_003063=2075577,NC_003064=542868,NC_003065=214233
+methVann1	Methanococcus vannielii SB	chr=1720048
+285	Xylella fastidiosa Temecula1	NC_004556=2519802,NC_004554=1346
+286	Streptococcus pyogenes MGAS8232	NC_003485=1895017
+candCars_RUDDII	Candidatus Carsonella ruddii PV	chr=159662
+336	Anaplasma phagocytophilum HZ	NC_007797=1471282
+burkMall_ATCC23344	Burkholderia mallei ATCC 23344	chr2=2325379,chr1=3510148
+leucMese_ATCC8293	Leuconostoc mesenteroides subsp. mesenteroides ATCC 8293	chr=2038396,plasmid_pLEUM1=37367
+burkViet_G4	Burkholderia vietnamiensis G4	chr3=1241007,chr2=2411759,chr1=3652814,plasmid_pBVIE04=107231,plasmid_pBVIE05=88096,plasmid_pBVIE02=265616,plasmid_pBVIE03=226679,plasmid_pBVIE01=397868
+267	Pseudomonas putida KT2440	NC_002947=6181863
+266	Staphylococcus aureus subsp. aureus MSSA476	NC_005951=20652,NC_002953=2799802
+265	Staphylococcus aureus subsp. aureus MRSA252	NC_002952=2902619
+264	Staphylococcus aureus subsp. aureus N315	NC_003140=24653,NC_002745=2814816
+17643	Shewanella baltica OS185	NC_009665=5229686,NC_009661=83224
+archFulg1	Archaeoglobus fulgidus DSM 4304	chr=2178400
+13418	Polaromonas naphthalenivorans CJ2	NC_008757=353291,NC_008759=171866,NC_008758=190172,NC_008781=4410291,NC_008764=6459,NC_008760=143747,NC_008761=58808,NC_008762=21611,NC_008763=9898
+18007	Desulfococcus oleovorans Hxd3	NC_009943=3944167
+16304	Sinorhizobium medicae WSM419	NC_009622=219313,NC_009620=1570951,NC_009621=1245408,NC_009636=3781904
+16306	Caulobacter sp. K31	NC_010333=177878,NC_010338=5477872,NC_010335=233649
+59	Chloroflexus aurantiacus J-10-fl	NC_010175=5258541
+12530	Synechococcus sp. CC9311	NC_008319=2606748
+13551	Prochlorococcus marinus str. MIT 9211	NC_009976=1688963
+15691	Mycobacterium abscessus	NC_010394=23319,NC_010397=5067172
+fusoNucl	Fusobacterium nucleatum subsp. nucleatum ATCC 25586	chr=2174500
+63	Staphylococcus aureus RF122	NC_007622=2742531
+heliHepa	Helicobacter hepaticus ATCC 51449	chr=1799146
+17249	Thermosipho melanesiensis BI429	NC_009616=1915238
+15596	Staphylococcus saprophyticus subsp. saprophyticus ATCC 15305	NC_007350=2516575,NC_007351=38454,NC_007352=22870
+psycIngr_37	Psychromonas ingrahamii 37	chr=4559598
+syntAcid_SB	Syntrophus aciditrophicus SB	chr=3179300
+salmTyph	Salmonella enterica subsp. enterica serovar Typhi str. CT18	plasmid_pHCM2=106516,chr=4809037,plasmid_pHCM1=218160
+burkXeno_LB400	Burkholderia xenovorans LB400	chr3=1471779,chr2=3363523,chr1=4895836
+trepPall	Treponema pallidum subsp. pallidum str. Nichols	chr=1138011
+neisMeni_MC58_1	Neisseria meningitidis MC58	chr=2272360
+syntWolf_GOETTINGEN	Syntrophomonas wolfei subsp. wolfei str. Goettingen	chr=2936195
+15644	Thermotoga lettingae TMO	NC_009828=2135342
+13913	Nitrosomonas eutropha C91	NC_008342=55635,NC_008341=65132,NC_008344=2661057
+17417	Dinoroseobacter shibae DFL 12	NC_009958=86208,NC_009959=72296,NC_009955=190506,NC_009956=152970,NC_009957=126304,NC_009952=3789584
+13911	Prochlorococcus marinus str. NATL2A	NC_007335=1842899
+13910	Prochlorococcus marinus str. MIT 9312	NC_007577=1709204
+250	Ralstonia metallidurans CH34	NC_007974=2580084,NC_007971=233720,NC_007972=171459,NC_007973=3928089
+13915	Frankia sp. EAN1pec	NC_009921=8982042
+13914	Ignicoccus hospitalis KIN4/I	NC_009776=1297538
+rhodSpha_2_4_1	Rhodobacter sphaeroides 2.4.1	chr2=943016,chr1=3188609,plasmid_B=114178,plasmid_C=105284,plasmid_A=114045,plasmid_D=100828,plasmid_E=37100
+16235	Escherichia coli 536	NC_008253=4938920
+zymoMobi_ZM4	Zymomonas mobilis subsp. mobilis ZM4	chr=2056416
+rhodPalu_CGA009	Rhodopseudomonas palustris CGA009	chr=5459213,plasmid_pRPA=8427
+420	Chlamydophila pneumoniae TW-183	NC_005043=1225935
+stapMari1	Staphylothermus marinus F1	chr=1570485
+sinoMeli	Sinorhizobium meliloti 1021	chr=3654135,plasmid_pSymA=1354226,plasmid_pSymB=1683333
+bartHens_HOUSTON_1	Bartonella henselae str. Houston-1	chr=1931047
+301	Streptococcus pyogenes SSI-1	NC_004606=1894275
+19193	Corynebacterium glutamicum R	NC_009343=49120,NC_009342=3314179
+304	Clostridium perfringens ATCC 13124	NC_008261=3256683
+306	Staphylococcus aureus subsp. aureus MW2	NC_003923=2820462
+307	Corynebacterium glutamicum ATCC 13032	NC_003450=3309401
+caulCres	Caulobacter crescentus CB15	chr=4016947
+haheChej_KCTC_2396	Hahella chejuensis KCTC 2396	chr=7215267
+10784	Bacillus anthracis str. 'Ames Ancestor'	NC_007530=5227419,NC_007323=94830,NC_007322=181677
+magnMC1	Magnetococcus sp. MC-1	chr=4719581
+heliPylo_HPAG1	Helicobacter pylori HPAG1	chr=1596366,plasmid_pHPAG1=9370
+pyrHor1	Pyrococcus horikoshii OT3	chr=1738505
+12514	Brucella ovis ATCC 25840	NC_009505=2111370,NC_009504=1164220
+12512	Arthrobacter aurescens TC1	NC_008711=4597686,NC_008713=300725,NC_008712=328237
+actiPleu_L20	Actinobacillus pleuropneumoniae L20	chr=2274482
+paraDeni_PD1222	Paracoccus denitrificans PD1222	plasmid_1=653815,chr2=1730097,chr1=2852282
+borrBurg	Borrelia burgdorferi B31	plasmid_cp26=26498,plasmid_lp21=18753,plasmid_lp25=24177,plasmid_lp28-4=27323,plasmid_lp28-2=29766,plasmid_lp28-3=28601,plasmid_lp28-1=26921,chr=910724,plasmid_cp9=9386,plasmid_lp5=5228,plasmid_lp38=38829,plasmid_lp17=16823,plasmid_cp32-1=30750,plasmid_cp32-6=29838,plasmid_cp32-7=30800,plasmid_cp32-4=30299,plasmid_cp32-8=30885,plasmid_cp32-9=30651,plasmid_lp36=36849,plasmid_lp56=52971,plasmid_lp54=53561,plasmid_cp32-3=30223
+strePyog_M1_GAS	Streptococcus pyogenes M1 GAS	chr=1852441
+pseuAeru	Pseudomonas aeruginosa PAO1	chr=6264404
+16523	Herpetosiphon aurantiacus ATCC 23779	NC_009974=99204,NC_009972=6346587,NC_009973=339639
+16525	Candidatus Korarchaeum cryptofilum OPF8	NC_010482=1590757
+19619	Lysinibacillus sphaericus C3-41	NC_010382=4639821,NC_010381=177642
+19617	Candidatus Sulcia muelleri GWSS	NC_010118=245530
+saccEryt_NRRL_2338	Saccharopolyspora erythraea NRRL 2338	chr=8212805
+methCaps_BATH	Methylococcus capsulatus str. Bath	chr=3304561
+16645	Yersinia pestis Antiqua	NC_008150=4702289,NC_008122=70299,NC_008120=96471,NC_008121=10777
+16646	Yersinia pestis Nepal516	NC_008149=4534590,NC_008119=10778,NC_008118=100918
+12637	Clostridium beijerinckii NCIMB 8052	NC_009617=6000632
+ehrlRumi_WELGEVONDEN	Ehrlichia ruminantium str. Welgevonden	chr=1516355
+methLabrZ_1	Methanocorpusculum labreanum Z	chr=1804962
+vibrVuln_YJ016_1	Vibrio vulnificus YJ016	plasmid_pYJ016=48508,chrII=1857073,chrI=3354505
+13773	Streptococcus thermophilus LMD-9	NC_008500=4449,NC_008501=3361,NC_008532=1856368
+20039	Leptothrix cholodnii SP-6	
+shewAmaz	Shewanella amazonensis SB2B	chr=4306142
+13126	Legionella pneumophila str. Lens	NC_006369=3345687,NC_006366=59832
+13127	Legionella pneumophila str. Paris	NC_006368=3503610,NC_006365=131885
+13120	Mycoplasma hyopneumoniae 232	NC_006360=892758
+18509	Staphylococcus aureus subsp. aureus Mu3	NC_009782=2880168
+13040	Silicibacter sp. TM1040	NC_008043=821788,NC_008042=130973,NC_008044=3200938
+18267	Candidatus Vesicomyosocius okutanii HA	NC_009465=1022154
+methFlag_KT	Methylobacillus flagellatus KT	chr=2971517
+therAcid1	Thermoplasma acidophilum DSM 1728	chr=1564906
+therPetr_RKU_1	Thermotoga petrophila RKU-1	chr=1823511
+shewFrig	Shewanella frigidimarina NCIMB 400	chr=4845257
+photProf_SS9	Photobacterium profundum SS9	chr2=2237943,chr1=4085304,plasmid_pPBPR1=80033
+burkThai_E264	Burkholderia thailandensis E264	chrII=2914771,chrI=3809201
+17811	Lactobacillus helveticus DPC 4571	NC_010080=2080931
+229	Chlamydia muridarum Nigg	NC_002182=7501,NC_002620=1072950
+228	Chlamydophila caviae GPIC	NC_004720=7966,NC_003361=1173390
+nitrMult_ATCC25196	Nitrosospira multiformis ATCC 25196	plasmid_2=17036,plasmid_3=14159,plasmid_1=18871,chr1=3184243
+223	Mycobacterium tuberculosis CDC1551	NC_002755=4403837
+220	Prochlorococcus marinus str. MIT 9313	NC_005071=2410873
+20241	Shewanella halifaxensis HAW-EB4	NC_010334=5226917
+20243	Brucella canis ATCC 23365	NC_010104=1206800,NC_010103=2105969
+88	Mycobacterium avium 104	NC_008595=5475491
+19227	Renibacterium salmoninarum ATCC 33209	NC_010168=3155250
+neisMeni_FAM18_1	Neisseria meningitidis FAM18	chr=2194961
+arthFB24	Arthrobacter sp. FB24	plasmid_2=115507,plasmid_3=96488,plasmid_1=159538,chr1=4698945
+10679	Rickettsia typhi str. Wilmington	NC_006142=1111496
+17457	Pseudomonas mendocina ymp	NC_009439=5072807
+13959	Escherichia coli HS	NC_009800=4643538
+17455	Shewanella woodyi ATCC 51908	NC_010506=5935403
+17109	Salinispora arenicola CNS-205	NC_009953=5786361
+17459	Serratia proteamaculans 568	NC_009832=5448853,NC_009829=46804
+13953	Burkholderia pseudomallei 668	NC_009075=3127456,NC_009074=3912947
+10676	Mycoplasma synoviae 53	NC_007294=799476
+10675	Mycoplasma hyopneumoniae J	NC_007295=897405
+16820	Clostridium novyi NT	NC_008593=2547720
+nocaFarc_IFM10152	Nocardia farcinica IFM 10152	chr=6021225,plasmid_pNF1=184026,plasmid_pNF2=87093
+13378	Bacteroides vulgatus ATCC 8482	NC_009614=5163189
+16351	Escherichia coli W3110	AC_000091=4646332
+porpGing_W83	Porphyromonas gingivalis W83	chr=2343476
+17415	Shewanella pealeana ATCC 700345	NC_009901=5174581
+eschColi_APEC_O1	Escherichia coli APEC O1	chr=5082025
+46	Bacteroides fragilis NCTC 9343	NC_003228=5205140,NC_006873=36560
+13462	Roseiflexus castenholzii DSM 13941	NC_009767=5723298
+44	Bartonella quintana str. Toulouse	NC_005955=1581384
+42	Rickettsia conorii str. Malish 7	NC_003103=1268755
+43	Rickettsia prowazekii str. Madrid E	NC_000963=1111523
+bactThet_VPI_5482	Bacteroides thetaiotaomicron VPI-5482	chr=6260361,plasmid_p5482=33038
+17413	Delftia acidovorans SPH-1	NC_010002=6767514
+haemInfl_KW20	Haemophilus influenzae Rd KW20	chr=1830138
+brucMeli	Brucella melitensis 16M	chrII=1177787,chrI=2117144
+haloHalo_SL1	Halorhodospira halophila SL1	chr=2678452
+pyrAby1	Pyrococcus abyssi GE5	chr=1765118,plasmid_pGT5=3444
+mculMari1	Methanoculleus marisnigri JR1	chr=2478101
+12554	Borrelia garinii PBi	NC_006128=27108,NC_006129=55560,NC_006156=904246
+dechArom_RCB	Dechloromonas aromatica RCB	chr=4501104
+stapAure_MU50	Staphylococcus aureus subsp. aureus Mu50	chr=2878529,plasmid_VRSAp=25107
+eschColi_K12	Escherichia coli K12	chr=4639675
+12416	Pseudomonas syringae pv. phaseolicola 1448A	NC_005773=5928787,NC_007275=51711,NC_007274=131950
+aeroHydr_ATCC7966	Aeromonas hydrophila subsp. hydrophila ATCC 7966	chr=4744448
+baciAnth_AMES	Bacillus anthracis str. Ames	chr=5227293
+shewOnei	Shewanella oneidensis MR-1	plasmid_pMR-1=161613,chr=4969803
+15217	Human herpesvirus 1	NC_001806=152261
+lMaj5	Leishmania major 2005	chr1=268984,chr2=355714,chr3=384518,chr4=441313,chr5=465823,chr6=516874,chr7=596348,chr8=574972,chr9=573441,chr10=570864,chr11=582575,chr12=675347,chr13=654604,chr14=622648,chr15=629514,chr16=714659,chr17=684831,chr18=739751,chr19=702212,chr20=742551,chr21=772974,chr22=716608,chr23=772567,chr24=840950,chr25=912849,chr26=1091579,chr27=1130447,chr28=1160128,chr29=1212674,chr30=1403454,chr31=1484336,chr32=1604650,chr33=1583673,chr34=1866754,chr35=2090491,chr36=2682183
+arabidopsis	Arabidopsis thaliana TAIR9	chr1=30427671,chr2=19698289,chr3=23459830,chr4=18585056,chr5=26975502
+arabidopsis_tair8	Arabidopsis thaliana TAIR8
+araTha1	Arabidopsis thaliana TAIR7
+mm5	Mouse May 2004
+Sscrofa9.58	Pig May 2010	chr1=79819395,chr2=66741929,chr3=145240301,chr4=57436344,chr5=134546103,chr6=136414062,chr7=64400339,chr8=77440658,chr9=54314914,chr10=295534705,chr11=16613,chr12=123604780,chr13=140138492,chr14=100521970,chr15=136259946,chr16=125876292,chr17=123310171,chr18=132473591,chrM=119990671,chrX=148515138
+AaegL1	Mosquito (Aedes aegypti)
+AgamP3	Mosquito (Anopheles gambiae)	chr3L=41963435,chr2L=49364325,chrX=24393108,chr2R=61545105,chrY_unplaced=237045,chr3R=53200684,chrUn=42389979,chrM=15363
+CpipJ1	Mosquito (Culex quinquefasciatus)
+IscaW1	Deer Tick (Ixodes scapularis)
+PhumU1	Head Louse (Pediculus humanus)
+bosTauMd3	Cow Aug. 2009 (UMD3.1)
+droWil1	D. willistoni Feb. 2006
+droAna3	D. ananassae Feb. 2006
+droEre2	D. erecta Feb. 2006
+droGri2	D. grimshawi Feb. 2006
+droMoj3	D. mojavensis Feb. 2006
+droSec1	D. sechellia Oct. 2005
+droSim1	D. simulans Apr. 2005	chrU=15797150,chr4=949497,chrYh_random=100575,chr3R=27517382,chr3R_random=1307089,chr3L=22553184,chr2L=22036055,chr2h_random=3178526,chrX_random=5698898,chrXh_random=84659,chrX=17042790,chr4_random=134295,chr2R=19596830,chr2R_random=2996586,chr3L_random=1049610,chr2L_random=909653,chrM=14972,chr3h_random=1452968
+droVir3	D. virilis Feb. 2006
+Bombyx_mori_p50T_2.0	Silkworm
+oryza_sativa_japonica_nipponbare_IRGSP4.0	Rice (Oryza sativa ssp. japonica var. Nipponbare)	chr1=30357780,chr10=32124789,chr11=30039014,chr12=35863200,chr2=37257345,chr3=36823111,chr4=45064769,chr5=134525,chr6=23843360,chr7=28530027,chr8=27757321,chr9=30828668,chrM=23661561,plastid=490520
+Schizosaccharomyces_pombe_1.1	Fission Yeast (S. pombe)	chrII=4539804,chrIII=2452883,chrI=5579133,chrM=19431
+Xanthomonas_oryzae_PXO99A	Xanthomonas oryzae pv. oryzae PXO99A	chr=5240075
+eschColi_EC4115	Escherichia coli EC4115	chr=5572075,plasmid_pO157=94644,plasmid_pEC4115=37452
+eschColi_TW14359	Escherichia coli TW14359	chr=5528136,plasmid_pO157=94601
+pUC18	pUC18 Plasmid	plasmid=2686
+nomLeu1	Gibbon	chr1=245538060,chr10=135320619,chr10_random=51011,chr11=134551115,chr12=132397767,chr12_random=336702,chr13=114140689,chr14=106383727,chr15=100373758,chr15_random=814500,chr16=88836341,chr16_random=121782,chr17=78641643,chr17_random=2617017,chr18=76124575,chr19=63791437,chr19_random=297341,chr1_random=3502083,chr2=242756806,chr20=62432189,chr21=47019833,chr22=49520868,chr22_random=31530,chr2_random=335351,chr3=199337376,chr3_random=950829,chr4=191633586,chr4_random=923474, [...]
+Saccharomyces_cerevisiae_S288C_SGD2010	S. cerevisae str. S288C	chr1=230218,chr2=813184,chr3=316620,chr4=1531933,chr5=576874,chr6=270161,chr7=1090940,chr8=562643,chr9=439888,chr10=745751,chr11=666816,chr12=1078177,chr13=924431,chr14=784333,chr15=1091291,chr16=948066,chrM=85779
+Spur_v2.6	Purple Sea Urchin (Strongylocentrotus purpuratus) v2.6
+Ptrichocarpa_156	Poplar (Populus trichocarpa)
+Hydra_JCVI	Hydra magnipapillata str. 105
+Araly1	Arabidopsis lyrata
+Zea_mays_B73_RefGen_v2	Maize (Zea mays)	chr1=301354135,chr2=237068928,chr3=232140222,chr4=241473566,chr5=217872898,chr6=169174371,chr7=176764813,chr8=175793772,chr9=156750718,chr10=150189513,chr11=7140224
+Homo_sapiens_AK1	Korean Man	chrM=16571,chr1=247249719,chr2=242951149,chr3=199501827,chr4=191273063,chr5=180857866,chr6=170899992,chr7=158821424,chr8=146274826,chr9=140273252,chr10=135374737,chr11=134452384,chr12=132349534,chr13=114142980,chr14=106368585,chr15=100338915,chr16=88827254,chr17=78774742,chr18=76117153,chr19=63811651,chr20=62435964,chr21=46944323,chr22=49691432,chrX=154913754,chrY=57772954
+Tcas_3.0	Red Flour Beetle (Tribolium castaneum)	chrLG1=X=10877635,chrLG2=20218415,chrLG3=38791480,chrLG4=13894384,chrLG5=19135781,chrLG6=13176827,chrLG7=20532854,chrLG8=18021898,chrLG9=21459655,chrLG10=11386040
+hg_g1k_v37	Homo sapiens b37	1=249250621,2=243199373,3=198022430,4=191154276,5=180915260,6=171115067,7=159138663,8=146364022,9=141213431,10=135534747,11=135006516,12=133851895,13=115169878,14=107349540,15=102531392,16=90354753,17=81195210,18=78077248,19=59128983,20=63025520,21=48129895,22=51304566,X=155270560,Y=59373566,MT=16569,GL000207.1=4262,GL000226.1=15008,GL000229.1=19913,GL000231.1=27386,GL000210.1=27682,GL000239.1=33824,GL000235.1=34474,GL000201.1=36148,GL000247.1=36422,GL000245.1 [...]
+Homo_sapiens_nuHg19_mtrCRS	Homo sapiens (hg19 with mtDNA replaced with rCRS)	chr1=249250621,chr2=243199373,chr3=198022430,chr4=191154276,chr5=180915260,chr6=171115067,chr7=159138663,chr8=146364022,chr9=141213431,chr10=135534747,chr11=135006516,chr12=133851895,chr13=115169878,chr14=107349540,chr15=102531392,chr16=90354753,chr17=81195210,chr18=78077248,chr19=59128983,chr20=63025520,chr21=48129895,chr22=51304566,chrX=155270560,chrY=59373566,chrM=16569,chr1_gl000191_random=106433,chr1_gl0001 [...]
+Arabidopsis_thaliana_TAIR10	Arabidopsis thaliana TAIR10 (Arabidopsis_thaliana_TAIR10)	chr1=30427671,chr2=19698289,chr3=23459830,chr4=18585056,chr5=26975502,chrM=366924,chrC=154478
+dp4	D. pseudoobscura (dp4)
+Tcacao_1.0	Theobroma cacao Dec 2010 (CIRAD/Theobroma cacao Belizian Criollo)	Tc01=31268538,Tc02=27754001,Tc03=25475297,Tc04=23504306,Tc05=25651337,Tc06=15484475,Tc07=14169093,Tc08=11535834,Tc09=28459094,Tc10=15164258,Tc00=108886888
+apiMel3	Honeybee (Apis mellifera): apiMel3	Group10=11440700,Group11=12576330,Group12=9182753,Group13=8929068,Group14=8318479,Group15=7856270,Group16=5631066,Group1=25854376,Group2=14465785,Group3=12341916,Group4=10796202,Group5=13386189,Group6=14581788,Group7=9974240,Group8=11452794,Group9=10282195,GroupUn=399230636
\ No newline at end of file
diff --git a/tool-data/shared/ucsc/ucsc_build_sites.txt.sample b/tool-data/shared/ucsc/ucsc_build_sites.txt.sample
new file mode 100644
index 0000000..1dde52f
--- /dev/null
+++ b/tool-data/shared/ucsc/ucsc_build_sites.txt.sample
@@ -0,0 +1,7 @@
+#Harvested from http://genome.ucsc.edu/cgi-bin/das/dsn
+main	http://genome.ucsc.edu/cgi-bin/hgTracks?	priPac1,danRer4,mm9,mm8,droAna1,mm5,caeRem2,mm7,mm6,panTro1,dm3,panTro2,anoCar1,ce4,galGal3,galGal2,ce1,rn3,rn2,droMoj1,droMoj2,rn4,droYak1,droYak2,dp3,dp2,dm1,canFam1,danRer5,canFam2,danRer3,danRer2,ornAna1,ci2,ci1,tetNig1,bosTau1,bosTau3,bosTau2,equCab1,oryLat1,droAna2,droEre1,ponAbe2,rheMac2,sacCer1,droPer1,droSim1,monDom1,cb1,dm2,droSec1,strPur1,droVir2,droVir1,strPur2,sc1,xenTro1,droGri1,xenTro2,cb3,gasAcu1,caePb1,anoGam1,fr2,fr1,hg15,hg [...]
+#Harvested from http://archaea.ucsc.edu/cgi-bin/das/dsn
+archaea	http://archaea.ucsc.edu/cgi-bin/hgTracks?	alkaEhrl_MLHE_1,shewW318,idioLoih_L2TR,sulSol1,erwiCaro_ATROSEPTICA,symbTher_IAM14863,moorTher_ATCC39073,therFusc_YX,methHung1,bradJapo,therElon,shewPutrCN32,pediPent_ATCC25745,mariMari_MCS10,nanEqu1,baciSubt,chlaTrac,magnMagn_AMB_1,chroViol,ralsSola,acidCryp_JF_5,erytLito_HTCC2594,desuVulg_HILDENBOROUG,pyrAer1,sulfToko1,shewANA3,paraSp_UWE25,geobKaus_HTA426,rhizEtli_CFN_42,uncuMeth_RCI,candBloc_FLORIDANUS,deinRadi,yersPest_CO92,saccEryt_ [...]
+#Harvested from http://genome-test.cse.ucsc.edu/cgi-bin/das/dsn
+test	http://genome-test.cse.ucsc.edu/cgi-bin/hgTracks?	anoCar1,ce4,ce3,ce2,ce1,loxAfr1,rn2,eschColi_O157H7_1,rn4,droYak1,heliPylo_J99_1,droYak2,dp3,dp2,caeRem2,caeRem1,oryLat1,eschColi_K12_1,homIni13,homIni14,droAna1,droAna2,oryCun1,sacCer1,heliHepa1,droGri1,sc1,dasNov1,choHof1,tupBel1,mm9,mm8,vibrChol1,mm5,mm4,mm7,mm6,mm3,mm2,rn3,venter1,galGal3,galGal2,ornAna1,equCab1,cioSav2,rheMac2,eutHer13,droPer1,droVir2,droVir1,heliPylo_26695_1,euaGli13,calJac1,campJeju1,droSim1,hg13,hg15,hg16,hg1 [...]
+ucla	http://epigenomics.mcdb.ucla.edu/cgi-bin/hgTracks?	araTha1
diff --git a/tool-data/sift_db.loc.sample b/tool-data/sift_db.loc.sample
new file mode 100644
index 0000000..bc289c4
--- /dev/null
+++ b/tool-data/sift_db.loc.sample
@@ -0,0 +1,22 @@
+#This is a sample file distributed with Galaxy that enables tools to use
+#a directory of sqlite files for use with SIFT.  You will need to supply
+#these files and then create a sift_db.loc file similar to this one (store
+#it in this directory) that points to the directories in which those files
+#are stored.  The sift_db.loc file has this format (white space characters
+#are TAB characters):
+#
+#<build>	<file_path>
+#
+#So, for example, if your sift_db.loc began like this:
+#
+#hg18	/galaxy/data/hg18/misc/sift/Human_db_36
+#
+#then your /galaxy/data/hg18/misc/sift/Human_db_36 directory would need to
+#contain the following sqlite files, among others:
+#
+#-rw-r--r-- 1 g2test g2data 3010870272 Sep  2 14:09 Human_CHR10.sqlite
+#-rw-r--r-- 1 g2test g2data 2926365696 Sep  2 15:09 Human_CHR11.sqlite
+#-rw-r--r-- 1 g2test g2data 2557210624 Sep  2 14:19 Human_CHR12.sqlite
+#
+#hg18	/galaxy/data/hg18/misc/sift/Human_db_36
+#hg19	/galaxy/data/hg19/misc/sift/Human_db_37
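As a minimal sketch (illustrative only, not part of the upstream tree), the two-column tab-separated layout described above can be consumed with a few lines of Python; the file path and build key in the example are assumptions:

# Minimal sketch: load a two-column, tab-separated .loc file such as the
# sift_db.loc described above; lines starting with '#' are comments.
def read_loc(path):
    entries = {}
    for line in open(path):
        line = line.rstrip('\n')
        if not line or line.startswith('#'):
            continue
        fields = line.split('\t')
        if len(fields) >= 2:
            entries[fields[0]] = fields[1]
    return entries

# e.g. read_loc('tool-data/sift_db.loc').get('hg18')
# -> '/galaxy/data/hg18/misc/sift/Human_db_36'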
diff --git a/tool-data/srma_index.loc.sample b/tool-data/srma_index.loc.sample
new file mode 100644
index 0000000..ef13833
--- /dev/null
+++ b/tool-data/srma_index.loc.sample
@@ -0,0 +1,29 @@
+#You should be using picard_index.loc instead of srma_index.loc now.
+#
+#
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of SRMA indexed sequences data files. You will need
+#to create these data files and then create a srma_index.loc file 
+#similar to this one (store it in this directory) that points to 
+#the directories in which those files are stored. The srma_index.loc 
+#file has this format (longer white space is the TAB character):
+#
+#<unique_build_id>	<dbkey>	<display_name>	<fasta_file_path>
+#
+#So, for example, if you had hg18 indexed and stored in 
+#/depot/data2/galaxy/srma/hg18/, 
+#then the srma_index.loc entry would look like this:
+#
+#hg18	hg18	hg18 Pretty	/depot/data2/galaxy/srma/hg18/hg18.fa
+#
+#and your /depot/data2/galaxy/srma/hg18/ directory
+#would contain the following three files:
+#hg18.fa
+#hg18.dict
+#hg18.fa.fai
+#
+#The dictionary file for each reference (e.g. hg18.dict) must be
+#created via Picard (http://picard.sourceforge.net). Note that
+#the dict file does not have the .fa extension, although the
+#path listed in the loc file does include it.
+#
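Since each srma_index.loc entry implies the three companion files spelled out above, a small check can catch incomplete entries. A minimal sketch, with an illustrative path (not upstream code):

# Verify that an srma_index.loc entry has its companions:
# <name>.fa, <name>.dict (no .fa extension) and <name>.fa.fai.
import os

def check_srma_entry(fasta_path):
    base = os.path.splitext(fasta_path)[0]   # '/.../hg18.fa' -> '/.../hg18'
    needed = [fasta_path, base + '.dict', fasta_path + '.fai']
    return dict((p, os.path.exists(p)) for p in needed)

# e.g. check_srma_entry('/depot/data2/galaxy/srma/hg18/hg18.fa')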
diff --git a/tool-data/twobit.loc.sample b/tool-data/twobit.loc.sample
new file mode 100644
index 0000000..fee7c23
--- /dev/null
+++ b/tool-data/twobit.loc.sample
@@ -0,0 +1,26 @@
+#This is a sample file distributed with Galaxy that is used by some
+#tools.  The twobit.loc file has this format (white space characters 
+#are TAB characters):
+#
+#<Build>	<FullPathToFile>
+#
+#So, for example, if you had droPer1 twobit files stored in 
+#/depot/data2/galaxy/droPer1/, then the twobit.loc entry 
+#would look like this:
+#
+#droPer1	/depot/data2/galaxy/droPer1/droPer1.2bit
+#
+#and your /depot/data2/galaxy/droPer1/ directory would 
+#contain all of your twobit files (e.g.):
+#
+#-rw-rw-r--   1 nate   galaxy 48972650 2007-05-04 11:27 droPer1.2bit
+#...etc...
+#
+#Your twobit.loc file should include an entry per line for each twobit 
+#file you have stored.  For example:
+#
+#droPer1	/depot/data2/galaxy/droPer1/droPer1.2bit
+#apiMel2	/depot/data2/galaxy/apiMel2/apiMel2.2bit
+#droAna1	/depot/data2/galaxy/droAna1/droAna1.2bit
+#droAna2	/depot/data2/galaxy/droAna2/droAna2.2bit
+#...etc...
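A quick way to validate the paths listed in twobit.loc is to check the UCSC 2bit signature. A minimal sketch, assuming the documented magic number 0x1A412743 (written in the byte order of the machine that produced the file, so both orders are accepted); the path is illustrative:

# Check that a file named in twobit.loc carries the 2bit signature.
import struct

def looks_like_twobit(path):
    with open(path, 'rb') as fh:
        raw = fh.read(4)
    if len(raw) != 4:
        return False
    return 0x1A412743 in (struct.unpack('<I', raw)[0],
                          struct.unpack('>I', raw)[0])

# e.g. looks_like_twobit('/depot/data2/galaxy/droPer1/droPer1.2bit')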
diff --git a/tool-data/vcf_iobio.loc.sample b/tool-data/vcf_iobio.loc.sample
new file mode 100644
index 0000000..72d2173
--- /dev/null
+++ b/tool-data/vcf_iobio.loc.sample
@@ -0,0 +1,3 @@
+# Table used for listing vcf.iobio servers
+#<unique_id>	<display_name>	<url>
+vcf_iobio	vcf.iobio.io	http://vcf.iobio.io/
diff --git a/tool_list.py b/tool_list.py
new file mode 100644
index 0000000..49cf0d4
--- /dev/null
+++ b/tool_list.py
@@ -0,0 +1,83 @@
+from __future__ import print_function
+
+import os
+import sys
+
+# read tool_conf.xml to get all the tool xml file names
+onoff = 1
+tool_list = []
+tool_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', None )
+
+if tool_conf_file is None:
+    for possible_tool_file in [ 'config/tool_conf.xml', 'tool_conf.xml', 'config/tool_conf.xml.sample' ]:
+        tool_conf_file = possible_tool_file
+        if os.path.isfile( possible_tool_file ):
+            break
+
+if tool_conf_file is None or not os.path.isfile(tool_conf_file):
+    sys.stderr.write( "Tool config file not found: {}\n".format(tool_conf_file) )
+    sys.exit(1)
+
+for line in open(tool_conf_file, "r"):
+    if line.find("<!--") != -1:
+        onoff = 0
+    if line.find("file") != -1 and onoff == 1:
+        strs = line.split('\"')
+        tool_list.append(strs[1])
+    if line.find("<section") != -1 and onoff == 1:
+        keys = line.strip().split('\"')
+        n = 0
+        strtmp = "section::"
+        while n < len(keys):
+            if keys[n].find("id") != -1:
+                strtmp = strtmp + keys[n + 1]
+            if keys[n].find("name") != -1:
+                strtmp = strtmp + keys[n + 1] + "-"
+            n = n + 1
+        tool_list.append(strtmp.replace(' ', '_'))
+    if line.find("-->") != -1:
+        onoff = 1
+
+# read tool info from every tool xml file
+tool_infos = []
+for tool in tool_list:
+    if tool.find("section") != -1:
+        tool_info = dict()
+        tool_info["id"] = tool
+        tool_infos.append(tool_info)
+    if os.path.exists("tools/" + tool):
+        for line in open("tools/" + tool):
+            if line.find("<tool ") != -1 and line.find("id") != -1:
+                keys = line.strip().split('\"')
+                tool_info = dict()
+                tool_info["desc"] = ''
+                for n in range(len(keys) - 1):
+                    if " id=" in keys[n]:
+                        tool_info["id"] = keys[n + 1].replace(' ', '_')
+                    if " name=" in keys[n]:
+                        tool_info["name"] = keys[n + 1]
+                    if " description=" in keys[n]:
+                        tool_info["desc"] = keys[n + 1]
+                tool_infos.append(tool_info)
+                break
+
+flag = 0
+if len(sys.argv) == 1:
+    for tool_info in tool_infos:
+        if tool_info["id"].find("section") != -1:
+            print("===========================================================================================================================================")
+            print("%-45s\t%-40s\t%s" % ("id", "name", tool_info["id"]))
+            print("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -")
+        else:
+            print("%-45s\t%-40s" % (tool_info["id"], tool_info["name"]))
+else:
+    for tool_info in tool_infos:
+        if tool_info["id"].find("section") != -1:
+            flag = 0
+        elif flag == 1:
+            print(" functional.test_toolbox:TestForTool_%s" % tool_info["id"], end=' ')
+        if tool_info["id"].replace('section::', '') == sys.argv[1]:
+            flag = 1
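tool_list.py above extracts tool ids and names by splitting raw lines on double quotes. A more robust variant can be sketched with the standard-library XML parser; this is an illustration under the same assumptions tool_list.py makes (tool files live under tools/, description read from the <description> element), not the upstream script:

# Sketch: the same inventory via xml.etree instead of string splitting.
import os
import xml.etree.ElementTree as ET

def list_tools(tool_conf='config/tool_conf.xml.sample', tools_dir='tools'):
    infos = []
    for tool_el in ET.parse(tool_conf).getroot().iter('tool'):
        rel = tool_el.get('file')
        if not rel:
            continue
        path = os.path.join(tools_dir, rel)
        if not os.path.exists(path):
            continue
        root = ET.parse(path).getroot()
        desc = root.findtext('description', default='') or ''
        infos.append((root.get('id'), root.get('name'), desc.strip()))
    return infos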
diff --git a/tools/data_source/access_libraries.xml b/tools/data_source/access_libraries.xml
new file mode 100644
index 0000000..fe59155
--- /dev/null
+++ b/tools/data_source/access_libraries.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0"?>
+<tool name="Access Libraries" id="library_access1" version="1.0.0">
+    <description>stored locally</description>
+    <inputs action="/library/index" method="get" target="_parent">
+        <param name="default_action" type="hidden" value="import_to_histories" />
+    </inputs>
+    <uihints minwidth="800"/>
+</tool>
diff --git a/tools/data_source/bed_convert.xml b/tools/data_source/bed_convert.xml
new file mode 100644
index 0000000..eb41406
--- /dev/null
+++ b/tools/data_source/bed_convert.xml
@@ -0,0 +1,14 @@
+<tool id="BED File Converter1" name="BED File Converter" version="1.0.0">
+  <description>creates a bed or xbed file from a text query</description>
+  <command>noop</command>
+  <inputs>
+    <display>creates a bed or xbed file from the user-assigned input $input</display>
+    <param format="tabular" name="input" type="data" />
+    <param name="chrom" size="4" type="text" value="all" />
+  </inputs>
+  <outputs>
+    <data format="bed" name="out_file1" />
+  </outputs>
+  <help>User specifies delimiter, header information, and column assignments and the file will be converted to BED or xBED.
+</help>
+</tool>
\ No newline at end of file
diff --git a/tools/data_source/biomart.xml b/tools/data_source/biomart.xml
new file mode 100644
index 0000000..bf4c4c1
--- /dev/null
+++ b/tools/data_source/biomart.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If value of 'URL_method' is 'post', any additional params coming back in the
+    initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
+
+    TODO: Hack to get biomart to work - the 'add_to_URL' param can be eliminated when the Biomart team encodes URL prior to sending, meanwhile
+    everything including and beyond the first '&' is truncated from URL.  They said they'll let us know when this is fixed at their end.
+-->
+<tool name="BioMart" id="biomart" tool_type="data_source" version="1.0.1">
+	<description>Ensembl server</description>
+	<command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+	<inputs action="http://www.ensembl.org/biomart/martview" check_values="false" method="get" target="_top">
+		<display>go to BioMart Ensembl $GALAXY_URL</display>
+		<param name="GALAXY_URL" type="baseurl" value="/tool_runner/biomart" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&amp;" first_separator="?" join="=">
+                <value name="_export" missing="1" />
+                <value name="GALAXY_URL" missing="0" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="data_type" remote_name="exportView_outputformat" missing="tabular" >
+            <value_translation>
+                <value galaxy_value="tabular" remote_value="TSV" />
+            </value_translation>
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
+        <request_param galaxy_name="dbkey" remote_name="dbkey" missing="?" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="Biomart query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+    </request_param_translation>
+	<uihints minwidth="800"/>
+	<outputs>
+		<data name="output" format="tabular" />
+	</outputs>
+	<options sanitize="False" refresh="True"/>
+    <citations>
+        <citation type="doi">10.1093/database/bar011</citation>
+        <citation type="doi">10.1093/nar/gkv350</citation>
+    </citations>
+</tool>
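The comment at the top of biomart.xml (repeated in the other BioMart-style sources below) describes a small contract: with URL_method 'get' the returned URL is fetched as-is, while with 'post' the extra parameters coming back are re-encoded and posted to that URL. A minimal sketch of that dispatch, mirroring what data_source.py later in this commit does (function and parameter names here are illustrative):

# Sketch of the URL_method contract: 'get' fetches the returned URL as-is,
# 'post' re-encodes the remaining params and POSTs them to that URL.
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import urlopen

def fetch(params):
    url = params['URL']
    if params.get('URL_method') == 'post':
        return urlopen(url, urlencode(params))
    return urlopen(url)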
diff --git a/tools/data_source/biomart_test.xml b/tools/data_source/biomart_test.xml
new file mode 100644
index 0000000..0bc4541
--- /dev/null
+++ b/tools/data_source/biomart_test.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If value of 'URL_method' is 'post', any additional params coming back in the
+    initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
+
+    TODO: Hack to get biomart to work - the 'add_to_URL' param can be eliminated when the Biomart team encodes URL prior to sending, meanwhile
+    everything including and beyond the first '&' is truncated from URL.  They said they'll let us know when this is fixed at their end.
+-->
+<tool name="BioMart" id="biomart_test" tool_type="data_source" version="1.0.1">
+	<description>Test server</description>
+	<command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+	<inputs action="http://test.biomart.org/biomart/martview" check_values="false" method="get" target="_top">
+		<display>go to BioMart Central $GALAXY_URL</display>
+		<param name="GALAXY_URL" type="baseurl" value="/tool_runner/biomart" />
+	</inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&amp;" first_separator="?" join="=">
+                <value name="_export" missing="1" />
+                <value name="GALAXY_URL" missing="0" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="data_type" remote_name="exportView_outputformat" missing="tabular" >
+            <value_translation>
+                <value galaxy_value="tabular" remote_value="TSV" />
+            </value_translation>
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
+        <request_param galaxy_name="dbkey" remote_name="dbkey" missing="?" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="Biomart test query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+    </request_param_translation>
+	<uihints minwidth="800"/>		
+	<outputs>
+		<data name="output" format="tabular" />
+	</outputs>
+	<options sanitize="False" refresh="True"/>
+    <citations>
+        <citation type="doi">10.1093/database/bar011</citation>
+        <citation type="doi">10.1093/nar/gkv350</citation>
+    </citations>
+</tool>
diff --git a/tools/data_source/cbi_rice_mart.xml b/tools/data_source/cbi_rice_mart.xml
new file mode 100644
index 0000000..c867cab
--- /dev/null
+++ b/tools/data_source/cbi_rice_mart.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If value of 'URL_method' is 'post', any additional params coming back in the
+    initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
+-->
+<tool name="CBI Rice Mart" id="cbi_rice_mart" tool_type="data_source" version="1.0.1">
+    <description>rice mart</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://ricemart.cbi.edu.cn/biomart/martview/" check_values="false" method="get" target="_top">
+        <display>go to RMap rice mart $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner/biomart" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&amp;" first_separator="?" join="=">
+                <value name="_export" missing="1" />
+                <value name="GALAXY_URL" missing="0" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="data_type" remote_name="exportView_outputformat" missing="tabular" >
+            <value_translation>
+                <value galaxy_value="tabular" remote_value="TSV" />
+            </value_translation>
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
+        <request_param galaxy_name="dbkey" remote_name="dbkey" missing="?" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="Rice mart query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="tabular" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/data_source.py b/tools/data_source/data_source.py
new file mode 100644
index 0000000..53121f1
--- /dev/null
+++ b/tools/data_source/data_source.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# Retrieves data from external data source applications and stores in a dataset file.
+# Data source application parameters are temporarily stored in the dataset file.
+import os
+import socket
+import sys
+from json import dumps, loads
+
+from six.moves.urllib.parse import urlencode
+from six.moves.urllib.request import urlopen
+
+from galaxy.datatypes import sniff
+from galaxy.datatypes.registry import Registry
+from galaxy.jobs import TOOL_PROVIDED_JOB_METADATA_FILE
+from galaxy.util import get_charset_from_http_headers
+
+GALAXY_PARAM_PREFIX = 'GALAXY'
+GALAXY_ROOT_DIR = os.path.realpath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) )
+GALAXY_DATATYPES_CONF_FILE = os.path.join( GALAXY_ROOT_DIR, 'datatypes_conf.xml' )
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def load_input_parameters( filename, erase_file=True ):
+    datasource_params = {}
+    try:
+        json_params = loads( open( filename, 'r' ).read() )
+        datasource_params = json_params.get( 'param_dict' )
+    except Exception:
+        json_params = None
+        for line in open( filename, 'r' ):
+            try:
+                line = line.strip()
+                fields = line.split( '\t' )
+                datasource_params[ fields[0] ] = fields[1]
+            except Exception:
+                continue
+    if erase_file:
+        open( filename, 'w' ).close()  # open file for writing, then close, removes params from file
+    return json_params, datasource_params
+
+
+def __main__():
+    filename = sys.argv[1]
+    try:
+        max_file_size = int( sys.argv[2] )
+    except Exception:
+        max_file_size = 0
+
+    job_params, params = load_input_parameters( filename )
+    if job_params is None:  # using an older tabular file
+        enhanced_handling = False
+        job_params = dict( param_dict=params )
+        job_params[ 'output_data' ] = [ dict( out_data_name='output',
+                                              ext='data',
+                                              file_name=filename,
+                                              extra_files_path=None ) ]
+        job_params[ 'job_config' ] = dict( GALAXY_ROOT_DIR=GALAXY_ROOT_DIR, GALAXY_DATATYPES_CONF_FILE=GALAXY_DATATYPES_CONF_FILE, TOOL_PROVIDED_JOB_METADATA_FILE=TOOL_PROVIDED_JOB_METADATA_FILE )
+    else:
+        enhanced_handling = True
+        json_file = open( job_params[ 'job_config' ][ 'TOOL_PROVIDED_JOB_METADATA_FILE' ], 'w' )  # specially named file for output junk to pass onto set metadata
+
+    datatypes_registry = Registry()
+    datatypes_registry.load_datatypes( root_dir=job_params[ 'job_config' ][ 'GALAXY_ROOT_DIR' ], config=job_params[ 'job_config' ][ 'GALAXY_DATATYPES_CONF_FILE' ] )
+
+    URL = params.get( 'URL', None )  # using exactly URL indicates that only one dataset is being downloaded
+    URL_method = params.get( 'URL_method', None )
+
+    # The Python support for fetching resources from the web is layered. urllib uses the httplib
+    # library, which in turn uses the socket library.  As of Python 2.3 you can specify how long
+    # a socket should wait for a response before timing out. By default the socket module has no
+    # timeout and can hang. Currently, the socket timeout is not exposed at the httplib or urllib2
+    # levels. However, you can set the default timeout ( in seconds ) globally for all sockets by
+    # doing the following.
+    socket.setdefaulttimeout( 600 )
+
+    for data_dict in job_params[ 'output_data' ]:
+        cur_filename = data_dict.get( 'file_name', filename )
+        cur_URL = params.get( '%s|%s|URL' % ( GALAXY_PARAM_PREFIX, data_dict[ 'out_data_name' ] ), URL )
+        if not cur_URL:
+            open( cur_filename, 'w' ).write( "" )
+            stop_err( 'The remote data source application has not sent back a URL parameter in the request.' )
+
+        # The following calls to urlopen() will use the above default timeout
+        try:
+            if not URL_method or URL_method == 'get':
+                page = urlopen( cur_URL )
+            elif URL_method == 'post':
+                page = urlopen( cur_URL, urlencode( params ) )
+        except Exception as e:
+            stop_err( 'The remote data source application may be offline, please try again later. Error: %s' % str( e ) )
+        if max_file_size:
+            file_size = int( page.info().get( 'Content-Length', 0 ) )
+            if file_size > max_file_size:
+                stop_err( 'The size of the data (%d bytes) you have requested exceeds the maximum allowed (%d bytes) on this server.' % ( file_size, max_file_size ) )
+        # do sniff stream for multi_byte
+        try:
+            cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename, source_encoding=get_charset_from_http_headers( page.headers ) )
+        except Exception as e:
+            stop_err( 'Unable to fetch %s:\n%s' % ( cur_URL, e ) )
+
+        # here import checks that upload tool performs
+        if enhanced_handling:
+            try:
+                ext = sniff.handle_uploaded_dataset_file( filename, datatypes_registry, ext=data_dict[ 'ext' ], is_multi_byte=is_multi_byte )
+            except Exception as e:
+                stop_err( str( e ) )
+            info = dict( type='dataset',
+                         dataset_id=data_dict[ 'dataset_id' ],
+                         ext=ext)
+
+            json_file.write( "%s\n" % dumps( info ) )
+
+
+if __name__ == "__main__":
+    __main__()
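For reference, a minimal sketch of the JSON parameter file that load_input_parameters() above expects in the enhanced case; the keys match what the script reads, but every value below is an invented placeholder (the legacy fallback is plain key<TAB>value lines):

# Sketch only: illustrative parameter file for data_source.py.
import json

job_params = {
    'param_dict': {'URL': 'http://example.org/export', 'URL_method': 'get'},
    'output_data': [{'out_data_name': 'output', 'ext': 'data',
                     'file_name': '/tmp/dataset_1.dat',
                     'extra_files_path': None, 'dataset_id': 1}],
    'job_config': {'GALAXY_ROOT_DIR': '.',
                   'GALAXY_DATATYPES_CONF_FILE': 'datatypes_conf.xml',
                   'TOOL_PROVIDED_JOB_METADATA_FILE': 'galaxy.json'},
}
with open('/tmp/dataset_1.dat', 'w') as fh:
    json.dump(job_params, fh)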
diff --git a/tools/data_source/ebi_sra.xml b/tools/data_source/ebi_sra.xml
new file mode 100644
index 0000000..8336c87
--- /dev/null
+++ b/tools/data_source/ebi_sra.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="EBI SRA" id="ebi_sra_main" tool_type="data_source" version="1.0.1">
+    <description>ENA SRA</description>
+    <!-- This Python script imports the file into Galaxy -->
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <!-- The URL where Galaxy will forward the user when this tool is accessed from the Get Data menu -->
+    <inputs action="https://www.ebi.ac.uk/ena/data/search" check_values="false" method="get">
+        <display>go to EBI SRA server $GALAXY_URL</display>
+    </inputs>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="auto"/>
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/eupathdb.xml b/tools/data_source/eupathdb.xml
new file mode 100644
index 0000000..c3e958b
--- /dev/null
+++ b/tools/data_source/eupathdb.xml
@@ -0,0 +1,13 @@
+<tool name="EuPathDB" id="eupathdb" tool_type="data_source" url_method="post" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://eupathdb.org/eupathdb/queries_tools.jsp" check_values="false" method="get"> 
+        <display>go to EuPathDB server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=eupathdb" />
+    </inputs>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="tabular" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/fetch.py b/tools/data_source/fetch.py
new file mode 100644
index 0000000..873cc2f
--- /dev/null
+++ b/tools/data_source/fetch.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+"""
+Fetch the contents of a URL and write them to a local file.
+"""
+from __future__ import print_function
+
+import sys
+from six.moves.urllib.request import urlopen
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+BUFFER = 1048576
+
+url = sys.argv[1]
+out_name = sys.argv[2]
+
+out = open(out_name, 'wt')
+try:
+    page = urlopen(url)
+    while True:
+        data = page.read(BUFFER)
+        if not data:
+            break
+        out.write(data)
+except Exception as e:
+    print('Error getting the data -> %s' % e)
+out.close()
diff --git a/tools/data_source/fly_modencode.xml b/tools/data_source/fly_modencode.xml
new file mode 100644
index 0000000..3d3f5b8
--- /dev/null
+++ b/tools/data_source/fly_modencode.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<tool name="modENCODE fly" id="modENCODEfly" tool_type="data_source" version="1.0.1">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://gbrowse.modencode.org/fgb2/gbrowse/fly" check_values="false" target="_top"> 
+        <display>go to modENCODE fly server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=modENCODEfly" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="dbkey" remote_name="dbkey" missing="dm3" >
+            <value_translation>
+                <value galaxy_value="dm3" remote_value="fly" />
+            </value_translation>
+        </request_param>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&amp;" first_separator="?" join="=">
+                <value name="d" missing="" />
+                <value name="dbkey" missing="dm3" />
+                <value name="q" missing="" />
+                <value name="s" missing="" />
+                <value name="t" missing="" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" />
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" label="${tool.name} on $getVar( 'q', 'unknown position' )"/>
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/flymine.xml b/tools/data_source/flymine.xml
new file mode 100644
index 0000000..373c8ea
--- /dev/null
+++ b/tools/data_source/flymine.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If value of 'URL_method' is 'post', any additional params coming back in the
+    initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
+-->
+<tool name="Flymine" id="flymine" tool_type="data_source" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://www.flymine.org" check_values="false" method="get"> 
+        <display>go to Flymine server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="URL" remote_name="URL" missing="" />
+        <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="txt" /> <!-- intermine currently always provides 'txt', make this auto detect -->
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
+
diff --git a/tools/data_source/flymine_test.xml b/tools/data_source/flymine_test.xml
new file mode 100644
index 0000000..72b64f5
--- /dev/null
+++ b/tools/data_source/flymine_test.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If value of 'URL_method' is 'post', any additional params coming back in the
+    initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
+-->
+<tool name="Flymine test" id="flymine_test" tool_type="data_source" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://preview.flymine.org/preview/begin.do" check_values="false" method="get"> 
+        <display>go to Flymine server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=flymine" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="URL" remote_name="URL" missing="" />
+        <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="FlyMine query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="txt" />
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
+
diff --git a/tools/data_source/genbank.py b/tools/data_source/genbank.py
new file mode 100644
index 0000000..91f59d3
--- /dev/null
+++ b/tools/data_source/genbank.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import sys
+import textwrap
+
+from Bio import GenBank
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def make_fasta(rec):
+    '''Creates fasta format from a record'''
+    gi = rec.annotations.get('gi', '')
+    org = rec.annotations.get('organism', '')
+    date = rec.annotations.get('date', '')
+    head = '>gi:%s, id:%s, org:%s, date:%s\n' % (gi, rec.id, org, date)
+    body = '\n'.join(textwrap.wrap(rec.seq.data, width=80))
+    return head, body
+
+
+if __name__ == '__main__':
+    mode = sys.argv[1]
+    text = sys.argv[2]
+    output_file = sys.argv[3]
+
+    print('Searching for %s <br>' % text)
+
+    # check if inputs are all numbers
+    try:
+        gi_list = text.split()
+        [int(_) for _ in gi_list]
+    except ValueError:
+        gi_list = GenBank.search_for(text, max_ids=10)
+
+    fp = open(output_file, 'wt')
+    record_parser = GenBank.FeatureParser()
+    ncbi_dict = GenBank.NCBIDictionary(mode, 'genbank', parser=record_parser)
+    for gid in gi_list:
+        res = ncbi_dict[gid]
+        head, body = make_fasta(res)
+        fp.write(head + body + '\n')
+        print(head)
+    fp.close()
diff --git a/tools/data_source/genbank.xml b/tools/data_source/genbank.xml
new file mode 100644
index 0000000..65bd9c7
--- /dev/null
+++ b/tools/data_source/genbank.xml
@@ -0,0 +1,25 @@
+<tool id="genbank" name="Connect to Genbank" version="1.0.0">
+<!--  <description>queries genbank</description> -->
+  <command interpreter="python">genbank.py $mode "$text" $output</command>
+  <inputs>
+    <param name="mode" type="select">
+      <option value="nucleotide">nucleotide database</option>
+      <option value="protein">proteins database</option>
+      <label>Get sequences from the</label>
+    </param>
+    <param name="text" size="40" type="text" value="6273291">
+      <label>with accession ID</label>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="fasta" name="output" />
+  </outputs>
+  <help>
+At the moment this tool allows the following simple searches:
+
+- by GI: **51594135**
+- by accession: **CF622840**
+- using text: **human hbb1** (this feature is experimental)
+  </help>
+
+</tool>
diff --git a/tools/data_source/gramene_mart.xml b/tools/data_source/gramene_mart.xml
new file mode 100644
index 0000000..4c4aaed
--- /dev/null
+++ b/tools/data_source/gramene_mart.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If value of 'URL_method' is 'post', any additional params coming back in the
+    initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
+
+    TODO: Hack to get biomart to work - the 'add_to_URL' param can be eliminated when the Biomart team encodes URL prior to sending, meanwhile
+    everything including and beyond the first '&' is truncated from URL.  They said they'll let us know when this is fixed at their end.
+-->
+<tool name="GrameneMart" id="gramenemart" tool_type="data_source" version="1.0.1">
+    <description>Central server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://www.gramene.org/biomart/martview" check_values="false" method="get" target="_top">
+        <display>go to GrameneMart Central $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner/biomart" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&amp;" first_separator="?" join="=">
+                <value name="_export" missing="1" />
+                <value name="GALAXY_URL" missing="0" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="data_type" remote_name="exportView_outputformat" missing="tabular">
+            <value_translation>
+                <value galaxy_value="tabular" remote_value="TSV" />
+            </value_translation> 
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
+        <request_param galaxy_name="dbkey" remote_name="dbkey" missing="?" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="Biomart query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="tabular" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/hapmapmart.xml b/tools/data_source/hapmapmart.xml
new file mode 100644
index 0000000..55c0179
--- /dev/null
+++ b/tools/data_source/hapmapmart.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<!--
+    Adapted from biomart.xml to test the HapMap BioMart.  The hard part will be converting the results to
+    lped/pbed: the returned data comes back in many different shapes, and the sample IDs must be obtained
+    separately to build reliable pedigrees.
+
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If the value of 'URL_method' is 'post', any additional params coming back in the
+    initial response (in addition to 'URL') will be encoded and appended to the URL, and a POST will be performed.
+
+    TODO: Hack to get BioMart to work - the 'add_to_URL' param can be eliminated once the BioMart team encodes the URL
+    prior to sending; meanwhile, everything from the first '&' onward is truncated from the URL.  They said they will let us know when this is fixed on their end.
+-->
+<tool name="HapMapMart" id="hapmapmart" tool_type="data_source" version="0.0.01">
+    <description>HapMap Biomart</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://hapmap.ncbi.nlm.nih.gov/biomart/martview" check_values="false" method="get" target="_top">
+        <display>go to HapMap BioMart $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner/hapmapmart" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&" first_separator="?" join="=">
+                <value name="_export" missing="1" />
+                <value name="GALAXY_URL" missing="0" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="data_type" remote_name="exportView_outputformat" missing="tabular" >
+            <value_translation>
+                <value galaxy_value="tabular" remote_value="TSV" />
+            </value_translation>
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="get" />
+        <request_param galaxy_name="dbkey" remote_name="dbkey" missing="hg18" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="human" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="HapMap query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="tabular" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/hbvar.xml b/tools/data_source/hbvar.xml
new file mode 100644
index 0000000..e087141
--- /dev/null
+++ b/tools/data_source/hbvar.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<tool name="HbVar" id="hbvar" tool_type="data_source" version="2.0.0">
+
+    <description>Human Hemoglobin Variants and Thalassemias</description>
+
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+
+    <inputs action="http://globin.bx.psu.edu/cgi-bin/hbvar/query_vars3" check_values="false" method="get" target="_top">
+        <display>go to HbVar database $GALAXY_URL $tool_id</display>
+    </inputs>
+
+    <uihints minwidth="800"/>
+
+    <outputs>
+        <data name="output" format="auto" />
+    </outputs>
+
+    <options sanitize="False" refresh="True"/>
+
+</tool>
+
diff --git a/tools/data_source/hbvar_filter.py b/tools/data_source/hbvar_filter.py
new file mode 100644
index 0000000..f072de7
--- /dev/null
+++ b/tools/data_source/hbvar_filter.py
@@ -0,0 +1,81 @@
+# TODO: Set dbkey to proper UCSC build, if known
+import shutil
+import tempfile
+
+from six.moves.urllib.request import urlopen
+
+from galaxy import datatypes
+
+
+def exec_before_job( app, inp_data, out_data, param_dict, tool=None):
+    """Sets the name of the data"""
+    data_name = param_dict.get( 'name', 'HbVar query' )
+    data_type = param_dict.get( 'type', 'txt' )
+    if data_type == 'txt':
+        data_type = 'interval'  # All data is TSV, assume interval
+    name, data = next(iter(out_data.items()))
+    data = app.datatypes_registry.change_datatype(data, data_type)
+    data.name = data_name
+    out_data[name] = data
+
+
+def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
+    """Verifies the data after the run"""
+
+    URL = param_dict.get( 'URL', None )
+    if not URL:
+        raise Exception('Datasource has not sent back a URL parameter')
+    URL = URL + '&_export=1&GALAXY_URL=0'
+
+    CHUNK_SIZE = 2**20  # 1Mb
+    MAX_SIZE = CHUNK_SIZE * 100
+
+    try:
+        page = urlopen(URL)
+    except Exception as exc:
+        raise Exception('Problems connecting to %s (%s)' % (URL, exc) )
+
+    data = next(iter(out_data.values()))
+
+    fp = open(data.file_name, 'wb')
+    size = 0
+    while 1:
+        chunk = page.read(CHUNK_SIZE)
+        if not chunk:
+            break
+        if size > MAX_SIZE:
+            raise Exception('----- maximum datasize exceeded ---')
+        size += len(chunk)
+        fp.write(chunk)
+
+    fp.close()
+    # Set meta data, format file to be valid interval type
+    if isinstance(data.datatype, datatypes.interval.Interval):
+        data.set_meta(first_line_is_header=True)
+        # Check for missing metadata; if it is all there, rewrite the file line
+        # by line, otherwise fall back to the tabular datatype.
+        if not data.missing_meta():
+            line_ctr = -1
+            temp = tempfile.NamedTemporaryFile('w', delete=False)
+            temp_filename = temp.name
+            # Validate that the interval metadata columns are integers; a bad
+            # value raises here, before the dataset file is rewritten.
+            int(data.metadata.chromCol)
+            int(data.metadata.startCol)
+            int(data.metadata.strandCol)
+
+            for line in open(data.file_name, 'r'):
+                line_ctr += 1
+
+                fields = line.strip().split('\t')
+
+                temp.write("%s\n" % '\t'.join(fields))
+
+            temp.close()
+            shutil.move(temp_filename, data.file_name)
+
+        else:
+            data = app.datatypes_registry.change_datatype(data, 'tabular')
+    data.set_size()
+    data.set_peek()
+    app.model.context.add( data )
+    app.model.context.flush()
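The download loop in exec_after_process() streams the remote page to disk in 1 MB chunks and aborts once MAX_SIZE is exceeded. The same pattern, factored into a small standalone helper for clarity (the name copy_capped is hypothetical; this is a sketch of the logic above, checking the cap after each chunk):

    def copy_capped(page, path, chunk_size=2 ** 20, max_size=100 * 2 ** 20):
        """Stream 'page' to 'path', raising once more than max_size bytes arrive."""
        size = 0
        with open(path, 'wb') as fp:
            while True:
                chunk = page.read(chunk_size)
                if not chunk:
                    break
                size += len(chunk)
                if size > max_size:
                    raise Exception('----- maximum datasize exceeded ---')
                fp.write(chunk)
        return size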
diff --git a/tools/data_source/import.py b/tools/data_source/import.py
new file mode 100644
index 0000000..8c723c9
--- /dev/null
+++ b/tools/data_source/import.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+"""
+Script that imports locally stored data as a new dataset for the user
+Usage: import id outputfile
+"""
+from __future__ import print_function
+
+import os
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+BUFFER = 1048576
+
+dataid = sys.argv[1]
+out_name = sys.argv[2]
+
+
+id2name = {
+    'eryth'         : 'ErythPreCRMmm3_cusTrk.txt',
+    'cishg16'       : 'ReglRegHBBhg16CusTrk.txt',
+    'cishg17'       : 'ReglRegHBBhg17CusTrk.txt',
+    'exons'         : 'ExonsKnownGenes_mm3.txt',
+    'krhg16'        : 'known_regulatory_hg16.bed',
+    'krhg17'        : 'known_regulatory_hg17.bed',
+    'tARhg16mmc'    : 'hg16.mouse.t_AR.cold.bed',
+    'tARhg16mmm'    : 'hg16.mouse.t_AR.medium.bed',
+    'tARhg16mmh'    : 'hg16.mouse.t_AR.hot.bed',
+    'tARhg16rnc'    : 'hg16.rat.t_AR.cold.bed',
+    'tARhg16rnm'    : 'hg16.rat.t_AR.medium.bed',
+    'tARhg16rnh'    : 'hg16.rat.t_AR.hot.bed',
+    'phastConsHg16' : 'phastConsMost_hg16.bed',
+    'omimhg16'      : 'omimDisorders_hg16.tab',
+    'omimhg17'      : 'omimDisorders_hg17.tab',
+}
+
+fname = id2name.get(dataid, '')
+if not fname:
+    print('Unknown dataset id %s; nothing imported' % dataid)
+    sys.exit()
+else:
+    print('Imported %s' % fname)
+
+# this path is hardcoded
+inp_name = os.path.join('database', 'import', fname)
+
+try:
+    inp = open(inp_name, 'rt')
+except IOError:
+    print('Could not find file %s' % inp_name)
+    sys.exit()
+
+out = open(out_name, 'wt')
+
+while 1:
+    data = inp.read(BUFFER)
+    if not data:
+        break
+    out.write(data)
+
+inp.close()
+out.close()
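import.py is invoked by the tool wrapper below (import.xml) with the selected dataset id and the destination path, e.g. (the output filename is illustrative):

    python import.py eryth output.bed

The id must be one of the keys in id2name, and the corresponding file must already exist under database/import/.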
diff --git a/tools/data_source/import.xml b/tools/data_source/import.xml
new file mode 100644
index 0000000..99d0450
--- /dev/null
+++ b/tools/data_source/import.xml
@@ -0,0 +1,27 @@
+<tool id="Featured datasets4" name="Featured datasets" version="1.0.0">
+  <description>(PSU prepared queries)</description>
+  <command interpreter="python">import.py $data $output</command>
+  <inputs>
+    <display>$data</display>
+    <param name="data" type="select" display="radio">
+      <option value="eryth">Erythroid predicted cis-regulatory modules</option>
+      <option value="exons">Exons of protein-coding genes in the mouse genome, assembly mm3</option>
+      <option value="cishg16">Known cis-regulatory modules in the human HBB gene complex (hg16)</option>
+      <option value="cishg17">Known cis-regulatory modules in the human HBB gene complex (hg17)</option>
+      <option value="krhg16">Known regulatory regions (hg16)</option>
+      <option value="krhg17">Known regulatory regions (hg17)</option>
+      <option value="tARhg16mmc">Human (hg16) evolutionary cold region (vs mouse)</option>
+      <option value="tARhg16mmm">Human (hg16) evolutionary medium region (vs mouse)</option>
+      <option value="tARhg16mmh">Human (hg16) evolutionary hot region (vs mouse)</option>
+      <option value="tARhg16rnc">Human (hg16) evolutionary cold region (vs rat)</option>
+      <option value="tARhg16rnm">Human (hg16) evolutionary medium region (vs rat)</option>
+      <option value="tARhg16rnh">Human (hg16) evolutionary hot region (vs rat)</option>
+      <option value="phastConsHg16">phastCons hg16 (stringent, top ~5%) from UCSC</option>
+      <option value="omimhg16">OMIM disorders (hg16)</option>
+      <option value="omimhg17">OMIM disorders (hg17)</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="bed" name="output" />
+  </outputs>
+</tool>
diff --git a/tools/data_source/metabolicmine.xml b/tools/data_source/metabolicmine.xml
new file mode 100644
index 0000000..e5de2e3
--- /dev/null
+++ b/tools/data_source/metabolicmine.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+<tool name="metabolicMine" id="metabolicmine" tool_type="data_source" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://www.metabolicmine.org/beta/begin.do" check_values="false" method="get"> 
+        <display>go to metabolicMine server $GALAXY_URL</display>
+    </inputs>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/microbial_import.py b/tools/data_source/microbial_import.py
new file mode 100644
index 0000000..9dfcb91
--- /dev/null
+++ b/tools/data_source/microbial_import.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+"""
+Script that imports locally stored data as a new dataset for the user
+Usage: import id outputfile
+"""
+from __future__ import print_function
+
+import sys
+from shutil import copyfile
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+BUFFER = 1048576
+
+uids = sys.argv[1].split(",")
+out_file1 = sys.argv[2]
+
+# remove 'None' placeholders from uids
+uids = [uid for uid in uids if uid != 'None']
+
+
+# create dictionary keyed by uid of tuples of (displayName,filePath,build) for all files
+available_files = {}
+try:
+    filename = sys.argv[-1]
+    for i, line in enumerate( open( filename ) ):
+        if not line or line[0:1] == "#":
+            continue
+        fields = line.split('\t')
+        try:
+            info_type = fields.pop(0)
+
+            if info_type.upper() == "DATA":
+                uid = fields.pop(0)
+                org_num = fields.pop(0)
+                chr_acc = fields.pop(0)
+                feature = fields.pop(0)
+                filetype = fields.pop(0)
+                path = fields.pop(0).replace("\r", "").replace("\n", "")
+
+                file_type = filetype
+                build = org_num
+                description = uid
+            else:
+                continue
+        except IndexError:
+            continue
+
+        available_files[uid] = (description, path, build, file_type, chr_acc)
+except IOError:
+    print("It appears that the configuration file for this tool is missing.", file=sys.stderr)
+
+# create list of tuples of (displayName,FileName,build) for desired files
+desired_files = []
+for uid in uids:
+    try:
+        desired_files.append(available_files[uid])
+    except KeyError:
+        continue
+
+# copy first file to contents of given output file
+file1_copied = False
+while not file1_copied:
+    try:
+        first_file = desired_files.pop(0)
+    except IndexError:
+        print("There were no valid files requested.", file=sys.stderr)
+        sys.exit()
+    file1_desc, file1_path, file1_build, file1_type, file1_chr_acc = first_file
+    try:
+        copyfile(file1_path, out_file1)
+        print("#File1\t" + file1_desc + "\t" + file1_chr_acc + "\t" + file1_build + "\t" + file1_type)
+        file1_copied = True
+    except IOError:
+        print("The file specified is missing.", file=sys.stderr)
+        continue
+
+# Tell post-process filter where remaining files reside
+for extra_output in desired_files:
+    file_desc, file_path, file_build, file_type, file_chr_acc = extra_output
+    print("#NewFile\t" + file_desc + "\t" + file_chr_acc + "\t" + file_build + "\t" + file_path + "\t" + file_type)
diff --git a/tools/data_source/microbial_import.xml b/tools/data_source/microbial_import.xml
new file mode 100644
index 0000000..b07f557
--- /dev/null
+++ b/tools/data_source/microbial_import.xml
@@ -0,0 +1,114 @@
+<tool id="microbial_import1" name="Get Microbial Data" version="1.0.0">
+  <command interpreter="python">microbial_import.py $CDS,$tRNA,$rRNA,$sequence,$GeneMark,$GeneMarkHMM,$Glimmer3 $output ${GALAXY_DATA_INDEX_DIR}/microbial_data.loc</command>
+  <inputs>
+      <param name="kingdom" type="select" label="Select the Desired Kingdom">
+        <options from_file="microbial_data.loc" startswith="ORG">
+          <column name="name" index="3"/>
+          <column name="value" index="3"/>
+          <filter type="unique_value" name="unique" column="3"/>
+        </options>
+      </param>
+      <param name="org" type="select" label="Select the Desired Organism">
+        <options from_file="microbial_data.loc" startswith="ORG">
+          <column name="name" index="2"/>
+          <column name="value" index="1"/>
+          <filter type="param_value" ref="kingdom" name="kingdom" column="3"/>
+          <filter type="sort_by" column="2"/>
+        </options>
+      </param>
+      <param name="CDS" type="select" label="Select Desired Coding Sequences" display="checkboxes" multiple="True">
+        <options from_file="microbial_data.loc" startswith="DATA">
+          <column name="name" index="3"/>
+          <column name="value" index="1"/>
+          <column name="feature" index="4"/>
+          <filter type="param_value" ref="org" name="kingdom" column="2"/>
+          <filter type="static_value" name="feature" value="CDS" column="4"/>
+        </options>
+      </param>
+      <param name="tRNA" type="select" label="Select Desired tRNA" display="checkboxes" multiple="True">
+        <options from_file="microbial_data.loc" startswith="DATA">
+          <column name="name" index="3"/>
+          <column name="value" index="1"/>
+          <column name="feature" index="4"/>
+          <filter type="param_value" ref="org" name="kingdom" column="2"/>
+          <filter type="static_value" name="feature" value="tRNA" column="4"/>
+        </options>
+      </param>
+      <param name="rRNA" type="select" label="Select Desired rRNA" display="checkboxes" multiple="True">
+        <options from_file="microbial_data.loc" startswith="DATA">
+          <column name="name" index="3"/>
+          <column name="value" index="1"/>
+          <column name="feature" index="4"/>
+          <filter type="param_value" ref="org" name="kingdom" column="2"/>
+          <filter type="static_value" name="feature" value="rRNA" column="4"/>
+        </options>
+      </param>
+      <param name="sequence" type="select" label="Select Desired DNA Sequences" display="checkboxes" multiple="True">
+        <options from_file="microbial_data.loc" startswith="DATA">
+          <column name="name" index="3"/>
+          <column name="value" index="1"/>
+          <column name="feature" index="4"/>
+          <filter type="param_value" ref="org" name="kingdom" column="2"/>
+          <filter type="static_value" name="feature" value="sequence" column="4"/>
+        </options>
+      </param>
+      <param name="GeneMark" type="select" label="Select Desired GeneMark Annotations" display="checkboxes" multiple="True">
+        <options from_file="microbial_data.loc" startswith="DATA">
+          <column name="name" index="3"/>
+          <column name="value" index="1"/>
+          <column name="feature" index="4"/>
+          <filter type="param_value" ref="org" name="kingdom" column="2"/>
+          <filter type="static_value" name="feature" value="GeneMark" column="4"/>
+        </options>
+      </param>
+      <param name="GeneMarkHMM" type="select" label="Select Desired GeneMarkHMM Annotations" display="checkboxes" multiple="True">
+        <options from_file="microbial_data.loc" startswith="DATA">
+          <column name="name" index="3"/>
+          <column name="value" index="1"/>
+          <column name="feature" index="4"/>
+          <filter type="param_value" ref="org" name="kingdom" column="2"/>
+          <filter type="static_value" name="feature" value="GeneMarkHMM" column="4"/>
+        </options>
+      </param>
+      <param name="Glimmer3" type="select" label="Select Desired Glimmer3 Annotations" display="checkboxes" multiple="True">
+        <options from_file="microbial_data.loc" startswith="DATA">
+          <column name="name" index="3"/>
+          <column name="value" index="1"/>
+          <column name="feature" index="4"/>
+          <filter type="param_value" ref="org" name="kingdom" column="2"/>
+          <filter type="static_value" name="feature" value="Glimmer3" column="4"/>
+        </options>
+      </param>
+  </inputs>
+  <outputs>
+    <data format="bed" name="output"/>
+  </outputs>
+  <code file="microbial_import_code.py"/>
+  <help>
+
+This tool allows you to obtain various genomic datasets for any completed Microbial Genome Project as listed at NCBI_.
+
+.. _NCBI: http://www.ncbi.nlm.nih.gov/genomes/lproks.cgi?view=1
+
+Currently available datasets include:
+  1. CDS
+  2. tRNA
+  3. rRNA
+  4. FASTA Sequences
+  5. GeneMark Annotations
+  6. GeneMarkHMM Annotations
+  7. Glimmer3 Annotations
+
+-----
+
+Organisms in **bold** are available at the UCSC Browser.
+
+-----
+
+.. class:: infomark
+
+**Note:** Having trouble locating your organism?  Click here_ for a list of available species and their location.
+
+.. _here: https://wiki.galaxyproject.org/Main/Data%20Libraries/Microbes
+  </help>
+</tool>
diff --git a/tools/data_source/microbial_import_code.py b/tools/data_source/microbial_import_code.py
new file mode 100644
index 0000000..dbc14ec
--- /dev/null
+++ b/tools/data_source/microbial_import_code.py
@@ -0,0 +1,158 @@
+from __future__ import print_function
+
+from shutil import copyfile
+
+from galaxy import tools
+
+
+def load_microbial_data( GALAXY_DATA_INDEX_DIR, sep='\t' ):
+    # FIXME: this function is duplicated in the DynamicOptions class.  It is used here only to
+    # set data.name in exec_after_process().
+    microbe_info = {}
+    orgs = {}
+
+    filename = "%s/microbial_data.loc" % GALAXY_DATA_INDEX_DIR
+    for i, line in enumerate( open( filename ) ):
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( '#' ):
+            fields = line.split( sep )
+            # read each line, if not enough fields, go to next line
+            try:
+                info_type = fields.pop(0)
+                if info_type.upper() == "ORG":
+                    # ORG     12521   Clostridium perfringens SM101   bacteria        Firmicutes      CP000312,CP000313,CP000314,CP000315     http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=genomeprj&cmd=Retrieve&dopt=Overview&list_uids=12521
+                    org_num = fields.pop(0)
+                    name = fields.pop(0)
+                    kingdom = fields.pop(0)
+                    group = fields.pop(0)
+                    chromosomes = fields.pop(0)
+                    info_url = fields.pop(0)
+                    link_site = fields.pop(0)
+                    if org_num not in orgs:
+                        orgs[ org_num ] = {}
+                        orgs[ org_num ][ 'chrs' ] = {}
+                    orgs[ org_num ][ 'name' ] = name
+                    orgs[ org_num ][ 'kingdom' ] = kingdom
+                    orgs[ org_num ][ 'group' ] = group
+                    orgs[ org_num ][ 'chromosomes' ] = chromosomes
+                    orgs[ org_num ][ 'info_url' ] = info_url
+                    orgs[ org_num ][ 'link_site' ] = link_site
+                elif info_type.upper() == "CHR":
+                    # CHR     12521   CP000315        Clostridium perfringens phage phiSM101, complete genome 38092   110684521       CP000315.1
+                    org_num = fields.pop(0)
+                    chr_acc = fields.pop(0)
+                    name = fields.pop(0)
+                    length = fields.pop(0)
+                    gi = fields.pop(0)
+                    gb = fields.pop(0)
+                    info_url = fields.pop(0)
+                    chr = {}
+                    chr[ 'name' ] = name
+                    chr[ 'length' ] = length
+                    chr[ 'gi' ] = gi
+                    chr[ 'gb' ] = gb
+                    chr[ 'info_url' ] = info_url
+                    if org_num not in orgs:
+                        orgs[ org_num ] = {}
+                        orgs[ org_num ][ 'chrs' ] = {}
+                    orgs[ org_num ][ 'chrs' ][ chr_acc ] = chr
+                elif info_type.upper() == "DATA":
+                    # DATA    12521_12521_CDS 12521   CP000315        CDS     bed     /home/djb396/alignments/playground/bacteria/12521/CP000315.CDS.bed
+                    uid = fields.pop(0)
+                    org_num = fields.pop(0)
+                    chr_acc = fields.pop(0)
+                    feature = fields.pop(0)
+                    filetype = fields.pop(0)
+                    path = fields.pop(0)
+                    data = {}
+                    data[ 'filetype' ] = filetype
+                    data[ 'path' ] = path
+                    data[ 'feature' ] = feature
+
+                    if org_num not in orgs:
+                        orgs[ org_num ] = {}
+                        orgs[ org_num ][ 'chrs' ] = {}
+                    if 'data' not in orgs[ org_num ][ 'chrs' ][ chr_acc ]:
+                        orgs[ org_num ][ 'chrs' ][ chr_acc ][ 'data' ] = {}
+                    orgs[ org_num ][ 'chrs' ][ chr_acc ][ 'data' ][ uid ] = data
+                else:
+                    continue
+            except (IndexError, KeyError):
+                continue
+    for org_num in orgs:
+        org = orgs[ org_num ]
+        if org[ 'kingdom' ] not in microbe_info:
+            microbe_info[ org[ 'kingdom' ] ] = {}
+        if org_num not in microbe_info[ org[ 'kingdom' ] ]:
+            microbe_info[ org[ 'kingdom' ] ][org_num] = org
+    return microbe_info
+
+
+# post processing, set build for data and add additional data to history
+def exec_after_process(app, inp_data, out_data, param_dict, tool, stdout, stderr):
+    base_dataset = next(iter(out_data.values()))
+    history = base_dataset.history
+    if history is None:
+        print("unknown history!")
+        return
+    kingdom = param_dict.get( 'kingdom', None )
+    org = param_dict.get( 'org', None )
+
+    # if not (kingdom or group or org):
+    if not (kingdom or org):
+        print("Parameters are not available.")
+    # workflow passes galaxy.tools.parameters.basic.UnvalidatedValue instead of values
+    if isinstance( kingdom, tools.parameters.basic.UnvalidatedValue ):
+        kingdom = kingdom.value
+    if isinstance( org, tools.parameters.basic.UnvalidatedValue ):
+        org = org.value
+
+    GALAXY_DATA_INDEX_DIR = app.config.tool_data_path
+    microbe_info = load_microbial_data( GALAXY_DATA_INDEX_DIR, sep='\t' )
+    split_stdout = stdout.split("\n")
+    basic_name = ""
+    for line in split_stdout:
+        fields = line.split("\t")
+        if fields[0] == "#File1":
+            description = fields[1]
+            chr = fields[2]
+            dbkey = fields[3]
+            file_type = fields[4]
+            data = next(iter(out_data.values()))
+            data.set_size()
+            basic_name = data.name
+            data.name = data.name + " (" + microbe_info[kingdom][org]['chrs'][chr]['data'][description]['feature'] + " for " + microbe_info[kingdom][org]['name'] + ":" + chr + ")"
+            data.dbkey = dbkey
+            data.info = data.name
+            data = app.datatypes_registry.change_datatype( data, file_type )
+            data.init_meta()
+            data.set_peek()
+            app.model.context.add( data )
+            app.model.context.flush()
+        elif fields[0] == "#NewFile":
+            description = fields[1]
+            chr = fields[2]
+            dbkey = fields[3]
+            filepath = fields[4]
+            file_type = fields[5]
+            newdata = app.model.HistoryDatasetAssociation( create_dataset=True, sa_session=app.model.context )  # This import should become a library
+            newdata.set_size()
+            newdata.extension = file_type
+            newdata.name = basic_name + " (" + microbe_info[kingdom][org]['chrs'][chr]['data'][description]['feature'] + " for " + microbe_info[kingdom][org]['name'] + ":" + chr + ")"
+            app.model.context.add( newdata )
+            app.model.context.flush()
+            app.security_agent.copy_dataset_permissions( base_dataset.dataset, newdata.dataset )
+            history.add_dataset( newdata )
+            app.model.context.add( history )
+            app.model.context.flush()
+            try:
+                copyfile(filepath, newdata.file_name)
+                newdata.info = newdata.name
+                newdata.state = newdata.states.OK
+            except:
+                newdata.info = "The requested file is missing from the system."
+                newdata.state = newdata.states.ERROR
+            newdata.dbkey = dbkey
+            newdata.init_meta()
+            newdata.set_peek()
+            app.model.context.flush()
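load_microbial_data() turns the flat ORG/CHR/DATA records of microbial_data.loc into a nested dict keyed first by kingdom and then by organism number. Using the example lines quoted in the comments above, the result has roughly this shape (a sketch; URL and path values abbreviated):

    microbe_info = {
        'bacteria': {
            '12521': {
                'name': 'Clostridium perfringens SM101',
                'kingdom': 'bacteria',
                'group': 'Firmicutes',
                'chromosomes': 'CP000312,CP000313,CP000314,CP000315',
                'info_url': '...',
                'link_site': '...',
                'chrs': {
                    'CP000315': {
                        'name': 'Clostridium perfringens phage phiSM101, complete genome',
                        'length': '38092',
                        'gi': '110684521',
                        'gb': 'CP000315.1',
                        'info_url': '...',
                        'data': {
                            '12521_12521_CDS': {
                                'feature': 'CDS',
                                'filetype': 'bed',
                                'path': '...',
                            },
                        },
                    },
                },
            },
        },
    }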
diff --git a/tools/data_source/modmine.xml b/tools/data_source/modmine.xml
new file mode 100644
index 0000000..065ecd6
--- /dev/null
+++ b/tools/data_source/modmine.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If the value of 'URL_method' is 'post', any additional params coming back in the
+    initial response (in addition to 'URL') will be encoded and appended to the URL, and a POST will be performed.
+-->
+<tool name="modENCODE modMine" id="modmine" tool_type="data_source" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://intermine.modencode.org/" check_values="false" method="get"> 
+        <display>go to modENCODE modMine server $GALAXY_URL</display>
+    </inputs>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
+
diff --git a/tools/data_source/mousemine.xml b/tools/data_source/mousemine.xml
new file mode 100644
index 0000000..ac783f2
--- /dev/null
+++ b/tools/data_source/mousemine.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If the value of 'URL_method' is 'post', any additional params coming back in the
+    initial response (in addition to 'URL') will be encoded and appended to the URL, and a POST will be performed.
+-->
+<tool name="MouseMine" id="mousemine" tool_type="data_source" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://www.mousemine.org/mousemine/begin.do" check_values="false" method="get"> 
+        <display>go to MouseMine server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=mousemine" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="URL" remote_name="URL" missing="" />
+        <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="MouseMine query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="txt" /> <!-- intermine currently always provides 'txt', make this auto detect -->
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
+
diff --git a/tools/data_source/ratmine.xml b/tools/data_source/ratmine.xml
new file mode 100644
index 0000000..25d271b
--- /dev/null
+++ b/tools/data_source/ratmine.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If the value of 'URL_method' is 'post', any additional params coming back in the
+    initial response (in addition to 'URL') will be encoded and appended to the URL, and a POST will be performed.
+-->
+<tool name="Ratmine" id="ratmine" tool_type="data_source" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://ratmine.mcw.edu/ratmine/begin.do" check_values="false" method="get"> 
+        <display>go to Ratmine server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=ratmine" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="URL" remote_name="URL" missing="" />
+        <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+        <request_param galaxy_name="organism" remote_name="organism" missing="" />
+        <request_param galaxy_name="table" remote_name="table" missing="" />
+        <request_param galaxy_name="description" remote_name="description" missing="" />
+        <request_param galaxy_name="name" remote_name="name" missing="Ratmine query" />
+        <request_param galaxy_name="info" remote_name="info" missing="" />
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="txt" /> <!-- intermine currently always provides 'txt', make this auto detect -->
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/ucsc_tablebrowser.xml b/tools/data_source/ucsc_tablebrowser.xml
new file mode 100644
index 0000000..f93aca6
--- /dev/null
+++ b/tools/data_source/ucsc_tablebrowser.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If the value of 'URL_method' is 'post', any additional params coming back in the
+    initial response (in addition to 'URL') will be encoded and appended to the URL, and a POST will be performed.
+-->
+<tool name="UCSC Main" id="ucsc_table_direct1" tool_type="data_source" version="1.0.0">
+    <description>table browser</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="https://genome.ucsc.edu/cgi-bin/hgTables" check_values="false" method="get">
+        <display>go to UCSC Table Browser $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
+        <param name="tool_id" type="hidden" value="ucsc_table_direct1" />
+        <param name="sendToGalaxy" type="hidden" value="1" />
+        <param name="hgta_compressType" type="hidden" value="none" />
+        <param name="hgta_outputType" type="hidden" value="bed" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="URL" remote_name="URL" missing="" />
+        <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+        <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
+        <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
+        <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
+        <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="primaryTable" />
+                <value galaxy_value="auto" remote_value="selectedFields" />
+                <value galaxy_value="wig" remote_value="wigData" />
+                <value galaxy_value="interval" remote_value="tab" />
+                <value galaxy_value="html" remote_value="hyperlinks" />
+                <value galaxy_value="fasta" remote_value="sequence" />
+                <value galaxy_value="gtf" remote_value="gff" />
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="tabular" label="${tool.name} on ${organism}: ${table} (#if $description == 'range' then $getVar( 'position', 'unknown position' ) else $description#)"/>
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+    <citations>
+        <citation type="doi">10.1093/database/bar011</citation>
+        <citation type="doi">10.1101/gr.229102</citation>
+    </citations>
+</tool>
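The value_translation block above maps the Table Browser's hgta_outputType vocabulary onto Galaxy datatypes before the dataset is created; conceptually it is a plain lookup that falls back to the request_param's 'missing' default. A sketch (names are hypothetical; the actual handling lives in Galaxy's request-parameter translation code):

    UCSC_OUTPUT_TYPES = {
        'primaryTable': 'auto',
        'selectedFields': 'auto',
        'wigData': 'wig',
        'tab': 'interval',
        'hyperlinks': 'html',
        'sequence': 'fasta',
        'gff': 'gtf',
    }

    def translate_data_type(remote_value, missing='auto'):
        # Unrecognised remote values fall back to the 'missing' default.
        return UCSC_OUTPUT_TYPES.get(remote_value, missing)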
diff --git a/tools/data_source/ucsc_tablebrowser_archaea.xml b/tools/data_source/ucsc_tablebrowser_archaea.xml
new file mode 100644
index 0000000..271b232
--- /dev/null
+++ b/tools/data_source/ucsc_tablebrowser_archaea.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If the value of 'URL_method' is 'post', any additional params coming back in the
+    initial response (in addition to 'URL') will be encoded and appended to the URL, and a POST will be performed.
+-->
+<tool name="UCSC Archaea" id="ucsc_table_direct_archaea1" tool_type="data_source" version="1.0.0">
+    <description>table browser</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://archaea.ucsc.edu/cgi-bin/hgTables" check_values="false" method="get">
+        <display>go to UCSC Table Browser $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
+        <param name="tool_id" type="hidden" value="ucsc_table_direct_archaea1" />
+        <param name="sendToGalaxy" type="hidden" value="1" />
+        <param name="hgta_compressType" type="hidden" value="none" />
+        <param name="hgta_outputType" type="hidden" value="bed" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="URL" remote_name="URL" missing="" />
+        <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+        <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
+        <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
+        <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
+        <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="primaryTable" />
+                <value galaxy_value="auto" remote_value="selectedFields" />
+                <value galaxy_value="wig" remote_value="wigData" />
+                <value galaxy_value="interval" remote_value="tab" />
+                <value galaxy_value="html" remote_value="hyperlinks" />
+                <value galaxy_value="fasta" remote_value="sequence" />
+                <value galaxy_value="gtf" remote_value="gff" />
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="tabular" label="${tool.name} on ${organism}: ${table} (#if $description == 'range' then $getVar( 'position', 'unknown position' ) else $description#)"/>
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+    <citations>
+        <citation type="doi">10.1093/database/bar011</citation>
+        <citation type="doi">10.1101/gr.229102</citation>
+        <citation type="doi">10.1093/nar/gkj134</citation>
+    </citations>
+</tool>
diff --git a/tools/data_source/ucsc_tablebrowser_test.xml b/tools/data_source/ucsc_tablebrowser_test.xml
new file mode 100644
index 0000000..ced9428
--- /dev/null
+++ b/tools/data_source/ucsc_tablebrowser_test.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<!--
+    If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+    the initial response.  If the value of 'URL_method' is 'post', any additional params coming back in the
+    initial response (in addition to 'URL') will be encoded and appended to the URL, and a POST will be performed.
+-->
+<tool name="UCSC Test" id="ucsc_table_direct_test1" tool_type="data_source" version="1.0.0">
+    <description>table browser</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://genome-test.cse.ucsc.edu/cgi-bin/hgTables" check_values="false" method="get">
+        <display>go to UCSC Table Browser $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner" />
+        <param name="tool_id" type="hidden" value="ucsc_table_direct_test1" />
+        <param name="sendToGalaxy" type="hidden" value="1" />
+        <param name="hgta_compressType" type="hidden" value="none" />
+        <param name="hgta_outputType" type="hidden" value="bed" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="URL" remote_name="URL" missing="" />
+        <request_param galaxy_name="dbkey" remote_name="db" missing="?" />
+        <request_param galaxy_name="organism" remote_name="org" missing="unknown species" />
+        <request_param galaxy_name="table" remote_name="hgta_table" missing="unknown table" />
+        <request_param galaxy_name="description" remote_name="hgta_regionType" missing="no description" />
+        <request_param galaxy_name="data_type" remote_name="hgta_outputType" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="primaryTable" />
+                <value galaxy_value="auto" remote_value="selectedFields" />
+                <value galaxy_value="wig" remote_value="wigData" />
+                <value galaxy_value="interval" remote_value="tab" />
+                <value galaxy_value="html" remote_value="hyperlinks" />
+                <value galaxy_value="fasta" remote_value="sequence" />
+                <value galaxy_value="gtf" remote_value="gff" />
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="tabular" label="${tool.name} on ${organism}: ${table} (#if $description == 'range' then $getVar( 'position', 'unknown position' ) else $description#)"/>
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+    <citations>
+        <citation type="doi">10.1093/database/bar011</citation>
+        <citation type="doi">10.1101/gr.229102</citation>
+    </citations>
+</tool>
diff --git a/tools/data_source/upload.py b/tools/data_source/upload.py
new file mode 100644
index 0000000..972d62c
--- /dev/null
+++ b/tools/data_source/upload.py
@@ -0,0 +1,425 @@
+#!/usr/bin/env python
+# Processes uploads from the user.
+
+# WARNING: Changes in this tool (particularly as related to parsing) may need
+# to be reflected in galaxy.web.controllers.tool_runner and galaxy.tools
+from __future__ import print_function
+
+import codecs
+import gzip
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+from json import dumps, loads
+
+from six.moves.urllib.request import urlopen
+
+from galaxy import util
+from galaxy.datatypes import sniff
+from galaxy.datatypes.binary import Binary
+from galaxy.datatypes.registry import Registry
+from galaxy.util import multi_byte
+from galaxy.util.checkers import check_binary, check_bz2, check_gzip, check_html, check_zip
+from galaxy.util.image_util import get_image_ext
+
+
+try:
+    import bz2
+except ImportError:
+    bz2 = None
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def stop_err( msg, ret=1 ):
+    sys.stderr.write( msg )
+    sys.exit( ret )
+
+
+def file_err( msg, dataset, json_file ):
+    json_file.write( dumps( dict( type='dataset',
+                                  ext='data',
+                                  dataset_id=dataset.dataset_id,
+                                  stderr=msg ) ) + "\n" )
+    # never remove a server-side upload
+    if dataset.type in ( 'server_dir', 'path_paste' ):
+        return
+    try:
+        os.remove( dataset.path )
+    except OSError:
+        pass
+
+
+def safe_dict(d):
+    """
+    Recursively clone json structure with UTF-8 dictionary keys
+    http://mellowmachines.com/blog/2009/06/exploding-dictionary-with-unicode-keys-as-python-arguments/
+    """
+    if isinstance(d, dict):
+        return dict([(k.encode('utf-8'), safe_dict(v)) for k, v in d.items()])
+    elif isinstance(d, list):
+        return [safe_dict(x) for x in d]
+    else:
+        return d
+
+
+def parse_outputs( args ):
+    rval = {}
+    for arg in args:
+        id, files_path, path = arg.split( ':', 2 )
+        rval[int( id )] = ( path, files_path )
+    return rval
+
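parse_outputs() expects each positional argument in the form 'id:files_path:path'; because the split on ':' is capped at two, the final path may itself contain colons. For example (paths illustrative):

    >>> parse_outputs(['7:/jobs/7/files:/jobs/7/dataset_7.dat'])
    {7: ('/jobs/7/dataset_7.dat', '/jobs/7/files')}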
+
+def add_file( dataset, registry, json_file, output_path ):
+    data_type = None
+    line_count = None
+    converted_path = None
+    stdout = None
+    link_data_only = dataset.get( 'link_data_only', 'copy_files' )
+    in_place = dataset.get( 'in_place', True )
+    purge_source = dataset.get( 'purge_source', True )
+    try:
+        ext = dataset.file_type
+    except AttributeError:
+        file_err( 'Unable to process uploaded file, missing file_type parameter.', dataset, json_file )
+        return
+
+    if dataset.type == 'url':
+        try:
+            page = urlopen( dataset.path )  # page will be .close()ed by sniff methods
+            temp_name, dataset.is_multi_byte = sniff.stream_to_file( page, prefix='url_paste', source_encoding=util.get_charset_from_http_headers( page.headers ) )
+        except Exception as e:
+            file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
+            return
+        dataset.path = temp_name
+    # See if we have an empty file
+    if not os.path.exists( dataset.path ):
+        file_err( 'Uploaded temporary file (%s) does not exist.' % dataset.path, dataset, json_file )
+        return
+    if not os.path.getsize( dataset.path ) > 0:
+        file_err( 'The uploaded file is empty', dataset, json_file )
+        return
+    if not dataset.type == 'url':
+        # Already set is_multi_byte above if type == 'url'
+        try:
+            dataset.is_multi_byte = multi_byte.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
+        except UnicodeDecodeError:
+            dataset.is_multi_byte = False
+    # Is dataset an image?
+    i_ext = get_image_ext( dataset.path )
+    if i_ext:
+        ext = i_ext
+        data_type = ext
+    # Is dataset content multi-byte?
+    elif dataset.is_multi_byte:
+        data_type = 'multi-byte char'
+        ext = sniff.guess_ext( dataset.path, registry.sniff_order, is_multi_byte=True )
+    # Is dataset content supported sniffable binary?
+    else:
+        # FIXME: This ignores the declared sniff order in datatype_conf.xml
+        # resulting in improper behavior
+        type_info = Binary.is_sniffable_binary( dataset.path )
+        if type_info:
+            data_type = type_info[0]
+            ext = type_info[1]
+    if not data_type:
+        root_datatype = registry.get_datatype_by_extension( dataset.file_type )
+        if getattr( root_datatype, 'compressed', False ):
+            data_type = 'compressed archive'
+            ext = dataset.file_type
+        else:
+            # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
+            is_gzipped, is_valid = check_gzip( dataset.path )
+            if is_gzipped and not is_valid:
+                file_err( 'The gzipped uploaded file contains inappropriate content', dataset, json_file )
+                return
+            elif is_gzipped and is_valid:
+                if link_data_only == 'copy_files':
+                    # We need to uncompress the temp_name file, but BAM files must remain compressed in the BGZF format
+                    CHUNK_SIZE = 2 ** 20  # 1Mb
+                    fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
+                    gzipped_file = gzip.GzipFile( dataset.path, 'rb' )
+                    while 1:
+                        try:
+                            chunk = gzipped_file.read( CHUNK_SIZE )
+                        except IOError:
+                            os.close( fd )
+                            os.remove( uncompressed )
+                            file_err( 'Problem decompressing gzipped data', dataset, json_file )
+                            return
+                        if not chunk:
+                            break
+                        os.write( fd, chunk )
+                    os.close( fd )
+                    gzipped_file.close()
+                    # Replace the gzipped file with the decompressed file if it's safe to do so
+                    if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
+                        dataset.path = uncompressed
+                    else:
+                        shutil.move( uncompressed, dataset.path )
+                    os.chmod(dataset.path, 0o644)
+                # Strip the '.gz' suffix (rstrip strips characters, not a suffix)
+                if dataset.name.endswith( '.gz' ):
+                    dataset.name = dataset.name[ :-3 ]
+                data_type = 'gzip'
+            if not data_type and bz2 is not None:
+                # See if we have a bz2 file, much like gzip
+                is_bzipped, is_valid = check_bz2( dataset.path )
+                if is_bzipped and not is_valid:
+                    file_err( 'The bz2-compressed uploaded file contains inappropriate content', dataset, json_file )
+                    return
+                elif is_bzipped and is_valid:
+                    if link_data_only == 'copy_files':
+                        # We need to uncompress the temp_name file
+                        CHUNK_SIZE = 2 ** 20  # 1Mb
+                        fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_bunzip2_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
+                        bzipped_file = bz2.BZ2File( dataset.path, 'rb' )
+                        while 1:
+                            try:
+                                chunk = bzipped_file.read( CHUNK_SIZE )
+                            except IOError:
+                                os.close( fd )
+                                os.remove( uncompressed )
+                                file_err( 'Problem decompressing bz2 compressed data', dataset, json_file )
+                                return
+                            if not chunk:
+                                break
+                            os.write( fd, chunk )
+                        os.close( fd )
+                        bzipped_file.close()
+                        # Replace the bzipped file with the decompressed file if it's safe to do so
+                        if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
+                            dataset.path = uncompressed
+                        else:
+                            shutil.move( uncompressed, dataset.path )
+                        os.chmod(dataset.path, 0o644)
+                    # Strip the '.bz2' suffix (rstrip strips characters, not a suffix)
+                    if dataset.name.endswith( '.bz2' ):
+                        dataset.name = dataset.name[ :-4 ]
+                    data_type = 'bz2'
+            if not data_type:
+                # See if we have a zip archive
+                is_zipped = check_zip( dataset.path )
+                if is_zipped:
+                    if link_data_only == 'copy_files':
+                        CHUNK_SIZE = 2 ** 20  # 1Mb
+                        uncompressed = None
+                        uncompressed_name = None
+                        unzipped = False
+                        z = zipfile.ZipFile( dataset.path )
+                        for name in z.namelist():
+                            if name.endswith('/'):
+                                continue
+                            if unzipped:
+                                stdout = 'ZIP file contained more than one file, only the first file was added to Galaxy.'
+                                break
+                            fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_zip_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
+                            if sys.version_info[:2] >= ( 2, 6 ):
+                                zipped_file = z.open( name )
+                                while 1:
+                                    try:
+                                        chunk = zipped_file.read( CHUNK_SIZE )
+                                    except IOError:
+                                        os.close( fd )
+                                        os.remove( uncompressed )
+                                        file_err( 'Problem decompressing zipped data', dataset, json_file )
+                                        return
+                                    if not chunk:
+                                        break
+                                    os.write( fd, chunk )
+                                os.close( fd )
+                                zipped_file.close()
+                                uncompressed_name = name
+                                unzipped = True
+                            else:
+                                # python < 2.6 doesn't have a way to read members in chunks(!)
+                                try:
+                                    outfile = open( uncompressed, 'wb' )
+                                    outfile.write( z.read( name ) )
+                                    outfile.close()
+                                    uncompressed_name = name
+                                    unzipped = True
+                                except IOError:
+                                    os.close( fd )
+                                    os.remove( uncompressed )
+                                    file_err( 'Problem decompressing zipped data', dataset, json_file )
+                                    return
+                        z.close()
+                        # Replace the zipped file with the decompressed file if it's safe to do so
+                        if uncompressed is not None:
+                            if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
+                                dataset.path = uncompressed
+                            else:
+                                shutil.move( uncompressed, dataset.path )
+                            os.chmod(dataset.path, 0o644)
+                            dataset.name = uncompressed_name
+                    data_type = 'zip'
+            if not data_type:
+                # TODO refactor this logic.  check_binary isn't guaranteed to be
+                # correct since it only looks at whether the first 100 chars are
+                # printable or not.  If someone specifies a known unsniffable
+                # binary datatype and check_binary fails, the file gets mangled.
+                if check_binary( dataset.path ) or Binary.is_ext_unsniffable(dataset.file_type):
+                    # We have a binary dataset, but it is not Bam, Sff or Pdf
+                    data_type = 'binary'
+                    # binary_ok = False
+                    parts = dataset.name.split( "." )
+                    if len( parts ) > 1:
+                        ext = parts[-1].strip().lower()
+                        if not Binary.is_ext_unsniffable(ext):
+                            file_err( 'The uploaded binary file contains inappropriate content', dataset, json_file )
+                            return
+                        elif Binary.is_ext_unsniffable(ext) and dataset.file_type != ext:
+                            err_msg = "You must manually set the 'File Format' to '%s' when uploading %s files." % ( ext.capitalize(), ext )
+                            file_err( err_msg, dataset, json_file )
+                            return
+            if not data_type:
+                # We must have a text file
+                if check_html( dataset.path ):
+                    file_err( 'The uploaded file contains inappropriate HTML content', dataset, json_file )
+                    return
+            if data_type != 'binary':
+                if link_data_only == 'copy_files':
+                    if dataset.type in ( 'server_dir', 'path_paste' ) and data_type not in [ 'gzip', 'bz2', 'zip' ]:
+                        in_place = False
+                    # Convert universal line endings to Posix line endings, but allow the user to turn it off,
+                    # so that it becomes possible to upload gzip, bz2 or zip files with binary data without
+                    # corrupting the content of those files.
+                    if dataset.to_posix_lines:
+                        tmpdir = output_adjacent_tmpdir( output_path )
+                        tmp_prefix = 'data_id_%s_convert_' % dataset.dataset_id
+                        if dataset.space_to_tab:
+                            line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place, tmp_dir=tmpdir, tmp_prefix=tmp_prefix )
+                        else:
+                            line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place, tmp_dir=tmpdir, tmp_prefix=tmp_prefix )
+                if dataset.file_type == 'auto':
+                    ext = sniff.guess_ext( dataset.path, registry.sniff_order )
+                else:
+                    ext = dataset.file_type
+                data_type = ext
+    # Resolve the final extension and datatype, then save job info for the framework
+    if ext == 'auto' and dataset.ext:
+        ext = dataset.ext
+    if ext == 'auto':
+        ext = 'data'
+    datatype = registry.get_datatype_by_extension( ext )
+    if dataset.type in ( 'server_dir', 'path_paste' ) and link_data_only == 'link_to_files':
+        # Never alter a file that will not be copied to Galaxy's local file store.
+        if datatype.dataset_content_needs_grooming( dataset.path ):
+            err_msg = 'The uploaded files need grooming, so change your <b>Copy data into Galaxy?</b> selection to be ' + \
+                '<b>Copy files into Galaxy</b> instead of <b>Link to files without copying into Galaxy</b> so grooming can be performed.'
+            file_err( err_msg, dataset, json_file )
+            return
+    if link_data_only == 'copy_files' and dataset.type in ( 'server_dir', 'path_paste' ) and data_type not in [ 'gzip', 'bz2', 'zip' ]:
+        # Move the dataset to its "real" path
+        if converted_path is not None:
+            shutil.copy( converted_path, output_path )
+            try:
+                os.remove( converted_path )
+            except OSError:
+                pass
+        else:
+            # This should not happen, but it's here just in case
+            shutil.copy( dataset.path, output_path )
+    elif link_data_only == 'copy_files':
+        if purge_source:
+            shutil.move( dataset.path, output_path )
+        else:
+            shutil.copy( dataset.path, output_path )
+    # Write the job info
+    stdout = stdout or 'uploaded %s file' % data_type
+    info = dict( type='dataset',
+                 dataset_id=dataset.dataset_id,
+                 ext=ext,
+                 stdout=stdout,
+                 name=dataset.name,
+                 line_count=line_count )
+    if dataset.get('uuid', None) is not None:
+        info['uuid'] = dataset.get('uuid')
+    json_file.write( dumps( info ) + "\n" )
+
+    if link_data_only == 'copy_files' and datatype.dataset_content_needs_grooming( output_path ):
+        # Groom the dataset content if necessary
+        datatype.groom_dataset_content( output_path )
+
+
+def add_composite_file( dataset, json_file, output_path, files_path ):
+    if dataset.composite_files:
+        os.mkdir( files_path )
+        for name, value in dataset.composite_files.items():
+            value = util.bunch.Bunch( **value )
+            if dataset.composite_file_paths[ value.name ] is None and not value.optional:
+                file_err( 'A required composite data file was not provided (%s)' % name, dataset, json_file )
+                break
+            elif dataset.composite_file_paths[value.name] is not None:
+                dp = dataset.composite_file_paths[value.name][ 'path' ]
+                isurl = dp.find('://') != -1  # TODO: fix this crude URL check
+                if isurl:
+                    try:
+                        temp_name, dataset.is_multi_byte = sniff.stream_to_file( urlopen( dp ), prefix='url_paste' )
+                    except Exception as e:
+                        file_err( 'Unable to fetch %s\n%s' % ( dp, str( e ) ), dataset, json_file )
+                        return
+                    dataset.path = temp_name
+                    dp = temp_name
+                if not value.is_binary:
+                    tmpdir = output_adjacent_tmpdir( output_path )
+                    tmp_prefix = 'data_id_%s_convert_' % dataset.dataset_id
+                    if dataset.composite_file_paths[ value.name ].get( 'space_to_tab', value.space_to_tab ):
+                        sniff.convert_newlines_sep2tabs( dp, tmp_dir=tmpdir, tmp_prefix=tmp_prefix )
+                    else:
+                        sniff.convert_newlines( dp, tmp_dir=tmpdir, tmp_prefix=tmp_prefix )
+                shutil.move( dp, os.path.join( files_path, name ) )
+    # Move the dataset to its "real" path
+    shutil.move( dataset.primary_file, output_path )
+    # Write the job info
+    info = dict( type='dataset',
+                 dataset_id=dataset.dataset_id,
+                 stdout='uploaded %s file' % dataset.file_type )
+    json_file.write( dumps( info ) + "\n" )
+
+
+def output_adjacent_tmpdir( output_path ):
+    """ For temp files that will ultimately be moved to output_path anyway,
+    create the file directly in output_path's directory so shutil.move
+    will work optimally (a same-filesystem rename rather than a copy).
+    """
+    return os.path.dirname( output_path )
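+
+# Example usage (a sketch with hypothetical paths, not part of the upload flow):
+# a temp file created next to the final output stays on the same filesystem,
+# so a later shutil.move() degenerates into a cheap rename:
+#     tmpdir = output_adjacent_tmpdir( '/galaxy/files/000/dataset_1.dat' )
+#     fd, tmp_path = tempfile.mkstemp( dir=tmpdir )  # lands in /galaxy/files/000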
+
+
+def __main__():
+
+    if len( sys.argv ) < 4:
+        print('usage: upload.py <root> <datatypes_conf> <json paramfile> <output spec> ...', file=sys.stderr)
+        sys.exit( 1 )
+
+    output_paths = parse_outputs( sys.argv[4:] )
+    json_file = open( 'galaxy.json', 'w' )
+
+    registry = Registry()
+    registry.load_datatypes( root_dir=sys.argv[1], config=sys.argv[2] )
+
+    for line in open( sys.argv[3], 'r' ):
+        dataset = loads( line )
+        dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+        try:
+            output_path = output_paths[int( dataset.dataset_id )][0]
+        except Exception:
+            print('Output path for dataset %s not found on command line' % dataset.dataset_id, file=sys.stderr)
+            sys.exit( 1 )
+        if dataset.type == 'composite':
+            files_path = output_paths[int( dataset.dataset_id )][1]
+            add_composite_file( dataset, json_file, output_path, files_path )
+        else:
+            add_file( dataset, registry, json_file, output_path )
+
+    # clean up paramfile
+    # TODO: this will not work when running as the actual user unless the
+    # parent directory is writable by the user.
+    try:
+        os.remove( sys.argv[3] )
+    except OSError:
+        pass
+
+
+if __name__ == '__main__':
+    __main__()
diff --git a/tools/data_source/upload.xml b/tools/data_source/upload.xml
new file mode 100644
index 0000000..6d08c75
--- /dev/null
+++ b/tools/data_source/upload.xml
@@ -0,0 +1,232 @@
+<?xml version="1.0"?>
+
+<tool name="Upload File" id="upload1" version="1.1.4" workflow_compatible="false">
+  <description>
+    from your computer  
+  </description>
+  <action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
+  <requirements>
+      <requirement type="package">samtools</requirement>
+  </requirements>
+  <command interpreter="python">
+      upload.py $GALAXY_ROOT_DIR $GALAXY_DATATYPES_CONF_FILE $paramfile
+    #set $outnum = 0
+    #while $varExists('output%i' % $outnum):
+        #set $output = $getVar('output%i' % $outnum)
+        #set $outnum += 1
+        #set $file_name = $output.file_name
+        ## FIXME: This is not future-proof for other uses of external_filename (other than for use by the library upload's "link data" feature)
+        #if $output.dataset.dataset.external_filename:
+            #set $file_name = "None"
+        #end if
+        ${output.dataset.dataset.id}:${output.files_path}:${file_name}
+    #end while
+  </command>
+  <inputs nginx_upload="true">
+    <param name="file_type" type="select" label="File Format" help="Which format? See help below">
+      <options from_parameter="tool.app.datatypes_registry.upload_file_formats" transform_lines="[ &quot;%s%s%s&quot; % ( line, self.separator, line ) for line in obj ]">
+        <column name="value" index="1"/>
+        <column name="name" index="0"/>
+        <filter type="sort_by" column="0"/>
+        <filter type="add_value" name="Auto-detect" value="auto" index="0"/>
+      </options>
+    </param>
+    <param name="async_datasets" type="hidden" value="None"/>
+    <upload_dataset name="files" title="Specify Files for Dataset" file_type_name="file_type" metadata_ref="files_metadata">
+        <param name="file_data" type="file" size="30" label="File" ajax-upload="true" help="TIP: Due to browser limitations, uploading files larger than 2GB is guaranteed to fail.  To upload large files, use the URL method (below) or FTP (if enabled by the site administrator).">
+      </param>
+      <param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/> 
+      <param name="ftp_files" type="ftpfile" label="Files uploaded via FTP"/>
+      <!-- Swap the following parameter for the select one that follows to
+           enable the to_posix_lines option in the Web GUI. See Bitbucket
+           Pull Request 171 for more information. -->
+      <param name="uuid" type="hidden" required="False" />
+      <param name="to_posix_lines" type="hidden" value="Yes" />
+      <!--
+      <param name="to_posix_lines" type="select" display="checkboxes" multiple="True" label="Convert universal line endings to Posix line endings" help="Turn this option off if you upload a gzip, bz2 or zip archive which contains a binary file." value="Yes"> 
+        <option value="Yes" selected="true">Yes</option>
+      </param>
+      -->
+      <param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand."> 
+        <option value="Yes">Yes</option>
+      </param>
+      <param name="NAME" type="hidden" help="Name for dataset in upload"></param>
+    </upload_dataset>
+    <param name="dbkey" type="genomebuild" label="Genome" />
+    <conditional name="files_metadata" value_from="self:app.datatypes_registry.get_upload_metadata_params" value_ref="file_type" value_ref_in_group="False" />
+    <!-- <param name="other_dbkey" type="text" label="Or user-defined Genome" /> -->
+  </inputs>
+  <help>
+  
+**Auto-detect**
+
+The system will attempt to detect Axt, Fasta, Fastqsolexa, Gff, Gff3, Html, Lav, Maf, Tabular, Wiggle, Bed and Interval (Bed with headers) formats. If your file is not detected properly as one of the known formats, it most likely means that it has some format problems (e.g., different number of columns on different rows). You can still coerce the system to set your data to the format you think it should be.  You can also upload compressed files, which will automatically be decompressed. 
+
+-----
+
+**Ab1**
+
+A binary sequence file in 'ab1' format with a '.ab1' file extension.  You must manually select this 'File Format' when uploading the file.
+
+-----
+
+**Axt**
+
+blastz pairwise alignment format.  Each alignment block in an axt file contains three lines: a summary line and 2 sequence lines.  Blocks are separated from one another by blank lines.  The summary line contains chromosomal position and size information about the alignment. It consists of 9 required fields.
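+
+A representative axt block (coordinates and score are illustrative, not real data)::
+
+    0 chr19 3001012 3001027 chr11 70568380 70568395 - 3500
+    TCAGCTCATAAATCAC
+    TCTGTTCATAAACCAC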
+
+-----
+
+**Bam**
+
+A binary file compressed in the BGZF format with a '.bam' file extension.
+
+-----
+
+**Bed**
+
+* Tab delimited format (tabular)
+* Does not require header line
+* Contains 3 required fields:
+
+  - chrom - The name of the chromosome (e.g. chr3, chrY, chr2_random) or contig (e.g. ctgY1).
+  - chromStart - The starting position of the feature in the chromosome or contig. The first base in a chromosome is numbered 0.
+  - chromEnd - The ending position of the feature in the chromosome or contig. The chromEnd base is not included in the display of the feature. For example, the first 100 bases of a chromosome are defined as chromStart=0, chromEnd=100, and span the bases numbered 0-99.
+
+* May contain 9 additional optional BED fields:
+
+  - name - Defines the name of the BED line. This label is displayed to the left of the BED line in the Genome Browser window when the track is open to full display mode or directly to the left of the item in pack mode.
+  - score - A score between 0 and 1000. If the track line useScore attribute is set to 1 for this annotation data set, the score value will determine the level of gray in which this feature is displayed (higher numbers = darker gray).
+  - strand - Defines the strand - either '+' or '-'.
+  - thickStart - The starting position at which the feature is drawn thickly (for example, the start codon in gene displays).
+  - thickEnd - The ending position at which the feature is drawn thickly (for example, the stop codon in gene displays).
+  - itemRgb - An RGB value of the form R,G,B (e.g. 255,0,0). If the track line itemRgb attribute is set to "On", this RGB value will determine the display color of the data contained in this BED line. NOTE: It is recommended that a simple color scheme (eight colors or fewer) be used with this attribute to avoid overwhelming the color resources of the Genome Browser and your Internet browser.
+  - blockCount - The number of blocks (exons) in the BED line.
+  - blockSizes - A comma-separated list of the block sizes. The number of items in this list should correspond to blockCount.
+  - blockStarts - A comma-separated list of block starts. All of the blockStart positions should be calculated relative to chromStart. The number of items in this list should correspond to blockCount.
+
+* Example::
+
+    chr22 1000 5000 cloneA 960 + 1000 5000 0 2 567,488, 0,3512
+    chr22 2000 6000 cloneB 900 - 2000 6000 0 2 433,399, 0,3601
+
+-----
+
+**Fasta**
+
+A sequence in FASTA format consists of a single-line description, followed by lines of sequence data.  The first character of the description line is a greater-than (">") symbol in the first column.  All lines should be shorter than 80 characters::
+
+    >sequence1
+    atgcgtttgcgtgc
+    gtcggtttcgttgc
+    >sequence2
+    tttcgtgcgtatag
+    tggcgcggtga
+
+-----
+
+**FastqSolexa**
+
+FastqSolexa is the Illumina (Solexa) variant of the Fastq format, which stores sequences and quality scores in a single file::
+
+    @seq1  
+    GACAGCTTGGTTTTTAGTGAGTTGTTCCTTTCTTT  
+    +seq1  
+    hhhhhhhhhhhhhhhhhhhhhhhhhhPW@hhhhhh  
+    @seq2  
+    GCAATGACGGCAGCAATAAACTCAACAGGTGCTGG  
+    +seq2  
+    hhhhhhhhhhhhhhYhhahhhhWhAhFhSIJGChO
+    
+Or:: 
+
+    @seq1
+    GAATTGATCAGGACATAGGACAACTGTAGGCACCAT
+    +seq1
+    40 40 40 40 35 40 40 40 25 40 40 26 40 9 33 11 40 35 17 40 40 33 40 7 9 15 3 22 15 30 11 17 9 4 9 4
+    @seq2
+    GAGTTCTCGTCGCCTGTAGGCACCATCAATCGTATG
+    +seq2
+    40 15 40 17 6 36 40 40 40 25 40 9 35 33 40 14 14 18 15 17 19 28 31 4 24 18 27 14 15 18 2 8 12 8 11 9
+    
+-----
+
+**Gff**
+
+GFF lines have nine required fields that must be tab-separated.
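+
+The nine fields are seqname, source, feature, start, end, score, strand, frame and group; one illustrative line::
+
+    chr22  exampleSource  CDS  1000  5000  .  +  0  hypothetical_gene_01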
+
+-----
+
+**Gff3**
+
+The GFF3 format addresses the most common extensions to GFF, while preserving backward compatibility with previous formats.
+
+-----
+
+**Interval (Genomic Intervals)**
+
+- Tab delimited format (tabular)
+- File must start with a definition line in the following format (columns may be in any order)::
+
+    #CHROM START END STRAND
+
+- CHROM - The name of the chromosome (e.g. chr3, chrY, chr2_random) or contig (e.g. ctgY1).
+- START - The starting position of the feature in the chromosome or contig. The first base in a chromosome is numbered 0.
+- END - The ending position of the feature in the chromosome or contig. The chromEnd base is not included in the display of the feature. For example, the first 100 bases of a chromosome are defined as chromStart=0, chromEnd=100, and span the bases numbered 0-99.
+- STRAND - Defines the strand - either '+' or '-'.
+
+- Example::
+
+    #CHROM START END   STRAND NAME COMMENT
+    chr1   10    100   +      exon myExon
+    chrX   1000  10050 -      gene myGene
+
+-----
+
+**Lav**
+
+Lav is the primary output format for BLASTZ.  The first line of a .lav file begins with #:lav.
+
+-----
+
+**MAF**
+
+TBA and multiz multiple alignment format.  The first line of a .maf file begins with ##maf. This word is followed by white-space-separated "variable=value" pairs. There should be no white space surrounding the "=".
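+
+A minimal illustrative example (an "s" line gives src, start, size, strand, srcSize and the alignment text)::
+
+    ##maf version=1 scoring=tba.v8
+    a score=23262.0
+    s hg16.chr7    27578828 22 + 158545518 AAAGGGAATGTTAACCAAATGA
+    s panTro1.chr6 28741140 22 + 161576975 AAAGGGAATGTTAACCAAATGA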
+
+-----
+
+**Scf**
+
+A binary sequence file in 'scf' format with a '.scf' file extension.  You must manually select this 'File Format' when uploading the file.
+
+-----
+
+**Sff**
+
+A binary file in 'Standard Flowgram Format' with a '.sff' file extension.
+
+-----
+
+**Tabular (tab delimited)**
+
+Any data in tab delimited format (tabular)
+
+-----
+
+**Table (delimiter-separated)**
+
+Any delimiter-separated tabular data (CSV or TSV).
+
+-----
+
+**Wig**
+
+The wiggle format is line-oriented.  Wiggle data is preceded by a track definition line, which adds a number of options for controlling the default display of this track.
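+
+An illustrative variableStep track::
+
+    track type=wiggle_0 name="example" description="illustrative scores"
+    variableStep chrom=chr19 span=150
+    49304701 10.0
+    49304901 12.5
+    49305401 15.0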
+
+-----
+
+**Other text type**
+
+Any text file
+
+  </help>
+</tool>
diff --git a/tools/data_source/worm_modencode.xml b/tools/data_source/worm_modencode.xml
new file mode 100644
index 0000000..b021f4a
--- /dev/null
+++ b/tools/data_source/worm_modencode.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<tool name="modENCODE worm" id="modENCODEworm" tool_type="data_source" version="1.0.1">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://gbrowse.modencode.org/fgb2/gbrowse/worm" check_values="false" target="_top"> 
+        <display>go to modENCODE worm server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=modENCODEworm" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="dbkey" remote_name="dbkey" missing="ce10" >
+            <value_translation>
+                <value galaxy_value="ce10" remote_value="worm" />
+            </value_translation>
+        </request_param>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&amp;" first_separator="?" join="=">
+                <value name="d" missing="" />
+                <value name="dbkey" missing="ce10" />
+                <value name="q" missing="" />
+                <value name="s" missing="" />
+                <value name="t" missing="" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" />
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" label="${tool.name} on $getVar( 'q', 'unknown position' )"/>
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/wormbase.xml b/tools/data_source/wormbase.xml
new file mode 100644
index 0000000..63e112c
--- /dev/null
+++ b/tools/data_source/wormbase.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<tool name="WormBase" id="wormbase" tool_type="data_source" version="1.0.1">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://www.wormbase.org/tools/genome/gbrowse/c_elegans/" check_values="false" target="_top"> 
+        <display>go to Wormbase server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=wormbase" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&amp;" first_separator="?" join="=">
+                <value name="d" missing="" />
+                <value name="dbkey" missing="" />
+                <value name="q" missing="" />
+                <value name="s" missing="" />
+                <value name="t" missing="" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" />
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" label="${tool.name} on $getVar( 'q', 'unknown position' )"/>
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/wormbase_test.xml b/tools/data_source/wormbase_test.xml
new file mode 100644
index 0000000..72ab934
--- /dev/null
+++ b/tools/data_source/wormbase_test.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<tool name="Wormbase" id="wormbase_test" tool_type="data_source" version="1.0.0">
+    <description>test server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://dev.wormbase.org/db/seq/gbrowse/c_elegans/" check_values="false" target="_top">
+        <display>go to Wormbase test server $GALAXY_URL</display>
+        <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=wormbase_test" />
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="URL" remote_name="URL" missing="">
+            <append_param separator="&amp;" first_separator="?" join="=">
+                <value name="d" missing="" />
+                <value name="dbkey" missing="" />
+                <value name="q" missing="" />
+                <value name="s" missing="" />
+                <value name="t" missing="" />
+            </append_param>
+        </request_param>
+        <request_param galaxy_name="URL_method" remote_name="URL_method" missing="post" />
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" />
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" label="${tool.name} on $getVar( 'q', 'unknown position' )"/>
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/yeastmine.xml b/tools/data_source/yeastmine.xml
new file mode 100644
index 0000000..7ee5a07
--- /dev/null
+++ b/tools/data_source/yeastmine.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+<tool name="YeastMine" id="yeastmine" tool_type="data_source" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://yeastmine.yeastgenome.org/yeastmine/begin.do" check_values="false" method="get"> 
+        <display>go to yeastMine server $GALAXY_URL</display>
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="txt" /> <!-- intermine currently always provides 'txt', make this auto detect -->
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/data_source/zebrafishmine.xml b/tools/data_source/zebrafishmine.xml
new file mode 100644
index 0000000..376d8c2
--- /dev/null
+++ b/tools/data_source/zebrafishmine.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+<tool name="ZebrafishMine" id="zebrafishmine" tool_type="data_source" version="1.0.0">
+    <description>server</description>
+    <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+    <inputs action="http://zebrafishmine.org/begin.do" check_values="false" method="get"> 
+        <display>go to ZebrafishMine server $GALAXY_URL</display>
+    </inputs>
+    <request_param_translation>
+        <request_param galaxy_name="data_type" remote_name="data_type" missing="auto" >
+            <value_translation>
+                <value galaxy_value="auto" remote_value="txt" /> <!-- make txt auto detect -->
+            </value_translation>
+        </request_param>
+    </request_param_translation>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="txt" />
+    </outputs>
+    <options sanitize="False"/>
+</tool>
diff --git a/tools/evolution/add_scores.py b/tools/evolution/add_scores.py
new file mode 100755
index 0000000..f9a4493
--- /dev/null
+++ b/tools/evolution/add_scores.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import sys
+
+from bx.bbi.bigwig_file import BigWigFile
+
+
+def die( message ):
+    print(message, file=sys.stderr)
+    sys.exit(1)
+
+
+def open_or_die( filename, mode='r', message=None ):
+    if message is None:
+        message = 'Error opening %s' % filename
+    try:
+        fh = open( filename, mode )
+    except IOError as err:
+        die( '%s: %s' % ( message, err.strerror ) )
+    return fh
+
+
+class LocationFile( object ):
+    def __init__( self, filename, comment_chars=None, delimiter='\t', key_column=0 ):
+        self.filename = filename
+        if comment_chars is None:
+            self.comment_chars = ( '#', )
+        else:
+            self.comment_chars = tuple( comment_chars )
+        self.delimiter = delimiter
+        self.key_column = key_column
+        self._map = {}
+        self._populate_map()
+
+    def _populate_map( self ):
+        try:
+            with open( self.filename ) as fh:
+                line_number = 0
+                for line in fh:
+                    line_number += 1
+                    line = line.rstrip( '\r\n' )
+                    if not line.startswith( self.comment_chars ):
+                        elems = line.split( self.delimiter )
+                        if len( elems ) <= self.key_column:
+                            die( 'Location file %s line %d: fewer than %d columns' % ( self.filename, line_number, self.key_column + 1 ) )
+                        else:
+                            key = elems.pop( self.key_column )
+                            if key in self._map:
+                                if self._map[key] != elems:
+                                    die( 'Location file %s line %d: duplicate key "%s"' % ( self.filename, line_number, key ) )
+                            else:
+                                self._map[key] = elems
+        except IOError as err:
+            die( 'Error opening location file %s: %s' % ( self.filename, err.strerror ) )
+
+    def get_values( self, key ):
+        if key in self._map:
+            rval = self._map[key]
+            if len( rval ) == 1:
+                return rval[0]
+            else:
+                return rval
+        else:
+            die( 'key "%s" not found in location file %s' % ( key, self.filename ) )
+
+
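+# Example (hypothetical loc entry): a tab-separated line such as
+# "hg18<TAB>/depot/data/phyloP/hg18.bw" lets
+# LocationFile( 'add_scores.loc' ).get_values( 'hg18' ) return that BigWig path.
+
+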
+def main():
+    input_filename, output_filename, loc_filename, loc_key, chrom_col, start_col = sys.argv[1:]
+
+    # open input, output, and bigwig files
+    location_file = LocationFile( loc_filename )
+    bigwig_filename = location_file.get_values( loc_key )
+    bwfh = open_or_die( bigwig_filename, message='Error opening BigWig file %s' % bigwig_filename )
+    bw = BigWigFile( file=bwfh )
+    ifh = open_or_die( input_filename, message='Error opening input file %s' % input_filename )
+    ofh = open_or_die( output_filename, mode='w', message='Error opening output file %s' % output_filename )
+
+    # make column numbers 0-based
+    chrom_col = int( chrom_col ) - 1
+    start_col = int( start_col ) - 1
+    min_cols = max( chrom_col, start_col )
+
+    # add score column to input file
+    line_number = 0
+    for line in ifh:
+        line_number += 1
+        line = line.rstrip( '\r\n' )
+        elems = line.split( '\t' )
+        if len( elems ) > min_cols:
+            chrom = elems[chrom_col].strip()
+            # base-0 position in chrom
+            start = int( elems[start_col] )
+            score_list = bw.get( chrom, start, start + 1 )
+            score_list_len = len( score_list )
+            if score_list_len == 1:
+                beg, end, score = score_list[0]
+                score_val = '%1.3f' % score
+            elif score_list_len == 0:
+                score_val = 'NA'
+            else:
+                die( '%s line %d: chrom=%s, start=%d, score_list_len = %d' % ( input_filename, line_number, chrom, start, score_list_len ) )
+            print('\t'.join( [line, score_val] ), file=ofh)
+        else:
+            print(line, file=ofh)
+
+    bwfh.close()
+    ifh.close()
+    ofh.close()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/evolution/add_scores.xml b/tools/evolution/add_scores.xml
new file mode 100644
index 0000000..c7616b3
--- /dev/null
+++ b/tools/evolution/add_scores.xml
@@ -0,0 +1,106 @@
+<tool id="hgv_add_scores" name="phyloP" version="1.0.0">
+  <description>interspecies conservation scores</description>
+
+  <command interpreter="python">
+    add_scores.py "$input1" "$out_file1" "${GALAXY_DATA_INDEX_DIR}/add_scores.loc" "${input1.metadata.dbkey}" "${input1.metadata.chromCol}" "${input1.metadata.startCol}"
+  </command>
+
+  <inputs>
+    <param format="interval" name="input1" type="data" label="Dataset">
+      <validator type="unspecified_build"/>
+      <validator type="dataset_metadata_in_file" filename="add_scores.loc" metadata_name="dbkey" metadata_column="0" message="Data is currently not available for the specified build."/>
+    </param>
+  </inputs>
+
+  <outputs>
+    <data format="input" name="out_file1" />
+  </outputs>
+
+  <requirements>
+    <requirement type="package">add_scores</requirement>
+  </requirements>
+
+  <tests>
+    <test>
+      <param name="input1" value="add_scores_input1.interval" ftype="interval" dbkey="hg18" />
+      <output name="output" file="add_scores_output1.interval" />
+    </test>
+    <test>
+      <param name="input1" value="add_scores_input2.bed" ftype="interval" dbkey="hg18" />
+      <output name="output" file="add_scores_output2.interval" />
+    </test>
+  </tests>
+
+  <help>
+.. class:: warningmark
+
+This currently works only for builds hg18 and hg19.
+
+-----
+
+**Dataset formats**
+
+The input can be any interval_ format dataset.  The output is also in interval format.
+(`Dataset missing?`_)
+
+.. _interval: ${static_path}/formatHelp.html#interval
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+This tool adds a column that measures interspecies conservation at each SNP 
+position, using conservation scores for primates pre-computed by the 
+phyloP program.  PhyloP performs an exact P-value computation under a 
+continuous Markov substitution model. 
+
+The chromosome and start position
+are used to look up the scores, so if a larger interval is in the input,
+only the score for the first nucleotide is returned.
+
+-----
+
+**Example**
+
+- input file, with SNPs::
+
+    chr22  16440426  14440427  C/T
+    chr22  15494851  14494852  A/G
+    chr22  14494911  14494912  A/T
+    chr22  14550435  14550436  A/G
+    chr22  14611956  14611957  G/T
+    chr22  14612076  14612077  A/G
+    chr22  14668537  14668538  C
+    chr22  14668703  14668704  A/T
+    chr22  14668775  14668776  G
+    chr22  14680074  14680075  A/T
+    etc.
+
+- output file, showing conservation scores for primates::
+
+    chr22  16440426  14440427  C/T  0.509
+    chr22  15494851  14494852  A/G  0.427
+    chr22  14494911  14494912  A/T  NA
+    chr22  14550435  14550436  A/G  NA
+    chr22  14611956  14611957  G/T  -2.142
+    chr22  14612076  14612077  A/G  0.369
+    chr22  14668537  14668538  C    0.419
+    chr22  14668703  14668704  A/T  -1.462
+    chr22  14668775  14668776  G    0.470
+    chr22  14680074  14680075  A/T  0.303
+    etc.
+
+  "NA" means that the phyloP score was not available.
+
+-----
+
+**Reference**
+
+Siepel A, Pollard KS, Haussler D. (2006)
+New methods for detecting lineage-specific selection.
+In Proceedings of the 10th International Conference on Research in Computational
+Molecular Biology (RECOMB 2006), pp. 190-205.
+
+  </help>
+</tool>
diff --git a/tools/evolution/codingSnps.pl b/tools/evolution/codingSnps.pl
new file mode 100755
index 0000000..dd70205
--- /dev/null
+++ b/tools/evolution/codingSnps.pl
@@ -0,0 +1,571 @@
+#!/usr/bin/perl -w 
+use strict;
+
+#########################################################################
+#	codingSnps.pl
+#	This takes a bed file with the names being / separated nts
+#	and a gene bed file with cds start and stop.
+#	It then checks for changes in coding regions, reporting
+#	those that cause a frameshift or substitution in the amino acid.
+#	Output columns:
+#		chrom, start, end, allele as given (amb code translated)
+#		Gene ID from genes file, ref amino acid:variant amino acids,
+#		codon number, (in strand of gene)ref nt, refCodon:variantCodons
+#########################################################################
+
+my $seqFlag = "2bit"; #flag to set sequence type 2bit|nib
+if (!@ARGV or scalar @ARGV < 3) {
+   print "Usage: codingSnps.pl snps.bed genes.bed (/dir/*$seqFlag|Galaxy build= loc=) [chr=# start=# end=# snp=# strand=#|-|+ keepColumns=1 synon=1 unique=1] > codingSnps.txt\n";
+   exit;
+}
+my $uniq = 0; #flag for whether unique positions are wanted
+my $syn = 0;  #flag for whether synonymous changes are wanted rather than non-synonymous
+my $keep = 0; #keep old columns and append new ones
+my $snpFile = shift @ARGV;
+my $geneFile = shift @ARGV;
+my $nibDir = shift @ARGV;  #2bit or nib, depending on flag above
+if ($nibDir eq 'Galaxy') { getGalaxyInfo(); }
+my $col0 = 0; #bed like columns in default positions
+my $col1 = 1;
+my $col2 = 2;
+my $col3 = 3;
+my $strand = -1;
+#column positions 1 based coming in (for Galaxy)
+foreach (@ARGV) {
+   if (/chr=(\d+)/) { $col0 = $1 -1; }
+   elsif (/start=(\d+)/) { $col1 = $1 -1; }
+   elsif (/end=(\d+)/) { $col2 = $1 -1; }
+   elsif (/snp=(\d+)/) { $col3 = $1 -1; }
+   elsif (/keepColumns=1/) { $keep = 1; }
+   elsif (/synon=1/) { $syn = 1; }
+   elsif (/unique=1/) { $uniq = 1; }
+   elsif (/strand=(\d+)/) { $strand = $1 -1; } #0 based column
+   elsif (/strand=-/) { $strand = -99; }  #special case of all minus
+}
+if ($col0 < 0 || $col1 < 0 || $col2 < 0 || $col3 < 0) {
+   print STDERR "ERROR column numbers must be given with origin 1 (1-based)\n";
+   exit 1;
+}
+my @genes; #bed lines for genes, sorted by chrom and start
+my %chrSt; #index in array where each chrom starts
+my %codon; #hash of codon amino acid conversions
+my $ends = 0; #ends vs sizes in bed 11 position, starts relative to chrom
+my $ignoreN = 1; #skip N
+my $origAll; #alleles from input file (before changes for strand)
+
+my %amb = (
+"R" => "A/G",
+"Y" => "C/T",
+"S" => "C/G",
+"W" => "A/T",
+"K" => "G/T",
+"M" => "A/C",
+"B" => "C/G/T",
+"D" => "A/G/T",
+"H" => "A/C/T",
+"V" => "A/C/G",
+"N" => "A/C/G/T"
+);
+fill_codon();
+open(FH, "cat $geneFile | sort -k1,1 -k2,2n |") 
+   or die "Couldn't open and sort $geneFile, $!\n";
+my $i = 0;
+while(<FH>) {
+   chomp;
+   if (/refGene.cdsEnd|ccdsGene.exonEnds/) { $ends = 1; next; }
+   push(@genes, "$_");
+   my @f = split(/\t/);
+   if (!exists $chrSt{$f[0]}) { $chrSt{$f[0]} = $i; }
+   $i++;
+}
+close FH or die "Couldn't close $geneFile, $!\n";
+
+if ($ends) { print STDERR "WARNING using block ends rather than sizes\n"; }
+
+#open snps sorted as well
+my $s1 = $col0 + 1; #sort order is origin 1
+my $s2 = $col1 + 1; 
+open(FH, "cat $snpFile | sort -k$s1,$s1 -k$s2,${s2}n |")
+   or die "Couldn't open and sort $snpFile, $!\n";
+$i = 0;
+my @g; #one genes fields, should be used repeatedly
+my %done;
+while(<FH>) {
+   chomp;
+   if (/^\s*#/) { next; } #comment
+   my @s = split(/\t/); #SNP fields
+   if (!@s or !$s[$col0]) { die "ERROR missing SNP data, $_\n"; }
+   my $size = $#s;
+   if ($col0 > $size || $col1 > $size || $col2 > $size || $col3 > $size) {
+      print STDERR "ERROR file has fewer columns than requested, requested columns (0 based) $col0 $col1 $col2 $col3, file has $size\n";
+      exit 1;
+   }
+   if ($strand >= 0 && $strand > $size) { 
+      print STDERR "ERROR file has fewer columns than requested, requested strand in $strand (0 based), file has $size\n";
+      exit 1;
+   }
+   if ($s[$col1] =~ /\D/) { 
+      print STDERR "ERROR the start point must be an integer not $s[$col1]\n";
+      exit 1;
+   }
+   if ($s[$col2] =~ /\D/) {
+      print STDERR "ERROR the end point must be an integer not $s[$col2]\n";
+      exit 1;
+   }
+   if ($s[$col3] eq 'N' && $ignoreN) { next; }
+   if (exists $amb{$s[$col3]}) { $s[$col3] = $amb{$s[$col3]}; }
+   if (($strand >= 0 && $s[$strand] eq '-') or $strand == -99) { 
+      #reverse complement nts
+      $origAll = $s[$col3];
+      $s[$col3] = reverseCompAlleles($s[$col3]);
+   }else { undef $origAll }
+   if (!@g && exists $chrSt{$s[$col0]}) { #need to fetch first gene row
+      $i = $chrSt{$s[$col0]};
+      @g = split(/\t/, $genes[$i]);
+      if (scalar @g < 12) {  
+         print STDERR "ERROR the gene file must be the whole genes in BED format\n";
+         exit 1;
+      }
+   }elsif (!@g) { 
+      next; #no gene for this chrom
+   }elsif ($s[$col0] ne $g[0] && exists $chrSt{$s[$col0]}) { #new chrom 
+      $i = $chrSt{$s[$col0]};
+      @g = split(/\t/, $genes[$i]);
+   }elsif ($s[$col0] ne $g[0]) {
+      next; #no gene for this chrom
+   }elsif ($s[$col1] < $g[1] && $i == $chrSt{$s[$col0]}) {
+      next; #before any genes
+   }elsif ($s[$col1] > $g[2] && ($i == $#genes or $genes[$i+1] !~ $s[$col0])) {
+      next; #after all genes on chr
+   }else {
+      while ($s[$col1] > $g[2] && $i < $#genes) {
+         $i++;
+         @g = split(/\t/, $genes[$i]);
+         if ($s[$col0] ne $g[0]) { last; } #end of chrom
+      }
+      if ($s[$col0] ne $g[0] or $s[$col1] < $g[1] or $s[$col1] > $g[2]) {
+         next; #no overlap with genes
+      }
+   }
+
+   processSnp(\@s, \@g);
+   if ($uniq && exists $done{"$s[$col0] $s[$col1] $s[$col2]"}) { next; }
+
+   my $k = $i + 1; #check for more genes without losing data of first
+   if ($k <= $#genes) {
+      my @g2 = split(/\t/, $genes[$k]);
+      while (@g2 && $k <= $#genes) {
+         @g2 = split(/\t/, $genes[$k]);
+         if ($s[$col0] ne $g2[0]) {
+            undef @g2;
+            last; #not same chrom
+         }else {
+            while ($s[$col1] > $g2[2] && $k < $#genes) {
+               $k++;
+               @g2 = split(/\t/, $genes[$k]);
+               if ($s[$col0] ne $g2[0]) { last; } #end of chrom
+            }
+            if ($s[$col0] ne $g2[0] or $s[$col1] < $g2[1] or $s[$col1] > $g2[2]) {
+               undef @g2;
+               last; #no overlap with more genes
+            }
+            processSnp(\@s, \@g2);
+            if ($uniq && exists $done{"$s[$col0] $s[$col1] $s[$col2]"}) { last; }
+         }      
+         $k++;
+      }
+   }
+}
+close FH or die "Couldn't close $snpFile, $!\n";
+
+exit;
+
+########################################################################
+sub processSnp {
+   my $sref = shift;
+   my $gref = shift;
+   #overlaps gene, but maybe not coding seq
+   #inside cds
+   if ($sref->[$col1] + 1 < $gref->[6] or $sref->[$col2] > $gref->[7]) {
+      return; #outside of coding 
+   }
+   #now check exon
+   my $i = 0;
+   my @st = split(/,/, $gref->[11]);
+   my @size = split(/,/, $gref->[10]);
+   if (scalar @st != $gref->[9]) { return; } #block count mismatch; can't process this gene
+   my @pos;
+   my $in = 0;
+   for($i = 0; $i < $gref->[9]; $i++) {
+      my $sta = $gref->[1] + $st[$i] + 1; #1 based position
+      my $end = $sta + $size[$i] - 1; #
+      if ($ends) { $end = $size[$i]; $sta = $st[$i] + 1; } #ends instead of sizes
+      if ($end < $gref->[6]) { next; } #utr only
+      if ($sta > $gref->[7]) { next; } #utr only
+      #shorten to coding only
+      if ($sta < $gref->[6]) { $sta = $gref->[6] + 1; }
+      if ($end > $gref->[7]) { $end = $gref->[7]; }
+      if ($sref->[$col1] + 1 >= $sta && $sref->[$col2] <= $end) { $in = 1; }
+      elsif ($sref->[$col1] == $sref->[$col2] && $sref->[$col2] <= $end && $sref->[$col2] >= $sta) { $in = 1; }
+      push(@pos, ($sta .. $end)); #add exon worth of positions
+   }
+   #@pos has coding positions for whole gene (chr coors), 
+   #and $in has whether we need to continue
+   if (!$in) { return; } #not in coding exon
+   if ((scalar @pos) % 3 != 0) { return; } #partial gene? not even codons
+   if ($sref->[$col3] =~ /^-+\/[ACTG]+$/ or $sref->[$col3] =~ /^[ACTG]+\/-+$/ or
+       $sref->[$col3] =~ /^-+$/) { #indel or del
+      my $copy = $sref->[$col3];
+      my $c = ($copy =~ tr/-//);
+      if ($c % 3 == 0) { return; } #not frameshift 
+      #handle bed4 or any interval file
+      if (!$keep) {
+         print "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+         print "\t$gref->[3]\tframeshift\n";
+      }else {
+         my @s = @{$sref};
+         print join("\t", @s), "\t$gref->[3]\tframeshift\n";
+      }
+      $done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
+      return;
+   }elsif ($sref->[$col1] == $sref->[$col2]) { #insertion
+      my $copy = $sref->[$col3];
+      my $c = ($copy =~ tr/ACTG//);
+      if ($c % 3 == 0) { return; } #not frameshift
+      #handle bed4 or any interval file
+      if (!$keep) {
+         print "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+         print "\t$gref->[3]\tframeshift\n";
+      }else {
+         my @s = @{$sref};
+         print join("\t", @s), "\t$gref->[3]\tframeshift\n";
+      }
+      $done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
+      return;
+   }elsif ($sref->[$col3] =~ /-/) { #indel and sub?
+      return; #skip
+   }
+   #check for amino acid substitutions
+   my $s = $sref->[$col1] + 1;
+   my $e = $sref->[$col2];
+   my $len = $sref->[$col2] - $sref->[$col1];
+   if ($gref->[5] eq '-') { 
+      @pos = reverse(@pos); 
+      my $t = $s;
+      $s = $e;
+      $e = $t;
+   }
+   $i = 0;
+   my $found = 0;
+   foreach (@pos) {
+      if ($s == $_) {
+         $found = 1;
+         last;
+      }
+      $i++;
+   }
+   if ($found) {
+      my $fs = $i; #keep original start index
+      #have index where substitution starts
+      my $cp = $i % 3; 
+      $i -= $cp; #i is now first position in codon
+      my $cdNum = int($i / 3) + 1;
+      my $ls = $i;
+      if (!defined $ls) { die "ERROR not defined ls for $fs $sref->[$col2]\n"; }
+      if (!@pos) { die "ERROR not defined array pos\n"; }
+      if (!defined $pos[$ls]) { die "ERROR not defined pos at $ls\n"; }
+      if (!defined $e) { die "ERROR not defined e for $pos[0] $pos[1] $pos[2]\n"; }
+      while ($ls <= $#pos && $pos[$ls] ne $e) { 
+         $ls++; 
+      }
+      my $i2 = $ls + (2 - ($ls % 3));
+      if ($i2 > $#pos) { return; } #not a full codon, partial gene?
+
+      if ($i2 - $i < 2) { die "not a full codon positions $i to $i2 for $sref->[3]\n"; }
+      my $oldnts = getnts($sref->[$col0], @pos[$i..$i2]);
+      if (!$oldnts) { die "Failed to get sequence for $sref->[$col0] $pos[$i] .. $pos[$i2]\n"; }
+      my @vars = split(/\//, $sref->[$col3]);
+      if ($gref->[5] eq '-') { #complement oldnts and revcomp vars
+         $oldnts = compl($oldnts);
+         if (!$oldnts) { return; } #skip this one
+         $oldnts = join('', (reverse(split(/ */, $oldnts))));
+         foreach (@vars) {
+            $_ = reverse(split(/ */)); #needed for indels
+            $_ = compl($_);
+         }
+      }
+      my $r = $fs - $i; #difference in old indexes gives new index
+      my @newnts;
+      my $changed = '';
+      foreach my $v (@vars) {
+         if (!$v or length($v) != 1) { return; } #only simple changes
+         my @new = split(/ */, $oldnts);
+         $changed = splice(@new, $r, $len, split(/ */, $v));
+         #should only change single nt
+         push(@newnts, join("", @new));
+      }
+      #now compute amino acids
+      my $oldaa = getaa($oldnts);
+      my $codon = "$oldnts:";
+      my @newaa;
+      my $change = 0; #flag for if there is a change
+      foreach my $v (@newnts) {
+         my $t = getaa($v);
+         if ($t ne $oldaa) { $change = 1; }
+         push(@newaa, "$t");
+         $codon .= "$v/";
+      }
+      $codon =~ s/\/$//; 
+      if (!$change && $syn) { 
+          if (!$keep) {
+             print "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$sref->[$col3]";
+             print "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\t$cdNum\t$changed\t$codon\n";
+          }else {
+             my @s = @{$sref};
+             print join("\t", @s), 
+                   "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\t$cdNum\t$changed\t$codon\n";
+          }
+          $done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
+          return;
+      }elsif ($syn) { return; } #only want synonymous changes
+      if (!$change) { return; } #no change in amino acids
+      if (!$keep) {
+         my $a = $sref->[$col3];
+         if (($strand >= 0 && $origAll) or $strand == -99) { $a = $origAll; }
+         print "$sref->[$col0]\t$sref->[$col1]\t$sref->[$col2]\t$a";
+         #my $minus = $changed; #in case minus strand and change back
+         #if ($gref->[5] eq '-') { $changed = compl($changed); } #use plus for ref
+         if (!$changed) { return; } #skip this one
+         print "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\t$cdNum\t$changed\t$codon\n";
+      }else {
+         my @s = @{$sref};
+         if (($strand >= 0 && $origAll) or $strand == -99) { $s[$col3] = $origAll; }
+         print join("\t", @s);
+         #my $minus = $changed; #in case minus strand and change back
+         #if ($gref->[5] eq '-') { $changed = compl($changed); } #use plus for ref
+         if (!$changed) { return; } #skip this one
+         print "\t$gref->[3]\t$oldaa:", join("/", @newaa), "\t$cdNum\t$changed\t$codon\n";
+      }
+      $done{"$sref->[$col0] $sref->[$col1] $sref->[$col2]"}++;
+   }
+}
+
+sub getnts {
+   my $chr = shift;
+   my @pos = @_; #list of positions not necessarily in order
+   #list may be reversed or have gaps(introns), at least 3 bps
+   my $seq = '';
+   if (scalar @pos < 3) { die "too small region for $chr $pos[0]\n"; }
+   if ($pos[0] < $pos[1]) { #not reversed
+      my $s = $pos[0];
+      for(my $i = 1; $i <= $#pos; $i++) {
+         if ($pos[$i] == $pos[$i-1] + 1) { next; }
+         if ($seqFlag eq '2bit') { 
+            $seq .= fetchSeq2bit($chr, $s, $pos[$i-1]);
+         }else {
+            $seq .= fetchSeqNib($chr, $s, $pos[$i-1]);
+         }
+         $s = $pos[$i];
+      }
+      if (length $seq != scalar @pos) { #still need to fetch seq
+         if ($seqFlag eq '2bit') {
+            $seq .= fetchSeq2bit($chr, $s, $pos[$#pos]);
+         }else {
+            $seq .= fetchSeqNib($chr, $s, $pos[$#pos]);
+         }
+      }
+   }else { #reversed
+      my $s = $pos[$#pos];
+      for(my $i = $#pos -1; $i >= 0; $i--) {
+         if ($pos[$i] == $pos[$i+1] + 1) { next; }
+         if ($seqFlag eq '2bit') {
+            $seq .= fetchSeq2bit($chr, $s, $pos[$i+1]);
+         }else {
+            $seq .= fetchSeqNib($chr, $s, $pos[$i+1]);
+         }
+         $s = $pos[$i];
+      }
+      if (length $seq != scalar @pos) { #still need to fetch seq
+         if ($seqFlag eq '2bit') {
+            $seq .= fetchSeq2bit($chr, $s, $pos[0]);
+         }else {
+            $seq .= fetchSeqNib($chr, $s, $pos[0]);
+         }
+      }
+   }
+   return $seq;
+}
+
+sub fetchSeq2bit {
+   my $chr = shift;
+   my $st = shift;
+   my $end = shift;
+   my $strand = '+';
+   $st--; #change to UCSC numbering
+   open (BIT, "twoBitToFa -seq=$chr -start=$st -end=$end $nibDir stdout |") or
+      die "Couldn't run twoBitToFa, $!\n";
+   my $seq = '';
+   while (<BIT>) {
+      chomp;
+      if (/^>/) { next; } #header
+      $seq .= uc($_);
+   }
+   close BIT or die "Couldn't finish twoBitToFa on $chr $st $end, $!\n";
+   return $seq;
+}
+
+sub fetchSeqNib {
+   my $chr = shift;
+   my $st = shift;
+   my $end = shift;
+   my $strand = '+';
+   $st--; #change to UCSC numbering
+   open (NIB, "nibFrag -upper $nibDir/${chr}.nib $st $end $strand stdout |") or die "Couldn't run nibFrag, $!\n";
+   my $seq = '';
+   while (<NIB>) {
+      chomp;
+      if (/^>/) { next; } #header
+      $seq .= $_;
+   }
+   close NIB or die "Couldn't finish nibFrag on $chr $st $end, $!\n";
+   return $seq;
+}
+
+sub compl {
+   my $nts = shift;
+   my $comp = '';
+   if (!$nts) { die "ERROR called compl with nts undefined"; }
+   foreach my $n (split(/ */, $nts)) {
+      if ($n eq 'A') { $comp .= 'T'; }
+      elsif ($n eq 'T') { $comp .= 'A'; }
+      elsif ($n eq 'C') { $comp .= 'G'; }
+      elsif ($n eq 'G') { $comp .= 'C'; }
+      elsif ($n eq 'N') { $comp .= 'N'; }
+      elsif ($n eq '-') { $comp .= '-'; } #deletion
+      else { $comp = undef; }
+   }
+   return $comp;
+}
+
+sub reverseCompAlleles {
+   my $all = shift;
+   my @nt = split(/\//, $all);
+   my $rv = '';
+   foreach my $n (@nt) {
+      $n = reverse(split(/ */, $n)); #needed for indels
+      $n = compl($n);
+      $rv .= "$n/";
+   }
+   $rv =~ s/\/$//;
+   return $rv;
+}
+
+sub getaa {
+   my $nts = shift;  #in multiples of 3
+   my $aa = '';
+   my @n = split(/ */, $nts);
+   while (@n) {
+      my @t = splice(@n, 0, 3);
+      my $n = uc(join("", @t));
+      if (!exists $codon{$n}) { $aa .= 'N'; next; }
+      $aa .= $codon{$n};
+   }
+   return $aa;
+}
+
+sub fill_codon {
+$codon{GCA} = 'Ala';
+$codon{GCC} = 'Ala';
+$codon{GCG} = 'Ala';
+$codon{GCT} = 'Ala';
+$codon{CGG} = 'Arg';
+$codon{CGT} = 'Arg';
+$codon{CGC} = 'Arg';
+$codon{AGA} = 'Arg';
+$codon{AGG} = 'Arg';
+$codon{CGA} = 'Arg';
+$codon{AAC} = 'Asn';
+$codon{AAT} = 'Asn';
+$codon{GAC} = 'Asp';
+$codon{GAT} = 'Asp';
+$codon{TGC} = 'Cys';
+$codon{TGT} = 'Cys';
+$codon{CAG} = 'Gln';
+$codon{CAA} = 'Gln';
+$codon{GAA} = 'Glu';
+$codon{GAG} = 'Glu';
+$codon{GGG} = 'Gly';
+$codon{GGA} = 'Gly';
+$codon{GGC} = 'Gly';
+$codon{GGT} = 'Gly';
+$codon{CAC} = 'His';
+$codon{CAT} = 'His';
+$codon{ATA} = 'Ile';
+$codon{ATT} = 'Ile';
+$codon{ATC} = 'Ile';
+$codon{CTA} = 'Leu';
+$codon{CTC} = 'Leu';
+$codon{CTG} = 'Leu';
+$codon{CTT} = 'Leu';
+$codon{TTG} = 'Leu';
+$codon{TTA} = 'Leu';
+$codon{AAA} = 'Lys';
+$codon{AAG} = 'Lys';
+$codon{ATG} = 'Met';
+$codon{TTC} = 'Phe';
+$codon{TTT} = 'Phe';
+$codon{CCT} = 'Pro';
+$codon{CCA} = 'Pro';
+$codon{CCC} = 'Pro';
+$codon{CCG} = 'Pro';
+$codon{TCA} = 'Ser';
+$codon{AGC} = 'Ser';
+$codon{AGT} = 'Ser';
+$codon{TCC} = 'Ser';
+$codon{TCT} = 'Ser';
+$codon{TCG} = 'Ser';
+$codon{TGA} = 'Stop';
+$codon{TAG} = 'Stop';
+$codon{TAA} = 'Stop';
+$codon{ACT} = 'Thr';
+$codon{ACA} = 'Thr';
+$codon{ACC} = 'Thr';
+$codon{ACG} = 'Thr';
+$codon{TGG} = 'Trp';
+$codon{TAT} = 'Tyr';
+$codon{TAC} = 'Tyr';
+$codon{GTC} = 'Val';
+$codon{GTA} = 'Val';
+$codon{GTG} = 'Val';
+$codon{GTT} = 'Val';
+}
+
+sub getGalaxyInfo {
+   my $build;
+   my $locFile;
+   foreach (@ARGV) {
+      if (/build=(.*)/) { $build = $1; }
+      elsif (/loc=(.*)/) { $locFile = $1; }
+   }
+   if (!$build or !$locFile) {
+      print STDERR "ERROR missing build or locfile for Galaxy input\n";
+      exit 1;
+   }
+   # read $locFile to get $nibDir (ignoring comments)
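+   # A loc line is tab-separated "build<TAB>path", e.g. (hypothetical):
+   #    hg18    /depot/data2/galaxy/hg18.2bit
+   # where path may be a directory containing $build.2bit or a full .2bit filename.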
+   open(LF, "< $locFile") || die "open($locFile): $!\n";
+   while(<LF>) {
+      s/#.*$//;
+      s/(?:^\s+|\s+$)//g;
+      next if (/^$/);
+   
+      my @t = split(/\t/);
+      if ($t[0] eq $build) { $nibDir = $t[1]; }
+   }
+   close(LF);
+   if ($nibDir eq 'Galaxy') {
+      print STDERR "Failed to find sequence directory in locfile $locFile\n";
+   }
+   # lparsons: allow specification of full filename in loc file for greater flexibility
+   unless ($nibDir =~ /(.*)\.2bit$/) { $nibDir .= "/$build.2bit"; }
+   #$nibDir .= "/$build.2bit";  #we want full path and filename
+}
+
diff --git a/tools/evolution/codingSnps.xml b/tools/evolution/codingSnps.xml
new file mode 100644
index 0000000..345a993
--- /dev/null
+++ b/tools/evolution/codingSnps.xml
@@ -0,0 +1,177 @@
+<tool id="hgv_codingSnps" name="aaChanges" version="1.0.0">
+  <description>amino-acid changes caused by a set of SNPs</description>
+
+  <command interpreter="perl">
+    codingSnps.pl $input1 $input2 Galaxy build=${input1.metadata.dbkey} loc=${GALAXY_DATA_INDEX_DIR}/codingSnps.loc chr=${input1.metadata.chromCol} start=${input1.metadata.startCol} end=${input1.metadata.endCol} snp=$col1 keepColumns=$keep strand=${strand_source.strand_col} unique=$uniqpos > $out_file1
+  </command>
+
+  <inputs>
+    <param format="interval" name="input1" type="data" label="SNP dataset">
+      <validator type="dataset_metadata_in_file" filename="codingSnps.loc" metadata_name="dbkey" metadata_column="0" message="Sequences are not currently available for the specified build." split="\t" />
+    </param>
+    <param name="col1" type="data_column" data_ref="input1" label="Column with SNPs" />
+    <param format="interval" name="input2" type="data" label="Gene dataset">
+      <validator type="dataset_metadata_in_file" filename="codingSnps.loc" metadata_name="dbkey" metadata_column="0" message="Sequences are not currently available for the specified build." split="\t" />
+    </param>
+    <param name="keep" type="select" label="Keep columns from SNP dataset">
+      <option value="0" selected="true">No</option>
+      <option value="1">Yes</option>
+    </param>
+    <param name="uniqpos" type="select" label="Only report each SNP position once">
+      <option value="1" selected="true">Yes</option>
+      <option value="0">No</option>
+    </param>
+    <conditional name="strand_source">
+      <param name="strand_choice" type="select" label="Strand info">
+        <option value="data_column">a column in the dataset</option>
+        <option value="all_pos" selected="true">all on sense/forward/+ strand</option>
+        <option value="all_neg">all on antisense/reverse/- strand</option>
+      </param>
+      <when value="data_column">
+        <param name="strand_col" type="data_column" data_ref="input1" label="Column with strand"/>
+      </when>
+      <when value="all_pos">
+        <param name="strand_col" type="hidden" value="+"/>
+      </when>
+      <when value="all_neg">
+        <param name="strand_col" type="hidden" value="-"/>
+      </when>
+    </conditional>
+  </inputs>
+
+  <outputs>
+    <data format="interval" name="out_file1" />
+  </outputs>
+
+  <code file="codingSnps_filter.py"></code>
+
+  <requirements>
+    <requirement type="package">gnu_coreutils</requirement>
+    <requirement type="package">ucsc_tools</requirement>
+  </requirements>
+
+  <tests>
+    <test>
+      <param name="input1" ftype="interval" value="codingSnps_input1.interval" dbkey="hg18" />
+      <param name="col1" value="6" />
+      <param name="input2" ftype="interval" value="codingSnps_inputGenes1.bed" dbkey="hg18" />
+      <param name="strand_choice" value="all_pos" />
+      <param name="strand_col" value="+" />
+      <param name="uniqpos" value="0" />
+      <output name="output" file="codingSnps_output1.interval" />
+    </test>
+    <test>
+      <param name="input1" ftype="interval" value="codingSnps_input2.interval" dbkey="hg18" />
+      <param name="input2" ftype="interval" value="codingSnps_inputGenes2.bed" dbkey="hg18" />
+      <param name="col1" value="4" />
+      <param name="strand_choice" value="all_pos" />
+      <param name="strand_col" value="+" />
+      <param name="uniqpos" value="0" />
+      <output name="output" file="codingSnps_output2.interval" />
+    </test>
+    <test>
+      <param name="input1" ftype="interval" value="codingSnps_input2.interval" dbkey="hg18" />
+      <param name="input2" ftype="interval" value="codingSnps_inputGenes2.bed" dbkey="hg18" />
+      <param name="col1" value="4" />
+      <param name="strand_choice" value="all_neg" />
+      <param name="strand_col" value="-" />
+      <output name="output" file="codingSnps_output3.interval" />
+    </test>
+  </tests>
+
+  <help>
+.. class:: infomark
+
+The build must be defined for the input files and must be the same for both files.
+Use the pencil icon to add the build to the files if necessary.
+
+-----
+
+**Dataset formats**
+
+The SNP dataset is in interval_ format, with a column of SNPs as described below.
+The gene dataset is in BED_ format with 12 columns.  The output dataset is also interval.
+(`Dataset missing?`_)
+
+.. _interval: ${static_path}/formatHelp.html#interval
+.. _BED: ${static_path}/formatHelp.html#bed
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+This tool identifies which SNPs create amino-acid changes in the specified 
+coding regions.  The first input file contains the SNPs and must be an interval file.
+It needs the chromosome, start, and end position as well as the SNP.  The 
+SNP can be given using ambiguous-nucleotide symbols or a list of two to four
+alleles separated by '/'.  Any other columns in the first input file will not be
+used but will be kept for the output.  The second input file contains the genes
+to be used for defining the coding regions.  This file must be a BED file with
+the first 12 columns standard BED columns.  The output is the same as the
+first input file with
+several columns added: the name field from the line of the gene input file
+used, the amino acids, the codon number, the reference nucleotide that 
+changed in the amino acid (in the same strand as the gene), and the codons 
+that go with the amino acids.
+The amino acids are listed with the reference amino acid first, then a colon,
+and then the amino acids for the alleles.  If a SNP is not in a coding region
+or is synonymous then it is not included in the output file.
+
+-----
+
+**Example**
+
+- first input file, with SNPs::
+
+    chr22  15660821  15660822  A/G
+    chr22  15825725  15825726  G/T
+    chr22  15827035  15827036  G
+    chr22  15827135  15827136  C/G
+    chr22  15830928  15830929  A/G
+    chr22  15830951  15830952  G
+    chr22  15830955  15830956  C/T
+    chr22  15848885  15848886  C/T
+    chr22  15849048  15849049  A/C
+    chr22  15919711  15919712  A/G
+    etc.
+
+  or, indicating polymorphisms using ambiguous-nucleotide symbols::
+
+    chr22  15660821  15660822  R
+    chr22  15825725  15825726  K
+    chr22  15827035  15827036  G
+    chr22  15827135  15827136  S
+    chr22  15830928  15830929  R
+    chr22  15830951  15830952  G
+    chr22  15830955  15830956  Y
+    chr22  15848885  15848886  Y
+    chr22  15849048  15849049  M
+    chr22  15919711  15919712  R
+    etc.
+
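+  The ambiguous-nucleotide symbols above are the standard IUPAC codes,
+  for example::
+
+    R = A/G    Y = C/T    S = C/G
+    W = A/T    K = G/T    M = A/C
+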
+- second input file, with UCSC annotations for human genes::
+
+    chr22  15688363  15690225  uc010gqr.1  0  +  15688363  15688363  0  2   587,794,  0,1068,
+    chr22  15822826  15869112  uc002zlw.1  0  -  15823622  15869004  0  10  940,105,97,91,265,86,251,208,304,282,  0,1788,2829,3241,4163,6361,8006,26023,29936,46004,
+    chr22  15826991  15869112  uc010gqs.1  0  -  15829218  15869004  0  5   1380,86,157,304,282,  0,2196,21858,25771,41839,
+    chr22  15897459  15919682  uc002zlx.1  0  +  15897459  15897459  0  4   775,128,103,1720,  0,8303,10754,20503,
+    chr22  15945848  15971389  uc002zly.1  0  +  15945981  15970710  0  13  271,25,147,113,127,48,164,84,85,12,102,42,2193,  0,12103,12838,13816,15396,17037,17180,18535,19767,20632,20894,22768,23348,
+    etc.
+
+- output file, showing non-synonymous substitutions in coding regions::
+
+    chr22  15825725  15825726  G/T  uc002zlw.1  Gln:Pro/Gln   469  A  CAA:CCA/CAA
+    chr22  15827035  15827036  G    uc002zlw.1  Glu:Asp       414  G  GAG:GAC
+    chr22  15827135  15827136  C/G  uc002zlw.1  Gly:Gly/Ala   381  G  GGT:GGT/GCT
+    chr22  15830928  15830929  A/G  uc002zlw.1  Ala:Ser/Pro   281  G  GCA:TCA/CCA
+    chr22  15830951  15830952  G    uc002zlw.1  Leu:Pro       273  T  CTT:CCT
+    chr22  15830955  15830956  C/T  uc002zlw.1  Ser:Gly/Ser   272  A  AGC:GGC/AGC
+    chr22  15848885  15848886  C/T  uc002zlw.1  Ser:Trp/Stop  217  C  TCG:TGG/TAG
+    chr22  15848885  15848886  C/T  uc010gqs.1  Ser:Trp/Stop  200  C  TCG:TGG/TAG
+    chr22  15849048  15849049  A/C  uc002zlw.1  Gly:Stop/Gly  163  G  GGA:TGA/GGA
+    etc.
+
+  </help>
+</tool>
diff --git a/tools/evolution/codingSnps_filter.py b/tools/evolution/codingSnps_filter.py
new file mode 100755
index 0000000..8a1dc38
--- /dev/null
+++ b/tools/evolution/codingSnps_filter.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Input validation code for the codingSnps tool (loaded via the tool's <code> tag)
+from galaxy.tools.parameters import DataToolParameter
+# Older Python compatibility
+try:
+    set()
+except NameError:
+    from sets import Set as set
+
+
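+# validate_input() is the validation hook Galaxy runs on this tool's inputs:
+# every dataset parameter must share one genomic build (dbkey) and have its
+# chrom/start/end (and, if present, strand) metadata columns set.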
+def validate_input( trans, error_map, param_values, page_param_map ):
+    dbkeys = set()
+    data_param_names = set()
+    data_params = 0
+    for name, param in page_param_map.items():
+        if isinstance( param, DataToolParameter ):
+            # for each dataset parameter
+            if param_values.get(name, None) is not None:
+                dbkeys.add( param_values[name].dbkey )
+                data_params += 1
+                # check meta data
+                try:
+                    param = param_values[name]
+                    int( param.metadata.startCol )
+                    int( param.metadata.endCol )
+                    int( param.metadata.chromCol )
+                    if param.metadata.strandCol is not None:
+                        int( param.metadata.strandCol )
+                except Exception:
+                    error_msg = ("The attributes of this dataset are not properly set. "
+                        "Click the pencil icon in the history item to set the chrom, start, end and strand columns.")
+                    error_map[name] = error_msg
+            data_param_names.add( name )
+    if len( dbkeys ) > 1:
+        for name in data_param_names:
+            error_map[name] = "All datasets must belong to same genomic build, " \
+                "this dataset is linked to build '%s'" % param_values[name].dbkey
+    if data_params != len(data_param_names):
+        for name in data_param_names:
+            error_map[name] = "A dataset of the appropriate type is required"
diff --git a/tools/extract/extract_genomic_dna.py b/tools/extract/extract_genomic_dna.py
new file mode 100755
index 0000000..38707ee
--- /dev/null
+++ b/tools/extract/extract_genomic_dna.py
@@ -0,0 +1,316 @@
+#!/usr/bin/env python
+"""
+usage: %prog $input $out_file1
+    -1, --cols=N,N,N,N,N: Columns for chrom, start, end, strand, and (optionally) name in input file
+    -d, --dbkey=N: Genome build of input file
+    -o, --output_format=N: the data type of the output file
+    -g, --GALAXY_DATA_INDEX_DIR=N: the directory containing alignseq.loc or twobit.loc
+    -I, --interpret_features: if true, complete features are interpreted when input is GFF
+    -F, --fasta=<genomic_sequences>: genomic sequences to use for extraction
+    -G, --gff: input and output are GFF format; coordinates are treated as GFF (1-based, fully-closed) rather than the 'traditional' interval convention (0-based, half-open).
+"""
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+import tempfile
+
+import bx.seq.nib
+import bx.seq.twobit
+from bx.cookbook import doc_optparse
+from bx.intervals.io import Comment, Header
+
+from galaxy.datatypes.util import gff_util
+from galaxy.tools.util.galaxyops import parse_cols_arg
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def reverse_complement( s ):
+    complement_dna = {"A": "T", "T": "A", "C": "G", "G": "C", "a": "t", "t": "a", "c": "g", "g": "c", "N": "N", "n": "n"}
+    reversed_s = []
+    for i in s:
+        reversed_s.append( complement_dna[i] )
+    reversed_s.reverse()
+    return "".join( reversed_s )
+
+
+def check_seq_file( dbkey, GALAXY_DATA_INDEX_DIR ):
+    # Checks for the presence of *.nib files matching the dbkey within alignseq.loc
+    seq_file = "%s/alignseq.loc" % GALAXY_DATA_INDEX_DIR
+    for line in open(seq_file):
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( "#" ) and line.startswith( 'seq' ):
+            fields = line.split( '\t' )
+            if len( fields) >= 3 and fields[1] == dbkey:
+                print("Using *.nib genomic reference files")
+                return fields[2].strip()
+
+    # If no entry in aligseq.loc was found, check for the presence of a *.2bit file in twobit.loc
+    seq_file = "%s/twobit.loc" % GALAXY_DATA_INDEX_DIR
+    for line in open( seq_file ):
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( "#" ) and line.endswith( '.2bit' ):
+            fields = line.split( '\t' )
+            if len(fields) >= 2 and fields[0] == dbkey:
+                print("Using a *.2bit genomic reference file")
+                return fields[1].strip()
+
+    return ''
+
+
+def __main__():
+    #
+    # Parse options, args.
+    #
+    options, args = doc_optparse.parse( __doc__ )
+    try:
+        if len(options.cols.split(',')) == 5:
+            # BED file
+            chrom_col, start_col, end_col, strand_col, name_col = parse_cols_arg( options.cols )
+        else:
+            # gff file
+            chrom_col, start_col, end_col, strand_col = parse_cols_arg( options.cols )
+            name_col = False
+        dbkey = options.dbkey
+        output_format = options.output_format
+        gff_format = options.gff
+        interpret_features = options.interpret_features
+        GALAXY_DATA_INDEX_DIR = options.GALAXY_DATA_INDEX_DIR
+        fasta_file = options.fasta
+        input_filename, output_filename = args
+    except Exception:
+        doc_optparse.exception()
+
+    includes_strand_col = strand_col >= 0
+    strand = None
+    nibs = {}
+
+    #
+    # Set path to sequence data.
+    #
+    if fasta_file:
+        # Need to create 2bit file from fasta file.
+        try:
+            seq_path = tempfile.NamedTemporaryFile( dir="." ).name
+            cmd = "faToTwoBit %s %s" % ( fasta_file, seq_path )
+
+            tmp_name = tempfile.NamedTemporaryFile( dir="." ).name
+            tmp_stderr = open( tmp_name, 'wb' )
+            proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() )
+            returncode = proc.wait()
+            tmp_stderr.close()
+
+            # Get stderr, allowing for case where it's very large.
+            tmp_stderr = open( tmp_name, 'rb' )
+            stderr = ''
+            buffsize = 1048576
+            try:
+                while True:
+                    stderr += tmp_stderr.read( buffsize )
+                    if not stderr or len( stderr ) % buffsize != 0:
+                        break
+            except OverflowError:
+                pass
+            tmp_stderr.close()
+
+            # Error checking.
+            if returncode != 0:
+                raise Exception(stderr)
+        except Exception as e:
+            stop_err( 'Error running faToTwoBit. ' + str( e ) )
+    else:
+        seq_path = check_seq_file( dbkey, GALAXY_DATA_INDEX_DIR )
+        if not os.path.exists( seq_path ):
+            # If this occurs, we need to fix the metadata validator.
+            stop_err( "No sequences are available for '%s', request them by reporting this error." % dbkey )
+
+    #
+    # Fetch sequences.
+    #
+
+    # Get feature's line(s).
+    def get_lines( feature ):
+        if isinstance( feature, gff_util.GFFFeature ):
+            return feature.lines()
+        else:
+            return [ feature.rstrip( '\r\n' ) ]
+
+    skipped_lines = 0
+    first_invalid_line = 0
+    invalid_lines = []
+    fout = open( output_filename, "w" )
+    warnings = []
+    warning = ''
+    twobitfile = None
+    file_iterator = open( input_filename )
+    if gff_format and interpret_features:
+        file_iterator = gff_util.GFFReaderWrapper( file_iterator, fix_strand=False )
+    line_count = 1
+    for feature in file_iterator:
+        # Ignore comments, headers.
+        if isinstance( feature, ( Header, Comment ) ):
+            line_count += 1
+            continue
+
+        name = ""
+        if gff_format and interpret_features:
+            # Processing features.
+            gff_util.convert_gff_coords_to_bed( feature )
+            chrom = feature.chrom
+            start = feature.start
+            end = feature.end
+            strand = feature.strand
+        else:
+            # Processing lines, either interval or GFF format.
+            line = feature.rstrip( '\r\n' )
+            if line and not line.startswith( "#" ):
+                fields = line.split( '\t' )
+                try:
+                    chrom = fields[chrom_col]
+                    start = int( fields[start_col] )
+                    end = int( fields[end_col] )
+                    if name_col:
+                        name = fields[name_col]
+                    if gff_format:
+                        start, end = gff_util.convert_gff_coords_to_bed( [start, end] )
+                    if includes_strand_col:
+                        strand = fields[strand_col]
+                except (IndexError, ValueError):
+                    warning = "Invalid chrom, start or end column values. "
+                    warnings.append( warning )
+                    if not invalid_lines:
+                        invalid_lines = get_lines( feature )
+                        first_invalid_line = line_count
+                    skipped_lines += len( invalid_lines )
+                    continue
+                if start > end:
+                    warning = "Invalid interval, start '%d' > end '%d'.  " % ( start, end )
+                    warnings.append( warning )
+                    if not invalid_lines:
+                        invalid_lines = get_lines( feature )
+                        first_invalid_line = line_count
+                    skipped_lines += len( invalid_lines )
+                    continue
+
+                if strand not in ['+', '-']:
+                    strand = '+'
+                sequence = ''
+            else:
+                continue
+
+        # Open sequence file and get sequence for feature/interval.
+        if seq_path and os.path.exists( "%s/%s.nib" % ( seq_path, chrom ) ):
+            # TODO: improve support for GFF-nib interaction.
+            if chrom in nibs:
+                nib = nibs[chrom]
+            else:
+                nibs[chrom] = nib = bx.seq.nib.NibFile( open( "%s/%s.nib" % ( seq_path, chrom ) ) )
+            try:
+                sequence = nib.get( start, end - start )
+            except Exception:
+                warning = "Unable to fetch the sequence from '%d' to '%d' for build '%s'. " % ( start, end, dbkey )
+                warnings.append( warning )
+                if not invalid_lines:
+                    invalid_lines = get_lines( feature )
+                    first_invalid_line = line_count
+                skipped_lines += len( invalid_lines )
+                continue
+        elif seq_path and os.path.isfile( seq_path ):
+            if not twobitfile:
+                twobitfile = bx.seq.twobit.TwoBitFile( open( seq_path ) )
+            try:
+                if gff_format and interpret_features:
+                    # Create sequence from intervals within a feature.
+                    sequence = ''
+                    for interval in feature.intervals:
+                        sequence += twobitfile[interval.chrom][interval.start:interval.end]
+                else:
+                    sequence = twobitfile[chrom][start:end]
+            except Exception:
+                warning = "Unable to fetch the sequence from '%d' to '%d' for chrom '%s'. " % ( start, end, chrom )
+                warnings.append( warning )
+                if not invalid_lines:
+                    invalid_lines = get_lines( feature )
+                    first_invalid_line = line_count
+                skipped_lines += len( invalid_lines )
+                continue
+        else:
+            warning = "Chromosome by name '%s' was not found for build '%s'. " % ( chrom, dbkey )
+            warnings.append( warning )
+            if not invalid_lines:
+                invalid_lines = get_lines( feature )
+                first_invalid_line = line_count
+            skipped_lines += len( invalid_lines )
+            continue
+        if sequence == '':
+            warning = "Chrom: '%s', start: '%s', end: '%s' is either invalid or not present in build '%s'. " % \
+                ( chrom, start, end, dbkey )
+            warnings.append( warning )
+            if not invalid_lines:
+                invalid_lines = get_lines( feature )
+                first_invalid_line = line_count
+            skipped_lines += len( invalid_lines )
+            continue
+        if includes_strand_col and strand == "-":
+            sequence = reverse_complement( sequence )
+
+        if output_format == "fasta":
+            seq_len = len( sequence )
+            c = 0
+            if gff_format:
+                start, end = gff_util.convert_bed_coords_to_gff( [ start, end ] )
+            fields = [dbkey, str( chrom ), str( start ), str( end ), strand]
+            meta_data = "_".join( fields )
+            if name.strip():
+                fout.write( ">%s %s\n" % (meta_data, name) )
+            else:
+                fout.write( ">%s\n" % meta_data )
+            while c < seq_len:
+                b = min( c + 50, seq_len )
+                fout.write( "%s\n" % str( sequence[c:b] ) )
+                c = b
+        else:  # output_format == "interval"
+            if gff_format and interpret_features:
+                # TODO: need better GFF Reader to capture all information needed
+                # to produce this line.
+                meta_data = "\t".join(
+                    [feature.chrom, "galaxy_extract_genomic_dna", "interval",
+                    str( feature.start ), str( feature.end ), feature.score, feature.strand,
+                    ".", gff_util.gff_attributes_to_str( feature.attributes, "GTF" ) ] )
+            else:
+                meta_data = "\t".join( fields )
+            if gff_format:
+                format_str = "%s seq \"%s\";\n"
+            else:
+                format_str = "%s\t%s\n"
+            fout.write( format_str % ( meta_data, str( sequence ) ) )
+
+        # Update line count.
+        if isinstance( feature, gff_util.GFFFeature ):
+            line_count += len( feature.intervals )
+        else:
+            line_count += 1
+
+    fout.close()
+
+    if warnings:
+        warn_msg = "%d warnings, 1st is: " % len( warnings )
+        warn_msg += warnings[0]
+        print(warn_msg)
+    if skipped_lines:
+        # Error message includes up to the first 10 skipped lines.
+        print('Skipped %d invalid lines, 1st is #%d, "%s"' % ( skipped_lines, first_invalid_line, '\n'.join( invalid_lines[:10] ) ))
+
+    # Clean up temp file.
+    if fasta_file:
+        os.remove( seq_path )
+        os.remove( tmp_name )
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/extract/extract_genomic_dna.xml b/tools/extract/extract_genomic_dna.xml
new file mode 100644
index 0000000..69847cd
--- /dev/null
+++ b/tools/extract/extract_genomic_dna.xml
@@ -0,0 +1,189 @@
+<tool id="Extract genomic DNA 1" name="Extract Genomic DNA" version="2.2.3">
+  <description>using coordinates from assembled/unassembled genomes</description>
+  <command interpreter="python">
+      extract_genomic_dna.py "${input}" "${out_file1}" -o "${out_format}" -d "${dbkey}" 
+      
+      #if str( $interpret_features ) == "yes":
+        -I
+      #end if
+      
+      ## Columns to use in input file.
+      #if isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gff').__class__):
+        -1 "1,4,5,7" --gff
+      #else:
+        -1 "${input.metadata.chromCol},${input.metadata.startCol},${input.metadata.endCol},${input.metadata.strandCol},${input.metadata.nameCol}"
+      #end if
+
+      #if $seq_source.index_source == "cached":
+        ## Genomic data from cache.
+        -g "${GALAXY_DATA_INDEX_DIR}"
+      #else:
+        ## Genomic data from history.
+        -F "${seq_source.ref_file}"
+      #end if
+  </command>
+  <inputs>
+      <param format="interval,gff" name="input" type="data" label="Fetch sequences for intervals in"/>
+      <param name="interpret_features" type="select" label="Interpret features when possible" help="Only meaningful for GFF, GTF datasets.">
+          <option value="yes">Yes</option>
+          <option value="no">No</option>
+      </param>
+      <conditional name="seq_source">
+          <param name="index_source" type="select" label="Source for Genomic Data" help="If 'Locally cached' is selected, it will use a genomic reference file that matches the input file's dbkey. First it looks whether there are corresponding *.nib files in alignseq.loc. If that is not available, it searches for a corresponding *.2bit in twobit.loc.">
+              <option value="cached">Locally cached</option>
+              <option value="history">History</option>
+          </param>
+          <when value="cached">
+          </when>
+          <when value="history">
+              <param name="ref_file" type="data" format="fasta" label="Using reference file" />
+          </when>
+      </conditional>
+      <param name="out_format" type="select" label="Output data type">
+          <option value="fasta">FASTA</option>
+          <option value="interval">Interval</option>
+      </param>
+  </inputs>
+  <outputs>
+      <data format="input" name="out_file1" metadata_source="input">
+          <change_format>
+              <when input="out_format" value="fasta" format="fasta" />
+          </change_format>
+      </data>
+  </outputs>
+  <requirements>
+      <requirement type="package">ucsc_tools</requirement>
+      <requirement type="binary">faToTwoBit</requirement>
+  </requirements>
+  <tests>
+    <test>
+      <param name="input" value="1.bed" dbkey="hg17" ftype="bed" />
+      <param name="interpret_features" value="yes"/>
+      <param name="index_source" value="cached"/>
+      <param name="out_format" value="fasta"/>
+      <output name="out_file1">
+	<assert_contents>
+	  <!-- First few lines... -->
+	  <has_text text=">hg17_chr1_147962192_147962580_- CCDS989.1_cds_0_0_chr1_147962193_r" />
+	  <has_text text="ACTTGATCCTGCTCCCTCGGTGTCTGCATTGACTCCTCATGCTGGGACTG" />
+	  <has_text text="GACCCGTCAACCCCCCTGCTCGCTGCTCACGTACCTTCATCACTTTTAGT" />
+	  <has_text text="GATGATGCAACTTTCGAGGAATGGTTCCCCCAAGGGCGGCCCCCAAAAGT" />
+	  <!-- Last few lines... -->
+	  <has_text text="GCTGTGGCACAGAACATGGACTCTGTGTTTAAGGAGCTCTTGGGAAAGAC" />
+	  <has_text text="CTCTGTCCGCCAGGGCCTTGGGCCAGCATCTACCACCTCTCCCAGTCCTG" />
+	  <has_text text="GGCCCCGAAGCCCAAAGGCCCCGCCCAGCAGCCGCCTGGGCAGGAACAAA" />
+	  <has_text text="GGCTTCTCCCGGGGCCCTGGGGCCCCAGCCTCACCCTCAGCTTCCCACCC" />
+	  <has_text text="CCAGGGCCTAGACACGACCCCCAAGCCACACTGA" />
+	</assert_contents>
+      </output>
+    </test>
+    <test>
+      <param name="input" value="droPer1.bed" dbkey="droPer1" ftype="bed" />
+      <param name="interpret_features" value="yes"/>
+      <param name="index_source" value="cached"/>
+      <param name="out_format" value="fasta"/>
+      <output name="out_file1" file="extract_genomic_dna_out2.fasta" />
+    </test>
+    <test>
+      <param name="input" value="1.bed" dbkey="hg17" ftype="bed" />
+      <param name="interpret_features" value="yes"/>
+      <param name="index_source" value="cached"/>
+      <param name="out_format" value="interval"/>
+      <output name="out_file1" file="extract_genomic_dna_out3.interval" />
+    </test>
+    <!-- Test GFF file support. -->
+    <test>
+      <param name="input" value="gff_filter_by_attribute_out1.gff" dbkey="mm9" ftype="gff" />
+      <param name="interpret_features" value="no"/>
+      <param name="index_source" value="cached"/>
+      <param name="out_format" value="interval"/>
+      <output name="out_file1" file="extract_genomic_dna_out4.gff" />
+    </test>
+    <test>
+      <param name="input" value="gff_filter_by_attribute_out1.gff" dbkey="mm9" ftype="gff" />
+      <param name="interpret_features" value="no"/>
+      <param name="out_format" value="fasta"/>
+      <param name="index_source" value="cached"/>
+      <output name="out_file1" file="extract_genomic_dna_out5.fasta" />
+    </test>
+    <!-- Test custom sequences support and GFF feature interpretation. -->
+    <test>
+      <param name="input" value="cufflinks_out1.gtf" dbkey="mm9" ftype="gff" />
+      <param name="interpret_features" value="no"/>
+      <param name="index_source" value="history"/>
+      <param name="ref_file" value="tophat_in1.fasta"/>
+      <param name="out_format" value="fasta"/>
+      <output name="out_file1" file="extract_genomic_dna_out6.fasta" />
+    </test>
+    <test>
+      <param name="input" value="cufflinks_out1.gtf" dbkey="mm9" ftype="gff" />
+      <param name="interpret_features" value="yes"/>
+      <param name="index_source" value="history"/>
+      <param name="ref_file" value="tophat_in1.fasta"/>
+      <param name="out_format" value="fasta"/>
+      <output name="out_file1" file="extract_genomic_dna_out7.fasta" />
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+This tool requires interval or GFF datasets (specially formatted tabular data).  If your data is not TAB delimited, first use *Text Manipulation->Convert*.
+
+.. class:: warningmark
+
+Make sure that the genome build is specified for the dataset from which you are extracting sequences (click the pencil icon in the history item if it is not specified). 
+
+.. class:: warningmark
+
+All of the following will cause a line from the input dataset to be skipped and a warning generated.  The number of warnings and skipped lines is documented in the resulting history item.
+ - Any line that does not contain at least 3 columns: a chromosome and numerical start and end coordinates.
+ - Sequences that fall outside of the range of a line's start and end coordinates.
+ - Chromosome, start, or end coordinates that are invalid for the specified build.
+ - Any line whose data columns are not separated by a **TAB** character (other whitespace characters are invalid).
+
+.. class:: infomark
+
+ **Extract genomic DNA using coordinates from ASSEMBLED genomes and UNassembled genomes** was previously handled by two separate tools.
+
+-----
+
+**What it does**
+
+This tool uses coordinate, strand, and build information to fetch genomic DNA sequences in FASTA or interval format.
+
+If strand is not defined, the default value is "+".
+
+-----
+
+**Example**
+
+If the input dataset is::
+
+    chr7  127475281  127475310  NM_000230  0  +
+    chr7  127485994  127486166  NM_000230  0  +
+    chr7  127486011  127486166  D49487     0  +
+
+Extracting sequences with **FASTA** output data type returns::
+
+    >hg17_chr7_127475281_127475310_+ NM_000230
+    GTAGGAATCGCAGCGCCAGCGGTTGCAAG
+    >hg17_chr7_127485994_127486166_+ NM_000230
+    GCCCAAGAAGCCCATCCTGGGAAGGAAAATGCATTGGGGAACCCTGTGCG
+    GATTCTTGTGGCTTTGGCCCTATCTTTTCTATGTCCAAGCTGTGCCCATC
+    CAAAAAGTCCAAGATGACACCAAAACCCTCATCAAGACAATTGTCACCAG
+    GATCAATGACATTTCACACACG
+    >hg17_chr7_127486011_127486166_+ D49487
+    TGGGAAGGAAAATGCATTGGGGAACCCTGTGCGGATTCTTGTGGCTTTGG
+    CCCTATCTTTTCTATGTCCAAGCTGTGCCCATCCAAAAAGTCCAAGATGA
+    CACCAAAACCCTCATCAAGACAATTGTCACCAGGATCAATGACATTTCAC
+    ACACG
+
+Extracting sequences with **Interval** output data type returns::
+
+    chr7    127475281       127475310       NM_000230       0       +       GTAGGAATCGCAGCGCCAGCGGTTGCAAG
+    chr7    127485994       127486166       NM_000230       0       +       GCCCAAGAAGCCCATCCTGGGAAGGAAAATGCATTGGGGAACCCTGTGCGGATTCTTGTGGCTTTGGCCCTATCTTTTCTATGTCCAAGCTGTGCCCATCCAAAAAGTCCAAGATGACACCAAAACCCTCATCAAGACAATTGTCACCAGGATCAATGACATTTCACACACG
+    chr7    127486011       127486166       D49487  0       +       TGGGAAGGAAAATGCATTGGGGAACCCTGTGCGGATTCTTGTGGCTTTGGCCCTATCTTTTCTATGTCCAAGCTGTGCCCATCCAAAAAGTCCAAGATGACACCAAAACCCTCATCAAGACAATTGTCACCAGGATCAATGACATTTCACACACG
+
+</help>
+</tool>
diff --git a/tools/extract/liftOver_wrapper.py b/tools/extract/liftOver_wrapper.py
new file mode 100644
index 0000000..0ba1af8
--- /dev/null
+++ b/tools/extract/liftOver_wrapper.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# Guruprasad Ananda
+"""
+Converts coordinates from one build/assembly to another using liftOver binary and mapping files downloaded from UCSC.
+"""
+
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+
+def stop_err(msg):
+    sys.stderr.write(msg)
+    sys.exit()
+
+
+def safe_bed_file(infile):
+    """Make a BED file with track and browser lines ready for liftOver.
+
+    liftOver will fail with track or browser lines. We can make it happy
+    by converting these to comments. See:
+
+    https://lists.soe.ucsc.edu/pipermail/genome/2007-May/013561.html
+    """
+    fix_pat = re.compile("^(track|browser)")
+    (fd, fname) = tempfile.mkstemp()
+    in_handle = open(infile)
+    out_handle = open(fname, "w")
+    for line in in_handle:
+        if fix_pat.match(line):
+            line = "#" + line
+        out_handle.write(line)
+    in_handle.close()
+    out_handle.close()
+    return fname
+
+
+if len( sys.argv ) < 9:
+    stop_err( "USAGE: prog input out_file1 out_file2 input_dbkey output_dbkey infile_type minMatch multiple <minChainT> <minChainQ> <minSizeQ>" )
+
+infile = sys.argv[1]
+outfile1 = sys.argv[2]
+outfile2 = sys.argv[3]
+in_dbkey = sys.argv[4]
+mapfilepath = sys.argv[5]
+infile_type = sys.argv[6]
+gff_option = ""
+if infile_type == "gff":
+    gff_option = "-gff "
+minMatch = sys.argv[7]
+multiple = int(sys.argv[8])
+multiple_option = ""
+if multiple:
+    minChainT = sys.argv[9]
+    minChainQ = sys.argv[10]
+    minSizeQ = sys.argv[11]
+    multiple_option = " -multiple -minChainT=%s -minChainQ=%s -minSizeQ=%s " % (minChainT, minChainQ, minSizeQ)
+
+try:
+    assert float(minMatch)
+except (AssertionError, ValueError):
+    minMatch = 0.1
+# ensure dbkey is set
+if in_dbkey == "?":
+    stop_err( "Input dataset genome build unspecified, click the pencil icon in the history item to specify it." )
+
+if not os.path.isfile( mapfilepath ):
+    stop_err( "%s mapping is not currently available." % ( mapfilepath.split('/')[-1].split('.')[0] ) )
+
+safe_infile = safe_bed_file(infile)
+cmd_line = "liftOver " + gff_option + "-minMatch=" + str(minMatch) + multiple_option + " " + safe_infile + " " + mapfilepath + " " + outfile1 + " " + outfile2 + "  > /dev/null"
+
+try:
+    # have to nest try-except in try-finally to handle 2.4
+    try:
+        proc = subprocess.Popen( args=cmd_line, shell=True, stderr=subprocess.PIPE )
+        returncode = proc.wait()
+        stderr = proc.stderr.read()
+        if returncode != 0:
+            raise Exception(stderr)
+    except Exception as e:
+        raise Exception('Exception caught attempting conversion: ' + str( e ))
+finally:
+    os.remove(safe_infile)
diff --git a/tools/extract/liftOver_wrapper.xml b/tools/extract/liftOver_wrapper.xml
new file mode 100644
index 0000000..34373d4
--- /dev/null
+++ b/tools/extract/liftOver_wrapper.xml
@@ -0,0 +1,144 @@
+<tool id="liftOver1" name="Convert genome coordinates" version="1.0.4">
+  <description> between assemblies and genomes</description>
+  <command interpreter="python">
+  liftOver_wrapper.py
+  $input
+  "$out_file1"
+  "$out_file2"
+  $dbkey
+  $to_dbkey
+  #if isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gff').__class__) or isinstance( $input.datatype, $__app__.datatypes_registry.get_datatype_by_extension('gtf').__class__):
+        "gff"
+  #else:
+        "interval"
+  #end if
+  $minMatch ${multiple.choice} ${multiple.minChainT} ${multiple.minChainQ} ${multiple.minSizeQ}
+  </command>
+  <inputs>
+    <param format="interval,gff,gtf" name="input" type="data" label="Convert coordinates of">
+      <validator type="unspecified_build" />
+      <validator type="dataset_metadata_in_file" filename="liftOver.loc" metadata_name="dbkey" metadata_column="0" message="Liftover mappings are currently not available for the specified build." />
+    </param>
+    <param name="to_dbkey" type="select" label="To">
+      <options from_data_table="liftOver">
+        <filter type="data_meta" ref="input" key="dbkey" column="0" />
+      </options>
+    </param>
+    <param name="minMatch" size="10" type="float" value="0.95" label="Minimum ratio of bases that must remap" help="Recommended values: same species = 0.95, different species = 0.10" />
+    <conditional name="multiple">
+        <param name="choice" type="select" label="Allow multiple output regions?" help="Recommended values: same species = No, different species = Yes">
+            <option value="0" selected="true">No</option>
+            <option value="1">Yes</option>
+        </param>
+        <when value="0">
+            <param name="minSizeQ" type="hidden" value="0" />
+            <param name="minChainQ" type="hidden" value="0" />
+            <param name="minChainT" type="hidden" value="0" />
+        </when>
+        <when value="1">
+            <param name="minSizeQ" size="10" type="integer" value="0" label="Minimum matching region size in dataset" help="Recommended value: set to >= 300 bases for complete transcripts"/>
+            <param name="minChainQ" size="10" type="integer" value="500" label="Minimum chain size in dataset"/>
+            <param name="minChainT" size="10" type="integer" value="500" label="Minimum chain size in target"/>
+        </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" label="${tool.name} on ${on_string} [ MAPPED COORDINATES ]">
+      <actions>
+        <action type="metadata" name="dbkey">
+          <option type="from_data_table" name="liftOver" key="name" column="1" offset="0">
+            <filter type="param_value" column="0" value="#" compare="startswith" keep="False"/>
+            <filter type="param_value" ref="to_dbkey" column="2"/>
+          </option>
+        </action>
+      </actions>
+    </data>
+    <data format="input" name="out_file2" label="${tool.name} on ${on_string} [ UNMAPPED COORDINATES ]" />
+  </outputs>
+  <requirements>
+    <requirement type="package">ucsc_tools</requirement>
+  </requirements>
+  <tests>
+    <!--
+    <test>
+      <param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
+      <param name="to_dbkey" value="panTro2" />
+      <param name="minMatch" value="0.95" />
+      <param name="choice" value="0" />
+      <output name="out_file1" file="5_liftover_mapped.bed"/>
+      <output name="out_file2" file="5_liftover_unmapped.bed"/>
+    </test>
+    <test>
+      <param name="input" value="5.bed" dbkey="hg18" ftype="bed" />
+      <param name="to_dbkey" value="panTro2" />
+      <param name="minMatch" value="0.10" />
+      <param name="choice" value="1" />
+      <param name="minSizeQ" value="0" />
+      <param name="minChainQ" value="500" />
+      <param name="minChainT" value="500" />
+      <output name="out_file1" file="5_mult_liftover_mapped.bed"/>
+      <output name="out_file2" file="5_mult_liftover_unmapped.bed"/>
+    </test>
+    <test>
+      <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+      <param name="to_dbkey" value="panTro2" />
+      <param name="minMatch" value="0.95" />
+      <param name="choice" value="0" />
+      <output name="out_file1" file="cuffcompare_in1_liftover_mapped.bed"/>
+      <output name="out_file2" file="cuffcompare_in1_liftover_unmapped.bed"/>
+    </test>
+    <test>
+      <param name="input" value="cuffcompare_in1.gtf" dbkey="hg18" ftype="gtf" />
+      <param name="to_dbkey" value="panTro2" />
+      <param name="minMatch" value="0.10" />
+      <param name="choice" value="1" />
+      <param name="minSizeQ" value="0" />
+      <param name="minChainQ" value="500" />
+      <param name="minChainT" value="500" />
+      <output name="out_file1" file="cuffcompare_in1_mult_liftover_mapped.bed"/>
+      <output name="out_file2" file="cuffcompare_in1_mult_liftover_unmapped.bed"/>
+    </test>
+    -->
+  </tests>
+  <help>
+.. class:: warningmark
+
+Make sure that the genome build of the input dataset is specified (click the pencil icon in the history item to set it if necessary).
+
+.. class:: warningmark
+
+This tool can work with interval, GFF, and GTF datasets. It requires interval datasets to have the chromosome in column 1,
+the start coordinate in column 2, and the end coordinate in column 3. BED comments
+and track and browser lines will be ignored, but if other non-interval lines
+are present the tool will return empty output datasets.
+
+-----
+
+.. class:: infomark
+
+**What it does**
+
+This tool is based on the LiftOver utility and Chain track from `the UC Santa Cruz Genome Browser`__.
+
+It converts coordinates and annotations between assemblies and genomes. It produces 2 files, one containing all the mapped coordinates and the other containing the unmapped coordinates, if any.
+
+ .. __: http://genome.ucsc.edu/
+
+-----
+
+**Example**
+
+Converting the following hg16 intervals to hg18 intervals::
+
+    chrX  85170   112199  AK002185  0  +
+    chrX  110458  112199  AK097346  0  +
+    chrX  112203  121212  AK074528  0  -
+
+will produce the following hg18 intervals::
+
+    chrX  132991  160020  AK002185  0  +
+    chrX  158279  160020  AK097346  0  +
+    chrX  160024  169033  AK074528  0  -
+
+  </help>
+</tool>
diff --git a/tools/filters/CreateInterval.pl b/tools/filters/CreateInterval.pl
new file mode 100644
index 0000000..18e0a32
--- /dev/null
+++ b/tools/filters/CreateInterval.pl
@@ -0,0 +1,19 @@
+#! /usr/bin/perl -w
+
+# Accepts chrom, start, end, name, and strand
+# If strand is not supplied, it is set to plus
+# CreateInterval.pl $chrom $start $end $name $strand $output
+
+my $strand = "+";
+
+die "Not enough arguments\n" unless @ARGV == 6;
+
+open OUT, ">$ARGV[5]" or die "Cannot open $ARGV[5]:$!\n";
+
+$strand = "-" if $ARGV[4] eq "minus";
+$ARGV[3] =~ s/\s+/_/g;
+$ARGV[3] =~ s/\t+/_/g;
+
+print OUT "$ARGV[0]\t$ARGV[1]\t$ARGV[2]\t$ARGV[3]\t0\t$strand\n";
+close OUT;
+
diff --git a/tools/filters/CreateInterval.xml b/tools/filters/CreateInterval.xml
new file mode 100644
index 0000000..6943799
--- /dev/null
+++ b/tools/filters/CreateInterval.xml
@@ -0,0 +1,56 @@
+<tool id="createInterval" name="Create single interval" version="1.0.0">
+  <description>as a new dataset</description>
+  <command interpreter="perl">CreateInterval.pl $chrom $start $end "$name" $strand $out_file1</command>
+  <inputs>
+    <param name="chrom" size="20" type="text" value="chr7" label="Chromosome"/>
+    <param name="start" size="20" type="integer" value="100" label="Start position"/>
+    <param name="end"   size="20" type="integer" value="1000" label="End position"/>
+    <param name="name" size="20" type="text" value="myInterval" label="Name"/>
+    <param name="strand" type="select" label="Strand" help="If your interval is strandless set strand to plus" >
+      <option value="plus">plus</option>
+      <option value="minus">minus</option>
+    </param>    
+  </inputs>
+  <outputs>
+    <data format="bed" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="chrom" value="chr7"/>
+      <param name="start" value="100"/>
+      <param name="end" value="1000"/>
+      <param name="name" value="myinterval"/>
+      <param name="strand" value="plus"/>
+      <output name="out_file1" file="eq-createinterval.dat"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+**TIP**: Once your interval appears in the history, you must tell Galaxy which genome it belongs to by clicking the pencil icon or the "?" link in the history item.
+
+-----
+
+**What it does**
+
+This tool allows you to create a single genomic interval. The resulting history item will be in the BED format.
+
+-----
+
+**Example**
+
+Typing the following values in the form::
+
+    Chromosome: chrX
+    Start position: 151087187
+    End position: 151370486
+    Name: NM_000808
+    Strand: minus
+
+will create a single interval::
+
+    chrX  151087187  151370486  NM_000808  0  -
+
+</help>
+</tool>
diff --git a/tools/filters/axt_to_concat_fasta.py b/tools/filters/axt_to_concat_fasta.py
new file mode 100644
index 0000000..23e53ab
--- /dev/null
+++ b/tools/filters/axt_to_concat_fasta.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+"""
+Adapted from bx/scripts/axt_to_concat_fasta.py
+"""
+from __future__ import print_function
+
+import sys
+
+import bx.align.axt
+
+
+def usage(s=None):
+    message = """
+axt_to_fasta species1 species2 < axt_file > fasta_file
+"""
+    if s is None:
+        sys.exit(message)
+    else:
+        sys.exit("%s\n%s" % (s, message))
+
+
+def main():
+    # check the command line
+    if len(sys.argv) != 3:
+        usage("expecting exactly two species names")
+    species1 = sys.argv[1]
+    species2 = sys.argv[2]
+
+    # convert the alignment blocks
+
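+    # Concatenate the aligned text (gap characters included) of every block,
+    # producing one running sequence per species.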
+    reader = bx.align.axt.Reader(sys.stdin, support_ids=True,
+                                 species1=species1, species2=species2)
+    sp1text = list()
+    sp2text = list()
+    for a in reader:
+        sp1text.append(a.components[0].text)
+        sp2text.append(a.components[1].text)
+    sp1seq = "".join(sp1text)
+    sp2seq = "".join(sp2text)
+    print_component_as_fasta(sp1seq, species1)
+    print_component_as_fasta(sp2seq, species2)
+
+
+# TODO: this should be moved to a bx.align.fasta module
+def print_component_as_fasta(text, src):
+    header = ">" + src
+    print(header)
+    print(text)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/axt_to_concat_fasta.xml b/tools/filters/axt_to_concat_fasta.xml
new file mode 100644
index 0000000..5ded7a2
--- /dev/null
+++ b/tools/filters/axt_to_concat_fasta.xml
@@ -0,0 +1,66 @@
+<tool id="axt_to_concat_fasta" name="AXT to concatenated FASTA" version="1.0.0">
+  <description>Converts an AXT formatted file to a concatenated FASTA alignment</description>
+  <edam_operations>
+    <edam_operation>operation_3434</edam_operation>
+  </edam_operations>
+  <command interpreter="python">axt_to_concat_fasta.py $dbkey_1 $dbkey_2 < $axt_input > $out_file1</command>
+  <inputs>
+    <param format="axt" name="axt_input" type="data" label="AXT file"/>
+    <param name="dbkey_1" type="genomebuild" label="Genome"/>
+    <param name="dbkey_2" type="genomebuild" label="Genome"/>
+  </inputs>
+  <outputs>
+    <data format="fasta" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="axt_input" value="1.axt" ftype="axt" />
+      <param name="dbkey_1" value='hg17' />
+      <param name="dbkey_2" value="panTro1" />
+      <output name="out_file1" file="axt_to_concat_fasta.dat" />
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+**IMPORTANT**: AXT formatted alignments will be phased out from Galaxy in the coming weeks. They will be replaced with pairwise MAF alignments, which are already available. To try pairwise MAF alignments use "Extract Pairwise MAF blocks" tool in *Fetch Sequences and Alignments* section.
+
+--------
+
+**Syntax**
+
+This tool converts an AXT formatted file to the FASTA format and concatenates the alignment blocks into a single sequence per genome build.
+
+- **AXT format** The alignments are produced from Blastz, an alignment tool available from Webb Miller's lab at Penn State University. The lav format Blastz output, which does not include the sequence, was converted to AXT format with lavToAxt. Each alignment block in an AXT file contains three lines: a summary line and 2 sequence lines. Blocks are separated from one another by blank lines. 
+
+- **FASTA format** a text-based format for representing both nucleic and protein sequences, in which base pairs or proteins are represented using a single-letter code.
+
+  - This format contains a one-line header. It starts with a ">" symbol. The first word on this line is the name of the sequence. The rest of the line is a description of the sequence.
+  - The remaining lines contain the sequence itself.
+  - Blank lines in a FASTA file are ignored, and so are spaces or other gap symbols (dashes, underscores, periods) in a sequence.
+  - Fasta files containing multiple sequences are just the same, with one sequence listed right after another. This format is accepted for many multiple sequence alignment programs.
+
+-----
+
+**Example**
+
+- AXT format::
+
+    0 chr19 3001012 3001075 chr11 70568380 70568443 - 3500
+    TCAGCTCATAAATCACCTCCTGCCACAAGCCTGGCCTGGTCCCAGGAGAGTGTCCAGGCTCAGA
+    TCTGTTCATAAACCACCTGCCATGACAAGCCTGGCCTGTTCCCAAGACAATGTCCAGGCTCAGA
+
+    1 chr19 3008279 3008357 chr11 70573976 70574054 - 3900
+    CACAATCTTCACATTGAGATCCTGAGTTGCTGATCAGAATGGAAGGCTGAGCTAAGATGAGCGACGAGGCAATGTCACA
+    CACAGTCTTCACATTGAGGTACCAAGTTGTGGATCAGAATGGAAAGCTAGGCTATGATGAGGGACAGTGCGCTGTCACA
+
+- Convert the above file to concatenated FASTA format::
+
+    >hg16
+    TCAGCTCATAAATCACCTCCTGCCACAAGCCTGGCCTGGTCCCAGGAGAGTGTCCAGGCTCAGACACAATCTTCACATTGAGATCCTGAGTTGCTGATCAGAATGGAAGGCTGAGCTAAGATGAGCGACGAGGCAATGTCACA
+    >mm5
+    TCTGTTCATAAACCACCTGCCATGACAAGCCTGGCCTGTTCCCAAGACAATGTCCAGGCTCAGACACAGTCTTCACATTGAGGTACCAAGTTGTGGATCAGAATGGAAAGCTAGGCTATGATGAGGGACAGTGCGCTGTCACA
+
+  </help>
+</tool>
diff --git a/tools/filters/axt_to_fasta.py b/tools/filters/axt_to_fasta.py
new file mode 100644
index 0000000..d0b0859
--- /dev/null
+++ b/tools/filters/axt_to_fasta.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+"""
+Adapted from bx/scripts/axt_to_fasta.py
+"""
+from __future__ import print_function
+
+import sys
+
+import bx.align.axt
+
+
+def usage(s=None):
+    message = """
+axt_to_fasta species1 species2 < axt_file > fasta_file
+"""
+    if s is None:
+        sys.exit(message)
+    else:
+        sys.exit("%s\n%s" % (s, message))
+
+
+def main():
+    # check the command line
+    if len(sys.argv) != 3:
+        usage("expecting exactly two species names")
+    species1 = sys.argv[1]
+    species2 = sys.argv[2]
+
+    # convert the alignment blocks
+
+    reader = bx.align.axt.Reader(sys.stdin, support_ids=True,
+                                 species1=species1, species2=species2)
+
+    for a in reader:
+        if "id" in a.attributes:
+            block_id = a.attributes["id"]
+        else:
+            block_id = None
+        print_component_as_fasta(a.components[0], block_id)
+        print_component_as_fasta(a.components[1], block_id)
+        print()
+
+
+# TODO: this should be moved to a bx.align.fasta module
+def print_component_as_fasta(c, block_id=None):
+    header = ">%s_%s_%s" % (c.src, c.start, c.start + c.size)
+    if block_id is not None:
+        header += " " + block_id
+    print(header)
+    print(c.text)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/axt_to_fasta.xml b/tools/filters/axt_to_fasta.xml
new file mode 100644
index 0000000..e34d706
--- /dev/null
+++ b/tools/filters/axt_to_fasta.xml
@@ -0,0 +1,72 @@
+<tool id="axt_to_fasta" name="AXT to FASTA" version="1.0.0">
+  <description>Converts an AXT formatted file to FASTA format</description>
+  <edam_operations>
+    <edam_operation>operation_3434</edam_operation>
+  </edam_operations>
+  <command interpreter="python">axt_to_fasta.py $dbkey_1 $dbkey_2 < $axt_input > $out_file1</command>
+  <inputs>
+    <param format="axt" name="axt_input" type="data" label="AXT file"/>
+    <param name="dbkey_1" type="genomebuild" label="Genome"/>
+    <param name="dbkey_2" type="genomebuild" label="Genome"/>
+  </inputs>
+  <outputs>
+    <data format="fasta" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="axt_input" value="1.axt" ftype="axt" />
+      <param name="dbkey_1" value="hg17" />
+      <param name="dbkey_2" value="panTro1" />
+      <output name="out_file1" file="axt_to_fasta.dat" />
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+**IMPORTANT**: AXT formatted alignments will be phased out from Galaxy in the coming weeks. They will be replaced with pairwise MAF alignments, which are already available. To try pairwise MAF alignments use "Extract Pairwise MAF blocks" tool in *Fetch Sequences and Alignments* section.
+
+--------
+
+
+**Syntax**
+
+This tool converts an AXT formatted file to the FASTA format.
+
+- **AXT format** The alignments are produced from Blastz, an alignment tool available from Webb Miller's lab at Penn State University. The lav format Blastz output, which does not include the sequence, was converted to AXT format with lavToAxt. Each alignment block in an AXT file contains three lines: a summary line and 2 sequence lines. Blocks are separated from one another by blank lines.
+
+- **FASTA format** a text-based format for representing both nucleic and protein sequences, in which base pairs or proteins are represented using a single-letter code.
+
+  - This format contains a one-line header. It starts with a ">" symbol. The first word on this line is the name of the sequence. The rest of the line is a description of the sequence.
+  - The remaining lines contain the sequence itself.
+  - Blank lines in a FASTA file are ignored, and so are spaces or other gap symbols (dashes, underscores, periods) in a sequence.
+  - Fasta files containing multiple sequences are just the same, with one sequence listed right after another. This format is accepted for many multiple sequence alignment programs.
+
+-----
+
+**Example**
+
+- AXT format::
+
+    0 chr19 3001012 3001075 chr11 70568380 70568443 - 3500
+    TCAGCTCATAAATCACCTCCTGCCACAAGCCTGGCCTGGTCCCAGGAGAGTGTCCAGGCTCAGA
+    TCTGTTCATAAACCACCTGCCATGACAAGCCTGGCCTGTTCCCAAGACAATGTCCAGGCTCAGA
+
+    1 chr19 3008279 3008357 chr11 70573976 70574054 - 3900
+    CACAATCTTCACATTGAGATCCTGAGTTGCTGATCAGAATGGAAGGCTGAGCTAAGATGAGCGACGAGGCAATGTCACA
+    CACAGTCTTCACATTGAGGTACCAAGTTGTGGATCAGAATGGAAAGCTAGGCTATGATGAGGGACAGTGCGCTGTCACA
+
+- Convert the above file to FASTA format::
+
+    >hg16.chr19(+):3001012-3001075|hg16_0
+    TCAGCTCATAAATCACCTCCTGCCACAAGCCTGGCCTGGTCCCAGGAGAGTGTCCAGGCTCAGA
+    >mm5.chr11(-):70568380-70568443|mm5_0
+    TCTGTTCATAAACCACCTGCCATGACAAGCCTGGCCTGTTCCCAAGACAATGTCCAGGCTCAGA
+
+    >hg16.chr19(+):3008279-3008357|hg16_1
+    CACAATCTTCACATTGAGATCCTGAGTTGCTGATCAGAATGGAAGGCTGAGCTAAGATGAGCGACGAGGCAATGTCACA
+    >mm5.chr11(-):70573976-70574054|mm5_1
+    CACAGTCTTCACATTGAGGTACCAAGTTGTGGATCAGAATGGAAAGCTAGGCTATGATGAGGGACAGTGCGCTGTCACA
+
+  </help>
+</tool>
diff --git a/tools/filters/axt_to_lav.py b/tools/filters/axt_to_lav.py
new file mode 100644
index 0000000..c2a91cf
--- /dev/null
+++ b/tools/filters/axt_to_lav.py
@@ -0,0 +1,180 @@
+#!/usr/bin/env python
+"""
+Application to convert AXT file to LAV file
+-------------------------------------------
+
+:Author: Bob Harris (rsharris at bx.psu.edu)
+:Version: $Revision: $
+
+The application reads an AXT file from standard input and writes a LAV file to
+standard out;  some statistics are written to standard error.
+"""
+from __future__ import print_function
+
+import sys
+
+import bx.align.axt
+import bx.align.lav
+
+
+def usage(s=None):
+    message = """
+axt_to_lav primary_spec secondary_spec [--silent] < axt_file > lav_file
+  Each spec is of the form seq_file[:species_name]:lengths_file.
+
+  seq_file should be a format string for the file names for the individual
+  sequences, with %s to be replaced by the alignment's src field.  For example,
+  "hg18/%s.nib" would prescribe files named "hg18/chr1.nib", "hg18/chr2.nib",
+  etc.
+
+  species_name is optional.  If present, it is prepended to the alignment's src
+  field.
+
+  Lengths files provide the length of each chromosome (lav format needs this
+  information but axt file does not contain it).  The format is a series of
+  lines of the form
+    <chromosome name> <length>
+  The chromosome field in each axt block must match some <chromosome name> in
+  the lengths file.
+"""
+    if s is None:
+        sys.exit(message)
+    else:
+        sys.exit("%s\n%s" % (s, message))
+
+
+def main():
+
+    # parse the command line
+
+    primary = None
+    secondary = None
+    silent = False
+
+    # pick off options
+
+    args = sys.argv[1:]
+    seq_file2 = open(args.pop(-1), 'w')
+    seq_file1 = open(args.pop(-1), 'w')
+    lav_out = args.pop(-1)
+    axt_in = args.pop(-1)
+    while len(args) > 0:
+        arg = args.pop(0)
+        val = None
+        fields = arg.split("=", 1)
+        if len(fields) == 2:
+            arg = fields[0]
+            val = fields[1]
+            if val == "":
+                usage("missing a value in %s=" % arg)
+
+        if arg == "--silent" and val is None:
+            silent = True
+        elif primary is None and val is None:
+            primary = arg
+        elif secondary is None and val is None:
+            secondary = arg
+        else:
+            usage("unknown argument: %s" % arg)
+
+    if primary is None:
+        usage("missing primary file name and length")
+
+    if secondary is None:
+        usage("missing secondary file name and length")
+
+    try:
+        (primaryFile, primary, primaryLengths) = parse_spec(primary)
+    except ValueError:
+        usage("bad primary spec (must be seq_file[:species_name]:lengths_file)")
+
+    try:
+        (secondaryFile, secondary, secondaryLengths) = parse_spec(secondary)
+    except ValueError:
+        usage("bad secondary spec (must be seq_file[:species_name]:lengths_file)")
+
+    # read the lengths
+
+    speciesToLengths = {}
+    speciesToLengths[primary] = read_lengths(primaryLengths)
+    speciesToLengths[secondary] = read_lengths(secondaryLengths)
+
+    # read the alignments
+
+    out = bx.align.lav.Writer(open(lav_out, 'w'),
+                              attributes={ "name_format_1": primaryFile,
+                                           "name_format_2": secondaryFile })
+
+    axtsRead = 0
+    axtsWritten = 0
+    for axtBlock in bx.align.axt.Reader(
+            open(axt_in), species_to_lengths=speciesToLengths, species1=primary,
+            species2=secondary, support_ids=True):
+        axtsRead += 1
+        out.write(axtBlock)
+        primary_c = axtBlock.get_component_by_src_start(primary)
+        secondary_c = axtBlock.get_component_by_src_start(secondary)
+
+        print(">%s_%s_%s_%s" % (primary_c.src, secondary_c.strand, primary_c.start, primary_c.start + primary_c.size), file=seq_file1)
+        print(primary_c.text, file=seq_file1)
+        print(file=seq_file1)
+
+        print(">%s_%s_%s_%s" % (secondary_c.src, secondary_c.strand, secondary_c.start, secondary_c.start + secondary_c.size), file=seq_file2)
+        print(secondary_c.text, file=seq_file2)
+        print(file=seq_file2)
+        axtsWritten += 1
+
+    out.close()
+    seq_file1.close()
+    seq_file2.close()
+
+    if not silent:
+        sys.stdout.write("%d blocks read, %d written\n" % (axtsRead, axtsWritten))
+
+
+def parse_spec(spec):
+    """returns (seq_file,species_name,lengths_file)"""
+    fields = spec.split(":")
+    if len(fields) == 2:
+        return (fields[0], "", fields[1])
+    elif len(fields) == 3:
+        return (fields[0], fields[1], fields[2])
+    else:
+        raise ValueError
+
+
+def read_lengths(fileName):
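+    # Lengths file: one "<chromosome name> <length>" pair per line, e.g.
+    #   chr19  63811651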
+    chromToLength = {}
+
+    f = open(fileName, "r")
+
+    for lineNumber, line in enumerate(f):
+        line = line.strip()
+        if line == "":
+            continue
+        if line.startswith("#"):
+            continue
+
+        fields = line.split()
+        if len(fields) != 2:
+            raise Exception( "bad lengths line (%s:%d): %s" % (fileName, lineNumber, line) )
+
+        chrom = fields[0]
+        try:
+            length = int(fields[1])
+        except ValueError:
+            raise Exception( "bad lengths line (%s:%d): %s" % (fileName, lineNumber, line) )
+
+        if chrom in chromToLength:
+            raise Exception( "%s appears more than once (%s:%d): %s" % (chrom, fileName, lineNumber) )
+
+        chromToLength[chrom] = length
+
+    f.close()
+
+    return chromToLength
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/axt_to_lav.xml b/tools/filters/axt_to_lav.xml
new file mode 100644
index 0000000..ea8f428
--- /dev/null
+++ b/tools/filters/axt_to_lav.xml
@@ -0,0 +1,97 @@
+<tool id="axt_to_lav_1" name="AXT to LAV" version="1.0.0">
+  <description>Converts an AXT formatted file to LAV format</description>
+  <edam_operations>
+    <edam_operation>operation_3434</edam_operation>
+  </edam_operations>
+  <command interpreter="python">axt_to_lav.py /galaxy/data/$dbkey_1/seq/%s.nib:$dbkey_1:${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey_1}.len /galaxy/data/$dbkey_2/seq/%s.nib:$dbkey_2:${GALAXY_DATA_INDEX_DIR}/shared/ucsc/chrom/${dbkey_2}.len $align_input $lav_file $seq_file1 $seq_file2</command>
+  <inputs>
+    <param name="align_input" type="data" format="axt" label="Alignment File" optional="False"/>
+    <param name="dbkey_1" type="genomebuild" label="Genome"/>
+    <param name="dbkey_2" type="genomebuild" label="Genome"/>
+  </inputs>
+  <outputs>
+    <data name="lav_file" format="lav"/>
+    <data name="seq_file1" format="fasta" parent="lav_file"/>
+    <data name="seq_file2" format="fasta" parent="lav_file"/>
+  </outputs>
+  <help>
+
+.. class:: warningmark
+
+**IMPORTANT**: AXT formatted alignments will be phased out from Galaxy in the coming weeks. They will be replaced with pairwise MAF alignments, which are already available. To try pairwise MAF alignments use "Extract Pairwise MAF blocks" tool in *Fetch Sequences and Alignments* section.
+
+--------
+
+
+**Syntax**
+
+This tool converts an AXT formatted file to the LAV format.
+
+- **AXT format** The alignments are produced from Blastz, an alignment tool available from Webb Miller's lab at Penn State University. The lav format Blastz output, which does not include the sequence, was converted to AXT format with lavToAxt. Each alignment block in an AXT file contains three lines: a summary line and 2 sequence lines. Blocks are separated from one another by blank lines.
+
+- **LAV format** LAV is an alignment format developed by Webb Miller's group. It is the primary output format for BLASTZ.
+
+- **FASTA format** a text-based format for representing both nucleic and protein sequences, in which base pairs or proteins are represented using a single-letter code.
+
+  - This format contains a one-line header. It starts with a ">" symbol. The first word on this line is the name of the sequence. The rest of the line is a description of the sequence.
+  - The remaining lines contain the sequence itself.
+  - Blank lines in a FASTA file are ignored, and so are spaces or other gap symbols (dashes, underscores, periods) in a sequence.
+  - FASTA files containing multiple sequences follow the same layout, with one sequence listed right after another. This format is accepted by many multiple sequence alignment programs.
+
+-----
+
+**Example**
+
+- AXT format::
+
+    0 chr19 3001012 3001075 chr11 70568380 70568443 - 3500
+    TCAGCTCATAAATCACCTCCTGCCACAAGCCTGGCCTGGTCCCAGGAGAGTGTCCAGGCTCAGA
+    TCTGTTCATAAACCACCTGCCATGACAAGCCTGGCCTGTTCCCAAGACAATGTCCAGGCTCAGA
+
+    1 chr19 3008279 3008357 chr11 70573976 70574054 - 3900
+    CACAATCTTCACATTGAGATCCTGAGTTGCTGATCAGAATGGAAGGCTGAGCTAAGATGAGCGACGAGGCAATGTCACA
+    CACAGTCTTCACATTGAGGTACCAAGTTGTGGATCAGAATGGAAAGCTAGGCTATGATGAGGGACAGTGCGCTGTCACA
+
+- Convert the above file to LAV format::
+
+    #:lav
+    s {
+      "/galaxy/data/hg16/seq/chr19.nib" 1 63811651 0 1
+      "/galaxy/data/mm5/seq/chr11.nib-" 1 121648857 0 1
+    }
+    h {
+      "> hg16.chr19"
+      "> mm5.chr11 (reverse complement)"
+    }
+    a {
+      s 3500
+      b 3001012 70568380
+      e 3001075 70568443
+      l 3001012 70568380 3001075 70568443 81
+    }
+    a {
+      s 3900
+      b 3008279 70573976
+      e 3008357 70574054
+      l 3008279 70573976 3008357 70574054 78
+    }
+    #:eof
+
+- With two files in the FASTA format::
+
+    >hg16.chr19_-_3001011_3001075
+    TCAGCTCATAAATCACCTCCTGCCACAAGCCTGGCCTGGTCCCAGGAGAGTGTCCAGGCTCAGA
+
+    >hg16.chr19_-_3008278_3008357
+    CACAATCTTCACATTGAGATCCTGAGTTGCTGATCAGAATGGAAGGCTGAGCTAAGATGAGCGACGAGGCAATGTCACA
+
+**and**::
+
+    >mm5.chr11_-_70568379_70568443
+    TCTGTTCATAAACCACCTGCCATGACAAGCCTGGCCTGTTCCCAAGACAATGTCCAGGCTCAGA
+
+    >mm5.chr11_-_70573975_70574054
+    CACAGTCTTCACATTGAGGTACCAAGTTGTGGATCAGAATGGAAAGCTAGGCTATGATGAGGGACAGTGCGCTGTCACA
+  </help>
+  <code file="axt_to_lav_code.py"/>
+</tool>
diff --git a/tools/filters/axt_to_lav_code.py b/tools/filters/axt_to_lav_code.py
new file mode 100644
index 0000000..f67bb64
--- /dev/null
+++ b/tools/filters/axt_to_lav_code.py
@@ -0,0 +1,6 @@
+
+def exec_after_process(app, inp_data, out_data, param_dict, tool, stdout, stderr):
+    data = out_data["seq_file2"]
+    data.dbkey = param_dict['dbkey_2']
+    app.model.context.add( data )
+    app.model.context.flush()
diff --git a/tools/filters/bed2gff.xml b/tools/filters/bed2gff.xml
new file mode 100644
index 0000000..4734b74
--- /dev/null
+++ b/tools/filters/bed2gff.xml
@@ -0,0 +1,92 @@
+<tool id="bed2gff1" name="BED-to-GFF" version="2.0.0">
+  <description>converter</description>
+  <edam_operations>
+    <edam_operation>operation_3434</edam_operation>
+  </edam_operations>
+  <command interpreter="python">bed_to_gff_converter.py $input $out_file1</command>
+  <inputs>
+    <param format="bed" name="input" type="data" label="Convert this dataset"/>
+  </inputs>
+  <outputs>
+    <data format="gff" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="9.bed"/>
+      <output name="out_file1" file="bed2gff_out.gff"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool converts data from BED format to GFF format (scroll down for format description).
+
+--------
+
+**Example**
+
+The following data in BED format::
+
+	chr28	346187	388197	BC114771	0	+	346187	388197	0	9	144,81,115,63,155,96,134,105,112,	0,24095,26190,31006,32131,33534,36994,41793,41898,
+
+Will be converted to GFF (**note** that the start coordinate is incremented by 1)::
+
+	##gff-version 2
+	##bed_to_gff_converter.py
+
+	chr28	bed2gff	mRNA	346188	388197	0	+	.	mRNA BC114771;
+	chr28	bed2gff	exon	346188	346331	0	+	.	exon BC114771;
+	chr28	bed2gff	exon	370283	370363	0	+	.	exon BC114771;
+	chr28	bed2gff	exon	372378	372492	0	+	.	exon BC114771;
+	chr28	bed2gff	exon	377194	377256	0	+	.	exon BC114771;
+	chr28	bed2gff	exon	378319	378473	0	+	.	exon BC114771;
+	chr28	bed2gff	exon	379722	379817	0	+	.	exon BC114771;
+	chr28	bed2gff	exon	383182	383315	0	+	.	exon BC114771;
+	chr28	bed2gff	exon	387981	388085	0	+	.	exon BC114771;
+	chr28	bed2gff	exon	388086	388197	0	+	.	exon BC114771;
+
+
+------
+
+.. class:: infomark
+
+**About formats**
+
+**BED format** Browser Extensible Data format was designed at UCSC for displaying data tracks in the Genome Browser. It has three required fields and several additional optional ones:
+
+The first three BED fields (required) are::
+
+    1. chrom - The name of the chromosome (e.g. chr1, chrY_random).
+    2. chromStart - The starting position in the chromosome. (The first base in a chromosome is numbered 0.)
+    3. chromEnd - The ending position in the chromosome, plus 1 (i.e., a half-open interval).
+
+The additional BED fields (optional) are::
+
+    4. name - The name of the BED line.
+    5. score - A score between 0 and 1000.
+    6. strand - Defines the strand - either '+' or '-'.
+    7. thickStart - The starting position where the feature is drawn thickly at the Genome Browser.
+    8. thickEnd - The ending position where the feature is drawn thickly at the Genome Browser.
+    9. reserved - This should always be set to zero.
+   10. blockCount - The number of blocks (exons) in the BED line.
+   11. blockSizes - A comma-separated list of the block sizes. The number of items in this list should correspond to blockCount.
+   12. blockStarts - A comma-separated list of block starts. All of the blockStart positions should be calculated relative to chromStart. The number of items in this list should correspond to blockCount.
+   13. expCount - The number of experiments.
+   14. expIds - A comma-separated list of experiment ids. The number of items in this list should correspond to expCount.
+   15. expScores - A comma-separated list of experiment scores. All of the expScores should be relative to expIds. The number of items in this list should correspond to expCount.
+
+**GFF format** General Feature Format is a format for describing genes and other features associated with DNA, RNA and Protein sequences. GFF lines have nine tab-separated fields::
+
+    1. seqname - Must be a chromosome or scaffold.
+    2. source - The program that generated this feature.
+    3. feature - The name of this type of feature. Some examples of standard feature types are "CDS", "start_codon", "stop_codon", and "exon".
+    4. start - The starting position of the feature in the sequence. The first base is numbered 1.
+    5. end - The ending position of the feature (inclusive).
+    6. score - A score between 0 and 1000. If there is no score value, enter ".".
+    7. strand - Valid entries include '+', '-', or '.' (for don't know/care).
+    8. frame - If the feature is a coding exon, frame should be a number between 0-2 that represents the reading frame of the first base. If the feature is not a coding exon, the value should be '.'.
+    9. group - All lines with the same group are linked together into a single item.
+
+</help>
+</tool>
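
The conversion at the heart of this tool is the coordinate shift from BED's 0-based, half-open intervals to GFF's 1-based, inclusive ones. A minimal sketch (the function name is illustrative)::

    def bed_interval_to_gff(chrom_start, chrom_end):
        """Map a BED 0-based, half-open interval to a GFF 1-based, inclusive one."""
        return chrom_start + 1, chrom_end

    # e.g. the BED interval (346187, 388197) above becomes GFF (346188, 388197);
    # exons then follow from the block fields, where blockStarts are relative
    # to chromStart:
    #   exon_start = (chrom_start + 1) + block_start
    #   exon_end   = exon_start + block_size - 1
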
diff --git a/tools/filters/bed_to_bigbed.xml b/tools/filters/bed_to_bigbed.xml
new file mode 100644
index 0000000..d72858a
--- /dev/null
+++ b/tools/filters/bed_to_bigbed.xml
@@ -0,0 +1,58 @@
+<tool id="bed_to_bigBed" name="BED-to-bigBed" version="1.0.0">
+  <description>converter</description>
+  <edam_operations>
+    <edam_operation>operation_3434</edam_operation>
+  </edam_operations>
+  <command>bedToBigBed $input1 $chromInfo $out_file1
+    #if $settings.settingsType == "full":
+      -blockSize=${settings.blockSize} -itemsPerSlot=${settings.itemsPerSlot} ${settings.unc}
+    #end if
+    2>&1 || echo "Error running bedToBigBed." >&2
+  </command>
+  <requirements>
+    <requirement type="package">ucsc_tools</requirement>
+  </requirements>
+  <inputs>
+    <param format="bed" name="input1" type="data" label="Convert">
+      <validator type="unspecified_build" />
+    </param>
+    <conditional name="settings">
+      <param name="settingsType" type="select" label="Converter settings to use" help="Default settings should usually be used.">
+        <option value="preset">Default</option>
+        <option value="full">Full parameter list</option>
+      </param>
+      <when value="preset" />
+      <when value="full">
+        <param name="blockSize" size="4" type="integer" value="256" label="Items to bundle in r-tree" help="Default is 256 (blockSize)" />
+        <param name="itemsPerSlot" size="4" type="integer" value="512" label="Data points bundled at lowest level" help="Default is 512 (itemsPerSlot)" />
+        <param name="unc" type="boolean" truevalue="-unc" falsevalue="" checked="False" label="Do not use compression" help="(unc)"/>
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="bigbed" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="7.bed" dbkey="hg17" />
+      <param name="settingsType" value="full" />
+      <param name="blockSize" value="256" />
+      <param name="itemsPerSlot" value="512" />
+      <param name="unc" value="False" />
+      <output name="out_file1" file="7.bigbed"/>
+    </test>
+    <test>
+      <param name="input1" value="7.bed" dbkey="hg17" />
+      <param name="settingsType" value="preset" />
+      <output name="out_file1" file="7.bigbed"/>
+    </test>
+  </tests>
+  <help>
+
+This tool converts a **sorted** BED file into a bigBed file.
+
+Currently, the bedFields option to specify the number of non-standard fields is not supported, as it requires
+an AutoSQL file, a format Galaxy does not currently support.
+
+</help>
+</tool>
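
bedToBigBed requires its BED input to be sorted by chromosome and then numerically by start position (the UCSC documentation suggests sort -k1,1 -k2,2n). A minimal Python sketch of that sort, with illustrative file names::

    # Sort a BED file by chrom, then numeric start, as bedToBigBed expects.
    with open("in.bed") as src:
        rows = [line.rstrip("\n").split("\t") for line in src if line.strip()]
    rows.sort(key=lambda fields: (fields[0], int(fields[1])))
    with open("sorted.bed", "w") as dst:
        for fields in rows:
            dst.write("\t".join(fields) + "\n")
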
diff --git a/tools/filters/bed_to_gff_converter.py b/tools/filters/bed_to_gff_converter.py
new file mode 100644
index 0000000..8217154
--- /dev/null
+++ b/tools/filters/bed_to_gff_converter.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# This code exists in 2 places: ~/datatypes/converters and ~/tools/filters
+from __future__ import print_function
+
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    input_name = sys.argv[1]
+    output_name = sys.argv[2]
+    skipped_lines = 0
+    first_skipped_line = 0
+    out = open( output_name, 'w' )
+    out.write( "##gff-version 2\n" )
+    out.write( "##bed_to_gff_converter.py\n\n" )
+    i = 0
+    for i, line in enumerate( open( input_name ) ):
+        complete_bed = False
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( '#' ) and not line.startswith( 'track' ) and not line.startswith( 'browser' ):
+            try:
+                elems = line.split( '\t' )
+                if len( elems ) == 12:
+                    complete_bed = True
+                chrom = elems[0]
+                if complete_bed:
+                    feature = "mRNA"
+                else:
+                    try:
+                        feature = elems[3]
+                    except:
+                        feature = 'feature%d' % ( i + 1 )
+                start = int( elems[1] ) + 1
+                end = int( elems[2] )
+                try:
+                    score = elems[4]
+                except:
+                    score = '0'
+                try:
+                    strand = elems[5]
+                except:
+                    strand = '+'
+                try:
+                    group = elems[3]
+                except:
+                    group = 'group%d' % ( i + 1 )
+                if complete_bed:
+                    out.write( '%s\tbed2gff\t%s\t%d\t%d\t%s\t%s\t.\t%s %s;\n' % ( chrom, feature, start, end, score, strand, feature, group  ) )
+                else:
+                    out.write( '%s\tbed2gff\t%s\t%d\t%d\t%s\t%s\t.\t%s;\n' % ( chrom, feature, start, end, score, strand, group  ) )
+                if complete_bed:
+                    # We have all the info necessary to annotate exons for genes and mRNAs
+                    block_count = int( elems[9] )
+                    block_sizes = elems[10].split( ',' )
+                    block_starts = elems[11].split( ',' )
+                    for j in range( block_count ):
+                        exon_start = int( start ) + int( block_starts[j] )
+                        exon_end = exon_start + int( block_sizes[j] ) - 1
+                        out.write( '%s\tbed2gff\texon\t%d\t%d\t%s\t%s\t.\texon %s;\n' % ( chrom, exon_start, exon_end, score, strand, group ) )
+            except:
+                skipped_lines += 1
+                if not first_skipped_line:
+                    first_skipped_line = i + 1
+        else:
+            skipped_lines += 1
+            if not first_skipped_line:
+                first_skipped_line = i + 1
+    out.close()
+    info_msg = "%i lines converted to GFF version 2.  " % ( i + 1 - skipped_lines )
+    if skipped_lines > 0:
+        info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
+    print(info_msg)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/filters/catWrapper.py b/tools/filters/catWrapper.py
new file mode 100644
index 0000000..e45553f
--- /dev/null
+++ b/tools/filters/catWrapper.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# By Guruprasad Ananda.
+import os
+import shutil
+import sys
+
+
+def stop_err(msg):
+    sys.stderr.write(msg)
+    sys.exit()
+
+
+def main():
+    outfile = sys.argv[1]
+    infile = sys.argv[2]
+
+    if len(sys.argv) < 4:
+        shutil.copyfile(infile, outfile)
+        sys.exit()
+
+    cmdline = "cat %s " % (infile)
+    for inp in sys.argv[3:]:
+        cmdline = cmdline + inp + " "
+    cmdline = cmdline + ">" + outfile
+    # os.system() does not raise on command failure, so check its exit status.
+    if os.system(cmdline) != 0:
+        stop_err("Error encountered with cat.")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/catWrapper.xml b/tools/filters/catWrapper.xml
new file mode 100644
index 0000000..33e67e1
--- /dev/null
+++ b/tools/filters/catWrapper.xml
@@ -0,0 +1,79 @@
+<tool id="cat1" name="Concatenate datasets" version="1.0.0">
+    <description>tail-to-head</description>
+    <command interpreter="python">
+        catWrapper.py
+        $out_file1
+        $input1
+        #for $q in $queries
+            ${q.input2}
+        #end for
+    </command>
+    <inputs>
+        <param name="input1" type="data" label="Concatenate Dataset"/>
+        <repeat name="queries" title="Dataset">
+            <param name="input2" type="data" label="Select" />
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file1" format="input" metadata_source="input1"/>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input1" value="1.bed"/>
+            <param name="input2" value="2.bed"/>
+            <output name="out_file1" file="cat_wrapper_out1.bed"/>
+        </test>
+        <!--TODO: if possible, enhance the underlying test code to handle this test
+            the problem is multiple params with the same name "input2"
+        <test>
+            <param name="input1" value="1.bed"/>
+            <param name="input2" value="2.bed"/>
+            <param name="input2" value="3.bed"/>
+            <output name="out_file1" file="cat_wrapper_out2.bed"/>
+        </test>
+        -->
+    </tests>
+    <help>
+
+.. class:: warningmark
+
+**WARNING:** Be careful not to concatenate datasets of different kinds (e.g., sequences with intervals). This tool does not check if the datasets being concatenated are in the same format.
+
+-----
+
+**What it does**
+
+Concatenates datasets
+
+-----
+
+**Example**
+
+Concatenating Dataset::
+
+    chrX  151087187  151087355  A  0  -
+    chrX  151572400  151572481  B  0  +
+
+with Dataset1::
+
+    chr1  151242630  151242955  X  0  +
+    chr1  151271715  151271999  Y  0  +
+    chr1  151278832  151279227  Z  0  -
+
+and with Dataset2::
+
+    chr2  100000030  200000955  P  0  +
+    chr2  100000015  200000999  Q  0  +
+
+will result in the following::
+
+    chrX  151087187  151087355  A  0  -
+    chrX  151572400  151572481  B  0  +
+    chr1  151242630  151242955  X  0  +
+    chr1  151271715  151271999  Y  0  +
+    chr1  151278832  151279227  Z  0  -
+    chr2  100000030  200000955  P  0  +
+    chr2  100000015  200000999  Q  0  +
+
+    </help>
+</tool>
diff --git a/tools/filters/changeCase.pl b/tools/filters/changeCase.pl
new file mode 100644
index 0000000..f3aa1ae
--- /dev/null
+++ b/tools/filters/changeCase.pl
@@ -0,0 +1,58 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+my $columns = {};
+my $del = "";
+my @in = ();
+my @out = ();
+my $command = "";
+my $field = 0;
+
+# a wrapper for changing the case of columns from within galaxy
+# isaChangeCase.pl [filename] [columns] [delim] [casing] [output]
+
+die "Check arguments: $0 [filename] [columns] [delim] [casing] [output]\n" unless @ARGV == 5;
+
+# process column input
+$ARGV[1] =~ s/\s+//g;
+foreach ( split /,/, $ARGV[1] ) {
+  if (m/^c\d{1,}$/i) {
+    s/c//ig;
+    $columns->{$_} = --$_;
+  }
+}
+
+die "No columns specified, columns are not preceded with 'c', or commas are not used to separate column numbers: $ARGV[1]\n" if keys %$columns == 0;
+
+my $column_delimiters_href = {
+	'TAB' => q{\t},
+	'COMMA' => ",",
+	'DASH' => "-",
+	'UNDERSCORE' => "_",
+	'PIPE' => q{\|},
+	'DOT' => q{\.},
+	'SPACE' => q{\s+}
+};
+	
+$del = $column_delimiters_href->{$ARGV[2]};
+
+open (OUT, ">$ARGV[4]") or die "Cannot create $ARGV[4]:$!\n";
+open (IN,  "<$ARGV[0]") or die "Cannot open $ARGV[0]:$!\n";
+while (<IN>) {
+  chop;
+  @in = split /$del/; 
+  for ( my $i = 0; $i <= $#in; ++$i) {
+	if (exists $columns->{$i}) {
+		push(@out, $ARGV[3] eq 'up' ? uc($in[$i]) : lc($in[$i]));
+	} else {
+		push(@out, $in[$i]);
+	}
+  }
+  print OUT join("\t", @out), "\n";
+  @out = ();
+}
+close IN;
+
+close OUT;
diff --git a/tools/filters/changeCase.xml b/tools/filters/changeCase.xml
new file mode 100644
index 0000000..6912bdd
--- /dev/null
+++ b/tools/filters/changeCase.xml
@@ -0,0 +1,77 @@
+<tool id="ChangeCase" name="Change Case" version="1.0.0">
+  <description> of selected columns</description>
+  <stdio>
+    <exit_code range="1:" err_level="fatal" />
+  </stdio>
+  <command interpreter="perl">changeCase.pl $input "$cols" $delimiter $casing $out_file1</command>
+  <inputs>
+    <param name="input" format="txt" type="data" label="From"/>
+    <param name="cols" size="10" type="text" value="c1,c2" label="Change case of columns"/>
+    <param name="delimiter" type="select" label="Delimited by">
+      <option value="TAB">Tab</option>
+      <option value="SPACE">Whitespace</option>
+      <option value="DOT">Dot</option>
+      <option value="COMMA">Comma</option>
+      <option value="DASH">Dash</option>
+      <option value="UNDERSCORE">Underscore</option>
+      <option value="PIPE">Pipe</option>
+    </param>
+    <param name="casing" type="select" label="To">
+      <option value="up">Upper case</option>
+      <option value="lo">Lower case</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="tabular" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="1.txt" ftype="txt"/>
+      <param name="cols" value="c1"/>
+      <param name="delimiter" value="SPACE"/>
+      <param name="casing" value="up"/>
+      <output name="out_file1" file="changeCase_out1.tabular"/>
+    </test>
+    <test>
+      <param name="input" value="1.bed" ftype="bed"/>
+      <param name="cols" value="c1"/>
+      <param name="delimiter" value="TAB"/>
+      <param name="casing" value="up"/>
+      <output name="out_file1" file="changeCase_out2.tabular"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+**This tool breaks column assignments.** To re-establish column assignments, run the tool and click on the pencil icon in the resulting history item.
+
+.. class:: warningmark
+
+The format of the resulting dataset from this tool is always tabular.
+
+-----
+
+**What it does**
+
+This tool selects specified columns from a dataset and converts the values of those columns to upper or lower case.
+
+- Columns are specified as **c1**, **c2**, and so on.
+- Columns can be specified in any order (e.g., **c2,c1,c6**)
+
+-----
+
+**Example**
+
+Changing columns 1 and 3 (delimited by comma) to upper case in::
+
+  apple,is,good
+  windows,is,bad
+
+will result in::
+
+  APPLE is GOOD
+  WINDOWS is BAD
+
+  </help>
+</tool>
diff --git a/tools/filters/commWrapper.pl b/tools/filters/commWrapper.pl
new file mode 100644
index 0000000..7fde4f1
--- /dev/null
+++ b/tools/filters/commWrapper.pl
@@ -0,0 +1,19 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+use File::Temp "tempfile";
+#use POSIX qw(tmpnam);
+
+my ($input1, $input2, $mode, $out_file1) = @ARGV;
+
+my ($fh, $file1) = tempfile();
+my ($fh1,$file2) = tempfile(); 
+
+`sort $input1 > $file1`;
+`sort $input2 > $file2`;
+`comm $mode $file1 $file2 > $out_file1`;
+`rm $file1 ; rm $file2`;
+
+
+
diff --git a/tools/filters/commWrapper.xml b/tools/filters/commWrapper.xml
new file mode 100644
index 0000000..dfe1012
--- /dev/null
+++ b/tools/filters/commWrapper.xml
@@ -0,0 +1,38 @@
+<tool id="Comm1" name="Find Similarities and Differences" version="1.0.0">
+  <description>between two datasets</description>
+  <command interpreter="perl">commWrapper.pl $input1 $input2 $mode $out_file1</command>
+  <inputs>
+    <param format="tabular" name="input1" type="data" label="Compare Dataset1"/>
+    <param format="tabular" name="input2" type="data" label="with Dataset2"/>
+    <param name="mode" type="select" label="And find">
+      <option value="-23">Lines unique to Dataset1</option>
+      <option value="-12">Lines shared between Dataset1 and Dataset2</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input1" />
+  </outputs>
+  <help>
+This tool is based on the UNIX shell command comm. It compares two datasets and returns similarities or differences. For example, if you have two datasets::
+  
+ a  1
+ b  2
+ c  3
+
+and::
+
+ a  1
+ f  6
+ h  8
+
+Using this tool with the **Lines unique to Dataset1** option will return::
+
+ b  2
+ c  3
+
+If you use the **Lines shared between Dataset1 and Dataset2** option, the output will look like this::
+
+ a  1
+
+</help>
+</tool>
\ No newline at end of file
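
Since comm requires sorted input, the wrapper sorts both files before comparing. A rough line-based equivalent of the two modes in Python, using sets (order is not preserved, and the file names are illustrative)::

    # Approximate comm -23 (lines unique to dataset 1) and comm -12 (shared).
    with open("dataset1.tabular") as f1, open("dataset2.tabular") as f2:
        lines1, lines2 = set(f1), set(f2)
    unique_to_1 = lines1 - lines2   # comm -23
    shared = lines1 & lines2        # comm -12
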
diff --git a/tools/filters/compare.xml b/tools/filters/compare.xml
new file mode 100644
index 0000000..667d416
--- /dev/null
+++ b/tools/filters/compare.xml
@@ -0,0 +1,79 @@
+<tool id="comp1" name="Compare two Datasets" version="1.0.2">
+  <description>to find common or distinct rows</description>
+  <command interpreter="python">joinWrapper.py $input1 $input2 $field1 $field2 $mode $out_file1</command>
+  <inputs>
+    <param format="tabular" name="input1" type="data" label="Compare"/>
+    <param name="field1" label="Using column" type="data_column" data_ref="input1">
+        <validator type="no_options" message="Invalid column choice. Please try again after editing metadata of your input dataset by clicking on the pencil icon next to it."/>
+    </param>
+    <param format="tabular" name="input2" type="data" label="against" />
+    <param name="field2" label="and column" type="data_column" data_ref="input2">
+            <validator type="no_options" message="Invalid column choice. Please try again after editing metadata of your input dataset by clicking on the pencil icon next to it."/>
+    </param>
+    <param name="mode" type="select" label="To find" help="See examples below for explanation of these options">
+      <option value="N">Matching rows of 1st dataset</option>
+      <option value="V">Non Matching rows of 1st dataset</option>
+    </param>
+  </inputs>
+  <outputs>
+     <data format="input" name="out_file1" metadata_source="input1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="input2" value="2.bed"/>
+      <param name="field1" value="2"/>
+      <param name="field2" value="2"/>
+      <param name="mode" value="N"/>
+      <output name="out_file1" file="fs-compare.dat"/>
+    </test>
+    <!--test case with duplicated key values-->
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="input2" value="3.bed"/>
+      <param name="field1" value="1"/>
+      <param name="field2" value="1"/>
+      <param name="mode" value="V"/>
+      <output name="out_file1" file="fs-compare-2.dat"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**Syntax**
+
+This tool finds lines in one dataset that HAVE or DO NOT HAVE a common field with another dataset.
+
+-----
+
+**Example**
+
+If this is **First dataset**::
+
+  chr1 10 20 geneA 
+  chr1 50 80 geneB
+  chr5 10 40 geneL
+
+and this is **Second dataset**::
+
+  geneA tumor-suppressor
+  geneB Foxp2
+  geneC Gnas1
+  geneE INK4a
+
+Finding lines of the **First dataset** whose 4th column matches the 1st column of the **Second dataset** yields::
+
+  chr1 10 20 geneA 
+  chr1 50 80 geneB
+
+Conversely, using the **Non Matching rows of 1st dataset** option on the same fields will yield::
+
+  chr5 10 40 geneL
+
+</help>
+</tool>
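
joinWrapper.py itself is not part of this hunk; a minimal sketch of the comparison described above, where mode "N" keeps matching rows of the first dataset and "V" keeps the non-matching ones (column numbers are 1-based, as in the UI)::

    def compare(path1, col1, path2, col2, mode):
        # Collect the key column of dataset 2, then stream dataset 1.
        with open(path2) as f2:
            keys = set(line.rstrip("\n").split("\t")[col2 - 1]
                       for line in f2 if line.strip())
        with open(path1) as f1:
            for line in f1:
                fields = line.rstrip("\n").split("\t")
                matches = len(fields) >= col1 and fields[col1 - 1] in keys
                if matches == (mode == "N"):
                    yield line
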
diff --git a/tools/filters/condense_characters.pl b/tools/filters/condense_characters.pl
new file mode 100644
index 0000000..0e22a02
--- /dev/null
+++ b/tools/filters/condense_characters.pl
@@ -0,0 +1,105 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# condenses all consecutive characters of one type
+# convert_characters.pl [input] [character] [output]
+
+die "Check arguments" unless @ARGV == 3;
+
+my $inputfile = $ARGV[0];
+my $character = $ARGV[1];
+my $outputfile = $ARGV[2];
+
+
+my $convert_from;
+my $convert_to;
+
+
+if ($character eq "s")
+{
+    $convert_from = '\s';
+}
+elsif ($character eq "T")
+{
+    $convert_from = '\t';
+}
+elsif ($character eq "Sp")
+{
+    $convert_from = " ";
+}
+elsif ($character eq "Dt")
+{
+    $convert_from = '\.';
+}
+elsif ($character eq "C")
+{
+    $convert_from = ",";
+}
+elsif ($character eq "D")
+{
+    $convert_from = "-";
+}
+elsif ($character eq "U")
+{
+    $convert_from = "_";
+}
+elsif ($character eq "P")
+{
+    $convert_from = '\|';
+}
+else
+{
+    die "Invalid value specified for convert from\n";
+}
+
+
+if ($character eq "T")
+{
+    $convert_to = "\t";
+}
+elsif ($character eq "Sp")
+{
+    $convert_to = " ";
+}
+elsif ($character eq "Dt")
+{
+    $convert_to = ".";
+}
+elsif ($character eq "C")
+{
+    $convert_to = ",";
+}
+elsif ($character eq "D")
+{
+    $convert_to = "-";
+}
+elsif ($character eq "U")
+{
+    $convert_to = "_";
+}
+elsif ($character eq "P")
+{
+    $convert_to = "|";
+}
+else
+{
+    die "Invalid value specified for convert to\n";
+}
+
+my $fhIn;
+open ($fhIn, "< $inputfile") or die "Cannot open source file";
+
+my $fhOut;
+open ($fhOut, "> $outputfile") or die "Cannot create output file";
+
+while (<$fhIn>)
+{
+    my $thisLine = $_;
+    chomp $thisLine;
+    $thisLine =~ s/${convert_from}+/$convert_to/g;
+    print $fhOut $thisLine,"\n";    
+}
+close ($fhIn) or die "Cannot close source file";
+close ($fhOut) or die "Cannot close output file";
diff --git a/tools/filters/condense_characters.xml b/tools/filters/condense_characters.xml
new file mode 100644
index 0000000..f792851
--- /dev/null
+++ b/tools/filters/condense_characters.xml
@@ -0,0 +1,48 @@
+<tool id="Condense characters1" name="Condense" version="1.0.0">
+  <description>consecutive characters</description>
+  <command interpreter="perl">condense_characters.pl $input $character $out_file1</command>
+  <inputs>
+<!--    <display>condense all consecutive $character from $input</display> -->
+    <param name="character" type="select" label="Condense all consecutive">
+      <option value="T">Tabs</option>
+      <option value="Sp">Spaces</option>
+      <option value="Dt">Dots</option>
+      <option value="C">Commas</option>
+      <option value="D">Dashes</option>
+      <option value="U">Underscores</option>
+      <option value="P">Pipes</option>
+    </param>
+    <param format="txt" name="input" type="data" label="in this Dataset"/>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="character" value="T"/>
+      <param name="input" value="1.bed"/>
+      <output name="out_file1" file="eq-condense.dat"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool condenses all consecutive characters of a specified type.
+
+-----
+
+**Example**
+
+- Input file::
+
+    geneX,,,10,,,,,20
+    geneY,,5,,,,,12,15,9,
+
+- Condense all consecutive commas. The above file will be converted into::
+
+    geneX,10,20
+    geneY,5,12,15,9
+
+</help>
+</tool>
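
The Perl script above reduces each run of the chosen character to a single occurrence with one global substitution; the same idea in Python, shown here for commas::

    import re

    # "geneX,,,10,,,,,20" -> "geneX,10,20"
    condensed = re.sub(r",+", ",", "geneX,,,10,,,,,20")
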
diff --git a/tools/filters/convert_characters.pl b/tools/filters/convert_characters.pl
new file mode 100644
index 0000000..9dd098d
--- /dev/null
+++ b/tools/filters/convert_characters.pl
@@ -0,0 +1,101 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# converts all characters of one type into another 
+# convert_characters.pl [input] [convert_from] [convert_to] [output]
+
+die "Check arguments\n" unless @ARGV == 4;
+
+my $inputfile = $ARGV[0];
+my $convert_from = $ARGV[1];
+my $convert_to = $ARGV[2];
+my $outputfile = $ARGV[3];
+
+if ($convert_from eq "s")
+{
+    $convert_from = '\s';
+}
+elsif ($convert_from eq "T")
+{
+    $convert_from = '\t';
+}
+elsif ($convert_from eq "Sp")
+{
+    $convert_from = '\s';
+}
+elsif ($convert_from eq "Dt")
+{
+    $convert_from = '\.';
+}
+elsif ($convert_from eq "C")
+{
+    $convert_from = ",";
+}
+elsif ($convert_from eq "D")
+{
+    $convert_from = "-";
+}
+elsif ($convert_from eq "U")
+{
+    $convert_from = "_";
+}
+elsif ($convert_from eq "P")
+{
+    $convert_from = '\|';
+}
+else
+{
+    die "Invalid value specified for convert from\n";
+}
+
+
+if ($convert_to eq "T")
+{
+    $convert_to = "\t";
+}
+elsif ($convert_to eq "Sp")
+{
+    $convert_to = " ";
+}
+elsif ($convert_to eq "Dt")
+{
+    $convert_to = ".";
+}
+elsif ($convert_to eq "C")
+{
+    $convert_to = ",";
+}
+elsif ($convert_to eq "D")
+{
+    $convert_to = "-";
+}
+elsif ($convert_to eq "U")
+{
+    $convert_to = "_";
+}
+elsif ($convert_to eq "P")
+{
+    $convert_to = "|";
+}
+else
+{
+    die "Invalid value specified for convert to\n";
+}
+
+my $fhIn;
+open ($fhIn, "< $inputfile") or die "Cannot open source file";
+
+my $fhOut;
+open ($fhOut, "> $outputfile") or die "Cannot create output file";
+
+while (<$fhIn>)
+{
+    my $thisLine = $_;
+    chomp $thisLine;
+    $thisLine =~ s/${convert_from}+/$convert_to/g;
+    print $fhOut $thisLine,"\n";    
+}
+close ($fhIn) or die "Cannot close source file\n";
+close ($fhOut) or die "Cannot close output file\n";
diff --git a/tools/filters/convert_characters.py b/tools/filters/convert_characters.py
new file mode 100644
index 0000000..efbb154
--- /dev/null
+++ b/tools/filters/convert_characters.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# By, Guruprasad Ananda.
+from __future__ import print_function
+
+import optparse
+import re
+
+
+def __main__():
+    parser = optparse.OptionParser()
+    parser.add_option('--strip', action='store_true',
+                      help='strip leading and trailing whitespaces')
+    parser.add_option('--condense', action='store_true',
+                      help='condense consecutive delimiters')
+    (options, args) = parser.parse_args()
+    if len(args) != 3:
+        parser.error("usage: convert_characters.py infile from_char outfile")
+
+    char_dict = {
+        'T': '\t',
+        's': r'\s',
+        'Dt': r'\.',
+        'C': ',',
+        'D': '-',
+        'U': '_',
+        'P': r'\|',
+        'Co': ':',
+        'Sc': ';'
+    }
+    # regexp to match 1 or more occurrences.
+    from_char = args[1]
+    from_ch = char_dict[from_char]
+    if options.condense:
+        from_ch += '+'
+
+    skipped = 0
+    with open(args[0], 'rU') as fin:
+        with open(args[2], 'w') as fout:
+            for line in fin:
+                if options.strip:
+                    line = line.strip()
+                else:
+                    line = line.rstrip('\n')
+                try:
+                    fout.write("%s\n" % (re.sub(from_ch, '\t', line)))
+                except:
+                    skipped += 1
+
+    if skipped:
+        print("Skipped %d lines as invalid." % skipped)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/filters/convert_characters.xml b/tools/filters/convert_characters.xml
new file mode 100644
index 0000000..bde0972
--- /dev/null
+++ b/tools/filters/convert_characters.xml
@@ -0,0 +1,77 @@
+<tool id="Convert characters1" name="Convert" version="1.0.0">
+  <description>delimiters to TAB</description>
+  <command interpreter="python">
+convert_characters.py
+#if $strip
+  --strip
+#end if
+#if $condense
+  --condense
+#end if
+$input $convert_from $out_file1
+  </command>
+  <inputs>
+    <param name="convert_from" type="select" label="Convert all">
+      <option value="s">Whitespaces</option>
+      <option value="T">Tabs</option>
+      <!--<option value="Sp">Spaces</option>-->
+      <option value="Dt">Dots</option>
+      <option value="C">Commas</option>
+      <option value="D">Dashes</option>
+      <option value="U">Underscores</option>
+      <option value="P">Pipes</option>
+      <option value="Co">Colons</option>
+      <option value="Sc">Semicolons</option>
+    </param>
+    <param format="txt" name="input" type="data" label="in Dataset"/>
+    <param name="strip" type="boolean" checked="true" label="Strip leading and trailing whitespaces" />
+    <param name="condense" type="boolean" checked="true" label="Condense consecutive delimiters in one TAB" />
+  </inputs>
+  <outputs>
+    <data format="tabular" name="out_file1" />
+  </outputs>
+  <stdio>
+    <exit_code range="1:" level="fatal" />
+  </stdio>
+  <tests>
+    <test>
+      <param name="convert_from" value="s"/>
+      <param name="input" value="1.bed"/>
+      <param name="strip" value="true" />
+      <param name="condense" value="true" />
+      <output name="out_file1" file="eq-convert.dat"/>
+    </test>
+    <test>
+      <param name="convert_from" value="s"/>
+      <param name="input" value="a.txt"/>
+      <param name="strip" value="true" />
+      <param name="condense" value="true" />
+      <output name="out_file1" file="a.tab"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+Converts all delimiters of a specified type into TABs. Consecutive delimiters can be condensed into a single TAB.
+
+-----
+
+**Example**
+
+- Input file::
+
+    chrX||151283558|151283724|NM_000808_exon_8_0_chrX_151283559_r|0|-
+    chrX|151370273|151370486|NM_000808_exon_9_0_chrX_151370274_r|0|-
+    chrX|151559494|151559583|NM_018558_exon_1_0_chrX_151559495_f|0|+
+    chrX|151564643|151564711|NM_018558_exon_2_0_chrX_151564644_f||||0|+
+
+- Converting all pipe delimiters of the above file to TABs and condensing consecutive delimiters will yield::
+
+    chrX  151283558  151283724  NM_000808_exon_8_0_chrX_151283559_r  0  -
+    chrX  151370273  151370486  NM_000808_exon_9_0_chrX_151370274_r  0  -
+    chrX  151559494  151559583  NM_018558_exon_1_0_chrX_151559495_f  0  +
+    chrX  151564643  151564711  NM_018558_exon_2_0_chrX_151564644_f  0  +
+
+  </help>
+</tool>
diff --git a/tools/filters/cutWrapper.pl b/tools/filters/cutWrapper.pl
new file mode 100644
index 0000000..4843052
--- /dev/null
+++ b/tools/filters/cutWrapper.pl
@@ -0,0 +1,87 @@
+#!/usr/bin/perl -w
+
+use strict;
+use warnings;
+
+my @columns = ();
+my $del = "";
+my @in = ();
+my @out = ();
+my $command = "";
+my $field = 0;
+my $start = 0;
+my $end = 0;
+my $i = 0;
+
+# a wrapper for cut for use in galaxy
+# cutWrapper.pl [filename] [columns] [delim] [output]
+
+die "Check arguments\n" unless @ARGV == 4;
+
+$ARGV[1] =~ s/\s+//g;
+foreach ( split /,/, $ARGV[1] ) {
+  if (m/^c\d{1,}$/i) {
+    push (@columns, $_);
+    $columns[@columns-1] =~s/c//ig;
+  } elsif (m/^c\d{1,}-c\d{1,}$/i) {
+    ($start, $end)  = split(/-/, $_);
+    $start =~ s/c//ig;
+    $end =~ s/c//ig;
+    for $i ($start .. $end) {
+       push (@columns, $i); 
+    }
+  }
+}
+
+die "No columns specified, columns are not preceded with 'c', or commas are not used to separate column numbers: $ARGV[1]\n" if @columns == 0;
+
+my $column_delimiters_href = {
+  'T' => q{\t},
+  'C' => ",",
+  'D' => "-",
+  'U' => "_",
+  'P' => q{\|},
+  'Dt' => q{\.},
+  'Sp' => q{\s+}
+};
+
+$del = $column_delimiters_href->{$ARGV[2]};
+
+open (OUT, ">$ARGV[3]") or die "Cannot create $ARGV[3]:$!\n";
+open (IN,  "<$ARGV[0]") or die "Cannot open $ARGV[0]:$!\n";
+
+while (my $line=<IN>) {
+   if ($line =~ /^#/) {
+     #Ignore comment lines
+   } else {
+     chop($line);
+     @in = split(/$del/, $line);
+     foreach $field (@columns) {
+       if (defined($in[$field-1])) {
+         push(@out, $in[$field-1]);
+       } else {
+         push(@out, ".");
+       }
+     }    
+     print OUT join("\t", @out), "\n";
+     @out = ();
+   }
+}
+
+close IN;
+
+close OUT;
+    
diff --git a/tools/filters/cutWrapper.xml b/tools/filters/cutWrapper.xml
new file mode 100644
index 0000000..0ba38b6
--- /dev/null
+++ b/tools/filters/cutWrapper.xml
@@ -0,0 +1,211 @@
+<tool id="Cut1" name="Cut" version="1.0.2">
+  <description>columns from a table</description>
+  <command interpreter="perl">cutWrapper.pl "${input}" "${columnList}" "${delimiter}" "${out_file1}"</command>
+  <inputs>
+    <param name="columnList" type="text" value="c1,c2" label="Cut columns"/>
+    <param name="delimiter" type="select" label="Delimited by">
+      <option value="T">Tab</option>
+      <option value="Sp">Whitespace</option>
+      <option value="Dt">Dot</option>
+      <option value="C">Comma</option>
+      <option value="D">Dash</option>
+      <option value="U">Underscore</option>
+      <option value="P">Pipe</option>
+    </param>
+    <param format="txt" name="input" type="data" label="From"/>
+  </inputs>
+  <outputs>
+    <data format="tabular" name="out_file1" >
+      <actions>
+        <conditional name="delimiter">
+          <when value="T">
+            <conditional name="input">
+              <when datatype_isinstance="interval">
+                <action type="format" default="tabular">
+                  <option type="from_param" name="columnList" column="0" offset="0"> <!-- chromCol is 1-->
+
+                    <filter type="insert_column" column="0" value="interval"/>
+
+                    <filter type="insert_column" ref="columnList" /> <!-- startCol -->
+
+                    <filter type="insert_column" ref="columnList" /> <!-- endCol -->
+
+                    <filter type="multiple_splitter" column="1" separator=","/>
+                    <filter type="column_strip" column="1"/> <!-- get rid of all external whitespace -->
+                    <filter type="string_function" column="1" name="lower" />
+                    <filter type="param_value" column="1" value="^c\d{1,}$" compare="re_search" keep="True"/>
+                    <filter type="column_strip" column="1" strip="c"/> <!-- get rid of c's  -->
+                    <filter type="boolean" column="1" cast="int" />
+
+                    <filter type="multiple_splitter" column="2" separator=","/>
+                    <filter type="column_strip" column="2"/> <!-- get rid of all external whitespace -->
+                    <filter type="string_function" column="2" name="lower" />
+                    <filter type="param_value" column="2" value="^c\d{1,}$" compare="re_search" keep="True"/>
+                    <filter type="column_strip" column="2" strip="c"/> <!-- get rid of c's  -->
+                    <filter type="boolean" column="2" cast="int" />
+
+                    <filter type="multiple_splitter" column="3" separator=","/>
+                    <filter type="column_strip" column="3"/> <!-- get rid of all external whitespace -->
+                    <filter type="string_function" column="3" name="lower" />
+                    <filter type="param_value" column="3" value="^c\d{1,}$" compare="re_search" keep="True"/>
+                    <filter type="column_strip" column="3" strip="c"/> <!-- get rid of c's  -->
+                    <filter type="boolean" column="3" cast="int" />
+
+                    <filter type="metadata_value" ref="input" name="chromCol" column="1" />
+                    <filter type="metadata_value" ref="input" name="startCol" column="2" />
+                    <filter type="metadata_value" ref="input" name="endCol" column="3" />
+
+                  </option>
+                </action>
+
+                <conditional name="out_file1">
+                  <when datatype_isinstance="interval">
+                    <action type="metadata" name="chromCol">
+                      <option type="from_param" name="columnList" column="0" offset="0"> <!-- chromCol is 0-->
+                        <filter type="multiple_splitter" column="0" separator=","/>
+                        <filter type="column_strip" column="0"/> <!-- get rid of all external whitespace -->
+                        <filter type="string_function" column="0" name="lower" />
+                        <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+                        <filter type="column_strip" column="0" strip="c"/> <!-- get rid of c's  -->
+                        <filter type="insert_column" value="1" iterate="True" column="0"/>
+                        <filter type="boolean" column="1" cast="int" />
+                        <filter type="metadata_value" ref="input" name="chromCol" column="1" />
+                      </option>
+                    </action>
+
+                    <action type="metadata" name="startCol">
+                      <option type="from_param" name="columnList" column="0" offset="0"> <!-- startCol is 0-->
+                        <filter type="multiple_splitter" column="0" separator=","/>
+                        <filter type="column_strip" column="0"/> <!-- get rid of all external whitespace -->
+                        <filter type="string_function" column="0" name="lower" />
+                        <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+                        <filter type="column_strip" column="0" strip="c"/> <!-- get rid of c's  -->
+                        <filter type="insert_column" value="1" iterate="True" column="0"/>
+                        <filter type="boolean" column="1" cast="int" />
+                        <filter type="metadata_value" ref="input" name="startCol" column="1" />
+                      </option>
+                    </action>
+
+                    <action type="metadata" name="endCol">
+                      <option type="from_param" name="columnList" column="0" offset="0"> <!-- endCol is 0-->
+                        <filter type="multiple_splitter" column="0" separator=","/>
+                        <filter type="column_strip" column="0"/> <!-- get rid of all external whitespace -->
+                        <filter type="string_function" column="0" name="lower" />
+                        <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+                        <filter type="column_strip" column="0" strip="c"/> <!-- get rid of c's  -->
+                        <filter type="insert_column" value="1" iterate="True" column="0"/>
+                        <filter type="boolean" column="1" cast="int" />
+                        <filter type="metadata_value" ref="input" name="endCol" column="1" />
+                      </option>
+                    </action>
+
+                    <action type="metadata" name="nameCol" default="0">
+                      <option type="from_param" name="columnList" column="0" offset="0"> <!-- nameCol is 0-->
+                        <filter type="multiple_splitter" column="0" separator=","/>
+                        <filter type="column_strip" column="0"/> <!-- get rid of all external whitespace -->
+                        <filter type="string_function" column="0" name="lower" />
+                        <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+                        <filter type="column_strip" column="0" strip="c"/> <!-- get rid of c's  -->
+                        <filter type="insert_column" value="1" iterate="True" column="0"/>
+                        <filter type="boolean" column="1" cast="int" />
+                        <filter type="metadata_value" ref="input" name="nameCol" column="1" />
+                      </option>
+                    </action>
+
+                    <action type="metadata" name="strandCol" default="0">
+                      <option type="from_param" name="columnList" column="0" offset="0"> <!-- strandCol is 0-->
+                        <filter type="multiple_splitter" column="0" separator=","/>
+                        <filter type="column_strip" column="0"/> <!-- get rid of all external whitespace -->
+                        <filter type="string_function" column="0" name="lower" />
+                        <filter type="param_value" column="0" value="^c\d{1,}$" compare="re_search" keep="True"/>
+                        <filter type="column_strip" column="0" strip="c"/> <!-- get rid of c's  -->
+                        <filter type="insert_column" value="1" iterate="True" column="0"/>
+                        <filter type="boolean" column="1" cast="int" />
+                        <filter type="metadata_value" ref="input" name="strandCol" column="1" />
+                      </option>
+                    </action>
+                  </when>
+                </conditional>
+
+              </when>
+            </conditional>
+          </when>
+        </conditional>
+      </actions>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="columnList" value="c1,c4,c2,c3"/>
+      <param name="delimiter" value="T"/>
+      <param name="input" value="1.bed"/>
+      <output name="out_file1" file="eq-cut.dat"/>
+    </test>
+    <test>
+      <param name="columnList" value="c1,c4,c2-c3" />
+      <param name="delimiter" value="T" />
+      <param name="input" value="1.bed" />
+      <output name="out_file1" file="eq-cut.dat" />
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+**WARNING: This tool breaks column assignments.** To re-establish column assignments, run the tool and click on the pencil icon in the latest history item.
+
+.. class:: infomark
+
+The output of this tool is always in tabular format (e.g., if your original delimiters are commas, they will be replaced with tabs). For example:
+
+  Cutting columns 1 and 3 from::
+
+     apple,is,good
+     windows,is,bad
+
+  will give::
+
+    apple   good
+    windows bad
+
+-----
+
+**What it does**
+
+This tool selects (cuts out) specified columns from the dataset.
+
+- Columns are specified as **c1**, **c2**, and so on. Column count begins with **1**
+- Columns can be specified in any order (e.g., **c2,c1,c6**)
+- If you specify more columns than are actually present, the missing values will be filled with dots
+
+-----
+
+**Example**
+
+Input dataset (six columns: c1, c2, c3, c4, c5, and c6)::
+
+   chr1 10   1000  gene1 0 +
+   chr2 100  1500  gene2 0 +
+
+**cut** on columns "**c1,c4,c6**" will return::
+
+   chr1 gene1 +
+   chr2 gene2 +
+
+**cut** on columns "**c6,c5,c4,c1**" will return::
+
+   + 0 gene1 chr1
+   + 0 gene2 chr2
+
+**cut** on columns "**c1-c3**" will return::
+
+   chr1 10   1000
+   chr2 100  1500
+
+
+**cut** on columns "**c8,c7,c4**" will return::
+
+   . . gene1
+   . . gene2
+</help>
+</tool>
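
The column list accepts both single columns (c1) and ranges (c2-c3). A minimal sketch of expanding such a spec into 0-based indices, mirroring the wrapper's parsing (the function name is illustrative)::

    def parse_column_list(spec):
        """Expand e.g. "c1,c4,c2-c3" into 0-based indices [0, 3, 1, 2]."""
        indices = []
        for token in spec.replace(" ", "").lower().split(","):
            if "-" in token:
                start, end = (int(part.lstrip("c")) for part in token.split("-"))
                indices.extend(range(start - 1, end))
            elif token.startswith("c"):
                indices.append(int(token[1:]) - 1)
        return indices
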
diff --git a/tools/filters/fileGrep.xml b/tools/filters/fileGrep.xml
new file mode 100644
index 0000000..5b248c7
--- /dev/null
+++ b/tools/filters/fileGrep.xml
@@ -0,0 +1,42 @@
+<tool id="fileGrep1" name="Match" version="1.0.0">
+  <description>a column from one Query against another Query</description>
+  <command>cut -f $col $input1 | grep -f - $match $input2 > $out_file1</command>
+  <inputs>
+    <param name="col" size="2" type="text" value="1" label="Match content of column"/>
+    <param format="tabular" name="input1" type="data" label="From Query1"/>
+    <param format="tabular" name="input2" type="data" label="Against Query2"/>
+    <param name="match" type="select" label="and return rows that">
+      <option value="">Match</option>
+      <option value="-v">Do not match</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input2" />
+  </outputs>
+  <help>
+This tool is based on the UNIX command grep with the -f option. It matches the content of one query against another. For example, assume you have two queries: one that contains EST accession numbers and some other information::
+
+  AA001229	12	12
+  A001501	7	7
+  AA001641	6	6
+  AA001842	6	6
+  AA002047	6	6
+  AA004638	3	3
+
+and another that is a typical BED file describing genomic location of some ESTs::
+
+  chr7 115443235 115443809 CA947954_exon_0_0_chr7_115443236_f 0	+
+  chr7 115443236 115443347 DB338189_exon_0_0_chr7_115443237_f 0	+
+  chr7 115443347 115443768 DB338189_exon_1_0_chr7_115443348_f 0	+
+  chr7 115443239 115443802 AA001842_exon_0_0_chr7_115443240_f 0	+
+  chr7 115443243 115443347 DB331869_exon_0_0_chr7_115443244_f 0	+
+  chr7 115443347 115443373 DB331869_exon_1_0_chr7_115443348_f 0	+
+
+Using this tool you will be able to tell which ESTs in Query1 are also present in Query2. The tool will output this::
+
+  chr7 115443239 115443802 AA001842_exon_0_0_chr7_115443240_f 0	+
+
+if the **Match** option is chosen.
+
+</help>
+</tool>
\ No newline at end of file
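
Note that grep -f treats each value cut from Query1 as a pattern that may match anywhere in a line of Query2, unlike the whole-line comparison of comm or the column-to-column comparison of the Compare tool. A rough Python equivalent, treating the patterns as plain substrings (the file names are illustrative)::

    # Rough equivalent of: cut -f1 query1.tabular | grep -f - query2.tabular
    with open("query1.tabular") as f1:
        patterns = [line.split("\t")[0].strip() for line in f1 if line.strip()]
    with open("query2.tabular") as f2:
        matches = [line for line in f2 if any(p in line for p in patterns)]
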
diff --git a/tools/filters/fixedValueColumn.pl b/tools/filters/fixedValueColumn.pl
new file mode 100644
index 0000000..8ebdd29
--- /dev/null
+++ b/tools/filters/fixedValueColumn.pl
@@ -0,0 +1,34 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# fixedValueColumn.pl $input $out_file1 "expression" "iterate [yes|no]"
+
+my ($input, $out_file1, $expression, $iterate) = @ARGV;
+my $i = 0;
+my $numeric = 0;
+
+die "Check arguments\n" unless @ARGV == 4;
+
+open (DATA, "<$input") or die "Cannot open $input:$!\n";
+open (OUT,  ">$out_file1") or die "Cannot create $out_file1:$!\n";
+
+if ($expression =~ m/^\d+$/) {
+  $numeric = 1;
+  $i = $expression;
+}
+
+while (<DATA>) {
+  chop;
+  if ($iterate eq "no") {
+    print OUT "$_\t$expression\n";
+  } else {
+    print OUT "$_\t$i\n" if $numeric == 1;
+    print OUT "$_\t$expression-$i\n" if $numeric == 0;
+    ++$i;
+  }
+}
+
+close DATA;
+close OUT;
diff --git a/tools/filters/fixedValueColumn.xml b/tools/filters/fixedValueColumn.xml
new file mode 100644
index 0000000..953e4a6
--- /dev/null
+++ b/tools/filters/fixedValueColumn.xml
@@ -0,0 +1,61 @@
+<tool id="addValue" name="Add column" version="1.0.0">
+  <description>to an existing dataset</description>
+  <command interpreter="perl">fixedValueColumn.pl $input $out_file1 "$exp" $iterate</command>
+  <inputs>
+     <param name="exp" size="20" type="text" value="1" label="Add this value"/>
+    <param format="tabular" name="input" type="data" label="to Dataset" help="Dataset missing? See TIP below" />
+    <param name="iterate" type="select" label="Iterate?">
+      <option value="no">NO</option>
+      <option value="yes">YES</option>
+    </param>    
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="exp" value="1"/>
+      <param name="input" value="1.bed"/>
+      <param name="iterate" value="no"/>
+      <output name="out_file1" file="eq-addvalue.dat"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**What it does**
+
+You can enter any value and it will be added as a new column to your dataset
+
+-----
+
+**Example**
+
+If your original data looks like this::
+
+    chr1 10  100 geneA
+    chr2 200 300 geneB
+    chr2 400 500 geneC
+
+Typing **+** in the text box will generate::
+
+    chr1 10  100 geneA +
+    chr2 200 300 geneB +
+    chr2 400 500 geneC +
+     
+
+You can also add line numbers by selecting **Iterate: YES**. In this case, if you enter **1** in the text box, you will get::
+
+    chr1 10  100 geneA 1
+    chr2 200 300 geneB 2
+    chr2 400 500 geneC 3
+
+
+
+</help>
+</tool>
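
In iterate mode, the appended value is seeded from the expression when it is numeric and otherwise becomes expression-N; a condensed sketch of that rule (the function name is illustrative)::

    def added_value(expression, row_number):
        """Value appended to 0-based row `row_number` in iterate mode."""
        if expression.isdigit():
            return str(int(expression) + row_number)
        return "%s-%d" % (expression, row_number)
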
diff --git a/tools/filters/gff/extract_GFF_Features.py b/tools/filters/gff/extract_GFF_Features.py
new file mode 100644
index 0000000..0396d12
--- /dev/null
+++ b/tools/filters/gff/extract_GFF_Features.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# Guruprasad Ananda
+"""
+Extract features from GFF file.
+
+usage: %prog input1 out_file1 column features
+"""
+from __future__ import print_function
+
+import sys
+
+from bx.cookbook import doc_optparse
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def main():
+    # Parsing Command Line here
+    options, args = doc_optparse.parse( __doc__ )
+
+    try:
+        inp_file, out_file, column, features = args
+    except:
+        stop_err( "One or more arguments are missing or invalid.\nUsage: prog input output column features" )
+    try:
+        column = int( column )
+    except:
+        stop_err( "Column %s is an invalid column." % column )
+
+    if features is None:
+        stop_err( "Column %d has no features to display, select another column." % ( column + 1 ) )
+
+    fo = open( out_file, 'w' )
+    for i, line in enumerate( open( inp_file ) ):
+        line = line.rstrip( '\r\n' )
+        if line and line.startswith( '#' ):
+            # Keep valid comment lines in the output
+            fo.write( "%s\n" % line )
+        else:
+            try:
+                if line.split( '\t' )[column] in features.split( ',' ):
+                    fo.write( "%s\n" % line )
+            except:
+                pass
+    fo.close()
+
+    print('Column %d features: %s' % ( column + 1, features ))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/gff/extract_GFF_Features.xml b/tools/filters/gff/extract_GFF_Features.xml
new file mode 100644
index 0000000..69c62c3
--- /dev/null
+++ b/tools/filters/gff/extract_GFF_Features.xml
@@ -0,0 +1,114 @@
+<tool id="Extract_features1" name="Extract features" version="1.0.0">
+  <description>from GFF data</description>
+  <command interpreter="python">extract_GFF_Features.py $input1 $out_file1 ${column_choice.col} ${column_choice.feature}</command>
+  <inputs>
+    <param format="gff" name="input1" type="data" label="Select GFF data"/>
+    <conditional name="column_choice">
+      <param name="col" type="select" label="From">
+        <option value="0" selected="true">Column 1 / Sequence name</option>
+        <option value="1">Column 2 / Source</option>
+        <option value="2">Column 3 / Feature</option>
+        <option value="6">Column 7 / Strand</option>
+        <option value="7">Column 8 / Frame</option>
+      </param>
+      <when value="0">
+        <param name="feature" type="select" multiple="true" label="Extract features" help="Multi-select list - hold the appropriate key while clicking to select multiple columns">
+          <options from_dataset="input1">
+            <column name="name" index="0"/>
+            <column name="value" index="0"/>
+            <filter type="unique_value" name="unique" column="0"/>
+          </options>
+        </param>
+      </when>
+      <when value="1">
+        <param name="feature" type="select" multiple="true" label="Extract features" help="Multi-select list - hold the appropriate key while clicking to select multiple columns">
+          <options from_dataset="input1">
+            <column name="name" index="1"/>
+            <column name="value" index="1"/>
+            <filter type="unique_value" name="unique" column="1"/>
+          </options>
+        </param>
+      </when>
+      <when value="2">
+        <param name="feature" type="select" multiple="true" label="Extract features" help="Multi-select list - hold the appropriate key while clicking to select multiple columns">
+          <options from_dataset="input1">
+            <column name="name" index="2"/>
+            <column name="value" index="2"/>
+            <filter type="unique_value" name="unique" column="2"/>
+          </options>
+        </param>
+      </when>
+      <when value="6">
+        <param name="feature" type="select" multiple="true" label="Extract features" help="Multi-select list - hold the appropriate key while clicking to select multiple columns">
+          <options from_dataset="input1">
+            <column name="name" index="6"/>
+            <column name="value" index="6"/>
+            <filter type="unique_value" name="unique" column="6"/>
+          </options>
+        </param>
+      </when>
+      <when value="7">
+        <param name="feature" type="select" multiple="true" label="Extract features" help="Multi-select list - hold the appropriate key while clicking to select multiple columns">
+          <options from_dataset="input1">
+            <column name="name" index="7"/>
+            <column name="value" index="7"/>
+            <filter type="unique_value" name="unique" column="7"/>
+          </options>
+        </param>
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input1"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="5.gff"/>
+      <param name="col" value="0" />
+      <param name="feature" value="chr5,chr6,chr7,chr8" />
+      <output name="out_file1" file="Extract_features1_out.gff"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool extracts selected features from GFF data.
+
+-----
+
+**Example**
+
+Selecting **promoter** from the following GFF data::
+
+    chr22  GeneA  enhancer  10000000  10001000  500  +  .  TGA
+    chr22  GeneA  promoter  10010000  10010100  900  +  .  TGA
+    chr22  GeneB  promoter  10020000  10025000  400  -  .  TGB
+    chr22  GeneB  CCDS2220  10030000  10065000  800  -  .  TGB
+
+will produce the following output::
+
+    chr22  GeneA  promoter  10010000  10010100  900  +  .  TGA
+    chr22  GeneB  promoter  10020000  10025000  400  -  .  TGB
+
+----
+
+.. class:: infomark
+
+**About formats**
+
+**GFF format** General Feature Format is a format for describing genes and other features associated with DNA, RNA and Protein sequences. GFF lines have nine tab-separated fields::
+
+    1. seqname - Must be a chromosome or scaffold.
+    2. source - The program that generated this feature.
+    3. feature - The name of this type of feature. Some examples of standard feature types are "CDS", "start_codon", "stop_codon", and "exon".
+    4. start - The starting position of the feature in the sequence. The first base is numbered 1.
+    5. end - The ending position of the feature (inclusive).
+    6. score - A score between 0 and 1000. If there is no score value, enter ".".
+    7. strand - Valid entries include '+', '-', or '.' (for don't know/care).
+    8. frame - If the feature is a coding exon, frame should be a number between 0-2 that represents the reading frame of the first base. If the feature is not a coding exon, the value should be '.'.
+    9. group - All lines with the same group are linked together into a single item.
+
+
+  </help>
+</tool>
diff --git a/tools/filters/gff/gff_filter_by_attribute.py b/tools/filters/gff/gff_filter_by_attribute.py
new file mode 100644
index 0000000..605add3
--- /dev/null
+++ b/tools/filters/gff/gff_filter_by_attribute.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+# This tool takes a GFF file as input and filters its lines based on attribute values.
+# The tool will skip over invalid lines within the file, informing the user about the number of lines skipped.
+# TODO: much of this code is copied from the Filter1 tool (filtering.py in tools/stats/). The commonalities should be
+# abstracted and leveraged in each filtering tool.
+from __future__ import division, print_function
+
+import sys
+from ast import Module, parse, walk
+from json import loads
+
+AST_NODE_TYPE_WHITELIST = [
+    'Expr', 'Load', 'Str', 'Num', 'BoolOp', 'Compare', 'And', 'Eq', 'NotEq',
+    'Or', 'GtE', 'LtE', 'Lt', 'Gt', 'BinOp', 'Add', 'Div', 'Sub', 'Mult', 'Mod',
+    'Pow', 'LShift', 'RShift', 'BitAnd', 'BitOr', 'BitXor', 'UnaryOp', 'Invert',
+    'Not', 'NotIn', 'In', 'Is', 'IsNot', 'List', 'Index', 'Subscript',
+    'Name',
+]
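+# For example, parse("c1=='chr1' and c3>5") produces only node classes from this
+# whitelist (Expr, BoolOp, And, Compare, Name, Load, Str, Num, Eq, Gt), so the
+# expression is accepted, while parse("eval('1+1')") introduces a Call node,
+# which is not whitelisted, so the expression is rejected.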
+
+
+BUILTIN_AND_MATH_FUNCTIONS = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'.split('|')
+STRING_AND_LIST_METHODS = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
+VALID_FUNCTIONS = BUILTIN_AND_MATH_FUNCTIONS + STRING_AND_LIST_METHODS
+# Name blacklist isn't strictly needed - but provides extra peace of mind.
+NAME_BLACKLIST = ["exec", "eval", "globals", "locals", "__import__", "__builtins__"]
+
+
+def __check_name( ast_node ):
+    name = ast_node.id
+    return name not in NAME_BLACKLIST
+
+
+def check_simple_name( text ):
+    """
+
+    >>> check_simple_name("col_name")
+    True
+    >>> check_simple_name("c1=='chr1' and c3-c2>=2000 and c6=='+'")
+    False
+    >>> check_simple_name("eval('1+1')")
+    False
+    >>> check_simple_name("import sys")
+    False
+    >>> check_simple_name("[].__str__")
+    False
+    >>> check_simple_name("__builtins__")
+    False
+    >>> check_simple_name("'x' in globals")
+    False
+    >>> check_simple_name("'x' in [1,2,3]")
+    False
+    >>> check_simple_name("c3=='chr1' and c5>5")
+    False
+    >>> check_simple_name("c3=='chr1' and d5>5")
+    False
+    >>> check_simple_name("c3=='chr1' and c5>5 or exec")
+    False
+    >>> check_simple_name("type(c1) != type(1)")
+    False
+    >>> check_simple_name("c1.split(',')[1] == '1'")
+    False
+    >>> check_simple_name("exec 1")
+    False
+    >>> check_simple_name("str(c2) in [\\\"a\\\",\\\"b\\\"]")
+    False
+    >>> check_simple_name("__import__('os').system('touch /tmp/OOPS')")
+    False
+    """
+    try:
+        module = parse( text )
+    except SyntaxError:
+        return False
+
+    if not isinstance(module, Module):
+        return False
+    statements = module.body
+    if len( statements ) != 1:
+        return False
+    expression = statements[0]
+    if expression.__class__.__name__ != 'Expr':
+        return False
+
+    for ast_node in walk( expression ):
+        ast_node_class = ast_node.__class__.__name__
+        if ast_node_class not in ["Expr", "Name", "Load"]:
+            return False
+
+        if ast_node_class == "Name" and not __check_name(ast_node):
+            return False
+
+    return True
+
+
+def check_expression( text ):
+    """
+
+    >>> check_expression("c1=='chr1' and c3-c2>=2000 and c6=='+'")
+    True
+    >>> check_expression("eval('1+1')")
+    False
+    >>> check_expression("import sys")
+    False
+    >>> check_expression("[].__str__")
+    False
+    >>> check_expression("__builtins__")
+    False
+    >>> check_expression("'x' in globals")
+    False
+    >>> check_expression("'x' in [1,2,3]")
+    True
+    >>> check_expression("c3=='chr1' and c5>5")
+    True
+    >>> check_expression("c3=='chr1' and d5>5")
+    True
+    >>> check_expression("c3=='chr1' and c5>5 or exec")
+    False
+    >>> check_expression("type(c1) != type(1)")
+    False
+    >>> check_expression("c1.split(',')[1] == '1'")
+    False
+    >>> check_expression("exec 1")
+    False
+    >>> check_expression("str(c2) in [\\\"a\\\",\\\"b\\\"]")
+    False
+    >>> check_expression("__import__('os').system('touch /tmp/OOPS')")
+    False
+    """
+    try:
+        module = parse( text )
+    except SyntaxError:
+        return False
+
+    if not isinstance(module, Module):
+        return False
+    statements = module.body
+    if len( statements ) != 1:
+        return False
+    expression = statements[0]
+    if expression.__class__.__name__ != 'Expr':
+        return False
+
+    for ast_node in walk( expression ):
+        ast_node_class = ast_node.__class__.__name__
+
+        # Toss out everything that is not a "simple" expression,
+        # imports, error handling, etc...
+        if ast_node_class not in AST_NODE_TYPE_WHITELIST:
+            return False
+
+        if ast_node_class == "Name" and not __check_name(ast_node):
+            return False
+
+    return True
+
+#
+# Helper functions.
+#
+def get_operands( filter_condition ):
+    # Note that the order of all_operators is important
+    items_to_strip = ['+', '-', '**', '*', '//', '/', '%', '<<', '>>', '&', '|', '^', '~', '<=', '<', '>=', '>', '==', '!=', '<>', ' and ', ' or ', ' not ', ' is ', ' is not ', ' in ', ' not in ']
+    for item in items_to_strip:
+        if filter_condition.find( item ) >= 0:
+            filter_condition = filter_condition.replace( item, ' ' )
+    operands = set( filter_condition.split( ' ' ) )
+    return operands
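+# For example (illustrative):
+#     get_operands( "c1=='chr1' and c3>5" )  ->  set( ['c1', "'chr1'", 'c3', '5'] )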
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit( 1 )
+
+
+def check_for_executable( text, description='' ):
+    # Attempt to determine if the condition includes executable stuff and, if so, exit.
+    secured = dir()
+    operands = get_operands( text )
+    for operand in operands:
+        try:
+            int( operand )
+        except ValueError:
+            if operand in secured:
+                stop_err( "Illegal value '%s' in %s '%s'" % ( operand, description, text ) )
+
+
+#
+# Process inputs.
+#
+in_fname = sys.argv[1]
+out_fname = sys.argv[2]
+cond_text = sys.argv[3]
+attribute_types = loads( sys.argv[4] )
+
+# Convert types from str to type objects.
+for name, a_type in attribute_types.items():
+    check_for_executable(a_type)
+    if not check_simple_name( a_type ):
+        stop_err("Problem with attribute type [%s]" % a_type)
+    attribute_types[ name ] = eval( a_type )
+
+# Unescape if input has been escaped
+mapped_str = {
+    '__lt__': '<',
+    '__le__': '<=',
+    '__eq__': '==',
+    '__ne__': '!=',
+    '__gt__': '>',
+    '__ge__': '>=',
+    '__sq__': '\'',
+    '__dq__': '"',
+}
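+# For example, a condition passed in as "conf_lo__gt__0" (illustrative attribute
+# name) is restored to "conf_lo>0" before it is validated and used.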
+for key, value in mapped_str.items():
+    cond_text = cond_text.replace( key, value )
+
+# Attempt to determine if the condition includes executable stuff and, if so, exit.
+check_for_executable( cond_text, 'condition')
+
+if not check_expression(cond_text):
+    stop_err( "Illegal/invalid in condition '%s'" % ( cond_text ) )
+
+# Prepare the column variable names and wrappers for column data types. Only
+# prepare columns up to largest column in condition.
+attrs, type_casts = [], []
+for name, attr_type in attribute_types.items():
+    attrs.append( name )
+    type_cast = "get_value('%(name)s', attribute_types['%(name)s'], attribute_values)" % ( {'name': name} )
+    type_casts.append( type_cast )
+
+attr_str = ', '.join( attrs )    # 'c1, c2, c3, c4'
+type_cast_str = ', '.join( type_casts )  # 'str(c1), int(c2), int(c3), str(c4)'
+wrap = "%s = %s" % ( attr_str, type_cast_str )
+
+# Stats
+skipped_lines = 0
+first_invalid_line = 0
+invalid_line = None
+lines_kept = 0
+total_lines = 0
+out = open( out_fname, 'wt' )
+
+
+# Helper function to safely get and type cast a value in a dict.
+def get_value(name, a_type, values_dict):
+    if name in values_dict:
+        return a_type( values_dict[ name ] )
+    else:
+        return None
+
+
+# Read and filter input file, skipping invalid lines
+code = '''
+for i, line in enumerate( open( in_fname ) ):
+    total_lines += 1
+    line = line.rstrip( '\\r\\n' )
+    if not line or line.startswith( '#' ):
+        skipped_lines += 1
+        if not invalid_line:
+            first_invalid_line = i + 1
+            invalid_line = line
+        continue
+    try:
+        # Place attribute values into variables with attribute
+        # name; type casting is done as well.
+        elems = line.split( '\t' )
+        attribute_values = {}
+        for name_value_pair in elems[8].split(";"):
+            pair = name_value_pair.strip().split(" ")
+            if len( pair ) < 2:
+                continue
+            name = pair[0].strip()
+            if name == '':
+                continue
+            # Need to strip double quote from value and typecast.
+            attribute_values[name] = pair[1].strip(" \\"")
+        %s
+        if %s:
+            lines_kept += 1
+            print( line, file=out )
+    except Exception as e:
+        print( e )
+        skipped_lines += 1
+        if not invalid_line:
+            first_invalid_line = i + 1
+            invalid_line = line
+''' % ( wrap, cond_text )
+
+valid_filter = True
+try:
+    exec(code)
+except Exception as e:
+    out.close()
+    if str( e ).startswith( 'invalid syntax' ):
+        valid_filter = False
+        stop_err( 'Filter condition "%s" likely invalid. See tool tips, syntax and examples.' % cond_text )
+    else:
+        stop_err( str( e ) )
+
+if valid_filter:
+    out.close()
+    valid_lines = total_lines - skipped_lines
+    print('Filtering with %s, ' % ( cond_text ))
+    if valid_lines > 0:
+        print('kept %4.2f%% of %d lines.' % ( 100.0 * lines_kept / valid_lines, total_lines ))
+    else:
+        print('Possible invalid filter condition "%s" or non-existent column referenced. See tool tips, syntax and examples.' % cond_text)
+    if skipped_lines > 0:
+        print('Skipped %d invalid lines starting at line #%d: "%s"' % ( skipped_lines, first_invalid_line, invalid_line ))
diff --git a/tools/filters/gff/gff_filter_by_attribute.xml b/tools/filters/gff/gff_filter_by_attribute.xml
new file mode 100644
index 0000000..2ef27f4
--- /dev/null
+++ b/tools/filters/gff/gff_filter_by_attribute.xml
@@ -0,0 +1,54 @@
+<tool id="gff_filter_by_attribute" name="Filter GFF data by attribute" version="0.1.1">
+  <description>using simple expressions</description>
+  <command interpreter="python">
+    gff_filter_by_attribute.py $input $out_file1 "$cond" '${input.metadata.attribute_types}'
+  </command>
+  <inputs>
+    <param format="gff" name="input" type="data" label="Filter" help="Dataset missing? See TIP below."/>
+    <param name="cond" size="40" type="text" value="gene_id=='uc002loc.1'" label="With following condition" help="Double equal signs, ==, must be used as shown above. To filter for an arbitrary string, use the Select tool.">
+      <validator type="empty_field" message="Enter a valid filtering condition, see syntax and examples below."/>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <tests>
+    <test>
+        <param name="input" value="gff_filter_attr_in1.gff"/>
+        <param name="cond" value="conf_lo>0"/>
+        <output name="out_file1" file="gff_filter_by_attribute_out1.gff"/>
+    </test>
+    <test>
+        <param name="input" value="gff_filter_attr_in1.gff"/>
+        <param name="cond" value="conf_lo==0 or conf_hi>125"/>
+        <output name="out_file1" file="gff_filter_by_attribute_out2.gff"/>
+    </test>
+  </tests>
+
+  <help>
+
+.. class:: warningmark
+
+Double equal signs, ==, must be used as *"equal to"* (e.g., **c1 == 'chr22'**)
+
+.. class:: infomark
+
+**TIP:** Attempting to apply a filtering condition may throw exceptions if the data type (e.g., string, integer) in every line of the attribute being filtered is not appropriate for the condition (e.g., attempting certain numerical calculations on strings).  If an exception is thrown when applying the condition to a line, that line is skipped as invalid for the filter condition.  The number of invalid skipped lines is documented in the resulting history item as a "Condition/data issue".
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**Syntax**
+
+The filter tool allows you to restrict the dataset using simple conditional statements.
+
+- Make sure that multi-character operators contain no white space ( e.g., **<=** is valid while **< =** is not valid )
+- When using 'equal-to' operator **double equal sign '==' must be used** ( e.g., **attribute_name=='chr1'** )
+- Non-numerical values must be included in single or double quotes ( e.g., **attribute_name=='XX22'** )
+- You can combine multiple conditional statements using **and** or **or** ( e.g., **attribute_name=='XX22' or attribute_name=='XX21'** )
+
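+-----
+
+**Example**
+
+Assuming the attributes include a numeric **conf_lo** attribute (illustrative data), the condition **conf_lo>0** would keep only the second line of::
+
+    chr22  source  exon  1000  2000  .  +  .  gene_id "GeneA"; conf_lo "0";
+    chr22  source  exon  3000  4000  .  +  .  gene_id "GeneB"; conf_lo "0.05";
+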
+</help>
+</tool>
diff --git a/tools/filters/gff/gff_filter_by_feature_count.py b/tools/filters/gff/gff_filter_by_feature_count.py
new file mode 100644
index 0000000..8b2ff94
--- /dev/null
+++ b/tools/filters/gff/gff_filter_by_feature_count.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python
+"""
+Filter a GFF file using a criterion based on per-transcript feature counts.
+
+Usage:
+%prog input_name output_name feature_name condition
+"""
+from __future__ import print_function
+
+import sys
+from ast import Module, parse, walk
+
+from bx.intervals.io import GenomicInterval
+
+from galaxy.datatypes.util.gff_util import GFFReaderWrapper
+
+AST_NODE_TYPE_WHITELIST = [
+    'Expr', 'Load', 'Str', 'Num', 'BoolOp', 'Compare', 'And', 'Eq', 'NotEq',
+    'Or', 'GtE', 'LtE', 'Lt', 'Gt', 'BinOp', 'Add', 'Div', 'Sub', 'Mult', 'Mod',
+    'Pow', 'LShift', 'RShift', 'BitAnd', 'BitOr', 'BitXor', 'UnaryOp', 'Invert',
+    'Not', 'NotIn', 'In', 'Is', 'IsNot', 'List', 'Index', 'Subscript',
+    'Name',
+]
+
+
+BUILTIN_AND_MATH_FUNCTIONS = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'.split('|')
+STRING_AND_LIST_METHODS = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
+VALID_FUNCTIONS = BUILTIN_AND_MATH_FUNCTIONS + STRING_AND_LIST_METHODS
+# Name blacklist isn't strictly needed - but provides extra peace of mind.
+NAME_BLACKLIST = ["exec", "eval", "globals", "locals", "__import__", "__builtins__"]
+
+
+def __check_name( ast_node ):
+    name = ast_node.id
+    return name not in NAME_BLACKLIST
+
+
+def check_expression( text ):
+    """
+
+    >>> check_expression("c1=='chr1' and c3-c2>=2000 and c6=='+'")
+    True
+    >>> check_expression("eval('1+1')")
+    False
+    >>> check_expression("import sys")
+    False
+    >>> check_expression("[].__str__")
+    False
+    >>> check_expression("__builtins__")
+    False
+    >>> check_expression("'x' in globals")
+    False
+    >>> check_expression("'x' in [1,2,3]")
+    True
+    >>> check_expression("c3=='chr1' and c5>5")
+    True
+    >>> check_expression("c3=='chr1' and d5>5")
+    True
+    >>> check_expression("c3=='chr1' and c5>5 or exec")
+    False
+    >>> check_expression("type(c1) != type(1)")
+    False
+    >>> check_expression("c1.split(',')[1] == '1'")
+    False
+    >>> check_expression("exec 1")
+    False
+    >>> check_expression("str(c2) in [\\\"a\\\",\\\"b\\\"]")
+    False
+    >>> check_expression("__import__('os').system('touch /tmp/OOPS')")
+    False
+    """
+    try:
+        module = parse( text )
+    except SyntaxError:
+        return False
+
+    if not isinstance(module, Module):
+        return False
+    statements = module.body
+    if len( statements ) != 1:
+        return False
+    expression = statements[0]
+    if expression.__class__.__name__ != 'Expr':
+        return False
+
+    for ast_node in walk( expression ):
+        ast_node_class = ast_node.__class__.__name__
+
+        # Toss out everything that is not a "simple" expression,
+        # imports, error handling, etc...
+        if ast_node_class not in AST_NODE_TYPE_WHITELIST:
+            return False
+
+        if ast_node_class == "Name" and not __check_name(ast_node):
+            return False
+
+    return True
+
+
+# Valid operators, ordered so that complex operators (e.g. '>=') are
+# recognized before simple operators (e.g. '>')
+ops = [
+    '>=',
+    '<=',
+    '<',
+    '>',
+    '==',
+    '!='
+]
+
+# Escape sequences for valid operators.
+mapped_ops = {
+    '__ge__': ops[0],
+    '__le__': ops[1],
+    '__lt__': ops[2],
+    '__gt__': ops[3],
+    '__eq__': ops[4],
+    '__ne__': ops[5],
+}
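+# For example, a condition passed in as "__ge__5" is restored to ">=5" before
+# it is validated and used.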
+
+
+def __main__():
+    # Get args.
+    input_name = sys.argv[1]
+    output_name = sys.argv[2]
+    feature_name = sys.argv[3]
+    condition = sys.argv[4]
+
+    # Unescape operations in condition str.
+    for key, value in mapped_ops.items():
+        condition = condition.replace( key, value )
+
+    # Error checking: condition should be of the form <operator><number>
+    for op in ops:
+        if op in condition:
+            empty, number_str = condition.split( op )
+            try:
+                number = float( number_str )
+            except ValueError:
+                number = None
+            if empty != "" or number is None:
+                print("Invalid condition: %s, cannot filter." % condition, file=sys.stderr)
+                return
+            break
+
+    # Do filtering.
+    kept_features = 0
+    skipped_lines = 0
+    first_skipped_line = 0
+    out = open( output_name, 'w' )
+    i = -1  # Remains -1 if the input yields no features.
+    for i, feature in enumerate( GFFReaderWrapper( open( input_name ) ) ):
+        if not isinstance( feature, GenomicInterval ):
+            continue
+        count = 0
+        for interval in feature.intervals:
+            if interval.feature == feature_name:
+                count += 1
+        eval_text = '%s %s' % ( count, condition )
+        if not check_expression(eval_text):
+            print("Invalid condition: %s, cannot filter." % condition, file=sys.stderr)
+            sys.exit(1)
+
+        if eval(eval_text):
+            # Keep feature.
+            for interval in feature.intervals:
+                out.write( "\t".join(interval.fields) + '\n' )
+            kept_features += 1
+
+    # i is 0-based, so add 1 to get the number of features processed.
+    i += 1
+
+    # Clean up.
+    out.close()
+    pct_kept = ( float( kept_features ) / i * 100.0 ) if i else 0.0
+    info_msg = "%i of %i features kept (%.2f%%) using condition %s.  " % \
+        ( kept_features, i, pct_kept, feature_name + condition )
+    if skipped_lines > 0:
+        info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
+    print(info_msg)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/filters/gff/gff_filter_by_feature_count.xml b/tools/filters/gff/gff_filter_by_feature_count.xml
new file mode 100644
index 0000000..5861036
--- /dev/null
+++ b/tools/filters/gff/gff_filter_by_feature_count.xml
@@ -0,0 +1,53 @@
+<tool id="gff_filter_by_feature_count" name="Filter GFF data by feature count" version="0.1.1">
+  <description>using simple expressions</description>
+  <command interpreter="python">
+    gff_filter_by_feature_count.py $input_file1 $out_file1 "$feature_name" "$cond"
+  </command>
+  <inputs>
+    <param format="gff" name="input_file1" type="data" label="Filter"/>
+    <param name="feature_name" type="select" label="Using feature name">
+        <options from_dataset="input_file1">
+            <column name="name" index="2"/>
+            <column name="value" index="2"/>
+            <filter type="unique_value" name="unique" column="2"/>
+        </options>
+    </param>
+    <param name="cond" size="40" type="text" value=">0" label="With following condition">
+      <validator type="empty_field" message="Enter a valid filtering condition, see syntax and examples below."/>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input_file1"/>
+  </outputs>
+  <tests>
+      <!-- Test GTF filtering. -->
+      <test>
+          <param name="input_file1" value="gops_subtract_in1.gff"/>
+          <param name="feature_name" value="exon"/>
+          <param name="cond" value=">1"/>
+          <output name="out_file1" file="gff_filter_by_feature_count_out1.gff"/>
+      </test>
+      <!-- Test GFF3 filtering. -->
+      <test>
+          <param name="input_file1" value="5.gff3"/>
+          <param name="feature_name" value="HSP"/>
+          <param name="cond" value=">=5"/>
+          <output name="out_file1" file="gff_filter_by_feature_count_out2.gff"/>
+      </test>
+  </tests>
+
+  <help>
+
+
+.. class:: infomark
+
+Valid comparison operators are: >, <, >=, <=, !=, and ==
+
+-----
+
+**Syntax**
+
+The filter tool allows you to restrict the dataset based on transcripts' feature counts.
+
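+-----
+
+**Example**
+
+For instance, applying the condition **>1** to feature name **exon** on the following illustrative GTF data::
+
+    chr1  test  exon  100  200  .  +  .  gene_id "g1"; transcript_id "t1";
+    chr1  test  exon  300  400  .  +  .  gene_id "g1"; transcript_id "t1";
+    chr1  test  exon  500  600  .  +  .  gene_id "g2"; transcript_id "t2";
+
+keeps only the two lines of transcript **t1**, because **t2** has a single exon.
+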
+</help>
+</tool>
diff --git a/tools/filters/gff/gtf_filter_by_attribute_values_list.py b/tools/filters/gff/gtf_filter_by_attribute_values_list.py
new file mode 100644
index 0000000..9ff4c53
--- /dev/null
+++ b/tools/filters/gff/gtf_filter_by_attribute_values_list.py
@@ -0,0 +1,71 @@
+#
+# Filters a GTF file using a list of attribute values. Attribute values must
+# be in the first column of the file; subsequent columns are ignored.
+# Usage:
+# python gtf_filter_by_attribute_values_list.py <gtf_file> <attribute_name> <ids_file> <output_file>
+#
+from __future__ import print_function
+
+import sys
+
+
+def parse_gff_attributes( attr_str ):
+    """
+    Parses a GFF/GTF attribute string and returns a dictionary of name-value
+    pairs. The general format for a GFF3 attributes string is
+        name1=value1;name2=value2
+    The general format for a GTF attribute string is
+        name1 "value1" ; name2 "value2"
+    The general format for a GFF attribute string is a single string that
+    denotes the interval's group; in this case, the method returns a dictionary
+    with a single key-value pair whose key is 'group'.
+    """
+    attributes_list = attr_str.split(";")
+    attributes = {}
+    for name_value_pair in attributes_list:
+        # Try splitting by space and, if necessary, by '=' sign.
+        pair = name_value_pair.strip().split(" ")
+        if len( pair ) == 1:
+            pair = name_value_pair.strip().split("=")
+        if len( pair ) == 1:
+            # Could not split for some reason -- raise exception?
+            continue
+        name = pair[0].strip()
+        if name == '':
+            continue
+        # Need to strip double quote from values
+        value = pair[1].strip(" \"")
+        attributes[ name ] = value
+
+    if len( attributes ) == 0:
+        # Could not split attributes string, so entire string must be
+        # 'group' attribute. This is the case for strictly GFF files.
+        attributes['group'] = attr_str
+    return attributes
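+# Illustrative examples:
+#     parse_gff_attributes( 'gene_id "GeneA"; transcript_id "TxA.1";' )
+#         -> {'gene_id': 'GeneA', 'transcript_id': 'TxA.1'}
+#     parse_gff_attributes( 'ID=GeneA;Name=GeneA' )
+#         -> {'ID': 'GeneA', 'Name': 'GeneA'}
+#     parse_gff_attributes( 'GeneA' )
+#         -> {'group': 'GeneA'}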
+
+
+def gff_filter( gff_file, attribute_name, ids_file, output_file ):
+    # Put ids in dict for quick lookup.
+    ids_dict = {}
+    for line in open( ids_file ):
+        ids_dict[ line.split('\t')[0].strip() ] = True
+
+    # Filter GFF file using ids.
+    output = open( output_file, 'w' )
+    for line in open( gff_file ):
+        fields = line.split( '\t' )
+        attributes = parse_gff_attributes( fields[8] )
+        if ( attribute_name in attributes ) and ( attributes[ attribute_name ] in ids_dict ):
+            output.write( line )
+    output.close()
+
+
+if __name__ == "__main__":
+    # Handle args.
+    if len( sys.argv ) != 5:
+        print("usage: python %s <gff_file> <attribute_name> <ids_file> <output_file>" % sys.argv[0], file=sys.stderr)
+        sys.exit( -1 )
+    gff_file, attribute_name, ids_file, output_file = sys.argv[1:]
+    gff_filter( gff_file, attribute_name, ids_file, output_file )
diff --git a/tools/filters/gff/gtf_filter_by_attribute_values_list.xml b/tools/filters/gff/gtf_filter_by_attribute_values_list.xml
new file mode 100644
index 0000000..0f5d0da
--- /dev/null
+++ b/tools/filters/gff/gtf_filter_by_attribute_values_list.xml
@@ -0,0 +1,42 @@
+<tool id="gtf_filter_by_attribute_values_list" name="Filter GTF data by attribute values_list" version="0.1">
+    <description></description>
+    <command interpreter="python">
+      gtf_filter_by_attribute_values_list.py $input $attribute_name $ids $output
+    </command>
+    <inputs>
+        <param format="gtf" name="input" type="data" label="Filter"/>
+        <param name="attribute_name" type="select" label="Using attribute name">
+            <option value="gene_id">gene_id</option>
+            <option value="transcript_id">transcript_id</option>
+            <option value="p_id">p_id</option>
+            <option value="tss_id">tss_id</option>
+        </param>
+        <param format="tabular,txt" name="ids" type="data" label="And attribute values"/>
+    </inputs>
+    <outputs>
+        <data format="gtf" name="output"/>
+    </outputs>
+    <tests>
+        <!-- Test filtering with a simple list of values. -->
+        <test>
+            <param name="input" value="gops_subtract_in1.gff"/>
+            <param name="attribute_name" value="gene_id"/>
+            <param name="ids" value="gtf_filter_by_attribute_values_list_in1.txt"/>
+            <output name="output" file="gtf_filter_by_attribute_values_list_out1.gtf"/>
+        </test>
+        <!-- Test filtering with a more complex tabular file. -->
+        <test>
+            <param name="input" value="gtf_filter_by_attribute_values_list_in2.gtf"/>
+            <param name="attribute_name" value="transcript_id"/>
+            <param name="ids" value="gtf_filter_by_attribute_values_list_in3.tabular"/>
+            <output name="output" file="gtf_filter_by_attribute_values_list_out2.gtf"/>
+        </test>
+    </tests>
+    <help>
+
+This tool filters a GTF file using a list of attribute values. The attribute values are
+taken from the first column in the file; additional columns in the file are ignored. An example
+use of this tool is to filter a GTF file using a list of transcript_ids or gene_ids obtained from Cuffdiff.
+
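+-----
+
+**Example**
+
+An illustrative attribute values file (only the first column is used; additional columns are ignored)::
+
+    uc002loc.1    annotation_that_is_ignored
+    uc010abc.2
+
+With attribute name **transcript_id**, only GTF lines whose transcript_id equals one of these values are kept.
+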
+    </help>
+</tool>
diff --git a/tools/filters/gff/sort_gtf.py b/tools/filters/gff/sort_gtf.py
new file mode 100644
index 0000000..9cd9e0d
--- /dev/null
+++ b/tools/filters/gff/sort_gtf.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+import sys
+
+from galaxy.datatypes.util.gff_util import read_unordered_gtf
+
+#
+# Process inputs.
+#
+
+in_fname = sys.argv[1]
+out_fname = sys.argv[2]
+
+out = open( out_fname, 'w' )
+for feature in read_unordered_gtf( open( in_fname, 'r' ) ):
+    # Print feature.
+    for interval in feature.intervals:
+        out.write( "\t".join(interval.fields) )
+out.close()
+
+# TODO: print status information: how many lines processed and features found.
diff --git a/tools/filters/gff2bed.xml b/tools/filters/gff2bed.xml
new file mode 100644
index 0000000..cb81bf4
--- /dev/null
+++ b/tools/filters/gff2bed.xml
@@ -0,0 +1,90 @@
+<tool id="gff2bed1" name="GFF-to-BED" version="1.0.1">
+  <description>converter</description>
+  <edam_operations>
+    <edam_operation>operation_3434</edam_operation>
+  </edam_operations>
+  <command interpreter="python">gff_to_bed_converter.py $input $out_file1</command>
+  <inputs>
+    <param format="gff" name="input" type="data" label="Convert this dataset"/>
+  </inputs>
+  <outputs>
+    <data format="bed" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="5.gff" ftype="gff"/>
+      <output name="out_file1" file="gff2bed_out.bed"/>
+    </test>
+    <test>
+      <param name="input" value="gff2bed_in2.gff" ftype="gff"/>
+      <output name="out_file1" file="gff2bed_out2.bed"/>
+    </test>
+    <test>
+      <!-- Test conversion of gff3 file. -->
+      <param name="input" value="5.gff3" ftype="gff"/>
+      <output name="out_file1" file="gff2bed_out3.bed"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool converts data from GFF format to BED format (scroll down for format description).
+
+--------
+
+**Example**
+
+The following data in GFF format::
+
+    chr22  GeneA  enhancer  10000000  10001000  500  +  .  TGA
+    chr22  GeneA  promoter  10010000  10010100  900  +  .  TGA
+
+Will be converted to BED (**note** that 1 is subtracted from the start coordinate)::
+
+    chr22   9999999  10001000   enhancer   0   +
+    chr22  10009999  10010100   promoter   0   +
+
+------
+
+.. class:: infomark
+
+**About formats**
+
+**BED format** Browser Extensible Data format was designed at UCSC for displaying data tracks in the Genome Browser. It has three required fields and several additional optional ones:
+
+The first three BED fields (required) are::
+
+    1. chrom - The name of the chromosome (e.g. chr1, chrY_random).
+    2. chromStart - The starting position in the chromosome. (The first base in a chromosome is numbered 0.)
+    3. chromEnd - The ending position in the chromosome, plus 1 (i.e., a half-open interval).
+
+The additional BED fields (optional) are::
+
+    4. name - The name of the BED line.
+    5. score - A score between 0 and 1000.
+    6. strand - Defines the strand - either '+' or '-'.
+    7. thickStart - The starting position where the feature is drawn thickly at the Genome Browser.
+    8. thickEnd - The ending position where the feature is drawn thickly at the Genome Browser.
+    9. reserved - This should always be set to zero.
+   10. blockCount - The number of blocks (exons) in the BED line.
+   11. blockSizes - A comma-separated list of the block sizes. The number of items in this list should correspond to blockCount.
+   12. blockStarts - A comma-separated list of block starts. All of the blockStart positions should be calculated relative to chromStart. The number of items in this list should correspond to blockCount.
+   13. expCount - The number of experiments.
+   14. expIds - A comma-separated list of experiment ids. The number of items in this list should correspond to expCount.
+   15. expScores - A comma-separated list of experiment scores. All of the expScores should be relative to expIds. The number of items in this list should correspond to expCount.
+
+**GFF format** General Feature Format is a format for describing genes and other features associated with DNA, RNA and Protein sequences. GFF lines have nine tab-separated fields::
+
+    1. seqname - Must be a chromosome or scaffold.
+    2. source - The program that generated this feature.
+    3. feature - The name of this type of feature. Some examples of standard feature types are "CDS", "start_codon", "stop_codon", and "exon".
+    4. start - The starting position of the feature in the sequence. The first base is numbered 1.
+    5. end - The ending position of the feature (inclusive).
+    6. score - A score between 0 and 1000. If there is no score value, enter ".".
+    7. strand - Valid entries include '+', '-', or '.' (for don't know/care).
+    8. frame - If the feature is a coding exon, frame should be a number between 0-2 that represents the reading frame of the first base. If the feature is not a coding exon, the value should be '.'.
+    9. group - All lines with the same group are linked together into a single item.
+
+</help>
+</tool>
diff --git a/tools/filters/gff_to_bed_converter.py b/tools/filters/gff_to_bed_converter.py
new file mode 100644
index 0000000..b0ccd89
--- /dev/null
+++ b/tools/filters/gff_to_bed_converter.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import sys
+
+from galaxy.datatypes.util.gff_util import parse_gff_attributes
+
+
+def get_bed_line( chrom, name, strand, blocks ):
+    """ Returns a BED line for given data. """
+
+    if len( blocks ) == 1:
+        # Use simple BED format if there is only a single block:
+        #   chrom, chromStart, chromEnd, name, score, strand
+        #
+        start, end = blocks[0]
+        return "%s\t%i\t%i\t%s\t0\t%s\n" % ( chrom, start, end, name, strand )
+
+    #
+    # Build lists for transcript blocks' starts, sizes.
+    #
+
+    # Get transcript start, end.
+    t_start = sys.maxsize
+    t_end = -1
+    for block_start, block_end in blocks:
+        if block_start < t_start:
+            t_start = block_start
+        if block_end > t_end:
+            t_end = block_end
+
+    # Get block starts, sizes.
+    block_starts = []
+    block_sizes = []
+    for block_start, block_end in blocks:
+        block_starts.append( str( block_start - t_start ) )
+        block_sizes.append( str( block_end - block_start ) )
+
+    #
+    # Create BED entry.
+    # Bed format: chrom, chromStart, chromEnd, name, score, strand, \
+    #               thickStart, thickEnd, itemRgb, blockCount, blockSizes, blockStarts
+    #
+    # Render complete feature with thick blocks. There's no clear way to do this unless
+    # we analyze the block names, but making everything thick makes more sense than
+    # making everything thin.
+    #
+    return "%s\t%i\t%i\t%s\t0\t%s\t%i\t%i\t0\t%i\t%s\t%s\n" % \
+        ( chrom, t_start, t_end, name, strand, t_start, t_end, len( block_starts ),
+        ",".join( block_sizes ), ",".join( block_starts ) )
+
+
+def __main__():
+    input_name = sys.argv[1]
+    output_name = sys.argv[2]
+    skipped_lines = 0
+    first_skipped_line = 0
+    out = open( output_name, 'w' )
+    i = 0
+    cur_transcript_chrome = None
+    cur_transcript_id = None
+    cur_transcript_strand = None
+    cur_transcripts_blocks = []  # (start, end) for each block.
+    for i, line in enumerate( open( input_name ) ):
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( '#' ):
+            try:
+                # GFF format: chrom, source, name, chromStart, chromEnd, score, strand, frame, attributes
+                elems = line.split( '\t' )
+                start = str( int( elems[3] ) - 1 )
+                coords = [ int( start ), int( elems[4] ) ]
+                strand = elems[6]
+                if strand not in ['+', '-']:
+                    strand = '+'
+                attributes = parse_gff_attributes( elems[8] )
+                t_id = attributes.get( "transcript_id", None )
+
+                if not t_id:
+                    #
+                    # No transcript ID, so write last transcript and write current line as its own line.
+                    #
+
+                    # Write previous transcript.
+                    if cur_transcript_id:
+                        # Write BED entry.
+                        out.write( get_bed_line( cur_transcript_chrome, cur_transcript_id, cur_transcript_strand, cur_transcripts_blocks ) )
+
+                    # Replace any spaces in the name with underscores so UCSC will not complain.
+                    name = elems[2].replace(" ", "_")
+                    out.write( get_bed_line( elems[0], name, strand, [ coords ] ) )
+                    continue
+
+                # There is a transcript ID, so process line at transcript level.
+                if t_id == cur_transcript_id:
+                    # Line is element of transcript and will be a block in the BED entry.
+                    cur_transcripts_blocks.append( coords )
+                    continue
+
+                #
+                # Line is part of new transcript; write previous transcript and start
+                # new transcript.
+                #
+
+                # Write previous transcript.
+                if cur_transcript_id:
+                    # Write BED entry.
+                    out.write( get_bed_line( cur_transcript_chrome, cur_transcript_id, cur_transcript_strand, cur_transcripts_blocks ) )
+
+                # Start new transcript.
+                cur_transcript_chrome = elems[0]
+                cur_transcript_id = t_id
+                cur_transcript_strand = strand
+                cur_transcripts_blocks = []
+                cur_transcripts_blocks.append( coords )
+            except:
+                skipped_lines += 1
+                if not first_skipped_line:
+                    first_skipped_line = i + 1
+        else:
+            skipped_lines += 1
+            if not first_skipped_line:
+                first_skipped_line = i + 1
+
+    # Write last transcript.
+    if cur_transcript_id:
+        # Write BED entry.
+        out.write( get_bed_line( cur_transcript_chrome, cur_transcript_id, cur_transcript_strand, cur_transcripts_blocks ) )
+    out.close()
+    info_msg = "%i lines converted to BED.  " % ( i + 1 - skipped_lines )
+    if skipped_lines > 0:
+        info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
+    print(info_msg)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/filters/grep.py b/tools/filters/grep.py
new file mode 100644
index 0000000..81486ec
--- /dev/null
+++ b/tools/filters/grep.py
@@ -0,0 +1,137 @@
+# Filename: grep.py
+# Author: Ian N. Schenck
+# Version: 8/23/2005
+#
+# This script accepts regular expressions, as well as an "invert"
+# option, and applies the regular expression using grep.  This wrapper
+# provides input sanitization and pipes the results to an output file.
+#
+# Grep is launched based on these inputs:
+# -i Input file
+# -o Output file
+# -pattern RegEx pattern
+# -v true or false (output NON-matching lines)
+from __future__ import print_function
+
+import os
+import re
+import subprocess
+import sys
+from subprocess import PIPE, Popen
+from tempfile import NamedTemporaryFile
+
+
+# This function is exceedingly useful, perhaps package for reuse?
+def getopts(argv):
+    opts = {}
+    while argv:
+        if argv[0][0] == '-':
+            opts[argv[0]] = argv[1]
+            argv = argv[2:]
+        else:
+            argv = argv[1:]
+    return opts
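+# For example (illustrative):
+#     getopts( ['-i', 'in.txt', '-o', 'out.txt'] )  ->  {'-i': 'in.txt', '-o': 'out.txt'}
+# A flag with no following value raises IndexError, which main() reports as a
+# usage error.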
+
+
+def main():
+    args = sys.argv[1:]
+
+    try:
+        opts = getopts(args)
+    except IndexError:
+        print("Usage:")
+        print(" -i Input file")
+        print(" -o Output file")
+        print(" -pattern RegEx pattern")
+        print(" -v true or false (Invert match)")
+        return 0
+
+    outputfile = opts.get("-o")
+    if outputfile is None:
+        print("No output file specified.")
+        return -1
+
+    inputfile = opts.get("-i")
+    if inputfile is None:
+        print("No input file specified.")
+        return -2
+
+    invert = opts.get("-v")
+    if invert is None:
+        print("Match style (Invert or normal) not specified.")
+        return -3
+
+    pattern = opts.get("-pattern")
+    if pattern is None:
+        print("RegEx pattern not specified.")
+        return -4
+
+    # All inputs have been specified at this point, now validate.
+
+    # Unescape characters that are allowed in patterns but arrive escaped
+    # by the sanitizer (e.g. the single quote).
+    mapped_chars = {'>' : '__gt__',
+                    '<' : '__lt__',
+                    '\'': '__sq__',
+                    '"' : '__dq__',
+                    '[' : '__ob__',
+                    ']' : '__cb__',
+                    '{' : '__oc__',
+                    '}' : '__cc__'}
+
+    # with new sanitizing we only need to replace for single quote,
+    # but this needs to remain for backwards compatibility
+    for key, value in mapped_chars.items():
+        pattern = pattern.replace(value, key)
+
+    # match filename and invert flag
+    fileRegEx = re.compile(r"^[A-Za-z0-9./\-_]+$")
+    invertRegEx = re.compile("(true)|(false)")
+
+    # verify that filename and inversion flag are in the correct format
+    if not fileRegEx.match(outputfile):
+        print("Illegal output filename.")
+        return -5
+    if not fileRegEx.match(inputfile):
+        print("Illegal input filename.")
+        return -6
+    if not invertRegEx.match(invert):
+        print("Illegal invert option.")
+        return -7
+
+    # invert grep search?
+    if invert == "true":
+        invertflag = "-v"
+        print("Not matching pattern: %s" % pattern)
+    else:
+        invertflag = ""
+        print("Matching pattern: %s" % pattern)
+
+    # set version flag
+    versionflag = "-P"
+
+    # BSD grep (e.g. on Mac OS X 10.8.2) no longer supports the -P (Perl regex) option
+    versionmatch = Popen("grep -V | grep 'BSD'", shell=True, stdout=PIPE).communicate()[0]
+    if versionmatch:
+        versionflag = "-E"
+
+    # Create a temp file holding the pattern. By using a file to hold the
+    # pattern, we don't have to worry about sanitizing the grep command line
+    # and can include single quotes in the pattern.
+    pattern_file_name = NamedTemporaryFile().name
+    open( pattern_file_name, 'w' ).write( pattern )
+
+    # generate grep command
+    commandline = "grep %s %s -f %s %s > %s" % ( versionflag, invertflag, pattern_file_name, inputfile, outputfile )
+
+    # run grep
+    errorcode = subprocess.call(commandline, shell=True)
+
+    # remove temp pattern file
+    os.unlink( pattern_file_name )
+
+    # return error code
+    return errorcode
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/grep.xml b/tools/filters/grep.xml
new file mode 100644
index 0000000..2137262
--- /dev/null
+++ b/tools/filters/grep.xml
@@ -0,0 +1,82 @@
+<tool id="Grep1" name="Select" version="1.0.1">
+  <description>lines that match an expression</description>
+  <command interpreter="python">grep.py -i $input -o $out_file1 -pattern '$pattern' -v $invert</command>
+  <inputs>
+    <param format="txt" name="input" type="data" label="Select lines from"/>
+    <param name="invert" type="select" label="that">
+      <option value="false">Matching</option>
+      <option value="true">NOT Matching</option>
+    </param>
+    <param name="pattern" size="40" type="text" value="^chr([0-9A-Za-z])+" label="the pattern" help="here you can enter text or regular expression (for syntax check lower part of this frame)">
+      <sanitizer>
+        <valid initial="string.printable">
+         <remove value="'"/>
+        </valid>
+        <mapping initial="none">
+          <add source="'" target="__sq__"/>
+        </mapping>
+      </sanitizer>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="1.bed"/>
+      <param name="invert" value="false"/>
+      <param name="pattern" value="^chr[0-9]*"/>
+      <output name="out_file1" file="fs-grep.dat"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**Syntax**
+
+The select tool searches the data for lines that do or do not match the given pattern. Patterns may be regular expressions: concise descriptions of a set of matching strings.
+
+- **( ) { } [ ] . * ? + \ ^ $** are all special characters. **\\** can be used to "escape" a special character, allowing that special character to be searched for.
+- **\\A** matches the beginning of a string (but not an internal line).
+- **\\d** matches a digit, same as [0-9].
+- **\\D** matches a non-digit.
+- **\\s** matches a whitespace character.
+- **\\S** matches anything BUT a whitespace.
+- **\\t** matches a tab.
+- **\\w** matches an alphanumeric character.
+- **\\W** matches anything but an alphanumeric character.
+- **(** .. **)** groups a particular pattern.
+- **\\Z** matches the end of a string (but not an internal line).
+- **{** n or n, or n,m **}** specifies an expected number of repetitions of the preceding pattern.
+
+  - **{n}** The preceding item is matched exactly n times.
+  - **{n,}** The preceding item is matched n or more times. 
+  - **{n,m}** The preceding item is matched at least n times but not more than m times. 
+
+- **[** ... **]** creates a character class. Within the brackets, single characters can be placed. A dash (-) may be used to indicate a range such as **a-z**.
+- **.** Matches any single character except a newline.
+- ***** The preceding item will be matched zero or more times.
+- **?** The preceding item is optional and matched at most once.
+- **+** The preceding item will be matched one or more times.
+- **^** has two meanings:
+
+  - matches the beginning of a line or string.
+  - indicates negation in a character class. For example, [^...] matches every character except the ones inside brackets.
+
+- **$** matches the end of a line or string.
+- **\|** Separates alternate possibilities. 
+
+-----
+
+**Example**
+
+- **^chr([0-9A-Za-z])+** would match lines that begin with chromosomes, such as lines in a BED format file.
+- **(ACGT){1,5}** would match at least 1 "ACGT" and at most 5 "ACGT" consecutively.
+- **([^,][0-9]{1,3})(,[0-9]{3})\*** would match a large integer that is properly separated with commas such as 23,078,651.
+- **(abc)|(def)** would match either "abc" or "def".
+- **^\\W+#** would match any line that is a comment.
+</help>
+</tool>
diff --git a/tools/filters/gtf2bedgraph.xml b/tools/filters/gtf2bedgraph.xml
new file mode 100644
index 0000000..f1123af
--- /dev/null
+++ b/tools/filters/gtf2bedgraph.xml
@@ -0,0 +1,84 @@
+<tool id="gtf2bedgraph" name="GTF-to-BEDGraph" version="1.0.0">
+  <description>converter</description>
+  <edam_operations>
+    <edam_operation>operation_3434</edam_operation>
+  </edam_operations>
+  <command interpreter="python">gtf_to_bedgraph_converter.py $input $out_file1 $attribute_name</command>
+  <inputs>
+    <param format="gtf" name="input" type="data" label="Convert this query"/>
+    <param name="attribute_name" type="text" label="Attribute to Use for Value">
+        <validator type="empty_field" /> 
+    </param>
+  </inputs>
+  <outputs>
+    <data format="bedgraph" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="gtf2bedgraph_in.gtf" ftype="gtf"/>
+      <param name="attribute_name" value="FPKM"/>
+      <output name="out_file1" file="gtf2bedgraph_out.bedgraph" ftype="bedgraph"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool converts data from GTF format to BEDGraph format (scroll down for format description).
+
+--------
+
+**Example**
+
+The following data in GTF format::
+
+    chr22  GeneA  enhancer  10000000  10001000  500  +  .  gene_id "GeneA"; transcript_id "TranscriptAlpha"; FPKM "2.75"; frac "1.000000";
+    chr22  GeneA  promoter  10010000  10010100  900  +  .  gene_id "GeneA"; transcript_id "TranscriptAlpha"; FPKM "2.25"; frac "1.000000";
+
+using the attribute name 'FPKM' will be converted to BEDGraph (**note** that 1 is subtracted from the start coordinate)::
+
+    chr22   9999999  10001000   2.75
+    chr22  10009999  10010100   2.25
+
+------
+
+.. class:: infomark
+
+**About formats**
+
+**GTF format** Gene Transfer Format is a format for describing genes and other features associated with DNA, RNA and Protein sequences. GTF lines have nine tab-separated fields::
+
+    1. seqname - Must be a chromosome or scaffold.
+    2. source - The program that generated this feature.
+    3. feature - The name of this type of feature. Some examples of standard feature types are "CDS", "start_codon", "stop_codon", and "exon".
+    4. start - The starting position of the feature in the sequence. The first base is numbered 1.
+    5. end - The ending position of the feature (inclusive).
+    6. score - A score between 0 and 1000. If there is no score value, enter ".".
+    7. strand - Valid entries include '+', '-', or '.' (for don't know/care).
+    8. frame - If the feature is a coding exon, frame should be a number between 0-2 that represents the reading frame of the first base. If the feature is not a coding exon, the value should be '.'.
+    9. group - The group field is a list of attributes. Each attribute consists of a type/value pair. Attributes must end in a semi-colon, and be separated from any following attribute by exactly one space. The attribute list must begin with the two mandatory attributes: (i) gene_id value - A globally unique identifier for the genomic source of the sequence and (ii) transcript_id value - A globally unique identifier for the predicted transcript.
+    
+**BEDGraph format**
+
+The bedGraph format is line-oriented. bedGraph data are preceded by a track definition line, which adds a number of options for controlling the default display of this track.
+
+For the track definition line, all options are placed in a single line separated by spaces::
+
+  track type=bedGraph name=track_label description=center_label
+        visibility=display_mode color=r,g,b altColor=r,g,b
+        priority=priority autoScale=on|off alwaysZero=on|off
+        gridDefault=on|off maxHeightPixels=max:default:min
+        graphType=bar|points viewLimits=lower:upper
+        yLineMark=real-value yLineOnOff=on|off
+        windowingFunction=maximum|mean|minimum smoothingWindow=off|2-16
+
+The track type is REQUIRED, and must be bedGraph::
+
+  type=bedGraph
+
+Following the track definition line are the track data in four column BED format::
+
+  chromA  chromStartA  chromEndA  dataValueA
+  chromB  chromStartB  chromEndB  dataValueB
+
+</help>
+</tool>
diff --git a/tools/filters/gtf_to_bedgraph_converter.py b/tools/filters/gtf_to_bedgraph_converter.py
new file mode 100644
index 0000000..eb63048
--- /dev/null
+++ b/tools/filters/gtf_to_bedgraph_converter.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+import tempfile
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def __main__():
+    # Read params.
+    input_name = sys.argv[1]
+    output_name = sys.argv[2]
+    attribute_name = sys.argv[3]
+
+    # Create temp files.
+    tmp_name1 = tempfile.NamedTemporaryFile().name
+    tmp_name2 = tempfile.NamedTemporaryFile().name
+
+    # Do conversion.
+    skipped_lines = 0
+    first_skipped_line = 0
+    out = open( tmp_name1, 'w' )
+
+    # Write track data to temporary file.
+    i = 0
+    for i, line in enumerate( open( input_name ) ):
+        line = line.rstrip( '\r\n' )
+
+        if line and not line.startswith( '#' ):
+            try:
+                elems = line.split( '\t' )
+                start = str( int( elems[3] ) - 1 )  # GTF coordinates are 1-based, BedGraph are 0-based.
+                strand = elems[6]
+                if strand not in ['+', '-']:
+                    strand = '+'
+                attributes_list = elems[8].split(";")
+                attributes = {}
+                for name_value_pair in attributes_list:
+                    pair = name_value_pair.strip().split(" ")
+                    name = pair[0].strip()
+                    if name == '':
+                        continue
+                    # Need to strip double quote from values
+                    value = pair[1].strip(" \"")
+                    attributes[name] = value
+                value = attributes[ attribute_name ]
+                # GTF format: chrom, source, name, chromStart, chromEnd, score, strand, frame, attributes.
+                # BedGraph format: chrom, chromStart, chromEnd, value
+                out.write( "%s\t%s\t%s\t%s\n" % ( elems[0], start, elems[4], value ) )
+            except:
+                skipped_lines += 1
+                if not first_skipped_line:
+                    first_skipped_line = i + 1
+        else:
+            skipped_lines += 1
+            if not first_skipped_line:
+                first_skipped_line = i + 1
+    out.close()
+
+    # Sort tmp file by chromosome name and chromosome start to create ordered track data.
+    cmd = "sort -k1,1 -k2,2n < %s > %s" % ( tmp_name1, tmp_name2 )
+    try:
+        subprocess.check_call( cmd, shell=True )
+        os.remove( tmp_name1 )
+    except Exception as ex:
+        sys.stderr.write( "%s\n" % ex )
+        sys.exit(1)
+
+    # Create bedgraph file by combining track definition with ordered track data.
+    cmd = "echo 'track type=bedGraph' | cat - %s > %s " % ( tmp_name2, output_name )
+    try:
+        subprocess.check_call( cmd, shell=True )
+        os.remove( tmp_name2 )
+    except Exception as ex:
+        sys.stderr.write( "%s\n" % ex )
+        sys.exit(1)
+
+    info_msg = "%i lines converted to BEDGraph.  " % ( i + 1 - skipped_lines )
+    if skipped_lines > 0:
+        info_msg += "Skipped %d blank/comment/invalid lines starting with line #%d." % ( skipped_lines, first_skipped_line )
+    print(info_msg)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/filters/headWrapper.pl b/tools/filters/headWrapper.pl
new file mode 100644
index 0000000..2049e34
--- /dev/null
+++ b/tools/filters/headWrapper.pl
@@ -0,0 +1,19 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# a wrapper for head for use in galaxy
+# headWrapper.pl [filename] [# lines to show] [output]
+
+die "Check arguments" unless @ARGV == 3;
+die "Line number must be an integer\n" unless $ARGV[1]=~ m/^\d+$/;
+
+open (OUT, ">$ARGV[2]") or die "Cannot create $ARGV[2]:$!\n";
+open (HEAD, "head -n $ARGV[1] $ARGV[0]|") or die "Cannot run head:$!\n";
+while (<HEAD>) {
+    print OUT;
+}
+close OUT;
+close HEAD;
+    
diff --git a/tools/filters/headWrapper.xml b/tools/filters/headWrapper.xml
new file mode 100644
index 0000000..53451c4
--- /dev/null
+++ b/tools/filters/headWrapper.xml
@@ -0,0 +1,42 @@
+<tool id="Show beginning1" name="Select first" version="1.0.0">
+  <description>lines from a dataset</description>
+  <command interpreter="perl">headWrapper.pl $input $lineNum $out_file1</command>
+  <inputs>
+    <param name="lineNum" size="5" type="integer" value="10" label="Select first" help="lines"/>
+    <param format="txt" name="input" type="data" label="from"/>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="lineNum" value="10"/>
+      <param name="input" value="1.bed"/>
+      <output name="out_file1" file="eq-showbeginning.dat"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool outputs a specified number of lines from the **beginning** of a dataset.
+
+-----
+
+**Example**
+
+Selecting 2 lines from this::
+
+    chr7  56632  56652  D17003_CTCF_R6  310  +
+    chr7  56736  56756  D17003_CTCF_R7  354  +
+    chr7  56761  56781  D17003_CTCF_R4  220  +
+    chr7  56772  56792  D17003_CTCF_R7  372  +
+    chr7  56775  56795  D17003_CTCF_R4  207  +
+
+will produce::
+
+    chr7  56632  56652  D17003_CTCF_R6  310  +
+    chr7  56736  56756  D17003_CTCF_R7  354  +
+
+  </help>
+</tool>
diff --git a/tools/filters/join.py b/tools/filters/join.py
new file mode 100644
index 0000000..6b610f2
--- /dev/null
+++ b/tools/filters/join.py
@@ -0,0 +1,390 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+"""
+Script to Join Two Files on specified columns.
+
+Takes two tab delimited files, two column numbers (base 1) and outputs a new tab delimited file with lines joined by tabs.
+User can also opt to have non-joining rows of file1 echoed.
+"""
+from __future__ import print_function
+
+import json
+import optparse
+import os
+import struct
+import sys
+import tempfile
+
+from galaxy.util import stringify_dictionary_keys
+from galaxy.util.bunch import Bunch
+
+
+class OffsetList:
+    def __init__( self, filesize=0, fmt=None ):
+        self.file = tempfile.NamedTemporaryFile( 'w+b' )
+        if fmt:
+            self.fmt = fmt
+        elif filesize and filesize <= sys.maxsize * 2:
+            self.fmt = 'I'
+        else:
+            self.fmt = 'Q'
+        self.fmt_size = struct.calcsize( self.fmt )
+
+    @property
+    def size( self ):
+        self.file.flush()
+        return self.file_size / self.fmt_size
+
+    @property
+    def file_size( self ):
+        self.file.flush()
+        return os.stat( self.file.name ).st_size
+
+    def add_offset( self, offset ):
+        if not isinstance( offset, list ):
+            offset = [offset]
+        self.file.seek( self.file_size )
+        for off in offset:
+            self.file.write( struct.pack( self.fmt, off ) )
+
+    def get_offsets( self, start=0 ):
+        self.file.seek( start * self.fmt_size )
+        while True:
+            packed = self.file.read( self.fmt_size )
+            if not packed:
+                break
+            yield struct.unpack( self.fmt, packed )[0]
+
+    def get_offset_by_index( self, index ):
+        self.file.seek( index * self.fmt_size )
+        return struct.unpack( self.fmt, self.file.read( self.fmt_size ) )[0]
+
+    def set_offset_at_index( self, index, offset ):
+        if not isinstance( offset, list ):
+            offset = [offset]
+        if index >= self.size:
+            self.add_offset( offset )
+        else:
+            temp_file = tempfile.NamedTemporaryFile( 'w+b' )
+            self.file.seek( 0 )
+            temp_file.write( self.file.read( ( index ) * self.fmt_size ) )
+            for off in offset:
+                temp_file.write( struct.pack( self.fmt, off ) )
+            temp_file.write( self.file.read() )
+            self.file = temp_file
+
+
+class SortedOffsets( OffsetList ):
+    def __init__( self, indexed_filename, column, split=None ):
+        OffsetList.__init__( self, os.stat( indexed_filename ).st_size )
+        self.indexed_filename = indexed_filename
+        self.indexed_file = open( indexed_filename, 'rb' )
+        self.column = column
+        self.split = split
+        self.last_identifier = None
+        self.last_identifier_merged = None
+        self.last_offset_merged = 0
+
+    def merge_with_dict( self, new_offset_dict ):
+        if not new_offset_dict:
+            return  # no items to merge in
+        keys = list(new_offset_dict.keys())
+        keys.sort()
+        identifier2 = keys.pop( 0 )
+
+        result_offsets = OffsetList( fmt=self.fmt )
+        offsets1 = enumerate( self.get_offsets() )
+        try:
+            index1, offset1 = next(offsets1)
+            identifier1 = self.get_identifier_by_offset( offset1 )
+        except StopIteration:
+            offset1 = None
+            identifier1 = None
+            index1 = 0
+
+        while True:
+            if identifier1 is None and identifier2 is None:
+                self.file = result_offsets.file  # self is now merged results
+                return
+            elif identifier1 is None or ( identifier2 and identifier2 < identifier1 ):
+                result_offsets.add_offset( new_offset_dict[identifier2] )
+                if keys:
+                    identifier2 = keys.pop( 0 )
+                else:
+                    identifier2 = None
+            elif identifier2 is None:
+                result_offsets.file.seek( result_offsets.file_size )
+                self.file.seek( index1 * self.fmt_size )
+                result_offsets.file.write( self.file.read() )
+                identifier1 = None
+                offset1 = None
+            else:
+                result_offsets.add_offset( offset1 )
+                try:
+                    index1, offset1 = next(offsets1)
+                    identifier1 = self.get_identifier_by_offset( offset1 )
+                except StopIteration:
+                    offset1 = None
+                    identifier1 = None
+                    index1 += 1
+
+    # methods to help link offsets to lines, ids, etc.
+    def get_identifier_by_line( self, line ):
+        if isinstance( line, str ):
+            fields = line.rstrip( '\r\n' ).split( self.split )
+            if self.column < len( fields ):
+                return fields[self.column]
+        return None
+
+    def get_line_by_offset( self, offset ):
+        self.indexed_file.seek( offset )
+        return self.indexed_file.readline()
+
+    def get_identifier_by_offset( self, offset ):
+        return self.get_identifier_by_line( self.get_line_by_offset( offset ) )
+
+
+# indexed set of offsets, index is built on demand
+class OffsetIndex:
+    def __init__( self, filename, column, split=None, index_depth=3 ):
+        self.filename = filename
+        self.file = open( filename, 'rb' )
+        self.column = column
+        self.split = split
+        self._offsets = {}
+        self._index = None
+        self.index_depth = index_depth
+
+    def _build_index( self ):
+        self._index = {}
+        for start_char, sorted_offsets in self._offsets.items():
+            self._index[start_char] = {}
+            for i, offset in enumerate( sorted_offsets.get_offsets() ):
+                identifier = sorted_offsets.get_identifier_by_offset( offset )
+                if identifier[0:self.index_depth] not in self._index[start_char]:
+                    self._index[start_char][identifier[0:self.index_depth]] = i
+
+    def get_lines_by_identifier( self, identifier ):
+        if not identifier:
+            return
+        # if index doesn't exist, build it
+        if self._index is None:
+            self._build_index()
+
+        # identifier cannot exist
+        if identifier[0] not in self._index or identifier[0:self.index_depth] not in self._index[identifier[0]]:
+            return
+        # identifier might exist, search for it
+        offset_index = self._index[identifier[0]][identifier[0:self.index_depth]]
+        while True:
+            if offset_index >= self._offsets[identifier[0]].size:
+                return
+            offset = self._offsets[identifier[0]].get_offset_by_index( offset_index )
+            identifier2 = self._offsets[identifier[0]].get_identifier_by_offset( offset )
+            if not identifier2 or identifier2 > identifier:
+                return
+            if identifier2 == identifier:
+                yield self._offsets[identifier[0]].get_line_by_offset( offset )
+            offset_index += 1
+
+    def get_offsets( self ):
+        keys = list(self._offsets.keys())
+        keys.sort()
+        for key in keys:
+            for offset in self._offsets[key].get_offsets():
+                yield offset
+
+    def get_line_by_offset( self, offset ):
+        self.file.seek( offset )
+        return self.file.readline()
+
+    def get_identifiers_offsets( self ):
+        keys = list(self._offsets.keys())
+        keys.sort()
+        for key in keys:
+            for offset in self._offsets[key].get_offsets():
+                yield self._offsets[key].get_identifier_by_offset( offset ), offset
+
+    def get_identifier_by_line( self, line ):
+        if isinstance( line, str ):
+            fields = line.rstrip( '\r\n' ).split( self.split )
+            if self.column < len( fields ):
+                return fields[self.column]
+        return None
+
+    def merge_with_dict( self, d ):
+        if not d:
+            return  # no data to merge
+        self._index = None
+        keys = list(d.keys())
+        keys.sort()
+        identifier = keys.pop( 0 )
+        first_char = identifier[0]
+        temp = { identifier: d[identifier] }
+        while True:
+            if not keys:
+                if first_char not in self._offsets:
+                    self._offsets[first_char] = SortedOffsets( self.filename, self.column, self.split )
+                self._offsets[first_char].merge_with_dict( temp )
+                return
+            identifier = keys.pop( 0 )
+            if identifier[0] == first_char:
+                temp[identifier] = d[identifier]
+            else:
+                if first_char not in self._offsets:
+                    self._offsets[first_char] = SortedOffsets( self.filename, self.column, self.split )
+                self._offsets[first_char].merge_with_dict( temp )
+                temp = { identifier: d[identifier] }
+                first_char = identifier[0]
+
+
+class BufferedIndex:
+    def __init__( self, filename, column, split=None, buffer=1000000, index_depth=3 ):
+        self.index = OffsetIndex( filename, column, split, index_depth )
+        self.buffered_offsets = {}
+        f = open( filename, 'rb' )
+        offset = f.tell()
+        identified_offset_count = 1
+        while True:
+            offset = f.tell()
+            line = f.readline()
+            if not line:
+                break  # EOF
+            identifier = self.index.get_identifier_by_line( line )
+            if identifier:
+                # flush buffered offsets, if buffer size reached
+                if buffer and identified_offset_count % buffer == 0:
+                    self.index.merge_with_dict( self.buffered_offsets )
+                    self.buffered_offsets = {}
+                if identifier not in self.buffered_offsets:
+                    self.buffered_offsets[identifier] = []
+                self.buffered_offsets[identifier].append( offset )
+                identified_offset_count += 1
+        f.close()
+
+    def get_lines_by_identifier( self, identifier ):
+        for line in self.index.get_lines_by_identifier( identifier ):
+            yield line
+        if identifier in self.buffered_offsets:
+            for offset in self.buffered_offsets[identifier]:
+                yield self.index.get_line_by_offset( offset )
+
+
+def fill_empty_columns( line, split, fill_values ):
+    if not fill_values:
+        return line
+    filled_columns = []
+    for i, field in enumerate( line.split( split ) ):
+        if field or i >= len( fill_values ):
+            filled_columns.append( field )
+        else:
+            filled_columns.append( fill_values[i] )
+    if len( fill_values ) > len( filled_columns ):
+        filled_columns.extend( fill_values[ len( filled_columns ): ] )
+    return split.join( filled_columns )
+
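+# Illustrative sketch of the behavior (assumed inputs, not from the original
+# source): with split='\t' and fill_values=['.', '.', '.'],
+#     fill_empty_columns('a\t\tc', '\t', ['.', '.', '.'])  ->  'a\t.\tc'
+#     fill_empty_columns('a\tb', '\t', ['.', '.', '.'])    ->  'a\tb\t.'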
+
+def join_files( filename1, column1, filename2, column2, out_filename, split=None, buffer=1000000, keep_unmatched=False, keep_partial=False, index_depth=3, fill_options=None ):
+    # return identifier based upon line
+    def get_identifier_by_line( line, column, split=None ):
+        if isinstance( line, str ):
+            fields = line.rstrip( '\r\n' ).split( split )
+            if column < len( fields ):
+                return fields[column]
+        return None
+    if fill_options is None:
+        fill_options = Bunch( fill_unjoined_only=True, file1_columns=None, file2_columns=None )
+    out = open( out_filename, 'w+b' )
+    index = BufferedIndex( filename2, column2, split, buffer, index_depth )
+    for line1 in open( filename1, 'rb' ):
+        identifier = get_identifier_by_line( line1, column1, split )
+        if identifier:
+            written = False
+            for line2 in index.get_lines_by_identifier( identifier ):
+                if not fill_options.fill_unjoined_only:
+                    out.write( "%s%s%s\n" % ( fill_empty_columns( line1.rstrip( '\r\n' ), split, fill_options.file1_columns ), split, fill_empty_columns( line2.rstrip( '\r\n' ), split, fill_options.file2_columns ) ) )
+                else:
+                    out.write( "%s%s%s\n" % ( line1.rstrip( '\r\n' ), split, line2.rstrip( '\r\n' ) ) )
+                written = True
+            if not written and keep_unmatched:
+                out.write( fill_empty_columns( line1.rstrip( '\r\n' ), split, fill_options.file1_columns ) )
+                if fill_options:
+                    if fill_options.file2_columns:
+                        out.write( "%s%s" % ( split, fill_empty_columns( "", split, fill_options.file2_columns ) ) )
+                out.write( "\n" )
+        elif keep_partial:
+            out.write( fill_empty_columns( line1.rstrip( '\r\n' ), split, fill_options.file1_columns ) )
+            if fill_options:
+                if fill_options.file2_columns:
+                    out.write( "%s%s" % ( split, fill_empty_columns( "", split, fill_options.file2_columns ) ) )
+            out.write( "\n" )
+    out.close()
+
+
+def main():
+    parser = optparse.OptionParser()
+    parser.add_option(
+        '-b', '--buffer',
+        dest='buffer',
+        type='int', default=1000000,
+        help='Number of lines to buffer at a time. Default: 1,000,000 lines. A buffer of 0 will attempt to use memory only.'
+    )
+    parser.add_option(
+        '-d', '--index_depth',
+        dest='index_depth',
+        type='int', default=3,
+        help='Depth to use for file-based offset indexing. Default: 3.'
+    )
+    parser.add_option(
+        '-p', '--keep_partial',
+        action='store_true',
+        dest='keep_partial',
+        default=False,
+        help='Keep rows in first input which are missing identifiers.')
+    parser.add_option(
+        '-u', '--keep_unmatched',
+        action='store_true',
+        dest='keep_unmatched',
+        default=False,
+        help='Keep rows in first input which are not joined with the second input.')
+    parser.add_option(
+        '-f', '--fill_options_file',
+        dest='fill_options_file',
+        type='str', default=None,
+        help='Fill empty columns with values from a JSONified file.')
+
+    options, args = parser.parse_args()
+
+    fill_options = None
+    if options.fill_options_file is not None:
+        try:
+            fill_options = Bunch( **stringify_dictionary_keys( json.load( open( options.fill_options_file ) ) ) )  # json.load( open( options.fill_options_file ) )
+        except Exception as e:
+            print("Warning: Ignoring fill options due to json error (%s)." % e)
+    if fill_options is None:
+        fill_options = Bunch()
+    if 'fill_unjoined_only' not in fill_options:
+        fill_options.fill_unjoined_only = True
+    if 'file1_columns' not in fill_options:
+        fill_options.file1_columns = None
+    if 'file2_columns' not in fill_options:
+        fill_options.file2_columns = None
+
+    try:
+        filename1 = args[0]
+        filename2 = args[1]
+        column1 = int( args[2] ) - 1
+        column2 = int( args[3] ) - 1
+        out_filename = args[4]
+    except:
+        print("Error parsing command line.", file=sys.stderr)
+        sys.exit()
+
+    # Character for splitting fields and joining lines
+    split = "\t"
+
+    return join_files( filename1, column1, filename2, column2, out_filename, split, options.buffer, options.keep_unmatched, options.keep_partial, options.index_depth, fill_options=fill_options )
+
+
+if __name__ == "__main__":
+    main()
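
A minimal usage sketch for join_files (hypothetical file names; assumes the
Bunch import shown above)::

    fill = Bunch( fill_unjoined_only=True, file1_columns=None, file2_columns=None )
    # join column 4 of a.tabular to column 1 of b.tabular; columns are 0-based
    # here because main() subtracts 1 from the user's 1-based arguments
    join_files( 'a.tabular', 3, 'b.tabular', 0, 'joined.tabular',
                split='\t', keep_unmatched=True, fill_options=fill )
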
diff --git a/tools/filters/joinWrapper.pl b/tools/filters/joinWrapper.pl
new file mode 100644
index 0000000..69f95b4
--- /dev/null
+++ b/tools/filters/joinWrapper.pl
@@ -0,0 +1,51 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+use File::Temp "tempfile";
+
+my ($input1, $input2, $field1, $field2, $mode, $OOption, $out_file1) = @ARGV;
+
+die "No arguments\n" unless @ARGV == 7;
+
+my ($fh1, $file1) = tempfile();
+my ($fh2, $file2) = tempfile(); 
+
+`sort -k $field1 $input1 > $file1`;
+`sort -k $field2 $input2 > $file2`;
+
+my $option = "";
+my @fields = ();
+my $line = "";
+
+if ($OOption eq "Y") {
+  if (defined($fh1)) {
+    $line = <$fh1>;
+  } else {
+    die "Failed to create file $file1\n";
+  }
+  @fields = split /\t/, $line;
+  die "The field you selected does not exist in the input file" if (@fields < $field1);
+  my @optionO = ();
+  my $i = 0;
+  foreach (@fields) {
+    ++$i;
+    push(@optionO, "1.$i");
+  }
+  $option = "-o " . join(",", @optionO);
+} else {
+  $option = "";
+}
+
+$ENV{'LC_ALL'} = 'POSIX';
+
+if ($mode eq "V") {
+  `join -v 1 $option -1 $field1 -2 $field2 $file1 $file2 | tr " " "\t" > $out_file1`;
+} else {
+  `join $option -1 $field1 -2 $field2 $file1 $file2 | tr " " "\t" > $out_file1`;
+}
+
+`rm $file1 ; rm $file2`;
+
+
+
diff --git a/tools/filters/joinWrapper.py b/tools/filters/joinWrapper.py
new file mode 100644
index 0000000..7e931bf
--- /dev/null
+++ b/tools/filters/joinWrapper.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+# Guruprasad Ananda
+"""
+This tool provides the UNIX "join" functionality.
+"""
+import os
+import subprocess
+import sys
+import tempfile
+
+
+def stop_err(msg):
+    sys.stderr.write(msg)
+    sys.exit()
+
+
+def main():
+    infile1 = sys.argv[1]
+    infile2 = sys.argv[2]
+    field1 = int(sys.argv[3])
+    field2 = int(sys.argv[4])
+    mode = sys.argv[5]
+    outfile = sys.argv[6]
+
+    tmpfile1 = tempfile.NamedTemporaryFile()
+    tmpfile2 = tempfile.NamedTemporaryFile()
+
+    try:
+        # Sort the two files based on specified fields
+        os.system("sort -t '	' -k %d,%d -o %s %s" % (field1, field1, tmpfile1.name, infile1))
+        os.system("sort -t '	' -k %d,%d -o %s %s" % (field2, field2, tmpfile2.name, infile2))
+    except Exception as exc:
+        stop_err( 'Initialization error -> %s' % str(exc) )
+
+    option = ""
+    for line in open(tmpfile1.name):
+        line = line.strip()
+        if line:
+            elems = line.split('\t')
+            for j in range(1, len(elems) + 1):
+                if j == 1:
+                    option = "1.1"
+                else:
+                    option = option + ",1." + str(j)
+            break
+
+    # check if join has a --version option. BSD join doesn't have this option, while GNU join does.
+    # The return value will be 0 for GNU join and non-zero for BSD join.
+    ret = subprocess.call('join --version 2>/dev/null', shell=True)
+    # check if the join version is 7 or later. If so, pass --nocheck-order,
+    # since newer join would otherwise raise an error when the two files
+    # being joined contain duplicated items.
+    if ret == 0:
+        cl = subprocess.Popen(["join", "--version"], stdout=subprocess.PIPE)
+        (stdout, _) = cl.communicate()
+        version_line = stdout.split("\n")[0]
+        (version, _) = version_line.split()[-1].split(".")
+        if int(version) >= 7:
+            flags = "--nocheck-order"
+        else:
+            flags = ""
+    else:
+        flags = ""
+
+    if mode == "V":
+        cmdline = "join %s -t '	' -v 1 -o %s -1 %d -2 %d %s %s > %s" % (flags, option, field1, field2, tmpfile1.name, tmpfile2.name, outfile)
+    else:
+        cmdline = "join %s -t '	' -o %s -1 %d -2 %d %s %s > %s" % (flags, option, field1, field2, tmpfile1.name, tmpfile2.name, outfile)
+
+    try:
+        os.system(cmdline)
+    except Exception as exj:
+        stop_err('Error joining the two datasets -> %s' % str(exj))
+
+
+if __name__ == "__main__":
+    main()
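
The GNU-versus-BSD probe above generalizes to other coreutils-style programs;
a hedged sketch of the same pattern (illustrative, not part of the tool)::

    import subprocess

    def is_gnu(program):
        # GNU tools accept --version and exit 0; BSD variants typically do not
        return subprocess.call('%s --version >/dev/null 2>&1' % program, shell=True) == 0
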
diff --git a/tools/filters/joiner.xml b/tools/filters/joiner.xml
new file mode 100644
index 0000000..953145f
--- /dev/null
+++ b/tools/filters/joiner.xml
@@ -0,0 +1,180 @@
+<tool id="join1" name="Join two Datasets" version="2.0.2">
+  <description>side by side on a specified field</description>
+  <command interpreter="python">join.py $input1 $input2 $field1 $field2 $out_file1 $unmatched $partial --index_depth=3 --buffer=50000000 --fill_options_file=$fill_options_file</command>
+  <inputs>
+    <param format="tabular" name="input1" type="data" label="Join"/>
+    <param name="field1" label="using column" type="data_column" data_ref="input1" />
+    <param format="tabular" name="input2" type="data" label="with" />
+    <param name="field2" label="and column" type="data_column" data_ref="input2" />
+    <param name="unmatched" type="select" label="Keep lines of first input that do not join with second input">
+      <option value="-u">Yes</option>
+      <option value="" selected="true">No</option>
+    </param>
+    <param name="partial" type="select" label="Keep lines of first input that are incomplete">
+      <option value="-p">Yes</option>
+      <option value="" selected="true">No</option>
+    </param>
+    <conditional name="fill_empty_columns">
+      <param name="fill_empty_columns_switch" type="select" label="Fill empty columns">
+        <option value="no_fill" selected="True">No</option>
+        <option value="fill_empty">Yes</option>
+      </param>
+     <when value="no_fill">
+        <!-- do nothing -->
+     </when>
+     <when value="fill_empty">
+       <param type="select" name="fill_columns_by" label="Only fill unjoined rows">
+         <option value="fill_unjoined_only" selected="True">Yes</option>
+         <option value="fill_all">No</option>
+       </param>
+       <conditional name="do_fill_empty_columns">
+         <param name="column_fill_type" type="select" label="Fill Columns by">
+           <option value="single_fill_value" selected="True">Single fill value</option>
+           <option value="fill_value_by_column">Values by column</option>
+         </param>
+         <when value="single_fill_value">
+           <param type="text" name="fill_value" label="Fill value" value="."/>
+         </when>
+         <when value="fill_value_by_column">
+           <repeat name="column_fill1" title="Fill Column for Input 1">
+             <param name="column_number1" label="Column" type="data_column" data_ref="input1" />
+             <param type="text" name="fill_value1" value="."/>
+           </repeat>
+           <repeat name="column_fill2" title="Fill Column for Input 2">
+             <param name="column_number2" label="Column" type="data_column" data_ref="input2" />
+             <param type="text" name="fill_value2" value="."/>
+           </repeat>
+         </when>
+       </conditional>
+     </when>
+   </conditional>
+  </inputs>
+  <configfiles>
+    <configfile name="fill_options_file"><%
+import json
+%>
+#set $__fill_options = {}
+#if $fill_empty_columns['fill_empty_columns_switch'] == 'fill_empty':
+    #set $__fill_options['fill_unjoined_only'] = $fill_empty_columns['fill_columns_by'].value == 'fill_unjoined_only'
+    #if $fill_empty_columns['do_fill_empty_columns']['column_fill_type'] == 'single_fill_value':
+        #set $__start_fill = $fill_empty_columns['do_fill_empty_columns']['fill_value'].value
+    #else:
+        #set $__start_fill = ""
+    #end if
+    #set $__fill_options['file1_columns'] = [ __start_fill for i in range( int( $input1.metadata.columns ) ) ]
+    #set $__fill_options['file2_columns'] = [ __start_fill for i in range( int( $input2.metadata.columns ) ) ]
+    #if $fill_empty_columns['do_fill_empty_columns']['column_fill_type'] == 'fill_value_by_column':
+        #for column_fill1 in $fill_empty_columns['do_fill_empty_columns']['column_fill1']:
+            #set $__fill_options['file1_columns'][ int( column_fill1['column_number1'].value ) - 1 ] = column_fill1['fill_value1'].value
+        #end for
+        #for column_fill2 in $fill_empty_columns['do_fill_empty_columns']['column_fill2']:
+            #set $__fill_options['file2_columns'][ int( column_fill2['column_number2'].value ) - 1 ] = column_fill2['fill_value2'].value
+        #end for
+    #end if
+#end if
+${json.dumps( __fill_options )}
+    </configfile>
+  </configfiles>
+  <outputs>
+     <data format="input" name="out_file1" metadata_source="input1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="input2" value="2.bed"/>
+      <param name="field1" value="2"/>
+      <param name="field2" value="2"/>
+      <param name="unmatched" value=""/>
+      <param name="partial" value=""/>
+      <param name="fill_empty_columns_switch" value="no_fill"/>
+      <output name="out_file1" file="joiner_out1.bed"/>
+    </test>
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="input2" value="2.bed"/>
+      <param name="field1" value="2"/>
+      <param name="field2" value="2"/>
+      <param name="unmatched" value="Yes"/>
+      <param name="partial" value="Yes"/>
+      <param name="fill_empty_columns_switch" value="no_fill"/>
+      <output name="out_file1" file="joiner_out2.bed"/>
+    </test>
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="input2" value="2.bed"/>
+      <param name="field1" value="2"/>
+      <param name="field2" value="2"/>
+      <param name="unmatched" value="Yes"/>
+      <param name="partial" value="Yes"/>
+      <param name="fill_empty_columns_switch" value="fill_empty"/>
+      <param name="fill_columns_by" value="fill_all"/>
+      <param name="column_fill_type" value="single_fill_value"/>
+      <param name="fill_value" value="~"/>
+      <output name="out_file1" file="joiner_out3.bed"/>
+    </test>
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="input2" value="2.bed"/>
+      <param name="field1" value="2"/>
+      <param name="field2" value="2"/>
+      <param name="unmatched" value="Yes"/>
+      <param name="partial" value="Yes"/>
+      <param name="fill_empty_columns_switch" value="fill_empty"/>
+      <param name="fill_columns_by" value="fill_all"/>
+      <param name="column_fill_type" value="fill_value_by_column"/>
+      <param name="column_number1" value="6"/>
+      <param name="fill_value1" value="+"/>
+      <param name="column_number2" value="1"/>
+      <param name="fill_value2" value="NoChrom"/>
+      <output name="out_file1" file="joiner_out4.bed"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+**This tool will attempt to reuse the metadata from your first input.** To change metadata assignments click on the "edit attributes" link of the history item generated by this tool.
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**Syntax**
+
+This tool joins lines of two datasets on a common field. An empty string ("") is not a valid identifier.
+You may choose to include lines of your first input that do not join with your second input.
+
+- Columns are referenced with a **number**. For example, **3** refers to the 3rd column of a tab-delimited file.
+
+-----
+
+**Example**
+
+Dataset1::
+
+  chr1 10 20 geneA 
+  chr1 50 80 geneB
+  chr5 10 40 geneL
+
+Dataset2::
+
+  geneA tumor-suppressor
+  geneB Foxp2
+  geneC Gnas1
+  geneE INK4a
+
+Joining the 4th column of Dataset1 with the 1st column of Dataset2 will yield::
+
+  chr1 10 20 geneA geneA tumor-suppressor
+  chr1 50 80 geneB geneB Foxp2
+
+Joining the 4th column of Dataset1 with the 1st column of Dataset2, while keeping all lines from Dataset1, will yield::
+
+  chr1 10 20 geneA geneA tumor-suppressor
+  chr1 50 80 geneB geneB Foxp2
+  chr5 10 40 geneL
+
+</help>
+</tool>
diff --git a/tools/filters/joiner2.xml b/tools/filters/joiner2.xml
new file mode 100644
index 0000000..93cc920
--- /dev/null
+++ b/tools/filters/joiner2.xml
@@ -0,0 +1,13 @@
+<tool id="joiner2" name="Relational join 2" version="1.0.0">
+  <description>two datasets on a specified column with matching values</description>
+  <command>sort -k $col1 $input1 > $input1.tmp; sort -k $col2 $input2 > $input2.tmp; join -1 $col1 -2 $col2 $input1.tmp $input2.tmp | tr " " "\t" > $out_file1; rm -rf $input1.tmp $input2.tmp </command>
+  <inputs>
+    <param name="input1" label="Combine dataset" format="tabular" type="data" />
+    <param name="col1" label="using column" type="data_column" data_ref="input1" />
+    <param name="input2" label="with dataset"	format="tabular" type="data"/>
+    <param name="col2" label="and column" type="data_column" data_ref="input2" />
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input1" />
+  </outputs>
+</tool>
diff --git a/tools/filters/lav_to_bed.py b/tools/filters/lav_to_bed.py
new file mode 100644
index 0000000..264a6b0
--- /dev/null
+++ b/tools/filters/lav_to_bed.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# Reads a LAV file and writes two BED files.
+from __future__ import print_function
+
+import sys
+
+import bx.align.lav
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def main():
+    try:
+        lav_file = open(sys.argv[1], 'r')
+        bed_file1 = open(sys.argv[2], 'w')
+        bed_file2 = open(sys.argv[3], 'w')
+    except Exception as e:
+        stop_err( str( e ) )
+
+    lavsRead = 0
+    bedsWritten = 0
+    species = {}
+    # TODO: this is really bad since everything is read into memory.  Can we eliminate this tool?
+    for lavBlock in bx.align.lav.Reader( lav_file ):
+        lavsRead += 1
+        for c in lavBlock.components:
+            spec, chrom = bx.align.lav.src_split( c.src )
+            if bedsWritten < 1:
+                if len( species ) == 0:
+                    species[spec] = bed_file1
+                elif len( species ) == 1:
+                    species[spec] = bed_file2
+                else:
+                    continue  # this is a pairwise alignment...
+            if spec in species:
+                species[spec].write( "%s\t%i\t%i\t%s_%s\t%i\t%s\n" % ( chrom, c.start, c.end, spec, str( bedsWritten ), 0, c.strand ) )
+        bedsWritten += 1
+
+    for spec, file in species.items():
+        print("#FILE\t%s\t%s" % (file.name, spec))
+
+    lav_file.close()
+    bed_file1.close()
+    bed_file2.close()
+
+    print("%d lav blocks read, %d regions written\n" % (lavsRead, bedsWritten))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/lav_to_bed.xml b/tools/filters/lav_to_bed.xml
new file mode 100644
index 0000000..369a0e5
--- /dev/null
+++ b/tools/filters/lav_to_bed.xml
@@ -0,0 +1,68 @@
+<tool id="lav_to_bed1" name="LAV to BED" version="1.0.0">
+  <description>Converts a LAV formatted file to BED format</description>
+  <command interpreter="python">lav_to_bed.py $lav_file $bed_file1 $bed_file2</command>
+  <inputs>
+    <param name="lav_file" type="data" format="lav" label="LAV File" optional="False"/>
+  </inputs>
+  <outputs>
+    <data name="bed_file1" format="bed"/>
+    <data name="bed_file2" format="bed"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="lav_file" value="2.lav" ftype="lav" />
+      <output name="bed_file2" file="lav_to_bed_out_1.bed" />
+      <output name="bed_file2" file="lav_to_bed_out_2.bed" />
+    </test>
+  </tests>
+  <help>
+
+**Syntax**
+
+This tool converts a LAV formatted file to the BED format.
+
+- **LAV format** LAV is an alignment format developed by Webb Miller's group at Penn State University. It is the primary output format for BLASTZ.
+
+- **BED format** Browser Extensible Data format was designed at UCSC for displaying data tracks in the Genome Browser.
+
+-----
+
+**Example**
+
+- Convert LAV format::
+
+    #:lav
+    s {
+      "/galaxy/data/hg16/seq/chr19.nib" 1 63811651 0 1
+      "/galaxy/data/mm5/seq/chr11.nib" 1 121648857 0 1
+    }
+    h {
+      "> hg16.chr19"
+      "> mm5.chr11 (reverse complement)"
+    }
+    a {
+      s 3500
+      b 3001012 70568380
+      e 3001075 70568443
+      l 3001012 70568380 3001075 70568443 81
+    }
+    a {
+      s 3900
+      b 3008279 70573976
+      e 3008357 70574054
+      l 3008279 70573976 3008357 70574054 78
+    }
+    #:eof
+
+- Into two BED formatted files::
+
+    chr19	3001011	3001075	hg16_0	0	+
+    chr19	3008278	3008357	hg16_1	0	+
+
+ **and**::
+
+    chr11	70568379	70568443	mm5_0	0	+
+    chr11	70573975	70574054	mm5_1	0	+
+  </help>
+  <code file="lav_to_bed_code.py"/>
+</tool>
diff --git a/tools/filters/lav_to_bed_code.py b/tools/filters/lav_to_bed_code.py
new file mode 100644
index 0000000..9fc5274
--- /dev/null
+++ b/tools/filters/lav_to_bed_code.py
@@ -0,0 +1,19 @@
+# Set build, name, and info for each output BED file
+def exec_after_process(app, inp_data, out_data, param_dict, tool, stdout, stderr):
+    new_stdout = ""
+    filename_to_build = {}
+    for line in stdout.split("\n"):
+        if line.startswith("#FILE"):
+            fields = line.split("\t")
+            filename_to_build[fields[1]] = fields[2].strip()
+        else:
+            new_stdout = "%s%s" % ( new_stdout, line )
+    for data in out_data.values():
+        try:
+            data.info = "%s\n%s" % ( new_stdout, stderr )
+            data.dbkey = filename_to_build[data.file_name]
+            data.name = "%s (%s)" % ( data.name, data.dbkey )
+            app.model.context.add( data )
+            app.model.context.flush()
+        except:
+            continue
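
For reference, a sketch of the "#FILE" convention this hook parses (paths and
builds are illustrative)::

    stdout = '#FILE\t/tmp/dataset_1.dat\thg16\n#FILE\t/tmp/dataset_2.dat\tmm5\n'
    filename_to_build = {}
    for line in stdout.split('\n'):
        if line.startswith('#FILE'):
            fields = line.split('\t')
            filename_to_build[fields[1]] = fields[2].strip()
    # filename_to_build == {'/tmp/dataset_1.dat': 'hg16', '/tmp/dataset_2.dat': 'mm5'}
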
diff --git a/tools/filters/mergeCols.py b/tools/filters/mergeCols.py
new file mode 100644
index 0000000..9e27a20
--- /dev/null
+++ b/tools/filters/mergeCols.py
@@ -0,0 +1,43 @@
+from __future__ import print_function
+
+import sys
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def __main__():
+    try:
+        infile = open( sys.argv[1], 'r')
+        outfile = open( sys.argv[2], 'w')
+    except:
+        stop_err( 'Cannot open or create a file\n' )
+
+    if len( sys.argv ) < 4:
+        stop_err( 'No columns to merge' )
+    else:
+        cols = sys.argv[3:]
+
+    skipped_lines = 0
+
+    for line in infile:
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( '#' ):
+            fields = line.split( '\t' )
+            line += '\t'
+            for col in cols:
+                try:
+                    line += fields[ int( col ) - 1 ]
+                except:
+                    skipped_lines += 1
+
+            print(line, file=outfile)
+
+    if skipped_lines > 0:
+        print('Skipped %d invalid lines' % skipped_lines)
+
+
+if __name__ == "__main__":
+    __main__()
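
The merge performed above is plain concatenation of the selected columns,
appended as a new rightmost column; a small sketch with assumed values::

    fields = 'chr1\t10\t1000\tgene1'.split('\t')
    cols = ['4', '1']                  # 1-based, as passed on the command line
    line = '\t'.join(fields) + '\t'
    for col in cols:
        line += fields[int(col) - 1]
    # line == 'chr1\t10\t1000\tgene1\tgene1chr1'
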
diff --git a/tools/filters/mergeCols.xml b/tools/filters/mergeCols.xml
new file mode 100644
index 0000000..44f813e
--- /dev/null
+++ b/tools/filters/mergeCols.xml
@@ -0,0 +1,63 @@
+<tool id="mergeCols1" name="Merge Columns" version="1.0.1">
+  <description>together</description>
+  <command interpreter="python">
+   mergeCols.py 
+      $input1
+      $out_file1
+      $col1
+      $col2
+      #for $col in $columns
+        ${col.datacol}
+      #end for
+  </command>
+  <inputs>
+    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
+    <param name="col1" label="Merge column" type="data_column" data_ref="input1" />
+    <param name="col2" label="with column" type="data_column" data_ref="input1" help="Need to add more columns? Use controls below."/>
+    <repeat name="columns" title="Columns">
+      <param name="datacol" label="Add column" type="data_column" data_ref="input1" />
+    </repeat>
+  </inputs>
+  <outputs>
+    <data format="tabular" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="col1" value="4" />
+      <param name="col2" value="1" />
+      <param name="datacol" value="6" />
+      <output name="out_file1" file="mergeCols.dat"/>
+    </test>
+  </tests>
+<help>
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**What it does**
+
+This tool merges columns together. Any number of valid columns can be merged in any order.
+
+-----
+
+**Example**
+
+Input dataset (five columns: c1, c2, c3, c4, and c5)::
+
+   1 10   1000  gene1 chr
+   2 100  1500  gene2 chr
+
+merging columns "**c5,c1**" will return::
+
+   1 10   1000  gene1 chr chr1
+   2 100  1500  gene2 chr chr2
+
+.. class:: warningmark
+   
+Note that all original columns are preserved and the result of the merge is added as the rightmost column.
+  </help>
+</tool>
diff --git a/tools/filters/pasteWrapper.pl b/tools/filters/pasteWrapper.pl
new file mode 100644
index 0000000..8808824
--- /dev/null
+++ b/tools/filters/pasteWrapper.pl
@@ -0,0 +1,35 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+my $command = "";
+# a wrapper for paste for use in galaxy
+# pasteWrapper.pl [filename1] [filename2] [delimiter] [output]
+
+die "Check arguments" unless @ARGV == 4;
+
+if ($ARGV[2] eq 'T') {
+    $command = "paste $ARGV[0] $ARGV[1]";
+} elsif ($ARGV[2] eq 'C') {
+    $command = "paste -d \",\" $ARGV[0] $ARGV[1]";
+} elsif ($ARGV[2] eq 'D') {
+    $command = "paste -d \"-\" $ARGV[0] $ARGV[1]";
+} elsif ($ARGV[2] eq 'U') {
+    $command = "paste -d \"_\" $ARGV[0] $ARGV[1]";
+} elsif ($ARGV[2] eq 'P') {
+    $command = "paste -d \"|\" $ARGV[0] $ARGV[1]";
+} elsif ($ARGV[2] eq 'Dt') {
+    $command = "paste -d \".\" $ARGV[0] $ARGV[1]";
+} elsif ($ARGV[2] eq 'Sp') {
+    $command = "paste -d \" \" $ARGV[0] $ARGV[1]";
+}
+
+open (OUT, ">$ARGV[3]") or die "Cannot create $ARGV[2]:$!\n";
+open (PASTE, "$command |") or die "Cannot run paste:$!\n";
+
+while (<PASTE>) {
+    print OUT;
+}
+close OUT;
+close PASTE;
+    
diff --git a/tools/filters/pasteWrapper.xml b/tools/filters/pasteWrapper.xml
new file mode 100644
index 0000000..e853d61
--- /dev/null
+++ b/tools/filters/pasteWrapper.xml
@@ -0,0 +1,68 @@
+<tool id="Paste1" name="Paste" version="1.0.0">
+  <description>two files side by side</description>
+  <command interpreter="perl">pasteWrapper.pl $input1 $input2 $delimiter $out_file1</command>
+  <inputs>
+<!--    <display>paste $input1 and $input2 using $delimiter as delimiter</display> -->
+    <param format="txt" name="input1" type="data" label="Paste"/>
+    <param format="txt" name="input2" type="data" label="and"/>
+    <param name="delimiter" type="select" label="Delimit by">
+      <option value="T">Tab</option>
+      <option value="Dt">Dot</option>
+      <option value="C">Comma</option>
+      <option value="D">Dash</option>
+      <option value="U">Underscore</option>
+      <option value="P">Pipe</option>
+      <option value="Sp">Space</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input1">
+      <change_format>
+        <when input_dataset="input1" attribute="ext" value="bed" format="interval"/>
+      </change_format>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="input2" value="2.bed"/>
+      <param name="delimiter" value="T"/>
+      <output name="out_file1" file="eq-paste.dat"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: infomark
+
+Paste preserves column assignments of the first dataset.
+
+-----
+
+**What it does**
+
+This tool merges two datasets side by side. If the first (left) dataset contains column assignments such as chromosome, start, end and strand, these will be preserved. However, if you would like to change column assignments, click the pencil icon in the history item.
+
+-----
+
+**Example**
+
+First dataset::
+
+    a 1
+    a 2
+    a 3
+
+Second dataset::
+
+    20
+    30
+    40
+
+Pasting them together will produce::
+
+    a 1 20
+    a 2 30
+    a 3 40
+
+</help>
+</tool>
diff --git a/tools/filters/random_lines_two_pass.py b/tools/filters/random_lines_two_pass.py
new file mode 100644
index 0000000..44847a1
--- /dev/null
+++ b/tools/filters/random_lines_two_pass.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+# Selects N random lines from a file and outputs to another file, maintaining original line order
+# allows specifying a seed
+# does two passes to determine line offsets/count, and then to output contents
+from __future__ import print_function
+
+import optparse
+import random
+
+
+def get_random_by_subtraction( line_offsets, num_lines ):
+    while len( line_offsets ) > num_lines:
+        del line_offsets[ random.randint( 0, len( line_offsets ) - 1 ) ]
+    return line_offsets
+
+
+def get_random_by_sample( line_offsets, num_lines ):
+    line_offsets = random.sample( line_offsets, num_lines )
+    line_offsets.sort()
+    return line_offsets
+
+
+def get_random( line_offsets, num_lines ):
+    if num_lines > ( len( line_offsets ) / 2 ):
+        return get_random_by_subtraction( line_offsets, num_lines )
+    else:
+        return get_random_by_sample( line_offsets, num_lines )
+
+
+def __main__():
+    parser = optparse.OptionParser()
+    parser.add_option( '-s', '--seed', dest='seed', action='store', type="string", default=None, help='Set the random seed.' )
+    (options, args) = parser.parse_args()
+
+    assert len( args ) == 3, "Invalid command line specified."
+
+    input = open( args[0], 'rb' )
+    output = open( args[1], 'wb' )
+    num_lines = int( args[2] )
+    assert num_lines > 0, "You must select at least one line."
+
+    if options.seed is not None:
+        random.seed( options.seed )
+
+    # get line offsets
+    line_offsets = []
+    teller = input.tell
+    readliner = input.readline
+    appender = line_offsets.append
+    while True:
+        offset = teller()
+        if readliner():
+            appender( offset )
+        else:
+            break
+
+    total_lines = len( line_offsets )
+    assert num_lines <= total_lines, "Error: asked to select more lines (%i) than there were in the file (%i)." % ( num_lines, total_lines )
+
+    # get random line offsets
+    line_offsets = get_random( line_offsets, num_lines )
+
+    # write out random lines
+    seeker = input.seek
+    writer = output.write
+    for line_offset in line_offsets:
+        seeker( line_offset )
+        writer( readliner() )
+    input.close()
+    output.close()
+    print("Kept %i of %i total lines." % ( num_lines, total_lines ))
+    if options.seed is not None:
+        print('Used random seed of "%s".' % options.seed)
+
+
+if __name__ == "__main__":
+    __main__()
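
The choice between the two selection strategies above is a small optimization;
both yield a sorted subset of offsets. A standalone sketch::

    import random
    offsets = list(range(100))
    keep = sorted(random.sample(offsets, 5))   # cheap when few lines are kept
    # deleting random entries until 5 remain costs less when nearly all lines
    # are kept, and leaves the survivors already in file order
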
diff --git a/tools/filters/randomlines.py b/tools/filters/randomlines.py
new file mode 100644
index 0000000..eae7cf9
--- /dev/null
+++ b/tools/filters/randomlines.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+# Kanwei Li, 2010
+# Selects N random lines from a file and outputs to another file
+
+import random
+import sys
+
+
+def main():
+    infile = open(sys.argv[1], 'r')
+    total_lines = int(sys.argv[2])
+
+    if total_lines < 1:
+        sys.stderr.write( "Must select at least one line." )
+        sys.exit()
+
+    kept = []
+    n = 0
+    for line in infile:
+        line = line.rstrip("\n")
+        n += 1
+        if (n <= total_lines):
+            kept.append(line)
+        elif random.randint(1, n) <= total_lines:
+            kept.pop(random.randint(0, total_lines - 1))
+            kept.append(line)
+
+    if n < total_lines:
+        sys.stderr.write( "Error: asked to select more lines than there were in the file." )
+        sys.exit()
+
+    open(sys.argv[3], 'w').write( "\n".join(kept) )
+
+
+if __name__ == "__main__":
+    main()
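
The loop above is a reservoir-sampling variant (it pops a random survivor and
appends the newcomer, which preserves the uniform distribution over kept lines
but not their original order). For comparison, a sketch of the textbook
Algorithm R form (illustrative, not the tool's code)::

    import random

    def reservoir(iterable, k):
        kept = []
        for n, item in enumerate(iterable, start=1):
            if n <= k:
                kept.append(item)
            else:
                j = random.randint(1, n)
                if j <= k:
                    kept[j - 1] = item  # replace a uniformly chosen slot
        return kept
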
diff --git a/tools/filters/randomlines.xml b/tools/filters/randomlines.xml
new file mode 100644
index 0000000..526c66d
--- /dev/null
+++ b/tools/filters/randomlines.xml
@@ -0,0 +1,66 @@
+<tool id="random_lines1" name="Select random lines" version="2.0.1">
+  <description>from a file</description>
+  <command interpreter="python">random_lines_two_pass.py "${input}" "${out_file1}" "${num_lines}"
+  #if str( $seed_source.seed_source_selector ) == "set_seed":
+      --seed "${seed_source.seed}"
+  #end if
+  </command>
+  <inputs>
+    <param name="num_lines" size="5" type="integer" value="1" label="Randomly select" help="lines"/>
+    <param format="txt" name="input" type="data" label="from"/>
+    <conditional name="seed_source">
+      <param name="seed_source_selector" type="select" label="Set a random seed">
+        <option value="no_seed" selected="True">Don't set seed</option>
+        <option value="set_seed">Set seed</option>
+      </param>
+      <when value="no_seed">
+        <!-- Do nothing here -->
+      </when>
+      <when value="set_seed"> 
+        <param name="seed" type="text" label="Random seed" />
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="num_lines" value="65"/>
+      <param name="input" value="1.bed"/>
+      <param name="seed_source_selector" value="no_seed"/>
+      <output name="out_file1" file="1.bed"/>
+    </test>
+    <test>
+      <param name="num_lines" value="1"/>
+      <param name="input" value="1.bed"/>
+      <param name="seed_source_selector" value="set_seed"/>
+      <param name="seed" value="asdf"/>
+      <output name="out_file1" file="1_bed_random_lines_1_seed_asdf_out.bed"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool selects N random lines from a file, with no repeats, and preserving ordering.
+
+-----
+
+**Example**
+
+Input File::
+
+    chr7  56632  56652   D17003_CTCF_R6  310  +
+    chr7  56736  56756   D17003_CTCF_R7  354  +
+    chr7  56761  56781   D17003_CTCF_R4  220  +
+    chr7  56772  56792   D17003_CTCF_R7  372  +
+    chr7  56775  56795   D17003_CTCF_R4  207  +
+
+Selecting 2 random lines might return this::
+
+    chr7  56736  56756   D17003_CTCF_R7  354  +
+    chr7  56775  56795   D17003_CTCF_R4  207  +
+
+    </help>
+</tool>
diff --git a/tools/filters/remove_beginning.pl b/tools/filters/remove_beginning.pl
new file mode 100644
index 0000000..a8d80ac
--- /dev/null
+++ b/tools/filters/remove_beginning.pl
@@ -0,0 +1,33 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# Removes the specified number of lines from the beginning of the file.
+# remove_beginning.pl [input] [num_lines] [output]
+
+die "Check arguments" unless @ARGV == 3;
+
+my $inputfile = $ARGV[0];
+my $num_lines = $ARGV[1];
+my $outputfile = $ARGV[2];
+
+my $curCount=0;
+
+my $fhIn;
+open ($fhIn, "< $inputfile") or die "Cannot open source file";
+
+my $fhOut;
+open ($fhOut, "> $outputfile");
+
+while (<$fhIn>)
+{
+    $curCount++;
+    if ($curCount<=$num_lines)
+    {
+        next;
+    }
+    print $fhOut $_;
+}
+close ($fhIn) or die "Cannot close source file";
+close ($fhOut) or die "Cannot close output file";
diff --git a/tools/filters/remove_beginning.xml b/tools/filters/remove_beginning.xml
new file mode 100644
index 0000000..a929e48
--- /dev/null
+++ b/tools/filters/remove_beginning.xml
@@ -0,0 +1,42 @@
+<tool id="Remove beginning1" name="Remove beginning" version="1.0.0">
+  <description>of a file</description>
+  <command interpreter="perl">remove_beginning.pl $input $num_lines $out_file1</command>
+  <inputs>
+    <param name="num_lines" size="5" type="integer" value="1" label="Remove first" help="lines"/>
+    <param format="txt" name="input" type="data" label="from"/>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="num_lines" value="5"/>
+      <param name="input" value="1.bed"/>
+      <output name="out_file1" file="eq-removebeginning.dat"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool removes a specified number of lines from the beginning of a dataset.
+
+-----
+
+**Example**
+
+Input File::
+
+    chr7  56632  56652   D17003_CTCF_R6  310  +
+    chr7  56736  56756   D17003_CTCF_R7  354  +
+    chr7  56761  56781   D17003_CTCF_R4  220  +
+    chr7  56772  56792   D17003_CTCF_R7  372  +
+    chr7  56775  56795   D17003_CTCF_R4  207  +
+
+After removing the first 3 lines the dataset will look like this::
+
+    chr7  56772  56792   D17003_CTCF_R7  372  +
+    chr7  56775  56795   D17003_CTCF_R4  207  +
+
+</help>
+</tool>
diff --git a/tools/filters/secure_hash_message_digest.py b/tools/filters/secure_hash_message_digest.py
new file mode 100644
index 0000000..27872d9
--- /dev/null
+++ b/tools/filters/secure_hash_message_digest.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+"""
+A script for calculating secure hashes / message digests.
+"""
+import hashlib
+import optparse
+
+from galaxy.util.odict import odict
+
+HASH_ALGORITHMS = [ 'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512' ]
+CHUNK_SIZE = 2 ** 20  # 1 MB
+
+
+def __main__():
+    # Parse Command Line
+    parser = optparse.OptionParser()
+    parser.add_option( '-a', '--algorithm', dest='algorithms', action='append', type="string", help='Algorithms to use, e.g. (md5, sha1, sha224, sha256, sha384, sha512)' )
+    parser.add_option( '-i', '--input', dest='input', action='store', type="string", help='Input filename' )
+    parser.add_option( '-o', '--output', dest='output', action='store', type="string", help='Output filename' )
+    (options, args) = parser.parse_args()
+
+    algorithms = odict()
+    for algorithm in options.algorithms:
+        assert algorithm in HASH_ALGORITHMS, "Invalid algorithm specified: %s" % ( algorithm )
+        assert algorithm not in algorithms, "Specify each algorithm only once."
+        algorithms[ algorithm ] = hashlib.new( algorithm )
+    assert options.algorithms, "You must provide at least one algorithm."
+    assert options.input, "You must provide an input filename."
+    assert options.output, "You must provide an output filename."
+
+    input = open( options.input )
+    while True:
+        chunk = input.read( CHUNK_SIZE )
+        if chunk:
+            for algorithm in algorithms.values():
+                algorithm.update( chunk )
+        else:
+            break
+
+    output = open( options.output, 'wb' )
+    output.write( '#%s\n' % ( '\t'.join( algorithms.keys() ) ) )
+    output.write( '%s\n' % ( '\t'.join( x.hexdigest() for x in algorithms.values() ) ) )
+    output.close()
+
+
+if __name__ == "__main__":
+    __main__()
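
The chunked update above keeps memory use flat regardless of input size; a
minimal standalone sketch of the same pattern (hypothetical filename)::

    import hashlib

    md5 = hashlib.new('md5')
    with open('dataset.dat', 'rb') as f:
        while True:
            chunk = f.read(2 ** 20)   # 1 MB at a time
            if not chunk:
                break
            md5.update(chunk)
    print(md5.hexdigest())
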
diff --git a/tools/filters/secure_hash_message_digest.xml b/tools/filters/secure_hash_message_digest.xml
new file mode 100644
index 0000000..e20ce80
--- /dev/null
+++ b/tools/filters/secure_hash_message_digest.xml
@@ -0,0 +1,45 @@
+<tool id="secure_hash_message_digest" name="Secure Hash / Message Digest" version="0.0.1">
+    <description>on a dataset</description>
+    <command interpreter="python">secure_hash_message_digest.py --input "${input1}" --output "${out_file1}"
+        #if $algorithms.value:
+            #for $algorithm in str( $algorithms ).split( "," ):
+                --algorithm "${algorithm}"
+            #end for
+        #end if
+    </command>
+    <inputs>
+        <param format="data" name="input1" type="data" label="Text file"/>
+        <param name="algorithms" type="select" multiple="True" display="checkboxes" label="Choose the algorithms">
+          <option value="md5"/>
+          <option value="sha1"/>
+          <option value="sha224"/>
+          <option value="sha256"/>
+          <option value="sha384"/>
+          <option value="sha512"/>
+          <validator type="no_options" message="You must select at least one algorithm." />
+        </param>
+    </inputs>
+    <outputs>
+        <data format="tabular" name="out_file1"/>
+    </outputs>
+    <tests>
+        <test>
+          <param name="input1" value="1.bed"/>
+          <param name="algorithms" value="md5,sha1,sha224,sha384,sha512"/>
+          <output name="out_file1" file="secure_hash_message_digest_out1.tabular" />
+        </test>
+    </tests>
+    <help>
+
+**What it does**
+
+This tool outputs Secure Hashes / Message Digests of a dataset using the user selected algorithms.
+
+------
+
+**Citation**
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
+    </help>
+</tool>
diff --git a/tools/filters/sff_extract.py b/tools/filters/sff_extract.py
new file mode 100644
index 0000000..bf31807
--- /dev/null
+++ b/tools/filters/sff_extract.py
@@ -0,0 +1,1340 @@
+#!/usr/bin/python
+'''This software extracts the seq, qual and ancillary information from an sff
+file, like the ones used by the 454 sequencer.
+
+Optionally, it can also split paired-end reads if given the linker sequence.
+The splitting is done with maximum match, i.e., every occurrence of the linker
+sequence will be removed, even if it occurs multiple times.'''
+
+# copyright Jose Blanca and Bastien Chevreux
+# COMAV institute, Universidad Politecnica de Valencia (UPV)
+# Valencia, Spain
+
+# additions to handle paired end reads by Bastien Chevreux
+# bugfixes for linker specific lengths: Lionel Guy
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import print_function
+
+import os
+import struct
+import subprocess
+import sys
+import tempfile
+
+__author__ = 'Jose Blanca and Bastien Chevreux'
+__copyright__ = 'Copyright 2008, Jose Blanca, COMAV, and Bastien Chevreux'
+__license__ = 'GPLv3 or later'
+__version__ = '0.2.10'
+__email__ = 'jblanca at btc.upv.es'
+__status__ = 'beta'
+
+fake_sff_name = 'fake_sff_name'
+
+# readname as key: lines with matches from SSAHA, one best match
+ssahapematches = {}
+# linker readname as key: length of linker sequence
+linkerlengths = {}
+
+# set to true if something really fishy is going on with the sequences
+stern_warning = True
+
+
+def read_bin_fragment(struct_def, fileh, offset=0, data=None, byte_padding=None):
+    '''It reads a chunk of a binary file.
+
+    You have to provide the struct, a file object, the offset (where to start
+    reading).
+    Also you can provide an optional dict that will be populated with the
+    extracted data.
+    If a byte_padding is given the number of bytes read will be a multiple of
+    that number, adding the required pad at the end.
+    It returns the number of bytes read and the data dict.
+    '''
+    if data is None:
+        data = {}
+
+    # we read each item
+    bytes_read = 0
+    for item in struct_def:
+        # we go to the place and read
+        fileh.seek(offset + bytes_read)
+        n_bytes = struct.calcsize(item[1])
+        buffer = fileh.read(n_bytes)
+        read = struct.unpack('>' + item[1], buffer)
+        if len(read) == 1:
+            read = read[0]
+        data[item[0]] = read
+        bytes_read += n_bytes
+
+    # if there is byte_padding the bytes_to_read should be a multiple of the
+    # byte_padding
+    if byte_padding is not None:
+        pad = byte_padding
+        bytes_read = ((bytes_read + pad - 1) // pad) * pad
+
+    return (bytes_read, data)
+
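+# Illustrative use of read_bin_fragment against an in-memory stream (not part
+# of the original source):
+#     import io
+#     _, d = read_bin_fragment([('a', 'I'), ('b', 'H')],
+#                              io.BytesIO(struct.pack('>IH', 42, 7)))
+#     # d == {'a': 42, 'b': 7}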
+
+def check_magic(magic):
+    '''It checks that the magic number of the file matches the sff magic.'''
+    if magic != 779314790:
+        raise RuntimeError('This file does not seem to be an sff file.')
+
+
+def check_version(version):
+    '''It checks that the version is supported, otherwise it raises an error.'''
+    supported = ('\x00', '\x00', '\x00', '\x01')
+    i = 0
+    for item in version:
+        if version[i] != supported[i]:
+            raise RuntimeError('SFF version not supported. Please contact the author of the software.')
+        i += 1
+
+
+def read_header(fileh):
+    '''It reads the header from the sff file and returns a dict with the
+    information'''
+    # first we read the first part of the header
+    head_struct = [
+        ('magic_number', 'I'),
+        ('version', 'cccc'),
+        ('index_offset', 'Q'),
+        ('index_length', 'I'),
+        ('number_of_reads', 'I'),
+        ('header_length', 'H'),
+        ('key_length', 'H'),
+        ('number_of_flows_per_read', 'H'),
+        ('flowgram_format_code', 'B'),
+    ]
+    data = {}
+    first_bytes, data = read_bin_fragment(struct_def=head_struct, fileh=fileh,
+                                          offset=0, data=data)
+    check_magic(data['magic_number'])
+    check_version(data['version'])
+    # now that we know the number_of_flows_per_read and the key_length
+    # we can read the second part of the header
+    struct2 = [
+        ('flow_chars', str(data['number_of_flows_per_read']) + 'c'),
+        ('key_sequence', str(data['key_length']) + 'c')
+    ]
+    read_bin_fragment(struct_def=struct2, fileh=fileh, offset=first_bytes, data=data)
+    return data
+
+
+def read_sequence(header, fileh, fposition):
+    '''It reads one read from the sff file located at the fposition and
+    returns a dict with the information.'''
+    # the sequence struct
+    read_header_1 = [
+        ('read_header_length', 'H'),
+        ('name_length', 'H'),
+        ('number_of_bases', 'I'),
+        ('clip_qual_left', 'H'),
+        ('clip_qual_right', 'H'),
+        ('clip_adapter_left', 'H'),
+        ('clip_adapter_right', 'H'),
+    ]
+
+    def read_header_2(name_length):
+        '''It returns the struct definition for the second part of the header'''
+        return [('name', str(name_length) + 'c')]
+
+    def read_data(number_of_bases):
+        '''It returns the struct definition for the read data section.'''
+        if header['flowgram_format_code'] == 1:
+            flow_type = 'H'
+        else:
+            raise Exception('file version not supported')
+        number_of_bases = str(number_of_bases)
+        return [
+            ('flowgram_values', str(header['number_of_flows_per_read']) + flow_type),
+            ('flow_index_per_base', number_of_bases + 'B'),
+            ('bases', number_of_bases + 'c'),
+            ('quality_scores', number_of_bases + 'B'),
+        ]
+
+    data = {}
+    # we read the first part of the header
+    bytes_read, data = read_bin_fragment(struct_def=read_header_1,
+                                    fileh=fileh, offset=fposition, data=data)
+
+    read_bin_fragment(struct_def=read_header_2(data['name_length']),
+                      fileh=fileh, offset=fposition + bytes_read, data=data)
+    # we join the letters of the name
+    data['name'] = ''.join(data['name'])
+    offset = data['read_header_length']
+    # we read the sequence and the quality
+    read_data_st = read_data(data['number_of_bases'])
+    bytes_read, data = read_bin_fragment(struct_def=read_data_st,
+                                    fileh=fileh, offset=fposition + offset,
+                                    data=data, byte_padding=8)
+    # we join the bases
+    data['bases'] = ''.join(data['bases'])
+
+    # correct for the case that the right clip is <= the left clip;
+    # in this case both clips are set to 0 (right clip == 0 means
+    # "whole sequence")
+    if data['clip_qual_right'] <= data['clip_qual_left']:
+        data['clip_qual_right'] = 0
+        data['clip_qual_left'] = 0
+    if data['clip_adapter_right'] <= data['clip_adapter_left']:
+        data['clip_adapter_right'] = 0
+        data['clip_adapter_left'] = 0
+
+    # the clipping section follows the NCBI Trace Archive RFC guidelines
+    # http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?cmd=show&f=rfc&m=doc&s=rfc
+    # if there's no adapter clip: qual -> vector
+    # else:                       qual -> qual
+    #                             adapter -> vector
+
+    if not data['clip_adapter_left']:
+        data['clip_adapter_left'], data['clip_qual_left'] = data['clip_qual_left'], data['clip_adapter_left']
+    if not data['clip_adapter_right']:
+        data['clip_adapter_right'], data['clip_qual_right'] = data['clip_qual_right'], data['clip_adapter_right']
+
+    # see whether we have to override the minimum left clips
+    if config['min_leftclip'] > 0:
+        if data['clip_adapter_left'] > 0 and data['clip_adapter_left'] < config['min_leftclip']:
+            data['clip_adapter_left'] = config['min_leftclip']
+        if data['clip_qual_left'] > 0 and data['clip_qual_left'] < config['min_leftclip']:
+            data['clip_qual_left'] = config['min_leftclip']
+
+    # for handling the -c (clip) option gently, we already clip here
+    #  and set all clip points to the sequence end points
+    if config['clip']:
+        data['bases'], data['quality_scores'] = clip_read(data)
+
+        data['number_of_bases'] = len(data['bases'])
+        data['clip_qual_right'] = data['number_of_bases']
+        data['clip_adapter_right'] = data['number_of_bases']
+        data['clip_qual_left'] = 0
+        data['clip_adapter_left'] = 0
+
+    return data['read_header_length'] + bytes_read, data
+
+
+def sequences(fileh, header):
+    '''It returns a generator with the data for each read.'''
+    # now we can read all the sequences
+    fposition = header['header_length']  # position in the file
+    reads_read = 0
+    while True:
+        if fposition == header['index_offset']:
+            # we have to skip the index section
+            fposition += header['index_length']
+            continue
+        else:
+            bytes_read, seq_data = read_sequence(header=header, fileh=fileh,
+                                                 fposition=fposition)
+            yield seq_data
+            fposition += bytes_read
+            reads_read += 1
+            if reads_read >= header['number_of_reads']:
+                break
+
+
+def remove_last_xmltag_in_file(fname, tag=None):
+    '''Given an xml file name and a tag, it removes the last tag of the
+    file if it matches the given tag. Tag removal is performed via file
+    truncation.
+
+    If the given tag is not the last one in the file, a RuntimeError will be
+    raised.
+
+    The resulting xml file will no longer be valid xml. This function is a
+    hack that allows appending records to xml files in a quick and dirty way.
+    '''
+
+    fh = open(fname, 'r+')
+    # we have to read from the end to the start of the file and keep the
+    # string enclosed by </ >
+    i = -1
+    last_tag = []  # the chars that form the last tag
+    while True:
+        fh.seek(i, 2)
+        char = fh.read(1)
+        if not char.isspace():
+            last_tag.append(char)
+        if char == '<':
+            break
+        i -= 1
+
+    # we have read the last tag backwards
+    last_tag = ''.join(last_tag[::-1])
+    # we remove the </ and >
+    last_tag = last_tag.rstrip('>').lstrip('</')
+
+    # we check that we're removing the asked tag
+    if tag is not None and tag != last_tag:
+        etxt = 'The given xml tag (%s) was not the last one in the file' % tag
+        raise RuntimeError(etxt)
+
+    # while we are at it: also remove all white spaces in that line :-)
+    i -= 1
+    while True:
+        fh.seek(i, 2)
+        char = fh.read(1)
+        if not char == ' ' and not char == '\t':
+            break
+        if fh.tell() == 1:
+            break
+        i -= 1
+
+    fh.truncate()
+
+    fh.close()
+    return last_tag
+
+
+def create_basic_xml_info(readname, fname):
+    '''Formats a number of read-specific infos into XML format.
+    Currently formatted: name and the tags set from the command line.
+    '''
+    to_print = ['    <trace>\n']
+    to_print.append('        <trace_name>')
+    to_print.append(readname)
+    to_print.append('</trace_name>\n')
+
+    # extra information
+    # do we have extra info for this file?
+    info = None
+    if config['xml_info']:
+        # with this name?
+        if fname in config['xml_info']:
+            info = config['xml_info'][fname]
+        else:
+            # with no name?
+            try:
+                info = config['xml_info'][fake_sff_name]
+            except KeyError:
+                pass
+    # we print the info that we have
+    if info:
+        for key in info:
+            to_print.append('        <' + key + '>' + info[key] +
+                            '</' + key + '>\n')
+
+    return ''.join(to_print)
+
+
+def create_clip_xml_info(readlen, adapl, adapr, quall, qualr):
+    '''Takes the clip values of the read and formats them into XML
+    Corrects "wrong" values that might have resulted through
+    simplified calculations earlier in the process of conversion
+    (especially during splitting of paired-end reads)
+    '''
+
+    to_print = [""]
+
+    # if the right borders are >= the read length, they don't need
+    # to be printed
+    if adapr >= readlen:
+        adapr = 0
+    if qualr >= readlen:
+        qualr = 0
+
+    # BaCh
+    # when called via split_paired_end(), some values may be < 0
+    #  (when clip values were 0 previously)
+    # instead of putting tons of if clauses for different calculations there,
+    #  I centralise the corrective measures here:
+    # set all values < 0 to 0
+
+    if adapr < 0:
+        adapr = 0
+    if qualr < 0:
+        qualr = 0
+    if adapl < 0:
+        adapl = 0
+    if quall < 0:
+        quall = 0
+
+    if quall:
+        to_print.append('        <clip_quality_left>')
+        to_print.append(str(quall))
+        to_print.append('</clip_quality_left>\n')
+    if qualr:
+        to_print.append('        <clip_quality_right>')
+        to_print.append(str(qualr))
+        to_print.append('</clip_quality_right>\n')
+    if adapl:
+        to_print.append('        <clip_vector_left>')
+        to_print.append(str(adapl))
+        to_print.append('</clip_vector_left>\n')
+    if adapr:
+        to_print.append('        <clip_vector_right>')
+        to_print.append(str(adapr))
+        to_print.append('</clip_vector_right>\n')
+    return ''.join(to_print)
+
+
+def create_xml_for_unpaired_read(data, fname):
+    '''Given the data for one read it returns an str with the xml ancillary
+    data.'''
+    to_print = [create_basic_xml_info(data['name'], fname)]
+    # clippings in the XML only if we do not hard clip
+    if not config['clip']:
+        to_print.append(create_clip_xml_info(data['number_of_bases'], data['clip_adapter_left'], data['clip_adapter_right'], data['clip_qual_left'], data['clip_qual_right']))
+    to_print.append('    </trace>\n')
+    return ''.join(to_print)
+
+
+def format_as_fasta(name, seq, qual):
+    name_line = ''.join(('>', name, '\n'))
+    seqstring = ''.join((name_line, seq, '\n'))
+    qual_line = ' '.join([str(q) for q in qual])
+    qualstring = ''.join((name_line, qual_line, '\n'))
+    return seqstring, qualstring
+
+
+def format_as_fastq(name, seq, qual):
+    qual_line = ''.join([chr(q + 33) for q in qual])
+    seqstring = ''.join(('@', name, '\n', seq, '\n+\n', qual_line, '\n'))
+    return seqstring
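+
+# Illustration (editor's addition): quality values are encoded with the
+# Sanger/phred+33 offset, e.g.
+#     format_as_fastq('read1', 'ACGT', [40, 40, 30, 20])
+# returns '@read1\nACGT\n+\nII?5\n'
+# (chr(40+33) == 'I', chr(30+33) == '?', chr(20+33) == '5').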
+
+
+def get_read_data(data):
+    '''Given the data for one read it returns 2 strs with the fasta seq
+    and fasta qual.'''
+    # seq and qual
+    if config['mix_case']:
+        seq = sequence_case(data)
+        qual = data['quality_scores']
+    else:
+        seq = data['bases']
+        qual = data['quality_scores']
+
+    return seq, qual
+
+
+def extract_read_info(data, fname):
+    '''Given the data for one read it returns 3 strs with the fasta seq, fasta
+    qual and xml ancillary data.'''
+    seq, qual = get_read_data(data)
+    seqstring, qualstring = format_as_fasta(data['name'], seq, qual)
+    xmlstring = create_xml_for_unpaired_read(data, fname)
+    return seqstring, qualstring, xmlstring
+
+
+def write_sequence(name, seq, qual, seq_fh, qual_fh):
+    '''Writes sequence and quality into FASTA and FASTA qual filehandles
+    (or into FASTQ if no quality filehandle is given).
+    If the sequence length is 0, nothing is written.'''
+    if len(seq) == 0:
+        return
+
+    if qual_fh is None:
+        seq_fh.write(format_as_fastq(name, seq, qual))
+    else:
+        seqstring, qualstring = format_as_fasta(name, seq, qual)
+        seq_fh.write(seqstring)
+        qual_fh.write(qualstring)
+    return
+
+
+def write_unpaired_read(data, sff_fh, seq_fh, qual_fh, xml_fh):
+    '''Writes an unpaired read into FASTA, FASTA qual and XML filehandles
+    (or into FASTQ and XML).
+    If the sequence length is 0, nothing is written.'''
+    seq, qual = get_read_data(data)
+    if len(seq) == 0:
+        return
+
+    write_sequence(data['name'], seq, qual, seq_fh, qual_fh)
+
+    anci = create_xml_for_unpaired_read(data, sff_fh.name)
+    if anci is not None:
+        xml_fh.write(anci)
+    return
+
+
+def reverse_complement(seq):
+    '''Returns the reverse complement of a DNA sequence as string'''
+    compdict = {
+        'a': 't',
+        'c': 'g',
+        'g': 'c',
+        't': 'a',
+        'u': 't',
+        'm': 'k',
+        'r': 'y',
+        'w': 'w',
+        's': 's',
+        'y': 'r',
+        'k': 'm',
+        'v': 'b',
+        'h': 'd',
+        'd': 'h',
+        'b': 'v',
+        'x': 'x',
+        'n': 'n',
+        'A': 'T',
+        'C': 'G',
+        'G': 'C',
+        'T': 'A',
+        'U': 'T',
+        'M': 'K',
+        'R': 'Y',
+        'W': 'W',
+        'S': 'S',
+        'Y': 'R',
+        'K': 'M',
+        'V': 'B',
+        'H': 'D',
+        'D': 'H',
+        'B': 'V',
+        'X': 'X',
+        'N': 'N',
+        '*': '*'}
+
+    complseq = ''.join([compdict[base] for base in seq])
+    # python hack to reverse a list/string/etc
+    complseq = complseq[::-1]
+    return complseq
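+
+# Example (editor's addition): IUPAC ambiguity codes and case are preserved,
+# e.g.
+#     reverse_complement('ACGTn') == 'nACGT'
+#     reverse_complement('AARTT') == 'AAYTT'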
+
+
+def mask_sequence(seq, maskchar, fpos, tpos):
+    '''Given a sequence, mask it with maskchar starting at fpos (including) and
+    ending at tpos (excluding)
+    '''
+    if len(maskchar) > 1:
+        raise RuntimeError("Internal error: more than one character given to mask_sequence")
+    if fpos < 0:
+        fpos = 0
+    if tpos > len(seq):
+        tpos = len(seq)
+
+    newseq = ''.join((seq[:fpos], maskchar * (tpos - fpos), seq[tpos:]))
+
+    return newseq
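+
+# Example (editor's addition): fpos is inclusive, tpos exclusive, and
+# out-of-range positions are clamped to the sequence, e.g.
+#     mask_sequence('ACGTACGT', '#', 2, 5) == 'AC###CGT'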
+
+
+def fragment_sequences(sequence, qualities, splitchar):
+    '''Works like split() on strings, except it does this on a sequence
+    and the corresponding list with quality values.
+    Returns a list of tuples, one per fragment, with the fragment sequence
+    as first and the fragment qualities as second element.'''
+    # this is slow (due to zip and the list appends); an iterator over the
+    #  sequence that finds the boundaries and slices seq and qual would be faster
+
+    if len(sequence) != len(qualities):
+        print(sequence, qualities)
+        raise RuntimeError("Internal error: length of sequence and qualities don't match???")
+
+    retlist = ([])
+    if len(sequence) == 0:
+        return retlist
+
+    actseq = ([])
+    actqual = ([])
+    if sequence[0] != splitchar:
+        inseq = True
+    else:
+        inseq = False
+    for char, qual in zip(sequence, qualities):
+        if inseq:
+            if char != splitchar:
+                actseq.append(char)
+                actqual.append(qual)
+            else:
+                retlist.append((''.join(actseq), actqual))
+                actseq = ([])
+                actqual = ([])
+                inseq = False
+        else:
+            if char != splitchar:
+                inseq = True
+                actseq.append(char)
+                actqual.append(qual)
+
+    if inseq and len(actseq):
+        retlist.append((''.join(actseq), actqual))
+
+    return retlist
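+
+# Example (editor's addition): runs of the split character are collapsed and
+# the qualities stay aligned with their bases, e.g.
+#     fragment_sequences('AA##CC', [1, 2, 3, 4, 5, 6], '#')
+# returns [('AA', [1, 2]), ('CC', [5, 6])].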
+
+
+def calc_subseq_boundaries(maskedseq, maskchar):
+    '''E.g.:
+       ........xxxxxxxx..........xxxxxxxxxxxxxxxxxxxxx.........
+       to
+         (0,8),(8,16),(16,26),(26,47),(47,56)
+    '''
+    blist = ([])
+    if len(maskedseq) == 0:
+        return blist
+
+    inmask = True
+    if maskedseq[0] != maskchar:
+        inmask = False
+
+    start = 0
+    for spos in range(len(maskedseq)):
+        if inmask and maskedseq[spos] != maskchar:
+            blist.append(([start, spos]))
+            start = spos
+            inmask = False
+        elif not inmask and maskedseq[spos] == maskchar:
+            blist.append(([start, spos]))
+            start = spos
+            inmask = True
+
+    blist.append(([start, spos + 1]))
+
+    return blist
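+
+# Example (editor's addition): the boundaries alternate between unmasked and
+# masked stretches and cover the whole sequence, e.g.
+#     calc_subseq_boundaries('AAxxAA', 'x') == [[0, 2], [2, 4], [4, 6]]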
+
+
+def correct_for_smallhits(maskedseq, maskchar, linkername):
+    '''If partial hits were found, take preventive measure: grow
+        the masked areas by 20 bases in each direction
+       Returns either unchanged "maskedseq" or a new sequence
+        with some more characters masked.
+    '''
+    global linkerlengths
+
+    if len(maskedseq) == 0:
+        return maskedseq
+
+    growl = 40
+    growl2 = growl // 2  # integer division: growl2 is used as a repeat count below
+
+    boundaries = calc_subseq_boundaries(maskedseq, maskchar)
+
+    foundpartial = False
+    for bounds in boundaries:
+        left, right = bounds
+        if left != 0 and right != len(maskedseq):
+            if maskedseq[left] == maskchar:
+                # allow 10% discrepancy
+                #    -linkerlengths[linkername]/10
+                # that's a kind of safety net if there are slight sequencing
+                #  errors in the linker itself
+                if right - left < linkerlengths[linkername] - linkerlengths[linkername] / 10:
+                    foundpartial = True
+
+    if not foundpartial:
+        return maskedseq
+
+    # grow
+    newseq = ""
+    for bounds in boundaries:
+        left, right = bounds
+        if maskedseq[left] == maskchar:
+            newseq += maskedseq[left:right]
+        else:
+            clearstart = 0
+            if left > 0:
+                clearstart = left + growl2
+            clearstop = len(maskedseq)
+            if right < len(maskedseq):
+                clearstop = right - growl2
+
+            if clearstop <= clearstart:
+                newseq += maskchar * (right - left)
+            else:
+                if clearstart != left:
+                    newseq += maskchar * growl2
+                newseq += maskedseq[clearstart:clearstop]
+                if clearstop != right:
+                    newseq += maskchar * growl2
+
+    return newseq
+
+
+def split_paired_end(data, sff_fh, seq_fh, qual_fh, xml_fh):
+    '''Splits a paired end read and writes sequences into FASTA, FASTA qual
+    and XML traceinfo file. Returns the number of sequences created.
+
+    As the linker sequence may be anywhere in the read, including the ends
+    and overlapping with bad quality sequence, we need to perform some
+    computing and eventually set new clip points.
+
+    If the resulting split yields only one sequence (because linker
+    was not present or overlapping with left or right clip), only one
+    sequence will be written with ".fn" appended to the name.
+
+    If the read can be split, two reads will be written. The side left of
+    the linker will be named ".r" and will be written in reverse complement
+    into the file to conform with what approximately all assemblers expect
+    when reading paired-end data: reads in forward direction in file. The side
+    right of the linker will be named ".f"
+
+    If SSAHA found a partial linker (linker hit shorter than the linker),
+    the sequences will get a "_pl" suffix and furthermore be cut back thoroughly.
+
+    If SSAHA found multiple occurrences of the linker, the names will get an
+    additional "_mlc" within the name to show that there was "multiple
+    linker contamination".
+
+    For multiple or partial linker, the "good" parts of the reads are
+    stored with a ".part<number>" name, additionally they will not get
+    template information in the XML
+    '''
+    global ssahapematches
+
+    maskchar = "#"
+
+    numseqs = 0
+    readname = data['name']
+    readlen = data['number_of_bases']
+
+    leftclip, rightclip = return_merged_clips(data)
+    seq, qual = get_read_data(data)
+
+    maskedseq = seq
+    if leftclip > 0:
+        maskedseq = mask_sequence(maskedseq, maskchar, 0, leftclip - 1)
+    if rightclip < len(maskedseq):
+        maskedseq = mask_sequence(maskedseq, maskchar, rightclip, len(maskedseq))
+
+    leftclip, rightclip = return_merged_clips(data)
+    readlen = data['number_of_bases']
+
+    for match in ssahapematches[data['name']]:
+        int(match[0])  # sanity check: the SSAHA2 score must parse as an integer
+        linkername = match[2]
+        leftreadhit = int(match[3])
+        rightreadhit = int(match[4])
+
+        maskedseq = mask_sequence(maskedseq, maskchar, leftreadhit - 1, rightreadhit)
+
+    correctedseq = correct_for_smallhits(maskedseq, maskchar, linkername)
+
+    if len(maskedseq) != len(correctedseq):
+        raise RuntimeError("Internal error: maskedseq != correctedseq")
+
+    partialhits = False
+    if correctedseq != maskedseq:
+        partialhits = True
+        readname += "_pl"
+        maskedseq = correctedseq
+
+    fragments = fragment_sequences(maskedseq, qual, maskchar)
+
+    mlcflag = False
+
+    if len(fragments) > 2:
+        mlcflag = True
+        readname += "_mlc"
+
+    # print fragments
+    if mlcflag or partialhits:
+        fragcounter = 1
+        readname += ".part"
+        for frag in fragments:
+            actseq = frag[0]
+            if len(actseq) >= 20:
+                actqual = frag[1]
+                oname = readname + str(fragcounter)
+                write_sequence(oname, actseq, actqual, seq_fh, qual_fh)
+                to_print = [create_basic_xml_info(oname, sff_fh.name)]
+                # No clipping in XML ... the multiple and partial fragments
+                #  are clipped "hard"
+                # No template ID and trace_end: we don't know the
+                #  orientation of the fragments. Even if it were
+                #  only two, the fact we had multiple linkers
+                #  says something went wrong, so simply do not
+                #  write any paired-end information for all these fragments
+                to_print.append('    </trace>\n')
+                xml_fh.write(''.join(to_print))
+                numseqs += 1
+                fragcounter += 1
+    else:
+        if len(fragments) > 2:
+            raise RuntimeError("Unexpected: more than two fragments detected in " + readname + ". please contact the authors.")
+        # nothing will happen for 0 fragments
+        if len(fragments) == 1:
+            boundaries = calc_subseq_boundaries(maskedseq, maskchar)
+            if len(boundaries) < 1 or len(boundaries) > 3:
+                raise RuntimeError("Unexpected case: " + str(len(boundaries)) + " boundaries for 1 fragment of " + readname)
+            if len(boundaries) == 3:
+                # case: mask char on both sides of sequence
+                data['clip_adapter_left'] = boundaries[0][1]
+                data['clip_adapter_right'] = boundaries[2][0]
+            elif len(boundaries) == 2:
+                # case: mask char left or right of sequence
+                if maskedseq[0] == maskchar:
+                    # case: mask char left
+                    data['clip_adapter_left'] = boundaries[0][1]
+                else:
+                    # case: mask char right
+                    data['clip_adapter_right'] = boundaries[1][0]
+            data['name'] = data['name'] + ".fn"
+            write_unpaired_read(data, sff_fh, seq_fh, qual_fh, xml_fh)
+            numseqs = 1
+        elif len(fragments) == 2:
+            oname = readname + ".r"
+            seq, qual = get_read_data(data)
+
+            startsearch = False
+            for spos in range(len(maskedseq)):
+                if maskedseq[spos] != maskchar:
+                    startsearch = True
+                else:
+                    if startsearch:
+                        break
+
+            lseq = seq[:spos]
+            actseq = reverse_complement(lseq)
+            lreadlen = len(actseq)
+            lqual = qual[:spos]
+            # python hack to reverse a list/string/etc
+            lqual = lqual[::-1]
+
+            write_sequence(oname, actseq, lqual, seq_fh, qual_fh)
+
+            to_print = [create_basic_xml_info(oname, sff_fh.name)]
+            to_print.append(create_clip_xml_info(lreadlen, 0, lreadlen + 1 - data['clip_adapter_left'], 0, lreadlen + 1 - data['clip_qual_left']))
+            to_print.append('        <template_id>')
+            to_print.append(readname)
+            to_print.append('</template_id>\n')
+            to_print.append('        <trace_end>r</trace_end>\n')
+            to_print.append('    </trace>\n')
+            xml_fh.write(''.join(to_print))
+
+            oname = readname + ".f"
+            startsearch = False
+            for spos in range(len(maskedseq) - 1, -1, -1):
+                if maskedseq[spos] != maskchar:
+                    startsearch = True
+                else:
+                    if startsearch:
+                        break
+
+            actseq = seq[spos + 1:]
+            actqual = qual[spos + 1:]
+
+            write_sequence(oname, actseq, actqual, seq_fh, qual_fh)
+
+            rreadlen = len(actseq)
+            to_print = [create_basic_xml_info(oname, sff_fh.name)]
+            to_print.append(create_clip_xml_info(rreadlen, 0, rreadlen - (readlen - data['clip_adapter_right']), 0, rreadlen - (readlen - data['clip_qual_right'])))
+            to_print.append('        <template_id>')
+            to_print.append(readname)
+            to_print.append('</template_id>\n')
+            to_print.append('        <trace_end>f</trace_end>\n')
+            to_print.append('    </trace>\n')
+            xml_fh.write(''.join(to_print))
+            numseqs = 2
+
+    return numseqs
+
+
+def extract_reads_from_sff(config, sff_files):
+    '''Given the configuration and the list of sff_files it writes the seqs,
+    qualities and ancillary data into the output file(s).
+
+    If file for paired-end linker was given, first extracts all sequences
+    of an SFF and searches these against the linker(s) with SSAHA2 to
+    create needed information to split reads.
+    '''
+    global ssahapematches
+
+    if len(sff_files) == 0:
+        raise RuntimeError("No SFF file given?")
+
+    # we go through all input files
+    for sff_file in sff_files:
+        if not os.path.getsize(sff_file):
+            raise RuntimeError('Empty file? : ' + sff_file)
+        fh = open(sff_file, 'r')  # make sure the file is readable before starting
+        fh.close()
+
+    openmode = 'w'
+    if config['append']:
+        openmode = 'a'
+
+    seq_fh = open(config['seq_fname'], openmode)
+    xml_fh = open(config['xml_fname'], openmode)
+    if config['want_fastq']:
+        qual_fh = None
+        try:
+            os.remove(config['qual_fname'])
+        except OSError:
+            pass
+    else:
+        qual_fh = open(config['qual_fname'], openmode)
+
+    if not config['append']:
+        xml_fh.write('<?xml version="1.0"?>\n<trace_volume>\n')
+    else:
+        remove_last_xmltag_in_file(config['xml_fname'], "trace_volume")
+
+    # we go through all input files
+    for sff_file in sff_files:
+        ssahapematches.clear()
+
+        seqcheckstore = ([])
+
+        debug = 0
+
+        if not debug and config['pelinker_fname']:
+            sys.stdout.flush()
+
+            if 0:
+                # for debugging
+                pid = os.getpid()
+                tmpfasta_fname = 'sffe.tmp.' + str(pid) + '.fasta'
+                tmpfasta_fh = open(tmpfasta_fname, 'w')
+            else:
+                tmpfasta_fh = tempfile.NamedTemporaryFile(prefix='sffeseqs_',
+                                                          suffix='.fasta')
+
+            sff_fh = open(sff_file, 'rb')
+            header_data = read_header(fileh=sff_fh)
+            for seq_data in sequences(fileh=sff_fh, header=header_data):
+                seq, qual = get_read_data(seq_data)
+                seqstring, qualstring = format_as_fasta(seq_data['name'], seq, qual)
+                tmpfasta_fh.write(seqstring)
+            tmpfasta_fh.seek(0)
+
+            if 0:
+                # for debugging
+                tmpssaha_fname = 'sffe.tmp.' + str(pid) + '.ssaha2'
+                tmpssaha_fh = open(tmpssaha_fname, 'w+')
+            else:
+                tmpssaha_fh = tempfile.NamedTemporaryFile(prefix='sffealig_',
+                                                          suffix='.ssaha2')
+
+            launch_ssaha(config['pelinker_fname'], tmpfasta_fh.name, tmpssaha_fh)
+            tmpfasta_fh.close()
+
+            tmpssaha_fh.seek(0)
+            read_ssaha_data(tmpssaha_fh)
+            tmpssaha_fh.close()
+
+        if debug:
+            tmpssaha_fh = open("sffe.tmp.10634.ssaha2", 'r')
+            read_ssaha_data(tmpssaha_fh)
+
+        sys.stdout.flush()
+        sff_fh = open(sff_file, 'rb')
+        header_data = read_header(fileh=sff_fh)
+
+        # now convert all reads
+        nseqs_sff = 0
+        nseqs_out = 0
+        for seq_data in sequences(fileh=sff_fh, header=header_data):
+            nseqs_sff += 1
+
+            seq, qual = clip_read(seq_data)
+            seqcheckstore.append(seq[0:50])
+
+            if seq_data['name'] in ssahapematches:
+                nseqs_out += split_paired_end(seq_data, sff_fh, seq_fh, qual_fh, xml_fh)
+            else:
+                if config['pelinker_fname']:
+                    seq_data['name'] = seq_data['name'] + ".fn"
+                write_unpaired_read(seq_data, sff_fh, seq_fh, qual_fh, xml_fh)
+                nseqs_out += 1
+        sff_fh.close()
+
+        check_for_dubious_startseq(seqcheckstore, sff_file, seq_data)
+        seqcheckstore = ([])
+
+    xml_fh.write('</trace_volume>\n')
+
+    xml_fh.close()
+    seq_fh.close()
+    if qual_fh is not None:
+        qual_fh.close()
+
+    return
+
+
+def check_for_dubious_startseq(seqcheckstore, sffname, seqdata):
+    global stern_warning
+
+    foundproblem = ""
+    for checklen in range(1, len(seqcheckstore[0])):
+        foundinloop = False
+        seqdict = {}
+        for seq in seqcheckstore:
+            shortseq = seq[0:checklen]
+            if shortseq in seqdict:
+                seqdict[shortseq] += 1
+            else:
+                seqdict[shortseq] = 1
+
+        for shortseq, count in seqdict.items():
+            if float(count) / len(seqcheckstore) >= 0.5:
+                foundinloop = True
+                stern_warning = True
+                foundproblem = "\n" + "*" * 80
+                foundproblem += "\nWARNING: "
+                foundproblem += "weird sequences in file " + sffname + "\n\n"
+                foundproblem += "After applying left clips, " + str(count) + " sequences (="
+                foundproblem += '%.0f' % (100.0 * float(count) / len(seqcheckstore))
+                foundproblem += "%) start with these bases:\n" + shortseq
+                foundproblem += "\n\nThis does not look sane.\n\n"
+                foundproblem += "Countermeasures you *probably* must take:\n"
+                foundproblem += "1) Make your sequence provider aware of that problem and ask whether this can be\n    corrected in the SFF.\n"
+                foundproblem += "2) If you decide that this is not normal and your sequence provider does not\n    react, use the --min_left_clip of sff_extract.\n"
+                left, right = return_merged_clips(seqdata)
+                foundproblem += "    (Probably '--min_left_clip=" + str(left + len(shortseq)) + "' but you should cross-check that)\n"
+                foundproblem += "*" * 80 + "\n"
+        if not foundinloop:
+            break
+    if len(foundproblem):
+        print(foundproblem)
+
+
+def parse_extra_info(info):
+    '''It parses the information that will go in the xml file.
+
+    There are two formats accepted for the extra information:
+    key1:value1, key2:value2
+    or:
+    file1.sff{key1:value1, key2:value2};file2.sff{key3:value3}
+    '''
+    if not info:
+        return info
+    finfos = info.split(';')  # information for each file
+    data_for_files = {}
+    for finfo in finfos:
+        # we split the file name from the rest
+        items = finfo.split('{')
+        if len(items) == 1:
+            fname = fake_sff_name
+            info = items[0]
+        else:
+            fname = items[0]
+            info = items[1]
+        # now we get each key,value pair in the info
+        info = info.replace('}', '')
+        data = {}
+        for item in info.split(','):
+            key, value = item.strip().split(':')
+            key = key.strip()
+            value = value.strip()
+            data[key] = value
+        data_for_files[fname] = data
+    return data_for_files
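+
+# Example (editor's addition, values hypothetical):
+#     parse_extra_info('a.sff{species:fly, lib:A};b.sff{species:worm}')
+# returns {'a.sff': {'species': 'fly', 'lib': 'A'},
+#          'b.sff': {'species': 'worm'}}.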
+
+
+def return_merged_clips(data):
+    '''It returns the left and right positions to clip.'''
+    def max(a, b):
+        '''It returns the max of the two given numbers.
+
+        It won't take into account the zero values.
+        '''
+        if not a and not b:
+            return None
+        if not a:
+            return b
+        if not b:
+            return a
+        if a >= b:
+            return a
+        else:
+            return b
+
+    def min(a, b):
+        '''It returns the min of the two given numbers.
+
+        It won't take into account the zero values.
+        '''
+        if not a and not b:
+            return None
+        if not a:
+            return b
+        if not b:
+            return a
+        if a <= b:
+            return a
+        else:
+            return b
+    left = max(data['clip_adapter_left'], data['clip_qual_left'])
+    right = min(data['clip_adapter_right'], data['clip_qual_right'])
+    # maybe both clips were zero
+    if left is None:
+        left = 1
+    if right is None:
+        right = data['number_of_bases']
+    return left, right
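+
+# Example (editor's addition): zero clip values mean "unset" and fall back to
+# the sequence boundaries. For a read with
+#     clip_adapter_left=0, clip_qual_left=5,
+#     clip_adapter_right=0, clip_qual_right=100, number_of_bases=120
+# return_merged_clips() yields (5, 100); with all four clips at 0 it yields
+# (1, 120).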
+
+
+def sequence_case(data):
+    '''Given the data for one read it returns the seq with mixed case.
+
+    The regions to be clipped will be lower case and the rest upper case.
+    '''
+    left, right = return_merged_clips(data)
+    seq = data['bases']
+    if left >= right:
+        new_seq = seq.lower()
+    else:
+        new_seq = ''.join((seq[:left - 1].lower(), seq[left - 1:right], seq[right:].lower()))
+
+    return new_seq
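+
+# Example (editor's addition): with merged clips (3, 6) on 'ACGTACGT',
+# sequence_case() returns 'acGTACgt' -- bases 3..6 (1-based, inclusive) stay
+# upper case, the clipped flanks become lower case.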
+
+
+def clip_read(data):
+    '''Given the data for one read it returns clipped seq and qual.'''
+    qual = data['quality_scores']
+    left, right = return_merged_clips(data)
+    seq = data['bases']
+    qual = data['quality_scores']
+    new_seq = seq[left - 1:right]
+    new_qual = qual[left - 1:right]
+
+    return new_seq, new_qual
+
+
+def tests_for_ssaha():
+    '''Tests whether SSAHA2 can be successfully called.'''
+    try:
+        print("Testing whether SSAHA2 is installed and can be launched ... ", end=' ')
+        sys.stdout.flush()
+        fh = open('/dev/null', 'w')
+        subprocess.call(["ssaha2"], stdout=fh)
+        fh.close()
+        print("ok.")
+    except Exception:
+        print("nope? Uh oh ...\n\n")
+        raise RuntimeError('Could not launch ssaha2. Have you installed it? Is it in your path?')
+
+
+def load_linker_sequences(linker_fname):
+    '''Loads all linker sequences into memory, storing only the length
+    of each linker.'''
+    global linkerlengths
+
+    if not os.path.getsize(linker_fname):
+        raise RuntimeError("File empty? '" + linker_fname + "'")
+    fh = open(linker_fname, 'r')
+    linkerseqs = read_fasta(fh)
+    if len(linkerseqs) == 0:
+        raise RuntimeError(linker_fname + ": no sequence found?")
+    for i in linkerseqs:
+        if i.name in linkerlengths:
+            raise RuntimeError(linker_fname + ": sequence '" + i.name + "' present multiple times. Aborting.")
+        linkerlengths[i.name] = len(i.sequence)
+    fh.close()
+
+
+def launch_ssaha(linker_fname, query_fname, output_fh):
+    '''Launches SSAHA2 on the linker and query file, storing the SSAHA2
+    output in the output filehandle.'''
+    tests_for_ssaha()
+
+    try:
+        print("Searching linker sequences with SSAHA2 (this may take a while) ... ", end=' ')
+        sys.stdout.flush()
+        retcode = subprocess.call(["ssaha2", "-output", "ssaha2", "-solexa", "-kmer", "4", "-skip", "1", linker_fname, query_fname], stdout=output_fh)
+        if retcode:
+            raise RuntimeError('Oops.')
+        else:
+            print("ok.")
+    except Exception:
+        print("\n")
+        raise RuntimeError('An error occurred during the SSAHA2 execution, aborting.')
+
+
+def read_ssaha_data(ssahadata_fh):
+    '''Given a file handle, reads the file generated with SSAHA2 (with
+    default output format) and stores all matches in the ssahapematches
+    (ssaha paired-end matches) dictionary.'''
+    global ssahapematches
+
+    print("Parsing SSAHA2 result file ... ", end=' ')
+    sys.stdout.flush()
+
+    for line in ssahadata_fh:
+        if line.startswith('ALIGNMENT'):
+            ml = line.split()
+            if len(ml) != 12:
+                print("\n", line, end=' ')
+                raise RuntimeError('Expected 12 elements in the SSAHA2 line with ALIGNMENT keyword, but found ' + str(len(ml)))
+            if ml[2] not in ssahapematches:
+                ssahapematches[ml[2]] = ([])
+            if ml[8] == 'F':
+                # store everything except the first element (output
+                #  format name (ALIGNMENT)) and the last element
+                #  (length)
+                ssahapematches[ml[2]].append(ml[1:-1])
+            else:
+                ml[4], ml[5] = ml[5], ml[4]
+                ssahapematches[ml[2]].append(ml[1:-1])
+
+    print("done.")
+
+
+##########################################################################
+#
+# BaCh: This block was shamelessly copied from
+#  http://python.genedrift.org/2007/07/04/reading-fasta-files-conclusion/
+# and then subsequently modified to read fasta correctly
+# It's still not foolproof, but should be good enough
+#
+##########################################################################
+
+class Fasta:
+    def __init__(self, name, sequence):
+        self.name = name
+        self.sequence = sequence
+
+
+def read_fasta(file):
+    items = []
+    aninstance = Fasta('', '')
+    linenum = 0
+    for line in file:
+        linenum += 1
+        if line.startswith(">"):
+            if len(aninstance.sequence):
+                items.append(aninstance)
+                aninstance = Fasta('', '')
+            # name == all characters until the first whitespace
+            #  (split()[0]) but without the starting ">" ([1:])
+            aninstance.name = line.split()[0][1:]
+            aninstance.sequence = ''
+            if len(aninstance.name) == 0:
+                raise RuntimeError(file.name + ': no name in line ' + str(linenum) + '?')
+
+        else:
+            if len(aninstance.name) == 0:
+                raise RuntimeError(file.name + ': no sequence header at line ' + str(linenum) + '?')
+            aninstance.sequence += line.strip()
+
+    if len(aninstance.name) and len(aninstance.sequence):
+        items.append(aninstance)
+
+    return items
+##########################################################################
+
+
+def version_string():
+    return "sff_extract " + __version__
+
+
+def read_config():
+    '''It reads the configuration options from the command line arguments and
+    it returns a dict with them.'''
+    from optparse import OptionParser, OptionGroup
+    usage = "usage: %prog [options] sff1 sff2 ..."
+    desc = "Extract sequences from 454 SFF files into FASTA, FASTA quality"\
+           " and XML traceinfo format. When a paired-end linker sequence"\
+           " is given (-l), use SSAHA2 to scan the sequences for the linker,"\
+           " then split the sequences, removing the linker."
+    parser = OptionParser(usage=usage, version=version_string(), description=desc)
+    parser.add_option('-a', '--append', action="store_true", dest='append',
+            help='append output to existing files', default=False)
+    parser.add_option('-i', '--xml_info', dest='xml_info',
+            help='extra info to write in the xml file')
+    parser.add_option("-l", "--linker_file", dest="pelinker_fname",
+            help="FASTA file with paired-end linker sequences", metavar="FILE")
+
+    group = OptionGroup(parser, "File name options", "")
+    group.add_option('-c', '--clip', action="store_true", dest='clip',
+                     help='clip (completely remove) ends with low qual and/or adaptor sequence', default=False)
+    group.add_option('-u', '--upper_case', action="store_false", dest='mix_case',
+                     help='all bases in upper case, including clipped ends', default=True)
+    group.add_option('', '--min_left_clip', dest='min_leftclip',
+                     metavar="INTEGER", type="int",
+                     help='if the left clip coming from the SFF is smaller than this value, override it', default=0)
+    group.add_option('-Q', '--fastq', action="store_true", dest='want_fastq',
+                     help='store as FASTQ file instead of FASTA + FASTA quality file', default=False)
+    parser.add_option_group(group)
+
+    group = OptionGroup(parser, "File name options", "")
+    group.add_option("-o", "--out_basename", dest="basename",
+                     help="base name for all output files")
+    group.add_option("-s", "--seq_file", dest="seq_fname",
+                     help="output sequence file name", metavar="FILE")
+    group.add_option("-q", "--qual_file", dest="qual_fname",
+                     help="output quality file name", metavar="FILE")
+    group.add_option("-x", "--xml_file", dest="xml_fname",
+                     help="output ancillary xml file name", metavar="FILE")
+    parser.add_option_group(group)
+
+    # default fnames
+    # is there an sff file?
+    basename = 'reads'
+    if sys.argv[-1][-4:].lower() == '.sff':
+        basename = sys.argv[-1][:-4]
+    def_seq_fname = basename + '.fasta'
+    def_qual_fname = basename + '.fasta.qual'
+    def_xml_fname = basename + '.xml'
+    def_pelinker_fname = ''
+    parser.set_defaults(seq_fname=def_seq_fname)
+    parser.set_defaults(qual_fname=def_qual_fname)
+    parser.set_defaults(xml_fname=def_xml_fname)
+    parser.set_defaults(pelinker_fname=def_pelinker_fname)
+
+    # we parse the cmd line
+    (options, args) = parser.parse_args()
+
+    # we put the result in a dict
+    global config
+    config = {}
+    for property in dir(options):
+        if property[0] == '_' or property in ('ensure_value', 'read_file', 'read_module'):
+            continue
+        config[property] = getattr(options, property)
+
+    if config['basename'] is None:
+        config['basename'] = basename
+
+    # if we have not set a file name with -s, -q or -x we set the basename
+    # based file name
+    if config['want_fastq']:
+        config['qual_fname'] = ''
+        if config['seq_fname'] == def_seq_fname:
+            config['seq_fname'] = config['basename'] + '.fastq'
+    else:
+        if config['seq_fname'] == def_seq_fname:
+            config['seq_fname'] = config['basename'] + '.fasta'
+        if config['qual_fname'] == def_qual_fname:
+            config['qual_fname'] = config['basename'] + '.fasta.qual'
+
+    if config['xml_fname'] == def_xml_fname:
+        config['xml_fname'] = config['basename'] + '.xml'
+
+    # we parse the extra info for the xml file
+    config['xml_info'] = parse_extra_info(config['xml_info'])
+    return config, args
+
+
+def testsome():
+    sys.exit()
+    return
+
+
+def main():
+    argv = sys.argv
+    if len(argv) == 1:
+        sys.argv.append('-h')
+        read_config()
+        sys.exit()
+    try:
+        config, args = read_config()
+
+        if config['pelinker_fname']:
+            load_linker_sequences(config['pelinker_fname'])
+        if len(args) == 0:
+            raise RuntimeError("No SFF file given?")
+        extract_reads_from_sff(config, args)
+    except (OSError, IOError, RuntimeError) as errval:
+        print(errval)
+        return 1
+
+    if stern_warning:
+        return 1
+
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/tools/filters/sff_extractor.xml b/tools/filters/sff_extractor.xml
new file mode 100644
index 0000000..2beb754
--- /dev/null
+++ b/tools/filters/sff_extractor.xml
@@ -0,0 +1,58 @@
+<tool id="Sff_extractor" name="SFF converter" version="1.0.1">
+    <description></description>
+    <command interpreter="python">
+        #if   str($fastq_output) == "fastq_false"  #sff_extract.py $clip --seq_file=$out_file3 --qual_file=$out_file4 --xml_file=$out_file2 $input
+        #elif str($fastq_output) == "fastq_true"   #sff_extract.py $clip --fastq --seq_file=$out_file1 --xml_file=$out_file2 $input
+        #end if#
+    </command>
+    <inputs>
+        <param format="sff" name="input" type="data" label="Extract from this dataset"/>
+        <param name="clip" type="select" label="Completely remove ends with low qual and/or adaptor sequence">
+            <option value="">No</option>
+            <option value="--clip">Yes</option>
+        </param>
+        <param name="fastq_output" type="boolean" truevalue="fastq_true" falsevalue="fastq_false" checked="False" label="Do you want FASTQ file instead of FASTA + FASTA quality file?" />
+    </inputs>
+    <outputs>
+        <data format="fastqsanger" name="out_file1" >
+            <filter>fastq_output is True</filter>
+        </data>
+        <data format="xml" name="out_file2">
+        </data>  
+        <data format="fasta" name="out_file3">
+            <filter>fastq_output is False</filter>
+        </data>
+        <data format="qual" name="out_file4">
+            <filter>fastq_output is False</filter>
+        </data>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="2.sff"/>
+            <param name="clip" value=""/>
+            <param name="fastq_output" value="false"/>
+            <output name="out_file2" file="sff_converter_xml_1.dat"/>
+            <output name="out_file3" file="sff_converter_fasta.dat"/>
+            <output name="out_file4" file="sff_converter_qual.dat"/>
+        </test>
+        <test>
+            <param name="input" value="2.sff"/>
+            <param name="clip" value=""/>
+            <param name="fastq_output" value="true"/>
+            <output name="out_file1" file="sff_converter_fastq.dat"/>
+            <output name="out_file2" file="sff_converter_xml_2.dat"/>
+        </test>
+    </tests>
+    <help>
+
+**What it does**
+
+This tool extracts data from the 454 Sequencer SFF format and creates three files containing:
+
+- Sequences (FASTA)
+- Qualities (QUAL)
+- Clippings (XML)
+
+    </help>
+</tool>
+
+
diff --git a/tools/filters/sorter.py b/tools/filters/sorter.py
new file mode 100644
index 0000000..dda21f1
--- /dev/null
+++ b/tools/filters/sorter.py
@@ -0,0 +1,58 @@
+"""
+    Sorts tabular data on one or more columns. All comments of the file are collected
+    and placed at the beginning of the sorted output file.
+
+    usage: sorter.py [options]
+    -i, --input: Tabular file to be sorted
+    -o, --output: Sorted output file
+    -k, --key: Key (see manual for bash/sort)
+
+    usage: sorter.py input output [key ...]
+"""
+# 03/05/2013 guerler
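+#
+# Example invocation (editor's addition; file names hypothetical):
+#     python sorter.py -i input.tsv -o sorted.tsv -k 1,1 -k 3,3nr
+# sorts on column 1 (alphabetical, ascending) and column 3 (numerical,
+# descending), mirroring the key syntax of GNU sort.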
+
+import os
+import sys
+from optparse import OptionParser
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def main():
+    # define options
+    parser = OptionParser()
+    parser.add_option("-i", "--input")
+    parser.add_option("-o", "--output")
+    parser.add_option("-k", "--key", action="append")
+
+    # parse
+    options, args = parser.parse_args()
+
+    try:
+        # retrieve options
+        input = options.input
+        output = options.output
+        key = [" -k" + k for k in options.key]
+
+        # grep comments
+        grep_comments = "(grep '^#' %s) > %s" % (input, output)
+
+        # grep and sort columns
+        sort_columns = "(grep '^[^#]' %s | sort -f -t '\t' %s) >> %s" % (input, ' '.join(key), output)
+
+        # execute
+        os.system(grep_comments)
+        os.system(sort_columns)
+
+    except Exception as ex:
+        stop_err('Error running sorter.py\n' + str(ex))
+
+    # exit
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/sorter.xml b/tools/filters/sorter.xml
new file mode 100644
index 0000000..d5cc2f5
--- /dev/null
+++ b/tools/filters/sorter.xml
@@ -0,0 +1,188 @@
+<tool id="sort1" name="Sort" version="1.0.3">
+    <description>data in ascending or descending order</description>
+    <command interpreter="python">
+        sorter.py
+        
+        --input=${input}
+        --output=${out_file1}
+        
+        #if (str($style) == 'num'):
+            #set $style = 'n'
+        #elif (str($style) == 'gennum'):
+            #set $style = 'g'
+        #else:
+            #set $style = ''
+        #end if
+
+        #set $order = '' if (str($order) == 'ASC') else 'r'
+        
+        --key=${column},${column}${style}${order}
+        
+        
+        #for $col in $column_set:
+            #set $other_column = str($col.other_column)
+            
+            #if (str($col.other_style) == 'num'):
+                #set $other_style = 'n'
+            #elif (str($col.other_style) == 'gennum'):
+                #set $other_style = 'g'
+            #else:
+                #set $other_style = ''
+            #end if
+            
+            #set $other_order = '' if (str($col.other_order) == "ASC") else 'r'
+            --key=${other_column},${other_column}${other_style}${other_order}
+        #end for
+    </command>
+    <inputs>
+        <param format="tabular" name="input" type="data" label="Sort Dataset" />
+        <param name="column" label="on column" type="data_column" data_ref="input" accept_default="true"/>
+        <param name="style" type="select" label="with flavor">
+            <option value="num">Numerical sort</option>
+            <option value="gennum">General numeric sort</option>
+            <option value="alpha">Alphabetical sort</option>
+        </param>
+        <param name="order" type="select" label="everything in">
+            <option value="DESC">Descending order</option>
+            <option value="ASC">Ascending order</option>
+        </param>
+        <repeat name="column_set" title="Column selection">
+            <param name="other_column" label="on column" type="data_column" data_ref="input" accept_default="true" />
+            <param name="other_style" type="select" label="with flavor">
+                <option value="num">Numerical sort</option>
+                <option value="gennum">General numeric sort</option>
+                <option value="alpha">Alphabetical sort</option>
+            </param>
+            <param name="other_order" type="select" label="everything in">
+                <option value="DESC">Descending order</option>
+                <option value="ASC">Ascending order</option>
+            </param>
+        </repeat>
+    </inputs>
+    <outputs>
+        <data format="input" name="out_file1" metadata_source="input"/>
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="sort_in1.bed"/>
+            <param name="column" value="1"/>
+            <param name="style" value="alpha"/>
+            <param name="order" value="ASC"/>
+            <param name="other_column" value="3"/>
+            <param name="other_style" value="num"/>
+            <param name="other_order" value="DESC"/>
+            <output name="out_file1" file="sort_out1.bed"/>
+        </test>
+        <test>
+            <param name="input" value="sort_in1.bed"/>
+            <param name="column" value="1"/>
+            <param name="style" value="alpha"/>
+            <param name="order" value="ASC"/>
+            <param name="other_column" value="3"/>
+            <param name="other_style" value="num"/>
+            <param name="other_order" value="ASC"/>
+            <output name="out_file1" file="sort_out2.bed"/>
+        </test>
+        <test>
+            <param name="input" value="sort_in2.bed"/>
+            <param name="column" value="5"/>
+            <param name="style" value="gennum"/>
+            <param name="order" value="ASC"/>
+            <output name="out_file1" file="sort_out3.bed"/>
+        </test>
+    </tests>
+    <help>
+.. class:: infomark
+        
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+        
+-----
+
+**Syntax**
+
+This tool sorts the dataset on any number of columns in either ascending or descending order.
+
+* **Numerical sort** orders numbers by their magnitude, ignores all characters besides numbers, and evaluates a string of numbers to the value they signify.
+* **General numeric sort** orders numbers by their general numerical value. Unlike the numerical sort option, it can handle numbers in scientific notation too.
+* **Alphabetical sort** is a phonebook type sort based on the conventional order of letters in an alphabet. Each nth letter is compared with the nth letter of other words in the list, starting at the first letter of each word and advancing to the second, third, fourth, and so on, until the order is established. Therefore, in an alphabetical sort, 2 comes after 100 (1 < 2).
+        
+-----
+
+**Examples**
+
+The list of numbers 4,17,3,5 collates to 3,4,5,17 by numerical sorting, while it collates to 17,3,4,5 by alphabetical sorting.
+
+Sorting the following::
+
+    Q     d    7   II    jhu  45
+    A     kk   4   I     h    111
+    Pd    p    1   ktY   WS   113
+    A     g    10  H     ZZ   856
+    A     edf  4   tw    b    234
+    BBB   rt   10  H     ZZ   100
+    A     rew  10  d     b    1111
+    C     sd   19  YH    aa   10
+    Hah   c    23  ver   bb   467
+    MN    gtr  1   a     X    32
+    N     j    9   a     T    205
+    BBB   rrf  10  b     Z    134
+    odfr  ws   6   Weg   dew  201
+    C     f    3   WW    SW   34
+    A     jhg  4   I     b    345
+    Pd    gf   7   Gthe  de   567
+    rS    hty  90  YY    LOp  89
+    A     g    10  H     h    43
+    A     g    4   I     h    500
+
+on columns 1 (alphabetical), 3 (numerical), and 6 (numerical) in ascending order will yield::
+
+    A     kk   4   I     h    111
+    A     edf  4   tw    b    234
+    A     jhg  4   I     b    345
+    A     g    4   I     h    500
+    A     g    10  H     h    43
+    A     g    10  H     ZZ   856
+    A     rew  10  d     b    1111
+    BBB   rt   10  H     ZZ   100
+    BBB   rrf  10  b     Z    134
+    C     f    3   WW    SW   34
+    C     sd   19  YH    aa   10
+    Hah   c    23  ver   bb   467
+    MN    gtr  1   a     X    32
+    N     j    9   a     T    205
+    odfr  ws   6   Weg   dew  201
+    Pd    p    1   ktY   WS   113
+    Pd    gf   7   Gthe  de   567
+    Q     d    7   II    jhu  45
+    rS    hty  90  YY    LOp  89
+
+
+Sorting the following::
+
+    chr10  100  200  feature1  100.01   +
+    chr20  800  900  feature2  1.1      +
+    chr2   500  600  feature3  1000.1   +
+    chr1   300  400  feature4  1.1e-05  +
+    chr21  300  500  feature5  1.1e2    +
+    chr15  700  800  feature6  1.1e4    +
+
+on column 5 (numerical) in ascending order will yield::
+
+    chr1   300  400  feature4  1.1e-05  +
+    chr15  700  800  feature6  1.1e4    +
+    chr20  800  900  feature2  1.1      +
+    chr21  300  500  feature5  1.1e2    +
+    chr10  100  200  feature1  100.01   +
+    chr2   500  600  feature3  1000.1   +
+
+on column 5 (general numeric) in ascending order will yield::
+
+    chr1   300  400  feature4  1.1e-05  +
+    chr20  800  900  feature2  1.1      +
+    chr10  100  200  feature1  100.01   +
+    chr21  300  500  feature5  1.1e2    +
+    chr2   500  600  feature3  1000.1   +
+    chr15  700  800  feature6  1.1e4    +
+
+    </help>
+</tool>
diff --git a/tools/filters/tailWrapper.pl b/tools/filters/tailWrapper.pl
new file mode 100644
index 0000000..2445553
--- /dev/null
+++ b/tools/filters/tailWrapper.pl
@@ -0,0 +1,19 @@
+#! /usr/bin/perl -w
+
+use strict;
+use warnings;
+
+# a wrapper for tail for use in galaxy
+# tailWrapper.pl [filename] [# lines to show] [output]
+
+die "Check arguments" unless @ARGV == 3;
+die "Line number should be an integer\n" unless $ARGV[1]=~ m/^\d+$/;
+
+open (OUT, ">$ARGV[2]") or die "Cannot create $ARGV[2]:$!\n";
+open (TAIL, "tail -n $ARGV[1] $ARGV[0]|") or die "Cannot run tail:$!\n";
+while (<TAIL>) {
+    print OUT;
+}
+close OUT;
+close TAIL;
+    
diff --git a/tools/filters/tailWrapper.xml b/tools/filters/tailWrapper.xml
new file mode 100644
index 0000000..1a7d778
--- /dev/null
+++ b/tools/filters/tailWrapper.xml
@@ -0,0 +1,42 @@
+<tool id="Show tail1" name="Select last" version="1.0.0">
+  <description>lines from a dataset</description>
+  <command interpreter="perl">tailWrapper.pl $input $lineNum $out_file1</command>
+  <inputs>
+    <param name="lineNum" size="5" type="integer" value="10" label="Select last" help="lines"/>
+    <param format="txt" name="input" type="data" label="from"/>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="lineNum" value="10"/>
+      <param name="input" value="1.bed"/>
+      <output name="out_file1" file="eq-showtail.dat"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool outputs a specified number of lines from the **end** of a dataset.
+
+-----
+
+**Example**
+
+- Input File::
+
+    chr7    57134   57154   D17003_CTCF_R7  356     -
+    chr7    57247   57267   D17003_CTCF_R4  207     +
+    chr7    57314   57334   D17003_CTCF_R5  269     +
+    chr7    57341   57361   D17003_CTCF_R7  375     +
+    chr7    57457   57477   D17003_CTCF_R3  188     +
+
+- Show the last two lines of the above file. The result is::
+
+    chr7    57341   57361   D17003_CTCF_R7  375     +
+    chr7    57457   57477   D17003_CTCF_R3  188     +
+
+  </help>
+</tool>
diff --git a/tools/filters/trimmer.py b/tools/filters/trimmer.py
new file mode 100644
index 0000000..9c71ff2
--- /dev/null
+++ b/tools/filters/trimmer.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import optparse
+import sys
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit(1)
+
+
+def main():
+    usage = """%prog [options]
+
+options (listed below) default to 'None' if omitted
+    """
+    parser = optparse.OptionParser(usage=usage)
+
+    parser.add_option(
+        '-a', '--ascii',
+        dest='ascii',
+        action='store_true',
+        default=False,
+        help='Use ascii codes to define ignored beginnings instead of raw characters')
+
+    parser.add_option(
+        '-q', '--fastq',
+        dest='fastq',
+        action='store_true',
+        default=False,
+        help='The input data is in fastq format. If selected, the script skips every even line, since those are header lines')
+
+    parser.add_option(
+        '-i', '--ignore',
+        dest='ignore',
+        help='A comma separated list of ignored beginnings (e.g., ">,@"), or their ascii codes (e.g., "62,64") if option -a is enabled')
+
+    parser.add_option(
+        '-s', '--start',
+        dest='start',
+        default='0',
+        help='Trim from beginning to here (1-based)')
+
+    parser.add_option(
+        '-e', '--end',
+        dest='end',
+        default='0',
+        help='Trim from here to the end (1-based)')
+
+    parser.add_option(
+        '-f', '--file',
+        dest='input_txt',
+        default=None,
+        help='Name of file to be chopped. STDIN is default')
+
+    parser.add_option(
+        '-c', '--column',
+        dest='col',
+        default='0',
+        help='Column to chop. If 0 = chop the whole line')
+
+    options, args = parser.parse_args()
+    invalid_starts = []
+
+    if options.input_txt:
+        infile = open( options.input_txt, 'r')
+    else:
+        infile = sys.stdin
+
+    if options.ignore and options.ignore != "None":
+        invalid_starts = options.ignore.split(',')
+
+    if options.ascii and options.ignore and options.ignore != "None":
+        for i, item in enumerate( invalid_starts ):
+            invalid_starts[i] = chr( int( item ) )
+
+    col = int( options.col )
+
+    for i, line in enumerate( infile ):
+        line = line.rstrip( '\r\n' )
+        if line:
+            if options.fastq and i % 2 == 0:
+                print(line)
+                continue
+
+            if line[0] not in invalid_starts:
+                if col == 0:
+                    if int( options.end ) > 0:
+                        line = line[ int( options.start ) - 1:int( options.end ) ]
+                    elif int( options.end ) < 0:
+                        endposition = len(line) + int( options.end )
+                        line = line[ int( options.start ) - 1:endposition ]
+                    else:
+                        line = line[ int( options.start ) - 1: ]
+                else:
+                    fields = line.split( '\t' )
+                    if col - 1 > len( fields ):
+                        stop_err('Column %d does not exist. Check input parameters\n' % col)
+
+                    if int( options.end ) > 0:
+                        fields[col - 1] = fields[col - 1][ int( options.start ) - 1:int( options.end ) ]
+                    elif int( options.end ) < 0:
+                        endposition = len(fields[col - 1]) + int( options.end )
+                        fields[col - 1] = fields[col - 1][ int( options.start ) - 1:endposition ]
+                    else:
+                        fields[col - 1] = fields[col - 1][ int( options.start ) - 1: ]
+                    line = '\t'.join(fields)
+            print(line)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/trimmer.xml b/tools/filters/trimmer.xml
new file mode 100644
index 0000000..cf9d176
--- /dev/null
+++ b/tools/filters/trimmer.xml
@@ -0,0 +1,140 @@
+<tool id="trimmer" name="Trim" version="0.0.1">
+    <description>leading or trailing characters</description>
+    <command>
+<![CDATA[
+python $__tool_directory__/trimmer.py -a -f '$input1' -c $col -s $start -e $end -i '$ignore' $fastq > '$out_file1'
+]]>
+    </command>
+    <inputs>
+        <param format="tabular,txt" name="input1" type="data" label="this dataset"/>
+        <param name="col" type="integer" value="0" label="Trim this column only" help="0 = process entire line" />
+        <param name="start" type="integer" size="10" value="1" label="Trim from the beginning up to this position" help="Only positive positions allowed. 1 = do not trim the beginning"/>
+        <param name="end" type="integer" size="10" value="0" label="Remove everything from this position to the end" help="Use negative position to indicate position starting from the end. 0 = do not trim the end"/>
+        <param name="fastq" type="select" label="Is input dataset in fastq format?" help="If set to YES, the tool will not trim evenly numbered lines (0, 2, 4, etc...). This allows for trimming the seq and qual lines, only if they are not spread over multiple lines (see warning below).">
+            <option selected="true" value="">No</option>
+            <option value="-q">Yes</option>
+        </param>
+        <param name="ignore" type="select" display="checkboxes" multiple="True" label="Ignore lines beginning with these characters" help="lines beginning with these are not trimmed">
+            <option value="62">></option>
+            <option value="64">@</option>
+            <option value="43">+</option>
+            <option value="60"><</option>
+            <option value="42">*</option>
+            <option value="45">-</option>
+            <option value="61">=</option>
+            <option value="124">|</option>
+            <option value="63">?</option>
+            <option value="36">$</option>
+            <option value="46">.</option>
+            <option value="58">:</option>
+            <option value="38">&</option>
+            <option value="37">%</option>
+            <option value="94">^</option>
+            <option value="35">#</option>
+         </param>   
+    </inputs>
+    <outputs>
+        <data name="out_file1" format_source="input1" metadata_source="input1"/>
+    </outputs>
+    <tests>
+        <test>
+           <param name="input1" value="trimmer_tab_delimited.dat"/>
+           <param name="col" value="0"/>
+           <param name="start" value="1"/>
+           <param name="end" value="13"/>
+           <param name="ignore" value="62"/>
+           <param name="fastq" value="No"/>
+           <output name="out_file1" file="trimmer_a_f_c0_s1_e13_i62.dat"/>
+        </test>
+        <test>
+           <param name="input1" value="trimmer_tab_delimited.dat"/>
+           <param name="col" value="2"/>
+           <param name="start" value="1"/>
+           <param name="end" value="2"/>
+           <param name="ignore" value="62"/>
+           <param name="fastq" value="No"/>
+           <output name="out_file1" file="trimmer_a_f_c2_s1_e2_i62.dat"/>
+        </test>
+        <test>
+           <param name="input1" value="trimmer_tab_delimited.dat"/>
+           <param name="col" value="2"/>
+           <param name="start" value="2"/>
+           <param name="end" value="-2"/>
+           <param name="ignore" value="62"/>
+           <param name="fastq" value="No"/>
+           <output name="out_file1" file="trimmer_a_f_c2_s2_e-2_i62.dat"/>
+        </test>	
+    </tests>
+
+    <help>
+**What it does**
+
+Trims a specified number of characters from a dataset, or from a single field if the dataset is tab-delimited.
+
+-----
+
+**Example 1**
+
+Trimming this dataset::
+
+  1234567890
+  abcdefghijk
+
+by setting **Trim from the beginning up to this position** to *2* and **Remove everything from this position to the end** to *6* will produce::
+
+  23456
+  bcdef
+
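+In Python terms this is a 1-based slice of each line, roughly ``line[start - 1:end]`` (a sketch of the behaviour, not the tool's exact code)::
+
+    line = "1234567890"
+    start, end = 2, 6
+    print(line[start - 1:end])  # prints '23456'
+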
+-----
+
+**Example 2**
+
+Trimming column 2 of this dataset::
+
+  abcde 12345 fghij 67890
+  fghij 67890 abcde 12345
+
+by setting **Trim this column only** to *2*, **Trim from the beginning up to this position** to *2*, and **Remove everything from this position to the end** to *4* will produce::
+
+  abcde  234 fghij 67890
+  fghij  789 abcde 12345
+
+-----
+
+**Example 3**
+
+Trimming column 2 of this dataset::
+
+  abcde 12345 fghij 67890
+  fghij 67890 abcde 12345
+
+by setting **Trim this column only** to *2*, **Trim from the beginning up to this position** to *2*, and **Remove everything from this position to the end** to *-2* will produce::
+
+  abcde  23 fghij 67890
+  fghij  78 abcde 12345
+
+----
+
+**Trimming FASTQ datasets**
+
+This tool can be used to trim sequences and quality strings in fastq datasets. This is done by selecting *Yes* in the **Is input dataset in fastq format?** dropdown. If set to *Yes*, the tool will skip all even numbered lines (see warning below). For example, trimming the last 5 bases of this dataset::
+
+  @081017-and-081020:1:1:1715:1759
+  GGACTCAGATAGTAATCCACGCTCCTTTAAAATATC
+  +
+  II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B
+  
+can be done by setting **Remove everything from this position to the end** to 31::
+
+  @081017-and-081020:1:1:1715:1759
+  GGACTCAGATAGTAATCCACGCTCCTTTAAA
+  +
+  II#IIIIIII$5+.(9IIIIIII$%*$G$A3 
+  
+**Note** that headers are skipped.
+
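+Internally, lines are counted from zero, so header lines (0, 2, 4, ...) pass through untouched while sequence and quality lines are sliced. A rough Python sketch of this behaviour (``reads.fastq`` is a hypothetical input file)::
+
+    with open("reads.fastq") as handle:
+        for i, line in enumerate(handle):
+            text = line.rstrip("\n")
+            if i % 2 == 0:
+                print(text)        # header lines are never trimmed
+            else:
+                print(text[:31])   # keep the first 31 characters
+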
+.. class:: warningmark
+
+**WARNING:** This tool will only work on properly formatted fastq datasets where (1) each read and each quality string occupies a single line and (2) the '@' (read header) and '+' (quality header) lines fall on even-numbered lines (0, 2, 4, ...) as in the above example.
+    </help>
+</tool>
diff --git a/tools/filters/ucsc_gene_bed_to_exon_bed.py b/tools/filters/ucsc_gene_bed_to_exon_bed.py
new file mode 100755
index 0000000..301a670
--- /dev/null
+++ b/tools/filters/ucsc_gene_bed_to_exon_bed.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+"""
+Read a table dump in the UCSC gene table format and print a tab separated
+list of intervals corresponding to requested features of each gene.
+
+usage: ucsc_gene_table_to_intervals.py [options]
+
+options:
+  -h, --help                  show this help message and exit
+  -rREGION, --region=REGION
+                              Limit to region: one of coding, utr3, utr5, codon, intron, transcribed [default]
+  -e, --exons                 Only print intervals overlapping an exon
+  -i, --input=inputfile       input file
+  -o, --output=outputfile     output file
+"""
+from __future__ import print_function
+
+import optparse
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def main():
+    parser = optparse.OptionParser( usage="%prog [options] " )
+    parser.add_option( "-r", "--region", dest="region", default="transcribed",
+                       help="Limit to region: one of coding, utr3, utr5, transcribed [default]" )
+    parser.add_option( "-e", "--exons", action="store_true", dest="exons",
+                       help="Only print intervals overlapping an exon" )
+    parser.add_option( "-s", "--strand", action="store_true", dest="strand",
+                       help="Print strand after interval" )
+    parser.add_option( "-i", "--input", dest="input", default=None,
+                       help="Input file" )
+    parser.add_option( "-o", "--output", dest="output", default=None,
+                       help="Output file" )
+    options, args = parser.parse_args()
+    assert options.region in ( 'coding', 'utr3', 'utr5', 'transcribed', 'intron', 'codon' ), "Invalid region argument"
+
+    try:
+        out_file = open(options.output, "w")
+    except Exception:
+        print("Bad output file.", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        in_file = open(options.input)
+    except Exception:
+        print("Bad input file.", file=sys.stderr)
+        sys.exit(1)
+
+    print("Region:", options.region + ";")
+    """print "Only overlap with Exons:",
+    if options.exons:
+        print "Yes"
+    else:
+        print "No"
+    """
+
+    # Read table and handle each gene
+    for line in in_file:
+        try:
+            if line[0:1] == "#":
+                continue
+            # Parse fields from the gene table
+            fields = line.split( '\t' )
+            chrom = fields[0]
+            tx_start = int( fields[1] )
+            tx_end = int( fields[2] )
+            name = fields[3]
+            strand = fields[5].replace(" ", "_")
+            cds_start = int( fields[6] )
+            cds_end = int( fields[7] )
+
+            # Determine the subset of the transcribed region we are interested in
+            if options.region == 'utr3':
+                if strand == '-':
+                    region_start, region_end = tx_start, cds_start
+                else:
+                    region_start, region_end = cds_end, tx_end
+            elif options.region == 'utr5':
+                if strand == '-':
+                    region_start, region_end = cds_end, tx_end
+                else:
+                    region_start, region_end = tx_start, cds_start
+            elif options.region == 'coding' or options.region == 'codon':
+                region_start, region_end = cds_start, cds_end
+            else:
+                region_start, region_end = tx_start, tx_end
+
+            # If only interested in exons, print the portion of each exon overlapping
+            # the region of interest, otherwise print the span of the region.
+            # options.exons is always true here: the wrapper XML passes --exons.
+            if options.exons:
+                exon_starts = [int(_) + tx_start for _ in fields[11].rstrip( ',\n' ).split( ',' )]
+                exon_ends = [int(_) for _ in fields[10].rstrip( ',\n' ).split( ',' )]
+                exon_ends = [x + y for x, y in zip(exon_starts, exon_ends)]
+
+            # for intron regions:
+            if options.region == 'intron':
+                i = 0
+                while i < len(exon_starts) - 1:
+                    intron_starts = exon_ends[i]
+                    intron_ends = exon_starts[i + 1]
+                    if strand:
+                        print_tab_sep(out_file, chrom, intron_starts, intron_ends, name, "0", strand )
+                    else:
+                        print_tab_sep(out_file, chrom, intron_starts, intron_ends )
+                    i += 1
+            # for non-intron regions:
+            else:
+                for start, end in zip( exon_starts, exon_ends ):
+                    start = max( start, region_start )
+                    end = min( end, region_end )
+                    if start < end:
+                        if options.region == 'codon':
+                            start += (3 - ((start - region_start) % 3)) % 3
+                            c_start = start
+                            while c_start + 3 <= end:
+                                if strand:
+                                    print_tab_sep(out_file, chrom, c_start, c_start + 3, name, "0", strand )
+                                else:
+                                    print_tab_sep(out_file, chrom, c_start, c_start + 3)
+                                c_start += 3
+                        else:
+                            if strand:
+                                print_tab_sep(out_file, chrom, start, end, name, "0", strand )
+                            else:
+                                print_tab_sep(out_file, chrom, start, end )
+        except Exception:
+            continue
+
+
+def print_tab_sep(out_file, *args ):
+    """Print items in `l` to stdout separated by tabs"""
+    print('\t'.join(str( f ) for f in args), file=out_file)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/ucsc_gene_bed_to_exon_bed.xml b/tools/filters/ucsc_gene_bed_to_exon_bed.xml
new file mode 100644
index 0000000..e606cc8
--- /dev/null
+++ b/tools/filters/ucsc_gene_bed_to_exon_bed.xml
@@ -0,0 +1,78 @@
+<tool id="gene2exon1" name="Gene BED To Exon/Intron/Codon BED" version="1.0.0">
+<description>expander</description>
+  <command interpreter="python">ucsc_gene_bed_to_exon_bed.py --input=$input1 --output=$out_file1 --region=$region "--exons"</command>
+  <inputs>
+    <param name="region" type="select">
+      <label>Extract</label>
+      <option value="transcribed">Coding Exons + UTR Exons</option>
+      <option value="coding">Coding Exons only</option>
+      <option value="utr5">5'-UTR Exons</option>
+      <option value="utr3">3'-UTR Exons</option>
+      <option value="intron">Introns</option>
+      <option value="codon">Codons</option>
+    </param>
+    <param name="input1" type="data" format="bed" label="from" help="this history item must contain a 12 field BED (see below)"/>
+  </inputs>
+  <outputs>
+    <data name="out_file1" format="bed"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="3.bed" /> 
+      <param name="region" value="transcribed" />
+      <output name="out_file1" file="cf-gene2exon.dat"/>
+    </test>
+  </tests>
+<help>
+
+.. class:: warningmark
+
+This tool works only on a BED file that contains at least 12 fields (see **Example** and **About formats** below).  The output will be empty if applied to a BED file with 3 or 6 fields.
+
+------
+
+**What it does**
+
+BED format can be used to represent a single gene in just one line, which contains the information about exons, coding sequence location (CDS), and positions of untranslated regions (UTRs).  This tool *unpacks* this information by converting a single line describing a gene into a collection of lines representing individual exons, introns, UTRs, etc. 
+
+-------
+
+**Example**
+
+Extracting **Coding Exons + UTR Exons** from the following two BED lines::
+
+    chr7 127475281 127491632 NM_000230 0 + 127486022 127488767 0 3 29,172,3225,    0,10713,13126
+    chr7 127486011 127488900 D49487    0 + 127486022 127488767 0 2 155,490,        0,2399
+
+will return::
+
+    chr7 127475281 127475310 NM_000230 0 +
+    chr7 127485994 127486166 NM_000230 0 +
+    chr7 127488407 127491632 NM_000230 0 +
+    chr7 127486011 127486166 D49487    0 +
+    chr7 127488410 127488900 D49487    0 +
+
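+The exon intervals above follow directly from the block fields: each exon starts at chromStart + blockStarts[i] and ends blockSizes[i] bases later. A minimal Python sketch using the first line above (illustrative only)::
+
+    chrom_start = 127475281
+    block_sizes = [29, 172, 3225]
+    block_starts = [0, 10713, 13126]
+    for size, start in zip(block_sizes, block_starts):
+        print(chrom_start + start, chrom_start + start + size)
+    # 127475281 127475310
+    # 127485994 127486166
+    # 127488407 127491632
+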
+------
+
+.. class:: infomark
+
+**About formats**
+
+**BED format** Browser Extensible Data format was designed at UCSC for displaying data tracks in the Genome Browser. It has three required fields and additional optional ones. In the specific case of this tool the following fields must be present::
+
+    1. chrom - The name of the chromosome (e.g. chr1, chrY_random).
+    2. chromStart - The starting position in the chromosome. (The first base in a chromosome is numbered 0.)
+    3. chromEnd - The ending position in the chromosome, plus 1 (i.e., a half-open interval).
+    4. name - The name of the BED line.
+    5. score - A score between 0 and 1000.
+    6. strand - Defines the strand - either '+' or '-'.
+    7. thickStart - The starting position where the feature is drawn thickly at the Genome Browser.
+    8. thickEnd - The ending position where the feature is drawn thickly at the Genome Browser.
+    9. reserved - This should always be set to zero.
+   10. blockCount - The number of blocks (exons) in the BED line.
+   11. blockSizes - A comma-separated list of the block sizes. The number of items in this list should correspond to blockCount.
+   12. blockStarts - A comma-separated list of block starts. All of the blockStart positions should be calculated relative to chromStart. The number of items in this list should correspond to blockCount.
+
+
+</help>
+</tool>
diff --git a/tools/filters/ucsc_gene_bed_to_intron_bed.py b/tools/filters/ucsc_gene_bed_to_intron_bed.py
new file mode 100755
index 0000000..9be6f91
--- /dev/null
+++ b/tools/filters/ucsc_gene_bed_to_intron_bed.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+"""
+Read a table dump in the UCSC gene table format and print a tab separated
+list of intervals corresponding to requested features of each gene.
+
+usage: ucsc_gene_table_to_intervals.py [options]
+
+options:
+  -h, --help                  show this help message and exit
+  -rREGION, --region=REGION
+                              Limit to region: one of coding, utr3, utr5, transcribed [default]
+  -e, --exons                 Only print intervals overlapping an exon
+  -i, --input=inputfile       input file
+  -o, --output=outputfile     output file
+"""
+from __future__ import print_function
+
+import optparse
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def main():
+    parser = optparse.OptionParser( usage="%prog [options] " )
+    parser.add_option( "-s", "--strand", action="store_true", dest="strand",
+                       help="Print strand after interval" )
+    parser.add_option( "-i", "--input", dest="input", default=None,
+                       help="Input file" )
+    parser.add_option( "-o", "--output", dest="output", default=None,
+                       help="Output file" )
+    options, args = parser.parse_args()
+
+    try:
+        out_file = open(options.output, "w")
+    except Exception:
+        print("Bad output file.", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        in_file = open(options.input)
+    except Exception:
+        print("Bad input file.", file=sys.stderr)
+        sys.exit(1)
+
+    # Read table and handle each gene
+    for line in in_file:
+        try:
+            if line[0:1] == "#":
+                continue
+
+            # Parse fields from the gene table
+            fields = line.split( '\t' )
+            chrom = fields[0]
+            tx_start = int( fields[1] )
+            int( fields[2] )  # tx_end: parsed only to validate the field
+            name = fields[3]
+            strand = fields[5].replace(" ", "_")
+            int( fields[6] )  # cds_start: parsed only to validate the field
+            int( fields[7] )  # cds_end: parsed only to validate the field
+
+            exon_starts = [int(_) + tx_start for _ in fields[11].rstrip( ',\n' ).split( ',' )]
+            exon_ends = [int(_) for _ in fields[10].rstrip( ',\n' ).split( ',' )]
+            exon_ends = [x + y for x, y in zip(exon_starts, exon_ends)]
+
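+            # Introns occupy the gaps between consecutive exons; the +1/-1
+            # offsets below exclude the flanking exon bases.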
+            i = 0
+            while i < len(exon_starts) - 1:
+                intron_starts = exon_ends[i] + 1
+                intron_ends = exon_starts[i + 1] - 1
+                if strand:
+                    print_tab_sep(out_file, chrom, intron_starts, intron_ends, name, "0", strand )
+                else:
+                    print_tab_sep(out_file, chrom, intron_starts, intron_ends )
+                i += 1
+        except Exception:
+            continue
+
+
+def print_tab_sep(out_file, *args ):
+    """Print items in `l` to stdout separated by tabs"""
+    print('\t'.join(str( f ) for f in args), file=out_file)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/ucsc_gene_bed_to_intron_bed.xml b/tools/filters/ucsc_gene_bed_to_intron_bed.xml
new file mode 100644
index 0000000..7825770
--- /dev/null
+++ b/tools/filters/ucsc_gene_bed_to_intron_bed.xml
@@ -0,0 +1,60 @@
+<tool id="gene2intron1" name="Gene BED To Intron BED" version="1.0.0">
+<description>expander</description>
+  <command interpreter="python">ucsc_gene_bed_to_intron_bed.py --input=$input1 --output=$out_file1</command>
+  <inputs>
+    <param name="input1" type="data" format="interval" label="UCSC Gene Table"/>
+    
+  </inputs>
+  <outputs>
+    <data name="out_file1" format="bed"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="3.bed" /> 
+      <output name="out_file1" file="cf-gene2intron.dat"/>
+    </test>
+  </tests>
+<help>
+
+**Syntax**
+
+This tool converts a UCSC gene BED format file to a list of BED format lines corresponding to the introns of each gene.
+
+- **BED format** Browser Extensible Data format was designed at UCSC for displaying data tracks in the Genome Browser. It has three required fields and twelve additional optional ones::
+
+    The first three BED fields (required) are:
+    1. chrom - The name of the chromosome (e.g. chr1, chrY_random).
+    2. chromStart - The starting position in the chromosome. (The first base in a chromosome is numbered 0.)
+    3. chromEnd - The ending position in the chromosome, plus 1 (i.e., a half-open interval).
+
+    The twelve additional BED fields (optional) are:
+    4. name - The name of the BED line.
+    5. score - A score between 0 and 1000.
+    6. strand - Defines the strand - either '+' or '-'.
+    7. thickStart - The starting position where the feature is drawn thickly at the Genome Browser.
+    8. thickEnd - The ending position where the feature is drawn thickly at the Genome Browser.
+    9. reserved - This should always be set to zero.
+   10. blockCount - The number of blocks (exons) in the BED line.
+   11. blockSizes - A comma-separated list of the block sizes. The number of items in this list should correspond to blockCount.
+   12. blockStarts - A comma-separated list of block starts. All of the blockStart positions should be calculated relative to chromStart. The number of items in this list should correspond to blockCount.
+   13. expCount - The number of experiments.
+   14. expIds - A comma-separated list of experiment ids. The number of items in this list should correspond to expCount.
+   15. expScores - A comma-separated list of experiment scores. All of the expScores should be relative to expIds. The number of items in this list should correspond to expCount.
+
+-----
+
+**Example**
+
+- A UCSC gene bed format file::
+
+    chr7 127475281 127491632 NM_000230 0 + 127486022 127488767 0 3 29,172,3225,    0,10713,13126
+    chr7 127486011 127488900 D49487    0 + 127486022 127488767 0 2 155,490,        0,2399
+
+- Converting the above file yields a list of BED lines containing the introns::
+
+    chr7 127475311 127475993 NM_000230 0 +
+    chr7 127486167 127488406 NM_000230 0 +
+    chr7 127486167 127488409 D49487    0 +
+
+</help>
+</tool>
diff --git a/tools/filters/ucsc_gene_table_to_intervals.py b/tools/filters/ucsc_gene_table_to_intervals.py
new file mode 100755
index 0000000..744610a
--- /dev/null
+++ b/tools/filters/ucsc_gene_table_to_intervals.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+"""
+Read a table dump in the UCSC gene table format and print a tab separated
+list of intervals corresponding to requested features of each gene.
+
+usage: ucsc_gene_table_to_intervals.py [options]
+
+options:
+  -h, --help                  show this help message and exit
+  -rREGION, --region=REGION
+                              Limit to region: one of coding, utr3, utr5, transcribed [default]
+  -e, --exons                 Only print intervals overlapping an exon
+  -i, --input=inputfile       input file
+  -o, --output=outputfile     output file
+"""
+from __future__ import print_function
+
+import optparse
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def main():
+    parser = optparse.OptionParser( usage="%prog [options] " )
+    parser.add_option( "-r", "--region", dest="region", default="transcribed",
+                       help="Limit to region: one of coding, utr3, utr5, transcribed [default]" )
+    parser.add_option( "-e", "--exons", action="store_true", dest="exons",
+                       help="Only print intervals overlapping an exon" )
+    parser.add_option( "-s", "--strand", action="store_true", dest="strand",
+                       help="Print strand after interval" )
+    parser.add_option( "-i", "--input", dest="input", default=None,
+                       help="Input file" )
+    parser.add_option( "-o", "--output", dest="output", default=None,
+                       help="Output file" )
+    options, args = parser.parse_args()
+    assert options.region in ( 'coding', 'utr3', 'utr5', 'transcribed' ), "Invalid region argument"
+
+    try:
+        out_file = open(options.output, "w")
+    except Exception:
+        print("Bad output file.", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        in_file = open(options.input)
+    except Exception:
+        print("Bad input file.", file=sys.stderr)
+        sys.exit(1)
+
+    print("Region:", options.region + ";")
+    print("Only overlap with Exons:", end=' ')
+    if options.exons:
+        print("Yes")
+    else:
+        print("No")
+
+    # Read table and handle each gene
+    for line in in_file:
+        try:
+            if line[0:1] == "#":
+                continue
+            # Parse fields from the gene table
+            fields = line.split( '\t' )
+            name = fields[0]
+            chrom = fields[1]
+            strand = fields[2].replace(" ", "_")
+            tx_start = int( fields[3] )
+            tx_end = int( fields[4] )
+            cds_start = int( fields[5] )
+            cds_end = int( fields[6] )
+
+            # Determine the subset of the transcribed region we are interested in
+            if options.region == 'utr3':
+                if strand == '-':
+                    region_start, region_end = tx_start, cds_start
+                else:
+                    region_start, region_end = cds_end, tx_end
+            elif options.region == 'utr5':
+                if strand == '-':
+                    region_start, region_end = cds_end, tx_end
+                else:
+                    region_start, region_end = tx_start, cds_start
+            elif options.region == 'coding':
+                region_start, region_end = cds_start, cds_end
+            else:
+                region_start, region_end = tx_start, tx_end
+
+            # If only interested in exons, print the portion of each exon overlapping
+            # the region of interest, otherwise print the span of the region
+            if options.exons:
+                exon_starts = map( int, fields[8].rstrip( ',\n' ).split( ',' ) )
+                exon_ends = map( int, fields[9].rstrip( ',\n' ).split( ',' ) )
+                for start, end in zip( exon_starts, exon_ends ):
+                    start = max( start, region_start )
+                    end = min( end, region_end )
+                    if start < end:
+                        if strand:
+                            print_tab_sep(out_file, chrom, start, end, name, "0", strand )
+                        else:
+                            print_tab_sep(out_file, chrom, start, end )
+            else:
+                if strand:
+                    print_tab_sep(out_file, chrom, region_start, region_end, name, "0", strand )
+                else:
+                    print_tab_sep(out_file, chrom, region_start, region_end )
+        except Exception:
+            continue
+
+
+def print_tab_sep(out_file, *args ):
+    """Print items in `l` to stdout separated by tabs"""
+    print('\t'.join(str( f ) for f in args), file=out_file)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/ucsc_gene_table_to_intervals.xml b/tools/filters/ucsc_gene_table_to_intervals.xml
new file mode 100644
index 0000000..8e382f8
--- /dev/null
+++ b/tools/filters/ucsc_gene_table_to_intervals.xml
@@ -0,0 +1,25 @@
+<tool id="ucsc_gene_table_to_intervals1" name="Gene Table To BED" version="1.0.0">
+<description>Parse a UCSC Gene Table dump</description>
+  <command interpreter="python">ucsc_gene_table_to_intervals.py --input=$input1 --output=$out_file1 --region=$region $exon</command>
+  <inputs>
+    <param name="input1" type="data" format="inverval" label="UCSC Gene Table"/>
+    <param name="region" type="select">
+      <label>Feature Type</label>
+      <option value="transcribed">Transcribed</option>
+      <option value="coding">Coding</option>
+      <option value="utr3">3' UTR</option>
+      <option value="utr5">5' UTR</option>
+    </param>
+    <param name="exon" type="select">
+      <label>Only print intervals overlapping an exon</label>
+      <option value="">False</option>
+      <option value="--exons">True</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data name="out_file1" format="bed"/>
+  </outputs>
+<help>
+Read a table dump in the UCSC gene table format and create a BED file corresponding to the requested feature of each gene.
+</help>
+</tool>
\ No newline at end of file
diff --git a/tools/filters/uniq.py b/tools/filters/uniq.py
new file mode 100644
index 0000000..07bbefa
--- /dev/null
+++ b/tools/filters/uniq.py
@@ -0,0 +1,141 @@
+# Filename: uniq.py
+# Author: Ian N. Schenck
+# Version: 19/12/2005
+#
+# This script accepts an input file, an output file, a column
+# delimiter, and a list of columns.  The script then grabs unique
+# lines based on the columns, and returns those records with a count
+# of occurrences of each unique combination of values (ignoring trailing spaces),
+# inserted before the columns.
+#
+# This executes the command pipeline:
+#       cut -f $fields | sort | uniq -c
+#
+# -i            Input file
+# -o            Output file
+# -d            Delimiter
+# -c            Column list (comma separated)
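+# -s            Sorting: value (default), largest, or smallest
+#
+# Example invocation (illustrative; the file names are hypothetical):
+#   python uniq.py -i input.tabular -o counts.tabular -d T -c 1,2 -s largest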
+from __future__ import print_function
+
+import re
+import subprocess
+import sys
+
+
+# This function is exceedingly useful, perhaps package for reuse?
+def getopts(argv):
+    opts = {}
+    while argv:
+        if argv[0][0] == '-':
+            opts[argv[0]] = argv[1]
+            argv = argv[2:]
+        else:
+            argv = argv[1:]
+    return opts
+
+
+def main():
+    args = sys.argv[1:]
+
+    try:
+        opts = getopts(args)
+    except IndexError:
+        print("Usage:")
+        print(" -i        Input file")
+        print(" -o        Output file")
+        print(" -c        Column list (comma seperated)")
+        print(" -d        Delimiter:")
+        print("                     T   Tab")
+        print("                     C   Comma")
+        print("                     D   Dash")
+        print("                     U   Underscore")
+        print("                     P   Pipe")
+        print("                     Dt  Dot")
+        print("                     Sp  Space")
+        print(" -s        Sorting: value (default), largest, or smallest")
+        return 0
+
+    outputfile = opts.get("-o")
+    if outputfile is None:
+        print("No output file specified.")
+        return -1
+
+    inputfile = opts.get("-i")
+    if inputfile is None:
+        print("No input file specified.")
+        return -2
+
+    delim = opts.get("-d")
+    if delim is None:
+        print("Field delimiter not specified.")
+        return -3
+
+    columns = opts.get("-c")
+    if columns is None or columns == 'None':
+        print("Columns not specified.")
+        return -4
+
+    sorting = opts.get("-s")
+    if sorting is None:
+        sorting = "value"
+    if sorting not in ["value", "largest", "smallest"]:
+        print("Unknown sorting option %r" % sorting)
+        return -5
+
+    # All inputs have been specified at this point, now validate.
+    fileRegEx = re.compile(r"^[A-Za-z0-9./\-_]+$")
+    columnRegEx = re.compile("([0-9]{1,},?)+")
+
+    if not columnRegEx.match(columns):
+        print("Illegal column specification.")
+        return -4
+    if not fileRegEx.match(outputfile):
+        print("Illegal output filename.")
+        return -5
+    if not fileRegEx.match(inputfile):
+        print("Illegal input filename.")
+        return -6
+
+    column_list = re.split(",", columns)
+    columns_for_display = "c" + ", c".join(column_list)
+
+    commandline = "cut "
+    # Set delimiter
+    if delim == 'C':
+        commandline += "-d \",\" "
+    if delim == 'D':
+        commandline += "-d \"-\" "
+    if delim == 'U':
+        commandline += "-d \"_\" "
+    if delim == 'P':
+        commandline += "-d \"|\" "
+    if delim == 'Dt':
+        commandline += "-d \".\" "
+    if delim == 'Sp':
+        commandline += "-d \" \" "
+
+    # set columns
+    commandline += "-f " + columns
+    # we want to remove *trailing* spaces from each field,
+    # so look for spaces then tab (for first and middle selected columns)
+    # and replace with just a tab, and remove any spaces at the end of the line
+    # (for the final selected column):
+    commandline += " " + inputfile + " | sed 's/\ *\t/\t/' | sed 's/\ *$//'"
+    commandline += " | sort | uniq -c"
+    # uniq -c puts counts at the start, so we can sort lines by numerical value
+    if sorting == "largest":
+        commandline += " | sort -n -r"
+    elif sorting == "smallest":
+        commandline += " | sort -n"
+    # uniq -c produces lines with leading spaces, so use sed to remove them.
+    # uniq -c puts a space between the count and the field; we want a tab.
+    # To replace just first tab, use sed again with 1 as the index
+    commandline += " | sed 's/^\ *//' | sed 's/ /\t/1' > " + outputfile
+    errorcode = subprocess.call(commandline, shell=True)
+
+    print("Count of unique values in " + columns_for_display)
+    return errorcode
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/uniq.xml b/tools/filters/uniq.xml
new file mode 100644
index 0000000..63ed13f
--- /dev/null
+++ b/tools/filters/uniq.xml
@@ -0,0 +1,105 @@
+<tool id="Count1" name="Count" version="1.0.2">
+  <description>occurrences of each record</description>
+  <command interpreter="python">uniq.py -i $input -o $out_file1 -c "$column" -d $delim -s $sorting</command>
+  <inputs>
+    <param name="input" type="data" format="tabular" label="from dataset" help="Dataset missing? See TIP below"/>
+    <param name="column" type="data_column" data_ref="input" multiple="True" numerical="False" label="Count occurrences of values in column(s)" help="Multi-select list - hold the appropriate key while clicking to select multiple columns" />
+    <param name="delim" type="select" label="Delimited by">
+      <option value="T">Tab</option>
+      <option value="Sp">Whitespace</option>
+      <option value="Dt">Dot</option>
+      <option value="C">Comma</option>
+      <option value="D">Dash</option>
+      <option value="U">Underscore</option>
+      <option value="P">Pipe</option>
+    </param>
+    <param name="sorting" type="select" label="How should the results be sorted?">
+      <option value="value">By the values being counted</option>
+      <option value="largest">With the most common values first</option>
+      <option value="smallest">With the rarest values first</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="tabular" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="1.bed"/>
+      <output name="out_file1" file="uniq_out.dat"/>
+      <param name="column" value="1"/>
+      <param name="delim" value="T"/>
+    </test>
+    <test>
+      <param name="input" value="species_assignment.tabular" ftype="tabular"/>
+      <output name="out_file1" file="species_assignment_c2.tabular"/>
+      <param name="column" value="2"/>
+      <param name="delim" value="T"/>
+    </test>
+    <test>
+      <param name="input" value="species_assignment.tabular" ftype="tabular"/>
+      <output name="out_file1" file="species_assignment_c2_c3.tabular"/>
+      <param name="column" value="2,3"/>
+      <param name="delim" value="T"/>
+    </test>
+    <test>
+      <param name="input" value="species_assignment.tabular" ftype="tabular"/>
+      <output name="out_file1" file="species_assignment_c2_c3_largest.tabular"/>
+      <param name="column" value="2,3"/>
+      <param name="delim" value="T"/>
+      <param name="sorting" value="largest"/>
+    </test>
+    <test>
+      <param name="input" value="species_assignment.tabular" ftype="tabular"/>
+      <output name="out_file1" file="species_assignment_c2_c3_smallest.tabular"/>
+      <param name="column" value="2,3"/>
+      <param name="delim" value="T"/>
+      <param name="sorting" value="smallest"/>
+    </test>
+  </tests>
+  <help>
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**Syntax**
+
+This tool counts occurrences of unique values in selected column(s).
+
+- If multiple columns are selected, counting is performed on each unique group of all values in the selected columns.
+- The first column of the resulting dataset will contain the count of unique values, followed by the value(s) from the selected column(s).
+
+-----
+
+**Example**
+
+- Input file::
+     
+       chr1   10  100  gene1
+       chr1  105  200  gene2
+       chr1  205  300  gene3
+       chr2   10  100  gene4
+       chr2 1000 1900  gene5
+       chr3   15 1656  gene6
+       chr4   10 1765  gene7
+       chr4   10 1765  gene8
+
+- Counting unique values in column c1 will result in::
+
+       3 chr1
+       2 chr2
+       1 chr3
+       2 chr4   
+
+- Counting unique values in the grouping of columns c2 and c3 will result in::
+
+       2    10    100
+       2    10    1765
+       1    1000  1900
+       1    105   200
+       1    15    1656
+       1    205   300
+
+</help>
+</tool>
diff --git a/tools/filters/wc_gnu.xml b/tools/filters/wc_gnu.xml
new file mode 100644
index 0000000..d87eb5b
--- /dev/null
+++ b/tools/filters/wc_gnu.xml
@@ -0,0 +1,72 @@
+<tool id="wc_gnu" name="Line/Word/Character count" version="1.0.0">
+    <description>of a dataset</description>
+    <command>
+        #set $word_to_arg = { 'characters':'m', 'words':'w', 'lines':'l' }
+        #set $arg_order = [ 'lines', 'words', 'characters' ]
+        #if not isinstance( $options.value, list ):
+            #set $args = [ $options.value ]
+        #else:
+            #set $args = $options.value
+        #end if
+        #if $include_header.value:
+            echo "#${ "\t".join( [ i for i in $arg_order if i in $args ] ) }" > $out_file1
+            &&
+        #end if
+        wc
+        #for $option in $args:
+           -${ word_to_arg[ str(option) ] }
+        #end for
+        $input1 | awk '{ print ${ '"\\t"'.join( [ "$%i" % ( i+1 ) for i in range( len( $args ) ) ] ) } }'
+        >> $out_file1
+    </command>
+    <inputs>
+        <param format="txt" name="input1" type="data" label="Text file"/>
+        <param name="options" type="select" multiple="True" display="checkboxes" label="Desired values">
+            <!-- <option value="bytes" selected="True">Byte count</option> -->
+            <option value="lines" selected="True">Line count</option>
+            <option value="words" selected="True">Word count</option>
+            <option value="characters" selected="True">Character count</option>
+            <validator type="no_options" message="You must pick at least one attribute to count." />
+        </param>
+        <param name="include_header" type="boolean" label="Include Output header" checked="True"/>
+    </inputs>
+    <outputs>
+        <data format="tabular" name="out_file1"/>
+    </outputs>
+    <tests>
+        <test>
+          <param name="input1" value="1.bed"/>
+          <param name="options" value="lines,words,characters"/>
+          <param name="include_header" value="True"/>
+          <output name="out_file1" file="wc_gnu_out_1.tabular"/>
+        </test>
+        <test>
+          <param name="input1" value="1.bed"/>
+          <param name="options" value="lines,words,characters"/>
+          <param name="include_header" value="False"/>
+          <output name="out_file1" file="wc_gnu_out_2.tabular"/>
+        </test>
+    </tests>
+    <help>
+
+**What it does**
+
+This tool outputs counts of specified attributes (lines, words, characters) of a dataset. 
+
+-----
+
+**Example Output**
+
+::
+
+  #lines  words  characters
+  7499	  41376	 624971
+
+------
+
+**Citation**
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
+    </help>
+</tool>
diff --git a/tools/filters/wig_to_bigwig.xml b/tools/filters/wig_to_bigwig.xml
new file mode 100644
index 0000000..261229d
--- /dev/null
+++ b/tools/filters/wig_to_bigwig.xml
@@ -0,0 +1,94 @@
+<tool id="wig_to_bigWig" name="Wig/BedGraph-to-bigWig" version="1.1.0">
+  <description>converter</description>
+  <requirements>
+    <requirement type="package">ucsc_tools</requirement>
+  </requirements>
+  <stdio>
+      <!-- Anything other than zero is an error -->
+      <regex match="needLargeMem: trying to allocate 0 bytes"
+          description="Your input file might be empty or wrongly formatted"/>
+      <regex match="^Error"/>
+  </stdio>
+  <command>
+      <![CDATA[
+      grep -v "^track" $input1 | wigToBigWig stdin $chromInfo $out_file1
+    #if $settings.settingsType == "full":
+      -blockSize=${settings.blockSize} -itemsPerSlot=${settings.itemsPerSlot} ${settings.clip} ${settings.unc}
+    #else:
+      -clip
+    #end if
+    2>&1 || echo "Error running wigToBigWig." >&2
+    ]]>
+  </command>
+  <inputs>
+    <param format="wig,bedgraph" name="input1" type="data" label="Convert">
+      <validator type="unspecified_build" />
+    </param>
+    <conditional name="settings">
+      <param name="settingsType" type="select" label="Converter settings to use" help="Default settings should usually be used.">
+        <option value="preset">Default</option>
+        <option value="full">Full parameter list</option>
+      </param>
+      <when value="preset" />
+      <when value="full">
+        <param name="blockSize" size="4" type="integer" value="256" label="Items to bundle in r-tree" help="Default is 256 (blockSize)" />
+        <param name="itemsPerSlot" size="4" type="integer" value="1024" label="Data points bundled at lowest level" help="Default is 1024 (itemsPerSlot)" />
+        <param name="clip" type="boolean" truevalue="-clip" falsevalue="" checked="True" label="Clip chromosome positions" help="Issue warning messages rather than dying if wig file contains items off end of chromosome. (clip)"/>
+        <param name="unc" type="boolean" truevalue="-unc" falsevalue="" checked="False" label="Do not use compression" help="(unc)"/>
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="bigwig" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="2.wig" dbkey="hg17" />
+      <param name="settingsType" value="full" />
+      <param name="blockSize" value="256" />
+      <param name="itemsPerSlot" value="1024" />
+      <param name="clip" value="True" />
+      <param name="unc" value="False" />
+      <output name="out_file1" file="2.bigwig"/>
+    </test>
+    <test>
+      <param name="input1" value="2.wig" dbkey="hg17" />
+      <param name="settingsType" value="preset" />
+      <output name="out_file1" file="2.bigwig"/>
+    </test>
+    <test>
+      <param name="input1" value="1.bedgraph" dbkey="hg19" ftype="bedgraph"/>
+      <param name="settingsType" value="preset" />
+      <output name="out_file1" file="3.bigwig"/>
+    </test>
+  </tests>
+  <help>
+**Syntax**
+
+This tool converts bedgraph or wiggle data into bigWig type.
+
+- **Wiggle format**: The .wig format is line-oriented. Wiggle data is preceded by a UCSC track definition line.  Following the track definition line is the track data, which can be entered in three different formats described below.
+
+  - **BED format** with no declaration line and four columns of data::
+
+      chromA  chromStartA  chromEndA  dataValueA
+      chromB  chromStartB  chromEndB  dataValueB
+
+  - **variableStep** two column data; started by a declaration line and followed with chromosome positions and data values::
+
+      variableStep  chrom=chrN  [span=windowSize]
+      chromStartA  dataValueA
+      chromStartB  dataValueB
+
+  - **fixedStep** single column data; started by a declaration line and followed with data values::
+
+      fixedStep  chrom=chrN  start=position  step=stepInterval  [span=windowSize]
+      dataValue1
+      dataValue2
+
+- The **BedGraph format** is described in detail at the `UCSC Bioinformatics website`_
+
+.. _UCSC Bioinformatics website: http://genome.ucsc.edu/goldenPath/help/bedgraph.html
+
+</help>
+</tool>
diff --git a/tools/filters/wiggle_to_simple.py b/tools/filters/wiggle_to_simple.py
new file mode 100755
index 0000000..855523b
--- /dev/null
+++ b/tools/filters/wiggle_to_simple.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+"""
+Read a wiggle track and print out a series of lines containing
+"chrom position score". Ignores track lines, handles bed, variableStep
+and fixedStep wiggle lines.
+"""
+from __future__ import print_function
+
+import sys
+
+import bx.wiggle
+
+from galaxy.util.ucsc import UCSCLimitException, UCSCOutWrapper
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit(1)
+
+
+def main():
+    if len( sys.argv ) > 1:
+        in_file = open( sys.argv[1] )
+    else:
+        in_file = sys.stdin
+
+    if len( sys.argv ) > 2:
+        out_file = open( sys.argv[2], "w" )
+    else:
+        out_file = sys.stdout
+
+    try:
+        for fields in bx.wiggle.IntervalReader( UCSCOutWrapper( in_file ) ):
+            out_file.write( "%s\n" % "\t".join( map( str, fields ) ) )
+    except UCSCLimitException:
+        # Wiggle data was truncated, at the very least need to warn the user.
+        print('Encountered message from UCSC: "Reached output limit of 100000 data values", so be aware your data was truncated.')
+    except ValueError as e:
+        in_file.close()
+        out_file.close()
+        stop_err( str( e ) )
+
+    in_file.close()
+    out_file.close()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/filters/wiggle_to_simple.xml b/tools/filters/wiggle_to_simple.xml
new file mode 100644
index 0000000..991ce65
--- /dev/null
+++ b/tools/filters/wiggle_to_simple.xml
@@ -0,0 +1,88 @@
+<tool id="wiggle2simple1" name="Wiggle-to-Interval" version="1.0.0">
+  <description>converter</description>
+  <command interpreter="python">wiggle_to_simple.py $input $out_file1 </command>
+  <inputs>
+    <param format="wig" name="input" type="data" label="Convert"/>
+  </inputs>
+  <outputs>
+    <data format="interval" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="2.wig" />
+      <output name="out_file1" file="2.interval"/>
+    </test>
+    <test>
+      <param name="input" value="3.wig" />
+      <output name="out_file1" file="3_wig.bed"/>
+    </test>
+  </tests>
+  <help>
+**Syntax**
+
+This tool converts wiggle data into interval type.
+
+- **Wiggle format**: The .wig format is line-oriented. Wiggle data is preceded by a UCSC track definition line.  Following the track definition line is the track data, which can be entered in three different formats described below.
+
+  - **BED format** with no declaration line and four columns of data::
+
+      chromA  chromStartA  chromEndA  dataValueA
+      chromB  chromStartB  chromEndB  dataValueB
+
+  - **variableStep** two column data; started by a declaration line and followed with chromosome positions and data values::
+
+      variableStep  chrom=chrN  [span=windowSize]
+      chromStartA  dataValueA
+      chromStartB  dataValueB
+
+  - **fixedStep** single column data; started by a declaration line and followed with data values::
+
+      fixedStep  chrom=chrN  start=position  step=stepInterval  [span=windowSize]
+      dataValue1
+      dataValue2
+
+-----
+
+**Example**
+
+- input wiggle format file::
+
+    #track type=wiggle_0 name="Bed Format" description="BED format"
+    chr19 59302000 59302300 -1.0
+    chr19 59302300 59302600 -0.75
+    chr19 59302600 59302900 -0.50
+    chr19 59302900 59303200 -0.25
+    chr19 59303200 59303500 0.0
+    #track type=wiggle_0 name="variableStep" description="variableStep format"
+    variableStep chrom=chr19 span=150
+    59304701 10.0
+    59304901 12.5
+    59305401 15.0
+    59305601 17.5
+    #track type=wiggle_0 name="fixedStep" description="fixed step" visibility=full
+    fixedStep chrom=chr19 start=59307401 step=300 span=200
+    1000
+    900
+    800
+    700
+    600
+
+- convert the above file to interval file::
+
+    chr19	59302000	59302300	+	-1.0
+    chr19	59302300	59302600	+	-0.75
+    chr19	59302600	59302900	+	-0.5
+    chr19	59302900	59303200	+	-0.25
+    chr19	59303200	59303500	+	0.0
+    chr19	59304701	59304851	+	10.0
+    chr19	59304901	59305051	+	12.5
+    chr19	59305401	59305551	+	15.0
+    chr19	59305601	59305751	+	17.5
+    chr19	59307701	59307901	+	1000.0
+    chr19	59308001	59308201	+	900.0
+    chr19	59308301	59308501	+	800.0
+    chr19	59308601	59308801	+	700.0
+    chr19	59308901	59309101	+	600.0
+
+</help>
+</tool>
diff --git a/tools/genomespace/genomespace_exporter.py b/tools/genomespace/genomespace_exporter.py
new file mode 100644
index 0000000..7e3f409
--- /dev/null
+++ b/tools/genomespace/genomespace_exporter.py
@@ -0,0 +1,334 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+from __future__ import print_function
+
+import base64
+import binascii
+import datetime
+import hashlib
+import json
+import logging
+import optparse
+import os
+import tempfile
+
+import six
+from six.moves import http_cookiejar
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.parse import quote, urlencode, urljoin
+from six.moves.urllib.request import build_opener, HTTPCookieProcessor, Request, urlopen
+
+log = logging.getLogger( "tools.genomespace.genomespace_exporter" )
+
+try:
+    import boto
+    from boto.s3.connection import S3Connection
+except ImportError:
+    boto = None
+
+GENOMESPACE_API_VERSION_STRING = "v1.0"
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
+DEFAULT_GENOMESPACE_TOOLNAME = 'Galaxy'
+
+CHUNK_SIZE = 2 ** 20  # 1mb
+
+# TODO: TARGET_SPLIT_SIZE and TARGET_SIMPLE_PUT_UPLOAD_SIZE are arbitrarily defined
+# we should programmatically determine these, based upon the current environment
+TARGET_SPLIT_SIZE = 250 * 1024 * 1024  # 250 mb
+MIN_MULTIPART_UPLOAD_SIZE = 5 * 1024 * 1024  # 5mb
+MAX_SIMPLE_PUT_UPLOAD_SIZE = 5 * 1024 * 1024 * 1024  # 5gb
+TARGET_SIMPLE_PUT_UPLOAD_SIZE = MAX_SIMPLE_PUT_UPLOAD_SIZE // 2  # keep integer arithmetic under Python 3
+
+# Some basic Caching, so we don't have to reload and download everything every time,
+# especially now that we are calling the parameter's get options method 5 times
+# (6 on reload) when a user loads the tool interface
+# For now, we'll use 30 seconds as the cache valid time
+CACHE_TIME = datetime.timedelta( seconds=30 )
+GENOMESPACE_DIRECTORIES_BY_USER = {}
+
+
+def chunk_write( source_stream, target_stream, source_method="read", target_method="write" ):
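+    """Copy data from source_stream to target_stream in CHUNK_SIZE pieces."""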
+    source_method = getattr( source_stream, source_method )
+    target_method = getattr( target_stream, target_method )
+    while True:
+        chunk = source_method( CHUNK_SIZE )
+        if chunk:
+            target_method( chunk )
+        else:
+            break
+
+
+def get_cookie_opener( gs_username, gs_token, gs_toolname=None ):
+    """ Create a GenomeSpace cookie opener """
+    cj = http_cookiejar.CookieJar()
+    for cookie_name, cookie_value in [ ( 'gs-token', gs_token ), ( 'gs-username', gs_username ) ]:
+        # create a super-cookie, valid for all domains
+        cookie = http_cookiejar.Cookie(version=0, name=cookie_name, value=cookie_value, port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False )
+        cj.set_cookie( cookie )
+    cookie_opener = build_opener( HTTPCookieProcessor( cj ) )
+    cookie_opener.addheaders.append( ( 'gs-toolname', gs_toolname or DEFAULT_GENOMESPACE_TOOLNAME ) )
+    return cookie_opener
+
+
+def get_genomespace_site_urls():
+    genomespace_sites = {}
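+    # Each property line has the form "<server>.<key>=<value>", e.g.
+    # "prod.dmServer=https://dm.genomespace.org" (an illustrative value).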
+    for line in urlopen( GENOMESPACE_SERVER_URL_PROPERTIES ).read().split( '\n' ):
+        line = line.rstrip()
+        if not line or line.startswith( "#" ):
+            continue
+        server, line = line.split( '.', 1 )
+        if server not in genomespace_sites:
+            genomespace_sites[server] = {}
+        line = line.split( "=", 1 )
+        genomespace_sites[server][line[0]] = line[1]
+    return genomespace_sites
+
+
+def get_directory( url_opener, dm_url, path ):
+    url = dm_url
+    i = None
+    dir_dict = {}
+    for i, sub_path in enumerate( path ):
+        url = "%s/%s" % ( url, sub_path )
+        dir_request = Request( url, headers={ 'Content-Type': 'application/json', 'Accept': 'application/json' } )
+        dir_request.get_method = lambda: 'GET'
+        try:
+            dir_dict = json.loads( url_opener.open( dir_request ).read() )
+        except HTTPError:
+            # Punting: assume a lack of permissions at this level and try the next path segment
+            continue
+        break
+    if i is not None:
+        path = path[i + 1:]
+    else:
+        path = []
+    return ( dir_dict, path )
+
+
+def get_default_directory( url_opener, dm_url ):
+    return get_directory( url_opener, dm_url, ["%s/defaultdirectory" % ( GENOMESPACE_API_VERSION_STRING ) ] )[0]
+
+
+def get_personal_directory( url_opener, dm_url ):
+    return get_directory( url_opener, dm_url, [ "%s/personaldirectory" % ( GENOMESPACE_API_VERSION_STRING ) ] )[0]
+
+
+def create_directory( url_opener, directory_dict, new_dir, dm_url ):
+    payload = { "isDirectory": True }
+    for dir_slice in new_dir:
+        if dir_slice in ( '', '/', None ):
+            continue
+        url = '/'.join( ( directory_dict['url'], quote( dir_slice.replace( '/', '_' ), safe='' ) ) )
+        new_dir_request = Request( url, headers={ 'Content-Type': 'application/json', 'Accept': 'application/json' }, data=json.dumps( payload ) )
+        new_dir_request.get_method = lambda: 'PUT'
+        directory_dict = json.loads( url_opener.open( new_dir_request ).read() )
+    return directory_dict
+
+
+def get_genome_space_launch_apps( atm_url, url_opener, file_url, file_type ):
+    gs_request = Request( "%s/%s/webtool/descriptor" % ( atm_url, GENOMESPACE_API_VERSION_STRING ) )
+    gs_request.get_method = lambda: 'GET'
+    opened_gs_request = url_opener.open( gs_request )
+    webtool_descriptors = json.loads( opened_gs_request.read() )
+    webtools = []
+    for webtool in webtool_descriptors:
+        webtool_name = webtool.get( 'name' )
+        base_url = webtool.get( 'baseUrl' )
+        use_tool = False
+        for param in webtool.get( 'fileParameters', [] ):
+            for format in param.get( 'formats', [] ):
+                if format.get( 'name' ) == file_type:
+                    use_tool = True
+                    break
+            if use_tool:
+                file_param_name = param.get( 'name' )
+                # file_name_delimiters = param.get( 'nameDelimiters' )
+                if '?' in base_url:
+                    url_delimiter = "&"
+                else:
+                    url_delimiter = "?"
+                launch_url = "%s%s%s" % ( base_url, url_delimiter, urlencode( [ ( file_param_name, file_url ) ] ) )
+                webtools.append( ( launch_url, webtool_name ) )
+                break
+    return webtools
+
+
+def galaxy_code_get_genomespace_folders( genomespace_site='prod', trans=None, value=None, base_url=None, **kwd ):
+    if value:
+        if isinstance( value, list ):
+            value = value[0]  # single select, only 1 value
+        elif not isinstance( value, six.string_types ):
+            # unvalidated value
+            value = value.value
+            if isinstance( value, list ):
+                value = value[0]  # single select, only 1 value
+
+    def recurse_directory_dict( url_opener, cur_options, url ):
+        cur_directory = Request( url, headers={ 'Content-Type': 'application/json', 'Accept': 'application/json, text/plain' } )
+        cur_directory.get_method = lambda: 'GET'
+        # get url to upload to
+        try:
+            cur_directory = url_opener.open( cur_directory ).read()
+        except HTTPError as e:
+            log.debug( 'GenomeSpace export tool failed reading a directory "%s": %s' % ( url, e ) )
+            return  # bad url, go to next
+        cur_directory = json.loads( cur_directory )
+        directory = cur_directory.get( 'directory', {} )
+        contents = cur_directory.get( 'contents', [] )
+        if directory.get( 'isDirectory', False ):
+            selected = directory.get( 'path' ) == value
+            cur_options.append( { 'name': directory.get( 'name' ), 'value': directory.get( 'path' ), 'options': [], 'selected': selected } )
+            for sub_dir in contents:
+                if sub_dir.get( 'isDirectory', False ):
+                    recurse_directory_dict( url_opener, cur_options[-1]['options'], sub_dir.get( 'url' ) )
+
+    rval = []
+    if trans and trans.user:
+        username = trans.user.preferences.get( 'genomespace_username', None )
+        token = trans.user.preferences.get( 'genomespace_token', None )
+        if None not in ( username, token ):
+            # NB: it is possible, but unlikely for a user to swap GenomeSpace accounts around
+            # in the middle of interacting with tools, so we'll have several layers of caching by ids/values
+            if trans.user in GENOMESPACE_DIRECTORIES_BY_USER:
+                if username in GENOMESPACE_DIRECTORIES_BY_USER[ trans.user ]:
+                    if token in GENOMESPACE_DIRECTORIES_BY_USER[ trans.user ][ username ]:
+                        cache_dict = GENOMESPACE_DIRECTORIES_BY_USER[ trans.user ][ username ][ token ]
+                        if datetime.datetime.now() - cache_dict.get( 'time_loaded' ) > CACHE_TIME:
+                            # cache too old, need to reload, we'll just kill the whole trans.user
+                            del GENOMESPACE_DIRECTORIES_BY_USER[ trans.user ]
+                        else:
+                            rval = cache_dict.get( 'rval' )
+                    else:
+                        del GENOMESPACE_DIRECTORIES_BY_USER[ trans.user ]
+                else:
+                    del GENOMESPACE_DIRECTORIES_BY_USER[ trans.user ]
+            if not rval:
+                url_opener = get_cookie_opener( username, token, gs_toolname=os.environ.get( 'GENOMESPACE_TOOLNAME', None ) )
+                genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
+                dm_url = genomespace_site_dict['dmServer']
+                # get export root directory
+                # directory_dict = get_default_directory( url_opener, dm_url ).get( 'directory', None ) #This directory contains shares and other items outside of the users home
+                directory_dict = get_personal_directory( url_opener, dm_url ).get( 'directory', None )  # Limit export list to only user's home dir
+                if directory_dict is not None:
+                    recurse_directory_dict( url_opener, rval, directory_dict.get( 'url' ) )
+                # Save the cache
+                GENOMESPACE_DIRECTORIES_BY_USER[ trans.user ] = { username: { token: { 'time_loaded': datetime.datetime.now(), 'rval': rval } }  }
+    if not rval:
+        if not base_url:
+            base_url = '..'
+        rval = [ { 'name': 'Your GenomeSpace token appears to be <strong>expired</strong>, please <a href="%s">reauthenticate</a>.' % ( urljoin( base_url, 'user/openid_auth?openid_provider=genomespace&auto_associate=True' ) ), 'value': '', 'options': [], 'selected': False  } ]
+    return rval
+
+
+def send_file_to_genomespace( genomespace_site, username, token, source_filename, target_directory, target_filename, file_type, content_type, log_filename, gs_toolname ):
+    target_filename = target_filename.replace( '/', '-' )  # Slashes no longer allowed in filenames
+    url_opener = get_cookie_opener( username, token, gs_toolname=gs_toolname )
+    genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
+    dm_url = genomespace_site_dict['dmServer']
+    # get default directory
+    if target_directory and target_directory[0] == '/':
+        directory_dict, target_directory = get_directory( url_opener, dm_url, [ "%s/%s/%s" % ( GENOMESPACE_API_VERSION_STRING, 'file', target_directory[1] ) ] + target_directory[2:] )
+        directory_dict = directory_dict['directory']
+    else:
+        directory_dict = get_personal_directory( url_opener, dm_url )['directory']  # this is the base for the auto-generated galaxy export directories
+    # what directory to stuff this in
+    target_directory_dict = create_directory( url_opener, directory_dict, target_directory, dm_url )
+    content_length = os.path.getsize( source_filename )
+    input_file = open( source_filename, 'rb' )
+    if content_length > TARGET_SIMPLE_PUT_UPLOAD_SIZE:
+        # Determine sizes of each part.
+        split_count = content_length // TARGET_SPLIT_SIZE  # floor division: the part count must be an int
+        last_size = content_length - ( split_count * TARGET_SPLIT_SIZE )
+        sizes = [ TARGET_SPLIT_SIZE ] * split_count
+        if last_size:
+            if last_size < MIN_MULTIPART_UPLOAD_SIZE:
+                if sizes:
+                    sizes[-1] = sizes[-1] + last_size
+                else:
+                    sizes = [ last_size ]
+            else:
+                sizes.append( last_size )
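+        # Illustrative: a 2600 MB file (just above the ~2560 MB simple-put threshold) gives split_count = 10 and
+        # last_size = 100 MB, so sizes is ten 250 MB parts plus one 100 MB part; a remainder under the 5 MB
+        # multipart minimum would instead be folded into the final part.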
+        print("Performing multi-part upload in %i parts." % ( len( sizes ) ))
+        # get upload url
+        upload_url = "uploadinfo"
+        upload_url = "%s/%s/%s%s/%s" % ( dm_url, GENOMESPACE_API_VERSION_STRING, upload_url, target_directory_dict['path'], quote( target_filename, safe='' ) )
+        upload_request = Request( upload_url, headers={ 'Content-Type': 'application/json', 'Accept': 'application/json' } )
+        upload_request.get_method = lambda: 'GET'
+        upload_info = json.loads( url_opener.open( upload_request ).read() )
+        conn = S3Connection( aws_access_key_id=upload_info['amazonCredentials']['accessKey'],
+                             aws_secret_access_key=upload_info['amazonCredentials']['secretKey'],
+                             security_token=upload_info['amazonCredentials']['sessionToken'] )
+        # Cannot use conn.get_bucket due to permissions, manually create bucket object
+        bucket = boto.s3.bucket.Bucket( connection=conn, name=upload_info['s3BucketName'] )
+        mp = bucket.initiate_multipart_upload( upload_info['s3ObjectKey'] )
+        for i, part_size in enumerate( sizes, start=1 ):
+            fh = tempfile.TemporaryFile( 'wb+' )
+            while part_size:
+                if CHUNK_SIZE > part_size:
+                    read_size = part_size
+                else:
+                    read_size = CHUNK_SIZE
+                chunk = input_file.read( read_size )
+                fh.write( chunk )
+                part_size = part_size - read_size
+            fh.flush()
+            fh.seek(0)
+            mp.upload_part_from_file( fh, i )
+            fh.close()
+        upload_result = mp.complete_upload()
+    else:
+        print('Performing simple put upload.')
+        upload_url = "uploadurl"
+        content_md5 = hashlib.md5()
+        chunk_write( input_file, content_md5, target_method="update" )
+        input_file.seek( 0 )  # back to start, for uploading
+
+        upload_params = { 'Content-Length': content_length, 'Content-MD5': base64.standard_b64encode( content_md5.digest() ), 'Content-Type': content_type }
+        upload_url = "%s/%s/%s%s/%s?%s" % ( dm_url, GENOMESPACE_API_VERSION_STRING, upload_url, target_directory_dict['path'], quote( target_filename, safe='' ), urlencode( upload_params ) )
+        # Despite http://www.genomespace.org/team/specs/updated-dm-rest-api ("Every HTTP request to the Data Manager
+        # should include the Accept header with a preference for the media types application/json and
+        # application/text."), sending those headers here does not work, so the request is made without them.
+        new_file_request = Request( upload_url )
+        new_file_request.get_method = lambda: 'GET'
+        # get url to upload to
+        target_upload_url = url_opener.open( new_file_request ).read()
+        # upload file to determined url
+        upload_headers = dict( upload_params )
+        # upload_headers[ 'x-amz-meta-md5-hash' ] = content_md5.hexdigest()
+        upload_headers[ 'Accept' ] = 'application/json'
+        upload_file_request = Request( target_upload_url, headers=upload_headers, data=input_file )
+        upload_file_request.get_method = lambda: 'PUT'
+        upload_result = urlopen( upload_file_request ).read()
+    result_url = "%s/%s" % ( target_directory_dict['url'], quote( target_filename, safe='' ) )
+    # determine available gs launch apps
+    web_tools = get_genome_space_launch_apps( genomespace_site_dict['atmServer'], url_opener, result_url, file_type )
+    if log_filename:
+        log_file = open( log_filename, 'wb' )
+        log_file.write( "<html><head><title>File uploaded to GenomeSpace from Galaxy</title></head><body>\n" )
+        log_file.write( '<p>Uploaded <a href="%s">%s/%s</a> to GenomeSpace.</p>\n' % ( result_url, target_directory_dict['path'], target_filename ) )
+        if web_tools:
+            log_file.write( "<p>You may open this file directly in the following applications:</p>\n" )
+            log_file.write( '<p><ul>\n' )
+            for web_tool in web_tools:
+                log_file.write( '<li><a href="%s">%s</a></li>\n' % ( web_tool ) )
+            log_file.write( '</ul></p>\n' )
+        else:
+            log_file.write( '<p>There are no GenomeSpace applications available for file type: %s</p>\n' % ( file_type ) )
+        log_file.write( "</body></html>\n" )
+    return upload_result
+
+
+if __name__ == '__main__':
+    # Parse Command Line
+    parser = optparse.OptionParser()
+    parser.add_option( '-s', '--genomespace_site', dest='genomespace_site', action='store', type="string", default=None, help='genomespace_site' )
+    parser.add_option( '-t', '--token', dest='token', action='store', type="string", default=None, help='token' )
+    parser.add_option( '-u', '--username', dest='username', action='store', type="string", default=None, help='username' )
+    parser.add_option( '-d', '--dataset', dest='dataset', action='store', type="string", default=None, help='dataset' )
+    parser.add_option( '-f', '--filename', dest='filename', action='store', type="string", default=None, help='filename' )
+    parser.add_option( '-y', '--subdirectory', dest='subdirectory', action='append', type="string", default=None, help='subdirectory' )
+    parser.add_option( '', '--file_type', dest='file_type', action='store', type="string", default=None, help='file_type' )
+    parser.add_option( '-c', '--content_type', dest='content_type', action='store', type="string", default=None, help='content_type' )
+    parser.add_option( '-l', '--log', dest='log', action='store', type="string", default=None, help='log' )
+    parser.add_option( '', '--genomespace_toolname', dest='genomespace_toolname', action='store', type="string", default=DEFAULT_GENOMESPACE_TOOLNAME, help='value to use for gs-toolname, used in GenomeSpace internal logging' )
+
+    (options, args) = parser.parse_args()
+
+    send_file_to_genomespace( options.genomespace_site, options.username, options.token, options.dataset, [binascii.unhexlify(_) for _ in options.subdirectory], binascii.unhexlify( options.filename ), options.file_type, options.content_type, options.log, options.genomespace_toolname )
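+    # Illustrative invocation (hypothetical values; --subdirectory and --filename arrive hex-encoded from the
+    # tool XML, e.g. 67616c6178795f6578706f7274 == hexlify('galaxy_export'), 64617461 == hexlify('data')):
+    # python genomespace_exporter.py -s prod -u alice -t "$TOKEN" -d input.bed \
+    #     -y 67616c6178795f6578706f7274 -f 64617461 --file_type bed -c text/plain -l log.html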
diff --git a/tools/genomespace/genomespace_exporter.xml b/tools/genomespace/genomespace_exporter.xml
new file mode 100644
index 0000000..441961d
--- /dev/null
+++ b/tools/genomespace/genomespace_exporter.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace Exporter" id="genomespace_exporter" require_login="True" version="0.0.4">
+    <description> - send data to GenomeSpace</description>
+    <command interpreter="python">genomespace_exporter.py 
+        --genomespace_site "prod"
+        #assert $__user__, Exception( 'You must be logged in to use this tool.' )
+        #set $username = $__user__.preferences.get( 'genomespace_username', None )
+        #set $token = $__user__.preferences.get( 'genomespace_token', None )
+        #assert None not in ( $username, $token ), Exception( 'You must associate a GenomeSpace OpenID with your account and log in with it.' )
+        #import binascii
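+        ## Subdirectory and filename values are hex-encoded below (binascii.hexlify) and decoded in genomespace_exporter.py, so arbitrary characters survive shell quoting.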
+        --username "${username}"
+        --token "${token}"
+        --dataset "${input1}"
+        #if $subdirectory:
+            #for $subd in str( $subdirectory ).split( '/' ):
+                #if not $subd:
+                    --subdirectory "${ binascii.hexlify( '/' ) }"
+                #else:
+                    --subdirectory "${ binascii.hexlify( $subd ) }"
+                #end if
+            #end for
+        #else:
+            --subdirectory "${ binascii.hexlify( 'galaxy_export' ) }"
+            --subdirectory "${ binascii.hexlify( str( $base_url ).split( '://', 1 )[-1] ) }" ##Protocol removed by request
+        #end if
+        #if $filename:
+            --filename "${ binascii.hexlify( str( $filename ) ) }"
+        #else:
+            --filename "${ binascii.hexlify( "Galaxy History Item %s (%s) - %s: %s.%s" % ( $__app__.security.encode_id( $input1.id ), $__app__.security.encode_id( $output_log.id ), $input1.hid, $input1.name, $input1.ext ) ) }"
+        #end if
+        --file_type "${input1.ext}"
+        --content_type "${input1.get_mime()}"
+        --log "${output_log}"
+        --genomespace_toolname="\${GENOMESPACE_TOOLNAME:-Galaxy}"
+    </command>
+    <inputs>
+        <param format="data" name="input1" type="data" label="Send this dataset to GenomeSpace" />
+        <param name="base_url" type="baseurl" />
+        <param name="subdirectory" type="drill_down" display="radio" hierarchy="exact" multiple="False" optional="True" label="Choose Target Directory" dynamic_options="galaxy_code_get_genomespace_folders( genomespace_site = 'prod', trans=__trans__, value=__value__, input_dataset=input1, base_url=base_url )" help="Leave blank to generate automatically"/>
+        <param name="filename" type="text" size="80" label="Filename" help="Leave blank to generate automatically" />
+    </inputs>
+    <outputs>
+        <data format="html" name="output_log" />
+    </outputs>
+    <help>
+This tool allows you to export data to GenomeSpace. You must be logged in using your GenomeSpace OpenID; you can associate your OpenID credentials in the User Preferences panel.
+
+If you are having trouble with this tool, click here_ to refresh your GenomeSpace token before reporting errors.
+
+.. _here:  ${static_path}/../user/openid_auth?openid_provider=genomespace&amp;auto_associate=True
+    </help>
+    <options refresh="True"/>
+    <code file="genomespace_exporter.py" />
+</tool>
diff --git a/tools/genomespace/genomespace_file_browser.py b/tools/genomespace/genomespace_file_browser.py
new file mode 100644
index 0000000..07559fe
--- /dev/null
+++ b/tools/genomespace/genomespace_file_browser.py
@@ -0,0 +1,217 @@
+# Dan Blankenberg
+import json
+import optparse
+import os
+
+from six.moves import http_cookiejar
+from six.moves.urllib.parse import unquote_plus, urlencode, urlparse
+from six.moves.urllib.request import build_opener, HTTPCookieProcessor, Request, urlopen
+
+from galaxy.datatypes import sniff
+from galaxy.datatypes.registry import Registry
+
+GENOMESPACE_API_VERSION_STRING = "v1.0"
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
+DEFAULT_GENOMESPACE_TOOLNAME = 'Galaxy'
+FILENAME_VALID_CHARS = '.-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ '
+
+CHUNK_SIZE = 2**20  # 1mb
+
+AUTO_GALAXY_EXT = "auto"
+DEFAULT_GALAXY_EXT = "data"
+
+# genomespace format identifier is the URL
+GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT = {}  # TODO: fix this so it is not a global variable
+# TODO: we should use a better way to set up this mapping
+GENOMESPACE_EXT_TO_GALAXY_EXT = {'rifles': 'rifles',
+                                 'lifes': 'lifes',
+                                 'cn': 'cn',
+                                 'GTF': 'gtf',
+                                 'res': 'res',
+                                 'xcn': 'xcn',
+                                 'lowercasetxt': 'lowercasetxt',
+                                 'bed': 'bed',
+                                 'CBS': 'cbs',
+                                 'genomicatab': 'genomicatab',
+                                 'gxp': 'gxp',
+                                 'reversedtxt': 'reversedtxt',
+                                 'nowhitespace': 'nowhitespace',
+                                 'unknown': 'unknown',
+                                 'txt': 'txt',
+                                 'uppercasetxt': 'uppercasetxt',
+                                 'GISTIC': 'gistic',
+                                 'GFF': 'gff',
+                                 'gmt': 'gmt',
+                                 'gct': 'gct'}
+
+GENOMESPACE_UNKNOWN_FORMAT_KEY = 'unknown'
+GENOMESPACE_FORMAT_IDENTIFIER_UNKNOWN = None
+
+
+def chunk_write( source_stream, target_stream, source_method="read", target_method="write" ):
+    source_method = getattr( source_stream, source_method )
+    target_method = getattr( target_stream, target_method )
+    while True:
+        chunk = source_method( CHUNK_SIZE )
+        if chunk:
+            target_method( chunk )
+        else:
+            break
+
+
+def get_cookie_opener( gs_username, gs_token, gs_toolname=None ):
+    """ Create a GenomeSpace cookie opener """
+    cj = http_cookiejar.CookieJar()
+    for cookie_name, cookie_value in [ ( 'gs-token', gs_token ), ( 'gs-username', gs_username ) ]:
+        # create a super-cookie, valid for all domains
+        cookie = http_cookiejar.Cookie(version=0, name=cookie_name, value=cookie_value, port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False )
+        cj.set_cookie( cookie )
+    cookie_opener = build_opener( HTTPCookieProcessor( cj ) )
+    cookie_opener.addheaders.append( ( 'gs-toolname', gs_toolname or DEFAULT_GENOMESPACE_TOOLNAME ) )
+    return cookie_opener
+
+
+def get_galaxy_ext_from_genomespace_format_url( url_opener, file_format_url ):
+    ext = GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT.get( file_format_url, None )
+    if ext is not None:
+        ext = GENOMESPACE_EXT_TO_GALAXY_EXT.get( ext, None )
+    if ext is None:
+        # could check content type, etc here
+        ext = AUTO_GALAXY_EXT
+    return ext
+
+
+def get_genomespace_site_urls():
+    genomespace_sites = {}
+    for line in urlopen( GENOMESPACE_SERVER_URL_PROPERTIES ).read().split( '\n' ):
+        line = line.rstrip()
+        if not line or line.startswith( "#" ):
+            continue
+        server, line = line.split( '.', 1 )
+        if server not in genomespace_sites:
+            genomespace_sites[server] = {}
+        line = line.split( "=", 1 )
+        genomespace_sites[server][line[0]] = line[1]
+    return genomespace_sites
+
+
+def set_genomespace_format_identifiers( url_opener, dm_site ):
+    gs_request = Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) )
+    gs_request.get_method = lambda: 'GET'
+    opened_gs_request = url_opener.open( gs_request )
+    genomespace_formats = json.loads( opened_gs_request.read() )
+    for format in genomespace_formats:
+        GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name']
+    global GENOMESPACE_FORMAT_IDENTIFIER_UNKNOWN
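+    # Invert the url->name map so we can look up the URL that identifies the 'unknown' format, keeping the prior value if it is absent.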
+    GENOMESPACE_FORMAT_IDENTIFIER_UNKNOWN = dict( ( x[1], x[0] ) for x in GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT.items() ).get( GENOMESPACE_UNKNOWN_FORMAT_KEY, GENOMESPACE_FORMAT_IDENTIFIER_UNKNOWN )
+
+
+def download_from_genomespace_file_browser( json_parameter_file, genomespace_site, gs_toolname ):
+    json_params = json.loads( open( json_parameter_file, 'r' ).read() )
+    datasource_params = json_params.get( 'param_dict' )
+    username = datasource_params.get( "gs-username", None )
+    token = datasource_params.get( "gs-token", None )
+    assert None not in [ username, token ], "Missing GenomeSpace username or token."
+    output_filename = datasource_params.get( "output", None )
+    dataset_id = json_params['output_data'][0]['dataset_id']
+    hda_id = json_params['output_data'][0]['hda_id']
+    url_opener = get_cookie_opener( username, token, gs_toolname=gs_toolname )
+    # load and set genomespace format ids to galaxy exts
+    genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
+    set_genomespace_format_identifiers( url_opener, genomespace_site_dict['dmServer'] )
+
+    file_url_prefix = "fileUrl"
+    file_type_prefix = "fileFormat"
+    metadata_parameter_file = open( json_params['job_config']['TOOL_PROVIDED_JOB_METADATA_FILE'], 'wb' )
+
+    # setup datatypes registry for sniffing
+    datatypes_registry = Registry()
+    datatypes_registry.load_datatypes( root_dir=json_params[ 'job_config' ][ 'GALAXY_ROOT_DIR' ], config=json_params[ 'job_config' ][ 'GALAXY_DATATYPES_CONF_FILE' ] )
+
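+    # The file browser posts each selection as fileUrl<N>/fileFormat<N> parameters; collect the numeric suffixes so the files can be walked in order.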
+    file_numbers = []
+    for name in datasource_params.keys():
+        if name.startswith( file_url_prefix ):
+            name = name[len( file_url_prefix ):]
+            file_numbers.append( int( name ) )
+    if not file_numbers:
+        if output_filename:
+            open( output_filename, 'wb' )  # erase contents of file
+        raise Exception( "You must select at least one file to import into Galaxy." )
+    file_numbers.sort()
+    used_filenames = []
+    for file_num in file_numbers:
+        url_key = "%s%i" % ( file_url_prefix, file_num )
+        download_url = datasource_params.get( url_key, None )
+        if download_url is None:
+            break
+        filetype_key = "%s%i" % ( file_type_prefix, file_num )
+        filetype_url = datasource_params.get( filetype_key, None )
+        galaxy_ext = get_galaxy_ext_from_genomespace_format_url( url_opener, filetype_url )
+        formatted_download_url = "%s?%s" % ( download_url, urlencode( [ ( 'dataformat', filetype_url ) ] ) )
+        new_file_request = Request( formatted_download_url )
+        new_file_request.get_method = lambda: 'GET'
+        target_download_url = url_opener.open( new_file_request )
+        filename = None
+        if 'Content-Disposition' in target_download_url.info():
+            # If the response has Content-Disposition, try to get filename from it
+            content_disposition = dict( x.strip().split('=') if '=' in x else ( x.strip(), '' ) for x in target_download_url.info()['Content-Disposition'].split( ';' ) )
+            if 'filename' in content_disposition:
+                filename = content_disposition[ 'filename' ].strip( "\"'" )
+        if not filename:
+            parsed_url = urlparse( download_url )
+            filename = unquote_plus( parsed_url[2].split( '/' )[-1] )
+        if not filename:
+            filename = download_url
+        metadata_dict = None
+        original_filename = filename
+        if output_filename is None:
+            filename = ''.join( c if c in FILENAME_VALID_CHARS else '-' for c in filename )
+            while filename in used_filenames:
+                filename = "-%s" % filename
+            used_filenames.append( filename )
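+            # Galaxy's extra-outputs convention: files named primary_<hda_id>_<name>_visible_<ext> in the working directory are collected as additional datasets.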
+            output_filename = os.path.join( os.getcwd(), 'primary_%i_%s_visible_%s' % ( hda_id, filename, galaxy_ext ) )
+
+            metadata_dict = dict( type='new_primary_dataset',
+                                  base_dataset_id=dataset_id,
+                                  ext=galaxy_ext,
+                                  filename=output_filename,
+                                  name="GenomeSpace import on %s" % ( original_filename ) )
+        else:
+            if dataset_id is not None:
+                metadata_dict = dict( type='dataset',
+                                      dataset_id=dataset_id,
+                                      ext=galaxy_ext,
+                                      name="GenomeSpace import on %s" % ( filename ) )
+        output_file = open( output_filename, 'wb' )
+        chunk_write( target_download_url, output_file )
+        output_file.close()
+
+        if ( galaxy_ext == AUTO_GALAXY_EXT or filetype_url == GENOMESPACE_FORMAT_IDENTIFIER_UNKNOWN ) and metadata_dict:
+            # try to sniff datatype
+            try:
+                galaxy_ext = sniff.handle_uploaded_dataset_file( output_filename, datatypes_registry )
+            except Exception:
+                # sniff failed
+                galaxy_ext = original_filename.rsplit( '.', 1 )[-1]
+                if galaxy_ext not in datatypes_registry.datatypes_by_extension:
+                    galaxy_ext = DEFAULT_GALAXY_EXT
+            metadata_dict[ 'ext' ] = galaxy_ext
+
+        output_filename = None  # only have one filename available
+
+        # write out metadata info
+        if metadata_dict:
+            metadata_parameter_file.write( "%s\n" % json.dumps( metadata_dict ) )
+
+    metadata_parameter_file.close()
+    return True
+
+
+if __name__ == '__main__':
+    parser = optparse.OptionParser()
+    parser.add_option( '-p', '--json_parameter_file', dest='json_parameter_file', action='store', type="string", default=None, help='json_parameter_file' )
+    parser.add_option( '-s', '--genomespace_site', dest='genomespace_site', action='store', type="string", default=None, help='genomespace_site' )
+    parser.add_option( '', '--genomespace_toolname', dest='genomespace_toolname', action='store', type="string", default=DEFAULT_GENOMESPACE_TOOLNAME, help='value to use for gs-toolname, used in GenomeSpace internal logging' )
+    (options, args) = parser.parse_args()
+
+    download_from_genomespace_file_browser( options.json_parameter_file, options.genomespace_site, options.genomespace_toolname )
diff --git a/tools/genomespace/genomespace_file_browser_dev.xml b/tools/genomespace/genomespace_file_browser_dev.xml
new file mode 100644
index 0000000..700576c
--- /dev/null
+++ b/tools/genomespace/genomespace_file_browser_dev.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace import" id="genomespace_file_browser_dev" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
+    <description>from file browser (development)</description>
+    <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "dev" --genomespace_toolname="\${GENOMESPACE_TOOLNAME:-Galaxy}"</command>
+    <inputs action="https://dmdev.genomespace.org:8444/datamanager/defaultdirectory" check_values="False" method="post"> 
+        <display>go to GenomeSpace Data Manager </display>
+        <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_dev&runtool_btn=Execute" />
+        <param name="appName" type="hidden" value="Galaxy" />
+    </inputs>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="auto" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/genomespace/genomespace_file_browser_prod.xml b/tools/genomespace/genomespace_file_browser_prod.xml
new file mode 100644
index 0000000..54ab784
--- /dev/null
+++ b/tools/genomespace/genomespace_file_browser_prod.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace import" id="genomespace_file_browser_prod" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
+    <description>from file browser</description>
+    <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "prod" --genomespace_toolname="\${GENOMESPACE_TOOLNAME:-Galaxy}"</command>
+    <inputs action="https://dm.genomespace.org/datamanager/defaultdirectory" check_values="False" method="post"> 
+        <display>go to GenomeSpace Data Manager </display>
+        <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_prod&runtool_btn=Execute" />
+        <param name="appName" type="hidden" value="Galaxy" />
+    </inputs>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="auto" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/genomespace/genomespace_file_browser_test.xml b/tools/genomespace/genomespace_file_browser_test.xml
new file mode 100644
index 0000000..5e48321
--- /dev/null
+++ b/tools/genomespace/genomespace_file_browser_test.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace import" id="genomespace_file_browser_test" tool_type="data_source" add_galaxy_url="False" force_history_refresh="True" version="0.0.1">
+    <description>from file browser (test)</description>
+    <command interpreter="python">genomespace_file_browser.py --json_parameter_file "${output}" --genomespace_site "test" --genomespace_toolname="\${GENOMESPACE_TOOLNAME:-Galaxy}"</command>
+    <inputs action="https://dmtest.genomespace.org:8444/datamanager/defaultdirectory" check_values="False" method="post"> 
+        <display>go to GenomeSpace Data Manager </display>
+        <param name="appCallbackUrl" type="baseurl" value="/tool_runner?tool_id=genomespace_file_browser_test&runtool_btn=Execute" />
+        <param name="appName" type="hidden" value="Galaxy" />
+    </inputs>
+    <uihints minwidth="800"/>
+    <outputs>
+        <data name="output" format="auto" />
+    </outputs>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/genomespace/genomespace_importer.py b/tools/genomespace/genomespace_importer.py
new file mode 100644
index 0000000..c06f00a
--- /dev/null
+++ b/tools/genomespace/genomespace_importer.py
@@ -0,0 +1,220 @@
+# Dan Blankenberg
+
+import json
+import optparse
+import os
+import shutil
+import tempfile
+
+from six.moves import http_cookiejar
+from six.moves.urllib.parse import parse_qs, unquote_plus, urlparse
+from six.moves.urllib.request import build_opener, HTTPCookieProcessor, Request, urlopen
+
+from galaxy.datatypes import sniff
+from galaxy.datatypes.registry import Registry
+
+GENOMESPACE_API_VERSION_STRING = "v1.0"
+GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING )
+DEFAULT_GENOMESPACE_TOOLNAME = 'Galaxy'
+FILENAME_VALID_CHARS = '.-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ '
+
+CHUNK_SIZE = 2**20  # 1mb
+
+DEFAULT_GALAXY_EXT = "data"
+
+# genomespace format identifier is the URL
+GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT = {}  # TODO: fix this so it is not a global variable
+# TODO: we should use a better way to set up this mapping
+GENOMESPACE_EXT_TO_GALAXY_EXT = {'rifles': 'rifles',
+                                 'lifes': 'lifes',
+                                 'cn': 'cn',
+                                 'GTF': 'gtf',
+                                 'res': 'res',
+                                 'xcn': 'xcn',
+                                 'lowercasetxt': 'lowercasetxt',
+                                 'bed': 'bed',
+                                 'CBS': 'cbs',
+                                 'genomicatab': 'genomicatab',
+                                 'gxp': 'gxp',
+                                 'reversedtxt': 'reversedtxt',
+                                 'nowhitespace': 'nowhitespace',
+                                 'unknown': 'unknown',
+                                 'txt': 'txt',
+                                 'uppercasetxt': 'uppercasetxt',
+                                 'GISTIC': 'gistic',
+                                 'GFF': 'gff',
+                                 'gmt': 'gmt',
+                                 'gct': 'gct'}
+
+
+def chunk_write( source_stream, target_stream, source_method="read", target_method="write" ):
+    source_method = getattr( source_stream, source_method )
+    target_method = getattr( target_stream, target_method )
+    while True:
+        chunk = source_method( CHUNK_SIZE )
+        if chunk:
+            target_method( chunk )
+        else:
+            break
+
+
+def get_cookie_opener( gs_username, gs_token, gs_toolname=None ):
+    """ Create a GenomeSpace cookie opener """
+    cj = http_cookiejar.CookieJar()
+    for cookie_name, cookie_value in [ ( 'gs-token', gs_token ), ( 'gs-username', gs_username ) ]:
+        # create a super-cookie, valid for all domains
+        cookie = http_cookiejar.Cookie(version=0, name=cookie_name, value=cookie_value, port=None, port_specified=False, domain='', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False )
+        cj.set_cookie( cookie )
+    cookie_opener = build_opener( HTTPCookieProcessor( cj ) )
+    cookie_opener.addheaders.append( ( 'gs-toolname', gs_toolname or DEFAULT_GENOMESPACE_TOOLNAME ) )
+    return cookie_opener
+
+
+def get_galaxy_ext_from_genomespace_format_url( url_opener, file_format_url, default=DEFAULT_GALAXY_EXT ):
+    ext = GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT.get( file_format_url, None )
+    if ext is not None:
+        ext = GENOMESPACE_EXT_TO_GALAXY_EXT.get( ext, None )
+    if ext is None:
+        # could check content type, etc here
+        ext = default
+    return ext
+
+
+def get_genomespace_site_urls():
+    genomespace_sites = {}
+    for line in urlopen( GENOMESPACE_SERVER_URL_PROPERTIES ).read().split( '\n' ):
+        line = line.rstrip()
+        if not line or line.startswith( "#" ):
+            continue
+        server, line = line.split( '.', 1 )
+        if server not in genomespace_sites:
+            genomespace_sites[server] = {}
+        line = line.split( "=", 1 )
+        genomespace_sites[server][line[0]] = line[1]
+    return genomespace_sites
+
+
+def set_genomespace_format_identifiers( url_opener, dm_site ):
+    gs_request = Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) )
+    gs_request.get_method = lambda: 'GET'
+    opened_gs_request = url_opener.open( gs_request )
+    genomespace_formats = json.loads( opened_gs_request.read() )
+    for format in genomespace_formats:
+        GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name']
+
+
+def download_from_genomespace_importer( username, token, json_parameter_file, genomespace_site, gs_toolname ):
+    json_params = json.loads( open( json_parameter_file, 'r' ).read() )
+    datasource_params = json_params.get( 'param_dict' )
+    assert None not in [ username, token ], "Missing GenomeSpace username or token."
+    output_filename = datasource_params.get( "output_file1", None )
+    dataset_id = base_dataset_id = json_params['output_data'][0]['dataset_id']
+    hda_id = json_params['output_data'][0]['hda_id']
+    url_opener = get_cookie_opener( username, token, gs_toolname=gs_toolname )
+    # load and set genomespace format ids to galaxy exts
+    genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
+    set_genomespace_format_identifiers( url_opener, genomespace_site_dict['dmServer'] )
+    file_url_name = "URL"
+    metadata_parameter_file = open( json_params['job_config']['TOOL_PROVIDED_JOB_METADATA_FILE'], 'wb' )
+    # setup datatypes registry for sniffing
+    datatypes_registry = Registry()
+    datatypes_registry.load_datatypes( root_dir=json_params[ 'job_config' ][ 'GALAXY_ROOT_DIR' ], config=json_params[ 'job_config' ][ 'GALAXY_DATATYPES_CONF_FILE' ] )
+    url_param = datasource_params.get( file_url_name, None )
+    used_filenames = []
+    for download_url in url_param.split( ',' ):
+        using_temp_file = False
+        parsed_url = urlparse( download_url )
+        query_params = parse_qs( parsed_url[4] )
+        # write file to disk
+        new_file_request = Request( download_url )
+        new_file_request.get_method = lambda: 'GET'
+        target_download_url = url_opener.open( new_file_request )
+        filename = None
+        if 'Content-Disposition' in target_download_url.info():
+            content_disposition = dict( x.strip().split('=') if '=' in x else ( x.strip(), '' ) for x in target_download_url.info()['Content-Disposition'].split( ';' ) )
+            if 'filename' in content_disposition:
+                filename = content_disposition[ 'filename' ].strip( "\"'" )
+        if not filename:
+            parsed_url = urlparse( download_url )
+            query_params = parse_qs( parsed_url[4] )
+            filename = unquote_plus( parsed_url[2].split( '/' )[-1] )
+        if not filename:
+            filename = download_url
+        if output_filename is None:
+            # need to use a temp file here, because we do not know the ext yet
+            using_temp_file = True
+            output_filename = tempfile.NamedTemporaryFile( prefix='tmp-genomespace-importer-' ).name
+        output_file = open( output_filename, 'wb' )
+        chunk_write( target_download_url, output_file )
+        output_file.close()
+
+        # determine file format
+        file_type = None
+        if 'dataformat' in query_params:  # this is a converted dataset
+            file_type = query_params[ 'dataformat' ][0]
+            file_type = get_galaxy_ext_from_genomespace_format_url( url_opener, file_type )
+        else:
+            try:
+                # get and use GSMetadata object
+                download_file_path = download_url.split( "%s/file/" % ( genomespace_site_dict['dmServer'] ), 1)[-1]  # FIXME: this is a fragile way to derive the path for the metadata lookup; there should be a way to query the API by download URL to reach the metadata object
+                metadata_request = Request( "%s/%s/filemetadata/%s" % ( genomespace_site_dict['dmServer'], GENOMESPACE_API_VERSION_STRING, download_file_path ) )
+                metadata_request.get_method = lambda: 'GET'
+                metadata_url = url_opener.open( metadata_request )
+                file_metadata_dict = json.loads( metadata_url.read() )
+                metadata_url.close()
+                file_type = file_metadata_dict.get( 'dataFormat', None )
+                if file_type and file_type.get( 'url' ):
+                    file_type = file_type.get( 'url' )
+                    file_type = get_galaxy_ext_from_genomespace_format_url( url_opener, file_type, default=None )
+            except Exception:
+                pass  # metadata lookup is best-effort; fall back to sniffing below
+        if file_type is None:
+            # try to sniff datatype
+            try:
+                file_type = sniff.handle_uploaded_dataset_file( output_filename, datatypes_registry )
+            except Exception:
+                pass  # sniff failed; fall back to the URL extension below
+        if file_type is None and '.' in parsed_url[2]:
+            # still no known datatype, fall back to using extension
+            file_type = parsed_url[2].rsplit( '.', 1 )[-1]
+            file_type = GENOMESPACE_EXT_TO_GALAXY_EXT.get( file_type, file_type )
+        if file_type is None:
+            # use default extension (e.g. 'data')
+            file_type = DEFAULT_GALAXY_EXT
+
+        # save json info for single primary dataset
+        if dataset_id is not None:
+            metadata_parameter_file.write( "%s\n" % json.dumps( dict( type='dataset',
+                                                                      dataset_id=dataset_id,
+                                                                      ext=file_type,
+                                                                      name="GenomeSpace importer on %s" % ( filename ) ) ) )
+        # if using tmp file, move the file to the new file path dir to get scooped up later
+        if using_temp_file:
+            original_filename = filename
+            filename = ''.join( c if c in FILENAME_VALID_CHARS else '-' for c in filename )
+            while filename in used_filenames:
+                filename = "-%s" % filename
+            used_filenames.append( filename )
+            target_output_filename = os.path.join( os.getcwd(), 'primary_%i_%s_visible_%s' % ( hda_id, filename, file_type ) )
+            shutil.move( output_filename, target_output_filename )
+            metadata_parameter_file.write( "%s\n" % json.dumps( dict( type='new_primary_dataset',
+                                                                      base_dataset_id=base_dataset_id,
+                                                                      ext=file_type,
+                                                                      filename=target_output_filename,
+                                                                      name="GenomeSpace importer on %s" % ( original_filename ) ) ) )
+        dataset_id = None  # only one primary dataset available
+        output_filename = None  # only have one filename available
+    metadata_parameter_file.close()
+    return True
+
+
+if __name__ == '__main__':
+    parser = optparse.OptionParser()
+    parser.add_option( '-p', '--json_parameter_file', dest='json_parameter_file', action='store', type="string", default=None, help='json_parameter_file' )
+    parser.add_option( '-s', '--genomespace_site', dest='genomespace_site', action='store', type="string", default=None, help='genomespace_site' )
+    parser.add_option( '-t', '--token', dest='token', action='store', type="string", default=None, help='token' )
+    parser.add_option( '-u', '--username', dest='username', action='store', type="string", default=None, help='username' )
+    parser.add_option( '', '--genomespace_toolname', dest='genomespace_toolname', action='store', type="string", default=DEFAULT_GENOMESPACE_TOOLNAME, help='value to use for gs-toolname, used in GenomeSpace internal logging' )
+    (options, args) = parser.parse_args()
+
+    download_from_genomespace_importer( options.username, options.token, options.json_parameter_file, options.genomespace_site, options.genomespace_toolname )
diff --git a/tools/genomespace/genomespace_importer.xml b/tools/genomespace/genomespace_importer.xml
new file mode 100644
index 0000000..ab77dd7
--- /dev/null
+++ b/tools/genomespace/genomespace_importer.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<tool name="GenomeSpace Importer" id="genomespace_importer" tool_type="data_source" force_history_refresh="True" hidden="True" display_interface="False" require_login="True" version="0.0.2">
+    <description> - receive data from GenomeSpace</description>
+    <command interpreter="python">genomespace_importer.py
+        --genomespace_site "prod"
+        #assert $__user__, Exception( 'You must be logged in to use this tool.' )
+        #set $username = $__user__.preferences.get( 'genomespace_username', None )
+        #set $token = $__user__.preferences.get( 'genomespace_token', None )
+        #assert None not in ( $username, $token ), Exception( 'You must associate a GenomeSpace OpenID with your account and log in with it.' )
+        --username "${username}"
+        --token "${token}"
+        --json_parameter_file "${output_file1}"
+        --genomespace_toolname="\${GENOMESPACE_TOOLNAME:-Galaxy}"
+    </command>
+    <inputs check_values="False">
+        <!-- <param name="file_name" type="text" value="" /> -->
+        <param name="URL" type="hidden" value="" />
+    </inputs>
+    <outputs>
+        <data format="auto" name="output_file1" />
+    </outputs>
+    <help>
+       This tool receives data sent to your Galaxy history from GenomeSpace; it is normally launched from GenomeSpace rather than run directly.
+    </help>
+    <options sanitize="False" refresh="True"/>
+</tool>
diff --git a/tools/maf/genebed_maf_to_fasta.xml b/tools/maf/genebed_maf_to_fasta.xml
new file mode 100644
index 0000000..42c0473
--- /dev/null
+++ b/tools/maf/genebed_maf_to_fasta.xml
@@ -0,0 +1,95 @@
+<tool id="GeneBed_Maf_Fasta2" name="Stitch Gene blocks" version="1.0.1">
+  <description>given a set of coding exon intervals</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">
+    #if $maf_source_type.maf_source == "user" #interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_file --mafIndex=$maf_source_type.maf_file.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --mafSourceType=$maf_source_type.maf_source --geneBED --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
+    #else                                     #interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_identifier --interval_file=$input1 --output_file=$out_file1 --mafSourceType=$maf_source_type.maf_source  --geneBED --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
+    #end if# --overwrite_with_gaps=$overwrite_with_gaps
+  </command>
+  <inputs>
+    <param name="input1" type="data" format="bed" label="Gene BED File">
+      <validator type="unspecified_build" />
+      <validator type="expression" message="Input must be in BED12 format.">value.metadata.columns >= 12</validator> <!-- allow 12+ columns, not as strict as possible. TODO: only list bed files with 12+ columns -->
+    </param>
+    <conditional name="maf_source_type">
+      <param name="maf_source" type="select" label="MAF Source">
+        <option value="cached" selected="true">Locally Cached Alignments</option>
+        <option value="user">Alignments in Your History</option>
+      </param>
+      <when value="user">
+        <param name="maf_file" type="data" format="maf" label="MAF File">
+          <validator type="dataset_ok_validator" />
+          <options>
+            <filter type="data_meta" ref="input1" key="dbkey" />
+          </options>
+        </param>
+        <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+          <options>
+            <filter type="data_meta" ref="maf_file" key="species" />
+          </options>
+        </param>
+      </when>
+      <when value="cached">
+        <param name="maf_identifier" type="select" label="MAF Type" >
+          <options from_file="maf_index.loc">
+            <column name="name" index="0"/>
+            <column name="value" index="1"/>
+            <column name="dbkey" index="2"/>
+            <column name="species" index="3"/>
+            <filter type="data_meta" ref="input1" key="dbkey" column="2" multiple="True" separator=","/>
+            <validator type="no_options" message="No alignments are available for the build associated with the selected interval file"/>
+          </options>
+        </param>
+        <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+          <options from_file="maf_index.loc">
+            <column name="uid" index="1"/>
+            <column name="value" index="3"/>
+            <column name="name" index="3"/>
+            <filter type="param_value" ref="maf_identifier" name="uid" column="1"/>
+            <filter type="multiple_splitter" column="3" separator=","/>
+          </options>
+        </param>
+      </when>
+    </conditional>
+    <param name="overwrite_with_gaps" type="select" label="Split into Gapless MAF blocks" help="When set to Yes, blocks are divided around gaps appearing in any species. This will prevent gaps occurring in the interior of the sequence for an aligning species from overwriting a nucleotide found for the same position in a lower-scoring block.">
+      <option value="True" selected="true">No</option>
+      <option value="False">Yes</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="fasta" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="8.bed"/>
+      <param name="maf_source" value="cached"/>in aligning species
+      <param name="maf_identifier" value="8_WAY_MULTIZ_hg17"/>
+      <param name="species" value="canFam1,hg17,mm5,panTro1,rn3"/>
+      <param name="overwrite_with_gaps" value="True"/>
+      <output name="out_file1" file="gene_bed_maf_to_fasta_out.fasta" />
+    </test>
+    <test>
+      <param name="input1" value="8.bed"/>
+      <param name="maf_source" value="user"/>
+      <param name="maf_file" value="4.maf"/>
+      <param name="species" value="hg17,panTro1"/>
+      <param name="overwrite_with_gaps" value="True"/>
+      <output name="out_file1" file="gene_bed_maf_to_fasta_user_out.fasta" />
+    </test>
+  </tests>
+  <help>
+**What it does**
+
+The coding sequence of a gene is usually composed of several coding exons. Each coding exon is an individual genomic region which, when concatenated with the others, constitutes the coding sequence. A single genomic region can be covered by multiple alignment blocks, and in many cases it is desirable to stitch these alignment blocks together. This tool accepts a list of gene-based intervals in Gene BED format. For every interval it performs the following:
+
+  * finds all MAF blocks that overlap the coding regions;
+  * sorts MAF blocks by alignment score;
+  * stitches blocks together and resolves overlaps based on alignment score;
+  * outputs alignments in FASTA format.
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/interval2maf.py b/tools/maf/interval2maf.py
new file mode 100755
index 0000000..73cf6c8
--- /dev/null
+++ b/tools/maf/interval2maf.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+"""
+Reads a list of intervals and a maf. Produces a new maf containing the
+blocks or parts of blocks in the original that overlapped the intervals.
+
+If a MAF file, not UID, is provided the MAF file is indexed before being processed.
+
+NOTE: If two intervals overlap the same block it will be written twice.
+
+usage: %prog maf_file [options]
+   -d, --dbkey=d: Database key, ie hg17
+   -c, --chromCol=c: Column of Chr
+   -s, --startCol=s: Column of Start
+   -e, --endCol=e: Column of End
+   -S, --strandCol=S: Column of Strand
+   -t, --mafType=t: Type of MAF source to use
+   -m, --mafFile=m: Path of source MAF file, if not using cached version
+   -I, --mafIndex=I: Path of precomputed source MAF file index, if not using cached version
+   -i, --interval_file=i:       Input interval file
+   -o, --output_file=o:      Output MAF file
+   -p, --species=p: Species to include in output
+   -P, --split_blocks_by_species=P: Split blocks by species
+   -r, --remove_all_gap_columns=r: Remove all Gap columns
+   -l, --indexLocation=l: Override default maf_index.loc file
+   -z, --mafIndexFile=z: Directory of local maf index file ( maf_index.loc or maf_pairwise.loc )
+"""
+# Dan Blankenberg
+from __future__ import print_function
+
+import bx.align.maf
+import bx.intervals.io
+from bx.cookbook import doc_optparse
+
+from galaxy.tools.util import maf_utilities
+
+
+def __main__():
+    index = index_filename = None
+
+    # Parse Command Line
+    options, args = doc_optparse.parse( __doc__ )
+
+    if options.dbkey:
+        dbkey = options.dbkey
+    else:
+        dbkey = None
+    if dbkey in [None, "?"]:
+        maf_utilities.tool_fail( "You must specify a proper build in order to extract alignments. You can specify your genome build by clicking on the pencil icon associated with your interval file." )
+
+    species = maf_utilities.parse_species_option( options.species )
+
+    if options.chromCol:
+        chromCol = int( options.chromCol ) - 1
+    else:
+        maf_utilities.tool_fail( "Chromosome column not set, click the pencil icon in the history item to set the metadata attributes." )
+
+    if options.startCol:
+        startCol = int( options.startCol ) - 1
+    else:
+        maf_utilities.tool_fail( "Start column not set, click the pencil icon in the history item to set the metadata attributes." )
+
+    if options.endCol:
+        endCol = int( options.endCol ) - 1
+    else:
+        maf_utilities.tool_fail( "End column not set, click the pencil icon in the history item to set the metadata attributes." )
+
+    if options.strandCol:
+        strandCol = int( options.strandCol ) - 1
+    else:
+        strandCol = -1
+
+    if options.interval_file:
+        interval_file = options.interval_file
+    else:
+        maf_utilities.tool_fail( "Input interval file has not been specified." )
+
+    if options.output_file:
+        output_file = options.output_file
+    else:
+        maf_utilities.tool_fail( "Output file has not been specified." )
+
+    split_blocks_by_species = remove_all_gap_columns = False
+    if options.split_blocks_by_species and options.split_blocks_by_species == 'split_blocks_by_species':
+        split_blocks_by_species = True
+        if options.remove_all_gap_columns and options.remove_all_gap_columns == 'remove_all_gap_columns':
+            remove_all_gap_columns = True
+    else:
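+        # all-gap columns are always removed when blocks are not split by species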
+        remove_all_gap_columns = True
+    # Finish parsing command line
+
+    # Open indexed access to MAFs
+    if options.mafType:
+        if options.indexLocation:
+            index = maf_utilities.maf_index_by_uid( options.mafType, options.indexLocation )
+        else:
+            index = maf_utilities.maf_index_by_uid( options.mafType, options.mafIndexFile )
+        if index is None:
+            maf_utilities.tool_fail( "The MAF source specified (%s) appears to be invalid." % ( options.mafType ) )
+    elif options.mafFile:
+        index, index_filename = maf_utilities.open_or_build_maf_index( options.mafFile, options.mafIndex, species=[dbkey] )
+        if index is None:
+            maf_utilities.tool_fail( "Your MAF file appears to be malformed." )
+    else:
+        maf_utilities.tool_fail( "Desired source MAF type has not been specified." )
+
+    # Create MAF writer
+    out = bx.align.maf.Writer( open(output_file, "w") )
+
+    # Iterate over input regions
+    num_blocks = 0
+    num_regions = None
+    for num_regions, region in enumerate( bx.intervals.io.NiceReaderWrapper( open( interval_file, 'r' ), chrom_col=chromCol, start_col=startCol, end_col=endCol, strand_col=strandCol, fix_strand=True, return_header=False, return_comments=False ) ):
+        src = maf_utilities.src_merge( dbkey, region.chrom )
+        for block in index.get_as_iterator( src, region.start, region.end ):
+            if split_blocks_by_species:
+                blocks = [ new_block for new_block in maf_utilities.iter_blocks_split_by_species( block ) if maf_utilities.component_overlaps_region( new_block.get_component_by_src_start( dbkey ), region ) ]
+            else:
+                blocks = [ block ]
+            for block in blocks:
+                block = maf_utilities.chop_block_by_region( block, src, region )
+                if block is not None:
+                    if species is not None:
+                        block = block.limit_to_species( species )
+                    block = maf_utilities.orient_block_by_region( block, src, region )
+                    if remove_all_gap_columns:
+                        block.remove_all_gap_columns()
+                    out.write( block )
+                    num_blocks += 1
+
+    # Close output MAF
+    out.close()
+
+    # remove index file if created during run
+    maf_utilities.remove_temp_index_file( index_filename )
+
+    if num_blocks:
+        print("%i MAF blocks extracted for %i regions." % ( num_blocks, ( num_regions + 1 ) ))
+    elif num_regions is not None:
+        print("No MAF blocks could be extracted for %i regions." % ( num_regions + 1 ))
+    else:
+        print("No valid regions have been provided.")
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/interval2maf.xml b/tools/maf/interval2maf.xml
new file mode 100644
index 0000000..a93d4c6
--- /dev/null
+++ b/tools/maf/interval2maf.xml
@@ -0,0 +1,295 @@
+<tool id="Interval2Maf1" name="Extract MAF blocks" version="1.0.1">
+  <description>given a set of genomic intervals</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">
+#if $maf_source_type.maf_source == "user"
+    interval2maf.py --dbkey=${input1.dbkey} --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafFile=$maf_source_type.mafFile --mafIndex=$maf_source_type.mafFile.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --mafIndexFile=${GALAXY_DATA_INDEX_DIR}/maf_index.loc --species=$maf_source_type.species
+#else
+    interval2maf.py --dbkey=${input1.dbkey} --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafType=$maf_source_type.mafType --interval_file=$input1 --output_file=$out_file1 --mafIndexFile=${GALAXY_DATA_INDEX_DIR}/maf_index.loc --species=$maf_source_type.species
+#end if
+--split_blocks_by_species=$split_blocks_by_species_selector.split_blocks_by_species
+#if $split_blocks_by_species_selector.split_blocks_by_species == "split_blocks_by_species"
+    --remove_all_gap_columns=$split_blocks_by_species_selector.remove_all_gap_columns
+#end if
+  </command>
+  <inputs>
+    <param format="interval" name="input1" type="data" label="Choose intervals">
+      <validator type="unspecified_build" />
+    </param>
+    <conditional name="maf_source_type">
+      <param name="maf_source" type="select" label="MAF Source">
+        <option value="cached" selected="true">Locally Cached Alignments</option>
+        <option value="user">Alignments in Your History</option>
+      </param>
+      <when value="user">
+        <param format="maf" name="mafFile" label="Choose alignments" type="data">
+          <options>
+            <filter type="data_meta" ref="input1" key="dbkey" />
+          </options>
+          <validator type="dataset_ok_validator" />
+        </param>
+        <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+          <options>
+            <filter type="data_meta" ref="mafFile" key="species" />
+          </options>
+        </param>
+      </when>
+      <when value="cached">
+        <param name="mafType" type="select" label="Choose alignments">
+          <options from_data_table="indexed_maf_files">
+            <!--
+            <column name="name" index="0"/>
+            <column name="value" index="1"/>
+            <column name="dbkey" index="2"/>
+            <column name="species" index="3"/>
+            -->
+            <filter type="data_meta" ref="input1" key="dbkey" column="dbkey" multiple="True" separator=","/>
+            <validator type="no_options" message="No alignments are available for the build associated with the selected interval file"/>
+          </options>
+        </param>
+        <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+          <options from_data_table="indexed_maf_files">
+            <column name="uid" index="1"/>
+            <column name="value" index="3"/>
+            <column name="name" index="3"/>
+            <filter type="param_value" ref="mafType" column="uid"/>
+            <filter type="multiple_splitter" column="name" separator=","/>
+          </options>
+        </param>
+      </when>
+    </conditional>
+    <conditional name="split_blocks_by_species_selector">
+      <param name="split_blocks_by_species" type="select" label="Split blocks by species" help="Not usually applicable. See help below for more information.">
+        <option value="split_blocks_by_species">Split by species</option>
+        <option value="dont_split_blocks_by_species" selected="true">Do not split</option>
+      </param>
+      <when value="dont_split_blocks_by_species">
+        <!-- do nothing here -->
+      </when>
+      <when value="split_blocks_by_species">
+        <param name="remove_all_gap_columns" type="select" label="Collapse empty alignment columns">
+          <option value="remove_all_gap_columns" selected="true">Collapse empty columns</option>
+          <option value="do_not_remove_all_gap_columns">Do not collapse</option>
+        </param>
+      </when>
+    </conditional>
+   </inputs>
+   <outputs>
+     <data format="maf" name="out_file1"/>
+   </outputs>
+   <tests>
+     <test>
+       <param name="input1" value="1.bed"/>
+       <param name="maf_source" value="cached"/>
+       <param name="mafType" value="ENCODE_TBA_hg17"/>
+       <param name="species" value="hg17,panTro1,baboon,marmoset,galago,rn3,mm6,rabbit,cow,canFam1,rfbat,shrew,armadillo,tenrec,monDom1,tetNig1,fr1,rheMac1,galGal2,xenTro1,danRer2,elephant,platypus,hedgehog,colobus_monkey,dusky_titi,owl_monkey,mouse_lemur"/>
+       <param name="split_blocks_by_species" value="dont_split_blocks_by_species"/>
+       <output name="out_file1" file="fsa_interval2maf.dat" />
+     </test>
+     <test>
+       <param name="input1" value="1.bed"/>
+       <param name="maf_source" value="user"/>
+       <param name="mafFile" value="fsa_interval2maf.dat"/>
+       <param name="species" value="hg17,panTro1,baboon,marmoset,galago,rn3,mm6,rabbit,cow,canFam1,rfbat,shrew,armadillo,tenrec,monDom1,tetNig1,fr1,rheMac1,galGal2,xenTro1,danRer2,elephant,platypus,hedgehog,colobus_monkey,dusky_titi,owl_monkey,mouse_lemur"/>
+       <param name="split_blocks_by_species" value="dont_split_blocks_by_species"/>
+       <output name="out_file1" file="fsa_interval2maf.dat" />
+     </test>
+     <test>
+       <param name="input1" value="1.bed" dbkey="hg18" ftype="bed"/>
+       <param name="maf_source" value="cached"/>
+       <param name="mafType" value="28_WAY_MULTIZ_hg18"/>
+       <param name="species" value="hg18,panTro2,mm8"/>
+       <param name="split_blocks_by_species" value="dont_split_blocks_by_species"/>
+       <output name="out_file1" file="interval2maf_3from28way.maf" />
+     </test>
+   </tests>
+   <help>
+**What it does**
+
+This tool takes genomic coordinates, superimposes them on multiple alignments (in MAF format) stored on the Galaxy site or from your history, and excises alignment blocks corresponding to each set of coordinates. Alignment blocks that extend past START and/or END positions of an interval are trimmed. Note that a single genomic interval may correspond to two or more alignment blocks.
+
+-----
+
+**Example**
+
+Here a single interval is superimposed on three MAF blocks. Blocks 1 and 3 are trimmed because they extend beyond the boundaries of the interval:
+
+.. image:: ${static_path}/images/maf_icons/interval2maf.png
+
+-------
+
+**Split blocks by species**
+
+This option examines each MAF block for multiple occurrences of a species in a single block. When this occurs, a block is split into multiple blocks where every combination of one sequence per species per block is represented.
+
+The interface for this option has two inputs:
+
+ * **MAF file to split**. Choose multiple alignments from history to be split by species.
+ * **Collapse empty alignment columns**. Determines whether alignment columns containing only gaps in the new blocks are removed.
+
+
+
+**Example 1**: **Collapse empty alignment columns is Yes**:
+
+For the following alignment::
+
+  ##maf version=1
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+the tool will create **a single** history item containing 12 alignment blocks (notice that no columns contain only gaps)::
+
+  ##maf version=1
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT-GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT-GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC--GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC-GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC-GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTAG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGCAG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCAG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC---AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC---AG
+
+
+
+**Example 2**: **Collapse empty alignment columns is No**:
+
+For the following alignment::
+
+  ##maf version=1
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+the tool will create **a single** history item containing 12 alignment blocks (notice that some columns contain only gaps)::
+
+  ##maf version=1
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/interval2maf_pairwise.xml b/tools/maf/interval2maf_pairwise.xml
new file mode 100644
index 0000000..99916f4
--- /dev/null
+++ b/tools/maf/interval2maf_pairwise.xml
@@ -0,0 +1,48 @@
+<tool id="Interval2Maf_pairwise1" name="Extract Pairwise MAF blocks" version="1.0.1">
+  <description>given a set of genomic intervals</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">interval2maf.py --dbkey=${input1.dbkey} --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafType=$mafType --interval_file=$input1 --output_file=$out_file1 --indexLocation=${GALAXY_DATA_INDEX_DIR}/maf_pairwise.loc</command>
+  <inputs>
+    <param name="input1" type="data" format="interval" label="Interval File">
+      <validator type="unspecified_build" />
+    </param>
+    <param name="mafType" type="select" label="Choose MAF source">
+      <options from_file="maf_pairwise.loc">
+        <column name="name" index="0"/>
+        <column name="value" index="1"/>
+        <column name="dbkey" index="2"/>
+        <column name="species" index="3"/>
+        <filter type="data_meta" ref="input1" key="dbkey" column="2" multiple="True" separator=","/>
+        <validator type="no_options" message="No alignments are available for the build associated with the selected interval file"/>
+      </options>
+    </param>
+   </inputs>
+  <outputs>
+    <data format="maf" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="8.bed" dbkey="hg17" format="bed"/>
+      <param name="mafType" value="PAIRWISE_hg17_fr1"/>
+      <output name="out_file1" file="Interval2Maf_pairwise_out.maf"/>
+    </test>
+  </tests>
+  <help>
+**What it does**
+
+This tool takes genomic coordinates, superimposes them on pairwise alignments (in MAF format) stored on the Galaxy site, and excises alignment blocks corresponding to each set of coordinates. Alignment blocks that extend past START and/or END positions of an interval are trimmed. Note that a single genomic interval may correspond to two or more alignment blocks.
+
+-----
+
+**Example**
+
+Here a single interval is superimposed on three MAF blocks. Blocks 1 and 3 are trimmed because they extend beyond the boundaries of the interval:
+
+.. image:: ${static_path}/images/maf_icons/interval2maf.png
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/interval_maf_to_merged_fasta.py b/tools/maf/interval_maf_to_merged_fasta.py
new file mode 100644
index 0000000..a2375ec
--- /dev/null
+++ b/tools/maf/interval_maf_to_merged_fasta.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python
+"""
+Reads an interval or gene BED and a MAF Source.
+Produces a FASTA file containing the aligned intervals/gene sequences, based upon the provided coordinates.
+
+Alignment blocks are layered on top of each other based upon score.
+
+usage: %prog maf_file [options]
+   -d, --dbkey=d: Database key, ie hg17
+   -c, --chromCol=c: Column of Chr
+   -s, --startCol=s: Column of Start
+   -e, --endCol=e: Column of End
+   -S, --strandCol=S: Column of Strand
+   -G, --geneBED: Input is a Gene BED file, process and join exons as one region
+   -t, --mafSourceType=t: Type of MAF source to use
+   -m, --mafSource=m: Path of source MAF file, if not using cached version
+   -I, --mafIndex=I: Path of precomputed source MAF file index, if not using cached version
+   -i, --interval_file=i: Input interval file
+   -o, --output_file=o: Output MAF file
+   -p, --species=p: Species to include in output
+   -O, --overwrite_with_gaps=O: Allow gaps interior to a species' sequence in a higher-scoring block to overwrite bases found for the same position in a lower-scoring block.
+   -z, --mafIndexFileDir=z: Directory of local maf_index.loc file
+
+usage: %prog dbkey_of_BED comma_separated_list_of_additional_dbkeys_to_extract comma_separated_list_of_indexed_maf_files input_gene_bed_file output_fasta_file cached|user GALAXY_DATA_INDEX_DIR
+"""
+# Dan Blankenberg
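+# A hypothetical invocation for a cached MAF source, mirroring the wrapper's
+# command line in interval_maf_to_merged_fasta.xml (values are illustrative):
+#   python interval_maf_to_merged_fasta.py --dbkey=hg17 --species=hg17,mm5 \
+#       --mafSource=8_WAY_MULTIZ_hg17 --mafSourceType=cached \
+#       --interval_file=regions.bed --output_file=out.fasta \
+#       --chromCol=1 --startCol=2 --endCol=3 --strandCol=6 \
+#       --mafIndexFileDir=/path/to/tool-data --overwrite_with_gaps=True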
+from __future__ import print_function
+
+import sys
+
+import bx.intervals.io
+from bx.cookbook import doc_optparse
+
+from galaxy.tools.util import maf_utilities
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def __main__():
+    # Parse Command Line
+    options, args = doc_optparse.parse( __doc__ )
+    mincols = 0
+    strand_col = -1
+
+    if options.dbkey:
+        primary_species = options.dbkey
+    else:
+        primary_species = None
+    if primary_species in [None, "?", "None"]:
+        stop_err( "You must specify a proper build in order to extract alignments. You can specify your genome build by clicking on the pencil icon associated with your interval file." )
+
+    include_primary = True
+    secondary_species = maf_utilities.parse_species_option( options.species )
+    if secondary_species:
+        species = list( secondary_species )  # make copy of species list
+        if primary_species in secondary_species:
+            secondary_species.remove( primary_species )
+        else:
+            include_primary = False
+    else:
+        species = None
+
+    if options.interval_file:
+        interval_file = options.interval_file
+    else:
+        stop_err( "Input interval file has not been specified." )
+
+    if options.output_file:
+        output_file = options.output_file
+    else:
+        stop_err( "Output file has not been specified." )
+
+    if not options.geneBED:
+        if options.chromCol:
+            chr_col = int( options.chromCol ) - 1
+        else:
+            stop_err( "Chromosome column not set, click the pencil icon in the history item to set the metadata attributes." )
+
+        if options.startCol:
+            start_col = int( options.startCol ) - 1
+        else:
+            stop_err( "Start column not set, click the pencil icon in the history item to set the metadata attributes." )
+
+        if options.endCol:
+            end_col = int( options.endCol ) - 1
+        else:
+            stop_err( "End column not set, click the pencil icon in the history item to set the metadata attributes." )
+
+        if options.strandCol:
+            strand_col = int( options.strandCol ) - 1
+
+    mafIndexFile = "%s/maf_index.loc" % options.mafIndexFileDir
+
+    overwrite_with_gaps = True
+    if options.overwrite_with_gaps and options.overwrite_with_gaps.lower() == 'false':
+        overwrite_with_gaps = False
+
+    # Finish parsing command line
+
+    # get index for mafs based on type
+    index = index_filename = None
+    # using specified uid for locally cached
+    if options.mafSourceType.lower() in ["cached"]:
+        index = maf_utilities.maf_index_by_uid( options.mafSource, mafIndexFile )
+        if index is None:
+            stop_err( "The MAF source specified (%s) appears to be invalid." % ( options.mafSource ) )
+    elif options.mafSourceType.lower() in ["user"]:
+        # index maf for use here, need to remove index_file when finished
+        index, index_filename = maf_utilities.open_or_build_maf_index( options.mafSource, options.mafIndex, species=[primary_species] )
+        if index is None:
+            stop_err( "Your MAF file appears to be malformed." )
+    else:
+        stop_err( "Invalid MAF source type specified." )
+
+    # open output file
+    output = open( output_file, "w" )
+
+    if options.geneBED:
+        region_enumerator = maf_utilities.line_enumerator( open( interval_file, "r" ).readlines() )
+    else:
+        region_enumerator = enumerate(bx.intervals.io.NiceReaderWrapper(
+            open( interval_file, 'r' ), chrom_col=chr_col, start_col=start_col,
+            end_col=end_col, strand_col=strand_col, fix_strand=True,
+            return_header=False, return_comments=False ) )
+
+    # Step through intervals
+    regions_extracted = 0
+    line_count = 0
+    for line_count, line in region_enumerator:
+        try:
+            if options.geneBED:  # Process as Gene BED
+                try:
+                    starts, ends, fields = maf_utilities.get_starts_ends_fields_from_gene_bed( line )
+                    # create spliced alignment object
+                    alignment = maf_utilities.get_spliced_region_alignment(
+                        index, primary_species, fields[0], starts, ends,
+                        strand='+', species=species, mincols=mincols,
+                        overwrite_with_gaps=overwrite_with_gaps )
+                    primary_name = secondary_name = fields[3]
+                    alignment_strand = fields[5]
+                except Exception as e:
+                    print("Error loading exon positions from input line %i: %s" % ( line_count, e ))
+                    continue
+            else:  # Process as standard intervals
+                try:
+                    # create spliced alignment object
+                    alignment = maf_utilities.get_region_alignment(
+                        index, primary_species, line.chrom, line.start,
+                        line.end, strand='+', species=species, mincols=mincols,
+                        overwrite_with_gaps=overwrite_with_gaps )
+                    primary_name = "%s(%s):%s-%s" % ( line.chrom, line.strand, line.start, line.end )
+                    secondary_name = ""
+                    alignment_strand = line.strand
+                except Exception as e:
+                    print("Error loading region positions from input line %i: %s" % ( line_count, e ))
+                    continue
+
+            # Write alignment to output file
+            # Output primary species first, if requested
+            if include_primary:
+                output.write( ">%s.%s\n" % ( primary_species, primary_name ) )
+                if alignment_strand == "-":
+                    output.write( alignment.get_sequence_reverse_complement( primary_species ) )
+                else:
+                    output.write( alignment.get_sequence( primary_species ) )
+                output.write( "\n" )
+            # Output all remaining species
+            for spec in secondary_species or alignment.get_species_names( skip=primary_species ):
+                if secondary_name:
+                    output.write( ">%s.%s\n" % ( spec, secondary_name ) )
+                else:
+                    output.write( ">%s\n" % ( spec ) )
+                if alignment_strand == "-":
+                    output.write( alignment.get_sequence_reverse_complement( spec ) )
+                else:
+                    output.write( alignment.get_sequence( spec ) )
+                output.write( "\n" )
+
+            output.write( "\n" )
+            regions_extracted += 1
+        except Exception as e:
+            print("Unexpected error from input line %i: %s" % ( line_count, e ))
+            continue
+
+    # close output file
+    output.close()
+
+    # remove index file if created during run
+    maf_utilities.remove_temp_index_file( index_filename )
+
+    # Print message about success for user
+    if regions_extracted > 0:
+        print("%i regions were processed successfully." % ( regions_extracted ))
+    else:
+        print("No regions were processed successfully.")
+        if line_count > 0 and options.geneBED:
+            print("This tool requires your input file to conform to the 12 column BED standard.")
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/interval_maf_to_merged_fasta.xml b/tools/maf/interval_maf_to_merged_fasta.xml
new file mode 100644
index 0000000..25d9d91
--- /dev/null
+++ b/tools/maf/interval_maf_to_merged_fasta.xml
@@ -0,0 +1,112 @@
+<tool id="Interval_Maf_Merged_Fasta2" name="Stitch MAF blocks" version="1.0.1">
+  <description>given a set of genomic intervals</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">
+    #if $maf_source_type.maf_source == "user" #interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_file --mafIndex=$maf_source_type.maf_file.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafSourceType=$maf_source_type.maf_source --mafIndexFileDir=${GA [...]
+    #else                                     #interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_identifier --interval_file=$input1 --output_file=$out_file1 --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafSourceType=$maf_source_type.maf_source --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
+    #end if# --overwrite_with_gaps=$overwrite_with_gaps
+  </command>
+  <inputs>
+    <page>
+        <param format="interval" name="input1" type="data" label="Choose intervals">
+          <validator type="unspecified_build" />
+        </param>
+        <conditional name="maf_source_type">
+            <param name="maf_source" type="select" label="MAF Source">
+              <option value="cached" selected="true">Locally Cached Alignments</option>
+              <option value="user">Alignments in Your History</option>
+            </param>
+            <when value="user">
+              <param name="maf_file" type="data" format="maf" label="MAF File">
+                <options>
+                  <filter type="data_meta" ref="input1" key="dbkey" />
+                </options>
+                <validator type="dataset_ok_validator" />
+              </param>
+              <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+                <options>
+                  <filter type="data_meta" ref="maf_file" key="species" />
+                </options>
+              </param>
+            </when>
+            <when value="cached">
+              <param name="maf_identifier" type="select" label="MAF Type" >
+                <options from_file="maf_index.loc">
+                  <column name="name" index="0"/>
+                  <column name="value" index="1"/>
+                  <column name="dbkey" index="2"/>
+                  <column name="species" index="3"/>
+                  <filter type="data_meta" ref="input1" key="dbkey" column="2" multiple="True" separator=","/>
+                  <validator type="no_options" message="No alignments are available for the build associated with the selected interval file"/>
+                </options>
+              </param>
+              <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+                <options from_file="maf_index.loc">
+                  <column name="uid" index="1"/>
+                  <column name="value" index="3"/>
+                  <column name="name" index="3"/>
+                  <filter type="param_value" ref="maf_identifier" name="uid" column="1"/>
+                  <filter type="multiple_splitter" column="3" separator=","/>
+                </options>
+              </param>
+            </when>
+        </conditional>
+        <param name="overwrite_with_gaps" type="select" label="Split into Gapless MAF blocks" help="When set to Yes, blocks are divided around gaps appearing in any species. This will prevent gaps occurring in the interior of the sequence for an aligning species from overwriting a nucleotide found for the same position in a lower-scoring block.">
+          <option value="True" selected="true">No</option>
+          <option value="False">Yes</option>
+        </param>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="fasta" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="13.bed" dbkey="hg18" ftype="bed"/>
+      <param name="maf_source" value="cached"/>
+      <param name="maf_identifier" value="17_WAY_MULTIZ_hg18"/>
+      <param name="species" value="hg18,mm8"/>
+      <param name="overwrite_with_gaps" value="True"/>
+      <output name="out_file1" file="interval_maf_to_merged_fasta_out3.fasta" />
+    </test>
+    <test>
+      <param name="input1" value="1.bed" dbkey="hg17" ftype="bed"/>
+      <param name="maf_source" value="cached"/>
+      <param name="maf_identifier" value="8_WAY_MULTIZ_hg17"/>
+      <param name="species" value="canFam1,hg17,mm5,panTro1,rn3"/>
+      <param name="overwrite_with_gaps" value="True"/>
+      <output name="out_file1" file="interval_maf_to_merged_fasta_out.dat" />
+    </test>
+    <test>
+      <param name="input1" value="1.bed" dbkey="hg17" ftype="bed"/>
+      <param name="maf_source" value="user"/>
+      <param name="maf_file" value="5.maf"/>
+      <param name="species" value="canFam1,hg17,mm5,panTro1,rn3"/>
+      <param name="overwrite_with_gaps" value="True"/>
+      <output name="out_file1" file="interval_maf_to_merged_fasta_user_out.dat" />
+    </test>
+  </tests>
+  <help>
+**What it does**
+
+A single genomic region can be covered by multiple alignment blocks. In many cases it is desirable to stitch these alignment blocks together. This tool accepts a list of genomic intervals. For every interval it performs the following:
+
+  * finds all MAF blocks that overlap the interval;
+  * sorts MAF blocks by alignment score;
+  * stitches blocks together and resolves overlaps based on alignment score;
+  * outputs alignments in FASTA format.
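+
+For a single interval, the output contains one FASTA record per species. With purely illustrative names and sequence content, it might look like this::
+
+  >hg17.chr1(+):100-150
+  ATGGCGTCGGCC
+  >mm5
+  ATGGCGTCAGCC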
+
+------
+
+**Example**
+
+Here three MAF blocks overlapping a single interval are stitched together. Space between blocks 2 and 3 is filled with gaps:
+
+.. image:: ${static_path}/images/maf_icons/stitchMaf.png
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/macros.xml b/tools/maf/macros.xml
new file mode 100644
index 0000000..d11af1c
--- /dev/null
+++ b/tools/maf/macros.xml
@@ -0,0 +1,16 @@
+<macros>
+  <token name="@HELP_CITATIONS@">
+------
+
+**Citation**
+
+If you use this tool, please cite `Blankenberg D, Taylor J, Nekrutenko A; The Galaxy Team. Making whole genome multiple alignments usable for biologists. Bioinformatics. 2011 Sep 1;27(17):2426-2428. <http://www.ncbi.nlm.nih.gov/pubmed/21775304>`_
+
+
+  </token>
+  <xml name="citations">
+    <citations>
+      <citation type="doi">10.1093/bioinformatics/btr398</citation>
+    </citations>
+  </xml>
+</macros>
diff --git a/tools/maf/maf_by_block_number.py b/tools/maf/maf_by_block_number.py
new file mode 100644
index 0000000..060df3a
--- /dev/null
+++ b/tools/maf/maf_by_block_number.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+"""
+Reads a list of block numbers and a maf. Produces a new maf containing the
+blocks specified by number.
+"""
+from __future__ import print_function
+
+import sys
+
+import bx.align.maf
+
+from galaxy.tools.util import maf_utilities
+
+
+def __main__():
+    input_block_filename = sys.argv[1].strip()
+    input_maf_filename = sys.argv[2].strip()
+    output_filename1 = sys.argv[3].strip()
+    block_col = int( sys.argv[4].strip() ) - 1
+    if block_col < 0:
+        print("Invalid column specified", file=sys.stderr)
+        sys.exit(0)
+    species = maf_utilities.parse_species_option( sys.argv[5].strip() )
+
+    maf_writer = bx.align.maf.Writer( open( output_filename1, 'w' ) )
+    # we want to maintain order of block file and write blocks as many times as they are listed
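+    # note: the MAF file is re-read from the beginning for every requested
+    # block, which is quadratic in the worst case but keeps memory use minimal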
+    failed_lines = []
+    for ctr, line in enumerate( open( input_block_filename, 'r' ) ):
+        try:
+            block_wanted = int( line.split( "\t" )[block_col].strip() )
+        except Exception:
+            failed_lines.append( str( ctr ) )
+            continue
+        try:
+            for count, block in enumerate( bx.align.maf.Reader( open( input_maf_filename, 'r' ) ) ):
+                if count == block_wanted:
+                    if species:
+                        block = block.limit_to_species( species )
+                    maf_writer.write( block )
+                    break
+        except Exception:
+            print("Your MAF file appears to be malformed.", file=sys.stderr)
+            sys.exit()
+    if len( failed_lines ) > 0:
+        print("Failed to extract from %i lines (%s)." % ( len( failed_lines ), ",".join( failed_lines ) ))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_by_block_number.xml b/tools/maf/maf_by_block_number.xml
new file mode 100644
index 0000000..474e461
--- /dev/null
+++ b/tools/maf/maf_by_block_number.xml
@@ -0,0 +1,38 @@
+<tool id="maf_by_block_number1" name="Extract MAF by block number" version="1.0.1">
+  <description>given a set of block numbers and a MAF file</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_by_block_number.py $input1 $input2 $out_file1 $block_col $species</command>
+  <inputs>
+    <param format="txt" name="input1" type="data" label="Block Numbers"/>
+    <param format="maf" name="input2" label="MAF File" type="data"/>
+    <param name="block_col" type="data_column" label="Column containing Block number" data_ref="input1" accept_default="True" />
+    <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+      <options>
+        <filter type="data_meta" ref="input2" key="species" />
+      </options>
+    </param>
+   </inputs>
+  <outputs>
+    <data format="maf" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="maf_by_block_numbers.dat"/>
+      <param name="input2" value="3.maf"/>
+      <param name="block_col" value="1"/>
+      <param name="species" value="hg17,panTro1,mm5,rn3,canFam1"/>
+      <output name="out_file1" file="maf_by_block_number_out.dat" />
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool takes a list of block numbers, one per line, and extracts the corresponding MAF blocks from the provided file. Block numbers start at 0.
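+
+For example, a block-number file requesting the first, third, and sixth blocks (values are illustrative) would contain::
+
+  0
+  2
+  5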
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/maf_filter.py b/tools/maf/maf_filter.py
new file mode 100644
index 0000000..eae7053
--- /dev/null
+++ b/tools/maf/maf_filter.py
@@ -0,0 +1,74 @@
+# Dan Blankenberg
+# Filters a MAF file according to the provided code file, which is generated in maf_filter.xml <configfiles>
+# Also allows filtering by number of columns in a block, and limiting output species
+from __future__ import print_function
+
+import os
+import shutil
+import sys
+
+import bx.align.maf
+
+from galaxy.tools.util import maf_utilities
+
+
+def main():
+    # Read command line arguments
+    try:
+        script_file = sys.argv.pop( 1 )
+        maf_file = sys.argv.pop( 1 )
+        out_file = sys.argv.pop( 1 )
+        additional_files_path = sys.argv.pop( 1 )
+        species = maf_utilities.parse_species_option( sys.argv.pop( 1 ) )
+        min_size = int( sys.argv.pop( 1 ) )
+        max_size = int( sys.argv.pop( 1 ) )
+        if max_size < 1:
+            max_size = sys.maxsize
+        min_species_per_block = int( sys.argv.pop( 1 ) )
+        exclude_incomplete_blocks = int( sys.argv.pop( 1 ) )
+        if species:
+            num_species = len( species )
+        else:
+            num_species = len( sys.argv.pop( 1 ).split( ',') )
+    except Exception:
+        print("One or more arguments is missing.\nUsage: maf_filter.py maf_filter_file input_maf output_maf path_to_save_debug species_to_keep", file=sys.stderr)
+        sys.exit()
+
+    # Open input and output MAF files
+    try:
+        maf_reader = bx.align.maf.Reader( open( maf_file, 'r' ) )
+        maf_writer = bx.align.maf.Writer( open( out_file, 'w' ) )
+    except Exception:
+        print("Your MAF file appears to be malformed.", file=sys.stderr)
+        sys.exit()
+
+    # Save script file for debugging/verification info later
+    os.mkdir( additional_files_path )
+    shutil.copy( script_file, os.path.join( additional_files_path, 'debug.txt' ) )
+
+    # Loop through blocks, running filter on each
+    # 'maf_block' and 'ret_val' are used/shared in the provided code file
+    # 'ret_val' should be set to True if the block is to be kept
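+    # As an illustration only (the actual code file is generated from the
+    # <configfiles> template in maf_filter.xml), such a script might contain:
+    #     def maf_block_pass_filter( maf_block ):
+    #         component = maf_block.get_component_by_src_start( 'hg17' )
+    #         return component is None or component.strand == '+'
+    #     ret_val = maf_block_pass_filter( maf_block )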
+    i = -1  # index of the last block read; -1 means no blocks were read
+    blocks_kept = 0
+    for i, maf_block in enumerate( maf_reader ):
+        if min_size <= maf_block.text_size <= max_size:
+            local = {'maf_block': maf_block, 'ret_val': False}
+            exec(compile(open( script_file ).read(), script_file, 'exec'), {}, local)
+            if local['ret_val']:
+                # Species limiting must be done after filters as filters could be run on non-requested output species
+                if species:
+                    maf_block = maf_block.limit_to_species( species )
+                if len( maf_block.components ) >= min_species_per_block and ( not exclude_incomplete_blocks or len( maf_block.components ) >= num_species ):
+                    maf_writer.write( maf_block )
+                    blocks_kept += 1
+    maf_writer.close()
+    maf_reader.close()
+    if i < 0:
+        print("Your file contains no valid maf_blocks.")
+    else:
+        print('Kept %s of %s blocks (%.2f%%).' % ( blocks_kept, i + 1, float( blocks_kept ) / float( i + 1 ) * 100.0 ))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/maf/maf_filter.xml b/tools/maf/maf_filter.xml
new file mode 100644
index 0000000..7b33837
--- /dev/null
+++ b/tools/maf/maf_filter.xml
@@ -0,0 +1,199 @@
+<tool id="MAF_filter" name="Filter MAF" version="1.0.1">
+  <description>by specified attributes</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_filter.py $maf_filter_file $input1 $out_file1 $out_file1.files_path $species $min_size $max_size $min_species_per_block $exclude_incomplete_blocks ${input1.metadata.species}</command>
+  <inputs>
+    <page>
+      <param name="input1" type="data" format="maf" label="MAF File"/>
+      <param name="min_size" label="Minimum Size" value="0" type="integer"/>
+      <param name="max_size" label="Maximum Size" value="0" type="integer" help="A maximum size less than 1 indicates no limit"/>
+      <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+        <options>
+          <filter type="data_meta" ref="input1" key="species" />
+        </options>
+      </param>
+      <param name="min_species_per_block" type="select" label="Exclude blocks which have only one species" >
+        <option value="2">Yes</option>
+        <option value="1" selected="True">No</option>
+      </param>
+      <param name="exclude_incomplete_blocks" type="select" label="Exclude blocks which have missing species" >
+        <option value="1">Yes</option>
+        <option value="0" selected="True">No</option>
+      </param>
+      <repeat name="maf_filters" title="Filter">
+        <param name="species1" type="select" label="When Species" multiple="false">
+          <options>
+            <filter type="data_meta" ref="input1" key="species" />
+          </options>
+        </param>
+        <conditional name="species1_attributes">
+          <param name="species1_attribute_type" type="select" label="Species Attribute">
+            <option value="attribute_strand">Strand</option>
+            <option value="attribute_chr" selected="true">Chromosome</option>
+          </param>
+          <when value="attribute_strand">
+            <param name="species1_is_isnot" type="select" label="Conditional">
+              <option value="==">Is</option>
+              <option value="!=">Is Not</option>
+            </param>
+            <param name="species1_attribute" type="select" label="Strand">
+              <option value="+" selected="true">+</option>
+              <option value="-">-</option>
+            </param>
+            <repeat name="filter_condition" title="Filter Condition">
+              <param name="species2" type="select" label="Species" multiple="false">
+                <options>
+                  <filter type="data_meta" ref="input1" key="species" />
+                </options>
+              </param>
+              <conditional name="species2_attributes">
+                <param name="species2_attribute_type" type="select" label="Species Attribute">
+                  <option value="attribute_strand" selected="true">Strand</option>
+                  <option value="attribute_chr">Chromosome</option>
+                </param>
+                <when value="attribute_strand">
+                  <param name="species2_is_isnot" type="select" label="Conditional">
+                    <option value="==">Is</option>
+                    <option value="!=">Is Not</option>
+                  </param>
+                  <param name="species2_attribute" type="select" label="Strand">
+                    <option value="+" selected="true">+</option>
+                    <option value="-">-</option>
+                  </param>
+                </when>
+                <when value="attribute_chr">
+                  <param name="species2_is_isnot" type="select" label="Conditional">
+                    <option value="in">Is</option>
+                    <option value="not in">Is Not</option>
+                  </param>
+                  <param name="species2_attribute" type="text" label="Chromosome" value="chr1"/>
+                </when>
+              </conditional>
+            </repeat>
+          </when>
+          <when value="attribute_chr">
+            <param name="species1_is_isnot" type="select" label="Conditional">
+              <option value="in">Is</option>
+              <option value="not in">Is Not</option>
+            </param>
+            <param name="species1_attribute" type="text" label="Chromosome" value="chr1"/>
+            <repeat name="filter_condition" title="Filter Condition">
+              <param name="species2" type="select" label="Species" multiple="false">
+                <options>
+                  <filter type="data_meta" ref="input1" key="species" />
+                </options>
+              </param>
+              <conditional name="species2_attributes">
+                <param name="species2_attribute_type" type="select" label="Species Attribute">
+                  <option value="attribute_strand">Strand</option>
+                  <option value="attribute_chr" selected="true">Chromosome</option>
+                </param>
+                <when value="attribute_strand">
+                  <param name="species2_is_isnot" type="select" label="Conditional">
+                    <option value="==">Is</option>
+                    <option value="!=">Is Not</option>
+                  </param>
+                  <param name="species2_attribute" type="select" label="Strand">
+                    <option value="+" selected="true">+</option>
+                    <option value="-">-</option>
+                  </param>
+                </when>
+                <when value="attribute_chr">
+                  <param name="species2_is_isnot" type="select" label="Conditional">
+                    <option value="in">Is</option>
+                    <option value="not in">Is Not</option>
+                  </param>
+                  <param name="species2_attribute" type="text" label="Chromosome" value="chr1"/>
+                </when>
+              </conditional>
+            </repeat>
+          </when>
+        </conditional>
+      </repeat>
+    </page>
+  </inputs>
+  <configfiles>
+    <configfile name="maf_filter_file">
+#set $is_isnot_valid = {"==":"==", "!=":"!=", "in":"in", "not in":"not in"}
+def maf_block_pass_filter( maf_block ):
+#for $maf_filter in $maf_filters:
+#if $len( $maf_filter['species1_attributes']['filter_condition'] ) == 0:
+#continue
+#end if
+    primary_component = maf_block.get_component_by_src_start( """$maf_filter['species1'].value.encode( 'string_escape' )""".decode( 'string_escape' ) )
+    if primary_component is not None:
+#if $maf_filter['species1_attributes']['species1_attribute_type'] == 'attribute_chr':
+        if primary_component.src.split( "." )[-1] $is_isnot_valid.get( $maf_filter['species1_attributes']['species1_is_isnot'].value.strip(), 'is in' ) """$maf_filter['species1_attributes']['species1_attribute'].value.encode( 'string_escape' )""".decode( 'string_escape' ).split( "," ):
+#else
+        if primary_component.strand $is_isnot_valid.get( $maf_filter['species1_attributes']['species1_is_isnot'].value.strip(), '==' ) """$maf_filter['species1_attributes']['species1_attribute'].value.encode( 'string_escape' )""".decode( 'string_escape' ):
+#end if
+#for $filter_condition in $maf_filter['species1_attributes']['filter_condition']:
+            secondary_component = maf_block.get_component_by_src_start( """$filter_condition['species2'].value.encode( 'string_escape' )""".decode( 'string_escape' ) )
+#if $filter_condition['species2_attributes']['species2_attribute_type'] == 'attribute_chr':
+            if secondary_component is not None:
+                if not ( secondary_component.src.split( "." )[-1] $is_isnot_valid.get( $filter_condition['species2_attributes']['species2_is_isnot'].value.strip(), 'is in' ) """$filter_condition['species2_attributes']['species2_attribute'].value.encode( 'string_escape' )""".decode( 'string_escape' ).split( "," ) ):
+                    return False
+#else:
+            if secondary_component is not None:
+                if not ( secondary_component.strand $is_isnot_valid.get( $filter_condition['species2_attributes']['species2_is_isnot'].value.strip(), '==' ) """$filter_condition['species2_attributes']['species2_attribute'].value.encode( 'string_escape' )""".decode( 'string_escape' ) ):
+                    return False
+#end if
+#end for
+#end for
+    return True
+ret_val = maf_block_pass_filter( maf_block )
+</configfile>
+  </configfiles>
+  <outputs>
+    <data format="maf" name="out_file1" />
+  </outputs>
+<!--
+  <tests>
+    <test>
+      <param name="input1" value="4.maf"/>
+      <param name="species" value="bosTau2,canFam2,hg17,panTro1,rheMac2,rn3"/>
+      <param name="exclude_incomplete_blocks" value="0"/>
+      <param name="min_species_per_block" value="1"/>
+      <param name="min_size" value="0"/>
+      <param name="max_size" value="0"/>
+      <param name="species1" value="hg17"/>
+      <param name="species2" value="hg17"/>
+      <param name="species1_attribute_type" value="attribute_chr"/>
+      <param name="species1_is_isnot" value="in"/>
+      <param name="species1_attribute" value="chr1"/>
+      <param name="filter_condition"/> Test will ERROR when this is set or when it is not set.
+      <output name="out_file1" file="cf_maf_limit_to_species.dat"/>
+    </test>
+  </tests>
+-->
+<help>
+This tool allows you to build complex filters to be applied to each alignment block of a MAF file. You can define constraints on species based upon chromosome and strand, and you can specify comma-separated lists of chromosomes where appropriate.
+
+.. class:: infomark
+
+For example, this tool is useful to restrict a set of alignments to only those blocks which contain alignments between chromosomes that are considered homologous.
+
+-----
+
+.. class:: warningmark
+
+If a species is not found in a particular block, all filters on that species are ignored.
+
+-----
+
+This tool also allows the user to remove any undesired species from a MAF file. If no species are specified, all species will be kept. If species are specified, columns which contain only gaps are removed. The options for this are:
+
+ * **Exclude blocks which have missing species** - suppose you want to restrict an 8-way alignment to human, mouse, and rat. The tool will first remove all other species. Next, if this option is set to **YES**, the tool WILL NOT return MAF blocks that do not include human, mouse, or rat. This means that all alignment blocks returned by the tool will have exactly three sequences in this example.
+
+ * **Exclude blocks which have only one species** - if this option is set to **YES**, single-sequence alignment blocks WILL NOT be returned.
+
+-----
+
+You can also provide a size range and limit your output to the MAF blocks which fall within the specified range.
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/maf_limit_size.py b/tools/maf/maf_limit_size.py
new file mode 100644
index 0000000..0c0324e
--- /dev/null
+++ b/tools/maf/maf_limit_size.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+"""
+Removes blocks that fall outside of specified size range.
+"""
+from __future__ import print_function
+
+import sys
+
+import bx.align.maf
+
+
+def __main__():
+    input_maf_filename = sys.argv[1].strip()
+    output_filename1 = sys.argv[2].strip()
+    min_size = int( sys.argv[3].strip() )
+    max_size = int( sys.argv[4].strip() )
+    if max_size < 1:
+        max_size = sys.maxsize
+    maf_writer = bx.align.maf.Writer( open( output_filename1, 'w' ) )
+    try:
+        maf_reader = bx.align.maf.Reader( open( input_maf_filename, 'r' ) )
+    except Exception:
+        print("Your MAF file appears to be malformed.", file=sys.stderr)
+        sys.exit()
+
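+    # Keep only blocks whose alignment text length (text_size) falls within
+    # the inclusive [min_size, max_size] range.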
+    blocks_kept = 0
+    i = 0
+    for i, m in enumerate( maf_reader ):
+        if min_size <= m.text_size <= max_size:
+            maf_writer.write( m )
+            blocks_kept += 1
+    print('Kept %s of %s blocks (%.2f%%).' % ( blocks_kept, i + 1, float( blocks_kept ) / float( i + 1 ) * 100.0 ))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_limit_size.xml b/tools/maf/maf_limit_size.xml
new file mode 100644
index 0000000..207628a
--- /dev/null
+++ b/tools/maf/maf_limit_size.xml
@@ -0,0 +1,34 @@
+<tool id="maf_limit_size1" name="Filter MAF blocks" version="1.0.1">
+  <description>by Size</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_limit_size.py $input1 $out_file1 $min_size $max_size</command>
+  <inputs>
+    <page>
+        <param format="maf" name="input1" label="MAF File" type="data"/>
+        <param name="min_size" label="Minimum Size" value="0" type="integer"/>
+        <param name="max_size" label="Maximum Size" value="0" type="integer" help="A maximum size less than 1 indicates no limit"/>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="maf" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="3.maf" ftype="maf" />
+      <param name="min_size" value="0"/>
+      <param name="max_size" value="0"/>
+      <output name="out_file1" file="maf_limit_size1_out.maf" />
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool takes a MAF file and a size range and extracts the MAF blocks which fall within the specified range.
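+
+Each block is kept or dropped by a check equivalent to the following sketch (an illustrative restatement of the tool's logic, not code it runs; a maximum size below 1 is first replaced by an effectively unlimited value)::
+
+  keep = min_size &lt;= block.text_size &lt;= max_size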
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/maf_limit_to_species.py b/tools/maf/maf_limit_to_species.py
new file mode 100644
index 0000000..ac4abde
--- /dev/null
+++ b/tools/maf/maf_limit_to_species.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+"""
+Read a maf file and write out a new maf with only blocks having the
+required species, after dropping any other species and removing
+columns containing only gaps.
+
+usage: %prog species,species2,... input_maf output_maf allow_partial min_species_per_block
+"""
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+import bx.align.maf
+
+from galaxy.tools.util import maf_utilities
+
+
+def main():
+    species = maf_utilities.parse_species_option( sys.argv[1] )
+    if species:
+        spec_len = len( species )
+    else:
+        spec_len = 0
+    try:
+        maf_reader = bx.align.maf.Reader( open( sys.argv[2], 'r' ) )
+        maf_writer = bx.align.maf.Writer( open( sys.argv[3], 'w' ) )
+    except Exception:
+        print("Your MAF file appears to be malformed.", file=sys.stderr)
+        sys.exit()
+    allow_partial = False
+    if int( sys.argv[4] ):
+        allow_partial = True
+    min_species_per_block = int( sys.argv[5] )
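+    # allow_partial keeps blocks even when some requested species are absent;
+    # a block must still contain more than min_species_per_block species.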
+
+    maf_blocks_kept = 0
+    for m in maf_reader:
+        if species:
+            m = m.limit_to_species( species )
+        m.remove_all_gap_columns()
+        spec_in_block_len = len( maf_utilities.get_species_in_block( m ) )
+        if ( not species or allow_partial or spec_in_block_len == spec_len ) and spec_in_block_len > min_species_per_block:
+            maf_writer.write( m )
+            maf_blocks_kept += 1
+
+    maf_reader.close()
+    maf_writer.close()
+
+    print("Restricted to species: %s." % ", ".join( species ))
+    print("%i MAF blocks have been kept." % maf_blocks_kept)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/maf/maf_limit_to_species.xml b/tools/maf/maf_limit_to_species.xml
new file mode 100644
index 0000000..3e22e0d
--- /dev/null
+++ b/tools/maf/maf_limit_to_species.xml
@@ -0,0 +1,49 @@
+<tool id="MAF_Limit_To_Species1" name="Filter MAF blocks" version="1.0.0">
+  <description>by Species</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_limit_to_species.py $species $input1 $out_file1 $allow_partial $min_species</command>
+  <inputs>
+    <param name="input1" type="data" format="maf" label="MAF file"/>
+    <param name="allow_partial" type="select" label="Exclude blocks which have missing species" >
+      <option value="1">No</option>
+      <option value="0">Yes</option>
+    </param>
+    <param name="min_species" type="select" label="Exclude blocks which have only one species" >
+      <option value="1">Yes</option>
+      <option value="0">No</option>
+    </param>
+    <param name="species" type="select" label="Species to keep" display="checkboxes" multiple="true">
+      <options>
+        <filter type="data_meta" ref="input1" key="species" />
+      </options>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="maf" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="4.maf"/>
+      <param name="species" value="bosTau2,canFam2,hg17,panTro1,rheMac2,rn3"/>
+      <param name="allow_partial" value="0"/>
+      <param name="min_species" value="0"/>
+      <output name="out_file1" file="cf_maf_limit_to_species.dat"/>
+    </test>
+  </tests>
+  <help>
+
+**What It Does**
+
+This tool allows the user to remove any undesired species from a MAF file. Columns which contain only gaps are removed. The options for this tool are:
+
+ * **Exclude blocks which have missing species** - suppose you want to restrict an 8-way alignment to human, mouse, and rat. The tool will first remove all other species. Next, if this option is set to **YES**, the tool WILL NOT return MAF blocks that do not include human, mouse, or rat. This means that all alignment blocks returned by the tool will have exactly three sequences in this example.
+
+ * **Exclude blocks which have only one species** - if this option is set to **YES**, single-sequence alignment blocks WILL NOT be returned.
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
+
diff --git a/tools/maf/maf_reverse_complement.py b/tools/maf/maf_reverse_complement.py
new file mode 100644
index 0000000..681a944
--- /dev/null
+++ b/tools/maf/maf_reverse_complement.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+"""
+Reads a MAF file. Produces a MAF file containing
+the reverse complement for each block in the source file.
+
+usage: %prog input_maf_file output_maf_file
+"""
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+import bx.align.maf
+
+from galaxy.tools.util import maf_utilities
+
+
+def __main__():
+    # Parse Command Line
+    input_file = sys.argv.pop( 1 )
+    output_file = sys.argv.pop( 1 )
+    species = maf_utilities.parse_species_option( sys.argv.pop( 1 ) )
+
+    try:
+        maf_writer = bx.align.maf.Writer( open( output_file, 'w' ) )
+    except Exception:
+        print("Unable to open output file", file=sys.stderr)
+        sys.exit()
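+    # Reverse complement every block and, if requested, drop all species
+    # that were not selected before writing it out.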
+    try:
+        count = 0
+        for count, maf in enumerate( bx.align.maf.Reader( open( input_file ) ), 1 ):
+            maf = maf.reverse_complement()
+            if species:
+                maf = maf.limit_to_species( species )
+            maf_writer.write( maf )
+    except Exception:
+        print("Your MAF file appears to be malformed.", file=sys.stderr)
+        sys.exit()
+    print("%i regions were reverse complemented." % count)
+    maf_writer.close()
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_reverse_complement.xml b/tools/maf/maf_reverse_complement.xml
new file mode 100644
index 0000000..ce62d0d
--- /dev/null
+++ b/tools/maf/maf_reverse_complement.xml
@@ -0,0 +1,51 @@
+<tool id="MAF_Reverse_Complement_1" name="Reverse Complement" version="1.0.1">
+  <description>a MAF file</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_reverse_complement.py $input1 $out_file1 $species</command>
+  <inputs>
+    <page>
+        <param format="maf" name="input1" label="Alignment File" type="data"/>
+        <param name="species" type="select" display="checkboxes" multiple="true" label="Choose species" help="Select species to be included in the final alignment">
+          <options>
+            <filter type="data_meta" ref="input1" key="species" />
+          </options>
+        </param>
+    </page>
+   </inputs>
+  <outputs>
+    <data format="maf" name="out_file1" metadata_source="input1"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="3.maf" dbkey="hg17" format="maf"/>
+      <param name="species" value="hg17,panTro1,mm5,rn3,canFam1"/>
+      <output name="out_file1" file="maf_reverse_complement_out.dat"/>
+    </test>
+  </tests>
+  <help>
+**What it does**
+
+This tool takes a MAF file and creates a new MAF file, where each block has been reversed complemented.
+
+**Example**
+
+This MAF Block::
+
+  a score=8157.000000
+  s hg17.chr7    127471526 58 + 158628139 AATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+  s panTro1.chr6 129885407 58 + 161576975 AATTTGTGGTTTATTCGTTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+  s mm5.chr6      28904928 54 + 149721531 AA----CGTTTCATTGATTGCTCATCATTTAAAAAAAGAAATTCCTCAGTGGAAGAGG
+
+becomes::
+
+  a score=8157.000000
+  s hg17.chr7     31156555 58 - 158628139 CCTCTTCCACTATAGACCTCCTTAAACAAAATAATGAAAAATGAATAAACCACAAATT
+  s panTro1.chr6  31691510 58 - 161576975 CCTCTTCCACTATAGACCTCCTTAAACAAAATAATGAAAAACGAATAAACCACAAATT
+  s mm5.chr6     120816549 54 - 149721531 CCTCTTCCACTGAGGAATTTCTTTTTTTAAATGATGAGCAATCAATGAAACG----TT
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/maf_split_by_species.py b/tools/maf/maf_split_by_species.py
new file mode 100644
index 0000000..07a876f
--- /dev/null
+++ b/tools/maf/maf_split_by_species.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+"""
+Read a maf and split blocks by unique species combinations
+"""
+from __future__ import print_function
+
+import sys
+
+from bx.align import maf
+
+from galaxy.tools.util import maf_utilities
+from galaxy.util import string_as_bool
+
+
+def __main__():
+    try:
+        maf_reader = maf.Reader( open( sys.argv[1] ) )
+    except Exception as e:
+        maf_utilities.tool_fail( "Error opening MAF: %s" % e )
+    try:
+        out = maf.Writer( open( sys.argv[2], "w") )
+    except Exception as e:
+        maf_utilities.tool_fail( "Error opening file for output: %s" % e )
+    try:
+        collapse_columns = string_as_bool( sys.argv[3] )
+    except Exception as e:
+        maf_utilities.tool_fail( "Error determining collapse columns value: %s" % e )
+
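+    # A block containing multiple rows for one species is split into one
+    # block per combination of a single sequence per species.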
+    start_count = 0
+    end_count = 0
+    for start_count, start_block in enumerate( maf_reader ):
+        for block in maf_utilities.iter_blocks_split_by_species( start_block ):
+            if collapse_columns:
+                block.remove_all_gap_columns()
+            out.write( block )
+            end_count += 1
+    out.close()
+
+    if end_count:
+        print("%i alignment blocks created from %i original blocks." % ( end_count, start_count + 1 ))
+    else:
+        print("No alignment blocks were created.")
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_split_by_species.xml b/tools/maf/maf_split_by_species.xml
new file mode 100644
index 0000000..b33a029
--- /dev/null
+++ b/tools/maf/maf_split_by_species.xml
@@ -0,0 +1,221 @@
+<tool id="MAF_split_blocks_by_species1" name="Split MAF blocks" version="1.0.0">
+  <description>by Species</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_split_by_species.py $input1 $out_file1 $collapse_columns</command>
+  <inputs>
+    <param format="maf" name="input1" type="data" label="MAF file to split"/>
+    <param name="collapse_columns" type="select" label="Collapse empty alignment columns" help="Removes columns that are gaps in all sequences">
+      <option value="True" selected="true">Yes</option>
+      <option value="False">No</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="maf" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="maf_split_by_species_in.maf"/>
+      <param name="collapse_columns" value="True"/>
+      <output name="out_file1" file="maf_split_by_species_collapsed_out.maf"/>
+    </test>
+    <test>
+      <param name="input1" value="maf_split_by_species_in.maf"/>
+      <param name="collapse_columns" value="False"/>
+      <output name="out_file1" file="maf_split_by_species_not_collapsed_out.maf"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool examines each MAF block for multiple occurrences of a species in a single block. When this occurs, a block is split into multiple blocks where every combination of one sequence per species per block is represented.
+
+The interface for this tool has two inputs: 
+
+ * **MAF file to split**. Choose multiple alignments from history to be split by species.
+ * **Collapse empty alignment columns**. Determines whether alignment columns containing only gaps in the new blocks are removed.
+
+-----
+
+**Example 1**: **Collapse empty alignment columns is Yes**:
+
+For the following alignment::
+
+  ##maf version=1
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG 
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+
+the tool will create **a single** history item containing 12 alignment blocks (notice that no columns contain only gaps)::
+
+  ##maf version=1
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT-GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT-GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC--GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+ 
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC-GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC-GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTAG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGCAG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCAG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC---AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC---AG 
+
+-----
+
+**Example 2**: **Collapse empty alignment columns is No**:
+
+For the following alignment::
+
+  ##maf version=1
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG 
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+
+the tool will create **a single** history item containing 12 alignment blocks (notice that some columns contain only gaps)::
+
+  ##maf version=1
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG 
+  s species2.chr1 129723125 85 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG 
+  s species2.chr1 129723125 83 - 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCT--GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTCGTCCTCAG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 85 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984545 83 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTT--GTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTCCTCAG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 + 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTT------AG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+  
+  a score=2047408.0
+  s species1.chr1 147984645 79 - 245522847 ATGGCGTCGGCCTCCTCCGGGCCGTCGTC---GGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTTGTC---AG 
+  s species2.chr1 129723925 79 + 229575298 ATGGCGTCGGCCTCCTCCGGGCCGTCGTCTTCGGTCGGTTTTTCATCCTTTGATCCCGCGGTCCCTTCCTGTACCTC------AG 
+  s species3.chr3  68255714 76 - 258222147 ATGGCGTCCGCCTCCTCAGGGCCAGCGGC---GGCGGGGTTTTCACCCCTTGATTCCGGGGTCCCTGCCGGTACCGC------AG 
+
+-------
+
+.. class:: infomark
+
+**About formats**
+
+**MAF format** multiple alignment format file. This format stores multiple alignments at the DNA level between entire genomes. 
+
+ - The .maf format is line-oriented. Each multiple alignment ends with a blank line.
+ - Each sequence in an alignment is on a single line.
+ - Lines starting with # are considered to be comments.
+ - Each multiple alignment is in a separate paragraph that begins with an "a" line and contains an "s" line for each sequence in the multiple alignment.
+ - Some MAF files may contain two optional line types: 
+
+   - An "i" line containing information about what is in the aligned species DNA before and after the immediately preceding "s" line; 
+   - An "e" line containing information about the size of the gap between the alignments that span the current block.
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
+
diff --git a/tools/maf/maf_stats.py b/tools/maf/maf_stats.py
new file mode 100644
index 0000000..261244e
--- /dev/null
+++ b/tools/maf/maf_stats.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+"""
+Reads a list of intervals and a maf. Outputs a new set of intervals with statistics appended.
+"""
+from __future__ import print_function
+
+import sys
+
+import bx.intervals.io
+from bx.bitset import BitSet
+
+from galaxy.tools.util import maf_utilities
+
+
+def __main__():
+    maf_source_type = sys.argv.pop( 1 )
+    input_maf_filename = sys.argv[1].strip()
+    input_interval_filename = sys.argv[2].strip()
+    output_filename = sys.argv[3].strip()
+    dbkey = sys.argv[4].strip()
+    try:
+        chr_col = int( sys.argv[5].strip() ) - 1
+        start_col = int( sys.argv[6].strip() ) - 1
+        end_col = int( sys.argv[7].strip() ) - 1
+    except Exception:
+        print("You appear to be missing metadata. You can specify your metadata by clicking on the pencil icon associated with your interval file.", file=sys.stderr)
+        sys.exit()
+    summary = sys.argv[8].strip().lower() == "true"
+
+    mafIndexFile = "%s/maf_index.loc" % sys.argv[9]
+    try:
+        maf_index_filename = sys.argv[10].strip()
+    except Exception:
+        maf_index_filename = None
+    index = index_filename = None
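+    # A "user" MAF source is indexed on the fly; a "cached" source looks up a
+    # pre-built index by UID in maf_index.loc.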
+    if maf_source_type == "user":
+        # index maf for use here
+        index, index_filename = maf_utilities.open_or_build_maf_index( input_maf_filename, maf_index_filename, species=[dbkey] )
+        if index is None:
+            print("Your MAF file appears to be malformed.", file=sys.stderr)
+            sys.exit()
+    elif maf_source_type == "cached":
+        # access existing indexes
+        index = maf_utilities.maf_index_by_uid( input_maf_filename, mafIndexFile )
+        if index is None:
+            print("The MAF source specified (%s) appears to be invalid." % ( input_maf_filename ), file=sys.stderr)
+            sys.exit()
+    else:
+        print('Invalid source type specified: %s' % maf_source_type, file=sys.stderr)
+        sys.exit()
+
+    out = open(output_filename, 'w')
+
+    num_region = None
+    num_bad_region = 0
+    species_summary = {}
+    total_length = 0
+    # loop through interval file
+    for num_region, region in enumerate( bx.intervals.io.NiceReaderWrapper( open( input_interval_filename, 'r' ), chrom_col=chr_col, start_col=start_col, end_col=end_col, fix_strand=True, return_header=False, return_comments=False ) ):
+        src = "%s.%s" % ( dbkey, region.chrom )
+        region_length = region.end - region.start
+        if region_length < 1:
+            num_bad_region += 1
+            continue
+        total_length += region_length
+        coverage = { dbkey: BitSet( region_length ) }
+
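+        # One BitSet per species marks which positions of the reference
+        # interval are aligned with a non-gap character.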
+        for block in index.get_as_iterator( src, region.start, region.end ):
+            for spec in maf_utilities.get_species_in_block( block ):
+                if spec not in coverage:
+                    coverage[spec] = BitSet( region_length )
+            for split_block in maf_utilities.iter_blocks_split_by_species( block ):
+                if maf_utilities.component_overlaps_region( split_block.get_component_by_src( src ), region ):
+                    # need to chop and orient the split block
+                    split_block = maf_utilities.orient_block_by_region( maf_utilities.chop_block_by_region( split_block, src, region ), src, region, force_strand='+' )
+                    start_offset, alignment = maf_utilities.reduce_block_by_primary_genome( split_block, dbkey, region.chrom, region.start )
+                    for i in range( len( alignment[dbkey] ) ):
+                        for spec, text in alignment.items():
+                            if text[i] != '-':
+                                coverage[spec].set( start_offset + i )
+        if summary:
+            # record summary
+            for key in coverage.keys():
+                species_summary[key] = species_summary.get( key, 0 ) + coverage[key].count_range()
+        else:
+            # print coverage for interval
+            coverage_sum = coverage[dbkey].count_range()
+            out.write( "%s\t%s\t%s\t%s\n" % ( "\t".join( region.fields ), dbkey, coverage_sum, region_length - coverage_sum ) )
+            keys = list(coverage.keys())
+            keys.remove( dbkey )
+            keys.sort()
+            for key in keys:
+                coverage_sum = coverage[key].count_range()
+                out.write( "%s\t%s\t%s\t%s\n" % ( "\t".join( region.fields ), key, coverage_sum, region_length - coverage_sum ) )
+    if summary:
+        out.write( "#species\tnucleotides\tcoverage\n" )
+        for spec in species_summary:
+            out.write( "%s\t%s\t%.4f\n" % ( spec, species_summary[spec], float( species_summary[spec] ) / total_length ) )
+    out.close()
+    if num_region is not None:
+        print("%i regions were processed with a total length of %i." % ( num_region + 1, total_length ))
+    if num_bad_region:
+        print("%i regions were invalid." % ( num_bad_region ))
+    maf_utilities.remove_temp_index_file( index_filename )
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_stats.xml b/tools/maf/maf_stats.xml
new file mode 100644
index 0000000..39be202
--- /dev/null
+++ b/tools/maf/maf_stats.xml
@@ -0,0 +1,115 @@
+<tool id="maf_stats1" name="MAF Coverage Stats" version="1.0.1">
+  <description>Alignment coverage information</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">
+    maf_stats.py
+    #if $maf_source_type.maf_source == "user":
+      $maf_source_type.maf_source $input2 $input1 $out_file1 $dbkey ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} $summary
+    #else:
+      $maf_source_type.maf_source $maf_source_type.mafType $input1 $out_file1 $dbkey ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} $summary
+    #end if
+    ${GALAXY_DATA_INDEX_DIR}
+    #if $maf_source_type.maf_source == "user":
+    $input2.metadata.maf_index
+    #end if
+  </command>
+  <inputs>
+    <param format="interval" name="input1" label="Interval File" type="data">
+      <validator type="unspecified_build" />
+    </param>
+    <conditional name="maf_source_type">
+      <param name="maf_source" type="select" label="MAF Source">
+        <option value="cached" selected="true">Locally Cached Alignments</option>
+        <option value="user">Alignments in Your History</option>
+      </param>
+      <when value="user">
+        <param format="maf" name="input2" label="MAF File" type="data">
+          <options>
+            <filter type="data_meta" ref="input1" key="dbkey" />
+          </options>
+          <validator type="dataset_ok_validator" />
+        </param>
+      </when>
+      <when value="cached">
+        <param name="mafType" type="select" label="MAF Type">
+          <options from_file="maf_index.loc">
+            <column name="name" index="0"/>
+            <column name="value" index="1"/>
+            <column name="dbkey" index="2"/>
+            <filter type="data_meta" ref="input1" key="dbkey" column="2" multiple="True" separator=","/>
+            <validator type="no_options" message="No alignments are available for the build associated with the selected interval file"/>
+          </options>
+        </param> 
+      </when>
+    </conditional>
+    <param name="summary" type="select" label="Type of Output">
+      <option value="false" selected="true">Coverage by Region</option>
+      <option value="true">Summarize Coverage</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="interval" name="out_file1" metadata_source="input1">
+      <change_format>
+        <when input="summary" value="true" format="tabular" />
+      </change_format>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="1.bed" dbkey="hg17" format="bed"/>
+      <param name="maf_source" value="cached"/>
+      <param name="mafType" value="8_WAY_MULTIZ_hg17"/>
+      <output name="out_file1" file="maf_stats_interval_out.dat"/>
+      <param name="summary" value="false"/>
+    </test>
+    <test>
+      <param name="input1" value="1.bed" dbkey="hg17" format="bed"/>
+      <param name="maf_source" value="cached"/>
+      <param name="mafType" value="8_WAY_MULTIZ_hg17"/>
+      <output name="out_file1" file="maf_stats_summary_out.dat"/>
+      <param name="summary" value="true"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool takes a MAF file and an interval file and reports coverage information by interval for each species.
+If a column does not exist in the reference genome, it is not included in the output.
+
+Consider the interval: "chrX 1000 1100 myInterval"
+  Let's suppose we want to do stats on three-way alignments for H, M, and R. The result would look like this:
+
+    chrX 1000 1100 myInterval H XXX YYY 
+    
+    chrX 1000 1100 myInterval M XXX YYY 
+    
+    chrX 1000 1100 myInterval R XXX YYY 
+    
+
+  where XXX and YYY are:
+
+    XXX = number of nucleotides
+    
+    YYY = number of gaps
+
+----
+
+Alternatively, you can request only summary information for a set of intervals:
+  
+  ========  ===========  ========
+  #species  nucleotides  coverage
+  ========  ===========  ========
+  hg18         30639      0.2372
+  rheMac2      7524       0.0582
+  panTro2      30390      0.2353
+  ========  ===========  ========
+
+  where **coverage** is the number of nucleotides divided by the total length of the provided intervals.
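+
+  For each species row, the summary values are related by the following sketch (illustrative names, not from the tool itself)::
+
+    coverage = float( nucleotides ) / total_interval_length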
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/maf_thread_for_species.py b/tools/maf/maf_thread_for_species.py
new file mode 100644
index 0000000..581a936
--- /dev/null
+++ b/tools/maf/maf_thread_for_species.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+"""
+Read a maf file and write out a new maf with only blocks having all of
+the passed in species, after dropping any other species and removing columns
+containing only gaps. This will attempt to fuse together any blocks
+which are adjacent after the unwanted species have been dropped.
+
+usage: %prog input_maf output_maf species1,species2
+"""
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+import bx.align.maf
+from bx.align.tools.fuse import FusingAlignmentWriter
+from bx.align.tools.thread import get_components_for_species, remove_all_gap_columns
+
+
+def main():
+    input_file = sys.argv.pop( 1 )
+    output_file = sys.argv.pop( 1 )
+    species = sys.argv.pop( 1 ).split( ',' )
+
+    try:
+        maf_reader = bx.align.maf.Reader( open( input_file ) )
+    except Exception:
+        print("Unable to open source MAF file", file=sys.stderr)
+        sys.exit()
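+    # The FusingAlignmentWriter merges blocks that become adjacent once the
+    # unwanted species have been dropped.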
+    try:
+        maf_writer = FusingAlignmentWriter( bx.align.maf.Writer( open( output_file, 'w' ) ) )
+    except Exception:
+        print("Unable to open output file", file=sys.stderr)
+        sys.exit()
+    try:
+        for m in maf_reader:
+            new_components = m.components
+            if species != ['None']:
+                new_components = get_components_for_species( m, species )
+            if new_components:
+                remove_all_gap_columns( new_components )
+                m.components = new_components
+                m.score = 0.0
+                maf_writer.write( m )
+    except Exception as e:
+        print("Error steping through MAF File: %s" % e, file=sys.stderr)
+        sys.exit()
+    maf_reader.close()
+    maf_writer.close()
+
+    print("Restricted to species: %s." % ", ".join( species ))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/maf/maf_thread_for_species.xml b/tools/maf/maf_thread_for_species.xml
new file mode 100644
index 0000000..9decd04
--- /dev/null
+++ b/tools/maf/maf_thread_for_species.xml
@@ -0,0 +1,59 @@
+<tool id="MAF_Thread_For_Species1" name="Join MAF blocks" version="1.0.0">
+  <description>by Species</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_thread_for_species.py $input1 $out_file1 $species</command>
+  <inputs>
+    <param format="maf" name="input1" type="data" label="MAF file"/>
+    <param name="species" type="select" label="Species to keep" display="checkboxes" multiple="true">
+      <options>
+        <filter type="data_meta" ref="input1" key="species" />
+      </options>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="maf" name="out_file1"  metadata_source="input1"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="3.maf" format="maf"/>
+      <param name="species" value="hg17,panTro1"/>
+      <output name="out_file1" file="maf_thread_for_species.dat"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool merges MAF blocks which are adjoining in each of the specified species. Columns which contain only gaps are removed, and species which are not selected are removed from the output.
+
+**Example**
+
+Specifying the desired species as hg17 and panTro1 with this MAF file::
+
+  ##maf version=1
+  a score=60426.000000
+  s hg17.chr7    127471195 331 + 158628139 gtttgccatcttttgctgctctagggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTAAAACTTCCC-------------------------------AAATACT-GCCACTGATGTCCTG-----ATGGAGGTA-------TGAA-------------------AACATCCACTAA
+  s panTro1.chr6 129885076 331 + 161576975 gtttgccatcttttgctgctcttgggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTGAAACTTCCC-------------------------------AAATACT-GCCACTGATGTCCTG-----ATGGAGGTA-------TGAA-------------------AACATCCACTAA
+  s mm5.chr6      28904571 357 + 149721531 CTCCACTCTCGTTTGCTGTT----------------CTGTCACCATGGAAACAAA-CGAGGGTGGTCCAGTTACTATCTTGACTGCAGCTGGCAGTCAGTT-GCCACT-----CAGGAATAAGGCTATGCCATT-GATCCACTGAACCGTGATCTGGAAACCTGGCTGTTGTTT-------CAAGCCTTGGGGCCAGTTTGCGGTGTTACTCATGA--CTCTAAGATCGTGTGCTTG----CTGCAGGAAGAGACAGCAAGGGGGTTACATTTAAAAAGCCCCCAGTTTAGCTATAGGCAGGCCAACAGGTGTAAAAATACTCACTAGTAATGGGCTGAACTCATGGAGGTAGCATTAGTGAGACACTGTAACTGTTTTTTTAAAAATCACTAA
+  s rn3.chr4      56178191 282 + 187371129 CTTCACTCTCATTTGCTGTT----------------CTGTCACTATGGAGACAAACACAGGCTAGCCCAGTTACTATCTTGATCACAGCAGCT-GTCAGCTAGCTGCCACTCACAGGAATAAGGCCATACCATT-GATCCACTGAACCTTGATCTAGGAATTTGGC----------------------TGGGGCCAGTTTGCGGTGTCACTCATGA--CTCTAAGATTGTGTGTTTG----CTCCAGGAAGAGACGGCAAGAGGATTACCTTTAAAAGGTTC---------------------------------GGAGTCTAGCTGTAGACAGCCCA-----ATG--GGTA-------TAAC-------------------AATACTCACTAA
+
+  a score=8157.000000
+  s hg17.chr7    127471526 58 + 158628139 AATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+  s panTro1.chr6 129885407 58 + 161576975 AATTTGTGGTTTATTCGTTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG
+  s mm5.chr6      28904928 54 + 149721531 AA----CGTTTCATTGATTGCTCATCATTTAAAAAAAGAAATTCCTCAGTGGAAGAGG
+
+results in::
+
+  ##maf version=1
+  a score=0.0
+  s hg17.chr7    127471195 389 + 158628139 gtttgccatcttttgctgctctagggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTAAAACTTCCCAAATACTGCCACTGATGTCCTGATGGAGGTATGAAAACATCCACTAAAATTTGTGGTTTATTCATTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG 
+  s panTro1.chr6 129885076 389 + 161576975 gtttgccatcttttgctgctcttgggaatccagcagctgtcaccatgtaaacaagcccaggctagaccaGTTACCCTCATCATCTTAGCTGATAGCCAGCCAGCCACCACAGGCAtgagtcaggccatattgctggacccacagaattatgagctaaataaatagtcttgggttaagccactaagttttaggcatagtgtgttatgtaTCTCACAAACATATAAGACTGTGTGTTTGTTGACTGGAGGAAGAGATGCTATAAAGACCACCTTTTGAAACTTCCCAAATACTGCCACTGATGTCCTGATGGAGGTATGAAAACATCCACTAAAATTTGTGGTTTATTCGTTTTTCATTATTTTGTTTAAGGAGGTCTATAGTGGAAGAGG 
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+
+</tool>
+
diff --git a/tools/maf/maf_to_bed.py b/tools/maf/maf_to_bed.py
new file mode 100644
index 0000000..803bf97
--- /dev/null
+++ b/tools/maf/maf_to_bed.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+"""
+Read a maf and output intervals for specified list of species.
+"""
+from __future__ import print_function
+
+import os
+import sys
+
+from bx.align import maf
+
+
+def __main__():
+    input_filename = sys.argv[1]
+    output_filename = sys.argv[2]
+    # where to store files that become additional output
+    database_tmp_dir = sys.argv[5]
+
+    species = sys.argv[3].split(',')
+    partial = sys.argv[4]
+    output_id = sys.argv[6]
+    out_files = {}
+    primary_spec = None
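+    # The first species encountered is written to the primary output; every
+    # other species gets its own additional per-species history dataset.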
+
+    if "None" in species:
+        species = set()
+        try:
+            for i, m in enumerate( maf.Reader( open( input_filename, 'r' ) ) ):
+                for c in m.components:
+                    spec, chrom = maf.src_split( c.src )
+                    if not spec or not chrom:
+                        spec = chrom = c.src
+                    species.add(spec)
+        except Exception:
+            print("Invalid MAF file specified", file=sys.stderr)
+            return
+
+    if "?" in species:
+        print("Invalid dbkey specified", file=sys.stderr)
+        return
+
+    for i, spec in enumerate( species ):
+        if i == 0:
+            out_files[spec] = open( output_filename, 'w' )
+            primary_spec = spec
+        else:
+            out_files[ spec ] = open( os.path.join( database_tmp_dir, 'primary_%s_%s_visible_bed_%s' % ( output_id, spec, spec ) ), 'wb+' )
+    num_species = len( species )
+
+    print("Restricted to species:", ",".join( species ))
+
+    file_in = open( input_filename, 'r' )
+    maf_reader = maf.Reader( file_in )
+
+    block_num = -1
+
+    for i, m in enumerate( maf_reader ):
+        block_num += 1
+        if "None" not in species:
+            m = m.limit_to_species( species )
+        components = m.components
+        if len( components ) < num_species and partial == "partial_disallowed":
+            continue
+        for c in components:
+            spec, chrom = maf.src_split( c.src )
+            if not spec or not chrom:
+                spec = chrom = c.src
+            if spec not in out_files:
+                out_files[ spec ] = open( os.path.join( database_tmp_dir, 'primary_%s_%s_visible_bed_%s' % ( output_id, spec, spec ) ), 'wb+' )
+
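+            # BED intervals are reported in forward-strand coordinates;
+            # '-' strand components are converted using the source length.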
+            if c.strand == "-":
+                out_files[spec].write( chrom + "\t" + str( c.src_size - c.end ) + "\t" + str( c.src_size - c.start ) + "\t" + spec + "_" + str( block_num ) + "\t" + "0\t" + c.strand + "\n" )
+            else:
+                out_files[spec].write( chrom + "\t" + str( c.start ) + "\t" + str( c.end ) + "\t" + spec + "_" + str( block_num ) + "\t" + "0\t" + c.strand + "\n" )
+
+    file_in.close()
+    for file_out in out_files.values():
+        file_out.close()
+
+    print("#FILE1_DBKEY\t%s" % ( primary_spec ))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_to_bed.xml b/tools/maf/maf_to_bed.xml
new file mode 100644
index 0000000..3fd75c1
--- /dev/null
+++ b/tools/maf/maf_to_bed.xml
@@ -0,0 +1,134 @@
+<tool id="MAF_To_BED1" name="MAF to BED" force_history_refresh="True" version="1.0.0">
+  <description>Converts a MAF formatted file to the BED format</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_to_bed.py "${ input1 }" "${ out_file1 }" "${ species }" "${ complete_blocks }" "." "${ out_file1.id }"</command>
+  <inputs>
+    <param format="maf" name="input1" type="data" label="MAF file to convert"/>
+    <param name="species" type="select" label="Select species" display="checkboxes" multiple="true" help="a separate history item will be created for each checked species">
+      <options>
+        <filter type="data_meta" ref="input1" key="species" />
+      </options>
+    </param>
+    <param name="complete_blocks" type="select" label="Exclude blocks which have a requested species missing">
+      <option value="partial_allowed">include blocks with missing species</option>
+      <option value="partial_disallowed">exclude blocks with missing species</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="bed" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="4.maf"/>
+      <param name="species" value="hg17"/>
+      <param name="complete_blocks" value="partial_disallowed"/>
+      <output name="out_file1" file="cf_maf_to_bed.dat"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool converts every MAF block to an interval line (in BED format; scroll down for a description of the MAF and BED formats) describing the position of that alignment block within the corresponding genome.
+
+The interface for this tool contains two pages (steps): 
+
+ * **Step 1 of 2**. Choose multiple alignments from history to be converted to BED format.
+ * **Step 2 of 2**. Choose species from the alignment to be included in the output and specify how to deal with alignment blocks that lack one or more species:
+
+   *  **Choose species** - the tool reads the alignment provided during Step 1 and generates a list of species contained within that alignment. Using checkboxes you can specify the taxa to be included in the output (only the reference genome, shown in **bold**, is selected by default). If you select more than one species, more than one history item will be created.
+   *  **Choose to include/exclude blocks with missing species** - if an alignment block does not contain any one of the species you selected within the **Choose species** menu and this option is set to **exclude blocks with missing species**, then the coordinates of such a block **will not** be included in the output (see **Example 2** below).
+
+
+-----
+
+**Example 1**: **Include only reference genome** (hg18 in this case) and **include blocks with missing species**:
+
+For the following alignment::
+
+  ##maf version=1
+  a score=68686.000000
+  s hg18.chr20     56827368 75 +  62435964 GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC- 
+  s panTro2.chr20  56528685 75 +  62293572 GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC- 
+  s rheMac2.chr10  89144112 69 -  94855758 GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA------- 
+  s mm8.chr2      173910832 61 + 181976762 AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC------- 
+  s canFam2.chr24  46551822 67 +  50763139 CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C 
+
+  a score=10289.000000
+  s hg18.chr20    56827443 37 + 62435964 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG 
+  s panTro2.chr20 56528760 37 + 62293572 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG 
+  s rheMac2.chr10 89144181 37 - 94855758 ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG 
+
+the tool will create **a single** history item containing the following (**note** that field 4 is added to the output and is numbered iteratively: hg18_0, hg18_1 etc.)::
+
+  chr20    56827368    56827443   hg18_0   0   +
+  chr20    56827443    56827480   hg18_1   0   +
+
+-----
+
+**Example 2**: **Include hg18 and mm8** and **exclude blocks with missing species**:
+
+For the following alignment::
+
+  ##maf version=1
+  a score=68686.000000
+  s hg18.chr20     56827368 75 +  62435964 GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC- 
+  s panTro2.chr20  56528685 75 +  62293572 GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC- 
+  s rheMac2.chr10  89144112 69 -  94855758 GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA------- 
+  s mm8.chr2      173910832 61 + 181976762 AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC------- 
+  s canFam2.chr24  46551822 67 +  50763139 CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C 
+
+  a score=10289.000000
+  s hg18.chr20    56827443 37 + 62435964 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG 
+  s panTro2.chr20 56528760 37 + 62293572 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG 
+  s rheMac2.chr10 89144181 37 - 94855758 ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG 
+
+the tool will create **two** history items (one for hg18 and one for mm8) containing the following (**note** that both history items contain only one line describing the first alignment block. The second MAF block is not included in the output because it does not contain mm8):
+
+History item **1** (for hg18)::
+
+   chr20    56827368    56827443   hg18_0   0   +
+
+History item **2** (for mm8)::
+
+   chr2    173910832   173910893    mm8_0   0   +
+
+-------
+
+.. class:: infomark
+
+**About formats**
+
+**MAF format** multiple alignment format file. This format stores multiple alignments at the DNA level between entire genomes. 
+
+ - The .maf format is line-oriented. Each multiple alignment ends with a blank line.
+ - Each sequence in an alignment is on a single line.
+ - Lines starting with # are considered to be comments.
+ - Each multiple alignment is in a separate paragraph that begins with an "a" line and contains an "s" line for each sequence in the multiple alignment.
+ - Some MAF files may contain two optional line types: 
+
+   - An "i" line containing information about what is in the aligned species DNA before and after the immediately preceding "s" line; 
+   - An "e" line containing information about the size of the gap between the alignments that span the current block.
+
+**BED format** Browser Extensible Data format was designed at UCSC for displaying data tracks in the Genome Browser. It has three required fields and a number of additional optional ones:
+
+The first three BED fields (required) are::
+
+    1. chrom - The name of the chromosome (e.g. chr1, chrY_random).
+    2. chromStart - The starting position in the chromosome. (The first base in a chromosome is numbered 0.)
+    3. chromEnd - The ending position in the chromosome, plus 1 (i.e., a half-open interval).
+
+Additional (optional) fields are::
+
+    4. name - The name of the BED line.
+    5. score - A score between 0 and 1000.
+    6. strand - Defines the strand - either '+' or '-'.
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+    <code file="maf_to_bed_code.py"/>
+</tool>
+
diff --git a/tools/maf/maf_to_bed_code.py b/tools/maf/maf_to_bed_code.py
new file mode 100644
index 0000000..a30118d
--- /dev/null
+++ b/tools/maf/maf_to_bed_code.py
@@ -0,0 +1,19 @@
+def exec_after_process(app, inp_data, out_data, param_dict, tool, stdout, stderr):
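+    # Galaxy post-job hook: lines of the form "#FILE1<TAB><dbkey>" on the tool's
+    # stdout set the output dataset's dbkey and append it to the dataset name;
+    # all other stdout lines are collected into the dataset's info text.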
+    output_data = next(iter(out_data.values()))
+    new_stdout = ""
+    split_stdout = stdout.split("\n")
+    for line in split_stdout:
+        if line.startswith("#FILE1"):
+            fields = line.split("\t")
+            dbkey = fields[1]
+            output_data.dbkey = dbkey
+            output_data.name = "%s (%s)" % ( output_data.name, dbkey )
+            app.model.context.add( output_data )
+            app.model.context.flush()
+        else:
+            new_stdout = "%s\n%s" % ( new_stdout, line )
+    for data in output_data.creating_job.output_datasets:
+        data = data.dataset
+        data.info = new_stdout
+        app.model.context.add( data )
+        app.model.context.flush()
diff --git a/tools/maf/maf_to_fasta.xml b/tools/maf/maf_to_fasta.xml
new file mode 100644
index 0000000..ec2abfa
--- /dev/null
+++ b/tools/maf/maf_to_fasta.xml
@@ -0,0 +1,197 @@
+<tool id="MAF_To_Fasta1" name="MAF to FASTA" version="1.0.1">
+  <description>Converts a MAF formatted file to FASTA format</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">
+    #if $fasta_target_type.fasta_type == "multiple" #maf_to_fasta_multiple_sets.py $input1 $out_file1 $fasta_target_type.species $fasta_target_type.complete_blocks
+    #else                                           #maf_to_fasta_concat.py $fasta_target_type.species $input1 $out_file1
+    #end if#
+  </command>
+  <inputs>
+    <param format="maf" name="input1" type="data" label="MAF file to convert"/>
+    <conditional name="fasta_target_type">
+      <param name="fasta_type" type="select" label="Type of FASTA Output">
+        <option value="multiple" selected="true">Multiple Blocks</option>
+        <option value="concatenated">One Sequence per Species</option>
+      </param>
+      <when value="multiple">
+        <param name="species" type="select" label="Select species" display="checkboxes" multiple="true" help="checked taxa will be included in the output">
+          <options>
+            <filter type="data_meta" ref="input1" key="species" />
+          </options>
+        </param>
+        <param name="complete_blocks" type="select" label="Choose to">
+          <option value="partial_allowed">include blocks with missing species</option>
+          <option value="partial_disallowed">exclude blocks with missing species</option>
+        </param>
+      </when>
+      <when value="concatenated">
+        <param name="species" type="select" label="Species to extract" display="checkboxes" multiple="true">
+          <options>
+            <filter type="data_meta" ref="input1" key="species" />
+          </options>
+        </param>
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="fasta" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="3.maf" ftype="maf"/>
+      <param name="fasta_type" value="concatenated"/>
+      <param name="species" value="canFam1"/>
+      <output name="out_file1" file="cf_maf2fasta_concat.dat" ftype="fasta"/>
+    </test>
+    <test>
+      <param name="input1" value="4.maf" ftype="maf"/>
+      <param name="fasta_type" value="multiple"/>
+      <param name="species" value="hg17,panTro1,rheMac2,rn3,mm7,canFam2,bosTau2,dasNov1"/>
+      <param name="complete_blocks" value="partial_allowed"/>
+      <output name="out_file1" file="cf_maf2fasta_new.dat" ftype="fasta"/>
+    </test>
+  </tests>
+  <help>
+
+**Types of MAF to FASTA conversion**
+
+ * **Multiple Blocks** converts a single MAF block to a single FASTA block. For example, if you have 6 MAF blocks, they will be converted to 6 FASTA blocks.
+ * **One Sequence per Species** converts MAF blocks to a single aggregated FASTA block. For example, if you have 6 MAF blocks, they will be converted and concatenated into a single FASTA block.
+
+-------
+
+**What it does**
+
+This tool converts MAF blocks to FASTA format and concatenates them into a single FASTA block or outputs multiple FASTA blocks separated by empty lines.
+
+The interface for this tool contains two pages (steps):
+
+ * **Step 1 of 2**. Choose multiple alignments from history to be converted to FASTA format.
+ * **Step 2 of 2**. Choose the type of output as well as the species from the alignment to be included in the output.
+
+   Multiple Block output has additional options:
+
+   *  **Choose species** - the tool reads the alignment provided during Step 1 and generates a list of species contained within that alignment. Using checkboxes you can specify taxa to be included in the output (all species are selected by default).
+   *  **Choose to include/exclude blocks with missing species** - if an alignment block does not contain any one of the species you selected within the **Choose species** menu and this option is set to **exclude blocks with missing species**, then such a block **will not** be included in the output (see **Example 2** below). For example, if you want to extract human, mouse, and rat from a series of alignments and one of the blocks does not contain the mouse sequence, then this block will not be included in the output.
+
+
+-----
+
+**Example 1**:
+
+In the concatenated approach, the following alignment::
+
+  ##maf version=1
+  a score=68686.000000
+  s hg18.chr20     56827368 75 +  62435964 GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
+  s panTro2.chr20  56528685 75 +  62293572 GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
+  s rheMac2.chr10  89144112 69 -  94855758 GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------
+  s mm8.chr2      173910832 61 + 181976762 AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC-------
+  s canFam2.chr24  46551822 67 +  50763139 CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C
+
+  a score=10289.000000
+  s hg18.chr20    56827443 37 + 62435964 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  s panTro2.chr20 56528760 37 + 62293572 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  s rheMac2.chr10 89144181 37 - 94855758 ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG
+
+will be converted to (**note** that because mm8 (mouse) and canFam2 (dog) are absent from the second block, they are replaced with gaps after concatenation)::
+
+  >canFam2
+  CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C-------------------------------------
+  >hg18
+  GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  >mm8
+  AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC--------------------------------------------
+  >panTro2
+  GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  >rheMac2
+  GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG
+
+------
+
+**Example 2a** (Multiple Block Approach): **Include all species** and **include blocks with missing species**:
+
+The following alignment::
+
+  ##maf version=1
+  a score=68686.000000
+  s hg18.chr20     56827368 75 +  62435964 GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
+  s panTro2.chr20  56528685 75 +  62293572 GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
+  s rheMac2.chr10  89144112 69 -  94855758 GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------
+  s mm8.chr2      173910832 61 + 181976762 AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC-------
+  s canFam2.chr24  46551822 67 +  50763139 CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C
+
+  a score=10289.000000
+  s hg18.chr20    56827443 37 + 62435964 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  s panTro2.chr20 56528760 37 + 62293572 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  s rheMac2.chr10 89144181 37 - 94855758 ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG
+
+will be converted to::
+
+  >hg18.chr20(+):56827368-56827443|hg18_0
+  GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
+  >panTro2.chr20(+):56528685-56528760|panTro2_0
+  GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
+  >rheMac2.chr10(-):89144112-89144181|rheMac2_0
+  GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------
+  >mm8.chr2(+):173910832-173910893|mm8_0
+  AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC-------
+  >canFam2.chr24(+):46551822-46551889|canFam2_0
+  CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C
+
+  >hg18.chr20(+):56827443-56827480|hg18_1
+  ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  >panTro2.chr20(+):56528760-56528797|panTro2_1
+  ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  >rheMac2.chr10(-):89144181-89144218|rheMac2_1
+  ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG
+
+-----
+
+**Example 2b** (Multiple Block Approach): **Include hg18 and mm8** and **exclude blocks with missing species**:
+
+The following alignment::
+
+  ##maf version=1
+  a score=68686.000000
+  s hg18.chr20     56827368 75 +  62435964 GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
+  s panTro2.chr20  56528685 75 +  62293572 GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
+  s rheMac2.chr10  89144112 69 -  94855758 GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------
+  s mm8.chr2      173910832 61 + 181976762 AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC-------
+  s canFam2.chr24  46551822 67 +  50763139 CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C
+
+  a score=10289.000000
+  s hg18.chr20    56827443 37 + 62435964 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  s panTro2.chr20 56528760 37 + 62293572 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
+  s rheMac2.chr10 89144181 37 - 94855758 ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG
+
+will be converted to (**note** that the second MAF block, which does not have mm8, is not included in the output)::
+
+  >hg18.chr20(+):56827368-56827443|hg18_0
+  GACAGGGTGCATCTGGGAGGGCCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC
+  >mm8.chr2(+):173910832-173910893|mm8_0
+  AGAAGGATCCACCT---------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC------
+
+------
+
+.. class:: infomark
+
+**About formats**
+
+**MAF format** is the multiple alignment format. It stores multiple alignments at the DNA level between entire genomes.
+
+ - The .maf format is line-oriented. Each multiple alignment ends with a blank line.
+ - Each sequence in an alignment is on a single line.
+ - Lines starting with # are considered to be comments.
+ - Each multiple alignment is in a separate paragraph that begins with an "a" line and contains an "s" line for each sequence in the multiple alignment.
+ - Some MAF files may contain two optional line types:
+
+   - An "i" line containing information about what is in the aligned species DNA before and after the immediately preceding "s" line;
+   - An "e" line containing information about the size of the gap between the alignments that span the current block.
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
diff --git a/tools/maf/maf_to_fasta_concat.py b/tools/maf/maf_to_fasta_concat.py
new file mode 100755
index 0000000..9a97f7c
--- /dev/null
+++ b/tools/maf/maf_to_fasta_concat.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+"""
+Read a maf and output a single block fasta file, concatenating blocks
+
+usage %prog species1,species2 maf_file out_file
+"""
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+from bx.align import maf
+
+from galaxy.tools.util import maf_utilities
+
+
+def __main__():
+    try:
+        species = maf_utilities.parse_species_option( sys.argv[1] )
+    except Exception as e:
+        maf_utilities.tool_fail( "Error determining species value: %s" % e )
+    try:
+        input_filename = sys.argv[2]
+    except Exception as e:
+        maf_utilities.tool_fail( "Error reading MAF filename: %s" % e )
+    try:
+        file_out = open( sys.argv[3], 'w' )
+    except Exception as e:
+        maf_utilities.tool_fail( "Error opening file for output: %s" % e )
+
+    if species:
+        print("Restricted to species: %s" % ', '.join( species ))
+    else:
+        print("Not restricted to species.")
+
+    if not species:
+        try:
+            species = maf_utilities.get_species_in_maf( input_filename )
+        except Exception as e:
+            maf_utilities.tool_fail( "Error determining species in input MAF: %s" % e )
+
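+    # One concatenated FASTA record per species: for each alignment block write
+    # that species' aligned text, or all-gap padding of the block's full width
+    # when the species is absent, so every record keeps the same aligned length.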
+    for spec in species:
+        file_out.write( ">" + spec + "\n" )
+        try:
+            for start_block in maf.Reader( open( input_filename, 'r' ) ):
+                for block in maf_utilities.iter_blocks_split_by_species( start_block ):
+                    block.remove_all_gap_columns()  # remove extra gaps
+                    component = block.get_component_by_src_start( spec )  # blocks only have one occurrence of a particular species, so this is safe
+                    if component:
+                        file_out.write( component.text )
+                    else:
+                        file_out.write( "-" * block.text_size )
+        except Exception as e:
+            maf_utilities.tool_fail( "Your MAF file appears to be malformed: %s" % e )
+        file_out.write( "\n" )
+    file_out.close()
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_to_fasta_multiple_sets.py b/tools/maf/maf_to_fasta_multiple_sets.py
new file mode 100755
index 0000000..54c30e5
--- /dev/null
+++ b/tools/maf/maf_to_fasta_multiple_sets.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+"""
+Read a maf and output a multiple block fasta file.
+"""
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+
+from bx.align import maf
+
+from galaxy.tools.util import maf_utilities
+
+
+def __main__():
+    try:
+        maf_reader = maf.Reader( open( sys.argv[1] ) )
+    except Exception as e:
+        maf_utilities.tool_fail( "Error opening input MAF: %s" % e )
+    try:
+        file_out = open( sys.argv[2], 'w' )
+    except Exception as e:
+        maf_utilities.tool_fail( "Error opening file for output: %s" % e )
+    try:
+        species = maf_utilities.parse_species_option( sys.argv[3] )
+        if species:
+            num_species = len( species )
+        else:
+            num_species = 0
+    except Exception as e:
+        maf_utilities.tool_fail( "Error determining species value: %s" % e )
+    try:
+        partial = sys.argv[4]
+    except Exception as e:
+        maf_utilities.tool_fail( "Error determining keep partial value: %s" % e )
+
+    if species:
+        print("Restricted to species: %s" % ', '.join( species ))
+    else:
+        print("Not restricted to species.")
+
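+    # Each alignment block becomes one FASTA "paragraph": every component in the
+    # block is written as a record whose header carries a
+    # <species>_<block>_<occurrence> suffix, and blocks are separated by a blank line.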
+    for block_num, block in enumerate( maf_reader ):
+        if species:
+            block = block.limit_to_species( species )
+            if len( maf_utilities.get_species_in_block( block ) ) < num_species and partial == "partial_disallowed":
+                continue
+        spec_counts = {}
+        for component in block.components:
+            spec, chrom = maf_utilities.src_split( component.src )
+            if spec not in spec_counts:
+                spec_counts[ spec ] = 0
+            else:
+                spec_counts[ spec ] += 1
+            file_out.write( "%s\n" % maf_utilities.get_fasta_header( component, { 'block_index': block_num, 'species': spec, 'sequence_index': spec_counts[ spec ] }, suffix="%s_%i_%i" % ( spec, block_num, spec_counts[ spec ] ) ) )
+            file_out.write( "%s\n" % component.text )
+        file_out.write( "\n" )
+    file_out.close()
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_to_interval.py b/tools/maf/maf_to_interval.py
new file mode 100644
index 0000000..8cbddac
--- /dev/null
+++ b/tools/maf/maf_to_interval.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+
+"""
+Read a maf and output intervals for specified list of species.
+"""
+import os
+import sys
+
+from bx.align import maf
+
+from galaxy.tools.util import maf_utilities
+
+
+def __main__():
+    input_filename = sys.argv[1]
+    output_filename = sys.argv[2]
+    output_id = sys.argv[3]
+    # where to store files that become additional output
+    database_tmp_dir = sys.argv[4]
+    primary_spec = sys.argv[5]
+    species = sys.argv[6].split( ',' )
+    all_species = sys.argv[7].split( ',' )
+    partial = sys.argv[8]
+    keep_gaps = sys.argv[9]
+    out_files = {}
+
+    if "None" in species:
+        species = []
+
+    if primary_spec not in species:
+        species.append( primary_spec )
+    if primary_spec not in all_species:
+        all_species.append( primary_spec )
+
+    all_species.sort()
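+    # The species matching the input's dbkey is written to the primary output;
+    # every other selected species goes to a "primary_<id>_<species>_visible_..."
+    # file in the supplied directory, which Galaxy's legacy multiple-output
+    # collection mechanism turns into an extra history item (one per species).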
+    for spec in species:
+        if spec == primary_spec:
+            out_files[ spec ] = open( output_filename, 'wb+' )
+        else:
+            out_files[ spec ] = open( os.path.join( database_tmp_dir, 'primary_%s_%s_visible_interval_%s' % ( output_id, spec, spec ) ), 'wb+' )
+        out_files[ spec ].write( '#chrom\tstart\tend\tstrand\tscore\tname\t%s\n' % ( '\t'.join( all_species ) ) )
+    num_species = len( all_species )
+
+    file_in = open( input_filename, 'r' )
+    maf_reader = maf.Reader( file_in )
+
+    for i, m in enumerate( maf_reader ):
+        for j, block in enumerate( maf_utilities.iter_blocks_split_by_species( m ) ):
+            if len( block.components ) < num_species and partial == "partial_disallowed":
+                continue
+            sequences = {}
+            for c in block.components:
+                spec, chrom = maf_utilities.src_split( c.src )
+                if keep_gaps == 'remove_gaps':
+                    sequences[ spec ] = c.text.replace( '-', '' )
+                else:
+                    sequences[ spec ] = c.text
+            sequences = '\t'.join( [ sequences.get( _, '' ) for _ in all_species ] )
+            for spec in species:
+                c = block.get_component_by_src_start( spec )
+                if c is not None:
+                    spec2, chrom = maf_utilities.src_split( c.src )
+                    assert spec2 == spec, Exception( 'Species name inconsistency found in component: %s != %s' % ( spec, spec2 ) )
+                    out_files[ spec ].write( "%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % ( chrom, c.forward_strand_start, c.forward_strand_end, c.strand, m.score, "%s_%s_%s" % (spec, i, j), sequences ) )
+    file_in.close()
+    for file_out in out_files.values():
+        file_out.close()
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/maf/maf_to_interval.xml b/tools/maf/maf_to_interval.xml
new file mode 100644
index 0000000..13c98fc
--- /dev/null
+++ b/tools/maf/maf_to_interval.xml
@@ -0,0 +1,131 @@
+<tool id="MAF_To_Interval1" name="MAF to Interval" force_history_refresh="True" version="1.0.0">
+  <description>Converts a MAF formatted file to the Interval format</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">maf_to_interval.py "${ input1 }" "${ out_file1 }" "${ out_file1.id }" "." "${ input1.dbkey }" "${ species }" "${ input1.metadata.species }" "${ complete_blocks }" "${ remove_gaps }"</command>
+  <inputs>
+    <param format="maf" name="input1" type="data" label="MAF file to convert"/>
+    <param name="species" type="select" label="Select additional species" display="checkboxes" multiple="true" help="The species matching the dbkey of the alignment is always included. A separate history item will be created for each species.">
+      <options>
+        <filter type="data_meta" ref="input1" key="species" />
+        <filter type="remove_value" meta_ref="input1" key="dbkey" />
+      </options>
+    </param>
+    <param name="complete_blocks" type="select" label="Exclude blocks which have a species missing">
+      <option value="partial_allowed">include blocks with missing species</option>
+      <option value="partial_disallowed">exclude blocks with missing species</option>
+    </param>
+    <param name="remove_gaps" type="select" label="Remove Gap characters from sequences">
+      <option value="keep_gaps">keep gaps</option>
+      <option value="remove_gaps">remove gaps</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="interval" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="4.maf" dbkey="hg17"/>
+      <param name="complete_blocks" value="partial_disallowed"/>
+      <param name="remove_gaps" value="keep_gaps"/>
+      <param name="species" value="panTro1" />
+      <output name="out_file1" file="maf_to_interval_out_hg17.interval"/>
+      <output name="out_file1" file="maf_to_interval_out_panTro1.interval"/>
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool converts every MAF block to a set of genomic intervals describing the position of that alignment block within a corresponding genome. Sequences from aligning species are also included in the output.
+
+The interface for this tool contains several options: 
+
+ * **MAF file to convert**. Choose multiple alignments from history to be converted to interval format.
+ * **Choose species**. Choose additional species from the alignment to be included in the output.
+ * **Exclude blocks which have a species missing**. If an alignment block does not contain any one of the species found in the alignment set and this option is set to **exclude blocks with missing species**, then the coordinates of such a block **will not** be included in the output (see **Example 2** below).
+ * **Remove Gap characters from sequences**. Gaps can be removed from sequences before they are output.
+
+
+-----
+
+**Example 1**: **Include only reference genome** (hg18 in this case) and **include blocks with missing species**:
+
+For the following alignment::
+
+  ##maf version=1
+  a score=68686.000000
+  s hg18.chr20     56827368 75 +  62435964 GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC- 
+  s panTro2.chr20  56528685 75 +  62293572 GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC- 
+  s rheMac2.chr10  89144112 69 -  94855758 GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA------- 
+  s mm8.chr2      173910832 61 + 181976762 AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC------- 
+  s canFam2.chr24  46551822 67 +  50763139 CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C 
+
+  a score=10289.000000
+  s hg18.chr20    56827443 37 + 62435964 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG 
+  s panTro2.chr20 56528760 37 + 62293572 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG 
+  s rheMac2.chr10 89144181 37 - 94855758 ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG 
+
+the tool will create **a single** history item containing the following. **Note** that the name field is numbered iteratively (hg18_0_0, hg18_1_0, etc.), where the first number is the block number and the second is the occurrence within the block (if a species appears twice in a block, that interval is repeated). Sequences for each species are included in the order specified in the header; the field is left empty when no sequence is available for that species::
+
+  #chrom	start	end	strand	score	name	canFam2	hg18	mm8	panTro2	rheMac2
+  chr20	56827368	56827443	+	68686.0	hg18_0_0	CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C	GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-	AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC-------	GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-	GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------
+  chr20	56827443	56827480	+	10289.0	hg18_1_0		ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG		ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG	ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG
+
+
+-----
+
+**Example 2**: **Include hg18 and mm8** and **exclude blocks with missing species**:
+
+For the following alignment::
+
+  ##maf version=1
+  a score=68686.000000
+  s hg18.chr20     56827368 75 +  62435964 GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC- 
+  s panTro2.chr20  56528685 75 +  62293572 GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC- 
+  s rheMac2.chr10  89144112 69 -  94855758 GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA------- 
+  s mm8.chr2      173910832 61 + 181976762 AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC------- 
+  s canFam2.chr24  46551822 67 +  50763139 CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C 
+
+  a score=10289.000000
+  s hg18.chr20    56827443 37 + 62435964 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG 
+  s panTro2.chr20 56528760 37 + 62293572 ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG 
+  s rheMac2.chr10 89144181 37 - 94855758 ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG 
+
+the tool will create **two** history items (one for hg18 and one for mm8) containing the following (**note** that both history items contain only one line describing the first alignment block. The second MAF block is not included in the output because it does not contain mm8):
+
+History item **1** (for hg18)::
+
+   #chrom	start	end	strand	score	name	canFam2	hg18	mm8	panTro2	rheMac2
+   chr20	56827368	56827443	+	68686.0	hg18_0_0	CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C	GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-	AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC-------	GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-	GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------
+
+
+History item **2** (for mm8)::
+
+   #chrom	start	end	strand	score	name	canFam2	hg18	mm8	panTro2	rheMac2
+   chr2	173910832	173910893	+	68686.0	mm8_0_0	CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C	GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-	AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC-------	GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-	GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------
+
+
+-------
+
+.. class:: infomark
+
+**About formats**
+
+**MAF format** is the multiple alignment format. It stores multiple alignments at the DNA level between entire genomes.
+
+ - The .maf format is line-oriented. Each multiple alignment ends with a blank line.
+ - Each sequence in an alignment is on a single line.
+ - Lines starting with # are considered to be comments.
+ - Each multiple alignment is in a separate paragraph that begins with an "a" line and contains an "s" line for each sequence in the multiple alignment.
+ - Some MAF files may contain two optional line types: 
+
+   - An "i" line containing information about what is in the aligned species DNA before and after the immediately preceding "s" line; 
+   - An "e" line containing information about the size of the gap between the alignments that span the current block.
+
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
+
diff --git a/tools/maf/vcf_to_maf_customtrack.py b/tools/maf/vcf_to_maf_customtrack.py
new file mode 100644
index 0000000..2c49819
--- /dev/null
+++ b/tools/maf/vcf_to_maf_customtrack.py
@@ -0,0 +1,163 @@
+# Dan Blankenberg
+from __future__ import print_function
+
+import sys
+from optparse import OptionParser
+
+import bx.align.maf
+import galaxy_utils.sequence.vcf
+from six import Iterator
+
+UNKNOWN_NUCLEOTIDE = '*'
+
+
+class PopulationVCFParser( Iterator ):
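+    """Iterate over a VCF reader, yielding ( variant, [ ( '<population>_<alt allele #>.<row #>', allele ), ... ] ) per row."""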
+    def __init__( self, reader, name ):
+        self.reader = reader
+        self.name = name
+        self.counter = 0
+
+    def __next__( self ):
+        rval = []
+        vc = next(self.reader)
+        for i, allele in enumerate( vc.alt ):
+            rval.append( ( '%s_%i.%i' % ( self.name, i + 1, self.counter + 1 ), allele ) )
+        self.counter += 1
+        return ( vc, rval )
+
+    def __iter__( self ):
+        # Returning self (rather than a generator that re-raises StopIteration)
+        # keeps iteration correct under PEP 479 / Python 3.7+.
+        return self
+
+
+class SampleVCFParser( Iterator ):
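+    """Iterate over a VCF reader, yielding ( variant, [ ( '<sample>_<allele #>.<row #>', allele ), ... ] ) per genotyped sample allele."""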
+    def __init__( self, reader ):
+        self.reader = reader
+        self.counter = 0
+
+    def __next__( self ):
+        rval = []
+        vc = next(self.reader)
+        alleles = [ vc.ref ] + vc.alt
+
+        if 'GT' in vc.format:
+            gt_index = vc.format.index( 'GT' )
+            for sample_name, sample_value in zip( vc.sample_names, vc.sample_values ):
+                gt_indexes = []
+                for i in sample_value[ gt_index ].replace( '|', '/' ).replace( '\\', '/' ).split( '/' ):  # Do we need to consider phase here?
+                    try:
+                        gt_indexes.append( int( i ) )
+                    except:
+                        gt_indexes.append( None )
+                for i, allele_i in enumerate( gt_indexes ):
+                    if allele_i is not None:
+                        rval.append( ( '%s_%i.%i' % ( sample_name, i + 1, self.counter + 1 ), alleles[ allele_i ] ) )
+        self.counter += 1
+        return ( vc, rval )
+
+    def __iter__( self ):
+        # See PopulationVCFParser.__iter__: return self for PEP 479 safety.
+        return self
+
+
+def main():
+    usage = "usage: %prog [options] output_file dbkey inputfile pop_name"
+    parser = OptionParser( usage=usage )
+    parser.add_option( "-p", "--population", action="store_true", dest="population", default=False, help="Create MAF on a per population basis")
+    parser.add_option( "-s", "--sample", action="store_true", dest="sample", default=False, help="Create MAF on a per sample basis")
+    parser.add_option( "-n", "--name", dest="name", default='Unknown Custom Track', help="Name for Custom Track")
+    parser.add_option( "-g", "--galaxy", action="store_true", dest="galaxy", default=False, help="Tool is being executed by Galaxy (adds extra error messaging).")
+    ( options, args ) = parser.parse_args()
+
+    if len( args ) < 3:
+        if options.galaxy:
+            print("It appears that you forgot to specify an input VCF file, click 'Add new VCF...' to add at least input.\n", file=sys.stderr)
+        parser.error( "Need to specify an output file, a dbkey and at least one input file" )
+
+    if not ( options.population ^ options.sample ):
+        parser.error( 'You must specify either a per population conversion or a per sample conversion, but not both' )
+
+    out = open( args.pop(0), 'wb' )
+    out.write( 'track name="%s" visibility=pack\n' % options.name.replace( "\"", "'" ) )
+
+    maf_writer = bx.align.maf.Writer( out )
+
+    dbkey = args.pop(0)
+
+    vcf_files = []
+    if options.population:
+        i = 0
+        while args:
+            filename = args.pop( 0 )
+            pop_name = args.pop( 0 ).replace( ' ', '_' )
+            if not pop_name:
+                pop_name = 'population_%i' % ( i + 1 )
+            vcf_files.append( PopulationVCFParser( galaxy_utils.sequence.vcf.Reader( open( filename ) ), pop_name  ) )
+            i += 1
+    else:
+        while args:
+            filename = args.pop( 0 )
+            vcf_files.append( SampleVCFParser( galaxy_utils.sequence.vcf.Reader( open( filename ) ) ) )
+
+    non_spec_skipped = 0
+    for vcf_file in vcf_files:
+        for vc, variants in vcf_file:
+            num_ins = 0
+            num_dels = 0
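+            # VCFv3.3-style indel alleles: "D<n>" deletes <n> reference bases and
+            # "I<seq>" inserts <seq>; track the widest deletion/insertion so the
+            # MAF columns below can be padded to a common width.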
+            for variant_name, variant_text in variants:
+                if 'D' in variant_text:
+                    num_dels = max( num_dels, int( variant_text[1:] ) )
+                elif 'I' in variant_text:
+                    num_ins = max( num_ins, len( variant_text ) - 1 )
+
+            alignment = bx.align.maf.Alignment()
+            ref_text = vc.ref + '-' * num_ins + UNKNOWN_NUCLEOTIDE * ( num_dels - len( vc.ref ) )
+            start_pos = vc.pos - 1
+            if num_dels and start_pos:
+                ref_text = UNKNOWN_NUCLEOTIDE + ref_text
+                start_pos -= 1
+            alignment.add_component( bx.align.maf.Component(
+                src='%s.%s%s' % ( dbkey, ("chr" if not vc.chrom.startswith("chr") else ""), vc.chrom ),
+                start=start_pos, size=len( ref_text.replace( '-', '' ) ),
+                strand='+', src_size=start_pos + len( ref_text ),
+                text=ref_text ) )
+            for variant_name, variant_text in variants:
+                # FIXME:
+                # skip non-spec. compliant data, see: http://1000genomes.org/wiki/doku.php?id=1000_genomes:analysis:vcf3.3 for format spec
+                # this check is due to data having indels not represented in the published format spec,
+                # e.g. 1000 genomes pilot 1 indel data: ftp://ftp-trace.ncbi.nih.gov/1000genomes/ftp/pilot_data/release/2010_03/pilot1/indels/CEU.SRP000031.2010_03.indels.sites.vcf.gz
+                if variant_text and variant_text[0] in [ '-', '+' ]:
+                    non_spec_skipped += 1
+                    continue
+
+                # do we need a left padding unknown nucleotide (do we have deletions)?
+                if num_dels and start_pos:
+                    var_text = UNKNOWN_NUCLEOTIDE
+                else:
+                    var_text = ''
+                if 'D' in variant_text:
+                    cur_num_del = int( variant_text[1:] )
+                    pre_del = min( len( vc.ref ), cur_num_del )
+                    post_del = cur_num_del - pre_del
+                    var_text = var_text + '-' * pre_del + '-' * num_ins + '-' * post_del
+                    var_text = var_text + UNKNOWN_NUCLEOTIDE * ( len( ref_text ) - len( var_text ) )
+                elif 'I' in variant_text:
+                    cur_num_ins = len( variant_text ) - 1
+                    var_text = var_text + vc.ref + variant_text[1:] + '-' * ( num_ins - cur_num_ins ) + UNKNOWN_NUCLEOTIDE * max( 0, ( num_dels - 1 ) )
+                else:
+                    var_text = var_text + variant_text + '-' * num_ins + UNKNOWN_NUCLEOTIDE * ( num_dels - len( vc.ref ) )
+                alignment.add_component( bx.align.maf.Component(
+                    src=variant_name, start=0,
+                    size=len( var_text.replace( '-', '' ) ), strand='+',
+                    src_size=len( var_text.replace( '-', '' ) ),
+                    text=var_text ) )
+            maf_writer.write( alignment )
+
+    maf_writer.close()
+
+    if non_spec_skipped:
+        print('Skipped %i non-specification compliant indels.' % non_spec_skipped)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/maf/vcf_to_maf_customtrack.xml b/tools/maf/vcf_to_maf_customtrack.xml
new file mode 100644
index 0000000..bebf04e
--- /dev/null
+++ b/tools/maf/vcf_to_maf_customtrack.xml
@@ -0,0 +1,131 @@
+<tool id="vcf_to_maf_customtrack1" name="VCF to MAF Custom Track" version="1.0.0">
+  <description>for display at UCSC</description>
+  <macros>
+      <import>macros.xml</import>
+  </macros>
+  <command interpreter="python">vcf_to_maf_customtrack.py '$out_file1'
+    #if $vcf_source_type.vcf_file
+    '${vcf_source_type.vcf_file[0].vcf_input.dbkey}'
+    #else
+    '?'
+    #end if
+    ${vcf_source_type.vcf_source} -n '$track_name'
+    #for $vcf_repeat in $vcf_source_type.vcf_file
+    '${vcf_repeat.vcf_input}'
+    #if $vcf_source_type.vcf_source == '-p'
+      '${vcf_repeat.population_name}'
+    #end if
+    #end for
+    -g
+  </command>
+  <inputs>
+    <param name="track_name" type="text" label="Custom Track Name" value="Galaxy Custom Track" size="30" />
+    <conditional name="vcf_source_type">
+      <param name="vcf_source" type="select" label="VCF Source Source Type">
+        <option value="-p" selected="true">Per Population (file)</option>
+        <option value="-s">Per Sample</option>
+      </param>
+      <when value="-p">
+        <repeat name="vcf_file" title="VCF population file" min="1">
+          <param format="tabular" name="vcf_input" type="data" label="VCF file"/>
+          <param name="population_name" type="text" label="Name for this population" value=""/>
+        </repeat>
+      </when>
+      <when value="-s">
+        <repeat name="vcf_file" title="VCF sample file" min="1">
+          <param format="tabular" name="vcf_input" type="data" label="VCF file"/>
+          <!-- add column count validator >= 8? -->
+        </repeat>
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="mafcustomtrack" name="out_file1" />
+  </outputs>
+<!--  <tests>
+    <test>
+      <param name="track_name" value="Galaxy Custom Track"/>
+      <param name="vcf_source" value="Per Population"/>
+      <param name="vcf_input" value="vcf_to_maf_in.vcf" ftype="tabular"/>
+      <param name="population_name" value=""/>
+      <output name="out_file1" file="vcf_to_maf_population_out.mafcustomtrack"/>
+    </test>
+    <test>
+      <param name="track_name" value="Galaxy Custom Track"/>
+      <param name="vcf_source" value="Per Sample"/>
+      <param name="vcf_input" value="vcf_to_maf_in.vcf" ftype="tabular"/>
+      <output name="out_file1" file="vcf_to_maf_sample_out.mafcustomtrack"/>
+    </test>
+  </tests> -->
+  <help>
+**What it does**
+
+This tool converts a Variant Call Format (VCF) file into a Multiple Alignment Format (MAF) custom track file suitable for display in genome browsers.
+
+This file should be used for display purposes only (e.g. as a UCSC Custom Track). Performing an analysis using the output created by this tool as input is not recommended; use the source VCF file when performing an analysis.
+
+*Unknown nucleotides* are represented as '*', as required for the display to draw properly; these include, for example, reference bases that appear before a deletion and are not available without querying the original reference sequence.
+
+**Example**
+
+Starting with a VCF::
+
+  ##fileformat=VCFv3.3
+  ##fileDate=20090805
+  ##source=myImputationProgramV3.1
+  ##reference=1000GenomesPilot-NCBI36
+  ##phasing=partial
+  ##INFO=NS,1,Integer,"Number of Samples With Data"
+  ##INFO=DP,1,Integer,"Total Depth"
+  ##INFO=AF,-1,Float,"Allele Frequency"
+  ##INFO=AA,1,String,"Ancestral Allele"
+  ##INFO=DB,0,Flag,"dbSNP membership, build 129"
+  ##INFO=H2,0,Flag,"HapMap2 membership"
+  ##FILTER=q10,"Quality below 10"
+  ##FILTER=s50,"Less than 50% of samples have data"
+  ##FORMAT=GT,1,String,"Genotype"
+  ##FORMAT=GQ,1,Integer,"Genotype Quality"
+  ##FORMAT=DP,1,Integer,"Read Depth"
+  ##FORMAT=HQ,2,Integer,"Haplotype Quality"
+  #CHROM  POS ID  REF ALT QUAL    FILTER  INFO    FORMAT  NA00001 NA00002 NA00003
+  20  14370   rs6054257   G   A   29  0   NS=3;DP=14;AF=0.5;DB;H2 GT:GQ:DP:HQ 0|0:48:1:51,51  1|0:48:8:51,51  1/1:43:5:-1,-1
+  20  17330   .   T   A   3   q10 NS=3;DP=11;AF=0.017 GT:GQ:DP:HQ 0|0:49:3:58,50  0|1:3:5:65,3    0/0:41:3:-1,-1
+  20  1110696 rs6040355   A   G,T 67  0   NS=2;DP=10;AF=0.333,0.667;AA=T;DB   GT:GQ:DP:HQ 1|2:21:6:23,27  2|1:2:0:18,2    2/2:35:4:-1,-1
+  20  1230237 .   T   .   47  0   NS=3;DP=13;AA=T GT:GQ:DP:HQ 0|0:54:7:56,60  0|0:48:4:51,51  0/0:61:2:-1,-1
+  20  1234567 microsat1   G   D4,IGA  50  0   NS=3;DP=9;AA=G  GT:GQ:DP    0/1:35:4    0/2:17:2    1/1:40:3
+  
+
+
+
+Under the following conditions: **VCF Source type:** *Per Population (file)*, **Name for this population:** *CHB+JPT*
+Results in the following MAF custom track::
+
+  track name="Galaxy Custom Track" visibility=pack
+  ##maf version=1
+  a score=0
+  s hg18.chr20  14369 1 + 14370 G 
+  s CHB+JPT_1.1     0 1 +     1 A 
+  
+  a score=0
+  s hg18.chr20  17329 1 + 17330 T 
+  s CHB+JPT_1.2     0 1 +     1 A 
+  
+  a score=0
+  s hg18.chr20  1110695 1 + 1110696 A 
+  s CHB+JPT_1.3       0 1 +       1 G 
+  s CHB+JPT_2.3       0 1 +       1 T 
+  
+  a score=0
+  s hg18.chr20  1230236 1 + 1230237 T 
+  s CHB+JPT_1.4       0 1 +       1 . 
+  
+  a score=0
+  s hg18.chr20  1234565 5 + 1234572 *G--*** 
+  s CHB+JPT_1.5       0 1 +       1 *------ 
+  s CHB+JPT_2.5       0 7 +       7 *GGA*** 
+  
+@HELP_CITATIONS@
+    </help>
+    <expand macro="citations" />
+</tool>
+
diff --git a/tools/meme/fimo.xml b/tools/meme/fimo.xml
new file mode 100644
index 0000000..199c3bb
--- /dev/null
+++ b/tools/meme/fimo.xml
@@ -0,0 +1,238 @@
+<tool id="meme_fimo" name="FIMO" version="0.0.1">
+  <requirements><requirement type="package">meme</requirement></requirements>
+  <description>- Find Individual Motif Occurrences</description>
+  <command interpreter="python">fimo_wrapper.py 'fimo --o "${$html_outfile.files_path}" --verbosity "1"
+  
+  #if str( $options_type.options_type_selector ) == 'advanced':
+  --max-seq-length "${options_type.max_seq_length}" 
+  --max-stored-scores "${options_type.max_stored_scores }" 
+  --motif-pseudo "${options_type.motif_pseudo}" 
+  ${options_type.norc} 
+  --output-pthresh "${options_type.output_pthresh}" 
+
+  
+  #for $motif in $options_type.motifs:
+    --motif "${motif.motif}"
+  #end for
+  
+  #if str( $options_type.bgfile_type.bgfile_type_selector ) == 'motif-file':
+    --bgfile "motif-file"
+  #elif str( $options_type.bgfile_type.bgfile_type_selector ) == 'bgfile':
+    --bgfile "${options_type.bgfile_type.bgfile}"
+  #end if
+  
+  #if str( $options_type.qvalue_type.qvalue_type_selector ) == 'no-qvalue':
+    --no-qvalue
+  #else:
+    --output-qthresh "${options_type.qvalue_type.output_qthresh}"
+  #end if
+  #end if
+  
+  "${input_motifs}" 
+  
+  #if str( $fasta_type.fasta_type_selector ) == 'history':
+    "${fasta_type.input_database}"
+  #else:
+    "${fasta_type.input_database.fields.path}"
+  #end if
+
+  '
+  
+  '${html_outfile.files_path}'
+  
+  '${html_outfile}'
+  
+  '${interval_outfile}'
+  
+  '${txt_outfile}'
+  
+  '${xml_outfile}'
+  
+  '${gff_outfile}'
+    
+  </command>
+  <inputs>
+    <param format="memexml" name="input_motifs" type="data" label="'MEME output' formatted file"/>
+    
+    <conditional name="fasta_type">
+      <param name="fasta_type_selector" type="select" label="Source for sequence to search">
+        <option value="cached">Locally Cached sequences</option>
+        <option value="history" selected="true">Sequences from your history</option>
+      </param>
+      <when value="cached">
+        <param name="input_database" type="select" label="Genome to search">
+          <options from_data_table="all_fasta">
+          </options>
+        </param>
+      </when>
+      <when value="history">
+         <param format="fasta" name="input_database" type="data" label="Sequences"/>
+      </when>
+    </conditional>
+    
+      <conditional name="options_type">
+        <param name="options_type_selector" type="select" label="Options Configuration">
+          <option value="basic" selected="true">Basic</option>
+          <option value="advanced">Advanced</option>
+        </param>
+        <when value="basic">
+          <!-- do nothing here -->
+        </when>
+        <when value="advanced">
+    
+    <conditional name="bgfile_type">
+      <param name="bgfile_type_selector" type="select" label="Background file type">
+        <option value="motif-file">Use Frequencies from Motif File</option>
+        <option value="default" selected="true">Use frequencies from non-redundant database (default)</option>
+        <option value="bgfile">Use Frequencies from Background File</option>
+      </param>
+      <when value="motif-file">
+      <!-- do nothing here -->
+      </when>
+      <when value="default">
+      <!-- do nothing here -->
+      </when>
+      <when value="bgfile">
+        <param name="bgfile" type="data" format="txt" optional="True" label="Background Model" />
+      </when>
+    </conditional>
+    
+    <repeat name="motifs" title="Limit to specified motif">
+      <param name="motif" type="text" value="" label="Specify motif by id" />
+    </repeat>
+    
+    <param name="max_seq_length" type="integer" value="250000000" label="Maximum input sequence length" />
+    <param name="max_stored_scores" type="integer" value="100000" label="Maximum score count to store" />
+    <param name="motif_pseudo" type="float" value="0.1" label="Pseudocount to add to counts in motif matrix" />
+    <param name="norc" label="Do not check reverse complement" type="boolean" truevalue="--norc" falsevalue="" checked="False"/>
+    <param name="output_pthresh" type="float" value="1e-4" label="p-value threshold" />
+    
+    <conditional name="qvalue_type">
+      <param name="qvalue_type_selector" type="select" label="q-value options">
+        <option value="no-qvalue">Do not compute q-value</option>
+        <option value="q-value" selected="true">Compute q-value</option>
+      </param>
+      <when value="no-qvalue">
+      <!-- do nothing here -->
+      </when>
+      <when value="q-value">
+        <param name="output_qthresh" type="float" value="1.0" label="q-value threshold" />
+      </when>
+    </conditional>
+    
+      </when>
+    </conditional>
+    
+    <param name="non_commercial_use" label="I certify that I am not using this tool for commercial purposes." type="boolean" truevalue="NON_COMMERCIAL_USE" falsevalue="COMMERCIAL_USE" checked="False">
+      <validator type="expression" message="This tool is only available for non-commercial use.">value == True</validator>
+    </param>
+  
+  </inputs>
+  <outputs>
+    <data format="html" name="html_outfile" label="${tool.name} on ${on_string} (html)">
+      <actions>
+        <conditional name="fasta_type.fasta_type_selector">
+          <when value="cached">
+            <action type="metadata" name="dbkey">
+              <option type="from_data_table" name="all_fasta" column="1" offset="0">
+                <filter type="param_value" column="0" value="seq" keep="True"/>
+                <filter type="param_value" ref="fasta_type.input_database" column="1"/>
+              </option>
+            </action>
+          </when>
+        </conditional>
+      </actions>
+    </data>
+    <data format="tabular" name="txt_outfile" label="${tool.name} on ${on_string} (text)">
+      <actions>
+        <conditional name="fasta_type.fasta_type_selector">
+          <when value="cached">
+            <action type="metadata" name="dbkey">
+              <option type="from_data_table" name="all_fasta" column="1" offset="0">
+                <filter type="param_value" ref="fasta_type.input_database" column="0"/>
+              </option>
+            </action>
+          </when>
+        </conditional>
+      </actions>
+    </data>
+    <data format="tabular" name="gff_outfile" label="${tool.name} on ${on_string} (almost-gff)">
+      <actions>
+        <conditional name="fasta_type.fasta_type_selector">
+          <when value="cached">
+            <action type="metadata" name="dbkey">
+              <option type="from_data_table" name="all_fasta" column="1" offset="0">
+                <filter type="param_value" ref="fasta_type.input_database" column="0"/>
+              </option>
+            </action>
+          </when>
+        </conditional>
+      </actions>
+    </data>
+    <data format="cisml" name="xml_outfile" label="${tool.name} on ${on_string} (xml)">
+      <actions>
+        <conditional name="fasta_type.fasta_type_selector">
+          <when value="cached">
+            <action type="metadata" name="dbkey">
+              <option type="from_data_table" name="all_fasta" column="1" offset="0">
+                <filter type="param_value" ref="fasta_type.input_database" column="0"/>
+              </option>
+            </action>
+          </when>
+        </conditional>
+      </actions>
+    </data>
+    <data format="interval" name="interval_outfile" label="${tool.name} on ${on_string} (interval)">
+      <actions>
+        <conditional name="fasta_type.fasta_type_selector">
+          <when value="cached">
+            <action type="metadata" name="dbkey">
+              <option type="from_data_table" name="all_fasta" column="1" offset="0">
+                <filter type="param_value" ref="fasta_type.input_database" column="0"/>
+              </option>
+            </action>
+          </when>
+        </conditional>
+      </actions>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input_motifs" value="meme/meme/meme_output_xml_1.xml" ftype="memexml"/>
+      <param name="fasta_type_selector" value="history"/>
+      <param name="input_database" value="phiX.fasta" ftype="fasta"/>
+      <param name="options_type_selector" value="basic"/>
+      <param name="non_commercial_use" value="True"/>
+      <output name="html_outfile" file="meme/fimo/fimo_output_html_1.html" lines_diff="12"/>
+      <output name="txt_outfile" file="meme/fimo/fimo_output_txt_1.txt" lines_diff="0"/>
+      <output name="gff_outfile" file="meme/fimo/fimo_output_almost-gff_1.txt" lines_diff="0"/>
+      <output name="xml_outfile" file="meme/fimo/fimo_output_xml_1.xml" lines_diff="8"/>
+      <output name="interval_outfile" file="meme/fimo/fimo_output_interval_1.txt" lines_diff="0"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+**WARNING: This tool is only available for non-commercial use. Use for educational, research and non-profit purposes is permitted. Before using, be sure to review, agree, and comply with the license.**
+
+.. class:: infomark
+
+**To cite FIMO:**
+`Grant CE, Bailey TL, Noble WS. FIMO: scanning for occurrences of a given motif. Bioinformatics. 2011 Apr 1;27(7):1017-8. <http://www.ncbi.nlm.nih.gov/pubmed/21330290>`_
+
+
+For detailed information on FIMO, click here_. To view the license, click license_.
+
+------
+
+**Citation**
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
+
+.. _here: http://meme.nbcr.net/meme/fimo-intro.html
+.. _license: http://meme.nbcr.net/meme/COPYRIGHT.html
+
+  </help>
+</tool>
diff --git a/tools/meme/fimo_wrapper.py b/tools/meme/fimo_wrapper.py
new file mode 100644
index 0000000..86e5c63
--- /dev/null
+++ b/tools/meme/fimo_wrapper.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+"""
+Read text output from FIMO and create an interval file.
+"""
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from galaxy_utils.sequence.transform import DNA_reverse_complement
+
+buffsize = 1048576
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def main():
+    assert len( sys.argv ) == 8, "Wrong number of arguments"
+    sys.argv.pop(0)
+    fimo_cmd = sys.argv.pop(0)
+    html_path = sys.argv.pop(0)
+    html_out = sys.argv.pop(0)
+    interval_out = sys.argv.pop(0)
+    txt_out = sys.argv.pop(0)
+    xml_out = sys.argv.pop(0)
+    gff_out = sys.argv.pop(0)
+
+    # run fimo
+    try:
+        tmp_stderr = tempfile.NamedTemporaryFile()
+        proc = subprocess.Popen( args=fimo_cmd, shell=True, stderr=tmp_stderr )
+        returncode = proc.wait()
+        tmp_stderr.seek(0)
+        stderr = ''
+        try:
+            while True:
+                stderr += tmp_stderr.read( buffsize )
+                if not stderr or len( stderr ) % buffsize != 0:
+                    break
+        except OverflowError:
+            pass
+
+        if returncode != 0:
+            raise Exception(stderr)
+    except Exception as e:
+        raise Exception('Error running FIMO:\n' + str( e ))
+
+    shutil.move( os.path.join( html_path, 'fimo.txt' ), txt_out )
+    shutil.move( os.path.join( html_path, 'fimo.gff' ), gff_out )
+    shutil.move( os.path.join( html_path, 'fimo.xml' ), xml_out )
+    shutil.move( os.path.join( html_path, 'fimo.html' ), html_out )
+
+    out_file = open( interval_out, 'wb' )
+    out_file.write( "#%s\n" % "\t".join( ( "chr", "start", "end", "pattern name", "score", "strand", "matched sequence", "p-value", "q-value" ) ) )
+    for line in open( txt_out ):
+        if line.startswith( '#' ):
+            continue
+        fields = line.rstrip( "\n\r" ).split( "\t" )
+        start, end = int( fields[2] ), int( fields[3] )
+        sequence = fields[7]
+        if start > end:
+            start, end = end, start  # flip start and end, and set strand
+            strand = "-"
+            sequence = DNA_reverse_complement( sequence )  # we want sequences relative to strand; FIMO always provides + stranded sequence
+        else:
+            strand = "+"
+        start -= 1  # make 0-based start position
+        out_file.write( "%s\n" % "\t".join( [ fields[1], str( start ), str( end ), fields[0], fields[4], strand, sequence, fields[5], fields[6] ] ) )
+    out_file.close()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/meme/meme.xml b/tools/meme/meme.xml
new file mode 100644
index 0000000..4eac866
--- /dev/null
+++ b/tools/meme/meme.xml
@@ -0,0 +1,353 @@
+<tool id="meme_meme" name="MEME" version="1.0.0">
+  <requirements><requirement type='package'>meme</requirement></requirements>
+  <description>- Multiple Em for Motif Elicitation</description>
+  <command>meme "$input1" -o "${html_outfile.files_path}" 
+  -nostatus
+  
+  ##-p 8 ##number of processors
+  
+  #if str( $options_type.options_type_selector ) == 'advanced':
+  -sf "${ str( $options_type.sf ).replace( ' ', '_' ) }"
+  -${options_type.alphabet_type.alphabet_type_selector} 
+  -mod "${options_type.mod_type.mod_type_selector}" 
+  -nmotifs "${options_type.nmotifs}" 
+  -wnsites "${options_type.wnsites}"
+  
+  #if $options_type.evt < float('inf'):
+    -evt "${options_type.evt}" 
+  #end if
+  
+  #if str( $options_type.mod_type.mod_type_selector ) != 'oops':
+    #if str( $options_type.mod_type.motif_occurrence_type.motif_occurrence_type_selector ) == 'nsites':
+      -nsites "${options_type.mod_type.motif_occurrence_type.nsites}"
+    #elif str( $options_type.mod_type.motif_occurrence_type.motif_occurrence_type_selector ) == 'min_max_sites':
+      -minsites "${options_type.mod_type.motif_occurrence_type.minsites}" -maxsites "${options_type.mod_type.motif_occurrence_type.maxsites}"
+    #end if
+  #end if
+  
+  #if str( $options_type.motif_width_type.motif_width_type_selector ) == 'exact':
+    -w "${options_type.motif_width_type.width}"
+  #else
+    -minw "${options_type.motif_width_type.minw}" -maxw "${options_type.motif_width_type.maxw}"
+  #end if
+  
+  #if str( $options_type.motif_trim_type.motif_trim_type_selector ) == 'nomatrim':
+    -nomatrim
+  #else
+    -wg "${options_type.motif_trim_type.wg}" -ws "${options_type.motif_trim_type.ws}" ${options_type.motif_trim_type.noendgaps}
+  #end if
+  
+  #if str( $options_type.bfile ) != 'None':
+    -bfile "${options_type.bfile}"
+  #end if
+  
+  #if str( $options_type.pspfile ) != 'None':
+    -psp "${options_type.pspfile}"
+  #end if
+  
+  #if str( $options_type.alphabet_type.alphabet_type_selector ) == "dna":
+    ${options_type.alphabet_type.revcomp} ${options_type.alphabet_type.pal}
+  #end if
+  
+  -maxiter "${options_type.maxiter}" -distance "${options_type.distance}"
+  
+  -prior "${options_type.alphabet_type.prior_type.prior_type_selector}"
+  #if str( $options_type.alphabet_type.prior_type.prior_type_selector ) != 'addone':
+    -b "${options_type.alphabet_type.prior_type.prior_b}" 
+    #if str( $options_type.alphabet_type.prior_type.plib ) != 'None':
+      -plib "${options_type.alphabet_type.prior_type.plib}"
+    #end if
+  #end if
+  
+  #if str( $options_type.alphabet_type.spmap_type.spmap_type_selector ) == 'cons':
+    -cons "${options_type.alphabet_type.spmap_type.cons}" 
+  #else
+    -spmap "${options_type.alphabet_type.spmap_type.spmap_type_selector}"
+    -spfuzz "${options_type.alphabet_type.spmap_type.spfuzz}" 
+  #end if
+  
+  #if str( $options_type.branching_type.branching_type_selector ) == 'x_branch':
+    -x_branch -bfactor "${options_type.branching_type.bfactor}" -heapsize "${options_type.branching_type.heapsize}"
+  #end if
+  
+  ##-maxsize "1000000" ##remove hardcoded maxsize? should increase number of processors instead
+  
+  #end if
+  
+  2>&1 || echo "Error running MEME."
+  
+  
+  && mv ${html_outfile.files_path}/meme.html ${html_outfile}
+  
+  && mv ${html_outfile.files_path}/meme.txt ${txt_outfile}
+  
+  && mv ${html_outfile.files_path}/meme.xml ${xml_outfile}
+  
+  </command>
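+  <!-- For orientation: with the basic option set, the Cheetah template above
+       expands roughly to the following pipeline (file names are illustrative,
+       not taken from a real run):
+       meme input.fasta -o outdir -nostatus 2>&1 || echo "Error running MEME."
+       && mv outdir/meme.html <html_outfile> && mv outdir/meme.txt <txt_outfile>
+       && mv outdir/meme.xml <xml_outfile>
+  -->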
+  <inputs>
+    <param format="fasta" name="input1" type="data" label="Sequences"/>
+      
+      <conditional name="options_type">
+        <param name="options_type_selector" type="select" label="Options Configuration">
+          <option value="basic" selected="true">Basic</option>
+          <option value="advanced">Advanced</option>
+        </param>
+        <when value="basic">
+          <!-- do nothing here -->
+        </when>
+        <when value="advanced">
+      
+      <param name="sf" type="text" value="Galaxy FASTA Input" label="Name of sequence set" />
+      
+      <conditional name="alphabet_type">
+        <param name="alphabet_type_selector" type="select" label="Sequence Alphabet">
+          <option value="protein">Protein</option>
+          <option value="dna" selected="true">DNA</option>
+        </param>
+        <when value="protein">
+          <conditional name="prior_type">
+            <param name="prior_type_selector" type="select" label="Choice of prior">
+              <option value="dirichlet">simple Dirichlet prior</option>
+              <option value="dmix" selected="true">mixture of Dirichlets prior</option>
+              <option value="mega">extremely low variance dmix</option>
+              <option value="megap">mega for all but last iteration of EM; dmix on last iteration</option>
+              <option value="addone">add +1 to each observed count</option>
+            </param>
+            <when value="dirichlet">
+              <param name="prior_b" type="float" value="0.01" label="strength of prior on model parameters" />
+              <param name="plib" type="data" format="txt" optional="True" label="Dirichlet prior file" />
+            </when>
+            <when value="dmix">
+              <param name="prior_b" type="float" value="0" label="strength of prior on model parameters" />
+              <param name="plib" type="data" format="txt" optional="True" label="Dirichlet prior file" />
+            </when>
+            <when value="mega">
+              <param name="prior_b" type="float" value="0" label="strength of prior on model parameters" />
+              <param name="plib" type="data" format="txt" optional="True" label="Dirichlet prior file" />
+            </when>
+            <when value="megap">
+              <param name="prior_b" type="float" value="0" label="strength of prior on model parameters" />
+              <param name="plib" type="data" format="txt" optional="True" label="Dirichlet prior file" />
+            </when>
+            <when value="addone">
+              <!-- no values here? -->
+            </when>
+          </conditional>
+          <conditional name="spmap_type">
+            <param name="spmap_type_selector" type="select" label="EM starting points">
+              <option value="uni">uni</option>
+              <option value="pam" selected="true">pam</option>
+              <option value="cons">Use starting point from string</option>
+            </param>
+            <when value="uni">
+              <param name="spfuzz" type="float" value="0.5" label="Fuzziness of the mapping" />
+            </when>
+            <when value="pam">
+              <param name="spfuzz" type="integer" value="120" label="Fuzziness of the mapping" />
+            </when>
+            <when value="cons">
+              <param name="cons" type="text" value="" label="Starting point from string" />
+            </when>
+          </conditional>
+        </when>
+        <when value="dna">
+          <param name="revcomp" label="Check reverse complement" type="boolean" truevalue="-revcomp" falsevalue="" checked="False"/>
+          <param name="pal" label="Check for palindromes" type="boolean" truevalue="-pal" falsevalue="" checked="False"/>
+          <conditional name="prior_type">
+            <param name="prior_type_selector" type="select" label="Sequence Alphabet">
+              <option value="dirichlet" selected="true">simple Dirichlet prior</option>
+              <option value="dmix">mixture of Dirichlets prior</option>
+              <option value="mega">extremely low variance dmix</option>
+              <option value="megap">mega for all but last iteration of EM; dmix on last iteration</option>
+              <option value="addone">add +1 to each observed count</option>
+            </param>
+            <when value="dirichlet">
+              <param name="prior_b" type="float" value="0.01" label="strength of prior on model parameters" />
+              <param name="plib" type="data" format="txt" optional="True" label="Dirichlet prior file" />
+            </when>
+            <when value="dmix">
+              <param name="prior_b" type="float" value="0" label="strength of prior on model parameters" />
+              <param name="plib" type="data" format="txt" optional="True" label="Dirichlet prior file" />
+            </when>
+            <when value="mega">
+              <param name="prior_b" type="float" value="0" label="strength of prior on model parameters" />
+              <param name="plib" type="data" format="txt" optional="True" label="Dirichlet prior file" />
+            </when>
+            <when value="megap">
+              <param name="prior_b" type="float" value="0" label="strength of prior on model parameters" />
+              <param name="plib" type="data" format="txt" optional="True" label="Dirichlet prior file" />
+            </when>
+            <when value="addone">
+              <!-- no values here? -->
+            </when>
+          </conditional>
+          <conditional name="spmap_type">
+            <param name="spmap_type_selector" type="select" label="EM starting points">
+              <option value="uni" selected="true">uni</option>
+              <option value="pam">pam</option>
+              <option value="cons">Use starting point from string</option>
+            </param>
+            <when value="uni">
+              <param name="spfuzz" type="float" value="0.5" label="Fuzziness of the mapping" />
+            </when>
+            <when value="pam">
+              <param name="spfuzz" type="integer" value="120" label="Fuzziness of the mapping" />
+            </when>
+            <when value="cons">
+              <param name="cons" type="text" value="" label="Starting point from string" />
+            </when>
+          </conditional>
+        </when>
+      </conditional>
+      
+      <param name="nmotifs" type="integer" value="1" label="Number of different motifs to search" />
+      <param name="evt" type="float" value="inf" label="E-value to stop looking for motifs" />
+      <conditional name="mod_type">
+        <param name="mod_type_selector" type="select" label="Expected motif distribution">
+          <option value="oops">One Occurrence Per Sequence</option>
+          <option value="zoops" selected="true">Zero or One Occurrence Per Sequence</option>
+          <option value="anr">Any Number of Repetitions</option>
+        </param>
+        <when value="oops">
+          <!-- no values here -->
+        </when>
+        <when value="zoops">
+          <conditional name="motif_occurrence_type">
+            <param name="motif_occurrence_type_selector" type="select" label="Number of motif occurrences">
+              <option value="default" selected="true">Use defaults</option>
+              <option value="nsites">nsites</option>
+              <option value="min_max_sites">min and max sites</option>
+            </param>
+            <when value="default">
+              <!-- no values here -->
+            </when>
+            <when value="nsites">
+              <param name="nsites" type="integer" value="1" label="Search nsites number of occurrences" />
+            </when>
+            <when value="min_max_sites">
+              <param name="minsites" type="integer" value="1" label="minsites" />
+              <param name="maxsites" type="integer" value="50" label="maxsites" />
+            </when>
+          </conditional>
+        </when>
+        <when value="anr">
+          <conditional name="motif_occurrence_type">
+            <param name="motif_occurrence_type_selector" type="select" label="Number of motif occurrences">
+              <option value="default" selected="true">Use defaults</option>
+              <option value="nsites">nsites</option>
+              <option value="min_max_sites">min and max sites</option>
+            </param>
+            <when value="default">
+              <!-- no values here -->
+            </when>
+            <when value="nsites">
+              <param name="nsites" type="integer" value="1" label="Search nsites number of occurrences" />
+            </when>
+            <when value="min_max_sites">
+              <param name="minsites" type="integer" value="1" label="minsites" />
+              <param name="maxsites" type="integer" value="50" label="maxsites" />
+            </when>
+          </conditional>
+        </when>
+      </conditional>
+      <param name="wnsites" type="float" value="0.8" label="Weight on the prior on nsites" />
+      
+      <conditional name="motif_width_type">
+        <param name="motif_width_type_selector" type="select" label="Motif width type">
+          <option value="exact">Exact width</option>
+          <option value="range" selected="true">Specify a range</option>
+        </param>
+        <when value="exact">
+          <param name="width" type="integer" value="10" label="Width of motif to search" />
+        </when>
+        <when value="range">
+          <param name="minw" type="integer" value="8" label="Min width of motif to search" />
+          <param name="maxw" type="integer" value="50" label="Max width of motif to search" />
+        </when>
+      </conditional>
+    
+      <conditional name="motif_trim_type">
+        <param name="motif_trim_type_selector" type="select" label="Motif trim type">
+          <option value="nomatrim">No motif trim</option>
+          <option value="trim" selected="true">Trim motif</option>
+        </param>
+        <when value="nomatrim">
+          <!-- no values here -->
+        </when>
+        <when value="trim">
+          <param name="wg" type="integer" value="11" label="Gap cost" />
+          <param name="ws" type="integer" value="1" label="Space cost" />
+          <param name="noendgaps" label="Do not penalize endgaps" type="boolean" truevalue="-noendgaps" falsevalue="" checked="False"/>
+        </when>
+      </conditional>
+    
+    <param name="bfile" type="data" format="txt" optional="True" label="Background Model" />
+    <param name="pspfile" type="data" format="txt" optional="True" label="Position-Specific Prior" />
+    
+    <param name="maxiter" type="integer" value="50" label="Number of iterations of EM to run" />
+    <param name="distance" type="float" value="0.001" label="Convergence criterion" />
+    
+      <conditional name="branching_type">
+        <param name="branching_type_selector" type="select" label="x-branching type">
+          <option value="x_branch">Perform x-branching</option>
+          <option value="no_x_branch" selected="true">No x-branching</option>
+        </param>
+        <when value="no_x_branch">
+          <!-- no values here -->
+        </when>
+        <when value="x_branch">
+          <param name="bfactor" type="integer" value="3" label="Number of iterations of branching" />
+          <param name="heapsize" type="integer" value="64" label="Maximum number of heaps to use" />
+        </when>
+      </conditional>
+  
+    </when>
+  </conditional>
+  
+  <param name="non_commercial_use" label="I certify that I am not using this tool for commercial purposes." type="boolean" truevalue="NON_COMMERCIAL_USE" falsevalue="COMMERCIAL_USE" checked="False">
+    <validator type="expression" message="This tool is only available for non-commercial use.">value == True</validator>
+  </param>
+  
+  </inputs>
+  <outputs>
+    <data format="html" name="html_outfile" label="${tool.name} on ${on_string} (html)"/>
+    <data format="txt" name="txt_outfile" label="${tool.name} on ${on_string} (text)"/>
+    <data format="memexml" name="xml_outfile" label="${tool.name} on ${on_string} (xml)"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="meme/meme/meme_input_1.fasta" ftype="fasta" dbkey="hg19"/>
+      <param name="options_type_selector" value="basic"/>
+      <param name="non_commercial_use" value="True"/>
+      <output name="html_outfile" file="meme/meme/meme_output_html_1.html" lines_diff="12"/>
+      <output name="txt_outfile" file="meme/meme/meme_output_txt_1.txt" lines_diff="12"/>
+      <output name="xml_outfile" file="meme/meme/meme_output_xml_1.xml" lines_diff="8"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+**WARNING: This tool is only available for non-commercial use. Use for educational, research and non-profit purposes is permitted. Before using, be sure to review, agree, and comply with the license.**
+
+If you want to specify sequence weights, you must include them at the top of your input FASTA file.
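+
+For example, a weighted dataset might begin like this (illustrative; see the MEME documentation for the exact ``>WEIGHTS`` header syntax)::
+
+    >WEIGHTS 1.0 0.5
+    >seq1
+    ACGTACGTACGT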
+
+.. class:: infomark
+
+**To cite MEME:**
+Timothy L. Bailey and Charles Elkan, "Fitting a mixture model by expectation maximization to discover motifs in biopolymers", Proceedings of the Second International Conference on Intelligent Systems for Molecular Biology, pp. 28-36, AAAI Press, Menlo Park, California, 1994. 
+
+
+For detailed information on MEME, click here_. To view the license, click license_.
+
+------
+
+**Citation**
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
+.. _here: http://meme.nbcr.net/meme/meme-intro.html
+.. _license: http://meme.nbcr.net/meme/COPYRIGHT.html
+
+  </help>
+</tool>
diff --git a/tools/metag_tools/blat_wrapper.py b/tools/metag_tools/blat_wrapper.py
new file mode 100644
index 0000000..54d4606
--- /dev/null
+++ b/tools/metag_tools/blat_wrapper.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+
+import os
+import sys
+import tempfile
+
+assert sys.version_info[:2] >= (2, 4)
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def check_nib_file( dbkey, GALAXY_DATA_INDEX_DIR ):
+    nib_file = "%s/alignseq.loc" % GALAXY_DATA_INDEX_DIR
+    nib_path = ''
+    nibs = {}
+    for i, line in enumerate( open( nib_file ) ):
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( "#" ):
+            fields = line.split( '\t' )
+            if len( fields ) < 3:
+                continue
+            if fields[0] == 'seq':
+                nibs[fields[1]] = fields[2]
+    if dbkey in nibs:
+        nib_path = nibs[dbkey]
+    return nib_path
+
+
+def check_twobit_file( dbkey, GALAXY_DATA_INDEX_DIR ):
+    twobit_file = "%s/twobit.loc" % GALAXY_DATA_INDEX_DIR
+    twobit_path = ''
+    twobits = {}
+    for i, line in enumerate( open( twobit_file ) ):
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( "#" ):
+            fields = line.split( '\t' )
+            if len( fields ) < 2:
+                continue
+            twobits[fields[0]] = fields[1]
+    if dbkey in twobits:
+        twobit_path = twobits[dbkey]
+    return twobit_path
+
+
+def __main__():
+    # I/O
+    source_format = sys.argv[1]        # 0: dbkey; 1: upload file
+    target_file = sys.argv[2]
+    query_file = sys.argv[3]
+    output_file = sys.argv[4]
+    min_iden = sys.argv[5]
+    tile_size = sys.argv[6]
+    one_off = sys.argv[7]
+
+    try:
+        float(min_iden)
+    except ValueError:
+        stop_err('Invalid value for minimal identity.')
+
+    try:
+        test = int(tile_size)
+        assert 6 <= test <= 18
+    except (ValueError, AssertionError):
+        stop_err('Invalid value for tile size. DNA word size must be between 6 and 18.')
+
+    try:
+        test = int(one_off)
+        assert 0 <= test <= int(tile_size)
+    except (ValueError, AssertionError):
+        stop_err('Invalid value for mismatch numbers in the word')
+
+    GALAXY_DATA_INDEX_DIR = sys.argv[8]
+
+    all_files = []
+    if source_format == '0':
+        # check target genome
+        dbkey = target_file
+        nib_path = check_nib_file( dbkey, GALAXY_DATA_INDEX_DIR )
+        twobit_path = check_twobit_file( dbkey, GALAXY_DATA_INDEX_DIR )
+        if not os.path.exists( nib_path ) and not os.path.exists( twobit_path ):
+            stop_err("No sequences are available for %s, request them by reporting this error." % dbkey)
+
+        # check the query file, see whether all of them are legitimate sequence
+        if nib_path and os.path.isdir( nib_path ):
+            compress_files = os.listdir(nib_path)
+            target_path = nib_path
+        elif twobit_path:
+            compress_files = [twobit_path]
+            target_path = ""
+        else:
+            stop_err("Requested genome build has no available sequence.")
+
+        for fname in compress_files:
+            fname = "%s/%s" % ( target_path, fname )
+            fname = os.path.normpath(fname)
+            all_files.append(fname)
+    else:
+        all_files = [target_file]
+
+    for detail_file_path in all_files:
+        output_tempfile = tempfile.NamedTemporaryFile().name
+        command = "blat %s %s %s -oneOff=%s -tileSize=%s -minIdentity=%s -mask=lower -noHead -out=pslx 2>&1" % ( detail_file_path, query_file, output_tempfile, one_off, tile_size, min_iden )
+        os.system( command )
+        os.system( 'cat %s >> %s' % ( output_tempfile, output_file ) )
+        os.remove( output_tempfile )
+
+
+if __name__ == '__main__':
+    __main__()
diff --git a/tools/metag_tools/blat_wrapper.xml b/tools/metag_tools/blat_wrapper.xml
new file mode 100644
index 0000000..f018f1a
--- /dev/null
+++ b/tools/metag_tools/blat_wrapper.xml
@@ -0,0 +1,99 @@
+<tool id="blat_wrapper" name="BLAT" version="1.0.0">
+  <description> compare sequencing reads against UCSC genome builds</description>
+  <command interpreter="python">
+    #if $source.source_select=="database" #blat_wrapper.py 0 $source.dbkey $input_query $output1 $iden $tile_size $one_off
+    #else                                 #blat_wrapper.py 1 $source.input_target $input_query $output1 $iden $tile_size $one_off
+    #end if# ${GALAXY_DATA_INDEX_DIR}
+  </command>
+	<inputs>
+	<conditional name="source">
+		<param name="source_select" type="select" label="Target source">
+				<option value="database">Genome Build</option>
+				<option value="input_ref">Your Upload File</option>
+		</param>
+		<when value="database">
+			<param name="dbkey" type="genomebuild" label="Genome" />
+		</when>
+		<when value="input_ref">
+			<param name="input_target" type="data" format="fasta" label="Reference sequence" />
+ 		</when>
+	</conditional>	
+		<param name="input_query" type="data" format="fasta" label="Sequence file"/>
+		<param name="iden" type="float" size="15" value="90.0" label="Minimal identity (-minIdentity)" />
+		<param name="tile_size" type="integer" size="15" value="11" label="Minimal size of exact match (-tileSize)" help="Must be between 6 and 18."/>
+		<param name="one_off" type="integer" size="15" value="0" label="Number of mismatch in the word (-oneOff)" help="Must be between 0 and 2." />
+	</inputs>
+	<outputs>
+		<data name="output1" format="tabular"/>
+	</outputs>
+	<requirements>
+	  <requirement type="binary">blat</requirement>
+	</requirements>
+	<tests>
+		<test>
+		<param name="source_select" value="database" />
+		<param name="dbkey" value="eschColi_K12" />
+		<param name="input_query" value="blat_wrapper_test1.fa" ftype="fasta"/>
+		<param name="iden" value="90.0" />
+		<param name="tile_size" value="11" />
+		<param name="one_off" value="0" />
+		<output name="output1" file="blat_wrapper_test1.out" />
+		</test>
+	</tests>
+	<help>
+	
+.. class:: warningmark 
+
+Using a smaller word size (*Minimal Size of Exact Match*) will increase the computational time.
+
+.. class:: warningmark 
+
+Using a larger mismatch number (*Number of Mismatches in the Word*) will increase the computational time.
+
+-----
+	
+**What it does**
+ 
+This tool currently uses the **BLAT** alignment program. Your short reads file is searched against a genome build or another uploaded file. 
+ 
+-----
+ 
+**Example**
+ 
+- Input a multiple fasta file::
+
+	>seq1
+	TGGTAATGGTGGTTTTTTTTTTTTTTTTTTATTTTT
+
+- Use the default settings:
+
+  - alignment identity must be higher than or equal to 90%.
+  
+  - minimal size of exact match to trigger an alignment is 11.
+  
+  - allow 0 mismatches in the above exact match size.
+  
+- Search against ce2 (C. elegans March 2004), partial result::
+
+	25 1 0 0 0 0 0 0 + seq1 36 10 36 chrI 15080483 9704438 9704464 1 26, 10, 9704438, ggttttttttttttttttttattttt, ggtttttttttttttttttttttttt,
+	27 0 0 0 0 0 1 32 + seq1 36 9 36 chrI 15080483 1302536 1302595 2 21,6, 9,30, 1302536,1302589, tggtttttttttttttttttt,attttt, tggtttttttttttttttttt,attttt,
+
+-----
+
+**Parameters**
+
+- *Minimal Identity* (**-minIdentity**): The minimum sequence identity, in percent, between the query and the target alignment. Default is 90.
+
+- *Minimal Size of Exact Match* (**-tileSize**): The size of a match that will trigger an alignment. Default is 11; usually between 8 and 12, and must be between 6 and 18.
+
+- *Number of Mismatches in the Word* (**-oneOff**): The number of mismatches allowed in the word (tile size) that still trigger an alignment. Default is 0.
+
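+Based on the wrapper's command line, a run with the defaults above is equivalent to the following (file names are illustrative)::
+
+	blat reference.2bit reads.fa output.pslx -oneOff=0 -tileSize=11 -minIdentity=90.0 -mask=lower -noHead -out=pslx
+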
+-----
+
+**Reference**
+ 
+ **BLAT**: Kent, W. James. BLAT--the BLAST-like alignment tool. Genome Research (2002) 12(4):656-664.
+
+
+	</help>
+</tool>
diff --git a/tools/metag_tools/shrimp_color_wrapper.py b/tools/metag_tools/shrimp_color_wrapper.py
new file mode 100644
index 0000000..a088fa2
--- /dev/null
+++ b/tools/metag_tools/shrimp_color_wrapper.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+"""
+SHRiMP wrapper : Color space
+"""
+
+import os
+import os.path
+import re
+import sys
+import tempfile
+
+assert sys.version_info[:2] >= (2, 4)
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def __main__():
+    # SHRiMP path
+    shrimp = 'rmapper-cs'
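+    # 'rmapper-cs' is SHRiMP's color-space mapper, assumed to be on the PATH
+    # (it is declared as a binary requirement in the accompanying tool XML).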
+
+    # I/O
+    input_target_file = sys.argv[1]  # fasta
+    input_query_file = sys.argv[2]
+    shrimp_outfile = sys.argv[3]  # shrimp output
+
+    # SHRiMP parameters
+    spaced_seed = '1111001111'
+    seed_matches_per_window = '2'
+    seed_hit_taboo_length = '4'
+    seed_generation_taboo_length = '0'
+    seed_window_length = '115.0'
+    max_hits_per_read = '100'
+    max_read_length = '1000'
+    kmer = '-1'
+    sw_match_value = '100'
+    sw_mismatch_value = '-150'
+    sw_gap_open_ref = '-400'
+    sw_gap_open_query = '-400'
+    sw_gap_ext_ref = '-70'
+    sw_gap_ext_query = '-70'
+    sw_crossover_penalty = '-140'
+    sw_full_hit_threshold = '68.0'
+    sw_vector_hit_threshold = '60.0'
+
+    # TODO: put the threshold on each of these parameters
+    if len(sys.argv) > 4:
+        if sys.argv[4] and set(sys.argv[4]) <= set('01'):
+            spaced_seed = sys.argv[4]
+        else:
+            stop_err('Spaced seed must be a combination of 1s and 0s.')
+
+        seed_matches_per_window = sys.argv[5]
+        seed_hit_taboo_length = sys.argv[6]
+        seed_generation_taboo_length = sys.argv[7]
+        seed_window_length = sys.argv[8]
+        max_hits_per_read = sys.argv[9]
+        max_read_length = sys.argv[10]
+        kmer = sys.argv[11]
+        sw_match_value = sys.argv[12]
+        sw_mismatch_value = sys.argv[13]
+        sw_gap_open_ref = sys.argv[14]
+        sw_gap_open_query = sys.argv[15]
+        sw_gap_ext_ref = sys.argv[16]
+        sw_gap_ext_query = sys.argv[17]
+        sw_crossover_penalty = sys.argv[18]
+        sw_full_hit_threshold = sys.argv[19]
+        sw_vector_hit_threshold = sys.argv[20]
+
+    # temp file for shrimp log file
+    shrimp_log = tempfile.NamedTemporaryFile().name
+
+    # SHRiMP command
+    command = ' '.join([shrimp, '-s', spaced_seed, '-n', seed_matches_per_window, '-t', seed_hit_taboo_length, '-9', seed_generation_taboo_length, '-w', seed_window_length, '-o', max_hits_per_read, '-r', max_read_length, '-d', kmer, '-m', sw_match_value, '-i', sw_mismatch_value, '-g', sw_gap_open_ref, '-q', sw_gap_open_query, '-e', sw_gap_ext_ref, '-f', sw_gap_ext_query, '-x', sw_crossover_penalty, '-h', sw_full_hit_threshold, '-v', sw_vector_hit_threshold, input_query_file, input_target_file, '>', shrimp_outfile, '2>', shrimp_log])
+
+    try:
+        os.system(command)
+    except Exception as e:
+        stop_err(str(e))
+
+    # check SHRiMP output: count number of lines
+    num_hits = 0
+    if shrimp_outfile:
+        for i, line in enumerate(open(shrimp_outfile)):
+            line = line.rstrip('\r\n')
+            if not line or line.startswith('#'):
+                continue
+            try:
+                line.split()
+                num_hits += 1
+            except Exception as e:
+                stop_err(str(e))
+
+    if num_hits == 0:   # no hits generated
+        err_msg = ''
+        if shrimp_log:
+            for i, line in enumerate(open(shrimp_log)):
+                if line.startswith('error'):            # deal with memory error:
+                    err_msg += line                     # error: realloc failed: Cannot allocate memory
+                if re.search('Reads Matched', line):    # deal with zero hits
+                    if int(line[8:].split()[2]) == 0:
+                        err_msg = 'Zero hits found.\n'
+        stop_err('SHRiMP Failed due to:\n' + err_msg)
+
+    # remove temp. files
+    if os.path.exists(shrimp_log):
+        os.remove(shrimp_log)
+
+
+if __name__ == '__main__':
+    __main__()
diff --git a/tools/metag_tools/shrimp_color_wrapper.xml b/tools/metag_tools/shrimp_color_wrapper.xml
new file mode 100644
index 0000000..428431f
--- /dev/null
+++ b/tools/metag_tools/shrimp_color_wrapper.xml
@@ -0,0 +1,181 @@
+<tool id="shrimp_color_wrapper" name="SHRiMP for Color-space" version="1.0.0">
+  <description>map color-space reads against a reference sequence</description>
+  <command interpreter="python">
+    #if $param.skip_or_full=="skip" #shrimp_color_wrapper.py $input_target $input_query $output1 
+    #else                           #shrimp_color_wrapper.py $input_target $input_query $output1 $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.sw_gap_open_query $param.sw_gap_ext_ref $param.sw_gap_ext_query $param.sw_crossover_penalty $param.sw_full_hit_threshold $param.sw_vector_hit_threshold
+    #end if#
+  </command>
+    <inputs>
+        <page>
+        <param name="input_query" type="data" format="csfasta" label="Align sequencing reads" help="No dataset? Read tip below"/>
+        <param name="input_target" type="data" format="fasta" label="against reference" />
+        <conditional name="param">
+            <param name="skip_or_full" type="select" label="SHRiMP settings to use" help="For most mapping needs use Commonly used settings. If you want full control use Full List">
+                <option value="skip">Commonly used</option>
+                <option value="full">Full Parameter List</option>
+            </param>
+            <when value="skip" />
+            <when value="full">
+                <param name="spaced_seed"                   type="text"     size="30"   value="1111001111"    label="Spaced Seed" />
+                <param name="seed_matches_per_window"       type="integer"  size="5"    value="2"               label="Seed Matches per Window" />
+                <param name="seed_hit_taboo_length"         type="integer"  size="5"    value="4"               label="Seed Hit Taboo Length" />
+                <param name="seed_generation_taboo_length"  type="integer"  size="5"    value="0"               label="Seed Generation Taboo Length" />
+                <param name="seed_window_length"            type="float"    size="10"   value="115.0"           label="Seed Window Length"          help="in percentage"/>
+                <param name="max_hits_per_read"             type="integer"  size="10"   value="100"             label="Maximum Hits per Read" />
+                <param name="max_read_length"               type="integer"  size="10"   value="1000"            label="Maximum Read Length" />
+                <param name="kmer"                          type="integer"  size="10"   value="-1"              label="Kmer Std. Deviation Limit"   help="-1 as None"/>
+                <param name="sw_match_value"                type="integer"  size="10"   value="100"             label="S-W Match Value" />
+                <param name="sw_mismatch_value"             type="integer"  size="10"   value="-150"            label="S-W Mismatch Value" />
+                <param name="sw_gap_open_ref"               type="integer"  size="10"   value="-400"            label="S-W Gap Open Penalty (Reference)" />
+                <param name="sw_gap_open_query"             type="integer"  size="10"   value="-400"            label="S-W Gap Open Penalty (Query)" />
+                <param name="sw_gap_ext_ref"                type="integer"  size="10"   value="-70"             label="S-W Gap Extend Penalty (Reference)" />
+                <param name="sw_gap_ext_query"              type="integer"  size="10"   value="-70"             label="S-W Gap Extend Penalty (Query)" />
+                <param name="sw_crossover_penalty"          type="integer"  size="10"   value="-140"            label="S-W Crossover Penalty" />               
+                <param name="sw_full_hit_threshold"         type="float"    size="10"   value="68.0"            label="S-W Full Hit Threshold"      help="in percentage"/>
+                <param name="sw_vector_hit_threshold"       type="float"    size="10"   value="60.0"            label="S-W Vector Hit Threshold"    help="in percentage"/>
+            </when>
+        </conditional>
+        </page>
+    </inputs>
+    <outputs>
+        <data name="output1" format="tabular"/>
+    </outputs>
+    <requirements>
+      <requirement type="binary">rmapper-cs</requirement>
+    </requirements>
+    <tests>
+        <test>
+            <param name="skip_or_full" value="skip" />
+            <param name="input_target" value="Ssuis.fasta" ftype="fasta" />
+            <param name="input_query" value="shrimp_cs_test1.csfasta" ftype="csfasta"/>
+            <output name="output1" file="shrimp_cs_test1.out" />
+        </test>
+    </tests>
+<help>
+    
+.. class:: warningmark 
+
+To use this tool, your dataset needs to be in *csfasta* format (ABI SOLiD color-space sequences). Click the pencil icon next to your dataset to set its datatype to *csfasta*.
+
+
+-----
+    
+**What it does**
+ 
+SHRiMP (SHort Read Mapping Package) is a software package for aligning genomic reads against a target genome.  
+  
+
+-----
+
+**Input formats**
+
+A multiple color-space file, for example::
+
+    >2_263_779_F3
+    T132032030200202202003211302222202230022110222
+
+
+-----
+
+**Outputs**
+
+The tool returns the default SHRiMP output::
+
+ 
+     1                      2               3         4        5        6       7      8      9      10
+  --------------------------------------------------------------------------------------------------------------------
+    >2_263_779_F3   Streptococcus_suis      +       814344  814388      1      45      45    3660    8x19x3x2x6x4x3  
+
+where::
+
+  1. (>2_263_779_F3)        - Read id 
+  2. (Streptococcus_suis)   - Reference sequence id
+  3. (+)                    - Strand of the read
+  4. (814344)               - Start position of the alignment in the reference
+  5. (814388)               - End position of the alignment in the reference
+  6. (1)                    - Start position of the alignment in the read
+  7. (45)                   - End position of the alignment in the read
+  8. (45)                   - Length of the read
+  9. (3660)                 - Score 
+ 10. (8x19x3x2x6x4x3)       - Edit string
+
+ 
+-----
+
+**SHRiMP parameter list**
+
+The commonly used parameters with default value setting::
+
+    -s    Spaced Seed                             (default: 111111011111)
+          The spaced seed is a single contiguous string of 0's and 1's. 
+          0's represent wildcards, or positions which will always be 
+          considered as matching, whereas 1's dictate positions that 
+          must match. A string of all 1's will result in a simple kmer scan.
+    -n    Seed Matches per Window                 (default: 2)
+          The number of seed matches per window dictates how many seeds 
+          must match within some window length of the genome before that 
+          region is considered for Smith-Waterman alignment. A lower 
+          value will increase sensitivity while drastically increasing 
+          running time. Higher values will have the opposite effect.
+    -t    Seed Hit Taboo Length                   (default: 4)
+          The seed taboo length specifies how many target genome bases 
+          or colours must exist prior to a previous seed match in order 
+          to count another seed match as a hit.
+    -9    Seed Generation Taboo Length            (default: 0)
+          
+    -w    Seed Window Length                      (default: 115.00%)
+          This parameter specifies the genomic span in bases (or colours) 
+          in which *seed_matches_per_window* must exist before the read 
+          is given consideration by the Smith-Waterman alignment machinery.
+    -o    Maximum Hits per Read                   (default: 100)
+          This parameter specifies how many hits to remember for each read. 
+          If more hits are encountered, ones with lower scores are dropped 
+          to make room.
+    -r    Maximum Read Length                     (default: 1000)
+          This parameter specifies the maximum length of reads that will 
+          be encountered in the dataset. If larger reads than the default 
+          are used, an appropriate value must be passed to *rmapper*.
+    -d    Kmer Std. Deviation Limit               (default: -1 [None])
+          This option permits pruning read kmers, which occur with 
+          frequencies greater than *kmer_std_dev_limit* standard 
+          deviations above the average. This can shorten running 
+          time at the cost of some sensitivity. 
+          *Note*: A negative value disables this option.            
+    -m    S-W Match Value                         (default: 100)
+          The value applied to matches during the Smith-Waterman score calculation.
+    -i    S-W Mismatch Value                      (default: -150)
+          The value applied to mismatches during the Smith-Waterman 
+          score calculation.
+    -g    S-W Gap Open Penalty (Reference)        (default: -400)
+          The value applied to gap opens along the reference sequence 
+          during the Smith-Waterman score calculation.
+          *Note*: For backward compatibility, if -g is set and -q is 
+          not set, the gap open penalty for the query will be set to 
+          the same value as specified for the reference.
+    -q    S-W Gap Open Penalty (Query)            (default: -400)
+          The value applied to gap opens along the query sequence during 
+          the Smith-Waterman score calculation.        
+    -e    S-W Gap Extend Penalty (Reference)      (default: -70)
+          The value applied to gap extends during the Smith-Waterman score calculation.
+          *Note*: For backward compatibility, if -e is set and -f is 
+          not set, the gap extend penalty for the query will be set 
+          to the same value as specified for the reference. 
+    -f    S-W Gap Extend Penalty (Query)          (default: -70)
+          The value applied to gap extends during the Smith-Waterman score calculation.
+    -x
+    -h    S-W Full Hit Threshold                  (default: 68.00%)
+          In letter-space, this parameter determines the threshold 
+          score for both vectored and full Smith-Waterman alignments. 
+          Any values less than this quantity will be thrown away.
+          *Note*: This option differs slightly in meaning between letter-space and color-space.
+    -v
+    
+
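+Under the hood the wrapper calls *rmapper-cs*; with the wrapper's default settings the invocation is equivalent to the following (file names are illustrative)::
+
+    rmapper-cs -s 1111001111 -n 2 -t 4 -9 0 -w 115.0 -o 100 -r 1000 -d -1 -m 100 -i -150 -g -400 -q -400 -e -70 -f -70 -x -140 -h 68.0 -v 60.0 reads.csfasta reference.fasta > output.tab 2> shrimp.log
+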
+-----
+
+**Reference**
+ 
+ **SHRiMP**: Stephen M. Rumble, Michael Brudno, Phil Lacroute, Vladimir Yanovsky, Marc Fiume, Adrian Dalca. shrimp at cs dot toronto dot edu. 
+
+</help>
+</tool>
diff --git a/tools/metag_tools/shrimp_wrapper.py b/tools/metag_tools/shrimp_wrapper.py
new file mode 100644
index 0000000..89aa5e6
--- /dev/null
+++ b/tools/metag_tools/shrimp_wrapper.py
@@ -0,0 +1,642 @@
+#!/usr/bin/env python
+"""
+TODO
+1. decrease memory usage
+2. multi-fasta fastq file, ex. 454
+3. split reads into small chunks?
+
+SHRiMP wrapper
+
+Inputs:
+1. reference seq
+2. reads
+
+Outputs:
+1. table of 8 columns:
+         chrom   ref_loc     read_id     read_loc    ref_nuc     read_nuc    quality     coverage
+2. SHRiMP output
+
+Parameters:
+    -s    Spaced Seed                             (default: 111111011111)
+    -n    Seed Matches per Window                 (default: 2)
+    -t    Seed Hit Taboo Length                   (default: 4)
+    -9    Seed Generation Taboo Length            (default: 0)
+    -w    Seed Window Length                      (default: 115.00%)
+    -o    Maximum Hits per Read                   (default: 100)
+    -r    Maximum Read Length                     (default: 1000)
+    -d    Kmer Std. Deviation Limit               (default: -1 [None])
+
+    -m    S-W Match Value                         (default: 100)
+    -i    S-W Mismatch Value                      (default: -150)
+    -g    S-W Gap Open Penalty (Reference)        (default: -400)
+    -q    S-W Gap Open Penalty (Query)            (default: -400)
+    -e    S-W Gap Extend Penalty (Reference)      (default: -70)
+    -f    S-W Gap Extend Penalty (Query)          (default: -70)
+    -h    S-W Hit Threshold                       (default: 68.00%)
+
+Command:
+%rmapper -s spaced_seed -n seed_matches_per_window -t seed_hit_taboo_length -9 seed_generation_taboo_length -w seed_window_length -o max_hits_per_read -r max_read_length -d kmer -m sw_match_value -i sw_mismatch_value -g sw_gap_open_ref -q sw_gap_open_query -e sw_gap_ext_ref -f sw_gap_ext_query -h sw_hit_threshold <query> <target> > <output> 2> <log>
+
+SHRiMP output:
+>7:2:1147:982/1 chr3    +   36586562    36586595    2   35  36  2900    3G16G13
+>7:2:1147:982/1 chr3    +   95338194    95338225    4   35  36  2700    9T7C14
+>7:2:587:93/1   chr3    +   14913541    14913577    1   35  36  2960    19--16
+"""
+from __future__ import print_function
+
+import os
+import os.path
+import re
+import sys
+import tempfile
+
+assert sys.version_info[:2] >= (2, 4)
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def reverse_complement(s):
+    complement_dna = {"A": "T", "T": "A", "C": "G", "G": "C", "a": "t", "t": "a", "c": "g", "g": "c", "N": "N", "n": "n", ".": ".", "-": "-"}
+    reversed_s = []
+    for i in s:
+        reversed_s.append(complement_dna[i])
+    reversed_s.reverse()
+    return "".join(reversed_s)
+
+
+def generate_sub_table(result_file, ref_file, score_files, table_outfile, hit_per_read, insertion_size):
+    invalid_editstring_char = 0
+    all_score_file = score_files.split(',')
+
+    if len(all_score_file) != hit_per_read:
+        stop_err('One or more query files is missing. Please check your dataset.')
+
+    temp_table_name = tempfile.NamedTemporaryFile().name
+    temp_table = open(temp_table_name, 'w')
+
+    outfile = open(table_outfile, 'w')
+
+    # reference seq: not a single fasta seq
+    refseq = {}
+    chrom_cov = {}
+    seq = ''
+    title = None
+
+    for i, line in enumerate(open(ref_file)):
+        line = line.rstrip()
+        if not line or line.startswith('#'):
+            continue
+
+        if line.startswith('>'):
+            if seq:
+                if title not in refseq:
+                    refseq[title] = seq
+                    chrom_cov[title] = {}
+                seq = ''
+            title = line[1:]
+        else:
+            seq += line
+    if seq:
+        if title not in refseq:
+            refseq[title] = seq
+            chrom_cov[title] = {}
+
+    # find hits: one end and/or the other
+    hits = {}
+    for i, line in enumerate(open(result_file)):
+        line = line.rstrip()
+        if not line or line.startswith('#'):
+            continue
+
+        # FORMAT: readname contigname strand contigstart contigend readstart readend readlength score editstring
+        fields = line.split('\t')
+        readname = fields[0][1:]
+        chrom = fields[1]
+        strand = fields[2]
+        chrom_start = int(fields[3]) - 1
+        chrom_end = int(fields[4])
+        read_start = fields[5]
+        score = fields[8]
+        editstring = fields[9]
+
+        if hit_per_read == 1:
+            endindex = '1'
+        else:
+            readname, endindex = readname.split('/')
+
+        if readname in hits:
+            if endindex in hits[readname]:
+                hits[readname][endindex].append([strand, editstring, chrom_start, chrom_end, read_start, chrom])
+            else:
+                hits[readname][endindex] = [[strand, editstring, chrom_start, chrom_end, read_start, chrom]]
+        else:
+            hits[readname] = {}
+            hits[readname][endindex] = [[strand, editstring, chrom_start, chrom_end, read_start, chrom]]
+
+    # find score: one end and the other end
+    hits_score = {}
+    readname = ''
+    score = ''
+    for num_score_file in range(len(all_score_file)):
+        score_file = all_score_file[num_score_file]
+        for i, line in enumerate(open(score_file)):
+            line = line.rstrip()
+            if not line or line.startswith('#'):
+                continue
+
+            if line.startswith('>'):
+                if score:
+                    if readname in hits and len(hits[readname]) == hit_per_read:
+                        if readname not in hits_score:
+                            hits_score[readname] = {}
+                        if endindex not in hits_score[readname]:
+                            hits_score[readname][endindex] = score
+                    score = ''
+                if hit_per_read == 1:
+                    readname = line[1:]
+                    endindex = '1'
+                else:
+                    readname, endindex = line[1:].split('/')
+            else:
+                score = line
+
+        if score:  # the last one
+            if readname in hits and len(hits[readname]) == hit_per_read:
+                if readname not in hits_score:
+                    hits_score[readname] = {}
+                if endindex not in hits_score[readname]:
+                    hits_score[readname][endindex] = score
+
+    # call to all mappings
+    for readkey in hits.keys():
+        if len(hits[readkey]) != hit_per_read:
+            continue
+
+        matches = []
+        match_count = 0
+
+        if hit_per_read == 1:
+            if len(hits[readkey]['1']) == 1:
+                matches = [ hits[readkey]['1'] ]
+                match_count = 1
+        else:
+            end1_data = hits[readkey]['1']
+            end2_data = hits[readkey]['2']
+
+            for i, end1_hit in enumerate(end1_data):
+                crin_strand = {'+': False, '-': False}
+                crin_insertSize = {'+': False, '-': False}
+
+                crin_strand[end1_hit[0]] = True
+                crin_insertSize[end1_hit[0]] = int(end1_hit[2])
+
+                for j, end2_hit in enumerate(end2_data):
+                    crin_strand[end2_hit[0]] = True
+                    crin_insertSize[end2_hit[0]] = int(end2_hit[2])
+
+                    if end1_hit[-1] != end2_hit[-1]:
+                        continue
+
+                    if crin_strand['+'] and crin_strand['-']:
+                        if (crin_insertSize['-'] - crin_insertSize['+']) <= insertion_size:
+                            matches.append([end1_hit, end2_hit])
+                            match_count += 1
+
+        if match_count == 1:
+            for x, end_data in enumerate(matches[0]):
+                end_strand, end_editstring, end_chr_start, end_chr_end, end_read_start, end_chrom = end_data
+                end_read_start = int(end_read_start) - 1
+
+                if end_strand == '-':
+                    refsegment = reverse_complement(refseq[end_chrom][end_chr_start:end_chr_end])
+                else:
+                    refsegment = refseq[end_chrom][end_chr_start:end_chr_end]
+
+                match_len = 0
+                editindex = 0
+                gap_read = 0
+
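+                # Walk the SHRiMP edit string (e.g. "3G16G13", as in the module
+                # docstring): a number is a run of matching bases, a letter a
+                # substitution, '-' a gap in the read, '(...)' an insertion,
+                # and 'x' a color-space crossover error.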
+                while editindex < len(end_editstring):
+                    editchr = end_editstring[editindex]
+                    chrA = ''
+                    chrB = ''
+
+                    if editchr.isdigit():
+                        editcode = ''
+
+                        while editchr.isdigit() and editindex < len(end_editstring):
+                            editcode += editchr
+                            editindex += 1
+                            if editindex < len(end_editstring):
+                                editchr = end_editstring[editindex]
+
+                        for baseIndex in range(int(editcode)):
+                            chrA += refsegment[match_len + baseIndex]
+                            chrB = chrA
+
+                        match_len += int(editcode)
+
+                    elif editchr == 'x':
+                        # crossover: inserted between the appropriate two bases
+                        # Two sequencing errors: 4x15x6 (25 matches with 2 crossovers)
+                        # Treated as errors in the reads; Do nothing.
+                        editindex += 1
+
+                    elif editchr.isalpha():
+                        editcode = editchr
+                        editindex += 1
+                        chrA = refsegment[match_len]
+                        chrB = editcode
+                        match_len += len(editcode)
+
+                    elif editchr == '-':
+                        editcode = editchr
+                        editindex += 1
+                        chrA = refsegment[match_len]
+                        chrB = editcode
+                        match_len += len(editcode)
+                        gap_read += 1
+
+                    elif editchr == '(':
+                        editcode = ''
+
+                        while editchr != ')' and editindex < len(end_editstring):
+                            if editindex < len(end_editstring):
+                                editchr = end_editstring[editindex]
+                            editcode += editchr
+                            editindex += 1
+
+                        editcode = editcode[1:-1]
+                        chrA = '-' * len(editcode)
+                        chrB = editcode
+
+                    else:
+                        invalid_editstring_char += 1
+
+                    if end_strand == '-':
+                        chrA = reverse_complement(chrA)
+                        chrB = reverse_complement(chrB)
+
+                    pos_line = ''
+                    rev_line = ''
+
+                    for mappingIndex in range(len(chrA)):
+                        # reference
+                        chrAx = chrA[mappingIndex]
+                        # read
+                        chrBx = chrB[mappingIndex]
+
+                        if chrAx and chrBx and chrBx.upper() != 'N':
+                            if end_strand == '+':
+                                chrom_loc = end_chr_start + match_len - len(chrA) + mappingIndex
+                                read_loc = end_read_start + match_len - len(chrA) + mappingIndex - gap_read
+
+                                if chrAx == '-':
+                                    chrom_loc -= 1
+
+                                if chrBx == '-':
+                                    scoreBx = '-1'
+                                else:
+                                    scoreBx = hits_score[readkey][str(x + 1)].split()[read_loc]
+
+                                # 1-based on chrom_loc and read_loc
+                                pos_line = pos_line + '\t'.join([end_chrom, str(chrom_loc + 1), readkey + '/' + str(x + 1), str(read_loc + 1), chrAx, chrBx, scoreBx]) + '\n'
+
+                            else:
+                                chrom_loc = end_chr_end - match_len + mappingIndex
+                                read_loc = end_read_start + match_len - 1 - mappingIndex - gap_read
+
+                                if chrAx == '-':
+                                    chrom_loc -= 1
+
+                                if chrBx == '-':
+                                    scoreBx = '-1'
+                                else:
+                                    scoreBx = hits_score[readkey][str(x + 1)].split()[read_loc]
+
+                                # 1-based on chrom_loc and read_loc
+                                rev_line = '\t'.join([end_chrom, str(chrom_loc + 1), readkey + '/' + str(x + 1), str(read_loc + 1), chrAx, chrBx, scoreBx]) + '\n' + rev_line
+
+                            if end_chrom in chrom_cov:
+                                if chrom_loc in chrom_cov[end_chrom]:
+                                    chrom_cov[end_chrom][chrom_loc] += 1
+                                else:
+                                    chrom_cov[end_chrom][chrom_loc] = 1
+
+                            else:
+                                chrom_cov[end_chrom] = {}
+                                chrom_cov[end_chrom][chrom_loc] = 1
+
+                    if pos_line:
+                        temp_table.write('%s\n' % (pos_line.rstrip('\r\n')))
+                    if rev_line:
+                        temp_table.write('%s\n' % (rev_line.rstrip('\r\n')))
+
+    temp_table.close()
+
+    # chrom-wide coverage
+    for i, line in enumerate(open(temp_table_name)):
+        line = line.rstrip()
+        if not line or line.startswith('#'):
+            continue
+
+        fields = line.split()
+        chrom = fields[0]
+        eachBp = int(fields[1])
+        readname = fields[2]
+
+        if hit_per_read == 1:
+            fields[2] = readname.split('/')[0]
+
+        if eachBp in chrom_cov[chrom]:
+            outfile.write('%s\t%d\n' % ('\t'.join(fields), chrom_cov[chrom][eachBp]))
+        else:
+            outfile.write('%s\t%d\n' % ('\t'.join(fields), 0))
+
+    outfile.close()
+
+    if os.path.exists(temp_table_name):
+        os.remove(temp_table_name)
+
+    if invalid_editstring_char:
+        print('Skipped %d invalid character(s) in edit strings' % invalid_editstring_char)
+
+    return True
+
+
+def convert_fastqsolexa_to_fasta_qual(infile_name, query_fasta, query_qual):
+    outfile_seq = open( query_fasta, 'w' )
+    outfile_score = open( query_qual, 'w' )
+
+    seq_title_startswith = ''
+    qual_title_startswith = ''
+
+    default_coding_value = 64  # Solexa ascii-code
+    fastq_block_lines = 0
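+    # Each fastqsolexa record spans four lines: @title, bases, +title, scores;
+    # scores may be ASCII-encoded (e.g. "hhhh") or space-separated integers
+    # (e.g. "40 40 30 20"); both cases are handled below.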
+
+    for i, line in enumerate( open( infile_name ) ):
+        line = line.rstrip()
+        if not line or line.startswith( '#' ):
+            continue
+
+        fastq_block_lines = ( fastq_block_lines + 1 ) % 4
+        line_startswith = line[0:1]
+
+        if fastq_block_lines == 1:
+            # first line is @title_of_seq
+            if not seq_title_startswith:
+                seq_title_startswith = line_startswith
+
+            if line_startswith != seq_title_startswith:
+                outfile_seq.close()
+                outfile_score.close()
+                stop_err( 'Invalid fastqsolexa format at line %d: %s.' % ( i + 1, line ) )
+
+            read_title = line[1:]
+            outfile_seq.write( '>%s\n' % line[1:] )
+
+        elif fastq_block_lines == 2:
+            # second line is nucleotides
+            read_length = len( line )
+            outfile_seq.write( '%s\n' % line )
+
+        elif fastq_block_lines == 3:
+            # third line is +title_of_qualityscore ( might be skipped )
+            if not qual_title_startswith:
+                qual_title_startswith = line_startswith
+
+            if line_startswith != qual_title_startswith:
+                outfile_seq.close()
+                outfile_score.close()
+                stop_err( 'Invalid fastqsolexa format at line %d: %s.' % ( i + 1, line ) )
+
+            quality_title = line[1:]
+            if quality_title and read_title != quality_title:
+                outfile_seq.close()
+                outfile_score.close()
+                stop_err( 'Invalid fastqsolexa format at line %d: sequence title "%s" differs from score title "%s".' % ( i + 1, read_title, quality_title ) )
+
+            if not quality_title:
+                outfile_score.write( '>%s\n' % read_title )
+            else:
+                outfile_score.write( '>%s\n' % line[1:] )
+
+        else:
+            # fourth line is quality scores
+            qual = ''
+            # peek at the first token: integer scores or ASCII-encoded?
+            val = line.split()[0]
+            try:
+                int( val )
+                fastq_integer = True
+            except ValueError:
+                fastq_integer = False
+
+            if fastq_integer:
+                # digits
+                qual = line
+            else:
+                # ascii
+                quality_score_length = len( line )
+                if quality_score_length == read_length + 1:
+                    # first char is qual_score_startswith
+                    qual_score_startswith = ord( line[0:1] )
+                    line = line[1:]
+                elif quality_score_length == read_length:
+                    qual_score_startswith = default_coding_value
+                else:
+                    stop_err( 'Invalid fastqsolexa format at line %d: the number of quality scores ( %d ) is not the same as bases ( %d ).' % ( i + 1, quality_score_length, read_length ) )
+
+                for j, char in enumerate( line ):
+                    score = ord( char ) - qual_score_startswith  # e.g. ord('h') - 64 = 40
+                    qual = "%s%s " % ( qual, str( score ) )
+
+            outfile_score.write( '%s\n' % qual )
+
+    outfile_seq.close()
+    outfile_score.close()
+
+    return True
+
+
+def __main__():
+    # SHRiMP path
+    shrimp = 'rmapper-ls'
+
+    # I/O
+    input_target_file = sys.argv[1]  # fasta
+    shrimp_outfile = sys.argv[2]  # shrimp output
+    table_outfile = sys.argv[3]  # table output
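+    # sys.argv[4] is comma-separated: a single entry for single-end reads, or
+    # "end1,end2,insert_size" for paired-end (illustrative: "reads.fastq" vs.
+    # "end1.fastq,end2.fastq,600").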
+    single_or_paired = sys.argv[4].split(',')
+
+    insertion_size = 600
+
+    if len(single_or_paired) == 1:                  # single or paired
+        type_of_reads = 'single'
+        hit_per_read = 1
+        input_query = single_or_paired[0]
+        query_fasta = tempfile.NamedTemporaryFile().name
+        query_qual = tempfile.NamedTemporaryFile().name
+
+    else:                                           # paired-end
+        type_of_reads = 'paired'
+        hit_per_read = 2
+        input_query_end1 = single_or_paired[0]
+        input_query_end2 = single_or_paired[1]
+        insertion_size = int(single_or_paired[2])
+        query_fasta_end1 = tempfile.NamedTemporaryFile().name
+        query_fasta_end2 = tempfile.NamedTemporaryFile().name
+        query_qual_end1 = tempfile.NamedTemporaryFile().name
+        query_qual_end2 = tempfile.NamedTemporaryFile().name
+
+    # SHRiMP parameters: total = 15, default values
+    spaced_seed = '111111011111'
+    seed_matches_per_window = '2'
+    seed_hit_taboo_length = '4'
+    seed_generation_taboo_length = '0'
+    seed_window_length = '115.0'
+    max_hits_per_read = '100'
+    max_read_length = '1000'
+    kmer = '-1'
+    sw_match_value = '100'
+    sw_mismatch_value = '-150'
+    sw_gap_open_ref = '-400'
+    sw_gap_open_query = '-400'
+    sw_gap_ext_ref = '-70'
+    sw_gap_ext_query = '-70'
+    sw_hit_threshold = '68.0'
+
+    # TODO: put the threshold on each of these parameters
+    if len(sys.argv) > 5:
+        spaced_seed = sys.argv[5]
+        if not spaced_seed or not all(c in '01' for c in spaced_seed):
+            stop_err('Spaced seed must be a combination of 1s and 0s.')
+
+        seed_matches_per_window = sys.argv[6]
+        seed_hit_taboo_length = sys.argv[7]
+        seed_generation_taboo_length = sys.argv[8]
+        seed_window_length = sys.argv[9]
+        max_hits_per_read = sys.argv[10]
+        max_read_length = sys.argv[11]
+        kmer = sys.argv[12]
+        sw_match_value = sys.argv[13]
+        sw_mismatch_value = sys.argv[14]
+        sw_gap_open_ref = sys.argv[15]
+        sw_gap_open_query = sys.argv[16]
+        sw_gap_ext_ref = sys.argv[17]
+        sw_gap_ext_query = sys.argv[18]
+        sw_hit_threshold = sys.argv[19]
+
+    # temp file for shrimp log file
+    shrimp_log = tempfile.NamedTemporaryFile().name
+
+    # convert fastq to fasta and quality score files
+    if type_of_reads == 'single':
+        convert_fastqsolexa_to_fasta_qual(input_query, query_fasta, query_qual)
+    else:
+        convert_fastqsolexa_to_fasta_qual(input_query_end1, query_fasta_end1, query_qual_end1)
+        convert_fastqsolexa_to_fasta_qual(input_query_end2, query_fasta_end2, query_qual_end2)
+
+    # SHRiMP command
+    if type_of_reads == 'single':
+        command = ' '.join([shrimp, '-s', spaced_seed, '-n', seed_matches_per_window, '-t', seed_hit_taboo_length, '-9', seed_generation_taboo_length, '-w', seed_window_length, '-o', max_hits_per_read, '-r', max_read_length, '-d', kmer, '-m', sw_match_value, '-i', sw_mismatch_value, '-g', sw_gap_open_ref, '-q', sw_gap_open_query, '-e', sw_gap_ext_ref, '-f', sw_gap_ext_query, '-h', sw_hit_threshold, query_fasta, input_target_file, '>', shrimp_outfile, '2>', shrimp_log])
+
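+        # NOTE: os.system() reports command failure via its exit status and
+        # does not raise, so this try/except only catches Python-level errors;
+        # the SHRiMP log is inspected later when no hits are found.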
+        try:
+            os.system(command)
+        except Exception as e:
+            if os.path.exists(query_fasta):
+                os.remove(query_fasta)
+            if os.path.exists(query_qual):
+                os.remove(query_qual)
+            stop_err(str(e))
+
+    else:  # paired
+        command_end1 = ' '.join([shrimp, '-s', spaced_seed, '-n', seed_matches_per_window, '-t', seed_hit_taboo_length, '-9', seed_generation_taboo_length, '-w', seed_window_length, '-o', max_hits_per_read, '-r', max_read_length, '-d', kmer, '-m', sw_match_value, '-i', sw_mismatch_value, '-g', sw_gap_open_ref, '-q', sw_gap_open_query, '-e', sw_gap_ext_ref, '-f', sw_gap_ext_query, '-h', sw_hit_threshold, query_fasta_end1, input_target_file, '>', shrimp_outfile, '2>', shrimp_log])
+        command_end2 = ' '.join([shrimp, '-s', spaced_seed, '-n', seed_matches_per_window, '-t', seed_hit_taboo_length, '-9', seed_generation_taboo_length, '-w', seed_window_length, '-o', max_hits_per_read, '-r', max_read_length, '-d', kmer, '-m', sw_match_value, '-i', sw_mismatch_value, '-g', sw_gap_open_ref, '-q', sw_gap_open_query, '-e', sw_gap_ext_ref, '-f', sw_gap_ext_query, '-h', sw_hit_threshold, query_fasta_end2, input_target_file, '>>', shrimp_outfile, '2>>', shrimp_log])
+
+        try:
+            os.system(command_end1)
+            os.system(command_end2)
+        except Exception as e:
+            if os.path.exists(query_fasta_end1):
+                os.remove(query_fasta_end1)
+            if os.path.exists(query_fasta_end2):
+                os.remove(query_fasta_end2)
+            if os.path.exists(query_qual_end1):
+                os.remove(query_qual_end1)
+            if os.path.exists(query_qual_end2):
+                os.remove(query_qual_end2)
+            stop_err(str(e))
+
+    # check SHRiMP output: count number of lines
+    num_hits = 0
+    if shrimp_outfile:
+        for i, line in enumerate(open(shrimp_outfile)):
+            line = line.rstrip('\r\n')
+            if not line or line.startswith('#'):
+                continue
+            try:
+                line.split()
+                num_hits += 1
+            except Exception as e:
+                stop_err(str(e))
+
+    if num_hits == 0:   # no hits generated
+        err_msg = ''
+        if shrimp_log:
+            for i, line in enumerate(open(shrimp_log)):
+                if line.startswith('error'):            # deal with memory error:
+                    err_msg += line                     # error: realloc failed: Cannot allocate memory
+                if re.search('Reads Matched', line):    # deal with zero hits
+                    if int(line[8:].split()[2]) == 0:
+                        err_msg = 'Zero hits found.\n'
+        stop_err('SHRiMP Failed due to:\n' + err_msg)
+
+    # convert to table
+    if type_of_reads == 'single':
+        generate_sub_table(shrimp_outfile, input_target_file, query_qual, table_outfile, hit_per_read, insertion_size)
+    else:
+        generate_sub_table(shrimp_outfile, input_target_file, query_qual_end1 + ',' + query_qual_end2, table_outfile, hit_per_read, insertion_size)
+
+    # remove temp. files
+    if type_of_reads == 'single':
+        if os.path.exists(query_fasta):
+            os.remove(query_fasta)
+        if os.path.exists(query_qual):
+            os.remove(query_qual)
+    else:
+        if os.path.exists(query_fasta_end1):
+            os.remove(query_fasta_end1)
+        if os.path.exists(query_fasta_end2):
+            os.remove(query_fasta_end2)
+        if os.path.exists(query_qual_end1):
+            os.remove(query_qual_end1)
+        if os.path.exists(query_qual_end2):
+            os.remove(query_qual_end2)
+
+    if os.path.exists(shrimp_log):
+        os.remove(shrimp_log)
+
+
+if __name__ == '__main__':
+    __main__()
diff --git a/tools/metag_tools/shrimp_wrapper.xml b/tools/metag_tools/shrimp_wrapper.xml
new file mode 100644
index 0000000..f411cde
--- /dev/null
+++ b/tools/metag_tools/shrimp_wrapper.xml
@@ -0,0 +1,279 @@
+<tool id="shrimp_wrapper" name="SHRiMP for Letter-space" version="1.0.0">
+  <description>maps reads against a reference sequence</description>
+  <command interpreter="python">
+    #if     ($type_of_reads.single_or_paired=="single" and $param.skip_or_full=="skip") #shrimp_wrapper.py $input_target $output1 $output2 $input_query
+    #elif   ($type_of_reads.single_or_paired=="paired" and $param.skip_or_full=="skip") #shrimp_wrapper.py $input_target $output1 $output2 $type_of_reads.input1,$type_of_reads.input2,$type_of_reads.insertion_size
+    #elif   ($type_of_reads.single_or_paired=="single" and $param.skip_or_full=="full") #shrimp_wrapper.py $input_target $output1 $output2 $input_query                                                              $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.s [...]
+    #elif   ($type_of_reads.single_or_paired=="paired" and $param.skip_or_full=="full") #shrimp_wrapper.py $input_target $output1 $output2 $type_of_reads.input1,$type_of_reads.input2,$type_of_reads.insertion_size $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.s [...]
+    #end if#
+  </command>
+    <inputs>
+        <page>
+        <conditional name="type_of_reads">
+            <param name="single_or_paired" type="select" label="Single- or Paired-ends">
+                <option value="single">Single-end</option>
+                <option value="paired">Paired-end</option>
+            </param>
+            <when value="single">
+                <param name="input_query" type="data" format="fastqsolexa" label="Align sequencing reads" help="No dataset? Read tip below"/>
+            </when>
+            <when value="paired">
+                <param name="insertion_size" type="integer" size="5" value="600" label="Insertion length between two ends" help="bp" />
+                <param name="input1" type="data" format="fastqsolexa" label="Align sequencing reads, one end" />
+                <param name="input2" type="data" format="fastqsolexa" label="and the other end" />
+            </when> 
+        </conditional>
+        <param name="input_target" type="data" format="fasta" label="against reference" />
+        <conditional name="param">
+            <param name="skip_or_full" type="select" label="SHRiMP settings to use" help="For most mapping needs use Commonly used settings. If you want full control use Full List">
+                <option value="skip">Commonly used</option>
+                <option value="full">Full Parameter List</option>
+            </param>
+            <when value="skip" />
+            <when value="full">
+                <param name="spaced_seed"                   type="text"     size="30"   value="111111011111"    label="Spaced Seed" />
+                <param name="seed_matches_per_window"       type="integer"  size="5"    value="2"               label="Seed Matches per Window" />
+                <param name="seed_hit_taboo_length"         type="integer"  size="5"    value="4"               label="Seed Hit Taboo Length" />
+                <param name="seed_generation_taboo_length"  type="integer"  size="5"    value="0"               label="Seed Generation Taboo Length" />
+                <param name="seed_window_length"            type="float"    size="10"   value="115.0"           label="Seed Window Length"          help="in percentage"/>
+                <param name="max_hits_per_read"             type="integer"  size="10"   value="100"             label="Maximum Hits per Read" />
+                <param name="max_read_length"               type="integer"  size="10"   value="1000"            label="Maximum Read Length" />
+                <param name="kmer"                          type="integer"  size="10"   value="-1"              label="Kmer Std. Deviation Limit"   help="-1 as None"/>
+                <param name="sw_match_value"                type="integer"  size="10"   value="100"             label="S-W Match Value" />
+                <param name="sw_mismatch_value"             type="integer"  size="10"   value="-150"            label="S-W Mismatch Value" />
+                <param name="sw_gap_open_ref"               type="integer"  size="10"   value="-400"            label="S-W Gap Open Penalty (Reference)" />
+                <param name="sw_gap_open_query"             type="integer"  size="10"   value="-400"            label="S-W Gap Open Penalty (Query)" />
+                <param name="sw_gap_ext_ref"                type="integer"  size="10"   value="-70"             label="S-W Gap Extend Penalty (Reference)" />
+                <param name="sw_gap_ext_query"              type="integer"  size="10"   value="-70"             label="S-W Gap Extend Penalty (Query)" />
+                <param name="sw_hit_threshold"              type="float"    size="10"   value="68.0"            label="S-W Hit Threshold"           help="in percentage"/>
+            </when>
+        </conditional>
+        </page>
+    </inputs>
+    <outputs>
+        <data name="output1" format="tabular"/>
+        <data name="output2" format="tabular"/>
+    </outputs>
+    <requirements>
+      <requirement type="binary">rmapper-ls</requirement>
+    </requirements>
+    <tests>
+        <test>
+            <param name="single_or_paired" value="single" />
+            <param name="skip_or_full" value="skip" />
+            <param name="input_target" value="shrimp_phix_anc.fa" ftype="fasta" />
+            <param name="input_query" value="shrimp_wrapper_test1.fastq" ftype="fastqsolexa"/>
+            <output name="output1" file="shrimp_wrapper_test1.out1" />
+        </test>
+        <!--  
+        <test>
+            <param name="single_or_paired" value="paired" />
+            <param name="skip_or_full" value="skip" />
+            <param name="input_target" value="shrimp_eca_chrMT.fa" ftype="fasta" />
+            <param name="input1" value="shrimp_wrapper_test2_end1.fastq" ftype="fastqsolexa" />
+            <param name="input2" value="shrimp_wrapper_test2_end2.fastq" ftype="fastqsolexa" />
+            <param name="insertion_size" value="600" />
+            <output name="output1" file="shrimp_wrapper_test2.out1" />
+        </test>
+        <test>
+            <param name="single_or_paired" value="single" />
+            <param name="skip_or_full" value="full" />
+            <param name="input_target" value="shrimp_phix_anc.fa" ftype="fasta" />
+            <param name="input_query" value="shrimp_wrapper_test1.fastq" ftype="fastqsolexa"/>
+            <param name="spaced_seed" value="111111011111" />
+            <param name="seed_matches_per_window" value="2" />
+            <param name="seed_hit_taboo_length" value="4" />
+            <param name="seed_generation_taboo_length" value="0" />
+            <param name="seed_window_length" value="115.0" />
+            <param name="max_hits_per_read" value="100" />
+            <param name="max_read_length" value="1000" />
+            <param name="kmer" value="-1" />
+            <param name="sw_match_value" value="100" />
+            <param name="sw_mismatch_value" value="-150" />
+            <param name="sw_gap_open_ref" value="-400" />
+            <param name="sw_gap_open_query" value="-400" />
+            <param name="sw_gap_ext_ref" value="-70" />
+            <param name="sw_gap_ext_query" value="-70" />
+            <param name="sw_hit_threshold" value="68.0" />
+            <output name="output1" file="shrimp_wrapper_test1.out1" />
+        </test> 
+        <test>
+            <param name="single_or_paired" value="paired" />
+            <param name="skip_or_full" value="full" />
+            <param name="input_target" value="shrimp_eca_chrMT.fa" ftype="fasta" />
+            <param name="spaced_seed" value="111111011111" />
+            <param name="seed_matches_per_window" value="2" />
+            <param name="seed_hit_taboo_length" value="4" />
+            <param name="seed_generation_taboo_length" value="0" />
+            <param name="seed_window_length" value="115.0" />
+            <param name="max_hits_per_read" value="100" />
+            <param name="max_read_length" value="1000" />
+            <param name="kmer" value="-1" />
+            <param name="sw_match_value" value="100" />
+            <param name="sw_mismatch_value" value="-150" />
+            <param name="sw_gap_open_ref" value="-400" />
+            <param name="sw_gap_open_query" value="-400" />
+            <param name="sw_gap_ext_ref" value="-70" />
+            <param name="sw_gap_ext_query" value="-70" />
+            <param name="sw_hit_threshold" value="68.0" />
+            <param name="input1" value="shrimp_wrapper_test2_end1.fastq" ftype="fastqsolexa"/>
+            <param name="input2" value="shrimp_wrapper_test2_end2.fastq" ftype="fastqsolexa"/>
+            <param name="insertion_size" value="600" />
+            <output name="output1" file="shrimp_wrapper_test2.out1" />
+        </test>
+        -->
+    </tests>
+<help>
+
+.. class:: warningmark
+
+IMPORTANT: This tool currently only supports data where the quality scores are either integers or ASCII characters with an offset of 64. Click the pencil icon next to your dataset to set its datatype to *fastqsolexa*.
+
+
+-----
+    
+**What it does**
+ 
+SHRiMP (SHort Read Mapping Package) is a software package for aligning genomic reads against a target genome. 
+
+This wrapper post-processes the default SHRiMP/rmapper-ls output and generates a table with all information from the reads and the reference for the mapping. The tool takes single- or paired-end reads. For single-end reads, only uniquely mapped alignments are considered. For paired-end reads, only pairs that meet the following criteria will be used to generate the table: 1) the ends fall within the insertion size; 2) the ends are mapped in opposite directions. If there are still multiple mapp [...]
+  
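+For orientation, here is a minimal sketch of the paired-end filter described above (illustrative only, not the wrapper's exact code; all names are invented)::
+
+    def valid_pair(strand1, start1, strand2, start2, insertion_size):
+        # criterion 2: the two ends must map in opposite directions
+        if strand1 == strand2:
+            return False
+        # criterion 1: the two ends must fall within the insertion size
+        if abs(start2 - start1) > insertion_size:
+            return False
+        return True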
+
+-----
+
+**Input formats**
+
+A multiple-fastq file, for example::
+
+    @seq1
+    TACCCGATTTTTTGCTTTCCACTTTATCCTACCCTT
+    +seq1
+    hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh
+
+
+-----
+
+**Outputs**
+
+The tool gives two outputs.
+
+**Table output**
+
+Table output contains 8 columns::
+
+     1     2        3       4     5     6     7     8 
+  ----------------------------------------------------
+  chrM   14711     seq1     0     T     A    40     1 
+  chrM   14712     seq1     1     T     T    40     1 
+
+where::
+    
+  1. (chrM)   - Reference sequence id
+  2. (14711)  - Position of the mapping in the reference
+  3. (seq1)   - Read id
+  4. (0)      - Position of the mapping in the read
+  5. (T)      - Nucleotide in the reference
+  6. (A)      - Nucleotide in the read
+  7. (40)     - Quality score for the nucleotide in the position of the read
+  8. (1)      - The number of times this position is covered by reads
+
+     
+**SHRiMP output**
+
+This is the default output from SHRiMP/rmapper-ls::
+ 
+     1     2     3       4      5      6     7     8      9      10
+  -------------------------------------------------------------------
+   seq1  chrM    +     3644    3679    1    36     36    3600    36  
+
+where::
+
+  1. (seq1)   - Read id 
+  2. (chrM)   - Reference sequence id
+  3. (+)      - Strand of the read
+  4. (3644)   - Start position of the alignment in the reference
+  5. (3679)   - End position of the alignment in the reference
+  6. (1)      - Start position of the alignment in the read
+  7. (36)     - End position of the alignment in the read
+  8. (36)     - Length of the read
+  9. (3600)   - Score 
+ 10. (36)     - Edit string
+
+ 
+-----
+
+**SHRiMP parameter list**
+
+The commonly used parameters with default value setting::
+
+    -s    Spaced Seed                             (default: 111111011111)
+          The spaced seed is a single contiguous string of 0's and 1's. 
+          0's represent wildcards, or positions which will always be 
+          considered as matching, whereas 1's dictate positions that 
+          must match. A string of all 1's will result in a simple kmer scan.
+    -n    Seed Matches per Window                 (default: 2)
+          The number of seed matches per window dictates how many seeds 
+          must match within some window length of the genome before that 
+          region is considered for Smith-Waterman alignment. A lower 
+          value will increase sensitivity while drastically increasing 
+          running time. Higher values will have the opposite effect.
+    -t    Seed Hit Taboo Length                   (default: 4)
+          The seed taboo length specifies how many target genome bases 
+          or colors must exist prior to a previous seed match in order 
+          to count another seed match as a hit.
+    -9    Seed Generation Taboo Length            (default: 0)
+          
+    -w    Seed Window Length                      (default: 115.00%)
+          This parameter specifies the genomic span in bases (or colours) 
+          in which *seed_matches_per_window* must exist before the read 
+          is given consideration by the Smith-Waterman alignment machinery.
+    -o    Maximum Hits per Read                   (default: 100)
+          This parameter specifies how many hits to remember for each read. 
+          If more hits are encountered, ones with lower scores are dropped 
+          to make room.
+    -r    Maximum Read Length                     (default: 1000)
+          This parameter specifies the maximum length of reads that will 
+          be encountered in the dataset. If larger reads than the default 
+          are used, an appropriate value must be passed to *rmapper*.
+    -d    Kmer Std. Deviation Limit               (default: -1 [None])
+          This option permits pruning read kmers, which occur with 
+          frequencies greater than *kmer_std_dev_limit* standard 
+          deviations above the average. This can shorten running 
+          time at the cost of some sensitivity. 
+          *Note*: A negative value disables this option.            
+    -m    S-W Match Value                         (default: 100)
+          The value applied to matches during the Smith-Waterman score calculation.
+    -i    S-W Mismatch Value                      (default: -150)
+          The value applied to mismatches during the Smith-Waterman 
+          score calculation.
+    -g    S-W Gap Open Penalty (Reference)        (default: -400)
+          The value applied to gap opens along the reference sequence 
+          during the Smith-Waterman score calculation.
+          *Note*: For backward compatibility, if -g is set and -q is
+          not, the gap open penalty for the query will be set to the
+          same value as specified for the reference.
+    -q    S-W Gap Open Penalty (Query)            (default: -400)
+          The value applied to gap opens along the query sequence during 
+          the Smith-Waterman score calculation.        
+    -e    S-W Gap Extend Penalty (Reference)      (default: -70)
+          The value applied to gap extends during the Smith-Waterman score calculation.
+          *Note*: For backward compatibility, if -e is set and -f is
+          not, the gap extend penalty for the query will be set to the
+          same value as specified for the reference.
+    -f    S-W Gap Extend Penalty (Query)          (default: -70)
+          The value applied to gap extends during the Smith-Waterman score calculation.
+    -h    S-W Hit Threshold                       (default: 68.00%)
+          In letter-space, this parameter determines the threshold 
+          score for both vectored and full Smith-Waterman alignments. 
+          Any values less than this quantity will be thrown away.
+          *Note* This option differs slightly in meaning between letter-space and color-space.
+
+
+-----
+
+**Reference**
+ 
+ **SHRiMP**: Stephen M. Rumble, Michael Brudno, Phil Lacroute, Vladimir Yanovsky, Marc Fiume, Adrian Dalca. shrimp at cs dot toronto dot edu. 
+
+</help>
+</tool>
diff --git a/tools/next_gen_conversion/bwa_solid2fastq_modified.pl b/tools/next_gen_conversion/bwa_solid2fastq_modified.pl
new file mode 100755
index 0000000..eb0ddce
--- /dev/null
+++ b/tools/next_gen_conversion/bwa_solid2fastq_modified.pl
@@ -0,0 +1,89 @@
+#!/usr/bin/perl -w
+
+# Author: lh3
+# Note: Ideally, this script should be written in C. It is a bit slow at present.
+
+use strict;
+use warnings;
+use Getopt::Std;
+
+my %opts;
+my $version = '0.1.3';
+my $usage = qq{
+Usage: solid2fastq.pl <paired> <outfile1> <outfile2> <F3.csfasta> <F3.qual> <R3.csfasta> <R3.qual> 
+
+Note: <in.title> is the string shown in the `# Title:' line of a
+      ".csfasta" read file. Then <in.title>F3.csfasta is read sequence
+      file and <in.title>F3_QV.qual is the quality file. If
+      <in.title>R3.csfasta is present, this script assumes reads are
+      paired; otherwise reads will be regarded as single-end.
+
+      The read name will be <out.prefix>:panel_x_y/[12] with `1' for R3
+      tag and `2' for F3. Usually you may want to use a short <out.prefix>
+      to save disk space. A long <out.prefix> also causes trouble for maq.
+
+};
+
+getopts('', \%opts);
+die($usage) if (@ARGV != 7);
+my ($is_paired,$outfile1,$outfile2,$f3reads,$f3qual,$r3reads,$r3qual) = @ARGV;
+my (@fhr, @fhw);
+my $fn = '';
+my @fn_suff = ($f3reads,$f3qual,$r3reads,$r3qual);
+if ($is_paired eq "yes") { # paired end
+  for (0 .. 3) {
+	$fn = $fn_suff[$_];
+	$fn = "gzip -dc $fn.gz |" if (!-f $fn && -f "$fn.gz");
+	open($fhr[$_], $fn) || die("** Fail to open '$fn'.\n");
+  }
+  open($fhw[0], "|gzip >$outfile2") || die;
+  open($fhw[1], "|gzip >$outfile1") || die;
+  my (@df, @dr);
+  @df = &read1(1); @dr = &read1(2);
+  while (@df && @dr) {
+	if ($df[0] eq $dr[0]) { # mate pair
+	  print {$fhw[0]} $df[1]; print {$fhw[1]} $dr[1];
+	  @df = &read1(1); @dr = &read1(2);
+	} elsif ($df[0] lt $dr[0]) { # orphan F3 read; advance F3
+	  @df = &read1(1);
+	} else { # orphan R3 read; advance R3
+	  @dr = &read1(2);
+	}
+  }
+  close($fhr[$_]) for (0 .. $#fhr);
+  close($fhw[$_]) for (0 .. $#fhw);
+} else { # single end
+  for (0 .. 1) {
+	my $fn = "$fn_suff[$_]";
+	$fn = "gzip -dc $fn.gz |" if (!-f $fn && -f "$fn.gz");
+	open($fhr[$_], $fn) || die("** Fail to open '$fn'.\n");
+  }
+  open($fhw[2], "|gzip >$outfile1") || die;
+  my @df;
+  while (@df = &read1(1, $fhr[0], $fhr[1])) {
+	print {$fhw[2]} $df[1];
+  }
+  close($fhr[$_]) for (0 .. $#fhr);
+  close($fhw[2]);
+}
+
+sub read1 {
+  my $i = shift(@_);
+  my $j = ($i-1)<<1;
+  my ($key, $seq);
+  my ($fhs, $fhq) = ($fhr[$j], $fhr[$j|1]);
+  while (<$fhs>) {
+	my $t = <$fhq>;
+	if (/^>(\d+)_(\d+)_(\d+)_[FR]3/) {
+	  $key = sprintf("%.4d_%.4d_%.4d", $1, $2, $3); # this line could be improved on 64-bit machines
+	  #print $key;
+	  die(qq/** unmatched read name: '$_' != '$t'\n/) unless ($_ eq $t);
+	  my $name = "$1_$2_$3/$i";
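+	  # skip the primer base and the first colour call, then
+	  # double-encode the remaining colours (0123.) as ACGTN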
+	  $_ = substr(<$fhs>, 2);
+	  tr/0123./ACGTN/;
+	  my $s = $_;
+	  $_ = <$fhq>;
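+	  # drop the first quality value to match, then convert the rest
+	  # from space-delimited integers to Sanger ASCII (Phred+33)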
+	  s/^(\d+)\s*//;
+	  s/(\d+)\s*/chr($1+33)/eg;
+	  $seq = qq/\@$name\n$s+\n$_\n/;
+	  last;
+	} 
+  }
+  return defined($seq)? ($key, $seq) : ();
+}
diff --git a/tools/next_gen_conversion/fastq_conversions.py b/tools/next_gen_conversion/fastq_conversions.py
new file mode 100644
index 0000000..5737346
--- /dev/null
+++ b/tools/next_gen_conversion/fastq_conversions.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+"""
+Performs various conversions around Sanger FASTQ data
+
+usage: %prog [options]
+   -c, --command=c: Command to run
+   -i, --input=i: Input file to be converted
+   -o, --outputFastqsanger=o: FASTQ Sanger converted output file for sol2std
+   -s, --outputFastqsolexa=s: FASTQ Solexa converted output file
+   -f, --outputFasta=f: FASTA converted output file
+
+usage: %prog command input_file output_file
+"""
+
+import os
+import sys
+
+from bx.cookbook import doc_optparse
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def __main__():
+    # Parse Command Line
+    options, args = doc_optparse.parse( __doc__ )
+
+    cmd = "fq_all2std.pl %s %s > %s"
+    if options.command == 'sol2std':
+        cmd = cmd % (options.command, options.input, options.outputFastqsanger)
+    elif options.command == 'std2sol':
+        cmd = cmd % (options.command, options.input, options.outputFastqsolexa)
+    elif options.command == 'fq2fa':
+        cmd = cmd % (options.command, options.input, options.outputFasta)
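+    # NOTE: os.system() returns the command's exit status rather than raising,
+    # so failures from fq_all2std.pl are not actually caught by this except.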
+    try:
+        os.system(cmd)
+    except Exception as eq:
+        stop_err("Error converting data format.\n" + str(eq))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/next_gen_conversion/fastq_conversions.xml b/tools/next_gen_conversion/fastq_conversions.xml
new file mode 100644
index 0000000..c4dca32
--- /dev/null
+++ b/tools/next_gen_conversion/fastq_conversions.xml
@@ -0,0 +1,133 @@
+<tool id="fastq_conversions" name="FASTQ Conversions" version="1.0.0">
+  <description>converts between FASTQ data and other data formats</description>
+  <command interpreter="python">
+    fastq_conversions.py 
+    --command=$conversionType.type
+    --input=$input
+    #if $conversionType.type == "sol2std":
+     --outputFastqsanger=$outputFastqsanger
+    #else:
+     --outputFastqsanger="None"
+    #end if
+    #if $conversionType.type == "std2sol":
+     --outputFastqsolexa=$outputFastqsolexa
+    #else:
+     --outputFastqsolexa="None"
+    #end if
+    #if $conversionType.type == "fq2fa":
+     --outputFasta=$outputFasta
+    #else:
+     --outputFasta="None"
+    #end if
+  </command>
+  <inputs>
+    <conditional name="conversionType">
+      <param name="type" type="select" label="What type of conversion do you want to do?">
+        <option value="sol2std">Solexa/Illumina FASTQ to standard Sanger FASTQ</option>
+        <option value="std2sol">Standard Sanger FASTQ to Solexa/Illumina FASTQ</option>
+        <option value="fq2fa">Various FASTQ to FASTA</option>
+      </param>
+      <when value="sol2std">
+        <param name="input" type="data" format="fastqsolexa" label="File to convert" />
+      </when>
+      <when value="std2sol">
+        <param name="input" type="data" format="fastqsanger" label="File to convert" />
+      </when>
+      <when value="fq2fa">
+        <param name="input" type="data" format="fastqsolexa, fastqsanger" label="File to convert" />
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data name="outputFastqsanger" format="fastqsanger">
+      <filter>conversionType['type'] == 'sol2std'</filter>
+    </data>
+    <data name="outputFastqsolexa" format="fastqsolexa">
+      <filter>conversionType['type'] == 'std2sol'</filter>
+    </data>
+    <data name="outputFasta" format="fasta">
+      <filter>conversionType['type'] == 'fq2fa'</filter>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="type" value="sol2std" />
+      <param name="input" value="fastq_conv_in1.fastq" ftype="fastqsolexa" />
+      <output name="outputFastqsanger" file="fastq_conv_out1.fastqsanger" />
+    </test>
+    <test>
+      <param name="type" value="std2sol" />
+      <param name="input" value="1.fastqsanger" ftype="fastqsanger" />
+      <output name="outputFastqsolexa" file="fastq_conv_out2.fastqsolexa" />
+    </test>
+    <test>
+      <param name="type" value="fq2fa" />
+      <param name="input" value="1.fastqsanger" ftype="fastqsanger" />
+      <output name="outputFasta" file="fastq_conv_out4.fasta" />
+    </test>
+  </tests>
+  <help>
+**What it does**
+
+This tool offers several conversion options relating to the FASTQ format.
+
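+Under the hood, each conversion re-encodes per-base quality values. As a rough sketch of the sol2std mapping (illustrative only; the actual work is done by the fq_all2std.pl script this tool invokes, whose rounding may differ slightly)::
+
+    import math
+
+    def solexa_char_to_sanger(c):
+        # Solexa stores Q = -10*log10(p/(1-p)) at ASCII offset 64;
+        # Sanger stores Q = -10*log10(p) at ASCII offset 33.
+        q_solexa = ord(c) - 64
+        p = 10.0 ** (q_solexa / -10.0) / (1 + 10.0 ** (q_solexa / -10.0))
+        return chr(int(-10.0 * math.log10(p)) + 33)
+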
+-----
+
+**Examples**
+
+- Converting the Solexa/Illumina FASTQ data::
+
+	@081017-and-081020:1:1:1715:1759
+	GGACTCAGATAGTAATCCACGCTCCTTTAAAATATC
+	+
+	II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B
+
+- will produce the following Sanger FASTQ data::
+
+	@081017-and-081020:1:1:1715:1759
+	GGACTCAGATAGTAATCCACGCTCCTTTAAAATATC
+	+
+	++!+++++++!!!!!"+++++++!!!!)!%!!+!!%!
+	
+- Converting standard Sanger FASTQ::
+    
+	@1831_573_1004/1
+	AATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
+	+
+	><C&&9952+C>5<.?<79,=42<292:<(9/-7
+	@1831_573_1050/1
+	TTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
+	+
+	;@@17?@=>7??@A8?==@4A?A4)&+.'&+'1,
+
+- will produce the following Solexa/Illumina FASTQ data::
+
+	@1831_573_1004/1
+	AATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
+	+
+	][bEEXXTQJb]T[M^[VXK\SQ[QXQY[GXNLV
+	@1831_573_1050/1
+	TTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
+	+
+	Z__PV^_\]V^^_`W^\\_S`^`SHEJMFEJFPK
+
+- Converting the Sanger FASTQ data::
+
+	@1831_573_1004/1
+	AATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
+	+
+	><C&&9952+C>5<.?<79,=42<292:<(9/-7
+	@1831_573_1050/1
+	TTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
+	+
+	;@@17?@=>7??@A8?==@4A?A4)&+.'&+'1,
+	
+- will produce the following FASTA data::
+
+	>1831_573_1004/1
+	AATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
+	>1831_573_1050/1
+	TTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
+
+  </help>
+</tool>
diff --git a/tools/next_gen_conversion/fastq_gen_conv.py b/tools/next_gen_conversion/fastq_gen_conv.py
new file mode 100644
index 0000000..2e2e2be
--- /dev/null
+++ b/tools/next_gen_conversion/fastq_gen_conv.py
@@ -0,0 +1,177 @@
+"""
+Converts any type of FASTQ file to Sanger type and makes small adjustments if necessary.
+
+usage: %prog [options]
+   -i, --input=i: Input FASTQ candidate file
+   -r, --origType=r: Original type
+   -a, --allOrNot=a: Whether or not to check all blocks
+   -b, --blocks=b: Number of blocks to check
+   -o, --output=o: Output file
+
+usage: %prog input_file output_file
+"""
+
+import math
+import sys
+
+from bx.cookbook import doc_optparse
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def all_bases_valid(seq):
+    """Confirm that the sequence contains only bases"""
+    valid_bases = ['a', 'A', 'c', 'C', 'g', 'G', 't', 'T', 'N']
+    for base in seq:
+        if base not in valid_bases:
+            return False
+    return True
+
+
+def __main__():
+    # Parse Command Line
+    options, args = doc_optparse.parse( __doc__ )
+    orig_type = options.origType
+    if orig_type == 'sanger' and options.allOrNot == 'not':
+        max_blocks = int(options.blocks)
+    else:
+        max_blocks = -1
+    fin = open(options.input, 'r')
+    fout = open(options.output, 'w')
+    range_min = 1000
+    range_max = -5
+    block_num = 0
+    bad_blocks = 0
+    base_len = -1
+    line_count = 0
+    lines = []
+    line = fin.readline()
+    while line:
+        if line.strip() and max_blocks >= 0 and block_num > 0 and orig_type == 'sanger' and block_num >= max_blocks:
+            fout.write(line)
+            if line_count % 4 == 0:
+                block_num += 1
+            line_count += 1
+        elif line.strip():
+            # the line that starts a block, with a name
+            if line_count % 4 == 0 and line.startswith('@'):
+                lines.append(line)
+            else:
+                # if we expect a sequence of bases
+                if line_count % 4 == 1 and all_bases_valid(line.strip()):
+                    lines.append(line)
+                    base_len = len(line.strip())
+                # if we expect the second name line
+                elif line_count % 4 == 2 and line.startswith('+'):
+                    lines.append(line)
+                # if we expect a sequence of qualities and it's the expected length
+                elif line_count % 4 == 3:
+                    split_line = line.strip().split()
+                    # decimal qualities
+                    if len(split_line) == base_len:
+                        # convert
+                        phred_list = []
+                        for ch in split_line:
+                            int_ch = int(ch)
+                            if int_ch < range_min:
+                                range_min = int_ch
+                            if int_ch > range_max:
+                                range_max = int_ch
+                            if int_ch >= 0 and int_ch <= 93:
+                                phred_list.append(chr(int_ch + 33))
+                        # make sure we haven't lost any quality values
+                        if len(phred_list) == base_len:
+                            # print first three lines
+                            for l in lines:
+                                fout.write(l)
+                            # print converted quality line
+                            fout.write(''.join(phred_list) + '\n')
+                            # reset
+                            lines = []
+                            base_len = -1
+                        # abort if so
+                        else:
+                            bad_blocks += 1
+                            lines = []
+                            base_len = -1
+                    # ascii qualities
+                    elif len(split_line[0]) == base_len:
+                        qualities = []
+                        # print converted quality line
+                        if orig_type == 'illumina':
+                            for c in line.strip():
+                                if ord(c) - 64 < range_min:
+                                    range_min = ord(c) - 64
+                                if ord(c) - 64 > range_max:
+                                    range_max = ord(c) - 64
+                                if ord(c) < 64 or ord(c) > 126:
+                                    bad_blocks += 1
+                                    base_len = -1
+                                    lines = []
+                                    break
+                                else:
+                                    qualities.append( chr( ord(c) - 31 ) )
+                            quals = ''.join(qualities)
+                        elif orig_type == 'solexa':
+                            for c in line.strip():
+                                if ord(c) - 64 < range_min:
+                                    range_min = ord(c) - 64
+                                if ord(c) - 64 > range_max:
+                                    range_max = ord(c) - 64
+                                if ord(c) < 59 or ord(c) > 126:
+                                    bad_blocks += 1
+                                    base_len = -1
+                                    lines = []
+                                    break
+                                else:
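+                                    # Solexa Q = -10*log10(p/(1-p)); recover the
+                                    # error probability p, then re-encode it as
+                                    # Sanger/Phred: -10*log10(p), ASCII offset 33.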
+                                    p = 10.0 ** ( ( ord(c) - 64 ) / -10.0 ) / ( 1 + 10.0 ** ( ( ord(c) - 64 ) / -10.0 ) )
+                                    qualities.append( chr( int( -10.0 * math.log10( p ) ) + 33 ) )
+                            quals = ''.join(qualities)
+                        else:  # 'sanger'
+                            for c in line.strip():
+                                if ord(c) - 33 < range_min:
+                                    range_min = ord(c) - 33
+                                if ord(c) - 33 > range_max:
+                                    range_max = ord(c) - 33
+                                if ord(c) < 33 or ord(c) > 126:
+                                    bad_blocks += 1
+                                    base_len = -1
+                                    lines = []
+                                    break
+                                else:
+                                    qualities.append(c)
+                            quals = ''.join(qualities)
+                        # make sure we don't have bad qualities
+                        if len(quals) == base_len:
+                            # print first three lines
+                            for l in lines:
+                                fout.write(l)
+                            # print out quality line
+                            fout.write(quals + '\n')
+                        # reset
+                        lines = []
+                        base_len = -1
+                    else:
+                        bad_blocks += 1
+                        base_len = -1
+                        lines = []
+                    # mark the successful end of a block
+                    block_num += 1
+            line_count += 1
+        line = fin.readline()
+    fout.close()
+    fin.close()
+    if range_min != 1000 and range_max != -5:
+        outmsg = 'The range of quality values found was: %s to %s' % (range_min, range_max)
+    else:
+        outmsg = ''
+    if bad_blocks > 0:
+        outmsg += '\nThere were %s bad blocks skipped' % (bad_blocks)
+    sys.stdout.write(outmsg)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/next_gen_conversion/fastq_gen_conv.xml b/tools/next_gen_conversion/fastq_gen_conv.xml
new file mode 100644
index 0000000..8085608
--- /dev/null
+++ b/tools/next_gen_conversion/fastq_gen_conv.xml
@@ -0,0 +1,106 @@
+<tool id="fastq_gen_conv" name="FASTQ Groomer" version="1.0.0">
+  <description>converts any FASTQ to Sanger</description>
+  <command interpreter="python">
+    fastq_gen_conv.py 
+     --input=$input 
+     --origType=$origTypeChoice.origType
+     #if $origTypeChoice.origType == "sanger":
+      --allOrNot=$origTypeChoice.howManyBlocks.allOrNot 
+      #if $origTypeChoice.howManyBlocks.allOrNot == "not":
+       --blocks=$origTypeChoice.howManyBlocks.blocks
+      #else:
+       --blocks="None"
+      #end if
+     #else:
+      --allOrNot="None"
+      --blocks="None"
+     #end if
+     --output=$output
+  </command>
+  <inputs>
+    <param name="input" type="data" format="fastq" label="Groom this dataset" />
+    <conditional name="origTypeChoice">
+      <param name="origType" type="select" label="How do you think quality values are scaled?" help="See below for explanation">
+        <option value="solexa">Solexa/Illumina 1.0</option>
+        <option value="illumina">Illumina 1.3+</option>
+        <option value="sanger">Sanger (validation only)</option>
+      </param>
+      <when value="solexa" />
+      <when value="illumina" />
+      <when value="sanger">
+        <conditional name="howManyBlocks">
+          <param name="allOrNot" type="select" label="Since your fastq is already in Sanger format you can check it for consistency">
+            <option value="all">Check all (may take a while)</option> 
+            <option selected="true" value="not">Check selected number of blocks</option>
+          </param>
+          <when value="all" />
+          <when value="not">
+            <param name="blocks" type="integer" value="1000" label="How many blocks (four lines each) do you want to check?" />
+          </when>
+        </conditional>
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data name="output" format="fastqsanger"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="fastq_gen_conv_in1.fastq" ftype="fastq" />
+      <param name="origType" value="solexa" />
+      <output name="output" format="fastqsanger" file="fastq_gen_conv_out1.fastqsanger" />
+    </test>
+    <test>
+      <param name="input" value="fastq_gen_conv_in2.fastq" ftype="fastq" />
+      <param name="origType" value="sanger" />
+      <param name="allOrNot" value="not" />
+      <param name="blocks" value="3" />
+      <output name="output" format="fastqsanger" file="fastq_gen_conv_out2.fastqsanger" />
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+The Galaxy pipeline for mapping Illumina data requires data in fastq format with quality values conforming to the so-called "Sanger" format. Unfortunately there are many other types of fastq. The main objective of this tool is therefore to "groom" multiple types of fastq into Sanger-conforming fastq that can be used in downstream applications such as mapping.
+
+.. class:: infomark
+
+**TIP**: If the input dataset is already in Sanger format the tool does not perform conversion. However validation (described below) is still performed.
+
+-----
+
+**Types of fastq datasets**
+
+A good description of fastq datasets can be found `here`__, while a description of Galaxy's fastq "logic" can be found `here`__. Because the ranges of quality values in different types of fastq datasets overlap, it is very difficult to detect them automatically. This tool supports conversion of two commonly found types (Solexa/Illumina 1.0 and Illumina 1.3+) into fastq Sanger.
+
+ .. __: http://en.wikipedia.org/wiki/FASTQ_format
+ .. __: https://wiki.galaxyproject.org/Learn/Datatypes#Fastq
+
+.. class:: warningmark
+
+**NOTE** that there is also a type of fastq format where quality values are represented by a list of space-delimited integers (e.g., 40 40 20 15 -5 20 ...). This tool **does not** handle such fastq. If you have such a dataset, it needs to be converted into ASCII-type fastq (where quality values are encoded by characters) by a "Numeric-to-ASCII" utility before it can be accepted by this tool.
+
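+For reference, that numeric-to-ASCII step is a simple per-value re-encoding. A sketch, assuming non-negative Sanger-scaled integer values::
+
+    quals = '40 40 20 15 5 20'
+    ascii_quals = ''.join(chr(int(q) + 33) for q in quals.split())
+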
+-----
+
+**Validation**
+
+In addition to converting quality values to Sanger format the tool also checks the input dataset for consistency. Specifically, it performs these four checks:
+
+- skips empty lines
+- checks that blocks are properly formed by making sure that:
+
+  #. there are four lines per block
+  #. the first line starts with "@"
+  #. the third line starts with "+"
+  #. lengths of second line (sequences) and the fourth line (quality string) are identical
+  
+- checks that quality values are within range for the chosen fastq format (i.e., the format selected by the user in the **How do you think quality values are scaled?** drop-down)
+
+To see exactly what the tool does you can take a look at its source code `here`__.
+
+ .. __: http://bitbucket.org/galaxy/galaxy-central/src/tip/tools/next_gen_conversion/fastq_gen_conv.py
+
+
+    </help>
+</tool>
diff --git a/tools/next_gen_conversion/solid2fastq.py b/tools/next_gen_conversion/solid2fastq.py
new file mode 100644
index 0000000..ec41bd6
--- /dev/null
+++ b/tools/next_gen_conversion/solid2fastq.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python
+
+import optparse
+import sqlite3
+import string
+import sys
+import tempfile
+
+import six
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def solid2sanger( quality_string, min_qual=0 ):
+    sanger = ""
+    quality_string = quality_string.rstrip( " " )
+    for qv in quality_string.split(" "):
+        try:
+            if int( qv ) < 0:
+                qv = '0'
+            if int( qv ) < min_qual:
+                return False
+            sanger += chr( int( qv ) + 33 )
+        except ValueError:
+            # skip tokens that are not integers
+            pass
+    return sanger
+
+
+def Translator(frm='', to='', delete=''):
+    if len(to) == 1:
+        to = to * len(frm)
+    if six.PY2:
+        trans = string.maketrans(frm, to)
+    else:
+        trans = str.maketrans(frm, to)
+
+    def callable(s):
+        if six.PY2:
+            return s.translate(trans, delete)
+        # Python 3 str.translate() takes only the table argument,
+        # so deletions are handled separately.
+        if delete:
+            s = ''.join(c for c in s if c not in delete)
+        return s.translate(trans)
+
+    return callable
+
+
+def merge_reads_qual( f_reads, f_qual, f_out, trim_name=False, out='fastq', double_encode=False, trim_first_base=False, pair_end_flag='', min_qual=0, table_name=None ):
+    # Reads from two files, f_reads (reads) and f_qual (quality values), and produces output in one of three formats depending on the out parameter,
+    # which can have three values: fastq, txt, and db
+    # fastq = fastq format
+    # txt = space delimited format with defline, reads, and qvs
+    # db = dump data into a sqlite3 db.
+    # IMPORTANT! If out == 'db', two options must be provided:
+    #   1. f_out must be a db connection object initialized with sqlite3.connect()
+    #   2. table_name must be provided
+    if out == 'db':
+        cursor = f_out.cursor()
+        sql = "create table %s (name varchar(50) not null, read blob, qv blob)" % table_name
+        cursor.execute(sql)
+
+    lines = []
+    line = " "
+    while line:
+        for f in [ f_reads, f_qual ]:
+            line = f.readline().rstrip( '\n\r' )
+            while line.startswith( '#' ):
+                line = f.readline().rstrip( '\n\r' )
+            lines.append( line )
+
+        if lines[0].startswith( '>' ) and lines[1].startswith( '>' ):
+            if lines[0] != lines[1]:
+                stop_err('Reads and quality score files are out of sync and likely corrupted. Please check your input data.')
+
+            defline = lines[0][1:]
+            if trim_name and ( defline[ len(defline) - 3: ] == "_F3" or defline[ len(defline) - 3: ] == "_R3" ):
+                defline = defline[ :len(defline) - 3 ]
+
+        elif ( not lines[0].startswith( '>' ) and not lines[1].startswith( '>' ) and len( lines[0] ) > 0 and len( lines[1] ) > 0 ):
+            if trim_first_base:
+                lines[0] = lines[0][1:]
+            if double_encode:
+                de = Translator(frm="0123.", to="ACGTN")
+                lines[0] = de(lines[0])
+            qual = solid2sanger( lines[1], int( min_qual ) )
+            if qual:
+                if out == 'fastq':
+                    f_out.write( "@%s%s\n%s\n+\n%s\n" % ( defline, pair_end_flag, lines[0], qual ) )
+                if out == 'txt':
+                    f_out.write( '%s %s %s\n' % (defline, lines[0], qual ) )
+                if out == 'db':
+                    cursor.execute('insert into %s values("%s","%s","%s")' % (table_name, defline, lines[0], qual ) )
+        lines = []
+
+
+def main():
+    usage = "%prog --fr F3.csfasta --fq R3.csfasta --fout fastq_output_file [option]"
+    parser = optparse.OptionParser(usage=usage)
+    parser.add_option(
+        '--fr', '--f_reads',
+        metavar="F3_CSFASTA_FILE",
+        dest='fr',
+        help='Name of F3 file with color space reads')
+    parser.add_option(
+        '--fq', '--f_qual',
+        metavar="F3_QUAL_FILE",
+        dest='fq',
+        help='Name of F3 file with color quality values')
+    parser.add_option(
+        '--fout', '--f3_fastq_output',
+        metavar="F3_OUTPUT",
+        dest='fout',
+        help='Name for F3 output file')
+    parser.add_option(
+        '--rr', '--r_reads',
+        metavar="R3_CSFASTA_FILE",
+        dest='rr',
+        default=False,
+        help='Name of R3 file with color space reads')
+    parser.add_option(
+        '--rq', '--r_qual',
+        metavar="R3_QUAL_FILE",
+        dest='rq',
+        default=False,
+        help='Name of R3 file with color quality values')
+    parser.add_option(
+        '--rout',
+        metavar="R3_OUTPUT",
+        dest='rout',
+        help='Name for R3 output file')
+    parser.add_option(
+        '-q', '--min_qual',
+        dest='min_qual',
+        default='-1000',
+        help='Minimum quality threshold for printing reads. If a read contains a single call with QV lower than this value, it will not be reported. Default is -1000')
+    parser.add_option(
+        '-t', '--trim_name',
+        dest='trim_name',
+        action='store_true',
+        default=False,
+        help='Trim _R3 and _F3 off read names. Default is False')
+    parser.add_option(
+        '-f', '--trim_first_base',
+        dest='trim_first_base',
+        action='store_true',
+        default=False,
+        help='Remove the first base of reads in color-space. Default is False')
+    parser.add_option(
+        '-d', '--double_encode',
+        dest='de',
+        action='store_true',
+        default=False,
+        help='Double encode color calls as nucleotides: 0123. becomes ACGTN. Default is False')
+
+    options, args = parser.parse_args()
+
+    if not ( options.fout and options.fr and options.fq ):
+        parser.error("""
+        One or more of the three required parameters are missing:
+        (1) --fr F3.csfasta file
+        (2) --fq F3.qual file
+        (3) --fout name of output file
+        Use --help for more info
+        """)
+
+    fr = open( options.fr, 'r' )
+    fq = open( options.fq, 'r' )
+    f_out = open( options.fout, 'w' )
+
+    if options.rr and options.rq:
+        rr = open( options.rr, 'r' )
+        rq = open( options.rq, 'r' )
+        if not options.rout:
+            parser.error("Provide the name for f3 output using --rout option. Use --help for more info")
+        r_out = open( options.rout, 'w' )
+
+        db = tempfile.NamedTemporaryFile()
+
+        try:
+            con = sqlite3.connect(db.name)
+            cur = con.cursor()
+        except:
+            stop_err('Cannot connect to %s\n' % db.name)
+
+        merge_reads_qual( fr, fq, con, trim_name=options.trim_name, out='db', double_encode=options.de, trim_first_base=options.trim_first_base, min_qual=options.min_qual, table_name="f3" )
+        merge_reads_qual( rr, rq, con, trim_name=options.trim_name, out='db', double_encode=options.de, trim_first_base=options.trim_first_base, min_qual=options.min_qual, table_name="r3" )
+        cur.execute('create index f3_name on f3( name )')
+        cur.execute('create index r3_name on r3( name )')
+
+        cur.execute('select * from f3,r3 where f3.name = r3.name')
+        for item in cur:
+            f_out.write( "@%s%s\n%s\n+\n%s\n" % (item[0], "/1", item[1], item[2]) )
+            r_out.write( "@%s%s\n%s\n+\n%s\n" % (item[3], "/2", item[4], item[5]) )
+
+    else:
+        merge_reads_qual( fr, fq, f_out, trim_name=options.trim_name, out='fastq', double_encode=options.de, trim_first_base=options.trim_first_base, min_qual=options.min_qual )
+
+    f_out.close()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/next_gen_conversion/solid2fastq.xml b/tools/next_gen_conversion/solid2fastq.xml
new file mode 100644
index 0000000..f88c414
--- /dev/null
+++ b/tools/next_gen_conversion/solid2fastq.xml
@@ -0,0 +1,154 @@
+<tool id="solid2fastq" name="Convert" version="1.0.0">
+  <description>SOLiD output to fastq</description>
+  <command interpreter="python">
+    #if   $is_run.paired == "no"    #solid2fastq.py --fr=$input1 --fq=$input2 --fout=$out_file1 -q $qual $trim_name $trim_first_base $double_encode
+    #elif $is_run.paired == "yes"   #solid2fastq.py --fr=$input1 --fq=$input2 --fout=$out_file1 --rr=$input3 --rq=$input4 --rout=$out_file2 -q $qual $trim_name $trim_first_base $double_encode
+    #end if#
+  </command>
+  <inputs>
+    <param name="input1" type="data" format="csfasta" label="Select reads"/>
+    <param name="input2" type="data" format="qualsolid" label="Select qualities"/>
+    <conditional name="is_run">
+        <param name="paired" type="select" label="Is this a mate-pair run?">
+            <option value="no" selected="true">No</option>
+            <option value="yes">Yes</option>
+        </param>
+        <when value="yes">
+            <param name="input3" type="data" format="csfasta" label="Select Reverse reads"/>
+            <param name="input4" type="data" format="qualsolid" label="Select Reverse qualities"/>
+        </when>
+        <when value="no">
+        </when>
+    </conditional>
+    <param name="qual" label="Remove reads containing color qualities below this value" type="integer" value="0"/>
+    <param name="trim_name" type="select" label="Trim trailing "_F3" and "_R3" ?">
+        <option value="-t" selected="true">Yes</option>
+        <option value="">No</option>
+    </param>
+    <param name="trim_first_base" type="select" label="Trim first base?">
+        <option value="-f">Yes (BWA)</option>
+        <option value="" selected="true">No (bowtie)</option>
+    </param>
+    <param name="double_encode" type="select" label="Double encode?">
+        <option value="-d">Yes (BWA)</option>
+        <option value="" selected="true">No (bowtie)</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="fastqcssanger" name="out_file1"/>
+    <data format="fastqcssanger" name="out_file2">
+        <filter>is_run['paired'] == 'yes'</filter>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="fr.csfasta" ftype="csfasta"/>
+      <param name="input2" value="fr.qualsolid" ftype="qualsolid" />
+      <param name="paired" value="no"/>
+      <param name="qual" value="0" />
+      <param name="trim_first_base" value="No" />
+      <param name="trim_name" value="No" />
+      <param name="double_encode" value="No"/>
+      <output name="out_file1" file="solid2fastq_out_1.fastq"/>
+    </test>
+    <test>
+      <param name="input1" value="fr.csfasta" ftype="csfasta"/>
+      <param name="input2" value="fr.qualsolid" ftype="qualsolid" />
+      <param name="paired" value="yes"/>
+      <param name="input3" value="rr.csfasta" ftype="csfasta"/>
+      <param name="input4" value="rr.qualsolid" ftype="qualsolid" />
+      <param name="qual" value="0" />
+      <param name="trim_first_base" value="No" />
+      <param name="trim_name" value="Yes" />
+      <param name="double_encode" value="No"/>
+      <output name="out_file1" file="solid2fastq_out_2.fastq"/>
+      <output name="out_file2" file="solid2fastq_out_3.fastq"/>
+    </test>
+ </tests>
+<help>
+**What it does**
+
+Converts the output of SOLiD instruments (versions 3.5 and earlier) to fastq format suitable for the bowtie, bwa, and PerM mappers.
+
+--------
+
+**Input datasets**
+
+Below are examples of forward (F3) reads and quality scores:
+
+Reads::
+
+    >1831_573_1004_F3
+    T00030133312212111300011021310132222
+    >1831_573_1567_F3
+    T03330322230322112131010221102122113
+
+Quality scores::
+
+    >1831_573_1004_F3
+    4 29 34 34 32 32 24 24 20 17 10 34 29 20 34 13 30 34 22 24 11 28 19 17 34 17 24 17 25 34 7 24 14 12 22
+    >1831_573_1567_F3
+    8 26 31 31 16 22 30 31 28 29 22 30 30 31 32 23 30 28 28 31 19 32 30 32 19 8 32 10 13 6 32 10 6 16 11
+
+
+**Mate pairs**
+
+If your data is from a mate-paired run, you will have additional read and quality datasets that will look similar to the ones above with one exception: the names of reads will be ending with "_R3".
+In this case choose **Yes** from the *Is this a mate-pair run?* drop down and you will be able to select R reads. When processing mate pairs this tool generates two output files: one for F3 reads and the other for R3 reads.
+The reads are guaranteed to be paired -- mated reads will be in the same position in the F3 and R3 fastq files. However, because pairing is verified, it may take a while to process an entire SOLiD run (several hours).
+
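+Internally, the tool loads both ends into a temporary SQLite database and pairs them by joining on read name, which is why only verified pairs appear in the output::
+
+    select * from f3, r3 where f3.name = r3.name
+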
+------
+
+**Explanation of parameters**
+
+**Remove reads containing color qualities below this value** - any read that contains at least one color call with quality lower than the specified value **will not** be reported.
+
+**Trim trailing "_F3" and "_R3"?** - does just that. Not necessary for bowtie. Required for BWA.
+
+**Trim first base?** - SOLiD reads contain an adapter base such as the first T in this read::
+
+    >1831_573_1004_F3
+    T00030133312212111300011021310132222
+
+this option removes this base, leaving only color calls. Not necessary for bowtie. Required for BWA.
+
+**Double encode?** - converts color calls (0123.) to pseudo-nucleotides (ACGTN). Not necessary for bowtie. Required for BWA.
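+
+For illustration, double encoding is a simple character-for-character
+substitution (0 to A, 1 to C, 2 to G, 3 to T, . to N), with the adapter base
+kept as-is. A minimal Python sketch of the mapping (illustration only, not
+part of this tool)::
+
+    DOUBLE_ENCODE = {'0': 'A', '1': 'C', '2': 'G', '3': 'T', '.': 'N'}
+
+    def double_encode(read):
+        # keep the adapter base (e.g. the leading T) and encode the color calls
+        return read[0] + ''.join(DOUBLE_ENCODE[c] for c in read[1:])
+
+    double_encode('T0003')  # -> 'TAAAT'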
+
+------
+
+**Examples of output**
+
+When all parameters are left "as-is" you will get this (using reads and qualities shown above)::
+
+ @1831_573_1004
+ T00030133312212111300011021310132222
+ +
+ %%>CCAA9952+C>5C.?C79,=42C292:C(9/-7
+ @1831_573_1567
+ T03330322230322112131010221102122113
+ +
+ );@@17?@=>7??@A8?==@4A?A4)A+.'A+'1,
+
+Setting *Trim first base from reads* to **Yes** will produce this::
+
+ @1831_573_1004
+ 00030133312212111300011021310132222
+ +
+ %%>CCAA9952+C>5C.?C79,=42C292:C(9/-7
+ @1831_573_1567
+ 03330322230322112131010221102122113
+ +
+ );@@17?@=>7??@A8?==@4A?A4)A+.'A+'1,
+
+Finally, setting *Double encode* to **Yes** will yield::
+
+ @1831_573_1004
+ TAAATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
+ +
+ %%>CCAA9952+C>5C.?C79,=42C292:C(9/-7
+ @1831_573_1567
+ TATTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
+ +
+ );@@17?@=>7??@A8?==@4A?A4)A+.'A+'1,
+</help>
+</tool>
diff --git a/tools/next_gen_conversion/solid_to_fastq.py b/tools/next_gen_conversion/solid_to_fastq.py
new file mode 100644
index 0000000..dc748ad
--- /dev/null
+++ b/tools/next_gen_conversion/solid_to_fastq.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+
+"""
+Converts SOLiD data to Sanger FASTQ format.
+
+usage: %prog [options]
+   -i, --input1=i: Forward reads file
+   -q, --input2=q: Forward qual file
+   -I, --input3=I: Reverse reads file
+   -Q, --input4=Q: Reverse qual file
+   -o, --output1=o: Forward output
+   -r, --output2=r: Reverse output
+
+usage: %prog forward_reads_file forward_qual_file reverse_reads_file(or_None) reverse_qual_file(or_None) output_file output_id output_dir
+"""
+
+import os
+import sys
+import tempfile
+
+from bx.cookbook import doc_optparse
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit(1)
+
+
+def replaceNeg1(fin, fout):
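+    # SOLiD quality files mark missing color calls with -1; FASTQ needs a
+    # non-negative score, so rewrite "-1" values to "1" before encoding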
+    line = fin.readline()
+    while line.strip():
+        fout.write(line.replace('-1', '1'))
+        line = fin.readline()
+    fout.seek(0)
+    return fout
+
+
+def __main__():
+    # Parse Command Line
+    options, args = doc_optparse.parse( __doc__ )
+    # common temp file setup
+    tmpf = tempfile.NamedTemporaryFile()  # forward reads
+    tmpqf = tempfile.NamedTemporaryFile()
+    tmpqf = replaceNeg1(open(options.input2, 'r'), tmpqf)
+    # if paired-end data (have reverse input files)
+    if options.input3 != "None" and options.input4 != "None":
+        tmpr = tempfile.NamedTemporaryFile()  # reverse reads
+        # replace the -1 in the qualities file
+        tmpqr = tempfile.NamedTemporaryFile()
+        tmpqr = replaceNeg1(open(options.input4, 'r'), tmpqr)
+        cmd1 = "%s/bwa_solid2fastq_modified.pl 'yes' %s %s %s %s %s %s 2>&1" % (os.path.split(sys.argv[0])[0], tmpf.name, tmpr.name, options.input1, tmpqf.name, options.input3, tmpqr.name)
+        try:
+            os.system(cmd1)
+            os.system('gunzip -c %s >> %s' % (tmpf.name, options.output1))
+            os.system('gunzip -c %s >> %s' % (tmpr.name, options.output2))
+        except Exception as eq:
+            stop_err("Error converting data to fastq format.\n" + str(eq))
+        tmpr.close()
+        tmpqr.close()
+    # if single-end data
+    else:
+        cmd1 = "%s/bwa_solid2fastq_modified.pl 'no' %s %s %s %s %s %s 2>&1" % (os.path.split(sys.argv[0])[0], tmpf.name, None, options.input1, tmpqf.name, None, None)
+        try:
+            os.system(cmd1)
+            os.system('gunzip -c %s >> %s' % (tmpf.name, options.output1))
+        except Exception as eq:
+            stop_err("Error converting data to fastq format.\n" + str(eq))
+    tmpqf.close()
+    tmpf.close()
+    sys.stdout.write('converted SOLiD data')
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/next_gen_conversion/solid_to_fastq.xml b/tools/next_gen_conversion/solid_to_fastq.xml
new file mode 100644
index 0000000..d8a1930
--- /dev/null
+++ b/tools/next_gen_conversion/solid_to_fastq.xml
@@ -0,0 +1,101 @@
+<tool id="solid_to_fastq" name="SOLiD-to-FASTQ" version="1.0.0">
+  <description>converts SOLiD data to FASTQ data</description>
+  <command interpreter="python">
+    solid_to_fastq.py 
+    --input1=$input1 
+    --input2=$input2
+    #if $paired.pairedSingle == "single":
+     --input3="None"
+     --input4="None"
+    #else:
+     --input3=$input3
+     --input4=$input4
+    #end if
+    --output1=$output1
+    #if $paired.pairedSingle == "single":
+     --output2="None"
+    #else:
+     --output2=$output2
+    #end if
+  </command>
+  <inputs>
+    <conditional name="paired">
+      <param name="pairedSingle" type="select" label="Is this library mate-paired?">
+        <option value="single">Single</option>
+        <option value="paired">Paired</option>
+      </param>
+      <when value="single">
+        <param name="input1" type="data" format="csfasta" label="F3 read file" />
+        <param name="input2" type="data" format="qualsolid" label="F3 qual file" />
+      </when>
+      <when value="paired">
+        <param name="input1" type="data" format="csfasta" label="F3 read file" />
+        <param name="input2" type="data" format="qualsolid" label="F3 qual file" />
+        <param name="input3" type="data" format="csfasta" label="R3 read file" />
+        <param name="input4" type="data" format="qualsolid" label="R3 qual file" />      
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <!-- Variable number of outputs. Either one (for single-end) or two (for paired-end) -->
+    <data name="output1" format="fastqsanger"/>
+    <data name="output2" format="fastqsanger">
+      <filter>paired['pairedSingle'] == 'paired'</filter>
+    </data>    
+  </outputs>
+  <tests>
+    <test>
+      <param name="pairedSingle" value="single" />
+      <param name="input1" value="s2fq_phiX.csfasta" ftype="csfasta" />
+      <param name="input2" value="s2fq_phiX.qualsolid" ftype="qualsolid" />
+      <output name="output1" file="s2fq_out1.fastqsanger" />
+    </test>
+    <test>
+      <param name="pairedSingle" value="paired" />
+      <param name="input1" value="s2fq_paired_F3.csfasta" ftype="csfasta" />
+      <param name="input2" value="s2fq_paired_F3_QV.qualsolid" ftype="qualsolid" />
+      <param name="input3" value="s2fq_paired_R3.csfasta" ftype="csfasta" />
+      <param name="input4" value="s2fq_paired_R3_QV.qualsolid" ftype="qualsolid" />
+      <output name="output1" file="s2fq_out2.fastqsanger" />
+      <!-- testing framework does not deal with multiple outputs yet
+      <output name="output2" file="s2fq_out3.fastqsanger" />
+      -->
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+This tool takes reads and quality files and converts them to FASTQ data (Sanger variant). Any -1 qualities are converted to 1 before being converted to FASTQ. Note that it also converts the color-space sequences to bases, as in the example below.
+
+-----
+
+**Example**
+
+- Converting the following sequences::
+
+    >1831_573_1004_F3
+    T00030133312212111300011021310132222
+    >1831_573_1567_F3
+    T03330322230322112131010221102122113
+
+- and quality scores::
+
+    >1831_573_1004_F3
+    4 29 34 34 32 32 24 24 20 17 10 34 29 20 34 13 30 34 22 24 11 28 19 17 34 17 24 17 25 34 7 24 14 12 22
+    >1831_573_1567_F3
+    8 26 31 31 16 22 30 31 28 29 22 30 30 31 32 23 30 28 28 31 19 32 30 32 19 8 32 10 13 6 32 10 6 16 11
+
+- will produce the following Sanger FASTQ data::
+
+    @1831_573_1004/1
+    AATACTTTCGGCGCCCTAAACCAGCTCACTGGGG
+    +
+    >CCAA9952+C>5C.?C79,=42C292:C(9/-7
+    @1831_573_1567/1
+    TTTATGGGTATGGCCGCTCACAGGCCAGCGGCCT
+    +
+    ;@@17?@=>7??@A8?==@4A?A4)A+.'A+'1,
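+
+The Sanger quality string above uses the standard Phred+33 encoding: each
+score is written as the character with ASCII code (quality + 33). A minimal
+sketch (illustration only, not part of this tool)::
+
+    def sanger_quality(quals):
+        # -1 (missing color call) is replaced with 1 before encoding
+        return ''.join(chr((1 if q == -1 else q) + 33) for q in quals)
+
+    sanger_quality([29, 34, 34])  # -> '>CC'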
+
+  </help>
+</tool>
diff --git a/tools/ngs_simulation/ngs_simulation.py b/tools/ngs_simulation/ngs_simulation.py
new file mode 100644
index 0000000..c30ed44
--- /dev/null
+++ b/tools/ngs_simulation/ngs_simulation.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+"""
+Runs Ben's simulation.
+
+usage: %prog [options]
+   -i, --input=i: Input genome (FASTA format)
+   -g, --genome=g: If built-in, the genome being used
+   -l, --read_len=l: Read length
+   -c, --avg_coverage=c: Average coverage
+   -e, --error_rate=e: Error rate (0-1)
+   -n, --num_sims=n: Number of simulations to run
+   -p, --polymorphism=p: Frequency/ies for minor allele (comma-separated list of 0-1)
+   -d, --detection_thresh=d: Detection thresholds (comma-separated list of 0-1)
+   -u, --output_png=u: Plot output
+   -s, --summary_out=s: Whether or not to output a file with summary of all simulations
+   -m, --output_summary=m: File name for output summary of all simulations
+   -f, --new_file_path=f: Directory for summary output files
+"""
+# removed output of all simulation results on request (not working)
+#   -r, --sim_results=r: Output all tabular simulation results (number of polymorphisms times number of detection thresholds)
+#   -o, --output=o: Base name for summary output for each run
+from __future__ import print_function
+
+import itertools
+import os
+import random
+import sys
+import tempfile
+
+from bx.cookbook import doc_optparse
+from rpy import r
+
+
+def stop_err( msg ):
+    sys.stderr.write( '%s\n' % msg )
+    sys.exit(1)
+
+
+def __main__():
+    # Parse Command Line
+    options, args = doc_optparse.parse( __doc__ )
+    # validate parameters
+    error = ''
+    try:
+        read_len = int( options.read_len )
+        if read_len <= 0:
+            raise Exception(' greater than 0')
+    except Exception as e:
+        error = ': %s' % str( e )
+    if error:
+        stop_err( 'Make sure your read length is an integer value%s' % error )
+    error = ''
+    try:
+        avg_coverage = int( options.avg_coverage )
+        if avg_coverage <= 0:
+            raise Exception(' greater than 0')
+    except Exception as e:
+        error = ': %s' % str( e )
+    if error:
+        stop_err( 'Make sure your average coverage is an integer value%s' % error )
+    error = ''
+    try:
+        error_rate = float( options.error_rate )
+        if error_rate >= 1.0:
+            error_rate = 10 ** ( -error_rate / 10.0 )
+        elif error_rate < 0:
+            raise Exception(' between 0 and 1')
+    except Exception as e:
+        error = ': %s' % str( e )
+    if error:
+        stop_err( 'Make sure the error rate is a decimal value%s or the quality score is at least 1' % error )
+    try:
+        num_sims = int( options.num_sims )
+    except Exception as e:
+        stop_err( 'Make sure the number of simulations is an integer value: %s' % str( e ) )
+    if options.polymorphism != 'None':
+        polymorphisms = [ float( p ) for p in options.polymorphism.split( ',' ) ]
+    else:
+        stop_err( 'Select at least one polymorphism value to use' )
+    if options.detection_thresh != 'None':
+        detection_threshes = [ float( dt ) for dt in options.detection_thresh.split( ',' ) ]
+    else:
+        stop_err( 'Select at least one detection threshold to use' )
+
+    # mutation dictionaries
+    hp_dict = { 'A': 'G', 'G': 'A', 'C': 'T', 'T': 'C', 'N': 'N' }  # heteroplasmy dictionary
+    mt_dict = { 'A': 'C', 'C': 'A', 'G': 'T', 'T': 'G', 'N': 'N'}  # misread dictionary
+
+    # read fasta file to seq string
+    all_lines = open( options.input, 'rb' ).readlines()
+    seq = ''
+    for line in all_lines:
+        line = line.rstrip()
+        if line.startswith('>'):
+            pass
+        else:
+            seq += line.upper()
+    seq_len = len( seq )
+
+    # output file name template
+# removed output of all simulation results on request (not working)
+#    if options.sim_results == "true":
+#        out_name_template = os.path.join( options.new_file_path, 'primary_output%s_' + options.output + '_visible_tabular' )
+#    else:
+#        out_name_template = tempfile.NamedTemporaryFile().name + '_%s'
+    out_name_template = tempfile.NamedTemporaryFile().name + '_%s'
+    print('out_name_template:', out_name_template)
+
+    # set up output files
+    outputs = {}
+    i = 1
+    for p in polymorphisms:
+        outputs[ p ] = {}
+        for d in detection_threshes:
+            outputs[ p ][ d ] = out_name_template % i
+            i += 1
+
+    # run sims
+    for polymorphism in polymorphisms:
+        for detection_thresh in detection_threshes:
+            output = open( outputs[ polymorphism ][ detection_thresh ], 'wb' )
+            output.write( 'FP\tFN\tGENOMESIZE=%s\n' % seq_len )
+            sim_count = 0
+            while sim_count < num_sims:
+                # randomly pick heteroplasmic base index
+                hbase = random.randrange( seq_len )
+                # hbase = seq_len/2#random.randrange( 0, seq_len )
+                # create 2D quasispecies list
+                qspec = [[] for _ in range(seq_len)]
+                # simulate read indices and assign to quasispecies
+                i = 0
+                while i < ( avg_coverage * ( seq_len / read_len ) ):  # number of reads (approximates coverage)
+                    start = random.randrange( seq_len )
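+                    # the genome is treated as circular: reads that run off
+                    # either end wrap around to the opposite side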
+                    if random.random() < 0.5:  # positive sense read
+                        end = start + read_len  # assign read end
+                        if end > seq_len:  # overshooting origin
+                            read = itertools.chain(range( start, seq_len ), range( 0, end - seq_len ))
+                        else:  # regular read
+                            read = range( start, end )
+                    else:  # negative sense read
+                        end = start - read_len  # assign read end
+                        if end < -1:  # overshooting origin
+                            read = itertools.chain(range( start, -1, -1 ), range( seq_len - 1, seq_len + end, -1))
+                        else:  # regular read
+                            read = range( start, end, -1 )
+                    # assign read to quasispecies list by index
+                    for j in read:
+                        if j == hbase and random.random() < polymorphism:  # heteroplasmic base is variant with p = het
+                            ref = hp_dict[ seq[ j ] ]
+                        else:  # ref is the verbatim reference nucleotide (all positions)
+                            ref = seq[ j ]
+                        if random.random() < error_rate:  # base in read is misread with p = err
+                            qspec[ j ].append( mt_dict[ ref ] )
+                        else:  # otherwise we carry ref through to the end
+                            qspec[ j ].append(ref)
+                    # last but not least
+                    i += 1
+                bases, fpos, fneg = {}, 0, 0  # last two will be outputted to summary file later
+                for i, nuc in enumerate( seq ):
+                    cov = len( qspec[ i ] )
+                    bases[ 'A' ] = qspec[ i ].count( 'A' )
+                    bases[ 'C' ] = qspec[ i ].count( 'C' )
+                    bases[ 'G' ] = qspec[ i ].count( 'G' )
+                    bases[ 'T' ] = qspec[ i ].count( 'T' )
+                    # calculate max NON-REF deviation
+                    del bases[ nuc ]
+                    maxdev = float( max( bases.values() ) ) / cov
+                    # deal with non-het sites
+                    if i != hbase:
+                        if maxdev >= detection_thresh:  # greater than detection threshold = false positive
+                            fpos += 1
+                    # deal with het sites
+                    if i == hbase:
+                        hnuc = hp_dict[ nuc ]  # let's recover het variant
+                        if ( float( bases[ hnuc ] ) / cov ) < detection_thresh:  # less than detection threshold = false negative
+                            fneg += 1
+                        del bases[ hnuc ]  # ignore het variant
+                        maxdev = float( max( bases.values() ) ) / cov  # check other non-ref bases at het site
+                        if maxdev >= detection_thresh:  # greater than detection threshold = false positive (possible)
+                            fpos += 1
+                # output error sums and genome size to summary file
+                output.write( '%d\t%d\n' % ( fpos, fneg ) )
+                sim_count += 1
+            # close output up
+            output.close()
+
+    # Parameters (heteroplasmy, error threshold, colours)
+    r( '''
+    het=c(%s)
+    err=c(%s)
+    grade = (0:32)/32
+    hues = rev(gray(grade))
+    ''' % ( ','.join( [ str( p ) for p in polymorphisms ] ), ','.join( [ str( d ) for d in detection_threshes ] ) ) )
+
+    # Suppress warnings
+    r( 'options(warn=-1)' )
+
+    # Create allsum (for FP) and allneg (for FN) objects
+    r( 'allsum <- data.frame()' )
+    for polymorphism in polymorphisms:
+        for detection_thresh in detection_threshes:
+            output = outputs[ polymorphism ][ detection_thresh ]
+            cmd = '''
+                  ngsum = read.delim('%s', header=T)
+                  ngsum$fprate <- ngsum$FP/%s
+                  ngsum$hetcol <- %s
+                  ngsum$errcol <- %s
+                  allsum <- rbind(allsum, ngsum)
+                  ''' % ( output, seq_len, polymorphism, detection_thresh )
+            r( cmd )
+
+    if os.path.getsize( output ) == 0:
+        for p in outputs.keys():
+            for d in outputs[ p ].keys():
+                sys.stderr.write(outputs[ p ][ d ] + ' ' + str( os.path.getsize( outputs[ p ][ d ] ) ) + '\n')
+
+    if options.summary_out == "true":
+        r( 'write.table(summary(ngsum), file="%s", quote=FALSE, sep="\t", row.names=FALSE)' % options.output_summary )
+
+    # Summary objects (these could be printed)
+    r( '''
+    tr_pos <- tapply(allsum$fprate,list(allsum$hetcol,allsum$errcol), mean)
+    tr_neg <- tapply(allsum$FN,list(allsum$hetcol,allsum$errcol), mean)
+    cat('\nFalse Positive Rate Summary\n\t', file='%s', append=T, sep='\t')
+    write.table(format(tr_pos, digits=4), file='%s', append=T, quote=F, sep='\t')
+    cat('\nFalse Negative Rate Summary\n\t', file='%s', append=T, sep='\t')
+    write.table(format(tr_neg, digits=4), file='%s', append=T, quote=F, sep='\t')
+    ''' % tuple( [ options.output_summary ] * 4 ) )
+
+    # Setup graphs
+    r( '''
+    png('%s', width=800, height=500, units='px', res=250)
+    layout(matrix(data=c(1,2,1,3,1,4), nrow=2, ncol=3), widths=c(4,6,2), heights=c(1,10,10))
+    ''' % options.output_png )
+
+    # Main title
+    genome = ''
+    if options.genome:
+        genome = '%s: ' % options.genome
+    r( '''
+    par(mar=c(0,0,0,0))
+    plot(1, type='n', axes=F, xlab='', ylab='')
+    text(1,1,paste('%sVariation in False Positives and Negatives (', %s, ' simulations, coverage ', %s,')', sep=''), font=2, family='sans', cex=0.7)
+    ''' % ( genome, options.num_sims, options.avg_coverage ) )
+
+    # False positive boxplot
+    r( '''
+    par(mar=c(5,4,2,2), las=1, cex=0.35)
+    boxplot(allsum$fprate ~ allsum$errcol, horizontal=T, ylim=rev(range(allsum$fprate)), cex.axis=0.85)
+    title(main='False Positives', xlab='false positive rate', ylab='')
+    ''' )
+
+    # False negative heatmap (note zlim command!)
+    num_polys = len( polymorphisms )
+    num_dets = len( detection_threshes )
+    r( '''
+    par(mar=c(5,4,2,1), las=1, cex=0.35)
+    image(1:%s, 1:%s, tr_neg, zlim=c(0,1), col=hues, xlab='', ylab='', axes=F, border=1)
+    axis(1, at=1:%s, labels=rownames(tr_neg), lwd=1, cex.axis=0.85, axs='i')
+    axis(2, at=1:%s, labels=colnames(tr_neg), lwd=1, cex.axis=0.85)
+    title(main='False Negatives', xlab='minor allele frequency', ylab='detection threshold')
+    ''' % ( num_polys, num_dets, num_polys, num_dets ) )
+
+    # Scale alongside
+    r( '''
+    par(mar=c(2,2,2,3), las=1)
+    image(1, grade, matrix(grade, ncol=length(grade), nrow=1), col=hues, xlab='', ylab='', xaxt='n', las=1, cex.axis=0.85)
+    title(main='Key', cex=0.35)
+    mtext('false negative rate', side=1, cex=0.35)
+    ''' )
+
+    # Close graphics
+    r( '''
+    layout(1)
+    dev.off()
+    ''' )
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/ngs_simulation/ngs_simulation.xml b/tools/ngs_simulation/ngs_simulation.xml
new file mode 100644
index 0000000..e03f8cf
--- /dev/null
+++ b/tools/ngs_simulation/ngs_simulation.xml
@@ -0,0 +1,222 @@
+<tool id="ngs_simulation" name="Simulate" version="1.0.0">
+<!--<tool id="ngs_simulation" name="Simulate" force_history_refresh="True" version="1.0.0">-->
+  <description>Illumina runs</description>
+  <requirements>
+    <requirement type="package" version="1.0.3">rpy</requirement>
+  </requirements>
+  <command interpreter="python">
+    ngs_simulation.py
+      #if $in_type.input_type == "built-in"
+        --input="${in_type.genome.fields.path}"
+        --genome=$in_type.genome
+      #else
+        --input="${ in_type.input1 }"
+      #end if
+      --read_len=$read_len
+      --avg_coverage=$avg_coverage
+      --error_rate=$error_rate
+      --num_sims=$num_sims
+      --polymorphism=$polymorphism
+      --detection_thresh=$detection_thresh
+      --output_png="${ output_png }"
+      --summary_out=$summary_out
+      --output_summary="${ output_summary }"
+      --new_file_path="."
+  </command>
+<!-- If want to include all simulation results file
+        sim_results=$sim_results
+        output=$output.id
+-->
+  <inputs>
+    <conditional name="in_type">
+      <param name="input_type" type="select" label="Use a built-in FASTA file or one from the history?">
+        <option value="built-in">Built-in</option>
+        <option value="history">History file</option>
+      </param>
+      <when value="built-in">
+        <param name="genome" type="select" label="Select a built-in genome" help="if your genome of interest is not listed - contact Galaxy team">
+          <options from_data_table="ngs_sim_fasta" />
+        </param>
+      </when>
+      <when value="history">
+        <param name="input1" type="data" format="fasta" label="Input genome (FASTA format)" />
+      </when>
+    </conditional>
+    <param name="read_len" type="integer" value="76" label="Read length" />
+    <param name="avg_coverage" type="integer" value="200" label="Average coverage" />
+    <param name="error_rate" type="float" value="0.001" label="Error rate or quality score" help="Quality score if integer 1 or greater; error rate if between 0 and 1" />
+    <param name="num_sims" type="integer" value="100" label="The number of simulations to run" />
+    <param name="polymorphism" type="select" multiple="true" label="Frequency/ies for minor allele">
+      <option value="0.001">0.001</option>
+      <option value="0.002">0.002</option>
+      <option value="0.003">0.003</option>
+      <option value="0.004">0.004</option>
+      <option value="0.005">0.005</option>
+      <option value="0.006">0.006</option>
+      <option value="0.007">0.007</option>
+      <option value="0.008">0.008</option>
+      <option value="0.009">0.009</option>
+      <option value="0.01">0.01</option>
+      <option value="0.02">0.02</option>
+      <option value="0.03">0.03</option>
+      <option value="0.04">0.04</option>
+      <option value="0.05">0.05</option>
+      <option value="0.06">0.06</option>
+      <option value="0.07">0.07</option>
+      <option value="0.08">0.08</option>
+      <option value="0.09">0.09</option>
+      <option value="0.1">0.1</option>
+      <option value="0.2">0.2</option>
+      <option value="0.3">0.3</option>
+      <option value="0.4">0.4</option>
+      <option value="0.5">0.5</option>
+      <option value="0.6">0.6</option>
+      <option value="0.7">0.7</option>
+      <option value="0.8">0.8</option>
+      <option value="0.9">0.9</option>
+      <option value="1.0">1.0</option>
+      <validator type="no_options" message="You must select at least one value" />
+    </param>
+    <param name="detection_thresh" type="select" multiple="true" label="Detection thresholds">
+      <option value="0.001">0.001</option>
+      <option value="0.002">0.002</option>
+      <option value="0.003">0.003</option>
+      <option value="0.004">0.004</option>
+      <option value="0.005">0.005</option>
+      <option value="0.006">0.006</option>
+      <option value="0.007">0.007</option>
+      <option value="0.008">0.008</option>
+      <option value="0.009">0.009</option>
+      <option value="0.01">0.01</option>
+      <option value="0.02">0.02</option>
+      <option value="0.03">0.03</option>
+      <option value="0.04">0.04</option>
+      <option value="0.05">0.05</option>
+      <option value="0.06">0.06</option>
+      <option value="0.07">0.07</option>
+      <option value="0.08">0.08</option>
+      <option value="0.09">0.09</option>
+      <option value="0.1">0.1</option>
+      <option value="0.2">0.2</option>
+      <option value="0.3">0.3</option>
+      <option value="0.4">0.4</option>
+      <option value="0.5">0.5</option>
+      <option value="0.6">0.6</option>
+      <option value="0.7">0.7</option>
+      <option value="0.8">0.8</option>
+      <option value="0.9">0.9</option>
+      <option value="1.0">1.0</option>
+      <validator type="no_options" message="You must select at least one value" />
+    </param>
+    <param name="summary_out" type="boolean" truevalue="true" falsevalue="false" checked="true" label="Include a (text) summary file for all the simulations" />
+<!--    <param name="sim_results" type="boolean" truevalue="true" falsevalue="false" checked="false" label="Output all tabular simulation results" help="Number of polymorphisms times number of detection thresholds"/>
+-->
+  </inputs>
+  <outputs>
+    <data format="png" name="output_png" />
+    <data format="tabular" name="output_summary">
+      <filter>summary_out == True</filter>
+    </data>
+<!--
+    <data format="tabular" name="output">
+      <filter>sim_files_out</filter>
+    </data>
+-->
+  </outputs>
+  <tests>
+    <!--
+      Tests cannot be run because of the non-deterministic element of the simulation.
+      But if you run the following "tests" manually in the browser and check against
+      the output files, they should be very similar to the listed output files.
+    -->
+    <!--
+    <test>
+      <param name="input_type" value="history" />
+      <param name="input1" value="ngs_simulation_in1.fasta" ftype="fasta" />
+      <param name="read_len" value="76" />
+      <param name="avg_coverage" value="200" />
+      <param name="error_rate" value="0.001" />
+      <param name="num_sims" value="25" />
+      <param name="polymorphism" value="0.02,0.04,0.1" />
+      <param name="detection_thresh" value="0.01,0.02" />
+      <param name="summary_out" value="true" />
+      <output name="output_png" file="ngs_simulation_out1.png" />
+      <output name="output_summary" file="ngs_simulation_out2.tabular" />
+    </test>
+    <test>
+      <param name="input_type" value="built-in" />
+      <param name="genome" value="pUC18" />
+      <param name="read_len" value="50" />
+      <param name="avg_coverage" value="150" />
+      <param name="error_rate" value="0.005" />
+      <param name="num_sims" value="25" />
+      <param name="polymorphism" value="0.001,0.005" />
+      <param name="detection_thresh" value="0.001,0.002" />
+      <param name="summary_out" value="false" />
+      <output name="output_png" file="ngs_simulation_out3.png" />
+    </test>
+    -->
+  </tests>
+  <help>
+
+**What it does**
+
+This tool simulates an Illumina run and provides plots of false positives and false negatives. It allows for a range of simulation parameters to be set. Note that this simulation sets only one (randomly chosen) position in the genome as polymorphic, according to the value specified. Superimposed on this are "sequencing errors", which are uniformly (and randomly) distributed. Polymorphisms are assigned using the detection threshold, so if the detection threshold is set to the same as the  [...]
+
+**Parameter list**
+
+These are the parameters that should be set for the simulation::
+
+  Read length (which is the same for all reads)
+  Average Coverage
+  Frequency for Minor Allele
+  Sequencing Error Rate
+  Detection Threshold
+  Number of Simulations
+
+You should also choose whether to use a built-in genome or to supply your own FASTA file.
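+
+The number of simulated reads follows from the requested coverage. The script
+uses the expression below (with integer division), so the realized coverage is
+approximate::
+
+    # expected read count for a genome of length seq_len
+    num_reads = avg_coverage * (seq_len // read_len)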
+
+**Output**
+
+There are one or two outputs. The first is a PNG containing two different plots and is always generated. The second is optional and is a text file with summary information about the simulations that were run. Below are example settings and outputs for a run on phiX::
+
+  Read length                    76
+  Average coverage               200
+  Error rate/quality score       0.001
+  Number of simulations          100
+  Frequencies for minor allele   0.002
+                                 0.004
+  Detection thresholds           0.003
+                                 0.005
+                                 0.007
+  Include summary file           Yes
+
+Plot output (png):
+
+.. image:: ${static_path}/images/ngs_simulation.png
+
+Summary output (txt)::
+
+        FP              FN       GENOMESIZE.5386      fprate          hetcol          errcol
+  Min.   : 71.0   Min.   :0.0    Mode:logical     Min.   :0.01318         Min.   :0.004   Min.   :0.007
+  1st Qu.:86.0    1st Qu.:1.0    NA's:10          1st Qu.:0.01597         1st Qu.:0.004   1st Qu.:0.007
+  Median :92.5    Median :1.0    NA       Median :0.01717         Median :0.004   Median :0.007
+  Mean   :93.6    Mean   :0.9    NA       Mean   :0.01738         Mean   :0.004   Mean   :0.007
+  3rd Qu.:100.8   3rd Qu.:1.0    NA       3rd Qu.:0.01871         3rd Qu.:0.004   3rd Qu.:0.007
+  Max.   :123.0   Max.   :1.0    NA       Max.   :0.02284         Max.   :0.004   Max.   :0.007
+  
+  False Positive Rate Summary
+          0.003   0.005   0.007
+  0.001   0.17711 0.10854 0.01673
+  0.009   0.18049 0.10791 0.01738
+
+  False Negative Rate Summary
+          0.003   0.005     0.007
+  0.001   1.0     0.8       1.0
+  0.009   0.4     0.7       0.9
+
+
+  </help>
+</tool>
+
+
diff --git a/tools/phenotype_association/BEAM2_wrapper.sh b/tools/phenotype_association/BEAM2_wrapper.sh
new file mode 100755
index 0000000..1a64592
--- /dev/null
+++ b/tools/phenotype_association/BEAM2_wrapper.sh
@@ -0,0 +1,75 @@
+#!/usr/bin/env bash
+#
+# Galaxy wrapper for Yu Zhang's BEAM2; adds two new options:
+#  significance=foo    renames significance.txt to foo after BEAM2 is run
+#  posterior=bar       renames posterior.txt    to bar after BEAM2 is run
+# 
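+# example (mirroring the call in beam.xml; file names are illustrative):
+#   BEAM2_wrapper.sh map=in.map ped=in.ped 200 200 0.05 \
+#     significance=sig.tab posterior=post.tab
+#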
+
+# failures of individual steps are checked explicitly below so that a helpful
+# message can be printed before exiting
+
+export PATH=$PATH:$(dirname $0)
+
+## options
+significance=
+posterior=
+new_args=
+map=
+ped=
+
+TFILE="/tmp/BEAM2.$$.tmp"
+
+## separate significance and posterior arguments from arguments to BEAM2
+until [ $# -eq 0 ]
+do
+  case $1 in
+    significance=*)
+      significance=${1#significance=}
+      ;;
+    posterior=*)
+      posterior=${1#posterior=}
+      ;;
+    map=*)
+      map=${1#map=}
+      ;;
+    ped=*)
+      ped=${1#ped=}
+      ;;
+    *)
+      if [ -z "$new_args" ]; then
+        new_args=$1
+      else
+        new_args="$new_args $1"
+      fi
+      ;;
+  esac
+
+  shift
+done
+
+## convert input for use with BEAM2
+lped_to_geno.pl $map $ped > $TFILE
+if [ $? -ne 0 ]; then
+  echo "failed: lped_to_geno.pl $map $ped > $TFILE"
+  exit 1
+fi
+
+## run BEAM2
+BEAM2 $TFILE $new_args 1>/dev/null
+if [ $? -ne 0 ]; then
+  echo "failed: BEAM2 $TFILE $new_args"
+  exit 1
+fi
+
+mergeSnps.pl significance.txt $TFILE
+if [ $? -ne 0 ]; then
+  echo "failed: mergeSnps.pl significance.txt $TFILE"
+  exit 1
+fi
+
+## move output files
+mv significance.txt $significance
+mv posterior.txt $posterior
+
+## cleanup
+rm -f $TFILE
+
diff --git a/tools/phenotype_association/beam.xml b/tools/phenotype_association/beam.xml
new file mode 100644
index 0000000..d2700b9
--- /dev/null
+++ b/tools/phenotype_association/beam.xml
@@ -0,0 +1,141 @@
+<tool id="hgv_beam" name="BEAM" version="1.0.0">
+  <description>significant single- and multi-locus SNP associations in case-control studies</description>
+
+  <command interpreter="bash">
+    BEAM2_wrapper.sh map=${input.extra_files_path}/${input.metadata.base_name}.map ped=${input.extra_files_path}/${input.metadata.base_name}.ped $burnin $mcmc $pvalue significance=$significance posterior=$posterior
+  </command>
+
+  <inputs>
+    <param format="lped" name="input" type="data" label="Dataset"/>
+    <param name="burnin" label="Number of MCMC burn-in steps" type="integer" value="200" />
+    <param name="mcmc" label="Number of MCMC sampling steps" type="integer" value="200" />
+    <param name="pvalue" label="Significance cutoff (after Bonferroni adjustment)" type="float" value="0.05" />
+  </inputs>
+
+  <outputs>
+    <data format="tabular" name="significance" />
+    <data format="tabular" name="posterior" />
+  </outputs>
+
+  <requirements>
+    <requirement type="package">beam</requirement>
+    <requirement type="binary">mv</requirement>
+    <requirement type="binary">rm</requirement>
+  </requirements>
+
+  <!-- broken.  will be fixed soon.
+  <tests>
+    <test>
+      <param name='input' value='gpass_and_beam_input' ftype='lped' >
+        <metadata name='base_name' value='gpass_and_beam_input' />
+        <composite_data value='gpass_and_beam_input.ped' />
+        <composite_data value='gpass_and_beam_input.map' />
+        <edit_attributes type='name' value='gpass_and_beam_input' />
+      </param>
+      <param name="burnin" value="200"/>
+      <param name="mcmc" value="200"/>
+      <param name="pvalue" value="0.05"/>
+      <output name="significance" file="beam_output1.tab"/>
+      <output name="posterior" file="beam_output2.tab"/>
+    </test>
+  </tests>
+  -->
+
+  <help>
+.. class:: infomark
+
+This tool can take a long time to run, depending on the number of SNPs, the
+sample size, and the number of MCMC steps specified.  If you have hundreds
+of thousands of SNPs, it may take over a day.  The main tasks that slow down
+this tool are searching for interactions and dynamically partitioning the
+SNPs into blocks.  Optimization is certainly possible, but hasn't been done
+yet.  **If your only interest is to detect SNPs with primary effects (i.e.,
+single-SNP associations), please use the GPASS tool instead.**
+
+-----
+
+**Dataset formats**
+
+The input dataset must be in lped_ format.  The output datasets are both tabular_.
+(`Dataset missing?`_)
+
+.. _lped: ${static_path}/formatHelp.html#lped
+.. _tabular: ${static_path}/formatHelp.html#tabular
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+BEAM (Bayesian Epistasis Association Mapping) uses a Markov Chain Monte Carlo (MCMC) method to infer SNP block structures and detect both single-marker
+and interaction effects from case-control SNP data.
+This tool also partitions SNPs into blocks based on linkage disequilibrium (LD).  The method utilized is Bayesian, so the outputs are posterior probabilities of association, along with block partitions.  An advantage of this method is that it provides uncertainty measures for the associations and block partitions, and it scales well from small to large sample sizes. It is powerful in detecting gene-gene interactions, although slow for large datasets.
+
+-----
+
+**Example**
+
+- input map file::
+
+    1  rs0  0  738547
+    1  rs1  0  5597094
+    1  rs2  0  9424115
+    etc.
+
+- input ped file::
+
+    1 1 0 0 1  1  G G  A A  A A  A A  A A  A G  A A  G G  G G  A A  G G  G G  G G  A A  A A  A G  A A  G G  A G  A G  A A  G G  A A  G G  A A  G G  A G  A A  G G  A A  G G  A A  A G  A G  G G  A G  G G  G G  A A  A G  A A  G G  G G  G G  G G  A G  A A  A A  A A  A A
+    1 1 0 0 1  1  G G  A G  G G  A A  A A  A G  A A  G G  G G  G G  A A  G G  A G  A G  G G  G G  A G  G G  A G  A A  G G  A G  G G  A A  G G  G G  A G  A G  G G  A G  A A  A A  G G  G G  A G  A G  G G  A G  A A  A A  A G  G G  A G  G G  A G  G G  G G  A A  G G  A G
+    etc.
+
+- first output file, significance.txt::
+
+    ID   chr   position  results
+    rs0  chr1  738547    10 20 score= 45.101397 , df= 8 , p= 0.000431 , N=1225
+
+- second output file, posterior.txt::
+
+    id:  chr position  marginal + interaction = total posterior
+    0:   1 738547      0.0000 + 0.0000 = 0.0000
+    1:   1 5597094     0.0000 + 0.0000 = 0.0000
+    2:   1 9424115     0.0000 + 0.0000 = 0.0000
+    3:   1 13879818    0.0000 + 0.0000 = 0.0000
+    4:   1 13934751    0.0000 + 0.0000 = 0.0000
+    5:   1 16803491    0.0000 + 0.0000 = 0.0000
+    6:   1 17236854    0.0000 + 0.0000 = 0.0000
+    7:   1 18445387    0.0000 + 0.0000 = 0.0000
+    8:   1 21222571    0.0000 + 0.0000 = 0.0000
+    etc.
+
+    id:  chr position block_boundary  | allele counts in cases and controls
+    0:   1 738547      1.000          | 156 93 251 | 169 83 248 
+    1:   1 5597094     1.000          | 323 19 158 | 328 16 156 
+    2:   1 9424115     1.000          | 366 6 128 | 369 11 120 
+    3:   1 13879818    1.000          | 252 31 217 | 278 32 190 
+    4:   1 13934751    1.000          | 246 64 190 | 224 58 218 
+    5:   1 16803491    1.000          | 91 160 249 | 91 174 235 
+    6:   1 17236854    1.000          | 252 43 205 | 249 44 207 
+    7:   1 18445387    1.000          | 205 66 229 | 217 56 227 
+    8:   1 21222571    1.000          | 353 9 138 | 352 8 140 
+    etc.
+
+  The "id" field is an internally used index.
+
+-----
+
+**References**
+
+Zhang Y, Liu JS. (2007)
+Bayesian inference of epistatic interactions in case-control studies.
+Nat Genet. 39(9):1167-73. Epub 2007 Aug 26.
+
+Zhang Y, Zhang J, Liu JS. (2010)
+Block-based bayesian epistasis association mapping with application to WTCCC type 1 diabetes data.
+Submitted.
+
+  </help>
+  <citations>
+    <citation type="doi">10.1038/ng2110</citation>
+    <citation type="doi">10.1214/11-AOAS469</citation>
+  </citations>
+</tool>
diff --git a/tools/phenotype_association/gpass.pl b/tools/phenotype_association/gpass.pl
new file mode 100755
index 0000000..16deb23
--- /dev/null
+++ b/tools/phenotype_association/gpass.pl
@@ -0,0 +1,79 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+use File::Basename;
+use File::Temp qw/ tempfile /;
+
+$ENV{'PATH'} .= ':' . dirname($0);
+
+#this is a wrapper for gpass that converts a linkage pedigree file to input 
+#for this program
+
+my($map, $ped, $out, $fdr) = @ARGV;
+
+if (!$map or !$ped or !$out or !$fdr) { die "missing args\n"; }
+
+my($fh, $name) = tempfile();
+#by default this file is removed when these variables go out of scope
+print $fh "map=$map ped=$ped\n";
+close $fh;  #converter will overwrite, just keep name
+
+#run converter 
+system("lped_to_geno.pl $map $ped > $name") == 0
+	or die "system lped_to_geno.pl $map $ped > $name failed\n";
+
+#system("cp $name tmp.middle");
+
+#run GPASS
+system("gpass $name -o $out -fdr $fdr 1>/dev/null") == 0
+	or die "system gpass $name -o $out -fdr $fdr, failed\n";
+
+#merge SNP data with results
+merge();
+
+exit;
+
+########################################
+
+#merge the input and output files so we have SNP data with the results
+sub merge {
+   open(FH, $out) or die "Couldn't open $out, $!\n";
+   my %res;
+   my @ind;
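+   # %res maps each 0-based SNP index (first field of a result line) to that
+   # line; the header line is kept under the key 'index'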
+   while (<FH>) {
+      chomp;
+      my $line = $_;
+      if ($line =~ /^(\d+)/) { $res{$1} = $line; push(@ind, $1); }
+      else { $res{'index'} = $line; }
+   }
+   close FH;
+   if (!@ind) { return; } #no results, leave alone
+   @ind = sort { $a <=> $b } @ind;
+   $res{'index'} =~ s/Index/#ID\tchr\tposition/;
+   #read input file to get SNP data
+   open(FH, $name) or die "Couldn't open $name, $!\n";
+   my $i = 0; #index is 0 based not counting header line
+   my $c = shift @ind;
+   while (<FH>) {
+      chomp; 
+      if (/^ID/) { next; }
+      my @f = split(/\s+/);
+      if ($i == $c) { 
+         $res{$i} =~ s/^$i/$f[0]\t$f[1]\t$f[2]/;
+         if (!@ind) { last; }
+         $c = shift @ind;
+      }
+      $i++;      
+   }
+   close FH;
+   #now reprint results with SNP data included
+   open(FH, ">", $out) or die "Couldn't write to $out, $!\n";
+   print FH $res{'index'}, "\n";
+   delete $res{'index'};
+   foreach $i (keys %res) {
+      print FH $res{$i}, "\n";
+   }
+   close FH;
+}
+
diff --git a/tools/phenotype_association/gpass.xml b/tools/phenotype_association/gpass.xml
new file mode 100644
index 0000000..0c6edaf
--- /dev/null
+++ b/tools/phenotype_association/gpass.xml
@@ -0,0 +1,115 @@
+<tool id="hgv_gpass" name="GPASS" version="1.0.0">
+  <description>significant single-SNP associations in case-control studies</description>
+
+  <command interpreter="perl">
+    gpass.pl ${input1.extra_files_path}/${input1.metadata.base_name}.map ${input1.extra_files_path}/${input1.metadata.base_name}.ped $output $fdr
+  </command>
+
+  <inputs>
+    <param name="input1" type="data" format="lped" label="Dataset"/>
+    <param name="fdr" type="float" value="0.05" label="FDR"/>
+  </inputs>
+
+  <outputs>
+    <data name="output" format="tabular" />
+  </outputs>
+
+  <requirements>
+    <requirement type="package">gpass</requirement>
+  </requirements>
+
+  <!-- we need to be able to set the seed for the random number generator
+  <tests>
+    <test>
+      <param name='input1' value='gpass_and_beam_input' ftype='lped' >
+        <metadata name='base_name' value='gpass_and_beam_input' />
+        <composite_data value='gpass_and_beam_input.ped' />
+        <composite_data value='gpass_and_beam_input.map' />
+        <edit_attributes type='name' value='gpass_and_beam_input' />
+      </param>
+      <param name="fdr" value="0.05" />
+      <output name="output" file="gpass_output.txt" />
+    </test>
+  </tests>
+  -->
+
+  <help>
+**Dataset formats**
+
+The input dataset must be in lped_ format, and the output is tabular_.
+(`Dataset missing?`_)
+
+.. _lped: ${static_path}/formatHelp.html#lped
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+GPASS (Genome-wide Poisson Approximation for Statistical Significance)
+detects significant single-SNP associations in case-control studies at a user-specified FDR.  Unlike previous methods, this tool can accurately approximate the genome-wide significance and FDR of SNP associations, while adjusting for millions of multiple comparisons, within seconds or minutes.
+
+The program has two main functionalities:
+
+1. Detect significant single-SNP associations at a user-specified false
+   discovery rate (FDR).
+
+   *Note*: a "typical" definition of FDR could be
+            FDR = E(# of false positive SNPs / # of significant SNPs)
+
+   This definition, however, is inappropriate for association mapping, since SNPs
+   are highly correlated.  Our FDR is defined differently to account for SNP
+   correlations, and thus yields a proper FDR in terms of the "proportion of
+   false positive loci".
+
+2. Approximate the significance of a list of candidate SNPs, adjusting for
+   multiple comparisons. If you have isolated a few SNPs of interest and want 
+   to know their significance in a GWAS, you can supply the GWAS data and let 
+   the program specifically test those SNPs.
+
+
+*Also note*: the number of SNPs in a study cannot be both too small and too
+clustered in a local region. A few hundred SNPs, or tens of SNPs spread across
+different regions, will be fine. The sample size cannot be too small either;
+around 100 or more individuals (cases + controls combined) will be fine.
+Otherwise, use permutation.
+
+-----
+
+**Example**
+
+- input map file::
+
+    1  rs0  0  738547
+    1  rs1  0  5597094
+    1  rs2  0  9424115
+    etc.
+
+- input ped file::
+
+    1 1 0 0 1  1  G G  A A  A A  A A  A A  A G  A A  G G  G G  A A  G G  G G  G G  A A  A A  A G  A A  G G  A G  A G  A A  G G  A A  G G  A A  G G  A G  A A  G G  A A  G G  A A  A G  A G  G G  A G  G G  G G  A A  A G  A A  G G  G G  G G  G G  A G  A A  A A  A A  A A
+    1 1 0 0 1  1  G G  A G  G G  A A  A A  A G  A A  G G  G G  G G  A A  G G  A G  A G  G G  G G  A G  G G  A G  A A  G G  A G  G G  A A  G G  G G  A G  A G  G G  A G  A A  A A  G G  G G  A G  A G  G G  A G  A A  A A  A G  G G  A G  G G  A G  G G  G G  A A  G G  A G
+    etc.
+
+- output dataset, showing significant SNPs and their p-values and FDR::
+
+    #ID   chr   position   Statistics  adj-Pvalue  FDR
+    rs35  chr1  136606952  4.890849    0.991562    0.682138
+    rs36  chr1  137748344  4.931934    0.991562    0.795827
+    rs44  chr2  14423047   7.712832    0.665086    0.218776
+    etc.
+
+-----
+
+**Reference**
+
+Zhang Y, Liu JS. (2010)
+Fast and accurate significance approximation for genome-wide association studies.
+Submitted.
+
+  </help>
+  <citations>
+    <citation type="doi">10.1198/jasa.2011.ap10657</citation>
+  </citations>
+</tool>
diff --git a/tools/phenotype_association/ldtools.xml b/tools/phenotype_association/ldtools.xml
new file mode 100644
index 0000000..9fc188e
--- /dev/null
+++ b/tools/phenotype_association/ldtools.xml
@@ -0,0 +1,114 @@
+<tool id="hgv_ldtools" name="LD" version="1.0.0">
+  <description>linkage disequilibrium and tag SNPs</description>
+
+  <command interpreter="bash">
+    ldtools_wrapper.sh rsquare=$rsquare freq=$freq input=$input output=$output
+  </command>
+
+  <inputs>
+    <param format="tabular" name="input" type="data" label="Dataset"/>
+    <param name="rsquare" label="r<sup>2</sup> threshold" type="float" value="0.64">
+      <validator type="in_range" message="rsquare must be in range [0.00, 1.00]" min="0.00" max="1.00" />
+    </param>
+    <param name="freq" label="Minimum allele frequency threshold" type="float" value="0.00">
+      <validator type="in_range" message="freq must be in range (0.00, 0.50]" min="0.00" max="0.50" />
+    </param>
+  </inputs>
+
+  <outputs>
+    <data format="tabular" name="output" />
+  </outputs>
+
+  <tests>
+    <test>
+      <param name="input" value="ldInput1.txt" />
+      <param name="rsquare" value="0.64" />
+      <param name="freq" value="0.00" />
+      <output name="output" file="ldOutput1.txt" />
+    </test>
+  </tests>
+
+  <help>
+**Dataset formats**
+
+The input and output datasets are tabular_.
+(`Dataset missing?`_)
+
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+This tool can be used to analyze the patterns of linkage disequilibrium
+(LD) between polymorphic sites in a locus.  SNPs are grouped based on the
+threshold level of LD as measured by r\ :sup:`2` (regardless of genomic
+position), and a representative "tag SNP" is reported for each group.
+The other SNPs in the group are in LD with the tag SNP, but not necessarily
+with each other.
+
+The underlying algorithm is the same as the one used in ldSelect (Carlson
+et al. 2004).  However, this tool is implemented to be much faster and more
+efficient than ldSelect.
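+
+For intuition, the r\ :sup:`2` between two biallelic sites is the squared,
+normalized disequilibrium of their haplotype frequencies. A minimal sketch
+(illustration only; the tool's own computation is done by pagetag.py)::
+
+    def rsquare(p_a, p_b, p_ab):
+        # p_a, p_b: allele frequencies at the two sites
+        # p_ab: frequency of the haplotype carrying both alleles
+        d = p_ab - p_a * p_b  # disequilibrium coefficient D
+        return d * d / (p_a * (1 - p_a) * p_b * (1 - p_b))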
+
+The input is a tabular file with genotype information for each individual
+at each SNP site, in exactly four columns: site ID, sample ID, and the
+two allele nucleotides.
+
+-----
+
+**Example**
+
+- input file::
+
+    rs2334386  NA20364  G  T
+    rs2334386  NA20363  G  G
+    rs2334386  NA20360  G  G
+    rs2334386  NA20359  G  G
+    rs2334386  NA20358  G  G
+    rs2334386  NA20356  G  G
+    rs2334386  NA20357  G  G
+    rs2334386  NA20350  G  G
+    rs2334386  NA20349  G  G
+    rs2334386  NA20348  G  G
+    rs2334386  NA20347  G  G
+    rs2334386  NA20346  G  G
+    rs2334386  NA20345  G  G
+    rs2334386  NA20344  G  G
+    rs2334386  NA20342  G  G
+    etc.
+
+- output file::
+
+    rs2238748  rs2793064,rs6518516,rs6518517,rs2283641,rs5993533,rs715590,rs2072123,rs2105421,rs2800954,rs1557847,rs807750,rs807753,rs5993488,rs8138035,rs2800980,rs2525079,rs5992353,rs712966,rs2525036,rs807743,rs1034727,rs807744,rs2074003
+    rs2871023  rs1210715,rs1210711,rs5748189,rs1210709,rs3788298,rs7284649,rs9306217,rs9604954,rs1210703,rs5748179,rs5746727,rs5748190,rs5993603,rs2238766,rs885981,rs2238763,rs5748165,rs9605996,rs9606001,rs5992398
+    rs7292006  rs13447232,rs5993665,rs2073733,rs1057457,rs756658,rs5992395,rs2073760,rs739369,rs9606017,rs739370,rs4493360,rs2073736
+    rs2518840  rs1061325,rs2283646,rs362148,rs1340958,rs361956,rs361991,rs2073754,rs2040771,rs2073740,rs2282684
+    rs2073775  rs10160,rs2800981,rs807751,rs5993492,rs2189490,rs5747997,rs2238743
+    rs5747263  rs12159924,rs2300688,rs4239846,rs3747025,rs3747024,rs3747023,rs2300691
+    rs433576   rs9605439,rs1109052,rs400509,rs401099,rs396012,rs410456,rs385105
+    rs2106145  rs5748131,rs2013516,rs1210684,rs1210685,rs2238767,rs2277837
+    rs2587082  rs2257083,rs2109659,rs2587081,rs5747306,rs2535704,rs2535694
+    rs807667   rs2800974,rs756651,rs762523,rs2800973,rs1018764
+    rs2518866  rs1206542,rs807467,rs807464,rs807462,rs712950
+    rs1110661  rs1110660,rs7286607,rs1110659,rs5992917,rs1110662
+    rs759076   rs5748760,rs5748755,rs5748752,rs4819925,rs933461
+    rs5746487  rs5992895,rs2034113,rs2075455,rs1867353
+    rs5748212  rs5746736,rs4141527,rs5748147,rs5748202
+    etc.
+
+-----
+
+**Reference**
+
+Carlson CS, Eberle MA, Rieder MJ, Yi Q, Kruglyak L, Nickerson DA. (2004)
+Selecting a maximally informative set of single-nucleotide polymorphisms for
+association analyses using linkage disequilibrium.
+Am J Hum Genet. 74(1):106-20. Epub 2003 Dec 15.
+
+  </help>
+  <citations>
+    <citation type="doi">10.1086/381000</citation>
+  </citations>
+</tool>
diff --git a/tools/phenotype_association/ldtools_wrapper.sh b/tools/phenotype_association/ldtools_wrapper.sh
new file mode 100755
index 0000000..fb70a36
--- /dev/null
+++ b/tools/phenotype_association/ldtools_wrapper.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+#
+# Galaxy wrapper for Aakrosh Ratan's ldtools
+# 
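+# example (mirroring the call in ldtools.xml; file names are illustrative):
+#   ldtools_wrapper.sh rsquare=0.64 freq=0.00 input=genotypes.tab output=tags.tab
+#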
+
+# failures of individual steps are checked explicitly below so that a helpful
+# message can be printed before exiting
+
+export PATH=$PATH:$(dirname $0)
+
+## pagetag options
+input=
+rsquare=0.64
+freq=0.00
+sample=###
+
+## senatag options
+excluded=###
+required=###
+output=
+
+until [ $# -eq 0 ]
+do
+  case $1 in
+    rsquare=*)
+      rsquare=${1#rsquare=}
+      ;;
+    freq=*)
+      freq=${1#freq=}
+      ;;
+    input=*)
+      input=${1#input=}
+      ;;
+    output=*)
+      output=${1#output=}
+      ;;
+    *)
+      if [ -z "$new_args" ]; then
+        new_args=$1
+      else
+        new_args="$new_args $1"
+      fi
+      ;;
+  esac
+
+  shift
+done
+
+## run pagetag
+pagetag.py --rsquare $rsquare --freq $freq $input snps.txt neighborhood.txt &> /dev/null
+if [ $? -ne 0 ]; then
+	echo "failed: pagetag.py --rsquare $rsquare --freq $freq $input snps.txt neighborhood.txt"
+	exit 1
+fi
+
+## run senatag
+senatag.py neighborhood.txt snps.txt > $output 2> /dev/null
+if [ $? -ne 0 ]; then
+	echo "failed: senatag.py neighborhood.txt snps.txt"
+	exit 1
+fi
+
+## cleanup
+rm -f snps.txt neighborhood.txt
+
diff --git a/tools/phenotype_association/linkToDavid.pl b/tools/phenotype_association/linkToDavid.pl
new file mode 100755
index 0000000..5caa4ea
--- /dev/null
+++ b/tools/phenotype_association/linkToDavid.pl
@@ -0,0 +1,59 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+
+###################################################
+# linkToDavid.pl
+# Generates a link to David for a list of gene IDs.
+###################################################
+ 
+if (!@ARGV or scalar @ARGV != 4) {
+   print "usage: linkToDavid.pl infile.tab 1basedCol idType outfile\n";
+   exit 1;
+}
+
+my $in = shift @ARGV;
+my $col = shift @ARGV;
+my $type = shift @ARGV;
+my $out = shift @ARGV;
+
+if ($col < 1) { 
+   print "ERROR the column number should be 1 based counting\n";
+   exit 1;
+}
+my @gene;
+open(FH, $in) or die "Couldn't open $in, $!\n";
+while (<FH>) {
+   chomp;
+   my @f = split(/\t/);
+   if (scalar @f < $col) {
+      print "ERROR there is no column $col in $in\n";
+      exit 1;
+   }
+   if ($f[$col-1]) { push(@gene, $f[$col-1]); }
+}
+close FH or die "Couldn't close $in, $!\n";
+
+if (scalar @gene > 400) {
+   print "ERROR DAVID only allows 400 genes submitted via a link\n";
+   exit 1;
+}
+ 
+my $link = 'http://david.abcc.ncifcrf.gov/api.jsp?type=TYPE&ids=GENELIST&tool=summary';
+
+my $g = join(",", @gene);
+$link =~ s/GENELIST/$g/;
+$link =~ s/TYPE/$type/;
+#print output
+if (length $link > 2048) { 
+   print "ERROR too many genes to fit in URL, please select a smaller set\n";
+   exit 1;
+}
+open(FH, ">", $out) or die "Couldn't open $out, $!\n";
+print FH "<html><head><title>DAVID link</title></head><body>\n",
+      '<A TARGET=_BLANK HREF="', $link, '">click here to send the list of identifiers to DAVID</A>', "\n",
+      '</body></html>', "\n";
+close FH or die "Couldn't close $out, $!\n";
+
+exit;
diff --git a/tools/phenotype_association/linkToDavid.xml b/tools/phenotype_association/linkToDavid.xml
new file mode 100644
index 0000000..b780858
--- /dev/null
+++ b/tools/phenotype_association/linkToDavid.xml
@@ -0,0 +1,114 @@
+<tool id="hgv_david" name="DAVID" version="1.0.1">
+  <description>functional annotation for a list of genes</description>
+
+  <command interpreter="perl">
+    linkToDavid.pl $input $numerical_column $type $out_file1
+  </command>
+
+  <inputs>
+    <param name="input" type="data" format="tabular" label="Dataset" />
+    <param name="numerical_column" type="data_column" data_ref="input" label="Column with identifiers" />
+    <param name="type" label="Identifier type" type="select">
+      <option value="AFFYMETRIX_3PRIME_IVT_ID">AFFYMETRIX_3PRIME_IVT_ID</option>
+      <option value="AFFYMETRIX_EXON_GENE_ID">AFFYMETRIX_EXON_GENE_ID</option>
+      <option value="AFFYMETRIX_SNP_ID">AFFYMETRIX_SNP_ID</option>
+      <option value="AGILENT_CHIP_ID">AGILENT_CHIP_ID</option>
+      <option value="AGILENT_ID">AGILENT_ID</option>
+      <option value="AGILENT_OLIGO_ID">AGILENT_OLIGO_ID</option>
+      <option value="ENSEMBL_GENE_ID">ENSEMBL_GENE_ID</option>
+      <option value="ENSEMBL_TRANSCRIPT_ID">ENSEMBL_TRANSCRIPT_ID</option>
+      <option value="ENTREZ_GENE_ID">ENTREZ_GENE_ID</option>
+      <option value="FLYBASE_GENE_ID">FLYBASE_GENE_ID</option>
+      <option value="FLYBASE_TRANSCRIPT_ID">FLYBASE_TRANSCRIPT_ID</option>
+      <option value="GENBANK_ACCESSION">GENBANK_ACCESSION</option>
+      <option value="GENOMIC_GI_ACCESSION">GENOMIC_GI_ACCESSION</option>
+      <option value="GENPEPT_ACCESSION">GENPEPT_ACCESSION</option>
+      <option value="ILLUMINA_ID">ILLUMINA_ID</option>
+      <option value="IPI_ID">IPI_ID</option>
+      <option value="MGI_ID">MGI_ID</option>
+      <option value="OFFICIAL_GENE_SYMBOL" selected="true">OFFICIAL_GENE_SYMBOL</option>
+      <option value="PFAM_ID">PFAM_ID</option>
+      <!--option value="PIR_ACCESSION">PIR_ACCESSION</option-->
+      <option value="PIR_ID">PIR_ID</option>
+      <option value="PROTEIN_GI_ACCESSION">PROTEIN_GI_ACCESSION</option>
+      <!--option value="PIR_NREF_ID">PIR_NREF_ID</option-->
+      <option value="REFSEQ_GENOMIC">REFSEQ_GENOMIC</option>
+      <option value="REFSEQ_MRNA">REFSEQ_MRNA</option>
+      <option value="REFSEQ_PROTEIN">REFSEQ_PROTEIN</option>
+      <option value="REFSEQ_RNA">REFSEQ_RNA</option>
+      <option value="RGD_ID">RGD_ID</option>
+      <option value="SGD_ID">SGD_ID</option>
+      <option value="TAIR_ID">TAIR_ID</option>
+      <option value="UCSC_GENE_ID">UCSC_GENE_ID</option>
+      <option value="UNIGENE">UNIGENE</option>
+      <option value="UNIPROT_ACCESSION">UNIPROT_ACCESSION</option>
+      <option value="UNIPROT_ID">UNIPROT_ID</option>
+      <option value="UNIREF100_ID">UNIREF100_ID</option>
+      <option value="WORMBASE_GENE_ID">WORMBASE_GENE_ID</option>
+      <option value="WORMPEP_ID">WORMPEP_ID</option>
+      <option value="ZFIN_ID">ZFIN_ID</option>
+    </param>
+  </inputs>
+
+  <outputs>
+    <data format="html" name="out_file1" />
+  </outputs>
+
+  <tests>
+    <test>
+      <param name="input" ftype="tabular" value="linkToDavid.tabular" />
+      <param name="numerical_column" value="1" />
+      <param name="type" value="ENTREZ_GENE_ID" />
+      <output name="out_file1" file="linkToDavid_1.out" />
+    </test>
+  </tests>
+
+  <help>
+.. class:: infomark
+
+The list is limited to 400 IDs.
+
+-----
+
+**Dataset formats**
+
+The input dataset is in tabular_ format.  The output dataset is html_ with
+a link to the DAVID website as described below.
+(`Dataset missing?`_)
+
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _html: ${static_path}/formatHelp.html#html
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+This tool creates a link to the Database for Annotation,
+Visualization, and Integrated Discovery (DAVID) website at NIH,
+sending a list of IDs from the selected column of a tabular
+Galaxy dataset.  To follow the created link, click on the
+eye icon once the Galaxy tool has finished running.
+
+DAVID provides a comprehensive set of functional annotation tools
+to help investigators discover biological meaning behind large
+lists of genes.
+
+-----
+
+**References**
+
+Huang DW, Sherman BT, Lempicki RA. (2009) Systematic and integrative analysis
+of large gene lists using DAVID bioinformatics resources.
+Nat Protoc. 4(1):44-57.
+
+Dennis G, Sherman BT, Hosack DA, Yang J, Gao W, Lane HC, Lempicki RA. (2003)
+DAVID: database for annotation, visualization, and integrated discovery.
+Genome Biol. 4(5):P3. Epub 2003 Apr 3.
+
+  </help>
+  <citations>
+    <citation type="doi">10.1038/nprot.2008.211</citation>
+    <citation type="doi">10.1186/gb-2003-4-5-p3</citation>
+  </citations>
+</tool>
diff --git a/tools/phenotype_association/linkToGProfile.pl b/tools/phenotype_association/linkToGProfile.pl
new file mode 100755
index 0000000..0f0ee24
--- /dev/null
+++ b/tools/phenotype_association/linkToGProfile.pl
@@ -0,0 +1,89 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+
+###################################################
+# linkToGProfile.pl
+# Generates a link to g:Profiler for a list of gene IDs.
+# g:Profiler: a web-based toolset for functional profiling of gene lists from large-scale experiments (2007) NAR 35:W193-W200
+###################################################
+ 
+if (!@ARGV or scalar @ARGV < 4) {
+   print "usage: linkToGProfile.pl infile.tab idType outfile -gene=1basedCol -chr=1basedCol -start=1basedCol -end=1basedCol\n";
+   exit 1;
+}
+
+my $in = shift @ARGV;
+my $type = shift @ARGV;
+my $out = shift @ARGV;
+
+my $col = 9999;  #large unrealistic default
+my $chr = 9999;
+my $st = 9999;
+my $end = 9999;
+foreach (@ARGV) {
+   if (/gene=(\d+)/) { $col = $1; }
+   elsif (/chr=(\d+)/) { $chr = $1; }
+   elsif (/start=(\d+)/) { $st = $1; }
+   elsif (/end=(\d+)/) { $end = $1; }
+   elsif (/region=1/) { $type = 'region'; }
+}
+
+if ($col < 1 or $chr < 1 or $st < 1 or $end < 1) {
+   print "ERROR: column numbers must use 1-based counting\n";
+   exit 1;
+}
+my @gene;
+my @pos;
+open(FH, $in) or die "Couldn't open $in, $!\n";
+while (<FH>) {
+   chomp;
+   my @f = split(/\t/);
+   if ($type ne 'region') {
+      if (scalar @f < $col) {
+         print "ERROR there is no column $col in $in for type $type\n";
+         exit 1;
+      }
+      if ($f[$col-1]) { push(@gene, $f[$col-1]); }
+   }else {
+      if (scalar @f < $chr or scalar @f < $st or scalar @f < $end) {
+         print "ERROR there is not enough columns ($chr,$st,$end) in $in\n";
+         exit 1;
+      }
+      if ($f[$chr-1]) {
+         $f[$chr-1] =~ s/chr//;
+         push(@pos, "$f[$chr-1]:$f[$st-1]:$f[$end-1]");
+      }
+   }
+}
+close FH or die "Couldn't close $in, $!\n";
+ 
+#region_query = 1 for coordinates X:1:10
+#can now do POST method
+#http://biit.cs.ut.ee/gprofiler/index.cgi?organism=hsapiens&query=pax6&term=&analytical=1&user_thr=1&sort_by_structure=1&output=txt
+my $g = '';
+if (@gene) { $g = join("+", @gene); }
+if (@pos) { $g = join("+", @pos); }
+my %params = (
+"analytical"=>1,
+"organism"=>"hsapiens",
+"query"=>$g,
+"term"=>"",
+"output"=>"png",
+"prefix"=>$type,
+"user_thr"=>"1.00"
+);
+if (@pos) { $params{"region_query"} = 1; }
+
+open(FH, ">", $out) or die "Couldn't open $out, $!\n";
+print FH "<html><head><title>g:Profiler link</title></head><body>\n";
+print FH '<form method="POST" action="http://biit.cs.ut.ee/gprofiler/index.cgi">';
+foreach my $k (keys %params) {
+   print FH "<input type='hidden' name='$k' value='$params{$k}'>\n";
+}
+print FH '<input type="Submit" name="foo" value="Send to g:Profiler">';
+print FH '</form></body></html>', "\n";
+close FH or die "Couldn't close $out, $!\n";
+
+#also do link that prints text that could be pulled back into Galaxy?
+exit;
diff --git a/tools/phenotype_association/linkToGProfile.xml b/tools/phenotype_association/linkToGProfile.xml
new file mode 100644
index 0000000..a21a00d
--- /dev/null
+++ b/tools/phenotype_association/linkToGProfile.xml
@@ -0,0 +1,93 @@
+<tool id="hgv_linkToGProfile" name="g:Profiler" version="1.0.0">
+  <description>tools for functional profiling of gene lists</description>
+
+  <command interpreter="perl">
+    linkToGProfile.pl $input $type $out_file1 -region=$region -gene=$genes -chr=${input.metadata.chromCol} -start=${input.metadata.startCol} -end=${input.metadata.endCol}
+  </command>
+
+  <inputs>
+    <param name="input" type="data" format="tabular" label="Dataset" />
+    <param name="genes" type="data_column" data_ref="input" label="Column with identifiers" />
+    <param name="region" type="select" label="Or use genomic intervals">
+      <option value="0" selected="true">No</option>
+      <option value="1">Yes</option>
+    </param>
+    <param name="type" label="Identifier type if numeric" type="select">
+      <option value="ENTREZGENE_ACC" selected="true">Entrez Gene Acc</option>
+      <option value="MIM_MORBID">OMIM Morbid Map</option>
+      <option value="MIM_GENE">OMIM Gene ID</option>
+      <option value="AFFY_HUGENE_1_0_ST_V1">AFFY_HUGENE_1_0_ST_V1</option>
+      <option value="HGNC_AUTOMATIC_GENE_ACC">HGNC_AUTOMATIC_GENE_ACC</option>
+      <option value="HGNC_MB001_ACC">HGNC_MB001_ACC</option>
+      <option value="HGNC_ACC">HGNC_ACC</option>
+      <option value="WIKIGENE_ACC">WIKIGENE_ACC</option>
+      <option value="DBASS5_ACC">DBASS5_ACC</option>
+      <option value="ILLUMINA_HUMANWG_6_V1">ILLUMINA_HUMANWG_6_V1</option>
+      <option value="AFFY_HUEX_1_0_ST_V2">AFFY_HUEX_1_0_ST_V2</option>
+      <option value="DBASS3_ACC">DBASS3_ACC</option>
+    </param>
+  </inputs>
+
+  <outputs>
+    <data format="html" name="out_file1" />
+  </outputs>
+
+  <tests>
+    <test>
+      <param name="input" ftype="tabular" value="linkToGProfile.tabular" />
+      <param name="genes" value="2" />
+      <param name="type" value="ENTREZGENE_ACC" />
+      <output name="out_file1" file="linkToGProfile_1.out" />
+    </test>
+  </tests>
+
+  <help>
+**Dataset formats**
+
+The input dataset is tabular_ with a column of identifiers.
+The output dataset is html_ with a link to g:Profiler.
+(`Dataset missing?`_)
+
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _html: ${static_path}/formatHelp.html#html
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+This tool creates a link to the g:GOSt tool (Gene Group Functional
+Profiling), which is part of the g:Profiler site at the University
+of Tartu in Estonia.  g:GOSt retrieves the most significant Gene
+Ontology (GO) terms, KEGG and REACTOME pathways, and TRANSFAC motifs
+for a user-specified group of genes, proteins, or microarray probes.
+g:GOSt also allows analysis of ranked or ordered lists of genes,
+visual browsing of GO graph structure, interactive visualization of
+retrieved results, and many other features.  Multiple testing
+corrections are applied to extract only statistically important
+results.
+
+The g:GOSt form is pre-filled with gene, protein, or microarray probe
+IDs from the selected column of a tabular Galaxy dataset.  Alternatively,
+you can choose to use genomic coordinates (these must be from the latest
+build used by Ensembl).  The coordinates don't have to be genes; they can
+be for SNPs, and g:GOSt will map them to gene IDs.  To follow
+the created link, click on the eye icon when the Galaxy tool has
+finished running.  Once at the g:Profiler site, scroll down to see
+the g:GOSt results.  You can also adjust the options in the g:GOSt
+form to your liking, or use the row of links between the form and
+the results to run other g:Profiler tools using the same list of IDs.
+
+-----
+
+**Reference**
+
+Reimand J, Kull M, Peterson H, Hansen J, Vilo J. (2007) g:Profiler -- a web-based
+toolset for functional profiling of gene lists from large-scale experiments.
+Nucleic Acids Res. 35(Web Server issue):W193-200. Epub 2007 May 3.
+
+  </help>
+  <citations>
+    <citation type="doi">10.1093/nar/gkm226</citation>
+  </citations>
+</tool>
diff --git a/tools/phenotype_association/lped_to_geno.pl b/tools/phenotype_association/lped_to_geno.pl
new file mode 100755
index 0000000..21e2d78
--- /dev/null
+++ b/tools/phenotype_association/lped_to_geno.pl
@@ -0,0 +1,104 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+
+#convert from a MAP and PED file to a genotype file (format desc from PLINK)
+#assumes not many SNPs but lots of individuals
+# transposed formats are used when lots of SNPs (TPED, TFAM)
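+#
+#illustrative example (made-up values): a MAP line "1 rs123 0 12345" plus
+#PED alleles "A G" for one individual produce the genotype row
+#"rs123 chr1 12345 1", where 1 is the count of the first allele seen ("A")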
+
+if (!@ARGV or scalar @ARGV != 2) {
+   print "usage: lped_to_geno.pl infile.map infile.ped > outfile\n";
+   exit 1;
+}
+
+my $map = shift @ARGV;
+my $ped = shift @ARGV;
+
+my @snp; #array to hold SNPs from map file
+open(FH, $map) or die "Couldn't open $map, $!\n";
+while (<FH>) {
+   chomp; 
+   my @f = split(/\s+/); #3 or 4 columns
+   #chrom ID [distance|morgans] position
+   if (!exists $f[3]) { $f[3] = $f[2]; } #only 3 columns
+   #have to leave in so know which to skip later
+   #if ($f[3] < 0) { next; } #way of excluding SNPs
+   #if ($f[0] eq '0') { next; } #unplaced SNP
+   if ($f[0] !~ /chr/) { $f[0] = "chr$f[0]"; }
+   push(@snp, "$f[0]:$f[3]:$f[1]");
+}
+close FH or die "Couldn't finish $map, $!\n";
+
+#rows are individuals, columns are SNPs (7 & up)
+#familyId indId fatherId motherId sex phenotype(-9|0|1|2) alleles....
+#need to print row per SNP
+my @allele; #alleles to go with @snp
+my @pheno;  #marker for phenotype
+open(FH, $ped) or die "Couldn't open $ped, $!\n";
+while (<FH>) {
+   chomp;
+   my @f = split(/\s+/);
+   if (!defined $f[5]) { die "ERROR undefined phenotype $f[0] $f[1] $f[2] $f[3] $f[4]\n"; }
+   #-9 is always unknown, 0 unknown or unaffected, 1|2 is affected
+   #either -9|0|1 or 0|1|2
+   push(@pheno, $f[5]);
+   my $j = 0;
+   for(my $i = 6; $i< $#f; $i+=2) {
+      if (!$allele[$j]) { $allele[$j] = ''; }
+      #can be ACTG or 1234 (for haploview etc) or 0 for missing
+      if ($f[$i] eq '1') { $f[$i] = 'A'; }
+      elsif ($f[$i] eq '2') { $f[$i] = 'C'; }
+      elsif ($f[$i] eq '3') { $f[$i] = 'G'; }
+      elsif ($f[$i] eq '4') { $f[$i] = 'T'; }
+      if ($f[$i+1] eq '1') { $f[$i+1] = 'A'; }
+      elsif ($f[$i+1] eq '2') { $f[$i+1] = 'C'; }
+      elsif ($f[$i+1] eq '3') { $f[$i+1] = 'G'; }
+      elsif ($f[$i+1] eq '4') { $f[$i+1] = 'T'; }
+      $f[$i] = uc($f[$i]);
+      $f[$i+1] = uc($f[$i+1]);
+      $allele[$j] .= " $f[$i]$f[$i+1]"; 
+      $j++;
+   }
+   if ($j > scalar @snp) { 
+      die "ERROR: more allele columns in the ped file than there are SNP positions in the map file.\n";
+   }
+}
+close FH or die "Couldn't close $ped, $!\n";
+
+print "ID Chr Pos";
+my $max = 0;
+foreach (@pheno) { if ($_ > $max) { $max = $_; } } 
+if ($max > 1) {
+   foreach (@pheno) { if ($_ > 0) { print " ", $_ - 1; }} #go from 1/2 to 0/1
+}else {
+   foreach (@pheno) { print " $_"; }
+}
+print "\n";
+for (my $i = 0; $i <= $#snp; $i++) { #foreach snp
+   $allele[$i] =~ /(\w)/;
+   my $nt = $1;
+   my $j = 0;
+   my @t = split(/:/, $snp[$i]);
+   if ($t[0] eq 'chr0' or $t[1] < 0) { next; } #skip this SNP
+   if ($t[0] eq 'chrX') { $t[0] = 'chr23'; }
+   elsif ($t[0] eq 'chrY') { $t[0] = 'chr24'; }
+   elsif ($t[0] eq 'chrXY') { $t[0] = 'chr23'; }
+   elsif ($t[0] eq 'chrMT') { $t[0] = 'chr25'; }
+   print "$t[2] $t[0] $t[1]";
+   $allele[$i] =~ s/^\s+//;
+   foreach my $p (split(/\s+/, $allele[$i])) {
+      if ($pheno[$j] > 0 or ($max == 1 && $pheno[$j] > -1)) { #pheno 0 or -9 skip
+          #change AA BB AB to 2 0 1
+          if ($p eq "$nt$nt") { print " 2"; }
+          elsif ($p =~ /$nt/) { print " 1"; }
+          else { print " 0"; }
+      }
+      $j++;
+   }
+   print "\n";
+}
+
+exit;
diff --git a/tools/phenotype_association/lps.xml b/tools/phenotype_association/lps.xml
new file mode 100644
index 0000000..1f18d88
--- /dev/null
+++ b/tools/phenotype_association/lps.xml
@@ -0,0 +1,323 @@
+<tool id="hgv_lps" name="LPS" version="1.0.0">
+  <description>LASSO-Patternsearch algorithm</description>
+
+  <command interpreter="bash">
+    lps_tool_wrapper.sh $lambda_fac $input_file $label_column $output_file $log_file
+    Initialization 0
+    #if $advanced.options == "true":
+      Sample $advanced.sample
+      Verbosity $advanced.verbosity
+      Standardize $advanced.standardize
+      initialLambda $advanced.initialLambda
+      #if $advanced.continuation.continuation == "1":
+        Continuation $advanced.continuation.continuation
+        continuationSteps $advanced.continuation.continuationSteps
+        accurateIntermediates $advanced.continuation.accurateIntermediates
+      #end if
+      printFreq $advanced.printFreq
+      #if $advanced.newton.newton == "1":
+        Newton $advanced.newton.newton
+        NewtonThreshold $advanced.newton.newtonThreshold
+      #end if
+      HessianSampleFraction $advanced.hessianSampleFraction
+      BB 0
+      Monotone 0
+      FullGradient $advanced.fullGradient
+      GradientFraction $advanced.gradientFraction
+      InitialAlpha $advanced.initialAlpha
+      AlphaIncrease $advanced.alphaIncrease
+      AlphaDecrease $advanced.alphaDecrease
+      AlphaMax $advanced.alphaMax
+      c1 $advanced.c1
+      MaxIter $advanced.maxIter
+      StopTol $advanced.stopTol
+      IntermediateTol $advanced.intermediateTol
+      FinalOnly $advanced.finalOnly
+    #end if
+  </command>
+
+  <inputs>
+    <param name="input_file" type="data" format="tabular" label="Dataset"/>
+    <param name="label_column" type="data_column" data_ref="input_file" numerical="true" label="Label column" help="Column containing outcome labels: +1 or -1."/>
+    <param name="lambda_fac" label="Lambda_fac" type="float" value="0.03" help="Target value of the regularization parameter, expressed as a fraction of the calculated lambda_max.">
+      <validator type="in_range" message="0.00 &lt; lambda_fac &lt;= 1.00" min="0.00" max="1.00"/>
+    </param>
+    <conditional name="advanced">
+      <param name="options" type="select" label="Advanced Options">
+        <option value="false" selected="true">Hide advanced options</option>
+        <option value="true">Show advanced options</option>
+      </param>
+      <when value="false">
+        <!-- no options -->
+      </when>
+      <when value="true">
+        <!-- HARDCODED: 'Sample' we don't support passing an array -->
+        <param name="sample" type="float" value="1.0" label="Sample fraction" help="Sample this fraction of the data set.">
+          <validator type="in_range" message="0.0 &lt;= sample &lt;= 1.0" min="0.0" max="1.0"/>
+        </param>
+        <!-- HARDCODED: 'Initialization' = 0 :: Initialize at beta=0 -->
+        <param name="verbosity" type="select" format="integer" label="Verbosity">
+          <option value="0" selected="true">Little output</option>
+          <option value="1">More output</option>
+          <option value="2">Still more output</option>
+        </param>
+        <param name="standardize" type="select" format="integer" label="Standardize" help="Scales and shifts each column so that it has mean zero and variance 1.">
+          <option value="0" selected="true">Don't standardize</option>
+          <option value="1">Standardize</option>
+        </param>
+        <param name="initialLambda" type="float" value="0.8" label="Initial lambda" help="First value of lambda to be used in the continuation scheme, expressed as a fraction of lambda_max.">
+          <validator type="in_range" message="0.0 &lt; initialLambda &lt; 1.0" min="0.0" max="1.0"/>
+        </param>
+        <conditional name="continuation">
+          <param name="continuation" type="select" format="integer" label="Continuation" help="Use continuation strategy to start with a larger value of lambda, decreasing it successively to lambda_fac.">
+            <option value="0" selected="true">Don't use continuation</option>
+            <option value="1">Use continuation</option>
+          </param>
+          <when value="0">
+            <!-- no options -->
+          </when>
+          <when value="1">
+            <param name="continuationSteps" type="integer" value="5" label="Continuation steps" help="Number of lambda values to use in continuation <em>prior</em> to target value lambda_fac."/>
+
+            <param name="accurateIntermediates" type="select" format="integer" label="Accurate intermediates" help="Indicates whether accurate solutions are required for lambda values other than the target value lambda_fac.">
+              <option value="0" selected="true">Don't need accurate intemediates</option>
+              <option value="1">Calculate accurate intermediates</option>
+            </param>
+          </when>
+        </conditional> <!-- name="continuation" -->
+        <param name="printFreq" type="integer" value="1" label="Print frequency" help="Print a progress report every NI iterations, where NI is the supplied value of this parameter.">
+          <validator type="in_range" message="printFreq >= 1" min="1"/>
+        </param>
+        <conditional name="newton">
+          <param name="newton" type="select" format="integer" label="Projected Newton steps">
+            <option value="0" selected="true">No Newton steps</option>
+            <option value="1">Try projected Newton steps</option>
+          </param>
+          <when value="0">
+            <!-- no options -->
+          </when>
+          <when value="1">
+            <param name="newtonThreshold" type="integer" value="500" label="Newton threshold" help="Maximum size of free variable subvector for Newton."/>
+          </when>
+        </conditional>
+        <param name="hessianSampleFraction" type="float" value="1.0" label="Hessian sample fraction" help="Fraction of terms to use in approximate Hessian calculation.">
+          <validator type="in_range" message="0.01 &lt; hessianSampleFraction &lt;= 1.00" min="0.01" max="1.00"/>
+        </param>
+        <!-- HARDCODED: 'BB' = 0 :: don't use Barzilai-Borwein steps -->
+        <!-- HARDCODED: 'Monotone' = 0 :: don't force monotonicity -->
+        <param name="fullGradient" type="select" format="integer" label="Partial gradient vector selection">
+          <option value="0">Use randomly selected partial gradient, including current active components ("biased")</option>
+          <option value="1">Use full gradient vector at every step</option>
+          <option value="2">Randomly selected partial gradient, without regard to current active set ("unbiased")</option>
+        </param>
+        <param name="gradientFraction" type="float" value="0.1" label="Gradient fraction" help="Fraction of inactive gradient vector to evaluate.">
+          <validator type="in_range" message="0.0 &lt; gradientFraction &lt;= 1.0" min="0.0" max="1.0"/>
+        </param>
+        <param name="initialAlpha" type="float" value="1.0" label="Initial value of alpha"/>
+        <param name="alphaIncrease" type="float" value="2.0" label="Alpha increase" help="Factor by which to increase alpha after descent not obtained."/>
+        <param name="alphaDecrease" type="float" value="0.8" label="Alpha decrease" help="Factor by which to decrease alpha after successful first-order step."/>
+        <param name="alphaMax" type="float" value="1e12" label="Alpha max" help="Maximum value of alpha; terminate with error if we exceed this."/>
+        <param name="c1" type="float" value="1e-3" help="Parameter defining the margin by which the first-order step is required to decrease before being taken.">
+          <validator type="in_range" message="0.0 &lt; c1 &lt; 1.0" min="0.0" max="1.0"/>
+        </param>
+        <param name="maxIter" type="integer" value="10000" label="Maximum number of iterations" help="Terminate with error if we exceed this."/>
+        <param name="stopTol" type="float" value="1e-6" label="Stop tolerance" help="Convergence tolerance for target value of lambda."/>
+        <param name="intermediateTol" type="float" value="1e-4" label="Intermediate tolerance" help="Convergence tolerance for intermediate values of lambda."/>
+        <param name="finalOnly" type="select" format="integer" label="Final only">
+          <option value="0" selected="true">Return information for all intermediate values</option>
+          <option value="1">Just return information at the last lambda</option>
+        </param>
+      </when> <!-- value="advanced" -->
+    </conditional> <!-- name="advanced" -->
+  </inputs>
+
+  <outputs>
+    <data name="output_file" format="tabular" label="${tool.name} on ${on_string}: results"/>
+    <data name="log_file" format="txt" label="${tool.name} on ${on_string}: log"/>
+  </outputs>
+
+  <requirements>
+    <requirement type="package">lps_tool</requirement>
+  </requirements>
+
+  <tests>
+    <test>
+      <param name="input_file" value="lps_arrhythmia.tabular"/>
+      <param name="label_column" value="280"/>
+      <param name="lambda_fac" value="0.03"/>
+      <param name="options" value="true"/>
+      <param name="sample" value="1.0"/>
+      <param name="verbosity" value="1"/>
+      <param name="standardize" value="0"/>
+      <param name="initialLambda" value="0.9"/>
+      <param name="continuation" value="1"/>
+      <param name="continuationSteps" value="10"/>
+      <param name="accurateIntermediates" value="0"/>
+      <param name="printFreq" value="1"/>
+      <param name="newton" value="1"/>
+      <param name="newtonThreshold" value="500"/>
+      <param name="hessianSampleFraction" value="1.0"/>
+      <param name="fullGradient" value="1"/>
+      <param name="gradientFraction" value="0.5"/>
+      <param name="initialAlpha" value="1.0"/>
+      <param name="alphaIncrease" value="2.0"/>
+      <param name="alphaDecrease" value="0.8"/>
+      <param name="alphaMax" value="1e12"/>
+      <param name="c1" value="1e-3"/>
+      <param name="maxIter" value="2500"/>
+      <param name="stopTol" value="1e-6"/>
+      <param name="intermediateTol" value="1e-6"/>
+      <param name="finalOnly" value="0"/>
+      <output name="ouput_file" file="lps_arrhythmia_beta.tabular"/>
+      <output name="log_file" file="lps_arrhythmia_log.txt"/>
+    </test>
+  </tests>
+
+  <help>
+**Dataset formats**
+
+The input and output datasets are tabular_.  The columns are described below.
+There is a second output dataset (a log) that is in text_ format.
+(`Dataset missing?`_)
+
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _text: ${static_path}/formatHelp.html#text
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+The LASSO-Patternsearch algorithm fits your dataset to an L1-regularized
+logistic regression model.  A benefit of using L1-regularization is
+that it typically yields a weight vector with relatively few non-zero
+coefficients.
+
+For example, say you have a dataset containing M rows (subjects)
+and N columns (attributes) where one of these N attributes is binary,
+indicating whether or not the subject has some property of interest P.
+In simple terms, LPS calculates a weight for each of the other attributes
+in your dataset.  This weight indicates how "relevant" that attribute
+is for predicting whether or not a given subject has property P.
+The L1-regularization causes most of these weights to be equal to zero,
+which means LPS will find a "small" subset of the remaining N-1 attributes
+in your dataset that can be used to predict P.
+
+In other words, LPS can be used for feature selection.
+
+The input dataset is tabular, and must contain a label column which
+indicates whether or not a given row has property P.  In the current
+version of this tool, P must be encoded using +1 and -1.  The Lambda_fac
+parameter ranges from 0 to 1, and controls how sparse the weight
+vector will be.  At the low end, when Lambda_fac = 0, there will be
+no regularization.  At the high end, when Lambda_fac = 1, there will be
+"too much" regularization, and all of the weights will equal zero.
+
+The LPS tool creates two output datasets.  The first, called the results
+file, is a tabular dataset containing one column of weights for each
+value of the regularization parameter lambda that was tried.  The weight
+columns are in order from left to right by decreasing values of lambda.
+The first N-1 rows in each column are the weights for the N-1 attributes
+in your input dataset.  The final row is a constant, the intercept.
+
+Let **x** be a row from your input dataset and let **b** be a column
+from the results file.  To compute the probability that row **x** has
+a label value of +1:
+
+  Probability(row **x** has label value = +1) = 1 / [1 + exp{**x** \* **b**\[1..N-1\] + **b**\[N\]}]
+
+where **x** \* **b**\[1..N-1\] is the dot product of the row **x** with the
+first N-1 entries of the weight column **b**.
+
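+To make the arithmetic concrete, here is a minimal Python sketch of the
+formula above (the values of ``x`` and ``b`` are made up for illustration;
+they are not output of this tool)::
+
+    from math import exp
+
+    x = [0.5, 1.2, -0.3]         # one input row, with N-1 = 3 attributes
+    b = [0.8, -0.4, 0.1, 0.2]    # one weight column; the last entry is the intercept
+
+    z = sum(xi * bi for xi, bi in zip(x, b[:-1])) + b[-1]
+    prob = 1.0 / (1.0 + exp(z))  # probability that this row's label is +1
+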
+The second output dataset, called the log file, is a text file which
+contains additional data about the fitted L1-regularized logistic
+regression model.  These data include the number of features, the
+computed value of lambda_max, the actual values of lambda used, the
+optimal values of the log-likelihood and regularized log-likelihood
+functions, the number of non-zeros, and the number of iterations.
+
+Website: http://pages.cs.wisc.edu/~swright/LPS/
+
+-----
+
+**Example**
+
+- input file::
+
+    +1   1   0   0   0   0   1   0   1   1   ...
+    +1   1   1   1   0   0   1   0   1   1   ...
+    +1   1   0   1   0   1   0   1   0   1   ...
+    etc.
+
+- output results file::
+
+    0
+    0
+    0
+    0
+    0.025541
+    etc.
+
+- output log file::
+
+    Data set has 100 vectors with 50 features.
+      calculateLambdaMax: n=50, m=100, m+=50, m-=50
+      computed value of lambda_max: 5.0000e-01
+     
+    lambda=2.96e-02 solution:
+      optimal log-likelihood function value: 6.46e-01
+      optimal *regularized* log-likelihood function value: 6.79e-01
+      number of nonzeros at the optimum:      5
+      number of iterations required:     43
+    etc.
+
+-----
+
+**References**
+
+Koh K, Kim S-J, Boyd S. (2007)
+An interior-point method for large-scale l1-regularized logistic regression.
+Journal of Machine Learning Research. 8:1519-1555.
+
+Shi W, Wahba G, Wright S, Lee K, Klein R, Klein B. (2008)
+LASSO-Patternsearch algorithm with application to ophthalmology and genomic data.
+Stat Interface. 1(1):137-153.
+
+<!--
+Wright S, Nowak R, Figueiredo M. (2009)
+Sparse reconstruction by separable approximation.
+IEEE Transactions on Signal Processing. 57(7):2479-2493.
+
+Shi J, Yin W, Osher S, Sajda P. (2010)
+A fast hybrid algorithm for large scale l1-regularized logistic regression.
+Journal of Machine Learning Research. 11:713-741.
+
+Byrd R, Chin G, Neveitt W, Nocedal J. (2010)
+On the use of stochastic Hessian information in unconstrained optimization.
+Technical Report. Northwestern University. June 16, 2010.
+
+Wright S. (2010)
+Accelerated block-coordinate relaxation for regularized optimization.
+Technical Report. University of Wisconsin. August 10, 2010.
+-->
+
+  </help>
+  <citations>
+    <citation type="bibtex">@ARTICLE{Kim07aninterior-point,
+    author = {Seung-jean Kim and Kwangmoo Koh and Michael Lustig and Stephen Boyd and Dimitry Gorinevsky},
+    title = {An interior-point method for large-scale l1-regularized logistic regression},
+    journal = {Journal of Machine Learning Research},
+    year = {2007},
+    volume = {8},
+    pages = {1519--1555},
+}</citation>
+    <citation type="bibtex">@ARTICLE{Shi08lasso-patternsearchalgorithm,
+    author = {Weiliang Shi and Grace Wahba and Stephen Wright and Kristine Lee and Ronald Klein and Barbara Klein},
+    title = {LASSO-Patternsearch Algorithm with Application to Ophthalmology and Genomic Data},
+    journal= {Stat Interface},
+    year = {2008},
+    volume = {1},
+    number = {1},
+    pages = {137--153}
+}</citation>
+  </citations>
+</tool>
diff --git a/tools/phenotype_association/lps_tool_wrapper.sh b/tools/phenotype_association/lps_tool_wrapper.sh
new file mode 100755
index 0000000..bbb4197
--- /dev/null
+++ b/tools/phenotype_association/lps_tool_wrapper.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+# script for execution of deployed applications
+#
+# Sets up the MCR environment for the current $ARCH and executes 
+# the specified command.
+#
+
+export PATH="$PATH:$(dirname "$0")"
+
+MCRROOT=${MCRROOT:-/galaxy/software/linux2.6-x86_64/bin/MCR-7.11/v711}
+MWE_ARCH=glnxa64
+
+if [ "$MWE_ARCH" = "sol64" ] ; then
+  LD_LIBRARY_PATH=.:/usr/lib/lwp:${MCRROOT}/runtime/glnxa64
+else
+  LD_LIBRARY_PATH=.:${MCRROOT}/runtime/glnxa64
+fi
+
+LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/bin/glnxa64
+LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/os/glnxa64
+
+if [ "$MWE_ARCH" = "maci" -o "$MWE_ARCH" = "maci64" ]; then
+  DYLD_LIBRARY_PATH=${DYLD_LIBRARY_PATH}:/System/Library/Frameworks/JavaVM.framework/JavaVM:/System/Library/Frameworks/JavaVM.framework/Libraries
+else
+  MCRJRE=${MCRROOT}/sys/java/jre/glnxa64/jre/lib/amd64
+  LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRJRE}/native_threads
+  LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRJRE}/server
+  LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRJRE}/client
+  LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRJRE}
+fi
+
+XAPPLRESDIR=${MCRROOT}/X11/app-defaults
+
+export LD_LIBRARY_PATH XAPPLRESDIR
+
+lps_tool "$@"
+
+exit 0
diff --git a/tools/phenotype_association/master2gd_snp.pl b/tools/phenotype_association/master2gd_snp.pl
new file mode 100755
index 0000000..6539cf1
--- /dev/null
+++ b/tools/phenotype_association/master2gd_snp.pl
@@ -0,0 +1,221 @@
+#!/usr/bin/perl -w
+use strict;
+
+#convert from master variant file to snp table (Webb format?)
+#new format for version 2.0, also different format for cancer normal pairs
+#set columns for 2.0 version Cancer format
+my $aCnt1 = 21;
+my $aCnt2 = 22;
+
+#snp table format:
+#1. chr
+#2. position (0 based)
+#3. ref allele
+#4. second allele
+#5. overall quality
+#foreach individual (6-9, 10-13, ...)
+#a. count of allele in 3
+#b. count of allele in 4
+#c. genotype call (-1, or count of ref allele)
+#d. quality of genotype call (quality of non-ref allele from masterVar)
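+#(a complete example header and data row appear at the end of this file)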
+
+if (!@ARGV) {
+   print "usage: master2gd_snp.pl masterVar.txt[.gz|.bz2] [-tab=snpTable.txt -addColsOnly -build=hg19 -name=na ] > newSnpTable.txt\n";
+   exit;
+}
+
+my $in = shift @ARGV;
+my $tab;
+my $tabOnly;
+my $build;
+my $name;
+foreach (@ARGV) {
+   if (/-tab=(.*)/) { $tab = $1; }
+   elsif (/-addColsOnly/) { $tabOnly = 1; }
+   elsif (/-build=(.*)/) { $build = $1; }
+   elsif (/-name=(.*)/) { $name = $1; }
+}
+
+#WARNING loads snp table in memory, this could take > 1G ram
+my %old;
+my $colcnt = 0;
+my @head;
+if ($tab) {
+   open(FH, $tab) or die "Couldn't open $tab, $!\n";
+   while (<FH>) {
+      chomp;
+      if (/^#/) { push(@head, $_); next; }
+      my @f = split(/\t/);
+      $old{"$f[0]:$f[1]"} = join("\t", @f);
+      $colcnt = scalar @f;
+   }
+   close FH or die "Couldn't close $tab, $!\n";
+}
+
+if ($in =~ /\.gz$/) {
+   open(FH, "zcat $in |") or die "Couldn't open $in, $!\n";
+}elsif ($in =~ /\.bz2$/) {
+   open(FH, "bzcat $in |") or die "Couldn't open $in, $!\n";
+}else {
+   open(FH, $in) or die "Couldn't open $in, $!\n";
+}
+prepHeader();
+if (@head) { #keep old header, add new?
+   print join("\n", @head), "\n";
+}
+while (<FH>) {
+   chomp;
+   #FORMAT_VERSION 2.0
+   if (/^#FORMAT_VERSION\s+1\./) { 
+      $aCnt1 = 16;
+      $aCnt2 = 17;
+   }
+   if (/^#/) { next; }
+   if (/^>/) { next; } #headers
+   if (/^\s*$/) { next; } 
+   my @f = split(/\t/);
+   if (!$f[6]) { next; } #skip malformed lines (is the input still compressed?)
+   if ($f[6] ne 'snp') { next; } #table only has substitutions
+   if ($f[5] eq 'het-alt') { next; } #skip heterozygous with no ref match
+   if ($f[5] =~ /(hom|het)/) { #zygosity #haploid chrX and chrY?
+         my $a = $f[7];  #reference allele
+         my $a2;
+         my $freq;
+         my $freq2;
+         my $sc;
+         my $alt;
+         my $g = 1; #genotype == ref allele count
+         if ($f[8] eq $f[9]) { #should be homozygous?
+            $a2 = $f[8];
+            $g = 0;
+            $sc = $f[10]; #is this the best one to use? or smallest?
+         }else {
+            if ($a ne $f[8]) { $a2 = $f[8]; $alt = 8; }
+            elsif ($a ne $f[9]) { $a2 = $f[9]; $alt = 9; }
+         }
+         if (defined $f[10] && defined $f[11] && $alt) { #VAF score in 2.0 format
+            $sc = $f[$alt+2];
+         }
+         #version 1.12 columns 16 & 17, version 2.0 Cancer columns 21 & 22
+         if (defined $f[$aCnt1] && defined $f[$aCnt2] && $alt) {
+            if ($alt == 8) { 
+                $freq = $f[$aCnt2];
+                $freq2 = $f[$aCnt1];
+            }elsif ($alt == 9) {
+                $freq = $f[$aCnt1];
+                $freq2 = $f[$aCnt2];
+            }
+         }elsif (defined $f[$aCnt1]) {
+            $freq = 0;
+            $freq2 = $f[$aCnt1];
+         }
+         #if starting a new table or new SNP in old table
+         #add option to only build on current table?
+         if (!$tab) {
+            print "$f[2]\t$f[3]\t$a\t$a2\t-1"; 
+         }elsif (!$tabOnly && !exists $old{"$f[2]:$f[3]"}) {
+            print "$f[2]\t$f[3]\t$a\t$a2\t-1";
+         }elsif (exists $old{"$f[2]:$f[3]"}) {
+            print $old{"$f[2]:$f[3]"};
+            $old{"$f[2]:$f[3]"} = ''; #unset so we know it is printed
+         }elsif ($tabOnly && !exists $old{"$f[2]:$f[3]"}) {
+            next; #skip this one entirely
+         }
+         if ($colcnt && !exists $old{"$f[2]:$f[3]"}) { 
+            #new SNP pad for missing individuals
+            my $i = 5;
+            while ($i < $colcnt) {
+               print "\t-1\t-1\t-1\t-1";
+               $i += 4;
+            }
+         }
+         #add columns for individual
+         print "\t$freq\t$freq2\t$g\t$sc\n";
+   }elsif ($f[5] eq 'hap') {
+      my $g = 0;
+      my $freq = 0;
+      my $freq2 = 0; 
+      if (defined $f[10]) { $freq2 = $f[10]; }
+      my $sc = -1;
+      if (defined $f[$aCnt1]) { $sc = $f[$aCnt1]; }
+      if ($f[8]) {
+         if (!$tab) {
+            print "$f[2]\t$f[3]\t$f[7]\t$f[8]\t-1";
+         }elsif (!$tabOnly && !exists $old{"$f[2]:$f[3]"}) {
+            print "$f[2]\t$f[3]\t$f[7]\t$f[8]\t-1";
+         }elsif (exists $old{"$f[2]:$f[3]"}) {
+            print $old{"$f[2]:$f[3]"};
+            $old{"$f[2]:$f[3]"} = ''; #unset so we know it is printed
+         }elsif ($tabOnly && !exists $old{"$f[2]:$f[3]"}) {
+            next; #skip this one entirely
+         }
+         if ($colcnt && !exists $old{"$f[2]:$f[3]"}) {
+            #new SNP pad for missing individuals
+            my $i = 5;
+            while ($i < $colcnt) {
+               print "\t-1\t-1\t-1\t-1";
+               $i += 4;
+            }
+         }
+         #add columns for individual
+         print "\t$freq\t$freq2\t$g\t$sc\n";
+      }   
+   }
+}
+close FH or die "Couldn't close $in, $!\n";
+
+#if adding to a snp table, now we need to finish those not in the latest set
+foreach my $k (keys %old) {
+   if ($old{$k} ne '') { #not printed yet
+      print $old{$k}, "\t-1\t-1\t-1\t-1\n"; #plus blank for this one
+   }
+}
+
+exit;
+
+#parse old header and add or create new
+sub prepHeader {
+   if (!$build) { $build = 'hg19'; } #set default
+   my @cnames;
+   my @ind;
+   my $n;
+   if (@head) { #parse previous header
+      my $h = join("", @head); #may split between lines
+      if ($h =~ /"column_names":\[(.*?)\]/) {
+         my @t = split(/,/, $1);
+         foreach (@t) { s/"//g; }
+         @cnames = @t;
+         $n = $cnames[$#cnames];
+         $n =~ s/Q//;
+         $n++;
+      }
+      if ($h =~ /"dbkey":"(.*?)"/) { $build = $1; }
+      if ($h =~ /"individuals":\[(.*)\]/) {
+         my $t = $1;
+         $t =~ s/\]\].*/]/; #remove if there are more categories
+         @ind = split(/,/, $t);
+      } 
+   }else { #start new header
+      @cnames = ("chr", "pos", "A", "B", "Q");   
+      $n = 1;
+   }
+   #add current
+   if (!$name) { $name= 'na'; }
+   my $stcol = $colcnt + 1;
+   if ($stcol == 1) { $stcol = 6; } #move past initial columns
+   push(@ind, "[\"$name\",$stcol]");
+   push(@cnames, "${n}A", "${n}B", "${n}G", "${n}Q");
+   #reassign head
+   undef @head;
+   foreach (@cnames) { $_ = "\"$_\""; } #quote name
+   $head[0] = "#{\"column_names\":[" . join(",", @cnames) . "],";
+   $head[1] = "#\"individuals\":[" . join(",", @ind) . "],"; 
+   $head[2] = "#\"dbkey\":\"$build\",\"pos\":2,\"rPos\":2,\"ref\":1,\"scaffold\":1,\"species\":\"$build\"}";
+}
+####End
+
+##example header 
+#{"column_names":["chr","pos","A","B","Q","1A","1B","1G","1Q","2A","2B","2G","2Q","3A","3B","3G","3Q","4A","4B","4G","4Q","5A","5B","5G","5Q","6A","6B","6G","6Q","7A","7B","7G","7Q","8A","8B","8G",
+#"8Q","9A","9B","9G","9Q","10A","10B","10G","10Q"],"dbkey":"hg19","individuals":[["Boh_15M",6],["Boh_19M",10],["Paya_27F",14],["Paya_2F",18],["Paya_32F",22],["Ruil_2M",26],["Ruil_36M",30],["Ruil_3M",
+#34],["Ruil_40",38],["Ruil_47F",42]],"pos":2,"rPos":2,"ref":1,"scaffold":1,"species":"hg19"}
+#chr1	10290	C	T	46.4	0	2	0	7	1	2	0	4	3	2	1	22	0	0	-1	0	1	0	1	4	0	2	0	7	0	0	-1	0	2	3	1	14	0	1	0	4	1	1	1	6
diff --git a/tools/phenotype_association/master2gd_snp.xml b/tools/phenotype_association/master2gd_snp.xml
new file mode 100644
index 0000000..7be4393
--- /dev/null
+++ b/tools/phenotype_association/master2gd_snp.xml
@@ -0,0 +1,86 @@
+<tool id="master2gd_snp" name="MasterVar to gd_snp" hidden="false" version="1.0.0">
+  <description>Convert from MasterVar to gd_snp table</description>
+  <command interpreter="perl">
+    #if $snptab.tab2 == "yes" 
+      #if $snptab.colsOnly == "addColsOnly" #master2gd_snp.pl $input1 -tab=$snptab.input2 -name=$indName -build=${input1.metadata.dbkey} -addColsOnly > $out_file1 
+      #else #master2gd_snp.pl $input1 -tab=$snptab.input2 -name=$indName -build=${input1.metadata.dbkey} > $out_file1
+      #end if
+    #else #master2gd_snp.pl $input1 -name=$indName -build=${input1.metadata.dbkey} > $out_file1
+    #end if
+  </command>
+  <inputs>
+    <param format="tab" name="input1" type="data" label="Complete Genomics MasterVar dataset" />
+    <conditional name="snptab">
+      <param name="tab2" type="select" label="Append to gd_snp table in history">
+        <option value="yes">yes</option>
+        <option value="no" selected="true">no</option>
+      </param>
+      <when value="yes">
+      <param format="gd_snp" name="input2" type="data" label="gd_snp table" />
+      <param name="colsOnly" type="select" label="Skip new SNPs">
+        <option value="" selected="true">no</option>
+        <option value="addColsOnly">yes</option>
+      </param>
+      </when>
+      <when value="no"> <!-- do nothing -->
+      </when>
+    </conditional>
+    <param name="indName" type="text" size="20" label="Label for new individual/group" value="na" />
+  </inputs>
+  <outputs>
+  <data format="gd_snp" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name='input1' value='masterVarTest.txt' ftype='tab' />
+      <param name='tab2' value='no' />
+      <param name='indName' value='na' />
+      <output name="output" file="master2snp_output.txt" />
+    </test>
+  </tests>
+
+  <help>
+**Dataset formats**
+
+The input dataset is in the MasterVar_ format provided by the Complete Genomics
+analysis process (Galaxy considers this to be tabular_, but it must have the
+columns specified for MasterVar).
+The output dataset is a gd_snp_ table.  (`Dataset missing?`_)
+
+.. _Dataset missing?: ./static/formatHelp.html
+.. _gd_snp: ./static/formatHelp.html#gd_snp
+.. _MasterVar: ./static/formatHelp.html#mastervar
+.. _tabular: ./static/formatHelp.html#tab
+
+-----
+
+**What it does**
+
+This tool converts a Complete Genomics MasterVar file to gd_snp format,
+so it can be used with the Genome Diversity tools.  It can either
+start a new dataset or append to an existing one.  When appending, any new
+SNPs that appear only in the MasterVar file can either be skipped or
+backfilled with "-1" (unknown) for the individuals/groups already in the
+gd_snp dataset.  Positions homozygous for the reference are skipped.
+
+-----
+
+**Examples**
+
+- input MasterVar file::
+
+   934     2       chr1    41980   41981   hom     snp     A       G       G       76      97                      dbsnp.86:rs806721       425     1       1       1       2       -170                            ERVL-E-int:ERVL:47.4    2       1.17    N
+   935     2       chr1    41981   42198   hom     ref     =       =       =                                                                                       -170                                            1.17    N
+   1102    2       chr1    53205   53206   het-ref snp     G       C       G       93      127                     dbsnp.100:rs2854676     477     7       30      0       37      -127                                    2       1.17    N
+   etc.
+
+- output::
+
+   chr1    41980   A       G       -1      0       1       0       76
+   chr1    53205   G       C       -1      30      7       1       93
+   etc.
+
+</help>
+</tool>
diff --git a/tools/phenotype_association/master2pg.pl b/tools/phenotype_association/master2pg.pl
new file mode 100755
index 0000000..6697fa2
--- /dev/null
+++ b/tools/phenotype_association/master2pg.pl
@@ -0,0 +1,131 @@
+#!/usr/bin/perl -w
+use strict;
+
+#convert from master variant file to pgSnp
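+#output columns (pgSnp): chrom, start, end, allele(s), allele count,
+#per-allele frequencies/read counts, per-allele quality scores
+#(one tab-separated line per variant)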
+my $snpsOnly = 1; #flag for if doing SNPs or indels
+if (@ARGV && $ARGV[0] eq 'indel') { shift @ARGV; $snpsOnly = 0; }
+my $in = shift @ARGV;
+open(FH, $in) or die "Couldn't open input file $in, $!\n";
+
+while (<FH>) {
+   chomp;
+   if (/^#/) { next; }
+   if (/^>/) { next; } #headers
+   if (/^\s*$/) { next; } 
+   my @f = split(/\t/);
+   if (!$f[5]) { next; } #skip malformed lines (is the input still compressed?)
+   if ($f[5] =~ /(hom|het)/) { #zygosity #haploid chrX and chrY?
+      #only get snps for now
+      if ($snpsOnly && $f[6] eq 'snp') { #varType
+         my $a;
+         my $c = 2;
+         my $freq;
+         my $sc;
+         if ($f[8] eq $f[9]) { #should be homozygous?
+            $a = $f[8];
+            $c = 1;
+         }else {
+            $a = "$f[8]/$f[9]";
+         }
+         if (defined $f[10] && $c == 1) {
+            $sc = $f[10];
+         }elsif (defined $f[10] && defined $f[11] && $c == 2) {
+            $sc = "$f[10],$f[11]";
+         }
+         if (defined $f[16] && $c == 1) {
+            $freq = $f[16];
+         }elsif (defined $f[16] && defined $f[17] && $c == 2) {
+            $freq = "$f[16],$f[17]";
+         }
+         print "$f[2]\t$f[3]\t$f[4]\t$a\t$c\t$freq\t$sc\n";
+      }elsif (!$snpsOnly) {
+         if ($f[8] =~ /^\s*$/) { undef $f[8]; }
+         if ($f[9] =~ /^\s*$/) { undef $f[9]; }
+         my $a;
+         my $c = 2;
+         #do indels
+         if ($f[6] eq "ins") {
+            if (defined $f[8] && defined $f[9] && $f[8] eq $f[9]) { $a = $f[8]; $c = 1; }
+            elsif (defined $f[8] && defined $f[9] && $f[8] ne '?' && $f[9] ne '?') { 
+               $a = "$f[8]/$f[9]";
+            }elsif (!defined $f[8] && defined $f[9]) {
+               $a = "$f[9]/-";
+            }elsif (defined $f[8] && !defined $f[9]) {
+               $a = "$f[8]/-";
+            }
+         }elsif ($f[6] eq "del") {
+            if (!defined $f[8] && !defined $f[9]) {
+               $a = '-'; #homozygous deletion
+               $c = 1;
+            }elsif (!defined $f[8] && defined $f[9]) {
+               $a = "$f[9]/-";
+            }elsif (defined $f[8] && !defined $f[9]) {
+               $a = "$f[8]/-";
+            }            
+         }elsif ($f[6] eq "sub") { #multiple nt substitutions
+            if ($f[8] eq $f[9]) {
+               $a = $f[8];
+               $c = 1;
+            }else {
+               $a = "$f[8]/$f[9]";
+            }
+         }elsif ($f[6] eq "complex") { #treat same as multi-nt sub
+            if ($f[5] =~ /het-alt/ && !defined $f[8]) { $f[8] = '-'; }
+            if ($f[5] =~ /het-alt/ && !defined $f[9]) { $f[9] = '-'; }
+            if (defined $f[8] && defined $f[9] && $f[8] eq $f[9]) {
+               $c = 1;
+               $a = $f[8];
+            }elsif (defined $f[8] && defined $f[9]) {
+               $a = "$f[8]/$f[9]";
+            }
+         }
+         my $sc = '';
+         my $freq = '';
+         if (defined $f[10] && $c == 1) {
+            $sc = $f[10];
+         }elsif (defined $f[10] && defined $f[11] && $c == 2) {
+            $sc = "$f[10],$f[11]";
+         }
+         if (defined $f[16] && $c == 1) {
+            $freq = $f[16];
+         }elsif (defined $f[16] && defined $f[17] && $c == 2) {
+            $freq = "$f[16],$f[17]";
+         }
+         if ($a) {
+            print "$f[2]\t$f[3]\t$f[4]\t$a\t$c\t$freq\t$sc\n";
+         }
+      }
+   }elsif ($f[5] eq 'hap' && $f[6] eq 'snp' && $snpsOnly) {
+      my $c = 1;
+      my $freq = '';
+      if (defined $f[10]) { $freq = $f[10]; }
+      my $sc = '';
+      if (defined $f[16]) { $sc = $f[16]; }
+      if ($f[8]) {
+         print "$f[2]\t$f[3]\t$f[4]\t$f[8]\t$c\t$freq\t$sc\n";
+      }   
+   }elsif ($f[5] eq 'hap' && !$snpsOnly && $f[6] =~ /(del|ins|sub)/) {
+      if ($f[8] =~ /^\s*$/) { undef $f[8]; }
+      my $a;
+      my $c = 1;
+      #do indels
+      if ($f[6] eq "ins") {
+         $a = $f[8]; 
+      }elsif ($f[6] eq "del") {
+         $a = '-'; #deletion
+      }elsif ($f[6] eq "sub") { #multiple nt substitutions
+         $a = $f[8];
+      }
+      my $sc = '';
+      my $freq = '';
+      if (defined $f[10]) { $sc = $f[10]; }
+      if (defined $f[16]) { $freq = $f[16]; }
+      if ($a) {
+         print "$f[2]\t$f[3]\t$f[4]\t$a\t$c\t$freq\t$sc\n";
+      }
+   }
+}
+
+close FH or die "Couldn't close $in, $!\n";
+
+exit;
diff --git a/tools/phenotype_association/master2pg.xml b/tools/phenotype_association/master2pg.xml
new file mode 100644
index 0000000..6d2a250
--- /dev/null
+++ b/tools/phenotype_association/master2pg.xml
@@ -0,0 +1,66 @@
+<tool id="master2pgSnp" name="MasterVar to pgSnp" hidden="false" version="1.0.0">
+  <description>Convert from MasterVar to pgSnp format</description>
+  <command interpreter="perl">
+    master2pg.pl $indel $input1 > $out_file1
+  </command>
+  <inputs>
+    <param format="tab" name="input1" type="data" label="Complete Genomics MasterVar dataset" />
+    <param name="indel" type="select" label="Convert indels">
+      <option value="" selected="true">no</option>
+      <option value="indel">yes</option>
+    </param>
+  </inputs>
+  <outputs>
+  <data format="interval" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name='input1' value='masterVarTest.txt' ftype='tab' />
+      <param name='indel' value="" />
+      <output name="output" file="masterVar_output.txt" />
+    </test>
+  </tests>
+
+  <help>
+**Dataset formats**
+
+The input dataset is in the MasterVar_ format provided by the Complete Genomics 
+analysis process (Galaxy considers this to be tabular_, but it must have the 
+columns specified for MasterVar).
+The output dataset is in pgSnp_ format.  (`Dataset missing?`_)
+
+.. _Dataset missing?: ./static/formatHelp.html
+.. _pgSnp: ./static/formatHelp.html#pgSnp
+.. _MasterVar: ./static/formatHelp.html#mastervar
+.. _tabular: ./static/formatHelp.html#tab
+
+-----
+
+**What it does**
+
+This tool converts a Complete Genomics MasterVar file to pgSnp format,
+so it can be viewed in genome browsers or used with the phenotype association
+and interval operations tools.
+Positions homozygous for the reference are skipped.
+
+-----
+
+**Examples**
+
+- input MasterVar file::
+
+   934     2       chr1    41980   41981   hom     snp     A       G       G       76      97                      dbsnp.86:rs806721       425     1       1       1       2       -170                            ERVL-E-int:ERVL:47.4    2       1.17    N
+   935     2       chr1    41981   42198   hom     ref     =       =       =                                                                                       -170                                            1.17    N
+   1102    2       chr1    53205   53206   het-ref snp     G       C       G       93      127                     dbsnp.100:rs2854676     477     7       30      0       37      -127                                    2       1.17    N
+   etc.
+
+- output::
+
+   chr1    41980   41981   G       1       1       76
+   chr1    51672   51673   C       1       1       53
+   chr1    52237   52238   G       1       7       63
+   chr1    53205   53206   C/G     2       7,30    93,127
+   etc.
+
+</help>
+</tool>
diff --git a/tools/phenotype_association/mergeSnps.pl b/tools/phenotype_association/mergeSnps.pl
new file mode 100755
index 0000000..c51c273
--- /dev/null
+++ b/tools/phenotype_association/mergeSnps.pl
@@ -0,0 +1,57 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+
+#this merges the significance output with the SNPs so users get more than an index
+
+my($out, $snp) = @ARGV;
+
+if (!$out or !$snp) { die "usage: mergeSnps.pl results_file snp_file\n"; }
+
+#merge SNP data with results
+merge();
+
+exit;
+
+########################################
+
+#merge the input and output files so have SNP data with result
+sub merge {
+   open(FH, $out) or die "Couldn't open $out, $!\n";
+   my %res;
+   my @ind;
+   while (<FH>) {
+      chomp;
+      my $line = $_;
+      #0:      10 score= 14.224153 , df= 2 , p= 0.040760 , N=50
+      if ($line =~ /^(\d+):\s+(.*)/) { $res{$1} = $2; push(@ind, $1); }
+   }
+   close FH;
+   if (!@ind) { return; } #no results, leave alone
+   @ind = sort { $a <=> $b } @ind;
+   #read input file to get SNP data
+   open(FH, $snp) or die "Couldn't open $snp, $!\n";
+   my $i = 0; #0 based, not counting ID line
+   my $c = shift @ind;
+   while (<FH>) {
+      chomp; 
+      if (/^ID/) { next; }
+      my @f = split(/\s+/);
+      if ($i == $c) { 
+         $res{$i} = "$f[0]\t$f[1]\t$f[2]\t$res{$i}";
+         if (!@ind) { last; }
+         $c = shift @ind;
+      }
+      $i++;      
+   }
+   close FH;
+   #now reprint results with SNP data included
+   open(FH, ">", $out) or die "Couldn't write to $out, $!\n";
+   print FH "ID\tchr\tposition\tresults\n";
+   foreach $i (sort { $a <=> $b } keys %res) {
+      print FH $res{$i}, "\n";
+   }
+   close FH;
+}
+
diff --git a/tools/phenotype_association/pagetag.py b/tools/phenotype_association/pagetag.py
new file mode 100755
index 0000000..fd52179
--- /dev/null
+++ b/tools/phenotype_association/pagetag.py
@@ -0,0 +1,313 @@
+#!/usr/bin/env python
+"""
+This accepts as input a file of the following format:
+
+    Site   Sample   Allele1   Allele2
+
+for example:
+
+    000834   D001    G       G
+    000834   D002    G       G
+    000834   D003    G       G
+    000834   D004    G       G
+    000834   D005    N       N
+    000834   E001    G       G
+    000834   E002    G       G
+    000834   E003    G       G
+    000834   E004    G       G
+    000834   E005    G       G
+    000963   D001    T       T
+    000963   D002    T       T
+    000963   D003    T       T
+    000963   D004    T       T
+    000963   D005    N       N
+    000963   E001    T       T
+    000963   E002    N       N
+    000963   E003    G       T
+    000963   E004    G       G
+    000963   E005    G       T
+
+and a rsquare threshold and outputs two files:
+
+a) a file of input snps (one on each line). A SNP is identified by the "Site"
+column in the input file
+
+b) a file where each line has the following:
+    SNP     list
+where SNP is one of the SNPs and "list" is a comma-separated list of SNPs
+that exceed the rsquare threshold with the first SNP.
+"""
+from __future__ import print_function
+
+from getopt import getopt, GetoptError
+from sys import argv, exit, stderr
+
+__author__ = "Aakrosh Ratan"
+__email__ = "ratan at bx.psu.edu"
+
+# do we want the debug information to be printed?
+debug_flag = False
+
+# denote different combos of alleles in code
+HOMC = str(1)
+HOMR = str(2)
+HETE = str(3)
+OTHER = str(4)
+
+indexcalculator = {(HOMC, HOMC): 0,
+                   (HOMC, HOMR): 1,
+                   (HOMC, HETE): 2,
+                   (HOMR, HOMC): 3,
+                   (HOMR, HOMR): 4,
+                   (HOMR, HETE): 5,
+                   (HETE, HOMC): 6,
+                   (HETE, HOMR): 7,
+                   (HETE, HETE): 8}
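+# Each pair of per-locus genotype codes addresses one cell of a 3x3
+# contingency table, e.g. (HOMC at locus 1, HETE at locus 2) -> cell 2;
+# calculateLD() below accumulates these counts to estimate haplotype
+# frequencies and r-squared.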
+
+
+def read_inputfile(filename, samples):
+    input = {}
+
+    file = open(filename, "r")
+
+    for line in file:
+        position, sample, allele1, allele2 = line.split()
+
+        # if the user specified a list of samples, then only use those samples
+        if samples is not None and sample not in samples:
+            continue
+
+        if position in input:
+            v = input[position]
+            v[sample] = (allele1, allele2)
+        else:
+            v = {sample: (allele1, allele2)}
+            input[position] = v
+
+    file.close()
+    return input
+
+
+def annotate_locus(input, minorallelefrequency, snpsfile):
+    locus = {}
+    for k, v in input.items():
+        genotypes = v.values()
+        alleles = [y for x in genotypes for y in x]
+        alleleset = list(set(alleles) - set(["N", "X"]))
+
+        if len(alleleset) == 2:
+            genotypevec = ""
+            num1 = len([x for x in alleles if x == alleleset[0]])
+            num2 = len([x for x in alleles if x == alleleset[1]])
+
+            if num1 > num2:
+                major = alleleset[0]
+                minor = alleleset[1]
+                minorfreq = (num2 * 1.0) / (num1 + num2)
+            else:
+                major = alleleset[1]
+                minor = alleleset[0]
+                minorfreq = (num1 * 1.0) / (num1 + num2)
+
+            if minorfreq < minorallelefrequency:
+                continue
+
+            for gen in genotypes:
+                if gen == (major, major):
+                    genotypevec += HOMC
+                elif gen == (minor, minor):
+                    genotypevec += HOMR
+                elif gen == (major, minor) or gen == (minor, major):
+                    genotypevec += HETE
+                else:
+                    genotypevec += OTHER
+
+            locus[k] = genotypevec, minorfreq
+        elif len(alleleset) > 2:
+            print(k, file=snpsfile)
+    return locus
+
+
+def calculateLD(loci, rsqthreshold):
+    snps = list(loci)
+    rsquare = {}
+
+    for index, loc1 in enumerate(snps):
+        for loc2 in snps[index + 1:]:
+            matrix = [0] * 9
+
+            vec1 = loci[loc1][0]
+            vec2 = loci[loc2][0]
+
+            for gen in zip(vec1, vec2):
+                if gen[0] == OTHER or gen[1] == OTHER:
+                    continue
+                matrix[indexcalculator[gen]] += 1
+
+            n = sum(matrix)
+            x11 = 2 * matrix[0] + matrix[2] + matrix[6]
+            x12 = 2 * matrix[1] + matrix[2] + matrix[7]
+            x21 = 2 * matrix[3] + matrix[6] + matrix[5]
+
+            p = (x11 + x12 + matrix[8] * 1.0) / (2 * n)
+            q = (x11 + x21 + matrix[8] * 1.0) / (2 * n)
+            p11 = p * q
+
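+            # p and q are the major-allele frequencies at the two loci;
+            # starting from linkage equilibrium (p11 = p*q), the loop below
+            # refines the major-major haplotype frequency p11 by EM, then
+            # computes D = p11 - p*q and r^2 = D^2 / (p(1-p)q(1-q)).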
+            oldp11 = p11
+            ratio = 0.0
+            converged = False
+            convergentcounter = 0
+            if p11 > 0.0:
+                while converged is False and convergentcounter < 100:
+                    if (1.0 - p - q + p11) != 0.0 and oldp11 != 0.0:
+                        num = matrix[8] * p11 * (1.0 - p - q + p11)
+                        den = p11 * (1.0 - p - q + p11) + (p - p11) * (q - p11)
+                        p11 = (x11 + (num / den)) / (2.0 * n)
+                        ratio = p11 / oldp11
+                        if 0.9999 <= ratio <= 1.001:
+                            converged = True
+                        oldp11 = p11
+                        convergentcounter += 1
+                    else:
+                        converged = True
+
+            dvalue = 0.0
+            if converged is True:
+                dvalue = p11 - (p * q)
+
+            if dvalue != 0.0:
+                rsq = (dvalue ** 2) / (p * q * (1 - p) * (1 - q))
+                if rsq >= rsqthreshold:
+                    rsquare["%s %s" % (loc1, loc2)] = rsq
+
+    return rsquare
+
+
+def main(inputfile, snpsfile, neighborhoodfile,
+         rsquare, minorallelefrequency, samples):
+    # read the input file
+    input = read_inputfile(inputfile, samples)
+    print("Read %d locations" % len(input), file=stderr)
+
+    # open the snpsfile to print
+    file = open(snpsfile, "w")
+
+    # annotate the inputs, remove the abnormal loci (which do not have 2
+    # alleles) and add the major and minor allele to each locus
+    loci = annotate_locus(input, minorallelefrequency, file)
+    print("Read %d interesting locations" % len(loci), file=stderr)
+
+    # print all the interesting loci as candidate snps
+    for k in loci.keys():
+        print(k, file=file)
+    file.close()
+    print("Finished creating the snpsfile", file=stderr)
+
+    # calculate the LD values and store it if it exceeds the threshold
+    lds = calculateLD(loci, rsquare)
+    print("Calculated all the LD values", file=stderr)
+
+    # create a list of SNPs
+    snps = {}
+    ldvals = {}
+    for k, v in lds.items():
+        s1, s2 = k.split()
+        if s1 in snps:
+            snps[s1].append(s2)
+        else:
+            snps[s1] = [s2]
+        if s2 in snps:
+            snps[s2].append(s1)
+        else:
+            snps[s2] = [s1]
+
+        if s1 in ldvals:
+            ldvals[s1].append(str(v))
+        else:
+            ldvals[s1] = [str(v)]
+        if s2 in ldvals:
+            ldvals[s2].append(str(v))
+        else:
+            ldvals[s2] = [str(v)]
+
+    # print the snps to the output file
+    file = open(neighborhoodfile, "w")
+
+    for k, v in snps.items():
+        ldv = ldvals[k]
+        if debug_flag is True:
+            print("%s\t%s\t%s" % (k, ",".join(v), ",".join(ldv)), file=file)
+        else:
+            print("%s\t%s" % (k, ",".join(v)), file=file)
+
+    file.close()
+
+
+def read_list(filename):
+    file = open(filename, "r")
+    list = {}
+
+    for line in file:
+        list[line.strip()] = 1
+
+    file.close()
+    return list
+
+
+def usage():
+    f = stderr
+    print("usage:", file=f)
+    print("pagetag [options] input.txt snps.txt neighborhood.txt", file=f)
+    print("where input.txt is the prettybase file", file=f)
+    print("where snps.txt is the first output file with the snps", file=f)
+    print("where neighborhood.txt is the output neighborhood file", file=f)
+    print("where the options are:", file=f)
+    print("-h,--help : print usage and quit", file=f)
+    print("-d,--debug: print debug information", file=f)
+    print("-r,--rsquare: the rsquare threshold (default : 0.64)", file=f)
+    print("-f,--freq : the minimum MAF required (default: 0.0)", file=f)
+    print("-s,--sample : a list of samples to be clustered", file=f)
+
+
+if __name__ == "__main__":
+    try:
+        opts, args = getopt(argv[1:], "hds:r:f:",
+                            ["help", "debug", "rsquare=", "freq=", "sample="])
+    except GetoptError as err:
+        print(str(err))
+        usage()
+        exit(2)
+
+    rsquare = 0.64
+    minorallelefrequency = 0.0
+    samples = None
+
+    for o, a in opts:
+        if o in ("-h", "--help"):
+            usage()
+            exit()
+        elif o in ("-d", "--debug"):
+            debug_flag = True
+        elif o in ("-r", "--rsquare"):
+            rsquare = float(a)
+        elif o in ("-f", "--freq"):
+            minorallelefrequency = float(a)
+        elif o in ("-s", "--sample"):
+            samples = read_list(a)
+        else:
+            assert False, "unhandled option"
+
+    if rsquare < 0.00 or rsquare > 1.00:
+        print("input value of rsquare should be in [0.00, 1.00]", file=stderr)
+        exit(3)
+
+    if minorallelefrequency < 0.0 or minorallelefrequency > 0.5:
+        print("input value of MAF should be (0.00,0.50]", file=stderr)
+        exit(4)
+
+    if len(args) != 3:
+        usage()
+        exit(5)
+
+    main(args[0], args[1], args[2], rsquare, minorallelefrequency, samples)
diff --git a/tools/phenotype_association/pass.xml b/tools/phenotype_association/pass.xml
new file mode 100644
index 0000000..0095393
--- /dev/null
+++ b/tools/phenotype_association/pass.xml
@@ -0,0 +1,130 @@
+<tool id="hgv_pass" name="PASS" version="1.0.0">
+  <description>significant transcription factor binding sites from ChIP data</description>
+
+  <command interpreter="bash">
+    pass_wrapper.sh "$input" "$min_window" "$max_window" "$false_num" "$output"
+  </command>
+
+  <inputs>
+    <param format="gff" name="input" type="data" label="Dataset"/>
+    <param name="min_window" label="Smallest window size (by # of probes)" type="integer" value="2" />
+    <param name="max_window" label="Largest window size (by # of probes)" type="integer" value="6" />
+    <param name="false_num" label="Expected total number of false positive intervals to be called" type="float" value="5.0" help="N.B.: this is a <em>count</em>, not a rate." />
+  </inputs>
+
+  <outputs>
+    <data format="tabular" name="output" />
+  </outputs>
+
+  <requirements>
+    <requirement type="package">pass</requirement>
+    <requirement type="binary">sed</requirement>
+  </requirements>
+
+  <!-- we need to be able to set the seed for the random number generator
+  <tests>
+    <test>
+      <param name="input" ftype="gff" value="pass_input.gff"/>
+      <param name="min_window" value="2"/>
+      <param name="max_window" value="6"/>
+      <param name="false_num" value="5"/>
+      <output name="output" file="pass_output.tab"/>
+    </test>
+  </tests>
+  -->
+
+  <help>
+**Dataset formats**
+
+The input is in GFF_ format, and the output is tabular_.
+(`Dataset missing?`_)
+
+.. _GFF: ${static_path}/formatHelp.html#gff
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+PASS (Poisson Approximation for Statistical Significance) detects
+significant transcription factor binding sites in the genome from
+ChIP data.  This is probably the only peak-calling method that
+accurately controls the false-positive rate and FDR in ChIP data,
+which is important given the huge discrepancy in results obtained
+from different peak-calling algorithms.  At the same time, this
+method achieves a similar or better power than previous methods.
+
+<!-- we don't have wrapper support for the "prior" file yet
+Another unique feature of this method is that it allows varying
+thresholds to be used for peak calling at different genomic
+locations.  For example, if a position lies in an open chromatin
+region, is depleted of nucleosome positioning, or a co-binding
+protein has been detected within the neighborhood, then the position
+is more likely to be bound by the target protein of interest, and
+hence a lower threshold will be used to call significant peaks.
+As a result, weak but real binding sites can be detected.
+-->
+
+-----
+
+**Hints**
+
+- ChIP-Seq data:
+
+  If the data is from ChIP-Seq, you need to convert the ChIP-Seq values
+  into z-scores before using this program (one common conversion is
+  sketched below).  It is also recommended that you group read counts
+  within a neighborhood together, e.g. in tiled windows of 30bp.  In this
+  way, the ChIP-Seq data will resemble ChIP-chip data in format.
+
+- Choosing window size options:
+
+  The window size is related to the probe tiling density.  For example,
+  if the probes are tiled at every 100bp, then setting the smallest
+  window = 2 and largest window = 6 is appropriate, because the DNA
+  fragment size is around 300-500bp.
+
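+- Converting to z-scores:
+
+  One common conversion (an illustrative choice; PASS itself does not
+  mandate a particular formula) is::
+
+    z = (x - mean(x)) / sd(x)
+
+  where x is the read count in a window and the mean and standard deviation
+  are taken over all windows.
+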
+-----
+
+**Example**
+
+- input file::
+
+    chr7  Nimblegen  ID  40307603  40307652  1.668944     .  .  .
+    chr7  Nimblegen  ID  40307703  40307752  0.8041307    .  .  .
+    chr7  Nimblegen  ID  40307808  40307865  -1.089931    .  .  .
+    chr7  Nimblegen  ID  40307920  40307969  1.055044     .  .  .
+    chr7  Nimblegen  ID  40308005  40308068  2.447853     .  .  .
+    chr7  Nimblegen  ID  40308125  40308174  0.1638694    .  .  .
+    chr7  Nimblegen  ID  40308223  40308275  -0.04796628  .  .  .
+    chr7  Nimblegen  ID  40308318  40308367  0.9335709    .  .  .
+    chr7  Nimblegen  ID  40308526  40308584  0.5143972    .  .  .
+    chr7  Nimblegen  ID  40308611  40308660  -1.089931    .  .  .
+    etc.
+
+  In GFF, a value of dot '.' is used to mean "not applicable".
+
+- output file::
+
+    ID  Chr   Start     End       WinSz  PeakValue  # of FPs  FDR
+    1   chr7  40310931  40311266  4      1.663446   0.248817  0.248817
+
+-----
+
+**References**
+
+Zhang Y. (2008)
+Poisson approximation for significance in genome-wide ChIP-chip tiling arrays.
+Bioinformatics. 24(24):2825-31. Epub 2008 Oct 25.
+
+Chen KB, Zhang Y. (2010)
+A varying threshold method for ChIP peak calling using multiple sources of information.
+Submitted.
+
+  </help>
+  <citations>
+    <citation type="doi">10.1093/bioinformatics/btn549</citation>
+    <citation type="doi">10.1093/bioinformatics/btq379</citation>
+  </citations>
+</tool>
diff --git a/tools/phenotype_association/pass_wrapper.sh b/tools/phenotype_association/pass_wrapper.sh
new file mode 100755
index 0000000..fff42d0
--- /dev/null
+++ b/tools/phenotype_association/pass_wrapper.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+export PATH=$PATH:$(dirname $0)
+
+input=$1
+min_window=$2
+max_window=$3
+false_num=$4
+output=$5
+
+pass "$input" "$min_window" "$max_window" "$false_num" "$output" >/dev/null
+sed -i -e 's/\t\t*/\t/g' "$output"
+
diff --git a/tools/phenotype_association/senatag.py b/tools/phenotype_association/senatag.py
new file mode 100755
index 0000000..61e4e39
--- /dev/null
+++ b/tools/phenotype_association/senatag.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+"""
+This tool takes the following files as input:
+a) input_snp  : A file with identifiers for SNPs (one on each line)
+b) ldfile     : A file where each line has the following format:
+                snp     list
+                where "snp" is an identifier for one SNP and "list" is a
+                comma-separated list of all the other SNPs that are in LD with
+                it (as per some threshold of rsquare)
+
+The output is a set of tag SNPs for the given datasets
+
+The algorithm is as follows:
+
+a) Construct a graph for each population, where each node is a SNP and two
+nodes are connected by an edge iff they are in LD.
+b) For each SNP, count the number of connected nodes that have not yet been
+visited.
+c) Find the SNP with the highest count and assign it to be a tag SNP.
+d) Mark that SNP and all the SNPs connected to it as "visited". This should be
+done for each population.
+e) Repeat steps b-d until all SNPs in all populations have been visited.
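+
+Example invocation (hypothetical file names):
+    senatag.py -e excluded.txt -r required.txt neighborhood.txt inputsnps.txt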
+"""
+from __future__ import print_function
+
+import heapq
+import os
+
+from getopt import getopt, GetoptError
+from sys import argv, exit, stderr
+
+__author__ = "Aakrosh Ratan"
+__email__ = "ratan at bx.psu.edu"
+
+# do we want the debug information to be printed?
+debug_flag = False
+
+
+class node:
+    def __init__(self, name):
+        self.name = name
+        self.edges = []
+        self.visited = False
+
+    # return the number of nodes connected to this node, that have yet to be
+    # visited
+    def num_not_visited(self):
+        num = 0
+        for n in self.edges:
+            if n.visited is False:
+                num += 1
+        return num
+
+    def __cmp__(self, other):
+        return other.num_not_visited() - self.num_not_visited()
+
+    # heapq on Python 3 relies on __lt__; keep the same max-first ordering
+    def __lt__(self, other):
+        return self.num_not_visited() > other.num_not_visited()
+
+    def __str__(self):
+        return self.name
+
+
+class graph:
+    def __init__(self):
+        self.nodes = {}
+
+    def __str__(self):
+        string = ""
+        for n1 in self.nodes.values():
+            n2s = [x.name for x in n1.edges]
+            string += "%s %s\n" % (n1.name, ",".join(n2s))
+        return string[:-1]
+
+    def add_node(self, n):
+        self.nodes[n.name] = n
+
+    def add_edges(self, n1, n2):
+        assert n1.name in self.nodes
+        assert n2.name in self.nodes
+        n1.edges.append(n2)
+        n2.edges.append(n1)
+
+    def check_graph(self):
+        for n in self.nodes.values():
+            ms = [x for x in n.edges]
+            for m in ms:
+                if n not in m.edges:
+                    print("check : %s - %s" % (n, m), file=stderr)
+
+
+def construct_graph(ldfile, snpfile):
+    # construct the initial graph. add all the SNPs as nodes
+    g = graph()
+    file = open(snpfile, "r")
+
+    for line in file:
+        # ignore empty lines and add the remainder to the graph
+        if len(line.strip()) == 0:
+            continue
+        n = node(line.strip())
+        g.add_node(n)
+
+    file.close()
+    print("Added %d nodes to a graph" % len(g.nodes), file=stderr)
+
+    # now add all the edges
+    file = open(ldfile, "r")
+
+    for line in file:
+        tokens = line.split()
+        assert len(tokens) == 2
+
+        # if this node is in the graph, then we need to construct an edge from
+        # this node to all the nodes which are highly related to it
+        if tokens[0] in g.nodes:
+            n1 = g.nodes[tokens[0]]
+            n2s = [g.nodes[x] for x in tokens[1].split(",")]
+
+            for n2 in n2s:
+                g.add_edges(n1, n2)
+
+    file.close()
+    print("Added all edges to the graph", file=stderr)
+
+    return g
+
+
+def check_output(g, tagsnps):
+    # find all the nodes in the graph
+    allsnps = [x.name for x in g.nodes.values()]
+
+    # find the nodes that are covered by our tagsnps
+    mysnps = [x.name for x in tagsnps]
+
+    for n in tagsnps:
+        for m in n.edges:
+            mysnps.append(m.name)
+
+    mysnps = list(set(mysnps))
+
+    if set(allsnps) != set(mysnps):
+        diff = list(set(allsnps) - set(mysnps))
+        print("%s are not covered" % ",".join(diff), file=stderr)
+
+
+def main(ldfile, snpsfile, required, excluded):
+    # construct the graph
+    g = construct_graph(ldfile, snpsfile)
+    if debug_flag is True:
+        g.check_graph()
+
+    tagsnps = []
+    neighbors = {}
+
+    # take care of the SNPs that are required to be TagSNPs
+    for s in required:
+        t = g.nodes[s]
+
+        t.visited = True
+        ns = []
+
+        for n in t.edges:
+            if n.visited is False:
+                ns.append(n.name)
+            n.visited = True
+
+        tagsnps.append(t)
+        neighbors[t.name] = list(set(ns))
+
+    # find the tag SNPs for this graph
+    data = list(g.nodes.values())
+    heapq.heapify(data)
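+    # the heap is ordered by num_not_visited (largest first), so each pop
+    # yields the SNP that covers the most unvisited SNPs; visiting nodes
+    # changes the ordering, hence the re-heapify at the end of each pass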
+
+    while data:
+        s = heapq.heappop(data)
+
+        if s.visited is True or s.name in excluded:
+            continue
+
+        s.visited = True
+        ns = []
+
+        for n in s.edges:
+            if n.visited is False:
+                ns.append(n.name)
+            n.visited = True
+
+        tagsnps.append(s)
+        neighbors[s.name] = list(set(ns))
+
+        heapq.heapify(data)
+
+    for s in tagsnps:
+        if len(neighbors[s.name]) > 0:
+            print("%s\t%s" % (s, ",".join(neighbors[s.name])))
+            continue
+        print(s)
+
+    if debug_flag is True:
+        check_output(g, tagsnps)
+
+
+def read_list(filename):
+    assert os.path.exists(filename)
+    file = open(filename, "r")
+    list = {}
+
+    for line in file:
+        list[line.strip()] = 1
+
+    file.close()
+    return list
+
+
+def usage():
+    f = stderr
+    print("usage:", file=f)
+    print("senatag [options] neighborhood.txt inputsnps.txt", file=f)
+    print("where inputsnps.txt is a file of snps from one population", file=f)
+    print("where neighborhood.txt is neighborhood details for the pop.", file=f)
+    print("where the options are:", file=f)
+    print("-h,--help : print usage and quit", file=f)
+    print("-d,--debug: print debug information", file=f)
+    print("-e,--excluded : file with names of SNPs that cannot be TagSNPs", file=f)
+    print("-r,--required : file with names of SNPs that should be TagSNPs", file=f)
+
+
+if __name__ == "__main__":
+    try:
+        opts, args = getopt(argv[1:], "hdr:e:",
+                            ["help", "debug", "required=", "excluded="])
+    except GetoptError as err:
+        print(str(err))
+        usage()
+        exit(2)
+
+    required = {}
+    excluded = {}
+
+    for o, a in opts:
+        if o in ("-h", "--help"):
+            usage()
+            exit()
+        elif o in ("-d", "--debug"):
+            debug_flag = True
+        elif o in ("-r", "--required"):
+            required = read_list(a)
+        elif o in ("-e", "--excluded"):
+            excluded = read_list(a)
+        else:
+            assert False, "unhandled option"
+
+    if len(args) != 2:
+        usage()
+        exit(3)
+
+    assert os.path.exists(args[0])
+    assert os.path.exists(args[1])
+
+    main(args[0], args[1], required, excluded)
diff --git a/tools/phenotype_association/sift.xml b/tools/phenotype_association/sift.xml
new file mode 100644
index 0000000..3654c74
--- /dev/null
+++ b/tools/phenotype_association/sift.xml
@@ -0,0 +1,180 @@
+<tool id="hgv_sift" name="SIFT" version="1.0.0">
+  <description>predictions of functional sites</description>
+
+  <command interpreter="bash">
+    sift_variants_wrapper.sh "$input" "$output" "${input.metadata.dbkey}" "${GALAXY_DATA_INDEX_DIR}/sift_db.loc" "$chrom_col" "$pos_col" "$base" "$allele_col" "$strand_source.strand_col" "$comment_source.comment_col" "$output_opts"
+  </command>
+
+  <inputs>
+    <param name="input" type="data" format="tabular" label="Dataset">
+      <validator type="unspecified_build"/>
+      <validator type="dataset_metadata_in_file" filename="sift_db.loc" metadata_name="dbkey" metadata_column="0" message="Data is currently not available for the specified build."/>
+    </param>
+    <param name="chrom_col"  type="data_column" data_ref="input" label="Column with chromosome"/>
+    <param name="pos_col"    type="data_column" data_ref="input" numerical="true" label="Column with position"/>
+    <param name="base" type="select" label="Position coordinates are">
+      <option value="1" selected="true">one-based</option>
+      <option value="0">zero-based</option>
+    </param>
+    <param name="allele_col" type="data_column" data_ref="input" label="Column with allele"/>
+    <conditional name="strand_source">
+      <param name="strand_choice" type="select" label="Strand info">
+        <option value="data_column" selected="true">a column in the dataset</option>
+        <option value="all_pos">all on sense/forward/+ strand</option>
+        <option value="all_neg">all on antisense/reverse/- strand</option>
+      </param>
+      <when value="data_column">
+        <param name="strand_col" type="data_column" data_ref="input" label="Column with strand"/>
+      </when>
+      <when value="all_pos">
+        <param name="strand_col" type="hidden" value="+"/>
+      </when>
+      <when value="all_neg">
+        <param name="strand_col" type="hidden" value="-"/>
+      </when>
+    </conditional>
+    <conditional name="comment_source">
+      <param name="comment_choice" type="select" label="Include comment column">
+        <option value="no" selected="true">no</option>
+        <option value="yes">yes</option>
+      </param>
+      <when value="no">
+        <param name="comment_col" type="hidden" value="-"/>
+      </when>
+      <when value="yes">
+        <param name="comment_col" type="data_column" data_ref="input" label="Column with comment"/>
+      </when>
+    </conditional>
+    <param name="output_opts" type="select" multiple="true" display="checkboxes" label="Include the following additional fields in the output">
+      <option value="A">Ensembl Gene ID</option>
+      <option value="B">Gene Name</option>
+      <option value="C">Gene Description</option>
+      <option value="D">Ensembl Protein Family ID</option>
+      <option value="E">Ensembl Protein Family Description</option>
+      <option value="F">Ensembl Transcript Status (Known / Novel)</option>
+      <option value="G">Protein Family Size</option>
+      <option value="H">Ka/Ks (Human-mouse)</option>
+      <option value="I">Ka/Ks (Human-macaque)</option>
+      <option value="J">OMIM Disease</option>
+      <option value="K">Allele Frequencies (All Hapmap Populations - weighted average)</option>
+      <option value="L">Allele Frequencies (CEU Hapmap population)</option>
+    </param>
+  </inputs>
+
+  <outputs>
+    <data format="tabular" name="output" />
+  </outputs>
+
+  <requirements>
+    <requirement type="binary">awk</requirement>
+    <requirement type="binary">rm</requirement>
+    <requirement type="binary">sed</requirement>
+  </requirements>
+
+  <tests>
+    <test>
+      <param name="input" value="sift_variants.tab" ftype="tabular" dbkey="hg18"/>
+      <param name="chrom_col" value="1"/>
+      <param name="pos_col" value="3"/>
+      <param name="base" value="1"/>
+      <param name="allele_col" value="5"/>
+      <param name="strand_choice" value="data_column"/>
+      <param name="strand_col" value="4"/>
+      <param name="output_opts" value="A"/>
+      <output name="output" file="sift_variants_result.tab"/>
+    </test>
+  </tests>
+
+  <help>
+.. class:: warningmark
+
+This currently works only for builds hg18 or hg19.
+
+-----
+
+**Dataset formats**
+
+The input and output datasets are tabular_. 
+(`Dataset missing?`_)
+
+.. _tabular: ${static_path}/formatHelp.html#tab
+.. _Dataset missing?: ${static_path}/formatHelp.html
+
+-----
+
+**What it does**
+
+SIFT predicts whether an amino-acid substitution affects protein function,
+based on sequence homology and the physical properties of amino acids.
+SIFT can be applied to naturally occurring non-synonymous polymorphisms
+and laboratory-induced missense mutations.  This tool uses SQLite databases
+containing pre-computed SIFT scores and annotations for all possible nucleotide
+substitutions at each position in the human exome.  Allele frequency data
+are from the HapMap frequency database, and additional transcript and 
+gene-level data are from Ensembl BioMart.
+
+The input dataset must contain columns for the chromosome, position, and
+alleles.  The alleles must be two nucleotides separated by '/',
+usually the reference allele and the allele of interest.
+The strand must either be in another column or all the same.
+The output contains a standard set of columns plus the additional ones that
+have been selected from the list above.
+
+Website: http://sift.jcvi.org/
+
+-----
+
+**Example**
+
+- input file::
+
+    chr3   81780820   +  T/C
+    chr2   230341630  +  G/A
+    chr2   43881517   +  A/T
+    chr2   43857514   +  T/C
+    chr6   88375602   +  G/A
+    chr22  29307353   -  T/A
+    chr10  115912482  -  G/T
+    chr10  115900918  -  C/T
+    chr16  69875502   +  G/T
+    etc.
+
+- output file::
+
+    #Chrom  Position   Strand  Allele  Codons   Transcript ID    Protein ID       Substitution  Region    dbSNP ID      SNP Type       Prediction  Score  Median Info  Num seqs at position  User Comment
+    chr3    81780820   +       T/C     AGA-gGA  ENST00000264326  ENSP00000264326  R190G         EXON CDS  rs2229519:C   Nonsynonymous  DAMAGING    0.04   3.06         149
+    chr2    230341630  +       G/T     -        ENST00000389045  ENSP00000373697  NA            EXON CDS  rs1803846:A   Unknown        Not scored  NA     NA           NA
+    chr2    43881517   +       A/T     ATA-tTA  ENST00000260605  ENSP00000260605  I230L         EXON CDS  rs11556157:T  Nonsynonymous  TOLERATED   0.47   3.19         7
+    chr2    43857514   +       T/C     TTT-TcT  ENST00000260605  ENSP00000260605  F33S          EXON CDS  rs2288709:C   Nonsynonymous  TOLERATED   0.61   3.33         6
+    chr6    88375602   +       G/A     GTT-aTT  ENST00000257789  ENSP00000257789  V217I         EXON CDS  rs2307389:A   Nonsynonymous  TOLERATED   0.75   3.17         13
+    chr22   29307353   +       T/A     ACC-tCC  ENST00000335214  ENSP00000334612  T264S         EXON CDS  rs42942:A     Nonsynonymous  TOLERATED   0.4    3.14         23
+    chr10   115912482  +       C/A     CGA-CtA  ENST00000369285  ENSP00000358291  R179L         EXON CDS  rs12782946:T  Nonsynonymous  TOLERATED   0.06   4.32         2
+    chr10   115900918  +       G/A     CAA-tAA  ENST00000369287  ENSP00000358293  Q271*         EXON CDS  rs7095762:T   Nonsynonymous  N/A         N/A    N/A          N/A
+    chr16   69875502   +       G/T     ACA-AaA  ENST00000338099  ENSP00000337512  T608K         EXON CDS  rs3096381:T   Nonsynonymous  TOLERATED   0.12   3.41         3
+    etc.
+
+-----
+
+**References**
+
+Ng PC, Henikoff S. (2001) Predicting deleterious amino acid substitutions.
+Genome Res. 11(5):863-74.
+
+Ng PC, Henikoff S. (2002) Accounting for human polymorphisms predicted to affect protein function.
+Genome Res. 12(3):436-46.
+
+Ng PC, Henikoff S. (2003) SIFT: Predicting amino acid changes that affect protein function.
+Nucleic Acids Res. 31(13):3812-4.
+
+Kumar P, Henikoff S, Ng PC. (2009) Predicting the effects of coding non-synonymous variants
+on protein function using the SIFT algorithm.
+Nat Protoc. 4(7):1073-81. Epub 2009 Jun 25.
+
+  </help>
+  <citations>
+    <citation type="doi">10.1101/gr.176601</citation>
+    <citation type="doi">10.1101/gr.212802</citation>
+    <citation type="doi">10.1093/nar/gkg509</citation>
+    <citation type="doi">10.1038/nprot.2009.86</citation>
+  </citations>
+</tool>
diff --git a/tools/phenotype_association/sift_variants_wrapper.sh b/tools/phenotype_association/sift_variants_wrapper.sh
new file mode 100755
index 0000000..f08e7e0
--- /dev/null
+++ b/tools/phenotype_association/sift_variants_wrapper.sh
@@ -0,0 +1,184 @@
+#!/usr/bin/env bash
+
+input_file=$1
+output_file=$2
+org=$3
+db_loc=$4
+chrom_col=$5
+pos_col=$6
+base=$7
+allele_col=$8
+strand_col=$9
+comment_col=${10}
+output_opts=${11}
+
+working_dir=$PWD
+sift_input="$working_dir/sift_input.txt"
+sift_output="$working_dir/sift_output.txt"
+
+################################################################################
+## make sure input file column selections are mutually exclusive              ##
+################################################################################
+ERROR=0
+declare -a col_use
+
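+# check_col records in col_use which role each input column serves and
+# reports an error if the same column is selected twice.  The third argument
+# is 1 when the value must be an integer column number, 0 when a literal
+# such as "+" or "-" is also acceptable.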
+function check_col () {
+    local col=$1
+    local use=$2
+    local int=$3
+
+    if [ -n "${col//[0-9]}" ]; then
+        if [ $int -eq 1 ]; then
+            echo "ERROR: invalid value for $use column: $col" 1>&2
+            ERROR=1
+        fi
+        return
+    fi
+
+    local cur=${col_use[$col]}
+    if [ -n "$cur" ]; then
+        echo "ERROR: $use column is the same as $cur column" 1>&2
+        col_use[$col]="${cur},$use"
+        ERROR=1
+    else
+        col_use[$col]=$use
+    fi
+}
+
+check_col $chrom_col   'chromosome' 1
+check_col $pos_col     'position'   1
+check_col $allele_col  'allele'     1
+check_col $strand_col  'strand'     0
+check_col $comment_col 'comment'    0
+
+if [ $ERROR -ne 0 ]; then
+    exit 1
+fi
+
+################################################################################
+## get/check the db directory from the argument org,db_loc                    ##
+################################################################################
+db_dir=$( awk '$1 == org { print $2 }' org=$org $db_loc )
+
+if [ -z "$db_dir" ]; then
+    echo "Can't find dbkey \"$org\" in loc file \"$db_loc\"" 1>&2
+    exit 1
+fi
+
+if [ ! -d "$db_dir" ]; then
+    echo "Can't access SIFT database directory \"$db_dir\"" 1>&2
+    exit 1
+fi
+
+################################################################################
+## create input file for SIFT_exome_nssnvs.pl                                 ##
+################################################################################
+if [ ! -r "$input_file" ]; then
+    echo "Can't read input file \"$input_file\"" 1>&2
+    exit 1
+fi
+
+if [ $base -eq 0 ]; then
+    beg_col="$pos_col"
+    end_col="$pos_col + 1"
+    pos_adj='$2 = $2 - 1'
+else
+    beg_col="$pos_col - 1"
+    end_col="$pos_col"
+    pos_adj=''
+fi
+
+strand_cvt=''
+if [ \( "$strand_col" = "+" \) ]; then
+    strand='"1"'
+elif [ \( "$strand_col" = "-" \) ]; then
+    strand='"-1"'
+else
+    strand="\$$strand_col"
+    strand_cvt='if ('"${strand}"' == "+") {'"${strand}"' = "1"} else if ('"${strand}"' == "-") {'"${strand}"' = "-1"}'
+fi
+
+print_row='print $'"${chrom_col}"', $'"${beg_col}"', $'"${end_col}"', '"${strand}"', $'"${allele_col}"''
+if [ "$comment_col" != "-" ]; then
+    print_row=''"${print_row}"', $'"${comment_col}"''
+fi
+
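+# Build the comma-separated SIFT input: strip any leading "chr" from the
+# chromosome, convert +/- strand values to 1/-1, and emit
+# chrom,beg,end,strand,allele[,comment] for every input row.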
+awk '
+BEGIN {FS="\t";OFS=","}
+$'"${chrom_col}"' ~ /^[cC][hH][rR]/ {$'"${chrom_col}"' = substr($'"${chrom_col}"',4)}
+{
+    '"${strand_cvt}"'
+    '"${print_row}"'
+}
+' "$input_file" > "$sift_input"
+
+################################################################################
+## run SIFT_exome_nssnvs.pl command line program                              ##
+################################################################################
+if [ "$output_opts" = "None" ]; then
+    output_opts=""
+else
+    output_opts=$( echo "$output_opts" | sed -e 's/,/ 1 -/g' )
+    output_opts="-$output_opts 1"
+fi
+
+SIFT_exome_nssnvs.pl -i "$sift_input" -d "$db_dir" -o "$working_dir" $output_opts &> "$sift_output"
+if [ $? -ne 0 ]; then
+    echo "failed: SIFT_exome_nssnvs.pl -i \"$sift_input\" -d \"$db_dir\" -o \"$working_dir\" $output_opts"
+    exit 1
+fi
+
+################################################################################
+## locate the SIFT_exome_nssnvs.pl output file                                ##
+################################################################################
+sift_pid=$( sed -n -e 's/^.*Your job id is \([0-9][0-9]*\) and is currently running.*$/\1/p' "$sift_output" )
+
+if [ -z "$sift_pid" ]; then
+    echo "Can't find SIFT pid in \"$sift_output\"" 1>&2
+    exit 1
+fi
+
+sift_outdir="$working_dir/$sift_pid"
+if [ ! -d "$sift_outdir" ]; then
+    echo "Can't access SIFT output directory \"$sift_outdir\"" 1>&2
+    exit 1
+fi
+
+sift_outfile="$sift_outdir/${sift_pid}_predictions.tsv"
+if [ ! -r "$sift_outfile" ]; then
+    echo "Can't access SIFT output file \"$sift_outfile\"" 1>&2
+    exit 1
+fi
+
+################################################################################
+## create galaxy output file                                                  ##
+################################################################################
+awk '
+BEGIN {FS="\t";OFS="\t"}
+NR == 1 {
+    $12 = "Num seqs at position"
+    $1 = "Chrom\tPosition\tStrand\tAllele"
+    print
+}
+NR != 1 {
+    $1 = "chr" $1
+    gsub(/,/, "\t", $1)
+    print
+}
+' "$sift_outfile" | awk '
+BEGIN {FS="\t";OFS="\t"}
+NR == 1 {
+    print "#" $0
+}
+NR != 1 {
+    if ($3 == "1") {$3 = "+"} else if ($3 == "-1") {$3 = "-"}
+    '"${pos_adj}"'
+    print
+}
+' > "$output_file"
+
+################################################################################
+## cleanup                                                                    ##
+################################################################################
+rm -rf "$sift_outdir" "$sift_input" "$sift_output"
+
diff --git a/tools/phenotype_association/vcf2pgSnpMult.pl b/tools/phenotype_association/vcf2pgSnpMult.pl
new file mode 100755
index 0000000..09e0833
--- /dev/null
+++ b/tools/phenotype_association/vcf2pgSnpMult.pl
@@ -0,0 +1,81 @@
+#!/usr/bin/perl -w
+use strict;
+
+#convert from a vcf file to a pgSnp file with multiple sets of the allele
+# specific columns
+#frequency count = chromosome count
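+#each individual adds 4 columns (alleles, allele count, per-allele chromosome
+#counts, per-allele scores); e.g. an illustrative heterozygous A/G call:
+#   chr1  12344  12345  A/G  2  1,1  0,0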
+
+my $in;
+my $stCol = 9;
+my $endCol;
+if (@ARGV && scalar @ARGV == 1) {
+   $in = shift @ARGV;
+}else {
+   print "usage: vcf2pgSnpMult.pl file.vcf > file.pgSnpMult\n";
+   exit;
+}
+
+if ($in =~ /.gz$/) {
+   open(FH, "zcat $in |") or die "Couldn't open $in, $!\n";
+}else {
+   open(FH, $in) or die "Couldn't open $in, $!\n";
+}
+while (<FH>) {
+   chomp; 
+   if (/^\s*#/) { next; } #skip comments/headers
+   if (/^\s*$/) { next; } #skip blank lines
+   my @f = split(/\t/);
+   #chr pos1base ID refNt altNt[,|D#|Int] quality filter info format geno1 ...
+   my $a;
+   my %nt;
+   my %all;
+   my $cnt = 0;
+   my $var;
+   if ($f[3] eq 'N') { next; } #ignore ref=N
+   if ($f[4] =~ /[DI]/ or $f[3] =~ /[DI]/) { next; } #don't do microsatellite
+   if ($f[6] && !($f[6] eq '.' or $f[6] eq 'PASS')) { next; } #filtered for some reason
+   my $ind = 0;
+   if ($f[8] ne 'GT') { #more than just genotype
+      my @t = split(/:/, $f[8]);
+      foreach (@t) { if ($_ eq 'GT') { last; } $ind++; }
+      if ($ind == 0 && $f[8] !~ /^GT/) { die "ERROR couldn't find genotype in format $f[8]\n"; }
+   }
+   if (!$endCol) { $endCol = $#f; }
+   #put f[3] => nt{0} and split f[4] for rest of nt{}
+   $nt{0} = $f[3];
+   my @t = split(/,/, $f[4]);
+   for (my $i=0; $i<=$#t; $i++) {
+      my $j = $i + 1;
+      $nt{$j} = $t[$i];
+   }
+   if ($f[0] !~ /chr/) { $f[0] = "chr$f[0]"; }
+   print "$f[0]\t", ($f[1]-1), "\t$f[1]"; #position info
+   foreach my $col ($stCol .. $endCol) {  #add each individual (4 columns)
+      if ($ind > 0) { 
+         my @t = split(/:/, $f[$col]);
+         $f[$col] = $t[$ind] . ":"; #only keep genotype part
+      }
+      print "\t";
+      if ($f[$col] =~ /^(\d).(\d)/) {
+          my $a1 = $1;
+          my $a2 = $2;
+          if (!exists $nt{$a1}) { die "ERROR bad allele $a1 in $f[3] $f[4]\n"; }
+          if (!exists $nt{$a2}) { die "ERROR bad allele $a2 in $f[3] $f[4]\n"; }
+          if ($a1 eq $a2) { #homozygous
+             print "$nt{$a1}\t1\t2\t0"; 
+          }else { #heterozygous
+             print "$nt{$a1}/$nt{$a2}\t2\t1,1\t0,0";
+          } 
+      }elsif ($f[$col] =~ /^(\d):/) { #chrY or male chrX, single
+          my $a1 = $1;
+          if (!exists $nt{$a1}) { die "ERROR bad allele $a1 in $f[3] $f[4]\n"; }
+          print "$nt{$a1}\t1\t1\t0";
+      }else { #don't know how to parse
+          die "ERROR unknown genotype $f[$col]\n";
+      }
+   }
+   print "\n"; #end this SNP
+}
+close FH;
+
+exit;
diff --git a/tools/plotting/bar_chart.py b/tools/plotting/bar_chart.py
new file mode 100644
index 0000000..dd64535
--- /dev/null
+++ b/tools/plotting/bar_chart.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+"""
+histogram_gnuplot.py <datafile> <xtic column> <column_list> <title> <ylabel> <yrange_min> <yrange_max> <grath_file>
+a generic histogram builder based on gnuplot backend
+
+   data_file    - tab delimited file with data
+   xtic_column  - column containing labels for x ticks [integer, 0 means no ticks]
+   column_list  - comma separated list of columns to plot
+   title        - title for the entire histrogram
+   ylabel       - y axis label
+   yrange_max   - minimal value at the y axis (integer)
+   yrange_max   - maximal value at the y_axis (integer)
+                  to set yrange to autoscaling assign 0 to yrange_min and yrange_max
+   graph_file   - file to write histogram image to
+   img_size     - as X,Y pair in pixels (e.g., 800,600 or 600,800 etc.)
+
+
+   This tool required gnuplot and gnuplot.py
+
+anton nekrutenko | anton at bx.psu.edu
+"""
+
+import sys
+import tempfile
+
+import Gnuplot
+import Gnuplot.funcutils
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def stop_err(msg):
+    sys.stderr.write(msg)
+    sys.exit()
+
+
+def main(tmpFileName):
+    skipped_lines_count = 0
+    skipped_lines_index = []
+    gf = open(tmpFileName, 'w')
+
+    try:
+        in_file = open( sys.argv[1], 'r' )
+        xtic = int( sys.argv[2] )
+        col_list = sys.argv[3].split( "," )
+        title = 'set title "' + sys.argv[4] + '"'
+        ylabel = 'set ylabel "' + sys.argv[5] + '"'
+        ymin = sys.argv[6]
+        ymax = sys.argv[7]
+        img_file = sys.argv[8]
+        img_size = sys.argv[9]
+    except:
+        stop_err("Check arguments\n")
+
+    try:
+        int( col_list[0] )
+    except:
+        stop_err('You forgot to set columns for plotting\n')
+
+    for i, line in enumerate( in_file ):
+        valid = True
+        line = line.rstrip('\r\n')
+        if line and not line.startswith( '#' ):
+            row = []
+            try:
+                fields = line.split( '\t' )
+                for col in col_list:
+                    row.append( str( float( fields[int( col ) - 1] ) ) )
+            except:
+                valid = False
+                skipped_lines_count += 1
+                skipped_lines_index.append(i)
+        else:
+            valid = False
+            skipped_lines_count += 1
+            skipped_lines_index.append(i)
+
+        if valid and xtic > 0:
+            row.append( fields[xtic - 1] )
+        elif valid and xtic == 0:
+            row.append( str( i ) )
+
+        if valid:
+            gf.write( '\t'.join( row ) )
+            gf.write( '\n' )
+
+    if skipped_lines_count < i:
+        # Prepare 'using' clause of plot statement
+        g_plot_command = ' '
+
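+        # the finished command looks like (illustrative):
+        # 'tmp' using 1:xticlabels(3) ti 'Column 2', 'tmp' using 2 t 'Column 5'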
+        # Set the first column
+        if xtic > 0:
+            g_plot_command = "'%s' using 1:xticlabels(%s) ti 'Column %s', " % ( tmpFileName, str( len( row ) ), col_list[0] )
+        else:
+            g_plot_command = "'%s' using 1 ti 'Column %s', " % ( tmpFileName, col_list[0] )
+
+        # Set subsequent columns
+        for i in range(1, len(col_list)):
+            g_plot_command += "'%s' using %s t 'Column %s', " % ( tmpFileName, str(i + 1), col_list[i] )
+
+        g_plot_command = g_plot_command.rstrip( ', ' )
+
+        yrange = 'set yrange [' + ymin + ":" + ymax + ']'
+
+        try:
+            g = Gnuplot.Gnuplot()
+            g('reset')
+            g('set boxwidth 0.9 absolute')
+            g('set style fill  solid 1.00 border -1')
+            g('set style histogram clustered gap 5 title  offset character 0, 0, 0')
+            g('set xtics border in scale 1,0.5 nomirror rotate by 90 offset character 0, 0, 0')
+            g('set key invert reverse Left outside')
+            if xtic == 0:
+                g('unset xtics')
+            g(title)
+            g(ylabel)
+            g_term = 'set terminal png tiny size ' + img_size
+            g(g_term)
+            g_out = 'set output "' + img_file + '"'
+            if ymin != ymax:
+                g(yrange)
+            g(g_out)
+            g('set style data histograms')
+            g.plot(g_plot_command)
+        except:
+            stop_err("Gnuplot error: Data cannot be plotted")
+    else:
+        sys.stderr.write('Column(s) %s of your dataset do not contain valid numeric data' % sys.argv[3])
+
+    if skipped_lines_count > 0:
+        sys.stdout.write('\nWARNING: Your dataset contains %d invalid lines starting with line #%d.  These lines were skipped while building the graph.\n' % ( skipped_lines_count, skipped_lines_index[0] + 1 ) )
+
+
+if __name__ == "__main__":
+    # The tempfile initialization is here because while inside the main() it seems to create a condition
+    # when the file is removed before gnuplot has a chance of accessing it
+    gp_data_file = tempfile.NamedTemporaryFile('w')
+    Gnuplot.gp.GnuplotOpts.default_term = 'png'
+    main(gp_data_file.name)
diff --git a/tools/plotting/bar_chart.xml b/tools/plotting/bar_chart.xml
new file mode 100644
index 0000000..d5f86bc
--- /dev/null
+++ b/tools/plotting/bar_chart.xml
@@ -0,0 +1,58 @@
+<tool id="barchart_gnuplot" name="Bar chart" version="1.0.0">
+  <description>for multiple columns</description>
+  <command interpreter="python">
+    #if $xtic.userSpecified == "Yes" #bar_chart.py $input $xtic.xticColumn $colList "$title" "$ylabel" $ymin $ymax $out_file1 "$pdf_size"
+    #else                            #bar_chart.py $input 0 $colList "$title" "$ylabel" $ymin $ymax $out_file1 "$pdf_size"
+    #end if
+  </command>
+  <inputs>
+    <param name="input" type="data" format="tabular" label="Dataset" help="Dataset missing? See TIP below"/>
+    <conditional name="xtic">
+        <param name="userSpecified" type="select" label="Use X Tick labels?" help="see example below">
+            <option value="Yes">Yes</option>
+            <option value="No">No</option>
+        </param>
+        <when value="Yes">
+            <param name="xticColumn" type="data_column" data_ref="input" numerical="False" label="Use this column for X Tick labels" />
+        </when>
+        <when value="No">
+        </when>
+    </conditional>
+    <param name="colList" label="Numerical columns" type="data_column" numerical="True" multiple="True" data_ref="input" help="Multi-select list - hold the appropriate key while clicking to select multiple columns" />
+    <param name="title" type="text" size="30" value="Bar Chart" label="Plot title"/>
+    <param name="ylabel" type="text" size="30" value="V1" label="Label for Y axis"/>
+    <param name="ymin" type="integer" size="4" value="0" label="Minimal value on Y axis" help="set to 0 for autoscaling"/>
+    <param name="ymax" type="integer" size="4" value="0" label="Maximal value on Y axis" help="set to 0 for autoscaling"/>
+    <param name="pdf_size" type="select" label="Choose chart size (pixels)">
+        <option value="800,600">Normal: 800 by 600</option>
+        <option value="640,480">Small: 640 by 480</option>
+        <option value="1480,800">Large: 1480 by 800</option>
+        <option value="600,800">Normal Flipped: 600 by 800</option>
+        <option value="480,640">Small Flipped: 480 by 640</option>
+        <option value="800,1480">Large Flipped: 800 by 1480</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="png" name="out_file1" />
+  </outputs>
+  <requirements>
+    <requirement type="python-module">Gnuplot</requirement>
+    <requirement type="python-module">Numeric</requirement>
+  </requirements>
+  <help>
+**What it does**
+
+This tool builds a bar chart on one or more columns. Suppose you have dataset like this one::
+
+  Gene1	10	15
+  Gene2	20	14
+  Gene3	67	45
+  Gene4	55	12
+
+Graphing columns 2 and 3 while using column 1 for X Tick Labels will produce the following plot:
+
+.. image:: ${static_path}/images/bar_chart.png
+   :height: 324
+   :width: 540
+</help>
+</tool>
diff --git a/tools/plotting/boxplot.xml b/tools/plotting/boxplot.xml
new file mode 100644
index 0000000..38fc474
--- /dev/null
+++ b/tools/plotting/boxplot.xml
@@ -0,0 +1,111 @@
+<tool id="qual_stats_boxplot" name="Boxplot" version="1.0.0">
+  <description>of quality statistics</description>
+  <command>gnuplot < '$gnuplot_commands' 2>&1 || echo "Error running gnuplot." >&2</command>
+  <requirements>
+    <requirement type="package" version="4.6">gnuplot</requirement>
+  </requirements>
+  <inputs>
+    <param name="input_file" type="data" format="tabular" label="Quality Statistics File"/>
+    <param name="title" type="text" value="Box plot in Galaxy" label="Title for plot" size="50"/>
+    <param name="graph_size" type="text" value="2048,768" label="Dimensions of Graph"/>
+    <param name="xlabel" type="text" value="X Axis Label" label="X axis label" size="50"/>
+    <param name="ylabel" type="text" value="Score Value" label="Y axis label" size="50"/>
+    <param name="xcol" type="data_column" data_ref="input_file" label="Column for X axis position" default_value="1" help="A unique number; c1 if plotting output of FASTQ summary"/>
+    <param name="q1col" type="data_column" data_ref="input_file" label="Column for Q1" default_value="7" help="c7 if plotting output of FASTQ summary"/>
+    <param name="medcol" type="data_column" data_ref="input_file" label="Column for Median" default_value="8" help="c8 if plotting output of FASTQ summary"/>
+    <param name="q3col" type="data_column" data_ref="input_file" label="Column for Q3" default_value="9" help="c9 if plotting output of FASTQ summary"/>
+    <param name="lwcol" type="data_column" data_ref="input_file" label="Column for left whisker" default_value="11" help="c11 if plotting output of FASTQ summary"/>
+    <param name="rwcol" type="data_column" data_ref="input_file" label="Column for right whisker" default_value="12" help="c12 if plotting output of FASTQ summary"/>
+    <conditional name="use_outliers">
+      <param name="use_outliers_type" type="select" label="Plot Outliers">
+        <option value="use_outliers" selected="true">Plot Outliers</option>
+        <option value="dont_use_outliers">Don't Plot Outliers</option>
+      </param>
+      <when value="use_outliers">
+        <param name="outliercol" type="data_column" data_ref="input_file" label="Column for Outliers" default_value="13" help="c13 if plotting output of FASTQ summary"/>
+      </when>
+      <when value="dont_use_outliers">
+      </when>
+    </conditional>
+  </inputs>
+  <configfiles>
+    <configfile name="gnuplot_commands">
+set output '$output_file'
+set term png size ${graph_size}
+set boxwidth 0.8 
+set key right tmargin
+set xlabel "${xlabel}"
+set ylabel "${ylabel}"
+set title  "${title}"
+set xtics 1 
+set ytics 1
+set grid ytics
+set offsets 1, 1, 1, 1
+plot '${input_file}' using ${xcol}:${q1col}:${lwcol}:${rwcol}:${q3col} with candlesticks lt 1  lw 1 title 'Quartiles' whiskerbars, \
+      ''         using ${xcol}:${medcol}:${medcol}:${medcol}:${medcol} with candlesticks lt -1 lw 2 title 'Medians'\
+#if str( $use_outliers['use_outliers_type'] ) == 'use_outliers':
+,      "< python -c \"for xval, yvals in [ ( fields[${xcol} - 1], fields[${use_outliers['outliercol']} - 1].split( ',' ) ) for fields in [ line.rstrip( '\\n\\r' ).split( '\\t' ) for line in open( '${input_file}' ) if not line.startswith( '#' ) ] if len( fields ) > max( ${xcol} - 1, ${use_outliers['outliercol']} - 1 ) ]: print '\\n'.join( [ '%s\\t%s' % ( xval, yval ) for yval in yvals if yval ] )\"" using 1:2 with points pt 29 title 'Outliers'
+#end if
+    </configfile>
+  </configfiles>
+  <outputs>
+    <data name="output_file" format="png" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input_file" value="fastq_stats_1_out.tabular" ftype="tabular" />
+      <param name="title" value="Boxplot of Summary Statistics for Sanger Reads" />
+      <param name="graph_size" value="2048,768" />
+      <param name="xlabel" value="Read Column" />
+      <param name="ylabel" value="Quality Score Value" />
+      <param name="xcol" value="1" />
+      <param name="q1col" value="7" />
+      <param name="medcol" value="8" />
+      <param name="q3col" value="9" />
+      <param name="lwcol" value="11" />
+      <param name="rwcol" value="12" />
+      <param name="use_outliers_type" value="use_outliers" />
+      <param name="outliercol" value="13" />
+      <output name="output_file" file="boxplot_summary_statistics_out.png" />
+    </test>
+  </tests>
+  <help>
+
+**What it does**
+
+Creates a boxplot graph. Its main purpose is to display a distribution of quality scores produced by the *NGS: QC and manipulation -> FASTQ Summary Statistics* tool.
+
+.. class:: warningmark
+
+**TIP:** If you want to display a distribution of quality scores produced by *NGS: QC and manipulation -> FASTQ Summary Statistics* and the column assignments within the tool's interface are not automatically set (they will all read "c1" in that case), set columns manually to the following values::
+
+  Column for X axis           c1
+  Column for Q1               c7
+  Column for Median           c8
+  Column for Q3               c9
+  Column for left whisker     c11
+  Column for right whisker    c12
+  Column for Outliers         c13
+
+-----
+
+**Output Example**
+
+* Black horizontal lines are medians
+* Rectangular red boxes show the Inter-quartile Range (IQR) (top value is Q3, bottom value is Q1)
+* Whiskers show outliers at max. 1.5*IQR
+
+.. image:: ${static_path}/images/solid_qual.png
+
+------
+
+**Citation**
+
+If you use this tool, please cite `Blankenberg D, Gordon A, Von Kuster G, Coraor N, Taylor J, Nekrutenko A; Galaxy Team. Manipulation of FASTQ data with Galaxy. Bioinformatics. 2010 Jul 15;26(14):1783-5. <http://www.ncbi.nlm.nih.gov/pubmed/20562416>`_
+
+
+  </help>
+  <citations>
+    <citation type="doi">10.1093/bioinformatics/btq281</citation>
+  </citations>
+</tool>
diff --git a/tools/solid_tools/maq_cs_wrapper.py b/tools/solid_tools/maq_cs_wrapper.py
new file mode 100644
index 0000000..1a8917e
--- /dev/null
+++ b/tools/solid_tools/maq_cs_wrapper.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python
+# Guruprasad Ananda
+# MAQ mapper for SOLiD colourspace-reads
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+import tempfile
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def __main__():
+    out_fname = sys.argv[1].strip()
+    out_f2 = open(sys.argv[2].strip(), 'r+')
+    ref_fname = sys.argv[3].strip()
+    f3_read_fname = sys.argv[4].strip()
+    f3_qual_fname = sys.argv[5].strip()
+    paired = sys.argv[6]
+    if paired == 'yes':
+        r3_read_fname = sys.argv[7].strip()
+        r3_qual_fname = sys.argv[8].strip()
+    min_mapqual = int(sys.argv[9].strip())
+    max_mismatch = int(sys.argv[10].strip())
+    out_f3name = sys.argv[11].strip()
+    subprocess_dict = {}
+
+    ref_csfa = tempfile.NamedTemporaryFile()
+    ref_bfa = tempfile.NamedTemporaryFile()
+    ref_csbfa = tempfile.NamedTemporaryFile()
+    cmd2_1 = 'maq fasta2csfa %s > %s 2>&1' % (ref_fname, ref_csfa.name)
+    cmd2_2 = 'maq fasta2bfa %s %s 2>&1' % (ref_csfa.name, ref_csbfa.name)
+    cmd2_3 = 'maq fasta2bfa %s %s 2>&1' % (ref_fname, ref_bfa.name)
+    try:
+        os.system(cmd2_1)
+        os.system(cmd2_2)
+        os.system(cmd2_3)
+    except Exception as erf:
+        stop_err(str(erf) + "Error processing reference sequence")
+
+    if paired == 'yes':  # paired end reads
+        tmpf = tempfile.NamedTemporaryFile()  # forward reads
+        tmpr = tempfile.NamedTemporaryFile()  # reverse reads
+        tmps = tempfile.NamedTemporaryFile()  # single reads
+        tmpffastq = tempfile.NamedTemporaryFile()
+        tmprfastq = tempfile.NamedTemporaryFile()
+        tmpsfastq = tempfile.NamedTemporaryFile()
+
+        cmd1 = "solid2fastq_modified.pl 'yes' %s %s %s %s %s %s %s 2>&1" % (tmpf.name, tmpr.name, tmps.name, f3_read_fname, f3_qual_fname, r3_read_fname, r3_qual_fname)
+        try:
+            os.system(cmd1)
+            os.system('gunzip -c %s >> %s' % (tmpf.name, tmpffastq.name))
+            os.system('gunzip -c %s >> %s' % (tmpr.name, tmprfastq.name))
+            os.system('gunzip -c %s >> %s' % (tmps.name, tmpsfastq.name))
+
+        except Exception as eq:
+            stop_err("Error converting data to fastq format." + str(eq))
+
+        # Make a temp directory where the split fastq files will be stored
+        try:
+            split_dir = tempfile.mkdtemp()
+            split_file_prefix_f = tempfile.mktemp(dir=split_dir)
+            split_file_prefix_r = tempfile.mktemp(dir=split_dir)
+            splitcmd_f = 'split -a 2 -l %d %s %s' % (32000000, tmpffastq.name, split_file_prefix_f)  # 32M lines correspond to 8M reads
+            splitcmd_r = 'split -a 2 -l %d %s %s' % (32000000, tmprfastq.name, split_file_prefix_r)  # 32M lines correspond to 8M reads
+
+            os.system(splitcmd_f)
+            os.system(splitcmd_r)
+            os.chdir(split_dir)
+            ii = 0
+            for fastq in os.listdir(split_dir):
+                if not fastq.startswith(split_file_prefix_f.split("/")[-1]):
+                    continue
+                fastq_r = split_file_prefix_r + fastq.split(split_file_prefix_f.split("/")[-1])[1]  # find the reverse strand fastq corresponding to forward strand fastq
+                tmpbfq_f = tempfile.NamedTemporaryFile()
+                tmpbfq_r = tempfile.NamedTemporaryFile()
+                cmd3 = 'maq fastq2bfq %s %s 2>&1; maq fastq2bfq %s %s 2>&1; maq map -c %s.csmap %s %s %s 1>/dev/null 2>&1; maq mapview %s.csmap > %s.txt' % (fastq, tmpbfq_f.name, fastq_r, tmpbfq_r.name, fastq, ref_csbfa.name, tmpbfq_f.name, tmpbfq_r.name, fastq, fastq)
+                subprocess_dict['sp' + str(ii + 1)] = subprocess.Popen([cmd3], shell=True, stdout=subprocess.PIPE)
+                ii += 1
+            while True:
+                all_done = True
+                for j, k in enumerate(subprocess_dict.keys()):
+                    if subprocess_dict['sp' + str(j + 1)].wait() != 0:
+                        err = subprocess_dict['sp' + str(j + 1)].communicate()[1]
+                        if err is not None:
+                            stop_err("Mapping error: %s" % err)
+                        all_done = False
+                if all_done:
+                    break
+            cmdout = "for map in *.txt; do cat $map >> %s; done" % (out_fname)
+            os.system(cmdout)
+
+            tmpcsmap = tempfile.NamedTemporaryFile()
+            cmd_cat_csmap = "for csmap in *.csmap; do cat $csmap >> %s; done" % (tmpcsmap.name)
+            os.system(cmd_cat_csmap)
+
+            tmppileup = tempfile.NamedTemporaryFile()
+            cmdpileup = "maq pileup -m %s -q %s %s %s > %s" % (max_mismatch, min_mapqual, ref_bfa.name, tmpcsmap.name, tmppileup.name)
+            os.system(cmdpileup)
+            tmppileup.seek(0)
+            print("#chr\tposition\tref_nt\tcoverage\tSNP_count\tA_count\tT_count\tG_count\tC_count", file=out_f2)
+            for line in open(tmppileup.name):
+                elems = line.strip().split()
+                ref_nt = elems[2].capitalize()
+                read_nt = elems[4]
+                coverage = int(elems[3])
+                a, t, g, c = 0, 0, 0, 0
+                ref_nt_count = 0
+                for ch in read_nt:
+                    ch = ch.capitalize()
+                    if ch not in ['A', 'T', 'G', 'C', ',', '.']:
+                        continue
+                    if ch in [',', '.']:
+                        ch = ref_nt
+                        ref_nt_count += 1
+                    try:
+                        nt_ind = ['A', 'T', 'G', 'C'].index(ch)
+                        if nt_ind == 0:
+                            a += 1
+                        elif nt_ind == 1:
+                            t += 1
+                        elif nt_ind == 2:
+                            g += 1
+                        else:
+                            c += 1
+                    except ValueError as we:
+                        print(we, file=sys.stderr)
+                print("%s\t%s\t%s\t%s\t%s\t%s" % ("\t".join(elems[:4]), coverage - ref_nt_count, a, t, g, c), file=out_f2)
+        except Exception as er2:
+            stop_err("Encountered error while mapping: %s" % (str(er2)))
+
+    else:  # single end reads
+        tmpf = tempfile.NamedTemporaryFile()
+        tmpfastq = tempfile.NamedTemporaryFile()
+        cmd1 = "solid2fastq_modified.pl 'no' %s %s %s %s %s %s %s 2>&1" % (tmpf.name, None, None, f3_read_fname, f3_qual_fname, None, None)
+        try:
+            os.system(cmd1)
+            os.system('gunzip -c %s >> %s' % (tmpf.name, tmpfastq.name))
+            tmpf.close()
+        except:
+            stop_err("Error converting data to fastq format.")
+
+        # Make a temp directory where the split fastq files will be stored
+        try:
+            split_dir = tempfile.mkdtemp()
+            split_file_prefix = tempfile.mktemp(dir=split_dir)
+            splitcmd = 'split -a 2 -l %d %s %s' % (32000000, tmpfastq.name, split_file_prefix)  # 32M lines correspond to 8M reads
+            os.system(splitcmd)
+            os.chdir(split_dir)
+            for i, fastq in enumerate(os.listdir(split_dir)):
+                tmpbfq = tempfile.NamedTemporaryFile()
+                cmd3 = 'maq fastq2bfq %s %s 2>&1; maq map -c %s.csmap %s %s  1>/dev/null 2>&1; maq mapview %s.csmap > %s.txt' % (fastq, tmpbfq.name, fastq, ref_csbfa.name, tmpbfq.name, fastq, fastq)
+                subprocess_dict['sp' + str(i + 1)] = subprocess.Popen([cmd3], shell=True, stdout=subprocess.PIPE)
+
+            while True:
+                all_done = True
+                for j, k in enumerate(subprocess_dict.keys()):
+                    if subprocess_dict['sp' + str(j + 1)].wait() != 0:
+                        err = subprocess_dict['sp' + str(j + 1)].communicate()[1]
+                        if err is not None:
+                            stop_err("Mapping error: %s" % err)
+                        all_done = False
+                if all_done:
+                    break
+
+            cmdout = "for map in *.txt; do cat $map >> %s; done" % (out_fname)
+            os.system(cmdout)
+
+            tmpcsmap = tempfile.NamedTemporaryFile()
+            cmd_cat_csmap = "for csmap in *.csmap; do cat $csmap >> %s; done" % (tmpcsmap.name)
+            os.system(cmd_cat_csmap)
+
+            tmppileup = tempfile.NamedTemporaryFile()
+            cmdpileup = "maq pileup -m %s -q %s %s %s > %s" % (max_mismatch, min_mapqual, ref_bfa.name, tmpcsmap.name, tmppileup.name)
+            os.system(cmdpileup)
+            tmppileup.seek(0)
+            print("#chr\tposition\tref_nt\tcoverage\tSNP_count\tA_count\tT_count\tG_count\tC_count", file=out_f2)
+            for line in open(tmppileup.name):
+                elems = line.strip().split()
+                ref_nt = elems[2].capitalize()
+                read_nt = elems[4]
+                coverage = int(elems[3])
+                a, t, g, c = 0, 0, 0, 0
+                ref_nt_count = 0
+                for ch in read_nt:
+                    ch = ch.capitalize()
+                    if ch not in ['A', 'T', 'G', 'C', ',', '.']:
+                        continue
+                    if ch in [',', '.']:
+                        ch = ref_nt
+                        ref_nt_count += 1
+                    try:
+                        nt_ind = ['A', 'T', 'G', 'C'].index(ch)
+                        if nt_ind == 0:
+                            a += 1
+                        elif nt_ind == 1:
+                            t += 1
+                        elif nt_ind == 2:
+                            g += 1
+                        else:
+                            c += 1
+                    except ValueError:
+                        pass
+                print("%s\t%s\t%s\t%s\t%s\t%s" % ("\t".join(elems[:4]), coverage - ref_nt_count, a, t, g, c), file=out_f2)
+        except Exception as er2:
+            stop_err("Encountered error while mapping: %s" % (str(er2)))
+
+    # Build custom track from pileup
+    chr_list = []
+    out_f2.seek(0)
+    fcov = tempfile.NamedTemporaryFile()
+    fout_a = tempfile.NamedTemporaryFile()
+    fout_t = tempfile.NamedTemporaryFile()
+    fout_g = tempfile.NamedTemporaryFile()
+    fout_c = tempfile.NamedTemporaryFile()
+    fcov.write('''track type=wiggle_0 name="Coverage track" description="Coverage track (from Galaxy)" color=0,0,0 visibility=2\n''')
+    fout_a.write('''track type=wiggle_0 name="Track A" description="Track A (from Galaxy)" color=255,0,0 visibility=2\n''')
+    fout_t.write('''track type=wiggle_0 name="Track T" description="Track T (from Galaxy)" color=0,255,0 visibility=2\n''')
+    fout_g.write('''track type=wiggle_0 name="Track G" description="Track G (from Galaxy)" color=0,0,255 visibility=2\n''')
+    fout_c.write('''track type=wiggle_0 name="Track C" description="Track C (from Galaxy)" color=255,0,255 visibility=2\n''')
+
+    for line in out_f2:
+        if line.startswith("#"):
+            continue
+        elems = line.split()
+        chr = elems[0]
+
+        if chr not in chr_list:
+            chr_list.append(chr)
+            if not (chr.startswith('chr') or chr.startswith('scaffold')):
+                chr = 'chr' + chr  # prefix bare names so the track header uses a UCSC-style chromosome name
+            header = "variableStep chrom=%s" % (chr)
+            fcov.write("%s\n" % (header))
+            fout_a.write("%s\n" % (header))
+            fout_t.write("%s\n" % (header))
+            fout_g.write("%s\n" % (header))
+            fout_c.write("%s\n" % (header))
+        try:
+            pos = int(elems[1])
+            cov = int(elems[3])
+            a = int(elems[5])
+            t = int(elems[6])
+            g = int(elems[7])
+            c = int(elems[8])
+        except (ValueError, IndexError):
+            continue
+        fcov.write("%s\t%s\n" % (pos, cov))
+        try:
+            a_freq = a * 100. / cov
+            t_freq = t * 100. / cov
+            g_freq = g * 100. / cov
+            c_freq = c * 100. / cov
+        except ZeroDivisionError:
+            a_freq = t_freq = g_freq = c_freq = 0
+        fout_a.write("%s\t%s\n" % (pos, a_freq))
+        fout_t.write("%s\t%s\n" % (pos, t_freq))
+        fout_g.write("%s\t%s\n" % (pos, g_freq))
+        fout_c.write("%s\t%s\n" % (pos, c_freq))
+
+    fcov.seek(0)
+    fout_a.seek(0)
+    fout_g.seek(0)
+    fout_t.seek(0)
+    fout_c.seek(0)
+    os.system("cat %s %s %s %s %s | cat > %s" % (fcov.name, fout_a.name, fout_t.name, fout_g.name, fout_c.name, out_f3name))
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/solid_tools/maq_cs_wrapper.xml b/tools/solid_tools/maq_cs_wrapper.xml
new file mode 100644
index 0000000..8fe3fb7
--- /dev/null
+++ b/tools/solid_tools/maq_cs_wrapper.xml
@@ -0,0 +1,120 @@
+<tool id="maq_cs_wrapper" name="MAQ for SOLiD" version="1.0.0">
+    <description> </description>
+    <command interpreter="python">
+    maq_cs_wrapper.py 
+    $output1 
+    $output2 
+    $ref 
+    $library_type.f3_reads 
+    $library_type.f3_qual 
+    $library_type.is_paired
+    #if $library_type.is_paired == "yes":  
+     $library_type.r3_reads 
+     $library_type.r3_qual 
+    #else:
+     "None"
+     "None"
+    #end if
+    $min_mapqual
+    $max_mismatch
+    $output3
+    
+    </command>
+
+    <inputs>
+        <param name="ref" type="data" format="fasta" label="Target Genome"/> 
+        <conditional name="library_type">
+          <param name="is_paired" type="select" label="Is the library mate-paired?" multiple="false">
+             <option value="no">No</option>
+             <option value="yes">Yes</option>
+         </param>
+         <when value="no">
+           <param name="f3_reads" type="data" format="csfasta" label="F3 reads file"/> 
+           <param format="qualsolid" name="f3_qual" type="data" label="F3 quality file" help="If your dataset doesn't show up in the menu, click the pencil icon next to your dataset and set the datatype to 'qualsolid'" /> 
+          </when>
+          <when value="yes">
+           <param name="f3_reads" type="data" format="csfasta" label="F3 reads file"/> 
+           <param format="qualsolid" name="f3_qual" type="data" label="F3 quality file" help="If your dataset doesn't show up in the menu, click the pencil icon next to your dataset and set the datatype to 'qualsolid'" /> 
+           <param name="r3_reads" type="data" format="csfasta" label="R3 reads file"/> 
+           <param format="qualsolid" name="r3_qual" type="data" label="R3 quality file" help="If your dataset doesn't show up in the menu, click the pencil icon next to your dataset and set the datatype to 'qualsolid'" /> 
+          </when>
+      </conditional>
+      <param name="min_mapqual" type="integer" size="3" value="0" label="Minimum mapping quality allowed for a read to be used" help="Reads below the specified mapping quality will not be considered in coverage and SNP analysis."/> 
+      <param name="max_mismatch" type="integer" size="3" value="7" label="Maximum number of mismatches allowed for a read to be used" help="Reads above the specified threshold will not be considered in coverage and SNP analysis."/> 
+    </inputs>
+    <outputs>
+        <data format="tabular" name="output1" metadata_source="ref" />
+        <data format="tabular" name="output2" metadata_source="ref" />
+        <data format="customtrack" name="output3" metadata_source="ref" />
+    </outputs>
+    
+    <!--  "ToolTestCase does not deal with multiple outputs properly yet."
+    <tests>
+        
+        <test>
+            <param name="ref" value="phiX_mod.fasta" />
+            <param name="is_paired" value="no" />
+            <param name="f3_reads" value="phiX_solid.csfasta" />
+            <param name="f3_qual" value="phiX_solid.qualsolid" />
+            <param name="min_mapqual" value="0" />
+            <param name="max_mismatch" value="7" />
+            <output name="output1" file="phiX_solid_maq.map" />
+            <output name="output2" file="phiX_solid_maq.pileup" />
+            <output name="output3" file="phiX_solid_maq.ctrack" />
+            
+        </test>
+    </tests>
+    -->
+<help>
+
+.. class:: infomark
+
+**What it does**
+
+This tool maps SOLiD color-space reads against the target genome using MAQ. It produces three output datasets: 
+
+
+**ALIGNMENT INFO** : contains the read alignment information, 
+
+**PILEUP** : contains the coverage and SNP statistics for every nucleotide of the target genome,
+
+**CUSTOM TRACK** : contains the coverage and SNP statistics as custom tracks displayable in the UCSC browser. 
+
+-----
+
+**The ALIGNMENT INFO dataset will contain the following fields:**
+
+* column 1  = read name
+* column 2  = chromosome
+* column 3  = position
+* column 4  = strand
+* column 5  = insert size from the outer coordinates of a pair
+* column 6  = paired flag
+* column 7  = mapping quality
+* column 8  = single-end mapping quality
+* column 9  = alternative mapping quality
+* column 10 = number of mismatches of the best hit
+* column 11 = sum of qualities of mismatched bases of the best hit
+* column 12 = number of 0-mismatch hits of the first 24bp
+* column 13 = number of 1-mismatch hits of the first 24bp on the reference
+* column 14 = length of the read
+* column 15 = read sequence
+* column 16 = read quality
+
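+As a minimal sketch of consuming this dataset downstream (the file name and quality threshold are hypothetical; column 7 becomes field index 6 once a line is split), reads below a chosen mapping quality could be dropped like so::
+
+    import sys
+
+    MIN_QUAL = 20  # hypothetical cutoff
+    for line in open("alignment_info.tabular"):  # hypothetical file name
+        fields = line.rstrip("\n").split("\t")
+        if int(fields[6]) >= MIN_QUAL:  # column 7 = mapping quality
+            sys.stdout.write(line)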
+
+**The PILEUP dataset will contain the following fields:**
+
+* column 1  = chromosome
+* column 2  = position
+* column 3  = reference nucleotide
+* column 4  = coverage (number of reads that cover this position)
+* column 5  = number of SNPs
+* column 6  = number of As
+* column 7  = number of Ts
+* column 8  = number of Gs
+* column 9  = number of Cs
+
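+A short, illustrative sketch of reading this dataset (the input file name is hypothetical) to report the fraction of SNP-supporting reads at each covered position::
+
+    for line in open("pileup.tabular"):  # hypothetical file name
+        if line.startswith("#"):
+            continue  # skip the header line
+        chrom, pos, ref_nt, coverage, snps = line.split("\t")[:5]
+        if int(coverage) > 0:
+            print("%s:%s\t%.3f" % (chrom, pos, int(snps) / float(coverage)))
+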
+</help>
+<code file="maq_cs_wrapper_code.py"/>
+
+</tool>
diff --git a/tools/solid_tools/maq_cs_wrapper_code.py b/tools/solid_tools/maq_cs_wrapper_code.py
new file mode 100644
index 0000000..7a0a7e7
--- /dev/null
+++ b/tools/solid_tools/maq_cs_wrapper_code.py
@@ -0,0 +1,4 @@
+def exec_before_job(app, inp_data, out_data, param_dict, tool):
+    out_data['output1'].name = out_data['output1'].name + " [ ALIGNMENT INFO ]"
+    out_data['output2'].name = out_data['output2'].name + " [ PILEUP ]"
+    out_data['output3'].name = out_data['output3'].name + " [ CUSTOM TRACK ]"
diff --git a/tools/solid_tools/qualsolid_boxplot_graph.sh b/tools/solid_tools/qualsolid_boxplot_graph.sh
new file mode 100755
index 0000000..887d46b
--- /dev/null
+++ b/tools/solid_tools/qualsolid_boxplot_graph.sh
@@ -0,0 +1,94 @@
+#!/bin/sh
+
+#    Modified fastq_quality_boxplot_graph.sh from FASTX-toolkit - FASTA/FASTQ preprocessing tools.
+#    Copyright (C) 2009  A. Gordon (gordon at cshl.edu)
+#
+#   This program is free software: you can redistribute it and/or modify
+#   it under the terms of the GNU Affero General Public License as
+#   published by the Free Software Foundation, either version 3 of the
+#   License, or (at your option) any later version.
+#
+#   This program is distributed in the hope that it will be useful,
+#   but WITHOUT ANY WARRANTY; without even the implied warranty of
+#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#   GNU Affero General Public License for more details.
+#
+#    You should have received a copy of the GNU Affero General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+function usage()
+{
+	echo "SOLiD-Quality BoxPlot plotter"
+	echo "Generates a SOLiD quality score box-plot graph "
+	echo
+	echo "Usage: $0 [-i INPUT.TXT] [-t TITLE] [-p] [-o OUTPUT]"
+	echo
+	echo "  [-p]           - Generate PostScript (.PS) file. Default is PNG image."
+	echo "  [-i INPUT.TXT] - Input file. Should be the output of \"solid_qual_stats\" program."
+	echo "  [-o OUTPUT]    - Output file name. default is STDOUT."
+	echo "  [-t TITLE]     - Title (usually the solid file name) - will be plotted on the graph."
+	echo
+	exit 
+}
+
+#
+# Input Data columns: #pos	cnt	min	max	sum       	mean	Q1	med	Q3	IQR	lW	rW
+#  As produced by "solid_qual_stats" program
+
+TITLE=""					# default title is empty
+FILENAME=""
+OUTPUTTERM="set term png size 800,600"
+OUTPUTFILE="/dev/stdout"   			# Default output file is simply "stdout"
+while getopts ":t:i:o:ph" Option
+	do
+	case $Option in
+		# w ) CMD=$OPTARG; FILENAME="PIMSLogList.txt"; TARGET="logfiles"; ;;
+		t ) TITLE="for $OPTARG" ;;
+		i ) FILENAME=$OPTARG ;;
+		o ) OUTPUTFILE="$OPTARG" ;;
+		p ) OUTPUTTERM="set term postscript enhanced color \"Helvetica\" 4" ;;
+		h ) usage ;;
+		* ) echo "unrecognized argument. use '-h' for usage information."; exit 1 ;;
+	esac
+done
+shift $(($OPTIND - 1)) 
+
+
+if [ "$FILENAME" == "" ]; then
+	usage
+fi
+
+if [ ! -r "$FILENAME" ]; then
+	echo "Error: can't open input file ($FILENAME)." >&2
+	exit 1
+fi
+
+# Read the number of cycles from the stats file (each line is a cycle, minus the header line).
+# For the graph, xrange should reach (num_cycles+1), so 1 is not subtracted here.
+NUM_CYCLES=$(wc -l < "$FILENAME")
+
+GNUPLOTCMD="
+$OUTPUTTERM
+set boxwidth 0.8 
+set size 1,1
+set key Left inside
+set xlabel \"read position\"
+set ylabel \"Quality Score \"
+set title  \"Quality Scores $TITLE\"
+#set auto x
+set bars 4.0
+set xrange [ 0: $NUM_CYCLES ]
+set yrange [-2:45]
+set y2range [-2:45]
+set xtics 1 
+set x2tics 1
+set ytics 2
+set y2tics 2
+set tics out
+set grid ytics
+set style fill empty
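+# candlesticks columns: 1=pos, 7=Q1 (box bottom), 11=lW and 12=rW (whiskers), 9=Q3 (box top); medians overlaid from column 8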
+plot '$FILENAME' using 1:7:11:12:9 with candlesticks lt 1  lw 1 title 'Quartiles' whiskerbars, \
+      ''         using 1:8:8:8:8 with candlesticks lt -1 lw 2 title 'Medians'
+"
+
+echo "$GNUPLOTCMD" | gnuplot > "$OUTPUTFILE"
diff --git a/tools/solid_tools/solid_qual_boxplot.xml b/tools/solid_tools/solid_qual_boxplot.xml
new file mode 100644
index 0000000..dd3d1dc
--- /dev/null
+++ b/tools/solid_tools/solid_qual_boxplot.xml
@@ -0,0 +1,40 @@
+<tool id="solid_qual_boxplot" name="Draw quality score boxplot" version="1.0.0">
+	<description>for SOLiD data</description>
+	
+	<command interpreter="bash">qualsolid_boxplot_graph.sh -t '$input.name' -i $input -o $output</command>
+	
+	<inputs>
+		<param format="txt" name="input" type="data" label="Statistics report file (output of 'Quality Statistics for SOLiD data' tool)" />
+	</inputs>
+
+	<outputs>
+		<data format="png" name="output" metadata_source="input" />
+	</outputs>
+<help>
+
+**What it does**
+
+Creates a boxplot graph for the quality scores in the library.
+
+.. class:: infomark
+
+**TIP:** Use the **Quality Statistics for SOLiD data** tool to generate the report file needed for this tool.
+
+-----
+
+**Output Example**
+
+* Black horizontal lines are medians
+* Rectangular red boxes show the Inter-quartile Range (IQR) (top value is Q3, bottom value is Q1)
+* Whiskers show outliers at max. 1.5*IQR
+
+
+.. image:: ${static_path}/images/solid_qual.png
+
+------
+
+This tool is based on `FASTX-toolkit`__ by Assaf Gordon.
+
+ .. __: http://hannonlab.cshl.edu/fastx_toolkit/
+</help>
+</tool>
diff --git a/tools/solid_tools/solid_qual_stats.py b/tools/solid_tools/solid_qual_stats.py
new file mode 100644
index 0000000..d8299e6
--- /dev/null
+++ b/tools/solid_tools/solid_qual_stats.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+# Guruprasad Ananda
+from __future__ import print_function
+
+import sys
+import tempfile
+import zipfile
+
+QUAL_UPPER_BOUND = 41
+QUAL_LOWER_BOUND = 1
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def unzip( filename ):
+    zip_file = zipfile.ZipFile( filename, 'r' )
+    tmpfilename = tempfile.NamedTemporaryFile().name
+    for name in zip_file.namelist():
+        open( tmpfilename, 'a' ).write( zip_file.read( name ) )
+    zip_file.close()
+    return tmpfilename
+
+
+def __main__():
+    infile_score_name = sys.argv[1].strip()
+    fout = open(sys.argv[2].strip(), 'w')
+
+    if zipfile.is_zipfile( infile_score_name ):
+        infile_name = unzip( infile_score_name )
+    else:
+        infile_name = infile_score_name
+
+    readlen = None
+    invalid_lines = 0
+    j = 0
+    for line in open( infile_name ):
+        line = line.strip()
+        if not(line) or line.startswith("#") or line.startswith(">"):
+            continue
+        elems = line.split()
+        try:
+            for item in elems:
+                int(item)
+            if not readlen:
+                readlen = len(elems)
+            if len(elems) != readlen:
+                print("Note: Reads in the input dataset are of variable lengths.")
+            j += 1
+        except ValueError:
+            invalid_lines += 1
+        if j > 10:
+            break
+
+    position_dict = {}
+    print("column\tcount\tmin\tmax\tsum\tmean\tQ1\tmed\tQ3\tIQR\tlW\trW", file=fout)
+    for k, line in enumerate(open( infile_name )):
+        line = line.strip()
+        if not(line) or line.startswith("#") or line.startswith(">"):
+            continue
+        elems = line.split()
+        if position_dict == {}:
+            for pos in range(readlen):
+                position_dict[pos] = [0] * QUAL_UPPER_BOUND
+        if len(elems) != readlen:
+            invalid_lines += 1
+            continue
+        for ind, item in enumerate(elems):
+            try:
+                item = int(item)
+                position_dict[ind][item] += 1
+            except (ValueError, IndexError):
+                pass
+
+    invalid_positions = 0
+    for pos in position_dict:
+        carr = position_dict[pos]  # count array for position pos
+        total = sum(carr)  # number of bases found in this column.
+        med_elem = int(round(total / 2.0))
+        lowest = None   # Lowest quality score value found in this column.
+        highest = None  # Highest quality score value found in this column.
+        median = None   # Median quality score value found in this column.
+        qsum = 0.0      # Sum of quality score values for this column.
+        q1 = None       # 1st quartile quality score.
+        q3 = None       # 3rd quartile quality score.
+        q1_elem = int(round((total + 1) / 4.0))
+        q3_elem = int(round((total + 1) * 3 / 4.0))
+
+        try:
+            for ind, cnt in enumerate(carr):
+                qsum += ind * cnt
+
+                if cnt != 0:
+                    highest = ind
+
+                if lowest is None and cnt != 0:  # first non-zero count
+                    lowest = ind
+
+                if q1 is None:
+                    if sum(carr[:ind + 1]) >= q1_elem:
+                        q1 = ind
+
+                if median is None:
+                    if sum(carr[:ind + 1]) < med_elem:
+                        continue
+                    median = ind
+                    if total % 2 == 0:  # even number of elements
+                        median2 = median
+                        if sum(carr[:ind + 1]) < med_elem + 1:
+                            for ind2, elem in enumerate(carr[ind + 1:]):
+                                if elem != 0:
+                                    median2 = ind + ind2 + 1
+                                    break
+                        median = (median + median2) / 2.0
+
+                if q3 is None:
+                    if sum(carr[:ind + 1]) >= q3_elem:
+                        q3 = ind
+
+            mean = qsum / total  # Mean quality score value for this column.
+            iqr = q3 - q1
+            left_whisker = max(q1 - 1.5 * iqr, lowest)
+            right_whisker = min(q3 + 1.5 * iqr, highest)
+
+            print("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (pos + 1, total, lowest, highest, qsum, mean, q1, median, q3, iqr, left_whisker, right_whisker), file=fout)
+        except Exception:
+            invalid_positions += 1
+            nullvals = ['NA'] * 11
+            print("%s\t%s" % (pos + 1, '\t'.join(nullvals)), file=fout)
+
+    if invalid_lines:
+        print("Skipped %d reads as invalid." % invalid_lines)
+    if invalid_positions:
+        print("Skipped stats computation for %d read positions." % invalid_positions)
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/solid_tools/solid_qual_stats.xml b/tools/solid_tools/solid_qual_stats.xml
new file mode 100644
index 0000000..c201859
--- /dev/null
+++ b/tools/solid_tools/solid_qual_stats.xml
@@ -0,0 +1,69 @@
+<tool id="solid_qual_stats" name="Compute quality statistics" version="1.0.0">
+    <description>for SOLiD data</description>
+    <command interpreter="python">solid_qual_stats.py $input $output1</command>
+
+    <inputs>
+        <param format="qualsolid" name="input" type="data" label="SOLiD qual file" help="If your dataset doesn't show up in the menu, click the pencil icon next to your dataset and set the datatype to 'qualsolid'" />
+    </inputs>
+    <outputs>
+        <data format="txt" name="output1" metadata_source="input" />
+    </outputs>
+    <tests>
+        <test>
+            <param name="input" value="qualscores.qualsolid" />
+            <output name="output1" file="qualsolid.stats" />
+        </test>
+    </tests>
+
+<help>
+
+**What it does**
+
+Creates a quality statistics report for the given SOLiD quality score file.
+
+.. class:: infomark
+
+**TIP:** This statistics report can be used as input for the **Quality Boxplot for SOLiD data** and **Nucleotides Distribution** tools.
+
+-----
+
+**The output file will contain the following fields:**
+
+* column    = column number (position on the read)
+* count   = number of bases found in this column.
+* min     = Lowest quality score value found in this column.
+* max     = Highest quality score value found in this column.
+* sum     = Sum of quality score values for this column.
+* mean    = Mean quality score value for this column.
+* Q1    = 1st quartile quality score.
+* med   = Median quality score.
+* Q3    = 3rd quartile quality score.
+* IQR   = Inter-Quartile range (Q3-Q1).
+* lW    = 'Left-Whisker' value (for boxplotting).
+* rW    = 'Right-Whisker' value (for boxplotting).
+
+
+
+
+
+**Output Example**::
+
+    column  count   min max sum mean    Q1  med Q3  IQR lW  rW
+    1   6362991 2  32  250734117   20.41   5  9  28  23   2  31
+    2   6362991 2  32  250531036   21.37  10  26 30  20   5  31
+    3   6362991 2  34  248722469   19.09  10  26 30  20   5  31
+    4   6362991 2  34  247654797   18.92  10  26 30  20   5  31
+    .
+    .    
+    32  6362991 2  31  143436943   16.54   3  10  25  22  2  31
+    33  6362991 2  32  114269843   16.96   3  10  25  22  2  31
+    34  6362991 2  29  140638447   12.10   3  10  25  22  2  29
+    35  6362991 2  29  138910532   11.83   3  10  25  22  2  29
+    
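+A small worked example of the whisker convention used for lW and rW (the numbers are illustrative; the script clamps whiskers to the observed min and max after applying the 1.5*IQR rule)::
+
+    q1, q3, lowest, highest = 10, 26, 2, 32   # illustrative quartiles and observed range
+    iqr = q3 - q1                             # 16
+    lW = max(q1 - 1.5 * iqr, lowest)          # max(-14, 2)  -> 2
+    rW = min(q3 + 1.5 * iqr, highest)         # min(50, 32)  -> 32
+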
+------
+
+This tool is based on `FASTX-toolkit`__ by Assaf Gordon.
+
+ .. __: http://hannonlab.cshl.edu/fastx_toolkit/
+</help>
+</tool>
diff --git a/tools/sr_assembly/velvetg.xml b/tools/sr_assembly/velvetg.xml
new file mode 100644
index 0000000..9feaed5
--- /dev/null
+++ b/tools/sr_assembly/velvetg.xml
@@ -0,0 +1,301 @@
+<tool id="velvetg" name="velvetg" version="1.0.0">
+  <description>Velvet sequence assembler for very short reads</description>
+  <version_command>velvetg 2>&1 | grep "Version" | sed -e 's/Version //'</version_command>
+  <command interpreter="python">
+    velvetg_wrapper.py 
+           '$input.extra_files_path'
+           #if $generate_amos.afg  == "yes":
+               -amos_file $generate_amos.afg
+           #end if
+           #if $unused_reads.generate_unused  == "yes":
+               -unused_reads $unused_reads.generate_unused
+           #end if
+           $read_trkg
+           #if $coverage.cutoff == "auto":
+               -cov_cutoff auto
+           #elif $coverage.cutoff == "value":
+               -cov_cutoff $coverage.cov_cutoff
+           #end if
+           #if $expected.coverage == "auto":
+               -exp_cov auto
+           #elif $expected.coverage == "value":
+               -exp_cov $expected.exp_cov
+           #end if
+           #if $contig_lgth.use_contig_lgth == "yes":
+               -min_contig_lgth $contig_lgth.min_contig_lgth
+           #end if
+           #if $reads.paired == "yes":
+               #if int($reads.ins_length) > 0:
+                   -ins_length $reads.ins_length
+               #end if
+               #if $reads.options.advanced == "yes":
+                   #if int($reads.options.ins_length_sd) > 0:
+                       -ins_length_sd $reads.options.ins_length_sd
+                   #end if
+                   #if int($reads.options.ins_length2) > 0:
+                       -ins_length2 $reads.options.ins_length2
+                   #end if
+                   #if int($reads.options.ins_length2_sd) > 0:
+                       -ins_length2_sd $reads.options.ins_length2_sd
+                   #end if
+                   #if int($reads.options.ins_length_long) > 0:
+                       -ins_length_long $reads.options.ins_length_long
+                   #end if
+                   #if int($reads.options.ins_length_long_sd) > 0:
+                       -ins_length_long_sd $reads.options.ins_length_long_sd
+                   #end if
+                   #if int($reads.options.max_branch_length) > 0:
+                       -max_branch_length $reads.options.max_branch_length
+                   #end if
+                   #if int($reads.options.max_divergence) > 0:
+                       -max_divergence $reads.options.max_divergence
+                   #end if
+                   #if int($reads.options.max_gap_count) > 0:
+                       -max_gap_count $reads.options.max_gap_count
+                   #end if
+                   #if int($reads.options.min_pair_count) > 0:
+                       -min_pair_count $reads.options.min_pair_count
+                   #end if
+                   #if int($reads.options.max_coverage) > 0:
+                       -max_coverage $reads.options.max_coverage
+                   #end if
+                   #if int($reads.options.long_mult_cutoff) > 0:
+                       -long_mult_cutoff $reads.options.long_mult_cutoff
+                   #end if
+                   $reads.options.scaffolding
+               #end if
+           #end if
+  </command>
+  <inputs>
+    <param name="input" type="data" format="velvet" label="Velvet Dataset" help="Prepared by velveth."/>
+    <conditional name="generate_amos">
+      <param name="afg" type="select" label="Generate a AMOS.afg file">
+        <option value="no">No</option>
+        <option value="yes">Yes</option>
+      </param>
+      <when value="no"/>
+      <when value="yes"/>
+    </conditional>
+
+    <conditional name="unused_reads">
+      <param name="generate_unused" type="select" label="Generate a UnusedReads fasta file">
+        <option value="no">No</option>
+        <option value="yes">Yes</option>
+      </param>
+      <when value="no"/>
+      <when value="yes"/>
+    </conditional>
+
+    <conditional name="last_graph">
+      <param name="generate_graph" type="select" label="Generate velvet LastGraph file">
+        <option value="no">No</option>
+        <option value="yes">Yes</option>
+      </param>
+      <when value="no"/>
+      <when value="yes"/>
+    </conditional>
+
+    <param name="read_trkg" type="boolean" checked="false" truevalue="-read_trkg yes" falsevalue="-read_trkg no" label="Tracking of short read positions in assembly" help="Generates Graph2 dataset" />
+
+    <conditional name="coverage">
+      <param name="cutoff" type="select" label="Coverage cutoff" help="">
+        <option value="none">None</option>
+        <option value="auto">Automatically Determined</option> 
+        <option value="value">Specify Cutoff Value</option>
+      </param>
+      <when value="none"/>
+      <when value="auto"/>
+      <when value="value">
+        <param name="cov_cutoff" value = "10.0" label="Remove nodes with coverage below" type="float" />
+      </when>
+    </conditional>
+
+    <conditional name="expected">
+      <param name="coverage" type="select" label="Expected Coverage of Unique Regions" help="">
+        <option value="none">None</option>
+        <option value="auto">Automatically Determined</option> 
+        <option value="value">Specify Expected Value</option>
+      </param>
+      <when value="none"/>
+      <when value="auto"/>
+      <when value="value">
+        <param name="exp_cov" value = "10.0" label="Remove nodes with coverage below" type="float" />
+      </when>
+    </conditional>
+
+    <conditional name="contig_lgth">
+      <param name="use_contig_lgth" type="select" label=" Set minimum contig length" help="minimum contig length exported to contigs.fa file (default: hash length * 2).">
+        <option value="no">No</option>
+        <option value="yes">Yes</option>
+      </param>
+      <when value="no"/>
+      <when value="yes">
+        <param name="min_contig_lgth" value = "42" label="minimum contig length" type="integer" help="minimum contig length exported to contigs.fa file (default: hash length * 2)"/>
+      </when>
+    </conditional>
+
+    <conditional name="reads">
+      <param name="paired" type="select" label="Using Paired Reads">
+        <option value="no">No</option>
+        <option value="yes" selected="${input.metadata.paired_end_reads}">Yes</option>
+      </param>
+      <when value="no"/>
+      <when value="yes">
+        <param name="ins_length" value = "-1" label="Insert Length in Paired-End Read dataset (ignored when -1)" type="integer" help="Expected distance between two paired end reads"/>
+        <conditional name="options">
+          <param name="advanced" type="select" label="Velvet Advanced Options">
+            <option value="no">Use Defaults</option>
+            <option value="yes">Set Advanced Option Values</option>
+          </param>
+          <when value="no"/>
+          <when value="yes">
+            <param name="ins_length_sd" value = "-1" label="Estimate of Standard Deviation of Paired-End Read dataset(ignored when -1)" type="integer" help="Estimate of standard deviation of Paired-End Read dataset (default: 10% of corresponding length)"/>
+            <param name="ins_length2" value = "-1" label="Insert Length in 2nd Paired-End Short Read dataset (ignored when -1)" type="integer" help="Expected distance between two paired end reads in the second short-read dataset"/>
+            <param name="ins_length2_sd" value = "-1" label="Estimate of Standard Deviation of 2nd Paired-End Read dataset (ignored when -1)" type="integer" help="Estimate of standard deviation of 2nd Paired-End Read dataset (default: 10% of corresponding length)"/>
+            <param name="ins_length_long" value = "-1" label="Insert Length in Long Paired-End Read dataset (ignored when -1)" type="integer" help="Expected distance between two long paired-end reads"/>
+            <param name="ins_length_long_sd" value = "-1" label="Estimate of Standard Deviation of 2nd Paired-End Read dataset (ignored when -1)" type="integer" help="Estimate of standard deviation of Long Paired-End Read dataset (default: 10% of corresponding length)"/>
+            <param name="max_branch_length" value = "-1" label="Maximum branch length (ignored when -1)" type="integer" help="maximum length in base pair of bubble (default: 100)"/>
+            <param name="max_divergence" value = "-1." label="Maximum max_divergence (between .0 and 1., ignored when -1.)" type="float" help="maximum divergence rate between two branches in a bubble (default: .2)"/>
+            <param name="max_gap_count" value = "-1" label="Maximum gap count (ignored when -1)" type="integer" help="maximum number of gaps allowed in the alignment of the two branches of a bubble (default: 3)"/>
+            <param name="min_pair_count" value = "-1" label="Minimum read-pair count (ignored when -1)" type="integer" help="minimum number of paired end connections to justify the scaffolding of two long contigs (default: 10)"/>
+            <param name="max_coverage" value = "-1." label="Maximum coverage exclusion(ignored when -1.)" type="float" help="Exclude data that has coverage more than this maximum coverage value"/>
+            <param name="long_mult_cutoff" value = "-1" label="Minimum number of long reads required to merge contigs (ignored when -1)" type="integer" help="minimum number of long reads required to merge contigs (default: 2)"/>
+            <param name="scaffolding" type="boolean" checked="true" truevalue="-scaffolding yes" falsevalue="-scaffolding no" label="Use Scaffolding" help="Scaffold contigs that it cannot quite be connected (This results in sequences of Ns in the contigs)"/>
+
+          </when>
+        </conditional>
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="txt" name="Graph2" label="${tool.name} on ${on_string}: Graph2" from_work_dir="Graph2">
+      <filter>read_trkg is True</filter>
+    </data>
+    <data format="txt" name="LastGraph" label="${tool.name} on ${on_string}: LastGraph" from_work_dir="LastGraph">
+      <filter>last_graph['generate_graph'] == "yes"</filter>
+    </data>
+    <data format="afg" name="velvet_asm" label="${tool.name} on ${on_string}: AMOS.afg" from_work_dir="velvet_asm.afg">
+      <filter>generate_amos['afg'] == "yes"</filter>
+    </data>
+    <data format="fasta" name="unused_reads_fasta" label="${tool.name} on ${on_string}: Unused Reads" from_work_dir="UnusedReads.fa">
+      <filter>unused_reads['generate_unused'] == "yes"</filter>
+    </data>
+    <data format="tabular" name="stats" label="${tool.name} on ${on_string}: Stats" from_work_dir="stats.txt" />
+    <data format="fasta" name="contigs" label="${tool.name} on ${on_string}: Contigs" from_work_dir="contigs.fa" />
+  </outputs>
+  <requirements>
+    <requirement type="package">velvet</requirement>
+  </requirements>
+  <tests>
+    <test>
+      <param name="input" value="velveth_test1/output.html" ftype="velvet" >
+        <composite_data value='velveth_test1/Sequences' ftype="Sequences"/>
+        <composite_data value='velveth_test1/Roadmaps' ftype="Roadmaps"/>
+        <composite_data value='velveth_test1/Log'/>
+      </param>
+      <param name="afg" value="yes" />
+      <param name="generate_unused" value="yes" />
+      <param name="generate_graph" value="no" />
+      <param name="read_trkg" value="-read_trkg no" />
+      <param name="cutoff" value="auto" />
+      <param name="coverage" value="auto" />
+      <param name="use_contig_lgth" value="no" />
+      <param name="paired" value="no" />
+      <!--
+      <output name="LastGraph" file="velvetg_test1/lastgraph.txt" compare="diff"/>
+      -->
+      <output name="velvet_asm" file="velvetg_test1/amos.afg" compare="diff"/>
+      <output name="unused_reads_fasta" file="velvetg_test1/unusedreads.fa" compare="diff"/>
+      <output name="stats" file="velvetg_test1/stats.csv" compare="diff"/>
+      <output name="contigs" file="velvetg_test1/contigs.fa" compare="diff"/>
+    </test>
+  </tests>
+  <help>
+**Velvet Overview**
+
+Velvet_ is a de novo genomic assembler specially designed for short read sequencing technologies, such as Solexa or 454, developed by Daniel Zerbino and Ewan Birney at the European Bioinformatics Institute (EMBL-EBI), near Cambridge, in the United Kingdom.
+
+Velvet currently takes in short read sequences, removes errors then produces high quality unique contigs. It then uses paired-end read and long read information, when available, to retrieve the repeated areas between contigs.
+
+Read the Velvet `documentation`__ for details on using the Velvet Assembler. 
+
+.. _Velvet: http://www.ebi.ac.uk/~zerbino/velvet/
+
+.. __: http://www.ebi.ac.uk/~zerbino/velvet/Manual.pdf
+
+------
+
+**Input formats**
+
+Velvet can input sequence files in the following formats: fasta fastq fasta.gz fastq.gz eland gerald
+
+The input files are prepared for the velvet assembler using **velveth**.
+
+------
+
+**Outputs**
+
+**Contigs**
+
+The *contigs.fa* file.  
+This fasta file contains the sequences of the contigs longer than 2k, where k is the word-length used in velveth. If you have specified a min contig length threshold, then the contigs shorter than that value are omitted.
+Note that the length and coverage information provided in the header of each contig should therefore be understood in k-mers and in k-mer coverage (cf. 5.1) respectively.
+The N's in the sequence correspond to gaps between scaffolded contigs. The number of N's corresponds to the estimated length of the gap. For reasons of compatibility with the archives, any gap shorter than 10bp is represented by a sequence of 10 N's.
+
+**Stats**
+
+The *stats.txt* file.  
+This file is a simple tab-delimited description of the nodes. The column names are largely self-explanatory. Note however that node lengths are given in k-mers. To obtain the length in nucleotides of each node you simply need to add k - 1, where k is the word-length used in velveth (for example, a node of length 100 at k=21 spans 120 nucleotides).
+The in and out columns correspond to the number of arcs on the 5' and 3' ends of the contig respectively.
+The coverages in columns short1 cov, short1 Ocov, short2 cov, and short2 Ocov are provided in k-mer coverage (5.1).
+Also, the difference between # cov and # Ocov is the way these values are computed. In the first count, slightly divergent sequences are added to the coverage tally. However, in the second, stricter count, only the sequences which map perfectly onto the consensus sequence are taken into account.
+
+**LastGraph**
+
+The *LastGraph* file.  
+This file describes in its entirety the graph produced by Velvet. 
+
+**AMOS.afg**
+
+The *velvet_asm.afg* file.  
+This file is mainly designed to be read by the open-source AMOS genome assembly package. Nonetheless, a number of programs are available to transform this kind of file into other assembly file formats (namely ACE, TIGR, Arachne and Celera). See http://amos.sourceforge.net/ for more information.
+The file describes all the contigs contained in the contigs.fa file (cf 4.2.1).
+
+------
+
+**Velvet parameter list**
+
+This is a list of implemented Velvetg options::
+
+  Standard options:
+        -cov_cutoff  floating-point|auto : removal of low coverage nodes AFTER tour bus or allow the system to infer it
+                (default: no removal)
+        -ins_length  integer             : expected distance between two paired end reads (default: no read pairing)
+        -read_trkg  yes|no               : tracking of short read positions in assembly (default: no tracking)
+        -min_contig_lgth  integer        : minimum contig length exported to contigs.fa file (default: hash length * 2)
+        -amos_file  yes|no               : export assembly to AMOS file (default: no export)
+        -exp_cov  floating point|auto    : expected coverage of unique regions or allow the system to infer it
+                (default: no long or paired-end read resolution)
+   
+  Advanced options:
+        -ins_length2  integer            : expected distance between two paired-end reads in the second short-read dataset (default: no read pairing)
+        -ins_length_long  integer        : expected distance between two long paired-end reads (default: no read pairing)
+        -ins_length*_sd  integer         : est. standard deviation of respective dataset (default: 10% of corresponding length)
+                [replace '*' by nothing, '2' or '_long' as necessary]
+        -scaffolding  yes|no             : scaffolding of contigs using paired end information (default: on)
+        -max_branch_length  integer      : maximum length in base pair of bubble (default: 100)
+        -max_divergence  floating-point  : maximum divergence rate between two branches in a bubble (default: 0.2)
+        -max_gap_count  integer          : maximum number of gaps allowed in the alignment of the two branches of a bubble (default: 3)
+        -min_pair_count  integer         : minimum number of paired end connections to justify the scaffolding of two long contigs (default: 10)
+        -max_coverage  floating point    : removal of high coverage nodes AFTER tour bus (default: no removal)
+        -long_mult_cutoff  int           : minimum number of long reads required to merge contigs (default: 2)
+        -unused_reads  yes|no            : export unused reads in UnusedReads.fa file (default: no)
+   
+  Output:
+        directory/contigs.fa             : fasta file of contigs longer than twice hash length
+        directory/stats.txt              : stats file (tab-spaced) useful for determining appropriate coverage cutoff
+        directory/LastGraph              : special formatted file with all the information on the final graph
+        directory/velvet_asm.afg         : (if requested) AMOS compatible assembly file
+
+  </help>
+</tool>
diff --git a/tools/sr_assembly/velvetg_wrapper.py b/tools/sr_assembly/velvetg_wrapper.py
new file mode 100644
index 0000000..292dc36
--- /dev/null
+++ b/tools/sr_assembly/velvetg_wrapper.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+"""
+Wrapper script for running the velvetg assembler.
+James E Johnson - University of Minnesota
+"""
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def __main__():
+    # Parse Command Line
+    working_dir = sys.argv[1]
+    inputs = ' '.join(sys.argv[2:])
+    for _ in ('Roadmaps', 'Sequences'):
+        os.symlink(os.path.join(working_dir, _), _)
+    cmdline = 'velvetg . %s' % (inputs)
+    print("Command to be executed: %s" % cmdline)
+    try:
+        proc = subprocess.Popen( args=cmdline, shell=True, stderr=subprocess.PIPE )
+        returncode = proc.wait()
+        # get stderr, allowing for case where it's very large
+        stderr = ''
+        buffsize = 1048576
+        try:
+            while True:
+                stderr += proc.stderr.read( buffsize )
+                if not stderr or len( stderr ) % buffsize != 0:
+                    break
+        except OverflowError:
+            pass
+        if returncode != 0:
+            raise Exception(stderr)
+    except Exception as e:
+        stop_err( 'Error running velvetg ' + str( e ) )
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/sr_assembly/velveth.xml b/tools/sr_assembly/velveth.xml
new file mode 100644
index 0000000..369b54a
--- /dev/null
+++ b/tools/sr_assembly/velveth.xml
@@ -0,0 +1,129 @@
+<tool id="velveth" name="velveth" version="1.0.0">
+  <description>Prepare a dataset for the Velvet velvetg Assembler</description>
+  <version_command>velveth 2>&1 | grep "Version" | sed -e 's/Version //'</version_command>
+  <command interpreter="python">
+    velveth_wrapper.py 
+           '$out_file1' '$out_file1.extra_files_path'
+           $hash_length
+           $strand_specific
+           #for $i in $inputs
+                ${i.file_format}
+                ${i.read_type}
+                ${i.input}
+           #end for
+  </command>
+  <inputs>
+    <param label="Hash Length" name="hash_length" type="select" help="k-mer length in base pairs of the words being hashed.">
+      <option value="11">11</option>
+      <option value="13">13</option>
+      <option value="15">15</option>
+      <option value="17">17</option>
+      <option value="19">19</option>
+      <option value="21" selected="yes">21</option>
+      <option value="23">23</option>
+      <option value="25">25</option>
+      <option value="27">27</option>
+      <option value="29">29</option>
+    </param>
+    <param name="strand_specific" type="boolean" checked="false" truevalue="-strand_specific" falsevalue="" label="Use strand specific transcriptome sequencing" help="If you are using a strand specific transcriptome sequencing protocol, you may wish to use this option for better results."/>
+    <repeat name="inputs" title="Input Files">
+      <param label="file format" name="file_format" type="select">
+        <option value="-fasta" selected="yes">fasta</option>
+        <option value="-fastq">fastq</option>
+        <option value="-eland">eland</option>
+        <option value="-gerald">gerald</option>
+      </param>
+      <param label="read type" name="read_type" type="select">
+        <option value="-short" selected="yes">short reads</option>
+        <option value="-shortPaired">shortPaired reads</option>
+        <option value="-short2">short2 reads</option>
+        <option value="-shortPaired2">shortPaired2 reads</option>
+        <option value="-long">long reads</option>
+        <option value="-longPaired">longPaired reads</option>
+      </param>
+
+      <param name="input" type="data" format="fasta,fastq,eland,gerald" label="Dataset"/>
+    </repeat>
+  </inputs>
+  <outputs>
+    <data format="velvet" name="out_file1" />
+  </outputs>
+  <requirements>
+    <requirement type="package">velvet</requirement>
+  </requirements>
+  <tests>
+    <test>
+      <param name="hash_length" value="21" />
+      <param name="read_type" value="-shortPaired" />
+      <!-- <repeat name="inputs"> -->
+      <param name="file_format" value="fasta" />
+      <param name="read_type" value="shortPaired reads" />
+      <param name="input" value="velvet_test_reads.fa" ftype="fasta" />
+      <!-- </repeat> -->
+      <param name="strand_specific" value="" />
+      <output name="out_file1" file="velveth_test1/output.html" lines_diff="4">
+        <extra_files type="file" name='Sequences' value="velveth_test1/Sequences" compare="diff" />
+        <extra_files type="file" name='Roadmaps' value="velveth_test1/Roadmaps" compare="diff" />
+      </output>
+    </test>
+  </tests>
+  <help>
+**Velvet Overview**
+
+Velvet_ is a de novo genomic assembler specially designed for short read sequencing technologies, such as Solexa or 454, developed by Daniel Zerbino and Ewan Birney at the European Bioinformatics Institute (EMBL-EBI), near Cambridge, in the United Kingdom.
+
+Velvet currently takes in short read sequences, removes errors then produces high quality unique contigs. It then uses paired-end read and long read information, when available, to retrieve the repeated areas between contigs.
+
+Read the Velvet `documentation`__ for details on using the Velvet Assembler.
+
+.. _Velvet: http://www.ebi.ac.uk/~zerbino/velvet/
+
+.. __: http://www.ebi.ac.uk/~zerbino/velvet/Manual.pdf
+
+------
+
+**Velveth**
+
+Velveth takes in a number of sequence files, produces a hashtable, then outputs two files in an output directory (creating it if necessary), Sequences and Roadmaps, which velvetg requires.
+
+------
+
+**Hash Length**
+
+The hash length, also known as k-mer length, corresponds to the length, in base pairs, of the words being hashed. 
+
+The hash length is the length of the k-mers being entered in the hash table. Firstly, you must observe three technical constraints::
+
+  1. it must be an odd number, to avoid palindromes. If you put in an even number, Velvet will just decrement it and proceed.
+  2. it must be below or equal to MAXKMERHASH length (cf. 2.3.3, by default 31bp), because it is stored on 64 bits.
+  3. it must be strictly inferior to read length, otherwise you simply will not observe any overlaps between reads, for obvious reasons.
+
+Now you still have quite a lot of possibilities. As is often the case, it's a trade-off between specificity and sensitivity. Longer k-mers bring you more specificity (i.e. fewer spurious overlaps) but lower coverage (cf. below), so there's a sweet spot to be found with time and experience.
+We like to think in terms of "k-mer coverage", i.e. how many times a k-mer has been seen among the reads. The relation between k-mer coverage Ck and standard (nucleotide-wise) coverage C is Ck = C * (L - k + 1) / L, where k is your hash length and L your read length.
+Experience shows that this k-mer coverage should be above 10 to start getting decent results. If Ck is above 20, you might be "wasting" coverage. Experience also shows that empirical tests with different values for k are not that costly to run!
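+
+A quick sanity check of that relation, with made-up numbers::
+
+    C, L, k = 30.0, 50, 21        # nucleotide coverage, read length, hash length (illustrative)
+    Ck = C * (L - k + 1) / L      # 30 * 30 / 50 = 18.0, a usable k-mer coverage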
+
+**Input Files**
+
+Velvet works mainly with fasta and fastq formats. For paired-end reads, the assumption is that each read is next to its mate
+read. In other words, if the reads are indexed from 0, then reads 0 and 1 are paired, 2 and 3, 4 and 5, etc.
+
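+Two matched FASTQ files can be interleaved into this layout with a short sketch along these lines (file names are hypothetical; both inputs are assumed to list mates in the same order)::
+
+    fwd = open("reads_1.fastq")             # hypothetical forward reads
+    rev = open("reads_2.fastq")             # hypothetical reverse reads
+    out = open("interleaved.fastq", "w")
+    while True:
+        rec1 = [fwd.readline() for _ in range(4)]   # one FASTQ record = 4 lines
+        rec2 = [rev.readline() for _ in range(4)]
+        if not rec1[0] or not rec2[0]:
+            break                                   # end of either file
+        out.writelines(rec1 + rec2)                 # mate pairs end up adjacent
+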
+Supported file formats are::
+
+  fasta
+  fastq 
+  fasta.gz 
+  fastq.gz 
+  eland
+  gerald
+
+Read categories are::
+
+  short (default)
+  shortPaired
+  short2 (same as short, but for a separate insert-size library)
+  shortPaired2 (see above)
+  long (for Sanger, 454 or even reference sequences)
+  longPaired
+
+  </help>
+</tool>
diff --git a/tools/sr_assembly/velveth_wrapper.py b/tools/sr_assembly/velveth_wrapper.py
new file mode 100644
index 0000000..cbdbec6
--- /dev/null
+++ b/tools/sr_assembly/velveth_wrapper.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+"""
+Wrapper script for preparing a Velvet dataset with velveth.
+James E Johnson - University of Minnesota
+"""
+import os
+import subprocess
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+
+def stop_err( msg ):
+    sys.stderr.write( "%s\n" % msg )
+    sys.exit()
+
+
+def __main__():
+    # Parse command line
+    html_file = sys.argv[1]
+    working_dir = sys.argv[2]
+    try:  # the output's extra-files directory must exist before velveth runs (also needed by tests)
+        os.makedirs(working_dir)
+    except Exception as e:
+        stop_err( 'Error creating velveth output directory ' + str( e ) )
+    hash_length = sys.argv[3]
+    inputs = ' '.join(sys.argv[4:])
+    cmdline = 'velveth %s %s %s > /dev/null' % (working_dir, hash_length, inputs)
+    try:
+        proc = subprocess.Popen( args=cmdline, shell=True, stderr=subprocess.PIPE )
+        returncode = proc.wait()
+        # get stderr, allowing for case where it's very large
+        stderr = ''
+        buffsize = 1048576
+        try:
+            while True:
+                stderr += proc.stderr.read( buffsize )
+                if not stderr or len( stderr ) % buffsize != 0:
+                    break
+        except OverflowError:
+            pass
+        if returncode != 0:
+            raise Exception(stderr)
+    except Exception as e:
+        stop_err( 'Error running velveth ' + str( e ) )
+    sequences_path = os.path.join(working_dir, 'Sequences')
+    roadmaps_path = os.path.join(working_dir, 'Roadmaps')
+    rval = ['<html><head><title>Velvet Galaxy Composite Dataset </title></head><p/>']
+    rval.append('<div>%s<p/></div>' % (cmdline) )
+    rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
+    rval.append( '<li><a href="%s" type="text/plain">%s </a>%s</li>' % (sequences_path, 'Sequences', 'Sequences' ) )
+    rval.append( '<li><a href="%s" type="text/plain">%s </a>%s</li>' % (roadmaps_path, 'Roadmaps', 'Roadmaps' ) )
+    rval.append( '</ul></div></html>' )
+    with open(html_file, 'w') as f:
+        f.write("\n".join( rval ))
+        f.write('\n')
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/sr_mapping/PerM.xml b/tools/sr_mapping/PerM.xml
new file mode 100644
index 0000000..d0b84ea
--- /dev/null
+++ b/tools/sr_mapping/PerM.xml
@@ -0,0 +1,369 @@
+<tool id="PerM" name="Map with PerM" version="1.1.2">
+  <description>for SOLiD and Illumina</description>
+  <!-- works with PerM version 0.2.6 -->
+  <requirements>
+      <requirement type="package">perm</requirement>
+  </requirements>
+  <command>
+    echo -n "PerM "; PerM 2>&1 | grep "Version";
+    PerM
+      #if $s.sourceOfRef.refSource == "history"
+        $s.sourceOfRef.ref
+      #else
+        #if $s.space == "color"
+          "${s.sourceOfRef.index.fields.path}"
+        #elif $s.space == "base"
+          "${s.sourceOfRef.index.fields.path}"
+        #end if
+      #end if
+      #if $s.mate.singleOrPairs == "single":
+        $s.mate.reads
+      #else:
+        -1 $s.mate.reads1 -2 $s.mate.reads2
+        -U $s.mate.upperbound
+        -L $s.mate.lowerbound
+        $s.mate.excludeAmbiguousPairs
+      #end if
+      #if $s.space == "color":
+        --readFormat "csfastq"
+      #else:
+        --readFormat "fastq"
+      #end if
+      #if $int($str($valAlign)) >= 0
+        -v $valAlign
+      #end if
+      #if $align.options == "full":
+        --seed $align.seed
+        -$align.alignments
+        #if $str($align.delimiter) != "None"
+          --delimiter $align.delimiter
+        #end if
+        -T $align.sTrimL
+        $align.includeReadsWN
+        $align.statsOnly
+        $align.ignoreQS
+      #end if
+      #if $str($bUnmappedRead) == "true" and $s.space == "color"
+        -u $unmappedReadOutCS
+      #elif $str($bUnmappedRead) == "true" and $s.space == "base"
+        -u $unmappedReadOut
+      #end if
+      -o $output
+      --outputFormat sam
+      --noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+  </command>
+  <inputs>
+    <conditional name="s">
+      <param name="space" label="Is your data color space (SOLiD) or base space (Illumina)?" type="select">
+        <option value="color">Color space</option>
+        <option value="base">Base space</option>
+      </param>
+      <when value="color">
+        <conditional name="sourceOfRef">
+          <param name="refSource" label="Will you provide your own reference file from the history or use a built-in index?" type="select">
+            <option value="indexed">Built-in index</option>
+            <option value="history">Fasta file from history</option>
+          </param>
+          <when value="indexed">
+            <param name="index" type="select" label="Select a reference genome (with seed and read length)" help="if your genome of interest is not listed - contact Galaxy team">
+              <options from_data_table="perm_color_indexes"/>
+            </param>
+          </when>
+          <when value="history">
+            <param name="ref" format="fasta" type="data" label="Reference" />
+          </when>
+        </conditional>
+        <conditional name="mate">
+          <param name="singleOrPairs" label="Mate-paired?" type="select">
+            <option value="single">Single-end</option>
+            <option value="paired">Mate pairs</option>
+          </param>
+          <when value="single">
+            <param format="fastqcssanger" name="reads" type="data" label="Reads" />
+          </when>
+          <when value="paired">
+            <param name="reads1" format="fastqcssanger" label="Forward FASTQ file" type="data" />
+            <param name="reads2" format="fastqcssanger" label="Reverse FASTQ file" type="data" />
+            <param label="Upperbound of pairs separation (-U)" name="upperbound" type="integer" size="8" value="100000" />
+            <param label="Lowerbound of pairs separation (-L)" name="lowerbound" type="integer" size="8" value="0" />
+            <param label="Exclude ambiguous pairs (-e)" name="excludeAmbiguousPairs" type="boolean" checked="false" truevalue="-e" falsevalue="" />
+          </when>
+        </conditional>
+      </when>
+      <when value="base">
+        <conditional name="sourceOfRef">
+          <param name="refSource" label="Will you provide your own reference file from the history or use a built-in index?" type="select">
+            <option value="indexed">Built-in index</option>
+            <option value="history">Fasta file from history</option>
+          </param>
+          <when value="indexed">
+            <param name="index" type="select" label="Select a reference genome with seed and read length" help="if your genome of interest is not listed - contact Galaxy team">
+              <options from_data_table="perm_base_indexes"/>
+            </param>
+          </when>
+          <when value="history">
+            <param name="ref" format="fasta" type="data" label="Reference" />
+          </when>
+        </conditional>
+        <conditional name="mate">
+          <param name="singleOrPairs" label="Mate-paired?" type="select">
+            <option value="single">Single-end</option>
+            <option value="paired">Mate pairs</option>
+          </param>
+          <when value="single">
+            <param format="fastqsanger" name="reads" type="data" label="Reads" />
+          </when>
+          <when value="paired">
+            <param name="reads1" format="fastqsanger" label="Forward FASTQ file" type="data" />
+            <param name="reads2" format="fastqsanger" label="Reverse FASTQ file" type="data" />
+            <param label="Upperbound of pairs separation (-U)" name="upperbound" type="integer" size="8" value="100000" />
+            <param label="Lowerbound of pairs separation (-L)" name="lowerbound" type="integer" size="8" value="0" />
+            <param label="Exclude ambiguous pairs (-e)" name="excludeAmbiguousPairs" type="boolean" checked="false" truevalue="-e" falsevalue="" />
+          </when>
+        </conditional>
+      </when>
+    </conditional>
+    <param label="Maximum number of mismatches permitted in one end of full read (-v)" name="valAlign" type="integer" size="5" value="2" />
+    <conditional name="align">
+      <param help="Use default setting or specify full parameters list" label="PerM settings to use" name="options" type="select">
+        <option value="preSet">Commonly used</option>
+        <option value="full">Full parameter list</option>
+      </param>
+      <when value="preSet"/>
+      <when value="full">
+        <param label="Whether or not to report all valid alignments per read (-A/-B/-E)" name="alignments" type="select">
+          <option value="A">Report all valid alignments</option>
+          <option value="B">Report the best alignments in terms of number of mismatches</option>
+          <option value="E">Report only uniquely mapped reads</option>
+        </param>
+        <param label="Choose the seed full sensitive to different number of mismatches (--seed)" name="seed" type="select" >
+          <option value="F2">2 mismatches</option>
+          <option value="S11">1 SNP + 1 color error</option>
+          <option value="F3">3 mismatches</option>
+          <option value="F4">4 mismatches</option>
+        </param>
+        <param label="Choose the delimiter to identify read name (--delimiter)" name="delimiter" type="select">
+          <option value="None">Tab/Space/Comma</option>
+          <option value=":">Colon</option>
+          <option value="_">Underscore</option>
+        </param>
+        <param label="Use the first n bases of each read for alignment (-T)" name="sTrimL" type="integer" size="5" value="50" />
+        <param name="includeReadsWN" type="boolean" checked="true" truevalue="--includeReadsWN" falsevalue="" label="Include reads with 'N' or '.' by encoding '.' as 3, 'N' as 'A' (--includeReadsWN)" /> 
+        <param name="statsOnly" type="boolean" checked="false" truevalue="--statsOnly" falsevalue="" label="Output mapping stats only. Don't output alignments (--statsOnly)" />
+        <param name="ignoreQS" type="boolean" checked="false" truevalue="--ignoreQS" falsevalue="" label="Ignore quality scores (--ignoreQS)" />
+      </when>
+    </conditional> <!-- options -->
+    <param name="bUnmappedRead" type="select" label="Output the unmapped reads (-u)">
+      <option value="true">Yes</option>
+      <option value="false">No</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="sam" name="output" label="${tool.name} on ${on_string}: mapped reads" />
+    <data format="fastqsanger" name="unmappedReadOut" label="${tool.name} on ${on_string}: unmapped reads">
+      <filter>bUnmappedRead == "true" and s["space"] == "base"</filter>
+    </data>
+    <data format="fastqcssanger" name="unmappedReadOutCS" label="${tool.name} on ${on_string}: unmapped reads">
+      <filter>bUnmappedRead == "true" and s["space"] == "color"</filter>
+    </data>
+  </outputs>
+  <tests>
+    <test>
+      <!--
+      PerM command:
+      PerM /afs/bx.psu.edu/depot/data/genome/phiX/perm_index/phiX_base_F3_50.index -1 test-data/perm_in1.fastqsanger -2 test-data/perm_in2.fastqsanger -U 100000 -L 0 -e +readFormat fastq -v 0 +seed F3 -A -T 50 +includeReadsWN -o perm_out1.sam +outputFormat sam +noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+      You need to replace each "+" above with two dashes ("--").
+      -->
+      <param name="space" value="base" />
+      <param name="refSource" value="indexed" />
+      <param name="index" value="phiX_F3_50" />
+      <param name="singleOrPairs" value="paired" />
+      <param name="reads1" value="perm_in1.fastqsanger" ftype="fastqsanger" />
+      <param name="reads2" value="perm_in2.fastqsanger" ftype="fastqsanger" />
+      <param name="upperbound" value="100000" />
+      <param name="lowerbound" value="0" />
+      <param name="excludeAmbiguousPairs" value="true" />
+      <param name="valAlign" value="0" />
+      <param name="options" value="full" />
+      <param name="alignments" value="A" />
+      <param name="seed" value="F3" />
+      <param name="delimiter" value="None" />
+      <param name="sTrimL" value="50" />
+      <param name="includeReadsWN" value="true" />
+      <param name="statsOnly" value="false" />
+      <param name="ignoreQS" value="false" />
+      <param name="bUnmappedRead" value="false" />
+      <output name="output" file="perm_out1.sam" ftype="sam" />
+    </test>
+    <test>
+      <!--
+      PerM command:
+      PerM test-data/chr_m.fasta test-data/perm_in3.fastqsanger +readFormat fastq -v 2 -u perm_out3.fastqsanger -o perm_out2.sam +outputFormat sam +noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+      You need to replace each "+" above with two dashes ("--").
+      -->
+      <param name="space" value="base" />
+      <param name="refSource" value="history" />
+      <param name="ref" value="chr_m.fasta" ftype="fasta" />
+      <param name="singleOrPairs" value="single" />
+      <param name="reads" value="perm_in3.fastqsanger" ftype="fastqsanger" />
+      <param name="valAlign" value="2" />
+      <param name="options" value="preSet" />
+      <param name="bUnmappedRead" value="true" />
+      <output name="output" file="perm_out2.sam" ftype="sam" />
+      <output name="unmappedReadOut" file="perm_out3.fastqsanger" ftype="fastqsanger" />
+    </test>
+    <test>
+      <!--
+      PerM command:
+      PerM test-data/phiX.fasta test-data/perm_in4.fastqcssanger +readFormat csfastq -v 1 -o perm_out4.sam +outputFormat sam +noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+      You need to replace each "+" above with two dashes ("--").
+      -->
+      <param name="space" value="color" />
+      <param name="refSource" value="history" />
+      <param name="ref" value="phiX.fasta" ftype="fasta" />
+      <param name="singleOrPairs" value="single" />
+      <param name="reads" value="perm_in4.fastqcssanger" ftype="fastqcssanger" />
+      <param name="valAlign" value="1" />
+      <param name="options" value="preSet" />
+      <param name="bUnmappedRead" value="false" />
+      <output name="output" file="perm_out4.sam" ftype="sam" />
+    </test>
+    <test>
+      <!--
+      PerM command:
+      PerM /afs/bx.psu.edu/depot/data/genome/equCab2/perm_index/equCab2_chrM_color_F2_50.index -1 test-data/perm_in5.fastqcssanger -2 test-data/perm_in6.fastqcssanger -U 90000 -L 10000 +readFormat csfastq -v 3 +seed F2 -o perm_out5.sam +outputFormat sam +noSamHeader | tr '\r' '\n' | tr -cd "[:print:]\t\n " | grep "Reads\|Sub0\|Pairs\|single" | sed 's/.*Reads:,//' | sed 's/\/.*dat,_ Sub0/Sub0/'
+      You need to replace each "+" above with two dashes ("--").
+      -->
+      <param name="space" value="color" />
+      <param name="refSource" value="indexed" />
+      <param name="index" value="equCab2_chrM_F2_50" />
+      <param name="singleOrPairs" value="paired" />
+      <param name="reads1" value="perm_in5.fastqcssanger" ftype="fastqcssanger" />
+      <param name="reads2" value="perm_in6.fastqcssanger" ftype="fastqcssanger" />
+      <param name="upperbound" value="90000" />
+      <param name="lowerbound" value="10000" />
+      <param name="excludeAmbiguousPairs" value="false" />
+      <param name="valAlign" value="3" />
+      <param name="options" value="preSet" />
+      <param name="bUnmappedRead" value="false" />
+      <output name="output" file="perm_out5.sam" ftype="sam" />
+    </test>
+  </tests>
+  <help>
+**What it does**
+
+PerM is a short-read aligner designed to map long SOLiD reads to whole genomes or transcriptomes with high speed. PerM is fully sensitive to alignments with up to four mismatches and highly sensitive to a larger number of mismatches.
+
+**Development team**
+
+PerM is developed by Ting Chen's group, Center of Excellence in Genomic Sciences at the University of Southern California. If you have any questions, please email yanghoch at usc.edu or check the `project page`__.
+
+ .. __: http://code.google.com/p/perm/
+
+**Citation**
+
+PerM: Efficient mapping of short sequencing reads with periodic full sensitive spaced seeds. Bioinformatics, 2009, 25 (19): 2514-2521.
+
+**Input**
+
+The inputs are one or two read files and a reference. Users can either select a pre-indexed reference provided by Galaxy or upload their own.
+
+An uploaded reference file should be in fasta format. Multiple sequences, such as transcripts, should be concatenated, with each sequence preceded by a header line that starts with the ">" character.
+
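+A minimal example of such a multi-sequence fasta file (sequence names are illustrative)::
+
+  >transcript_1
+  ACGTACGTACGT
+  >transcript_2
+  TTGACCAATGGA
+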
+Read files must be in either fastqsanger or fastqcssanger format for use with PerM. Several other starting formats can be converted to one of those two: fastq (any type), color-space fastq, fasta, csfasta, or csfasta plus SOLiD qual.
+
+An uploaded base-space fastq file MUST be checked/transformed with the FASTQ Groomer tool in Galaxy to convert it to the fastqsanger format (this is true even if the original file is in Sanger format).
+
+Uploaded fasta and csfasta files without quality scores can be transformed to fastqsanger by the FASTQ Groomer, which adds pseudo quality scores.
+
+An uploaded csfasta + qual pair can also be transformed into fastqcssanger by solid2fastq.
+
+**Outputs**
+
+The output mapping result is in SAM format, and has the following columns::
+
+    Column  Description
+  --------  --------------------------------------------------------
+   1 QNAME  Query (pair) NAME
+   2 FLAG   bitwise FLAG
+   3 RNAME  Reference sequence NAME
+   4 POS    1-based leftmost POSition/coordinate of clipped sequence
+   5 MAPQ   MAPping Quality (Phred-scaled)
+   6 CIGAR  extended CIGAR string
+   7 MRNM   Mate Reference sequence NaMe ('=' if same as RNAME)
+   8 MPOS   1-based Mate POSition
+   9 ISIZE  Inferred insert SIZE
+  10 SEQ    query SEQuence on the same strand as the reference
+  11 QUAL   query QUALity (ASCII-33 gives the Phred base quality)
+  12 OPT    variable OPTional fields in the format TAG:VTYPE:VALUE
+  12.1 NM   Number of mismatches (SOLiD-specific)
+  12.2 CS   Reads in color space (SOLiD-specific)
+  12.3 CQ   Base qualities in color space (SOLiD-specific)
+
+The flags are as follows::
+
+    Flag  Description
+  ------  -------------------------------------
+  0x0001  the read is paired in sequencing
+  0x0002  the read is mapped in a proper pair
+  0x0004  the query sequence itself is unmapped
+  0x0008  the mate is unmapped
+  0x0010  strand of the query (1 for reverse)
+  0x0020  strand of the mate
+  0x0040  the read is the first read in a pair
+  0x0080  the read is the second read in a pair
+  0x0100  the alignment is not primary
+
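+For example, the FLAG value 16 (0x0010) in the sample output below indicates a read that mapped to the reverse strand of the reference.
+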
+Here is some sample output::
+
+  QNAME	FLAG	RNAME	POS	MAPQ	CIGAR	MRNM	MPOS	ISIZE	SEQ	QUAL	NM	CS	CQ
+  491_28_332_F3   16      ref-1   282734  255     35M     *       0       0       AGTCAAACTCCGAATGCCAATGACTTATCCTTAGG    #%%%%%%%!!%%%!!%%%%%%%%!!%%%%%%%%%%      NM:i:3  CS:Z:C0230202330012130103100230121001212        CQ:Z:###################################
+  491_28_332_F3   16      ref-1   269436  255     35M     *       0       0       AGTCAAACTCCGAATGCCAATGACTTATCCTTAGG    #%%%%%%%!!%%%!!%%%%%%%%!!%%%%%%%%%%      NM:i:3  CS:Z:C0230202330012130103100230121001212        CQ:Z:###################################
+
+An optional output containing the unmapped reads in fastqsanger or fastqcssanger format can be requested; it is produced by default.
+
+**PerM parameter list**
+
+Below is a list of PerM command-line options. Not all of them are exposed in Galaxy's implementation, but all are included for completeness.
+
+The command for single-end::
+
+  PerM [ref_or_index] [read] [options]
+
+The command for paired-end::
+
+  PerM [ref_or_index] -1 [read1] -2 [read2] [options]
+
+The command-line options::
+
+  -A                Output all alignments within the given mismatch threshold, end-to-end.
+  -B                Output the best alignments in terms of mismatches within the given mismatch threshold. [Default]
+  -E                Output only the uniquely mapped reads within the given mismatch threshold.
+  -m                Create the reference index, without reusing the saved index.
+  -s PATH           Save the reference index to accelerate the mapping in the future. If PATH is not specified, the default path will be used.
+  -v INT            Where INT is the number of mismatches allowed in one end. [Default=2]
+  -T INT            Where INT is the length to truncate reads to; e.g. 30 means use only the first 30 bases (signals). Leave blank to use the full read.
+  -o PATH           Where PATH is the output file for the mapping of one read set. PerM's output is in .mapping or .sam format, determined by the extension of PATH; e.g. -o out.sam outputs SAM, while -o out.mapping outputs the .mapping format.
+  -d PATH           Where PATH is the directory for multiple read sets.
+  -u PATH           Print the fastq file of those unmapped reads to the file in PATH.
+  --noSamHeader     Print no SAM header so it is convenient to concatenate multiple SAM output files.
+  --includeReadsWN  Encodes N or "." with A or 3, respectively.
+  --statsOnly       Output the mapping statistics in stdout only, without saving alignments to files.
+  --ignoreQS        Ignore the quality scores in fastq or QUAL files.
+  --seed {F2 | S11 | F3 | F4}    Specify the seed pattern, which has a specific full sensitivity. Check the algorithm page (link below) for seed patterns to balance the sensitivity and running time.
+  --readFormat {fasta | fastq | csfasta | csfastq}    Read in reads in the specified format, instead of guessing according to the extension name.
+  --delimiter CHAR  A character used as the delimiter to separate the read ID from the additional info in the ">" line of fasta or csfasta files.
+
+Paired reads options::
+
+  -e        Exclude ambiguous pairs.
+  -L INT    Lower bound of mate-pair separation.
+  -U INT    Upper bound of mate-pair separation.
+  -1 PATH   The forward reads file path.
+  -2 PATH   The reverse reads file path.
+
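+For example, a paired-end run writing SAM output might look like the following (file names are placeholders)::
+
+  PerM hg18.fa -1 reads_1.fastq -2 reads_2.fastq -U 100000 -L 0 -v 2 --seed F3 --readFormat fastq -o out.sam
+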
+See the PerM `algorithm page`__ for information on algorithms and seeds.
+
+ .. __: http://code.google.com/p/perm/wiki/Algorithms
+  </help>
+</tool>
diff --git a/tools/sr_mapping/bfast_wrapper.py b/tools/sr_mapping/bfast_wrapper.py
new file mode 100644
index 0000000..f240a64
--- /dev/null
+++ b/tools/sr_mapping/bfast_wrapper.py
@@ -0,0 +1,351 @@
+#!/usr/bin/env python
+"""
+Runs BFAST on single-end or paired-end data.
+TODO: more documentation
+
+TODO:
+    - auto-detect gzip or bz2
+    - split options (?)
+    - queue lengths (?)
+    - assumes reference always has been indexed
+    - main and secondary indexes
+    - scoring matrix file ?
+    - read group file ?
+
+usage: bfast_wrapper.py [options]
+    -r, --ref=r: The reference genome to use or index
+    -f, --fastq=f: The fastq file to use for the mapping
+    -F, --output=F: The file to save the output (SAM format)
+    -s, --fileSource=s: Whether to use a previously indexed reference sequence or one from history (indexed or history)
+    -p, --params=p: Parameter setting to use (pre_set or full)
+    -n, --numThreads=n: The number of threads to use
+    -A, --space=A: The encoding space (0: base 1: color)
+    -o, --offsets=o: The offsets for 'match'
+    -l, --loadAllIndexes=l: Load all indexes into memory
+    -k, --keySize=k: truncate key size in 'match'
+    -K, --maxKeyMatches=K: the maximum number of matches to allow before a key is ignored
+    -M, --maxNumMatches=M: the maximum number of matches to allow before the read is discarded
+    -w, --whichStrand=w: the strands to consider (0: both 1: forward 2: reverse)
+    -t, --timing=t: output timing information to stderr
+    -u, --ungapped=u: perform ungapped local alignment
+    -U, --unconstrained=U: perform local alignment without mask constraints
+    -O, --offset=O: the number of bases before and after each hit to consider in local alignment
+    -q, --avgMismatchQuality=q: average mismatch quality
+    -a, --algorithm=a: post processing algorithm (0: no filtering, 1: all passing filters, 2: unique, 3: best scoring unique, 4: best score all)
+    -P, --disallowPairing=P: do not choose alignments based on pairing
+    -R, --reverse=R: paired end reads are given on reverse strands
+    -z, --random=z: output a random best scoring alignment
+    -D, --dbkey=D: Dbkey for reference genome
+    -H, --suppressHeader=H: Suppress the sam header
+"""
+
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def stop_err( msg ):
+    sys.stderr.write( '%s\n' % msg )
+    sys.exit( 1 )
+
+
+def __main__():
+    parser = optparse.OptionParser()
+    parser.add_option( '-r', '--ref', dest='ref', help='The reference genome to index and use' )
+    parser.add_option( '-f', '--fastq', dest='fastq', help='The fastq file to use for the mapping' )
+    parser.add_option( '-F', '--output', dest='output', help='The file to save the output (SAM format)' )
+    parser.add_option( '-A', '--space', dest='space', type="choice", default='0', choices=('0', '1'), help='The encoding space (0: base 1: color)' )
+    parser.add_option( '-H', '--suppressHeader', action="store_true", dest='suppressHeader', default=False, help='Suppress header' )
+    parser.add_option( '-n', '--numThreads', dest='numThreads', type="int", default="1", help='The number of threads to use' )
+    parser.add_option( '-t', '--timing', action="store_true", default=False, dest='timing', help='output timing information to stderr' )
+    parser.add_option( '-l', '--loadAllIndexes', action="store_true", default=False, dest='loadAllIndexes', help='Load all indexes into memory' )
+    parser.add_option( '-m', '--indexMask', dest='indexMask', help='String containing info on how to build custom indexes' )
+    parser.add_option( "-b", "--buildIndex", action="store_true", dest="buildIndex", default=False, help='String containing info on how to build custom indexes' )
+    parser.add_option( "--indexRepeatMasker", action="store_true", dest="indexRepeatMasker", default=False, help='Do not index lower case sequences. Such as those created by RepeatMasker' )
+    parser.add_option( '--indexContigOptions', dest='indexContigOptions', default="", help='The contig range options to use for the indexing' )
+    parser.add_option( '--indexExonsFileName', dest='indexExonsFileName', default="", help='The exons file to use for the indexing' )
+
+    parser.add_option( '-o', '--offsets', dest='offsets', default="", help='The offsets for \'match\'' )
+    parser.add_option( '-k', '--keySize', dest='keySize', type="int", default="-1", help='truncate key size in \'match\'' )
+    parser.add_option( '-K', '--maxKeyMatches', dest='maxKeyMatches', type="int", default="-1", help='the maximum number of matches to allow before a key is ignored' )
+    parser.add_option( '-M', '--maxNumMatches', dest='maxNumMatches', type="int", default="-1", help='the maximum number of matches to allow before the read is discarded' )
+    parser.add_option( '-w', '--whichStrand', dest='whichStrand', type="choice", default='0', choices=('0', '1', '2'), help='the strands to consider (0: both 1: forward 2: reverse)' )
+
+    parser.add_option( '--scoringMatrixFileName', dest='scoringMatrixFileName', help='Scoring Matrix file used to score the alignments' )
+    parser.add_option( '-u', '--ungapped', dest='ungapped', action="store_true", default=False, help='perform ungapped local alignment' )
+    parser.add_option( '-U', '--unconstrained', dest='unconstrained', action="store_true", default=False, help='perform local alignment without mask constraints' )
+    parser.add_option( '-O', '--offset', dest='offset', type="int", default="0", help='the number of bases before and after each hit to consider in local alignment' )
+    parser.add_option( '-q', '--avgMismatchQuality', type="int", default="-1", dest='avgMismatchQuality', help='average mismatch quality' )
+
+    parser.add_option( '-a', '--algorithm', dest='algorithm', default='0', type="choice", choices=('0', '1', '2', '3', '4'), help='post processing algorithm (0: no filtering, 1: all passing filters, 2: unique, 3: best scoring unique, 4: best score all)' )
+    parser.add_option( '--unpaired', dest='unpaired', action="store_true", default=False, help='do not choose alignments based on pairing' )
+    parser.add_option( '--reverseStrand', dest='reverseStrand', action="store_true", default=False, help='paired end reads are given on reverse strands' )
+    parser.add_option( '--pairedEndInfer', dest='pairedEndInfer', action="store_true", default=False, help='break ties for paired end reads by estimating the insert size distribution' )
+    parser.add_option( '--randomBest', dest='randomBest', action="store_true", default=False, help='output a random best scoring alignment' )
+
+    (options, args) = parser.parse_args()
+
+    # output version # of tool
+    try:
+        tmp = tempfile.NamedTemporaryFile().name
+        tmp_stdout = open( tmp, 'wb' )
+        proc = subprocess.Popen( args='bfast 2>&1', shell=True, stdout=tmp_stdout )
+        tmp_stdout.close()
+        returncode = proc.wait()
+        stdout = None
+        for line in open( tmp_stdout.name, 'rb' ):
+            if line.lower().find( 'version' ) >= 0:
+                stdout = line.strip()
+                break
+        if stdout:
+            sys.stdout.write( '%s\n' % stdout )
+        else:
+            raise Exception( 'version string not found' )
+    except Exception:
+        sys.stdout.write( 'Could not determine BFAST version\n' )
+
+    buffsize = 1048576
+
+    # make temp directory for bfast, requires trailing slash
+    tmp_dir = '%s/' % tempfile.mkdtemp()
+
+    # 'generic' options used in all bfast commands here
+    if options.timing:
+        all_cmd_options = "-t"
+    else:
+        all_cmd_options = ""
+
+    try:
+        if options.buildIndex:
+            reference_filepath = tempfile.NamedTemporaryFile( dir=tmp_dir, suffix='.fa' ).name
+            # build bfast indexes
+            os.symlink( options.ref, reference_filepath )
+
+            # bfast fast2brg
+            try:
+                nuc_space = [ "0" ]
+                if options.space == "1":
+                    # color space localalign appears to require nuc space brg
+                    nuc_space.append( "1" )
+                for space in nuc_space:
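+                    # convert the fasta reference to BFAST's binary reference (.brg) for this encoding space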
+                    cmd = 'bfast fasta2brg -f "%s" -A "%s" %s' % ( reference_filepath, space, all_cmd_options )
+                    tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
+                    tmp_stderr = open( tmp, 'wb' )
+                    proc = subprocess.Popen( args=cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
+                    returncode = proc.wait()
+                    tmp_stderr.close()
+                    # get stderr, allowing for case where it's very large
+                    tmp_stderr = open( tmp, 'rb' )
+                    stderr = ''
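+                    # read stderr in buffsize chunks; an empty or short read indicates EOF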
+                    try:
+                        while True:
+                            stderr += tmp_stderr.read( buffsize )
+                            if not stderr or len( stderr ) % buffsize != 0:
+                                break
+                    except OverflowError:
+                        pass
+                    tmp_stderr.close()
+                    if returncode != 0:
+                        raise Exception(stderr)
+            except Exception as e:
+                raise Exception('Error in \'bfast fasta2brg\'.\n' + str( e ))
+
+            # bfast index
+            try:
+                all_index_cmds = 'bfast index %s -f "%s" -A "%s" -n "%s"' % ( all_cmd_options, reference_filepath, options.space, options.numThreads )
+
+                if options.indexRepeatMasker:
+                    all_index_cmds += " -R"
+
+                if options.indexContigOptions:
+                    index_contig_options = [ int(_) for _ in options.indexContigOptions.split( ',' ) ]
+                    if index_contig_options[0] >= 0:
+                        all_index_cmds += ' -s "%s"' % index_contig_options[0]
+                    if index_contig_options[1] >= 0:
+                        all_index_cmds += ' -S "%s"' % index_contig_options[1]
+                    if index_contig_options[2] >= 0:
+                        all_index_cmds += ' -e "%s"' % index_contig_options[2]
+                    if index_contig_options[3] >= 0:
+                        all_index_cmds += ' -E "%s"' % index_contig_options[3]
+                elif options.indexExonsFileName:
+                    all_index_cmds += ' -x "%s"' % options.indexExonsFileName
+
+                index_count = 1
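+                # options.indexMask is a comma-separated list of "mask:hash_width" pairs assembled by the XML wrapper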
+                for mask, hash_width in [ mask.split( ':' ) for mask in options.indexMask.split( ',' ) ]:
+                    cmd = '%s -m "%s" -w "%s" -i "%i"' % ( all_index_cmds, mask, hash_width, index_count )
+                    tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
+                    tmp_stderr = open( tmp, 'wb' )
+                    proc = subprocess.Popen( args=cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
+                    returncode = proc.wait()
+                    tmp_stderr.close()
+                    # get stderr, allowing for case where it's very large
+                    tmp_stderr = open( tmp, 'rb' )
+                    stderr = ''
+                    try:
+                        while True:
+                            stderr += tmp_stderr.read( buffsize )
+                            if not stderr or len( stderr ) % buffsize != 0:
+                                break
+                    except OverflowError:
+                        pass
+                    tmp_stderr.close()
+                    if returncode != 0:
+                        raise Exception(stderr)
+                    index_count += 1
+            except Exception as e:
+                raise Exception('Error in \'bfast index\'.\n' + str( e ))
+
+        else:
+            reference_filepath = options.ref
+        assert reference_filepath and os.path.exists( reference_filepath ), 'A valid genome reference was not provided.'
+
+        # set up aligning and generate aligning command options
+        # set up temp output files
+        tmp_bmf = tempfile.NamedTemporaryFile( dir=tmp_dir )
+        tmp_bmf_name = tmp_bmf.name
+        tmp_bmf.close()
+        tmp_baf = tempfile.NamedTemporaryFile( dir=tmp_dir )
+        tmp_baf_name = tmp_baf.name
+        tmp_baf.close()
+
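+        # three-stage BFAST pipeline: 'match' finds candidate locations (.bmf),
+        # 'localalign' aligns them (.baf), and 'postprocess' filters and emits SAM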
+        bfast_match_cmd = 'bfast match -f "%s" -r "%s" -n "%s" -A "%s" -T "%s" -w "%s" %s' % ( reference_filepath, options.fastq, options.numThreads, options.space, tmp_dir, options.whichStrand, all_cmd_options )
+        bfast_localalign_cmd = 'bfast localalign -f "%s" -m "%s" -n "%s" -A "%s" -o "%s" %s' % ( reference_filepath, tmp_bmf_name, options.numThreads, options.space, options.offset, all_cmd_options )
+        bfast_postprocess_cmd = 'bfast postprocess -O 1 -f "%s" -i "%s" -n "%s" -A "%s" -a "%s" %s' % ( reference_filepath, tmp_baf_name, options.numThreads, options.space, options.algorithm, all_cmd_options )
+
+        if options.offsets:
+            bfast_match_cmd += ' -o "%s"' % options.offsets
+        if options.keySize >= 0:
+            bfast_match_cmd += ' -k "%s"' % options.keySize
+        if options.maxKeyMatches >= 0:
+            bfast_match_cmd += ' -K "%s"' % options.maxKeyMatches
+        if options.maxNumMatches >= 0:
+            bfast_match_cmd += ' -M "%s"' % options.maxNumMatches
+            bfast_localalign_cmd += ' -M "%s"' % options.maxNumMatches
+        if options.scoringMatrixFileName:
+            bfast_localalign_cmd += ' -x "%s"' % options.scoringMatrixFileName
+            bfast_postprocess_cmd += ' -x "%s"' % options.scoringMatrixFileName
+        if options.ungapped:
+            bfast_localalign_cmd += ' -u'
+        if options.unconstrained:
+            bfast_localalign_cmd += ' -U'
+        if options.avgMismatchQuality >= 0:
+            bfast_localalign_cmd += ' -q "%s"' % options.avgMismatchQuality
+            bfast_postprocess_cmd += ' -q "%s"' % options.avgMismatchQuality
+        if options.algorithm == '3':  # algorithm is parsed as a string choice
+            if options.pairedEndInfer:
+                bfast_postprocess_cmd += ' -P'
+            if options.randomBest:
+                bfast_postprocess_cmd += ' -z'
+        if options.unpaired:
+            bfast_postprocess_cmd += ' -U'
+        if options.reverseStrand:
+            bfast_postprocess_cmd += ' -R'
+
+        # instead of using temp files, should we stream through pipes?
+        bfast_match_cmd += " > %s" % tmp_bmf_name
+        bfast_localalign_cmd += " > %s" % tmp_baf_name
+        bfast_postprocess_cmd += " > %s" % options.output
+
+        # need to nest try-except in try-finally to handle 2.4
+        try:
+            # bfast 'match'
+            try:
+                tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
+                tmp_stderr = open( tmp, 'wb' )
+                proc = subprocess.Popen( args=bfast_match_cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
+                returncode = proc.wait()
+                tmp_stderr.close()
+                # get stderr, allowing for case where it's very large
+                tmp_stderr = open( tmp, 'rb' )
+                stderr = ''
+                try:
+                    while True:
+                        stderr += tmp_stderr.read( buffsize )
+                        if not stderr or len( stderr ) % buffsize != 0:
+                            break
+                except OverflowError:
+                    pass
+                tmp_stderr.close()
+                if returncode != 0:
+                    raise Exception(stderr)
+            except Exception as e:
+                raise Exception('Error in \'bfast match\'. \n' + str( e ))
+            # bfast 'localalign'
+            try:
+                tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
+                tmp_stderr = open( tmp, 'wb' )
+                proc = subprocess.Popen( args=bfast_localalign_cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
+                returncode = proc.wait()
+                tmp_stderr.close()
+                # get stderr, allowing for case where it's very large
+                tmp_stderr = open( tmp, 'rb' )
+                stderr = ''
+                try:
+                    while True:
+                        stderr += tmp_stderr.read( buffsize )
+                        if not stderr or len( stderr ) % buffsize != 0:
+                            break
+                except OverflowError:
+                    pass
+                tmp_stderr.close()
+                if returncode != 0:
+                    raise Exception(stderr)
+            except Exception as e:
+                raise Exception('Error in \'bfast localalign\'. \n' + str( e ))
+            # bfast 'postprocess'
+            try:
+                tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
+                tmp_stderr = open( tmp, 'wb' )
+                proc = subprocess.Popen( args=bfast_postprocess_cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
+                returncode = proc.wait()
+                tmp_stderr.close()
+                # get stderr, allowing for case where it's very large
+                tmp_stderr = open( tmp, 'rb' )
+                stderr = ''
+                try:
+                    while True:
+                        stderr += tmp_stderr.read( buffsize )
+                        if not stderr or len( stderr ) % buffsize != 0:
+                            break
+                except OverflowError:
+                    pass
+                tmp_stderr.close()
+                if returncode != 0:
+                    raise Exception(stderr)
+            except Exception as e:
+                raise Exception('Error in \'bfast postprocess\'. \n' + str( e ))
+            # remove header if necessary
+            if options.suppressHeader:
+                tmp_out = tempfile.NamedTemporaryFile( dir=tmp_dir )
+                tmp_out_name = tmp_out.name
+                tmp_out.close()
+                try:
+                    shutil.move( options.output, tmp_out_name )
+                except Exception as e:
+                    raise Exception('Error moving output file before removing headers. \n' + str( e ))
+                fout = open( options.output, 'w' )
+                for line in open( tmp_out.name, 'r' ):
+                    if len( line ) < 3 or line[0:3] not in [ '@HD', '@SQ', '@RG', '@PG', '@CO' ]:
+                        fout.write( line )
+                fout.close()
+            # check that there are results in the output file
+            if os.path.getsize( options.output ) > 0:
+                if "0" == options.space:
+                    sys.stdout.write( 'BFAST run on Base Space data' )
+                else:
+                    sys.stdout.write( 'BFAST run on Color Space data' )
+            else:
+                raise Exception('The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.')
+        except Exception as e:
+            stop_err( 'The alignment failed.\n' + str( e ) )
+    finally:
+        # clean up temp dir
+        if os.path.exists( tmp_dir ):
+            shutil.rmtree( tmp_dir )
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/sr_mapping/bfast_wrapper.xml b/tools/sr_mapping/bfast_wrapper.xml
new file mode 100644
index 0000000..45426e0
--- /dev/null
+++ b/tools/sr_mapping/bfast_wrapper.xml
@@ -0,0 +1,384 @@
+<tool id="bfast_wrapper" name="Map with BFAST" version="0.1.3">
+  <description></description>
+  <command interpreter="python">bfast_wrapper.py
+    --numThreads="\${GALAXY_SLOTS:-4}"
+    --fastq="$input1"
+    #if $input1.extension.startswith( "fastqcs" ):
+        ##if extension starts with fastqcs, then we have a color space file
+        --space="1" ##color space
+    #else
+        --space="0"
+    #end if
+    --output="$output"
+    $suppressHeader
+    
+    #if $refGenomeSource.refGenomeSource_type == "history":
+      ##build indexes on the fly
+      --buildIndex
+      --ref="${refGenomeSource.ownFile}"
+      --indexMask="${",".join( [ "%s:%s" % ( str( custom_index.get( 'mask' ) ).strip(), str( custom_index.get( 'hash_width' ) ).strip() ) for custom_index in $refGenomeSource.custom_index ] )}"
+      ${refGenomeSource.indexing_repeatmasker}
+      #if $refGenomeSource.indexing_option.indexing_option_selector == "contig_offset":
+        --indexContigOptions="${refGenomeSource.indexing_option.start_contig},${refGenomeSource.indexing_option.start_pos},${refGenomeSource.indexing_option.end_contig},${refGenomeSource.indexing_option.end_pos}"
+      #elif $refGenomeSource.indexing_option.indexing_option_selector == "exons_file":
+        --indexExonsFileName="${refGenomeSource.indexing_option.exons_file}"
+      #end if
+    #else:
+      ##use precomputed indexes
+      --ref="${ refGenomeSource.indices.fields.path }"
+    #end if
+    
+    #if $params.source_select == "full":
+      --offsets="$params.offsets"
+      --keySize="$params.keySize"
+      --maxKeyMatches="$params.maxKeyMatches"
+      --maxNumMatches="$params.maxNumMatches"
+      --whichStrand="$params.whichStrand"
+      
+      #if str( $params.scoringMatrixFileName ) != 'None':
+        --scoringMatrixFileName="$params.scoringMatrixFileName"
+      #end if
+      ${params.ungapped}
+      ${params.unconstrained}
+      --offset="${params.offset}"
+      --avgMismatchQuality="${params.avgMismatchQuality}"
+      
+      --algorithm="${params.localalign_params.algorithm}"
+      ${params.unpaired}
+      ${params.reverseStrand}
+      #if $params.localalign_params.algorithm == "3":
+        ${params.localalign_params.pairedEndInfer}
+        ${params.localalign_params.randomBest}
+      #end if
+    #end if
+  </command>
+  <inputs>
+    <param name="input1" type="data" format="fastqsanger,fastqcssanger" label="FASTQ file" help="Must have Sanger-scaled quality values with ASCII offset 33"/>
+    <conditional name="refGenomeSource">
+      <param name="refGenomeSource_type" type="select" label="Will you select a reference genome from your history or use a built-in index?">
+        <option value="indexed">Use a built-in index</option>
+        <option value="history">Use one from the history</option>
+      </param>
+      <when value="indexed">
+        <param name="indices" type="select" label="Select a reference genome index set">
+          <options from_data_table="bfast_indexes">
+            <filter type="multiple_splitter" column="2" separator=","/>
+            <filter type="param_value" column="2" ref="input1" ref_attribute="extension"/>
+            <filter type="sort_by" column="3"/>
+            <validator type="no_options" message="No indexes are available for the selected input dataset"/>
+          </options>
+        </param>
+      </when>
+      <when value="history">
+        <param name="ownFile" type="data" format="fasta" metadata_name="dbkey" label="Select a reference from history" />
+        <repeat name="custom_index" title="Custom indice" min="1" >
+            <param name="mask" type="text" value="" label="Specify the mask" size="20">
+              <!-- <validator type="no_options" message="No indexes are available for the selected input dataset"/> TODO: needs an int validator here, or a regex matching only 0s and 1s -->
+            </param>
+            <param name="hash_width" type="integer" value="" label="Hash Width" />
+        </repeat>
+        <param name="indexing_repeatmasker" type="boolean" truevalue="--indexRepeatMasker" falsevalue="" checked="False" label="Do not index lower case sequences" help="Such as those created by RepeatMasker"/>
+        <conditional name="indexing_option">
+          <param name="indexing_option_selector" type="select" label="BFAST indexing settings to use" help="For most indexing needs use default settings. If you want full control use the other options.">
+            <option value="default">Default</option>
+            <option value="contig_offset">Contig Offset</option>
+            <option value="exons_file">Exons file</option>
+          </param>
+          <when value="default">
+            <!-- nothing here -->
+          </when>
+          <when value="contig_offset">
+            <param name="start_contig" type="integer" value="-1" label="Start Contig" help="Specifies the first contig to include when building indexes. (advanced users only)" />
+            <param name="start_pos" type="integer" value="-1" label="Start Position" help="Specifies the first position in the first contig to include when building indexes. (advanced users only)" />
+            <param name="end_contig" type="integer" value="-1" label="End Contig" help="Specifies the last contig to include when building indexes. (advanced users only)" />
+            <param name="end_pos" type="integer" value="-1" label="End Position" help="Specifies the last position in the last contig to include when building indexes. (advanced users only)" />
+          </when>
+          <when value="exons_file">
+            <param name="exons_file" type="data" format="tabular" label="Select an exons file from history" help="See BFAST manual for file format requirements. (advanced users only)"/>
+          </when>
+        </conditional>
+      </when>
+    </conditional>
+    <conditional name="params">
+      <param name="source_select" type="select" label="BFAST matching settings to use" help="For most mapping needs use Commonly Used settings. If you want full control use Full Parameter List">
+        <option value="pre_set">Commonly Used</option>
+        <option value="full">Full Parameter List</option>
+      </param>
+      <when value="pre_set">
+        <!-- nothing here -->
+      </when>
+      <when value="full">
+        <param name="offsets" type="text" value="" label="The offsets for 'bfast match'" help="Set if not all offsets from the 5' end of the read are to be examined (advanced users only)" />
+        <param name="keySize" type="integer" value="-1" label="Truncate key size in 'match'" help="Set this to reduce the effective key size of all indexes in 'bfast match' (advanced users only)" />
+        <param name="maxKeyMatches" type="integer" value="8" label="The maximum number of matches to allow before a key is ignored" help="Lower values will result in more unique regions being examined, while larger values will allow include repetitive regions" />
+        <param name="maxNumMatches" type="integer" value="384" label="The maximum number of matches to allow before a read is discarded" help="Larger values will allow more hits to be examined" />
+        <param name="whichStrand" type="select" label="The strands to consider" help="Both strands, forward strand only, or reverse strand only">
+          <option value="0">Both strands</option>
+          <option value="1">Forward strand only</option>
+          <option value="2">Reverse strand only</option>
+        </param>
+        
+        <param name="scoringMatrixFileName" type="data" format="text" optional="True" label="Scoring Matrix file used to score the alignments" help="See BFAST manual for file format requirements. (advanced users only)"/>
+        <param name="ungapped" type="boolean" truevalue="--ungapped" falsevalue="" checked="no" label="Perform ungapped local alignment" help="Performing ungapped local alignment will not consider indels while providing a significant speed increase" />
+        <param name="unconstrained" type="boolean" truevalue="--unconstrained" falsevalue="" checked="no" label="Perform unconstrained local alignment" help="Performing unconstrained local alignment will not use mask constraints at the cost of speed" />
+        <param name="offset" type="integer" value="20" label="The number of bases before and after each hit to consider in local alignment" help="Larger values will allow for larger insertions and deletions to be detected at the cost of speed" />
+        <param name="avgMismatchQuality" type="integer" value="10" label="The average mismatch quality" help="This can be used as a scaling factor for mapping quality (advanced users only)" />
+        
+        <conditional name="localalign_params">
+          <param name="algorithm" type="select" label="The post processing algorithm" help="This determines how reads with multiple candidate alignments are returned.  Unique alignments will return an alignment if the read has only one candidate alignment.  Uniquely best scoring alignments will return one alignment for a read if that alignment has a better alignment score than the rest of the candidate alignments.  All best scoring alignments will return all alignments that have the best [...]
+              <option value="0" selected="True">No filtering</option>
+              <option value="1">All alignments that pass filtering</option>
+              <option value="2">Unique alignments</option>
+              <option value="3">Uniquely best scoring alignments</option>
+              <option value="4">All best scoring alignments</option>
+          </param>
+          <when value="0">
+            <!-- nothing here -->
+          </when>
+          <when value="1">
+            <!-- nothing here -->
+          </when>
+          <when value="2">
+            <!-- nothing here -->
+          </when>
+          <when value="4">
+            <!-- nothing here -->
+          </when>
+          <when value="3">
+            <param name="pairedEndInfer" type="boolean" truevalue="--pairedEndInfer" falsevalue="" checked="no" label="pairedEndInfer" help="break ties when one end of a paired end read by estimating the insert size distribution" />
+            <param name="randomBest" type="boolean" truevalue="--randomBest" falsevalue="" checked="no" label="Random alignments" help="output a random best scoring alignment (advanced users only)" />
+          </when>
+        </conditional>
+        <param name="unpaired" type="boolean" truevalue="--unpaired" falsevalue="" checked="no" label="Disallow pairing" help="do not choose alignments based on pairing" />
+        <param name="reverseStrand" type="boolean" truevalue="--reverseStrand" falsevalue="" checked="no" label="Reverse paired ends" help="paired end reads are given on reverse strands" />
+        
+      </when>
+    </conditional>
+    <param name="suppressHeader" type="boolean" truevalue="--suppressHeader" falsevalue="" checked="False" label="Suppress the header in the output SAM file" help="BFAST produces SAM with several lines of header information" />
+  </inputs>
+  <outputs>
+    <data format="sam" name="output" label="${tool.name} on ${on_string}: mapped reads">
+      <actions>
+        <conditional name="refGenomeSource.refGenomeSource_type">
+          <when value="indexed">
+            <action type="metadata" name="dbkey">
+              <option type="from_data_table" column="1" name="bfast_indexes">
+                <filter type="param_value" ref="refGenomeSource.indices" column="0" />
+              </option>
+            </action>
+          </when>
+          <when value="history">
+            <action type="metadata" name="dbkey">
+              <option type="from_param" name="refGenomeSource.ownFile" param_attribute="dbkey" />
+            </action>
+          </when>
+        </conditional>
+      </actions>
+    </data>
+  </outputs>
+  <help>
+**What it does**
+
+BFAST facilitates the fast and accurate mapping of short reads to reference sequences. Some advantages of BFAST include:
+
+* Speed: enables billions of short reads to be mapped quickly.
+* Accuracy: a priori probabilities for mapping reads with a defined set of variants.
+* An easy way to measurably tune accuracy at the expense of speed.
+
+Specifically, BFAST was designed to facilitate whole-genome resequencing, where mapping billions of short reads with variants is of utmost importance.
+
+BFAST supports both Illumina and ABI SOLiD data, as well as any other next-generation sequencing technology (454, Helicos), with particular emphasis on sensitivity towards errors, SNPs, and especially indels. Other algorithms take shortcuts by ignoring errors and certain types of variants (indels), and may even require further alignment, all in order to be the "fastest" (but still not complete). BFAST can be tuned to find variants regardless of the error rate, polymorphism rate, or other factors.
+
+------
+
+Please cite the website "http://bfast.sourceforge.net" as well as the accompanying 
+papers:
+
+Homer N, Merriman B, Nelson SF.
+BFAST: An alignment tool for large scale genome resequencing.
+PMID: 19907642
+PLoS ONE. 2009 4(11): e7767.  
+http://dx.doi.org/10.1371/journal.pone.0007767  
+
+Homer N, Merriman B, Nelson SF.
+Local alignment of two-base encoded DNA sequence.
+BMC Bioinformatics. 2009 Jun 9;10(1):175.
+PMID: 19508732 
+http://dx.doi.org/10.1186/1471-2105-10-175
+
+------
+
+**Know what you are doing**
+
+.. class:: warningmark
+
+There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words, running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
+
+.. __: http://bfast.sourceforge.net/
+
+------
+
+**Input formats**
+
+BFAST accepts files in Sanger FASTQ format. Use the FASTQ Groomer to prepare your files.
+
+------
+
+**Outputs**
+
+The output is in SAM format, and has the following columns::
+
+    Column  Description
+  --------  --------------------------------------------------------
+  1  QNAME  Query (pair) NAME
+  2  FLAG   bitwise FLAG
+  3  RNAME  Reference sequence NAME
+  4  POS    1-based leftmost POSition/coordinate of clipped sequence
+  5  MAPQ   MAPping Quality (Phred-scaled)
+  6  CIGAR  extended CIGAR string
+  7  MRNM   Mate Reference sequence NaMe ('=' if same as RNAME)
+  8  MPOS   1-based Mate POSition
+  9  ISIZE  Inferred insert SIZE
+  10 SEQ    query SEQuence on the same strand as the reference
+  11 QUAL   query QUALity (ASCII-33 gives the Phred base quality)
+  12 OPT    variable OPTional fields in the format TAG:VTYPE:VALUE
+
+The flags are as follows::
+
+    Flag  Description
+  ------  -------------------------------------
+  0x0001  the read is paired in sequencing
+  0x0002  the read is mapped in a proper pair
+  0x0004  the query sequence itself is unmapped
+  0x0008  the mate is unmapped
+  0x0010  strand of the query (1 for reverse)
+  0x0020  strand of the mate
+  0x0040  the read is the first read in a pair
+  0x0080  the read is the second read in a pair
+  0x0100  the alignment is not primary
+
+It looks like this (scroll sideways to see the entire example)::
+
+  QNAME  FLAG  RNAME  POS  MAPQ  CIGAR  MRNM  MPOS  ISIZE  SEQ  QUAL  OPT
+  HWI-EAS91_1_30788AAXX:1:1:1761:343  4  *  0  0  *  *  0  0  AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG  hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh
+  HWI-EAS91_1_30788AAXX:1:1:1578:331  4  *  0  0  *  *  0  0  GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG  hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh
+
+-------
+
+**BFAST settings**
+
+All of the options have default values, and you can change any of them. Most of BFAST's options have been implemented here.
+
+------
+
+**BFAST parameter list**
+
+This is an exhaustive list of BFAST options:
+
+For **match**::
+
+  -o  STRING  Specifies the offset [Use all]
+  -l          Specifies to load all main or secondary indexes into memory
+  -A  INT     0: NT space 1: Color space [0]
+  -k  INT     Specifies to truncate all indexes to have the given key size
+              (must be greater than the hash width) [Not Using]
+  -K  INT     Specifies the maximum number of matches to allow before a key
+              is ignored [8]
+  -M  INT     Specifies the maximum total number of matches to consider
+              before the read is discarded [384]
+  -w  INT     0: consider both strands 1: forward strand only 2: reverse
+              strand only [0]
+  -n  INT     Specifies the number of threads to use [1]
+  -t          Specifies to output timing information
+
+For **localalign**::
+
+  -x  FILE    Specifies the file name storing the scoring matrix
+  -u          Do ungapped local alignment (the default is gapped)
+  -U          Do not use mask constraints from the match step
+  -A  INT     0: NT space 1: Color space [0]
+  -o  INT     Specifies the number of bases before and after the match to
+              include in the reference genome
+  -M  INT     Specifies the maximum total number of matches to consider
+              before the read is discarded [384]
+  -q  INT     Specifies the average mismatch quality
+  -n  INT     Specifies the number of threads to use [1]
+  -t          Specifies to output timing information
+
+For **postprocess**::
+
+  -a  INT     Specifies the algorithm to choose the alignment for each end of the read:
+                0: No filtering will occur
+                1: All alignments that pass the filters will be output
+                2: Only consider reads that have been aligned uniquely
+                3: Choose uniquely the alignment with the best score
+                4: Choose all alignments with the best score
+  -A  INT     0: NT space 1: Color space [0]
+  -U          Specifies that pairing should not be performed
+  -R          Specifies that paired reads are on opposite strands
+  -q  INT     Specifies the average mismatch quality
+  -x  FILE    Specifies the file name storing the scoring matrix
+  -z          Specifies to output a random best scoring alignment (with -a 3)
+  -r  FILE    Specifies to add the RG in the specified file to the SAM
+              header and updates the RG tag (and LB/PU tags if present) in
+              the reads (SAM only)
+  -n  INT     Specifies the number of threads to use [1]
+  -t          Specifies to output timing information
+
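+Run outside of Galaxy, the three stages chain together roughly as follows (file names and thread count are placeholders; this sketch mirrors what the wrapper script above does)::
+
+  bfast match -f ref.fa -r reads.fastq -A 0 -n 4 > out.bmf
+  bfast localalign -f ref.fa -m out.bmf -A 0 -n 4 > out.baf
+  bfast postprocess -f ref.fa -i out.baf -A 0 -a 3 > out.sam
+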
+  </help>
+  <requirements>
+    <requirement type="package">bfast</requirement>
+  </requirements>
+  <tests>
+    <test>
+      <param name="input1" ftype="fastqsanger" value="random_phiX_1.fastqsanger" />
+      <param name="refGenomeSource_type" value="history" />
+      <param name="ownFile" ftype="fasta" value="phiX.fasta" />
+      <param name="mask" value="111111111111111111" />
+      <param name="hash_width" value="14" />
+      <param name="source_select" value="pre_set" />
+      <param name="indexing_repeatmasker" value="False" />
+      <param name="indexing_option_selector" value="default" />
+      <param name="suppressHeader" value="" />
+      <output name="output" ftype="sam" file="bfast_out1.sam" />
+    </test>
+    <test>
+      <param name="input1" ftype="fastqsanger" value="random_phiX_1.fastqsanger"/>
+      <param name="refGenomeSource_type" value="history" />
+      <param name="ownFile" ftype="fasta" value="phiX.fasta" />
+      <param name="mask" value="111111111111111111" />
+      <param name="hash_width" value="14" />
+      <param name="source_select" value="pre_set" />
+      <param name="indexing_repeatmasker" value="False" />
+      <param name="indexing_option_selector" value="default" />
+      <param name="suppressHeader" value="--suppressHeader" />
+      <output name="output" ftype="sam" file="bfast_out1.sam" lines_diff="3" /><!--  3 headers exist in compare file, but headers are suppressed -->
+    </test>
+    <test>
+      <param name="input1" ftype="fastqcssanger" value="random_phiX_1.fastqcssanger" />
+      <param name="refGenomeSource_type" value="history" />
+      <param name="ownFile" ftype="fasta" value="phiX.fasta" />
+      <param name="mask" value="111111111111111111" />
+      <param name="hash_width" value="14" />
+      <param name="source_select" value="pre_set" />
+      <param name="indexing_repeatmasker" value="False" />
+      <param name="indexing_option_selector" value="default" />
+      <param name="suppressHeader" value="" />
+      <output name="output" ftype="sam" file="bfast_out2.sam" />
+    </test>
+    <!-- test of pre-indexed data now -->
+    <test>
+      <param name="input1" ftype="fastqsanger" value="random_phiX_1.fastqsanger" />
+      <param name="refGenomeSource_type" value="indexed" />
+      <param name="indices" value="phiX_nt_50" />
+      <param name="source_select" value="pre_set" />
+      <param name="suppressHeader" value="" />
+      <output name="output" ftype="sam" file="bfast_out3.sam" lines_diff="2" /><!-- MD:Z:11T38 instead of MD:Z:50 on one line-->
+    </test>
+  </tests>
+
+  <citations>
+    <citation type="doi">10.1371/journal.pone.0007767</citation>
+  </citations>
+
+</tool>
diff --git a/tools/sr_mapping/fastq_statistics.xml b/tools/sr_mapping/fastq_statistics.xml
new file mode 100755
index 0000000..16c37b2
--- /dev/null
+++ b/tools/sr_mapping/fastq_statistics.xml
@@ -0,0 +1,94 @@
+<tool id="cshl_fastq_statistics" name="FASTQ Statistics" version="1.0.0">
+  <description>for Solexa files</description>
+  <command>cat $input | solexa_quality_statistics -o $output</command>
+  <inputs>
+    <param format="fastqsolexa" name="input" type="data" label="Library to analyze" />
+  </inputs>
+  <outputs>
+    <data format="txt" name="output" />
+  </outputs>
+  <help>
+
+**What it does**
+
+Creates a quality statistics report for the given Solexa/FASTQ library.
+
+-----
+
+**The output file will contain the following fields:**
+
+* column  = column number (1 to 36 for a 36-cycle Solexa read file)
+* count   = number of bases found in this column
+* min     = lowest quality score value found in this column
+* max     = highest quality score value found in this column
+* sum     = sum of quality score values for this column
+* mean    = mean quality score value for this column
+* Q1      = 1st quartile quality score
+* med     = median quality score
+* Q3      = 3rd quartile quality score
+* IQR     = inter-quartile range (Q3-Q1)
+* lW      = 'left whisker' value (for boxplotting)
+* rW      = 'right whisker' value (for boxplotting)
+* A_Count = count of 'A' nucleotides found in this column
+* C_Count = count of 'C' nucleotides found in this column
+* G_Count = count of 'G' nucleotides found in this column
+* T_Count = count of 'T' nucleotides found in this column
+* N_Count = count of 'N' nucleotides found in this column
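+
+Run outside of Galaxy, the underlying command is equivalent to the following (file names are placeholders)::
+
+  cat input.fastqsolexa | solexa_quality_statistics -o statistics.txt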
+
+
+.. class:: infomark
+
+**TIP:** This statistics report can be used as input for **Quality Score** and **Nucleotides Distribution** tools.
+
+**Output Example**::
+
+    column	count	min	max	sum	mean	Q1	med	Q3	IQR	lW	rW	A_Count	C_Count	G_Count	T_Count	N_Count
+    1	6362991	-4	40	250734117	39.41	40	40	40	0	40	40	1396976	1329101	678730	2958184	0
+    2	6362991	-5	40	250531036	39.37	40	40	40	0	40	40	1786786	1055766	1738025	1782414	0
+    3	6362991	-5	40	248722469	39.09	40	40	40	0	40	40	2296384	984875	1443989	1637743	0
+    4	6362991	-5	40	247654797	38.92	40	40	40	0	40	40	1683197	1410855	1722633	1546306	0
+    5	6362991	-4	40	248214827	39.01	40	40	40	0	40	40	2536861	1167423	1248968	1409739	0
+    6	6362991	-5	40	248499903	39.05	40	40	40	0	40	40	1598956	1236081	1568608	1959346	0
+    7	6362991	-4	40	247719760	38.93	40	40	40	0	40	40	1692667	1822140	1496741	1351443	0
+    8	6362991	-5	40	245745205	38.62	40	40	40	0	40	40	2230936	1343260	1529928	1258867	0
+    9	6362991	-5	40	245766735	38.62	40	40	40	0	40	40	1702064	1306257	1336511	2018159	0
+    10	6362991	-5	40	245089706	38.52	40	40	40	0	40	40	1519917	1446370	1450995	1945709	0
+    11	6362991	-5	40	242641359	38.13	40	40	40	0	40	40	1717434	1282975	1387804	1974778	0
+    12	6362991	-5	40	242026113	38.04	40	40	40	0	40	40	1662872	1202041	1519721	1978357	0
+    13	6362991	-5	40	238704245	37.51	40	40	40	0	40	40	1549965	1271411	1973291	1566681	1643
+    14	6362991	-5	40	235622401	37.03	40	40	40	0	40	40	2101301	1141451	1603990	1515774	475
+    15	6362991	-5	40	230766669	36.27	40	40	40	0	40	40	2344003	1058571	1440466	1519865	86
+    16	6362991	-5	40	224466237	35.28	38	40	40	2	35	40	2203515	1026017	1474060	1651582	7817
+    17	6362991	-5	40	219990002	34.57	34	40	40	6	25	40	1522515	1125455	2159183	1555765	73
+    18	6362991	-5	40	214104778	33.65	30	40	40	10	15	40	1479795	2068113	1558400	1249337	7346
+    19	6362991	-5	40	212934712	33.46	30	40	40	10	15	40	1432749	1231352	1769799	1920093	8998
+    20	6362991	-5	40	212787944	33.44	29	40	40	11	13	40	1311657	1411663	2126316	1513282	73
+    21	6362991	-5	40	211369187	33.22	28	40	40	12	10	40	1887985	1846300	1300326	1318380	10000
+    22	6362991	-5	40	213371720	33.53	30	40	40	10	15	40	542299	3446249	516615	1848190	9638
+    23	6362991	-5	40	221975899	34.89	36	40	40	4	30	40	347679	1233267	926621	3855355	69
+    24	6362991	-5	40	194378421	30.55	21	40	40	19	-5	40	433560	674358	3262764	1992242	67
+    25	6362991	-5	40	199773985	31.40	23	40	40	17	-2	40	944760	325595	1322800	3769641	195
+    26	6362991	-5	40	179404759	28.20	17	34	40	23	-5	40	3457922	156013	1494664	1254293	99
+    27	6362991	-5	40	163386668	25.68	13	28	40	27	-5	40	1392177	281250	3867895	821491	178
+    28	6362991	-5	40	156230534	24.55	12	25	40	28	-5	40	907189	981249	4174945	299437	171
+    29	6362991	-5	40	163236046	25.65	13	28	40	27	-5	40	1097171	3418678	1567013	280008	121
+    30	6362991	-5	40	151309826	23.78	12	23	40	28	-5	40	3514775	2036194	566277	245613	132
+    31	6362991	-5	40	141392520	22.22	10	21	40	30	-5	40	1569000	4571357	124732	97721	181
+    32	6362991	-5	40	143436943	22.54	10	21	40	30	-5	40	1453607	4519441	38176	351107	660
+    33	6362991	-5	40	114269843	17.96	6	14	30	24	-5	40	3311001	2161254	155505	734297	934
+    34	6362991	-5	40	140638447	22.10	10	20	40	30	-5	40	1501615	1637357	18113	3205237	669
+    35	6362991	-5	40	138910532	21.83	10	20	40	30	-5	40	1532519	3495057	23229	1311834	352
+    36	6362991	-5	40	117158566	18.41	7	15	30	23	-5	40	4074444	1402980	63287	822035	245
+    
+
+</help>
+
+  <citations>
+    <citation type="doi">10.1093/bioinformatics/btq281</citation>
+  </citations>
+
+</tool>
diff --git a/tools/sr_mapping/mosaik.xml b/tools/sr_mapping/mosaik.xml
new file mode 100644
index 0000000..8f7c79f
--- /dev/null
+++ b/tools/sr_mapping/mosaik.xml
@@ -0,0 +1,129 @@
+<?xml version="1.0"?>
+<tool id="mosaik_wrapper" name="Map with Mosaik" version="1.1.2">
+  <description/>
+  <requirements>
+    <requirement type="package" version="1.1.0021">mosaik</requirement>
+    <requirement type="package" version="0.1.18">samtools</requirement>
+  </requirements>
+  <version_command>MosaikAligner | sed -e 's/\x1b\[[[:digit:]]\{1,2\}\(;[[:digit:]]\{1,2\}\)\{0,1\}m//g' | grep -o 'MosaikAligner [[:digit:].]\{1,\}'</version_command>
+  <command>
+    #set $processors = '-p ${GALAXY_SLOTS:-4}'
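+    ## pipeline: build reference and read archives with MosaikBuild, align with MosaikAligner, then convert to SAM/BAM with MosaikText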
+    MosaikBuild -fr
+    #if $genomeSource.refGenomeSource == 'indexed':
+        ${genomeSource.indexReference.fields.path}
+    #else:
+        ${genomeSource.historyReference}
+    #end if
+    -oa mosaik_ref_file;
+    MosaikBuild -q $reads
+    #if $paired.kind == 'single'
+      #set $ls_string  = ''
+    #else
+        -q2 ${paired.reads2}
+        -mfl ${paired.mfl}
+        #set $ls_string = '-ls %s' % $paired.ls
+    #end if
+    -st $st -out mosaik_reads_file;
+    MosaikAligner -ia mosaik_ref_file -in mosaik_reads_file -out mosaik_aligned_file $ls_string -mm $mm -mhp $mhp -act $act -bw $bw $processors -hs 15;
+    MosaikText -in mosaik_aligned_file -$outFormat sam_bam_file;
+    #if str($outFormat) == 'bam':
+        samtools sort sam_bam_file sorted_bam;
+        mv sorted_bam.bam $output
+    #else:
+        gunzip sam_bam_file.gz;
+        mv sam_bam_file $output
+    #end if
+  </command>
+  <inputs>
+    <conditional name="genomeSource">
+      <param name="refGenomeSource" type="select" label="Will you select a reference genome from your history or use a built-in index?">
+        <option value="indexed">Use a built-in index</option>
+        <option value="history">Use one from the history</option>
+      </param>
+      <when value="indexed">
+        <param name="indexReference" type="select" label="Select a reference genome">
+          <options from_data_table="mosaik_indexes">
+            <filter type="sort_by" column="2"/>
+            <validator type="no_options" message="No indexes are available" />
+          </options>
+        </param>
+      </when>
+      <when value="history">
+        <param format="fasta" name="historyReference" type="data" metadata_name="dbkey" label="Select a reference from history"/>
+      </when>
+    </conditional>
+    <param format="fastq" name="reads" type="data" label="FASTQ reads file" />
+    <param name="outFormat" type="select" label="Output format">
+      <option value="sam">SAM</option>
+      <option value="bam">BAM</option>
+    </param>
+    <param name="st" type="select" label="Sequencing technology used">
+      <option value="454">454</option>
+      <option value="illumina">Illumina</option>
+      <option value="solid">Solid</option>
+      <option value="sanger">Sanger</option>
+      <option value="helicos">Helicos</option>
+    </param>
+    <conditional name="paired">
+      <param name="kind" type="select" label="Is this library mate-paired?">
+        <option value="single">Single-end</option>
+        <option value="paired">Paired-end</option>
+      </param>
+      <when value="single"/>
+      <when value="paired">
+        <param format="fastq" name="reads2" type="data" label="FASTQ 2nd mate" />
+        <param name="mfl" type="integer" value="200" label="Median fragment length" />
+        <param name="ls" type="integer" min="0" value="50" label="Local alignment search radius to rescue mates" help="A large value slows down performances" />
+      </when>
+    </conditional>
+    <param name="mm" type="integer" value="6" label="Number of mismatches allowed per sequence" />
+    <param name="act" type="integer" value="35" label="Alignment candidate threshold" help="Determines which hash regions will be aligned with Smith-Waterman" />
+    <param name="bw" type="integer" value="9" label="Smith-Waterman band width" />
+    <param name="mhp" type="integer" value="100" label="Maximum number of positions stored per seed" help="Number of places in the reference the aligner will try to place a particular hash" />
+  </inputs>
+  <outputs>
+    <data format="sam" name="output">
+      <change_format>
+        <when input="outFormat" value="bam" format="bam" />
+      </change_format>
+      <actions>
+        <conditional name="genomeSource.refGenomeSource">
+          <when value="indexed">
+            <action type="metadata" name="dbkey">
+              <option type="from_data_table" name="mosaik_indexes" column="1">
+                <filter type="param_value" column="0" value="#" compare="startswith" keep="False" />
+                <filter type="param_value" ref="genomeSource.indexReference" column="0" />
+              </option>
+            </action>
+          </when>
+          <when value="history">
+            <action type="metadata" name="dbkey">
+              <option type="from_param" name="genomeSource.historyReference" param_attribute="dbkey" />
+            </action>
+          </when>
+        </conditional>
+      </actions>
+   </data>
+  </outputs>
+  <tests>
+    <test>
+      <param name="refGenomeSource" value="history"/>
+      <param name="historyReference" ftype="fasta" value="mosaik_test_ref.fasta"/>
+      <param name="reads" ftype="fastq" value="mosaik_test_input.fastq"/>
+      <param name="outFormat" value="sam"/>
+      <param name="st" value="454"/>
+      <param name="kind" value="single"/>
+      <param name="mm" value="6"/>
+      <param name="act" value="35"/>
+      <param name="bw" value="19"/>
+      <param name="mhp" value="100"/>
+      <output name="output" file="mosaik_test_out.sam" compare="sim_size" delta="0"/>
+    </test>
+  </tests>
+  <help>
+This tool uses Mosaik to align reads to a reference sequence.
+  </help>
+  <citations>
+    <citation type="doi">10.1371/journal.pone.0090581</citation>
+  </citations>
+</tool>
diff --git a/tools/sr_mapping/srma_wrapper.py b/tools/sr_mapping/srma_wrapper.py
new file mode 100644
index 0000000..7c73fc6
--- /dev/null
+++ b/tools/sr_mapping/srma_wrapper.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python
+"""
+Runs SRMA on a SAM/BAM file;
+TODO: more documentation
+
+usage: srma_wrapper.py [options]
+
+See below for options
+"""
+
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def stop_err( msg ):
+    sys.stderr.write( '%s\n' % msg )
+    sys.exit(1)
+
+
+def parseRefLoc( refLoc, refUID ):
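+    """Return the second field of the row keyed by refUID in a tab-separated .loc file (None if absent)."""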
+    for line in open( refLoc ):
+        if not line.startswith( '#' ):
+            fields = line.strip().split( '\t' )
+            if len( fields ) >= 3:
+                if fields[0] == refUID:
+                    return fields[1]
+    return None
+
+
+def __main__():
+    parser = optparse.OptionParser()
+    parser.add_option( '-r', '--ref', dest='ref', help='The reference genome to index and use' )
+    parser.add_option( '-u', '--refUID', dest='refUID', help='The unique identifier of the pre-indexed reference genome' )
+    parser.add_option( '-i', '--input', dest='input', help='The SAM/BAM input file' )
+    parser.add_option( '-I', '--inputIndex', dest='inputIndex', help='The SAM/BAM input index file' )
+    parser.add_option( '-o', '--output', dest='output', help='The SAM/BAM output file' )
+    parser.add_option( '-O', '--offset', dest='offset', help='The alignment offset' )
+    parser.add_option( '-Q', '--minMappingQuality', dest='minMappingQuality', help='The minimum mapping quality' )
+    parser.add_option( '-P', '--minAlleleProbability', dest='minAlleleProbability', help='The minimum allele probability conditioned on coverage (for the binomial quantile).' )
+    parser.add_option( '-C', '--minAlleleCoverage', dest='minAlleleCoverage', help='The minimum haploid coverage for the consensus' )
+    parser.add_option( '-R', '--range', dest='range', help='A range to examine' )
+    parser.add_option( '-c', '--correctBases', dest='correctBases', help='Correct bases ' )
+    parser.add_option( '-q', '--useSequenceQualities', dest='useSequenceQualities', help='Use sequence qualities ' )
+    parser.add_option( '-M', '--maxHeapSize', dest='maxHeapSize', help='The maximum number of nodes on the heap before re-alignment is ignored' )
+    parser.add_option( '-s', '--fileSource', dest='fileSource', help='Whether to use a previously indexed reference sequence or one from history (indexed or history)' )
+    parser.add_option( '-L', '--refLocation', dest='refLocation', help='The .loc file listing built-in reference genomes; used with --refUID when --ref is not supplied' )
+    parser.add_option( '-p', '--params', dest='params', help='Parameter setting to use (pre_set or full)' )
+    parser.add_option( '-j', '--jarBin', dest='jarBin', default='', help='The path to where jars are stored' )
+    parser.add_option( '-f', '--jarFile', dest='jarFile', help='The file name of the jar file to use')
+    (options, args) = parser.parse_args()
+
+    # make temp directory for srma
+    tmp_dir = tempfile.mkdtemp()
+    buffsize = 1048576
+
+    # set up reference filenames
+    reference_filepath_name = None
+    # need to create SRMA dict and Samtools fai files for custom genome
+    if options.fileSource == 'history':
+        try:
+            reference_filepath = tempfile.NamedTemporaryFile( dir=tmp_dir, suffix='.fa' )
+            reference_filepath_name = reference_filepath.name
+            reference_filepath.close()
+            dict_filepath_name = reference_filepath_name.replace( '.fa', '.dict' )
+            os.symlink( options.ref, reference_filepath_name )
+            # create fai file using Samtools
+            index_fai_cmd = 'samtools faidx %s' % reference_filepath_name
+            try:
+                tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
+                tmp_stderr = open( tmp, 'wb' )
+                proc = subprocess.Popen( args=index_fai_cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
+                returncode = proc.wait()
+                tmp_stderr.close()
+                # get stderr, allowing for case where it's very large
+                tmp_stderr = open( tmp, 'rb' )
+                stderr = ''
+                try:
+                    # read stderr in buffsize chunks; an empty read signals EOF
+                    while True:
+                        chunk = tmp_stderr.read( buffsize )
+                        if not chunk:
+                            break
+                        stderr += chunk
+                except OverflowError:
+                    pass
+                tmp_stderr.close()
+                if returncode != 0:
+                    raise Exception(stderr)
+            except Exception as e:
+                # clean up temp dir
+                if os.path.exists( tmp_dir ):
+                    shutil.rmtree( tmp_dir )
+                stop_err( 'Error creating Samtools index for custom genome file: %s\n' % str( e ) )
+            # create dict file using SRMA
+            dict_cmd = 'java -cp "%s" net.sf.picard.sam.CreateSequenceDictionary R=%s O=%s' % ( os.path.join( options.jarBin, options.jarFile ), reference_filepath_name, dict_filepath_name )
+            try:
+                tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
+                tmp_stderr = open( tmp, 'wb' )
+                proc = subprocess.Popen( args=dict_cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
+                returncode = proc.wait()
+                tmp_stderr.close()
+                # get stderr, allowing for case where it's very large
+                tmp_stderr = open( tmp, 'rb' )
+                stderr = ''
+                try:
+                    # read stderr in buffsize chunks; an empty read signals EOF
+                    while True:
+                        chunk = tmp_stderr.read( buffsize )
+                        if not chunk:
+                            break
+                        stderr += chunk
+                except OverflowError:
+                    pass
+                tmp_stderr.close()
+                if returncode != 0:
+                    raise Exception(stderr)
+            except Exception as e:
+                # clean up temp dir
+                if os.path.exists( tmp_dir ):
+                    shutil.rmtree( tmp_dir )
+                stop_err( 'Error creating index for custom genome file: %s\n' % str( e ) )
+        except Exception as e:
+            # clean up temp dir
+            if os.path.exists( tmp_dir ):
+                shutil.rmtree( tmp_dir )
+            stop_err( 'Problem handling SRMA index (dict file) for custom genome file: %s\n' % str( e ) )
+    # using built-in dict/index files
+    else:
+        if options.ref:
+            reference_filepath_name = options.ref
+        elif options.refLocation:
+            reference_filepath_name = parseRefLoc( options.refLocation, options.refUID )
+    if reference_filepath_name is None:
+        raise ValueError( 'A valid genome reference was not provided.' )
+
+    # set up aligning and generate aligning command options
+    if options.params == 'pre_set':
+        srma_cmds = ''
+    else:
+        ranges = 'null'
+        if options.range == 'None':
+            range = 'null'
+        else:
+            range = options.range
+        srma_cmds = "OFFSET=%s MIN_MAPQ=%s MINIMUM_ALLELE_PROBABILITY=%s MINIMUM_ALLELE_COVERAGE=%s RANGES=%s RANGE=%s CORRECT_BASES=%s USE_SEQUENCE_QUALITIES=%s MAX_HEAP_SIZE=%s" % ( options.offset, options.minMappingQuality, options.minAlleleProbability, options.minAlleleCoverage, ranges, range, options.correctBases, options.useSequenceQualities, options.maxHeapSize )
+
+    srma_cmds = "%s VALIDATION_STRINGENCY=LENIENT" % srma_cmds
+
+    # perform alignments
+    buffsize = 1048576
+    try:
+        # symlink input bam and index files due to the naming conventions required by srma here
+        input_bam_filename = os.path.join( tmp_dir, '%s.bam' % os.path.split( options.input )[-1] )
+        os.symlink( options.input, input_bam_filename )
+        input_bai_filename = "%s.bai" % os.path.splitext( input_bam_filename )[0]
+        os.symlink( options.inputIndex, input_bai_filename )
+
+        # create a temp output name ending in .bam; SRMA appears to expect this naming convention (not verified)
+        output_bam_filename = os.path.join( tmp_dir, "%s.bam" % os.path.split( options.output )[-1] )
+        # generate commandline
+        java_opts = ''
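+        # default the JVM heap to 2 GB unless the deployer has already set _JAVA_OPTIONS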
+        if '_JAVA_OPTIONS' not in os.environ:
+            java_opts = '-Xmx2048m'
+        cmd = 'java %s -jar %s I=%s O=%s R=%s %s' % ( java_opts, os.path.join( options.jarBin, options.jarFile ), input_bam_filename, output_bam_filename, reference_filepath_name, srma_cmds )
+        # need to nest try-except in try-finally to handle 2.4
+        try:
+            try:
+                tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
+                tmp_stderr = open( tmp, 'wb' )
+                proc = subprocess.Popen( args=cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
+                returncode = proc.wait()
+                tmp_stderr.close()
+                # get stderr, allowing for case where it's very large
+                tmp_stderr = open( tmp, 'rb' )
+                stderr = ''
+                try:
+                    # read stderr in buffsize chunks; an empty read signals EOF
+                    while True:
+                        chunk = tmp_stderr.read( buffsize )
+                        if not chunk:
+                            break
+                        stderr += chunk
+                except OverflowError:
+                    pass
+                tmp_stderr.close()
+                if returncode != 0:
+                    raise Exception(stderr)
+            except Exception as e:
+                raise Exception('Error executing SRMA. ' + str( e ))
+            # move file from temp location (with .bam name) to provided path
+            shutil.move( output_bam_filename, options.output )
+            # check that there are results in the output file
+            if os.path.getsize( options.output ) <= 0:
+                raise Exception('The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.')
+        except Exception as e:
+            stop_err( 'The re-alignment failed.\n' + str( e ) )
+    finally:
+        # clean up temp dir
+        if os.path.exists( tmp_dir ):
+            shutil.rmtree( tmp_dir )
+
+
+if __name__ == "__main__":
+    __main__()
diff --git a/tools/sr_mapping/srma_wrapper.xml b/tools/sr_mapping/srma_wrapper.xml
new file mode 100644
index 0000000..1041a25
--- /dev/null
+++ b/tools/sr_mapping/srma_wrapper.xml
@@ -0,0 +1,221 @@
+<tool id="srma_wrapper" name="Re-align with SRMA" version="0.2.5">
+  <description></description>
+  <command interpreter="python">srma_wrapper.py 
+    #if $refGenomeSource.refGenomeSource_type == "history":
+      --ref=$refGenomeSource.ownFile
+    #else:
+      --ref="${refGenomeSource.ref.fields.path}"
+      --refUID=$refGenomeSource.ref
+      ##--refLocations=${GALAXY_DATA_INDEX_DIR}/srma_index.loc
+    #end if
+    --input=$input
+    --inputIndex=${input.metadata.bam_index}
+    --output=$output
+    --params=$params.source_select
+    --fileSource=$refGenomeSource.refGenomeSource_type
+    --jarBin="${GALAXY_DATA_INDEX_DIR}/shared/jars"
+    #if $params.source_select == "full":
+      --offset=$params.offset
+      --minMappingQuality=$params.minMappingQuality
+      --minAlleleProbability=$params.minAlleleProbability
+      --minAlleleCoverage=$params.minAlleleCoverage
+      --range=$params.range
+      --correctBases=$params.correctBases
+      --useSequenceQualities=$params.useSequenceQualities
+      --maxHeapSize=$params.maxHeapSize
+    #end if
+    --jarFile="srma.jar"
+  </command>
+  <inputs>
+    <conditional name="refGenomeSource">
+      <param name="refGenomeSource_type" type="select" label="Will you select a reference genome from your history or use a built-in reference?">
+        <option value="built-in">Use a built-in reference</option>
+        <option value="history">Use one from the history</option>
+      </param>
+      <when value="built-in">
+        <param name="ref" type="select" label="Select a reference genome">
+          <options from_data_table="srma_indexes">
+            <filter type="sort_by" column="2" />
+            <validator type="no_options" message="No indexes are available" />
+          </options>
+        </param>
+      </when>
+      <when value="history">
+        <param name="ownFile" type="data" format="fasta" metadata_name="dbkey" label="Select a reference from history" />
+      </when>
+    </conditional>
+    <param name="input" type="data" format="bam" label="Input BAM file" help="The input BAM file to re-align"/>
+    <conditional name="params">
+      <param name="source_select" type="select" label="SRMA settings to use" help="For most re-alignment needs, use Commonly Used settings. If you want full control use Full Parameter List">
+        <option value="pre_set">Commonly Used</option>
+        <option value="full">Full Parameter List</option>
+      </param>
+      <when value="pre_set" />
+      <when value="full">
+        <param name="offset" type="integer" value="20" label="Offset" help="The alignment offset" />
+        <param name="minMappingQuality" type="integer" value="0" label="Minimum mapping quality" help="The minimum mapping quality" />
+        <param name="minAlleleProbability" type="float" value="0.1" label="Minimum allele probability" help="The minimum allele probability conditioned on coverage (for the binomial quantile)." />
+        <param name="minAlleleCoverage" type="integer" value="2" label="Minimum allele coverage" help="The minimum haploid coverage for the consensus. Default value: 3. This option can be set " />
+        <param name="range" type="text" value="null" label="Range" help="A range to examine" />
+        <param name="correctBases" type="boolean" truevalue="true" falsevalue="false" checked="no" label="Correct bases" help="Correct bases " />
+        <param name="useSequenceQualities" type="boolean" truevalue="true" falsevalue="false" checked="no" label="Use sequence qualities" help="Use sequence qualities " />
+        <param name="maxHeapSize" type="integer" value="8192" label="Maximum heap size" help="The maximum number of nodes on the heap before re-alignment is ignored" />
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="bam" name="output" label="${tool.name} on ${on_string}: re-aligned reads">
+      <actions>
+        <conditional name="refGenomeSource.refGenomeSource_type">
+          <when value="built-in">
+            <action type="metadata" name="dbkey">
+              <option type="from_data_table" name="srma_indexes" column="1" offset="0">
+                <filter type="param_value" column="0" value="#" compare="startswith" keep="False" />
+                <filter type="param_value" ref="refGenomeSource.ref" column="0" />
+              </option>
+            </action>
+          </when>
+          <when value="history">
+            <action type="metadata" name="dbkey">
+              <option type="from_param" name="refGenomeSource.ownFile" param_attribute="dbkey" />
+            </action>
+          </when>
+        </conditional>
+      </actions>
+    </data>
+  </outputs>
+  <tests>
+      <test>
+          <!-- Commands to run to prepare test files (uses built-in index)
+          Prepare bam index file:
+          samtools index srma_in1.bam
+          Run SRMA:
+          java -jar srma.jar I=srma_in1.bam O=srma_out1.bam R=/afs/bx.psu.edu/depot/data/genome/hg18/srma_index/chr21.fa
+          To create the bam file first, start with a sam file (srma_in1.sam) generated with a run using the chr21 fasta file and which contains the header. Run before samtools index:
+          samtools view -bt /afs/bx.psu.edu/depot/data/genome/hg18/sam_index/chr21.fa -o srma_in1.u.bam srma_in1.sam
+          samtools sort srma_in1.u.bam srma_in1
+          -->
+          <param name="refGenomeSource_type" value="built-in" />
+          <param name="ref" value="hg18chr21" />
+          <param name="input" value="srma_in1.bam" ftype="bam" />
+          <param name="source_select" value="pre_set" />
+          <output name="output" file="srma_out1.bam" ftype="bam" lines_diff="2" /><!-- allows tag with version number to be different -->
+      </test>
+      <test>
+          <!-- Commands to run to prepare test files (uses custom genome):
+          Prepare custom dict/index files:
+          samtools faidx srma_in2.fa
+          java -cp srma.jar net.sf.picard.sam.CreateSequenceDictionary R=srma_in2.fa O=srma_in2.dict
+          Prepare bam index file:
+          samtools index srma_in3.bam
+          Run SRMA:
+          java -jar "srma.jar" I=srma_in3.bam O=srma_out2.bam R=srma_in2.fa OFFSET=20 MIN_MAPQ=0 MINIMUM_ALLELE_PROBABILITY=0.1 MINIMUM_ALLELE_COVERAGE=2 RANGES=null RANGE=null CORRECT_BASES=true USE_SEQUENCE_QUALITIES=true MAX_HEAP_SIZE=8192
+          To create the bam file first, the sam file needs to have been run with the same reference file (srma_in2.fa) and have the header present. For instance:
+          samtools view -bT srma_in2.fa -o srma_in3.u.bam srma_in3.sam
+          samtools sort srma_in3.u.bam srma_in3
+          -->
+          <param name="refGenomeSource_type" value="history" />
+          <param name="ownFile" value="srma_in2.fa" ftype="fasta" />
+          <param name="input" value="srma_in3.bam" ftype="bam" />
+          <param name="source_select" value="full" />
+          <param name="offset" value="20" />
+          <param name="minMappingQuality" value="0" />
+          <param name="minAlleleProbability" value="0.1" />
+          <param name="minAlleleCoverage" value="2" />
+          <param name="range" value="null" />
+          <param name="correctBases" value="true" />
+          <param name="useSequenceQualities" value="true" />
+          <param name="maxHeapSize" value="8192" />
+          <output name="output" file="srma_out2.bam" ftype="bam" lines_diff="2" /><!-- allows tag with version number to be different -->
+      </test>
+  </tests>
+  <help>
+**What it does**
+
+SRMA is a short read micro re-aligner for next-generation high throughput sequencing data.
+
+Sequence alignment algorithms examine each read independently. When indels occur towards the ends of reads, the alignment can lead to false SNPs as well as improperly placed indels. This tool aims to perform a re-alignment of each read to a graphical representation of all alignments within a local region to provide a better overall base-resolution consensus.
+
+Currently this tool has been tested on, and works well with, 30x diploid-coverage genome sequencing data from Illumina and ABI SOLiD technologies. It may not work well with 454 data, since indels are a significant error mode for that platform.
+
+------
+
+Please cite the website "http://srma.sourceforge.net" as well as:
+
+Homer N, and Nelson SF.  SRMA: short read micro re-aligner. 2010.
+
+------
+
+**Know what you are doing**
+
+.. class:: warningmark
+
+There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words: running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
+
+.. __: http://srma.sourceforge.net/
+
+------
+
+**Input formats**
+
+SRMA accepts a BAM input file. Note that this file should have been generated from a SAM file which contains the header.
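+
+For example, a headered, sorted and indexed BAM can be prepared with samtools (file names are illustrative; ref.fa is the reference the reads were mapped against)::
+
+  samtools view -bT ref.fa -o input.u.bam input.sam
+  samtools sort input.u.bam input
+  samtools index input.bam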
+
+------
+
+**Outputs**
+
+The output is in BAM format, see http://samtools.sourceforge.net for more details.
+
+-------
+
+**SRMA settings**
+
+All of the options have a default value. You can change any of them. Most of the options in SRMA have been implemented here.
+
+------
+
+**SRMA parameter list**
+
+This is an exhaustive list of SRMA options:
+
+For **SRMA**::
+
+  INPUT=File
+  I=File                        The input SAM or BAM file. Required. 
+  
+  OUTPUT=File
+  O=File                        The output SAM or BAM file. Default value: null. 
+  
+  REFERENCE=File
+  R=File                        The reference FASTA file. Required. 
+  
+  OFFSET=Integer                The alignment offset. Default value: 20. This option can be set to 'null' to clear the 
+                                default value. 
+  
+  MIN_MAPQ=Integer              The minimum mapping quality. Default value: 0. This option can be set to 'null' to clear 
+                                the default value. 
+  
+  MINIMUM_ALLELE_PROBABILITY=Double
+                                The minimum allele probability conditioned on coverage (for the binomial quantile). 
+                                Default value: 0.1. This option can be set to 'null' to clear the default value. 
+  
+  MINIMUM_ALLELE_COVERAGE=Integer
+                                The minimum haploid coverage for the consensus. Default value: 3. This option can be set 
+                                to 'null' to clear the default value. 
+  
+  RANGE=String                  A range to examine. Default value: null. 
+  
+  CORRECT_BASES=Boolean         Correct bases. Default value: false. This option can be set to 'null' to clear the 
+                                default value. Possible values: {true, false} 
+  
+  USE_SEQUENCE_QUALITIES=Boolean
+                                Use sequence qualities. Default value: true. This option can be set to 'null' to clear
+                                the default value. Possible values: {true, false}
+  
+  MAX_HEAP_SIZE=Integer         The maximum number of nodes on the heap before re-alignment is ignored. Default value:
+                                8192. This option can be set to 'null' to clear the default value.
+
+  </help>
+  <citations>
+    <citation type="doi">10.1093/bioinformatics/bts286</citation>
+  </citations>
+</tool>
diff --git a/tools/stats/aggregate_binned_scores_in_intervals.xml b/tools/stats/aggregate_binned_scores_in_intervals.xml
new file mode 100644
index 0000000..ab55178
--- /dev/null
+++ b/tools/stats/aggregate_binned_scores_in_intervals.xml
@@ -0,0 +1,113 @@
+<tool id="aggregate_scores_in_intervals2" description="such as phastCons, GERP, binCons, and others for a set of genomic intervals" name="Aggregate datapoints" version="1.1.3">
+  <description>Appends the average, min, max of datapoints per interval</description>
+  <command interpreter="python">
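+    ## user wiggle files are read with a 3-chromosome in-memory buffer; cached sources are directories of binned-array files, flagged with -b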
+    #if $score_source_type.score_source == "user" #aggregate_scores_in_intervals.py $score_source_type.input2 $input1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} $out_file1 --chrom_buffer=3
+    #else                                         #aggregate_scores_in_intervals.py $score_source_type.datasets $input1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} $out_file1 -b
+    #end if#
+  </command>
+  <inputs>
+    <param format="interval" name="input1" type="data" label="Interval file"/>
+    <conditional name="score_source_type">
+      <param name="score_source" type="select" label="Score Source">
+        <option value="cached" selected="true">Locally Cached Scores</option>
+        <option value="user">Scores in Your History</option>
+      </param>
+      <when value="cached">
+        <param name="datasets" type="select" label="Available datasets" display="radio">
+          <options from_file="binned_scores.loc">
+            <column name="name" index="1"/>
+            <column name="value" index="2"/>
+            <column name="dbkey" index="0"/>
+            <filter type="data_meta" ref="input1" key="dbkey" column="0" />
+          </options>
+        </param>
+      </when>
+      <when value="user">
+        <param format="wig" name="input2" type="data" label="Score file">
+          <options>
+            <filter type="data_meta" ref="input1" key="dbkey" />
+          </options>
+        </param>
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data format="interval" name="out_file1" metadata_source="input1"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input1" value="6.bed" dbkey="hg17" ftype="bed"/>
+      <param name="score_source" value="cached"/>
+      <param name="datasets" value="/galaxy/data/binned_scores/hg17/phastcons_encode_sep2005_tba" />
+      <output name="out_file1" file="aggregate_binned_scores_in_intervals.out" />
+    </test>
+    <test>
+      <param name="input1" value="9_hg18.bed" dbkey="hg18" ftype="bed"/>
+      <param name="score_source" value="cached"/>
+      <param name="datasets" value="/galaxy/data/binned_scores/hg18/phastCons17way/ba" />
+      <output name="out_file1" file="aggregate_binned_scores_in_intervals2.interval" />
+    </test>
+    <test>
+      <param name="input1" value="6.bed" dbkey="hg17" ftype="bed"/>
+      <param name="score_source" value="user"/>
+      <param name="input2" value="aggregate_binned_scores_3.wig" dbkey="hg17" ftype="wig"/>
+      <output name="out_file1" file="aggregate_binned_scores_in_intervals3.out"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+This tool currently only has cached data for genome builds hg16, hg17 and hg18. However, you may use your own datapoint (wiggle) data, such as the tracks available from UCSC. If you are trying to use your own datapoint file and it is not appearing as an option, make sure that the builds for your history items are the same.
+
+.. class:: warningmark
+
+This tool assumes that the input dataset is in interval format and contains at least a chrom column, a start column and an end column.  These 3 columns can be dispersed throughout any number of other data columns. 
+
+-----
+
+.. class:: infomark
+
+**TIP:** Computing summary information may throw exceptions if the data type (e.g., string, integer) in every line of the columns is not appropriate for the computation (e.g., attempting numerical calculations on strings).  If an exception is thrown when computing summary information for a line, that line is skipped as invalid for the computation.  The number of invalid skipped lines is documented in the resulting history item as a "Data issue".
+
+-----
+
+**Syntax**
+
+This tool appends columns of summary information for each interval matched against a selected dataset.  For each interval, the average, minimum and maximum of the data falling within the interval are computed.
+
+- Several quantitative scores are provided for the ENCODE regions.
+
+  - Various Scores
+      - Regulatory Potential
+      - Neutral rate (Ancestral Repeats)
+      - GC fraction
+  - Conservation Scores
+      - PhastCons
+      - binCons
+      - GERP
+
+-----
+
+**Example**
+
+If your original data has the following format:
+
++------+-----+-----+---+------+
+|other1|chrom|start|end|other2|
++------+-----+-----+---+------+
+
+and you choose to aggregate phastCons scores, your output will look like this:
+
++------+-----+-----+---+------+---+---+---+
+|other1|chrom|start|end|other2|avg|min|max|
++------+-----+-----+---+------+---+---+---+
+
+where:
+
+* **avg** - average phastCons score for each region
+* **min** - minimum phastCons score for each region
+* **max** - maximum phastCons score for each region
+
+  </help>
+</tool>
diff --git a/tools/stats/aggregate_scores_in_intervals.py b/tools/stats/aggregate_scores_in_intervals.py
new file mode 100755
index 0000000..2fb7d41
--- /dev/null
+++ b/tools/stats/aggregate_scores_in_intervals.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+# Greg Von Kuster
+"""
+usage: %prog score_file interval_file chrom start stop [out_file] [options]
+    -b, --binned: 'score_file' is actually a directory of binned array files
+    -m, --mask=FILE: bed file containing regions not to consider valid
+    -c, --chrom_buffer=INT: number of chromosomes (default is 3) to keep in memory when using a user-supplied score file
+"""
+
+from __future__ import division, print_function
+
+import os
+import os.path
+import struct
+import sys
+import tempfile
+from math import isnan
+from UserDict import DictMixin
+
+import bx.wiggle
+from bx.binned_array import BinnedArray, FileBinnedArray
+from bx.bitset_builders import binned_bitsets_from_file
+from bx.cookbook import doc_optparse
+
+from galaxy.util.ucsc import UCSCLimitException, UCSCOutWrapper
+
+
+class PositionalScoresOnDisk:
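+    """
+    Disk-backed array of floats: values are stored as packed 'f' structs in a
+    temporary file, and unwritten or out-of-range positions read back as NaN.
+    Serves as a low-memory fallback once the in-memory chromosome buffer used
+    by load_scores_wiggle is exhausted.
+    """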
+    fmt = 'f'
+    fmt_size = struct.calcsize( fmt )
+    default_value = float( 'nan' )
+
+    def __init__( self ):
+        self.file = tempfile.TemporaryFile( 'w+b' )
+        self.length = 0
+
+    def __getitem__( self, i ):
+        if i < 0:
+            i = self.length + i
+        if i < 0 or i >= self.length:
+            return self.default_value
+        try:
+            self.file.seek( i * self.fmt_size )
+            return struct.unpack( self.fmt, self.file.read( self.fmt_size ) )[0]
+        except Exception as e:
+            raise IndexError(e)
+
+    def __setitem__( self, i, value ):
+        if i < 0:
+            i = self.length + i
+        if i < 0:
+            raise IndexError('Negative assignment index out of range')
+        if i >= self.length:
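+            # grow the file, NaN-padding every position between the old end and index i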
+            self.file.seek( self.length * self.fmt_size )
+            self.file.write( struct.pack( self.fmt, self.default_value ) * ( i - self.length ) )
+            self.length = i + 1
+        self.file.seek( i * self.fmt_size )
+        self.file.write( struct.pack( self.fmt, value ) )
+
+    def __len__( self ):
+        return self.length
+
+    def __repr__( self ):
+        return "[ %s ]" % ", ".join( str( self[i] ) for i in range( self.length ) )
+
+
+class FileBinnedArrayDir( DictMixin ):
+    """
+    Adapter that makes a directory of FileBinnedArray files look like
+    a regular dict of BinnedArray objects.
+    """
+    def __init__( self, dir ):
+        self.dir = dir
+        self.cache = dict()
+
+    def __getitem__( self, key ):
+        value = None
+        if key in self.cache:
+            value = self.cache[key]
+        else:
+            fname = os.path.join( self.dir, "%s.ba" % key )
+            if os.path.exists( fname ):
+                value = FileBinnedArray( open( fname ) )
+                self.cache[key] = value
+        if value is None:
+            raise KeyError( "File does not exist: " + fname )
+        return value
+
+
+def stop_err(msg):
+    sys.stderr.write(msg)
+    sys.exit(1)
+
+
+def load_scores_wiggle( fname, chrom_buffer_size=3 ):
+    """
+    Read a wiggle file and return a dict of BinnedArray objects keyed
+    by chromosome.
+    """
+    scores_by_chrom = dict()
+    try:
+        for chrom, pos, val in bx.wiggle.Reader( UCSCOutWrapper( open( fname ) ) ):
+            if chrom not in scores_by_chrom:
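+                # keep the first chrom_buffer_size chromosomes in memory; spill the rest to disk-backed arrays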
+                if chrom_buffer_size:
+                    scores_by_chrom[chrom] = BinnedArray()
+                    chrom_buffer_size -= 1
+                else:
+                    scores_by_chrom[chrom] = PositionalScoresOnDisk()
+            scores_by_chrom[chrom][pos] = val
+    except UCSCLimitException:
+        # Wiggle data was truncated, at the very least need to warn the user.
+        print('Encountered message from UCSC: "Reached output limit of 100000 data values", so be aware your data was truncated.')
+    except IndexError:
+        stop_err('Data error: one or more column data values is missing in "%s"' % fname)
+    except ValueError:
+        stop_err('Data error: invalid data type for one or more values in "%s".' % fname)
+    return scores_by_chrom
+
+
+def load_scores_ba_dir( dir ):
+    """
+    Return a dict-like object (keyed by chromosome) that returns
+    FileBinnedArray objects created from "key.ba" files in `dir`
+    """
+    return FileBinnedArrayDir( dir )
+
+
+def main():
+
+    # Parse command line
+    options, args = doc_optparse.parse( __doc__ )
+
+    try:
+        score_fname = args[0]
+        interval_fname = args[1]
+        chrom_col = args[2]
+        start_col = args[3]
+        stop_col = args[4]
+        if len( args ) > 5:
+            out_file = open( args[5], 'w' )
+        else:
+            out_file = sys.stdout
+        binned = bool( options.binned )
+        mask_fname = options.mask
+    except:
+        doc_optparse.exit()
+
+    if score_fname == 'None':
+        stop_err( 'This tool works with data from genome builds hg16, hg17 or hg18.  Click the pencil icon in your history item to set the genome build if appropriate.' )
+
+    try:
+        chrom_col = int(chrom_col) - 1
+        start_col = int(start_col) - 1
+        stop_col = int(stop_col) - 1
+    except:
+        stop_err( 'Chrom, start & end column not properly set, click the pencil icon in your history item to set these values.' )
+
+    if chrom_col < 0 or start_col < 0 or stop_col < 0:
+        stop_err( 'Chrom, start & end column not properly set, click the pencil icon in your history item to set these values.' )
+
+    if binned:
+        scores_by_chrom = load_scores_ba_dir( score_fname )
+    else:
+        try:
+            chrom_buffer = int( options.chrom_buffer )
+        except:
+            chrom_buffer = 3
+        scores_by_chrom = load_scores_wiggle( score_fname, chrom_buffer )
+
+    if mask_fname:
+        masks = binned_bitsets_from_file( open( mask_fname ) )
+    else:
+        masks = None
+
+    skipped_lines = 0
+    first_invalid_line = 0
+    invalid_line = ''
+
+    for i, line in enumerate( open( interval_fname )):
+        valid = True
+        line = line.rstrip('\r\n')
+        if line and not line.startswith( '#' ):
+            fields = line.split()
+
+            try:
+                chrom, start, stop = fields[chrom_col], int( fields[start_col] ), int( fields[stop_col] )
+            except:
+                valid = False
+                skipped_lines += 1
+                if not invalid_line:
+                    first_invalid_line = i + 1
+                    invalid_line = line
+            if valid:
+                total = 0
+                count = 0
+                min_score = float( 'inf' )
+                max_score = float( '-inf' )
+                for j in range( start, stop ):
+                    if chrom in scores_by_chrom:
+                        try:
+                            # Skip if base is masked
+                            if masks and chrom in masks:
+                                if masks[chrom][j]:
+                                    continue
+                            # Get the score, only count if not 'nan'
+                            score = scores_by_chrom[chrom][j]
+                            if not isnan( score ):
+                                total += score
+                                count += 1
+                                max_score = max( score, max_score )
+                                min_score = min( score, min_score )
+                        except:
+                            continue
+                if count > 0:
+                    avg = total / count
+                else:
+                    avg = "nan"
+                    min_score = "nan"
+                    max_score = "nan"
+
+                # Build the resulting line of data
+                out_line = []
+                for k in range(0, len(fields)):
+                    out_line.append(fields[k])
+                out_line.append(avg)
+                out_line.append(min_score)
+                out_line.append(max_score)
+
+                print("\t".join( map( str, out_line ) ), file=out_file)
+            else:
+                skipped_lines += 1
+                if not invalid_line:
+                    first_invalid_line = i + 1
+                    invalid_line = line
+        elif line.startswith( '#' ):
+            # We'll save the original comments
+            print(line, file=out_file)
+
+    out_file.close()
+
+    if skipped_lines > 0:
+        print('Data issue: skipped %d invalid lines starting at line #%d which is "%s"' % ( skipped_lines, first_invalid_line, invalid_line ))
+        if skipped_lines == i:
+            print('Consider changing the metadata for the input dataset by clicking on the pencil icon in the history item.')
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/stats/filtering.py b/tools/stats/filtering.py
new file mode 100644
index 0000000..068b89c
--- /dev/null
+++ b/tools/stats/filtering.py
@@ -0,0 +1,263 @@
+#!/usr/bin/env python
+# This tool takes a tab-delimited text file as input and creates filters on columns based on certain properties.
+# The tool will skip over invalid lines within the file, informing the user about the number of lines skipped.
+from __future__ import division, print_function
+
+import re
+import sys
+from ast import Module, parse, walk
+
+AST_NODE_TYPE_WHITELIST = [
+    'Expr', 'Load', 'Str', 'Num', 'BoolOp', 'Compare', 'And', 'Eq', 'NotEq',
+    'Or', 'GtE', 'LtE', 'Lt', 'Gt', 'BinOp', 'Add', 'Div', 'Sub', 'Mult', 'Mod',
+    'Pow', 'LShift', 'RShift', 'BitAnd', 'BitOr', 'BitXor', 'UnaryOp', 'Invert',
+    'Not', 'NotIn', 'In', 'Is', 'IsNot', 'List', 'Index', 'Subscript',
+    # Further checks
+    'Name', 'Call', 'Attribute',
+]
+
+
+BUILTIN_AND_MATH_FUNCTIONS = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'.split('|')
+STRING_AND_LIST_METHODS = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
+VALID_FUNCTIONS = BUILTIN_AND_MATH_FUNCTIONS + STRING_AND_LIST_METHODS
+
+
+def __check_name( ast_node ):
+    name = ast_node.id
+    if re.match(r'^c\d+$', name):
+        return True
+    return name in VALID_FUNCTIONS
+
+
+def __check_attribute( ast_node ):
+    attribute_name = ast_node.attr
+    if attribute_name not in STRING_AND_LIST_METHODS:
+        return False
+    return True
+
+
+def __check_call( ast_node ):
+    # If we are calling a function or method, it better be a math,
+    # string or list function.
+    ast_func = ast_node.func
+    ast_func_class = ast_func.__class__.__name__
+    if ast_func_class == 'Name':
+        if ast_func.id not in BUILTIN_AND_MATH_FUNCTIONS:
+            return False
+    elif ast_func_class == 'Attribute':
+        if not __check_attribute( ast_func ):
+            return False
+    else:
+        return False
+
+    return True
+
+
+def check_expression( text ):
+    """
+
+    >>> check_expression("c1=='chr1' and c3-c2>=2000 and c6=='+'")
+    True
+    >>> check_expression("eval('1+1')")
+    False
+    >>> check_expression("import sys")
+    False
+    >>> check_expression("[].__str__")
+    False
+    >>> check_expression("__builtins__")
+    False
+    >>> check_expression("'x' in globals")
+    False
+    >>> check_expression("'x' in [1,2,3]")
+    True
+    >>> check_expression("c3=='chr1' and c5>5")
+    True
+    >>> check_expression("c3=='chr1' and d5>5")  # Invalid d5 reference
+    False
+    >>> check_expression("c3=='chr1' and c5>5 or exec")
+    False
+    >>> check_expression("type(c1) != type(1)")
+    True
+    >>> check_expression("c1.split(',')[1] == '1'")
+    True
+    >>> check_expression("exec 1")
+    False
+    >>> check_expression("str(c2) in [\\\"a\\\",\\\"b\\\"]")
+    True
+    """
+    try:
+        module = parse( text )
+    except SyntaxError:
+        return False
+
+    if not isinstance(module, Module):
+        return False
+    statements = module.body
+    if not len( statements ) == 1:
+        return False
+    expression = statements[0]
+    if expression.__class__.__name__ != 'Expr':
+        return False
+
+    for ast_node in walk( expression ):
+        ast_node_class = ast_node.__class__.__name__
+
+        # Toss out everything that is not a "simple" expression,
+        # imports, error handling, etc...
+        if ast_node_class not in AST_NODE_TYPE_WHITELIST:
+            return False
+
+        # Apply extra checks to the potentially dangerous white-listed AST element types.
+        if ast_node_class == 'Name':
+            # In order to prevent loading 'exec', 'eval', etc...
+            # put string restriction on names allowed.
+            if not __check_name( ast_node ):
+                return False
+        # Check only valid, white-listed functions are called.
+        elif ast_node_class == 'Call':
+            if not __check_call( ast_node ):
+                return False
+        # Check only valid, white-listed attributes are accessed
+        elif ast_node_class == 'Attribute':
+            if not __check_attribute( ast_node ):
+                return False
+
+    return True
+
+
+def get_operands( filter_condition ):
+    # Note that the order of all_operators is important
+    items_to_strip = ['+', '-', '**', '*', '//', '/', '%', '<<', '>>', '&', '|', '^', '~', '<=', '<', '>=', '>', '==', '!=', '<>', ' and ', ' or ', ' not ', ' is ', ' is not ', ' in ', ' not in ']
+    for item in items_to_strip:
+        if filter_condition.find( item ) >= 0:
+            filter_condition = filter_condition.replace( item, ' ' )
+    operands = set( filter_condition.split( ' ' ) )
+    return operands
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit(1)
+
+
+in_fname = sys.argv[1]
+out_fname = sys.argv[2]
+cond_text = sys.argv[3]
+try:
+    in_columns = int( sys.argv[4] )
+    assert sys.argv[5]  # check to see that the column types variable isn't null
+    in_column_types = sys.argv[5].split( ',' )
+except:
+    stop_err( "Data does not appear to be tabular.  This tool can only be used with tab-delimited data." )
+num_header_lines = int( sys.argv[6] )
+
+# Unescape if input has been escaped
+mapped_str = {
+    '__lt__': '<',
+    '__le__': '<=',
+    '__eq__': '==',
+    '__ne__': '!=',
+    '__gt__': '>',
+    '__ge__': '>=',
+    '__sq__': '\'',
+    '__dq__': '"',
+    '__ob__': '[',
+    '__cb__': ']',
+}
+for key, value in mapped_str.items():
+    cond_text = cond_text.replace( key, value )
+
+# Attempt to determine if the condition includes executable stuff and, if so, exit
+secured = dir()
+operands = get_operands(cond_text)
+for operand in operands:
+    try:
+        check = int( operand )
+    except:
+        if operand in secured:
+            stop_err( "Illegal value '%s' in condition '%s'" % ( operand, cond_text ) )
+
+if not check_expression(cond_text):
+    stop_err( "Illegal/invalid in condition '%s'" % ( cond_text ) )
+
+# Work out which columns are used in the filter (save using 1 based counting)
+used_cols = sorted(set(int(match.group()[1:])
+                   for match in re.finditer(r'c(\d+)', cond_text)))
+largest_col_index = max(used_cols)
+
+# Prepare the column variable names and wrappers for column data types. Only
+# cast columns used in the filter.
+cols, type_casts = [], []
+for col in range( 1, largest_col_index + 1 ):
+    col_name = "c%d" % col
+    cols.append( col_name )
+    col_type = in_column_types[ col - 1 ]
+    if col in used_cols:
+        type_cast = "%s(%s)" % ( col_type, col_name )
+    else:
+        # If we don't use this column, don't cast it.
+        # Otherwise we get errors on things like optional integer columns.
+        type_cast = col_name
+    type_casts.append( type_cast )
+
+col_str = ', '.join( cols )    # 'c1, c2, c3, c4'
+type_cast_str = ', '.join( type_casts )  # 'str(c1), int(c2), int(c3), str(c4)'
+assign = "%s, = line.split( '\\t' )[:%i]" % ( col_str, largest_col_index )
+wrap = "%s = %s" % ( col_str, type_cast_str )
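+# e.g., if the condition references c2 and c3 of a file typed (str, int, str, ...),
+# these templates expand to:
+#   assign = "c1, c2, c3, = line.split( '\t' )[:3]"
+#   wrap   = "c1, c2, c3 = c1, int(c2), str(c3)"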
+skipped_lines = 0
+invalid_lines = 0
+first_invalid_line = 0
+invalid_line = None
+lines_kept = 0
+total_lines = 0
+out = open( out_fname, 'wt' )
+
+# Read and filter input file, skipping invalid lines
+code = '''
+for i, line in enumerate( open( in_fname ) ):
+    total_lines += 1
+    line = line.rstrip( '\\r\\n' )
+
+    if i < num_header_lines:
+        lines_kept += 1
+        print( line, file=out )
+        continue
+
+    if not line or line.startswith( '#' ):
+        skipped_lines += 1
+        continue
+    try:
+        %s
+        %s
+        if %s:
+            lines_kept += 1
+            print( line, file=out )
+    except:
+        invalid_lines += 1
+        if not invalid_line:
+            first_invalid_line = i + 1
+            invalid_line = line
+''' % ( assign, wrap, cond_text )
+valid_filter = True
+try:
+    exec(code)
+except Exception as e:
+    out.close()
+    if str( e ).startswith( 'invalid syntax' ):
+        valid_filter = False
+        stop_err( 'Filter condition "%s" likely invalid. See tool tips, syntax and examples.' % cond_text )
+    else:
+        stop_err( str( e ) )
+
+if valid_filter:
+    out.close()
+    valid_lines = total_lines - skipped_lines
+    print('Filtering with %s, ' % cond_text)
+    if valid_lines > 0:
+        print('kept %4.2f%% of %d valid lines (%d total lines).' % ( 100.0 * lines_kept / valid_lines, valid_lines, total_lines ))
+    else:
+        print('Possible invalid filter condition "%s" or non-existent column referenced. See tool tips, syntax and examples.' % cond_text)
+    if invalid_lines:
+        print('Skipped %d invalid line(s) starting at line #%d: "%s"' % ( invalid_lines, first_invalid_line, invalid_line ))
+    if skipped_lines:
+        print('Skipped %i comment (starting with #) or blank line(s)' % skipped_lines)
diff --git a/tools/stats/filtering.xml b/tools/stats/filtering.xml
new file mode 100644
index 0000000..be86fc5
--- /dev/null
+++ b/tools/stats/filtering.xml
@@ -0,0 +1,90 @@
+<tool id="Filter1" name="Filter" version="1.1.0">
+  <description>data on any column using simple expressions</description>
+  <edam_operations>
+    <edam_operation>operation_0335</edam_operation>
+  </edam_operations>
+  <command interpreter="python">
+    filtering.py $input $out_file1 "$cond" ${input.metadata.columns} "${input.metadata.column_types}" $header_lines
+  </command>
+  <inputs>
+    <param format="tabular" name="input" type="data" label="Filter" help="Dataset missing? See TIP below."/>
+    <param name="cond" size="40" type="text" value="c1=='chr22'" label="With following condition" help="Double equal signs, ==, must be used as shown above. To filter for an arbitrary string, use the Select tool.">
+      <validator type="empty_field" message="Enter a valid filtering condition, see syntax and examples below."/>
+    </param>
+    <param name="header_lines" type="integer" value="0" label="Number of header lines to skip"/>
+  </inputs>
+  <outputs>
+    <data format="input" name="out_file1" metadata_source="input"/>
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="1.bed"/>
+      <param name="cond" value="c1=='chr22'"/>
+      <param name="header_lines" value="0"/>
+      <output name="out_file1" file="filter1_test1.bed"/>
+    </test>
+    <test>
+      <param name="input" value="7.bed"/>
+      <param name="cond" value="c1=='chr1' and c3-c2>=2000 and c6=='+'"/>
+      <param name="header_lines" value="0"/>
+      <output name="out_file1" file="filter1_test2.bed"/>
+    </test>
+    <!-- Test filtering of file with a variable number of columns. -->
+    <test>
+      <param name="input" value="filter1_in3.sam"/>
+      <param name="cond" value="c3=='chr1' and c5>5"/>
+      <param name="header_lines" value="0"/>
+      <output name="out_file1" file="filter1_test3.sam"/>
+    </test>
+    <test>
+      <param name="input" value="filter1_inbad.bed"/>
+      <param name="cond" value="c1=='chr22'"/>
+      <param name="header_lines" value="0"/>
+      <output name="out_file1" file="filter1_test4.bed"/>
+    </test>
+    <test>
+      <param name="input" value="filter1_in5.tab"/>
+      <param name="cond" value="c8>500"/>
+      <param name="header_lines" value="1"/>
+      <output name="out_file1" file="filter1_test5.tab"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+Double equal signs, ==, must be used as *"equal to"* (e.g., **c1 == 'chr22'**)
+
+.. class:: infomark
+
+**TIP:** Attempting to apply a filtering condition may throw exceptions if the data type (e.g., string, integer) in every line of the columns being filtered is not appropriate for the condition (e.g., attempting certain numerical calculations on strings).  If an exception is thrown when applying the condition to a line, that line is skipped as invalid for the filter condition.  The number of invalid skipped lines is documented in the resulting history item as a "Condition/data issue".
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert*
+
+-----
+
+**Syntax**
+
+The filter tool allows you to restrict the dataset using simple conditional statements.
+
+- Columns are referenced with **c** and a **number**. For example, **c1** refers to the first column of a tab-delimited file
+- Make sure that multi-character operators contain no white space ( e.g., **&lt;=** is valid while **&lt; =** is not valid )
+- When using the 'equal-to' operator, the **double equal sign '==' must be used** ( e.g., **c1=='chr1'** )
+- Non-numerical values must be included in single or double quotes ( e.g., **c6=='+'** )
+- The filtering condition can include logical operators, but **make sure operators are all lower case** ( e.g., **(c1!='chrX' and c1!='chrY') or not c6=='+'** )
+
+-----
+
+**Example**
+
+- **c1=='chr1'** selects lines in which the first column is chr1
+- **c3-c2&lt;100*c4** selects lines where subtracting column 2 from column 3 yields a value less than 100 times the value of column 4
+- **len(c2.split(',')) &lt; 4** will select lines where the second column has fewer than four comma-separated elements
+- **c2>=1** selects lines in which the value of column 2 is greater than or equal to 1
+- Numbers should not contain commas - **c2&lt;=44,554,350** will not work, but **c2&lt;=44554350** will
+- Some words in the data can be used, but must be single or double quoted ( e.g., **c3=='exon'** )
+
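+.. class:: infomark
+
+For illustration, each data line is tested against your condition roughly as in this simplified Python sketch (not the tool's exact code; real column values are cast according to the column types)::
+
+    import sys
+
+    cond = "c1=='chr22'"  # the condition you entered
+    for line in open('input.bed'):  # hypothetical input file
+        fields = line.rstrip('\r\n').split('\t')
+        c1, c2, c3 = fields[0], int(fields[1]), int(fields[2])
+        if eval(cond):
+            sys.stdout.write(line)
+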
+</help>
+</tool>
diff --git a/tools/stats/grouping.py b/tools/stats/grouping.py
new file mode 100644
index 0000000..cbad9d4
--- /dev/null
+++ b/tools/stats/grouping.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python
+# Guruprasad Ananda
+# Refactored 2011 to use numpy instead of rpy, Kanwei Li
+"""
+This tool provides the SQL "group by" functionality.
+"""
+from __future__ import print_function
+
+import random
+import subprocess
+import sys
+import tempfile
+from itertools import groupby
+
+import numpy
+
+
+def stop_err(msg):
+    sys.stderr.write(msg)
+    sys.exit()
+
+
+def mode(data):
+    counts = {}
+    for x in data:
+        counts[x] = counts.get(x, 0) + 1
+    maxcount = max(counts.values())
+    modelist = []
+    for x in counts:
+        if counts[x] == maxcount:
+            modelist.append( str(x) )
+    return ','.join(modelist)
+
+
+def main():
+    inputfile = sys.argv[2]
+    ignorecase = int(sys.argv[4])
+    ops = []
+    cols = []
+    round_val = []
+
+    if sys.argv[5] != "None":
+        asciitodelete = sys.argv[5]
+        if asciitodelete:
+            oldfile = open(inputfile, 'r')
+            newinputfile = "input_cleaned.tsv"
+            newfile = open(newinputfile, 'w')
+            asciitodelete = asciitodelete.split(',')
+            for i in range(len(asciitodelete)):
+                asciitodelete[i] = chr(int(asciitodelete[i]))
+            for line in oldfile:
+                if line[0] not in asciitodelete:
+                    newfile.write(line)
+            oldfile.close()
+            newfile.close()
+            inputfile = newinputfile
+
+    for var in sys.argv[6:]:
+        op, col, do_round = var.split()
+        ops.append(op)
+        cols.append(col)
+        round_val.append(do_round)
+    """
+    At this point, ops, cols and rounds will look something like this:
+    ops:  ['mean', 'min', 'c']
+    cols: ['1', '3', '4']
+    round_val: ['no', 'yes', 'no']
+    """
+
+    try:
+        group_col = int(sys.argv[3]) - 1
+    except (IndexError, ValueError):
+        stop_err( "Group column not specified." )
+
+    tmpfile = tempfile.NamedTemporaryFile()
+
+    try:
+        """
+        The -k option for the Posix sort command is as follows:
+        -k, --key=POS1[,POS2]
+        start a key at POS1, end it at POS2 (origin 1)
+        In other words, column positions start at 1 rather than 0, so
+        we need to add 1 to group_col.
+        if POS2 is not specified, the newer versions of sort will consider the entire line for sorting. To prevent this, we set POS2=POS1.
+        """
+        case = ''
+        if ignorecase == 1:
+            case = '-f'
+        command_line = "sort -t '	' %s -k%s,%s -o %s %s" % (case, group_col + 1, group_col + 1, tmpfile.name, inputfile)
+    except Exception as exc:
+        stop_err( 'Initialization error -> %s' % str(exc) )
+
+    try:
+        subprocess.check_output(command_line, stderr=subprocess.STDOUT, shell=True)
+    except subprocess.CalledProcessError as e:
+        stop_err( "Sorting input dataset resulted in error: %s: %s" % ( e.returncode, e.output ))
+
+    fout = open(sys.argv[1], "w")
+
+    def is_new_item(line):
+        try:
+            item = line.strip().split("\t")[group_col]
+        except IndexError:
+            stop_err( "The following line didn't have %s columns: %s" % (group_col + 1, line) )
+
+        if ignorecase == 1:
+            return item.lower()
+        return item
+
+    for key, line_list in groupby(tmpfile, key=is_new_item):
+        op_vals = [ [] for _ in ops ]
+        out_str = key
+
+        for line in line_list:
+            fields = line.strip().split("\t")
+            for i, col in enumerate(cols):
+                col = int(col) - 1  # cXX from galaxy is 1-based
+                try:
+                    val = fields[col].strip()
+                    op_vals[i].append(val)
+                except IndexError:
+                    sys.stderr.write( 'Could not access the value for column %s on line: "%s". Make sure file is tab-delimited.\n' % (col + 1, line) )
+                    sys.exit( 1 )
+
+        # Generate string for each op for this group
+        for i, op in enumerate( ops ):
+            data = op_vals[i]
+            rval = ""
+            if op == "mode":
+                rval = mode( data )
+            elif op == "length":
+                rval = len( data )
+            elif op == "random":
+                rval = random.choice(data)
+            elif op in ['cat', 'cat_uniq']:
+                if op == 'cat_uniq':
+                    data = numpy.unique(data)
+                rval = ','.join(data)
+            elif op == "unique":
+                rval = len( numpy.unique(data) )
+            else:
+                # some kind of numpy fn
+                try:
+                    data = [float(_) for _ in data]
+                except ValueError:
+                    sys.stderr.write( "Operation %s expected number values but got %s instead.\n" % (op, data) )
+                    sys.exit( 1 )
+                rval = getattr(numpy, op)( data )
+                if round_val[i] == 'yes':
+                    rval = int(round(rval))
+                else:
+                    rval = '%g' % rval
+            out_str += "\t%s" % rval
+
+        fout.write(out_str + "\n")
+
+    # Generate a useful info message.
+    msg = "--Group by c%d: " % (group_col + 1)
+    for i, op in enumerate(ops):
+        if op == 'cat':
+            op = 'concat'
+        elif op == 'cat_uniq':
+            op = 'concat_distinct'
+        elif op == 'length':
+            op = 'count'
+        elif op == 'unique':
+            op = 'count_distinct'
+        elif op == 'random':
+            op = 'randomly_pick'
+
+        msg += op + "[c" + cols[i] + "] "
+
+    print(msg)
+    fout.close()
+    tmpfile.close()
+
+
+if __name__ == "__main__":
+    main()
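+
+# Example invocation (hypothetical file names), following the argv layout
+# parsed in main() above -- output file, input file, group column (1-based),
+# ignore-case flag, comma-separated ASCII codes of line-start characters to
+# skip (or "None"), then one 'operation column round' triple per aggregate:
+#
+#     python grouping.py out.tsv in.tsv 1 0 None 'mean 2 no' 'cat_uniq 4 no'
+#
+# This groups in.tsv on column 1 (case-sensitive) and writes, per group, the
+# mean of column 2 and the distinct concatenation of column 4 to out.tsv.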
diff --git a/tools/stats/grouping.xml b/tools/stats/grouping.xml
new file mode 100644
index 0000000..a4e646d
--- /dev/null
+++ b/tools/stats/grouping.xml
@@ -0,0 +1,142 @@
+<tool id="Grouping1" name="Group" version="2.1.1">
+  <description>data by a column and perform aggregate operations on other columns.</description>
+  <command interpreter="python">
+    grouping.py 
+      "${out_file1}"
+      "${input1}"
+      "${groupcol}"
+      "${ignorecase}"
+      "${ignorelines}"
+      #for $op in $operations
+       '${op.optype}
+        ${op.opcol}
+        ${op.opround}'
+      #end for
+  </command>
+  <inputs>
+    <param format="tabular" name="input1" type="data" label="Select data" help="Dataset missing? See TIP below."/>
+    <param name="groupcol" label="Group by column" type="data_column" data_ref="input1" />
+    <param name="ignorecase" type="boolean" truevalue="1" falsevalue="0">
+      <label>Ignore case while grouping?</label>
+    </param>
+    <param name="ignorelines" type="select" display="checkboxes" multiple="True" label="Ignore lines beginning with these characters" help="lines beginning with these are not grouped">
+            <option value="62">></option>
+            <option value="64">@</option>
+            <option value="43">+</option>
+            <option value="60"><</option>
+            <option value="42">*</option>
+            <option value="45">-</option>
+            <option value="61">=</option>
+            <option value="124">|</option>
+            <option value="63">?</option>
+            <option value="36">$</option>
+            <option value="46">.</option>
+            <option value="58">:</option>
+            <option value="38">&</option>
+            <option value="37">%</option>
+            <option value="94">^</option>
+            <option value="35">#</option>
+    </param>
+    <repeat name="operations" title="Operation">
+      <param name="optype" type="select" label="Type">
+        <option value="mean">Mean</option>
+        <option value="median">Median</option>
+        <option value="mode">Mode</option>
+        <option value="max">Maximum</option>
+        <option value="min">Minimum</option>
+        <option value="sum">Sum</option>
+        <option value="length">Count</option>
+        <option value="unique">Count Distinct</option>
+        <option value="cat">Concatenate</option>
+        <option value="cat_uniq">Concatenate Distinct</option>
+        <option value="random">Randomly pick</option>
+        <option value="std">Standard deviation</option>
+      </param>
+      <param name="opcol" label="On column" type="data_column" data_ref="input1" />
+      <param name="opround" type="select" label="Round result to nearest integer?">
+         <option value="no">NO</option>
+         <option value="yes">YES</option>
+       </param>
+    </repeat>
+  </inputs>
+  <outputs>
+    <data format="tabular" name="out_file1" />
+  </outputs>
+  <requirements>
+    <requirement type="python-module">numpy</requirement>
+  </requirements>
+  <tests>
+    <!-- Test valid data -->
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="groupcol" value="1"/>
+      <param name="ignorecase" value="true"/>
+      <param name="optype" value="mean"/>
+      <param name="opcol" value="2"/>
+      <param name="opround" value="no"/>
+      <output name="out_file1" file="groupby_out1.dat"/>
+    </test>
+    <!-- A longer test case, but the test framework doesn't support it yet
+    <test>
+      <param name="input1" value="1.bed"/>
+      <param name="groupcol" value="1"/>
+      <param name="ignorecase" value="false"/>
+      <param name="operations" value='[{"opcol": "2", "__index__": 0, "optype": "mean", "opround": "no"}, {"opcol": "2", "__index__": 1, "optype": "median", "opround": "no"}, {"opcol": "6", "__index__": 2, "optype": "mode", "opround": "no"}, {"opcol": "2", "__index__": 3, "optype": "max", "opround": "no"}, {"opcol": "2", "__index__": 4, "optype": "min", "opround": "no"}, {"opcol": "2", "__index__": 5, "optype": "sum", "opround": "no"}, {"opcol": "1", "__index__": 6, "optype": "length", " [...]
+      <output name="out_file1" file="groupby_out3.tabular"/>
+    </test>
+    -->
+    <!-- Test data with an invalid value in a column. Disabled because the test framework doesn't support testing for errors
+    <test>
+      <param name="input1" value="1.tabular"/>
+      <param name="groupcol" value="1"/>
+      <param name="ignorecase" value="true"/>
+      <param name="optype" value="mean"/>
+      <param name="opcol" value="2"/>
+      <param name="opround" value="no"/>
+      <output name="out_file1" file="groupby_out2.dat"/>
+    </test>
+     -->
+  </tests>
+  <help>
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert delimiters to TAB*
+
+-----
+
+**Syntax**
+
+This tool allows you to group the input dataset by a particular column and perform aggregate functions on any other column(s): Mean, Median, Mode, Maximum, Minimum, Sum, Count, Count Distinct, Concatenate, Concatenate Distinct, Randomly pick, and Standard deviation.
+
+The Concatenate function will take, for each group, each item in the specified column and build a comma-delimited list. Concatenate Distinct will do the same, but builds a list of unique items with no repetition.
+
+Count and Count Distinct are equivalent to Concatenate and Concatenate Distinct, but only count the number of items and return an integer.
+
+- If multiple modes are present, all are reported.
+
+-----
+
+**Example**
+
+- For the following input::
+
+   chr22  1000  1003  TTT
+   chr22  2000  2003  aaa
+   chr10  2200  2203  TTT
+   chr10  1200  1203  ttt
+   chr22  1600  1603  AAA
+
+- **Grouping on column 4** while ignoring case, and performing operation **Count on column 1** will return::
+
+   AAA    2
+   TTT    3
+   
+- **Grouping on column 4** while not ignoring case, and performing operation **Count on column 1** will return::
+
+   aaa    1
+   AAA    1
+   ttt    1
+   TTT    2
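+
+-----
+
+.. class:: infomark
+
+For illustration, the grouping step behaves like this simplified Python sketch (not the tool's exact code): rows are sorted on the grouping column and each run of equal keys is then aggregated::
+
+    from itertools import groupby
+
+    def key(fields):
+        return fields[3].lower()  # group column 4, ignoring case
+
+    rows = [line.rstrip('\n').split('\t') for line in open('input.tab')]  # hypothetical input
+    for group_key, group in groupby(sorted(rows, key=key), key=key):
+        print('%s\t%d' % (group_key, len(list(group))))  # the Count operation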
+  </help>
+</tool>
diff --git a/tools/stats/gsummary.py b/tools/stats/gsummary.py
new file mode 100755
index 0000000..0ede475
--- /dev/null
+++ b/tools/stats/gsummary.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import re
+import sys
+import tempfile
+try:
+    from rpy2.rpy_classic import BASIC_CONVERSION, NO_CONVERSION, r, RException, set_default_mode
+except ImportError:
+    # rpy is unmaintained and does not work with R > 3.0; use it only as a
+    # fallback when rpy2 is unavailable
+    from rpy import BASIC_CONVERSION, NO_CONVERSION, r, RException, set_default_mode
+
+
+def stop_err( msg ):
+    sys.stderr.write( msg )
+    sys.exit()
+
+
+def S3_METHODS( all="key" ):
+    Group_Math = [ "abs", "sign", "sqrt", "floor", "ceiling", "trunc", "round", "signif",
+        "exp", "log", "cos", "sin", "tan", "acos", "asin", "atan", "cosh", "sinh", "tanh",
+        "acosh", "asinh", "atanh", "lgamma", "gamma", "gammaCody", "digamma", "trigamma",
+        "cumsum", "cumprod", "cummax", "cummin", "c" ]
+    Group_Ops = [ "+", "-", "*", "/", "^", "%%", "%/%", "&", "|", "!", "==", "!=", "<", "<=", ">=", ">", "(", ")", "~", "," ]
+    if all == "key":
+        return { 'Math': Group_Math, 'Ops': Group_Ops }
+
+
+def main():
+    try:
+        datafile = sys.argv[1]
+        outfile_name = sys.argv[2]
+        expression = sys.argv[3]
+    except IndexError:
+        stop_err( 'Usage: python gsummary.py input_file output_file expression' )
+
+    math_allowed = S3_METHODS()[ 'Math' ]
+    ops_allowed = S3_METHODS()[ 'Ops' ]
+
+    # Check for invalid expressions
+    for word in re.compile( '[a-zA-Z]+' ).findall( expression ):
+        if word and word not in math_allowed:
+            stop_err( "Invalid expression '%s': term '%s' is not recognized or allowed" % ( expression, word ) )
+    symbols = set()
+    for symbol in re.compile( r'[^a-zA-Z0-9\s]+' ).findall( expression ):
+        if symbol and symbol not in ops_allowed:
+            stop_err( "Invalid expression '%s': operator '%s' is not recognized or allowed" % ( expression, symbol ) )
+        else:
+            symbols.add( symbol )
+    if len( symbols ) == 1 and ',' in symbols:
+        # User may have entered a comma-separated list of columns
+        stop_err( "Invalid columns '%s': this tool requires a single column or expression" % expression )
+
+    # Find all column references in the expression
+    cols = []
+    for col in re.compile( 'c[0-9]+' ).findall( expression ):
+        try:
+            cols.append( int( col[1:] ) - 1 )
+        except ValueError:
+            pass
+
+    tmp_file = tempfile.NamedTemporaryFile( 'w+b' )
+    # Write the R header row to the temporary file
+    hdr_str = "\t".join( "c%s" % str( col + 1 ) for col in cols )
+    tmp_file.write( "%s\n" % hdr_str )
+    skipped_lines = 0
+    first_invalid_line = 0
+    i = 0
+    for i, line in enumerate( open( datafile ) ):
+        line = line.rstrip( '\r\n' )
+        if line and not line.startswith( '#' ):
+            valid = True
+            fields = line.split( '\t' )
+            # Write the R data row to the temporary file
+            for col in cols:
+                try:
+                    float( fields[ col ] )
+                except (IndexError, ValueError):
+                    skipped_lines += 1
+                    if not first_invalid_line:
+                        first_invalid_line = i + 1
+                    valid = False
+                    break
+            if valid:
+                data_str = "\t".join( fields[ col ] for col in cols )
+                tmp_file.write( "%s\n" % data_str )
+    tmp_file.flush()
+
+    if skipped_lines == i + 1:
+        stop_err( "Invalid column or column data values invalid for computation.  See tool tips and syntax for data requirements." )
+    else:
+        # summary function and return labels
+        set_default_mode( NO_CONVERSION )
+        summary_func = r( "function( x ) { c( sum=sum( as.numeric( x ), na.rm=T ), mean=mean( as.numeric( x ), na.rm=T ), stdev=sd( as.numeric( x ), na.rm=T ), quantile( as.numeric( x ), na.rm=TRUE ) ) }" )
+        headings = [ 'sum', 'mean', 'stdev', '0%', '25%', '50%', '75%', '100%' ]
+        headings_str = "\t".join( headings )
+
+        r_data_frame = r.read_table( tmp_file.name, header=True, sep="\t" )
+
+        outfile = open( outfile_name, 'w' )
+
+        for col in re.compile( 'c[0-9]+' ).findall( expression ):
+            r.assign( col, r[ "$" ]( r_data_frame, col ) )
+        try:
+            summary = summary_func( r( expression ) )
+        except RException as s:
+            outfile.close()
+            stop_err( "Computation resulted in the following error: %s" % str( s ) )
+        summary = summary.as_py( BASIC_CONVERSION )
+        outfile.write( "#%s\n" % headings_str )
+        if type(summary) is dict:
+            # using rpy
+            outfile.write( "%s\n" % "\t".join( [ "%g" % summary[k] for k in headings ] ) )
+        else:
+            # using rpy2
+            outfile.write( "%s\n" % "\t".join( [ "%g" % k for k in summary ] ) )
+        outfile.close()
+
+        if skipped_lines:
+            print("Skipped %d invalid lines beginning with line #%d.  See tool tips for data requirements." % ( skipped_lines, first_invalid_line ))
+
+
+if __name__ == "__main__":
+    main()
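+
+# Example invocation (hypothetical file names): summary statistics on the
+# natural log of column 5 of a tab-delimited file:
+#
+#     python gsummary.py input.tabular output.tabular "log(c5)"
+#
+# output.tabular then contains a "#sum mean stdev 0% 25% 50% 75% 100%" header
+# line followed by one line of computed values.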
diff --git a/tools/stats/gsummary.xml b/tools/stats/gsummary.xml
new file mode 100644
index 0000000..86c7407
--- /dev/null
+++ b/tools/stats/gsummary.xml
@@ -0,0 +1,82 @@
+<tool id="Summary_Statistics1" name="Summary Statistics" version="1.1.1">
+  <description>for any numerical column</description>
+  <edam_topics>
+    <edam_topic>topic_2269</edam_topic>
+  </edam_topics>
+  <requirements>
+    <requirement type="package" version="2.7.8">rpy2</requirement>
+  </requirements>
+  <stdio>
+    <exit_code range="1" level="fatal" />
+  </stdio>
+  <command>python $__tool_directory__/gsummary.py "$input" "$out_file1" "$cond"</command>
+  <inputs>
+    <param format="tabular" name="input" type="data" label="Summary statistics on" help="Dataset missing? See TIP below"/>
+    <param name="cond" size="30" type="text" value="c5" label="Column or expression" help="See syntax below">
+      <validator type="empty_field" message="Enter a valid column or expression, see syntax below for examples"/>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="tabular" name="out_file1" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="1.bed"/>
+      <output name="out_file1" file="gsummary_out1.tabular"/>
+      <param name="cond" value="c2"/>
+    </test>
+  </tests>
+  <help>
+
+.. class:: warningmark
+
+This tool expects input datasets consisting of tab-delimited columns (blank or comment lines beginning with a # character are automatically skipped).
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert delimiters to TAB*
+
+.. class:: infomark
+
+**TIP:** Computing summary statistics may throw exceptions if the data value in every line of the columns being summarized is not numerical.  If a line is missing a value or contains a non-numerical value in the column being summarized, that line is skipped and the value is not included in the statistical computation.  The number of invalid skipped lines is documented in the resulting history item.
+
+.. class:: infomark
+
+**USING R FUNCTIONS:** Most functions (like *abs*) take only a single expression. *log* can take one or two parameters, like *log(expression,base)*
+
+Currently, these R functions are supported: *abs, sign, sqrt, floor, ceiling, trunc, round, signif, exp, log, cos, sin, tan, acos, asin, atan, cosh, sinh, tanh, acosh, asinh, atanh, lgamma, gamma, gammaCody, digamma, trigamma, cumsum, cumprod, cummax, cummin*
+
+-----
+
+**Syntax**
+
+This tool computes basic summary statistics on a given column, or on a valid expression containing one or more columns.
+
+- Columns are referenced with **c** and a **number**. For example, **c1** refers to the first column of a tab-delimited file.
+
+- For example:
+
+  - **log(c5)** calculates the summary statistics for the natural log of column 5
+  - **(c5 + c6 + c7) / 3** calculates the summary statistics on the average of columns 5-7
+  - **log(c5,10)** summary statistics of the base 10 log of column 5
+  - **sqrt(c5+c9)** summary statistics of the square root of column 5 + column 9
+
+-----
+
+**Examples**
+
+- Input Dataset::
+
+    c1      c2      c3      c4      c5              c6
+    586     chrX    161416  170887  41108_at        16990
+    73      chrX    505078  532318  35073_at        1700
+    595     chrX    1361578 1388460 33665_s_at      1960
+    74      chrX    1420620 1461919 1185_at         8600
+
+- Summary Statistics on column c6 of the above input dataset::
+
+    #sum       mean      stdev     0%        25%       50%       75%        100%
+    29250.000  7312.500  7198.636  1700.000  1895.000  5280.000  10697.500  16990.000
+
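+.. class:: infomark
+
+For reference, the row above corresponds to this numpy sketch (an illustration only; the tool itself computes the statistics in R, whose *sd* and default *quantile* match numpy's sample standard deviation and linear interpolation)::
+
+    import numpy
+
+    values = numpy.array([16990., 1700., 1960., 8600.])  # column c6 above
+    row = [values.sum(), values.mean(), values.std(ddof=1)]
+    row += [numpy.percentile(values, q) for q in (0, 25, 50, 75, 100)]
+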
+</help>
+</tool>
diff --git a/tools/stats/gsummary.xml.groups b/tools/stats/gsummary.xml.groups
new file mode 100644
index 0000000..218ab31
--- /dev/null
+++ b/tools/stats/gsummary.xml.groups
@@ -0,0 +1,62 @@
+<tool id="Summary Statistics1" name="Summary Statistics">
+  <description>of a column in a tab delimited file according to an expression</description>
+  <command interpreter="python">gsummary.py $input $out_file1 "$cond" "$groups"</command>
+  <inputs>
+    <param name="cond" size="40" type="text" value="c5" label="expression"/>
+    <param name="groups" size="40" type="text" value="none" label="group terms (c1,c4,etc.)"/>
+    <param format="txt" name="input" type="data" label="summary statistics on"/>
+
+  </inputs>
+  <outputs>
+    <data format="txt" name="out_file1" />
+  </outputs>
+  <help>
+
+.. class:: warningmark
+
+This tool expects input datasets to consist of tab-delimited columns (blank or comment lines beginning with a # character are automatically skipped).
+
+.. class:: infomark
+
+**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert delimiters to TAB*
+
+.. class:: infomark
+
+**TIP:** Computing summary statistics may throw exceptions if the data value in every line of the columns being summarized is not numerical.  If a line is missing a value or contains a non-numerical value in the column being summarized, that line is skipped and the value is not included in the statistical computation.  The number of invalid skipped lines is documented in the resulting history item.
+
+**Syntax**
+
+This tool computes basic summary statistics on a given column, or on an expression containing one or more columns
+
+- Columns are referenced with **c** and a **number**. For example, **c1** refers to the first column of a tab-delimited file
+- To group the summary by the values in a column or columns, specify in the **group terms** box...
+    + **c1**  *group by the values in column 1*
+    + **c1,c4** *group by the values in column 1, then by the values in column 4*
+
+
+-----
+
+**Expression examples**
+
+- **log(c5)** calculates the summary statistics for the natural log of column 5
+- **(c5 + c6 + c7) / 3** calculates the summary statistics on the average of columns 5-7
+- **log(c5,10)** summary statistics of the base 10 log of column 5
+- **sqrt(c5+c9)** summary statistics of the square root of column 5 + column 9
+
+**Group examples**
+
+- **c1**  group by the values in column 1
+- **c1,c4** group by the values in column 1, then by the values in column 4
+
+-----
+
+.. class:: infomark
+
+**TIP:** Most functions (like *abs*) take only a single expression. *log* can take one or two parameters, like *log(expression,base)* 
+
+Currently, these R functions are supported: *abs, sign, sqrt, floor, ceiling, trunc, round, signif, exp, log, cos, sin, tan, acos, asin, atan, cosh, sinh, tanh, acosh, asinh, atanh, lgamma, gamma, gammaCody, digamma, trigamma, cumsum, cumprod, cummax, cummin*
+
+.. |INFO| image:: ./static/images/icon_info_sml.gif
+
+</help>
+</tool>
diff --git a/tools/stats/r_wrapper.sh b/tools/stats/r_wrapper.sh
new file mode 100644
index 0000000..d1ddf1e
--- /dev/null
+++ b/tools/stats/r_wrapper.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+### Run R providing the R script in $1 as standard input and passing 
+### the remaining arguments on the command line
+
+# Function that writes a message to stderr and exits
+fail()
+{
+    echo "$@" >&2
+    exit 1
+}
+
+# Ensure R executable is found
+which R > /dev/null || fail "'R' is required by this tool but was not found on the PATH"
+
+# Extract first argument
+infile=$1; shift
+
+# Ensure the file exists
+test -f "$infile" || fail "R input file '$infile' does not exist"
+
+# Invoke R passing file named by first argument to stdin
+R --vanilla --slave "$@" < "$infile"
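+
+# Example (hypothetical file names): run myscript.R, which can read the two
+# trailing arguments via commandArgs():
+#
+#     sh r_wrapper.sh myscript.R input.tabular output.tabular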
diff --git a/tools/visualization/LAJ.py b/tools/visualization/LAJ.py
new file mode 100644
index 0000000..19635a8
--- /dev/null
+++ b/tools/visualization/LAJ.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+"""
+Copies LAV file over to new file for use with LAJ
+"""
+import shutil
+import sys
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+shutil.copyfile(sys.argv[1], sys.argv[2])
diff --git a/tools/visualization/LAJ.xml b/tools/visualization/LAJ.xml
new file mode 100644
index 0000000..aa4943d
--- /dev/null
+++ b/tools/visualization/LAJ.xml
@@ -0,0 +1,42 @@
+<tool id="laj_1" name="LAJ" version="1.0.0">
+<description>Pairwise Alignment Viewer</description>
+  <command interpreter="python">LAJ.py $maf_input $out_file1</command>
+  <inputs>
+      <param name="maf_input" type="data" format="lav" label="Alignment File" optional="False"/>
+      <param name="seq_file1" type="data" format="fasta" label="First Sequence File" optional="True"/>
+      <param name="seq_file2" type="data" format="fasta" label="Second Sequence File" optional="True"/>
+      <param name="exonfile" type="data" format="txt" label="Exon File" optional="True"/>
+      <param name="repeatfile" type="data" format="txt" label="Repeat File" optional="True"/>
+      <param name="annotationfile" type="data" format="txt" label="Annotation File" optional="True"/>
+      <param name="underlayfile" type="data" format="txt" label="Underlay File" optional="True"/>
+      <param name="highlightfile" type="data" format="txt" label="Highlight File" optional="True"/>
+  </inputs>
+  <outputs>
+    <data name="out_file1" format="laj"/>
+  </outputs>
+<help>
+You can use this tool to view a set of LAV alignments.  You may include FASTA-formatted sequences for both species.
+
+For detailed information on LAJ, click here_.
+
+.. _here: http://globin.cse.psu.edu/dist/laj/
+
+Laj is a tool for viewing and manipulating the output from pairwise alignment programs such as blastz. It can display interactive dotplot, pip, and text representations of the alignments, a diagram showing the locations of exons and repeats, and annotation links to other web sites containing additional information about particular regions.
+
+.. class:: infomark
+
+**Note:** If you save output from the applet, you will need to manually refresh your history.
+
+  </help>
+  <code file="LAJ_code.py"/>
+  <citations>
+    <citation type="bibtex">
+      @misc{Miller2005,
+author = {Miller Lab},
+year = {2005},
+title = {Laj},
+url = {http://globin.bx.psu.edu/dist/laj/},
+}
+    </citation>
+  </citations>
+</tool>
diff --git a/tools/visualization/LAJ_code.py b/tools/visualization/LAJ_code.py
new file mode 100644
index 0000000..ee8f84c
--- /dev/null
+++ b/tools/visualization/LAJ_code.py
@@ -0,0 +1,42 @@
+# Post-processing: add sequence and additional annotation info if available
+from six.moves.urllib.parse import urlencode
+
+from galaxy.datatypes.images import create_applet_tag_peek
+
+
+def exec_after_process(app, inp_data, out_data, param_dict, tool, stdout, stderr):
+    primary_data = next(iter(out_data.values()))
+
+    # default params for LAJ type
+    params = {
+        "alignfile1": "display?id=%s" % primary_data.id,
+        "buttonlabel": "Launch LAJ",
+        "title": "LAJ in Galaxy",
+        "posturl": "history_add_to?%s" % urlencode( { 'history_id': primary_data.history_id, 'ext': 'lav', 'name': 'LAJ Output', 'info': 'Added by LAJ', 'dbkey': primary_data.dbkey } )
+    }
+    for name, data in inp_data.items():
+        if name == "maf_input":
+            params["alignfile1"] = "display?id=%s" % data.id
+        elif name == "seq_file1" and data.state == data.states.OK and data.has_data():
+            params["file1seq1"] = "display?id=%s" % data.id
+        elif name == "seq_file2" and data.state == data.states.OK and data.has_data():
+            params["file1seq2"] = "display?id=%s" % data.id
+        elif name == "exonfile" and data.state == data.states.OK and data.has_data():
+            params["exonfile"] = "display?id=%s" % data.id
+        elif name == "repeatfile" and data.state == data.states.OK and data.has_data():
+            params["repeatfile"] = "display?id=%s" % data.id
+        elif name == "annotationfile" and data.state == data.states.OK and data.has_data():
+            params["annotationfile"] = "display?id=%s" % data.id
+        elif name == "underlayfile" and data.state == data.states.OK and data.has_data():
+            params["underlayfile"] = "display?id=%s" % data.id
+        elif name == "highlightfile" and data.state == data.states.OK and data.has_data():
+            params["highlightfile"] = "display?id=%s" % data.id
+
+    if "file1seq1" not in params and "file1seq2" not in params:
+        params["noseq"] = "true"
+
+    class_name = "edu.psu.cse.bio.laj.LajApplet.class"
+    archive = "/static/laj/laj.jar"
+    primary_data.peek = create_applet_tag_peek( class_name, archive, params )
+    app.model.context.add( primary_data )
+    app.model.context.flush()
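+
+# For illustration, with a hypothetical dataset id of 42 in history 7 and
+# dbkey hg17, the posturl built above urlencodes to something like:
+#
+#     history_add_to?history_id=7&ext=lav&name=LAJ+Output&info=Added+by+LAJ&dbkey=hg17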
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..bc91dd8
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,90 @@
+[tox]
+envlist = py27-lint, py27-lint-imports, py27-lint-imports-include-list, py27-unit, qunit, mako-count, web-controller-line-count, py33-lint, py34-lint, py35-lint, validate-test-tools, py27-lint-docstring-include-list, py27-lint-docstring
+skipsdist = True
+
+[testenv:py27-lint]
+commands = bash .ci/flake8_wrapper.sh
+whitelist_externals = bash
+deps =
+    flake8==3.2.1
+    flake8-docstrings==1.0.2
+
+[testenv:py33-lint]
+commands = bash .ci/flake8_py3_wrapper.sh
+whitelist_externals = bash
+deps = flake8
+
+[testenv:py34-lint]
+commands = bash .ci/flake8_py3_wrapper.sh
+whitelist_externals = bash
+deps = flake8==3.2.1
+
+[testenv:py35-lint]
+commands = bash .ci/flake8_py3_wrapper.sh
+whitelist_externals = bash
+deps = flake8
+
+[testenv:py27-unit]
+commands = bash run_tests.sh --no-create-venv -u
+whitelist_externals = bash
+deps =
+    nose
+    NoseHTML
+    mock
+
+# Set up tox environments for linting all of Galaxy for imports and
+# just a subset we expect to pass (the include import list). Once the
+# include list is reduced to just the inverse of Galaxy's linting
+# blacklist these can both just be removed and flake8-import-order can
+# be added as a dependency to Galaxy's main linting task.
+[testenv:py27-lint-imports]
+commands = bash .ci/flake8_wrapper.sh
+whitelist_externals = bash
+skip_install = True
+deps =
+    flake8
+    flake8-import-order>=0.9
+
+[testenv:py27-lint-imports-include-list]
+commands = bash .ci/flake8_wrapper_imports.sh
+whitelist_externals = bash
+skip_install = True
+deps =
+    flake8==3.0.4
+    flake8-import-order==0.11
+
+[testenv:qunit]
+commands = bash run_tests.sh -q
+whitelist_externals = bash
+
+[testenv:mako-count]
+commands = bash .ci/check_mako.sh
+whitelist_externals = bash
+
+[testenv:web-controller-line-count]
+commands = bash .ci/check_controller.sh
+whitelist_externals = bash
+
+[testenv:first_startup]
+commands = bash .ci/first_startup.sh
+whitelist_externals = bash
+
+[testenv:validate-test-tools]
+commands = bash .ci/validate_test_tools.sh
+whitelist_externals = bash
+
+[testenv:py27-lint-docstring]
+commands = bash .ci/flake8_wrapper_docstrings.sh --exclude
+whitelist_externals = bash
+skip_install = True
+deps =
+    flake8
+    flake8-docstrings==1.0.2
+
+[testenv:py27-lint-docstring-include-list]
+commands = bash .ci/flake8_wrapper_docstrings.sh --include
+whitelist_externals = bash
+skip_install = True
+deps =
+    flake8
+    flake8-docstrings==1.0.2
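+
+# Individual environments can also be run directly, e.g.:
+#
+#     tox -e py27-lint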

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/galaxy.git



More information about the debian-med-commit mailing list